diff --git a/_/css/site.css b/_/css/site.css deleted file mode 100644 index 794c0176..00000000 --- a/_/css/site.css +++ /dev/null @@ -1,3 +0,0 @@ -@font-face{font-family:Roboto;font-style:normal;font-weight:400;src:local("Roboto Regular"),local("Roboto-Regular"),url(../font/roboto-latin-400.woff2) format("woff2"),url(../font/roboto-latin-400.woff) format("woff")}@font-face{font-family:Roboto;font-style:italic;font-weight:400;src:local("Roboto Italic"),local("Roboto-Italic"),url(../font/roboto-latin-400italic.woff2) format("woff2"),url(../font/roboto-latin-400italic.woff) format("woff")}@font-face{font-family:Roboto;font-style:normal;font-weight:500;src:local("Roboto Medium"),local("Roboto-Medium"),url(../font/roboto-latin-500.woff2) format("woff2"),url(../font/roboto-latin-500.woff) format("woff")}@font-face{font-family:Roboto;font-style:italic;font-weight:500;src:local("Roboto Medium Italic"),local("Roboto-MediumItalic"),url(../font/roboto-latin-500italic.woff2) format("woff2"),url(../font/roboto-latin-500italic.woff) format("woff")}@font-face{font-family:Roboto Mono;font-style:normal;font-weight:400;src:local("Roboto Mono Regular"),local("RobotoMono-Regular"),url(../font/roboto-mono-latin-400.woff2) format("woff2"),url(../font/roboto-mono-latin-400.woff) format("woff")}@font-face{font-family:Roboto Mono;font-style:normal;font-weight:500;src:local("Roboto Mono Medium"),local("RobotoMono-Medium"),url(../font/roboto-mono-latin-500.woff2) format("woff2"),url(../font/roboto-mono-latin-500.woff) format("woff")}*,::after,::before{-webkit-box-sizing:inherit;box-sizing:inherit}html{-webkit-box-sizing:border-box;box-sizing:border-box;font-size:1.0625em;height:100%}@media screen and (min-width:1024px){html{font-size:1.125em}}body{background:#fff;color:#222;font-family:Roboto,sans-serif;line-height:1.15;margin:0;word-wrap:anywhere}a{text-decoration:none}a:hover{text-decoration:underline}a:active{background-color:none}code,kbd,pre{font-family:Roboto 
Mono,monospace}b,dt,strong,th{font-weight:500}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}em em{font-style:normal}strong strong{font-weight:400}button{cursor:pointer;font-family:inherit;font-size:1em;line-height:1.15;margin:0}button::-moz-focus-inner{border:none;padding:0}summary{cursor:pointer;-webkit-tap-highlight-color:transparent;outline:none}table{border-collapse:collapse;word-wrap:normal}object[type="image/svg+xml"]:not([width]){width:-webkit-fit-content;width:-moz-fit-content;width:fit-content}@supports (scrollbar-width:thin){body *{scrollbar-width:thin;scrollbar-color:#c1c1c1 transparent}}body ::-webkit-scrollbar{height:.25rem;width:.25rem}body ::-webkit-scrollbar-thumb{background-color:#c1c1c1}@media screen and (min-width:1024px){.body{display:-webkit-box;display:-ms-flexbox;display:flex}}.nav-container{position:fixed;top:3.5rem;left:0;width:100%;font-size:.94444rem;z-index:1;visibility:hidden}@media screen and (min-width:769px){.nav-container{width:15rem}}@media screen and (min-width:1024px){.nav-container{font-size:.86111rem;-webkit-box-flex:0;-ms-flex:none;flex:none;position:static;top:0;visibility:visible}}.nav-container.is-active{visibility:visible}.nav{background:#fafafa;position:relative;top:2.5rem;height:calc(100vh - 6rem)}@media screen and (min-width:769px){.nav{-webkit-box-shadow:.5px 0 3px #c1c1c1;box-shadow:.5px 0 3px #c1c1c1}}@media screen and (min-width:1024px){.nav{top:3.5rem;-webkit-box-shadow:none;box-shadow:none;position:sticky;height:calc(100vh - 3.5rem)}}.nav .panels{display:-webkit-box;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-ms-flex-direction:column;flex-direction:column;height:inherit}html.is-clipped--nav{overflow-y:hidden}.nav-panel-menu{overflow-y:scroll;-ms-scroll-chaining:none;overscroll-behavior:none;height:calc(100% - 2.5rem)}.nav-panel-menu:not(.is-active) 
.nav-menu{opacity:.75}.nav-panel-menu:not(.is-active)::after{content:"";background:rgba(0,0,0,.5);display:block;position:absolute;top:0;right:0;bottom:0;left:0}.nav-menu{min-height:100%;padding:.5rem .75rem;line-height:1.35;position:relative}.nav-menu h3.title{color:#424242;font-size:inherit;font-weight:500;margin:0;padding:.25em 0 .125em}.nav-menu a{color:inherit}.nav-list{margin:0 0 0 .75rem;padding:0}.nav-menu>.nav-list+.nav-list{margin-top:.5rem}.nav-item{list-style:none;margin-top:.5em}.nav-item-toggle~.nav-list{padding-bottom:.125rem}.nav-item[data-depth="0"]>.nav-list:first-child{display:block;margin:0}.nav-item:not(.is-active)>.nav-list{display:none}.nav-item-toggle{background:transparent url(../img/caret.svg) no-repeat 50%/50%;border:none;outline:none;line-height:inherit;position:absolute;height:1.35em;width:1.35em;margin-top:-.05em;margin-left:-1.35em}.nav-item.is-active>.nav-item-toggle{-webkit-transform:rotate(90deg);transform:rotate(90deg)}.is-current-page>.nav-link,.is-current-page>.nav-text{font-weight:500}.nav-panel-explore{background:#fafafa;display:-webkit-box;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-ms-flex-direction:column;flex-direction:column;position:absolute;top:0;right:0;bottom:0;left:0}.nav-panel-explore:not(:first-child){top:auto;max-height:calc(50% + 2.5rem)}.nav-panel-explore .context{font-size:.83333rem;-ms-flex-negative:0;flex-shrink:0;color:#5d5d5d;-webkit-box-shadow:0 -1px 0 #e1e1e1;box-shadow:0 -1px 0 #e1e1e1;padding:0 .5rem;display:-webkit-box;display:-ms-flexbox;display:flex;-webkit-box-align:center;-ms-flex-align:center;align-items:center;-webkit-box-pack:justify;-ms-flex-pack:justify;justify-content:space-between;cursor:pointer;line-height:1;height:2.5rem}.nav-panel-explore .context .version{display:-webkit-box;display:-ms-flexbox;display:flex;-webkit-box-align:inherit;-ms-flex-align:inherit;align-items:inherit}.nav-panel-explore .context 
.version::after{content:"";background:url(../img/chevron.svg) no-repeat 100%/auto 100%;width:1.25em;height:.75em}.nav-panel-explore .components{line-height:1.6;-webkit-box-flex:1;-ms-flex-positive:1;flex-grow:1;-webkit-box-shadow:inset 0 1px 5px #e1e1e1;box-shadow:inset 0 1px 5px #e1e1e1;background:#f0f0f0;padding:.5rem .75rem 0;margin:0;overflow-y:scroll;max-height:100%;display:block}.nav-panel-explore:not(.is-active) .components{display:none}.nav-panel-explore .component{display:block}.nav-panel-explore .component+.component{margin-top:.5rem}.nav-panel-explore .component:last-child{margin-bottom:.75rem}.nav-panel-explore .component .title{font-weight:500;color:inherit}.nav-panel-explore .versions{display:-webkit-box;display:-ms-flexbox;display:flex;-ms-flex-wrap:wrap;flex-wrap:wrap;list-style:none;padding-left:0;margin-top:-.25rem;line-height:1}.nav-panel-explore .component .version{display:block;margin:.375rem .375rem 0 0}.nav-panel-explore .component .version a{border:1px solid #c1c1c1;border-radius:.25rem;color:inherit;opacity:.75;white-space:nowrap;padding:.125em .25em;display:inherit}.nav-panel-explore .component .is-current a{border-color:currentColor;opacity:.9;font-weight:500}@media screen and (max-width:1023.5px){aside.toc.sidebar{display:none}main>.content{overflow-x:auto}}@media screen and (min-width:1024px){main{-webkit-box-flex:1;-ms-flex:auto;flex:auto;min-width:0}main>.content{display:-webkit-box;display:-ms-flexbox;display:flex}aside.toc.embedded{display:none}aside.toc.sidebar{-webkit-box-flex:0;-ms-flex:0 0 9rem;flex:0 0 9rem;-webkit-box-ordinal-group:2;-ms-flex-order:1;order:1}}@media screen and (min-width:1216px){aside.toc.sidebar{-ms-flex-preferred-size:12rem;flex-basis:12rem}}.toolbar{color:#5d5d5d;-webkit-box-align:center;-ms-flex-align:center;align-items:center;background-color:#fafafa;-webkit-box-shadow:0 1px 0 #e1e1e1;box-shadow:0 1px 0 
#e1e1e1;display:-webkit-box;display:-ms-flexbox;display:flex;font-size:.83333rem;height:2.5rem;-webkit-box-pack:start;-ms-flex-pack:start;justify-content:flex-start;position:sticky;top:3.5rem;z-index:2}.toolbar a{color:inherit}.nav-toggle{background:url(../img/menu.svg) no-repeat 50% 47.5%;background-size:49%;border:none;outline:none;line-height:inherit;padding:0;height:2.5rem;width:2.5rem;margin-right:-.25rem}@media screen and (min-width:1024px){.nav-toggle{display:none}}.nav-toggle.is-active{background-image:url(../img/back.svg);background-size:41.5%}.home-link{display:block;background:url(../img/home-o.svg) no-repeat 50%;height:1.25rem;width:1.25rem;margin:.625rem}.home-link.is-current,.home-link:hover{background-image:url(../img/home.svg)}.edit-this-page{display:none;padding-right:.5rem}@media screen and (min-width:1024px){.edit-this-page{display:block}}.toolbar .edit-this-page a{color:#8e8e8e}.breadcrumbs{display:none;-webkit-box-flex:1;-ms-flex:1 1;flex:1 1;padding:0 .5rem 0 .75rem;line-height:1.35}@media screen and (min-width:1024px){.breadcrumbs{display:block}}a+.breadcrumbs{padding-left:.05rem}.breadcrumbs ul{display:-webkit-box;display:-ms-flexbox;display:flex;-ms-flex-wrap:wrap;flex-wrap:wrap;margin:0;padding:0;list-style:none}.breadcrumbs li{display:inline;margin:0}.breadcrumbs li::after{content:"/";padding:0 .5rem}.breadcrumbs li:last-of-type::after{content:none}.page-versions{margin:0 .2rem 0 auto;position:relative;line-height:1}@media screen and (min-width:1024px){.page-versions{margin-right:.7rem}}.page-versions .version-menu-toggle{color:inherit;background:url(../img/chevron.svg) no-repeat;background-position:right .5rem top 50%;background-size:auto .75em;border:none;outline:none;line-height:inherit;padding:.5rem 1.5rem .5rem .5rem;position:relative;z-index:3}.page-versions 
.version-menu{display:-webkit-box;display:-ms-flexbox;display:flex;min-width:100%;-webkit-box-orient:vertical;-webkit-box-direction:normal;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:end;-ms-flex-align:end;align-items:flex-end;background:-webkit-gradient(linear,left top,left bottom,from(#f0f0f0),to(#f0f0f0)) no-repeat;background:linear-gradient(180deg,#f0f0f0 0,#f0f0f0) no-repeat;padding:1.375rem 1.5rem .5rem .5rem;position:absolute;top:0;right:0;white-space:nowrap}.page-versions:not(.is-active) .version-menu{display:none}.page-versions .version{display:block;padding-top:.5rem}.page-versions .version.is-current{display:none}.page-versions .version.is-missing{color:#8e8e8e;font-style:italic;text-decoration:none}.toc-menu{color:#5d5d5d}.toc.sidebar .toc-menu{margin-right:.75rem;position:sticky;top:6rem}.toc .toc-menu h3{color:#333;font-size:.88889rem;font-weight:500;line-height:1.3;margin:0 -.5px;padding-bottom:.25rem}.toc.sidebar .toc-menu h3{display:-webkit-box;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-ms-flex-direction:column;flex-direction:column;height:2.5rem;-webkit-box-pack:end;-ms-flex-pack:end;justify-content:flex-end}.toc .toc-menu ul{font-size:.83333rem;line-height:1.2;list-style:none;margin:0;padding:0}.toc.sidebar .toc-menu ul{max-height:calc(100vh - 8.5rem);overflow-y:auto;-ms-scroll-chaining:none;overscroll-behavior:none;scrollbar-width:none}.toc .toc-menu ul::-webkit-scrollbar{width:0;height:0}@media screen and (min-width:1024px){.toc .toc-menu h3{font-size:.83333rem}.toc .toc-menu ul{font-size:.75rem}}.toc .toc-menu li{margin:0}.toc .toc-menu li[data-level="2"] a{padding-left:1.25rem}.toc .toc-menu li[data-level="3"] a{padding-left:2rem}.toc .toc-menu a{color:inherit;border-left:2px solid #e1e1e1;display:inline-block;padding:.25rem 0 .25rem .5rem;text-decoration:none}.sidebar.toc .toc-menu a{display:block;outline:none}.toc .toc-menu a:hover{color:#1565c0}.toc .toc-menu 
a.is-active{border-left-color:#1565c0;color:#333}.sidebar.toc .toc-menu a:focus{background:#fafafa}.toc .toc-menu .is-hidden-toc{display:none!important}.doc{color:#333;font-size:inherit;-webkit-hyphens:auto;-ms-hyphens:auto;hyphens:auto;line-height:1.6;margin:0 auto;max-width:40rem;padding:0 1rem 4rem}@media screen and (min-width:1024px){.doc{-webkit-box-flex:1;-ms-flex:auto;flex:auto;font-size:.94444rem;margin:0 2rem;max-width:46rem;min-width:0}}.doc h1,.doc h2,.doc h3,.doc h4,.doc h5,.doc h6{color:#191919;font-weight:400;-webkit-hyphens:none;-ms-hyphens:none;hyphens:none;line-height:1.3;margin:1rem 0 0}.doc>h1.page:first-child{font-size:2rem;margin:1.5rem 0}@media screen and (min-width:769px){.doc>h1.page:first-child{margin-top:2.5rem}}.doc>h1.page:first-child+aside.toc.embedded{margin-top:-.5rem}.doc>h2#name+.sectionbody{margin-top:1rem}#preamble+.sect1,.doc .sect1+.sect1{margin-top:2rem}.doc h1.sect0{background:#f0f0f0;font-size:1.8em;margin:1.5rem -1rem 0;padding:.5rem 1rem}.doc h2:not(.discrete){border-bottom:1px solid #e1e1e1;margin-left:-1rem;margin-right:-1rem;padding:.4rem 1rem .1rem}.doc h3:not(.discrete),.doc h4:not(.discrete){font-weight:500}.doc h1 .anchor,.doc h2 .anchor,.doc h3 .anchor,.doc h4 .anchor,.doc h5 .anchor,.doc h6 .anchor{position:absolute;text-decoration:none;width:1.75ex;margin-left:-1.5ex;visibility:hidden;font-size:.8em;font-weight:400;padding-top:.05em}.doc h1 .anchor::before,.doc h2 .anchor::before,.doc h3 .anchor::before,.doc h4 .anchor::before,.doc h5 .anchor::before,.doc h6 .anchor::before{content:"\00a7"}.doc h1:hover .anchor,.doc h2:hover .anchor,.doc h3:hover .anchor,.doc h4:hover .anchor,.doc h5:hover .anchor,.doc h6:hover .anchor{visibility:visible}.doc dl,.doc p{margin:0}.doc a{color:#1565c0}.doc a:hover{color:#104d92}.doc a.bare{-webkit-hyphens:none;-ms-hyphens:none;hyphens:none}.doc a.unresolved{color:#d32f2f}.doc i.fa{-webkit-hyphens:none;-ms-hyphens:none;hyphens:none;font-style:normal}.doc .colist>table code,.doc p 
code,.doc thead code{color:#222;background:#fafafa;border-radius:.25em;font-size:.95em;padding:.125em .25em}.doc code,.doc pre{-webkit-hyphens:none;-ms-hyphens:none;hyphens:none}.doc pre{font-size:.88889rem;line-height:1.5;margin:0}.doc blockquote{margin:0}.doc .paragraph.lead>p{font-size:1rem}.doc .right{float:right}.doc .left{float:left}.doc .stretch{width:100%}.doc .underline{text-decoration:underline}.doc .line-through{text-decoration:line-through}.doc .dlist,.doc .exampleblock,.doc .hdlist,.doc .imageblock,.doc .listingblock,.doc .literalblock,.doc .olist,.doc .paragraph,.doc .partintro,.doc .quoteblock,.doc .sidebarblock,.doc .ulist,.doc .verseblock,.doc details,.doc hr{margin:1rem 0 0}.doc table.tableblock{font-size:.83333rem;margin:1.5rem 0 0}.doc table.tableblock+*{margin-top:1.5rem}.doc p.tableblock+p.tableblock{margin-top:.5rem}.doc td.tableblock>.content>:first-child{margin-top:0}.doc table.tableblock td,.doc table.tableblock th{padding:.5rem}.doc table.tableblock,.doc table.tableblock>*>tr>*{border:0 solid #e1e1e1}.doc table.grid-all>*>tr>*{border-width:1px}.doc table.grid-cols>*>tr>*{border-width:0 1px}.doc table.grid-rows>*>tr>*{border-width:1px 0}.doc table.grid-all>thead th,.doc table.grid-rows>thead th{border-bottom-width:2.5px}.doc table.frame-all{border-width:1px}.doc table.frame-ends{border-width:1px 0}.doc table.frame-sides{border-width:0 1px}.doc table.frame-none>colgroup+*>:first-child>*,.doc table.frame-sides>colgroup+*>:first-child>*{border-top-width:0}.doc table.frame-sides>:last-child>:last-child>*{border-bottom-width:0}.doc table.frame-ends>*>tr>:first-child,.doc table.frame-none>*>tr>:first-child{border-left-width:0}.doc table.frame-ends>*>tr>:last-child,.doc table.frame-none>*>tr>:last-child{border-right-width:0}.doc table.stripes-all>tbody>tr,.doc table.stripes-even>tbody>tr:nth-of-type(2n),.doc table.stripes-hover>tbody>tr:hover,.doc table.stripes-odd>tbody>tr:nth-of-type(odd){background:#fafafa}.doc 
table.tableblock>tfoot{background:-webkit-gradient(linear,left top,left bottom,from(#f0f0f0),to(#fff));background:linear-gradient(180deg,#f0f0f0 0,#fff)}.doc .halign-left{text-align:left}.doc .halign-right{text-align:right}.doc .halign-center{text-align:center}.doc .valign-top{vertical-align:top}.doc .valign-bottom{vertical-align:bottom}.doc .valign-middle{vertical-align:middle}.doc .admonitionblock{margin:1.4rem 0 0}.doc .admonitionblock p,.doc .admonitionblock td.content{font-size:.88889rem}.doc .admonitionblock td.content>.title+*,.doc .admonitionblock td.content>:not(.title):first-child{margin-top:0}.doc .admonitionblock pre{font-size:.83333rem}.doc .admonitionblock>table{table-layout:fixed;position:relative;width:100%}.doc .admonitionblock td.content{padding:1rem 1rem .75rem;background:#fafafa;width:100%;word-wrap:anywhere}.doc .admonitionblock .icon{position:absolute;top:0;left:0;font-size:.83333rem;padding:0 .5rem;height:1.25rem;line-height:1;font-weight:500;text-transform:uppercase;border-radius:.45rem;-webkit-transform:translate(-.5rem,-50%);transform:translate(-.5rem,-50%)}.doc .admonitionblock.caution .icon{background-color:#a0439c;color:#fff}.doc .admonitionblock.important .icon{background-color:#d32f2f;color:#fff}.doc .admonitionblock.note .icon{background-color:#217ee7;color:#fff}.doc .admonitionblock.tip .icon{background-color:#41af46;color:#fff}.doc .admonitionblock.warning .icon{background-color:#e18114;color:#fff}.doc .admonitionblock .icon i{display:-webkit-inline-box;display:-ms-inline-flexbox;display:inline-flex;-webkit-box-align:center;-ms-flex-align:center;align-items:center;height:100%}.doc .admonitionblock .icon i::after{content:attr(title)}.doc .imageblock{display:-webkit-box;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:center;-ms-flex-align:center;align-items:center}.doc .image>img,.doc .image>object,.doc .image>svg,.doc 
.imageblock img,.doc .imageblock object,.doc .imageblock svg{display:inline-block;height:auto;max-width:100%;vertical-align:middle}.doc .image:not(.left):not(.right)>img{margin-top:-.2em}#preamble .abstract blockquote{background:#f0f0f0;border-left:5px solid #e1e1e1;color:#4a4a4a;font-size:.88889rem;padding:.75em 1em}.doc .quoteblock,.doc .verseblock{background:#fafafa;border-left:5px solid #5d5d5d;color:#5d5d5d}.doc .quoteblock{padding:.25rem 2rem 1.25rem}.doc .quoteblock .attribution{color:#8e8e8e;font-size:.83333rem;margin-top:.75rem}.doc .quoteblock blockquote{margin-top:1rem}.doc .quoteblock .paragraph{font-style:italic}.doc .quoteblock cite{padding-left:1em}.doc .verseblock{font-size:1.15em;padding:1rem 2rem}.doc .verseblock pre{font-family:inherit;font-size:inherit}.doc ol,.doc ul{margin:0;padding:0 0 0 2rem}.doc ol.none,.doc ol.unnumbered,.doc ol.unstyled,.doc ul.checklist,.doc ul.no-bullet,.doc ul.none,.doc ul.unstyled{list-style-type:none}.doc ol.unnumbered,.doc ul.no-bullet{padding-left:1.25rem}.doc ol.unstyled,.doc ul.unstyled{padding-left:0}.doc ul.circle,.doc ul.disc,.doc ul.square{list-style-type:square}.doc ol.arabic{list-style-type:decimal}.doc ol.decimal{list-style-type:decimal-leading-zero}.doc ol.loweralpha{list-style-type:lower-alpha}.doc ol.upperalpha{list-style-type:upper-alpha}.doc ol.lowerroman{list-style-type:lower-roman}.doc ol.upperroman{list-style-type:upper-roman}.doc ol.lowergreek{list-style-type:lower-greek}.doc ul.checklist{padding-left:.5rem}.doc ul.checklist p>i.fa-check-square-o:first-child,.doc ul.checklist p>i.fa-square-o:first-child{display:-webkit-inline-box;display:-ms-inline-flexbox;display:inline-flex;-webkit-box-pack:center;-ms-flex-pack:center;justify-content:center;width:1.25rem}.doc ul.checklist i.fa-check-square-o::before{content:"\2713"}.doc ul.checklist i.fa-square-o::before{content:"\274f"}.doc .dlist .dlist,.doc .dlist .olist,.doc .dlist .ulist,.doc .olist .dlist,.doc .olist .olist,.doc .olist .ulist,.doc .olist 
li+li,.doc .ulist .dlist,.doc .ulist .olist,.doc .ulist .ulist,.doc .ulist li+li{margin-top:.5rem}.doc .admonitionblock .listingblock,.doc .olist .listingblock,.doc .ulist .listingblock{padding:0}.doc .admonitionblock .title,.doc .exampleblock .title,.doc .imageblock .title,.doc .listingblock .title,.doc .literalblock .title,.doc .openblock .title,.doc .tableblock caption{color:#5d5d5d;font-size:.88889rem;font-weight:500;font-style:italic;-webkit-hyphens:none;-ms-hyphens:none;hyphens:none;letter-spacing:.01em;padding-bottom:.075rem;text-align:left}.doc .imageblock .title{margin-top:.5rem;padding-bottom:0}.doc details{margin-left:1rem}.doc details>summary{display:block;position:relative;line-height:1.6;margin-bottom:.5rem}.doc details>summary::-webkit-details-marker{display:none}.doc details>summary::before{content:"";border:solid transparent;border-left:solid;border-width:.3em 0 .3em .5em;position:absolute;top:.5em;left:-1rem;-webkit-transform:translateX(15%);transform:translateX(15%)}.doc details[open]>summary::before{border-color:currentColor transparent transparent;border-width:.5rem .3rem 0;-webkit-transform:translateY(15%);transform:translateY(15%)}.doc details>summary::after{content:"";width:1rem;height:1em;position:absolute;top:.3em;left:-1rem}.doc details.result{margin-top:.25rem}.doc details.result>summary{color:#5d5d5d;font-style:italic;margin-bottom:0}.doc details.result>.content{margin-left:-1rem}.doc .exampleblock>.content,.doc details.result>.content{background:#fff;border:.25rem solid #5d5d5d;border-radius:.5rem;padding:.75rem}.doc .exampleblock>.content::after,.doc details.result>.content::after{content:"";display:table;clear:both}.doc .exampleblock>.content>:first-child,.doc details>.content>:first-child{margin-top:0}.doc .sidebarblock{background:#e1e1e1;border-radius:.75rem;padding:.75rem 1.5rem}.doc .sidebarblock>.content>.title{font-size:1.25rem;font-weight:500;line-height:1.3;margin-bottom:-.3em;text-align:center}.doc 
.sidebarblock>.content>:not(.title):first-child{margin-top:0}.doc .listingblock.wrap pre,.doc .tableblock pre{white-space:pre-wrap}.doc .listingblock pre:not(.highlight),.doc .literalblock pre,.doc pre.highlight code{background:#fafafa;-webkit-box-shadow:inset 0 0 1.75px #e1e1e1;box-shadow:inset 0 0 1.75px #e1e1e1;display:block;overflow-x:auto;padding:.75rem}.doc pre.highlight{position:relative}.doc .source-toolbox{display:-webkit-box;display:-ms-flexbox;display:flex;visibility:hidden;position:absolute;top:.25rem;right:.5rem;color:grey;font-family:Roboto,sans-serif;font-size:.75rem;line-height:1}.doc .listingblock:hover .source-toolbox{visibility:visible}.doc .source-toolbox .source-lang{text-transform:uppercase;letter-spacing:.075em;font-size:.96em;line-height:1.0425}.doc .source-toolbox>:not(:last-child)::after{content:"|";letter-spacing:0;padding:0 1ch}.doc .source-toolbox .copy-button{display:-webkit-box;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:center;-ms-flex-align:center;align-items:center;background:transparent;border:none;color:inherit;outline:none;padding:0;font-size:inherit;line-height:inherit;width:1em;height:1em}.doc .source-toolbox .copy-icon{-webkit-box-flex:0;-ms-flex:none;flex:none;width:inherit;height:inherit}.doc .source-toolbox img.copy-icon{-webkit-filter:invert(50.2%);filter:invert(50.2%)}.doc .source-toolbox svg.copy-icon{fill:currentColor}.doc .source-toolbox .copy-toast{-webkit-box-flex:0;-ms-flex:none;flex:none;position:relative;display:-webkit-inline-box;display:-ms-inline-flexbox;display:inline-flex;-webkit-box-pack:center;-ms-flex-pack:center;justify-content:center;margin-top:1em;background-color:#333;border-radius:.25em;padding:.5em;color:#fff;cursor:auto;opacity:0;-webkit-transition:opacity .5s ease .75s;transition:opacity .5s ease .75s}.doc .source-toolbox 
.copy-toast::after{content:"";position:absolute;top:0;width:1em;height:1em;border:.55em solid transparent;border-left-color:#333;-webkit-transform:rotate(-90deg) translateX(50%) translateY(50%);transform:rotate(-90deg) translateX(50%) translateY(50%);-webkit-transform-origin:left;transform-origin:left}.doc .source-toolbox .copy-button.clicked .copy-toast{opacity:1;-webkit-transition:none;transition:none}.doc .language-console .hljs-meta{-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.doc .dlist dt{font-style:italic}.doc .dlist dd{margin:0 0 .25rem 1.5rem}.doc .dlist dd:last-of-type{margin-bottom:0}.doc td.hdlist1,.doc td.hdlist2{padding:.5rem 0 0;vertical-align:top}.doc tr:first-child>.hdlist1,.doc tr:first-child>.hdlist2{padding-top:0}.doc td.hdlist1{font-weight:500;padding-right:.25rem}.doc td.hdlist2{padding-left:.25rem}.doc .colist{font-size:.88889rem;margin:.25rem 0 -.25rem}.doc .colist>table>tbody>tr>:first-child,.doc .colist>table>tr>:first-child{padding:.25em .5rem 0;vertical-align:top}.doc .colist>table>tbody>tr>:last-child,.doc .colist>table>tr>:last-child{padding:.25rem 0}.doc .conum[data-value]{border:1px solid;border-radius:100%;display:inline-block;font-family:Roboto,sans-serif;font-size:.75rem;font-style:normal;line-height:1.2;text-align:center;width:1.25em;height:1.25em;letter-spacing:-.25ex;text-indent:-.25ex}.doc .conum[data-value]::after{content:attr(data-value)}.doc .conum[data-value]+b{display:none}.doc hr{border:solid #e1e1e1;border-width:2px 0 0;height:0}.doc b.button{white-space:nowrap}.doc b.button::before{content:"[";padding-right:.25em}.doc b.button::after{content:"]";padding-left:.25em}.doc kbd{display:inline-block;font-size:.66667rem;background:#fafafa;border:1px solid #c1c1c1;border-radius:.25em;-webkit-box-shadow:0 1px 0 #c1c1c1,0 0 0 .1em #fff inset;box-shadow:0 1px 0 #c1c1c1,inset 0 0 0 .1em #fff;padding:.25em .5em;vertical-align:text-bottom;white-space:nowrap}.doc .keyseq,.doc 
kbd{line-height:1}.doc .keyseq{font-size:.88889rem}.doc .keyseq kbd{margin:0 .125em}.doc .keyseq kbd:first-child{margin-left:0}.doc .keyseq kbd:last-child{margin-right:0}.doc .menuseq,.doc .path{-webkit-hyphens:none;-ms-hyphens:none;hyphens:none}.doc .menuseq i.caret::before{content:"\203a";font-size:1.1em;font-weight:500;line-height:.90909}.doc :not(pre).nowrap{white-space:nowrap}.doc .nobreak{-webkit-hyphens:none;-ms-hyphens:none;hyphens:none;word-wrap:normal}#footnotes{font-size:.85em;line-height:1.5;margin:2rem -.5rem 0}.doc td.tableblock>.content #footnotes{margin:2rem 0 0}#footnotes hr{border-top-width:1px;margin-top:0;width:20%}#footnotes .footnote{margin:.5em 0 0 1em}#footnotes .footnote+.footnote{margin-top:.25em}#footnotes .footnote>a:first-of-type{display:inline-block;margin-left:-2em;text-align:right;width:1.5em}nav.pagination{border-top:1px solid #e1e1e1;line-height:1;margin:2rem -1rem -1rem;padding:.75rem 1rem 0}nav.pagination,nav.pagination span{display:-webkit-box;display:-ms-flexbox;display:flex}nav.pagination span{-webkit-box-flex:50%;-ms-flex:50%;flex:50%;-webkit-box-orient:vertical;-webkit-box-direction:normal;-ms-flex-direction:column;flex-direction:column}nav.pagination .prev{padding-right:.5rem}nav.pagination .next{margin-left:auto;padding-left:.5rem;text-align:right}nav.pagination span::before{color:#8e8e8e;font-size:.75em;padding-bottom:.1em}nav.pagination .prev::before{content:"Prev"}nav.pagination .next::before{content:"Next"}nav.pagination a{font-weight:500;line-height:1.3;position:relative}nav.pagination a::after,nav.pagination a::before{color:#8e8e8e;font-weight:400;font-size:1.5em;line-height:.75;position:absolute;top:0;width:1rem}nav.pagination .prev a::before{content:"\2039";-webkit-transform:translateX(-100%);transform:translateX(-100%)}nav.pagination .next 
a::after{content:"\203a"}html.is-clipped--navbar{overflow-y:hidden}body{padding-top:3.5rem}.navbar{background:#191919;color:#fff;font-size:.88889rem;height:3.5rem;position:fixed;top:0;width:100%;z-index:4}.navbar a{text-decoration:none}.navbar-brand{display:-webkit-box;display:-ms-flexbox;display:flex;-webkit-box-flex:1;-ms-flex:auto;flex:auto;padding-left:1rem}.navbar-brand .navbar-item:first-child{-ms-flex-item-align:center;align-self:center;padding:0;color:#fff;font-size:1.22222rem;-ms-flex-wrap:wrap;flex-wrap:wrap;line-height:1}.navbar-brand .navbar-item:first-child a{color:inherit;word-wrap:normal}.navbar-brand .navbar-item:first-child :not(:last-child){padding-right:.375rem}.navbar-brand .navbar-item.search{-webkit-box-flex:1;-ms-flex:auto;flex:auto;-webkit-box-pack:end;-ms-flex-pack:end;justify-content:flex-end}#search-input{color:#333;font-family:inherit;font-size:.95rem;width:150px;border:1px solid #dbdbdb;border-radius:.1em;line-height:1.5;padding:0 .25em}.navbar-burger{background:none;border:none;outline:none;line-height:1;position:relative;width:3rem;padding:0;display:-webkit-box;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:center;-ms-flex-align:center;align-items:center;-webkit-box-pack:center;-ms-flex-pack:center;justify-content:center;margin-left:auto;min-width:0}.navbar-burger span{background-color:#fff;height:1.5px;width:1rem}.navbar-burger:not(.is-active) span{-webkit-transition:opacity 0s .25s,margin-top .25s ease-out .25s,-webkit-transform .25s ease-out;transition:opacity 0s .25s,margin-top .25s ease-out .25s,-webkit-transform .25s ease-out;transition:transform .25s ease-out,opacity 0s .25s,margin-top .25s ease-out .25s;transition:transform .25s ease-out,opacity 0s .25s,margin-top .25s ease-out .25s,-webkit-transform .25s ease-out}.navbar-burger span+span{margin-top:.25rem}.navbar-burger.is-active 
span+span{margin-top:-1.5px}.navbar-burger.is-active span:first-child{-webkit-transform:rotate(45deg);transform:rotate(45deg)}.navbar-burger.is-active span:nth-child(2){opacity:0}.navbar-burger.is-active span:nth-child(3){-webkit-transform:rotate(-45deg);transform:rotate(-45deg)}.navbar-item,.navbar-link{color:#222;display:block;line-height:1.6;padding:.5rem 1rem}.navbar-item.has-dropdown{padding:0}.navbar-item .icon{width:1.25rem;height:1.25rem;display:block}.navbar-item .icon img,.navbar-item .icon svg{fill:currentColor;width:inherit;height:inherit}.navbar-link{padding-right:2.5em}.navbar-dropdown .navbar-item{padding-left:1.5rem;padding-right:1.5rem}.navbar-divider{background-color:#e1e1e1;border:none;height:1px;margin:.25rem 0}.navbar .button{display:-webkit-inline-box;display:-ms-inline-flexbox;display:inline-flex;-webkit-box-align:center;-ms-flex-align:center;align-items:center;background:#fff;border:1px solid #e1e1e1;border-radius:.15rem;height:1.75rem;color:#222;padding:0 .75em;white-space:nowrap}@media screen and (max-width:768.5px){.navbar-brand .navbar-item.search{padding-left:0;padding-right:0}}@media screen and (min-width:769px){#search-input{width:200px}}@media screen and (max-width:1023.5px){.navbar-brand{height:inherit}.navbar-brand .navbar-item{-webkit-box-align:center;-ms-flex-align:center;align-items:center;display:-webkit-box;display:-ms-flexbox;display:flex}.navbar-menu{background:#fff;-webkit-box-shadow:0 8px 16px rgba(10,10,10,.1);box-shadow:0 8px 16px rgba(10,10,10,.1);max-height:calc(100vh - 3.5rem);overflow-y:auto;-ms-scroll-chaining:none;overscroll-behavior:none;padding:.5rem 0}.navbar-menu:not(.is-active){display:none}.navbar-menu .navbar-link:hover,.navbar-menu a.navbar-item:hover{background:#f5f5f5}}@media screen and 
(min-width:1024px){.navbar-burger{display:none}.navbar,.navbar-end,.navbar-item,.navbar-link,.navbar-menu{display:-webkit-box;display:-ms-flexbox;display:flex}.navbar-item,.navbar-link{position:relative;-webkit-box-flex:0;-ms-flex:none;flex:none}.navbar-item:not(.has-dropdown),.navbar-link{-webkit-box-align:center;-ms-flex-align:center;align-items:center}.navbar-item.is-hoverable:hover .navbar-dropdown{display:block}.navbar-link::after{border-width:0 0 1px 1px;border-style:solid;content:"";display:block;height:.5em;pointer-events:none;position:absolute;-webkit-transform:rotate(-45deg);transform:rotate(-45deg);width:.5em;margin-top:-.375em;right:1.125em;top:50%}.navbar-end .navbar-link,.navbar-end>.navbar-item{color:#fff}.navbar-end .navbar-item.has-dropdown:hover .navbar-link,.navbar-end .navbar-link:hover,.navbar-end>a.navbar-item:hover{background:#000;color:#fff}.navbar-end .navbar-link::after{border-color:currentColor}.navbar-dropdown{background:#fff;border:1px solid #e1e1e1;border-top:none;border-radius:0 0 .25rem .25rem;display:none;top:100%;left:0;min-width:100%;position:absolute}.navbar-dropdown .navbar-item{padding:.5rem 3rem .5rem 1rem;white-space:nowrap}.navbar-dropdown .navbar-item:last-child{border-radius:inherit}.navbar-dropdown.is-right{left:auto;right:0}.navbar-dropdown a.navbar-item:hover{background:#f5f5f5}}footer.footer{background-color:#e1e1e1;color:#5d5d5d;font-size:.83333rem;line-height:1.6;padding:1.5rem}.footer p{margin:.5rem 0}.footer a{color:#191919} - -/*! 
Adapted from the GitHub style by Vasily Polovnyov */.hljs-comment,.hljs-quote{color:#998;font-style:italic}.hljs-keyword,.hljs-selector-tag,.hljs-subst{color:#333;font-weight:500}.hljs-literal,.hljs-number,.hljs-tag .hljs-attr,.hljs-template-variable,.hljs-variable{color:teal}.hljs-doctag,.hljs-string{color:#d14}.hljs-section,.hljs-selector-id,.hljs-title{color:#900;font-weight:500}.hljs-subst{font-weight:400}.hljs-class .hljs-title,.hljs-type{color:#458;font-weight:500}.hljs-attribute,.hljs-name,.hljs-tag{color:navy;font-weight:400}.hljs-link,.hljs-regexp{color:#009926}.hljs-bullet,.hljs-symbol{color:#990073}.hljs-built_in,.hljs-builtin-name{color:#0086b3}.hljs-meta{color:#999;font-weight:500}.hljs-deletion{background:#fdd}.hljs-addition{background:#dfd}.hljs-emphasis{font-style:italic}.hljs-strong{font-weight:500}@page{margin:.5in}@media print{.hide-for-print{display:none!important}html{font-size:.9375em}a{color:inherit!important;text-decoration:underline}a.bare,a[href^="#"],a[href^="mailto:"]{text-decoration:none}img,object,svg,tr{page-break-inside:avoid}thead{display:table-header-group}pre{-webkit-hyphens:none;-ms-hyphens:none;hyphens:none;white-space:pre-wrap}body{padding-top:2rem}.navbar{background:none;color:inherit;position:absolute}.navbar *{color:inherit!important}.nav-container,.navbar>:not(.navbar-brand),.toolbar,aside.toc,nav.pagination{display:none}.doc{color:inherit;margin:auto;max-width:none;padding-bottom:2rem}.doc .listingblock code[data-lang]::before{display:block}footer.footer{background:none;border-top:1px solid #e1e1e1;color:#8e8e8e;padding:.25rem .5rem 0}.footer *{color:inherit}} \ No newline at end of file diff --git a/_/js/site.js b/_/js/site.js deleted file mode 100644 index b22d4b45..00000000 --- a/_/js/site.js +++ /dev/null @@ -1,6 +0,0 @@ -!function(){"use strict";var 
o=/^sect(\d)$/,i=document.querySelector(".nav-container"),a=document.querySelector(".nav-toggle");a.addEventListener("click",function(e){if(a.classList.contains("is-active"))return u(e);v(e);var t=document.documentElement;t.classList.add("is-clipped--nav"),a.classList.add("is-active"),i.classList.add("is-active");var n=c.getBoundingClientRect(),e=window.innerHeight-Math.round(n.top);Math.round(n.height)!==e&&(c.style.height=e+"px");t.addEventListener("click",u)}),i.addEventListener("click",v);var e,c,r,s,l=i.querySelector("[data-panel=menu]");function t(){var e,t,n=window.location.hash;if(n&&(n.indexOf("%")&&(n=decodeURIComponent(n)),!(e=l.querySelector('.nav-link[href="'+n+'"]')))){n=document.getElementById(n.slice(1));if(n)for(var i=n,a=document.querySelector("article.doc");(i=i.parentNode)&&i!==a;){var c=i.id;if((c=!c&&(c=o.test(i.className))?(i.firstElementChild||{}).id:c)&&(e=l.querySelector('.nav-link[href="#'+c+'"]')))break}}if(e)t=e.parentNode;else{if(!s)return;e=(t=s).querySelector(".nav-link")}t!==r&&(h(l,".nav-item.is-active").forEach(function(e){e.classList.remove("is-active","is-current-path","is-current-page")}),t.classList.add("is-current-page"),d(r=t),p(l,e))}function d(e){for(var t,n=e.parentNode;!(t=n.classList).contains("nav-menu");)"LI"===n.tagName&&t.contains("nav-item")&&t.add("is-active","is-current-path"),n=n.parentNode;e.classList.add("is-active")}function n(){var e,t,n,i;this.classList.toggle("is-active")&&(e=parseFloat(window.getComputedStyle(this).marginTop),t=this.getBoundingClientRect(),n=l.getBoundingClientRect(),0<(i=(t.bottom-n.top-n.height+e).toFixed())&&(l.scrollTop+=Math.min((t.top-n.top-e).toFixed(),i)))}function u(e){v(e);e=document.documentElement;e.classList.remove("is-clipped--nav"),a.classList.remove("is-active"),i.classList.remove("is-active"),e.removeEventListener("click",u)}function v(e){e.stopPropagation()}function p(e,t){var 
n=e.getBoundingClientRect(),i=n.height,a=window.getComputedStyle(c);"sticky"===a.position&&(i-=n.top-parseFloat(a.top)),e.scrollTop=Math.max(0,.5*(t.getBoundingClientRect().height-i)+t.offsetTop)}function h(e,t){return[].slice.call(e.querySelectorAll(t))}l&&(e=i.querySelector("[data-panel=explore]"),c=i.querySelector(".nav"),r=l.querySelector(".is-current-page"),(s=r)?(d(r),p(l,r.querySelector(".nav-link"))):l.scrollTop=0,h(l,".nav-item-toggle").forEach(function(e){var t=e.parentElement;e.addEventListener("click",n.bind(t));e=function(e,t){e=e.nextElementSibling;return(!e||!t||e[e.matches?"matches":"msMatchesSelector"](t))&&e}(e,".nav-text");e&&(e.style.cursor="pointer",e.addEventListener("click",n.bind(t)))}),e&&e.querySelector(".context").addEventListener("click",function(){h(c,"[data-panel]").forEach(function(e){e.classList.toggle("is-active")})}),l.addEventListener("mousedown",function(e){1":"")+".sect"+a);r.push("h"+(i+1)+"[id]")}else r.push("h1[id].sect0");n.push(r.join(">"))}var c,s,l,u=(c=n.join(","),s=d.parentNode,[].slice.call((s||document).querySelectorAll(c)));if(!u.length)return e.parentNode.removeChild(e);var f={},m=u.reduce(function(e,t){var o=document.createElement("a");o.textContent=t.textContent,f[o.href="#"+t.id]=o;var n=document.createElement("li");return n.dataset.level=parseInt(t.nodeName.slice(1),10)-1,n.appendChild(o),e.appendChild(n),e},document.createElement("ul")),p=e.querySelector(".toc-menu");p||((p=document.createElement("div")).className="toc-menu");var v=document.createElement("h3");v.textContent=e.dataset.title||"Contents",p.appendChild(v),p.appendChild(m);e=!document.getElementById("toc")&&d.querySelector("h1.page ~ :not(.is-before-toc)");e&&((v=document.createElement("aside")).className="toc embedded",v.appendChild(p.cloneNode(!0)),e.parentNode.insertBefore(v,e)),window.addEventListener("load",function(){h(),window.addEventListener("scroll",h)})}}function h(){var 
t,e=window.pageYOffset,o=1.15*g(document.documentElement,"fontSize"),n=d.offsetTop;if(e&&window.innerHeight+e+2>=document.documentElement.scrollHeight){l=Array.isArray(l)?l:Array(l||0);var i=[],r=u.length-1;return u.forEach(function(e,t){var o="#"+e.id;t===r||e.getBoundingClientRect().top+g(e,"paddingTop")>n?(i.push(o),l.indexOf(o)<0&&f[o].classList.add("is-active")):~l.indexOf(o)&&f[l.shift()].classList.remove("is-active")}),m.scrollTop=m.scrollHeight-m.offsetHeight,void(l=1n||void(t="#"+e.id)}),t?t!==l&&(l&&f[l].classList.remove("is-active"),(e=f[t]).classList.add("is-active"),m.scrollHeight>m.offsetHeight&&(m.scrollTop=Math.max(0,e.offsetTop+e.offsetHeight-m.offsetHeight)),l=t):l&&(f[l].classList.remove("is-active"),l=void 0)}function g(e,t){return parseFloat(window.getComputedStyle(e)[t])}}(); -!function(){"use strict";var o=document.querySelector("article.doc"),t=document.querySelector(".toolbar");function i(e){return e&&(~e.indexOf("%")?decodeURIComponent(e):e).slice(1)}function r(e){if(e){if(e.altKey||e.ctrlKey)return;window.location.hash="#"+this.id,e.preventDefault()}window.scrollTo(0,function e(t,n){return o.contains(t)?e(t.offsetParent,t.offsetTop+n):n}(this,0)-t.getBoundingClientRect().bottom)}window.addEventListener("load",function e(t){var n,o;(n=i(window.location.hash))&&(o=document.getElementById(n))&&(r.bind(o)(),setTimeout(r.bind(o),0)),window.removeEventListener("load",e)}),Array.prototype.slice.call(document.querySelectorAll('a[href^="#"]')).forEach(function(e){var t,n;(t=i(e.hash))&&(n=document.getElementById(t))&&e.addEventListener("click",r.bind(n))})}(); -!function(){"use strict";var t,e=document.querySelector(".page-versions .version-menu-toggle");e&&(t=document.querySelector(".page-versions"),e.addEventListener("click",function(e){t.classList.toggle("is-active"),e.stopPropagation()}),document.documentElement.addEventListener("click",function(){t.classList.remove("is-active")}))}(); -!function(){"use strict";var 
t=document.querySelector(".navbar-burger");t&&t.addEventListener("click",function(t){t.stopPropagation(),document.documentElement.classList.toggle("is-clipped--navbar"),this.classList.toggle("is-active");var e=document.getElementById(this.dataset.target);e.classList.toggle("is-active")&&(e.style.maxHeight="",t=window.innerHeight-Math.round(e.getBoundingClientRect().top),parseInt(window.getComputedStyle(e).maxHeight,10)!==t&&(e.style.maxHeight=t+"px"))}.bind(t))}(); -!function(){"use strict";var s=/^\$ (\S[^\\\n]*(\\\n(?!\$ )[^\\\n]*)*)(?=\n|$)/gm,l=/( ) *\\\n *|\\\n( ?) */g,d=/ +$/gm,r=(document.getElementById("site-script")||{dataset:{}}).dataset;[].slice.call(document.querySelectorAll(".doc pre.highlight, .doc .literalblock pre")).forEach(function(e){var t,n,c,i,a;if(e.classList.contains("highlight"))(c=(t=e.querySelector("code")).dataset.lang)&&"console"!==c&&((i=document.createElement("span")).className="source-lang",i.appendChild(document.createTextNode(c)));else{if(!e.innerText.startsWith("$ "))return;var o=e.parentNode.parentNode;o.classList.remove("literalblock"),o.classList.add("listingblock"),e.classList.add("highlightjs","highlight"),(t=document.createElement("code")).className="language-console hljs",t.dataset.lang="console",t.appendChild(e.firstChild),e.appendChild(t)}(c=document.createElement("div")).className="source-toolbox",i&&c.appendChild(i),window.navigator.clipboard&&((n=document.createElement("button")).className="copy-button",n.setAttribute("title","Copy to clipboard"),"svg"===r.svgAs?((o=document.createElementNS("http://www.w3.org/2000/svg","svg")).setAttribute("class","copy-icon"),(i=document.createElementNS("http://www.w3.org/2000/svg","use")).setAttribute("href",window.uiRootPath+"/img/octicons-16.svg#icon-clippy"),o.appendChild(i),n.appendChild(o)):((a=document.createElement("img")).src=window.uiRootPath+"/img/octicons-16.svg#view-clippy",a.alt="copy 
icon",a.className="copy-icon",n.appendChild(a)),(a=document.createElement("span")).className="copy-toast",a.appendChild(document.createTextNode("Copied!")),n.appendChild(a),c.appendChild(n)),e.appendChild(c),n&&n.addEventListener("click",function(e){var t=e.innerText.replace(d,"");"console"===e.dataset.lang&&t.startsWith("$ ")&&(t=function(e){var t,n=[];for(;t=s.exec(e);)n.push(t[1].replace(l,"$1$2"));return n.join(" && ")}(t));window.navigator.clipboard.writeText(t).then(function(){this.classList.add("clicked"),this.offsetHeight,this.classList.remove("clicked")}.bind(this),function(){})}.bind(n,t))})}(); \ No newline at end of file diff --git a/_/js/vendor/highlight.js b/_/js/vendor/highlight.js deleted file mode 100644 index 418de730..00000000 --- a/_/js/vendor/highlight.js +++ /dev/null @@ -1 +0,0 @@ -!function(){var e,n,a={};e=function(t){var a,g=[],s=Object.keys,w=Object.create(null),r=Object.create(null),O=!0,n=/^(no-?highlight|plain|text)$/i,l=/\blang(?:uage)?-([\w-]+)\b/i,i=/((^(<[^>]+>|\t|)+|(?:\n)))/gm,M="",C="Could not find the language '{}', did you forget to load/include a language module?",x={classPrefix:"hljs-",tabReplace:null,useBR:!1,languages:void 0},o="of and for in not or if then".split(" ");function S(e){return e.replace(/&/g,"&").replace(//g,">")}function u(e){return e.nodeName.toLowerCase()}function c(e){return n.test(e)}function d(e){var n,a={},t=Array.prototype.slice.call(arguments,1);for(n in e)a[n]=e[n];return t.forEach(function(e){for(n in e)a[n]=e[n]}),a}function m(e){var i=[];return function e(n,a){for(var t=n.firstChild;t;t=t.nextSibling)3===t.nodeType?a+=t.nodeValue.length:1===t.nodeType&&(i.push({event:"start",offset:a,node:t}),a=e(t,a),u(t).match(/br|hr|img|input/)||i.push({event:"stop",offset:a,node:t}));return a}(e,0),i}function _(e,n,a){var t=0,i="",s=[];function r(){return e.length&&n.length?e[0].offset!==n[0].offset?e[0].offset"}function o(e){i+=""}function c(e){("start"===e.event?l:o)(e.node)}for(;e.length||n.length;){var 
d=r();if(i+=S(a.substring(t,d[0].offset)),t=d[0].offset,d===e){for(s.reverse().forEach(o);c(d.splice(0,1)[0]),(d=r())===e&&d.length&&d[0].offset===t;);s.reverse().forEach(l)}else"start"===d[0].event?s.push(d[0].node):s.pop(),c(d.splice(0,1)[0])}return i+S(a.substr(t))}function b(n){return n.variants&&!n.cached_variants&&(n.cached_variants=n.variants.map(function(e){return d(n,{variants:null},e)})),n.cached_variants||(function e(n){return!!n&&(n.endsWithParent||e(n.starts))}(n)?[d(n,{starts:n.starts?d(n.starts):null})]:Object.isFrozen(n)?[d(n)]:[n])}function p(e){if(a&&!e.langApiRestored){for(var n in e.langApiRestored=!0,a)e[n]&&(e[a[n]]=e[n]);(e.contains||[]).concat(e.variants||[]).forEach(p)}}function f(n,t){var i={};return"string"==typeof n?a("keyword",n):s(n).forEach(function(e){a(e,n[e])}),i;function a(a,e){(e=t?e.toLowerCase():e).split(" ").forEach(function(e){var n=e.split("|");i[n[0]]=[a,(e=n[0],(n=n[1])?Number(n):function(e){return-1!=o.indexOf(e.toLowerCase())}(e)?0:1)]})}}function T(t){function d(e){return e&&e.source||e}function g(e,n){return new RegExp(d(e),"m"+(t.case_insensitive?"i":"")+(n?"g":""))}function i(i){var s={},r=[],l={},a=1;function e(e,n){s[a]=e,r.push([e,n]),a+=new RegExp(n.toString()+"|").exec("").length-1+1}for(var n=0;n')+n+(a?"":M)}function r(){var e,n,a,t,i;if(!_.keywords)return S(E);for(a="",_.lexemesRe.lastIndex=e=0,n=_.lexemesRe.exec(E);n;)a+=S(E.substring(e,n.index)),t=_,i=n,i=m.case_insensitive?i[0].toLowerCase():i[0],(i=t.keywords.hasOwnProperty(i)&&t.keywords[i])?(N+=i[1],a+=s(i[0],S(n[0]))):a+=S(n[0]),e=_.lexemesRe.lastIndex,n=_.lexemesRe.exec(E);return a+S(E.substr(e))}function l(){p+=(null!=_.subLanguage?function(){var e="string"==typeof _.subLanguage;if(e&&!w[_.subLanguage])return S(E);var n=e?k(_.subLanguage,E,!0,b[_.subLanguage]):A(E,_.subLanguage.length?_.subLanguage:void 0);return 0<_.relevance&&(N+=n.relevance),e&&(b[_.subLanguage]=n.top),s(n.language,n.value,!1,!0)}:r)(),E=""}function 
o(e){p+=e.className?s(e.className,"",!0):"",_=Object.create(e,{parent:{value:_}})}function c(e){var n=e[0],e=e.rule;return e&&e.endSameAsBegin&&(e.endRe=new RegExp(n.replace(/[-\/\\^$*+?.()|[\]{}]/g,"\\$&"),"m")),e.skip?E+=n:(e.excludeBegin&&(E+=n),l(),e.returnBegin||e.excludeBegin||(E=n)),o(e),e.returnBegin?0:n.length}function d(e){var n=e[0],e=i.substr(e.index),a=function e(n,a){if(t=n.endRe,i=a,(i=t&&t.exec(i))&&0===i.index){for(;n.endsParent&&n.parent;)n=n.parent;return n}var t,i;if(n.endsWithParent)return e(n.parent,a)}(_,e);if(a){e=_;for(e.skip?E+=n:(e.returnEnd||e.excludeEnd||(E+=n),l(),e.excludeEnd&&(E=n));_.className&&(p+=M),_.skip||_.subLanguage||(N+=_.relevance),(_=_.parent)!==a.parent;);return a.starts&&(a.endSameAsBegin&&(a.starts.endRe=a.endRe),o(a.starts)),e.returnEnd?0:n.length}}var g={};function u(e,n){var a=n&&n[0];if(E+=e,null==a)return l(),0;if("begin"==g.type&&"end"==n.type&&g.index==n.index&&""===a)return E+=i.slice(n.index,n.index+1),1;if("illegal"===g.type&&""===a)return E+=i.slice(n.index,n.index+1),1;if("begin"===(g=n).type)return c(n);if("illegal"===n.type&&!t)throw new Error('Illegal lexeme "'+a+'" for mode "'+(_.className||"")+'"');if("end"===n.type){n=d(n);if(null!=n)return n}return E+=a,a.length}var m=R(n);if(!m)throw console.error(C.replace("{}",n)),new Error('Unknown language: "'+n+'"');T(m);for(var _=a||m,b={},p="",f=_;f!==m;f=f.parent)f.className&&(p=s(f.className,"",!0)+p);var E="",N=0;try{for(var h,v,y=0;;){if(_.terminators.lastIndex=y,!(h=_.terminators.exec(i)))break;v=u(i.substring(y,h.index),h),y=h.index+v}for(u(i.substr(y)),f=_;f.parent;f=f.parent)f.className&&(p+=M);return{relevance:N,value:p,illegal:!1,language:n,top:_}}catch(e){if(e.message&&-1!==e.message.indexOf("Illegal"))return{illegal:!0,relevance:0,value:S(i)};if(O)return{relevance:0,value:S(i),language:n,top:_,errorRaised:e};throw e}}function A(a,e){e=e||x.languages||s(w);var t={relevance:0,value:S(a)},i=t;return e.filter(R).filter(y).forEach(function(e){var 
n=k(e,a,!1);n.language=e,n.relevance>i.relevance&&(i=n),n.relevance>t.relevance&&(i=t,t=n)}),i.language&&(t.second_best=i),t}function E(e){return x.tabReplace||x.useBR?e.replace(i,function(e,n){return x.useBR&&"\n"===e?"
":x.tabReplace?n.replace(/\t/g,x.tabReplace):""}):e}function N(e){var n,a,t,i,s=function(e){var n,a,t,i,s=e.className+" ";if(s+=e.parentNode?e.parentNode.className:"",a=l.exec(s)){var r=R(a[1]);return r||(console.warn(C.replace("{}",a[1])),console.warn("Falling back to no-highlight mode for this block.",e)),r?a[1]:"no-highlight"}for(n=0,t=(s=s.split(/\s+/)).length;n/g,"\n"):n=e,i=n.textContent,a=s?k(s,i,!0):A(i),(n=m(n)).length&&((t=document.createElement("div")).innerHTML=a.value,a.value=_(n,m(t),i)),a.value=E(a.value),e.innerHTML=a.value,e.className=(t=e.className,i=s,s=a.language,i=i?r[i]:s,s=[t.trim()],t.match(/\bhljs\b/)||s.push("hljs"),-1===t.indexOf(i)&&s.push(i),s.join(" ").trim()),e.result={language:a.language,re:a.relevance},a.second_best&&(e.second_best={language:a.second_best.language,re:a.second_best.relevance}))}function h(){var e;h.called||(h.called=!0,e=document.querySelectorAll("pre code"),g.forEach.call(e,N))}var v={disableAutodetect:!0};function R(e){return e=(e||"").toLowerCase(),w[e]||w[r[e]]}function y(e){e=R(e);return e&&!e.disableAutodetect}return t.highlight=k,t.highlightAuto=A,t.fixMarkup=E,t.highlightBlock=N,t.configure=function(e){x=d(x,e)},t.initHighlighting=h,t.initHighlightingOnLoad=function(){window.addEventListener("DOMContentLoaded",h,!1),window.addEventListener("load",h,!1)},t.registerLanguage=function(n,e){var a;try{a=e(t)}catch(e){if(console.error("Language definition for '{}' could not be registered.".replace("{}",n)),!O)throw e;console.error(e),a=v}p(w[n]=a),a.rawDefinition=e.bind(null,t),a.aliases&&a.aliases.forEach(function(e){r[e]=n})},t.listLanguages=function(){return s(w)},t.getLanguage=R,t.requireLanguage=function(e){var n=R(e);if(n)return n;throw new Error("The '{}' language is required, but not 
loaded.".replace("{}",e))},t.autoDetection=y,t.inherit=d,t.debugMode=function(){O=!1},t.IDENT_RE="[a-zA-Z]\\w*",t.UNDERSCORE_IDENT_RE="[a-zA-Z_]\\w*",t.NUMBER_RE="\\b\\d+(\\.\\d+)?",t.C_NUMBER_RE="(-?)(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)",t.BINARY_NUMBER_RE="\\b(0b[01]+)",t.RE_STARTERS_RE="!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~",t.BACKSLASH_ESCAPE={begin:"\\\\[\\s\\S]",relevance:0},t.APOS_STRING_MODE={className:"string",begin:"'",end:"'",illegal:"\\n",contains:[t.BACKSLASH_ESCAPE]},t.QUOTE_STRING_MODE={className:"string",begin:'"',end:'"',illegal:"\\n",contains:[t.BACKSLASH_ESCAPE]},t.PHRASAL_WORDS_MODE={begin:/\b(a|an|the|are|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such|will|you|your|they|like|more)\b/},t.COMMENT=function(e,n,a){a=t.inherit({className:"comment",begin:e,end:n,contains:[]},a||{});return a.contains.push(t.PHRASAL_WORDS_MODE),a.contains.push({className:"doctag",begin:"(?:TODO|FIXME|NOTE|BUG|XXX):",relevance:0}),a},t.C_LINE_COMMENT_MODE=t.COMMENT("//","$"),t.C_BLOCK_COMMENT_MODE=t.COMMENT("/\\*","\\*/"),t.HASH_COMMENT_MODE=t.COMMENT("#","$"),t.NUMBER_MODE={className:"number",begin:t.NUMBER_RE,relevance:0},t.C_NUMBER_MODE={className:"number",begin:t.C_NUMBER_RE,relevance:0},t.BINARY_NUMBER_MODE={className:"number",begin:t.BINARY_NUMBER_RE,relevance:0},t.CSS_NUMBER_MODE={className:"number",begin:t.NUMBER_RE+"(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?",relevance:0},t.REGEXP_MODE={className:"regexp",begin:/\//,end:/\/[gimuy]*/,illegal:/\n/,contains:[t.BACKSLASH_ESCAPE,{begin:/\[/,end:/\]/,relevance:0,contains:[t.BACKSLASH_ESCAPE]}]},t.TITLE_MODE={className:"title",begin:t.IDENT_RE,relevance:0},t.UNDERSCORE_TITLE_MODE={className:"title",begin:t.UNDERSCORE_IDENT_RE,relevance:0},t.METHOD_GUARD={begin:"\\.\\s*"+t.UNDERSCORE_IDENT_RE,r
elevance:0},[t.BACKSLASH_ESCAPE,t.APOS_STRING_MODE,t.QUOTE_STRING_MODE,t.PHRASAL_WORDS_MODE,t.COMMENT,t.C_LINE_COMMENT_MODE,t.C_BLOCK_COMMENT_MODE,t.HASH_COMMENT_MODE,t.NUMBER_MODE,t.C_NUMBER_MODE,t.BINARY_NUMBER_MODE,t.CSS_NUMBER_MODE,t.REGEXP_MODE,t.TITLE_MODE,t.UNDERSCORE_TITLE_MODE,t.METHOD_GUARD].forEach(function(e){!function n(a){Object.freeze(a);var t="function"==typeof a;Object.getOwnPropertyNames(a).forEach(function(e){!a.hasOwnProperty(e)||null===a[e]||"object"!=typeof a[e]&&"function"!=typeof a[e]||t&&("caller"===e||"callee"===e||"arguments"===e)||Object.isFrozen(a[e])||n(a[e])});return a}(e)}),t},n="object"==typeof window&&window||"object"==typeof self&&self,void 0===a||a.nodeType?n&&(n.hljs=e({}),"function"==typeof define&&define.amd&&define([],function(){return n.hljs})):e(a);function t(e){return{aliases:["adoc"],contains:[e.COMMENT("^/{4,}\\n","\\n/{4,}$",{relevance:10}),e.COMMENT("^//","$",{relevance:0}),{className:"title",begin:"^\\.\\w.*$"},{begin:"^[=\\*]{4,}\\n",end:"\\n^[=\\*]{4,}$",relevance:10},{className:"section",relevance:10,variants:[{begin:"^(={1,5}) .+?( 
\\1)?$"},{begin:"^[^\\[\\]\\n]+?\\n[=\\-~\\^\\+]{2,}$"}]},{className:"meta",begin:"^:.+?:",end:"\\s",excludeEnd:!0,relevance:10},{className:"meta",begin:"^\\[.+?\\]$",relevance:0},{className:"quote",begin:"^_{4,}\\n",end:"\\n_{4,}$",relevance:10},{className:"code",begin:"^[\\-\\.]{4,}\\n",end:"\\n[\\-\\.]{4,}$",relevance:10},{begin:"^\\+{4,}\\n",end:"\\n\\+{4,}$",contains:[{begin:"<",end:">",subLanguage:"xml",relevance:0}],relevance:10},{className:"bullet",begin:"^(\\*+|\\-+|\\.+|[^\\n]+?::)\\s+"},{className:"symbol",begin:"^(NOTE|TIP|IMPORTANT|WARNING|CAUTION):\\s+",relevance:10},{className:"strong",begin:"\\B\\*(?![\\*\\s])",end:"(\\n{2}|\\*)",contains:[{begin:"\\\\*\\w",relevance:0}]},{className:"emphasis",begin:"\\B'(?!['\\s])",end:"(\\n{2}|')",contains:[{begin:"\\\\'\\w",relevance:0}],relevance:0},{className:"emphasis",begin:"_(?![_\\s])",end:"(\\n{2}|_)",relevance:0},{className:"string",variants:[{begin:"``.+?''"},{begin:"`.+?'"}]},{className:"code",begin:"(`.+?`|\\+.+?\\+)",relevance:0},{className:"code",begin:"^[ \\t]",end:"$",relevance:0},{begin:"^'{3,}[ \\t]*$",relevance:10},{begin:"(link:)?(http|https|ftp|file|irc|image:?):\\S+\\[.*?\\]",returnBegin:!0,contains:[{begin:"(link|image:?):",relevance:0},{className:"link",begin:"\\w",end:"[^\\[]+",relevance:0},{className:"string",begin:"\\[",end:"\\]",excludeBegin:!0,excludeEnd:!0,relevance:0}],relevance:10}]}}function i(e){var n={className:"variable",variants:[{begin:/\$[\w\d#@][\w\d_]*/},{begin:/\$\{(.*?)}/}]},a={className:"string",begin:/"/,end:/"/,contains:[e.BACKSLASH_ESCAPE,n,{className:"variable",begin:/\$\(/,end:/\)/,contains:[e.BACKSLASH_ESCAPE]}]};return{aliases:["sh","zsh"],lexemes:/\b-?[a-z\._]+\b/,keywords:{keyword:"if then else elif fi for while in do done case esac function",literal:"true false",built_in:"break cd continue eval exec exit export getopts hash pwd readonly return shift test times trap umask unset alias bind builtin caller command declare echo enable help let local logout mapfile 
printf read readarray source type typeset ulimit unalias set shopt autoload bg bindkey bye cap chdir clone comparguments compcall compctl compdescribe compfiles compgroups compquote comptags comptry compvalues dirs disable disown echotc echoti emulate fc fg float functions getcap getln history integer jobs kill limit log noglob popd print pushd pushln rehash sched setcap setopt stat suspend ttyctl unfunction unhash unlimit unsetopt vared wait whence where which zcompile zformat zftp zle zmodload zparseopts zprof zpty zregexparse zsocket zstyle ztcp",_:"-ne -eq -lt -gt -f -d -e -s -l -a"},contains:[{className:"meta",begin:/^#![^\n]+sh\s*$/,relevance:10},{className:"function",begin:/\w[\w\d_]*\s*\(\s*\)\s*\{/,returnBegin:!0,contains:[e.inherit(e.TITLE_MODE,{begin:/\w[\w\d_]*/})],relevance:0},e.HASH_COMMENT_MODE,a,{className:"",begin:/\\"/},{className:"string",begin:/'/,end:/'/},n]}}function s(e){var n="a-zA-Z_\\-!.?+*=<>&#'",a={begin:u="["+n+"]["+n+"0-9/;:]*",relevance:0},t={className:"number",begin:"[-+]?\\d+(\\.\\d+)?",relevance:0},i=e.inherit(e.QUOTE_STRING_MODE,{illegal:null}),s=e.COMMENT(";","$",{relevance:0}),r={className:"literal",begin:/\b(true|false|nil)\b/},l={begin:"[\\[\\{]",end:"[\\]\\}]"},o={className:"comment",begin:"\\^"+u},c=e.COMMENT("\\^\\{","\\}"),d={className:"symbol",begin:"[:]{1,2}"+u},g={begin:"\\(",end:"\\)"},u={keywords:{"builtin-name":"def defonce cond apply if-not if-let if not not= = < > <= >= == + / * - rem quot neg? pos? delay? symbol? keyword? true? false? integer? empty? coll? list? set? ifn? fn? associative? sequential? sorted? counted? reversible? number? decimal? class? distinct? isa? float? rational? reduced? ratio? odd? even? char? seq? vector? string? map? nil? contains? zero? instance? not-every? not-any? libspec? -> ->> .. . 
inc compare do dotimes mapcat take remove take-while drop letfn drop-last take-last drop-while while intern condp case reduced cycle split-at split-with repeat replicate iterate range merge zipmap declare line-seq sort comparator sort-by dorun doall nthnext nthrest partition eval doseq await await-for let agent atom send send-off release-pending-sends add-watch mapv filterv remove-watch agent-error restart-agent set-error-handler error-handler set-error-mode! error-mode shutdown-agents quote var fn loop recur throw try monitor-enter monitor-exit defmacro defn defn- macroexpand macroexpand-1 for dosync and or when when-not when-let comp juxt partial sequence memoize constantly complement identity assert peek pop doto proxy defstruct first rest cons defprotocol cast coll deftype defrecord last butlast sigs reify second ffirst fnext nfirst nnext defmulti defmethod meta with-meta ns in-ns create-ns import refer keys select-keys vals key val rseq name namespace promise into transient persistent! conj! assoc! dissoc! pop! disj! use class type num float double short byte boolean bigint biginteger bigdec print-method print-dup throw-if printf format load compile get-in update-in pr pr-on newline flush read slurp read-line subvec with-open memfn time re-find re-groups rand-int rand mod locking assert-valid-fdecl alias resolve ref deref refset swap! reset! set-validator! compare-and-set! alter-meta! reset-meta! commute get-validator alter ref-set ref-history-count ref-min-history ref-max-history ensure sync io! new next conj set! 
to-array future future-call into-array aset gen-class reduce map filter find empty hash-map hash-set sorted-map sorted-map-by sorted-set sorted-set-by vec vector seq flatten reverse assoc dissoc list disj get union difference intersection extend extend-type extend-protocol int nth delay count concat chunk chunk-buffer chunk-append chunk-first chunk-rest max min dec unchecked-inc-int unchecked-inc unchecked-dec-inc unchecked-dec unchecked-negate unchecked-add-int unchecked-add unchecked-subtract-int unchecked-subtract chunk-next chunk-cons chunked-seq? prn vary-meta lazy-seq spread list* str find-keyword keyword symbol gensym force rationalize"},lexemes:u,className:"name",begin:u,starts:n={endsWithParent:!0,relevance:0}},a=[g,i,o,c,s,d,l,t,r,a];return g.contains=[e.COMMENT("comment",""),u,n],n.contains=a,l.contains=a,c.contains=[l],{aliases:["clj"],illegal:/\S/,contains:[g,i,o,c,s,d,l,t,r]}}function r(e){function n(e){return"(?:"+e+")?"}var a="decltype\\(auto\\)",t="[a-zA-Z_]\\w*::",i={className:"keyword",begin:"\\b[a-z\\d_]*_t\\b"},s={className:"string",variants:[{begin:'(u8?|U|L)?"',end:'"',illegal:"\\n",contains:[e.BACKSLASH_ESCAPE]},{begin:"(u8?|U|L)?'(\\\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4,8}|[0-7]{3}|\\S)|.)",end:"'",illegal:"."},{begin:/(?:u8?|U|L)?R"([^()\\ ]{0,16})\((?:.|\n)*?\)\1"/}]},r={className:"number",variants:[{begin:"\\b(0b[01']+)"},{begin:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)(u|U|l|L|ul|UL|f|F|b|B)"},{begin:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)"}],relevance:0},l={className:"meta",begin:/#\s*[a-z]+\b/,end:/$/,keywords:{"meta-keyword":"if else elif endif define undef warning error line pragma _Pragma ifdef ifndef include"},contains:[{begin:/\\\n/,relevance:0},e.inherit(s,{className:"meta-string"}),{className:"meta-string",begin:/<.*?>/,end:/$/,illegal:"\\n"},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},o={className:"title",begin:n(t)+e.IDENT_RE,relevance:0},c=n(t)+e.IDENT_RE+"\\s*\\(",d={keyword:"int 
float while private char char8_t char16_t char32_t catch import module export virtual operator sizeof dynamic_cast|10 typedef const_cast|10 const for static_cast|10 union namespace unsigned long volatile static protected bool template mutable if public friend do goto auto void enum else break extern using asm case typeid wchar_tshort reinterpret_cast|10 default double register explicit signed typename try this switch continue inline delete alignas alignof constexpr consteval constinit decltype concept co_await co_return co_yield requires noexcept static_assert thread_local restrict final override atomic_bool atomic_char atomic_schar atomic_uchar atomic_short atomic_ushort atomic_int atomic_uint atomic_long atomic_ulong atomic_llong atomic_ullong new throw return and and_eq bitand bitor compl not not_eq or or_eq xor xor_eq",built_in:"std string wstring cin cout cerr clog stdin stdout stderr stringstream istringstream ostringstream auto_ptr deque list queue stack vector map set bitset multiset multimap unordered_set unordered_map unordered_multiset unordered_multimap array shared_ptr abort terminate abs acos asin atan2 atan calloc ceil cosh cos exit exp fabs floor fmod fprintf fputs free frexp fscanf future isalnum isalpha iscntrl isdigit isgraph islower isprint ispunct isspace isupper isxdigit tolower toupper labs ldexp log10 log malloc realloc memchr memcmp memcpy memset modf pow printf putchar puts scanf sinh sin snprintf sprintf sqrt sscanf strcat strchr strcmp strcpy strcspn strlen strncat strncmp strncpy strpbrk strrchr strspn strstr tanh tan vfprintf vprintf vsprintf endl initializer_list unique_ptr _Bool complex _Complex imaginary _Imaginary",literal:"true false nullptr NULL"},g=[i,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,r,s],t={variants:[{begin:/=/,end:/;/},{begin:/\(/,end:/\)/},{beginKeywords:"new throw return 
else",end:/;/}],keywords:d,contains:g.concat([{begin:/\(/,end:/\)/,keywords:d,contains:g.concat(["self"]),relevance:0}]),relevance:0},r={className:"function",begin:"((decltype\\(auto\\)|(?:[a-zA-Z_]\\w*::)?[a-zA-Z_]\\w*(?:<.*?>)?)[\\*&\\s]+)+"+c,returnBegin:!0,end:/[{;=]/,excludeEnd:!0,keywords:d,illegal:/[^\w\s\*&:<>]/,contains:[{begin:a,keywords:d,relevance:0},{begin:c,returnBegin:!0,contains:[o],relevance:0},{className:"params",begin:/\(/,end:/\)/,keywords:d,relevance:0,contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,s,r,i,{begin:/\(/,end:/\)/,keywords:d,relevance:0,contains:["self",e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,s,r,i]}]},i,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,l]};return{aliases:["c","cc","h","c++","h++","hpp","hh","hxx","cxx"],keywords:d,illegal:"",keywords:d,contains:["self",i]},{begin:e.IDENT_RE+"::",keywords:d},{className:"class",beginKeywords:"class struct",end:/[{;:]/,contains:[{begin://,contains:["self"]},e.TITLE_MODE]}]),exports:{preprocessor:l,strings:s,keywords:d}}}function l(e){var n={keyword:"abstract as base bool break byte case catch char checked const continue decimal default delegate do double enum event explicit extern finally fixed float for foreach goto if implicit in int interface internal is lock long object operator out override params private protected public readonly ref sbyte sealed short sizeof stackalloc static string struct switch this try typeof uint ulong unchecked unsafe ushort using virtual void volatile while add alias ascending async await by descending dynamic equals from get global group into join let nameof on orderby partial remove select set value var when where yield",literal:"null false 
true"},a={className:"number",variants:[{begin:"\\b(0b[01']+)"},{begin:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)(u|U|l|L|ul|UL|f|F|b|B)"},{begin:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)"}],relevance:0},t={className:"string",begin:'@"',end:'"',contains:[{begin:'""'}]},i=e.inherit(t,{illegal:/\n/}),s={className:"subst",begin:"{",end:"}",keywords:n},r=e.inherit(s,{illegal:/\n/}),l={className:"string",begin:/\$"/,end:'"',illegal:/\n/,contains:[{begin:"{{"},{begin:"}}"},e.BACKSLASH_ESCAPE,r]},o={className:"string",begin:/\$@"/,end:'"',contains:[{begin:"{{"},{begin:"}}"},{begin:'""'},s]},c=e.inherit(o,{illegal:/\n/,contains:[{begin:"{{"},{begin:"}}"},{begin:'""'},r]});return s.contains=[o,l,t,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,a,e.C_BLOCK_COMMENT_MODE],r.contains=[c,l,i,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,a,e.inherit(e.C_BLOCK_COMMENT_MODE,{illegal:/\n/})],l={variants:[o,l,t,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]},t=e.IDENT_RE+"(<"+e.IDENT_RE+"(\\s*,\\s*"+e.IDENT_RE+")*>)?(\\[\\])?",{aliases:["csharp","c#"],keywords:n,illegal:/::/,contains:[e.COMMENT("///","$",{returnBegin:!0,contains:[{className:"doctag",variants:[{begin:"///",relevance:0},{begin:"\x3c!--|--\x3e"},{begin:""}]}]}),e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,{className:"meta",begin:"#",end:"$",keywords:{"meta-keyword":"if else elif endif define undef warning error line region endregion pragma checksum"}},l,a,{beginKeywords:"class interface",end:/[{;=]/,illegal:/[^\s:,]/,contains:[e.TITLE_MODE,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{beginKeywords:"namespace",end:/[{;=]/,illegal:/[^\s:]/,contains:[e.inherit(e.TITLE_MODE,{begin:"[a-zA-Z](\\.?\\w)*"}),e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{className:"meta",begin:"^\\s*\\[",excludeBegin:!0,end:"\\]",excludeEnd:!0,contains:[{className:"meta-string",begin:/"/,end:/"/}]},{beginKeywords:"new return throw await 
else",relevance:0},{className:"function",begin:"("+t+"\\s+)+"+e.IDENT_RE+"\\s*\\(",returnBegin:!0,end:/\s*[{;=]/,excludeEnd:!0,keywords:n,contains:[{begin:e.IDENT_RE+"\\s*\\(",returnBegin:!0,contains:[e.TITLE_MODE],relevance:0},{className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:n,relevance:0,contains:[l,a,e.C_BLOCK_COMMENT_MODE]},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]}]}}function o(e){var n={className:"attribute",begin:/\S/,end:":",excludeEnd:!0,starts:{endsWithParent:!0,excludeEnd:!0,contains:[{begin:/[\w-]+\(/,returnBegin:!0,contains:[{className:"built_in",begin:/[\w-]+/},{begin:/\(/,end:/\)/,contains:[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,e.CSS_NUMBER_MODE]}]},e.CSS_NUMBER_MODE,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,e.C_BLOCK_COMMENT_MODE,{className:"number",begin:"#[0-9A-Fa-f]+"},{className:"meta",begin:"!important"}]}};return{case_insensitive:!0,illegal:/[=\/|'\$]/,contains:[e.C_BLOCK_COMMENT_MODE,{className:"selector-id",begin:/#[A-Za-z0-9_-]+/},{className:"selector-class",begin:/\.[A-Za-z0-9_-]+/},{className:"selector-attr",begin:/\[/,end:/\]/,illegal:"$",contains:[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]},{className:"selector-pseudo",begin:/:(:)?[a-zA-Z0-9\_\-\+\(\)"'.]+/},{begin:"@(page|font-face)",lexemes:"@[a-z-]+",keywords:"@page @font-face"},{begin:"@",end:"[{;]",illegal:/:/,returnBegin:!0,contains:[{className:"keyword",begin:/@\-?\w[\w]*(\-\w+)*/},{begin:/\s/,endsWithParent:!0,excludeEnd:!0,relevance:0,keywords:"and or not only",contains:[{begin:/[a-z-]+:/,className:"attribute"},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,e.CSS_NUMBER_MODE]}]},{className:"selector-tag",begin:"[a-zA-Z-][a-zA-Z0-9_-]*",relevance:0},{begin:"{",end:"}",illegal:/\S/,contains:[e.C_BLOCK_COMMENT_MODE,{begin:/(?:[A-Z\_\.\-]+|--[a-zA-Z0-9_-]+)\s*:/,returnBegin:!0,end:";",endsWithParent:!0,contains:[n]}]}]}}function c(e){return{aliases:["patch"],contains:[{className:"meta",relevance:10,variants:[{begin:/^@@ +\-\d+,\d+ +\+\d+,\d+ 
+@@$/},{begin:/^\*\*\* +\d+,\d+ +\*\*\*\*$/},{begin:/^\-\-\- +\d+,\d+ +\-\-\-\-$/}]},{className:"comment",variants:[{begin:/Index: /,end:/$/},{begin:/={3,}/,end:/$/},{begin:/^\-{3}/,end:/$/},{begin:/^\*{3} /,end:/$/},{begin:/^\+{3}/,end:/$/},{begin:/^\*{15}$/}]},{className:"addition",begin:"^\\+",end:"$"},{className:"deletion",begin:"^\\-",end:"$"},{className:"addition",begin:"^\\!",end:"$"}]}}function d(e){return{aliases:["docker"],case_insensitive:!0,keywords:"from maintainer expose env arg user onbuild stopsignal",contains:[e.HASH_COMMENT_MODE,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,e.NUMBER_MODE,{beginKeywords:"run cmd entrypoint volume add copy workdir label healthcheck shell",starts:{end:/[^\\]$/,subLanguage:"bash"}}],illegal:"/}]}]}]},s={className:"string",begin:"~[A-Z](?="+o+")",contains:[{begin:/"/,end:/"/},{begin:/'/,end:/'/},{begin:/\//,end:/\//},{begin:/\|/,end:/\|/},{begin:/\(/,end:/\)/},{begin:/\[/,end:/\]/},{begin:/\{/,end:/\}/},{begin:/\/}]},r={className:"string",contains:[e.BACKSLASH_ESCAPE,t],variants:[{begin:/"""/,end:/"""/},{begin:/'''/,end:/'''/},{begin:/~S"""/,end:/"""/,contains:[]},{begin:/~S"/,end:/"/,contains:[]},{begin:/~S'''/,end:/'''/,contains:[]},{begin:/~S'/,end:/'/,contains:[]},{begin:/'/,end:/'/},{begin:/"/,end:/"/}]},l={className:"function",beginKeywords:"def defp defmacro",end:/\B\b/,contains:[e.inherit(e.TITLE_MODE,{begin:n,endsParent:!0})]},o=e.inherit(l,{className:"class",beginKeywords:"defimpl defmodule defprotocol 
defrecord",end:/\bdo\b|$|;/}),e=[r,s,i,e.HASH_COMMENT_MODE,o,l,{begin:"::"},{className:"symbol",begin:":(?![\\s:])",contains:[r,{begin:"[a-zA-Z_]\\w*[!?=]?|[-+~]\\@|<<|>>|=~|===?|<=>|[<>]=?|\\*\\*|[-/+%^&*~`|]|\\[\\]=?"}],relevance:0},{className:"symbol",begin:n+":(?!:)",relevance:0},{className:"number",begin:"(\\b0o[0-7_]+)|(\\b0b[01_]+)|(\\b0x[0-9a-fA-F_]+)|(-?\\b[1-9][0-9_]*(.[0-9_]+([eE][-+]?[0-9]+)?)?)",relevance:0},{className:"variable",begin:"(\\$\\W)|((\\$|\\@\\@?)(\\w+))"},{begin:"->"},{begin:"("+e.RE_STARTERS_RE+")\\s*",contains:[e.HASH_COMMENT_MODE,{className:"regexp",illegal:"\\n",contains:[e.BACKSLASH_ESCAPE,t],variants:[{begin:"/",end:"/[a-z]*"},{begin:"%r\\[",end:"\\][a-z]*"}]}],relevance:0}];return{lexemes:n,keywords:a,contains:t.contains=e}}function u(e){var n={keyword:"break default func interface select case map struct chan else goto package switch const fallthrough if range type continue for import return var go defer bool byte complex64 complex128 float32 float64 int8 int16 int32 int64 string uint8 uint16 uint32 uint64 int uint uintptr rune",literal:"true false iota nil",built_in:"append cap close complex copy imag len make new panic print println real recover delete"};return{aliases:["golang"],keywords:n,illegal:"|<-"}]}}function b(e){var n="false synchronized int abstract float private char boolean var static null if const for true while long strictfp finally protected import native final void enum else break transient catch instanceof byte super volatile case assert short package default double public try this switch continue throws protected public private module requires exports 
do",a={className:"number",begin:"\\b(0[bB]([01]+[01_]+[01]+|[01]+)|0[xX]([a-fA-F0-9]+[a-fA-F0-9_]+[a-fA-F0-9]+|[a-fA-F0-9]+)|(([\\d]+[\\d_]+[\\d]+|[\\d]+)(\\.([\\d]+[\\d_]+[\\d]+|[\\d]+))?|\\.([\\d]+[\\d_]+[\\d]+|[\\d]+))([eE][-+]?\\d+)?)[lLfF]?",relevance:0};return{aliases:["jsp"],keywords:n,illegal:/<\/|#/,contains:[e.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{begin:/\w+@/,relevance:0},{className:"doctag",begin:"@[A-Za-z]+"}]}),e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,{className:"class",beginKeywords:"class interface",end:/[{;=]/,excludeEnd:!0,keywords:"class interface",illegal:/[:"\[\]]/,contains:[{beginKeywords:"extends implements"},e.UNDERSCORE_TITLE_MODE]},{beginKeywords:"new throw return else",relevance:0},{className:"function",begin:"([À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*(<[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*(\\s*,\\s*[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*)*>)?\\s+)+"+e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,end:/[{;=]/,excludeEnd:!0,keywords:n,contains:[{begin:e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,relevance:0,contains:[e.UNDERSCORE_TITLE_MODE]},{className:"params",begin:/\(/,end:/\)/,keywords:n,relevance:0,contains:[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,e.C_NUMBER_MODE,e.C_BLOCK_COMMENT_MODE]},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},a,{className:"meta",begin:"@[A-Za-z]+"}]}}function p(e){var n="<>",a="",t=/<[A-Za-z0-9\\._:-]+/,i=/\/[A-Za-z0-9\\._:-]+>|\/>/,s="[A-Za-z$_][0-9A-Za-z$_]*",r={keyword:"in of if for while finally var new function do return void else break catch instanceof with throw case default try this switch continue typeof delete let yield const export super debugger as async await static import from as",literal:"true false null undefined NaN Infinity",built_in:"eval isFinite isNaN parseFloat parseInt decodeURI decodeURIComponent encodeURI encodeURIComponent escape unescape Object Function Boolean Error EvalError InternalError RangeError ReferenceError StopIteration SyntaxError TypeError 
URIError Number Math Date String RegExp Array Float32Array Float64Array Int16Array Int32Array Int8Array Uint16Array Uint32Array Uint8Array Uint8ClampedArray ArrayBuffer DataView JSON Intl arguments require module console window document Symbol Set Map WeakSet WeakMap Proxy Reflect Promise"},l={className:"number",variants:[{begin:"\\b(0[bB][01]+)n?"},{begin:"\\b(0[oO][0-7]+)n?"},{begin:e.C_NUMBER_RE+"n?"}],relevance:0},o={className:"subst",begin:"\\$\\{",end:"\\}",keywords:r,contains:[]},c={begin:"html`",end:"",starts:{end:"`",returnEnd:!1,contains:[e.BACKSLASH_ESCAPE,o],subLanguage:"xml"}},d={begin:"css`",end:"",starts:{end:"`",returnEnd:!1,contains:[e.BACKSLASH_ESCAPE,o],subLanguage:"css"}},g={className:"string",begin:"`",end:"`",contains:[e.BACKSLASH_ESCAPE,o]};return o.contains=[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,c,d,g,l,e.REGEXP_MODE],o=o.contains.concat([e.C_BLOCK_COMMENT_MODE,e.C_LINE_COMMENT_MODE]),{aliases:["js","jsx","mjs","cjs"],keywords:r,contains:[{className:"meta",relevance:10,begin:/^\s*['"]use (strict|asm)['"]/},{className:"meta",begin:/^#!/,end:/$/},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,c,d,g,e.C_LINE_COMMENT_MODE,e.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{className:"doctag",begin:"@[A-Za-z]+",contains:[{className:"type",begin:"\\{",end:"\\}",relevance:0},{className:"variable",begin:s+"(?=\\s*(-)|$)",endsParent:!0,relevance:0},{begin:/(?=[^\n])\s/,relevance:0}]}]}),e.C_BLOCK_COMMENT_MODE,l,{begin:/[{,\n]\s*/,relevance:0,contains:[{begin:s+"\\s*:",returnBegin:!0,relevance:0,contains:[{className:"attr",begin:s,relevance:0}]}]},{begin:"("+e.RE_STARTERS_RE+"|\\b(case|return|throw)\\b)\\s*",keywords:"return throw 
case",contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,e.REGEXP_MODE,{className:"function",begin:"(\\(.*?\\)|"+s+")\\s*=>",returnBegin:!0,end:"\\s*=>",contains:[{className:"params",variants:[{begin:s},{begin:/\(\s*\)/},{begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:r,contains:o}]}]},{className:"",begin:/\s/,end:/\s*/,skip:!0},{variants:[{begin:n,end:a},{begin:t,end:i}],subLanguage:"xml",contains:[{begin:t,end:i,skip:!0,contains:["self"]}]}],relevance:0},{className:"function",beginKeywords:"function",end:/\{/,excludeEnd:!0,contains:[e.inherit(e.TITLE_MODE,{begin:s}),{className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,contains:o}],illegal:/\[|%/},{begin:/\$[(.]/},e.METHOD_GUARD,{className:"class",beginKeywords:"class",end:/[{;=]/,excludeEnd:!0,illegal:/[:"\[\]]/,contains:[{beginKeywords:"extends"},e.UNDERSCORE_TITLE_MODE]},{beginKeywords:"constructor get set",end:/\{/,excludeEnd:!0}],illegal:/#(?!!)/}}function f(e){var n={literal:"true false null"},a=[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE],t=[e.QUOTE_STRING_MODE,e.C_NUMBER_MODE],i={end:",",endsWithParent:!0,excludeEnd:!0,contains:t,keywords:n},s={begin:"{",end:"}",contains:[{className:"attr",begin:/"/,end:/"/,contains:[e.BACKSLASH_ESCAPE],illegal:"\\n"},e.inherit(i,{begin:/:/})].concat(a),illegal:"\\S"},i={begin:"\\[",end:"\\]",contains:[e.inherit(i)],illegal:"\\S"};return t.push(s,i),a.forEach(function(e){t.push(e)}),{contains:t,keywords:n,illegal:"\\S"}}function E(e){var n={keyword:"abstract as val var vararg get set class object open private protected public noinline crossinline dynamic final enum if else do while for when throw try catch finally import package is in fun override companion reified inline lateinit init interface annotation data sealed internal infix operator out by constructor super tailrec where const inner suspend typealias external expect actual trait volatile transient native default",built_in:"Byte Short Char Int Long Boolean Float Double Void Unit 
Nothing",literal:"true false null"},a={className:"symbol",begin:e.UNDERSCORE_IDENT_RE+"@"},t={className:"subst",begin:"\\${",end:"}",contains:[e.C_NUMBER_MODE]},i={className:"string",variants:[{begin:'"""',end:'"""(?=[^"])',contains:[c={className:"variable",begin:"\\$"+e.UNDERSCORE_IDENT_RE},t]},{begin:"'",end:"'",illegal:/\n/,contains:[e.BACKSLASH_ESCAPE]},{begin:'"',end:'"',illegal:/\n/,contains:[e.BACKSLASH_ESCAPE,c,t]}]};t.contains.push(i);var s={className:"meta",begin:"@(?:file|property|field|get|set|receiver|param|setparam|delegate)\\s*:(?:\\s*"+e.UNDERSCORE_IDENT_RE+")?"},r={className:"meta",begin:"@"+e.UNDERSCORE_IDENT_RE,contains:[{begin:/\(/,end:/\)/,contains:[e.inherit(i,{className:"meta-string"})]}]},l={className:"number",begin:"\\b(0[bB]([01]+[01_]+[01]+|[01]+)|0[xX]([a-fA-F0-9]+[a-fA-F0-9_]+[a-fA-F0-9]+|[a-fA-F0-9]+)|(([\\d]+[\\d_]+[\\d]+|[\\d]+)(\\.([\\d]+[\\d_]+[\\d]+|[\\d]+))?|\\.([\\d]+[\\d_]+[\\d]+|[\\d]+))([eE][-+]?\\d+)?)[lLfF]?",relevance:0},o=e.COMMENT("/\\*","\\*/",{contains:[e.C_BLOCK_COMMENT_MODE]}),c={variants:[{className:"type",begin:e.UNDERSCORE_IDENT_RE},{begin:/\(/,end:/\)/,contains:[]}]};return(t=c).variants[1].contains=[c],c.variants[1].contains=[t],{aliases:["kt"],keywords:n,contains:[e.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{className:"doctag",begin:"@[A-Za-z]+"}]}),e.C_LINE_COMMENT_MODE,o,{className:"keyword",begin:/\b(break|continue|return|this)\b/,starts:{contains:[{className:"symbol",begin:/@\w+/}]}},a,s,r,{className:"function",beginKeywords:"fun",end:"[(]|$",returnBegin:!0,excludeEnd:!0,keywords:n,illegal:/fun\s+(<.*>)?[^\s\(]+(\s+[^\s\(]+)\s*=/,relevance:5,contains:[{begin:e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,relevance:0,contains:[e.UNDERSCORE_TITLE_MODE]},{className:"type",begin://,keywords:"reified",relevance:0},{className:"params",begin:/\(/,end:/\)/,endsParent:!0,keywords:n,relevance:0,contains:[{begin:/:/,end:/[=,\/]/,endsWithParent:!0,contains:[c,e.C_LINE_COMMENT_MODE,o],relevance:0},e.C_LINE_COMMENT_
MODE,o,s,r,i,e.C_NUMBER_MODE]},o]},{className:"class",beginKeywords:"class interface trait",end:/[:\{(]|$/,excludeEnd:!0,illegal:"extends implements",contains:[{beginKeywords:"public protected internal private constructor"},e.UNDERSCORE_TITLE_MODE,{className:"type",begin://,excludeBegin:!0,excludeEnd:!0,relevance:0},{className:"type",begin:/[,:]\s*/,end:/[<\(,]|$/,excludeBegin:!0,returnEnd:!0},s,r]},i,{className:"meta",begin:"^#!/usr/bin/env",end:"$",illegal:"\n"},l]}}function N(e){return{aliases:["md","mkdown","mkd"],contains:[{className:"section",variants:[{begin:"^#{1,6}",end:"$"},{begin:"^.+?\\n[=-]{2,}$"}]},{begin:"<",end:">",subLanguage:"xml",relevance:0},{className:"bullet",begin:"^\\s*([*+-]|(\\d+\\.))\\s+"},{className:"strong",begin:"[*_]{2}.+?[*_]{2}"},{className:"emphasis",variants:[{begin:"\\*.+?\\*"},{begin:"_.+?_",relevance:0}]},{className:"quote",begin:"^>\\s+",end:"$"},{className:"code",variants:[{begin:"^```\\w*\\s*$",end:"^```[ ]*$"},{begin:"`.+?`"},{begin:"^( {4}|\\t)",end:"$",relevance:0}]},{begin:"^[-\\*]{3,}",end:"$"},{begin:"\\[.+?\\][\\(\\[].*?[\\)\\]]",returnBegin:!0,contains:[{className:"string",begin:"\\[",end:"\\]",excludeBegin:!0,returnEnd:!0,relevance:0},{className:"link",begin:"\\]\\(",end:"\\)",excludeBegin:!0,excludeEnd:!0},{className:"symbol",begin:"\\]\\[",end:"\\]",excludeBegin:!0,excludeEnd:!0}],relevance:10},{begin:/^\[[^\n]+\]:/,returnBegin:!0,contains:[{className:"symbol",begin:/\[/,end:/\]/,excludeBegin:!0,excludeEnd:!0},{className:"link",begin:/:\s*/,end:/$/,excludeBegin:!0}]}]}}function h(e){var n={keyword:"rec with let in inherit assert if else then",literal:"true false or and null",built_in:"import abort baseNameOf dirOf isNull builtins map removeAttrs throw toString 
derivation"},a={className:"subst",begin:/\$\{/,end:/}/,keywords:n},e=[e.NUMBER_MODE,e.HASH_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,{className:"string",contains:[a],variants:[{begin:"''",end:"''"},{begin:'"',end:'"'}]},{begin:/[a-zA-Z0-9-_]+(\s*=)/,returnBegin:!0,relevance:0,contains:[{className:"attr",begin:/\S+/}]}];return{aliases:["nixos"],keywords:n,contains:a.contains=e}}function v(e){var n=/[a-zA-Z@][a-zA-Z0-9_]*/,a="@interface @class @protocol @implementation";return{aliases:["mm","objc","obj-c"],keywords:{keyword:"int float while char export sizeof typedef const struct for union unsigned long volatile static bool mutable if do return goto void enum else break extern asm case short default double register explicit signed typename this switch continue wchar_t inline readonly assign readwrite self @synchronized id typeof nonatomic super unichar IBOutlet IBAction strong weak copy in out inout bycopy byref oneway __strong __weak __block __autoreleasing @private @protected @public @try @property @end @throw @catch @finally @autoreleasepool @synthesize @dynamic @selector @optional @required @encode @package @import @defs @compatibility_alias __bridge __bridge_transfer __bridge_retained __bridge_retain __covariant __contravariant __kindof _Nonnull _Nullable _Null_unspecified __FUNCTION__ __PRETTY_FUNCTION__ __attribute__ getter setter retain unsafe_unretained nonnull nullable null_unspecified null_resettable class instancetype NS_DESIGNATED_INITIALIZER NS_UNAVAILABLE NS_REQUIRES_SUPER NS_RETURNS_INNER_POINTER NS_INLINE NS_AVAILABLE NS_DEPRECATED NS_ENUM NS_OPTIONS NS_SWIFT_UNAVAILABLE NS_ASSUME_NONNULL_BEGIN NS_ASSUME_NONNULL_END NS_REFINED_FOR_SWIFT NS_SWIFT_NAME NS_SWIFT_NOTHROW NS_DURING NS_HANDLER NS_ENDHANDLER NS_VALUERETURN NS_VOIDRETURN",literal:"false true FALSE TRUE nil YES NO NULL",built_in:"BOOL dispatch_once_t dispatch_queue_t dispatch_sync dispatch_async 
dispatch_once"},lexemes:n,illegal:"/,end:/$/,illegal:"\\n"},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{className:"class",begin:"("+a.split(" ").join("|")+")\\b",end:"({|$)",excludeEnd:!0,keywords:a,lexemes:n,contains:[e.UNDERSCORE_TITLE_MODE]},{begin:"\\."+e.UNDERSCORE_IDENT_RE,relevance:0}]}}function y(e){var n="getpwent getservent quotemeta msgrcv scalar kill dbmclose undef lc ma syswrite tr send umask sysopen shmwrite vec qx utime local oct semctl localtime readpipe do return format read sprintf dbmopen pop getpgrp not getpwnam rewinddir qqfileno qw endprotoent wait sethostent bless s|0 opendir continue each sleep endgrent shutdown dump chomp connect getsockname die socketpair close flock exists index shmgetsub for endpwent redo lstat msgctl setpgrp abs exit select print ref gethostbyaddr unshift fcntl syscall goto getnetbyaddr join gmtime symlink semget splice x|0 getpeername recv log setsockopt cos last reverse gethostbyname getgrnam study formline endhostent times chop length gethostent getnetent pack getprotoent getservbyname rand mkdir pos chmod y|0 substr endnetent printf next open msgsnd readdir use unlink getsockopt getpriority rindex wantarray hex system getservbyport endservent int chr untie rmdir prototype tell listen fork shmread ucfirst setprotoent else sysseek link getgrgid shmctl waitpid unpack getnetbyname reset chdir grep split require caller lcfirst until warn while values shift telldir getpwuid my getprotobynumber delete and sort uc defined srand accept package seekdir getprotobyname semop our rename seek if q|0 chroot sysread setpwent no crypt getc chown sqrt write setnetent setpriority foreach tie sin msgget map stat getlogin unless elsif truncate exec keys glob tied closedirioctl socket readlink eval xor readline binmode setservent eof ord bind alarm pipe atan2 getgrent exp time push setgrent gt lt or ne m|0 break given say state 
when",a={className:"subst",begin:"[$@]\\{",end:"\\}",keywords:n},t={begin:"->{",end:"}"},i={variants:[{begin:/\$\d/},{begin:/[\$%@](\^\w\b|#\w+(::\w+)*|{\w+}|\w+(::\w*)*)/},{begin:/[\$%@][^\s\w{]/,relevance:0}]},s=[e.BACKSLASH_ESCAPE,a,i],e=[i,e.HASH_COMMENT_MODE,e.COMMENT("^\\=\\w","\\=cut",{endsWithParent:!0}),t,{className:"string",contains:s,variants:[{begin:"q[qwxr]?\\s*\\(",end:"\\)",relevance:5},{begin:"q[qwxr]?\\s*\\[",end:"\\]",relevance:5},{begin:"q[qwxr]?\\s*\\{",end:"\\}",relevance:5},{begin:"q[qwxr]?\\s*\\|",end:"\\|",relevance:5},{begin:"q[qwxr]?\\s*\\<",end:"\\>",relevance:5},{begin:"qw\\s+q",end:"q",relevance:5},{begin:"'",end:"'",contains:[e.BACKSLASH_ESCAPE]},{begin:'"',end:'"'},{begin:"`",end:"`",contains:[e.BACKSLASH_ESCAPE]},{begin:"{\\w+}",contains:[],relevance:0},{begin:"-?\\w+\\s*\\=\\>",contains:[],relevance:0}]},{className:"number",begin:"(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b",relevance:0},{begin:"(\\/\\/|"+e.RE_STARTERS_RE+"|\\b(split|return|print|reverse|grep)\\b)\\s*",keywords:"split return print reverse grep",relevance:0,contains:[e.HASH_COMMENT_MODE,{className:"regexp",begin:"(s|tr|y)/(\\\\.|[^/])*/(\\\\.|[^/])*/[a-z]*",relevance:10},{className:"regexp",begin:"(m|qr)?/",end:"/[a-z]*",contains:[e.BACKSLASH_ESCAPE],relevance:0}]},{className:"function",beginKeywords:"sub",end:"(\\s*\\(.*?\\))?[;{]",excludeEnd:!0,relevance:5,contains:[e.TITLE_MODE]},{begin:"-\\w\\b",relevance:0},{begin:"^__DATA__$",end:"^__END__$",subLanguage:"mojolicious",contains:[{begin:"^@@.*",end:"$",className:"comment"}]}];return a.contains=e,{aliases:["pl","pm"],lexemes:/[\w\.]+/,keywords:n,contains:t.contains=e}}function w(e){var 
n={begin:"\\$+[a-zA-Z_-ÿ][a-zA-Z0-9_-ÿ]*"},a={className:"meta",begin:/<\?(php)?|\?>/},t={className:"string",contains:[e.BACKSLASH_ESCAPE,a],variants:[{begin:'b"',end:'"'},{begin:"b'",end:"'"},e.inherit(e.APOS_STRING_MODE,{illegal:null}),e.inherit(e.QUOTE_STRING_MODE,{illegal:null})]},i={variants:[e.BINARY_NUMBER_MODE,e.C_NUMBER_MODE]};return{aliases:["php","php3","php4","php5","php6","php7"],case_insensitive:!0,keywords:"and include_once list abstract global private echo interface as static endswitch array null if endwhile or const for endforeach self var while isset public protected exit foreach throw elseif include __FILE__ empty require_once do xor return parent clone use __CLASS__ __LINE__ else break print eval new catch __METHOD__ case exception default die require __FUNCTION__ enddeclare final try switch continue endfor endif declare unset true false trait goto instanceof insteadof __DIR__ __NAMESPACE__ yield finally",contains:[e.HASH_COMMENT_MODE,e.COMMENT("//","$",{contains:[a]}),e.COMMENT("/\\*","\\*/",{contains:[{className:"doctag",begin:"@[A-Za-z]+"}]}),e.COMMENT("__halt_compiler.+?;",!1,{endsWithParent:!0,keywords:"__halt_compiler",lexemes:e.UNDERSCORE_IDENT_RE}),{className:"string",begin:/<<<['"]?\w+['"]?$/,end:/^\w+;?$/,contains:[e.BACKSLASH_ESCAPE,{className:"subst",variants:[{begin:/\$\w+/},{begin:/\{\$/,end:/\}/}]}]},a,{className:"keyword",begin:/\$this\b/},n,{begin:/(::|->)+[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*/},{className:"function",beginKeywords:"function",end:/[;{]/,excludeEnd:!0,illegal:"\\$|\\[|%",contains:[e.UNDERSCORE_TITLE_MODE,{className:"params",begin:"\\(",end:"\\)",contains:["self",n,e.C_BLOCK_COMMENT_MODE,t,i]}]},{className:"class",beginKeywords:"class interface",end:"{",excludeEnd:!0,illegal:/[:\(\$"]/,contains:[{beginKeywords:"extends 
implements"},e.UNDERSCORE_TITLE_MODE]},{beginKeywords:"namespace",end:";",illegal:/[\.']/,contains:[e.UNDERSCORE_TITLE_MODE]},{beginKeywords:"use",end:";",contains:[e.UNDERSCORE_TITLE_MODE]},{begin:"=>"},t,i]}}function O(e){var n="[ \\t\\f]*",a="("+n+"[:=]"+n+"|[ \\t\\f]+)",t="([^\\\\\\W:= \\t\\f\\n]|\\\\.)+",i="([^\\\\:= \\t\\f\\n]|\\\\.)+",s={end:a,relevance:0,starts:{className:"string",end:/$/,relevance:0,contains:[{begin:"\\\\\\n"}]}};return{case_insensitive:!0,illegal:/\S/,contains:[e.COMMENT("^\\s*[!#]","$"),{begin:t+a,returnBegin:!0,contains:[{className:"attr",begin:t,endsParent:!0,relevance:0}],starts:s},{begin:i+a,returnBegin:!0,relevance:0,contains:[{className:"meta",begin:i,endsParent:!0,relevance:0}],starts:s},{className:"attr",relevance:0,begin:i+n+"$"}]}}function M(e){var n=e.COMMENT("#","$"),a="([A-Za-z_]|::)(\\w|::)*",t=e.inherit(e.TITLE_MODE,{begin:a}),i={className:"variable",begin:"\\$"+a},a={className:"string",contains:[e.BACKSLASH_ESCAPE,i],variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/}]};return{aliases:["pp"],contains:[n,i,a,{beginKeywords:"class",end:"\\{|;",illegal:/=/,contains:[t,n]},{beginKeywords:"define",end:/\{/,contains:[{className:"section",begin:e.IDENT_RE,endsParent:!0}]},{begin:e.IDENT_RE+"\\s+\\{",returnBegin:!0,end:/\S/,contains:[{className:"keyword",begin:e.IDENT_RE},{begin:/\{/,end:/\}/,keywords:{keyword:"and case default else elsif false if in import enherits node or true undef unless main settings $string ",literal:"alias audit before loglevel noop require subscribe tag owner ensure group mode name|0 changes context force incl lens load_path onlyif provider returns root show_diff type_check en_address ip_address realname command environment hour monute month monthday special target weekday creates cwd ogoutput refresh refreshonly tries try_sleep umask backup checksum content ctime force ignore links mtime purge recurse recurselimit replace selinux_ignore_defaults selrange selrole seltype seluser source souirce_permissions 
sourceselect validate_cmd validate_replacement allowdupe attribute_membership auth_membership forcelocal gid ia_load_module members system host_aliases ip allowed_trunk_vlans description device_url duplex encapsulation etherchannel native_vlan speed principals allow_root auth_class auth_type authenticate_user k_of_n mechanisms rule session_owner shared options device fstype enable hasrestart directory present absent link atboot blockdevice device dump pass remounts poller_tag use message withpath adminfile allow_virtual allowcdrom category configfiles flavor install_options instance package_settings platform responsefile status uninstall_options vendor unless_system_user unless_uid binary control flags hasstatus manifest pattern restart running start stop allowdupe auths expiry gid groups home iterations key_membership keys managehome membership password password_max_age password_min_age profile_membership profiles project purge_ssh_keys role_membership roles salt shell uid baseurl cost descr enabled enablegroups exclude failovermethod gpgcheck gpgkey http_caching include includepkgs keepalive metadata_expire metalink mirrorlist priority protect proxy proxy_password proxy_username repo_gpgcheck s3_enabled skip_if_unavailable sslcacert sslclientcert sslclientkey sslverify mounted",built_in:"architecture augeasversion blockdevices boardmanufacturer boardproductname boardserialnumber cfkey dhcp_servers domain ec2_ ec2_userdata facterversion filesystems ldom fqdn gid hardwareisa hardwaremodel hostname id|0 interfaces ipaddress ipaddress_ ipaddress6 ipaddress6_ iphostnumber is_virtual kernel kernelmajversion kernelrelease kernelversion kernelrelease kernelversion lsbdistcodename lsbdistdescription lsbdistid lsbdistrelease lsbmajdistrelease lsbminordistrelease lsbrelease macaddress macaddress_ macosx_buildversion macosx_productname macosx_productversion macosx_productverson_major macosx_productversion_minor manufacturer memoryfree memorysize netmask metmask_ network_ 
operatingsystem operatingsystemmajrelease operatingsystemrelease osfamily partitions path physicalprocessorcount processor processorcount productname ps puppetversion rubysitedir rubyversion selinux selinux_config_mode selinux_config_policy selinux_current_mode selinux_current_mode selinux_enforced selinux_policyversion serialnumber sp_ sshdsakey sshecdsakey sshrsakey swapencrypted swapfree swapsize timezone type uniqueid uptime uptime_days uptime_hours uptime_seconds uuid virtual vlans xendomains zfs_version zonenae zones zpool_version"},relevance:0,contains:[a,n,{begin:"[a-zA-Z_]+\\s*=>",returnBegin:!0,end:"=>",contains:[{className:"attr",begin:e.IDENT_RE}]},{className:"number",begin:"(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b",relevance:0},i]}],relevance:0}]}}function C(e){var n={keyword:"and elif is global as in if from raise for except finally print import pass return exec else break not with class assert yield try while continue del or def lambda async await nonlocal|10",built_in:"Ellipsis NotImplemented",literal:"False None True"},a={className:"meta",begin:/^(>>>|\.\.\.) 
/},t={className:"subst",begin:/\{/,end:/\}/,keywords:n,illegal:/#/},i={begin:/\{\{/,relevance:0},s={className:"string",contains:[e.BACKSLASH_ESCAPE],variants:[{begin:/(u|b)?r?'''/,end:/'''/,contains:[e.BACKSLASH_ESCAPE,a],relevance:10},{begin:/(u|b)?r?"""/,end:/"""/,contains:[e.BACKSLASH_ESCAPE,a],relevance:10},{begin:/(fr|rf|f)'''/,end:/'''/,contains:[e.BACKSLASH_ESCAPE,a,i,t]},{begin:/(fr|rf|f)"""/,end:/"""/,contains:[e.BACKSLASH_ESCAPE,a,i,t]},{begin:/(u|r|ur)'/,end:/'/,relevance:10},{begin:/(u|r|ur)"/,end:/"/,relevance:10},{begin:/(b|br)'/,end:/'/},{begin:/(b|br)"/,end:/"/},{begin:/(fr|rf|f)'/,end:/'/,contains:[e.BACKSLASH_ESCAPE,i,t]},{begin:/(fr|rf|f)"/,end:/"/,contains:[e.BACKSLASH_ESCAPE,i,t]},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]},r={className:"number",relevance:0,variants:[{begin:e.BINARY_NUMBER_RE+"[lLjJ]?"},{begin:"\\b(0o[0-7]+)[lLjJ]?"},{begin:e.C_NUMBER_RE+"[lLjJ]?"}]},i={className:"params",begin:/\(/,end:/\)/,contains:["self",a,r,s,e.HASH_COMMENT_MODE]};return t.contains=[s,r,a],{aliases:["py","gyp","ipython"],keywords:n,illegal:/(<\/|->|\?)|=>/,contains:[a,r,{beginKeywords:"if",relevance:0},s,e.HASH_COMMENT_MODE,{variants:[{className:"function",beginKeywords:"def"},{className:"class",beginKeywords:"class"}],end:/:/,illegal:/[${=;\n,]/,contains:[e.UNDERSCORE_TITLE_MODE,i,{begin:/->/,endsWithParent:!0,keywords:"None"}]},{className:"meta",begin:/^[\t ]*@/,end:/$/},{begin:/\b(print|exec)\(/}]}}function x(e){var n="[a-zA-Z_]\\w*[!?=]?|[-+~]\\@|<<|>>|=~|===?|<=>|[<>]=?|\\*\\*|[-/+%^&*~`|]|\\[\\]=?",a={keyword:"and then defined module in return redo if BEGIN retry end for self when next until do begin unless END rescue else break undef not super class case require yield alias while ensure elsif or include attr_reader attr_writer attr_accessor",literal:"true false 
nil"},t={className:"doctag",begin:"@[A-Za-z]+"},i={begin:"#<",end:">"},s=[e.COMMENT("#","$",{contains:[t]}),e.COMMENT("^\\=begin","^\\=end",{contains:[t],relevance:10}),e.COMMENT("^__END__","\\n$")],r={className:"subst",begin:"#\\{",end:"}",keywords:a},l={className:"string",contains:[e.BACKSLASH_ESCAPE,r],variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/`/,end:/`/},{begin:"%[qQwWx]?\\(",end:"\\)"},{begin:"%[qQwWx]?\\[",end:"\\]"},{begin:"%[qQwWx]?{",end:"}"},{begin:"%[qQwWx]?<",end:">"},{begin:"%[qQwWx]?/",end:"/"},{begin:"%[qQwWx]?%",end:"%"},{begin:"%[qQwWx]?-",end:"-"},{begin:"%[qQwWx]?\\|",end:"\\|"},{begin:/\B\?(\\\d{1,3}|\\x[A-Fa-f0-9]{1,2}|\\u[A-Fa-f0-9]{4}|\\?\S)\b/},{begin:/<<[-~]?'?(\w+)(?:.|\n)*?\n\s*\1\b/,returnBegin:!0,contains:[{begin:/<<[-~]?'?/},{begin:/\w+/,endSameAsBegin:!0,contains:[e.BACKSLASH_ESCAPE,r]}]}]},t={className:"params",begin:"\\(",end:"\\)",endsParent:!0,keywords:a},e=[l,i,{className:"class",beginKeywords:"class module",end:"$|;",illegal:/=/,contains:[e.inherit(e.TITLE_MODE,{begin:"[A-Za-z_]\\w*(::\\w+)*(\\?|\\!)?"}),{begin:"<\\s*",contains:[{begin:"("+e.IDENT_RE+"::)?"+e.IDENT_RE}]}].concat(s)},{className:"function",beginKeywords:"def",end:"$|;",contains:[e.inherit(e.TITLE_MODE,{begin:n}),t].concat(s)},{begin:e.IDENT_RE+"::"},{className:"symbol",begin:e.UNDERSCORE_IDENT_RE+"(\\!|\\?)?:",relevance:0},{className:"symbol",begin:":(?!\\s)",contains:[l,{begin:n}],relevance:0},{className:"number",begin:"(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b",relevance:0},{begin:"(\\$\\W)|((\\$|\\@\\@?)(\\w+))"},{className:"params",begin:/\|/,end:/\|/,keywords:a},{begin:"("+e.RE_STARTERS_RE+"|unless)\\s*",keywords:"unless",contains:[i,{className:"regexp",contains:[e.BACKSLASH_ESCAPE,r],illegal:/\n/,variants:[{begin:"/",end:"/[a-z]*"},{begin:"%r{",end:"}[a-z]*"},{begin:"%r\\(",end:"\\)[a-z]*"},{begin:"%r!",end:"![a-z]*"},{begin:"%r\\[",end:"\\][a-z]*"}]}].concat(s),relevance:0}].concat(s);return 
r.contains=e,t.contains=e,{aliases:["rb","gemspec","podspec","thor","irb"],keywords:a,illegal:/\/\*/,contains:s.concat([{begin:/^\s*=>/,starts:{end:"$",contains:e}},{className:"meta",begin:"^([>?]>|[\\w#]+\\(\\w+\\):\\d+:\\d+>|(\\w+-)?\\d+\\.\\d+\\.\\d(p\\d+)?[^>]+>)",starts:{end:"$",contains:e}}]).concat(e)}}function S(e){var n="([ui](8|16|32|64|128|size)|f(32|64))?",a="drop i8 i16 i32 i64 i128 isize u8 u16 u32 u64 u128 usize f32 f64 str char bool Box Option Result String Vec Copy Send Sized Sync Drop Fn FnMut FnOnce ToOwned Clone Debug PartialEq PartialOrd Eq Ord AsRef AsMut Into From Default Iterator Extend IntoIterator DoubleEndedIterator ExactSizeIterator SliceConcatExt ToString assert! assert_eq! bitflags! bytes! cfg! col! concat! concat_idents! debug_assert! debug_assert_eq! env! panic! file! format! format_args! include_bin! include_str! line! local_data_key! module_path! option_env! print! println! select! stringify! try! unimplemented! unreachable! vec! write! writeln! macro_rules! assert_ne! 
debug_assert_ne!";return{aliases:["rs"],keywords:{keyword:"abstract as async await become box break const continue crate do dyn else enum extern false final fn for if impl in let loop macro match mod move mut override priv pub ref return self Self static struct super trait true try type typeof unsafe unsized use virtual where while yield",literal:"true false Some None Ok Err",built_in:a},lexemes:e.IDENT_RE+"!?",illegal:""}]}}function T(e){var n={className:"subst",variants:[{begin:"\\$[A-Za-z0-9_]+"},{begin:"\\${",end:"}"}]},a={className:"string",variants:[{begin:'"',end:'"',illegal:"\\n",contains:[e.BACKSLASH_ESCAPE]},{begin:'"""',end:'"""',relevance:10},{begin:'[a-z]+"',end:'"',illegal:"\\n",contains:[e.BACKSLASH_ESCAPE,n]},{className:"string",begin:'[a-z]+"""',end:'"""',contains:[n],relevance:10}]},t={className:"type",begin:"\\b[A-Z][A-Za-z0-9_]*",relevance:0},n={className:"title",begin:/[^0-9\n\t "'(),.`{}\[\]:;][^\n\t "'(),.`{}\[\]:;]+|[^0-9\n\t "'(),.`{}\[\]:;=]/,relevance:0};return{keywords:{literal:"true false null",keyword:"type yield lazy override def with val var sealed abstract private trait object if forSome for while throw finally protected extends import final return else break new catch super class case package default try this match continue throws implicit"},contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,a,{className:"symbol",begin:"'\\w[\\w\\d_]*(?!')"},t,{className:"function",beginKeywords:"def",end:/[:={\[(\n;]/,excludeEnd:!0,contains:[n]},{className:"class",beginKeywords:"class object trait type",end:/[:={\[\n;]/,excludeEnd:!0,contains:[{beginKeywords:"extends with",relevance:10},{begin:/\[/,end:/\]/,excludeBegin:!0,excludeEnd:!0,relevance:0,contains:[t]},{className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,relevance:0,contains:[t]},n]},e.C_NUMBER_MODE,{className:"meta",begin:"@[A-Za-z]+"}]}}function 
k(e){return{aliases:["console"],contains:[{className:"meta",begin:"^\\s{0,3}[/\\w\\d\\[\\]()@-]*[>%$#]",starts:{end:"$",subLanguage:"bash"}}]}}function A(e){var n=e.COMMENT("--","$");return{case_insensitive:!0,illegal:/[<>{}*]/,contains:[{beginKeywords:"begin end start commit rollback savepoint lock alter create drop rename call delete do handler insert load replace select truncate update set show pragma grant merge describe use explain help declare prepare execute deallocate release unlock purge reset change stop analyze cache flush optimize repair kill install uninstall checksum restore check backup revoke comment values with",end:/;/,endsWithParent:!0,lexemes:/[\w\.]+/,keywords:{keyword:"as abort abs absolute acc acce accep accept access accessed accessible account acos action activate add addtime admin administer advanced advise aes_decrypt aes_encrypt after agent aggregate ali alia alias all allocate allow alter always analyze ancillary and anti any anydata anydataset anyschema anytype apply archive archived archivelog are as asc ascii asin assembly assertion associate asynchronous at atan atn2 attr attri attrib attribu attribut attribute attributes audit authenticated authentication authid authors auto autoallocate autodblink autoextend automatic availability avg backup badfile basicfile before begin beginning benchmark between bfile bfile_base big bigfile bin binary_double binary_float binlog bit_and bit_count bit_length bit_or bit_xor bitmap blob_base block blocksize body both bound bucket buffer_cache buffer_pool build bulk by byte byteordermark bytes cache caching call calling cancel capacity cascade cascaded case cast catalog category ceil ceiling chain change changed char_base char_length character_length characters characterset charindex charset charsetform charsetid check checksum checksum_agg child choose chr chunk class cleanup clear client clob clob_base clone close cluster_id cluster_probability cluster_set clustering coalesce coercibility col 
collate collation collect colu colum column column_value columns columns_updated comment commit compact compatibility compiled complete composite_limit compound compress compute concat concat_ws concurrent confirm conn connec connect connect_by_iscycle connect_by_isleaf connect_by_root connect_time connection consider consistent constant constraint constraints constructor container content contents context contributors controlfile conv convert convert_tz corr corr_k corr_s corresponding corruption cos cost count count_big counted covar_pop covar_samp cpu_per_call cpu_per_session crc32 create creation critical cross cube cume_dist curdate current current_date current_time current_timestamp current_user cursor curtime customdatum cycle data database databases datafile datafiles datalength date_add date_cache date_format date_sub dateadd datediff datefromparts datename datepart datetime2fromparts day day_to_second dayname dayofmonth dayofweek dayofyear days db_role_change dbtimezone ddl deallocate declare decode decompose decrement decrypt deduplicate def defa defau defaul default defaults deferred defi defin define degrees delayed delegate delete delete_all delimited demand dense_rank depth dequeue des_decrypt des_encrypt des_key_file desc descr descri describ describe descriptor deterministic diagnostics difference dimension direct_load directory disable disable_all disallow disassociate discardfile disconnect diskgroup distinct distinctrow distribute distributed div do document domain dotnet double downgrade drop dumpfile duplicate duration each edition editionable editions element ellipsis else elsif elt empty enable enable_all enclosed encode encoding encrypt end end-exec endian enforced engine engines enqueue enterprise entityescaping eomonth error errors escaped evalname evaluate event eventdata events except exception exceptions exchange exclude excluding execu execut execute exempt exists exit exp expire explain explode export export_set extended extent 
external external_1 external_2 externally extract failed failed_login_attempts failover failure far fast feature_set feature_value fetch field fields file file_name_convert filesystem_like_logging final finish first first_value fixed flash_cache flashback floor flush following follows for forall force foreign form forma format found found_rows freelist freelists freepools fresh from from_base64 from_days ftp full function general generated get get_format get_lock getdate getutcdate global global_name globally go goto grant grants greatest group group_concat group_id grouping grouping_id groups gtid_subtract guarantee guard handler hash hashkeys having hea head headi headin heading heap help hex hierarchy high high_priority hosts hour hours http id ident_current ident_incr ident_seed identified identity idle_time if ifnull ignore iif ilike ilm immediate import in include including increment index indexes indexing indextype indicator indices inet6_aton inet6_ntoa inet_aton inet_ntoa infile initial initialized initially initrans inmemory inner innodb input insert install instance instantiable instr interface interleaved intersect into invalidate invisible is is_free_lock is_ipv4 is_ipv4_compat is_not is_not_null is_used_lock isdate isnull isolation iterate java join json json_exists keep keep_duplicates key keys kill language large last last_day last_insert_id last_value lateral lax lcase lead leading least leaves left len lenght length less level levels library like like2 like4 likec limit lines link list listagg little ln load load_file lob lobs local localtime localtimestamp locate locator lock locked log log10 log2 logfile logfiles logging logical logical_reads_per_call logoff logon logs long loop low low_priority lower lpad lrtrim ltrim main make_set makedate maketime managed management manual map mapping mask master master_pos_wait match matched materialized max maxextents maximize maxinstances maxlen maxlogfiles maxloghistory maxlogmembers maxsize maxtrans md5 
measures median medium member memcompress memory merge microsecond mid migration min minextents minimum mining minus minute minutes minvalue missing mod mode model modification modify module monitoring month months mount move movement multiset mutex name name_const names nan national native natural nav nchar nclob nested never new newline next nextval no no_write_to_binlog noarchivelog noaudit nobadfile nocheck nocompress nocopy nocycle nodelay nodiscardfile noentityescaping noguarantee nokeep nologfile nomapping nomaxvalue nominimize nominvalue nomonitoring none noneditionable nonschema noorder nopr nopro noprom nopromp noprompt norely noresetlogs noreverse normal norowdependencies noschemacheck noswitch not nothing notice notnull notrim novalidate now nowait nth_value nullif nulls num numb numbe nvarchar nvarchar2 object ocicoll ocidate ocidatetime ociduration ociinterval ociloblocator ocinumber ociref ocirefcursor ocirowid ocistring ocitype oct octet_length of off offline offset oid oidindex old on online only opaque open operations operator optimal optimize option optionally or oracle oracle_date oradata ord ordaudio orddicom orddoc order ordimage ordinality ordvideo organization orlany orlvary out outer outfile outline output over overflow overriding package pad parallel parallel_enable parameters parent parse partial partition partitions pascal passing password password_grace_time password_lock_time password_reuse_max password_reuse_time password_verify_function patch path patindex pctincrease pctthreshold pctused pctversion percent percent_rank percentile_cont percentile_disc performance period period_add period_diff permanent physical pi pipe pipelined pivot pluggable plugin policy position post_transaction pow power pragma prebuilt precedes preceding precision prediction prediction_cost prediction_details prediction_probability prediction_set prepare present preserve prior priority private private_sga privileges procedural procedure procedure_analyze 
processlist profiles project prompt protection public publishingservername purge quarter query quick quiesce quota quotename radians raise rand range rank raw read reads readsize rebuild record records recover recovery recursive recycle redo reduced ref reference referenced references referencing refresh regexp_like register regr_avgx regr_avgy regr_count regr_intercept regr_r2 regr_slope regr_sxx regr_sxy reject rekey relational relative relaylog release release_lock relies_on relocate rely rem remainder rename repair repeat replace replicate replication required reset resetlogs resize resource respect restore restricted result result_cache resumable resume retention return returning returns reuse reverse revoke right rlike role roles rollback rolling rollup round row row_count rowdependencies rowid rownum rows rtrim rules safe salt sample save savepoint sb1 sb2 sb4 scan schema schemacheck scn scope scroll sdo_georaster sdo_topo_geometry search sec_to_time second seconds section securefile security seed segment select self semi sequence sequential serializable server servererror session session_user sessions_per_user set sets settings sha sha1 sha2 share shared shared_pool short show shrink shutdown si_averagecolor si_colorhistogram si_featurelist si_positionalcolor si_stillimage si_texture siblings sid sign sin size size_t sizes skip slave sleep smalldatetimefromparts smallfile snapshot some soname sort soundex source space sparse spfile split sql sql_big_result sql_buffer_result sql_cache sql_calc_found_rows sql_small_result sql_variant_property sqlcode sqldata sqlerror sqlname sqlstate sqrt square standalone standby start starting startup statement static statistics stats_binomial_test stats_crosstab stats_ks_test stats_mode stats_mw_test stats_one_way_anova stats_t_test_ stats_t_test_indep stats_t_test_one stats_t_test_paired stats_wsr_test status std stddev stddev_pop stddev_samp stdev stop storage store stored str str_to_date straight_join strcmp strict 
string struct stuff style subdate subpartition subpartitions substitutable substr substring subtime subtring_index subtype success sum suspend switch switchoffset switchover sync synchronous synonym sys sys_xmlagg sysasm sysaux sysdate sysdatetimeoffset sysdba sysoper system system_user sysutcdatetime table tables tablespace tablesample tan tdo template temporary terminated tertiary_weights test than then thread through tier ties time time_format time_zone timediff timefromparts timeout timestamp timestampadd timestampdiff timezone_abbr timezone_minute timezone_region to to_base64 to_date to_days to_seconds todatetimeoffset trace tracking transaction transactional translate translation treat trigger trigger_nestlevel triggers trim truncate try_cast try_convert try_parse type ub1 ub2 ub4 ucase unarchived unbounded uncompress under undo unhex unicode uniform uninstall union unique unix_timestamp unknown unlimited unlock unnest unpivot unrecoverable unsafe unsigned until untrusted unusable unused update updated upgrade upped upper upsert url urowid usable usage use use_stored_outlines user user_data user_resources users using utc_date utc_timestamp uuid uuid_short validate validate_password_strength validation valist value values var var_samp varcharc vari varia variab variabl variable variables variance varp varraw varrawc varray verify version versions view virtual visible void wait wallet warning warnings week weekday weekofyear wellformed when whene whenev wheneve whenever where while whitespace window with within without work wrapped xdb xml xmlagg xmlattributes xmlcast xmlcolattval xmlelement xmlexists xmlforest xmlindex xmlnamespaces xmlpi xmlquery xmlroot xmlschema xmlserialize xmltable xmltype xor year year_to_month years yearweek",literal:"true false null unknown",built_in:"array bigint binary bit blob bool boolean char character date dec decimal float int int8 integer interval number numeric real record serial serial8 smallint text time timestamp tinyint 
varchar varchar2 varying void"},contains:[{className:"string",begin:"'",end:"'",contains:[{begin:"''"}]},{className:"string",begin:'"',end:'"',contains:[{begin:'""'}]},{className:"string",begin:"`",end:"`"},e.C_NUMBER_MODE,e.C_BLOCK_COMMENT_MODE,n,e.HASH_COMMENT_MODE]},e.C_BLOCK_COMMENT_MODE,n,e.HASH_COMMENT_MODE]}}function R(e){var n={keyword:"#available #colorLiteral #column #else #elseif #endif #file #fileLiteral #function #if #imageLiteral #line #selector #sourceLocation _ __COLUMN__ __FILE__ __FUNCTION__ __LINE__ Any as as! as? associatedtype associativity break case catch class continue convenience default defer deinit didSet do dynamic dynamicType else enum extension fallthrough false fileprivate final for func get guard if import in indirect infix init inout internal is lazy left let mutating nil none nonmutating open operator optional override postfix precedence prefix private protocol Protocol public repeat required rethrows return right self Self set static struct subscript super switch throw throws true try try! try? 
Type typealias unowned var weak where while willSet",literal:"true false nil",built_in:"abs advance alignof alignofValue anyGenerator assert assertionFailure bridgeFromObjectiveC bridgeFromObjectiveCUnconditional bridgeToObjectiveC bridgeToObjectiveCUnconditional c contains count countElements countLeadingZeros debugPrint debugPrintln distance dropFirst dropLast dump encodeBitsAsWords enumerate equal fatalError filter find getBridgedObjectiveCType getVaList indices insertionSort isBridgedToObjectiveC isBridgedVerbatimToObjectiveC isUniquelyReferenced isUniquelyReferencedNonObjC join lazy lexicographicalCompare map max maxElement min minElement numericCast overlaps partition posix precondition preconditionFailure print println quickSort readLine reduce reflect reinterpretCast reverse roundUpToAlignment sizeof sizeofValue sort split startsWith stride strideof strideofValue swap toString transcode underestimateCount unsafeAddressOf unsafeBitCast unsafeDowncast unsafeUnwrap unsafeReflect withExtendedLifetime withObjectAtPlusZero withUnsafePointer withUnsafePointerToObject withUnsafeMutablePointer withUnsafeMutablePointers withUnsafePointer withUnsafePointers withVaList zip"},a=e.COMMENT("/\\*","\\*/",{contains:["self"]}),t={className:"subst",begin:/\\\(/,end:"\\)",keywords:n,contains:[]},i={className:"string",contains:[e.BACKSLASH_ESCAPE,t],variants:[{begin:/"""/,end:/"""/},{begin:/"/,end:/"/}]},s={className:"number",begin:"\\b([\\d_]+(\\.[\\deE_]+)?|0x[a-fA-F0-9_]+(\\.[a-fA-F0-9p_]+)?|0b[01_]+|0o[0-7_]+)\\b",relevance:0};return 
t.contains=[s],{keywords:n,contains:[i,e.C_LINE_COMMENT_MODE,a,{className:"type",begin:"\\b[A-Z][\\wÀ-ʸ']*[!?]"},{className:"type",begin:"\\b[A-Z][\\wÀ-ʸ']*",relevance:0},s,{className:"function",beginKeywords:"func",end:"{",excludeEnd:!0,contains:[e.inherit(e.TITLE_MODE,{begin:/[A-Za-z$_][0-9A-Za-z$_]*/}),{begin://},{className:"params",begin:/\(/,end:/\)/,endsParent:!0,keywords:n,contains:["self",s,i,e.C_BLOCK_COMMENT_MODE,{begin:":"}],illegal:/["']/}],illegal:/\[|%/},{className:"class",beginKeywords:"struct protocol class extension enum",keywords:n,end:"\\{",excludeEnd:!0,contains:[e.inherit(e.TITLE_MODE,{begin:/[A-Za-z$_][\u00C0-\u02B80-9A-Za-z$_]*/})]},{className:"meta",begin:"(@discardableResult|@warn_unused_result|@exported|@lazy|@noescape|@NSCopying|@NSManaged|@objc|@objcMembers|@convention|@required|@noreturn|@IBAction|@IBDesignable|@IBInspectable|@IBOutlet|@infix|@prefix|@postfix|@autoclosure|@testable|@available|@nonobjc|@NSApplicationMain|@UIApplicationMain|@dynamicMemberLookup|@propertyWrapper)"},{beginKeywords:"import",end:/$/,contains:[e.C_LINE_COMMENT_MODE,a]}]}}function D(e){var 
n={className:"symbol",begin:"&[a-z]+;|&#[0-9]+;|&#x[a-f0-9]+;"},a={begin:"\\s",contains:[{className:"meta-keyword",begin:"#?[a-z_][a-z1-9_-]+",illegal:"\\n"}]},t=e.inherit(a,{begin:"\\(",end:"\\)"}),i=e.inherit(e.APOS_STRING_MODE,{className:"meta-string"}),s=e.inherit(e.QUOTE_STRING_MODE,{className:"meta-string"}),r={endsWithParent:!0,illegal:/`]+/}]}]}]};return{aliases:["html","xhtml","rss","atom","xjb","xsd","xsl","plist","wsf","svg"],case_insensitive:!0,contains:[{className:"meta",begin:"",relevance:10,contains:[a,s,i,t,{begin:"\\[",end:"\\]",contains:[{className:"meta",begin:"",contains:[a,t,s,i]}]}]},e.COMMENT("\x3c!--","--\x3e",{relevance:10}),{begin:"<\\!\\[CDATA\\[",end:"\\]\\]>",relevance:10},n,{className:"meta",begin:/<\?xml/,end:/\?>/,relevance:10},{begin:/<\?(php)?/,end:/\?>/,subLanguage:"php",contains:[{begin:"/\\*",end:"\\*/",skip:!0},{begin:'b"',end:'"',skip:!0},{begin:"b'",end:"'",skip:!0},e.inherit(e.APOS_STRING_MODE,{illegal:null,className:null,contains:null,skip:!0}),e.inherit(e.QUOTE_STRING_MODE,{illegal:null,className:null,contains:null,skip:!0})]},{className:"tag",begin:")",end:">",keywords:{name:"style"},contains:[r],starts:{end:"",returnEnd:!0,subLanguage:["css","xml"]}},{className:"tag",begin:")",end:">",keywords:{name:"script"},contains:[r],starts:{end:"<\/script>",returnEnd:!0,subLanguage:["actionscript","javascript","handlebars","xml"]}},{className:"tag",begin:"",contains:[{className:"name",begin:/[^\/><\s]+/,relevance:0},r]}]}}function L(e){var n="true false yes no null",a={className:"string",relevance:0,variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/\S+/}],contains:[e.BACKSLASH_ESCAPE,{className:"template-variable",variants:[{begin:"{{",end:"}}"},{begin:"%{",end:"}"}]}]};return{case_insensitive:!0,aliases:["yml","YAML","yaml"],contains:[{className:"attr",variants:[{begin:"\\w[\\w :\\/.-]*:(?=[ \t]|$)"},{begin:'"\\w[\\w :\\/.-]*":(?=[ \t]|$)'},{begin:"'\\w[\\w :\\/.-]*':(?=[ 
\t]|$)"}]},{className:"meta",begin:"^---s*$",relevance:10},{className:"string",begin:"[\\|>]([0-9]?[+-])?[ ]*\\n( *)[\\S ]+\\n(\\2[\\S ]+\\n?)*"},{begin:"<%[%=-]?",end:"[%-]?%>",subLanguage:"ruby",excludeBegin:!0,excludeEnd:!0,relevance:0},{className:"type",begin:"!"+e.UNDERSCORE_IDENT_RE},{className:"type",begin:"!!"+e.UNDERSCORE_IDENT_RE},{className:"meta",begin:"&"+e.UNDERSCORE_IDENT_RE+"$"},{className:"meta",begin:"\\*"+e.UNDERSCORE_IDENT_RE+"$"},{className:"bullet",begin:"\\-(?=[ ]|$)",relevance:0},e.HASH_COMMENT_MODE,{beginKeywords:n,keywords:{literal:n}},{className:"number",begin:e.C_NUMBER_RE+"\\b"},a]}}!function(){"use strict";a.registerLanguage("asciidoc",t),a.registerLanguage("bash",i),a.registerLanguage("clojure",s),a.registerLanguage("cpp",r),a.registerLanguage("cs",l),a.registerLanguage("css",o),a.registerLanguage("diff",c),a.registerLanguage("dockerfile",d),a.registerLanguage("elixir",g),a.registerLanguage("go",u),a.registerLanguage("groovy",m),a.registerLanguage("haskell",_),a.registerLanguage("java",b),a.registerLanguage("javascript",p),a.registerLanguage("json",f),a.registerLanguage("kotlin",E),a.registerLanguage("markdown",N),a.registerLanguage("nix",h),a.registerLanguage("objectivec",v),a.registerLanguage("perl",y),a.registerLanguage("php",w),a.registerLanguage("properties",O),a.registerLanguage("puppet",M),a.registerLanguage("python",C),a.registerLanguage("ruby",x),a.registerLanguage("rust",S),a.registerLanguage("scala",T),a.registerLanguage("shell",k),a.registerLanguage("sql",A),a.registerLanguage("swift",R),a.registerLanguage("xml",D),a.registerLanguage("yaml",L),[].slice.call(document.querySelectorAll("pre code.hljs")).forEach(function(e){a.highlightBlock(e)})}()}(); \ No newline at end of file diff --git a/antora-playbook.yml b/antora-playbook.yml deleted file mode 100644 index e6a43124..00000000 --- a/antora-playbook.yml +++ /dev/null @@ -1,22 +0,0 @@ -site: - title: Antora Docs - start_page: component-b::index.adoc -content: - sources: - 
- url: https://gitlab.com/antora/demo/demo-component-a.git - branches: HEAD - - url: https://gitlab.com/antora/demo/demo-component-b.git - branches: [v2.0, v1.0] - start_path: docs -ui: - bundle: - url: https://gitlab.com/antora/antora-ui-default/-/jobs/artifacts/HEAD/raw/build/ui-bundle.zip?job=bundle-stable - snapshot: true - supplemental_files: - - path: ui.yml - contents: | - static_files: [ .nojekyll ] - - path: .nojekyll - -runtime: - fetch: true diff --git a/antora.yml b/antora.yml new file mode 100644 index 00000000..f87c098b --- /dev/null +++ b/antora.yml @@ -0,0 +1,4 @@ +# Descriptor of the common Antora module that contains just a landing page for the whole documentation site +name: index +title: Index +version: index diff --git a/component-a/1.5.6/admonition.html b/component-a/1.5.6/admonition.html deleted file mode 100644 index 8de8163f..00000000 --- a/component-a/1.5.6/admonition.html +++ /dev/null @@ -1,311 +0,0 @@ - - - - - - Admonition :: Antora Docs - - - - - -
- -
-
- -
- -
- -
-

Admonition

-
-

Admonition styles

-
-
-

An admonition, also known as a notice, helps draw attention to a line or block of text with a special label or icon.

-
-
-

Asciidoctor comes with five built-in styles.

-
-
-
    -
  • -

    NOTE

    -
  • -
  • -

    TIP

    -
  • -
  • -

    IMPORTANT

    -
  • -
  • -

    CAUTION

    -
  • -
  • -

    WARNING

    -
  • -
-
-
-
-
-

Basic admonition

-
-
-
Basic admonitions
-
-
CAUTION: Don't stick forks in electric sockets.
-
-TIP: After someone sticks a fork in a socket, you'll need to reset the circuit in the breaker box in the dark and scary basement.
-
-WARNING: Never go into the basement.
-
-IMPORTANT: A monster lives in the basement.
-
-NOTE: If you go into the basement, see if you can find Kenny's orange parka.
-
-
-
- - - - - -
- - -Don’t stick forks in electric sockets. -
-
-
- - - - - -
- - -After someone sticks a fork in a socket, you’ll need to reset the circuit in the breaker box in the dark and scary basement. -
-
-
- - - - - -
- - -Never go into the basement. -
-
-
- - - - - -
- - -A monster lives in the basement. -
-
-
- - - - - -
- - -If you go into the basement, see if you can find Kenny’s orange parka. -
-
-
-
-
-

Complex admonition

-
-
-
Example block masquerading as an admonition
-
-
[IMPORTANT]
-.Optional Title
-====
-Use an example block to create an admonition that contains complex content, such as (but not limited to):
-
-* Lists
-* Multiple paragraphs
-* Source code
-* Images
-====
-
-
-
- - - - - -
- - -
Optional Title
-
-

Use an example block to create an admonition that contains complex content, such as (but not limited to):

-
-
-
    -
  • -

    Lists

    -
  • -
  • -

    Multiple paragraphs

    -
  • -
  • -

    Source code

    -
  • -
  • -

    Images

    -
  • -
-
-
-
-
-
-
-
-
-
-
-

This page was built using the Antora default UI.

-

The source code for this UI is licensed under the terms of the MPL-2.0 license.

-
- - - - diff --git a/component-a/1.5.6/index.html b/component-a/1.5.6/index.html deleted file mode 100644 index 5bbd8602..00000000 --- a/component-a/1.5.6/index.html +++ /dev/null @@ -1,8 +0,0 @@ - - - - - -Redirect Notice -

Redirect Notice

-

The page you requested has been relocated to inline-text-formatting.html.

diff --git a/component-a/1.5.6/inline-text-formatting.html b/component-a/1.5.6/inline-text-formatting.html deleted file mode 100644 index 7d50b118..00000000 --- a/component-a/1.5.6/inline-text-formatting.html +++ /dev/null @@ -1,267 +0,0 @@ - - - - - - Basic Inline Text Formatting :: Antora Docs - - - - - -
- -
-
- -
- -
- -
-

Basic Inline Text Formatting

-
-
-
-

You can add the following inline styles to your content:

-
-
-
    -
  • -

    Bold

    -
  • -
  • -

    Italic

    -
  • -
  • -

    Monospace

    -
  • -
  • -

    Highlight

    -
  • -
-
-
-
-
-

Bold & Italic

-
-
-

You can bold and italic entire phrases, a word, and characters in a word.

-
-
-
Bold and italic inline formatting
-
-
*bold phrase* & **char**acter**s**
-
-_italic phrase_ & __char__acter__s__
-
-*_bold italic phrase_* & **__char__**acter**__s__**
-
-
-
-
Result
-
-
-

bold phrase & characters

-
-
-

italic phrase & characters

-
-
-

bold italic phrase & characters

-
-
-
-
-
-
-

Monospace

-
-
-

You can monospace entire phrases, a word, and characters in a word.

-
-
-
Monospace inline formatting
-
-
`monospace phrase` & ``char``acter``s``
-
-`*monospace bold phrase*` & ``**char**``acter``**s**``
-
-`_monospace italic phrase_` & ``__char__``acter``__s__``
-
-`*_monospace bold italic phrase_*` &
-``**__char__**``acter``**__s__**``
-
-
-
-
Result
-
-
-

monospace phrase & characters

-
-
-

monospace bold phrase & characters

-
-
-

monospace italic phrase & characters

-
-
-

monospace bold italic phrase & -characters

-
-
-
-
-
-
-

Highlight

-
-
-

You can highlight entire phrases, a word, and characters in a word.

-
-
-
Highlight inline formatting
-
-
Let's #highlight this phrase# and part of th##is##.
-
-
-
-
Result
-
-
-

Let’s highlight this phrase and part of this.

-
-
-
-
-
-
-
-
-
-
-

This page was built using the Antora default UI.

-

The source code for this UI is licensed under the terms of the MPL-2.0 license.

-
- - - - diff --git a/component-a/1.5.6/lists/ordered-list.html b/component-a/1.5.6/lists/ordered-list.html deleted file mode 100644 index edb3e074..00000000 --- a/component-a/1.5.6/lists/ordered-list.html +++ /dev/null @@ -1,267 +0,0 @@ - - - - - - Ordered Lists :: Antora Docs - - - - - -
- -
-
- -
- -
- -
-

Ordered Lists

-
-

Basic Ordered List

-
-
-
Basic Ordered List
-
-
.Optional Title of List
-. Step 1
-. Step 2
-. Step 3
-
-
-
-
Optional Title of List
-
    -
  1. -

    Step 1

    -
  2. -
  3. -

    Step 2

    -
  4. -
  5. -

    Step 3

    -
  6. -
-
-
-
-
-

Nested Ordered List

-
-
-
Ordered list with nested levels
-
-
. Step 1
-. Step 2
-.. Step 2a
-.. Step 2b
-. Step 3
-
-
-
-
    -
  1. -

    Step 1

    -
  2. -
  3. -

    Step 2

    -
    -
      -
    1. -

      Step 2a

      -
    2. -
    3. -

      Step 2b

      -
    4. -
    -
    -
  4. -
  5. -

    Step 3

    -
  6. -
-
-
-
Maximum nested level
-
-
. level 1
-.. level 2
-... level 3
-.... level 4
-..... level 5
-. level 1
-
-
-
-
    -
  1. -

    level 1

    -
    -
      -
    1. -

      level 2

      -
      -
        -
      1. -

        level 3

        -
        -
          -
        1. -

          level 4

          -
          -
            -
          1. -

            level 5

            -
          2. -
          -
          -
        2. -
        -
        -
      2. -
      -
      -
    2. -
    -
    -
  2. -
  3. -

    level 1

    -
  4. -
-
-
-
-
-
-
-
-
-

This page was built using the Antora default UI.

-

The source code for this UI is licensed under the terms of the MPL-2.0 license.

-
- - - - diff --git a/component-a/1.5.6/lists/unordered-list.html b/component-a/1.5.6/lists/unordered-list.html deleted file mode 100644 index 306c4b82..00000000 --- a/component-a/1.5.6/lists/unordered-list.html +++ /dev/null @@ -1,267 +0,0 @@ - - - - - - Unordered Lists :: Antora Docs - - - - - -
- -
-
- -
- -
- -
-

Unordered Lists

-
-

Basic Unordered List

-
-
-
Basic Unordered List
-
-
.Optional Title of List
-* Item A
-* Item B
-* Item C
-
-
-
-
Optional Title of List
-
    -
  • -

    Item A

    -
  • -
  • -

    Item B

    -
  • -
  • -

    Item C

    -
  • -
-
-
-
-
-

Nested Unordered List

-
-
-
Unordered list with nested levels
-
-
* Item A
-* Item B
-** Item B1
-** Item B2
-* Item C
-
-
-
-
    -
  • -

    Item A

    -
  • -
  • -

    Item B

    -
    -
      -
    • -

      Item B1

      -
    • -
    • -

      Item B2

      -
    • -
    -
    -
  • -
  • -

    Item C

    -
  • -
-
-
-
Maximum nested level
-
-
* level 1
-** level 2
-*** level 3
-**** level 4
-***** level 5
-* level 1
-
-
-
-
    -
  • -

    level 1

    -
    -
      -
    • -

      level 2

      -
      -
        -
      • -

        level 3

        -
        -
          -
        • -

          level 4

          -
          -
            -
          • -

            level 5

            -
          • -
          -
          -
        • -
        -
        -
      • -
      -
      -
    • -
    -
    -
  • -
  • -

    level 1

    -
  • -
-
-
-
-
-
-
-
-
-

This page was built using the Antora default UI.

-

The source code for this UI is licensed under the terms of the MPL-2.0 license.

-
- - - - diff --git a/component-a/1.5.6/sidebar.html b/component-a/1.5.6/sidebar.html deleted file mode 100644 index 0ebc8ea4..00000000 --- a/component-a/1.5.6/sidebar.html +++ /dev/null @@ -1,176 +0,0 @@ - - - - - - Sidebar :: Antora Docs - - - - - -
- -
-
- -
- -
- -
-

Sidebar

-
- -
-
-

A sidebar can be titled and contain any type of content such as source code and images.

-
-
-
Sidebar
-
-
.Optional Title of Sidebar
-****
-Sidebars are used to visually separate short, auxiliary bits of content that supplement the main text.
-****
-
-
-
-
-
Optional Title of Sidebar
-
-

Sidebars are used to visually separate short, auxiliary bits of content that supplement the main text.

-
-
-
-
-
-
-
-
-
-
-

This page was built using the Antora default UI.

-

The source code for this UI is licensed under the terms of the MPL-2.0 license.

-
- - - - diff --git a/component-a/1.5.6/special-characters.html b/component-a/1.5.6/special-characters.html deleted file mode 100644 index 7fd9e459..00000000 --- a/component-a/1.5.6/special-characters.html +++ /dev/null @@ -1,288 +0,0 @@ - - - - - - Special Characters and Symbols :: Antora Docs - - - - - -
- -
-
- -
- -
- -
-

Special Characters and Symbols

-
-
-
-

Special character and symbol replacement is built into Asciidoctor.

-
-
-

During conversion, the characters in the table below are replaced with the appropriate character or Unicode entity. -Replacement of special characters and symbols occurs in all inline and block elements except for comments and certain passthroughs. -The three special characters, <, >, and &, are always replaced first.

-
-
-

HTML and XML character entities as well as decimal and hexadecimal Unicode code point references are also recognized and replaced.

-
-
-

For example, to produce the § symbol you can use &sect;, &#x00A7;, or &#167;. -When the document is processed, the replacements substitution will replace the section symbol reference, regardless of whether it is a character entity reference or a numeric character reference, with &#167;. -In turn, &#167; will display as §.

-
-
-
-
-

Special character and symbol syntax

-
- ------- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
NameSyntaxReplacementRenderedNotes

Ampersand

&

&amp;

&

Apostrophe, curved

Sam's

Sam&#8217;s

Sam’s

The vertical form apostrophe is replaced with the curved form apostrophe.

Copyright

(C)

&#169;

©

Ellipses

...

&#8230;

…​

Em dash

--

&#8212;

 — 

Only replaced if between two word characters, between a word character and a line boundary, or flanked by spaces.

-

When flanked by space characters (e.g., a -- b), the normal spaces are replaced by thin spaces (&#8201;).

Greater than

>

&gt;

>

Left double arrow

<=

&#8656;

Left single arrow

<-

&#8592;

Less than

<

&lt;

Registered

(R)

&#174;

®

Right double arrow

=>

&#8658;

Right single arrow

->

&#8594;

Trademark

(TM)

&#8482;

-
-
-
-
-
-
-
-

This page was built using the Antora default UI.

-

The source code for this UI is licensed under the terms of the MPL-2.0 license.

-
- - - - diff --git a/component-a/1.5.6/ui-macros.html b/component-a/1.5.6/ui-macros.html deleted file mode 100644 index 2c16ba28..00000000 --- a/component-a/1.5.6/ui-macros.html +++ /dev/null @@ -1,269 +0,0 @@ - - - - - - User Interface Macros :: Antora Docs - - - - - -
- -
-
- -
- -
- -
-

User Interface Macros

-
-
-
-

Asciidoctor has three user interface (UI) macros:

-
-
-
    -
  • -

    Button btn:[]

    -
  • -
  • -

    Keyboard kbd:[]

    -
  • -
  • -

    Menu menu:[]

    -
  • -
-
-
- - - - - -
- - -The :experimental: attribute must be set in the document header or globally to enable UI macros. -
-
-
-
-
-

Button

-
-
-

Communicate that a user should press a button with the button macro.

-
-
-
Button UI macro
-
-
Press the btn:[Submit] button when you are finished the survey.
-
-Select a file in the file navigator and click btn:[Open].
-
-
-
-
Result
-
-
-

Press the Submit button when you are finished the survey.

-
-
-

Select a file in the file navigator and click Open.

-
-
-
-
-
-
-

Keyboard

-
-
-

Create keyboard shortcuts with the keyboard macro.

-
-
-
Keyboard UI macro
-
-
Press kbd:[esc] to exit insert mode.
-
-Use the shortcut kbd:[Ctrl+T] to open a new tab in your browser.
-
-kbd:[Ctrl+Shift+N] will open a new incognito window.
-
-
-
-
Result
-
-
-

Press esc to exit insert mode.

-
-
-

Use the shortcut Ctrl+T to open a new tab in your browser.

-
-
-

Ctrl+Shift+N will open a new incognito window.

-
-
-
-
-
-
- -
-
-

Show readers how to select a menu item with the menu macro.

-
-
-
Menu UI macro
-
-
To save the file, select menu:File[Save].
-
-Select menu:View[Zoom > Reset] to reset the zoom level to the default setting.
-
-
-
-
Result
-
-
-

To save the file, select File  Save.

-
-
-

Select View  Zoom  Reset to reset the zoom level to the default setting.

-
-
-
-
-
-
-
-
-
-
-

This page was built using the Antora default UI.

-

The source code for this UI is licensed under the terms of the MPL-2.0 license.

-
- - - - diff --git a/component-b/1.0/index.html b/component-b/1.0/index.html deleted file mode 100644 index 1205f611..00000000 --- a/component-b/1.0/index.html +++ /dev/null @@ -1,180 +0,0 @@ - - - - - - Welcome to Component B! :: Antora Docs - - - - - -
- -
-
- -
- -
- -
-

Welcome to Component B!

-
-
-
-

This is the automatic start page for version 1.0 of Component B.

-
-
-

Component B is one of two documentation components in the Antora Demo. -The other docs component, Component A, can be accessed via the component selector menu (aka component drawer) at the bottom of the menu on the left side of the page.

-
-
-
-
-

Page source

-
-
-

This page is sourced from the AsciiDoc file named index.adoc that is located at demo-component-b/docs/modules/ROOT/pages.

-
-
-

Why is this the home page of Component B?

-
-

This page is automatically used as the start page of Component B because it is stored in the ROOT module and named index.adoc.

-
-
-
-
-
-

Cross reference syntax to target this page

-
-
-

To create a cross reference to this page from another page in the ROOT module of Component B, the xref syntax would be xref:index.adoc[].

-
-
-

To create a cross reference to this page from a page in Module One of Component B, the xref syntax would be xref:ROOT:index.adoc[].

-
-
-

To create a cross reference to this page from a page in Component A, the xref syntax would be xref:1.0@component-b::index.adoc[].

-
-
-
-
-
-
-
-
-

This page was built using the Antora default UI.

-

The source code for this UI is licensed under the terms of the MPL-2.0 license.

-
- - - - diff --git a/component-b/1.0/module-one/overview.html b/component-b/1.0/module-one/overview.html deleted file mode 100644 index ed96a49d..00000000 --- a/component-b/1.0/module-one/overview.html +++ /dev/null @@ -1,170 +0,0 @@ - - - - - - Module One Overview :: Antora Docs - - - - - -
- -
-
- -
- -
- -
-

Module One Overview

-
-
-
-

This is version 1.0 of Module One in Component B.

-
-
-
-
-

Page source

-
-
-

This page is sourced from demo-component-b/docs/modules/module-one/pages/overview.adoc.

-
-
-
-
-

Cross reference syntax to target this page

-
-
-

To create a cross reference to this page from another page in Module One, the xref syntax would be xref:overview.adoc[].

-
-
-

To create a cross reference to this page from a page in the ROOT module of Component B, the xref syntax would be xref:module-one:overview.adoc[].

-
-
-

To create a cross reference to this page from a page in Component A, the xref syntax would be xref:1.0@component-b:module-one:overview.adoc[].

-
-
-
-
-
-
-
-
-

This page was built using the Antora default UI.

-

The source code for this UI is licensed under the terms of the MPL-2.0 license.

-
- - - - diff --git a/component-b/2.0/index.html b/component-b/2.0/index.html deleted file mode 100644 index 1048cb71..00000000 --- a/component-b/2.0/index.html +++ /dev/null @@ -1,200 +0,0 @@ - - - - - - Welcome to the Antora Demo! :: Antora Docs - - - - - -
- -
-
- -
- -
- -
-

Welcome to the Antora Demo!

-
-
-
-

This is the home page of the Antora Demo site. -It is also the automatic start page for version 2.0 of Component B.

-
-
-

Component B is one of two documentation components in the Antora Demo. -The other docs component, Component A, can be accessed via the component selector menu (aka component drawer) at the bottom of the menu on the left side of the page.

-
-
-

To switch to version 1.0 of this page, use the page version selector. -The selector is on the task bar, located near the upper right corner of the page.

-
-
-
-
-

Page source

-
-
-

This page is sourced from the AsciiDoc file named index.adoc that is located at demo-component-b/docs/modules/ROOT/pages.

-
-
-

Why is this the home page of Component B?

-
-

This page is automatically used as the start page of Component B because it is stored in the ROOT module and named index.adoc.

-
-
-
-

Why is this the home page of the Antora Demo site?

-
-

This page is used as the home page for the Antora Demo because it is assigned as the site start_page in the Demo site’s playbook file site.yml.

-
-
-
-
-
-

Cross reference syntax to target this page

-
-
-

To create a cross reference to this page from another page in the ROOT module of Component B, the xref syntax would be xref:index.adoc[].

-
-
-

To create a cross reference to this page from a page in Module One of Component B, the xref syntax would be xref:ROOT:index.adoc[].

-
-
-

Always target the latest version of this page

-
-

To create a cross reference to the latest version of this page from a page in Component A, the xref syntax would be xref:component-b::index.adoc[].

-
-
-
-

Target a specific version of this page

-
-

To create a cross reference to version 1.0 of this page from a page in Component A, the xref syntax would be xref:1.0@component-b::index.adoc[].

-
-
-
-
-
-
-
-
-
-

This page was built using the Antora default UI.

-

The source code for this UI is licensed under the terms of the MPL-2.0 license.

-
- - - - diff --git a/component-b/2.0/module-one/overview.html b/component-b/2.0/module-one/overview.html deleted file mode 100644 index a3607a1d..00000000 --- a/component-b/2.0/module-one/overview.html +++ /dev/null @@ -1,183 +0,0 @@ - - - - - - Module One Overview :: Antora Docs - - - - - -
- -
-
- -
- -
- -
-

Module One Overview

-
-
-
-

This is version 2.0 of Module One in Component B.

-
-
-

To switch to version 1.0 of this page, use the page version selector. -The selector is on the task bar, located near the upper right corner of the page.

-
-
-
-
-

Page source

-
-
-

This page is sourced from demo-component-b/docs/modules/module-one/pages/overview.adoc.

-
-
-
-
-

Cross reference syntax to target this page

-
-
-

To create a cross reference to this page from another page in Module One, the xref syntax would be xref:overview.adoc[].

-
-
-

To create a cross reference to this page from a page in the ROOT module of Component B, the xref syntax would be xref:module-one:overview.adoc[].

-
-
-

Always target the latest version of this page

-
-

To create a cross reference to the latest version of this page from a page in Component A, the xref syntax would be xref:component-b:module-one:overview.adoc[].

-
-
-
-

Target a specific version of this page

-
-

To create a cross reference to version 1.0 of this page from a page in Component A, the xref syntax would be xref:1.0@component-b:module-one:overview.adoc[].

-
-
-
-
-
-
-
-
-
-

This page was built using the Antora default UI.

-

The source code for this UI is licensed under the terms of the MPL-2.0 license.

-
- - - - diff --git a/docs/_/css/site.css b/docs/_/css/site.css new file mode 100644 index 00000000..c378b644 --- /dev/null +++ b/docs/_/css/site.css @@ -0,0 +1,3 @@ +@font-face{font-family:Roboto;font-style:normal;font-weight:400;src:local("Roboto Regular"),local("Roboto-Regular"),url(../font/roboto-latin-400.woff2) format("woff2"),url(../font/roboto-latin-400.woff) format("woff")}@font-face{font-family:Roboto;font-style:italic;font-weight:400;src:local("Roboto Italic"),local("Roboto-Italic"),url(../font/roboto-latin-400italic.woff2) format("woff2"),url(../font/roboto-latin-400italic.woff) format("woff")}@font-face{font-family:Roboto;font-style:normal;font-weight:500;src:local("Roboto Medium"),local("Roboto-Medium"),url(../font/roboto-latin-500.woff2) format("woff2"),url(../font/roboto-latin-500.woff) format("woff")}@font-face{font-family:Roboto;font-style:italic;font-weight:500;src:local("Roboto Medium Italic"),local("Roboto-MediumItalic"),url(../font/roboto-latin-500italic.woff2) format("woff2"),url(../font/roboto-latin-500italic.woff) format("woff")}@font-face{font-family:Roboto Mono;font-style:normal;font-weight:400;src:local("Roboto Mono Regular"),local("RobotoMono-Regular"),url(../font/roboto-mono-latin-400.woff2) format("woff2"),url(../font/roboto-mono-latin-400.woff) format("woff")}@font-face{font-family:Roboto Mono;font-style:normal;font-weight:500;src:local("Roboto Mono Medium"),local("RobotoMono-Medium"),url(../font/roboto-mono-latin-500.woff2) format("woff2"),url(../font/roboto-mono-latin-500.woff) format("woff")}*,::after,::before{-webkit-box-sizing:inherit;box-sizing:inherit}html{-webkit-box-sizing:border-box;box-sizing:border-box;font-size:1.0625em;height:100%}@media screen and (min-width:1024px){html{font-size:1.125em}}body{background:#fff;color:#222;font-family:Roboto,sans-serif;line-height:1.15;margin:0;word-wrap:anywhere}a{text-decoration:none}a:hover{text-decoration:underline}a:active{background-color:none}code,kbd,pre{font-family:Roboto 
Mono,monospace}b,dt,strong,th{font-weight:500}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}em em{font-style:normal}strong strong{font-weight:400}button{cursor:pointer;font-family:inherit;font-size:1em;line-height:1.15;margin:0}button::-moz-focus-inner{border:none;padding:0}summary{cursor:pointer;-webkit-tap-highlight-color:transparent;outline:none}table{border-collapse:collapse;word-wrap:normal}object[type="image/svg+xml"]:not([width]){width:-webkit-fit-content;width:-moz-fit-content;width:fit-content}@supports (scrollbar-width:thin){body *{scrollbar-width:thin;scrollbar-color:#c1c1c1 transparent}}body ::-webkit-scrollbar{height:.25rem;width:.25rem}body ::-webkit-scrollbar-thumb{background-color:#c1c1c1}@media screen and (min-width:1024px){.body{display:-webkit-box;display:-ms-flexbox;display:flex}}.nav-container{position:fixed;top:3.5rem;left:0;width:100%;font-size:.94444rem;z-index:1;visibility:hidden}@media screen and (min-width:769px){.nav-container{width:15rem}}@media screen and (min-width:1024px){.nav-container{font-size:.86111rem;-webkit-box-flex:0;-ms-flex:none;flex:none;position:static;top:0;visibility:visible}}.nav-container.is-active{visibility:visible}.nav{background:#fafafa;position:relative;top:2.5rem;height:calc(100vh - 6rem)}@media screen and (min-width:769px){.nav{-webkit-box-shadow:.5px 0 3px #c1c1c1;box-shadow:.5px 0 3px #c1c1c1}}@media screen and (min-width:1024px){.nav{top:3.5rem;-webkit-box-shadow:none;box-shadow:none;position:sticky;height:calc(100vh - 3.5rem)}}.nav .panels{display:-webkit-box;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-ms-flex-direction:column;flex-direction:column;height:inherit}html.is-clipped--nav{overflow-y:hidden}.nav-panel-menu{overflow-y:scroll;-ms-scroll-chaining:none;overscroll-behavior:none;height:calc(100% - 2.5rem)}.nav-panel-menu:not(.is-active) 
.nav-menu{opacity:.75}.nav-panel-menu:not(.is-active)::after{content:"";background:rgba(0,0,0,.5);display:block;position:absolute;top:0;right:0;bottom:0;left:0}.nav-menu{min-height:100%;padding:.5rem .75rem;line-height:1.35;position:relative}.nav-menu h3.title{color:#424242;font-size:inherit;font-weight:500;margin:0;padding:.25em 0 .125em}.nav-menu a{color:inherit}.nav-list{margin:0 0 0 .75rem;padding:0}.nav-menu>.nav-list+.nav-list{margin-top:.5rem}.nav-item{list-style:none;margin-top:.5em}.nav-item-toggle~.nav-list{padding-bottom:.125rem}.nav-item[data-depth="0"]>.nav-list:first-child{display:block;margin:0}.nav-item:not(.is-active)>.nav-list{display:none}.nav-item-toggle{background:transparent url(../img/caret.svg) no-repeat 50%/50%;border:none;outline:none;line-height:inherit;position:absolute;height:1.35em;width:1.35em;margin-top:-.05em;margin-left:-1.35em}.nav-item.is-active>.nav-item-toggle{-webkit-transform:rotate(90deg);transform:rotate(90deg)}.is-current-page>.nav-link,.is-current-page>.nav-text{font-weight:500}.nav-panel-explore{background:#fafafa;display:-webkit-box;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-ms-flex-direction:column;flex-direction:column;position:absolute;top:0;right:0;bottom:0;left:0}.nav-panel-explore:not(:first-child){top:auto;max-height:calc(50% + 2.5rem)}.nav-panel-explore .context{font-size:.83333rem;-ms-flex-negative:0;flex-shrink:0;color:#5d5d5d;-webkit-box-shadow:0 -1px 0 #e1e1e1;box-shadow:0 -1px 0 #e1e1e1;padding:0 .5rem;display:-webkit-box;display:-ms-flexbox;display:flex;-webkit-box-align:center;-ms-flex-align:center;align-items:center;-webkit-box-pack:justify;-ms-flex-pack:justify;justify-content:space-between;cursor:pointer;line-height:1;height:2.5rem}.nav-panel-explore .context .version{display:-webkit-box;display:-ms-flexbox;display:flex;-webkit-box-align:inherit;-ms-flex-align:inherit;align-items:inherit}.nav-panel-explore .context 
.version::after{content:"";background:url(../img/chevron.svg) no-repeat 100%/auto 100%;width:1.25em;height:.75em}.nav-panel-explore .components{line-height:1.6;-webkit-box-flex:1;-ms-flex-positive:1;flex-grow:1;-webkit-box-shadow:inset 0 1px 5px #e1e1e1;box-shadow:inset 0 1px 5px #e1e1e1;background:#f0f0f0;padding:.5rem .75rem 0;margin:0;overflow-y:scroll;max-height:100%;display:block}.nav-panel-explore:not(.is-active) .components{display:none}.nav-panel-explore .component{display:block}.nav-panel-explore .component+.component{margin-top:.5rem}.nav-panel-explore .component:last-child{margin-bottom:.75rem}.nav-panel-explore .component .title{font-weight:500;color:inherit}.nav-panel-explore .versions{display:-webkit-box;display:-ms-flexbox;display:flex;-ms-flex-wrap:wrap;flex-wrap:wrap;list-style:none;padding-left:0;margin-top:-.25rem;line-height:1}.nav-panel-explore .component .version{display:block;margin:.375rem .375rem 0 0}.nav-panel-explore .component .version a{border:1px solid #c1c1c1;border-radius:.25rem;color:inherit;opacity:.75;white-space:nowrap;padding:.125em .25em;display:inherit}.nav-panel-explore .component .is-current a{border-color:currentColor;opacity:.9;font-weight:500}@media screen and (max-width:1023.5px){aside.toc.sidebar{display:none}main>.content{overflow-x:auto}}@media screen and (min-width:1024px){main{-webkit-box-flex:1;-ms-flex:auto;flex:auto;min-width:0}main>.content{display:-webkit-box;display:-ms-flexbox;display:flex}aside.toc.embedded{display:none}aside.toc.sidebar{-webkit-box-flex:0;-ms-flex:0 0 9rem;flex:0 0 9rem;-webkit-box-ordinal-group:2;-ms-flex-order:1;order:1}}@media screen and (min-width:1216px){aside.toc.sidebar{-ms-flex-preferred-size:12rem;flex-basis:12rem}}.toolbar{color:#5d5d5d;-webkit-box-align:center;-ms-flex-align:center;align-items:center;background-color:#fafafa;-webkit-box-shadow:0 1px 0 #e1e1e1;box-shadow:0 1px 0 
#e1e1e1;display:-webkit-box;display:-ms-flexbox;display:flex;font-size:.83333rem;height:2.5rem;-webkit-box-pack:start;-ms-flex-pack:start;justify-content:flex-start;position:sticky;top:3.5rem;z-index:2}.toolbar a{color:inherit}.nav-toggle{background:url(../img/menu.svg) no-repeat 50% 47.5%;background-size:49%;border:none;outline:none;line-height:inherit;padding:0;height:2.5rem;width:2.5rem;margin-right:-.25rem}@media screen and (min-width:1024px){.nav-toggle{display:none}}.nav-toggle.is-active{background-image:url(../img/back.svg);background-size:41.5%}.home-link{display:block;background:url(../img/home-o.svg) no-repeat 50%;height:1.25rem;width:1.25rem;margin:.625rem}.home-link.is-current,.home-link:hover{background-image:url(../img/home.svg)}.edit-this-page{display:none;padding-right:.5rem}@media screen and (min-width:1024px){.edit-this-page{display:block}}.toolbar .edit-this-page a{color:#8e8e8e}.breadcrumbs{display:none;-webkit-box-flex:1;-ms-flex:1 1;flex:1 1;padding:0 .5rem 0 .75rem;line-height:1.35}@media screen and (min-width:1024px){.breadcrumbs{display:block}}a+.breadcrumbs{padding-left:.05rem}.breadcrumbs ul{display:-webkit-box;display:-ms-flexbox;display:flex;-ms-flex-wrap:wrap;flex-wrap:wrap;margin:0;padding:0;list-style:none}.breadcrumbs li{display:inline;margin:0}.breadcrumbs li::after{content:"/";padding:0 .5rem}.breadcrumbs li:last-of-type::after{content:none}.page-versions{margin:0 .2rem 0 auto;position:relative;line-height:1}@media screen and (min-width:1024px){.page-versions{margin-right:.7rem}}.page-versions .version-menu-toggle{color:inherit;background:url(../img/chevron.svg) no-repeat;background-position:right .5rem top 50%;background-size:auto .75em;border:none;outline:none;line-height:inherit;padding:.5rem 1.5rem .5rem .5rem;position:relative;z-index:3}.page-versions 
.version-menu{display:-webkit-box;display:-ms-flexbox;display:flex;min-width:100%;-webkit-box-orient:vertical;-webkit-box-direction:normal;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:end;-ms-flex-align:end;align-items:flex-end;background:-webkit-gradient(linear,left top,left bottom,from(#f0f0f0),to(#f0f0f0)) no-repeat;background:linear-gradient(180deg,#f0f0f0 0,#f0f0f0) no-repeat;padding:1.375rem 1.5rem .5rem .5rem;position:absolute;top:0;right:0;white-space:nowrap}.page-versions:not(.is-active) .version-menu{display:none}.page-versions .version{display:block;padding-top:.5rem}.page-versions .version.is-current{display:none}.page-versions .version.is-missing{color:#8e8e8e;font-style:italic;text-decoration:none}.toc-menu{color:#5d5d5d}.toc.sidebar .toc-menu{margin-right:.75rem;position:sticky;top:6rem}.toc .toc-menu h3{color:#333;font-size:.88889rem;font-weight:500;line-height:1.3;margin:0 -.5px;padding-bottom:.25rem}.toc.sidebar .toc-menu h3{display:-webkit-box;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-ms-flex-direction:column;flex-direction:column;height:2.5rem;-webkit-box-pack:end;-ms-flex-pack:end;justify-content:flex-end}.toc .toc-menu ul{font-size:.83333rem;line-height:1.2;list-style:none;margin:0;padding:0}.toc.sidebar .toc-menu ul{max-height:calc(100vh - 8.5rem);overflow-y:auto;-ms-scroll-chaining:none;overscroll-behavior:none;scrollbar-width:none}.toc .toc-menu ul::-webkit-scrollbar{width:0;height:0}@media screen and (min-width:1024px){.toc .toc-menu h3{font-size:.83333rem}.toc .toc-menu ul{font-size:.75rem}}.toc .toc-menu li{margin:0}.toc .toc-menu li[data-level="2"] a{padding-left:1.25rem}.toc .toc-menu li[data-level="3"] a{padding-left:2rem}.toc .toc-menu a{color:inherit;border-left:2px solid #e1e1e1;display:inline-block;padding:.25rem 0 .25rem .5rem;text-decoration:none}.sidebar.toc .toc-menu a{display:block;outline:none}.toc .toc-menu a:hover{color:#1565c0}.toc .toc-menu 
a.is-active{border-left-color:#1565c0;color:#333}.sidebar.toc .toc-menu a:focus{background:#fafafa}.toc .toc-menu .is-hidden-toc{display:none!important}.doc{color:#333;font-size:inherit;-webkit-hyphens:auto;-ms-hyphens:auto;hyphens:auto;line-height:1.6;margin:0 auto;max-width:40rem;padding:0 1rem 4rem}@media screen and (min-width:1024px){.doc{-webkit-box-flex:1;-ms-flex:auto;flex:auto;font-size:.94444rem;margin:0 2rem;max-width:46rem;min-width:0}}.doc h1,.doc h2,.doc h3,.doc h4,.doc h5,.doc h6{color:#191919;font-weight:400;-webkit-hyphens:none;-ms-hyphens:none;hyphens:none;line-height:1.3;margin:1rem 0 0}.doc>h1.page:first-child{font-size:2rem;margin:1.5rem 0}@media screen and (min-width:769px){.doc>h1.page:first-child{margin-top:2.5rem}}.doc>h1.page:first-child+aside.toc.embedded{margin-top:-.5rem}.doc>h2#name+.sectionbody{margin-top:1rem}#preamble+.sect1,.doc .sect1+.sect1{margin-top:2rem}.doc h1.sect0{background:#f0f0f0;font-size:1.8em;margin:1.5rem -1rem 0;padding:.5rem 1rem}.doc h2:not(.discrete){border-bottom:1px solid #e1e1e1;margin-left:-1rem;margin-right:-1rem;padding:.4rem 1rem .1rem}.doc h3:not(.discrete),.doc h4:not(.discrete){font-weight:500}.doc h1 .anchor,.doc h2 .anchor,.doc h3 .anchor,.doc h4 .anchor,.doc h5 .anchor,.doc h6 .anchor{position:absolute;text-decoration:none;width:1.75ex;margin-left:-1.5ex;visibility:hidden;font-size:.8em;font-weight:400;padding-top:.05em}.doc h1 .anchor::before,.doc h2 .anchor::before,.doc h3 .anchor::before,.doc h4 .anchor::before,.doc h5 .anchor::before,.doc h6 .anchor::before{content:"\00a7"}.doc h1:hover .anchor,.doc h2:hover .anchor,.doc h3:hover .anchor,.doc h4:hover .anchor,.doc h5:hover .anchor,.doc h6:hover .anchor{visibility:visible}.doc dl,.doc p{margin:0}.doc a{color:#1565c0}.doc a:hover{color:#104d92}.doc a.bare{-webkit-hyphens:none;-ms-hyphens:none;hyphens:none}.doc a.unresolved{color:#d32f2f}.doc i.fa{-webkit-hyphens:none;-ms-hyphens:none;hyphens:none;font-style:normal}.doc .colist>table code,.doc p 
code,.doc thead code{color:#222;background:#fafafa;border-radius:.25em;font-size:.95em;padding:.125em .25em}.doc code,.doc pre{-webkit-hyphens:none;-ms-hyphens:none;hyphens:none}.doc pre{font-size:.88889rem;line-height:1.5;margin:0}.doc blockquote{margin:0}.doc .paragraph.lead>p{font-size:1rem}.doc .right{float:right}.doc .left{float:left}.doc .stretch{width:100%}.doc .underline{text-decoration:underline}.doc .line-through{text-decoration:line-through}.doc .dlist,.doc .exampleblock,.doc .hdlist,.doc .imageblock,.doc .listingblock,.doc .literalblock,.doc .olist,.doc .paragraph,.doc .partintro,.doc .quoteblock,.doc .sidebarblock,.doc .ulist,.doc .verseblock,.doc .videoblock,.doc details,.doc hr{margin:1rem 0 0}.doc table.tableblock{font-size:.83333rem;margin:1.5rem 0 0}.doc table.tableblock+*{margin-top:1.5rem}.doc p.tableblock+p.tableblock{margin-top:.5rem}.doc td.tableblock>.content>:first-child{margin-top:0}.doc table.tableblock td,.doc table.tableblock th{padding:.5rem}.doc table.tableblock,.doc table.tableblock>*>tr>*{border:0 solid #e1e1e1}.doc table.grid-all>*>tr>*{border-width:1px}.doc table.grid-cols>*>tr>*{border-width:0 1px}.doc table.grid-rows>*>tr>*{border-width:1px 0}.doc table.grid-all>thead th,.doc table.grid-rows>thead th{border-bottom-width:2.5px}.doc table.frame-all{border-width:1px}.doc table.frame-ends{border-width:1px 0}.doc table.frame-sides{border-width:0 1px}.doc table.frame-none>colgroup+*>:first-child>*,.doc table.frame-sides>colgroup+*>:first-child>*{border-top-width:0}.doc table.frame-sides>:last-child>:last-child>*{border-bottom-width:0}.doc table.frame-ends>*>tr>:first-child,.doc table.frame-none>*>tr>:first-child{border-left-width:0}.doc table.frame-ends>*>tr>:last-child,.doc table.frame-none>*>tr>:last-child{border-right-width:0}.doc table.stripes-all>tbody>tr,.doc table.stripes-even>tbody>tr:nth-of-type(2n),.doc table.stripes-hover>tbody>tr:hover,.doc table.stripes-odd>tbody>tr:nth-of-type(odd){background:#fafafa}.doc 
table.tableblock>tfoot{background:-webkit-gradient(linear,left top,left bottom,from(#f0f0f0),to(#fff));background:linear-gradient(180deg,#f0f0f0 0,#fff)}.doc .halign-left{text-align:left}.doc .halign-right{text-align:right}.doc .halign-center{text-align:center}.doc .valign-top{vertical-align:top}.doc .valign-bottom{vertical-align:bottom}.doc .valign-middle{vertical-align:middle}.doc .admonitionblock{margin:1.4rem 0 0}.doc .admonitionblock p,.doc .admonitionblock td.content{font-size:.88889rem}.doc .admonitionblock td.content>.title+*,.doc .admonitionblock td.content>:not(.title):first-child{margin-top:0}.doc .admonitionblock pre{font-size:.83333rem}.doc .admonitionblock>table{table-layout:fixed;position:relative;width:100%}.doc .admonitionblock td.content{padding:1rem 1rem .75rem;background:#fafafa;width:100%;word-wrap:anywhere}.doc .admonitionblock .icon{position:absolute;top:0;left:0;font-size:.83333rem;padding:0 .5rem;height:1.25rem;line-height:1;font-weight:500;text-transform:uppercase;border-radius:.45rem;-webkit-transform:translate(-.5rem,-50%);transform:translate(-.5rem,-50%)}.doc .admonitionblock.caution .icon{background-color:#a0439c;color:#fff}.doc .admonitionblock.important .icon{background-color:#d32f2f;color:#fff}.doc .admonitionblock.note .icon{background-color:#217ee7;color:#fff}.doc .admonitionblock.tip .icon{background-color:#41af46;color:#fff}.doc .admonitionblock.warning .icon{background-color:#e18114;color:#fff}.doc .admonitionblock .icon i{display:-webkit-inline-box;display:-ms-inline-flexbox;display:inline-flex;-webkit-box-align:center;-ms-flex-align:center;align-items:center;height:100%}.doc .admonitionblock .icon i::after{content:attr(title)}.doc .imageblock,.doc .videoblock{display:-webkit-box;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:center;-ms-flex-align:center;align-items:center}.doc .image>img,.doc .image>object,.doc 
.image>svg,.doc .imageblock img,.doc .imageblock object,.doc .imageblock svg{display:inline-block;height:auto;max-width:100%;vertical-align:middle}.doc .image:not(.left):not(.right)>img{margin-top:-.2em}.doc .videoblock iframe{max-width:100%;vertical-align:middle}#preamble .abstract blockquote{background:#f0f0f0;border-left:5px solid #e1e1e1;color:#4a4a4a;font-size:.88889rem;padding:.75em 1em}.doc .quoteblock,.doc .verseblock{background:#fafafa;border-left:5px solid #5d5d5d;color:#5d5d5d}.doc .quoteblock{padding:.25rem 2rem 1.25rem}.doc .quoteblock .attribution{color:#8e8e8e;font-size:.83333rem;margin-top:.75rem}.doc .quoteblock blockquote{margin-top:1rem}.doc .quoteblock .paragraph{font-style:italic}.doc .quoteblock cite{padding-left:1em}.doc .verseblock{font-size:1.15em;padding:1rem 2rem}.doc .verseblock pre{font-family:inherit;font-size:inherit}.doc ol,.doc ul{margin:0;padding:0 0 0 2rem}.doc ol.none,.doc ol.unnumbered,.doc ol.unstyled,.doc ul.checklist,.doc ul.no-bullet,.doc ul.none,.doc ul.unstyled{list-style-type:none}.doc ol.unnumbered,.doc ul.no-bullet{padding-left:1.25rem}.doc ol.unstyled,.doc ul.unstyled{padding-left:0}.doc ul.circle,.doc ul.disc,.doc ul.square{list-style-type:square}.doc ol.arabic{list-style-type:decimal}.doc ol.decimal{list-style-type:decimal-leading-zero}.doc ol.loweralpha{list-style-type:lower-alpha}.doc ol.upperalpha{list-style-type:upper-alpha}.doc ol.lowerroman{list-style-type:lower-roman}.doc ol.upperroman{list-style-type:upper-roman}.doc ol.lowergreek{list-style-type:lower-greek}.doc ul.checklist{padding-left:.5rem}.doc ul.checklist p>i.fa-check-square-o:first-child,.doc ul.checklist p>i.fa-square-o:first-child{display:-webkit-inline-box;display:-ms-inline-flexbox;display:inline-flex;-webkit-box-pack:center;-ms-flex-pack:center;justify-content:center;width:1.25rem}.doc ul.checklist i.fa-check-square-o::before{content:"\2713"}.doc ul.checklist i.fa-square-o::before{content:"\274f"}.doc .dlist .dlist,.doc .dlist .olist,.doc .dlist 
.ulist,.doc .olist .dlist,.doc .olist .olist,.doc .olist .ulist,.doc .olist li+li,.doc .ulist .dlist,.doc .ulist .olist,.doc .ulist .ulist,.doc .ulist li+li{margin-top:.5rem}.doc .admonitionblock .listingblock,.doc .olist .listingblock,.doc .ulist .listingblock{padding:0}.doc .admonitionblock .title,.doc .exampleblock .title,.doc .imageblock .title,.doc .listingblock .title,.doc .literalblock .title,.doc .openblock .title,.doc .tableblock caption,.doc .videoblock .title{color:#5d5d5d;font-size:.88889rem;font-weight:500;font-style:italic;-webkit-hyphens:none;-ms-hyphens:none;hyphens:none;letter-spacing:.01em;padding-bottom:.075rem;text-align:left}.doc .imageblock .title{margin-top:.5rem;padding-bottom:0}.doc details{margin-left:1rem}.doc details>summary{display:block;position:relative;line-height:1.6;margin-bottom:.5rem}.doc details>summary::-webkit-details-marker{display:none}.doc details>summary::before{content:"";border:solid transparent;border-left:solid;border-width:.3em 0 .3em .5em;position:absolute;top:.5em;left:-1rem;-webkit-transform:translateX(15%);transform:translateX(15%)}.doc details[open]>summary::before{border-color:currentColor transparent transparent;border-width:.5rem .3rem 0;-webkit-transform:translateY(15%);transform:translateY(15%)}.doc details>summary::after{content:"";width:1rem;height:1em;position:absolute;top:.3em;left:-1rem}.doc details.result{margin-top:.25rem}.doc details.result>summary{color:#5d5d5d;font-style:italic;margin-bottom:0}.doc details.result>.content{margin-left:-1rem}.doc .exampleblock>.content,.doc details.result>.content{background:#fff;border:.25rem solid #5d5d5d;border-radius:.5rem;padding:.75rem}.doc .exampleblock>.content::after,.doc details.result>.content::after{content:"";display:table;clear:both}.doc .exampleblock>.content>:first-child,.doc details>.content>:first-child{margin-top:0}.doc .sidebarblock{background:#e1e1e1;border-radius:.75rem;padding:.75rem 1.5rem}.doc 
.sidebarblock>.content>.title{font-size:1.25rem;font-weight:500;line-height:1.3;margin-bottom:-.3em;text-align:center}.doc .sidebarblock>.content>:not(.title):first-child{margin-top:0}.doc .listingblock.wrap pre,.doc .tableblock pre{white-space:pre-wrap}.doc .listingblock pre:not(.highlight),.doc .literalblock pre,.doc pre.highlight code{background:#fafafa;-webkit-box-shadow:inset 0 0 1.75px #e1e1e1;box-shadow:inset 0 0 1.75px #e1e1e1;display:block;overflow-x:auto;padding:.75rem}.doc pre.highlight{position:relative}.doc .source-toolbox{display:-webkit-box;display:-ms-flexbox;display:flex;visibility:hidden;position:absolute;top:.25rem;right:.5rem;color:grey;font-family:Roboto,sans-serif;font-size:.75rem;line-height:1}.doc .listingblock:hover .source-toolbox{visibility:visible}.doc .source-toolbox .source-lang{text-transform:uppercase;letter-spacing:.075em;font-size:.96em;line-height:1.0425}.doc .source-toolbox>:not(:last-child)::after{content:"|";letter-spacing:0;padding:0 1ch}.doc .source-toolbox .copy-button{display:-webkit-box;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:center;-ms-flex-align:center;align-items:center;background:transparent;border:none;color:inherit;outline:none;padding:0;font-size:inherit;line-height:inherit;width:1em;height:1em}.doc .source-toolbox .copy-icon{-webkit-box-flex:0;-ms-flex:none;flex:none;width:inherit;height:inherit}.doc .source-toolbox img.copy-icon{-webkit-filter:invert(50.2%);filter:invert(50.2%)}.doc .source-toolbox svg.copy-icon{fill:currentColor}.doc .source-toolbox .copy-toast{-webkit-box-flex:0;-ms-flex:none;flex:none;position:relative;display:-webkit-inline-box;display:-ms-inline-flexbox;display:inline-flex;-webkit-box-pack:center;-ms-flex-pack:center;justify-content:center;margin-top:1em;background-color:#333;border-radius:.25em;padding:.5em;color:#fff;cursor:auto;opacity:0;-webkit-transition:opacity .5s ease 
.75s;transition:opacity .5s ease .75s}.doc .source-toolbox .copy-toast::after{content:"";position:absolute;top:0;width:1em;height:1em;border:.55em solid transparent;border-left-color:#333;-webkit-transform:rotate(-90deg) translateX(50%) translateY(50%);transform:rotate(-90deg) translateX(50%) translateY(50%);-webkit-transform-origin:left;transform-origin:left}.doc .source-toolbox .copy-button.clicked .copy-toast{opacity:1;-webkit-transition:none;transition:none}.doc .language-console .hljs-meta{-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.doc .dlist dt{font-style:italic}.doc .dlist dd{margin:0 0 .25rem 1.5rem}.doc .dlist dd:last-of-type{margin-bottom:0}.doc td.hdlist1,.doc td.hdlist2{padding:.5rem 0 0;vertical-align:top}.doc tr:first-child>.hdlist1,.doc tr:first-child>.hdlist2{padding-top:0}.doc td.hdlist1{font-weight:500;padding-right:.25rem}.doc td.hdlist2{padding-left:.25rem}.doc .colist{font-size:.88889rem;margin:.25rem 0 -.25rem}.doc .colist>table>tbody>tr>:first-child,.doc .colist>table>tr>:first-child{padding:.25em .5rem 0;vertical-align:top}.doc .colist>table>tbody>tr>:last-child,.doc .colist>table>tr>:last-child{padding:.25rem 0}.doc .conum[data-value]{border:1px solid;border-radius:100%;display:inline-block;font-family:Roboto,sans-serif;font-size:.75rem;font-style:normal;line-height:1.2;text-align:center;width:1.25em;height:1.25em;letter-spacing:-.25ex;text-indent:-.25ex}.doc .conum[data-value]::after{content:attr(data-value)}.doc .conum[data-value]+b{display:none}.doc hr{border:solid #e1e1e1;border-width:2px 0 0;height:0}.doc b.button{white-space:nowrap}.doc b.button::before{content:"[";padding-right:.25em}.doc b.button::after{content:"]";padding-left:.25em}.doc kbd{display:inline-block;font-size:.66667rem;background:#fafafa;border:1px solid #c1c1c1;border-radius:.25em;-webkit-box-shadow:0 1px 0 #c1c1c1,0 0 0 .1em #fff inset;box-shadow:0 1px 0 #c1c1c1,inset 0 0 0 .1em #fff;padding:.25em 
.5em;vertical-align:text-bottom;white-space:nowrap}.doc .keyseq,.doc kbd{line-height:1}.doc .keyseq{font-size:.88889rem}.doc .keyseq kbd{margin:0 .125em}.doc .keyseq kbd:first-child{margin-left:0}.doc .keyseq kbd:last-child{margin-right:0}.doc .menuseq,.doc .path{-webkit-hyphens:none;-ms-hyphens:none;hyphens:none}.doc .menuseq i.caret::before{content:"\203a";font-size:1.1em;font-weight:500;line-height:.90909}.doc :not(pre).nowrap{white-space:nowrap}.doc .nobreak{-webkit-hyphens:none;-ms-hyphens:none;hyphens:none;word-wrap:normal}#footnotes{font-size:.85em;line-height:1.5;margin:2rem -.5rem 0}.doc td.tableblock>.content #footnotes{margin:2rem 0 0}#footnotes hr{border-top-width:1px;margin-top:0;width:20%}#footnotes .footnote{margin:.5em 0 0 1em}#footnotes .footnote+.footnote{margin-top:.25em}#footnotes .footnote>a:first-of-type{display:inline-block;margin-left:-2em;text-align:right;width:1.5em}nav.pagination{border-top:1px solid #e1e1e1;line-height:1;margin:2rem -1rem -1rem;padding:.75rem 1rem 0}nav.pagination,nav.pagination span{display:-webkit-box;display:-ms-flexbox;display:flex}nav.pagination span{-webkit-box-flex:50%;-ms-flex:50%;flex:50%;-webkit-box-orient:vertical;-webkit-box-direction:normal;-ms-flex-direction:column;flex-direction:column}nav.pagination .prev{padding-right:.5rem}nav.pagination .next{margin-left:auto;padding-left:.5rem;text-align:right}nav.pagination span::before{color:#8e8e8e;font-size:.75em;padding-bottom:.1em}nav.pagination .prev::before{content:"Prev"}nav.pagination .next::before{content:"Next"}nav.pagination a{font-weight:500;line-height:1.3;position:relative}nav.pagination a::after,nav.pagination a::before{color:#8e8e8e;font-weight:400;font-size:1.5em;line-height:.75;position:absolute;top:0;width:1rem}nav.pagination .prev a::before{content:"\2039";-webkit-transform:translateX(-100%);transform:translateX(-100%)}nav.pagination .next 
a::after{content:"\203a"}html.is-clipped--navbar{overflow-y:hidden}body{padding-top:3.5rem}.navbar{background:#191919;color:#fff;font-size:.88889rem;height:3.5rem;position:fixed;top:0;width:100%;z-index:4}.navbar a{text-decoration:none}.navbar-brand{display:-webkit-box;display:-ms-flexbox;display:flex;-webkit-box-flex:1;-ms-flex:auto;flex:auto;padding-left:1rem}.navbar-brand .navbar-item:first-child{-ms-flex-item-align:center;align-self:center;padding:0;color:#fff;font-size:1.22222rem;-ms-flex-wrap:wrap;flex-wrap:wrap;line-height:1}.navbar-brand .navbar-item:first-child a{color:inherit;word-wrap:normal}.navbar-brand .navbar-item:first-child :not(:last-child){padding-right:.375rem}.navbar-brand .navbar-item.search{-webkit-box-flex:1;-ms-flex:auto;flex:auto;-webkit-box-pack:end;-ms-flex-pack:end;justify-content:flex-end}#search-input{color:#333;font-family:inherit;font-size:.95rem;width:150px;border:1px solid #dbdbdb;border-radius:.1em;line-height:1.5;padding:0 .25em}#search-input:disabled{background-color:#dbdbdb;cursor:not-allowed;pointer-events:all!important}#search-input:disabled::-webkit-input-placeholder{color:#4c4c4c}#search-input:disabled::-moz-placeholder{color:#4c4c4c}#search-input:disabled:-ms-input-placeholder{color:#4c4c4c}#search-input:disabled::-ms-input-placeholder{color:#4c4c4c}#search-input:disabled::placeholder{color:#4c4c4c}.navbar-burger{background:none;border:none;outline:none;line-height:1;position:relative;width:3rem;padding:0;display:-webkit-box;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:center;-ms-flex-align:center;align-items:center;-webkit-box-pack:center;-ms-flex-pack:center;justify-content:center;margin-left:auto;min-width:0}.navbar-burger span{background-color:#fff;height:1.5px;width:1rem}.navbar-burger:not(.is-active) span{-webkit-transition:opacity 0s .25s,margin-top .25s ease-out .25s,-webkit-transform .25s 
ease-out;transition:opacity 0s .25s,margin-top .25s ease-out .25s,-webkit-transform .25s ease-out;transition:transform .25s ease-out,opacity 0s .25s,margin-top .25s ease-out .25s;transition:transform .25s ease-out,opacity 0s .25s,margin-top .25s ease-out .25s,-webkit-transform .25s ease-out}.navbar-burger span+span{margin-top:.25rem}.navbar-burger.is-active span+span{margin-top:-1.5px}.navbar-burger.is-active span:first-child{-webkit-transform:rotate(45deg);transform:rotate(45deg)}.navbar-burger.is-active span:nth-child(2){opacity:0}.navbar-burger.is-active span:nth-child(3){-webkit-transform:rotate(-45deg);transform:rotate(-45deg)}.navbar-item,.navbar-link{color:#222;display:block;line-height:1.6;padding:.5rem 1rem}.navbar-item.has-dropdown{padding:0}.navbar-item .icon{width:1.25rem;height:1.25rem;display:block}.navbar-item .icon img,.navbar-item .icon svg{fill:currentColor;width:inherit;height:inherit}.navbar-link{padding-right:2.5em}.navbar-dropdown .navbar-item{padding-left:1.5rem;padding-right:1.5rem}.navbar-divider{background-color:#e1e1e1;border:none;height:1px;margin:.25rem 0}.navbar .button{display:-webkit-inline-box;display:-ms-inline-flexbox;display:inline-flex;-webkit-box-align:center;-ms-flex-align:center;align-items:center;background:#fff;border:1px solid #e1e1e1;border-radius:.15rem;height:1.75rem;color:#222;padding:0 .75em;white-space:nowrap}@media screen and (max-width:768.5px){.navbar-brand .navbar-item.search{padding-left:0;padding-right:0}}@media screen and (min-width:769px){#search-input{width:200px}}@media screen and (max-width:1023.5px){.navbar-brand{height:inherit}.navbar-brand .navbar-item{-webkit-box-align:center;-ms-flex-align:center;align-items:center;display:-webkit-box;display:-ms-flexbox;display:flex}.navbar-menu{background:#fff;-webkit-box-shadow:0 8px 16px rgba(10,10,10,.1);box-shadow:0 8px 16px rgba(10,10,10,.1);max-height:calc(100vh - 3.5rem);overflow-y:auto;-ms-scroll-chaining:none;overscroll-behavior:none;padding:.5rem 
0}.navbar-menu:not(.is-active){display:none}.navbar-menu .navbar-link:hover,.navbar-menu a.navbar-item:hover{background:#f5f5f5}}@media screen and (min-width:1024px){.navbar-burger{display:none}.navbar,.navbar-end,.navbar-item,.navbar-link,.navbar-menu{display:-webkit-box;display:-ms-flexbox;display:flex}.navbar-item,.navbar-link{position:relative;-webkit-box-flex:0;-ms-flex:none;flex:none}.navbar-item:not(.has-dropdown),.navbar-link{-webkit-box-align:center;-ms-flex-align:center;align-items:center}.navbar-item.is-hoverable:hover .navbar-dropdown{display:block}.navbar-link::after{border-width:0 0 1px 1px;border-style:solid;content:"";display:block;height:.5em;pointer-events:none;position:absolute;-webkit-transform:rotate(-45deg);transform:rotate(-45deg);width:.5em;margin-top:-.375em;right:1.125em;top:50%}.navbar-end .navbar-link,.navbar-end>.navbar-item{color:#fff}.navbar-end .navbar-item.has-dropdown:hover .navbar-link,.navbar-end .navbar-link:hover,.navbar-end>a.navbar-item:hover{background:#000;color:#fff}.navbar-end .navbar-link::after{border-color:currentColor}.navbar-dropdown{background:#fff;border:1px solid #e1e1e1;border-top:none;border-radius:0 0 .25rem .25rem;display:none;top:100%;left:0;min-width:100%;position:absolute}.navbar-dropdown .navbar-item{padding:.5rem 3rem .5rem 1rem;white-space:nowrap}.navbar-dropdown .navbar-item:last-child{border-radius:inherit}.navbar-dropdown.is-right{left:auto;right:0}.navbar-dropdown a.navbar-item:hover{background:#f5f5f5}}footer.footer{background-color:#e1e1e1;color:#5d5d5d;font-size:.83333rem;line-height:1.6;padding:1.5rem}.footer p{margin:.5rem 0}.footer a{color:#191919} + +/*! 
Adapted from the GitHub style by Vasily Polovnyov */.hljs-comment,.hljs-quote{color:#998;font-style:italic}.hljs-keyword,.hljs-selector-tag,.hljs-subst{color:#333;font-weight:500}.hljs-literal,.hljs-number,.hljs-tag .hljs-attr,.hljs-template-variable,.hljs-variable{color:teal}.hljs-doctag,.hljs-string{color:#d14}.hljs-section,.hljs-selector-id,.hljs-title{color:#900;font-weight:500}.hljs-subst{font-weight:400}.hljs-class .hljs-title,.hljs-type{color:#458;font-weight:500}.hljs-attribute,.hljs-name,.hljs-tag{color:navy;font-weight:400}.hljs-link,.hljs-regexp{color:#009926}.hljs-bullet,.hljs-symbol{color:#990073}.hljs-built_in,.hljs-builtin-name{color:#0086b3}.hljs-meta{color:#999;font-weight:500}.hljs-deletion{background:#fdd}.hljs-addition{background:#dfd}.hljs-emphasis{font-style:italic}.hljs-strong{font-weight:500}@page{margin:.5in}@media print{.hide-for-print{display:none!important}html{font-size:.9375em}a{color:inherit!important;text-decoration:underline}a.bare,a[href^="#"],a[href^="mailto:"]{text-decoration:none}img,object,svg,tr{page-break-inside:avoid}thead{display:table-header-group}pre{-webkit-hyphens:none;-ms-hyphens:none;hyphens:none;white-space:pre-wrap}body{padding-top:2rem}.navbar{background:none;color:inherit;position:absolute}.navbar *{color:inherit!important}.nav-container,.navbar>:not(.navbar-brand),.toolbar,aside.toc,nav.pagination{display:none}.doc{color:inherit;margin:auto;max-width:none;padding-bottom:2rem}.doc .listingblock code[data-lang]::before{display:block}footer.footer{background:none;border-top:1px solid #e1e1e1;color:#8e8e8e;padding:.25rem .5rem 0}.footer *{color:inherit}} \ No newline at end of file diff --git a/_/font/roboto-latin-400.woff b/docs/_/font/roboto-latin-400.woff similarity index 100% rename from _/font/roboto-latin-400.woff rename to docs/_/font/roboto-latin-400.woff diff --git a/_/font/roboto-latin-400.woff2 b/docs/_/font/roboto-latin-400.woff2 similarity index 100% rename from _/font/roboto-latin-400.woff2 rename to 
docs/_/font/roboto-latin-400.woff2 diff --git a/_/font/roboto-latin-400italic.woff b/docs/_/font/roboto-latin-400italic.woff similarity index 100% rename from _/font/roboto-latin-400italic.woff rename to docs/_/font/roboto-latin-400italic.woff diff --git a/_/font/roboto-latin-400italic.woff2 b/docs/_/font/roboto-latin-400italic.woff2 similarity index 100% rename from _/font/roboto-latin-400italic.woff2 rename to docs/_/font/roboto-latin-400italic.woff2 diff --git a/_/font/roboto-latin-500.woff b/docs/_/font/roboto-latin-500.woff similarity index 100% rename from _/font/roboto-latin-500.woff rename to docs/_/font/roboto-latin-500.woff diff --git a/_/font/roboto-latin-500.woff2 b/docs/_/font/roboto-latin-500.woff2 similarity index 100% rename from _/font/roboto-latin-500.woff2 rename to docs/_/font/roboto-latin-500.woff2 diff --git a/_/font/roboto-latin-500italic.woff b/docs/_/font/roboto-latin-500italic.woff similarity index 100% rename from _/font/roboto-latin-500italic.woff rename to docs/_/font/roboto-latin-500italic.woff diff --git a/_/font/roboto-latin-500italic.woff2 b/docs/_/font/roboto-latin-500italic.woff2 similarity index 100% rename from _/font/roboto-latin-500italic.woff2 rename to docs/_/font/roboto-latin-500italic.woff2 diff --git a/_/font/roboto-mono-latin-400.woff b/docs/_/font/roboto-mono-latin-400.woff similarity index 100% rename from _/font/roboto-mono-latin-400.woff rename to docs/_/font/roboto-mono-latin-400.woff diff --git a/_/font/roboto-mono-latin-400.woff2 b/docs/_/font/roboto-mono-latin-400.woff2 similarity index 100% rename from _/font/roboto-mono-latin-400.woff2 rename to docs/_/font/roboto-mono-latin-400.woff2 diff --git a/_/font/roboto-mono-latin-500.woff b/docs/_/font/roboto-mono-latin-500.woff similarity index 100% rename from _/font/roboto-mono-latin-500.woff rename to docs/_/font/roboto-mono-latin-500.woff diff --git a/_/font/roboto-mono-latin-500.woff2 b/docs/_/font/roboto-mono-latin-500.woff2 similarity index 100% rename from 
_/font/roboto-mono-latin-500.woff2 rename to docs/_/font/roboto-mono-latin-500.woff2 diff --git a/_/img/back.svg b/docs/_/img/back.svg similarity index 100% rename from _/img/back.svg rename to docs/_/img/back.svg diff --git a/_/img/caret.svg b/docs/_/img/caret.svg similarity index 100% rename from _/img/caret.svg rename to docs/_/img/caret.svg diff --git a/_/img/chevron.svg b/docs/_/img/chevron.svg similarity index 100% rename from _/img/chevron.svg rename to docs/_/img/chevron.svg diff --git a/_/img/home-o.svg b/docs/_/img/home-o.svg similarity index 100% rename from _/img/home-o.svg rename to docs/_/img/home-o.svg diff --git a/_/img/home.svg b/docs/_/img/home.svg similarity index 100% rename from _/img/home.svg rename to docs/_/img/home.svg diff --git a/_/img/menu.svg b/docs/_/img/menu.svg similarity index 100% rename from _/img/menu.svg rename to docs/_/img/menu.svg diff --git a/_/img/octicons-16.svg b/docs/_/img/octicons-16.svg similarity index 100% rename from _/img/octicons-16.svg rename to docs/_/img/octicons-16.svg diff --git a/docs/_/js/site.js b/docs/_/js/site.js new file mode 100644 index 00000000..c12c5743 --- /dev/null +++ b/docs/_/js/site.js @@ -0,0 +1,6 @@ +!function(){"use strict";var e,c,o,r,v=/^sect(\d)$/,i=document.querySelector(".nav-container"),a=document.querySelector(".nav-toggle"),s=(a.addEventListener("click",function(e){if(a.classList.contains("is-active"))return l(e);d(e);var e=document.documentElement,t=(e.classList.add("is-clipped--nav"),a.classList.add("is-active"),i.classList.add("is-active"),c.getBoundingClientRect()),n=window.innerHeight-Math.round(t.top);Math.round(t.height)!==n&&(c.style.height=n+"px");e.addEventListener("click",l)}),i.addEventListener("click",d),i.querySelector("[data-panel=menu]"));function t(){var e,t,n=window.location.hash;if(n&&(n.indexOf("%")&&(n=decodeURIComponent(n)),!(e=s.querySelector('.nav-link[href="'+n+'"]')))){n=document.getElementById(n.slice(1));if(n)for(var 
i=n,a=document.querySelector("article.doc");(i=i.parentNode)&&i!==a;){var c=i.id;if((c=!c&&(c=v.test(i.className))?(i.firstElementChild||{}).id:c)&&(e=s.querySelector('.nav-link[href="#'+c+'"]')))break}}if(e)t=e.parentNode;else{if(!r)return;e=(t=r).querySelector(".nav-link")}t!==o&&(u(s,".nav-item.is-active").forEach(function(e){e.classList.remove("is-active","is-current-path","is-current-page")}),t.classList.add("is-current-page"),p(o=t),h(s,e))}function p(e){for(var t,n=e.parentNode;!(t=n.classList).contains("nav-menu");)"LI"===n.tagName&&t.contains("nav-item")&&t.add("is-active","is-current-path"),n=n.parentNode;e.classList.add("is-active")}function n(){var e,t,n,i;this.classList.toggle("is-active")&&(e=parseFloat(window.getComputedStyle(this).marginTop),t=this.getBoundingClientRect(),n=s.getBoundingClientRect(),0<(i=(t.bottom-n.top-n.height+e).toFixed())&&(s.scrollTop+=Math.min((t.top-n.top-e).toFixed(),i)))}function l(e){d(e);e=document.documentElement;e.classList.remove("is-clipped--nav"),a.classList.remove("is-active"),i.classList.remove("is-active"),e.removeEventListener("click",l)}function d(e){e.stopPropagation()}function h(e,t){var n=e.getBoundingClientRect(),i=n.height,a=window.getComputedStyle(c);"sticky"===a.position&&(i-=n.top-parseFloat(a.top)),e.scrollTop=Math.max(0,.5*(t.getBoundingClientRect().height-i)+t.offsetTop)}function u(e,t){return[].slice.call(e.querySelectorAll(t))}s&&(e=i.querySelector("[data-panel=explore]"),c=i.querySelector(".nav"),o=s.querySelector(".is-current-page"),(r=o)?(p(o),h(s,o.querySelector(".nav-link"))):s.scrollTop=0,u(s,".nav-item-toggle").forEach(function(e){var 
t=e.parentElement,e=(e.addEventListener("click",n.bind(t)),function(e,t){e=e.nextElementSibling;return(!e||!t||e[e.matches?"matches":"msMatchesSelector"](t))&&e}(e,".nav-text"));e&&(e.style.cursor="pointer",e.addEventListener("click",n.bind(t)))}),e&&e.querySelector(".context").addEventListener("click",function(){u(c,"[data-panel]").forEach(function(e){e.classList.toggle("is-active")})}),s.addEventListener("mousedown",function(e){1":"")+".sect"+n);o.push("h"+(t+1)+"[id]")}else o.push("h1[id].sect0");m.push(o.join(">"))}r=m.join(","),i=f.parentNode;var d,c=[].slice.call((i||document).querySelectorAll(r));if(!c.length)return e.parentNode.removeChild(e);var a={},s=c.reduce(function(e,t){var o=document.createElement("a"),n=(o.textContent=t.textContent,a[o.href="#"+t.id]=o,document.createElement("li"));return n.dataset.level=parseInt(t.nodeName.slice(1),10)-1,n.appendChild(o),e.appendChild(n),e},document.createElement("ul")),i=e.querySelector(".toc-menu"),r=(i||((i=document.createElement("div")).className="toc-menu"),document.createElement("h3")),e=(r.textContent=e.dataset.title||"Contents",i.appendChild(r),i.appendChild(s),!document.getElementById("toc")&&f.querySelector("h1.page ~ :not(.is-before-toc)"));e&&((r=document.createElement("aside")).className="toc embedded",r.appendChild(i.cloneNode(!0)),e.parentNode.insertBefore(r,e)),window.addEventListener("load",function(){p(),window.addEventListener("scroll",p)})}}function p(){var n,i,t,e=window.pageYOffset,o=1.15*h(document.documentElement,"fontSize"),r=f.offsetTop;if(e&&window.innerHeight+e+2>=document.documentElement.scrollHeight)return d=Array.isArray(d)?d:Array(d||0),n=[],i=c.length-1,c.forEach(function(e,t){var 
o="#"+e.id;t===i||e.getBoundingClientRect().top+h(e,"paddingTop")>r?(n.push(o),d.indexOf(o)<0&&a[o].classList.add("is-active")):~d.indexOf(o)&&a[d.shift()].classList.remove("is-active")}),s.scrollTop=s.scrollHeight-s.offsetHeight,void(d=1r)return!0;t="#"+e.id}),t?t!==d&&(d&&a[d].classList.remove("is-active"),(e=a[t]).classList.add("is-active"),s.scrollHeight>s.offsetHeight&&(s.scrollTop=Math.max(0,e.offsetTop+e.offsetHeight-s.offsetHeight)),d=t):d&&(a[d].classList.remove("is-active"),d=void 0)}function h(e,t){return parseFloat(window.getComputedStyle(e)[t])}}(); +!function(){"use strict";var o=document.querySelector("article.doc"),t=document.querySelector(".toolbar");function i(e){return e&&(~e.indexOf("%")?decodeURIComponent(e):e).slice(1)}function r(e){if(e){if(e.altKey||e.ctrlKey)return;window.location.hash="#"+this.id,e.preventDefault()}window.scrollTo(0,function e(t,n){return o.contains(t)?e(t.offsetParent,t.offsetTop+n):n}(this,0)-t.getBoundingClientRect().bottom)}window.addEventListener("load",function e(t){var n,o;(n=i(window.location.hash))&&(o=document.getElementById(n))&&(r.bind(o)(),setTimeout(r.bind(o),0)),window.removeEventListener("load",e)}),Array.prototype.slice.call(document.querySelectorAll('a[href^="#"]')).forEach(function(e){var t,n;(t=i(e.hash))&&(n=document.getElementById(t))&&e.addEventListener("click",r.bind(n))})}(); +!function(){"use strict";var t,e=document.querySelector(".page-versions .version-menu-toggle");e&&(t=document.querySelector(".page-versions"),e.addEventListener("click",function(e){t.classList.toggle("is-active"),e.stopPropagation()}),document.documentElement.addEventListener("click",function(){t.classList.remove("is-active")}))}(); +!function(){"use strict";var t=document.querySelector(".navbar-burger");t&&t.addEventListener("click",function(t){t.stopPropagation(),document.documentElement.classList.toggle("is-clipped--navbar"),this.classList.toggle("is-active");t=document.getElementById(this.dataset.target);{var 
e;t.classList.toggle("is-active")&&(t.style.maxHeight="",e=window.innerHeight-Math.round(t.getBoundingClientRect().top),parseInt(window.getComputedStyle(t).maxHeight,10)!==e&&(t.style.maxHeight=e+"px"))}}.bind(t))}(); +!function(){"use strict";var o=/^\$ (\S[^\\\n]*(\\\n(?!\$ )[^\\\n]*)*)(?=\n|$)/gm,s=/( ) *\\\n *|\\\n( ?) */g,l=/ +$/gm,d=(document.getElementById("site-script")||{dataset:{}}).dataset;[].slice.call(document.querySelectorAll(".doc pre.highlight, .doc .literalblock pre")).forEach(function(e){var t,n,c,i;if(e.classList.contains("highlight"))(a=(t=e.querySelector("code")).dataset.lang)&&"console"!==a&&((c=document.createElement("span")).className="source-lang",c.appendChild(document.createTextNode(a)));else{if(!e.innerText.startsWith("$ "))return;var a=e.parentNode.parentNode;a.classList.remove("literalblock"),a.classList.add("listingblock"),e.classList.add("highlightjs","highlight"),(t=document.createElement("code")).className="language-console hljs",t.dataset.lang="console",t.appendChild(e.firstChild),e.appendChild(t)}(a=document.createElement("div")).className="source-toolbox",c&&a.appendChild(c),window.navigator.clipboard&&((n=document.createElement("button")).className="copy-button",n.setAttribute("title","Copy to clipboard"),"svg"===d.svgAs?((c=document.createElementNS("http://www.w3.org/2000/svg","svg")).setAttribute("class","copy-icon"),(i=document.createElementNS("http://www.w3.org/2000/svg","use")).setAttribute("href",window.uiRootPath+"/img/octicons-16.svg#icon-clippy"),c.appendChild(i),n.appendChild(c)):((i=document.createElement("img")).src=window.uiRootPath+"/img/octicons-16.svg#view-clippy",i.alt="copy icon",i.className="copy-icon",n.appendChild(i)),(c=document.createElement("span")).className="copy-toast",c.appendChild(document.createTextNode("Copied!")),n.appendChild(c),a.appendChild(n)),e.appendChild(a),n&&n.addEventListener("click",function(e){var t=e.innerText.replace(l,"");"console"===e.dataset.lang&&t.startsWith("$ 
")&&(t=function(e){var t,n=[];for(;t=o.exec(e);)n.push(t[1].replace(s,"$1$2"));return n.join(" && ")}(t));window.navigator.clipboard.writeText(t).then(function(){this.classList.add("clicked"),this.offsetHeight,this.classList.remove("clicked")}.bind(this),function(){})}.bind(n,t))})}(); \ No newline at end of file diff --git a/docs/_/js/vendor/highlight.js b/docs/_/js/vendor/highlight.js new file mode 100644 index 00000000..7eff502e --- /dev/null +++ b/docs/_/js/vendor/highlight.js @@ -0,0 +1 @@ +!function(){function e(e){return{aliases:["adoc"],contains:[e.COMMENT("^/{4,}\\n","\\n/{4,}$",{relevance:10}),e.COMMENT("^//","$",{relevance:0}),{className:"title",begin:"^\\.\\w.*$"},{begin:"^[=\\*]{4,}\\n",end:"\\n^[=\\*]{4,}$",relevance:10},{className:"section",relevance:10,variants:[{begin:"^(={1,5}) .+?( \\1)?$"},{begin:"^[^\\[\\]\\n]+?\\n[=\\-~\\^\\+]{2,}$"}]},{className:"meta",begin:"^:.+?:",end:"\\s",excludeEnd:!0,relevance:10},{className:"meta",begin:"^\\[.+?\\]$",relevance:0},{className:"quote",begin:"^_{4,}\\n",end:"\\n_{4,}$",relevance:10},{className:"code",begin:"^[\\-\\.]{4,}\\n",end:"\\n[\\-\\.]{4,}$",relevance:10},{begin:"^\\+{4,}\\n",end:"\\n\\+{4,}$",contains:[{begin:"<",end:">",subLanguage:"xml",relevance:0}],relevance:10},{className:"bullet",begin:"^(\\*+|\\-+|\\.+|[^\\n]+?::)\\s+"},{className:"symbol",begin:"^(NOTE|TIP|IMPORTANT|WARNING|CAUTION):\\s+",relevance:10},{className:"strong",begin:"\\B\\*(?![\\*\\s])",end:"(\\n{2}|\\*)",contains:[{begin:"\\\\*\\w",relevance:0}]},{className:"emphasis",begin:"\\B'(?!['\\s])",end:"(\\n{2}|')",contains:[{begin:"\\\\'\\w",relevance:0}],relevance:0},{className:"emphasis",begin:"_(?![_\\s])",end:"(\\n{2}|_)",relevance:0},{className:"string",variants:[{begin:"``.+?''"},{begin:"`.+?'"}]},{className:"code",begin:"(`.+?`|\\+.+?\\+)",relevance:0},{className:"code",begin:"^[ \\t]",end:"$",relevance:0},{begin:"^'{3,}[ 
\\t]*$",relevance:10},{begin:"(link:)?(http|https|ftp|file|irc|image:?):\\S+\\[.*?\\]",returnBegin:!0,contains:[{begin:"(link|image:?):",relevance:0},{className:"link",begin:"\\w",end:"[^\\[]+",relevance:0},{className:"string",begin:"\\[",end:"\\]",excludeBegin:!0,excludeEnd:!0,relevance:0}],relevance:10}]}}function n(e){var n={className:"variable",variants:[{begin:/\$[\w\d#@][\w\d_]*/},{begin:/\$\{(.*?)}/}]},a={className:"string",begin:/"/,end:/"/,contains:[e.BACKSLASH_ESCAPE,n,{className:"variable",begin:/\$\(/,end:/\)/,contains:[e.BACKSLASH_ESCAPE]}]};return{aliases:["sh","zsh"],lexemes:/\b-?[a-z\._]+\b/,keywords:{keyword:"if then else elif fi for while in do done case esac function",literal:"true false",built_in:"break cd continue eval exec exit export getopts hash pwd readonly return shift test times trap umask unset alias bind builtin caller command declare echo enable help let local logout mapfile printf read readarray source type typeset ulimit unalias set shopt autoload bg bindkey bye cap chdir clone comparguments compcall compctl compdescribe compfiles compgroups compquote comptags comptry compvalues dirs disable disown echotc echoti emulate fc fg float functions getcap getln history integer jobs kill limit log noglob popd print pushd pushln rehash sched setcap setopt stat suspend ttyctl unfunction unhash unlimit unsetopt vared wait whence where which zcompile zformat zftp zle zmodload zparseopts zprof zpty zregexparse zsocket zstyle ztcp",_:"-ne -eq -lt -gt -f -d -e -s -l -a"},contains:[{className:"meta",begin:/^#![^\n]+sh\s*$/,relevance:10},{className:"function",begin:/\w[\w\d_]*\s*\(\s*\)\s*\{/,returnBegin:!0,contains:[e.inherit(e.TITLE_MODE,{begin:/\w[\w\d_]*/})],relevance:0},e.HASH_COMMENT_MODE,a,{className:"",begin:/\\"/},{className:"string",begin:/'/,end:/'/},n]}}function a(e){var 
n={begin:c="["+(c="a-zA-Z_\\-!.?+*=<>&#'")+"]["+c+"0-9/;:]*",relevance:0},a={className:"number",begin:"[-+]?\\d+(\\.\\d+)?",relevance:0},t=e.inherit(e.QUOTE_STRING_MODE,{illegal:null}),i=e.COMMENT(";","$",{relevance:0}),d={className:"literal",begin:/\b(true|false|nil)\b/},s={begin:"[\\[\\{]",end:"[\\]\\}]"},g={className:"comment",begin:"\\^"+c},r=e.COMMENT("\\^\\{","\\}"),u={className:"symbol",begin:"[:]{1,2}"+c},l={begin:"\\(",end:"\\)"},o={endsWithParent:!0,relevance:0},c={keywords:{"builtin-name":"def defonce cond apply if-not if-let if not not= = < > <= >= == + / * - rem quot neg? pos? delay? symbol? keyword? true? false? integer? empty? coll? list? set? ifn? fn? associative? sequential? sorted? counted? reversible? number? decimal? class? distinct? isa? float? rational? reduced? ratio? odd? even? char? seq? vector? string? map? nil? contains? zero? instance? not-every? not-any? libspec? -> ->> .. . inc compare do dotimes mapcat take remove take-while drop letfn drop-last take-last drop-while while intern condp case reduced cycle split-at split-with repeat replicate iterate range merge zipmap declare line-seq sort comparator sort-by dorun doall nthnext nthrest partition eval doseq await await-for let agent atom send send-off release-pending-sends add-watch mapv filterv remove-watch agent-error restart-agent set-error-handler error-handler set-error-mode! error-mode shutdown-agents quote var fn loop recur throw try monitor-enter monitor-exit defmacro defn defn- macroexpand macroexpand-1 for dosync and or when when-not when-let comp juxt partial sequence memoize constantly complement identity assert peek pop doto proxy defstruct first rest cons defprotocol cast coll deftype defrecord last butlast sigs reify second ffirst fnext nfirst nnext defmulti defmethod meta with-meta ns in-ns create-ns import refer keys select-keys vals key val rseq name namespace promise into transient persistent! conj! assoc! dissoc! pop! disj! 
use class type num float double short byte boolean bigint biginteger bigdec print-method print-dup throw-if printf format load compile get-in update-in pr pr-on newline flush read slurp read-line subvec with-open memfn time re-find re-groups rand-int rand mod locking assert-valid-fdecl alias resolve ref deref refset swap! reset! set-validator! compare-and-set! alter-meta! reset-meta! commute get-validator alter ref-set ref-history-count ref-min-history ref-max-history ensure sync io! new next conj set! to-array future future-call into-array aset gen-class reduce map filter find empty hash-map hash-set sorted-map sorted-map-by sorted-set sorted-set-by vec vector seq flatten reverse assoc dissoc list disj get union difference intersection extend extend-type extend-protocol int nth delay count concat chunk chunk-buffer chunk-append chunk-first chunk-rest max min dec unchecked-inc-int unchecked-inc unchecked-dec-inc unchecked-dec unchecked-negate unchecked-add-int unchecked-add unchecked-subtract-int unchecked-subtract chunk-next chunk-cons chunked-seq? 
prn vary-meta lazy-seq spread list* str find-keyword keyword symbol gensym force rationalize"},lexemes:c,className:"name",begin:c,starts:o},n=[l,t,g,r,i,u,s,a,d,n];return l.contains=[e.COMMENT("comment",""),c,o],o.contains=n,s.contains=n,r.contains=[s],{aliases:["clj"],illegal:/\S/,contains:[l,t,g,r,i,u,s,a,d]}}function t(e){function n(e){return"(?:"+e+")?"}var a="decltype\\(auto\\)",t="[a-zA-Z_]\\w*::",i={className:"keyword",begin:"\\b[a-z\\d_]*_t\\b"},s={className:"string",variants:[{begin:'(u8?|U|L)?"',end:'"',illegal:"\\n",contains:[e.BACKSLASH_ESCAPE]},{begin:"(u8?|U|L)?'(\\\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4,8}|[0-7]{3}|\\S)|.)",end:"'",illegal:"."},{begin:/(?:u8?|U|L)?R"([^()\\ ]{0,16})\((?:.|\n)*?\)\1"/}]},r={className:"number",variants:[{begin:"\\b(0b[01']+)"},{begin:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)(u|U|l|L|ul|UL|f|F|b|B)"},{begin:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)"}],relevance:0},l={className:"meta",begin:/#\s*[a-z]+\b/,end:/$/,keywords:{"meta-keyword":"if else elif endif define undef warning error line pragma _Pragma ifdef ifndef include"},contains:[{begin:/\\\n/,relevance:0},e.inherit(s,{className:"meta-string"}),{className:"meta-string",begin:/<.*?>/,end:/$/,illegal:"\\n"},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},d={className:"title",begin:n(t)+e.IDENT_RE,relevance:0},t=n(t)+e.IDENT_RE+"\\s*\\(",o={keyword:"int float while private char char8_t char16_t char32_t catch import module export virtual operator sizeof dynamic_cast|10 typedef const_cast|10 const for static_cast|10 union namespace unsigned long volatile static protected bool template mutable if public friend do goto auto void enum else break extern using asm case typeid wchar_tshort reinterpret_cast|10 default double register explicit signed typename try this switch continue inline delete alignas alignof constexpr consteval constinit decltype concept co_await co_return co_yield requires noexcept static_assert thread_local restrict 
final override atomic_bool atomic_char atomic_schar atomic_uchar atomic_short atomic_ushort atomic_int atomic_uint atomic_long atomic_ulong atomic_llong atomic_ullong new throw return and and_eq bitand bitor compl not not_eq or or_eq xor xor_eq",built_in:"std string wstring cin cout cerr clog stdin stdout stderr stringstream istringstream ostringstream auto_ptr deque list queue stack vector map set bitset multiset multimap unordered_set unordered_map unordered_multiset unordered_multimap array shared_ptr abort terminate abs acos asin atan2 atan calloc ceil cosh cos exit exp fabs floor fmod fprintf fputs free frexp fscanf future isalnum isalpha iscntrl isdigit isgraph islower isprint ispunct isspace isupper isxdigit tolower toupper labs ldexp log10 log malloc realloc memchr memcmp memcpy memset modf pow printf putchar puts scanf sinh sin snprintf sprintf sqrt sscanf strcat strchr strcmp strcpy strcspn strlen strncat strncmp strncpy strpbrk strrchr strspn strstr tanh tan vfprintf vprintf vsprintf endl initializer_list unique_ptr _Bool complex _Complex imaginary _Imaginary",literal:"true false nullptr NULL"},c=[i,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,r,s],g={variants:[{begin:/=/,end:/;/},{begin:/\(/,end:/\)/},{beginKeywords:"new throw return 
else",end:/;/}],keywords:o,contains:c.concat([{begin:/\(/,end:/\)/,keywords:o,contains:c.concat(["self"]),relevance:0}]),relevance:0},a={className:"function",begin:"((decltype\\(auto\\)|(?:[a-zA-Z_]\\w*::)?[a-zA-Z_]\\w*(?:<.*?>)?)[\\*&\\s]+)+"+t,returnBegin:!0,end:/[{;=]/,excludeEnd:!0,keywords:o,illegal:/[^\w\s\*&:<>]/,contains:[{begin:a,keywords:o,relevance:0},{begin:t,returnBegin:!0,contains:[d],relevance:0},{className:"params",begin:/\(/,end:/\)/,keywords:o,relevance:0,contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,s,r,i,{begin:/\(/,end:/\)/,keywords:o,relevance:0,contains:["self",e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,s,r,i]}]},i,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,l]};return{aliases:["c","cc","h","c++","h++","hpp","hh","hxx","cxx"],keywords:o,illegal:"",keywords:o,contains:["self",i]},{begin:e.IDENT_RE+"::",keywords:o},{className:"class",beginKeywords:"class struct",end:/[{;:]/,contains:[{begin://,contains:["self"]},e.TITLE_MODE]}]),exports:{preprocessor:l,strings:s,keywords:o}}}function i(e){var n={keyword:"abstract as base bool break byte case catch char checked const continue decimal default delegate do double enum event explicit extern finally fixed float for foreach goto if implicit in int interface internal is lock long object operator out override params private protected public readonly ref sbyte sealed short sizeof stackalloc static string struct switch this try typeof uint ulong unchecked unsafe ushort using virtual void volatile while add alias ascending async await by descending dynamic equals from get global group into join let nameof on orderby partial remove select set value var when where yield",literal:"null false 
true"},a={className:"number",variants:[{begin:"\\b(0b[01']+)"},{begin:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)(u|U|l|L|ul|UL|f|F|b|B)"},{begin:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)"}],relevance:0},t={className:"string",begin:'@"',end:'"',contains:[{begin:'""'}]},i=e.inherit(t,{illegal:/\n/}),s={className:"subst",begin:"{",end:"}",keywords:n},r=e.inherit(s,{illegal:/\n/}),l={className:"string",begin:/\$"/,end:'"',illegal:/\n/,contains:[{begin:"{{"},{begin:"}}"},e.BACKSLASH_ESCAPE,r]},o={className:"string",begin:/\$@"/,end:'"',contains:[{begin:"{{"},{begin:"}}"},{begin:'""'},s]},c=e.inherit(o,{illegal:/\n/,contains:[{begin:"{{"},{begin:"}}"},{begin:'""'},r]}),s=(s.contains=[o,l,t,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,a,e.C_BLOCK_COMMENT_MODE],r.contains=[c,l,i,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,a,e.inherit(e.C_BLOCK_COMMENT_MODE,{illegal:/\n/})],{variants:[o,l,t,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]}),r=e.IDENT_RE+"(<"+e.IDENT_RE+"(\\s*,\\s*"+e.IDENT_RE+")*>)?(\\[\\])?";return{aliases:["csharp","c#"],keywords:n,illegal:/::/,contains:[e.COMMENT("///","$",{returnBegin:!0,contains:[{className:"doctag",variants:[{begin:"///",relevance:0},{begin:"\x3c!--|--\x3e"},{begin:""}]}]}),e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,{className:"meta",begin:"#",end:"$",keywords:{"meta-keyword":"if else elif endif define undef warning error line region endregion pragma checksum"}},s,a,{beginKeywords:"class interface",end:/[{;=]/,illegal:/[^\s:,]/,contains:[e.TITLE_MODE,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{beginKeywords:"namespace",end:/[{;=]/,illegal:/[^\s:]/,contains:[e.inherit(e.TITLE_MODE,{begin:"[a-zA-Z](\\.?\\w)*"}),e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{className:"meta",begin:"^\\s*\\[",excludeBegin:!0,end:"\\]",excludeEnd:!0,contains:[{className:"meta-string",begin:/"/,end:/"/}]},{beginKeywords:"new return throw await 
else",relevance:0},{className:"function",begin:"("+r+"\\s+)+"+e.IDENT_RE+"\\s*\\(",returnBegin:!0,end:/\s*[{;=]/,excludeEnd:!0,keywords:n,contains:[{begin:e.IDENT_RE+"\\s*\\(",returnBegin:!0,contains:[e.TITLE_MODE],relevance:0},{className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:n,relevance:0,contains:[s,a,e.C_BLOCK_COMMENT_MODE]},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]}]}}function s(e){var n={className:"attribute",begin:/\S/,end:":",excludeEnd:!0,starts:{endsWithParent:!0,excludeEnd:!0,contains:[{begin:/[\w-]+\(/,returnBegin:!0,contains:[{className:"built_in",begin:/[\w-]+/},{begin:/\(/,end:/\)/,contains:[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,e.CSS_NUMBER_MODE]}]},e.CSS_NUMBER_MODE,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,e.C_BLOCK_COMMENT_MODE,{className:"number",begin:"#[0-9A-Fa-f]+"},{className:"meta",begin:"!important"}]}};return{case_insensitive:!0,illegal:/[=\/|'\$]/,contains:[e.C_BLOCK_COMMENT_MODE,{className:"selector-id",begin:/#[A-Za-z0-9_-]+/},{className:"selector-class",begin:/\.[A-Za-z0-9_-]+/},{className:"selector-attr",begin:/\[/,end:/\]/,illegal:"$",contains:[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]},{className:"selector-pseudo",begin:/:(:)?[a-zA-Z0-9\_\-\+\(\)"'.]+/},{begin:"@(page|font-face)",lexemes:"@[a-z-]+",keywords:"@page @font-face"},{begin:"@",end:"[{;]",illegal:/:/,returnBegin:!0,contains:[{className:"keyword",begin:/@\-?\w[\w]*(\-\w+)*/},{begin:/\s/,endsWithParent:!0,excludeEnd:!0,relevance:0,keywords:"and or not only",contains:[{begin:/[a-z-]+:/,className:"attribute"},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,e.CSS_NUMBER_MODE]}]},{className:"selector-tag",begin:"[a-zA-Z-][a-zA-Z0-9_-]*",relevance:0},{begin:"{",end:"}",illegal:/\S/,contains:[e.C_BLOCK_COMMENT_MODE,{begin:/(?:[A-Z\_\.\-]+|--[a-zA-Z0-9_-]+)\s*:/,returnBegin:!0,end:";",endsWithParent:!0,contains:[n]}]}]}}function r(e){return{aliases:["patch"],contains:[{className:"meta",relevance:10,variants:[{begin:/^@@ +\-\d+,\d+ +\+\d+,\d+ 
+@@$/},{begin:/^\*\*\* +\d+,\d+ +\*\*\*\*$/},{begin:/^\-\-\- +\d+,\d+ +\-\-\-\-$/}]},{className:"comment",variants:[{begin:/Index: /,end:/$/},{begin:/={3,}/,end:/$/},{begin:/^\-{3}/,end:/$/},{begin:/^\*{3} /,end:/$/},{begin:/^\+{3}/,end:/$/},{begin:/^\*{15}$/}]},{className:"addition",begin:"^\\+",end:"$"},{className:"deletion",begin:"^\\-",end:"$"},{className:"addition",begin:"^\\!",end:"$"}]}}function d(e){return{aliases:["docker"],case_insensitive:!0,keywords:"from maintainer expose env arg user onbuild stopsignal",contains:[e.HASH_COMMENT_MODE,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,e.NUMBER_MODE,{beginKeywords:"run cmd entrypoint volume add copy workdir label healthcheck shell",starts:{end:/[^\\]$/,subLanguage:"bash"}}],illegal:"/}]}]}]},s={className:"string",begin:"~[A-Z](?="+s+")",contains:[{begin:/"/,end:/"/},{begin:/'/,end:/'/},{begin:/\//,end:/\//},{begin:/\|/,end:/\|/},{begin:/\(/,end:/\)/},{begin:/\[/,end:/\]/},{begin:/\{/,end:/\}/},{begin:/\/}]},r={className:"string",contains:[e.BACKSLASH_ESCAPE,t],variants:[{begin:/"""/,end:/"""/},{begin:/'''/,end:/'''/},{begin:/~S"""/,end:/"""/,contains:[]},{begin:/~S"/,end:/"/,contains:[]},{begin:/~S'''/,end:/'''/,contains:[]},{begin:/~S'/,end:/'/,contains:[]},{begin:/'/,end:/'/},{begin:/"/,end:/"/}]},l={className:"function",beginKeywords:"def defp defmacro",end:/\B\b/,contains:[e.inherit(e.TITLE_MODE,{begin:n,endsParent:!0})]},o=e.inherit(l,{className:"class",beginKeywords:"defimpl defmodule defprotocol 
defrecord",end:/\bdo\b|$|;/}),s=[r,s,i,e.HASH_COMMENT_MODE,o,l,{begin:"::"},{className:"symbol",begin:":(?![\\s:])",contains:[r,{begin:"[a-zA-Z_]\\w*[!?=]?|[-+~]\\@|<<|>>|=~|===?|<=>|[<>]=?|\\*\\*|[-/+%^&*~`|]|\\[\\]=?"}],relevance:0},{className:"symbol",begin:n+":(?!:)",relevance:0},{className:"number",begin:"(\\b0o[0-7_]+)|(\\b0b[01_]+)|(\\b0x[0-9a-fA-F_]+)|(-?\\b[1-9][0-9_]*(.[0-9_]+([eE][-+]?[0-9]+)?)?)",relevance:0},{className:"variable",begin:"(\\$\\W)|((\\$|\\@\\@?)(\\w+))"},{begin:"->"},{begin:"("+e.RE_STARTERS_RE+")\\s*",contains:[e.HASH_COMMENT_MODE,{className:"regexp",illegal:"\\n",contains:[e.BACKSLASH_ESCAPE,t],variants:[{begin:"/",end:"/[a-z]*"},{begin:"%r\\[",end:"\\][a-z]*"}]}],relevance:0}];return{lexemes:n,keywords:a,contains:t.contains=s}}function u(e){var n={keyword:"break default func interface select case map struct chan else goto package switch const fallthrough if range type continue for import return var go defer bool byte complex64 complex128 float32 float64 int8 int16 int32 int64 string uint8 uint16 uint32 uint64 int uint uintptr rune",literal:"true false iota nil",built_in:"append cap close complex copy imag len make new panic print println real recover delete"};return{aliases:["golang"],keywords:n,illegal:"|<-"}]}}function b(e){var n="false synchronized int abstract float private char boolean var static null if const for true while long strictfp finally protected import native final void enum else break transient catch instanceof byte super volatile case assert short package default double public try this switch continue throws protected public private module requires exports 
do",a={className:"number",begin:"\\b(0[bB]([01]+[01_]+[01]+|[01]+)|0[xX]([a-fA-F0-9]+[a-fA-F0-9_]+[a-fA-F0-9]+|[a-fA-F0-9]+)|(([\\d]+[\\d_]+[\\d]+|[\\d]+)(\\.([\\d]+[\\d_]+[\\d]+|[\\d]+))?|\\.([\\d]+[\\d_]+[\\d]+|[\\d]+))([eE][-+]?\\d+)?)[lLfF]?",relevance:0};return{aliases:["jsp"],keywords:n,illegal:/<\/|#/,contains:[e.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{begin:/\w+@/,relevance:0},{className:"doctag",begin:"@[A-Za-z]+"}]}),e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,{className:"class",beginKeywords:"class interface",end:/[{;=]/,excludeEnd:!0,keywords:"class interface",illegal:/[:"\[\]]/,contains:[{beginKeywords:"extends implements"},e.UNDERSCORE_TITLE_MODE]},{beginKeywords:"new throw return else",relevance:0},{className:"function",begin:"([À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*(<[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*(\\s*,\\s*[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*)*>)?\\s+)+"+e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,end:/[{;=]/,excludeEnd:!0,keywords:n,contains:[{begin:e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,relevance:0,contains:[e.UNDERSCORE_TITLE_MODE]},{className:"params",begin:/\(/,end:/\)/,keywords:n,relevance:0,contains:[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,e.C_NUMBER_MODE,e.C_BLOCK_COMMENT_MODE]},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},a,{className:"meta",begin:"@[A-Za-z]+"}]}}function p(e){var d="<>",g="",n=/<[A-Za-z0-9\\._:-]+/,a=/\/[A-Za-z0-9\\._:-]+>|\/>/,t="[A-Za-z$_][0-9A-Za-z$_]*",i={keyword:"in of if for while finally var new function do return void else break catch instanceof with throw case default try this switch continue typeof delete let yield const export super debugger as async await static import from as",literal:"true false null undefined NaN Infinity",built_in:"eval isFinite isNaN parseFloat parseInt decodeURI decodeURIComponent encodeURI encodeURIComponent escape unescape Object Function Boolean Error EvalError InternalError RangeError ReferenceError StopIteration SyntaxError TypeError 
URIError Number Math Date String RegExp Array Float32Array Float64Array Int16Array Int32Array Int8Array Uint16Array Uint32Array Uint8Array Uint8ClampedArray ArrayBuffer DataView JSON Intl arguments require module console window document Symbol Set Map WeakSet WeakMap Proxy Reflect Promise"},s={className:"number",variants:[{begin:"\\b(0[bB][01]+)n?"},{begin:"\\b(0[oO][0-7]+)n?"},{begin:e.C_NUMBER_RE+"n?"}],relevance:0},r={className:"subst",begin:"\\$\\{",end:"\\}",keywords:i,contains:[]},l={begin:"html`",end:"",starts:{end:"`",returnEnd:!1,contains:[e.BACKSLASH_ESCAPE,r],subLanguage:"xml"}},o={begin:"css`",end:"",starts:{end:"`",returnEnd:!1,contains:[e.BACKSLASH_ESCAPE,r],subLanguage:"css"}},c={className:"string",begin:"`",end:"`",contains:[e.BACKSLASH_ESCAPE,r]},r=(r.contains=[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,l,o,c,s,e.REGEXP_MODE],r.contains.concat([e.C_BLOCK_COMMENT_MODE,e.C_LINE_COMMENT_MODE]));return{aliases:["js","jsx","mjs","cjs"],keywords:i,contains:[{className:"meta",relevance:10,begin:/^\s*['"]use (strict|asm)['"]/},{className:"meta",begin:/^#!/,end:/$/},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,l,o,c,e.C_LINE_COMMENT_MODE,e.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{className:"doctag",begin:"@[A-Za-z]+",contains:[{className:"type",begin:"\\{",end:"\\}",relevance:0},{className:"variable",begin:t+"(?=\\s*(-)|$)",endsParent:!0,relevance:0},{begin:/(?=[^\n])\s/,relevance:0}]}]}),e.C_BLOCK_COMMENT_MODE,s,{begin:/[{,\n]\s*/,relevance:0,contains:[{begin:t+"\\s*:",returnBegin:!0,relevance:0,contains:[{className:"attr",begin:t,relevance:0}]}]},{begin:"("+e.RE_STARTERS_RE+"|\\b(case|return|throw)\\b)\\s*",keywords:"return throw 
case",contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,e.REGEXP_MODE,{className:"function",begin:"(\\(.*?\\)|"+t+")\\s*=>",returnBegin:!0,end:"\\s*=>",contains:[{className:"params",variants:[{begin:t},{begin:/\(\s*\)/},{begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:i,contains:r}]}]},{className:"",begin:/\s/,end:/\s*/,skip:!0},{variants:[{begin:d,end:g},{begin:n,end:a}],subLanguage:"xml",contains:[{begin:n,end:a,skip:!0,contains:["self"]}]}],relevance:0},{className:"function",beginKeywords:"function",end:/\{/,excludeEnd:!0,contains:[e.inherit(e.TITLE_MODE,{begin:t}),{className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,contains:r}],illegal:/\[|%/},{begin:/\$[(.]/},e.METHOD_GUARD,{className:"class",beginKeywords:"class",end:/[{;=]/,excludeEnd:!0,illegal:/[:"\[\]]/,contains:[{beginKeywords:"extends"},e.UNDERSCORE_TITLE_MODE]},{beginKeywords:"constructor get set",end:/\{/,excludeEnd:!0}],illegal:/#(?!!)/}}function f(e){var n={literal:"true false null"},a=[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE],t=[e.QUOTE_STRING_MODE,e.C_NUMBER_MODE],i={end:",",endsWithParent:!0,excludeEnd:!0,contains:t,keywords:n},s={begin:"{",end:"}",contains:[{className:"attr",begin:/"/,end:/"/,contains:[e.BACKSLASH_ESCAPE],illegal:"\\n"},e.inherit(i,{begin:/:/})].concat(a),illegal:"\\S"},e={begin:"\\[",end:"\\]",contains:[e.inherit(i)],illegal:"\\S"};return t.push(s,e),a.forEach(function(e){t.push(e)}),{contains:t,keywords:n,illegal:"\\S"}}function E(e){var n={keyword:"abstract as val var vararg get set class object open private protected public noinline crossinline dynamic final enum if else do while for when throw try catch finally import package is in fun override companion reified inline lateinit init interface annotation data sealed internal infix operator out by constructor super tailrec where const inner suspend typealias external expect actual trait volatile transient native default",built_in:"Byte Short Char Int Long Boolean Float Double Void Unit 
Nothing",literal:"true false null"},a={className:"symbol",begin:e.UNDERSCORE_IDENT_RE+"@"},t={className:"subst",begin:"\\${",end:"}",contains:[e.C_NUMBER_MODE]},i={className:"string",variants:[{begin:'"""',end:'"""(?=[^"])',contains:[i={className:"variable",begin:"\\$"+e.UNDERSCORE_IDENT_RE},t]},{begin:"'",end:"'",illegal:/\n/,contains:[e.BACKSLASH_ESCAPE]},{begin:'"',end:'"',illegal:/\n/,contains:[e.BACKSLASH_ESCAPE,i,t]}]},t=(t.contains.push(i),{className:"meta",begin:"@(?:file|property|field|get|set|receiver|param|setparam|delegate)\\s*:(?:\\s*"+e.UNDERSCORE_IDENT_RE+")?"}),s={className:"meta",begin:"@"+e.UNDERSCORE_IDENT_RE,contains:[{begin:/\(/,end:/\)/,contains:[e.inherit(i,{className:"meta-string"})]}]},r={className:"number",begin:"\\b(0[bB]([01]+[01_]+[01]+|[01]+)|0[xX]([a-fA-F0-9]+[a-fA-F0-9_]+[a-fA-F0-9]+|[a-fA-F0-9]+)|(([\\d]+[\\d_]+[\\d]+|[\\d]+)(\\.([\\d]+[\\d_]+[\\d]+|[\\d]+))?|\\.([\\d]+[\\d_]+[\\d]+|[\\d]+))([eE][-+]?\\d+)?)[lLfF]?",relevance:0},l=e.COMMENT("/\\*","\\*/",{contains:[e.C_BLOCK_COMMENT_MODE]}),o={variants:[{className:"type",begin:e.UNDERSCORE_IDENT_RE},{begin:/\(/,end:/\)/,contains:[]}]},c=o;return 
c.variants[1].contains=[o],o.variants[1].contains=[c],{aliases:["kt"],keywords:n,contains:[e.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{className:"doctag",begin:"@[A-Za-z]+"}]}),e.C_LINE_COMMENT_MODE,l,{className:"keyword",begin:/\b(break|continue|return|this)\b/,starts:{contains:[{className:"symbol",begin:/@\w+/}]}},a,t,s,{className:"function",beginKeywords:"fun",end:"[(]|$",returnBegin:!0,excludeEnd:!0,keywords:n,illegal:/fun\s+(<.*>)?[^\s\(]+(\s+[^\s\(]+)\s*=/,relevance:5,contains:[{begin:e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,relevance:0,contains:[e.UNDERSCORE_TITLE_MODE]},{className:"type",begin://,keywords:"reified",relevance:0},{className:"params",begin:/\(/,end:/\)/,endsParent:!0,keywords:n,relevance:0,contains:[{begin:/:/,end:/[=,\/]/,endsWithParent:!0,contains:[o,e.C_LINE_COMMENT_MODE,l],relevance:0},e.C_LINE_COMMENT_MODE,l,t,s,i,e.C_NUMBER_MODE]},l]},{className:"class",beginKeywords:"class interface trait",end:/[:\{(]|$/,excludeEnd:!0,illegal:"extends implements",contains:[{beginKeywords:"public protected internal private constructor"},e.UNDERSCORE_TITLE_MODE,{className:"type",begin://,excludeBegin:!0,excludeEnd:!0,relevance:0},{className:"type",begin:/[,:]\s*/,end:/[<\(,]|$/,excludeBegin:!0,returnEnd:!0},t,s]},i,{className:"meta",begin:"^#!/usr/bin/env",end:"$",illegal:"\n"},r]}}function N(e){return{aliases:["md","mkdown","mkd"],contains:[{className:"section",variants:[{begin:"^#{1,6}",end:"$"},{begin:"^.+?\\n[=-]{2,}$"}]},{begin:"<",end:">",subLanguage:"xml",relevance:0},{className:"bullet",begin:"^\\s*([*+-]|(\\d+\\.))\\s+"},{className:"strong",begin:"[*_]{2}.+?[*_]{2}"},{className:"emphasis",variants:[{begin:"\\*.+?\\*"},{begin:"_.+?_",relevance:0}]},{className:"quote",begin:"^>\\s+",end:"$"},{className:"code",variants:[{begin:"^```\\w*\\s*$",end:"^```[ ]*$"},{begin:"`.+?`"},{begin:"^( 
{4}|\\t)",end:"$",relevance:0}]},{begin:"^[-\\*]{3,}",end:"$"},{begin:"\\[.+?\\][\\(\\[].*?[\\)\\]]",returnBegin:!0,contains:[{className:"string",begin:"\\[",end:"\\]",excludeBegin:!0,returnEnd:!0,relevance:0},{className:"link",begin:"\\]\\(",end:"\\)",excludeBegin:!0,excludeEnd:!0},{className:"symbol",begin:"\\]\\[",end:"\\]",excludeBegin:!0,excludeEnd:!0}],relevance:10},{begin:/^\[[^\n]+\]:/,returnBegin:!0,contains:[{className:"symbol",begin:/\[/,end:/\]/,excludeBegin:!0,excludeEnd:!0},{className:"link",begin:/:\s*/,end:/$/,excludeBegin:!0}]}]}}function h(e){var n={keyword:"rec with let in inherit assert if else then",literal:"true false or and null",built_in:"import abort baseNameOf dirOf isNull builtins map removeAttrs throw toString derivation"},a={className:"subst",begin:/\$\{/,end:/}/,keywords:n},e=[e.NUMBER_MODE,e.HASH_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,{className:"string",contains:[a],variants:[{begin:"''",end:"''"},{begin:'"',end:'"'}]},{begin:/[a-zA-Z0-9-_]+(\s*=)/,returnBegin:!0,relevance:0,contains:[{className:"attr",begin:/\S+/}]}];return{aliases:["nixos"],keywords:n,contains:a.contains=e}}function v(e){return{disableAutodetect:!0}}function y(e){var n=/[a-zA-Z@][a-zA-Z0-9_]*/,a="@interface @class @protocol @implementation";return{aliases:["mm","objc","obj-c"],keywords:{keyword:"int float while char export sizeof typedef const struct for union unsigned long volatile static bool mutable if do return goto void enum else break extern asm case short default double register explicit signed typename this switch continue wchar_t inline readonly assign readwrite self @synchronized id typeof nonatomic super unichar IBOutlet IBAction strong weak copy in out inout bycopy byref oneway __strong __weak __block __autoreleasing @private @protected @public @try @property @end @throw @catch @finally @autoreleasepool @synthesize @dynamic @selector @optional @required @encode @package @import @defs @compatibility_alias __bridge __bridge_transfer __bridge_retained 
__bridge_retain __covariant __contravariant __kindof _Nonnull _Nullable _Null_unspecified __FUNCTION__ __PRETTY_FUNCTION__ __attribute__ getter setter retain unsafe_unretained nonnull nullable null_unspecified null_resettable class instancetype NS_DESIGNATED_INITIALIZER NS_UNAVAILABLE NS_REQUIRES_SUPER NS_RETURNS_INNER_POINTER NS_INLINE NS_AVAILABLE NS_DEPRECATED NS_ENUM NS_OPTIONS NS_SWIFT_UNAVAILABLE NS_ASSUME_NONNULL_BEGIN NS_ASSUME_NONNULL_END NS_REFINED_FOR_SWIFT NS_SWIFT_NAME NS_SWIFT_NOTHROW NS_DURING NS_HANDLER NS_ENDHANDLER NS_VALUERETURN NS_VOIDRETURN",literal:"false true FALSE TRUE nil YES NO NULL",built_in:"BOOL dispatch_once_t dispatch_queue_t dispatch_sync dispatch_async dispatch_once"},lexemes:n,illegal:"/,end:/$/,illegal:"\\n"},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{className:"class",begin:"("+a.split(" ").join("|")+")\\b",end:"({|$)",excludeEnd:!0,keywords:a,lexemes:n,contains:[e.UNDERSCORE_TITLE_MODE]},{begin:"\\."+e.UNDERSCORE_IDENT_RE,relevance:0}]}}function w(e){var n="getpwent getservent quotemeta msgrcv scalar kill dbmclose undef lc ma syswrite tr send umask sysopen shmwrite vec qx utime local oct semctl localtime readpipe do return format read sprintf dbmopen pop getpgrp not getpwnam rewinddir qqfileno qw endprotoent wait sethostent bless s|0 opendir continue each sleep endgrent shutdown dump chomp connect getsockname die socketpair close flock exists index shmgetsub for endpwent redo lstat msgctl setpgrp abs exit select print ref gethostbyaddr unshift fcntl syscall goto getnetbyaddr join gmtime symlink semget splice x|0 getpeername recv log setsockopt cos last reverse gethostbyname getgrnam study formline endhostent times chop length gethostent getnetent pack getprotoent getservbyname rand mkdir pos chmod y|0 substr endnetent printf next open msgsnd readdir use unlink getsockopt getpriority rindex wantarray hex system getservbyport endservent int chr untie rmdir prototype tell listen fork shmread ucfirst setprotoent else sysseek 
link getgrgid shmctl waitpid unpack getnetbyname reset chdir grep split require caller lcfirst until warn while values shift telldir getpwuid my getprotobynumber delete and sort uc defined srand accept package seekdir getprotobyname semop our rename seek if q|0 chroot sysread setpwent no crypt getc chown sqrt write setnetent setpriority foreach tie sin msgget map stat getlogin unless elsif truncate exec keys glob tied closedirioctl socket readlink eval xor readline binmode setservent eof ord bind alarm pipe atan2 getgrent exp time push setgrent gt lt or ne m|0 break given say state when",a={className:"subst",begin:"[$@]\\{",end:"\\}",keywords:n},t={begin:"->{",end:"}"},i={variants:[{begin:/\$\d/},{begin:/[\$%@](\^\w\b|#\w+(::\w+)*|{\w+}|\w+(::\w*)*)/},{begin:/[\$%@][^\s\w{]/,relevance:0}]},s=[e.BACKSLASH_ESCAPE,a,i],i=[i,e.HASH_COMMENT_MODE,e.COMMENT("^\\=\\w","\\=cut",{endsWithParent:!0}),t,{className:"string",contains:s,variants:[{begin:"q[qwxr]?\\s*\\(",end:"\\)",relevance:5},{begin:"q[qwxr]?\\s*\\[",end:"\\]",relevance:5},{begin:"q[qwxr]?\\s*\\{",end:"\\}",relevance:5},{begin:"q[qwxr]?\\s*\\|",end:"\\|",relevance:5},{begin:"q[qwxr]?\\s*\\<",end:"\\>",relevance:5},{begin:"qw\\s+q",end:"q",relevance:5},{begin:"'",end:"'",contains:[e.BACKSLASH_ESCAPE]},{begin:'"',end:'"'},{begin:"`",end:"`",contains:[e.BACKSLASH_ESCAPE]},{begin:"{\\w+}",contains:[],relevance:0},{begin:"-?\\w+\\s*\\=\\>",contains:[],relevance:0}]},{className:"number",begin:"(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b",relevance:0},{begin:"(\\/\\/|"+e.RE_STARTERS_RE+"|\\b(split|return|print|reverse|grep)\\b)\\s*",keywords:"split return print reverse 
grep",relevance:0,contains:[e.HASH_COMMENT_MODE,{className:"regexp",begin:"(s|tr|y)/(\\\\.|[^/])*/(\\\\.|[^/])*/[a-z]*",relevance:10},{className:"regexp",begin:"(m|qr)?/",end:"/[a-z]*",contains:[e.BACKSLASH_ESCAPE],relevance:0}]},{className:"function",beginKeywords:"sub",end:"(\\s*\\(.*?\\))?[;{]",excludeEnd:!0,relevance:5,contains:[e.TITLE_MODE]},{begin:"-\\w\\b",relevance:0},{begin:"^__DATA__$",end:"^__END__$",subLanguage:"mojolicious",contains:[{begin:"^@@.*",end:"$",className:"comment"}]}];return a.contains=i,{aliases:["pl","pm"],lexemes:/[\w\.]+/,keywords:n,contains:t.contains=i}}function O(e){var n={begin:"\\$+[a-zA-Z_-ÿ][a-zA-Z0-9_-ÿ]*"},a={className:"meta",begin:/<\?(php)?|\?>/},t={className:"string",contains:[e.BACKSLASH_ESCAPE,a],variants:[{begin:'b"',end:'"'},{begin:"b'",end:"'"},e.inherit(e.APOS_STRING_MODE,{illegal:null}),e.inherit(e.QUOTE_STRING_MODE,{illegal:null})]},i={variants:[e.BINARY_NUMBER_MODE,e.C_NUMBER_MODE]};return{aliases:["php","php3","php4","php5","php6","php7"],case_insensitive:!0,keywords:"and include_once list abstract global private echo interface as static endswitch array null if endwhile or const for endforeach self var while isset public protected exit foreach throw elseif include __FILE__ empty require_once do xor return parent clone use __CLASS__ __LINE__ else break print eval new catch __METHOD__ case exception default die require __FUNCTION__ enddeclare final try switch continue endfor endif declare unset true false trait goto instanceof insteadof __DIR__ __NAMESPACE__ yield 
finally",contains:[e.HASH_COMMENT_MODE,e.COMMENT("//","$",{contains:[a]}),e.COMMENT("/\\*","\\*/",{contains:[{className:"doctag",begin:"@[A-Za-z]+"}]}),e.COMMENT("__halt_compiler.+?;",!1,{endsWithParent:!0,keywords:"__halt_compiler",lexemes:e.UNDERSCORE_IDENT_RE}),{className:"string",begin:/<<<['"]?\w+['"]?$/,end:/^\w+;?$/,contains:[e.BACKSLASH_ESCAPE,{className:"subst",variants:[{begin:/\$\w+/},{begin:/\{\$/,end:/\}/}]}]},a,{className:"keyword",begin:/\$this\b/},n,{begin:/(::|->)+[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*/},{className:"function",beginKeywords:"function",end:/[;{]/,excludeEnd:!0,illegal:"\\$|\\[|%",contains:[e.UNDERSCORE_TITLE_MODE,{className:"params",begin:"\\(",end:"\\)",contains:["self",n,e.C_BLOCK_COMMENT_MODE,t,i]}]},{className:"class",beginKeywords:"class interface",end:"{",excludeEnd:!0,illegal:/[:\(\$"]/,contains:[{beginKeywords:"extends implements"},e.UNDERSCORE_TITLE_MODE]},{beginKeywords:"namespace",end:";",illegal:/[\.']/,contains:[e.UNDERSCORE_TITLE_MODE]},{beginKeywords:"use",end:";",contains:[e.UNDERSCORE_TITLE_MODE]},{begin:"=>"},t,i]}}function M(e){var n="[ \\t\\f]*",a="("+n+"[:=]"+n+"|[ \\t\\f]+)",t="([^\\\\\\W:= \\t\\f\\n]|\\\\.)+",i="([^\\\\:= \\t\\f\\n]|\\\\.)+",s={end:a,relevance:0,starts:{className:"string",end:/$/,relevance:0,contains:[{begin:"\\\\\\n"}]}};return{case_insensitive:!0,illegal:/\S/,contains:[e.COMMENT("^\\s*[!#]","$"),{begin:t+a,returnBegin:!0,contains:[{className:"attr",begin:t,endsParent:!0,relevance:0}],starts:s},{begin:i+a,returnBegin:!0,relevance:0,contains:[{className:"meta",begin:i,endsParent:!0,relevance:0}],starts:s},{className:"attr",relevance:0,begin:i+n+"$"}]}}function C(e){var 
n=e.COMMENT("#","$"),a="([A-Za-z_]|::)(\\w|::)*",t=e.inherit(e.TITLE_MODE,{begin:a}),a={className:"variable",begin:"\\$"+a},i={className:"string",contains:[e.BACKSLASH_ESCAPE,a],variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/}]};return{aliases:["pp"],contains:[n,a,i,{beginKeywords:"class",end:"\\{|;",illegal:/=/,contains:[t,n]},{beginKeywords:"define",end:/\{/,contains:[{className:"section",begin:e.IDENT_RE,endsParent:!0}]},{begin:e.IDENT_RE+"\\s+\\{",returnBegin:!0,end:/\S/,contains:[{className:"keyword",begin:e.IDENT_RE},{begin:/\{/,end:/\}/,keywords:{keyword:"and case default else elsif false if in import enherits node or true undef unless main settings $string ",literal:"alias audit before loglevel noop require subscribe tag owner ensure group mode name|0 changes context force incl lens load_path onlyif provider returns root show_diff type_check en_address ip_address realname command environment hour monute month monthday special target weekday creates cwd ogoutput refresh refreshonly tries try_sleep umask backup checksum content ctime force ignore links mtime purge recurse recurselimit replace selinux_ignore_defaults selrange selrole seltype seluser source souirce_permissions sourceselect validate_cmd validate_replacement allowdupe attribute_membership auth_membership forcelocal gid ia_load_module members system host_aliases ip allowed_trunk_vlans description device_url duplex encapsulation etherchannel native_vlan speed principals allow_root auth_class auth_type authenticate_user k_of_n mechanisms rule session_owner shared options device fstype enable hasrestart directory present absent link atboot blockdevice device dump pass remounts poller_tag use message withpath adminfile allow_virtual allowcdrom category configfiles flavor install_options instance package_settings platform responsefile status uninstall_options vendor unless_system_user unless_uid binary control flags hasstatus manifest pattern restart running start stop allowdupe auths expiry gid 
groups home iterations key_membership keys managehome membership password password_max_age password_min_age profile_membership profiles project purge_ssh_keys role_membership roles salt shell uid baseurl cost descr enabled enablegroups exclude failovermethod gpgcheck gpgkey http_caching include includepkgs keepalive metadata_expire metalink mirrorlist priority protect proxy proxy_password proxy_username repo_gpgcheck s3_enabled skip_if_unavailable sslcacert sslclientcert sslclientkey sslverify mounted",built_in:"architecture augeasversion blockdevices boardmanufacturer boardproductname boardserialnumber cfkey dhcp_servers domain ec2_ ec2_userdata facterversion filesystems ldom fqdn gid hardwareisa hardwaremodel hostname id|0 interfaces ipaddress ipaddress_ ipaddress6 ipaddress6_ iphostnumber is_virtual kernel kernelmajversion kernelrelease kernelversion kernelrelease kernelversion lsbdistcodename lsbdistdescription lsbdistid lsbdistrelease lsbmajdistrelease lsbminordistrelease lsbrelease macaddress macaddress_ macosx_buildversion macosx_productname macosx_productversion macosx_productverson_major macosx_productversion_minor manufacturer memoryfree memorysize netmask metmask_ network_ operatingsystem operatingsystemmajrelease operatingsystemrelease osfamily partitions path physicalprocessorcount processor processorcount productname ps puppetversion rubysitedir rubyversion selinux selinux_config_mode selinux_config_policy selinux_current_mode selinux_current_mode selinux_enforced selinux_policyversion serialnumber sp_ sshdsakey sshecdsakey sshrsakey swapencrypted swapfree swapsize timezone type uniqueid uptime uptime_days uptime_hours uptime_seconds uuid virtual vlans xendomains zfs_version zonenae zones 
zpool_version"},relevance:0,contains:[i,n,{begin:"[a-zA-Z_]+\\s*=>",returnBegin:!0,end:"=>",contains:[{className:"attr",begin:e.IDENT_RE}]},{className:"number",begin:"(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b",relevance:0},a]}],relevance:0}]}}function x(e){var n={keyword:"and elif is global as in if from raise for except finally print import pass return exec else break not with class assert yield try while continue del or def lambda async await nonlocal|10",built_in:"Ellipsis NotImplemented",literal:"False None True"},a={className:"meta",begin:/^(>>>|\.\.\.) /},t={className:"subst",begin:/\{/,end:/\}/,keywords:n,illegal:/#/},i={begin:/\{\{/,relevance:0},i={className:"string",contains:[e.BACKSLASH_ESCAPE],variants:[{begin:/(u|b)?r?'''/,end:/'''/,contains:[e.BACKSLASH_ESCAPE,a],relevance:10},{begin:/(u|b)?r?"""/,end:/"""/,contains:[e.BACKSLASH_ESCAPE,a],relevance:10},{begin:/(fr|rf|f)'''/,end:/'''/,contains:[e.BACKSLASH_ESCAPE,a,i,t]},{begin:/(fr|rf|f)"""/,end:/"""/,contains:[e.BACKSLASH_ESCAPE,a,i,t]},{begin:/(u|r|ur)'/,end:/'/,relevance:10},{begin:/(u|r|ur)"/,end:/"/,relevance:10},{begin:/(b|br)'/,end:/'/},{begin:/(b|br)"/,end:/"/},{begin:/(fr|rf|f)'/,end:/'/,contains:[e.BACKSLASH_ESCAPE,i,t]},{begin:/(fr|rf|f)"/,end:/"/,contains:[e.BACKSLASH_ESCAPE,i,t]},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]},s={className:"number",relevance:0,variants:[{begin:e.BINARY_NUMBER_RE+"[lLjJ]?"},{begin:"\\b(0o[0-7]+)[lLjJ]?"},{begin:e.C_NUMBER_RE+"[lLjJ]?"}]},r={className:"params",begin:/\(/,end:/\)/,contains:["self",a,s,i,e.HASH_COMMENT_MODE]};return t.contains=[i,s,a],{aliases:["py","gyp","ipython"],keywords:n,illegal:/(<\/|->|\?)|=>/,contains:[a,s,{beginKeywords:"if",relevance:0},i,e.HASH_COMMENT_MODE,{variants:[{className:"function",beginKeywords:"def"},{className:"class",beginKeywords:"class"}],end:/:/,illegal:/[${=;\n,]/,contains:[e.UNDERSCORE_TITLE_MODE,r,{begin:/->/,endsWithParent:!0,keywords:"None"}]},{className:"meta",begin:/^[\t 
]*@/,end:/$/},{begin:/\b(print|exec)\(/}]}}function S(e){var n="[a-zA-Z_]\\w*[!?=]?|[-+~]\\@|<<|>>|=~|===?|<=>|[<>]=?|\\*\\*|[-/+%^&*~`|]|\\[\\]=?",a={keyword:"and then defined module in return redo if BEGIN retry end for self when next until do begin unless END rescue else break undef not super class case require yield alias while ensure elsif or include attr_reader attr_writer attr_accessor",literal:"true false nil"},t={className:"doctag",begin:"@[A-Za-z]+"},i={begin:"#<",end:">"},t=[e.COMMENT("#","$",{contains:[t]}),e.COMMENT("^\\=begin","^\\=end",{contains:[t],relevance:10}),e.COMMENT("^__END__","\\n$")],s={className:"subst",begin:"#\\{",end:"}",keywords:a},r={className:"string",contains:[e.BACKSLASH_ESCAPE,s],variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/`/,end:/`/},{begin:"%[qQwWx]?\\(",end:"\\)"},{begin:"%[qQwWx]?\\[",end:"\\]"},{begin:"%[qQwWx]?{",end:"}"},{begin:"%[qQwWx]?<",end:">"},{begin:"%[qQwWx]?/",end:"/"},{begin:"%[qQwWx]?%",end:"%"},{begin:"%[qQwWx]?-",end:"-"},{begin:"%[qQwWx]?\\|",end:"\\|"},{begin:/\B\?(\\\d{1,3}|\\x[A-Fa-f0-9]{1,2}|\\u[A-Fa-f0-9]{4}|\\?\S)\b/},{begin:/<<[-~]?'?(\w+)(?:.|\n)*?\n\s*\1\b/,returnBegin:!0,contains:[{begin:/<<[-~]?'?/},{begin:/\w+/,endSameAsBegin:!0,contains:[e.BACKSLASH_ESCAPE,s]}]}]},l={className:"params",begin:"\\(",end:"\\)",endsParent:!0,keywords:a},r=[r,i,{className:"class",beginKeywords:"class 
module",end:"$|;",illegal:/=/,contains:[e.inherit(e.TITLE_MODE,{begin:"[A-Za-z_]\\w*(::\\w+)*(\\?|\\!)?"}),{begin:"<\\s*",contains:[{begin:"("+e.IDENT_RE+"::)?"+e.IDENT_RE}]}].concat(t)},{className:"function",beginKeywords:"def",end:"$|;",contains:[e.inherit(e.TITLE_MODE,{begin:n}),l].concat(t)},{begin:e.IDENT_RE+"::"},{className:"symbol",begin:e.UNDERSCORE_IDENT_RE+"(\\!|\\?)?:",relevance:0},{className:"symbol",begin:":(?!\\s)",contains:[r,{begin:n}],relevance:0},{className:"number",begin:"(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b",relevance:0},{begin:"(\\$\\W)|((\\$|\\@\\@?)(\\w+))"},{className:"params",begin:/\|/,end:/\|/,keywords:a},{begin:"("+e.RE_STARTERS_RE+"|unless)\\s*",keywords:"unless",contains:[i,{className:"regexp",contains:[e.BACKSLASH_ESCAPE,s],illegal:/\n/,variants:[{begin:"/",end:"/[a-z]*"},{begin:"%r{",end:"}[a-z]*"},{begin:"%r\\(",end:"\\)[a-z]*"},{begin:"%r!",end:"![a-z]*"},{begin:"%r\\[",end:"\\][a-z]*"}]}].concat(t),relevance:0}].concat(t);return s.contains=r,l.contains=r,{aliases:["rb","gemspec","podspec","thor","irb"],keywords:a,illegal:/\/\*/,contains:t.concat([{begin:/^\s*=>/,starts:{end:"$",contains:r}},{className:"meta",begin:"^([>?]>|[\\w#]+\\(\\w+\\):\\d+:\\d+>|(\\w+-)?\\d+\\.\\d+\\.\\d(p\\d+)?[^>]+>)",starts:{end:"$",contains:r}}]).concat(r)}}function T(e){var n="([ui](8|16|32|64|128|size)|f(32|64))?",a="drop i8 i16 i32 i64 i128 isize u8 u16 u32 u64 u128 usize f32 f64 str char bool Box Option Result String Vec Copy Send Sized Sync Drop Fn FnMut FnOnce ToOwned Clone Debug PartialEq PartialOrd Eq Ord AsRef AsMut Into From Default Iterator Extend IntoIterator DoubleEndedIterator ExactSizeIterator SliceConcatExt ToString assert! assert_eq! bitflags! bytes! cfg! col! concat! concat_idents! debug_assert! debug_assert_eq! env! panic! file! format! format_args! include_bin! include_str! line! local_data_key! module_path! option_env! print! println! select! stringify! try! unimplemented! unreachable! vec! write! 
writeln! macro_rules! assert_ne! debug_assert_ne!";return{aliases:["rs"],keywords:{keyword:"abstract as async await become box break const continue crate do dyn else enum extern false final fn for if impl in let loop macro match mod move mut override priv pub ref return self Self static struct super trait true try type typeof unsafe unsized use virtual where while yield",literal:"true false Some None Ok Err",built_in:a},lexemes:e.IDENT_RE+"!?",illegal:""}]}}function k(e){var n={className:"subst",variants:[{begin:"\\$[A-Za-z0-9_]+"},{begin:"\\${",end:"}"}]},n={className:"string",variants:[{begin:'"',end:'"',illegal:"\\n",contains:[e.BACKSLASH_ESCAPE]},{begin:'"""',end:'"""',relevance:10},{begin:'[a-z]+"',end:'"',illegal:"\\n",contains:[e.BACKSLASH_ESCAPE,n]},{className:"string",begin:'[a-z]+"""',end:'"""',contains:[n],relevance:10}]},a={className:"type",begin:"\\b[A-Z][A-Za-z0-9_]*",relevance:0},t={className:"title",begin:/[^0-9\n\t "'(),.`{}\[\]:;][^\n\t "'(),.`{}\[\]:;]+|[^0-9\n\t "'(),.`{}\[\]:;=]/,relevance:0};return{keywords:{literal:"true false null",keyword:"type yield lazy override def with val var sealed abstract private trait object if forSome for while throw finally protected extends import final return else break new catch super class case package default try this match continue throws implicit"},contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,n,{className:"symbol",begin:"'\\w[\\w\\d_]*(?!')"},a,{className:"function",beginKeywords:"def",end:/[:={\[(\n;]/,excludeEnd:!0,contains:[t]},{className:"class",beginKeywords:"class object trait type",end:/[:={\[\n;]/,excludeEnd:!0,contains:[{beginKeywords:"extends with",relevance:10},{begin:/\[/,end:/\]/,excludeBegin:!0,excludeEnd:!0,relevance:0,contains:[a]},{className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,relevance:0,contains:[a]},t]},e.C_NUMBER_MODE,{className:"meta",begin:"@[A-Za-z]+"}]}}function 
A(e){return{aliases:["console"],contains:[{className:"meta",begin:"^\\s{0,3}[/\\w\\d\\[\\]()@-]*[>%$#]",starts:{end:"$",subLanguage:"bash"}}]}}function R(e){var n=e.COMMENT("--","$");return{case_insensitive:!0,illegal:/[<>{}*]/,contains:[{beginKeywords:"begin end start commit rollback savepoint lock alter create drop rename call delete do handler insert load replace select truncate update set show pragma grant merge describe use explain help declare prepare execute deallocate release unlock purge reset change stop analyze cache flush optimize repair kill install uninstall checksum restore check backup revoke comment values with",end:/;/,endsWithParent:!0,lexemes:/[\w\.]+/,keywords:{keyword:"as abort abs absolute acc acce accep accept access accessed accessible account acos action activate add addtime admin administer advanced advise aes_decrypt aes_encrypt after agent aggregate ali alia alias all allocate allow alter always analyze ancillary and anti any anydata anydataset anyschema anytype apply archive archived archivelog are as asc ascii asin assembly assertion associate asynchronous at atan atn2 attr attri attrib attribu attribut attribute attributes audit authenticated authentication authid authors auto autoallocate autodblink autoextend automatic availability avg backup badfile basicfile before begin beginning benchmark between bfile bfile_base big bigfile bin binary_double binary_float binlog bit_and bit_count bit_length bit_or bit_xor bitmap blob_base block blocksize body both bound bucket buffer_cache buffer_pool build bulk by byte byteordermark bytes cache caching call calling cancel capacity cascade cascaded case cast catalog category ceil ceiling chain change changed char_base char_length character_length characters characterset charindex charset charsetform charsetid check checksum checksum_agg child choose chr chunk class cleanup clear client clob clob_base clone close cluster_id cluster_probability cluster_set clustering coalesce coercibility col 
collate collation collect colu colum column column_value columns columns_updated comment commit compact compatibility compiled complete composite_limit compound compress compute concat concat_ws concurrent confirm conn connec connect connect_by_iscycle connect_by_isleaf connect_by_root connect_time connection consider consistent constant constraint constraints constructor container content contents context contributors controlfile conv convert convert_tz corr corr_k corr_s corresponding corruption cos cost count count_big counted covar_pop covar_samp cpu_per_call cpu_per_session crc32 create creation critical cross cube cume_dist curdate current current_date current_time current_timestamp current_user cursor curtime customdatum cycle data database databases datafile datafiles datalength date_add date_cache date_format date_sub dateadd datediff datefromparts datename datepart datetime2fromparts day day_to_second dayname dayofmonth dayofweek dayofyear days db_role_change dbtimezone ddl deallocate declare decode decompose decrement decrypt deduplicate def defa defau defaul default defaults deferred defi defin define degrees delayed delegate delete delete_all delimited demand dense_rank depth dequeue des_decrypt des_encrypt des_key_file desc descr descri describ describe descriptor deterministic diagnostics difference dimension direct_load directory disable disable_all disallow disassociate discardfile disconnect diskgroup distinct distinctrow distribute distributed div do document domain dotnet double downgrade drop dumpfile duplicate duration each edition editionable editions element ellipsis else elsif elt empty enable enable_all enclosed encode encoding encrypt end end-exec endian enforced engine engines enqueue enterprise entityescaping eomonth error errors escaped evalname evaluate event eventdata events except exception exceptions exchange exclude excluding execu execut execute exempt exists exit exp expire explain explode export export_set extended extent 
external external_1 external_2 externally extract failed failed_login_attempts failover failure far fast feature_set feature_value fetch field fields file file_name_convert filesystem_like_logging final finish first first_value fixed flash_cache flashback floor flush following follows for forall force foreign form forma format found found_rows freelist freelists freepools fresh from from_base64 from_days ftp full function general generated get get_format get_lock getdate getutcdate global global_name globally go goto grant grants greatest group group_concat group_id grouping grouping_id groups gtid_subtract guarantee guard handler hash hashkeys having hea head headi headin heading heap help hex hierarchy high high_priority hosts hour hours http id ident_current ident_incr ident_seed identified identity idle_time if ifnull ignore iif ilike ilm immediate import in include including increment index indexes indexing indextype indicator indices inet6_aton inet6_ntoa inet_aton inet_ntoa infile initial initialized initially initrans inmemory inner innodb input insert install instance instantiable instr interface interleaved intersect into invalidate invisible is is_free_lock is_ipv4 is_ipv4_compat is_not is_not_null is_used_lock isdate isnull isolation iterate java join json json_exists keep keep_duplicates key keys kill language large last last_day last_insert_id last_value lateral lax lcase lead leading least leaves left len lenght length less level levels library like like2 like4 likec limit lines link list listagg little ln load load_file lob lobs local localtime localtimestamp locate locator lock locked log log10 log2 logfile logfiles logging logical logical_reads_per_call logoff logon logs long loop low low_priority lower lpad lrtrim ltrim main make_set makedate maketime managed management manual map mapping mask master master_pos_wait match matched materialized max maxextents maximize maxinstances maxlen maxlogfiles maxloghistory maxlogmembers maxsize maxtrans md5 
measures median medium member memcompress memory merge microsecond mid migration min minextents minimum mining minus minute minutes minvalue missing mod mode model modification modify module monitoring month months mount move movement multiset mutex name name_const names nan national native natural nav nchar nclob nested never new newline next nextval no no_write_to_binlog noarchivelog noaudit nobadfile nocheck nocompress nocopy nocycle nodelay nodiscardfile noentityescaping noguarantee nokeep nologfile nomapping nomaxvalue nominimize nominvalue nomonitoring none noneditionable nonschema noorder nopr nopro noprom nopromp noprompt norely noresetlogs noreverse normal norowdependencies noschemacheck noswitch not nothing notice notnull notrim novalidate now nowait nth_value nullif nulls num numb numbe nvarchar nvarchar2 object ocicoll ocidate ocidatetime ociduration ociinterval ociloblocator ocinumber ociref ocirefcursor ocirowid ocistring ocitype oct octet_length of off offline offset oid oidindex old on online only opaque open operations operator optimal optimize option optionally or oracle oracle_date oradata ord ordaudio orddicom orddoc order ordimage ordinality ordvideo organization orlany orlvary out outer outfile outline output over overflow overriding package pad parallel parallel_enable parameters parent parse partial partition partitions pascal passing password password_grace_time password_lock_time password_reuse_max password_reuse_time password_verify_function patch path patindex pctincrease pctthreshold pctused pctversion percent percent_rank percentile_cont percentile_disc performance period period_add period_diff permanent physical pi pipe pipelined pivot pluggable plugin policy position post_transaction pow power pragma prebuilt precedes preceding precision prediction prediction_cost prediction_details prediction_probability prediction_set prepare present preserve prior priority private private_sga privileges procedural procedure procedure_analyze 
processlist profiles project prompt protection public publishingservername purge quarter query quick quiesce quota quotename radians raise rand range rank raw read reads readsize rebuild record records recover recovery recursive recycle redo reduced ref reference referenced references referencing refresh regexp_like register regr_avgx regr_avgy regr_count regr_intercept regr_r2 regr_slope regr_sxx regr_sxy reject rekey relational relative relaylog release release_lock relies_on relocate rely rem remainder rename repair repeat replace replicate replication required reset resetlogs resize resource respect restore restricted result result_cache resumable resume retention return returning returns reuse reverse revoke right rlike role roles rollback rolling rollup round row row_count rowdependencies rowid rownum rows rtrim rules safe salt sample save savepoint sb1 sb2 sb4 scan schema schemacheck scn scope scroll sdo_georaster sdo_topo_geometry search sec_to_time second seconds section securefile security seed segment select self semi sequence sequential serializable server servererror session session_user sessions_per_user set sets settings sha sha1 sha2 share shared shared_pool short show shrink shutdown si_averagecolor si_colorhistogram si_featurelist si_positionalcolor si_stillimage si_texture siblings sid sign sin size size_t sizes skip slave sleep smalldatetimefromparts smallfile snapshot some soname sort soundex source space sparse spfile split sql sql_big_result sql_buffer_result sql_cache sql_calc_found_rows sql_small_result sql_variant_property sqlcode sqldata sqlerror sqlname sqlstate sqrt square standalone standby start starting startup statement static statistics stats_binomial_test stats_crosstab stats_ks_test stats_mode stats_mw_test stats_one_way_anova stats_t_test_ stats_t_test_indep stats_t_test_one stats_t_test_paired stats_wsr_test status std stddev stddev_pop stddev_samp stdev stop storage store stored str str_to_date straight_join strcmp strict 
string struct stuff style subdate subpartition subpartitions substitutable substr substring subtime subtring_index subtype success sum suspend switch switchoffset switchover sync synchronous synonym sys sys_xmlagg sysasm sysaux sysdate sysdatetimeoffset sysdba sysoper system system_user sysutcdatetime table tables tablespace tablesample tan tdo template temporary terminated tertiary_weights test than then thread through tier ties time time_format time_zone timediff timefromparts timeout timestamp timestampadd timestampdiff timezone_abbr timezone_minute timezone_region to to_base64 to_date to_days to_seconds todatetimeoffset trace tracking transaction transactional translate translation treat trigger trigger_nestlevel triggers trim truncate try_cast try_convert try_parse type ub1 ub2 ub4 ucase unarchived unbounded uncompress under undo unhex unicode uniform uninstall union unique unix_timestamp unknown unlimited unlock unnest unpivot unrecoverable unsafe unsigned until untrusted unusable unused update updated upgrade upped upper upsert url urowid usable usage use use_stored_outlines user user_data user_resources users using utc_date utc_timestamp uuid uuid_short validate validate_password_strength validation valist value values var var_samp varcharc vari varia variab variabl variable variables variance varp varraw varrawc varray verify version versions view virtual visible void wait wallet warning warnings week weekday weekofyear wellformed when whene whenev wheneve whenever where while whitespace window with within without work wrapped xdb xml xmlagg xmlattributes xmlcast xmlcolattval xmlelement xmlexists xmlforest xmlindex xmlnamespaces xmlpi xmlquery xmlroot xmlschema xmlserialize xmltable xmltype xor year year_to_month years yearweek",literal:"true false null unknown",built_in:"array bigint binary bit blob bool boolean char character date dec decimal float int int8 integer interval number numeric real record serial serial8 smallint text time timestamp tinyint 
varchar varchar2 varying void"},contains:[{className:"string",begin:"'",end:"'",contains:[{begin:"''"}]},{className:"string",begin:'"',end:'"',contains:[{begin:'""'}]},{className:"string",begin:"`",end:"`"},e.C_NUMBER_MODE,e.C_BLOCK_COMMENT_MODE,n,e.HASH_COMMENT_MODE]},e.C_BLOCK_COMMENT_MODE,n,e.HASH_COMMENT_MODE]}}function D(e){var n={keyword:"#available #colorLiteral #column #else #elseif #endif #file #fileLiteral #function #if #imageLiteral #line #selector #sourceLocation _ __COLUMN__ __FILE__ __FUNCTION__ __LINE__ Any as as! as? associatedtype associativity break case catch class continue convenience default defer deinit didSet do dynamic dynamicType else enum extension fallthrough false fileprivate final for func get guard if import in indirect infix init inout internal is lazy left let mutating nil none nonmutating open operator optional override postfix precedence prefix private protocol Protocol public repeat required rethrows return right self Self set static struct subscript super switch throw throws true try try! try? 
Type typealias unowned var weak where while willSet",literal:"true false nil",built_in:"abs advance alignof alignofValue anyGenerator assert assertionFailure bridgeFromObjectiveC bridgeFromObjectiveCUnconditional bridgeToObjectiveC bridgeToObjectiveCUnconditional c contains count countElements countLeadingZeros debugPrint debugPrintln distance dropFirst dropLast dump encodeBitsAsWords enumerate equal fatalError filter find getBridgedObjectiveCType getVaList indices insertionSort isBridgedToObjectiveC isBridgedVerbatimToObjectiveC isUniquelyReferenced isUniquelyReferencedNonObjC join lazy lexicographicalCompare map max maxElement min minElement numericCast overlaps partition posix precondition preconditionFailure print println quickSort readLine reduce reflect reinterpretCast reverse roundUpToAlignment sizeof sizeofValue sort split startsWith stride strideof strideofValue swap toString transcode underestimateCount unsafeAddressOf unsafeBitCast unsafeDowncast unsafeUnwrap unsafeReflect withExtendedLifetime withObjectAtPlusZero withUnsafePointer withUnsafePointerToObject withUnsafeMutablePointer withUnsafeMutablePointers withUnsafePointer withUnsafePointers withVaList zip"},a=e.COMMENT("/\\*","\\*/",{contains:["self"]}),t={className:"subst",begin:/\\\(/,end:"\\)",keywords:n,contains:[]},i={className:"string",contains:[e.BACKSLASH_ESCAPE,t],variants:[{begin:/"""/,end:/"""/},{begin:/"/,end:/"/}]},s={className:"number",begin:"\\b([\\d_]+(\\.[\\deE_]+)?|0x[a-fA-F0-9_]+(\\.[a-fA-F0-9p_]+)?|0b[01_]+|0o[0-7_]+)\\b",relevance:0};return 
t.contains=[s],{keywords:n,contains:[i,e.C_LINE_COMMENT_MODE,a,{className:"type",begin:"\\b[A-Z][\\wÀ-ʸ']*[!?]"},{className:"type",begin:"\\b[A-Z][\\wÀ-ʸ']*",relevance:0},s,{className:"function",beginKeywords:"func",end:"{",excludeEnd:!0,contains:[e.inherit(e.TITLE_MODE,{begin:/[A-Za-z$_][0-9A-Za-z$_]*/}),{begin://},{className:"params",begin:/\(/,end:/\)/,endsParent:!0,keywords:n,contains:["self",s,i,e.C_BLOCK_COMMENT_MODE,{begin:":"}],illegal:/["']/}],illegal:/\[|%/},{className:"class",beginKeywords:"struct protocol class extension enum",keywords:n,end:"\\{",excludeEnd:!0,contains:[e.inherit(e.TITLE_MODE,{begin:/[A-Za-z$_][\u00C0-\u02B80-9A-Za-z$_]*/})]},{className:"meta",begin:"(@discardableResult|@warn_unused_result|@exported|@lazy|@noescape|@NSCopying|@NSManaged|@objc|@objcMembers|@convention|@required|@noreturn|@IBAction|@IBDesignable|@IBInspectable|@IBOutlet|@infix|@prefix|@postfix|@autoclosure|@testable|@available|@nonobjc|@NSApplicationMain|@UIApplicationMain|@dynamicMemberLookup|@propertyWrapper)"},{beginKeywords:"import",end:/$/,contains:[e.C_LINE_COMMENT_MODE,a]}]}}function L(e){var 
n={className:"symbol",begin:"&[a-z]+;|&#[0-9]+;|&#x[a-f0-9]+;"},a={begin:"\\s",contains:[{className:"meta-keyword",begin:"#?[a-z_][a-z1-9_-]+",illegal:"\\n"}]},t=e.inherit(a,{begin:"\\(",end:"\\)"}),i=e.inherit(e.APOS_STRING_MODE,{className:"meta-string"}),s=e.inherit(e.QUOTE_STRING_MODE,{className:"meta-string"}),r={endsWithParent:!0,illegal:/`]+/}]}]}]};return{aliases:["html","xhtml","rss","atom","xjb","xsd","xsl","plist","wsf","svg"],case_insensitive:!0,contains:[{className:"meta",begin:"",relevance:10,contains:[a,s,i,t,{begin:"\\[",end:"\\]",contains:[{className:"meta",begin:"",contains:[a,t,s,i]}]}]},e.COMMENT("\x3c!--","--\x3e",{relevance:10}),{begin:"<\\!\\[CDATA\\[",end:"\\]\\]>",relevance:10},n,{className:"meta",begin:/<\?xml/,end:/\?>/,relevance:10},{begin:/<\?(php)?/,end:/\?>/,subLanguage:"php",contains:[{begin:"/\\*",end:"\\*/",skip:!0},{begin:'b"',end:'"',skip:!0},{begin:"b'",end:"'",skip:!0},e.inherit(e.APOS_STRING_MODE,{illegal:null,className:null,contains:null,skip:!0}),e.inherit(e.QUOTE_STRING_MODE,{illegal:null,className:null,contains:null,skip:!0})]},{className:"tag",begin:")",end:">",keywords:{name:"style"},contains:[r],starts:{end:"",returnEnd:!0,subLanguage:["css","xml"]}},{className:"tag",begin:")",end:">",keywords:{name:"script"},contains:[r],starts:{end:"<\/script>",returnEnd:!0,subLanguage:["actionscript","javascript","handlebars","xml"]}},{className:"tag",begin:"",contains:[{className:"name",begin:/[^\/><\s]+/,relevance:0},r]}]}}function I(e){var n="true false yes no null",a={className:"string",relevance:0,variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/\S+/}],contains:[e.BACKSLASH_ESCAPE,{className:"template-variable",variants:[{begin:"{{",end:"}}"},{begin:"%{",end:"}"}]}]};return{case_insensitive:!0,aliases:["yml","YAML","yaml"],contains:[{className:"attr",variants:[{begin:"\\w[\\w :\\/.-]*:(?=[ \t]|$)"},{begin:'"\\w[\\w :\\/.-]*":(?=[ \t]|$)'},{begin:"'\\w[\\w :\\/.-]*':(?=[ 
\t]|$)"}]},{className:"meta",begin:"^---s*$",relevance:10},{className:"string",begin:"[\\|>]([0-9]?[+-])?[ ]*\\n( *)[\\S ]+\\n(\\2[\\S ]+\\n?)*"},{begin:"<%[%=-]?",end:"[%-]?%>",subLanguage:"ruby",excludeBegin:!0,excludeEnd:!0,relevance:0},{className:"type",begin:"!"+e.UNDERSCORE_IDENT_RE},{className:"type",begin:"!!"+e.UNDERSCORE_IDENT_RE},{className:"meta",begin:"&"+e.UNDERSCORE_IDENT_RE+"$"},{className:"meta",begin:"\\*"+e.UNDERSCORE_IDENT_RE+"$"},{className:"bullet",begin:"\\-(?=[ ]|$)",relevance:0},e.HASH_COMMENT_MODE,{beginKeywords:n,keywords:{literal:n}},{className:"number",begin:e.C_NUMBER_RE+"\\b"},a]}}var l,o,c={};l=function(t){var a,g=[],s=Object.keys,w=Object.create(null),r=Object.create(null),O=!0,l=/^(no-?highlight|plain|text)$/i,o=/\blang(?:uage)?-([\w-]+)\b/i,c=/((^(<[^>]+>|\t|)+|(?:\n)))/gm,T="",k="Could not find the language '{}', did you forget to load/include a language module?",M={classPrefix:"hljs-",tabReplace:null,useBR:!1,languages:void 0},d="of and for in not or if then".split(" ");function C(e){return e.replace(/&/g,"&").replace(//g,">")}function u(e){return e.nodeName.toLowerCase()}function m(e){return l.test(e)}function i(e){var n,a={},t=Array.prototype.slice.call(arguments,1);for(n in e)a[n]=e[n];return t.forEach(function(e){for(n in e)a[n]=e[n]}),a}function _(e){var i=[];return function e(n,a){for(var t=n.firstChild;t;t=t.nextSibling)3===t.nodeType?a+=t.nodeValue.length:1===t.nodeType&&(i.push({event:"start",offset:a,node:t}),a=e(t,a),u(t).match(/br|hr|img|input/)||i.push({event:"stop",offset:a,node:t}));return a}(e,0),i}function b(e,n,a){var t=0,i="",s=[];function r(){return e.length&&n.length?e[0].offset!==n[0].offset?e[0].offset"}function o(e){i+=""}function d(e){("start"===e.event?l:o)(e.node)}for(;e.length||n.length;){var 
c=r();if(i+=C(a.substring(t,c[0].offset)),t=c[0].offset,c===e){for(s.reverse().forEach(o);d(c.splice(0,1)[0]),(c=r())===e&&c.length&&c[0].offset===t;);s.reverse().forEach(l)}else"start"===c[0].event?s.push(c[0].node):s.pop(),d(c.splice(0,1)[0])}return i+C(a.substr(t))}function p(n){return n.variants&&!n.cached_variants&&(n.cached_variants=n.variants.map(function(e){return i(n,{variants:null},e)})),n.cached_variants||(function e(n){return!!n&&(n.endsWithParent||e(n.starts))}(n)?[i(n,{starts:n.starts?i(n.starts):null})]:Object.isFrozen(n)?[i(n)]:[n])}function f(e){if(a&&!e.langApiRestored){for(var n in e.langApiRestored=!0,a)e[n]&&(e[a[n]]=e[n]);(e.contains||[]).concat(e.variants||[]).forEach(f)}}function E(n,t){var i={};return"string"==typeof n?a("keyword",n):s(n).forEach(function(e){a(e,n[e])}),i;function a(a,e){(e=t?e.toLowerCase():e).split(" ").forEach(function(e){var n,e=e.split("|");i[e[0]]=[a,(n=e[0],(e=e[1])?Number(e):function(e){return-1!=d.indexOf(e.toLowerCase())}(n)?0:1)]})}}function A(t){function d(e){return e&&e.source||e}function g(e,n){return new RegExp(d(e),"m"+(t.case_insensitive?"i":"")+(n?"g":""))}function i(i){var s={},r=[],l={},a=1;function e(e,n){s[a]=e,r.push([e,n]),a+=new RegExp(n.toString()+"|").exec("").length-1+1}for(var n=0;n')+n+(a?"":T)}function m(){var e,n,a,t,i;if(!l.keywords)return C(c);for(a="",l.lexemesRe.lastIndex=e=0,n=l.lexemesRe.exec(c);n;)a+=C(c.substring(e,n.index)),t=l,i=n,i=r.case_insensitive?i[0].toLowerCase():i[0],(t=t.keywords.hasOwnProperty(i)&&t.keywords[i])?(d+=t[1],a+=s(t[0],C(n[0]))):a+=C(n[0]),e=l.lexemesRe.lastIndex,n=l.lexemesRe.exec(c);return a+C(c.substr(e))}function _(){o+=(null!=l.subLanguage?function(){var e="string"==typeof l.subLanguage;if(e&&!w[l.subLanguage])return C(c);var n=e?x(l.subLanguage,c,!0,N[l.subLanguage]):R(c,l.subLanguage.length?l.subLanguage:void 0);return 0")+'"');if("end"===n.type){e=f(n);if(null!=e)return e}return c+=a,a.length}var r=S(n);if(!r)throw console.error(k.replace("{}",n)),new 
Error('Unknown language: "'+n+'"');A(r);for(var l=u||r,N={},o="",e=l;e!==r;e=e.parent)e.className&&(o=s(e.className,"",!0)+o);var c="",d=0;try{for(var h,v,y=0;;){if(l.terminators.lastIndex=y,!(h=l.terminators.exec(t)))break;v=E(t.substring(y,h.index),h),y=h.index+v}for(E(t.substr(y)),e=l;e.parent;e=e.parent)e.className&&(o+=T);return{relevance:d,value:o,illegal:!1,language:n,top:l}}catch(e){if(e.message&&-1!==e.message.indexOf("Illegal"))return{illegal:!0,relevance:0,value:C(t)};if(O)return{relevance:0,value:C(t),language:n,top:l,errorRaised:e};throw e}}function R(a,e){e=e||M.languages||s(w);var t={relevance:0,value:C(a)},i=t;return e.filter(S).filter(y).forEach(function(e){var n=x(e,a,!1);n.language=e,n.relevance>i.relevance&&(i=n),n.relevance>t.relevance&&(i=t,t=n)}),i.language&&(t.second_best=i),t}function N(e){return M.tabReplace||M.useBR?e.replace(c,function(e,n){return M.useBR&&"\n"===e?"
":M.tabReplace?n.replace(/\t/g,M.tabReplace):""}):e}function h(e){var n,a,t,i,s=function(e){var n,a,t,i,s,r=e.className+" ";if(r+=e.parentNode?e.parentNode.className:"",a=o.exec(r))return(s=S(a[1]))||(console.warn(k.replace("{}",a[1])),console.warn("Falling back to no-highlight mode for this block.",e)),s?a[1]:"no-highlight";for(n=0,t=(r=r.split(/\s+/)).length;n/g,"\n"):a=e,i=a.textContent,n=s?x(s,i,!0):R(i),(a=_(a)).length&&((t=document.createElement("div")).innerHTML=n.value,n.value=b(a,_(t),i)),n.value=N(n.value),e.innerHTML=n.value,e.className=(a=e.className,t=s,i=n.language,t=t?r[t]:i,i=[a.trim()],a.match(/\bhljs\b/)||i.push("hljs"),-1===a.indexOf(t)&&i.push(t),i.join(" ").trim()),e.result={language:n.language,re:n.relevance},n.second_best&&(e.second_best={language:n.second_best.language,re:n.second_best.relevance}))}function n(){var e;n.called||(n.called=!0,e=document.querySelectorAll("pre code"),g.forEach.call(e,h))}var v={disableAutodetect:!0};function S(e){return e=(e||"").toLowerCase(),w[e]||w[r[e]]}function y(e){e=S(e);return e&&!e.disableAutodetect}return t.highlight=x,t.highlightAuto=R,t.fixMarkup=N,t.highlightBlock=h,t.configure=function(e){M=i(M,e)},t.initHighlighting=n,t.initHighlightingOnLoad=function(){window.addEventListener("DOMContentLoaded",n,!1),window.addEventListener("load",n,!1)},t.registerLanguage=function(n,e){var a;try{a=e(t)}catch(e){if(console.error("Language definition for '{}' could not be registered.".replace("{}",n)),!O)throw e;console.error(e),a=v}f(w[n]=a),a.rawDefinition=e.bind(null,t),a.aliases&&a.aliases.forEach(function(e){r[e]=n})},t.listLanguages=function(){return s(w)},t.getLanguage=S,t.requireLanguage=function(e){var n=S(e);if(n)return n;throw new Error("The '{}' language is required, but not 
loaded.".replace("{}",e))},t.autoDetection=y,t.inherit=i,t.debugMode=function(){O=!1},t.IDENT_RE="[a-zA-Z]\\w*",t.UNDERSCORE_IDENT_RE="[a-zA-Z_]\\w*",t.NUMBER_RE="\\b\\d+(\\.\\d+)?",t.C_NUMBER_RE="(-?)(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)",t.BINARY_NUMBER_RE="\\b(0b[01]+)",t.RE_STARTERS_RE="!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~",t.BACKSLASH_ESCAPE={begin:"\\\\[\\s\\S]",relevance:0},t.APOS_STRING_MODE={className:"string",begin:"'",end:"'",illegal:"\\n",contains:[t.BACKSLASH_ESCAPE]},t.QUOTE_STRING_MODE={className:"string",begin:'"',end:'"',illegal:"\\n",contains:[t.BACKSLASH_ESCAPE]},t.PHRASAL_WORDS_MODE={begin:/\b(a|an|the|are|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such|will|you|your|they|like|more)\b/},t.COMMENT=function(e,n,a){e=t.inherit({className:"comment",begin:e,end:n,contains:[]},a||{});return e.contains.push(t.PHRASAL_WORDS_MODE),e.contains.push({className:"doctag",begin:"(?:TODO|FIXME|NOTE|BUG|XXX):",relevance:0}),e},t.C_LINE_COMMENT_MODE=t.COMMENT("//","$"),t.C_BLOCK_COMMENT_MODE=t.COMMENT("/\\*","\\*/"),t.HASH_COMMENT_MODE=t.COMMENT("#","$"),t.NUMBER_MODE={className:"number",begin:t.NUMBER_RE,relevance:0},t.C_NUMBER_MODE={className:"number",begin:t.C_NUMBER_RE,relevance:0},t.BINARY_NUMBER_MODE={className:"number",begin:t.BINARY_NUMBER_RE,relevance:0},t.CSS_NUMBER_MODE={className:"number",begin:t.NUMBER_RE+"(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?",relevance:0},t.REGEXP_MODE={className:"regexp",begin:/\//,end:/\/[gimuy]*/,illegal:/\n/,contains:[t.BACKSLASH_ESCAPE,{begin:/\[/,end:/\]/,relevance:0,contains:[t.BACKSLASH_ESCAPE]}]},t.TITLE_MODE={className:"title",begin:t.IDENT_RE,relevance:0},t.UNDERSCORE_TITLE_MODE={className:"title",begin:t.UNDERSCORE_IDENT_RE,relevance:0},t.METHOD_GUARD={begin:"\\.\\s*"+t.UNDERSCORE_IDENT_RE,r
elevance:0},[t.BACKSLASH_ESCAPE,t.APOS_STRING_MODE,t.QUOTE_STRING_MODE,t.PHRASAL_WORDS_MODE,t.COMMENT,t.C_LINE_COMMENT_MODE,t.C_BLOCK_COMMENT_MODE,t.HASH_COMMENT_MODE,t.NUMBER_MODE,t.C_NUMBER_MODE,t.BINARY_NUMBER_MODE,t.CSS_NUMBER_MODE,t.REGEXP_MODE,t.TITLE_MODE,t.UNDERSCORE_TITLE_MODE,t.METHOD_GUARD].forEach(function(e){!function n(a){Object.freeze(a);var t="function"==typeof a;Object.getOwnPropertyNames(a).forEach(function(e){!a.hasOwnProperty(e)||null===a[e]||"object"!=typeof a[e]&&"function"!=typeof a[e]||t&&("caller"===e||"callee"===e||"arguments"===e)||Object.isFrozen(a[e])||n(a[e])});return a}(e)}),t},o="object"==typeof window&&window||"object"==typeof self&&self,void 0===c||c.nodeType?o&&(o.hljs=l({}),"function"==typeof define&&define.amd&&define([],function(){return o.hljs})):l(c);!function(){"use strict";c.registerLanguage("asciidoc",e),c.registerLanguage("bash",n),c.registerLanguage("clojure",a),c.registerLanguage("cpp",t),c.registerLanguage("cs",i),c.registerLanguage("css",s),c.registerLanguage("diff",r),c.registerLanguage("dockerfile",d),c.registerLanguage("elixir",g),c.registerLanguage("go",u),c.registerLanguage("groovy",m),c.registerLanguage("haskell",_),c.registerLanguage("java",b),c.registerLanguage("javascript",p),c.registerLanguage("json",f),c.registerLanguage("kotlin",E),c.registerLanguage("markdown",N),c.registerLanguage("nix",h),c.registerLanguage("none",v),c.registerLanguage("objectivec",y),c.registerLanguage("perl",w),c.registerLanguage("php",O),c.registerLanguage("properties",M),c.registerLanguage("puppet",C),c.registerLanguage("python",x),c.registerLanguage("ruby",S),c.registerLanguage("rust",T),c.registerLanguage("scala",k),c.registerLanguage("shell",A),c.registerLanguage("sql",R),c.registerLanguage("swift",D),c.registerLanguage("xml",L),c.registerLanguage("yaml",I),[].slice.call(document.querySelectorAll("pre code.hljs")).forEach(function(e){c.highlightBlock(e)})}()}(); \ No newline at end of file diff --git 
a/docs/architecture-guidelines/1.0/index.html b/docs/architecture-guidelines/1.0/index.html new file mode 100644 index 00000000..3e78795f --- /dev/null +++ b/docs/architecture-guidelines/1.0/index.html @@ -0,0 +1,269 @@ + + + + + + Architecture Guidelines :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+

Architecture Guidelines

+
+

Architecture guidelines explained here.

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cicdgen/1.0/Home.html b/docs/cicdgen/1.0/Home.html new file mode 100644 index 00000000..89022def --- /dev/null +++ b/docs/cicdgen/1.0/Home.html @@ -0,0 +1,211 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

CICDGEN

+
+
+

cicdgen is a devonfw tool for generating all code/files related to CICD. It will include/modify into your project all files that the project needs to run a Jenkins cicd pipeline, to create a docker image based on your project, etc. It’s based on angular schematics, so you can add it as a dependency into your project and generate the code using ng generate. In addition, it has its own CLI for those projects that are not angular based.

+
+
+

What is angular schematics?

+
+
+

Schematics are generators that transform an existing filesystem. They can create files, refactor existing files, or move files from one place to another.

+
+
+

What distinguishes Schematics from other generators, such as Yeoman or Yarn Create, is that schematics are purely descriptive; no changes are applied to the actual filesystem until everything is ready to be committed. There is no side effect, by design, in Schematics.

+
+
+
+
+

cicdgen CLI

+
+
+

In order to know more about how to use the cicdgen CLI, you can check the CLI page

+
+
+
+
+

cicdgen Schematics

+
+
+

In order to know more about how to use the cicdgen schematics, you can check the schematics page

+
+
+
+
+

Usage example

+
+
+

A specific page about how to use cicdgen is also available.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cicdgen/1.0/_images/images/example/chrome-stable.png b/docs/cicdgen/1.0/_images/images/example/chrome-stable.png new file mode 100644 index 00000000..68f662c5 Binary files /dev/null and b/docs/cicdgen/1.0/_images/images/example/chrome-stable.png differ diff --git a/docs/cicdgen/1.0/_images/images/example/cicdgen-command.png b/docs/cicdgen/1.0/_images/images/example/cicdgen-command.png new file mode 100644 index 00000000..31301861 Binary files /dev/null and b/docs/cicdgen/1.0/_images/images/example/cicdgen-command.png differ diff --git a/docs/cicdgen/1.0/_images/images/example/docker-global.png b/docs/cicdgen/1.0/_images/images/example/docker-global.png new file mode 100644 index 00000000..70c99574 Binary files /dev/null and b/docs/cicdgen/1.0/_images/images/example/docker-global.png differ diff --git a/docs/cicdgen/1.0/_images/images/example/gitlab-2.png b/docs/cicdgen/1.0/_images/images/example/gitlab-2.png new file mode 100644 index 00000000..2cf98212 Binary files /dev/null and b/docs/cicdgen/1.0/_images/images/example/gitlab-2.png differ diff --git a/docs/cicdgen/1.0/_images/images/example/gitlab-webhook.png b/docs/cicdgen/1.0/_images/images/example/gitlab-webhook.png new file mode 100644 index 00000000..5d12afb5 Binary files /dev/null and b/docs/cicdgen/1.0/_images/images/example/gitlab-webhook.png differ diff --git a/docs/cicdgen/1.0/_images/images/example/gitlab.png b/docs/cicdgen/1.0/_images/images/example/gitlab.png new file mode 100644 index 00000000..bcf569f3 Binary files /dev/null and b/docs/cicdgen/1.0/_images/images/example/gitlab.png differ diff --git a/docs/cicdgen/1.0/_images/images/example/global-settings-id.png b/docs/cicdgen/1.0/_images/images/example/global-settings-id.png new file mode 100644 index 00000000..75aa37b9 Binary files /dev/null and b/docs/cicdgen/1.0/_images/images/example/global-settings-id.png differ diff --git a/docs/cicdgen/1.0/_images/images/example/help-1.png 
b/docs/cicdgen/1.0/_images/images/example/help-1.png new file mode 100644 index 00000000..eb387525 Binary files /dev/null and b/docs/cicdgen/1.0/_images/images/example/help-1.png differ diff --git a/docs/cicdgen/1.0/_images/images/example/help-2.png b/docs/cicdgen/1.0/_images/images/example/help-2.png new file mode 100644 index 00000000..56edeb48 Binary files /dev/null and b/docs/cicdgen/1.0/_images/images/example/help-2.png differ diff --git a/docs/cicdgen/1.0/_images/images/example/maven-installation.png b/docs/cicdgen/1.0/_images/images/example/maven-installation.png new file mode 100644 index 00000000..401d01cd Binary files /dev/null and b/docs/cicdgen/1.0/_images/images/example/maven-installation.png differ diff --git a/docs/cicdgen/1.0/_images/images/example/new-pipeline.png b/docs/cicdgen/1.0/_images/images/example/new-pipeline.png new file mode 100644 index 00000000..5c0d365e Binary files /dev/null and b/docs/cicdgen/1.0/_images/images/example/new-pipeline.png differ diff --git a/docs/cicdgen/1.0/_images/images/example/push-code.png b/docs/cicdgen/1.0/_images/images/example/push-code.png new file mode 100644 index 00000000..1a816516 Binary files /dev/null and b/docs/cicdgen/1.0/_images/images/example/push-code.png differ diff --git a/docs/cicdgen/1.0/_images/images/example/repository-id.png b/docs/cicdgen/1.0/_images/images/example/repository-id.png new file mode 100644 index 00000000..6a99f8b7 Binary files /dev/null and b/docs/cicdgen/1.0/_images/images/example/repository-id.png differ diff --git a/docs/cicdgen/1.0/_images/images/example/sonar-env.png b/docs/cicdgen/1.0/_images/images/example/sonar-env.png new file mode 100644 index 00000000..47d4ba81 Binary files /dev/null and b/docs/cicdgen/1.0/_images/images/example/sonar-env.png differ diff --git a/docs/cicdgen/1.0/_images/images/example/sonar-tool.png b/docs/cicdgen/1.0/_images/images/example/sonar-tool.png new file mode 100644 index 00000000..ad15e518 Binary files /dev/null and 
b/docs/cicdgen/1.0/_images/images/example/sonar-tool.png differ diff --git a/docs/cicdgen/1.0/_images/images/example/teams-1.png b/docs/cicdgen/1.0/_images/images/example/teams-1.png new file mode 100644 index 00000000..afafd9b0 Binary files /dev/null and b/docs/cicdgen/1.0/_images/images/example/teams-1.png differ diff --git a/docs/cicdgen/1.0/_images/images/example/teams-2.png b/docs/cicdgen/1.0/_images/images/example/teams-2.png new file mode 100644 index 00000000..8636fb07 Binary files /dev/null and b/docs/cicdgen/1.0/_images/images/example/teams-2.png differ diff --git a/docs/cicdgen/1.0/_images/images/example/teams-3.png b/docs/cicdgen/1.0/_images/images/example/teams-3.png new file mode 100644 index 00000000..8de6fec2 Binary files /dev/null and b/docs/cicdgen/1.0/_images/images/example/teams-3.png differ diff --git a/docs/cicdgen/1.0/_images/images/merge-combine-vscode.png b/docs/cicdgen/1.0/_images/images/merge-combine-vscode.png new file mode 100644 index 00000000..46871bb7 Binary files /dev/null and b/docs/cicdgen/1.0/_images/images/merge-combine-vscode.png differ diff --git a/docs/cicdgen/1.0/cicdgen-cli.html b/docs/cicdgen/1.0/cicdgen-cli.html new file mode 100644 index 00000000..23714ca1 --- /dev/null +++ b/docs/cicdgen/1.0/cicdgen-cli.html @@ -0,0 +1,377 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

CICDGEN CLI

+
+
+

cicdgen is a command line interface that helps you with some CICD tasks in a devonfw project. At this moment we can only generate files related to CICD in a project, but we plan to add more functionality in the future.

+
+
+

Installation

+
+
+
$ npm i -g @devonfw/cicdgen
+
+
+
+
+

Usage

+
+

Global arguments

+
+
    +
  • +

    --version

    +
    +
    +
    Prints the cicdgen version number
    +
    +
    +
  • +
  • +

    --help

    +
    +
    +
    Shows the usage of the command
    +
    +
    +
  • +
+
+
+
+

Commands

+
+
Generate.
+
+

This command wraps the usage of the angular schematics CLI. With this we generate files in an easy way and also print better help about usage.

+
+
+

Available schematics that generate the code:

+
+
+ +
+
+
+
+

Examples

+
+
    +
  • +

    Generate all CICD files related to a devon4j project

    +
    +
    +
    $ cicdgen generate devon4j
    +
    +
    +
  • +
  • +

    Generate all CICD files related to a devon4ng project with docker deployment.

    +
    +
    +
    $ cicdgen generate devon4ng --groupid com.devonfw --docker --registryurl docker-registry-devon.s2-eu.capgemini.com
    +
    +
    +
  • +
  • +

    Generate all CICD files related to a devon4node project with OpenShift deployment.

    +
    +
    +
    $ cicdgen generate devon4ng --groupid com.devonfw --openshift --registryurl docker-registry-devon.s2-eu.capgemini.com --ocname default --ocn devonfw
    +
    +
    +
  • +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cicdgen/1.0/cicdgen-schematics.html b/docs/cicdgen/1.0/cicdgen-schematics.html new file mode 100644 index 00000000..b196bc2d --- /dev/null +++ b/docs/cicdgen/1.0/cicdgen-schematics.html @@ -0,0 +1,338 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

CICDGEN SCHEMATICS

+
+
+

We use angular schematics to create and update an existing devonfw project in order to adapt it to a CICD environment. All schematics are prepared to work with Production Line, a Capgemini CICD platform, but they can also work in other environments which have the following tools:

+
+
+
    +
  • +

    Jenkins

    +
  • +
  • +

    GitLab

    +
  • +
  • +

    Nexus 3

    +
  • +
  • +

    SonarQube

    +
  • +
+
+
+

The list of available schematics are:

+
+
+ +
+
+

How to run the schematics

+
+

You can run the schematics using the schematics CLI provided by the angular team, but the easiest way to run them is using the cicdgen CLI, which is a wrapper for the schematics CLI in order to use it in an easy way.

+
+
+

To generate files you only need to run the command

+
+
+
+
$ cicdgen generate <schematic-name> [arguments]
+
+
+
+

<schematic-name> is the name of the schematic that you want to execute.

+
+
+

You can find all information about arguments in the schematic section.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cicdgen/1.0/devon4j-schematic.html b/docs/cicdgen/1.0/devon4j-schematic.html new file mode 100644 index 00000000..33a2036b --- /dev/null +++ b/docs/cicdgen/1.0/devon4j-schematic.html @@ -0,0 +1,581 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4j schematic

+
+
+

With the cicdgen generate devon4j command you will be able to generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+

devon4j schematic arguments

+
+

When you execute the cicdgen generate devon4j command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and the pipeline stage will also be generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and the pipeline stage will also be generated. For more details see OpenShift section of Jenkinsfile and files generated for docker (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used for register your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
  • +

    --merge

    +
    +

    If you have used cicdgen previously, you can choose what you want to do in case of file conflict. The default behavior is to throw an error and not modify any file. You can see the other strategies on their specific page.

    +
    +
  • +
  • +

    --commit

    +
    +

    If true, all changes will be committed at the end of the process (if possible). In order to send a false value, you need to write --commit=false

    +
    +
  • +
+
+
+
+

Devon4ng generated files

+
+

When you execute the generate devon4ng command, some files will be added/updated in your project.

+
+
+

Files

+
+
    +
  • +

    .gitignore

    +
    +

    Defines all files that git will ignore. e.g: compiled files, IDE configurations. It will download the content from: https://gitignore.io/api/java,maven,eclipse,intellij,intellij+all,intellij+iml,visualstudiocode

    +
    +
  • +
  • +

    pom.xml

    +
    +

    The pom.xml is modified in order to add, if needed, the distributionManagement.

    +
    +
  • +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which define the Jenkins pipeline of our project. With this we can execute the test, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case you may need to configure them properly.

        +
      • +
      • +

        Java 11 installed in Jenkins as a global tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker to deploy:

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        • +

          A docker network called application.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift to deploy:

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkins, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarder to delete old artifacts/builds of the pipeline and we also disable concurrent builds.

        +
        +

        If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update them manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: Load some custom tools that can not be loaded in the tools section. Also set some variables depending on the git branch which you are executing. Also, we set the version number properly in all pom files. It means that if your branch is develop, your version should end with the word -SNAPSHOT; otherwise, if -SNAPSHOT is present it will be removed.

          +
        • +
        • +

          Fresh Dependency Installation: install all packages needed to build/run your java project.

          +
        • +
        • +

          Unit Tests: execute the mvn test command.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Deliver application into Nexus: build the project and send all bundle files to Nexus3.

          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
            +

            +
            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+

devon4j Docker generated files

+
+

When you generate the files for a devon4ng you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4node --groupid com.devonfw --docker --registryurl docker-registry-test.s2-eu.capgemini.com. +
+
+
+

Files

+
+
    +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes; you can use it on your machine by executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
    +

    This build is using a multi-stage build. First, it uses a maven image in order to compile the source code, then it uses a java image to run the application. With the multi-stage build we keep the final image as clean as possible.

    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will only be used in the Jenkins pipeline. Instead of compiling the code again, it takes the compiled war from Jenkins into the image.

    +
    +
  • +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cicdgen/1.0/devon4j/devon4j-schematic.html b/docs/cicdgen/1.0/devon4j/devon4j-schematic.html new file mode 100644 index 00000000..428e7e98 --- /dev/null +++ b/docs/cicdgen/1.0/devon4j/devon4j-schematic.html @@ -0,0 +1,557 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Devon4j schematic

+
+
+

With the cicdgen generate devon4j command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+

Devon4j schematic arguments

+
+

When you execute the cicdgen generate devon4j command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and the pipeline stage will also be generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --plurl

    +
    +

    Url of Production Line. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and the pipeline stage will also be generated. For more details see OpenShift section of Jenkinsfile and files generated for docker (same as --docker)

    +
    +
  • +
  • +

    --ocurl

    +
    +

    OpenShift cluster url where the application will be built and deployed.

    +
    +
  • +
  • +

    --ocn

    +
    +

    Openshift cluster namespace

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
+
+
+
+

Devon4ng generated files

+
+

When you execute the generate devon4ng command, some files will be added/updated in your project.

+
+
+

Files

+
+
    +
  • +

    .gitignore

    +
    +

    Defines all files that git will ignore. e.g: compiled files, IDE configurations.

    +
    +
  • +
  • +

    pom.xml

    +
    +

    The pom.xml is modified in order to add the distributionManagement.

    +
    +
  • +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which define the Jenkins pipeline of our project. With this we can execute the test, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case you may need to configure them properly.

        +
      • +
      • +

        Java installed in Jenkins as a global tool.

        +
      • +
      • +

        Google Chrome installed in Jenkins as a global custom tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker to deploy:

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        • +

          A docker network called application.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift to deploy:

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkins, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarder to delete old artifacts/builds of the pipeline and we also disable concurrent builds.

        +
        +

        If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update them manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Setup pipeline: We set some variables depending on the git branch which you are executing. Also, we set the version number properly in all pom files. It means that if your branch is develop, your version should end with the word -SNAPSHOT; otherwise, if -SNAPSHOT is present it will be removed.

          +
        • +
        • +

          Fresh Dependency Installation: install all packages needed to build/run your java project.

          +
        • +
        • +

          Unit Tests: execute the mvn test command.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Deliver application into Nexus: build the project and send all bundle files to Nexus3.

          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+

Devon4j Docker generated files

+
+

When you generate the files for a devon4j project you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --plurl is also required. It will be used to upload the images to the Nexus3 inside Production Line. Example: if your PL url is test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4ng --groupid com.devonfw --docker --plurl test.s2-eu.capgemini.com, and it will use docker-registry-test.s2-eu.capgemini.com as docker registry. +
+
+
+

Files

+
+
    +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
    +

    This build is using a multi-stage build. First, it uses a maven image in order to compile the source code, then it uses a java image to run the application. With the multi-stage build we keep the final image as clean as possible.

    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling again the code, it takes the compiled war from Jenkins to the image.

    +
    +
  • +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cicdgen/1.0/devon4net-schematic.html b/docs/cicdgen/1.0/devon4net-schematic.html new file mode 100644 index 00000000..b3866af9 --- /dev/null +++ b/docs/cicdgen/1.0/devon4net-schematic.html @@ -0,0 +1,597 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4net schematic

+
+
+

With the cicdgen generate devon4net command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+

devon4net schematic arguments

+
+

When you execute the cicdgen generate devon4net command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --appname

    +
    +

    The name of your devon4net application.

    +
    +
  • +
  • +

    --appversion

    +
    +

    The initial version of your devon4net application

    +
    +
  • +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for OpenShift (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used to register your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --groupid

    +
    +

    The project groupId. This argument is required. It will be used to store the project in a maven repository at Nexus 3. Why maven? Because it is the kind of repository where we can upload/download a zip file easily. An npm repository needs a package.json file but, as we compile the angular application to static javascript and html files, the package.json is not needed anymore.

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
  • +

    --merge

    +
    +

    If you have used cicdgen previously, you can choose what you want to do in case of file conflict. The default behavior is to throw an error and not modify any file. You can see the other strategies on their specific page.

    +
    +
  • +
  • +

    --commit

    +
    +

    If true, all changes will be committed at the end of the process (if possible). In order to send a false value, you need to write --commit=false

    +
    +
  • +
+
+
+
+

devon4net generated files

+
+

When you execute the generate devon4net command, some files will be added/updated in your project.

+
+
+

Files

+
+
    +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which defines the Jenkins pipeline of our project. With this we can execute the test, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        dotnet core installed in Jenkins as a global tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker :

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift :

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkinsfile, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarder to delete old artifacts/builds of the pipeline and also we disable the concurrent builds.

        +
        +

        If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        tools: Here we define the global tools configurations. By default a version of nodejs is added here.

        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update them manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: in this stage some custom tools are loaded. Also we set some variables depending on the git branch which you are executing.

          +
        • +
        • +

          Fresh Dependency Installation: install all dependencies needed to build/run your dotnet project.

          +
        • +
        • +

          Execute dotnet tests: execute the tests.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Build Application: compile the application to be ready to deploy in a web server.

          +
        • +
        • +

          Deliver application into Nexus: store all compiled files in Nexus3 as a zip file.

          +
          +

          [[jenkinsfile-docker]]

          +
          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
            +

            [[jenkinsfile-openshift]]

            +
            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+

devon4net Docker generated files

+
+

When you generate the files for devon4net you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4net --groupid com.devonfw --docker --registryurl docker-registry-test.s2-eu.capgemini.com. +
+
+
+

Files

+
+
    +
  • +

    .dockerignore

    +
    +

    In this file are defined the folders that will not be copied to the docker image. For more information read the official documentation.

    +
    +
  • +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling again the code, it takes all compiled files from Jenkins to the image.

    +
    +
  • +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cicdgen/1.0/devon4net.html b/docs/cicdgen/1.0/devon4net.html new file mode 100644 index 00000000..91666050 --- /dev/null +++ b/docs/cicdgen/1.0/devon4net.html @@ -0,0 +1,578 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4net schematic

+
+
+

With the cicdgen generate devon4net command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+

devon4net schematic arguments

+
+

When you execute the cicdgen generate devon4net command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --appname

    +
    +

    The name of your devon4net application.

    +
    +
  • +
  • +

    --appversion

    +
    +

    The initial version of your devon4net application

    +
    +
  • +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for OpenShift (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used to register your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --groupid

    +
    +

    The project groupId. This argument is required. It will be used to store the project in a maven repository at Nexus 3. Why maven? Because it is the kind of repository where we can upload/download a zip file easily. An npm repository needs a package.json file but, as we compile the angular application to static javascript and html files, the package.json is not needed anymore.

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
+
+
+
+

devon4net generated files

+
+

When you execute the generate devon4net command, some files will be added/updated in your project.

+
+
+

Files

+
+
    +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which defines the Jenkins pipeline of our project. With this we can execute the test, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        dotnet core installed in Jenkins as a global tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker :

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift :

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkinsfile, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarder to delete old artifacts/builds of the pipeline and also we disable the concurrent builds.

        +
        +

        If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        tools: Here we define the global tools configurations. By default a version of nodejs is added here.

        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update them manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: in this stage some custom tools are loaded. Also we set some variables depending on the git branch which you are executing.

          +
        • +
        • +

          Fresh Dependency Installation: install all dependencies needed to build/run your dotnet project.

          +
        • +
        • +

          Execute dotnet tests: execute the tests.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Build Application: compile the application to be ready to deploy in a web server.

          +
        • +
        • +

          Deliver application into Nexus: store all compiled files in Nexus3 as a zip file.

          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+

devon4net Docker generated files

+
+

When you generate the files for devon4net you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4net --groupid com.devonfw --docker --registryurl docker-registry-test.s2-eu.capgemini.com. +
+
+
+

Files

+
+
    +
  • +

    .dockerignore

    +
    +

    In this file are defined the folders that will not be copied to the docker image. For more information read the official documentation.

    +
    +
  • +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling again the code, it takes all compiled files from Jenkins to the image.

    +
    +
  • +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cicdgen/1.0/devon4ng-schematic.html b/docs/cicdgen/1.0/devon4ng-schematic.html new file mode 100644 index 00000000..22e4d5a3 --- /dev/null +++ b/docs/cicdgen/1.0/devon4ng-schematic.html @@ -0,0 +1,615 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4ng schematic

+
+
+

With the cicdgen generate devon4ng command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+

devon4ng schematic arguments

+
+

When you execute the cicdgen generate devon4ng command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for OpenShift (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used to register your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --groupid

    +
    +

    The project groupId. This argument is required. It will be used to store the project in a maven repository at Nexus 3. Why maven? Because it is the kind of repository where we can upload/download a zip file easily. An npm repository needs a package.json file but, as we compile the angular application to static javascript and html files, the package.json is not needed anymore.

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
  • +

    --merge

    +
    +

    If you have used cicdgen previously, you can choose what you want to do in case of file conflict. The default behavior is to throw an error and not modify any file. You can see the other strategies on their specific page.

    +
    +
  • +
  • +

    --commit

    +
    +

    If true, all changes will be committed at the end of the process (if possible). In order to send a false value, you need to write --commit=false

    +
    +
  • +
+
+
+
+

devon4ng generated files

+
+

When you execute the generate devon4ng command, some files will be added/updated in your project.

+
+
+

Files

+
+
    +
  • +

    angular.json

    +
    +

    The angular.json is modified in order to change the compiled files destination folder. Now, when you make a build of your project, the compiled files will be generated into dist folder instead of dist/<project-name> folder.

    +
    +
  • +
  • +

    package.json

    +
    +

    The package.json is modified in order to add a script to test the application using Chrome Headless instead of a regular chrome. This script is called test:ci.

    +
    +
  • +
  • +

    karma.conf.js

    +
    +

    The karma.conf.js is also modified in order to add the Chrome Headless as a browser to execute tests. The coverage output folder is changed to ./coverage instead of ./coverage/<project-name>

    +
    +
  • +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which defines the Jenkins pipeline of our project. With this we can execute the test, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        NodeJS installed in Jenkins as a global tool.

        +
      • +
      • +

        Google Chrome installed in Jenkins as a global custom tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker :

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        • +

          A docker network called application.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift :

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkinsfile, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarder to delete old artifacts/builds of the pipeline and also we disable the concurrent builds.

        +
        +

        If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        tools: Here we define the global tools configurations. By default a version of nodejs is added here.

        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update them manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: in this stage some custom tools are loaded. Also we set some variables depending on the git branch which you are executing.

          +
        • +
        • +

          Fresh Dependency Installation: install all packages needed to build/run your angular project.

          +
        • +
        • +

          Code Linting: execute the linter analysis.

          +
        • +
        • +

          Execute Angular tests: execute the angular test in a Chrome Headless.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Build Application: compile the application to be ready to deploy in a web server.

          +
        • +
        • +

          Deliver application into Nexus: store all compiled files in Nexus3 as a zip file.

          +
          +

          [[jenkinsfile-docker]]

          +
          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
            +

            [[jenkinsfile-openshift]]

            +
            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+

devon4ng Docker generated files

+
+

When you generate the files for a devon4ng you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4node --groupid com.devonfw --docker `--registryurl docker-registry-test.s2-eu.capgemini.com`. +
+
+
+

Files

+
+
    +
  • +

    .dockerignore

    +
    +

    In this file are defined the folders that will not be copied to the docker image. For more information read the official documentation.

    +
    +
  • +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
    +

    This build is using a multi-stage build. First, it uses a node image in order to compile the source code, then it will use a nginx image as a web server for our devon4ng application. With the multi-stage build we avoid everything related to node.js in our final image, where we only have a nginx with our application compiled.

    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling the code again, it takes all compiled files and the nginx.conf from Jenkins to the image.

    +
    +
  • +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cicdgen/1.0/devon4ng/devon4ng-schematic.html b/docs/cicdgen/1.0/devon4ng/devon4ng-schematic.html new file mode 100644 index 00000000..4dcf2734 --- /dev/null +++ b/docs/cicdgen/1.0/devon4ng/devon4ng-schematic.html @@ -0,0 +1,590 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Devon4ng schematic

+
+
+

With the cicdgen generate devon4ng command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+

Devon4ng schematic arguments

+
+

When you execute the cicdgen generate devon4ng command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --plurl

    +
    +

    Url of Production Line. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for OpenShift (same as --docker)

    +
    +
  • +
  • +

    --ocurl

    +
    +

    OpenShift cluster url where the application will be built and deployed.

    +
    +
  • +
  • +

    --ocn

    +
    +

    Openshift cluster namespace

    +
    +
  • +
  • +

    --groupid

    +
    +

    The project groupId. This argument is required. It will be used to store the project in a maven repository at Nexus 3. Why maven? Because it is the kind of repository where we can upload/download a zip file easily. An npm repository needs a package.json file but, as we compile the angular application to static javascript and html files, the package.json is not needed anymore.

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
+
+
+
+

Devon4ng generated files

+
+

When you execute the generate devon4ng command, some files will be added/updated in your project.

+
+
+

Files

+
+
    +
  • +

    angular.json

    +
    +

    The angular.json is modified in order to change the compiled files destination folder. Now, when you make a build of your project, the compiled files will be generated into dist folder instead of dist/<project-name> folder.

    +
    +
  • +
  • +

    package.json

    +
    +

    The package.json is modified in order to add a script to test the application using Chrome Headless instead of a regular Chrome. This script is called test:ci.

    +
    +
  • +
  • +

    karma.conf.js

    +
    +

    The karma.conf.js is also modified in order to add the Chrome Headless as a browser to execute tests. The coverage output folder is changed to ./coverage instead of ./coverage/<project-name>

    +
    +
  • +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which defines the Jenkins pipeline of our project. With this we can execute the tests, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        NodeJS installed in Jenkins as a global tool.

        +
      • +
      • +

        Google Chrome installed in Jenkins as a global custom tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker :

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        • +

          A docker network called application.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift :

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkins, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarder to delete old artifacts/builds of the pipeline and also we disable the concurrent builds.

        +
        +

        If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        tools: Here we define the global tools configurations. By default a version of nodejs is added here.

        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update them manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: in this stage some custom tools are loaded. Also we set some variables depending on the git branch which you are executing.

          +
        • +
        • +

          Fresh Dependency Installation: install all packages needed to build/run your angular project.

          +
        • +
        • +

          Code Linting: execute the linter analysis.

          +
        • +
        • +

          Execute Angular tests: execute the angular test in a Chrome Headless.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Build Application: compile the application to be ready to deploy in a web server.

          +
        • +
        • +

          Deliver application into Nexus: store all compiled files in Nexus3 as a zip file.

          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+

Devon4ng Docker generated files

+
+

When you generate the files for a devon4ng you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --plurl is also required. It will be used to upload the images to the Nexus3 inside Production Line. Example: if your PL url is test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4ng --groupid com.devonfw --docker --plurl test.s2-eu.capgemini.com, and it will use docker-registry-test.s2-eu.capgemini.com as docker registry. +
+
+
+

Files

+
+
    +
  • +

    .dockerignore

    +
    +

    In this file are defined the folders that will not be copied to the docker image. For more information read the official documentation.

    +
    +
  • +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
    +

    This build is using a multi-stage build. First, it uses a node image in order to compile the source code, then it will use a nginx image as a web server for our devon4ng application. With the multi-stage build we avoid everything related to node.js in our final image, where we only have a nginx with our application compiled.

    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling the code again, it takes all compiled files and the nginx.conf from Jenkins to the image.

    +
    +
  • +
  • +

    nginx.conf

    +
    +

    Configuration file for our nginx server. It defines the root folder of our application where docker copies the files to. Also it defines a fallback route to the index as described in the angular deployment guide in order to enable the angular routes.

    +
    +
  • +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cicdgen/1.0/devon4node-schematic.html b/docs/cicdgen/1.0/devon4node-schematic.html new file mode 100644 index 00000000..b8aa2a1b --- /dev/null +++ b/docs/cicdgen/1.0/devon4node-schematic.html @@ -0,0 +1,603 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4node schematic

+
+
+

With the cicdgen generate devon4node command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+

devon4node schematic arguments

+
+

When you execute the cicdgen generate devon4node command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for OpenShift (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used for register your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --groupid

    +
    +

    The project groupId. This argument is required. It will be used to store the project in a maven repository at Nexus 3. Why maven? Because it is the kind of repository where we can upload/download a zip file easily. An npm repository needs a package.json file but, as we compile the application to static javascript files, the package.json is not needed anymore.

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
  • +

    --merge

    +
    +

    If you have used cicdgen previously, you can choose what you want to do in case of file conflict. The default behavior is to throw an error and not modify any file. You can see the other strategies on their specific page.

    +
    +
  • +
  • +

    --commit

    +
    +

    If true, all changes will be committed at the end of the process (if possible). In order to send a false value, you need to write --commit=false

    +
    +
  • +
+
+
+
+

devon4node generated files

+
+

When you execute the generate devon4node command, some files will be added/updated in your project.

+
+
+

Files

+
+
    +
  • +

    package.json

    +
    +

    The package.json is modified in order to add a script to run the linter and generate the json report. This script is called lint:ci.

    +
    +
  • +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which define the Jenkins pipeline of our project. With this we can execute the test, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        NodeJS installed in Jenkins as a global tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker :

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift :

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkins, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarder to delete old artifacts/builds of the pipeline and also we disable the concurrent builds.

        +
        +

        [[jenkinsfile-teams]] +If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        tools: Here we define the global tools configurations. By default a version of nodejs is added here.

        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update them manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: in this stage some custom tools are loaded. Also we set some variables depending on the git branch which you are executing.

          +
        • +
        • +

          Fresh Dependency Installation: install all packages needed to build/run your node project.

          +
        • +
        • +

          Code Linting: execute the linter analysis.

          +
        • +
        • +

          Execute tests: execute the tests.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Build Application: compile the application to be ready to deploy in a web server.

          +
        • +
        • +

          Deliver application into Nexus: store all compiled files in Nexus3 as a zip file.

          +
          +

          [[jenkinsfile-docker]]

          +
          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
            +

            [[jenkinsfile-openshift]]

            +
            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+

devon4node Docker generated files

+
+

When you generate the files for a devon4node you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4node --groupid com.devonfw --docker `--registryurl docker-registry-test.s2-eu.capgemini.com`. +
+
+
+

Files

+
+
    +
  • +

    .dockerignore

    +
    +

    In this file are defined the folders that will not be copied to the docker image. For more information read the official documentation.

    +
    +
  • +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
    +

    This build installs all dependencies in order to build the project and then removes all devDependencies in order to keep only the production dependencies.

    +
    +
  • +
  • +

    .dockerignore.ci

    +
    +

    Another .dockerignore. The purpose of this one is to define the file exclusions in your CI pipeline.

    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling the code again, it takes all compiled files from Jenkins to the image.

    +
    +
  • +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cicdgen/1.0/devon4node/devon4node-schematic.html b/docs/cicdgen/1.0/devon4node/devon4node-schematic.html new file mode 100644 index 00000000..727b93a2 --- /dev/null +++ b/docs/cicdgen/1.0/devon4node/devon4node-schematic.html @@ -0,0 +1,578 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Devon4node schematic

+
+
+

With the cicdgen generate devon4node command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+

Devon4node schematic arguments

+
+

When you execute the cicdgen generate devon4node command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --plurl

    +
    +

    Url of Production Line. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for OpenShift (same as --docker)

    +
    +
  • +
  • +

    --ocurl

    +
    +

    OpenShift cluster url where the application will be built and deployed.

    +
    +
  • +
  • +

    --ocn

    +
    +

    Openshift cluster namespace

    +
    +
  • +
  • +

    --groupid

    +
    +

    The project groupId. This argument is required. It will be used to store the project in a maven repository at Nexus 3. Why maven? Because it is the kind of repository where we can upload/download a zip file easily. An npm repository needs a package.json file but, as we compile the application to static javascript files, the package.json is not needed anymore.

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
+
+
+
+

Devon4node generated files

+
+

When you execute the generate devon4node command, some files will be added/updated in your project.

+
+
+

Files

+
+
    +
  • +

    package.json

    +
    +

    The package.json is modified in order to add a script to run the application in a docker container. It is necessary because we change a little bit the folder structure when we put all files in a docker image, so the script start:prod does not work.

    +
    +
  • +
  • +

    .gitignore

    +
    +

    Defines all files that git will ignore. e.g: compiled files, IDE configurations.

    +
    +
  • +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which defines the Jenkins pipeline of our project. With this we can execute the tests, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        NodeJS installed in Jenkins as a global tool.

        +
      • +
      • +

        Google Chrome installed in Jenkins as a global custom tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker :

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        • +

          A docker network called application.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift :

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkins, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarder to delete old artifacts/builds of the pipeline and also we disable the concurrent builds.

        +
        +

        If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        tools: Here we define the global tools configurations. By default a version of nodejs is added here.

        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update them manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: in this stage some custom tools are loaded. Also we set some variables depending on the git branch which you are executing.

          +
        • +
        • +

          Fresh Dependency Installation: install all packages needed to build/run your node project.

          +
        • +
        • +

          Code Linting: execute the linter analysis.

          +
        • +
        • +

          Execute tests: execute the tests.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Build Application: compile the application to be ready to deploy in a web server.

          +
        • +
        • +

          Deliver application into Nexus: store all compiled files in Nexus3 as a zip file.

          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+

Devon4node Docker generated files

+
+

When you generate the files for a devon4node you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --plurl is also required. It will be used to upload the images to the Nexus3 inside Production Line. Example: if your PL url is test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4node --groupid com.devonfw --docker --plurl test.s2-eu.capgemini.com, and it will use docker-registry-test.s2-eu.capgemini.com as docker registry. +
+
+
+

Files

+
+
    +
  • +

    .dockerignore

    +
    +

    In this file are defined the folders that will not be copied to the docker image. For more information read the official documentation.

    +
    +
  • +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for you project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
    +

    This build installs all dependencies in order to build the project and then removes all devDependencies in order to keep only the production dependencies.

    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling again the code, it takes all compiled files from Jenkins to the image.

    +
    +
  • +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cicdgen/1.0/index.html b/docs/cicdgen/1.0/index.html new file mode 100644 index 00000000..5feef2b4 --- /dev/null +++ b/docs/cicdgen/1.0/index.html @@ -0,0 +1,315 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==CICDGEN

+
+
+

cicdgen is a devonfw tool for generating all code/files related to CICD. It will include/modify into your project all files that the project needs to run a Jenkins cicd pipeline, to create a docker image based on your project, etc. It’s based on angular schematics, so you can add it as a dependency into your project and generate the code using ng generate. In addition, it has its own CLI for those projects that are not angular based.

+
+
+

What is angular schematics?

+
+
+

Schematics are generators that transform an existing filesystem. They can create files, refactor existing files, or move files from one place to another.

+
+
+

What distinguishes Schematics from other generators, such as Yeoman or Yarn Create, is that schematics are purely descriptive; no changes are applied to the actual filesystem until everything is ready to be committed. There is no side effect, by design, in Schematics.

+
+
+
+
+

cicdgen CLI

+
+
+

In order to know more about how to use the cicdgen CLI, you can check the CLI page

+
+
+
+
+

cicdgen Schematics

+
+
+

In order to know more about how to use the cicdgen schematics, you can check the schematics page

+
+
+
+
+

Usage example

+
+
+

A specific page about how to use cicdgen is also available.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cicdgen/1.0/master-cicdgen.html b/docs/cicdgen/1.0/master-cicdgen.html new file mode 100644 index 00000000..e7824acf --- /dev/null +++ b/docs/cicdgen/1.0/master-cicdgen.html @@ -0,0 +1,2059 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==cicdgen

+
+
+

Unresolved include directive in modules/ROOT/pages/master-cicdgen.adoc - include::Home.adoc[]

+
+
+

cicdgen CLI

+
+ +
+
CICDGEN CLI
+
+

cicdgen is a command line interface that helps you with some CICD in a devonfw project. At this moment we can only generate files related to CICD in a project but we plan to add more functionality in the future.

+
+
+
Installation
+
+
+
$ npm i -g @devonfw/cicdgen
+
+
+
+
+
Usage
+
+Global arguments +
+
    +
  • +

    --version

    +
    +
    +
    Prints the cicdgen version number
    +
    +
    +
  • +
  • +

    --help

    +
    +
    +
    Shows the usage of the command
    +
    +
    +
  • +
+
+
+
+Commands +
+Generate. +
+

This command wraps the usage of the angular schematics CLI. With this we generate files in an easy way and also print a better help about usage.

+
+
+

Available schematics that generate the code:

+
+
+ +
+
+
+
+Examples +
+
    +
  • +

    Generate all CICD files related to a devon4j project

    +
    +
    +
    $ cicdgen generate devon4j
    +
    +
    +
  • +
  • +

    Generate all CICD files related to a devon4ng project with docker deployment.

    +
    +
    +
    $ cicdgen generate devon4ng --groupid com.devonfw --docker --registryurl docker-registry-devon.s2-eu.capgemini.com
    +
    +
    +
  • +
  • +

    Generate all CICD files related to a devon4node project with OpenShift deployment.

    +
    +
    +
    $ cicdgen generate devon4ng --groupid com.devonfw --openshift --registryurl docker-registry-devon.s2-eu.capgemini.com --ocname default --ocn devonfw
    +
    +
    +
  • +
+
+ +
+
+
+
+
cicdgen usage example
+
+

In this example we are going to show how to use cicdgen step by step in a devon4ng project.

+
+
+
    +
  1. +

    Install cicdgen

    +
    +

    cicdgen is already included in the devonfw distribution, but if you want to use it outside the devonfw console you can execute the following command:

    +
    +
    +
    +
    $ npm i -g cicdgen
    +
    +
    +
  2. +
  3. +

    Generate a new devon4ng project using devonfw ide.

    +
    +

    Inside a devonfw ide distribution execute the command (devon ng create <app-name>):

    +
    +
    +
    +
    $ devon ng create devon4ng
    +
    +
    +
  4. +
  5. +

    Execute cicdgen generate command

    +
    +

    As we want to send notifications to MS Teams, we need to create the connector first:

    +
    +
    +
    +
    +
      +
    • +

      Go to a channel in teams and click at the connectors button. Then click at the jenkins configure button.

      +
      +

      teams 1

      +
      +
    • +
    • +

      Put a name for the connector

      +
      +

      teams 2

      +
      +
    • +
    • +

      Copy the name and the Webhook URL, we will use it later.

      +
      +

      teams 3

      +
      +
    • +
    +
    +
    +
    +
    +

    With the values that we get in the previous steps, we will execute the cicdgen command inside the project folder. If you have any doubt you can use the help.

    +
    +
    +

    help 1

    +
    +
    +

    help 2

    +
    +
    +
    +
    $ cicdgen generate devon4ng --groupid com.devonfw --docker --dockerurl tcp://127.0.0.1:2376 `--registryurl docker-registry-devon.s2-eu.capgemini.com --teams --teamsname devon4ng --teamsurl https://outlook.office.com/webhook/...`
    +
    +
    +
    +

    cicdgen command

    +
    +
  6. +
  7. +

    Create a git repository and upload the code

    +
    +

    gitlab

    +
    +
    +

    gitlab 2

    +
    +
    +
    +
    $ git remote add origin https://devon.s2-eu.capgemini.com/gitlab/darrodri/devon4ng.git
    +$ git push -u origin master
    +
    +
    +
    +

    push code

    +
    +
    +

    As you can see, no git init or git commit is required, cicdgen does it for you.

    +
    +
  8. +
  9. +

    Create a multibranch-pipeline in Jenkins

    +
    +

    new pipeline

    +
    +
    +

    When you push the save button, it will download the repository and execute the pipeline defined in the Jenkinsfile. If you get any problem, check the environment variables defined in the Jenkinsfile. Here we show all variables related with Jenkins:

    +
    +
    +
    +
    +
      +
    • +

      chrome

      +
      +

      chrome stable

      +
      +
    • +
    • +

      sonarTool

      +
      +

      sonar tool

      +
      +
    • +
    • +

      sonarEnv

      +
      +

      sonar env

      +
      +
    • +
    • +

      repositoryId

      +
      +

      repository id

      +
      +
    • +
    • +

      globalSettingsId

      +
      +

      global settings id

      +
      +
    • +
    • +

      mavenInstallation

      +
      +

      maven installation

      +
      +
    • +
    • +

      dockerTool

      +
      +

      docker global

      +
      +
    • +
    +
    +
    +
    +
  10. +
  11. +

    Add a webhook in GitLab

    +
    +

    In order to run the pipeline every time that you push code to GitLab, you need to configure a webhook in your repository.

    +
    +
    +

    gitlab webhook

    +
    +
  12. +
+
+
+

Now your project is ready to work following a CICD strategy.

+
+
+

The last thing to take into account is the branch naming. We prepare the pipeline in order to work following the git-flow strategy. So all stages of the pipeline will be executed for the branches: develop, release/*, master. For the branches: feature/*, hotfix/*, bugfix/* only the steps related to unit testing will be executed.

+
+
+
+
+
+

cicdgen Schematics

+
+ +
+
CICDGEN SCHEMATICS
+
+

We use angular schematics to create and update an existing devonfw project in order to adapt it to a CICD environment. All schematics are prepared to work with Production Line, a Capgemini CICD platform, but it can also work in other environment which have the following tools:

+
+
+
    +
  • +

    Jenkins

    +
  • +
  • +

    GitLab

    +
  • +
  • +

    Nexus 3

    +
  • +
  • +

    SonarQube

    +
  • +
+
+
+

The list of available schematics are:

+
+
+ +
+
+
How to run the schematics
+
+

You can run the schematics using the schematics CLI provided by the angular team, but the easiest way to run it is using the cicdgen CLI, which is a wrapper for the schematics CLI in order to use it in an easy way.

+
+
+

To generate files you only need to run the command

+
+
+
+
$ cicdgen generate <schematic-name> [arguments]
+
+
+
+

<schematic-name> is the name of the schematic that you want to execute.

+
+
+

You can find all information about arguments in the schematic section.

+
+ +
+
+
Merge Strategies
+
+

When you execute cicdgen in a project, it is possible that you already have some files that cicdgen will generate. Until version 1.5 the behaviour in these cases was to throw an error and not create/modify any file. Since version 1.6 you can choose what to do in case of conflict. In this page we will explain how to choose a merge strategy and how it works.

+
+
+Choose a merge strategy +
+

To choose a merge strategy, you must pass to cicdgen the merge parameter followed by the name of the strategy. The strategies available are: error, keep, override, combine.

+
+
+

Example:

+
+
+
+
$ cicdgen generate devon4j --merge keep
+
+
+
+
+Merge strategies +
+
    +
  • +

    error: The error strategy is the same as until version 1.5, throwing an error and do not create/modify any file. This is the default value, if you do not pass the merge parameter this value will be taken.

    +
  • +
  • +

    keep: The keep strategy will keep the actual content of your files in case of conflict. If there is no conflict, the file will be created with the new content.

    +
  • +
  • +

    override: The override strategy will override your current files, without throwing any error, and create a new ones with the new content. If there is no conflict, the file will be created with the new content.

    +
  • +
  • +

    combine: The combine strategy will create a new file combining the current content with the new content. In order to combine both files, it will apply a diff algorithm and it will show the conflicts in the same way that git does. If there is no conflict, the file will be created with the new content.

    +
    +

    By resolving the conflicts in the same way as git, you can use the same tools in order to solve them. For example, you can use VSCode:

    +
    +
    +

    merge combine vscode

    +
    +
  • +
+
+
+

Examples:

+
+
+

keep +Current file:

+
+
+
+
Line 1
+Line 2
+Line 3
+Line 4
+
+
+
+

New file:

+
+
+
+
Line 5
+Line 2
+Line 3
+Line 4
+
+
+
+

The result will be:

+
+
+
+
Line 1
+Line 2
+Line 3
+Line 4
+
+
+
+

override +Current file:

+
+
+
+
Line 1
+Line 2
+Line 3
+Line 4
+
+
+
+

New file:

+
+
+
+
Line 5
+Line 2
+Line 3
+Line 4
+
+
+
+

The result will be:

+
+
+
+
Line 5
+Line 2
+Line 3
+Line 4
+
+
+
+

combine +Current file:

+
+
+
+
Line 1
+Line 2
+Line 3
+Line 4
+
+
+
+

New file:

+
+
+
+
Line 5
+Line 2
+Line 3
+Line 4
+
+
+
+

The result will be:

+
+
+
+
<<<<<<< HEAD
+Line 1
+=======
+Line 5
+>>>>>>> new_content
+Line 2
+Line 3
+Line 4
+
+
+ +
+
+
+
+
devon4j schematic
+
+

With the cicdgen generate devon4j command you will be able to generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+
devon4j schematic arguments
+
+

When you execute the cicdgen generate devon4j command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for docker (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used to register your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
  • +

    --merge

    +
    +

    If you have used cicdgen previously, you can choose what you want to do in case of file conflict. The default behavior is to throw an error and not modify any file. You can see the other strategies on their specific page.

    +
    +
  • +
  • +

    --commit

    +
    +

    If true, all changes will be committed at the end of the process (if possible). In order to send a false value, you need to write --commit=false

    +
    +
  • +
+
+
+
+
Devon4j generated files
+
+

When you execute the generate devon4j command, some files will be added/updated in your project.

+
+
+Files +
+
    +
  • +

    .gitignore

    +
    +

    Defines all files that git will ignore. e.g: compiled files, IDE configurations. It will download the content from: https://gitignore.io/api/java,maven,eclipse,intellij,intellij+all,intellij+iml,visualstudiocode

    +
    +
  • +
  • +

    pom.xml

    +
    +

    The pom.xml is modified in order to add, if needed, the distributionManagement.

    +
    +
  • +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which defines the Jenkins pipeline of our project. With this we can execute the tests, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        Java 11 installed in Jenkins as a global tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker to deploy:

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        • +

          A docker network called application.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift to deploy:

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkins, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarder to delete old artifacts/builds of the pipeline and also we disable the concurrent builds.

        +
        +

        If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update them manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: Load some custom tools that can not be loaded in the tools section. Also set some variables depending on the git branch which you are executing. Also, we set properly the version number in all pom files. It means that if your branch is develop, your version should end with the word -SNAPSHOT; otherwise, if -SNAPSHOT is present it will be removed.

          +
        • +
        • +

          Fresh Dependency Installation: install all packages needed to build/run your java project.

          +
        • +
        • +

          Unit Tests: execute the mvn test command.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Deliver application into Nexus: build the project and send all bundle files to Nexus3.

          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
            +

            +
            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+
devon4j Docker generated files
+
+

When you generate the files for a devon4j project you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4node --groupid com.devonfw --docker --registryurl docker-registry-test.s2-eu.capgemini.com. +
+
+
+Files +
+
    +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for you project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
    +

    This build is using a multi-stage build. First, it uses a maven image in order to compile the source code, then it will use a java image to run the application. With the multi-stage build we keep the final image as clean as possible.

    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling again the code, it takes the compiled war from Jenkins to the image.

    +
    +
  • +
+
+ +
+
+
+
+
devon4ng schematic
+
+

With the cicdgen generate devon4ng command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+
devon4ng schematic arguments
+
+

When you execute the cicdgen generate devon4ng command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for OpenShift (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used to register your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --groupid

    +
    +

    The project groupId. This argument is required. It will be used to store the project in a maven repository at Nexus 3. Why maven? Because it is the kind of repository where we can upload/download a zip file easily. An npm repository needs a package.json file but, as we compile the angular application to static javascript and html files, the package.json is not needed anymore.

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
  • +

    --merge

    +
    +

    If you have used cicdgen previously, you can choose what you want to do in case of file conflict. The default behavior is to throw an error and not modify any file. You can see the other strategies on their specific page.

    +
    +
  • +
  • +

    --commit

    +
    +

    If true, all changes will be committed at the end of the process (if possible). In order to send a false value, you need to write --commit=false

    +
    +
  • +
+
+
+
+
devon4ng generated files
+
+

When you execute the generate devon4ng command, some files will be added/updated in your project.

+
+
+Files +
+
    +
  • +

    angular.json

    +
    +

    The angular.json is modified in order to change the compiled files destination folder. Now, when you make a build of your project, the compiled files will be generated into dist folder instead of dist/<project-name> folder.

    +
    +
  • +
  • +

    package.json

    +
    +

    The package.json is modified in order to add a script for test the application using Chrome Headless instead of a regular chrome. This script is called test:ci.

    +
    +
  • +
  • +

    karma.conf.js

    +
    +

    The karma.conf.js is also modified in order to add the Chrome Headless as a browser to execute tests. The coverage output folder is changed to ./coverage instead of ./coverage/<project-name>

    +
    +
  • +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which define the Jenkins pipeline of our project. With this we can execute the test, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        NodeJS installed in Jenkins as a global tool.

        +
      • +
      • +

        Google Chrome installed in Jenkins as a global custom tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker :

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        • +

          A docker network called application.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift :

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkins, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarder to delete old artifacts/builds of the pipeline and also we disable the concurrent builds.

        +
        +

        [[jenkinsfile-teams]] +If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        tools: Here we define the global tools configurations. By default a version of nodejs is added here.

        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update it manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: in this stage some custom tools are loaded. Also we set some variables depending on the git branch which you are executing.

          +
        • +
        • +

          Fresh Dependency Installation: install all packages needed to build/run your angular project.

          +
        • +
        • +

          Code Linting: execute the linter analysis.

          +
        • +
        • +

          Execute Angular tests: execute the angular test in a Chrome Headless.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Build Application: compile the application to be ready to deploy in a web server.

          +
        • +
        • +

          Deliver application into Nexus: store all compiled files in Nexus3 as a zip file.

          +
          +

          [[jenkinsfile-docker]]

          +
          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
            +

            [[jenkinsfile-openshift]]

            +
            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+
devon4ng Docker generated files
+
+

When you generate the files for a devon4ng you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4node --groupid com.devonfw --docker `--registryurl docker-registry-test.s2-eu.capgemini.com`. +
+
+
+Files +
+
    +
  • +

    .dockerignore

    +
    +

    In this file are defined the folders that will not be copied to the docker image. For more information read the official documentation.

    +
    +
  • +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
    +

    This build is using a multi-stage build. First, it use a node image in order to compile the source code, then it will use a nginx image as a web server for our devon4ng application. With the multi-stage build we avoid everything related to node.js in our final image, where we only have a nginx with our application compiled.

    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling again the code, it takes all compiled files and the nginx.conf from Jenkins to the image.

    +
    +
  • +
+
+ +
+
+
+
+
devon4net schematic
+
+

With the cicdgen generate devon4net command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+
devon4net schematic arguments
+
+

When you execute the cicdgen generate devon4net command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --appname

    +
    +

    The name of your devon4net application.

    +
    +
  • +
  • +

    --appversion

    +
    +

    The initial version of your devon4net application

    +
    +
  • +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for OpenShift (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used to register your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --groupid

    +
    +

    The project groupId. This argument is required. It will be used for storing the project in a maven repository at Nexus 3. Why maven? Because it is the kind of repository where we can upload/download a zip file easily. Npm repository needs a package.json file but, as we compile the angular application to static javascript and html files, the package.json is not needed anymore.

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the xref:`jenkinsfile-teams`[Jenkinsfile].

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
  • +

    --merge

    +
    +

    If you have used cicdgen previously, you can choose what you want to do in case of file conflict. The default behavior is to throw an error and not modify any file. You can see the other strategies on their specific page.

    +
    +
  • +
  • +

    --commit

    +
    +

    If true, all changes will be committed at the end of the process (if possible). In order to send a false value, you need to write --commit=false

    +
    +
  • +
+
+
+
+
devon4net generated files
+
+

When you execute the generate devon4net command, some files will be added/updated in your project.

+
+
+Files +
+
    +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which defines the Jenkins pipeline of our project. With this we can execute the tests, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        dotnet core installed in Jenkins as a global tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker :

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift :

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkins, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarded to delete old artifacts/build of the pipeline and also we disable the concurrent builds.

        +
        +

        [[jenkinsfile-teams]] +If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        tools: Here we define the global tools configurations. By default a version of nodejs is added here.

        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update it manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: in this stage some custom tools are loaded. Also we set some variables depending on the git branch which you are executing.

          +
        • +
        • +

          Fresh Dependency Installation: install all dependencies needed to build/run your dotnet project.

          +
        • +
        • +

          Execute dotnet tests: execute the tests.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Build Application: compile the application to be ready to deploy in a web server.

          +
        • +
        • +

          Deliver application into Nexus: store all compiled files in Nexus3 as a zip file.

          +
          +

          [[jenkinsfile-docker]]

          +
          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
            +

            [[jenkinsfile-openshift]]

            +
            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+
devon4net Docker generated files
+
+

When you generate the files for devon4net you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4net --groupid com.devonfw --docker `--registryurl docker-registry-test.s2-eu.capgemini.com`. +
+
+
+Files +
+
    +
  • +

    .dockerignore

    +
    +

    In this file are defined the folders that will not be copied to the docker image. For more information read the official documentation.

    +
    +
  • +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling again the code, it takes all compiled files from Jenkins to the image.

    +
    +
  • +
+
+ +
+
+
+
+
devon4node schematic
+
+

With the cicdgen generate devon4node command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+
devon4node schematic arguments
+
+

When you execute the cicdgen generate devon4node command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for OpenShift (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used to register your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --groupid

    +
    +

    The project groupId. This argument is required. It will be used for storing the project in a maven repository at Nexus 3. Why maven? Because it is the kind of repository where we can upload/download a zip file easily. Npm repository needs a package.json file but, as we compile the angular application to static javascript and html files, the package.json is not needed anymore.

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
  • +

    --merge

    +
    +

    If you have used cicdgen previously, you can choose what you want to do in case of file conflict. The default behavior is to throw an error and not modify any file. You can see the other strategies on their specific page.

    +
    +
  • +
  • +

    --commit

    +
    +

    If true, all changes will be committed at the end of the process (if possible). In order to send a false value, you need to write --commit=false

    +
    +
  • +
+
+
+
+
devon4node generated files
+
+

When you execute the generate devon4node command, some files will be added/updated in your project.

+
+
+Files +
+
    +
  • +

    package.json

    +
    +

    The package.json is modified in order to add a script for running the linter and generating the json report. This script is called lint:ci.

    +
    +
  • +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which defines the Jenkins pipeline of our project. With this we can execute the tests, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        NodeJS installed in Jenkins as a global tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker :

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift :

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkins, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarded to delete old artifacts/build of the pipeline and also we disable the concurrent builds.

        +
        +

        [[jenkinsfile-teams]] +If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        tools: Here we define the global tools configurations. By default a version of nodejs is added here.

        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update it manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: in this stage some custom tools are loaded. Also we set some variables depending on the git branch which you are executing.

          +
        • +
        • +

          Fresh Dependency Installation: install all packages needed to build/run your node project.

          +
        • +
        • +

          Code Linting: execute the linter analysis.

          +
        • +
        • +

          Execute tests: execute the tests.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Build Application: compile the application to be ready to deploy in a web server.

          +
        • +
        • +

          Deliver application into Nexus: store all compiled files in Nexus3 as a zip file.

          +
          +

          [[jenkinsfile-docker]]

          +
          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
            +

            [[jenkinsfile-openshift]]

            +
            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+
devon4node Docker generated files
+
+

When you generate the files for a devon4node you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4node --groupid com.devonfw --docker `--registryurl docker-registry-test.s2-eu.capgemini.com`. +
+
+
+Files +
+
    +
  • +

    .dockerignore

    +
    +

    In this file are defined the folders that will not be copied to the docker image. For more information read the official documentation.

    +
    +
  • +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
    +

    This build installs all dependencies in order to build the project and then removes all devDependencies in order to keep only the production dependencies.

    +
    +
  • +
  • +

    .dockerignore.ci

    +
    +

    Another .dockerignore. The purpose of this one is to define the file exclusions in your CI pipeline.

    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling again the code, it takes all compiled files from Jenkins to the image.

    +
    +
  • +
+
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cicdgen/1.0/merge-strategies.html b/docs/cicdgen/1.0/merge-strategies.html new file mode 100644 index 00000000..92af1e04 --- /dev/null +++ b/docs/cicdgen/1.0/merge-strategies.html @@ -0,0 +1,432 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Merge Strategies

+
+
+

When you execute cicdgen in a project, it is possible that you already have some files that cicdgen will generate. Until version 1.5 the behaviour in these cases was to throw an error and not create/modify any file. Since version 1.6 you can choose what to do in case of conflict. In this page we will explain how to choose a merge strategy and how it works.

+
+
+

Choose a merge strategy

+
+

To choose a merge strategy, you must pass to cicdgen the merge parameter followed by the name of the strategy. The strategies available are: error, keep, override, combine.

+
+
+

Example:

+
+
+
+
$ cicdgen generate devon4j --merge keep
+
+
+
+
+

Merge strategies

+
+
    +
  • +

    error: The error strategy is the same as until version 1.5, throwing an error and not creating/modifying any file. This is the default value, if you do not pass the merge parameter this value will be taken.

    +
  • +
  • +

    keep: The keep strategy will keep the actual content of your files in case of conflict. If there is no conflict, the file will be created with the new content.

    +
  • +
  • +

    override: The override strategy will override your current files, without throwing any error, and create new ones with the new content. If there is no conflict, the file will be created with the new content.

    +
  • +
  • +

    combine: The combine strategy will create a new file combining the current content with the new content. In order to combine both files, it will apply a diff algorithm and it will show the conflicts in the same way that git does. If there is no conflict, the file will be created with the new content.

    +
    +

    By resolving the conflicts in the same way as git, you can use the same tools in order to solve them. For example, you can use VSCode:

    +
    +
    +

    merge combine vscode

    +
    +
  • +
+
+
+

Examples:

+
+
+

keep +Current file:

+
+
+
+
Line 1
+Line 2
+Line 3
+Line 4
+
+
+
+

New file:

+
+
+
+
Line 5
+Line 2
+Line 3
+Line 4
+
+
+
+

The result will be:

+
+
+
+
Line 1
+Line 2
+Line 3
+Line 4
+
+
+
+

override +Current file:

+
+
+
+
Line 1
+Line 2
+Line 3
+Line 4
+
+
+
+

New file:

+
+
+
+
Line 5
+Line 2
+Line 3
+Line 4
+
+
+
+

The result will be:

+
+
+
+
Line 5
+Line 2
+Line 3
+Line 4
+
+
+
+

combine +Current file:

+
+
+
+
Line 1
+Line 2
+Line 3
+Line 4
+
+
+
+

New file:

+
+
+
+
Line 5
+Line 2
+Line 3
+Line 4
+
+
+
+

The result will be:

+
+
+
+
<<<<<<< HEAD
+Line 1
+=======
+Line 5
+>>>>>>> new_content
+Line 2
+Line 3
+Line 4
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cicdgen/1.0/usage-example.html b/docs/cicdgen/1.0/usage-example.html new file mode 100644 index 00000000..673d1871 --- /dev/null +++ b/docs/cicdgen/1.0/usage-example.html @@ -0,0 +1,449 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

cicdgen usage example

+
+
+

In this example we are going to show how to use cicdgen step by step in a devon4ng project.

+
+
+
    +
  1. +

    Install cicdgen

    +
    +

    cicdgen is already included in the devonfw distribution, but if you want to use it outside the devonfw console you can execute the following command:

    +
    +
    +
    +
    $ npm i -g cicdgen
    +
    +
    +
  2. +
  3. +

    Generate a new devon4ng project using devonfw ide.

    +
    +

    Inside a devonfw ide distribution execute the command (devon ng create <app-name>):

    +
    +
    +
    +
    $ devon ng create devon4ng
    +
    +
    +
  4. +
  5. +

    Execute cicdgen generate command

    +
    +

    As we want to send notifications to MS Teams, we need to create the connector first:

    +
    +
    +
    +
    +
      +
    • +

      Go to a channel in teams and click at the connectors button. Then click at the jenkins configure button.

      +
      +

      teams 1

      +
      +
    • +
    • +

      Put a name for the connector

      +
      +

      teams 2

      +
      +
    • +
    • +

      Copy the name and the Webhook URL, we will use it later.

      +
      +

      teams 3

      +
      +
    • +
    +
    +
    +
    +
    +

    With the values that we get in the previous steps, we will execute the cicdgen command inside the project folder. If you have any doubt you can use the help.

    +
    +
    +

    help 1

    +
    +
    +

    help 2

    +
    +
    +
    +
    $ cicdgen generate devon4ng --groupid com.devonfw --docker --dockerurl tcp://127.0.0.1:2376 `--registryurl docker-registry-devon.s2-eu.capgemini.com --teams --teamsname devon4ng --teamsurl https://outlook.office.com/webhook/...`
    +
    +
    +
    +

    cicdgen command

    +
    +
  6. +
  7. +

    Create a git repository and upload the code

    +
    +

    gitlab

    +
    +
    +

    gitlab 2

    +
    +
    +
    +
    $ git remote add origin https://devon.s2-eu.capgemini.com/gitlab/darrodri/devon4ng.git
    +$ git push -u origin master
    +
    +
    +
    +

    push code

    +
    +
    +

    As you can see, no git init or git commit is required, cicdgen does it for you.

    +
    +
  8. +
  9. +

    Create a multibranch-pipeline in Jenkins

    +
    +

    new pipeline

    +
    +
    +

    When you push the save button, it will download the repository and execute the pipeline defined in the Jenkinsfile. If you get any problem, check the environment variables defined in the Jenkinsfile. Here we show all variables related with Jenkins:

    +
    +
    +
    +
    +
      +
    • +

      chrome

      +
      +

      chrome stable

      +
      +
    • +
    • +

      sonarTool

      +
      +

      sonar tool

      +
      +
    • +
    • +

      sonarEnv

      +
      +

      sonar env

      +
      +
    • +
    • +

      repositoryId

      +
      +

      repository id

      +
      +
    • +
    • +

      globalSettingsId

      +
      +

      global settings id

      +
      +
    • +
    • +

      mavenInstallation

      +
      +

      maven installation

      +
      +
    • +
    • +

      dockerTool

      +
      +

      docker global

      +
      +
    • +
    +
    +
    +
    +
  10. +
  11. +

    Add a webhook in GitLab

    +
    +

    In order to run the pipeline every time that you push code to GitLab, you need to configure a webhook in your repository.

    +
    +
    +

    gitlab webhook

    +
    +
  12. +
+
+
+

Now your project is ready to work following a CICD strategy.

+
+
+

The last thing to take into account is the branch naming. We prepare the pipeline in order to work following the git-flow strategy. So all stages of the pipeline will be executed for the branches: develop, release/*, master. For the branches: feature/*, hotfix/*, bugfix/* only the steps related to unit testing will be executed.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/CobiGen.html b/docs/cobigen/1.0/CobiGen.html new file mode 100644 index 00000000..2dcf3c58 --- /dev/null +++ b/docs/cobigen/1.0/CobiGen.html @@ -0,0 +1,3195 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

CobiGen — Code-based incremental Generator

+
+ +
+
+
+

Document Description

+
+
+

This document contains the documentation of the CobiGen core module as well as all CobiGen plug-ins and the CobiGen eclipse integration.

+
+
+

Current versions:

+
+
+
    +
  • +

    CobiGen - Eclipse Plug-in v4.4.1

    +
  • +
  • +

    CobiGen - Maven Build Plug-in v4.1.0

    +
  • +
+
+
+
+
    +
  • +

    CobiGen v5.3.1

    +
  • +
  • +

    CobiGen - Java Plug-in v2.1.0

    +
  • +
  • +

    CobiGen - XML Plug-in v4.1.0

    +
  • +
  • +

    CobiGen - TypeScript Plug-in v2.2.0

    +
  • +
  • +

    CobiGen - Property Plug-in v2.0.0

    +
  • +
  • +

    CobiGen - Text Merger v2.0.0

    +
  • +
  • +

    CobiGen - JSON Plug-in v2.0.0

    +
  • +
  • +

    CobiGen - HTML Plug-in v2.0.1

    +
  • +
  • +

    CobiGen - Open API Plug-in v2.3.0

    +
  • +
  • +

    CobiGen - FreeMarker Template Engine v2.0.0

    +
  • +
  • +

    CobiGen - Velocity Template Engine v2.0.0

    +
  • +
+
+
+

Authors:

+
+
+
    +
  • +

    Malte Brunnlieb

    +
  • +
  • +

    Jaime Diaz Gonzalez

    +
  • +
  • +

    Steffen Holzer

    +
  • +
  • +

    Ruben Diaz Martinez

    +
  • +
  • +

    Joerg Hohwiller

    +
  • +
  • +

    Fabian Kreis

    +
  • +
  • +

    Lukas Goerlach

    +
  • +
  • +

    Krati Shah

    +
  • +
  • +

    Christian Richter

    +
  • +
  • +

    Erik Grüner

    +
  • +
  • +

    Mike Schumacher

    +
  • +
  • +

    Marco Rose

    +
  • +
+
+
+

Guide to the Reader

+
+
+

Dependent on the intention you are reading this document, you might be most interested in the following chapters:

+
+
+
    +
  • +

    If this is your first contact with CobiGen, you will be interested in the general purpose of CobiGen, in the licensing of CobiGen, as well as in the Shared Service provided for CobiGen. Additionally, there are some general use cases, which are currently implemented and maintained to be used out of the box.

    +
  • +
  • +

    As a user of the CobiGen Eclipse integration, you should focus on the Installation and Usage chapters to get a good introduction about how to use CobiGen in eclipse.

    +
  • +
  • +

    As a user of the Maven integration, you should focus on the Maven configuration chapter, which guides you through the integration of CobiGen into your build configuration.

    +
  • +
  • +

    If you like to adapt the configuration of CobiGen, you have to step deeper into the configuration guide as well as into the plug-in configuration extensions for the Java Plug-in, XML-Plugin, Java Property Plug-in, as well as for the Text-Merger Plug-in.

    +
  • +
  • +

    Finally, if you want to develop your own templates, you will be thankful for helpful links in addition to the plug-ins documentation as referenced in the previous point.

    +
  • +
+
+
+

Unresolved include directive in modules/ROOT/pages/CobiGen.adoc - include::Home[]

+
+ +
+

General use cases

+
+
+

In addition to the selection of CobiGen applications introduced before, this chapter provides a more detailed overview about the currently implemented and maintained general use cases. These can be used by any project following a supported reference architecture as e.g. the devonfw or Register Factory.

+
+
+
+
+

devon4j

+
+
+

With our templates for devon4j, you can generate a whole CRUD application from a single Entity class. You save the effort of creating DAOs, Transfer Objects, and simple CRUD use cases with REST services, and even the client application can be generated.

+
+
+

CRUD server application for devon4j

+
+

For the server, the required files for all architectural layers (Data access, logic, and service layer) can be created based on your Entity class. After the generation, you have CRUD functionality for the entity from bottom to top which can be accessed via a RESTful web service. Details are provided in the devonfw wiki.

+
+
+
+

CRUD client application for devon4ng

+
+

Based on the REST services on the server, you can also generate an Angular client based on devon4ng. With the help of Node.js, you have a working client application for displaying your entities within minutes!

+
+
+
+

Test data Builder for devon4j

+
+

Generating a builder pattern for POJOs to easily create test data in your tests. CobiGen is not only able to generate a plain builder pattern but rather builders, which follow a specific concept to minimize test data generation efforts in your unit tests. Take the following Person class as an example:

+
+
+
Person class
+
+
public class Person {
+
+    private String firstname;
+    private String lastname;
+    private int birthyear;
+    @NotNull
+    private Address address;
+
+    @NotNull
+    public String getFirstname() {
+        return this.firstname;
+    }
+
+    // additional default setter and getter
+}
+
+
+
+

It is a simple POJO with a validation annotation, to indicate, that firstname should never be null. Creating this object in a test would imply to call every setter, which is kind of nasty. Therefore, the Builder Pattern has been introduced for quite a long time in software engineering, allowing to easily create POJOs with a fluent API. See below.

+
+
+
Builder pattern example
+
+
Person person = new PersonBuilder()
+                .firstname("Heinz")
+                .lastname("Erhardt")
+                .birthyear(1909)
+                .address(
+                    new AddressBuilder().postcode("22222")
+                        .city("Hamburg").street("Luebecker Str. 123")
+                        .createNew())
+                .addChild(
+                    new PersonBuilder()[...].createNew()).createNew();
+
+
+
+

The Builder API generated by CobiGen allows you to set any setter-accessible field of a POJO in a fluent way. But in addition, let's assume a test, which should check the birth year as precondition for any business operation. So specifying all other fields of Person, especially firstname as it is mandatory to enter business code, would not make sense. The test behavior should just depend on the specification of the birth year and on no other data. So we would like to just provide this data to the test.

+
+
+

The Builder classes generated by CobiGen try to tackle this inconvenience by providing the ability to declare default values for any mandatory field due to validation or database constraints.

+
+
+
Builder Outline
+
+
public class PersonBuilder {
+
+    private void fillMandatoryFields() {
+        firstname("lasdjfaöskdlfja");
+        address(new AddressBuilder().createNew());
+    };
+    private void fillMandatoryFields_custom() {...};
+
+    public PersonBuilder firstname(String value);
+    public PersonBuilder lastname(String value);
+    ...
+
+    public Person createNew();
+    public Person persist(EntityManager em);
+    public List<Person> persistAndDuplicate(EntityManager em, int count);
+}
+
+
+
+

Looking at the plotted builder API generated by CobiGen, you will find two private methods. The method fillMandatoryFields will be generated by CobiGen and regenerated every time CobiGen generation will be triggered for the Person class. This method will set every automatically detected field with not null constraints to a default value. However, by implementing fillMandatoryFields_custom on your own, you can reset these values or even specify more default values for any other field of the object. Thus, running new PersonBuilder().birthyear(1909).createNew(); will create a valid object of Person, which is already pre-filled such that it does not influence the test execution besides the fact that it circumvents database and validation issues.

+
+
+

This even holds for complex data structures as indicated by address(new AddressBuilder().createNew());. Due to the use of the AddressBuilder for setting the default value for the field address, also the default values for Address will be set automatically.

+
+
+

Finally, the builder API provides different methods to create new objects.

+
+
+
    +
  • +

    createNew() just creates a new object from the builder specification and returns it.

    +
  • +
  • +

    persist(EntityManager) will create a new object from the builder specification and persists it to the database.

    +
  • +
  • +

    persistAndDuplicate(EntityManager, int) will create the given amount of objects from the builder specification and persists all of these. After the initial generation of each builder, you might want to adapt the method body as you will most probably not be able to persist more than one object with the same field assignments to the database due to unique constraints. Thus, please see the generated comment in the method to adapt unique fields accordingly before persisting to the database.

    +
  • +
+
+
+

Custom Builder for Business Needs

+
+

CobiGen just generates basic builder for any POJO. However, for project needs you probably would like to have even more complex builders, which enable the easy generation of more complex test data which are encoded in a large object hierarchy. Therefore, the generated builders can just be seen as a tool to achieve this. You can define your own business driven builders in the same way as the generated builders, but explicitly focusing on your business needs. Just take this example as a demonstration of that idea:

+
+
+
+
  University uni = new ComplexUniversityBuilder()
+    .withStudents(200)
+    .withProfessors(4)
+    .withExternalStudent()
+    .createNew();
+
+
+
+

E.g. the method withExternalStudent() might create a person, which is a student and is flagged to be an external student. Basing this implementation on the generated builders will even assure that you would benefit from any default values you have set before. In addition, you can even imagine any more complex builder methods setting values driven your reusable testing needs based on the specific business knowledge.

+
+
+
+
+
+
+

Register Factory

+
+
+

CRUD server application

+
+

Generates a CRUD application with persistence entities as inputs. This includes DAOs, TOs, use cases, as well as a CRUD JSF user interface if needed.

+
+
+
+

Test data Builder

+ +
+
+

Test documentation

+
+

Generate test documentation from test classes. The input are the doclet tags of several test classes, which e.g. can specify a description, a cross-reference, or a test target description. The result currently is a csv file, which lists all tests with the corresponding meta-information. Afterwards, this file might be styled and passed to the customer if needed and it will be up-to-date every time!

+
+
+
+
+
+

CobiGen

+
+ +
+

Configuration

+
+
+

CobiGen is maintaining a home directory further referenced in this documentation as $cghome, which is used to maintain temporary or transient data. The home folder is determined with the following location fall-back:

+
+
+
    +
  1. +

    System environment variable COBIGEN_HOME (e.g. C:\project\ide\conf\cobigen-home)

    +
  2. +
  3. +

    .cobigen directory in OS user home (e.g. ~/.cobigen)

    +
  4. +
+
+
+

The actual configuration of CobiGen is maintained by a single folder or jar. The location can be configured with respect to the implemented configuration fall-back mechanism. CobiGen will search for the location of the configuration in the following order:

+
+
+
    +
  1. +

    A configuration jar or directory, which is passed to CobiGen by the Maven or Eclipse integration or any other program using the CobiGen programming interface: +1.1. the Maven integration allows to configure a jar dependency to be included in the currently running classpath (of interest for maven configuration +1.2. the Eclipse integration allows to specify a CobiGen_Templates project in the eclipse workspace

    +
  2. +
  3. +

    The file $cghome/.cobigen exists and the property templates is set to a valid configuration (e.g. templates=C:\project\ide\conf\templates or templates=C:\project\ide\conf\templates.jar) Hint: Check for log entry like Value of property templates in $cghome/.cobigen is invalid to identify an invalid configuration which is not taken up as expected

    +
  4. +
  5. +

    The folder $cghome/templates/CobiGen_Templates exists

    +
  6. +
  7. +

    The lexicographical sorted first configuration jar of the following path pattern $cghome/templates/templates-([^-]+)-(\\d+\\.?)+.jar if exists (e.g. templates-devon4j-2020.04.001)

    +
  8. +
  9. +

    CobiGen will automatically download the latest jar configuration from maven central with groupId com.devonfw.cobigen and artifactId templates-devon4j and take it like described in 4.

    +
  10. +
+
+
+

Within the configuration jar or directory you will find the following structure:

+
+
+
+
CobiGen_Templates
+ |- templateFolder1
+    |- templates.xml
+ |- templateFolder2
+    |- templates.xml
+ |- context.xml
+
+
+
+

Find some examples here.

+
+
+

Context Configuration

+
+

The context configuration (context.xml) always has the following root structure:

+
+
+
Context Configuration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<contextConfiguration xmlns="http://capgemini.com"
+                      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                      version="1.0">
+    <triggers>
+        ...
+    </triggers>
+</contextConfiguration>
+
+
+
+

The context configuration has a version attribute, which should match the XSD version the context configuration is an instance of. It should not state the version of the currently released version of CobiGen. This attribute should be maintained by the context configuration developers. If configured correctly, it will provide a better feedback for the user and thus higher user experience. Currently there is only the version v1.0. For further version there will be a changelog later on.

+
+
+
Trigger Node
+
+

As children of the <triggers> node you can define different triggers. By defining a <trigger> you declare a mapping between special inputs and a templateFolder, which contains all templates, which are worth to be generated with the given input.

+
+
+
trigger configuration
+
+
<trigger id="..." type="..." templateFolder="..." inputCharset="UTF-8" >
+    ...
+</trigger>
+
+
+
+
    +
  • +

    The attribute id should be unique within an context configuration. It is necessary for efficient internal processing.

    +
  • +
  • +

    The attribute type declares a specific trigger interpreter, which might be provided by additional plug-ins. A trigger interpreter has to provide an input reader, which reads specific inputs and creates a template object model out of it to be processed by the FreeMarker template engine later on. Have a look at the plug-in’s documentation of your interest and see, which trigger types and thus inputs are currently supported.

    +
  • +
  • +

    The attribute templateFolder declares the relative path to the template folder, which will be used if the trigger gets activated.

    +
  • +
  • +

    The attribute inputCharset (optional) determines the charset to be used for reading any input file.

    +
  • +
+
+
+
+
Matcher Node
+
+

A trigger will be activated if its matchers hold the following formula:

+
+
+

!(NOT || …​ || NOT) && AND && …​ && AND && (OR || …​ || OR)

+
+
+

Whereas NOT/AND/OR describes the accumulationType of a matcher (see below) and e.g. NOT means 'a matcher with accumulationType NOT matches a given input'. Thus additionally to an input reader, a trigger interpreter has to define at least one set of matchers, which are satisfiable, to be fully functional. A <matcher> node declares a specific characteristics a valid input should have.

+
+
+
Matcher Configuration
+
+
<matcher type="..." value="..." accumulationType="...">
+    ...
+</matcher>
+
+
+
+
    +
  • +

    The attribute type declares a specific type of matcher, which has to be provided by the surrounding trigger interpreter. Have a look at the plug-in’s documentation, which also provides the used trigger type for more information about valid matcher and their functionalities.

    +
  • +
  • +

    The attribute value might contain any information necessary for processing the matcher’s functionality. Have a look at the relevant plug-in’s documentation for more detail.

    +
  • +
  • +

    The attribute accumulationType (optional) specifies how the matcher will influence the trigger activation. Valid values are:

    +
    +
      +
    • +

      OR (default): if any matcher of accumulation type OR matches, the trigger will be activated as long as there are no further matchers with different accumulation types

      +
    • +
    • +

      AND: if any matcher with AND accumulation type does not match, the trigger will not be activated

      +
    • +
    • +

      NOT: if any matcher with NOT accumulation type matches, the trigger will not be activated

      +
    • +
    +
    +
  • +
+
+
+
+
Variable Assignment Node
+
+

Finally, a <matcher> node can have multiple <variableAssignment> nodes as children. Variable assignments allow to parametrize the generation by additional values, which will be added to the object model for template processing. The variables declared using variable assignments, will be made accessible in the templates.xml as well in the object model for template processing via the namespace variables.*.

+
+
+
Complete Configuration Pattern
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<contextConfiguration xmlns="http://capgemini.com"
+                      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                      version="1.0">
+    <triggers>
+        <trigger id="..." type="..." templateFolder="...">
+            <matcher type="..." value="...">
+                <variableAssignment type="..." key="..." value="..." />
+            </matcher>
+        </trigger>
+    </triggers>
+</contextConfiguration>
+
+
+
+
    +
  • +

    The attribute type declares the type of variable assignment to be processed by the trigger interpreter providing plug-in. This attribute enables variable assignments with different dynamic value resolutions.

    +
  • +
  • +

    The attribute key declares the namespace under which the resolved value will be accessible later on.

    +
  • +
  • +

    The attribute value might declare a constant value to be assigned or any hint for value resolution done by the trigger interpreter providing plug-in. For instance, if type is regex, then on value you will assign the matched group number by the regex (1, 2, 3…​)

    +
  • +
+
+
+
+
Container Matcher Node
+
+

The <containerMatcher> node is an additional matcher for matching containers of multiple input objects. Such a container might be a package, which encloses multiple types, or — more generically — a model, which encloses multiple elements. A container matcher can be declared side by side with other matchers:

+
+
+
ContainerMatcher Declaration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<contextConfiguration xmlns="http://capgemini.com"
+                      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                      version="1.0">
+    <triggers>
+        <trigger id="..." type="..." templateFolder="..." >
+            <containerMatcher type="..." value="..." retrieveObjectsRecursively="..." />
+            <matcher type="..." value="...">
+                <variableAssignment type="..." variable="..." value="..." />
+            </matcher>
+        </trigger>
+    </triggers>
+</contextConfiguration>
+
+
+
+
    +
  • +

    The attribute type declares a specific type of matcher, which has to be provided by the surrounding trigger interpreter. Have a look at the plug-in’s documentation, which also provides the used trigger type for more information about valid matcher and their functionalities.

    +
  • +
  • +

    The attribute value might contain any information necessary for processing the matcher’s functionality. Have a look at the relevant plug-in’s documentation for more detail.

    +
  • +
  • +

    The attribute retrieveObjectsRecursively (optional boolean) states, whether the children of the input should be retrieved recursively to find matching inputs for generation.

    +
  • +
+
+
+

The semantics of a container matcher are the following:

+
+
+
    +
  • +

    A <containerMatcher> does not declare any <variableAssignment> nodes

    +
  • +
  • +

    A <containerMatcher> matches an input if and only if one of its enclosed elements satisfies a set of <matcher> nodes of the same <trigger>

    +
  • +
  • +

    Inputs, which match a <containerMatcher> will cause a generation for each enclosed element

    +
  • +
+
+
+
+
+

Templates Configuration

+
+

The template configuration (templates.xml) specifies which templates exist and under which circumstances they will be generated. There are two possible configuration styles:

+
+
+
    +
  1. +

    Configure the template meta-data for each template file by template nodes

    +
  2. +
  3. +

    (since cobigen-core-v1.2.0): Configure templateScan nodes to automatically retrieve a default configuration for all files within a configured folder and possibly modify the automatically configured templates using templateExtension nodes

    +
  4. +
+
+
+

To get an intuition of the idea, the following will initially describe the first (more extensive) configuration style. Such a configuration root structure looks as follows:

+
+
+
Extensive Templates Configuration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<templatesConfiguration xmlns="http://capgemini.com"
+                        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                        version="1.0" templateEngine="FreeMarker">
+    <templates>
+            ...
+    </templates>
+    <increments>
+            ...
+    </increments>
+</templatesConfiguration>
+
+
+
+

The root node <templatesConfiguration> specifies two attributes. The attribute version provides further usability support and will be handled analogous to the version attribute of the context configuration. The optional attribute templateEngine specifies the template engine to be used for processing the templates (since `cobigen-core-4.0.0`). By default it is set to FreeMarker.

+
+
+

The node <templatesConfiguration> allows two different grouping nodes as children. First, there is the <templates> node, which groups all declarations of templates. Second, there is the <increments> node, which groups all declarations about increments.

+
+
+
Template Node
+
+

The <templates> node groups multiple <template> declarations, which enables further generation. Each template file should be registered at least once as a template to be considered.

+
+
+
Example Template Configuration
+
+
<templates>
+    <template name="..." destinationPath="..." templateFile="..." mergeStrategy="..." targetCharset="..." />
+    ...
+</templates>
+
+
+
+

A template declaration consists of multiple pieces of information:

+
+
+
    +
  • +

    The attribute name specifies an unique ID within the templates configuration, which will later be reused in the increment definitions.

    +
  • +
  • +

    The attribute destinationPath specifies the destination path the template will be generated to. It is possible to use all variables defined by variable assignments within the path declaration using the FreeMarker syntax ${variables.*}. While resolving the variable expressions, each dot within the value will be automatically replaced by a slash. This behavior is accounted for by the transformations of Java packages to paths as CobiGen has first been developed in the context of the Java world. Furthermore, the destination path variable resolution provides the following additional built-in operators analogue to the FreeMarker syntax:

    +
    +
      +
    • +

      ?cap_first analogue to FreeMarker

      +
    • +
    • +

      ?uncap_first analogue to FreeMarker

      +
    • +
    • +

      ?lower_case analogue to FreeMarker

      +
    • +
    • +

      ?upper_case analogue to FreeMarker

      +
    • +
    • +

      ?replace(regex, replacement) - Replaces all occurrences of the regular expression regex in the variable’s value with the given replacement string. (since cobigen-core v1.1.0)

      +
    • +
    • +

      ?removeSuffix(suffix) - Removes the given suffix in the variable’s value iff the variable’s value ends with the given suffix. Otherwise nothing will happen. (since cobigen-core v1.1.0)

      +
    • +
    • +

      ?removePrefix(prefix) - Analogue to ?removeSuffix but removes the prefix of the variable’s value. (since cobigen-core v1.1.0)

      +
    • +
    +
    +
  • +
  • +

    The attribute templateFile describes the relative path dependent on the template folder specified in the trigger to the template file to be generated.

    +
  • +
  • +

    The attribute mergeStrategy (optional) can be optionally specified and declares the type of merge mechanism to be used, when the destinationPath points to an already existing file. CobiGen by itself just comes with a mergeStrategy override, which enforces file regeneration in total. Additional available merge strategies have to be obtained from the different plug-in’s documentations (see here for java, XML, properties, and text). Default: not set (means not mergeable)

    +
  • +
  • +

    The attribute targetCharset (optional) can be optionally specified and declares the encoding with which the contents will be written into the destination file. This also includes reading an existing file at the destination path for merging its contents with the newly generated ones. Default: UTF-8

    +
  • +
+
+
+

(Since version 4.1.0) It is possible to reference external template (templates defined on another trigger), thanks to using <incrementRef …​> that are explained here.

+
+
+
+
Template Scan Node
+
+

(since cobigen-core-v1.2.0)

+
+
+

The second configuration style for template meta-data is driven by initially scanning all available templates and automatically configure them with a default set of meta-data. A scanning configuration might look like this:

+
+
+
Example of Template-scan configuration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<templatesConfiguration xmlns="http://capgemini.com"
+                        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                        version="1.2">
+    <templateScans>
+        <templateScan templatePath="templates" templateNamePrefix="prefix_" destinationPath="src/main/java"/>
+    </templateScans>
+</templatesConfiguration>
+
+
+
+

You can specify multiple <templateScan …​> nodes for different templatePaths and different templateNamePrefixes.

+
+
+
    +
  • +

    The name can be specified to later on reference the templates found by a template-scan within an increment. (since cobigen-core-v2.1.)

    +
  • +
  • +

    The templatePath specifies the relative path from the templates.xml to the root folder from which the template scan should be performed.

    +
  • +
  • +

    The templateNamePrefix (optional) defines a common id prefix, which will be added to all found and automatically configured templates.

    +
  • +
  • +

    The destinationPath defines the root folder all found templates should be generated to, whereas the root folder will be a prefix for all found and automatically configured templates.

    +
  • +
+
+
+

A templateScan will result in the following default configuration of templates. For each file found, a new template will be created virtually with the following default values:

+
+
+
    +
  • +

    id: file name without .ftl extension prefixed by templateNamePrefix from template-scan

    +
  • +
  • +

    destinationPath: relative file path of the file found with the prefix defined by destinationPath from template-scan. Furthermore,

    +
    +
      +
    • +

      it is possible to use the syntax for accessing and modifying variables as described for the attribute destinationPath of the template node, besides the only difference, that due to file system restrictions you have to replace all ?-signs (for built-ins) with #-signs.

      +
    • +
    • +

      the files to be scanned, should provide their final file extension by the following file naming convention: <filename>.<extension>.ftl Thus the file extension .ftl will be removed after generation.

      +
    • +
    +
    +
  • +
  • +

    templateFile: relative path to the file found

    +
  • +
  • +

    mergeStrategy: (optional) not set means not mergeable

    +
  • +
  • +

    targetCharset: (optional) defaults to UTF-8

    +
  • +
+
+
+

(Since version 4.1.0) It is possible to reference external templateScan (templateScans defined on another trigger), thanks to using <incrementRef …​> that are explained here.

+
+
+
+
Template Extension Node
+
+

(since cobigen-core-v1.2.0)

+
+
+

Additionally to the templateScan declaration it is easily possible to rewrite specific attributes for any scanned and automatically configured template.

+
+
+
Example Configuration of a TemplateExtension
+
+
<templates>
+    <templateExtension ref="prefix_FooClass.java" mergeStrategy="javamerge" />
+</templates>
+
+<templateScans>
+    <templateScan templatePath="foo" templateNamePrefix="prefix_" destinationPath="src/main/java/foo"/>
+</templateScans>
+
+
+
+

Let's assume that the above example declares a template-scan for the folder foo, which contains a file FooClass.java.ftl in any folder depth. Thus the template scan will automatically create a virtual template declaration with id=prefix_FooClass.java and further default configuration.

+
+
+

Using the templateExtension declaration above will reference the scanned template by the attribute ref and overrides the mergeStrategy of the automatically configured template by the value javamerge. Thus we are able to minimize the needed templates configuration.

+
+
+

(Since version 4.1.0) It is possible to reference external templateExtension (templateExtensions defined on another trigger), thanks to using <incrementRef …​> that are explained here.

+
+
+
+
Increment Node
+
+

The <increments> node groups multiple <increment> nodes, which can be seen as a collection of templates to be generated. An increment will be defined by a unique id and a human readable description.

+
+
+
+
<increments>
+    <increment id="..." description="...">
+        <incrementRef ref="..." />
+        <templateRef ref="..." />
+        <templateScanRef ref="..." />
+    </increment>
+</increments>
+
+
+
+

An increment might contain multiple increments and/or templates, which will be referenced using <incrementRef …​>, <templateRef …​>, resp. <templateScanRef …​> nodes. These nodes only declare the attribute ref, which will reference an increment, a template, or a template-scan by its id or name.

+
+
+

(Since version 4.1.0) A special case of <incrementRef …​> is the external incrementsRef. By default, <incrementRef …​> are used to reference increments defined in the same templates.xml file. So for example, we could have:

+
+
+
+
<increments>
+    <increment id="incA" description="...">
+        <incrementRef ref="incB" />
+    </increment>
+    <increment id="incB" description="...">
+        <templateRef .... />
+        <templateScan .... />
+    </increment>
+</increments>
+
+
+
+

However, if we want to reference an increment that it is not defined inside our templates.xml (an increment defined for another trigger), then we can use external incrementRef as shown below:

+
+
+
+
<increment name="..." description="...">
+    <incrementRef ref="trigger_id::increment_id"/>
+</increment>
+
+
+
+

The ref string is split using as delimiter ::. The first part of the string, is the trigger_id to reference. That trigger contains an increment_id. Currently, this functionality only works when both templates use the same kind of input file.

+
+
+
+
+

Java Template Logic

+
+

since cobigen-core-3.0.0 which is included in the Eclipse and Maven Plugin since version 2.0.0 +In addition, it is possible to implement more complex template logic by custom Java code. To enable this feature, you can simply import the CobiGen_Templates by clicking on Adapt Templates, turn it into a simple maven project (if it is not already) and implement any Java logic in the common maven layout (e.g. in the source folder src/main/java). Each Java class will be instantiated by CobiGen for each generation process. Thus, you can even store any state within a Java class instance during generation. However, there is currently no guarantee according to the template processing order.

+
+
+

As a consequence, you have to implement your Java classes with a public default (non-parameter) constructor to be used by any template. Methods of the implemented Java classes can be called within templates by the simple standard FreeMarker expression for calling Bean methods: SimpleType.methodName(param1). Until now, CobiGen will shadow multiple types with the same simple name non-deterministically. So please prevent yourself from that situation.

+
+
+

Finally, if you would like to do some reflection within your Java code accessing any type of the template project or any type referenced by the input, you should load classes by making use of the classloader of the util classes. CobiGen will take care of the correct classloader building including the classpath of the input source as well as of the classpath of the template project. If you use any other classloader or build it by your own, there will be no guarantee, that generation succeeds.

+
+
+
+

Template Properties

+
+

since cobigen-core-4.0.0 +Using a configuration with template scan, you can make use of properties in templates specified in property files named cobigen.properties next to the templates. The property files are specified as Java property files. Property files can be nested in sub-folders. Properties will be resolved including property shading. Properties defined nearest to the template to be generated will take precedence. +In addition, a cobigen.properties file can be specified in the target folder root (in eclipse plugin, this is equal to the source project root). These properties take precedence over template properties specified in the template folder.

+
+
+ + + + + +
+ + +It is not allowed to override context variables in cobigen.properties specifications as we have not found any interesting use case. This is most probably an error of the template designer, CobiGen will raise an error in this case. +
+
+
+
Multi module support or template target path redirects
+
+

since cobigen-core-4.0.0 +One special property you can specify in the template properties is the property relocate. It will cause the current folder and its sub-folders to be relocated at destination path resolution time. Take the following example:

+
+
+
+
folder
+  - sub1
+    Template.java.ftl
+    cobigen.properties
+
+
+
+

Let the cobigen.properties file contain the line relocate=../sub2/${cwd}. Given that, the relative destination path of Template.java.ftl will be resolved to folder/sub2/Template.java. Compare template scan configuration for more information about basic path resolution. The relocate property specifies a relative path from the location of the cobigen.properties. The ${cwd} placeholder will contain the remaining relative path from the cobigen.properties location to the template file. In this basic example it just contains Template.java.ftl, but it may even be any relative path including sub-folders of sub1 and its templates. +Given the relocate feature, you can even step out of the root path, which in general is the project/maven module the input is located in. This enables template designers to even address, e.g., maven modules located next to the module the input is coming from.

+
+
+
+
+

Basic Template Model

+
+

In addition to what is served by the different model builders of the different plug-ins, CobiGen provides a minimal model based on context variables as well as CobiGen properties. The following model is independent of the input format and will be served as a template model all the time:

+
+
+ +
+
+
+

Plugin Mechanism

+
+

Since cobigen-core 4.1.0, we changed the plug-in discovery mechanism. So far it was necessary to register new plugins programmatically, which introduces the need to let every tool integration, i.e. for eclipse or maven, be dependent on every plug-in, which should be released. This made release cycles take long time as all plug-ins have to be integrated into a final release of maven or eclipse integration.

+
+
+

Now, plug-ins are automatically discovered by the Java Service Loader mechanism from the classpath. This also effects the setup of eclipse and maven integration to allow modular releases of CobiGen in future. We are now able to provide faster rollouts of bug-fixes in any of the plug-ins as they can be released completely independently.

+
+
+
+
+
+

Plug-ins

+
+ +
+

==Java Plug-in +The CobiGen Java Plug-in comes with a new input reader for java artifacts, new java related trigger and matchers, as well as a merging mechanism for Java sources.

+
+
+
Trigger extension
+
+

The Java Plug-in provides a new trigger for Java related inputs. It accepts different representations as inputs (see Java input reader) and provides additional matching and variable assignment mechanisms. The configuration in the context.xml for this trigger looks like this:

+
+
+
    +
  • +

    type 'java'

    +
    +
    Example of a java trigger definition
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables Java elements as inputs.

    +
    +
  • +
+
+
+
Matcher types
+
+

With the trigger you might define matchers, which restrict the input upon specific aspects:

+
+
+
    +
  • +

    type fqn → full qualified name matching

    +
    +
    Example of a java trigger definition with a full qualified name matcher
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="fqn" value="(.+)\.persistence\.([^\.]+)\.entity\.([^\.]+)">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the full qualified name (fqn) of the declaring input class matches the given regular expression (value).

    +
    +
  • +
  • +

    type 'package' → package name of the input

    +
    +
    Example of a java trigger definition with a package name matcher
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="package" value="(.+)\.persistence\.([^\.]+)\.entity">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the package name (package) of the declaring input class matches the given regular expression (value).

    +
    +
  • +
  • +

    type 'expression'

    +
    +
    Example of a java trigger definition with a package name matcher
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="expression" value="instanceof java.lang.String">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the expression evaluates to true. Valid expressions are

    +
    +
  • +
  • +

    instanceof fqn: checks an 'is a' relation of the input type

    +
  • +
  • +

    isAbstract: checks, whether the input type is declared abstract

    +
  • +
+
+
+
+
Container Matcher types
+
+

Additionally, the java plugin provides the ability to match packages (containers) as follows:

+
+
+
    +
  • +

    type 'package'

    +
    +
    Example of a java trigger definition with a container matcher for packages
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <containerMatcher type="package" value="com\.example\.app\.component1\.persistence.entity" />
    +</trigger>
    +
    +
    +
    +

    The container matcher matches packages provided by the type com.capgemini.cobigen.javaplugin.inputreader.to.PackageFolder with a regular expression stated in the value attribute. (See containerMatcher semantics to get more information about containerMatchers itself.)

    +
    +
  • +
+
+
+
+
Variable Assignment types
+
+

Furthermore, it provides the ability to extract information from each input for further processing in the templates. The values assigned by variable assignments will be made available in template and the destinationPath of context.xml through the namespace variables.<key>. The Java Plug-in currently provides two different mechanisms:

+
+
+
    +
  • +

    type 'regex' → regular expression group

    +
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="fqn" value="(.+)\.persistence\.([^\.]+)\.entity\.([^\.]+)">
    +        <variableAssignment type="regex" key="rootPackage" value="1" />
    +        <variableAssignment type="regex" key="component" value="2" />
    +        <variableAssignment type="regex" key="pojoName" value="3" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value of the given regular expression group number to the given key.

+
+
+
    +
  • +

    type 'constant' → constant parameter

    +
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="fqn" value="(.+)\.persistence\.([^\.]+)\.entity\.([^\.]+)">
    +        <variableAssignment type="constant" key="domain" value="restaurant" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value to the key as a constant.

+
+
+
+
Java input reader
+
+

The CobiGen Java Plug-in implements an input reader for parsed java sources as well as for java Class<?> objects (loaded by reflection). So API users can pass Class<?> objects as well as JavaClass objects for generation. The latter depends on QDox, which will be used for parsing and merging java sources. For getting the right parsed java inputs you can easily use the JavaParserUtil, which provides static functionality to parse java files and get the appropriate JavaClass object.

+
+
+

Furthermore, due to restrictions on both inputs according to model building (see below), it is also possible to provide an array of length two as an input, which contains the Class<?> as well as the JavaClass object of the same class.

+
+
+Template object model +
+

No matter whether you use reflection objects or parsed java classes as input, you will get the following object model for template creation:

+
+
+
    +
  • +

    classObject ('Class' :: Class object of the Java input)

    +
  • +
  • +

    POJO

    +
    +
      +
    • +

      name ('String' :: Simple name of the input class)

      +
    • +
    • +

      package ('String' :: Package name of the input class)

      +
    • +
    • +

      canonicalName ('String' :: Full qualified name of the input class)

      +
    • +
    • +

      annotations ('Map<String, Object>' :: Annotations, which will be represented by a mapping of the full qualified type of an annotation to its value. To gain template compatibility, the key will be stored with '_' instead of '.' in the full qualified annotation type. Furthermore, the annotation might be recursively defined and thus be accessed using the same type of mapping. Example ${pojo.annotations.javax_persistence_Id})

      +
    • +
    • +

      JavaDoc ('Map<String, Object>') :: A generic way of addressing all available JavaDoc doclets and comments. The only fixed variable is comment (see below). All other provided variables depend on the doclets found while parsing. The value of a doclet can be accessed by the doclets name (e.g. ${…​JavaDoc.author}). In case of doclet tags that can be declared multiple times (currently @param and @throws), you will get a map, which you access in a specific way (see below).

      +
      +
        +
      • +

        comment ('String' :: JavaDoc comment, which does not include any doclets)

        +
      • +
      • +

        params ('Map<String,String> :: JavaDoc parameter info. If the comment follows proper conventions, the key will be the name of the parameter and the value being its description. You can also access the parameters by their number, as in arg0, arg1 etc, following the order of declaration in the signature, not in order of JavaDoc)

        +
      • +
      • +

        throws ('Map<String,String> :: JavaDoc exception info. If the comment follows proper conventions, the key will be the name of the thrown exception and the value being its description)

        +
      • +
      +
      +
    • +
    • +

      extendedType ('Map<String, Object>' :: The supertype, represented by a set of mappings (since cobigen-javaplugin v1.1.0)

      +
      +
        +
      • +

        name ('String' :: Simple name of the supertype)

        +
      • +
      • +

        canonicalName ('String' :: Full qualified name of the supertype)

        +
      • +
      • +

        package ('String' :: Package name of the supertype)

        +
      • +
      +
      +
    • +
    • +

      implementedTypes ('List<Map<String, Object>>' :: A list of all implementedTypes (interfaces) represented by a set of mappings (since cobigen-javaplugin v1.1.0)

      +
      +
        +
      • +

        interface ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          name ('String' :: Simple name of the interface)

          +
        • +
        • +

          canonicalName ('String' :: Full qualified name of the interface)

          +
        • +
        • +

          package ('String' :: Package name of the interface)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      fields ('List<Map<String, Object>>' :: List of fields of the input class) (renamed since cobigen-javaplugin v1.2.0; previously attributes)

      +
      +
        +
      • +

        field ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          name ('String' :: Name of the Java field)

          +
        • +
        • +

          type ('String' :: Type of the Java field)

          +
        • +
        • +

          canonicalType ('String' :: Full qualified type declaration of the Java field’s type)

          +
        • +
        • +

          'isId' (Deprecated :: boolean :: true if the Java field or its setter or its getter is annotated with the javax.persistence.Id annotation, false otherwise. Equivalent to ${pojo.attributes[i].annotations.javax_persistence_Id?has_content})

          +
        • +
        • +

          JavaDoc (see pojo.JavaDoc)

          +
        • +
        • +

          annotations (see pojo.annotations with the remark, that for fields all annotations of its setter and getter will also be collected)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      methodAccessibleFields ('List<Map<String, Object>>' :: List of fields of the input class or its inherited classes, which are accessible using setter and getter methods)

      +
      +
        +
      • +

        same as for field (but without JavaDoc!)

        +
      • +
      +
      +
    • +
    • +

      methods ('List<Map<String, Object>>' :: The list of all methods, whereas one method will be represented by a set of property mappings)

      +
      +
        +
      • +

        method ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          name ('String' :: Name of the method)

          +
        • +
        • +

          JavaDoc (see pojo.JavaDoc)

          +
        • +
        • +

          annotations (see pojo.annotations)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+

Furthermore, when providing a Class<?> object as input, the Java Plug-in will provide additional functionalities as template methods (deprecated):

+
+
+
    +
  1. +

    isAbstract(String fqn) (Checks whether the type with the given full qualified name is an abstract class. Returns a Boolean value.) (since cobigen-javaplugin v1.1.1) (deprecated)

    +
  2. +
  3. +

    isSubtypeOf(String subType, String superType) (Checks whether the subType declared by its full qualified name is a sub type of the superType declared by its full qualified name. Equals the Java expression subType instanceof superType and so also returns a Boolean value.) (since cobigen-javaplugin v1.1.1) (deprecated)

    +
  4. +
+
+
+
+Model Restrictions +
+

As stated before both inputs (Class<?> objects and JavaClass objects ) have their restrictions according to model building. In the following these restrictions are listed for both models, the ParsedJava Model which results from an JavaClass input and the ReflectedJava Model, which results from a Class<?> input.

+
+
+

It is important to understand, that these restrictions are only present if you work with either Parsed Model OR the Reflected Model. If you use the Maven Build Plug-in or Eclipse Plug-in these two models are merged together so that they can mutually compensate their weaknesses.

+
+
+Parsed Model +
+
    +
  • +

    annotations of the input’s supertype are not accessible due to restrictions in the QDox library. So pojo.methodAccessibleFields[i].annotations will always be empty for super type fields.

    +
  • +
  • +

    annotations' parameter values are available as Strings only (e.g. the Boolean value true is transformed into "true"). This also holds for the Reflected Model.

    +
  • +
  • +

    fields of "supertypes" of the input JavaClass are not available at all. So pojo.methodAccessibleFields will only contain the input type’s and the direct superclass’s fields.

    +
  • +
  • +

    [resolved, since cobigen-javaplugin 1.3.1] field types of supertypes are always canonical. So pojo.methodAccessibleFields[i].type will always provide the same value as pojo.methodAccessibleFields[i].canonicalType (e.g. java.lang.String instead of the expected String) for super type fields.

    +
  • +
+
+
+
+Reflected Model +
+
    +
  • +

    annotations' parameter values are available as Strings only (e.g. the Boolean value true is transformed into "true"). This also holds for the Parsed Model.

    +
  • +
  • +

    annotations are only available if the respective annotation has @Retention(value=RUNTIME), otherwise the annotations are to be discarded by the compiler or by the VM at run time. For more information see RetentionPolicy.

    +
  • +
  • +

    information about generic types is lost. E.g. a field’s/ methodAccessibleField’s type for List<String> can only be provided as List<?>.

    +
  • +
+
+
+
+
+
+
+
Merger extensions
+
+

The Java Plug-in provides two additional merging strategies for Java sources, which can be configured in the templates.xml:

+
+
+
    +
  • +

    Merge strategy javamerge (merges two Java resources and keeps the existing Java elements on conflicts)

    +
  • +
  • +

    Merge strategy javamerge_override (merges two Java resources and overrides the existing Java elements on conflicts)

    +
  • +
+
+
+

In general merging of two Java sources will be processed as follows:

+
+
+

Precondition of processing a merge of generated contents and existing ones is a common Java root class resp. surrounding class. If this is the case this class and all further inner classes will be merged recursively. Therefore, the following Java elements will be merged and conflicts will be resolved according to the configured merge strategy:

+
+
+
    +
  • +

    extends and implements relations of a class: Conflicts can only occur for the extends relation.

    +
  • +
  • +

    Annotations of a class: Conflicted if an annotation declaration already exists.

    +
  • +
  • +

    Fields of a class: Conflicted if there is already a field with the same name in the existing sources. (Will be replaced / ignored in total, also including annotations)

    +
  • +
  • +

    Methods of a class: Conflicted if there is already a method with the same signature in the existing sources. (Will be replaced / ignored in total, also including annotations)

    +
  • +
+
+ +
+

==Property Plug-in +The CobiGen Property Plug-in currently only provides different merge mechanisms for documents written in Java property syntax.

+
+
+
+
Merger extensions
+
+

There are two merge strategies for Java properties, which can be configured in the templates.xml:

+
+
+
    +
  • +

    Merge strategy propertymerge (merges two properties documents and keeps the existing properties on conflicts)

    +
  • +
  • +

    Merge strategy propertymerge_override (merges two properties documents and overrides the existing properties on conflicts)

    +
  • +
+
+
+

Both documents (base and patch) will be parsed using the Java 7 API and will be compared according their keys. Conflicts will occur if a key in the patch already exists in the base document.

+
+ +
+

==XML Plug-in +The CobiGen XML Plug-in comes with an input reader for XML artifacts, XML related trigger and matchers and provides different merge mechanisms for XML result documents.

+
+
+
+
Trigger extension
+
+

(since cobigen-xmlplugin v2.0.0)

+
+
+

The XML Plug-in provides a trigger for XML related inputs. It accepts XML documents as input (see XML input reader) and provides additional matching and variable assignment mechanisms. The configuration in the context.xml for this trigger looks like this:

+
+
+
    +
  • +

    type 'xml'

    +
    +
    Example of a XML trigger definition.
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables XML documents as inputs.

    +
    +
  • +
  • +

    type xpath

    +
    +
    Example of a xpath trigger definition.
    +
    +
    <trigger id="..." type="xpath" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables XML documents as container inputs, which consists of several sub-documents.

    +
    +
  • +
+
+
+
Container Matcher type
+
+

A ContainerMatcher checks if the input is a valid container.

+
+
+
    +
  • +

    xpath: type: xpath

    +
    +
    Example of a XML trigger definition with a node name matcher.
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <containerMatcher type="xpath" value="./uml:Model//packagedElement[@xmi:type='uml:Class']">
    +        ...
    +    </containerMatcher>
    +</trigger>
    +
    +
    +
    +

    Before applying any Matcher, this containerMatcher checks if the XML file contains a node uml:Model with a childnode packagedElement which contains an attribute xmi:type with the value uml:Class.

    +
    +
  • +
+
+
+
+
Matcher types
+
+

With the trigger you might define matchers, which restrict the input upon specific aspects:

+
+
+
    +
  • +

    XML: type nodename → document’s root name matching

    +
    +
    Example of a XML trigger definition with a node name matcher
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <matcher type="nodename" value="\D\w*">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the root name of the declaring input document matches the given regular expression (value).

    +
    +
  • +
  • +

    xpath: type: xpath → matching a node with a xpath value

    +
    +
    Example of a xpath trigger definition with a xpath matcher.
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <matcher type="xpath" value="/packagedElement[@xmi:type='uml:Class']">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the XML file contains a node /packagedElement where the xmi:type property equals uml:Class.

    +
    +
  • +
+
+
+
+
Variable Assignment types
+
+

Furthermore, it provides the ability to extract information from each input for further processing in the templates. The values assigned by variable assignments will be made available in template and the destinationPath of context.xml through the namespace variables.<key>. The XML Plug-in currently provides only one mechanism:

+
+
+
    +
  • +

    type 'constant' → constant parameter

    +
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <matcher type="nodename" value="\D\w*">
    +        <variableAssignment type="constant" key="domain" value="restaurant" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value to the key as a constant.

+
+
+
+
XML input reader
+
+

The CobiGen XML Plug-in implements an input reader for parsed XML documents. So API users can pass org.w3c.dom.Document objects for generation. For getting the right parsed XML inputs you can easily use the xmlplugin.util.XmlUtil, which provides static functionality to parse XML files or input streams and get the appropriate Document object.

+
+
+Template object +
+

Due to the heterogeneous structure an XML document can have, the XML input reader does not always create exactly the same model structure (in contrast to the java input reader). For example the model’s depth differs strongly, according to its input document. To allow navigational access to the nodes, the model also depends on the document’s element’s node names. All child elements with unique names are directly accessible via their names. In addition it is possible to iterate over all child elements with help of the child list Children. So it is also possible to access child elements with non unique names.

+
+
+

The XML input reader will create the following object model for template creation (EXAMPLEROOT, EXAMPLENODE1, EXAMPLENODE2, EXAMPLEATTR1,…​ are just used here as examples. Of course they will be replaced later by the actual node or attribute names):

+
+
+
    +
  • +

    ~EXAMPLEROOT~ ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      _nodeName_ ('String' :: Simple name of the root node)

      +
    • +
    • +

      _text_ ('String' :: Concatenated text content (PCDATA) of the root node)

      +
    • +
    • +

      TextNodes ('List<String>' :: List of all the root’s text node contents)

      +
    • +
    • +

      _at_~EXAMPLEATTR1~ ('String' :: String representation of the attribute’s value)

      +
    • +
    • +

      _at_~EXAMPLEATTR2~ ('String' :: String representation of the attribute’s value)

      +
    • +
    • +

      _at_…​

      +
    • +
    • +

      Attributes ('List<Map<String, Object>>' :: List of the root’s attributes

      +
      +
        +
      • +

        at ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          _attName_ ('String' :: Name of the attribute)

          +
        • +
        • +

          _attValue_ ('String' :: String representation of the attribute’s value)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      Children ('List<Map<String, Object>>' :: List of the root’s child elements

      +
      +
        +
      • +

        child ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          …​common element sub structure…​

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      ~EXAMPLENODE1~ ('Map<String, Object>' :: One of the root’s child nodes)

      +
      +
        +
      • +

        …​common element structure…​

        +
      • +
      +
      +
    • +
    • +

      ~EXAMPLENODE2~ ('Map<String, Object>' :: One of the root’s child nodes)

      +
      +
        +
      • +

        …​common element sub structure…​

        +
      • +
      • +

        ~EXAMPLENODE21~ ('Map<String, Object>' :: One of the nodes' child nodes)

        +
        +
          +
        • +

          …​common element structure…​

          +
        • +
        +
        +
      • +
      • +

        ~EXAMPLENODE…​~

        +
      • +
      +
      +
    • +
    • +

      ~EXAMPLENODE…​~

      +
    • +
    +
    +
  • +
+
+
+

In contrast to the java input reader, this XML input reader does currently not provide any additional template methods.

+
+
+
+
+
+
Merger extensions
+
+

The XML plugin uses the LeXeMe merger library to produce semantically correct merge products. The merge strategies can be found in the MergeType enum and can be configured in the templates.xml as a mergeStrategy attribute:

+
+
+
    +
  • +

    mergeStrategy xmlmerge

    +
    +
    Example of a template using the mergeStrategy xmlmerge
    +
    +
    <templates>
    +	<template name="..." destinationPath="..." templateFile="..." mergeStrategy="xmlmerge"/>
    +</templates>
    +
    +
    +
  • +
+
+
+

Currently only the document types included in LeXeMe are supported. +On how the merger works consult the LeXeMe Wiki.

+
+ +
+

==Text Merger Plug-in +The Text Merger Plug-in enables merging result free text documents to existing free text documents. Therefore, the algorithms are also very rudimentary.

+
+
+
+
Merger extensions
+
+

There are currently three main merge strategies that apply for the whole document:

+
+
+
    +
  • +

    merge strategy textmerge_append (appends the text directly to the end of the existing document) +_Remark_: If no anchors are defined, this will simply append the patch.

    +
  • +
  • +

    merge strategy textmerge_appendWithNewLine (appends the text after adding a new line break to the existing document) +_Remark_: empty patches will not result in appending a new line any more since v1.0.1 +Remark: Only suitable if no anchors are defined, otherwise it will simply act as textmerge_append

    +
  • +
  • +

    merge strategy textmerge_override (replaces the contents of the existing file with the patch) +_Remark_: If anchors are defined, override is set as the default mergestrategy for every text block if not redefined in an anchor specification.

    +
  • +
+
+
+
+
Anchor functionality
+
+

If a template contains text that fits the definition of anchor:${documentpart}:${mergestrategy}:anchorend or more specifically the regular expression (.*)anchor:([:]+):(newline_)?([:]+)(_newline)?:anchorend\\s*(\\r\\n|\\r|\\n), some additional functionality becomes available about specific parts of the incoming text and the way it will be merged with the existing text. These anchors always change things about the text to come up until the next anchor, text before it is ignored.

+
+
+

If no anchors are defined, the complete patch will be appended depending on your choice for the template in the file templates.xml.

+
+
+

[[anchordef]]

+
+
+
Anchor Definition
+
+

Anchors should always be defined as a comment of the language the template results in, as you do not want them to appear in your readable version, but cannot define them as FreeMarker comments in the template, or the merger will not know about them. +Anchors will also be read when they are not comments due to the merger being able to merge multiple types of text-based languages, thus making it practically impossible to filter for the correct comment declaration. That is why anchors have to always be followed by line breaks. That way there is a universal way to filter anchors that should have anchor functionality and ones that should appear in the text. +Remark: If the resulting language has closing tags for comments, they have to appear in the next line. +Remark: If you do not put the anchor into a new line, all the text that appears before it will be added to the anchor.

+
+
+
+
Document parts
+
+

In general, ${documentpart} is an id to mark a part of the document, that way the merger knows what parts of the text to merge with which parts of the patch (e.g. if the existing text contains anchor:table:${}:anchorend that part will be merged with the part tagged anchor:table:${}:anchorend of the patch).

+
+
+

If the same documentpart is defined multiple times, it can lead to errors, so instead of defining table multiple times, use table1, table2, table3 etc.

+
+
+

If a ${documentpart} is defined in the document but not in the patch and they are in the same position, it is processed in the following way: If only the documentparts header, test and footer are defined in the document in that order, and the patch contains header, order and footer, the resulting order will be header, test, order then footer.

+
+
+

The following documentparts have default functionality:

+
+
+
    +
  1. +

    anchor:header:${mergestrategy}:anchorend marks the beginning of a header, that will be added once when the document is created, but not again. +Remark: This is only done once, if you have header in another anchor, it will be ignored

    +
  2. +
  3. +

    anchor:footer:${mergestrategy}:anchorend marks the beginning of a footer, that will be added once when the document is created, but not again. Once this is invoked, all following text will be included in the footer, including other anchors.

    +
  4. +
+
+
+

[[mergestrategies]]

+
+
+
+
Mergestrategies
+
+

Mergestrategies are only relevant in the patch, as the merger is only interested in how text in the patch should be managed, not how it was managed in the past.

+
+
+
    +
  1. +

    anchor:${documentpart}::anchorend will use the merge strategy from templates.xml, see Merger-Extensions.

    +
  2. +
  3. +

    anchor:${}:${mergestrategy}_newline:anchorend or anchor:${}:newline_${mergestrategy}:anchorend states that a new line should be appended before or after this anchors text, depending on where the newline is (before or after the mergestrategy). anchor:${documentpart}:newline:anchorend puts a new line after the anchors text. +Remark: Only works with appending strategies, not merging/replacing ones. These strategies currently include: appendbefore, append/appendafter

    +
  4. +
  5. +

    anchor:${documentpart}:override:anchorend means that the new text of this documentpart will replace the existing one completely

    +
  6. +
  7. +

    anchor:${documentpart}:appendbefore:anchorend or anchor:${documentpart}:appendafter:anchorend/anchor:${documentpart}:append:anchorend specifies whether the text of the patch should come before the existing text or after.

    +
  8. +
+
+
+
+
+
Usage Examples
+
+
General
+
+

Below you can see how a file with anchors might look like (using adoc comment tags), with examples of what you might want to use the different functions for.

+
+
+
+
// anchor:header:append:anchorend
+
+Table of contents
+Introduction/Header
+
+// anchor:part1:appendafter:anchorend
+
+Lists
+Table entries
+
+// anchor:part2:nomerge:anchorend
+
+Document Separators
+adoc table definitions
+
+// anchor:part3:override:anchorend
+
+Anything that you only want once but changes from time to time
+
+// anchor:footer:append:anchorend
+
+Copyright Info
+Imprint
+
+
+
+
+
Merging
+
+

In this section you will see a comparison on what files look like before and after merging

+
+
+override +
+
Before
+
+
// anchor:part:override:anchorend
+Lorem Ipsum
+
+
+
+
Patch
+
+
// anchor:part:override:anchorend
+Dolor Sit
+
+
+
+
After
+
+
// anchor:part:override:anchorend
+Dolor Sit
+
+
+
+
+Appending +
+
Before
+
+
// anchor:part:append:anchorend
+Lorem Ipsum
+// anchor:part2:appendafter:anchorend
+Lorem Ipsum
+// anchor:part3:appendbefore:anchorend
+Lorem Ipsum
+
+
+
+
Patch
+
+
// anchor:part:append:anchorend
+Dolor Sit
+// anchor:part2:appendafter:anchorend
+Dolor Sit
+// anchor:part3:appendbefore:anchorend
+Dolor Sit
+
+
+
+
After
+
+
// anchor:part:append:anchorend
+Lorem Ipsum
+Dolor Sit
+// anchor:part2:appendafter:anchorend
+Lorem Ipsum
+Dolor Sit
+// anchor:part3:appendbefore:anchorend
+Dolor Sit
+Lorem Ipsum
+
+
+
+
+Newline +
+
Before
+
+
// anchor:part:newline_append:anchorend
+Lorem Ipsum
+// anchor:part:append_newline:anchorend
+Lorem Ipsum
+(end of file)
+
+
+
+
Patch
+
+
// anchor:part:newline_append:anchorend
+Dolor Sit
+// anchor:part:append_newline:anchorend
+Dolor Sit
+(end of file)
+
+
+
+
After
+
+
// anchor:part:newline_append:anchorend
+Lorem Ipsum
+
+Dolor Sit
+// anchor:part:append_newline:anchorend
+Lorem Ipsum
+Dolor Sit
+
+(end of file)
+
+
+
+
+
+
+
Error List
+
+
    +
  • +

    If there are anchors in the text, but either base or patch do not start with one, the merging process will be aborted, as text might go missing this way.

    +
  • +
  • +

    Using _newline or newline_ with mergestrategies that don’t support it, like override, will abort the merging process. See <<`mergestrategies`,Merge Strategies>> →2 for details.

    +
  • +
  • +

    Using undefined mergestrategies will abort the merging process.

    +
  • +
  • +

    Wrong anchor definitions, for example anchor:${}:anchorend will abort the merging process, see <<`anchordef`,Anchor Definition>> for details.

    +
  • +
+
+ +
+

==JSON Plug-in +At the moment the plug-in can be used for merging generic JSON files depending on the merge strategy defined in the templates.

+
+
+
+
Merger extensions
+
+

There are currently these merge strategies:

+
+
+

Generic JSON Merge

+
+
+
    +
  • +

    merge strategy jsonmerge (add the new code respecting the existent in case of conflict)

    +
  • +
  • +

    merge strategy jsonmerge_override (add the new code overwriting the existent in case of conflict)

    +
    +
      +
    1. +

      JsonArray’s will be ignored / replaced in total

      +
    2. +
    3. +

      JsonObjects in conflict will be processed recursively ignoring adding non existent elements.

      +
    4. +
    +
    +
  • +
+
+
+
+
Merge Process
+
+
Generic JSON Merging
+
+

The merge process will be:

+
+
+
    +
  1. +

    Add non existent JSON Objects from patch file to base file.

    +
  2. +
  3. +

    For existent object in both files, will add non existent keys from patch to base object. This process will be done recursively for all existent objects.

    +
  4. +
  5. +

    For JSON Arrays existent in both files, the arrays will be just concatenated.

    +
  6. +
+
+ +
+

==TypeScript Plug-in

+
+
+

The TypeScript Plug-in enables merging result TS files to existing ones. This plug-in is currently used to generate an Angular2 client with all CRUD functionalities enabled. The plug-in also generates i18n functionality by appending the ES or EN suffix to the end of each word, to make the developer aware that these words must be translated to the corresponding language. Currently, the generation of the Angular2 client requires an ETO java object as input, so there is no need to implement an input reader for ts artifacts for the moment.

+
+
+
+
+
Trigger Extensions
+
+

As for the Angular2 generation the input is a java object, the trigger expressions (including matchers and variable assignments) are implemented as Java.

+
+
+
+
Merger extensions
+
+

This plugin uses the TypeScript Merger to merge files. There are currently two merge strategies:

+
+
+
    +
  • +

    merge strategy tsmerge (add the new code respecting the existing in case of conflict)

    +
  • +
  • +

    merge strategy tsmerge_override (add the new code overwriting the existent in case of conflict)

    +
  • +
+
+
+

The merge algorithm mainly handles the following AST nodes:

+
+
+
    +
  • +

    ImportDeclaration

    +
    +
      +
    • +

      Will add non existent imports whatever the merge strategy is.

      +
    • +
    • +

      For different imports from same module, the import clauses will be merged.

      +
      +
      +
      import { a } from 'b';
      +import { c } from 'b';
      +//Result
      +import { a, c } from 'b';
      +
      +
      +
    • +
    +
    +
  • +
  • +

    ClassDeclaration

    +
    +
      +
    • +

      Adds non existent base properties from patch based on the name property.

      +
    • +
    • +

      Adds non existent base methods from patch based on the name signature.

      +
    • +
    • +

      Adds non existent annotations to class, properties and methods.

      +
    • +
    +
    +
  • +
  • +

    PropertyDeclaration

    +
    +
      +
    • +

      Adds non existent decorators.

      +
    • +
    • +

      Merge existent decorators.

      +
    • +
    • +

      With override strategy, the value of the property will be replaced by the patch value.

      +
    • +
    +
    +
  • +
  • +

    MethodDeclaration

    +
    +
      +
    • +

      With override strategy, the body will be replaced.

      +
    • +
    • +

      The parameters will be merged.

      +
    • +
    +
    +
  • +
  • +

    ParameterDeclaration

    +
    +
      +
    • +

      Replace type and modifiers with override merge strategy, adding non existent from patch into base.

      +
    • +
    +
    +
  • +
  • +

    ConstructorDeclaration

    +
    +
      +
    • +

      Merged in the same way as Method is.

      +
    • +
    +
    +
  • +
  • +

    FunctionDeclaration

    +
    +
      +
    • +

      Merged in the same way as Method is.

      +
    • +
    +
    +
  • +
+
+
+
+
Input reader
+
+

The TypeScript input reader is based on the one that the TypeScript merger uses. The current extensions are additional module fields indicating from which library each entity originates. +module: null specifies a standard entity or type such as string or number.

+
+
+
Object model
+
+

To get a first impression of the created object after parsing, let us start with analyzing a small example, namely the parsing of a simple type-orm model written in TypeScript.

+
+
+
+
import {Entity, PrimaryGeneratedColumn, Column} from "typeorm";
+
+@Entity()
+export class User {
+
+    @PrimaryGeneratedColumn()
+    id: number;
+
+    @Column()
+    firstName: string;
+
+    @Column()
+    lastName: string;
+
+    @Column()
+    age: number;
+
+}
+
+
+
+

The returned object has the following structure

+
+
+
+
{
+  "importDeclarations": [
+    {
+      "module": "typeorm",
+      "named": [
+        "Entity",
+        "PrimaryGeneratedColumn",
+        "Column"
+      ],
+      "spaceBinding": true
+    }
+  ],
+  "classes": [
+    {
+      "identifier": "User",
+      "modifiers": [
+        "export"
+      ],
+      "decorators": [
+        {
+          "identifier": {
+            "name": "Entity",
+            "module": "typeorm"
+          },
+          "isCallExpression": true
+        }
+      ],
+      "properties": [
+        {
+          "identifier": "id",
+          "type": {
+            "name": "number",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "PrimaryGeneratedColumn",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        },
+        {
+          "identifier": "firstName",
+          "type": {
+            "name": "string",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "Column",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        },
+        {
+          "identifier": "lastName",
+          "type": {
+            "name": "string",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "Column",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        },
+        {
+          "identifier": "age",
+          "type": {
+            "name": "number",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "Column",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        }
+      ]
+    }
+  ]
+}
+
+
+
+

If we only consider the first level of the JSON response, we spot two lists of imports and classes, providing information about the only import statement and the only User class, respectively. Moving one level deeper we observe that:

+
+
+
    +
  • +

    Every import statement is translated to an import declaration entry in the declarations list, containing the module name, as well as a list of entities imported from the given module.

    +
  • +
  • +

    Every class entry provides besides the class identifier, its decoration(s), modifier(s), as well as a list of properties that the original class contains.

    +
  • +
+
+
+

Note that, for each given type, the module from which it is imported is also given as in

+
+
+
+
  "identifier": {
+    "name": "Column",
+    "module": "typeorm"
+  }
+
+
+
+

Returning to the general case, independently of the given TypeScript file, an object having the following structure will be created.

+
+
+
    +
  • +

    importDeclarations: A list of import statement as described above

    +
  • +
  • +

    exportDeclarations: A list of export declarations

    +
  • +
  • +

    classes: A list of classes extracted from the given file, where each entry is full of class specific fields, describing its properties and decorator for example.

    +
  • +
  • +

    interfaces: A list of interfaces.

    +
  • +
  • +

    variables: A list of variables.

    +
  • +
  • +

    functions: A list of functions.

    +
  • +
  • +

    enums: A list of enumerations.

    +
  • +
+
+ +
+

==HTML Plug-in

+
+
+

The HTML Plug-in enables merging result HTML files to existing ones. This plug-in is currently used to generate an Angular2 client. Currently, the generation of the Angular2 client requires an ETO java object as input, so there is no need to implement an input reader for ts artifacts for the moment.

+
+
+
+
+
Trigger Extensions
+
+

As for the Angular2 generation the input is a java object, the trigger expressions (including matchers and variable assignments) are implemented as Java.

+
+
+
+
Merger extensions
+
+

There are currently two merge strategies:

+
+
+
    +
  • +

    merge strategy html-ng* (add the new code respecting the existing in case of conflict)

    +
  • +
  • +

    merge strategy html-ng*_override (add the new code overwriting the existent in case of conflict)

    +
  • +
+
+
+

The merging of two Angular2 files will be processed as follows:

+
+
+

The merge algorithm handles the following AST nodes:

+
+
+
    +
  • +

    md-nav-list

    +
  • +
  • +

    a

    +
  • +
  • +

    form

    +
  • +
  • +

    md-input-container

    +
  • +
  • +

    input

    +
  • +
  • +

    name (for name attribute)

    +
  • +
  • +

    ngIf

    +
  • +
+
+
+ + + + + +
+ + +Be aware, that the HTML merger is not generic and only handles the described tags needed for merging code of a basic Angular client implementation. For future versions, it is planned to implement a more generic solution. +
+
+
+
+
+
+

Maven Build Integration

+
+ +
+

==Maven Build Integration

+
+
+

For maven integration of CobiGen you can include the following build plugin into your build:

+
+
+
Build integration of CobiGen
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>com.devonfw.cobigen</groupId>
+      <artifactId>cobigen-maven-plugin</artifactId>
+      <version>VERSION-YOU-LIKE</version>
+      <executions>
+        <execution>
+          <id>cobigen-generate</id>
+          <phase>generate-resources</phase>
+          <goals>
+            <goal>generate</goal>
+          </goals>
+        </execution>
+      </executions>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+

Available goals

+
+
+
    +
  • +

    generate: Generates contents configured by the standard non-compiled configuration folder. Thus generation can be controlled/configured due to an location URI of the configuration and template or increment ids to be generated for a set of inputs.

    +
  • +
+
+
+

Available phases are all phases, which already provide compiled sources such that CobiGen can perform reflection on it. Thus possible phases are for example package, site.

+
+
+

Provide Template Set

+
+

For generation using the CobiGen maven plug-in, the CobiGen configuration can be provided in two different styles:

+
+
+
    +
  1. +

    By a configurationFolder, which should be available on the file system whenever you are running the generation. The value of configurationFolder should correspond to the maven file path syntax.

    +
    +
    Provide CobiGen configuration by configuration folder (file)
    +
    +
    <build>
    +  <plugins>
    +    <plugin>
    +      ...
    +      <configuration>
    +        <configurationFolder>cobigen-templates</configurationFolder>
    +      </configuration>
    +       ...
    +     </plugin>
    +  </plugins>
    +</build>
    +
    +
    +
  2. +
  3. +

    By maven dependency, whereas the maven dependency should stick on the same conventions as the configuration folder. This explicitly means that it should contain non-compiled resources as well as the context.xml on top-level.

    +
    +
    Provide CobiGen configuration by maven dependency (jar)
    +
    +
    <build>
    +  <plugins>
    +    <plugin>
    +      ...
    +      <dependencies>
    +        <dependency>
    +          <groupId>com.devonfw.cobigen</groupId>
    +          <artifactId>templates-XYZ</artifactId>
    +          <version>VERSION-YOU-LIKE</version>
    +        </dependency>
    +      </dependencies>
    +      ...
    +    </plugin>
    +  </plugins>
    +</build>
    +
    +
    +
    +

    We currently provide a generic deployed version of the templates on the devonfw-nexus for Register Factory (<artifactId>cobigen-templates-rf</artifactId>) and for the devonfw itself (<artifactId>cobigen-templates-devonfw</artifactId>).

    +
    +
  4. +
+
+
+
+

Build Configuration

+
+

Using the following configuration you will be able to customize your generation as follows:

+
+
+
    +
  • +

    <destinationRoot> specifies the root directory the relative destinationPath of CobiGen templates configuration should depend on. Default ${basedir}

    +
  • +
  • +

    <inputPackage> declares a package name to be used as input for batch generation. This refers directly to the CobiGen Java Plug-in container matchers of type package configuration.

    +
  • +
  • +

    <inputFile> declares a file to be used as input. The CobiGen maven plug-in will try to parse this file to get an appropriate input to be interpreted by any CobiGen plug-in.

    +
  • +
  • +

    <increment> specifies an increment ID to be generated. You can specify one single increment with content ALL to generate all increments matching the input(s).

    +
  • +
  • +

    <template> specifies a template ID to be generated. You can specify one single template with content ALL to generate all templates matching the input(s).

    +
  • +
  • +

    <forceOverride> specifies an overriding behavior, which enables non-mergeable resources to be completely rewritten by generated contents. For mergeable resources this flag indicates, that conflicting fragments during merge will be replaced by generated content. Default: false

    +
  • +
  • +

    <failOnNothingGenerated> specifies whether the build should fail if the execution does not generate anything.

    +
  • +
+
+
+
Example for a simple build configuration
+
+
<build>
+  <plugins>
+    <plugin>
+       ...
+      <configuration>
+        <destinationRoot>${basedir}</destinationRoot>
+        <inputPackages>
+          <inputPackage>package.to.be.used.as.input</inputPackage>
+        </inputPackages>
+        <inputFiles>
+          <inputFile>path/to/file/to/be/used/as/input</inputFile>
+        </inputFiles>
+        <increments>
+          <increment>IncrementID</increment>
+        </increments>
+        <templates>
+          <template>TemplateID</template>
+        </templates>
+        <forceOverride>false</forceOverride>
+      </configuration>
+        ...
+    </plugin>
+  </plugins>
+</build>
+
+
+
+
+

Plugin Injection Since v3

+
+

Since version 3.0.0, the plug-in mechanism has changed to support modular releases of the CobiGen plug-ins. Therefore, you need to add all plug-ins to be used for generation. Take the following example to get the idea:

+
+
+
Example of a full configuration including plugins
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>com.devonfw.cobigen</groupId>
+      <artifactId>cobigen-maven-plugin</artifactId>
+      <version>VERSION-YOU-LIKE</version>
+      <executions>
+        ...
+      </executions>
+      <configuration>
+        ...
+      </configuration>
+      <dependencies>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>templates-devon4j</artifactId>
+          <version>2.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>tempeng-freemarker</artifactId>
+          <version>1.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>javaplugin</artifactId>
+          <version>1.6.0</version>
+        </dependency>
+      </dependencies>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+
+

A full example

+
+
    +
  1. +

    A complete maven configuration example

    +
  2. +
+
+
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>com.devonfw.cobigen</groupId>
+      <artifactId>cobigen-maven-plugin</artifactId>
+      <version>6.0.0</version>
+      <executions>
+        <execution>
+          <id>generate</id>
+          <phase>package</phase>
+          <goals>
+            <goal>generate</goal>
+          </goals>
+        </execution>
+      </executions>
+      <configuration>
+        <inputFiles>
+          <inputFile>src/main/java/io/github/devonfw/cobigen/generator/dataaccess/api/InputEntity.java</inputFile>
+        </inputFiles>
+        <increments>
+          <increment>dataaccess_infrastructure</increment>
+          <increment>daos</increment>
+        </increments>
+        <failOnNothingGenerated>false</failOnNothingGenerated>
+      </configuration>
+      <dependencies>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>templates-devon4j</artifactId>
+          <version>2.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>tempeng-freemarker</artifactId>
+          <version>2.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>javaplugin</artifactId>
+          <version>1.6.0</version>
+        </dependency>
+      </dependencies>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+
+
+
+

Eclipse Integration

+
+ +
+

==Installation

+
+
+
+
+

Remark: CobiGen is preinstalled in the devonfw/devon-ide.

+
+
+
+
+

Preconditions

+
+
    +
  • +

    Eclipse 4.x

    +
  • +
  • +

    Java 7 Runtime (for starting eclipse with CobiGen). This is independent from the target version of your developed code.

    +
  • +
+
+
+
+

Installation steps

+
+
    +
  1. +

    Open the eclipse installation dialog
    +menu bar → HelpInstall new Software…​

    +
    +

    01 install new software

    +
    +
  2. +
  3. +

    Open CobiGen’s update site
    +Insert the update site of your interest into the field Work with and press Add …​
    +Unless you know what you are doing we recommend you install every plugin as shown in the picture below.

    +
    + +
    +
  4. +
  5. +

    Follow the installation wizard
    +Select CobiGen Eclipse Plug-inNextNext → accept the license → FinishOKYes

    +
  6. +
  7. +

    Once installed, a new menu entry named "CobiGen" will show up in the Package Explorer’s context menu. In the sub menu there will be the Generate…​ command, which may ask you to update the templates, and then you can start the generation wizard of CobiGen. You can adapt the templates by clicking on Adapt Templates, which will give you the possibility to import the CobiGen_Templates automatically so that you can modify them.

    +
  8. +
  9. +

    Checkout (clone) your project’s templates folder or use the current templates released with CobiGen (https://github.com/devonfw/cobigen/tree/master/cobigen-templates) and then choose Import -> General -> Existing Projects into Workspace to import the templates into your workspace.

    +
  10. +
  11. +

    Now you can start generating. To get an introduction of CobiGen try the devon4j templates and work on the devon4j sample application. There you might want to start with Entity objects as a selection to run CobiGen with, which will give you a good overview of what CobiGen can be used for right out of the box in devon4j based development. If you need some more introduction in how to come up with your templates and increments, please be referred to the documentation of the context configuration and the templates configuration

    +
  12. +
+
+
+

Dependent on your context configuration menu entry Generate…​ may be gray out or not. See for more information about valid selections for generation.

+
+
+
+

Updating

+
+

In general updating CobiGen for eclipse is done via the update mechanism of eclipse directly, as shown on image below:

+
+
+

03 update software

+
+
+

Upgrading eclipse CobiGen plug-in to v3.0.0 needs some more attention of the user due to a changed plug-in architecture of CobiGen’s core module and the eclipse integration. Eventually, we were able to provide any plug-in of CobiGen separately as its own eclipse bundle (fragment), which is automatically discovered by the main CobiGen Eclipse plug-in after installation.

+
+ +
+

==Usage

+
+
+

CobiGen has two different generation modes depending on the input selected for generation. The first one is the simple mode, which will be started if the input contains only one input artifact, e.g. for Java an input artifact currently is a Java file. The second one is the batch mode, which will be started if the input contains multiple input artifacts, e.g. for Java this means a list of files. In general this means also that the batch mode might be started when selecting complex models as inputs, which contain multiple input artifacts. The latter scenario has only been covered in the research group, yet.

+
+
+
+

Simple Mode

+
+

Selecting the menu entry Generate…​ the generation wizard will be opened:

+
+
+

generate wizard page1

+
+
+

The left side of the wizard shows all available increments, which can be selected to be generated. Increments are a container like concept encompassing multiple files to be generated, which should result in a semantically closed generation output. +On the right side of the wizard all files are shown, which might be effected by the generation - dependent on the increment selection of files on the left side. The type of modification of each file will be encoded into following color scheme if the files are selected for generation:

+
+
+
    +
  • +

    green: files, which are currently non-existent in the file system. These files will be created during generation

    +
  • +
  • +

    yellow: files, which are currently existent in the file system and which are configured to be merged with generated contents.

    +
  • +
  • +

    red: files, which are currently existent in the file system. These files will be overwritten if manually selected.

    +
  • +
  • +

    no color: files, which are currently existent in the file system. Additionally files, which were deselected and thus will be ignored during generation.

    +
  • +
+
+
+

Selecting an increment on the left side will initialize the selection of all shown files to be generated on the right side, whereas green and yellow categorized files will be selected initially. A manual modification of the pre-selection can be performed by switching to the customization tree using the Customize button on the right lower corner.

+
+
+
+
+

Optional: If you want to customize the generation object model of a Java input class, you might continue with the Next > button instead of finishing the generation wizard. The next generation wizard page is currently available for Java file inputs and lists all non-static fields of the input. deselecting entries will lead to an adapted object model for generation, such that deselected fields will be removed in the object model for generation. By default all fields will be included in the object model.

+
+
+
+
+

Using the Finish button, the generation will be performed. Finally, CobiGen runs the eclipse internal organize imports and format source code for all generated and modified sources. Thus it is possible that — especially during organize imports — a dialog opens if some types could not be determined automatically. This dialog can be easily closed by pressing Continue. If the generation is finished, the Success! dialog will pop up.

+
+
+
+

Batch mode

+
+

If there are multiple input elements selected, e.g., Java files, CobiGen will be started in batch mode. For the generation wizard dialog this means, that the generation preview will be constrained to the first selected input element. It does not preview the generation for each element of the selection or of a complex input. The selection of the files to be generated will be generated for each input element analogously afterwards.

+
+
+

generate wizard page1 batch

+
+
+

Thus the color encoding differs also a little bit:

+
+
+
    +
  • +

    yellow: files, which are configured to be merged.

    +
  • +
  • +

    red: files, which are not configured with any merge strategy and thus will be created if the file does not exist or overwritten if the file already exists

    +
  • +
  • +

    no color: files, which will be ignored during generation

    +
  • +
+
+
+

Initially all possible files to be generated will be selected.

+
+
+
+

Health Check

+
+

To check whether CobiGen runs appropriately for the selected element(s) the user can perform a Health Check by activating the respective menu entry as shown below.

+
+
+

health check menu entry

+
+
+

The simple Health Check includes 3 checks. As long as any of these steps fails, the Generate menu entry is grayed out.

+
+
+

The first step is to check whether the generation configuration is available at all. If this check fails you will see the following message:

+
+
+

health check no templates

+
+
+

This indicates that there is no project named CobiGen_Templates available in the current workspace. To run CobiGen appropriately, it is necessary to have a configuration project named CobiGen_Templates imported into your workspace. For more information see chapter Eclipse Installation.

+
+
+

The second step is to check whether the template project includes a valid context.xml. If this check fails, you will see the following message:

+
+
+

health check invalid config

+
+
+

This means that either your context.xml

+
+
+
    +
  • +

    does not exist (or has another name)

    +
  • +
  • +

    or it is not a valid one in any released version of CobiGen

    +
  • +
  • +

    or there is simply no automatic routine of upgrading your context configuration to a valid state.

    +
  • +
+
+
+

If all this is not the case, i.e., there is a context.xml which can be successfully read by CobiGen, you might get the following information:

+
+
+

health check old context

+
+
+

This means that your context.xml is available with the correct name but it is outdated (belongs to an older CobiGen version). In this case just click on Upgrade Context Configuration to get the latest version.

+
+
+
+
+

Remark: This will create a backup of your current context configuration and convert your old configuration to the new format. The upgrade will remove all comments from the file, which can be retrieved later on from the backup. If the creation of the backup fails, you will be asked to continue or to abort.

+
+
+
+
+

The third step checks whether there are templates for the selected element(s). If this check fails, you will see the following message:

+
+
+

health check no matching triggers

+
+
+

This indicates that no trigger has been activated which matches the current selection. The reason might be that your selection is faulty or that you imported the wrong template project (e.g., you are working on a devon4j project, but imported the templates for the Register Factory). If you are a template developer, have a look at the trigger configuration and at the corresponding available plug-in implementations of triggers, e.g., the Java Plug-in or XML Plug-in.

+
+
+

If all the checks are passed you see the following message:

+
+
+

health check all OK

+
+
+

In this case everything is OK and the Generate button is not grayed out anymore so that you are able to trigger it and see the [simple-mode].

+
+
+

In addition to the basic check of the context configuration, you also have the opportunity to perform an Advanced Health Check, which will check all available templates configurations (templates.xml) of path-depth=1 from the configuration project root according to their compatibility.

+
+
+

health check advanced up to date

+
+
+

Analogous to the upgrade of the context configuration, the Advanced Health Check will also provide upgrade functionality for templates configurations if available.

+
+
+
+

Update Templates

+
+

Update Templates: Select an entity file, right-click, and choose CobiGen Update Templates; then click Download. After a successful download, a confirmation message will appear.

+
+
+
+

Adapt Templates

+
+

Adapt Templates: Select any file, right-click, and choose `CobiGen → Adapt Templates`. If the CobiGen templates jar is not available, it will be downloaded automatically. If the CobiGen templates are already present, the existing templates in the workspace will be overridden. Click OK; after a successful import, a confirmation message will appear.

+
+
+

Finally, please change the Java version of the project to 1.8 so that you don’t have any compilation errors.

+
+ +
+

==Logging

+
+
+

If you have any problem with the CobiGen eclipse plug-in, you might want to enable logging to provide more information for further problem analysis. This can be done easily by adding the logback.xml to the root of the CobiGen_Templates configuration folder. The file should contain at least the following contents, whereas you should specify an absolute path to the target log file (at the TODO). If you are using the cobigen-templates project, you might have the contents already specified but partially commented.

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<!-- This file is for logback classic. The file contains the configuration for sl4j logging -->
+<configuration>
+    <appender name="FILE" class="ch.qos.logback.core.FileAppender">
+        <file><!-- TODO choose your log file location --></file>
+        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
+            <Pattern>%n%date %d{HH:mm:ss.SSS} [%thread] %-5level %logger - %msg%n
+            </Pattern>
+        </encoder>
+    </appender>
+    <root level="DEBUG">
+        <appender-ref ref="FILE" />
+    </root>
+</configuration>
+
+
+
+
+
+
+

Template Development

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/Guide-to-the-Reader.html b/docs/cobigen/1.0/Guide-to-the-Reader.html new file mode 100644 index 00000000..755194bc --- /dev/null +++ b/docs/cobigen/1.0/Guide-to-the-Reader.html @@ -0,0 +1,290 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==Guide to the Reader

+
+
+

Dependent on the intention you are reading this document, you might be most interested in the following chapters:

+
+
+
    +
  • +

    If this is your first contact with CobiGen, you will be interested in the general purpose of CobiGen, in the licensing of CobiGen, as well as in the Shared Service provided for CobiGen. Additionally, there are some general use cases, which are currently implemented and maintained to be used out of the box.

    +
  • +
  • +

    As a user of the CobiGen Eclipse integration, you should focus on the Installation and Usage chapters to get a good introduction about how to use CobiGen in eclipse.

    +
  • +
  • +

    As a user of the Maven integration, you should focus on the Maven configuration chapter, which guides you through the integration of CobiGen into your build configuration.

    +
  • +
  • +

    If you like to adapt the configuration of CobiGen, you have to step deeper into the configuration guide as well as into the plug-in configuration extensions for the Java Plug-in, XML-Plugin, Java Property Plug-in, as well as for the Text-Merger Plug-in.

    +
  • +
  • +

    Finally, if you want to develop your own templates, you will be thankful for helpful links in addition to the plug-ins documentation as referenced in the previous point.

    +
  • +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/Home.html b/docs/cobigen/1.0/Home.html new file mode 100644 index 00000000..c32ad58a --- /dev/null +++ b/docs/cobigen/1.0/Home.html @@ -0,0 +1,448 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +

CobiGen - Code-based incremental Generator

+
+

Overview

+
+
+

CobiGen is a generic incremental generator for end to end code generation tasks, mostly used in Java projects. Due to its template-based approach, CobiGen generates any set of text-based documents and document fragments.

+
+
+

Input (currently):

+
+
+
    +
  • +

    Java classes

    +
  • +
  • +

    XML-based files

    +
  • +
  • +

    OpenAPI documents

    +
  • +
  • +

    Possibly more inputs like WSDL, which is currently not implemented.

    +
  • +
+
+
+

Output:

+
+
+
    +
  • +

    any text-based document or document fragments specified by templates

    +
  • +
+
+
+
+
+

Architecture

+
+
+

CobiGen is built as an extensible framework for incremental code generation. It provides extension points for new input readers which allow reading new input types and converting them to an internally processed model. The model is used to process templates of different kinds to generate patches. The template processing will be done by different template engines. There is an extension point for template engines to support multiple ones as well. Finally, the patch will be structurally merged into potentially already existing code. To allow structural merge on different programming languages, the extension point for structural mergers has been introduced. Here you will see an overview of the currently available extension points and plug-ins:

+
+
+
+
+

Features and Characteristics

+
+
+
    +
  • +

    Generate fresh files across all the layers of an application - ready to run.

    +
  • +
  • +

    Add on to existing files merging code into it. E.g. generate new methods into existing java classes or adding nodes to an XML file. Merging of contents into existing files will be done using structural merge mechanisms.

    +
  • +
  • +

    Structural merge mechanisms are currently implemented for Java, XML, Java Property Syntax, JSON, Basic HTML, Text Append, TypeScript.

    +
  • +
  • +

    Conflicts can be resolved individually but automatically by former configuration for each template.

    +
  • +
  • +

    CobiGen provides an Eclipse integration as well as a Maven Integration.

    +
  • +
  • +

    CobiGen comes with an extensive documentation for users and developers.

    +
  • +
  • +

    Templates can be fully tailored to project needs - this is considered as a simple task.

    +
  • +
+
+
+
+
+

Selection of current and past CobiGen applications

+
+
+

General applications:

+
+
+
    +
  • +

    Generation of a Java CRUD application based on devonfw architecture including all software-layers on the server plus code for JS-clients (Angular). You can find details here.

    +
  • +
  • +

    Generation of a Java CRUD application according to the Register Factory architecture. Persistence entities are the input for generation.

    +
  • +
  • +

    Generation of builder classes for generating test data for JUnit-Tests. Input are the persistence entities.

    +
  • +
  • +

    Generation of an EXT JS 6 client with full CRUD operations connected to a devon4j server.

    +
  • +
  • +

    Generation of an Angular 6 client with full CRUD operations connected to a devon4j server.

    +
  • +
+
+
+

Project-specific applications in the past:

+
+
+
    +
  • +

    Generation of an additional Java type hierarchy on top of existing Java classes in combination with additional methods to be integrated in the modified classes. Hibernate entities were considered as input as well as output of the generation. The rational in this case, was to generate an additional business object hierarchy on top of an existing data model for efficient business processing.

    +
  • +
  • +

    Generation of hash- and equals-methods as well as copy constructors depending on the field types of the input Java class. Furthermore, CobiGen is able to re-generate these methods/constructors triggered by the user, i.e., when fields have been changed.

    +
  • +
  • +

    Extraction of JavaDoc of test classes and their methods for generating a csv test documentation. This test documentation has been further processed manually in Excel to provide a good overview about the currently available tests in the software system, which enables further human analysis.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/_images/images/CLIArgumentImage.PNG b/docs/cobigen/1.0/_images/images/CLIArgumentImage.PNG new file mode 100644 index 00000000..f5b81889 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/CLIArgumentImage.PNG differ diff --git a/docs/cobigen/1.0/_images/images/WithoutParam.PNG b/docs/cobigen/1.0/_images/images/WithoutParam.PNG new file mode 100644 index 00000000..bde8d17a Binary files /dev/null and b/docs/cobigen/1.0/_images/images/WithoutParam.PNG differ diff --git a/docs/cobigen/1.0/_images/images/capgeminiLogo.jpg b/docs/cobigen/1.0/_images/images/capgeminiLogo.jpg new file mode 100644 index 00000000..4260a548 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/capgeminiLogo.jpg differ diff --git a/docs/cobigen/1.0/_images/images/devonfw.png b/docs/cobigen/1.0/_images/images/devonfw.png new file mode 100644 index 00000000..ceeec3ee Binary files /dev/null and b/docs/cobigen/1.0/_images/images/devonfw.png differ diff --git a/docs/cobigen/1.0/_images/images/generate_wizard_page1.png b/docs/cobigen/1.0/_images/images/generate_wizard_page1.png new file mode 100644 index 00000000..376173a9 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/generate_wizard_page1.png differ diff --git a/docs/cobigen/1.0/_images/images/generate_wizard_page1_batch.png b/docs/cobigen/1.0/_images/images/generate_wizard_page1_batch.png new file mode 100644 index 00000000..93d686f7 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/generate_wizard_page1_batch.png differ diff --git a/docs/cobigen/1.0/_images/images/generate_wizard_page1_customizing.png b/docs/cobigen/1.0/_images/images/generate_wizard_page1_customizing.png new file mode 100644 index 00000000..084d9250 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/generate_wizard_page1_customizing.png differ diff --git a/docs/cobigen/1.0/_images/images/health_check_advanced_up_to_date.png 
b/docs/cobigen/1.0/_images/images/health_check_advanced_up_to_date.png new file mode 100644 index 00000000..b63df93e Binary files /dev/null and b/docs/cobigen/1.0/_images/images/health_check_advanced_up_to_date.png differ diff --git a/docs/cobigen/1.0/_images/images/health_check_all_OK.png b/docs/cobigen/1.0/_images/images/health_check_all_OK.png new file mode 100644 index 00000000..1cb5aea0 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/health_check_all_OK.png differ diff --git a/docs/cobigen/1.0/_images/images/health_check_invalid_config.png b/docs/cobigen/1.0/_images/images/health_check_invalid_config.png new file mode 100644 index 00000000..2bf1f10e Binary files /dev/null and b/docs/cobigen/1.0/_images/images/health_check_invalid_config.png differ diff --git a/docs/cobigen/1.0/_images/images/health_check_menu_entry.png b/docs/cobigen/1.0/_images/images/health_check_menu_entry.png new file mode 100644 index 00000000..4c7ee051 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/health_check_menu_entry.png differ diff --git a/docs/cobigen/1.0/_images/images/health_check_menu_entry1.png b/docs/cobigen/1.0/_images/images/health_check_menu_entry1.png new file mode 100644 index 00000000..38647b53 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/health_check_menu_entry1.png differ diff --git a/docs/cobigen/1.0/_images/images/health_check_no_matching_triggers.png b/docs/cobigen/1.0/_images/images/health_check_no_matching_triggers.png new file mode 100644 index 00000000..efb67c5b Binary files /dev/null and b/docs/cobigen/1.0/_images/images/health_check_no_matching_triggers.png differ diff --git a/docs/cobigen/1.0/_images/images/health_check_no_templates.png b/docs/cobigen/1.0/_images/images/health_check_no_templates.png new file mode 100644 index 00000000..7162828a Binary files /dev/null and b/docs/cobigen/1.0/_images/images/health_check_no_templates.png differ diff --git a/docs/cobigen/1.0/_images/images/health_check_old_context.png 
b/docs/cobigen/1.0/_images/images/health_check_old_context.png new file mode 100644 index 00000000..ac04b9a4 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/health_check_old_context.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/CLI/WithoutParam.PNG b/docs/cobigen/1.0/_images/images/howtos/CLI/WithoutParam.PNG new file mode 100644 index 00000000..bde8d17a Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/CLI/WithoutParam.PNG differ diff --git a/docs/cobigen/1.0/_images/images/howtos/CLI/selectedIncr.PNG b/docs/cobigen/1.0/_images/images/howtos/CLI/selectedIncr.PNG new file mode 100644 index 00000000..51ed90bb Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/CLI/selectedIncr.PNG differ diff --git a/docs/cobigen/1.0/_images/images/howtos/CLI/updateCommand.PNG b/docs/cobigen/1.0/_images/images/howtos/CLI/updateCommand.PNG new file mode 100644 index 00000000..1f49778d Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/CLI/updateCommand.PNG differ diff --git a/docs/cobigen/1.0/_images/images/howtos/CLI/updatePlugin.PNG b/docs/cobigen/1.0/_images/images/howtos/CLI/updatePlugin.PNG new file mode 100644 index 00000000..d4992808 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/CLI/updatePlugin.PNG differ diff --git a/docs/cobigen/1.0/_images/images/howtos/EA-gen/classdiagram.png b/docs/cobigen/1.0/_images/images/howtos/EA-gen/classdiagram.png new file mode 100644 index 00000000..98afca92 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/EA-gen/classdiagram.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/EA-gen/exporting.png b/docs/cobigen/1.0/_images/images/howtos/EA-gen/exporting.png new file mode 100644 index 00000000..7c93230e Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/EA-gen/exporting.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/EA-gen/generating.png 
b/docs/cobigen/1.0/_images/images/howtos/EA-gen/generating.png new file mode 100644 index 00000000..c33eb136 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/EA-gen/generating.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/angular4-gen/ng4gen_1.png b/docs/cobigen/1.0/_images/images/howtos/angular4-gen/ng4gen_1.png new file mode 100644 index 00000000..06ed8e1b Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/angular4-gen/ng4gen_1.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/angular4-gen/ng4gen_2.png b/docs/cobigen/1.0/_images/images/howtos/angular4-gen/ng4gen_2.png new file mode 100644 index 00000000..69f161b5 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/angular4-gen/ng4gen_2.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/angular4-gen/ng4gen_3.png b/docs/cobigen/1.0/_images/images/howtos/angular4-gen/ng4gen_3.png new file mode 100644 index 00000000..5ac1508e Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/angular4-gen/ng4gen_3.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/angular4-gen/ng4gen_4.png b/docs/cobigen/1.0/_images/images/howtos/angular4-gen/ng4gen_4.png new file mode 100644 index 00000000..885f4f04 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/angular4-gen/ng4gen_4.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/angular4-gen/ng4gen_5.png b/docs/cobigen/1.0/_images/images/howtos/angular4-gen/ng4gen_5.png new file mode 100644 index 00000000..d7e23f66 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/angular4-gen/ng4gen_5.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/angular4-gen/ng4gen_6.png b/docs/cobigen/1.0/_images/images/howtos/angular4-gen/ng4gen_6.png new file mode 100644 index 00000000..6260106a Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/angular4-gen/ng4gen_6.png differ diff --git 
a/docs/cobigen/1.0/_images/images/howtos/angular4-gen/ng4gen_7.png b/docs/cobigen/1.0/_images/images/howtos/angular4-gen/ng4gen_7.png new file mode 100644 index 00000000..ee474dd4 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/angular4-gen/ng4gen_7.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/cobigen-core/cobigen-core_diag6.png b/docs/cobigen/1.0/_images/images/howtos/cobigen-core/cobigen-core_diag6.png new file mode 100644 index 00000000..d9624cd8 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/cobigen-core/cobigen-core_diag6.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/cobigen-core/cobigen-core_sshot1.png b/docs/cobigen/1.0/_images/images/howtos/cobigen-core/cobigen-core_sshot1.png new file mode 100644 index 00000000..89150444 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/cobigen-core/cobigen-core_sshot1.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/cobigen-core/core_01.png b/docs/cobigen/1.0/_images/images/howtos/cobigen-core/core_01.png new file mode 100644 index 00000000..d4487265 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/cobigen-core/core_01.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/cobigen-core/core_02.png b/docs/cobigen/1.0/_images/images/howtos/cobigen-core/core_02.png new file mode 100644 index 00000000..04e70889 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/cobigen-core/core_02.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/cobigen-core/core_03.png b/docs/cobigen/1.0/_images/images/howtos/cobigen-core/core_03.png new file mode 100644 index 00000000..1f50af8a Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/cobigen-core/core_03.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/devon4net/BussinessCommonConfiguration.png b/docs/cobigen/1.0/_images/images/howtos/devon4net/BussinessCommonConfiguration.png new file mode 100644 
index 00000000..4b440616 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/devon4net/BussinessCommonConfiguration.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/devon4net/CobigenContextLocation.png b/docs/cobigen/1.0/_images/images/howtos/devon4net/CobigenContextLocation.png new file mode 100644 index 00000000..9744124a Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/devon4net/CobigenContextLocation.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/devon4net/OpenAPI_file_root_folder.png b/docs/cobigen/1.0/_images/images/howtos/devon4net/OpenAPI_file_root_folder.png new file mode 100644 index 00000000..033bc985 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/devon4net/OpenAPI_file_root_folder.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/devon4net/Project_selection.png b/docs/cobigen/1.0/_images/images/howtos/devon4net/Project_selection.png new file mode 100644 index 00000000..76c5b862 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/devon4net/Project_selection.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/devon4net/Services.png b/docs/cobigen/1.0/_images/images/howtos/devon4net/Services.png new file mode 100644 index 00000000..07d3fe25 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/devon4net/Services.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/devon4net/cobigen0.png b/docs/cobigen/1.0/_images/images/howtos/devon4net/cobigen0.png new file mode 100644 index 00000000..b649a8fa Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/devon4net/cobigen0.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/devon4net/cobigen_generate0.png b/docs/cobigen/1.0/_images/images/howtos/devon4net/cobigen_generate0.png new file mode 100644 index 00000000..3f9ddfcb Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/devon4net/cobigen_generate0.png differ diff --git 
a/docs/cobigen/1.0/_images/images/howtos/devon4net/cobigen_generate1.png b/docs/cobigen/1.0/_images/images/howtos/devon4net/cobigen_generate1.png new file mode 100644 index 00000000..3917e4ee Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/devon4net/cobigen_generate1.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/devon4net/cobigen_health1.png b/docs/cobigen/1.0/_images/images/howtos/devon4net/cobigen_health1.png new file mode 100644 index 00000000..dc29b377 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/devon4net/cobigen_health1.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/devon4net/cobigen_health2.png b/docs/cobigen/1.0/_images/images/howtos/devon4net/cobigen_health2.png new file mode 100644 index 00000000..1580452b Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/devon4net/cobigen_health2.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/FEGenOptions.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/FEGenOptions.png new file mode 100644 index 00000000..ebccdc35 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/FEGenOptions.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/application_properties.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/application_properties.png new file mode 100644 index 00000000..e15d8ab1 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/application_properties.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/devonextracted.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/devonextracted.png new file mode 100644 index 00000000..a8ba12c8 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/devonextracted.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/dir-after-ide-install.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/dir-after-ide-install.png new file mode 100644 index 
00000000..c446547a Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/dir-after-ide-install.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/docs.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/docs.png new file mode 100644 index 00000000..b639e7a8 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/docs.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/ide-install.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/ide-install.png new file mode 100644 index 00000000..e0e5ff53 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/ide-install.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image 74.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image 74.png new file mode 100644 index 00000000..ea5e95a7 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image 74.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image10.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image10.png new file mode 100644 index 00000000..e92d4c07 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image10.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image11.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image11.png new file mode 100644 index 00000000..5e15c899 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image11.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image12.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image12.png new file mode 100644 index 00000000..c6406b22 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image12.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image14.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image14.png new file mode 100644 index 00000000..39c8403d Binary files /dev/null and 
b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image14.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image15.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image15.png new file mode 100644 index 00000000..2b5efb2f Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image15.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image16.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image16.png new file mode 100644 index 00000000..3347ef87 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image16.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image17.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image17.png new file mode 100644 index 00000000..f1fd81ab Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image17.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image18.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image18.png new file mode 100644 index 00000000..e422122f Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image18.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image19.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image19.png new file mode 100644 index 00000000..6e8f6d43 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image19.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image2.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image2.png new file mode 100644 index 00000000..c799c647 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image2.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image20.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image20.png new file mode 100644 index 00000000..632c481d Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image20.png differ diff --git 
a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image21.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image21.png new file mode 100644 index 00000000..5cf668d3 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image21.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image22.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image22.png new file mode 100644 index 00000000..1245e4d8 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image22.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image23.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image23.png new file mode 100644 index 00000000..26568da0 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image23.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image24.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image24.png new file mode 100644 index 00000000..01194514 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image24.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image25.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image25.png new file mode 100644 index 00000000..96757043 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image25.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image26.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image26.png new file mode 100644 index 00000000..0fc3fa10 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image26.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image27.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image27.png new file mode 100644 index 00000000..28627a5f Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image27.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image28.png 
b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image28.png new file mode 100644 index 00000000..117d4a5a Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image28.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image29.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image29.png new file mode 100644 index 00000000..2b6080ef Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image29.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image3.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image3.png new file mode 100644 index 00000000..b33915c9 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image3.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image30.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image30.png new file mode 100644 index 00000000..066f3177 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image30.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image31.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image31.png new file mode 100644 index 00000000..5c68b77f Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image31.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image32.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image32.png new file mode 100644 index 00000000..c402685e Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image32.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image33.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image33.png new file mode 100644 index 00000000..bb06d082 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image33.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image34.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image34.png new file mode 100644 
index 00000000..da3a93ff Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image34.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image35.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image35.png new file mode 100644 index 00000000..9cdddf83 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image35.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image36.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image36.png new file mode 100644 index 00000000..e54292fc Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image36.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image37.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image37.png new file mode 100644 index 00000000..835fd3e8 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image37.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image38.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image38.png new file mode 100644 index 00000000..b26f31d7 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image38.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image39.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image39.png new file mode 100644 index 00000000..61720c65 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image39.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image4.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image4.png new file mode 100644 index 00000000..56029ea9 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image4.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image40.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image40.png new file mode 100644 index 00000000..532be7c5 Binary files /dev/null and 
b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image40.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image41.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image41.png new file mode 100644 index 00000000..85bfb3ba Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image41.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image42.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image42.png new file mode 100644 index 00000000..49be4c20 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image42.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image43.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image43.png new file mode 100644 index 00000000..e34e3018 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image43.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image44.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image44.png new file mode 100644 index 00000000..41ae73e8 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image44.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image45.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image45.png new file mode 100644 index 00000000..b9b1e867 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image45.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image46.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image46.png new file mode 100644 index 00000000..2b24cab2 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image46.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image47.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image47.png new file mode 100644 index 00000000..91852f06 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image47.png differ diff --git 
a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image48.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image48.png new file mode 100644 index 00000000..47d7d367 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image48.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image49.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image49.png new file mode 100644 index 00000000..cadea205 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image49.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image5.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image5.png new file mode 100644 index 00000000..1701e399 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image5.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image50.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image50.png new file mode 100644 index 00000000..d674eb34 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image50.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image51.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image51.png new file mode 100644 index 00000000..a59eec96 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image51.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image52.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image52.png new file mode 100644 index 00000000..8019c14a Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image52.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image53.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image53.png new file mode 100644 index 00000000..a3d98569 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image53.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image54.png 
b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image54.png new file mode 100644 index 00000000..a26c3797 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image54.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image55.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image55.png new file mode 100644 index 00000000..0044dc22 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image55.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image56.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image56.png new file mode 100644 index 00000000..88fefb88 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image56.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image57.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image57.png new file mode 100644 index 00000000..0841154c Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image57.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image58.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image58.png new file mode 100644 index 00000000..24117f2e Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image58.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image59.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image59.png new file mode 100644 index 00000000..ad1aad11 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image59.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image6.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image6.png new file mode 100644 index 00000000..95e55da0 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image6.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image60.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image60.png new file mode 100644 
index 00000000..00f834d0 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image60.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image61.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image61.png new file mode 100644 index 00000000..18e975af Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image61.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image62.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image62.png new file mode 100644 index 00000000..9bcbc70a Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image62.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image63.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image63.png new file mode 100644 index 00000000..ded149b7 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image63.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image64.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image64.png new file mode 100644 index 00000000..d349f010 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image64.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image65.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image65.png new file mode 100644 index 00000000..4f9adf1a Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image65.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image66.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image66.png new file mode 100644 index 00000000..873712d2 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image66.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image67.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image67.png new file mode 100644 index 00000000..8cd4bf59 Binary files /dev/null and 
b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image67.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image68.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image68.png new file mode 100644 index 00000000..7e173536 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image68.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image69.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image69.png new file mode 100644 index 00000000..2b7d9631 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image69.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image7.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image7.png new file mode 100644 index 00000000..fb55847d Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image7.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image70.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image70.png new file mode 100644 index 00000000..a962dcdc Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image70.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image71.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image71.png new file mode 100644 index 00000000..57b0914d Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image71.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image72.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image72.png new file mode 100644 index 00000000..f2ecbc5d Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image72.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image73.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image73.png new file mode 100644 index 00000000..30f8098a Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image73.png differ diff --git 
a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image74.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image74.png new file mode 100644 index 00000000..ea5e95a7 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image74.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image75.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image75.png new file mode 100644 index 00000000..94804431 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image75.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image76.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image76.png new file mode 100644 index 00000000..da4a3b07 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image76.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image77.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image77.png new file mode 100644 index 00000000..2c62c423 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image77.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image78.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image78.png new file mode 100644 index 00000000..24c99cd0 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image78.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image79.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image79.png new file mode 100644 index 00000000..d39810df Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image79.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image8.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image8.png new file mode 100644 index 00000000..89b1fdaa Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image8.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image80.png 
b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image80.png new file mode 100644 index 00000000..2bec7b2e Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image80.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image81.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image81.png new file mode 100644 index 00000000..5622afcc Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image81.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image82.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image82.png new file mode 100644 index 00000000..d81178da Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image82.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image83.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image83.png new file mode 100644 index 00000000..e78ce630 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image83.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image84.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image84.png new file mode 100644 index 00000000..faa0ee35 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image84.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image85.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image85.png new file mode 100644 index 00000000..bf44bbde Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image85.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image86.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image86.png new file mode 100644 index 00000000..2fac7b4c Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image86.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image9.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image9.png new file mode 100644 
index 00000000..c5627160 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/image9.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/nav-bar.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/nav-bar.png new file mode 100644 index 00000000..e188ca7a Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/nav-bar.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/poc-entity-package.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/poc-entity-package.png new file mode 100644 index 00000000..f373443c Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/poc-entity-package.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/e2e_gen/routing-module.png b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/routing-module.png new file mode 100644 index 00000000..db29b403 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/e2e_gen/routing-module.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/eclipse-plugin/eclipse-plugin_diag1.png b/docs/cobigen/1.0/_images/images/howtos/eclipse-plugin/eclipse-plugin_diag1.png new file mode 100644 index 00000000..8e6e9b91 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/eclipse-plugin/eclipse-plugin_diag1.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/eclipse-plugin/eclipse-plugin_diag2.png b/docs/cobigen/1.0/_images/images/howtos/eclipse-plugin/eclipse-plugin_diag2.png new file mode 100644 index 00000000..7512ce6a Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/eclipse-plugin/eclipse-plugin_diag2.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/eclipse-plugin/eclipse-plugin_diag3.png b/docs/cobigen/1.0/_images/images/howtos/eclipse-plugin/eclipse-plugin_diag3.png new file mode 100644 index 00000000..805bea21 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/eclipse-plugin/eclipse-plugin_diag3.png differ 
diff --git a/docs/cobigen/1.0/_images/images/howtos/eclipse-plugin/eclipse-plugin_diag4.png b/docs/cobigen/1.0/_images/images/howtos/eclipse-plugin/eclipse-plugin_diag4.png new file mode 100644 index 00000000..ffb52794 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/eclipse-plugin/eclipse-plugin_diag4.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/eclipse-plugin/eclipse-plugin_diag5.png b/docs/cobigen/1.0/_images/images/howtos/eclipse-plugin/eclipse-plugin_diag5.png new file mode 100644 index 00000000..93554533 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/eclipse-plugin/eclipse-plugin_diag5.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/eclipse-plugin/eclipse-plugin_sshot1.png b/docs/cobigen/1.0/_images/images/howtos/eclipse-plugin/eclipse-plugin_sshot1.png new file mode 100644 index 00000000..4c7ee051 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/eclipse-plugin/eclipse-plugin_sshot1.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/eclipse/generate_wizard_page1.png b/docs/cobigen/1.0/_images/images/howtos/eclipse/generate_wizard_page1.png new file mode 100644 index 00000000..376173a9 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/eclipse/generate_wizard_page1.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/eclipse/generate_wizard_page1_batch.png b/docs/cobigen/1.0/_images/images/howtos/eclipse/generate_wizard_page1_batch.png new file mode 100644 index 00000000..93d686f7 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/eclipse/generate_wizard_page1_batch.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/eclipse/health_check_advanced_up_to_date.png b/docs/cobigen/1.0/_images/images/howtos/eclipse/health_check_advanced_up_to_date.png new file mode 100644 index 00000000..b63df93e Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/eclipse/health_check_advanced_up_to_date.png 
differ diff --git a/docs/cobigen/1.0/_images/images/howtos/eclipse/health_check_all_OK.png b/docs/cobigen/1.0/_images/images/howtos/eclipse/health_check_all_OK.png new file mode 100644 index 00000000..1cb5aea0 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/eclipse/health_check_all_OK.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/eclipse/health_check_invalid_config.png b/docs/cobigen/1.0/_images/images/howtos/eclipse/health_check_invalid_config.png new file mode 100644 index 00000000..2bf1f10e Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/eclipse/health_check_invalid_config.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/eclipse/health_check_menu_entry.png b/docs/cobigen/1.0/_images/images/howtos/eclipse/health_check_menu_entry.png new file mode 100644 index 00000000..4c7ee051 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/eclipse/health_check_menu_entry.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/eclipse/health_check_no_matching_triggers.png b/docs/cobigen/1.0/_images/images/howtos/eclipse/health_check_no_matching_triggers.png new file mode 100644 index 00000000..efb67c5b Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/eclipse/health_check_no_matching_triggers.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/eclipse/health_check_no_templates.png b/docs/cobigen/1.0/_images/images/howtos/eclipse/health_check_no_templates.png new file mode 100644 index 00000000..7162828a Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/eclipse/health_check_no_templates.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/eclipse/health_check_old_context.png b/docs/cobigen/1.0/_images/images/howtos/eclipse/health_check_old_context.png new file mode 100644 index 00000000..ac04b9a4 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/eclipse/health_check_old_context.png differ diff --git 
a/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/01_installer.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/01_installer.png new file mode 100644 index 00000000..72a82344 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/01_installer.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/02_installer.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/02_installer.png new file mode 100644 index 00000000..b00b26a1 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/02_installer.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/03_installer.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/03_installer.png new file mode 100644 index 00000000..d5105133 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/03_installer.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/04_installer.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/04_installer.png new file mode 100644 index 00000000..41ec9ea8 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/04_installer.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/05_installer.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/05_installer.png new file mode 100644 index 00000000..58a7406d Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/05_installer.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/06_installer.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/06_installer.png new file mode 100644 index 00000000..0c32ced9 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/06_installer.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/07_preferences.png 
b/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/07_preferences.png new file mode 100644 index 00000000..017bebc2 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/07_preferences.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/08_tool-bar.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/08_tool-bar.png new file mode 100644 index 00000000..10ec3ada Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup-oomph/08_tool-bar.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/01_extract.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/01_extract.png new file mode 100644 index 00000000..164e2dca Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/01_extract.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/02_extract_result.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/02_extract_result.png new file mode 100644 index 00000000..c7d24c85 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/02_extract_result.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/03_clone_menu.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/03_clone_menu.png new file mode 100644 index 00000000..1d0dfb58 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/03_clone_menu.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/04_clone_https.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/04_clone_https.png new file mode 100644 index 00000000..811ac61d Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/04_clone_https.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/05_clone_ssh.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/05_clone_ssh.png new file mode 100644 index 00000000..8e6eba2a Binary files /dev/null and 
b/docs/cobigen/1.0/_images/images/howtos/ide-setup/05_clone_ssh.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/06_update-all-workspaces.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/06_update-all-workspaces.png new file mode 100644 index 00000000..4ed903d3 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/06_update-all-workspaces.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/07_update-all-workspaces_out.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/07_update-all-workspaces_out.png new file mode 100644 index 00000000..e328de41 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/07_update-all-workspaces_out.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/08_final_folder_tree.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/08_final_folder_tree.png new file mode 100644 index 00000000..0361812b Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/08_final_folder_tree.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/08_final_scripts.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/08_final_scripts.png new file mode 100644 index 00000000..cecb9bb3 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/08_final_scripts.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/09_import_menu.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/09_import_menu.png new file mode 100644 index 00000000..77dda6dd Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/09_import_menu.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/10_import_mvn.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/10_import_mvn.png new file mode 100644 index 00000000..aeda4616 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/10_import_mvn.png differ diff --git 
a/docs/cobigen/1.0/_images/images/howtos/ide-setup/11_import_mvn_browse.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/11_import_mvn_browse.png new file mode 100644 index 00000000..2dd28964 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/11_import_mvn_browse.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/12_import_mvn_OK.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/12_import_mvn_OK.png new file mode 100644 index 00000000..6d749db8 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/12_import_mvn_OK.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/13_import_mvn_projects.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/13_import_mvn_projects.png new file mode 100644 index 00000000..a993cab3 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/13_import_mvn_projects.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/14_import_mvn_out.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/14_import_mvn_out.png new file mode 100644 index 00000000..46b1a567 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/14_import_mvn_out.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/15_import_cobigen-eclipse.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/15_import_cobigen-eclipse.png new file mode 100644 index 00000000..2e168685 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/15_import_cobigen-eclipse.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/16_import_cobigen-eclipse_select.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/16_import_cobigen-eclipse_select.png new file mode 100644 index 00000000..d4648cd5 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/16_import_cobigen-eclipse_select.png differ diff --git 
a/docs/cobigen/1.0/_images/images/howtos/ide-setup/17_import_cobigen-eclipse_classpath-error.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/17_import_cobigen-eclipse_classpath-error.png new file mode 100644 index 00000000..822fb57b Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/17_import_cobigen-eclipse_classpath-error.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/18_import_cobigen-eclipse_startexplorer.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/18_import_cobigen-eclipse_startexplorer.png new file mode 100644 index 00000000..e0763ae4 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/18_import_cobigen-eclipse_startexplorer.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/19_import_cobigen-eclipse_out.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/19_import_cobigen-eclipse_out.png new file mode 100644 index 00000000..d38a789a Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/19_import_cobigen-eclipse_out.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/20_import_cobigen-eclipse_project_out.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/20_import_cobigen-eclipse_project_out.png new file mode 100644 index 00000000..33f60186 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/20_import_cobigen-eclipse_project_out.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/21_run_cobigen-eclipse.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/21_run_cobigen-eclipse.png new file mode 100644 index 00000000..4ea59f93 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/21_run_cobigen-eclipse.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/22_convert_to_maven_project.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/22_convert_to_maven_project.png new file mode 100644 index 
00000000..26fed09f Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/22_convert_to_maven_project.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ide-setup/File_Structure.png b/docs/cobigen/1.0/_images/images/howtos/ide-setup/File_Structure.png new file mode 100644 index 00000000..ad1dcc08 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ide-setup/File_Structure.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/installation/01-install-new-software.png b/docs/cobigen/1.0/_images/images/howtos/installation/01-install-new-software.png new file mode 100644 index 00000000..be2acd4d Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/installation/01-install-new-software.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/installation/02-select-update-site.png b/docs/cobigen/1.0/_images/images/howtos/installation/02-select-update-site.png new file mode 100644 index 00000000..830b2ace Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/installation/02-select-update-site.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/installation/03-update-software.png b/docs/cobigen/1.0/_images/images/howtos/installation/03-update-software.png new file mode 100644 index 00000000..c06ea267 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/installation/03-update-software.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ionic-gen/and-vsc-build-apk.png b/docs/cobigen/1.0/_images/images/howtos/ionic-gen/and-vsc-build-apk.png new file mode 100644 index 00000000..f321c1df Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ionic-gen/and-vsc-build-apk.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ionic-gen/and-vsc-build-run.png b/docs/cobigen/1.0/_images/images/howtos/ionic-gen/and-vsc-build-run.png new file mode 100644 index 00000000..aaad2c91 Binary files /dev/null and 
b/docs/cobigen/1.0/_images/images/howtos/ionic-gen/and-vsc-build-run.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ionic-gen/and-vsc-make-app.png b/docs/cobigen/1.0/_images/images/howtos/ionic-gen/and-vsc-make-app.png new file mode 100644 index 00000000..52fa226f Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ionic-gen/and-vsc-make-app.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ionic-gen/and-vsc-make.png b/docs/cobigen/1.0/_images/images/howtos/ionic-gen/and-vsc-make.png new file mode 100644 index 00000000..384e9079 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ionic-gen/and-vsc-make.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ionic-gen/downloadTemplates.png b/docs/cobigen/1.0/_images/images/howtos/ionic-gen/downloadTemplates.png new file mode 100644 index 00000000..8f74c505 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ionic-gen/downloadTemplates.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ionic-gen/pathOfGeneration.png b/docs/cobigen/1.0/_images/images/howtos/ionic-gen/pathOfGeneration.png new file mode 100644 index 00000000..12e5254c Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ionic-gen/pathOfGeneration.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ionic-gen/rightClick.png b/docs/cobigen/1.0/_images/images/howtos/ionic-gen/rightClick.png new file mode 100644 index 00000000..cea2e175 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ionic-gen/rightClick.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/ionic-gen/wizardCobiGen.png b/docs/cobigen/1.0/_images/images/howtos/ionic-gen/wizardCobiGen.png new file mode 100644 index 00000000..26db5113 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/ionic-gen/wizardCobiGen.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/openapi-gen/extensionPropertyFile.png 
b/docs/cobigen/1.0/_images/images/howtos/openapi-gen/extensionPropertyFile.png new file mode 100644 index 00000000..f2dba463 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/openapi-gen/extensionPropertyFile.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/openapi-gen/openapi_howto1.png b/docs/cobigen/1.0/_images/images/howtos/openapi-gen/openapi_howto1.png new file mode 100644 index 00000000..e422122f Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/openapi-gen/openapi_howto1.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/openapi-gen/service_based.PNG b/docs/cobigen/1.0/_images/images/howtos/openapi-gen/service_based.PNG new file mode 100644 index 00000000..86c7781a Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/openapi-gen/service_based.PNG differ diff --git a/docs/cobigen/1.0/_images/images/howtos/sencha-json-plugins/01_ast.png b/docs/cobigen/1.0/_images/images/howtos/sencha-json-plugins/01_ast.png new file mode 100644 index 00000000..923f4144 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/sencha-json-plugins/01_ast.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/sencha-json-plugins/02_ids.png b/docs/cobigen/1.0/_images/images/howtos/sencha-json-plugins/02_ids.png new file mode 100644 index 00000000..a880630a Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/sencha-json-plugins/02_ids.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/senchacligen/devon_guide_cobigen9_sencha_app_gen.png b/docs/cobigen/1.0/_images/images/howtos/senchacligen/devon_guide_cobigen9_sencha_app_gen.png new file mode 100644 index 00000000..f718b67c Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/senchacligen/devon_guide_cobigen9_sencha_app_gen.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/senchacligen/devon_guide_cobigen9a_sencha_app_gen.png 
b/docs/cobigen/1.0/_images/images/howtos/senchacligen/devon_guide_cobigen9a_sencha_app_gen.png new file mode 100644 index 00000000..49ec367e Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/senchacligen/devon_guide_cobigen9a_sencha_app_gen.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/senchacligen/devon_guide_cobigen9b_sencha_app_gen.png b/docs/cobigen/1.0/_images/images/howtos/senchacligen/devon_guide_cobigen9b_sencha_app_gen.png new file mode 100644 index 00000000..c65925bd Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/senchacligen/devon_guide_cobigen9b_sencha_app_gen.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/senchacligen/devon_guide_cobigen9c_sencha_app_gen.png b/docs/cobigen/1.0/_images/images/howtos/senchacligen/devon_guide_cobigen9c_sencha_app_gen.png new file mode 100644 index 00000000..fa2d74bb Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/senchacligen/devon_guide_cobigen9c_sencha_app_gen.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/setup-jre/execution-environments.PNG b/docs/cobigen/1.0/_images/images/howtos/setup-jre/execution-environments.PNG new file mode 100644 index 00000000..c0ffa7d1 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/setup-jre/execution-environments.PNG differ diff --git a/docs/cobigen/1.0/_images/images/howtos/setup-jre/installed-jre.PNG b/docs/cobigen/1.0/_images/images/howtos/setup-jre/installed-jre.PNG new file mode 100644 index 00000000..efed084e Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/setup-jre/installed-jre.PNG differ diff --git a/docs/cobigen/1.0/_images/images/howtos/setup-jre/java-11.PNG b/docs/cobigen/1.0/_images/images/howtos/setup-jre/java-11.PNG new file mode 100644 index 00000000..dc19102a Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/setup-jre/java-11.PNG differ diff --git a/docs/cobigen/1.0/_images/images/howtos/todo-plugin/cliPom.png 
b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/cliPom.png new file mode 100644 index 00000000..5f95cc05 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/cliPom.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/todo-plugin/extractFilesCLI.png b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/extractFilesCLI.png new file mode 100644 index 00000000..12634bc2 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/extractFilesCLI.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/todo-plugin/failingTest.png b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/failingTest.png new file mode 100644 index 00000000..9d07f28a Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/failingTest.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/todo-plugin/importPluginEclipse.png b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/importPluginEclipse.png new file mode 100644 index 00000000..c5179a14 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/importPluginEclipse.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/todo-plugin/newCobiGen.png b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/newCobiGen.png new file mode 100644 index 00000000..580c58ac Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/newCobiGen.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/todo-plugin/oldCobiGen.png b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/oldCobiGen.png new file mode 100644 index 00000000..185e09b3 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/oldCobiGen.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/todo-plugin/packageStructure.png b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/packageStructure.png new file mode 100644 index 00000000..771e1540 Binary files /dev/null and 
b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/packageStructure.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/todo-plugin/pluginActivator.png b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/pluginActivator.png new file mode 100644 index 00000000..82edd134 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/pluginActivator.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/todo-plugin/renameCheckbox.png b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/renameCheckbox.png new file mode 100644 index 00000000..9833b796 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/renameCheckbox.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/todo-plugin/renamePlugin.png b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/renamePlugin.png new file mode 100644 index 00000000..a25d5e71 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/renamePlugin.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/todo-plugin/runAsArgs.png b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/runAsArgs.png new file mode 100644 index 00000000..ccbcda3b Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/runAsArgs.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/todo-plugin/runConfigurations.png b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/runConfigurations.png new file mode 100644 index 00000000..51dc4fee Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/runConfigurations.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/todo-plugin/runningCLI.png b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/runningCLI.png new file mode 100644 index 00000000..a7f093fc Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/runningCLI.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/todo-plugin/setPomProperties.png 
b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/setPomProperties.png new file mode 100644 index 00000000..b38839a6 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/setPomProperties.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/todo-plugin/templatesInside.png b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/templatesInside.png new file mode 100644 index 00000000..4d90632c Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/templatesInside.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/todo-plugin/templatesProject.png b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/templatesProject.png new file mode 100644 index 00000000..ae2be137 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/templatesProject.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/todo-plugin/templatesSnapshot.png b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/templatesSnapshot.png new file mode 100644 index 00000000..ec04ee06 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/templatesSnapshot.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/todo-plugin/usePluginTemplate.png b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/usePluginTemplate.png new file mode 100644 index 00000000..e8596a7e Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/usePluginTemplate.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/todo-plugin/useServerTemplate.png b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/useServerTemplate.png new file mode 100644 index 00000000..269a9741 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/todo-plugin/useServerTemplate.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/update_CobiGen/about_eclipse.png b/docs/cobigen/1.0/_images/images/howtos/update_CobiGen/about_eclipse.png new file mode 100644 index 
00000000..3c48db6b Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/update_CobiGen/about_eclipse.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/update_CobiGen/all_updates.png b/docs/cobigen/1.0/_images/images/howtos/update_CobiGen/all_updates.png new file mode 100644 index 00000000..b86c1fb8 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/update_CobiGen/all_updates.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/update_CobiGen/check_updates.png b/docs/cobigen/1.0/_images/images/howtos/update_CobiGen/check_updates.png new file mode 100644 index 00000000..9b491a43 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/update_CobiGen/check_updates.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/update_CobiGen/details_all_udpates.png b/docs/cobigen/1.0/_images/images/howtos/update_CobiGen/details_all_udpates.png new file mode 100644 index 00000000..a3b833b5 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/update_CobiGen/details_all_udpates.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/update_CobiGen/installation_details.png b/docs/cobigen/1.0/_images/images/howtos/update_CobiGen/installation_details.png new file mode 100644 index 00000000..b840ef8d Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/update_CobiGen/installation_details.png differ diff --git a/docs/cobigen/1.0/_images/images/howtos/update_CobiGen/update_templates.png b/docs/cobigen/1.0/_images/images/howtos/update_CobiGen/update_templates.png new file mode 100644 index 00000000..44bc12a6 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/howtos/update_CobiGen/update_templates.png differ diff --git a/docs/cobigen/1.0/_images/images/installation/01-install-new-software.png b/docs/cobigen/1.0/_images/images/installation/01-install-new-software.png new file mode 100644 index 00000000..be2acd4d Binary files /dev/null and 
b/docs/cobigen/1.0/_images/images/installation/01-install-new-software.png differ diff --git a/docs/cobigen/1.0/_images/images/installation/02-select-update-site.png b/docs/cobigen/1.0/_images/images/installation/02-select-update-site.png new file mode 100644 index 00000000..3d5f1739 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/installation/02-select-update-site.png differ diff --git a/docs/cobigen/1.0/_images/images/installation/03-update-software.png b/docs/cobigen/1.0/_images/images/installation/03-update-software.png new file mode 100644 index 00000000..c06ea267 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/installation/03-update-software.png differ diff --git a/docs/cobigen/1.0/_images/images/logo/cobigen_logo.jpg b/docs/cobigen/1.0/_images/images/logo/cobigen_logo.jpg new file mode 100644 index 00000000..d3f95ec3 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/logo/cobigen_logo.jpg differ diff --git a/docs/cobigen/1.0/_images/images/logo/cobigen_logo.png b/docs/cobigen/1.0/_images/images/logo/cobigen_logo.png new file mode 100644 index 00000000..ceeec3ee Binary files /dev/null and b/docs/cobigen/1.0/_images/images/logo/cobigen_logo.png differ diff --git a/docs/cobigen/1.0/_images/images/logo/cobigen_logo_trans.png b/docs/cobigen/1.0/_images/images/logo/cobigen_logo_trans.png new file mode 100644 index 00000000..bfb1b264 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/logo/cobigen_logo_trans.png differ diff --git a/docs/cobigen/1.0/_images/images/selectedIncr.PNG b/docs/cobigen/1.0/_images/images/selectedIncr.PNG new file mode 100644 index 00000000..51ed90bb Binary files /dev/null and b/docs/cobigen/1.0/_images/images/selectedIncr.PNG differ diff --git a/docs/cobigen/1.0/_images/images/topLeftLogo.png b/docs/cobigen/1.0/_images/images/topLeftLogo.png new file mode 100644 index 00000000..a8c61b09 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/topLeftLogo.png differ diff --git 
a/docs/cobigen/1.0/_images/images/topRightLogo.png b/docs/cobigen/1.0/_images/images/topRightLogo.png new file mode 100644 index 00000000..4fa5e56e Binary files /dev/null and b/docs/cobigen/1.0/_images/images/topRightLogo.png differ diff --git a/docs/cobigen/1.0/_images/images/updateCommand.PNG b/docs/cobigen/1.0/_images/images/updateCommand.PNG new file mode 100644 index 00000000..1f49778d Binary files /dev/null and b/docs/cobigen/1.0/_images/images/updateCommand.PNG differ diff --git a/docs/cobigen/1.0/_images/images/updatePlugin.PNG b/docs/cobigen/1.0/_images/images/updatePlugin.PNG new file mode 100644 index 00000000..d4992808 Binary files /dev/null and b/docs/cobigen/1.0/_images/images/updatePlugin.PNG differ diff --git a/docs/cobigen/1.0/cobigen-core_configuration.html b/docs/cobigen/1.0/cobigen-core_configuration.html new file mode 100644 index 00000000..ee23fdf5 --- /dev/null +++ b/docs/cobigen/1.0/cobigen-core_configuration.html @@ -0,0 +1,905 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==Configuration

+
+
+

CobiGen is maintaining a home directory further referenced in this documentation as $cghome, which is used to maintain temporary or transient data. The home folder is determined with the following location fall-back:

+
+
+
    +
  1. +

    System environment variable COBIGEN_HOME (e.g. C:\project\ide\conf\cobigen-home)

    +
  2. +
  3. +

    .cobigen directory in OS user home (e.g. ~/.cobigen)

    +
  4. +
+
+
+

The actual configuration of CobiGen is maintained by a single folder or jar. The location can be configured with respect to the implemented configuration fall-back mechanism. CobiGen will search for the location of the configuration in the following order:

+
+
+
    +
  1. +

    A configuration jar or directory, which is passed to CobiGen by the Maven or Eclipse integration or any other program using the CobiGen programming interface: +1.1. the Maven integration allows to configure a jar dependency to be included in the currently running classpath (of interest for maven configuration +1.2. the Eclipse integration allows to specify a CobiGen_Templates project in the eclipse workspace

    +
  2. +
  3. +

    The file $cghome/.cobigen exists and the property templates is set to a valid configuration (e.g. templates=C:\project\ide\conf\templates or templates=C:\project\ide\conf\templates.jar) Hint: Check for log entry like Value of property templates in $cghome/.cobigen is invalid to identify an invalid configuration which is not taken up as expected

    +
  4. +
  5. +

    The folder $cghome/templates/CobiGen_Templates exists

    +
  6. +
  7. +

    The lexicographical sorted first configuration jar of the following path pattern $cghome/templates/templates-([^-]+)-(\\d+\\.?)+.jar if exists (e.g. templates-devon4j-2020.04.001)

    +
  8. +
  9. +

    CobiGen will automatically download the latest jar configuration from maven central with groupId com.devonfw.cobigen and artifactId templates-devon4j and take it like described in 4.

    +
  10. +
+
+
+

Within the configuration jar or directory you will find the following structure:

+
+
+
+
CobiGen_Templates
+ |- templateFolder1
+    |- templates.xml
+ |- templateFolder2
+    |- templates.xml
+ |- context.xml
+
+
+
+

Find some examples here.

+
+
+

Context Configuration

+
+
+

The context configuration (context.xml) always has the following root structure:

+
+
+
Context Configuration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<contextConfiguration xmlns="http://capgemini.com"
+                      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                      version="1.0">
+    <triggers>
+        ...
+    </triggers>
+</contextConfiguration>
+
+
+
+

The context configuration has a version attribute, which should match the XSD version the context configuration is an instance of. It should not state the version of the currently released version of CobiGen. This attribute should be maintained by the context configuration developers. If configured correctly, it will provide a better feedback for the user and thus higher user experience. Currently there is only the version v1.0. For further version there will be a changelog later on.

+
+
+

Trigger Node

+
+

As children of the <triggers> node you can define different triggers. By defining a <trigger> you declare a mapping between special inputs and a templateFolder, which contains all templates, which are worth to be generated with the given input.

+
+
+
trigger configuration
+
+
<trigger id="..." type="..." templateFolder="..." inputCharset="UTF-8" >
+    ...
+</trigger>
+
+
+
+
    +
  • +

    The attribute id should be unique within an context configuration. It is necessary for efficient internal processing.

    +
  • +
  • +

    The attribute type declares a specific trigger interpreter, which might be provided by additional plug-ins. A trigger interpreter has to provide an input reader, which reads specific inputs and creates a template object model out of it to be processed by the FreeMarker template engine later on. Have a look at the plug-in’s documentation of your interest and see, which trigger types and thus inputs are currently supported.

    +
  • +
  • +

    The attribute templateFolder declares the relative path to the template folder, which will be used if the trigger gets activated.

    +
  • +
  • +

    The attribute inputCharset (optional) determines the charset to be used for reading any input file.

    +
  • +
+
+
+
+

Matcher Node

+
+

A trigger will be activated if its matchers hold the following formula:

+
+
+

!(NOT || …​ || NOT) && AND && …​ && AND && (OR || …​ || OR)

+
+
+

Whereas NOT/AND/OR describes the accumulationType of a matcher (see below) and e.g. NOT means 'a matcher with accumulationType NOT matches a given input'. Thus additionally to an input reader, a trigger interpreter has to define at least one set of matchers, which are satisfiable, to be fully functional. A <matcher> node declares a specific characteristics a valid input should have.

+
+
+
Matcher Configuration
+
+
<matcher type="..." value="..." accumulationType="...">
+    ...
+</matcher>
+
+
+
+
    +
  • +

    The attribute type declares a specific type of matcher, which has to be provided by the surrounding trigger interpreter. Have a look at the plug-in’s documentation, which also provides the used trigger type for more information about valid matcher and their functionalities.

    +
  • +
  • +

    The attribute value might contain any information necessary for processing the matcher’s functionality. Have a look at the relevant plug-in’s documentation for more detail.

    +
  • +
  • +

    The attribute accumulationType (optional) specifies how the matcher will influence the trigger activation. Valid values are:

    +
    +
      +
    • +

      OR (default): if any matcher of accumulation type OR matches, the trigger will be activated as long as there are no further matchers with different accumulation types

      +
    • +
    • +

      AND: if any matcher with AND accumulation type does not match, the trigger will not be activated

      +
    • +
    • +

      NOT: if any matcher with NOT accumulation type matches, the trigger will not be activated

      +
    • +
    +
    +
  • +
+
+
+
+

Variable Assignment Node

+
+

Finally, a <matcher> node can have multiple <variableAssignment> nodes as children. Variable assignments allow to parametrize the generation by additional values, which will be added to the object model for template processing. The variables declared using variable assignments, will be made accessible in the templates.xml as well in the object model for template processing via the namespace variables.*.

+
+
+
Complete Configuration Pattern
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<contextConfiguration xmlns="http://capgemini.com"
+                      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                      version="1.0">
+    <triggers>
+        <trigger id="..." type="..." templateFolder="...">
+            <matcher type="..." value="...">
+                <variableAssignment type="..." key="..." value="..." />
+            </matcher>
+        </trigger>
+    </triggers>
+</contextConfiguration>
+
+
+
+
    +
  • +

    The attribute type declares the type of variable assignment to be processed by the trigger interpreter providing plug-in. This attribute enables variable assignments with different dynamic value resolutions.

    +
  • +
  • +

    The attribute key declares the namespace under which the resolved value will be accessible later on.

    +
  • +
  • +

    The attribute value might declare a constant value to be assigned or any hint for value resolution done by the trigger interpreter providing plug-in. For instance, if type is regex, then on value you will assign the matched group number by the regex (1, 2, 3…​)

    +
  • +
+
+
+
+

Container Matcher Node

+
+

The <containerMatcher> node is an additional matcher for matching containers of multiple input objects. +Such a container might be a package, which encloses multiple types or---more generic---a model, which encloses multiple elements. A container matcher can be declared side by side with other matchers:

+
+
+
ContainerMatcher Declaration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<contextConfiguration xmlns="http://capgemini.com"
+                      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                      version="1.0">
+    <triggers>
+        <trigger id="..." type="..." templateFolder="..." >
+            <containerMatcher type="..." value="..." retrieveObjectsRecursively="..." />
+            <matcher type="..." value="...">
+                <variableAssignment type="..." variable="..." value="..." />
+            </matcher>
+        </trigger>
+    </triggers>
+</contextConfiguration>
+
+
+
+
    +
  • +

    The attribute type declares a specific type of matcher, which has to be provided by the surrounding trigger interpreter. Have a look at the plug-in’s documentation, which also provides the used trigger type for more information about valid matcher and their functionalities.

    +
  • +
  • +

    The attribute value might contain any information necessary for processing the matcher’s functionality. Have a look at the relevant plug-in’s documentation for more detail.

    +
  • +
  • +

    The attribute retrieveObjectsRecursively (optional boolean) states, whether the children of the input should be retrieved recursively to find matching inputs for generation.

    +
  • +
+
+
+

The semantics of a container matchers are the following:

+
+
+
    +
  • +

    A <containerMatcher> does not declare any <variableAssignment> nodes

    +
  • +
  • +

    A <containerMatcher> matches an input if and only if one of its enclosed elements satisfies a set of <matcher> nodes of the same <trigger>

    +
  • +
  • +

    Inputs, which match a <containerMatcher> will cause a generation for each enclosed element

    +
  • +
+
+
+
+
+
+

Templates Configuration

+
+
+

The template configuration (templates.xml) specifies, which templates exist and under which circumstances it will be generated. There are two possible configuration styles:

+
+
+
    +
  1. +

    Configure the template meta-data for each template file by template nodes

    +
  2. +
  3. +

    (since cobigen-core-v1.2.0): Configure templateScan nodes to automatically retrieve a default configuration for all files within a configured folder and possibly modify the automatically configured templates using templateExtension nodes

    +
  4. +
+
+
+

To get an intuition of the idea, the following will initially describe the first (more extensive) configuration style. Such an configuration root structure looks as follows:

+
+
+
Extensive Templates Configuration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<templatesConfiguration xmlns="http://capgemini.com"
+                        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                        version="1.0" templateEngine="FreeMarker">
+    <templates>
+            ...
+    </templates>
+    <increments>
+            ...
+    </increments>
+</templatesConfiguration>
+
+
+
+

The root node <templatesConfiguration> specifies two attributes. The attribute version provides further usability support and will be handled analogous to the version attribute of the context configuration. The optional attribute templateEngine specifies the template engine to be used for processing the templates (since `cobigen-core-4.0.0`). By default it is set to FreeMarker.

+
+
+

The node <templatesConfiguration> allows two different grouping nodes as children. First, there is the <templates> node, which groups all declarations of templates. Second, there is the <increments> node, which groups all declarations about increments.

+
+
+

Template Node

+
+

The <templates> node groups multiple <template> declarations, which enables further generation. Each template file should be registered at least once as a template to be considered.

+
+
+
Example Template Configuration
+
+
<templates>
+    <template name="..." destinationPath="..." templateFile="..." mergeStrategy="..." targetCharset="..." />
+    ...
+</templates>
+
+
+
+

A template declaration consist of multiple information:

+
+
+
    +
  • +

    The attribute name specifies an unique ID within the templates configuration, which will later be reused in the increment definitions.

    +
  • +
  • +

    The attribute destinationPath specifies the destination path the template will be generated to. It is possible to use all variables defined by variable assignments within the path declaration using the FreeMarker syntax ${variables.*}. While resolving the variable expressions, each dot within the value will be automatically replaced by a slash. This behavior is accounted for by the transformations of Java packages to paths as CobiGen has first been developed in the context of the Java world. Furthermore, the destination path variable resolution provides the following additional built-in operators analogue to the FreeMarker syntax:

    +
    +
      +
    • +

      ?cap_first analogue to FreeMarker

      +
    • +
    • +

      ?uncap_first analogue to FreeMarker

      +
    • +
    • +

      ?lower_case analogue to FreeMarker

      +
    • +
    • +

      ?upper_case analogue to FreeMarker

      +
    • +
    • +

      ?replace(regex, replacement) - Replaces all occurrences of the regular expression regex in the variable’s value with the given replacement string. (since cobigen-core v1.1.0)

      +
    • +
    • +

      ?removeSuffix(suffix) - Removes the given suffix in the variable’s value iff the variable’s value ends with the given suffix. Otherwise nothing will happen. (since cobigen-core v1.1.0)

      +
    • +
    • +

      ?removePrefix(prefix) - Analogue to ?removeSuffix but removes the prefix of the variable’s value. (since cobigen-core v1.1.0)

      +
    • +
    +
    +
  • +
  • +

    The attribute templateFile describes the relative path dependent on the template folder specified in the trigger to the template file to be generated.

    +
  • +
  • +

    The attribute mergeStrategy (optional) can be optionally specified and declares the type of merge mechanism to be used, when the destinationPath points to an already existing file. CobiGen by itself just comes with a mergeStrategy override, which enforces file regeneration in total. Additional available merge strategies have to be obtained from the different plug-in’s documentations (see here for java, XML, properties, and text). Default: not set (means not mergeable)

    +
  • +
  • +

    The attribute targetCharset (optional) can be optionally specified and declares the encoding with which the contents will be written into the destination file. This also includes reading an existing file at the destination path for merging its contents with the newly generated ones. Default: UTF-8

    +
  • +
+
+
+

(Since version 4.1.0) It is possible to reference external template (templates defined on another trigger), thanks to using <incrementRef …​> that are explained here.

+
+
+
+

Template Scan Node

+
+

(since cobigen-core-v1.2.0)

+
+
+

The second configuration style for template meta-data is driven by initially scanning all available templates and automatically configure them with a default set of meta-data. A scanning configuration might look like this:

+
+
+
Example of Template-scan configuration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<templatesConfiguration xmlns="http://capgemini.com"
+                        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                        version="1.2">
+    <templateScans>
+        <templateScan templatePath="templates" templateNamePrefix="prefix_" destinationPath="src/main/java"/>
+    </templateScans>
+</templatesConfiguration>
+
+
+
+

You can specify multiple <templateScan …​> nodes for different templatePaths and different templateNamePrefixes.

+
+
+
    +
  • +

    The name can be specified to later on reference the templates found by a template-scan within an increment. (since cobigen-core-v2.1.)

    +
  • +
  • +

    The templatePath specifies the relative path from the templates.xml to the root folder from which the template scan should be performed.

    +
  • +
  • +

    The templateNamePrefix (optional) defines a common id prefix, which will be added to all found and automatically configured templates.

    +
  • +
  • +

    The destinationPath defines the root folder all found templates should be generated to, whereas the root folder will be a prefix for all found and automatically configured templates.

    +
  • +
+
+
+

A templateScan will result in the following default configuration of templates. For each file found, new template will be created virtually with the following default values:

+
+
+
    +
  • +

    id: file name without .ftl extension prefixed by templateNamePrefix from template-scan

    +
  • +
  • +

    destinationPath: relative file path of the file found with the prefix defined by destinationPath from template-scan. Furthermore,

    +
    +
      +
    • +

      it is possible to use the syntax for accessing and modifying variables as described for the attribute destinationPath of the template node, besides the only difference, that due to file system restrictions you have to replace all ?-signs (for built-ins) with #-signs.

      +
    • +
    • +

      the files to be scanned, should provide their final file extension by the following file naming convention: <filename>.<extension>.ftl Thus the file extension .ftl will be removed after generation.

      +
    • +
    +
    +
  • +
  • +

    templateFile: relative path to the file found

    +
  • +
  • +

    mergeStrategy: (optional) not set means not mergeable

    +
  • +
  • +

    targetCharset: (optional) defaults to UTF-8

    +
  • +
+
+
+

(Since version 4.1.0) It is possible to reference external templateScan (templateScans defined on another trigger), thanks to using <incrementRef …​> that are explained here.

+
+
+
+

Template Extension Node

+
+

(since cobigen-core-v1.2.0)

+
+
+

Additionally to the templateScan declaration it is easily possible to rewrite specific attributes for any scanned and automatically configured template.

+
+
+
Example Configuration of a TemplateExtension
+
+
<templates>
+    <templateExtension ref="prefix_FooClass.java" mergeStrategy="javamerge" />
+</templates>
+
+<templateScans>
+    <templateScan templatePath="foo" templateNamePrefix="prefix_" destinationPath="src/main/java/foo"/>
+</templateScans>
+
+
+
+

Lets assume, that the above example declares a template-scan for the folder foo, which contains a file FooClass.java.ftl in any folder depth. Thus the template scan will automatically create a virtual template declaration with id=prefix_FooClass.java and further default configuration.

+
+
+

Using the templateExtension declaration above will reference the scanned template by the attribute ref and overrides the mergeStrategy of the automatically configured template by the value javamerge. Thus we are able to minimize the needed templates configuration.

+
+
+

(Since version 4.1.0) It is possible to reference external templateExtension (templateExtensions defined on another trigger), thanks to using <incrementRef …​> that are explained here.

+
+
+
+

Increment Node

+
+

The <increments> node groups multiple <increment> nodes, which can be seen as a collection of templates to be generated. An increment will be defined by a unique id and a human readable description.

+
+
+
+
<increments>
+    <increment id="..." description="...">
+        <incrementRef ref="..." />
+        <templateRef ref="..." />
+        <templateScanRef ref="..." />
+    </increment>
+</increments>
+
+
+
+

An increment might contain multiple increments and/or templates, which will be referenced using <incrementRef …​>, <templateRef …​>, resp. <templateScanRef …​> nodes. These nodes only declare the attribute ref, which will reference an increment, a template, or a template-scan by its id or name.

+
+
+

(Since version 4.1.0) An special case of <incrementRef …​> is the external incrementsRef. By default, <incrementRef …​> are used to reference increments defined in the same templates.xml file. So for example, we could have:

+
+
+
+
<increments>
+    <increment id="incA" description="...">
+        <incrementRef ref="incB" />
+    </increment>
+    <increment id="incB" description="...">
+        <templateRef .... />
+        <templateScan .... />
+    </increment>
+</increments>
+
+
+
+

However, if we want to reference an increment that it is not defined inside our templates.xml (an increment defined for another trigger), then we can use external incrementRef as shown below:

+
+
+
+
<increment name="..." description="...">
+    <incrementRef ref="trigger_id::increment_id"/>
+</increment>
+
+
+
+

The ref string is split using :: as the delimiter. The first part of the string is the trigger_id to reference. That trigger contains an increment_id. Currently, this functionality only works when both templates use the same kind of input file.

+
+
+
+
+
+

Java Template Logic

+
+
+

since cobigen-core-3.0.0, which is included in the Eclipse and Maven plug-ins since version 2.0.0. In addition, it is possible to implement more complex template logic by custom Java code. To enable this feature, you can simply import the CobiGen_Templates by clicking on Adapt Templates, turn it into a simple maven project (if it is not already) and implement any Java logic in the common maven layout (e.g. in the source folder src/main/java). Each Java class will be instantiated by CobiGen for each generation process. Thus, you can even store any state within a Java class instance during generation. However, there is currently no guarantee regarding the template processing order.

+
+
+

As a consequence, you have to implement your Java classes with a public default (non-parameter) constructor to be used by any template. Methods of the implemented Java classes can be called within templates by the simple standard FreeMarker expression for calling Bean methods: SimpleType.methodName(param1). Until now, CobiGen will shadow multiple types with the same simple name non-deterministically. So please prevent yourself from that situation.

+
+
+

Finally, if you would like to do some reflection within your Java code accessing any type of the template project or any type referenced by the input, you should load classes by making use of the classloader of the util classes. CobiGen will take care of the correct classloader building including the classpath of the input source as well as of the classpath of the template project. If you use any other classloader or build it by your own, there will be no guarantee, that generation succeeds.

+
+
+
+
+

Template Properties

+
+
+

since cobigen-core-4.0.0. Using a configuration with template scan, you can make use of properties in templates specified in property files named cobigen.properties next to the templates. The property files are specified as Java property files. Property files can be nested in sub-folders. Properties will be resolved including property shading. Properties defined nearest to the template to be generated will take precedence. In addition, a cobigen.properties file can be specified in the target folder root (in the eclipse plugin, this is equal to the source project root). These properties take precedence over template properties specified in the template folder.

+
+
+ + + + + +
+ + +It is not allowed to override context variables in cobigen.properties specifications as we have not found any interesting use case. This is most probably an error of the template designer, CobiGen will raise an error in this case. +
+
+
+

Multi module support or template target path redirects

+
+

since cobigen-core-4.0.0. One special property you can specify in the template properties is the property relocate. It will cause the current folder and its sub-folders to be relocated at destination path resolution time. Take the following example:

+
+
+
+
folder
+  - sub1
+    Template.java.ftl
+    cobigen.properties
+
+
+
+

Let the cobigen.properties file contain the line relocate=../sub2/${cwd}. Given that, the relative destination path of Template.java.ftl will be resolved to folder/sub2/Template.java. Compare template scan configuration for more information about basic path resolution. The relocate property specifies a relative path from the location of the cobigen.properties. The ${cwd} placeholder will contain the remaining relative path from the cobigen.properties location to the template file. In this basic example it just contains Template.java.ftl, but it may even be any relative path including sub-folders of sub1 and its templates. +Given the relocate feature, you can even step out of the root path, which in general is the project/maven module the input is located in. This enables template designers to even address, e.g., maven modules located next to the module the input is coming from.

+
+
+
+
+
+

Basic Template Model

+
+
+

In addition to what is served by the different model builders of the different plug-ins, CobiGen provides a minimal model based on context variables as well as CobiGen properties. The following model is independent of the input format and will be served as a template model all the time:

+
+
+ +
+
+
+
+

Plugin Mechanism

+
+
+

Since cobigen-core 4.1.0, we changed the plug-in discovery mechanism. So far it was necessary to register new plugins programmatically, which introduces the need to let every tool integration, i.e. for eclipse or maven, be dependent on every plug-in, which should be released. This made release cycles take long time as all plug-ins have to be integrated into a final release of maven or eclipse integration.

+
+
+

Now, plug-ins are automatically discovered by the Java Service Loader mechanism from the classpath. This also effects the setup of eclipse and maven integration to allow modular releases of CobiGen in future. We are now able to provide faster rollouts of bug-fixes in any of the plug-ins as they can be released completely independently.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/cobigen-core_development.html b/docs/cobigen/1.0/cobigen-core_development.html new file mode 100644 index 00000000..bca3b3cc --- /dev/null +++ b/docs/cobigen/1.0/cobigen-core_development.html @@ -0,0 +1,784 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

CobiGen Core Development

+
+
+

CobiGen uses Apache FreeMarker as engine for generation through FreeMarker templates.

+
+
+ + + + + +
+ + +
+ +
+
+
+
+

The core implementation are divided in three projects:

+
+
+
    +
  • +

    cobigen-core-api: Mainly composed by interfaces that will be called from the Eclipse plug-in.

    +
  • +
  • +

    cobigen-core: The implementation of the interfaces are within.

    +
  • +
  • +

    cobigen-core-test: As the name suggests, used for test purposes.

    +
  • +
+
+
+

Extension Mechanism

+
+
+

The extension package from the API project contains the interfaces to be implemented if necessary by the sub plugins:

+
+
+
    +
  • +

    `GeneratorPluginActivator.java`

    +
  • +
  • +

    `InputReader.java`

    +
  • +
  • +

    `MatcherInterpreter.java`

    +
  • +
  • +

    Merger.java

    +
  • +
  • +

    `TriggerInterpreter.java`

    +
  • +
  • +

    `ModelBuilder.java`

    +
  • +
+
+
+

The ModelBuilder is an interface for accessing the internal model builder instance. It is implemented by `ModelBuilder.java` from the model package of the implementation project, which provides the methods to call createModel() of the corresponding input reader of the corresponding trigger interpreter to create the object models for a given object.

+
+
+

The to package contains the transfer objects of the template, matcher, increment and variable assignment classes that will be used as a "communication channel" between the core and sub plug-in methods.

+
+
+
+
+

Plugin Registry

+
+
+

The core must load all the sub plugins to get their Merger, Matcher, TriggerInterpreter and InputReader. Those elements must implement their respective interfaces from the core.

+
+
+

Diagram 1

+
+
+

It is important to note that not all the sub plug-ins need to have implemented a Matcher and/or an InputReader (advanced information here)

+
+
+

Load Plugin

+
+

The process of loading plugins to the core is done at the eclipse-plugin initialization.

+
+
+

Each sub plugin has an activator class that extends the `GeneratorPluginActivator` interface from the extension package. That class implements the methods bindMerger() and bindTriggerInterpreter().

+
+
+

Diagram 2

+
+
+

This is the class passed as argument to the loadPlugin() method of `PluginRegister.java` of the pluginmanager package.

+
+
+

This method registers the mergers and the trigger interpreter of the sub plugins to the core. +The trigger interpreter has the correspondent input reader of the plugin.

+
+
+ + + + + +
+ + + +
+
+
+
+
+
+

CobiGen Initialization

+
+
+

The CobiGen initialization must initialize the context configuration and the FreeMarker configuration

+
+
+

FreeMarker Initialization

+
+

When a CobiGen object is instantiated, the constructor initializes the FreeMarker configuration creating a configuration instance from the class freemarker.template.Configuration and adjust its settings.

+
+
+
+
freeMarkerConfig = new Configuration(Configuration.VERSION_2_3_23);
+freeMarkerConfig.setObjectWrapper(new DefaultObjectWrapperBuilder(Configuration.VERSION_2_3_23).build());
+freeMarkerConfig.clearEncodingMap();
+freeMarkerConfig.setDefaultEncoding("UTF-8");
+freeMarkerConfig.setLocalizedLookup(false);
+freeMarkerConfig.setTemplateLoader(new NioFileSystemTemplateLoader(`configFolder`));
+
+
+
+

Using the FileSystemUtil from the util package the URI of the root folder containing the context.xml and all templates, configurations etc…​ is converted to a Path object passing it as argument to the ContextConfiguration constructor. +The ContextConfiguration creates a new ContextConfiguration from the config package with the contents initially loaded from the context.xml

+
+
+ + + + + +
+ + +
+

How the ContextConfiguration works explained deeply here.

+
+
+
+
+

The Configuration initialization requires the version of FreeMarker to be used and at the ObjectWrapper initialization as well. +The DefaultObjectWrapperBuilder creates an DefaultObjectWrapper object that maps Java objects to the type-system of FreeMarker Template Language (FTL) with the given incompatibleImprovements specified by the version used as argument.

+
+
+

The configuration of FreeMarker requires to specify to a `TemplateLoader`. A `TemplateLoader` is an interface provided by FreeMarker library that the developer should implement to fit the needs. The `TemplateLoader` implementation at CobiGen is the class `NioFileSystemTemplateLoader.java` from the config.nio package.

+
+
+

Diagram 5

+
+
+
+

Context Configuration

+
+

The context configuration reads the context.xml file from the template project (default: CobiGen_Templates) passing the path as argument to the constructor. At the constructor, it is created an instance of ContextConfigurationReader.java from the config.reader package.

+
+
+ + + + + +
+ + +
+

Please, check the CobiGen configuration for extended information about the context.xml and templates.xml configuration.

+
+
+
+
+

That reader uses the JAXB, JAXB (Java Architecture for XML Binding) provides a fast and convenient way to bind XML schemas and Java representations, making it easy for Java developers to incorporate XML data and processing functions in Java applications. As part of this process, JAXB provides methods for unmarshalling (reading) XML instance documents into Java content trees.

+
+
+

Java Architecture for XML Binding

+
+

JAXB auto generates the Java object within the JAXBContext specified at the xmlns attribute of the contextConfiguration field from the context.xml file

+
+
+
+
Unmarshaller unmarshaller = JAXBContext.newInstance(ContextConfiguration.class).createUnmarshaller();
+
+
+
+

That auto-generation follows the contextConfiguration.xsd schema. Each Java object follows the template specified with the field <xs:complexType> from the schema file.

+
+
+
+
<xs:complexType name="trigger">
+    <xs:sequence>
+         <xs:element name="containerMatcher" type="tns:containerMatcher" minOccurs="0" maxOccurs="unbounded"/>
+         <xs:element name="matcher" type="tns:matcher" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+    <xs:attribute name="id" use="required" type="xs:NCName"/>
+    <xs:attribute name="type" use="required" type="xs:string"/>
+    <xs:attribute name="templateFolder" use="required" type="xs:string"/>
+    <xs:attribute name="inputCharset" use="optional" type="xs:string" default="UTF-8"/>
+ </xs:complexType>
+ <xs:complexType name="matcher">
+    <xs:sequence>
+        <xs:element name="variableAssignment" type="tns:variableAssignment" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+    <xs:attribute name="type" type="xs:string" use="required"/>
+    <xs:attribute name="value" type="xs:string" use="required"/>
+    <xs:attribute name="accumulationType" type="tns:accumulationType" use="optional" default="OR"/>
+  </xs:complexType>
+
+
+
+

<code>JAXB</code>

+
+
+

The generated Java objects have the elements and attributes specified at the schema:

+
+
+
+
@XmlAccessorType(XmlAccessType.FIELD)
+@XmlType(name = "trigger", namespace = "http://capgemini.com/devonfw/cobigen/ContextConfiguration", propOrder = {
+    "containerMatcher",
+    "matcher"
+})
+public class Trigger {
+    @XmlElement(namespace = "http://capgemini.com/devonfw/cobigen/ContextConfiguration")
+    protected List<ContainerMatcher> containerMatcher;
+    @XmlElement(namespace = "http://capgemini.com/devonfw/cobigen/ContextConfiguration")
+    protected List<Matcher> matcher;
+    @XmlAttribute(name = "id", required = true)
+    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
+    @XmlSchemaType(name = "NCName")
+    protected String id;
+    @XmlAttribute(name = "type", required = true)
+    protected String type;
+    @XmlAttribute(name = "templateFolder", required = true)
+    protected String templateFolder;
+    @XmlAttribute(name = "inputCharset")
+    protected String inputCharset;
+    ...
+    ..
+    .
+}
+
+
+
+

This process is done when calling the unmarshal() method.

+
+
+
+
Object rootNode = unmarshaller.unmarshal(Files.newInputStream(contextFile));
+
+
+
+ + + + + +
+ + +
+

For extended information about JAXB check the offical documentation.

+
+
+
+
+
+

Version Validation

+
+

If the version retrieved after the unmarshal process is null, an InvalidConfigurationException defined at exceptions package will be thrown.

+
+
+

If it is not null, it will be compared, using the validate() method from `VersionValidator.java` from the config.versioning package, with the project version retrieved by `MavenMetadata.java`. The `MavenMetadata.java` file is provided by the POM while building the JAR file

+
+
+
+
<build>
+    <plugins>
+      <!-- Inject Maven Properties in java-templates source folder -->
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>templating-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>generate-version-class</id>
+            <goals>
+              <goal>filter-sources</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      ...
+      ..
+      .
+    </plugins>
+</build>
+
+
+
+

MavenMetadata gets the current CobiGen version by reading the <version> label inside the <project> label from the POM file

+
+
+
+
public class MavenMetadata {
+    /** Maven version */
+    public static final String VERSION = "${project.version}";
+}
+
+
+
+
+
<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+  xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>cobigen-core</artifactId>
+  <name>CobiGen</name>
+  <version>2.2.0-SNAPSHOT</version>
+  <packaging>jar</packaging>
+  ...
+  ..
+  .
+}
+
+
+
+

The comparison has three possibilities:

+
+
+
    +
  1. +

    Versions are equal → Valid

    +
  2. +
  3. +

    context.xml version is greater than current CobiGen version → InvalidConfigurationException

    +
  4. +
  5. +

    Current CobiGen version is greater than context.xml version → Compatible if there does not exist a version step (breaking change) in between; otherwise, an error is thrown.

    +
  6. +
+
+
+

Reaching this point, the configuration version and root node have been validated. Unmarshalling with schema checks is then performed to verify correctness and to give the user more hints for correcting failures.

+
+
+
+
SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
+ContextConfigurationVersion latestConfigurationVersion = ContextConfigurationVersion.getLatest();
+try (
+    InputStream schemaStream = getClass().getResourceAsStream("/schema/" + latestConfigurationVersion
+                                                              + "/contextConfiguration.xsd");
+    InputStream configInputStream = Files.newInputStream(contextFile)) {
+    Schema schema = schemaFactory.newSchema(new StreamSource(schemaStream));
+    unmarshaller.setSchema(schema);
+    rootNode = unmarshaller.unmarshal(configInputStream);
+    contextNode = (ContextConfiguration) rootNode;
+}
+
+
+
+
+

Load Triggers, Matchers, container Matcher, Accumulation Types and Variable Assignments

+
+

To finish the context configuration initialization, the triggers, matchers, container matchers, accumulation types and variable assignments are retrieved from the corresponding Java objects generated by JAXB.

+
+
+
+
public Map<String, Trigger> loadTriggers()
+private List<Matcher> loadMatchers(Trigger trigger)
+private List<ContainerMatcher> loadContainerMatchers(Trigger trigger)
+private List<VariableAssignment> loadVariableAssignments(Matcher matcher)
+
+
+
+
+
+
+
+

Perform Generation

+
+
+

Depending on the input, the generation process can begin from two different generate() methods called at the CobiGenWrapper from the eclipse-plugin:

+
+
+
+
public void generate(TemplateTo template, boolean forceOverride) throws IOException, TemplateException, MergeException {
+    if (singleNonContainerInput) {
+        Map<String, Object> model = cobiGen.getModelBuilder(inputs.get(0), template.getTriggerId()).createModel();
+        adaptModel(model);
+        cobiGen.generate(inputs.get(0), template, model, forceOverride);
+    } else {
+        for (Object input : inputs) {
+            cobiGen.generate(input, template, forceOverride);
+        }
+    }
+}
+
+
+
+

Single Non Container Input

+
+

If the input is a single non container input, first step is to create the model, then allow customization by the user (adaptModel()) and finally call the generate() method from CobiGen using the input, template, model and the boolean forceOverride.

+
+
+

The generation process in this case will follow this main steps:

+
+
+
    +
  1. +

    Check if the input is not null

    +
  2. +
  3. +

    Get the trigger interpreter for the type of the trigger of the template

    +
  4. +
  5. +

    Set the root folder for the templates to use for the generation

    +
  6. +
  7. +

    Get the input reader for the trigger interpreter retrieved

    +
  8. +
  9. +

    Test if the input is a package.
    +This is only possible in the case of Java inputs. As the input is a single non container input, this check will fail and the execution will continue.

    +
  10. +
  11. +

    Check if the model parameter is null and if it is, create a new model
    +As the model has been created at the CobiGenWrapper, there is no need to create it again.

    +
  12. +
  13. +

    Get the destination file.

    +
  14. +
  15. +

    Check if the destination file already exists
    +If it exists, but the forceOverride is set to true or the merge strategy of the template is null, the file will be overwritten, not merged. Otherwise, first generate output into a writer object, get the merger and merge the original file with the writer and write the file with the merge result.

    +
  16. +
  17. +

    If the file does not exist, simply write the file.

    +
  18. +
+
+
+
+

Single Container Input or multiple files selection

+
+

In the other case — if the input is a single container input or a multiple files selection — the generation process will be performed for each individual file of the selection, but the model will be created at step 6 of the Single Non Container Input steps, without allowing user customization.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/cobigen-documentation.html b/docs/cobigen/1.0/cobigen-documentation.html new file mode 100644 index 00000000..9a525d14 --- /dev/null +++ b/docs/cobigen/1.0/cobigen-documentation.html @@ -0,0 +1,292 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

==Wiki documentation — conventions & hints +== Conventions +* Stick to the devonfw-docgen conventions to make the generation of the PDF document from the wiki work properly. +* The source code of CobiGen should be documented completely and consistent using JavaDoc. Please check JavaDoc as well after changing any logic. +* Further documentation of more abstract and informative issues for users, template developers and CobiGen developers should be done using the GitHub Wiki + * All GitHub Wiki pages should be edited in adoc mode to ensure the PDF documentation generation possibility

+
+
+

Hints

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/cobigen-eclipse_installation.html b/docs/cobigen/1.0/cobigen-eclipse_installation.html new file mode 100644 index 00000000..506e34db --- /dev/null +++ b/docs/cobigen/1.0/cobigen-eclipse_installation.html @@ -0,0 +1,358 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

==Installation

+
+
+
+
+

Remark: CobiGen is preinstalled in the devonfw/devon-ide.

+
+
+
+
+

Preconditions

+
+
+
    +
  • +

    Eclipse 4.x

    +
  • +
  • +

    Java 7 Runtime (for starting eclipse with CobiGen). This is independent from the target version of your developed code.

    +
  • +
+
+
+
+
+

Installation steps

+
+
+
    +
  1. +

    Open the eclipse installation dialog
    +menu bar → HelpInstall new Software…​

    +
    +

    01 install new software

    +
    +
  2. +
  3. +

    Open CobiGen’s update site
    +Insert the update site of your interest into the field Work with and press Add …​
    +Unless you know what you are doing we recommend you install every plugin as shown in the picture below.

    +
    + +
    +
  4. +
  5. +

    Follow the installation wizard
    +Select CobiGen Eclipse Plug-inNextNext → accept the license → FinishOKYes

    +
  6. +
  7. +

    Once installed, a new menu entry named "CobiGen" will show up in the Package Explorer’s context menu. In the sub menu there will be the Generate…​ command, which may ask you to update the templates, and then you can start the generation wizard of CobiGen. You can adapt the templates by clicking on Adapt Templates, which will give you the possibility to import the CobiGen_Templates automatically so that you can modify them.

    +
  8. +
  9. +

    Checkout (clone) your project’s templates folder or use the current templates released with CobiGen (https://github.com/devonfw/cobigen/tree/master/cobigen-templates) and then choose Import -> General -> Existing Projects into Workspace to import the templates into your workspace.

    +
  10. +
  11. +

    Now you can start generating. To get an introduction of CobiGen try the devon4j templates and work on the devon4j sample application. There you might want to start with Entity objects as a selection to run CobiGen with, which will give you a good overview of what CobiGen can be used for right out of the box in devon4j based development. If you need some more introduction in how to come up with your templates and increments, please be referred to the documentation of the context configuration and the templates configuration

    +
  12. +
+
+
+

Depending on your context configuration, the menu entry Generate…​ may be grayed out or not. See the documentation for more information about valid selections for generation.

+
+
+
+
+

Updating

+
+
+

In general updating CobiGen for eclipse is done via the update mechanism of eclipse directly, as shown on image below:

+
+
+

03 update software

+
+
+

Upgrading eclipse CobiGen plug-in to v3.0.0 needs some more attention of the user due to a changed plug-in architecture of CobiGen’s core module and the eclipse integration. Eventually, we were able to provide any plug-in of CobiGen separately as its own eclipse bundle (fragment), which is automatically discovered by the main CobiGen Eclipse plug-in after installation.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/cobigen-eclipse_logging.html b/docs/cobigen/1.0/cobigen-eclipse_logging.html new file mode 100644 index 00000000..a01fd6f8 --- /dev/null +++ b/docs/cobigen/1.0/cobigen-eclipse_logging.html @@ -0,0 +1,290 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Logging

+
+
+

If you have any problem with the CobiGen eclipse plug-in, you might want to enable logging to provide more information for further problem analysis. This can be done easily by adding the logback.xml to the root of the CobiGen_templates configuration folder. The file should contain at least the following contents, whereas you should specify an absolute path to the target log file (at the TODO). If you are using the cobigen-templates project, you might have the contents already specified but partially commented.

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<!-- This file is for logback classic. The file contains the configuration for sl4j logging -->
+<configuration>
+    <appender name="FILE" class="ch.qos.logback.core.FileAppender">
+        <file><!-- TODO choose your log file location --></file>
+        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
+            <Pattern>%n%date %d{HH:mm:ss.SSS} [%thread] %-5level %logger - %msg%n
+            </Pattern>
+        </encoder>
+    </appender>
+    <root level="DEBUG">
+        <appender-ref ref="FILE" />
+    </root>
+</configuration>
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/cobigen-eclipse_usage.html b/docs/cobigen/1.0/cobigen-eclipse_usage.html new file mode 100644 index 00000000..8a58c3cb --- /dev/null +++ b/docs/cobigen/1.0/cobigen-eclipse_usage.html @@ -0,0 +1,464 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==Usage

+
+
+

CobiGen has two different generation modes depending on the input selected for generation. The first one is the simple mode, which will be started if the input contains only one input artifact, e.g. for Java an input artifact currently is a Java file. The second one is the batch mode, which will be started if the input contains multiple input artifacts, e.g. for Java this means a list of files. In general this means also that the batch mode might be started when selecting complex models as inputs, which contain multiple input artifacts. The latter scenario has only been covered in the research group, yet.

+
+
+

Simple Mode

+
+
+

Selecting the menu entry Generate…​ the generation wizard will be opened:

+
+
+

generate wizard page1

+
+
+

The left side of the wizard shows all available increments, which can be selected to be generated. Increments are a container like concept encompassing multiple files to be generated, which should result in a semantically closed generation output. +On the right side of the wizard all files are shown, which might be affected by the generation - dependent on the increment selection of files on the left side. The type of modification of each file will be encoded into the following color scheme if the files are selected for generation:

+
+
+
    +
  • +

    green: files, which are currently non-existent in the file system. These files will be created during generation

    +
  • +
  • +

    yellow: files, which are currently existent in the file system and which are configured to be merged with generated contents.

    +
  • +
  • +

    red: files, which are currently existent in the file system. These files will be overwritten if manually selected.

    +
  • +
  • +

    no color: files, which are currently existent in the file system. Additionally files, which were deselected and thus will be ignored during generation.

    +
  • +
+
+
+

Selecting an increment on the left side will initialize the selection of all shown files to be generated on the right side, whereas green and yellow categorized files will be selected initially. A manual modification of the pre-selection can be performed by switching to the customization tree using the Customize button on the right lower corner.

+
+
+
+
+

Optional: If you want to customize the generation object model of a Java input class, you might continue with the Next > button instead of finishing the generation wizard. The next generation wizard page is currently available for Java file inputs and lists all non-static fields of the input. deselecting entries will lead to an adapted object model for generation, such that deselected fields will be removed in the object model for generation. By default all fields will be included in the object model.

+
+
+
+
+

Using the Finish button, the generation will be performed. Finally, CobiGen runs the eclipse internal organize imports and format source code for all generated and modified sources. Thus it is possible that — especially for organize imports — a dialog opens if some types could not be determined automatically. This dialog can be easily closed by pressing Continue. If the generation is finished, the Success! dialog will pop up.

+
+
+
+
+

Batch mode

+
+
+

If there are multiple input elements selected, e.g., Java files, CobiGen will be started in batch mode. For the generation wizard dialog this means, that the generation preview will be constrained to the first selected input element. It does not preview the generation for each element of the selection or of a complex input. The selection of the files to be generated will be generated for each input element analogously afterwards.

+
+
+

generate wizard page1 batch

+
+
+

Thus the color encoding differs also a little bit:

+
+
+
    +
  • +

    yellow: files, which are configured to be merged.

    +
  • +
  • +

    red: files, which are not configured with any merge strategy and thus will be created if the file does not exist or overwritten if the file already exists

    +
  • +
  • +

    no color: files, which will be ignored during generation

    +
  • +
+
+
+

Initially all possible files to be generated will be selected.

+
+
+
+
+

Health Check

+
+
+

To check whether CobiGen runs appropriately for the selected element(s) the user can perform a Health Check by activating the respective menu entry as shown below.

+
+
+

health check menu entry

+
+
+

The simple Health Check includes 3 checks. As long as any of these steps fails, the Generate menu entry is grayed out.

+
+
+

The first step is to check whether the generation configuration is available at all. If this check fails you will see the following message:

+
+
+

health check no templates

+
+
+

This indicates, that there is no Project named CobiGen_Templates available in the current workspace. To run CobiGen appropriately, it is necessary to have a configuration project named CobiGen_Templates imported into your workspace. For more information see chapter Eclipse Installation.

+
+
+

The second step is to check whether the template project includes a valid context.xml. If this check fails, you will see the following message:

+
+
+

health check invalid config

+
+
+

This means that either your context.xml

+
+
+
    +
  • +

    does not exist (or has another name)

    +
  • +
  • +

    or it is not valid one in any released version of CobiGen

    +
  • +
  • +

    or there is simply no automatic routine of upgrading your context configuration to a valid state.

    +
  • +
+
+
+

If all this is not the case, such as, there is a context.xml, which can be successfully read by CobiGen, you might get the following information:

+
+
+

health check old context

+
+
+

This means that your context.xml is available with the correct name but it is outdated (belongs to an older CobiGen version). In this case just click on Upgrade Context Configuration to get the latest version.

+
+
+
+
+

Remark: This will create a backup of your current context configuration and converts your old configuration to the new format. The upgrade will remove all comments from the file, which could be retrieved later on again from the backup. +If the creation of the backup fails, you will be asked to continue or to abort.

+
+
+
+
+

The third step checks whether there are templates for the selected element(s). If this check fails, you will see the following message:

+
+
+

health check no matching triggers

+
+
+

This indicates that no trigger has been activated which matches the current selection. The reason might be that your selection is faulty or that you imported the wrong template project (e.g. you are working on a devon4j project, but imported the templates for the Register Factory). If you are a template developer, have a look at the trigger configuration and at the corresponding available plug-in implementations of triggers, like e.g., Java Plug-in or XML Plug-in.

+
+
+

If all the checks are passed you see the following message:

+
+
+

health check all OK

+
+
+

In this case everything is OK and the Generate button is not grayed out anymore so that you are able to trigger it and see the [simple-mode].

+
+
+

In addition to the basic check of the context configuration, you also have the opportunity to perform an Advanced Health Check, which will check all available templates configurations (templates.xml) of path-depth=1 from the configuration project root according to their compatibility.

+
+
+

health check advanced up to date

+
+
+

Analogous to the upgrade of the context configuration, the Advanced Health Check will also provide upgrade functionality for templates configurations if available.

+
+
+
+
+

Update Templates

+
+
+

Update Templates: Select an entity file, right-click, and choose CobiGen → Update Templates. Then click Download; a message will confirm once the templates have been downloaded successfully.

+
+
+
+
+

Adapt Templates

+
+
+

Adapt Templates: Select any file, right-click, and choose `CobiGen → Adapt Templates`. If the CobiGen templates jar is not available, it will be downloaded automatically. If the CobiGen templates are already present, the existing templates in the workspace will be overridden. Click OK; a message will confirm once the templates have been imported successfully.

+
+
+

Finally, please change the Java version of the project to 1.8 so that you don’t have any compilation errors.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/cobigen-htmlplugin.html b/docs/cobigen/1.0/cobigen-htmlplugin.html new file mode 100644 index 00000000..c6196eeb --- /dev/null +++ b/docs/cobigen/1.0/cobigen-htmlplugin.html @@ -0,0 +1,348 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

==HTML Plug-in

+
+
+

The HTML Plug-in enables merging resulting HTML files into existing ones. This plug-in is currently used for generating an Angular2 client. Currently, the generation of an Angular2 client requires an ETO java object as input, so there is no need to implement an input reader for ts artifacts for the moment.

+
+
+

Trigger Extensions

+
+
+

As for the Angular2 generation the input is a java object, the trigger expressions (including matchers and variable assignments) are implemented as Java.

+
+
+
+
+

Merger extensions

+
+
+

There are currently two merge strategies:

+
+
+
    +
  • +

    merge strategy html-ng* (adds the new code, respecting the existing code in case of conflict)

    +
  • +
  • +

    merge strategy html-ng*_override (adds the new code, overwriting the existing code in case of conflict)

    +
  • +
+
+
+

The merging of two Angular2 files will be processed as follows:

+
+
+

The merge algorithm handles the following AST nodes:

+
+
+
    +
  • +

    md-nav-list

    +
  • +
  • +

    a

    +
  • +
  • +

    form

    +
  • +
  • +

    md-input-container

    +
  • +
  • +

    input

    +
  • +
  • +

    name (for name attribute)

    +
  • +
  • +

    ngIf

    +
  • +
+
+
+ + + + + +
+ + +Be aware, that the HTML merger is not generic and only handles the described tags needed for merging code of a basic Angular client implementation. For future versions, it is planned to implement a more generic solution. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/cobigen-javaplugin.html b/docs/cobigen/1.0/cobigen-javaplugin.html new file mode 100644 index 00000000..7b59456b --- /dev/null +++ b/docs/cobigen/1.0/cobigen-javaplugin.html @@ -0,0 +1,702 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==Java Plug-in +The CobiGen Java Plug-in comes with a new input reader for java artifacts, new java related trigger and matchers, as well as a merging mechanism for Java sources.

+
+
+

Trigger extension

+
+
+

The Java Plug-in provides a new trigger for Java related inputs. It accepts different representations as inputs (see Java input reader) and provides additional matching and variable assignment mechanisms. The configuration in the context.xml for this trigger looks like this:

+
+
+
    +
  • +

    type 'java'

    +
    +
    Example of a java trigger definition
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables Java elements as inputs.

    +
    +
  • +
+
+
+

Matcher types

+
+

With the trigger you might define matchers, which restrict the input upon specific aspects:

+
+
+
    +
  • +

    type fqn → full qualified name matching

    +
    +
    Example of a java trigger definition with a full qualified name matcher
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="fqn" value="(.+)\.persistence\.([^\.]+)\.entity\.([^\.]+)">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the full qualified name (fqn) of the declaring input class matches the given regular expression (value).

    +
    +
  • +
  • +

    type 'package' → package name of the input

    +
    +
    Example of a java trigger definition with a package name matcher
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="package" value="(.+)\.persistence\.([^\.]+)\.entity">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the package name (package) of the declaring input class matches the given regular expression (value).

    +
    +
  • +
  • +

    type 'expression'

    +
    +
    Example of a java trigger definition with a package name matcher
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="expression" value="instanceof java.lang.String">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the expression evaluates to true. Valid expressions are

    +
    +
  • +
  • +

    instanceof fqn: checks an 'is a' relation of the input type

    +
  • +
  • +

    isAbstract: checks, whether the input type is declared abstract

    +
  • +
+
+
+
+

Container Matcher types

+
+

Additionally, the java plugin provides the ability to match packages (containers) as follows:

+
+
+
    +
  • +

    type 'package'

    +
    +
    Example of a java trigger definition with a container matcher for packages
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <containerMatcher type="package" value="com\.example\.app\.component1\.persistence.entity" />
    +</trigger>
    +
    +
    +
    +

    The container matcher matches packages provided by the type com.capgemini.cobigen.javaplugin.inputreader.to.PackageFolder with a regular expression stated in the value attribute. (See containerMatcher semantics to get more information about containerMatchers itself.)

    +
    +
  • +
+
+
+
+

Variable Assignment types

+
+

Furthermore, it provides the ability to extract information from each input for further processing in the templates. The values assigned by variable assignments will be made available in template and the destinationPath of context.xml through the namespace variables.<key>. The Java Plug-in currently provides two different mechanisms:

+
+
+
    +
  • +

    type 'regex' → regular expression group

    +
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="fqn" value="(.+)\.persistence\.([^\.]+)\.entity\.([^\.]+)">
    +        <variableAssignment type="regex" key="rootPackage" value="1" />
    +        <variableAssignment type="regex" key="component" value="2" />
    +        <variableAssignment type="regex" key="pojoName" value="3" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value of the given regular expression group number to the given key.

+
+
+
    +
  • +

    type 'constant' → constant parameter

    +
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="fqn" value="(.+)\.persistence\.([^\.]+)\.entity\.([^\.]+)">
    +        <variableAssignment type="constant" key="domain" value="restaurant" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value to the key as a constant.

+
+
+
+

Java input reader

+
+

The CobiGen Java Plug-in implements an input reader for parsed java sources as well as for java Class<?> objects (loaded by reflection). So API users can pass Class<?> objects as well as JavaClass objects for generation. The latter depends on QDox, which will be used for parsing and merging java sources. For getting the right parsed java inputs you can easily use the JavaParserUtil, which provides static functionality to parse java files and get the appropriate JavaClass object.

+
+
+

Furthermore, due to restrictions on both inputs according to model building (see below), it is also possible to provide an array of length two as an input, which contains the Class<?> as well as the JavaClass object of the same class.

+
+
+

Template object model

+
+

No matter whether you use reflection objects or parsed java classes as input, you will get the following object model for template creation:

+
+
+
    +
  • +

    classObject ('Class' :: Class object of the Java input)

    +
  • +
  • +

    POJO

    +
    +
      +
    • +

      name ('String' :: Simple name of the input class)

      +
    • +
    • +

      package ('String' :: Package name of the input class)

      +
    • +
    • +

      canonicalName ('String' :: Full qualified name of the input class)

      +
    • +
    • +

      annotations ('Map<String, Object>' :: Annotations, which will be represented by a mapping of the full qualified type of an annotation to its value. To gain template compatibility, the key will be stored with '_' instead of '.' in the full qualified annotation type. Furthermore, the annotation might be recursively defined and thus be accessed using the same type of mapping. Example ${pojo.annotations.javax_persistence_Id})

      +
    • +
    • +

      JavaDoc ('Map<String, Object>') :: A generic way of addressing all available JavaDoc doclets and comments. The only fixed variable is comment (see below). All other provided variables depend on the doclets found while parsing. The value of a doclet can be accessed by the doclets name (e.g. ${…​JavaDoc.author}). In case of doclet tags that can be declared multiple times (currently @param and @throws), you will get a map, which you access in a specific way (see below).

      +
      +
        +
      • +

        comment ('String' :: JavaDoc comment, which does not include any doclets)

        +
      • +
      • +

        params ('Map<String,String> :: JavaDoc parameter info. If the comment follows proper conventions, the key will be the name of the parameter and the value being its description. You can also access the parameters by their number, as in arg0, arg1 etc, following the order of declaration in the signature, not in order of JavaDoc)

        +
      • +
      • +

        throws ('Map<String,String> :: JavaDoc exception info. If the comment follows proper conventions, the key will be the name of the thrown exception and the value being its description)

        +
      • +
      +
      +
    • +
    • +

      extendedType ('Map<String, Object>' :: The supertype, represented by a set of mappings (since cobigen-javaplugin v1.1.0)

      +
      +
        +
      • +

        name ('String' :: Simple name of the supertype)

        +
      • +
      • +

        canonicalName ('String' :: Full qualified name of the supertype)

        +
      • +
      • +

        package ('String' :: Package name of the supertype)

        +
      • +
      +
      +
    • +
    • +

      implementedTypes ('List<Map<String, Object>>' :: A list of all implementedTypes (interfaces) represented by a set of mappings (since cobigen-javaplugin v1.1.0)

      +
      +
        +
      • +

        interface ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          name ('String' :: Simple name of the interface)

          +
        • +
        • +

          canonicalName ('String' :: Full qualified name of the interface)

          +
        • +
        • +

          package ('String' :: Package name of the interface)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      fields ('List<Map<String, Object>>' :: List of fields of the input class) (renamed since cobigen-javaplugin v1.2.0; previously attributes)

      +
      +
        +
      • +

        field ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          name ('String' :: Name of the Java field)

          +
        • +
        • +

          type ('String' :: Type of the Java field)

          +
        • +
        • +

          canonicalType ('String' :: Full qualified type declaration of the Java field’s type)

          +
        • +
        • +

          'isId' (Deprecated :: boolean :: true if the Java field or its setter or its getter is annotated with the javax.persistence.Id annotation, false otherwise. Equivalent to ${pojo.attributes[i].annotations.javax_persistence_Id?has_content})

          +
        • +
        • +

          JavaDoc (see pojo.JavaDoc)

          +
        • +
        • +

          annotations (see pojo.annotations with the remark, that for fields all annotations of its setter and getter will also be collected)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      methodAccessibleFields ('List<Map<String, Object>>' :: List of fields of the input class or its inherited classes, which are accessible using setter and getter methods)

      +
      +
        +
      • +

        same as for field (but without JavaDoc!)

        +
      • +
      +
      +
    • +
    • +

      methods ('List<Map<String, Object>>' :: The list of all methods, whereas one method will be represented by a set of property mappings)

      +
      +
        +
      • +

        method ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          name ('String' :: Name of the method)

          +
        • +
        • +

          JavaDoc (see pojo.JavaDoc)

          +
        • +
        • +

          annotations (see pojo.annotations)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+

Furthermore, when providing a Class<?> object as input, the Java Plug-in will provide additional functionalities as template methods (deprecated):

+
+
+
    +
  1. +

    isAbstract(String fqn) (Checks whether the type with the given full qualified name is an abstract class. Returns a Boolean value.) (since cobigen-javaplugin v1.1.1) (deprecated)

    +
  2. +
  3. +

    isSubtypeOf(String subType, String superType) (Checks whether the subType declared by its full qualified name is a sub type of the superType declared by its full qualified name. Equals the Java expression subType instanceof superType and so also returns a Boolean value.) (since cobigen-javaplugin v1.1.1) (deprecated)

    +
  4. +
+
+
+
+

Model Restrictions

+
+

As stated before both inputs (Class<?> objects and JavaClass objects ) have their restrictions according to model building. In the following these restrictions are listed for both models, the ParsedJava Model which results from an JavaClass input and the ReflectedJava Model, which results from a Class<?> input.

+
+
+

It is important to understand, that these restrictions are only present if you work with either Parsed Model OR the Reflected Model. If you use the Maven Build Plug-in or Eclipse Plug-in these two models are merged together so that they can mutually compensate their weaknesses.

+
+
+
Parsed Model
+
+
    +
  • +

    annotations of the input’s supertype are not accessible due to restrictions in the QDox library. So pojo.methodAccessibleFields[i].annotations will always be empty for super type fields.

    +
  • +
  • +

    annotations' parameter values are available as Strings only (e.g. the Boolean value true is transformed into "true"). This also holds for the Reflected Model.

    +
  • +
  • +

    fields of "supertypes" of the input JavaClass are not available at all. So pojo.methodAccessibleFields will only contain the input type’s and the direct superclass’s fields.

    +
  • +
  • +

    [resolved, since cobigen-javaplugin 1.3.1] field types of supertypes are always canonical. So pojo.methodAccessibleFields[i].type will always provide the same value as pojo.methodAccessibleFields[i].canonicalType (e.g. java.lang.String instead of the expected String) for super type fields.

    +
  • +
+
+
+
+
Reflected Model
+
+
    +
  • +

    annotations' parameter values are available as Strings only (e.g. the Boolean value true is transformed into "true"). This also holds for the Parsed Model.

    +
  • +
  • +

    annotations are only available if the respective annotation has @Retention(value=RUNTIME), otherwise the annotations are to be discarded by the compiler or by the VM at run time. For more information see RetentionPolicy.

    +
  • +
  • +

    information about generic types is lost. E.g. a field’s/ methodAccessibleField’s type for List<String> can only be provided as List<?>.

    +
  • +
+
+
+
+
+
+
+
+

Merger extensions

+
+
+

The Java Plug-in provides two additional merging strategies for Java sources, which can be configured in the templates.xml:

+
+
+
    +
  • +

    Merge strategy javamerge (merges two Java resources and keeps the existing Java elements on conflicts)

    +
  • +
  • +

    Merge strategy javamerge_override (merges two Java resources and overrides the existing Java elements on conflicts)

    +
  • +
+
+
+

In general merging of two Java sources will be processed as follows:

+
+
+

Precondition of processing a merge of generated contents and existing ones is a common Java root class resp. surrounding class. If this is the case this class and all further inner classes will be merged recursively. Therefore, the following Java elements will be merged and conflicts will be resolved according to the configured merge strategy:

+
+
+
    +
  • +

    extends and implements relations of a class: Conflicts can only occur for the extends relation.

    +
  • +
  • +

    Annotations of a class: Conflicted if an annotation declaration already exists.

    +
  • +
  • +

    Fields of a class: Conflicted if there is already a field with the same name in the existing sources. (Will be replaced / ignored in total, also including annotations)

    +
  • +
  • +

    Methods of a class: Conflicted if there is already a method with the same signature in the existing sources. (Will be replaced / ignored in total, also including annotations)

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/cobigen-jsonplugin.html b/docs/cobigen/1.0/cobigen-jsonplugin.html new file mode 100644 index 00000000..88199986 --- /dev/null +++ b/docs/cobigen/1.0/cobigen-jsonplugin.html @@ -0,0 +1,336 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

==JSON Plug-in +At the moment the plug-in can be used to merge generic JSON files depending on the merge strategy defined in the templates.

+
+
+

Merger extensions

+
+
+

There are currently these merge strategies:

+
+
+

Generic JSON Merge

+
+
+
    +
  • +

    merge strategy jsonmerge (add the new code respecting the existent in case of conflict)

    +
  • +
  • +

    merge strategy jsonmerge_override (add the new code overwriting the existent in case of conflict)

    +
    +
      +
    1. +

      JsonArray’s will be ignored / replaced in total

      +
    2. +
    3. +

      JsonObjects in conflict will be processed recursively, ignoring existing elements and adding non-existent ones.

      +
    4. +
    +
    +
  • +
+
+
+
+
+

Merge Process

+
+
+

Generic JSON Merging

+
+

The merge process will be:

+
+
+
    +
  1. +

    Add non existent JSON Objects from patch file to base file.

    +
  2. +
  3. +

    For objects existent in both files, non-existent keys from the patch object will be added to the base object. This process will be done recursively for all existent objects.

    +
  4. +
  5. +

    For JSON Arrays existent in both files, the arrays will be just concatenated.

    +
  6. +
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/cobigen-maven_configuration.html b/docs/cobigen/1.0/cobigen-maven_configuration.html new file mode 100644 index 00000000..8263a3af --- /dev/null +++ b/docs/cobigen/1.0/cobigen-maven_configuration.html @@ -0,0 +1,542 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==Maven Build Integration

+
+
+

For maven integration of CobiGen you can include the following build plugin into your build:

+
+
+
Build integration of CobiGen
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>com.devonfw.cobigen</groupId>
+      <artifactId>cobigen-maven-plugin</artifactId>
+      <version>VERSION-YOU-LIKE</version>
+      <executions>
+        <execution>
+          <id>cobigen-generate</id>
+          <phase>generate-resources</phase>
+          <goals>
+            <goal>generate</goal>
+          </goals>
+        </execution>
+      </executions>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+

Available goals

+
+
+
    +
  • +

    generate: Generates contents configured by the standard non-compiled configuration folder. Thus generation can be controlled/configured due to an location URI of the configuration and template or increment ids to be generated for a set of inputs.

    +
  • +
+
+
+

Available phases are all phases, which already provide compiled sources such that CobiGen can perform reflection on it. Thus possible phases are for example package, site.

+
+
+

Provide Template Set

+
+
+

For generation using the CobiGen maven plug-in, the CobiGen configuration can be provided in two different styles:

+
+
+
    +
  1. +

    By a configurationFolder, which should be available on the file system whenever you are running the generation. The value of configurationFolder should correspond to the maven file path syntax.

    +
    +
    Provide CobiGen configuration by configuration folder (file)
    +
    +
    <build>
    +  <plugins>
    +    <plugin>
    +      ...
    +      <configuration>
    +        <configurationFolder>cobigen-templates</configurationFolder>
    +      </configuration>
    +       ...
    +     </plugin>
    +  </plugins>
    +</build>
    +
    +
    +
  2. +
  3. +

    By maven dependency, whereas the maven dependency should stick on the same conventions as the configuration folder. This explicitly means that it should contain non-compiled resources as well as the context.xml on top-level.

    +
    +
    Provide CobiGen configuration by maven dependency (jar)
    +
    +
    <build>
    +  <plugins>
    +    <plugin>
    +      ...
    +      <dependencies>
    +        <dependency>
    +          <groupId>com.devonfw.cobigen</groupId>
    +          <artifactId>templates-XYZ</artifactId>
    +          <version>VERSION-YOU-LIKE</version>
    +        </dependency>
    +      </dependencies>
    +      ...
    +    </plugin>
    +  </plugins>
    +</build>
    +
    +
    +
    +

    We currently provide a generic deployed version of the templates on the devonfw-nexus for Register Factory (<artifactId>cobigen-templates-rf</artifactId>) and for the devonfw itself (<artifactId>cobigen-templates-devonfw</artifactId>).

    +
    +
  4. +
+
+
+
+
+

Build Configuration

+
+
+

Using the following configuration you will be able to customize your generation as follows:

+
+
+
    +
  • +

    <destinationRoot> specifies the root directory the relative destinationPath of CobiGen templates configuration should depend on. Default ${basedir}

    +
  • +
  • +

    <inputPackage> declares a package name to be used as input for batch generation. This refers directly to the CobiGen Java Plug-in container matchers of type package configuration.

    +
  • +
  • +

    <inputFile> declares a file to be used as input. The CobiGen maven plug-in will try to parse this file to get an appropriate input to be interpreted by any CobiGen plug-in.

    +
  • +
  • +

    <increment> specifies an increment ID to be generated. You can specify one single increment with content ALL to generate all increments matching the input(s).

    +
  • +
  • +

    <template> specifies a template ID to be generated. You can specify one single template with content ALL to generate all templates matching the input(s).

    +
  • +
  • +

    <forceOverride> specifies an overriding behavior, which enables non-mergeable resources to be completely rewritten by generated contents. For mergeable resources this flag indicates, that conflicting fragments during merge will be replaced by generated content. Default: false

    +
  • +
  • +

    <failOnNothingGenerated> specifies whether the build should fail if the execution does not generate anything.

    +
  • +
+
+
+
Example for a simple build configuration
+
+
<build>
+  <plugins>
+    <plugin>
+       ...
+      <configuration>
+        <destinationRoot>${basedir}</destinationRoot>
+        <inputPackages>
+          <inputPackage>package.to.be.used.as.input</inputPackage>
+        </inputPackages>
+        <inputFiles>
+          <inputFile>path/to/file/to/be/used/as/input</inputFile>
+        </inputFiles>
+        <increments>
+          <increment>IncrementID</increment>
+        </increments>
+        <templates>
+          <template>TemplateID</template>
+        </templates>
+        <forceOverride>false</forceOverride>
+      </configuration>
+        ...
+    </plugin>
+  </plugins>
+</build>
+
+
+
+
+
+

Plugin Injection Since v3

+
+
+

Since version 3.0.0, the plug-in mechanism has changed to support modular releases of the CobiGen plug-ins. Therefore, you need to add all plug-ins to be used for generation. Take the following example to get the idea:

+
+
+
Example of a full configuration including plugins
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>com.devonfw.cobigen</groupId>
+      <artifactId>cobigen-maven-plugin</artifactId>
+      <version>VERSION-YOU-LIKE</version>
+      <executions>
+        ...
+      </executions>
+      <configuration>
+        ...
+      </configuration>
+      <dependencies>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>templates-devon4j</artifactId>
+          <version>2.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>tempeng-freemarker</artifactId>
+          <version>1.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>javaplugin</artifactId>
+          <version>1.6.0</version>
+        </dependency>
+      </dependencies>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+
+
+

A full example

+
+
+
    +
  1. +

    A complete maven configuration example

    +
  2. +
+
+
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>com.devonfw.cobigen</groupId>
+      <artifactId>cobigen-maven-plugin</artifactId>
+      <version>6.0.0</version>
+      <executions>
+        <execution>
+          <id>generate</id>
+          <phase>package</phase>
+          <goals>
+            <goal>generate</goal>
+          </goals>
+        </execution>
+      </executions>
+      <configuration>
+        <inputFiles>
+          <inputFile>src/main/java/io/github/devonfw/cobigen/generator/dataaccess/api/InputEntity.java</inputFile>
+        </inputFiles>
+        <increments>
+          <increment>dataaccess_infrastructure</increment>
+          <increment>daos</increment>
+        </increments>
+        <failOnNothingGenerated>false</failOnNothingGenerated>
+      </configuration>
+      <dependencies>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>templates-devon4j</artifactId>
+          <version>2.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>tempeng-freemarker</artifactId>
+          <version>2.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>javaplugin</artifactId>
+          <version>1.6.0</version>
+        </dependency>
+      </dependencies>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/cobigen-openapiplugin.html b/docs/cobigen/1.0/cobigen-openapiplugin.html new file mode 100644 index 00000000..bb6f7ddd --- /dev/null +++ b/docs/cobigen/1.0/cobigen-openapiplugin.html @@ -0,0 +1,1084 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==OpenAPI Plug-in

+
+
+

The OpenAPI Plug-in enables the support for Swagger files that follow the OpenAPI 3.0 standard as input for CobiGen. Until now, CobiGen was thought to follow a "code first" generation; with this plugin, it can now also follow the "contract first" strategy.

+
+
+
    +
  • +

    Code First

    +
    +
      +
    • +

      Generating from a file with code (Java/XML code in our case)

      +
    • +
    +
    +
  • +
  • +

    Contract First

    +
    +
      +
    • +

      Generation from a full definition file (Swagger in this case). This file contains all the information about entities, operations, etc…​

      +
    • +
    +
    +
  • +
+
+
+ + + + + +
+ + +If you are not a CobiGen developer, you will be more interested in usage. +
+
+
+

Trigger Extensions

+
+
+

The OpenAPI Plug-in provides a new trigger for Swagger OpenAPI 3.0 related inputs. It accepts different representations as inputs (see OpenAPI input reader) and provides additional matching and variable assignment mechanisms. The configuration in the context.xml for this trigger looks like this:

+
+
+
    +
  • +

    type openapi

    +
    +
    Example of a OpenAPI trigger definition
    +
    +
    <trigger id="..." type="openapi" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables OpenAPI elements as inputs.

    +
    +
  • +
+
+
+

Matcher type

+
+

With the trigger you might define matchers, which restrict the input upon specific aspects:

+
+
+
    +
  • +

    type 'element' → An object

    +
  • +
+
+
+

This trigger will be enabled if the element (Java Object) of the input file is an EntityDef (value).

+
+
+
+

Container Matcher type

+
+

Additionally, the OpenAPI plug-in provides the ability to match containers as follows:

+
+
+
    +
  • +

    type 'element'

    +
  • +
+
+
+

The container matcher matches elements as Java Objects; in this case it will always be an OpenAPIFile object. (See containerMatcher semantics to get more information about containerMatchers itself.)

+
+
+
+

Variable Assignment types

+
+

Furthermore, it provides the ability to extract information from each input for further processing in the templates. The values assigned by variable assignments will be made available in template and the destinationPath of context.xml through the namespace variables.<key>. The OpenAPI Plug-in currently provides two different mechanisms:

+
+
+
    +
  • +

    type 'constant' → constant parameter

    +
    +
    +
    <trigger id="..." type="openapi" templateFolder="...">
    +    <containerMatcher type="element" value="OpenApiFile"/>
    +    <matcher type="element" value="EntityDef">
    +        <variableAssignment type="constant" key="rootPackage" value="com.capgemini.demo" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns a constant value to the given key. +In this case, the constant type variableAssignment is used to specify the root package where the generator will place the generated files.

+
+
+
    +
  • +

    type 'extension' → Extraction of the info extensions and the extensions of each entity. (the tags that start with "x-…​").

    +
    +
    +
      <trigger id="..." type="openapi" templateFolder="...">
    +    <containerMatcher type="element" value="OpenApiFile"/>
    +    <matcher type="element" value="EntityDef">
    +      <variableAssignment type="extension" key="testingAttribute" value="x-test"/>
    +      <variableAssignment type="extension" key="rootPackage" value="x-rootpackage"/>
    +      <variableAssignment type="extension" key="globalVariable" value="x-global"/>
    +    </matcher>
    +  </trigger>
    +
    +
    +
  • +
+
+
+

The 'extension' variable assignment tries to find 'extensions' (tags that start with "x-…​") on the 'info' +part of your file and on the extensions of each entity. value is the extension that our plug-in will try to find on your OpenAPI file. The result will +be stored in the variable key.

+
+
+

As you will see on the figure below, there are two types of variables: The global ones, that are defined +on the 'info' part of the file, and the local ones, that are defined inside each entity.

+
+
+

Therefore, if you want to define the root package, then you will have to declare it on the 'info' part. +That way, all your entities will be generated under the same root package (e.g. com.devonfw.project).

+
+
+

Swagger at devon4j Project

+
+
+

If no extension with that name was found, then an empty string will be assigned. In the case of not defining the root package, then the code will be generated into src/main/java.

+
+
+
    +
  • +

    type 'property' → property of the Java Object

    +
    +
    +
    <trigger id="..." type="openapi" templateFolder="...">
    +    <containerMatcher type="element" value="OpenApiFile"/>
    +    <matcher type="element" value="EntityDef">
    +        <variableAssignment type="property" key="entityName" value="name" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

The 'property' variable assignment tries to find the property value of the entities defined on the schema. +The value is assigned to the key. The current properties that you will be able to get are:

+
+
+
    +
  1. +

    ComponentDef component: It is an object that stores the configuration of an devon4j component. Its only +property is List<PathDef> paths which contains the paths as the ones shown here.

    +
  2. +
  3. +

    String componentName: Stores the name of the x-component tag for this entity.

    +
  4. +
  5. +

    String name: Name of this entity (as shown on the example above).

    +
  6. +
  7. +

    String description: Description of this entity.

    +
  8. +
  9. +

    List<PropertyDef> properties: List containing all the properties of this entity. PropertyDef is an object that has the next properties:

    +
    +
      +
    1. +

      String name.

      +
    2. +
    3. +

      String type.

      +
    4. +
    5. +

      String format.

      +
    6. +
    7. +

      String description.

      +
    8. +
    9. +

      Boolean isCollection.

      +
    10. +
    11. +

      Boolean isEntity.

      +
    12. +
    13. +

      Boolean required.

      +
    14. +
    15. +

      Map<String, Object> constraints

      +
    16. +
    +
    +
  10. +
+
+
+

If no property with that name was found, then it will be set to null.

+
+
+
+

Full trigger configuration

+
+
+
<trigger id="..." type="openapi" templateFolder="...">
+    <containerMatcher type="element" value="OpenApiFile">
+    <matcher type="element" value="EntityDef">
+        <variableAssignment type="constant" key="rootPackage" value="com.capgemini.demo" />
+        <variableAssignment type="property" key="component" value="componentName" />
+        <variableAssignment type="property" key="entityName" value="name" />
+    </matcher>
+</trigger>
+
+
+
+
+
+
+

Input reader

+
+
+

The CobiGen OpenAPI Plug-in implements an input reader for OpenAPI 3.0 files. The XML input reader will create the following object model for template creation:

+
+
+
    +
  • +

    model ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      header (HeaderDef :: Definition of the header found at the top of the file)

      +
    • +
    • +

      name ('String' :: Name of the current Entity)

      +
    • +
    • +

      componentName ('String' :: name of the component the entity belongs to)

      +
    • +
    • +

      component (ComponentDef :: Full definition of the component that entity belongs to)

      +
    • +
    • +

      description ('String' :: Description of the Entity)

      +
    • +
    • +

      properties (List<PropertyDef> :: List of properties the entity has)

      +
    • +
    • +

      relationShips (List<RelationShip> :: List of Relationships the entity has)

      +
    • +
    +
    +
  • +
  • +

    HeaderDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      info (InfoDef :: Definition of the info found in the header)

      +
    • +
    • +

      servers (List<ServerDef> :: List of servers the specification uses)

      +
    • +
    +
    +
  • +
  • +

    InfoDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      title ('String' :: The title of the specification)

      +
    • +
    • +

      description ('String' :: The description of the specification)

      +
    • +
    +
    +
  • +
  • +

    ServerDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      URI ('String' :: String representation of the Server location)

      +
    • +
    • +

      description ('String' :: description of the server)

      +
    • +
    +
    +
  • +
  • +

    ComponentDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      paths (List<PathDef> :: List of services for this component)

      +
    • +
    +
    +
  • +
  • +

    PropertyDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      name ('String' :: Name of the property)

      +
    • +
    • +

      type ('String' :: type of the property)

      +
    • +
    • +

      format ('String' :: format of the property (i.e. int64))

      +
    • +
    • +

      isCollection (boolean :: true if the property is a collection, false by default)

      +
    • +
    • +

      isEntity (boolean :: true if the property refers to another entity, false by default)

      +
    • +
    • +

      sameComponent (boolean :: true if the entity that the property refers to belongs to the same component, false by default)

      +
    • +
    • +

      description ('String' :: Description of the property)

      +
    • +
    • +

      required (boolean :: true if the property is set as required)

      +
    • +
    • +

      constraints ('Map<String, Object>')

      +
    • +
    +
    +
  • +
  • +

    RelationShip ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      type ('String' :: type of the relationship (OneToOne, ManyToMany, etc…​))

      +
    • +
    • +

      entity ('String' :: destination entity name)

      +
    • +
    • +

      sameComponent (boolean :: true if the destination entity belongs to the same component of the source entity, false by default)

      +
    • +
    • +

      unidirectional (boolean :: true if the relationship is unidirectional, false by default)

      +
    • +
    +
    +
  • +
  • +

    PathDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      rootComponent ('String' :: the first segment of the path)

      +
    • +
    • +

      version ('String' :: version of the service)

      +
    • +
    • +

      pathURI ('String' :: URI of the path, the segment after the version)

      +
    • +
    • +

      operations (List<OperationDef> :: List of operations for this path)

      +
    • +
    +
    +
  • +
  • +

    OperationDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      type ('String' :: type of the operation (GET, PUT, etc…​))

      +
    • +
    • +

      parameters (List<ParameterDef> :: List of parameters)

      +
    • +
    • +

      operationId ('String' :: name of the operation prototype)

      +
    • +
    • +

      description ('String' :: JavaDoc Description of the operation)

      +
    • +
    • +

      summary (List<PropertyDef> :: JavaDoc operation Summary)

      +
    • +
    • +

      tags ('List<String>' :: List of different tags)

      +
    • +
    • +

      responses (List<ResponseDef> :: Responses of the operation)

      +
    • +
    +
    +
  • +
  • +

    ParameterDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      isSearchCriteria (boolean :: true if the response is an SearchCriteria object)

      +
    • +
    • +

      inPath (boolean :: true if this parameter is contained in the request path)

      +
    • +
    • +

      inQuery (boolean :: true if this parameter is contained in a query)

      +
    • +
    • +

      isBody (boolean :: true if this parameter is a response body)

      +
    • +
    • +

      inHeader (boolean :: true if this parameter is contained in a header)

      +
    • +
    • +

      mediaType ('String' :: String representation of the media type of the parameter)

      +
    • +
    +
    +
  • +
  • +

    ResponseDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      isArray (boolean :: true if the type of the response is an Array)

      +
    • +
    • +

      isPaginated (boolean :: true if the type of the response is paginated)

      +
    • +
    • +

      isVoid (boolean :: true if there is no type/an empty type)

      +
    • +
    • +

      isEntity (boolean :: true if the type of the response is an Entity)

      +
    • +
    • +

      entityRef (EntityDef :: Incomplete EntityDef containing the name and properties of the referenced Entity)

      +
    • +
    • +

      type ('String' :: String representation of the attribute’s value)

      +
    • +
    • +

      code ('String' :: String representation of the HTTP status code)

      +
    • +
    • +

      mediaTypes ('List<String>' :: List of media types that can be returned)

      +
    • +
    • +

      description ('String' :: Description of the response)

      +
    • +
    +
    +
  • +
+
+
+
+
+

Merger extensions

+
+
+

This plugin only provides an input reader; there is no support for OpenAPI merging. Nevertheless, the files generated from an OpenAPI file will be Java, XML, JSON, TS, etc…​ so, +for each file to be generated defined at templates.xml, the mergeStrategy for the specific language must be set (javamerge, javamerge_override, jsonmerge, etc…​)

+
+
+
+
<templates>
+    ...
+    <templateExtension ref="${variables.entityName}.java" mergeStrategy="javamerge"/>
+    ...
+    <templateExtension ref="${variables.entityName}dataGrid.component.ts" mergeStrategy="tsmerge"/>
+    ...
+    <templateExtension ref="en.json" mergeStrategy="jsonmerge"/>
+</templates>
+
+
+
+
+
+

Usage

+
+
+

Writing OpenAPI 3.0 contract file

+
+

The Swagger file must follow the OpenAPI 3.0 standard to be readable by CobiGen, otherwise an error will be thrown. +A full documentation about how to follow this standard can be found in the Swagger3 Docs.

+
+
+

The Swagger file must be at the core folder of your devon4j project, like shown below:

+
+
+

Swagger at devon4j Project

+
+
+

To be compatible with CobiGen and devon4j, it must follow some specific configurations. These configurations allow us to avoid redundant definitions, as SearchCriteria and PaginatedList objects are used at the services definitions.

+
+
+
+

Paths

+
+
    +
  • +

    Just adding the tags property at the end of the service definitions with the items `SearchCriteria` and/or paginated lets CobiGen know that a standard devon4j SearchCriteria and/or PaginateListTo object must be generated. That way, the Swagger file will be easier to write and even more understandable.

    +
  • +
  • +

    The path must start with the component name, and define an x-component tag with the component name. That way this service will be included into the component services list.

    +
  • +
+
+
+
+
  /componentnamemanagement/v1/entityname/customOperation/:
+    x-component: componentnamemanagement
+    post:
+      summary: 'Summary of the operation'
+      description: Description of the operation.
+      operationId: customOperation
+      responses:
+        '200':
+          description: Description of the response.
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: '#/components/schemas/EntityName'
+      requestBody:
+        $ref: '#/components/requestBodies/EntityName'
+      tags:
+        - searchCriteria
+        - paginated
+
+
+
+

That way, CobiGen will be able to generate the endpoint (REST service) customOperation on componentmanagement. If you do not specify the component to generate to (the x-component tag) then this service will not be taken into account for generation.

+
+
+
+

Service based generation

+
+

In previous CobiGen versions, we were able to generate code from a contract-first OpenAPI specification only when we defined components like the following:

+
+
+
+
components:
+    schemas:
+        Shop:
+          x-component: shopmanagement
+          description: Entity definition of Shop
+          type: object
+          properties:
+            shopExample:
+              type: string
+              maxLength: 100
+              minLength: 5
+              uniqueItems: true
+
+
+
+

We could not generate services without the definition of those components.

+
+
+

In our current version, we have overcome it, so that now we are able to generate all the services independently. You just need to add an x-component tag with the name of the component that will make use of that service. See here.

+
+
+

A small OpenAPI example defining only services can be found below:

+
+
+
+
openapi: 3.0.0
+servers:
+  - url: 'https://localhost:8081/server/services/rest'
+    description: Just some data
+info:
+  title: Devon Example
+  description: Example of a API definition
+  version: 1.0.0
+  x-rootpackage: com.capgemini.spoc.openapi
+paths:
+  /salemanagement/v1/sale/{saleId}:
+    x-component: salemanagement
+    get:
+      operationId: findSale
+      parameters:
+        - name: saleId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Any
+  /salemanagement/v1/sale/{bla}:
+    x-component: salemanagement
+    get:
+      operationId: findSaleBla
+      parameters:
+        - name: bla
+          in: path
+          required: true
+          schema:
+            type: integer
+            format: int64
+            minimum: 10
+            maximum: 200
+      responses:
+        '200':
+          description: Any
+
+
+
+

Then, the increment that you need to select for generating those services is Crud devon4ng Service based Angular:

+
+
+

Service based generation

+
+
+
+

Full example

+
+

This example yaml file can be downloaded from here.

+
+
+ + + + + +
+ + +As you will see in the file, "x-component" tags are obligatory if you want to generate components (entities). They have to be defined for each one. +In addition, you will find the global variable "x-rootpackage", which is explained <<,here>>. +
+
+
+
+
openapi: 3.0.0
+servers:
+  - url: 'https://localhost:8081/server/services/rest'
+    description: Just some data
+info:
+  title: Devon Example
+  description: Example of a API definition
+  version: 1.0.0
+  x-rootpackage: com.devonfw.angular.test
+paths:
+  /shopmanagement/v1/shop/{shopId}:
+    x-component: shopmanagement
+    get:
+      operationId: findShop
+      parameters:
+        - name: shopId
+          in: path
+          required: true
+          schema:
+            type: integer
+            format: int64
+            minimum: 0
+            maximum: 50
+      responses:
+        '200':
+          description: Any
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/Shop'
+            text/plain:
+              schema:
+                type: string
+        '404':
+          description: Not found
+  /salemanagement/v1/sale/{saleId}:
+    x-component: salemanagement
+    get:
+      operationId: findSale
+      parameters:
+        - name: saleId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Any
+  /salemanagement/v1/sale/:
+    x-component: salemanagement
+    post:
+      responses:
+        '200':
+          description: Any
+      requestBody:
+        $ref: '#/components/requestBodies/SaleData'
+      tags:
+       - searchCriteria
+  /shopmanagement/v1/shop/new:
+    x-component: shopmanagement
+    post:
+      responses:
+       '200':
+          description: Any
+      requestBody:
+        $ref: '#/components/requestBodies/ShopData'
+components:
+    schemas:
+        Shop:
+          x-component: shopmanagement
+          description: Entity definition of Shop
+          type: object
+          properties:
+            shopExample:
+              type: string
+              maxLength: 100
+              minLength: 5
+              uniqueItems: true
+            sales:
+              type: array # Many to One relationship
+              items:
+                $ref: '#/components/schemas/Sale'
+        Sale:
+          x-component: salemanagement
+          description: Entity definition of Shop
+          type: object
+          properties:
+            saleExample:
+              type: number
+              format: int64
+              maximum: 100
+              minimum: 0
+          required:
+            - saleExample
+
+    requestBodies:
+        ShopData:
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/Shop'
+          required: true
+        SaleData:
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/Sale'
+          required: true
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/cobigen-propertyplugin.html b/docs/cobigen/1.0/cobigen-propertyplugin.html new file mode 100644 index 00000000..6439f3fd --- /dev/null +++ b/docs/cobigen/1.0/cobigen-propertyplugin.html @@ -0,0 +1,297 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

==Property Plug-in +The CobiGen Property Plug-in currently only provides different merge mechanisms for documents written in Java property syntax.

+
+
+

Merger extensions

+
+
+

There are two merge strategies for Java properties, which can be configured in the templates.xml:

+
+
+
    +
  • +

    Merge strategy propertymerge (merges two properties documents and keeps the existing properties on conflicts)

    +
  • +
  • +

    Merge strategy propertymerge_override (merges two properties documents and overrides the existing properties on conflicts)

    +
  • +
+
+
+

Both documents (base and patch) will be parsed using the Java 7 API and will be compared according to their keys. Conflicts will occur if a key in the patch already exists in the base document.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/cobigen-templates_helpful-links.html b/docs/cobigen/1.0/cobigen-templates_helpful-links.html new file mode 100644 index 00000000..d6399fef --- /dev/null +++ b/docs/cobigen/1.0/cobigen-templates_helpful-links.html @@ -0,0 +1,293 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ + +
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/cobigen-textmerger.html b/docs/cobigen/1.0/cobigen-textmerger.html new file mode 100644 index 00000000..ee62a166 --- /dev/null +++ b/docs/cobigen/1.0/cobigen-textmerger.html @@ -0,0 +1,555 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==Text Merger Plug-in +The Text Merger Plug-in enables merging result free text documents to existing free text documents. Therefore, the algorithms are also very rudimentary.

+
+
+

Merger extensions

+
+
+

There are currently three main merge strategies that apply for the whole document:

+
+
+
    +
  • +

    merge strategy textmerge_append (appends the text directly to the end of the existing document) +_Remark_: If no anchors are defined, this will simply append the patch.

    +
  • +
  • +

    merge strategy textmerge_appendWithNewLine (appends the text after adding a new line break to the existing document) +_Remark_: empty patches will not result in appending a new line any more since v1.0.1 +Remark: Only suitable if no anchors are defined, otherwise it will simply act as textmerge_append

    +
  • +
  • +

    merge strategy textmerge_override (replaces the contents of the existing file with the patch) +_Remark_: If anchors are defined, override is set as the default mergestrategy for every text block if not redefined in an anchor specification.

    +
  • +
+
+
+
+
+

Anchor functionality

+
+
+

If a template contains text that fits the definition of anchor:${documentpart}:${mergestrategy}:anchorend or more specifically the regular expression (.*)anchor:([^:]+):(newline_)?([^:]+)(_newline)?:anchorend\\s*(\\r\\n|\\r|\\n), some additional functionality becomes available about specific parts of the incoming text and the way it will be merged with the existing text. These anchors always change things about the text to come up until the next anchor, text before it is ignored.

+
+
+

If no anchors are defined, the complete patch will be appended depending on your choice for the template in the file templates.xml.

+
+
+

[[anchordef]]

+
+
+

Anchor Definition

+
+

Anchors should always be defined as a comment of the language the template results in, as you do not want them to appear in your readable version, but cannot define them as FreeMarker comments in the template, or the merger will not know about them. +Anchors will also be read when they are not comments due to the merger being able to merge multiple types of text-based languages, thus making it practically impossible to filter for the correct comment declaration. That is why anchors have to always be followed by line breaks. That way there is a universal way to filter anchors that should have anchor functionality and ones that should appear in the text. +Remark: If the resulting language has closing tags for comments, they have to appear in the next line. +Remark: If you do not put the anchor into a new line, all the text that appears before it will be added to the anchor.

+
+
+
+

Document parts

+
+

In general, ${documentpart} is an id to mark a part of the document, that way the merger knows what parts of the text to merge with which parts of the patch (e.g. if the existing text contains anchor:table:${}:anchorend that part will be merged with the part tagged anchor:table:${}:anchorend of the patch).

+
+
+

If the same documentpart is defined multiple times, it can lead to errors, so instead of defining table multiple times, use table1, table2, table3 etc.

+
+
+

If a ${documentpart} is defined in the document but not in the patch and they are in the same position, it is processed in the following way: If only the documentparts header, test and footer are defined in the document in that order, and the patch contains header, order and footer, the resulting order will be header, test, order then footer.

+
+
+

The following documentparts have default functionality:

+
+
+
    +
  1. +

    anchor:header:${mergestrategy}:anchorend marks the beginning of a header, that will be added once when the document is created, but not again. +Remark: This is only done once, if you have header in another anchor, it will be ignored

    +
  2. +
  3. +

    anchor:footer:${mergestrategy}:anchorend marks the beginning of a footer, that will be added once when the document is created, but not again. Once this is invoked, all following text will be included in the footer, including other anchors.

    +
  4. +
+
+
+

[[mergestrategies]]

+
+
+
+

Mergestrategies

+
+

Mergestrategies are only relevant in the patch, as the merger is only interested in how text in the patch should be managed, not how it was managed in the past.

+
+
+
    +
  1. +

    anchor:${documentpart}::anchorend will use the merge strategy from templates.xml, see Merger-Extensions.

    +
  2. +
  3. +

    anchor:${}:${mergestrategy}_newline:anchorend or anchor:${}:newline_${mergestrategy}:anchorend states that a new line should be appended before or after this anchors text, depending on where the newline is (before or after the mergestrategy). anchor:${documentpart}:newline:anchorend puts a new line after the anchors text. +Remark: Only works with appending strategies, not merging/replacing ones. These strategies currently include: appendbefore, append/appendafter

    +
  4. +
  5. +

    anchor:${documentpart}:override:anchorend means that the new text of this documentpart will replace the existing one completely

    +
  6. +
  7. +

    anchor:${documentpart}:appendbefore:anchorend or anchor:${documentpart}:appendafter:anchorend/anchor:${documentpart}:append:anchorend specifies whether the text of the patch should come before the existing text or after.

    +
  8. +
+
+
+
+
+
+

Usage Examples

+
+
+

General

+
+

Below you can see how a file with anchors might look like (using adoc comment tags), with examples of what you might want to use the different functions for.

+
+
+
+
// anchor:header:append:anchorend
+
+Table of contents
+Introduction/Header
+
+// anchor:part1:appendafter:anchorend
+
+Lists
+Table entries
+
+// anchor:part2:nomerge:anchorend
+
+Document Separators
+adoc table definitions
+
+// anchor:part3:override:anchorend
+
+Anything that you only want once but changes from time to time
+
+// anchor:footer:append:anchorend
+
+Copyright Info
+Imprint
+
+
+
+
+

Merging

+
+

In this section you will see a comparison on what files look like before and after merging

+
+
+

override

+
+
Before
+
+
// anchor:part:override:anchorend
+Lorem Ipsum
+
+
+
+
Patch
+
+
// anchor:part:override:anchorend
+Dolor Sit
+
+
+
+
After
+
+
// anchor:part:override:anchorend
+Dolor Sit
+
+
+
+
+

Appending

+
+
Before
+
+
// anchor:part:append:anchorend
+Lorem Ipsum
+// anchor:part2:appendafter:anchorend
+Lorem Ipsum
+// anchor:part3:appendbefore:anchorend
+Lorem Ipsum
+
+
+
+
Patch
+
+
// anchor:part:append:anchorend
+Dolor Sit
+// anchor:part2:appendafter:anchorend
+Dolor Sit
+// anchor:part3:appendbefore:anchorend
+Dolor Sit
+
+
+
+
After
+
+
// anchor:part:append:anchorend
+Lorem Ipsum
+Dolor Sit
+// anchor:part2:appendafter:anchorend
+Lorem Ipsum
+Dolor Sit
+// anchor:part3:appendbefore:anchorend
+Dolor Sit
+Lorem Ipsum
+
+
+
+
+

Newline

+
+
Before
+
+
// anchor:part:newline_append:anchorend
+Lorem Ipsum
+// anchor:part:append_newline:anchorend
+Lorem Ipsum
+(end of file)
+
+
+
+
Patch
+
+
// anchor:part:newline_append:anchorend
+Dolor Sit
+// anchor:part:append_newline:anchorend
+Dolor Sit
+(end of file)
+
+
+
+
After
+
+
// anchor:part:newline_append:anchorend
+Lorem Ipsum
+
+Dolor Sit
+// anchor:part:append_newline:anchorend
+Lorem Ipsum
+Dolor Sit
+
+(end of file)
+
+
+
+
+
+
+
+

Error List

+
+
+
    +
  • +

    If there are anchors in the text, but either base or patch do not start with one, the merging process will be aborted, as text might go missing this way.

    +
  • +
  • +

    Using _newline or newline_ with mergestrategies that don’t support it, like override, will abort the merging process. See <<`mergestrategies`,Merge Strategies>> →2 for details.

    +
  • +
  • +

    Using undefined mergestrategies will abort the merging process.

    +
  • +
  • +

    Wrong anchor definitions, for example anchor:${}:anchorend will abort the merging process, see <<`anchordef`,Anchor Definition>> for details.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/cobigen-tsplugin.html b/docs/cobigen/1.0/cobigen-tsplugin.html new file mode 100644 index 00000000..94f3ae89 --- /dev/null +++ b/docs/cobigen/1.0/cobigen-tsplugin.html @@ -0,0 +1,606 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==TypeScript Plug-in

+
+
+

The TypeScript Plug-in enables merging result TS files to existing ones. This plug-in is currently used to generate an Angular2 client with all CRUD functionalities enabled. The plug-in also generates i18n functionality by appending the ES or EN suffixes at the end of each word, making the developer aware that these words must be translated to the corresponding language. Currently, the generation of the Angular2 client requires an ETO java object as input, so there is no need to implement an input reader for ts artifacts for the moment.

+
+
+

Trigger Extensions

+
+
+

As for the Angular2 generation the input is a java object, the trigger expressions (including matchers and variable assignments) are implemented as Java.

+
+
+
+
+

Merger extensions

+
+
+

This plugin uses the TypeScript Merger to merge files. There are currently two merge strategies:

+
+
+
    +
  • +

    merge strategy tsmerge (adds the new code, respecting the existing code in case of conflict)

    +
  • +
  • +

    merge strategy tsmerge_override (adds the new code, overwriting the existing code in case of conflict)

    +
  • +
+
+
+

The merge algorithm mainly handles the following AST nodes:

+
+
+
    +
  • +

    ImportDeclaration

    +
    +
      +
    • +

      Will add non existent imports whatever the merge strategy is.

      +
    • +
    • +

      For different imports from same module, the import clauses will be merged.

      +
      +
      +
      import { a } from 'b';
      +import { c } from 'b';
      +//Result
      +import { a, c } from 'b';
      +
      +
      +
    • +
    +
    +
  • +
  • +

    ClassDeclaration

    +
    +
      +
    • +

      Adds non existent base properties from patch based on the name property.

      +
    • +
    • +

      Adds non existent base methods from patch based on the name signature.

      +
    • +
    • +

      Adds non existent annotations to class, properties and methods.

      +
    • +
    +
    +
  • +
  • +

    PropertyDeclaration

    +
    +
      +
    • +

      Adds non existent decorators.

      +
    • +
    • +

      Merge existent decorators.

      +
    • +
    • +

      With override strategy, the value of the property will be replaced by the patch value.

      +
    • +
    +
    +
  • +
  • +

    MethodDeclaration

    +
    +
      +
    • +

      With override strategy, the body will be replaced.

      +
    • +
    • +

      The parameters will be merged.

      +
    • +
    +
    +
  • +
  • +

    ParameterDeclaration

    +
    +
      +
    • +

      Replace type and modifiers with override merge strategy, adding non existent from patch into base.

      +
    • +
    +
    +
  • +
  • +

    ConstructorDeclaration

    +
    +
      +
    • +

      Merged in the same way as Method is.

      +
    • +
    +
    +
  • +
  • +

    FunctionDeclaration

    +
    +
      +
    • +

      Merged in the same way as Method is.

      +
    • +
    +
    +
  • +
+
+
+
+
+

Input reader

+
+
+

The TypeScript input reader is based on the one that the TypeScript merger uses. The current extensions are additional module fields indicating from which library each entity originates. +module: null specifies a standard entity or type such as string or number.

+
+
+

Object model

+
+

To get a first impression of the created object after parsing, let us start with analyzing a small example, namely the parsing of a simple type-orm model written in TypeScript.

+
+
+
+
import {Entity, PrimaryGeneratedColumn, Column} from "typeorm";
+
+@Entity()
+export class User {
+
+    @PrimaryGeneratedColumn()
+    id: number;
+
+    @Column()
+    firstName: string;
+
+    @Column()
+    lastName: string;
+
+    @Column()
+    age: number;
+
+}
+
+
+
+

The returned object has the following structure

+
+
+
+
{
+  "importDeclarations": [
+    {
+      "module": "typeorm",
+      "named": [
+        "Entity",
+        "PrimaryGeneratedColumn",
+        "Column"
+      ],
+      "spaceBinding": true
+    }
+  ],
+  "classes": [
+    {
+      "identifier": "User",
+      "modifiers": [
+        "export"
+      ],
+      "decorators": [
+        {
+          "identifier": {
+            "name": "Entity",
+            "module": "typeorm"
+          },
+          "isCallExpression": true
+        }
+      ],
+      "properties": [
+        {
+          "identifier": "id",
+          "type": {
+            "name": "number",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "PrimaryGeneratedColumn",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        },
+        {
+          "identifier": "firstName",
+          "type": {
+            "name": "string",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "Column",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        },
+        {
+          "identifier": "lastName",
+          "type": {
+            "name": "string",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "Column",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        },
+        {
+          "identifier": "age",
+          "type": {
+            "name": "number",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "Column",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        }
+      ]
+    }
+  ]
+}
+
+
+
+

If we only consider the first level of the JSON response, we spot two lists of imports and classes, providing information about the only import statement and the only User class, respectively. Moving one level deeper we observe that:

+
+
+
    +
  • +

    Every import statement is translated to an import declaration entry in the declarations list, containing the module name, as well as a list of entities imported from the given module.

    +
  • +
  • +

    Every class entry provides besides the class identifier, its decoration(s), modifier(s), as well as a list of properties that the original class contains.

    +
  • +
+
+
+

Note that, for each given type, the module from which it is imported is also given as in

+
+
+
+
  "identifier": {
+    "name": "Column",
+    "module": "typeorm"
+  }
+
+
+
+

Returning to the general case, independently of the given TypeScript file, an object having the following structure will be created.

+
+
+
    +
  • +

    importDeclarations: A list of import statement as described above

    +
  • +
  • +

    exportDeclarations: A list of export declarations

    +
  • +
  • +

    classes: A list of classes extracted from the given file, where each entry is full of class specific fields, describing its properties and decorator for example.

    +
  • +
  • +

    interfaces: A list of interfaces.

    +
  • +
  • +

    variables: A list of variables.

    +
  • +
  • +

    functions: A list of functions.

    +
  • +
  • +

    enums: A list of enumerations.

    +
  • +
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/cobigen-usecases.html b/docs/cobigen/1.0/cobigen-usecases.html new file mode 100644 index 00000000..b335b7fd --- /dev/null +++ b/docs/cobigen/1.0/cobigen-usecases.html @@ -0,0 +1,445 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==General use cases

+
+
+

In addition to the selection of CobiGen applications introduced before, this chapter provides a more detailed overview about the currently implemented and maintained general use cases. These can be used by any project following a supported reference architecture as e.g. the devonfw or Register Factory.

+
+
+

devon4j

+
+
+

With our templates for devon4j, you can generate a whole CRUD application from a single Entity class. You save the effort of creating DAOs, Transfer Objects, and simple CRUD use cases with REST services, and even the client application can be generated.

+
+
+

CRUD server application for devon4j

+
+

For the server, the required files for all architectural layers (Data access, logic, and service layer) can be created based on your Entity class. After the generation, you have CRUD functionality for the entity from bottom to top which can be accessed via a RESTful web service. Details are provided in the devonfw wiki.

+
+
+
+

CRUD client application for devon4ng

+
+

Based on the REST services on the server, you can also generate an Angular client based on devon4ng. With the help of Node.js, you have a working client application for displaying your entities within minutes!

+
+
+
+

Test data Builder for devon4j

+
+

Generating a builder pattern for POJOs to easily create test data in your tests. CobiGen is not only able to generate a plain builder pattern but rather builder, which follow a specific concept to minimize test data generation efforts in your unit tests. The following Person class as an example:

+
+
+
Person class
+
+
public class Person {
+
+    private String firstname;
+    private String lastname;
+    private int birthyear;
+    @NotNull
+    private Address address;
+
+    @NotNull
+    public String getFirstname() {
+        return this.firstname;
+    }
+
+    // additional default setter and getter
+}
+
+
+
+

It is a simple POJO with a validation annotation, to indicate, that firstname should never be null. Creating this object in a test would imply to call every setter, which is kind of nasty. Therefore, the Builder Pattern has been introduced for quite a long time in software engineering, allowing to easily create POJOs with a fluent API. See below.

+
+
+
Builder pattern example
+
+
Person person = new PersonBuilder()
+                .firstname("Heinz")
+                .lastname("Erhardt")
+                .birthyear(1909)
+                .address(
+                    new AddressBuilder().postcode("22222")
+                        .city("Hamburg").street("Luebecker Str. 123")
+                        .createNew())
+                .addChild(
+                    new PersonBuilder()[...].createNew()).createNew();
+
+
+
+

The Builder API generated by CobiGen allows you to set any setter-accessible field of a POJO in a fluent way. But in addition, let's assume a test, which should check the birth year as precondition for any business operation. So specifying all other fields of Person, especially firstname as it is mandatory to enter business code, would not make sense. The test behavior should just depend on the specification of the birth year and on no other data. So we would like to just provide this data to the test.

+
+
+

The Builder classes generated by CobiGen try to tackle this inconvenience by providing the ability to declare default values for any mandatory field due to validation or database constraints.

+
+
+
Builder Outline
+
+
public class PersonBuilder {
+
+    private void fillMandatoryFields() {
+        firstname("lasdjfaöskdlfja");
+        address(new AddressBuilder().createNew());
+    };
+    private void fillMandatoryFields_custom() {...};
+
+    public PersonBuilder firstname(String value);
+    public PersonBuilder lastname(String value);
+    ...
+
+    public Person createNew();
+    public Person persist(EntityManager em);
+    public List<Person> persistAndDuplicate(EntityManager em, int count);
+}
+
+
+
+

Looking at the plotted builder API generated by CobiGen, you will find two private methods. The method fillMandatoryFields will be generated by CobiGen and regenerated every time CobiGen generation will be triggered for the Person class. This method will set every automatically detected field with not null constraints to a default value. However, by implementing fillMandatoryFields_custom on your own, you can reset these values or even specify more default values for any other field of the object. Thus, running new PersonBuilder().birthyear(1909).createNew(); will create a valid object of Person, which is already pre-filled such that it does not influence the test execution besides the fact that it circumvents database and validation issues.

+
+
+

This even holds for complex data structures as indicated by address(new AddressBuilder().createNew());. Due to the use of the AddressBuilder for setting the default value for the field address, also the default values for Address will be set automatically.

+
+
+

Finally, the builder API provides different methods to create new objects.

+
+
+
    +
  • +

    createNew() just creates a new object from the builder specification and returns it.

    +
  • +
  • +

    persist(EntityManager) will create a new object from the builder specification and persists it to the database.

    +
  • +
  • +

    persistAndDuplicate(EntityManager, int) will create the given amount of objects from the builder specification and persists all of these. After the initial generation of each builder, you might want to adapt the method body as you will most probably not be able to persist more than one object with the same field assignments to the database due to unique constraints. Thus, please see the generated comment in the method to adapt unique fields accordingly before persisting to the database.

    +
  • +
+
+
+

Custom Builder for Business Needs

+
+

CobiGen just generates basic builders for any POJO. However, for project needs you would probably like to have even more complex builders, which enable the easy generation of more complex test data encoded in a large object hierarchy. Therefore, the generated builders can just be seen as a tool to achieve this. You can define your own business-driven builders in the same way as the generated builders, but explicitly focusing on your business needs. Just take this example as a demonstration of that idea:

+
+
+
+
  University uni = new ComplexUniversityBuilder()
+    .withStudents(200)
+    .withProfessors(4)
+    .withExternalStudent()
+    .createNew();
+
+
+
+

E.g. the method withExternalStudent() might create a person, which is a student and is flagged as an external student. Basing this implementation on the generated builders will even assure that you benefit from any default values you have set before. In addition, you can even imagine more complex builder methods setting values driven by your reusable testing needs based on the specific business knowledge.

+
+
+
+
+
+
+

Register Factory

+
+
+

CRUD server application

+
+

Generates a CRUD application with persistence entities as inputs. This includes DAOs, TOs, use cases, as well as a CRUD JSF user interface if needed.

+
+
+
+

Test data Builder

+ +
+
+

Test documentation

+
+

Generate test documentation from test classes. The input are the doclet tags of several test classes, which e.g. can specify a description, a cross-reference, or a test target description. The result currently is a csv file, which lists all tests with the corresponding meta-information. Afterwards, this file might be styled and passed to the customer if needed and it will be up-to-date every time!

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/cobigen-xmlplugin.html b/docs/cobigen/1.0/cobigen-xmlplugin.html new file mode 100644 index 00000000..8daeff7a --- /dev/null +++ b/docs/cobigen/1.0/cobigen-xmlplugin.html @@ -0,0 +1,567 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==XML Plug-in +The CobiGen XML Plug-in comes with an input reader for XML artifacts, XML related trigger and matchers and provides different merge mechanisms for XML result documents.

+
+
+

Trigger extension

+
+
+

(since cobigen-xmlplugin v2.0.0)

+
+
+

The XML Plug-in provides a trigger for XML related inputs. It accepts XML documents as input (see XML input reader) and provides additional matching and variable assignment mechanisms. The configuration in the context.xml for this trigger looks like this:

+
+
+
    +
  • +

    type 'xml'

    +
    +
    Example of a XML trigger definition.
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables XML documents as inputs.

    +
    +
  • +
  • +

    type xpath

    +
    +
    Example of a xpath trigger definition.
    +
    +
    <trigger id="..." type="xpath" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables XML documents as container inputs, which consists of several sub-documents.

    +
    +
  • +
+
+
+

Container Matcher type

+
+

A ContainerMatcher checks if the input is a valid container.

+
+
+
    +
  • +

    xpath: type: xpath

    +
    +
    Example of a XML trigger definition with a node name matcher.
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <containerMatcher type="xpath" value="./uml:Model//packagedElement[@xmi:type='uml:Class']">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    Before applying any Matcher, this containerMatcher checks if the XML file contains a node uml:Model with a childnode packagedElement which contains an attribute xmi:type with the value uml:Class.

    +
    +
  • +
+
+
+
+

Matcher types

+
+

With the trigger you might define matchers, which restrict the input upon specific aspects:

+
+
+
    +
  • +

    XML: type nodename → document’s root name matching

    +
    +
    Example of a XML trigger definition with a node name matcher
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <matcher type="nodename" value="\D\w*">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the root name of the declaring input document matches the given regular expression (value).

    +
    +
  • +
  • +

    xpath: type: xpath → matching a node with a xpath value

    +
    +
    Example of a xpath trigger definition with a xpath matcher.
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <matcher type="xpath" value="/packagedElement[@xmi:type='uml:Class']">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the XML file contains a node /packagedElement where the xmi:type property equals uml:Class.

    +
    +
  • +
+
+
+
+

Variable Assignment types

+
+

Furthermore, it provides the ability to extract information from each input for further processing in the templates. The values assigned by variable assignments will be made available in template and the destinationPath of context.xml through the namespace variables.<key>. The XML Plug-in currently provides only one mechanism:

+
+
+
    +
  • +

    type 'constant' → constant parameter

    +
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <matcher type="nodename" value="\D\w*">
    +        <variableAssignment type="constant" key="domain" value="restaurant" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value to the key as a constant.

+
+
+
+

XML input reader

+
+

The CobiGen XML Plug-in implements an input reader for parsed XML documents, so API users can pass org.w3c.dom.Document objects for generation. For getting the right parsed XML inputs you can easily use the xmlplugin.util.XmlUtil, which provides static functionality to parse XML files or input streams and get the appropriate Document object.

+
+
+

Template object

+
+

Due to the heterogeneous structure an XML document can have, the XML input reader does not always create exactly the same model structure (in contrast to the java input reader). For example, the model’s depth differs strongly according to its input document. To allow navigational access to the nodes, the model also depends on the document’s element node names. All child elements with unique names are directly accessible via their names. In addition, it is possible to iterate over all child elements with the help of the child list Children. So it is also possible to access child elements with non-unique names.

+
+
+

The XML input reader will create the following object model for template creation (EXAMPLEROOT, EXAMPLENODE1, EXAMPLENODE2, EXAMPLEATTR1,…​ are just used here as examples. Of course they will be replaced later by the actual node or attribute names):

+
+
+
    +
  • +

    ~EXAMPLEROOT~ ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      _nodeName_ ('String' :: Simple name of the root node)

      +
    • +
    • +

      _text_ ('String' :: Concatenated text content (PCDATA) of the root node)

      +
    • +
    • +

      TextNodes ('List<String>' :: List of all the root’s text node contents)

      +
    • +
    • +

      _at_~EXAMPLEATTR1~ ('String' :: String representation of the attribute’s value)

      +
    • +
    • +

      _at_~EXAMPLEATTR2~ ('String' :: String representation of the attribute’s value)

      +
    • +
    • +

      _at_…​

      +
    • +
    • +

      Attributes ('List<Map<String, Object>>' :: List of the root’s attributes

      +
      +
        +
      • +

        at ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          _attName_ ('String' :: Name of the attribute)

          +
        • +
        • +

          _attValue_ ('String' :: String representation of the attribute’s value)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      Children ('List<Map<String, Object>>' :: List of the root’s child elements

      +
      +
        +
      • +

        child ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          …​common element sub structure…​

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      ~EXAMPLENODE1~ ('Map<String, Object>' :: One of the root’s child nodes)

      +
      +
        +
      • +

        …​common element structure…​

        +
      • +
      +
      +
    • +
    • +

      ~EXAMPLENODE2~ ('Map<String, Object>' :: One of the root’s child nodes)

      +
      +
        +
      • +

        …​common element sub structure…​

        +
      • +
      • +

        ~EXAMPLENODE21~ ('Map<String, Object>' :: One of the nodes' child nodes)

        +
        +
          +
        • +

          …​common element structure…​

          +
        • +
        +
        +
      • +
      • +

        ~EXAMPLENODE…​~

        +
      • +
      +
      +
    • +
    • +

      ~EXAMPLENODE…​~

      +
    • +
    +
    +
  • +
+
+
+

In contrast to the java input reader, this XML input reader does currently not provide any additional template methods.

+
+
+
+
+
+
+

Merger extensions

+
+
+

The XML plugin uses the LeXeMe merger library to produce semantically correct merge products. The merge strategies can be found in the MergeType enum and can be configured in the templates.xml as a mergeStrategy attribute:

+
+
+
    +
  • +

    mergeStrategy xmlmerge

    +
    +
    Example of a template using the mergeStrategy xmlmerge
    +
    +
    <templates>
    +	<template name="..." destinationPath="..." templateFile="..." mergeStrategy="xmlmerge"/>
    +</templates>
    +
    +
    +
  • +
+
+
+

Currently only the document types included in LeXeMe are supported. +On how the merger works consult the LeXeMe Wiki.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/eclipse-plugin_development.html b/docs/cobigen/1.0/eclipse-plugin_development.html new file mode 100644 index 00000000..4893ebe1 --- /dev/null +++ b/docs/cobigen/1.0/eclipse-plugin_development.html @@ -0,0 +1,793 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Eclipse Plugin Development

+
+
+

The Eclipse plugin is where all the other plugins (JavaPlugin, XMLPlugin, PropertyPlugin, TextMerger and the core) are loaded.

+
+
+

Configuration

+
+
+

Activator java

+
+

The Activator class is the starting point of the plugin. The Activator class is loaded initially and extends AbstractUIPlugin, which tells the Eclipse Run-time that this plugin is in some way related to the Eclipse Platform UI. An ID for the plugin is defined for configuration needs in Plugin.xml.

+
+
+
+
/**
+* The plug-in ID
+*/
+public static final String PLUGIN_ID = "com.capgemini.cobigen.eclipseplugin"; //$NON-NLS-1$
+
+
+
+

The overridden start() method starts the plugin and loads all the sub-plugins, using the PluginRegistry from the core for each plug-in:

+
+
+
+
PluginRegistry.loadPlugin(PluginActivator.class);
+
+
+
+ + + + + +
+ + +
+

How the loadPlugin works is explained deeply at core development.

+
+
+
+
+

The activator has the listener `ConfigurationProjectListener.java` from the workbenchcontrol package that checks continuously changes on the templates project

+
+
+
+

Plugin XML

+
+

The Plugin.xml file is used to initialize the plugin. It defines the commands and the handler for each command, as well as the views in which the plugin menu with the commands should be shown.

+
+
+

The command configuration:

+
+
+
+
<extension point="org.eclipse.ui.commands">
+    <command
+        id="com.capgemini.cobigen.eclipseplugin.generate"
+        name="Generate">
+    </command>
+    <command
+        id="com.capgemini.cobigen.eclipseplugin.healthy_check"
+        name="Healthy Check">
+    </command>
+</extension>
+<extension point="org.eclipse.ui.handlers">
+    <handler
+        class="com.capgemini.cobigen.eclipse.workbenchcontrol.handler.GenerateHandler"
+        commandId="com.capgemini.cobigen.eclipseplugin.generate">
+    </handler>
+    <handler
+        class="com.capgemini.cobigen.eclipse.workbenchcontrol.handler.HealthCheckHandler"
+        commandId="com.capgemini.cobigen.eclipseplugin.health_check">
+    </handler>
+</extension>
+
+
+
+

As can be seen, to define the commands, the PLUGIN_ID defined in Activator.java is used, followed by the name of the command. Then, a handler from the workbenchcontrol.handler package is assigned to each command.

+
+
+

After that, the views where we want to show the CobiGen menu as a popup menu are defined (e.g. the Project Explorer view).

+
+
+
+
<extension point="org.eclipse.ui.menus">
+    <menuContribution
+        allPopups="false"
+        locationURI="popup:org.eclipse.ui.navigator.ProjectExplorer#PopupMenu">
+        <separator
+            name="com.capgemini.cobigen.eclipseplugin.separator3"
+            visible="true">
+        </separator>
+        <menu label="CobiGen">
+            <command
+                commandId="com.capgemini.cobigen.eclipseplugin.generate"
+                label="Generate..."
+                style="push">
+            </command>
+            <command
+                commandId="com.capgemini.cobigen.eclipseplugin.health_check"
+                label="Health Check..."
+                style="push">
+            </command>
+        </menu>
+        <separator
+            name="com.capgemini.cobigen.eclipseplugin.separator4"
+            visible="true">
+        </separator>
+    </menuContribution>
+</extension>
+
+
+
+

CobiGen Menu

+
+
+
+
+
+

Handlers

+
+
+

The workbenchcontrol package provides to the plugin the listener regarding to the templates project, the listener for logging needs and the handler for the two main use cases (Generate and HealthCheck).

+
+
+

Update Templates:

+
+

Update Templates: Select an Entity file and right-click, then select CobiGen → Update Templates. After that, click on Download; once the download completes, a "download successful" message will appear.

+
+
+
+

Adapt Templates

+
+

Adapt Templates: Select an Entity file and right-click, then select CobiGen → Adapt Templates. If the CobiGen templates jar is not available, it will be downloaded automatically. If the CobiGen templates already exist, they will override the existing templates in the workspace after clicking OK, and a "templates imported successfully" message will appear.

+
+
+
+

Generate Action Handler

+
+

The wizard launching is the responsibility of the generate handler (`GenerateHandler.java`). In the case of the Generate action, and depending on the input provided for it, the handler will create a JavaGeneratorWrapper or XmlGeneratorWrapper object. For the JavaGeneratorWrapper, if the input is a package or a selection of multiple entity files, the wizard will be launched in batch mode calling the `GenerateBatchWizard.java` from the wizard.generate package. But if the input is a single entity java class file, it will be launched in normal mode calling the `GenerateWizard.java` from the same package.

+
+
+ + + + + +
+ + +
+

For both Wrapper objects, the inputs will be converted to valid inputs for FreeMarker using the `Xml/JavaInputConverter.java` from the generator.xml/java package.

+
+
+
+
+

Diagram 1

+
+
+

For XmlGeneratorWrapper, the input must be a single valid XML file. As only has a single file as input, the `GenerateWizard.java` will be called.

+
+
+

In summary, this will be the process for the Generate Action before calling the wizard:

+
+
+

diagram 2

+
+
+
+

Health Check Action Handler

+
+

In the case of the Health Check action, a success/error dialog is shown instead of a wizard itself. The `HealthCheckHandler.java` will call the execute method of `HealthCheck.java` from the healthcheck package. That class will first test whether the templates project exists in the workspace, opening an error dialog if not, by throwing and handling the custom exception `GeneratorProjectNotExistentException.java` from the common.exceptions package.

+
+
+
+
try {
+    // check configuration project existence
+    //That method will throw GeneratorProjectNotExistentException
+    generatorConfProj = ResourcesPluginUtil.getGeneratorConfigurationProject();
+    ...
+    ..
+    .
+ } catch (GeneratorProjectNotExistentException e) {
+     LOG.warn("Configuration project not found!", e);
+     healthyCheckMessage = firstStep + "NOT FOUND!\n"
+                           + "=> Please import the configuration project into your workspace as stated in the "
+                           + "documentation of CobiGen or in the one of your project.";
+     PlatformUIUtil.openErrorDialog(HEALTH_CHECK_DIALOG_TITLE, healthyCheckMessage, null);
+}
+
+
+
+

If the project exists, HealthCheck will test if the context.xml file is valid. In case of invalid, HealthCheck will throw and handle the InvalidConfigurationException from the core and check if it is possible to upgrade the version of the XML file, showing an UPGRADE button at the dialog. If the upgrade is not possible, will show a dialog message telling the user to check the context.xml file for errors.

+
+
+
+
try {
+   //The CobiGen constructor will throw the InvalidConfigurationException
+   new CobiGen(generatorConfProj.getLocationURI());
+    ...
+    ..
+    .
+} catch (InvalidConfigurationException e) {
+    healthyCheckMessage = firstStep + "OK.";
+    healthyCheckMessage += secondStep + "INVALID!";
+    if (generatorConfProj != null) {
+        Path configurationProject = Paths.get(generatorConfProj.getLocationURI());
+        ContextConfigurationVersion currentVersion = new ContextConfigurationUpgrader()
+                                                     .resolveLatestCompatibleSchemaVersion(configurationProject);
+        if (currentVersion != null) {
+            // upgrade possible
+            healthyCheckMessage += "\n\nAutomatic upgrade of the context configuration available.\n" + "Detected: "
+                                   + currentVersion + " / Currently Supported: "
+                                   + ContextConfigurationVersion.getLatest();
+            boolean upgraded = openErrorDialogWithContextUpgrade(healthyCheckMessage, configurationProject);
+            if (upgraded) {
+                // re-run Health Check
+                Display.getCurrent().asyncExec(new Runnable() {
+                    @Override
+                    public void run() {
+                        execute();
+                    }
+                });
+            }
+            return;
+        } else {
+            healthyCheckMessage += "\n\nNo automatic upgrade of the context configuration possible. "
+                                   + "Maybe just a mistake in the context configuration?";
+            healthyCheckMessage += "\n\n=> " + e.getLocalizedMessage();
+        }
+}
+
+
+
+

At this point, if all is correct, the user can choose to finish the HealthCheck process or run the Advanced Health Check, running the `AdvancedHealthCheck.java` to check the validity of the template configurations. That check has three steps:

+
+
+
    +
  1. +

    Get configuration resources
    +Will get the template configuration file from the template folder corresponding to the input of the plugin provided by the triggers defined at the context.xml file for that input.

    +
  2. +
  3. +

    Determine current state
    +Will check if the template configuration file exists, if it is accessible and if the version is up-to-date allowing upgrading if not.

    +
  4. +
  5. +

    Show current status to the user
    +Will call the `AdvancedHealthCheckDialog.java` showing a dialog with the current state of each configuration template, showing an UPGRADE button if the configuration version can be upgraded.

    +
  6. +
+
+
+
+
+
+

Wizard Development

+
+
+

Starting the Wizard

+
+

To open a wizard, use the WizardDialog class from the org.eclipse.jface.wizard package. +The plugin does that at `GenerateHandler.java` as previously explained here:

+
+
+
+
if (((IStructuredSelection) sel).size() > 1 || (((IStructuredSelection) sel).size() == 1)
+     && ((IStructuredSelection) sel).getFirstElement() instanceof IPackageFragment) {
+     WizardDialog wiz = new WizardDialog(HandlerUtil.getActiveShell(event),
+                        new GenerateBatchWizard(generator));
+     wiz.setPageSize(new Point(800, 500));
+     wiz.open();
+     LOG.info("Generate Wizard (Batchmode) opened.");
+} else if (((IStructuredSelection) sel).size() == 1) {
+     WizardDialog wiz = new WizardDialog(HandlerUtil.getActiveShell(event), new GenerateWizard(generator));
+     wiz.setPageSize(new Point(800, 500));
+     wiz.open();
+     LOG.info("Generate Wizard opened.");
+}
+
+
+
+

Adapt Templates: Select an Entity file and right-click, then select CobiGen → Adapt Templates. If the CobiGen templates jar is not available, it will be downloaded automatically. If the CobiGen templates already exist, they will override the existing templates in the workspace after clicking OK, and a "templates imported successfully" message will appear.

Wizard and WizardPages

+
+
+

The Wizard class from the org.eclipse.jface.wizard package provides the functionality to build custom wizards. This class controls the navigation between the different pages and provides the base user interface, for example, an area for error and information messages.

+
+
+

A wizard contains one or several pages of the type WizardPage. Such a page is added to a Wizard object via the addPage() method.

+
+
+

A WizardPage must create a new Composite in its createControl() method. This new Composite must use the Composite of the method parameter as parent. It also must call the setControl() method with this new Composite as parameter. If this is omitted, Eclipse will throw an error.

+
+
+

On the CobiGen eclipse-plugin project: +Diagram 3

+
+
+

The WizardPage class defines the canFlipToNextPage() and setPageComplete() methods to control if the NEXT or the FINISH button in the wizard becomes active.

+
+
+

The Wizard class defines the canFinish() method in which you can define if the wizard can be completed. This last method is overridden in AbstractGenerateWizard.java.

+
+
+
+

Select Files Page and Select Attributes Page

+
+

In case that has been launched in batch mode, the wizard only will have the select increment and files page (initialized and configured at `SelectFilePage.java` from the package wizard.common)

+
+
+

In case of normal mode with an entity java class as input, the wizard will have an optional second page provided for `SelectAttributesPage.java` of the package wizard.generate.common for selecting attributes of the entity that will be used for the generation. The page is optional because the user can finish the wizard and perform the generation from the first page.

+
+
+

The pages of the CobiGen wizard are composed essentially of containers. The containers have a CheckBoxTreeViewer object, a content provider object and a listener (that defines the behavior of the wizard when a check box is checked or unchecked)

+
+
+

Diagram 4

+
+
+

Select Files Page

+
+

The first page (`SelectFilesPage`) is composed of two containers:

+
+
+
    +
  1. +

    Left container - Increment Selector

    +
    +
      +
    • +

      Created as a CustomizedCheckBoxTreeViewer

      +
    • +
    • +

      The content provider is a SelectIncrementContentProvider

      +
    • +
    • +

      Setting the input will upgrade the labels to show

      +
    • +
    • +

      Set CheckStateListener as listener

      +
    • +
    +
    +
  2. +
+
+
+
+
incrementSelector = new CustomizedCheckboxTreeViewer(containerLeft);
+incrementSelector.setContentProvider(new SelectIncrementContentProvider());
+incrementSelector.setInput(cobigenWrapper.getAllIncrements());
+gd = new GridData(GridData.FILL_BOTH);
+gd.grabExcessVerticalSpace = true;
+incrementSelector.getTree().setLayoutData(gd);
+CheckStateListener checkListener = new CheckStateListener(cobigenWrapper, this, batch);
+incrementSelector.addCheckStateListener(checkListener);incrementSelector.expandAll();
+
+
+
+
    +
  1. +

    Right Container - Resources to be generated

    +
    +
      +
    • +

      Created as SimulatedCheckBoxTreeViewer if the Customize button is not enabled or as CustomizedCheckBoxTreeViewer if it is.

      +
    • +
    • +

      SelectFileContentProvider as content provider.

      +
    • +
    • +

      SelectFileLabelProvider as label provider

      +
    • +
    • +

      Generation target project as input

      +
    • +
    • +

      Set CheckStateListener as listener

      +
    • +
    +
    +
  2. +
+
+
+ + + + + +
+ + +
+

To know how a content provider works check the official documentation here.

+
+
+
+
+
+

Select Attributes Page

+
+

As previously explained, this page is optional; the user can press the Finish button on the previous page. Nevertheless, this page can only be accessed in the case of a single entity file as input, never in batch mode.

+
+
+

The container is composed by a single CheckBoxTableViewer with a `SelectAttributesContentProvider` as content provider and a `SelectAttributesLabelProvider` as label provider.

+
+
+
+
+
+
+

Finish and perform generation

+
+
+

When the user presses the Finish button, the generation process will begin. For that, a generation job will be created using as argument a list of templates to be generated, retrieving them from the user selection on the first page (Select Files Page). The generate wizard will use the `GenerateSelectionJob.java` or the `GenerateBatchSelectionJob.java` for normal mode or batch mode respectively.

+
+
+

Diagram 5

+
+
+

Normal Mode

+
+
+
@Override
+protected void generateContents(ProgressMonitorDialog dialog) {
+    if (cobigenWrapper instanceof JavaGeneratorWrapper) {
+        for (String attr : page2.getUncheckedAttributes()) {
+            ((JavaGeneratorWrapper) cobigenWrapper).removeFieldFromModel(attr);
+        }
+    }
+    //Here are retrieved the templates to use for the generation selected at the first page
+    GenerateSelectionJob job = new GenerateSelectionJob(cobigenWrapper, page1.getTemplatesToBeGenerated());
+    try {
+        dialog.run(true, false, job);
+    } catch (InvocationTargetException e) {
+        LOG.error("An internal error occurred while invoking the generation job.", e);
+    } catch (InterruptedException e) {
+        LOG.warn("The working thread doing the generation job has been interrupted.", e);
+    }
+}
+
+
+
+

The dialog.run(true, false, job) method will call the performGeneration() method from `GenerateSelectionJob.java`

+
+
+

Calling the generate() method from the CobiGenWrapper will call the method with the same name from the core and the generation will begin.

+
+
+
+

Batch Mode

+
+

In batch mode, the generation job will be instantiated depending on whether the selection was a container or a multiple-file selection.

+
+
+
+
@Override
+protected void generateContents(ProgressMonitorDialog dialog) {
+    List<TemplateTo> templatesToBeGenerated = page1.getTemplatesToBeGenerated();
+    List<String> templateIds = Lists.newLinkedList();
+    for (TemplateTo template : templatesToBeGenerated) {
+        templateIds.add(template.getId());
+    }
+    GenerateBatchSelectionJob job;
+    if (container == null) {
+        job = new GenerateBatchSelectionJob(cobigenWrapper, cobigenWrapper.getTemplates(templateIds),
+                  inputTypes);
+    } else {
+        job = new GenerateBatchSelectionJob(cobigenWrapper, cobigenWrapper.getTemplates(templateIds),
+                  container);
+    }
+    try {
+        dialog.run(true, false, job);
+    } catch (InvocationTargetException e) {
+        LOG.error("An internal error occurred while invoking the generation batch job.", e);
+    } catch (InterruptedException e) {
+        LOG.warn("The working thread doing the generation job has been interrupted.", e);
+    }
+}
+
+
+
+

The dialog.run(true, false, job) method will call the performGeneration() method from `GenerateBatchSelectionJob.java`

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/guide_dev_troubleshooting.html b/docs/cobigen/1.0/guide_dev_troubleshooting.html new file mode 100644 index 00000000..01b01651 --- /dev/null +++ b/docs/cobigen/1.0/guide_dev_troubleshooting.html @@ -0,0 +1,446 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Troubleshooting for Developers

+
+
+

CobiGen-eclipse or CobiGen-eclipse-test has build errors after git clone or pull

+
+
+

This might be caused as of the fact, that the cobigen-eclipse*/lib folder is not available after initial cloning or the contents of the lib folder are not in sync with the dependencies specified in the pom.xml respectively with the classpath inclusions in the plugin.xml (tab runtime).

+
+
+

Solution

+
+

In any of these cases you can fix the issue by running mvn clean package -Pp2-build-photon.

+
+
+
+
+
+

Getting Not authorized , Reason Phrase: Unauthorized

+
+
+

You are facing an error like

+
+
+
+
[ERROR] [ERROR] Some problems were encountered while processing the POMs:
+[ERROR] Unresolveable build extension: Plugin org.apache.maven.wagon:wagon-ftp:1.0-beta-6 or one of its dependencies could not be resolved: Failed to read artifact descriptor for org.apache.maven.wagon:wagon-ftp:jar:1.0-beta-6 @@
+[ERROR] The build could not read 1 project -> [Help 1]
+[ERROR]
+[ERROR]   The project com.capgemini:cobigen-htmlplugin:1.1.0 (D:\toolscobigen\tools-cobigen2\cobigen\cobigen\cobigen-htmlplugin\pom.xml) has 1 error
+[ERROR]     Unresolveable build extension: Plugin org.apache.maven.wagon:wagon-ftp:1.0-beta-6 or one of its dependencies could not be resolved: Failed to read a
+rtifact descriptor for org.apache.maven.wagon:wagon-ftp:jar:1.0-beta-6: Could not transfer artifact org.apache.maven.wagon:wagon-ftp:pom:1.0-beta-6 from/to publ
+ic (https://devon.s2-eu.capgemini.com/nexus/content/groups/public/): Not authorized , ReasonPhrase: Unauthorized. -> [Help 2]
+[ERROR]
+[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
+[ERROR] Re-run Maven using the -X switch to enable full debug logging.
+[ERROR]
+[ERROR] For more information about the errors and possible solutions, please read the following articles:
+[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/ProjectBuildingException
+[ERROR] [Help 2] http://cwiki.apache.org/confluence/display/MAVEN/PluginManagerException
+
+
+
+

Solution

+
+

Please note the message Not authorized , ReasonPhrase: Unauthorized. → [Help 2]!

+
+
+
    +
  1. +

    Please check, that you run the command by using the console.bat or a similar console initialized with the IDE environment variables.

    +
  2. +
  3. +

    Please check your corporate login in the variables-customized.bat to be correct (DEVON_NEXUS_USER and DEVON_NEXUS_PASSWD). Make sure, that you restart the console.bat you are working in after changing the variables-customized.bat. Same holds for eclipse instances running. Please restart to make the new values accessible.

    +
  4. +
  5. +

    Please check that any special characters in your password are properly escaped.

    +
  6. +
  7. +

    Please check whether you are able to login to https://devon.s2-eu.capgemini.com and Nexus is up and running. If you cannot login, contact one of the main developers.

    +
  8. +
+
+
+
+
+
+

Testing changes on the CobiGen-core

+
+
+

To test changes implemented on the cobigen-core you have to follow the next process:

+
+
+
    +
  1. +

    Open a console and step into cobigen/cobigen-core-parent. Run mvn clean install and remember the jar version you have just installed.

    +
  2. +
  3. +

    On Eclipse, go to cobigen/cobigen-eclipse pom.xml and change the <version> of your cobigen-core.

    +
  4. +
  5. +

    Also check on the cobigen/cobigen-core-parent pom.xml that it is using the just installed version.

    +
  6. +
  7. +

    Open a console and step into cobigen/cobigen-eclipse. Run mvn clean package -Pp2-build-photon,p2-build-stable,p2-build-experimental.

    +
  8. +
  9. +

    On Eclipse, go to cobigen/cobigen-eclipse and double-click 'plugin.xml'. On the bottom, click on 'runtime' tab. On 'classpath', add a new library and choose the jars you have just installed.

    +
  10. +
  11. +

    Refresh on Eclipse, press F5 on the cobigen-eclipse inside Eclipse.

    +
  12. +
  13. +

    If you still see compilation errors: On Eclipse, right-click cobigen/cobigen-eclipse → Maven → Update projects.

    +
  14. +
+
+
+
+
+

Issues with workspace when Oomph automatic updates don’t work (especially for Indian colleagues)

+
+
+

Executing the eclipse-cobigen-development.bat file will open Eclipse with all the projects automatically imported. Oomph creates 'Working Sets' and sets 'Top Level Elements' pointing to that working set. For countries where a proxy restricts Oomph from executing, we see no projects imported into the Project Explorer/Navigator. Rather than trying a manual import, which can later give build issues, we should follow the solution below. +Build issues could be like:

+
+
+
+
[ERROR] Cannot resolve project dependencies:
+[ERROR]   You requested to install 'com.capgemini.cobigen-htmlplugin 0.0.0' but it could not be found
+[ERROR]
+[ERROR] See http://wiki.eclipse.org/Tycho/Dependency_Resolution_Troubleshooting for help.
+[ERROR] Cannot resolve dependencies of MavenProject: com.capgemini:com.capgemini.cobigen.eclipse.test:3.0.1 @
+
+
+
+

Solution

+
+

In Eclipse, you can click the small downward arrow in the upper right corner of the Navigator/Project Explorer view and go to 'Top Level Elements' and point them to 'Projects'. This should show all the projects inside Project Explorer View. Also, Each plugin should point to respective branch.

+
+
+
+
+
+

Issue when testing Eclipse plugin by Running as Eclipse Application.

+
+
+

Error message will be like:

+
+
+
+
1) Caused by: java.lang.ClassNotFoundException: An error occurred while automatically activating bundle com.devonfw.cobigen.eclipse
+2) org.osgi.framework.BundleException: Error starting module.
+3) org.eclipse.core.runtime.CoreException: Plug-in com.devonfw.cobigen.eclipse was unable to load class com.devonfw.cobigen.eclipse.workbenchcontrol.handler.XXXXHandler.
+
+
+
+

Solution

+
+

Delete or rename the runtime-EclipseApplication folder inside the workspaces folder. Re-run and try setting up the workspace in that environment again. It should work!!

+
+
+
+
+
+
1) Caused by: java.lang.ClassNotFoundException: An error occurred while automatically activating bundle com.devonfw.cobigen.eclipse
+2) org.osgi.framework.BundleException: Error starting module.
+3) org.eclipse.core.runtime.CoreException: Plug-in com.devonfw.cobigen.eclipse was unable to load class com.devonfw.cobigen.eclipse.workbenchcontrol.handler.XXXXHandler.
+
+
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/howto-devonfw-CobiGen-OpenAPI.html b/docs/cobigen/1.0/howto-devonfw-CobiGen-OpenAPI.html new file mode 100644 index 00000000..8577a928 --- /dev/null +++ b/docs/cobigen/1.0/howto-devonfw-CobiGen-OpenAPI.html @@ -0,0 +1,937 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + + +
+

End to End POC Code generation using OpenAPI +This article helps to create a sample application using CobiGen.

+
+
+

Prerequisites

+
+
+

Download and install the devonfw IDE here.

+
+
+
+
+

Steps to create a Sample Project using Cobigen

+
+
+

The HOW_TO is divided in 2 parts:

+
+
+
    +
  1. +

    BE-Back End generator (DB + DAO + services) – CONTRACT FIRST APPROACH

    +
  2. +
  3. +

    FE-Front End generator (Web App Angular + Ionic App) – CONTRACT FIRST APPROACH

    +
  4. +
+
+
+

cobigen ionic code generation

+
+
+

So, ready to go! We’re going to start from the BE part …

+
+
+

+
+
+

Back End

+
+

run \devonfw-ide-scripts-3.2.4\eclipse-main.bat

+
+
+

It will open eclipse

+
+
+

create a project using below command from the command prompt

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

Import the project to eclipse as maven project +eclipse devon

+
+
+

Click FINISH

+
+
+

Now We have the following 4 projects.

+
+
+

eclipse package explorer

+
+
+

BEFORE to start to create an Entity class, remember to create the tables !

+
+
+
    +
  1. +

    Create a new SQL file (i.e: V0005__CreateTables_ItaPoc.sql) inside jwtsample-core and insert the following script:

    +
  2. +
+
+
+
+
CREATE TABLE EMPLOYEE (
+id BIGINT auto_increment, modificationCounter INTEGER NOT NULL,
+employeeid BIGINT auto_increment,
+name VARCHAR(255),
+surname VARCHAR(255),
+email VARCHAR(255),
+PRIMARY KEY (employeeid)
+);
+
+
+
+

WARNING: please note that there are 2 underscores in the name!

+
+
+

sql file

+
+
+
    +
  1. +

    Now create another SQL file (i.e: V0006__PopulateTables-ItaPoc.sql) and add following script about the INSERT in order to populate the table created before

    +
  2. +
+
+
+

WARNING: please note that there are 2 underscores in the name!

+
+
+
+
INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (1, 1, 1, 'Stefano','Rossini','stefano.rossini@capgemini.com');
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (2, 2, 2, 'Angelo','Muresu', 'angelo.muresu@capgemini.com');
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (3, 3, 3, 'Jaime','Gonzalez', 'jaime.diaz-gonzalez@capgemini.com');
+
+
+
+

sql insert

+
+
+

Let’s create the yml file for the code generation

+
+
+
    +
  1. +

    Now create a new file devonfw.yml in the root of your core folder. This will be our OpenAPI contract, like shown below. Then, copy the contents of this file into your OpenAPI. It defines some REST service endpoints and an EmployeeEntity with its properties defined.

    +
  2. +
+
+
+

Important: if you want to know how to write an OpenAPI contract compatible with CobiGen, please read this tutorial.

+
+
+

Swagger at OASP4J Project

+
+
+
    +
  1. +

    Right click devonfw.yml. CobiGen → Generate

    +
  2. +
+
+
+

It will ask you to download the templates, click on update:

+
+
+

cobigen generate

+
+
+

It will automatically download the latest version of CobiGen_Templates.

+
+
+

Attention: If you want to adapt the CobiGen_Templates (normally this is not necessary), you will find at the end of this document a tutorial on how to import them and adapt them!

+
+
+
    +
  1. +

    Click on all the option selected as below:

    +
  2. +
+
+
+

cobigen option selection

+
+
+
    +
  1. +

    Click on finish. Below Screen would be seen. Click on continue

    +
  2. +
+
+
+

cobigen finish

+
+
+

The entire BE layer structure having CRUD operation methods will be auto generated.

+
+
+

Some classes will be generated on the api part (jwtsample-api), normally it will be interfaces, as shown below:

+
+
+

be layer

+
+
+

Some other classes will be generated on the core part (jwtsample-core), normally it will be implementations as shown below:

+
+
+

core folder

+
+
+

BEFORE to generate the FE, please start the Tomcat server to check that BE Layer has been generated properly.

+
+
+

To start a server you just have to right click on SpringBootApp.java → run as → Spring Boot app

+
+
+

Eclipse run as

+
+
+

Spring boot run

+
+
+

Spring boot run

+
+
+

BE DONE

+
+
+

Last but not least: We make a quick REST services test !

+
+
+

See in the application.properties the TCP Port and the PATH

+
+
+

application properties

+
+
+

Now compose the Rest service URL:

+
+
+

&lt;server&gt;/&lt;app&gt;/services/rest/&lt;rest service class path&gt;/&lt;service method path&gt;

+
+
+
    +
  • +

    <server> refers to server with port no. (ie: localhost:8081)

    +
  • +
  • +

    <app> is in the application.properties (empty in our case, see above)

    +
  • +
  • +

    <rest service class path> refers to EmployeemanagementRestService: (i.e: /employeemanagement/v1)

    +
  • +
  • +

    <service method path>/employee/{id}  (i.e: for  getEmployee method)

    +
  • +
+
+
+

url mapping

+
+
+

URL of getEmployee for this example is:

+
+
+

For all employees

+
+
+
+
http://localhost:8081/services/rest/employeemanagement/v1/employee/search
+
+
+
+

For the specific employee

+
+
+
+
http://localhost:8081/services/rest/employeemanagement/v1/employee/1
+
+
+
+

Now download Postman to test the rest services.

+
+
+

Once done, you have to create a POST Request for the LOGIN and insert in the body the JSON containing the username and password waiter

+
+
+

postman

+
+
+

Once done with success (Status: 200 OK) …

+
+
+

postman

+
+
+

… We create a NEW POST Request and We copy the Authorization Bearer field (see above) and We paste it in the Token field (see below)

+
+
+

postman

+
+
+

and specify the JSON parameters for the pagination of the request that we’re going to send:

+
+
+

postman

+
+
+

postman

+
+
+

Now you can click postman

+
+
+

Now you have to check that the response has Status: 200 OK and see the below list of Employee

+
+
+

postman

+
+
+

Now that we have successfully tested the BE, it is time to create the FE!

+
+
+
+

Front End

+
+

Let’s start now with angular Web and then Ionic app.

+
+
+

Angular Web App

+
+
    +
  1. +

    To generate angular structure, download or clone *devon4ng-application-template* from

    +
    +
    +
    https://github.com/devonfw/devon4ng-application-template
    +
    +
    +
  2. +
+
+
+

devon dist folder

+
+
+
    +
  1. +

    Once done, right click on devonfw.yml again (the OpenAPI contract). CobiGen → Generate

    +
  2. +
  3. +

    Click on the selected options as seen in the screenshot:

    +
  4. +
+
+
+

eclipse generate

+
+
+
    +
  1. +

    Click on Finish

    +
  2. +
+
+
+

eclipse

+
+
+
    +
  1. +

    The entire ANGULAR structure has been auto generated. The generated code will be merged to the existing.

    +
  2. +
+
+
+

angular ee layer

+
+
+
    +
  1. +

    IMPORTANT now you have to add in the app-routing.module.ts file the next content, as a child of HomeComponent, in order to enable the route of the new generated component

    +
  2. +
+
+
+
+
,\{
+path: 'employee',
+component: EmployeeGridComponent,
+canActivate: [AuthGuard],
+},
+
+
+
+

Following picture explain where to place the above content:

+
+
+

routes

+
+
+
    +
  1. +

    Open the command prompt and execute devon yarn install from the base folder, which would download all the required libraries.

    +
  2. +
+
+
+
    +
  1. +

    Check the file environment.ts if the server path is correct. (for production you will have to change also the environment.prod.ts file)

    +
  2. +
+
+
+

environment

+
+
+

In order to do that it’s important to look at the application.properties to see the values as PATH, TCP port etc …

+
+
+

configure

+
+
+

For example, in this case, since the context path is empty, the server URLs should be like:

+
+
+
+
export const environment = {
+production: false,
+restPathRoot: 'http://localhost:8081/',
+restServiceRoot: 'http://localhost:8081/services/rest/',
+security: 'jwt'
+};
+
+
+
+

Warning: REMEMBER to set the security field to jwt, if it is not configured already.

+
+
+
    +
  1. +

    Now run the *ng serve -o* command to run the Angular Application.

    +
  2. +
+
+
+

image44

+
+
+
    +
  1. +

    If the command execution is successful, the below screen will appear and it would be automatically redirected to the url:

    +
    +
    +
    http://localhost:4200/login
    +
    +
    +
  2. +
+
+
+

image45

+
+
+

WebApp DONE

+
+
+
+

Ionic Mobile App

+
+
    +
  1. +

    To generate Ionic structure, download or clone *devon4ng-application-template* from

    +
    +
    +
    https://github.com/devonfw/devon4ng-ionic-application-template
    +
    +
    +
  2. +
  3. +

    Once done, Right click on the devonfw.yml as you already did before in order to use CobiGen.

    +
  4. +
  5. +

    Click on the selected options as seen in the screenshot:

    +
  6. +
+
+
+

image46

+
+
+
    +
  1. +

    Click on Finish

    +
  2. +
  3. +

    The entire ionic structure will be auto generated.

    +
  4. +
+
+
+

image47

+
+
+
    +
  1. +

    Change the server url (with correct serve url) in environment.ts, environment.prod.ts and environment.android.ts files (i.e: itapoc\devon4ng-ionic-application-template\src\environments\).

    +
  2. +
+
+
+

The angular.json file inside the project has already a build configuration for android.

+
+
+

image48

+
+
+
    +
  1. +

    Run npm install in the root folder to download the dependencies

    +
  2. +
  3. +

    Run ionic serve

    +
  4. +
+
+
+

image49

+
+
+
    +
  1. +

    +
    +

    Once the execution is successful

    +
    +
  2. +
+
+
+

image50

+
+
+
    +
  • +

    Mobile App DONE*

    +
  • +
+
+
+

So: well done

+
+
+

Starting from an Entity class you’ve successfully generated the Back-End layer (REST, SOAP, DTO, Spring services, Hibernate DAO), the Angular Web App and the Ionic mobile App!

+
+
+

image51

+
+
+
Build APK
+
+

Since we’re going to create an APK, remember the following pre-conditions:

+
+
+ +
+
+
    +
  1. +

    Now, open cmd and type the path where your devon4ng-ionic-application-template project is present.

    +
  2. +
  3. +

    Run the following commands:

    +
    +
      +
    1. +

      npx cap init

      +
    2. +
    3. +

      ionic build --configuration=android

      +
    4. +
    5. +

      npx cap add android

      +
    6. +
    7. +

      npx cap copy

      +
    8. +
    9. +

      npx cap open android

      +
    10. +
    +
    +
  4. +
  5. +

    Build the APK using Android studio.

    +
  6. +
+
+
+

image52 +image53 +image54 +image55

+
+
+

You can find your apk file in

+
+
+

/devon4ng-ionic-application-template/android/app/build/outputs/apk/debug

+
+
+
+
+
+
+
+

Adapt CobiGen_Templates

+
+
+

After following this tutorial, you will have the CobiGen_Templates downloaded on your local machine. To import these templates you need to do the following:

+
+
+

Right click in any part of the package explorer, then click on CobiGen → Adapt templates

+
+
+

image56

+
+
+

Click Ok:

+
+
+

image57

+
+
+

Now the CobiGen_Templates project will be automatically imported into your workspace, as shown on the image below:

+
+
+

image58

+
+
+

image59

+
+
+

Now you just need to change the Java version of the project to JRE 1.8. Right click on the JRE system library, and then on Properties:

+
+
+

image60

+
+
+

Now change the version to Java 1.8 +image61

+
+
+

Now you have successfully imported the CobiGen templates. If you want to edit them, you will find them in the folder src/main/templates. For instance, the Java templates are located here:

+
+
+

image62

+
+
+

Now you can adapt the templates as much as you want. Documentation about this can be found on:

+
+
+
+
https://github.com/devonfw/tools-cobigen/wiki/Guide-to-the-Reader
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/howto-devonfw-CobiGen.html b/docs/cobigen/1.0/howto-devonfw-CobiGen.html new file mode 100644 index 00000000..27d7f9c8 --- /dev/null +++ b/docs/cobigen/1.0/howto-devonfw-CobiGen.html @@ -0,0 +1,1089 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + + +
+

End to End POC Code generation using OpenAPI and Entity class +This article helps to create a sample application using CobiGen.

+
+
+

Prerequisites

+
+
+

Download and install the devonfw IDE here.

+
+
+
+
+

Steps to create a sample Project using Cobigen

+
+
+

The HOW_TO is divided in 2 parts:

+
+
+
    +
  1. +

    BE-Back End generator (DB + DAO + services) – CONTRACT FIRST APPROACH

    +
  2. +
  3. +

    FE-Front End generator (Web App Angular + Ionic App) – CONTRACT FIRST APPROACH

    +
  4. +
+
+
+

cobigen ionic code generation

+
+
+

So, ready to go! We’re going to start from the BE part …

+
+
+

Back End

+
+

create a project using below command from the command prompt

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

run +\devonfw-ide-scripts-<version>\eclipse-main.bat +to open eclipse

+
+
+

Import the project to eclipse as maven project +eclipse devon

+
+
+

Click FINISH

+
+
+

Now We have the following 4 projects.

+
+
+

eclipse package explorer

+
+
+

Remove the existing configure method from myapp-core com.example.domain.myapp.general.service.impl.config.BaseWebSecurityConfig and copy below security configuration code and paste.

+
+
+
+
@Override
+ public void configure(HttpSecurity http) throws Exception {
+   http.authorizeRequests().anyRequest().permitAll().and().csrf().disable()
+       .addFilterAfter(getSimpleRestAuthenticationFilter(), BasicAuthenticationFilter.class)
+       .addFilterAfter(getSimpleRestLogoutFilter(), LogoutFilter.class);
+   if (this.corsEnabled) {
+     http.addFilterBefore(getCorsFilter(), CsrfFilter.class);
+   }
+ }
+
+
+
+

Check resources/config/application.properties to see the values as PATH, TCP port etc … +Also make sure the below property is present.

+
+
+

security.cors.enabled=true

+
+
+

configure

+
+
+

BEFORE to start to create an Entity class, remember to create the tables !

+
+
+
    +
  • +

    Create new SQL file inside myapp-core resources/db/migration/specific/h2 and insert the following script:

    +
  • +
+
+
+

V0005__CreateTables_ItaPoc.sql (Please note 2 underscores after V0005)

+
+
+
+
CREATE TABLE EMPLOYEE (
+id BIGINT auto_increment,
+modificationCounter INT NOT NULL,
+employeeid BIGINT auto_increment,
+name VARCHAR(255),
+surname VARCHAR(255),
+email VARCHAR(255),
+PRIMARY KEY (employeeid)
+);
+
+
+
+

sql file

+
+
+
    +
  • +

    Now create another SQL file (i.e: V0006__PopulateTables-ItaPoc.sql ) and add following script about the INSERT in order to populate the table created before

    +
  • +
+
+
+
+
INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (1, 1, 1, 'Stefano','Rossini','stefano.rossini@capgemini.com');
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (2, 2, 2, 'Angelo','Muresu', 'angelo.muresu@capgemini.com');
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (3, 3, 3, 'Jaime','Gonzalez', 'jaime.diaz-gonzalez@capgemini.com');
+
+
+
+

sql insert

+
+
+
+
+
+

Back end Code Generation

+
+
+

Back end code can be generated from either of the below 2 methods

+
+
+
    +
  1. +

    OpenAPI .yml file

    +
  2. +
  3. +

    java Entity class.

    +
  4. +
+
+
+

Prepare OpenAPI .yml file

+
+
+

Create a .yml file which satisfies the OpenAPI standards or check the sample file devonfw_employee.yml and prepare.

+
+
+

Important: if you want to know how to write an OpenAPI contract compatible with CobiGen, please read this tutorial.

+
+
+

Swagger at OASP4J Project

+
+
+

Right click devonfw.yml. CobiGen → Generate

+
+
+

It will ask you to download the templates, click on update:

+
+
+

Prepare Entity class

+
+
+

Create a package com.example.domain.myapp.employeemanagement.dataaccess.api

+
+
+

under the folder myapp-core. Note: It is important to follow this naming convention for CobiGen to work properly.

+
+
+

package

+
+
+

Now create a JPA Entity class in this package

+
+
+
+
import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Column;
+@Entity
+@javax.persistence.Table(name = "EMPLOYEE")
+public class EmployeeEntity {
+ @Column(name = "EMPLOYEEID")
+ @GeneratedValue(strategy = GenerationType.IDENTITY)
+ private Long employeeId;
+ @Column(name = "NAME")
+ private String name;
+ @Column(name = "SURNAME")
+ private String surname;
+ @Column(name = "EMAIL")
+ private String email;
+}
+
+
+
+

then generate getters and setters for all attributes

+
+
+
+
Use Cobigen to generate code. Right click on EmployeeEntity. CobiGen -> Generate
+
+
+
+

It will ask you to download the templates, click on update:

+
+
+

Code Generation

+
+

cobigen generate

+
+
+

It will automatically download the latest version of CobiGen_Templates.

+
+
+

Attention: If you want to adapt the CobiGen_Templates (normally this is not necessary), you will find at the end of this document a tutorial on how to import them and adapt them!

+
+
+
    +
  • +

    Click on all the option selected as below:

    +
  • +
+
+
+

cobigen option selection

+
+
+
    +
  • +

    Click on finish. Below Screen would be seen. Click on continue

    +
  • +
+
+
+

cobigen finish

+
+
+

The entire BE layer structure having CRUD operation methods will be auto generated.

+
+
+

Some classes will be generated on the api part (jwtsample-api), normally it will be interfaces, as shown below:

+
+
+

be layer

+
+
+

Some other classes will be generated on the core part (jwtsample-core), those are the implementations as shown below:

+
+
+

core folder

+
+
+

BEFORE to generate the FE, please start the Tomcat server to check that BE Layer has been generated properly.

+
+
+

To start a server you just have to right click on SpringBootApp.java → run as → Spring Boot app

+
+
+

Eclipse run as

+
+
+

Spring boot run

+
+
+

Spring boot run

+
+
+

BE DONE

+
+
+
+

Test the Services

+
+

Download Postman to test the rest services.

+
+
+

Get the port and path from application.properties

+
+
+

application properties

+
+
+

Now compose the Rest service URL:

+
+
+

&lt;server&gt;/&lt;app&gt;/services/rest/&lt;rest service class path&gt;/&lt;service method path&gt;

+
+
+
    +
  • +

    <server> refers to server with port no. (ie: localhost:8081)

    +
  • +
  • +

    <app> is in the application.properties (empty in our case, see above)

    +
  • +
  • +

    <rest service class path> refers to EmployeemanagementRestService: (i.e: /employeemanagement/v1)

    +
  • +
  • +

    <service method path>/employee/{id}  (i.e: for  getEmployee method)

    +
  • +
+
+
+

url mapping

+
+
+

URL of getEmployee for this example is:

+
+
+

For all employees

+
+
+
+
POST
+http://localhost:8081/services/rest/employeemanagement/v1/employee/search
+Content-Type    application/json
+{"name":"Angelo"}
+
+
+
+

For the specific employee

+
+
+
+
GET
+http://localhost:8081/services/rest/employeemanagement/v1/employee/1
+
+
+
+

In postman, create a POST Request for the LOGIN and insert in the body the JSON containing the username and password admin

+
+
+

Login Test using postman

+
+
+
+
    POST
+    http://localhost:8081/services/rest/login
+    Content-Type    application/json
+    {
+    "j_username":"admin",
+    "j_password":"admin"
+     }
+
+
+
+
    +
  • +

    Set the header

    +
  • +
+
+
+

Send will return 200 OK as response.

+
+
+

postman

+
+
+

postman

+
+
+

… We create a NEW POST Request and We copy the Authorization Bearer field (see above) and We paste it in the Token field (see below)

+
+
+

postman

+
+
+

and specify the JSON parameters for the pagination of the request that we’re going to send:

+
+
+

postman

+
+
+

postman

+
+
+

Now you can click postman

+
+
+

Now you have to check that the response has Status: 200 OK and see the below list of Employee

+
+
+

postman

+
+
+

Now that we have successfully tested the BE, it is time to create the FE!

+
+
+
+

Front End

+
+

Let’s start now with angular Web and then Ionic app.

+
+
+

Angular Web App

+
+
    +
  • +

    To generate angular structure, download or clone devon4ng-application-template from

    +
    +
    +
    https://github.com/devonfw/devon4ng-application-template
    +
    +
    +
  • +
+
+
+

devon dist folder

+
+
+

Place the files inside workspace under the folder devon4ng-application-template.

+
+
+

eg: C:\projects\devonfw-ide-scripts-2020.08.002\workspaces\main\devon4ng-application-template

+
+
+

In Devon IDE, right click on EmployeeEto.java file present under the package com.example.domain.myapp.employeemanagement.logic.api.to

+
+
+

For OpenAPI, right click on devonfw.yml again.

+
+
+
    +
  • +

    CobiGen → Generate

    +
  • +
+
+
+

Click on the selected options as seen in the screenshot:

+
+
+

eclipse generate

+
+
+
    +
  • +

    Click on Finish

    +
  • +
+
+
+

eclipse

+
+
+
    +
  • +

    The entire ANGULAR structure has been auto generated. The generated code will be merged to the existing.

    +
  • +
+
+
+

angular ee layer

+
+
+
    +
  • +

    IMPORTANT now you have to add in the app-routing.module.ts file the next content, as a child of HomeComponent, in order to enable the route of the new generated component

    +
  • +
+
+
+
+
{
+      path: 'employee',
+      loadChildren: () =>
+          import('./employee/employee.module').then(
+              m => m.EmployeeModule,
+          )
+}
+
+
+
+

Following picture explain where to place the above content, also remove if any duplicate code is there.

+
+
+

routes

+
+
+
    +
  • +

    Add newly generated module to the left menu. +Modify the file app\layout\nav-bar\nav-bar.component.html, add the below code.

    +
  • +
+
+
+
+
<a id="employee" mat-list-item [routerLink]="['./employee']" (click)="close()">
+       <mat-icon matListAvatar>
+        grid_on
+       </mat-icon> <h3 matLine> {{ 'employeemanagement.Employee.navData' | transloco }} </h3>
+       <p matLine class="desc"> {{ 'employeemanagement.Employee.navDataSub' | transloco }} </p>
+</a>
+
+
+
+

nav bar

+
+
+
    +
  • +

    Check the file environment.ts if the server path is correct. (for production you will have to change also the environment.prod.ts file)

    +
  • +
+
+
+

environment

+
+
+

For example, in this case, since the context path is empty, the server URLs should be like:

+
+
+
+
export const environment = {
+production: false,
+restPathRoot: 'http://localhost:8081/',
+restServiceRoot: 'http://localhost:8081/services/rest/',
+security: 'jwt'
+};
+
+
+
+

Warning: REMEMBER to set the security field to jwt, if it is not configured already.

+
+
+
    +
  • +

    Open the command prompt and execute the below command from the base folder, which will download all the required libraries.

    +
  • +
+
+
+
+
devon yarn install
+
+
+
+

Run the below command for the front end.

+
+
+
+
devon ng serve
+
+
+
+

image44

+
+
+
    +
  • +

    If the command execution is successful, the below screen will appear and it would be automatically redirected to the url:

    +
    +
    +
    http://localhost:4200/login
    +
    +
    +
  • +
+
+
+

image45

+
+
+

WebApp Done

+
+
+
+

Ionic Mobile App

+
+
    +
  • +

    To generate the Ionic structure, download or clone *devon4ng-ionic-application-template* from

    +
    +
    +
    https://github.com/devonfw/devon4ng-ionic-application-template
    +
    +
    +
  • +
+
+
+

right click on EmployeeEto.java file present under the package com.devonfw.poc.employeemanagement.logic.api.to

+
+
+

For OpenAPI, Right click on the devonfw.yml as you already did before in order to use CobiGen.

+
+
+
    +
  • +

    Click on the selected options as seen in the screenshot:

    +
  • +
+
+
+

image46

+
+
+
    +
  • +

    Click on Finish

    +
  • +
  • +

    The entire ionic structure will be auto generated.

    +
  • +
+
+
+

image47

+
+
+
    +
  • +

    Change the server url (with correct serve url) in environment.ts, environment.prod.ts and environment.android.ts files (i.e: itapoc\devon4ng-ionic-application-template\src\environments\).

    +
  • +
+
+
+

The angular.json file inside the project has already a build configuration for android.

+
+
+

image48

+
+
+
    +
  • +

    Run npm install in the root folder to download the dependencies

    +
  • +
  • +

    Run ionic serve

    +
  • +
+
+
+

image49

+
+
+
    +
  1. +

    +
    +

    Once the execution is successful

    +
    +
  2. +
+
+
+

image50

+
+
+
    +
  • +

    Mobile App Done*

    +
  • +
+
+
+

So: well done

+
+
+

Starting from an Entity class you’ve successfully generated the Back-End layer (REST, SOAP, DTO, Spring services, Hibernate DAO), the Angular Web App and the Ionic mobile App!

+
+
+

image51

+
+
+
Build APK
+
+

Since we’re going to create an APK, remember the following pre-conditions:

+
+
+ +
+
+
    +
  1. +

    Now, open cmd and type the path where your devon4ng-ionic-application-template project is present.

    +
  2. +
  3. +

    Run the following commands:

    +
    +
      +
    1. +

      npx cap init

      +
    2. +
    3. +

      ionic build --configuration=android

      +
    4. +
    5. +

      npx cap add android

      +
    6. +
    7. +

      npx cap copy

      +
    8. +
    9. +

      npx cap open android

      +
    10. +
    +
    +
  4. +
  5. +

    Build the APK using Android studio.

    +
  6. +
+
+
+

image52 +image53 +image54 +image55

+
+
+

You can find your apk file in

+
+
+

/devon4ng-ionic-application-template/android/app/build/outputs/apk/debug

+
+
+
+
+
+
+
+

Adapt CobiGen_Templates

+
+
+

After following this tutorial, you will have the CobiGen_Templates downloaded on your local machine. To import these templates you need to do the following:

+
+
+

Right click in any part of the package explorer, then click on CobiGen → Adapt templates

+
+
+

image56

+
+
+

Click Ok:

+
+
+

image57

+
+
+

Now the CobiGen_Templates project will be automatically imported into your workspace, as shown on the image below:

+
+
+

image58

+
+
+

image59

+
+
+

Now you just need to change the Java version of the project to JRE 1.8. Right click on the JRE system library, and then on Properties:

+
+
+

image60

+
+
+

Now change the version to Java 11

+
+
+

Now you have successfully imported the CobiGen templates. If you want to edit them, you will find them in the folder src/main/templates. For instance, the Java templates are located here:

+
+
+

image62

+
+
+

Now you can adapt the templates as much as you want. Documentation about this can be found on:

+
+
+
+
https://github.com/devonfw/tools-cobigen/wiki/Guide-to-the-Reader
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/howto-devonfw-adapt_template.html b/docs/cobigen/1.0/howto-devonfw-adapt_template.html new file mode 100644 index 00000000..1e7b2319 --- /dev/null +++ b/docs/cobigen/1.0/howto-devonfw-adapt_template.html @@ -0,0 +1,332 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+
+ +
+

==Adapt Templates from CobiGen

+
+
+
+
+

Adapt CobiGen_Templates

+
+
+

After following this tutorial, you will have the CobiGen_Templates downloaded on your local machine. To import these templates you need to do the following:

+
+
+

Right click in any part of the package explorer, then click on CobiGen → Adapt templates

+
+
+

image56

+
+
+

Click OK:

+
+
+

image57

+
+
+

Now the CobiGen_Templates project will be automatically imported into your workspace, as shown on the image below:

+
+
+

image58

+
+
+

image59

+
+
+

Now you just need to change the Java version of the project to JRE 1.8. Right click on the JRE system library, and then on Properties:

+
+
+

image60

+
+
+

Now change the version to Java 1.8 +image61

+
+
+

Now you have successfully imported the CobiGen templates. If you want to edit them, you will find them in the folder src/main/templates. For instance, the Java templates are located here:

+
+
+

image62

+
+
+

Now you can adapt the templates as much as you want. Documentation about this can be found on:

+
+
+
+
https://github.com/devonfw/tools-cobigen/wiki/Guide-to-the-Reader
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/howto-devonfw-ide-CobiGen-PoC-E2E.html b/docs/cobigen/1.0/howto-devonfw-ide-CobiGen-PoC-E2E.html new file mode 100644 index 00000000..ee7fcd93 --- /dev/null +++ b/docs/cobigen/1.0/howto-devonfw-ide-CobiGen-PoC-E2E.html @@ -0,0 +1,946 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==End to End POC Code generation using Entity class +This article helps to create a sample application using cobigen.

+
+
+

Prerequisites

+
+
+

Download and install the devonfw IDE here.

+
+
+
+
+

Steps to create a Sample Project using Cobigen

+
+
+

The HOW_TO is divided in 2 parts:

+
+
+
    +
  1. +

    BE-Back End generator (DB + DAO + services) – CONTRACT FIRST APPROACH

    +
  2. +
  3. +

    FE-Front End generator (Web App Angular + Ionic App) – CONTRACT FIRST APPROACH

    +
  4. +
+
+
+

cobigen ionic code generation

+
+
+

So, ready to go! We’re going to start from the BE part …

+
+
+

Back End

+
+

run \devonfw-ide-scripts-3.2.4\eclipse-main.bat

+
+
+

It will open eclipse

+
+
+

create a project using below command from the command prompt

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

Import the project to eclipse as maven project +eclipse devon

+
+
+

Click FINISH

+
+
+

Now We have the following 4 projects.

+
+
+

eclipse package explorer

+
+
+

BEFORE starting to create an Entity class, remember to create the tables!

+
+
+
    +
  1. +

    Create a new SQL file (i.e: V0005__CreateTables-ItaPoc.sql) inside myapp-core and insert the following script:

    +
  2. +
+
+
+
+
CREATE TABLE EMPLOYEE (
+id BIGINT auto_increment, modificationCounter INTEGER NOT NULL,
+employeeid BIGINT auto_increment,
+name VARCHAR(255),
+surname VARCHAR(255),
+email VARCHAR(255),
+PRIMARY KEY (employeeid)
+);
+
+
+
+

WARNING: please note that there are 2 underscores in the name!

+
+
+

sql file

+
+
+
    +
  1. +

    Now create another SQL file (i.e: V0006__PopulateTables-ItaPoc.sql) and add following script about the INSERT in order to populate the table created before

    +
  2. +
+
+
+

WARNING: please note that there are 2 underscores in the name!

+
+
+
+
INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (1, 1, 1, 'Albert','Miller','albert.miller@capgemini.com');
+INSERT INTO  EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (2, 2, 2, 'Wills','Smith', 'wills.smith@capgemini.com');
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (3, 3, 3, 'Jaime','Thomas', 'jaime.thomas@capgemini.com');
+
+
+
+

sql insert

+
+
+

Let’s create the Entity Class for the code generation

+
+
+
    +
  1. +

    Create a package employeemanagement.dataaccess.api under the folder myapp-core. Note: It is important to follow this naming convention for CobiGen to work properly.

    +
  2. +
+
+
+

package

+
+
+
    +
  1. +

    Now create a JPA Entity class in this package

    +
  2. +
+
+
+
+
import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Column;
+@Entity
+@javax.persistence.Table(name = "EMPLOYEE")
+public class EmployeeEntity {
+  @Column(name = "EMPLOYEEID")
+  @GeneratedValue(strategy = GenerationType.IDENTITY)
+  private Long employeeId;
+  @Column(name = "NAME")
+  private String name;
+  @Column(name = "SURNAME")
+  private String surname;
+  @Column(name = "EMAIL")
+  private String email;
+}
+
+
+
+

then generate getters and setters for all attributes …

+
+
+
    +
  1. +

    Use Cobigen to generate code. Right click on EmployeeEntity. CobiGen → Generate

    +
  2. +
+
+
+

It will ask you to download the templates, click on update:

+
+
+

cobigen generate

+
+
+

It will automatically download the latest version of CobiGen_Templates.

+
+
+

Attention: If you want to adapt the CobiGen_Templates (normally this is not necessary), you will find at the end of this document a tutorial on how to import and adapt them!

+
+
+
    +
  1. +

    Click on all the option selected as below:

    +
  2. +
+
+
+

cobigen option selection

+
+
+
    +
  1. +

    Click on finish. Below Screen would be seen. Click on continue

    +
  2. +
+
+
+

cobigen finish

+
+
+

The entire BE layer structure with CRUD operation methods will be auto-generated.

+
+
+

Some classes will be generated on the api part (myapp-api), normally it will be interfaces, as shown below:

+
+
+

be layer

+
+
+

Some other classes will be generated on the core part (myapp-core), normally it will be implementations as shown below:

+
+
+

core folder

+
+
+

BEFORE to generate the FE, please start the Tomcat server to check that BE Layer has been generated properly.

+
+
+

To start the server you just have to right click on SpringBootApp.java → Run As → Spring Boot App

+
+
+

Eclipse run as

+
+
+

Spring boot run

+
+
+

Spring boot run

+
+
+

BE DONE

+
+
+

Last but not least: We make a quick REST services test !

+
+
+

See in the application.properties the TCP Port and the PATH

+
+
+

application properties

+
+
+

Now compose the Rest service URL:

+
+
+

<server>/<app>/services/rest/<rest service class path>/<service method path>

+
+
+
    +
  • +

    <server> refers to server with port no. (ie: localhost:8081)

    +
  • +
  • +

    <app> is in the application.properties (empty in our case, see above)

    +
  • +
  • +

    <rest service class path> refers to EmployeemanagementRestService: (i.e: /employeemanagement/v1)

    +
  • +
  • +

    <service method path>/employee/{id} (i.e: for getEmployee method)

    +
  • +
+
+
+

url mapping

+
+
+

URL of getEmployee for this example is:

+
+
+

for all employees

+
+
+
+
http://localhost:8081/services/rest/employeemanagement/v1/employee/search
+
+
+
+

for the specific employee

+
+
+
+
http://localhost:8081/services/rest/employeemanagement/v1/employee/1
+
+
+
+

Now download Postman to test the rest services.

+
+
+

Once done, you have to create a POST Request for the LOGIN and insert in the body the JSON containing the username and password waiter

+
+
+

postman

+
+
+

Once done with success (Status: 200 OK) …

+
+
+

… We create a NEW GET Request in order to get one employee

+
+
+

postman

+
+
+

Now you can click postman

+
+
+

Now you have to check that the response has Status: 200 OK and see the below Employee

+
+
+

postman

+
+
+

Now that we have successfully tested the BE, it is time to create the FE!

+
+
+
+

Front End

+
+

Let’s start now with angular Web and then Ionic app.

+
+
+

Angular Web App

+
+
    +
  1. +

    To generate angular structure, download or clone devon4ng-application-template from

    +
    +
    +
    https://github.com/devonfw/devon4ng-application-template
    +
    +
    +
  2. +
+
+
+

devon dist folder

+
+
+
    +
  1. +

    Once done, right click on EmployeeEto.java file present under the package com.devonfw.poc.employeemanagement.logic.api.to

    +
  2. +
+
+
+

eclipse generate

+
+
+
    +
  1. +

    Click on Finish

    +
  2. +
+
+
+

eclipse

+
+
+
    +
  1. +

    The entire ANGULAR structure has been auto generated. The generated code will be merged to the existing.

    +
  2. +
+
+
+

angular ee layer

+
+
+
    +
  1. +

    IMPORTANT now you have to add in the app-routing.module.ts file the next content, as a child of HomeComponent, in order to enable the route of the new generated component

    +
  2. +
+
+
+
+
,\{
+path: 'employee',
+component: EmployeeGridComponent,
+canActivate: [AuthGuard],
+},
+
+
+
+

The following picture explains where to place the above content:

+
+
+

routes

+
+
+
    +
  1. +

    Open the command prompt and execute devon yarn install from the base folder, which will download all the required libraries.

    +
  2. +
+
+
+
    +
  1. +

    Check the file environment.ts if the server path is correct. (for production you will have to change also the environment.prod.ts file)

    +
  2. +
+
+
+

environment

+
+
+

In order to do that it’s important to look at the application.properties to see values such as the PATH, TCP port, etc.

+
+
+

configure

+
+
+

For example, in this case, since the context path is empty, the server URLs should be like:

+
+
+
+
export const environment = {
+production: false,
+restPathRoot: 'http://localhost:8081/',
+restServiceRoot: 'http://localhost:8081/services/rest/',
+security: 'jwt'
+};
+
+
+
+

Warning: REMEMBER to set the security field to jwt, if it is not configured already.

+
+
+
    +
  1. +

    Now run the ng serve -o command to run the Angular Application.

    +
  2. +
+
+
+

image44

+
+
+
    +
  1. +

    If the command execution is successful, the below screen will appear and it would be automatically redirected to the url:

    +
    +
    +
    http://localhost:4200/login
    +
    +
    +
  2. +
+
+
+

image45

+
+
+

WebApp DONE

+
+
+
+

Ionic Mobile App

+
+
    +
  1. +

    To generate the Ionic structure, download or clone devon4ng-ionic-application-template from

    +
    +
    +
    https://github.com/devonfw/devon4ng-ionic-application-template
    +
    +
    +
  2. +
  3. +

    Once done, Right click on the EmployeeEto as you already did before in order to use CobiGen.

    +
  4. +
  5. +

    Click on the selected options as seen in the screenshot:

    +
  6. +
+
+
+

image46

+
+
+
    +
  1. +

    Click on Finish

    +
  2. +
  3. +

    The entire ionic structure will be auto generated.

    +
  4. +
+
+
+

image47

+
+
+
    +
  1. +

    Change the server url (with correct serve url) in environment.ts, environment.prod.ts and environment.android.ts files (i.e: itapoc\devon4ng-ionic-application-template\src\environments\).

    +
  2. +
+
+
+

The angular.json file inside the project has already a build configuration for android.

+
+
+

image48

+
+
+
    +
  1. +

    Run npm install in the root folder to download the dependencies

    +
  2. +
  3. +

    Run ionic serve

    +
  4. +
+
+
+

image49

+
+
+
    +
  1. +

    +
    +

    Once the execution is successful

    +
    +
  2. +
+
+
+

image50

+
+
+
    +
  • +

    Mobile App DONE*

    +
  • +
+
+
+

So: well done

+
+
+

Starting from an Entity class you’ve successfully generated the Back-End layer (REST, SOAP, DTO, Spring services, Hibernate DAO), the Angular Web App and the Ionic mobile App!

+
+
+

image51

+
+
+
Build APK
+
+

Since we’re going to create an APK, remember the following pre-conditions:

+
+
+ +
+
+
    +
  1. +

    Now, open cmd and type the path where your devon4ng-ionic-application-template project is present.

    +
  2. +
  3. +

    Run the following commands:

    +
    +
      +
    1. +

      npx cap init

      +
    2. +
    3. +

      ionic build --configuration=android

      +
    4. +
    5. +

      npx cap add android

      +
    6. +
    7. +

      npx cap copy

      +
    8. +
    9. +

      npx cap open android

      +
    10. +
    +
    +
  4. +
  5. +

    Build the APK using Android studio.

    +
  6. +
+
+
+

image52 +image53 +image54 +image55

+
+
+

You can find your apk file in

+
+
+

/devon4ng-ionic-application-template/android/app/build/outputs/apk/debug

+
+
+
+
+
+
+
+

Adapt CobiGen_Templates

+
+
+

After following this tutorial, you will have the CobiGen_Templates downloaded on your local machine. To import these templates you need to do the following:

+
+
+

Right click in any part of the package explorer, then click on CobiGen → Adapt templates

+
+
+

image56

+
+
+

Click Ok:

+
+
+

image57

+
+
+

Now the CobiGen_Templates project will be automatically imported into your workspace, as shown on the image below:

+
+
+

image58

+
+
+

image59

+
+
+

Now you just need to change the Java version of the project to JRE 1.8. Right click on the JRE system library, and then on Properties:

+
+
+

image60

+
+
+

Now change the version to Java 1.8 +image61

+
+
+

Now you have successfully imported the CobiGen templates. If you want to edit them, you will find them in the folder src/main/templates. For instance, the Java templates are located here:

+
+
+

image62

+
+
+

Now you can adapt the templates as much as you want. Documentation about this can be found on:

+
+
+
+
https://github.com/devonfw/tools-cobigen/wiki/Guide-to-the-Reader
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/howto_Cobigen-CLI-generation.html b/docs/cobigen/1.0/howto_Cobigen-CLI-generation.html new file mode 100644 index 00000000..49ace110 --- /dev/null +++ b/docs/cobigen/1.0/howto_Cobigen-CLI-generation.html @@ -0,0 +1,449 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==CobiGen CLI

+
+
+

The command line interface (CLI) for CobiGen enables the generation of code using few commands. This feature allows us to decouple CobiGen from Eclipse.

+
+
+

Install CobiGen CLI

+
+
+

In order to install the CobiGen CLI you will need to use the devonfw/ide. In a console run devon cobigen.

+
+
+
+
+

Commands and options

+
+
+

Using the following command and option you will be able to customize your generation as follows:

+
+
+
    +
  • +

    cobigen, cg: Main entry point of the CLI. If no arguments are passed, man page will be printed.

    +
  • +
  • +

    [generate, g]: Command used for code generation.

    +
    +
      +
    • +

      InputGlob: Glob pattern of the input file or the whole path of the input file from which the code will be generated.

      +
    • +
    • +

      < --increment, -i > : Specifies an increment ID to be generated. You can also search increments by name and CobiGen will output the resultant list. If an exact match found, code generation will happen.

      +
    • +
    • +

      < --template, -t > : specifies a template ID to be generated. You can also search templates by name and CobiGen will output the resultant list.

      +
    • +
    • +

      < --outputRootPath, -out >: The project file path in which you want to generate your code. If no output path is given, CobiGen will use the project of your input file.

      +
    • +
    +
    +
  • +
  • +

    [adapt-templates, a]: Generates a new templates folder next to the CobiGen CLI and stores its location inside a configuration file. After executing this command, the CLI will attempt to use the specified Templates folder.

    +
  • +
  • +

    < --verbose, -v > : Prints debug information, verbose log.

    +
  • +
  • +

    < --help, -h > : Prints man page.

    +
  • +
  • +

    < update, u> : This command compares the artificial pom plug-in versions with the latest versions available in Maven Central, and the user can update any outdated plug-in versions.

    +
  • +
+
+
+
+
+

CLI Execution steps:

+
+
+

CobiGen CLI is installed inside your devonfw distribution. In order to execute it follow the next steps:

+
+
+
    +
  1. +

    Run console.bat, this will open a console.

    +
  2. +
  3. +

    Execute cobigen or cg and the man page should be printed.

    +
  4. +
  5. +

    Use a valid CobiGen input file and run cobigen generate <pathToInputFile>. Note: On the first execution of the CLI, CobiGen will download all the needed dependencies, please be patient.

    +
  6. +
  7. +

    A list of increments will be printed so that you can start the generation.

    +
  8. +
+
+
+

Preview of the man page for generate command:

+
+
+
+Generation path +
+
+
+
+
+

Examples

+
+
+

A selection of commands that you can use with the CLI:

+
+
+
    +
  • +

    cobigen generate foo\bar\EmployeeEntity.java: As no output path has been defined, CobiGen will try to find the pom.xml of the current project in order to set the generation root path.

    +
  • +
  • +

    cobigen generate foo\bar\*.java --out other\project: Will retrieve all the Java files on that input folder and generate the code on the path specified by --out.

    +
  • +
  • +

    cg g foo\bar\webServices.yml --increment TO: Performs a string search using TO and will print the closest increments like in the following image:

    +
  • +
+
+
+
+Generation path +
+
+
+
    +
  • +

    cg g foo\bar\webServices.yml -i 1,4,6: Directly generates increments with IDs 1, 4 and 6. CobiGen will not request you any other input.

    +
  • +
  • +

    cg a: Downloads the latest CobiGen_Templates and unpacks them next to the CLI. CobiGen will from now on use these unpacked Templates for generation.

    +
  • +
  • +

    cg a -cl C:\my\custom\location: Downloads the latest CobiGen_Templates and unpacks them in C:\my\custom\location. CobiGen will from now on use these unpacked Templates for generation.

    +
  • +
+
+
+
+
+

CLI update command

+
+
+

Example of Update Command :

+
+
+
+Generation path +
+
+
+

Select the plug-ins which you want to update like below :

+
+
+
+Generation path +
+
+
+
+
+

CLI custom templates

+
+
+

To use custom templates, it’s necessary to set up a custom configuration path as described here.

+
+
+
+
+

Troubleshooting

+
+
+

When generating code from a Java file, CobiGen makes use of Java reflection for generating templates. In order to do that, the CLI needs to find the compiled source code of your project.

+
+
+

If you find an error like Compiled class foo\bar\EmployeeEntity.java has not been found, it means you need to run mvn clean install on the input project so that a new target folder gets created with the needed compiled sources.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/howto_EA-client-generation.html b/docs/cobigen/1.0/howto_EA-client-generation.html new file mode 100644 index 00000000..696fb275 --- /dev/null +++ b/docs/cobigen/1.0/howto_EA-client-generation.html @@ -0,0 +1,353 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

==Enterprise Architect client generation

+
+
+

We are going to show you how to generate source code from an Enterprise Architect diagram +using CobiGen.

+
+
+

Prerequisites

+
+
+

If CobiGen_Templates is not already imported into your workspace, follow the next steps:

+
+
+
    +
  • +

    Click on the Eclipse’s menu File > Import > Existing Projects into Workspace and browse to select the workspaces/main/CobiGen_Templates directory.

    +
  • +
  • +

    Click Finish and you should have the CobiGen_Templates as a new project in Eclipse’s workspace.

    +
  • +
+
+
+

Also verify that you have the latest templates of CobiGen. Your templates folder must contain the crud_java_ea_uml folder. +If you do not see it, please follow the next steps:

+
+
+
    +
  • +

    Download the accumulative patch.

    +
  • +
  • +

    Open the zip file and extract its content inside the root folder of your Devonfw distribution Devon-dist_2.4.0/

    +
  • +
+
+
+

After following those steps correctly, you should have the latest version of the templates ready to use.

+
+
+
+
+

Generation

+
+
+

In this tutorial, we are going to generate the entity infrastructure using as input a class diagram, modelled with Enterprise Architect (EA). First, create a class diagram, an example is shown on figure below:

+
+
+
+Eclipse CobiGen generation +
+
+
+

When you are finished, you will have to export that UML diagram into an XMI version 2.1 file. This is the file format that CobiGen understands. See below a figure showing this process:

+
+
+
+Eclipse CobiGen generation +
+
+
+

To open that window, see this tutorial.

+
+
+

After having that exported file, change its extension from xmi to xml. Then create an devon4j project and import the exported file into the core of your devon4j project.

+
+
+

Now we are going to start the generation, right-click your exported file and select CobiGen > Generate, finally select the entity infrastructure increment:

+
+
+
+Eclipse CobiGen generation +
+
+
+

After following all these steps, your generated files should be inside src\main\java folder. If you want an XMI example, you will find it here.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/howto_Release-creation.html b/docs/cobigen/1.0/howto_Release-creation.html new file mode 100644 index 00000000..973ffe02 --- /dev/null +++ b/docs/cobigen/1.0/howto_Release-creation.html @@ -0,0 +1,305 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==CobiGen Release creation +In this guide we explain how to create CobiGen related releases, i.e. release of a new core version using our useful release automation script.

+
+
+

Usage

+
+
+

Fire up a command prompt from the CobiGen IDE environment (using console.bat for example). Then, you will need to execute the following command:

+
+
+
+
python "<path_to_release_script_parent_folder>/create_release.py" -d -g devonfw/cobigen -r "<path_of_your_just_cloned_fork>" -k "yourcapgemini@mail.com" -c
+
+
+
+ + + + + +
+ + +The CobiGen development environment comes with all required python packages needed for the release script. However, if you encounter errors like no module named xyz found you might want to consider running the following command: +
+
+
+
+
python -m pip install -r "<path_to_release_script_parent_folder>/requirements.txt"
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/howto_angular-client-generation.html b/docs/cobigen/1.0/howto_angular-client-generation.html new file mode 100644 index 00000000..d5ddfffb --- /dev/null +++ b/docs/cobigen/1.0/howto_angular-client-generation.html @@ -0,0 +1,558 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Angular 8 Client Generation

+
+
+

The generation can create a full Angular 8 client using the devon4ng-application-template package located at workspaces/examples folder of the distribution. For more details about this package, please refer here.

+
+
+

Take into account that the TypeScript merging for CobiGen needs Node 6 or higher to be installed at your machine.

+
+
+ + + + + +
+ + +This is a short introduction to the Angular generation. For a deeper tutorial including the generation of the backend, we strongly recommend you to follow this document. +
+
+
+

Requisites

+
+
+

Install yarn globally:

+
+
+
+
npm install -g yarn
+
+
+
+
+
+

Angular 8 workspace

+
+
+

The output location of the generation can be defined editing the cobigen.properties file located at crud_angular_client_app/templates folder of the CobiGen_Templates project.

+
+
+
+`cobigen.properties file` +
+
+
+

By default, the output path would be into the devon4ng-application-template folder at the root of the devon4j project parent folder:

+
+
+
+
root/
+ |- devon4ng-application-template/
+ |- devon4j-project-parent/
+   |- core/
+   |- server/
+
+
+
+

However, this path can be changed, for example to src/main/client folder of the devon4j project:

+
+
+

relocate: ./src/main/client/${cwd}

+
+
+
+
root/
+ |- devon4j-project-parent/
+   |- core/
+      |- src
+        |- main
+          |- client
+   |- server/
+
+
+
+

Once the output path is chosen, copy the files of DEVON4NG-APPLICATION-TEMPLATE repository into this output path.

+
+
+
+
+

Install Node dependencies

+
+
+

Open a terminal into devon4ng-application-template copied and just run the command:

+
+
+
+
yarn
+
+
+
+

This will start the installation of all node packages needed by the project into the node_modules folder.

+
+
+
+
+

Generating

+
+
+

From an ETO object, right click, CobiGen → Generate will show the CobiGen wizard relative to client generation:

+
+
+
+CobiGen Client Generation Wizard +
+
+
+

Check all the increments relative to Angular:

+
+
+ + + + + +
+ + +
+

The Angular devon4j URL increment is only needed for the first generations however, checking it again on next generation will not cause any problem.

+
+
+
+
+

As we done on other generations, we click Next to choose which fields to include at the generation or simply clicking Finish will start the generation.

+
+
+
+CobiGen Client Generation Wizard 3 +
+
+
+
+
+

Routing

+
+
+

Due to the nature of the TypeScript merger, currently is not possible to merge properly the array of paths objects of the routings at app.routing.ts file so, this modification should be done by hand on this file. However, the import related to the new component generated is added.

+
+
+

This would be the generated app-routing.module file:

+
+
+
+
import { Routes, RouterModule } from '@angular/router';
+import { LoginComponent } from './login/login.component';
+import { AuthGuard } from './shared/security/auth-guard.service';
+import { InitialPageComponent } from './initial-page/initial-page.component';
+import { HomeComponent } from './home/home.component';
+import { SampleDataGridComponent } from './sampledata/sampledata-grid/sampledata-grid.component';
+//Routing array
+const appRoutes: Routes = [{
+    path: 'login',
+    component: LoginComponent
+}, {
+    path: 'home',
+    component: HomeComponent,
+    canActivate: [AuthGuard],
+    children: [{
+        path: '',
+        redirectTo: '/home/initialPage',
+        pathMatch: 'full',
+        canActivate: [AuthGuard]
+    }, {
+        path: 'initialPage',
+        component: InitialPageComponent,
+        canActivate: [AuthGuard]
+    }]
+}, {
+    path: '**',
+    redirectTo: '/login',
+    pathMatch: 'full'
+}];
+export const routing = RouterModule.forRoot(appRoutes);
+
+
+
+

Adding the following into the children object of home, will add into the side menu the entry for the component generated:

+
+
+
+
{
+    path: 'sampleData',
+    component: SampleDataGridComponent,
+    canActivate: [AuthGuard],
+}
+
+
+
+
+
import { Routes, RouterModule } from '@angular/router';
+import { LoginComponent } from './login/login.component';
+import { AuthGuard } from './shared/security/auth-guard.service';
+import { InitialPageComponent } from './initial-page/initial-page.component';
+import { HomeComponent } from './home/home.component';
+import { SampleDataGridComponent } from './sampledata/sampledata-grid/sampledata-grid.component';
+//Routing array
+const appRoutes: Routes = [{
+    path: 'login',
+    component: LoginComponent
+}, {
+    path: 'home',
+    component: HomeComponent,
+    canActivate: [AuthGuard],
+    children: [{
+        path: '',
+        redirectTo: '/home/initialPage',
+        pathMatch: 'full',
+        canActivate: [AuthGuard]
+    }, {
+        path: 'initialPage',
+        component: InitialPageComponent,
+        canActivate: [AuthGuard]
+    }, {
+        path: 'sampleData',
+        component: SampleDataGridComponent,
+        canActivate: [AuthGuard],
+    }]
+}, {
+    path: '**',
+    redirectTo: '/login',
+    pathMatch: 'full'
+}];
+export const routing = RouterModule.forRoot(appRoutes);
+
+
+
+
+`APP SideMenu` +
+
+
+
+
+

JWT Authentication

+
+
+

If you are using a backend server with JWT Authentication (there is a sample in workspaces/folder called sampleJwt) you have to specify the Angular application to use this kind of authentication.

+
+
+

By default the variable is set to CSRF but you can change it to JWT by going to the environment.ts and setting security: 'jwt'.

+
+
+
+
+

Running

+
+
+

First of all, run your devon4j java server by right clicking over SpringBootApp.java Run As → Java Application. This will start to run the SpringBoot server. Once you see the Started SpringBoot in XX seconds, the backend is running.

+
+
+
+Starting `SpringBoot` +
+
+
+

Once the server is running, open a Devon console at the output directory defined previously and run:

+
+
+
+
ng serve --open
+
+
+
+

This will run the Angular 8 application at:

+
+
+
+
http://localhost:4200
+
+
+
+
+Running Angular 8 app +
+
+
+

Once finished, the browser will open automatically at the previous localhost URL showing the Angular 8 application, using the credentials set at the devon4j java server you will be able to access.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/howto_create-a-new-plugin.html b/docs/cobigen/1.0/howto_create-a-new-plugin.html new file mode 100644 index 00000000..34ac1ba6 --- /dev/null +++ b/docs/cobigen/1.0/howto_create-a-new-plugin.html @@ -0,0 +1,615 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Implementing a new Plug-in

+
+
+

New plug-ins can implement an input reader, a merger, a matcher, a trigger interpreter, and/or a template engine as explained here.

+
+
+ + + + + +
+ + +
+

It is discouraged to have cobigen-core dependencies at runtime, except for cobigen-core-api which definitely must be present.

+
+
+
+
+

Plugin Activator

+
+
+

Each plug-in has to have a plug-in activator class implementing the interface GeneratorPluginActivator from the core-api. This class will be used to load the plug-in using the PluginRegistry as explained here. This class implements two methods:

+
+
+
    +
  1. +

    bindMerger() → returns a mapping of merge strategies and its implementation to be registered.

    +
  2. +
  3. +

    bindTriggerInterpreter()→ returns the trigger interpreters to be provided by this plug-in.

    +
  4. +
+
+
+

Both methods create and register instances of mergers and trigger interpreters to be provided by the new plug-in.

+
+
+
+
+

Adding Trigger Interpreter

+
+
+

The trigger interpreter has to implement the TriggerInterpreter interface from the core. The trigger interpreter defines the type for the new plugin and creates new InputReader and new Matcher objects.

+
+
+
+
+

Adding Input Reader

+
+
+

The input reader is responsible for reading the input object and parsing it into + FreeMarker models. The input reader must be implemented for the type of the + input file. If there is any existing plug-in that has the same file type as input, + there will be no need to add a new input reader to the new plug-in.

+
+
+

Input Reader Interface

+
+

The interface needed to add a new input reader is defined at the core. Each new +sub plug-in must implement this interface if an input reader is needed for it.

+
+
+

The interface implements the basic methods that an input reader must have, +but if additional methods are required, the developer must add a new interface +that extends the original interface `InputReader.java` from the core-api +and implement that on the sub plug-in.

+
+
+

The methods to be implemented by the input reader of the new sub plugin are:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
MethodReturn TypeDescription

isValidInput(Object input)

boolean

This function will be called if matching triggers or matching templates should be retrieved for a given input object.

createModel(Object input)

Map<String, Object>

This function should create the FreeMarker object model from the given input.

combinesMultipleInputObjects(Object input)

boolean

States whether the given input object combines multiple input objects to be used for generation.

getInputObjects(Object input, Charset inputCharset)

List<Object>

Will return the set of combined input objects if the given input combines multiple input objects.

getTemplateMethods(Object input)

Map<String, Object>

This method returns available template methods from the plugins as Map. If the plugin which corresponds to the input does not provide any template methods an empty Map will be returned.

getInputObjectsRecursively(Object input, Charset inputCharset)

List<Object>

Will return the set of combined input objects if the given input combines multiple input objects.

+
+
+

Model Constants

+
+

The Input reader will create a model for FreeMarker. A FreeMarker model must +have variables to use them at the .ftl template file. Refer to Java Model to see the FreeMarker model example for java input files.

+
+
+
+

Registering the Input Reader

+
+

The input reader is an object that can be retrieved using the correspondent get + method of the trigger interpreter object. The trigger interpreter object is + loaded at the eclipse plug-in using the load plug-in method explained + here. + That way, when the core needs the input reader, only needs to call that getInputReader method.

+
+
+
+
+
+

Adding Matcher

+
+
+

The matcher implements the MatcherInterpreter interface from the core-api. +It should be implemented for providing a new input matcher. Input matchers are +defined as part of a trigger and provide the ability to restrict specific +inputs to a set of templates. +This restriction is implemented with a MatcherType enum.

+
+
+

E.g JavaPlugin

+
+
+
+
private enum MatcherType {
+    /** Full Qualified Name Matching */
+    FQN,
+    /** Package Name Matching */
+    PACKAGE,
+    /** Expression interpretation */
+    EXPRESSION
+}
+
+
+
+

Furthermore, matchers may provide several variable assignments, which might be +dependent on any information of the matched input and thus should be resolvable +by the defined matcher.

+
+
+

E.g JavaPlugin

+
+
+
+
private enum VariableType {
+    /** Constant variable assignment */
+    CONSTANT,
+    /** Regular expression group assignment */
+    REGEX
+}
+
+
+
+
+
+

Adding Merger

+
+
+

The merger is responsible for merging the new output with the +existing data at the file if it already exists. It must implement the Merger +interface from the core-api. +The implementation of the Merger interface must override the following methods:

+
+ +++++ + + + + + + + + + + + + + + + + + + + +
MethodReturn TypeDescription

getType()

String

Returns the type, this merger should handle.

merge(File base, String patch, String targetCharset)

String

Merges the patch into the base file.

+
+

It is important to know that any exception caused by the merger must be thrown as a MergeException from the core-api so that the eclipse-plugin can handle it.

+
+
+
+
+

Changes since Eclipse / Maven 3.x

+
+
+

Since version 3.x the Eclipse and Maven plugins of CobiGen utilize the Java ServiceLoader mechanic to find and register plugins at runtime. To enable a new plugin to be discovered by this mechanic the following steps are needed:

+
+
+
    +
  • +

    create the file META-INF/services/com.devonfw.cobigen.api.extension.GeneratorPluginActivator containing just the full qualified name of the class implementing the GeneratorPluginActivator interface, if the plugin provides a Merger and/or a TriggerInterpreter

    +
  • +
  • +

    create the file META-INF/services/com.devonfw.cobigen.api.extension.TextTemplateEngine containing just the full qualified name of the class implementing the TextTemplateEngine interface, if provided by the plugin

    +
  • +
  • +

    include META-INF into the target bundle (i.e. the folder META-INF has to be present in the target jar file)

    +
  • +
+
+
+
+
Example: Java Plugin
+
+

The java plugin provides both a Merger and a TriggerInterpreter. It contains therefore a com.devonfw.cobigen.api.extension.GeneratorPluginActivator file with the following content:

+
+
+
+
com.devonfw.cobigen.javaplugin.JavaPluginActivator
+
+
+
+

This makes the JavaPluginActivator class discoverable by the ServiceLoader at runtime.

+
+
+
+
+
    +
  • +

    to properly include the plugin into the current system and use existing infrastructure, you need to add the plugin as a module in /cobigen/pom.xml (in case of a Merger/TriggerInterpreter providing plugin) and declare that as the plugin’s parent in its own pom.xml via

    +
  • +
+
+
+
+
<parent>
+    <groupId>com.devonfw</groupId>
+    <artifactId>cobigen-parent</artifactId>
+    <version>dev-SNAPSHOT</version>
+</parent>
+
+
+
+

or /cobigen/cobigen-templateengines/pom.xml (in case of a template engine providing plugin) and declare that as the plugin’s parent in its own pom.xml via

+
+
+
+
<parent>
+    <groupId>com.devonfw</groupId>
+    <artifactId>cobigen-tempeng-parent</artifactId>
+    <version>dev-SNAPSHOT</version>
+</parent>
+
+
+
+

If the plugin provides both just use the /cobigen/pom.xml.

+
+
+
    +
  • +

    The dependencies of the plugin are included in the bundle

    +
  • +
  • +

    To make the plugin available to the Eclipse plugin it must be included into the current compositeContent.xml and compositeArtifacts.xml files. Both files are located in https://github.com/devonfw/cobigen/tree/gh-pages/updatesite/{test|stable}. To do so, add a <child> entry to the <children> tag in both files and adapt the size attribute to match the new number of references. The location attribute of the new <child> tag needs to be the artifact id of the plugins pom.xml.

    +
  • +
+
+
+
+
Example: Java Plugin
+
+

In case of the Java plugin, the entry is

+
+
+
+
<child location="cobigen-javaplugin"/>
+
+
+
+
+
+

Deployment

+
+

If you want to create a test release of eclipse you need to run the command

+
+
+
+
sh deploy.sh
+
+
+
+

on the cloned CobiGen repository while making sure, that your current version of CobiGen cloned is a snapshot version. This will automatically be detected by the deploy script.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/howto_create-external-plugin.html b/docs/cobigen/1.0/howto_create-external-plugin.html new file mode 100644 index 00000000..4343b780 --- /dev/null +++ b/docs/cobigen/1.0/howto_create-external-plugin.html @@ -0,0 +1,896 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Introduction to CobiGen external plug-ins

+
+
+

Since September of 2019, a major change on CobiGen has taken place. CobiGen is written in Java code and previously, it was very hard for developers to create new plug-ins in other languages.

+
+
+

Creating a new plug-in means:

+
+
+
    +
  • +

    Being able to parse a file in that language.

    +
  • +
  • +

    Create a human readable model that can be used to generate templates (by retrieving properties from the model).

    +
  • +
  • +

    Enable merging files, so that user’s code does not get removed.

    +
  • +
+
+
+

For the Java plug-in it was relatively easy. As you are inside the Java world, you can use multiple utilities or libraries in order to get the AST or to merge Java code. With this new feature, we wanted that behaviour to be possible for any programming language.

+
+
+

General intuition

+
+
+

Below you will find a very high level description of how CobiGen worked in previous versions:

+
+
+
+Old CobiGen +
+
+
+

Basically, when a new input file was sent to CobiGen, it called the input reader to create a model of it (see here an example of a model). That model was sent to the template engine.

+
+
+

Afterwards, the template engine generated a new file which had to be merged with the original one. All this code was implemented in Java.

+
+
+

On the new version, we have implemented a handler (ExternalProcessHandler) which connects through TCP/IP connection to a server (normally on localhost:5000). This server can be implemented in any language (.Net, Node.js, Python…​) it just needs to implement a REST API defined here. The most important services are the input reading and merging:

+
+
+
+New CobiGen +
+
+
+

CobiGen acts as a client that sends requests to the server in order to read the input file and create a model. The model is returned to the template engine so that it generates a new file. Finally, it is sent back to get merged with the original file.

+
+
+
+
+

How to create new external plug-in

+
+
+

The creation of a new plug-in consists mainly in three steps:

+
+
+
    +
  • +

    Creation of the server (external process).

    +
  • +
  • +

    Creation of a CobiGen plug-in.

    +
  • +
  • +

    Creation of templates.

    +
  • +
+
+
+

Server (external process)

+
+

The server can be programmed in any language that is able to implement REST services endpoints. The API that needs to implement is defined with this contract. You can paste the content to https://editor.swagger.io/ for a better look.

+
+
+

We have already created a NestJS server that implements the API defined above. You can find the code here which you can use as an example.

+
+
+

As you can see, the endpoints have the following naming convention: processmanagement/todoplugin/nameOfService where you will have to change todo to your plug-in name (e.g. rustplugin, pyplugin, goplugin…​)

+
+
+

When implementing service getInputModel which returns a model from the input file there are only two restrictions:

+
+
+
    +
  • +

    A path key must be added. Its value can be the full path of the input file or just the file name. It is needed because in CobiGen there is a batch mode, in which you can have multiple input objects inside the same input file. You do not need to worry about batch mode for now.

    +
  • +
  • +

    On the root of your model, for each found key that is an object (defined with brackets [{}]), CobiGen will try to use it as an input object. For example, this could be a valid model:

    +
    +
    +
    {
    +  "path": "example/path/employee.entity.ts",
    +  "classes": [
    +    {
    +      "identifier": "Employee",
    +      "modifiers": [
    +        "export"
    +      ],
    +      "decorators": [
    +        {
    +          "identifier": {
    +            "name": "Entity",
    +            "module": "typeorm"
    +          },
    +          "isCallExpression": true
    +        }
    +      ],
    +      "properties": [
    +        {
    +          "identifier": "id",
    +    ...
    +    ...
    +    ...
    +    }]
    +    "interfaces": [{
    +        ...
    +    }]
    +}
    +
    +
    +
  • +
+
+
+

For this model, CobiGen would use as input objects all the classes and interfaces defined. On the templates we would be able to do model.classes[0].identifier to get the class name. These input objects depend on the language, therefore you can use any key.

+
+
+

In order to test the server, you will have to deploy it on your local machine (localhost), default port is 5000. If that port is already in use, you can deploy it on higher port values (5001, 5002…​). Nevertheless, we explain later the testing process as you need to complete the next step before.

+
+
+ + + + + +
+ + +Your server must accept one argument when running it. The argument will be the port number (as an integer). This will be used for CobiGen in order to handle blocked ports when deploying your server. Check this code to see how we implemented that argument on our NestJS server. +
+
+
+
+

CobiGen plug-in

+
+

You will have to create a new CobiGen plug-in that connects to the server. But do not worry, you will not have to implement anything new. We have a CobiGen plug-in template available, the only changes needed are renaming files and setting some properties on the pom.xml. Please follow these steps:

+
+
+
    +
  • +

    Get the CobiGen plug-in template from here. It is a template repository (new GitHub feature), so you can click on "Use this template" as shown below:

    +
    +
    +Plugin CobiGen template +
    +
    +
  • +
  • +

    Name your repo as cobigen-name-plugin where name can be python, rust, go…​ In our case we will create a nest plug-in. It will create a repo with only one commit which contains all the needed files.

    +
  • +
  • +

    Clone your newly created repo and import folder cobigen-todoplugin as a Maven project on any Java IDE, though we recommend devonfw ;)

    +
    +
    +Import plugin +
    +
    +
  • +
  • +

    Rename all the todoplugin folders, files and class names to nameplugin. In our case nestplugin. In Eclipse you can easily rename by right clicking and then refactor → rename:

    +
  • +
+
+
+
+Rename plugin +
+
+
+ + + + + +
+ + +We recommend you to select all the checkboxes +
+
+
+
+Rename checkbox +
+
+
+
    +
  • +

    Remember to change in src/main/java and src/test/java all the package, files and class names to use your plug-in name. The final result would be:

    +
    +
    +Package structure +
    +
    +
  • +
  • +

    Now we just need to change some strings, this is needed for CobiGen to register all the different plugins (they need unique names). In class TodoPluginActivator (in our case NestPluginActivator), change all the todo to your plug-in name. See below the 3 strings that need to be changed:

    +
    +
    +Plugin activator +
    +
    +
  • +
  • +

    Finally, we will change some properties from the pom.xml of the project. These properties define the server (external process) that is going to be used:

    +
    +
      +
    1. +

      Inside pom.xml, press Ctrl + F to perform a find and replace operation. Replace all todo with your plugin name:

      +
      +
      +Pom properties +
      +
      +
    2. +
    3. +

      We are going to explain the server properties:

      +
      +
        +
      1. +

        artifactId: This is the name of your plug-in, that will be used for a future release on Maven Central.

        +
      2. +
      3. +

        plugin.name: does not need to be changed as it uses the property from the artifactId. When connecting to the server, it will send a request to localhost:5000/{plugin.name}plugin/isConnectionReady, that is why it is important to use a unique name for the plug-in.

        +
      4. +
      5. +

        server.name: This defines how the server executable (.exe) file will be named. This .exe file contains all the needed resources for deploying the server. You can use any name you want.

        +
      6. +
      7. +

        server.version: You will specify here the server version that needs to be used. The .exe file will be named as {server.name}-{server.version}.exe.

        +
      8. +
      9. +

        server.url: This will define from where to download the server. We really recommend using NPM, which is a package manager we know works well. We explain here how to release the server on NPM. This will download the .exe file for Windows.

        +
      10. +
      11. +

        server.url.linux: Same as before, but this should download the .exe file for Linux systems. If you do not want to implement a Linux version of the plug-in, just use the same URL from Windows or MacOS.

        +
      12. +
      13. +

        server.url.macos: Same as before, but this should download the .exe file for MacOS systems. If you do not want to implement a MacOS version of the plug-in, just use the same URL from Linux or Windows.

        +
      14. +
      +
      +
    4. +
    +
    +
  • +
+
+
+
+
+
+

Testing phase

+
+
+

Now that you have finished with the implementation of the server and the creation of a new CobiGen plug-in, we are going to explain how you can test that everything works fine:

+
+
+
    +
  1. +

    Deploy the server on port 5000.

    +
  2. +
  3. +

    Run mvn clean test on the CobiGen-plugin or run the JUnit tests directly on Eclipse.

    +
    +
      +
    1. +

      If the server and the plug-in are working properly, some tests will pass and others will fail (we need to tweak them).

      +
    2. +
    3. +

      If every test fails, something is wrong in your code.

      +
    4. +
    +
    +
  4. +
  5. +

    In order to fix the failing tests, go to src/test/java. The failing tests make use of sample input files that we added for the sake of example:

    +
    +
    +Pom properties +
    +
    +
  6. +
+
+
+

Replace those files (on src/test/resources/testdata/unittest/files/…​) with the correct input files for your server.

+
+
+
+
+

Releasing

+
+
+

Now that you have already tested that everything works fine, we are going to explain how to release the server and the plug-in.

+
+
+

Release the server

+
+

We are going to use NPM to store the executable of our server. Even though NPM is a package manager for JavaScript, it can be used for our purpose.

+
+
+
    +
  • +

    Get the CobiGen server template from here. It is a template repository (new GitHub feature), so you can click on "Use this template" as shown below:

    +
    +
    +Server CobiGen template +
    +
    +
  • +
  • +

    Name your repo as cobigen-name-server where name can be python, rust, go…​ In our case we will create a nest server. It will create a repo with only one commit which contains all the needed files.

    +
  • +
  • +

    Clone your just created repo and go to folder cobigen-todo-server. It will just contain two files: ExternalProcessContract.yml is the OpenAPI definition which you can modify with your own server definition (this step is optional), and package.json is a file needed for NPM in order to define where to publish this package:

    +
    +
    +
    {
    +  "name": "@devonfw/cobigen-todo-server",
    +  "version": "1.0.0",
    +  "description": "Todo server to implement the input reader and merger for CobiGen",
    +  "author": "CobiGen Team",
    +  "license": "Apache"
    +}
    +
    +
    +
  • +
+
+
+

Those are the default properties. This would push a new package cobigen-todo-server on the devonfw organization, with version 1.0.0. We have no restrictions here, you can use any organization, though we always recommend devonfw.

+
+
+ + + + + +
+ + +Remember to change all the todo to your server name. +
+
+
+
    +
  • +

    Add your executable file into the cobigen-todo-server folder, just like below. As we said previously, this .exe is the server ready to be deployed.

    +
    +
    +
    cobigen-template-server/
    + |- cobigen-todo-server/
    +   |- ExternalProcessContract.yml
    +   |- package.json
    +   |- todoserver-1.0.0.exe
    +
    +
    +
  • +
  • +

    Finally, we have to publish to NPM. If you have never done it, you can follow this tutorial. Basically you need to login into NPM and run:

    +
    +
    +
    cd cobigen-todo-server/
    +npm publish --access=public
    +
    +
    +
  • +
+
+
+ + + + + +
+ + +To release Linux and MacOS versions of your plug-in, just add the suffix into the package name (e.g. @devonfw/cobigen-todo-server-linux) +
+
+
+

That’s it! You have published the first version of your server. Now you just need to modify the properties defined on the pom of your CobiGen plug-in. Please see next section for more information.

+
+
+
+

Releasing CobiGen plug-in

+
+
    +
  • +

    Change the pom.xml to define all the properties. You can see below a final example for nest:

    +
    +
    +
    ...
    +   <groupId>com.devonfw.cobigen</groupId>
    +   <artifactId>nestplugin</artifactId>
    +   <name>CobiGen - Nest Plug-in</name>
    +   <version>1.0.0</version>
    +   <packaging>jar</packaging>
    +   <description>CobiGen - nest Plug-in</description>
    +
    +   <properties>
    +      <!-- External server properties -->
    +      <plugin.name>${project.artifactId}</plugin.name>
    +      <server.name>nestserver</server.name>
    +      <server.version>1.0.0</server.version>
    +      <server.url>https\://registry.npmjs.org/@devonfw/cobigen-nest-server/-/cobigen-nest-server-${server.version}.tgz</server.url>
    +      <server.url.linux>https\://registry.npmjs.org/@devonfw/cobigen-nest-server-linux/-/cobigen-nest-server-linux-${server.version}.tgz</server.url.linux>
    +      <server.url.macos>https\://registry.npmjs.org/@devonfw/cobigen-nest-server-macos/-/cobigen-nest-server-macos-${server.version}.tgz</server.url.macos>
    +...
    +
    +
    +
  • +
  • +

    Deploy to Maven Central.

    +
  • +
+
+
+
+
+
+

Templates creation

+
+
+

After following above steps, we now have a CobiGen plug-in that connects to a server (external process) which reads your input files, returns a model and is able to merge files.

+
+
+

However, we need a key component for our plug-in to be useful. We need to define templates:

+
+
+
    +
  • +

    Fork our CobiGen main repository, from here and clone it into your PC. Stay in the master branch and import into your IDE cobigen-templates\templates-devon4j. Set the Java version of the project to 1.8 if needed.

    +
  • +
  • +

    Create a new folder on src/main/templates, this will contain all your templates. You can use any name, but please use underscores as separators. In our case, we created a folder crud_typescript_angular_client_app to generate an Angular client from a TypeORM entity (NodeJS entity).

    +
    +
    +Templates project +
    +
    +
  • +
  • +

    Inside your folder, create a templates folder. As you can see below, the folder structure of the generated files starts here (the sources). Also we need a configuration file templates.xml that should be on the same level as templates/ folder. For now, copy and paste a templates.xml file from any of the templates folder.

    +
    +
    +Templates project +
    +
    +
  • +
  • +

    Start creating your own templates. Our default templates language is Freemarker, but you can also use Velocity. Add the extension to the file (.ftl) and start developing templates! You can find useful documentation here.

    +
  • +
  • +

    After creating all the templates, you need to modify context.xml which is located on the root of src/main/templates. There you need to define a trigger, which is used for CobiGen to know when to trigger a plug-in. I recommend you to copy and paste the following trigger:

    +
    +
    +
      <trigger id="crud_typescript_angular_client_app" type="nest" templateFolder="crud_typescript_angular_client_app">
    +    <matcher type="fqn" value="([^\.]+).entity.ts">
    +      <variableAssignment type="regex" key="entityName" value="1"/>
    +      <variableAssignment type="regex" key="component" value="1"/>
    +      <variableAssignment type="constant" key="domain" value="demo"/>
    +    </matcher>
    +  </trigger>
    +
    +
    +
  • +
  • +

    Change templateFolder to your templates folder name. For id you can use any value, but it is recommended to use the same as the template folder name. type is the TRIGGER_TYPE we defined above on the NestPluginActivator class. On matcher just change the value: ([^\.]+).entity.ts means that we will only accept input files that contain anyString.entity.ts. This improves usability, so that users only generate using the correct input files. You will find more info about variableAssignment here.

    +
  • +
  • +

    Finally, it is time to configure templates.xml. It is needed for organizing templates into increments; please take a look into this documentation.

    +
  • +
+
+
+

Testing templates

+
+
    +
  • +

    When you have finished your templates you will want to test them. On the templates-devon4j pom.xml remove the SNAPSHOT from the version (in our case the version will be 3.1.8). Run mvn clean install -DskipTests on the project. We skip tests because you need special permissions to download artifacts from our Nexus. Remember the version that has just been installed:

    +
    +
    +Templates snapshot version +
    +
    +
  • +
+
+
+ + + + + +
+ + +We always recommend using the devonfw console, which already contains a working Maven version. +
+
+
+
    +
  • +

    Now we have your last version of the templates ready to be used. We need to use that latest version in CobiGen. We will use the CobiGen CLI that you will find in your cloned repo, at cobigen-cli/cli. Import the project into your IDE.

    +
  • +
  • +

    Inside the project, go to src/main/resources/pom.xml. This pom.xml is used on runtime in order to install all the CobiGen plug-ins and templates. Add there your latest templates version and the previously created plug-in:

    +
    +
    +CLI pom +
    +
    +
  • +
  • +

    Afterwards, run mvn clean install -DskipTests and CobiGen will get your plug-ins. Now you have three options to test templates:

    +
    +
      +
    1. +

      Using Eclipse run as:

      +
      +
        +
      1. +

        Inside Eclipse, you can run the CobiGen-CLI as a Java application. Right click class CobiGenCLI.java → run as → run configurations…​ and create a new Java application as shown below:

        +
        +
        +Create configuration +
        +
        +
      2. +
      3. +

        That will create a CobiGenCLI configuration where we can set arguments to the CLI. Let’s first begin with showing the CLI version, which should print a list of all plug-ins, including ours.

        +
        +
        +Run version +
        +
        +
        +
        +
         ...
        + name:= propertyplugin version = 2.0.0
        + name:= jsonplugin version = 2.0.0
        + name:= templates-devon4j version = 3.1.8
        + name:= nestplugin version = 1.0.0
        + ...
        +
        +
        +
      4. +
      5. +

        If that worked, now you can send any arguments to the CLI in order to generate with your templates. Please follow this guide that explains all the CLI commands.

        +
      6. +
      +
      +
    2. +
    3. +

      Modify the already present JUnit tests on the CLI project: They test the generation of templates from multiple plug-ins, you can add your own tests and input files.

      +
    4. +
    5. +

      Use the CLI jar to execute commands:

      +
      +
        +
      1. +

        The mvn clean install -DskipTests command will have created a Cobigen.jar inside your target folder (cobigen-cli/cli/target). Open the jar with any unzipper and extract to the current location class-loader-agent.jar, cobigen.bat and cg.bat:

        +
        +
        +Extract files +
        +
        +
      2. +
      3. +

        Now you can run any CobiGen CLI commands using a console. This guide explains all the CLI commands.

        +
        +
        +Run CLI +
        +
        +
      4. +
      +
      +
    6. +
    +
    +
  • +
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/howto_devon4net.html b/docs/cobigen/1.0/howto_devon4net.html new file mode 100644 index 00000000..588636a6 --- /dev/null +++ b/docs/cobigen/1.0/howto_devon4net.html @@ -0,0 +1,443 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4net CobiGen Guide

+
+
+

Overview

+
+

In this guide we will explain how to generate a new WebAPI project from an OpenAPI 3.0.0 specification. This means that we are going to use a “contract first” strategy. This is going to be possible due to these type of files that contain all the information about entities, operations, etc…

+
+
+

In order to make it work we are using CobiGen, a powerful tool for generating source code. CobiGen allows users to generate all the structure and code of the components, helping to save a lot of time otherwise wasted on repetitive tasks.

+
+
+
+

Getting things ready

+
+

devonfw-IDE

+
+

First, we will install the devonfw-IDE. It is a tool that will setup your IDE within minutes. Please follow the install guide here.

+
+
+
+

devon4net Templates

+
+

We are going to use the template of devon4net as a base to generate all the code, so what we have to do now is to download said template using the following steps.

+
+
+

First of all you have to set up all the environment for .NET, you can do this using the following tutorial. Next we are going to create a new folder where we want to have the WebAPI project, lastly we are going to open the terminal there.

+
+
+

Type the following:

+
+
+
+
`dotnet new -i Devon4Net.WebAPI.Template`
+
+
+
+

and then:

+
+
+
+
`dotnet new Devon4NetAPI`
+
+
+
+
+

OpenAPI File

+
+

In order to let CobiGen generate all the files, we first have to make some modifications to our OpenAPI file.

+
+
+

It is obligatory to put the “x-rootpackage” tag to indicate where CobiGen will place the generated files as well as the "x-component" tags for each component, keep in mind that due to CobiGen’s limitations each component must have its own entity.

+
+
+

You can read more information about how to configure your OpenAPI file and a working example here.

+
+
+
+
+

Generating files

+
+

CobiGen allows us to generate the files in two different ways. One of them is using Eclipse, which can be done through its graphical interface. The other way to generate the code is using the CobiGen CLI tool.

+
+
+

Generating files through Eclipse

+
+

In order to generate the files using Eclipse we need to follow some simple steps.

+
+
+

First we are going to import our basic devon4net WebAPI Project into Eclipse. to do so open Eclipse with the “eclipse-main.bat” file that can be found in the devon distribution root folder. Once we are inside of Eclipse we go to File > Open projects from file system…​ and, under "Directory", search for your project.

+
+
+
+CobiGen +
+
+
+

Next we copy our OpenAPI file into the root folder of the project.

+
+
+
+CobiGen +
+
+
+

And then we right click on OpenAPI file and then select CobiGen > Generate…​ It will display a window like this:

+
+
+
+CobiGen +
+
+
+

To select all .NET features choose CRUD devon4net Server otherwise you can select only those that interest you.

+
+
+
+CobiGen +
+
+
+

Once you select all the files that you want to generate, click on the “Finish” button to generate all the source code.

+
+
+
+

Generating files through CobiGen CLI

+
+

In order to generate the files using the CobiGen CLI it is needed to do the following steps:

+
+
+
    +
  1. +

    Go to devonfw distribution folder

    +
  2. +
  3. +

    Run console.bat, this will open a console.

    +
  4. +
  5. +

    Go to the folder you downloaded the devon4net template and your yml file.

    +
  6. +
  7. +

    Run the command:

    +
    +
    +
    `cobigen generate {yourOpenAPIFile}.yml`
    +
    +
    +
  8. +
  9. +

    A list of increments will be printed so that you can start the generation. The CRUD devon4net Server increment has to be selected.

    +
  10. +
+
+
+
+
+

Configuration

+
+

Data base

+
+

CobiGen generates an empty context that has to be filled in manually in order to be able to work with the database. The context can be found in [Project_Name]/Devon4Net.WebAPI.Implementation/Domain/Database/CobigenContext.cs.

+
+
+
+CobiGen +
+
+
+
+

Run the application

+
+

After the configuration of the database, open a terminal in path: [Project_Name]/Devon4Net.Application.WebAPI and then type:

+
+
+
+
`dotnet run`
+
+
+
+

This will deploy our application on localhost on port 8082, so when you open https://localhost:8082/swagger you can see, in Swagger, all the services and the data model.

+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/howto_enable_composite_primary_keys_in_entity.html b/docs/cobigen/1.0/howto_enable_composite_primary_keys_in_entity.html new file mode 100644 index 00000000..e1f7a83c --- /dev/null +++ b/docs/cobigen/1.0/howto_enable_composite_primary_keys_in_entity.html @@ -0,0 +1,346 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Enable Composite Primary Keys in Entity

+
+
+

In order to enable Composite Primary Keys in an entity in CobiGen, the approach below is suggested.

+
+
+

The templates in CobiGen have been enhanced to support Composite primary keys while still supporting the default devonfw/Cobigen values with Long id.

+
+
+

Also, the current generation from Entity still holds good - right click from an Entity object, CobiGen → Generate will show the CobiGen wizard relative to the entity generation.

+
+
+

After generating, below example shows how composite primary keys can be enabled.

+
+
+
+
@Entity
+@Table(name = "employee")
+public class EmployeeEntity {
+	private CompositeEmployeeKey id;
+	private String name;
+	private String lastName;
+	@Override
+	@EmbeddedId
+	public CompositeEmployeeKey getId() {
+		return id;
+	}
+	@Override
+	public void setId(CompositeEmployeeKey id) {
+		this.id = id;
+	}
+	.
+	.
+	.
+
+
+
+
+
public class CompositeEmployeeKey implements Serializable {
+  private String companyId;
+  private String employeeId;
+
+
+
+

Once the generation is complete, implement PersistenceEntity<ID>.java in the EmployeeEntity and pass the composite primary key object which is CompositeEmployeeKey in this case as the parameter ID.

+
+
+
+
import com.devonfw.module.basic.common.api.entity.PersistenceEntity;
+@Entity
+@Table(name = "employee")
+public class EmployeeEntity implements PersistenceEntity<CompositeEmployeeKey> {
+	private CompositeEmployeeKey id;
+	private String name;
+	private String lastName;
+
+
+
+

Also, the modificationCounter methods need to be implemented from the interface PersistenceEntity<ID>. A sample implementation of the modification counter can be found below.

+
+
+
+
@Override
+  public int getModificationCounter() {
+    if (this.persistentEntity != null) {
+      // JPA implementations will update modification counter only after the transaction has been committed.
+      // Conversion will typically happen before and would result in the wrong (old) modification counter.
+      // Therefore we update the modification counter here (that has to be called before serialization takes
+      // place).
+      this.modificationCounter = this.persistentEntity.getModificationCounter();
+    }
+    return this.modificationCounter;
+  }
+  @Override
+  public void setModificationCounter(int version) {
+    this.modificationCounter = version;
+  }
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/howto_ionic-client-generation.html b/docs/cobigen/1.0/howto_ionic-client-generation.html new file mode 100644 index 00000000..089478eb --- /dev/null +++ b/docs/cobigen/1.0/howto_ionic-client-generation.html @@ -0,0 +1,520 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Ionic client generation

+
+
+

We are going to show you how to generate a CRUD Ionic application from an ETO +using CobiGen.

+
+
+ + + + + +
+ + +This is a short introduction to the Ionic generation. For a deeper tutorial including the generation of the backend, we strongly recommend you to follow this document. +
+
+
+

Prerequisites

+
+
+

Before starting, make sure you already have in your computer:

+
+
+
    +
  • +

    Ionic: by following the steps defined on that page. +It includes installing:

    +
    +
      +
    • +

      NodeJS: We have to use "NPM" for downloading packages.

      +
    • +
    • +

      Ionic CLI.

      +
    • +
    +
    +
  • +
  • +

    Capacitor: Necessary to access to native device features.

    +
  • +
+
+
+

If CobiGen_Templates are not already downloaded, follow the next steps:

+
+
+
    +
  • +

    Right click on any file of your workspace CobiGen > Update Templates and now you are able to start the generation.

    +
  • +
  • +

    If you want to adapt them, click Adapt Templates and you should have the CobiGen_Templates as a new project in Eclipse’s workspace.

    +
  • +
+
+
+

After following those steps correctly, you should have the latest version of the templates ready to use.

+
+
+
+
+

Generation

+
+
+

We are going to generate the CRUD into a sample application that we have developed for +testing this functionality. It is present on your workspaces/examples folder (devon4ng-ionic-application-template). If you do not see it, you can clone or download it from here.

+
+
+

After having that sample app, please create a devon4j project and then start implementing the ETO: You will find an example here.

+
+
+

As you can see, TableEto contains 3 attributes: 2 of them are Long and the third one TableState is an enum that you will find +here. +The Ionic generation works fine for any Java primitive attribute (Strings, floats, chars, boolean…​) and enums. However, if you want to use your own objects, you should +override the toString() method, as explained here.

+
+
+

The attributes explained above will be used for generating a page that shows a list. Each item of that list +will show the values of those attributes.

+
+
+

For generating the files:

+
+
+
    +
  • +

    Right click your ETO file and click on CobiGen > Generate as shown on the figure below.

    +
  • +
+
+
+
+Eclipse CobiGen generation +
+
+
+
    +
  • +

    Select the Ionic increments for generating as shown below. Increments group a set of templates for generating +different projects.

    +
    +
      +
    1. +

      Ionic List used for generating the page containing the list.

      +
    2. +
    3. +

      Ionic devon4ng environments is for stating the server path.

      +
    4. +
    5. +

      Ionic i18n used for generating the different language translations for the `translationService` (currently English and Spanish).

      +
    6. +
    7. +

      Ionic routing adds an app-routing.module.ts file to allow navigation similar to the one available in Angular.

      +
    8. +
    9. +

      Ionic theme generates the variables.scss file which contains variables to style the application.

      +
    10. +
    +
    +
  • +
+
+
+
+CobiGen Ionic Wizard +
+
+
+ + + + + +
+ + +By default, the generated files will be placed inside "devon4ng-ionic-application-template", next to the root of your project’s folder. +See the image below to know where they are generated. For changing the generation path and the name of the application go to CobiGen_Templates/crud_ionic_client_app/cobigen.properties. +
+
+
+
+Generation path +
+
+
+

Now that we have generated the files, let’s start testing them:

+
+
+
    +
  • +

    First change the SERVER_URL of your application. For doing that, modify src/environments/environments.ts, also modify src/environments/environments.android.ts (android) and src/environments/environments.prod.ts (production) if you want to test in different environments.

    +
  • +
  • +

    Check that there are no duplicated imports. Sometimes there are duplicated imports in src/app/app.module.ts. +This happens because the merger of CobiGen prefers to duplicate rather than to delete.

    +
  • +
  • +

    Run npm install to install all the required dependencies.

    +
  • +
  • +

    Run `ionic serve` on your console.

    +
  • +
+
+
+

After following all these steps your application should start. However, remember that you will need your server to be running to access the list page.

+
+
+
+
+

Running it on Android

+
+
+

To run the application in an android emulated device, it is necessary to have Android Studio and Android SDK. After its installation, the following commands have to be run on your console:

+
+
+
    +
  • +

    npx cap init "name-for-the-app (between quotes)" "id-for-the-app (between quotes)"

    +
  • +
  • +

    ionic build --configuration=android. To use this command, you must add an android build configuration at angular.json

    +
  • +
+
+
+
+
    "build": {
+      ...
+      "configurations": {
+        ...
+        "android": {
+          "fileReplacements": [
+            {
+              "replace": "src/environments/environment.ts",
+              "with": "src/environments/environment.android.ts"
+            }
+          ]
+        },
+      }
+    }
+
+
+
+
    +
  • +

    npx cap add android

    +
  • +
  • +

    npx cap copy

    +
  • +
  • +

    npx cap open android

    +
  • +
+
+
+

The last steps are done in Android Studio: make the project, make the app, build an APK and run it on a device.

+
+
+
+Click on make project +
+
+
+
+click on make app +
+
+
+
+click on build APK +
+
+
+
+click on running device +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/howto_update_CobiGen.html b/docs/cobigen/1.0/howto_update_CobiGen.html new file mode 100644 index 00000000..b1420886 --- /dev/null +++ b/docs/cobigen/1.0/howto_update_CobiGen.html @@ -0,0 +1,366 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

How to update CobiGen

+
+
+

In order to update CobiGen from our devonfw distribution, we have two options:

+
+
+
    +
  • +

    Open Eclipse, click on Help → Check for updates

    +
  • +
+
+
+
+Check updates +
+
+
+
    +
  • +

    Select all the CobiGen plugins listed and click on Next.

    +
  • +
+
+
+
+All the updates +
+
+
+

If this option is not working properly, then you can try the second option:

+
+
+
    +
  • +

    Open Eclipse, click on Help → About Eclipse IDE:

    +
  • +
+
+
+
+About Eclipse +
+
+
+
    +
  • +

    Click on Installation details:

    +
  • +
+
+
+
+Installation details +
+
+
+
    +
  • +

    Select all the CobiGen plugins and click on Update:

    +
  • +
+
+
+
+All updates details +
+
+
+

After the update process finishes, remember to restart Eclipse.

+
+
+

Updating templates:

+
+
+

To update your CobiGen templates to the latest version, you just need to do one step:

+
+
+
    +
  • +

    Right click any file on your package explorer, click on CobiGen → Update templates, then click on download:

    +
  • +
+
+
+
+Update templates +
+
+
+

Now you will have the latest templates ready!

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/index.html b/docs/cobigen/1.0/index.html new file mode 100644 index 00000000..ec347c64 --- /dev/null +++ b/docs/cobigen/1.0/index.html @@ -0,0 +1,401 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

CobiGen - Code-based incremental Generator

+
+ +
+
+
+

Overview

+
+
+

CobiGen is a generic incremental generator for end to end code generation tasks, mostly used in Java projects. +Due to a template-based approach, CobiGen generates any set of text-based documents and document fragments.

+
+
+

Input (currently):

+
+
+
    +
  • +

    Java classes

    +
  • +
  • +

    XML-based files

    +
  • +
  • +

    OpenAPI documents

    +
  • +
  • +

    Possibly more inputs like WSDL, which is currently not implemented.

    +
  • +
+
+
+

Output:

+
+
+
    +
  • +

    any text-based document or document fragments specified by templates

    +
  • +
+
+
+
+
+

Architecture

+
+
+

CobiGen is built as an extensible framework for incremental code generation. It provides extension points for new input readers which allow reading new input types and converting them to an internally processed model. The model is used to process templates of different kinds to generate patches. The template processing will be done by different template engines. There is an extension point for template engines to support multiple ones as well. Finally, the patch will be structurally merged into potentially already existing code. To allow structural merge on different programming languages, the extension point for structural mergers has been introduced. Here you will see an overview of the currently available extension points and plug-ins:

+
+
+
+
+

Features and Characteristics

+
+
+
    +
  • +

    Generate fresh files across all the layers of an application - ready to run.

    +
  • +
  • +

    Add on to existing files merging code into it. E.g. generate new methods into existing java classes or adding nodes to an XML file. Merging of contents into existing files will be done using structural merge mechanisms.

    +
  • +
  • +

    Structural merge mechanisms are currently implemented for Java, XML, Java Property Syntax, JSON, Basic HTML, Text Append, TypeScript.

    +
  • +
  • +

    Conflicts can be resolved individually but automatically by former configuration for each template.

    +
  • +
  • +

    CobiGen provides an Eclipse integration as well as a Maven Integration.

    +
  • +
  • +

    CobiGen comes with an extensive documentation for users and developers.

    +
  • +
  • +

    Templates can be fully tailored to project needs - this is considered as a simple task.

    +
  • +
+
+
+
+
+

Selection of current and past CobiGen applications

+
+
+

General applications:

+
+
+
    +
  • +

    Generation of a Java CRUD application based on devonfw architecture including all software-layers on the server plus code for JS-clients (Angular). You can find details here.

    +
  • +
  • +

    Generation of a Java CRUD application according to the Register Factory architecture. Persistence entities are the input for generation.

    +
  • +
  • +

    Generation of builder classes for generating test data for JUnit-Tests. Input are the persistence entities.

    +
  • +
  • +

    Generation of an EXT JS 6 client with full CRUD operations connected to a devon4j server.

    +
  • +
  • +

    Generation of an Angular 6 client with full CRUD operations connected to a devon4j server.

    +
  • +
+
+
+

Project-specific applications in the past:

+
+
+
    +
  • +

    Generation of an additional Java type hierarchy on top of existing Java classes in combination with additional methods to be integrated in the modified classes. Hibernate entities were considered as input as well as output of the generation. The rational in this case, was to generate an additional business object hierarchy on top of an existing data model for efficient business processing.

    +
  • +
  • +

    Generation of hash- and equals-methods as well as copy constructors depending on the field types of the input Java class. Furthermore, CobiGen is able to re-generate these methods/constructors triggered by the user, i.e, when fields have been changed.

    +
  • +
  • +

    Extraction of JavaDoc of test classes and their methods for generating a csv test documentation. This test documentation has been further processed manually in Excel to provide a good overview about the currently available tests in the software system, which enables further human analysis.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/master-cobigen.html b/docs/cobigen/1.0/master-cobigen.html new file mode 100644 index 00000000..0f0a2b5a --- /dev/null +++ b/docs/cobigen/1.0/master-cobigen.html @@ -0,0 +1,7351 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==CobiGen — Code-based incremental Generator +:title-logo-image: images/logo/cobigen_logo.png

+
+
+

Document Description

+
+
+

This document contains the documentation of the CobiGen core module as well as all CobiGen plug-ins and the CobiGen eclipse integration.

+
+
+ + + + + +
+ + +
+

DISCLAIMER: All CobiGen plugins are compatible with the latest release of Devonfw unless otherwise denoted.

+
+
+
+
+

Current versions:

+
+
+
    +
  • +

    CobiGen - Eclipse Plug-in v7.1.0

    +
  • +
  • +

    CobiGen - Maven Build Plug-in v7.1.0

    +
  • +
  • +

    CobiGen CLI v1.2.0

    +
  • +
+
+
+
+
    +
  • +

    CobiGen v7.1.0

    +
  • +
  • +

    CobiGen - Java Plug-in v7.1.0

    +
  • +
  • +

    CobiGen - XML Plug-in v7.0.0

    +
  • +
  • +

    CobiGen - TypeScript Plug-in v7.1.0

    +
  • +
  • +

    CobiGen - Property Plug-in v7.1.0

    +
  • +
  • +

    CobiGen - Text Merger v7.1.1

    +
  • +
  • +

    CobiGen - JSON Plug-in v7.0.0

    +
  • +
  • +

    CobiGen - HTML Plug-in v7.0.0

    +
  • +
  • +

    CobiGen - Open API Plug-in v7.1.0

    +
  • +
  • +

    CobiGen - FreeMarker Template Engine v7.0.0

    +
  • +
  • +

    CobiGen - Velocity Template Engine v7.0.0

    +
  • +
+
+
+

Authors:

+
+
+
+
* Malte Brunnlieb
+* Jaime Diaz Gonzalez
+* Steffen Holzer
+* Ruben Diaz Martinez
+* Joerg Hohwiller
+* Fabian Kreis
+* Lukas Goerlach
+* Krati Shah
+* Christian Richter
+* Erik Grüner
+* Mike Schumacher
+* Marco Rose
+* Mohamed Ghanmi
+
+
+
+

==Guide to the Reader

+
+
+

Dependent on the intention you are reading this document, you might be most interested in the following chapters:

+
+
+
    +
  • +

    If this is your first contact with CobiGen, you will be interested in the general purpose of CobiGen, in the licensing of CobiGen, as well as in the Shared Service provided for CobiGen. Additionally, there are some general use cases, which are currently implemented and maintained to be used out of the box.

    +
  • +
  • +

    As a user of the CobiGen Eclipse integration, you should focus on the Installation and Usage chapters to get a good introduction about how to use CobiGen in eclipse.

    +
  • +
  • +

    As a user of the Maven integration, you should focus on the Maven configuration chapter, which guides you through the integration of CobiGen into your build configuration.

    +
  • +
  • +

    If you like to adapt the configuration of CobiGen, you have to step deeper into the configuration guide as well as into the plug-in configuration extensions for the Java Plug-in, XML-Plugin, Java Property Plug-in, as well as for the Text-Merger Plug-in.

    +
  • +
  • +

    Finally, if you want to develop your own templates, you will be thankful for helpful links in addition to the plug-ins documentation as referenced in the previous point.

    +
  • +
+
+
+

Unresolved include directive in modules/ROOT/pages/master-cobigen.adoc - include::Home.adoc[]

+
+ +
+

==General use cases

+
+
+

In addition to the selection of CobiGen applications introduced before, this chapter provides a more detailed overview about the currently implemented and maintained general use cases. These can be used by any project following a supported reference architecture as e.g. the devonfw or Register Factory.

+
+
+
devon4j
+
+

With our templates for devon4j, you can generate a whole CRUD application from a single Entity class. You save the effort of creating DAOs, Transfer Objects, and simple CRUD use cases with REST services, and even the client application can be generated.

+
+
+
CRUD server application for devon4j
+
+

For the server, the required files for all architectural layers (Data access, logic, and service layer) can be created based on your Entity class. After the generation, you have CRUD functionality for the entity from bottom to top which can be accessed via a RESTful web service. Details are provided in the devonfw wiki.

+
+
+
+
CRUD client application for devon4ng
+
+

Based on the REST services on the server, you can also generate an Angular client based on devon4ng. With the help of Node.js, you have a working client application for displaying your entities within minutes!

+
+
+
+
Test data Builder for devon4j
+
+

Generating a builder pattern for POJOs to easily create test data in your tests. CobiGen is not only able to generate a plain builder pattern but rather builders, which follow a specific concept to minimize test data generation efforts in your unit tests. Take the following Person class as an example:

+
+
+
Person class
+
+
public class Person {
+
+    private String firstname;
+    private String lastname;
+    private int birthyear;
+    @NotNull
+    private Address address;
+
+    @NotNull
+    public String getFirstname() {
+        return this.firstname;
+    }
+
+    // additional default setter and getter
+}
+
+
+
+

It is a simple POJO with a validation annotation, to indicate, that firstname should never be null. Creating this object in a test would imply calling every setter, which is kind of nasty. Therefore, the Builder Pattern has been introduced for quite a long time in software engineering, allowing to easily create POJOs with a fluent API. See below.

+
+
+
Builder pattern example
+
+
Person person = new PersonBuilder()
+                .firstname("Heinz")
+                .lastname("Erhardt")
+                .birthyear(1909)
+                .address(
+                    new AddressBuilder().postcode("22222")
+                        .city("Hamburg").street("Luebecker Str. 123")
+                        .createNew())
+                .addChild(
+                    new PersonBuilder()[...].createNew()).createNew();
+
+
+
+

The Builder API generated by CobiGen allows you to set any setter-accessible field of a POJO in a fluent way. But in addition, let’s assume a test, which should check the birth year as a precondition for any business operation. So specifying all other fields of Person, especially firstname as it is mandatory to enter business code, would not make sense. The test behavior should just depend on the specification of the birth year and on no other data. So we would like to just provide this data to the test.

+
+
+

The Builder classes generated by CobiGen try to tackle this inconvenience by providing the ability to declare default values for any mandatory field due to validation or database constraints.

+
+
+
Builder Outline
+
+
public class PersonBuilder {
+
+    private void fillMandatoryFields() {
+        firstname("lasdjfaöskdlfja");
+        address(new AddressBuilder().createNew());
+    };
+    private void fillMandatoryFields_custom() {...};
+
+    public PersonBuilder firstname(String value);
+    public PersonBuilder lastname(String value);
+    ...
+
+    public Person createNew();
+    public Person persist(EntityManager em);
+    public List<Person> persistAndDuplicate(EntityManager em, int count);
+}
+
+
+
+

Looking at the plotted builder API generated by CobiGen, you will find two private methods. The method fillMandatoryFields will be generated by CobiGen and regenerated every time CobiGen generation will be triggered for the Person class. This method will set every automatically detected field with not null constraints to a default value. However, by implementing fillMandatoryFields_custom on your own, you can reset these values or even specify more default values for any other field of the object. Thus, running new PersonBuilder().birthyear(1909).createNew(); will create a valid object of Person, which is already pre-filled such that it does not influence the test execution besides the fact that it circumvents database and validation issues.

+
+
+

This even holds for complex data structures as indicated by address(new AddressBuilder().createNew());. Due to the use of the AddressBuilder for setting the default value for the field address, also the default values for Address will be set automatically.

+
+
+

Finally, the builder API provides different methods to create new objects.

+
+
+
    +
  • +

    createNew() just creates a new object from the builder specification and returns it.

    +
  • +
  • +

    persist(EntityManager) will create a new object from the builder specification and persists it to the database.

    +
  • +
  • +

    persistAndDuplicate(EntityManager, int) will create the given number of objects from the builder specification and persists all of these. After the initial generation of each builder, you might want to adapt the method body as you will most probably not be able to persist more than one object with the same field assignments to the database due to unique constraints. Thus, please see the generated comment in the method to adapt unique fields accordingly before persisting to the database.

    +
  • +
+
+
+Custom Builder for Business Needs +
+

CobiGen just generates basic builders for any POJO. However, for project needs you probably would like to have even more complex builders, which enable the easy generation of more complex test data which are encoded in a large object hierarchy. Therefore, the generated builders can just be seen as a tool to achieve this. You can define your own business driven builders in the same way as the generated builders, but explicitly focusing on your business needs. Just take this example as a demonstration of that idea:

+
+
+
+
  University uni = new ComplexUniversityBuilder()
+    .withStudents(200)
+    .withProfessors(4)
+    .withExternalStudent()
+    .createNew();
+
+
+
+

E.g. the method withExternalStudent() might create a person, which is a student and is flagged to be an external student. Basing this implementation on the generated builders will even assure that you would benefit from any default values you have set before. In addition, you can even imagine any more complex builder methods setting values driven by your reusable testing needs based on the specific business knowledge.

+
+
+
+
+
+
Register Factory
+
+
CRUD server application
+
+

Generates a CRUD application with persistence entities as inputs. This includes DAOs, TOs, use cases, as well as a CRUD JSF user interface if needed.

+
+
+
+
Test data Builder
+ +
+
+
Test documentation
+
+

Generate test documentation from test classes. The input are the doclet tags of several test classes, which e.g. can specify a description, a cross-reference, or a test target description. The result currently is a csv file, which lists all tests with the corresponding meta-information. Afterwards, this file might be styled and passed to the customer if needed and it will be up-to-date every time!

+
+
+
+
+
+
+

CobiGen

+
+ +
+

Configuration

+
+
+

CobiGen is maintaining a home directory further referenced in this documentation as $cghome, which is used to maintain temporary or transient data. The home folder is determined with the following location fall-back:

+
+
+
    +
  1. +

    System environment variable COBIGEN_HOME (e.g. C:\project\ide\conf\cobigen-home)

    +
  2. +
  3. +

    .cobigen directory in OS user home (e.g. ~/.cobigen)

    +
  4. +
+
+
+

The actual configuration of CobiGen is maintained by a single folder or jar. The location can be configured with respect to the implemented configuration fall-back mechanism. CobiGen will search for the location of the configuration in the following order:

+
+
+
    +
  1. +

    A configuration jar or directory, which is passed to CobiGen by the Maven or Eclipse integration or any other program using the CobiGen programming interface: +1.1. the Maven integration allows to configure a jar dependency to be included in the currently running classpath (of interest for maven configuration +1.2. the Eclipse integration allows to specify a CobiGen_Templates project in the eclipse workspace

    +
  2. +
  3. +

    The file $cghome/.cobigen exists and the property templates is set to a valid configuration (e.g. templates=C:\project\ide\conf\templates or templates=C:\project\ide\conf\templates.jar) Hint: Check for log entry like Value of property templates in $cghome/.cobigen is invalid to identify an invalid configuration which is not taken up as expected

    +
  4. +
  5. +

    The folder $cghome/templates/CobiGen_Templates exists

    +
  6. +
  7. +

    The lexicographical sorted first configuration jar of the following path pattern $cghome/templates/templates-([^-]+)-(\\d+\\.?)+.jar if exists (e.g. templates-devon4j-2020.04.001)

    +
  8. +
  9. +

    CobiGen will automatically download the latest jar configuration from maven central with groupId com.devonfw.cobigen and artifactId templates-devon4j and take it like described in 4.

    +
  10. +
+
+
+

Within the configuration jar or directory you will find the following structure:

+
+
+
+
CobiGen_Templates
+ |- templateFolder1
+    |- templates.xml
+ |- templateFolder2
+    |- templates.xml
+ |- context.xml
+
+
+
+

Find some examples here.

+
+
+
Context Configuration
+
+

The context configuration (context.xml) always has the following root structure:

+
+
+
Context Configuration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<contextConfiguration xmlns="http://capgemini.com"
+                      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                      version="1.0">
+    <triggers>
+        ...
+    </triggers>
+</contextConfiguration>
+
+
+
+

The context configuration has a version attribute, which should match the XSD version the context configuration is an instance of. It should not state the version of the currently released version of CobiGen. This attribute should be maintained by the context configuration developers. If configured correctly, it will provide a better feedback for the user and thus higher user experience. Currently there is only the version v1.0. For further versions there will be a changelog later on.

+
+
+
Trigger Node
+
+

As children of the <triggers> node you can define different triggers. By defining a <trigger> you declare a mapping between special inputs and a templateFolder, which contains all templates, which are worth to be generated with the given input.

+
+
+
trigger configuration
+
+
<trigger id="..." type="..." templateFolder="..." inputCharset="UTF-8" >
+    ...
+</trigger>
+
+
+
+
    +
  • +

    The attribute id should be unique within a context configuration. It is necessary for efficient internal processing.

    +
  • +
  • +

    The attribute type declares a specific trigger interpreter, which might be provided by additional plug-ins. A trigger interpreter has to provide an input reader, which reads specific inputs and creates a template object model out of it to be processed by the FreeMarker template engine later on. Have a look at the plug-in’s documentation of your interest and see, which trigger types and thus inputs are currently supported.

    +
  • +
  • +

    The attribute templateFolder declares the relative path to the template folder, which will be used if the trigger gets activated.

    +
  • +
  • +

    The attribute inputCharset (optional) determines the charset to be used for reading any input file.

    +
  • +
+
+
+
+
Matcher Node
+
+

A trigger will be activated if its matchers hold the following formula:

+
+
+

!(NOT || …​ || NOT) && AND && …​ && AND && (OR || …​ || OR)

+
+
+

Whereas NOT/AND/OR describes the accumulationType of a matcher (see below) and e.g. NOT means 'a matcher with accumulationType NOT matches a given input'. Thus additionally to an input reader, a trigger interpreter has to define at least one set of matchers, which are satisfiable, to be fully functional. A <matcher> node declares specific characteristics a valid input should have.

+
+
+
Matcher Configuration
+
+
<matcher type="..." value="..." accumulationType="...">
+    ...
+</matcher>
+
+
+
+
    +
  • +

    The attribute type declares a specific type of matcher, which has to be provided by the surrounding trigger interpreter. Have a look at the plug-in’s documentation, which also provides the used trigger type for more information about valid matcher and their functionalities.

    +
  • +
  • +

    The attribute value might contain any information necessary for processing the matcher’s functionality. Have a look at the relevant plug-in’s documentation for more detail.

    +
  • +
  • +

    The attribute accumulationType (optional) specifies how the matcher will influence the trigger activation. Valid values are:

    +
    +
      +
    • +

      OR (default): if any matcher of accumulation type OR matches, the trigger will be activated as long as there are no further matchers with different accumulation types

      +
    • +
    • +

      AND: if any matcher with AND accumulation type does not match, the trigger will not be activated

      +
    • +
    • +

      NOT: if any matcher with NOT accumulation type matches, the trigger will not be activated

      +
    • +
    +
    +
  • +
+
+
+
+
Variable Assignment Node
+
+

Finally, a <matcher> node can have multiple <variableAssignment> nodes as children. Variable assignments allow to parametrize the generation by additional values, which will be added to the object model for template processing. The variables declared using variable assignments, will be made accessible in the templates.xml as well in the object model for template processing via the namespace variables.*.

+
+
+
Complete Configuration Pattern
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<contextConfiguration xmlns="http://capgemini.com"
+                      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                      version="1.0">
+    <triggers>
+        <trigger id="..." type="..." templateFolder="...">
+            <matcher type="..." value="...">
+                <variableAssignment type="..." key="..." value="..." />
+            </matcher>
+        </trigger>
+    </triggers>
+</contextConfiguration>
+
+
+
+
    +
  • +

    The attribute type declares the type of variable assignment to be processed by the trigger interpreter providing plug-in. This attribute enables variable assignments with different dynamic value resolutions.

    +
  • +
  • +

    The attribute key declares the namespace under which the resolved value will be accessible later on.

    +
  • +
  • +

    The attribute value might declare a constant value to be assigned or any hint for value resolution done by the trigger interpreter providing plug-in. For instance, if type is regex, then on value you will assign the matched group number by the regex (1, 2, 3…​)

    +
  • +
+
+
+
+
Container Matcher Node
+
+

The <containerMatcher> node is an additional matcher for matching containers of multiple input objects. +Such a container might be a package, which encloses multiple types or---more generic---a model, which encloses multiple elements. A container matcher can be declared side by side with other matchers:

+
+
+
ContainerMatcher Declaration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<contextConfiguration xmlns="http://capgemini.com"
+                      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                      version="1.0">
+    <triggers>
+        <trigger id="..." type="..." templateFolder="..." >
+            <containerMatcher type="..." value="..." retrieveObjectsRecursively="..." />
+            <matcher type="..." value="...">
+                <variableAssignment type="..." variable="..." value="..." />
+            </matcher>
+        </trigger>
+    </triggers>
+</contextConfiguration>
+
+
+
+
    +
  • +

    The attribute type declares a specific type of matcher, which has to be provided by the surrounding trigger interpreter. Have a look at the plug-in’s documentation, which also provides the used trigger type for more information about valid matcher and their functionalities.

    +
  • +
  • +

    The attribute value might contain any information necessary for processing the matcher’s functionality. Have a look at the relevant plug-in’s documentation for more detail.

    +
  • +
  • +

    The attribute retrieveObjectsRecursively (optional boolean) states, whether the children of the input should be retrieved recursively to find matching inputs for generation.

    +
  • +
+
+
+

The semantics of a container matcher are the following:

+
+
+
    +
  • +

    A <containerMatcher> does not declare any <variableAssignment> nodes

    +
  • +
  • +

    A <containerMatcher> matches an input if and only if one of its enclosed elements satisfies a set of <matcher> nodes of the same <trigger>

    +
  • +
  • +

    Inputs, which match a <containerMatcher> will cause a generation for each enclosed element

    +
  • +
+
+
+
+
+
Templates Configuration
+
+

The template configuration (templates.xml) specifies, which templates exist and under which circumstances it will be generated. There are two possible configuration styles:

+
+
+
    +
  1. +

    Configure the template meta-data for each template file by template nodes

    +
  2. +
  3. +

    (since cobigen-core-v1.2.0): Configure templateScan nodes to automatically retrieve a default configuration for all files within a configured folder and possibly modify the automatically configured templates using templateExtension nodes

    +
  4. +
+
+
+

To get an intuition of the idea, the following will initially describe the first (more extensive) configuration style. Such a configuration root structure looks as follows:

+
+
+
Extensive Templates Configuration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<templatesConfiguration xmlns="http://capgemini.com"
+                        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                        version="1.0" templateEngine="FreeMarker">
+    <templates>
+            ...
+    </templates>
+    <increments>
+            ...
+    </increments>
+</templatesConfiguration>
+
+
+
+

The root node <templatesConfiguration> specifies two attributes. The attribute version provides further usability support and will be handled analogous to the version attribute of the context configuration. The optional attribute templateEngine specifies the template engine to be used for processing the templates (since `cobigen-core-4.0.0`). By default it is set to FreeMarker.

+
+
+

The node <templatesConfiguration> allows two different grouping nodes as children. First, there is the <templates> node, which groups all declarations of templates. Second, there is the <increments> node, which groups all declarations about increments.

+
+
+
Template Node
+
+

The <templates> node groups multiple <template> declarations, which enables further generation. Each template file should be registered at least once as a template to be considered.

+
+
+
Example Template Configuration
+
+
<templates>
+    <template name="..." destinationPath="..." templateFile="..." mergeStrategy="..." targetCharset="..." />
+    ...
+</templates>
+
+
+
+

A template declaration consists of multiple pieces of information:

+
+
+
    +
  • +

    The attribute name specifies a unique ID within the templates configuration, which will later be reused in the increment definitions.

    +
  • +
  • +

    The attribute destinationPath specifies the destination path the template will be generated to. It is possible to use all variables defined by variable assignments within the path declaration using the FreeMarker syntax ${variables.*}. While resolving the variable expressions, each dot within the value will be automatically replaced by a slash. This behavior is accounted for by the transformations of Java packages to paths as CobiGen has first been developed in the context of the Java world. Furthermore, the destination path variable resolution provides the following additional built-in operators analogue to the FreeMarker syntax:

    +
    +
      +
    • +

      ?cap_first analogue to FreeMarker

      +
    • +
    • +

      ?uncap_first analogue to FreeMarker

      +
    • +
    • +

      ?lower_case analogue to FreeMarker

      +
    • +
    • +

      ?upper_case analogue to FreeMarker

      +
    • +
    • +

      ?replace(regex, replacement) - Replaces all occurrences of the regular expression regex in the variable’s value with the given replacement string. (since cobigen-core v1.1.0)

      +
    • +
    • +

      ?removeSuffix(suffix) - Removes the given suffix in the variable’s value iff the variable’s value ends with the given suffix. Otherwise nothing will happen. (since cobigen-core v1.1.0)

      +
    • +
    • +

      ?removePrefix(prefix) - Analogue to ?removeSuffix but removes the prefix of the variable’s value. (since cobigen-core v1.1.0)

      +
    • +
    +
    +
  • +
  • +

    The attribute templateFile describes the relative path dependent on the template folder specified in the trigger to the template file to be generated.

    +
  • +
  • +

    The attribute mergeStrategy (optional) can be optionally specified and declares the type of merge mechanism to be used, when the destinationPath points to an already existing file. CobiGen by itself just comes with a mergeStrategy override, which enforces file regeneration in total. Additional available merge strategies have to be obtained from the different plug-in’s documentations (see here for java, XML, properties, and text). Default: not set (means not mergeable)

    +
  • +
  • +

    The attribute targetCharset (optional) can be optionally specified and declares the encoding with which the contents will be written into the destination file. This also includes reading an existing file at the destination path for merging its contents with the newly generated ones. Default: UTF-8

    +
  • +
+
+
+

(Since version 4.1.0) It is possible to reference external templates (templates defined on another trigger), thanks to using <incrementRef …​> that are explained here.

+
+
+
+
Template Scan Node
+
+

(since cobigen-core-v1.2.0)

+
+
+

The second configuration style for template meta-data is driven by initially scanning all available templates and automatically configure them with a default set of meta-data. A scanning configuration might look like this:

+
+
+
Example of Template-scan configuration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<templatesConfiguration xmlns="http://capgemini.com"
+                        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                        version="1.2">
+    <templateScans>
+        <templateScan templatePath="templates" templateNamePrefix="prefix_" destinationPath="src/main/java"/>
+    </templateScans>
+</templatesConfiguration>
+
+
+
+

You can specify multiple <templateScan …​> nodes for different templatePaths and different templateNamePrefixes.

+
+
+
    +
  • +

    The name can be specified to later on reference the templates found by a template-scan within an increment. (since cobigen-core-v2.1.)

    +
  • +
  • +

    The templatePath specifies the relative path from the templates.xml to the root folder from which the template scan should be performed.

    +
  • +
  • +

    The templateNamePrefix (optional) defines a common id prefix, which will be added to all found and automatically configured templates.

    +
  • +
  • +

    The destinationPath defines the root folder all found templates should be generated to, whereas the root folder will be a prefix for all found and automatically configured templates.

    +
  • +
+
+
+

A templateScan will result in the following default configuration of templates. For each file found, a new template will be created virtually with the following default values:

+
+
+
    +
  • +

    id: file name without .ftl extension prefixed by templateNamePrefix from template-scan

    +
  • +
  • +

    destinationPath: relative file path of the file found with the prefix defined by destinationPath from template-scan. Furthermore,

    +
    +
      +
    • +

      it is possible to use the syntax for accessing and modifying variables as described for the attribute destinationPath of the template node, besides the only difference, that due to file system restrictions you have to replace all ?-signs (for built-ins) with #-signs.

      +
    • +
    • +

      the files to be scanned, should provide their final file extension by the following file naming convention: <filename>.<extension>.ftl Thus the file extension .ftl will be removed after generation.

      +
    • +
    +
    +
  • +
  • +

    templateFile: relative path to the file found

    +
  • +
  • +

    mergeStrategy: (optional) not set means not mergeable

    +
  • +
  • +

    targetCharset: (optional) defaults to UTF-8

    +
  • +
+
+
+

(Since version 4.1.0) It is possible to reference external templateScan (templateScans defined on another trigger), thanks to using <incrementRef …​> that are explained here.

+
+
+
+
Template Extension Node
+
+

(since cobigen-core-v1.2.0)

+
+
+

Additionally to the templateScan declaration it is easily possible to rewrite specific attributes for any scanned and automatically configured template.

+
+
+
Example Configuration of a TemplateExtension
+
+
<templates>
+    <templateExtension ref="prefix_FooClass.java" mergeStrategy="javamerge" />
+</templates>
+
+<templateScans>
+    <templateScan templatePath="foo" templateNamePrefix="prefix_" destinationPath="src/main/java/foo"/>
+</templateScans>
+
+
+
+

Lets assume, that the above example declares a template-scan for the folder foo, which contains a file FooClass.java.ftl in any folder depth. Thus the template scan will automatically create a virtual template declaration with id=prefix_FooClass.java and further default configuration.

+
+
+

Using the templateExtension declaration above will reference the scanned template by the attribute ref and overrides the mergeStrategy of the automatically configured template by the value javamerge. Thus we are able to minimize the needed templates configuration.

+
+
+

(Since version 4.1.0) It is possible to reference external templateExtension (templateExtensions defined on another trigger), thanks to using <incrementRef …​> that are explained here.

+
+
+
+
Increment Node
+
+

The <increments> node groups multiple <increment> nodes, which can be seen as a collection of templates to be generated. An increment will be defined by a unique id and a human readable description.

+
+
+
+
<increments>
+    <increment id="..." description="...">
+        <incrementRef ref="..." />
+        <templateRef ref="..." />
+        <templateScanRef ref="..." />
+    </increment>
+</increments>
+
+
+
+

An increment might contain multiple increments and/or templates, which will be referenced using <incrementRef …​>, <templateRef …​>, resp. <templateScanRef …​> nodes. These nodes only declare the attribute ref, which will reference an increment, a template, or a template-scan by its id or name.

+
+
+

(Since version 4.1.0) A special case of <incrementRef …​> is the external incrementRef. By default, <incrementRef …​> are used to reference increments defined in the same templates.xml file. So for example, we could have:

+
+
+
+
<increments>
+    <increment id="incA" description="...">
+        <incrementRef ref="incB" />
+    </increment>
+    <increment id="incB" description="...">
+        <templateRef .... />
+        <templateScan .... />
+    </increment>
+</increments>
+
+
+
+

However, if we want to reference an increment that it is not defined inside our templates.xml (an increment defined for another trigger), then we can use external incrementRef as shown below:

+
+
+
+
<increment name="..." description="...">
+    <incrementRef ref="trigger_id::increment_id"/>
+</increment>
+
+
+
+

The ref string is split using :: as delimiter. The first part of the string is the trigger_id to reference. That trigger contains an increment_id. Currently, this functionality only works when both templates use the same kind of input file.

+
+
+
+
+
Java Template Logic
+
+

since cobigen-core-3.0.0 which is included in the Eclipse and Maven Plugin since version 2.0.0 +In addition, it is possible to implement more complex template logic by custom Java code. To enable this feature, you can simply import the CobiGen_Templates by clicking on Adapt Templates, turn it into a simple maven project (if it is not already) and implement any Java logic in the common maven layout (e.g. in the source folder src/main/java). Each Java class will be instantiated by CobiGen for each generation process. Thus, you can even store any state within a Java class instance during generation. However, there is currently no guarantee regarding the template processing order.

+
+
+

As a consequence, you have to implement your Java classes with a public default (non-parameter) constructor to be used by any template. Methods of the implemented Java classes can be called within templates by the simple standard FreeMarker expression for calling Bean methods: SimpleType.methodName(param1). Until now, CobiGen will shadow multiple types with the same simple name non-deterministically. So please prevent yourself from that situation.

+
+
+

Finally, if you would like to do some reflection within your Java code accessing any type of the template project or any type referenced by the input, you should load classes by making use of the classloader of the util classes. CobiGen will take care of the correct classloader building including the classpath of the input source as well as of the classpath of the template project. If you use any other classloader or build it by your own, there will be no guarantee, that generation succeeds.

+
+
+
+
Template Properties
+
+

since cobigen-core-4.0.0 +Using a configuration with template scan, you can make use of properties in templates specified in property files named cobigen.properties next to the templates. The property files are specified as Java property files. Property files can be nested in sub-folders. Properties will be resolved including property shading. Properties defined nearest to the template to be generated will take precedence. +In addition, a cobigen.properties file can be specified in the target folder root (in eclipse plugin, this is equal to the source project root). These properties take precedence over template properties specified in the template folder.

+
+
+ + + + + +
+ + +It is not allowed to override context variables in cobigen.properties specifications as we have not found any interesting use case. This is most probably an error of the template designer, CobiGen will raise an error in this case. +
+
+
+
Multi module support or template target path redirects
+
+

since cobigen-core-4.0.0 +One special property you can specify in the template properties is the property relocate. It will cause the current folder and its sub-folders to be relocated at destination path resolution time. Take the following example:

+
+
+
+
folder
+  - sub1
+    Template.java.ftl
+    cobigen.properties
+
+
+
+

Let the cobigen.properties file contain the line relocate=../sub2/${cwd}. Given that, the relative destination path of Template.java.ftl will be resolved to folder/sub2/Template.java. Compare template scan configuration for more information about basic path resolution. The relocate property specifies a relative path from the location of the cobigen.properties. The ${cwd} placeholder will contain the remaining relative path from the cobigen.properties location to the template file. In this basic example it just contains Template.java.ftl, but it may even be any relative path including sub-folders of sub1 and its templates. +Given the relocate feature, you can even step out of the root path, which in general is the project/maven module the input is located in. This enables template designers to even address, e.g., maven modules located next to the module the input is coming from.

+
+
+
+
+
Basic Template Model
+
+

In addition to what is served by the different model builders of the different plug-ins, CobiGen provides a minimal model based on context variables as well as CobiGen properties. The following model is independent of the input format and will be served as a template model all the time:

+
+
+ +
+
+
+
Plugin Mechanism
+
+

Since cobigen-core 4.1.0, we changed the plug-in discovery mechanism. So far it was necessary to register new plugins programmatically, which introduces the need to let every tool integration, i.e. for eclipse or maven, be dependent on every plug-in, which should be released. This made release cycles take a long time as all plug-ins have to be integrated into a final release of maven or eclipse integration.

+
+
+

Now, plug-ins are automatically discovered by the Java Service Loader mechanism from the classpath. This also affects the setup of eclipse and maven integration to allow modular releases of CobiGen in future. We are now able to provide faster rollouts of bug-fixes in any of the plug-ins as they can be released completely independently.

+
+
+
+

Plug-ins

+ +
+

Java Plug-in +The CobiGen Java Plug-in comes with a new input reader for java artifacts, new java related trigger and matchers, as well as a merging mechanism for Java sources.

+
+
+
Trigger extension
+
+

The Java Plug-in provides a new trigger for Java related inputs. It accepts different representations as inputs (see Java input reader) and provides additional matching and variable assignment mechanisms. The configuration in the context.xml for this trigger looks like this:

+
+
+
    +
  • +

    type 'java'

    +
    +
    Example of a java trigger definition
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables Java elements as inputs.

    +
    +
  • +
+
+
+Matcher types +
+

With the trigger you might define matchers, which restrict the input upon specific aspects:

+
+
+
    +
  • +

    type fqn → full qualified name matching

    +
    +
    Example of a java trigger definition with a full qualified name matcher
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="fqn" value="(.+)\.persistence\.([^\.]+)\.entity\.([^\.]+)">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the full qualified name (fqn) of the declaring input class matches the given regular expression (value).

    +
    +
  • +
  • +

    type 'package' → package name of the input

    +
    +
    Example of a java trigger definition with a package name matcher
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="package" value="(.+)\.persistence\.([^\.]+)\.entity">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the package name (package) of the declaring input class matches the given regular expression (value).

    +
    +
  • +
  • +

    type 'expression'

    +
    +
    Example of a java trigger definition with an expression matcher
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="expression" value="instanceof java.lang.String">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the expression evaluates to true. Valid expressions are

    +
    +
  • +
  • +

    instanceof fqn: checks an 'is a' relation of the input type

    +
  • +
  • +

    isAbstract: checks, whether the input type is declared abstract

    +
  • +
+
+
+
+Container Matcher types +
+

Additionally, the java plugin provides the ability to match packages (containers) as follows:

+
+
+
    +
  • +

    type 'package'

    +
    +
    Example of a java trigger definition with a container matcher for packages
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <containerMatcher type="package" value="com\.example\.app\.component1\.persistence.entity" />
    +</trigger>
    +
    +
    +
    +

    The container matcher matches packages provided by the type com.capgemini.cobigen.javaplugin.inputreader.to.PackageFolder with a regular expression stated in the value attribute. (See containerMatcher semantics to get more information about containerMatchers itself.)

    +
    +
  • +
+
+
+
+Variable Assignment types +
+

Furthermore, it provides the ability to extract information from each input for further processing in the templates. The values assigned by variable assignments will be made available in templates and the destinationPath of context.xml through the namespace variables.<key>. The Java Plug-in currently provides two different mechanisms:

+
+
+
    +
  • +

    type 'regex' → regular expression group

    +
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="fqn" value="(.+)\.persistence\.([^\.]+)\.entity\.([^\.]+)">
    +        <variableAssignment type="regex" key="rootPackage" value="1" />
    +        <variableAssignment type="regex" key="component" value="2" />
    +        <variableAssignment type="regex" key="pojoName" value="3" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value of the given regular expression group number to the given key.

+
+
+
    +
  • +

    type 'constant' → constant parameter

    +
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="fqn" value="(.+)\.persistence\.([^\.]+)\.entity\.([^\.]+)">
    +        <variableAssignment type="constant" key="domain" value="restaurant" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value to the key as a constant.

+
+
+
+Java input reader +
+

The CobiGen Java Plug-in implements an input reader for parsed java sources as well as for java Class<?> objects (loaded by reflection). So API users can pass Class<?> objects as well as JavaClass objects for generation. The latter depends on QDox, which will be used for parsing and merging java sources. For getting the right parsed java inputs you can easily use the JavaParserUtil, which provides static functionality to parse java files and get the appropriate JavaClass object.

+
+
+

Furthermore, due to restrictions on both inputs according to model building (see below), it is also possible to provide an array of length two as an input, which contains the Class<?> as well as the JavaClass object of the same class.

+
+
+Template object model +
+

No matter whether you use reflection objects or parsed java classes as input, you will get the following object model for template creation:

+
+
+
    +
  • +

    classObject ('Class' :: Class object of the Java input)

    +
  • +
  • +

    POJO

    +
    +
      +
    • +

      name ('String' :: Simple name of the input class)

      +
    • +
    • +

      package ('String' :: Package name of the input class)

      +
    • +
    • +

      canonicalName ('String' :: Full qualified name of the input class)

      +
    • +
    • +

      annotations ('Map<String, Object>' :: Annotations, which will be represented by a mapping of the full qualified type of an annotation to its value. To gain template compatibility, the key will be stored with '_' instead of '.' in the full qualified annotation type. Furthermore, the annotation might be recursively defined and thus be accessed using the same type of mapping. Example ${pojo.annotations.javax_persistence_Id})

      +
    • +
    • +

      JavaDoc ('Map<String, Object>') :: A generic way of addressing all available JavaDoc doclets and comments. The only fixed variable is comment (see below). All other provided variables depend on the doclets found while parsing. The value of a doclet can be accessed by the doclets name (e.g. ${…​JavaDoc.author}). In case of doclet tags that can be declared multiple times (currently @param and @throws), you will get a map, which you access in a specific way (see below).

      +
      +
        +
      • +

        comment ('String' :: JavaDoc comment, which does not include any doclets)

        +
      • +
      • +

        params ('Map<String,String> :: JavaDoc parameter info. If the comment follows proper conventions, the key will be the name of the parameter and the value being its description. You can also access the parameters by their number, as in arg0, arg1 etc, following the order of declaration in the signature, not in order of JavaDoc)

        +
      • +
      • +

        throws ('Map<String,String> :: JavaDoc exception info. If the comment follows proper conventions, the key will be the name of the thrown exception and the value being its description)

        +
      • +
      +
      +
    • +
    • +

      extendedType ('Map<String, Object>' :: The supertype, represented by a set of mappings (since cobigen-javaplugin v1.1.0)

      +
      +
        +
      • +

        name ('String' :: Simple name of the supertype)

        +
      • +
      • +

        canonicalName ('String' :: Full qualified name of the supertype)

        +
      • +
      • +

        package ('String' :: Package name of the supertype)

        +
      • +
      +
      +
    • +
    • +

      implementedTypes ('List<Map<String, Object>>' :: A list of all implementedTypes (interfaces) represented by a set of mappings (since cobigen-javaplugin v1.1.0)

      +
      +
        +
      • +

        interface ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          name ('String' :: Simple name of the interface)

          +
        • +
        • +

          canonicalName ('String' :: Full qualified name of the interface)

          +
        • +
        • +

          package ('String' :: Package name of the interface)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      fields ('List<Map<String, Object>>' :: List of fields of the input class) (renamed since cobigen-javaplugin v1.2.0; previously attributes)

      +
      +
        +
      • +

        field ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          name ('String' :: Name of the Java field)

          +
        • +
        • +

          type ('String' :: Type of the Java field)

          +
        • +
        • +

          canonicalType ('String' :: Full qualified type declaration of the Java field’s type)

          +
        • +
        • +

          'isId' (Deprecated :: boolean :: true if the Java field or its setter or its getter is annotated with the javax.persistence.Id annotation, false otherwise. Equivalent to ${pojo.attributes[i].annotations.javax_persistence_Id?has_content})

          +
        • +
        • +

          JavaDoc (see pojo.JavaDoc)

          +
        • +
        • +

          annotations (see pojo.annotations with the remark, that for fields all annotations of its setter and getter will also be collected)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      methodAccessibleFields ('List<Map<String, Object>>' :: List of fields of the input class or its inherited classes, which are accessible using setter and getter methods)

      +
      +
        +
      • +

        same as for field (but without JavaDoc!)

        +
      • +
      +
      +
    • +
    • +

      methods ('List<Map<String, Object>>' :: The list of all methods, whereas one method will be represented by a set of property mappings)

      +
      +
        +
      • +

        method ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          name ('String' :: Name of the method)

          +
        • +
        • +

          JavaDoc (see pojo.JavaDoc)

          +
        • +
        • +

          annotations (see pojo.annotations)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+

Furthermore, when providing a Class<?> object as input, the Java Plug-in will provide additional functionalities as template methods (deprecated):

+
+
+
    +
  1. +

    isAbstract(String fqn) (Checks whether the type with the given full qualified name is an abstract class. Returns a Boolean value.) (since cobigen-javaplugin v1.1.1) (deprecated)

    +
  2. +
  3. +

    isSubtypeOf(String subType, String superType) (Checks whether the subType declared by its full qualified name is a sub type of the superType declared by its full qualified name. Equals the Java expression subType instanceof superType and so also returns a Boolean value.) (since cobigen-javaplugin v1.1.1) (deprecated)

    +
  4. +
+
+
+
+Model Restrictions +
+

As stated before, both inputs (Class<?> objects and JavaClass objects) have their restrictions according to model building. In the following, these restrictions are listed for both models: the ParsedJava Model, which results from a JavaClass input, and the ReflectedJava Model, which results from a Class<?> input.

+
+
+

It is important to understand, that these restrictions are only present if you work with either Parsed Model OR the Reflected Model. If you use the Maven Build Plug-in or Eclipse Plug-in these two models are merged together so that they can mutually compensate their weaknesses.

+
+
+Parsed Model +
+
    +
  • +

    annotations of the input’s supertype are not accessible due to restrictions in the QDox library. So pojo.methodAccessibleFields[i].annotations will always be empty for super type fields.

    +
  • +
  • +

    annotations' parameter values are available as Strings only (e.g. the Boolean value true is transformed into "true"). This also holds for the Reflected Model.

    +
  • +
  • +

    fields of "supertypes" of the input JavaClass are not available at all. So pojo.methodAccessibleFields will only contain the input type’s and the direct superclass’s fields.

    +
  • +
  • +

    [resolved, since cobigen-javaplugin 1.3.1] field types of supertypes are always canonical. So pojo.methodAccessibleFields[i].type will always provide the same value as pojo.methodAccessibleFields[i].canonicalType (e.g. java.lang.String instead of the expected String) for super type fields.

    +
  • +
+
+
+
+Reflected Model +
+
    +
  • +

    annotations' parameter values are available as Strings only (e.g. the Boolean value true is transformed into "true"). This also holds for the Parsed Model.

    +
  • +
  • +

    annotations are only available if the respective annotation has @Retention(value=RUNTIME), otherwise the annotations are to be discarded by the compiler or by the VM at run time. For more information see RetentionPolicy.

    +
  • +
  • +

    information about generic types is lost. E.g. a field’s/ methodAccessibleField’s type for List<String> can only be provided as List<?>.

    +
  • +
+
+
+
+
+
+
+
Merger extensions
+
+

The Java Plug-in provides two additional merging strategies for Java sources, which can be configured in the templates.xml:

+
+
+
    +
  • +

    Merge strategy javamerge (merges two Java resources and keeps the existing Java elements on conflicts)

    +
  • +
  • +

    Merge strategy javamerge_override (merges two Java resources and overrides the existing Java elements on conflicts)

    +
  • +
+
+
+

In general merging of two Java sources will be processed as follows:

+
+
+

Precondition of processing a merge of generated contents and existing ones is a common Java root class resp. surrounding class. If this is the case this class and all further inner classes will be merged recursively. Therefore, the following Java elements will be merged and conflicts will be resolved according to the configured merge strategy:

+
+
+
    +
  • +

    extends and implements relations of a class: Conflicts can only occur for the extends relation.

    +
  • +
  • +

    Annotations of a class: Conflicted if an annotation declaration already exists.

    +
  • +
  • +

    Fields of a class: Conflicted if there is already a field with the same name in the existing sources. (Will be replaced / ignored in total, also including annotations)

    +
  • +
  • +

    Methods of a class: Conflicted if there is already a method with the same signature in the existing sources. (Will be replaced / ignored in total, also including annotations)

    +
  • +
+
+ +
+

==Property Plug-in +The CobiGen Property Plug-in currently only provides different merge mechanisms for documents written in Java property syntax.

+
+
+
+
Merger extensions
+
+

There are two merge strategies for Java properties, which can be configured in the templates.xml:

+
+
+
    +
  • +

    Merge strategy propertymerge (merges two properties documents and keeps the existing properties on conflicts)

    +
  • +
  • +

    Merge strategy propertymerge_override (merges two properties documents and overrides the existing properties on conflicts)

    +
  • +
+
+
+

Both documents (base and patch) will be parsed using the Java 7 API and will be compared according to their keys. Conflicts will occur if a key in the patch already exists in the base document.

+
+ +
+

==XML Plug-in +The CobiGen XML Plug-in comes with an input reader for XML artifacts, XML related trigger and matchers and provides different merge mechanisms for XML result documents.

+
+
+
+
Trigger extension
+
+

(since cobigen-xmlplugin v2.0.0)

+
+
+

The XML Plug-in provides a trigger for XML related inputs. It accepts XML documents as input (see XML input reader) and provides additional matching and variable assignment mechanisms. The configuration in the context.xml for this trigger looks like this:

+
+
+
    +
  • +

    type 'xml'

    +
    +
    Example of a XML trigger definition.
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables XML documents as inputs.

    +
    +
  • +
  • +

    type xpath

    +
    +
    Example of a xpath trigger definition.
    +
    +
    <trigger id="..." type="xpath" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables XML documents as container inputs, which consist of several sub-documents.

    +
    +
  • +
+
+
+Container Matcher type +
+

A ContainerMatcher checks whether the input is a valid container.

+
+
+
    +
  • +

    xpath: type: xpath

    +
    +
    Example of a XML trigger definition with a node name matcher.
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <containerMatcher type="xpath" value="./uml:Model//packagedElement[@xmi:type='uml:Class']">
    +        ...
    +    </containerMatcher>
    +</trigger>
    +
    +
    +
    +

    Before applying any Matcher, this containerMatcher checks if the XML file contains a node uml:Model with a childnode packagedElement which contains an attribute xmi:type with the value uml:Class.

    +
    +
  • +
+
+
+
+Matcher types +
+

With the trigger you might define matchers, which restrict the input upon specific aspects:

+
+
+
    +
  • +

    XML: type nodename → document’s root name matching

    +
    +
    Example of a XML trigger definition with a node name matcher
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <matcher type="nodename" value="\D\w*">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the root name of the declaring input document matches the given regular expression (value).

    +
    +
  • +
  • +

    xpath: type: xpath → matching a node with a xpath value

    +
    +
    Example of a xpath trigger definition with a xpath matcher.
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <matcher type="xpath" value="/packagedElement[@xmi:type='uml:Class']">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the XML file contains a node /packagedElement where the xmi:type property equals uml:Class.

    +
    +
  • +
+
+
+
+Variable Assignment types +
+

Furthermore, it provides the ability to extract information from each input for further processing in the templates. The values assigned by variable assignments will be made available in templates and the destinationPath of context.xml through the namespace variables.<key>. The XML Plug-in currently provides only one mechanism:

+
+
+
    +
  • +

    type 'constant' → constant parameter

    +
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <matcher type="nodename" value="\D\w*">
    +        <variableAssignment type="constant" key="domain" value="restaurant" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value to the key as a constant.

+
+
+
+XML input reader +
+

The CobiGen XML Plug-in implements an input reader for parsed XML documents. So API users can pass org.w3c.dom.Document objects for generation. For getting the right parsed XML inputs you can easily use the xmlplugin.util.XmlUtil, which provides static functionality to parse XML files or input streams and get the appropriate Document object.

+
+
+Template object +
+

Due to the heterogeneous structure an XML document can have, the XML input reader does not always create exactly the same model structure (in contrast to the java input reader). For example, the model’s depth differs strongly according to its input document. To allow navigational access to the nodes, the model also depends on the document’s element node names. All child elements with unique names are directly accessible via their names. In addition, it is possible to iterate over all child elements with the help of the child list Children. So it is also possible to access child elements with non-unique names.

+
+
+

The XML input reader will create the following object model for template creation (EXAMPLEROOT, EXAMPLENODE1, EXAMPLENODE2, EXAMPLEATTR1,…​ are just used here as examples. Of course they will be replaced later by the actual node or attribute names):

+
+
+
    +
  • +

    ~EXAMPLEROOT~ ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      _nodeName_ ('String' :: Simple name of the root node)

      +
    • +
    • +

      _text_ ('String' :: Concatenated text content (PCDATA) of the root node)

      +
    • +
    • +

      TextNodes ('List<String>' :: List of all the root’s text node contents)

      +
    • +
    • +

      _at_~EXAMPLEATTR1~ ('String' :: String representation of the attribute’s value)

      +
    • +
    • +

      _at_~EXAMPLEATTR2~ ('String' :: String representation of the attribute’s value)

      +
    • +
    • +

      _at_…​

      +
    • +
    • +

      Attributes ('List<Map<String, Object>>' :: List of the root’s attributes

      +
      +
        +
      • +

        at ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          _attName_ ('String' :: Name of the attribute)

          +
        • +
        • +

          _attValue_ ('String' :: String representation of the attribute’s value)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      Children ('List<Map<String, Object>>' :: List of the root’s child elements

      +
      +
        +
      • +

        child ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          …​common element sub structure…​

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      ~EXAMPLENODE1~ ('Map<String, Object>' :: One of the root’s child nodes)

      +
      +
        +
      • +

        …​common element structure…​

        +
      • +
      +
      +
    • +
    • +

      ~EXAMPLENODE2~ ('Map<String, Object>' :: One of the root’s child nodes)

      +
      +
        +
      • +

        …​common element sub structure…​

        +
      • +
      • +

        ~EXAMPLENODE21~ ('Map<String, Object>' :: One of the nodes' child nodes)

        +
        +
          +
        • +

          …​common element structure…​

          +
        • +
        +
        +
      • +
      • +

        ~EXAMPLENODE…​~

        +
      • +
      +
      +
    • +
    • +

      ~EXAMPLENODE…​~

      +
    • +
    +
    +
  • +
+
+
+

In contrast to the java input reader, this XML input reader does currently not provide any additional template methods.

+
+
+
+
+
+
Merger extensions
+
+

The XML plugin uses the LeXeMe merger library to produce semantically correct merge products. The merge strategies can be found in the MergeType enum and can be configured in the templates.xml as a mergeStrategy attribute:

+
+
+
    +
  • +

    mergeStrategy xmlmerge

    +
    +
    Example of a template using the mergeStrategy xmlmerge
    +
    +
    <templates>
    +	<template name="..." destinationPath="..." templateFile="..." mergeStrategy="xmlmerge"/>
    +</templates>
    +
    +
    +
  • +
+
+
+

Currently only the document types included in LeXeMe are supported. +On how the merger works consult the LeXeMe Wiki.

+
+ +
+

==Text Merger Plug-in +The Text Merger Plug-in enables merging result free text documents to existing free text documents. Therefore, the algorithms are also very rudimentary.

+
+
+
+
Merger extensions
+
+

There are currently three main merge strategies that apply for the whole document:

+
+
+
    +
  • +

    merge strategy textmerge_append (appends the text directly to the end of the existing document) +_Remark_: If no anchors are defined, this will simply append the patch.

    +
  • +
  • +

    merge strategy textmerge_appendWithNewLine (appends the text after adding a new line break to the existing document) +_Remark_: empty patches will not result in appending a new line any more since v1.0.1 +Remark: Only suitable if no anchors are defined, otherwise it will simply act as textmerge_append

    +
  • +
  • +

    merge strategy textmerge_override (replaces the contents of the existing file with the patch) +_Remark_: If anchors are defined, override is set as the default mergestrategy for every text block if not redefined in an anchor specification.

    +
  • +
+
+
+
+
Anchor functionality
+
+

If a template contains text that fits the definition of anchor:${documentpart}:${mergestrategy}:anchorend or more specifically the regular expression (.*)anchor:([^:]+):(newline_)?([^:]+)(_newline)?:anchorend\\s*(\\r\\n|\\r|\\n), some additional functionality becomes available about specific parts of the incoming text and the way it will be merged with the existing text. These anchors always change things about the text to come up until the next anchor, text before it is ignored.

+
+
+

If no anchors are defined, the complete patch will be appended depending on your choice for the template in the file templates.xml.

+
+
+

[[anchordef]]

+
+
+Anchor Definition +
+

Anchors should always be defined as a comment of the language the template results in, as you do not want them to appear in your readable version, but cannot define them as FreeMarker comments in the template, or the merger will not know about them. +Anchors will also be read when they are not comments due to the merger being able to merge multiple types of text-based languages, thus making it practically impossible to filter for the correct comment declaration. That is why anchors have to always be followed by line breaks. That way there is a universal way to filter anchors that should have anchor functionality and ones that should appear in the text. +Remark: If the resulting language has closing tags for comments, they have to appear in the next line. +Remark: If you do not put the anchor into a new line, all the text that appears before it will be added to the anchor.

+
+
+
+Document parts +
+

In general, ${documentpart} is an id to mark a part of the document, that way the merger knows what parts of the text to merge with which parts of the patch (e.g. if the existing text contains anchor:table:${}:anchorend that part will be merged with the part tagged anchor:table:${}:anchorend of the patch).

+
+
+

If the same documentpart is defined multiple times, it can lead to errors, so instead of defining table multiple times, use table1, table2, table3 etc.

+
+
+

If a ${documentpart} is defined in the document but not in the patch and they are in the same position, it is processed in the following way: If only the documentparts header, test and footer are defined in the document in that order, and the patch contains header, order and footer, the resulting order will be header, test, order then footer.

+
+
+

The following documentparts have default functionality:

+
+
+
    +
  1. +

    anchor:header:${mergestrategy}:anchorend marks the beginning of a header, that will be added once when the document is created, but not again. +Remark: This is only done once, if you have header in another anchor, it will be ignored

    +
  2. +
  3. +

    anchor:footer:${mergestrategy}:anchorend marks the beginning of a footer, that will be added once when the document is created, but not again. Once this is invoked, all following text will be included in the footer, including other anchors.

    +
  4. +
+
+
+

[[mergestrategies]]

+
+
+
+Mergestrategies +
+

Mergestrategies are only relevant in the patch, as the merger is only interested in how text in the patch should be managed, not how it was managed in the past.

+
+
+
    +
  1. +

    anchor:${documentpart}::anchorend will use the merge strategy from templates.xml, see Merger-Extensions.

    +
  2. +
  3. +

    anchor:${}:${mergestrategy}_newline:anchorend or anchor:${}:newline_${mergestrategy}:anchorend states that a new line should be appended before or after this anchors text, depending on where the newline is (before or after the mergestrategy). anchor:${documentpart}:newline:anchorend puts a new line after the anchors text. +Remark: Only works with appending strategies, not merging/replacing ones. These strategies currently include: appendbefore, append/appendafter

    +
  4. +
  5. +

    anchor:${documentpart}:override:anchorend means that the new text of this documentpart will replace the existing one completely

    +
  6. +
  7. +

    anchor:${documentpart}:appendbefore:anchorend or anchor:${documentpart}:appendafter:anchorend/anchor:${documentpart}:append:anchorend specifies whether the text of the patch should come before the existing text or after.

    +
  8. +
+
+
+
+
+
Usage Examples
+
+General +
+

Below you can see how a file with anchors might look like (using adoc comment tags), with examples of what you might want to use the different functions for.

+
+
+
+
// anchor:header:append:anchorend
+
+Table of contents
+Introduction/Header
+
+// anchor:part1:appendafter:anchorend
+
+Lists
+Table entries
+
+// anchor:part2:nomerge:anchorend
+
+Document Separators
+adoc table definitions
+
+// anchor:part3:override:anchorend
+
+Anything that you only want once but changes from time to time
+
+// anchor:footer:append:anchorend
+
+Copyright Info
+Imprint
+
+
+
+
+Merging +
+

In this section you will see a comparison on what files look like before and after merging

+
+
+override +
+
Before
+
+
// anchor:part:override:anchorend
+Lorem Ipsum
+
+
+
+
Patch
+
+
// anchor:part:override:anchorend
+Dolor Sit
+
+
+
+
After
+
+
// anchor:part:override:anchorend
+Dolor Sit
+
+
+
+
+Appending +
+
Before
+
+
// anchor:part:append:anchorend
+Lorem Ipsum
+// anchor:part2:appendafter:anchorend
+Lorem Ipsum
+// anchor:part3:appendbefore:anchorend
+Lorem Ipsum
+
+
+
+
Patch
+
+
// anchor:part:append:anchorend
+Dolor Sit
+// anchor:part2:appendafter:anchorend
+Dolor Sit
+// anchor:part3:appendbefore:anchorend
+Dolor Sit
+
+
+
+
After
+
+
// anchor:part:append:anchorend
+Lorem Ipsum
+Dolor Sit
+// anchor:part2:appendafter:anchorend
+Lorem Ipsum
+Dolor Sit
+// anchor:part3:appendbefore:anchorend
+Dolor Sit
+Lorem Ipsum
+
+
+
+
+Newline +
+
Before
+
+
// anchor:part:newline_append:anchorend
+Lorem Ipsum
+// anchor:part:append_newline:anchorend
+Lorem Ipsum
+(end of file)
+
+
+
+
Patch
+
+
// anchor:part:newline_append:anchorend
+Dolor Sit
+// anchor:part:append_newline:anchorend
+Dolor Sit
+(end of file)
+
+
+
+
After
+
+
// anchor:part:newline_append:anchorend
+Lorem Ipsum
+
+Dolor Sit
+// anchor:part:append_newline:anchorend
+Lorem Ipsum
+Dolor Sit
+
+(end of file)
+
+
+
+
+
+
+
Error List
+
+
    +
  • +

    If there are anchors in the text, but either base or patch do not start with one, the merging process will be aborted, as text might go missing this way.

    +
  • +
  • +

    Using _newline or newline_ with merge strategies that don’t support it, like override, will abort the merging process. See <<`mergestrategies`,Merge Strategies>> for details.

    +
  • +
  • +

    Using undefined mergestrategies will abort the merging process.

    +
  • +
  • +

    Wrong anchor definitions, for example anchor:${}:anchorend will abort the merging process, see <<`anchordef`,Anchor Definition>> for details.

    +
  • +
+
+ +
+

==JSON Plug-in +At the moment the plug-in can be used for merging generic JSON files depending on the merge strategy defined in the templates.

+
+
+
+
Merger extensions
+
+

There are currently these merge strategies:

+
+
+

Generic JSON Merge

+
+
+
    +
  • +

    merge strategy jsonmerge (add the new code respecting the existent in case of conflict)

    +
  • +
  • +

    merge strategy jsonmerge_override (add the new code overwriting the existent in case of conflict)

    +
    +
      +
    1. +

      JsonArray’s will be ignored / replaced in total

      +
    2. +
    3. +

      JsonObjects in conflict will be processed recursively ignoring adding non existent elements.

      +
    4. +
    +
    +
  • +
+
+
+
+
Merge Process
+
+Generic JSON Merging +
+

The merge process will be:

+
+
+
    +
  1. +

    Add non existent JSON Objects from patch file to base file.

    +
  2. +
  3. +

    For existent object in both files, will add non existent keys from patch to base object. This process will be done recursively for all existent objects.

    +
  4. +
  5. +

    For JSON Arrays existent in both files, the arrays will be just concatenated.

    +
  6. +
+
+ +
+

==TypeScript Plug-in

+
+
+

The TypeScript Plug-in enables merging result TS files to existing ones. This plug-in is used at the moment for generating an Angular2 client with all CRUD functionalities enabled. The plug-in also generates i18n functionality by appending the ES or EN suffix at the end of the word, to make the developer aware that these words must be translated to the corresponding language. Currently, the generation of the Angular2 client requires an ETO java object as input, so there is no need to implement an input reader for ts artifacts for the moment.

+
+
+
+
+
Trigger Extensions
+
+

As for the Angular2 generation the input is a java object, the trigger expressions (including matchers and variable assignments) are implemented as Java.

+
+
+
+
Merger extensions
+
+

This plugin uses the TypeScript Merger to merge files. There are currently two merge strategies:

+
+
+
    +
  • +

    merge strategy tsmerge (add the new code respecting the existing in case of conflict)

    +
  • +
  • +

    merge strategy tsmerge_override (add the new code overwriting the existent in case of conflict)

    +
  • +
+
+
+

The merge algorithm mainly handles the following AST nodes:

+
+
+
    +
  • +

    ImportDeclaration

    +
    +
      +
    • +

      Will add non existent imports whatever the merge strategy is.

      +
    • +
    • +

      For different imports from same module, the import clauses will be merged.

      +
      +
      +
      import { a } from 'b';
      +import { c } from 'b';
      +//Result
      +import { a, c } from 'b';
      +
      +
      +
    • +
    +
    +
  • +
  • +

    ClassDeclaration

    +
    +
      +
    • +

      Adds non existent base properties from patch based on the name property.

      +
    • +
    • +

      Adds non existent base methods from patch based on the name signature.

      +
    • +
    • +

      Adds non existent annotations to class, properties and methods.

      +
    • +
    +
    +
  • +
  • +

    PropertyDeclaration

    +
    +
      +
    • +

      Adds non existent decorators.

      +
    • +
    • +

      Merge existent decorators.

      +
    • +
    • +

      With override strategy, the value of the property will be replaced by the patch value.

      +
    • +
    +
    +
  • +
  • +

    MethodDeclaration

    +
    +
      +
    • +

      With override strategy, the body will be replaced.

      +
    • +
    • +

      The parameters will be merged.

      +
    • +
    +
    +
  • +
  • +

    ParameterDeclaration

    +
    +
      +
    • +

      Replace type and modifiers with override merge strategy, adding non existent from patch into base.

      +
    • +
    +
    +
  • +
  • +

    ConstructorDeclaration

    +
    +
      +
    • +

      Merged in the same way as Method is.

      +
    • +
    +
    +
  • +
  • +

    FunctionDeclaration

    +
    +
      +
    • +

      Merged in the same way as Method is.

      +
    • +
    +
    +
  • +
+
+
+
+
Input reader
+
+

The TypeScript input reader is based on the one that the TypeScript merger uses. The current extensions are additional module fields indicating from which library any entity originates. +module: null specifies a standard entity or type such as string or number.

+
+
+Object model +
+

To get a first impression of the created object after parsing, let us start with analyzing a small example, namely the parsing of a simple type-orm model written in TypeScript.

+
+
+
+
import {Entity, PrimaryGeneratedColumn, Column} from "typeorm";
+
+@Entity()
+export class User {
+
+    @PrimaryGeneratedColumn()
+    id: number;
+
+    @Column()
+    firstName: string;
+
+    @Column()
+    lastName: string;
+
+    @Column()
+    age: number;
+
+}
+
+
+
+

The returned object has the following structure

+
+
+
+
{
+  "importDeclarations": [
+    {
+      "module": "typeorm",
+      "named": [
+        "Entity",
+        "PrimaryGeneratedColumn",
+        "Column"
+      ],
+      "spaceBinding": true
+    }
+  ],
+  "classes": [
+    {
+      "identifier": "User",
+      "modifiers": [
+        "export"
+      ],
+      "decorators": [
+        {
+          "identifier": {
+            "name": "Entity",
+            "module": "typeorm"
+          },
+          "isCallExpression": true
+        }
+      ],
+      "properties": [
+        {
+          "identifier": "id",
+          "type": {
+            "name": "number",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "PrimaryGeneratedColumn",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        },
+        {
+          "identifier": "firstName",
+          "type": {
+            "name": "string",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "Column",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        },
+        {
+          "identifier": "lastName",
+          "type": {
+            "name": "string",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "Column",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        },
+        {
+          "identifier": "age",
+          "type": {
+            "name": "number",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "Column",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        }
+      ]
+    }
+  ]
+}
+
+
+
+

If we only consider the first level of the JSON response, we spot two lists of imports and classes, providing information about the only import statement and the only User class, respectively. Moving one level deeper we observe that:

+
+
+
    +
  • +

    Every import statement is translated to an import declaration entry in the declarations list, containing the module name, as well as a list of entities imported from the given module.

    +
  • +
  • +

    Every class entry provides besides the class identifier, its decoration(s), modifier(s), as well as a list of properties that the original class contains.

    +
  • +
+
+
+

Note that, for each given type, the module from which it is imported is also given as in

+
+
+
+
  "identifier": {
+    "name": "Column",
+    "module": "typeorm"
+  }
+
+
+
+

Returning to the general case, independently from the given TypeScript file, an object having the following Structure will be created.

+
+
+
    +
  • +

    importDeclarations: A list of import statement as described above

    +
  • +
  • +

    exportDeclarations: A list of export declarations

    +
  • +
  • +

    classes: A list of classes extracted from the given file, where each entry is full of class specific fields, describing its properties and decorator for example.

    +
  • +
  • +

    interfaces: A list of interfaces.

    +
  • +
  • +

    variables: A list of variables.

    +
  • +
  • +

    functions: A list of functions.

    +
  • +
  • +

    enums: A list of enumerations.

    +
  • +
+
+ +
+

==HTML Plug-in

+
+
+

The HTML Plug-in enables merging result HTML files to existing ones. This plug-in is used at the moment for generating an Angular2 client. Currently, the generation of the Angular2 client requires an ETO java object as input, so there is no need to implement an input reader for ts artifacts for the moment.

+
+
+
+
+
Trigger Extensions
+
+

As for the Angular2 generation the input is a java object, the trigger expressions (including matchers and variable assignments) are implemented as Java.

+
+
+
+
Merger extensions
+
+

There are currently two merge strategies:

+
+
+
    +
  • +

    merge strategy html-ng* (add the new code respecting the existing in case of conflict)

    +
  • +
  • +

    merge strategy html-ng*_override (add the new code overwriting the existent in case of conflict)

    +
  • +
+
+
+

The merging of two Angular2 files will be processed as follows:

+
+
+

The merge algorithm handles the following AST nodes:

+
+
+
    +
  • +

    md-nav-list

    +
  • +
  • +

    a

    +
  • +
  • +

    form

    +
  • +
  • +

    md-input-container

    +
  • +
  • +

    input

    +
  • +
  • +

    name (for name attribute)

    +
  • +
  • +

    ngIf

    +
  • +
+
+
+ + + + + +
+ + +Be aware, that the HTML merger is not generic and only handles the described tags needed for merging code of a basic Angular client implementation. For future versions, it is planned to implement a more generic solution. +
+
+ +
+

==OpenAPI Plug-in

+
+
+

The OpenAPI Plug-in enables the support for Swagger files that follow the OpenAPI 3.0 standard as input for CobiGen. Until now, CobiGen was thought to follow a "code first" generation; with this plugin, it can now also follow the "contract first" strategy.

+
+
+
    +
  • +

    Code First

    +
    +
      +
    • +

      Generating from a file with code (Java/XML code in our case)

      +
    • +
    +
    +
  • +
  • +

    Contract First

    +
    +
      +
    • +

      Generation from a full definition file (Swagger in this case). This file contains all the information about entities, operations, etc…​

      +
    • +
    +
    +
  • +
+
+
+ + + + + +
+ + +If you are not a CobiGen developer, you will be more interested in usage. +
+
+
+
+
Trigger Extensions
+
+

The OpenAPI Plug-in provides a new trigger for Swagger OpenAPI 3.0 related inputs. It accepts different representations as inputs (see OpenAPI input reader) and provides additional matching and variable assignment mechanisms. The configuration in the context.xml for this trigger looks like this:

+
+
+
    +
  • +

    type openapi

    +
    +
    Example of a OpenAPI trigger definition
    +
    +
    <trigger id="..." type="openapi" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables OpenAPI elements as inputs.

    +
    +
  • +
+
+
+Matcher type +
+

With the trigger you might define matchers, which restrict the input upon specific aspects:

+
+
+
    +
  • +

    type 'element' → An object

    +
  • +
+
+
+

This trigger will be enabled if the element (Java Object) of the input file is an EntityDef (value).

+
+
+
+Container Matcher type +
+

Additionally, the OpenAPI plugin provides the ability to match packages (containers) as follows:

+
+
+
    +
  • +

    type 'element'

    +
  • +
+
+
+

The container matcher matches elements as Java Objects, in this case will be always an OpenAPIFile object. (See containerMatcher semantics to get more information about containerMatchers itself.)

+
+
+
+Variable Assignment types +
+

Furthermore, it provides the ability to extract information from each input for further processing in the templates. The values assigned by variable assignments will be made available in template and the destinationPath of context.xml through the namespace variables.<key>. The OpenAPI Plug-in currently provides two different mechanisms:

+
+
+
    +
  • +

    type 'constant' → constant parameter

    +
    +
    +
    <trigger id="..." type="openapi" templateFolder="...">
    +    <containerMatcher type="element" value="OpenApiFile"/>
    +    <matcher type="element" value="EntityDef">
    +        <variableAssignment type="constant" key="rootPackage" value="com.capgemini.demo" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns a constant value to the given key. +In this case, the constant type variableAssignment is used to specify the root package where the generator will place the generated files.

+
+
+
    +
  • +

    type 'extension' → Extraction of the info extensions and the extensions of each entity. (the tags that start with "x-…​").

    +
    +
    +
      <trigger id="..." type="openapi" templateFolder="...">
    +    <containerMatcher type="element" value="OpenApiFile"/>
    +    <matcher type="element" value="EntityDef">
    +      <variableAssignment type="extension" key="testingAttribute" value="x-test"/>
    +      <variableAssignment type="extension" key="rootPackage" value="x-rootpackage"/>
    +      <variableAssignment type="extension" key="globalVariable" value="x-global"/>
    +    </matcher>
    +  </trigger>
    +
    +
    +
  • +
+
+
+

The 'extension' variable assignment tries to find 'extensions' (tags that start with "x-…​") on the 'info' +part of your file and on the extensions of each entity. value is the extension that our plug-in will try to find on your OpenAPI file. The result will +be stored in the variable key.

+
+
+

As you will see on the figure below, there are two types of variables: The global ones, that are defined +on the 'info' part of the file, and the local ones, that are defined inside each entity.

+
+
+

Therefore, if you want to define the root package, then you will have to declare it on the 'info' part. +That way, all your entities will be generated under the same root package (e.g. com.devonfw.project).

+
+
+

Swagger at devon4j Project

+
+
+

If no extension with that name was found, then an empty string will be assigned. In the case of not defining the root package, then the code will be generated into src/main/java.

+
+
+
    +
  • +

    type 'property' → property of the Java Object

    +
    +
    +
    <trigger id="..." type="openapi" templateFolder="...">
    +    <containerMatcher type="element" value="OpenApiFile"/>
    +    <matcher type="element" value="EntityDef">
    +        <variableAssignment type="property" key="entityName" value="name" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

The 'property' variable assignment tries to find the property value of the entities defined on the schema. +The value is assigned to the key. The current properties that you will be able to get are:

+
+
+
    +
  1. +

    ComponentDef component: It is an object that stores the configuration of a devon4j component. Its only +property is List<PathDef> paths which contains the paths as the ones shown here.

    +
  2. +
  3. +

    String componentName: Stores the name of the x-component tag for this entity.

    +
  4. +
  5. +

    String name: Name of this entity (as shown on the example above).

    +
  6. +
  7. +

    String description: Description of this entity.

    +
  8. +
  9. +

    List<PropertyDef> properties: List containing all the properties of this entity. PropertyDef is an object that has the next properties:

    +
    +
      +
    1. +

      String name.

      +
    2. +
    3. +

      String type.

      +
    4. +
    5. +

      String format.

      +
    6. +
    7. +

      String description.

      +
    8. +
    9. +

      Boolean isCollection.

      +
    10. +
    11. +

      Boolean isEntity.

      +
    12. +
    13. +

      Boolean required.

      +
    14. +
    15. +

      Map<String, Object> constraints

      +
    16. +
    +
    +
  10. +
+
+
+

If no property with that name was found, then it will be set to null.

+
+
+
+Full trigger configuration +
+
+
<trigger id="..." type="openapi" templateFolder="...">
+    <containerMatcher type="element" value="OpenApiFile">
+    <matcher type="element" value="EntityDef">
+        <variableAssignment type="constant" key="rootPackage" value="com.capgemini.demo" />
+        <variableAssignment type="property" key="component" value="componentName" />
+        <variableAssignment type="property" key="entityName" value="name" />
+    </matcher>
+</trigger>
+
+
+
+
+
+
Input reader
+
+

The CobiGen OpenAPI Plug-in implements an input reader for OpenAPI 3.0 files. The XML input reader will create the following object model for template creation:

+
+
+
    +
  • +

    model ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      header (HeaderDef :: Definition of the header found at the top of the file)

      +
    • +
    • +

      name ('String' :: Name of the current Entity)

      +
    • +
    • +

      componentName ('String' :: name of the component the entity belongs to)

      +
    • +
    • +

      component (ComponentDef :: Full definition of the component that entity belongs to)

      +
    • +
    • +

      description ('String' :: Description of the Entity)

      +
    • +
    • +

      properties (List<PropertyDef> :: List of properties the entity has)

      +
    • +
    • +

      relationShips (List<RelationShip> :: List of Relationships the entity has)

      +
    • +
    +
    +
  • +
  • +

    HeaderDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      info (InfoDef :: Definition of the info found in the header)

      +
    • +
    • +

      servers (List<ServerDef> :: List of servers the specification uses)

      +
    • +
    +
    +
  • +
  • +

    InfoDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      title ('String' :: The title of the specification)

      +
    • +
    • +

      description ('String' :: The description of the specification)

      +
    • +
    +
    +
  • +
  • +

    ServerDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      URI ('String' :: String representation of the Server location)

      +
    • +
    • +

      description ('String' :: description of the server)

      +
    • +
    +
    +
  • +
  • +

    ComponentDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      paths (List<PathDef> :: List of services for this component)

      +
    • +
    +
    +
  • +
  • +

    PropertyDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      name ('String' :: Name of the property)

      +
    • +
    • +

      type ('String' :: type of the property)

      +
    • +
    • +

      format ('String' :: format of the property (i.e. int64))

      +
    • +
    • +

      isCollection (boolean :: true if the property is a collection, false by default)

      +
    • +
    • +

      isEntity (boolean :: true if the property refers to another entity, false by default)

      +
    • +
    • +

      sameComponent (boolean :: true if the entity that the property refers to belongs to the same component, false by default)

      +
    • +
    • +

      description ('String' :: Description of the property)

      +
    • +
    • +

      required (boolean :: true if the property is set as required)

      +
    • +
    • +

      constraints ('Map<String, Object>')

      +
    • +
    +
    +
  • +
  • +

    RelationShip ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      type ('String' :: type of the relationship (OneToOne, ManyToMany, etc…​))

      +
    • +
    • +

      entity ('String' :: destination entity name)

      +
    • +
    • +

      sameComponent (boolean :: true if the destination entity belongs to the same component of the source entity, false by default)

      +
    • +
    • +

      unidirectional (boolean :: true if the relationship is unidirectional, false by default)

      +
    • +
    +
    +
  • +
  • +

    PathDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      rootComponent ('String' :: the first segment of the path)

      +
    • +
    • +

      version ('String' :: version of the service)

      +
    • +
    • +

      pathURI ('String' :: URI of the path, the segment after the version)

      +
    • +
    • +

      operations (List<OperationDef> :: List of operations for this path)

      +
    • +
    +
    +
  • +
  • +

    OperationDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      type ('String' :: type of the operation (GET, PUT, etc…​))

      +
    • +
    • +

      parameters (List<ParameterDef> :: List of parameters)

      +
    • +
    • +

      operationId ('String' :: name of the operation prototype)

      +
    • +
    • +

      description ('String' :: JavaDoc Description of the operation)

      +
    • +
    • +

      summary (List<PropertyDef> :: JavaDoc operation Summary)

      +
    • +
    • +

      tags ('List<String>' :: List of different tags)

      +
    • +
    • +

      responses (List<ResponseDef> :: Responses of the operation)

      +
    • +
    +
    +
  • +
  • +

    ParameterDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      isSearchCriteria (boolean :: true if the response is an SearchCriteria object)

      +
    • +
    • +

      inPath (boolean :: true if this parameter is contained in the request path)

      +
    • +
    • +

      inQuery (boolean :: true if this parameter is contained in a query)

      +
    • +
    • +

      isBody (boolean :: true if this parameter is a response body)

      +
    • +
    • +

      inHeader (boolean :: true if this parameter is contained in a header)

      +
    • +
    • +

      mediaType ('String' :: String representation of the media type of the parameter)

      +
    • +
    +
    +
  • +
  • +

    ResponseDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      isArray (boolean :: true if the type of the response is an Array)

      +
    • +
    • +

      isPaginated (boolean :: true if the type of the response is paginated)

      +
    • +
    • +

      isVoid (boolean :: true if there is no type/an empty type)

      +
    • +
    • +

      isEntity (boolean :: true if the type of the response is an Entity)

      +
    • +
    • +

      entityRef (EntityDef :: Incomplete EntityDef containing the name and properties of the referenced Entity)

      +
    • +
    • +

      type ('String' :: String representation of the attribute’s value)

      +
    • +
    • +

      code ('String' :: String representation of the HTTP status code)

      +
    • +
    • +

      mediaTypes ('List<String>' :: List of media types that can be returned)

      +
    • +
    • +

      description ('String' :: Description of the response)

      +
    • +
    +
    +
  • +
+
+
+
+
Merger extensions
+
+

This plugin only provides an input reader; there is no support for OpenAPI merging. Nevertheless, the files generated from an OpenAPI file will be Java, XML, JSON, TS, etc…​ so, +for each file to be generated defined at templates.xml, you must set the mergeStrategy for the specific language (javamerge, javamerge_override, jsonmerge, etc…​)

+
+
+
+
<templates>
+    ...
+    <templateExtension ref="${variables.entityName}.java" mergeStrategy="javamerge"/>
+    ...
+    <templateExtension ref="${variables.entityName}dataGrid.component.ts" mergeStrategy="tsmerge"/>
+    ...
+    <templateExtension ref="en.json" mergeStrategy="jsonmerge"/>
+</templates>
+
+
+
+
+
Usage
+
+Writing OpenAPI 3.0 contract file +
+

The Swagger file must follow the OpenAPI 3.0 standard to be readable by CobiGen, otherwise an error will be thrown. +A full documentation about how to follow this standard can be found in the Swagger3 Docs.

+
+
+

The Swagger file must be at the core folder of your devon4j project, like shown below:

+
+
+

Swagger at devon4j Project

+
+
+

To be compatible with CobiGen and devon4j, it must follow some specific configurations. These configurations allow us to avoid redundant definitions, as SearchCriteria and PaginatedList objects are used at the services definitions.

+
+
+
+Paths +
+
    +
  • +

    Just adding the tags property at the end of the service definitions with the items `SearchCriteria` and/or paginated lets CobiGen know that a standard devon4j SearchCriteria and/or PaginateListTo object must be generated. That way, the Swagger file will be easier to write and even more understandable.

    +
  • +
  • +

    The path must start with the component name, and define an x-component tag with the component name. That way this service will be included into the component services list.

    +
  • +
+
+
+
+
  /componentnamemanagement/v1/entityname/customOperation/:
+    x-component: componentnamemanagement
+    post:
+      summary: 'Summary of the operation'
+      description: Description of the operation.
+      operationId: customOperation
+      responses:
+        '200':
+          description: Description of the response.
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: '#/components/schemas/EntityName'
+      requestBody:
+        $ref: '#/components/requestBodies/EntityName'
+      tags:
+        - searchCriteria
+        - paginated
+
+
+
+

That way, CobiGen will be able to generate the endpoint (REST service) customOperation on componentmanagement. If you do not specify the component to generate to (the x-component tag) then this service will not be taken into account for generation.

+
+
+
+Service based generation +
+

In previous CobiGen versions, we were able to generate code from a contract-first OpenAPI specification only when we defined components like the following:

+
+
+
+
components:
+    schemas:
+        Shop:
+          x-component: shopmanagement
+          description: Entity definition of Shop
+          type: object
+          properties:
+            shopExample:
+              type: string
+              maxLength: 100
+              minLength: 5
+              uniqueItems: true
+
+
+
+

We could not generate services without the definition of those components.

+
+
+

In our current version, we have overcome this limitation, so that now we are able to generate all the services independently. You just need to add an x-component tag with the name of the component that will make use of that service. See here.

+
+
+

A small OpenAPI example defining only services can be found below:

+
+
+
+
openapi: 3.0.0
+servers:
+  - url: 'https://localhost:8081/server/services/rest'
+    description: Just some data
+info:
+  title: Devon Example
+  description: Example of a API definition
+  version: 1.0.0
+  x-rootpackage: com.capgemini.spoc.openapi
+paths:
+  /salemanagement/v1/sale/{saleId}:
+    x-component: salemanagement
+    get:
+      operationId: findSale
+      parameters:
+        - name: saleId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Any
+  /salemanagement/v1/sale/{bla}:
+    x-component: salemanagement
+    get:
+      operationId: findSaleBla
+      parameters:
+        - name: bla
+          in: path
+          required: true
+          schema:
+            type: integer
+            format: int64
+            minimum: 10
+            maximum: 200
+      responses:
+        '200':
+          description: Any
+
+
+
+

Then, the increment that you need to select for generating those services is Crud devon4ng Service based Angular:

+
+
+

Service based generation

+
+
+
+Full example +
+

This example yaml file can be download from here.

+
+
+ + + + + +
+ + +As you will see on the file, "x-component" tags are obligatory if you want to generate components (entities). They have to be defined for each one. +In addition, you will find the global variable "x-rootpackage" that are explained <<,here>>. +
+
+
+
+
openapi: 3.0.0
+servers:
+  - url: 'https://localhost:8081/server/services/rest'
+    description: Just some data
+info:
+  title: Devon Example
+  description: Example of a API definition
+  version: 1.0.0
+  x-rootpackage: com.devonfw.angular.test
+paths:
+  /shopmanagement/v1/shop/{shopId}:
+    x-component: shopmanagement
+    get:
+      operationId: findShop
+      parameters:
+        - name: shopId
+          in: path
+          required: true
+          schema:
+            type: integer
+            format: int64
+            minimum: 0
+            maximum: 50
+      responses:
+        '200':
+          description: Any
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/Shop'
+            text/plain:
+              schema:
+                type: string
+        '404':
+          description: Not found
+  /salemanagement/v1/sale/{saleId}:
+    x-component: salemanagement
+    get:
+      operationId: findSale
+      parameters:
+        - name: saleId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Any
+  /salemanagement/v1/sale/:
+    x-component: salemanagement
+    post:
+      responses:
+        '200':
+          description: Any
+      requestBody:
+        $ref: '#/components/requestBodies/SaleData'
+      tags:
+       - searchCriteria
+  /shopmanagement/v1/shop/new:
+    x-component: shopmanagement
+    post:
+      responses:
+       '200':
+          description: Any
+      requestBody:
+        $ref: '#/components/requestBodies/ShopData'
+components:
+    schemas:
+        Shop:
+          x-component: shopmanagement
+          description: Entity definition of Shop
+          type: object
+          properties:
+            shopExample:
+              type: string
+              maxLength: 100
+              minLength: 5
+              uniqueItems: true
+            sales:
+              type: array # Many to One relationship
+              items:
+                $ref: '#/components/schemas/Sale'
+        Sale:
+          x-component: salemanagement
+          description: Entity definition of Sale
+          type: object
+          properties:
+            saleExample:
+              type: number
+              format: int64
+              maximum: 100
+              minimum: 0
+          required:
+            - saleExample
+
+    requestBodies:
+        ShopData:
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/Shop'
+          required: true
+        SaleData:
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/Sale'
+          required: true
+
+
+
+
+
+
+
+
+

CobiGen CLI

+
+ +
+

==CobiGen CLI

+
+
+

The command line interface (CLI) for CobiGen enables the generation of code using few commands. This feature allows us to decouple CobiGen from Eclipse.

+
+
+
Install CobiGen CLI
+
+

In order to install the CobiGen CLI you will need to use the devonfw/ide. In a console run devon cobigen.

+
+
+
+
Commands and options
+
+

Using the following command and option you will be able to customize your generation as follows:

+
+
+
    +
  • +

    cobigen, cg: Main entry point of the CLI. If no arguments are passed, man page will be printed.

    +
  • +
  • +

    [generate, g]: Command used for code generation.

    +
    +
      +
    • +

      InputGlob: Glob pattern of the input file or the whole path of the input file from which the code will be generated.

      +
    • +
    • +

      < --increment, -i > : Specifies an increment ID to be generated. You can also search increments by name and CobiGen will output the resultant list. If an exact match is found, code generation will happen.

      +
    • +
    • +

      < --template, -t > : specifies a template ID to be generated. You can also search templates by name and CobiGen will output the resultant list.

      +
    • +
    • +

      < --outputRootPath, -out >: The project file path in which you want to generate your code. If no output path is given, CobiGen will use the project of your input file.

      +
    • +
    +
    +
  • +
  • +

    [adapt-templates, a]: Generates a new templates folder next to the CobiGen CLI and stores its location inside a configuration file. After executing this command, the CLI will attempt to use the specified Templates folder.

    +
  • +
  • +

    < --verbose, -v > : Prints debug information, verbose log.

    +
  • +
  • +

    < --help, -h > : Prints man page.

    +
  • +
  • +

    < update, u> : This command compares the artificial pom plug-in versions with the latest versions available on Maven Central, and lets the user update any outdated plug-in versions.

    +
  • +
+
+
+
+
CLI Execution steps:
+
+

CobiGen CLI is installed inside your devonfw distribution. In order to execute it follow the next steps:

+
+
+
    +
  1. +

    Run console.bat, this will open a console.

    +
  2. +
  3. +

    Execute cobigen or cg and the man page should be printed.

    +
  4. +
  5. +

    Use a valid CobiGen input file and run cobigen generate <pathToInputFile>. Note: On the first execution of the CLI, CobiGen will download all the needed dependencies, please be patient.

    +
  6. +
  7. +

    A list of increments will be printed so that you can start the generation.

    +
  8. +
+
+
+

Preview of the man page for generate command:

+
+
+
+Generation path +
+
+
+
+
Examples
+
+

A selection of commands that you can use with the CLI:

+
+
+
    +
  • +

    cobigen generate foo\bar\EmployeeEntity.java: As no output path has been defined, CobiGen will try to find the pom.xml of the current project in order to set the generation root path.

    +
  • +
  • +

    cobigen generate foo\bar\*.java --out other\project: Will retrieve all the Java files on that input folder and generate the code on the path specified by --out.

    +
  • +
  • +

    cg g foo\bar\webServices.yml --increment TO: Performs a string search using TO and will print the closest increments like in the following image:

    +
  • +
+
+
+
+Generation path +
+
+
+
    +
  • +

    cg g foo\bar\webServices.yml -i 1,4,6: Directly generates increments with IDs 1, 4 and 6. CobiGen will not request you any other input.

    +
  • +
  • +

    cg a: Downloads the latest CobiGen_Templates and unpacks them next to the CLI. CobiGen will from now on use these unpacked Templates for generation.

    +
  • +
  • +

    cg a -cl C:\my\custom\location: Downloads the latest CobiGen_Templates and unpacks them in C:\my\custom\location. CobiGen will from now on use these unpacked Templates for generation.

    +
  • +
+
+
+
+
CLI update command
+
+

Example of Update Command :

+
+
+
+Generation path +
+
+
+

Select the plug-ins which you want to update like below :

+
+
+
+Generation path +
+
+
+
+
CLI custom templates
+
+

To use custom templates, it’s necessary to set up a custom configuration path as described here.

+
+
+
+
Troubleshooting
+
+

When generating code from a Java file, CobiGen makes use of Java reflection for generating templates. In order to do that, the CLI needs to find the compiled source code of your project.

+
+
+

If you find an error like Compiled class foo\bar\EmployeeEntity.java has not been found, it means you need to run mvn clean install on the input project so that a new target folder gets created with the needed compiled sources.

+
+
+
+
+
+

Maven Build Integration

+
+ +
+

==Maven Build Integration

+
+
+

For maven integration of CobiGen you can include the following build plugin into your build:

+
+
+
Build integration of CobiGen
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>com.devonfw.cobigen</groupId>
+      <artifactId>cobigen-maven-plugin</artifactId>
+      <version>VERSION-YOU-LIKE</version>
+      <executions>
+        <execution>
+          <id>cobigen-generate</id>
+          <phase>generate-resources</phase>
+          <goals>
+            <goal>generate</goal>
+          </goals>
+        </execution>
+      </executions>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+

Available goals

+
+
+
    +
  • +

    generate: Generates contents configured by the standard non-compiled configuration folder. Thus generation can be controlled/configured due to an location URI of the configuration and template or increment ids to be generated for a set of inputs.

    +
  • +
+
+
+

Available phases are all phases, which already provide compiled sources such that CobiGen can perform reflection on it. Thus possible phases are for example package, site.

+
+
+
Provide Template Set
+
+

For generation using the CobiGen maven plug-in, the CobiGen configuration can be provided in two different styles:

+
+
+
    +
  1. +

    By a configurationFolder, which should be available on the file system whenever you are running the generation. The value of configurationFolder should correspond to the maven file path syntax.

    +
    +
    Provide CobiGen configuration by configuration folder (file)
    +
    +
    <build>
    +  <plugins>
    +    <plugin>
    +      ...
    +      <configuration>
    +        <configurationFolder>cobigen-templates</configurationFolder>
    +      </configuration>
    +       ...
    +     </plugin>
    +  </plugins>
    +</build>
    +
    +
    +
  2. +
  3. +

    By maven dependency, whereas the maven dependency should stick on the same conventions as the configuration folder. This explicitly means that it should contain non-compiled resources as well as the context.xml on top-level.

    +
    +
    Provide CobiGen configuration by maven dependency (jar)
    +
    +
    <build>
    +  <plugins>
    +    <plugin>
    +      ...
    +      <dependencies>
    +        <dependency>
    +          <groupId>com.devonfw.cobigen</groupId>
    +          <artifactId>templates-XYZ</artifactId>
    +          <version>VERSION-YOU-LIKE</version>
    +        </dependency>
    +      </dependencies>
    +      ...
    +    </plugin>
    +  </plugins>
    +</build>
    +
    +
    +
    +

    We currently provide a generic deployed version of the templates on the devonfw-nexus for Register Factory (<artifactId>cobigen-templates-rf</artifactId>) and for the devonfw itself (<artifactId>cobigen-templates-devonfw</artifactId>).

    +
    +
  4. +
+
+
+
+
Build Configuration
+
+

Using the following configuration you will be able to customize your generation as follows:

+
+
+
    +
  • +

    <destinationRoot> specifies the root directory the relative destinationPath of CobiGen templates configuration should depend on. Default ${basedir}

    +
  • +
  • +

    <inputPackage> declares a package name to be used as input for batch generation. This refers directly to the CobiGen Java Plug-in container matchers of type package configuration.

    +
  • +
  • +

    <inputFile> declares a file to be used as input. The CobiGen maven plug-in will try to parse this file to get an appropriate input to be interpreted by any CobiGen plug-in.

    +
  • +
  • +

    <increment> specifies an increment ID to be generated. You can specify one single increment with content ALL to generate all increments matching the input(s).

    +
  • +
  • +

    <template> specifies a template ID to be generated. You can specify one single template with content ALL to generate all templates matching the input(s).

    +
  • +
  • +

    <forceOverride> specifies an overriding behavior, which enables non-mergeable resources to be completely rewritten by generated contents. For mergeable resources this flag indicates, that conflicting fragments during merge will be replaced by generated content. Default: false

    +
  • +
  • +

    <failOnNothingGenerated> specifies whether the build should fail if the execution does not generate anything.

    +
  • +
+
+
+
Example for a simple build configuration
+
+
<build>
+  <plugins>
+    <plugin>
+       ...
+      <configuration>
+        <destinationRoot>${basedir}</destinationRoot>
+        <inputPackages>
+          <inputPackage>package.to.be.used.as.input</inputPackage>
+        </inputPackages>
+        <inputFiles>
+          <inputFile>path/to/file/to/be/used/as/input</inputFile>
+        </inputFiles>
+        <increments>
+          <increment>IncrementID</increment>
+        </increments>
+        <templates>
+          <template>TemplateID</template>
+        </templates>
+        <forceOverride>false</forceOverride>
+      </configuration>
+        ...
+    </plugin>
+  </plugins>
+</build>
+
+
+
+
+
Plugin Injection Since v3
+
+

Since version 3.0.0, the plug-in mechanism has changed to support modular releases of the CobiGen plug-ins. Therefore, you need to add all plug-ins to be used for generation. Take the following example to get the idea:

+
+
+
Example of a full configuration including plugins
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>com.devonfw.cobigen</groupId>
+      <artifactId>cobigen-maven-plugin</artifactId>
+      <version>VERSION-YOU-LIKE</version>
+      <executions>
+        ...
+      </executions>
+      <configuration>
+        ...
+      </configuration>
+      <dependencies>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>templates-devon4j</artifactId>
+          <version>2.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>tempeng-freemarker</artifactId>
+          <version>1.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>javaplugin</artifactId>
+          <version>1.6.0</version>
+        </dependency>
+      </dependencies>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+
+
A full example
+
+
    +
  1. +

    A complete maven configuration example

    +
  2. +
+
+
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>com.devonfw.cobigen</groupId>
+      <artifactId>cobigen-maven-plugin</artifactId>
+      <version>6.0.0</version>
+      <executions>
+        <execution>
+          <id>generate</id>
+          <phase>package</phase>
+          <goals>
+            <goal>generate</goal>
+          </goals>
+        </execution>
+      </executions>
+      <configuration>
+        <inputFiles>
+          <inputFile>src/main/java/io/github/devonfw/cobigen/generator/dataaccess/api/InputEntity.java</inputFile>
+        </inputFiles>
+        <increments>
+          <increment>dataaccess_infrastructure</increment>
+          <increment>daos</increment>
+        </increments>
+        <failOnNothingGenerated>false</failOnNothingGenerated>
+      </configuration>
+      <dependencies>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>templates-devon4j</artifactId>
+          <version>2.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>tempeng-freemarker</artifactId>
+          <version>2.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>javaplugin</artifactId>
+          <version>1.6.0</version>
+        </dependency>
+      </dependencies>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+
+
+
+

Eclipse Integration

+
+ +
+

==Installation

+
+
+
+
+

Remark: CobiGen is preinstalled in the devonfw/devon-ide.

+
+
+
+
+
Preconditions
+
+
    +
  • +

    Eclipse 4.x

    +
  • +
  • +

    Java 7 Runtime (for starting eclipse with CobiGen). This is independent from the target version of your developed code.

    +
  • +
+
+
+
+
Installation steps
+
+
    +
  1. +

    Open the eclipse installation dialog
    +menu bar → HelpInstall new Software…​

    +
    +

    01 install new software

    +
    +
  2. +
  3. +

    Open CobiGen’s update site
    +Insert the update site of your interest into the field Work with and press Add …​
    +Unless you know what you are doing we recommend you install every plugin as shown in the picture below.

    +
    + +
    +
  4. +
  5. +

    Follow the installation wizard
    +Select CobiGen Eclipse Plug-inNextNext → accept the license → FinishOKYes

    +
  6. +
  7. +

    Once installed, a new menu entry named "CobiGen" will show up in the Package Explorer’s context menu. In the sub menu there will be the Generate…​ command, which may ask you to update the templates, and then you can start the generation wizard of CobiGen. You can adapt the templates by clicking on Adapt Templates which will give you the possibility to import the CobiGen_Templates automatically so that you can modify them.

    +
  8. +
  9. +

    Checkout (clone) your project’s templates folder or use the current templates released with CobiGen (https://github.com/devonfw/cobigen/tree/master/cobigen-templates) and then choose Import -> General -> Existing Projects into Workspace to import the templates into your workspace.

    +
  10. +
  11. +

    Now you can start generating. To get an introduction of CobiGen try the devon4j templates and work on the devon4j sample application. There you might want to start with Entity objects as a selection to run CobiGen with, which will give you a good overview of what CobiGen can be used for right out of the box in devon4j based development. If you need some more introduction in how to come up with your templates and increments, please be referred to the documentation of the context configuration and the templates configuration

    +
  12. +
+
+
+

Depending on your context configuration, the menu entry Generate…​ may be grayed out or not. See the documentation for more information about valid selections for generation.

+
+
+
+
Updating
+
+

In general updating CobiGen for eclipse is done via the update mechanism of eclipse directly, as shown on image below:

+
+
+

03 update software

+
+
+

Upgrading eclipse CobiGen plug-in to v3.0.0 needs some more attention of the user due to a changed plug-in architecture of CobiGen’s core module and the eclipse integration. Eventually, we were able to provide any plug-in of CobiGen separately as its own eclipse bundle (fragment), which is automatically discovered by the main CobiGen Eclipse plug-in after installation.

+
+ +
+

==Usage

+
+
+

CobiGen has two different generation modes depending on the input selected for generation. The first one is the simple mode, which will be started if the input contains only one input artifact, e.g. for Java an input artifact currently is a Java file. The second one is the batch mode, which will be started if the input contains multiple input artifacts, e.g. for Java this means a list of files. In general this means also that the batch mode might be started when selecting complex models as inputs, which contain multiple input artifacts. The latter scenario has so far only been covered in the research group.

+
+
+
+
Simple Mode
+
+

Selecting the menu entry Generate…​ the generation wizard will be opened:

+
+
+

generate wizard page1

+
+
+

The left side of the wizard shows all available increments, which can be selected to be generated. Increments are a container like concept encompassing multiple files to be generated, which should result in a semantically closed generation output. +On the right side of the wizard all files are shown, which might be effected by the generation - dependent on the increment selection of files on the left side. The type of modification of each file will be encoded into following color scheme if the files are selected for generation:

+
+
+
    +
  • +

    green: files, which are currently non-existent in the file system. These files will be created during generation

    +
  • +
  • +

    yellow: files, which are currently existent in the file system and which are configured to be merged with generated contents.

    +
  • +
  • +

    red: files, which are currently existent in the file system. These files will be overwritten if manually selected.

    +
  • +
  • +

    no color: files, which are currently existent in the file system. Additionally files, which were deselected and thus will be ignored during generation.

    +
  • +
+
+
+

Selecting an increment on the left side will initialize the selection of all shown files to be generated on the right side, whereas green and yellow categorized files will be selected initially. A manual modification of the pre-selection can be performed by switching to the customization tree using the Customize button on the right lower corner.

+
+
+
+
+

Optional: If you want to customize the generation object model of a Java input class, you might continue with the Next > button instead of finishing the generation wizard. The next generation wizard page is currently available for Java file inputs and lists all non-static fields of the input. deselecting entries will lead to an adapted object model for generation, such that deselected fields will be removed in the object model for generation. By default all fields will be included in the object model.

+
+
+
+
+

Using the Finish button, the generation will be performed. Finally, CobiGen runs the eclipse internal organize imports and format source code for all generated sources and modified sources. Thus it is possible that — especially for organize imports — a dialog opens if some types could not be determined automatically. This dialog can be easily closed by pressing on Continue. If the generation is finished, the Success! dialog will pop up.

+
+
+
+
Batch mode
+
+

If there are multiple input elements selected, e.g., Java files, CobiGen will be started in batch mode. For the generation wizard dialog this means, that the generation preview will be constrained to the first selected input element. It does not preview the generation for each element of the selection or of a complex input. The selection of the files to be generated will be generated for each input element analogously afterwards.

+
+
+

generate wizard page1 batch

+
+
+

Thus the color encoding differs also a little bit:

+
+
+
    +
  • +

    yellow: files, which are configured to be merged.

    +
  • +
  • +

    red: files, which are not configured with any merge strategy and thus will be created if the file does not exist or overwritten if the file already exists

    +
  • +
  • +

    no color: files, which will be ignored during generation

    +
  • +
+
+
+

Initially all possible files to be generated will be selected.

+
+
+
+
Health Check
+
+

To check whether CobiGen runs appropriately for the selected element(s) the user can perform a Health Check by activating the respective menu entry as shown below.

+
+
+

health check menu entry

+
+
+

The simple Health Check includes 3 checks. As long as any of these steps fails, the Generate menu entry is grayed out.

+
+
+

The first step is to check whether the generation configuration is available at all. If this check fails you will see the following message:

+
+
+

health check no templates

+
+
+

This indicates, that there is no Project named CobiGen_Templates available in the current workspace. To run CobiGen appropriately, it is necessary to have a configuration project named CobiGen_Templates imported into your workspace. For more information see chapter Eclipse Installation.

+
+
+

The second step is to check whether the template project includes a valid context.xml. If this check fails, you will see the following message:

+
+
+

health check invalid config

+
+
+

This means that either your context.xml

+
+
+
    +
  • +

    does not exist (or has another name)

    +
  • +
  • +

    or it is not a valid one in any released version of CobiGen

    +
  • +
  • +

    or there is simply no automatic routine of upgrading your context configuration to a valid state.

    +
  • +
+
+
+

If none of the above applies — i.e., there is a context.xml which can be successfully read by CobiGen — you might get the following information:

+
+
+

health check old context

+
+
+

This means that your context.xml is available with the correct name but it is outdated (belongs to an older CobiGen version). In this case just click on Upgrade Context Configuration to get the latest version.

+
+
+
+
+

Remark: This will create a backup of your current context configuration and converts your old configuration to the new format. The upgrade will remove all comments from the file, which could be retrieved later on again from the backup. +If the creation of the backup fails, you will be asked to continue or to abort.

+
+
+
+
+

The third step checks whether there are templates for the selected element(s). If this check fails, you will see the following message:

+
+
+

health check no matching triggers

+
+
+

This indicates that no trigger matching the current selection has been activated. The reason might be that your selection is faulty or that you imported the wrong template project (e.g. you are working on a devon4j project, but imported the Templates for the Register Factory). If you are a template developer, have a look at the trigger configuration and at the corresponding available plug-in implementations of triggers, like e.g., Java Plug-in or XML Plug-in.

+
+
+

If all the checks are passed you see the following message:

+
+
+

health check all OK

+
+
+

In this case everything is OK and the Generate button is not grayed out anymore so that you are able to trigger it and see the [simple-mode].

+
+
+

In addition to the basic check of the context configuration, you also have the opportunity to perform an Advanced Health Check, which will check all available templates configurations (templates.xml) of path-depth=1 from the configuration project root according to their compatibility.

+
+
+

health check advanced up to date

+
+
+

Analogous to the upgrade of the context configuration, the Advanced Health Check will also provide upgrade functionality for templates configurations if available.

+
+
+
+
Update Templates
+
+

Update Templates: Select an entity file and right-click it, then select CobiGen → Update Templates. After that, click Download; a "downloaded successfully" message will appear.

+
+
+
+
Adapt Templates
+
+

Adapt Templates: Select any file and right-click it, then select `CobiGen → Adapt Templates`. If the CobiGen templates jar is not available, the templates are downloaded automatically. If the CobiGen templates are already present, the existing templates in the workspace will be overridden. Click OK; a "templates imported successfully" message will appear.

+
+
+

Finally, please change the Java version of the project to 1.8 so that you don’t have any compilation errors.

+
+ +
+

==Logging

+
+
+

If you have any problem with the CobiGen eclipse plug-in, you might want to enable logging to provide more information for further problem analysis. This can be done easily by adding the logback.xml to the root of the CobiGen_templates configuration folder. The file should contain at least the following contents, whereas you should specify an absolute path to the target log file (at the TODO). If you are using the cobigen-templates project, you might have the contents already specified but partially commented.

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<!-- This file is for logback classic. The file contains the configuration for slf4j logging -->
+<configuration>
+    <appender name="FILE" class="ch.qos.logback.core.FileAppender">
+        <file><!-- TODO choose your log file location --></file>
+        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
+            <Pattern>%n%date %d{HH:mm:ss.SSS} [%thread] %-5level %logger - %msg%n
+            </Pattern>
+        </encoder>
+    </appender>
+    <root level="DEBUG">
+        <appender-ref ref="FILE" />
+    </root>
+</configuration>
+
+
+
+
+
+
+

How to

+
+ +
+

==Enterprise Architect client generation

+
+
+

We are going to show you how to generate source code from an Enterprise Architect diagram +using CobiGen.

+
+
+
Prerequisites
+
+

If CobiGen_Templates is not already imported into your workspace, follow the next steps:

+
+
+
    +
  • +

    Click on the Eclipse’s menu File > Import > Existing Projects into Workspace and browse to select the workspaces/main/CobiGen_Templates directory.

    +
  • +
  • +

    Click Finish and you should have the CobiGen_Templates as a new project in Eclipse’s workspace.

    +
  • +
+
+
+

Also verify that you have the latest templates of CobiGen. Your templates folder must contain the crud_java_ea_uml folder. +If you do not see it, please follow the next steps:

+
+
+
    +
  • +

    Download the accumulative patch.

    +
  • +
  • +

    Open the zip file and extract its content inside the root folder of your Devonfw distribution Devon-dist_2.4.0/

    +
  • +
+
+
+

After following those steps correctly, you should have the latest version of the templates ready to use.

+
+
+
+
Generation
+
+

In this tutorial, we are going to generate the entity infrastructure using as input a class diagram, modelled with Enterprise Architect (EA). First, create a class diagram, an example is shown on figure below:

+
+
+
+Eclipse CobiGen generation +
+
+
+

When you are finished, you will have to export that UML diagram into an XMI version 2.1 file. This is the file format that CobiGen understands. See below a figure showing this process:

+
+
+
+Eclipse CobiGen generation +
+
+
+

To open that window, see this tutorial.

+
+
+

After having that exported file, change its extension from xmi to xml. Then create an devon4j project and import the exported file into the core of your devon4j project.

+
+
+

Now we are going to start the generation, right-click your exported file and select CobiGen > Generate, finally select the entity infrastructure increment:

+
+
+
+Eclipse CobiGen generation +
+
+
+

After following all these steps, your generated files should be inside src\main\java folder. If you want an XMI example, you will find it here.

+
+ +
+

==Angular 8 Client Generation

+
+
+

The generation can create a full Angular 8 client using the devon4ng-application-template package located at workspaces/examples folder of the distribution. For more details about this package, please refer here.

+
+
+

Take into account that the TypeScript merging for CobiGen needs Node 6 or higher to be installed at your machine.

+
+
+ + + + + +
+ + +This is a short introduction to the Angular generation. For a deeper tutorial including the generation of the backend, we strongly recommend you to follow this document. +
+
+
+
+
Requisites
+
+

Install yarn globally:

+
+
+
+
npm install -g yarn
+
+
+
+
+
Angular 8 workspace
+
+

The output location of the generation can be defined editing the cobigen.properties file located at crud_angular_client_app/templates folder of the CobiGen_Templates project.

+
+
+
+`cobigen.properties file` +
+
+
+

By default, the output path would be into the devon4ng-application-template folder at the root of the devon4j project parent folder:

+
+
+
+
root/
+ |- devon4ng-application-template/
+ |- devon4j-project-parent/
+   |- core/
+   |- server/
+
+
+
+

However, this path can be changed, for example to src/main/client folder of the devon4j project:

+
+
+

relocate: ./src/main/client/${cwd}

+
+
+
+
root/
+ |- devon4j-project-parent/
+   |- core/
+      |- src
+        |- main
+          |- client
+   |- server/
+
+
+
+

Once the output path is chosen, copy the files of DEVON4NG-APPLICATION-TEMPLATE repository into this output path.

+
+
+
+
Install Node dependencies
+
+

Open a terminal into devon4ng-application-template copied and just run the command:

+
+
+
+
yarn
+
+
+
+

This will start the installation of all node packages needed by the project into the node_modules folder.

+
+
+
+
Generating
+
+

From an ETO object, right click, CobiGen → Generate will show the CobiGen wizard relative to client generation:

+
+
+
+CobiGen Client Generation Wizard +
+
+
+

Check all the increments relative to Angular:

+
+
+ + + + + +
+ + +
+

The Angular devon4j URL increment is only needed for the first generation. However, checking it again on the next generation will not cause any problem.

+
+
+
+
+

As we have done in other generations, we click Next to choose which fields to include in the generation, or simply click Finish to start the generation.

+
+
+
+CobiGen Client Generation Wizard 3 +
+
+
+
+
Routing
+
+

Due to the nature of the TypeScript merger, it is currently not possible to properly merge the array of path objects of the routings in the app.routing.ts file, so this modification should be done by hand in this file. However, the import related to the newly generated component is added.

+
+
+

This would be the generated app-routing.module file:

+
+
+
+
import { Routes, RouterModule } from '@angular/router';
+import { LoginComponent } from './login/login.component';
+import { AuthGuard } from './shared/security/auth-guard.service';
+import { InitialPageComponent } from './initial-page/initial-page.component';
+import { HomeComponent } from './home/home.component';
+import { SampleDataGridComponent } from './sampledata/sampledata-grid/sampledata-grid.component';
+//Routing array
+const appRoutes: Routes = [{
+    path: 'login',
+    component: LoginComponent
+}, {
+    path: 'home',
+    component: HomeComponent,
+    canActivate: [AuthGuard],
+    children: [{
+        path: '',
+        redirectTo: '/home/initialPage',
+        pathMatch: 'full',
+        canActivate: [AuthGuard]
+    }, {
+        path: 'initialPage',
+        component: InitialPageComponent,
+        canActivate: [AuthGuard]
+    }]
+}, {
+    path: '**',
+    redirectTo: '/login',
+    pathMatch: 'full'
+}];
+export const routing = RouterModule.forRoot(appRoutes);
+
+
+
+

Adding the following into the children object of home, will add into the side menu the entry for the component generated:

+
+
+
+
{
+    path: 'sampleData',
+    component: SampleDataGridComponent,
+    canActivate: [AuthGuard],
+}
+
+
+
+
+
import { Routes, RouterModule } from '@angular/router';
+import { LoginComponent } from './login/login.component';
+import { AuthGuard } from './shared/security/auth-guard.service';
+import { InitialPageComponent } from './initial-page/initial-page.component';
+import { HomeComponent } from './home/home.component';
+import { SampleDataGridComponent } from './sampledata/sampledata-grid/sampledata-grid.component';
+//Routing array
+const appRoutes: Routes = [{
+    path: 'login',
+    component: LoginComponent
+}, {
+    path: 'home',
+    component: HomeComponent,
+    canActivate: [AuthGuard],
+    children: [{
+        path: '',
+        redirectTo: '/home/initialPage',
+        pathMatch: 'full',
+        canActivate: [AuthGuard]
+    }, {
+        path: 'initialPage',
+        component: InitialPageComponent,
+        canActivate: [AuthGuard]
+    }, {
+        path: 'sampleData',
+        component: SampleDataGridComponent,
+        canActivate: [AuthGuard],
+    }]
+}, {
+    path: '**',
+    redirectTo: '/login',
+    pathMatch: 'full'
+}];
+export const routing = RouterModule.forRoot(appRoutes);
+
+
+
+
+`APP SideMenu` +
+
+
+
+
JWT Authentication
+
+

If you are using a backend server with JWT Authentication (there is a sample in workspaces/folder called sampleJwt) you have to specify the Angular application to use this kind of authentication.

+
+
+

By default the variable is set to CSRF but you can change it to JWT by going to the Environment.ts and setting security: 'jwt'.

+
+
+
+
Running
+
+

First of all, run your devon4j java server by right clicking over SpringBootApp.java Run As → Java Application. This will start to run the SpringBoot server. Once you see the Started SpringBoot in XX seconds, the backend is running.

+
+
+
+Starting `SpringBoot` +
+
+
+

Once the server is running, open a Devon console at the output directory defined previously and run:

+
+
+
+
ng serve --open
+
+
+
+

This will run the Angular 8 application at:

+
+
+
+
http://localhost:4200
+
+
+
+
+Running Angular 8 app +
+
+
+

Once finished, the browser will open automatically at the previous localhost URL showing the Angular 8 application, using the credentials set at the devon4j java server you will be able to access.

+
+ +
+

==Ionic client generation

+
+
+

We are going to show you how to generate a CRUD Ionic application from an ETO +using CobiGen.

+
+
+ + + + + +
+ + +This is a short introduction to the Ionic generation. For a deeper tutorial including the generation of the backend, we strongly recommend you to follow this document. +
+
+
+
+
Prerequisites
+
+

Before starting, make sure you already have in your computer:

+
+
+
    +
  • +

    Ionic: by following the steps defined on that page. +It includes installing:

    +
    +
      +
    • +

      NodeJS: We have to use "NPM" for downloading packages.

      +
    • +
    • +

      Ionic CLI.

      +
    • +
    +
    +
  • +
  • +

    Capacitor: Necessary to access to native device features.

    +
  • +
+
+
+

If CobiGen_Templates are not already downloaded, follow the next steps:

+
+
+
    +
  • +

    Right click on any file of your workspace CobiGen > Update Templates and now you are able to start the generation.

    +
  • +
  • +

    If you want to adapt them, click Adapt Templates and you should have the CobiGen_Templates as a new project in Eclipse’s workspace.

    +
  • +
+
+
+

After following those steps correctly, you should have the latest version of the templates ready to use.

+
+
+
+
Generation
+
+

We are going to generate the CRUD into a sample application that we have developed for +testing this functionality. It is present on your workspaces/examples folder (devon4ng-ionic-application-template). If you do not see it, you can clone or download it from here.

+
+
+

After having that sample app, please create a devon4j project and then start implementing the ETO: You will find an example here.

+
+
+

As you can see, TableEto contains 3 attributes: 2 of them are Long and the third one TableState is an enum that you will find +here. +The Ionic generation works fine for any Java primitive attribute (Strings, floats, chars, boolean…​) and enums. However, if you want to use your own objects, you should +override the toString() method, as explained here.

+
+
+

The attributes explained above will be used for generating a page that shows a list. Each item of that list +will show the values of those attributes.

+
+
+

For generating the files:

+
+
+
    +
  • +

    Right click your ETO file and click on CobiGen > Generate as shown on the figure below.

    +
  • +
+
+
+
+Eclipse CobiGen generation +
+
+
+
    +
  • +

    Select the Ionic increments for generating as shown below. Increments group a set of templates for generating +different projects.

    +
    +
      +
    1. +

      Ionic List used for generating the page containing the list.

      +
    2. +
    3. +

      Ionic devon4ng environments is for stating the server path.

      +
    4. +
    5. +

      Ionic i18n used for generating the different language translations for the `translationService` (currently English and Spanish).

      +
    6. +
    7. +

      Ionic routing adds an app-routing.module.ts file to allow navigation similar to the one available in Angular.

      +
    8. +
    9. +

      Ionic theme generates the variables.scss file which contains variables to style the application.

      +
    10. +
    +
    +
  • +
+
+
+
+CobiGen Ionic Wizard +
+
+
+ + + + + +
+ + +By default, the generated files will be placed inside "devon4ng-ionic-application-template", next to the root of your project’s folder. +See the image below to know where they are generated. For changing the generation path and the name of the application go to CobiGen_Templates/crud_ionic_client_app/cobigen.properties. +
+
+
+
+Generation path +
+
+
+

Now that we have generated the files, lets start testing them:

+
+
+
    +
  • +

    First change the SERVER_URL of your application. For doing that, modify src/environments/environments.ts, also modify src/environments/environments.android.ts (android) and src/environments/environments.prod.ts (production) if you want to test in different environments.

    +
  • +
  • +

    Check that there are no duplicated imports. Sometimes there are duplicated imports in src/app/app.module.ts. +This happens because the merger of CobiGen prefers to duplicate rather than to delete.

    +
  • +
  • +

    Run npm install to install all the required dependencies.

    +
  • +
  • +

    Run `ionic serve` on your console.

    +
  • +
+
+
+

After following all these steps your application should start. However, remember that you will need your server to be running to access the list page.

+
+
+
+
Running it on Android
+
+

To run the application in an android emulated device, it is necessary to have Android Studio and Android SDK. After its installation, the following commands have to be run on your console:

+
+
+
    +
  • +

    npx cap init "name-for-the-app (between quotes)" "id-for-the-app (between quotes)"

    +
  • +
  • +

    ionic build --configuration=android. To use this command, you must add an android build configuration at angular.json

    +
  • +
+
+
+
+
    "build": {
+      ...
+      "configurations": {
+        ...
+        "android": {
+          "fileReplacements": [
+            {
+              "replace": "src/environments/environment.ts",
+              "with": "src/environments/environment.android.ts"
+            }
+          ]
+        },
+      }
+    }
+
+
+
+
    +
  • +

    npx cap add android

    +
  • +
  • +

    npx cap copy

    +
  • +
  • +

    npx cap open android

    +
  • +
+
+
+

The last steps are done in Android Studio: make the project, make the app, build an APK and run it on a device.

+
+
+
+Click on make project +
+
+
+
+click on make app +
+
+
+
+click on build APK +
+
+
+
+click on running device +
+
+ +
+

==Implementing a new Plug-in

+
+
+

New plug-ins can implement an input reader, a merger, a matcher, a trigger interpreter, and/or a template engine as explained here.

+
+
+ + + + + +
+ + +
+

It is discouraged to have cobigen-core dependencies at runtime, except for cobigen-core-api which definitely must be present.

+
+
+
+
+
+
Plugin Activator
+
+

Each plug-in has to have a plug-in activator class implementing the interface GeneratorPluginActivator from the core-api. This class will be used to load the plug-in using the PluginRegistry as explained here. This class implements two methods:

+
+
+
    +
  1. +

    bindMerger() → returns a mapping of merge strategies and their implementations to be registered.

    +
  2. +
  3. +

    bindTriggerInterpreter()→ returns the trigger interpreters to be provided by this plug-in.

    +
  4. +
+
+
+

Both methods create and register instances of mergers and trigger interpreters to be provided by the new plug-in.

+
+
+
+
Adding Trigger Interpreter
+
+

The trigger interpreter has to implement the TriggerInterpreter interface from the core. The trigger interpreter defines the type for the new plugin and creates new InputReader and new Matcher objects.

+
+
+
+
Adding Input Reader
+
+

The input reader is responsible for reading the input object and parsing it into + FreeMarker models. The input reader must be implemented for the type of the + input file. If there is any existing plugin that has the same file type as input, + there will be no need to add a new input reader to the new plug-in.

+
+
+
Input Reader Interface
+
+

The interface needed to add a new input reader is defined at the core. Each new +sub plug-in must implement this interface if an input reader is needed for it.

+
+
+

The interface implements the basic methods that an input reader must have, +but if additional methods are required, the developer must add a new interface +that extends the original interface `InputReader.java` from the core-api +and implement that on the sub plug-in.

+
+
+

The methods to be implemented by the input reader of the new sub plugin are:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
MethodReturn TypeDescription

isValidInput(Object input)

boolean

This function will be called if matching triggers or matching templates should be retrieved for a given input object.

createModel(Object input)

Map<String, Object>

This function should create the FreeMarker object model from the given input.

combinesMultipleInputObjects(Object input)

boolean

States whether the given input object combines multiple input objects to be used for generation.

getInputObjects(Object input, Charset inputCharset)

List<Object>

Will return the set of combined input objects if the given input combines multiple input objects.

getTemplateMethods(Object input)

Map<String, Object>

This method returns available template methods from the plugins as Map. If the plugin which corresponds to the input does not provide any template methods an empty Map will be returned.

getInputObjectsRecursively(Object input, Charset inputCharset)

List<Object>

Will return the set of combined input objects if the given input combines multiple input objects.

+
+
+
Model Constants
+
+

The Input reader will create a model for FreeMarker. A FreeMarker model must +have variables to use them at the .ftl template file. Refer to Java Model to see the FreeMarker model example for java input files.

+
+
+
+
Registering the Input Reader
+
+

The input reader is an object that can be retrieved using the correspondent get + method of the trigger interpreter object. The trigger interpreter object is + loaded at the eclipse plug-in using the load plug-in method explained + here. + That way, when the core needs the input reader, only needs to call that getInputReader method.

+
+
+
+
+
Adding Matcher
+
+

The matcher implements the MatcherInterpreter interface from the core-api. +It should be implemented to provide a new input matcher. Input matchers are +defined as part of a trigger and provide the ability to restrict specific +inputs to a set of templates. +This restriction is implemented with a MatcherType enum.

+
+
+

E.g JavaPlugin

+
+
+
+
private enum MatcherType {
+    /** Full Qualified Name Matching */
+    FQN,
+    /** Package Name Matching */
+    PACKAGE,
+    /** Expression interpretation */
+    EXPRESSION
+}
+
+
+
+

Furthermore, matchers may provide several variable assignments, which might be +dependent on any information of the matched input and thus should be resolvable +by the defined matcher.

+
+
+

E.g JavaPlugin

+
+
+
+
private enum VariableType {
+    /** Constant variable assignment */
+    CONSTANT,
+    /** Regular expression group assignment */
+    REGEX
+}
+
+
+
+
+
Adding Merger
+
+

The merger is responsible for merging the new output with the +existing data in the file if it already exists. It must implement the Merger +interface from the core-api. +The implementation of the Merge interface must override the following methods:

+
+ +++++ + + + + + + + + + + + + + + + + + + + +
MethodReturn TypeDescription

getType()

String

Returns the type, this merger should handle.

merge(File base, String patch, String targetCharset)

String

Merges the patch into the base file.

+
+

It is important to know that any exception caused by the merger must be thrown as a MergeException from the core-api so that the eclipse-plugin can handle it.

+
+
+
+
Changes since Eclipse / Maven 3.x
+
+

Since version 3.x the Eclipse and Maven plugins of CobiGen utilize the Java ServiceLoader mechanic to find and register plugins at runtime. To enable a new plugin to be discovered by this mechanic the following steps are needed:

+
+
+
    +
  • +

    create the file META-INF/services/com.devonfw.cobigen.api.extension.GeneratorPluginActivator containing just the full qualified name of the class implementing the GeneratorPluginActivator interface, if the plugin provides a Merger and/or a TriggerInterpreter

    +
  • +
  • +

    create the file META-INF/services/com.devonfw.cobigen.api.extension.TextTemplateEngine containing just the full qualified name of the class implementing the TextTemplateEngine interface, if provided by the plugin

    +
  • +
  • +

    include META-INF into the target bundle (i.e. the folder META-INF has to be present in the target jar file)

    +
  • +
+
+
+
+
Example: Java Plugin
+
+

The java plugin provides both a Merger and a TriggerInterpreter. It contains therefore a com.devonfw.cobigen.api.extension.GeneratorPluginActivator file with the following content:

+
+
+
+
com.devonfw.cobigen.javaplugin.JavaPluginActivator
+
+
+
+

This makes the JavaPluginActivator class discoverable by the ServiceLoader at runtime.

+
+
+
+
+
    +
  • +

    to properly include the plugin into the current system and use existing infrastructure, you need to add the plugin as a module in /cobigen/pom.xml (in case of a Merger/TriggerInterpreter providing plugin) and declare that as the plugin’s parent in it’s own pom.xml via

    +
  • +
+
+
+
+
<parent>
+    <groupId>com.devonfw</groupId>
+    <artifactId>cobigen-parent</artifactId>
+    <version>dev-SNAPSHOT</version>
+</parent>
+
+
+
+

or /cobigen/cobigen-templateengines/pom.xml (in case of a Merger/TriggerInterpreter providing plugin) and declare that as the plugin’s parent in it’s own pom.xml via

+
+
+
+
<parent>
+    <groupId>com.devonfw</groupId>
+    <artifactId>cobigen-tempeng-parent</artifactId>
+    <version>dev-SNAPSHOT</version>
+</parent>
+
+
+
+

If the plugin provides both just use the /cobigen/pom.xml.

+
+
+
    +
  • +

    The dependencies of the plugin are included in the bundle

    +
  • +
  • +

    To make the plugin available to the Eclipse plugin it must be included into the current compositeContent.xml and compositeArtifacts.xml files. Both files are located in https://github.com/devonfw/cobigen/tree/gh-pages/updatesite/{test|stable}. To do so, add an <child> entry to the <children> tag in both files and adapt the size attribute to match the new number of references. The location attribute of the new <child> tag needs to be the artifact id of the plugins pom.xml.

    +
  • +
+
+
+
+
Example: Java Plugin
+
+

In case of the Java plugin, the entry is

+
+
+
+
<child location="cobigen-javaplugin"/>
+
+
+
+
+
+
Deployment
+
+

If you want to create a test release of eclipse you need to run the command

+
+
+
+
sh deploy.sh
+
+
+
+

on the cloned CobiGen repository while making sure, that your current version of CobiGen cloned is a snapshot version. This will automatically be detected by the deploy script.

+
+ +
+

==Introduction to CobiGen external plug-ins

+
+
+

Since September of 2019, a major change on CobiGen has taken place. CobiGen is written in Java code and previously, it was very hard for developers to create new plug-ins in other languages.

+
+
+

Creating a new plug-in means:

+
+
+
    +
  • +

    Being able to parse a file in that language.

    +
  • +
  • +

    Create a human readable model that can be used to generate templates (by retrieving properties from the model).

    +
  • +
  • +

    Enable merging files, so that user’s code does not get removed.

    +
  • +
+
+
+

For the Java plug-in it was relatively easy. As you are inside the Java world, you can use multiple utilities or libraries in order to get the AST or to merge Java code. With this new feature, we wanted that behaviour to be possible for any programming language.

+
+
+
+
+
General intuition
+
+

Below you will find a very high level description of how CobiGen worked in previous versions:

+
+
+
+Old CobiGen +
+
+
+

Basically, when a new input file was sent to CobiGen, it called the input reader to create a model of it (see here an example of a model). That model was sent to the template engine.

+
+
+

Afterwards, the template engine generated a new file which had to be merged with the original one. All this code was implemented in Java.

+
+
+

On the new version, we have implemented a handler (ExternalProcessHandler) which connects through TCP/IP connection to a server (normally on localhost:5000). This server can be implemented in any language (.Net, Node.js, Python…​) it just needs to implement a REST API defined here. The most important services are the input reading and merging:

+
+
+
+New CobiGen +
+
+
+

CobiGen acts as a client that sends requests to the server in order to read the input file and create a model. The model is returned to the template engine so that it generates a new file. Finally, it is sent back to get merged with the original file.

+
+
+
+
How to create new external plug-in
+
+

The creation of a new plug-in consists mainly in three steps:

+
+
+
    +
  • +

    Creation of the server (external process).

    +
  • +
  • +

    Creation of a CobiGen plug-in.

    +
  • +
  • +

    Creation of templates.

    +
  • +
+
+
+
Server (external process)
+
+

The server can be programmed in any language that is able to implement REST service endpoints. The API that needs to be implemented is defined with this contract. You can paste the content to https://editor.swagger.io/ for a better look.

+
+
+

We have already created a NestJS server that implements the API defined above. You can find the code here which you can use as an example.

+
+
+

As you can see, the endpoints have the following naming convention: processmanagement/todoplugin/nameOfService where you will have to change todo to your plug-in name (e.g. rustplugin, pyplugin, goplugin…​)

+
+
+

When implementing service getInputModel which returns a model from the input file there are only two restrictions:

+
+
+
    +
  • +

    A path key must be added. Its value can be the full path of the input file or just the file name. It is needed because in CobiGen there is a batch mode, in which you can have multiple input objects inside the same input file. You do not need to worry about batch mode for now.

    +
  • +
  • +

    On the root of your model, for each found key that is an object (defined with brackets [{}]), CobiGen will try to use it as an input object. For example, this could be a valid model:

    +
    +
    +
    {
    +  "path": "example/path/employee.entity.ts"
    +  "classes": [
    +    {
    +      "identifier": "Employee",
    +      "modifiers": [
    +        "export"
    +      ],
    +      "decorators": [
    +        {
    +          "identifier": {
    +            "name": "Entity",
    +            "module": "typeorm"
    +          },
    +          "isCallExpression": true
    +        }
    +      ],
    +      "properties": [
    +        {
    +          "identifier": "id",
    +    ...
    +    ...
    +    ...
    +    }]
    +    "interfaces": [{
    +        ...
    +    }]
    +}
    +
    +
    +
  • +
+
+
+

For this model, CobiGen would use as input objects all the classes and interfaces defined. On the templates we would be able to do model.classes[0].identifier to get the class name. These input objects depend on the language, therefore you can use any key.

+
+
+

In order to test the server, you will have to deploy it on your local machine (localhost), default port is 5000. If that port is already in use, you can deploy it on higher port values (5001, 5002…​). Nevertheless, we explain later the testing process as you need to complete the next step before.

+
+
+ + + + + +
+ + +Your server must accept one argument when running it. The argument will be the port number (as an integer). This will be used for CobiGen in order to handle blocked ports when deploying your server. Check this code to see how we implemented that argument on our NestJS server. +
+
+
+
+
CobiGen plug-in
+
+

You will have to create a new CobiGen plug-in that connects to the server. But do not worry, you will not have to implement anything new. We have a CobiGen plug-in template available, the only changes needed are renaming files and setting some properties on the pom.xml. Please follow these steps:

+
+
+
    +
  • +

    Get the CobiGen plug-in template from here. It is a template repository (new GitHub feature), so you can click on "Use this template" as shown below:

    +
    +
    +Plugin CobiGen template +
    +
    +
  • +
  • +

    Name your repo as cobigen-name-plugin where name can be python, rust, go…​ In our case we will create a nest plug-in. It will create a repo with only one commit which contains all the needed files.

    +
  • +
  • +

    Clone your just created repo and import folder cobigen-todoplugin as a Maven project on any Java IDE, though we recommend you devonfw ;)

    +
    +
    +Import plugin +
    +
    +
  • +
  • +

    Rename all the todoplugin folders, files and class names to nameplugin. In our case nestplugin. In Eclipse you can easily rename by right clicking and then refactor → rename:

    +
  • +
+
+
+
+Rename plugin +
+
+
+ + + + + +
+ + +We recommend you to select all the checkboxes +
+
+
+
+Rename checkbox +
+
+
+
    +
  • +

    Remember to change in src/main/java and src/test/java all the package, files and class names to use your plug-in name. The final result would be:

    +
    +
    +Package structure +
    +
    +
  • +
  • +

    Now we just need to change some strings, this is needed for CobiGen to register all the different plugins (they need unique names). In class TodoPluginActivator (in our case NestPluginActivator), change all the todo to your plug-in name. See below the 3 strings that need to be changed:

    +
    +
    +Plugin activator +
    +
    +
  • +
  • +

    Finally, we will change some properties from the pom.xml of the project. These properties define the server (external process) that is going to be used:

    +
    +
      +
    1. +

      Inside pom.xml, press Ctrl + F to perform a find and replace operation. Replace all todo with your plugin name:

      +
      +
      +Pom properties +
      +
      +
    2. +
    3. +

      We are going to explain the server properties:

      +
      +
        +
      1. +

        artifactId: This is the name of your plug-in, that will be used for a future release on Maven Central.

        +
      2. +
      3. +

        plugin.name: does not need to be changed as it uses the property from the artifactId. When connecting to the server, it will send a request to localhost:5000/{plugin.name}plugin/isConnectionReady, that is why it is important to use an unique name for the plug-in.

        +
      4. +
      5. +

        server.name: This defines how the server executable (.exe) file will be named. This .exe file contains all the needed resources for deploying the server. You can use any name you want.

        +
      6. +
      7. +

        server.version: You will specify here the server version that needs to be used. The .exe file will be named as {server.name}-{server.version}.exe.

        +
      8. +
      9. +

        server.url: This will define from where to download the server. We really recommend you using NPM which is a package manager we know it works well. We explain here how to release the server on NPM. This will download the .exe file for Windows.

        +
      10. +
      11. +

        server.url.linux: Same as before, but this should download the .exe file for Linux systems. If you do not want to implement a Linux version of the plug-in, just use the same URL from Windows or MacOS.

        +
      12. +
      13. +

        server.url.macos: Same as before, but this should download the .exe file for MacOS systems. If you do not want to implement a MacOS version of the plug-in, just use the same URL from Linux or Windows.

        +
      14. +
      +
      +
    4. +
    +
    +
  • +
+
+
+
+
+
Testing phase
+
+

Now that you have finished with the implementation of the server and the creation of a new CobiGen plug-in, we are going to explain how you can test that everything works fine:

+
+
+
    +
  1. +

    Deploy the server on port 5000.

    +
  2. +
  3. +

    Run mvn clean test on the CobiGen-plugin or run the JUnit tests directly on Eclipse.

    +
    +
      +
    1. +

      If the server and the plug-in are working properly, some tests will pass and other will fail (we need to tweak them).

      +
    2. +
    3. +

      If every test fails, something is wrong in your code.

      +
    4. +
    +
    +
  4. +
  5. +

    In order to fix the failing tests, go to src/test/java. The failing tests make use of sample input files that we added in sake of example:

    +
    +
    +Pom properties +
    +
    +
  6. +
+
+
+

Replace those files (on src/test/resources/testdata/unittest/files/…​) with the correct input files for your server.

+
+
+
+
Releasing
+
+

Now that you have already tested that everything works fine, we are going to explain how to release the server and the plug-in.

+
+
+
Release the server
+
+

We are going to use NPM to store the executable of our server. Even though NPM is a package manager for JavaScript, it can be used for our purpose.

+
+
+
    +
  • +

    Get the CobiGen server template from here. It is a template repository (new GitHub feature), so you can click on "Use this template" as shown below:

    +
    +
    +Server CobiGen template +
    +
    +
  • +
  • +

    Name your repo as cobigen-name-server where name can be python, rust, go…​ In our case we will create a nest plug-in. It will create a repo with only one commit which contains all the needed files.

    +
  • +
  • +

    Clone your just created repo and go to folder cobigen-todo-server. It will just contain two files: ExternalProcessContract.yml is the OpenAPI definition which you can modify with your own server definition (this step is optional), and package.json is a file needed for NPM in order to define where to publish this package:

    +
    +
    +
    {
    +  "name": "@devonfw/cobigen-todo-server",
    +  "version": "1.0.0",
    +  "description": "Todo server to implement the input reader and merger for CobiGen",
    +  "author": "CobiGen Team",
    +  "license": "Apache"
    +}
    +
    +
    +
  • +
+
+
+

Those are the default properties. This would push a new package cobigen-todo-server on the devonfw organization, with version 1.0.0. We have no restrictions here, you can use any organization, though we always recommend devonfw.

+
+
+ + + + + +
+ + +Remember to change all the todo to your server name. +
+
+
+
    +
  • +

    Add your executable file into the cobigen-todo-server folder, just like below. As we said previously, this .exe is the server ready to be deployed.

    +
    +
    +
    cobigen-template-server/
    + |- cobigen-todo-server/
    +   |- ExternalProcessContract.yml
    +   |- package.json
    +   |- todoserver-1.0.0.exe
    +
    +
    +
  • +
  • +

    Finally, we have to publish to NPM. If you have never done it, you can follow this tutorial. Basically you need to login into NPM and run:

    +
    +
    +
    cd cobigen-todo-server/
    +npm publish --access=public
    +
    +
    +
  • +
+
+
+ + + + + +
+ + +To release Linux and MacOS versions of your plug-in, just add the suffix into the package name (e.g. @devonfw/cobigen-todo-server-linux) +
+
+
+

That’s it! You have published the first version of your server. Now you just need to modify the properties defined on the pom of your CobiGen plug-in. Please see next section for more information.

+
+
+
+
Releasing CobiGen plug-in
+
+
    +
  • +

    Change the pom.xml to define all the properties. You can see below a final example for nest:

    +
    +
    +
    ...
    +   <groupId>com.devonfw.cobigen</groupId>
    +   <artifactId>nestplugin</artifactId>
    +   <name>CobiGen - Nest Plug-in</name>
    +   <version>1.0.0</version>
    +   <packaging>jar</packaging>
    +   <description>CobiGen - nest Plug-in</description>
    +
    +   <properties>
    +      <!-- External server properties -->
    +      <plugin.name>${project.artifactId}</plugin.name>
    +      <server.name>nestserver</server.name>
    +      <server.version>1.0.0</server.version>
    +      <server.url>https\://registry.npmjs.org/@devonfw/cobigen-nest-server/-/cobigen-nest-server-${server.version}.tgz</server.url>
    +      <server.url.linux>https\://registry.npmjs.org/@devonfw/cobigen-nest-server-linux/-/cobigen-nest-server-linux-${server.version}.tgz</server.url.linux>
    +      <server.url.macos>https\://registry.npmjs.org/@devonfw/cobigen-nest-server-macos/-/cobigen-nest-server-macos-${server.version}.tgz</server.url.macos>
    +...
    +
    +
    +
  • +
  • +

    Deploy to Maven Central.

    +
  • +
+
+
+
+
+
Templates creation
+
+

After following above steps, we now have a CobiGen plug-in that connects to a server (external process) which reads your input files, returns a model and is able to merge files.

+
+
+

However, we need a key component for our plug-in to be useful. We need to define templates:

+
+
+
    +
  • +

    Fork our CobiGen main repository, from here and clone it into your PC. Stay in the master branch and import into your IDE cobigen-templates\templates-devon4j. Set the Java version of the project to 1.8 if needed.

    +
  • +
  • +

    Create a new folder on src/main/templates, this will contain all your templates. You can use any name, but please use underscores as separators. In our case, we created a folder crud_typescript_angular_client_app to generate an Angular client from a TypeORM entity (NodeJS entity).

    +
    +
    +Templates project +
    +
    +
  • +
  • +

    Inside your folder, create a templates folder. As you can see below, the folder structure of the generated files starts here (the sources). Also we need a configuration file templates.xml that should be on the same level as templates/ folder. For now, copy and paste a templates.xml file from any of the templates folder.

    +
    +
    +Templates project +
    +
    +
  • +
  • +

    Start creating your own templates. Our default templates language is Freemarker, but you can also use Velocity. Add the extension to the file (.ftl) and start developing templates! You can find useful documentation here.

    +
  • +
  • +

    After creating all the templates, you need to modify context.xml which is located on the root of src/main/templates. There you need to define a trigger, which is used for CobiGen to know when to trigger a plug-in. I recommend you to copy and paste the following trigger:

    +
    +
    +
      <trigger id="crud_typescript_angular_client_app" type="nest" templateFolder="crud_typescript_angular_client_app">
    +    <matcher type="fqn" value="([^\.]+).entity.ts">
    +      <variableAssignment type="regex" key="entityName" value="1"/>
    +      <variableAssignment type="regex" key="component" value="1"/>
    +      <variableAssignment type="constant" key="domain" value="demo"/>
    +    </matcher>
    +  </trigger>
    +
    +
    +
  • +
  • +

    Change templateFolder to your templates folder name. id you can use any, but it is recommendable to use the same as the template folder name. type is the TRIGGER_TYPE we defined above on the NestPluginActivator class. On matcher just change the value: ([^\.]+).entity.ts means that we will only accept input files that contain anyString.entity.ts. This improves usability, so that users only generate using the correct input files. You will find more info about variableAssignment here.

    +
  • +
  • +

    Finally, it is time to configure templates.xml. It is needed for organizing templates into increments; please take a look at this documentation.

    +
  • +
+
+
+
Testing templates
+
+
    +
  • +

    When you have finished your templates you will like to test them. On the templates-devon4j pom.xml remove the SNAPSHOT from the version (in our case the version will be 3.1.8). Run mvn clean install -DskipTests on the project. We skip tests because you need special permissions to download artifacts from our Nexus. Remember the version that has just been installed:

    +
    +
    +Templates snapshot version +
    +
    +
  • +
+
+
+ + + + + +
+ + +We always recommend using the devonfw console, which already contains a working Maven version. +
+
+
+
    +
  • +

    Now we have your last version of the templates ready to be used. We need to use that latest version in CobiGen. We will use the CobiGen CLI that you will find in your cloned repo, at cobigen-cli/cli. Import the project into your IDE.

    +
  • +
  • +

    Inside the project, go to src/main/resources/pom.xml. This pom.xml is used on runtime in order to install all the CobiGen plug-ins and templates. Add there your latest templates version and the previously created plug-in:

    +
    +
    +CLI pom +
    +
    +
  • +
  • +

    Afterwards, run mvn clean install -DskipTests and CobiGen will get your plug-ins. Now you have three options to test templates:

    +
    +
      +
    1. +

      Using Eclipse run as:

      +
      +
        +
      1. +

        Inside Eclipse, you can run the CobiGen-CLI as a Java application. Right click class CobiGenCLI.java → run as → run configurations…​ and create a new Java application as shown below:

        +
        +
        +Create configuration +
        +
        +
      2. +
      3. +

        That will create a CobiGenCLI configuration where we can set arguments to the CLI. Let’s first begin with showing the CLI version, which should print a list of all plug-ins, including ours.

        +
        +
        +Run version +
        +
        +
        +
        +
         ...
        + name:= propertyplugin version = 2.0.0
        + name:= jsonplugin version = 2.0.0
        + name:= templates-devon4j version = 3.1.8
        + name:= nestplugin version = 1.0.0
        + ...
        +
        +
        +
      4. +
      5. +

        If that worked, now you can send any arguments to the CLI in order to generate with your templates. Please follow this guide that explains all the CLI commands.

        +
      6. +
      +
      +
    2. +
    3. +

      Modify the already present JUnit tests on the CLI project: They test the generation of templates from multiple plug-ins, you can add your own tests and input files.

      +
    4. +
    5. +

      Use the CLI jar to execute commands:

      +
      +
        +
      1. +

        The mvn clean install -DskipTests command will have created a Cobigen.jar inside your target folder (cobigen-cli/cli/target). Open the jar with any unzipper and extract to the current location class-loader-agent.jar, cobigen.bat and cg.bat:

        +
        +
        +Extract files +
        +
        +
      2. +
      3. +

        Now you can run any CobiGen CLI commands using a console. This guide explains all the CLI commands.

        +
        +
        +Run CLI +
        +
        +
      4. +
      +
      +
    6. +
    +
    +
  • +
+
+ +
+
+
+
devon4net CobiGen Guide
+
+
Overview
+
+

In this guide we will explain how to generate a new WebAPI project from an OpenAPI 3.0.0 specification. This means that we are going to use a “contract first” strategy. This is going to be possible due to these type of files that contain all the information about entities, operations, etc…

+
+
+

In order to make it work we are using CobiGen, a powerful tool for generating source code. CobiGen allows users to generate all the structure and code of the components, helping to save a lot of time otherwise wasted on repetitive tasks.

+
+
+
+
Getting things ready
+
+devonfw-IDE +
+

First, we will install the devonfw-IDE. It is a tool that will setup your IDE within minutes. Please follow the install guide here.

+
+
+
+devon4net Templates +
+

We are going to use the template of devon4net as a base to generate all the code, so what we have to do now is to download said template using the following steps.

+
+
+

First of all you have to set up all the environment for .NET, you can do this using the following tutorial. Next we are going to create a new folder where we want to have the WebAPI project, lastly we are going to open the terminal there.

+
+
+

Type the following:

+
+
+
+
`dotnet new -i Devon4Net.WebAPI.Template`
+
+
+
+

and then:

+
+
+
+
`dotnet new Devon4NetAPI`
+
+
+
+
+OpenAPI File +
+

In order to let CobiGen generate all the files, we first have to make some modifications to our OpenAPI file.

+
+
+

It is obligatory to put the “x-rootpackage” tag to indicate where CobiGen will place the generated files as well as the "x-component" tags for each component, keep in mind that due to CobiGen’s limitations each component must have its own entity.

+
+
+

You can read more information about how to configure your OpenAPI file and a working example here.

+
+
+
+
+
Generating files
+
+

CobiGen allows us to generate the files in two different ways. One of them is using Eclipse, which can be done through its graphical interface. The other way to generate the code is using the CobiGen CLI tool.

+
+
+Generating files through Eclipse +
+

In order to generate the files using Eclipse we need to follow some simple steps.

+
+
+

First we are going to import our basic devon4net WebAPI Project into Eclipse. To do so, open Eclipse with the “eclipse-main.bat” file that can be found in the devon distribution root folder. Once we are inside of Eclipse we go to File > Open projects from file system…​ and, under "Directory", search for your project.

+
+
+
+CobiGen +
+
+
+

Next we copy our OpenAPI file into the root folder of the project.

+
+
+
+CobiGen +
+
+
+

And then we right click on OpenAPI file and then select CobiGen > Generate…​ It will display a window like this:

+
+
+
+CobiGen +
+
+
+

To select all .NET features choose CRUD devon4net Server otherwise you can select only those that interest you.

+
+
+
+CobiGen +
+
+
+

Once you select all the files that you want to generate, click on the “Finish” button to generate all the source code.

+
+
+
+Generating files through CobiGen CLI +
+

In order to generate the files using the CobiGen CLI it is needed to do the following steps:

+
+
+
    +
  1. +

    Go to devonfw distribution folder

    +
  2. +
  3. +

    Run console.bat, this will open a console.

    +
  4. +
  5. +

    Go to the folder you downloaded the devon4net template and your yml file.

    +
  6. +
  7. +

    Run the command:

    +
    +
    +
    `cobigen generate {yourOpenAPIFile}.yml`
    +
    +
    +
  8. +
  9. +

    A list of increments will be printed so that you can start the generation. The CRUD devon4net Server increment has to be selected.

    +
  10. +
+
+
+
+
+
Configuration
+
+Data base +
+

CobiGen generates an empty context that has to be filled in manually in order to be able to work with the database. The context can be found in [Project_Name]/Devon4Net.WebAPI.Implementation/Domain/Database/CobigenContext.cs.

+
+
+
+CobiGen +
+
+
+
+Run the application +
+

After the configuration of the database, open a terminal in path: [Project_Name]/Devon4Net.Application.WebAPI and then type:

+
+
+
+
`dotnet run`
+
+
+
+

This will deploy our application in our localhost with the port 8082, so when you click here (https://localhost:8082/swagger) you can see, in swagger, all the services and the data model.

+
+ +
+

How to update CobiGen

+
+
+

In order to update CobiGen from our devonfw distribution, we have two options:

+
+
+
    +
  • +

    Open Eclipse, click on Help → Check for updates

    +
  • +
+
+
+
+Check updates +
+
+
+
    +
  • +

    Select all the CobiGen plugins listed and click on Next.

    +
  • +
+
+
+
+All the updates +
+
+
+

If this option is not working properly, then you can try the second option:

+
+
+
    +
  • +

    Open Eclipse, click on Help → About Eclipse IDE:

    +
  • +
+
+
+
+About Eclipse +
+
+
+
    +
  • +

    Click on Installation details:

    +
  • +
+
+
+
+Installation details +
+
+
+
    +
  • +

    Select all the CobiGen plugins and click on Update:

    +
  • +
+
+
+
+All updates details +
+
+
+

After the update process finishes, remember to restart Eclipse.

+
+
+
+
+
+
Updating templates:
+
+

To update your CobiGen templates to the latest version, you just need to do one step:

+
+
+
    +
  • +

    Right click any file on your package explorer, click on CobiGen → Update templates, then click on download:

    +
  • +
+
+
+
+Update templates +
+
+
+

Now you will have the latest templates ready!

+
+
+

Unresolved include directive in modules/ROOT/pages/master-cobigen.adoc - include::howto-Cobigen-CLI-generation.adoc[]

+
+ +
+

End to End POC Code generation using Entity class — This article helps to create a sample application using CobiGen.

+
+
+
+
Prerequisites
+
+

Download and install the devonfw IDE here.

+
+
+
+
Steps to create a Sample Project using Cobigen
+
+

The HOW_TO is divided in 2 parts:

+
+
+
    +
  1. +

    BE-Back End generator (DB + DAO + services) – CONTRACT FIRST APPROACH

    +
  2. +
  3. +

    FE-Front End generator (Web App Angular + Ionic App) – CONTRACT FIRST APPROACH

    +
  4. +
+
+
+

cobigen ionic code generation

+
+
+

So, ready to go! We’re going to start from the BE part …

+
+
+
Back End
+
+

run \devonfw-ide-scripts-3.2.4\eclipse-main.bat

+
+
+

It will open eclipse

+
+
+

create a project using below command from the command prompt

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

Import the project to eclipse as maven project +eclipse devon

+
+
+

Click FINISH

+
+
+

Now We have the following 4 projects.

+
+
+

eclipse package explorer

+
+
+

BEFORE starting to create an Entity class, remember to create the tables!

+
+
+
    +
  1. +

    Create a new SQL file (i.e: V0005__CreateTables-ItaPoc.sql) inside myapp-core and insert the following script:

    +
  2. +
+
+
+
+
CREATE TABLE EMPLOYEE (
+id BIGINT auto_increment, modificationCounter INTEGER NOT NULL,
+employeeid BIGINT auto_increment,
+name VARCHAR(255),
+surname VARCHAR(255),
+email VARCHAR(255),
+PRIMARY KEY (employeeid)
+);
+
+
+
+

WARNING: please note that there are 2 underscore in the name !

+
+
+

sql file

+
+
+
    +
  1. +

    Now create another SQL file (i.e: V0006__PopulateTables-ItaPoc.sql) and add following script about the INSERT in order to populate the table created before

    +
  2. +
+
+
+

WARNING: please note that there are 2 underscore in the name !

+
+
+
+
INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (1, 1, 1, 'Albert','Miller','albert.miller@capgemini.com');
+INSERT INTO  EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (2, 2, 2, 'Wills','Smith', 'wills.smith@capgemini.com');
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (3, 3, 3, 'Jaime','Thomas', 'jaime.thomas@capgemini.com');
+
+
+
+

sql insert

+
+
+

Let’s create the Entity Class for the code generation

+
+
+
    +
  1. +

    Create a package employeemanagement.dataaccess.api under the folder myapp-core. Note: It is important to follow this naming convention for CobiGen to work properly.

    +
  2. +
+
+
+

package

+
+
+
    +
  1. +

    Now create a JPA Entity class in this package

    +
  2. +
+
+
+
+
import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Column;
+@Entity
+@javax.persistence.Table(name = "EMPLOYEE")
+public class EmployeeEntity {
+  @Column(name = "EMPLOYEEID")
+  @GeneratedValue(strategy = GenerationType.IDENTITY)
+  private Long employeeId;
+  @Column(name = "NAME")
+  private String name;
+  @Column(name = "SURNAME")
+  private String surname;
+  @Column(name = "EMAIL")
+  private String email;
+}
+
+
+
+

then generate getters and setters for all attributes …

+
+
+
    +
  1. +

    Use Cobigen to generate code. Right click on EmployeeEntity. CobiGen → Generate

    +
  2. +
+
+
+

It will ask you to download the templates, click on update:

+
+
+

cobigen generate

+
+
+

It will automatically download the latest version of CobiGen_Templates.

+
+
+

Attention: If you want to adapt the CobiGen_Templates (normally this is not necessary), you will find at the end of this document a tutorial on how to import them and adapt them!

+
+
+
    +
  1. +

    Click on all the option selected as below:

    +
  2. +
+
+
+

cobigen option selection

+
+
+
    +
  1. +

    Click on finish. Below Screen would be seen. Click on continue

    +
  2. +
+
+
+

cobigen finish

+
+
+

The entire BE layer structure having CRUD operation methods will be auto generated.

+
+
+

Some classes will be generated on the api part (myapp-api), normally it will be interfaces, as shown below:

+
+
+

be layer

+
+
+

Some other classes will be generated on the core part (myapp-core), normally it will be implementations as shown below:

+
+
+

core folder

+
+
+

BEFORE generating the FE, please start the Tomcat server to check that the BE Layer has been generated properly.

+
+
+

To start a server you just have to right click on SpringBootApp.java → run as → Spring Boot app

+
+
+

Eclipse run as

+
+
+

Spring boot run

+
+
+

Spring boot run

+
+
+

BE DONE

+
+
+

Last but not least: We make a quick REST services test !

+
+
+

See in the application.properties the TCP Port and the PATH

+
+
+

application properties

+
+
+

Now compose the Rest service URL:

+
+
+

<server>/<app>/<rest service class path>/<service method path>

+
+
+
    +
  • +

    <server> refers to server with port no. (ie: localhost:8081)

    +
  • +
  • +

    <app> is in the application.properties (empty in our case, see above)

    +
  • +
  • +

    <rest service class path> refers to EmployeemanagementRestService: (i.e: /employeemanagement/v1)

    +
  • +
  • +

    <service method path>/employee/{id} (i.e: for getEmployee method)

    +
  • +
+
+
+

url mapping

+
+
+

URL of getEmployee for this example is:

+
+
+

for all employees

+
+
+
+
http://localhost:8081/services/rest/employeemanagement/v1/employee/search
+
+
+
+

for the specific employee

+
+
+
+
http://localhost:8081/services/rest/employeemanagement/v1/employee/1
+
+
+
+

Now download Postman to test the rest services.

+
+
+

Once done, you have to create a POST Request for the LOGIN and insert in the body the JSON containing the username and password waiter

+
+
+

postman

+
+
+

Once done with success (Status: 200 OK) …

+
+
+

… We create a NEW GET Request in order to get one employee

+
+
+

postman

+
+
+

Now you can click postman

+
+
+

Now you have to check that the response has Status: 200 OK and see the below Employee

+
+
+

postman

+
+
+

Now that we have successfully tested the BE, it is time to create the FE!

+
+
+
+
Front End
+
+

Let’s start now with angular Web and then Ionic app.

+
+
+Angular Web App +
+
    +
  1. +

    To generate angular structure, download or clone devon4ng-application-template from

    +
    +
    +
    https://github.com/devonfw/devon4ng-application-template
    +
    +
    +
  2. +
+
+
+

devon dist folder

+
+
+
    +
  1. +

    Once done, right click on EmployeeEto.java file present under the package com.devonfw.poc.employeemanagement.logic.api.to

    +
  2. +
+
+
+

eclipse generate

+
+
+
    +
  1. +

    Click on Finish

    +
  2. +
+
+
+

eclipse

+
+
+
    +
  1. +

    The entire ANGULAR structure has been auto generated. The generated code will be merged to the existing.

    +
  2. +
+
+
+

angular ee layer

+
+
+
    +
  1. +

    IMPORTANT now you have to add in the app-routing.module.ts file the next content, as a child of HomeComponent, in order to enable the route of the new generated component

    +
  2. +
+
+
+
+
,\{
+path: 'employee',
+component: EmployeeGridComponent,
+canActivate: [AuthGuard],
+},
+
+
+
+

Following picture explain where to place the above content:

+
+
+

routes

+
+
+
    +
  1. +

    Open the command prompt and execute devon yarn install from the base folder, which would download all the required libraries.

    +
  2. +
+
+
+
    +
  1. +

    Check the file environment.ts if the server path is correct. (for production you will have to change also the environment.prod.ts file)

    +
  2. +
+
+
+

environment

+
+
+

In order to do that it’s important to look at the application.properties to see the values as PATH, TCP port etc …

+
+
+

configure

+
+
+

For example, in this case, since the context path is empty, the server URLs should be like:

+
+
+
+
export const environment = {
+production: false,
+restPathRoot: 'http://localhost:8081/',
+restServiceRoot: 'http://localhost:8081/services/rest/',
+security: 'jwt'
+};
+
+
+
+

Warning: REMEMBER to set the security field to jwt, if it is not configured already.

+
+
+
    +
  1. +

    Now run the ng serve -o command to run the Angular Application.

    +
  2. +
+
+
+

image44

+
+
+
    +
  1. +

    If the command execution is successful, the below screen will appear and it would be automatically redirected to the url:

    +
    +
    +
    http://localhost:4200/login
    +
    +
    +
  2. +
+
+
+

image45

+
+
+

WebApp DONE

+
+
+
+Ionic Mobile App +
+
    +
  1. +

    To generate Ionic structure, download or clone devon4ng-application-template from

    +
    +
    +
    https://github.com/devonfw/devon4ng-ionic-application-template
    +
    +
    +
  2. +
  3. +

    Once done, Right click on the EmployeeEto as you already did before in order to use CobiGen.

    +
  4. +
  5. +

    Click on the selected options as seen in the screenshot:

    +
  6. +
+
+
+

image46

+
+
+
    +
  1. +

    Click on Finish

    +
  2. +
  3. +

    The entire ionic structure will be auto generated.

    +
  4. +
+
+
+

image47

+
+
+
    +
  1. +

    Change the server url (with correct serve url) in environment.ts, environment.prod.ts and environment.android.ts files (i.e: itapoc\devon4ng-ionic-application-template\src\environments\).

    +
  2. +
+
+
+

The angular.json file inside the project has already a build configuration for android.

+
+
+

image48

+
+
+
    +
  1. +

    Run npm install in the root folder to download the dependencies

    +
  2. +
  3. +

    Run ionic serve

    +
  4. +
+
+
+

image49

+
+
+
    +
  1. +

    +
    +

    Once the execution is successful

    +
    +
  2. +
+
+
+

image50

+
+
+
    +
  • +

    Mobile App DONE*

    +
  • +
+
+
+

So: well done

+
+
+

Starting from an Entity class you’ve successfully generated the Back-End layer (REST, SOAP, DTO, Spring services, Hibernate DAO), the Angular Web App and the Ionic mobile App!

+
+
+

image51

+
+
+Build APK +
+

Since We’re going to create apk remember the following pre-conditions:

+
+
+ +
+
+
    +
  1. +

    Now, open cmd and type the path where your devon4ng-ionic-application-template project is present.

    +
  2. +
  3. +

    Run the following commands:

    +
    +
      +
    1. +

      npx cap init

      +
    2. +
    3. +

      ionic build --configuration=android

      +
    4. +
    5. +

      npx cap add android

      +
    6. +
    7. +

      npx cap copy

      +
    8. +
    9. +

      npx cap open android

      +
    10. +
    +
    +
  4. +
  5. +

    Build the APK using Android studio.

    +
  6. +
+
+
+

image52 +image53 +image54 +image55

+
+
+

You can find your apk file in

+
+
+

/devon4ng-ionic-application-template/android/app/build/outputs/apk/debug

+
+
+
+
+
+
+
Adapt CobiGen_Templates
+
+

After following this tutorial, you will have the CobiGen_Templates downloaded on your local machine. To import these templates you need to do the following:

+
+
+

Right click in any part of the package explorer, then click on CobiGen → Adapt templates

+
+
+

image56

+
+
+

Click Ok:

+
+
+

image57

+
+
+

Now the CobiGen_Templates project will be automatically imported into your workspace, as shown on the image below:

+
+
+

image58

+
+
+

image59

+
+
+

Now you just need to change the Java version of the project to JRE 1.8. Right click on the JRE system library, and then on Properties:

+
+
+

image60

+
+
+

Now change the version to Java 1.8 +image61

+
+
+

Now you have successfully imported the CobiGen templates. If you want to edit them, you will find them in the folder src/main/templates. For instance, the Java templates are located here:

+
+
+

image62

+
+
+

Now you can adapt the templates as much as you want. Documentation about this can be found on:

+
+
+
+
https://github.com/devonfw/tools-cobigen/wiki/Guide-to-the-Reader
+
+
+ + +
+

End to End POC Code generation using OpenAPI — This article helps to create a sample application using CobiGen.

+
+
+
+
Prerequisites
+
+

Download and install the devonfw IDE here.

+
+
+
+
Steps to create a Sample Project using Cobigen
+
+

The HOW_TO is divided in 2 parts:

+
+
+
    +
  1. +

    BE-Back End generator (DB + DAO + services) – CONTRACT FIRST APPROACH

    +
  2. +
  3. +

    FE-Front End generator (Web App Angular + Ionic App) – CONTRACT FIRST APPROACH

    +
  4. +
+
+
+

cobigen ionic code generation

+
+
+

So, ready to go! We’re going to start from the BE part …

+
+
+

+
+
+
Back End
+
+

run \devonfw-ide-scripts-3.2.4\eclipse-main.bat

+
+
+

It will open eclipse

+
+
+

create a project using below command from the command prompt

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

Import the project to eclipse as maven project +eclipse devon

+
+
+

Click FINISH

+
+
+

Now We have the following 4 projects.

+
+
+

eclipse package explorer

+
+
+

BEFORE starting to create an Entity class, remember to create the tables!

+
+
+
    +
  1. +

    Create a new SQL file (i.e: V0005__CreateTables_ItaPoc.sql) inside jwtsample-core and insert the following script:

    +
  2. +
+
+
+
+
CREATE TABLE EMPLOYEE (
+id BIGINT auto_increment, modificationCounter *INTEGER* *NOT* *NULL*,
+employeeid BIGINT auto_increment,
+name VARCHAR(255),
+surname VARCHAR(255),
+email VARCHAR(255),
+PRIMARY KEY (employeeid)
+);
+
+
+
+

WARNING: please note that there are 2 underscore in the name !

+
+
+

sql file

+
+
+
    +
  1. +

    Now create another SQL file (i.e: V0006__PopulateTables-ItaPoc.sql) and add following script about the INSERT in order to populate the table created before

    +
  2. +
+
+
+

WARNING: please note that there are 2 underscore in the name !

+
+
+
+
INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (1, 1, 1, 'Stefano','Rossini','stefano.rossini@capgemini.com');
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (2, 2, 2, 'Angelo','Muresu', 'angelo.muresu@capgemini.com');
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (3, 3, 3, 'Jaime','Gonzalez', 'jaime.diaz-gonzalez@capgemini.com');
+
+
+
+

sql insert

+
+
+

Let’s create the yml file for the code generation

+
+
+
    +
  1. +

    Now create a new file devonfw.yml in the root of your core folder. This will be our OpenAPI contract, like shown below. Then, copy the contents of this file into your OpenAPI. It defines some REST service endpoints and an EmployeeEntity with its properties defined.

    +
  2. +
+
+
+

Important: if you want to know how to write an OpenAPI contract compatible with CobiGen, please read this tutorial.

+
+
+

Swagger at OASP4J Project

+
+
+
    +
  1. +

    Right click devonfw.yml. CobiGen → Generate

    +
  2. +
+
+
+

It will ask you to download the templates, click on update:

+
+
+

cobigen generate

+
+
+

It will automatically download the latest version of CobiGen_Templates.

+
+
+

Attention: If you want to adapt the CobiGen_Templates (normally this is not necessary), you will find at the end of this document a tutorial on how to import them and adapt them!

+
+
+
    +
  1. +

    Click on all the option selected as below:

    +
  2. +
+
+
+

cobigen option selection

+
+
+
    +
  1. +

    Click on finish. Below Screen would be seen. Click on continue

    +
  2. +
+
+
+

cobigen finish

+
+
+

The entire BE layer structure having CRUD operation methods will be auto generated.

+
+
+

Some classes will be generated on the api part (jwtsample-api), normally it will be interfaces, as shown below:

+
+
+

be layer

+
+
+

Some other classes will be generated on the core part (jwtsample-core), normally it will be implementations as shown below:

+
+
+

core folder

+
+
+

BEFORE to generate the FE, please start the Tomcat server to check that BE Layer has been generated properly.

+
+
+

To start a server you just have to right click on SpringBootApp.java → run as → Spring Boot app

+
+
+

Eclipse run as

+
+
+

Spring boot run

+
+
+

Spring boot run

+
+
+

BE DONE

+
+
+

Last but not least: We make a quick REST services test !

+
+
+

See in the application.properties the TCP Port and the PATH

+
+
+

application properties

+
+
+

Now compose the Rest service URL:

+
+
+

<server>/<app>/<rest service class path>/<service method path>

+
+
+
    +
  • +

    <server> refers to server with port no. (ie: localhost:8081)

    +
  • +
  • +

    <app> is in the application.properties (empty in our case, see above)

    +
  • +
  • +

    <rest service class path> refers to EmployeemanagementRestService: (i.e: /employeemanagement/v1)

    +
  • +
  • +

    <service method path>/employee/{id}  (i.e: for  getEmployee method)

    +
  • +
+
+
+

url mapping

+
+
+

URL of getEmployee for this example is:

+
+
+

For all employees

+
+
+
+
http://localhost:8081/services/rest/employeemanagement/v1/employee/search
+
+
+
+

For the specific employee

+
+
+
+
http://localhost:8081/services/rest/employeemanagement/v1/employee/1
+
+
+
+

Now download Postman to test the rest services.

+
+
+

Once done, you have to create a POST Request for the LOGIN and insert in the body the JSON containing the username and password waiter

+
+
+

postman

+
+
+

Once done with success (Status: 200 OK) …

+
+
+

postman

+
+
+

… We create a NEW POST Request and We copy the Authorization Bearer field (see above) and We paste it in the Token field (see below)

+
+
+

postman

+
+
+

and specific the JSON parameters for the pagination of the Request that We’re going to send:

+
+
+

postman

+
+
+

postman

+
+
+

Now you can click postman

+
+
+

Now you have to check that the response has Status: 200 OK and see the below list of Employees

+
+
+

postman

+
+
+

Now that we have successfully tested the BE, it is time to create the FE!

+
+
+
+
Front End
+
+

Let’s start now with angular Web and then Ionic app.

+
+
+Angular Web App +
+
    +
  1. +

    To generate angular structure, download or clone *devon4ng-application-template* from

    +
    +
    +
    https://github.com/devonfw/devon4ng-application-template
    +
    +
    +
  2. +
+
+
+

devon dist folder

+
+
+
    +
  1. +

    Once done, right click on devonfw.yml again (the OpenAPI contract). CobiGen → Generate

    +
  2. +
  3. +

    Click on the selected options as seen in the screenshot:

    +
  4. +
+
+
+

eclipse generate

+
+
+
    +
  1. +

    Click on Finish

    +
  2. +
+
+
+

eclipse

+
+
+
    +
  1. +

    The entire ANGULAR structure has been auto generated. The generated code will be merged to the existing.

    +
  2. +
+
+
+

angular ee layer

+
+
+
    +
  1. +

    IMPORTANT now you have to add in the app-routing.module.ts file the next content, as a child of HomeComponent, in order to enable the route of the new generated component

    +
  2. +
+
+
+
+
,\{
+path: 'employee',
+component: EmployeeGridComponent,
+canActivate: [AuthGuard],
+},
+
+
+
+

Following picture explain where to place the above content:

+
+
+

routes

+
+
+
    +
  1. +

    Open the command prompt and execute devon yarn install from the base folder, which would download all the required libraries.

    +
  2. +
+
+
+
    +
  1. +

    Check the file environment.ts if the server path is correct. (for production you will have to change also the environment.prod.ts file)

    +
  2. +
+
+
+

environment

+
+
+

In order to do that it’s important to look at the application.properties to see the values as PATH, TCP port etc …

+
+
+

configure

+
+
+

For example, in this case, since the context path is empty, the server URLs should be like:

+
+
+
+
export const environment = {
+production: false,
+restPathRoot: 'http://localhost:8081/',
+restServiceRoot: 'http://localhost:8081/services/rest/',
+security: 'jwt'
+};
+
+
+
+

Warning: REMEMBER to set the security field to jwt, if it is not configured already.

+
+
+
    +
  1. +

    Now run the *ng serve -o* command to run the Angular Application.

    +
  2. +
+
+
+

image44

+
+
+
    +
  1. +

    If the command execution is successful, the below screen will appear and it would be automatically redirected to the url:

    +
    +
    +
    http://localhost:4200/login
    +
    +
    +
  2. +
+
+
+

image45

+
+
+

WebApp DONE

+
+
+
+Ionic Mobile App +
+
    +
  1. +

    To generate Ionic structure, download or clone *devon4ng-application-template* from

    +
    +
    +
    https://github.com/devonfw/devon4ng-ionic-application-template
    +
    +
    +
  2. +
  3. +

    Once done, right click on the devonfw.yml as you already did before in order to use CobiGen.

    +
  4. +
  5. +

    Click on the selected options as seen in the screenshot:

    +
  6. +
+
+
+

image46

+
+
+
    +
  1. +

    Click on Finish

    +
  2. +
  3. +

    The entire ionic structure will be auto generated.

    +
  4. +
+
+
+

image47

+
+
+
    +
  1. +

    Change the server url (with correct serve url) in environment.ts, environment.prod.ts and environment.android.ts files (i.e: itapoc\devon4ng-ionic-application-template\src\environments\).

    +
  2. +
+
+
+

The angular.json file inside the project has already a build configuration for android.

+
+
+

image48

+
+
+
    +
  1. +

    Run npm install in the root folder to download the dependencies

    +
  2. +
  3. +

    Run ionic serve

    +
  4. +
+
+
+

image49

+
+
+
    +
  1. +

    +
    +

    Once the execution is successful

    +
    +
  2. +
+
+
+

image50

+
+
+
    +
  • +

    Mobile App DONE

    +
  • +
+
+
+

So: well done

+
+
+

Starting from an Entity class you’ve successfully generated the Back-End layer (REST, SOAP, DTO, Spring services, Hibernate DAO), the Angular Web App and the Ionic mobile App!

+
+
+

image51

+
+
+Build APK +
+

Since we’re going to create an APK, remember the following pre-conditions:

+
+
+ +
+
+
    +
  1. +

    Now, open cmd and type the path where your devon4ng-ionic-application-template project is present.

    +
  2. +
  3. +

    Run the following commands:

    +
    +
      +
    1. +

      npx cap init

      +
    2. +
    3. +

      ionic build --configuration=android

      +
    4. +
    5. +

      npx cap add android

      +
    6. +
    7. +

      npx cap copy

      +
    8. +
    9. +

      npx cap open android

      +
    10. +
    +
    +
  4. +
  5. +

    Build the APK using Android Studio.

    +
  6. +
+
+
+

image52 +image53 +image54 +image55

+
+
+

You can find your apk file in

+
+
+

/devon4ng-ionic-application-template/android/app/build/outputs/apk/debug

+
+
+
+
+
+
+
Adapt CobiGen_Templates
+
+

After following this tutorial, you will have the CobiGen_Templates downloaded on your local machine. To import these templates you need to do the following:

+
+
+

Right click in any part of the package explorer, then click on CobiGen → Adapt templates

+
+
+

image56

+
+
+

Click OK:

+
+
+

image57

+
+
+

Now the CobiGen_Templates project will be automatically imported into your workspace, as shown on the image below:

+
+
+

image58

+
+
+

image59

+
+
+

Now you just need to change the Java version of the project to JRE 1.8. Right click on the JRE system library, and then on Properties:

+
+
+

image60

+
+
+

Now change the version to Java 1.8 +image61

+
+
+

Now you have successfully imported the CobiGen templates. If you want to edit them, you will find them in the folder src/main/templates. For instance, the Java templates are located here:

+
+
+

image62

+
+
+

Now you can adapt the templates as much as you want. Documentation about this can be found on:

+
+
+
+
https://github.com/devonfw/tools-cobigen/wiki/Guide-to-the-Reader
+
+
+ +
+

==Adapt Templates from CobiGen

+
+
+
+
Adapt CobiGen_Templates
+
+

After following this tutorial, you will have the CobiGen_Templates downloaded on your local machine. To import these templates you need to do the following:

+
+
+

Right click in any part of the package explorer, then click on CobiGen → Adapt templates

+
+
+

image56

+
+
+

Click OK:

+
+
+

image57

+
+
+

Now the CobiGen_Templates project will be automatically imported into your workspace, as shown on the image below:

+
+
+

image58

+
+
+

image59

+
+
+

Now you just need to change the Java version of the project to JRE 1.8. Right click on the JRE system library, and then on Properties:

+
+
+

image60

+
+
+

Now change the version to Java 1.8 +image61

+
+
+

Now you have successfully imported the CobiGen templates. If you want to edit them, you will find them in the folder src/main/templates. For instance, the Java templates are located here:

+
+
+

image62

+
+
+

Now you can adapt the templates as much as you want. Documentation about this can be found on:

+
+
+
+
https://github.com/devonfw/tools-cobigen/wiki/Guide-to-the-Reader
+
+
+ +
+

==Enable Composite Primary Keys in Entity

+
+
+

In order to enable Composite Primary Keys in an entity in CobiGen, the approach below is suggested

+
+
+

The templates in CobiGen have been enhanced to support Composite primary keys while still supporting the default devonfw/Cobigen values with Long id.

+
+
+

Also, the current generation from Entity still holds good - right click from an Entity object, CobiGen → Generate will show the CobiGen wizard relative to the entity generation.

+
+
+

After generating, below example shows how composite primary keys can be enabled.

+
+
+
+
@Entity
+@Table(name = "employee")
+public class EmployeeEntity {
+	private CompositeEmployeeKey id;
+	private String name;
+	private String lastName;
+	@Override
+	@EmbeddedId
+	public CompositeEmployeeKey getId() {
+		return id;
+	}
+	@Override
+	public void setId(CompositeEmployeeKey id) {
+		this.id = id;
+	}
+	.
+	.
+	.
+
+
+
+
+
public class CompositeEmployeeKey implements Serializable {
+  private String companyId;
+  private String employeeId;
+
+
+
+

Once the generation is complete, implement PersistenceEntity<ID>.java in the EmployeeEntity and pass the composite primary key object which is CompositeEmployeeKey in this case as the parameter ID.

+
+
+
+
import com.devonfw.module.basic.common.api.entity.PersistenceEntity;
+@Entity
+@Table(name = "employee")
+public class EmployeeEntity implements PersistenceEntity<CompositeEmployeeKey> {
+	private CompositeEmployeeKey id;
+	private String name;
+	private String lastName;
+
+
+
+

Also, the modificationCounter methods need to be implemented from the interface PersistenceEntity<ID>. A sample implementation of the modification counter can be found below.

+
+
+
+
@Override
+  public int getModificationCounter() {
+    if (this.persistentEntity != null) {
+      // JPA implementations will update modification counter only after the transaction has been committed.
+      // Conversion will typically happen before and would result in the wrong (old) modification counter.
+      // Therefore we update the modification counter here (that has to be called before serialization takes
+      // place).
+      this.modificationCounter = this.persistentEntity.getModificationCounter();
+    }
+    return this.modificationCounter;
+  }
+  @Override
+  public void setModificationCounter(int version) {
+    this.modificationCounter = version;
+  }
+
+
+
+
+
+
+

Template Development

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/mgmt__release_and_deployment_process.html b/docs/cobigen/1.0/mgmt__release_and_deployment_process.html new file mode 100644 index 00000000..014e8b83 --- /dev/null +++ b/docs/cobigen/1.0/mgmt__release_and_deployment_process.html @@ -0,0 +1,536 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==Release and Deployment Process

+
+
+

Updated: This explains the manual process of releasing. We now have an automatic script that enables a fast and easy release. Please check it here

+
+
+

Create a new issue with the following markdown contents for each release of any plugin/module containing the following tasks to be performed on release:

+
+
+
Template for release tasks (markdown)
+
+
 **Release tasks:**
+***1. Preparation***
+* [ ] Check running maven build on the development branch `mvn clean install`
+  * especially for eclipse plugin release run `mvn clean install -Pp2-build-mars,p2-build-stable` in cobigen-eclipse folder to run SWTBot UI tests locally. **Be patient, do not touch mouse and keyboard.**
+* [ ] Check if all tests are green and if there are no ignored ones left. As there are ignored ones, please check them if they can be removed or they only should be temporarily ignored. Potentially fix them.
+* [ ] Check/Update documentation according to changelog to be released
+  * [ ] especially update version number of module to be released [here](https://github.com/devonfw/cobigen/wiki/CobiGen)
+  * [ ] Update the wiki submodule and commit the latest version to target the updated release version of the wiki
+    \```
+    cd cobigen-documentation/cobigen.wiki
+    git pull origin master
+    cd ..
+    git add cobigen.wiki
+    git commit -m"#<releaseIssueNo> update docs"
+    git push
+    \```
+  * [ ] Check branch build to not fail in production line https://devon.s2-eu.capgemini.com/
+
+***2. Merging / Review***
+* [ ] **Locally** merge development branch to master branch
+  * [ ] Check for changed maven dependencies and document them. _As dependencies have been changed:_
+    * [ ] check new dependencies with legal (in case of not yet used licenses).
+    * **If there are any issues with the dependencies. Abort, get in contact.**
+    * [ ] document the changes in the [`ChangeLog` of the dependency tracking](https://github.com/devonfw/cobigen/wiki/mgmt_dependency-and-license-tracking).
+    * [ ] create a new licensing document
+  * [ ] Perform final review of merged contents
+    * [ ] Are there any changes in a different module not corresponding to the current development branch? Try to find the cause and potentially discuss with the responsible committer.
+    * [ ] Any major issues, which would prevent from merging? Missing files, changes?
+    * if ok - commit (if not yet done) **but do not push** to master branch
+    * if not - abort merge, cleanup working copy, and fix on dev branch
+
+***3. Testing / Consolidation***
+* [ ] Higher component version number to release version
+* [ ] Fix snapshot versions of dependencies of all components to be released to its release versions
+* [ ] Install components locally and/or deploy to experimental update site
+* [ ] Perform a final manual test of all issues resolved in the milestone to be released.
+* [ ] Perform integration tests
+  * especially for cobigen-eclipse if cobigen internal dependencies have been changed
+
+***4. Deployment***
+* [ ] Close eclipse IDE
+* [ ] In case of non-eclipse component (for cobigen-core, just execute first line):
+  \```
+  mvn clean package bundle:bundle -Pp2-bundle -Dmaven.test.skip=true
+  mvn install bundle:bundle -Pp2-bundle p2:site -Dmaven.test.skip=true
+  mvn deploy -Pp2-upload-stable -Dmaven.test.skip=true -Dp2.upload=stable
+  \```
+* [ ] In case of eclipse plug-in release:
+  \```
+  cd cobigen-eclipse
+  mvn clean deploy -Pp2-build-stable,p2-upload-stable,p2-build-mars -Dp2.upload=stable
+  \```
+* [ ] Check the update site `http://de-mucevolve02/files/cobigen/updatesite/stable/` by installing/updating it once to an eclipse distribution.
+* [ ] Assure, that everything is committed and the working copy is clean
+* [ ] Create a tag according to the naming conventions
+* [ ] Push
+* [ ] Close milestone and create new release with binaries on GitHub
+
+***5. Follow-up***
+* [ ] Merge master branch back to corresponding dev_ branch
+* [ ] Create new Milestone (minor version update)
+* [ ] increase version on dev branch to next minor version + SNAPSHOT
+* [ ] Push
+
+
+
+

Testing process

+
+
+

In this section, the testing process of certain CobiGen features will be described. This should be used as a quality assurance document to follow up before releasing these features:

+
+
+

Update templates feature

+
+

Starting from a clean devonfw 3.0.0 distribution, follow the next steps to test the new feature for updating templates:

+
+
+
    +
  • +

    Open the devonfw distribution, right click on a Java entity. Click on CobiGen → Health Check. It should:

    +
    +
      +
    1. +

      A message is thrown stating that there are no templates. It asks you to download them. If you cancel it, nothing happens. If you accept, it should say "Templates downloaded successfully".

      +
    2. +
    3. +

      After downloading the templates, you should see two OK values on CobiGen_Templates and on context.xml.

      +
      +
        +
      1. +

        If you click on Advanced Health Check everything should be green.

        +
      2. +
      +
      +
    4. +
    5. +

      Now, right click again on CobiGen → Generate. As you have already downloaded the templates, it should directly start loading them, without asking to download them again.

      +
    6. +
    7. +

      Try to generate something. The generated files should be visible after generating.

      +
    8. +
    +
    +
  • +
+
+
+
+
**Quality assurance plan Update Templates feature:**
+
+***1. Preparation***
+* [ ] Follow [this tutorial](https://github.com/devonfw/cobigen/wiki/mgmt__release_and_deployment_process#update-templates-feature) to start the testing phase
+
+***2. Testing scenarios***
+* [ ] A message is thrown informing that there are no templates.
+* [ ] It asks you to download templates.
+* [ ] If you cancel it, nothing happens.
+* [ ] If you accept it, a new window is shown with: Templates downloaded successfully.
+* [ ] You should see two OK values.
+* [ ] If you press on Advanced Health check, everything should be green.
+* [ ] If you try to generate, it directly reads the templates.
+* [ ] You are able to generate and you see the generated files.
+
+
+***3. Deployment***
+
+If all of these test scenarios are checked, then the release process can continue.
+
+
+
+
+

Ionic and Angular

+
+

To properly test the Ionic and Angular templates we need to follow the next steps:

+
+
+
    +
  • +

    Copy the jwtsample project from the workspaces/examples folder and paste it to the workspaces/main folder, then import it into your workspace.

    +
  • +
  • +

    Add to the database of the project the following SQL script, so that we can test the retrieval of data.

    +
  • +
+
+
+
+
CREATE TABLE EMPLOYEE (
+
+  id BIGINT auto_increment ,
+
+  modificationCounter INTEGER NOT NULL,
+
+  employeeid BIGINT auto_increment,
+
+  name VARCHAR(255),
+
+  surname VARCHAR(255),
+
+  email VARCHAR(255),
+
+  PRIMARY KEY (employeeid)
+
+);
+
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (1, 1, 1, 'Mister','Boss','mister.boss@capgemini.com');
+
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (2, 2, 2, 'Intern','Student', 'intern.student@capgemini.com');
+
+
+
+
    +
  • +

    Create a Hibernate entity to map the data of the previous SQL script.

    +
  • +
+
+
+
+
import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Column;
+
+
+@Entity
+@javax.persistence.Table(name = "EMPLOYEE")
+
+public class EmployeeEntity {
+
+  @Column(name = "EMPLOYEEID")
+
+  @GeneratedValue(strategy = GenerationType.IDENTITY)
+
+  private Long employeeId;
+
+  @Column(name = "NAME")
+
+  private String name;
+
+  @Column(name = "SURNAME")
+
+  private String surname;
+
+  @Column(name = "EMAIL")
+
+  private String email;
+
+}
+
+
+
+
    +
  • +

    Using the EmployeeEntity, generate increments CRUD DAO’S, CRUD REST services, CRUD SOAP services, CRUD logic (all in one), Entity infrastructure and TO’s. After generating, follow first the following tutorial related to Ionic Client Generation and afterwards the Angular tutorial.

    +
  • +
  • +

    The final step before releasing should be creating an issue with the following Markdown template. If every test scenario is completed, then testing phase is over and you can release.

    +
  • +
+
+
+
+
**Quality assurance plan Ionic and Angular:**
+
+***1. Preparation***
+* [ ] Follow [this tutorial](https://github.com/devonfw/cobigen/wiki/mgmt__release_and_deployment_process#ionic-and-angular) to start the testing phase
+
+***2. Testing scenarios***
+* [ ] You are able to log-in into both Ionic and Angular apps using JWT authentication.
+* [ ] You are able to log-in into Angular using CSRF authentication.
+* [ ] You are able to retrieve all the employees in both Ionic and Angular.
+* [ ] You are able to create an employee in both Ionic and Angular.
+* [ ] You are able to find an employee by any of its fields in both Ionic and Angular.
+* [ ] You are able to update an employee by any of its fields in both Ionic and Angular.
+* [ ] You are able to use [swipe functionality](https://ionicframework.com/docs/api/components/item/ItemSliding/) to update or delete an employee in Ionic.
+* [ ] You are able to use the [Ionic refresher](https://ionicframework.com/docs/api/components/refresher/Refresher/).
+
+
+***3. Deployment***
+
+If all of these test scenarios are checked, then the release process can continue.
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/mgmt_dependency-and-license-tracking.html b/docs/cobigen/1.0/mgmt_dependency-and-license-tracking.html new file mode 100644 index 00000000..53861de1 --- /dev/null +++ b/docs/cobigen/1.0/mgmt_dependency-and-license-tracking.html @@ -0,0 +1,2540 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==License Tracking of Dependencies

+
+
+

Current Releases

+
+
+

CobiGen-core v1.0.0

+ ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Dependency NameVersionLicenseURL

SLF4J

1.7.7

MIT

http://www.slf4j.org/license.html

Guava

17.0

Apache License 2.0

http://code.google.com/p/guava-libraries/

Reflections

0.9.9-RC2

WTFPL

https://code.google.com/p/reflections/

FreeMarker

2.3.20

BSD-style

http://freemarker.org/docs/app_license.html

Jaxen

1.1.4

"Apache-style open source license"

http://jaxen.codehaus.org/license.html

Apache Commons IO

2.4

Apache License 2.0

http://commons.apache.org/proper/commons-io/

`Apache Commons Lang `

3.1

Apache License 2.0

http://commons.apache.org/proper/commons-lang/

Apache Commons JXPath

1.3

Apache License 2.0

http://commons.apache.org/proper/commons-jxpath/

JDOM

1.1.3

"Apache-style open source license"

http://www.jdom.org/docs/faq.html#a0030

+
+
+

CobiGen-Java plugin v1.0.0

+ ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Dependency NameVersionLicenseURL

cobigen-core

v1.0.0

SLF4J

1.7.7

MIT

http://www.slf4j.org/license.html

QDox

2.0-M2

Apache License 2.0

http://qdox.codehaus.org/license.html

+
+
+

CobiGen-property plugin v1.0.0

+ ++++++ + + + + + + + + + + + + + + + + + + + + + + +
Dependency NameVersionLicenseURL

cobigen-core

v1.0.0

SLF4J

1.7.7

MIT

http://www.slf4j.org/license.html

+
+
+

CobiGen-XML plugin v1.0.0

+ ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Dependency NameVersionLicenseURL

cobigen-core

v1.0.0

SLF4J

1.7.7

MIT

http://www.slf4j.org/license.html

XMLMerge

3.1

LGPL 2.0

http://geonetwork.tv/xmlmerge/License.txt http://el4j.sourceforge.net/license.html

atinject

1

Apache License 2.0

https://code.google.com/p/atinject/

+
+
+

CobiGen-text merger v1.0.0

+ ++++++ + + + + + + + + + + + + + + + + + + + + + + +
Dependency NameVersionLicenseURL

cobigen-core

v1.0.0

SLF4J

1.7.7

MIT

http://www.slf4j.org/license.html

+
+
+

CobiGen-eclipse v1.0.0

+ ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Dependency NameVersionLicenseURL

cobigen-core

v1.0.0

cobigen-javaplugin

v1.0.0

cobigen-propertyplugin

v1.0.0

cobigen-xmlplugin

v1.0.0

cobigen-textmerger

v1.0.0

+
+
+
+
+

Changelog

+
+
+

CobiGen-core v1.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

removed

JDOM

+
+
+

CobiGen-Java plugin v1.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

` cobigen-core `

v1.1.0

+
+
+

CobiGen-XML plugin v1.0.1

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

JDOM

1.1.3

"Apache-style open source license"

http://www.jdom.org/docs/faq.html#a0030

+
+
+

CobiGen-eclipse v1.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

1.1.0

updated

cobigen-javaplugin

1.1.1

updated

cobigen-xmlplugin

1.0.1

+
+
+

CobiGen-XML plugin v2.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

1.2.0

+
+
+

CobiGen-Java plugin v1.2.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

mmm-util-core

5.0.0

Apache License 2.0

https://github.com/m-m-m/mmm/wiki/FAQ#will-mmm-ever-change-its-license-in-later-releases

updated

cobigen-core

1.2.0

+
+
+

CobiGen-eclipse v1.2.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

1.2.0

updated

cobigen-javaplugin

1.2.0

updated

cobigen-xmlplugin

2.0.0

+
+
+

CobiGen-eclipse v1.2.1

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-javaplugin

1.2.1

+
+
+

CobiGen-Java plugin v1.3.0

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

2.0.0

+
+
+

CobiGen-maven v1.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

maven-core

3.0

Apache License 2.0

http://maven.apache.org/ref/3.0/maven-core/

added

maven-compat

3.0

Apache License 2.0

http://maven.apache.org/ref/3.0/maven-compat/

added

maven-plugin-api

3.0

Apache License 2.0

http://maven.apache.org/ref/3.0/maven-plugin-api/

added

cobigen-core

2.0.0

added

cobigen-xmlplugin

2.1.0

added

cobigen-javaplugin

1.3.0

added

cobigen-propertyplugin

1.0.0

added

cobigen-textmerger

1.0.1

+
+
+

CobiGen-eclipse v1.3.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

changed

cobigen-core

2.0.0

changed

cobigen-xmlplugin

2.1.0

changed

cobigen-javaplugin

1.3.0

changed

cobigen-textmerger

1.0.1

+
+
+

CobiGen-core v2.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

Dozer

5.5.1

Apache License 2.0

http://dozer.sourceforge.net/license.html

+
+
+

CobiGen-Java plugin v1.3.1

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

QDox

2.0-M3

+
+
+

CobiGen-eclipse v1.4.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

2.1.0

updated

cobigen-javaplugin

1.3.1

+
+
+

CobiGen-maven v1.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

2.1.0

updated

cobigen-javaplugin

1.3.1

+
+
+

CobiGen-core v2.1.1

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

FreeMarker

2.3.23

Apache License 2.0

http://freemarker.org/LICENSE.txt

+
+
+

CobiGen-eclipse v1.4.1

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

2.1.1

updated

cobigen-javaplugin

1.3.2

added

ant

1.9.6

Apache License 2.0

http://www.apache.org/licenses/LICENSE-2.0.html

+
+
+

CobiGen-Java plugin v1.4.0

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

3.0.0

+
+
+

CobiGen-JSON plugin v1.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

cobigen-core

3.0.0

added

` mmm-util-core`

5.0.0

Apache License 2.0

added

json

20160810

MIT

https://github.com/stleary/JSON-java

added

gson

2.7

Apache License 2.0

https://github.com/google/gson

+
+
+

CobiGen-XML plugin v3.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

3.0.0

removed

XMLMerge

removed

module-xml_merge-common

removed

javax.inject

removed

JDOM

added

LeXeMe

1.0.0

Apache License 2.0

https://github.com/maybeec/lexeme

+
+
+

CobiGen-maven v2.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

3.0.0

updated

cobigen-javaplugin

1.4.0

updated

cobigen-xmlplugin

3.0.0

updated

cobigen-propertyplugin

1.1.0

updated

cobigen-textmerger

1.1.0

added

cobigen-jsonplugin

1.0.0

+
+
+

CobiGen-maven v2.0.1

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-propertyplugin

1.1.1

+
+
+

CobiGen-eclipse v2.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

3.0.0

updated

cobigen-javaplugin

1.4.0

updated

cobigen-xmlplugin

3.0.0

updated

cobigen-propertyplugin

1.1.0

updated

cobigen-textmerger

1.1.1

added

cobigen-jsonplugin

1.0.0

+
+
+

CobiGen-HTML merger v1.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

cobigen-core

4.0.0

added

jsoup

1.10.2

MIT

https://jsoup.org/

+
+
+

CobiGen-JSON plugin v1.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

removed

mmm-util-core

+
+
+

CobiGen-core v4.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

mmm-util-core

7.4.0

Apache Software License 2.0

https://github.com/m-m-m/mmm/wiki/License

removed

FreeMarker

2.3.23

+
+
+

CobiGen-Java plugin v1.5.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

removed

mmm-util-core

added

mmm-util-pojo

7.4.0

Apache Software License 2.0

https://github.com/m-m-m/mmm/wiki/License

+
+
+

CobiGen-tempeng-velocity-plugin v1.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

velocity

1.7

Apache Software License 2.0

http://velocity.apache.org/engine/1.7/license.html

+
+
+

CobiGen-TS plugin v1.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

cobigen-core

4.0.0

added

ts-merger

1.0.0

Apache Public License 2.0

https://github.com/devonfw/ts-merger

added

js-beautifier

1.6.14

MIT

https://github.com/beautify-web/js-beautify

added

rhino

1.7R4

Mozilla Public License 2.0

https://github.com/mozilla/rhino/blob/master/LICENSE.txt

+
+
+

CobiGen-eclipse v2.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

4.0.0

updated

cobigen-javaplugin

1.5.0

updated

cobigen-jsonplugin

1.1.0

added

cobigen-tsplugin

1.0.0

added

cobigen-htmlplugin

1.0.0

added

cobigen-tempeng-freemarkerplugin

1.0.0-SNAPSHOT

+
+
+

CobiGen-maven v2.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

4.0.0

added

cobigen-core-test

4.0.0

updated

cobigen-javaplugin

1.5.0

updated

cobigen-jsonplugin

1.1.0

added

cobigen-tsplugin

1.0.0

added

cobigen-htmlplugin

1.0.0

added

cobigen-tempeng-freemarkerplugin

1.0.0-SNAPSHOT

+
+
+

CobiGen-TS plugin v1.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

removed

cobigen-core

updated

cobigen-core-api

v4.1.0

updated

ts-merger

2.0.0

updated

beautify

1.6.14

removed

rhino

+
+
+

CobiGen-tempeng-FreeMarker-plugin v1.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

cobigen-core-api

4.1.0

added

FreeMarker

2.3.23

Apache Software License 2.0

http://freemarker.org/docs/app_license.html

added

Jaxen

1.1.4

"Apache-style open source license"

http://jaxen.codehaus.org/license.html

+
+
+

CobiGen-eclipse v3.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

4.1.0

added

`cobigen-java`plugin-model

1.0.0

removed

cobigen-jsonplugin

removed

cobigen-javaplugin

removed

cobigen-htmlplugin

removed

cobigen-propertyplugin

removed

cobigen-textmerger

removed

`cobigen-tsplugin `

removed

cobigen-xmlplugin

removed

cobigen-tempeng-freemarkerplugin

+
+
+

CobiGen-XML plugin v3.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

removed

cobigen-core

updated

cobigen-core-api

v4.1.0

+
+
+

CobiGen-maven v3.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

4.1.0

removed

cobigen-jsonplugin

removed

cobigen-javaplugin

removed

cobigen-htmlplugin

removed

cobigen-propertyplugin

removed

cobigen-textmerger

removed

cobigen-tsplugin

removed

cobigen-xmlplugin

removed

cobigen-tempeng-freemarkerplugin

+
+
+

CobiGen-property plugin v1.2.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

removed

cobigen-core

updated

cobigen-core-api

v4.1.0

+
+
+

CobiGen-text merger v1.2.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

removed

cobigen-core

updated

cobigen-core-api

v4.1.0

+
+
+

CobiGen-HTML plugin v1.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

removed

cobigen-core

updated

cobigen-core-api

v4.1.0

added

commons-io

2.4

Apache License 2.0

https://commons.apache.org/proper/commons-io/

+
+
+

CobiGen-JSON plugin v1.2.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

removed

cobigen-core

updated

cobigen-core-api

v4.1.0

+
+
+

CobiGen-OpenAPI plugin v1.0.1

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

cobigen-core-api

v4.1.0

added

kaizen.openapi-parser

v0.0.1.201709142043

EPL v1.0

KaiZen Open API parser

+
+
+

CobiGen-OpenAPI plugin v1.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

changed

kaizen.openapi-parser

v0.0.3.201803041924

EPL v1.0

KaiZen Open API parser

added

json-path

2.4.0

Apache License 2.0

https://github.com/json-path/JsonPath/blob/master/LICENSE

+
+
+

CobiGen-JSON plugin v1.2.1

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

removed

json

20160810

MIT

https://github.com/stleary/JSON-java

+
+
+

CobiGen-maven v3.2.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

4.2.1

added

cobigen-core-api

4.2.1

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/mgmt_ide-setup-oomph.html b/docs/cobigen/1.0/mgmt_ide-setup-oomph.html new file mode 100644 index 00000000..3fb2b740 --- /dev/null +++ b/docs/cobigen/1.0/mgmt_ide-setup-oomph.html @@ -0,0 +1,552 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==IDE Setup with the Oomph Installer

+
+
+ + + + + +
+ + +
+

This page is still under construction.

+
+
+
+
+

As an alternative and faster way to set up Eclipse for CobiGen development we also provide a customized Eclipse Installer and Oomph setups.

+
+
+
    +
  1. +

    The installer can be downloaded from within the corp network.

    +
  2. +
  3. +

    Unarchive it in a folder of your choice (e.g. %home%\Eclipse Installer Capgemini\ if you want to use the installer frequently)

    +
  4. +
  5. +

    Run eclipse-inst.exe or eclipse-inst on linux

    +
  6. +
+
+
+ + + + + +
+ + +
+

Before starting the installation make sure to

+
+
+
    +
  1. +

    have git configured for your preferred github authentication method

    +
  2. +
  3. +

    have git configured to handle long file names (e.g. by setting git config --system core.longpaths true)

    +
  4. +
  5. +

    have read access to http://de-mucevolve02/ in the corp network

    +
  6. +
+
+
+
+
+

Quick start guide

+
+
+
    +
  1. +

    On the Product page choose CobiGen IDE

    +
  2. +
  3. +

    On the Project page choose CobiGen

    +
    +
      +
    1. +

      master clones only the master branch from the specified origin

      +
    2. +
    3. +

      development clones all development branches and the master branch from the specified origin into %installation location%/workspaces/cobigen-development and the master branch from the devonfw repository into %installation location%/workspaces/cobigen-master

      +
    4. +
    +
    +
  4. +
+
+
+
+
+

Detailed Walkthrough

+
+
+

Clean (with Eclipse installation)

+
+

On the first installer page you need to choose what Eclipse bundle you want to use. The Product page (picture below) displays the possible choices. +Product page of the installer

+
+
+
    +
  1. +

    the current Product Catalog. Each entry represents a pre-configured Eclipse bundle. In case of doubt choose CobiGen IDE

    +
  2. +
  3. +

    the Eclipse version to be installed.

    +
  4. +
  5. +

    the bitness of the Eclipse version. Be sure to choose the bitness of your OS

    +
  6. +
  7. +

    the Java VM used during installation.

    +
  8. +
  9. +

    the bundle pool. If activated Eclipse will create a p2 pool. This can be helpful if you want to create multiple installations of eclipse. This option is hidden and deactivated by default. You can make it visible by removing the -Doomph.p2.pool=@none line in the installers eclipse-inst.ini

    +
  10. +
  11. +

    the update indicator. If those arrows spin you can update the installer or any of its components by clicking on this button

    +
  12. +
  13. +

    Chooses the selected product and continues with the installation

    +
  14. +
+
+
+

The next installer page lets you choose a project to be checked out during installation. +Project page of the installer

+
+
+
    +
  1. +

    the current Project Catalog. Select CobiGen

    +
  2. +
  3. +

    the project stream. In case of CobiGen:

    +
    +
      +
    1. +

      master: Only the master branch of Cobigen will be checked out

      +
    2. +
    3. +

      development: the master branch and ALL development branches will be checked out.

      +
    4. +
    5. +

      In each case you can specify an own fork as git origin

      +
    6. +
    +
    +
  4. +
+
+
+

After choosing a project the installer fetches additional Oomph tasks. You need to accept the installation of said tasks in order to proceed.

+
+
+

Installation of external Oomph tasks

+
+
+

The installer then restarts and opens at the Project page again. Simply repeat the instructions for the Project page. Installation and restart is only done the first time a new task is requested by a product or project configuration.

+
+
+

By proceeding with the Next button the installer opens the Variables page. On this page the installation and configuration of the Eclipse bundle and the chosen projects is done by setting the variables presented.

+
+
+

Variable page of the installer

+
+
+
    +
  1. +

    the folder into that Eclipse will be installed. It is recommended to use the Browse…​ button to locate the folder. A direct input into the text field is possible but due to a randomly occurring bug in the installer the input is only partially parsed.

    +
  2. +
  3. +

    the User name to access the Devon Maven Nexus. Typically your corp user name. This value will be stored in variables-customized(.bat)

    +
  4. +
  5. +

    the password to access the Devon Maven Nexus. Typically your corp password. This value will be stored (PLAIN!) in variables-customized(.bat)

    +
  6. +
  7. +

    the User name to access the iCSD Fileserver. This value will be stored in variables-customized(.bat). If no credentials were provided insert anything.

    +
  8. +
  9. +

    the password to access the iCSD Fileserver. This value will be stored (PLAIN!) in variables-customized(.bat). If no credentials were provided insert anything.

    +
  10. +
  11. +

    the Github remote URI for cloning the devonfw repository of CobiGen. Target of this URI is %installation location%/workspaces/cobigen-master if the chosen stream is development.

    +
    +
      +
    1. +

      SSH: The remote URI to access the repository via ssh. Make sure to have your git configured to work with a ssh client and have this client running.

      +
    2. +
    3. +

      HTTPS: The remote URI to access the repository via https. Activates the Github user ID and Github user Password variables. User id and password are stored in the cloning scripts in plain text.

      +
    4. +
    5. +

      Two-Factor Authentication isn’t supported and probably won’t be in the future.

      +
    6. +
    +
    +
  12. +
  13. +

    the Github remote URI for cloning a CobiGen repository.

    +
    +
      +
    1. +

      Existing own fork (SSH): Same as above. The Github user ID is used in the remote URI instead of devonfw. Activates and requires the Github user ID variable.

      +
    2. +
    3. +

      Existing own fork (HTTPS): Same as above. The Github user ID is used in the remote URI instead of devonfw.

      +
    4. +
    5. +

      devonfw repository: Uses the remote URI from above.

      +
    6. +
    +
    +
  14. +
  15. +

    The Eclipse version you want to develop cobigen for. This is not the Eclipse version to be installed. When running integration tests for the CobiGen Eclipse Plugin this Eclipse version is launched.

    +
  16. +
  17. +

    Your Github user id.

    +
  18. +
  19. +

    Your Github user password. Be aware that this is stored in plain text! Moreover, if you use special characters as for example ! or % in your password, you need to escape them in the batch file. See http://www.robvanderwoude.com/escapechars.php for further information.

    +
  20. +
  21. +

    Reveals all variables that can be set. Activated by default. If not activated preset variables and variables with default values are hidden.

    +
  22. +
+
+
+

The Next button can only be used if all variables are set. Proceeding the installer opens the Confirmation page. All tasks needed for installation are shown here with all variables resolved. Only the tasks needed for the installation are activated. Tasks like Project import are triggered at first startup of Eclipse.

+
+
+

Confirmation page

+
+
+

The Finish button triggers the installation process. Once started the installation proceeds automatically.

+
+
+

Progress page

+
+
+
    +
  1. +

    indicates the task that is currently executed

    +
  2. +
  3. +

    the task output. Provides progress and debugging information

    +
  4. +
  5. +

    if activated the installer exits after successful installation

    +
  6. +
  7. +

    stops the installation

    +
  8. +
+
+
+
+

Into an existing Eclipse installation

+
+

The following instructions only hold for OASP4J-like Eclipse installations. Furthermore you need to install Oomph Setup from the Oomph Update site. When Oomph is installed activate the Oomph tool bar via the Show tool bar contributions check box.

+
+
+

Oomph preferences page

+
+
+

The tool bar looks like this: Oomph tool bar

+
+
+
+
+
+

Configuration of the CobiGen Oomph Setup

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/mgmt_ide-setup.html b/docs/cobigen/1.0/mgmt_ide-setup.html new file mode 100644 index 00000000..879a9dd4 --- /dev/null +++ b/docs/cobigen/1.0/mgmt_ide-setup.html @@ -0,0 +1,396 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==IDE setup using devonfw IDE

+
+
+

General Installation Process

+
+
+
    +
  1. +

    First of all you will have to set up the devonfw IDE. Once the download is complete and you started the installation process via the setup command (or setup.bat), you will be prompted for a settings URL. You can just press enter as no special settings are used. Following that, you will simply have to wait and follow any instructions given.

    +
    +

    After you have executed the steps mentioned above, your install location should look like the picture below.

    +
    +
    +
    +Install directory after executing setup.bat +
    +
    +
  2. +
  3. +

    As devonfw IDE by default does not install eclipse anymore, we need to set up eclipse first by executing devon eclipse

    +
  4. +
  5. +

    The next step is to open a console and execute the following command: devon project setup cobigen +This downloads all the necessary files for CobiGen development.

    +
  6. +
  7. +

    Build the project by running the build.sh located in the workspaces/main/cobigen folder within git bash bash build.sh parallel.

    +
  8. +
  9. +

    Optional: If you are making use of SSH private key authentication working with Git on GitHub, you can change the HTTPS default setup git remote URL by executing git remote set-url origin git@github.com:devonfw/cobigen.git (possibly with git@github.com:<your user>/cobigen.git in case you want to work on your fork)

    +
  10. +
  11. +

    Now open eclipse using the eclipse-main.bat file or by executing devon eclipse on the console and import the CobiGen projects you want to work on

    +
  12. +
  13. +

    Switch to the "Project Explorer" view (Window→Show View→Project Explorer). This extra step is required because an import from the default view "Package Explorer" doesn’t work properly.

    +
  14. +
  15. +

    Click on File→import…​→Maven→Existing Maven Project and entering `{Install directory}/workspaces/main/cobigen `(Should be the default location when clicking on "Browse…​")

    +
  16. +
  17. +

    After you have finished your installation run a maven update. To do so right click on a project, select maven and afterwards update project. Select all projects and the checkbox Force Update of Snapshot/Releases. +You might be asked to install some Tycho-plugins. You need those, if you want to debug eclipse-plugins.

    +
  18. +
+
+
+
+
+

Eclipse Plugin Installation

+
+
+

For some parts of CobiGen, you will have to have additional plugins installed.

+
+
+

Plugin development

+
+

If you want to develop CobiGen plugins (OpenAPI plugin, Java plugin etc.) you need to have the eclipse PDE plugin available. +It is not strictly necessary to install this manually as Eclipse should prompt you for installation once you try to build a relevant project.

+
+
+

To install the plugin manually, open a console in your IDE Install location and execute the command devon eclipse add-plugin eclipsepde

+
+
+

When using the default devonfw IDE, you should get an error here that stems from parts of the plugin being installed with the devonfw IDE by default.

+
+
+
+

Eclipse Testing

+
+

To properly test CobiGen in an Eclipse environment we use the Eclipse SWTBot which can automate eclipse interactions in a new Eclipse instance. +It is not strictly necessary to install this manually as Eclipse should prompt you for installation once you try to build a relevant project.

+
+
+

To install the plugin manually, open a console in your IDE Install location and execute the command devon eclipse add-plugin swtbot in a console

+
+
+

When using the default devonfw IDE, you should get an error here that stems from parts of the plugin being installed with the devonfw IDE by default.

+
+
+
+

Optional

+
+

Template Development

+
+

Since CobiGen is a template-based code generator, we have to develop templates. We do this using the template language FreeMarker. +It is not necessary to install any plugin though for easier usage we recommend you install an IDE into Eclipse if you do not want to use another platform.

+
+
+

To install an IDE for FreeMarker, open a console in your IDE Install location and execute the command devon eclipse add-plugin freemarker in a console

+
+
+
+

Script Development

+
+

There are some scripts used in CobiGen development that are written in Python. +You may use any platform to write in python you want, but if you want to work in Eclipse, we recommend to install pydev.

+
+
+

You can do this by opening a console in your IDE Install location and executing the command devon eclipse add-plugin pydev in a console

+
+
+
+
+

Contributing

+
+

If you want to contribute to CobiGen you should fork CobiGen and change the origin of the local repository to your fork. +You can check your remote settings by entering workspaces/main/cobigen and running the command: git remote -v. +Now let us change the URL to your Fork: git remote set-url origin <Fork url> +You can use the CobiGen repository as another remote, to get the latest changes. Check out the following tutorial to do so. +https://devonfw.com/website/pages/docs/CONTRIBUTING.adoc.html#contributing.asciidoc_forking

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/cobigen/1.0/setup-jre.html b/docs/cobigen/1.0/setup-jre.html new file mode 100644 index 00000000..4520c4bd --- /dev/null +++ b/docs/cobigen/1.0/setup-jre.html @@ -0,0 +1,292 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==Set up JRE for development

+
+
+

CobiGen is supposed to support both java 8 and java 11 even though we are moving to 11. Here is a short description of how to setup the execution environment for developing so that you can test both environments.

+
+
+

By default, CobiGen development tools come with some installed JREs in /software/java (11) and /software/java/additionalJdk (7 and 8)

+
+
+

installed jre

+
+
+

In CobiGen, there is a fixed setup of JAVASE-1.8 in maven, which leads to the oddity that no matter which Java version is currently used, eclipse keeps showing JAVASE-1.8. A temporary reconfiguration of JRE in build path will also be overwritten by a maven update.

+
+
+

java 11

+
+
+

Eclipse has a fixed list of execution environments, which is automatically matched with the current most suitable installed JRE, in our case JDK-8 by default. The matching JRE is the actual one, which is used to compile no matter which name eclipse shows.

+
+
+

execution environments

+
+
+

Therefore, to move to Java 11, just set the matching JRE of JAVASE-1.8 to 11 or any version you need

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/dashboard/1.0/_images/images/collage.png b/docs/dashboard/1.0/_images/images/collage.png new file mode 100644 index 00000000..8d2eabd1 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/collage.png differ diff --git a/docs/dashboard/1.0/_images/images/dashboard-multiple-ides.png b/docs/dashboard/1.0/_images/images/dashboard-multiple-ides.png new file mode 100644 index 00000000..f5dd6648 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/dashboard-multiple-ides.png differ diff --git a/docs/dashboard/1.0/_images/images/example/chrome-stable.png b/docs/dashboard/1.0/_images/images/example/chrome-stable.png new file mode 100644 index 00000000..68f662c5 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/example/chrome-stable.png differ diff --git a/docs/dashboard/1.0/_images/images/example/cicdgen-command.png b/docs/dashboard/1.0/_images/images/example/cicdgen-command.png new file mode 100644 index 00000000..31301861 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/example/cicdgen-command.png differ diff --git a/docs/dashboard/1.0/_images/images/example/docker-global.png b/docs/dashboard/1.0/_images/images/example/docker-global.png new file mode 100644 index 00000000..70c99574 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/example/docker-global.png differ diff --git a/docs/dashboard/1.0/_images/images/example/gitlab-2.png b/docs/dashboard/1.0/_images/images/example/gitlab-2.png new file mode 100644 index 00000000..2cf98212 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/example/gitlab-2.png differ diff --git a/docs/dashboard/1.0/_images/images/example/gitlab-webhook.png b/docs/dashboard/1.0/_images/images/example/gitlab-webhook.png new file mode 100644 index 00000000..5d12afb5 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/example/gitlab-webhook.png differ diff --git a/docs/dashboard/1.0/_images/images/example/gitlab.png 
b/docs/dashboard/1.0/_images/images/example/gitlab.png new file mode 100644 index 00000000..bcf569f3 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/example/gitlab.png differ diff --git a/docs/dashboard/1.0/_images/images/example/global-settings-id.png b/docs/dashboard/1.0/_images/images/example/global-settings-id.png new file mode 100644 index 00000000..75aa37b9 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/example/global-settings-id.png differ diff --git a/docs/dashboard/1.0/_images/images/example/help-1.png b/docs/dashboard/1.0/_images/images/example/help-1.png new file mode 100644 index 00000000..eb387525 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/example/help-1.png differ diff --git a/docs/dashboard/1.0/_images/images/example/help-2.png b/docs/dashboard/1.0/_images/images/example/help-2.png new file mode 100644 index 00000000..56edeb48 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/example/help-2.png differ diff --git a/docs/dashboard/1.0/_images/images/example/maven-installation.png b/docs/dashboard/1.0/_images/images/example/maven-installation.png new file mode 100644 index 00000000..401d01cd Binary files /dev/null and b/docs/dashboard/1.0/_images/images/example/maven-installation.png differ diff --git a/docs/dashboard/1.0/_images/images/example/new-pipeline.png b/docs/dashboard/1.0/_images/images/example/new-pipeline.png new file mode 100644 index 00000000..5c0d365e Binary files /dev/null and b/docs/dashboard/1.0/_images/images/example/new-pipeline.png differ diff --git a/docs/dashboard/1.0/_images/images/example/push-code.png b/docs/dashboard/1.0/_images/images/example/push-code.png new file mode 100644 index 00000000..1a816516 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/example/push-code.png differ diff --git a/docs/dashboard/1.0/_images/images/example/repository-id.png b/docs/dashboard/1.0/_images/images/example/repository-id.png new file mode 100644 index 
00000000..6a99f8b7 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/example/repository-id.png differ diff --git a/docs/dashboard/1.0/_images/images/example/sonar-env.png b/docs/dashboard/1.0/_images/images/example/sonar-env.png new file mode 100644 index 00000000..47d4ba81 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/example/sonar-env.png differ diff --git a/docs/dashboard/1.0/_images/images/example/sonar-tool.png b/docs/dashboard/1.0/_images/images/example/sonar-tool.png new file mode 100644 index 00000000..ad15e518 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/example/sonar-tool.png differ diff --git a/docs/dashboard/1.0/_images/images/example/teams-1.png b/docs/dashboard/1.0/_images/images/example/teams-1.png new file mode 100644 index 00000000..afafd9b0 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/example/teams-1.png differ diff --git a/docs/dashboard/1.0/_images/images/example/teams-2.png b/docs/dashboard/1.0/_images/images/example/teams-2.png new file mode 100644 index 00000000..8636fb07 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/example/teams-2.png differ diff --git a/docs/dashboard/1.0/_images/images/example/teams-3.png b/docs/dashboard/1.0/_images/images/example/teams-3.png new file mode 100644 index 00000000..8de6fec2 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/example/teams-3.png differ diff --git a/docs/dashboard/1.0/_images/images/home_page/dolwnload_latest_version.png b/docs/dashboard/1.0/_images/images/home_page/dolwnload_latest_version.png new file mode 100644 index 00000000..f9020857 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/home_page/dolwnload_latest_version.png differ diff --git a/docs/dashboard/1.0/_images/images/home_page/installation_options.png b/docs/dashboard/1.0/_images/images/home_page/installation_options.png new file mode 100644 index 00000000..5a883532 Binary files /dev/null and 
b/docs/dashboard/1.0/_images/images/home_page/installation_options.png differ diff --git a/docs/dashboard/1.0/_images/images/home_page/installation_setup.png b/docs/dashboard/1.0/_images/images/home_page/installation_setup.png new file mode 100644 index 00000000..41991c42 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/home_page/installation_setup.png differ diff --git a/docs/dashboard/1.0/_images/images/home_page/installing_devonfw.png b/docs/dashboard/1.0/_images/images/home_page/installing_devonfw.png new file mode 100644 index 00000000..ca80740f Binary files /dev/null and b/docs/dashboard/1.0/_images/images/home_page/installing_devonfw.png differ diff --git a/docs/dashboard/1.0/_images/images/home_page/installing_devonfw_dowload_completes.png b/docs/dashboard/1.0/_images/images/home_page/installing_devonfw_dowload_completes.png new file mode 100644 index 00000000..eace4193 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/home_page/installing_devonfw_dowload_completes.png differ diff --git a/docs/dashboard/1.0/_images/images/home_page/installing_devonfw_download_location_set.png b/docs/dashboard/1.0/_images/images/home_page/installing_devonfw_download_location_set.png new file mode 100644 index 00000000..ce329eda Binary files /dev/null and b/docs/dashboard/1.0/_images/images/home_page/installing_devonfw_download_location_set.png differ diff --git a/docs/dashboard/1.0/_images/images/home_page/quick_help.png b/docs/dashboard/1.0/_images/images/home_page/quick_help.png new file mode 100644 index 00000000..861c3a37 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/home_page/quick_help.png differ diff --git a/docs/dashboard/1.0/_images/images/home_page/sidebar.png b/docs/dashboard/1.0/_images/images/home_page/sidebar.png new file mode 100644 index 00000000..f9b8a23f Binary files /dev/null and b/docs/dashboard/1.0/_images/images/home_page/sidebar.png differ diff --git a/docs/dashboard/1.0/_images/images/home_page/toolbar.png 
b/docs/dashboard/1.0/_images/images/home_page/toolbar.png new file mode 100644 index 00000000..1ee60cef Binary files /dev/null and b/docs/dashboard/1.0/_images/images/home_page/toolbar.png differ diff --git a/docs/dashboard/1.0/_images/images/home_page/toolbar_workspace.png b/docs/dashboard/1.0/_images/images/home_page/toolbar_workspace.png new file mode 100644 index 00000000..328d76bc Binary files /dev/null and b/docs/dashboard/1.0/_images/images/home_page/toolbar_workspace.png differ diff --git a/docs/dashboard/1.0/_images/images/ides_page/2-ides.png b/docs/dashboard/1.0/_images/images/ides_page/2-ides.png new file mode 100644 index 00000000..0e19c52c Binary files /dev/null and b/docs/dashboard/1.0/_images/images/ides_page/2-ides.png differ diff --git a/docs/dashboard/1.0/_images/images/ides_page/3-ides.png b/docs/dashboard/1.0/_images/images/ides_page/3-ides.png new file mode 100644 index 00000000..934f77c6 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/ides_page/3-ides.png differ diff --git a/docs/dashboard/1.0/_images/images/ides_page/devonfw-instance-dropdown.png b/docs/dashboard/1.0/_images/images/ides_page/devonfw-instance-dropdown.png new file mode 100644 index 00000000..bec45d42 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/ides_page/devonfw-instance-dropdown.png differ diff --git a/docs/dashboard/1.0/_images/images/ides_page/ides.png b/docs/dashboard/1.0/_images/images/ides_page/ides.png new file mode 100644 index 00000000..0fbf05b2 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/ides_page/ides.png differ diff --git a/docs/dashboard/1.0/_images/images/landing_page/devonfw_distributions_dialog.png b/docs/dashboard/1.0/_images/images/landing_page/devonfw_distributions_dialog.png new file mode 100644 index 00000000..615e152e Binary files /dev/null and b/docs/dashboard/1.0/_images/images/landing_page/devonfw_distributions_dialog.png differ diff --git 
a/docs/dashboard/1.0/_images/images/landing_page/get_started.png b/docs/dashboard/1.0/_images/images/landing_page/get_started.png new file mode 100644 index 00000000..b367fdc6 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/landing_page/get_started.png differ diff --git a/docs/dashboard/1.0/_images/images/landing_page/profile_form.png b/docs/dashboard/1.0/_images/images/landing_page/profile_form.png new file mode 100644 index 00000000..9085bcd3 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/landing_page/profile_form.png differ diff --git a/docs/dashboard/1.0/_images/images/merge-combine-vscode.png b/docs/dashboard/1.0/_images/images/merge-combine-vscode.png new file mode 100644 index 00000000..46871bb7 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/merge-combine-vscode.png differ diff --git a/docs/dashboard/1.0/_images/images/project_page/creation.png b/docs/dashboard/1.0/_images/images/project_page/creation.png new file mode 100644 index 00000000..cd0ad071 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/project_page/creation.png differ diff --git a/docs/dashboard/1.0/_images/images/project_page/folders.png b/docs/dashboard/1.0/_images/images/project_page/folders.png new file mode 100644 index 00000000..b95bfbdc Binary files /dev/null and b/docs/dashboard/1.0/_images/images/project_page/folders.png differ diff --git a/docs/dashboard/1.0/_images/images/project_page/installation.png b/docs/dashboard/1.0/_images/images/project_page/installation.png new file mode 100644 index 00000000..93ac431e Binary files /dev/null and b/docs/dashboard/1.0/_images/images/project_page/installation.png differ diff --git a/docs/dashboard/1.0/_images/images/project_page/project_data.png b/docs/dashboard/1.0/_images/images/project_page/project_data.png new file mode 100644 index 00000000..9ebb2809 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/project_page/project_data.png differ diff --git 
a/docs/dashboard/1.0/_images/images/project_page/project_details_features.png b/docs/dashboard/1.0/_images/images/project_page/project_details_features.png new file mode 100644 index 00000000..f80af00d Binary files /dev/null and b/docs/dashboard/1.0/_images/images/project_page/project_details_features.png differ diff --git a/docs/dashboard/1.0/_images/images/project_page/project_details_screen.png b/docs/dashboard/1.0/_images/images/project_page/project_details_screen.png new file mode 100644 index 00000000..1aa88ea7 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/project_page/project_details_screen.png differ diff --git a/docs/dashboard/1.0/_images/images/project_page/project_execution.png b/docs/dashboard/1.0/_images/images/project_page/project_execution.png new file mode 100644 index 00000000..bbfd984b Binary files /dev/null and b/docs/dashboard/1.0/_images/images/project_page/project_execution.png differ diff --git a/docs/dashboard/1.0/_images/images/project_page/project_page.png b/docs/dashboard/1.0/_images/images/project_page/project_page.png new file mode 100644 index 00000000..7529e03b Binary files /dev/null and b/docs/dashboard/1.0/_images/images/project_page/project_page.png differ diff --git a/docs/dashboard/1.0/_images/images/project_page/project_screen.png b/docs/dashboard/1.0/_images/images/project_page/project_screen.png new file mode 100644 index 00000000..d0adef6a Binary files /dev/null and b/docs/dashboard/1.0/_images/images/project_page/project_screen.png differ diff --git a/docs/dashboard/1.0/_images/images/project_page/project_type.png b/docs/dashboard/1.0/_images/images/project_page/project_type.png new file mode 100644 index 00000000..d0adef6a Binary files /dev/null and b/docs/dashboard/1.0/_images/images/project_page/project_type.png differ diff --git a/docs/dashboard/1.0/_images/images/project_page/retry.png b/docs/dashboard/1.0/_images/images/project_page/retry.png new file mode 100644 index 00000000..a8de973a Binary files 
/dev/null and b/docs/dashboard/1.0/_images/images/project_page/retry.png differ diff --git a/docs/dashboard/1.0/_images/images/repositories_page/repositories.png b/docs/dashboard/1.0/_images/images/repositories_page/repositories.png new file mode 100644 index 00000000..e2f70aad Binary files /dev/null and b/docs/dashboard/1.0/_images/images/repositories_page/repositories.png differ diff --git a/docs/dashboard/1.0/_images/images/repositories_page/repositories_search.png b/docs/dashboard/1.0/_images/images/repositories_page/repositories_search.png new file mode 100644 index 00000000..6c16b571 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/repositories_page/repositories_search.png differ diff --git a/docs/dashboard/1.0/_images/images/settings_page/account-settings.png b/docs/dashboard/1.0/_images/images/settings_page/account-settings.png new file mode 100644 index 00000000..d18e3f31 Binary files /dev/null and b/docs/dashboard/1.0/_images/images/settings_page/account-settings.png differ diff --git a/docs/dashboard/1.0/_images/images/settings_page/installed-versions.png b/docs/dashboard/1.0/_images/images/settings_page/installed-versions.png new file mode 100644 index 00000000..a0f8146f Binary files /dev/null and b/docs/dashboard/1.0/_images/images/settings_page/installed-versions.png differ diff --git a/docs/dashboard/1.0/_images/images/wiki_page/wiki.png b/docs/dashboard/1.0/_images/images/wiki_page/wiki.png new file mode 100644 index 00000000..97bad35c Binary files /dev/null and b/docs/dashboard/1.0/_images/images/wiki_page/wiki.png differ diff --git a/docs/dashboard/1.0/home-page.html b/docs/dashboard/1.0/home-page.html new file mode 100644 index 00000000..c07f0ba9 --- /dev/null +++ b/docs/dashboard/1.0/home-page.html @@ -0,0 +1,429 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Home page

+
+
+

This is the main page that you will find after your profile creation and the page where you will start from henceforth.

+
+
+

It contains three sections as below:

+
+
+
    +
  1. +

    Toolbar

    +
  2. +
  3. +

    Sidebar

    +
  4. +
  5. +

    Content

    +
  6. +
+
+
+

Topbar

+
+
+

This section is at the top of the page, it contains devonfw instance dropdown to select devonfw-ide that can be used as a base for the projects.

+
+
+
+Toolbar +
+
+
+

Next to the devonfw instance dropdown, there is a quick help icon, clicking on it will open a popup which gives some tips for how to use Devon Dashboard IDE.

+
+
+
+Quick Help +
+
+
+
+
+ +
+
+

The sidebar has divided into two sections:

+
+
+
    +
  1. +

    User Profile - Users can see his/her pic, name, and role.

    +
  2. +
  3. +

    Links to access to different sections of the dashboard.

    +
  4. +
+
+
+
+Sidebar +
+
+
+
+
+

Content Section

+
+
+

The Content section has also divided into three sections:

+
+
+
    +
  1. +

    A small introduction about the devonfw IDE

    +
  2. +
  3. +

    A button to Download latest version of devonfw IDE

    +
  4. +
  5. +

    A "Project" block which shows the total number of Projects which are available in different devonfw IDE

    +
  6. +
+
+
+
+
+

Steps to download and Install devonfw IDE

+
+
+

Step 1: Click on Download latest version button which is in the Content section. Check the below screen for the reference.

+
+
+
+Download Latest Version +
+
+
+

Step 2: By clicking Download latest version button, Installing devonfw popup will open.

+
+
+
+Installing Devonfw +
+
+
+

Step 3: Installing devonfw popup will automatically trigger one more popup to specify the location for downloading Devonfw IDE. Specify the location and click the Save button to download.

+
+
+
+Download location popup +
+
+
+

Step 3: Once the download completes successfully, the Next button will be enabled for the further installation process.

+
+
+
+Download Devonfw Completed +
+
+
+

Step 4: By Clicking Next button in the Installing devonfw pop up, two options are shown:
+1: Select the Git url for the installation setup.
+2: Skip this process.

+
+
+
+Installation Options +
+
+
+

Step 5: Select one of the above options.

+
+
+
    +
  • +

    If the selection is Git url, then Configuration file url should be filled in the input box and needs to click Next button to start the further installation process.

    +
  • +
  • +

    In case the user doesn’t have Git url, then simply Skip the process and click the Next button to start the further installation process.

    +
  • +
+
+
+

Step 6: Click on the Next button for the final installation process. Wait for some time to complete the installation setup. Once the installation setup completes, the Close button will appear. Just click on it and go to the specified folder location.

+
+
+
+Installation Setup +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/dashboard/1.0/home.html b/docs/dashboard/1.0/home.html new file mode 100644 index 00000000..e77175d3 --- /dev/null +++ b/docs/dashboard/1.0/home.html @@ -0,0 +1,257 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

dashboard

+
+
+

Welcome to dashboard! This document will guide you in using the dashboard as per your needs. But before proceeding to how, let us understand what is it and why has it been developed.

+
+
+
+
+

Overview

+
+
+

The dashboard is your one stop destination for all your devonfw-ide needs. It serves as a UI on top of devonfw-ide.

+
+
+
+dashboard +
+
Figure 1. dashboard
+
+
+

Are you new to devonfw? You can download and setup the latest devonfw-ide from dashboard and get started. Or if you prefer, you can also download one of our older devonfw-ide versions available from our maven repository. The dashboard will be a fantastic introduction to devonfw as it encapsulates all that devonfw offers.

+
+
+

If you are already using devonfw-ide, you can update its settings and software to the latest versions available.

+
+
+

If you are an existing user, you will find all your workspaces spread across your different devonfw-ide versions (2020.04.003 or higher) all in one place in the dashboard. You can also create new (devon4ng, devon4j and devon4node) projects from within the dashboard and manage it from there itself.

+
+
+

You can launch any of your most used IDE’s (VS Code or Eclipse) from your active devonfw-ide instance.

+
+
+

You can also go through the list of devonfw repositories, open any one of them up in the browser or copy their URL for cloning locally.

+
+
+

You also have the devonfw wiki to know more about devonfw right from within the dashboard.

+
+
+
+
+

Motivation

+
+
+

The devon-ide is a collections of tools and software which you can configure and customize as per your requirements. It ships with a number of command line tools under an umbrella devon command.

+
+
+

The main motivation behind dashboard is to provide a user interface on top of this collection of command line tools. So it can do (almost) everything that the devonfw-ide could and more.

+
+
+

It was also developed to serve as a single point of contact for the most common devonfw tasks. You can have multiple devonfw-ide of different versions, each holding multiple projects and softwares configured differently. With dashboard you can manage all your projects from the same place.

+
+
+
+Handle multiple 'devon-ide’s +
+
Figure 2. Handle multiple 'devon-ide’s
+
+
+
+
+

Features

+
+
+
    +
  • +

    More user friendly than a command line tool

    +
  • +
  • +

    Saves time getting to know devonfw and facilitates its usage

    +
  • +
  • +

    Enhanced visibility of your projects, IDEs and devonfw-ide instances

    +
  • +
  • +

    Better version control of all your devonfw-ide

    +
  • +
  • +

    Project inventory management

    +
  • +
  • +

    Connected to devonfw docs

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/dashboard/1.0/ides-page.html b/docs/dashboard/1.0/ides-page.html new file mode 100644 index 00000000..9b01c6e0 --- /dev/null +++ b/docs/dashboard/1.0/ides-page.html @@ -0,0 +1,299 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==IDE’s

+
+
+

This page allows you to open the IDE of your choice.

+
+
+
+IDE’s +
+
Figure 1. IDE’s
+
+
+

It currently lists only 2 IDEs: Eclipse and VS Code.

+
+
+

It will open the IDE from the devonfw instance that you have selected in the top bar:

+
+
+
+Choose your devonfw instance +
+
Figure 2. Choose your devonfw instance
+
+
+

Click on OPEN to launch the IDE of your choice. By default, the IDE will display projects from your main workspace in the selected devonfw instance.

+
+
+
+Open an IDE +
+
Figure 3. Open an IDE
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/dashboard/1.0/index.html b/docs/dashboard/1.0/index.html new file mode 100644 index 00000000..a1c15e19 --- /dev/null +++ b/docs/dashboard/1.0/index.html @@ -0,0 +1,361 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

dashboard

+
+
+

Welcome to dashboard! This document will guide you in using the dashboard as per your needs. But before proceeding to how, let us understand what is it and why has it been developed.

+
+
+
+
+

Overview

+
+
+

The dashboard is your one stop destination for all your devonfw-ide needs. It serves as a UI on top of devonfw-ide.

+
+
+
+dashboard +
+
Figure 1. dashboard
+
+
+

Are you new to devonfw? You can download and setup the latest devonfw-ide from dashboard and get started. Or if you prefer, you can also download one of our older devonfw-ide versions available from our maven repository. The dashboard will be a fantastic introduction to devonfw as it encapsulates all that devonfw offers.

+
+
+

If you are already using devonfw-ide, you can update its settings and software to the latest versions available.

+
+
+

If you are an existing user, you will find all your workspaces spread across your different devonfw-ide versions (2020.04.003 or higher) all in one place in the dashboard. You can also create new (devon4ng, devon4j and devon4node) projects from within the dashboard and manage it from there itself.

+
+
+

You can launch any of your most used IDE’s (VS Code or Eclipse) from your active devonfw-ide instance.

+
+
+

You can also go through the list of devonfw repositories, open any one of them up in the browser or copy their URL for cloning locally.

+
+
+

You also have the devonfw wiki to know more about devonfw right from within the dashboard.

+
+
+
+
+

Motivation

+
+
+

The devon-ide is a collections of tools and software which you can configure and customize as per your requirements. It ships with a number of command line tools under an umbrella devon command.

+
+
+

The main motivation behind dashboard is to provide a user interface on top of this collection of command line tools. So it can do (almost) everything that the devonfw-ide could and more.

+
+
+

It was also developed to serve as a single point of contact for the most common devonfw tasks. You can have multiple devonfw-ide of different versions, each holding multiple projects and softwares configured differently. With dashboard you can manage all your projects from the same place.

+
+
+
+Handle multiple 'devon-ide’s +
+
Figure 2. Handle multiple 'devon-ide’s
+
+
+
+
+

Features

+
+
+
    +
  • +

    More user friendly than a command line tool

    +
  • +
  • +

    Saves time getting to know devonfw and facilitates its usage

    +
  • +
  • +

    Enhanced visibility of your projects, IDEs and devonfw-ide instances

    +
  • +
  • +

    Better version control of all your devonfw-ide

    +
  • +
  • +

    Project inventory management

    +
  • +
  • +

    Connected to devonfw docs

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/dashboard/1.0/landing-page.html b/docs/dashboard/1.0/landing-page.html new file mode 100644 index 00000000..bf2ab40a --- /dev/null +++ b/docs/dashboard/1.0/landing-page.html @@ -0,0 +1,315 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Landing page

+
+
+

This is the entry point of the devonfw dashboard. Click on GET STARTED NOW to start using it.

+
+
+
+Get Started +
+
Figure 1. Get Started
+
+
+

Your devonfw distributions

+
+
+

The first time you open the application you will get a dialog with all the devonfw distributions found on your machine. Click on OK GOT IT to continue.

+
+
+
+devon-ide distributions +
+
Figure 2. devon-ide distributions
+
+
+
+
+

Profile form

+
+
+

Here you will find a screen that allows you to create a profile. This is just for the purpose of customizing your dashboard.

+
+
+
+Profile +
+
Figure 3. Profile
+
+
+

Fill the data and click on CREATE MY PROFILE if you want to create the profile at the moment or click WILL DO IT LATER to skip the creation.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/dashboard/1.0/master-dashboard.html b/docs/dashboard/1.0/master-dashboard.html new file mode 100644 index 00000000..dd8c5b4b --- /dev/null +++ b/docs/dashboard/1.0/master-dashboard.html @@ -0,0 +1,761 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw dashboard

+
+ +
+
+
+

Landing page

+
+ +
+

==Landing page

+
+
+

This is the entry point of the devonfw dashboard. Click on GET STARTED NOW to start using it.

+
+
+
+Get Started +
+
Figure 1. Get Started
+
+
+
Your devonfw distributions
+
+

The first time you open the application you will get a dialog with all the devonfw distributions found on your machine. Click on OK GOT IT to continue.

+
+
+
+devon-ide distributions +
+
Figure 2. devon-ide distributions
+
+
+
+
Profile form
+
+

Here you will find a screen that allows you to create a profile. This is just for the purpose of customizing your dashboard.

+
+
+
+Profile +
+
Figure 3. Profile
+
+
+

Fill the data and click on CREATE MY PROFILE if you want to create the profile at the moment or click WILL DO IT LATER to skip the creation.

+
+
+
+
+
+

Home

+
+ +
+

==Home page

+
+
+

This is the main page that you will find after your profile creation and the page where you will start from henceforth.

+
+
+

It contains three sections as below:

+
+
+
    +
  1. +

    Toolbar

    +
  2. +
  3. +

    Sidebar

    +
  4. +
  5. +

    Content

    +
  6. +
+
+
+
Topbar
+
+

This section is at the top of the page, it contains devonfw instance dropdown to select devonfw-ide that can be used as a base for the projects.

+
+
+
+Toolbar +
+
+
+

Next to the devonfw instance dropdown, there is a quick help icon, clicking on it will open a popup which gives some tips for how to use Devon Dashboard IDE.

+
+
+
+Quick Help +
+
+
+
+ +
+

The sidebar has divided into two sections:

+
+
+
    +
  1. +

    User Profile - Users can see his/her pic, name, and role.

    +
  2. +
  3. +

    Links to access to different sections of the dashboard.

    +
  4. +
+
+
+
+Sidebar +
+
+
+
+
Content Section
+
+

The Content section has also divided into three sections:

+
+
+
    +
  1. +

    A small introduction about the devonfw IDE

    +
  2. +
  3. +

    A button to Download latest version of devonfw IDE

    +
  4. +
  5. +

    A "Project" block which shows the total number of Projects which are available in different devonfw IDE

    +
  6. +
+
+
+
+
Steps to download and Install devonfw IDE
+
+

Step 1: Click on Download latest version button which is in the Content section. Check the below screen for the reference.

+
+
+
+Download Latest Version +
+
+
+

Step 2: By clicking Download latest version button, Installing devonfw popup will open.

+
+
+
+Installing Devonfw +
+
+
+

Step 3: Installing devonfw popup will automatically trigger one more popup to specify the location for downloading Devonfw IDE. Specify the location and click the Save button to download.

+
+
+
+Download location popup +
+
+
+

Step 3: Once the download completes successfully, the Next button will be enabled for the further installation process.

+
+
+
+Download Devonfw Completed +
+
+
+

Step 4: By Clicking Next button in the Installing devonfw pop up, two options are shown:
+1: Select the Git url for the installation setup.
+2: Skip this process.

+
+
+
+Installation Options +
+
+
+

Step 5: Select one of the above options.

+
+
+
    +
  • +

    If the selection is Git url, then Configuration file url should be filled in the input box and needs to click Next button to start the further installation process.

    +
  • +
  • +

    In case the user doesn’t have Git url, then simply Skip the process and click the Next button to start the further installation process.

    +
  • +
+
+
+

Step 6: Click on the Next button for the final installation process. Wait for some time to complete the installation setup. Once the installation setup completes, the Close button will appear. Just click on it and go to the specified folder location.

+
+
+
+Installation Setup +
+
+
+
+
+
+

Projects

+
+ +
+
Introduction to project management in the dashboard
+
+
    +
  • +

    The dashboard manages multiple projects in multiple workspaces that include Angular, JAVA, and Node.

    +
  • +
  • +

    The dashboard provides rich UI for creating multiple projects, abstracting all the functionality which is usually required while creating an application like opening a command terminal, specifying workspace, and executing commands.

    +
  • +
  • +

    The dashboard makes it easy to see all the projects which are in different devonfw-ide workspace, just by changing the "devonfw Instance" dropdown.

    +
  • +
  • +

    The dashboard makes it very easy to open a project in a different IDE like Visual Studio or Eclipse respectively just by right click on the Project folder and open option.

    +
  • +
  • +

    The dashboard also makes it easy to delete the project, explore the project location.

    +
  • +
+
+
+
+
Projects
+
+

Click on the Projects link on the sidebar to navigate to the project’s screen. The screen displays all the projects in the currently selected devonfw-ide, grouped by the workspaces in which they exist.
+Note: Currently it only displays projects created through the dashboard.

+
+
+
+Project Screen +
+
+
+
    +
  • +

    It shows the total number of projects available in each devonfw-ide.

    +
  • +
  • +

    Filtering and searching the projects.

    +
  • +
  • +

    Add New Project - For creating a Project.

    +
  • +
  • +

    Project folder which gives information about the project like which technology the project belongs to, the name of the project, and when it has created.

    +
  • +
  • +

    There are many operations that are available on right-click on Project folder they are :

    +
    +
      +
    1. +

      Opening a project in different IDE ( Visual Studio or Eclipse )

      +
    2. +
    3. +

      Enclosing Folder, and

      +
    4. +
    5. +

      Deleting the project.

      +
    6. +
    +
    +
  • +
  • +

    Users can see projects of different devonfw-ide workspace just by changing the option in the devonfw instance dropdown which is set globally at the top of the screen.

    +
  • +
+
+
+

Click on Add New Project to start creating a new project.

+
+
+
+
How to create a project
+
+

Three main steps are involved in creating any devonfw project. They are:

+
+
+

Step 1. Project Type

+
+
+

In this first step the user has to choose the language technology to start the project with, e.g. Angular, Java or Node and click the Next button for to continue to the next step.

+
+
+
+Project Type +
+
+
+

Step 2. Project Data

+
+
+

After the Project type selection, the second screen will appear for the user to fill up all the required fields. User can select the workspace in the active devonfw-ide for the project in this step. Once the user enters all the required fields, the Next button will be enabled for the final step.

+
+
+
+Project Data +
+
+
+

User can change the devonfw-ide workspace where the project is going to generate, just by changing the option in the devonfw instance dropdown which is set globally in the header of the dashboard.

+
+
+
+Toolbar +
+
+
+

Step 3. Execution

+
+
+

The execution step takes all the user entered data from the Project Data step and executes the respective commands to generate the project.

+
+
+

Execution has divided into two sections:
+- Creation
+- Setup Installation

+
+
+3.1 Creation +
+
    +
  • +

    Creates only source code and notify the user if the project creation fails or success.

    +
  • +
+
+
+
+Creation +
+
+
+
    +
  • +

    In case any network issue or technical issue and the user wants to re-run the Project execution process, then the Retry button will help to start the process again.

    +
  • +
+
+
+
+Retry +
+
+
+
+3.2 Setup installation +
+

Allows user to install the dependencies of application (maven modules for java, node modules for node, angular) by clicking Proceed button.

+
+
+

The installation can be skipped by clicking cancel button.

+
+
+
+Installation +
+
+
+

Step 4. Click on Finish button to go to Project Details Screen.

+
+
+
+
+
+
+

Repositories

+
+ +
+

==Repositories

+
+
+

This page lists the different repositories under devonfw organization.

+
+
+
+Repositories +
+
Figure 4. Repositories
+
+
+

The list updates as you type in the search bar.

+
+
+
+Search Repositories +
+
Figure 5. Search Repositories
+
+
+
    +
  • +

    You can click COPY GITHUB URL for any of the repository list item to copy its github URL to your clipboard and clone it locally.

    +
  • +
  • +

    You can also click the OPEN REPOSITORY button to view its github repository page in your default browser.

    +
  • +
+
+
+
+
+

Wiki

+
+ +
+

==Wiki page.

+
+
+

This page displays the documentation of devonfw. You can also find it at https://devonfw.com/

+
+
+
+Wiki +
+
Figure 6. Wiki
+
+
+
+
+

Settings

+
+ +
+

==Settings

+
+
+
Account settings
+
+

Here you get a screen that allows you to create a profile. This is the same screen which you see during the initial setup of the dashboard. It is completely optional.

+
+
+
+Account settings +
+
Figure 7. Account settings
+
+
+

Fill the data and click on Save if you want to create the profile.

+
+
+
+
Installed versions
+
+

The installed versions subsection allows you to manage the different versions of devonfw-ide available.

+
+
+
+Installed versions +
+
Figure 8. Installed versions
+
+
+
    +
  • +

    It lists the devonfw-ide you have installed in your system, along with the ones available for download from our maven repository

    +
  • +
  • +

    If you want to install specific version, you can search it here and DOWNLOAD it

    +
  • +
  • +

    To check the release notes for a version, simply click on Consolidated list of features

    +
  • +
  • +

    For the installed versions:

    +
    +
      +
    • +

      Hovering over the eye icon shows you the path for the devonfw-ide in a tooltip

      +
    • +
    • +

      You can view it in your system explorer by clicking the eye icon

      +
    • +
    • +

      You can update its settings and softwares by clicking on UPDATE

      +
    • +
    • +

      You can also UNINSTALL an installed version, after which the dashboard will no longer keep track of the projects and IDEs belonging to that devonfw-ide

      +
    • +
    +
    +
  • +
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/dashboard/1.0/projects-page.html b/docs/dashboard/1.0/projects-page.html new file mode 100644 index 00000000..9f12886f --- /dev/null +++ b/docs/dashboard/1.0/projects-page.html @@ -0,0 +1,447 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Introduction to project management in the dashboard

+
+
+
    +
  • +

    The dashboard manages multiple projects in multiple workspaces that include Angular, JAVA, and Node.

    +
  • +
  • +

    The dashboard provides rich UI for creating multiple projects, abstracting all the functionality which is usually required while creating an application like opening a command terminal, specifying workspace, and executing commands.

    +
  • +
  • +

    The dashboard makes it easy to see all the projects which are in different devonfw-ide workspace, just by changing the "devonfw Instance" dropdown.

    +
  • +
  • +

    The dashboard makes it very easy to open a project in a different IDE like Visual Studio or Eclipse respectively just by right click on the Project folder and open option.

    +
  • +
  • +

    The dashboard also makes it easy to delete the project, explore the project location.

    +
  • +
+
+
+
+
+

Projects

+
+
+

Click on the Projects link on the sidebar to navigate to the project’s screen. The screen displays all the projects in the currently selected devonfw-ide, grouped by the workspaces in which they exist.
+Note: Currently it only displays projects created through the dashboard.

+
+
+
+Project Screen +
+
+
+
    +
  • +

    It shows the total number of projects available in each devonfw-ide.

    +
  • +
  • +

    Filtering and searching the projects.

    +
  • +
  • +

    Add New Project - For creating a Project.

    +
  • +
  • +

    Project folder which gives information about the project like which technology the project belongs to, the name of the project, and when it has created.

    +
  • +
  • +

    There are many operations that are available on right-click on Project folder they are :

    +
    +
      +
    1. +

      Opening a project in different IDE ( Visual Studio or Eclipse )

      +
    2. +
    3. +

      Enclosing Folder, and

      +
    4. +
    5. +

      Deleting the project.

      +
    6. +
    +
    +
  • +
  • +

    Users can see projects of different devonfw-ide workspace just by changing the option in the devonfw instance dropdown which is set globally at the top of the screen.

    +
  • +
+
+
+

Click on Add New Project to start creating a new project.

+
+
+
+
+

How to create a project

+
+
+

Three main steps are involved in creating any devonfw project. They are:

+
+
+

Step 1. Project Type

+
+
+

In this first step the user has to choose the language technology to start the project with, e.g. Angular, Java or Node and click the Next button for to continue to the next step.

+
+
+
+Project Type +
+
+
+

Step 2. Project Data

+
+
+

After the Project type selection, the second screen will appear for the user to fill up all the required fields. User can select the workspace in the active devonfw-ide for the project in this step. Once the user enters all the required fields, the Next button will be enabled for the final step.

+
+
+
+Project Data +
+
+
+

User can change the devonfw-ide workspace where the project is going to generate, just by changing the option in the devonfw instance dropdown which is set globally in the header of the dashboard.

+
+
+
+Toolbar +
+
+
+

Step 3. Execution

+
+
+

The execution step takes all the user entered data from the Project Data step and executes the respective commands to generate the project.

+
+
+

Execution has divided into two sections:
+- Creation
+- Setup Installation

+
+
+

3.1 Creation

+
+
    +
  • +

    Creates only source code and notify the user if the project creation fails or success.

    +
  • +
+
+
+
+Creation +
+
+
+
    +
  • +

    In case any network issue or technical issue and the user wants to re-run the Project execution process, then the Retry button will help to start the process again.

    +
  • +
+
+
+
+Retry +
+
+
+
+

3.2 Setup installation

+
+

Allows user to install the dependencies of application (maven modules for java, node modules for node, angular) by clicking Proceed button.

+
+
+

The installation can be skipped by clicking cancel button.

+
+
+
+Installation +
+
+
+

Step 4. Click on Finish button to go to Project Details Screen.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/dashboard/1.0/repositories-page.html b/docs/dashboard/1.0/repositories-page.html new file mode 100644 index 00000000..40315327 --- /dev/null +++ b/docs/dashboard/1.0/repositories-page.html @@ -0,0 +1,297 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Repositories

+
+
+

This page lists the different repositories under devonfw organization.

+
+
+
+Repositories +
+
Figure 1. Repositories
+
+
+

The list updates as you type in the search bar.

+
+
+
+Search Repositories +
+
Figure 2. Search Repositories
+
+
+
    +
  • +

    You can click COPY GITHUB URL for any of the repository list item to copy its github URL to your clipboard and clone it locally.

    +
  • +
  • +

    You can also click the OPEN REPOSITORY button to view its github repository page in your default browser.

    +
  • +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/dashboard/1.0/settings-page.html b/docs/dashboard/1.0/settings-page.html new file mode 100644 index 00000000..9313d51e --- /dev/null +++ b/docs/dashboard/1.0/settings-page.html @@ -0,0 +1,338 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Settings

+
+
+

Account settings

+
+
+

Here you get a screen that allows you to create a profile. This is the same screen which you see during the initial setup of the dashboard. It is completely optional.

+
+
+
+Account settings +
+
Figure 1. Account settings
+
+
+

Fill the data and click on Save if you want to create the profile.

+
+
+
+
+

Installed versions

+
+
+

The installed versions subsection allows you to manage the different versions of devonfw-ide available.

+
+
+
+Installed versions +
+
Figure 2. Installed versions
+
+
+
    +
  • +

    It lists the devonfw-ide you have installed in your system, along with the ones available for download from our maven repository

    +
  • +
  • +

    If you want to install specific version, you can search it here and DOWNLOAD it

    +
  • +
  • +

    To check the release notes for a version, simply click on Consolidated list of features

    +
  • +
  • +

    For the installed versions:

    +
    +
      +
    • +

      Hovering over the eye icon shows you the path for the devonfw-ide in a tooltip

      +
    • +
    • +

      You can view it in your system explorer by clicking the eye icon

      +
    • +
    • +

      You can update its settings and softwares by clicking on UPDATE

      +
    • +
    • +

      You can also UNINSTALL an installed version, after which the dashboard will no longer keep track of the projects and IDEs belonging to that devonfw-ide

      +
    • +
    +
    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/dashboard/1.0/wiki-page.html b/docs/dashboard/1.0/wiki-page.html new file mode 100644 index 00000000..8e3db3aa --- /dev/null +++ b/docs/dashboard/1.0/wiki-page.html @@ -0,0 +1,278 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Wiki page.

+
+
+

This page displays the documentation of devonfw. You can also find it at https://devonfw.com/

+
+
+
+Wiki +
+
Figure 1. Wiki
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/Home.html b/docs/devon4j/1.0/Home.html new file mode 100644 index 00000000..a1e93612 --- /dev/null +++ b/docs/devon4j/1.0/Home.html @@ -0,0 +1,440 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw for Java (devon4j)

+
+
+

Welcome to the Java edition of devonfw. devon4j is documented by a platform guide (see the side-bar of this wiki) to be used in your projects.

+
+
+

You will find the latest stable versions of documents generated from this wiki here:

+
+
+ +
+
+
+
+

For contributors

+
+
+

Contributions and improvements to devonfw are more than welcome. Please read our contributing guide to get started.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/_images/images/REST-Inheritance.png b/docs/devon4j/1.0/_images/images/REST-Inheritance.png new file mode 100644 index 00000000..154de4c0 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/REST-Inheritance.png differ diff --git a/docs/devon4j/1.0/_images/images/Security-AccessControl.drawio b/docs/devon4j/1.0/_images/images/Security-AccessControl.drawio new file mode 100644 index 00000000..e0d6d484 --- /dev/null +++ b/docs/devon4j/1.0/_images/images/Security-AccessControl.drawio @@ -0,0 +1 @@ +7V1tc5u4Fv41mdn9EA964+Wjk7R7M9NsM83d2e2nDrWJzRZbvkBe3F+/AiQwEhhsI4P3kuk0QcgC9DzPkXTOEb5Ct6v330J3s3ygcy+4gsb8/QrdXUEIsGOyX0nJlpcYFs5KFqE/52VFwZP/0xMVeemLP/eiUsWY0iD2N+XCGV2vvVlcKnPDkL6Vqz3ToHzVjbvwlIKnmRuopX/683jJS4HpFCf+4/mLJb+0Da3sxMoVlfmTREt3Tt92itCHK3QbUhpnf63eb70g6T3RL9nnPtaczW8s9NZxqw9s//6MHvzP7h/e8mfw4Xr9+zW+5q28usELf+AraAasvZtnyppldx1veVeY/3uh4sR1lAI1ZRWAuXkvTrK/Fsnv+zm7Kz/eXl8ld2W6qw0rzP5nJdPZzIui6wd3zXpv5aUX+uV++vCruDh7iuz6WWu8A/NbgSF9Wc+95MEMdvpt6cfe08adJWffGBFZ2TJeBewIsD9fvTD2GaLTwF+sWVlMN3mbyTnvvbZHQY4TY7hHV14cblkV/gGbI8u5jfnhW8ETbPCy5Q5FTFHocm4u8pYL+NgfHMFD0HQq4JS6zpszfvNDGsZLuqBrN/hQlN6UO7eo84kmHZd26d9eHG+5WN2XmJY7nHVfuP2Lfz49+JocTIg4vHvfPXm35UfZvSY3uB8Q9jz0JZx5e3qCP3fshgsv3tdjsBrh0Avc2H8t30gVXulHp2HobncqbKi/jqOdlh+TgoI4kOAScwCwy9JtqA8NLHElu4OCOfmjHE8mqHDpC2X8YIpXOOVGm8z2PvvvCXFuNl7os+t7IStjl2LG2nssina54nJRzhjE6bncTCaMmLvRMmdixBTurxf/TVmIWIG/So23+H3nrxbsSQP/O/vfnSXwfZv7IbszmmD18SXywm/ubMboHUeT6HXRjRUAVhkcBzGiy4aAVNgBbIuK3RsCexCGoENBOy0FfaqeT+p21fz+wTh34ZLpUCmGVVYKVHQCck3sKoVoGzBVI/cbY/3mwiHr0LpBRxqqLEcBDaFzQoZrIVNt3AhZcmzifiGDqsqGPRo1TxvNLqaNyjwPQXPisB8bIwMjYR45jMgxJgbCmK0wiW1CYUtF69kIyhssADt0aoqkRQ276m5zjfWxZUl80TA1BaTWBqDRBlTZAIRRz2bbrIUMj5ClyoFlyDBRp0fnhcyqhYyMkFVBRkjV2u+8Y22VR68HF9C7H/+18/fXnb8L709yIJw/A3AatV1jZsNP56M/xCgd/bFpEwchhO0SuQCyJpaz8yM5D2omAOqIDaQZPZEayp7/5JkEgKh8nQYnFzjQKSbXR2apvianGBqsvLhUhiwwMW9rVpilRWEIlR0QyLKP0xBG0lgNpYY60pAyGzf3a0KuT/hqvfY55AGsXF+ThtQVfDdBp7n3StfPb0n/bjaXFUoicGixJKiusf6F/v/A/e4FN+
7sxyLlwS0NaHLPa7r2OgIWl4EdQnQA1q/FLhnbDif2pOj9ofg9Yf1qbPR71oDWv+dTjcONjqoG0Hp3VUE1jDe6qhpA691ZhYxa0EZnVQ1oQ3BXqbApaA04YynHu+NsB94JzdkOmjxRaL8nCnbkiTIkjw/W44nKV04tPUsN9fWsitFAHLddKOEY91SH6gGt5aMnjNsoH9yJfAgx1HY1KUi6lJSAeGh9TQqCg1BQWQlHyqlvBeG2CtLkqDUl/tjOkRrBDQ0dLJDSdKcL1g4jovCvYG3b8EK2LO+etVa1t/5w1qKGhvpnbZUPX2LtyMmdvJdmTjpaOIklTor8rJMtqdJQDSc7Y5waj9DMuPYsaAQXGXrAJeV4JrGUoEF7eBXvgBwc1Q2wGpV49MKVH0U+XV9dbmjih7c9V7DJBlL4V3XKVYWakFVPvNOccmrMogSpquAR0gZIgYgD9YapGtIoYXqpcY0eMc07rTdM1YhHCdNLDXv0iakF+sUUq571EqaXGhXpEVOEnZbJG/pQVb3EJVSdEdWDJ0nC19QbpqrfMk/HmUwvFNCOg5emlNjoqNb1rHFLrHrtCszUJMMRs6S2rQrtvKDVbx3Fo9CqQUNiI1lvoNXv9cOj0qpBw3bPKVRY9eUoSA04mN2zzxi29RlnM4fO3YrEdkrxaymp0iATu4PwtSnH+BwykW61o+i1ieT19v7otVwfcCe+1ug1Vn1lo2Q0SAbpkYyzVzIgO5v/4CMlIwcYDWcCtUiG2FAW/RkkMIy3Fl2qBNpuX82mwZ1LwDTkSYh5JMsdJbsYSU1pGhikl1c01wdn2C6HL/ulfhejCj2ptMTGMpnFe10O1oVd3lBNkNTQQFRBjDOkyxLVvT2qorUqhMuxOXFBT4asJW13BSICdagmbCCPFScn+NXcsbl3TdBYn4eD9GpiICnkl/lygvaa0JPzakmTe3DsOGGLKJBoiEgNDUQSAJ9hWS2evW9N1PDbGvpAgXoWhZy/Lc95WosCKTu67fPIQlo6N8uCnGGvBBlG1vmlTp9aq0JPorkldjidrAo5a8UcpiZEH+nVRIuc9lETdb0n8hKbNaEn0V0JXIp3PxyqCctS3kgApKY6UoWiPvsM7iTSIo9+ZHkteY2WLMc1EdtTWY6B8o6TIzd0KFMr0G5Dx8HuJMmWN72TrKG+JlUMI2DdpT222zJVz94UYoMJLiJe0ksnkWlNzCPjA2x9uxtNs53GlnvfLkcaNkJYCtkuI4+lx7RNLBTaV9omadgIYY+YHoyp0HJvmA471jZ4b1Hb1wZoGnMsw5gYxU/5tRsYgnISxpHTJsfcN/pYprwDU9MSWoo9N9Znd6Z/GmWOUblzyIdocrbCffJh8yp8unxssE8++aA+MPFgfAb/kzmQ8N2Fiqftyz+IJp8sRPXiIRBOjhUM+yhBjLEGNExDzizENpMsQNAxEcDYbrll/2D5ACnzxNj/AjVli9U5MkKE63pHPvfTB1Zwp+4SGOTEe+7G7nc38s41+wZSwl6eXbq7a8CsmH4DAW/n82/hs+3bBHJTVpivrztnqk1ZxLQWT5NvnE7YErhsGTgTxR/9QIOHp+3LRzJhdG7wsORwz/MXDp5RG7LDXWmqdweOqXrNp5tN4M9YD9J19n3Wt5SNhzRgv579hULb6M1fBW6q3eT7CASDK21M9dcMzJZ+MP/kbulLgjYj1uyHOLpZ0tD/yZp1Bc9S3nGCQ7NU4yn5JGdt6CVfqvAo2Aekogf3vVTxkxvFvGBGg8DdRP73/DFWDCR/fUPjmK54Jf2v15dIaIqg/o4Ny7+ne9eGycnJ3ZmwKk9y8qXnjx++PNw/Pd1//v0byL7F4ikO2XihMIX1RpwiGNIfntRlz8ySSEWCPYH3HNdyh49Nn9I6d7go+cK7JCmi7LPPQWrAlv587q1TWxonw1J6bwmiXGLsRskN+8d69jaxkoTd+C07BsUx+5dUD2MmCPYsrp8i6jEGvXlRLMkAt+NCvTRVhu
TxkXaEgNoI0WJjT+CnUGaQ/8nvFByF94ohl46CpenINVBIgFQSoArAUwk/0shPDB0rC7O6EhGasNYGb8Urjqr1rgvdqj0ridwTi7f5RUj8Npf7ZDL5NZN/adTge1r/T02BNnpUvNvhzOofiEf5iBlt4Qgo1v5fd8/VOALOPxOGbR1ndWw59YXPjjQLkTdWt50KQ0uezpw6Ea719JWvw3VSO0XfX//QBT07DGny1V1F9dDdLB/o3Etq/AM= \ No newline at end of file diff --git a/docs/devon4j/1.0/_images/images/Security-AccessControl.png b/docs/devon4j/1.0/_images/images/Security-AccessControl.png new file mode 100644 index 00000000..47ba4321 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/Security-AccessControl.png differ diff --git a/docs/devon4j/1.0/_images/images/Security-AccessControl.svg b/docs/devon4j/1.0/_images/images/Security-AccessControl.svg new file mode 100644 index 00000000..9d73ce5a --- /dev/null +++ b/docs/devon4j/1.0/_images/images/Security-AccessControl.svg @@ -0,0 +1,3 @@ + + +
Identity- & Access-Management (IAM)
Identity- & Access-Management (IAM)
Role 1
Role 1
User 1
Use...
Group 1
Group...
Group 2
Group...
Group 3
Group...
Group 4
Group...
Group 5
Group...
devonfw app
devonfw app
Role 1
Role 1
Group 1
Group...
Group 2
Group...
Group 3
Group...
Group 4
Group...
Group 5
Group...
Permission 1
Permissi...
Permission 2
Permissi...
Permission 3
Permissi...
Permission 4
Permissi...
Permission 5
Permissi...
Permission 9
Permissi...
Group 1.A
Group...
Group 1.B
Group...
Group 4.A
Group...
Group 4.B
Group...
Permission 7
Permissi...
Permission 8
Permissi...
IAM DB
IAM DB
ApplicationAccessControlConfig+ PERMISSION_1: String+ group(String, String...): AccessControlGroup
Viewer does not support full SVG 1.1
\ No newline at end of file diff --git a/docs/devon4j/1.0/_images/images/T-Architecture.drawio b/docs/devon4j/1.0/_images/images/T-Architecture.drawio new file mode 100644 index 00000000..2057f849 --- /dev/null +++ b/docs/devon4j/1.0/_images/images/T-Architecture.drawio @@ -0,0 +1 @@ +7V1bd5u6Ev41WWufB3shieujHTdJ09yaS5v0jdjEpsHGxSSO8+uPsAGDECBsbia0e3XHgmChmZE+fTOaOULH049TS51PLs2RZhxBbvRxhAZHEAIewiPnP2602rTIXsPY0kfuTduGO/1Tcxs5t/VNH2mL0I22aRq2Pg83Ds3ZTBvaoTbVssxl+LYX0wh/61wda5GGu6FqRFt/6yN74r2FtG0/0/TxxPtmICqbK1PVu9l9k8VEHZnLQBP6doSOLdO0Nz9NP441wxk8b1w2v3cSc9XvmKXNbJZf+K7ecSe913dzcH71Y9m7A+rd7w6Q+c1z3lXjzX3lIyga+In9Z6fT9sodCfHfm9PT/os5szsv6lQ3sER7R86Xiup0jq8cQYR4/H/V0lUj0urcuf53/YDFWtDOrwNx/uFfs7UPu6Ma+ni2uTjE76ZZ2y/HP42d/1/qQ8tcaNa7PtTwbc6v/nds6Pju/3m9xwPx7N3uCsB/F4hlMXd+HL494//1lxPd1u7m6tBpW2JFxm0Te2rgT8AZCPNtNtJGF89+gzp8HVtO6/WbbegzzW0fqdbrNX6MbjvaznU5IdwI163OnQvbMl+1Y9MwrXWPELf+41/xdA24I37iDvjgTDPeNVsfqu4F12LAeuA3AzcwtBfnrRfuJeepL7phBL7tBDp/cfu7ZjkPM3rur9rm3B+voGq52ubcrn0EmlxVO9XMqWZbK3yLe1WE4uZXVt5M4JrBcmtFkHPbJgELgqLbqLqWO/afvVVu/IOr35l0HTVA13utrtdN1721KkHVRVCyqsOmqPod/qhZrarXUtV5kaLqtFldhIWpOpIaoOr9VtVzVfWF093Z+GL9nAHMC9NIhPbzbJhGBGJR2g+aMNEft9pff+2XeZCq/UguSvs/x6rEP4gfQmd5/XY1/rj90G87IFb3naGNV39Xe4GCVZe0iN5a+1F/uBntXlC2I93ShrZuOtpt2Phq39BsrOAdd8BxM7d+pKNTHW8MesAxMkeVTE+3eiBqEwGzJRosZ5TcxmPDfBtRr9ANx2/bjEiMPdHVyNNH14qxti1GH98HuEWQFIpSrv/g9rGljpz9iXdtZjoW1veMdmFrs6Fu/Pfx6+ViaC+Wg5ulevbETXo3J2A0v8F95Y7vVtaTYWEzMu5e5CPYHx0b9/178Pt++uv9qvf3/A2NbXAzgMO3v9cvDzf/Ls3HD+tFfnr8NxosP2XVfDvhxMvL5cevm8H9EryP4I2pwBP5RoJTvAvsv7yJs1cFf9UtnrxOHvGIniynfwRrOboB33/9wB9fuRPp4Ua9Ee5nAr7c/2Yg8/kP4s9vpOkZvF2q6v3z9UWPf9eN4WqsaKuVeC+erVbq5cPn8IpfzUb/0C2SLvGz+ki+O3LsFfZX1+iXMHy8l7BJnLyvHk9v5cFt7/LH+cPPFX+y6L/ensonE/1Wnl7+uhxY4+/manFnLtXl7bTP/Ty/vXoaf5+a9qUm9L7//Xlzs5j97YFvN9/HvV/H+uvrQrVPB6/ImF5ePPzEX2ctFubyyeo9mhcX/dPVo/5vcju3rl7+/AOvD2eX9/f4Hum09/b2+HqNnk5XtwKcrBaTn/a/kz/j0dnnxUvv9/fLp9eTn2PZ7F18H1w83Zgn6vH75E9vacN/2nVPHVqXj0+n3AN+IbBc6T3x+uJuunxEL5Pf1gt/vpxNniZnl+jsu3LuDPON8h38OjF+v9
z37Dt79W/+2X+Zmv/Witx/fP9x/+3EGPx4vb/qqfxSRn/PdNz+CidnxvWPn/LNj4vh4FT/ISwvZ8bvPxz/4/n49Idxp93Or39+vzsWXn6bVytVuPsBb81fY/Vj/OvsXb7jnvBDBuNvfz4/n09nV8On7yMOb6H/WdKrNRn+eX8cnz58/P4xld7O9F/f/4F3Xryf3ukLUx8ZT5+Xv/gfA6Vn6obyjtXpZE3iocH/EtcBQ33WjL6/0BC2YG2WJGc9Ca9VtKUsj8kbhYG7FMXtPKJtUeWicLtwkMDl2wdunjnfwt2t8EQ2dX4Awfl4c98azbjgxpwt3qYtsokiG5iEbHJBLAQxI0sRrQcKReuLU/qDZCCpSg8Tlf7GMt/1Uav05Ss94AicTtV6Gk7fQesf8B7u+vmv43iD3HrF2wGG+2qZiogvzLE+7Fyoq7WWZkO7WC9eN52b2LbjPOytIcfJWLcnb8/doYm1+mSkvZuzl6X3E//XebxhPjuvrS7WxnEyMod4Op/Z6nojgB/geCg7xrprhtO1rroY6jq+DUs20LzpRuL0IAZF684nATMJg4YUKylDpenbBkK5e9JAOuEK25UKShjY8FFtRwJF2wGPMqs7/hjQ+EQLcCgNfO1YXWiLUhTwbaEN8ZcFlc9tYlE8VL7iwT0UL27nuf41t9+gXHpEIRRR5ijTLk0RpezTLkUP2fCHKFDwB7EYOyvpnH1A/NAL9dl7Apc4UIATw7BMBhSTFaMDxQsFoTJRTB+VTCaQpJ47QYeI4UTlkyxv9vm0yyFZRrwiAF4QIZIjooFSVxEBEAWOlwUeiV4ESTmiojl7vpqoXLsRu0gSouKBXQ4oPMALoSBh2xIo4pG7IixMQnIrIU9CUOnCXSVUmIC89SZJQNps1HOC5/CnoaEuFvqQtsZ7Y4iySGOkLiZr0TvP0T50+zHw85OjEl3B/TT4cDVk/WHlfZh5YYJw3RPVsgOfsQit1eP2Mc7HJ/838YftQ9efvKfGLngL880aaqnYCe+mVWusJemP7EpUG4VCDqPaE9AVGmDw2izNwBDwXQt1l6Yu7jfcmPp6N+apLoIotA77M7n3jM2ru7+21brokwAKYx/EEU/ajE3kSVjH1FXgtrlzwyKhy5BgdGQeJveM/AWe51JeJfkX8A+bTm/tzxfc7iaJ5OiydnZ/f3MXNUysOxebXUbQHqOeGgL4TvXRyFgTzxredbtQzdF7d8jxw4X+kTCgGGysZTNOm/58szdyxCYNEL+f8nu3mC8vC80mRJuLMGWaV5KKsR03gLuLQ4OOw8lk2YlkB96AiMmEUhR4Qzk64XhtuS9F1Cg1UuMNQ58vGGhBdTHfBIC/6B/OAsO2JIlpAAEREMR34KRLLlk92MNOwrN0RGTe9inEbkhFiYxhB1lTkYWJiRLlWJl50WL7CVkN36z3LRzbwj53eALCwhdPdKcDgyiXgwJYjus6VEgQz0E5BdHhDzeapeO3dhavLTrbPI7jQ0Cui7YNMWBu/Yl8Yka51gWldYXwnE2wRawYDe/aww8CSiEQTST7Kyd3CybenxVupfkjynSykd6MgTlV9VmS98IZhoJZ4tGmExE/RbB905HEmUWRAhpwsI6KKA9McA0Drof/5uSoIFhPhcZ6VuCoKMo0SPV3HCLfZrZu645DZPsc7/dIZ/KtNjcXum1a6/srMpe/czVoJZrT/xWLfcjl20fD/SkKZbNSuT9FUdIRVvH+FHKkKA7/Uv0pgGOgGA+TA/YF3hSHCuAY6IrGy6rWHhXAMdAkX0VEtXSpAE6JCMTZP3pLp2nZE3NszlTj27aViAHe3nNhOqvqWlB/NdteuQOvvtkmsSP3d9ze/vopcCXWe0Js8dP29cycTPmuF8C5s2mq70Vxb0zd1e+7XecIp4qi+Dv4lJ32/rvbnKPt3GjOThSb1zICzz1OFt3bhi4wgHdeLB+8Fx6F940bSN+Ki8IjNreCJHSjszSQqNtbnjSQ/CPxXFUuJx
LP0hZ2UP/+qh8da8GieO2ucW9FlMKKKCKZpohV7xv5GsThKeRQ8bShKnXjyDcW6PJNC8TjaSdZvpqoar1t5Bm8f19FQrXcNcoMYceFLwIofFjIT95Smc+aIcC3pvEFjNrri515KRAIHykdW0eFVFwQCAPt3QYWxMi9JoEFeNWSRcj5f+SQimFoISnbPxyJI5hDQ7sABp6jCGFNxogTgO1Vb8nKOSZhPc9v39RV1rgui+GbwV4BCYxuJJaQ7HpOetSgqvxnwsqWIzkqiJbYLZzY9VBAKq/Ls/K6/nFRIrxbSlgj6fODwofnMFEgiIOd+V3GHAIsCKm48xu+bgZX4qBmAk4I6qYctxCHD4LsqGLJh0F2V0AvO0m6Y4Gr1aquKAQRCwjlZF24/bXY3xjk5cVgPEYWXRIbcyLCt+C9t28dDGJlKRz+0YH7adI+JyTSPFSLuTo7ys9D1Vft4YQpH8Tmi0v0Rj2vuxbxRQWaN91Iye5Ywbn8wl1Rx1yfO+4V5QEAHF9fX1SZwZZr40iKnUxRPb581WuYMwqrYnhqrqk3CvivVmUYIxAIKpKWyaXUMEZvFJrHoIMdPKv19kcB2FiHVBZh1dojBWBjXVI7yKhqnxQ9o7I3CQeTA3tZf3lxTb8H+C+fBMT77ij7RLREUzWE9+4kJZGAGjar9f2aTgs09E3bxrg91OYu6YGWW3ckuaRdYUDJYjK54g69rP9EGL8d6bsAWRe7zjJno8g50DGeco+n/jkUphAUbkcuIvokL1AkfzKCbhVeUS+aVQgQ5GUVQsgqhIZZRd4scio3J/G1sQUy9YLESbvaApHeUyJ3CkXbghB1iWxXiA3b29pCIStE2V4YnjW6vj6GphD+nT3sDAkF2Vkbxl9KGL875TaMPC02jl/0gFWQO90GQ3gH0SrnUZ2kC4cR0w9gBakSGsajisSkLiI+TSurp1QZ0mAWTalK5LjxqMsljlvJ/CpDGOGh0kFyjBAPll9FDDFqzRdWvflVxODH+TIyqppfjRERrMG6AMIUgqCkraelxlwCxODKqWk4LKs6+2pwuIcA/FSs7SmADKcAfMnXJGCwPQZQn2MAwKOcDnDi2+ccQJbZsLpFSYyKoj0JUHyKF8SaXh941blZzwKIRE1MXgI0GMQ+l0qQOBnAU3HVoRHIm6prB0If51KHDVSRiePAC7GR1DGtEFsMWdwWYgtonlK+5jWOLg5vb2tZic3vQLUEcdhmK6/E5oefNZDB8gXeGEqYWsL7ywmr3pQwteL0V5VRPSnhOhTllACB3qouDQRYinLWlAthVeDs1TnrRwKzlHtsSeA4ybckcEsCR3SDISqiphPfXiRwhtmwukVJioqiJYGLJ4E9MJBOAnsnbotO9C2BcEC/DIkltugwfAlFVHF7JIVfC6YNwy/oSMouBrmH9kPWbEj+2lF9IL7krVp+ID4xKbOihsiDxLLi8GtQLC6T06UEbjun8nGuH6Bh/hW/Wlwp/hVa/bgq/CtlpTRxTOfL1I8DEijfQBruBlLkaGrj6t1AEgOHXrwbiBypyisB+PC2gby1L/LGOIK8859fW1j1dgRJDF6GLyOjejqCpDbEsBJ2SWLeX0usxeH3ZpfC3LiiEBi+4Oyr/pAcIAmdA3SUgEQ42hRqcaRyXW1S62pLdLWx2maMy1vx9smMPrcM1ozkriwGfFvh7xXwEO9KjgEUdqoRTwZdwXvLnP1oEuLDvjHqWMb3W2b/7YL8bFLj/Ww7zHvpJlGUwy3GyxGVScDL0SYbapCXw/PcMaCw2no5ZLKcxK5eDsXLjVBWUQkGbFFCUQlwFAQaon+NHWiUUFVii5xAksYfWukImQtXrpYFuDMu8RcOjxPhxEJQiEwk6ZKFZNwhE4kkBTH5frLWC3F/MchEiAKT5pS+iEvjkJm0xiYtK2A/zS+y1EVBWdxqeeguv5xtCgyIqc6O4VjjSs7algNIh2T2Dmp9C6RQXcEl1bco5ARqLVU/l/OmkDuUOi
/Z1N4/cZqL2hOZPWnlIbwFoQHxDztECzUiNghuqO/G2YIfHVSALdBigWIWgFxsgZG84WlHeDbaOadaizuyPZcLm+JNgqM560/rsXNbrM0bBZvWY97jsHX4Tc8u/eK24rG1OzrWnZn3q+/e920ENtIt/IaOSuMe2NYRLQop0wIH+PUXh+eA3treUX+4UY1eUCmjPTA0G+tYx6WK/HfBpqx1PMH2gDOtOBtc07nNdr4FxHc9WArt2JzOzZm2fpdoCFUPRttia6rFLMF+8zw8GQUJPar5kBF4fopxRDqfXT5vbKkjHb8KyZVO1LnzRXjumg1147/Zn/7FwOyNB99ub7hPdPd99VdbcncPt+M73Jn+xZvyevnHfLyYLK+Ua9xgv732DOtqfj4Gd5+DM/7nb8cgbsdXT+M/969/e/zD6/L8YXz6iMUD++LgEXxevEzN1XT0aALpYyJ8Tm/PxZ887P8+fhlC8/xl1MOj2X96x//0bpTjZ+n6HJ3hh8LzU3B2dfqJf5T5V+N+cvPreIh7/r8ME5nDT8QQhO54hOfPwFxJm0qjgVnhgK79pzIeEtXaKAXogUhjPXYI02Kdt6KnPALqB7OpH42qz1FCOYhAIEkRb4ORFvzFFyaApBIDKMYp0tp//vYf1a5ki8kQ0hZRMZpjqUAVS6jowq8heKtih6piq1gVo6wjhbkuPVqermFCq2GHr2HlqRidLZWjnoIURB+t7pYt5jO32E6K7rOeDcgbgkIWCEo7WbMLBI2jkRzJHEpJC6UCtrJhh1MgkXtSRKCWlYEVhsj+oo+nIHKs+Kh9lns4RWluLL0v8MYcTlEYEgk1X1j1PpyiMISbfxkZ1fNwisIQIVb0SgAVwvWt0FbNclPCKFEMHhmXmoYRM6tw5mJGtctTBj1M3x6eoPsgkyVfkxDFNk9ZzN0V5CmDHEOq3ppOfHvlKcswG1a1KEGOIor2JGnxJ0k9MJB+hsE70Ml6Wo0M+eBFOSmUjz5NII4sUAHIh+SeTWlNbB1OAn7I8YHxa8mtBPieQG6RCfhRN7igRhmCyokuyDFQOsUTXTA8biBt3EqlvSDXWCZlK/6m0F6QayylkkVYtaa9IMfAqXwZGdWS9oIcA71TPO0VXhckKW1dKHe3ARi4lZruBJnVWYkR5gFRYIBhw95SYHGSbymwlgKLWBQ82IlvHwosy2xY3aIUzY/eUmDFU2A+GEilwFymhZ0CQ54r1Y9Tz0yAQYXM2k9FUiUl3YCAj+hom8O/mdltPL433Sy8RaU4s9gh0Q0iUnRJHJ9kOKw4I/JYQSzEHqtOXr4mwb9M9nK4qY3Scug5cuiKRLWMWnHogIFyLJ5DJ3OZC12+Rhw6aC7lB7IfAqs5hw4YyL/mC6veHDpkIB+/jIzqyaH7KLndimffp8Sulelbcc8SGPYc7hpY6lacyLAtK1Vuxb2xOkAqLw+0qUQTnqfhpnKdF5ChHsxXdl5kTH5OuhQVRLW+3FOhIyCH/BcEWOfDDowdWQX8Nl2AuNgvCbsv2ALgsjovEEjMj04d7vj3ScyWnvSsghwfkOFYYE1ny6Jyp2e3qHL9IR68azOpN51r9h0r6bgPFu6CyYFrlunhLvtyzQoUSuWanWxqB3LCH2/2A2rQEri7rAc85AlIXdMz/hDWgLYVoqNV9Sl/fy1uILMEG0fVwuZStRmEVW+qFjWXqs0uo3pStZ73rMq1AE8fZOxs9ef8oXc25wC3vKxKjDKDnPoFOaOWJ9whyBmx0odtkPOXC3JGjef69p4Nq1uUoqRe61ktwbPqgQEGhi2jZ5XnyI3wTif9eUWJ7KdLOOvvPP2Azvqjluban+YiMeAhnPZHtaC8RHLkanbeHzWXAEONI8BQcwmwDMKqNwHGN5cAyy6jehJgfC0IMHJlqNuJf775ZBjfADKMb8mwHcgwviXDWjIsTje+KBmWYTasblFqybBKyDAPDKSTYSj7MQNyTQViIviLY8M4cqtd7cF/rzJqe/C/8c
GYHgGcbh3e2lKwdewQkckrkQ1JPuf/KQ9uagYA5y2+TgaATS21llnPlVk/hBwAPANnXDyzHhm5mmUB4JtL1voK0BhmXWgua5tBWPVm1gUG2vbLyKiezLpAYUra7Xnx23OBOfOYVwC39O25TAarVJwMQGDgzWvK8uUBPMmTrDVLBSAwcLBf2auRMRVA1NtYVjIAAaAuCrg2yAwUYd+GGH48M8UgS12RORkAobQ5+TUEzvmWfJIBYOvc9VkF+UQ8V9EBzpZFJQPYxabKdZYIiQx0mw6gQQw0YmagPUs+BAY6p6wA0QcXlReA0TBF5kJUjjq6fC0adJRELckhsSaR+kH2ckdUNoWJDOWoarruMAguRTuYl6JwwnMxIrNysbvIQA9/ZeyeLPWaRCSFI5CIX2edeJXQQ2DOwDuk7pEueF8KCYUuAh2LDFR/TWcpKjouY+qqbkmRorL5whxleJITUqa4PSCqyAxRmUOI9pzjeC9W34M7jCGPuc0bnte2GlUER8HVMmWtbBRdzp48zZvZi1ZF5JDh4TVXRnIXCIFNCpNuRp8sSV2efLLUFcUIXZayuuen9ZCyWm6CXxzdOIoN0lmstcYJ0QHi/CMaf3NsmYtF5/jNtvXZOBBPs3lqMKRmZ49hVs/gECunAwVpYSxh7+OAc/5SaIxN+Ek4SGV/9gqFpz7BKxwY2jXQ4CWvxOt2zMJcdVxWZu0SaNp1p+FNyzo0KkaxKorfWrj9CgZxLfy+wnRzRAFFacO4djEmAkeIAHRlxsAtESUd/M89dEti8KsVHboFyOGCHG24yo3WkhgouQMNLvFl3phoLam5ReozCKve0VpSc2vTZ5dRydFazQBcF+Z4nATkDxVvyS3eyhlvyXyXskbR8VbC1jx/tMXAC5eOthRYPdqSmxtu7cu8MWhLbm7cdQZh1RttyTRO7avKqEVbO6CtS3O2PolYP8A19XsWhFzTQH/TQZfcklw5gy5J4NlJLq5UkkuuIckliYiGUUuGXc0lueTGkVxyc0muDMKqOexqLsmVXUYt7NoBdt1b6myhDh3A04kmiDhTZyMjAMm2V+oG0uzteyyCMC3Q3pn4b5MK2DCmagFbvoBNxssZM0tWKmBDHAPDUDZgk5FQOU/mlzNs3vqylXlTABviGE4UN19YtQZsiGPYGn4ZGbWAbQfA9u1jqM0PHq5p3luEwJrfmg2qtXnA8oZqilRTbg1xDIe7yoZqCidWzq0hrrHpvrYybwxUA431P2cRVr2hGmis23kHGbVQbQeodqwOJweBxoabjgah2NDrezr+ArDFX/niL0VUaHU9agC/AAPHUDr8kqrGXqC5u3pf4M3BXo11QmcRVs2xV2N9zzvIqMVeO2Cv3nCoLRYUjuwY/7ZlGvUHZermDYab/gaxWfgKE0RrY/73hmjhpI4Cqi1Eq0XQPzFafNUQDTaXcfEF3hiIBptLvWQQVr0hmieSVkb1g2h4AZwxQSdqGpMA0HMhk6HjEetcqCsHrtDgkN+2+eYyqatN3wynbyH+KtDOgpBgBQH6O2V0cbWbFfkQVnXMDbiMWTxjVxHRK8rnGYHIRQERLYcLIDPx7YaH4qwAvxs3wHsNc7w4gsdRoH9pjjTjiFYea/PD7HkxD5pBYOswnZszzcsjm6ZRfIu599UwIcz0IQ51ucAfyAbAyyxOhSADvVQ0/JaEsGUiIHSFxHErGYw3l97xxd8cMN7YMwZZhFVzMM4QFfNlZNSC8UMH40howXjGVQSCdDDOc1QwXgw6+hyrEv8gfgid5fXb1fjj9kO/7QBv8qSWBBHXSVbbkiBpJUF2SuVcdpJb5NVuSE1yCxBr6vlsSW53KAEieZ32gLNI7FRZM8+TDxIUwsoKLy8X9QKc3d/f3EXMzxn0i82qFcoCTyaQjVVlS8NrmLszcVTGHWb8cKF/JAwos2fsNBvVtuRZZP+dUofr8gK11sCuCZW9W8yXl4VWiGwRT6PHN6v+nIo0XGH23OVqinVPn3
U2n3rctsXaTF3BpvVS1uMwNPGbnt1p0G3FErA7Op6fZt6vvnvft1kHR7qlDTfp/HuGbR3t7fQF/PqLwy7n3tq9jPrDzfzZC07o0R4Ymo3VuuNO2f67YHykdbzVsbdeGp3p0nRus51vAfFdD6Itnyna0EwEj9SD0bZY2JbmlJ6HEV5wYaWiEhK6eKsvj0jc766rY0sdOfCNLNwwUefOF2FAOBvqxn+zP/2LgdkbD77d3nCf6O776q+25O4ebsd3uDP9izfl9fKP+XgxWV4p17jBfnvtGdbV/HwM7j4HZ/zP3w6quB1fPY3/3L/+7fEPr8vzh/HpoxOE2BcHj+Dz4mVqrqajRxNIHxPhc3p7Lv7kYf/38csQmucvGBDgO5/e8T+9G+X4Wbo+R2f4ofD8FJxdnX7iH2X+1bif3Pw6HuKe/y8jsRazUPs1K4JrdGAmpeFTCjTMOb82D8PUGfAKbgbJMpGGB8liVPntUj2+mAb+eEQC5BT1o9UNyVFCOYhAQGEMAL0Dmmm8G1+YAFCSAGIqtLT2n7/9s5IfnsVkYBOjxAeluk2BKsYnqRjfqtgBq9gqVsUo60hxq0gChcAjodWww9ewwlQsjqt1EHkdfMZCcFxan/FuwFcgiq1Cr+J2rfzEnou2Sj+xEBkr9zB9hb5hoblZTHyRN8Y37DnRvraw6u0bFhgCUr6MjEr2DTN6xSglQbdeMW7t2Wm9YgV4xWKX1nTvluDeWb3PiifqNu7ssyIfVL7PKhpp1ByflSeVfHxWCnL9ezV2UwkMUX6BCWtoqIuFPqRtYjLXdeVDE4LEPiOM1MUkUBd+N6f5do6FxAwMj4KFa0E+U1CODvZyarsLRFgvL2yLz2adtETPr+GT7DlXeffJ/PDkyLsFTGP7Rfhfst4viMn3SyDx/qNCCs0jIRpneIgzdMp8lccMLXCKGBJQ/cMKKCEjVczXWcqCH9B07dMoBzddi8RUI0GhKxF7Iea4KEWineDwMrkICO+yIldznsrJqVZ2sdRhT82eL7+ZU3PciczGTc30zQOIzsRbnxdELUHAQhDsujyUSywwLxOQqw0FgYgZjydjylmXB1g5BdEMgJs8i+RDQQCuvrNojCOaRkgkxc2GA2XxLXhUNubNERGztGub0FnninOwJ3zNj6ElLoeDaUNPDQax4o9OHOv+uZP4wHfvnsLJfcA29JbbrgtJ3XcvRYJxiUEJR+U63d4E5nrXt+G5R+tJMYcIXdBG6H7t4JbiI3Qhsa2gRuhS0yfsEKEbG/iC+7hb4AvFarLFumyAThvrso8KoXCarXpGuiCGA+RFR7ogYqSqj3NBzT2yjeJcXQcb54IYqNHmC6vecS58lAD7ujKqOs4lRkQJcS4NPADkH4aozwEgOUkA7QGg+sXOZ19Ko1Zf7gEgJUnF2gNAh6xi9TgA5G1o2gNAjdWwilUMyDVI4keiB8FzslS1ZQVy7mn7ckPSFItn1M6trJuyWwVy7hn7DkxOtd6oAjl35ucwxVPPPaoYn1yH2atGzeR3bJmLRef4zbYDNeEiXqNS9cD3UidrAgYUA875G8tu5+0bQWHXiB/qkOoakbMT202qd6M/64bjCa2ouI2j116qSNyiGVg5ksvbuB2OVrfx3wSmWywM6FLrSNrF3oiKeoira3EbsYb1B93qShX6ksTmHsMVG1d/UGzuedwMwqo1REdicxPUZ5dRzfJpHwYe+2ZZpkUpP3imzkZG0gagMkCmrXs8cfoXzt4dutDRZx11Nn4zVIsJm0ktNssXm/kxP6nIDJWLzBhYjbKRmT921SGz5gaO+AJvDDKTmhtBkkFY9UZmUnNLQ2aXUS0ZVEg5iRizEjj+RheFoEEnYymM7A41BYVWB9q+nRYrUZibEVK5ZmKk8HP0+UJLV2R1MV+v2YMX/cNRfjYNFtM0OCYoiEFwKdrBXgUuJDUxin6kqMyAVJjMGBio4Zv1HjiWTh65DAhte7o8imPXxzr9w4eKoBwFD8ZDOf
lovPPhRrN0/NoOxU8egeT4o+AZyC7aNsQek8efyCdmlXpNjrRTKz5kPayohB5C1rTc97B6SN0jXfC+FBIKXcDJcsjCLtZ0lqIGiZYxdVW3pFBkEzgablr2xBybM9X4tm0lon6291yY67Pcjtz+ara9csWhvtkmMY3tVJKInBfTJkNmVdnOc+FJTkiZ4mLBR3p2D28pTz22XVTmuGg6N9JNgYiYz4LPUiNJrFIVs6SRKUUVSyu8xZ5BwJvZi1ZFpHCh+rPEWSwEuoIcuCow6Wn0W2TyuXgjFHguR+h/zDqfl/7L0WIb7UxMU37AhdVfzoA2WY1Chow24aWfLtokAPAO9XhQTmADkOnqGedAGah4q6xikAZP7oYTbariy73Nd+7iXHhZdtaOg6NoddCxNtMsx9lyMnr2/Qsj9+uDroWR36V0V4IkB0Yz6krwIrmHK8PJn2ChdCz6vDG2i2e/wQ+Lvn6znawDbvvC1ehtRhw2LwQZdB5MTrN7sFYkNKvH9bJRJ7FbbUgkcgGAozDoCjUOJDOipfgasqjuHn6x0lVXaVW3aNUVEUxXXVAP1T0+HNWVuVZ1i1ZdmVfSVRfR4l3zUV02POttFlo8W+p2ziN0UpGryIpcPZ3kiAQ0EpnUkwHFckSNQ5kx/XJuJIPM4EIpPOwgdiSrogE9pTlAyjYqpxTBs9fjJGodCbQY5zJ9SRJDJGjrSooTfF6uJPbZDopdWYRbPinsRZK6iqRs/3BknAqrWwl0AQw8RxHCWit3BQC2V8mD/DnlSF7HEAQSMcPELovhm12/eqFeKalhTqkCpr2qFh/kkW21WpMVHlDm+1IHBnAMMZI1VVpm/YQxEjycZRkAhgCmdl2Ok3yD12UUcFJxStjb1K7LrvEcbkzbfgsz+8RX3cIcdUluM+eI68kncxmBbRkYSuWUUF2VBlUSCJaE2b3sTK4eTi/1a3oAiswai1d43YAIfQQ4MjMt8+zMITz+RFwfAEKo2gxbYEF+3FRSpfrW3GpgbuXW9vBTVqTbqKTUxUYlIv4acKTTgbn2E4y4OVDJBhmNSGsN8gsbJPRCzRgMUq6LQfIiEaPBkUVymAv+ilxX5BzDdP8KhH3yedkn/miZph28HWP9yaa4APr2fw== \ No newline at end of file diff --git a/docs/devon4j/1.0/_images/images/T-Architecture.png b/docs/devon4j/1.0/_images/images/T-Architecture.png new file mode 100644 index 00000000..9293afa5 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/T-Architecture.png differ diff --git a/docs/devon4j/1.0/_images/images/T-Architecture.svg b/docs/devon4j/1.0/_images/images/T-Architecture.svg new file mode 100644 index 00000000..bd0dd80b --- /dev/null +++ b/docs/devon4j/1.0/_images/images/T-Architecture.svg @@ -0,0 +1,4 @@ + + + +
Microservice B (Client)
Microservice B (Client)
Microservice A (Client)
Microservice A (Client)
Microservice A (Server)
Microservice A (Server)
Microservice B (Server)
Microservice B (Server)
Microservice C (Server)
Microservice C (Server)

Cloud
Cloud...
External System S1
(Service Consumer)
External System S1...
External System S2
(Service Provider)
External System S2...
Logic-Layer
Logic-Layer
C UseCases
C UseCases
HTTPS
HTTPS
Domain-Layer
Domain-Layer
C Entities &
Repositories
C Entities &...
Service-
Layer
Service-...
C Services
C Services
HTTPS
HTTPS
Batch-Layer
Batch-Layer
C Batches
C Batches
Service-
Layer
Service-...
B Services
B Services
Logic-
Layer
Logic-...
C UseCases
C UseCases
Domain-
Layer
Domain-...
B Entities &
Repositories
B Entities &...
HTTPS
HTTPS
Service-Layer
Service-Layer
Logic-Layer
Logic-Layer
Domain-Layer
Domain-Layer

Component
A2

Component...
Component
A1
Component...
A1 Services
A1 Services
A1 UseCases
A1 UseCases
A1 Entities &
Repositories
A1 Entities &...
A2 Services
A2 Services
A2 UseCases
A2 UseCases
A2 Entities &
Repositories
A2 Entities &...
Cross-Cutting
Cross-Cutting
Security
Security
Logging
Logging
Monitoring
Monitoring
Transaction-
Handling
Transaction-...
Exception-
Handling
Exception-...
Caching
Caching
Access-
Control
Access-...
Client-Layer
Client-Layer
B Dialogs,
Model & 
Components
B Dialogs,...
Client-Layer
Client-Layer
HTTPS
HTTPS

Component
A2

Component...
A2 Dialogs,
Model & 
Components
A2 Dialogs,...
HTTPS
HTTPS
HTTPS
HTTPS
HTTPS
HTTPS
HTTPS
HTTPS

Component
A1

Component...
A1 Dialogs,
Model &
Components
A1 Dialogs,...
Cross-Cutting
Cross-Cutting
Accessibility
Accessibility
Error-
Handling
Error-...
Database/Schema A
Database/Schema A
Database/Schema B
Database/Schema B
Database/Schema C
Database/Schema C
Viewer does not support full SVG 1.1
\ No newline at end of file diff --git a/docs/devon4j/1.0/_images/images/appcore_usecase_crud.png b/docs/devon4j/1.0/_images/images/appcore_usecase_crud.png new file mode 100644 index 00000000..e684041d Binary files /dev/null and b/docs/devon4j/1.0/_images/images/appcore_usecase_crud.png differ diff --git a/docs/devon4j/1.0/_images/images/batch_layer.png b/docs/devon4j/1.0/_images/images/batch_layer.png new file mode 100644 index 00000000..d5587683 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/batch_layer.png differ diff --git a/docs/devon4j/1.0/_images/images/component-facade-example.png b/docs/devon4j/1.0/_images/images/component-facade-example.png new file mode 100644 index 00000000..895d9bd0 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/component-facade-example.png differ diff --git a/docs/devon4j/1.0/_images/images/component-facade-with-use-cases.png b/docs/devon4j/1.0/_images/images/component-facade-with-use-cases.png new file mode 100644 index 00000000..362010f1 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/component-facade-with-use-cases.png differ diff --git a/docs/devon4j/1.0/_images/images/csrf.png b/docs/devon4j/1.0/_images/images/csrf.png new file mode 100644 index 00000000..605cda00 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/csrf.png differ diff --git a/docs/devon4j/1.0/_images/images/devonfw.png b/docs/devon4j/1.0/_images/images/devonfw.png new file mode 100644 index 00000000..94dbda05 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/devonfw.png differ diff --git a/docs/devon4j/1.0/_images/images/eclipse-debug-aop.png b/docs/devon4j/1.0/_images/images/eclipse-debug-aop.png new file mode 100644 index 00000000..7136ef7f Binary files /dev/null and b/docs/devon4j/1.0/_images/images/eclipse-debug-aop.png differ diff --git a/docs/devon4j/1.0/_images/images/eclipse-debug-step-filters.png b/docs/devon4j/1.0/_images/images/eclipse-debug-step-filters.png new file mode 100644 index 
00000000..95f9b982 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/eclipse-debug-step-filters.png differ diff --git a/docs/devon4j/1.0/_images/images/eclipse-m2e-create-devon4j-project-parameters.png b/docs/devon4j/1.0/_images/images/eclipse-m2e-create-devon4j-project-parameters.png new file mode 100644 index 00000000..910df367 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/eclipse-m2e-create-devon4j-project-parameters.png differ diff --git a/docs/devon4j/1.0/_images/images/eclipse-m2e-create-devon4j-project.png b/docs/devon4j/1.0/_images/images/eclipse-m2e-create-devon4j-project.png new file mode 100644 index 00000000..5bb7e375 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/eclipse-m2e-create-devon4j-project.png differ diff --git a/docs/devon4j/1.0/_images/images/guide-logic-layer.png b/docs/devon4j/1.0/_images/images/guide-logic-layer.png new file mode 100644 index 00000000..fdbbac9c Binary files /dev/null and b/docs/devon4j/1.0/_images/images/guide-logic-layer.png differ diff --git a/docs/devon4j/1.0/_images/images/i18n.png b/docs/devon4j/1.0/_images/images/i18n.png new file mode 100644 index 00000000..a7e49cf6 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/i18n.png differ diff --git a/docs/devon4j/1.0/_images/images/integration-levels.png b/docs/devon4j/1.0/_images/images/integration-levels.png new file mode 100644 index 00000000..d418cfca Binary files /dev/null and b/docs/devon4j/1.0/_images/images/integration-levels.png differ diff --git a/docs/devon4j/1.0/_images/images/integration-levels.svg b/docs/devon4j/1.0/_images/images/integration-levels.svg new file mode 100644 index 00000000..33216208 --- /dev/null +++ b/docs/devon4j/1.0/_images/images/integration-levels.svg @@ -0,0 +1,315 @@ + + + + + + + + + + + Small icons + + + + + + + + + + Small icons + + Sheet.17 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Page-3 + + + + + Sheet.2 + + + + Sheet.16 + + + + Sheet.5 + Web-Server (3) + + + + 
Web-Server (3) + + Sheet.6 + Client (4) + + + + Client (4) + + Sheet.7 + External Systems (Service Consumer) + + + + External Systems (Service Consumer) + + Dynamic connector + + + + Dynamic connector.9 + + + + Dynamic connector.11 + + + + Sheet.12 + Batches (4) + + + + Batches (4) + + Dynamic connector.13 + + + + Dynamic connector.14 + + + + Sheet.19 + Service-Layer (3) + + + + Service-Layer (3) + + Sheet.1 + Application Code + + + + Application Code + + Sheet.10 + 3rd Party Code + + + + 3rd Party Code + + Sheet.18 + Logical Layer (1) + + + + Logical Layer (1) + + Sheet.20 + Data Access Layer (2) + + + + Data Access Layer (2) + + Sheet.21 + Cross-Cutting (1) + + + + Cross-Cutting (1) + + Datastore + DB (2) + + + + + + + + + + + + + + + + + + + + + + DB (2) + + Dynamic connector.4 + + + + Small icons + + + Sheet.22 + Filesystem (2) + + + + Filesystem (2) + + Dynamic connector.23 + + + + Sheet.24 + External Systems (Service Provider) + + + + External Systems (Service Provider) + + diff --git a/docs/devon4j/1.0/_images/images/jwt_flow.png b/docs/devon4j/1.0/_images/images/jwt_flow.png new file mode 100644 index 00000000..f1139470 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/jwt_flow.png differ diff --git a/docs/devon4j/1.0/_images/images/kafka-architecture-internal.png b/docs/devon4j/1.0/_images/images/kafka-architecture-internal.png new file mode 100644 index 00000000..bf069df9 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/kafka-architecture-internal.png differ diff --git a/docs/devon4j/1.0/_images/images/kafka-architecture-internal.puml b/docs/devon4j/1.0/_images/images/kafka-architecture-internal.puml new file mode 100644 index 00000000..c7c9b042 --- /dev/null +++ b/docs/devon4j/1.0/_images/images/kafka-architecture-internal.puml @@ -0,0 +1,33 @@ +@startuml +skinparam componentStyle uml2 +allow_mixing + +package "Client" #lightgreen { + component MyClient +} + +component "Kafka Broker" #grey + +package "devon4j application" 
#lightblue { + package "service layer" { + component FooService + MyClient .down.> FooService :http (e.g. 100 receipts) + + } + package "logic layer" { + package "FooUseCase" { + class FooUseCaseImpl + class FooConsumer + class FooMessageProcessor + FooUseCaseImpl .right.> "Kafka Broker" : enqueue (1 message per receipt) + FooService -down-> FooUseCaseImpl : (100 receipts) + "Kafka Broker" ..> FooConsumer : dequeue (1 message per receipt) + FooConsumer -> FooMessageProcessor + FooMessageProcessor -up-> FooUseCaseImpl + } + } +} + + + +@enduml diff --git a/docs/devon4j/1.0/_images/images/kafka-architecture-internal.svg b/docs/devon4j/1.0/_images/images/kafka-architecture-internal.svg new file mode 100644 index 00000000..d21131cd --- /dev/null +++ b/docs/devon4j/1.0/_images/images/kafka-architecture-internal.svg @@ -0,0 +1,62 @@ +Clientdevon4j applicationservice layerlogic layerFooUseCaseMyClientFooServiceFooUseCaseImplFooConsumerFooMessageProcessorKafka Brokerhttp (e.g. 100 receipts)enqueue (1 message per receipt)(100 receipts)dequeue (1 message per receipt) \ No newline at end of file diff --git a/docs/devon4j/1.0/_images/images/kafka-architecture-service.png b/docs/devon4j/1.0/_images/images/kafka-architecture-service.png new file mode 100644 index 00000000..5367b970 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/kafka-architecture-service.png differ diff --git a/docs/devon4j/1.0/_images/images/kafka-architecture-service.puml b/docs/devon4j/1.0/_images/images/kafka-architecture-service.puml new file mode 100644 index 00000000..f49803c7 --- /dev/null +++ b/docs/devon4j/1.0/_images/images/kafka-architecture-service.puml @@ -0,0 +1,26 @@ +@startuml +skinparam componentStyle uml2 +allow_mixing + +component "Kafka Broker" #grey + +package "Client" #lightgreen { + component "MyClient" + MyClient .down.> "Kafka Broker" :enqueue +} + +package "devon4j application" #lightblue { + package "service layer" { + package "FooService" { + class FooMessageListener + 
class FooMessageProcessor + "Kafka Broker" .down.> FooMessageListener :dequeue + FooMessageListener -right-> FooMessageProcessor + } + } + package "logic layer" { + component Foo + FooMessageProcessor -down-> Foo + } +} +@enduml diff --git a/docs/devon4j/1.0/_images/images/kafka-architecture-service.svg b/docs/devon4j/1.0/_images/images/kafka-architecture-service.svg new file mode 100644 index 00000000..8e79e35b --- /dev/null +++ b/docs/devon4j/1.0/_images/images/kafka-architecture-service.svg @@ -0,0 +1,52 @@ +Clientdevon4j applicationservice layerFooServicelogic layerMyClientFooMessageListenerFooMessageProcessorFooKafka Brokerenqueuedequeue \ No newline at end of file diff --git a/docs/devon4j/1.0/_images/images/kafka-retry.drawio b/docs/devon4j/1.0/_images/images/kafka-retry.drawio new file mode 100644 index 00000000..98ed6c7e --- /dev/null +++ b/docs/devon4j/1.0/_images/images/kafka-retry.drawio @@ -0,0 +1 @@ +5VnLbts4FP0aA+0ihkRZDy9jx51ZpEAwmWKabga0RElsKVGlqdju1w8pkXpRTtzGjqeIsgh5+L7nvkhPnGW2+4PBIv1II0QmwIp2E+dmAgCwbVf8k8i+RnyggIThqIbsFrjHP5ACLYWWOEKbXkdOKeG46IMhzXMU8h4GGaPbfreYkv6qBUyQAdyHkJjoPzjiqUJtb942/IlwkqqlA+DVDWsYfksYLXO13gQ4cfXVzRnUc6mDblIY0W0HclYTZ8ko5XUp2y0RkbLVYqvHfTjQ2uyboZwfM2Axu3tIv5abv1a2hT65X3Y3C37lqWkeISmRPodHxISLtSgksrCk+abMENMNYommrenMNPJuIeSC8ui92XQcUsmK7zU/Kc+IKNmiLWK0+BuyBMnzWgKIMSFLSiirujoRREEcCnzDGf2GOi1eGKB13Ez/iBhHu4NytBt2hNYjmiHO9qKLGjCzlIIrhfcUv9uO9gQKSzuKY+uOUGls0kzdsiYKirifIRGMkDgQo9C+QhZDmhU0r466+Io432ulDzTQKPvsfBJ+QhcPyt02xAxGpKwxhgjk+LFv5GOSV+vdUSw21yx2BfweyX5/AhrHG6GFQ+KaTf86l77JJZhK94q+l0ggQ15RJLyZqlLGU5rQHJJViy4qF4UiZTJtn1tKC2VYFe/KNcOSUwF1zE6wxvaf1fiq8iArU1dXb3bdxpu9qtV7lRt82tLEeWjJQvSEVFwlf66t/6AOueM6dLQ6vMgQ9TY75HFa4NC0xsp8tOmBvry1qWa7RIbcaUzoNkwh49MIMxEC/40gh6LfNsUc3RewktxWdDXtNXbl36i9Vp8cQXPewevvNJ5SuMqeEdkz13SV1ogRn81T+ma4s6V1ofy3s66qdocYFpJB7AwmNz/S5Gqlv5jJzZ9PYO4Yjcrw6ARmSbCQ1vuXZCIiCwThaJyMvLXneqexr6uhfTlHpiLzs2Ui1mtlIqeS8BNK9dYzEcfg0ml9ZXXVYtXxtOJczHG2vvKh5yov4zg999hcJbik4/Rdg15X0ktoIofGkmAs+CBySzHERHD1G7F8CUJ965KE
eiaht3jDUS4D36+HMnsNbQTGHK1leavrD2YKqV5BTnPZBoMQ55ohbj7ifN2zRTjvtSLcuQX/U/entxX5ZgbHs94d3HpXICbCX6bjn5ksvqZrtHuOceo/e3XYYd6ME+XOhUPU2kGy0h1zzqAZHBs0X3rBr4ZeMwb3nQ6F1LzNYVV03b4bctzBW+sz/e0gGChqvYOTqq2W4fD14Wo8RXsbbxCOM2AOjNyRXvcNwrwj3cqc60AQiWhYZnUMGRN2h7N17VBu1xr4XxHhDS6rYIQI/zQ8iGr7s0ptXe1vV87qPw== \ No newline at end of file diff --git a/docs/devon4j/1.0/_images/images/kafka-retry.png b/docs/devon4j/1.0/_images/images/kafka-retry.png new file mode 100644 index 00000000..41ebcd56 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/kafka-retry.png differ diff --git a/docs/devon4j/1.0/_images/images/kafka-retry.svg b/docs/devon4j/1.0/_images/images/kafka-retry.svg new file mode 100644 index 00000000..64917882 --- /dev/null +++ b/docs/devon4j/1.0/_images/images/kafka-retry.svg @@ -0,0 +1,3 @@ + + +
Consumer
(Backend)


Consumer...
2. dequeue
2. dequeue
topic
topic
1. enqueue
1. enqueue
Producer
(Client)
Producer...
3. enqueue for retry
3. enqueue for retry
5. log if finally failed
5. log if finally failed
Listener
Listener
4. dequeue (perform retry)
4. dequeue (perform retry)
topic-retry
topic-retry
Log
Log
Viewer does not support full SVG 1.1
\ No newline at end of file diff --git a/docs/devon4j/1.0/_images/images/microservices/microservices.pptx b/docs/devon4j/1.0/_images/images/microservices/microservices.pptx new file mode 100644 index 00000000..a40b1565 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/microservices/microservices.pptx differ diff --git a/docs/devon4j/1.0/_images/images/microservices/microservices_01.png b/docs/devon4j/1.0/_images/images/microservices/microservices_01.png new file mode 100644 index 00000000..62a80050 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/microservices/microservices_01.png differ diff --git a/docs/devon4j/1.0/_images/images/microservices/microservices_02.png b/docs/devon4j/1.0/_images/images/microservices/microservices_02.png new file mode 100644 index 00000000..eda653cb Binary files /dev/null and b/docs/devon4j/1.0/_images/images/microservices/microservices_02.png differ diff --git a/docs/devon4j/1.0/_images/images/microservices/microservices_03.png b/docs/devon4j/1.0/_images/images/microservices/microservices_03.png new file mode 100644 index 00000000..6f67e159 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/microservices/microservices_03.png differ diff --git a/docs/devon4j/1.0/_images/images/microservices/microservices_04.png b/docs/devon4j/1.0/_images/images/microservices/microservices_04.png new file mode 100644 index 00000000..8ed9f624 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/microservices/microservices_04.png differ diff --git a/docs/devon4j/1.0/_images/images/microservices/microservices_05.png b/docs/devon4j/1.0/_images/images/microservices/microservices_05.png new file mode 100644 index 00000000..eaa730a2 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/microservices/microservices_05.png differ diff --git a/docs/devon4j/1.0/_images/images/microservices/microservices_06.png b/docs/devon4j/1.0/_images/images/microservices/microservices_06.png new file mode 100644 index 
00000000..25666417 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/microservices/microservices_06.png differ diff --git a/docs/devon4j/1.0/_images/images/microservices/microservices_07.png b/docs/devon4j/1.0/_images/images/microservices/microservices_07.png new file mode 100644 index 00000000..a929a0d7 Binary files /dev/null and b/docs/devon4j/1.0/_images/images/microservices/microservices_07.png differ diff --git a/docs/devon4j/1.0/_images/images/outdated-transfer-objects.png b/docs/devon4j/1.0/_images/images/outdated-transfer-objects.png new file mode 100644 index 00000000..c84db0af Binary files /dev/null and b/docs/devon4j/1.0/_images/images/outdated-transfer-objects.png differ diff --git a/docs/devon4j/1.0/_images/images/transfer-object-eto-cto.drawio b/docs/devon4j/1.0/_images/images/transfer-object-eto-cto.drawio new file mode 100644 index 00000000..5d0c5615 --- /dev/null +++ b/docs/devon4j/1.0/_images/images/transfer-object-eto-cto.drawio @@ -0,0 +1 @@ +7VxRc6I6FP41vtyZMpAI6mPVdnfv3J3Zvftwdx8DRKUFwoVo9f76m0AQCLRSK0i3melYc5IcTnI+v+ScREdwEew/xSjafCUu9kdAd/cjuBwBACAw2D8uOWQSayoE69hzM1FJ8MP7DwuhLqRbz8VJpSElxKdeVBU6JAyxQysyFMfkqdpsRfzqUyO0xjXBDwf5dek/nks3mXQKJoX8M/bWm/zJhjXLagKUNxYjSTbIJU8lEbwbwUVMCM3eBfsF9vnk5fOS9bt/pvZoWIxD2qbD6sF+3H8Pft5vrD8nDw/3X7/bdzdWpmWH/K0Y8AhYPtM3j7jJ9CDmwfp3y+2cByhee+EI3rJaPdqzVyZMR8vlN5REWd24VEfxnt4g31uLfg4zGMeFTvZuLf6nT7ZzwcJHSZJL2cBsuSWTRbJsAHbf2gmNkUPvKLm89ZmFeun1ZWtF9YqE9CZJP1681ki7yiNxSKC5eEfC1ZMWEHfrY81GiedorCIgoYYiTyvGVBuQHTcM8b047e2D2PDZTnIGyx9oPP9s19s1jnsjCIXbD5r8VLIi1ZFLQUUd2OGYeozKbrPpWKYTNheTs/TxivcirNXKT4lp5THCgHMOFUHEBhDlexR4Pqfwz9jfYa6V20kDnzfibVjXBfFJnD4ZrlYr4DhMzj4I5BGXalzLtkzraC03Ee+fZTTjyJNsgcEkwDQ+sCaiAzQFtYq1ZSaKTwVRA0vINiWSzmVIrA3ro+aCPtkbwaCvYNOJYtPu2HTRBZt2ZX0SobDx4R3wMBt59rjaBJ77fEXoitCvQuhWldCNJkYf98noU8Xol7f7S7jzKHY/bXHyvvfIKIp89smhHuPkgCYPmk3IoxeuAxSyuDBg2tXOWRHtAIn2ZlolWqjXiRYaZo9EO1NEe3m75xkdKY5VHKs4tm+OnegtOBb0uZnNc+rDJNkvvN0KOfglZDfif4gfWEG9H4R3Fekq0h0G6RpV0h03bmx7JV0wZNJ97zvbkHr08FuTrIsoQo6D
k0QRrSLa4RCtDjSzQrWm0bS/7TOHkCcsFNV2QLWdHL8NiGe7TiIohur7MAlUt4IDiL/rt63u9hSHblJzDhPe8ttrrGT7xHlkU8JE+bxbWfE+9chSr0423nv0Jxczfs5Kv0Qj/n65LxcOeSFk4yt14sVfuT5eKLqlpbxfZjd2a3foJCexsZFt7OAW5E0ZPWD6QsNJs9dLXjVyD8bYZ5/5XdW4JrcKdd+Ix8w+IghICAJTCRnZoESvAhw1RXBcVWRASVE26JoiBgF0KDWLeIOkhsPjLLwBmoO+uvJ7pYbKx58fZEl9dz5S6/27W+9vptV4pDH10288om6PdH17ROV/VP5Hse0A2LY5+9NrdNV0heR0WOWiZIPdY6xTDqpKERcYtYix8prXxVhFVNV5jDVtGWIZ/cVYxqSKo2OU/uoYC55Q9EyMdakIKh/69eA3/BB/iPgzpV2jzE9t8ScrMvvGX9M5o8Jf01HsafzpveFvLGcpz80xyYqO5b7wB6+Nv5eX3wviaNYbOiYTCR3mmeiYSt+sGOtGK3T0l4EE41fhh0Q4rCDGaIUYo8xXR/ScYqzKdq2grwElxfuDpGme4Jm2kLSghG19lp8oDweUTSfKFwelfg4ojeGDctrfKip/n8E4E5TglKKuV9H6OYyd3ySVcMezLaWUgki2iBSCaMY9L2ciAs91eSX/jqrDFP+V5iKWsJD8LaJ1WEsihCTkPWNCEUX28RHiA8jMMufsj03ZgiPSZGYuWNkoyuyPN4/pgoT8a71eCj6MEvrET0OqCZACt29KVVjjZ3xaAuKkIVMhE9vFMhWgnqnwSknN+mmwcnVLV4OhuRo2/cDKh9/VtM4DgP5WEPni8Lk7baifUNTxCgLr5KJWkAuvICa4Mq3k9nR7k0ieybelfa5GN23ZxuqNbOBMWqbOTTrKwVhNUcdkM64nvYeOQ0PTjer5i2ZBeAKNaekbjj02YZwiO8koDQqihpyXPDeiqv2mR8u7bxeDKKhBVNe0P04thuKEVi2Fp3bY8NpLYT3vzPxrnPJvnJmmHFzb68hpuas7uJ6Da+NgtZttvZsdd+ZhVix+4DRj9OJnYuHd/w== \ No newline at end of file diff --git a/docs/devon4j/1.0/_images/images/transfer-object-eto-cto.drawio.png b/docs/devon4j/1.0/_images/images/transfer-object-eto-cto.drawio.png new file mode 100644 index 00000000..94eff66f Binary files /dev/null and b/docs/devon4j/1.0/_images/images/transfer-object-eto-cto.drawio.png differ diff --git a/docs/devon4j/1.0/alternative-microservice-netflix.html b/docs/devon4j/1.0/alternative-microservice-netflix.html new file mode 100644 index 00000000..432bbbc6 --- /dev/null +++ b/docs/devon4j/1.0/alternative-microservice-netflix.html @@ -0,0 +1,1404 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Microservices based on Netflix-Tools

+
+
+

Devonfw microservices approach is based on Spring Cloud Netflix, that provides all the main components for microservices integrated within Spring Boot context.

+
+
+

In the following schema we can see an overview of the structure of components in a Devon application based on the Spring Cloud Netflix solution for microservices.

+
+
+
+microservices 01 +
+
+
+

Let’s explain each component

+
+
+

Service Discovery - Eureka

+
+

Eureka is a server to register and locate the microservices. The main function for Eureka is to register the different instances of the microservices, its location, its state and other metadata.

+
+
+

It works in a simple way, during the start of each microservice, this communicates with the Eureka server to notify its availability and to send the metadata. The microservice will continue to notify its status to the Eureka server every 30 seconds (default time on Eureka server properties). This value can be changed in the configuration of the component.

+
+
+

If after 3 periods, Eureka does not receive notification of any of the microservices, it will be considered as unavailable and will eliminate its registration.

+
+
+

In addition, it serves as a catalog to locate a specific microservice when routing a request to it.

+
+
+
+

Circuit Breaker - Hystrix

+
+

Hystrix is a library that implements the Circuit Breaker pattern. Its main functionality is to improve the reliability of the system, isolating the entry points of the microservices, preventing the cascading failure from the lower levels of the application all the way up to the user.

+
+
+

In addition to that, it allows developers to provide a fallback in case of error. Hystrix manages the requests to a service, and in case the microservice doesn’t respond, it allows implementing an alternative to the request.

+
+
+
+

Client Side Load Balancer - Ribbon

+
+

Ribbon is a library designed as client side load balancer. Its main feature is to integrate with Eureka to discover the instances of the microservices and their metadata. In that way the Ribbon is able to calculate which of the available instances of a microservice is the most appropriate for the client, when facing a request.

+
+
+
+

REST Client - Feign

+
+

Feign is a REST client to make calls to other microservices. The strength of Feign is that it integrates seamlessly with Ribbon and Hystrix, and its implementation is through annotations, which greatly facilitates this task to the developer.

+
+
+

Using annotations, Spring-cloud generates, automatically, a fully configured REST client.

+
+
+
+

Router and Filter - Zuul

+
+

Zuul is the entry point of the apps based on Spring-cloud microservices. It allows dynamic routing, load balancing, monitoring and securing of requests. By default Zuul uses Ribbon to locate, through Eureka, the instances of the microservice that it wants to invoke and sends the requests within a Hystrix Command, taking advantage of its functionality.

+
+
+
+

How to create microservices in devonfw?

+
+

Follow the instructions in the link below to set up devonfw distribution

+
+ +
+

Next, install devonfw modules and dependencies

+
+
+

Step 1: Open the console

+
+

Open the devonfw console by executing the batch file console.bat from the devonfw distribution. It is a pre-configured console which automatically uses the software and configuration provided by the devonfw distribution.

+
+
+
+

Step 2: Change the directory

+
+

Run the following command in the console to change directory to devonfw module

+
+
+
+
cd workspaces\examples\devonfw
+
+
+
+
+

Step 3: Install

+
+

To install modules and dependencies, you need to execute the following command:

+
+
+
+
mvn --projects bom,modules/microservices/microservices,modules/microservices/microservice-archetype,modules/microservices/microservice-infra-archetype  --also-make install
+
+
+
+ + + + + +
+ + +In case installation fails, try running the command again as it is often due to hitch in the network. +
+
+
+

Now, you can use the Microservices archetype given below to create Microservices.

+
+
+

In order to generate microservices in a devonfw project we can choose between two approaches:

+
+
+
    +
  • +

    generate a new devon4j application and implement one by one all the needed components (based on Spring Cloud).

    +
  • +
  • +

    generate a new devon4j application through the custom microservice archetype included in the devonfw distributions.

    +
  • +
+
+
+

That second approach, using the devonfw microservices archetype, will generate automatically all the basic structure and components to start developing the microservices based application.

+
+
+
+
+

devonfw archetypes

+
+

To simplify starting with projects based on microservices, devonfw includes two archetypes to generate pre-configured projects that include all the basic components of the Spring Cloud implementation.

+
+
+
    +
  • +

    archetypes-microservices-infra: generates a project with the needed infrastructure services to manage microservices. Includes the Eureka service, Zuul service and the authentication service.

    +
  • +
  • +

    archetypes-microservices: generates a simple project pre-configured to work as a microservice.

    +
  • +
+
+
+
+

Create New Microservices infrastructure application

+
+

To generate a new microservices infrastructure application through the devonfw archetype you only need to open a devonfw console (console.bat script) and follow the same steps described in getting started creating new devonfw devon4j application. But, instead of using the standard archetype, we must provide the special infrastructure archetype archetype-microservice-infra. Remember to provide your own values for DgroupId, DartifactId, Dversion and Dpackage parameters, Also provide the -DarchetypeVersion with latest value:

+
+
+
+
mvn -DarchetypeVersion=2.4.0 -DarchetypeGroupId=com.devonfw.microservices -DarchetypeArtifactId=microservices-infra-archetype archetype:generate -DgroupId=com.capgemini -DartifactId=sampleinfra -Dversion=0.1-SNAPSHOT -Dpackage=com.capgemini.sampleinfra
+
+
+
+

Once the Maven command has finished an application with the following modules should be created:

+
+
+
+microservices 02 +
+
+
+

service-eureka module

+
+

This module contains the needed classes and configuration to start a Eureka server.

+
+
+

This service runs by default on port 8761 although it can be changed in the application.properties file of the project.

+
+
+
+

service-zuul module

+
+

This module contains all the needed classes and configuration to start a Zuul server, that will be in charge of the routing and filter of the requests.

+
+
+

This service by default runs on port 8081 but, as we already mentioned, it can be changed through the file application.properties of the project.

+
+
+
+

service-auth module

+
+

This module runs an authentication and authorization mock microservice that allows to generate a security token to make calls to the rest of microservices. This module is only providing a basic structure, the security measures must be implemented fitting the requirements of each project (authentication through DB, SSO, LDAP, OAuth,…​)

+
+
+

This service runs by default on port 9999, although, as in previous services, it can be edited in the application.properties file.

+
+
+
+
+

Create New Microservices Application

+
+

To generate a new microservice project through the devonfw archetype, as in previous archetype example, you can follow the instructions explained in getting started creating new devonfw devon4j application. But, instead of using the standard archetype, we must provide the special microservices archetype archetype-microservices. Open a devonfw console (console.bat script) and launch a Maven command like the following (provide your own values for DgroupId, DartifactId, Dversion and Dpackage parameters, also provide the -DarchetypeVersion with latest value):

+
+
+
+
mvn -DarchetypeVersion=2.4.0 -DarchetypeGroupId=com.devonfw.microservices -DarchetypeArtifactId=microservices-archetype archetype:generate -DgroupId=com.capgemini -DartifactId=sampleapp1 -Dversion=0.1-SNAPSHOT -Dpackage=com.capgemini.sampleapp1
+
+
+
+

That command generates a simple application containing the source code for the microservice. By default, the pom.xml includes the devon-microservices module, that contains the security configuration, jwt interceptors, Hystrix, Ribbon and FeignClient configuration and some properties common to all microservices.

+
+
+

The created microservice runs by default on port 9001 and has the context-path with the same name as the project. These parameters can be changed through the 'application.properties' file of the project.

+
+
+
+

How to use microservices in devonfw

+
+

In the following sections we are going to provide some patterns to manage microservices in devonfw using the archetype, alongside the options that each of the available modules offer.

+
+
+

Eureka service

+
+

We are going to review the general options for the Eureka service. If you are interested in getting more details you can visit the official site for Spring Cloud Eureka clients.

+
+
+

To create an Eureka server you only need to create a new Spring Boot application and add the @EnableEurekaServer to the main class.

+
+
+ + + + + +
+ + +
+

The provided archetype archetype-microservices-infra already provides that annotated class.

+
+
+
+
+
+
@Configuration
+@EnableEurekaServer
+@EnableAutoConfiguration
+@SpringBootApplication
+public class EurekaBootApp {
+
+  public static void main(String[] args) {
+
+    new SpringApplicationBuilder(EurekaBootApp.class).web(true).run(args);
+  }
+}
+
+
+
+

The basic properties that must be configured for Eureka server are:

+
+
+
    +
  • +

    port: in which port the service will run. The default port is 8761 and you have to keep in mind that the connection to this port is especially critical as all the microservices must be able to connect to this host:port. Remember that Eureka generates and manages the microservices catalog, so it's crucial to allow the microservices to register in this component.

    +
  • +
  • +

    url: the URL of the zone that the server manages.

    +
  • +
+
+
+
+
eureka.instance.hostname=localhost
+eureka.instance.port=8761
+
+server.port=${eureka.instance.port}
+
+eureka.client.serviceUrl.defaultZone=http://${eureka.instance.hostname}:${eureka.instance.port}/eureka/
+
+
+
+

The way to connect a microservice to Eureka server is really simple. You only will need to specify the host:port where the server is located and annotate the Spring Boot class with @EnableMicroservices annotation.

+
+
+ + + + + +
+ + +
+

Instead of using that @EnableMicroservices annotation, you can use the equivalent Spring annotations @EnableDiscoveryClient or @EnableEurekaClient.

+
+
+
+
+
+
@Configuration
+@EnableMicroservices
+@SpringBootApplication
+public class MicroserviceBootApp {
+  public static void main(String[] args) {
+
+    SpringApplication.run(MicroserviceBootApp.class, args);
+  }
+}
+
+
+
+
+
eureka.instance.hostname=localhost
+eureka.instance.port=8761
+
+eureka.client.serviceUrl.defaultZone=http://${eureka.instance.hostname}:${eureka.instance.port}/eureka/
+
+
+
+

With this the application will register automatically in Eureka and will be validated each 30 seconds. This value can be changed editing the property eureka.instance.leaseRenewalIntervalInSeconds in application.properties file. It must be taken into account that each Eureka client will maintain a cache of Eureka records to avoid calling the service every time it is necessary to access another microservice. This cache is reloaded every 30 seconds, this value can also be edited through property eureka.client.registryFetchIntervalSeconds in application.properties file.

+
+
+
+

Zuul service

+
+

We are going to show an overview to the options of the Zuul service, if you want to know more details about this particular service visit the official site of Spring Cloud.

+
+
+

Zuul is the component in charge of routing and filtering the requests to the microservices system. It works as a gateway that, through a rule engine, redirects the requests to the suitable microservice. In addition, it can be used as a security filter as it can implement PRE-Filters and POST-Filters.

+
+
+

To create a basic Zuul server you only need to create a new Spring Boot application and add the @EnableZuulProxy annotation.

+
+
+
+
@EnableAutoConfiguration
+@EnableEurekaClient
+@EnableZuulProxy
+@SpringBootApplication
+public class ZuulBootApp {
+  public static void main(String[] args) {
+
+    SpringApplication.run(ZuulBootApp.class, args);
+  }
+}
+
+
+
+

To allow Zuul to redirect the requests we need to connect Zuul with the previously created Eureka service, to allow it to register and access the catalog of microservices created by Eureka.

+
+
+

Also, if we are going to use the Zuul service from a web browser, we must configure the CORS filter to allow connections from any source. This is really easy to implement by adding the following Java Bean to our ZuulBootApp class:

+
+
+
+
@Bean
+public CorsFilter corsFilter() {
+    final UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource();
+    final CorsConfiguration config = new CorsConfiguration();
+    config.setAllowCredentials(true);
+    config.addAllowedOrigin("*");
+    config.addAllowedHeader("*");
+    config.addAllowedMethod("OPTIONS");
+    config.addAllowedMethod("HEAD");
+    config.addAllowedMethod("GET");
+    config.addAllowedMethod("PUT");
+    config.addAllowedMethod("POST");
+    config.addAllowedMethod("DELETE");
+    config.addAllowedMethod("PATCH");
+    source.registerCorsConfiguration("/**", config);
+    return new CorsFilter(source);
+}
+
+
+
+

To configure the Zuul service we need to define a series of properties that we will describe below:

+
+
+
+
server.port=8081
+spring.application.name=zuulserver
+
+eureka.instance.hostname=localhost
+eureka.instance.port=8761
+eureka.client.serviceUrl.defaultZone=http://${eureka.instance.hostname}:${eureka.instance.port}/eureka/
+
+
+microservices.context-path=/demo
+
+zuul.routes.security.path=${microservices.context-path}/services/rest/security/**
+zuul.routes.security.serviceId=AUTH
+zuul.routes.security.stripPrefix=false
+
+zuul.routes.login.path=${microservices.context-path}/services/rest/login
+zuul.routes.login.serviceId=AUTH
+zuul.routes.login.stripPrefix=false
+
+
+zuul.ignoredServices='*'
+zuul.sensitive-headers=
+
+ribbon.eureka.enabled=true
+hystrix.command.default.execution.timeout.enabled=false
+
+
+
+
    +
  • +

    server.port: Is the port where the Zuul service is listening.

    +
  • +
  • +

    spring.application.name: The name of the service that will be sent to Eureka.

    +
  • +
  • +

    eureka.*: The properties for the register of the Eureka client.

    +
  • +
  • +

    zuul.routes.XXXXX: The configuration of a concrete route.

    +
  • +
  • +

    zuul.routes.XXXXX.path: The path used for a redirection.

    +
  • +
  • +

    zuul.routes.XXXXX.serviceId: ID of the service where the request will be redirected. It must match the property spring.application.name in the microservice.

    +
  • +
  • +

    zuul.routes.XXXXX.stripPrefix: by default set to false. With this property we configure if the part of the route that has matched the request must be cut out. i.e., if the path is /sample/services/rest/foomanagement/∗∗ and the property is set to true it will redirect to the microservice but it will only send the path **, the root /sample/services/rest/foomanagement/ will be removed.

    +
  • +
  • +

    zuul.ignoredServices: Configures which services without result in the routes, must be ignored.

    +
  • +
  • +

    zuul.sensitive-headers: Configures which headers must be ignored. This property must be set to empty, otherwise Zuul will ignore security authorization headers and the json web token will not work.

    +
  • +
  • +

    ribbon.eureka.enabled: Configures if the Ribbon should be used to route the requests.

    +
  • +
  • +

    hystrix.command.default.execution.timeout.enabled: Enables or disables the timeout parameter to consider a microservice as unavailable. By default the value for this property is 1 second. Any request that takes more than this will be considered failed. By default in the archetype this property is disabled.

    +
  • +
+
+
+

Having an Eureka client activated, the Zuul service will refresh its content every 30 seconds, so a just registered service may still not have been cached in Zuul. On the contrary, if a service is unavailable, 3 cycles of 30 seconds must pass before Eureka sets its register as dead, and another 30 seconds for Zuul to refresh its cache.

+
+
+
+

Security, Authentication and authorization

+
+

The most commonly used authentication in micro-service environments is authentication based on json web tokens, since the server does not need to store any type of user information (stateless) and therefore favors the scalability of the microservices.

+
+
+ + + + + +
+ + +
+

The service-auth module is useful only if the authentication and authorization needs to be done by a remote service (e.g. to have a common auth. service to be used by several microservices).

+
+
+

Otherwise, the authentication and authorization can happen in the main application, which will perform the authentication and will generate the JWT.

+
+
+
+
+
Security in the monolith application
+
+

In this case, the main microservice or application will perform the authentication and generate the JWT, without using service-auth.

+
+
+

It works as follows:

+
+
+
    +
  • +

    The user is authenticated in our application, either through a user / password access, or through a third provider.

    +
  • +
  • +

    This authentication request is launched against the Zuul server which will redirect it to an instance of the microservice.

    +
  • +
  • +

    The microservice will check the user, retrieve their roles and metadata and generate two tokens: one with user access information and another needed to refresh the access token. This information will be returned to the client.

    +
  • +
  • +

    The client is now able to call the microservice, adding the authorization token to the header of the request.

    +
  • +
+
+
+
+microservices 07 +
+
+
+
+
Security in external service (service-auth)
+
+

It works as follows:

+
+
+
    +
  • +

    The user is authenticated in our application, either through a user / password access, or through a third provider.

    +
  • +
  • +

    This authentication request is launched against the Zuul server which will redirect it to an instance of the Auth microservice.

    +
  • +
  • +

    The Auth microservice will check the user, retrieve their roles and metadata and generate two tokens: one with user access information and another needed to refresh the access token. This information will be returned to the client.

    +
  • +
+
+
+
+microservices 03 +
+
+
+

The service-auth service is already prepared to listen to the /login path and generate the two mentioned tokens. To do so we can use the JsonWebTokenUtility class that is implemented in devonfw

+
+
+
+
      UserDetailsJsonWebTokenAbstract clientTo = new UserDetailsJsonWebTokenTo();
+      clientTo.setId(1L);
+      clientTo.setUsername("demo");
+      clientTo.setRoles(new ArrayList<>(Arrays.asList("DEMO")));
+      clientTo.setExpirationDate(buildExpirationDate(this.expirationTime * 60 * 1000L));
+
+      return new ResponseEntity<>(new JwtHeaderTo(this.jsonWebTokenUtility.createJsonWebTokenAccess(clientTo),
+          this.jsonWebTokenUtility.createJsonWebTokenRefresh(clientTo),
+          this.expirationTime * 60 * 1000L), HttpStatus.OK);
+
+
+
+ + + + + +
+ + +
+

In our example you can make a POST request to:

+
+
+

http://localhost:8081/service-auth/services/rest/login
+     HEADER     Content-Type : application/json
+     BODY        { "j_username" : "xxx", "j_password" : "xxx"}

+
+
+
+
+

This will generate a response like the following

+
+
+
+
{
+  "accessToken": "eyJhbGciOiJIUzUxMiJ9.eyJzdWIiOiJkZW1vIiwiZmlyc3ROYW1lIjoiZGVtbyIsImxhc3ROYW1lIjoiZGVtbyIsImV4cCI6MTQ4Nzg3NTAyMSwicm9sZXMiOlsiREVNTyJdfQ.aEdJWEpyvRlO8nF_rpSMSM7NXjRIyeJF425HRt8imCTsq4iGiWbmi1FFZ6pydMwKjd-Uw1-ZGf2WF58qjWc4xg",
+  "refreshToken": "eyJhbGciOiJIUzUxMiJ9.eyJzdWIiOiJkZW1vIiwiZmlyc3ROYW1lIjoiZGVtbyIsImxhc3ROYW1lIjoiZGVtbyIsImV4cCI6MTQ4Nzg3NTAyMSwicm9sZXMiOlsiUkVGUkVTSF9KV1QiXX0.YtK8Bh07O-h1GTsyTK36YHxkGniyiTlxnazZXi8tT-RtUxxW8We8cdiYJn6tw0RoFkOyr1F5EzvkGyU0HNoLyw",
+  "expirationTime": 900000,
+  "accessHeaderName": "Authorization",
+  "refreshHeaderName": "Authorization-Refresh"
+}
+
+
+
+

The client now should store, in the header defined in accessHeaderName, the token included as accessToken.

+
+
+ + + + + +
+ + +
+

When using service-auth (or any other external authorization service), we must secure not only +the communication between the Client and Zuul, but also between Zuul and the service-auth.

+
+
+

There is very sensitive information being sent (username and password) between the different services that +anyone could read if the channel is not properly secured.

+
+
+
+
+

When configuring the service-auth module, it is very important to take into account the following aspects:

+
+
+
    +
  • +

    The expiration date of the token can be configured in the properties file with the property jwt.expirationTime (will appear in minutes).

    +
  • +
  • +

    The key for the token generation can be configured also in the properties file using the property jwt.encodedKey which will have a Base64 encoded value.

    +
  • +
  • +

    The roles inserted in the token should be the list of the access roles of the user. Doing this we avoid that each microservice has to look for the roles that belong to a profile.

    +
  • +
  • +

    If you want to use a specific UserDetails for the project, with new fields, you must extend the behavior as explained in here.

    +
  • +
+
+
+

From now on, the client will be able to make calls to the microservices, sending the access token in the header of the request.

+
+
+
+microservices 04 +
+
+
+

Once the request reaches the microservice, the app must validate the token and register the user in the security context. These operations will be automatic as long as the microservice has enabled the security inherited from the JsonWebTokenSecurityConfig class. This is done using the following code:

+
+
+
+
@Configuration
+@EnableWebSecurity
+public class WebSecurityConfig extends JsonWebTokenSecurityConfig {
+
+  @Override
+  public JsonWebTokenUtility getJsonWebTokenUtility() {
+
+    return new JsonWebTokenUtility();
+  }
+
+  @Override
+  protected void setupAuthorization(HttpSecurity http) throws Exception {
+
+    http.authorizeRequests()
+        // authenticate all other requests
+        .anyRequest().authenticated();
+  }
+
+}
+
+
+
+

In addition, devonfw has already implemented the needed interceptors and filters to resend the security header each time that a microservice calls other microservice of the ecosystem.

+
+
+

When validating the token, its expiration date is also checked, so it is highly recommended that the client refreshes the token from time to time, in order to update its expiration date. This is done by launching a request to /refresh_jwt within the service-auth module and sending both the access token and the refresh token in the header.

+
+
+
+microservices 05 +
+
+
+

If for any reason an attempt is made to access a business operation without having a valid token, or without sufficient role level permission to execute that operation, the microservice response will be Forbidden.

+
+
+
+microservices 06 +
+
+
+
+
+
+

How to modify the UserDetails information

+
+

In order to modify the UserDetails information we will need to accomplish two steps: modify the authentication service to generate the authentication token with the custom attributes embedded, and modify the pre-authentication filter of the microservices to convert the token into an Object with the custom attributes available.

+
+
+

Modify the authentication service to generate a new token

+
+

We must modify the service-auth module that is in charge of logging in the user and generating the security token.

+
+
+

The first thing to do is to create a UserDetails class that contains the required attributes and custom attributes. In the code sample we will call this class UserDetailsJsonWebTokenCustomTo, and must either implement the generic UserDetailsJsonWebTokenAbstract interface or extend it from the current UserDetailsJsonWebTokenTo class, since the services are prepared to work with it. In the example, we will add two new attributes firstName and lastName.

+
+
+
+
public class UserDetailsJsonWebTokenCustomTo extends UserDetailsJsonWebTokenTo {
+
+  private String firstName;
+  private String lastName;
+
+  public String getFirstName() {
+    return this.firstName;
+  }
+
+  public String getLastName() {
+    return this.lastName;
+  }
+
+  public void setFirstName(String firstName) {
+    this.firstName = firstName;
+  }
+
+  public void setLastName(String lastName) {
+    this.lastName = lastName;
+  }
+}
+
+
+
+

In case that the UserDetailsJsonWebTokenAbstract interface is implemented, in addition to the new attributes the rest of the interface must be implemented.

+
+
+

The next step would be to override the component that performs the conversions Token→UserDetails and UserDetails→Token. This component is the JsonWebTokenUtility, so you should create a new class that extends from this, in the example we will call it JsonWebTokenUtilityCustom. In this new class, you must overwrite the only two methods that are allowed to perform the conversions, to add writing and reading operations for the new custom attributes.

+
+
+
+
public class JsonWebTokenUtilityCustom extends JsonWebTokenUtility {
+
+  @Override
+  protected UserDetailsJsonWebTokenAbstract addCustomPropertiesClaimsToUserDetails(Claims claims) {
+
+    UserDetailsJsonWebTokenCustomTo userDetails = new UserDetailsJsonWebTokenCustomTo();
+
+    userDetails.setFirstName(claims.get("firstName", String.class));
+    userDetails.setLastName(claims.get("lastName", String.class));
+
+    return userDetails;
+  }
+
+  @Override
+  protected void addCustomPropertiesUserDetailsToJwt(UserDetailsJsonWebTokenAbstract authTokenDetailsDTO, JwtBuilder jBuilder) {
+
+    UserDetailsJsonWebTokenCustomTo userDetails = (UserDetailsJsonWebTokenCustomTo) authTokenDetailsDTO;
+
+    jBuilder.claim("firstName", userDetails.getFirstName());
+    jBuilder.claim("lastName", userDetails.getLastName());
+  }
+}
+
+
+
+

Now you should enable that new converter to replace the default one. In the WebSecurityConfig class you must change the related @Bean to start using this new class

+
+
+
+
@Configuration
+@EnableWebSecurity
+public class WebSecurityConfig extends WebSecurityConfigurerAdapter {
+
+...
+
+  @Bean
+  public JsonWebTokenUtility getJsonWebTokenUtility() {
+    return new JsonWebTokenUtilityCustom();
+  }
+
+...
+
+}
+
+
+
+

Finally, in the login process the new attributes should be filled in when creating the user. In our example, this is done in the class SecuritymanagementRestServiceImpl.

+
+
+
+
      UserDetailsJsonWebTokenCustomTo clientTo = new UserDetailsJsonWebTokenCustomTo();
+      clientTo.setId(1L);
+      clientTo.setUsername("demo");
+      clientTo.setRoles(new ArrayList<>(Arrays.asList("DEMO")));
+      clientTo.setExpirationDate(buildExpirationDate(this.expirationTime * 60 * 1000L));
+
+      clientTo.setFirstName("firstName");
+      clientTo.setLastName("lastName");
+
+
+      return new ResponseEntity<>(new JwtHeaderTo(this.jsonWebTokenUtility.createJsonWebTokenAccess(clientTo),
+          this.jsonWebTokenUtility.createJsonWebTokenRefresh(clientTo), //
+          this.expirationTime * 60 * 1000L), HttpStatus.OK);
+
+
+
+
+

Modify the pre-authentication filter to read the new token

+
+

Once a token with custom attributes has been obtained, the steps to read it and put it in the security context are very simple. The changes shown in this point should be reproduced in those microservices where you want to use the new custom attributes. The steps to follow are those:

+
+
+
    +
  • +

    Create a UserDetailsJsonWebTokenCustomTo class that contains the new attributes, as was done in the previous section. The ideal would be to reuse the same class.

    +
  • +
  • +

    Create a JsonWebTokenUtilityCustom class that extends the implementation of the token generator, just as it was done in the previous section. Again, the ideal would be to reuse the same class.

    +
  • +
  • +

    Configure the creation of this new @Bean in the WebSecurityConfig class just like in the previous section.

    +
  • +
+
+
+

With these three steps you can use the new security object with the custom attributes. One way to use it could be as follows:

+
+
+
+
   UserDetailsJsonWebToken principal = (UserDetailsJsonWebToken) SecurityContextHolder.getContext().getAuthentication().getPrincipal();
+
+   UserDetailsJsonWebTokenCustomTo userDetails = (UserDetailsJsonWebTokenCustomTo) principal.getUserDetailsJsonWebTokenAbstract();
+
+   userDetails.getFirstName();
+
+
+
+
+
+

How to start with a microservice

+
+

Once the microservice has been created through its archetype, you need to have a series of points in mind to configure it correctly:

+
+
+
    +
  • +

    The microservice must have the microservices starter in its pom.xml configuration to be able to use the interceptors and the generic configuration.

    +
  • +
+
+
+
+
<dependency>
+      <groupId>com.devonfw.starter</groupId>
+      <artifactId>devonfw-microservices-starter</artifactId>
+      <version>${devonfw.version}</version>
+</dependency>
+
+
+
+
    +
  • +

    It should be annotated in its initial class with @EnableMicroservices, this will activate the annotations for Eureka client, CircuitBreaker and the client Feign. All of this is configured in the properties file.

    +
  • +
  • +

    This is a bootified application so in the pom.xml file you will have to define which one is the boot class.

    +
  • +
  • +

    You must consider the boot configuration: port and context-path. In development, each microservice must have a different port, to avoid colliding with other microservices, while the context-path is recommended to be the same, to simplify the Zuul configurations and calls between microservices.

    +
  • +
  • +

    You can use @RolesAllowed annotations in the services methods to secure them, as long as the Web security inherited from JsonWebTokenSecurityConfig has been enabled, since it is the responsible for putting the UserDetails generated from the token into the security context.

    +
  • +
  • +

    All microservices must share the security key to encrypt and decrypt the token. And, specially, it should be the same as the service-auth, which will be responsible for generating the initial token.

    +
  • +
  • +

    In the Zuul module, the routes must be well configured to be able to route certain URLs to the new created microservices. So, if we have added a sampleapp1 with server.context-path=/sampleapp1 we will need to map that service in the Zuul’s application.properties file adding

    +
  • +
+
+
+
+
zuul.routes.sampleapp1.path=/sampleapp1/services/rest/**
+zuul.routes.sampleapp1.serviceId=sampleapp1
+zuul.routes.sampleapp1.stripPrefix=false
+
+
+
+

The rest will be treated as if it were a normal Web application, which exposes some services through a REST API.

+
+
+
+

Calls between microservices

+
+

In order to invoke a microservice manually, you would need to implement the following steps:

+
+
+
    +
  • +

    Obtain the instances of the microservice you want to invoke.

    +
  • +
  • +

    Choose which of all instances is the most optimal for the client.

    +
  • +
  • +

    Retrieve the security token from the source request.

    +
  • +
  • +

    Create a REST client that invokes the instance by passing the generated security token.

    +
  • +
  • +

    Intercept the response in case it causes an error, to avoid a cascade propagation.

    +
  • +
+
+
+

Thanks to the combination of Feign, Hystrix, Ribbon, Eureka and devonfw it is possible to make a call to another microservice in a declarative, very simple and almost automatic way.

+
+
+

You only need to create an interface with the methods that need to be invoked. This interface must be annotated with @FeignClient and each of the methods created must have a path and a method in the @RequestMapping annotation. An example interface might be as follows:

+
+
+
+
@FeignClient(value = "foo")
+public interface FooClient {
+
+  @RequestMapping(method = RequestMethod.GET, value = "/${server.context-path}/services/rest/foomanagement/v1/foo")
+  FooMessageTo foo();
+
+}
+
+
+
+

It is important to highlight the following aspects:

+
+
+
    +
  • +

    The @FeignClient annotation comes along with the name of the microservice to be invoked. The correct and optimal would be to use the name of the microservice, but it is also possible to launch the request to the Zuul server. In the latter case it would be the server itself that would perform the load balancing and self-discovery of the most appropriate microservice, but keep in mind that, by doing this, the proxy server is also overloaded with unnecessary requests.

    +
  • +
  • +

    The @RequestMapping annotation must have the same method and path as expected on target, otherwise the request will be thrown and no response will be found.

    +
  • +
  • +

    The input and output parameters will be mapped to json, so they may not be exactly the same classes in both destination and source. It will depend on how you want to send and retrieve the information.

    +
  • +
+
+
+

Once the interface is created and annotated, in order to use the calls, it would be enough to inject the component into the object from which we want to use it and invoke any of its methods. Spring Cloud will automatically generate the required bean.

+
+
+
+
...
+
+  @Inject
+  FooClient fooClient;
+
+  public FooMessageTo invokeFooClient() {
+    return this.fooClient.foo();
+  }
+
+...
+
+
+
+

With these two annotations, almost all the functionality is covered automatically: search in Eureka, choice of the best instance through Ribbon, registration of the token and creation of the REST client. It would only be necessary to control the response in case of failure. The idea is that, in case of a failure or outage of the invoked microservice, an alternative plan is executed at the origin of the invocation. This is as simple as activating the fallback in the @FeignClient annotation and assigning a class that will be invoked in case the REST client response fails.

+
+
+
+
@FeignClient(value = "foo", fallback = FooClientHystrixFallback.class)
+public interface FooClient {
+
+  @RequestMapping(method = RequestMethod.GET, value = "/${server.context-path}/services/rest/foomanagement/v1/foo")
+  FooMessageTo foo();
+
+}
+
+
+
+

Finally, you will need to create a class annotated with @Component that implements the interface of the Feign client. Within this implementation you can add the desired functionality in case the invocation to the REST client fails.

+
+
+
+
@Component
+public class FooClientHystrixFallback implements FooClient {
+
+  @Override
+  public FooMessageTo foo() {
+    return new FooMessageTo("Fail Message");
+  }
+
+}
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/architecture.html b/docs/devon4j/1.0/architecture.html new file mode 100644 index 00000000..bb172119 --- /dev/null +++ b/docs/devon4j/1.0/architecture.html @@ -0,0 +1,761 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Architecture

+
+
+

There are many different views that are summarized by the term architecture. First, we will introduce the key principles and architecture principles of devonfw. Then, we will go into details of the architecture of an application.

+
+
+
+
+

Key Principles

+
+
+

For devonfw we follow these fundamental key principles for all decisions about architecture, design, or choosing standards, libraries, and frameworks:

+
+
+
    +
  • +

    KISS
    +Keep it small and simple

    +
  • +
  • +

    Open
    +Commitment to open standards and solutions (no required dependencies to commercial or vendor-specific standards or solutions)

    +
  • +
  • +

    Patterns
    +We concentrate on providing patterns, best-practices and examples rather than writing framework code.

    +
  • +
  • +

    Solid
    +We pick solutions that are established and have been proven to be solid and robust in real-life (business) projects.

    +
  • +
+
+
+
+
+

Architecture Principles

+
+
+

Additionally we define the following principles that our architecture is based on:

+
+
+
    +
  • +

    Component Oriented Design
    +We follow a strictly component oriented design to address the following sub-principles:

    +
    +
      +
    • +

      Separation of Concerns

      +
    • +
    • +

      Reusability and avoiding redundant code

      +
    • +
    • +

      Information Hiding via component API and its exchangeable implementation treated as secret.

      +
    • +
    • +

      Design by Contract for self-contained, descriptive, and stable component APIs.

      +
    • +
    • +

      Layering as well as separation of business logic from technical code for better maintenance.

      +
    • +
    • +

      Data Sovereignty (and high cohesion with low coupling) says that a component is responsible for its data and changes to this data shall only happen via the component. Otherwise, maintenance problems will arise to ensure that data remains consistent. Therefore, interfaces of a component that may be used by other components are designed call-by-value and not call-by-reference.

      +
    • +
    +
    +
  • +
  • +

    Homogeneity
    +Solve similar problems in similar ways and establish a uniform code-style.

    +
  • +
+
+
+

As an architect you should be prepared for the future by reading the TechnoVision.

+
+
+
+
+

Application Architecture

+
+
+

For the architecture of an application we distinguish the following views:

+
+
+
    +
  • +

    The Business Architecture describes an application from the business perspective. It divides the application into business components and with full abstraction of technical aspects.

    +
  • +
  • +

    The Technical Architecture describes an application from the technical implementation perspective. It divides the application into technical layers and defines which technical products and frameworks are used to support these layers.

    +
  • +
  • +

    The Infrastructure Architecture describes an application from the operational infrastructure perspective. It defines the nodes used to run the application including clustering, load-balancing and networking. This view is not explored further in this guide.

    +
  • +
+
+
+

Business Architecture

+
+

The business architecture divides the application into business components. A business component has a well-defined responsibility that it encapsulates. All aspects related to that responsibility have to be implemented within that business component. Further, the business architecture defines the dependencies between the business components. These dependencies need to be free of cycles. A business component exports its functionality via well-defined interfaces as a self-contained API. A business component may use another business component via its API and compliant with the dependencies defined by the business architecture.

+
+
+

As the business domain and logic of an application can be totally different, the devonfw can not define a standardized business architecture. Depending on the business domain it has to be defined from scratch or from a domain reference architecture template. For very small systems it may be suitable to define just a single business component containing all the code.

+
+
+
+

Technical Architecture

+
+

The technical architecture divides the application into technical layers based on the multilayered architecture. A layer is a unit of code with the same category such as a service or presentation logic. So, a layer is often supported by a technical framework. Each business component can therefore be split into component parts for each layer. However, a business component may not have component parts for every layer (e.g. only a presentation part that utilizes logic from other components).

+
+
+

An overview of the technical reference architecture of the devonfw is given by figure "Technical Reference Architecture". +It defines the following layers visualized as horizontal boxes:

+
+
+ +
+
+

Also, you can see the (business) components as vertical boxes (e.g. A and X) and how they are composed out of component parts each one assigned to one of the technical layers.

+
+
+

Further, there are technical components for cross-cutting aspects grouped by the gray box on the left. Here is a complete list:

+
+ +
+
+devonfw architecture blueprint +
+
Figure 1. Technical Reference Architecture
+
+
+

Please click on the architecture image to open it as SVG and click on the layers and cross-cutting topics to open the according documentation guide.

+
+
+

We reflect this architecture in our code as described in our coding conventions allowing a traceability of business components, use-cases, layers, etc. into the code and giving +developers a sound orientation within the project.

+
+
+

Further, the architecture diagram shows the allowed dependencies illustrated by the dark green connectors. +Within a business component a component part can call the next component part on the layer directly below via a dependency on its API (vertical connectors). +While this is natural and obvious, it is generally forbidden to have dependencies upwards the layers +or to skip a layer by a direct dependency on a component part two or more layers below. +The general dependencies allowed between business components are defined by the business architecture. +In our reference architecture diagram we assume that the business component A1 is allowed to depend +on component A2. Therefore, a use-case within the logic component part of A1 is allowed to call a +use-case from A2 via a dependency on the component API. The same applies for dialogs on the client layer. +This is illustrated by the horizontal connectors. Please note that persistence entities are part of the API of the data-access component part so only the logic component part of the same +business component may depend on them.

+
+
+

The technical architecture has to address non-functional requirements:

+
+
+
    +
  • +

    scalability
    +is established by keeping state in the client and making the server state-less (except for login session). Via load-balancers new server nodes can be added to improve performance (horizontal scaling).

    +
  • +
  • +

    availability and reliability
    +are addressed by clustering with redundant nodes avoiding any single-point-of failure. If one node fails the system is still available. Further, the software has to be robust so there are no dead-locks or other bad effects that can make the system unavailable or not reliable.

    +
  • +
  • +

    security
    +is achieved in the devonfw by the right templates and best-practices that avoid vulnerabilities. See security guidelines for further details.

    +
  • +
  • +

    performance
    +is obtained by choosing the right products and proper configurations. While the actual implementation of the application matters for performance a proper design is important as it is the key to allow performance-optimizations (see e.g. caching).

    +
  • +
+
+
+

Technology Stack

+
+

The technology stack of the devonfw is illustrated by the following table.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Technology Stack of devonfw
TopicDetailStandardSuggested implementation

runtime

language & VM

Java

Oracle JDK

runtime

servlet-container

JEE

tomcat

component management

dependency injection

JSR330 & JSR250

spring

configuration

framework

-

spring-boot

persistence

OR-mapper

JPA

hibernate

batch

framework

JSR352

spring-batch

service

SOAP services

JAX-WS

CXF

service

REST services

JAX-RS

CXF

logging

framework

slf4j

logback

validation

framework

beanvalidation/JSR303

hibernate-validator

security

Authentication & Authorization

JAAS

spring-security

monitoring

framework

JMX

spring

monitoring

HTTP Bridge

HTTP & JSON

jolokia

AOP

framework

dynamic proxies

spring AOP

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/coding-conventions.html b/docs/devon4j/1.0/coding-conventions.html new file mode 100644 index 00000000..59bdadc4 --- /dev/null +++ b/docs/devon4j/1.0/coding-conventions.html @@ -0,0 +1,1021 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Coding Conventions

+
+
+

The code should follow general conventions for Java (see Oracle Naming Conventions, Google Java Style, etc.). We consider this as common sense and provide configurations for SonarQube and related tools such as Checkstyle instead of repeating this here.

+
+
+
+
+

Naming

+
+
+

Besides general Java naming conventions, we follow the additional rules listed here explicitly:

+
+
+
    +
  • +

    Always use short but speaking names (for types, methods, fields, parameters, variables, constants, etc.).

    +
  • +
  • +

    Strictly avoid special characters in technical names (for files, types, fields, methods, properties, variables, database tables, columns, constraints, etc.). In other words only use Latin alphanumeric ASCII characters with the common allowed technical separators for the according context (e.g. underscore) for technical names (even excluding whitespaces).

    +
  • +
  • +

    For package segments and type names prefer singular forms (CustomerEntity instead of CustomersEntity). Only use plural forms when there is no singular or it is really semantically required (e.g. for a container that contains multiple of such objects).

    +
  • +
  • +

    Avoid having duplicate type names. The name of a class, interface, enum or annotation should be unique within your project unless this is intentionally desired in a special and reasonable situation.

    +
  • +
  • +

    Avoid artificial naming constructs such as prefixes (I*) or suffixes (*IF) for interfaces.

    +
  • +
  • +

    Use CamelCase even for abbreviations (XmlUtil instead of XMLUtil)

    +
  • +
  • +

    Avoid property/field names where the second character is upper-case at all (e.g. 'aBc'). See #1095 for details.

    +
  • +
  • +

    Names of Generics should be easy to understand. Where suitable follow the common rule E=Element, T=Type, K=Key, V=Value but feel free to use longer names for more specific cases such as ID, DTO or ENTITY. The capitalized naming helps to distinguish a generic type from a regular class.

    +
  • +
+
+
+
+
+

Packages

+
+
+

Java Packages are the most important element to structure your code. We use a strict packaging convention to map technical layers and business components (slices) to the code (See technical architecture for further details). By using the same names in documentation and code we create a strong link that gives orientation and makes it easy to find from business requirements, specifications or story tickets into the code and back.

+
+
+

For a devon4j based application we use the following Java-Package schema:

+
+
+
+
«root».«component».«layer»[.«detail»]
+
+
+
+

E.g. in our example application we find the Spring Data repositories for the ordermanagement component in the package com.devonfw.application.mtsj.ordermanagement.dataaccess.api.repo

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Segments of package schema
SegmentDescriptionExample

«root»

Is the basic Java Package name-space of your app. Typically we suggest to use «group».«artifact» where «group» is your maven/gradle groupId corresponding to your organization or IT project owning the code following common Java Package conventions. The segment «artifact» is your maven/gradle artifactId and is typically the technical name of your app.

com.devonfw.application.mtsj

«component»

The (business) component the code belongs to. It is defined by the business architecture and uses terms from the business domain. Use the implicit component general for code not belonging to a specific component (foundation code).

salesmanagement

«layer»

The name of the technical layer (See technical architecture). Details are described for the modern project structure and for the classic project structure.

logic

«detail»

Here you are free to further divide your code into sub-components and other concerns according to the size of your component part. If you want to strictly separate API from implementation you should start «detail» with «scope» that is explained below.

dao

«scope»

The scope which is one of api (official API to be used by other layers or components), base (basic code to be reused by other implementations) and impl (implementation that should never be imported from outside). This segment was initially mandatory but due to trends such as microservices, lean, and agile we decided to make it optional and do not force anybody to use it.

api

+
+

Please note that devon4j library modules for spring use com.devonfw.module as «root» and the name of the module as «component». E.g. the API of our beanmapping module can be found in the package com.devonfw.module.beanmapping.common.api.

+
+
+
+
+

Code Tasks

+
+
+

Code spots that need some rework can be marked with the following task tags. These are already properly pre-configured in your development environment for auto completion and to view tasks you are responsible for. It is important to keep the number of code tasks low. Therefore, every member of the team should be responsible for the overall code quality. So if you change a piece of code and hit a code task that you can resolve in a reliable way, please do this as part of your change and remove the according tag.

+
+
+

TODO

+
+

Used to mark a piece of code that is not yet complete (typically because it can not be completed due to a dependency on something that is not ready).

+
+
+
+
 // TODO «author» «description»
+
+
+
+

A TODO tag is added by the author of the code who is also responsible for completing this task.

+
+
+
+

FIXME

+
+
+
 // FIXME «author» «description»
+
+
+
+

A FIXME tag is added by the author of the code or someone who found a bug he can not fix right now. The «author» who added the FIXME is also responsible for completing this task. This is very similar to a TODO but with a higher priority. FIXME tags indicate problems that should be resolved before a release is completed while TODO tags might have to stay for a longer time.

+
+
+
+

REVIEW

+
+
+
 // REVIEW «responsible» («reviewer») «description»
+
+
+
+

A REVIEW tag is added by a reviewer during a code review. Here the original author of the code is responsible to resolve the REVIEW tag and the reviewer is assigning this task to him. This is important for feedback and learning and has to be aligned with a review "process" where people talk to each other and get into discussion. In smaller or local teams a peer-review is preferable but this does not scale for large or even distributed teams.

+
+
+
+
+
+

Code-Documentation

+
+
+

As a general goal, the code should be easy to read and understand. Besides clear naming, the documentation is important. We follow these rules:

+
+
+
    +
  • +

    APIs (especially component interfaces) are properly documented with JavaDoc.

    +
  • +
  • +

    JavaDoc shall provide actual value - we do not write JavaDoc to satisfy tools such as checkstyle but to express information not already available in the signature.

    +
  • +
  • +

    We make use of {@link} tags in JavaDoc to make it more expressive.

    +
  • +
  • +

    JavaDoc of APIs describes how to use the type or method and not how the implementation internally works.

    +
  • +
  • +

    To document implementation details, we use code comments (e.g. // we have to flush explicitly to ensure version is up-to-date). This is only needed for complex logic.

    +
  • +
  • +

    Avoid the pointless {@inheritDoc} as since Java 1.5 there is the @Override annotation for overridden methods and your JavaDoc is inherited automatically even without any JavaDoc comment at all.

    +
  • +
+
+
+
+
+

Code-Style

+
+
+

This section gives you best practices to write better code and avoid pitfalls and mistakes.

+
+
+

BLOBs

+
+

Avoid using byte[] for BLOBs as this will load them entirely into your memory. This will cause performance issues or out of memory errors. Instead, use streams when dealing with BLOBs. For further details see BLOB support.

+
+
+
+

Stateless Programming

+
+

When implementing logic as components or beans of your container using dependency injection, we strongly encourage stateless programming. +This is not about data objects like an entity or transfer-object that are stateful by design. +Instead this applies to all classes annotated with @Named, @ApplicationScoped, @Stateless, etc. and all their super-classes. +These classes especially include your repositories, use-cases, and REST services. +Such classes shall never be modified after initialization. +Methods called at runtime (after initialization via the container) do not assign fields (member variables of your class) or mutate the object stored in a field. +This allows your component or bean to be stateless and thread-safe. +Therefore it can be initialized as a singleton so only one instance is created and shared across all threads of the application. +Here is an example:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcApproveContractImpl implements UcApproveContract {
+
+  // bad
+  private String contractOwner;
+
+  private MyState state;
+
+  @Override
+  public void approve(Contract contract) {
+    this.contractOwner = contract.getOwner();
+    this.contractOwner = this.contractOwner.toLowerCase(Locale.US);
+    this.state.setAdmin(this.contractOwner.endsWith("admin"));
+    if (this.state.isAdmin()) {
+      ...
+    } else {
+      ...
+    }
+  }
+
+  // fine
+  @Override
+  public void approveContract(Contract contract) {
+    String contractOwner = contract.getOwner().toLowerCase(Locale.US);
+    if (contractOwner.endsWith("admin")) {
+      ...
+    } else {
+      ...
+    }
+  }
+}
+
+
+
+

As you can see in the bad code fields of the class are assigned when the method approve is called. +So multiple users and therefore threads calling this method concurrently can interfere and override this state causing side-effects on parallel threads. +This will lead to nasty bugs and errors that are hard to trace down. +They will not occur in simple tests but for sure in production with real users. +Therefore never do this and implement your functionality stateless. +That is keeping all state in local variables and strictly avoid modifying fields or their value as illustrated in the fine code. +If you find yourself passing many parameters between methods that all represent state, you can easily create a separate class that encapsulates this state. +However, then you need to create this state object in your method as local variable and pass it between methods as parameter:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcApproveContractImpl implements UcApproveContract {
+
+  // fine
+  @Override
+  public void approveContract(Contract contract) {
+    String contractOwner = contract.getOwner().toLowerCase(Locale.US);
+    MyState state = new MyState();
+    state.setAdmin(contractOwner.endsWith("admin"));
+    doApproveContract(contract, state);
+  }
+}
+
+
+
+
+

Closing Resources

+
+

Resources such as streams (InputStream, OutputStream, Reader, Writer) or transactions need to be handled properly. Therefore, it is important to follow these rules:

+
+
+
    +
  • +

    Each resource has to be closed properly, otherwise you will get out of file handles, TX sessions, memory leaks or the like

    +
  • +
  • +

    Where possible avoid to deal with such resources manually. That is why we are recommending @Transactional for transactions in devonfw (see Transaction Handling).

    +
  • +
  • +

    In case you have to deal with resources manually (e.g. binary streams) ensure to close them properly. See the example below for details.

    +
  • +
+
+
+

Closing streams and other such resources is error prone. Have a look at the following example:

+
+
+
+
// bad
+try {
+  InputStream in = new FileInputStream(file);
+  readData(in);
+  in.close();
+} catch (IOException e) {
+  throw new IllegalStateException("Failed to read data.", e);
+}
+
+
+
+

The code above is wrong as in case of an IOException the InputStream is not properly closed. In a server application such mistakes can cause severe errors that typically will only occur in production. As such resources implement the AutoCloseable interface you can use the try-with-resource syntax to write correct code. The following code shows a correct version of the example:

+
+
+
+
// fine
+try (InputStream in = new FileInputStream(file)) {
+  readData(in);
+} catch (IOException e) {
+  throw new IllegalStateException("Failed to read data.", e);
+}
+
+
+
+
+

Catching and handling Exceptions

+
+

When catching exceptions always ensure the following:

+
+
+
    +
  • +

    Never call printStackTrace() method on an exception

    +
  • +
  • +

    Either log or wrap and re-throw the entire caught exception. Be aware that the cause(s) of an exception is very valuable information. If you lose such information by improper exception-handling you may be unable to properly analyse production problems which can cause severe issues.

    +
    +
      +
    • +

      If you wrap and re-throw an exception ensure that the caught exception is passed as cause to the newly created and thrown exception.

      +
    • +
    • +

      If you log an exception ensure that the entire exception is passed as argument to the logger (and not only the result of getMessage() or toString() on the exception).

      +
    • +
    +
    +
  • +
  • +

    See exception handling

    +
  • +
+
+
+
+

Lambdas and Streams

+
+

With Java8 you have cool new features like lambdas and monads like (Stream, CompletableFuture, Optional, etc.). +However, these new features can also be misused or lead to code that is hard to read or debug. To avoid pain, we give you the following best practices:

+
+
+
    +
  1. +

    Learn how to use the new features properly before using. Developers are often keen on using cool new features. When you do your first experiments in your project code you will cause deep pain and might be ashamed afterwards. Please study the features properly. Even Java8 experts still write for loops to iterate over collections, so only use these features where it really makes sense.

    +
  2. +
  3. +

    Streams shall only be used in fluent API calls as a Stream can not be forked or reused.

    +
  4. +
  5. +

    Each stream has to have exactly one terminal operation.

    +
  6. +
  7. +

    Do not write multiple statements into lambda code:

    +
    +
    +
    // bad
    +collection.stream().map(x -> {
    +Foo foo = doSomething(x);
    +...
    +return foo;
    +}).collect(Collectors.toList());
    +
    +
    +
    +

    This style makes the code hard to read and debug. Never do that! Instead, extract the lambda body to a private method with a meaningful name:

    +
    +
    +
    +
    // fine
    +collection.stream().map(this::convertToFoo).collect(Collectors.toList());
    +
    +
    +
  8. +
  9. +

    Do not use parallelStream() in general code (that will run on server side) unless you know exactly what you are doing and what is going on under the hood. Some developers might think that using parallel streams is a good idea as it will make the code faster. However, if you want to do performance optimizations talk to your technical lead (architect). Many features such as security and transactions will rely on contextual information that is associated with the current thread. Hence, using parallel streams will most probably cause serious bugs. Only use them for standalone (CLI) applications or for code that is just processing large amounts of data.

    +
  10. +
  11. +

    Do not perform operations on a sub-stream inside a lambda:

    +
    +
    +
    set.stream().flatMap(x -> x.getChildren().stream().filter(this::isSpecial)).collect(Collectors.toList()); // bad
    +set.stream().flatMap(x -> x.getChildren().stream()).filter(this::isSpecial).collect(Collectors.toList()); // fine
    +
    +
    +
  12. +
  13. +

    Only use collect at the end of the stream:

    +
    +
    +
    set.stream().collect(Collectors.toList()).forEach(...) // bad
    +set.stream().peek(...).collect(Collectors.toList()) // fine
    +
    +
    +
  14. +
  15. +

    Lambda parameters with Types inference

    +
    +
    +
    (String a, Float b, Byte[] c) -> a.toString() + Float.toString(b) + Arrays.toString(c)  // bad
    +(a,b,c)  -> a.toString() + Float.toString(b) + Arrays.toString(c)  // fine
    +
    +Collections.sort(personList, (Person p1, Person p2) -> p1.getSurName().compareTo(p2.getSurName()));  // bad
    +Collections.sort(personList, (p1, p2) -> p1.getSurName().compareTo(p2.getSurName()));  // fine
    +
    +
    +
  16. +
  17. +

    Avoid Return Braces and Statement

    +
    +
    +
     a ->  { return a.toString(); } // bad
    + a ->  a.toString();   // fine
    +
    +
    +
  18. +
  19. +

    Avoid Parentheses with Single Parameter

    +
    +
    +
    (a) -> a.toString(); // bad
    + a -> a.toString();  // fine
    +
    +
    +
  20. +
  21. +

    Avoid if/else inside foreach method. Use Filter method & comprehension

    +
    +
    +
    // bad
    +public List<String> twitterHandles(List<Author> authors, String company) {
    +    final List<String> result = new ArrayList<String>();
    +    for (Author a : authors) {
    +      if (a.getCompany().equals(company)) {
    +        String handle = a.getTwitterHandle();
    +        if (handle != null)
    +          result.add(handle);
    +      }
    +    }
    +    return result;
    +  }
    +
    +
    +
    +
    +
    // fine
    +public List<String> twitterHandles(List<Author> authors, String company) {
    +    return authors.stream()
    +            .filter(a -> null != a && a.getCompany().equals(company))
    +            .map(a -> a.getTwitterHandle())
    +            .collect(toList());
    +  }
    +
    +
    +
  22. +
+
+
+
+

Optionals

+
+

With Optional you can wrap values to avoid a NullPointerException (NPE). However, it is not a good code-style to use Optional for every parameter or result to express that it may be null. For such case use @Nullable or even better instead annotate @NotNull where null is not acceptable.

+
+
+

However, Optional can be used to prevent NPEs in fluent calls (due to the lack of the elvis operator):

+
+
+
+
Long id;
+id = fooCto.getBar().getBar().getId(); // may cause NPE
+id = Optional.ofNullable(fooCto).map(FooCto::getBar).map(BarCto::getBar).map(BarEto::getId).orElse(null); // null-safe
+
+
+
+
+

Encoding

+
+

Encoding (esp. Unicode with combining characters and surrogates) is a complex topic. Please study this topic if you have to deal with encodings and processing of special characters. For the basics follow these recommendations:

+
+
+
    +
  • +

    Whenever possible prefer unicode (UTF-8 or better) as encoding. This especially impacts your databases and has to be defined upfront as it typically can not be changed (easily) afterwards.

    +
  • +
  • +

    Do not cast from byte to char (unicode characters can be composed of multiple bytes, such cast may only work for ASCII characters)

    +
  • +
  • +

    Never convert the case of a String using the default locale (esp. when writing generic code like in devonfw). E.g. if you do "HI".toLowerCase() and your system locale is Turkish, then the output will be "hı" instead of "hi", which can lead to wrong assumptions and serious problems. If you want to do a "universal" case conversion always explicitly use an according western locale (e.g. toLowerCase(Locale.US)). Consider using a helper class (see e.g. CaseHelper) or create your own little static utility for that in your project.

    +
  • +
  • +

    Write your code independent from the default encoding (system property file.encoding) - this will most likely differ in JUnit from production environment

    +
    +
      +
    • +

      Always provide an encoding when you create a String from byte[]: new String(bytes, encoding)

      +
    • +
    • +

      Always provide an encoding when you create a Reader or Writer : new InputStreamReader(inStream, encoding)

      +
    • +
    +
    +
  • +
+
+
+
+

Prefer general API

+
+

Avoid unnecessary strong bindings:

+
+
+
    +
  • +

    Do not bind your code to implementations such as Vector or ArrayList instead of List

    +
  • +
  • +

    In APIs for input (=parameters) always consider to make little assumptions:

    +
    +
      +
    • +

      prefer Collection over List or Set where the difference does not matter (e.g. only use Set when you require uniqueness or highly efficient contains)

      +
    • +
    • +

      consider preferring Collection<? extends Foo> over Collection<Foo> when Foo is an interface or super-class

      +
    • +
    +
    +
  • +
+
+
+
+

Prefer primitive boolean

+
+

Unless in rare cases where you need to allow a flag being null avoid using the object type Boolean.

+
+
+
+
// bad
+public Boolean isEmpty() {
+  return size() == 0;
+}
+
+
+
+

Instead always use the primitive boolean type:

+
+
+
+
// fine
+public boolean isEmpty() {
+  return size() == 0;
+}
+
+
+
+

The only known excuse is for flags in embeddable types due to limitations of hibernate.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/coding-tools.html b/docs/devon4j/1.0/coding-tools.html new file mode 100644 index 00000000..22f89e67 --- /dev/null +++ b/docs/devon4j/1.0/coding-tools.html @@ -0,0 +1,492 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Tools

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Development Tools used for devon4j
TopicDetailSuggested Tool

build-management

*

maven

IDE

IDE

Eclipse

IDE

setup & update

devonfw-ide

IDE

code generation

CobiGen

Testing

Unit-Testing

JUnit

Testing

Mocking

Mockito & WireMock

Testing

Integration-Testing

spring-test (arquillian for JEE)

Testing

End-to-end

MrChecker

Quality

Code-Analysis

SonarQube

+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/decision-service-framework.html b/docs/devon4j/1.0/decision-service-framework.html new file mode 100644 index 00000000..cb0fdd6d --- /dev/null +++ b/docs/devon4j/1.0/decision-service-framework.html @@ -0,0 +1,488 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

Decision Sheet for Choosing a Service Framework

+
+
+

We need to choose which framework(s) we want to use for building services. For the devonfw, we focus on a standard API, if available. However, we also want to recommend an implementation. While projects would still be able to choose whatever they want, we want to suggest the best, most robust, and established solution. This way, projects do not have to worry about the decision and can rely on a production-ready framework without running into any trouble. Also, besides the standard, the configuration of the implementation framework differs, so we want to give instructions in the documentation and by our sample application. This is why, in the end, the implementation also matters. If a project has a customer demand to use something else, the project has to take care of it. We will always suggest and "support" ONE solution.

+
+
+
+
+

REST Services

+
+
+

For REST services, devonfw relies on the JAX-RS standard (and NOT on spring-mvc with its proprietary annotations). JAX-RS (Jakarta RESTful Web Services) is a Java programming language API to develop web services following the Representational State Transfer (REST) architectural pattern. +For Apache CXF, the spring container was the first choice, but container abstraction has been properly introduced by design, so it can be used in JEE application servers. Apache CXF is a services framework that helps to build and develop services using frontend programming APIs, such as JAX-RS. Everything works smoothly in our sample application, and in addition, we collected feedback from various projects utilizing CXF, either with XML or JSON, with reported success in production. Therefore, we decided to use Apache CXF for Spring. +For Quarkus applications, devon4j recommends to use RESTEasy, which is a JAX-RS implementation aimed at providing productivity frameworks for developing client and server RESTful applications and services in Java.

+
+
+
+
+

WebServices

+
+
+

For WebServices we rely on the JAX-WS standard. On our short list we have Metro2 and Apache CXF. Here a collection of facts and considerations:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 1. Decision for JAX-WS implementation
Metro2Apache CXF

Pro

- reference implementation
+- proven in many projects
+- standard in RF

- supports both JAX-WS and JAX-RS therefore consistent configuration, single integration into servlet-container and spring
+- proven in a lot of projects
+- already chosen by devonfw for JAX-RS (so we already have a JAX-WS implementation on board).

Contra

- We expect trouble if we use the planned URL path scheme <app>/services/(rest|ws)/... as CXF and Metro2 would both occupy services/*
+- ugly endorsed trouble and small spring-integration issues with WSDL/XSD link resolution (input from Martin Girschik)

- IMHO currently used in less projects than metro2 so less existing experience

+
+

See also +http://predic8.de/axis2-cxf-jax-ws-vergleich.htm +We also had an evaluation at CSD research on CXF vs. Axis2. vs. Metro that suggested CXF.

+
+
+

BTW: Axis(2) is definitely out of discussion for devonfw.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/devon4j-doc.html b/docs/devon4j/1.0/devon4j-doc.html new file mode 100644 index 00000000..d2e0c8a4 --- /dev/null +++ b/docs/devon4j/1.0/devon4j-doc.html @@ -0,0 +1,12184 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Java

+
+
+

The devonfw community +${project.version}, ${buildtime}

+
+
+

devonfw provides a solution to building applications which combine best-in-class frameworks and libraries as well as industry proven practices and code conventions. +It massively speeds up development, reduces risks and helps you to deliver better results.

+
+
+

The following sections contain the complete compendium of devon4j, the Java stack of devonfw. +With devon4j we support both spring and quarkus as major frameworks. +However, the general coding patterns are based on common Java standards mainly from Jakarta EE and therefore do not differ between those frameworks. +Therefore, the general section contains all the documentation that is universal to Java and does not differ between the two frameworks. +Only the sections spring and quarkus contain documentation that is specific to the respective approach.

+
+
+

You can also read the latest version of this documentation online at the following sources:

+
+ +
+
+
+

1. General

+
+
+

Here you will find documentation and code-patterns for developing with Java in general, independent of the framework you choose.

+
+ +
+

1.1. Architecture

+
+

There are many different views that are summarized by the term architecture. First, we will introduce the key principles and architecture principles of devonfw. Then, we will go into details of the architecture of an application.

+
+
+
+

1.2. Key Principles

+
+

For devonfw we follow these fundamental key principles for all decisions about architecture, design, or choosing standards, libraries, and frameworks:

+
+
+
    +
  • +

    KISS
    +Keep it small and simple

    +
  • +
  • +

    Open
    +Commitment to open standards and solutions (no required dependencies to commercial or vendor-specific standards or solutions)

    +
  • +
  • +

    Patterns
    +We concentrate on providing patterns, best-practices and examples rather than writing framework code.

    +
  • +
  • +

    Solid
    We pick solutions that are established and have been proven to be solid and robust in real-life (business) projects.

    +
  • +
+
+
+
+

1.3. Architecture Principles

+
+

Additionally we define the following principles that our architecture is based on:

+
+
+
    +
  • +

    Component Oriented Design
    +We follow a strictly component oriented design to address the following sub-principles:

    +
    +
      +
    • +

      Separation of Concerns

      +
    • +
    • +

      Reusability and avoiding redundant code

      +
    • +
    • +

      Information Hiding via component API and its exchangeable implementation treated as secret.

      +
    • +
    • +

      Design by Contract for self-contained, descriptive, and stable component APIs.

      +
    • +
    • +

      Layering as well as separation of business logic from technical code for better maintenance.

      +
    • +
    • +

      Data Sovereignty (and high cohesion with low coupling) says that a component is responsible for its data and changes to this data shall only happen via the component. Otherwise, maintenance problems will arise to ensure that data remains consistent. Therefore, interfaces of a component that may be used by other components are designed call-by-value and not call-by-reference.

      +
    • +
    +
    +
  • +
  • +

    Homogeneity
    +Solve similar problems in similar ways and establish a uniform code-style.

    +
  • +
+
+
+

As an architect you should be prepared for the future by reading the TechnoVision.

+
+
+
+

1.4. Application Architecture

+
+

For the architecture of an application we distinguish the following views:

+
+
+
    +
  • +

    The Business Architecture describes an application from the business perspective. It divides the application into business components and with full abstraction of technical aspects.

    +
  • +
  • +

    The Technical Architecture describes an application from the technical implementation perspective. It divides the application into technical layers and defines which technical products and frameworks are used to support these layers.

    +
  • +
  • +

    The Infrastructure Architecture describes an application from the operational infrastructure perspective. It defines the nodes used to run the application including clustering, load-balancing and networking. This view is not explored further in this guide.

    +
  • +
+
+
+
Business Architecture
+
+

The business architecture divides the application into business components. A business component has a well-defined responsibility that it encapsulates. All aspects related to that responsibility have to be implemented within that business component. Further, the business architecture defines the dependencies between the business components. These dependencies need to be free of cycles. A business component exports its functionality via well-defined interfaces as a self-contained API. A business component may use another business component via its API and compliant with the dependencies defined by the business architecture.

+
+
+

As the business domain and logic of an application can be totally different, the devonfw can not define a standardized business architecture. Depending on the business domain it has to be defined from scratch or from a domain reference architecture template. For very small systems it may be suitable to define just a single business component containing all the code.

+
+
+
+
Technical Architecture
+
+

The technical architecture divides the application into technical layers based on the multilayered architecture. A layer is a unit of code with the same category such as a service or presentation logic. So, a layer is often supported by a technical framework. Each business component can therefore be split into component parts for each layer. However, a business component may not have component parts for every layer (e.g. only a presentation part that utilized logic from other components).

+
+
+

An overview of the technical reference architecture of the devonfw is given by figure "Technical Reference Architecture". +It defines the following layers visualized as horizontal boxes:

+
+
+ +
+
+

Also, you can see the (business) components as vertical boxes (e.g. A and X) and how they are composed out of component parts each one assigned to one of the technical layers.

+
+
+

Further, there are technical components for cross-cutting aspects grouped by the gray box on the left. Here is a complete list:

+
+ +
+
+devonfw architecture blueprint +
+
Figure 1. Technical Reference Architecture
+
+
+

Please click on the architecture image to open it as SVG and click on the layers and cross-cutting topics to open the according documentation guide.

+
+
+

We reflect this architecture in our code as described in our coding conventions allowing a traceability of business components, use-cases, layers, etc. into the code and giving +developers a sound orientation within the project.

+
+
+

Further, the architecture diagram shows the allowed dependencies illustrated by the dark green connectors. +Within a business component a component part can call the next component part on the layer directly below via a dependency on its API (vertical connectors). +While this is natural and obvious, it is generally forbidden to have dependencies upwards the layers +or to skip a layer by a direct dependency on a component part two or more layers below. +The general dependencies allowed between business components are defined by the business architecture. +In our reference architecture diagram we assume that the business component A1 is allowed to depend +on component A2. Therefore, a use-case within the logic component part of A1 is allowed to call a +use-case from A2 via a dependency on the component API. The same applies for dialogs on the client layer. +This is illustrated by the horizontal connectors. Please note that persistence entities are part of the API of the data-access component part so only the logic component part of the same +business component may depend on them.

+
+
+

The technical architecture has to address non-functional requirements:

+
+
+
    +
  • +

    scalability
    +is established by keeping state in the client and making the server state-less (except for login session). Via load-balancers new server nodes can be added to improve performance (horizontal scaling).

    +
  • +
  • +

    availability and reliability
    +are addressed by clustering with redundant nodes avoiding any single-point-of failure. If one node fails the system is still available. Further, the software has to be robust so there are no dead-locks or other bad effects that can make the system unavailable or not reliable.

    +
  • +
  • +

    security
    +is archived in the devonfw by the right templates and best-practices that avoid vulnerabilities. See security guidelines for further details.

    +
  • +
  • +

    performance
    +is obtained by choosing the right products and proper configurations. While the actual implementation of the application matters for performance a proper design is important as it is the key to allow performance-optimizations (see e.g. caching).

    +
  • +
+
+
+
Technology Stack
+
+

The technology stack of the devonfw is illustrated by the following table.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Technology Stack of devonfw
TopicDetailStandardSuggested implementation

runtime

language & VM

Java

Oracle JDK

runtime

servlet-container

JEE

tomcat

component management

dependency injection

JSR330 & JSR250

spring

configuration

framework

-

spring-boot

persistence

OR-mapper

JPA

hibernate

batch

framework

JSR352

spring-batch

service

SOAP services

JAX-WS

CXF

service

REST services

JAX-RS

CXF

logging

framework

slf4j

logback

validation

framework

beanvalidation/JSR303

hibernate-validator

security

Authentication & Authorization

JAAS

spring-security

monitoring

framework

JMX

spring

monitoring

HTTP Bridge

HTTP & JSON

jolokia

AOP

framework

dynamic proxies

spring AOP

+
+ +
+
+
+
+

1.5. Configuration

+
+

An application needs to be configurable in order to allow internal setup (like CDI) but also to allow externalized configuration of a deployed package (e.g. integration into runtime environment). We rely on a comprehensive configuration approach following a "convention over configuration" pattern. This guide adds on to this by detailed instructions and best-practices how to deal with configurations.

+
+
+

In general we distinguish the following kinds of configuration that are explained in the following sections:

+
+
+ +
+
+
+

1.6. Internal Application Configuration

+
+

The application configuration contains all internal settings and wirings of the application (bean wiring, database mappings, etc.) and is maintained by the application developers at development time.

+
+
+

For more detail of Spring stack, see here

+
+
+
+

1.7. Externalized Configuration

+
+

Externalized configuration is a configuration that is provided separately to a deployment package and can be maintained undisturbed by re-deployments.

+
+
+
Environment Configuration
+
+

The environment configuration contains configuration parameters (typically port numbers, host names, passwords, logins, timeouts, certificates, etc.) specific for the different environments. These are under the control of the operators responsible for the application.

+
+
+

The environment configuration is maintained in application.properties files, defining various properties. +These properties are explained in the corresponding configuration sections of the guides for each topic:

+
+
+ +
+
+

Make sure your properties are thoroughly documented by providing a comment to each property. This inline documentation is most valuable for your operating department.

+
+
+

More about structuring your application.properties files can be read here for Spring.

+
+
+

For Quarkus, please refer to Quarkus Config Reference for more details.

+
+
+
+
Business Configuration
+
+

Often applications do not need business configuration. In case they do it should typically be editable by administrators via the GUI. The business configuration values should therefore be stored in the database in key/value pairs.

+
+
+

Therefore we suggest to create a dedicated table with (at least) the following columns:

+
+
+
    +
  • +

    ID

    +
  • +
  • +

    Property name

    +
  • +
  • +

    Property type (Boolean, Integer, String)

    +
  • +
  • +

    Property value

    +
  • +
  • +

    Description

    +
  • +
+
+
+

According to the entries in this table, an administrative GUI may show a generic form to modify business configuration. Boolean values should be shown as checkboxes, integer and string values as text fields. The values should be validated according to their type so an error is raised if you try to save a string in an integer property for example.

+
+
+

We recommend the following base layout for the hierarchical business configuration:

+
+
+

component.[subcomponent].[subcomponent].propertyname

+
+
+
+
+

1.8. Security

+
+

Often you need to have passwords (for databases, third-party services, etc.) as part of your configuration. These are typically environment specific (see above). However, with DevOps and continuous-deployment you might be tempted to commit such configurations into your version-control (e.g. git). Doing that with plain text passwords is a severe problem especially for production systems. Never do that! Instead we offer some suggestions how to deal with sensible configurations:

+
+
+
Password Encryption
+
+

A simple but reasonable approach is to configure the passwords encrypted with a master-password. The master-password should be a strong secret that is specific for each environment. It must never be committed to version-control.

+
+
+

For Spring, we use jasypt-spring-boot. For more details, see here

+
+
+

For Quarkus, see here

+
+
+
Is this Security by Obscurity?
+
+
    +
  • +

    Yes, from the point of view to protect the passwords on the target environment this is nothing but security by obscurity. If an attacker somehow got full access to the machine this will only cause him to spend some more time.

    +
  • +
  • +

    No, if someone only gets the configuration file. So all your developers might have access to the version-control where the config is stored. Others might have access to the software releases that include this configs. But without the master-password that should only be known to specific operators none else can decrypt the password (except with brute-force what will take a very long time, see jasypt for details).

    +
  • +
+
+
+ +
+
+
+
+

1.9. Coding Conventions

+
+

The code should follow general conventions for Java (see Oracle Naming Conventions, Google Java Style, etc.). We consider this as common sense and provide configurations for SonarQube and related tools such as Checkstyle instead of repeating this here.

+
+
+
+

1.10. Naming

+
+

Besides general Java naming conventions, we follow the additional rules listed here explicitly:

+
+
+
    +
  • +

    Always use short but speaking names (for types, methods, fields, parameters, variables, constants, etc.).

    +
  • +
  • +

    Strictly avoid special characters in technical names (for files, types, fields, methods, properties, variables, database tables, columns, constraints, etc.). In other words only use Latin alphanumeric ASCII characters with the common allowed technical separators for the according context (e.g. underscore) for technical names (even excluding whitespaces).

    +
  • +
  • +

    For package segments and type names prefer singular forms (CustomerEntity instead of CustomersEntity). Only use plural forms when there is no singular or it is really semantically required (e.g. for a container that contains multiple of such objects).

    +
  • +
  • +

    Avoid having duplicate type names. The name of a class, interface, enum or annotation should be unique within your project unless this is intentionally desired in a special and reasonable situation.

    +
  • +
  • +

    Avoid artificial naming constructs such as prefixes (I*) or suffixes (*IF) for interfaces.

    +
  • +
  • +

    Use CamelCase even for abbreviations (XmlUtil instead of XMLUtil)

    +
  • +
  • +

    Avoid property/field names where the second character is upper-case at all (e.g. 'aBc'). See #1095 for details.

    +
  • +
  • +

    Names of Generics should be easy to understand. Where suitable follow the common rule E=Element, T=Type, K=Key, V=Value but feel free to use longer names for more specific cases such as ID, DTO or ENTITY. The capitalized naming helps to distinguish a generic type from a regular class.

    +
  • +
+
+
+
+

1.11. Packages

+
+

Java Packages are the most important element to structure your code. We use a strict packaging convention to map technical layers and business components (slices) to the code (See technical architecture for further details). By using the same names in documentation and code we create a strong link that gives orientation and makes it easy to find from business requirements, specifications or story tickets into the code and back.

+
+
+

For a devon4j-based application we use the following Java-Package schema:

+
+
+
+
«root».«component».«layer»[.«detail»]
+
+
+
+

E.g. in our example application we find the Spring Data repositories for the ordermanagement component in the package com.devonfw.application.mtsj.ordermanagement.dataaccess.api.repo

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2. Segments of package schema
SegmentDescriptionExample

«root»

Is the basic Java Package name-space of your app. Typically we suggest to use «group».«artifact» where «group» is your maven/gradle groupId corresponding to your organization or IT project owning the code following common Java Package conventions. The segment «artifact» is your maven/gradle artifactId and is typically the technical name of your app.

com.devonfw.application.mtsj

«component»

The (business) component the code belongs to. It is defined by the business architecture and uses terms from the business domain. Use the implicit component general for code not belonging to a specific component (foundation code).

salesmanagement

«layer»

The name of the technical layer (See technical architecture). Details are described for the modern project structure and for the classic project structure.

logic

«detail»

Here you are free to further divide your code into sub-components and other concerns according to the size of your component part. If you want to strictly separate API from implementation you should start «detail» with «scope» that is explained below.

dao

«scope»

The scope which is one of api (official API to be used by other layers or components), base (basic code to be reused by other implementations) and impl (implementation that should never be imported from outside). This segment was initially mandatory but due to trends such as microservices, lean, and agile we decided to make it optional and do not force anybody to use it.

api

+
+

Please note that devon4j library modules for spring use com.devonfw.module as «root» and the name of the module as «component». E.g. the API of our beanmapping module can be found in the package com.devonfw.module.beanmapping.common.api.

+
+
+
+

1.12. Code Tasks

+
+

Code spots that need some rework can be marked with the following tasks tags. These are already properly pre-configured in your development environment for auto completion and to view tasks you are responsible for. It is important to keep the number of code tasks low. Therefore, every member of the team should be responsible for the overall code quality. So if you change a piece of code and hit a code task that you can resolve in a reliable way, please do this as part of your change and remove the according tag.

+
+
+
TODO
+
+

Used to mark a piece of code that is not yet complete (typically because it can not be completed due to a dependency on something that is not ready).

+
+
+
+
 // TODO «author» «description»
+
+
+
+

A TODO tag is added by the author of the code who is also responsible for completing this task.

+
+
+
+
FIXME
+
+
+
 // FIXME «author» «description»
+
+
+
+

A FIXME tag is added by the author of the code or someone who found a bug he can not fix right now. The «author» who added the FIXME is also responsible for completing this task. This is very similar to a TODO but with a higher priority. FIXME tags indicate problems that should be resolved before a release is completed while TODO tags might have to stay for a longer time.

+
+
+
+
REVIEW
+
+
+
 // REVIEW «responsible» («reviewer») «description»
+
+
+
+

A REVIEW tag is added by a reviewer during a code review. Here the original author of the code is responsible to resolve the REVIEW tag and the reviewer is assigning this task to him. This is important for feedback and learning and has to be aligned with a review "process" where people talk to each other and get into discussion. In smaller or local teams a peer-review is preferable but this does not scale for large or even distributed teams.

+
+
+
+
+

1.13. Code-Documentation

+
+

As a general goal, the code should be easy to read and understand. Besides clear naming, the documentation is important. We follow these rules:

+
+
+
    +
  • +

    APIs (especially component interfaces) are properly documented with JavaDoc.

    +
  • +
  • +

    JavaDoc shall provide actual value - we do not write JavaDoc to satisfy tools such as checkstyle but to express information not already available in the signature.

    +
  • +
  • +

    We make use of {@link} tags in JavaDoc to make it more expressive.

    +
  • +
  • +

    JavaDoc of APIs describes how to use the type or method and not how the implementation internally works.

    +
  • +
  • +

    To document implementation details, we use code comments (e.g. // we have to flush explicitly to ensure version is up-to-date). This is only needed for complex logic.

    +
  • +
  • +

    Avoid the pointless {@inheritDoc} as since Java 1.5 there is the @Override annotation for overridden methods and your JavaDoc is inherited automatically even without any JavaDoc comment at all.

    +
  • +
+
+
+
+

1.14. Code-Style

+
+

This section gives you best practices to write better code and avoid pitfalls and mistakes.

+
+
+
BLOBs
+
+

Avoid using byte[] for BLOBs as this will load them entirely into your memory. This will cause performance issues or out of memory errors. Instead, use streams when dealing with BLOBs. For further details see BLOB support.

+
+
+
+
Stateless Programming
+
+

When implementing logic as components or beans of your container using dependency injection, we strongly encourage stateless programming. +This is not about data objects like an entity or transfer-object that are stateful by design. +Instead this applies to all classes annotated with @Named, @ApplicationScoped, @Stateless, etc. and all their super-classes. +These classes especially include your repositories, use-cases, and REST services. +Such classes shall never be modified after initialization. +Methods called at runtime (after initialization via the container) do not assign fields (member variables of your class) or mutate the object stored in a field. +This allows your component or bean to be stateless and thread-safe. +Therefore it can be initialized as a singleton so only one instance is created and shared across all threads of the application. +Here is an example:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcApproveContractImpl implements UcApproveContract {
+
+  // bad
+  private String contractOwner;
+
+  private MyState state;
+
+  @Override
+  public void approve(Contract contract) {
+    this.contractOwner = contract.getOwner();
+    this.contractOwner = this.contractOwner.toLowerCase(Locale.US);
+    this.state.setAdmin(this.contractOwner.endsWith("admin"));
+    if (this.state.isAdmin()) {
+      ...
+    } else {
+      ...
+    }
+  }
+
+  // fine
+  @Override
+  public void approveContract(Contract contract) {
+    String contractOwner = contract.getOwner().toLowerCase(Locale.US);
+    if (contractOwner.endsWith("admin")) {
+      ...
+    } else {
+      ...
+    }
+  }
+}
+
+
+
+

As you can see in the bad code fields of the class are assigned when the method approve is called. +So multiple users and therefore threads calling this method concurrently can interfere and override this state causing side-effects on parallel threads. +This will lead to nasty bugs and errors that are hard to trace down. +They will not occur in simple tests but for sure in production with real users. +Therefore never do this and implement your functionality stateless. +That is keeping all state in local variables and strictly avoid modifying fields or their value as illustrated in the fine code. +If you find yourself passing many parameters between methods that all represent state, you can easily create a separate class that encapsulates this state. +However, then you need to create this state object in your method as local variable and pass it between methods as parameter:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcApproveContractImpl implements UcApproveContract {
+
+  // fine
+  @Override
+  public void approveContract(Contract contract) {
+    String contractOwner = contract.getOwner().toLowerCase(Locale.US);
+    MyState state = new MyState();
+    state.setAdmin(contractOwner.endsWith("admin"));
+    doApproveContract(contract, state);
+  }
+}
+
+
+
+
+
Closing Resources
+
+

Resources such as streams (InputStream, OutputStream, Reader, Writer) or transactions need to be handled properly. Therefore, it is important to follow these rules:

+
+
+
    +
  • +

    Each resource has to be closed properly, otherwise you will get out of file handles, TX sessions, memory leaks or the like

    +
  • +
  • +

    Where possible avoid to deal with such resources manually. That is why we are recommending @Transactional for transactions in devonfw (see Transaction Handling).

    +
  • +
  • +

    In case you have to deal with resources manually (e.g. binary streams) ensure to close them properly. See the example below for details.

    +
  • +
+
+
+

Closing streams and other such resources is error prone. Have a look at the following example:

+
+
+
+
// bad
+try {
+  InputStream in = new FileInputStream(file);
+  readData(in);
+  in.close();
+} catch (IOException e) {
+  throw new IllegalStateException("Failed to read data.", e);
+}
+
+
+
+

The code above is wrong as in case of an IOException the InputStream is not properly closed. In a server application such mistakes can cause severe errors that typically will only occur in production. As such resources implement the AutoCloseable interface you can use the try-with-resource syntax to write correct code. The following code shows a correct version of the example:

+
+
+
+
// fine
+try (InputStream in = new FileInputStream(file)) {
+  readData(in);
+} catch (IOException e) {
+  throw new IllegalStateException("Failed to read data.", e);
+}
+
+
+
+
+
Catching and handling Exceptions
+
+

When catching exceptions always ensure the following:

+
+
+
    +
  • +

    Never call printStackTrace() method on an exception

    +
  • +
  • +

    Either log or wrap and re-throw the entire caught exception. Be aware that the cause(s) of an exception is very valuable information. If you lose such information by improper exception-handling you may be unable to properly analyse production problems which can cause severe issues.

    +
    +
      +
    • +

      If you wrap and re-throw an exception ensure that the caught exception is passed as cause to the newly created and thrown exception.

      +
    • +
    • +

      If you log an exception ensure that the entire exception is passed as argument to the logger (and not only the result of getMessage() or toString() on the exception).

      +
    • +
    +
    +
  • +
  • +

    See exception handling

    +
  • +
+
+
+
+
Lambdas and Streams
+
+

With Java8 you have cool new features like lambdas and monads like (Stream, CompletableFuture, Optional, etc.). +However, these new features can also be misused or lead to code that is hard to read or debug. To avoid pain, we give you the following best practices:

+
+
+
    +
  1. +

    Learn how to use the new features properly before using. Developers are often keen on using cool new features. When you do your first experiments in your project code you will cause deep pain and might be ashamed afterwards. Please study the features properly. Even Java8 experts still write for loops to iterate over collections, so only use these features where it really makes sense.

    +
  2. +
  3. +

    Streams shall only be used in fluent API calls as a Stream can not be forked or reused.

    +
  4. +
  5. +

    Each stream has to have exactly one terminal operation.

    +
  6. +
  7. +

    Do not write multiple statements into lambda code:

    +
    +
    +
    // bad
    +collection.stream().map(x -> {
    +Foo foo = doSomething(x);
    +...
    +return foo;
    +}).collect(Collectors.toList());
    +
    +
    +
    +

    This style makes the code hard to read and debug. Never do that! Instead, extract the lambda body to a private method with a meaningful name:

    +
    +
    +
    +
    // fine
    +collection.stream().map(this::convertToFoo).collect(Collectors.toList());
    +
    +
    +
  8. +
  9. +

    Do not use parallelStream() in general code (that will run on server side) unless you know exactly what you are doing and what is going on under the hood. Some developers might think that using parallel streams is a good idea as it will make the code faster. However, if you want to do performance optimizations talk to your technical lead (architect). Many features such as security and transactions will rely on contextual information that is associated with the current thread. Hence, using parallel streams will most probably cause serious bugs. Only use them for standalone (CLI) applications or for code that is just processing large amounts of data.

    +
  10. +
  11. +

    Do not perform operations on a sub-stream inside a lambda:

    +
    +
    +
    set.stream().flatMap(x -> x.getChildren().stream().filter(this::isSpecial)).collect(Collectors.toList()); // bad
    +set.stream().flatMap(x -> x.getChildren().stream()).filter(this::isSpecial).collect(Collectors.toList()); // fine
    +
    +
    +
  12. +
  13. +

    Only use collect at the end of the stream:

    +
    +
    +
    set.stream().collect(Collectors.toList()).forEach(...) // bad
    +set.stream().peek(...).collect(Collectors.toList()) // fine
    +
    +
    +
  14. +
  15. +

    Lambda parameters with Types inference

    +
    +
    +
    (String a, Float b, Byte[] c) -> a.toString() + Float.toString(b) + Arrays.toString(c)  // bad
    +(a,b,c)  -> a.toString() + Float.toString(b) + Arrays.toString(c)  // fine
    +
    +Collections.sort(personList, (Person p1, Person p2) -> p1.getSurName().compareTo(p2.getSurName()));  // bad
    +Collections.sort(personList, (p1, p2) -> p1.getSurName().compareTo(p2.getSurName()));  // fine
    +
    +
    +
  16. +
  17. +

    Avoid Return Braces and Statement

    +
    +
    +
     a ->  { return a.toString(); } // bad
    + a ->  a.toString();   // fine
    +
    +
    +
  18. +
  19. +

    Avoid Parentheses with Single Parameter

    +
    +
    +
    (a) -> a.toString(); // bad
    + a -> a.toString();  // fine
    +
    +
    +
  20. +
  21. +

    Avoid if/else inside foreach method. Use Filter method & comprehension

    +
    +
    +
    // bad
    +static public Iterator<String> TwitterHandles(Iterator<Author> authors, string company) {
    +    final List result = new ArrayList<String> ();
    +    foreach (Author a : authors) {
    +      if (a.Company.equals(company)) {
    +        String handle = a.TwitterHandle;
    +        if (handle != null)
    +          result.Add(handle);
    +      }
    +    }
    +    return result;
    +  }
    +
    +
    +
    +
    +
    // fine
    +public List<String> twitterHandles(List<Author> authors, String company) {
    +    return authors.stream()
    +            .filter(a -> null != a && a.getCompany().equals(company))
    +            .map(a -> a.getTwitterHandle())
    +            .collect(toList());
    +  }
    +
    +
    +
  22. +
+
+
+
+
Optionals
+
+

With Optional you can wrap values to avoid a NullPointerException (NPE). However, it is not a good code-style to use Optional for every parameter or result to express that it may be null. For such cases use @Nullable or, even better, annotate @NotNull where null is not acceptable.

+
+
+

However, Optional can be used to prevent NPEs in fluent calls (due to the lack of the elvis operator):

+
+
+
+
Long id;
+id = fooCto.getBar().getBar().getId(); // may cause NPE
+id = Optional.ofNullable(fooCto).map(FooCto::getBar).map(BarCto::getBar).map(BarEto::getId).orElse(null); // null-safe
+
+
+
+
+
Encoding
+
+

Encoding (esp. Unicode with combining characters and surrogates) is a complex topic. Please study this topic if you have to deal with encodings and processing of special characters. For the basics follow these recommendations:

+
+
+
    +
  • +

    Whenever possible prefer unicode (UTF-8 or better) as encoding. This especially impacts your databases and has to be defined upfront as it typically can not be changed (easily) afterwards.

    +
  • +
  • +

    Do not cast from byte to char (unicode characters can be composed of multiple bytes, such cast may only work for ASCII characters)

    +
  • +
  • +

    Never convert the case of a String using the default locale (esp. when writing generic code like in devonfw). E.g. if you do "HI".toLowerCase() and your system locale is Turkish, then the output will be "hı" instead of "hi", which can lead to wrong assumptions and serious problems. If you want to do a "universal" case conversion always explicitly use an according western locale (e.g. toLowerCase(Locale.US)). Consider using a helper class (see e.g. CaseHelper) or create your own little static utility for that in your project.

    +
  • +
  • +

    Write your code independent from the default encoding (system property file.encoding) - this will most likely differ in JUnit from production environment

    +
    +
      +
    • +

      Always provide an encoding when you create a String from byte[]: new String(bytes, encoding)

      +
    • +
    • +

      Always provide an encoding when you create a Reader or Writer : new InputStreamReader(inStream, encoding)

      +
    • +
    +
    +
  • +
+
+
+
+
Prefer general API
+
+

Avoid unnecessary strong bindings:

+
+
+
    +
  • +

    Do not bind your code to implementations such as Vector or ArrayList instead of List

    +
  • +
  • +

    In APIs for input (=parameters) always consider to make little assumptions:

    +
    +
      +
    • +

      prefer Collection over List or Set where the difference does not matter (e.g. only use Set when you require uniqueness or highly efficient contains)

      +
    • +
    • +

      consider preferring Collection<? extends Foo> over Collection<Foo> when Foo is an interface or super-class

      +
    • +
    +
    +
  • +
+
+
+
+
Prefer primitive boolean
+
+

Unless in rare cases where you need to allow a flag being null avoid using the object type Boolean.

+
+
+
+
// bad
+public Boolean isEmpty() {
+  return size() == 0;
+}
+
+
+
+

Instead always use the primitive boolean type:

+
+
+
+
// fine
+public boolean isEmpty() {
+  return size() == 0;
+}
+
+
+
+

The only known excuse is for flags in embeddable types due to limitations of hibernate.

+
+ +
+
+
+

1.15. Project structure

+
+

In devonfw we want to give clear structure and guidance for building applications. +This also allows tools such as CobiGen or sonar-devon4j-plugin to "understand" the code. +Also this helps developers going from one devonfw project to the next one to quickly understand the code-base. +If every developer knows where to find what, the project gets more efficient. +A long time ago maven standardized the project structure with src/main/java, etc. and turned chaos into structure. +With devonfw we experienced the same for the codebase (what is inside src/main/java).

+
+
+

We initially started devon4j based on spring and spring-boot and proposed a classic project structure. +With modern cloud-native trends we added a modern project structure, that is more lean and up-to-date with the latest market trends.

+
+ +
+
+

1.16. Dependency Injection

+
+

Dependency injection is one of the most important design patterns and is a key principle to a modular and component based architecture. +The Java Standard for dependency injection is javax.inject (JSR330) that we use in combination with JSR250. +Additionally, for scoping you can use CDI (Context and Dependency Injection) from JSR365.

+
+
+

There are many frameworks which support this standard including all recent Java EE application servers. +Therefore in devonfw we rely on these open standards and can propagate patterns and code examples that work independent from the underlying frameworks.

+
+
+
+

1.17. Key Principles

+
+

Within dependency injection a bean is typically a reusable unit of your application providing an encapsulated functionality. +This bean can be injected into other beans and it should in general be replaceable. +As an example we can think of a use-case, a repository, etc. +As best practice we use the following principles:

+
+
+
    +
  • +

    Stateless implementation
    +By default such beans shall be implemented stateless. If you store state information in member variables you can easily run into concurrency problems and nasty bugs. This is easy to avoid by using local variables and separate state classes for complex state-information. Try to avoid stateful beans wherever possible. Only add state if you are fully aware of what you are doing and properly document this as a warning in your JavaDoc.

    +
  • +
  • +

    Usage of Java standards
    +We use common standards (see above) that makes our code portable. Therefore we use standardized annotations like @Inject (javax.inject.Inject) instead of proprietary annotations such as @Autowired. Generally we avoid proprietary annotations in business code (logic layer).

    +
  • +
  • +

    Simple injection-style
    +In general you can choose between constructor, setter or field injection. For simplicity we recommend to do private field injection as it is very compact and easy to maintain. We believe that constructor injection is bad for maintenance especially in case of inheritance (if you change the dependencies you need to refactor all sub-classes). Private field injection and public setter injection are very similar but setter injection is much more verbose (often you are even forced to have javadoc for all public methods). If you are writing re-usable library code setter injection will make sense as it is more flexible. In a business application you typically do not need that and can save a lot of boiler-plate code if you use private field injection instead. Nowadays you are using container infrastructure also for your tests (see testing) so there is no need to inject manually (what would require a public setter).

    +
  • +
  • +

    KISS
    +To follow the KISS (keep it small and simple) principle we avoid advanced features (e.g. custom AOP, non-singleton beans) and only use them where necessary.

    +
  • +
  • +

    Separation of API and implementation
    +For important components we should separate a self-contained API documented with JavaDoc from its implementation. Code from other components that wants to use the implementation shall only rely on the API. However, for things that will never be exchanged no API as interface is required you can skip such separation.

    +
  • +
+
+
+
+

1.18. Example Bean

+
+

Here you can see the implementation of an example bean using dependency injection:

+
+
+
+
@ApplicationScoped
+@Named("MyComponent")
+public class MyComponentImpl implements MyComponent {
+  @Inject
+  private MyOtherComponent myOtherComponent;
+
+  @PostConstruct
+  public void init() {
+    // initialization if required (otherwise omit this method)
+  }
+
+  @PreDestroy
+  public void dispose() {
+    // shutdown bean, free resources if required (otherwise omit this method)
+  }
+
+  ...
+}
+
+
+
+

Here MyComponentImpl depends on MyOtherComponent that is injected into the field myOtherComponent because of the @Inject annotation. +To make this work there must be exactly one bean in the container (e.g. spring or quarkus) that is an instance of MyOtherComponent. +In order to put a bean into the container, we can use @ApplicationScoped in case of CDI (required for quarkus) for a stateless bean. +In spring we can omit a CDI annotation and the @Named annotation is already sufficient as a bean is stateless by default in spring. +If we always use @ApplicationScoped we can make this more explicit and more portable across different frameworks. +So in our example we put MyComponentImpl into the container. +That bean will be called MyComponent as we specified in the @Named annotation but we can also omit the name to use the classname as fallback. +Now our bean can be injected into other beans using @Inject annotation either via MyComponent interface (recommended when interface is present) or even directly via MyComponentImpl. +In case you omit the interface, you should also omit the Impl suffix or instead use Bean as suffix.

+
+
+
+

1.19. Multiple bean implementations

+
+

In some cases you might have multiple implementations as beans for the same interface. +The following sub-sections handle the different scenarios to give you guidance.

+
+
+
Only one implementation in container
+
+

In some cases you still have only one implementation active as bean in the container at runtime. +A typical example is that you have different implementations for test and main usage. +This case is easy, as @Inject will always be unique. +The only thing you need to care about is how to configure your framework (spring, quarkus, etc.) to know which implementation to put in the container depending on specific configuration. +In spring this can be achieved via the proprietary @Profile annotation.

+
+
+
+
Injecting all of multiple implementations
+
+

In some situations you may have an interface that defines a kind of "plugin". +You can have multiple implementations in your container and want to have all of them injected. +Then you can request a list with all the bean implementations via the interface as in the following example:

+
+
+
+
  @Inject
+  private List<MyConverter> converters;
+
+
+
+

Your code may iterate over all plugins (converters) and apply them sequentially. +Please note that the injection will fail (at least in spring), when there is no bean available to inject. +So you do not get an empty list injected but will get an exception on startup.

+
+
+
+
Injecting one of multiple implementations
+
+

Another scenario is that you have multiple implementations in your container coexisting, but for injection you may want to choose a specific implementation. Here you could use the @Named annotation to specify a unique identifier for each implementation, which is called qualified injection:

+
+
+
+
@ApplicationScoped
+@Named("UserAuthenticator")
+public class UserAuthenticator implements Authenticator {
+  ...
+}
+@ApplicationScoped
+@Named("ServiceAuthenticator")
+public class ServiceAuthenticator implements Authenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  @Named("UserAuthenticator")
+  private Authenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  @Named("ServiceAuthenticator")
+  private Authenticator authenticator;
+  ...
+}
+
+
+
+

However, we discovered that this pattern is not so great: +The identifiers in the @Named annotation are just strings that could easily break. +You could use constants instead but still this is not the best solution.

+
+
+

In the end you can very much simplify this by just directly injecting the implementation instead:

+
+
+
+
@ApplicationScoped
+public class UserAuthenticator implements Authenticator {
+  ...
+}
+@ApplicationScoped
+public class ServiceAuthenticator implements Authenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  private UserAuthenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  private ServiceAuthenticator authenticator;
+  ...
+}
+
+
+
+

In case you want to strictly decouple from implementations, you can still create dedicated interfaces:

+
+
+
+
public interface UserAuthenticator extends Authenticator {}
+@ApplicationScoped
+public class UserAuthenticatorImpl implements UserAuthenticator {
+  ...
+}
+public interface ServiceAuthenticator extends Authenticator {}
+@ApplicationScoped
+public class ServiceAuthenticatorImpl implements ServiceAuthenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  private UserAuthenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  private ServiceAuthenticator authenticator;
+  ...
+}
+
+
+
+

However, as you can see this is again introducing additional boiler-plate code. +While the principle to separate API and implementation and strictly decouple from implementation is valuable in general, +you should always consider KISS, lean, and agile in contrast and balance pros and cons instead of blindly following dogmas.

+
+
+
+
+

1.20. Imports

+
+

Here are the import statements for the most important annotations for dependency injection

+
+
+
+
import javax.inject.Inject;
+import javax.inject.Named;
+import javax.enterprise.context.ApplicationScoped;
+// import javax.enterprise.context.RequestScoped;
+// import javax.enterprise.context.SessionScoped;
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+
+
+
+
+

1.21. Dependencies

+
+

Please note that with Jakarta EE the dependencies have changed. +When you want to start with Jakarta EE you should use these dependencies to get the annotations for dependency injection:

+
+
+
+
<!-- Basic injection annotations (JSR-330) -->
+<dependency>
+  <groupId>jakarta.inject</groupId>
+  <artifactId>jakarta.inject-api</artifactId>
+</dependency>
+<!-- Basic lifecycle and security annotations (JSR-250)-->
+<dependency>
+  <groupId>jakarta.annotation</groupId>
+  <artifactId>jakarta.annotation-api</artifactId>
+</dependency>
+<!-- Context and dependency injection API (JSR-365) -->
+<dependency>
+  <groupId>jakarta.enterprise</groupId>
+  <artifactId>jakarta.enterprise.cdi-api</artifactId>
+</dependency>
+
+
+
+

Please note that with quarkus you will get them as transitive dependencies out of the box. +The above Jakarta EE dependencies replace these JEE dependencies:

+
+
+
+
<!-- Basic injection annotations (JSR-330) -->
+<dependency>
+  <groupId>javax.inject</groupId>
+  <artifactId>javax.inject</artifactId>
+</dependency>
+<!-- Basic lifecycle and security annotations (JSR-250)-->
+<dependency>
+  <groupId>javax.annotation</groupId>
+  <artifactId>javax.annotation-api</artifactId>
+</dependency>
+<!-- Context and dependency injection API (JSR-365) -->
+<dependency>
+  <groupId>javax.enterprise</groupId>
+  <artifactId>cdi-api</artifactId>
+</dependency>
+
+
+ +
+
+

1.22. BLOB support

+
+

BLOB stands for Binary Large Object. A BLOB may be an image, an office document, ZIP archive or any other multimedia object. +Often these BLOBs are large. If this is the case you need to take care, that you do not copy all the blob data into your application heap, e.g. when providing them via a REST service. +This could easily lead to performance problems or out of memory errors. +A solution for that problem is "streaming" those BLOBs directly from the database to the client. To demonstrate how this can be accomplished, devonfw provides an example.

+
+
+ +
+

1.24. Common

+
+

In our coding-conventions we define a clear packaging and layering. +However, there is always cross-cutting code that does not belong to a specific layer such as generic helpers, general code for configuration or integration, etc. +Therefore, we define a package segment common that can be used as «layer» for such cross-cutting code. +Code from any other layer is allowed to access such common code (at least within the same component).

+
+
+ +
+
+

1.25. Java Persistence API

+
+

For mapping java objects to a relational database we use the Java Persistence API (JPA). +As JPA implementation we recommend to use Hibernate. For general documentation about JPA and Hibernate follow the links above as we will not replicate the documentation. Here you will only find guidelines and examples how we recommend to use it properly. The following examples show how to map the data of a database to an entity. As we use JPA we abstract from SQL here. However, you will still need a DDL script for your schema and during maintenance also database migrations. Please follow our SQL guide for such artifacts.

+
+
+
+

1.26. Entity

+
+

Entities are part of the persistence layer and contain the actual data. They are POJOs (Plain Old Java Objects) on which the relational data of a database is mapped and vice versa. The mapping is configured via JPA annotations (javax.persistence). Usually an entity class corresponds to a table of a database and a property to a column of that table. A persistent entity instance then represents a row of the database table.

+
+
+
A Simple Entity
+
+

The following listing shows a simple example:

+
+
+
+
@Entity
+@Table(name="TEXTMESSAGE")
+public class MessageEntity extends ApplicationPersistenceEntity implements Message {
+
+  private String text;
+
+  public String getText() {
+    return this.text;
+  }
+
+  public void setText(String text) {
+    this.text = text;
+  }
+ }
+
+
+
+

The @Entity annotation defines that instances of this class will be entities which can be stored in the database. The @Table annotation is optional and can be used to define the name of the corresponding table in the database. If it is not specified, the simple name of the entity class is used instead.

+
+
+

In order to specify how to map the attributes to columns we annotate the corresponding getter methods (technically, private field annotation is also possible, but the approaches can not be mixed). +The @Id annotation specifies that a property should be used as primary key. +With the help of the @Column annotation it is possible to define the name of the column that an attribute is mapped to as well as other aspects such as nullable or unique. If no column name is specified, the name of the property is used as default.

+
+
+

Note that every entity class needs a constructor with public or protected visibility that does not have any arguments. Moreover, neither the class nor its getters and setters may be final.

+
+
+

Entities should be simple POJOs and not contain business logic.

+
+
+
+
Entities and Datatypes
+
+

Standard datatypes like Integer, BigDecimal, String, etc. are mapped automatically by JPA. Custom datatypes are mapped as serialized BLOB by default, which is typically undesired. +In order to map atomic custom datatypes (implementations of `SimpleDatatype`) we implement an AttributeConverter. Here is a simple example:

+
+
+
+
@Converter(autoApply = true)
+public class MoneyAttributeConverter implements AttributeConverter<Money, BigDecimal> {
+
+  public BigDecimal convertToDatabaseColumn(Money attribute) {
+    return attribute.getValue();
+  }
+
+  public Money convertToEntityAttribute(BigDecimal dbData) {
+    return new Money(dbData);
+  }
+}
+
+
+
+

The annotation @Converter is detected by the JPA vendor if the annotated class is in the packages to scan. Further, autoApply = true implies that the converter is automatically used for all properties of the handled datatype. Therefore all entities with properties of that datatype will automatically be mapped properly (in our example Money is mapped as BigDecimal).

+
+
+

In case you have a composite datatype that you need to map to multiple columns the JPA does not offer a real solution. As a workaround you can use a bean instead of a real datatype and declare it as @Embeddable. If you are using Hibernate you can implement CompositeUserType. Via the @TypeDef annotation it can be registered to Hibernate. If you want to annotate the CompositeUserType implementation itself you also need another annotation (e.g. MappedSuperclass though not technically correct) so it is found by the scan.

+
+
+
Enumerations
+
+

By default JPA maps Enums via their ordinal. Therefore the database will only contain the ordinals (0, 1, 2, etc.). So, inside the database you can not easily understand their meaning. Using @Enumerated with EnumType.STRING allows to map the enum values to their name (Enum.name()). Both approaches are fragile when it comes to code changes and refactoring (if you change the order of the enum values or rename them) after the application is deployed to production. If you want to avoid this and get a robust mapping you can define a dedicated string in each enum value for database representation that you keep untouched. Then you treat the enum just like any other custom datatype.

+
+
+
+
BLOB
+
+

If binary or character large objects (BLOB/CLOB) should be used to store the value of an attribute, e.g. to store an icon, the @Lob annotation should be used as shown in the following listing:

+
+
+
+
@Lob
+public byte[] getIcon() {
+  return this.icon;
+}
+
+
+
+ + + + + +
+ + +Using a byte array will cause problems if BLOBs get large because the entire BLOB is loaded into the RAM of the server and has to be processed by the garbage collector. For larger BLOBs the type Blob and streaming should be used. +
+
+
+
+
public Blob getAttachment() {
+  return this.attachment;
+}
+
+
+
+
+
Date and Time
+
+

To store date and time related values, the temporal annotation can be used as shown in the listing below:

+
+
+
+
@Temporal(TemporalType.TIMESTAMP)
+public java.util.Date getStart() {
+  return start;
+}
+
+
+
+

Until Java 8 the java data type java.util.Date (or Jodatime) has to be used. +TemporalType defines the granularity. In this case, a precision of nanoseconds is used. If this granularity is not wanted, TemporalType.DATE can be used instead, which only has a granularity of days. +Mixing these two granularities can cause problems when comparing one value to another. This is why we only use TemporalType.TIMESTAMP.

+
+
+
+
QueryDSL and Custom Types
+
+

Using the Aliases API of QueryDSL might result in an InvalidDataAccessApiUsageException when using custom datatypes in entity properties. This can be circumvented in two steps:

+
+
+
    +
  1. +

    Ensure you have the following maven dependencies in your project (core module) to support custom types via the Aliases API:

    +
    +
    +
    <dependency>
    +  <groupId>org.ow2.asm</groupId>
    +  <artifactId>asm</artifactId>
    +</dependency>
    +<dependency>
    +  <groupId>cglib</groupId>
    +  <artifactId>cglib</artifactId>
    +</dependency>
    +
    +
    +
  2. +
  3. +

    Make sure that all your custom types used in entities provide a no-argument constructor with at least visibility level protected.

    +
  4. +
+
+
+
+
+
Primary Keys
+
+

We only use simple Long values as primary keys (IDs). By default it is auto generated (@GeneratedValue(strategy=GenerationType.AUTO)). This is already provided by the class com.devonfw.<projectName>.general.dataaccess.api.AbstractPersistenceEntity within the classic project structure respectively com.devonfw.<projectName>.general.domain.model.AbstractPersistenceEntity within the modern project structure, that you can extend. +In case you have business oriented keys (often as String), you can define an additional property for it and declare it as unique (@Column(unique=true)). +Be sure to include "AUTO_INCREMENT" in your sql table field ID to be able to persist data (or similar for other databases).

+
+
+
+
+

1.27. Relationships

+
+
n:1 and 1:1 Relationships
+
+

Entities often do not exist independently but are in some relation to each other. For example, for every period of time one of the StaffMember’s of the restaurant example has worked, which is represented by the class WorkingTime, there is a relationship to this StaffMember.

+
+
+

The following listing shows how this can be modeled using JPA:

+
+
+
+
...
+
+@Entity
+public class WorkingTimeEntity {
+   ...
+
+   private StaffMemberEntity staffMember;
+
+   @ManyToOne
+   @JoinColumn(name="STAFFMEMBER")
+   public StaffMemberEntity getStaffMember() {
+      return this.staffMember;
+   }
+
+   public void setStaffMember(StaffMemberEntity staffMember) {
+      this.staffMember = staffMember;
+   }
+}
+
+
+
+

To represent the relationship, an attribute of the type of the corresponding entity class that is referenced has been introduced. The relationship is a n:1 relationship, because every WorkingTime belongs to exactly one StaffMember, but a StaffMember usually worked more often than once.
+This is why the @ManyToOne annotation is used here. For 1:1 relationships the @OneToOne annotation can be used which works basically the same way. To be able to save information about the relation in the database, an additional column in the corresponding table of WorkingTime is needed which contains the primary key of the referenced StaffMember. With the name element of the @JoinColumn annotation it is possible to specify the name of this column.

+
+
+
+
1:n and n:m Relationships
+
+

The relationship of the example listed above is currently a unidirectional one, as there is a getter method for retrieving the StaffMember from the WorkingTime object, but not vice versa.

+
+
+

To make it a bidirectional one, the following code has to be added to StaffMember:

+
+
+
+
  private Set<WorkingTimeEntity> workingTimes;
+
+  @OneToMany(mappedBy="staffMember")
+  public Set<WorkingTimeEntity> getWorkingTimes() {
+    return this.workingTimes;
+  }
+
+  public void setWorkingTimes(Set<WorkingTimeEntity> workingTimes) {
+    this.workingTimes = workingTimes;
+  }
+
+
+
+

To make the relationship bidirectional, the tables in the database do not have to be changed. Instead the column that corresponds to the attribute staffMember in class WorkingTime is used, which is specified by the mappedBy element of the @OneToMany annotation. Hibernate will search for corresponding WorkingTime objects automatically when a StaffMember is loaded.

+
+
+

The problem with bidirectional relationships is that if a WorkingTime object is added to the set or list workingTimes in StaffMember, this does not have any effect in the database unless +the staffMember attribute of that WorkingTime object is set. That is why devon4j advises not to use bidirectional relationships but to use queries instead. How to do this is shown here. If a bidirectional relationship should be used nevertheless, appropriate add and remove methods must be used.

+
+
+

For 1:n and n:m relations, devon4j demands that (unordered) Sets and no other collection types are used, as shown in the listing above. The only exception is whenever an ordering is really needed; then (sorted) lists can be used.
+For example, if WorkingTime objects should be sorted by their start time, this could be done like this:

+
+
+
+
  private List<WorkingTimeEntity> workingTimes;
+
+  @OneToMany(mappedBy = "staffMember")
+  @OrderBy("startTime asc")
+  public List<WorkingTimeEntity> getWorkingTimes() {
+    return this.workingTimes;
+  }
+
+  public void setWorkingTimes(List<WorkingTimeEntity> workingTimes) {
+    this.workingTimes = workingTimes;
+  }
+
+
+
+

The value of the @OrderBy annotation consists of an attribute name of the class followed by asc (ascending) or desc (descending).

+
+
+

To store information about a n:m relationship, a separate table has to be used, as one column cannot store several values (at least if the database schema is in first normal form).
+For example if one wanted to extend the example application so that all ingredients of one FoodDrink can be saved and to model the ingredients themselves as entities (e.g. to store additional information about them), this could be modeled as follows (extract of class FoodDrink):

+
+
+
+
  private Set<IngredientEntity> ingredients;
+
+  @ManyToMany()
+  @JoinTable
+  public Set<IngredientEntity> getIngredients() {
+    return this.ingredients;
+  }
+
+  public void setOrders(Set<IngredientEntity> ingredients) {
+    this.ingredients = ingredients;
+  }
+
+
+
+

Information about the relation is stored in a separate table that has to have two columns, one for referencing the FoodDrink, the other one for referencing the Ingredient. Note that the @JoinTable annotation is not needed in this case because a separate table is the default solution here (same for n:m relations) unless there is a mappedBy element specified.

+
+
+

For 1:n relationships this solution has the disadvantage that more joins (in the database system) are needed to get a Bill with all the Orders it refers to. This might have a negative impact on performance so that the solution to store a reference to the Bill row/entity in the Order’s table is probably the better solution in most cases.

+
+
+

Note that bidirectional n:m relationships are not allowed for applications based on devon4j. Instead a third entity has to be introduced, which "represents" the relationship (it has two n:1 relationships).

+
+
+
+
Eager vs. Lazy Loading
+
+

Using JPA it is possible to use either lazy or eager loading. Eager loading means that for entities retrieved from the database, other entities that are referenced by these entities are also retrieved, whereas lazy loading means that this is only done when they are actually needed, i.e. when the corresponding getter method is invoked.

+
+
+

Applications based on devon4j are strongly advised to always use lazy loading. The JPA defaults are:

+
+
+
    +
  • +

    @OneToMany: LAZY

    +
  • +
  • +

    @ManyToMany: LAZY

    +
  • +
  • +

    @ManyToOne: EAGER

    +
  • +
  • +

    @OneToOne: EAGER

    +
  • +
+
+
+

So at least for @ManyToOne and @OneToOne you always need to override the default by providing fetch = FetchType.LAZY.

+
+
+ + + + + +
+ + +Please read the performance guide. +
+
+
+
+
Cascading Relationships
+
+

For relations it is also possible to define whether operations are cascaded (like a recursion) to the related entity. +By default, nothing is done in these situations. This can be changed by using the cascade property of the annotation that specifies the relation type (@OneToOne, @ManyToOne, @OneToMany, @ManyToMany). This property accepts a CascadeType that offers the following options:

+
+
+
    +
  • +

    PERSIST (for EntityManager.persist, relevant to insert transient entities into DB)

    +
  • +
  • +

    REMOVE (for EntityManager.remove to delete entity from DB)

    +
  • +
  • +

    MERGE (for EntityManager.merge)

    +
  • +
  • +

    REFRESH (for EntityManager.refresh)

    +
  • +
  • +

    DETACH (for EntityManager.detach)

    +
  • +
  • +

    ALL (cascade all of the above operations)

    +
  • +
+
+
+

See here for more information.

+
+
+
+
Typesafe Foreign Keys using IdRef
+
+

For simple usage you can use Long for all your foreign keys. +However, as an optional pattern for advanced and type-safe usage, we offer IdRef.

+
+
+
+
+

1.28. Embeddable

+
+

An embeddable Object is a way to group properties of an entity into a separate Java (child) object. Unlike with relationships, the embeddable is not a separate entity and its properties are stored (embedded) in the same table together with the entity. This is helpful to structure and reuse groups of properties.

+
+
+

The following example shows an Address implemented as an embeddable class:

+
+
+
+
@Embeddable
+public class AddressEmbeddable {
+
+  private String street;
+  private String number;
+  private Integer zipCode;
+  private String city;
+
+  @Column(name="STREETNUMBER")
+  public String getNumber() {
+    return number;
+  }
+
+  public void setNumber(String number) {
+    this.number = number;
+  }
+
+  ...  // other getter and setter methods, equals, hashCode
+}
+
+
+
+

As you can see an embeddable is similar to an entity class, but with an @Embeddable annotation instead of the @Entity annotation and without primary key or modification counter. +An Embeddable does not exist on its own but in the context of an entity. +As a simplification Embeddables do not require a separate interface and ETO as the bean-mapper will create a copy automatically when converting the owning entity to an ETO. +However, in this case the embeddable becomes part of your api module that therefore needs a dependency on the JPA.

+
+
+

In addition to that the methods equals(Object) and hashCode() need to be implemented as this is required by Hibernate (it is not required for entities because they can be unambiguously identified by their primary key). For some hints on how to implement the hashCode() method please have a look here.

+
+
+

Using this AddressEmbeddable inside an entity class can be done like this:

+
+
+
+
  private AddressEmbeddable address;
+
+  @Embedded
+  public AddressEmbeddable getAddress() {
+    return this.address;
+  }
+
+  public void setAddress(AddressEmbeddable address) {
+    this.address = address;
+  }
+}
+
+
+
+

The @Embedded annotation needs to be used for embedded attributes. Note that if in all columns of the embeddable (here Address) are null, then the embeddable object itself is also null inside the entity. This has to be considered to avoid NullPointerException’s. Further this causes some issues with primitive types in embeddable classes that can be avoided by only using object types instead.

+
+
+
+

1.29. Inheritance

+
+

Just like normal java classes, entity classes can inherit from others. The only difference is that you need to specify how to map a class hierarchy to database tables. Generic abstract super-classes for entities can simply be annotated with @MappedSuperclass.

+
+
+

For all other cases the JPA offers the annotation @Inheritance with the property strategy taking an InheritanceType that has the following options:

+
+
+
+
+
    +
  • +

    SINGLE_TABLE: This strategy uses a single table that contains all columns needed to store all entity-types of the entire inheritance hierarchy. If a column is not needed for an entity because of its type, there is a null value in this column. An additional column is introduced, which denotes the type of the entity (called dtype).

    +
  • +
  • +

    TABLE_PER_CLASS: For each concrete entity class there is a table in the database that can store such an entity with all its attributes. An entity is only saved in the table corresponding to its most concrete type. To get all entities of a super type, joins are needed.

    +
  • +
  • +

    JOINED: In this case there is a table for every entity class including abstract classes, which contains only the columns for the persistent properties of that particular class. Additionally there is a primary key column in every table. To get an entity of a class that is a subclass of another one, joins are needed.

    +
  • +
+
+
+
+
+

Each of the three approaches has its advantages and drawbacks, which are discussed in detail here. In most cases, the first one should be used, because it is usually the fastest way to do the mapping, as no joins are needed when retrieving, searching or persisting entities. Moreover it is rather simple and easy to understand. +One major disadvantage is that the first approach could lead to a table with a lot of null values, which might have a negative impact on the database size.

+
+
+

The inheritance strategy has to be annotated to the top-most entity of the class hierarchy (where @MappedSuperclass classes are not considered) like in the following example:

+
+
+
+
@Entity
+@Inheritance(strategy=InheritanceType.SINGLE_TABLE)
+public abstract class MyParentEntity extends ApplicationPersistenceEntity implements MyParent {
+  ...
+}
+
+@Entity
+public class MyChildEntity extends MyParentEntity implements MyChild {
+  ...
+}
+
+@Entity
+public class MyOtherEntity extends MyParentEntity implements MyChild {
+  ...
+}
+
+
+
+

As a best practice we advise you to avoid entity hierarchies at all where possible and otherwise to keep the hierarchy as small as possible. In order to just ensure reuse or establish a common API you can consider a shared interface, a @MappedSuperclass or an @Embeddable instead of an entity hierarchy.

+
+
+
+

1.30. Repositories and DAOs

+
+

For each entity a code unit is created that groups all database operations for that entity. We recommend to use spring-data repositories for that as it is most efficient for developers. As an alternative there is still the classic approach using DAOs.

+
+
+
Concurrency Control
+
+

The concurrency control defines the way concurrent access to the same data of a database is handled. When several users (or threads of application servers) concurrently access a database, anomalies may happen, e.g. a transaction is able to see changes from another transaction although that one did not yet commit these changes. Most of these anomalies are automatically prevented by the database system, depending on the isolation level (property hibernate.connection.isolation in the jpa.xml, see here, or quarkus.datasource.jdbc.transaction-isolation-level in the application.properties).

+
+
+

Another anomaly is when two stakeholders concurrently access a record, do some changes and write them back to the database. The JPA addresses this with different locking strategies (see here).

+
+
+

As a best practice we are using optimistic locking for regular end-user services (OLTP) and pessimistic locking for batches.

+
+
+
+
Optimistic Locking
+
+

The class com.devonfw.module.jpa.persistence.api.AbstractPersistenceEntity already provides optimistic locking via a modificationCounter with the @Version annotation. Therefore JPA takes care of optimistic locking for you. When entities are transferred to clients, modified and sent back for update you need to ensure the modificationCounter is part of the game. If you follow our guides about transfer-objects and services this will also work out of the box. +You only have to care about two things:

+
+
+
    +
  • +

    How to deal with optimistic locking in relationships?
    +Assume an entity A contains a collection of B entities. Should there be a locking conflict if one user modifies an instance of A while another user in parallel modifies an instance of B that is contained in the other instance? To address this, take a look at FeatureForceIncrementModificationCounter.

    +
  • +
  • +

    What should happen in the UI if an OptimisticLockException occurred?
    +According to KISS our recommendation is that the user gets an error displayed that tells him to do his change again on the recent data. Try to design your system and the work processing in a way to keep such conflicts rare and you are fine.

    +
  • +
+
+
+
+
Pessimistic Locking
+
+

For back-end services and especially for batches optimistic locking is not suitable. A human user shall not cause a large batch process to fail because he was editing the same entity. Therefore such use-cases use pessimistic locking what gives them a kind of priority over the human users. +In your DAO implementation you can provide methods that do pessimistic locking via EntityManager operations that take a LockModeType. Here is a simple example:

+
+
+
+
  getEntityManager().lock(entity, LockModeType.READ);
+
+
+
+

When using the lock(Object, LockModeType) method with LockModeType.READ, Hibernate will issue a SELECT …​ FOR UPDATE. This means that no one else can update the entity (see here for more information on the statement). If LockModeType.WRITE is specified, Hibernate issues a SELECT …​ FOR UPDATE NOWAIT instead, which has the same meaning as the statement above, but if there is already a lock, the program will not wait for this lock to be released. Instead, an exception is raised.
+Use one of the types if you want to modify the entity later on; for read-only access no lock is required.

+
+
+

As you might have noticed, the behavior of Hibernate deviates from what one would expect by looking at the LockModeType (especially LockModeType.READ should not cause a SELECT …​ FOR UPDATE to be issued). The framework actually deviates from what is specified in the JPA for unknown reasons.

+
+
+
+
+

1.31. Database Auditing

+ +
+
+

1.32. Testing Data-Access

+
+

For testing of Entities and Repositories or DAOs see testing guide.

+
+
+
+

1.33. Principles

+
+

We strongly recommend these principles:

+
+
+
    +
  • +

    Use the JPA where ever possible and use vendor (hibernate) specific features only for situations when JPA does not provide a solution. In the latter case consider first if you really need the feature.

    +
  • +
  • +

    Create your entities as simple POJOs and use JPA to annotate the getters in order to define the mapping.

    +
  • +
  • +

    Keep your entities simple and avoid putting advanced logic into entity methods.

    +
  • +
+
+
+
+

1.34. Database Configuration

+
+

For details on the configuration of the database connection and database logging of the individual framework, please refer to the respective configuration guide.

+
+
+

For spring see here.

+
+
+

For quarkus see here.

+
+
+
Database Migration
+ +
+
+
Pooling
+
+

You typically want to pool JDBC connections to boost performance by recycling previous connections. There are many libraries available to do connection pooling. We recommend to use HikariCP. For Oracle RDBMS see here.

+
+
+
+
+

1.35. Security

+
+
SQL-Injection
+
+

A common security threat is SQL-injection. Never build queries with string concatenation or your code might be vulnerable as in the following example:

+
+
+
+
  String query = "Select op from OrderPosition op where op.comment = " + userInput;
+  return getEntityManager().createQuery(query).getResultList();
+
+
+
+

Via the parameter userInput an attacker can inject SQL (JPQL) and execute arbitrary statements in the database causing extreme damage.

+
+
+

In order to prevent such injections you have to strictly follow our rules for queries:

+
+
+ +
+
+
+
Limited Permissions for Application
+
+

We suggest that you operate your application with a database user that has limited permissions so he can not modify the SQL schema (e.g. drop tables). For initializing the schema (DDL) or to do schema migrations use a separate user that is not used by the application itself.

+
+ +
+
+
Queries
+
+

The Java Persistence API (JPA) defines its own query language, the java persistence query language (JPQL) (see also JPQL tutorial), which is similar to SQL but operates on entities and their attributes instead of tables and columns.

+
+
+

The simplest CRUD-Queries (e.g. find an entity by its ID) are already build in the devonfw CRUD functionality (via Repository or DAO). For other cases you need to write your own query. We distinguish between static and dynamic queries. Static queries have a fixed JPQL query string that may only use parameters to customize the query at runtime. Instead, dynamic queries can change their clauses (WHERE, ORDER BY, JOIN, etc.) at runtime depending on the given search criteria.

+
+
+
+
Static Queries
+
+

E.g. to find all DishEntries (from MTS sample app) that have a price not exceeding a given maxPrice we write the following JPQL query:

+
+
+
+
SELECT dish FROM DishEntity dish WHERE dish.price <= :maxPrice
+
+
+
+

Here dish is used as alias (variable name) for our selected DishEntity (what refers to the simple name of the Java entity class). With dish.price we are referring to the Java property price (getPrice()/setPrice(…​)) in DishEntity. A named variable provided from outside (the search criteria at runtime) is specified with a colon (:) as prefix. Here with :maxPrice we reference to a variable that needs to be set via query.setParameter("maxPrice", maxPriceValue). JPQL also supports indexed parameters (?) but they are discouraged because they easily cause confusion and mistakes.

+
+
+
Using Queries to Avoid Bidirectional Relationships
+
+

With the usage of queries it is possible to avoid exposing relationships or modelling bidirectional relationships, which have some disadvantages (see relationships). This is especially desired for relationships between entities of different business components. +So for example to get all OrderLineEntities for a specific OrderEntity without using the orderLines relation from OrderEntity the following query could be used:

+
+
+
+
SELECT line FROM OrderLineEntity line WHERE line.order.id = :orderId
+
+
+
+
+
+
Dynamic Queries
+
+

For dynamic queries we use QueryDSL. It allows to implement queries in a powerful but readable and type-safe way (unlike Criteria API). If you already know JPQL you will quickly be able to read and write QueryDSL code. It feels like JPQL but implemented in Java instead of plain text.

+
+
+

Here is an example from our sample application:

+
+
+
+
  public List<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    QDishEntity dish = QDishEntity.dishEntity;
+    JPAQuery<DishEntity> query = new JPAQuery<OrderEntity>(getEntityManager());
+    query.from(dish);
+
+    Range<BigDecimal> priceRange = criteria.getPriceRange();
+    if (priceRange != null) {
+      BigDecimal min = priceRange.getMin();
+      if (min != null) {
+        query.where(dish.price.goe(min));
+      }
+      BigDecimal max = priceRange.getMax();
+      if (max != null) {
+        query.where(dish.price.loe(max));
+      }
+    }
+    String name = criteria.getName();
+    if ((name != null) && (!name.isEmpty())) {
+      query.where(dish.name.eq(name));
+    }
+    return query.fetch();
+  }
+
+
+
+

In this example we use the so called Q-types (QDishEntity). These are classes generated at build time by the QueryDSL annotation processor from entity classes. The Q-type classes can be used as static types representative of the original entity class.

+
+
+

For spring, devon4j provides another approach that you can use for your Spring applications to implement QueryDSL logic without having to use these metaclasses. An example can be found here.

+
+
+
+
Using Wildcards
+
+

For flexible queries it is often required to allow wildcards (especially in dynamic queries). While users intuitively expect glob syntax the SQL and JPQL standards work different. Therefore a mapping is required. devonfw provides this on a lower level by LikePatternSyntax and on a high level by QueryUtil (see QueryHelper.newStringClause(…​)).

+
+
+
+
Pagination
+
+

When dealing with large amounts of data, an efficient method of retrieving the data is required. Fetching the entire data set each time would be too time consuming. Instead, Paging is used to process only small subsets of the entire data set.

+
+
+

If you are using Spring Data repositories you will get pagination support out of the box by providing the interfaces Page and Pageable:

+
+
+
Listing 1. repository
+
+
Page<DishEntity> findAll(Pageable pageable);
+
+
+
+

Then you can create a Pageable object and pass it to the method call as follows:

+
+
+
+
int page = criteria.getPageNumber();
+int size = criteria.getPageSize();
+Pageable pageable = PageRequest.of(page, size);
+Page<DishEntity> dishes = dishRepository.findAll(pageable);
+
+
+
+
Paging with QueryDSL
+
+

Pagination is also supported for dynamic queries with QueryDSL:

+
+
+
+
  public Page<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    QDishEntity dish = QDishEntity.dishEntity;
+    JPAQuery<DishEntity> query = new JPAQuery<DishEntity>(getEntityManager());
+    query.from(dish);
+
+    // conditions
+
+    int page = criteria.getPageNumber();
+    int size = criteria.getPageSize();
+    Pageable pageable = PageRequest.of(page, size);
+    query.offset(pageable.getOffset());
+    query.limit(pageable.getPageSize());
+
+    List<DishEntity> dishes = query.fetch();
+    return new PageImpl<>(dishes, pageable, dishes.size());
+  }
+
+
+
+
+
Pagination example
+
+

For the table entity we can make a search request by accessing the REST endpoint with pagination support like in the following examples:

+
+
+
+
POST mythaistar/services/rest/tablemanagement/v1/table/search
+{
+  "pagination": {
+    "size":2,
+    "total":true
+  }
+}
+
+//Response
+{
+    "pagination": {
+        "size": 2,
+        "page": 1,
+        "total": 11
+    },
+    "result": [
+        {
+            "id": 101,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 1,
+            "state": "OCCUPIED"
+        },
+        {
+            "id": 102,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 2,
+            "state": "FREE"
+        }
+    ]
+}
+
+
+
+ + + + + +
+ + +As we are requesting with the total property set to true the server responds with the total count of rows for the query. +
+
+
+

For retrieving a concrete page, we provide the page attribute with the desired value. Here we also left out the total property so the server doesn’t incur on the effort to calculate it:

+
+
+
+
POST mythaistar/services/rest/tablemanagement/v1/table/search
+{
+  "pagination": {
+    "size":2,
+    "page":2
+  }
+}
+
+//Response
+
+{
+    "pagination": {
+        "size": 2,
+        "page": 2,
+        "total": null
+    },
+    "result": [
+        {
+            "id": 103,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 3,
+            "state": "FREE"
+        },
+        {
+            "id": 104,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 4,
+            "state": "FREE"
+        }
+    ]
+}
+
+
+
+
+
Pagination in devon4j-spring
+
+

For spring applications, devon4j also offers its own solution for pagination. You can find an example of this here.

+
+
+
+
+
Query Meta-Parameters
+
+

Queries can have meta-parameters that are provided via SearchCriteriaTo. Besides paging (see above) we also get timeout support.

+
+
+
+
Advanced Queries
+
+

Writing queries can sometimes get rather complex. The current examples given above only showed very simple basics. Within this topic a lot of advanced features need to be considered like:

+
+
+ +
+
+

This list is just containing the most important aspects. As we can not cover all these topics here, they are linked to external documentation that can help and guide you.

+
+ +
+
+
Spring Data
+
+

Spring Data JPA is supported by both Spring and Quarkus. However, in Quarkus this approach still has some limitations. For detailed information, see the official Quarkus Spring Data guide.

+
+
+
+
Motivation
+
+

The benefits of Spring Data are (for examples and explanations see next sections):

+
+
+
    +
  • +

    All you need is one single repository interface for each entity. No need for a separate implementation or other code artifacts like XML descriptors, NamedQueries class, etc.

    +
  • +
  • +

    You have all information together in one place (the repository interface) that actually belong together (where as in the classic approach you have the static queries in an XML file, constants to them in NamedQueries class and referencing usages in DAO implementation classes).

    +
  • +
  • +

    Static queries are most simple to realize as you do not need to write any method body. This means you can develop faster.

    +
  • +
  • +

    Support for paging is already built-in. Again, for static query methods there is nothing you have to do except using the paging objects in the signature.

    +
  • +
  • +

    Still you have the freedom to write custom implementations via default methods within the repository interface (e.g. for dynamic queries).

    +
  • +
+
+
+
+
Dependency
+
+

In case you want to switch to or add Spring Data support to your Spring or Quarkus application, all you need is to add the respective maven dependency:

+
+
+
Listing 2. spring
+
+
<dependency>
+  <groupId>org.springframework.boot</groupId>
+  <artifactId>spring-boot-starter-data-jpa</artifactId>
+</dependency>
+
+
+
+
Listing 3. quarkus
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-spring-data-jpa</artifactId>
+</dependency>
+
+
+
+
+
Repository
+
+

For each entity «Entity»Entity an interface is created with the name «Entity»Repository extending JpaRepository. +Such a repository is the analogy to a Data-Access-Object (DAO) used in the classic approach or when Spring Data is not an option.

+
+
+
Listing 4. Repository
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long> {
+
+}
+
+
+
+

The Spring Data repository provides some basic implementations for accessing data, e.g. returning all instances of a type (findAll) or returning an instance by its ID (findById).

+
+
+
+
Custom method implementation
+
+

In addition, repositories can be enriched with additional functionality, e.g. to add QueryDSL functionality or to override the default implementations, by using so called repository fragments:

+
+
+
Example
+
+

The following example shows how to write such a repository:

+
+
+
Listing 5. Repository
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long>, ProductFragment {
+
+  @Query("SELECT product FROM ProductEntity product" //
+      + " WHERE product.title = :title")
+  List<ProductEntity> findByTitle(@Param("title") String title);
+
+  @Query("SELECT product FROM ProductEntity product" //
+      + " WHERE product.title = :title")
+  Page<ProductEntity> findByTitlePaginated(@Param("title") String title, Pageable pageable);
+}
+
+
+
+
Listing 6. Repository fragment
+
+
public interface ProductFragment {
+  Page<ProductEntity> findByCriteria(ProductSearchCriteriaTo criteria);
+}
+
+
+
+
Listing 7. Fragment implementation
+
+
public class ProductFragmentImpl implements ProductFragment {
+  @Inject
+  EntityManager entityManager;
+
+  public Page<ProductEntity> findByCriteria(ProductSearchCriteriaTo criteria) {
+    QProductEntity product = QProductEntity.productEntity;
+    JPAQuery<ProductEntity> query = new JPAQuery<ProductEntity>(this.entityManager);
+    query.from(product);
+
+    String title = criteria.getTitle();
+    if ((title != null) && !title.isEmpty()) {
+      query.where(product.title.eq(title));
+    }
+
+    List<ProductEntity> products = query.fetch();
+    return new PageImpl<>(products, PageRequest.of(criteria.getPageNumber(), criteria.getPageSize()), products.size());
+  }
+}
+
+
+
+

This ProductRepository has the following features:

+
+
+
    +
  • +

    CRUD support from Spring Data (see JavaDoc for details).

    +
  • +
  • +

    Support for QueryDSL integration, paging and more.

    +
  • +
  • +

    A static query method findByTitle to find all ProductEntity instances from DB that have the given title. Please note the @Param annotation that links the method parameter with the variable inside the query (:title).

    +
  • +
  • +

    The same with pagination support via findByTitlePaginated method.

    +
  • +
  • +

    A dynamic query method findByCriteria showing the QueryDSL and paging integration into Spring via a fragment implementation.

    +
  • +
+
+
+

You can find an implementation of this ProductRepository in our Quarkus reference application.

+
+
+ + + + + +
+ + +In Quarkus, native and named queries via the @Query annotation are currently not supported +
+
+
+
+
Integration of Spring Data in devon4j-spring
+
+

For Spring applications, devon4j offers a proprietary solution that integrates seamlessly with QueryDSL and uses default methods instead of the fragment approach. A separate guide for this can be found here.

+
+
+
+
+
Drawbacks
+
+

Spring Data also has some drawbacks:

+
+
+
    +
  • +

    Some kind of magic behind the scenes that are not so easy to understand. So in case you want to extend all your repositories without providing the implementation via a default method in a parent repository interface you need to deep-dive into Spring Data. We assume that you do not need that and hope what Spring Data and devon already provides out-of-the-box is already sufficient.

    +
  • +
  • +

    The Spring Data magic also includes guessing the query from the method name. This is not easy to understand and especially to debug. Our suggestion is not to use this feature at all and either provide a @Query annotation or an implementation via default method.

    +
  • +
+
+
+
+
Limitations in Quarkus
+
+
    +
  • +

    Native and named queries are not supported using @Query annotation. You will receive something like: Build step io.quarkus.spring.data.deployment.SpringDataJPAProcessor#build threw an exception: java.lang.IllegalArgumentException: Attribute nativeQuery of @Query is currently not supported

    +
  • +
  • +

    Customizing the base repository for all repository interfaces in the code base, which is done in Spring Data by registering a class that extends SimpleJpaRepository

    +
  • +
+
+ +
+
+
Data Access Object
+
+

The Data Access Objects (DAOs) are part of the persistence layer. +They are responsible for a specific entity and should be named «Entity»Dao and «Entity»DaoImpl. +The DAO offers the so called CRUD-functionalities (create, retrieve, update, delete) for the corresponding entity. +Additionally a DAO may offer advanced operations such as query or locking methods.

+
+
+
+
DAO Interface
+
+

For each DAO there is an interface named «Entity»Dao that defines the API. For CRUD support and common naming we derive it from the ApplicationDao interface that comes with the devon application template:

+
+
+
+
public interface MyEntityDao extends ApplicationDao<MyEntity> {
+  List<MyEntity> findByCriteria(MyEntitySearchCriteria criteria);
+}
+
+
+
+

All CRUD operations are inherited from ApplicationDao so you only have to declare the additional methods.

+
+
+
+
DAO Implementation
+
+

Implementing a DAO is quite simple. We create a class named «Entity»DaoImpl that extends ApplicationDaoImpl and implements your «Entity»Dao interface:

+
+
+
+
public class MyEntityDaoImpl extends ApplicationDaoImpl<MyEntity> implements MyEntityDao {
+
+  public List<MyEntity> findByCriteria(MyEntitySearchCriteria criteria) {
+    TypedQuery<MyEntity> query = createQuery(criteria, getEntityManager());
+    return query.getResultList();
+  }
+  ...
+}
+
+
+
+

Again you only need to implement the additional non-CRUD methods that you have declared in your «Entity»Dao interface. +In the DAO implementation you can use the method getEntityManager() to access the EntityManager from the JPA. You will need the EntityManager to create and execute queries.

+
+
+
Static queries for DAO Implementation
+
+

All static queries are declared in the file src\main\resources\META-INF\orm.xml:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<entity-mappings version="1.0" xmlns="http://java.sun.com/xml/ns/persistence/orm" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://java.sun.com/xml/ns/persistence/orm http://java.sun.com/xml/ns/persistence/orm_1_0.xsd">
+  <named-query name="find.dish.with.max.price">
+    <query><![CDATA[SELECT dish FROM DishEntity dish WHERE dish.price <= :maxPrice]]></query>
+  </named-query>
+  ...
+</entity-mappings>
+
+
+
+

When your application is started, all these static queries will be created as prepared statements. This allows better performance and also ensures that you get errors for invalid JPQL queries when you start your app rather than later when the query is used.

+
+
+

To avoid redundant occurrences of the query name (find.dish.with.max.price) we define a constant for each named query:

+
+
+
+
public class NamedQueries {
+  public static final String FIND_DISH_WITH_MAX_PRICE = "find.dish.with.max.price";
+}
+
+
+
+

Note that changing the name of the java constant (FIND_DISH_WITH_MAX_PRICE) can be done easily with refactoring. Further you can trace where the query is used by searching the references of the constant.

+
+
+

The following listing shows how to use this query:

+
+
+
+
public List<DishEntity> findDishByMaxPrice(BigDecimal maxPrice) {
+  Query query = getEntityManager().createNamedQuery(NamedQueries.FIND_DISH_WITH_MAX_PRICE);
+  query.setParameter("maxPrice", maxPrice);
+  return query.getResultList();
+}
+
+
+
+

Via EntityManager.createNamedQuery(String) we create an instance of Query for our predefined static query. +Next we use setParameter(String, Object) to provide a parameter (maxPrice) to the query. This has to be done for all parameters of the query.

+
+
+

Note that using the createQuery(String) method, which takes the entire query as string (that may already contain the parameter) is not allowed to avoid SQL injection vulnerabilities. +When the method getResultList() is invoked, the query is executed and the result is delivered as List. As an alternative, there is a method called getSingleResult(), which returns the entity if the query returned exactly one and throws an exception otherwise.

+
+ +
+
+
+
JPA Performance
+
+

When using JPA the developer sometimes does not see or understand where and when statements to the database are triggered.

+
+
+
+
+

Establishing expectations Developers shouldn’t expect to sprinkle magic pixie dust on POJOs in hopes they will become persistent.

+
+
+
+— Dan Allen
+https://epdf.tips/seam-in-action.html +
+
+
+

So in case you do not understand what is going on under the hood of JPA, you will easily run into performance issues due to lazy loading and other effects.

+
+
+
+
N plus 1 Problem
+
+

The most prominent phenomenon is called the N+1 Problem. +We use entities from our MTS demo app as an example to explain the problem. +There is a DishEntity that has a @ManyToMany relation to +IngredientEntity. +Now we assume that we want to iterate all ingredients for a dish like this:

+
+
+
+
DishEntity dish = dao.findDishById(dishId);
+BigDecimal priceWithAllExtras = dish.getPrice();
+for (IngredientEntity ingredient : dish.getExtras()) {
+  priceWithAllExtras = priceWithAllExtras.add(ingredient.getPrice());
+}
+
+
+
+

Now dish.getExtras() is loaded lazy. Therefore the JPA vendor will provide a list with lazy initialized instances of IngredientEntity that only contain the ID of that entity. Now with every call of ingredient.getPrice() we technically trigger an SQL query statement to load the specific IngredientEntity by its ID from the database. +Now findDishById caused 1 initial query statement and for any number N of ingredients we are causing an additional query statement. This makes a total of N+1 statements. As causing statements to the database is an expensive operation with a lot of overhead (creating connection, etc.) this ends in bad performance and is therefore a problem (the N+1 Problem).

+
+
+
+
Solving N plus 1 Problem
+
+

To solve the N+1 Problem you need to change your code to only trigger a single statement instead. This can be achieved in various ways. The most universal solution is to use FETCH JOIN in order to pre-load the nested N child entities into the first level cache of the JPA vendor implementation. This will behave very similar as if the @ManyToMany relation to IngredientEntity was having FetchType.EAGER but only for the specific query and not in general. Because changing @ManyToMany to FetchType.EAGER would cause bad performance for other usecases where only the dish but not its extra ingredients are needed. For this reason all relations, including @OneToOne should always be FetchType.LAZY. Back to our example we simply replace dao.findDishById(dishId) with dao.findDishWithExtrasById(dishId) that we implement by the following JPQL query:

+
+
+
+
SELECT dish FROM DishEntity dish
+  LEFT JOIN FETCH dish.extras
+  WHERE dish.id = :dishId
+
+
+
+

The rest of the code does not have to be changed but now dish.getExtras() will get the IngredientEntity from the first level cache where it was fetched by the initial query above.

+
+
+

Please note that if you only need the sum of the prices from the extras you can also create a query using an aggregator function:

+
+
+
+
SELECT sum(dish.extras.price) FROM DishEntity dish
+
+
+
+

As you can see you need to understand the concepts in order to get good performance.

+
+
+

There are many advanced topics such as creating database indexes or calculating statistics for the query optimizer to get the best performance. For such advanced topics we recommend to have a database expert in your team that cares about such things. However, understanding the N+1 Problem and its solutions is something that every Java developer in the team needs to understand.

+
+ +
+
+
IdRef
+
+

IdRef can be used to reference other entities in TOs in order to make them type-safe and semantically more expressive. +It is an optional concept in devon4j for more complex applications that make intensive use of relations and foreign keys.

+
+
+
+
Motivation
+
+

Assuming you have a method signature like the following:

+
+
+
+
Long approve(Long cId, Long cuId);
+
+
+
+

So what are the parameters? What is returned?

+
+
+

IdRef is just a wrapper for a Long used as foreign key. This makes our signature much more expressive and self-explanatory:

+
+
+
+
IdRef<Contract> approve(IdRef<Contract> cId, IdRef<Customer> cuId);
+
+
+
+

Now we can easily see, that the result and the parameters are foreign-keys and which entity they are referring to via their generic type. +We can read the javadoc of these entities from the generic type and understand the context. +Finally, when passing IdRef objects to such methods, we get compile errors in case we accidentally place parameters in the wrong order.

+
+
+
+
IdRef and Mapping
+
+

In order to easily map relations from entities to transfer-objects and back, we can easily also put according getters and setters into our entities:

+
+
+
+
public class ContractEntity extends ApplicationPersistenceEntity implements Contract {
+
+  private CustomerEntity customer;
+
+  ...
+
+  @ManyToOne(fetch = FetchType.LAZY)
+  @JoinColumn(name = "CUSTOMER_ID")
+  public CustomerEntity getCustomer() {
+    return this.customer;
+  }
+
+  public void setCustomer(CustomerEntity customer) {
+    this.customer = customer;
+  }
+
+  @Transient
+  public IdRef<Customer> getCustomerId() {
+    return IdRef.of(this.customer);
+  }
+
+  public void setCustomerId(IdRef<Customer> customerId) {
+    this.customer = JpaHelper.asEntity(customerId, CustomerEntity.class);
+  }
+}
+
+
+
+

Now, ensure that you have the same getters and setters for customerId in your Eto:

+
+
+
+
public class ContractEto extends AbstractEto implements Contract {
+
+  private IdRef<Customer> customerId;
+
+  ...
+
+  public IdRef<Customer> getCustomerId() {
+    return this.customerId;
+  }
+
+  public void setCustomerId(IdRef<Customer> customerId) {
+    this.customerId = customerId;
+  }
+}
+
+
+
+

This way the bean-mapper can automatically map from your entity (ContractEntity) to your Eto (ContractEto) and vice-versa.

+
+
+
+
JpaHelper and EntityManager access
+
+

In the above example we used JpaHelper.asEntity to convert the foreign key (IdRef<Customer>) to the according entity (CustomerEntity). +This will internally use EntityManager.getReference to properly create a JPA entity. +The alternative "solution" that may be used with Long instead of IdRef is typically:

+
+
+
+
  public void setCustomerId(IdRef<Customer> customerId) {
+    Long id = null;
+    if (customerId != null) {
+      id = customerId.getId();
+    }
+    if (id == null) {
+      this.customer = null;
+    } else {
+      this.customer = new CustomerEntity();
+      this.customer.setId(id);
+    }
+  }
+
+
+
+

While this "solution" works in most cases, we discovered some more complex cases, where it fails with very strange hibernate exceptions. +When cleanly creating the entity via EntityManager.getReference instead it is working in all cases. +So how can JpaHelper.asEntity as a static method access the EntityManager? +Therefore we need to initialize this as otherwise you may see this exception:

+
+
+
+
java.lang.IllegalStateException: EntityManager has not yet been initialized!
+	at com.devonfw.module.jpa.dataaccess.api.JpaEntityManagerAccess.getEntityManager(JpaEntityManagerAccess.java:38)
+	at com.devonfw.module.jpa.dataaccess.api.JpaHelper.asEntity(JpaHelper.java:49)
+
+
+
+

For main usage in your application we assume that there is only one instance of EntityManager. +Therefore we can initialize this instance during the spring boot setup. +This is what we provide for you in JpaInitializer +when creating a devon4j app.

+
+
+
JpaHelper and spring-test
+
+

Further, you also want your code to work in integration tests. +Spring-test provides a lot of magic under the hood to make integration testing easy for you. +To boost the performance when running multiple tests, spring is smart and avoids creating the same spring-context multiple times. +Therefore it stores these contexts so that if a test-case is executed with a specific spring-configuration that has already been setup before, +the same spring-context can be reused instead of creating it again. +However, your tests may have multiple spring configurations leading to multiple spring-contexts. +Even worse these tests can run in any order leading to switching between spring-contexts back and forth. +Therefore, a static initializer during the spring boot setup can lead to strange errors as you can get the wrong EntityManager instance. +In order to fix such problems, we provide a solution pattern via DbTest ensuring for every test, +that the proper instance of EntityManager is initialized. +Therefore you should derive directly or indirectly (e.g. via ComponentDbTest and SubsystemDbTest) from DbTest or adopt your own way to apply this pattern to your tests, when using JpaHelper. +This already happens if you are extending ApplicationComponentTest or ApplicationSubsystemTest.

+
+
+ +
+
+
+
Transaction Handling
+
+

For transaction handling we use AOP to add transaction control via annotations as aspect. +This is done by annotating your code with the @Transactional annotation. +You can either annotate your container bean at class level to make all methods transactional or you can annotate individual methods to make them transactional:

+
+
+
+
  @Transactional
+  public Output getData(Input input) {
+    ...
+  }
+
+
+
+
+
JTA Imports
+
+

Here are the import statements for transaction support:

+
+
+
+
import javax.transaction.Transactional;
+
+
+
+ + + + + +
+ + +Use the above import statement to follow JEE and avoid using org.springframework.transaction.annotation.Transactional. +
+
+
+
+
JTA Dependencies
+
+

Please note that with Jakarta EE the dependencies have changed. +When you want to start with Jakarta EE you should use these dependencies to get the annotations for dependency injection:

+
+
+
+
<!-- Java Transaction API (JTA) -->
+<dependency>
+  <groupId>jakarta.transaction</groupId>
+  <artifactId>jakarta.transaction-api</artifactId>
+</dependency>
+
+
+
+

Please note that with quarkus you will get them as transitive dependencies out of the box. +The above Jakarta EE dependencies replace these JEE dependencies:

+
+
+
+
<!-- Java Transaction API (JTA) -->
+<dependency>
+  <groupId>javax.transaction</groupId>
+  <artifactId>javax.transaction-api</artifactId>
+</dependency>
+
+
+
+
+
Handling constraint violations
+
+

Using @Transactional magically wraps transaction handling around your code. +As constraints are checked by the database at the end when the transaction gets committed, a constraint violation will be thrown by this aspect outside your code. +In case you have to handle constraint violations manually, you have to do that in code outside the logic that is annotated with @Transactional. +This may be done in a service operation by catching a ConstraintViolationException (org.hibernate.exception.ConstraintViolationException for hibernate). +As a generic approach you can solve this via REST exception handling.

+
+
+
+
Batches
+
+

Transaction control for batches is a lot more complicated and is described in the batch layer.

+
+
+ +
+
+
+

1.36. SQL

+
+

For general guides on dealing or avoiding SQL, preventing SQL-injection, etc. you should study domain layer.

+
+
+
+

1.37. Naming Conventions

+
+

Here we define naming conventions that you should follow whenever you write SQL files:

+
+
+
    +
  • +

    All SQL-Keywords in UPPER CASE

    +
  • +
  • +

    Indentation should be 2 spaces as suggested by devonfw for every format.

    +
  • +
+
+
+
DDL
+
+

The naming conventions for database constructs (tables, columns, triggers, constraints, etc.) should be aligned with your database product and their operators. +However, when you have the freedom of choice and a modern case-sensitive database, you can simply use your code conventions also for database constructs to avoid explicitly mapping each and every property (e.g. RestaurantTable vs. RESTAURANT_TABLE).

+
+
+
    +
  • +

    Define columns and constraints inline in the statement to create the table

    +
  • +
  • +

    Indent column types so they all start in the same text column

    +
  • +
  • +

    Constraints should be named explicitly (to get a reasonable hint in error messages) with:

    +
    +
      +
    • +

      PK_«table» for primary key (name optional here as PK constraint are fundamental)

      +
    • +
    • +

      FK_«table»_«property» for foreign keys («table» and «property» are both on the source where the foreign key is defined)

      +
    • +
    • +

      UC_«table»_«property»[_«propertyN»]* for unique constraints

      +
    • +
    • +

      CK_«table»_«check» for check constraints («check» describes the check, if it is defined on a single property it should start with the property).

      +
    • +
    +
    +
  • +
  • +

    Old RDBMS had hard limitations for names (e.g. 30 characters). Please note that recent databases have overcome this very low length limitations. However, keep your names short but precise and try to define common abbreviations in your project for according (business) terms. Especially do not just truncate the names at the limit.

    +
  • +
  • +

    If possible add comments on table and columns to help DBAs understanding your schema. This is also honored by many tools (not only DBA-tools).

    +
  • +
+
+
+

Here is a brief example of a DDL:

+
+
+
+
CREATE SEQUENCE HIBERNATE_SEQUENCE START WITH 1000000;
+
+-- *** Table ***
+CREATE TABLE RESTAURANT_TABLE (
+  ID                   NUMBER(19) NOT NULL,
+  MODIFICATION_COUNTER INTEGER NOT NULL,
+  SEATS                INTEGER NOT NULL,
+  CONSTRAINT PK_TABLE PRIMARY KEY(ID)
+);
+COMMENT ON TABLE RESTAURANT_TABLE IS 'The physical tables inside the restaurant.';
+-- *** Order ***
+CREATE TABLE RESTAURANT_ORDER (
+  ID                   NUMBER(19) NOT NULL,
+  MODIFICATION_COUNTER INTEGER NOT NULL,
+  TABLE_ID             NUMBER(19) NOT NULL,
+  TOTAL                DECIMAL(5, 2) NOT NULL,
+  CREATION_DATE        TIMESTAMP NOT NULL,
+  PAYMENT_DATE         TIMESTAMP,
+  STATUS               VARCHAR2(10 CHAR) NOT NULL,
+  CONSTRAINT PK_ORDER PRIMARY KEY(ID),
+  CONSTRAINT FK_ORDER_TABLE_ID FOREIGN KEY(TABLE_ID) REFERENCES RESTAURANT_TABLE(ID)
+);
+COMMENT ON TABLE RESTAURANT_ORDER IS 'An order and bill at the restaurant.';
+...
+
+
+
+

ATTENTION: Please note that TABLE and ORDER are reserved keywords in SQL and you should avoid using such keywords to prevent problems.

+
+
+
+
Data
+
+

For insert, update, delete, etc. of data SQL scripts should additionally follow these guidelines:

+
+
+
    +
  • +

    Inserts always with the same order of columns in blocks for each table.

    +
  • +
  • +

    Insert column values always starting with ID, MODIFICATION_COUNTER, [DTYPE, ] …​

    +
  • +
  • +

    List columns with fixed length values (boolean, number, enums, etc.) before columns with free text to support alignment of multiple insert statements

    +
  • +
  • +

    Pro Tip: Get familiar with column mode of advanced editors such as notepad++ when editing large blocks of similar insert statements.

    +
  • +
+
+
+
+
INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (0, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (1, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (2, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (3, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (4, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (5, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (6, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (7, 1, 8);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (8, 1, 8);
+...
+
+
+
+

See also Database Migrations.

+
+
+ +
+
+
+

1.38. Database Migration

+
+

When you have a schema-based database, +you need a solution for schema versioning and migration for your database. +A specific release of your app requires a corresponding version of the schema in the database to run. +As you want simple and continuous deployment you should automate the schema versioning and database migration.

+
+
+

The general idea is that your software product contains "scripts" to migrate the database from schema version X to version X+1. +When you begin your project you start with version 1 and with every increment of your app that needs a change to the database schema (e.g. a new table, a new column to an existing table, a new index, etc.) you add another "script" that migrates from the current to the next version. +For simplicity these versions are just sequential numbers or timestamps. +Now, the solution you choose will automatically manage the schema version in a separate metadata table in your database that stores the current schema version. +When your app is started, it will check the current version inside the database from that metadata table. +As long as there are "scripts" that migrate from there to a higher version, they will be automatically applied to the database and this process is recorded in the metadata table in your database, which also updates the current schema version there. +Using this approach, you can start with an empty database, which will result in all "scripts" being applied sequentially. +Also any version of your database schema can be present and you will always end up in a controlled migration to the latest schema version.

+
+
+
+

1.39. Options for database migration

+
+

For database migration you can choose between the following options:

+
+
+
    +
  • +

    flyway (KISS based approach with migrations as SQL)

    +
  • +
  • +

    liquibase (more complex approach with database abstraction)

    +
  • +
+
+ +
+
Flyway
+
+

Flyway is a tool for database migration and schema versioning. +See why for a motivation why using flyway.

+
+
+

Flyway can be used standalone e.g. via flyway-maven-plugin or can be integrated directly into your app to make sure the database migration takes place on startup. +For simplicity we recommend to integrate flyway into your app. +However, you need to be aware that therefore your app needs database access with full schema owner permissions.

+
+
+
+
Organizational Advice
+
+

A few considerations with respect to project organization will help to implement maintainable Flyway migrations.

+
+
+

At first, testing and production environments must be clearly and consistently distinguished. Use the following directory structure to achieve this distinction:

+
+
+
+
  src/main/resources/db
+  src/test/resources/db
+
+
+
+

Although this structure introduces redundancies, the benefit outweighs this disadvantage. +An even more fine-grained production directory structure which contains one sub folder per release should be implemented:

+
+
+
+
  src/main/resources/db/migration/releases/X.Y/x.sql
+
+
+
+

Emphasizing that migration scripts below the current version must never be changed will aid the second advantage of migrations: it will always be clearly reproducible in which state the database currently is. +Here, it is important to mention that, if test data is required, it must be managed separately from the migration data in the following directory:

+
+
+
+
  src/test/resources/db/migration/
+
+
+
+

The migration directory is added to aid easy usage of Flyway defaults. Of course, test data should also be managed per release, just like production data.

+
+
+

With regard to content, separation of concerns (SoC) is an important goal. SoC can be achieved by distinguishing and writing multiple scripts with respect to business components/use cases (or database tables in case of large volumes of master data) [1]. Comprehensible file names aid this separation.

+
+
+

It is important to have clear responsibilities regarding the database, the persistence layer (JPA), and migrations. Therefore a dedicated database expert should be in charge of any migrations performed or she should at least be informed before any change to any of the mentioned parts is applied.

+
+
+
+
Technical Configuration
+
+

Database migrations can be SQL based or Java based.

+
+
+

To enable auto migration on startup (not recommended for productive environment) set the following properties in the application.properties file for an environment.

+
+
+
+
flyway.enabled=true
+flyway.clean-on-validation-error=false
+
+
+
+

For development environment it is helpful to set both properties to true in order to simplify development. For regular environments flyway.clean-on-validation-error should be false.

+
+
+

If you want to use Flyway set the following property in any case to prevent Hibernate from doing changes on the database (pre-configured by default in devonfw):

+
+
+
+
spring.jpa.hibernate.ddl-auto=validate
+
+
+
+

The setting must be communicated to and coordinated with the customer and their needs. +In acceptance testing the same configuration as for the production environment should be enabled.

+
+
+

Since migration scripts will also be versioned the end-of-line (EOL) style must be fixated according to this issue. This is however solved in flyway 4.0+ and the latest devonfw release. +Also, the version numbers of migration scripts should not consist of simple ascending integer numbers like V0001…​, V0002…​, …​ This naming may lead to problems when merging branches. Instead the usage of timestamps as version numbers will help to avoid such problems.

+
+
+
+
Naming Conventions
+
+

Database migrations should follow this naming convention: +V<version>__<description> (e.g.: V12345__Add_new_table.sql).

+
+
+

It is also possible to use Flyway for test data. To do so place your test data migrations in src/main/resources/db/testdata/ and set property

+
+
+
+
flyway.locations=classpath:db/migration/releases,classpath:db/migration/testdata
+
+
+
+

Then Flyway scans the additional location for migrations and applies all in the order specified by their version. If migrations V0001__... and V0002__... exist and a test data migration should be applied in between you can name it V0001_1__....

+
+ +
+
+
Liquibase
+ +
+

See devon4j#303 for details and status.

+
+
+
+
Spring-boot usage
+
+

For using liquibase in spring see Using Liquibase with Spring Boot.

+
+
+
+
Quarkus usage
+
+

For using Liquibase in Quarkus see Using Liquibase.

+
+
+ +
+
+
+

1.40. REST

+
+

REST (REpresentational State Transfer) is an inter-operable protocol for services that is more lightweight than SOAP. +However, it is no real standard and can cause confusion (see REST philosophy). +Therefore we define best practices here to guide you.

+
+
+
+

1.41. URLs

+
+

URLs are not case sensitive. Hence, we follow the best practice to use only lower-case-letters-with-hyphen-to-separate-words. +For operations in REST we distinguish the following types of URLs:

+
+
+
    +
  • +

    A collection URL is built from the REST service URL by appending the name of a collection. This is typically the name of an entity. Such a URL identifies the entire collection of all elements of this type. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity

    +
  • +
  • +

    An element URL is built from a collection URL by appending an element ID. It identifies a single element (entity) within the collection. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/42

    +
  • +
+
+
+

To follow KISS avoid using plural forms (…​/productmanagement/v1/products vs. …​/productmanagement/v1/product/42). Always use singular forms and avoid confusions (except for the rare cases where no singular exists).

+
+
+

The REST URL scheme fits perfectly for CRUD operations. For business operations (processing, calculation, advanced search, etc.) we simply append a collection URL with the name of the business operation. Then we can POST the input for the business operation and get the result back. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/search

+
+
+
+

1.42. HTTP Methods

+
+

The following table defines the HTTP methods (verbs) and their meaning:

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3. Usage of HTTP methods
HTTP MethodMeaning

GET

Read data (stateless).

PUT

Create or update data.

POST

Process data.

DELETE

Delete an entity.

+
+

Please also note that for (large) bulk deletions you may be forced to use POST instead of DELETE, as according to the HTTP standard DELETE must not have payload and URLs are limited in length.

+
+
+

For general recommendations on HTTP methods for collection and element URLs see REST@wikipedia.

+
+
+
+

1.43. HTTP Status Codes

+
+

Further we define how to use the HTTP status codes for REST services properly. In general the 4xx codes correspond to an error on the client side and the 5xx codes to an error on the server side.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4. Usage of HTTP status codes
HTTP CodeMeaningResponseComment

200

OK

requested result

Result of successful GET

204

No Content

none

Result of successful POST, DELETE, or PUT with empty result (void return)

400

Bad Request

error details

The HTTP request is invalid (parse error, validation failed)

401

Unauthorized

none

Authentication failed

403

Forbidden

none

Authorization failed

404

Not found

none

Either the service URL is wrong or the requested resource does not exist

500

Server Error

error code, UUID

Internal server error occurred, in case of an exception, see REST exception handling

+
+
+

1.44. JAX-RS

+
+

For implementing REST services we use the JAX-RS standard. +As payload encoding we recommend JSON bindings using Jackson. +To implement a REST service you simply add JAX-RS annotations. +Here is a simple example:

+
+
+
+
@ApplicationScoped
+@Path("/imagemanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public class ImagemanagementRestService {
+
+  @Inject
+  private Imagemanagement imagemanagement;
+
+  @GET
+  @Path("/image/{id}/")
+  public ImageDto getImage(@PathParam("id") long id) {
+
+    return this.imagemanagement.findImage(id);
+  }
+}
+
+
+
+

Here we can see a REST service for the business component imagemanagement. The method getImage can be accessed via HTTP GET (see @GET) under the URL path imagemanagement/image/{id} (see @Path annotations) where {id} is the ID of the requested image and will be extracted from the URL and provided as parameter id to the method getImage. It will return its result (ImageDto) as JSON (see @Produces annotation - you can also extend RestService marker interface that defines these annotations for JSON). As you can see it delegates to the logic component imagemanagement that contains the actual business logic while the service itself only exposes this logic via HTTP. The REST service implementation is a regular CDI bean that can use dependency injection.

+
+
+ + + + + +
+ + +With JAX-RS it is important to make sure that each service method is annotated with the proper HTTP method (@GET,@POST,etc.) to avoid unnecessary debugging. So you should take care not to forget to specify one of these annotations. +
+
+
+
Service-Interface
+
+

You may also separate API and implementation in case you want to reuse the API for service-client:

+
+
+
+
@Path("/imagemanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public interface ImagemanagementRestService {
+
+  @GET
+  @Path("/image/{id}/")
+  ImageEto getImage(@PathParam("id") long id);
+
+}
+
+@Named("ImagemanagementRestService")
+public class ImagemanagementRestServiceImpl implements ImagemanagementRestService {
+
+  @Override
+  public ImageEto getImage(long id) {
+
+    return this.imagemanagement.findImage(id);
+  }
+
+}
+
+
+
+
+
JAX-RS Configuration
+
+

Starting from CXF 3.0.0 it is possible to enable the auto-discovery of JAX-RS roots.

+
+
+

When the jaxrs server is instantiated all the scanned root and provider beans (beans annotated with javax.ws.rs.Path and javax.ws.rs.ext.Provider) are configured.

+
+
+
+
REST Exception Handling
+
+

For exceptions a service needs to have an exception façade that catches all exceptions and handles them by writing proper log messages and mapping them to a HTTP response with an according HTTP status code. Therefore the devonfw provides a generic solution via RestServiceExceptionFacade. You need to follow the exception guide so that it works out of the box because the façade needs to be able to distinguish between business and technical exceptions. Now your service may throw exceptions but the façade will automatically handle them for you.

+
+
+

The general format for returning an error to the client is as follows:

+
+
+
+
{
+  "message": "A human-readable message describing the error",
+  "code": "A code identifying the concrete error",
+  "uuid": "An identifier (generally the correlation id) to help identify corresponding requests in logs"
+}
+
+
+
+
+
Pagination details
+
+

We recommend to use spring-data repositories for database access that already comes with pagination support. +Therefore, when performing a search, you can include a Pageable object. +Here is a JSON example for it:

+
+
+
+
{ "pageSize": 20, "pageNumber": 0, "sort": [] }
+
+
+
+

By increasing the pageNumber the client can browse and page through the hits.

+
+
+

As a result you will receive a Page. +It is a container for your search results just like a Collection but additionally contains pagination information for the client. +Here is a JSON example:

+
+
+
+
{ "totalElements": 1022,
+  pageable: { "pageSize": 20, "pageNumber": 0 },
+  content: [ ... ] }
+
+
+
+

The totalElements property contains the total number of hits. +This can be used by the client to compute the total number of pages and render the pagination links accordingly. +Via the pageable property the client gets back the Pageable properties from the search request. +The actual hits for the current page are returned as array in the content property.

+
+
+
+
+

1.45. REST Testing

+
+

For testing REST services in general consult the testing guide.

+
+
+

For manual testing REST services there are browser plugins:

+
+
+ +
+
+
+

1.46. Security

+
+

Your services are the major entry point to your application. Hence security considerations are important here.

+
+
+
CSRF
+
+

A common security threat is CSRF for REST services. Therefore all REST operations that are performing modifications (PUT, POST, DELETE, etc. - all except GET) have to be secured against CSRF attacks. See CSRF how to do this.

+
+
+
+
JSON top-level arrays
+
+

OWASP earlier suggested to never return JSON arrays at the top-level, to prevent attacks without rationale. +We digged deep and found anatomy-of-a-subtle-json-vulnerability. +To sum it up the attack is many years old and does not work in any recent or relevant browser. +Hence it is fine to use arrays as top-level result in a JSON REST service (means you can return List<Foo> in a Java JAX-RS service).

+
+
+ +
+
+
+

1.47. JSON

+
+

JSON (JavaScript Object Notation) is a popular format to represent and exchange data especially for modern web-clients. For mapping Java objects to JSON and vice-versa there is no official standard API. We use the established and powerful open-source solution Jackson. +Due to problems with the wiki of fasterxml you should try this alternative link: Jackson/AltLink.

+
+
+
+

1.48. Configure JSON Mapping

+
+

In order to avoid polluting business objects with proprietary Jackson annotations (e.g. @JsonTypeInfo, @JsonSubTypes, @JsonProperty) we propose to create a separate configuration class. Every devonfw application (sample or any app created from our app-template) therefore has a class called ApplicationObjectMapperFactory that extends ObjectMapperFactory from the devon4j-rest module. It looks like this:

+
+
+
+
@Named("ApplicationObjectMapperFactory")
+public class ApplicationObjectMapperFactory extends ObjectMapperFactory {
+
+  public ApplicationObjectMapperFactory() {
+    super();
+    // JSON configuration code goes here
+  }
+}
+
+
+
+
+

1.49. JSON and Inheritance

+
+

If you are using inheritance for your objects mapped to JSON then polymorphism can not be supported out-of-the box. So in general avoid polymorphic objects in JSON mapping. However, this is not always possible. +Have a look at the following example from our sample application:

+
+
+
+inheritance class diagram +
+
Figure 2. Transfer-Objects using Inheritance
+
+
+

Now assume you have a REST service operation as Java method that takes a ProductEto as argument. As this is an abstract class the server needs to know the actual sub-class to instantiate. +We typically do not want to specify the classname in the JSON as this should be an implementation detail and not part of the public JSON format (e.g. in case of a service interface). Therefore we use a symbolic name for each polymorphic subtype that is provided as virtual attribute @type within the JSON data of the object:

+
+
+
+
{ "@type": "Drink", ... }
+
+
+
+

Therefore you add configuration code to the constructor of ApplicationObjectMapperFactory. Here you can see an example from the sample application:

+
+
+
+
setBaseClasses(ProductEto.class);
+addSubtypes(new NamedType(MealEto.class, "Meal"), new NamedType(DrinkEto.class, "Drink"),
+  new NamedType(SideDishEto.class, "SideDish"));
+
+
+
+

We use setBaseClasses to register all top-level classes of polymorphic objects. Further we declare all concrete polymorphic sub-classes together with their symbolic name for the JSON format via addSubtypes.

+
+
+
+

1.50. Custom Mapping

+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to define a custom mapping. If you create objects dedicated for the JSON mapping you can easily avoid such situations. When this is not suitable follow these instructions to define the mapping:

+
+
+
    +
  1. +

    As an example, the use of JSR354 (javax.money) is appreciated in order to process monetary amounts properly. However, without custom mapping, the default mapping of Jackson will produce the following JSON for a MonetaryAmount:

    +
    +
    +
    "currency": {"defaultFractionDigits":2, "numericCode":978, "currencyCode":"EUR"},
    +"monetaryContext": {...},
    +"number":6.99,
    +"factory": {...}
    +
    +
    +
    +

    As clearly can be seen, the JSON contains too much information and reveals implementation secrets that do not belong here. Instead the JSON output expected and desired would be:

    +
    +
    +
    +
    "currency":"EUR","amount":"6.99"
    +
    +
    +
    +

    Even worse, when we send the JSON data to the server, Jackson will see that MonetaryAmount is an interface and does not know how to instantiate it so the request will fail. +Therefore we need a customized Serializer.

    +
    +
  2. +
  3. +

    We implement MonetaryAmountJsonSerializer to define how a MonetaryAmount is serialized to JSON:

    +
    +
    +
    public final class MonetaryAmountJsonSerializer extends JsonSerializer<MonetaryAmount> {
    +
    +  public static final String NUMBER = "amount";
    +  public static final String CURRENCY = "currency";
    +
    +  public void serialize(MonetaryAmount value, JsonGenerator jgen, SerializerProvider provider) throws ... {
    +    if (value != null) {
    +      jgen.writeStartObject();
    +      jgen.writeFieldName(MonetaryAmountJsonSerializer.CURRENCY);
    +      jgen.writeString(value.getCurrency().getCurrencyCode());
    +      jgen.writeFieldName(MonetaryAmountJsonSerializer.NUMBER);
    +      jgen.writeString(value.getNumber().toString());
    +      jgen.writeEndObject();
    +    }
    +  }
    +
    +
    +
    +

    For composite datatypes it is important to wrap the info as an object (writeStartObject() and writeEndObject()). MonetaryAmount provides the information we need via getCurrency() and getNumber(), so that we can easily write them into the JSON data.

    +
    +
  4. +
  5. +

    Next, we implement MonetaryAmountJsonDeserializer to define how a MonetaryAmount is deserialized back as Java object from JSON:

    +
    +
    +
    public final class MonetaryAmountJsonDeserializer extends AbstractJsonDeserializer<MonetaryAmount> {
    +  protected MonetaryAmount deserializeNode(JsonNode node) {
    +    BigDecimal number = getRequiredValue(node, MonetaryAmountJsonSerializer.NUMBER, BigDecimal.class);
    +    String currencyCode = getRequiredValue(node, MonetaryAmountJsonSerializer.CURRENCY, String.class);
    +    MonetaryAmount monetaryAmount =
    +        MonetaryAmounts.getAmountFactory().setNumber(number).setCurrency(currencyCode).create();
    +    return monetaryAmount;
    +  }
    +}
    +
    +
    +
    +

    For composite datatypes we extend from AbstractJsonDeserializer as this makes our task easier. So we already get a JsonNode with the parsed payload of our datatype. Based on this API it is easy to retrieve individual fields from the payload without taking care of their order, etc. AbstractJsonDeserializer also provides methods such as getRequiredValue to read required fields and get them converted to the desired basic datatype. So we can easily read the amount and currency and construct an instance of MonetaryAmount via the official factory API.

    +
    +
  6. +
  7. +

    Finally we need to register our custom (de)serializers with the following configuration code in the constructor of ApplicationObjectMapperFactory:+

    +
  8. +
+
+
+
+
  SimpleModule module = getExtensionModule();
+  module.addDeserializer(MonetaryAmount.class, new MonetaryAmountJsonDeserializer());
+  module.addSerializer(MonetaryAmount.class, new MonetaryAmountJsonSerializer());
+
+
+
+

Now we can read and write MonetaryAmount from and to JSON as expected.

+
+
+ +
+
+

1.51. XML

+
+

XML (Extensible Markup Language) is a W3C standard format for structured information. It has a large eco-system of additional standards and tools.

+
+
+

In Java there are many different APIs and frameworks for accessing, producing and processing XML. For the devonfw we recommend to use JAXB for mapping Java objects to XML and vice-versa. Further there is the popular DOM API for reading and writing smaller XML documents directly. When processing large XML documents StAX is the right choice.

+
+
+
+

1.52. JAXB

+
+

We use JAXB to serialize Java objects to XML or vice-versa.

+
+
+
JAXB and Inheritance
+
+

Use @XmlSeeAlso annotation to provide sub-classes. +See section "Collective Polymorphism" described here.

+
+
+
+
JAXB Custom Mapping
+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to define a custom mapping. If you create dedicated objects for the XML mapping you can easily avoid such situations. When this is not suitable use @XmlJavaTypeAdapter and provide an XmlAdapter implementation that handles the mapping. +For details see here.

+
+
+
+
+

1.53. Security

+
+

To prevent XML External Entity attacks, follow JAXP Security Guide and enable FSP.

+
+
+ +
+
+

1.54. SOAP

+
+

SOAP is a common protocol for services that is rather complex and heavy. It allows to build inter-operable and well specified services (see WSDL). SOAP is transport neutral, which is not only an advantage. We strongly recommend to use HTTPS transport and ignore additional complex standards like WS-Security and use established HTTP-Standards such as RFC2617 (and RFC5280).

+
+
+
+

1.55. JAX-WS

+
+

For building web-services with Java we use the JAX-WS standard. +There are two approaches:

+
+
+
    +
  • +

    code first

    +
  • +
  • +

    contract first

    +
  • +
+
+
+

Here is an example in case you define a code-first service.

+
+
+
Web-Service Interface
+
+

We define a regular interface to define the API of the service and annotate it with JAX-WS annotations:

+
+
+
+
@WebService
+public interface TablemanagmentWebService {
+
+  @WebMethod
+  @WebResult(name = "message")
+  TableEto getTable(@WebParam(name = "id") String id);
+
+}
+
+
+
+
+
Web-Service Implementation
+
+

And here is a simple implementation of the service:

+
+
+
+
@Named
+@WebService(endpointInterface = "com.devonfw.application.mtsj.tablemanagement.service.api.ws.TablemanagmentWebService")
+public class TablemanagementWebServiceImpl implements TablemanagmentWebService {
+
+  private Tablemanagement tableManagement;
+
+  @Override
+  public TableEto getTable(String id) {
+
+    return this.tableManagement.findTable(id);
+  }
+
+
+
+
+
+

1.56. SOAP Custom Mapping

+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to write adapters for JAXB (see XML).

+
+
+
+

1.57. SOAP Testing

+
+

For testing SOAP services in general consult the testing guide.

+
+
+

For testing SOAP services manually we strongly recommend SoapUI.

+
+
+ +
+
+

1.58. Logging

+
+

We recommend to use SLF4J as API for logging, that has become a de facto standard in Java as it has a much better design than java.util.logging offered by the JDK. There are several implementations for SLF4J. For Spring applications our recommended implementation is Logback. Quarkus uses JBoss Logging which provides a JBoss Log Manager implementation for SLF4J. For more information on logging in Quarkus, see the Quarkus logging guide.

+
+
+
+

1.59. Logging Dependencies

+
+

To use Logback in your Spring application, you need to include the following dependencies:

+
+
+
+
<!-- SLF4J as logging API -->
+<dependency>
+  <groupId>org.slf4j</groupId>
+  <artifactId>slf4j-api</artifactId>
+</dependency>
+<!-- Logback as logging implementation  -->
+<dependency>
+  <groupId>ch.qos.logback</groupId>
+  <artifactId>logback-classic</artifactId>
+</dependency>
+<!-- JSON logging for cloud-native log monitoring -->
+<dependency>
+  <groupId>net.logstash.logback</groupId>
+  <artifactId>logstash-logback-encoder</artifactId>
+</dependency>
+
+
+
+

In devon4j these dependencies are provided by the devon4j-logging module.

+
+
+

In Quarkus, SLF4J and the slf4j-jboss-logmanager are directly included in the Quarkus core runtime and can be used out of the box.

+
+
+
+

1.60. Logger Access

+
+

The general pattern for accessing loggers from your code is a static logger instance per class using the following pattern:

+
+
+
+
import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class MyClass {
+  private static final Logger LOG = LoggerFactory.getLogger(MyClass.class);
+  ...
+}
+
+
+
+

For detailed documentation how to use the logger API check the SLF4j manual.

+
+
+ + + + + +
+ + +In case you are using devonfw-ide and Eclipse you can just type LOG and hit [ctrl][space] to insert the code pattern including the imports into your class. +
+
+
+
Lombok
+
+

In case you are using Lombok, you can simply use the @Slf4j annotation in your class. This causes Lombok to generate the logger instance for you.

+
+
+
+
+

1.61. Log-Levels

+
+

We use a common understanding of the log-levels as illustrated by the following table. +This helps for better maintenance and operation of the systems.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 5. Log-levels
Log-levelDescriptionImpactActive Environments

FATAL

Only used for fatal errors that prevent the application to work at all (e.g. startup fails or shutdown/restart required)

Operator has to react immediately

all

ERROR

An abnormal error indicating that the processing failed due to technical problems.

Operator should check for known issue and otherwise inform development

all

WARNING

A situation where something worked not as expected. E.g. a business exception or user validation failure occurred.

No direct reaction required. Used for problem analysis.

all

INFO

Important information such as context, duration, success/failure of request or process

No direct reaction required. Used for analysis.

all

DEBUG

Development information that provides additional context for debugging problems.

No direct reaction required. Used for analysis.

development and testing

TRACE

Like DEBUG but exhaustive information and for code that is run very frequently. Will typically cause large log-files.

No direct reaction required. Used for problem analysis.

none (turned off by default)

+
+

Exceptions (with their stack trace) should only be logged on FATAL or ERROR level. For business exceptions typically a WARNING including the message of the exception is sufficient.

+
+
+
Configuration of Logback
+
+

The configuration of logback happens via the logback.xml file that you should place into src/main/resources of your app. +For details consult the logback configuration manual.

+
+
+ + + + + +
+ + +Logback also allows to overrule the configuration with a logback-test.xml file that you may put into src/test/resources or into a test-dependency. +
+
+
+
+
Configuration in Quarkus
+
+

There are several options you can set in the application.properties file to configure the behaviour of the logger in Quarkus. For a detailed overview, see the corresponding part of the Quarkus guide.

+
+
+
+
+

1.62. JSON-logging

+
+

For easy integration with log-monitoring, we recommend that your app logs to standard out in JSON following JSON Lines.

+
+
+

In Spring applications, this can be achieved via logstash-logback-encoder (see dependencies). In Quarkus, it can be easily achieved using the quarkus-logging-json extension (see here for more details).

+
+
+

This will produce log-lines with the following format (example formatted for readability):

+
+
+
+
{
+  "timestamp":"2000-12-31T23:59:59.999+00:00",
+  "@version":"1",
+  "message":"Processing 4 order(s) for shipment",
+  "logger_name":"com.myapp.order.logic.UcManageOrder",
+  "thread_name":"http-nio-8081-exec-6",
+  "level":"INFO",
+  "level_value":20000,
+  "appname":"myapp"
+}
+
+
+
+
Adding custom values to JSON log with Logstash
+
+

The JSON encoder even supports logging custom properties for your log-monitoring. The trick is to use the class net.logstash.logback.argument.StructuredArguments for adding the arguments to your log message, e.g.

+
+
+
+
import static net.logstash.logback.argument.StructuredArguments.v;
+
+...
+    LOG.info("Request with {} and {} took {} ms.", v("url", url), v("status", statusCode), v("duration", millis));
+...
+
+
+
+

This will produce a JSON log-line with the following properties:

+
+
+
+
...
+  "message":"Request with url=https://api/service/v1/ordermanagement/order and status=200 took duration=251 ms",
+  "url":"https://api/service/v1/ordermanagement/order",
+  "status":"200",
+  "duration":"251",
+...
+
+
+
+

As you can quickly see besides the human readable message you also have the structured properties url, status and duration that can be extremely valuable to configure dashboards in your log-monitoring that visualize success/failure ratio as well as performance of your requests.

+
+
+
+
+

1.63. Classic log-files

+
+ + + + + +
+ + +In devon4j, we strongly recommend using JSON logging instead of classic log files. The following section refers only to devon4j Spring applications that use Logback. +
+
+
+

Even though we do not recommend anymore to write classical log-files to the local disc, here you can still find our approach for it.

+
+
+
Maven-Integration
+
+

In the pom.xml of your application add this dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java</groupId>
+  <artifactId>devon4j-logging</artifactId>
+</dependency>
+
+
+
+

The above dependency already adds transitive dependencies to SLF4J and logback. +Also it comes with configuration snippets that can be included from your logback.xml file (see configuration).

+
+
+

The logback.xml to write regular log-files can look as following:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<configuration scan="true" scanPeriod="60 seconds">
+  <property resource="com/devonfw/logging/logback/application-logging.properties" />
+  <property name="appname" value="MyApp"/>
+  <property name="logPath" value="../logs"/>
+  <include resource="com/devonfw/logging/logback/appenders-file-all.xml" />
+  <include resource="com/devonfw/logging/logback/appender-console.xml" />
+
+  <root level="DEBUG">
+    <appender-ref ref="ERROR_APPENDER"/>
+    <appender-ref ref="INFO_APPENDER"/>
+    <appender-ref ref="DEBUG_APPENDER"/>
+    <appender-ref ref="CONSOLE_APPENDER"/>
+  </root>
+
+  <logger name="org.springframework" level="INFO"/>
+</configuration>
+
+
+
+

The provided logback.xml is configured to use variables defined in the config/application.properties file. +In our example, the log file path points to ../logs/ in order to log to the tomcat log directory when starting tomcat from the bin folder. +Change it according to your custom needs.

+
+
+
Listing 8. config/application.properties
+
+
log.dir=../logs/
+
+
+
+
+
Log Files
+
+

The classical approach uses the following log files:

+
+
+
    +
  • +

    Error Log: Includes log entries to detect errors.

    +
  • +
  • +

    Info Log: Used to analyze system status and to detect bottlenecks.

    +
  • +
  • +

    Debug Log: Detailed information for error detection.

    +
  • +
+
+
+

The log file name pattern is as follows:

+
+
+
+
«LOGTYPE»_log_«HOST»_«APPLICATION»_«TIMESTAMP».log
+
+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 6. Segments of Logfilename
ElementValueDescription

«LOGTYPE»

info, error, debug

Type of log file

«HOST»

e.g. mywebserver01

Name of server, where logs are generated

«APPLICATION»

e.g. myapp

Name of application, which causes logs

«TIMESTAMP»

YYYY-MM-DD_HH00

date of log file

+
+

Example: +error_log_mywebserver01_myapp_2013-09-16_0900.log

+
+
+

Error log from mywebserver01 at application myapp at 16th September 2013 9am.

+
+
+
+
Output format
+
+

We use the following output format for all log entries to ensure that searching and filtering of log entries work consistent for all logfiles:

+
+
+
+
[D: «timestamp»] [P: «priority»] [C: «NDC»][T: «thread»][L: «logger»]-[M: «message»]
+
+
+
+
    +
  • +

    D: Date (Timestamp in ISO8601 format e.g. 2013-09-05 16:40:36,464)

    +
  • +
  • +

    P: Priority (the log level)

    +
  • +
  • +

    C: Correlation ID (ID to identify users across multiple systems, needed when application is distributed)

    +
  • +
  • +

    T: Thread (Name of thread)

    +
  • +
  • +

    L: Logger name (use class name)

    +
  • +
  • +

    M: Message (log message)

    +
  • +
+
+
+

Example:

+
+
+
+
[D: 2013-09-05 16:40:36,464] [P: DEBUG] [C: 12345] [T: main] [L: my.package.MyClass]-[M: My message...]
+
+
+
+ + + + + +
+ + +When using devon4j-logging, this format is used by default. To achieve this format in Quarkus, set quarkus.log.console.format=[D: %d] [P: %p] [C: %X] [T: %t] [L: %c] [M: %m]%n in your properties. +
+
+
+
+
Correlation ID
+
+

In order to correlate separate HTTP requests to services belonging to the same user / session, we provide a servlet filter called DiagnosticContextFilter. +This filter takes a provided correlation ID from the HTTP header X-Correlation-Id. +If none was found, it will generate a new correlation id as UUID. +This correlation ID is added as MDC to the logger. +Therefore, it will then be included to any log message of the current request (thread). +Further concepts such as service invocations will pass this correlation ID to subsequent calls in the application landscape. Hence you can find all log messages related to an initial request simply via the correlation ID even in highly distributed systems.

+
+
+
+
Security
+
+

In order to prevent log forging attacks you can simply use the suggested JSON logging format. +Otherwise you can use com.devonfw.module.logging.common.impl.SingleLinePatternLayout as demonstrated here in order to prevent such attacks.

+
+
+ +
+
+
+

1.64. Monitoring

+
+

For monitoring a complex application landscape it is crucial to have an exact overview which applications are up and running and which are not and why. +In devonfw we only focus on topics which are most important when developing production-ready applications. +On a high level view we strongly suggest to separate the application to be monitored from the monitoring system itself. +Therefore, your application should concentrate on providing app specific data for the monitoring. +Aspects such as aggregation, visualization, search, alerting, etc. should be addressed outside of your app by a monitoring system product. +There are many products providing such a monitoring system like checkmk, icinga, SkyWalking, etc. +Please note that there is a huge list of such products and devonfw is not biased or aims to make a choice for you. +Instead please search and find the products that fit best for your requirements and infrastructure.

+
+
+
+

1.65. Types of monitoring

+
+

As monitoring covers a lot of different aspects we separate the following types of monitoring and according data:

+
+
+
    +
  • +

    Log-monitoring
    +is about collecting and monitoring the logs of all apps and containers in your IT landscape. It is suitable for events such as an HTTP request with its URL, resulting status code and duration in milliseconds. Your monitoring may not react to such data in realtime. Instead it may take a delay of one or a few seconds.

    +
  • +
  • +

    Infrastructure monitoring
    +is about monitoring the (hardware) infrastructure with measures like usage of CPU, memory, disc-space, etc. This is a pure operational task and your app should have nothing to do with this. In other words it is a waste if your app tries to monitor these aspects as existing products can do this much better and your app will only see virtual machines and is unable to see the physical infrastructure.

    +
  • +
  • +

    Health check
    +is about providing internal data about the current health of your app. Typically you provide sensors with health status per component or interface to neighbour service (database connectivity, etc.).

    +
  • +
  • +

    Application Performance Monitoring
    +is about measuring performance and tracing down performance issues.

    +
  • +
+
+
+
+

1.66. Health-Check

+
+

The idea of a health check is to provide monitoring data about the current health status of your application. +This allows to integrate this specific data into the monitoring system used for your IT landscape. +In order to keep the monitoring simple and easy to integrate consider using the following best practices:

+
+
+
    +
  • +

    Use simple and established protocols such as REST instead of JMX via RMI.

    +
  • +
  • +

    Consider using recent standards such as microprofile-health.

    +
  • +
  • +

    Consider to drop access-control for your monitoring interfaces and for security prevent external access to it in your infrastructure (loadbalancers or gateways). Monitoring is only for usage within an IT landscape internally. It does not make sense for externals and end-users to access your app for reading monitoring data from a random node decided by a loadbalancer. Further, external access can easily lead to sensitive data exposure.

    +
  • +
  • +

    Consider to define different end-points per usage-scenario. So if you want the loadbalancer to ask your app monitoring for availability of each node then create a separate service URL that only provides OK or anything else for failure (NOK, 404, 500, timeout). Do not mix this with a health-check that needs more detailed information.

    +
  • +
  • +

    Also do not forget about basic features such as providing the name and the release version of your application.

    +
  • +
  • +

    Be careful to automate decisions based on monitoring and health checks. It easily turns out to be stupid if you automatically restart your pod or container because of some monitoring indicator. In the worst case a failure of a central component will cause your health-check to report down for all apps and as a result all your containers will be restarted frequently. Instead of curing problems such decisions will cause much more harm and trouble.

    +
  • +
  • +

    Avoid causing considerable load with your monitoring and health-check itself. In many cases it is better to use log-monitoring or to collect monitoring data from use-cases that happen in your app anyway. If you create dummy read and write requests in your monitoring implementation you will easily turn it into a DOS-attack.

    +
  • +
+
+
+

For spring you can simply integrate app monitoring and health check via spring-boot-actuator.

+
+
+

For quarkus you can simply integrate app monitoring via micrometer or smallrye-metrics and health check via smallrye-health.

+
+ +
+
+

1.67. Log-Monitoring

+
+

Log-monitoring is an aspect of monitoring with a strict focus on logging. +With trends towards IT landscapes with many but much smaller apps the classical approach to write log-files to the disc and let operators read those via SSH became entirely obsolete. +Nowadays we have up to hundreds or even thousands of apps that themselves are clustered into multiple nodes. +Therefore you should establish a centralized log monitoring system in the environment and let all your nodes log directly into that system. +This approach gives the following benefits:

+
+
+
    +
  • +

    all log information available in one place

    +
  • +
  • +

    full-text search across all logfiles

    +
  • +
  • +

    ability to automatically trigger alerts from specific log patterns

    +
  • +
  • +

    ability to do data-mining on logs and visualize in dashboards

    +
  • +
+
+
+
+

1.68. Options for log-monitoring

+
+

Typical products for such a log monitoring system are:

+
+
+ +
+
+

In devonfw we are not biased for any of these products. Therefore, feel free to make your choice according to the requirements of your project.

+
+
+

For Quarkus applications, you can get an insight into the topic by reading the guide about centralized log management.

+
+
+
+

1.69. API for log-monitoring

+
+

The "API" for logging to a log-monitoring system for your app is pretty simple:

+
+
+
    +
  • +

    Write your logs to standard out.

    +
  • +
  • +

    Use JSON logging as format.

    +
  • +
+
+
+

Then the container infrastructure can automatically collect your logs from standard out and directly feed those into the log monitoring system. +As a result, your app does not need to know anything about your log monitoring system and logging becomes most simple. +Further, if you do not write log-files anymore, you might not need to write any other files and therefore may not even need write permissions on the filesystem of your container. +In such case an attacker who may find a vulnerability in your app will have less attack surface in case he can not write any file.

+
+ +
+
+

1.70. Application Performance Management

+
+

This guide gives hints how to manage, monitor and analyse performance of Java applications.

+
+
+
+

1.71. Temporary Analysis

+
+

If you are facing performance issues and want to do a punctual analysis we recommend you to use glowroot. It is ideal in cases where monitoring in your local development environment is suitable. However, it is also possible to use it in your test environment. It is entirely free and open-source. Still it is very powerful and helps to trace down bottlenecks. To get a first impression of the tool take a look at the demo.

+
+
+
JEE/WTP
+
+

In case you are forced to use a JEE application server and want to do a temporary analysis you can double click your server instance from the servers view in Eclipse and click on the link Open launch configuration in order to add the -javaagent JVM option.

+
+
+
+
+

1.72. Regular Analysis

+
+

In case you want to manage application performance regularly we recommend to use JavaMelody that can be integrated into your application. More information on javamelody is available on the JavaMelody Wiki

+
+
+
+

1.73. Alternatives

+
+ +
+
+ +
+
+

1.74. Security

+
+

Security is todays most important cross-cutting concern of an application and an enterprise IT-landscape. We seriously care about security and give you detailed guides to prevent pitfalls, vulnerabilities, and other disasters. While many mistakes can be avoided by following our guidelines you still have to consider security and think about it in your design and implementation. The security guide cannot automatically prevent you from any harm, but it provides you hints and best practices already used in different software products.

+
+
+

An important aspect of security is proper authentication and authorization as described in access-control. In the following we discuss about potential vulnerabilities and protection to prevent them.

+
+
+
+

1.75. Vulnerabilities and Protection

+
+

Independent from classical authentication and authorization mechanisms there are many common pitfalls that can lead to vulnerabilities and security issues in your application such as XSS, CSRF, SQL-injection, log-forging, etc. A good source of information about this is the OWASP. +We address these common threats individually in security sections of our technological guides as a concrete solution to prevent an attack typically depends on the according technology. The following table illustrates common threats and contains links to the solutions and protection-mechanisms provided by the devonfw:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 7. Security threats and protection-mechanisms
ThreatProtectionLink to details

A1 Injection

validate input, escape output, use proper frameworks

SQL Injection

A2 Broken Authentication

encrypt all channels, use a central identity management with strong password-policy

Authentication

A3 Sensitive Data Exposure

Use secured exception facade, design your data model accordingly

REST exception handling

A4 XML External Entities

Prefer JSON over XML, ensure FSP when parsing (external) XML

XML guide

A5 Broken Access Control

Ensure proper authorization for all use-cases, use @DenyAll as default to enforce

Access-control guide especially method authorization

A6 Security Misconfiguration

Use devon4j application template and guides to avoid

tutorial-newapp and sensitive configuration

A7 Cross-Site Scripting

prevent injection (see A1) for HTML, JavaScript and CSS and understand same-origin-policy

client-layer

A8 Insecure Deserialization

Use simple and established serialization formats such as JSON, prevent generic deserialization (for polymorphic types)

JSON guide especially inheritance, XML guide

A9 Using Components with Known Vulnerabilities

subscribe to security newsletters, recheck products and their versions continuously, use devonfw dependency management

CVE newsletter and dependency check

A10 Insufficient_Logging & Monitoring

Ensure to log all security related events (login, logout, errors), establish effective monitoring

Logging guide and monitoring guide

Insecure Direct Object References

Using direct object references (IDs) only with appropriate authorization

logic-layer

Cross-Site Request Forgery (CSRF)

secure mutable service operations with an explicit CSRF security token sent in HTTP header and verified on the server

CSRF guide

Log-Forging

Escape newlines in log messages

logging security

Unvalidated Redirects and Forwards

Avoid using redirects and forwards, in case you need them do a security audit on the solution.

devonfw proposes to use rich-clients (SPA/RIA). We only use redirects for login in a safe way.

+
+
+

1.76. Advanced Security

+
+

While OWASP Top 10 covers the basic aspects of application security, there are advanced standards such as ASVS. +In devonfw we address this in the +Application Security Quick Solution Guide.

+
+
+
+

1.77. Tools

+
+
Dependency Check
+
+

To address the threat Using Components with Known Vulnerabilities we recommend to use OWASP dependency check that ships with a maven plugin and can analyze your dependencies for known CVEs. +In order to run this check, you can simply call this command on any maven project:

+
+
+
+
mvn org.owasp:dependency-check-maven:6.1.5:aggregate
+
+
+
+ + + + + +
+ + +The version is just for completeness. You should check yourself for using a recent version of the plugin. +
+
+
+

If you build a devon4j spring application from our app-template you can activate the dependency check even easier with the security profile:

+
+
+
+
mvn clean install -P security
+
+
+
+

This does not run by default as it causes some overhead for the build performance. However, consider to build this in your CI at least nightly. +After the dependency check is performed, you will find the results in target/dependency-check-report.html of each module. The report will also be generated when the site is built (mvn site) even without the profile.

+
+
+
+
Penetration Testing
+
+

For penetration testing (testing for vulnerabilities) of your web application, we recommend the following tools:

+
+
+ +
+
+ +
+
+
+

1.78. CORS support

+
+

When you are developing Javascript client and server application separately, you have to deal with cross domain issues. We have to send requests from an origin domain distinct from the target domain, and the browser does not allow this by default.

+
+
+

So, we need to prepare the server side to accept requests from other domains. We need to cover the following points:

+
+
+
    +
  • +

    Accept request from other domains.

    +
  • +
  • +

    Accept devonfw used headers like X-CSRF-TOKEN or correlationId.

    +
  • +
  • +

    Be prepared to receive secured request (cookies).

    +
  • +
+
+
+

It is important to note that if you are using security in your request (sending cookies) you have to set withCredentials flag to true in your client side request and deal with special IE8 characteristics.

+
+
+

For more information about CORS see here. Information about the CORS headers can be found here.

+
+
+
+

1.79. Configuring CORS support

+
+

To enable CORS support for your application, see the advanced guides. For Spring applications see here. For Quarkus follow the official Quarkus guide.

+
+
+
+

1.80. Configuration with service mesh

+
+

If you are using a service mesh, you can also define your CORS policy directly there. Here is an example from Istio.

+
+
+ +
+
+

1.81. Java Development Kit

+
+

The Java Development Kit is an implementation of the Java platform. It provides the Java Virtual Machine (JVM) and the Java Runtime Environment (JRE).

+
+
+
+

1.82. Editions

+
+

The JDK exists in different editions:

+
+
+ +
+
+

As Java is evolving and also complex, maintaining a JVM requires a lot of energy. +Therefore many alternative JDK editions are unable to cope with this and support latest Java versions and according compatibility. +Unfortunately OpenJDK only maintains a specific version of Java for a relatively short period of time before moving to the next major version. +In the end, this technically means that OpenJDK is continuous beta and can not be used in production for reasonable software projects. +As OracleJDK changed its licensing model and can not be used for commercial usage even during development, things can get tricky. +You may want to use OpenJDK for development and OracleJDK only in production. +However, e.g. OpenJDK 11 never released a version that is stable enough for reasonable development (e.g. the javadoc tool is broken and fixes are not available for OpenJDK 11 - fixed in 11.0.3 which is only available as OracleJDK 11 or you need to go to OpenJDK 12+, which has other bugs) so in the end there is no working release of OpenJDK 11. +This more or less forces you to use OracleJDK which requires you to buy a subscription so you can use it for commercial development. +However, there is AdoptOpenJDK that provides forked releases of OpenJDK with bug-fixes which might be an option. +Anyhow, as you want to have your development environment close to production, the productively used JDK (most likely OracleJDK) should be preferred also for development.

+
+
+
+

1.83. Upgrading

+
+

Until Java 8 compatibility was one of the key aspects for Java version updates (after the mess on the Swing updates with Java2 many years ago). +However, Java 9 introduced a lot of breaking changes. +This documentation wants to share the experience we collected in devonfw when upgrading from Java 8 to newer versions. +First of all we separate runtime changes that you need if you want to build your software with JDK 8 but such that it can also run on newer versions (e.g. JRE 11) +from changes required to also build your software with more recent JDKs (e.g. JDK 11 or 12).

+
+
+
Runtime Changes
+
+

This section describes required changes to your software in order to make it run also with versions newer than Java 8.

+
+
+
Classes removed from JDK
+
+

The first thing that most users hit when running their software with newer Java versions is a ClassNotFoundException like this:

+
+
+
+
Caused by: java.lang.ClassNotFoundException: javax.xml.bind.JAXBException
+
+
+
+

As Java 9 introduced a module system with Jigsaw, the JDK that has been a monolithic mess is now a well-defined set of structured modules. +Some of the classes that used to come with the JDK moved to modules that were not available by default in Java 9 and have even been removed entirely in later versions of Java. +Therefore you should simply treat such code just like any other 3rd party component that you can add as a (maven) dependency. +The following table gives you the required hints to make your software work even with such classes / modules removed from the JDK (please note that the specified version is just a suggestion that worked, feel free to pick a more recent or more appropriate version):

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 8. Dependencies for classes removed from Java 8 since 9+
ClassGroupIdArtifactIdVersion

javax.xml.bind.*

javax.xml.bind

jaxb-api

2.3.1

com.sun.xml.bind.*

org.glassfish.jaxb

jaxb-runtime

2.3.1

java.activation.*

javax.activation

javax.activation-api

1.2.0

java.transaction.*

javax.transaction

javax.transaction-api

1.2

java.xml.ws.*

javax.xml.ws

jaxws-api

2.3.1

javax.jws.*

javax.jws

javax.jws-api

1.1

javax.annotation.*

javax.annotation

javax.annotation-api

1.3.2

+
+
+
3rd Party Updates
+
+

Further, internal and unofficial APIs (e.g. sun.misc.Unsafe) have been removed. +These are typically not used by your software directly but by low-level 3rd party libraries like asm that need to be updated. +Also simple things like the Java version have changed (from 1.8.x to 9.x, 10.x, 11.x, 12.x, etc.). +Some 3rd party libraries were parsing the Java version in a very naive way making them unable to be used with Java 9+:

+
+
+
+
Caused by: java.lang.NullPointerException
+   at org.apache.maven.surefire.shade.org.apache.commons.lang3.SystemUtils.isJavaVersionAtLeast (SystemUtils.java:1626)
+
+
+
+

Therefore the following table gives an overview of common 3rd party libraries that have been affected by such breaking changes and need to be updated to at least the specified version:

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 9. Minimum recommended versions of common 3rd party for Java 9+
GroupIdArtifactIdVersionIssue

org.apache.commons

commons-lang3

3.7

LANG-1365

cglib

cglib

3.2.9

102, 93, 133

org.ow2.asm

asm

7.1

2941

org.javassist

javassist

3.25.0-GA

194, 228, 246, 171

+
+
+
ResourceBundles
+
+

For internationalization (i18n) and localization (l10n) ResourceBundle is used for language and country specific texts and configurations as properties (e.g. MyResourceBundle_de.properties). With Java modules there are changes and impacts you need to know to get things working. The most important change is documented in the JavaDoc of ResourceBundle. However, instead of using ResourceBundleProvider and refactoring your entire code causing incompatibilities, you can simply put the resource bundles in a regular JAR on the classpath rather than a named module (or into the launching app). +If you want to implement (new) Java modules with i18n support, you can have a look at mmm-nls.

+
+
+
+
+
Buildtime Changes
+
+

If you also want to change your build to work with a recent JDK you also need to ensure that test frameworks and maven plugins properly support this.

+
+
+
Findbugs
+
+

Findbugs does not work with Java 9+ and is actually a dead project. +The new findbugs is SpotBugs. +For maven the new solution is spotbugs-maven-plugin:

+
+
+
+
<plugin>
+  <groupId>com.github.spotbugs</groupId>
+  <artifactId>spotbugs-maven-plugin</artifactId>
+  <version>3.1.11</version>
+</plugin>
+
+
+
+
+
Test Frameworks
+ + ++++++ + + + + + + + + + + + + + + + + +
Table 10. Minimum recommended versions of common 3rd party test frameworks for Java 9+
GroupIdArtifactIdVersionIssue

org.mockito

mockito-core

2.23.4

1419, 1696, 1607, 1594, 1577, 1482

+
+
+
Maven Plugins
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 11. Minimum recommended versions of common maven plugins for Java 9+
GroupIdArtifactId(min.) VersionIssue

org.apache.maven.plugins

maven-compiler-plugin

3.8.1

x

org.apache.maven.plugins

maven-surefire-plugin

2.22.2

SUREFIRE-1439

org.apache.maven.plugins

maven-surefire-report-plugin

2.22.2

SUREFIRE-1439

org.apache.maven.plugins

maven-archetype-plugin

3.1.0

x

org.apache.maven.plugins

maven-javadoc-plugin

3.1.0

x

org.jacoco

jacoco-maven-plugin

0.8.3

663

+
+
+
Maven Usage
+
+

With Java modules you can not run Javadoc standalone anymore or you will get this error when running mvn javadoc:javadoc:

+
+
+
+
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-javadoc-plugin:3.1.1:javadoc (default-cli) on project mmm-base: An error has occurred in Javadoc report generation:
+[ERROR] Exit code: 1 - error: module not found: io.github.mmm.base
+[ERROR]
+[ERROR] Command line was: /projects/mmm/software/java/bin/javadoc @options @packages @argfile
+
+
+
+

As a solution or workaround you need to include the compile goal into your build lifecycle so the module-path is properly configured:

+
+
+
+
mvn compile javadoc:javadoc
+
+
+
+
+
+
+ +
+

We want to give credits and say thanks to the following articles that have been there before and helped us on our way:

+
+ +
+ +
+
+

1.85. JEE

+
+

This section is about Java Enterprise Edition (JEE). +According to our key principles we focus on open standards. +For Java this means that we consider official standards from Java Standard and Enterprise Edition as first choice for considerations. +Therefore we also decided to recommend JAX-RS over SpringMVC as the latter is proprietary. +Only if an existing Java standard is not suitable for current demands such as Java Server Faces (JSF), we do not officially recommend it (while you are still free to use it if you have good reasons to do so). +In all other cases we officially suggest the according standard and use it in our guides, code-samples, sample application, modules, templates, etc. +Examples for such standards are JPA, JAX-RS, JAX-WS, JSR330, JSR250, JAX-B, etc.

+
+
+
+

1.86. Application-Server

+
+

We designed everything based on standards to work with different technology stacks and servlet containers. +However, we strongly encourage to use modern and lightweight frameworks such as spring or quarkus. +You are free to decide for a JEE application server but here is a list of good reasons for our decision:

+
+
+
    +
  • +

    Up-to-date

    +
    +

    With spring or quarkus you easily keep up to date with evolving technologies (microservices, reactive, NoSQL, etc.). +Most application servers put you in a jail with old legacy technology. +In many cases you are even forced to use a totally outdated version of java (JVM/JDK). +This may even cause severe IT-Security vulnerabilities but with expensive support you might get updates. +Also with lightweight open-source frameworks you need to be aware that for IT-security you need to update frequently, which can cost quite a lot of additional maintenance effort.

    +
    +
  • +
  • +

    Development speed

    +
    +

    With spring-boot you can implement and especially test your individual logic very fast. Starting the app in your IDE is very easy, fast, and realistic (close to production). You can easily write JUnit tests that startup your server application to e.g. test calls to your remote services via HTTP fast and easy. For application servers you need to bundle and deploy your app what takes more time and limits you in various ways. We are aware that this has improved in the past but also spring continuously improves and is always way ahead in this area. Further, with spring you have your configurations bundled together with the code in version control (still with ability to handle different environments) while with application servers these are configured externally and can not be easily tested during development.

    +
    +
  • +
  • +

    Documentation

    +
    +

    Spring and also quarkus have an extremely open and active community. +There is documentation for everything available for free on the web. +You will find solutions to almost any problem on platforms like stackoverflow. +If you have a problem you are only a google search away from your solution. +This is very much different for proprietary application server products.

    +
    +
  • +
  • +

    Helpful Exception Messages

    +
    +

    Especially spring is really great for developers on exception messages. +If you do something wrong you get detailed and helpful messages that guide you to the problem or even the solution. +This is not as great in application servers.

    +
    +
  • +
  • +

    Future-proof

    +
    +

    Spring has evolved really awesome over time. +Since its 1.0 release in 2004 spring has continuously been improved and always caught up with important trends and innovations. +Even in critical situations, when the company behind it (interface21) was sold, spring went on perfectly. +Quarkus on the other hand is relatively new. +It does not have to carry a large legacy history and is therefore most state-of-the-art for modern projects esp. in cloud environments. +JEE went through a lot of trouble and crisis. +Just look at the EJB pain stories. +This happened often in the past and also recently. +See JEE 8 in crisis.

    +
    +
  • +
  • +

    Free

    +
    +

    Spring and quarkus including their ecosystems are free and open-source. +They still perfectly integrate with commercial solutions for specific needs. +Most application servers are commercial and cost a lot of money. +As of today the ROI for this is in question.

    +
    +
  • +
  • +

    Cloud-native

    +
    +

    Quarkus is designed for cloud-native projects from the start. +With spring this is also available via spring-native. +Using an application server will effectively prevent you from going to the cloud smoothly.

    +
    +
  • +
  • +

    Fun

    +
    +

    If you go to conferences or ask developers you will see that spring or quarkus is popular and fun. +If new developers are forced to use an old application server product they will be less motivated or even get frustrated. +Especially in today’s agile projects this is a very important aspect. +In the end you will get into trouble with maintenance on the long run if you rely on a proprietary application server.

    +
    +
  • +
+
+
+

Of course the vendors of application servers will tell you a different story. +This is simply because they still make a lot of money from their products. +We do not get paid from application servers nor from spring, quarkus or any other IT product company. +We are just developers who love to build great systems. +A good reason for application servers is that they combine a set of solutions to particular aspects to one product that helps to standardize your IT. +However, devonfw fills exactly this gap for the spring and quarkus ecosystems in a very open and flexible way. +However, there is one important aspect that you need to understand and be aware of:

+
+
+

Some big companies decided for a specific application server as their IT strategy. +They may have hundreds of apps running with this application server. +All their operators and developers have learned a lot of specific skills for this product and are familiar with it. +If you are implementing yet another (small) app in this context it could make sense to stick with this application server. +However, also they have to be aware that with every additional app they increase their technical debt. +So actively help your customer and consult him to make the right choices for the future.

+
+
+ +
+
+

1.87. Validation

+
+

Validation is about checking syntax and semantics of input data. Invalid data is rejected by the application. +Therefore validation is required in multiple places of an application. E.g. the GUI will do validation for usability reasons to assist the user, early feedback and to prevent unnecessary server requests. +On the server-side validation has to be done for consistency and security.

+
+
+

In general we distinguish these forms of validation:

+
+
+
    +
  • +

    stateless validation will produce the same result for given input at any time (for the same code/release).

    +
  • +
  • +

    stateful validation is dependent on other states and can consider the same input data as valid in one case and as invalid in another.

    +
  • +
+
+
+
+

1.88. Stateless Validation

+
+

For regular, stateless validation we use the JSR303 standard that is also called bean validation (BV). +Details can be found in the specification. +As implementation we recommend hibernate-validator.

+
+
+
Example
+
+

A description of how to enable BV for spring applications can be found in the relevant Spring documentation. A guide you can use to integrate validation in Quarkus applications can be found here. For a quick summary follow these steps:

+
+
+
    +
  • +

    Make sure that hibernate-validator is located in the classpath by adding a dependency to the pom.xml.

    +
  • +
+
+
+
Listing 9. spring
+
+
    <dependency>
+      <groupId>org.hibernate</groupId>
+      <artifactId>hibernate-validator</artifactId>
+    </dependency>
+
+
+
+
Listing 10. quarkus
+
+
    <dependency>
+      <groupId>io.quarkus</groupId>
+      <artifactId>quarkus-hibernate-validator</artifactId>
+    </dependency>
+
+
+
+
    +
  • +

    For methods to validate go to their declaration and add constraint annotations to the method parameters.

    +
    +

    In spring applications you can add the @Validated annotation to the implementation (spring bean) to be validated (this is an annotation of the spring framework, so it's not available in the Quarkus context). The standard use case is to annotate the logic layer implementation, i.e. the use case implementation or component facade in case of simple logic layer pattern. Thus, the validation will be executed for service requests as well as batch processing.

    +
    +
    +
      +
    • +

      @Valid annotation to the arguments to validate (if that class itself is annotated with constraints to check).

      +
    • +
    • +

      @NotNull for required arguments.

      +
    • +
    • +

      Other constraints (e.g. @Size) for generic arguments (e.g. of type String or Integer). However, consider to create custom datatypes and avoid adding too much validation logic (especially redundant in multiple places).

      +
    • +
    +
    +
  • +
+
+
+
Listing 11. BookingmanagementRestServiceImpl.java
+
+
@Validated
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+  ...
+  public BookingEto saveBooking(@Valid BookingCto booking) {
+  ...
+
+
+
+
    +
  • +

    Finally add appropriate validation constraint annotations to the fields of the ETO class.

    +
  • +
+
+
+
Listing 12. BookingCto.java
+
+
  @Valid
+  private BookingEto booking;
+
+
+
+
Listing 13. BookingEto.java
+
+
  @NotNull
+  @Future
+  private Timestamp bookingDate;
+
+
+
+

A list with all bean validation constraint annotations available for hibernate-validator can be found here. In addition it is possible to configure custom constraints. Therefore it is necessary to implement an annotation and a corresponding validator. A description can also be found in the Spring documentation or with more details in the hibernate documentation.

+
+
+ + + + + +
+ + +Bean Validation in Wildfly >v8: Wildfly v8 is the first version of Wildfly implementing the JEE7 specification. It comes with bean validation based on hibernate-validator out of the box. In case someone is running Spring in Wildfly for whatever reasons, the spring based annotation @Validated would duplicate bean validation at runtime and thus should be omitted. +
+
+
+
+
GUI-Integration
+
+

TODO

+
+
+
+
Cross-Field Validation
+
+

BV has poor support for this. Best practice is to create and use beans for ranges, etc. that solve this. A bean for a range could look like so:

+
+
+
+
public class Range<V extends Comparable<V>> {
+
+  private V min;
+  private V max;
+
+  public Range(V min, V max) {
+
+    super();
+    if ((min != null) && (max != null)) {
+      int delta = min.compareTo(max);
+      if (delta > 0) {
+        throw new ValueOutOfRangeException(null, min, min, max);
+      }
+    }
+    this.min = min;
+    this.max = max;
+  }
+
+  public V getMin() ...
+  public V getMax() ...
+
+
+
+
+
+

1.89. Stateful Validation

+
+

For complex and stateful business validations we do not use BV (possible with groups and context, etc.) but follow KISS and just implement this on the server in a straight forward manner. +An example is the deletion of a table in the example application. Here the state of the table must be checked first:

+
+
+

BookingmanagementImpl.java

+
+
+
+
  private void sendConfirmationEmails(BookingEntity booking) {
+
+    if (!booking.getInvitedGuests().isEmpty()) {
+      for (InvitedGuestEntity guest : booking.getInvitedGuests()) {
+        sendInviteEmailToGuest(guest, booking);
+      }
+    }
+
+    sendConfirmationEmailToHost(booking);
+  }
+
+
+
+

Implementing this small check with BV would be a lot more effort.

+
+
+ +
+
+

1.90. Bean-Mapping

+
+

For decoupling, you sometimes need to create separate objects (beans) for a different view. E.g. for an external service, you will use a transfer-object instead of the persistence entity so internal changes to the entity do not implicitly change or break the service.

+
+
+

Therefore you have the need to map similar objects, which creates a copy. This also has the benefit that modifications to the copy have no side-effect on the original source object. However, to implement such mapping code by hand is very tedious and error-prone (if new properties are added to beans but not to mapping code):

+
+
+
+
public UserEto mapUser(UserEntity source) {
+  UserEto target = new UserEto();
+  target.setUsername(source.getUsername());
+  target.setEmail(source.getEmail());
+  ...
+  return target;
+}
+
+
+
+

Therefore we are using a BeanMapper for this purpose that makes our lives a lot easier. +There are several bean mapping frameworks with different approaches.

+
+
+

For a devon4j-spring application we recommend Orika, follow Spring Bean-Mapping for an introduction to Orika and Dozer in a devon4j-spring context application.

+
+
+ + + + + +
+ + +devon4j started with Dozer as framework for Spring applications and still supports it. However, we now recommend Orika (for new projects) as it is much faster (see Performance of Java Mapping Frameworks). +
+
+
+

For a Quarkus application we recommend Mapstruct, follow Quarkus Bean-Mapping for an introduction to Mapstruct in a quarkus context application.

+
+ +
+
+

1.91. Lombok

+
+

Lombok is a library that works with an annotation processor and will generate code for you to save you some time and reduce the amount of boilerplate code in your project. Lombok can generate getter and setter, equals methods, automate your logging variables for your classes, and more. Follow the list of all the features provided by Lombok to get an overview.

+
+
+
+

1.92. Lombok Dependency

+
+

To get access to the Lombok library just add the following dependency to the POM.xml.

+
+
+

The Lombok dependency:

+
+
+
+
<dependency>
+	<groupId>org.projectlombok</groupId>
+	<artifactId>lombok</artifactId>
+	<version>1.18.20</version>
+</dependency>
+
+
+
+

To get Lombok working with your current IDE you should also install the Lombok addon. Follow the Eclipse installation guide, there are also guides for other supported IDEs.

+
+
+
+

1.93. Lombok with Mapstruct

+
+

MapStruct takes advantage of generated getters, setters, and constructors from Lombok and uses them to +generate the mapper implementations. Lombok is also an annotation processor and since version 1.18.14 both frameworks are working together. Just add the lombok-mapstruct-binding to your POM.xml.

+
+
+

The Lombok annotation processor and the lombok-mapstruct-binding

+
+
+
+
<dependency>
+	<groupId>org.projectlombok</groupId>
+	<artifactId>lombok-mapstruct-binding</artifactId>
+	<version>0.2.0</version>
+</dependency>
+
+<plugin>
+	<groupId>org.apache.maven.plugins</groupId>
+	<artifactId>maven-compiler-plugin</artifactId>
+	<version>3.8.1</version>
+	<configuration>
+		<source>1.8</source>
+		<target>1.8</target>
+		<annotationProcessorPaths>
+			<path>
+				<groupId>org.projectlombok</groupId>
+				<artifactId>lombok</artifactId>
+				<version>1.18.4</version>
+			</path>
+			<path>
+				<groupId>org.projectlombok</groupId>
+				<artifactId>lombok-mapstruct-binding</artifactId>
+				<version>0.2.0</version>
+			</path>
+		</annotationProcessorPaths>
+	</configuration>
+</plugin>
+
+
+
+

In our quarkus reference project you can get a look into the usage of both frameworks.

+
+
+
+

1.94. Lombok Usage

+
+

Lombok can be used like any other annotation processor and will be shown in the simple example below to generate getter and setter for a Product Entity.

+
+
+
+
@Getter
+@Setter
+public class Product{
+
+    private String title;
+    private String description;
+    private BigDecimal price;
+}
+
+
+
+

For advanced Lombok usage follow the Baeldung Lombok guide or just read the Lombok javadoc

+
+
+ +
+
+

1.95. OpenAPI

+
+

The OpenAPI Specification (OAS) defines a standard for describing RESTful web services in a machine- and human-readable format. OpenAPI allows REST APIs to be defined in a uniform manner. +Technically, an OpenAPI document is written in YAML or JSON format. The specification defines the structure of a REST API by describing attributes such as path information, response codes, and return types. Some examples can be found here.

+
+
+

OpenAPI is often used in combination with Swagger. Swagger is a set of tools built around OpenAPI, that help developers to design and document their REST APIs. +The most common tool is the Swagger UI, which uses the OpenAPI specification to create a graphical interface of the REST API that you can also interact with. Check out the Swagger online editor to get a feeling for it.

+
+
+
+

1.96. OpenAPI generation

+
+

There are several extensions you can use in your project to automatically generate the OpenAPI specifications and Swagger UI from your REST API (code-first approach). devon4j recommends the following two extensions/plugins to use:

+
+
+
    +
  • +

    Smallrye OpenAPI extension

    +
  • +
  • +

    ServicedocGen maven plugin

    +
  • +
+
+
+
Smallrye OpenAPI
+
+

Quarkus provides OpenAPI support through Smallrye OpenAPI extension:

+
+
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-smallrye-openapi</artifactId>
+</dependency>
+
+
+
+

After adding the extension to your project, you can access the Swagger UI by navigating to /q/swagger-ui.

+
+
+

The OpenAPI specification can be accessed by requesting /q/openapi.

+
+
+

Smallrye OpenAPI is compliant with MicroProfile OpenAPI. You can add MicroProfile annotations to further describe your REST endpoints and extend the OpenAPI documentation. +More information for this can be found here or here.

+
+
+ + + + + +
+ + +
+

Quarkus recommends using this extension and you can document your APIs in great detail by using the MicroProfile annotations. The downside to this is that using these annotations will blow up your code and you will have some duplicate information in it. +If you don’t want to specify the REST API again with all this annotation based information, we also recommend taking a look at the ServicedocGen Maven plugin for your Quarkus applications when implementing JAX-RS APIs.

+
+
+
+
+
+
ServicedocGen Maven Plugin
+
+

The ServicedocGen maven plugin can be used within both Spring and Quarkus applications. +It works a bit differently than the Smallrye extensions mentioned above. The plugin analyzes the REST API and its JavaDoc and then generates the OpenAPI specification and the Swagger UI as static files. So no Swagger or MicroProfile annotations have to be added.

+
+
+

The plugin can be configured in the pom.xml file of your application as follows:

+
+
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>org.codehaus.mojo</groupId>
+      <artifactId>servicedocgen-maven-plugin</artifactId>
+      <version>1.0.0</version>
+      <executions>
+        <execution>
+          <goals>
+            <goal>generate</goal>
+          </goals>
+        </execution>
+      </executions>
+      <configuration>
+        <descriptor>
+          <info>
+            <title>...</title>
+            <description>...</description>
+          </info>
+          <host>...</host>
+          <port>...</port>
+          <basePath>...</basePath>
+          <schemes>
+            <scheme>...</scheme>
+          </schemes>
+        </descriptor>
+      </configuration>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+

In the configuration section you have to define additional information to generate the OpenAPI specification correctly. An example can be found in our Quarkus reference application. +When building the application, an OpenApi.yaml and a SwaggerUI.html file are created in the /target/site folder. To make the Swagger UI available in the browser, the file must be served by some servlet.

+
+
+ +
+
+
+

1.97. Spring

+
+

Spring is the most famous and established Java framework. +It is fully supported by devonfw as an option and alternative to quarkus.

+
+
+
+

1.98. Guide to the Reader

+
+

Dependent on the intention you are reading this document, you might be most interested in the following chapters:

+
+
+
    +
  • +

    If you are not yet familiar with Spring, you may be interested in pros and cons of Spring. Also take a look at the official Spring website.

    +
  • +
  • +

    If you already have experience developing with Spring but are new to devon4j, take a look at devon4j’s recommendations on general best practices. Check out the chapters on architecture design, project structuring and coding conventions. Follow the referenced links to go deeper into a topic.

    +
  • +
  • +

    If you have already developed with devon4j and Spring and need more information on a specific topic, check out the devon4j guides for Spring. If you don’t find what you are looking for there, check out the general section. devon4j uses general solutions for Java, so solutions for both Spring and Quarkus are documented there.

    +
  • +
  • +

    If you want to get started or create your first Spring application using devon4j, check out the guide about creating a new application or the Jump the Queue and My Thai Star reference applications.

    +
  • +
+
+
+
+

1.99. Pros

+
+

Spring offers the following benefits:

+
+
+
    +
  • +

    highly flexible
    +Spring is famous for its great flexibility. You can customize and integrate nearly everything.

    +
  • +
  • +

    well established
    +While JEE application servers including very expensive commercial products turned out to be a dead-end, spring has guided projects through the changing trends of IT throughout decades. It may be the framework with the longest history track and popularity. As a result you can easily find developers, experts, books, articles, etc. about spring.

    +
  • +
  • +

    non-invasive and not biased
    +Spring became famous for its non-invasive coding based on patterns instead of hard dependencies. It gives you a lot of freedom and avoids tight coupling of your (business) code.

    +
  • +
+
+
+

See Why Spring? for details.

+
+
+
+

1.100. Cons

+
+

Spring has the following drawbacks:

+
+
+
    +
  • +

    history and legacy
    +Due to the pro of its long established history, spring also carries a lot of legacy. As a result there are many ways to do the same thing while some options may be discouraged. Developers needs some guidance (e.g. via devon4j) as they may enter pitfalls and dead-ends when choosing the first solution they found on google or stackoverflow.

    +
  • +
  • +

    lost lead in cloud-native
    +While for the last decades spring was leading innovation in Java app development, it seems that with the latest trends and shift such as cloud-native, they have been overtaken by frameworks like quarkus. However, spring is trying to catch up with spring-native.

    +
  • +
+
+
+
+

1.101. Spring-Boot

+
+

Spring-boot is a project and initiative within the spring-ecosystem that brought a lot of innovation and simplification into app development on top of spring. +As of today we typically use the terms spring and spring-boot rather synonymously as we always use spring together with spring-boot.

+
+
+
+

1.102. Spring-Native

+
+

Spring-native adds cloud-native support to the spring ecosystem and allows to build a spring app as cloud-native image via GraalVM. +This feature is currently beta. +You may also consider quarkus if you are interested in building cloud-native images.

+
+ +
+

1.102.1. Components

+
+

Following separation-of-concerns we divide an application into components using our package-conventions and project structure. +As described by the architecture each component is divided into layers as described in the project structure. +Please note that a component will only have the required layers. +So a component may have any number from one to all layers.

+
+
+
+

1.102.2. General Component

+
+

Cross-cutting aspects belong to the implicit component general. It contains technical configurations and very general code that is not business specific. Such code shall not have any dependencies to other components and therefore business related code.

+
+
+
+

1.102.3. Business Component

+
+

The business-architecture defines the business components with their allowed dependencies. A small application (microservice) may just have one component and no dependencies making it simple while the same architecture can scale up to large and complex applications (from bigger microservice up to modulith). +Tailoring a business domain into applications and applications into components is a tricky task that needs the skills of an experienced architect. +Also, the tailoring should follow the business and not split by technical reasons or only by size. +Size is only an indicator but not a driver of tailoring. +Whatever hypes like microservices are telling you, never get misled in this regard: +If your system grows and reaches MAX+1 lines of code, it is not the right motivation to split it into two microservices of ~MAX/2 lines of code - such approaches will waste huge amounts of money and lead to chaos.

+
+
+
+

1.102.4. App Component

+
+

Only in case you need cross-cutting code that aggregates another component you may introduce the component app. +It is allowed to depend on all other components but no other component may depend on it. +With the modularity and flexibility of spring you typically do not need this. +However, when you need to have a class that registers all services or component-facades using direct code dependencies, you can introduce this component.

+
+
+
+

1.102.5. Component Example

+
+

The following class diagram illustrates an example of the business component Staffmanagement:

+
+
+
+logic layer component pattern +
+
+
+

In this scheme, you can see the structure and flow from the service-layer (REST service call) via the logic-layer to the dataaccess-layer (and back).

+
+ +
+
+

1.102.6. Classic project structure

+
+

In this section we describe the classic project structure as initially proposed for Java in devonfw. +It is still valid and fully supported. +However, if you want to start a new project, please consider using the modern structure.

+
+
+
+

1.102.7. Modules

+
+

The structure of a devon4j application is divided into the following modules:

+
+
+
    +
  • +

    api: module containing the API of your application. The API contains the required artifacts to interact with your application via remote services. This can be REST service interfaces, transfer-objects with their interfaces and datatypes but also OpenAPI or gRPC contracts.

    +
  • +
  • +

    core: maven module containing the core of the application with service implementation, as well as the entire logic layer and dataaccess layer.

    +
  • +
  • +

    batch: optional module for batch layer

    +
  • +
  • +

    server: module that bundles the entire app (core with optional batch) typically as a bootified WAR file.

    +
  • +
+
+
+
+

1.102.8. Deployment

+
+
+
+

Make jar not war

+
+
+
+— Josh Long +
+
+
+

First of all it is important to understand that the above defined modules aim to make api, core, and batch reusable artifacts, that can be used as a regular maven dependency. +On the other hand to build and deploy your application you want a final artifact that is containing all required 3rd party libraries. +This artifact is not reusable as a maven dependency. +That is exactly the purpose of the server module to build and package this final deployment artifact. +By default we first build a regular WAR file with maven in your server/target directory (*-server-«version».war) and in a second step create a bootified WAR out of this (*-server-bootified.war). +The bootified WAR file can then be started standalone (java -jar «filename».war). +However, it is also possible to deploy the same WAR file to a servlet container like tomcat or jetty. +As application servers and externally provided servlet containers are not recommended anymore for various reasons (see JEE), you may also want to create a bootified JAR file instead. +All you need to do in that case is to change the packaging in your server/pom.xml from war to jar.

+
+
+
+

1.102.9. Package Structure

+
+

The package structure of your code inside src/main/java (and src/test/java) of your modules is described in our coding conventions in the sections packages. A full mapping of the architecture and the different code elements to the packaging is described in the following section.

+
+
+
+

1.102.10. Layers

+
+

The package structure of your code inside src/main/java (and src/test/java) of your app is described in our coding conventions in the sections packages. +The following table describes our classic approach for packaging and layering:

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 12. Traditional generic devon4j layers
Layer«layer»

service

service

logic

logic

data-access

dataaccess

batch (optional)

batch

client (optional)

client

common

common

+
+
+

1.102.11. Architecture Mapping

+
+

In order to help you to map the architecture, packaging, layering, etc. to the code and see where different code elements should be placed, +we provide this architecture mapping:

+
+
+
+
«root»
+├──.«component»
+|  ├──.common
+|  |  ├──.api[.«detail»]
+|  |  |  ├──.datatype
+|  |  |  |  └──.«Datatype» (api)
+|  |  |  └──.«BusinessObject» (api)
+|  |  └──.impl[.«detail»]
+|  |     ├──.«Aspect»ConfigProperties (core)
+|  |     ├──.«Datatype»JsonSerializer (core)
+|  |     └──.«Datatype»JsonDeserializer (core)
+|  ├──.dataaccess
+|  |  ├──.api[.«detail»]
+|  |  |  ├──.repo
+|  |  |  |  └──.«BusinessObject»Repository (core)
+|  |  |  ├──.dao (core) [alternative to repo]
+|  |  |  |  └──.«BusinessObject»Dao (core) [alternative to Repository]
+|  |  |  └──.«BusinessObject»Entity (core)
+|  |  └──.impl[.«detail»]
+|  |     ├──.dao (core) [alternative to repo]
+|  |     |  └──.«BusinessObject»DaoImpl (core) [alternative to Repository]
+|  |     └──.«Datatype»AttributeConverter (core)
+|  ├──.logic
+|  |  ├──.api
+|  |  |  ├──.[«detail».]to
+|  |  |  |   ├──.«MyCustom»«To (api)
+|  |  |  |   ├──.«DataStructure»Embeddable (api)
+|  |  |  |   ├──.«BusinessObject»Eto (api)
+|  |  |  |   └──.«BusinessObject»«Subset»Cto (api)
+|  |  |  ├──.[«detail».]usecase
+|  |  |  |   ├──.UcFind«BusinessObject» (core)
+|  |  |  |   ├──.UcManage«BusinessObject» (core)
+|  |  |  |   └──.Uc«Operation»«BusinessObject» (core)
+|  |  |  └──.«Component» (core)
+|  |  ├──.base
+|  |  |  └──.[«detail».]usecase
+|  |  |     └──.Abstract«BusinessObject»Uc (core)
+|  |  └──.impl
+|  |     ├──.[«detail».]usecase
+|  |     |   ├──.UcFind«BusinessObject»Impl (core)
+|  |     |   ├──.UcManage«BusinessObject»Impl (core)
+|  |     |   └──.Uc«Operation»«BusinessObject»Impl (core)
+|  |     └──.«Component»Impl (core)
+|  └──.service
+|     ├──.api[.«detail»]
+|     |  ├──.rest
+|     |  |  └──.«Component»RestService (api)
+|     |  └──.ws
+|     |     └──.«Component»WebService (api)
+|     └──.impl[.«detail»]
+|        ├──.jms
+|        |  └──.«BusinessObject»JmsListener (core)
+|        ├──.rest
+|        |  └──.«Component»RestServiceImpl (core)
+|        └──.ws
+|           └──.«Component»WebServiceImpl (core)
+├──.general
+│  ├──.common
+│  |  ├──.api
+|  |  |  ├──.to
+|  |  |  |  ├──.AbstractSearchCriteriaTo (api)
+|  |  |  └──.ApplicationEntity
+│  |  ├──.base
+|  |  |  └──.AbstractBeanMapperSupport (core)
+│  |  └──.impl
+│  |     ├──.config
+│  |     |  └──.ApplicationObjectMapperFactory (core)
+│  |     └──.security
+│  |        └──.ApplicationWebSecurityConfig (core)
+│  ├──.dataaccess
+│  |  └──.api
+|  |     └──.ApplicationPersistenceEntity (core)
+│  ├──.logic
+│  |  └──.base
+|  |     ├──.AbstractComponentFacade (core)
+|  |     ├──.AbstractLogic (core)
+|  |     └──.AbstractUc (core)
+|  └──.service
+|     └──...
+└──.SpringBootApp (core)
+
+
+
+
+
+
+

1.103. Layers

+ +
+
Client Layer
+
+

There are various technical approaches to building GUI clients. The devonfw proposes rich clients that connect to the server via data-oriented services (e.g. using REST with JSON). +In general, we have to distinguish among the following types of clients:

+
+
+
    +
  • +

    web clients

    +
  • +
  • +

    native desktop clients

    +
  • +
  • +

    (native) mobile clients

    +
  • +
+
+
+

Our main focus is on web-clients. In our sample application my-thai-star we offer a responsive web-client based on Angular following devon4ng that integrates seamlessly with the back ends of my-thai-star available for Java using devon4j as well as .NET/C# using devon4net. For building angular clients read the separate devon4ng guide.

+
+
+
+
JavaScript for Java Developers
+
+

In order to get started with client development as a Java developer we give you some hints to get started. Also if you are an experienced JavaScript developer and want to learn Java this can be helpful. First, you need to understand that the JavaScript ecosystem is as large as the Java ecosystem and developing a modern web client requires a lot of knowledge. The following table helps you as experienced developer to get an overview of the tools, configuration-files, and other related aspects from the new world to learn. Also it helps you to map concepts between the ecosystems. Please note that we list the tools recommended by devonfw here (and we know that there are alternatives not listed here such as gradle, grunt, bower, etc.).

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 13. Aspects in JavaScript and Java ecosystem
TopicAspectJavaScriptJava

Programming

Language

TypeScript (extends JavaScript)

Java

Runtime

VM

nodejs (or web-browser)

jvm

Build- & Dependency-Management

Tool

npm or yarn

maven

Config

package.json

pom.xml

Repository

npm repo

maven central (repo search)

Build cmd

ng build or npm run build (goals are not standardized in npm)

mvn install (see lifecycle)

Test cmd

ng test

mvn test

Testing

Test-Tool

jasmine

junit

Test-Runner

karma

junit / surefire

E2E Testing

Protractor

Selenium

Code Analysis

Code Coverage

ng test --no-watch --code-coverage

JaCoCo

Development

IDE

MS VS Code or IntelliJ

Eclipse or IntelliJ

Framework

Angular (etc.)

Spring or Quarkus

+
+ +
+
+
Service Layer
+
+

The service layer is responsible for exposing functionality made available by the logical layer to external consumers over a network via technical protocols.

+
+
+
+
Types of Services
+
+

Before you start creating your services you should consider some general design aspects:

+
+
+
    +
  • +

    Do you want to create a RPC service?

    +
  • +
  • +

    Or is your problem better addressed by messaging or eventing?

    +
  • +
  • +

    Who will consume your service?

    +
    +
      +
    • +

      Do you have one or multiple consumers?

      +
    • +
    • +

      Do web-browsers have to use your service?

      +
    • +
    • +

      Will apps from other vendors or parties have to consume your service that you can not influence if the service may have to change or be extended?

      +
    • +
    +
    +
  • +
+
+
+

For RPC a common choice is REST but there are also interesting alternatives like gRPC. We also have a guide for SOAP but this technology should rather be considered as legacy and is not recommended for new services.

+
+
+

When it comes to messaging in Java the typical answer will be JMS. However, a very promising alternative is Kafka.

+
+
+
+
Versioning
+
+

For RPC services consumed by other applications we use versioning to prevent incompatibilities between applications when deploying updates. This is done by the following conventions:

+
+
+
    +
  • +

    We define a version number and prefix it with v (e.g. v1).

    +
  • +
  • +

    If we support previous versions we use that version numbers as part of the Java package defining the service API (e.g. com.foo.application.component.service.api.v1)

    +
  • +
  • +

    We use the version number as part of the service name in the remote URL (e.g. https://application.foo.com/services/rest/component/v1/resource)

    +
  • +
  • +

    Whenever breaking changes are made to the API, create a separate version of the service and increment the version (e.g. v1 → v2). The implementations of the different versions of the service contain compatibility code and delegate to the same unversioned use-case of the logic layer whenever possible.

    +
  • +
  • +

    For maintenance and simplicity, avoid keeping more than one previous version.

    +
  • +
+
+
+
+
Interoperability
+
+

For services that are consumed by clients with different technology, interoperability is required. This is addressed by selecting the right protocol, following protocol-specific best practices and following our considerations especially simplicity.

+
+
+
+
Service Considerations
+
+

The term service is quite generic and therefore easily misunderstood. It is a unit exposing coherent functionality via a well-defined interface over a network. For the design of a service, we consider the following aspects:

+
+
+
    +
  • +

    self-contained
    +The entire API of the service shall be self-contained and have no dependencies on other parts of the application (other services, implementations, etc.).

    +
  • +
  • +

    idempotence
    +E.g. creation of the same master-data entity has no effect (no error)

    +
  • +
  • +

    loosely coupled
    +Service consumers have minimum knowledge and dependencies on the service provider.

    +
  • +
  • +

    normalized
    +Complete, no redundancy, minimal

    +
  • +
  • +

    coarse-grained
    +Service provides rather large operations (save entire entity or set of entities rather than individual attributes)

    +
  • +
  • +

    atomic
    +Process individual entities (for processing large sets of data, use a batch instead of a service)

    +
  • +
  • +

    simplicity
    +Avoid polymorphism, RPC methods with unique name per signature and no overloading, avoid attachments (consider separate download service), etc.

    +
  • +
+
+
+
+
Security
+
+

Your services are the major entry point to your application. Hence, security considerations are important here.

+
+
+

See REST Security.

+
+ +
+
+
Service-Versioning
+
+

This guide describes the aspect and details about versioning of services

+
+
+
+
Motivation
+
+

Why versioning of services? First of all, you should only care about this topic if you really have to. Service versioning is complex and requires effort (time and budget). The best way to avoid this is to be smart in the first place when designing the service API. +Further, if you are creating services where the only consumer is e.g. the web-client that you deploy together with the consumed services then you can change your service without the overhead to create new service versions and keeping old service versions for compatibility.

+
+
+

However, if the following indicators are given you typically need to do service versioning:

+
+
+
    +
  • +

    Your service is part of a complex and distributed IT landscape

    +
  • +
  • +

    Your service requires incompatible changes

    +
  • +
  • +

    There are many consumers or there is at least one (relevant) consumer that can not be updated at the same time or is entirely out of control (unknown or totally different party/company)

    +
  • +
+
+
+

What are incompatible changes?

+
+
+
    +
  • +

    Almost any change when SOAP is used (as it changes the WSDL and breaks the contract). Therefore, we recommend to use REST instead. Then, only the following changes are critical.

    +
  • +
  • +

    A change where existing properties (attributes) have to change their name

    +
  • +
  • +

    A change where existing features (properties, operations, etc.) have to change their semantics (meaning)

    +
  • +
+
+
+

What changes do not cause incompatibilities?

+
+
+
    +
  • +

    Adding new service operations is entirely uncritical with REST.

    +
  • +
  • +

    Adding new properties is only a problem in the following cases:

    +
    +
      +
    • +

      Adding new mandatory properties to the input of a service is causing incompatibilities. This problem can be avoided by contract-design.

      +
    • +
    • +

      If a consumer is using a service to read data, modify it and then save it back via a service and a property is added to the data, then this property might be lost. This is not a problem with dynamic languages such as JavaScript/TypeScript but with strictly typed languages such as Java. In Java you will typically use structured typed transfer-objects (and not Map<String, Object>) so new properties that have been added but are not known to the consumer can not be mapped to the transfer-object and will be lost. When saving that transfer-object later the property will be gone. It might be impossible to determine the difference between a lost property and a property that was removed on purpose. This is a general problem that you need to be aware of and that you have to consider by your design in such situations.

      +
    • +
    +
    +
  • +
+
+
+

Even if you hit an indicator for incompatible changes you can still think about adding a new service operation instead of changing an existing one (and deprecating the old one). Be creative to simplify and avoid extra effort.

+
+
+
+
Procedure
+
+

The procedure when rolling out incompatible changes is illustrated by the following example:

+
+
+
+
+------+  +------+
+| App1 |  | App2 |
++---+--+  +--+---+
+    |        |
+    +---+----+
+        |
++-------+--------+
+|      Sv1       |
+|                |
+|      App3      |
++----------------+
+
+
+
+

So, here we see a simple example where App3 provides a Service S in Version v1 that is consumed both by App1 and App2.

+
+
+

Now for some reason the service S has to be changed in an incompatible way to make it future-proof for demands. However, upgrading all 3 applications at the same time is not possible in this case for whatever reason. Therefore, service versioning is applied for the changes of S.

+
+
+
+
+------+  +------+
+| App1 |  | App2 |
++---+--+  +--+---+
+    |        |
+    +--------+
+    |
++---+------------+
+|  Sv1  |  Sv2   |
+|                |
+|      App3*     |
++----------------+
+
+
+
+

Now, App3 has been upgraded and the new release was deployed. A new version v2 of S has been added while v1 is still kept for compatibility reasons and that version is still used by App1 and App2.

+
+
+
+
+------+  +------+
+| App1 |  | App2*|
++---+--+  +--+---+
+    |        |
+    |        |
+    |        |
++---+--------+---+
+|  Sv1  |  Sv2   |
+|                |
+|      App3      |
++----------------+
+
+
+
+

Now, App2 has been updated and deployed and it is using the new version v2 of S.

+
+
+
+
+------+  +------+
+| App1*|  | App2 |
++---+--+  +--+---+
+    |        |
+    +--------+
+             |
++------------+---+
+|  Sv1  |  Sv2   |
+|                |
+|      App3      |
++----------------+
+
+
+
+

Now, also App1 has been updated and deployed and it is using the new version v2 of S. The version v1 of S is not used anymore. This can be verified via logging and monitoring.

+
+
+
+
+------+  +------+
+| App1 |  | App2 |
++---+--+  +--+---+
+    |        |
+    +--------+
+             |
++------------+---+
+|          Sv2   |
+|                |
+|      App3*     |
++----------------+
+
+
+
+

Finally, version v1 of the service S was removed from App3 and the new release has been deployed.

+
+
+
+
Versioning Schema
+
+

In general anything can be used to differentiate versions of a service. Possibilities are:

+
+
+
    +
  • +

    Code names (e.g. Strawberry, Blueberry, Grapefruit)

    +
  • +
  • +

    Timestamps (YYYYMMDD-HHmmSS)

    +
  • +
  • +

    Sequential version numbers (e.g. v1, v2, v3)

    +
  • +
  • +

    Composed version numbers (e.g. 1.0.48-pre-alpha-3-20171231-235959-Strawberry)

    +
  • +
+
+
+

As we are following the KISS principle (see key principles) we propose to use sequential version numbers. These are short, clear, and easy while still allowing to see what version is after another one. Especially composed version numbers (even 1.1 vs. 2.0) lead to decisions and discussions that easily waste more time than adding value. It is still very easy to maintain an Excel sheet or release-notes document that is explaining the changes for each version (v1, v2, v3) of a particular service.

+
+
+

We suggest to always add the version schema to the service URL to be prepared for service versioning even if service versioning is not (yet) actively used. For simplicity it is explicitly stated that you may even do incompatible changes to the current version (typically v1) of your service if you can update the according consumers within the same deployment.

+
+
+
+
Practice
+
+

So assuming you know that you have to do service versioning, the question is how to do it practically in the code. +The approach for your devon4j project in case of code-first should be as described below:

+
+
+
    +
  • +

    Determine which types in the code need to be changed. It is likely to be the API and implementation of the according service but it may also impact transfer objects and potentially even datatypes.

    +
  • +
  • +

    Create new packages for all these concerned types containing the current version number (e.g. v1).

    +
  • +
  • +

    Copy all these types to that new packages.

    +
  • +
  • +

    Rename these copies so they carry the version number as suffix (e.g. V1).

    +
  • +
  • +

    Increase the version of the service in the unversioned package (e.g. from v1 to v2).

    +
  • +
  • +

    Now you have two versions of the same service (e.g. v1 and v2) but so far they behave exactly the same.

    +
  • +
  • +

    You start with your actual changes and modify the original files that have been copied before.

    +
  • +
  • +

    You will also ensure the links (import statements) of the copied types point to the copies with the version number

    +
  • +
  • +

    This will cause incompatibilities (and compile errors) in the copied service. Therefore, you need to fix that service implementation to map from the old API to the new API and behavior. In some cases, this may be easy (e.g. mapping x.y.z.v1.FooTo to x.y.z.FooTo using bean-mapping with some custom mapping for the incompatible changes), in other cases this can get very complex. Be aware of this complexity from the start before you make your decision about service versioning.

    +
  • +
  • +

    As far as possible this mapping should be done in the service-layer, not to pollute your business code in the core-layer with versioning-aspects. If there is no way to handle it in the service layer, e.g. you need some data from the persistence-layer, implement the "mapping" in the core-layer then, but don’t forget to remove this code, when removing the old service version.

    +
  • +
  • +

    Finally, ensure that both the old service behaves as before as well as the new service works as planned.

    +
  • +
+
+
+
Modularization
+
+

For modularization, we also follow the KISS principle (see key principles): +we suggest to have one api module per application that will contain the most recent version of your service and get released with every release-version of the application. The compatibility code with the versioned packages will be added to the core module and therefore is not exposed via the api module (because it has already been exposed in the previous release of the app). This way, you can always determine for sure which version of a service is used by another application just by its maven dependencies.

+
+
+

The KISS approach with only a single module that may contain multiple services (e.g. one for each business component) will cause problems when you want to have mixed usages of service versions: You can not use an old version of one service and a new version of another service from the same APP as then you would need to have its API module twice as a dependency on different versions, which is not possible. However, to avoid complicated overhead we always suggest to follow this easy approach. Only if you come to the point that you really need this complexity you can still solve it (even afterwards by publishing another maven artefact). As we are all on our way to build more but smaller applications (SOA, microservices, etc.) we should always start simple and only add complexity when really needed.

+
+
+

The following example gives an idea of the structure:

+
+
+
+
/«my-app»
+├──/api
+|  └──/src/main/java/
+|     └──/«rootpackage»/«application»/«component»
+|        ├──/common/api/to
+|        |  └──FooTo
+|        └──/service/api/rest
+|           └──FooRestService
+└──/core
+   └──/src/main/java/
+      └──«rootpackage»/«application»/«component»
+         ├──/common/api/to/v1
+         |  └──FooToV1
+         └──/service
+            ├──/api/rest/v1
+            |  └──FooRestServiceV1
+            └──impl/rest
+               ├──/v1
+               |  └── FooRestServiceImplV1
+               └──FooRestServiceImpl
+
+
+
+ +
+
+
+
Logic Layer
+
+

The logic layer is the heart of the application and contains the main business logic. +According to our business architecture, we divide an application into components. +For each component, the logic layer defines different use-cases. Another approach is to define a component-facade, which we do not recommend for future application. Especially for quarkus application, we want to simplify things and highly suggest omitting component-facade completely and using use-cases only. +It is very important that you follow the links to understand the concept of use-case in order to properly implement your business logic.

+
+
+
+
Responsibility
+
+

The logic layer is responsible to implement the business logic according to the specified functional demands and requirements. +Therefore, it creates the actual value of the application. The logic layer is responsible for invoking business logic in external systems. +The following additional aspects are also included in its responsibility:

+
+
+ +
+
+
+
Security
+
+

The logic layer is the heart of the application. It is also responsible for authorization and hence security is important here. Every method exposed in an interface needs to be annotated with an authorization check, stating what role(s) a caller must provide in order to be allowed to make the call. The authorization concept is described here.

+
+
+
Direct Object References
+
+

Insecure Direct Object References are a security threat. This simply gives you two options:

+
+
+
    +
  • +

    avoid direct object references

    +
  • +
  • +

    ensure that direct object references are secure

    +
  • +
+
+
+

Especially when using REST, direct object references via technical IDs are common sense. This implies that you have a proper authorization in place. This is especially tricky when your authorization does not only rely on the type of the data and according to static permissions but also on the data itself. Vulnerabilities for this threat can easily happen by design flaws and inadvertence. Here is an example from our sample application:

+
+
+

We have a generic use-case to manage BLOBs. In the first place, it makes sense to write a generic REST service to load and save these BLOBs. However, the permission to read or even update such BLOB depends on the business object hosting the BLOB. Therefore, such a generic REST service would open the door for this OWASP A4 vulnerability. To solve this in a secure way, you need individual services for each hosting business object to manage the linked BLOB and have to check permissions based on the parent business object. In this example the ID of the BLOB would be the direct object reference and the ID of the business object (and a BLOB property indicator) would be the indirect object reference.

+
+ +
+
+
Component Facade
+
+ + + + + +
+ + +Our recommended approach for implementing the logic layer is use-cases +
+
+
+

For each component of the application, the logic layer defines a component facade. +This is an interface defining all business operations of the component. +It carries the name of the component («Component») and has an implementation named «Component»Impl (see implementation).

+
+
+
+
API
+
+

The component facade interface defines the logic API of the component and has to be business oriented. +This means that all parameters and return types of all methods from this API have to be business transfer-objects, datatypes (String, Integer, MyCustomerNumber, etc.), or collections of these. +The API may also only access objects of other business components listed in the (transitive) dependencies of the business-architecture.

+
+
+

Here is an example how such an API may look like:

+
+
+
+
public interface Bookingmanagement {
+
+  BookingEto findBooking(Long id);
+
+  BookingCto findBookingCto(Long id);
+
+  Page<BookingEto> findBookingEtos(BookingSearchCriteriaTo criteria);
+
+  void approveBooking(BookingEto booking);
+
+}
+
+
+
+
+
Implementation
+
+

The implementation of an interface from the logic layer (a component facade or a use-case) carries the name of that interface with the suffix Impl and is annotated with @Named. +An implementation typically needs access to the persistent data. +This is done by injecting the corresponding repository (or DAO). +According to data-sovereignty, only repositories of the same business component may be accessed directly. +For accessing data from other components the implementation has to use the corresponding API of the logic layer (the component facade). Further, it shall not expose persistent entities from the domain layer and has to map them to transfer objects using the bean-mapper.

+
+
+
+
@Named
+@Transactional
+public class BookingmanagementImpl extends AbstractComponentFacade implements Bookingmanagement {
+
+  @Inject
+  private BookingRepository bookingRepository;
+
+  @Override
+  public BookingEto findBooking(Long id) {
+
+    LOG.debug("Get Booking with id {} from database.", id);
+    BookingEntity entity = this.bookingRepository.findOne(id);
+    return getBeanMapper().map(entity, BookingEto.class));
+  }
+}
+
+
+
+

As you can see, entities (BookingEntity) are mapped to corresponding ETOs (BookingEto). +Further details about this can be found in bean-mapping.

+
+ +
+
+
UseCase
+
+

A use-case is a small unit of the logic layer responsible for an operation on a particular entity (business object). +We leave it up to you to decide whether you want to define an interface (API) for each use-case or provide an implementation directly.

+
+
+

Following our architecture-mapping (for classic and modern projects), use-cases are named Uc«Operation»«BusinessObject»[Impl]. The prefix Uc stands for use-case and allows to easily find and identify them in your IDE. The «Operation» stands for a verb that is operated on the entity identified by «BusinessObject». +For CRUD we use the standard operations Find and Manage that can be generated by CobiGen. This also separates read and write operations (e.g. if you want to do CQRS, or to configure read-only transactions for read operations).

+
+
+

In our example, we choose to define an interface for each use-case. We also use *To to refer to any type of transfer object. Please follow our guide to understand more about different types of transfer object e.g. Eto, Dto, Cto

+
+
+
+
Find
+
+

The UcFind«BusinessObject» defines all read operations to retrieve and search the «BusinessObject». +Here is an example:

+
+
+
+
public interface UcFindBooking {
+  //*To = Eto, Dto or Cto
+  Booking*To findBooking(Long id);
+}
+
+
+
+
+
Manage
+
+

The UcManage«BusinessObject» defines all CRUD write operations (create, update and delete) for the «BusinessObject». +Here is an example:

+
+
+
+
public interface UcManageBooking {
+
+  //*To = Eto, Dto or Cto
+  Booking*To saveBooking(Booking*To booking);
+
+  void deleteBooking(Long id);
+
+}
+
+
+
+
+
Custom
+
+

Any other non CRUD operation Uc«Operation»«BusinessObject» uses any other custom verb for «Operation». +Typically, such custom use-cases only define a single method. +Here is an example:

+
+
+
+
public interface UcApproveBooking {
+
+  //*To = Eto, Dto or Cto
+  void approveBooking(Booking*To booking);
+
+}
+
+
+
+
+
Implementation
+
+

The implementation should carry its own name and the suffix Impl and is annotated with @Named and @ApplicationScoped. It will need access to the persistent data which is done by injecting the corresponding repository (or DAO). Furthermore, it shall not expose persistent entities from the data access layer and has to map them to transfer objects using the bean-mapper. Please refer to our bean mapping, transfer object and dependency injection documentation for more information. +Here is an example:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcManageBookingImpl implements UcManageBooking {
+
+  @Inject
+  private BookingRepository bookingRepository;
+
+  @Override
+  public void deleteBooking(Long id) {
+
+    LOG.debug("Delete Booking with id {} from database.", id);
+    this.bookingRepository.deleteById(id);
+  }
+}
+
+
+
+

The use-cases can then be injected directly into the service.

+
+
+
+
@Named("BookingmanagementRestService")
+@Validated
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+
+  @Inject
+  private UcFindBooking ucFindBooking;
+
+  @Inject
+  private UcManageBooking ucManageBooking;
+
+  @Inject
+  private UcApproveBooking ucApproveBooking;
+}
+
+
+
+
+
Internal use case
+
+

Sometimes, a component with multiple related entities and many use-cases needs to reuse business logic internally. +Of course, this can be exposed as an official use-case API but this will imply using transfer-objects (ETOs) instead of entities. In some cases, this is undesired e.g. for better performance to prevent unnecessary mapping of entire collections of entities. +In the first place, you should try to use abstract base implementations providing reusable methods the actual use-case implementations can inherit from. +If your business logic is even more complex and you have multiple aspects of business logic to share and reuse but also run into multi-inheritance issues, you may also just create use-cases that have their interface located in the impl scope package right next to the implementation (or you may just skip the interface). In such a case, you may define methods that directly take or return entity objects. +To avoid confusion with regular use-cases, we recommend to add the Internal suffix to the type name leading to Uc«Operation»«BusinessObject»Internal[Impl].

+
+
+ +
+
+
+
Data-Access Layer
+
+

The data-access layer is responsible for all outgoing connections to access and process data. This is mainly about accessing data from a persistent data-store. External systems could also be accessed from the data-access layer if they match this definition, e.g. a mongo-db via rest services.

+
+
+

Note: In the modern project structure, this layer is replaced by the domain layer.

+
+
+
+
Database
+
+

You need to make your choice for a database. Options are documented here.

+
+
+

The classical approach is to use a Relational Database Management System (RDMS). In such a case, we strongly recommend to follow our JPA Guide. Some NoSQL databases are supported by spring-data so you can consider the repository guide.

+
+
+ +
+
+
Batch Layer
+
+

We understand batch processing as a bulk-oriented, non-interactive, typically long running execution of tasks. For simplicity, we use the term "batch" or "batch job" for such tasks in the following documentation.

+
+
+

devonfw uses Spring Batch as a batch framework.

+
+
+

This guide explains how Spring Batch is used in devonfw applications. It focuses on aspects which are special to devonfw. If you want to learn about spring-batch you should adhere to springs references documentation.

+
+
+

There is an example of a simple batch implementation in the my-thai-star batch module.

+
+
+

In this chapter, we will describe the overall architecture (especially concerning layering) and how to administer batches.

+
+
+
+
Layering
+
+

Batches are implemented in the batch layer. The batch layer is responsible for batch processes, whereas the business logic is implemented in the logic layer. Compared to the service layer, you may understand the batch layer just as a different way of accessing the business logic. +From a component point of view, each batch is implemented as a subcomponent in the corresponding business component. +The business component is defined by the business architecture.

+
+
+

Let’s make an example for that. The sample application implements a batch for exporting ingredients. This ingredientExportJob belongs to the dishmanagement business component. +So the ingredientExportJob is implemented in the following package:

+
+
+
+
<basepackage>.dishmanagement.batch.impl.*
+
+
+
+

Batches should invoke use cases in the logic layer for doing their work. +Only "batch specific" technical aspects should be implemented in the batch layer.

+
+
+
+
+

Example: +For a batch, which imports product data from a CSV file, this means that all code for actually reading and parsing the CSV input file is implemented in the batch layer. +The batch calls the use case "create product" in the logic layer for actually creating the products for each line read from the CSV input file.

+
+
+
+
+
Directly accessing data access layer
+
+

In practice, it is not always appropriate to create use cases for every bit of work a batch should do. Instead, the data access layer can be used directly. +An example for that is a typical batch for data retention which deletes out-of-time data. +Often, deleting out-dated data is done by invoking a single SQL statement. It is appropriate to implement that SQL in a Repository or DAO method and call this method directly from the batch. +But be careful: this pattern is a simplification which could lead to business logic cluttered in different layers, which reduces the maintainability of your application. +It is a typical design decision you have to make when designing your specific batches.

+
+
+
+
+
Project structure and packaging
+
+

Batches will be implemented in a separate Maven module to keep the application core free of batch dependencies. The batch module includes a dependency on the application core-module to allow the reuse of the use cases, DAOs etc. +Additionally the batch module has dependencies on the required spring batch jars:

+
+
+
+
  <dependencies>
+
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>mtsj-core</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter-batch</artifactId>
+    </dependency>
+
+  </dependencies>
+
+
+
+

To allow an easy start of the batches from the command line it is advised to create a bootified jar for the batch module by adding the following to the pom.xml of the batch module:

+
+
+
+
  <build>
+    <resources>
+      <resource>
+        <directory>src/main/resources</directory>
+        <filtering>true</filtering>
+      </resource>
+    </resources>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <configuration>
+          <excludes>
+            <exclude>config/application.properties</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+      <!-- Create bootified jar for batch execution via command line.
+           Your applications spring boot app is used as main-class.
+       -->
+      <plugin>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-maven-plugin</artifactId>
+        <configuration>
+          <mainClass>com.devonfw.application.mtsj.SpringBootApp</mainClass>
+          <classifier>bootified</classifier>
+        </configuration>
+        <executions>
+          <execution>
+            <goals>
+              <goal>repackage</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+
+
+
+
Implementation
+
+

Most of the details about the implementation of batches are described in the spring batch documentation. +There is nothing special about implementing batches in devonfw. You will find an easy example in my-thai-star.

+
+
+
+
Starting from command line
+
+

Devonfw advises to start batches via command line. This is most common to many ops teams and allows easy integration in existing schedulers. In general batches are started with the following command:

+
+
+
+
java -jar <app>-batch-<version>-bootified.jar --spring.main.web-application-type=none --spring.batch.job.enabled=true --spring.batch.job.names=<myJob> <params>
+
+
+ ++++ + + + + + + + + + + + + + + + + + + + + +
ParameterExplanation

--spring.main.web-application-type=none

This disables the web app (e.g. Tomcat)

--spring.batch.job.names=<myJob>

This specifies the name of the job to run. If you leave this out, ALL jobs will be executed, which probably does not make too much sense.

<params>

(Optional) additional parameters which are passed to your job

+
+

This will launch your normal spring boot app, disables the web application part and runs the designated job via Spring Boots org.springframework.boot.autoconfigure.batch.JobLauncherCommandLineRunner.

+
+
+
+
Scheduling
+
+

In the real world, scheduling of batches is not as simple as it might first look.

+
+
+
    +
  • +

    Multiple batches have to be executed in order to achieve complex tasks. If one of those batches fails the further execution has to be stopped and operations should be notified for example.

    +
  • +
  • +

    Input files or those created by batches have to be copied from one node to another.

    +
  • +
  • +

    Scheduling batch executing could get complex easily (quarterly jobs, run job on first workday of a month, …​)

    +
  • +
+
+
+

For devonfw we propose the batches themselves should not mess around with details of scheduling. +Likewise your application should not do so. This complexity should be externalized to a dedicated batch administration service or scheduler. +This service could be a complex product or a simple tool like cron. We propose Rundeck as an open source job scheduler.

+
+
+

This gives full control to operations to choose the solution which fits best into existing administration procedures.

+
+
+
+
Handling restarts
+
+

If you start a job with the same parameters set after a failed run (BatchStatus.FAILED) a restart will occur. +In many cases your batch should then not reprocess all items it processed in the previous runs. +For that you need some logic to start at the desired offset. There are different ways to implement such logic:

+
+
+
    +
  • +

    Marking processed items in the database in a dedicated column

    +
  • +
  • +

    Write all IDs of items to process in a separate table as an initialization step of your batch. You can then delete IDs of already processed items from that table during the batch execution.

    +
  • +
  • +

    Storing restart information in springs ExecutionContext (see below)

    +
  • +
+
+
+
Using spring batch ExecutionContext for restarts
+
+

By implementing the ItemStream interface in your ItemReader or ItemWriter you may store information about the batch progress in the ExecutionContext. You will find an example for that in the CountJob in My Thai Star.

+
+
+

Additional hint: It is important that the bean definition methods of your ItemReader/ItemWriter have return types implementing ItemStream (and not just ItemReader or ItemWriter alone). For that, the ItemStreamReader and ItemStreamWriter interfaces are provided.

+
+
+
+
+
Exit codes
+
+

Your batches should create a meaningful exit code to allow reaction to batch errors e.g. in a scheduler. +For that spring batch automatically registers an org.springframework.boot.autoconfigure.batch.JobExecutionExitCodeGenerator. To make this mechanism work, your spring boot app main class has to populate this exit code to the JVM:

+
+
+
+
@SpringBootApplication
+public class SpringBootApp {
+
+  public static void main(String[] args) {
+    if (Arrays.stream(args).anyMatch((String e) -> e.contains("--spring.batch.job.names"))) {
+      // if executing batch job, explicitly exit jvm to report error code from batch
+      System.exit(SpringApplication.exit(SpringApplication.run(SpringBootApp.class, args)));
+    } else {
+      // normal web application start
+      SpringApplication.run(SpringBootApp.class, args);
+    }
+  }
+}
+
+
+
+
+
Stop batches and manage batch status
+
+

Spring batch uses several database tables to store the status of batch executions. +Each execution may have different status. +You may use this mechanism to gracefully stop batches. +Additionally in some edge cases (batch process crashed) the execution status may be in an undesired state. +E.g. the state will be running, despite the process crashed sometime ago. +For that cases you have to change the status of the execution in the database.

+
+
+
CLI-Tool
+
+

Devonfw provides an easy-to-use CLI tool to manage the execution status of your jobs. +The tool is implemented in the devonfw module devon4j-batch-tool. It will provide a runnable jar, which may be used as follows:

+
+
+
+
List names of all previous executed jobs
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar jobs list

+
+
Stop job named 'countJob'
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar jobs stop countJob

+
+
Show help
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar

+
+
+
+
+

As you can see, each invocation includes the JDBC connection string to your database. +This means that you have to make sure that the corresponding DB driver is in the classpath (the prepared jar only contains H2).

+
+
+
+
+
Authentication
+
+

Most business applications incorporate authentication and authorization. +Your spring boot application will implement some kind of security, e.g. integrated login with username+password or in many cases authentication via an existing IAM. +For security reasons your batch should also implement an authentication mechanism and obey the authorization implemented in your application (e.g. via @RolesAllowed).

+
+
+

Since there are many different authentication mechanism we cannot provide an out-of-the-box solution in devonfw, but we describe a pattern how this can be implemented in devonfw batches.

+
+
+

We suggest to implement the authentication in a Spring Batch tasklet, which runs as the first step in your batch. This tasklet will do all of the work which is required to authenticate the batch. A simple example which authenticates the batch "locally" via username and password could be implemented like this:

+
+
+
+
@Named
+public class SimpleAuthenticationTasklet implements Tasklet {
+
+  @Override
+  public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
+
+    String username = chunkContext.getStepContext().getStepExecution().getJobParameters().getString("username");
+    String password = chunkContext.getStepContext().getStepExecution().getJobParameters().getString("password");
+    Authentication authentication = new UsernamePasswordAuthenticationToken(username, password);
+
+    SecurityContextHolder.getContext().setAuthentication(authentication);
+    return RepeatStatus.FINISHED;
+  }
+
+}
+
+
+
+

The username and password have to be supplied via two cli parameters -username and -password. This implementation creates an "authenticated" Authentication and sets it in the Spring Security context. This is just for demonstration; normally you should not provide passwords via the command line. The actual authentication will be done automatically via Spring Security as in your "normal" application. +If you have a more complex authentication mechanism in your application e.g. via OpenID connect just call this in the tasklet. Naturally you may read authentication parameters (e.g. secrets) from the command line or more securely from a configuration file.

+
+
+

In your Job Configuration set this tasklet as the first step:

+
+
+
+
@Configuration
+@EnableBatchProcessing
+public class BookingsExportBatchConfig {
+  @Inject
+  private JobBuilderFactory jobBuilderFactory;
+
+  @Inject
+  private StepBuilderFactory stepBuilderFactory;
+
+  @Bean
+  public Job myBatchJob() {
+    return this.jobBuilderFactory.get("myJob").start(myAuthenticationStep()).next(...).build();
+  }
+
+  @Bean
+  public Step myAuthenticationStep() {
+    return this.stepBuilderFactory.get("myAuthenticationStep").tasklet(myAuthenticatonTasklet()).build();
+  }
+
+  @Bean
+  public Tasklet myAuthenticatonTasklet() {
+    return new SimpleAuthenticationTasklet();
+  }
+...
+
+
+
+
+
Tipps & tricks
+
+
Identifying job parameters
+
+

Spring uses a job's parameters to identify job executions. Parameters starting with "-" are not considered for identifying a job execution.

+
+
+
+
+
+
+

1.104. Guides

+ +
+
Configuration
+ +
+
+
Internal Application Configuration
+
+

There usually is a main configuration registered with main Spring Boot App, but differing configurations to support automated test of the application can be defined using profiles (not detailed in this guide).

+
+
+
Spring Boot Application
+
+

For a complete documentation, see the Spring Boot Reference Guide.

+
+
+

With spring-boot you provide a simple main class (also called starter class) like this: +com.devonfw.mtsj.application

+
+
+
+
@SpringBootApplication(exclude = { EndpointAutoConfiguration.class })
+@EntityScan(basePackages = { "com.devonfw.mtsj.application" }, basePackageClasses = { AdvancedRevisionEntity.class })
+@EnableGlobalMethodSecurity(jsr250Enabled = true)
+@ComponentScan(basePackages = { "com.devonfw.mtsj.application.general", "com.devonfw.mtsj.application" })
+public class SpringBootApp {
+
+  /**
+   * Entry point for spring-boot based app
+   *
+   * @param args - arguments
+   */
+  public static void main(String[] args) {
+
+    SpringApplication.run(SpringBootApp.class, args);
+  }
+}
+
+
+
+

In a devonfw application this main class is always located in the <basepackage> of the application package namespace (see package-conventions). This is because a spring boot application will automatically do a classpath scan for components (spring-beans) and entities in the package where the application main class is located including all sub-packages. You can use the @ComponentScan and @EntityScan annotations to customize this behaviour.

+
+
+

If you want to map spring configuration properties into your custom code please see configuration mapping.

+
+
+
+
Standard beans configuration
+
+

For basic bean configuration we rely on spring boot using mainly configuration classes and only occasionally XML configuration files. Some key principle to understand Spring Boot auto-configuration features:

+
+
+
    +
  • +

    Spring Boot auto-configuration attempts to automatically configure your Spring application based on the jar dependencies and annotated components found in your source code.

    +
  • +
  • +

    Auto-configuration is non-invasive, at any point you can start to define your own configuration to replace specific parts of the auto-configuration by redefining your identically named bean (see also exclude attribute of @SpringBootApplication in example code above).

    +
  • +
+
+
+

Beans are configured via annotations in your java code (see dependency-injection).

+
+
+

For technical configuration you will typically write additional spring config classes annotated with @Configuration that provide bean implementations via methods annotated with @Bean. See spring @Bean documentation for further details. Like in XML you can also use @Import to make a @Configuration class include other configurations.

+
+
+

More specific configuration files (as required) reside in an adequately named subfolder of:

+
+
+

src/main/resources/app

+
+
+
+
BeanMapper Configuration
+
+

In case you are still using dozer, you will find further details in bean-mapper configuration.

+
+
+
+
Security configuration
+
+

The abstract base class BaseWebSecurityConfig should be extended to configure web application security thoroughly. +A basic and secure configuration is provided which can be overridden or extended by subclasses. +Subclasses must use the @Profile annotation to further discriminate between beans used in production and testing scenarios. See the following example:

+
+
+
Listing 14. How to extend BaseWebSecurityConfig for Production and Test
+
+
@Configuration
+@EnableWebSecurity
+@Profile(SpringProfileConstants.JUNIT)
+public class TestWebSecurityConfig extends BaseWebSecurityConfig {...}
+
+@Configuration
+@EnableWebSecurity
+@Profile(SpringProfileConstants.NOT_JUNIT)
+public class WebSecurityConfig extends BaseWebSecurityConfig {...}
+
+
+ +
+
+
WebSocket configuration
+
+

A websocket endpoint is configured within the business package as a Spring configuration class. The annotation @EnableWebSocketMessageBroker makes Spring Boot registering this endpoint.

+
+
+
+
package your.path.to.the.websocket.config;
+...
+@Configuration
+@EnableWebSocketMessageBroker
+public class WebSocketConfig extends AbstractWebSocketMessageBrokerConfigurer {
+...
+
+
+
+
+
+
External Application Configuration
+
+
application.properties files
+
+

Here is a list of common properties provided by the Spring framework.

+
+
+

For a general understanding how spring-boot is loading and bootstrapping your application.properties see spring-boot external configuration.

+
+
+

The following properties files are used in devonfw application:

+
+
+
    +
  • +

    src/main/resources/application.properties providing a default configuration - bundled and deployed with the application package. It further acts as a template to derive a tailored minimal environment-specific configuration.

    +
  • +
  • +

    src/main/resources/config/application.properties providing additional properties only used at development time (for all local deployment scenarios). This property file is excluded from all packaging.

    +
  • +
  • +

    src/test/resources/config/application.properties providing additional properties only used for testing (JUnits based on spring test).

    +
  • +
+
+
+

For other environments where the software gets deployed such as test, acceptance and production you need to provide a tailored copy of application.properties. The location depends on the deployment strategy:

+
+
+
    +
  • +

    standalone run-able Spring Boot App using embedded tomcat: config/application.properties under the installation directory of the spring boot application.

    +
  • +
  • +

    dedicated tomcat (one tomcat per app): $CATALINA_BASE/lib/config/application.properties

    +
  • +
  • +

    tomcat serving a number of apps (requires expanding the wars): $CATALINA_BASE/webapps/<app>/WEB-INF/classes/config

    +
  • +
+
+
+

In this application.properties you only define the minimum properties that are environment specific and inherit everything else from the bundled src/main/resources/application.properties. In any case, make very sure that the classloader will find the file.

+
+
+
+
Database Configuration
+
+

The configuration for spring and Hibernate is already provided by devonfw in our sample application and the application template. So you only need to worry about a few things to customize.

+
+
+Database System and Access +
+

Obviously you need to configure which type of database you want to use as well as the location and credentials to access it. The defaults are configured in application.properties that is bundled and deployed with the release of the software. The files should therefore contain the properties as in the given example:

+
+
+
+
  database.url=jdbc:postgresql://database.enterprise.com/app
+  database.user.login=appuser01
+  database.user.password=************
+  database.hibernate.dialect = org.hibernate.dialect.PostgreSQLDialect
+  database.hibernate.hbm2ddl.auto=validate
+
+
+
+

For further details about database.hibernate.hbm2ddl.auto please see here. For production and acceptance environments we use the value validate that should be set as default. In case you want to use Oracle RDBMS you can find additional hints here.

+
+
+

If your application supports multiple database types, set spring.profiles.active=XXX in src/main/resources/config/application.properties to choose the database of your choice. Also, one has to set all the active spring profiles in this application.properties and not in any of the other application.properties.

+
+
+
+Database Logging +
+

Add the following properties to application.properties to enable logging of database queries for debugging purposes.

+
+
+
+
spring.jpa.properties.hibernate.show_sql=true
+spring.jpa.properties.hibernate.use_sql_comments=true
+spring.jpa.properties.hibernate.format_sql=true
+
+
+
+
+
+
+
Security
+
+
Password Encryption
+
+

In order to support encrypted passwords in spring-boot application.properties all you need to do is to add jasypt-spring-boot as dependency in your pom.xml (please check for recent version here):

+
+
+
+
<dependency>
+  <groupId>com.github.ulisesbocchio</groupId>
+  <artifactId>jasypt-spring-boot-starter</artifactId>
+  <version>3.0.3</version>
+</dependency>
+
+
+
+

This will smoothly integrate jasypt into your spring-boot application. Read this HOWTO to learn how to encrypt and decrypt passwords using jasypt.

+
+
+

Next, we give a simple example of how to encrypt and configure a secret value. +We use the algorithm PBEWITHHMACSHA512ANDAES_256 that provides strong encryption and is the default of jasypt-spring-boot-starter. +However, different algorithms can be used if preferred (e.g. PBEWITHMD5ANDTRIPLEDES).

+
+
+
+
java -cp ${M2_REPO}/org/jasypt/jasypt/1.9.3/jasypt-1.9.3.jar org.jasypt.intf.cli.JasyptPBEStringEncryptionCLI password=masterpassword algorithm=PBEWITHHMACSHA512ANDAES_256 input=secret ivGeneratorClassName=org.jasypt.iv.RandomIvGenerator
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.5+10
+
+
+
+----ARGUMENTS-------------------
+
+input: secret
+password: masterpassword
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+PoUxkNjY2juQMCyPu6ic5KJy1XfK+bX9vu2/mPj3pmcO4iydG6mhgZRZSw50z/oC
+
+
+
+

Of course the master-password (masterpassword) and the actual password to encrypt (secret) are just examples. +Please replace them with reasonable strong passwords for your environment. +Further, if you are using devonfw-ide you can make your life much easier and just type:

+
+
+
+
devon jasypt encrypt
+
+
+
+

See jasypt commandlet for details.

+
+
+

Now the entire line after the OUTPUT block is your encrypted secret. +It even contains some random salt so that multiple encryption invocations with the same parameters (ARGUMENTS) will produce a different OUTPUT.

+
+
+

The master-password can be configured on your target environment via the property jasypt.encryptor.password. As system properties given on the command-line are visible in the process list, we recommend to use a config/application.yml file only for this purpose (as we recommended to use application.properties for regular configs):

+
+
+
+
jasypt:
+    encryptor:
+        password: masterpassword
+
+
+
+

Again masterpassword is just an example that you replace with your actual master password. +Now you are able to put encrypted passwords into your application.properties and specify the algorithm.

+
+
+
+
spring.datasource.password=ENC(PoUxkNjY2juQMCyPu6ic5KJy1XfK+bX9vu2/mPj3pmcO4iydG6mhgZRZSw50z/oC)
+jasypt.encryptor.algorithm=PBEWITHHMACSHA512ANDAES_256
+
+
+
+

This application.properties file can be version controlled (git-opts) and without knowing the masterpassword nobody is able to decrypt this to get the actual secret back.

+
+
+

To prevent jasypt to throw an exception in dev or test scenarios you can simply put this in your local config (src/main/config/application.properties and same for test, see above for details):

+
+
+
+
jasypt.encryptor.password=none
+
+
+ +
+
+
Mapping configuration to your code
+
+

If you are using spring-boot as suggested by devon4j your application can be configured by application.properties file as described in configuration. +To get a single configuration option into your code for flexibility, you can use

+
+
+
+
@Value("${my.property.name}")
+private String myConfigurableField;
+
+
+
+

Now, in your application.properties you can add the property:

+
+
+
+
my.property.name=my-property-value
+
+
+
+

You may even use @Value("${my.property.name:my-default-value}") to make the property optional.

+
+
+
+
Naming conventions for configuration properties
+
+

As a best practice your configuration properties should follow these naming conventions:

+
+
+
    +
  • +

    build the property-name as a path of segments separated by the dot character (.)

    +
  • +
  • +

    segments should get more specific from left to right

    +
  • +
  • +

    a property-name should either be a leaf value or a tree node (prefix of other property-names) but never both! So never have something like foo.bar=value and foo.bar.child=value2.

    +
  • +
  • +

    start with a segment namespace unique to your context or application

    +
  • +
  • +

    a good example would be «myapp».billing.service.email.sender for the sender address of billing service emails send by «myapp».

    +
  • +
+
+
+
+
Mapping advanced configuration
+
+

However, in many scenarios you will have features that require more than just one property. +Injecting those via @Value is not leading to good code quality. +Instead we create a class with the suffix ConfigProperties containing all configuration properties for our aspect that is annotated with @ConfigurationProperties:

+
+
+
+
@ConfigurationProperties(prefix = "myapp.billing.service")
+public class BillingServiceConfigProperties {
+
+  private final Email email = new Email();
+  private final Smtp smtp = new Smtp();
+
+  public Email getEmail() { return this.email; }
+  public Smtp getSmtp() { return this.smtp; }
+
+  public static class Email {
+
+    private String sender;
+    private String subject;
+
+    public String getSender() { return this.sender; }
+    public void setSender(String sender) { this.sender = sender; }
+    public String getSubject() { return this.subject; }
+    public void setSubject(String subject) { this.subject = subject; }
+  }
+
+  public static class Smtp {
+
+    private String host;
+    private int port = 25;
+
+    public String getHost() { return this.host; }
+    public void setHost(String host) { this.host = host; }
+    public int getPort() { return this.port; }
+    public void setPort(int port) { this.port = port; }
+  }
+
+}
+
+
+
+

Of course this is just an example to demonstrate this feature of spring-boot. +In order to send emails you would typically use the existing spring-email feature. +But as you can see this allows us to define and access our configuration in a very structured and comfortable way. +The annotation @ConfigurationProperties(prefix = "myapp.billing.service") will automatically map spring configuration properties starting with myapp.billing.service via the according getters and setters into our BillingServiceConfigProperties. +We can easily define defaults (e.g. 25 as default value for myapp.billing.service.smtp.port). +Also Email or Smtp could be top-level classes to be reused in multiple configurations. +Of course you would also add helpful JavaDoc comments to the getters and classes to document your configuration options. +Further to access this configuration, we can use standard dependency-injection:

+
+
+
+
@Inject
+private BillingServiceConfigProperties config;
+
+
+
+

For very generic cases you may also use Map<String, String> to map any kind of property in an untyped way. +An example for generic configuration from devon4j can be found in +ServiceConfigProperties.

+
+
+

For further details about this feature also consult Guide to @ConfigurationProperties in Spring Boot.

+
+
+
+
Generate configuration metadata
+
+

You should further add this dependency to your module containing the *ConfigProperties:

+
+
+
+
    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-configuration-processor</artifactId>
+      <optional>true</optional>
+    </dependency>
+
+
+
+

This will generate configuration metadata so projects using your code can benefit from autocompletion and getting your JavaDoc as tooltip when editing application.properites what makes this approach very powerful. +For further details about this please read A Guide to Spring Boot Configuration Metadata.

+
+
+ +
+
+
+
Auditing
+
+

For database auditing we use hibernate envers. If you want to use auditing ensure you have the following dependency in your pom.xml:

+
+
+
Listing 15. spring
+
+
<dependency>
+  <groupId>com.devonfw.java.modules</groupId>
+  <artifactId>devon4j-jpa-envers</artifactId>
+</dependency>
+
+
+
+
Listing 16. quarkus
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-hibernate-envers</artifactId>
+</dependency>
+
+
+
+ + + + + +
+ + +The following part applies only to spring applications. At this point, the Quarkus extension does not provide any additional configurations. For Quarkus applications, simply use the @Audited annotation to enable auditing for an entity class, as described a few lines below or seen here. +
+
+
+

Make sure that entity manager also scans the package from the devon4j-jpa[-envers] module in order to work properly. And make sure that correct Repository Factory Bean Class is chosen.

+
+
+
+
@EntityScan(basePackages = { "«my.base.package»" }, basePackageClasses = { AdvancedRevisionEntity.class })
+...
+@EnableJpaRepositories(repositoryFactoryBeanClass = GenericRevisionedRepositoryFactoryBean.class)
+...
+public class SpringBootApp {
+  ...
+}
+
+
+
+

Now let your [Entity]Repository extend from DefaultRevisionedRepository instead of DefaultRepository.

+
+
+

The repository now has a method getRevisionHistoryMetadata(id) and getRevisionHistoryMetadata(id, boolean lazy) available to get a list of revisions for a given entity and a method find(id, revision) to load a specific revision of an entity with the given ID or getLastRevisionHistoryMetadata(id) to load the last revision. +To enable auditing for an entity simply place the @Audited annotation on your entity and all entity classes it extends from.

+
+
+
+
@Entity(name = "Drink")
+@Audited
+public class DrinkEntity extends ProductEntity implements Drink {
+...
+
+
+
+

When auditing is enabled for an entity an additional database table is used to store all changes to the entity table and a corresponding revision number. This table is called <ENTITY_NAME>_AUD per default. Another table called REVINFO is used to store all revisions. Make sure that these tables are available. They can be generated by hibernate with the following property (only for development environments).

+
+
+
+
  database.hibernate.hbm2ddl.auto=create
+
+
+
+

Another possibility is to put them in your database migration scripts like so.

+
+
+
+
CREATE CACHED TABLE PUBLIC.REVINFO(
+  id BIGINT NOT NULL generated by default as identity (start with 1),
+  timestamp BIGINT NOT NULL,
+  user VARCHAR(255)
+);
+...
+CREATE CACHED TABLE PUBLIC.<TABLE_NAME>_AUD(
+    <ALL_TABLE_ATTRIBUTES>,
+    revtype TINYINT,
+    rev BIGINT NOT NULL
+);
+
+
+
+ +
+
+
Access-Control
+
+

Access-Control is a central and important aspect of Security. It consists of two major aspects:

+
+
+ +
+
+
+
Authentication
+
+

Definition:

+
+
+
+
+

Authentication is the verification that somebody interacting with the system is the actual subject for whom he claims to be.

+
+
+
+
+

The one authenticated is properly called subject or principal. There are two forms of principals you need to distinguish while designing your authentication: human users and autonomous systems. While e.g. a Kerberos/SPNEGO Single-Sign-On makes sense for human users, it is pointless for authenticating autonomous systems. For simplicity, we use the common term user to refer to any principal even though it may not be a human (e.g. in case of a service call from an external system).

+
+
+

To prove the authenticity, the user provides some secret called credentials. The most simple form of credentials is a password.

+
+
+
Implementations
+
+ + + + + +
+ + +Please never implement your own authentication mechanism or credential store. You have to be aware of implicit demands such as salting and hashing credentials, password life-cycle with recovery, expiry, and renewal including email notification confirmation tokens, central password policies, etc. This is the domain of access managers and identity management systems. In a business context you will typically already find a system for this purpose that you have to integrate (e.g. via LDAP). Otherwise you should consider establishing such a system e.g. using keycloak. +
+
+
+

We recommend using JWT when possible. For KISS, also try to avoid combining multiple authentication mechanisms (form based, basic-auth, SAMLv2, OAuth, etc.) within the same application (for different URLs).

+
+
+

For spring, check the Spring Security

+
+
+

For quarkus, check the Quarkus Authentication

+
+
+
+
+
Authorization
+
+

Definition:

+
+
+
+
+

Authorization is the verification that an authenticated user is allowed to perform the operation he intends to invoke.

+
+
+
+
+
Clarification of terms
+
+

For clarification we also want to give a common understanding of related terms that have no unique definition and consistent usage in the wild.

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 14. Security terms related to authorization
TermMeaning and comment

Permission

A permission is an object that allows a principal to perform an operation in the system. This permission can be granted (give) or revoked (taken away). Sometimes people also use the term right what is actually wrong as a right (such as the right to be free) can not be revoked.

Group

We use the term group in this context for an object that contains permissions. A group may also contain other groups. Then the group represents the set of all recursively contained permissions.

Role

We consider a role as a specific form of group that also contains permissions. A role identifies a specific function of a principal. A user can act in a role.

+

For simple scenarios a principal has a single role associated. In more complex situations a principal can have multiple roles but has only one active role at a time that he can choose out of his assigned roles. For KISS it is sometimes sufficient to avoid this by creating multiple accounts for the few users with multiple roles. Otherwise at least avoid switching roles at run-time in clients as this may cause problems with related states. Simply restart the client with the new role as parameter in case the user wants to switch his role.

Access Control

Any permission, group, role, etc., which declares a control for access management.

+
+
+
Suggestions on the access model
+
+

For the access model we give the following suggestions:

+
+
+
    +
  • +

    Each Access Control (permission, group, role, …​) is uniquely identified by a human readable string.

    +
  • +
  • +

    We create a unique permission for each use-case.

    +
  • +
  • +

    We define groups that combine permissions to typical and useful sets for the users.

    +
  • +
  • +

    We define roles as specific groups as required by our business demands.

    +
  • +
  • +

    We allow to associate users with a list of Access Controls.

    +
  • +
  • +

    For authorization of an implemented use case we determine the required permission. Furthermore, we determine the current user and verify that the required permission is contained in the tree spanned by all his associated Access Controls. If the user does not have the permission we throw a security exception and thus abort the operation and transaction.

    +
  • +
  • +

    We avoid negative permissions, that is a user has no permission by default and only those granted to him explicitly give him additional permission for specific things. Permissions granted can not be reduced by other permissions.

    +
  • +
  • +

    Technically we consider permissions as a secret of the application. Administrators shall not fiddle with individual permissions but grant them via groups. So the access management provides a list of strings identifying the Access Controls of a user. The individual application itself contains these Access Controls in a structured way, whereas each group forms a permission tree.

    +
  • +
+
+
+
+
Naming conventions
+
+

As stated above each Access Control is uniquely identified by a human readable string. This string should follow the naming convention:

+
+
+
+
«app-id».«local-name»
+
+
+
+

For Access Control Permissions the «local-name» again follows the convention:

+
+
+
+
«verb»«object»
+
+
+
+

The segments are defined by the following table:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 15. Segments of Access Control Permission ID
SegmentDescriptionExample

«app-id»

Is a unique technical but human readable string of the application (or microservice). It shall not contain special characters and especially no dot or whitespace. We recommend to use lower-train-case-ascii-syntax. The identity and access management should be organized on enterprise level rather than application level. Therefore permissions of different apps might easily clash (e.g. two apps might both define a group ReadMasterData but some user shall get this group for only one of these two apps). Using the «app-id». prefix is a simple but powerful namespacing concept that allows you to scale and grow. You may also reserve specific «app-id»s for cross-cutting concerns that do not actually reflect a single app e.g to grant access to a geographic region.

shop

«verb»

The action that is to be performed on «object». We use Find for searching and reading data. Save shall be used both for create and update. Only if you really have demands to separate these two you may use Create in addition to Save. Finally, Delete is used for deletions. For non CRUD actions you are free to use additional verbs such as Approve or Reject.

Find

«object»

The affected object or entity. Shall be named according to your data-model

Product

+
+

So as an example shop.FindProduct will reflect the permission to search and retrieve a Product in the shop application. The group shop.ReadMasterData may combine all permissions to read master-data from the shop. However, also a group shop.Admin may exist for the Admin role of the shop application. Here the «local-name» is Admin that does not follow the «verb»«object» schema.

+
+
+
+
devon4j-security
+
+

The module devon4j-security provides ready-to-use code based on spring-security that makes your life a lot easier.

+
+
+
+access-control +
+
Figure 3. devon4j Security Model
+
+
+

The diagram shows the model of devon4j-security that separates two different aspects:

+
+
+
    +
  • +

    The Identity- and Access-Management is provided by according products and typically already available in the enterprise landscape (e.g. an active directory). It provides a hierarchy of primary access control objects (roles and groups) of a user. An administrator can grant and revoke permissions (indirectly) via this way.

    +
  • +
  • +

    The application security defines a hierarchy of secondary access control objects (groups and permissions). This is done by configuration owned by the application (see following section). The "API" is defined by the IDs of the primary access control objects that will be referenced from the Identity- and Access-Management.

    +
  • +
+
+
+
+
Access Control Config
+
+

In your application simply extend AccessControlConfig to configure your access control objects as code and reference it from your use-cases. An example config may look like this:

+
+
+
+
@Named
+public class ApplicationAccessControlConfig extends AccessControlConfig {
+
+  public static final String APP_ID = "MyApp";
+
+  private static final String PREFIX = APP_ID + ".";
+
+  public static final String PERMISSION_FIND_OFFER = PREFIX + "FindOffer";
+
+  public static final String PERMISSION_SAVE_OFFER = PREFIX + "SaveOffer";
+
+  public static final String PERMISSION_DELETE_OFFER = PREFIX + "DeleteOffer";
+
+  public static final String PERMISSION_FIND_PRODUCT = PREFIX + "FindProduct";
+
+  public static final String PERMISSION_SAVE_PRODUCT = PREFIX + "SaveProduct";
+
+  public static final String PERMISSION_DELETE_PRODUCT = PREFIX + "DeleteProduct";
+
+  public static final String GROUP_READ_MASTER_DATA = PREFIX + "ReadMasterData";
+
+  public static final String GROUP_MANAGER = PREFIX + "Manager";
+
+  public static final String GROUP_ADMIN = PREFIX + "Admin";
+
+  public ApplicationAccessControlConfig() {
+
+    super();
+    AccessControlGroup readMasterData = group(GROUP_READ_MASTER_DATA, PERMISSION_FIND_OFFER, PERMISSION_FIND_PRODUCT);
+    AccessControlGroup manager = group(GROUP_MANAGER, readMasterData, PERMISSION_SAVE_OFFER, PERMISSION_SAVE_PRODUCT);
+    AccessControlGroup admin = group(GROUP_ADMIN, manager, PERMISSION_DELETE_OFFER, PERMISSION_DELETE_PRODUCT);
+  }
+}
+
+
+
+
+
Configuration on Java Method level
+
+

In your use-case you can now reference a permission like this:

+
+
+
+
@Named
+public class UcSaveOfferImpl extends ApplicationUc implements UcSaveOffer {
+
+  @Override
+  @RolesAllowed(ApplicationAccessControlConfig.PERMISSION_SAVE_OFFER)
+  public OfferEto save(OfferEto offer) { ... }
+  ...
+}
+
+
+
+
+
JEE Standard
+
+

Role-based Access Control (RBAC) is commonly used for authorization. +JSR 250 defines a number of common annotations to secure your application.

+
+
+
    +
  • +

    javax.annotation.security.PermitAll specifies that no access control is required to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.DenyAll specifies that no access controls are allowed to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.RolesAllowed specifies that only a list of access controls are allowed to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.DeclareRoles defines roles for security checking.

    +
  • +
  • +

    javax.annotation.security.RunAs specifies the RunAs role for the given components.

    +
  • +
+
+
+

@PermitAll, @DenyAll, and @RolesAllowed annotations can be applied to both class and method. +A method-level annotation will override the behaviour of a class-level annotation. Using multiple annotations of those 3 is not valid.

+
+
+
+
// invalid
+@PermitAll
+@DenyAll
+public String foo()
+
+// invalid and compilation fails
+@RolesAllowed("admin")
+@RolesAllowed("user")
+public String bar()
+
+// OK
+@RolesAllowed({"admin", "user"})
+public String bar()
+
+
+
+

Please note that when specifying multiple arguments to @RolesAllowed those are combined with OR (and not with AND). +So if the user has any of the specified access controls, he will be able to access the method.

+
+
+

As a best practice avoid specifying string literals to @RolesAllowed. +Instead define a class with all access controls as constants and reference them from there. +This class is typically called ApplicationAccessControlConfig in devonfw.

+
+
+

In many complicated cases where @PermitAll @DenyAll @RolesAllowed are insufficient e.g. a method should be accessed by a user in role A and not in role B at the same time, you have to verify the user role directly in the method. You can use SecurityContext class to get further needed information.

+
+
+Spring +
+

Spring Security also supports authorization on method level. To use it, you need to add the spring-security-config dependency. If you use Spring Boot, the dependency spring-boot-starter-security already includes spring-security-config. Then you can configure as follows:

+
+
+
    +
  • +

    prePostEnabled property enables Spring Security pre/post annotations. @PreAuthorize and @PostAuthorize annotations provide expression-based access control. See more here

    +
  • +
  • +

    securedEnabled property determines if the @Secured annotation should be enabled. @Secured can be used similarly to @RolesAllowed.

    +
  • +
  • +

    jsr250Enabled property allows us to use the JSR-250 annotations such as @RolesAllowed.

    +
  • +
+
+
+
+
@Configuration
+@EnableGlobalMethodSecurity(
+  prePostEnabled = true,
+  securedEnabled = true,
+  jsr250Enabled = true)
+public class MethodSecurityConfig
+  extends GlobalMethodSecurityConfiguration {
+}
+
+
+
+

A further read about the whole concept of Spring Security Authorization can be found here.

+
+
+
+Quarkus +
+

Quarkus comes with built-in security to allow for RBAC based on the common security annotations @RolesAllowed, @DenyAll, @PermitAll on REST endpoints and CDI beans. Quarkus also provides the io.quarkus.security.Authenticated annotation that will permit any authenticated user to access the resource (equivalent to @RolesAllowed("**")).

+
+
+
+
+
Data-based Permissions
+ +
+
+
Access Control Schema (deprecated)
+
+

The access-control-schema.xml approach is deprecated. The documentation can still be found in access control schema.

+
+
+ +
+
+
Data-permissions
+
+

In some projects there are demands for permissions and authorization that is dependent on the processed data. E.g. a user may only be allowed to read or write data for a specific region. This is adding some additional complexity to your authorization. If you can avoid this it is always best to keep things simple. However, in various cases this is a requirement. Therefore the following sections give you guidance and patterns how to solve this properly.

+
+
+
+
Structuring your data
+
+

For all your business objects (entities) that have to be secured regarding to data permissions we recommend that you create a separate interface that provides access to the relevant data required to decide about the permission. Here is a simple example:

+
+
+
+
public interface SecurityDataPermissionCountry {
+
+  /**
+   * @return the 2-letter ISO code of the country this object is associated with. Users need
+   *         a data-permission for this country in order to read and write this object.
+   */
+  String getCountry();
+}
+
+
+
+

Now related business objects (entities) can implement this interface. Often such data-permissions have to be applied to an entire object-hierarchy. For security reasons we recommend that also all child-objects implement this interface. For performance reasons we recommend that the child-objects redundantly store the data-permission properties (such as country in the example above) and this gets simply propagated from the parent, when a child object is created.

+
+
+
+
Permissions for processing data
+
+

When saving or processing objects with a data-permission, we recommend to provide dedicated methods to verify the permission in an abstract base-class such as AbstractUc and simply call this explicitly from your business code. This makes it easy to understand and debug the code. Here is a simple example:

+
+
+
+
protected void verifyPermission(SecurityDataPermissionCountry entity) throws AccessDeniedException;
+
+
+
+Beware of AOP +
+

For simple but cross-cutting data-permissions you may also use AOP. This leads to programming aspects that reflectively scan method arguments and magically decide what to do. Be aware that this quickly gets tricky:

+
+
+
    +
  • +

    What if multiple of your method arguments have data-permissions (e.g. implement SecurityDataPermission*)?

    +
  • +
  • +

    What if the object to authorize is only provided as reference (e.g. Long or IdRef) and only loaded and processed inside the implementation where the AOP aspect does not apply?

    +
  • +
  • +

    How to express advanced data-permissions in annotations?

    +
  • +
+
+
+

What we have learned is that annotations like @PreAuthorize from spring-security easily lead to the "programming in string literals" anti-pattern. We strongly discourage to use this anti-pattern. In such case writing your own verifyPermission methods that you manually call in the right places of your business-logic is much better to understand, debug and maintain.

+
+
+
+
+
Permissions for reading data
+
+

When it comes to restrictions on the data to read it becomes even more tricky. In the context of a user only entities shall be loaded from the database he is permitted to read. This is simple for loading a single entity (e.g. by its ID) as you can load it and then if not permitted throw an exception to secure your code. But what if the user is performing a search query to find many entities? For performance reasons we should only find data the user is permitted to read and filter all the rest already via the database query. But what if this is not a requirement for a single query but needs to be applied cross-cutting to tons of queries? Therefore we have the following pattern that solves your problem:

+
+
+

For each data-permission attribute (or set of such) we create an abstract base entity:

+
+
+
+
@MappedSuperclass
+@EntityListeners(PermissionCheckListener.class)
+@FilterDef(name = "country", parameters = {@ParamDef(name = "countries", type = "string")})
+@Filter(name = "country", condition = "country in (:countries)")
+public abstract class SecurityDataPermissionCountryEntity extends ApplicationPersistenceEntity
+    implements SecurityDataPermissionCountry {
+
+  private String country;
+
+  @Override
+  public String getCountry() {
+    return this.country;
+  }
+
+  public void setCountry(String country) {
+    this.country = country;
+  }
+}
+
+
+
+

There are some special hibernate annotations @EntityListeners, @FilterDef, and @Filter used here allowing to apply a filter on the country for any (non-native) query performed by hibernate. The entity listener may look like this:

+
+
+
+
public class PermissionCheckListener {
+
+  @PostLoad
+  public void read(SecurityDataPermissionCountryEntity entity) {
+    PermissionChecker.getInstance().requireReadPermission(entity);
+  }
+
+  @PrePersist
+  @PreUpdate
+  public void write(SecurityDataPermissionCountryEntity entity) {
+    PermissionChecker.getInstance().requireWritePermission(entity);
+  }
+}
+
+
+
+

This will ensure that hibernate implicitly will call these checks for every such entity when it is read from or written to the database. Further to avoid reading entities from the database the user is not permitted to (and ending up with exceptions), we create an AOP aspect that automatically activates the above declared hibernate filter:

+
+
+
+
@Named
+public class PermissionCheckerAdvice implements MethodBeforeAdvice {
+
+  @Inject
+  private PermissionChecker permissionChecker;
+
+  @PersistenceContext
+  private EntityManager entityManager;
+
+  @Override
+  public void before(Method method, Object[] args, Object target) {
+
+    Collection<String> permittedCountries = this.permissionChecker.getPermittedCountriesForReading();
+    if (permittedCountries != null) { // null is returned for admins that may access all countries
+      if (permittedCountries.isEmpty()) {
+        throw new AccessDeniedException("Not permitted for any country!");
+      }
+      Session session = this.entityManager.unwrap(Session.class);
+      session.enableFilter("country").setParameterList("countries", permittedCountries.toArray());
+    }
+  }
+}
+
+
+
+

Finally to apply this aspect to all Repositories (can easily be changed to DAOs) implement the following advisor:

+
+
+
+
@Named
+public class PermissionCheckerAdvisor implements PointcutAdvisor, Pointcut, ClassFilter, MethodMatcher {
+
+  @Inject
+  private PermissionCheckerAdvice advice;
+
+  @Override
+  public Advice getAdvice() {
+    return this.advice;
+  }
+
+  @Override
+  public boolean isPerInstance() {
+    return false;
+  }
+
+  @Override
+  public Pointcut getPointcut() {
+    return this;
+  }
+
+  @Override
+  public ClassFilter getClassFilter() {
+    return this;
+  }
+
+  @Override
+  public MethodMatcher getMethodMatcher() {
+    return this;
+  }
+
+  @Override
+  public boolean matches(Method method, Class<?> targetClass) {
+    return true; // apply to all methods
+  }
+
+  @Override
+  public boolean isRuntime() {
+    return false;
+  }
+
+  @Override
+  public boolean matches(Method method, Class<?> targetClass, Object... args) {
+    throw new IllegalStateException("isRuntime()==false");
+  }
+
+  @Override
+  public boolean matches(Class<?> clazz) {
+    // when using DAOs simply change to some class like ApplicationDao
+    return DefaultRepository.class.isAssignableFrom(clazz);
+  }
+}
+
+
+
+
+
Managing and granting the data-permissions
+
+

Following our authorization guide we can simply create a permission for each country. We might simply reserve a prefix (as virtual «app-id») for each data-permission to allow granting data-permissions to end-users across all applications of the IT landscape. In our example we could create access controls country.DE, country.US, country.ES, etc. and assign those to the users. The method permissionChecker.getPermittedCountriesForReading() would then scan for these access controls and only return the 2-letter country code from it.

+
+
+ + + + + +
+ + +Before you make your decisions how to design your access controls please clarify the following questions: +
+
+
+
    +
  • +

    Do you need to separate data-permissions independent of the functional permissions? E.g. may it be required to express that a user can read data from the countries ES and PL but is only permitted to modify data from PL? In such case a single assignment of "country-permissions" to users is insufficient.

    +
  • +
  • +

    Do you want to grant data-permissions individually for each application (higher flexibility and complexity) or for the entire application landscape (simplicity, better maintenance for administrators)? In case of the first approach you would rather have access controls like app1.country.GB and app2.country.GB.

    +
  • +
  • +

    Do your data-permissions depend on objects that can be created dynamically inside your application?

    +
  • +
  • +

    If you want to grant data-permissions on other business objects (entities), how do you want to reference them (primary keys, business keys, etc.)? What reference is most stable? Which is most readable?

    +
  • +
+
+
+ +
+
+
+
JWT
+
+

JWT (JSON Web Token) is an open standard (see RFC 7519) for creating JSON based access tokens that assert some number of claims. +With an IT landscape divided into multiple smaller apps you want to avoid coupling all those apps or services tightly with your IAM (Identity & Access Management). +Instead your apps simply expect a JWT as bearer-token in the Authorization HTTP header field. +All they need to do for authentication is validating this JWT. +The actual authentication is done centrally by an access system (IAM) that authors those JWTs. +Therefore we recommend to use strong asymmetric cryptography to sign the JWT when it is authored. +Create a keypair per environment and keep the private key as a secret only known to the access system authorizing the JWTs. +Your apps only need to know the public key in order to validate the JWT. +Any request without a JWT or with an invalid JWT will be rejected (with status code 401).

+
+
+

When using spring check the JWT Spring-Starter. +For quarkus follow Using JWT RBAC.

+
+
+ +
+
+
Cross-site request forgery (CSRF)
+
+

CSRF is a type of malicious exploit of a web application that allows an attacker to induce users to perform actions that they do not intend to perform.

+
+
+
+csrf +
+
+
+

More details about csrf can be found at https://owasp.org/www-community/attacks/csrf.

+
+
+
+
Secure devon4j server against CSRF
+
+

In case your devon4j server application is not accessed by browsers or the web-client is using JWT based authentication, you are already safe regarding CSRF. +However, if your application is accessed from a browser and you are using form based authentication (with session cookie) or basic authentication, you need to enable CSRF protection. +This guide will tell you how to do this.

+
+
+
Dependency
+
+

To secure your devon4j application against CSRF attacks, you only need to add the following dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-security-csrf</artifactId>
+</dependency>
+
+
+
+

Starting with devon4j version 2020.12.001 application template, this is all you need to do. +However, if you have started from an older version or you want to understand more, please read on.

+
+
+
+
Pluggable web-security
+
+

To enable pluggable security via devon4j security starters you need to apply WebSecurityConfigurer to your BaseWebSecurityConfig (your class extending spring-boot’s WebSecurityConfigurerAdapter) as following:

+
+
+
+
  @Inject
+  private WebSecurityConfigurer webSecurityConfigurer;
+
+  public void configure(HttpSecurity http) throws Exception {
+    // disable CSRF protection by default, use csrf starter to override.
+	  http = http.csrf().disable();
+	  // apply pluggable web-security from devon4j security starters
+    http = this.webSecurityConfigurer.configure(http);
+    .....
+  }
+
+
+
+
+
Custom CsrfRequestMatcher
+
+

If you want to customize which HTTP requests will require a CSRF token, you can implement your own CsrfRequestMatcher and provide it to the devon4j CSRF protection via qualified injection as following:

+
+
+
+
@Named("CsrfRequestMatcher")
+public class CsrfRequestMatcher implements RequestMatcher {
+  @Override
+  public boolean matches(HttpServletRequest request) {
+    .....
+  }
+}
+
+
+
+

Please note that the exact name (@Named("CsrfRequestMatcher")) is required here to ensure your custom implementation will be injected properly.

+
+
+
+
CsrfRestService
+
+

With the devon4j-starter-security-csrf the CsrfRestService gets integrated into your app. +It provides an operation to get the CSRF token via an HTTP GET request. +The URL path to retrieve this CSRF token is services/rest/csrf/v1/token. +As a result you will get a JSON like the following:

+
+
+
+
{
+  "token":"3a8a5f66-c9eb-4494-81e1-7cc58bc3a519",
+  "parameterName":"_csrf",
+  "headerName":"X-CSRF-TOKEN"
+}
+
+
+
+

The token value is a strong random value that will differ for each user session. +It has to be sent with subsequent HTTP requests (when method is other than GET) in the specified header (X-CSRF-TOKEN).

+
+
+
+
How it works
+
+

Putting it all together, a browser client should call the CsrfRestService after successful login to receive the current CSRF token. +With every subsequent HTTP request (other than GET) the client has to send this token in the according HTTP header. +Otherwise the server will reject the request to prevent CSRF attacks. +Therefore, an attacker might make your browser perform HTTP requests towards your devon4j application backend via <image> elements, <iframes>, etc. +Your browser will then still include your session cookie if you are already logged in (e.g. from another tab). +However, in case he wants to trigger DELETE or POST requests trying your browser to make changes in the application (delete or update data, etc.) this will fail without CSRF token. +The attacker may make your browser retrieve the CSRF token but he will not be able to retrieve the result and put it into the header of other requests due to the same-origin-policy. +This way your application will be secured against CSRF attacks.

+
+
+
+
+
Configure devon4ng client for CSRF
+
+

Devon4ng client configuration for CSRF is described here

+
+
+ +
+
+
Aspect Oriented Programming (AOP)
+
+

AOP is a powerful feature for cross-cutting concerns. However, if used extensively and for the wrong things, an application can get unmaintainable. Therefore we give you the best practices where and how to use AOP properly.

+
+
+
+
AOP Key Principles
+
+

We follow these principles:

+
+
+
    +
  • +

    We use spring AOP based on dynamic proxies (and fallback to cglib).

    +
  • +
  • +

    We avoid AspectJ and other mighty and complex AOP frameworks whenever possible

    +
  • +
  • +

    We only use AOP where we consider it as necessary (see below).

    +
  • +
+
+
+
+
AOP Usage
+
+

We recommend to use AOP with care but we consider it established for the following cross cutting concerns:

+
+
+ +
+
+
+
AOP Debugging
+
+

When using AOP with dynamic proxies the debugging of your code can get nasty. As you can see by the red boxes in the call stack in the debugger there is a lot of magic happening while you often just want to step directly into the implementation skipping all the AOP clutter. When using Eclipse this can easily be achieved by enabling step filters. Therefore you have to enable the feature in the Eclipse tool bar (highlighted in red).

+
+
+
+AOP debugging +
+
+
+

In order to properly make this work you need to ensure that the step filters are properly configured:

+
+
+
+Step Filter Configuration +
+
+
+

Ensure you have at least the following step-filters configured and active:

+
+
+
+
ch.qos.logback.*
+com.devonfw.module.security.*
+java.lang.reflect.*
+java.security.*
+javax.persistence.*
+org.apache.commons.logging.*
+org.apache.cxf.jaxrs.client.*
+org.apache.tomcat.*
+org.h2.*
+org.springframework.*
+
+
+
+ +
+
+
Exception Handling
+ +
+
+
Exception Principles
+
+

For exceptions we follow these principles:

+
+
+
    +
  • +

    We only use exceptions for exceptional situations and not for programming control flows, etc. Creating an exception in Java is expensive and hence you should not do it just for testing if something is present, valid or permitted. In the latter case design your API to return this as a regular result.

    +
  • +
  • +

    We use unchecked exceptions (RuntimeException) [2]

    +
  • +
  • +

    We distinguish internal exceptions and user exceptions:

    +
    +
      +
    • +

      Internal exceptions have technical reasons. For unexpected and exotic situations it is sufficient to throw existing exceptions such as IllegalStateException. For common scenarios a own exception class is reasonable.

      +
    • +
    • +

      User exceptions contain a message explaining the problem for end users. Therefore we always define our own exception classes with a clear, brief but detailed message.

      +
    • +
    +
    +
  • +
  • +

    Our own exceptions derive from an exception base class supporting

    + +
  • +
+
+
+

All this is offered by mmm-util-core that we propose as solution.

+
+
+
+
Exception Example
+
+

Here is an exception class from our sample application:

+
+
+
+
public class IllegalEntityStateException extends ApplicationBusinessException {
+
+  private static final long serialVersionUID = 1L;
+
+  public IllegalEntityStateException(Object entity, Object state) {
+
+    this((Throwable) null, entity, state);
+  }
+
+
+  public IllegalEntityStateException(Object entity, Object currentState, Object newState) {
+
+    this(null, entity, currentState, newState);
+  }
+
+  public IllegalEntityStateException(Throwable cause, Object entity, Object state) {
+
+    super(cause, createBundle(NlsBundleApplicationRoot.class).errorIllegalEntityState(entity, state));
+  }
+
+  public IllegalEntityStateException(Throwable cause, Object entity, Object currentState, Object newState) {
+
+    super(cause, createBundle(NlsBundleApplicationRoot.class).errorIllegalEntityStateChange(entity, currentState,
+        newState));
+  }
+
+}
+
+
+
+

The message templates are defined in the interface NlsBundleApplicationRoot as following:

+
+
+
+
public interface NlsBundleApplicationRoot extends NlsBundle {
+
+
+  @NlsBundleMessage("The entity {entity} is in state {state}!")
+  NlsMessage errorIllegalEntityState(@Named("entity") Object entity, @Named("state") Object state);
+
+
+  @NlsBundleMessage("The entity {entity} in state {currentState} can not be changed to state {newState}!")
+  NlsMessage errorIllegalEntityStateChange(@Named("entity") Object entity, @Named("currentState") Object currentState,
+      @Named("newState") Object newState);
+
+
+  @NlsBundleMessage("The property {property} of object {object} can not be changed!")
+  NlsMessage errorIllegalPropertyChange(@Named("object") Object object, @Named("property") Object property);
+
+  @NlsBundleMessage("There is currently no user logged in")
+  NlsMessage errorNoActiveUser();
+
+
+
+
+
Handling Exceptions
+
+

For catching and handling exceptions we follow these rules:

+
+
+
    +
  • +

    We do not catch exceptions just to wrap or to re-throw them.

    +
  • +
  • +

    If we catch an exception and throw a new one, we always have to provide the original exception as cause to the constructor of the new exception.

    +
  • +
  • +

    At the entry points of the application (e.g. a service operation) we have to catch and handle all throwables. This is done via the exception-facade-pattern via an explicit facade or aspect. The devon4j-rest module already provides ready-to-use implementations for this such as RestServiceExceptionFacade. The exception facade has to …​

    +
    +
      +
    • +

      log all errors (user errors on info and technical errors on error level)

      +
    • +
    • +

      ensure the entire exception is passed to the logger (not only the message) so that the logger can capture the entire stacktrace and the root cause is not lost.

      +
    • +
    • +

      convert the error to a result appropriable for the client and secure for Sensitive Data Exposure. Especially for security exceptions only a generic security error code or message may be revealed but the details shall only be logged but not be exposed to the client. All internal exceptions are converted to a generic error with a message like:

      +
      +
      +
      +

      An unexpected technical error has occurred. We apologize any inconvenience. Please try again later.

      +
      +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
Common Errors
+
+

The following errors may occur in any devon application:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 16. Common Exceptions
CodeMessageLink

TechnicalError

An unexpected error has occurred! We apologize any inconvenience. Please try again later.

TechnicalErrorUserException.java

ServiceInvoke

«original message of the cause»

ServiceInvocationFailedException.java

+
+ +
+
+
Internationalization
+
+

Internationalization (I18N) is about writing code independent from locale-specific information. +For I18N of text messages we are suggesting +mmm native-language-support.

+
+
+

In devonfw we have developed a solution to manage text internationalization. The devonfw solution comes in two aspects:

+
+
+
    +
  • +

    Bind locale information to the user.

    +
  • +
  • +

    Get the messages in the current user locale.

    +
  • +
+
+
+
+
Binding locale information to the user
+
+

We have defined two different points to bind locale information to the user, depending on whether the user is authenticated or not.

+
+
+
    +
  • +

    User not authenticated: devonfw intercepts the unsecured request and extracts the locale from it. At first, we try to extract a language parameter from the request and if that is not possible, we extract the locale from the Accept-language header.

    +
  • +
  • +

    User authenticated: during the login process, application developers are responsible for filling the language parameter in the UserProfile class. This language parameter could be obtained from DB, LDAP, request, etc. In the devonfw sample we get the locale information from the database.

    +
  • +
+
+
+

This image shows the entire process:

+
+
+
+Internationalization +
+
+
+
+
Getting internationalizated messages
+
+

devonfw has a bean that manages i18n message resolution, the ApplicationLocaleResolver. This bean is responsible for getting the current user, extracting locale information from it and reading the correct properties file to get the message.

+
+
+

The i18n properties file must be called ApplicationMessages_la_CO.properties where la=language and CO=country. This is an example of an i18n properties file for the English language to translate the devonfw sample user roles:

+
+
+

ApplicationMessages_en_US.properties

+
+
+
+
admin=Admin
+
+
+
+

You should define an ApplicationMessages_la_CO.properties file for every language that your application needs.

+
+
+

The ApplicationLocaleResolver bean is injected in the AbstractComponentFacade class, so this bean is available in the logic layer and you only need the following code to get an internationalized message:

+
+
+
+
String msg = getApplicationLocaleResolver().getMessage("mymessage");
+
+
+
+ +
+
+
Service Client
+
+

This guide is about consuming (calling) services from other applications (micro-services). For providing services, see the Service-Layer Guide. Services can be consumed by the client or the server. As the client is typically not written in Java, you should consult the according guide for your client technology. In case you want to call a service within your Java code, this guide is the right place to get help.

+
+
+
+
Motivation
+
+

Various solutions already exist for calling services, such as RestTemplate from spring or the JAX-RS client API. Furthermore, each and every service framework offers its own API as well. These solutions might be suitable for very small and simple projects (with one or two such invocations). However, with the trend of microservices, the invocation of a service becomes a very common use-case that occurs all over the place. You typically need a solution that is very easy to use but supports flexible configuration, adding headers for authentication, mapping of errors from the server, logging success/errors with duration for performance analysis, support for synchronous and asynchronous invocations, etc. This is exactly what this devon4j service-client solution brings to you.

+
+
+
+
Usage
+
+

Spring

+
+
+

For Spring, follow the Spring rest-client guide.

+
+
+

Quarkus

+
+
+

For Quarkus, we recommend to follow the official Quarkus rest-client guide

+
+
+ +
+
+
Testing
+ +
+
+
General best practices
+
+

For testing please follow our general best practices:

+
+
+
    +
  • +

    Tests should have a clear goal that should also be documented.

    +
  • +
  • +

    Tests have to be classified into different integration levels.

    +
  • +
  • +

    Tests should follow a clear naming convention.

    +
  • +
  • +

    Automated tests need to properly assert the result of the tested operation(s) in a reliable way. E.g. avoid stuff like assertThat(service.getAllEntities()).hasSize(42) or even worse tests that have no assertion at all.

    +
  • +
  • +

    Tests need to be independent of each other. Never write test-cases or tests (in Java @Test methods) that depend on another test to be executed before.

    +
  • +
  • +

    Use AssertJ to write good readable and maintainable tests that also provide valuable feedback in case a test fails. Do not use legacy JUnit methods like assertEquals anymore!

    +
  • +
  • +

    For easy understanding divide your test in three commented sections:

    +
    +
      +
    • +

      //given

      +
    • +
    • +

      //when

      +
    • +
    • +

      //then

      +
    • +
    +
    +
  • +
  • +

    Plan your tests and test data management properly before implementing.

    +
  • +
  • +

    Instead of having a too strong focus on test coverage better ensure you have covered your critical core functionality properly and review the code including tests.

    +
  • +
  • +

    Test code shall NOT be seen as second class code. You shall consider design, architecture and code-style also for your test code but do not over-engineer it.

    +
  • +
  • +

    Test automation is good but should be considered in relation to cost per use. Creating full coverage via automated system tests can cause a massive amount of test-code that can turn out as a huge maintenance hell. Always consider all aspects including product life-cycle, criticality of use-cases to test, and variability of the aspect to test (e.g. UI, test-data).

    +
  • +
  • +

    Use continuous integration and establish that the entire team wants to have clean builds and running tests.

    +
  • +
  • +

    Prefer delegation over inheritance for cross-cutting testing functionality. Good places to put this kind of code can be realized and reused via the JUnit @Rule mechanism.

    +
  • +
+
+
+
+
Test Automation Technology Stack
+
+

For test automation we use JUnit. However, we are strictly doing all assertions with AssertJ. For mocking we use mockito. +In order to mock remote connections we use wiremock. +For testing entire components or sub-systems we recommend to use spring-boot-starter-test as lightweight and fast testing infrastructure that is already shipped with devon4j-test.

+
+
+

In case you have to use a full blown JEE application server, we recommend to use arquillian. To get started with arquillian, look here.

+
+
+
+
Test Doubles
+
+

We use test doubles as generic term for mocks, stubs, fakes, dummies, or spys to avoid confusion. Here is a short summary from stubs VS mocks:

+
+
+
    +
  • +

    Dummy objects specifying no logic at all. May declare data in a POJO style to be used as boiler plate code to parameter lists or even influence the control flow towards the test’s needs.

    +
  • +
  • +

    Fake objects actually have working implementations, but usually take some shortcut which makes them not suitable for production (an in memory database is a good example).

    +
  • +
  • +

    Stubs provide canned answers to calls made during the test, usually not responding at all to anything outside what’s programmed in for the test. Stubs may also record information about calls, such as an email gateway stub that remembers the messages it 'sent', or maybe only how many messages it 'sent'.

    +
  • +
  • +

    Mocks are objects pre-programmed with expectations, which form a specification of the calls they are expected to receive.

    +
  • +
+
+
+

We try to give some examples, which should make it somehow clearer:

+
+
+
Stubs
+
+

Best Practices for applications:

+
+
+
    +
  • +

    A good way to replace small to medium large boundary systems, whose impact (e.g. latency) should be ignored during load and performance tests of the application under development.

    +
  • +
  • +

    As stub implementation will rely on state-based verification, there is the threat, that test developers will partially reimplement the state transitions based on the replaced code. This will immediately lead to a black maintenance hole, so better use mocks to assure the certain behavior on interface level.

    +
  • +
  • +

    Do NOT use stubs as basis of a large amount of test cases as due to state-based verification of stubs, test developers will enrich the stub implementation to become a large monster with its own hunger after maintenance efforts.

    +
  • +
+
+
+
+
Mocks
+
+

Best Practices for applications:

+
+
+
    +
  • +

    Replace not-needed dependencies of your system-under-test (SUT) to minimize the application context to start of your component framework.

    +
  • +
  • +

    Replace dependencies of your SUT to impact the control flow under test without establishing all the context parameters needed to match the control flow.

    +
  • +
  • +

    Remember: Not everything has to be mocked! Especially on lower levels of tests like isolated module tests you can be betrayed into a mocking delusion, where you end up in a hundred lines of code mocking the whole context and five lines executing the test and verifying the mocks behavior. Always keep in mind the benefit-cost ratio, when implementing tests using mocks.

    +
  • +
+
+
+
+
Wiremock
+
+

If you need to mock remote connections such as HTTP-Servers, wiremock offers easy to use functionality. For a full description see the homepage or the github repository. Wiremock can be used either as a JUnit Rule, in Java outside of JUnit or as a standalone process. The mocked server can be configured to respond to specific requests in a given way via a fluent Java API, JSON files and JSON over HTTP. An example as an integration to JUnit can look as follows.

+
+
+
+
import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig;
+import com.github.tomakehurst.wiremock.junit.WireMockRule;
+
+public class WireMockOfferImport{
+
+  @Rule
+  public WireMockRule mockServer = new WireMockRule(wireMockConfig().dynamicPort());
+
+  @Test
+  public void requestDataTest() throws Exception {
+  int port = this.mockServer.port();
+  ...}
+
+
+
+

This creates a server on a randomly chosen free port on the running machine. You can also specify the port to be used if wanted. Other than that there are several options to further configure the server. This includes HTTPs, proxy settings, file locations, logging and extensions.

+
+
+
+
  @Test
+  public void requestDataTest() throws Exception {
+      this.mockServer.stubFor(get(urlEqualTo("/new/offers")).withHeader("Accept", equalTo("application/json"))
+      .withHeader("Authorization", containing("Basic")).willReturn(aResponse().withStatus(200).withFixedDelay(1000)
+      .withHeader("Content-Type", "application/json").withBodyFile("/wireMockTest/jsonBodyFile.json")));
+  }
+
+
+
+

This will stub the URL localhost:port/new/offers to respond with a status 200 message containing a header (Content-Type: application/json) and a body with content given in jsonBodyFile.json if the request matches several conditions. +It has to be a GET request to ../new/offers with the two given header properties.

+
+
+

Note that by default files are located in src/test/resources/__files/. When using only one WireMock server one can omit the this.mockServer before the stubFor call (static method). +You can also add a fixed delay to the response or processing delay with WireMock.addRequestProcessingDelay(time) in order to test for timeouts.

+
+
+

WireMock can also respond with different corrupted messages to simulate faulty behaviour.

+
+
+
+
@Test(expected = ResourceAccessException.class)
+public void faultTest() {
+
+    this.mockServer.stubFor(get(urlEqualTo("/fault")).willReturn(aResponse()
+    .withFault(Fault.MALFORMED_RESPONSE_CHUNK)));
+...}
+
+
+
+

A GET request to ../fault returns an OK status header, then garbage, and then closes the connection.

+
+
+
+
+
Integration Levels
+
+

There are many discussions about the right level of integration for test automation. Sometimes it is better to focus on small, isolated modules of the system - whatever a "module" may be. In other cases it makes more sense to test integrated groups of modules. Because there is no universal answer to this question, devonfw only defines a common terminology for what could be tested. Each project must make its own decision where to put the focus of test automation. There is no worldwide accepted terminology for the integration levels of testing. In general we consider ISTQB. However, with a technical focus on test automation we want to get more precise.

+
+
+

The following picture shows a simplified view of an application based on the devonfw reference architecture. We define four integration levels that are explained in detail below. +The boxes in the picture contain parenthesized numbers. These numbers depict the lowest integration level, a box belongs to. Higher integration levels also contain all boxes of lower integration levels. When writing tests for a given integration level, related boxes with a lower integration level must be replaced by test doubles or drivers.

+
+
+
+Integration Levels +
+
+
+

The main difference between the integration levels is the amount of infrastructure needed to test them. The more infrastructure you need, the more bugs you will find, but the more instable and the slower your tests will be. So each project has to make a trade-off between pros and contras of including much infrastructure in tests and has to select the integration levels that fit best to the project.

+
+
+

Consider, that more infrastructure does not automatically lead to a better bug-detection. There may be bugs in your software that are masked by bugs in the infrastructure. The best way to find those bugs is to test with very few infrastructure.

+
+
+

External systems do not belong to any of the integration levels defined here. devonfw does not recommend involving real external systems in test automation. This means, they have to be replaced by test doubles in automated tests. An exception may be external systems that are fully under control of the own development team.

+
+
+

The following chapters describe the four integration levels.

+
+
+
Level 1 Module Test
+
+

The goal of an isolated module test is to provide fast feedback to the developer. Consequently, isolated module tests must not have any interaction with the client, the database, the file system, the network, etc.

+
+
+

An isolated module test is testing a single class or at least a small set of classes in isolation. If such classes depend on other components or external resources, etc. these shall be replaced with a test double.

+
+
+
+
public class MyClassTest extends ModuleTest {
+
+  @Test
+  public void testMyClass() {
+
+    // given
+    MyClass myClass = new MyClass();
+    // when
+    String value = myClass.doSomething();
+    // then
+    assertThat(value).isEqualTo("expected value");
+  }
+
+}
+
+
+
+

For an advanced example see here.

+
+
+
+
Level 2 Component Test
+
+

A component test aims to test components or component parts as a unit. +These tests typically run with a (light-weight) infrastructure such as spring-boot-starter-test and can access resources such as a database (e.g. for DAO tests). +Further, no remote communication is intended here. Access to external systems shall be replaced by a test double.

+
+
+

With devon4j and spring you can write a component-test as easy as illustrated in the following example:

+
+
+
+
@SpringBootTest(classes = { MySpringBootApp.class }, webEnvironment = WebEnvironment.NONE)
+public class UcFindCountryTest extends ComponentTest {
+  @Inject
+  private UcFindCountry ucFindCountry;
+
+  @Test
+  public void testFindCountry() {
+
+    // given
+    String countryCode = "de";
+
+    // when
+    TestUtil.login("user", MyAccessControlConfig.FIND_COUNTRY);
+    CountryEto country = this.ucFindCountry.findCountry(countryCode);
+
+    // then
+    assertThat(country).isNotNull();
+    assertThat(country.getCountryCode()).isEqualTo(countryCode);
+    assertThat(country.getName()).isEqualTo("Germany");
+  }
+}
+
+
+
+

This test will start the entire spring-context of your app (MySpringBootApp). Within the test spring will inject according spring-beans into all your fields annotated with @Inject. In the test methods you can use these spring-beans and perform your actual tests. This pattern can be used for testing DAOs/Repositories, Use-Cases, or any other spring-bean with its entire configuration including database and transactions.

+
+
+

When you are testing use-cases your authorization will also be in place. Therefore, you have to simulate a logon in advance, which is done via the login method in the above example. The test-infrastructure will automatically do a logout for you after each test method in doTearDown.

+
+
+
+
Level 3 Subsystem Test
+
+

A subsystem test runs against the external interfaces (e.g. HTTP service) of the integrated subsystem. Subsystem tests of the client subsystem are described in the devon4ng testing guide. In devon4j the server (JEE application) is the subsystem under test. The tests act as a client (e.g. service consumer) and the server has to be integrated and started in a container.

+
+
+

With devon4j and spring you can write a subsystem-test as easy as illustrated in the following example:

+
+
+
+
@SpringBootTest(classes = { MySpringBootApp.class }, webEnvironment = WebEnvironment.RANDOM_PORT)
+public class CountryRestServiceTest extends SubsystemTest {
+
+  @Inject
+  private ServiceClientFactory serviceClientFactory;
+
+  @Test
+  public void testFindCountry() {
+
+    // given
+    String countryCode = "de";
+
+    // when
+    CountryRestService service = this.serviceClientFactory.create(CountryRestService.class);
+    CountryEto country = service.findCountry(countryCode);
+
+    // then
+    assertThat(country).isNotNull();
+    assertThat(country.getCountryCode()).isEqualTo(countryCode);
+    assertThat(country.getName()).isEqualTo("Germany");
+  }
+}
+
+
+
+

Even though not obvious on the first look this test will start your entire application as a server on a free random port (so that it works in CI with parallel builds for different branches) and tests the invocation of a (REST) service including (un)marshalling of data (e.g. as JSON) and transport via HTTP (all in the invocation of the findCountry method).

+
+
+

Do not confuse a subsystem test with a system integration test. A system integration test validates the interaction of several systems where we do not recommend test automation.

+
+
+
+
Level 4 System Test
+
+

A system test has the goal to test the system as a whole against its official interfaces such as its UI or batches. The system itself runs as a separate process in a way close to a regular deployment. Only external systems are simulated by test doubles.

+
+
+

The devonfw only gives advice for automated system tests (TODO see allure testing framework). In nearly every project there must be manual system tests, too. These manual system tests are out of scope here.

+
+
+
+
Classifying Integration-Levels
+
+

devon4j defines Category-Interfaces that shall be used as JUnit Categories. +Also devon4j provides abstract base classes that you may extend in your test-cases if you like.

+
+
+

devon4j further pre-configures the maven build to only run integration levels 1-2 by default (e.g. for fast feedback in continuous integration). It offers the profiles subsystemtest (1-3) and systemtest (1-4). In your nightly build you can simply add -Psystemtest to run all tests.

+
+
+
+
+
Implementation
+
+

This section introduces how to implement tests on the different levels with the given devonfw infrastructure and the proposed frameworks.

+
+
+
Module Test
+
+

In devon4j you can extend the abstract class ModuleTest to basically get access to assertions. In order to test classes embedded in dependencies and external services one needs to provide mocks for that. As the technology stack recommends we use the Mockito framework to offer this functionality. The following example shows how to implement Mockito into a JUnit test.

+
+
+
+
import static org.mockito.Mockito.when;
+import static org.mockito.Mockito.mock;
+...
+
+public class StaffmanagementImplTest extends ModuleTest {
+  @Rule
+  public MockitoRule rule = MockitoJUnit.rule();
+
+  @Test
+  public void testFindStaffMember() {
+  ...}
+}
+
+
+
+

Note that the test class does not use the @SpringApplicationConfiguration annotation. In a module test one does not use the whole application. +The JUnit rule is the best solution to use in order to get all needed functionality of Mockito. Static imports are a convenient option to enhance readability within Mockito tests. +You can define mocks with the @Mock annotation or the mock(*.class) call. To inject the mocked objects into your class under test you can use the @InjectMocks annotation. This automatically uses the setters of StaffmanagementImpl to inject the defined mocks into the class under test (CUT) when there is a setter available. In this case the beanMapper and the staffMemberDao are injected. Of course it is possible to do this manually if you need more control.

+
+
+
+
  @Mock
+  private BeanMapper beanMapper;
+  @Mock
+  private StaffMemberEntity staffMemberEntity;
+  @Mock
+  private StaffMemberEto staffMemberEto;
+  @Mock
+  private StaffMemberDao staffMemberDao;
+  @InjectMocks
+  StaffmanagementImpl staffmanagementImpl = new StaffmanagementImpl();
+
+
+
+

The mocked objects do not provide any functionality at the time being. To define what happens on a method call on a mocked dependency in the CUT one can use when(condition).thenReturn(result). In this case we want to test findStaffMember(Long id) in the StaffmanagementImpl.java.

+
+
+
+
public StaffMemberEto findStaffMember(Long id) {
+  return getBeanMapper().map(getStaffMemberDao().find(id), StaffMemberEto.class);
+}
+
+
+
+

In this simple example one has to stub two calls on the CUT as you can see below. For example the method call of the CUT staffMemberDao.find(id) is stubbed for returning a mock object staffMemberEntity that is also defined as mock.

+
+
+
+
Subsystem Test
+
+

devon4j provides a simple test infrastructure to aid with the implementation of subsystem tests.

+
+
+
+
//given
+long id = 1L;
+Class<StaffMemberEto> targetClass = StaffMemberEto.class;
+when(this.staffMemberDao.find(id)).thenReturn(this.staffMemberEntity);
+when(this.beanMapper.map(this.staffMemberEntity, targetClass)).thenReturn(this.staffMemberEto);
+
+//when
+StaffMemberEto resultEto = this.staffmanagementImpl.findStaffMember(id);
+
+//then
+assertThat(resultEto).isNotNull();
+assertThat(resultEto).isEqualTo(this.staffMemberEto);
+
+
+
+

After the test method call one can verify the expected results. Mockito can check whether a mocked method call was indeed called. This can be done using Mockito verify. Note that it does not generate any value if you check for method calls that are needed to reach the asserted result anyway. Call verification can be useful e.g. when you want to assure that statistics are written out without actually testing them.

+
+
+
+
+
Regression testing
+
+

When it comes to complex output (even binary) that you want to regression test by comparing with an expected result, you should consider Approval Tests using ApprovalTests.Java. +If applied for the right problems, it can be very helpful.

+
+
+
+
Deployment Pipeline
+
+

A deployment pipeline is a semi-automated process that gets software-changes from version control into production. It contains several validation steps, e.g. automated tests of all integration levels. +Because devon4j should fit to different project types - from agile to waterfall - it does not define a standard deployment pipeline. But we recommend to define such a deployment pipeline explicitly for each project and to find the right place in it for each type of test.

+
+
+

For that purpose, it is advisable to have a fast running test suite that gives as much confidence as possible without needing too much time and too much infrastructure. This test suite should run in an early stage of your deployment pipeline. Maybe the developer should run it even before he/she checked in the code. Usually lower integration levels are more suitable for this test suite than higher integration levels.

+
+
+

Note, that the deployment pipeline always should contain manual validation steps, at least manual acceptance testing. There also may be manual validation steps that have to be executed for special changes only, e.g. usability testing. Management and execution processes of those manual validation steps are currently not in the scope of devonfw.

+
+
+
+
Test Coverage
+
+

We are using tools (SonarQube/Jacoco) to measure the coverage of the tests. Please always keep in mind that the only reliable message of a code coverage of X% is that (100-X)% of the code is entirely untested. It does not say anything about the quality of the tests or the software though it often relates to it.

+
+
+
+
Test Configuration
+
+

This section covers test configuration in general without focusing on integration levels as in the first chapter.

+
+
+
Configure Test Specific Beans
+
+

Sometimes it can become handy to provide other or differently configured bean implementations via CDI than those available in production. For example, when creating beans using @Bean-annotated methods they are usually configured within those methods. WebSecurityBeansConfig shows an example of such methods.

+
+
+
+
@Configuration
+public class WebSecurityBeansConfig {
+  //...
+  @Bean
+  public AccessControlSchemaProvider accessControlSchemaProvider() {
+    // actually no additional configuration is shown here
+    return new AccessControlSchemaProviderImpl();
+  }
+  //...
+}
+
+
+
+

AccessControlSchemaProvider allows to programmatically access data defined in some XML file, e.g. access-control-schema.xml. Now, one can imagine that it would be helpful if AccessControlSchemaProvider would point to some other file than the default within a test class. That file could provide content that differs from the default. +The question is: how can I change resource path of AccessControlSchemaProviderImpl within a test?

+
+
+

One very helpful solution is to use static inner classes. +Static inner classes can contain @Bean -annotated methods, and by placing them in the classes parameter in @SpringBootTest(classes = { /* place class here*/ }) annotation the beans returned by these methods are placed in the application context during test execution. Combining this feature with inheritance allows to override methods defined in other configuration classes as shown in the following listing where TempWebSecurityConfig extends WebSecurityBeansConfig. This relationship allows to override public AccessControlSchemaProvider accessControlSchemaProvider(). Here we are able to configure the instance of type AccessControlSchemaProviderImpl before returning it (and, of course, we could also have used a completely different implementation of the AccessControlSchemaProvider interface). By overriding the method the implementation of the super class is ignored, hence, only the new implementation is called at runtime. Other methods defined in WebSecurityBeansConfig which are not overridden by the subclass are still dispatched to WebSecurityBeansConfig.

+
+
+
+
//... Other testing related annotations
+@SpringBootTest(classes = { TempWebSecurityConfig.class })
+public class SomeTestClass {
+
+  public static class TempWebSecurityConfig extends WebSecurityBeansConfig {
+
+    @Override
+    @Bean
+    public AccessControlSchemaProvider accessControlSchemaProvider() {
+
+      ClassPathResource resource = new ClassPathResource(locationPrefix + "access-control-schema3.xml");
+      AccessControlSchemaProviderImpl accessControlSchemaProvider = new AccessControlSchemaProviderImpl();
+      accessControlSchemaProvider.setAccessControlSchema(resource);
+      return accessControlSchemaProvider;
+    }
+  }
+}
+
+
+
+

The following chapter of the Spring framework documentation explains the issue, but uses a slightly different way to obtain the configuration.

+
+
+
+
Test Data
+
+

It is possible to obtain test data in two different ways depending on your test’s integration level.

+
+
+
+
+
Debugging Tests
+
+

The following two sections describe two debugging approaches for tests. Tests are either run from within the IDE or from the command line using Maven.

+
+
+
Debugging with the IDE
+
+

Debugging with the IDE is as easy as always. Even if you want to execute a SubsystemTest which needs a Spring context and a server infrastructure to run properly, you just set your breakpoints and click on Debug As → JUnit Test. The test infrastructure will take care of initializing the necessary infrastructure - if everything is configured properly.

+
+
+
+
Debugging with Maven
+
+

Please refer to the following two links to find a guide for debugging tests when running them from Maven.

+
+ +
+

In essence, you first have to execute a test using the command line. Maven will halt just before the test execution and wait for your IDE to connect to the process. When receiving a connection the test will start and then pause at any breakpoint set in advance. +The first link states that tests are started through the following command:

+
+
+
+
mvn -Dmaven.surefire.debug test
+
+
+
+

Although this is correct, it will run every test class in your project and - which is time consuming and mostly unnecessary - halt before each of these tests. +To counter this problem you can simply execute a single test class through the following command (here we execute the TablemanagementRestServiceTest from the restaurant sample application):

+
+
+
+
mvn test -Dmaven.surefire.debug test -Dtest=TablemanagementRestServiceTest
+
+
+
+

It is important to notice that you first have to execute the Maven command in the according submodule, e.g. to execute the TablemanagementRestServiceTest you have first to navigate to the core module’s directory.

+
+
+ +
+
+
+
Transfer-Objects
+
+

The technical data model is defined in form of persistent entities. +However, passing persistent entities via call-by-reference across the entire application will soon cause problems:

+
+
+
    +
  • +

    Changes to a persistent entity are directly written back to the persistent store when the transaction is committed. When the entity is sent across the application also changes tend to take place in multiple places endangering data sovereignty and leading to inconsistency.

    +
  • +
  • +

    You want to send and receive data via services across the network and have to define what section of your data is actually transferred. If you have relations in your technical model you quickly end up loading and transferring way too much data.

    +
  • +
  • +

    Modifications to your technical data model shall not automatically have impact on your external services causing incompatibilities.

    +
  • +
+
+
+

To prevent such problems transfer-objects are used leading to a call-by-value model and decoupling changes to persistent entities.

+
+
+

In the following sections the different types of transfer-objects are explained. +You will find all according naming-conventions in the architecture-mapping

+
+
+

To structure your transfer objects, we recommend the following approaches:

+
+
+ +
+
+

Also considering the following transfer objects in specific cases:

+
+
+
+
SearchCriteriaTo
+
+

For searching we create or generate a «BusinessObject»SearchCriteriaTo representing a query to find instances of «BusinessObject».

+
+
TO
+
+

There are typically transfer-objects for data that is never persistent. +For very generic cases these just carry the suffix To.

+
+
STO
+
+

We can potentially create separate service transfer objects (STO) (if possible named «BusinessObject»Sto) to keep the service API stable and independent of the actual data-model. +However, we usually do not need this and want to keep our architecture simple. +Only create STOs if you need service versioning and support previous APIs or to provide legacy service technologies that require their own isolated data-model. +In such case you also need beanmapping between STOs and ETOs/DTOs, which means extra effort and complexity that should be avoided.

+
+
+
+
+
+
+
Bean Mapping in devon4j-spring
+
+

We have developed a solution that uses a BeanMapper that allows to abstract from the underlying implementation. As mentioned in the general bean mapping guide, we started with Dozer a Java Bean to Java Bean mapper that recursively copies data from one object to another. Now we recommend using Orika. This guide will show an introduction to Orika and Dozer bean-mapper.

+
+
+
+
Bean-Mapper Dependency
+
+

To get access to the BeanMapper we have to use either of the below dependency in our POM:

+
+
+
Listing 17. Orika
+
+
<dependency>
+    <groupId>com.devonfw.java.modules</groupId>
+    <artifactId>devon4j-beanmapping-orika</artifactId>
+    <version>2020.12.002</version>
+</dependency>
+
+
+
+
Listing 18. Dozer
+
+
<dependency>
+    <groupId>com.devonfw.java.modules</groupId>
+    <artifactId>devon4j-beanmapping-dozer</artifactId>
+    <version>2020.12.002</version>
+</dependency>
+
+
+
+
+
Bean-Mapper Configuration
+
+
Bean-Mapper Configuration using Dozer
+
+

The BeanMapper implementation is based on an existing open-source bean-mapping framework. +In case of Dozer the mapping is configured src/main/resources/config/app/common/dozer-mapping.xml.

+
+
+

See the my-thai-star dozer-mapping.xml as an example. +Important is that you configure all your custom datatypes as <copy-by-reference> tags and have the mapping from PersistenceEntity (ApplicationPersistenceEntity) to AbstractEto configured properly:

+
+
+
+
 <mapping type="one-way">
+    <class-a>com.devonfw.module.basic.common.api.entity.PersistenceEntity</class-a>
+    <class-b>com.devonfw.module.basic.common.api.to.AbstractEto</class-b>
+    <field custom-converter="com.devonfw.module.beanmapping.common.impl.dozer.IdentityConverter">
+      <a>this</a>
+      <b is-accessible="true">persistentEntity</b>
+    </field>
+</mapping>
+
+
+
+
+
+
Bean-Mapper Configuration using Orika
+
+

Orika with devonfw is configured by default and sets some custom mappings for GenericEntity.java to GenericEntityDto.java. To specify and customize the mappings you can create the class BeansOrikaConfig.java that extends the class BaseOrikaConfig.java from the devon4j.orika package. To register a basic mapping, register a ClassMap for the mapperFactory with your custom mapping. Watch the example below and follow the basic Orika mapping configuration guide and the Orika advanced mapping guide.

+
+
+

Register Mappings:

+
+
+
+
mapperFactory.classMap(UserEntity.class, UserEto.class)
+			.field("email", "email")
+			.field("username", "name")
+			.byDefault()
+			.register();
+
+
+
+
+
Bean-Mapper Usage
+
+

Then we can get the BeanMapper via dependency-injection what we typically already provide by an abstract base class (e.g. AbstractUc). Now we can solve our problem very easy:

+
+
+
+
...
+UserEntity resultEntity = ...;
+...
+return getBeanMapper().map(resultEntity, UserEto.class);
+
+
+
+ +
+
+
Datatypes
+
+
+
+

A datatype is an object representing a value of a specific type with the following aspects:

+
+
+
    +
  • +

    It has a technical or business specific semantic.

    +
  • +
  • +

    Its JavaDoc explains the meaning and semantic of the value.

    +
  • +
  • +

    It is immutable and therefore stateless (its value assigned at construction time and can not be modified).

    +
  • +
  • +

    It is serializable.

    +
  • +
  • +

    It properly implements #equals(Object) and #hashCode() (two different instances with the same value are equal and have the same hash).

    +
  • +
  • +

    It shall ensure syntactical validation so it is NOT possible to create an instance with an invalid value.

    +
  • +
  • +

    It is responsible for formatting its value to a string representation suitable for sinks such as UI, loggers, etc. Also consider cases like a Datatype representing a password where toString() should return something like "**" instead of the actual password to prevent security accidents.

    +
  • +
  • +

    It is responsible for parsing the value from other representations such as a string (as needed).

    +
  • +
  • +

    It shall provide required logical operations on the value to prevent redundancies. Due to the immutable attribute all manipulative operations have to return a new Datatype instance (see e.g. BigDecimal.add(java.math.BigDecimal)).

    +
  • +
  • +

    It should implement Comparable if a natural order is defined.

    +
  • +
+
+
+

Based on the Datatype a presentation layer can decide how to view and how to edit the value. Therefore a structured data model should make use of custom datatypes in order to be expressive. +Common generic datatypes are String, Boolean, Number and its subclasses, Currency, etc. +Please note that both Date and Calendar are mutable and have very confusing APIs. Therefore, use JSR-310 or jodatime instead. +Even if a datatype is technically nothing but a String or a Number but logically something special it is worth to define it as a dedicated datatype class already for the purpose of having a central javadoc to explain it. On the other side avoid to introduce technical datatypes like String32 for a String with a maximum length of 32 characters as this is not adding value in the sense of a real Datatype. +It is suitable and in most cases also recommended to use the class implementing the datatype as API omitting a dedicated interface.

+
+
+
+— mmm project
+datatype javadoc +
+
+ +
+
+
Datatype Packaging
+
+

For the devonfw we use a common packaging schema. +The specifics for datatypes are as following:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
SegmentValueExplanation

<component>

*

Here we use the (business) component defining the datatype or general for generic datatypes.

<layer>

common

Datatypes are used across all layers and are not assigned to a dedicated layer.

<scope>

api

Datatypes are always used directly as API even though they may contain (simple) implementation logic. Most datatypes are simple wrappers for generic Java types (e.g. String) but make these explicit and might add some validation.

+
+
+
Technical Concerns
+
+

Many technologies like Dozer and QueryDSL’s (alias API) are heavily based on reflection. For them to work properly with custom datatypes, the frameworks must be able to instantiate custom datatypes with no-argument constructors. It is therefore recommended to implement a no-argument constructor for each datatype of at least protected visibility.

+
+
+
+
Datatypes in Entities
+
+

The usage of custom datatypes in entities is explained in the persistence layer guide.

+
+
+
+
Datatypes in Transfer-Objects
+
+
XML
+
+

For mapping datatypes with JAXB see XML guide.

+
+
+
+
JSON
+
+

For mapping datatypes from and to JSON see JSON custom mapping.

+
+
+ +
+
+
+
CORS configuration in Spring
+ +
+
+
Dependency
+
+

To enable the CORS support from the server side for your devon4j-Spring application, add the below dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-security-cors</artifactId>
+</dependency>
+
+
+
+
+
Configuration
+
+

Add the below properties in your application.properties file:

+
+
+
+
#CORS support
+security.cors.spring.allowCredentials=true
+security.cors.spring.allowedOriginPatterns=*
+security.cors.spring.allowedHeaders=*
+security.cors.spring.allowedMethods=OPTIONS,HEAD,GET,PUT,POST,DELETE,PATCH
+security.cors.pathPattern=/**
+
+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
AttributeDescriptionHTTP Header

allowCredentials

Decides whether the browser should include any cookies associated with the request (true if cookies should be included).

Access-Control-Allow-Credentials

allowedOrigins

List of allowed origins (use * to allow all origins).

Access-Control-Allow-Origin

allowedMethods

List of allowed HTTP request methods (OPTIONS, HEAD, GET, PUT, POST, DELETE, PATCH, etc.).

-

allowedHeaders

List of allowed headers that can be used during the request (use * to allow all headers requested by the client)

Access-Control-Allow-Headers

pathPattern

Ant-style pattern for the URL paths where to apply CORS. Use "/**" to match all URL paths.

+
+ +
+
+
Microservices in devonfw
+
+

The Microservices architecture is an approach for application development based on a series of small services grouped under a business domain. Each individual service runs autonomously and communicates with the others through their APIs. That independence between the different services allows to manage (upgrade, fix, deploy, etc.) each one without affecting the rest of the system’s services. In addition to that the microservices architecture allows to scale specific services when facing an increment of the requests, so the applications based on microservices are more flexible and stable, and can be adapted quickly to demand changes.

+
+
+

However, this new approach, developing apps based on microservices, presents some downsides.

+
+
+

Let’s see the main challenges when working with microservices:

+
+
+
    +
  • +

    Having the applications divided in different services we will need a component (router) to redirect each request to the related microservice. These redirection rules must implement filters to guarantee a proper functionality.

    +
  • +
  • +

    In order to manage correctly the routing process, the application will also need a catalog with all the microservices and its details: IPs and ports of each of the deployed instances of each microservice, the state of each instance and some other related information. This catalog is called Service Discovery.

    +
  • +
  • +

    With all the information of the Service Discovery the application will need to calculate and select between all the available instances of a microservice which is the suitable one. This will be figured out by the library Client Side Load Balancer.

    +
  • +
  • +

    The different microservices will be likely interconnected with each other, that means that in case of failure of one of the microservices involved in a process, the application must implement a mechanism to avoid the error propagation through the rest of the services and provide an alternative as a process result. To solve this, the pattern Circuit Breaker can be implemented in the calls between microservices.

    +
  • +
  • +

    As we have mentioned, the microservices will exchange calls and information with each other so our applications will need to provide a secured context to avoid not allowed operations or intrusions. In addition, since microservices must be able to operate in an isolated way, it is not recommended to maintain a session. To meet this need without using Spring sessions, a token-based authentication is used that exchanges information using the json web token (JWT) protocol.

    +
  • +
+
+
+

In addition to all of this we will find other issues related to this particular architecture that we will address fitting the requirements of each project.

+
+
+
    +
  • +

    Distributed databases: each instance of a microservice should have only one database.

    +
  • +
  • +

    Centralized logs: each instance of a microservice creates a log and a trace that should be centralized to allow an easier way to read all that information.

    +
  • +
  • +

    Centralized configuration: each microservice has its own configuration, so our applications should group all those configurations in only one place to ease the configuration management.

    +
  • +
  • +

    Automatized deployments: as we are managing several components (microservices, catalogs, balancers, etc.) the deployment should be automatized to avoid errors and ease this process.

    +
  • +
+
+
+

To address the above, devonfw microservices has an alternative approach Microservices based on Netflix-Tools.

+
+
+ +
+
+
Caching
+
+

Caching is a technical approach to improve performance. While it may appear easy on the first sight it is an advanced topic. In general, try to use caching only when required for performance reasons. If you come to the point that you need caching first think about:

+
+
+
    +
  • +

    What to cache?
    +Be sure about what you want to cache. Is it static data? How often will it change? What will happen if the data changes but due to caching you might receive "old" values? Can this be tolerated? For how long? This is not a technical question but a business requirement.

    +
  • +
  • +

    Where to cache?
    +Will you cache data on client or server? Where exactly?

    +
  • +
  • +

    How to cache?
    +Is a local cache sufficient or do you need a shared cache?

    +
  • +
+
+
+
+
Local Cache
+ +
+
+
Shared Cache
+
+
Distributed Cache
+ +
+
+ + +
+
Feature-Toggles
+
+

Most software development teams use Feature-Branching to be able to work in parallel and maintain a stable main branch in the VCS. However, Feature-Branching might not be the ideal tool in every case because of big merges and isolation between development groups. In many cases, Feature-Toggles can avoid some of these problems, so these should definitely be considered to be used in the collaborative software development.

+
+
+
+
Implementation with the devonfw
+
+

To use Feature-Toggles with the devonfw, use the Framework Togglz because it has all the features generally needed and provides a great documentation.

+
+
+

For a pretty minimal working example, also see this fork.

+
+
+
Preparation
+
+

The following example takes place in the oasp-sample-core project, so the necessary dependencies have to be added to the according pom.xml file. Required are the main Togglz project including Spring support, the Togglz console to graphically change the feature state and the Spring security package to handle authentication for the Togglz console.

+
+
+
+
<!-- Feature-Toggle-Framework togglz -->
+<dependency>
+  <groupId>org.togglz</groupId>
+  <artifactId>togglz-spring-boot-starter</artifactId>
+  <version>2.3.0.RC2</version>
+</dependency>
+
+<dependency>
+  <groupId>org.togglz</groupId>
+  <artifactId>togglz-console</artifactId>
+  <version>2.3.0.RC2</version>
+</dependency>
+
+<dependency>
+  <groupId>org.togglz</groupId>
+  <artifactId>togglz-spring-security</artifactId>
+  <version>2.3.0.RC2</version>
+</dependency>
+
+
+
+

In addition to that, the following lines have to be included in the spring configuration file application.properties

+
+
+
+
# configuration for the togglz Feature-Toggle-Framework
+togglz.enabled=true
+togglz.console.secured=false
+
+
+
+
+
Small features
+
+

For small features, a simple query of the toggle state is often enough to achieve the desired functionality. To illustrate this, a simple example follows, which implements a toggle to limit the page size returned by the staffmanagement. See here for further details.

+
+
+

This is the current implementation to toggle the feature:

+
+
+
+
// Uncomment next line in order to limit the maximum page size for the staff member search
+// criteria.limitMaximumPageSize(MAXIMUM_HIT_LIMIT);
+
+
+
+

To realise this more elegantly with Togglz, first an enum is required to configure the feature-toggle.

+
+
+
+
public enum StaffmanagementFeatures implements Feature {
+  @Label("Limit the maximum page size for the staff members")
+  LIMIT_STAFF_PAGE_SIZE;
+
+  public boolean isActive() {
+    return FeatureContext.getFeatureManager().isActive(this);
+  }
+}
+
+
+
+

To familiarize the Spring framework with the enum, add the following entry to the application.properties file.

+
+
+
+
togglz.feature-enums=io.oasp.gastronomy.restaurant.staffmanagement.featuremanager.StaffmanagementFeatures
+
+
+
+

After that, the toggle can be used easily by calling the isActive() method of the enum.

+
+
+
+
if (StaffmanagementFeatures.LIMIT_STAFF_PAGE_SIZE.isActive()) {
+  criteria.limitMaximumPageSize(MAXIMUM_HIT_LIMIT);
+}
+
+
+
+

This way, you can easily switch the feature on or off by using the administration console at http://localhost:8081/devon4j-sample-server/togglz-console. If you are getting redirected to the login page, just sign in with any valid user (eg. admin).

+
+
+
+
Extensive features
+
+

When implementing extensive features, you might want to consider using the strategy design pattern to maintain the overview of your software. The following example is an implementation of a feature which adds a 25% discount to all products managed by the offermanagement.

+
+
+
Therefore there are two strategies needed:
+
    +
  1. +

    Return the offers with the normal price

    +
  2. +
  3. +

    Return the offers with a 25% discount

    +
  4. +
+
+
+

The implementation is pretty straight forward so use this as a reference. Compare this for further details.

+
+
+
+
@Override
+@RolesAllowed(PermissionConstants.FIND_OFFER)
+public PaginatedListTo<OfferEto> findOfferEtos(OfferSearchCriteriaTo criteria) {
+  criteria.limitMaximumPageSize(MAXIMUM_HIT_LIMIT);
+  PaginatedListTo<OfferEntity> offers = getOfferDao().findOffers(criteria);
+
+
+  if (OffermanagementFeatures.DISCOUNT.isActive()) {
+    return getOfferEtosDiscount(offers);
+  } else {
+    return getOfferEtosNormalPrice(offers);
+  }
+
+}
+
+
+// Strategy 1: Return the OfferEtos with the normal price
+private PaginatedListTo<OfferEto> getOfferEtosNormalPrice(PaginatedListTo<OfferEntity> offers) {
+  return mapPaginatedEntityList(offers, OfferEto.class);
+}
+
+// Strategy 2: Return the OfferEtos with the new, discounted price
+private PaginatedListTo<OfferEto> getOfferEtosDiscount(PaginatedListTo<OfferEntity> offers) {
+  offers = addDiscountToOffers(offers);
+  return mapPaginatedEntityList(offers, OfferEto.class);
+}
+
+private PaginatedListTo<OfferEntity> addDiscountToOffers(PaginatedListTo<OfferEntity> offers) {
+  for (OfferEntity oe : offers.getResult()) {
+    Double oldPrice = oe.getPrice().getValue().doubleValue();
+
+    // calculate the new price and round it to two decimal places
+    BigDecimal newPrice = new BigDecimal(oldPrice * 0.75);
+    newPrice = newPrice.setScale(2, RoundingMode.HALF_UP);
+
+    oe.setPrice(new Money(newPrice));
+  }
+
+  return offers;
+}
+
+
+
+
+
+
Guidelines for a successful use of feature-toggles
+
+

The use of feature-toggles requires a specified set of guidelines to maintain the overview on the software. The following is a collection of considerations and examples for conventions that are reasonable to use.

+
+
+
Minimize the number of toggles
+
+

When using too many toggles at the same time, it is hard to maintain a good overview of the system and things like finding bugs are getting much harder. Additionally, the management of toggles in the configuration interface gets more difficult due to the amount of toggles.

+
+
+

To prevent toggles from piling up during development, a toggle and the associated obsolete source code should be removed after the completion of the corresponding feature. In addition to that, the existing toggles should be revisited periodically to verify that these are still needed and therefore remove legacy toggles.

+
+
+
+
Consistent naming scheme
+
+

A consistent naming scheme is the key to a structured and easily maintainable set of features. This should include the naming of toggles in the source code and the appropriate naming of commit messages in the VCS. The following section contains an example for a useful naming scheme including a small example.

+
+
+

Every Feature-Toggle in the system has to get its own unique name without repeating any names of features, which were removed from the system. The chosen names should be descriptive names to simplify the association between toggles and their purpose. If the feature should be split into multiple sub-features, you might want to name the feature like the parent feature with a describing addition. If for example you want to split the DISCOUNT feature into the logic and the UI part, you might want to name the sub-features DISCOUNT_LOGIC and DISCOUNT_UI.

+
+
+

The entry in the togglz configuration enum should be named identically to the aforementioned feature name. The explicitness of feature names prevents a confusion between toggles due to using multiple enums.

+
+
+

Commit messages are very important for the use of feature-toggles and also should follow a predefined naming scheme. You might want to state the feature name at the beginning of the message, followed by the actual message, describing what the commit changes to the feature. An example commit message could look like the following:

+
+
+
+
DISCOUNT: Add the feature-toggle to the offermanagement implementation.
+
+
+
+

Mentioning the feature name in the commit message has the advantage, that you can search your git log for the feature name and get every commit belonging to the feature. An example for this using the tool grep could look like this.

+
+
+
+
$ git log | grep -C 4 DISCOUNT
+
+commit 034669a48208cb946cc6ba8a258bdab586929dd9
+Author: Florian Luediger <florian.luediger@somemail.com>
+Date:   Thu Jul 7 13:04:37 2016 +0100
+
+DISCOUNT: Add the feature-toggle to the offermanagement implementation.
+
+
+
+

To keep track of all the features in your software system, a platform like GitHub offers issues. When creating an issue for every feature, you can retrace, who created the feature and who is assigned to completing its development. When referencing the issue from commits, you also have links to all the relevant commits from the issue view.

+
+
+
+
Placement of toggle points
+
+

To maintain a clean codebase, you definitely want to avoid using the same toggle in different places in the software. There should be one single query of the toggle which should be able to toggle the whole functionality of the feature. If one single toggle point is not enough to switch the whole feature on or off, you might want to think about splitting the feature into multiple ones.

+
+
+
+
Use of fine-grained features
+
+

Bigger features in general should be split into multiple sub-features to maintain the overview on the codebase. These sub-features get their own feature-toggle and get implemented independently.

+
+
+ +
+
+
+
Accessibility
+
+

TODO

+
+ + + +
+ +
+ + + + + +
+ + +devon4j-kafka has been abandoned. Its main feature was the implementation of a retry pattern using multiple topics. This implementation has become an integral part of Spring Kafka. We recommend using Spring Kafka’s own implementation for retries. +
+
+
+
+
Messaging Services
+
+

Messaging Services provide an asynchronous communication mechanism between applications. Technically this is implemented using Apache Kafka .

+
+
+

For spring, devonfw uses Spring-Kafka as kafka framework. +For more details, check the devon4j-kafka.

+
+ +
+ +
+
+
Messaging
+
+

Messaging in Java is done using the JMS standard from JEE.

+
+
+
+
Products
+
+

For messaging you need to choose a JMS provider such as:

+
+
+ +
+
+
+
Receiver
+
+

As a receiver of messages is receiving data from other systems it is located in the service-layer.

+
+
+
JMS Listener
+
+

A JmsListener is a class listening and consuming JMS messages. It should carry the suffix JmsListener and implement the MessageListener interface or have its listener method annotated with @JmsListener. This is illustrated by the following example:

+
+
+
+
@Named
+@Transactional
+public class BookingJmsListener /* implements MessageListener */ {
+
+  @Inject
+  private Bookingmanagement bookingmanagement;
+
+  @Inject
+  private MessageConverter messageConverter;
+
+  @JmsListener(destination = "BOOKING_QUEUE", containerFactory = "jmsListenerContainerFactory")
+  public void onMessage(Message message) {
+    try {
+      BookingTo bookingTo = (BookingTo) this.messageConverter.fromMessage(message);
+      this.bookingmanagement.importBooking(bookingTo);
+    } catch (MessageConversionException | JMSException e) {
+      throw new InvalidMessageException(message);
+    }
+  }
+}
+
+
+
+
+
+
Sender
+
+

The sending of JMS messages is considered as any other sending of data like kafka messages or RPC calls via REST using service-client, gRPC, etc. +This will typically happen directly from a use-case in the logic-layer. +However, the technical complexity of the communication and protocols itself shall be hidden from the use-case and not be part of the logic layer. +With spring we can simply use JmsTemplate to do that.

+
+
+ +
+
+ +
+

If you want to offer all your users fast and simple searches with just a single search field (like in Google), you need full-text indexing and search support.

+
+
+
+
Solutions
+
+ +
+
+

Maybe you also want to use native features of your database

+
+ +
+
+
Best Practices
+
+

TODO

+
+
+
+
+

1.105. Tutorials

+ +
+
Creating a new application
+ +
+
+
Running the archetype
+
+

In order to create a new application you must use the archetype provided by devon4j which uses the maven archetype functionality.

+
+
+

To create a new application, you should have installed devonfw IDE. Follow the devon ide documentation to install +the same. +You can choose between 2 alternatives, create it from command line or, in more visual manner, within eclipse.

+
+
+
From command Line
+
+

To create a new devon4j application from command line, you can simply run the following command:

+
+
+
+
devon java create com.example.application.sampleapp
+
+
+
+

For low-level creation you can also manually call this command:

+
+
+
+
mvn -DarchetypeVersion=${devon4j.version} -DarchetypeGroupId=com.devonfw.java.templates -DarchetypeArtifactId=devon4j-template-server archetype:generate -DgroupId=com.example.application -DartifactId=sampleapp -Dversion=1.0.0-SNAPSHOT -Dpackage=com.devonfw.application.sampleapp
+
+
+
+

Attention: The archetypeVersion (first argument) should be set to the latest version of devon4j. You can easily determine the version from this badge: +latest devon4j version

+
+
+

Further providing additional properties (using -D parameter) you can customize the generated app:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 17. Options for app template
propertycommentexample

dbType

Choose the type of RDBMS to use (hana, oracle, mssql, postgresql, mariadb, mysql, etc.)

-DdbType=postgresql

batch

Option to add a batch module

-Dbatch=batch

+
+
+
From Eclipse
+
+
+
After that, you should follow this Eclipse steps to create your application:
+
+
+
+
    +
  • +

    Create a new Maven Project.

    +
  • +
  • +

    Choose the devon4j-template-server archetype, just like the image.

    +
  • +
+
+
+
+Select archetype +
+
+
+
    +
  • +

    Fill the Group Id, Artifact Id, Version and Package for your project.

    +
  • +
+
+
+
+Configure archetype +
+
+
+
    +
  • +

    Finish the Eclipse assistant and you are ready to start your project.

    +
  • +
+
+
+
+
+
What is generated
+
+

The application template (archetype) generates a Maven multi-module project. It has the following modules:

+
+
+
    +
  • +

    api: module with the API (REST service interfaces, transferobjects, datatypes, etc.) to be imported by other apps as a maven dependency in order to invoke and consume the offered (micro)services.

    +
  • +
  • +

    core: maven module containing the core of the application.

    +
  • +
  • +

    batch: optional module for batch(es)

    +
  • +
  • +

    server: module that bundles the entire app (core with optional batch) as a WAR file.

    +
  • +
+
+
+

The toplevel pom.xml of the generated project has the following features:

+
+
+
    +
  • +

    Properties definition: Spring-boot version, Java version, etc.

    +
  • +
  • +

    Modules definition for the modules (described above)

    +
  • +
  • +

    Dependency management: define versions for dependencies of the technology stack that are recommended and work together in a compatible way.

    +
  • +
  • +

    Maven plugins with desired versions and configuration

    +
  • +
  • +

    Profiles for test stages

    +
  • +
+
+
+
+
How to run your app
+
+
Run app from IDE
+
+

To run your application from your favourite IDE, simply launch SpringBootApp as java application.

+
+
+
+
Run app as bootified jar or war
+
+

More details are available here.

+
+ +
+
+
+
+

1.106. Quarkus

+
+

Quarkus is a Java framework for building cloud-native apps. +It is fully supported by devonfw as an option and alternative to spring. +Additional things like extensions will be available on the devon4quarkus GitHub repository.

+
+
+
+

1.107. Guide to the Reader

+
+

Dependent on the intention you are reading this document, you might be most interested in the following chapters:

+
+
+
    +
  • +

    If you are completely new to Quarkus, you may be interested in the pros and cons of Quarkus. Also take a look at the official Quarkus website. And you might also be interested in the features that GraalVM offers.

    +
  • +
  • +

    If you are new to devon4j, also take a look at devon4j’s recommendations on general best practices. Check out the chapters on architecture design, project structuring and coding conventions. Follow the referenced links to explore a topic in more depth.

    +
  • +
  • +

    If you are an experienced Spring developer and want to get in touch with Quarkus, read our Getting started with Quarkus for Spring developers guide.

    +
  • +
  • +

    If you’re looking to build your first Quarkus application, the Quarkus website offers some good getting started guides. Also check out our Quarkus template guide, which gives you some recommendations on extensions and frameworks to use. It also provides some links to the Quarkus code generator with preselected configurations you can use to create your application.

    +
  • +
  • +

    If you want to have a Quarkus sample application using devon4j recommendations, check out our Quarkus reference application.

    +
  • +
  • +

    If you already have some experience with devon4j and Quarkus and need more information on a specific topic, check out our Quarkus guides. If you don’t find what you are looking for there, check out the general section. devon4j uses general solutions for Java, so solutions for both Quarkus and Spring are documented there.

    +
  • +
  • +

    If you want to learn how to build native images, check out this guide.

    +
  • +
+
+
+
+

1.108. Pros

+
+

Quarkus offers the following benefits:

+
+
+
    +
  • +

    fast turn-around cycles for developers
    +Save changes in your Java code and immediately test the results without restarting or waiting

    +
  • +
  • +

    faster start-up and less memory footprint
    +When building your app as native-images via GraalVM it gets highly optimized. As a result it starts up lightning fast and consumes much less memory. This is a great advantage for cloud deployment as well as for sustainability. You can find a performance comparison between Spring and Quarkus here.

    +
  • +
  • +

    clean and lean +As Quarkus was born as a cloud-native framework it is very light-weight and does not carry much history and legacy.

    +
  • +
+
+
+
+

1.109. Cons

+
+

Quarkus has the following drawbacks:

+
+
+
    +
  • +

    less flexible
    +Quarkus is less flexible compared to spring or in other words it is more biased and coupled to specific implementations. However, the implementations just work and you have less things to choose and worry about. However, in case you want to integrate a specific or custom library you may hit limitations or lose support for native-images especially when that library is based on reflection. Therefore, check your requirements and technology stack early on when making your choice.

    +
  • +
  • +

    less established
    +Since quarkus was born in 2019 it is modern but also less established. It will be easier to get developers for spring but we already consider quarkus mature and established enough for building production-ready apps.

    +
  • +
+
+
+

1.109.1. Quarkus Quickstart

+
+

This guide will show a quickstart how to create a Quarkus app and will briefly show the key functionalities that Quarkus provides and how to start as a beginner or also experienced developers.

+
+
+
+

1.109.2. Introduction to Quarkus

+
+

To get the first introduction to Quarkus you can read the Quarkus introduction guide. To get a short overview where you can find the important Quarkus related guides follow the chapter guide to the reader. +Also, see a comparison of the advantages and disadvantages of a Quarkus application compared to the alternative framework Spring. +This comparison will be supported by our performance comparison between Spring and Quarkus that shows the lower resource consumption and startup time of Quarkus applications.

+
+
+
+

1.109.3. Installation of Tools and Dependencies

+
+

First, we need to install some dependencies and tools before we can start programming. Our tool devonfw-ide comes with many development tools for you. +We need to install the following tools for this guide.

+
+
+
    +
  • +

    Maven

    +
  • +
  • +

    Java

    +
  • +
  • +

    any IDE (devonfw-ide supports Eclipse, Intellij and VScode)

    +
  • +
  • +

    Docker

    +
  • +
+
+
+

We recommend installing the devonfw-ide with the tools, but if you already got your system configured and the tools from above installed you can skip to Bootstrap a Quarkus Project otherwise we will show you how to setup and update your devonfw-ide.

+
+
+
devonfw-ide
+
    +
  1. +

    Install devonfw-ide
    +Follow the Setup to install the devonfw-ide with Java, Maven, Eclipse and VScode.

    +
    +
      +
    1. +

      Command to install Docker
      +devon docker setup

      +
    2. +
    +
    +
  2. +
  3. +

    Update devonfw-ide
    +We advise to update your already installed devonfw-ide and all tools because we are still working to improve devonfw-ide and there could be essential features for cloud development with Quarkus that you could be missing.

    +
  4. +
+
+
+

Use the commands devon ide update, devon ide update software and devon ide scripts to update devonfw-ide and all software that is installed.

+
+
+

Go to the main folder under workspaces of the devonfw-ide installation. +We will create the project there.

+
+
+
+

1.109.4. Bootstrap a Quarkus Project

+
+

Quarkus provides multiple ways to bootstrap a project. +The option to bootstrap a project via the command-line will be shown in the Quarkus getting started guide Bootstrap the project. +Quarkus also provides a project builder where you can select some extensions, the build tool for your project, and if you want some starter code. +This will deliver a project skeleton with the configured project dependencies and also contributes the information to compile the application natively. To get some recommendations on starter templates follow this guide templates recommendations.

+
+
+ + + + + +
+ + +
+

By creating a Quarkus project from command-line or with the project builder you get a different project structure and have to adapt it to the devon4j conventions shown in the next Chapter.

+
+
+
+
+
Project Structure
+
+

We provide a recommendation and guideline for a modern project structure to help organize your project into logically related modules. +You should follow the guide and also apply it in your project so you structure the project to the needs of modern cloud development and microservice architectures and also find similar modules faster in our example projects.

+
+
+
+
+

1.109.5. Introduction to Quarkus Functionality

+
+

Before we start programming you should have a first look at the functionality of Quarkus.

+
+
+
Quarkus functionality guides
+
    +
  1. +

    Getting started guide from Quarkus
    +The guide is a good functionality overview, it shows with a simple Greeting Service a brief introduction into the concepts like CDI, testing, dev mode, packaging and running the app.

    +
  2. +
  3. +

    From Spring to Quarkus
    +For experienced Spring developers that have already followed devon4j guidelines, you can read our guide to getting started with Quarkus for Spring developer, as it goes more into the differences that can give you a more detailed comparison to Spring.

    +
    +
      +
    1. +

      Migrate a Spring app to Quarkus
      +This guide shows, how to migrate a Spring application to a Quarkus application with devon4j conventions.

      +
    2. +
    +
    +
  4. +
+
+
+
+

1.109.6. Create a REST service

+
+

Now let’s create our first REST CRUD service with Quarkus. +We give you the options to use a guide and start to code the service yourself, +or just download a service that’s ready to use.

+
+
+
Options
+
    +
  1. +

    Create the service yourself
    +There is a good Quarkus guide for a simple JSON REST service that will guide you through your first application and will help you to define and implement endpoints with JAX-RS and an Entity that will be managed by the service and also how to configure the JSON support.

    +
  2. +
  3. +

    Use an existing Quarkus project
    +You don’t want to code a service and just want to test some Quarkus functionalities? Just load a Quarkus sample project, provided for every existing quick start guide and the supported framework. +Our Team also provides some Quarkus applications that are working and can be loaded and tested.

    +
    +
      +
    • +

      reference project is a service that manages products. It contains the devon4j modern project structure, pagination, queries, a Postgres database, Jaeger tracing, Prometheus monitoring, SwaggerUI and support for Kubernetes deploy. +This project will be steadily improved and is used to showcase the abilities of Quarkus with devon4j.

      +
    • +
    • +

      minimal Quarkus project is just the Quarkus project from a getting started with Quarkus guide with a Greeting Services modified with the correct modern structure talked about in this chapter Project Structure

      +
    • +
    +
    +
  4. +
+
+
+
+

1.109.7. OpenAPI generation

+
+

We provide a guide with a short introduction to the OpenAPI specification with two plugins that are important in a Quarkus Context.

+
+ +
+

A more detailed usage guide to the Smallrye Plugin is provided by Quarkus OpenAPI and Swagger guide.

+
+
+
+

1.109.8. How to Integrate a Database

+
+

The next step for our REST service would be to integrate a database to store the objects of the entity.

+
+
+

With Quarkus, adding a database can be easy, because Quarkus can take over the build-up and connection process. +First, you should understand our guides to the concepts of how to work with data and then we will show how to integrate a database with Quarkus.

+
+
+
Data Principles Guides
+
    +
  1. +

    General devon4j JPA guide
    +To get an insight into the general JPA usage you should read the JPA guide which contains a general explanation of the Java Persistence API.

    +
  2. +
  3. +

    Difference to SpringData
    +If you have already worked with SpringData this is also partially supported with Quarkus, this is explained in more detail in this SpringData Guide.

    +
  4. +
+
+
+
Database Integration
+
    +
  1. +

    Quarkus zero config dev mode
    +Starting with database implementation in Quarkus, we recommend for beginners to use the DEV mode Zero Config Setup (Dev Services) this is especially great for testing the code without a database set up. +Quarkus does all the work for you and configures a database and creates the database and tables(schemas) for you.

    +
    +
      +
    1. +

      Configuration Properties
      +A list of all database configuration properties for the Dev services

      +
    2. +
    +
    +
  2. +
  3. +

    Integrate a simple Hibernate ORM database
    +The zero config setup only works with the Dev mode, it’s comfortable in the first phases of the creation of your service but if the goal is to also get a deployable version, you have to create your own database and integrate it. +This Quarkus guide shows, how to integrate a Hibernate ORM database with an example service.

    +
    +
      +
    1. +

      Configuration list for JDBC
      +A list of all configuration that is possible with a JDBC configuration properties

      +
    2. +
    +
    +
  4. +
  5. +

    Reactive CRUD application with Panache
    +Quarkus unifies reactive and imperative programming. +Reactive is an architectural principle to build robust, efficient, and concurrent applications. +An introduction into reactive and how Quarkus is enabling it follow this Quarkus reactive architecture article and also the reactive quickstart. +To get started with reactive and implement reactive methods you can follow the Quarkus reactive guide. +The reactive guide is using the Quarkus based implementation of a Hibernate ORM called Panache. +That implementation is not our first choice with devon4j and therefore not part of our recommendations, but to understand the reactive guide you can read the Hibernate ORM with Panache guide first to prevent possible problems following the guide.

    +
  6. +
+
+
+ + + + + +
+ + +
+

You need an installed Docker version for the zero config setup.

+
+
+
+
+
Database Migration
+
    +
  1. +

    Migration guide +For schema-based databases, we recommend migrating databases with Flyway. +In that case our general migration guide can give you an overview if you are not familiar with migration.

    +
    +
      +
    1. +

      Flyway guide for Quarkus +This Quarkus guide will show how to work with the Flyway extension in a Quarkus application. +This should be used if you start your own database and do not leave the creation to quarkus.

      +
    2. +
    +
    +
  2. +
+
+
+
+

1.109.9. Testing a Quarkus Application

+
+

After we have built the service, we have to verify it with some tests. +We will give you some guidelines to implement some test cases.

+
+
+
Testing Guides
+
    +
  1. +

    General testing guide
    +For users that aren’t familiar with the devon4j testing principles, we created a general best practices and recommendations guide for testing.

    +
    +
      +
    1. +

      Our guide for testing with Quarkus +In addition, we also provide a guide that specifically addresses the testing of a Quarkus application.

      +
    2. +
    +
    +
  2. +
+
+
+

Most of the Quarkus applications are already equipped with a basic Test and also our reference project provides some test cases, if you want to improve and extends the tests, you can also follow the large Quarkus guide for testing.

+
+
+
+

1.109.10. Packaging of a Quarkus application and creation of a native executable

+
+

Quarkus applications can be packed into different types. The following link will show how to build and also give you a short explanation of the characteristics of these files.

+
+
+
Package types
+
    +
  1. +

    fast-jar

    +
  2. +
  3. +

    mutable-jar

    +
  4. +
  5. +

    uber-jar

    +
  6. +
  7. +

    native executable

    +
  8. +
+
+
+

To pack an application use the command mvn package and Quarkus will generate the output in the /target folder. For the native executables, the command needs more parameters but this is explained in the link above.

+
+
+

Configure the Output with these configuration properties

+
+
+
+

1.109.11. Create and build a Docker Image

+
+

Quarkus supports Jib, S2I and Docker for building images. We focus on building a Quarkus App with Docker. +You get a created Dockerfile from Quarkus in the src/main/docker folder of any project generated from Quarkus. There are multiple Dockerfiles.

+
+
+
Dockerfiles
+
    +
  1. +

    Dockerfile.jvm
    +Dockerfile for Quarkus application in the JVM mode. running in Red Hat Universal Base Image 8 Minimal Container

    +
  2. +
  3. +

    Dockerfile.legacy-jar
    +DockerFile for Quarkus application in JVM mode with the legacy jar running in Red Hat Universal Base Image 8 Minimal Container.

    +
  4. +
  5. +

    Dockerfile.native
    +Dockerfile using the native executable running in Red Hat Universal Base Image 8 Minimal container.

    +
  6. +
  7. +

    Dockerfile.native-distroless +The native file will run in a Distroless container. Distroless images are very small containers with just the application and runtime dependencies and without the other programs coming with a Linux distribution.

    +
  8. +
+
+
+
+
+

For more information to the different executables go back to the chapter Packaging of a Quarkus application and creation of a native executable

+
+
+
+
+

To simply build and run a Docker image you can follow the instructions Quarkus provides for every Dockerfile in the comments block.

+
+
+

Docker commands example for the from the JVM Dockerfile from our reference project

+
+
+
+
####
+# This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode
+#
+# Before building the container image run:
+#
+# ./mvnw package
+#
+# Then, build the image with:
+#
+# docker build -f src/main/docker/Dockerfile.jvm -t quarkus/quarkus-basics-jvm .
+#
+# Then run the container using:
+#
+# docker run -i --rm -p 8080:8080 quarkus/quarkus-basics-jvm
+#
+# If you want to include the debug port into your docker image
+# you will have to expose the debug port (default 5005) like this :  EXPOSE 8080 5005
+#
+# Then run the container using :
+#
+# docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/quarkus-basics-jvm
+#
+###
+
+
+
+

Quarkus is also able to build the image while packaging the application so you don’t have to execute the command from above. +To perform Docker builds with the generated Dockerfiles from above you need to add the following extension to your project with the command mvn quarkus:add-extension -Dextensions="container-image-docker".

+
+
+

Also you have to set the quarkus.container-image.build=true, you can add this to your application.properties or just append it to the packaging command like that ./mvn package -Dquarkus.container-image.build=true.

+
+
+

If your needs exceed the instructions given by the file, we recommend following the Docker getting started guide to get familiar with Docker and customize the Dockerfiles according to your needs. +To specify your container build, you can use the general container image configurations properties and the Docker image configurations properties when building and running Docker images.

+
+ +
+
+

1.109.12. Modern project structure

+
+

With trends such as cloud, microservices, lean, and agile we decided for a more modern project structure that fits better to recent market trends. +When starting new projects with devonfw and especially in the context of cloud-native development we strongly recommend this modern approach over the classic structure.

+
+
+
+

1.109.13. Modules

+
+

Due to trends such as microservices we are building smaller apps compared to moduliths. +For simplicity we therefore do not split our app into different modules and keep everything top-level and easy.

+
+
+

In addition to java and resources we also add helm for helm templates and docker for docker scripts (e.g. Dockerfile) in src/main:

+
+
+
+
├──/src
+|  ├──/main
+|  |  ├──/docker
+|  |  ├──/helm
+|  |  ├──/java
+|  |  └──/resources
+|  └──/test
+|     ├──/java
+|     └──/resources
+└──/pom.xml
+
+
+
+
+

1.109.14. Deployment

+
+

For modern projects we strongly recommend that your build process is generating the final deliverable as an OCI compliant container. +Further, to go fully cloud-native you should build your app as a native image via GraalVM AOT compiler. +Therefore we recommend to use quarkus as your main framework. +In case you want to go with spring you may consider using spring-native.

+
+
+
+

1.109.15. Layers

+
+

The package structure of your code inside src/main/java (and src/test/java) of your app is described in our coding conventions in the sections packages. +For the modern project structure the layers are defined by the following table:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Layer«layer»Description

service

service

The service layer exposing functionality via its remote API. Typical protocol is REST. May also be any other protocol you are using such as gRPC.

domain

domain

The domain with the data-model and DB access. Use sub-package (in «detail») repository for repository and dao for DAOs. Also we recommend to put entities in model sub-package.

logic

logic

The logic layer with the functionality providing the business value.

common

common

cross-cutting code not assigned to a technical layer.

+
+
+

1.109.16. Architecture Mapping

+
+

In order to help you to map the architecture, packaging, layering, etc. to the code and see where different code elements should be placed, +we provide this architecture mapping:

+
+
+
+
«root»
+├──.«component»
+|  ├──.domain
+|  |  ├──.repo
+|  |  |  ├──.«BusinessObject»Repository
+|  |  |  ├──.«BusinessObject»Fragment
+|  |  |  └──.«BusinessObject»FragmentImpl
+|  |  ├──.dao [alternative to repo]
+|  |  |  ├──.«BusinessObject»Dao
+|  |  |  └──.«BusinessObject»DaoImpl
+|  |  └──.model
+|  |     └──.«BusinessObject»Entity
+|  ├──.logic
+|  |  ├──«BusinessObject»Validator
+|  |  └──«BusinessObject»EventsEmitter
+|   |  └──.Uc«Operation»«BusinessObject»[Impl]
+|  └──.service
+|     └──.v1
+|        ├──.«Component»RestService
+|        ├──.mapper
+|        |     └──.«BusinessObject»Mapper
+|        └──.model
+|           └──.«BusinessObject»Dto
+└──.general
+   └──.domain
+      └──.model
+         └──.ApplicationPersistenceEntity
+
+
+ +
+
+

1.109.17. Domain Layer

+
+

The domain layer is responsible for the data-model and mapping this to a database. +The most common approach is to use a Relational Database Management System (RDBMS). In such a case, we strongly recommend to follow our JPA Guide. Some NoSQL databases are supported by spring-data so you can consider the repository guide.

+
+
+

Note: The domain layer is the replacement for the data-access layer in the modern project structure.

+
+
+
+
+
+

1.110. Guides

+ +
+
Getting started with Quarkus for Spring developers
+
+

As a Spring developer, you heard more and more about Quarkus: its pros and cons, its fast growth etc. So, you decided to adopt/try Quarkus for your (next) project(s) and wonder where to go next and where do you need to pay attention to when moving from Spring to Quarkus.

+
+
+

This guide tries to address exactly this concern. In the following, we will present you some main points you should be aware of when starting to develop with Quarkus, along with some useful sources.

+
+
+
    +
  1. +

    Quarkus is fairly a new Java toolkit. Thus, it is very well documented. It also provides a set of well-written technical guides that are a good starting point to get in touch and make the first steps with Quarkus. See here. It is an Open Source project licensed under the Apache License version 2.0. The source code is hosted in GitHub. If you have any question or concern, don’t hesitate to reach out to the Quarkus community.

    +
  2. +
  3. +

    Same as Spring Initializr, you can go to code.quarkus.io to create a new application. Also, check out our Template Quarkus Guide to have our recommendations on certain topics.

    +
  4. +
  5. +

    In Spring stack, we recommend structuring your application into multiple modules, known as our classic structure. Moving to Quarkus and the world of cloud-native, microservices where we build smaller applications compared to monoliths, we recommend keeping everything top-level and simple. Therefore, we propose the modern structure as a better fit.

    +
  6. +
  7. +

    Quarkus focuses not only on delivering top features but also on the developer experience. The Quarkus’s Live Coding feature automatically detects changes made to Java files, application configuration, static resources or even classpath dependency changes and recompiles and redeploys the changes. As that, it solves the problem of traditional Java development workflow, hence improves productivity.

    +
    +
    +
        Write Code → Compile → Deploy → Test Changes/ Refresh Browser/ etc → Repeat (traditional)
    +    Write Code → Test Changes/ Refresh Browser/ etc → Repeat (Quarkus)
    +
    +
    +
    +

    You can use this feature out of the box without any extra setup by running:

    +
    +
    +
    +
        mvn compile quarkus:dev
    +
    +
    +
    +

    Another highlight feature to speed up developing is the Quarkus’s Dev Mode with Dev Services, which can automatically provision unconfigured services in development and test mode. It means that if you include an extension and don’t configure it, Quarkus will automatically start the relevant service and wire up your application to use it, therefore will save you a lot of time setting up those services manually. In production mode, where the real configuration is provided, Dev Service will be disabled automatically.

    +
    +
    +

    Also in Dev Mode, you can access the Dev UI at /q/dev to browse endpoints offered by various extensions, conceptually similar to what a Spring Boot actuator might provide.

    +
    +
  8. +
  9. +

    Quarkus is made of a small core on which relies hundreds of extensions. In fact, the power of Quarkus is its extension mechanism. Think of these extensions as your project dependencies. You can add it per dependency manager such as maven or gradle.

    +
    +
    +
    mvn quarkus:list-extensions
    +mvn quarkus:add-extension -Dextensions="groupId:artifactId"
    +(or add it manually to pom.xml)
    +# or
    +gradle list-extensions
    +(add dependency to build.gradle)
    +
    +
    +
    +

    Like Spring Boot, Quarkus also has a vast ecosystem of extensions with commonly-used technologies.

    +
    + + ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Table 18. Example of common Quarkus extensions and the Spring Boot Starters with similar functionality (book: Quarkus for Spring Developer)
    Quarkus extensionSpring Boot Starter

    quarkus-resteasy-jackson

    spring-boot-starter-web

    +

    spring-boot-starter-webflux

    quarkus-resteasy-reactive-jackson

    spring-boot-starter-web

    +

    spring-boot-starter-webflux

    quarkus-hibernate-orm-panache

    spring-boot-starter-data-jpa

    quarkus-hibernate-orm-rest-datapanache

    spring-boot-starter-data-rest

    quarkus-hibernate-reactive-panache

    spring-boot-starter-data-r2dbc

    quarkus-mongodb-panache

    spring-boot-starter-data-mongodb

    +

    spring-boot-starter-data-mongodb-reactive

    quarkus-hibernate-validator

    spring-boot-starter-validation

    quarkus-qpid-jms

    spring-boot-starter-activemq

    quarkus-artemis-jms

    spring-boot-starter-artemis

    quarkus-cache

    spring-boot-starter-cache

    quarkus-redis-client

    spring-boot-starter-data-redis

    +

    spring-boot-starter-data-redis-reactive

    quarkus-mailer

    spring-boot-starter-mail

    quarkus-quartz

    spring-boot-starter-quartz

    quarkus-oidc

    spring-boot-starter-oauth2-resource-server

    quarkus-oidc-client

    spring-boot-starter-oauth2-client

    quarkus-smallrye-jwt

    spring-boot-starter-security

    +
    +

    A full list of all Quarkus extensions can be found here. Furthermore, you can check out the community extensions hosted by Quarkiverse Hub. Quarkus has some extensions for Spring API as well which is helpful while migrating from Spring to Quarkus.

    +
    + +
    +

    Besides extensions, which are officially maintained by Quarkus team, Quarkus allows adding external libraries too. While extensions can be integrated seamlessly into Quarkus as they can be processed at build time and be built in native mode with GraalVM, external dependencies might not work out of the box with native compilation. If that is the case, then you have to recompile them with the right GraalVM configuration to make them work.

    +
    +
  10. +
  11. +

    Quarkus’s design accounted for native compilation by default. A Quarkus native executable starts much faster and utilizes far less memory than a traditional JVM (see our performance comparison between Spring and Quarkus). To get familiar with building native executable, configuring and running it, please check out our Native Image Guide. Be sure to test your code in both JVM and native mode.

    +
  12. +
  13. +

    Both Quarkus and Spring include testing frameworks based on JUnit and Mockito. Thus, by design, Quarkus enables test-driven development by detecting affected tests as changes are made and automatically rerun them in background. As that, it gives developer instant feedback, hence improves productivity. To use continuous testing, execute the following command:

    +
    +
    +
    mvn quarkus:dev
    +
    +
    +
  14. +
  15. +

    For the sake of performance optimization, Quarkus avoids reflection as much as possible, instead favoring static class binding. When building a native executable, it analyzes the call tree and removes all the classes/methods/fields that are not used directly. As a consequence, the elements used via reflection are not part of the call tree so they are dead code eliminated (if not called directly in other cases).

    +
    +

    A common example is the JSON libraries which typically use reflection to serialize the objects to JSON. If you use them out of the box, you might encounter some errors in native mode. So, be sure to register the elements for reflection explicitly. A How-to is provided by Quarkus Registering For Reflection with practical program snippets.

    +
    +
  16. +
+
+
+

A very good read on the topic is the e-book Quarkus for Spring Developers by Red Hat. Another good source for direct hands-on coding tutorial is Katacoda Quarkus for Spring Boot Developers

+
+ +
+
+
Configuration
+
+

Quarkus provides a comprehensive guide to configuration here.

+
+
+
+
External Application Configuration
+
+
Database Configuration
+
+

In Quarkus, Hibernate is provided by the quarkus-hibernate-orm extension. Ensure the extension is added to your pom.xml as follows:

+
+
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-hibernate-orm</artifactId>
+</dependency>
+
+
+
+

You additionally have to add the respective JDBC driver extension to your pom.xml. There are different drivers for different database types. See Quarkus Hibernate guide.

+
+
+
+
Database System and Access
+
+

You need to configure which database type you want to use, as well as the location and credentials to access it. The defaults are configured in application.properties. The file should therefore contain the properties as in the given example:

+
+
+
+
quarkus.datasource.jdbc.url=jdbc:postgresql://database.enterprise.com/app
+quarkus.datasource.username=appuser01
+quarkus.datasource.password=************
+quarkus.datasource.db-kind=postgresql
+
+# drop and create the database at startup (use only for local development)
+quarkus.hibernate-orm.database.generation=drop-and-create
+
+
+
+
+
Database Logging
+
+

Add the following properties to application.properties to enable logging of database queries for debugging purposes.

+
+
+
+
quarkus.hibernate-orm.log.sql=true
+quarkus.hibernate-orm.log.format-sql=true
+
+#Logs SQL bind parameters. Setting it to true is obviously not recommended in production.
+quarkus.hibernate-orm.log.bind-parameters=true
+
+
+
+
+
+
Security
+
+
Password Encryption
+
+

There are also some libraries to make Jasypt work with Quarkus such as Camel Quarkus Jasypt. Unfortunately, this feature only works in JVM mode and not in native mode.

+
+
+

Quarkus supports many credential providers with official extensions such as HashiCorp Vault.

+
+
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-vault</artifactId>
+</dependency>
+
+
+
+

A detailed guide can be found here and here.

+
+ +
+
+
+
Quarkus template
+
+

Quarkus Code Generator is providing a lot of alternatives on technologies and libraries to be integrated. Detailed guides to multiple topics can be found here.

+
+
+

Thus, the large selection can be difficult for developers to get started. +Therefore, in this guide, we aim to provide a general suggestion on basic frameworks, libraries, and technologies to make it easy for developers to begin with.

+
+
+

With that said, please take this as a recommendation and not a compulsion. Depending on your project requirements, you might have to use another stack in comparison to what is listed below.

+
+
+

If you are new to Quarkus, consider checking out their getting started guide to have an overview about how to create, run, test as well as package a Quarkus application. Another recommended source to get started is the Katacoda tutorials.

+
+
+
Basic templates
+
+
    +
  1. +

    simple REST API (go to code.quarkus.io)

    +
  2. +
  3. +

    simple REST API with monitoring (go to code.quarkus.io)

    +
  4. +
+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 19. Topic-based suggested implementation
TopicDetailSuggested implementationNote

runtime

servlet-container

Undertow

component management

dependency injection

ArC

ArC is based on JSR 365. It also provides interceptors that can be used to implement the same functionality as AOP provides

configuration

SmallRye Config

SmallRye Config is an implementation of Eclipse MicroProfile Config. It also supports YAML configuration files

persistence

OR-mapper

Hibernate ORM, Spring Data JPA

Hibernate ORM is the de facto standard JPA implementation and works perfectly in Quarkus. Quarkus also provides a compatibility layer for Spring Data JPA repositories in the form of the spring-data-jpa extension.

batch

Quarkus JBeret Extension is a non-official extension, which is hosted in the Quarkiverse Hub. It is an implementation of JSR 352.

service

REST services

RESTEasy

RESTEasy is a portable implementation of the new JCP specification JAX-RS JSR-311. It can be documented via Swagger OpenAPI.

async messaging

SmallRye Reactive Messaging, Vert.x EventBus

SmallRye Reactive Messaging is an implementation of the Eclipse MicroProfile Reactive Messaging specification 1.0. You can also utilize SmallRye Reactive Messaging in your Quarkus application to interact with Apache Kafka.

marshalling

RESTEasy Jackson, RESTEasy JSON-B, RESTEasy JAXB, RESTEasy Multipart

cloud

kubernetes

Kubernetes

deployment

Minikube, k3d

Minikube is quite popular when a Kubernetes cluster is needed for development purposes. Quarkus supports this with the quarkus-minikube extension.

logging

framework

JBoss Log Manager and the JBoss Logging facade

Internally, Quarkus uses JBoss Log Manager and the JBoss Logging facade. Logs from other supported Logging API (JBoss Logging, SLF4J, Apache Commons Logging) will be merged.

validation

framework

Hibernate Validator/Bean Validation (JSR 380)

security

authentication & authorization

JWT authentication

Quarkus supports various security mechanisms. Depending on your protocol, identity provider you can choose the necessary extensions such as quarkus-oidc quarkus-smallrye-jwt quarkus-elytron-security-oauth2.

monitoring

framework

Micrometer Metrics, SmallRye Metrics

SmallRye Metrics is an implementation of the MicroProfile Metrics specification. Quarkus also offers various extensions to customize the metrics.

health

SmallRye Health

SmallRye Health is an implementation of the MicroProfile Health specification.

fault tolerance

SmallRye Fault Tolerance

SmallRye Fault Tolerance is an implementation of the MicroProfile Fault Tolerance specification.

+ +
+
+
+
Building a native image
+
+

Quarkus provides the ability to create a native executable of the application called native image. +Unlike other Java-based deployments, such a native image will only run on the architecture and operating system it is compiled for. +Also, no JVM is needed to run the native-image. +This improves the startup time, performance and efficiency. +A distribution of GraalVM is needed. +You can find the differences between the available distributions here.

+
+
+

To build your quarkus app as native-image you have two options that are described in the following sections.

+
+
+
+
Build a native executable with GraalVM
+
+

To build a Quarkus application you can install GraalVM locally on your machine as described below. +Therefore read the basic Quarkus application chapter, or clone the example project provided by devonfw. +Follow this chapter from the Quarkus Guide for building a native executable.

+
+
+
Installing GraalVM
+
+

A native image can be created locally or through a container environment. +To create a native image locally an installed and configured version of GraalVM is needed, you can follow the installation guide from Quarkus or the guide provided by GraalVM for this.

+
+
+
+
+
Build a native executable with GraalVM through container environment
+
+

In order to make the build of native images more portable, you can also use your container environment and run the GraalVM inside a container (typically Docker). +You can simply install Docker with your devonfw-ide distribution just follow this description Docker with devonfw-ide. +Follow this chapter to build a native Linux image through container runtime.

+
+
+
+
Configuring the native executable
+
+

A list of all configuration properties for a native image can be found here.

+
+ +
+
+
Bean mapping with Quarkus
+
+

This guide will show bean-mapping in particular for a Quarkus application. We recommend using MapStruct with a Quarkus application because the other bean-mapper frameworks are using Java reflections. They are not supported in GraalVM right now and cause problems building native applications. MapStruct is a code generator that greatly simplifies the implementation of mappings between Java bean types based on a convention over configuration approach. The mapping code will be generated at compile-time and uses plain method invocations and thus is fast, type-safe, and easy to understand. MapStruct has to be configured to not use Java reflections but it will be shown in this guide.

+
+
+

You can find the official +MapStruct reference guide and a general introduction to MapStruct from Baeldung.

+
+
+
+
MapStruct Dependency
+
+

To get access to MapStruct we have to add the dependency to our POM.xml:

+
+
+
+
<dependency>
+  <groupId>org.mapstruct</groupId>
+  <artifactId>mapstruct</artifactId>
+  <version>1.4.2.Final</version>
+  <scope>provided</scope>
+</dependency>
+
+
+
+

MapStruct provides an annotation processor that also has to be added to the POM.xml

+
+
+
+
<plugin>
+	<groupId>org.apache.maven.plugins</groupId>
+	<artifactId>maven-compiler-plugin</artifactId>
+	<version>3.8.1</version>
+	<configuration>
+		<source>1.8</source>
+		<target>1.8</target>
+		<annotationProcessorPaths>
+			<path>
+				<groupId>org.mapstruct</groupId>
+				<artifactId>mapstruct-processor</artifactId>
+				<version>1.4.2.Final</version>
+			</path>
+		</annotationProcessorPaths>
+	</configuration>
+</plugin>
+
+
+
+

MapStruct takes advantage of generated getters, setters, and constructors from the Lombok library, follow this Lombok with Mapstruct guide to get Lombok with Mapstruct working.

+
+
+
+
MapStruct Configuration
+
+

We already discussed the benefits of dependency injection and MapStruct supports CDI with EJB, spring, and jsr330. The default retrieving method for a mapper is a factory that uses reflections and should be avoided. The component model should be set to CDI, as this will allow us to easily inject the generated mapper implementation. The component model can be configured in multiple ways.

+
+
+
Simple Configuration
+
+

Add the attribute componentModel to the @Mapper annotation in the mapper interface.

+
+
+
+
@Mapper(componentModel = "cdi")
+public interface ProductMapper{
+  ...
+}
+
+
+
+
+
MapperConfig Configuration
+
+

Create a shared configuration that can be used for multiple mappers. Implement an Interface and use the annotation @MapperConfig for the class. You can define all configurations in this interface and pass the generated MapperConfig.class with the config attribute to the mapper. The MapperConfig also defines the InjectionStrategy and MappingInheritanceStrategy both will be explained later. +A list of all configurations can be found here.

+
+
+
+
@MapperConfig(
+  componentModel = "cdi",
+  mappingInheritanceStrategy = MappingInheritanceStrategy.AUTO_INHERIT_FROM_CONFIG,
+  injectionStrategy = InjectionStrategy.CONSTRUCTOR
+)
+public interface MapperConfig{
+}
+
+
+
+
+
@Mapper( config = MapperConfig.class )
+public interface ProductMapper{
+  ...
+}
+
+
+
+

Any attributes not given via @Mapper will be inherited from the shared configuration MapperConfig.class.

+
+
+
+
Configuration via annotation processor options
+
+

The MapStruct code generator can be configured using annotation processor options. +You can pass the options to the compiler while invoking javac directly, or add the parameters to the maven configuration in the POM.xml

+
+
+

We are also using the constructor injection strategy to avoid field injections and potential reflections; it will also simplify our tests. +The option to pass the parameter to the annotation processor in the POM.xml is used and can be inspected in our Quarkus reference project.

+
+
+

A list of all annotation processor options can be found here.

+
+
+
+
+
Basic Bean-Mapper Usage
+
+

To use the mapper we have to implement the mapper interface and the function prototypes with a @Mapper annotation.

+
+
+
+
@Mapper
+public interface ProductMapper {
+
+  ProductDto map(ProductEntity model);
+
+  ProductEntity create(NewProductDto dto);
+}
+
+
+
+

The MapStruct annotation processor will generate the implementation for us under /target/generated-sources/, we just need to tell it that we would like to have a method that accepts a ProductEntity entity and returns a new ProductDto DTO.

+
+
+

The generated mapper implementation will be marked with the @ApplicationScoped annotation and thus can be injected into fields, constructor arguments, etc. using the @Inject annotation:

+
+
+
+
public class ProductRestService{
+
+  @Inject
+  ProductMapper mapper;
+}
+
+
+
+

That is the basic usage of a Mapstruct mapper. In the next chapter, we go a bit into detail and show some more configurations.

+
+
+
+
Advanced Bean-Mapper Usage
+
+

Let’s assume our Product entity and the ProductDto have some differently named properties that should be mapped. Add a mapping annotation to map the property type from Product to kind from ProductDto. We define the source name of the property and the target name.

+
+
+
+
@Mapper
+public interface ProductMapper {
+  @Mapping(target = "kind", source = "type")
+  ProductDto map(ProductEntity entity);
+
+  @InheritInverseConfiguration(name = "map" )
+  ProductEntity create(ProductDto dto);
+}
+
+
+
+

For bi-directional mappings, we can indicate that a method shall inherit the inverse configuration of the corresponding method with the @InheritInverseConfiguration. You can omit the name parameter if the result type of method A is the same as the +single-source type of method B and if the single-source type of A is the same as the result type of B. If this applies to multiple methods, the name attribute is needed. Specific mappings from the inversed method can (optionally) be overridden, ignored, and set to constants or expressions.

+
+
+

The mappingInheritanceStrategy can be defined as shown in MapStruct Configuration; the existing options can be found here.

+
+
+

Not always a mapped attribute has the same type in the source and target objects. For instance, an attribute may be of type int in the source bean but of type Long in the target bean.

+
+
+

Another example is references to other objects which should be mapped to the corresponding types in the target model. E.g. the class ShoppingCart might have a property content of the type Product which needs to be converted into a ProductDto object when mapping a ShoppingCart object to ShoppingCartDto. For these cases, it’s useful to understand how Mapstruct is converting the data types and the object references.

+
+
+

Also, the Chapter for nested bean mappings will help to configure MapStruct to map arbitrary deep object graphs.

+
+
+

You can study running MapStruct implementation examples given by MapStruct or in our Quarkus reference project

+
+
+
+
+
+
+
+
+1. "Stammdaten" in German. +
+
+2. Whether to use checked exceptions or not is a controversial topic. Arguments for both sides can be found under The Trouble with Checked Exceptions, Unchecked Exceptions — The Controversy, and Checked Exceptions are Evil. The arguments in favor of unchecked exceptions tend to prevail for applications built with Devon4j. Therefore, unchecked exceptions should be used for a consistent style. +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/devon4j.html b/docs/devon4j/1.0/devon4j.html new file mode 100644 index 00000000..8958ffde --- /dev/null +++ b/docs/devon4j/1.0/devon4j.html @@ -0,0 +1,12184 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Java

+
+
+

The devonfw community +${project.version}, ${buildtime}

+
+
+

devonfw provides a solution to building applications which combine best-in-class frameworks and libraries as well as industry proven practices and code conventions. +It massively speeds up development, reduces risks and helps you to deliver better results.

+
+
+

The following sections contain the complete compendium of devon4j, the Java stack of devonfw. +With devon4j we support both spring and quarkus as major frameworks. +However, the general coding patterns are based on common Java standards mainly from Jakarta EE and therefore do not differ between those frameworks. +Therefore, the general section contains all the documentation that is universal to Java and does not differ between the two frameworks. +Only the sections spring and quarkus contain documentation that is specific to the respective approach.

+
+
+

You can also read the latest version of this documentation online at the following sources:

+
+ +
+
+
+

1. General

+
+
+

Here you will find documentation and code-patterns for developing with Java in general, independent of the framework you choose.

+
+ +
+

1.1. Architecture

+
+

There are many different views that are summarized by the term architecture. First, we will introduce the key principles and architecture principles of devonfw. Then, we will go into details of the architecture of an application.

+
+
+
+

1.2. Key Principles

+
+

For devonfw we follow these fundamental key principles for all decisions about architecture, design, or choosing standards, libraries, and frameworks:

+
+
+
    +
  • +

    KISS
    +Keep it small and simple

    +
  • +
  • +

    Open
    +Commitment to open standards and solutions (no required dependencies to commercial or vendor-specific standards or solutions)

    +
  • +
  • +

    Patterns
    +We concentrate on providing patterns, best-practices and examples rather than writing framework code.

    +
  • +
  • +

    Solid
    +We pick solutions that are established and have been proven to be solid and robust in real-life (business) projects.

    +
  • +
+
+
+
+

1.3. Architecture Principles

+
+

Additionally we define the following principles that our architecture is based on:

+
+
+
    +
  • +

    Component Oriented Design
    +We follow a strictly component oriented design to address the following sub-principles:

    +
    +
      +
    • +

      Separation of Concerns

      +
    • +
    • +

      Reusability and avoiding redundant code

      +
    • +
    • +

      Information Hiding via component API and its exchangeable implementation treated as secret.

      +
    • +
    • +

      Design by Contract for self-contained, descriptive, and stable component APIs.

      +
    • +
    • +

      Layering as well as separation of business logic from technical code for better maintenance.

      +
    • +
    • +

      Data Sovereignty (and high cohesion with low coupling) says that a component is responsible for its data and changes to this data shall only happen via the component. Otherwise, maintenance problems will arise to ensure that data remains consistent. Therefore, interfaces of a component that may be used by other components are designed call-by-value and not call-by-reference.

      +
    • +
    +
    +
  • +
  • +

    Homogeneity
    +Solve similar problems in similar ways and establish a uniform code-style.

    +
  • +
+
+
+

As an architect you should be prepared for the future by reading the TechnoVision.

+
+
+
+

1.4. Application Architecture

+
+

For the architecture of an application we distinguish the following views:

+
+
+
    +
  • +

    The Business Architecture describes an application from the business perspective. It divides the application into business components and with full abstraction of technical aspects.

    +
  • +
  • +

    The Technical Architecture describes an application from the technical implementation perspective. It divides the application into technical layers and defines which technical products and frameworks are used to support these layers.

    +
  • +
  • +

    The Infrastructure Architecture describes an application from the operational infrastructure perspective. It defines the nodes used to run the application including clustering, load-balancing and networking. This view is not explored further in this guide.

    +
  • +
+
+
+
Business Architecture
+
+

The business architecture divides the application into business components. A business component has a well-defined responsibility that it encapsulates. All aspects related to that responsibility have to be implemented within that business component. Further, the business architecture defines the dependencies between the business components. These dependencies need to be free of cycles. A business component exports its functionality via well-defined interfaces as a self-contained API. A business component may use another business component via its API and compliant with the dependencies defined by the business architecture.

+
+
+

As the business domain and logic of an application can be totally different, the devonfw can not define a standardized business architecture. Depending on the business domain it has to be defined from scratch or from a domain reference architecture template. For very small systems it may be suitable to define just a single business component containing all the code.

+
+
+
+
Technical Architecture
+
+

The technical architecture divides the application into technical layers based on the multilayered architecture. A layer is a unit of code with the same category such as a service or presentation logic. So, a layer is often supported by a technical framework. Each business component can therefore be split into component parts for each layer. However, a business component may not have component parts for every layer (e.g. only a presentation part that utilizes logic from other components).

+
+
+

An overview of the technical reference architecture of the devonfw is given by figure "Technical Reference Architecture". +It defines the following layers visualized as horizontal boxes:

+
+
+ +
+
+

Also, you can see the (business) components as vertical boxes (e.g. A and X) and how they are composed out of component parts each one assigned to one of the technical layers.

+
+
+

Further, there are technical components for cross-cutting aspects grouped by the gray box on the left. Here is a complete list:

+
+ +
+
+devonfw architecture blueprint +
+
Figure 1. Technical Reference Architecture
+
+
+

Please click on the architecture image to open it as SVG and click on the layers and cross-cutting topics to open the according documentation guide.

+
+
+

We reflect this architecture in our code as described in our coding conventions allowing a traceability of business components, use-cases, layers, etc. into the code and giving +developers a sound orientation within the project.

+
+
+

Further, the architecture diagram shows the allowed dependencies illustrated by the dark green connectors. +Within a business component a component part can call the next component part on the layer directly below via a dependency on its API (vertical connectors). +While this is natural and obvious, it is generally forbidden to have dependencies upwards the layers +or to skip a layer by a direct dependency on a component part two or more layers below. +The general dependencies allowed between business components are defined by the business architecture. +In our reference architecture diagram we assume that the business component A1 is allowed to depend +on component A2. Therefore, a use-case within the logic component part of A1 is allowed to call a +use-case from A2 via a dependency on the component API. The same applies for dialogs on the client layer. +This is illustrated by the horizontal connectors. Please note that persistence entities are part of the API of the data-access component part so only the logic component part of the same +business component may depend on them.

+
+
+

The technical architecture has to address non-functional requirements:

+
+
+
    +
  • +

    scalability
    +is established by keeping state in the client and making the server state-less (except for login session). Via load-balancers new server nodes can be added to improve performance (horizontal scaling).

    +
  • +
  • +

    availability and reliability
    +are addressed by clustering with redundant nodes avoiding any single-point-of failure. If one node fails the system is still available. Further, the software has to be robust so there are no dead-locks or other bad effects that can make the system unavailable or not reliable.

    +
  • +
  • +

    security
    +is achieved in the devonfw by the right templates and best-practices that avoid vulnerabilities. See security guidelines for further details.

    +
  • +
  • +

    performance
    +is obtained by choosing the right products and proper configurations. While the actual implementation of the application matters for performance a proper design is important as it is the key to allow performance-optimizations (see e.g. caching).

    +
  • +
+
+
+
Technology Stack
+
+

The technology stack of the devonfw is illustrated by the following table.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Technology Stack of devonfw
TopicDetailStandardSuggested implementation

runtime

language & VM

Java

Oracle JDK

runtime

servlet-container

JEE

tomcat

component management

dependency injection

JSR330 & JSR250

spring

configuration

framework

-

spring-boot

persistence

OR-mapper

JPA

hibernate

batch

framework

JSR352

spring-batch

service

SOAP services

JAX-WS

CXF

service

REST services

JAX-RS

CXF

logging

framework

slf4j

logback

validation

framework

beanvalidation/JSR303

hibernate-validator

security

Authentication & Authorization

JAAS

spring-security

monitoring

framework

JMX

spring

monitoring

HTTP Bridge

HTTP & JSON

jolokia

AOP

framework

dynamic proxies

spring AOP

+
+ +
+
+
+
+

1.5. Configuration

+
+

An application needs to be configurable in order to allow internal setup (like CDI) but also to allow externalized configuration of a deployed package (e.g. integration into runtime environment). We rely on a comprehensive configuration approach following a "convention over configuration" pattern. This guide adds on to this by detailed instructions and best-practices how to deal with configurations.

+
+
+

In general we distinguish the following kinds of configuration that are explained in the following sections:

+
+
+ +
+
+
+

1.6. Internal Application Configuration

+
+

The application configuration contains all internal settings and wirings of the application (bean wiring, database mappings, etc.) and is maintained by the application developers at development time.

+
+
+

For more detail of Spring stack, see here

+
+
+
+

1.7. Externalized Configuration

+
+

Externalized configuration is a configuration that is provided separately to a deployment package and can be maintained undisturbed by re-deployments.

+
+
+
Environment Configuration
+
+

The environment configuration contains configuration parameters (typically port numbers, host names, passwords, logins, timeouts, certificates, etc.) specific for the different environments. These are under the control of the operators responsible for the application.

+
+
+

The environment configuration is maintained in application.properties files, defining various properties. +These properties are explained in the corresponding configuration sections of the guides for each topic:

+
+
+ +
+
+

Make sure your properties are thoroughly documented by providing a comment to each property. This inline documentation is most valuable for your operating department.

+
+
+

More about structuring your application.properties files can be read here for Spring.

+
+
+

For Quarkus, please refer to Quarkus Config Reference for more details.

+
+
+
+
Business Configuration
+
+

Often applications do not need business configuration. In case they do it should typically be editable by administrators via the GUI. The business configuration values should therefore be stored in the database in key/value pairs.

+
+
+

Therefore we suggest to create a dedicated table with (at least) the following columns:

+
+
+
    +
  • +

    ID

    +
  • +
  • +

    Property name

    +
  • +
  • +

    Property type (Boolean, Integer, String)

    +
  • +
  • +

    Property value

    +
  • +
  • +

    Description

    +
  • +
+
+
+

According to the entries in this table, an administrative GUI may show a generic form to modify business configuration. Boolean values should be shown as checkboxes, integer and string values as text fields. The values should be validated according to their type so an error is raised if you try to save a string in an integer property for example.

+
+
+

We recommend the following base layout for the hierarchical business configuration:

+
+
+

component.[subcomponent].[subcomponent].propertyname

+
+
+
+
+

1.8. Security

+
+

Often you need to have passwords (for databases, third-party services, etc.) as part of your configuration. These are typically environment specific (see above). However, with DevOps and continuous-deployment you might be tempted to commit such configurations into your version-control (e.g. git). Doing that with plain text passwords is a severe problem especially for production systems. Never do that! Instead we offer some suggestions how to deal with sensitive configurations:

+
+
+
Password Encryption
+
+

A simple but reasonable approach is to configure the passwords encrypted with a master-password. The master-password should be a strong secret that is specific for each environment. It must never be committed to version-control.

+
+
+

For Spring, we use jasypt-spring-boot. For more details, see here

+
+
+

For Quarkus, see here

+
+
+
Is this Security by Obscurity?
+
+
    +
  • +

    Yes, from the point of view to protect the passwords on the target environment this is nothing but security by obscurity. If an attacker somehow got full access to the machine this will only cause him to spend some more time.

    +
  • +
  • +

    No, if someone only gets the configuration file. So all your developers might have access to the version-control where the config is stored. Others might have access to the software releases that include these configs. But without the master-password, which should only be known to specific operators, no one else can decrypt the password (except with brute-force, which will take a very long time, see jasypt for details).

    +
  • +
+
+
+ +
+
+
+
+

1.9. Coding Conventions

+
+

The code should follow general conventions for Java (see Oracle Naming Conventions, Google Java Style, etc.). We consider this as common sense and provide configurations for SonarQube and related tools such as Checkstyle instead of repeating this here.

+
+
+
+

1.10. Naming

+
+

Besides general Java naming conventions, we follow the additional rules listed here explicitly:

+
+
+
    +
  • +

    Always use short but speaking names (for types, methods, fields, parameters, variables, constants, etc.).

    +
  • +
  • +

    Strictly avoid special characters in technical names (for files, types, fields, methods, properties, variables, database tables, columns, constraints, etc.). In other words only use Latin alphanumeric ASCII characters with the common allowed technical separators for the according context (e.g. underscore) for technical names (even excluding whitespaces).

    +
  • +
  • +

    For package segments and type names prefer singular forms (CustomerEntity instead of CustomersEntity). Only use plural forms when there is no singular or it is really semantically required (e.g. for a container that contains multiple of such objects).

    +
  • +
  • +

    Avoid having duplicate type names. The name of a class, interface, enum or annotation should be unique within your project unless this is intentionally desired in a special and reasonable situation.

    +
  • +
  • +

    Avoid artificial naming constructs such as prefixes (I*) or suffixes (*IF) for interfaces.

    +
  • +
  • +

    Use CamelCase even for abbreviations (XmlUtil instead of XMLUtil)

    +
  • +
  • +

    Avoid property/field names where the second character is upper-case at all (e.g. 'aBc'). See #1095 for details.

    +
  • +
  • +

    Names of Generics should be easy to understand. Where suitable follow the common rule E=Element, T=Type, K=Key, V=Value but feel free to use longer names for more specific cases such as ID, DTO or ENTITY. The capitalized naming helps to distinguish a generic type from a regular class.

    +
  • +
+
+
+
+

1.11. Packages

+
+

Java Packages are the most important element to structure your code. We use a strict packaging convention to map technical layers and business components (slices) to the code (See technical architecture for further details). By using the same names in documentation and code we create a strong link that gives orientation and makes it easy to find from business requirements, specifications or story tickets into the code and back.

+
+
+

For a devon4j-based application we use the following Java-Package schema:

+
+
+
+
«root».«component».«layer»[.«detail»]
+
+
+
+

E.g. in our example application we find the Spring Data repositories for the ordermanagement component in the package com.devonfw.application.mtsj.ordermanagement.dataaccess.api.repo

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2. Segments of package schema
SegmentDescriptionExample

«root»

Is the basic Java Package name-space of your app. Typically we suggest to use «group».«artifact» where «group» is your maven/gradle groupId corresponding to your organization or IT project owning the code following common Java Package conventions. The segment «artifact» is your maven/gradle artifactId and is typically the technical name of your app.

com.devonfw.application.mtsj

«component»

The (business) component the code belongs to. It is defined by the business architecture and uses terms from the business domain. Use the implicit component general for code not belonging to a specific component (foundation code).

salesmanagement

«layer»

The name of the technical layer (See technical architecture). Details are described for the modern project structure and for the classic project structure.

logic

«detail»

Here you are free to further divide your code into sub-components and other concerns according to the size of your component part. If you want to strictly separate API from implementation you should start «detail» with «scope» that is explained below.

dao

«scope»

The scope which is one of api (official API to be used by other layers or components), base (basic code to be reused by other implementations) and impl (implementation that should never be imported from outside). This segment was initially mandatory but due to trends such as microservices, lean, and agile we decided to make it optional and do not force anybody to use it.

api

+
+

Please note that devon4j library modules for spring use com.devonfw.module as «root» and the name of the module as «component». E.g. the API of our beanmapping module can be found in the package com.devonfw.module.beanmapping.common.api.

+
+
+
+

1.12. Code Tasks

+
+

Code spots that need some rework can be marked with the following tasks tags. These are already properly pre-configured in your development environment for auto completion and to view tasks you are responsible for. It is important to keep the number of code tasks low. Therefore, every member of the team should be responsible for the overall code quality. So if you change a piece of code and hit a code task that you can resolve in a reliable way, please do this as part of your change and remove the according tag.

+
+
+
TODO
+
+

Used to mark a piece of code that is not yet complete (typically because it can not be completed due to a dependency on something that is not ready).

+
+
+
+
 // TODO «author» «description»
+
+
+
+

A TODO tag is added by the author of the code who is also responsible for completing this task.

+
+
+
+
FIXME
+
+
+
 // FIXME «author» «description»
+
+
+
+

A FIXME tag is added by the author of the code or someone who found a bug he can not fix right now. The «author» who added the FIXME is also responsible for completing this task. This is very similar to a TODO but with a higher priority. FIXME tags indicate problems that should be resolved before a release is completed while TODO tags might have to stay for a longer time.

+
+
+
+
REVIEW
+
+
+
 // REVIEW «responsible» («reviewer») «description»
+
+
+
+

A REVIEW tag is added by a reviewer during a code review. Here the original author of the code is responsible to resolve the REVIEW tag and the reviewer is assigning this task to him. This is important for feedback and learning and has to be aligned with a review "process" where people talk to each other and get into discussion. In smaller or local teams a peer-review is preferable but this does not scale for large or even distributed teams.

+
+
+
+
+

1.13. Code-Documentation

+
+

As a general goal, the code should be easy to read and understand. Besides clear naming, the documentation is important. We follow these rules:

+
+
+
    +
  • +

    APIs (especially component interfaces) are properly documented with JavaDoc.

    +
  • +
  • +

    JavaDoc shall provide actual value - we do not write JavaDoc to satisfy tools such as checkstyle but to express information not already available in the signature.

    +
  • +
  • +

    We make use of {@link} tags in JavaDoc to make it more expressive.

    +
  • +
  • +

    JavaDoc of APIs describes how to use the type or method and not how the implementation internally works.

    +
  • +
  • +

    To document implementation details, we use code comments (e.g. // we have to flush explicitly to ensure version is up-to-date). This is only needed for complex logic.

    +
  • +
  • +

    Avoid the pointless {@inheritDoc} as since Java 1.5 there is the @Override annotation for overridden methods and your JavaDoc is inherited automatically even without any JavaDoc comment at all.

    +
  • +
+
+
+
+

1.14. Code-Style

+
+

This section gives you best practices to write better code and avoid pitfalls and mistakes.

+
+
+
BLOBs
+
+

Avoid using byte[] for BLOBs as this will load them entirely into your memory. This will cause performance issues or out of memory errors. Instead, use streams when dealing with BLOBs. For further details see BLOB support.

+
+
+
+
Stateless Programming
+
+

When implementing logic as components or beans of your container using dependency injection, we strongly encourage stateless programming. +This is not about data objects like an entity or transfer-object that are stateful by design. +Instead this applies to all classes annotated with @Named, @ApplicationScoped, @Stateless, etc. and all their super-classes. +These classes especially include your repositories, use-cases, and REST services. +Such classes shall never be modified after initialization. +Methods called at runtime (after initialization via the container) do not assign fields (member variables of your class) or mutate the object stored in a field. +This allows your component or bean to be stateless and thread-safe. +Therefore it can be initialized as a singleton so only one instance is created and shared accross all threads of the application. +Here is an example:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcApproveContractImpl implements UcApproveContract {
+
+  // bad
+  private String contractOwner;
+
+  private MyState state;
+
+  @Override
+  public void approve(Contract contract) {
+    this.contractOwner = contract.getOwner();
+    this.contractOwner = this.contractOwner.toLowerCase(Locale.US);
+    this.state.setAdmin(this.contractOwner.endsWith("admin"));
+    if (this.state.isAdmin()) {
+      ...
+    } else {
+      ...
+    }
+  }
+
+  // fine
+  @Override
+  public void approveContract(Contract contract) {
+    String contractOwner = contract.getOwner().toLowerCase(Locale.US);
+    if (contractOwner.endsWith("admin")) {
+      ...
+    } else {
+      ...
+    }
+  }
+}
+
+
+
+

As you can see in the bad code fields of the class are assigned when the method approve is called. +So multiple users and therefore threads calling this method concurrently can interfere and override this state causing side-effects on parallel threads. +This will lead to nasty bugs and errors that are hard to trace down. +They will not occur in simple tests but for sure in production with real users. +Therefore never do this and implement your functionality stateless. +That is keeping all state in local variables and strictly avoid modifying fields or their value as illustrated in the fine code. +If you find yourself passing many parameters between methods that all represent state, you can easily create a separate class that encapsulates this state. +However, then you need to create this state object in your method as local variable and pass it between methods as parameter:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcApproveContractImpl implements UcApproveContract {
+
+  // fine
+  @Override
+  public void approveContract(Contract contract) {
+    String contractOwner = contract.getOwner().toLowerCase(Locale.US);
+    MyState state = new MyState();
+    state.setAdmin(contractOwner.endsWith("admin"));
+    doApproveContract(contract, state);
+  }
+}
+
+
+
+
+
Closing Resources
+
+

Resources such as streams (InputStream, OutputStream, Reader, Writer) or transactions need to be handled properly. Therefore, it is important to follow these rules:

+
+
+
    +
  • +

    Each resource has to be closed properly, otherwise you will get out of file handles, TX sessions, memory leaks or the like

    +
  • +
  • +

    Where possible avoid to deal with such resources manually. That is why we are recommending @Transactional for transactions in devonfw (see Transaction Handling).

    +
  • +
  • +

    In case you have to deal with resources manually (e.g. binary streams) ensure to close them properly. See the example below for details.

    +
  • +
+
+
+

Closing streams and other such resources is error prone. Have a look at the following example:

+
+
+
+
// bad
+try {
+  InputStream in = new FileInputStream(file);
+  readData(in);
+  in.close();
+} catch (IOException e) {
+  throw new IllegalStateException("Failed to read data.", e);
+}
+
+
+
+

The code above is wrong as in case of an IOException the InputStream is not properly closed. In a server application such mistakes can cause severe errors that typically will only occur in production. As such resources implement the AutoCloseable interface you can use the try-with-resource syntax to write correct code. The following code shows a correct version of the example:

+
+
+
+
// fine
+try (InputStream in = new FileInputStream(file)) {
+  readData(in);
+} catch (IOException e) {
+  throw new IllegalStateException("Failed to read data.", e);
+}
+
+
+
+
+
Catching and handling Exceptions
+
+

When catching exceptions always ensure the following:

+
+
+
    +
  • +

    Never call printStackTrace() method on an exception

    +
  • +
  • +

    Either log or wrap and re-throw the entire caught exception. Be aware that the cause(s) of an exception are very valuable information. If you lose such information by improper exception-handling you may be unable to properly analyse production problems, which can cause severe issues.

    +
    +
      +
    • +

      If you wrap and re-throw an exception ensure that the caught exception is passed as cause to the newly created and thrown exception.

      +
    • +
    • +

      If you log an exception ensure that the entire exception is passed as argument to the logger (and not only the result of getMessage() or toString() on the exception).

      +
    • +
    +
    +
  • +
  • +

    See exception handling

    +
  • +
+
+
+
+
Lambdas and Streams
+
+

With Java8 you have cool new features like lambdas and monads like (Stream, CompletableFuture, Optional, etc.). +However, these new features can also be misused or lead to code that is hard to read or debug. To avoid pain, we give you the following best practices:

+
+
+
    +
  1. +

    Learn how to use the new features properly before using. Developers are often keen on using cool new features. When you do your first experiments in your project code you will cause deep pain and might be ashamed afterwards. Please study the features properly. Even Java8 experts still write for loops to iterate over collections, so only use these features where it really makes sense.

    +
  2. +
  3. +

    Streams shall only be used in fluent API calls as a Stream can not be forked or reused.

    +
  4. +
  5. +

    Each stream has to have exactly one terminal operation.

    +
  6. +
  7. +

    Do not write multiple statements into lambda code:

    +
    +
    +
    // bad
    +collection.stream().map(x -> {
    +Foo foo = doSomething(x);
    +...
    +return foo;
    +}).collect(Collectors.toList());
    +
    +
    +
    +

    This style makes the code hard to read and debug. Never do that! Instead, extract the lambda body to a private method with a meaningful name:

    +
    +
    +
    +
    // fine
    +collection.stream().map(this::convertToFoo).collect(Collectors.toList());
    +
    +
    +
  8. +
  9. +

    Do not use parallelStream() in general code (that will run on server side) unless you know exactly what you are doing and what is going on under the hood. Some developers might think that using parallel streams is a good idea as it will make the code faster. However, if you want to do performance optimizations talk to your technical lead (architect). Many features such as security and transactions will rely on contextual information that is associated with the current thread. Hence, using parallel streams will most probably cause serious bugs. Only use them for standalone (CLI) applications or for code that is just processing large amounts of data.

    +
  10. +
  11. +

    Do not perform operations on a sub-stream inside a lambda:

    +
    +
    +
    set.stream().flatMap(x -> x.getChildren().stream().filter(this::isSpecial)).collect(Collectors.toList()); // bad
    +set.stream().flatMap(x -> x.getChildren().stream()).filter(this::isSpecial).collect(Collectors.toList()); // fine
    +
    +
    +
  12. +
  13. +

    Only use collect at the end of the stream:

    +
    +
    +
    set.stream().collect(Collectors.toList()).forEach(...) // bad
    +set.stream().peek(...).collect(Collectors.toList()) // fine
    +
    +
    +
  14. +
  15. +

    Lambda parameters with type inference

    +
    +
    +
    (String a, Float b, Byte[] c) -> a.toString() + Float.toString(b) + Arrays.toString(c)  // bad
    +(a,b,c)  -> a.toString() + Float.toString(b) + Arrays.toString(c)  // fine
    +
    +Collections.sort(personList, (Person p1, Person p2) -> p1.getSurName().compareTo(p2.getSurName()));  // bad
    +Collections.sort(personList, (p1, p2) -> p1.getSurName().compareTo(p2.getSurName()));  // fine
    +
    +
    +
  16. +
  17. +

    Avoid Return Braces and Statement

    +
    +
    +
     a ->  { return a.toString(); } // bad
    + a ->  a.toString();   // fine
    +
    +
    +
  18. +
  19. +

    Avoid Parentheses with Single Parameter

    +
    +
    +
    (a) -> a.toString(); // bad
    + a -> a.toString();  // fine
    +
    +
    +
  20. +
  21. +

    Avoid if/else inside foreach method. Use Filter method & comprehension

    +
    +
    +
    // bad
    +public List<String> twitterHandles(List<Author> authors, String company) {
    +    final List<String> result = new ArrayList<String>();
    +    for (Author a : authors) {
    +      if (a.getCompany().equals(company)) {
    +        String handle = a.getTwitterHandle();
    +        if (handle != null) {
    +          result.add(handle);
    +        }
    +      }
    +    }
    +    return result;
    +  }
    +
    +
    +
    +
    +
    // fine
    +public List<String> twitterHandles(List<Author> authors, String company) {
    +    return authors.stream()
    +            .filter(a -> null != a && a.getCompany().equals(company))
    +            .map(a -> a.getTwitterHandle())
    +            .collect(toList());
    +  }
    +
    +
    +
  22. +
+
+
+
+
Optionals
+
+

With Optional you can wrap values to avoid a NullPointerException (NPE). However, it is not a good code-style to use Optional for every parameter or result to express that it may be null. For such cases use @Nullable or, even better, annotate @NotNull where null is not acceptable.

+
+
+

However, Optional can be used to prevent NPEs in fluent calls (due to the lack of the elvis operator):

+
+
+
+
Long id;
+id = fooCto.getBar().getBar().getId(); // may cause NPE
+id = Optional.ofNullable(fooCto).map(FooCto::getBar).map(BarCto::getBar).map(BarEto::getId).orElse(null); // null-safe
+
+
+
+
+
Encoding
+
+

Encoding (esp. Unicode with combining characters and surrogates) is a complex topic. Please study this topic if you have to deal with encodings and processing of special characters. For the basics follow these recommendations:

+
+
+
    +
  • +

    Whenever possible prefer unicode (UTF-8 or better) as encoding. This especially impacts your databases and has to be defined upfront as it typically can not be changed (easily) afterwards.

    +
  • +
  • +

    Do not cast from byte to char (unicode characters can be composed of multiple bytes, such cast may only work for ASCII characters)

    +
  • +
  • +

    Never convert the case of a String using the default locale (esp. when writing generic code like in devonfw). E.g. if you do "HI".toLowerCase() and your system locale is Turkish, then the output will be "hı" instead of "hi", which can lead to wrong assumptions and serious problems. If you want to do a "universal" case conversion always explicitly use an according western locale (e.g. toLowerCase(Locale.US)). Consider using a helper class (see e.g. CaseHelper) or create your own little static utility for that in your project.

    +
  • +
  • +

    Write your code independent from the default encoding (system property file.encoding) - this will most likely differ in JUnit from production environment

    +
    +
      +
    • +

      Always provide an encoding when you create a String from byte[]: new String(bytes, encoding)

      +
    • +
    • +

      Always provide an encoding when you create a Reader or Writer : new InputStreamReader(inStream, encoding)

      +
    • +
    +
    +
  • +
+
+
+
+
Prefer general API
+
+

Avoid unnecessary strong bindings:

+
+
+
    +
  • +

    Do not bind your code to implementations such as Vector or ArrayList instead of List

    +
  • +
  • +

    In APIs for input (=parameters) always consider to make little assumptions:

    +
    +
      +
    • +

      prefer Collection over List or Set where the difference does not matter (e.g. only use Set when you require uniqueness or highly efficient contains)

      +
    • +
    • +

      consider preferring Collection<? extends Foo> over Collection<Foo> when Foo is an interface or super-class

      +
    • +
    +
    +
  • +
+
+
+
+
Prefer primitive boolean
+
+

Except in rare cases where you need to allow a flag to be null, avoid using the object type Boolean.

+
+
+
+
// bad
+public Boolean isEmpty() {
+  return size() == 0;
+}
+
+
+
+

Instead always use the primitive boolean type:

+
+
+
+
// fine
+public boolean isEmpty() {
+  return size() == 0;
+}
+
+
+
+

The only known excuse is for flags in embeddable types due to limitations of hibernate.

+
+ +
+
+
+

1.15. Project structure

+
+

In devonfw we want to give clear structure and guidance for building applications. +This also allows tools such as CobiGen or sonar-devon4j-plugin to "understand" the code. +Also this helps developers going from one devonfw project to the next one to quickly understand the code-base. +If every developer knows where to find what, the project gets more efficient. +A long time ago maven standardized the project structure with src/main/java, etc. and turned chaos into structure. +With devonfw we experienced the same for the codebase (what is inside src/main/java).

+
+
+

We initially started devon4j based on spring and spring-boot and proposed a classic project structure. +With modern cloud-native trends we added a modern project structure, that is more lean and up-to-date with the latest market trends.

+
+ +
+
+

1.16. Dependency Injection

+
+

Dependency injection is one of the most important design patterns and is a key principle to a modular and component based architecture. +The Java Standard for dependency injection is javax.inject (JSR330) that we use in combination with JSR250. +Additionally, for scoping you can use CDI (Context and Dependency Injection) from JSR365.

+
+
+

There are many frameworks which support this standard including all recent Java EE application servers. +Therefore in devonfw we rely on these open standards and can propagate patterns and code examples that work independent from the underlying frameworks.

+
+
+
+

1.17. Key Principles

+
+

Within dependency injection a bean is typically a reusable unit of your application providing an encapsulated functionality. +This bean can be injected into other beans and it should in general be replaceable. +As an example we can think of a use-case, a repository, etc. +As best practice we use the following principles:

+
+
+
    +
  • +

    Stateless implementation
    +By default such beans shall be implemented stateless. If you store state information in member variables you can easily run into concurrency problems and nasty bugs. This is easy to avoid by using local variables and separate state classes for complex state-information. Try to avoid stateful beans wherever possible. Only add state if you are fully aware of what you are doing and properly document this as a warning in your JavaDoc.

    +
  • +
  • +

    Usage of Java standards
    +We use common standards (see above) that makes our code portable. Therefore we use standardized annotations like @Inject (javax.inject.Inject) instead of proprietary annotations such as @Autowired. Generally we avoid proprietary annotations in business code (logic layer).

    +
  • +
  • +

    Simple injection-style
    +In general you can choose between constructor, setter or field injection. For simplicity we recommend to do private field injection as it is very compact and easy to maintain. We believe that constructor injection is bad for maintenance especially in case of inheritance (if you change the dependencies you need to refactor all sub-classes). Private field injection and public setter injection are very similar but setter injection is much more verbose (often you are even forced to have javadoc for all public methods). If you are writing re-usable library code setter injection will make sense as it is more flexible. In a business application you typically do not need that and can save a lot of boiler-plate code if you use private field injection instead. Nowadays you are using container infrastructure also for your tests (see testing) so there is no need to inject manually (what would require a public setter).

    +
  • +
  • +

    KISS
    +To follow the KISS (keep it small and simple) principle we avoid advanced features (e.g. custom AOP, non-singleton beans) and only use them where necessary.

    +
  • +
  • +

    Separation of API and implementation
    +For important components we should separate a self-contained API documented with JavaDoc from its implementation. Code from other components that wants to use the implementation shall only rely on the API. However, for things that will never be exchanged no API as interface is required you can skip such separation.

    +
  • +
+
+
+
+

1.18. Example Bean

+
+

Here you can see the implementation of an example bean using dependency injection:

+
+
+
+
@ApplicationScoped
+@Named("MyComponent")
+public class MyComponentImpl implements MyComponent {
+  @Inject
+  private MyOtherComponent myOtherComponent;
+
+  @PostConstruct
+  public void init() {
+    // initialization if required (otherwise omit this method)
+  }
+
+  @PreDestroy
+  public void dispose() {
+    // shutdown bean, free resources if required (otherwise omit this method)
+  }
+
+  ...
+}
+
+
+
+

Here MyComponentImpl depends on MyOtherComponent that is injected into the field myOtherComponent because of the @Inject annotation. +To make this work there must be exactly one bean in the container (e.g. spring or quarkus) that is an instance of MyOtherComponent. +In order to put a bean into the container, we can use @ApplicationScoped in case of CDI (required for quarkus) for a stateless bean. +In spring we can omit a CDI annotation and the @Named annotation is already sufficient as a bean is stateless by default in spring. +If we always use @ApplicationScoped we can make this more explicit and more portable across different frameworks. +So in our example we put MyComponentImpl into the container. +That bean will be called MyComponent as we specified in the @Named annotation but we can also omit the name to use the classname as fallback. +Now our bean can be injected into other beans using @Inject annotation either via MyComponent interface (recommended when interface is present) or even directly via MyComponentImpl. +In case you omit the interface, you should also omit the Impl suffix or instead use Bean as suffix.

+
+
+
+

1.19. Multiple bean implementations

+
+

In some cases you might have multiple implementations as beans for the same interface. +The following sub-sections handle the different scenarios to give you guidance.

+
+
+
Only one implementation in container
+
+

In some cases you still have only one implementation active as bean in the container at runtime. +A typical example is that you have different implementations for test and main usage. +This case is easy, as @Inject will always be unique. +The only thing you need to care about is how to configure your framework (spring, quarkus, etc.) to know which implementation to put in the container depending on specific configuration. +In spring this can be achieved via the proprietary @Profile annotation.

+
+
+
+
Injecting all of multiple implementations
+
+

In some situations you may have an interface that defines a kind of "plugin". +You can have multiple implementations in your container and want to have all of them injected. +Then you can request a list with all the bean implementations via the interface as in the following example:

+
+
+
+
  @Inject
+  private List<MyConverter> converters;
+
+
+
+

Your code may iterate over all plugins (converters) and apply them sequentially. +Please note that the injection will fail (at least in spring), when there is no bean available to inject. +So you do not get an empty list injected but will get an exception on startup.

+
+
+
+
Injecting one of multiple implementations
+
+

Another scenario is that you have multiple implementations in your container coexisting, but for injection you may want to choose a specific implementation. +Here you could use the @Named annotation to specify a unique identifier for each implementation, which is called qualified injection:

+
+
+
+
@ApplicationScoped
+@Named("UserAuthenticator")
+public class UserAuthenticator implements Authenticator {
+  ...
+}
+@ApplicationScoped
+@Named("ServiceAuthenticator")
+public class ServiceAuthenticator implements Authenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  @Named("UserAuthenticator")
+  private Authenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  @Named("ServiceAuthenticator")
+  private Authenticator authenticator;
+  ...
+}
+
+
+
+

However, we discovered that this pattern is not so great: +The identifiers in the @Named annotation are just strings that could easily break. +You could use constants instead but still this is not the best solution.

+
+
+

In the end you can very much simplify this by just directly injecting the implementation instead:

+
+
+
+
@ApplicationScoped
+public class UserAuthenticator implements Authenticator {
+  ...
+}
+@ApplicationScoped
+public class ServiceAuthenticator implements Authenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  private UserAuthenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  private ServiceAuthenticator authenticator;
+  ...
+}
+
+
+
+

In case you want to strictly decouple from implementations, you can still create dedicated interfaces:

+
+
+
+
public interface UserAuthenticator extends Authenticator {}
+@ApplicationScoped
+public class UserAuthenticatorImpl implements UserAuthenticator {
+  ...
+}
+public interface ServiceAuthenticator extends Authenticator {}
+@ApplicationScoped
+public class ServiceAuthenticatorImpl implements ServiceAuthenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  private UserAuthenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  private ServiceAuthenticator authenticator;
+  ...
+}
+
+
+
+

However, as you can see this is again introducing additional boiler-plate code. +While the principle to separate API and implementation and strictly decouple from implementation is valuable in general, +you should always consider KISS, lean, and agile in contrast and balance pros and cons instead of blindly following dogmas.

+
+
+
+
+

1.20. Imports

+
+

Here are the import statements for the most important annotations for dependency injection

+
+
+
+
import javax.inject.Inject;
+import javax.inject.Named;
+import javax.enterprise.context.ApplicationScoped;
+// import javax.enterprise.context.RequestScoped;
+// import javax.enterprise.context.SessionScoped;
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+
+
+
+
+

1.21. Dependencies

+
+

Please note that with Jakarta EE the dependencies have changed. +When you want to start with Jakarta EE you should use these dependencies to get the annotations for dependency injection:

+
+
+
+
<!-- Basic injection annotations (JSR-330) -->
+<dependency>
+  <groupId>jakarta.inject</groupId>
+  <artifactId>jakarta.inject-api</artifactId>
+</dependency>
+<!-- Basic lifecycle and security annotations (JSR-250)-->
+<dependency>
+  <groupId>jakarta.annotation</groupId>
+  <artifactId>jakarta.annotation-api</artifactId>
+</dependency>
+<!-- Context and dependency injection API (JSR-365) -->
+<dependency>
+  <groupId>jakarta.enterprise</groupId>
+  <artifactId>jakarta.enterprise.cdi-api</artifactId>
+</dependency>
+
+
+
+

Please note that with quarkus you will get them as transitive dependencies out of the box. +The above Jakarta EE dependencies replace these JEE dependencies:

+
+
+
+
<!-- Basic injection annotations (JSR-330) -->
+<dependency>
+  <groupId>javax.inject</groupId>
+  <artifactId>javax.inject</artifactId>
+</dependency>
+<!-- Basic lifecycle and security annotations (JSR-250)-->
+<dependency>
+  <groupId>javax.annotation</groupId>
+  <artifactId>javax.annotation-api</artifactId>
+</dependency>
+<!-- Context and dependency injection API (JSR-365) -->
+<dependency>
+  <groupId>javax.enterprise</groupId>
+  <artifactId>cdi-api</artifactId>
+</dependency>
+
+
+ +
+
+

1.22. BLOB support

+
+

BLOB stands for Binary Large Object. A BLOB may be an image, an office document, ZIP archive or any other multimedia object. +Often these BLOBs are large. If this is the case you need to take care that you do not copy all the blob data into your application heap, e.g. when providing them via a REST service. +This could easily lead to performance problems or out of memory errors. +The solution for that problem is "streaming" those BLOBs directly from the database to the client. To demonstrate how this can be accomplished, devonfw provides an example.

+
+
+ +
+

1.24. Common

+
+

In our coding-conventions we define a clear packaging and layering. +However, there is always cross-cutting code that does not belong to a specific layer such as generic helpers, general code for configuration or integration, etc. +Therefore, we define a package segment common that can be used as «layer» for such cross-cutting code. +Code from any other layer is allowed to access such common code (at least within the same component).

+
+
+ +
+
+

1.25. Java Persistence API

+
+

For mapping java objects to a relational database we use the Java Persistence API (JPA). +As JPA implementation we recommend to use Hibernate. For general documentation about JPA and Hibernate follow the links above as we will not replicate the documentation. Here you will only find guidelines and examples how we recommend to use it properly. The following examples show how to map the data of a database to an entity. As we use JPA we abstract from SQL here. However, you will still need a DDL script for your schema and during maintenance also database migrations. Please follow our SQL guide for such artifacts.

+
+
+
+

1.26. Entity

+
+

Entities are part of the persistence layer and contain the actual data. They are POJOs (Plain Old Java Objects) on which the relational data of a database is mapped and vice versa. The mapping is configured via JPA annotations (javax.persistence). Usually an entity class corresponds to a table of a database and a property to a column of that table. A persistent entity instance then represents a row of the database table.

+
+
+
A Simple Entity
+
+

The following listing shows a simple example:

+
+
+
+
@Entity
+@Table(name="TEXTMESSAGE")
+public class MessageEntity extends ApplicationPersistenceEntity implements Message {
+
+  private String text;
+
+  public String getText() {
+    return this.text;
+  }
+
+  public void setText(String text) {
+    this.text = text;
+  }
+ }
+
+
+
+

The @Entity annotation defines that instances of this class will be entities which can be stored in the database. The @Table annotation is optional and can be used to define the name of the corresponding table in the database. If it is not specified, the simple name of the entity class is used instead.

+
+
+

In order to specify how to map the attributes to columns we annotate the corresponding getter methods (technically private field annotation is also possible but the approaches can not be mixed). +The @Id annotation specifies that a property should be used as primary key. +With the help of the @Column annotation it is possible to define the name of the column that an attribute is mapped to as well as other aspects such as nullable or unique. If no column name is specified, the name of the property is used as default.

+
+
+

Note that every entity class needs a constructor with public or protected visibility that does not have any arguments. Moreover, neither the class nor its getters and setters may be final.

+
+
+

Entities should be simple POJOs and not contain business logic.

+
+
+
+
Entities and Datatypes
+
+

Standard datatypes like Integer, BigDecimal, String, etc. are mapped automatically by JPA. Custom datatypes are mapped as serialized BLOB by default, which is typically undesired. +In order to map atomic custom datatypes (implementations of SimpleDatatype) we implement an AttributeConverter. Here is a simple example:

+
+
+
+
@Converter(autoApply = true)
+public class MoneyAttributeConverter implements AttributeConverter<Money, BigDecimal> {
+
+  public BigDecimal convertToDatabaseColumn(Money attribute) {
+    return attribute.getValue();
+  }
+
+  public Money convertToEntityAttribute(BigDecimal dbData) {
+    return new Money(dbData);
+  }
+}
+
+
+
+

The annotation @Converter is detected by the JPA vendor if the annotated class is in the packages to scan. Further, autoApply = true implies that the converter is automatically used for all properties of the handled datatype. Therefore all entities with properties of that datatype will automatically be mapped properly (in our example Money is mapped as BigDecimal).

+
+
+

In case you have a composite datatype that you need to map to multiple columns the JPA does not offer a real solution. As a workaround you can use a bean instead of a real datatype and declare it as @Embeddable. If you are using Hibernate you can implement CompositeUserType. Via the @TypeDef annotation it can be registered to Hibernate. If you want to annotate the CompositeUserType implementation itself you also need another annotation (e.g. MappedSuperclass, though not technically correct) so it is found by the scan.

+
+
+
Enumerations
+
+

By default JPA maps Enums via their ordinal. Therefore the database will only contain the ordinals (0, 1, 2, etc.). So, inside the database you can not easily understand their meaning. Using @Enumerated with EnumType.STRING allows to map the enum values to their name (Enum.name()). Both approaches are fragile when it comes to code changes and refactoring (if you change the order of the enum values or rename them) after the application is deployed to production. If you want to avoid this and get a robust mapping you can define a dedicated string in each enum value for database representation that you keep untouched. Then you treat the enum just like any other custom datatype.

+
+
+
+
BLOB
+
+

If binary or character large objects (BLOB/CLOB) should be used to store the value of an attribute, e.g. to store an icon, the @Lob annotation should be used as shown in the following listing:

+
+
+
+
@Lob
+public byte[] getIcon() {
+  return this.icon;
+}
+
+
+
+ + + + + +
+ + +Using a byte array will cause problems if BLOBs get large because the entire BLOB is loaded into the RAM of the server and has to be processed by the garbage collector. For larger BLOBs the type Blob and streaming should be used. +
+
+
+
+
public Blob getAttachment() {
+  return this.attachment;
+}
+
+
+
+
+
Date and Time
+
+

To store date and time related values, the temporal annotation can be used as shown in the listing below:

+
+
+
+
@Temporal(TemporalType.TIMESTAMP)
+public java.util.Date getStart() {
+  return start;
+}
+
+
+
+

Until Java 8 the Java data type java.util.Date (or Joda-Time) has to be used. +TemporalType defines the granularity. In this case, a precision of nanoseconds is used. If this granularity is not wanted, TemporalType.DATE can be used instead, which only has a granularity of milliseconds. +Mixing these two granularities can cause problems when comparing one value to another. This is why we only use TemporalType.TIMESTAMP.

+
+
+
+
QueryDSL and Custom Types
+
+

Using the Aliases API of QueryDSL might result in an InvalidDataAccessApiUsageException when using custom datatypes in entity properties. This can be circumvented in two steps:

+
+
+
    +
  1. +

    Ensure you have the following maven dependencies in your project (core module) to support custom types via the Aliases API:

    +
    +
    +
    <dependency>
    +  <groupId>org.ow2.asm</groupId>
    +  <artifactId>asm</artifactId>
    +</dependency>
    +<dependency>
    +  <groupId>cglib</groupId>
    +  <artifactId>cglib</artifactId>
    +</dependency>
    +
    +
    +
  2. +
  3. +

    Make sure, that all your custom types used in entities provide a non-argument constructor with at least visibility level protected.

    +
  4. +
+
+
+
+
+
Primary Keys
+
+

We only use simple Long values as primary keys (IDs). By default it is auto generated (@GeneratedValue(strategy=GenerationType.AUTO)). This is already provided by the class com.devonfw.<projectName>.general.dataaccess.api.AbstractPersistenceEntity within the classic project structure respectively com.devonfw.<projectName>.general.domain.model.AbstractPersistenceEntity within the modern project structure, that you can extend. +In case you have business oriented keys (often as String), you can define an additional property for it and declare it as unique (@Column(unique=true)). +Be sure to include "AUTO_INCREMENT" in your sql table field ID to be able to persist data (or similar for other databases).

+
+
+
+
+

1.27. Relationships

+
+
n:1 and 1:1 Relationships
+
+

Entities often do not exist independently but are in some relation to each other. For example, for every period of time one of the StaffMember’s of the restaurant example has worked, which is represented by the class WorkingTime, there is a relationship to this StaffMember.

+
+
+

The following listing shows how this can be modeled using JPA:

+
+
+
+
...
+
+@Entity
+public class WorkingTimeEntity {
+   ...
+
+   private StaffMemberEntity staffMember;
+
+   @ManyToOne
+   @JoinColumn(name="STAFFMEMBER")
+   public StaffMemberEntity getStaffMember() {
+      return this.staffMember;
+   }
+
+   public void setStaffMember(StaffMemberEntity staffMember) {
+      this.staffMember = staffMember;
+   }
+}
+
+
+
+

To represent the relationship, an attribute of the type of the corresponding entity class that is referenced has been introduced. The relationship is a n:1 relationship, because every WorkingTime belongs to exactly one StaffMember, but a StaffMember usually worked more often than once.
+This is why the @ManyToOne annotation is used here. For 1:1 relationships the @OneToOne annotation can be used which works basically the same way. To be able to save information about the relation in the database, an additional column in the corresponding table of WorkingTime is needed which contains the primary key of the referenced StaffMember. With the name element of the @JoinColumn annotation it is possible to specify the name of this column.

+
+
+
+
1:n and n:m Relationships
+
+

The relationship of the example listed above is currently a unidirectional one, as there is a getter method for retrieving the StaffMember from the WorkingTime object, but not vice versa.

+
+
+

To make it a bidirectional one, the following code has to be added to StaffMember:

+
+
+
+
  private Set<WorkingTimeEntity> workingTimes;
+
+  @OneToMany(mappedBy="staffMember")
+  public Set<WorkingTimeEntity> getWorkingTimes() {
+    return this.workingTimes;
+  }
+
+  public void setWorkingTimes(Set<WorkingTimeEntity> workingTimes) {
+    this.workingTimes = workingTimes;
+  }
+
+
+
+

To make the relationship bidirectional, the tables in the database do not have to be changed. Instead the column that corresponds to the attribute staffMember in class WorkingTime is used, which is specified by the mappedBy element of the @OneToMany annotation. Hibernate will search for corresponding WorkingTime objects automatically when a StaffMember is loaded.

+
+
+

The problem with bidirectional relationships is that if a WorkingTime object is added to the set or list workingTimes in StaffMember, this does not have any effect in the database unless +the staffMember attribute of that WorkingTime object is set. That is why devon4j advises not to use bidirectional relationships but to use queries instead. How to do this is shown here. If a bidirectional relationship should be used nevertheless, appropriate add and remove methods must be used.

+
+
+

For 1:n and n:m relations, devon4j demands that (unordered) Sets and no other collection types are used, as shown in the listing above. The only exception is when an ordering is really needed; in that case, (sorted) lists can be used.
+For example, if WorkingTime objects should be sorted by their start time, this could be done like this:

+
+
+
+
  private List<WorkingTimeEntity> workingTimes;
+
+  @OneToMany(mappedBy = "staffMember")
+  @OrderBy("startTime asc")
+  public List<WorkingTimeEntity> getWorkingTimes() {
+    return this.workingTimes;
+  }
+
+  public void setWorkingTimes(List<WorkingTimeEntity> workingTimes) {
+    this.workingTimes = workingTimes;
+  }
+
+
+
+

The value of the @OrderBy annotation consists of an attribute name of the class followed by asc (ascending) or desc (descending).

+
+
+

To store information about a n:m relationship, a separate table has to be used, as one column cannot store several values (at least if the database schema is in first normal form).
+For example if one wanted to extend the example application so that all ingredients of one FoodDrink can be saved and to model the ingredients themselves as entities (e.g. to store additional information about them), this could be modeled as follows (extract of class FoodDrink):

+
+
+
+
  private Set<IngredientEntity> ingredients;
+
+  @ManyToMany()
+  @JoinTable
+  public Set<IngredientEntity> getIngredients() {
+    return this.ingredients;
+  }
+
+  public void setOrders(Set<IngredientEntity> ingredients) {
+    this.ingredients = ingredients;
+  }
+
+
+
+

Information about the relation is stored in a table called BILL_ORDER that has to have two columns, one for referencing the Bill, the other one for referencing the Order. Note that the @JoinTable annotation is not needed in this case because a separate table is the default solution here (same for n:m relations) unless there is a mappedBy element specified.

+
+
+

For 1:n relationships this solution has the disadvantage that more joins (in the database system) are needed to get a Bill with all the Orders it refers to. This might have a negative impact on performance so that the solution to store a reference to the Bill row/entity in the Order’s table is probably the better solution in most cases.

+
+
+

Note that bidirectional n:m relationships are not allowed for applications based on devon4j. Instead a third entity has to be introduced, which "represents" the relationship (it has two n:1 relationships).

+
+
+
+
Eager vs. Lazy Loading
+
+

Using JPA it is possible to use either lazy or eager loading. Eager loading means that for entities retrieved from the database, other entities that are referenced by these entities are also retrieved, whereas lazy loading means that this is only done when they are actually needed, i.e. when the corresponding getter method is invoked.

+
+
+

Applications based on devon4j are strongly advised to always use lazy loading. The JPA defaults are:

+
+
+
    +
  • +

    @OneToMany: LAZY

    +
  • +
  • +

    @ManyToMany: LAZY

    +
  • +
  • +

    @ManyToOne: EAGER

    +
  • +
  • +

    @OneToOne: EAGER

    +
  • +
+
+
+

So at least for @ManyToOne and @OneToOne you always need to override the default by providing fetch = FetchType.LAZY.

+
+
+ + + + + +
+ + +Please read the performance guide. +
+
+
+
+
Cascading Relationships
+
+

For relations it is also possible to define whether operations are cascaded (like a recursion) to the related entity. +By default, nothing is done in these situations. This can be changed by using the cascade property of the annotation that specifies the relation type (@OneToOne, @ManyToOne, @OneToMany, @ManyToMany). This property accepts a CascadeType that offers the following options:

+
+
+
    +
  • +

    PERSIST (for EntityManager.persist, relevant for inserting transient entities into the DB)

    +
  • +
  • +

    REMOVE (for EntityManager.remove to delete entity from DB)

    +
  • +
  • +

    MERGE (for EntityManager.merge)

    +
  • +
  • +

    REFRESH (for EntityManager.refresh)

    +
  • +
  • +

    DETACH (for EntityManager.detach)

    +
  • +
  • +

    ALL (cascade all of the above operations)

    +
  • +
+
+
+

See here for more information.

+
+
+
+
Typesafe Foreign Keys using IdRef
+
+

For simple usage you can use Long for all your foreign keys. +However, as an optional pattern for advanced and type-safe usage, we offer IdRef.

+
+
+
+
+

1.28. Embeddable

+
+

An embeddable Object is a way to group properties of an entity into a separate Java (child) object. Unlike with relationships, the embeddable is not a separate entity and its properties are stored (embedded) in the same table together with the entity. This is helpful to structure and reuse groups of properties.

+
+
+

The following example shows an Address implemented as an embeddable class:

+
+
+
+
@Embeddable
+public class AddressEmbeddable {
+
+  private String street;
+  private String number;
+  private Integer zipCode;
+  private String city;
+
+  @Column(name="STREETNUMBER")
+  public String getNumber() {
+    return number;
+  }
+
+  public void setNumber(String number) {
+    this.number = number;
+  }
+
+  ...  // other getter and setter methods, equals, hashCode
+}
+
+
+
+

As you can see an embeddable is similar to an entity class, but with an @Embeddable annotation instead of the @Entity annotation and without primary key or modification counter. +An Embeddable does not exist on its own but in the context of an entity. +As a simplification Embeddables do not require a separate interface and ETO as the bean-mapper will create a copy automatically when converting the owning entity to an ETO. +However, in this case the embeddable becomes part of your api module that therefore needs a dependency on the JPA.

+
+
+

In addition to that the methods equals(Object) and hashCode() need to be implemented as this is required by Hibernate (it is not required for entities because they can be unambiguously identified by their primary key). For some hints on how to implement the hashCode() method please have a look here.

+
+
+

Using this AddressEmbeddable inside an entity class can be done like this:

+
+
+
+
  private AddressEmbeddable address;
+
+  @Embedded
+  public AddressEmbeddable getAddress() {
+    return this.address;
+  }
+
+  public void setAddress(AddressEmbeddable address) {
+    this.address = address;
+  }
+}
+
+
+
+

The @Embedded annotation needs to be used for embedded attributes. Note that if in all columns of the embeddable (here Address) are null, then the embeddable object itself is also null inside the entity. This has to be considered to avoid NullPointerException’s. Further this causes some issues with primitive types in embeddable classes that can be avoided by only using object types instead.

+
+
+
+

1.29. Inheritance

+
+

Just like normal java classes, entity classes can inherit from others. The only difference is that you need to specify how to map a class hierarchy to database tables. Generic abstract super-classes for entities can simply be annotated with @MappedSuperclass.

+
+
+

For all other cases the JPA offers the annotation @Inheritance with the property strategy taking an InheritanceType that has the following options:

+
+
+
+
+
    +
  • +

    SINGLE_TABLE: This strategy uses a single table that contains all columns needed to store all entity-types of the entire inheritance hierarchy. If a column is not needed for an entity because of its type, there is a null value in this column. An additional column is introduced, which denotes the type of the entity (called dtype).

    +
  • +
  • +

    TABLE_PER_CLASS: For each concrete entity class there is a table in the database that can store such an entity with all its attributes. An entity is only saved in the table corresponding to its most concrete type. To get all entities of a super type, joins are needed.

    +
  • +
  • +

    JOINED: In this case there is a table for every entity class including abstract classes, which contains only the columns for the persistent properties of that particular class. Additionally there is a primary key column in every table. To get an entity of a class that is a subclass of another one, joins are needed.

    +
  • +
+
+
+
+
+

Each of the three approaches has its advantages and drawbacks, which are discussed in detail here. In most cases, the first one should be used, because it is usually the fastest way to do the mapping, as no joins are needed when retrieving, searching or persisting entities. Moreover it is rather simple and easy to understand. +One major disadvantage is that the first approach could lead to a table with a lot of null values, which might have a negative impact on the database size.

+
+
+

The inheritance strategy has to be annotated to the top-most entity of the class hierarchy (where @MappedSuperclass classes are not considered) like in the following example:

+
+
+
+
@Entity
+@Inheritance(strategy=InheritanceType.SINGLE_TABLE)
+public abstract class MyParentEntity extends ApplicationPersistenceEntity implements MyParent {
+  ...
+}
+
+@Entity
+public class MyChildEntity extends MyParentEntity implements MyChild {
+  ...
+}
+
+@Entity
+public class MyOtherEntity extends MyParentEntity implements MyChild {
+  ...
+}
+
+
+
+

As a best practice we advise you to avoid entity hierarchies at all where possible and otherwise to keep the hierarchy as small as possible. In order to just ensure reuse or establish a common API you can consider a shared interface, a @MappedSuperclass or an @Embeddable instead of an entity hierarchy.

+
+
+
+

1.30. Repositories and DAOs

+
+

For each entity a code unit is created that groups all database operations for that entity. We recommend to use spring-data repositories for that as it is most efficient for developers. As an alternative there is still the classic approach using DAOs.

+
+
+
Concurrency Control
+
+

The concurrency control defines the way concurrent access to the same data of a database is handled. When several users (or threads of application servers) concurrently access a database, anomalies may happen, e.g. a transaction is able to see changes from another transaction although that one did not yet commit these changes. Most of these anomalies are automatically prevented by the database system, depending on the isolation level (property hibernate.connection.isolation in the jpa.xml, see here, or quarkus.datasource.jdbc.transaction-isolation-level in the application.properties).

+
+
+

Another anomaly is when two stakeholders concurrently access a record, do some changes and write them back to the database. The JPA addresses this with different locking strategies (see here).

+
+
+

As a best practice we are using optimistic locking for regular end-user services (OLTP) and pessimistic locking for batches.

+
+
+
+
Optimistic Locking
+
+

The class com.devonfw.module.jpa.persistence.api.AbstractPersistenceEntity already provides optimistic locking via a modificationCounter with the @Version annotation. Therefore JPA takes care of optimistic locking for you. When entities are transferred to clients, modified and sent back for update you need to ensure the modificationCounter is part of the game. If you follow our guides about transfer-objects and services this will also work out of the box. +You only have to care about two things:

+
+
+
    +
  • +

    How to deal with optimistic locking in relationships?
    +Assume an entity A contains a collection of B entities. Should there be a locking conflict if one user modifies an instance of A while another user in parallel modifies an instance of B that is contained in the other instance? To address this, take a look at FeatureForceIncrementModificationCounter.

    +
  • +
  • +

    What should happen in the UI if an OptimisticLockException occurred?
    +According to KISS our recommendation is that the user gets an error displayed that tells him to do his change again on the recent data. Try to design your system and the work processing in a way to keep such conflicts rare and you are fine.

    +
  • +
+
+
+
+
Pessimistic Locking
+
+

For back-end services and especially for batches optimistic locking is not suitable. A human user shall not cause a large batch process to fail because he was editing the same entity. Therefore such use-cases use pessimistic locking, which gives them a kind of priority over the human users. +In your DAO implementation you can provide methods that do pessimistic locking via EntityManager operations that take a LockModeType. Here is a simple example:

+
+
+
+
  getEntityManager().lock(entity, LockModeType.READ);
+
+
+
+

When using the lock(Object, LockModeType) method with LockModeType.READ, Hibernate will issue a SELECT …​ FOR UPDATE. This means that no one else can update the entity (see here for more information on the statement). If LockModeType.WRITE is specified, Hibernate issues a SELECT …​ FOR UPDATE NOWAIT instead, which has the same meaning as the statement above, but if there is already a lock, the program will not wait for this lock to be released. Instead, an exception is raised.
+Use one of the types if you want to modify the entity later on; for read-only access no lock is required.

+
+
+

As you might have noticed, the behavior of Hibernate deviates from what one would expect by looking at the LockModeType (especially LockModeType.READ should not cause a SELECT …​ FOR UPDATE to be issued). The framework actually deviates from what is specified in the JPA for unknown reasons.

+
+
+
+
+

1.31. Database Auditing

+ +
+
+

1.32. Testing Data-Access

+
+

For testing of Entities and Repositories or DAOs see testing guide.

+
+
+
+

1.33. Principles

+
+

We strongly recommend these principles:

+
+
+
    +
  • +

    Use the JPA where ever possible and use vendor (hibernate) specific features only for situations when JPA does not provide a solution. In the latter case consider first if you really need the feature.

    +
  • +
  • +

    Create your entities as simple POJOs and use JPA to annotate the getters in order to define the mapping.

    +
  • +
  • +

    Keep your entities simple and avoid putting advanced logic into entity methods.

    +
  • +
+
+
+
+

1.34. Database Configuration

+
+

For details on the configuration of the database connection and database logging of the individual framework, please refer to the respective configuration guide.

+
+
+

For spring see here.

+
+
+

For quarkus see here.

+
+
+
Database Migration
+ +
+
+
Pooling
+
+

You typically want to pool JDBC connections to boost performance by recycling previous connections. There are many libraries available to do connection pooling. We recommend to use HikariCP. For Oracle RDBMS see here.

+
+
+
+
+

1.35. Security

+
+
SQL-Injection
+
+

A common security threat is SQL-injection. Never build queries with string concatenation or your code might be vulnerable as in the following example:

+
+
+
+
  String query = "Select op from OrderPosition op where op.comment = " + userInput;
+  return getEntityManager().createQuery(query).getResultList();
+
+
+
+

Via the parameter userInput an attacker can inject SQL (JPQL) and execute arbitrary statements in the database causing extreme damage.

+
+
+

In order to prevent such injections you have to strictly follow our rules for queries:

+
+
+ +
+
+
+
Limited Permissions for Application
+
+

We suggest that you operate your application with a database user that has limited permissions so he can not modify the SQL schema (e.g. drop tables). For initializing the schema (DDL) or to do schema migrations use a separate user that is not used by the application itself.

+
+ +
+
+
Queries
+
+

The Java Persistence API (JPA) defines its own query language, the java persistence query language (JPQL) (see also JPQL tutorial), which is similar to SQL but operates on entities and their attributes instead of tables and columns.

+
+
+

The simplest CRUD-Queries (e.g. find an entity by its ID) are already built into the devonfw CRUD functionality (via Repository or DAO). For other cases you need to write your own query. We distinguish between static and dynamic queries. Static queries have a fixed JPQL query string that may only use parameters to customize the query at runtime. Instead, dynamic queries can change their clauses (WHERE, ORDER BY, JOIN, etc.) at runtime depending on the given search criteria.

+
+
+
+
Static Queries
+
+

E.g. to find all DishEntries (from MTS sample app) that have a price not exceeding a given maxPrice we write the following JPQL query:

+
+
+
+
SELECT dish FROM DishEntity dish WHERE dish.price <= :maxPrice
+
+
+
+

Here dish is used as alias (variable name) for our selected DishEntity (which refers to the simple name of the Java entity class). With dish.price we are referring to the Java property price (getPrice()/setPrice(…​)) in DishEntity. A named variable provided from outside (the search criteria at runtime) is specified with a colon (:) as prefix. Here with :maxPrice we refer to a variable that needs to be set via query.setParameter("maxPrice", maxPriceValue). JPQL also supports indexed parameters (?) but they are discouraged because they easily cause confusion and mistakes.

+
+
+
Using Queries to Avoid Bidirectional Relationships
+
+

With the usage of queries it is possible to avoid exposing relationships or modelling bidirectional relationships, which have some disadvantages (see relationships). This is especially desired for relationships between entities of different business components. +So for example to get all OrderLineEntities for a specific OrderEntity without using the orderLines relation from OrderEntity the following query could be used:

+
+
+
+
SELECT line FROM OrderLineEntity line WHERE line.order.id = :orderId
+
+
+
+
+
+
Dynamic Queries
+
+

For dynamic queries we use QueryDSL. It allows to implement queries in a powerful but readable and type-safe way (unlike Criteria API). If you already know JPQL you will quickly be able to read and write QueryDSL code. It feels like JPQL but implemented in Java instead of plain text.

+
+
+

Here is an example from our sample application:

+
+
+
+
  public List<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    QDishEntity dish = QDishEntity.dishEntity;
+    JPAQuery<DishEntity> query = new JPAQuery<OrderEntity>(getEntityManager());
+    query.from(dish);
+
+    Range<BigDecimal> priceRange = criteria.getPriceRange();
+    if (priceRange != null) {
+      BigDecimal min = priceRange.getMin();
+      if (min != null) {
+        query.where(dish.price.goe(min));
+      }
+      BigDecimal max = priceRange.getMax();
+      if (max != null) {
+        query.where(dish.price.loe(max));
+      }
+    }
+    String name = criteria.getName();
+    if ((name != null) && (!name.isEmpty())) {
+      query.where(dish.name.eq(name));
+    }
+    return query.fetch();
+  }
+
+
+
+

In this example we use the so called Q-types (QDishEntity). These are classes generated at build time by the QueryDSL annotation processor from entity classes. The Q-type classes can be used as static types representative of the original entity class.

+
+
+

For spring, devon4j provides another approach that you can use for your Spring applications to implement QueryDSL logic without having to use these metaclasses. An example can be found here.

+
+
+
+
Using Wildcards
+
+

For flexible queries it is often required to allow wildcards (especially in dynamic queries). While users intuitively expect glob syntax the SQL and JPQL standards work different. Therefore a mapping is required. devonfw provides this on a lower level by LikePatternSyntax and on a high level by QueryUtil (see QueryHelper.newStringClause(…​)).

+
+
+
+
Pagination
+
+

When dealing with large amounts of data, an efficient method of retrieving the data is required. Fetching the entire data set each time would be too time consuming. Instead, Paging is used to process only small subsets of the entire data set.

+
+
+

If you are using Spring Data repositories you will get pagination support out of the box by providing the interfaces Page and Pageable:

+
+
+
Listing 1. repository
+
+
Page<DishEntity> findAll(Pageable pageable);
+
+
+
+

Then you can create a Pageable object and pass it to the method call as follows:

+
+
+
+
int page = criteria.getPageNumber();
+int size = criteria.getPageSize();
+Pageable pageable = PageRequest.of(page, size);
+Page<DishEntity> dishes = dishRepository.findAll(pageable);
+
+
+
+
Paging with QueryDSL
+
+

Pagination is also supported for dynamic queries with QueryDSL:

+
+
+
+
  public Page<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    QDishEntity dish = QDishEntity.dishEntity;
+    JPAQuery<DishEntity> query = new JPAQuery<OrderEntity>(getEntityManager());
+    query.from(dish);
+
+    // conditions
+
+    int page = criteria.getPageNumber();
+    int size = criteria.getPageSize();
+    Pageable pageable = PageRequest.of(page, size);
+    query.offset(pageable.getOffset());
+    query.limit(pageable.getPageSize());
+
+    List<DishEntity> dishes = query.fetch();
+    return new PageImpl<>(dishes, pageable, dishes.size());
+  }
+
+
+
+
+
Pagination example
+
+

For the table entity we can make a search request by accessing the REST endpoint with pagination support like in the following examples:

+
+
+
+
POST mythaistar/services/rest/tablemanagement/v1/table/search
+{
+  "pagination": {
+    "size":2,
+    "total":true
+  }
+}
+
+//Response
+{
+    "pagination": {
+        "size": 2,
+        "page": 1,
+        "total": 11
+    },
+    "result": [
+        {
+            "id": 101,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 1,
+            "state": "OCCUPIED"
+        },
+        {
+            "id": 102,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 2,
+            "state": "FREE"
+        }
+    ]
+}
+
+
+
+ + + + + +
+ + +As we are requesting with the total property set to true the server responds with the total count of rows for the query. +
+
+
+

For retrieving a concrete page, we provide the page attribute with the desired value. Here we also left out the total property so the server doesn’t incur the effort to calculate it:

+
+
+
+
POST mythaistar/services/rest/tablemanagement/v1/table/search
+{
+  "pagination": {
+    "size":2,
+    "page":2
+  }
+}
+
+//Response
+
+{
+    "pagination": {
+        "size": 2,
+        "page": 2,
+        "total": null
+    },
+    "result": [
+        {
+            "id": 103,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 3,
+            "state": "FREE"
+        },
+        {
+            "id": 104,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 4,
+            "state": "FREE"
+        }
+    ]
+}
+
+
+
+
+
Pagination in devon4j-spring
+
+

For spring applications, devon4j also offers its own solution for pagination. You can find an example of this here.

+
+
+
+
+
Query Meta-Parameters
+
+

Queries can have meta-parameters that are provided via SearchCriteriaTo. Besides paging (see above) we also get timeout support.

+
+
+
+
Advanced Queries
+
+

Writing queries can sometimes get rather complex. The current examples given above only showed very simple basics. Within this topic a lot of advanced features need to be considered like:

+
+
+ +
+
+

This list is just containing the most important aspects. As we can not cover all these topics here, they are linked to external documentation that can help and guide you.

+
+ +
+
+
Spring Data
+
+

Spring Data JPA is supported by both Spring and Quarkus. However, in Quarkus this approach still has some limitations. For detailed information, see the official Quarkus Spring Data guide.

+
+
+
+
Motivation
+
+

The benefits of Spring Data are (for examples and explanations see next sections):

+
+
+
    +
  • +

    All you need is one single repository interface for each entity. No need for a separate implementation or other code artifacts like XML descriptors, NamedQueries class, etc.

    +
  • +
  • +

    You have all information together in one place (the repository interface) that actually belongs together (whereas in the classic approach you have the static queries in an XML file, constants to them in NamedQueries class and referencing usages in DAO implementation classes).

    +
  • +
  • +

    Static queries are most simple to realize as you do not need to write any method body. This means you can develop faster.

    +
  • +
  • +

    Support for paging is already built-in. Again, for static query methods there is nothing you have to do except using the paging objects in the signature.

    +
  • +
  • +

    Still you have the freedom to write custom implementations via default methods within the repository interface (e.g. for dynamic queries).

    +
  • +
+
+
+
+
Dependency
+
+

In case you want to switch to or add Spring Data support to your Spring or Quarkus application, all you need is to add the respective maven dependency:

+
+
+
Listing 2. spring
+
+
<dependency>
+  <groupId>org.springframework.boot</groupId>
+  <artifactId>spring-boot-starter-data-jpa</artifactId>
+</dependency>
+
+
+
+
Listing 3. quarkus
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-spring-data-jpa</artifactId>
+</dependency>
+
+
+
+
+
Repository
+
+

For each entity «Entity»Entity an interface is created with the name «Entity»Repository extending JpaRepository. +Such a repository is the analogy to a Data-Access-Object (DAO) used in the classic approach or when Spring Data is not an option.

+
+
+
Listing 4. Repository
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long> {
+
+}
+
+
+
+

The Spring Data repository provides some basic implementations for accessing data, e.g. returning all instances of a type (findAll) or returning an instance by its ID (findById).

+
+
+
+
Custom method implementation
+
+

In addition, repositories can be enriched with additional functionality, e.g. to add QueryDSL functionality or to override the default implementations, by using so called repository fragments:

+
+
+
Example
+
+

The following example shows how to write such a repository:

+
+
+
Listing 5. Repository
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long>, ProductFragment {
+
+  @Query("SELECT product FROM ProductEntity product" //
+      + " WHERE product.title = :title")
+  List<ProductEntity> findByTitle(@Param("title") String title);
+
+  @Query("SELECT product FROM ProductEntity product" //
+      + " WHERE product.title = :title")
+  Page<ProductEntity> findByTitlePaginated(@Param("title") String title, Pageable pageable);
+}
+
+
+
+
Listing 6. Repository fragment
+
+
public interface ProductFragment {
+  Page<ProductEntity> findByCriteria(ProductSearchCriteriaTo criteria);
+}
+
+
+
+
Listing 7. Fragment implementation
+
+
public class ProductFragmentImpl implements ProductFragment {
+  @Inject
+  EntityManager entityManager;
+
+  public Page<ProductEntity> findByCriteria(ProductSearchCriteriaTo criteria) {
+    QProductEntity product = QProductEntity.productEntity;
+    JPAQuery<ProductEntity> query = new JPAQuery<ProductEntity>(this.entityManager);
+    query.from(product);
+
+    String title = criteria.getTitle();
+    if ((title != null) && !title.isEmpty()) {
+      query.where(product.title.eq(title));
+    }
+
+    List<ProductEntity> products = query.fetch();
+    return new PageImpl<>(products, PageRequest.of(criteria.getPageNumber(), criteria.getPageSize()), products.size());
+  }
+}
+
+
+
+

This ProductRepository has the following features:

+
+
+
    +
  • +

    CRUD support from Spring Data (see JavaDoc for details).

    +
  • +
  • +

    Support for QueryDSL integration, paging and more.

    +
  • +
  • +

    A static query method findByTitle to find all ProductEntity instances from DB that have the given title. Please note the @Param annotation that links the method parameter with the variable inside the query (:title).

    +
  • +
  • +

    The same with pagination support via findByTitlePaginated method.

    +
  • +
  • +

    A dynamic query method findByCriteria showing the QueryDSL and paging integration into Spring via a fragment implementation.

    +
  • +
+
+
+

You can find an implementation of this ProductRepository in our Quarkus reference application.

+
+
+ + + + + +
+ + +In Quarkus, native and named queries via the @Query annotation are currently not supported +
+
+
+
+
Integration of Spring Data in devon4j-spring
+
+

For Spring applications, devon4j offers a proprietary solution that integrates seamlessly with QueryDSL and uses default methods instead of the fragment approach. A separate guide for this can be found here.

+
+
+
+
+
Drawbacks
+
+

Spring Data also has some drawbacks:

+
+
+
    +
  • +

    Some kind of magic behind the scenes that are not so easy to understand. So in case you want to extend all your repositories without providing the implementation via a default method in a parent repository interface you need to deep-dive into Spring Data. We assume that you do not need that and hope what Spring Data and devon already provides out-of-the-box is already sufficient.

    +
  • +
  • +

    The Spring Data magic also includes guessing the query from the method name. This is not easy to understand and especially to debug. Our suggestion is not to use this feature at all and either provide a @Query annotation or an implementation via default method.

    +
  • +
+
+
+
+
Limitations in Quarkus
+
+
    +
  • +

    Native and named queries are not supported using @Query annotation. You will receive something like: Build step io.quarkus.spring.data.deployment.SpringDataJPAProcessor#build threw an exception: java.lang.IllegalArgumentException: Attribute nativeQuery of @Query is currently not supported

    +
  • +
  • +

    Customizing the base repository for all repository interfaces in the code base, which is done in Spring Data by registering a class that extends SimpleJpaRepository

    +
  • +
+
+ +
+
+
Data Access Object
+
+

The Data Access Objects (DAOs) are part of the persistence layer. +They are responsible for a specific entity and should be named «Entity»Dao and «Entity»DaoImpl. +The DAO offers the so called CRUD-functionalities (create, retrieve, update, delete) for the corresponding entity. +Additionally a DAO may offer advanced operations such as query or locking methods.

+
+
+
+
DAO Interface
+
+

For each DAO there is an interface named «Entity»Dao that defines the API. For CRUD support and common naming we derive it from the ApplicationDao interface that comes with the devon application template:

+
+
+
+
public interface MyEntityDao extends ApplicationDao<MyEntity> {
+  List<MyEntity> findByCriteria(MyEntitySearchCriteria criteria);
+}
+
+
+
+

All CRUD operations are inherited from ApplicationDao so you only have to declare the additional methods.

+
+
+
+
DAO Implementation
+
+

Implementing a DAO is quite simple. We create a class named «Entity»DaoImpl that extends ApplicationDaoImpl and implements your «Entity»Dao interface:

+
+
+
+
public class MyEntityDaoImpl extends ApplicationDaoImpl<MyEntity> implements MyEntityDao {
+
+  public List<MyEntity> findByCriteria(MyEntitySearchCriteria criteria) {
+    TypedQuery<MyEntity> query = createQuery(criteria, getEntityManager());
+    return query.getResultList();
+  }
+  ...
+}
+
+
+
+

Again you only need to implement the additional non-CRUD methods that you have declared in your «Entity»Dao interface. +In the DAO implementation you can use the method getEntityManager() to access the EntityManager from the JPA. You will need the EntityManager to create and execute queries.

+
+
+
Static queries for DAO Implementation
+
+

All static queries are declared in the file src\main\resources\META-INF\orm.xml:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<entity-mappings version="1.0" xmlns="http://java.sun.com/xml/ns/persistence/orm" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://java.sun.com/xml/ns/persistence/orm http://java.sun.com/xml/ns/persistence/orm_1_0.xsd">
+  <named-query name="find.dish.with.max.price">
+    <query><![CDATA[SELECT dish FROM DishEntity dish WHERE dish.price <= :maxPrice]]></query>
+  </named-query>
+  ...
+</entity-mappings>
+
+
+
+

When your application is started, all these static queries will be created as prepared statements. This allows better performance and also ensures that you get errors for invalid JPQL queries when you start your app rather than later when the query is used.

+
+
+

To avoid redundant occurrences of the query name (get.open.order.positions.for.order) we define a constant for each named query:

+
+
+
+
public class NamedQueries {
+  public static final String FIND_DISH_WITH_MAX_PRICE = "find.dish.with.max.price";
+}
+
+
+
+

Note that changing the name of the java constant (FIND_DISH_WITH_MAX_PRICE) can be done easily with refactoring. Further you can trace where the query is used by searching the references of the constant.

+
+
+

The following listing shows how to use this query:

+
+
+
+
public List<DishEntity> findDishByMaxPrice(BigDecimal maxPrice) {
+  Query query = getEntityManager().createNamedQuery(NamedQueries.FIND_DISH_WITH_MAX_PRICE);
+  query.setParameter("maxPrice", maxPrice);
+  return query.getResultList();
+}
+
+
+
+

Via EntityManager.createNamedQuery(String) we create an instance of Query for our predefined static query. +Next we use setParameter(String, Object) to provide a parameter (maxPrice) to the query. This has to be done for all parameters of the query.

+
+
+

Note that using the createQuery(String) method, which takes the entire query as string (that may already contain the parameter) is not allowed to avoid SQL injection vulnerabilities. +When the method getResultList() is invoked, the query is executed and the result is delivered as List. As an alternative, there is a method called getSingleResult(), which returns the entity if the query returned exactly one and throws an exception otherwise.

+
+ +
+
+
+
JPA Performance
+
+

When using JPA the developer sometimes does not see or understand where and when statements to the database are triggered.

+
+
+
+
+

Establishing expectations Developers shouldn’t expect to sprinkle magic pixie dust on POJOs in hopes they will become persistent.

+
+
+
+— Dan Allen
+https://epdf.tips/seam-in-action.html +
+
+
+

So in case you do not understand what is going on under the hood of JPA, you will easily run into performance issues due to lazy loading and other effects.

+
+
+
+
N plus 1 Problem
+
+

The most prominent phenomenon is called the N+1 Problem. +We use entities from our MTS demo app as an example to explain the problem. +There is a DishEntity that has a @ManyToMany relation to +IngredientEntity. +Now we assume that we want to iterate all ingredients for a dish like this:

+
+
+
+
DishEntity dish = dao.findDishById(dishId);
+BigDecimal priceWithAllExtras = dish.getPrice();
+for (IngredientEntity ingredient : dish.getExtras()) {
+  priceWithAllExtras = priceWithAllExtras.add(ingredient.getPrice());
+}
+
+
+
+

Now dish.getExtras() is loaded lazy. Therefore the JPA vendor will provide a list with lazy initialized instances of IngredientEntity that only contain the ID of that entity. Now with every call of ingredient.getPrice() we technically trigger an SQL query statement to load the specific IngredientEntity by its ID from the database. +Now findDishById caused 1 initial query statement and for any number N of ingredients we are causing an additional query statement. This makes a total of N+1 statements. As causing statements to the database is an expensive operation with a lot of overhead (creating connection, etc.) this ends in bad performance and is therefore a problem (the N+1 Problem).

+
+
+
+
Solving N plus 1 Problem
+
+

To solve the N+1 Problem you need to change your code to only trigger a single statement instead. This can be achieved in various ways. The most universal solution is to use FETCH JOIN in order to pre-load the nested N child entities into the first level cache of the JPA vendor implementation. This will behave very similar as if the @ManyToMany relation to IngredientEntity was having FetchType.EAGER but only for the specific query and not in general. Because changing @ManyToMany to FetchType.EAGER would cause bad performance for other use cases where only the dish but not its extra ingredients are needed. For this reason all relations, including @OneToOne should always be FetchType.LAZY. Back to our example we simply replace dao.findDishById(dishId) with dao.findDishWithExtrasById(dishId) that we implement by the following JPQL query:

+
+
+
+
SELECT dish FROM DishEntity dish
+  LEFT JOIN FETCH dish.extras
+  WHERE dish.id = :dishId
+
+
+
+

The rest of the code does not have to be changed, but now dish.getExtras() will get the IngredientEntity from the first level cache where it was fetched by the initial query above.

+
+
+

Please note that if you only need the sum of the prices from the extras you can also create a query using an aggregator function:

+
+
+
+
SELECT sum(dish.extras.price) FROM DishEntity dish
+
+
+
+

As you can see you need to understand the concepts in order to get good performance.

+
+
+

There are many advanced topics such as creating database indexes or calculating statistics for the query optimizer to get the best performance. For such advanced topics we recommend to have a database expert in your team that cares about such things. However, understanding the N+1 Problem and its solutions is something that every Java developer in the team needs to understand.

+
+ +
+
+
IdRef
+
+

IdRef can be used to reference other entities in TOs in order to make them type-safe and semantically more expressive. +It is an optional concept in devon4j for more complex applications that make intensive use of relations and foreign keys.

+
+
+
+
Motivation
+
+

Assuming you have a method signature like the following:

+
+
+
+
Long approve(Long cId, Long cuId);
+
+
+
+

So what are the parameters? What is returned?

+
+
+

IdRef is just a wrapper for a Long used as foreign key. This makes our signature much more expressive and self-explanatory:

+
+
+
+
IdRef<Contract> approve(IdRef<Contract> cId, IdRef<Customer> cuId);
+
+
+
+

Now we can easily see, that the result and the parameters are foreign-keys and which entity they are referring to via their generic type. +We can read the javadoc of these entities from the generic type and understand the context. +Finally, when passing IdRef objects to such methods, we get compile errors in case we accidentally place parameters in the wrong order.

+
+
+
+
IdRef and Mapping
+
+

In order to easily map relations from entities to transfer-objects and back, we can easily also put according getters and setters into our entities:

+
+
+
+
public class ContractEntity extends ApplicationPersistenceEntity implements Contract {
+
+  private CustomerEntity customer;
+
+  ...
+
+  @ManyToOne(fetch = FetchType.LAZY)
+  @JoinColumn(name = "CUSTOMER_ID")
+  public CustomerEntity getCustomer() {
+    return this.customer;
+  }
+
+  public void setCustomer(CustomerEntity customer) {
+    this.customer = customer;
+  }
+
+  @Transient
+  public IdRef<Customer> getCustomerId() {
+    return IdRef.of(this.customer);
+  }
+
+  public void setCustomerId(IdRef<Customer> customerId) {
+    this.customer = JpaHelper.asEntity(customerId, CustomerEntity.class);
+  }
+}
+
+
+
+

Now, ensure that you have the same getters and setters for customerId in your Eto:

+
+
+
+
public class ContractEto extends AbstractEto implements Contract {
+
+  private IdRef<Customer> customerId;
+
+  ...
+
+  public IdRef<Customer> getCustomerId() {
+    return this.customerId;
+  }
+
+  public void setCustomerId(IdRef<Customer> customerId) {
+    this.customerId = customerId;
+  }
+}
+
+
+
+

This way the bean-mapper can automatically map from your entity (ContractEntity) to your Eto (ContractEto) and vice-versa.

+
+
+
+
JpaHelper and EntityManager access
+
+

In the above example we used JpaHelper.asEntity to convert the foreign key (IdRef<Customer>) to the according entity (CustomerEntity). +This will internally use EntityManager.getReference to properly create a JPA entity. +The alternative "solution" that may be used with Long instead of IdRef is typically:

+
+
+
+
  public void setCustomerId(IdRef<Customer> customerId) {
+    Long id = null;
+    if (customerId != null) {
+      id = customerId.getId();
+    }
+    if (id == null) {
+      this.customer = null;
+    } else {
+      this.customer = new CustomerEntity();
+      this.customer.setId(id);
+    }
+  }
+
+
+
+

While this "solution" works in most cases, we discovered some more complex cases, where it fails with very strange hibernate exceptions. +When cleanly creating the entity via EntityManager.getReference instead, it works in all cases. +So how can JpaHelper.asEntity as a static method access the EntityManager? +Therefore we need to initialize this as otherwise you may see this exception:

+
+
+
+
java.lang.IllegalStateException: EntityManager has not yet been initialized!
+	at com.devonfw.module.jpa.dataaccess.api.JpaEntityManagerAccess.getEntityManager(JpaEntityManagerAccess.java:38)
+	at com.devonfw.module.jpa.dataaccess.api.JpaHelper.asEntity(JpaHelper.java:49)
+
+
+
+

For main usage in your application we assume that there is only one instance of EntityManager. +Therefore we can initialize this instance during the spring boot setup. +This is what we provide for you in JpaInitializer +when creating a devon4j app.

+
+
+
JpaHelper and spring-test
+
+

Further, you also want your code to work in integration tests. +Spring-test provides a lot of magic under the hood to make integration testing easy for you. +To boost the performance when running multiple tests, spring is smart and avoids creating the same spring-context multiple times. +Therefore it stores these contexts so that if a test-case is executed with a specific spring-configuration that has already been set up before, +the same spring-context can be reused instead of creating it again. +However, your tests may have multiple spring configurations leading to multiple spring-contexts. +Even worse, these tests can run in any order leading to switching back and forth between spring-contexts. +Therefore, a static initializer during the spring boot setup can lead to strange errors as you can get the wrong EntityManager instance. +In order to fix such problems, we provide a solution pattern via DbTest ensuring for every test, +that the proper instance of EntityManager is initialized. +Therefore you should derive directly or indirectly (e.g. via ComponentDbTest and SubsystemDbTest) from DbTest or adopt your own way to apply this pattern to your tests, when using JpaHelper. +This already happens if you are extending ApplicationComponentTest or ApplicationSubsystemTest.

+
+
+ +
+
+
+
Transaction Handling
+
+

For transaction handling we use AOP to add transaction control via annotations as an aspect. +This is done by annotating your code with the @Transactional annotation. +You can either annotate your container bean at class level to make all methods transactional or you can annotate individual methods to make them transactional:

+
+
+
+
  @Transactional
+  public Output getData(Input input) {
+    ...
+  }
+
+
+
+
+
JTA Imports
+
+

Here are the import statements for transaction support:

+
+
+
+
import javax.transaction.Transactional;
+
+
+
+ + + + + +
+ + +Use the above import statement to follow JEE and avoid using org.springframework.transaction.annotation.Transactional. +
+
+
+
+
JTA Dependencies
+
+

Please note that with Jakarta EE the dependencies have changed. +When you want to start with Jakarta EE you should use these dependencies to get the annotations for dependency injection:

+
+
+
+
<!-- Java Transaction API (JTA) -->
+<dependency>
+  <groupId>jakarta.transaction</groupId>
+  <artifactId>jakarta.transaction-api</artifactId>
+</dependency>
+
+
+
+

Please note that with quarkus you will get them as transitive dependencies out of the box. +The above Jakarta EE dependencies replace these JEE dependencies:

+
+
+
+
<!-- Java Transaction API (JTA) -->
+<dependency>
+  <groupId>javax.transaction</groupId>
+  <artifactId>javax.transaction-api</artifactId>
+</dependency>
+
+
+
+
+
Handling constraint violations
+
+

Using @Transactional magically wraps transaction handling around your code. +As constraints are checked by the database at the end when the transaction gets committed, a constraint violation will be thrown by this aspect outside your code. +In case you have to handle constraint violations manually, you have to do that in code outside the logic that is annotated with @Transactional. +This may be done in a service operation by catching a ConstraintViolationException (org.hibernate.exception.ConstraintViolationException for hibernate). +As a generic approach you can solve this via REST exception handling.

+
+
+
+
Batches
+
+

Transaction control for batches is a lot more complicated and is described in the batch layer.

+
+
+ +
+
+
+

1.36. SQL

+
+

For general guides on dealing or avoiding SQL, preventing SQL-injection, etc. you should study domain layer.

+
+
+
+

1.37. Naming Conventions

+
+

Here we define naming conventions that you should follow whenever you write SQL files:

+
+
+
    +
  • +

    All SQL-Keywords in UPPER CASE

    +
  • +
  • +

    Indentation should be 2 spaces as suggested by devonfw for every format.

    +
  • +
+
+
+
DDL
+
+

The naming conventions for database constructs (tables, columns, triggers, constraints, etc.) should be aligned with your database product and their operators. +However, when you have the freedom of choice and a modern case-sensitive database, you can simply use your code conventions also for database constructs to avoid explicitly mapping each and every property (e.g. RestaurantTable vs. RESTAURANT_TABLE).

+
+
+
    +
  • +

    Define columns and constraints inline in the statement to create the table

    +
  • +
  • +

    Indent column types so they all start in the same text column

    +
  • +
  • +

    Constraints should be named explicitly (to get a reasonable hint error messages) with:

    +
    +
      +
    • +

      PK_«table» for primary key (name optional here as PK constraint are fundamental)

      +
    • +
    • +

      FK_«table»_«property» for foreign keys («table» and «property» are both on the source where the foreign key is defined)

      +
    • +
    • +

      UC_«table»_«property»[_«propertyN»]* for unique constraints

      +
    • +
    • +

      CK_«table»_«check» for check constraints («check» describes the check, if it is defined on a single property it should start with the property).

      +
    • +
    +
    +
  • +
  • +

    Old RDBMS had hard limitations for names (e.g. 30 characters). Please note that recent databases have overcome this very low length limitations. However, keep your names short but precise and try to define common abbreviations in your project for according (business) terms. Especially do not just truncate the names at the limit.

    +
  • +
  • +

    If possible add comments on table and columns to help DBAs understanding your schema. This is also honored by many tools (not only DBA-tools).

    +
  • +
+
+
+

Here is a brief example of a DDL:

+
+
+
+
CREATE SEQUENCE HIBERNATE_SEQUENCE START WITH 1000000;
+
+-- *** Table ***
+CREATE TABLE RESTAURANT_TABLE (
+  ID                   NUMBER(19) NOT NULL,
+  MODIFICATION_COUNTER INTEGER NOT NULL,
+  SEATS                INTEGER NOT NULL,
+  CONSTRAINT PK_TABLE PRIMARY KEY(ID)
+);
+COMMENT ON TABLE RESTAURANT_TABLE IS 'The physical tables inside the restaurant.';
+-- *** Order ***
+CREATE TABLE RESTAURANT_ORDER (
+  ID                   NUMBER(19) NOT NULL,
+  MODIFICATION_COUNTER INTEGER NOT NULL,
+  TABLE_ID             NUMBER(19) NOT NULL,
+  TOTAL                DECIMAL(5, 2) NOT NULL,
+  CREATION_DATE        TIMESTAMP NOT NULL,
+  PAYMENT_DATE         TIMESTAMP,
+  STATUS               VARCHAR2(10 CHAR) NOT NULL,
+  CONSTRAINT PK_ORDER PRIMARY KEY(ID),
+  CONSTRAINT FK_ORDER_TABLE_ID FOREIGN KEY(TABLE_ID) REFERENCES RESTAURANT_TABLE(ID)
+);
+COMMENT ON TABLE RESTAURANT_ORDER IS 'An order and bill at the restaurant.';
+...
+
+
+
+

ATTENTION: Please note that TABLE and ORDER are reserved keywords in SQL and you should avoid using such keywords to prevent problems.

+
+
+
+
Data
+
+

For insert, update, delete, etc. of data SQL scripts should additionally follow these guidelines:

+
+
+
    +
  • +

    Inserts always with the same order of columns in blocks for each table.

    +
  • +
  • +

    Insert column values always starting with ID, MODIFICATION_COUNTER, [DTYPE, ] …​

    +
  • +
  • +

    List columns with fixed length values (boolean, number, enums, etc.) before columns with free text to support alignment of multiple insert statements

    +
  • +
  • +

    Pro Tip: Get familiar with column mode of advanced editors such as notepad++ when editing large blocks of similar insert statements.

    +
  • +
+
+
+
+
INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (0, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (1, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (2, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (3, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (4, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (5, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (6, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (7, 1, 8);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (8, 1, 8);
+...
+
+
+
+

See also Database Migrations.

+
+
+ +
+
+
+

1.38. Database Migration

+
+

When you have a schema-based database, +you need a solution for schema versioning and migration for your database. +A specific release of your app requires a corresponding version of the schema in the database to run. +As you want simple and continuous deployment you should automate the schema versioning and database migration.

+
+
+

The general idea is that your software product contains "scripts" to migrate the database from schema version X to version X+1. +When you begin your project you start with version 1 and with every increment of your app that needs a change to the database schema (e.g. a new table, a new column to an existing table, a new index, etc.) you add another "script" that migrates from the current to the next version. +For simplicity these versions are just sequential numbers or timestamps. +Now, the solution you choose will automatically manage the schema version in a separate metadata table in your database that stores the current schema version. +When your app is started, it will check the current version inside the database from that metadata table. +As long as there are "scripts" that migrate from there to a higher version, they will be automatically applied to the database and this process is recorded in the metadata table in your database, which also updates the current schema version there. +Using this approach, you can start with an empty database, which will result in all "scripts" being applied sequentially. +Also any version of your database schema can be present and you will always end up in a controlled migration to the latest schema version.

+
+
+
+

1.39. Options for database migration

+
+

For database migration you can choose between the following options:

+
+
+
    +
  • +

    flyway (KISS based approach with migrations as SQL)

    +
  • +
  • +

    liquibase (more complex approach with database abstraction)

    +
  • +
+
+ +
+
Flyway
+
+

Flyway is a tool for database migration and schema versioning. +See why for a motivation for using flyway.

+
+
+

Flyway can be used standalone e.g. via flyway-maven-plugin or can be integrated directly into your app to make sure the database migration takes place on startup. +For simplicity we recommend to integrate flyway into your app. +However, you need to be aware that therefore your app needs database access with full schema owner permissions.

+
+
+
+
Organizational Advice
+
+

A few considerations with respect to project organization will help to implement maintainable Flyway migrations.

+
+
+

At first, testing and production environments must be clearly and consistently distinguished. Use the following directory structure to achieve this distinction:

+
+
+
+
  src/main/resources/db
+  src/test/resources/db
+
+
+
+

Although this structure introduces redundancies, the benefit outweighs this disadvantage. +An even more fine-grained production directory structure which contains one sub folder per release should be implemented:

+
+
+
+
  src/main/resources/db/migration/releases/X.Y/x.sql
+
+
+
+

Emphasizing that migration scripts below the current version must never be changed will aid the second advantage of migrations: it will always be clearly reproducible in which state the database currently is. +Here, it is important to mention that, if test data is required, it must be managed separately from the migration data in the following directory:

+
+
+
+
  src/test/resources/db/migration/
+
+
+
+

The migration directory is added to aid easy usage of Flyway defaults. +Of course, test data should also be managed per release as like production data.

+
+
+

With regard to content, separation of concerns (SoC) is an important goal. SoC can be achieved by distinguishing and writing multiple scripts with respect to business components/use cases (or database tables in case of large volumes of master data [1]). Comprehensible file names aid this separation.

+
+
+

It is important to have clear responsibilities regarding the database, the persistence layer (JPA), and migrations. Therefore a dedicated database expert should be in charge of any migrations performed or she should at least be informed before any change to any of the mentioned parts is applied.

+
+
+
+
Technical Configuration
+
+

Database migrations can be SQL based or Java based.

+
+
+

To enable auto migration on startup (not recommended for productive environment) set the following property in the application.properties file for an environment.

+
+
+
+
flyway.enabled=true
+flyway.clean-on-validation-error=false
+
+
+
+

For development environment it is helpful to set both properties to true in order to simplify development. For regular environments flyway.clean-on-validation-error should be false.

+
+
+

If you want to use Flyway set the following property in any case to prevent Hibernate from doing changes on the database (pre-configured by default in devonfw):

+
+
+
+
spring.jpa.hibernate.ddl-auto=validate
+
+
+
+

The setting must be communicated to and coordinated with the customer and their needs. +In acceptance testing the same configuration as for the production environment should be enabled.

+
+
+

Since migration scripts will also be versioned the end-of-line (EOL) style must be fixated according to this issue. This is however solved in flyway 4.0+ and the latest devonfw release. +Also, the version numbers of migration scripts should not consist of simple ascending integer numbers like V0001…​, V0002…​, …​ This naming may lead to problems when merging branches. Instead the usage of timestamps as version numbers will help to avoid such problems.

+
+
+
+
Naming Conventions
+
+

Database migrations should follow this naming convention: +V<version>__<description> (e.g.: V12345__Add_new_table.sql).

+
+
+

It is also possible to use Flyway for test data. To do so place your test data migrations in src/main/resources/db/testdata/ and set property

+
+
+
+
flyway.locations=classpath:db/migration/releases,classpath:db/migration/testdata
+
+
+
+

Then Flyway scans the additional location for migrations and applies all in the order specified by their version. If migrations V0001__... and V0002__... exist and a test data migration should be applied in between you can name it V0001_1__....

+
+ +
+
+
Liquibase
+ +
+

See devon4j#303 for details and status.

+
+
+
+
Spring-boot usage
+
+

For using liquibase in spring see Using Liquibase with Spring Boot.

+
+
+
+
Quarkus usage
+
+

For using liquibase in quarkus see Using Liquibase.

+
+
+ +
+
+
+

1.40. REST

+
+

REST (REpresentational State Transfer) is an inter-operable protocol for services that is more lightweight than SOAP. +However, it is no real standard and can cause confusion (see REST philosophy). +Therefore we define best practices here to guide you.

+
+
+
+

1.41. URLs

+
+

URLs are not case sensitive. Hence, we follow the best practice to use only lower-case-letters-with-hyphen-to-separate-words. +For operations in REST we distinguish the following types of URLs:

+
+
+
    +
  • +

    A collection URL is built from the rest service URL by appending the name of a collection. This is typically the name of an entity. Such a URL identifies the entire collection of all elements of this type. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity

    +
  • +
  • +

    An element URL is built from a collection URL by appending an element ID. It identifies a single element (entity) within the collection. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/42

    +
  • +
+
+
+

To follow KISS avoid using plural forms (…​/productmanagement/v1/products vs. …​/productmanagement/v1/product/42). Always use singular forms and avoid confusions (except for the rare cases where no singular exists).

+
+
+

The REST URL scheme fits perfect for CRUD operations. +For business operations (processing, calculation, advanced search, etc.) we simply append a collection URL with the name of the business operation. +Then we can POST the input for the business operation and get the result back. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/search

+
+
+
+

1.42. HTTP Methods

+
+

The following table defines the HTTP methods (verbs) and their meaning:

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3. Usage of HTTP methods
HTTP MethodMeaning

GET

Read data (stateless).

PUT

Create or update data.

POST

Process data.

DELETE

Delete an entity.

+
+

Please also note that for (large) bulk deletions you may be forced to use POST instead of DELETE as according to the HTTP standard DELETE must not have a payload and URLs are limited in length.

+
+
+

For general recommendations on HTTP methods for collection and element URLs see REST@wikipedia.

+
+
+
+

1.43. HTTP Status Codes

+
+

Further we define how to use the HTTP status codes for REST services properly. In general the 4xx codes correspond to an error on the client side and the 5xx codes to an error on the server side.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4. Usage of HTTP status codes
HTTP CodeMeaningResponseComment

200

OK

requested result

Result of successful GET

204

No Content

none

Result of successful POST, DELETE, or PUT with empty result (void return)

400

Bad Request

error details

The HTTP request is invalid (parse error, validation failed)

401

Unauthorized

none

Authentication failed

403

Forbidden

none

Authorization failed

404

Not found

none

Either the service URL is wrong or the requested resource does not exist

500

Server Error

error code, UUID

Internal server error occurred, in case of an exception, see REST exception handling

+
+
+

1.44. JAX-RS

+
+

For implementing REST services we use the JAX-RS standard. +As payload encoding we recommend JSON bindings using Jackson. +To implement a REST service you simply add JAX-RS annotations. +Here is a simple example:

+
+
+
+
@ApplicationScoped
+@Path("/imagemanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public class ImagemanagementRestService {
+
+  @Inject
+  private Imagemanagement imagemanagement;
+
+  @GET
+  @Path("/image/{id}/")
+  public ImageDto getImage(@PathParam("id") long id) {
+
+    return this.imagemanagement.findImage(id);
+  }
+}
+
+
+
+

Here we can see a REST service for the business component imagemanagement. The method getImage can be accessed via HTTP GET (see @GET) under the URL path imagemanagement/image/{id} (see @Path annotations) where {id} is the ID of the requested image and will be extracted from the URL and provided as parameter id to the method getImage. It will return its result (ImageDto) as JSON (see @Produces annotation - you can also extend RestService marker interface that defines these annotations for JSON). As you can see it delegates to the logic component imagemanagement that contains the actual business logic while the service itself only exposes this logic via HTTP. The REST service implementation is a regular CDI bean that can use dependency injection.

+
+
+ + + + + +
+ + +With JAX-RS it is important to make sure that each service method is annotated with the proper HTTP method (@GET,@POST,etc.) to avoid unnecessary debugging. So you should take care not to forget to specify one of these annotations. +
+
+
+
Service-Interface
+
+

You may also separate API and implementation in case you want to reuse the API for service-client:

+
+
+
+
@Path("/imagemanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public interface ImagemanagementRestService {
+
+  @GET
+  @Path("/image/{id}/")
+  ImageEto getImage(@PathParam("id") long id);
+
+}
+
+@Named("ImagemanagementRestService")
+public class ImagemanagementRestServiceImpl implements ImagemanagementRestService {
+
+  @Override
+  public ImageEto getImage(long id) {
+
+    return this.imagemanagement.findImage(id);
+  }
+
+}
+
+
+
+
+
JAX-RS Configuration
+
+

Starting from CXF 3.0.0 it is possible to enable the auto-discovery of JAX-RS roots.

+
+
+

When the jaxrs server is instantiated all the scanned root and provider beans (beans annotated with javax.ws.rs.Path and javax.ws.rs.ext.Provider) are configured.

+
+
+
+
REST Exception Handling
+
+

For exceptions a service needs to have an exception façade that catches all exceptions and handles them by writing proper log messages and mapping them to a HTTP response with an according HTTP status code. Therefore the devonfw provides a generic solution via RestServiceExceptionFacade. You need to follow the exception guide so that it works out of the box because the façade needs to be able to distinguish between business and technical exceptions. +Now your service may throw exceptions but the façade will automatically handle them for you.

+
+
+

The general format for returning an error to the client is as follows:

+
+
+
+
{
+  "message": "A human-readable message describing the error",
+  "code": "A code identifying the concrete error",
+  "uuid": "An identifier (generally the correlation id) to help identify corresponding requests in logs"
+}
+
+
+
+
+
Pagination details
+
+

We recommend to use spring-data repositories for database access that already comes with pagination support. +Therefore, when performing a search, you can include a Pageable object. +Here is a JSON example for it:

+
+
+
+
{ "pageSize": 20, "pageNumber": 0, "sort": [] }
+
+
+
+

By increasing the pageNumber the client can browse and page through the hits.

+
+
+

As a result you will receive a Page. +It is a container for your search results just like a Collection but additionally contains pagination information for the client. +Here is a JSON example:

+
+
+
+
{ "totalElements": 1022,
+  "pageable": { "pageSize": 20, "pageNumber": 0 },
+  "content": [ ... ] }
+
+
+
+

The totalElements property contains the total number of hits. +This can be used by the client to compute the total number of pages and render the pagination links accordingly. +Via the pageable property the client gets back the Pageable properties from the search request. +The actual hits for the current page are returned as array in the content property.

+
+
+
+
+

1.45. REST Testing

+
+

For testing REST services in general consult the testing guide.

+
+
+

For manual testing REST services there are browser plugins:

+
+
+ +
+
+
+

1.46. Security

+
+

Your services are the major entry point to your application. Hence security considerations are important here.

+
+
+
CSRF
+
+

A common security threat is CSRF for REST services. Therefore all REST operations that are performing modifications (PUT, POST, DELETE, etc. - all except GET) have to be secured against CSRF attacks. See CSRF how to do this.

+
+
+
+
JSON top-level arrays
+
+

OWASP earlier suggested to never return JSON arrays at the top-level, to prevent attacks without rationale. +We dug deep and found anatomy-of-a-subtle-json-vulnerability. +To sum it up the attack is many years old and does not work in any recent or relevant browser. +Hence it is fine to use arrays as top-level result in a JSON REST service (means you can return List<Foo> in a Java JAX-RS service).

+
+
+ +
+
+
+

1.47. JSON

+
+

JSON (JavaScript Object Notation) is a popular format to represent and exchange data especially for modern web-clients. For mapping Java objects to JSON and vice-versa there is no official standard API. We use the established and powerful open-source solution Jackson. +Due to problems with the wiki of fasterxml you should try this alternative link: Jackson/AltLink.

+
+
+
+

1.48. Configure JSON Mapping

+
+

In order to avoid polluting business objects with proprietary Jackson annotations (e.g. @JsonTypeInfo, @JsonSubTypes, @JsonProperty) we propose to create a separate configuration class. Every devonfw application (sample or any app created from our app-template) therefore has a class called ApplicationObjectMapperFactory that extends ObjectMapperFactory from the devon4j-rest module. It looks like this:

+
+
+
+
@Named("ApplicationObjectMapperFactory")
+public class ApplicationObjectMapperFactory extends ObjectMapperFactory {
+
+  public RestaurantObjectMapperFactory() {
+    super();
+    // JSON configuration code goes here
+  }
+}
+
+
+
+
+

1.49. JSON and Inheritance

+
+

If you are using inheritance for your objects mapped to JSON then polymorphism can not be supported out-of-the box. So in general avoid polymorphic objects in JSON mapping. However, this is not always possible. +Have a look at the following example from our sample application:

+
+
+
+inheritance class diagram +
+
Figure 2. Transfer-Objects using Inheritance
+
+
+

Now assume you have a REST service operation as Java method that takes a ProductEto as argument. As this is an abstract class the server needs to know the actual sub-class to instantiate. +We typically do not want to specify the classname in the JSON as this should be an implementation detail and not part of the public JSON format (e.g. in case of a service interface). Therefore we use a symbolic name for each polymorphic subtype that is provided as virtual attribute @type within the JSON data of the object:

+
+
+
+
{ "@type": "Drink", ... }
+
+
+
+

Therefore you add configuration code to the constructor of ApplicationObjectMapperFactory. Here you can see an example from the sample application:

+
+
+
+
setBaseClasses(ProductEto.class);
+addSubtypes(new NamedType(MealEto.class, "Meal"), new NamedType(DrinkEto.class, "Drink"),
+  new NamedType(SideDishEto.class, "SideDish"));
+
+
+
+

We use setBaseClasses to register all top-level classes of polymorphic objects. Further we declare all concrete polymorphic sub-classes together with their symbolic name for the JSON format via addSubtypes.

+
+
+
+

1.50. Custom Mapping

+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to define a custom mapping. If you create objects dedicated for the JSON mapping you can easily avoid such situations. When this is not suitable follow these instructions to define the mapping:

+
+
+
    +
  1. +

    As an example, the use of JSR354 (javax.money) is appreciated in order to process monetary amounts properly. However, without custom mapping, the default mapping of Jackson will produce the following JSON for a MonetaryAmount:

    +
    +
    +
    "currency": {"defaultFractionDigits":2, "numericCode":978, "currencyCode":"EUR"},
    +"monetaryContext": {...},
    +"number":6.99,
    +"factory": {...}
    +
    +
    +
    +

    As clearly can be seen, the JSON contains too much information and reveals implementation secrets that do not belong here. Instead the JSON output expected and desired would be:

    +
    +
    +
    +
    "currency":"EUR","amount":"6.99"
    +
    +
    +
    +

    Even worse, when we send the JSON data to the server, Jackson will see that MonetaryAmount is an interface and does not know how to instantiate it so the request will fail. +Therefore we need a customized Serializer.

    +
    +
  2. +
  3. +

    We implement MonetaryAmountJsonSerializer to define how a MonetaryAmount is serialized to JSON:

    +
    +
    +
    public final class MonetaryAmountJsonSerializer extends JsonSerializer<MonetaryAmount> {
    +
    +  public static final String NUMBER = "amount";
    +  public static final String CURRENCY = "currency";
    +
    +  public void serialize(MonetaryAmount value, JsonGenerator jgen, SerializerProvider provider) throws ... {
    +    if (value != null) {
    +      jgen.writeStartObject();
    +      jgen.writeFieldName(MonetaryAmountJsonSerializer.CURRENCY);
    +      jgen.writeString(value.getCurrency().getCurrencyCode());
    +      jgen.writeFieldName(MonetaryAmountJsonSerializer.NUMBER);
    +      jgen.writeString(value.getNumber().toString());
    +      jgen.writeEndObject();
    +    }
    +  }
    +
    +
    +
    +

    For composite datatypes it is important to wrap the info as an object (writeStartObject() and writeEndObject()). MonetaryAmount provides the information we need by the getCurrency() and getNumber(). So that we can easily write them into the JSON data.

    +
    +
  4. +
  5. +

    Next, we implement MonetaryAmountJsonDeserializer to define how a MonetaryAmount is deserialized back as Java object from JSON:

    +
    +
    +
    public final class MonetaryAmountJsonDeserializer extends AbstractJsonDeserializer<MonetaryAmount> {
    +  protected MonetaryAmount deserializeNode(JsonNode node) {
    +    BigDecimal number = getRequiredValue(node, MonetaryAmountJsonSerializer.NUMBER, BigDecimal.class);
    +    String currencyCode = getRequiredValue(node, MonetaryAmountJsonSerializer.CURRENCY, String.class);
    +    MonetaryAmount monetaryAmount =
    +        MonetaryAmounts.getAmountFactory().setNumber(number).setCurrency(currencyCode).create();
    +    return monetaryAmount;
    +  }
    +}
    +
    +
    +
    +

    For composite datatypes we extend from AbstractJsonDeserializer as this makes our task easier. So we already get a JsonNode with the parsed payload of our datatype. Based on this API it is easy to retrieve individual fields from the payload without taking care of their order, etc. +AbstractJsonDeserializer also provides methods such as getRequiredValue to read required fields and get them converted to the desired basis datatype. So we can easily read the amount and currency and construct an instance of MonetaryAmount via the official factory API.

    +
    +
  6. +
  7. +

    Finally we need to register our custom (de)serializers with the following configuration code in the constructor of ApplicationObjectMapperFactory:+

    +
  8. +
+
+
+
+
  SimpleModule module = getExtensionModule();
+  module.addDeserializer(MonetaryAmount.class, new MonetaryAmountJsonDeserializer());
+  module.addSerializer(MonetaryAmount.class, new MonetaryAmountJsonSerializer());
+
+
+
+

Now we can read and write MonetaryAmount from and to JSON as expected.

+
+
+ +
+
+

1.51. XML

+
+

XML (Extensible Markup Language) is a W3C standard format for structured information. It has a large eco-system of additional standards and tools.

+
+
+

In Java there are many different APIs and frameworks for accessing, producing and processing XML. For the devonfw we recommend to use JAXB for mapping Java objects to XML and vice-versa. Further there is the popular DOM API for reading and writing smaller XML documents directly. When processing large XML documents StAX is the right choice.

+
+
+
+

1.52. JAXB

+
+

We use JAXB to serialize Java objects to XML or vice-versa.

+
+
+
JAXB and Inheritance
+
+

Use @XmlSeeAlso annotation to provide sub-classes. +See section "Collective Polymorphism" described here.

+
+
+
+
JAXB Custom Mapping
+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to define a custom mapping. If you create dedicated objects for the XML mapping you can easily avoid such situations. When this is not suitable use @XmlJavaTypeAdapter and provide an XmlAdapter implementation that handles the mapping. +For details see here.

+
+
+
+
+

1.53. Security

+
+

To prevent XML External Entity attacks, follow JAXP Security Guide and enable FSP.

+
+
+ +
+
+

1.54. SOAP

+
+

SOAP is a common protocol for services that is rather complex and heavy. It allows to build inter-operable and well specified services (see WSDL). SOAP is transport neutral, which is not only an advantage. We strongly recommend to use HTTPS transport and ignore additional complex standards like WS-Security and use established HTTP-Standards such as RFC2617 (and RFC5280).

+
+
+
+

1.55. JAX-WS

+
+

For building web-services with Java we use the JAX-WS standard. +There are two approaches:

+
+
+
    +
  • +

    code first

    +
  • +
  • +

    contract first

    +
  • +
+
+
+

Here is an example in case you define a code-first service.

+
+
+
Web-Service Interface
+
+

We define a regular interface to define the API of the service and annotate it with JAX-WS annotations:

+
+
+
+
@WebService
+public interface TablemanagmentWebService {
+
+  @WebMethod
+  @WebResult(name = "message")
+  TableEto getTable(@WebParam(name = "id") String id);
+
+}
+
+
+
+
+
Web-Service Implementation
+
+

And here is a simple implementation of the service:

+
+
+
+
@Named
+@WebService(endpointInterface = "com.devonfw.application.mtsj.tablemanagement.service.api.ws.TablemanagmentWebService")
+public class TablemanagementWebServiceImpl implements TablemanagmentWebService {
+
+  private Tablemanagement tableManagement;
+
+  @Override
+  public TableEto getTable(String id) {
+
+    return this.tableManagement.findTable(id);
+  }
+
+
+
+
+
+

1.56. SOAP Custom Mapping

+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to write adapters for JAXB (see XML).

+
+
+
+

1.57. SOAP Testing

+
+

For testing SOAP services in general consult the testing guide.

+
+
+

For testing SOAP services manually we strongly recommend SoapUI.

+
+
+ +
+
+

1.58. Logging

+
+

We recommend to use SLF4J as API for logging, that has become a de facto standard in Java as it has a much better design than java.util.logging offered by the JDK. +There are several implementations for SLF4J. For Spring applications our recommended implementation is Logback. Quarkus uses JBoss Logging which provides a JBoss Log Manager implementation for SLF4J. For more information on logging in Quarkus, see the Quarkus logging guide.

+
+
+
+

1.59. Logging Dependencies

+
+

To use Logback in your Spring application, you need to include the following dependencies:

+
+
+
+
<!-- SLF4J as logging API -->
+<dependency>
+  <groupId>org.slf4j</groupId>
+  <artifactId>slf4j-api</artifactId>
+</dependency>
+<!-- Logback as logging implementation  -->
+<dependency>
+  <groupId>ch.qos.logback</groupId>
+  <artifactId>logback-classic</artifactId>
+</dependency>
+<!-- JSON logging for cloud-native log monitoring -->
+<dependency>
+  <groupId>net.logstash.logback</groupId>
+  <artifactId>logstash-logback-encoder</artifactId>
+</dependency>
+
+
+
+

In devon4j these dependencies are provided by the devon4j-logging module.

+
+
+

In Quarkus, SLF4J and the slf4j-jboss-logmanager are directly included in the Quarkus core runtime and can be used out of the box.

+
+
+
+

1.60. Logger Access

+
+

The general pattern for accessing loggers from your code is a static logger instance per class using the following pattern:

+
+
+
+
import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class MyClass {
+  private static final Logger LOG = LoggerFactory.getLogger(MyClass.class);
+  ...
+}
+
+
+
+

For detailed documentation how to use the logger API check the SLF4j manual.

+
+
+ + + + + +
+ + +In case you are using devonfw-ide and Eclipse you can just type LOG and hit [ctrl][space] to insert the code pattern including the imports into your class. +
+
+
+
Lombok
+
+

In case you are using Lombok, you can simply use the @Slf4j annotation in your class. This causes Lombok to generate the logger instance for you.

+
+
+
+
+

1.61. Log-Levels

+
+

We use a common understanding of the log-levels as illustrated by the following table. +This helps for better maintenance and operation of the systems.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 5. Log-levels
Log-levelDescriptionImpactActive Environments

FATAL

Only used for fatal errors that prevent the application to work at all (e.g. startup fails or shutdown/restart required)

Operator has to react immediately

all

ERROR

An abnormal error indicating that the processing failed due to technical problems.

Operator should check for known issue and otherwise inform development

all

WARNING

A situation where something worked not as expected. E.g. a business exception or user validation failure occurred.

No direct reaction required. Used for problem analysis.

all

INFO

Important information such as context, duration, success/failure of request or process

No direct reaction required. Used for analysis.

all

DEBUG

Development information that provides additional context for debugging problems.

No direct reaction required. Used for analysis.

development and testing

TRACE

Like DEBUG but exhaustive information and for code that is run very frequently. Will typically cause large log-files.

No direct reaction required. Used for problem analysis.

none (turned off by default)

+
+

Exceptions (with their stack trace) should only be logged on FATAL or ERROR level. For business exceptions typically a WARNING including the message of the exception is sufficient.

+
+
+
Configuration of Logback
+
+

The configuration of logback happens via the logback.xml file that you should place into src/main/resources of your app. +For details consult the logback configuration manual.

+
+
+ + + + + +
+ + +Logback also allows to overrule the configuration with a logback-test.xml file that you may put into src/test/resources or into a test-dependency. +
+
+
+
+
Configuration in Quarkus
+
+

There are several options you can set in the application.properties file to configure the behaviour of the logger in Quarkus. For a detailed overview, see the corresponding part of the Quarkus guide.

+
+
+
+
+

1.62. JSON-logging

+
+

For easy integration with log-monitoring, we recommend that your app logs to standard out in JSON following JSON Lines.

+
+
+

In Spring applications, this can be achieved via logstash-logback-encoder (see dependencies). In Quarkus, it can be easily achieved using the quarkus-logging-json extension (see here for more details).

+
+
+

This will produce log-lines with the following format (example formatted for readability):

+
+
+
+
{
+  "timestamp":"2000-12-31T23:59:59.999+00:00",
+  "@version":"1",
+  "message":"Processing 4 order(s) for shipment",
+  "logger_name":"com.myapp.order.logic.UcManageOrder",
+  "thread_name":"http-nio-8081-exec-6",
+  "level":"INFO",
+  "level_value":20000,
+  "appname":"myapp"
+}
+
+
+
+
Adding custom values to JSON log with Logstash
+
+

The JSON encoder even supports logging custom properties for your log-monitoring. +The trick is to use the class net.logstash.logback.argument.StructuredArguments for adding the arguments to you log message, e.g.

+
+
+
+
import static net.logstash.logback.argument.StructuredArguments.v;
+
+...
+    LOG.info("Request with {} and {} took {} ms.", v("url", url), v("status", statusCode), v("duration", millis));
+...
+
+
+
+

This will produce a JSON log-line with the following properties:

+
+
+
+
...
+  "message":"Request with url=https://api/service/v1/ordermanagement/order and status=200 took duration=251 ms",
+  "url":"https://api/service/v1/ordermanagement/order",
+  "status":"200",
+  "duration":"251",
+...
+
+
+
+

As you can quickly see besides the human readable message you also have the structured properties url, status and duration that can be extremely valuable to configure dashboards in your log-monitoring that visualize success/failure ratio as well as performance of your requests.

+
+
+
+
+

1.63. Classic log-files

+
+ + + + + +
+ + +In devon4j, we strongly recommend using JSON logging instead of classic log files. The following section refers only to devon4j Spring applications that use Logback. +
+
+
+

Even though we do not recommend anymore to write classical log-files to the local disc, here you can still find our approach for it.

+
+
+
Maven-Integration
+
+

In the pom.xml of your application add this dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java</groupId>
+  <artifactId>devon4j-logging</artifactId>
+</dependency>
+
+
+
+

The above dependency already adds transitive dependencies to SLF4J and logback. +Also it comes with configuration snippets that can be included from your logback.xml file (see configuration).

+
+
+

The logback.xml to write regular log-files can look as following:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<configuration scan="true" scanPeriod="60 seconds">
+  <property resource="com/devonfw/logging/logback/application-logging.properties" />
+  <property name="appname" value="MyApp"/>
+  <property name="logPath" value="../logs"/>
+  <include resource="com/devonfw/logging/logback/appenders-file-all.xml" />
+  <include resource="com/devonfw/logging/logback/appender-console.xml" />
+
+  <root level="DEBUG">
+    <appender-ref ref="ERROR_APPENDER"/>
+    <appender-ref ref="INFO_APPENDER"/>
+    <appender-ref ref="DEBUG_APPENDER"/>
+    <appender-ref ref="CONSOLE_APPENDER"/>
+  </root>
+
+  <logger name="org.springframework" level="INFO"/>
+</configuration>
+
+
+
+

The provided logback.xml is configured to use variables defined on the config/application.properties file. +On our example, the log files path point to ../logs/ in order to log to tomcat log directory when starting tomcat on the bin folder. +Change it according to your custom needs.

+
+
+
Listing 8. config/application.properties
+
+
log.dir=../logs/
+
+
+
+
+
Log Files
+
+

The classical approach uses the following log files:

+
+
+
    +
  • +

    Error Log: Includes log entries to detect errors.

    +
  • +
  • +

    Info Log: Used to analyze system status and to detect bottlenecks.

    +
  • +
  • +

    Debug Log: Detailed information for error detection.

    +
  • +
+
+
+

The log file name pattern is as follows:

+
+
+
+
«LOGTYPE»_log_«HOST»_«APPLICATION»_«TIMESTAMP».log
+
+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 6. Segments of Logfilename
ElementValueDescription

«LOGTYPE»

info, error, debug

Type of log file

«HOST»

e.g. mywebserver01

Name of server, where logs are generated

«APPLICATION»

e.g. myapp

Name of application, which causes logs

«TIMESTAMP»

YYYY-MM-DD_HH00

date of log file

+
+

Example: +error_log_mywebserver01_myapp_2013-09-16_0900.log

+
+
+

Error log from mywebserver01 at application myapp at 16th September 2013 9am.

+
+
+
+
Output format
+
+

We use the following output format for all log entries to ensure that searching and filtering of log entries work consistent for all logfiles:

+
+
+
+
[D: «timestamp»] [P: «priority»] [C: «NDC»][T: «thread»][L: «logger»]-[M: «message»]
+
+
+
+
    +
  • +

    D: Date (Timestamp in ISO8601 format e.g. 2013-09-05 16:40:36,464)

    +
  • +
  • +

    P: Priority (the log level)

    +
  • +
  • +

    C: Correlation ID (ID to identify users across multiple systems, needed when application is distributed)

    +
  • +
  • +

    T: Thread (Name of thread)

    +
  • +
  • +

    L: Logger name (use class name)

    +
  • +
  • +

    M: Message (log message)

    +
  • +
+
+
+

Example:

+
+
+
+
[D: 2013-09-05 16:40:36,464] [P: DEBUG] [C: 12345] [T: main] [L: my.package.MyClass]-[M: My message...]
+
+
+
+ + + + + +
+ + +When using devon4j-logging, this format is used by default. To achieve this format in Quarkus, set quarkus.log.console.format=[D: %d] [P: %p] [C: %X] [T: %t] [L: %c] [M: %m]%n in your properties. +
+
+
+
+
Correlation ID
+
+

In order to correlate separate HTTP requests to services belonging to the same user / session, we provide a servlet filter called DiagnosticContextFilter. +This filter takes a provided correlation ID from the HTTP header X-Correlation-Id. +If none was found, it will generate a new correlation id as UUID. +This correlation ID is added as MDC to the logger. +Therefore, it will then be included to any log message of the current request (thread). +Further concepts such as service invocations will pass this correlation ID to subsequent calls in the application landscape. Hence you can find all log messages related to an initial request simply via the correlation ID even in highly distributed systems.

+
+
+
+
Security
+
+

In order to prevent log forging attacks you can simply use the suggested JSON logging format. +Otherwise you can use com.devonfw.module.logging.common.impl.SingleLinePatternLayout as demonstrated here in order to prevent such attacks.

+
+
+ +
+
+
+

1.64. Monitoring

+
+

For monitoring a complex application landscape it is crucial to have an exact overview which applications are up and running and which are not and why. +In devonfw we only focus on topics which are most important when developing production-ready applications. +On a high level view we strongly suggest to separate the application to be monitored from the monitoring system itself. +Therefore, your application should concentrate on providing app specific data for the monitoring. +Aspects such as aggregation, visualization, search, alerting, etc. should be addressed outside of your app by a monitoring system product. +There are many products providing such a monitoring system like checkmk, icinga, SkyWalking, etc. +Please note that there is a huge list of such products and devonfw is not biased or aims to make a choice for you. +Instead please search and find the products that fit best for your requirements and infrastructure.

+
+
+
+

1.65. Types of monitoring

+
+

As monitoring covers a lot of different aspects we separate the following types of monitoring and the corresponding data:

+
+
+
    +
  • +

    Log-monitoring
    +is about collecting and monitoring the logs of all apps and containers in your IT landscape. It is suitable for events such as an HTTP request with its URL, resulting status code and duration in milliseconds. Your monitoring may not react to such data in realtime. Instead it may take a delay of one or a few seconds.

    +
  • +
  • +

    Infrastructure monitoring
    +is about monitoring the (hardware) infrastructure with measures like usage of CPU, memory, disc-space, etc. This is a pure operational task and your app should have nothing to do with this. In other words it is a waste if your app tries to monitor these aspects as existing products can do this much better and your app will only see virtual machines and is unable to see the physical infrastructure.

    +
  • +
  • +

    Health check
    +is about providing internal data about the current health of your app. Typically you provide sensors with health status per component or interface to neighbour service (database connectivity, etc.).

    +
  • +
  • +

    Application Performance Monitoring
    +is about measuring performance and tracing down performance issues.

    +
  • +
+
+
+
+

1.66. Health-Check

+
+

The idea of a health check is to provide monitoring data about the current health status of your application. +This allows to integrate this specific data into the monitoring system used for your IT landscape. +In order to keep the monitoring simple and easy to integrate consider using the following best practices:

+
+
+
    +
  • +

    Use simple and established protocols such as REST instead of JMX via RMI.

    +
  • +
  • +

    Consider using recent standards such as microprofile-health.

    +
  • +
  • +

    Consider to drop access-control for your monitoring interfaces and for security prevent external access to it in your infrastructure (loadbalancers or gateways). Monitoring is only for usage within an IT landscape internally. It does not make sense for externals and end-users to access your app for reading monitoring data from a random node decided by a loadbalancer. Further, external access can easily lead to sensitive data exposure.

    +
  • +
  • +

    Consider to define different end-points per usage-scenario. So if you want the loadbalancer to ask your app monitoring for availability of each node then create a separate service URL that only provides OK or anything else for failure (NOK, 404, 500, timeout). Do not mix this with a health-check that needs more detailed information.

    +
  • +
  • +

    Also do not forget about basic features such as providing the name and the release version of your application.

    +
  • +
  • +

    Be careful to automate decisions based on monitoring and health checks. It easily turns out to be stupid if you automatically restart your pod or container because of some monitoring indicator. In the worst case a failure of a central component will cause your health-check to report down for all apps and as a result all your containers will be restarted frequently. Instead of curing problems such decisions will cause much more harm and trouble.

    +
  • +
  • +

    Avoid causing reasonable load with your monitoring and health-check itself. In many cases it is better to use log-monitoring or to collect monitoring data from use-cases that happen in your app anyway. If you create dummy read and write requests in your monitoring implementation you will easily turn it into a DOS-attack.

    +
  • +
+
+
+

For spring you can simply integrate app monitoring and health check via spring-boot-actuator.

+
+
+

For quarkus you can simply integrate app monitoring via micrometer or smallrye-metrics and health check via smallrye-health.

+
+ +
+
+

1.67. Log-Monitoring

+
+

Log-monitoring is an aspect of monitoring with a strict focus on logging. +With trends towards IT landscapes with many but much smaller apps the classical approach to write log-files to the disc and let operators read those via SSH became entirely obsolete. +Nowadays we have up to hundreds or even thousands of apps that themselves are clustered into multiple nodes. +Therefore you should establish a centralized log monitoring system in the environment and let all your nodes log directly into that system. +This approach gives the following benefits:

+
+
+
    +
  • +

    all log information available in one place

    +
  • +
  • +

    full-text search across all logfiles

    +
  • +
  • +

    ability to automatically trigger alerts from specific log patterns

    +
  • +
  • +

    ability to do data-mining on logs and visualize in dashboards

    +
  • +
+
+
+
+

1.68. Options for log-monitoring

+
+

Typical products for such a log monitoring system are:

+
+
+ +
+
+

In devonfw we are not biased for any of these products. Therefore, feel free to make your choice according to the requirements of your project.

+
+
+

For Quarkus applications, you can get an insight into the topic by reading the guide about centralized log management.

+
+
+
+

1.69. API for log-monitoring

+
+

The "API" for logging to a log-monitoring system for your app is pretty simple:

+
+
+
    +
  • +

    Write your logs to standard out.

    +
  • +
  • +

    Use JSON logging as format.

    +
  • +
+
+
+

Then the container infrastructure can automatically collect your logs from standard out and directly feed those into the log monitoring system. +As a result, your app does not need to know anything about your log monitoring system and logging becomes most simple. +Further, if you do not write log-files anymore, you might not need to write any other files and therefore may not even need write permissions on the filesystem of your container. +In such case an attacker who may find a vulnerability in your app will have less attack surface in case he can not write any file.

+
+ +
+
+

1.70. Application Performance Management

+
+

This guide gives hints how to manage, monitor and analyse performance of Java applications.

+
+
+
+

1.71. Temporary Analysis

+
+

If you are facing performance issues and want to do an ad-hoc analysis, we recommend using glowroot. It is ideal in cases where monitoring in your local development environment is suitable. However, it is also possible to use it in your test environment. It is entirely free and open-source. Still it is very powerful and helps to trace down bottlenecks. To get a first impression of the tool take a look at the demo.

+
+
+
JEE/WTP
+
+

In case you are forced to use a JEE application server and want to do a temporary analysis you can double click your server instance from the servers view in Eclipse and click on the link Open launch configuration in order to add the -javaagent JVM option.

+
+
+
+
+

1.72. Regular Analysis

+
+

In case you want to manage application performance regularly we recommend to use JavaMelody that can be integrated into your application. More information on javamelody is available on the JavaMelody Wiki

+
+
+
+

1.73. Alternatives

+
+ +
+
+ +
+
+

1.74. Security

+
+

Security is today's most important cross-cutting concern of an application and an enterprise IT-landscape. We seriously care about security and give you detailed guides to prevent pitfalls, vulnerabilities, and other disasters. While many mistakes can be avoided by following our guidelines you still have to consider security and think about it in your design and implementation. The security guide will not automatically prevent you from any harm, but it will provide you with hints and best practices already used in different software products.

+
+
+

An important aspect of security is proper authentication and authorization as described in access-control. In the following we discuss potential vulnerabilities and protection mechanisms to prevent them.

+
+
+
+

1.75. Vulnerabilities and Protection

+
+

Independent from classical authentication and authorization mechanisms there are many common pitfalls that can lead to vulnerabilities and security issues in your application such as XSS, CSRF, SQL-injection, log-forging, etc. A good source of information about this is the OWASP. +We address these common threats individually in security sections of our technological guides as a concrete solution to prevent an attack typically depends on the according technology. The following table illustrates common threats and contains links to the solutions and protection-mechanisms provided by the devonfw:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 7. Security threats and protection-mechanisms
ThreatProtectionLink to details

A1 Injection

validate input, escape output, use proper frameworks

SQL Injection

A2 Broken Authentication

encrypt all channels, use a central identity management with strong password-policy

Authentication

A3 Sensitive Data Exposure

Use secured exception facade, design your data model accordingly

REST exception handling

A4 XML External Entities

Prefer JSON over XML, ensure FSP when parsing (external) XML

XML guide

A5 Broken Access Control

Ensure proper authorization for all use-cases, use @DenyAll as default to enforce

Access-control guide especially method authorization

A6 Security Misconfiguration

Use devon4j application template and guides to avoid

tutorial-newapp and sensitive configuration

A7 Cross-Site Scripting

prevent injection (see A1) for HTML, JavaScript and CSS and understand same-origin-policy

client-layer

A8 Insecure Deserialization

Use simple and established serialization formats such as JSON, prevent generic deserialization (for polymorphic types)

JSON guide especially inheritance, XML guide

A9 Using Components with Known Vulnerabilities

subscribe to security newsletters, recheck products and their versions continuously, use devonfw dependency management

CVE newsletter and dependency check

A10 Insufficient_Logging & Monitoring

Ensure to log all security related events (login, logout, errors), establish effective monitoring

Logging guide and monitoring guide

Insecure Direct Object References

Using direct object references (IDs) only with appropriate authorization

logic-layer

Cross-Site Request Forgery (CSRF)

secure mutable service operations with an explicit CSRF security token sent in HTTP header and verified on the server

CSRF guide

Log-Forging

Escape newlines in log messages

logging security

Unvalidated Redirects and Forwards

Avoid using redirects and forwards, in case you need them do a security audit on the solution.

devonfw proposes to use rich-clients (SPA/RIA). We only use redirects for login in a safe way.

+
+
+

1.76. Advanced Security

+
+

While OWASP Top 10 covers the basic aspects of application security, there are advanced standards such as ASVS. +In devonfw we address this in the +Application Security Quick Solution Guide.

+
+
+
+

1.77. Tools

+
+
Dependency Check
+
+

To address the threat Using Components with Known Vulnerabilities we recommend using OWASP dependency check that ships with a maven plugin and can analyze your dependencies for known CVEs. +In order to run this check, you can simply call this command on any maven project:

+
+
+
+
mvn org.owasp:dependency-check-maven:6.1.5:aggregate
+
+
+
+ + + + + +
+ + +The version is just for completeness. You should check yourself for using a recent version of the plugin. +
+
+
+

If you build a devon4j spring application from our app-template you can activate the dependency check even easier with the security profile:

+
+
+
+
mvn clean install -P security
+
+
+
+

This does not run by default as it causes some overhead for the build performance. However, consider running this in your CI at least nightly. +After the dependency check is performed, you will find the results in target/dependency-check-report.html of each module. The report will also be generated when the site is built (mvn site) even without the profile.

+
+
+
+
Penetration Testing
+
+

For penetration testing (testing for vulnerabilities) of your web application, we recommend the following tools:

+
+
+ +
+
+ +
+
+
+

1.78. CORS support

+
+

When you are developing a Javascript client and a server application separately, you have to deal with cross-domain issues. We have to send requests from an origin domain to a distinct target domain and the browser does not allow this.

+
+
+

So, we need to prepare the server side to accept requests from other domains. We need to cover the following points:

+
+
+
    +
  • +

    Accept request from other domains.

    +
  • +
  • +

    Accept devonfw used headers like X-CSRF-TOKEN or correlationId.

    +
  • +
  • +

    Be prepared to receive secured request (cookies).

    +
  • +
+
+
+

It is important to note that if you are using security in your request (sending cookies) you have to set withCredentials flag to true in your client side request and deal with special IE8 characteristics.

+
+
+

For more information about CORS see here. Information about the CORS headers can be found here.

+
+
+
+

1.79. Configuring CORS support

+
+

To enable CORS support for your application, see the advanced guides. For Spring applications see here. For Quarkus follow the official Quarkus guide.

+
+
+
+

1.80. Configuration with service mesh

+
+

If you are using a service mesh, you can also define your CORS policy directly there. Here is an example from Istio.

+
+
+ +
+
+

1.81. Java Development Kit

+
+

The Java Development Kit is an implementation of the Java platform. It provides the Java Virtual Machine (JVM) and the Java Runtime Environment (JRE).

+
+
+
+

1.82. Editions

+
+

The JDK exists in different editions:

+
+
+ +
+
+

As Java is evolving and also complex, maintaining a JVM requires a lot of energy. +Therefore many alternative JDK editions are unable to cope with this and support the latest Java versions with full compatibility. +Unfortunately OpenJDK only maintains a specific version of Java for a relatively short period of time before moving to the next major version. +In the end, this technically means that OpenJDK is a continuous beta and can not be used in production for reasonable software projects. +As OracleJDK changed its licensing model and can not be used for commercial usage even during development, things can get tricky. +You may want to use OpenJDK for development and OracleJDK only in production. +However, e.g. OpenJDK 11 never released a version that is stable enough for reasonable development (e.g. the javadoc tool is broken and fixes are not available for OpenJDK 11 - fixed in 11.0.3, which is only available as OracleJDK 11, or you need to go to OpenJDK 12+, which has other bugs) so in the end there is no working release of OpenJDK 11. +This more or less forces you to use OracleJDK, which requires you to buy a subscription so you can use it for commercial development. +However, there is AdoptOpenJDK that provides forked releases of OpenJDK with bug-fixes, which might be an option. +Anyhow, as you want to have your development environment close to production, the productively used JDK (most likely OracleJDK) should be preferred also for development.

+
+
+
+

1.83. Upgrading

+
+

Until Java 8 compatibility was one of the key aspects for Java version updates (after the mess on the Swing updates with Java2 many years ago). +However, Java 9 introduced a lot of breaking changes. +This documentation wants to share the experience we collected in devonfw when upgrading from Java 8 to newer versions. +First of all we separate runtime changes that you need if you want to build your software with JDK 8 but such that it can also run on newer versions (e.g. JRE 11) +from changes required to also build your software with more recent JDKs (e.g. JDK 11 or 12).

+
+
+
Runtime Changes
+
+

This section describes required changes to your software in order to make it run also with versions newer than Java 8.

+
+
+
Classes removed from JDK
+
+

The first thing that most users hit when running their software with newer Java versions is a ClassNotFoundException like this:

+
+
+
+
Caused by: java.lang.ClassNotFoundException: javax.xml.bind.JAXBException
+
+
+
+

As Java 9 introduced a module system with Jigsaw, the JDK that has been a monolithic mess is now a well-defined set of structured modules. +Some of the classes that used to come with the JDK moved to modules that were not available by default in Java 9 and have even been removed entirely in later versions of Java. +Therefore you should simply treat such code just like any other 3rd party component that you can add as a (maven) dependency. +The following table gives you the required hints to make your software work even with such classes / modules removed from the JDK (please note that the specified version is just a suggestion that worked, feel free to pick a more recent or more appropriate version):

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 8. Dependencies for classes removed from Java 8 since 9+
ClassGroupIdArtifactIdVersion

javax.xml.bind.*

javax.xml.bind

jaxb-api

2.3.1

com.sun.xml.bind.*

org.glassfish.jaxb

jaxb-runtime

2.3.1

java.activation.*

javax.activation

javax.activation-api

1.2.0

java.transaction.*

javax.transaction

javax.transaction-api

1.2

java.xml.ws.*

javax.xml.ws

jaxws-api

2.3.1

javax.jws.*

javax.jws

javax.jws-api

1.1

javax.annotation.*

javax.annotation

javax.annotation-api

1.3.2

+
+
+
3rd Party Updates
+
+

Further, internal and unofficial APIs (e.g. sun.misc.Unsafe) have been removed. +These are typically not used by your software directly but by low-level 3rd party libraries like asm that need to be updated. +Also simple things like the Java version have changed (from 1.8.x to 9.x, 10.x, 11.x, 12.x, etc.). +Some 3rd party libraries were parsing the Java version in a very naive way making them unable to be used with Java 9+:

+
+
+
+
Caused by: java.lang.NullPointerException
+   at org.apache.maven.surefire.shade.org.apache.commons.lang3.SystemUtils.isJavaVersionAtLeast (SystemUtils.java:1626)
+
+
+
+

Therefore the following table gives an overview of common 3rd party libraries that have been affected by such breaking changes and need to be updated to at least the specified version:

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 9. Minimum recommended versions of common 3rd party for Java 9+
GroupIdArtifactIdVersionIssue

org.apache.commons

commons-lang3

3.7

LANG-1365

cglib

cglib

3.2.9

102, 93, 133

org.ow2.asm

asm

7.1

2941

org.javassist

javassist

3.25.0-GA

194, 228, 246, 171

+
+
+
ResourceBundles
+
+

For internationalization (i18n) and localization (l10n) ResourceBundle is used for language and country specific texts and configurations as properties (e.g. MyResourceBundle_de.properties). With Java modules there are changes and impacts you need to know to get things working. The most important change is documented in the JavaDoc of ResourceBundle. However, instead of using ResourceBundleProvider and refactoring your entire code causing incompatibilities, you can simply put the resource bundles in a regular JAR on the classpath rather than a named module (or into the launching app). +If you want to implement (new) Java modules with i18n support, you can have a look at mmm-nls.

+
+
+
+
+
Buildtime Changes
+
+

If you also want to change your build to work with a recent JDK you also need to ensure that test frameworks and maven plugins properly support this.

+
+
+
Findbugs
+
+

Findbugs does not work with Java 9+ and is actually a dead project. +The new findbugs is SpotBugs. +For maven the new solution is spotbugs-maven-plugin:

+
+
+
+
<plugin>
+  <groupId>com.github.spotbugs</groupId>
+  <artifactId>spotbugs-maven-plugin</artifactId>
+  <version>3.1.11</version>
+</plugin>
+
+
+
+
+
Test Frameworks
+ + ++++++ + + + + + + + + + + + + + + + + +
Table 10. Minimum recommended versions of common 3rd party test frameworks for Java 9+
GroupIdArtifactIdVersionIssue

org.mockito

mockito-core

2.23.4

1419, 1696, 1607, 1594, 1577, 1482

+
+
+
Maven Plugins
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 11. Minimum recommended versions of common maven plugins for Java 9+
GroupIdArtifactId(min.) VersionIssue

org.apache.maven.plugins

maven-compiler-plugin

3.8.1

x

org.apache.maven.plugins

maven-surefire-plugin

2.22.2

SUREFIRE-1439

org.apache.maven.plugins

maven-surefire-report-plugin

2.22.2

SUREFIRE-1439

org.apache.maven.plugins

maven-archetype-plugin

3.1.0

x

org.apache.maven.plugins

maven-javadoc-plugin

3.1.0

x

org.jacoco

jacoco-maven-plugin

0.8.3

663

+
+
+
Maven Usage
+
+

With Java modules you can not run Javadoc standalone anymore or you will get this error when running mvn javadoc:javadoc:

+
+
+
+
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-javadoc-plugin:3.1.1:javadoc (default-cli) on project mmm-base: An error has occurred in Javadoc report generation:
+[ERROR] Exit code: 1 - error: module not found: io.github.mmm.base
+[ERROR]
+[ERROR] Command line was: /projects/mmm/software/java/bin/javadoc @options @packages @argfile
+
+
+
+

As a solution or workaround you need to include the compile goal into your build lifecycle so the module-path is properly configured:

+
+
+
+
mvn compile javadoc:javadoc
+
+
+
+
+
+
+ +
+

We want to give credits and say thanks to the following articles that have been there before and helped us on our way:

+
+ +
+ +
+
+

1.85. JEE

+
+

This section is about Java Enterprise Edition (JEE). +According to our key principles we focus on open standards. +For Java this means that we consider official standards from Java Standard and Enterprise Edition as the first choice. +Therefore we also decided to recommend JAX-RS over SpringMVC as the latter is proprietary. +Only if an existing Java standard is not suitable for current demands such as Java Server Faces (JSF), we do not officially recommend it (while you are still free to use it if you have good reasons to do so). +In all other cases we officially suggest the according standard and use it in our guides, code-samples, sample application, modules, templates, etc. +Examples for such standards are JPA, JAX-RS, JAX-WS, JSR330, JSR250, JAX-B, etc.

+
+
+
+

1.86. Application-Server

+
+

We designed everything based on standards to work with different technology stacks and servlet containers. +However, we strongly encourage you to use modern and lightweight frameworks such as spring or quarkus. +You are free to decide for a JEE application server but here is a list of good reasons for our decision:

+
+
+
    +
  • +

    Up-to-date

    +
    +

    With spring or quarkus you easily keep up to date with evolving technologies (microservices, reactive, NoSQL, etc.). +Most application servers put you in a jail with old legacy technology. +In many cases you are even forced to use a totally outdated version of java (JVM/JDK). +This may even cause severe IT-Security vulnerabilities but with expensive support you might get updates. +Also with lightweight open-source frameworks you need to be aware that for IT-security you need to update frequently, which can cost quite a lot of additional maintenance effort.

    +
    +
  • +
  • +

    Development speed

    +
    +

    With spring-boot you can implement and especially test your individual logic very fast. Starting the app in your IDE is very easy, fast, and realistic (close to production). You can easily write JUnit tests that startup your server application to e.g. test calls to your remote services via HTTP fast and easy. For application servers you need to bundle and deploy your app what takes more time and limits you in various ways. We are aware that this has improved in the past but also spring continuously improves and is always way ahead in this area. Further, with spring you have your configurations bundled together with the code in version control (still with ability to handle different environments) while with application servers these are configured externally and can not be easily tested during development.

    +
    +
  • +
  • +

    Documentation

    +
    +

    Spring and also quarkus have an extremely open and active community. +There is documentation for everything available for free on the web. +You will find solutions to almost any problem on platforms like stackoverflow. +If you have a problem you are only a google search away from your solution. +This is very much different for proprietary application server products.

    +
    +
  • +
  • +

    Helpful Exception Messages

    +
    +

    Especially spring is really great for developers on exception messages. +If you do something wrong you get detailed and helpful messages that guide you to the problem or even the solution. +This is not as great in application servers.

    +
    +
  • +
  • +

    Future-proof

    +
    +

    Spring has evolved really awesome over time. +Since its 1.0 release in 2004 spring has continuously been improved and always caught up with important trends and innovations. +Even in critical situations, when the company behind it (interface21) was sold, spring went on perfectly. +Quarkus on the other hand is relatively new. +It does not have to carry a large legacy history and is therefore most state-of-the-art for modern projects esp. in cloud environments. +JEE went through a lot of trouble and crisis. +Just look at the EJB pain stories. +This happened often in the past and also recent. +See JEE 8 in crisis.

    +
    +
  • +
  • +

    Free

    +
    +

    Spring and quarkus including their ecosystems are free and open-source. +It still perfectly integrates with commercial solutions for specific needs. +Most application servers are commercial and cost a lot of money. +As of today the ROI for this is of question.

    +
    +
  • +
  • +

    Cloud-native

    +
    +

    Quarkus is designed for cloud-native projects from the start. +With spring this is also available via spring-native. +Using an application server will effectively prevent you from going to the cloud smoothly.

    +
    +
  • +
  • +

    Fun

    +
    +

    If you go to conferences or ask developers you will see that spring or quarkus is popular and fun. +If new developers are forced to use an old application server product they will be less motivated or even get frustrated. +Especially in today’s agile projects this is a very important aspect. +In the end you will get into trouble with maintenance on the long run if you rely on a proprietary application server.

    +
    +
  • +
+
+
+

Of course the vendors of application servers will tell you a different story. +This is simply because they still make a lot of money from their products. +We do not get paid from application servers nor from spring, quarkus or any other IT product company. +We are just developers who love to build great systems. +A good reason for application servers is that they combine a set of solutions to particular aspects to one product that helps to standardize your IT. +However, devonfw fills exactly this gap for the spring and quarkus ecosystems in a very open and flexible way. +However, there is one important aspect that you need to understand and be aware of:

+
+
+

Some big companies decided for a specific application server as their IT strategy. +They may have hundreds of apps running with this application server. +All their operators and developers have learned a lot of specific skills for this product and are familiar with it. +If you are implementing yet another (small) app in this context it could make sense to stick with this application server. +However, also they have to be aware that with every additional app they increase their technical debt. +So actively help your customer and consult him to make the right choices for the future.

+
+
+ +
+
+

1.87. Validation

+
+

Validation is about checking syntax and semantics of input data. Invalid data is rejected by the application. +Therefore validation is required in multiple places of an application. E.g. the GUI will do validation for usability reasons to assist the user, early feedback and to prevent unnecessary server requests. +On the server-side validation has to be done for consistency and security.

+
+
+

In general we distinguish these forms of validation:

+
+
+
    +
  • +

    stateless validation will produce the same result for given input at any time (for the same code/release).

    +
  • +
  • +

    stateful validation is dependent on other states and can consider the same input data as valid in once case and as invalid in another.

    +
  • +
+
+
+
+

1.88. Stateless Validation

+
+

For regular, stateless validation we use the JSR303 standard that is also called bean validation (BV). +Details can be found in the specification. +As implementation we recommend hibernate-validator.

+
+
+
Example
+
+

A description of how to enable BV for spring applications can be found in the relevant Spring documentation. A guide you can use to integrate validation in Quarkus applications can be found here. For a quick summary follow these steps:

+
+
+
    +
  • +

    Make sure that hibernate-validator is located in the classpath by adding a dependency to the pom.xml.

    +
  • +
+
+
+
Listing 9. spring
+
+
    <dependency>
+      <groupId>org.hibernate</groupId>
+      <artifactId>hibernate-validator</artifactId>
+    </dependency>
+
+
+
+
Listing 10. quarkus
+
+
    <dependency>
+      <groupId>io.quarkus</groupId>
+      <artifactId>quarkus-hibernate-validator</artifactId>
+    </dependency>
+
+
+
+
    +
  • +

    For methods to validate go to their declaration and add constraint annotations to the method parameters.

    +
    +

    In spring applications you can add the @Validated annotation to the implementation (spring bean) to be validated (this is an annotation of the spring framework, so it`s not available in the Quarkus context). The standard use case is to annotate the logic layer implementation, i.e. the use case implementation or component facade in case of simple logic layer pattern. Thus, the validation will be executed for service requests as well as batch processing.

    +
    +
    +
      +
    • +

      @Valid annotation to the arguments to validate (if that class itself is annotated with constraints to check).

      +
    • +
    • +

      @NotNull for required arguments.

      +
    • +
    • +

      Other constraints (e.g. @Size) for generic arguments (e.g. of type String or Integer). However, consider to create custom datatypes and avoid adding too much validation logic (especially redundant in multiple places).

      +
    • +
    +
    +
  • +
+
+
+
Listing 11. BookingmanagementRestServiceImpl.java
+
+
@Validated
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+  ...
+  public BookingEto saveBooking(@Valid BookingCto booking) {
+  ...
+
+
+
+
    +
  • +

    Finally add appropriate validation constraint annotations to the fields of the ETO class.

    +
  • +
+
+
+
Listing 12. BookingCto.java
+
+
  @Valid
+  private BookingEto booking;
+
+
+
+
Listing 13. BookingEto.java
+
+
  @NotNull
+  @Future
+  private Timestamp bookingDate;
+
+
+
+

A list with all bean validation constraint annotations available for hibernate-validator can be found here. In addition it is possible to configure custom constraints. To do so, it is necessary to implement an annotation and a corresponding validator. A description can also be found in the Spring documentation or with more details in the hibernate documentation.

+
+
+ + + + + +
+ + +Bean Validation in Wildfly >v8: Wildfly v8 is the first version of Wildfly implementing the JEE7 specification. It comes with bean validation based on hibernate-validator out of the box. In case someone is running Spring in Wildfly for whatever reasons, the spring based annotation @Validated would duplicate bean validation at runtime and thus should be omitted. +
+
+
+
+
GUI-Integration
+
+

TODO

+
+
+
+
Cross-Field Validation
+
+

BV has poor support for this. Best practice is to create and use beans for ranges, etc. that solve this. A bean for a range could look like so:

+
+
+
+
public class Range<V extends Comparable<V>> {
+
+  private V min;
+  private V max;
+
+  public Range(V min, V max) {
+
+    super();
+    if ((min != null) && (max != null)) {
+      int delta = min.compareTo(max);
+      if (delta > 0) {
+        throw new ValueOutOfRangeException(null, min, min, max);
+      }
+    }
+    this.min = min;
+    this.max = max;
+  }
+
+  public V getMin() ...
+  public V getMax() ...
+
+
+
+
+
+

1.89. Stateful Validation

+
+

For complex and stateful business validations we do not use BV (possible with groups and context, etc.) but follow KISS and just implement this on the server in a straight forward manner. +An example is the deletion of a table in the example application. Here the state of the table must be checked first:

+
+
+

BookingmanagementImpl.java

+
+
+
+
  private void sendConfirmationEmails(BookingEntity booking) {
+
+    if (!booking.getInvitedGuests().isEmpty()) {
+      for (InvitedGuestEntity guest : booking.getInvitedGuests()) {
+        sendInviteEmailToGuest(guest, booking);
+      }
+    }
+
+    sendConfirmationEmailToHost(booking);
+  }
+
+
+
+

Implementing this small check with BV would be a lot more effort.

+
+
+ +
+
+

1.90. Bean-Mapping

+
+

For decoupling, you sometimes need to create separate objects (beans) for a different view. E.g. for an external service, you will use a transfer-object instead of the persistence entity so internal changes to the entity do not implicitly change or break the service.

+
+
+

Therefore you have the need to map similar objects what creates a copy. This also has the benefit that modifications to the copy have no side-effect on the original source object. However, to implement such mapping code by hand is very tedious and error-prone (if new properties are added to beans but not to mapping code):

+
+
+
+
public UserEto mapUser(UserEntity source) {
+  UserEto target = new UserEto();
+  target.setUsername(source.getUsername());
+  target.setEmail(source.getEmail());
+  ...
+  return target;
+}
+
+
+
+

Therefore we are using a BeanMapper for this purpose that makes our lives a lot easier. +There are several bean mapping frameworks with different approaches.

+
+
+

For a devon4j-spring application we recommend Orika, follow Spring Bean-Mapping for an introduction to Orika and Dozer in a devon4j-spring context application.

+
+
+ + + + + +
+ + +devon4j started with Dozer as framework for Spring applications and still supports it. However, we now recommend Orika (for new projects) as it is much faster (see Performance of Java Mapping Frameworks). +
+
+
+

For a Quarkus application we recommend Mapstruct, follow Quarkus Bean-Mapping for an introduction to Mapstruct in a quarkus context application.

+
+ +
+
+

1.91. Lombok

+
+

Lombok is a library that works with an annotation processor and will generate code for you to save you some time and reduce the amount of boilerplate code in your project. Lombok can generate getter and setter, equals methods, automate your logging variables for your classes, and more. Follow the list of all the features provided by Lombok to get an overview.

+
+
+
+

1.92. Lombok Dependency

+
+

To get access to the Lombok library just add the following dependency to the POM.xml.

+
+
+

The Lombok dependency:

+
+
+
+
<dependency>
+	<groupId>org.projectlombok</groupId>
+	<artifactId>lombok</artifactId>
+	<version>1.18.20</version>
+</dependency>
+
+
+
+

To get Lombok working with your current IDE you should also install the Lombok addon. Follow the Eclipse installation guide, there are also guides for other supported IDEs.

+
+
+
+

1.93. Lombok with Mapstruct

+
+

MapStruct takes advantage of generated getters, setters, and constructors from Lombok and uses them to +generate the mapper implementations. Lombok is also an annotation processor and since version 1.18.14 both frameworks are working together. Just add the lombok-mapstruct-binding to your POM.xml.

+
+
+

The Lombok annotation processor and the lombok-mapstruct-binding

+
+
+
+
<dependency>
+	<groupId>org.projectlombok</groupId>
+	<artifactId>lombok-mapstruct-binding</artifactId>
+	<version>0.2.0</version>
+</dependency>
+
+<plugin>
+	<groupId>org.apache.maven.plugins</groupId>
+	<artifactId>maven-compiler-plugin</artifactId>
+	<version>3.8.1</version>
+	<configuration>
+		<source>1.8</source>
+		<target>1.8</target>
+		<annotationProcessorPaths>
+			<path>
+				<groupId>org.projectlombok</groupId>
+				<artifactId>lombok</artifactId>
+				<version>1.18.4</version>
+			</path>
+			<path>
+				<groupId>org.projectlombok</groupId>
+				<artifactId>lombok-mapstruct-binding</artifactId>
+				<version>0.2.0</version>
+			</path>
+		</annotationProcessorPaths>
+	</configuration>
+</plugin>
+
+
+
+

In our quarkus reference project you can get a look into the usage of both frameworks.

+
+
+
+

1.94. Lombok Usage

+
+

Lombok can be used like any other annotation processor and will be shown in the simple example below to generate getter and setter for a Product Entity.

+
+
+
+
@Getter
+@Setter
+public class Product{
+
+    private String title;
+    private String description;
+    private BigDecimal price;
+}
+
+
+
+

For advanced Lombok usage, follow the Baeldung Lombok guide or just read the Lombok javadoc.

+
+
+ +
+
+

1.95. OpenAPI

+
+

The OpenAPI Specification (OAS) defines a standard for describing RESTful web services in a machine- and human-readable format. OpenAPI allows REST APIs to be defined in a uniform manner. +Technically, an OpenAPI document is written in YAML or JSON format. The specification defines the structure of a REST API by describing attributes such as path information, response codes, and return types. Some examples can be found here.

+
+
+

OpenAPI is often used in combination with Swagger. Swagger is a set of tools built around OpenAPI that help developers to design and document their REST APIs. +The most common tool is the Swagger UI, which uses the OpenAPI specification to create a graphical interface of the REST API that you can also interact with. Check out the Swagger online editor to get a feeling for it.

+
+
+
+

1.96. OpenAPI generation

+
+

There are several extensions you can use in your project to automatically generate the OpenAPI specifications and Swagger UI from your REST API (code-first approach). devon4j recommends the following two extensions/plugins to use:

+
+
+
    +
  • +

    Smallrye OpenAPI extension

    +
  • +
  • +

    ServicedocGen maven plugin

    +
  • +
+
+
+
Smallrye OpenAPI
+
+

Quarkus provides OpenAPI support through Smallrye OpenAPI extension:

+
+
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-smallrye-openapi</artifactId>
+</dependency>
+
+
+
+

After adding the extension to your project, you can access the Swagger UI by navigating to /q/swagger-ui.

+
+
+

The OpenAPI specification can be accessed by requesting /q/openapi.

+
+
+

Smallrye OpenAPI is compliant with MicroProfile OpenAPI. You can add MicroProfile annotations to further describe your REST endpoints and extend the OpenAPI documentation. +More information for this can be found here or here.

+
+
+ + + + + +
+ + +
+

Quarkus recommends using this extension and you can document your APIs in great detail by using the MicroProfile annotations. The downside to this is that using these annotations will blow up your code and you will have some duplicate information in it. +If you don’t want to specify the REST API again with all this annotation based information, we also recommend taking a look at the ServicedocGen Maven plugin for your Quarkus applications when implementing JAX-RS APIs.

+
+
+
+
+
+
ServicedocGen Maven Plugin
+
+

The ServicedocGen maven plugin can be used within both Spring and Quarkus applications. +It works a bit differently than the Smallrye extensions mentioned above. The plugin analyzes the REST API and its JavaDoc and then generates the OpenAPI specification and the Swagger UI as static files. So no Swagger or MicroProfile annotations have to be added.

+
+
+

The plugin can be configured in the pom.xml file of your application as follows:

+
+
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>org.codehaus.mojo</groupId>
+      <artifactId>servicedocgen-maven-plugin</artifactId>
+      <version>1.0.0</version>
+      <executions>
+        <execution>
+          <goals>
+            <goal>generate</goal>
+          </goals>
+        </execution>
+      </executions>
+      <configuration>
+        <descriptor>
+          <info>
+            <title>...</title>
+            <description>...</description>
+          </info>
+          <host>...</host>
+          <port>...</port>
+          <basePath>...</basePath>
+          <schemes>
+            <scheme>...</scheme>
+          </schemes>
+        </descriptor>
+      </configuration>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+

In the configuration section you have to define additional information to generate the OpenAPI specification correctly. An example can be found in our Quarkus reference application. +When building the application, an OpenApi.yaml and a SwaggerUI.html file are created in the /target/site folder. To make the Swagger UI available in the browser, the file must be served by some servlet.

+
+
+ +
+
+
+

1.97. Spring

+
+

Spring is the most famous and established Java framework. +It is fully supported by devonfw as an option and alternative to quarkus.

+
+
+
+

1.98. Guide to the Reader

+
+

Dependent on the intention you are reading this document, you might be most interested in the following chapters:

+
+
+
    +
  • +

    If you are not yet familiar with Spring, you may be interested in pros and cons of Spring. Also take a look at the official Spring website.

    +
  • +
  • +

    If you already have experience developing with Spring but are new to devon4j, take a look at devon4j’s recommendations on general best practices. Check out the chapters on architecture design, project structuring and coding conventions. Follow the referenced links to go deeper into a topic.

    +
  • +
  • +

    If you have already developed with devon4j and Spring and need more information on a specific topic, check out the devon4j guides for Spring. If you don’t find what you are looking for there, check out the general section. devon4j uses general solutions for Java, so solutions for both Spring and Quarkus are documented there.

    +
  • +
  • +

    If you want to get started or create your first Spring application using devon4j, check out the guide about creating a new application or the Jump the Queue and My Thai Star reference applications.

    +
  • +
+
+
+
+

1.99. Pros

+
+

Spring offers the following benefits:

+
+
+
    +
  • +

    highly flexible
    +Spring is famous for its great flexibility. You can customize and integrate nearly everything.

    +
  • +
  • +

    well established
    +While JEE application servers including very expensive commercial products turned out to be a dead-end, spring has guided projects through the changing trends of IT throughout decades. It may be the framework with the longest history track and popularity. As a result you can easily find developers, experts, books, articles, etc. about spring.

    +
  • +
  • +

    non-invasive and not biased
    +Spring became famous for its non-invasive coding based on patterns instead of hard dependencies. It gives you a lot of freedom and avoids tight coupling of your (business) code.

    +
  • +
+
+
+

See Why Spring? for details.

+
+
+
+

1.100. Cons

+
+

Spring has the following drawbacks:

+
+
+
    +
  • +

    history and legacy
    +Due to the pro of its long-established history, spring also carries a lot of legacy. As a result there are many ways to do the same thing while some options may be discouraged. Developers need some guidance (e.g. via devon4j) as they may enter pitfalls and dead-ends when choosing the first solution they found on google or stackoverflow.

    +
  • +
  • +

    lost lead in cloud-native
    +While for the last decades spring was leading innovation in Java app development, it seems that with the latest trends and shift such as cloud-native, they have been overtaken by frameworks like quarkus. However, spring is trying to catch up with spring-native.

    +
  • +
+
+
+
+

1.101. Spring-Boot

+
+

Spring-boot is a project and initiative within the spring-ecosystem that brought a lot of innovation and simplification into app development on top of spring. +As of today we typically use the terms spring and spring-boot rather synonymously as we always use spring together with spring-boot.

+
+
+
+

1.102. Spring-Native

+
+

Spring-native adds cloud-native support to the spring ecosystem and allows to build a spring app as cloud-native image via GraalVM. +This feature is currently beta. +You may also consider quarkus if you are interested in building cloud-native images.

+
+ +
+

1.102.1. Components

+
+

Following separation-of-concerns we divide an application into components using our package-conventions and project structure. +As described by the architecture, each component is divided into layers as outlined in the project structure. +Please note that a component will only have the required layers. +So a component may have any number from one to all layers.

+
+
+
+

1.102.2. General Component

+
+

Cross-cutting aspects belong to the implicit component general. It contains technical configurations and very general code that is not business specific. Such code shall not have any dependencies to other components and therefore business related code.

+
+
+
+

1.102.3. Business Component

+
+

The business-architecture defines the business components with their allowed dependencies. A small application (microservice) may just have one component and no dependencies making it simple while the same architecture can scale up to large and complex applications (from bigger microservice up to modulith). +Tailoring a business domain into applications and applications into components is a tricky task that needs the skills of an experienced architect. +Also, the tailoring should follow the business and not be split by technical reasons or only by size. +Size is only an indicator but not a driver of tailoring. +Whatever hypes like microservices are telling you, never get misled in this regard: +If your system grows and reaches MAX+1 lines of code, it is not the right motivation to split it into two microservices of ~MAX/2 lines of code - such approaches will waste huge amounts of money and lead to chaos.

+
+
+
+

1.102.4. App Component

+
+

Only in case you need cross-cutting code that aggregates another component you may introduce the component app. +It is allowed to depend on all other components but no other component may depend on it. +With the modularity and flexibility of spring you typically do not need this. +However, when you need to have a class that registers all services or component-facades using direct code dependencies, you can introduce this component.

+
+
+
+

1.102.5. Component Example

+
+

The following class diagram illustrates an example of the business component Staffmanagement:

+
+
+
+logic layer component pattern +
+
+
+

In this scheme, you can see the structure and flow from the service-layer (REST service call) via the logic-layer to the dataaccess-layer (and back).

+
+ +
+
+

1.102.6. Classic project structure

+
+

In this section we describe the classic project structure as initially proposed for Java in devonfw. +It is still valid and fully supported. +However, if you want to start a new project, please consider using the modern structure.

+
+
+
+

1.102.7. Modules

+
+

The structure of a devon4j application is divided into the following modules:

+
+
+
    +
  • +

    api: module containing the API of your application. The API contains the required artifacts to interact with your application via remote services. This can be REST service interfaces, transfer-objects with their interfaces and datatypes but also OpenAPI or gRPC contracts.

    +
  • +
  • +

    core: maven module containing the core of the application with service implementation, as well as entire logic layer and dataaccess layer.

    +
  • +
  • +

    batch: optional module for batch layer

    +
  • +
  • +

    server: module that bundles the entire app (core with optional batch) typically as a bootified WAR file.

    +
  • +
+
+
+
+

1.102.8. Deployment

+
+
+
+

Make jar not war

+
+
+
+— Josh Long +
+
+
+

First of all it is important to understand that the above defined modules aim to make api, core, and batch reusable artifacts, that can be used as a regular maven dependency. +On the other hand to build and deploy your application you want a final artifact that is containing all required 3rd party libraries. +This artifact is not reusable as a maven dependency. +That is exactly the purpose of the server module to build and package this final deployment artifact. +By default we first build a regular WAR file with maven in your server/target directory (*-server-«version».war) and in a second step create a bootified WAR out of this (*-server-bootified.war). +The bootified WAR file can then be started standalone (java -jar «filename».war). +However, it is also possible to deploy the same WAR file to a servlet container like tomcat or jetty. +As application servers and externally provided servlet containers are not recommended anymore for various reasons (see JEE), you may also want to create a bootified JAR file instead. +All you need to do in that case is to change the packaging in your server/pom.xml from war to jar.

+
+
+
+

1.102.9. Package Structure

+
+

The package structure of your code inside src/main/java (and src/test/java) of your modules is described in our coding conventions in the sections packages. A full mapping of the architecture and the different code elements to the packaging is described in the following section.

+
+
+
+

1.102.10. Layers

+
+

The package structure of your code inside src/main/java (and src/test/java) of your app is described in our coding conventions in the sections packages. +The following table describes our classic approach for packaging and layering:

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 12. Traditional generic devon4j layers
Layer«layer»

service

service

logic

logic

data-access

dataaccess

batch (optional)

batch

client (optional)

client

common

common

+
+
+

1.102.11. Architecture Mapping

+
+

In order to help you to map the architecture, packaging, layering, etc. to the code and see where different code elements should be placed, +we provide this architecture mapping:

+
+
+
+
«root»
+├──.«component»
+|  ├──.common
+|  |  ├──.api[.«detail»]
+|  |  |  ├──.datatype
+|  |  |  |  └──.«Datatype» (api)
+|  |  |  └──.«BusinessObject» (api)
+|  |  └──.impl[.«detail»]
+|  |     ├──.«Aspect»ConfigProperties (core)
+|  |     ├──.«Datatype»JsonSerializer (core)
+|  |     └──.«Datatype»JsonDeserializer (core)
+|  ├──.dataaccess
+|  |  ├──.api[.«detail»]
+|  |  |  ├──.repo
+|  |  |  |  └──.«BusinessObject»Repository (core)
+|  |  |  ├──.dao (core) [alternative to repo]
+|  |  |  |  └──.«BusinessObject»Dao (core) [alternative to Repository]
+|  |  |  └──.«BusinessObject»Entity (core)
+|  |  └──.impl[.«detail»]
+|  |     ├──.dao (core) [alternative to repo]
+|  |     |  └──.«BusinessObject»DaoImpl (core) [alternative to Repository]
+|  |     └──.«Datatype»AttributeConverter (core)
+|  ├──.logic
+|  |  ├──.api
+|  |  |  ├──.[«detail».]to
+|  |  |  |   ├──.«MyCustom»«To (api)
+|  |  |  |   ├──.«DataStructure»Embeddable (api)
+|  |  |  |   ├──.«BusinessObject»Eto (api)
+|  |  |  |   └──.«BusinessObject»«Subset»Cto (api)
+|  |  |  ├──.[«detail».]usecase
+|  |  |  |   ├──.UcFind«BusinessObject» (core)
+|  |  |  |   ├──.UcManage«BusinessObject» (core)
+|  |  |  |   └──.Uc«Operation»«BusinessObject» (core)
+|  |  |  └──.«Component» (core)
+|  |  ├──.base
+|  |  |  └──.[«detail».]usecase
+|  |  |     └──.Abstract«BusinessObject»Uc (core)
+|  |  └──.impl
+|  |     ├──.[«detail».]usecase
+|  |     |   ├──.UcFind«BusinessObject»Impl (core)
+|  |     |   ├──.UcManage«BusinessObject»Impl (core)
+|  |     |   └──.Uc«Operation»«BusinessObject»Impl (core)
+|  |     └──.«Component»Impl (core)
+|  └──.service
+|     ├──.api[.«detail»]
+|     |  ├──.rest
+|     |  |  └──.«Component»RestService (api)
+|     |  └──.ws
+|     |     └──.«Component»WebService (api)
+|     └──.impl[.«detail»]
+|        ├──.jms
+|        |  └──.«BusinessObject»JmsListener (core)
+|        ├──.rest
+|        |  └──.«Component»RestServiceImpl (core)
+|        └──.ws
+|           └──.«Component»WebServiceImpl (core)
+├──.general
+│  ├──.common
+│  |  ├──.api
+|  |  |  ├──.to
+|  |  |  |  ├──.AbstractSearchCriteriaTo (api)
+|  |  |  └──.ApplicationEntity
+│  |  ├──.base
+|  |  |  └──.AbstractBeanMapperSupport (core)
+│  |  └──.impl
+│  |     ├──.config
+│  |     |  └──.ApplicationObjectMapperFactory (core)
+│  |     └──.security
+│  |        └──.ApplicationWebSecurityConfig (core)
+│  ├──.dataaccess
+│  |  └──.api
+|  |     └──.ApplicationPersistenceEntity (core)
+│  ├──.logic
+│  |  └──.base
+|  |     ├──.AbstractComponentFacade (core)
+|  |     ├──.AbstractLogic (core)
+|  |     └──.AbstractUc (core)
+|  └──.service
+|     └──...
+└──.SpringBootApp (core)
+
+
+
+
+
+
+

1.103. Layers

+ +
+
Client Layer
+
+

There are various technical approaches to building GUI clients. The devonfw proposes rich clients that connect to the server via data-oriented services (e.g. using REST with JSON). +In general, we have to distinguish among the following types of clients:

+
+
+
    +
  • +

    web clients

    +
  • +
  • +

    native desktop clients

    +
  • +
  • +

    (native) mobile clients

    +
  • +
+
+
+

Our main focus is on web-clients. In our sample application my-thai-star we offer a responsive web-client based on Angular following devon4ng that integrates seamlessly with the back ends of my-thai-star available for Java using devon4j as well as .NET/C# using devon4net. For building angular clients read the separate devon4ng guide.

+
+
+
+
JavaScript for Java Developers
+
+

In order to get started with client development as a Java developer we give you some hints to get started. Also if you are an experienced JavaScript developer and want to learn Java this can be helpful. First, you need to understand that the JavaScript ecosystem is as large as the Java ecosystem and developing a modern web client requires a lot of knowledge. The following table helps you as experienced developer to get an overview of the tools, configuration-files, and other related aspects from the new world to learn. Also it helps you to map concepts between the ecosystems. Please note that we list the tools recommended by devonfw here (and we know that there are alternatives not listed here such as gradle, grunt, bower, etc.).

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 13. Aspects in JavaScript and Java ecosystem
TopicAspectJavaScriptJava

Programming

Language

TypeScript (extends JavaScript)

Java

Runtime

VM

nodejs (or web-browser)

jvm

Build- & Dependency-Management

Tool

npm or yarn

maven

Config

package.json

pom.xml

Repository

npm repo

maven central (repo search)

Build cmd

ng build or npm run build (goals are not standardized in npm)

mvn install (see lifecycle)

Test cmd

ng test

mvn test

Testing

Test-Tool

jasmine

junit

Test-Runner

karma

junit / surefire

E2E Testing

Protractor

Selenium

Code Analysis

Code Coverage

ng test --no-watch --code-coverage

JaCoCo

Development

IDE

MS VS Code or IntelliJ

Eclipse or IntelliJ

Framework

Angular (etc.)

Spring or Quarkus

+
+ +
+
+
Service Layer
+
+

The service layer is responsible for exposing functionality made available by the logic layer to external consumers over a network via technical protocols.

+
+
+
+
Types of Services
+
+

Before you start creating your services you should consider some general design aspects:

+
+
+
    +
  • +

    Do you want to create a RPC service?

    +
  • +
  • +

    Or is your problem better addressed by messaging or eventing?

    +
  • +
  • +

    Who will consume your service?

    +
    +
      +
    • +

      Do you have one or multiple consumers?

      +
    • +
    • +

      Do web-browsers have to use your service?

      +
    • +
    • +

      Will apps from other vendors or parties have to consume your service that you can not influence if the service may have to change or be extended?

      +
    • +
    +
    +
  • +
+
+
+

For RPC a common choice is REST but there are also interesting alternatives like gRPC. We also have a guide for SOAP but this technology should rather be considered as legacy and is not recommended for new services.

+
+
+

When it comes to messaging in Java the typical answer will be JMS. However, a very promising alternative is Kafka.

+
+
+
+
Versioning
+
+

For RPC services consumed by other applications we use versioning to prevent incompatibilities between applications when deploying updates. This is done by the following conventions:

+
+
+
    +
  • +

    We define a version number and prefix it with v (e.g. v1).

    +
  • +
  • +

    If we support previous versions we use those version numbers as part of the Java package defining the service API (e.g. com.foo.application.component.service.api.v1)

    +
  • +
  • +

    We use the version number as part of the service name in the remote URL (e.g. https://application.foo.com/services/rest/component/v1/resource)

    +
  • +
  • +

    Whenever breaking changes are made to the API, create a separate version of the service and increment the version (e.g. v1 → v2). The implementations of the different versions of the service contain compatibility code and delegate to the same unversioned use-case of the logic layer whenever possible.

    +
  • +
  • +

    For maintenance and simplicity, avoid keeping more than one previous version.

    +
  • +
+
+
+
+
Interoperability
+
+

For services that are consumed by clients with different technology, interoperability is required. This is addressed by selecting the right protocol, following protocol-specific best practices and following our considerations especially simplicity.

+
+
+
+
Service Considerations
+
+

The term service is quite generic and therefore easily misunderstood. It is a unit exposing coherent functionality via a well-defined interface over a network. For the design of a service, we consider the following aspects:

+
+
+
    +
  • +

    self-contained
    +The entire API of the service shall be self-contained and have no dependencies on other parts of the application (other services, implementations, etc.).

    +
  • +
  • +

    idempotence
    +E.g. creation of the same master-data entity has no effect (no error)

    +
  • +
  • +

    loosely coupled
    +Service consumers have minimum knowledge and dependencies on the service provider.

    +
  • +
  • +

    normalized
    +Complete, no redundancy, minimal

    +
  • +
  • +

    coarse-grained
    +Service provides rather large operations (save entire entity or set of entities rather than individual attributes)

    +
  • +
  • +

    atomic
    +Process individual entities (for processing large sets of data, use a batch instead of a service)

    +
  • +
  • +

    simplicity
    +Avoid polymorphism, RPC methods with unique name per signature and no overloading, avoid attachments (consider separate download service), etc.

    +
  • +
+
+
+
+
Security
+
+

Your services are the major entry point to your application. Hence, security considerations are important here.

+
+
+

See REST Security.

+
+ +
+
+
Service-Versioning
+
+

This guide describes the aspect and details about versioning of services

+
+
+
+
Motivation
+
+

Why versioning of services? First of all, you should only care about this topic if you really have to. Service versioning is complex and requires effort (time and budget). The best way to avoid this is to be smart in the first place when designing the service API. +Further, if you are creating services where the only consumer is e.g. the web-client that you deploy together with the consumed services, then you can change your service without the overhead of creating new service versions and keeping old service versions for compatibility.

+
+
+

However, if the following indicators are given you typically need to do service versioning:

+
+
+
    +
  • +

    Your service is part of a complex and distributed IT landscape

    +
  • +
  • +

    Your service requires incompatible changes

    +
  • +
  • +

    There are many consumers or there is at least one (relevant) consumer that can not be updated at the same time or is entirely out of control (unknown or totally different party/company)

    +
  • +
+
+
+

What are incompatible changes?

+
+
+
    +
  • +

    Almost any change when SOAP is used (as it changes the WSDL and breaks the contract). Therefore, we recommend to use REST instead. Then, only the following changes are critical.

    +
  • +
  • +

    A change where existing properties (attributes) have to change their name

    +
  • +
  • +

    A change where existing features (properties, operations, etc.) have to change their semantics (meaning)

    +
  • +
+
+
+

What changes do not cause incompatibilities?

+
+
+
    +
  • +

    Adding new service operations is entirely uncritical with REST.

    +
  • +
  • +

    Adding new properties is only a problem in the following cases:

    +
    +
      +
    • +

      Adding new mandatory properties to the input of a service is causing incompatibilities. This problem can be avoided by contract-design.

      +
    • +
    • +

      If a consumer is using a service to read data, modify it and then save it back via a service and a property is added to the data, then this property might be lost. This is not a problem with dynamic languages such as JavaScript/TypeScript but with strictly typed languages such as Java. In Java you will typically use structured typed transfer-objects (and not Map<String, Object>) so new properties that have been added but are not known to the consumer can not be mapped to the transfer-object and will be lost. When saving that transfer-object later the property will be gone. It might be impossible to determine the difference between a lost property and a property that was removed on purpose. This is a general problem that you need to be aware of and that you have to consider by your design in such situations.

      +
    • +
    +
    +
  • +
+
+
+

Even if you hit an indicator for incompatible changes you can still think about adding a new service operation instead of changing an existing one (and deprecating the old one). Be creative to simplify and avoid extra effort.

+
+
+
+
Procedure
+
+

The procedure when rolling out incompatible changes is illustrated by the following example:

+
+
+
+
+------+  +------+
+| App1 |  | App2 |
++---+--+  +--+---+
+    |        |
+    +---+----+
+        |
++-------+--------+
+|      Sv1       |
+|                |
+|      App3      |
++----------------+
+
+
+
+

So, here we see a simple example where App3 provides a Service S in Version v1 that is consumed both by App1 and App2.

+
+
+

Now for some reason the service S has to be changed in an incompatible way to make it future-proof for demands. However, upgrading all 3 applications at the same time is not possible in this case for whatever reason. Therefore, service versioning is applied for the changes of S.

+
+
+
+
+------+  +------+
+| App1 |  | App2 |
++---+--+  +--+---+
+    |        |
+    +--------+
+    |
++---+------------+
+|  Sv1  |  Sv2   |
+|                |
+|      App3*     |
++----------------+
+
+
+
+

Now, App3 has been upgraded and the new release was deployed. A new version v2 of S has been added while v1 is still kept for compatibility reasons and that version is still used by App1 and App2.

+
+
+
+
+------+  +------+
+| App1 |  | App2*|
++---+--+  +--+---+
+    |        |
+    |        |
+    |        |
++---+--------+---+
+|  Sv1  |  Sv2   |
+|                |
+|      App3      |
++----------------+
+
+
+
+

Now, App2 has been updated and deployed and it is using the new version v2 of S.

+
+
+
+
+------+  +------+
+| App1*|  | App2 |
++---+--+  +--+---+
+    |        |
+    +--------+
+             |
++------------+---+
+|  Sv1  |  Sv2   |
+|                |
+|      App3      |
++----------------+
+
+
+
+

Now, also App1 has been updated and deployed and it is using the new version v2 of S. The version v1 of S is not used anymore. This can be verified via logging and monitoring.

+
+
+
+
+------+  +------+
+| App1 |  | App2 |
++---+--+  +--+---+
+    |        |
+    +--------+
+             |
++------------+---+
+|          Sv2   |
+|                |
+|      App3*     |
++----------------+
+
+
+
+

Finally, version v1 of the service S was removed from App3 and the new release has been deployed.

+
+
+
+
Versioning Schema
+
+

In general anything can be used to differentiate versions of a service. Possibilities are:

+
+
+
    +
  • +

    Code names (e.g. Strawberry, Blueberry, Grapefruit)

    +
  • +
  • +

    Timestamps (YYYYMMDD-HHmmSS)

    +
  • +
  • +

    Sequential version numbers (e.g. v1, v2, v3)

    +
  • +
  • +

    Composed version numbers (e.g. 1.0.48-pre-alpha-3-20171231-235959-Strawberry)

    +
  • +
+
+
+

As we are following the KISS principle (see key principles) we propose to use sequential version numbers. These are short, clear, and easy while still allowing to see what version is after another one. Especially composed version numbers (even 1.1 vs. 2.0) lead to decisions and discussions that easily waste more time than adding value. It is still very easy to maintain an Excel sheet or release-notes document that is explaining the changes for each version (v1, v2, v3) of a particular service.

+
+
+

We suggest to always add the version schema to the service URL to be prepared for service versioning even if service versioning is not (yet) actively used. For simplicity it is explicitly stated that you may even do incompatible changes to the current version (typically v1) of your service if you can update the according consumers within the same deployment.

+
+
+
+
Practice
+
+

So assuming you know that you have to do service versioning, the question is how to do it practically in the code. +The approach for your devon4j project in case of code-first should be as described below:

+
+
+
    +
  • +

    Determine which types in the code need to be changed. It is likely to be the API and implementation of the according service but it may also impact transfer objects and potentially even datatypes.

    +
  • +
  • +

    Create new packages for all these concerned types containing the current version number (e.g. v1).

    +
  • +
  • +

    Copy all these types to that new packages.

    +
  • +
  • +

    Rename these copies so they carry the version number as suffix (e.g. V1).

    +
  • +
  • +

    Increase the version of the service in the unversioned package (e.g. from v1 to v2).

    +
  • +
  • +

    Now you have two versions of the same service (e.g. v1 and v2) but so far they behave exactly the same.

    +
  • +
  • +

    You start with your actual changes and modify the original files that have been copied before.

    +
  • +
  • +

    You will also ensure the links (import statements) of the copied types point to the copies with the version number

    +
  • +
  • +

    This will cause incompatibilities (and compile errors) in the copied service. Therefore, you need to fix that service implementation to map from the old API to the new API and behavior. In some cases, this may be easy (e.g. mapping x.y.z.v1.FooTo to x.y.z.FooTo using bean-mapping with some custom mapping for the incompatible changes), in other cases this can get very complex. Be aware of this complexity from the start before you make your decision about service versioning.

    +
  • +
  • +

    As far as possible this mapping should be done in the service-layer, not to pollute your business code in the core-layer with versioning-aspects. If there is no way to handle it in the service layer, e.g. you need some data from the persistence-layer, implement the "mapping" in the core-layer then, but don’t forget to remove this code, when removing the old service version.

    +
  • +
  • +

    Finally, ensure that both the old service behaves as before as well as the new service works as planned.

    +
  • +
+
+
+
Modularization
+
+

For modularization, we also follow the KISS principle (see key principles): +we suggest to have one api module per application that will contain the most recent version of your service and get released with every release-version of the application. The compatibility code with the versioned packages will be added to the core module and therefore is not exposed via the api module (because it has already been exposed in the previous release of the app). This way, you can always determine for sure which version of a service is used by another application just by its maven dependencies.

+
+
+

The KISS approach with only a single module that may contain multiple services (e.g. one for each business component) will cause problems when you want to have mixed usages of service versions: You can not use an old version of one service and a new version of another service from the same APP as then you would need to have its API module twice as a dependency on different versions, which is not possible. However, to avoid complicated overhead we always suggest to follow this easy approach. Only if you come to the point that you really need this complexity you can still solve it (even afterwards by publishing another maven artefact). As we are all on our way to build more but smaller applications (SOA, microservices, etc.) we should always start simple and only add complexity when really needed.

+
+
+

The following example gives an idea of the structure:

+
+
+
+
/«my-app»
+├──/api
+|  └──/src/main/java/
+|     └──/«rootpackage»/«application»/«component»
+|        ├──/common/api/to
+|        |  └──FooTo
+|        └──/service/api/rest
+|           └──FooRestService
+└──/core
+   └──/src/main/java/
+      └──«rootpackage»/«application»/«component»
+         ├──/common/api/to/v1
+         |  └──FooToV1
+         └──/service
+            ├──/api/rest/v1
+            |  └──FooRestServiceV1
+            └──impl/rest
+               ├──/v1
+               |  └── FooRestServiceImplV1
+               └──FooRestServiceImpl
+
+
+
+ +
+
+
+
Logic Layer
+
+

The logic layer is the heart of the application and contains the main business logic. +According to our business architecture, we divide an application into components. +For each component, the logic layer defines different use-cases. Another approach is to define a component-facade, which we do not recommend for future applications. Especially for Quarkus applications, we want to simplify things and highly suggest omitting component-facades completely and using use-cases only. +It is very important that you follow the links to understand the concept of use-case in order to properly implement your business logic.

+
+
+
+
Responsibility
+
+

The logic layer is responsible to implement the business logic according to the specified functional demands and requirements. +Therefore, it creates the actual value of the application. The logic layer is responsible for invoking business logic in external systems. +The following additional aspects are also included in its responsibility:

+
+
+ +
+
+
+
Security
+
+

The logic layer is the heart of the application. It is also responsible for authorization and hence security is important in this current case. Every method exposed in an interface needs to be annotated with an authorization check, stating what role(s) a caller must provide in order to be allowed to make the call. The authorization concept is described here.

+
+
+
Direct Object References
+
+

A security threat is Insecure Direct Object References. This simply gives you two options:

+
+
+
    +
  • +

    avoid direct object references

    +
  • +
  • +

    ensure that direct object references are secure

    +
  • +
+
+
+

Especially when using REST, direct object references via technical IDs are common sense. This implies that you have a proper authorization in place. This is especially tricky when your authorization does not only rely on the type of the data and according to static permissions but also on the data itself. Vulnerabilities for this threat can easily happen by design flaws and inadvertence. Here is an example from our sample application:

+
+
+

We have a generic use-case to manage BLOBs. In the first place, it makes sense to write a generic REST service to load and save these BLOBs. However, the permission to read or even update such BLOB depends on the business object hosting the BLOB. Therefore, such a generic REST service would open the door for this OWASP A4 vulnerability. To solve this in a secure way, you need individual services for each hosting business object to manage the linked BLOB and have to check permissions based on the parent business object. In this example the ID of the BLOB would be the direct object reference and the ID of the business object (and a BLOB property indicator) would be the indirect object reference.

+
+ +
+
+
Component Facade
+
+ + + + + +
+ + +Our recommended approach for implementing the logic layer is use-cases +
+
+
+

For each component of the application, the logic layer defines a component facade. +This is an interface defining all business operations of the component. +It carries the name of the component («Component») and has an implementation named «Component»Impl (see implementation).

+
+
+
+
API
+
+

The component facade interface defines the logic API of the component and has to be business oriented. +This means that all parameters and return types of all methods from this API have to be business transfer-objects, datatypes (String, Integer, MyCustomerNumber, etc.), or collections of these. +The API may also only access objects of other business components listed in the (transitive) dependencies of the business-architecture.

+
+
+

Here is an example how such an API may look like:

+
+
+
+
public interface Bookingmanagement {
+
+  BookingEto findBooking(Long id);
+
+  BookingCto findBookingCto(Long id);
+
+  Page<BookingEto> findBookingEtos(BookingSearchCriteriaTo criteria);
+
+  void approveBooking(BookingEto booking);
+
+}
+
+
+
+
+
Implementation
+
+

The implementation of an interface from the logic layer (a component facade or a use-case) carries the name of that interface with the suffix Impl and is annotated with @Named. +An implementation typically needs access to the persistent data. +This is done by injecting the corresponding repository (or DAO). +According to data-sovereignty, only repositories of the same business component may be accessed directly. +For accessing data from other components the implementation has to use the corresponding API of the logic layer (the component facade). Further, it shall not expose persistent entities from the domain layer and has to map them to transfer objects using the bean-mapper.

+
+
+
+
@Named
+@Transactional
+public class BookingmanagementImpl extends AbstractComponentFacade implements Bookingmanagement {
+
+  @Inject
+  private BookingRepository bookingRepository;
+
+  @Override
+  public BookingEto findBooking(Long id) {
+
+    LOG.debug("Get Booking with id {} from database.", id);
+    BookingEntity entity = this.bookingRepository.findOne(id);
+    return getBeanMapper().map(entity, BookingEto.class);
+  }
+}
+
+
+
+

As you can see, entities (BookingEntity) are mapped to corresponding ETOs (BookingEto). +Further details about this can be found in bean-mapping.

+
+ +
+
+
UseCase
+
+

A use-case is a small unit of the logic layer responsible for an operation on a particular entity (business object). +We leave it up to you to decide whether you want to define an interface (API) for each use-case or provide an implementation directly.

+
+
+

Following our architecture-mapping (for classic and modern projects), use-cases are named Uc«Operation»«BusinessObject»[Impl]. The prefix Uc stands for use-case and allows to easily find and identify them in your IDE. The «Operation» stands for a verb that is operated on the entity identified by «BusinessObject». +For CRUD we use the standard operations Find and Manage that can be generated by CobiGen. This also separates read and write operations (e.g. if you want to do CQRS, or to configure read-only transactions for read operations).

+
+
+

In our example, we choose to define an interface for each use-case. We also use *To to refer to any type of transfer object. Please follow our guide to understand more about different types of transfer object e.g. Eto, Dto, Cto

+
+
+
+
Find
+
+

The UcFind«BusinessObject» defines all read operations to retrieve and search the «BusinessObject». +Here is an example:

+
+
+
+
public interface UcFindBooking {
+  //*To = Eto, Dto or Cto
+  Booking*To findBooking(Long id);
+}
+
+
+
+
+
Manage
+
+

The UcManage«BusinessObject» defines all CRUD write operations (create, update and delete) for the «BusinessObject». +Here is an example:

+
+
+
+
public interface UcManageBooking {
+
+  //*To = Eto, Dto or Cto
+  Booking*To saveBooking(Booking*To booking);
+
+  void deleteBooking(Long id);
+
+}
+
+
+
+
+
Custom
+
+

Any other non CRUD operation Uc«Operation»«BusinessObject» uses any other custom verb for «Operation». +Typically, such custom use-cases only define a single method. +Here is an example:

+
+
+
+
public interface UcApproveBooking {
+
+  //*To = Eto, Dto or Cto
+  void approveBooking(Booking*To booking);
+
+}
+
+
+
+
+
Implementation
+
+

The implementation should carry its own name and the suffix Impl and is annotated with @Named and @ApplicationScoped. It will need access to the persistent data which is done by injecting the corresponding repository (or DAO). Furthermore, it shall not expose persistent entities from the data access layer and has to map them to transfer objects using the bean-mapper. Please refer to our bean mapping, transfer object and dependency injection documentation for more information. +Here is an example:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcManageBookingImpl implements UcManageBooking {
+
+  @Inject
+  private BookingRepository bookingRepository;
+
+  @Override
+  public void deleteBooking(Long id) {
+
+    LOG.debug("Delete Booking with id {} from database.", id);
+    this.bookingRepository.deleteById(id);
+  }
+}
+
+
+
+

The use-cases can then be injected directly into the service.

+
+
+
+
@Named("BookingmanagementRestService")
+@Validated
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+
+  @Inject
+  private UcFindBooking ucFindBooking;
+
+  @Inject
+  private UcManageBooking ucManageBooking;
+
+  @Inject
+  private UcApproveBooking ucApproveBooking;
+}
+
+
+
+
+
Internal use case
+
+

Sometimes, a component with multiple related entities and many use-cases needs to reuse business logic internally. +Of course, this can be exposed as an official use-case API but this will imply using transfer-objects (ETOs) instead of entities. In some cases, this is undesired e.g. for better performance to prevent unnecessary mapping of entire collections of entities. +In the first place, you should try to use abstract base implementations providing reusable methods the actual use-case implementations can inherit from. +If your business logic is even more complex and you have multiple aspects of business logic to share and reuse but also run into multi-inheritance issues, you may also just create use-cases that have their interface located in the impl scope package right next to the implementation (or you may just skip the interface). In such a case, you may define methods that directly take or return entity objects. +To avoid confusion with regular use-cases, we recommend to add the Internal suffix to the type name leading to Uc«Operation»«BusinessObject»Internal[Impl].

+
+
+ +
+
+
+
Data-Access Layer
+
+

The data-access layer is responsible for all outgoing connections to access and process data. This is mainly about accessing data from a persistent data-store. External systems could also be accessed from the data-access layer if they match this definition, e.g. a mongo-db via rest services.

+
+
+

Note: In the modern project structure, this layer is replaced by the domain layer.

+
+
+
+
Database
+
+

You need to make your choice for a database. Options are documented here.

+
+
+

The classical approach is to use a Relational Database Management System (RDBMS). In such a case, we strongly recommend to follow our JPA Guide. Some NoSQL databases are supported by spring-data, so you can consider the repository guide.

+
+
+ +
+
+
Batch Layer
+
+

We understand batch processing as a bulk-oriented, non-interactive, typically long running execution of tasks. For simplicity, we use the term "batch" or "batch job" for such tasks in the following documentation.

+
+
+

devonfw uses Spring Batch as a batch framework.

+
+
+

This guide explains how Spring Batch is used in devonfw applications. It focuses on aspects which are special to devonfw. If you want to learn about spring-batch you should refer to Spring's reference documentation.

+
+
+

There is an example of a simple batch implementation in the my-thai-star batch module.

+
+
+

In this chapter, we will describe the overall architecture (especially concerning layering) and how to administer batches.

+
+
+
+
Layering
+
+

Batches are implemented in the batch layer. The batch layer is responsible for batch processes, whereas the business logic is implemented in the logic layer. Compared to the service layer, you may understand the batch layer just as a different way of accessing the business logic. +From a component point of view, each batch is implemented as a subcomponent in the corresponding business component. +The business component is defined by the business architecture.

+
+
+

Let’s make an example for that. The sample application implements a batch for exporting ingredients. This ingredientExportJob belongs to the dishmanagement business component. +So the ingredientExportJob is implemented in the following package:

+
+
+
+
<basepackage>.dishmanagement.batch.impl.*
+
+
+
+

Batches should invoke use cases in the logic layer for doing their work. +Only "batch specific" technical aspects should be implemented in the batch layer.

+
+
+
+
+

Example: +For a batch, which imports product data from a CSV file, this means that all code for actually reading and parsing the CSV input file is implemented in the batch layer. +The batch calls the use case "create product" in the logic layer for actually creating the products for each line read from the CSV input file.

+
+
+
+
+
Directly accessing data access layer
+
+

In practice, it is not always appropriate to create use cases for every bit of work a batch should do. Instead, the data access layer can be used directly. +An example for that is a typical batch for data retention which deletes out-of-time data. +Often, deleting out-dated data is done by invoking a single SQL statement. It is appropriate to implement that SQL in a Repository or DAO method and call this method directly from the batch. +But be careful: this pattern is a simplification which could lead to business logic cluttered in different layers, which reduces the maintainability of your application. +It is a typical design decision you have to make when designing your specific batches.

+
+
+
+
+
Project structure and packaging
+
+

Batches will be implemented in a separate Maven module to keep the application core free of batch dependencies. The batch module includes a dependency on the application core-module to allow the reuse of the use cases, DAOs etc. +Additionally the batch module has dependencies on the required spring batch jars:

+
+
+
+
  <dependencies>
+
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>mtsj-core</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter-batch</artifactId>
+    </dependency>
+
+  </dependencies>
+
+
+
+

To allow an easy start of the batches from the command line it is advised to create a bootified jar for the batch module by adding the following to the pom.xml of the batch module:

+
+
+
+
  <build>
+    <resources>
+      <resource>
+        <directory>src/main/resources</directory>
+        <filtering>true</filtering>
+      </resource>
+    </resources>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <configuration>
+          <excludes>
+            <exclude>config/application.properties</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+      <!-- Create bootified jar for batch execution via command line.
+           Your applications spring boot app is used as main-class.
+       -->
+      <plugin>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-maven-plugin</artifactId>
+        <configuration>
+          <mainClass>com.devonfw.application.mtsj.SpringBootApp</mainClass>
+          <classifier>bootified</classifier>
+        </configuration>
+        <executions>
+          <execution>
+            <goals>
+              <goal>repackage</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+
+
+
+
Implementation
+
+

Most of the details about implementation of batches is described in the spring batch documentation. +There is nothing special about implementing batches in devonfw. You will find an easy example in my-thai-star.

+
+
+
+
Starting from command line
+
+

Devonfw advises to start batches via the command line. This is most common to many ops teams and allows easy integration in existing schedulers. In general, batches are started with the following command:

+
+
+
+
java -jar <app>-batch-<version>-bootified.jar --spring.main.web-application-type=none --spring.batch.job.enabled=true --spring.batch.job.names=<myJob> <params>
+
+
+ ++++ + + + + + + + + + + + + + + + + + + + + +
ParameterExplanation

--spring.main.web-application-type=none

This disables the web app (e.g. Tomcat)

--spring.batch.job.names=<myJob>

This specifies the name of the job to run. If you leave this out, ALL jobs will be executed, which probably does not make too much sense.

<params>

(Optional) additional parameters which are passed to your job

+
+

This will launch your normal spring boot app, disables the web application part and runs the designated job via Spring Boots org.springframework.boot.autoconfigure.batch.JobLauncherCommandLineRunner.

+
+
+
+
Scheduling
+
+

In the real world, scheduling of batches is not as simple as it might look at first.

+
+
+
    +
  • +

    Multiple batches have to be executed in order to achieve complex tasks. If one of those batches fails the further execution has to be stopped and operations should be notified for example.

    +
  • +
  • +

    Input files or those created by batches have to be copied from one node to another.

    +
  • +
  • +

    Scheduling batch executing could get complex easily (quarterly jobs, run job on first workday of a month, …​)

    +
  • +
+
+
+

For devonfw we propose the batches themselves should not mess around with details of scheduling. +Likewise your application should not do so. This complexity should be externalized to a dedicated batch administration service or scheduler. +This service could be a complex product or a simple tool like cron. We propose Rundeck as an open source job scheduler.

+
+
+

This gives full control to operations to choose the solution which fits best into existing administration procedures.

+
+
+
+
Handling restarts
+
+

If you start a job with the same parameters set after a failed run (BatchStatus.FAILED) a restart will occur. +In many cases your batch should then not reprocess all items it processed in the previous runs. +For that you need some logic to start at the desired offset. There are different ways to implement such logic:

+
+
+
    +
  • +

    Marking processed items in the database in a dedicated column

    +
  • +
  • +

    Write all IDs of items to process in a separate table as an initialization step of your batch. You can then delete IDs of already processed items from that table during the batch execution.

    +
  • +
  • +

    Storing restart information in springs ExecutionContext (see below)

    +
  • +
+
+
+
Using spring batch ExecutionContext for restarts
+
+

By implementing the ItemStream interface in your ItemReader or ItemWriter you may store information about the batch progress in the ExecutionContext. You will find an example for that in the CountJob in My Thai Star.

+
+
+

Additional hint: It is important that the bean definition methods of your ItemReader/ItemWriter return types implementing ItemStream (and not just ItemReader or ItemWriter alone). For that, the ItemStreamReader and ItemStreamWriter interfaces are provided.

+
+
+
+
+
Exit codes
+
+

Your batches should create a meaningful exit code to allow reaction to batch errors e.g. in a scheduler. +For that, spring batch automatically registers an org.springframework.boot.autoconfigure.batch.JobExecutionExitCodeGenerator. To make this mechanism work, your spring boot app main class has to populate this exit code to the JVM:

+
+
+
+
@SpringBootApplication
+public class SpringBootApp {
+
+  public static void main(String[] args) {
+    if (Arrays.stream(args).anyMatch((String e) -> e.contains("--spring.batch.job.names"))) {
+      // if executing batch job, explicitly exit jvm to report error code from batch
+      System.exit(SpringApplication.exit(SpringApplication.run(SpringBootApp.class, args)));
+    } else {
+      // normal web application start
+      SpringApplication.run(SpringBootApp.class, args);
+    }
+  }
+}
+
+
+
+
+
Stop batches and manage batch status
+
+

Spring batch uses several database tables to store the status of batch executions. +Each execution may have a different status. +You may use this mechanism to gracefully stop batches. +Additionally, in some edge cases (batch process crashed) the execution status may be in an undesired state. +E.g. the state will be running, despite the process having crashed some time ago. +For those cases you have to change the status of the execution in the database.

+
+
+
CLI-Tool
+
+

Devonfw provides an easy-to-use CLI tool to manage the execution status of your jobs. +The tool is implemented in the devonfw module devon4j-batch-tool. It will provide a runnable jar, which may be used as follows:

+
+
+
+
List names of all previous executed jobs
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar jobs list

+
+
Stop job named 'countJob'
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar jobs stop countJob

+
+
Show help
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar

+
+
+
+
+

As you can see, each invocation includes the JDBC connection string to your database. +This means that you have to make sure that the corresponding DB driver is in the classpath (the prepared jar only contains H2).

+
+
+
+
+
Authentication
+
+

Most business applications incorporate authentication and authorization. +Your spring boot application will implement some kind of security, e.g. integrated login with username+password or in many cases authentication via an existing IAM. +For security reasons your batch should also implement an authentication mechanism and obey the authorization implemented in your application (e.g. via @RolesAllowed).

+
+
+

Since there are many different authentication mechanism we cannot provide an out-of-the-box solution in devonfw, but we describe a pattern how this can be implemented in devonfw batches.

+
+
+

We suggest to implement the authentication in a Spring Batch tasklet, which runs as the first step in your batch. This tasklet will do all of the work which is required to authenticate the batch. A simple example which authenticates the batch "locally" via username and password could be implemented like this:

+
+
+
+
@Named
+public class SimpleAuthenticationTasklet implements Tasklet {
+
+  @Override
+  public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
+
+    String username = chunkContext.getStepContext().getStepExecution().getJobParameters().getString("username");
+    String password = chunkContext.getStepContext().getStepExecution().getJobParameters().getString("password");
+    Authentication authentication = new UsernamePasswordAuthenticationToken(username, password);
+
+    SecurityContextHolder.getContext().setAuthentication(authentication);
+    return RepeatStatus.FINISHED;
+  }
+
+}
+
+
+
+

The username and password have to be supplied via two cli parameters -username and -password. This implementation creates an "authenticated" Authentication and sets it in the Spring Security context. This is just for demonstration; normally you should not provide passwords via the command line. The actual authentication will be done automatically via Spring Security as in your "normal" application. +If you have a more complex authentication mechanism in your application, e.g. via OpenID Connect, just call this in the tasklet. Naturally you may read authentication parameters (e.g. secrets) from the command line or, more securely, from a configuration file.

+
+
+

In your Job Configuration set this tasklet as the first step:

+
+
+
+
@Configuration
+@EnableBatchProcessing
+public class BookingsExportBatchConfig {
+  @Inject
+  private JobBuilderFactory jobBuilderFactory;
+
+  @Inject
+  private StepBuilderFactory stepBuilderFactory;
+
+  @Bean
+  public Job myBatchJob() {
+    return this.jobBuilderFactory.get("myJob").start(myAuthenticationStep()).next(...).build();
+  }
+
+  @Bean
+  public Step myAuthenticationStep() {
+    return this.stepBuilderFactory.get("myAuthenticationStep").tasklet(myAuthenticatonTasklet()).build();
+  }
+
+  @Bean
+  public Tasklet myAuthenticatonTasklet() {
+    return new SimpleAuthenticationTasklet();
+  }
+...
+
+
+
+
+
Tipps & tricks
+
+
Identifying job parameters
+
+

Spring uses a job's parameters to identify job executions. Parameters starting with "-" are not considered for identifying a job execution.

+
+
+
+
+
+
+

1.104. Guides

+ +
+
Configuration
+ +
+
+
Internal Application Configuration
+
+

There usually is a main configuration registered with the main Spring Boot App, but differing configurations to support automated tests of the application can be defined using profiles (not detailed in this guide).

+
+
+
Spring Boot Application
+
+

For a complete documentation, see the Spring Boot Reference Guide.

+
+
+

With spring-boot you provide a simple main class (also called starter class) like this: +com.devonfw.mtsj.application

+
+
+
+
@SpringBootApplication(exclude = { EndpointAutoConfiguration.class })
+@EntityScan(basePackages = { "com.devonfw.mtsj.application" }, basePackageClasses = { AdvancedRevisionEntity.class })
+@EnableGlobalMethodSecurity(jsr250Enabled = true)
+@ComponentScan(basePackages = { "com.devonfw.mtsj.application.general", "com.devonfw.mtsj.application" })
+public class SpringBootApp {
+
+  /**
+   * Entry point for spring-boot based app
+   *
+   * @param args - arguments
+   */
+  public static void main(String[] args) {
+
+    SpringApplication.run(SpringBootApp.class, args);
+  }
+}
+
+
+
+

In a devonfw application this main class is always located in the <basepackage> of the application package namespace (see package-conventions). This is because a spring boot application will automatically do a classpath scan for components (spring-beans) and entities in the package where the application main class is located including all sub-packages. You can use the @ComponentScan and @EntityScan annotations to customize this behaviour.

+
+
+

If you want to map spring configuration properties into your custom code please see configuration mapping.

+
+
+
+
Standard beans configuration
+
+

For basic bean configuration we rely on spring boot using mainly configuration classes and only occasionally XML configuration files. Some key principles to understand Spring Boot auto-configuration features:

+
+
+
    +
  • +

    Spring Boot auto-configuration attempts to automatically configure your Spring application based on the jar dependencies and annotated components found in your source code.

    +
  • +
  • +

    Auto-configuration is non-invasive, at any point you can start to define your own configuration to replace specific parts of the auto-configuration by redefining your identically named bean (see also exclude attribute of @SpringBootApplication in example code above).

    +
  • +
+
+
+

Beans are configured via annotations in your java code (see dependency-injection).

+
+
+

For technical configuration you will typically write additional spring config classes annotated with @Configuration that provide bean implementations via methods annotated with @Bean. See spring @Bean documentation for further details. Like in XML you can also use @Import to make a @Configuration class include other configurations.

+
+
+

More specific configuration files (as required) reside in an adequately named subfolder of:

+
+
+

src/main/resources/app

+
+
+
+
BeanMapper Configuration
+
+

In case you are still using dozer, you will find further details in bean-mapper configuration.

+
+
+
+
Security configuration
+
+

The abstract base class BaseWebSecurityConfig should be extended to configure web application security thoroughly. +A basic and secure configuration is provided which can be overridden or extended by subclasses. +Subclasses must use the @Profile annotation to further discriminate between beans used in production and testing scenarios. See the following example:

+
+
+
Listing 14. How to extend BaseWebSecurityConfig for Production and Test
+
+
@Configuration
+@EnableWebSecurity
+@Profile(SpringProfileConstants.JUNIT)
+public class TestWebSecurityConfig extends BaseWebSecurityConfig {...}
+
+@Configuration
+@EnableWebSecurity
+@Profile(SpringProfileConstants.NOT_JUNIT)
+public class WebSecurityConfig extends BaseWebSecurityConfig {...}
+
+
+ +
+
+
WebSocket configuration
+
+

A websocket endpoint is configured within the business package as a Spring configuration class. The annotation @EnableWebSocketMessageBroker makes Spring Boot register this endpoint.

+
+
+
+
package your.path.to.the.websocket.config;
+...
+@Configuration
+@EnableWebSocketMessageBroker
+public class WebSocketConfig extends AbstractWebSocketMessageBrokerConfigurer {
+...
+
+
+
+
+
+
External Application Configuration
+
+
application.properties files
+
+

Here is a list of common properties provided by the Spring framework.

+
+
+

For a general understanding of how spring-boot is loading and bootstrapping your application.properties see spring-boot external configuration.

+
+
+

The following properties files are used in devonfw application:

+
+
+
    +
  • +

    src/main/resources/application.properties providing a default configuration - bundled and deployed with the application package. It further acts as a template to derive a tailored minimal environment-specific configuration.

    +
  • +
  • +

    src/main/resources/config/application.properties providing additional properties only used at development time (for all local deployment scenarios). This property file is excluded from all packaging.

    +
  • +
  • +

    src/test/resources/config/application.properties providing additional properties only used for testing (JUnits based on spring test).

    +
  • +
+
+
+

For other environments where the software gets deployed such as test, acceptance and production you need to provide a tailored copy of application.properties. The location depends on the deployment strategy:

+
+
+
    +
  • +

    standalone run-able Spring Boot App using embedded tomcat: config/application.properties under the installation directory of the spring boot application.

    +
  • +
  • +

    dedicated tomcat (one tomcat per app): $CATALINA_BASE/lib/config/application.properties

    +
  • +
  • +

    tomcat serving a number of apps (requires expanding the wars): $CATALINA_BASE/webapps/<app>/WEB-INF/classes/config

    +
  • +
+
+
+

In this application.properties you only define the minimum properties that are environment specific and inherit everything else from the bundled src/main/resources/application.properties. In any case, make very sure that the classloader will find the file.

+
+
+
+
Database Configuration
+
+

The configuration for spring and Hibernate is already provided by devonfw in our sample application and the application template. So you only need to worry about a few things to customize.

+
+
+Database System and Access +
+

Obviously you need to configure which type of database you want to use as well as the location and credentials to access it. The defaults are configured in application.properties that is bundled and deployed with the release of the software. The files should therefore contain the properties as in the given example:

+
+
+
+
  database.url=jdbc:postgresql://database.enterprise.com/app
+  database.user.login=appuser01
+  database.user.password=************
+  database.hibernate.dialect = org.hibernate.dialect.PostgreSQLDialect
+  database.hibernate.hbm2ddl.auto=validate
+
+
+
+

For further details about database.hibernate.hbm2ddl.auto please see here. For production and acceptance environments we use the value validate that should be set as default. In case you want to use Oracle RDBMS you can find additional hints here.

+
+
+

If your application supports multiple database types, set spring.profiles.active=XXX in src/main/resources/config/application.properties to choose the database of your choice. Also, one has to set all the active spring profiles in this application.properties and not in any of the other application.properties.

+
+
+
+Database Logging +
+

Add the following properties to application.properties to enable logging of database queries for debugging purposes.

+
+
+
+
spring.jpa.properties.hibernate.show_sql=true
+spring.jpa.properties.hibernate.use_sql_comments=true
+spring.jpa.properties.hibernate.format_sql=true
+
+
+
+
+
+
+
Security
+
+
Password Encryption
+
+

In order to support encrypted passwords in spring-boot application.properties all you need to do is to add jasypt-spring-boot as dependency in your pom.xml (please check for recent version here):

+
+
+
+
<dependency>
+  <groupId>com.github.ulisesbocchio</groupId>
+  <artifactId>jasypt-spring-boot-starter</artifactId>
+  <version>3.0.3</version>
+</dependency>
+
+
+
+

This will smoothly integrate jasypt into your spring-boot application. Read this HOWTO to learn how to encrypt and decrypt passwords using jasypt.

+
+
+

Next, we give a simple example of how to encrypt and configure a secret value. +We use the algorithm PBEWITHHMACSHA512ANDAES_256 that provides strong encryption and is the default of jasypt-spring-boot-starter. +However, different algorithms can be used if preferred (e.g. PBEWITHMD5ANDTRIPLEDES).

+
+
+
+
java -cp ${M2_REPO}/org/jasypt/jasypt/1.9.3/jasypt-1.9.3.jar org.jasypt.intf.cli.JasyptPBEStringEncryptionCLI password=masterpassword algorithm=PBEWITHHMACSHA512ANDAES_256 input=secret ivGeneratorClassName=org.jasypt.iv.RandomIvGenerator
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.5+10
+
+
+
+----ARGUMENTS-------------------
+
+input: secret
+password: masterpassword
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+PoUxkNjY2juQMCyPu6ic5KJy1XfK+bX9vu2/mPj3pmcO4iydG6mhgZRZSw50z/oC
+
+
+
+

Of course the master-password (masterpassword) and the actual password to encrypt (secret) are just examples. +Please replace them with reasonable strong passwords for your environment. +Further, if you are using devonfw-ide you can make your life much easier and just type:

+
+
+
+
devon jasypt encrypt
+
+
+
+

See jasypt commandlet for details.

+
+
+

Now the entire line after the OUTPUT block is your encrypted secret. +It even contains some random salt so that multiple encryption invocations with the same parameters (ARGUMENTS) will produce a different OUTPUT.

+
+
+

The master-password can be configured on your target environment via the property jasypt.encryptor.password. As system properties given on the command-line are visible in the process list, we recommend to use a config/application.yml file only for this purpose (as we recommended to use application.properties for regular configs):

+
+
+
+
jasypt:
+    encryptor:
+        password: masterpassword
+
+
+
+

Again masterpassword is just an example that you replace with your actual master password. +Now you are able to put encrypted passwords into your application.properties and specify the algorithm.

+
+
+
+
spring.datasource.password=ENC(PoUxkNjY2juQMCyPu6ic5KJy1XfK+bX9vu2/mPj3pmcO4iydG6mhgZRZSw50z/oC)
+jasypt.encryptor.algorithm=PBEWITHHMACSHA512ANDAES_256
+
+
+
+

This application.properties file can be version controlled (git-ops) and without knowing the masterpassword nobody is able to decrypt this to get the actual secret back.

+
+
+

To prevent jasypt to throw an exception in dev or test scenarios you can simply put this in your local config (src/main/config/application.properties and same for test, see above for details):

+
+
+
+
jasypt.encryptor.password=none
+
+
+ +
+
+
Mapping configuration to your code
+
+

If you are using spring-boot as suggested by devon4j your application can be configured by application.properties file as described in configuration. +To get a single configuration option into your code for flexibility, you can use

+
+
+
+
@Value("${my.property.name}")
+private String myConfigurableField;
+
+
+
+

Now, in your application.properties you can add the property:

+
+
+
+
my.property.name=my-property-value
+
+
+
+

You may even use @Value("${my.property.name:my-default-value}") to make the property optional.

+
+
+
+
Naming conventions for configuration properties
+
+

As a best practice your configuration properties should follow these naming conventions:

+
+
+
    +
  • +

    build the property-name as a path of segments separated by the dot character (.)

    +
  • +
  • +

    segments should get more specific from left to right

    +
  • +
  • +

    a property-name should either be a leaf value or a tree node (prefix of other property-names) but never both! So never have something like foo.bar=value and foo.bar.child=value2.

    +
  • +
  • +

    start with a segment namespace unique to your context or application

    +
  • +
  • +

    a good example would be «myapp».billing.service.email.sender for the sender address of billing service emails send by «myapp».

    +
  • +
+
+
+
+
Mapping advanced configuration
+
+

However, in many scenarios you will have features that require more than just one property. +Injecting those via @Value is not leading to good code quality. +Instead we create a class with the suffix ConfigProperties containing all configuration properties for our aspect that is annotated with @ConfigurationProperties:

+
+
+
+
@ConfigurationProperties(prefix = "myapp.billing.service")
+public class BillingServiceConfigProperties {
+
+  private final Email email = new Email();
+  private final Smtp smtp = new Smtp();
+
+  public Email getEmail() { return this.email; }
+  public Email getSmtp() { return this.smtp; }
+
+  public static class Email {
+
+    private String sender;
+    private String subject;
+
+    public String getSender() { return this.sender; }
+    public void setSender(String sender) { this.sender = sender; }
+    public String getSubject() { return this.subject; }
+    public void setSubject(String subject) { this.subject = subject; }
+  }
+
+  public static class Smtp {
+
+    private String host;
+    private int port = 25;
+
+    public String getHost() { return this.host; }
+    public void setHost(String host) { this.host = host; }
+    public int getPort() { return this.port; }
+    public void setPort(int port) { this.port = port; }
+  }
+
+}
+
+
+
+

Of course this is just an example to demonstrate this feature of spring-boot. +In order to send emails you would typically use the existing spring-email feature. +But as you can see this allows us to define and access our configuration in a very structured and comfortable way. +The annotation @ConfigurationProperties(prefix = "myapp.billing.service") will automatically map spring configuration properties starting with myapp.billing.service via the according getters and setters into our BillingServiceConfigProperties. +We can easily define defaults (e.g. 25 as default value for myapp.billing.service.smtp.port). +Also Email or Smtp could be top-level classes to be reused in multiple configurations. +Of course you would also add helpful JavaDoc comments to the getters and classes to document your configuration options. +Further to access this configuration, we can use standard dependency-injection:

+
+
+
+
@Inject
+private BillingServiceConfigProperties config;
+
+
+
+

For very generic cases you may also use Map<String, String> to map any kind of property in an untyped way. +An example for generic configuration from devon4j can be found in +ServiceConfigProperties.

+
+
+

For further details about this feature also consult Guide to @ConfigurationProperties in Spring Boot.

+
+
+
+
Generate configuration metadata
+
+

You should further add this dependency to your module containing the *ConfigProperties:

+
+
+
+
    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-configuration-processor</artifactId>
+      <optional>true</optional>
+    </dependency>
+
+
+
+

This will generate configuration metadata so projects using your code can benefit from autocompletion and getting your JavaDoc as tooltip when editing application.properties, which makes this approach very powerful. +For further details about this please read A Guide to Spring Boot Configuration Metadata.

+
+
+ +
+
+
+
Auditing
+
+

For database auditing we use hibernate envers. If you want to use auditing ensure you have the following dependency in your pom.xml:

+
+
+
Listing 15. spring
+
+
<dependency>
+  <groupId>com.devonfw.java.modules</groupId>
+  <artifactId>devon4j-jpa-envers</artifactId>
+</dependency>
+
+
+
+
Listing 16. quarkus
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-hibernate-envers</artifactId>
+</dependency>
+
+
+
+ + + + + +
+ + +The following part applies only to spring applications. At this point, the Quarkus extension does not provide any additional configurations. For Quarkus applications, simply use the @Audited annotation to enable auditing for an entity class, as described a few lines below or seen here. +
+
+
+

Make sure that the entity manager also scans the package from the devon4j-jpa[-envers] module in order to work properly. And make sure that the correct Repository Factory Bean Class is chosen.

+
+
+
+
@EntityScan(basePackages = { "«my.base.package»" }, basePackageClasses = { AdvancedRevisionEntity.class })
+...
+@EnableJpaRepositories(repositoryFactoryBeanClass = GenericRevisionedRepositoryFactoryBean.class)
+...
+public class SpringBootApp {
+  ...
+}
+
+
+
+

Now let your [Entity]Repository extend from DefaultRevisionedRepository instead of DefaultRepository.

+
+
+

The repository now has a method getRevisionHistoryMetadata(id) and getRevisionHistoryMetadata(id, boolean lazy) available to get a list of revisions for a given entity and a method find(id, revision) to load a specific revision of an entity with the given ID or getLastRevisionHistoryMetadata(id) to load the last revision. +To enable auditing for an entity simply place the @Audited annotation to your entity and all entity classes it extends from.

+
+
+
+
@Entity(name = "Drink")
+@Audited
+public class DrinkEntity extends ProductEntity implements Drink {
+...
+
+
+
+

When auditing is enabled for an entity an additional database table is used to store all changes to the entity table and a corresponding revision number. This table is called <ENTITY_NAME>_AUD per default. Another table called REVINFO is used to store all revisions. Make sure that these tables are available. They can be generated by hibernate with the following property (only for development environments).

+
+
+
+
  database.hibernate.hbm2ddl.auto=create
+
+
+
+

Another possibility is to put them in your database migration scripts like so.

+
+
+
+
CREATE CACHED TABLE PUBLIC.REVINFO(
+  id BIGINT NOT NULL generated by default as identity (start with 1),
+  timestamp BIGINT NOT NULL,
+  user VARCHAR(255)
+);
+...
+CREATE CACHED TABLE PUBLIC.<TABLE_NAME>_AUD(
+    <ALL_TABLE_ATTRIBUTES>,
+    revtype TINYINT,
+    rev BIGINT NOT NULL
+);
+
+
+
+ +
+
+
Access-Control
+
+

Access-Control is a central and important aspect of Security. It consists of two major aspects:

+
+
+ +
+
+
+
Authentication
+
+

Definition:

+
+
+
+
+

Authentication is the verification that somebody interacting with the system is the actual subject for whom he claims to be.

+
+
+
+
+

The one authenticated is properly called subject or principal. There are two forms of principals you need to distinguish while designing your authentication: human users and autonomous systems. While e.g. a Kerberos/SPNEGO Single-Sign-On makes sense for human users, it is pointless for authenticating autonomous systems. For simplicity, we use the common term user to refer to any principal even though it may not be a human (e.g. in case of a service call from an external system).

+
+
+

To prove the authenticity, the user provides some secret called credentials. The most simple form of credentials is a password.

+
+
+
Implementations
+
+ + + + + +
+ + +Please never implement your own authentication mechanism or credential store. You have to be aware of implicit demands such as salting and hashing credentials, password life-cycle with recovery, expiry, and renewal including email notification confirmation tokens, central password policies, etc. This is the domain of access managers and identity management systems. In a business context you will typically already find a system for this purpose that you have to integrate (e.g. via LDAP). Otherwise you should consider establishing such a system e.g. using keycloak. +
+
+
+

We recommend using JWT when possible. For KISS, also try to avoid combining multiple authentication mechanisms (form based, basic-auth, SAMLv2, OAuth, etc.) within the same application (for different URLs).

+
+
+

For spring, check the Spring Security

+
+
+

For quarkus, check the Quarkus Authentication

+
+
+
+
+
Authorization
+
+

Definition:

+
+
+
+
+

Authorization is the verification that an authenticated user is allowed to perform the operation he intends to invoke.

+
+
+
+
+
Clarification of terms
+
+

For clarification we also want to give a common understanding of related terms that have no unique definition and consistent usage in the wild.

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 14. Security terms related to authorization
TermMeaning and comment

Permission

A permission is an object that allows a principal to perform an operation in the system. This permission can be granted (give) or revoked (taken away). Sometimes people also use the term right what is actually wrong as a right (such as the right to be free) can not be revoked.

Group

We use the term group in this context for an object that contains permissions. A group may also contain other groups. Then the group represents the set of all recursively contained permissions.

Role

We consider a role as a specific form of group that also contains permissions. A role identifies a specific function of a principal. A user can act in a role.

+

For simple scenarios a principal has a single role associated. In more complex situations a principal can have multiple roles but has only one active role at a time that he can choose out of his assigned roles. For KISS it is sometimes sufficient to avoid this by creating multiple accounts for the few users with multiple roles. Otherwise at least avoid switching roles at run-time in clients as this may cause problems with related states. Simply restart the client with the new role as parameter in case the user wants to switch his role.

Access Control

Any permission, group, role, etc., which declares a control for access management.

+
+
+
Suggestions on the access model
+
+

For the access model we give the following suggestions:

+
+
+
    +
  • +

    Each Access Control (permission, group, role, …​) is uniquely identified by a human readable string.

    +
  • +
  • +

    We create a unique permission for each use-case.

    +
  • +
  • +

    We define groups that combine permissions to typical and useful sets for the users.

    +
  • +
  • +

    We define roles as specific groups as required by our business demands.

    +
  • +
  • +

    We allow to associate users with a list of Access Controls.

    +
  • +
  • +

    For authorization of an implemented use case we determine the required permission. Furthermore, we determine the current user and verify that the required permission is contained in the tree spanned by all his associated Access Controls. If the user does not have the permission we throw a security exception and thus abort the operation and transaction.

    +
  • +
  • +

    We avoid negative permissions, that is a user has no permission by default and only those granted to him explicitly give him additional permission for specific things. Permissions granted can not be reduced by other permissions.

    +
  • +
  • +

    Technically we consider permissions as a secret of the application. Administrators shall not fiddle with individual permissions but grant them via groups. So the access management provides a list of strings identifying the Access Controls of a user. The individual application itself contains these Access Controls in a structured way, whereas each group forms a permission tree.

    +
  • +
+
+
+
+
Naming conventions
+
+

As stated above each Access Control is uniquely identified by a human readable string. This string should follow the naming convention:

+
+
+
+
«app-id».«local-name»
+
+
+
+

For Access Control Permissions the «local-name» again follows the convention:

+
+
+
+
«verb»«object»
+
+
+
+

The segments are defined by the following table:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 15. Segments of Access Control Permission ID
SegmentDescriptionExample

«app-id»

Is a unique technical but human readable string of the application (or microservice). It shall not contain special characters and especially no dot or whitespace. We recommend to use lower-train-case-ascii-syntax. The identity and access management should be organized on enterprise level rather than application level. Therefore permissions of different apps might easily clash (e.g. two apps might both define a group ReadMasterData but some user shall get this group for only one of these two apps). Using the «app-id». prefix is a simple but powerful namespacing concept that allows you to scale and grow. You may also reserve specific «app-id»s for cross-cutting concerns that do not actually reflect a single app e.g. to grant access to a geographic region.

shop

«verb»

The action that is to be performed on «object». We use Find for searching and reading data. Save shall be used both for create and update. Only if you really have demands to separate these two you may use Create in addition to Save. Finally, Delete is used for deletions. For non CRUD actions you are free to use additional verbs such as Approve or Reject.

Find

«object»

The affected object or entity. Shall be named according to your data-model

Product

+
+

So as an example shop.FindProduct will reflect the permission to search and retrieve a Product in the shop application. The group shop.ReadMasterData may combine all permissions to read master-data from the shop. However, also a group shop.Admin may exist for the Admin role of the shop application. Here the «local-name» is Admin that does not follow the «verb»«object» schema.

+
+
+
+
devon4j-security
+
+

The module devon4j-security provides ready-to-use code based on spring-security that makes your life a lot easier.

+
+
+
+access-control +
+
Figure 3. devon4j Security Model
+
+
+

The diagram shows the model of devon4j-security that separates two different aspects:

+
+
+
    +
  • +

    The Identity- and Access-Management is provided by according products and typically already available in the enterprise landscape (e.g. an active directory). It provides a hierarchy of primary access control objects (roles and groups) of a user. An administrator can grant and revoke permissions (indirectly) via this way.

    +
  • +
  • +

    The application security defines a hierarchy of secondary access control objects (groups and permissions). This is done by configuration owned by the application (see following section). The "API" is defined by the IDs of the primary access control objects that will be referenced from the Identity- and Access-Management.

    +
  • +
+
+
+
+
Access Control Config
+
+

In your application simply extend AccessControlConfig to configure your access control objects as code and reference it from your use-cases. An example config may look like this:

+
+
+
+
@Named
+public class ApplicationAccessControlConfig extends AccessControlConfig {
+
+  public static final String APP_ID = "MyApp";
+
+  private static final String PREFIX = APP_ID + ".";
+
+  public static final String PERMISSION_FIND_OFFER = PREFIX + "FindOffer";
+
+  public static final String PERMISSION_SAVE_OFFER = PREFIX + "SaveOffer";
+
+  public static final String PERMISSION_DELETE_OFFER = PREFIX + "DeleteOffer";
+
+  public static final String PERMISSION_FIND_PRODUCT = PREFIX + "FindProduct";
+
+  public static final String PERMISSION_SAVE_PRODUCT = PREFIX + "SaveProduct";
+
+  public static final String PERMISSION_DELETE_PRODUCT = PREFIX + "DeleteProduct";
+
+  public static final String GROUP_READ_MASTER_DATA = PREFIX + "ReadMasterData";
+
+  public static final String GROUP_MANAGER = PREFIX + "Manager";
+
+  public static final String GROUP_ADMIN = PREFIX + "Admin";
+
+  public ApplicationAccessControlConfig() {
+
+    super();
+    AccessControlGroup readMasterData = group(GROUP_READ_MASTER_DATA, PERMISSION_FIND_OFFER, PERMISSION_FIND_PRODUCT);
+    AccessControlGroup manager = group(GROUP_MANAGER, readMasterData, PERMISSION_SAVE_OFFER, PERMISSION_SAVE_PRODUCT);
+    AccessControlGroup admin = group(GROUP_ADMIN, manager, PERMISSION_DELETE_OFFER, PERMISSION_DELETE_PRODUCT);
+  }
+}
+
+
+
+
+
Configuration on Java Method level
+
+

In your use-case you can now reference a permission like this:

+
+
+
+
@Named
+public class UcSafeOfferImpl extends ApplicationUc implements UcSafeOffer {
+
+  @Override
+  @RolesAllowed(ApplicationAccessControlConfig.PERMISSION_SAVE_OFFER)
+  public OfferEto save(OfferEto offer) { ... }
+  ...
+}
+
+
+
+
+
JEE Standard
+
+

Role-based Access Control (RBAC) is commonly used for authorization. +JSR 250 defines a number of common annotations to secure your application.

+
+
+
    +
  • +

    javax.annotation.security.PermitAll specifies that no access control is required to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.DenyAll specifies that no access controls are allowed to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.RolesAllowed specifies that only a list of access controls are allowed to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.DeclareRoles defines roles for security checking.

    +
  • +
  • +

    javax.annotation.security.RunAs specifies the RunAs role for the given components.

    +
  • +
+
+
+

@PermitAll, @DenyAll, and @RolesAllowed annotations can be applied to both class and method. +A method-level annotation will override the behaviour of a class-level annotation. Using multiple annotations of those 3 is not valid.

+
+
+
+
// invalid
+@PermitAll
+@DenyAll
+public String foo()
+
+// invalid and compilation fails
+@RolesAllowed("admin")
+@RolesAllowed("user")
+public String bar()
+
+// OK
+@RolesAllowed("admin", "user")
+public String bar()
+
+
+
+

Please note that when specifying multiple arguments to @RolesAllowed those are combined with OR (and not with AND). +So if the user has any of the specified access controls, he will be able to access the method.

+
+
+

As a best practice avoid specifying string literals to @RolesAllowed. +Instead define a class with all access controls as constants and reference them from there. +This class is typically called ApplicationAccessControlConfig in devonfw.

+
+
+

In many complicated cases where @PermitAll @DenyAll @RolesAllowed are insufficient e.g. a method should be accessed by a user in role A and not in role B at the same time, you have to verify the user role directly in the method. You can use SecurityContext class to get further needed information.

+
+
+Spring +
+

Spring Security also supports authorization on method level. To use it, you need to add the spring-security-config dependency. If you use Spring Boot, the dependency spring-boot-starter-security already includes spring-security-config. Then you can configure as follows:

+
+
+
    +
  • +

    prePostEnabled property enables Spring Security pre/post annotations. @PreAuthorize and @PostAuthorize annotations provide expression-based access control. See more here

    +
  • +
  • +

    securedEnabled property determines if the @Secured annotation should be enabled. @Secured can be used similarly as @RolesAllowed.

    +
  • +
  • +

    jsr250Enabled property allows us to use the JSR-250 annotations such as @RolesAllowed.

    +
  • +
+
+
+
+
@Configuration
+@EnableGlobalMethodSecurity(
+  prePostEnabled = true,
+  securedEnabled = true,
+  jsr250Enabled = true)
+public class MethodSecurityConfig
+  extends GlobalMethodSecurityConfiguration {
+}
+
+
+
+

A further read about the whole concept of Spring Security Authorization can be found here.

+
+
+
+Quarkus +
+

Quarkus comes with built-in security to allow for RBAC based on the common security annotations @RolesAllowed, @DenyAll, @PermitAll on REST endpoints and CDI beans. Quarkus also provides the io.quarkus.security.Authenticated annotation that will permit any authenticated user to access the resource (equivalent to @RolesAllowed("**")).

+
+
+
+
+
Data-based Permissions
+ +
+
+
Access Control Schema (deprecated)
+
+

The access-control-schema.xml approach is deprecated. The documentation can still be found in access control schema.

+
+
+ +
+
+
Data-permissions
+
+

In some projects there are demands for permissions and authorization that is dependent on the processed data. E.g. a user may only be allowed to read or write data for a specific region. This is adding some additional complexity to your authorization. If you can avoid this it is always best to keep things simple. However, in various cases this is a requirement. Therefore the following sections give you guidance and patterns how to solve this properly.

+
+
+
+
Structuring your data
+
+

For all your business objects (entities) that have to be secured regarding to data permissions we recommend that you create a separate interface that provides access to the relevant data required to decide about the permission. Here is a simple example:

+
+
+
+
public interface SecurityDataPermissionCountry {
+
+  /**
+   * @return the 2-letter ISO code of the country this object is associated with. Users need
+   *         a data-permission for this country in order to read and write this object.
+   */
+  String getCountry();
+}
+
+
+
+

Now related business objects (entities) can implement this interface. Often such data-permissions have to be applied to an entire object-hierarchy. For security reasons we recommend that also all child-objects implement this interface. For performance reasons we recommend that the child-objects redundantly store the data-permission properties (such as country in the example above) and this gets simply propagated from the parent, when a child object is created.

+
+
+
+
Permissions for processing data
+
+

When saving or processing objects with a data-permission, we recommend to provide dedicated methods to verify the permission in an abstract base-class such as AbstractUc and simply call this explicitly from your business code. This makes it easy to understand and debug the code. Here is a simple example:

+
+
+
+
protected void verifyPermission(SecurityDataPermissionCountry entity) throws AccessDeniedException;
+
+
+
+Beware of AOP +
+

For simple but cross-cutting data-permissions you may also use AOP. This leads to programming aspects that reflectively scan method arguments and magically decide what to do. Be aware that this quickly gets tricky:

+
+
+
    +
  • +

    What if multiple of your method arguments have data-permissions (e.g. implement SecurityDataPermission*)?

    +
  • +
  • +

    What if the object to authorize is only provided as reference (e.g. Long or IdRef) and only loaded and processed inside the implementation where the AOP aspect does not apply?

    +
  • +
  • +

    How to express advanced data-permissions in annotations?

    +
  • +
+
+
+

What we have learned is that annotations like @PreAuthorize from spring-security easily lead to the "programming in string literals" anti-pattern. We strongly discourage to use this anti-pattern. In such case writing your own verifyPermission methods that you manually call in the right places of your business-logic is much better to understand, debug and maintain.

+
+
+
+
+
Permissions for reading data
+
+

When it comes to restrictions on the data to read it becomes even more tricky. In the context of a user only entities shall be loaded from the database he is permitted to read. This is simple for loading a single entity (e.g. by its ID) as you can load it and then if not permitted throw an exception to secure your code. But what if the user is performing a search query to find many entities? For performance reasons we should only find data the user is permitted to read and filter all the rest already via the database query. But what if this is not a requirement for a single query but needs to be applied cross-cutting to tons of queries? Therefore we have the following pattern that solves your problem:

+
+
+

For each data-permission attribute (or set of such) we create an abstract base entity:

+
+
+
+
@MappedSuperclass
+@EntityListeners(PermissionCheckListener.class)
+@FilterDef(name = "country", parameters = {@ParamDef(name = "countries", type = "string")})
+@Filter(name = "country", condition = "country in (:countries)")
+public abstract class SecurityDataPermissionCountryEntity extends ApplicationPersistenceEntity
+    implements SecurityDataPermissionCountry {
+
+  private String country;
+
+  @Override
+  public String getCountry() {
+    return this.country;
+  }
+
+  public void setCountry(String country) {
+    this.country = country;
+  }
+}
+
+
+
+

There are some special hibernate annotations @EntityListeners, @FilterDef, and @Filter used here allowing to apply a filter on the country for any (non-native) query performed by hibernate. The entity listener may look like this:

+
+
+
+
public class PermissionCheckListener {
+
+  @PostLoad
+  public void read(SecurityDataPermissionCountryEntity entity) {
+    PermissionChecker.getInstance().requireReadPermission(entity);
+  }
+
+  @PrePersist
+  @PreUpdate
+  public void write(SecurityDataPermissionCountryEntity entity) {
+    PermissionChecker.getInstance().requireWritePermission(entity);
+  }
+}
+
+
+
+

This will ensure that hibernate implicitly will call these checks for every such entity when it is read from or written to the database. Further to avoid reading entities from the database the user is not permitted to (and ending up with exceptions), we create an AOP aspect that automatically activates the above declared hibernate filter:

+
+
+
+
@Named
+public class PermissionCheckerAdvice implements MethodBeforeAdvice {
+
+  @Inject
+  private PermissionChecker permissionChecker;
+
+  @PersistenceContext
+  private EntityManager entityManager;
+
+  @Override
+  public void before(Method method, Object[] args, Object target) {
+
+    Collection<String> permittedCountries = this.permissionChecker.getPermittedCountriesForReading();
+    if (permittedCountries != null) { // null is returned for admins that may access all countries
+      if (permittedCountries.isEmpty()) {
+        throw new AccessDeniedException("Not permitted for any country!");
+      }
+      Session session = this.entityManager.unwrap(Session.class);
+      session.enableFilter("country").setParameterList("countries", permittedCountries.toArray());
+    }
+  }
+}
+
+
+
+

Finally to apply this aspect to all Repositories (can easily be changed to DAOs) implement the following advisor:

+
+
+
+
@Named
+public class PermissionCheckerAdvisor implements PointcutAdvisor, Pointcut, ClassFilter, MethodMatcher {
+
+  @Inject
+  private PermissionCheckerAdvice advice;
+
+  @Override
+  public Advice getAdvice() {
+    return this.advice;
+  }
+
+  @Override
+  public boolean isPerInstance() {
+    return false;
+  }
+
+  @Override
+  public Pointcut getPointcut() {
+    return this;
+  }
+
+  @Override
+  public ClassFilter getClassFilter() {
+    return this;
+  }
+
+  @Override
+  public MethodMatcher getMethodMatcher() {
+    return this;
+  }
+
+  @Override
+  public boolean matches(Method method, Class<?> targetClass) {
+    return true; // apply to all methods
+  }
+
+  @Override
+  public boolean isRuntime() {
+    return false;
+  }
+
+  @Override
+  public boolean matches(Method method, Class<?> targetClass, Object... args) {
+    throw new IllegalStateException("isRuntime()==false");
+  }
+
+  @Override
+  public boolean matches(Class<?> clazz) {
+    // when using DAOs simply change to some class like ApplicationDao
+    return DefaultRepository.class.isAssignableFrom(clazz);
+  }
+}
+
+
+
+
+
Managing and granting the data-permissions
+
+

Following our authorization guide we can simply create a permission for each country. We might simply reserve a prefix (as virtual «app-id») for each data-permission to allow granting data-permissions to end-users across all applications of the IT landscape. In our example we could create access controls country.DE, country.US, country.ES, etc. and assign those to the users. The method permissionChecker.getPermittedCountriesForReading() would then scan for these access controls and only return the 2-letter country code from it.

+
+
+ + + + + +
+ + +Before you make your decisions how to design your access controls please clarify the following questions: +
+
+
+
    +
  • +

    Do you need to separate data-permissions independent of the functional permissions? E.g. may it be required to express that a user can read data from the countries ES and PL but is only permitted to modify data from PL? In such case a single assignment of "country-permissions" to users is insufficient.

    +
  • +
  • +

    Do you want to grant data-permissions individually for each application (higher flexibility and complexity) or for the entire application landscape (simplicity, better maintenance for administrators)? In case of the first approach you would rather have access controls like app1.country.GB and app2.country.GB.

    +
  • +
  • +

    Do your data-permissions depend on objects that can be created dynamically inside your application?

    +
  • +
  • +

    If you want to grant data-permissions on other business objects (entities), how do you want to reference them (primary keys, business keys, etc.)? What reference is most stable? Which is most readable?

    +
  • +
+
+
+ +
+
+
+
JWT
+
+

JWT (JSON Web Token) is an open standard (see RFC 7519) for creating JSON based access tokens that assert some number of claims. +With an IT landscape divided into multiple smaller apps you want to avoid coupling all those apps or services tightly with your IAM (Identity & Access Management). +Instead your apps simply expect a JWT as bearer-token in the Authorization HTTP header field. +All they need to do for authentication is validating this JWT. +The actual authentication is done centrally by an access system (IAM) that authors those JWTs. +Therefore we recommend to use strong asymmetric cryptography to sign the JWT when it is authored. +Create a keypair per environment and keep the private key as a secret only known to the access system authoring the JWTs. +Your apps only need to know the public key in order to validate the JWT. +Any request without a JWT or with an invalid JWT will be rejected (with status code 401).

+
+
+

When using spring check the JWT Spring-Starter. +For quarkus follow Using JWT RBAC.

+
+
+ +
+
+
Cross-site request forgery (CSRF)
+
+

CSRF is a type of malicious exploit of a web application that allows an attacker to induce users to perform actions that they do not intend to perform.

+
+
+
+csrf +
+
+
+

More details about csrf can be found at https://owasp.org/www-community/attacks/csrf.

+
+
+
+
Secure devon4j server against CSRF
+
+

In case your devon4j server application is not accessed by browsers or the web-client is using JWT based authentication, you are already safe from CSRF. +However, if your application is accessed from a browser and you are using form based authentication (with session cookie) or basic authentication, you need to enable CSRF protection. +This guide will tell you how to do this.

+
+
+
Dependency
+
+

To secure your devon4j application against CSRF attacks, you only need to add the following dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-security-csrf</artifactId>
+</dependency>
+
+
+
+

Starting with devon4j version 2020.12.001 application template, this is all you need to do. +However, if you have started from an older version or you want to understand more, please read on.

+
+
+
+
Pluggable web-security
+
+

To enable pluggable security via devon4j security starters you need to apply WebSecurityConfigurer to your BaseWebSecurityConfig (your class extending spring-boot’s WebSecurityConfigurerAdapter) as following:

+
+
+
+
  @Inject
+  private WebSecurityConfigurer webSecurityConfigurer;
+
+  public void configure(HttpSecurity http) throws Exception {
+    // disable CSRF protection by default, use csrf starter to override.
+	  http = http.csrf().disable();
+	  // apply pluggable web-security from devon4j security starters
+    http = this.webSecurityConfigurer.configure(http);
+    .....
+  }
+
+
+
+
+
Custom CsrfRequestMatcher
+
+

If you want to customize which HTTP requests will require a CSRF token, you can implement your own CsrfRequestMatcher and provide it to the devon4j CSRF protection via qualified injection as following:

+
+
+
+
@Named("CsrfRequestMatcher")
+public class CsrfRequestMatcher implements RequestMatcher {
+  @Override
+  public boolean matches(HttpServletRequest request) {
+    .....
+  }
+}
+
+
+
+

Please note that the exact name (@Named("CsrfRequestMatcher")) is required here to ensure your custom implementation will be injected properly.

+
+
+
+
CsrfRestService
+
+

With the devon4j-starter-security-csrf the CsrfRestService gets integrated into your app. +It provides an operation to get the CSRF token via an HTTP GET request. +The URL path to retrieve this CSRF token is services/rest/csrf/v1/token. +As a result you will get a JSON like the following:

+
+
+
+
{
+  "token":"3a8a5f66-c9eb-4494-81e1-7cc58bc3a519",
+  "parameterName":"_csrf",
+  "headerName":"X-CSRF-TOKEN"
+}
+
+
+
+

The token value is a strong random value that will differ for each user session. +It has to be sent with subsequent HTTP requests (when method is other than GET) in the specified header (X-CSRF-TOKEN).

+
+
+
+
How it works
+
+

Putting it all together, a browser client should call the CsrfRestService after successful login to receive the current CSRF token. +With every subsequent HTTP request (other than GET) the client has to send this token in the according HTTP header. +Otherwise the server will reject the request to prevent CSRF attacks. +Note that an attacker might make your browser perform HTTP requests towards your devon4j application backend via <image> elements, <iframes>, etc. +Your browser will then still include your session cookie if you are already logged in (e.g. from another tab). +However, if the attacker wants to trigger DELETE or POST requests to make your browser perform changes in the application (delete or update data, etc.) this will fail without the CSRF token. +The attacker may make your browser retrieve the CSRF token but will not be able to retrieve the result and put it into the header of other requests due to the same-origin-policy. +This way your application will be secured against CSRF attacks.

+
+
+
+
+
Configure devon4ng client for CSRF
+
+

Devon4ng client configuration for CSRF is described here

+
+
+ +
+
+
Aspect Oriented Programming (AOP)
+
+

AOP is a powerful feature for cross-cutting concerns. However, if used extensive and for the wrong things an application can get unmaintainable. Therefore we give you the best practices where and how to use AOP properly.

+
+
+
+
AOP Key Principles
+
+

We follow these principles:

+
+
+
    +
  • +

    We use spring AOP based on dynamic proxies (and fallback to cglib).

    +
  • +
  • +

    We avoid AspectJ and other mighty and complex AOP frameworks whenever possible

    +
  • +
  • +

    We only use AOP where we consider it as necessary (see below).

    +
  • +
+
+
+
+
AOP Usage
+
+

We recommend to use AOP with care but we consider it established for the following cross cutting concerns:

+
+
+ +
+
+
+
AOP Debugging
+
+

When using AOP with dynamic proxies the debugging of your code can get nasty. As you can see by the red boxes in the call stack in the debugger there is a lot of magic happening while you often just want to step directly into the implementation skipping all the AOP clutter. When using Eclipse this can easily be achieved by enabling step filters. Therefore you have to enable the feature in the Eclipse tool bar (highlighted in red).

+
+
+
+AOP debugging +
+
+
+

In order to properly make this work you need to ensure that the step filters are properly configured:

+
+
+
+Step Filter Configuration +
+
+
+

Ensure you have at least the following step-filters configured and active:

+
+
+
+
ch.qos.logback.*
+com.devonfw.module.security.*
+java.lang.reflect.*
+java.security.*
+javax.persistence.*
+org.apache.commons.logging.*
+org.apache.cxf.jaxrs.client.*
+org.apache.tomcat.*
+org.h2.*
+org.springframework.*
+
+
+
+ +
+
+
Exception Handling
+ +
+
+
Exception Principles
+
+

For exceptions we follow these principles:

+
+
+
    +
  • +

    We only use exceptions for exceptional situations and not for programming control flows, etc. Creating an exception in Java is expensive and hence you should not do it just for testing if something is present, valid or permitted. In the latter case design your API to return this as a regular result.

    +
  • +
  • +

    We use unchecked exceptions (RuntimeException) [2]

    +
  • +
  • +

    We distinguish internal exceptions and user exceptions:

    +
    +
      +
    • +

      Internal exceptions have technical reasons. For unexpected and exotic situations it is sufficient to throw existing exceptions such as IllegalStateException. For common scenarios a own exception class is reasonable.

      +
    • +
    • +

      User exceptions contain a message explaining the problem for end users. Therefore we always define our own exception classes with a clear, brief but detailed message.

      +
    • +
    +
    +
  • +
  • +

    Our own exceptions derive from an exception base class supporting

    + +
  • +
+
+
+

All this is offered by mmm-util-core that we propose as solution.

+
+
+
+
Exception Example
+
+

Here is an exception class from our sample application:

+
+
+
+
public class IllegalEntityStateException extends ApplicationBusinessException {
+
+  private static final long serialVersionUID = 1L;
+
+  public IllegalEntityStateException(Object entity, Object state) {
+
+    this((Throwable) null, entity, state);
+  }
+
+
+  public IllegalEntityStateException(Object entity, Object currentState, Object newState) {
+
+    this(null, entity, currentState, newState);
+  }
+
+  public IllegalEntityStateException(Throwable cause, Object entity, Object state) {
+
+    super(cause, createBundle(NlsBundleApplicationRoot.class).errorIllegalEntityState(entity, state));
+  }
+
+  public IllegalEntityStateException(Throwable cause, Object entity, Object currentState, Object newState) {
+
+    super(cause, createBundle(NlsBundleApplicationRoot.class).errorIllegalEntityStateChange(entity, currentState,
+        newState));
+  }
+
+}
+
+
+
+

The message templates are defined in the interface NlsBundleRestaurantRoot as following:

+
+
+
+
public interface NlsBundleApplicationRoot extends NlsBundle {
+
+
+  @NlsBundleMessage("The entity {entity} is in state {state}!")
+  NlsMessage errorIllegalEntityState(@Named("entity") Object entity, @Named("state") Object state);
+
+
+  @NlsBundleMessage("The entity {entity} in state {currentState} can not be changed to state {newState}!")
+  NlsMessage errorIllegalEntityStateChange(@Named("entity") Object entity, @Named("currentState") Object currentState,
+      @Named("newState") Object newState);
+
+
+  @NlsBundleMessage("The property {property} of object {object} can not be changed!")
+  NlsMessage errorIllegalPropertyChange(@Named("object") Object object, @Named("property") Object property);
+
+  @NlsBundleMessage("There is currently no user logged in")
+  NlsMessage errorNoActiveUser();
+
+
+
+
+
Handling Exceptions
+
+

For catching and handling exceptions we follow these rules:

+
+
+
    +
  • +

    We do not catch exceptions just to wrap or to re-throw them.

    +
  • +
  • +

    If we catch an exception and throw a new one, we always have to provide the original exception as cause to the constructor of the new exception.

    +
  • +
  • +

    At the entry points of the application (e.g. a service operation) we have to catch and handle all throwables. This is done via the exception-facade-pattern via an explicit facade or aspect. The devon4j-rest module already provides ready-to-use implementations for this such as RestServiceExceptionFacade. The exception facade has to …​

    +
    +
      +
    • +

      log all errors (user errors on info and technical errors on error level)

      +
    • +
    • +

      ensure the entire exception is passed to the logger (not only the message) so that the logger can capture the entire stacktrace and the root cause is not lost.

      +
    • +
    • +

      convert the error to a result appropriable for the client and secure for Sensitive Data Exposure. Especially for security exceptions only a generic security error code or message may be revealed but the details shall only be logged but not be exposed to the client. All internal exceptions are converted to a generic error with a message like:

      +
      +
      +
      +

      An unexpected technical error has occurred. We apologize any inconvenience. Please try again later.

      +
      +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
Common Errors
+
+

The following errors may occur in any devon application:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 16. Common Exceptions
CodeMessageLink

TechnicalError

An unexpected error has occurred! We apologize any inconvenience. Please try again later.

TechnicalErrorUserException.java

ServiceInvoke

«original message of the cause»

ServiceInvocationFailedException.java

+
+ +
+
+
Internationalization
+
+

Internationalization (I18N) is about writing code independent from locale-specific information. +For I18N of text messages we are suggesting +mmm native-language-support.

+
+
+

In devonfw we have developed a solution to manage text internationalization. devonfw solution comes into two aspects:

+
+
+
    +
  • +

    Bind locale information to the user.

    +
  • +
  • +

    Get the messages in the current user locale.

    +
  • +
+
+
+
+
Binding locale information to the user
+
+

We have defined two different points to bind locale information to the user, depending on whether the user is authenticated or not.

+
+
+
    +
  • +

    User not authenticated: devonfw intercepts the unsecured request and extracts the locale from it. At first, we try to extract a language parameter from the request and if that is not possible, we extract the locale from the `Accept-Language` header.

    +
  • +
  • +

    User authenticated: during the login process, application developers are responsible for filling the language parameter in the UserProfile class. This language parameter could be obtained from DB, LDAP, request, etc. In the devonfw sample we get the locale information from the database.

    +
  • +
+
+
+

This image shows the entire process:

+
+
+
+Internationalization +
+
+
+
+
Getting internationalized messages
+
+

devonfw has a bean that manages i18n message resolution, the ApplicationLocaleResolver. This bean is responsible for getting the current user, extracting locale information from it and reading the correct properties file to get the message.

+
+
+

The i18n properties file must be called ApplicationMessages_la_CO.properties where la=language and CO=country. This is an example of an i18n properties file for the English language to translate devonfw sample user roles:

+
+
+

ApplicationMessages_en_US.properties

+
+
+
+
admin=Admin
+
+
+
+

You should define an ApplicationMessages_la_CO.properties file for every language that your application needs.

+
+
+

The ApplicationLocaleResolver bean is injected in the AbstractComponentFacade class, so this bean is available in the logic layer and you only need the following code to get an internationalized message:

+
+
+
+
String msg = getApplicationLocaleResolver().getMessage("mymessage");
+
+
+
+ +
+
+
Service Client
+
+

This guide is about consuming (calling) services from other applications (micro-services). For providing services, see the Service-Layer Guide. Services can be consumed by the client or the server. As the client is typically not written in Java, you should consult the according guide for your client technology. In case you want to call a service within your Java code, this guide is the right place to get help.

+
+
+
+
Motivation
+
+

Various solutions already exist for calling services, such as RestTemplate from spring or the JAX-RS client API. Furthermore, each and every service framework offers its own API as well. These solutions might be suitable for very small and simple projects (with one or two such invocations). However, with the trend of microservices, the invocation of a service becomes a very common use-case that occurs all over the place. You typically need a solution that is very easy to use but supports flexible configuration, adding headers for authentication, mapping of errors from the server, logging success/errors with duration for performance analysis, support for synchronous and asynchronous invocations, etc. This is exactly what this devon4j service-client solution brings to you.

+
+
+
+
Usage
+
+

Spring

+
+
+

For Spring, follow the Spring rest-client guide.

+
+
+

Quarkus

+
+
+

For Quarkus, we recommend to follow the official Quarkus rest-client guide

+
+
+ +
+
+
Testing
+ +
+
+
General best practices
+
+

For testing please follow our general best practices:

+
+
+
    +
  • +

    Tests should have a clear goal that should also be documented.

    +
  • +
  • +

    Tests have to be classified into different integration levels.

    +
  • +
  • +

    Tests should follow a clear naming convention.

    +
  • +
  • +

    Automated tests need to properly assert the result of the tested operation(s) in a reliable way. E.g. avoid stuff like assertThat(service.getAllEntities()).hasSize(42) or even worse tests that have no assertion at all.

    +
  • +
  • +

    Tests need to be independent of each other. Never write test-cases or tests (in Java @Test methods) that depend on another test to be executed before.

    +
  • +
  • +

    Use AssertJ to write good readable and maintainable tests that also provide valuable feedback in case a test fails. Do not use legacy JUnit methods like assertEquals anymore!

    +
  • +
  • +

    For easy understanding divide your test in three commented sections:

    +
    +
      +
    • +

      //given

      +
    • +
    • +

      //when

      +
    • +
    • +

      //then

      +
    • +
    +
    +
  • +
  • +

    Plan your tests and test data management properly before implementing.

    +
  • +
  • +

    Instead of having a too strong focus on test coverage better ensure you have covered your critical core functionality properly and review the code including tests.

    +
  • +
  • +

    Test code shall NOT be seen as second class code. You shall consider design, architecture and code-style also for your test code but do not over-engineer it.

    +
  • +
  • +

    Test automation is good but should be considered in relation to cost per use. Creating full coverage via automated system tests can cause a massive amount of test-code that can turn out as a huge maintenance hell. Always consider all aspects including product life-cycle, criticality of use-cases to test, and variability of the aspect to test (e.g. UI, test-data).

    +
  • +
  • +

    Use continuous integration and establish that the entire team wants to have clean builds and running tests.

    +
  • +
  • +

    Prefer delegation over inheritance for cross-cutting testing functionality. Good places to put this kind of code can be realized and reused via the JUnit @Rule mechanism.

    +
  • +
+
+
+
+
Test Automation Technology Stack
+
+

For test automation we use JUnit. However, we are strictly doing all assertions with AssertJ. For mocking we use mockito. +In order to mock remote connections we use wiremock. +For testing entire components or sub-systems we recommend to use spring-boot-starter-test as lightweight and fast testing infrastructure that is already shipped with devon4j-test.

+
+
+

In case you have to use a full blown JEE application server, we recommend to use arquillian. To get started with arquillian, look here.

+
+
+
+
Test Doubles
+
+

We use test doubles as a generic term for mocks, stubs, fakes, dummies, or spies to avoid confusion. Here is a short summary from stubs VS mocks:

+
+
+
    +
  • +

    Dummy objects specifying no logic at all. May declare data in a POJO style to be used as boilerplate code for parameter lists or even influence the control flow towards the test’s needs.

    +
  • +
  • +

    Fake objects actually have working implementations, but usually take some shortcut which makes them not suitable for production (an in memory database is a good example).

    +
  • +
  • +

    Stubs provide canned answers to calls made during the test, usually not responding at all to anything outside what’s programmed in for the test. Stubs may also record information about calls, such as an email gateway stub that remembers the messages it 'sent', or maybe only how many messages it 'sent'.

    +
  • +
  • +

    Mocks are objects pre-programmed with expectations, which form a specification of the calls they are expected to receive.

    +
  • +
+
+
+

We try to give some examples, which should make it somehow clearer:

+
+
+
Stubs
+
+

Best Practices for applications:

+
+
+
    +
  • +

    A good way to replace small to medium large boundary systems, whose impact (e.g. latency) should be ignored during load and performance tests of the application under development.

    +
  • +
  • +

    As a stub implementation will rely on state-based verification, there is the threat that test developers will partially reimplement the state transitions based on the replaced code. This will immediately lead to a black maintenance hole, so better use mocks to assure the certain behavior on the interface level.

    +
  • +
  • +

    Do NOT use stubs as the basis of a large amount of test cases as, due to the state-based verification of stubs, test developers will enrich the stub implementation until it becomes a large monster with its own hunger for maintenance efforts.

    +
  • +
+
+
+
+
Mocks
+
+

Best Practices for applications:

+
+
+
    +
  • +

    Replace not-needed dependencies of your system-under-test (SUT) to minimize the application context to start of your component framework.

    +
  • +
  • +

    Replace dependencies of your SUT to impact the control flow under test without establishing all the context parameters needed to match the control flow.

    +
  • +
  • +

    Remember: Not everything has to be mocked! Especially on lower levels of tests like isolated module tests you can be betrayed into a mocking delusion, where you end up in a hundred lines of code mocking the whole context and five lines executing the test and verifying the mocks behavior. Always keep in mind the benefit-cost ratio, when implementing tests using mocks.

    +
  • +
+
+
+
+
Wiremock
+
+

If you need to mock remote connections such as HTTP-Servers, wiremock offers easy to use functionality. For a full description see the homepage or the github repository. Wiremock can be used either as a JUnit Rule, in Java outside of JUnit or as a standalone process. The mocked server can be configured to respond to specific requests in a given way via a fluent Java API, JSON files and JSON over HTTP. An example as an integration to JUnit can look as follows.

+
+
+
+
import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig;
+import com.github.tomakehurst.wiremock.junit.WireMockRule;
+
+public class WireMockOfferImport{
+
+  @Rule
+  public WireMockRule mockServer = new WireMockRule(wireMockConfig().dynamicPort());
+
+  @Test
+  public void requestDataTest() throws Exception {
+  int port = this.mockServer.port();
+  ...}
+
+
+
+

This creates a server on a randomly chosen free port on the running machine. You can also specify the port to be used if wanted. Other than that there are several options to further configure the server. This includes HTTPs, proxy settings, file locations, logging and extensions.

+
+
+
+
  @Test
+  public void requestDataTest() throws Exception {
+      this.mockServer.stubFor(get(urlEqualTo("/new/offers")).withHeader("Accept", equalTo("application/json"))
+      .withHeader("Authorization", containing("Basic")).willReturn(aResponse().withStatus(200).withFixedDelay(1000)
+      .withHeader("Content-Type", "application/json").withBodyFile("/wireMockTest/jsonBodyFile.json")));
+  }
+
+
+
+

This will stub the URL localhost:port/new/offers to respond with a status 200 message containing a header (Content-Type: application/json) and a body with content given in jsonBodyFile.json if the request matches several conditions. +It has to be a GET request to ../new/offers with the two given header properties.

+
+
+

Note that by default files are located in src/test/resources/__files/. When using only one WireMock server one can omit the this.mockServer before the stubFor call (static method). +You can also add a fixed delay to the response or processing delay with WireMock.addRequestProcessingDelay(time) in order to test for timeouts.

+
+
+

WireMock can also respond with different corrupted messages to simulate faulty behaviour.

+
+
+
+
@Test(expected = ResourceAccessException.class)
+public void faultTest() {
+
+    this.mockServer.stubFor(get(urlEqualTo("/fault")).willReturn(aResponse()
+    .withFault(Fault.MALFORMED_RESPONSE_CHUNK)));
+...}
+
+
+
+

A GET request to ../fault returns an OK status header, then garbage, and then closes the connection.

+
+
+
+
+
Integration Levels
+
+

There are many discussions about the right level of integration for test automation. Sometimes it is better to focus on small, isolated modules of the system - whatever a "module" may be. In other cases it makes more sense to test integrated groups of modules. Because there is no universal answer to this question, devonfw only defines a common terminology for what could be tested. Each project must make its own decision where to put the focus of test automation. There is no worldwide accepted terminology for the integration levels of testing. In general we consider ISTQB. However, with a technical focus on test automation we want to get more precise.

+
+
+

The following picture shows a simplified view of an application based on the devonfw reference architecture. We define four integration levels that are explained in detail below. +The boxes in the picture contain parenthesized numbers. These numbers depict the lowest integration level, a box belongs to. Higher integration levels also contain all boxes of lower integration levels. When writing tests for a given integration level, related boxes with a lower integration level must be replaced by test doubles or drivers.

+
+
+
+Integration Levels +
+
+
+

The main difference between the integration levels is the amount of infrastructure needed to test them. The more infrastructure you need, the more bugs you will find, but the more instable and the slower your tests will be. So each project has to make a trade-off between pros and contras of including much infrastructure in tests and has to select the integration levels that fit best to the project.

+
+
+

Consider, that more infrastructure does not automatically lead to a better bug-detection. There may be bugs in your software that are masked by bugs in the infrastructure. The best way to find those bugs is to test with very few infrastructure.

+
+
+

External systems do not belong to any of the integration levels defined here. devonfw does not recommend involving real external systems in test automation. This means, they have to be replaced by test doubles in automated tests. An exception may be external systems that are fully under control of the own development team.

+
+
+

The following chapters describe the four integration levels.

+
+
+
Level 1 Module Test
+
+

The goal of an isolated module test is to provide fast feedback to the developer. Consequently, isolated module tests must not have any interaction with the client, the database, the file system, the network, etc.

+
+
+

An isolated module test is testing a single class or at least a small set of classes in isolation. If such classes depend on other components or external resources, etc. these shall be replaced with a test double.

+
+
+
+
public class MyClassTest extends ModuleTest {
+
+  @Test
+  public void testMyClass() {
+
+    // given
+    MyClass myClass = new MyClass();
+    // when
+    String value = myClass.doSomething();
+    // then
+    assertThat(value).isEqualTo("expected value");
+  }
+
+}
+
+
+
+

For an advanced example see here.

+
+
+
+
Level 2 Component Test
+
+

A component test aims to test components or component parts as a unit. +These tests typically run with a (light-weight) infrastructure such as spring-boot-starter-test and can access resources such as a database (e.g. for DAO tests). +Further, no remote communication is intended here. Access to external systems shall be replaced by a test double.

+
+
+

With devon4j and spring you can write a component-test as easy as illustrated in the following example:

+
+
+
+
@SpringBootTest(classes = { MySpringBootApp.class }, webEnvironment = WebEnvironment.NONE)
+public class UcFindCountryTest extends ComponentTest {
+  @Inject
+  private UcFindCountry ucFindCountry;
+
+  @Test
+  public void testFindCountry() {
+
+    // given
+    String countryCode = "de";
+
+    // when
+    TestUtil.login("user", MyAccessControlConfig.FIND_COUNTRY);
+    CountryEto country = this.ucFindCountry.findCountry(countryCode);
+
+    // then
+    assertThat(country).isNotNull();
+    assertThat(country.getCountryCode()).isEqualTo(countryCode);
+    assertThat(country.getName()).isEqualTo("Germany");
+  }
+}
+
+
+
+

This test will start the entire spring-context of your app (MySpringBootApp). Within the test spring will inject according spring-beans into all your fields annotated with @Inject. In the test methods you can use these spring-beans and perform your actual tests. This pattern can be used for testing DAOs/Repositories, Use-Cases, or any other spring-bean with its entire configuration including database and transactions.

+
+
+

When you are testing use-cases your authorization will also be in place. Therefore, you have to simulate a logon in advance, which is done via the login method in the above example. The test-infrastructure will automatically do a logout for you after each test method in doTearDown.

+
+
+
+
Level 3 Subsystem Test
+
+

A subsystem test runs against the external interfaces (e.g. HTTP service) of the integrated subsystem. Subsystem tests of the client subsystem are described in the devon4ng testing guide. In devon4j the server (JEE application) is the subsystem under test. The tests act as a client (e.g. service consumer) and the server has to be integrated and started in a container.

+
+
+

With devon4j and spring you can write a subsystem-test as easy as illustrated in the following example:

+
+
+
+
@SpringBootTest(classes = { MySpringBootApp.class }, webEnvironment = WebEnvironment.RANDOM_PORT)
+public class CountryRestServiceTest extends SubsystemTest {
+
+  @Inject
+  private ServiceClientFactory serviceClientFactory;
+
+  @Test
+  public void testFindCountry() {
+
+    // given
+    String countryCode = "de";
+
+    // when
+    CountryRestService service = this.serviceClientFactory.create(CountryRestService.class);
+    CountryEto country = service.findCountry(countryCode);
+
+    // then
+    assertThat(country).isNotNull();
+    assertThat(country.getCountryCode()).isEqualTo(countryCode);
+    assertThat(country.getName()).isEqualTo("Germany");
+  }
+}
+
+
+
+

Even though not obvious on the first look this test will start your entire application as a server on a free random port (so that it works in CI with parallel builds for different branches) and tests the invocation of a (REST) service including (un)marshalling of data (e.g. as JSON) and transport via HTTP (all in the invocation of the findCountry method).

+
+
+

Do not confuse a subsystem test with a system integration test. A system integration test validates the interaction of several systems where we do not recommend test automation.

+
+
+
+
Level 4 System Test
+
+

A system test has the goal to test the system as a whole against its official interfaces such as its UI or batches. The system itself runs as a separate process in a way close to a regular deployment. Only external systems are simulated by test doubles.

+
+
+

The devonfw only gives advice for automated system tests (TODO see allure testing framework). In nearly every project there must be manual system tests, too. These manual system tests are out of scope here.

+
+
+
+
Classifying Integration-Levels
+
+

devon4j defines Category-Interfaces that shall be used as JUnit Categories. +Also devon4j provides abstract base classes that you may extend in your test-cases if you like.

+
+
+

devon4j further pre-configures the maven build to only run integration levels 1-2 by default (e.g. for fast feedback in continuous integration). It offers the profiles subsystemtest (1-3) and systemtest (1-4). In your nightly build you can simply add -Psystemtest to run all tests.

+
+
+
+
+
Implementation
+
+

This section introduces how to implement tests on the different levels with the given devonfw infrastructure and the proposed frameworks.

+
+
+
Module Test
+
+

In devon4j you can extend the abstract class ModuleTest to basically get access to assertions. In order to test classes embedded in dependencies and external services one needs to provide mocks for that. As the technology stack recommends we use the Mockito framework to offer this functionality. The following example shows how to implement Mockito into a JUnit test.

+
+
+
+
import static org.mockito.Mockito.when;
+import static org.mockito.Mockito.mock;
+...
+
+public class StaffmanagementImplTest extends ModuleTest {
+  @Rule
+  public MockitoRule rule = MockitoJUnit.rule();
+
+  @Test
+  public void testFindStaffMember() {
+  ...}
+}
+
+
+
+

Note that the test class does not use the @SpringApplicationConfiguration annotation. In a module test one does not use the whole application. +The JUnit rule is the best solution to use in order to get all needed functionality of Mockito. Static imports are a convenient option to enhance readability within Mockito tests. +You can define mocks with the @Mock annotation or the mock(*.class) call. To inject the mocked objects into your class under test you can use the @InjectMocks annotation. This automatically uses the setters of StaffmanagementImpl to inject the defined mocks into the class under test (CUT) when there is a setter available. In this case the beanMapper and the staffMemberDao are injected. Of course it is possible to do this manually if you need more control.

+
+
+
+
  @Mock
+  private BeanMapper beanMapper;
+  @Mock
+  private StaffMemberEntity staffMemberEntity;
+  @Mock
+  private StaffMemberEto staffMemberEto;
+  @Mock
+  private StaffMemberDao staffMemberDao;
+  @InjectMocks
+  StaffmanagementImpl staffmanagementImpl = new StaffmanagementImpl();
+
+
+
+

The mocked objects do not provide any functionality at the time being. To define what happens on a method call on a mocked dependency in the CUT one can use when(condition).thenReturn(result). In this case we want to test findStaffMember(Long id) in the StaffmanagementImpl.java.

+
+
+
+
public StaffMemberEto findStaffMember(Long id) {
+  return getBeanMapper().map(getStaffMemberDao().find(id), StaffMemberEto.class);
+}
+
+
+
+

In this simple example one has to stub two calls on the CUT as you can see below. For example the method call of the CUT staffMemberDao.find(id) is stubbed for returning a mock object staffMemberEntity that is also defined as mock.

+
+
+
+
Subsystem Test
+
+

devon4j provides a simple test infrastructure to aid with the implementation of subsystem tests.

+
+
+
+
//given
+long id = 1L;
+Class<StaffMemberEto> targetClass = StaffMemberEto.class;
+when(this.staffMemberDao.find(id)).thenReturn(this.staffMemberEntity);
+when(this.beanMapper.map(this.staffMemberEntity, targetClass)).thenReturn(this.staffMemberEto);
+
+//when
+StaffMemberEto resultEto = this.staffmanagementImpl.findStaffMember(id);
+
+//then
+assertThat(resultEto).isNotNull();
+assertThat(resultEto).isEqualTo(this.staffMemberEto);
+
+
+
+

After the test method call one can verify the expected results. Mockito can check whether a mocked method call was indeed called. This can be done using Mockito verify. Note that it does not add any value if you check for method calls that are needed to reach the asserted result anyway. Call verification can be useful e.g. when you want to assure that statistics are written out without actually testing them.

+
+
+
+
+
Regression testing
+
+

When it comes to complex output (even binary) that you want to regression test by comparing with an expected result, you should consider Approval Tests using ApprovalTests.Java. +If applied for the right problems, it can be very helpful.

+
+
+
+
Deployment Pipeline
+
+

A deployment pipeline is a semi-automated process that gets software-changes from version control into production. It contains several validation steps, e.g. automated tests of all integration levels. +Because devon4j should fit to different project types - from agile to waterfall - it does not define a standard deployment pipeline. But we recommend to define such a deployment pipeline explicitly for each project and to find the right place in it for each type of test.

+
+
+

For that purpose, it is advisable to have a fast-running test suite that gives as much confidence as possible without needing too much time and too much infrastructure. This test suite should run in an early stage of your deployment pipeline. Maybe the developer should run it even before he/she checks in the code. Usually lower integration levels are more suitable for this test suite than higher integration levels.

+
+
+

Note, that the deployment pipeline always should contain manual validation steps, at least manual acceptance testing. There also may be manual validation steps that have to be executed for special changes only, e.g. usability testing. Management and execution processes of those manual validation steps are currently not in the scope of devonfw.

+
+
+
+
Test Coverage
+
+

We are using tools (SonarQube/Jacoco) to measure the coverage of the tests. Please always keep in mind that the only reliable message of a code coverage of X% is that (100-X)% of the code is entirely untested. It does not say anything about the quality of the tests or the software though it often relates to it.

+
+
+
+
Test Configuration
+
+

This section covers test configuration in general without focusing on integration levels as in the first chapter.

+
+
+
Configure Test Specific Beans
+
+

Sometimes it can become handy to provide other or differently configured bean implementations via CDI than those available in production. For example, when creating beans using @Bean-annotated methods they are usually configured within those methods. WebSecurityBeansConfig shows an example of such methods.

+
+
+
+
@Configuration
+public class WebSecurityBeansConfig {
+  //...
+  @Bean
+  public AccessControlSchemaProvider accessControlSchemaProvider() {
+    // actually no additional configuration is shown here
+    return new AccessControlSchemaProviderImpl();
+  }
+  //...
+}
+
+
+
+

AccessControlSchemaProvider allows to programmatically access data defined in some XML file, e.g. access-control-schema.xml. Now, one can imagine that it would be helpful if AccessControlSchemaProvider would point to some other file than the default within a test class. That file could provide content that differs from the default. +The question is: how can I change the resource path of AccessControlSchemaProviderImpl within a test?

+
+
+

One very helpful solution is to use static inner classes. +Static inner classes can contain @Bean -annotated methods, and by placing them in the classes parameter in @SpringBootTest(classes = { /* place class here*/ }) annotation the beans returned by these methods are placed in the application context during test execution. Combining this feature with inheritance allows to override methods defined in other configuration classes as shown in the following listing where TempWebSecurityConfig extends WebSecurityBeansConfig. This relationship allows to override public AccessControlSchemaProvider accessControlSchemaProvider(). Here we are able to configure the instance of type AccessControlSchemaProviderImpl before returning it (and, of course, we could also have used a completely different implementation of the AccessControlSchemaProvider interface). By overriding the method the implementation of the super class is ignored, hence, only the new implementation is called at runtime. Other methods defined in WebSecurityBeansConfig which are not overridden by the subclass are still dispatched to WebSecurityBeansConfig.

+
+
+
+
//... Other testing related annotations
+@SpringBootTest(classes = { TempWebSecurityConfig.class })
+public class SomeTestClass {
+
+  public static class TempWebSecurityConfig extends WebSecurityBeansConfig {
+
+    @Override
+    @Bean
+    public AccessControlSchemaProvider accessControlSchemaProvider() {
+
+      ClassPathResource resource = new ClassPathResource(locationPrefix + "access-control-schema3.xml");
+      AccessControlSchemaProviderImpl accessControlSchemaProvider = new AccessControlSchemaProviderImpl();
+      accessControlSchemaProvider.setAccessControlSchema(resource);
+      return accessControlSchemaProvider;
+    }
+  }
+}
+
+
+
+

The following chapter of the Spring framework documentation explains the issue, but uses a slightly different way to obtain the configuration.

+
+
+
+
Test Data
+
+

It is possible to obtain test data in two different ways depending on your test’s integration level.

+
+
+
+
+
Debugging Tests
+
+

The following two sections describe two debugging approaches for tests. Tests are either run from within the IDE or from the command line using Maven.

+
+
+
Debugging with the IDE
+
+

Debugging with the IDE is as easy as always. Even if you want to execute a SubsystemTest which needs a Spring context and a server infrastructure to run properly, you just set your breakpoints and click on Debug As → JUnit Test. The test infrastructure will take care of initializing the necessary infrastructure - if everything is configured properly.

+
+
+
+
Debugging with Maven
+
+

Please refer to the following two links to find a guide for debugging tests when running them from Maven.

+
+ +
+

In essence, you first have to execute a test using the command line. Maven will halt just before the test execution and wait for your IDE to connect to the process. When receiving a connection the test will start and then pause at any breakpoint set in advance. +The first link states that tests are started through the following command:

+
+
+
+
mvn -Dmaven.surefire.debug test
+
+
+
+

Although this is correct, it will run every test class in your project and - which is time consuming and mostly unnecessary - halt before each of these tests. +To counter this problem you can simply execute a single test class through the following command (here we execute the TablemanagementRestServiceTest from the restaurant sample application):

+
+
+
+
mvn test -Dmaven.surefire.debug test -Dtest=TablemanagementRestServiceTest
+
+
+
+

It is important to notice that you first have to execute the Maven command in the according submodule, e.g. to execute the TablemanagementRestServiceTest you have first to navigate to the core module’s directory.

+
+
+ +
+
+
+
Transfer-Objects
+
+

The technical data model is defined in form of persistent entities. +However, passing persistent entities via call-by-reference across the entire application will soon cause problems:

+
+
+
    +
  • +

    Changes to a persistent entity are directly written back to the persistent store when the transaction is committed. When the entity is sent across the application, changes also tend to take place in multiple places, endangering data sovereignty and leading to inconsistency.

    +
  • +
  • +

    You want to send and receive data via services across the network and have to define what section of your data is actually transferred. If you have relations in your technical model you quickly end up loading and transferring way too much data.

    +
  • +
  • +

    Modifications to your technical data model shall not automatically have impact on your external services causing incompatibilities.

    +
  • +
+
+
+

To prevent such problems transfer-objects are used leading to a call-by-value model and decoupling changes to persistent entities.

+
+
+

In the following sections the different types of transfer-objects are explained. +You will find all according naming-conventions in the architecture-mapping

+
+
+

To structure your transfer objects, we recommend the following approaches:

+
+
+ +
+
+

Also considering the following transfer objects in specific cases:

+
+
+
+
SearchCriteriaTo
+
+

For searching we create or generate a «BusinessObject»SearchCriteriaTo representing a query to find instances of «BusinessObject».

+
+
TO
+
+

There are typically transfer-objects for data that is never persistent. +For very generic cases these just carry the suffix To.

+
+
STO
+
+

We can potentially create separate service transfer objects (STO) (if possible named «BusinessObject»Sto) to keep the service API stable and independent of the actual data-model. +However, we usually do not need this and want to keep our architecture simple. +Only create STOs if you need service versioning and support previous APIs or to provide legacy service technologies that require their own isolated data-model. +In such a case you also need beanmapping between STOs and ETOs/DTOs, which means extra effort and complexity that should be avoided.

+
+
+
+
+
+
+
Bean Mapping in devon4j-spring
+
+

We have developed a solution that uses a BeanMapper that allows to abstract from the underlying implementation. As mentioned in the general bean mapping guide, we started with Dozer a Java Bean to Java Bean mapper that recursively copies data from one object to another. Now we recommend using Orika. This guide will show an introduction to Orika and Dozer bean-mapper.

+
+
+
+
Bean-Mapper Dependency
+
+

To get access to the BeanMapper we have to use either of the below dependencies in our POM:

+
+
+
Listing 17. Orika
+
+
<dependency>
+    <groupId>com.devonfw.java.modules</groupId>
+    <artifactId>devon4j-beanmapping-orika</artifactId>
+    <version>2020.12.002</version>
+</dependency>
+
+
+
+
Listing 18. Dozer
+
+
<dependency>
+    <groupId>com.devonfw.java.modules</groupId>
+    <artifactId>devon4j-beanmapping-dozer</artifactId>
+    <version>2020.12.002</version>
+</dependency>
+
+
+
+
+
Bean-Mapper Configuration
+
+
Bean-Mapper Configuration using Dozer
+
+

The BeanMapper implementation is based on an existing open-source bean-mapping framework. +In case of Dozer the mapping is configured src/main/resources/config/app/common/dozer-mapping.xml.

+
+
+

See the my-thai-star dozer-mapping.xml as an example. +It is important that you configure all your custom datatypes as <copy-by-reference> tags and have the mapping from PersistenceEntity (ApplicationPersistenceEntity) to AbstractEto configured properly:

+
+
+
+
 <mapping type="one-way">
+    <class-a>com.devonfw.module.basic.common.api.entity.PersistenceEntity</class-a>
+    <class-b>com.devonfw.module.basic.common.api.to.AbstractEto</class-b>
+    <field custom-converter="com.devonfw.module.beanmapping.common.impl.dozer.IdentityConverter">
+      <a>this</a>
+      <b is-accessible="true">persistentEntity</b>
+    </field>
+</mapping>
+
+
+
+
+
+
Bean-Mapper Configuration using Orika
+
+

Orika with devonfw is configured by default and sets some custom mappings for GenericEntity.java to GenericEntityDto.java. To specify and customize the mappings you can create the class BeansOrikaConfig.java that extends the class BaseOrikaConfig.java from the devon4j.orika package. To register a basic mapping, register a ClassMap for the mapperFactory with your custom mapping. Watch the example below and follow the basic Orika mapping configuration guide and the Orika advanced mapping guide.

+
+
+

Register Mappings:

+
+
+
+
mapperFactory.classMap(UserEntity.class, UserEto.class)
+			.field("email", "email")
+			.field("username", "name")
+			.byDefault()
+			.register();
+
+
+
+
+
Bean-Mapper Usage
+
+

Then we can get the BeanMapper via dependency-injection what we typically already provide by an abstract base class (e.g. AbstractUc). Now we can solve our problem very easy:

+
+
+
+
...
+UserEntity resultEntity = ...;
+...
+return getBeanMapper().map(resultEntity, UserEto.class);
+
+
+
+ +
+
+
Datatypes
+
+
+
+

A datatype is an object representing a value of a specific type with the following aspects:

+
+
+
    +
  • +

    It has a technical or business specific semantic.

    +
  • +
  • +

    Its JavaDoc explains the meaning and semantic of the value.

    +
  • +
  • +

    It is immutable and therefore stateless (its value assigned at construction time and can not be modified).

    +
  • +
  • +

    It is serializable.

    +
  • +
  • +

    It properly implements #equals(Object) and #hashCode() (two different instances with the same value are equal and have the same hash).

    +
  • +
  • +

    It shall ensure syntactical validation so it is NOT possible to create an instance with an invalid value.

    +
  • +
  • +

    It is responsible for formatting its value to a string representation suitable for sinks such as UI, loggers, etc. Also consider cases like a Datatype representing a password where toString() should return something like "**" instead of the actual password to prevent security accidents.

    +
  • +
  • +

    It is responsible for parsing the value from other representations such as a string (as needed).

    +
  • +
  • +

    It shall provide required logical operations on the value to prevent redundancies. Due to the immutable attribute all manipulative operations have to return a new Datatype instance (see e.g. BigDecimal.add(java.math.BigDecimal)).

    +
  • +
  • +

    It should implement Comparable if a natural order is defined.

    +
  • +
+
+
+

Based on the Datatype a presentation layer can decide how to view and how to edit the value. Therefore a structured data model should make use of custom datatypes in order to be expressive. +Common generic datatypes are String, Boolean, Number and its subclasses, Currency, etc. +Please note that both Date and Calendar are mutable and have very confusing APIs. Therefore, use JSR-310 or jodatime instead. +Even if a datatype is technically nothing but a String or a Number but logically something special, it is worth defining it as a dedicated datatype class already for the purpose of having a central javadoc to explain it. On the other hand, avoid introducing technical datatypes like String32 for a String with a maximum length of 32 characters as this is not adding value in the sense of a real Datatype. +It is suitable and in most cases also recommended to use the class implementing the datatype as API omitting a dedicated interface.

+
+
+
+— mmm project
+datatype javadoc +
+
+ +
+
+
Datatype Packaging
+
+

For the devonfw we use a common packaging schema. +The specifics for datatypes are as following:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
SegmentValueExplanation

<component>

*

Here we use the (business) component defining the datatype or general for generic datatypes.

<layer>

common

Datatypes are used across all layers and are not assigned to a dedicated layer.

<scope>

api

Datatypes are always used directly as API even though they may contain (simple) implementation logic. Most datatypes are simple wrappers for generic Java types (e.g. String) but make these explicit and might add some validation.

+
+
+
Technical Concerns
+
+

Many technologies like Dozer and QueryDSL’s (alias API) are heavily based on reflection. For them to work properly with custom datatypes, the frameworks must be able to instantiate custom datatypes with no-argument constructors. It is therefore recommended to implement a no-argument constructor for each datatype of at least protected visibility.

+
+
+
+
Datatypes in Entities
+
+

The usage of custom datatypes in entities is explained in the persistence layer guide.

+
+
+
+
Datatypes in Transfer-Objects
+
+
XML
+
+

For mapping datatypes with JAXB see XML guide.

+
+
+
+
JSON
+
+

For mapping datatypes from and to JSON see JSON custom mapping.

+
+
+ +
+
+
+
CORS configuration in Spring
+ +
+
+
Dependency
+
+

To enable the CORS support from the server side for your devon4j-Spring application, add the below dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-security-cors</artifactId>
+</dependency>
+
+
+
+
+
Configuration
+
+

Add the below properties in your application.properties file:

+
+
+
+
#CORS support
+security.cors.spring.allowCredentials=true
+security.cors.spring.allowedOriginPatterns=*
+security.cors.spring.allowedHeaders=*
+security.cors.spring.allowedMethods=OPTIONS,HEAD,GET,PUT,POST,DELETE,PATCH
+security.cors.pathPattern=/**
+
+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
AttributeDescriptionHTTP Header

allowCredentials

Decides the browser should include any cookies associated with the request (true if cookies should be included).

Access-Control-Allow-Credentials

allowedOrigins

List of allowed origins (use * to allow all origins).

Access-Control-Allow-Origin

allowedMethods

List of allowed HTTP request methods (OPTIONS, HEAD, GET, PUT, POST, DELETE, PATCH, etc.).

-

allowedHeaders

List of allowed headers that can be used during the request (use * to allow all headers requested by the client)

Access-Control-Allow-Headers

pathPattern

Ant-style pattern for the URL paths where to apply CORS. Use "/**" to match all URL paths.

+
+ +
+
+
Microservices in devonfw
+
+

The Microservices architecture is an approach for application development based on a series of small services grouped under a business domain. Each individual service runs autonomously, communicating with the others through their APIs. That independence between the different services allows managing (upgrading, fixing, deploying, etc.) each one without affecting the rest of the system’s services. In addition to that, the microservices architecture allows scaling specific services when facing an increase in requests, so the applications based on microservices are more flexible and stable, and can be adapted quickly to demand changes.

+
+
+

However, this new approach, developing apps based on microservices, presents some downsides.

+
+
+

Let’s see the main challenges when working with microservices:

+
+
+
    +
  • +

    Having the applications divided into different services, we will need a component (router) to redirect each request to the related microservice. These redirection rules must implement filters to guarantee proper functionality.

    +
  • +
  • +

    In order to manage correctly the routing process, the application will also need a catalog with all the microservices and its details: IPs and ports of each of the deployed instances of each microservice, the state of each instance and some other related information. This catalog is called Service Discovery.

    +
  • +
  • +

    With all the information of the Service Discovery the application will need to calculate and select between all the available instances of a microservice which is the suitable one. This will be figured out by the library Client Side Load Balancer.

    +
  • +
  • +

    The different microservices will be likely interconnected with each other, that means that in case of failure of one of the microservices involved in a process, the application must implement a mechanism to avoid the error propagation through the rest of the services and provide an alternative as a process result. To solve this, the pattern Circuit Breaker can be implemented in the calls between microservices.

    +
  • +
  • +

    As we have mentioned, the microservices will exchange calls and information with each other so our applications will need to provide a secured context to avoid not allowed operations or intrusions. In addition, since microservices must be able to operate in an isolated way, it is not recommended to maintain a session. To meet this need without using Spring sessions, a token-based authentication is used that exchanges information using the json web token (JWT) protocol.

    +
  • +
+
+
+

In addition to all of this we will find other issues related to this particular architecture that we will address fitting the requirements of each project.

+
+
+
    +
  • +

    Distributed databases: each instance of a microservice should have only one database.

    +
  • +
  • +

    Centralized logs: each instance of a microservice creates a log and a trace that should be centralized to allow an easier way to read all that information.

    +
  • +
  • +

    Centralized configuration: each microservice has its own configuration, so our applications should group all those configurations in only one place to ease the configuration management.

    +
  • +
  • +

    Automatized deployments: as we are managing several components (microservices, catalogs, balancers, etc.) the deployment should be automatized to avoid errors and ease this process.

    +
  • +
+
+
+

To address the above, devonfw microservices has an alternative approach Microservices based on Netflix-Tools.

+
+
+ +
+
+
Caching
+
+

Caching is a technical approach to improve performance. While it may appear easy on the first sight it is an advanced topic. In general, try to use caching only when required for performance reasons. If you come to the point that you need caching first think about:

+
+
+
    +
  • +

    What to cache?
    +Be sure about what you want to cache. Is it static data? How often will it change? What will happen if the data changes but due to caching you might receive "old" values? Can this be tolerated? For how long? This is not a technical question but a business requirement.

    +
  • +
  • +

    Where to cache?
    +Will you cache data on client or server? Where exactly?

    +
  • +
  • +

    How to cache?
    +Is a local cache sufficient or do you need a shared cache?

    +
  • +
+
+
+
+
Local Cache
+ +
+
+
Shared Cache
+
+
Distributed Cache
+ +
+
+ + +
+
Feature-Toggles
+
+

Most software development teams use Feature-Branching to be able to work in parallel and maintain a stable main branch in the VCS. However, Feature-Branching might not be the ideal tool in every case because of big merges and isolation between development groups. In many cases, Feature-Toggles can avoid some of these problems, so these should definitely be considered for use in collaborative software development.

+
+
+
+
Implementation with the devonfw
+
+

To use Feature-Toggles with the devonfw, use the framework Togglz because it has all the features generally needed and provides great documentation.

+
+
+

For a pretty minimal working example, also see this fork.

+
+
+
Preparation
+
+

The following example takes place in the oasp-sample-core project, so the necessary dependencies have to be added to the according pom.xml file. Required are the main Togglz project including Spring support, the Togglz console to graphically change the feature state and the Spring security package to handle authentication for the Togglz console.

+
+
+
+
<!-- Feature-Toggle-Framework togglz -->
+<dependency>
+  <groupId>org.togglz</groupId>
+  <artifactId>togglz-spring-boot-starter</artifactId>
+  <version>2.3.0.RC2</version>
+</dependency>
+
+<dependency>
+  <groupId>org.togglz</groupId>
+  <artifactId>togglz-console</artifactId>
+  <version>2.3.0.RC2</version>
+</dependency>
+
+<dependency>
+  <groupId>org.togglz</groupId>
+  <artifactId>togglz-spring-security</artifactId>
+  <version>2.3.0.RC2</version>
+</dependency>
+
+
+
+

In addition to that, the following lines have to be included in the spring configuration file application.properties

+
+
+
+
# configuration for the togglz Feature-Toggle-Framework
+togglz.enabled=true
+togglz.console.secured=false
+
+
+
+
+
Small features
+
+

For small features, a simple query of the toggle state is often enough to achieve the desired functionality. To illustrate this, a simple example follows, which implements a toggle to limit the page size returned by the staffmanagement. See here for further details.

+
+
+

This is the current implementation to toggle the feature:

+
+
+
+
// Uncomment next line in order to limit the maximum page size for the staff member search
+// criteria.limitMaximumPageSize(MAXIMUM_HIT_LIMIT);
+
+
+
+

To realise this more elegantly with Togglz, first an enum is required to configure the feature-toggle.

+
+
+
+
public enum StaffmanagementFeatures implements Feature {
+  @Label("Limit the maximum page size for the staff members")
+  LIMIT_STAFF_PAGE_SIZE;
+
+  public boolean isActive() {
+    return FeatureContext.getFeatureManager().isActive(this);
+  }
+}
+
+
+
+

To familiarize the Spring framework with the enum, add the following entry to the application.properties file.

+
+
+
+
togglz.feature-enums=io.oasp.gastronomy.restaurant.staffmanagement.featuremanager.StaffmanagementFeatures
+
+
+
+

After that, the toggle can be used easily by calling the isActive() method of the enum.

+
+
+
+
if (StaffmanagementFeatures.LIMIT_STAFF_PAGE_SIZE.isActive()) {
+  criteria.limitMaximumPageSize(MAXIMUM_HIT_LIMIT);
+}
+
+
+
+

This way, you can easily switch the feature on or off by using the administration console at http://localhost:8081/devon4j-sample-server/togglz-console. If you are getting redirected to the login page, just sign in with any valid user (e.g. admin).

+
+
+
+
Extensive features
+
+

When implementing extensive features, you might want to consider using the strategy design pattern to maintain the overview of your software. The following example is an implementation of a feature which adds a 25% discount to all products managed by the offermanagement.

+
+
+
Therefore there are two strategies needed:
+
    +
  1. +

    Return the offers with the normal price

    +
  2. +
  3. +

    Return the offers with a 25% discount

    +
  4. +
+
+
+

The implementation is pretty straightforward, so use this as a reference. Compare this for further details.

+
+
+
+
@Override
+@RolesAllowed(PermissionConstants.FIND_OFFER)
+public PaginatedListTo<OfferEto> findOfferEtos(OfferSearchCriteriaTo criteria) {
+  criteria.limitMaximumPageSize(MAXIMUM_HIT_LIMIT);
+  PaginatedListTo<OfferEntity> offers = getOfferDao().findOffers(criteria);
+
+
+  if (OffermanagementFeatures.DISCOUNT.isActive()) {
+    return getOfferEtosDiscount(offers);
+  } else {
+    return getOfferEtosNormalPrice(offers);
+  }
+
+}
+
+
+// Strategy 1: Return the OfferEtos with the normal price
+private PaginatedListTo<OfferEto> getOfferEtosNormalPrice(PaginatedListTo<OfferEntity> offers) {
+  return mapPaginatedEntityList(offers, OfferEto.class);
+}
+
+// Strategy 2: Return the OfferEtos with the new, discounted price
+private PaginatedListTo<OfferEto> getOfferEtosDiscount(PaginatedListTo<OfferEntity> offers) {
+  offers = addDiscountToOffers(offers);
+  return mapPaginatedEntityList(offers, OfferEto.class);
+}
+
+private PaginatedListTo<OfferEntity> addDiscountToOffers(PaginatedListTo<OfferEntity> offers) {
+  for (OfferEntity oe : offers.getResult()) {
+    Double oldPrice = oe.getPrice().getValue().doubleValue();
+
+    // calculate the new price and round it to two decimal places
+    BigDecimal newPrice = new BigDecimal(oldPrice * 0.75);
+    newPrice = newPrice.setScale(2, RoundingMode.HALF_UP);
+
+    oe.setPrice(new Money(newPrice));
+  }
+
+  return offers;
+}
+
+
+
+
+
+
Guidelines for a successful use of feature-toggles
+
+

The use of feature-toggles requires a specified set of guidelines to maintain the overview on the software. The following is a collection of considerations and examples for conventions that are reasonable to use.

+
+
+
Minimize the number of toggles
+
+

When using too many toggles at the same time, it is hard to maintain a good overview of the system and things like finding bugs are getting much harder. Additionally, the management of toggles in the configuration interface gets more difficult due to the amount of toggles.

+
+
+

To prevent toggles from piling up during development, a toggle and the associated obsolete source code should be removed after the completion of the corresponding feature. In addition to that, the existing toggles should be revisited periodically to verify that these are still needed and therefore remove legacy toggles.

+
+
+
+
Consistent naming scheme
+
+

A consistent naming scheme is the key to a structured and easily maintainable set of features. This should include the naming of toggles in the source code and the appropriate naming of commit messages in the VCS. The following section contains an example for a useful naming scheme including a small example.

+
+
+

Every Feature-Toggle in the system has to get its own unique name without repeating any names of features, which were removed from the system. The chosen names should be descriptive names to simplify the association between toggles and their purpose. If the feature should be split into multiple sub-features, you might want to name the feature like the parent feature with a describing addition. If for example you want to split the DISCOUNT feature into the logic and the UI part, you might want to name the sub-features DISCOUNT_LOGIC and DISCOUNT_UI.

+
+
+

The entry in the togglz configuration enum should be named identically to the aforementioned feature name. The explicitness of feature names prevents a confusion between toggles due to using multiple enums.

+
+
+

Commit messages are very important for the use of feature-toggles and also should follow a predefined naming scheme. You might want to state the feature name at the beginning of the message, followed by the actual message, describing what the commit changes to the feature. An example commit message could look like the following:

+
+
+
+
DISCOUNT: Add the feature-toggle to the offermanagement implementation.
+
+
+
+

Mentioning the feature name in the commit message has the advantage, that you can search your git log for the feature name and get every commit belonging to the feature. An example for this using the tool grep could look like this.

+
+
+
+
$ git log | grep -C 4 DISCOUNT
+
+commit 034669a48208cb946cc6ba8a258bdab586929dd9
+Author: Florian Luediger <florian.luediger@somemail.com>
+Date:   Thu Jul 7 13:04:37 2016 +0100
+
+DISCOUNT: Add the feature-toggle to the offermanagement implementation.
+
+
+
+

To keep track of all the features in your software system, a platform like GitHub offers issues. When creating an issue for every feature, you can retrace who created the feature and who is assigned to completing its development. When referencing the issue from commits, you also have links to all the relevant commits from the issue view.

+
+
+
+
Placement of toggle points
+
+

To maintain a clean codebase, you definitely want to avoid using the same toggle in different places in the software. There should be one single query of the toggle which should be able to toggle the whole functionality of the feature. If one single toggle point is not enough to switch the whole feature on or off, you might want to think about splitting the feature into multiple ones.

+
+
+
+
Use of fine-grained features
+
+

Bigger features in general should be split into multiple sub-features to maintain the overview on the codebase. These sub-features get their own feature-toggle and get implemented independently.

+
+
+ +
+
+
+
Accessibility
+
+

TODO

+
+ + + +
+ +
+ + + + + +
+ + +devon4j-kafka has been abandoned. Its main feature was the implementation of a retry pattern using multiple topics. This implementation has become an integral part of Spring Kafka. We recommend to use Spring Kafka's own implementation for retries. +
+
+
+
+
Messaging Services
+
+

Messaging Services provide an asynchronous communication mechanism between applications. Technically this is implemented using Apache Kafka .

+
+
+

For Spring, devonfw uses Spring-Kafka as its Kafka framework. +For more details, check the devon4j-kafka.

+
+ +
+ +
+
+
Messaging
+
+

Messaging in Java is done using the JMS standard from JEE.

+
+
+
+
Products
+
+

For messaging you need to choose a JMS provider such as:

+
+
+ +
+
+
+
Receiver
+
+

As a receiver of messages is receiving data from other systems it is located in the service-layer.

+
+
+
JMS Listener
+
+

A JmsListener is a class listening and consuming JMS messages. It should carry the suffix JmsListener and implement the MessageListener interface or have its listener method annotated with @JmsListener. This is illustrated by the following example:

+
+
+
+
@Named
+@Transactional
+public class BookingJmsListener /* implements MessageListener */ {
+
+  @Inject
+  private Bookingmanagement bookingmanagement;
+
+  @Inject
+  private MessageConverter messageConverter;
+
+  @JmsListener(destination = "BOOKING_QUEUE", containerFactory = "jmsListenerContainerFactory")
+  public void onMessage(Message message) {
+    try {
+      BookingTo bookingTo = (BookingTo) this.messageConverter.fromMessage(message);
+      this.bookingmanagement.importBooking(bookingTo);
+    } catch (MessageConversionException | JMSException e) {
+      throw new InvalidMessageException(message);
+    }
+  }
+}
+
+
+
+
+
+
Sender
+
+

The sending of JMS messages is considered as any other sending of data like kafka messages or RPC calls via REST using service-client, gRPC, etc. +This will typically happen directly from a use-case in the logic-layer. +However, the technical complexity of the communication and protocols itself shall be hidden from the use-case and not be part of the logic layer. +With spring we can simply use JmsTemplate to do that.

+
+
+ +
+
+ +
+

If you want to offer all your users fast and simple searches with just a single search field (like in Google), you need full text indexing and search support.

+
+
+
+
Solutions
+
+ +
+
+

Maybe you also want to use native features of your database

+
+ +
+
+
Best Practices
+
+

TODO

+
+
+
+
+

1.105. Tutorials

+ +
+
Creating a new application
+ +
+
+
Running the archetype
+
+

In order to create a new application you must use the archetype provided by devon4j which uses the maven archetype functionality.

+
+
+

To create a new application, you should have installed the devonfw IDE. Follow the devon ide documentation to install +it. +You can choose between 2 alternatives: create it from the command line or, in a more visual manner, within Eclipse.

+
+
+
From command Line
+
+

To create a new devon4j application from command line, you can simply run the following command:

+
+
+
+
devon java create com.example.application.sampleapp
+
+
+
+

For low-level creation you can also manually call this command:

+
+
+
+
mvn -DarchetypeVersion=${devon4j.version} -DarchetypeGroupId=com.devonfw.java.templates -DarchetypeArtifactId=devon4j-template-server archetype:generate -DgroupId=com.example.application -DartifactId=sampleapp -Dversion=1.0.0-SNAPSHOT -Dpackage=com.devonfw.application.sampleapp
+
+
+
+

Attention: The archetypeVersion (first argument) should be set to the latest version of devon4j. You can easily determine the version from this badge: +latest devon4j version

+
+
+

Further providing additional properties (using -D parameter) you can customize the generated app:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 17. Options for app template
propertycommentexample

dbType

Choose the type of RDBMS to use (hana, oracle, mssql, postgresql, mariadb, mysql, etc.)

-DdbType=postgresql

batch

Option to add a batch module

-Dbatch=batch

+
+
+
From Eclipse
+
+
+
After that, you should follow this Eclipse steps to create your application:
+
+
+
+
    +
  • +

    Create a new Maven Project.

    +
  • +
  • +

    Choose the devon4j-template-server archetype, just like the image.

    +
  • +
+
+
+
+Select archetype +
+
+
+
    +
  • +

    Fill the Group Id, Artifact Id, Version and Package for your project.

    +
  • +
+
+
+
+Configure archetype +
+
+
+
    +
  • +

    Finish the Eclipse assistant and you are ready to start your project.

    +
  • +
+
+
+
+
+
What is generated
+
+

The application template (archetype) generates a Maven multi-module project. It has the following modules:

+
+
+
    +
  • +

    api: module with the API (REST service interfaces, transferobjects, datatypes, etc.) to be imported by other apps as a maven dependency in order to invoke and consume the offered (micro)services.

    +
  • +
  • +

    core: maven module containing the core of the application.

    +
  • +
  • +

    batch: optional module for batch(es)

    +
  • +
  • +

    server: module that bundles the entire app (core with optional batch) as a WAR file.

    +
  • +
+
+
+

The toplevel pom.xml of the generated project has the following features:

+
+
+
    +
  • +

    Properties definition: Spring-boot version, Java version, etc.

    +
  • +
  • +

    Modules definition for the modules (described above)

    +
  • +
  • +

    Dependency management: define versions for dependencies of the technology stack that are recommended and work together in a compatible way.

    +
  • +
  • +

    Maven plugins with desired versions and configuration

    +
  • +
  • +

    Profiles for test stages

    +
  • +
+
+
+
+
How to run your app
+
+
Run app from IDE
+
+

To run your application from your favourite IDE, simply launch SpringBootApp as java application.

+
+
+
+
Run app as bootified jar or war
+
+

More details are available here.

+
+ +
+
+
+
+

1.106. Quarkus

+
+

Quarkus is a Java framework for building cloud-native apps. +It is fully supported by devonfw as an option and alternative to spring. +Additional things like extensions will be available on the devon4quarkus GitHub repository.

+
+
+
+

1.107. Guide to the Reader

+
+

Dependent on the intention you are reading this document, you might be most interested in the following chapters:

+
+
+
    +
  • +

    If you are completely new to Quarkus, you may be interested in the pros and cons of Quarkus. Also take a look at the official Quarkus website. And you might also be interested in the features that GraalVM offers.

    +
  • +
  • +

    If you are new to devon4j, also take a look at devon4j’s recommendations on general best practices. Check out the chapters on architecture design, project structuring and coding conventions. Follow the referenced links to explore a topic in more depth.

    +
  • +
  • +

    If you are an experienced Spring developer and want to get in touch with Quarkus, read our Getting started with Quarkus for Spring developers guide.

    +
  • +
  • +

    If you’re looking to build your first Quarkus application, the Quarkus website offers some good getting started guides. Also check out our Quarkus template guide, which gives you some recommendations on extensions and frameworks to use. It also provides some links to the Quarkus code generator with preselected configurations you can use to create your application.

    +
  • +
  • +

    If you want to have a Quarkus sample application using devon4j recommendations, check out our Quarkus reference application.

    +
  • +
  • +

    If you already have some experience with devon4j and Quarkus and need more information on a specific topic, check out our Quarkus guides. If you don’t find what you are looking for there, check out the general section. devon4j uses general solutions for Java, so solutions for both Quarkus and Spring are documented there.

    +
  • +
  • +

    If you want to learn how to build native images, check out this guide.

    +
  • +
+
+
+
+

1.108. Pros

+
+

Quarkus offers the following benefits:

+
+
+
    +
  • +

    fast turn-around cycles for developers
    +Save changes in your Java code and immediately test the results without restarting or waiting

    +
  • +
  • +

    faster start-up and less memory footprint
    +When building your app as native-images via GraalVM it gets highly optimized. As a result it starts up lightning fast and consumes much less memory. This is a great advantage for cloud deployment as well as for sustainability. You can find a performance comparison between Spring and Quarkus here.

    +
  • +
  • +

    clean and lean +As quarkus was born as a cloud-native framework it is very lightweight and does not carry much history and legacy.

    +
  • +
+
+
+
+

1.109. Cons

+
+

Quarkus has the following drawbacks:

+
+
+
    +
  • +

    less flexible
    +Quarkus is less flexible compared to spring or in other words it is more biased and coupled to specific implementations. However, the implementations just work and you have less things to choose and worry about. However, in case you want to integrate a specific or custom library you may hit limitations or lose support for native-images especially when that library is based on reflection. Therefore, check your requirements and technology stack early on when making your choice.

    +
  • +
  • +

    less established
    +Since quarkus was born in 2019 it is modern but also less established. It will be easier to get developers for spring but we already consider quarkus mature and established enough for building production-ready apps.

    +
  • +
+
+
+

1.109.1. Quarkus Quickstart

+
+

This guide will show a quickstart how to create a Quarkus app and will briefly show the key functionalities that Quarkus provides and how to start as a beginner or also experienced developers.

+
+
+
+

1.109.2. Introduction to Quarkus

+
+

To get a first introduction to Quarkus you can read the Quarkus introduction guide. To get a short overview where you can find the important Quarkus related guides follow the chapter guide to the reader. +Also, read a comparison of the advantages and disadvantages of a Quarkus application compared to the alternative framework Spring. +This comparison will be supported by our performance comparison between Spring and Quarkus that shows the lower resource consumption and startup time of Quarkus applications.

+
+
+
+

1.109.3. Installation of Tools and Dependencies

+
+

First, we need to install some dependencies and tools before we can start programming. Our tool devonfw-ide comes with many development tools for you. +We need to install the following tools for this guide.

+
+
+
    +
  • +

    Maven

    +
  • +
  • +

    Java

    +
  • +
  • +

    any IDE (devonfw-ide supports Eclipse, Intellij and VScode)

    +
  • +
  • +

    Docker

    +
  • +
+
+
+

We recommend installing the devonfw-ide with the tools, but if you already got your system configured and the tools from above installed you can skip to Bootstrap a Quarkus Project otherwise we will show you how to setup and update your devonfw-ide.

+
+
+
devonfw-ide
+
    +
  1. +

    Install devonfw-ide
    +Follow the Setup to install the devonfw-ide with Java, Maven, Eclipse and VScode.

    +
    +
      +
    1. +

      Command to install Docker
      +devon docker setup

      +
    2. +
    +
    +
  2. +
  3. +

    Update devonfw-ide
    +We advise to update your already installed devonfw-ide and all tools because we are still working to improve devonfw-ide and there could be essential features for cloud development with Quarkus that you could be missing.

    +
  4. +
+
+
+

Use the commands devon ide update, devon ide update software and devon ide scripts to update devonfw-ide and all software that is installed.

+
+
+

Go to the main folder under workspaces of the devonfw-ide installation. +We will create the project there.

+
+
+
+

1.109.4. Bootstrap a Quarkus Project

+
+

Quarkus provides multiple ways to bootstrap a project. +The option to bootstrap a project via the command-line will be shown in the Quarkus getting started guide Bootstrap the project. +Quarkus also provides a project builder where you can select some extensions, the build tool for your project, and if you want some starter code. +This will deliver a project skeleton with the configured project dependencies and also contributes the information to compile the application natively. To get some recommendations on starter templates follow this guide templates recommendations.

+
+
+ + + + + +
+ + +
+

By creating a Quarkus project from command-line or with the project builder you get a different project structure and have to adapt it to the devon4j conventions shown in the next Chapter.

+
+
+
+
+
Project Structure
+
+

We provide a recommendation and guideline for a modern project structure to help organize your project into logically related modules. +You should follow the guide and also use it in your project so you structure the project to the needs of modern cloud development and microservice architectures and also find similar modules faster in our example projects.

+
+
+
+
+

1.109.5. Introduction to Quarkus Functionality

+
+

Before we start programming you should have a first look at the functionality of Quarkus.

+
+
+
Quarkus functionality guides
+
    +
  1. +

    Getting started guide from Quarkus
    +The guide is a good functionality overview, it shows with a simple Greeting Service a brief introduction into the concepts like CDI, testing, dev mode, packaging and running the app.

    +
  2. +
  3. +

    From Spring to Quarkus
    +For experienced Spring developers that have already followed devon4j guidelines, you can read our guide to getting started with Quarkus for Spring developer, as it goes more into the differences that can give you a more detailed comparison to Spring.

    +
    +
      +
    1. +

      Migrate a Spring app to Quarkus
      +This guide shows, how to migrate a Spring application to a Quarkus application with devon4j conventions.

      +
    2. +
    +
    +
  4. +
+
+
+
+

1.109.6. Create a REST service

+
+

Now let’s create our first REST CRUD service with Quarkus. +We give you the options to use a guide and start to code the service yourself, +or just download a service that’s ready to use.

+
+
+
Options
+
    +
  1. +

    Create the service yourself
    +There is a good Quarkus guide for a simple JSON REST service that will guide you through your first application and will help you to implement defining endpoint with JAX-RS and an Entity that will be managed by the service and also how to configure the JSON support.

    +
  2. +
  3. +

    Use an existing Quarkus project
    +You don’t want to code a service and just want to test some Quarkus functionalities? Just load a Quarkus sample project, provided for every existing quick start guide and the supported framework. +Our Team also provides some Quarkus applications that are working and can be loaded and tested.

    +
    +
      +
    • +

      reference project is a service that manages products. It contains the devon4j modern project structure, pagination, queries, a Postgres database, Jaeger tracing, Prometheus monitoring, SwaggerUI and support for Kubernetes deploy. +This project will be steadily improved and is used to showcase the abilities of Quarkus with devon4j.

      +
    • +
    • +

      minimal Quarkus project is just the Quarkus project from a getting started with Quarkus guide with a Greeting Services modified with the correct modern structure talked about in this chapter Project Structure

      +
    • +
    +
    +
  4. +
+
+
+
+

1.109.7. OpenAPI generation

+
+

We provide a guide with a short introduction to the OpenAPI specification with two plugins that are important in a Quarkus Context.

+
+ +
+

A more detailed usage guide to the Smallrye Plugin is provided by Quarkus OpenAPI and Swagger guide.

+
+
+
+

1.109.8. How to Integrate a Database

+
+

The next step for our REST service would be to integrate a database to store the objects of the entity.

+
+
+

With Quarkus, adding a database can be easy, because Quarkus can take over the build-up and connection process. +First, you should understand our guides to the concepts of how to work with data and then we will show how to integrate a database with Quarkus.

+
+
+
Data Principles Guides
+
    +
  1. +

    General devon4j JPA guide
    +To get an insight into the general JPA usage you should read the JPA guide which contains a general explanation of the Java Persistence API.

    +
  2. +
  3. +

    Difference to SpringData
    +If you have already worked with SpringData this is also partially supported with Quarkus, this is explained in more detail in this SpringData Guide.

    +
  4. +
+
+
+
Database Integration
+
    +
  1. +

    Quarkus zero config dev mode
    +Starting with database implementation in Quarkus, we recommend for beginners to use the DEV mode Zero Config Setup (Dev Services) this is especially great for testing the code without a database set up. +Quarkus does all the work for you and configures a database and creates the database and tables(schemas) for you.

    +
    +
      +
    1. +

      Configuration Properties
      +A list of all database configuration properties for the Dev services

      +
    2. +
    +
    +
  2. +
  3. +

    Integrate a simple Hibernate ORM database
    +The zero config setup only works with the Dev mode, it’s comfortable in the first phases of the creation of your service but if the goal is to also get a deployable version, you have to create your own database and integrate it. +This Quarkus guide shows, how to integrate a Hibernate ORM database with an example service.

    +
    +
      +
    1. +

      Configuration list for JDBC
      +A list of all configuration that is possible with a JDBC configuration properties

      +
    2. +
    +
    +
  4. +
  5. +

    Reactive CRUD application with Panache
    +Quarkus unifies reactive and imperative programming. +Reactive is an architectural principle to build robust, efficient, and concurrent applications. +An introduction into reactive and how Quarkus is enabling it follow this Quarkus reactive architecture article and also the reactive quickstart. +To get started with reactive and implement reactive methods you can follow the Quarkus reactive guide. +The reactive guide is using the Quarkus based implementation of a Hibernate ORM called Panache. +That implementation is not our first choice with devon4j and therefore not part of our recommendations, but to understand the reactive guide you can read the Hibernate ORM with Panache guide first to prevent possible problems following the guide.

    +
  6. +
+
+
+ + + + + +
+ + +
+

You need an installed Docker version for the zero config setup.

+
+
+
+
+
Database Migration
+
    +
  1. +

    Migration guide +For schema-based databases, we recommend migrating databases with Flyway. +In that case, our general migration guide can give you an overview if you are not familiar with migration.

    +
    +
      +
    1. +

      Flyway guide for Quarkus +This Quarkus guide will show how to work with the Flyway extension in a Quarkus application. +This should be used if you start your own database and do not leave the creation to quarkus.

      +
    2. +
    +
    +
  2. +
+
+
+
+

1.109.9. Testing a Quarkus Application

+
+

After we have built the service, we have to verify it with some tests. +We will give you some guidelines to implement some test cases.

+
+
+
Testing Guides
+
    +
  1. +

    General testing guide
    +For users that aren’t familiar with the devon4j testing principles, we created a general best practices and recommendations guide for testing.

    +
    +
      +
    1. +

      Our guide for testing with Quarkus +In addition, we also provide a guide that specifically addresses the testing of a Quarkus application.

      +
    2. +
    +
    +
  2. +
+
+
+

Most of the Quarkus applications are already equipped with a basic Test and also our reference project provides some test cases, if you want to improve and extends the tests, you can also follow the large Quarkus guide for testing.

+
+
+
+

1.109.10. Packaging of a Quarkus application and creation of a native executable

+
+

Quarkus applications can be packed into different types. The following link will show how to build and also give you a short explanation of the characteristics of these files.

+
+
+
Package types
+
    +
  1. +

    fast-jar

    +
  2. +
  3. +

    mutable-jar

    +
  4. +
  5. +

    uber-jar

    +
  6. +
  7. +

    native executable

    +
  8. +
+
+
+

To pack an application use the command mvn package and Quarkus will generate the output in the /target folder. For the native executables, the command needs more parameters but this is explained in the link above.

+
+
+

Configure the Output with these configuration properties

+
+
+
+

1.109.11. Create and build a Docker Image

+
+

Quarkus supports Jib, S2I and Docker for building images. We focus on building a Quarkus App with Docker. +You get a created Dockerfile from Quarkus in the src/main/docker folder of any project generated from Quarkus. There are multiple Dockerfiles.

+
+
+
Dockerfiles
+
    +
  1. +

    Dockerfile.jvm
    +Dockerfile for Quarkus application in the JVM mode. running in Red Hat Universal Base Image 8 Minimal Container

    +
  2. +
  3. +

    Dockerfile.legacy-jar
    +DockerFile for Quarkus application in JVM mode with the legacy jar running in Red Hat Universal Base Image 8 Minimal Container.

    +
  4. +
  5. +

    Dockerfile.native
    +Dockerfile using the native executable running in Red Hat Universal Base Image 8 Minimal container.

    +
  6. +
  7. +

    Dockerfile.native-distroless +The native file will run in a Distroless container. Distroless images are very small containers with just the application and runtime dependencies and without the other programs coming with a Linux distribution.

    +
  8. +
+
+
+
+
+

For more information to the different executables go back to the chapter Packaging of a Quarkus application and creation of a native executable

+
+
+
+
+

To simply build and run a Docker image you can follow the instructions Quarkus provides for every Dockerfile in the comments block.

+
+
+

Docker commands example for the JVM Dockerfile from our reference project

+
+
+
+
####
+# This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode
+#
+# Before building the container image run:
+#
+# ./mvnw package
+#
+# Then, build the image with:
+#
+# docker build -f src/main/docker/Dockerfile.jvm -t quarkus/quarkus-basics-jvm .
+#
+# Then run the container using:
+#
+# docker run -i --rm -p 8080:8080 quarkus/quarkus-basics-jvm
+#
+# If you want to include the debug port into your docker image
+# you will have to expose the debug port (default 5005) like this :  EXPOSE 8080 5005
+#
+# Then run the container using :
+#
+# docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/quarkus-basics-jvm
+#
+###
+
+
+
+

Quarkus is also able to build the image while packaging the application so you don’t have to execute the command from above. +To perform Docker builds with the generated Dockerfiles from above you need to add the following extension to your project with the command mvn quarkus:add-extension -Dextensions="container-image-docker".

+
+
+

Also you have to set the quarkus.container-image.build=true, you can add this to your application.properties or just append it to the packaging command like that ./mvn package -Dquarkus.container-image.build=true.

+
+
+

If your needs exceed the instructions given by the file, we recommend to follow the Docker getting started guide to get familiar with Docker and customize the Dockerfiles according to your needs. +To specify your container build, you can use the general container image configurations properties and the Docker image configurations properties when building and running Docker images.

+
+ +
+
+

1.109.12. Modern project structure

+
+

With trends such as cloud, microservices, lean, and agile we decided on a more modern project structure that fits better to recent market trends. +When starting new projects with devonfw and especially in the context of cloud-native development we strongly recommend this modern approach over the classic structure.

+
+
+
+

1.109.13. Modules

+
+

Due to trends such as microservices we are building smaller apps compared to moduliths. +For simplicity we therefore do not split our app into different modules and keep everything top-level and easy.

+
+
+

In addition to java and resources we also add helm for helm templates and docker for docker scripts (e.g. Dockerfile) in src/main:

+
+
+
+
├──/src
+|  ├──/main
+|  |  ├──/docker
+|  |  ├──/helm
+|  |  ├──/java
+|  |  └──/resources
+|  └──/test
+|     ├──/java
+|     └──/resources
+└──/pom.xml
+
+
+
+
+

1.109.14. Deployment

+
+

For modern projects we strongly recommend that your build process is generating the final deliverable as an OCI compliant container. +Further, to go fully cloud-native you should build your app as a native image via GraalVM AOT compiler. +Therefore we recommend to use quarkus as your main framework. +In case you want to go with spring you may consider using spring-native.

+
+
+
+

1.109.15. Layers

+
+

The package structure of your code inside src/main/java (and src/test/java) of your app is described in our coding conventions in the sections packages. +For the modern project structure the layers are defined by the following table:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Layer«layer»Description

service

service

The service layer exposing functionality via its remote API. Typical protocol is REST. May also be any other protocol you are using such as gRPC.

domain

domain

The domain with the data-model and DB access. Use sub-package (in «detail») repository for repository and dao for DAOs. Also we recommend to put entities in model sub-package.

logic

logic

The logic layer with the functionality providing the business value.

common

common

cross-cutting code not assigned to a technical layer.

+
+
+

1.109.16. Architecture Mapping

+
+

In order to help you to map the architecture, packaging, layering, etc. to the code and see where different code elements should be placed, +we provide this architecture mapping:

+
+
+
+
«root»
+├──.«component»
+|  ├──.domain
+|  |  ├──.repo
+|  |  |  ├──.«BusinessObject»Repository
+|  |  |  ├──.«BusinessObject»Fragment
+|  |  |  └──.«BusinessObject»FragmentImpl
+|  |  ├──.dao [alternative to repo]
+|  |  |  ├──.«BusinessObject»Dao
+|  |  |  └──.«BusinessObject»DaoImpl
+|  |  └──.model
+|  |     └──.«BusinessObject»Entity
+|  ├──.logic
+|  |  ├──«BusinessObject»Validator
+|  |  └──«BusinessObject»EventsEmitter
+|   |  └──.Uc«Operation»«BusinessObject»[Impl]
+|  └──.service
+|     └──.v1
+|        ├──.«Component»RestService
+|        ├──.mapper
+|        |     └──.«BusinessObject»Mapper
+|        └──.model
+|           └──.«BusinessObject»Dto
+└──.general
+   └──.domain
+      └──.model
+         └──.ApplicationPersistenceEntity
+
+
+ +
+
+

1.109.17. Domain Layer

+
+

The domain layer is responsible for the data-model and mapping this to a database. +The most common approach is to use a Relational Database Management System (RDBMS). In such a case, we strongly recommend to follow our JPA Guide. Some NoSQL databases are supported by spring-data so you can consider the repository guide.

+
+
+

Note: The domain layer is the replacement for the data-access layer in the modern project structure.

+
+
+
+
+
+

1.110. Guides

+ +
+
Getting started with Quarkus for Spring developers
+
+

As a Spring developer, you have heard more and more about Quarkus: its pros and cons, its fast growth etc. So, you decided to adopt/try Quarkus for your (next) project(s) and wonder where to go next and what you need to pay attention to when moving from Spring to Quarkus.

+
+
+

This guide tries to address exactly this concern. In the following, we will present you some main points you should be aware of when starting to develop with Quarkus, along with some useful sources.

+
+
+
    +
  1. +

    Quarkus is a fairly new Java toolkit. Nevertheless, it is very well documented. It also provides a set of well-written technical guides that are a good starting point to get in touch and make the first steps with Quarkus. See here. It is an Open Source project licensed under the Apache License version 2.0. The source code is hosted in GitHub. If you have any question or concern, don’t hesitate to reach out to the Quarkus community.

    +
  2. +
  3. +

    Same as Spring Initializr, you can go to code.quarkus.io to create a new application. Also, check out our Template Quarkus Guide to have our recommendations on certain topics.

    +
  4. +
  5. +

    In Spring stack, we recommend structuring your application into multiple modules, known as our classic structure. Moving to Quarkus and the world of cloud-native, microservices where we build smaller applications compared to monoliths, we recommend keeping everything top-level and simple. Therefore, we propose the modern structure as a better fit.

    +
  6. +
  7. +

    Quarkus focuses not only on delivering top features but also on the developer experience. The Quarkus’s Live Coding feature automatically detects changes made to Java files, application configuration, static resources or even classpath dependency changes and recompiles and redeploys the changes. As such, it solves the problem of the traditional Java development workflow, hence improves productivity.

    +
    +
    +
        Write Code → Compile → Deploy → Test Changes/ Refresh Browser/ etc → Repeat (traditional)
    +    Write Code → Test Changes/ Refresh Browser/ etc → Repeat (Quarkus)
    +
    +
    +
    +

    You can use this feature out of the box without any extra setup by running:

    +
    +
    +
    +
        mvn compile quarkus:dev
    +
    +
    +
    +

    Another highlight feature to speed up developing is the Quarkus’s Dev Mode with Dev Services, which can automatically provision unconfigured services in development and test mode. It means that if you include an extension and don’t configure it, Quarkus will automatically start the relevant service and wire up your application to use it, therefore will save you a lot of time setting up those services manually. In production mode, where the real configuration is provided, Dev Service will be disabled automatically.

    +
    +
    +

    Also in Dev Mode, you can access the Dev UI at /q/dev to browse endpoints offered by various extensions, conceptually similar to what a Spring Boot actuator might provide.

    +
    +
  8. +
  9. +

    Quarkus is made of a small core on which relies hundreds of extensions. In fact, the power of Quarkus is its extension mechanism. Think of these extensions as your project dependencies. You can add it per dependency manager such as maven or gradle.

    +
    +
    +
    mvn quarkus:list-extensions
    +mvn quarkus:add-extension -Dextensions="groupId:artifactId"
    +(or add it manually to pom.xml)
    +# or
    +gradle list-extensions
    +(add dependency to build.gradle)
    +
    +
    +
    +

    Like Spring Boot, Quarkus also has a vast ecosystem of extensions with commonly-used technologies.

    +
    + + ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Table 18. Example of common Quarkus extensions and the Spring Boot Starters with similar functionality (book: Quarkus for Spring Developer)
    Quarkus extensionSpring Boot Starter

    quarkus-resteasy-jackson

    spring-boot-starter-web

    +

    spring-boot-starter-webflux

    quarkus-resteasy-reactive-jackson

    spring-boot-starter-web

    +

    spring-boot-starter-webflux

    quarkus-hibernate-orm-panache

    spring-boot-starter-data-jpa

    quarkus-hibernate-orm-rest-datapanache

    spring-boot-starter-data-rest

    quarkus-hibernate-reactive-panache

    spring-boot-starter-data-r2dbc

    quarkus-mongodb-panache

    spring-boot-starter-data-mongodb

    +

    spring-boot-starter-data-mongodb-reactive

    quarkus-hibernate-validator

    spring-boot-starter-validation

    quarkus-qpid-jms

    spring-boot-starter-activemq

    quarkus-artemis-jms

    spring-boot-starter-artemis

    quarkus-cache

    spring-boot-starter-cache

    quarkus-redis-client

    spring-boot-starter-data-redis

    +

    spring-boot-starter-data-redis-reactive

    quarkus-mailer

    spring-boot-starter-mail

    quarkus-quartz

    spring-boot-starter-quartz

    quarkus-oidc

    spring-boot-starter-oauth2-resource-server

    quarkus-oidc-client

    spring-boot-starter-oauth2-client

    quarkus-smallrye-jwt

    spring-boot-starter-security

    +
    +

    A full list of all Quarkus extensions can be found here. Furthermore, you can check out the community extensions hosted by Quarkiverse Hub. Quarkus has some extensions for Spring API as well which is helpful while migrating from Spring to Quarkus.

    +
    + +
    +

    Besides extensions, which are officially maintained by Quarkus team, Quarkus allows adding external libraries too. While extensions can be integrated seamlessly into Quarkus as they can be processed at build time and be built in native mode with GraalVM, external dependencies might not work out of the box with native compilation. If that is the case, then you have to recompile them with the right GraalVM configuration to make them work.

    +
    +
  10. +
  11. +

    Quarkus’s design accounted for native compilation by default. A Quarkus native executable starts much faster and utilizes far less memory than a traditional JVM (see our performance comparison between Spring and Quarkus). To get familiar with building native executable, configuring and running it, please check out our Native Image Guide. Be sure to test your code in both JVM and native mode.

    +
  12. +
  13. +

    Both Quarkus and Spring include testing frameworks based on JUnit and Mockito. Thus, by design, Quarkus enables test-driven development by detecting affected tests as changes are made and automatically rerunning them in the background. As such, it gives developers instant feedback, hence improves productivity. To use continuous testing, execute the following command:

    +
    +
    +
    mvn quarkus:dev
    +
    +
    +
  14. +
  15. +

    For the sake of performance optimization, Quarkus avoids reflection as much as possible, instead favoring static class binding. When building a native executable, it analyzes the call tree and removes all the classes/methods/fields that are not used directly. As a consequence, the elements used via reflection are not part of the call tree so they are dead code eliminated (if not called directly in other cases).

    +
    +

    A common example is the JSON libraries which typically use reflection to serialize the objects to JSON. If you use them out of the box, you might encounter some errors in native mode. So, be sure to register the elements for reflection explicitly. A How-to is provided by Quarkus Registering For Reflection with practical program snippets.

    +
    +
  16. +
+
+
+

A very good read on the topic is the e-book Quarkus for Spring Developers by Red Hat. Another good source for direct hands-on coding tutorial is Katacoda Quarkus for Spring Boot Developers

+
+ +
+
+
Configuration
+
+

Quarkus provides a comprehensive guide to configuration here.

+
+
+
+
External Application Configuration
+
+
Database Configuration
+
+

In Quarkus, Hibernate is provided by the quarkus-hibernate-orm extension. Ensure the extension is added to your pom.xml as follows:

+
+
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-hibernate-orm</artifactId>
+</dependency>
+
+
+
+

You additionally have to add the respective JDBC driver extension to your pom.xml. There are different drivers for different database types. See Quarkus Hibernate guide.

+
+
+
+
Database System and Access
+
+

You need to configure which database type you want to use, as well as the location and credentials to access it. The defaults are configured in application.properties. The file should therefore contain the properties as in the given example:

+
+
+
+
quarkus.datasource.jdbc.url=jdbc:postgresql://database.enterprise.com/app
+quarkus.datasource.username=appuser01
+quarkus.datasource.password=************
+quarkus.datasource.db-kind=postgresql
+
+# drop and create the database at startup (use only for local development)
+quarkus.hibernate-orm.database.generation=drop-and-create
+
+
+
+
+
Database Logging
+
+

Add the following properties to application.properties to enable logging of database queries for debugging purposes.

+
+
+
+
quarkus.hibernate-orm.log.sql=true
+quarkus.hibernate-orm.log.format-sql=true
+
+#Logs SQL bind parameters. Setting it to true is obviously not recommended in production.
+quarkus.hibernate-orm.log.bind-parameters=true
+
+
+
+
+
+
Security
+
+
Password Encryption
+
+

There is also some libraries to make Jasypt work with Quarkus such as Camel Quarkus Jasypt. Unfortunately, this feature only works in JVM mode and not in native mode.

+
+
+

Quarkus supports many credentials provider with official extensions such as HashiCorp Vault.

+
+
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-vault</artifactId>
+</dependency>
+
+
+
+

A detailed guide can be found here and here.

+
+ +
+
+
+
Quarkus template
+
+

Quarkus Code Generator is providing a lot of alternatives on technologies and libraries to be integrated. Detailed guides to multiple topics can be found here.

+
+
+

Thus, the large selection can make it difficult for developers to get started. +Therefore, in this guide, we aim to provide general suggestions on basic frameworks, libraries, and technologies to make it easy for developers to begin with.

+
+
+

With that said, please take this as a recommendation and not a compulsion. Depending on your project requirements, you might have to use another stack in comparison to what is listed below.

+
+
+

If you are new to Quarkus, consider checking out their getting started guide to have an overview about how to create, run, test as well as package a Quarkus application. Another recommended source to get started is the Katacoda tutorials.

+
+
+
Basic templates
+
+
    +
  1. +

    simple REST API (go to code.quarkus.io)

    +
  2. +
  3. +

    simple REST API with monitoring (go to code.quarkus.io)

    +
  4. +
+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 19. Topic-based suggested implementation
TopicDetailSuggested implementationNote

runtime

servlet-container

Undertow

component management

dependency injection

ArC

ArC is based on JSR 365. It also provides interceptors that can be used to implement the same functionality as AOP provides

configuration

SmallRye Config

SmallRye Config is an implementation of Eclipse MicroProfile Config. It also supports YAML configuration files

persistence

OR-mapper

Hibernate ORM, Spring Data JPA

Hibernate ORM is the de facto standard JPA implementation and works perfectly in Quarkus. Quarkus also provides a compatibility layer for Spring Data JPA repositories in the form of the spring-data-jpa extension.

batch

Quarkus JBeret Extension is a non-official extension, which is hosted in the Quarkiverse Hub. It is an implementation of JSR 352.

service

REST services

RESTEasy

RESTEasy is a portable implementation of the new JCP specification JAX-RS JSR-311. It can be documented via Swagger OpenAPI.

async messaging

SmallRye Reactive Messaging, Vert.x EventBus

SmallRye Reactive Messaging is an implementation of the Eclipse MicroProfile Reactive Messaging specification 1.0. You can also utilize SmallRye Reactive Messaging in your Quarkus application to interact with Apache Kafka.

marshalling

RESTEasy Jackson, RESTEasy JSON-B, RESTEasy JAXB, RESTEasy Multipart

cloud

kubernetes

Kubernetes

deployment

Minikube, k3d

Minikube is quite popular when a Kubernetes cluster is needed for development purposes. Quarkus supports this with the quarkus-minikube extension.

logging

framework

JBoss Log Manager and the JBoss Logging facade

Internally, Quarkus uses JBoss Log Manager and the JBoss Logging facade. Logs from other supported Logging API (JBoss Logging, SLF4J, Apache Commons Logging) will be merged.

validation

framework

Hibernate Validator/Bean Validation (JSR 380)

security

authentication & authorization

JWT authentication

Quarkus supports various security mechanisms. Depending on your protocol, identity provider you can choose the necessary extensions such as quarkus-oidc quarkus-smallrye-jwt quarkus-elytron-security-oauth2.

monitoring

framework

Micrometer Metrics, SmallRye Metrics

SmallRye Metrics is an implementation of the MicroProfile Metrics specification. Quarkus also offers various extensions to customize the metrics.

health

SmallRye Health

SmallRye Health is an implementation of the MicroProfile Health specification.

fault tolerance

SmallRye Fault Tolerance

SmallRye Fault Tolerance is an implementation of the MicroProfile Fault Tolerance specification.

+ +
+
+
+
Building a native image
+
+

Quarkus provides the ability to create a native executable of the application called native image. +Unlike other Java-based deployments, such a native image will only run on the architecture and operating system it is compiled for. +Also, no JVM is needed to run the native-image. +This improves the startup time, performance and efficiency. +A distribution of GraalVM is needed. +You can find the differences between the available distributions here.

+
+
+

To build your quarkus app as native-image you have two options that are described in the following sections.

+
+
+
+
Build a native executable with GraalVM
+
+

To build a Quarkus application you can install GraalVM locally on your machine as described below. +Therefore read the basic Quarkus application chapter, or clone the example project provided by devonfw. +Follow this chapter from the Quarkus Guide for building a native executable.

+
+
+
Installing GraalVM
+
+

A native image can be created locally or through a container environment. +To create a native image locally an installed and configured version of GraalVM is needed, you can follow the installation guide from Quarkus or the guide provided by GraalVM for this.

+
+
+
+
+
Build a native executable with GraalVM through container environment
+
+

In order to make the build of native images more portable, you can also use your container environment and run the GraalVM inside a container (typically Docker). +You can simply install Docker with your devonfw-ide distribution just follow this description Docker with devonfw-ide. +Follow this chapter to build a native Linux image through container runtime.

+
+
+
+
Configuring the native executable
+
+

A list of all configuration properties for a native image can be found here.

+
+ +
+
+
Bean mapping with Quarkus
+
+

This guide will show bean-mapping in particular for a Quarkus application. We recommend using MapStruct with a Quarkus application because the other bean-mapper frameworks are using Java reflection. Reflection is not supported in GraalVM right now and causes problems building native applications. MapStruct is a code generator that greatly simplifies the implementation of mappings between Java bean types based on a convention over configuration approach. The mapping code will be generated at compile-time and uses plain method invocations and thus is fast, type-safe, and easy to understand. MapStruct has to be configured to not use Java reflection, which will be shown in this guide.

+
+
+

You can find the official +MapStruct reference guide and a general introduction to MapStruct from Baeldung.

+
+
+
+
MapStruct Dependency
+
+

To get access to MapStruct we have to add the dependency to our POM.xml:

+
+
+
+
<dependency>
+  <groupId>org.mapstruct</groupId>
+  <artifactId>mapstruct</artifactId>
+  <version>1.4.2.Final</version>
+  <scope>provided</scope>
+</dependency>
+
+
+
+

MapStruct provides an annotation processor that also has to be added to the POM.xml

+
+
+
+
<plugin>
+	<groupId>org.apache.maven.plugins</groupId>
+	<artifactId>maven-compiler-plugin</artifactId>
+	<version>3.8.1</version>
+	<configuration>
+		<source>1.8</source>
+		<target>1.8</target>
+		<annotationProcessorPaths>
+			<path>
+				<groupId>org.mapstruct</groupId>
+				<artifactId>mapstruct-processor</artifactId>
+				<version>1.4.2.Final</version>
+			</path>
+		</annotationProcessorPaths>
+	</configuration>
+</plugin>
+
+
+
+

MapStruct takes advantage of generated getters, setters, and constructors from the Lombok library, follow this Lombok with Mapstruct guide to get Lombok with Mapstruct working.

+
+
+
+
MapStruct Configuration
+
+

We already discussed the benefits of dependency injection and MapStruct supports CDI with EJB, spring, and jsr330. The default retrieving method for a mapper is a factory that uses reflections and should be avoided. The component model should be set to CDI, as this will allow us to easily inject the generated mapper implementation. The component model can be configured in multiple ways.

+
+
+
Simple Configuration
+
+

Add the attribute componentModel to the @Mapper annotation in the mapper interface.

+
+
+
+
@Mapper(componentModel = "cdi")
+public interface ProductMapper{
+  ...
+}
+
+
+
+
+
MapperConfig Configuration
+
+

Create a shared configuration that can be used for multiple mappers. Implement an Interface and use the annotation @MapperConfig for the class. You can define all configurations in this interface and pass the generated MapperConfig.class with the config attribute to the mapper. The MapperConfig also defines the InjectionStrategy and MappingInheritanceStrategy, both of which will be explained later. +A list of all configurations can be found here.

+
+
+
+
@MapperConfig(
+  componentModel = "cdi",
+  mappingInheritanceStrategy = MappingInheritanceStrategy.AUTO_INHERIT_FROM_CONFIG,
+  injectionStrategy = InjectionStrategy.CONSTRUCTOR
+)
+public interface MapperConfig{
+}
+
+
+
+
+
@Mapper( config = MapperConfig.class )
+public interface ProductMapper{
+  ...
+}
+
+
+
+

Any attributes not given via @Mapper will be inherited from the shared configuration MapperConfig.class.

+
+
+
+
Configuration via annotation processor options
+
+

The MapStruct code generator can be configured using annotation processor options. +You can pass the options to the compiler while invoking javac directly, or add the parameters to the maven configuration in the POM.xml

+
+
+

We are also using the constructor injection strategy to avoid field injection and potential reflection; it also simplifies our tests. +The option to pass the parameter to the annotation processor in the POM.xml is used and can be inspected in our quarkus reference project.

+
+
+

A list of all annotation processor options can be found here.

+
+
+
+
+
Basic Bean-Mapper Usage
+
+

To use the mapper we have to implement the mapper interface and the function prototypes with a @Mapper annotation.

+
+
+
+
@Mapper
+public interface ProductMapper {
+
+  ProductDto map(ProductEntity model);
+
+  ProductEntity create(NewProductDto dto);
+}
+
+
+
+

The MapStruct annotation processor will generate the implementation for us under /target/generated-sources/, we just need to tell it that we would like to have a method that accepts a ProductEntity entity and returns a new ProductDto DTO.

+
+
+

The generated mapper implementation will be marked with the @ApplicationScoped annotation and thus can be injected into fields, constructor arguments, etc. using the @Inject annotation:

+
+
+
+
public class ProductRestService{
+
+  @Inject
+  ProductMapper mapper;
+}
+
+
+
+

That is the basic usage of a Mapstruct mapper. In the next chapter, we go a bit into detail and show some more configurations.

+
+
+
+
Advanced Bean-Mapper Usage
+
+

Let's assume our Product entity and the ProductDto have some differently named properties that should be mapped. Add a mapping annotation to map the property type from Product to kind from ProductDto. We define the source name of the property and the target name.

+
+
+
+
@Mapper
+public interface ProductMapper {
+  @Mapping(target = "kind", source = "type")
+  ProductDto map(ProductEntity entity);
+
+  @InheritInverseConfiguration(name = "map" )
+  ProductEntity create(ProductDto dto);
+}
+
+
+
+

For bi-directional mappings, we can indicate that a method shall inherit the inverse configuration of the corresponding method with the @InheritInverseConfiguration. You can omit the name parameter if the result type of method A is the same as the +single-source type of method B and if the single-source type of A is the same as the result type of B. If multiple methods apply, the attribute name is needed. Specific mappings from the inversed method can (optionally) be overridden, ignored, and set to constants or expressions.

+
+
+

The mappingInheritanceStrategy can be defined as shown in MapStruct Configuration; the existing options can be found here.

+
+
+

Not always a mapped attribute has the same type in the source and target objects. For instance, an attribute may be of type int in the source bean but of type Long in the target bean.

+
+
+

Another example are references to other objects which should be mapped to the corresponding types in the target model. E.g. the class ShoppingCart might have a property content of the type Product which needs to be converted into an ProductDto object when mapping a ShoppingCart object to ShoppingCartDto. For these cases, it’s useful to understand how Mapstruct is converting the data types and the object references.

+
+
+

Also, the Chapter for nested bean mappings will help to configure MapStruct to map arbitrary deep object graphs.

+
+
+

You can study running MapStruct implementation examples given by MapStruct or in our Quarkus reference project

+
+
+
+
+
+
+
+
+1. "Stammdaten" in German. +
+
+2. Whether to use checked exceptions or not is a controversial topic. Arguments for both sides can be found under The Trouble with Checked Exceptions, Unchecked Exceptions — The Controversy, and Checked Exceptions are Evil. The arguments in favor of unchecked exceptions tend to prevail for applications build with Devon4j. Therefore, unchecked exceptions should be used for a consistent style. +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-access-control-schema.html b/docs/devon4j/1.0/guide-access-control-schema.html new file mode 100644 index 00000000..f8d4b5e1 --- /dev/null +++ b/docs/devon4j/1.0/guide-access-control-schema.html @@ -0,0 +1,551 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Access Control Schema

+
+
+

With release 3.0.0 the access-control-schema.xml has been deprecated. You may still use it and find the documentation in this section. However, for new devonfw applications always start with the new approach described in access control config.

+
+
+
+
+

Legacy Access Control Schema Documentation

+
+
+

The file access-control-schema.xml is used to define the mapping from groups to permissions (see example from sample app). The general terms discussed above can be mapped to the implementation as follows:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. General security terms related to devon4j access control schema
Termdevon4j-security implementationComment

Permission

AccessControlPermission

Group

AccessControlGroup

When considering different levels of groups of different meanings, declare type attribute, e.g. as "group".

Role

AccessControlGroup

With type="role".

Access Control

AccessControl

Super type that represents a tree of AccessControlGroups and AccessControlPermissions. If a principal "has" a AccessControl he also "has" all AccessControls with according permissions in the spanned sub-tree.

+
+
Example access-control-schema.xml
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<access-control-schema>
+  <group id="ReadMasterData" type="group">
+    <permissions>
+      <permission id="OfferManagement_GetOffer"/>
+      <permission id="OfferManagement_GetProduct"/>
+      <permission id="TableManagement_GetTable"/>
+      <permission id="StaffManagement_GetStaffMember"/>
+    </permissions>
+  </group>
+
+  <group id="Waiter" type="role">
+    <inherits>
+      <group-ref>Barkeeper</group-ref>
+    </inherits>
+    <permissions>
+      <permission id="TableManagement_ChangeTable"/>
+    </permissions>
+  </group>
+  ...
+</access-control-schema>
+
+
+
+

This example access-control-schema.xml declares

+
+
+
    +
  • +

    a group named ReadMasterData, which grants four different permissions, e.g., OfferManagement_GetOffer

    +
  • +
  • +

    a group named Waiter, which

    +
    +
      +
    • +

      also grants all permissions from the group Barkeeper

      +
    • +
    • +

      in addition grants the permission TableManagement_ChangeTable

      +
    • +
    • +

      is marked to be a role for further application needs.

      +
    • +
    +
    +
  • +
+
+
+

The devon4j-security module automatically validates the schema configuration and will throw an exception if invalid.

+
+
+

Unfortunately, Spring Security does not provide differentiated interfaces for authentication and authorization. Thus we have to provide an AuthenticationProvider, which is provided from Spring Security as an interface for authentication and authorization simultaneously. +To integrate the devon4j-security provided access control schema, you can simply inherit your own implementation from the devon4j-security provided abstract class AbstractAccessControlBasedAuthenticationProvider and register your ApplicationAuthenticationProvider as an AuthenticationManager. Doing so, you also have to declare the two Beans AccessControlProvider and AccessControlSchemaProvider, which are precondition for the AbstractAccessControlBasedAuthenticationProvider.

+
+
+

As state of the art devon4j will focus on role-based authorization to cope with authorization for executing use case of an application. +We will use the JSR250 annotations, mainly @RolesAllowed, for authorizing method calls against the permissions defined in the annotation body. This has to be done for each use-case method in logic layer. Here is an example:

+
+
+
+
public class OrdermanagementImpl extends AbstractComponentFacade implements Ordermanagement {
+
+  @RolesAllowed(Roles.WAITER)
+  public PaginatedListTo<OrderCto> findOrdersByPost(OrderSearchCriteriaTo criteria) {
+
+    return findOrderCtos(criteria);
+  }
+}
+
+
+
+

Now this method can only be called if a user is logged in who has the role WAITER (as declared via @RolesAllowed(Roles.WAITER) in the example above).

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-access-control.html b/docs/devon4j/1.0/guide-access-control.html new file mode 100644 index 00000000..412c1cc4 --- /dev/null +++ b/docs/devon4j/1.0/guide-access-control.html @@ -0,0 +1,846 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Access-Control

+
+
+

Access-Control is a central and important aspect of Security. It consists of two major aspects:

+
+
+ +
+
+
+
+

Authentication

+
+
+

Definition:

+
+
+
+
+

Authentication is the verification that somebody interacting with the system is the actual subject for whom he claims to be.

+
+
+
+
+

The one authenticated is properly called subject or principal. There are two forms of principals you need to distinguish while designing your authentication: human users and autonomous systems. While e.g. a Kerberos/SPNEGO Single-Sign-On makes sense for human users, it is pointless for authenticating autonomous systems. For simplicity, we use the common term user to refer to any principal even though it may not be a human (e.g. in case of a service call from an external system).

+
+
+

To prove the authenticity, the user provides some secret called credentials. The most simple form of credentials is a password.

+
+
+

Implementations

+
+ + + + + +
+ + +Please never implement your own authentication mechanism or credential store. You have to be aware of implicit demands such as salting and hashing credentials, password life-cycle with recovery, expiry, and renewal including email notification confirmation tokens, central password policies, etc. This is the domain of access managers and identity management systems. In a business context you will typically already find a system for this purpose that you have to integrate (e.g. via LDAP). Otherwise you should consider establishing such a system e.g. using keycloak. +
+
+
+

We recommend using JWT when possible. For KISS, also try to avoid combining multiple authentication mechanisms (form based, basic-auth, SAMLv2, OAuth, etc.) within the same application (for different URLs).

+
+
+

For spring, check the Spring Security

+
+
+

For quarkus, check the Quarkus Authentication

+
+
+
+
+
+

Authorization

+
+
+

Definition:

+
+
+
+
+

Authorization is the verification that an authenticated user is allowed to perform the operation he intends to invoke.

+
+
+
+
+

Clarification of terms

+
+

For clarification we also want to give a common understanding of related terms that have no unique definition and consistent usage in the wild.

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Security terms related to authorization
TermMeaning and comment

Permission

A permission is an object that allows a principal to perform an operation in the system. This permission can be granted (give) or revoked (taken away). Sometimes people also use the term right what is actually wrong as a right (such as the right to be free) can not be revoked.

Group

We use the term group in this context for an object that contains permissions. A group may also contain other groups. Then the group represents the set of all recursively contained permissions.

Role

We consider a role as a specific form of group that also contains permissions. A role identifies a specific function of a principal. A user can act in a role.

+

For simple scenarios a principal has a single role associated. In more complex situations a principal can have multiple roles but has only one active role at a time that he can choose out of his assigned roles. For KISS it is sometimes sufficient to avoid this by creating multiple accounts for the few users with multiple roles. Otherwise at least avoid switching roles at run-time in clients as this may cause problems with related states. Simply restart the client with the new role as parameter in case the user wants to switch his role.

Access Control

Any permission, group, role, etc., which declares a control for access management.

+
+
+

Suggestions on the access model

+
+

For the access model we give the following suggestions:

+
+
+
    +
  • +

    Each Access Control (permission, group, role, …​) is uniquely identified by a human readable string.

    +
  • +
  • +

    We create a unique permission for each use-case.

    +
  • +
  • +

    We define groups that combine permissions to typical and useful sets for the users.

    +
  • +
  • +

    We define roles as specific groups as required by our business demands.

    +
  • +
  • +

    We allow to associate users with a list of Access Controls.

    +
  • +
  • +

    For authorization of an implemented use case we determine the required permission. Furthermore, we determine the current user and verify that the required permission is contained in the tree spanned by all his associated Access Controls. If the user does not have the permission we throw a security exception and thus abort the operation and transaction.

    +
  • +
  • +

    We avoid negative permissions, that is a user has no permission by default and only those granted to him explicitly give him additional permission for specific things. Permissions granted can not be reduced by other permissions.

    +
  • +
  • +

    Technically we consider permissions as a secret of the application. Administrators shall not fiddle with individual permissions but grant them via groups. So the access management provides a list of strings identifying the Access Controls of a user. The individual application itself contains these Access Controls in a structured way, whereas each group forms a permission tree.

    +
  • +
+
+
+
+

Naming conventions

+
+

As stated above each Access Control is uniquely identified by a human readable string. This string should follow the naming convention:

+
+
+
+
«app-id».«local-name»
+
+
+
+

For Access Control Permissions the «local-name» again follows the convention:

+
+
+
+
«verb»«object»
+
+
+
+

The segments are defined by the following table:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2. Segments of Access Control Permission ID
SegmentDescriptionExample

«app-id»

Is a unique technical but human readable string of the application (or microservice). It shall not contain special characters and especially no dot or whitespace. We recommend to use lower-train-case-ascii-syntax. The identity and access management should be organized on enterprise level rather than application level. Therefore permissions of different apps might easily clash (e.g. two apps might both define a group ReadMasterData but some user shall get this group for only one of these two apps). Using the «app-id». prefix is a simple but powerful namespacing concept that allows you to scale and grow. You may also reserve specific «app-id»s for cross-cutting concerns that do not actually reflect a single app e.g to grant access to a geographic region.

shop

«verb»

The action that is to be performed on «object». We use Find for searching and reading data. Save shall be used both for create and update. Only if you really have demands to separate these two you may use Create in addition to Save. Finally, Delete is used for deletions. For non CRUD actions you are free to use additional verbs such as Approve or Reject.

Find

«object»

The affected object or entity. Shall be named according to your data-model

Product

+
+

So as an example shop.FindProduct will reflect the permission to search and retrieve a Product in the shop application. The group shop.ReadMasterData may combine all permissions to read master-data from the shop. However, also a group shop.Admin may exist for the Admin role of the shop application. Here the «local-name» is Admin that does not follow the «verb»«object» schema.

+
+
+
+

devon4j-security

+
+

The module devon4j-security provides ready-to-use code based on spring-security that makes your life a lot easier.

+
+
+
+access-control +
+
Figure 1. devon4j Security Model
+
+
+

The diagram shows the model of devon4j-security that separates two different aspects:

+
+
+
    +
  • +

    The Identity- and Access-Management is provided by according products and typically already available in the enterprise landscape (e.g. an active directory). It provides a hierarchy of primary access control objects (roles and groups) of a user. An administrator can grant and revoke permissions (indirectly) via this way.

    +
  • +
  • +

    The application security defines a hierarchy of secondary access control objects (groups and permissions). This is done by configuration owned by the application (see following section). The "API" is defined by the IDs of the primary access control objects that will be referenced from the Identity- and Access-Management.

    +
  • +
+
+
+
+

Access Control Config

+
+

In your application simply extend AccessControlConfig to configure your access control objects as code and reference it from your use-cases. An example config may look like this:

+
+
+
+
@Named
+public class ApplicationAccessControlConfig extends AccessControlConfig {
+
+  public static final String APP_ID = "MyApp";
+
+  private static final String PREFIX = APP_ID + ".";
+
+  public static final String PERMISSION_FIND_OFFER = PREFIX + "FindOffer";
+
+  public static final String PERMISSION_SAVE_OFFER = PREFIX + "SaveOffer";
+
+  public static final String PERMISSION_DELETE_OFFER = PREFIX + "DeleteOffer";
+
+  public static final String PERMISSION_FIND_PRODUCT = PREFIX + "FindProduct";
+
+  public static final String PERMISSION_SAVE_PRODUCT = PREFIX + "SaveProduct";
+
+  public static final String PERMISSION_DELETE_PRODUCT = PREFIX + "DeleteProduct";
+
+  public static final String GROUP_READ_MASTER_DATA = PREFIX + "ReadMasterData";
+
+  public static final String GROUP_MANAGER = PREFIX + "Manager";
+
+  public static final String GROUP_ADMIN = PREFIX + "Admin";
+
+  public ApplicationAccessControlConfig() {
+
+    super();
+    AccessControlGroup readMasterData = group(GROUP_READ_MASTER_DATA, PERMISSION_FIND_OFFER, PERMISSION_FIND_PRODUCT);
+    AccessControlGroup manager = group(GROUP_MANAGER, readMasterData, PERMISSION_SAVE_OFFER, PERMISSION_SAVE_PRODUCT);
+    AccessControlGroup admin = group(GROUP_ADMIN, manager, PERMISSION_DELETE_OFFER, PERMISSION_DELETE_PRODUCT);
+  }
+}
+
+
+
+
+

Configuration on Java Method level

+
+

In your use-case you can now reference a permission like this:

+
+
+
+
@Named
+public class UcSafeOfferImpl extends ApplicationUc implements UcSafeOffer {
+
+  @Override
+  @RolesAllowed(ApplicationAccessControlConfig.PERMISSION_SAVE_OFFER)
+  public OfferEto save(OfferEto offer) { ... }
+  ...
+}
+
+
+
+
+

JEE Standard

+
+

Role-based Access Control (RBAC) is commonly used for authorization. +JSR 250 defines a number of common annotations to secure your application.

+
+
+
    +
  • +

    javax.annotation.security.PermitAll specifies that no access control is required to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.DenyAll specifies that no access controls are allowed to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.RolesAllowed specifies that only a list of access controls are allowed to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.DeclareRoles defines roles for security checking.

    +
  • +
  • +

    javax.annotation.security.RunAs specifies the RunAs role for the given components.

    +
  • +
+
+
+

@PermitAll, @DenyAll, and @RolesAllowed annotations can be applied to both class and method. +A method-level annotation will override the behaviour of a class-level annotation. Using multiple annotations of those 3 is not valid.

+
+
+
+
// invalid
+@PermitAll
+@DenyAll
+public String foo()
+
+// invalid and compilation fails
+@RolesAllowed("admin")
+@RolesAllowed("user")
+public String bar()
+
+// OK
+@RolesAllowed({"admin", "user"})
+public String bar()
+
+
+
+

Please note that when specifying multiple arguments to @RolesAllowed those are combined with OR (and not with AND). +So if the user has any of the specified access controls, he will be able to access the method.

+
+
+

As a best practice avoid specifying string literals to @RolesAllowed. +Instead define a class with all access controls as constants and reference them from there. +This class is typically called ApplicationAccessControlConfig in devonfw.

+
+
+

In many complicated cases where @PermitAll @DenyAll @RolesAllowed are insufficient e.g. a method should be accessed by a user in role A and not in role B at the same time, you have to verify the user role directly in the method. You can use SecurityContext class to get further needed information.

+
+
+

Spring

+
+

Spring Security also supports authorization on method level. To use it, you need to add the spring-security-config dependency. If you use Spring Boot, the dependency spring-boot-starter-security already includes spring-security-config. Then you can configure as follows:

+
+
+
    +
  • +

    prePostEnabled property enables Spring Security pre/post annotations. @PreAuthorize and @PostAuthorize annotations provide expression-based access control. See more here

    +
  • +
  • +

    securedEnabled property determines if the @Secured annotation should be enabled. @Secured can be used similarly as @RolesAllowed.

    +
  • +
  • +

    jsr250Enabled property allows us to use the JSR-250 annotations such as @RolesAllowed.

    +
  • +
+
+
+
+
@Configuration
+@EnableGlobalMethodSecurity(
+  prePostEnabled = true,
+  securedEnabled = true,
+  jsr250Enabled = true)
+public class MethodSecurityConfig
+  extends GlobalMethodSecurityConfiguration {
+}
+
+
+
+

A further read about the whole concept of Spring Security Authorization can be found here.

+
+
+
+

Quarkus

+
+

Quarkus comes with built-in security to allow for RBAC based on the common security annotations @RolesAllowed, @DenyAll, @PermitAll on REST endpoints and CDI beans. Quarkus also provides the io.quarkus.security.Authenticated annotation that will permit any authenticated user to access the resource (equivalent to @RolesAllowed("**")).

+
+
+
+
+

Data-based Permissions

+ +
+
+

Access Control Schema (deprecated)

+
+

The access-control-schema.xml approach is deprecated. The documentation can still be found in access control schema.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-accessibility.html b/docs/devon4j/1.0/guide-accessibility.html new file mode 100644 index 00000000..a8572a8e --- /dev/null +++ b/docs/devon4j/1.0/guide-accessibility.html @@ -0,0 +1,442 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ + +
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-aop.html b/docs/devon4j/1.0/guide-aop.html new file mode 100644 index 00000000..d7432d75 --- /dev/null +++ b/docs/devon4j/1.0/guide-aop.html @@ -0,0 +1,521 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Aspect Oriented Programming (AOP)

+
+
+

AOP is a powerful feature for cross-cutting concerns. However, if used extensive and for the wrong things an application can get unmaintainable. Therefore we give you the best practices where and how to use AOP properly.

+
+
+
+
+

AOP Key Principles

+
+
+

We follow these principles:

+
+
+
    +
  • +

    We use spring AOP based on dynamic proxies (and fallback to cglib).

    +
  • +
  • +

    We avoid AspectJ and other mighty and complex AOP frameworks whenever possible

    +
  • +
  • +

    We only use AOP where we consider it as necessary (see below).

    +
  • +
+
+
+
+
+

AOP Usage

+
+
+

We recommend to use AOP with care but we consider it established for the following cross cutting concerns:

+
+
+ +
+
+
+
+

AOP Debugging

+
+
+

When using AOP with dynamic proxies the debugging of your code can get nasty. As you can see by the red boxes in the call stack in the debugger there is a lot of magic happening while you often just want to step directly into the implementation skipping all the AOP clutter. When using Eclipse this can easily be achieved by enabling step filters. Therefore you have to enable the feature in the Eclipse tool bar (highlighted in red).

+
+
+
+AOP debugging +
+
+
+

In order to properly make this work you need to ensure that the step filters are properly configured:

+
+
+
+Step Filter Configuration +
+
+
+

Ensure you have at least the following step-filters configured and active:

+
+
+
+
ch.qos.logback.*
+com.devonfw.module.security.*
+java.lang.reflect.*
+java.security.*
+javax.persistence.*
+org.apache.commons.logging.*
+org.apache.cxf.jaxrs.client.*
+org.apache.tomcat.*
+org.h2.*
+org.springframework.*
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-apm.html b/docs/devon4j/1.0/guide-apm.html new file mode 100644 index 00000000..6a64fc2a --- /dev/null +++ b/docs/devon4j/1.0/guide-apm.html @@ -0,0 +1,482 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Application Performance Management

+
+
+

This guide gives hints how to manage, monitor and analyse performance of Java applications.

+
+
+
+
+

Temporary Analysis

+
+
+

If you are facing performance issues and want to do a punctual analysis we recommend you to use glowroot. It is ideal in cases where monitoring in your local development environment is suitable. However, it is also possible to use it in your test environment. It is entirely free and open-source. Still it is very powerful and helps to trace down bottlenecks. To get a first impression of the tool take a look at the demo.

+
+
+

JEE/WTP

+
+

In case you are forced to use a JEE application server and want to do a temporary analysis you can double click your server instance from the servers view in Eclipse and click on the link Open launch configuration in order to add the -javaagent JVM option.

+
+
+
+
+
+

Regular Analysis

+
+
+

In case you want to manage application performance regularly we recommend to use JavaMelody that can be integrated into your application. More information on javamelody is available on the JavaMelody Wiki

+
+
+
+
+

Alternatives

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-auditing.html b/docs/devon4j/1.0/guide-auditing.html new file mode 100644 index 00000000..e326f8a1 --- /dev/null +++ b/docs/devon4j/1.0/guide-auditing.html @@ -0,0 +1,517 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Auditing

+
+
+

For database auditing we use hibernate envers. If you want to use auditing ensure you have the following dependency in your pom.xml:

+
+
+
spring
+
+
<dependency>
+  <groupId>com.devonfw.java.modules</groupId>
+  <artifactId>devon4j-jpa-envers</artifactId>
+</dependency>
+
+
+
+
quarkus
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-hibernate-envers</artifactId>
+</dependency>
+
+
+
+ + + + + +
+ + +The following part applies only to spring applications. At this point, the Quarkus extension does not provide any additional configurations. For Quarkus applications, simply use the @Audited annotation to enable auditing for an entity class, as described a few lines below or seen here. +
+
+
+

Make sure that entity manager also scans the package from the devon4j-jpa[-envers] module in order to work properly. And make sure that correct Repository Factory Bean Class is chosen.

+
+
+
+
@EntityScan(basePackages = { "«my.base.package»" }, basePackageClasses = { AdvancedRevisionEntity.class })
+...
+@EnableJpaRepositories(repositoryFactoryBeanClass = GenericRevisionedRepositoryFactoryBean.class)
+...
+public class SpringBootApp {
+  ...
+}
+
+
+
+

Now let your [Entity]Repository extend from DefaultRevisionedRepository instead of DefaultRepository.

+
+
+

The repository now has a method getRevisionHistoryMetadata(id) and getRevisionHistoryMetadata(id, boolean lazy) available to get a list of revisions for a given entity and a method find(id, revision) to load a specific revision of an entity with the given ID or getLastRevisionHistoryMetadata(id) to load last revision. +To enable auditing for a entity simply place the @Audited annotation to your entity and all entity classes it extends from.

+
+
+
+
@Entity(name = "Drink")
+@Audited
+public class DrinkEntity extends ProductEntity implements Drink {
+...
+
+
+
+

When auditing is enabled for an entity an additional database table is used to store all changes to the entity table and a corresponding revision number. This table is called <ENTITY_NAME>_AUD per default. Another table called REVINFO is used to store all revisions. Make sure that these tables are available. They can be generated by hibernate with the following property (only for development environments).

+
+
+
+
  database.hibernate.hbm2ddl.auto=create
+
+
+
+

Another possibility is to put them in your database migration scripts like so.

+
+
+
+
CREATE CACHED TABLE PUBLIC.REVINFO(
+  id BIGINT NOT NULL generated by default as identity (start with 1),
+  timestamp BIGINT NOT NULL,
+  user VARCHAR(255)
+);
+...
+CREATE CACHED TABLE PUBLIC.<TABLE_NAME>_AUD(
+    <ALL_TABLE_ATTRIBUTES>,
+    revtype TINYINT,
+    rev BIGINT NOT NULL
+);
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-batch-layer.html b/docs/devon4j/1.0/guide-batch-layer.html new file mode 100644 index 00000000..b55c2f13 --- /dev/null +++ b/docs/devon4j/1.0/guide-batch-layer.html @@ -0,0 +1,842 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Batch Layer

+
+
+

We understand batch processing as a bulk-oriented, non-interactive, typically long running execution of tasks. For simplicity, we use the term "batch" or "batch job" for such tasks in the following documentation.

+
+
+

devonfw uses Spring Batch as a batch framework.

+
+
+

This guide explains how Spring Batch is used in devonfw applications. It focuses on aspects which are special to devonfw. If you want to learn about spring-batch you should refer to Spring's reference documentation.

+
+
+

There is an example of a simple batch implementation in the my-thai-star batch module.

+
+
+

In this chapter, we will describe the overall architecture (especially concerning layering) and how to administer batches.

+
+
+
+
+

Layering

+
+
+

Batches are implemented in the batch layer. The batch layer is responsible for batch processes, whereas the business logic is implemented in the logic layer. Compared to the service layer, you may understand the batch layer just as a different way of accessing the business logic. +From a component point of view, each batch is implemented as a subcomponent in the corresponding business component. +The business component is defined by the business architecture.

+
+
+

Let’s make an example for that. The sample application implements a batch for exporting ingredients. This ingredientExportJob belongs to the dishmanagement business component. +So the ingredientExportJob is implemented in the following package:

+
+
+
+
<basepackage>.dishmanagement.batch.impl.*
+
+
+
+

Batches should invoke use cases in the logic layer for doing their work. +Only "batch specific" technical aspects should be implemented in the batch layer.

+
+
+
+
+

Example: +For a batch, which imports product data from a CSV file, this means that all code for actually reading and parsing the CSV input file is implemented in the batch layer. +The batch calls the use case "create product" in the logic layer for actually creating the products for each line read from the CSV input file.

+
+
+
+
+

Directly accessing data access layer

+
+

In practice, it is not always appropriate to create use cases for every bit of work a batch should do. Instead, the data access layer can be used directly. +An example for that is a typical batch for data retention which deletes out-of-time data. +Often, deleting out-dated data is done by invoking a single SQL statement. It is appropriate to implement that SQL in a Repository or DAO method and call this method directly from the batch. +But be careful: this pattern is a simplification which could lead to business logic cluttered in different layers, which reduces the maintainability of your application. +It is a typical design decision you have to make when designing your specific batches.

+
+
+
+
+
+

Project structure and packaging

+
+
+

Batches will be implemented in a separate Maven module to keep the application core free of batch dependencies. The batch module includes a dependency on the application core-module to allow the reuse of the use cases, DAOs etc. +Additionally the batch module has dependencies on the required spring batch jars:

+
+
+
+
  <dependencies>
+
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>mtsj-core</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter-batch</artifactId>
+    </dependency>
+
+  </dependencies>
+
+
+
+

To allow an easy start of the batches from the command line it is advised to create a bootified jar for the batch module by adding the following to the pom.xml of the batch module:

+
+
+
+
  <build>
+    <resources>
+      <resource>
+        <directory>src/main/resources</directory>
+        <filtering>true</filtering>
+      </resource>
+    </resources>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <configuration>
+          <excludes>
+            <exclude>config/application.properties</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+      <!-- Create bootified jar for batch execution via command line.
+           Your applications spring boot app is used as main-class.
+       -->
+      <plugin>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-maven-plugin</artifactId>
+        <configuration>
+          <mainClass>com.devonfw.application.mtsj.SpringBootApp</mainClass>
+          <classifier>bootified</classifier>
+        </configuration>
+        <executions>
+          <execution>
+            <goals>
+              <goal>repackage</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+
+
+
+
+

Implementation

+
+
+

Most of the details about implementation of batches is described in the spring batch documentation. +There is nothing special about implementing batches in devonfw. You will find an easy example in my-thai-star.

+
+
+
+
+

Starting from command line

+
+
+

Devonfw advises to start batches via command line. This is most common to many ops teams and allows easy integration in existing schedulers. In general batches are started with the following command:

+
+
+
+
java -jar <app>-batch-<version>-bootified.jar --spring.main.web-application-type=none --spring.batch.job.enabled=true --spring.batch.job.names=<myJob> <params>
+
+
+ ++++ + + + + + + + + + + + + + + + + + + + + +
ParameterExplanation

--spring.main.web-application-type=none

This disables the web app (e.g. Tomcat)

--spring.batch.job.names=<myJob>

This specifies the name of the job to run. If you leave this out, ALL jobs will be executed, which probably does not make too much sense.

<params>

(Optional) additional parameters which are passed to your job

+
+

This will launch your normal spring boot app, disables the web application part and runs the designated job via Spring Boots org.springframework.boot.autoconfigure.batch.JobLauncherCommandLineRunner.

+
+
+
+
+

Scheduling

+
+
+

In real world scheduling of batches is not as simple as it first might look like.

+
+
+
    +
  • +

    Multiple batches have to be executed in order to achieve complex tasks. If one of those batches fails the further execution has to be stopped and operations should be notified for example.

    +
  • +
  • +

    Input files or those created by batches have to be copied from one node to another.

    +
  • +
  • +

    Scheduling batch executing could get complex easily (quarterly jobs, run job on first workday of a month, …​)

    +
  • +
+
+
+

For devonfw we propose the batches themselves should not mess around with details of scheduling. +Likewise your application should not do so. This complexity should be externalized to a dedicated batch administration service or scheduler. +This service could be a complex product or a simple tool like cron. We propose Rundeck as an open source job scheduler.

+
+
+

This gives full control to operations to choose the solution which fits best into existing administration procedures.

+
+
+
+
+

Handling restarts

+
+
+

If you start a job with the same parameters set after a failed run (BatchStatus.FAILED) a restart will occur. +In many cases your batch should then not reprocess all items it processed in the previous runs. +For that you need some logic to start at the desired offset. There are different ways to implement such logic:

+
+
+
    +
  • +

    Marking processed items in the database in a dedicated column

    +
  • +
  • +

    Write all IDs of items to process in a separate table as an initialization step of your batch. You can then delete IDs of already processed items from that table during the batch execution.

    +
  • +
  • +

    Storing restart information in springs ExecutionContext (see below)

    +
  • +
+
+
+

Using spring batch ExecutionContext for restarts

+
+

By implementing the ItemStream interface in your ItemReader or ItemWriter you may store information about the batch progress in the ExecutionContext. You will find an example for that in the CountJob in My Thai Star.

+
+
+

Additional hint: It is important that the bean definition methods of your ItemReader/ItemWriter return types implementing ItemStream (and not just ItemReader or ItemWriter alone). For that the ItemStreamReader and ItemStreamWriter interfaces are provided.

+
+
+
+
+
+

Exit codes

+
+
+

Your batches should create a meaningful exit code to allow reaction to batch errors e.g. in a scheduler. +For that spring batch automatically registers an org.springframework.boot.autoconfigure.batch.JobExecutionExitCodeGenerator. To make this mechanism work your spring boot app main class as to populate this exit code to the JVM:

+
+
+
+
@SpringBootApplication
+public class SpringBootApp {
+
+  public static void main(String[] args) {
+    if (Arrays.stream(args).anyMatch((String e) -> e.contains("--spring.batch.job.names"))) {
+      // if executing batch job, explicitly exit jvm to report error code from batch
+      System.exit(SpringApplication.exit(SpringApplication.run(SpringBootApp.class, args)));
+    } else {
+      // normal web application start
+      SpringApplication.run(SpringBootApp.class, args);
+    }
+  }
+}
+
+
+
+
+
+

Stop batches and manage batch status

+
+
+

Spring batch uses several database tables to store the status of batch executions. +Each execution may have a different status. +You may use this mechanism to gracefully stop batches. +Additionally in some edge cases (batch process crashed) the execution status may be in an undesired state. +E.g. the state will be running, even though the process crashed some time ago. +In such cases you have to change the status of the execution in the database.

+
+
+

CLI-Tool

+
+

Devonfw provides an easy-to-use CLI tool to manage the execution status of your jobs. +The tool is implemented in the devonfw module devon4j-batch-tool. It will provide a runnable jar, which may be used as follows:

+
+
+
+
List names of all previous executed jobs
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar jobs list

+
+
Stop job named 'countJob'
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar jobs stop countJob

+
+
Show help
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar

+
+
+
+
+

As you can see, each invocation includes the JDBC connection string to your database. +This means that you have to make sure that the corresponding DB driver is in the classpath (the prepared jar only contains H2).

+
+
+
+
+
+

Authentication

+
+
+

Most business applications incorporate authentication and authorization. +Your spring boot application will implement some kind of security, e.g. integrated login with username+password or in many cases authentication via an existing IAM. +For security reasons your batch should also implement an authentication mechanism and obey the authorization implemented in your application (e.g. via @RolesAllowed).

+
+
+

Since there are many different authentication mechanisms we cannot provide an out-of-the-box solution in devonfw, but we describe a pattern how this can be implemented in devonfw batches.

+
+
+

We suggest to implement the authentication in a Spring Batch tasklet, which runs as the first step in your batch. This tasklet will do all of the work which is required to authenticate the batch. A simple example which authenticates the batch "locally" via username and password could be implemented like this:

+
+
+
+
@Named
+public class SimpleAuthenticationTasklet implements Tasklet {
+
+  @Override
+  public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
+
+    String username = chunkContext.getStepContext().getStepExecution().getJobParameters().getString("username");
+    String password = chunkContext.getStepContext().getStepExecution().getJobParameters().getString("password");
+    Authentication authentication = new UsernamePasswordAuthenticationToken(username, password);
+
+    SecurityContextHolder.getContext().setAuthentication(authentication);
+    return RepeatStatus.FINISHED;
+  }
+
+}
+
+
+
+

The username and password have to be supplied via two cli parameters -username and -password. This implementation creates an "authenticated" Authentication and sets it in the Spring Security context. This is just for demonstration; normally you should not provide passwords via the command line. The actual authentication will be done automatically via Spring Security as in your "normal" application. +If you have a more complex authentication mechanism in your application, e.g. via OpenID Connect, just call this in the tasklet. Naturally you may read authentication parameters (e.g. secrets) from the command line or more securely from a configuration file.

+
+
+

In your Job Configuration set this tasklet as the first step:

+
+
+
+
@Configuration
+@EnableBatchProcessing
+public class BookingsExportBatchConfig {
+  @Inject
+  private JobBuilderFactory jobBuilderFactory;
+
+  @Inject
+  private StepBuilderFactory stepBuilderFactory;
+
+  @Bean
+  public Job myBatchJob() {
+    return this.jobBuilderFactory.get("myJob").start(myAuthenticationStep()).next(...).build();
+  }
+
+  @Bean
+  public Step myAuthenticationStep() {
+    return this.stepBuilderFactory.get("myAuthenticationStep").tasklet(myAuthenticatonTasklet()).build();
+  }
+
+  @Bean
+  public Tasklet myAuthenticatonTasklet() {
+    return new SimpleAuthenticationTasklet();
+  }
+...
+
+
+
+
+
+

Tips & tricks

+
+
+

Identifying job parameters

+
+

Spring uses a job's parameters to identify job executions. Parameters starting with "-" are not considered for identifying a job execution.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-beanmapping.html b/docs/devon4j/1.0/guide-beanmapping.html new file mode 100644 index 00000000..bdf82e01 --- /dev/null +++ b/docs/devon4j/1.0/guide-beanmapping.html @@ -0,0 +1,468 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Bean-Mapping

+
+
+

For decoupling, you sometimes need to create separate objects (beans) for a different view. E.g. for an external service, you will use a transfer-object instead of the persistence entity so internal changes to the entity do not implicitly change or break the service.

+
+
+

Therefore you have the need to map similar objects which creates a copy. This also has the benefit that modifications to the copy have no side-effect on the original source object. However, to implement such mapping code by hand is very tedious and error-prone (if new properties are added to beans but not to mapping code):

+
+
+
+
public UserEto mapUser(UserEntity source) {
+  UserEto target = new UserEto();
+  target.setUsername(source.getUsername());
+  target.setEmail(source.getEmail());
+  ...
+  return target;
+}
+
+
+
+

Therefore we are using a BeanMapper for this purpose that makes our lives a lot easier. +There are several bean mapping frameworks with different approaches.

+
+
+

For a devon4j-spring application we recommend Orika, follow Spring Bean-Mapping for an introduction to Orika and Dozer in a devon4j-spring context application.

+
+
+ + + + + +
+ + +devon4j started with Dozer as framework for Spring applications and still supports it. However, we now recommend Orika (for new projects) as it is much faster (see Performance of Java Mapping Frameworks). +
+
+
+

For a Quarkus application we recommend Mapstruct, follow Quarkus Bean-Mapping for an introduction to Mapstruct in a quarkus context application.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-blob-support.html b/docs/devon4j/1.0/guide-blob-support.html new file mode 100644 index 00000000..42cbb84b --- /dev/null +++ b/docs/devon4j/1.0/guide-blob-support.html @@ -0,0 +1,451 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

BLOB support

+
+
+

BLOB stands for Binary Large Object. A BLOB may be an image, an office document, ZIP archive or any other multimedia object. +Often these BLOBs are large. If this is the case you need to take care that you do not copy all the BLOB data into your application heap, e.g. when providing them via a REST service. +This could easily lead to performance problems or out of memory errors. +A solution for that problem is "streaming" those BLOBs directly from the database to the client. To demonstrate how this can be accomplished, devonfw provides an example.

+
+
+
+ +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-caching.html b/docs/devon4j/1.0/guide-caching.html new file mode 100644 index 00000000..092313f3 --- /dev/null +++ b/docs/devon4j/1.0/guide-caching.html @@ -0,0 +1,510 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Caching

+
+
+

Caching is a technical approach to improve performance. While it may appear easy at first sight it is an advanced topic. In general, try to use caching only when required for performance reasons. If you come to the point that you need caching first think about:

+
+
+
    +
  • +

    What to cache?
    +Be sure about what you want to cache. Is it static data? How often will it change? What will happen if the data changes but due to caching you might receive "old" values? Can this be tolerated? For how long? This is not a technical question but a business requirement.

    +
  • +
  • +

    Where to cache?
    +Will you cache data on client or server? Where exactly?

    +
  • +
  • +

    How to cache?
    +Is a local cache sufficient or do you need a shared cache?

    +
  • +
+
+
+
+
+

Local Cache

+
+ +
+
+
+

Shared Cache

+
+
+

Distributed Cache

+ +
+
+
+ + +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-client-layer.html b/docs/devon4j/1.0/guide-client-layer.html new file mode 100644 index 00000000..cbe6d33f --- /dev/null +++ b/docs/devon4j/1.0/guide-client-layer.html @@ -0,0 +1,548 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Client Layer

+
+
+

There are various technical approaches to building GUI clients. The devonfw proposes rich clients that connect to the server via data-oriented services (e.g. using REST with JSON). +In general, we have to distinguish among the following types of clients:

+
+
+
    +
  • +

    web clients

    +
  • +
  • +

    native desktop clients

    +
  • +
  • +

    (native) mobile clients

    +
  • +
+
+
+

Our main focus is on web-clients. In our sample application my-thai-star we offer a responsive web-client based on Angular following devon4ng that integrates seamlessly with the back ends of my-thai-star available for Java using devon4j as well as .NET/C# using devon4net. For building angular clients read the separate devon4ng guide.

+
+
+
+
+

JavaScript for Java Developers

+
+
+

In order to get started with client development as a Java developer we give you some hints to get started. Also if you are an experienced JavaScript developer and want to learn Java this can be helpful. First, you need to understand that the JavaScript ecosystem is as large as the Java ecosystem and developing a modern web client requires a lot of knowledge. The following table helps you as experienced developer to get an overview of the tools, configuration-files, and other related aspects from the new world to learn. Also it helps you to map concepts between the ecosystems. Please note that we list the tools recommended by devonfw here (and we know that there are alternatives not listed here such as gradle, grunt, bower, etc.).

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Aspects in JavaScript and Java ecosystem
TopicAspectJavaScriptJava

Programming

Language

TypeScript (extends JavaScript)

Java

Runtime

VM

nodejs (or web-browser)

jvm

Build- & Dependency-Management

Tool

npm or yarn

maven

Config

package.json

pom.xml

Repository

npm repo

maven central (repo search)

Build cmd

ng build or npm run build (goals are not standardized in npm)

mvn install (see lifecycle)

Test cmd

ng test

mvn test

Testing

Test-Tool

jasmine

junit

Test-Runner

karma

junit / surefire

E2E Testing

Protractor

Selenium

Code Analysis

Code Coverage

ng test --no-watch --code-coverage

JaCoCo

Development

IDE

MS VS Code or IntelliJ

Eclipse or IntelliJ

Framework

Angular (etc.)

Spring or Quarkus

+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-common.html b/docs/devon4j/1.0/guide-common.html new file mode 100644 index 00000000..3fdf0b81 --- /dev/null +++ b/docs/devon4j/1.0/guide-common.html @@ -0,0 +1,430 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Common

+
+
+

In our coding-conventions we define a clear packaging and layering. +However, there is always cross-cutting code that does not belong to a specific layer such as generic helpers, general code for configuration or integration, etc. +Therefore, we define a package segment common that can be used as «layer» for such cross-cutting code. +Code from any other layer is allowed to access such common code (at least within the same component).

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-component-facade.html b/docs/devon4j/1.0/guide-component-facade.html new file mode 100644 index 00000000..d681e686 --- /dev/null +++ b/docs/devon4j/1.0/guide-component-facade.html @@ -0,0 +1,506 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Component Facade

+
+
+ + + + + +
+ + +Our recommended approach for implementing the logic layer is use-cases +
+
+
+

For each component of the application, the logic layer defines a component facade. +This is an interface defining all business operations of the component. +It carries the name of the component («Component») and has an implementation named «Component»Impl (see implementation).

+
+
+
+
+

API

+
+
+

The component facade interface defines the logic API of the component and has to be business oriented. +This means that all parameters and return types of all methods from this API have to be business transfer-objects, datatypes (String, Integer, MyCustomerNumber, etc.), or collections of these. +The API may also only access objects of other business components listed in the (transitive) dependencies of the business-architecture.

+
+
+

Here is an example how such an API may look like:

+
+
+
+
public interface Bookingmanagement {
+
+  BookingEto findBooking(Long id);
+
+  BookingCto findBookingCto(Long id);
+
+  Page<BookingEto> findBookingEtos(BookingSearchCriteriaTo criteria);
+
+  void approveBooking(BookingEto booking);
+
+}
+
+
+
+
+
+

Implementation

+
+
+

The implementation of an interface from the logic layer (a component facade or a use-case) carries the name of that interface with the suffix Impl and is annotated with @Named. +An implementation typically needs access to the persistent data. +This is done by injecting the corresponding repository (or DAO). +According to data-sovereignty, only repositories of the same business component may be accessed directly. +For accessing data from other components the implementation has to use the corresponding API of the logic layer (the component facade). Further, it shall not expose persistent entities from the domain layer and has to map them to transfer objects using the bean-mapper.

+
+
+
+
@Named
+@Transactional
+public class BookingmanagementImpl extends AbstractComponentFacade implements Bookingmanagement {
+
+  @Inject
+  private BookingRepository bookingRepository;
+
+  @Override
+  public BookingEto findBooking(Long id) {
+
+    LOG.debug("Get Booking with id {} from database.", id);
+    BookingEntity entity = this.bookingRepository.findOne(id);
+    return getBeanMapper().map(entity, BookingEto.class));
+  }
+}
+
+
+
+

As you can see, entities (BookingEntity) are mapped to corresponding ETOs (BookingEto). +Further details about this can be found in bean-mapping.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-component.html b/docs/devon4j/1.0/guide-component.html new file mode 100644 index 00000000..35799f93 --- /dev/null +++ b/docs/devon4j/1.0/guide-component.html @@ -0,0 +1,482 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Components

+
+
+

Following separation-of-concerns we divide an application into components using our package-conventions and project structure. +As described by the architecture each component is divided into layers as described in the project structure. +Please note that a component will only have the required layers. +So a component may have any number from one to all layers.

+
+
+
+
+

General Component

+
+
+

Cross-cutting aspects belong to the implicit component general. It contains technical configurations and very general code that is not business specific. Such code shall not have any dependencies to other components and therefore business related code.

+
+
+
+
+

Business Component

+
+
+

The business-architecture defines the business components with their allowed dependencies. A small application (microservice) may just have one component and no dependencies making it simple while the same architecture can scale up to large and complex applications (from bigger microservice up to modulith). +Tailoring a business domain into applications and applications into components is a tricky task that needs the skills of an experienced architect. +Also, the tailoring should follow the business and not split by technical reasons or only by size. +Size is only an indicator but not a driver of tailoring. +Whatever hypes like microservices are telling you, never get misled in this regard: +If your system grows and reaches MAX+1 lines of code, it is not the right motivation to split it into two microservices of ~MAX/2 lines of code - such approaches will waste huge amounts of money and lead to chaos.

+
+
+
+
+

App Component

+
+
+

Only in case you need cross-cutting code that aggregates another component you may introduce the component app. +It is allowed to depend on all other components but no other component may depend on it. +With the modularity and flexibility of spring you typically do not need this. +However, when you need to have a class that registers all services or component-facades using direct code dependencies, you can introduce this component.

+
+
+
+
+

Component Example

+
+
+

The following class diagram illustrates an example of the business component Staffmanagement:

+
+
+
+logic layer component pattern +
+
+
+

In this scheme, you can see the structure and flow from the service-layer (REST service call) via the logic-layer to the dataaccess-layer (and back).

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-configuration-mapping.html b/docs/devon4j/1.0/guide-configuration-mapping.html new file mode 100644 index 00000000..8bae723f --- /dev/null +++ b/docs/devon4j/1.0/guide-configuration-mapping.html @@ -0,0 +1,566 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Mapping configuration to your code

+
+
+

If you are using spring-boot as suggested by devon4j your application can be configured by application.properties file as described in configuration. +To get a single configuration option into your code for flexibility, you can use

+
+
+
+
@Value("${my.property.name}")
+private String myConfigurableField;
+
+
+
+

Now, in your application.properties you can add the property:

+
+
+
+
my.property.name=my-property-value
+
+
+
+

You may even use @Value("${my.property.name:my-default-value}") to make the property optional.

+
+
+
+
+

Naming conventions for configuration properties

+
+
+

As a best practice your configuration properties should follow these naming conventions:

+
+
+
    +
  • +

    build the property-name as a path of segments separated by the dot character (.)

    +
  • +
  • +

    segments should get more specific from left to right

    +
  • +
  • +

    a property-name should either be a leaf value or a tree node (prefix of other property-names) but never both! So never have something like foo.bar=value and foo.bar.child=value2.

    +
  • +
  • +

    start with a segment namespace unique to your context or application

    +
  • +
  • +

    a good example would be «myapp».billing.service.email.sender for the sender address of billing service emails send by «myapp».

    +
  • +
+
+
+
+
+

Mapping advanced configuration

+
+
+

However, in many scenarios you will have features that require more than just one property. +Injecting those via @Value is not leading to good code quality. +Instead we create a class with the suffix ConfigProperties containing all configuration properties for our aspect that is annotated with @ConfigurationProperties:

+
+
+
+
@ConfigurationProperties(prefix = "myapp.billing.service")
+public class BillingServiceConfigProperties {
+
+  private final Email email = new Email();
+  private final Smtp smtp = new Smtp();
+
+  public Email getEmail() { return this.email; }
+  public Email getSmtp() { return this.smtp; }
+
+  public static class Email {
+
+    private String sender;
+    private String subject;
+
+    public String getSender() { return this.sender; }
+    public void setSender(String sender) { this.sender = sender; }
+    public String getSubject() { return this.subject; }
+    public void setSubject(String subject) { this.subject = subject; }
+  }
+
+  public static class Smtp {
+
+    private String host;
+    private int port = 25;
+
+    public String getHost() { return this.host; }
+    public void setHost(String host) { this.host = host; }
+    public int getPort() { return this.port; }
+    public void setPort(int port) { this.port = port; }
+  }
+
+}
+
+
+
+

Of course this is just an example to demonstrate this feature of spring-boot. +In order to send emails you would typically use the existing spring-email feature. +But as you can see this allows us to define and access our configuration in a very structured and comfortable way. +The annotation @ConfigurationProperties(prefix = "myapp.billing.service") will automatically map spring configuration properties starting with myapp.billing.service via the according getters and setters into our BillingServiceConfigProperties. +We can easily define defaults (e.g. 25 as default value for myapp.billing.service.smtp.port). +Also Email or Smtp could be top-level classes to be reused in multiple configurations. +Of course you would also add helpful JavaDoc comments to the getters and classes to document your configuration options. +Further to access this configuration, we can use standard dependency-injection:

+
+
+
+
@Inject
+private BillingServiceConfigProperties config;
+
+
+
+

For very generic cases you may also use Map<String, String> to map any kind of property in an untyped way. +An example for generic configuration from devon4j can be found in +ServiceConfigProperties.

+
+
+

For further details about this feature also consult Guide to @ConfigurationProperties in Spring Boot.

+
+
+
+
+

Generate configuration metadata

+
+
+

You should further add this dependency to your module containing the *ConfigProperties:

+
+
+
+
    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-configuration-processor</artifactId>
+      <optional>true</optional>
+    </dependency>
+
+
+
+

This will generate configuration metadata so projects using your code can benefit from autocompletion and getting your JavaDoc as tooltip when editing application.properties, which makes this approach very powerful. +For further details about this please read A Guide to Spring Boot Configuration Metadata.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-configuration.html b/docs/devon4j/1.0/guide-configuration.html new file mode 100644 index 00000000..d7e23c27 --- /dev/null +++ b/docs/devon4j/1.0/guide-configuration.html @@ -0,0 +1,581 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Configuration

+
+
+

An application needs to be configurable in order to allow internal setup (like CDI) but also to allow externalized configuration of a deployed package (e.g. integration into runtime environment). We rely on a comprehensive configuration approach following a "convention over configuration" pattern. This guide adds on to this by detailed instructions and best-practices how to deal with configurations.

+
+
+

In general we distinguish the following kinds of configuration that are explained in the following sections:

+
+
+ +
+
+
+
+

Internal Application Configuration

+
+
+

The application configuration contains all internal settings and wirings of the application (bean wiring, database mappings, etc.) and is maintained by the application developers at development time.

+
+
+

For more detail of Spring stack, see here

+
+
+
+
+

Externalized Configuration

+
+
+

Externalized configuration is a configuration that is provided separately to a deployment package and can be maintained undisturbed by re-deployments.

+
+
+

Environment Configuration

+
+

The environment configuration contains configuration parameters (typically port numbers, host names, passwords, logins, timeouts, certificates, etc.) specific for the different environments. These are under the control of the operators responsible for the application.

+
+
+

The environment configuration is maintained in application.properties files, defining various properties. +These properties are explained in the corresponding configuration sections of the guides for each topic:

+
+
+ +
+
+

Make sure your properties are thoroughly documented by providing a comment to each property. This inline documentation is most valuable for your operating department.

+
+
+

More about structuring your application.properties files can be read here for Spring.

+
+
+

For Quarkus, please refer to Quarkus Config Reference for more details.

+
+
+
+

Business Configuration

+
+

Often applications do not need business configuration. In case they do it should typically be editable by administrators via the GUI. The business configuration values should therefore be stored in the database in key/value pairs.

+
+
+

Therefore we suggest to create a dedicated table with (at least) the following columns:

+
+
+
    +
  • +

    ID

    +
  • +
  • +

    Property name

    +
  • +
  • +

    Property type (Boolean, Integer, String)

    +
  • +
  • +

    Property value

    +
  • +
  • +

    Description

    +
  • +
+
+
+

According to the entries in this table, an administrative GUI may show a generic form to modify business configuration. Boolean values should be shown as checkboxes, integer and string values as text fields. The values should be validated according to their type so an error is raised if you try to save a string in an integer property for example.

+
+
+

We recommend the following base layout for the hierarchical business configuration:

+
+
+

component.[subcomponent].[subcomponent].propertyname

+
+
+
+
+
+

Security

+
+
+

Often you need to have passwords (for databases, third-party services, etc.) as part of your configuration. These are typically environment specific (see above). However, with DevOps and continuous-deployment you might be tempted to commit such configurations into your version-control (e.g. git). Doing that with plain text passwords is a severe problem especially for production systems. Never do that! Instead we offer some suggestions how to deal with sensible configurations:

+
+
+

Password Encryption

+
+

A simple but reasonable approach is to configure the passwords encrypted with a master-password. The master-password should be a strong secret that is specific for each environment. It must never be committed to version-control.

+
+
+

For Spring, we use jasypt-spring-boot. For more details, see here

+
+
+

For Quarkus, see here

+
+
+

Is this Security by Obscurity?

+
+
    +
  • +

    Yes, from the point of view to protect the passwords on the target environment this is nothing but security by obscurity. If an attacker somehow got full access to the machine this will only cause him to spend some more time.

    +
  • +
  • +

    No, if someone only gets the configuration file. So all your developers might have access to the version-control where the config is stored. Others might have access to the software releases that include these configs. But without the master-password that should only be known to specific operators no one else can decrypt the password (except with brute-force which will take a very long time, see jasypt for details).

    +
  • +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-cors-support.html b/docs/devon4j/1.0/guide-cors-support.html new file mode 100644 index 00000000..473e248e --- /dev/null +++ b/docs/devon4j/1.0/guide-cors-support.html @@ -0,0 +1,472 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

CORS support

+
+
+

When you are developing Javascript client and server application separately, you have to deal with cross domain issues. We have to request from an origin domain distinct from the target domain and browsers do not allow this.

+
+
+

So, we need to prepare the server side to accept requests from other domains. We need to cover the following points:

+
+
+
    +
  • +

    Accept request from other domains.

    +
  • +
  • +

    Accept devonfw used headers like X-CSRF-TOKEN or correlationId.

    +
  • +
  • +

    Be prepared to receive secured request (cookies).

    +
  • +
+
+
+

It is important to note that if you are using security in your request (sending cookies) you have to set withCredentials flag to true in your client side request and deal with special IE8 characteristics.

+
+
+

For more information about CORS see here. Information about the CORS headers can be found here.

+
+
+
+
+

Configuring CORS support

+
+
+

To enable CORS support for your application, see the advanced guides. For Spring applications see here. For Quarkus follow the official Quarkus guide.

+
+
+
+
+

Configuration with service mesh

+
+
+

If you are using a service mesh, you can also define your CORS policy directly there. Here is an example from Istio.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-csrf.html b/docs/devon4j/1.0/guide-csrf.html new file mode 100644 index 00000000..57e94562 --- /dev/null +++ b/docs/devon4j/1.0/guide-csrf.html @@ -0,0 +1,556 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Cross-site request forgery (CSRF)

+
+
+

CSRF is a type of malicious exploit of a web application that allows an attacker to induce users to perform actions that they do not intend to perform.

+
+
+
+csrf +
+
+
+

More details about csrf can be found at https://owasp.org/www-community/attacks/csrf.

+
+
+
+
+

Secure devon4j server against CSRF

+
+
+

In case your devon4j server application is not accessed by browsers or the web-client is using JWT based authentication, you are already safe regarding CSRF. +However, if your application is accessed from a browser and you are using form based authentication (with session cookie) or basic authentication, you need to enable CSRF protection. +This guide will tell you how to do this.

+
+
+

Dependency

+
+

To secure your devon4j application against CSRF attacks, you only need to add the following dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-security-csrf</artifactId>
+</dependency>
+
+
+
+

Starting with devon4j version 2020.12.001 application template, this is all you need to do. +However, if you have started from an older version or you want to understand more, please read on.

+
+
+
+

Pluggable web-security

+
+

To enable pluggable security via devon4j security starters you need to apply WebSecurityConfigurer to your BaseWebSecurityConfig (your class extending spring-boot’s WebSecurityConfigurerAdapter) as following:

+
+
+
+
  @Inject
+  private WebSecurityConfigurer webSecurityConfigurer;
+
+  public void configure(HttpSecurity http) throws Exception {
+    // disable CSRF protection by default, use csrf starter to override.
+	  http = http.csrf().disable();
+	  // apply pluggable web-security from devon4j security starters
+    http = this.webSecurityConfigurer.configure(http);
+    .....
+  }
+
+
+
+
+

Custom CsrfRequestMatcher

+
+

If you want to customize which HTTP requests will require a CSRF token, you can implement your own CsrfRequestMatcher and provide it to the devon4j CSRF protection via qualified injection as following:

+
+
+
+
@Named("CsrfRequestMatcher")
+public class CsrfRequestMatcher implements RequestMatcher {
+  @Override
+  public boolean matches(HttpServletRequest request) {
+    .....
+  }
+}
+
+
+
+

Please note that the exact name (@Named("CsrfRequestMatcher")) is required here to ensure your custom implementation will be injected properly.

+
+
+
+

CsrfRestService

+
+

With the devon4j-starter-security-csrf the CsrfRestService gets integrated into your app. +It provides an operation to get the CSRF token via an HTTP GET request. +The URL path to retrieve this CSRF token is services/rest/csrf/v1/token. +As a result you will get a JSON like the following:

+
+
+
+
{
+  "token":"3a8a5f66-c9eb-4494-81e1-7cc58bc3a519",
+  "parameterName":"_csrf",
+  "headerName":"X-CSRF-TOKEN"
+}
+
+
+
+

The token value is a strong random value that will differ for each user session. +It has to be sent with subsequent HTTP requests (when the method is other than GET) in the specified header (X-CSRF-TOKEN).

+
+
+
+

How it works

+
+

Putting it all together, a browser client should call the CsrfRestService after successful login to receive the current CSRF token. +With every subsequent HTTP request (other than GET) the client has to send this token in the corresponding HTTP header. +Otherwise the server will reject the request to prevent CSRF attacks. +Therefore, an attacker might make your browser perform HTTP requests towards your devon4j application backend via <image> elements, <iframes>, etc. +Your browser will then still include your session cookie if you are already logged in (e.g. from another tab). +However, in case the attacker wants to trigger DELETE or POST requests tricking your browser into making changes in the application (delete or update data, etc.) this will fail without the CSRF token. +The attacker may make your browser retrieve the CSRF token but will not be able to retrieve the result and put it into the header of other requests due to the same-origin-policy. +This way your application will be secured against CSRF attacks.

+
+
+
+
+
+

Configure devon4ng client for CSRF

+
+
+

Devon4ng client configuration for CSRF is described here

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-dao.html b/docs/devon4j/1.0/guide-dao.html new file mode 100644 index 00000000..469dfd40 --- /dev/null +++ b/docs/devon4j/1.0/guide-dao.html @@ -0,0 +1,537 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Data Access Object

+
+
+

The Data Access Objects (DAOs) are part of the persistence layer. +They are responsible for a specific entity and should be named «Entity»Dao and «Entity»DaoImpl. +The DAO offers the so called CRUD-functionalities (create, retrieve, update, delete) for the corresponding entity. +Additionally a DAO may offer advanced operations such as query or locking methods.

+
+
+
+
+

DAO Interface

+
+
+

For each DAO there is an interface named «Entity»Dao that defines the API. For CRUD support and common naming we derive it from the ApplicationDao interface that comes with the devon application template:

+
+
+
+
public interface MyEntityDao extends ApplicationDao<MyEntity> {
+  List<MyEntity> findByCriteria(MyEntitySearchCriteria criteria);
+}
+
+
+
+

All CRUD operations are inherited from ApplicationDao so you only have to declare the additional methods.

+
+
+
+
+

DAO Implementation

+
+
+

Implementing a DAO is quite simple. We create a class named «Entity»DaoImpl that extends ApplicationDaoImpl and implements your «Entity»Dao interface:

+
+
+
+
public class MyEntityDaoImpl extends ApplicationDaoImpl<MyEntity> implements MyEntityDao {
+
+  public List<MyEntity> findByCriteria(MyEntitySearchCriteria criteria) {
+    TypedQuery<MyEntity> query = createQuery(criteria, getEntityManager());
+    return query.getResultList();
+  }
+  ...
+}
+
+
+
+

Again you only need to implement the additional non-CRUD methods that you have declared in your «Entity»Dao interface. +In the DAO implementation you can use the method getEntityManager() to access the EntityManager from the JPA. You will need the EntityManager to create and execute queries.

+
+
+

Static queries for DAO Implementation

+
+

All static queries are declared in the file src\main\resources\META-INF\orm.xml:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<entity-mappings version="1.0" xmlns="http://java.sun.com/xml/ns/persistence/orm" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://java.sun.com/xml/ns/persistence/orm http://java.sun.com/xml/ns/persistence/orm_1_0.xsd">
+  <named-query name="find.dish.with.max.price">
+    <query><![CDATA[SELECT dish FROM DishEntity dish WHERE dish.price <= :maxPrice]]></query>
+  </named-query>
+  ...
+</entity-mappings>
+
+
+
+

When your application is started, all these static queries will be created as prepared statements. This allows better performance and also ensures that you get errors for invalid JPQL queries when you start your app rather than later when the query is used.

+
+
+

To avoid redundant occurrences of the query name (find.dish.with.max.price) we define a constant for each named query:

+
+
+
+
public class NamedQueries {
+  public static final String FIND_DISH_WITH_MAX_PRICE = "find.dish.with.max.price";
+}
+
+
+
+

Note that changing the name of the java constant (FIND_DISH_WITH_MAX_PRICE) can be done easily with refactoring. Further you can trace where the query is used by searching the references of the constant.

+
+
+

The following listing shows how to use this query:

+
+
+
+
public List<DishEntity> findDishByMaxPrice(BigDecimal maxPrice) {
+  Query query = getEntityManager().createNamedQuery(NamedQueries.FIND_DISH_WITH_MAX_PRICE);
+  query.setParameter("maxPrice", maxPrice);
+  return query.getResultList();
+}
+
+
+
+

Via EntityManager.createNamedQuery(String) we create an instance of Query for our predefined static query. +Next we use setParameter(String, Object) to provide a parameter (maxPrice) to the query. This has to be done for all parameters of the query.

+
+
+

Note that using the createQuery(String) method, which takes the entire query as string (that may already contain the parameter) is not allowed to avoid SQL injection vulnerabilities. +When the method getResultList() is invoked, the query is executed and the result is delivered as List. As an alternative, there is a method called getSingleResult(), which returns the entity if the query returned exactly one and throws an exception otherwise.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-data-permission.html b/docs/devon4j/1.0/guide-data-permission.html new file mode 100644 index 00000000..c4b8124c --- /dev/null +++ b/docs/devon4j/1.0/guide-data-permission.html @@ -0,0 +1,670 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Data-permissions

+
+
+

In some projects there are demands for permissions and authorization that is dependent on the processed data. E.g. a user may only be allowed to read or write data for a specific region. This is adding some additional complexity to your authorization. If you can avoid this it is always best to keep things simple. However, in various cases this is a requirement. Therefore the following sections give you guidance and patterns how to solve this properly.

+
+
+
+
+

Structuring your data

+
+
+

For all your business objects (entities) that have to be secured regarding data permissions we recommend that you create a separate interface that provides access to the relevant data required to decide about the permission. Here is a simple example:

+
+
+
+
public interface SecurityDataPermissionCountry {
+
+  /**
+   * @return the 2-letter ISO code of the country this object is associated with. Users need
+   *         a data-permission for this country in order to read and write this object.
+   */
+  String getCountry();
+}
+
+
+
+

Now related business objects (entities) can implement this interface. Often such data-permissions have to be applied to an entire object-hierarchy. For security reasons we recommend that also all child-objects implement this interface. For performance reasons we recommend that the child-objects redundantly store the data-permission properties (such as country in the example above) and this gets simply propagated from the parent, when a child object is created.

+
+
+
+
+

Permissions for processing data

+
+
+

When saving or processing objects with a data-permission, we recommend to provide dedicated methods to verify the permission in an abstract base-class such as AbstractUc and simply call this explicitly from your business code. This makes it easy to understand and debug the code. Here is a simple example:

+
+
+
+
protected void verifyPermission(SecurityDataPermissionCountry entity) throws AccessDeniedException;
+
+
+
+

Beware of AOP

+
+

For simple but cross-cutting data-permissions you may also use AOP. This leads to programming aspects that reflectively scan method arguments and magically decide what to do. Be aware that this quickly gets tricky:

+
+
+
    +
  • +

    What if multiple of your method arguments have data-permissions (e.g. implement SecurityDataPermission*)?

    +
  • +
  • +

    What if the object to authorize is only provided as reference (e.g. Long or IdRef) and only loaded and processed inside the implementation where the AOP aspect does not apply?

    +
  • +
  • +

    How to express advanced data-permissions in annotations?

    +
  • +
+
+
+

What we have learned is that annotations like @PreAuthorize from spring-security easily lead to the "programming in string literals" anti-pattern. We strongly discourage to use this anti-pattern. In such case writing your own verifyPermission methods that you manually call in the right places of your business-logic is much better to understand, debug and maintain.

+
+
+
+
+
+

Permissions for reading data

+
+
+

When it comes to restrictions on the data to read it becomes even more tricky. In the context of a user, only entities that the user is permitted to read shall be loaded from the database. This is simple for loading a single entity (e.g. by its ID) as you can load it and then if not permitted throw an exception to secure your code. But what if the user is performing a search query to find many entities? For performance reasons we should only find data the user is permitted to read and filter all the rest already via the database query. But what if this is not a requirement for a single query but needs to be applied cross-cutting to tons of queries? Therefore we have the following pattern that solves your problem:

+
+
+

For each data-permission attribute (or set of such) we create an abstract base entity:

+
+
+
+
@MappedSuperclass
+@EntityListeners(PermissionCheckListener.class)
+@FilterDef(name = "country", parameters = {@ParamDef(name = "countries", type = "string")})
+@Filter(name = "country", condition = "country in (:countries)")
+public abstract class SecurityDataPermissionCountryEntity extends ApplicationPersistenceEntity
+    implements SecurityDataPermissionCountry {
+
+  private String country;
+
+  @Override
+  public String getCountry() {
+    return this.country;
+  }
+
+  public void setCountry(String country) {
+    this.country = country;
+  }
+}
+
+
+
+

There are some special hibernate annotations @EntityListeners, @FilterDef, and @Filter used here allowing to apply a filter on the country for any (non-native) query performed by hibernate. The entity listener may look like this:

+
+
+
+
public class PermissionCheckListener {
+
+  @PostLoad
+  public void read(SecurityDataPermissionCountryEntity entity) {
+    PermissionChecker.getInstance().requireReadPermission(entity);
+  }
+
+  @PrePersist
+  @PreUpdate
+  public void write(SecurityDataPermissionCountryEntity entity) {
+    PermissionChecker.getInstance().requireWritePermission(entity);
+  }
+}
+
+
+
+

This will ensure that hibernate implicitly will call these checks for every such entity when it is read from or written to the database. Further to avoid reading entities from the database the user is not permitted to (and ending up with exceptions), we create an AOP aspect that automatically activates the above declared hibernate filter:

+
+
+
+
@Named
+public class PermissionCheckerAdvice implements MethodBeforeAdvice {
+
+  @Inject
+  private PermissionChecker permissionChecker;
+
+  @PersistenceContext
+  private EntityManager entityManager;
+
+  @Override
+  public void before(Method method, Object[] args, Object target) {
+
+    Collection<String> permittedCountries = this.permissionChecker.getPermittedCountriesForReading();
+    if (permittedCountries != null) { // null is returned for admins that may access all countries
+      if (permittedCountries.isEmpty()) {
+        throw new AccessDeniedException("Not permitted for any country!");
+      }
+      Session session = this.entityManager.unwrap(Session.class);
+      session.enableFilter("country").setParameterList("countries", permittedCountries.toArray());
+    }
+  }
+}
+
+
+
+

Finally to apply this aspect to all Repositories (can easily be changed to DAOs) implement the following advisor:

+
+
+
+
@Named
+public class PermissionCheckerAdvisor implements PointcutAdvisor, Pointcut, ClassFilter, MethodMatcher {
+
+  @Inject
+  private PermissionCheckerAdvice advice;
+
+  @Override
+  public Advice getAdvice() {
+    return this.advice;
+  }
+
+  @Override
+  public boolean isPerInstance() {
+    return false;
+  }
+
+  @Override
+  public Pointcut getPointcut() {
+    return this;
+  }
+
+  @Override
+  public ClassFilter getClassFilter() {
+    return this;
+  }
+
+  @Override
+  public MethodMatcher getMethodMatcher() {
+    return this;
+  }
+
+  @Override
+  public boolean matches(Method method, Class<?> targetClass) {
+    return true; // apply to all methods
+  }
+
+  @Override
+  public boolean isRuntime() {
+    return false;
+  }
+
+  @Override
+  public boolean matches(Method method, Class<?> targetClass, Object... args) {
+    throw new IllegalStateException("isRuntime()==false");
+  }
+
+  @Override
+  public boolean matches(Class<?> clazz) {
+    // when using DAOs simply change to some class like ApplicationDao
+    return DefaultRepository.class.isAssignableFrom(clazz);
+  }
+}
+
+
+
+
+
+

Managing and granting the data-permissions

+
+
+

Following our authorization guide we can simply create a permission for each country. We might simply reserve a prefix (as virtual «app-id») for each data-permission to allow granting data-permissions to end-users across all applications of the IT landscape. In our example we could create access controls country.DE, country.US, country.ES, etc. and assign those to the users. The method permissionChecker.getPermittedCountriesForReading() would then scan for these access controls and only return the 2-letter country code from it.

+
+
+ + + + + +
+ + +Before you make your decisions how to design your access controls please clarify the following questions: +
+
+
+
    +
  • +

    Do you need to separate data-permissions independent of the functional permissions? E.g. may it be required to express that a user can read data from the countries ES and PL but is only permitted to modify data from PL? In such case a single assignment of "country-permissions" to users is insufficient.

    +
  • +
  • +

    Do you want to grant data-permissions individually for each application (higher flexibility and complexity) or for the entire application landscape (simplicity, better maintenance for administrators)? In case of the first approach you would rather have access controls like app1.country.GB and app2.country.GB.

    +
  • +
  • +

    Do your data-permissions depend on objects that can be created dynamically inside your application?

    +
  • +
  • +

    If you want to grant data-permissions on other business objects (entities), how do you want to reference them (primary keys, business keys, etc.)? What reference is most stable? Which is most readable?

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-dataaccess-layer.html b/docs/devon4j/1.0/guide-dataaccess-layer.html new file mode 100644 index 00000000..5702a215 --- /dev/null +++ b/docs/devon4j/1.0/guide-dataaccess-layer.html @@ -0,0 +1,442 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Data-Access Layer

+
+
+

The data-access layer is responsible for all outgoing connections to access and process data. This is mainly about accessing data from a persistent data-store. External systems could also be accessed from the data-access layer if they match this definition, e.g. a mongo-db via REST services.

+
+
+

Note: In the modern project structure, this layer is replaced by the domain layer.

+
+
+
+
+

Database

+
+
+

You need to make your choice for a database. Options are documented here.

+
+
+

The classical approach is to use a Relational Database Management System (RDMS). In such a case, we strongly recommend to follow our JPA Guide. Some NoSQL databases are supported by spring-data so you can consider the repository guide.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-database-migration.html b/docs/devon4j/1.0/guide-database-migration.html new file mode 100644 index 00000000..ef680a2e --- /dev/null +++ b/docs/devon4j/1.0/guide-database-migration.html @@ -0,0 +1,464 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Database Migration

+
+
+

When you have a schema-based database, +you need a solution for schema versioning and migration for your database. +A specific release of your app requires a corresponding version of the schema in the database to run. +As you want simple and continuous deployment you should automate the schema versioning and database migration.

+
+
+

The general idea is that your software product contains "scripts" to migrate the database from schema version X to version X+1. +When you begin your project you start with version 1 and with every increment of your app that needs a change to the database schema (e.g. a new table, a new column to an existing table, a new index, etc.) you add another "script" that migrates from the current to the next version. +For simplicity these versions are just sequential numbers or timestamps. +Now, the solution you choose will automatically manage the schema version in a separate metadata table in your database that stores the current schema version. +When your app is started, it will check the current version inside the database from that metadata table. +As long as there are "scripts" that migrate from there to a higher version, they will be automatically applied to the database and this process is recorded in the metadata table in your database, which also updates the current schema version there. +Using this approach, you can start with an empty database, which will result in all "scripts" being applied sequentially. +Also any version of your database schema can be present and you will always end up in a controlled migration to the latest schema version.

+
+
+
+
+

Options for database migration

+
+
+

For database migration you can choose between the following options:

+
+
+
    +
  • +

    flyway (KISS based approach with migrations as SQL)

    +
  • +
  • +

    liquibase (more complex approach with database abstraction)

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-datatype.html b/docs/devon4j/1.0/guide-datatype.html new file mode 100644 index 00000000..65c44a52 --- /dev/null +++ b/docs/devon4j/1.0/guide-datatype.html @@ -0,0 +1,566 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Datatypes

+
+
+
+
+

A datatype is an object representing a value of a specific type with the following aspects:

+
+
+
    +
  • +

    It has a technical or business specific semantic.

    +
  • +
  • +

    Its JavaDoc explains the meaning and semantic of the value.

    +
  • +
  • +

    It is immutable and therefore stateless (its value assigned at construction time and can not be modified).

    +
  • +
  • +

    It is serializable.

    +
  • +
  • +

    It properly implements #equals(Object) and #hashCode() (two different instances with the same value are equal and have the same hash).

    +
  • +
  • +

    It shall ensure syntactical validation so it is NOT possible to create an instance with an invalid value.

    +
  • +
  • +

    It is responsible for formatting its value to a string representation suitable for sinks such as UI, loggers, etc. Also consider cases like a Datatype representing a password where toString() should return something like "**" instead of the actual password to prevent security accidents.

    +
  • +
  • +

    It is responsible for parsing the value from other representations such as a string (as needed).

    +
  • +
  • +

    It shall provide required logical operations on the value to prevent redundancies. Due to the immutable attribute all manipulative operations have to return a new Datatype instance (see e.g. BigDecimal.add(java.math.BigDecimal)).

    +
  • +
  • +

    It should implement Comparable if a natural order is defined.

    +
  • +
+
+
+

Based on the Datatype a presentation layer can decide how to view and how to edit the value. Therefore a structured data model should make use of custom datatypes in order to be expressive. +Common generic datatypes are String, Boolean, Number and its subclasses, Currency, etc. +Please note that both Date and Calendar are mutable and have very confusing APIs. Therefore, use JSR-310 or jodatime instead. +Even if a datatype is technically nothing but a String or a Number but logically something special it is worth to define it as a dedicated datatype class already for the purpose of having a central javadoc to explain it. On the other side avoid to introduce technical datatypes like String32 for a String with a maximum length of 32 characters as this is not adding value in the sense of a real Datatype. +It is suitable and in most cases also recommended to use the class implementing the datatype as API omitting a dedicated interface.

+
+
+
+— mmm project
+datatype javadoc +
+
+ +
+
+
+

Datatype Packaging

+
+
+

For the devonfw we use a common packaging schema. +The specifics for datatypes are as following:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
SegmentValueExplanation

<component>

*

Here we use the (business) component defining the datatype or general for generic datatypes.

<layer>

common

Datatypes are used across all layers and are not assigned to a dedicated layer.

<scope>

api

Datatypes are always used directly as API even though they may contain (simple) implementation logic. Most datatypes are simple wrappers for generic Java types (e.g. String) but make these explicit and might add some validation.

+
+
+
+

Technical Concerns

+
+
+

Many technologies like Dozer and QueryDSL’s (alias API) are heavily based on reflection. For them to work properly with custom datatypes, the frameworks must be able to instantiate custom datatypes with no-argument constructors. It is therefore recommended to implement a no-argument constructor for each datatype of at least protected visibility.

+
+
+
+
+

Datatypes in Entities

+
+
+

The usage of custom datatypes in entities is explained in the persistence layer guide.

+
+
+
+
+

Datatypes in Transfer-Objects

+
+
+

XML

+
+

For mapping datatypes with JAXB see XML guide.

+
+
+
+

JSON

+
+

For mapping datatypes from and to JSON see JSON custom mapping.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-dependency-injection.html b/docs/devon4j/1.0/guide-dependency-injection.html new file mode 100644 index 00000000..082afc3b --- /dev/null +++ b/docs/devon4j/1.0/guide-dependency-injection.html @@ -0,0 +1,728 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Dependency Injection

+
+
+

Dependency injection is one of the most important design patterns and is a key principle to a modular and component based architecture. +The Java Standard for dependency injection is javax.inject (JSR330) that we use in combination with JSR250. +Additionally, for scoping you can use CDI (Context and Dependency Injection) from JSR365.

+
+
+

There are many frameworks which support this standard including all recent Java EE application servers. +Therefore in devonfw we rely on these open standards and can propagate patterns and code examples that work independent from the underlying frameworks.

+
+
+
+
+

Key Principles

+
+
+

Within dependency injection a bean is typically a reusable unit of your application providing an encapsulated functionality. +This bean can be injected into other beans and it should in general be replaceable. +As an example we can think of a use-case, a repository, etc. +As best practice we use the following principles:

+
+
+
    +
  • +

    Stateless implementation
    +By default such beans shall be implemented stateless. If you store state information in member variables you can easily run into concurrency problems and nasty bugs. This is easy to avoid by using local variables and separate state classes for complex state-information. Try to avoid stateful beans wherever possible. Only add state if you are fully aware of what you are doing and properly document this as a warning in your JavaDoc.

    +
  • +
  • +

    Usage of Java standards
    +We use common standards (see above) that makes our code portable. Therefore we use standardized annotations like @Inject (javax.inject.Inject) instead of proprietary annotations such as @Autowired. Generally we avoid proprietary annotations in business code (logic layer).

    +
  • +
  • +

    Simple injection-style
    +In general you can choose between constructor, setter or field injection. For simplicity we recommend to do private field injection as it is very compact and easy to maintain. We believe that constructor injection is bad for maintenance especially in case of inheritance (if you change the dependencies you need to refactor all sub-classes). Private field injection and public setter injection are very similar but setter injection is much more verbose (often you are even forced to have javadoc for all public methods). If you are writing re-usable library code setter injection will make sense as it is more flexible. In a business application you typically do not need that and can save a lot of boiler-plate code if you use private field injection instead. Nowadays you are using container infrastructure also for your tests (see testing) so there is no need to inject manually (what would require a public setter).

    +
  • +
  • +

    KISS
    +To follow the KISS (keep it small and simple) principle we avoid advanced features (e.g. custom AOP, non-singleton beans) and only use them where necessary.

    +
  • +
  • +

    Separation of API and implementation
    +For important components we should separate a self-contained API documented with JavaDoc from its implementation. Code from other components that wants to use the implementation shall only rely on the API. However, for things that will never be exchanged no API as interface is required you can skip such separation.

    +
  • +
+
+
+
+
+

Example Bean

+
+
+

Here you can see the implementation of an example bean using dependency injection:

+
+
+
+
@ApplicationScoped
+@Named("MyComponent")
+public class MyComponentImpl implements MyComponent {
+  @Inject
+  private MyOtherComponent myOtherComponent;
+
+  @PostConstruct
+  public void init() {
+    // initialization if required (otherwise omit this method)
+  }
+
+  @PreDestroy
+  public void dispose() {
+    // shutdown bean, free resources if required (otherwise omit this method)
+  }
+
+  ...
+}
+
+
+
+

Here MyComponentImpl depends on MyOtherComponent that is injected into the field myOtherComponent because of the @Inject annotation. +To make this work there must be exactly one bean in the container (e.g. spring or quarkus) that is an instance of MyOtherComponent. +In order to put a bean into the container, we can use @ApplicationScoped in case of CDI (required for quarkus) for a stateless bean. +In spring we can omit a CDI annotation and the @Named annotation is already sufficient as a bean is stateless by default in spring. +If we always use @ApplicationScoped we can make this more explicit and more portable across different frameworks. +So in our example we put MyComponentImpl into the container. +That bean will be called MyComponent as we specified in the @Named annotation but we can also omit the name to use the classname as fallback. +Now our bean can be injected into other beans using @Inject annotation either via MyComponent interface (recommended when interface is present) or even directly via MyComponentImpl. +In case you omit the interface, you should also omit the Impl suffix or instead use Bean as suffix.

+
+
+
+
+

Multiple bean implementations

+
+
+

In some cases you might have multiple implementations as beans for the same interface. +The following sub-sections handle the different scenarios to give you guidance.

+
+
+

Only one implementation in container

+
+

In some cases you still have only one implementation active as bean in the container at runtime. +A typical example is that you have different implementations for test and main usage. +This case is easy, as @Inject will always be unique. +The only thing you need to care about is how to configure your framework (spring, quarkus, etc.) to know which implementation to put in the container depending on specific configuration. +In spring this can be achieved via the proprietary @Profile annotation.

+
+
+
+

Injecting all of multiple implementations

+
+

In some situations you may have an interface that defines a kind of "plugin". +You can have multiple implementations in your container and want to have all of them injected. +Then you can request a list with all the bean implementations via the interface as in the following example:

+
+
+
+
  @Inject
+  private List<MyConverter> converters;
+
+
+
+

Your code may iterate over all plugins (converters) and apply them sequentially. +Please note that the injection will fail (at least in spring), when there is no bean available to inject. +So you do not get an empty list injected but will get an exception on startup.

+
+
+
+

Injecting one of multiple implementations

+
+

Another scenario is that you have multiple implementations in your container coexisting, but for injection you may want to choose a specific implementation. +Here you could use the @Named annotation to specify a unique identifier for each implementation what is called qualified injection:

+
+
+
+
@ApplicationScoped
+@Named("UserAuthenticator")
+public class UserAuthenticator implements Authenticator {
+  ...
+}
+@ApplicationScoped
+@Named("ServiceAuthenticator")
+public class ServiceAuthenticator implements Authenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  @Named("UserAuthenticator")
+  private Authenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  @Named("ServiceAuthenticator")
+  private Authenticator authenticator;
+  ...
+}
+
+
+
+

However, we discovered that this pattern is not so great: +The identifiers in the @Named annotation are just strings that could easily break. +You could use constants instead but still this is not the best solution.

+
+
+

In the end you can very much simplify this by just directly injecting the implementation instead:

+
+
+
+
@ApplicationScoped
+public class UserAuthenticator implements Authenticator {
+  ...
+}
+@ApplicationScoped
+public class ServiceAuthenticator implements Authenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  private UserAuthenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  private ServiceAuthenticator authenticator;
+  ...
+}
+
+
+
+

In case you want to strictly decouple from implementations, you can still create dedicated interfaces:

+
+
+
+
public interface UserAuthenticator extends Authenticator {}
+@ApplicationScoped
+public class UserAuthenticatorImpl implements UserAuthenticator {
+  ...
+}
+public interface ServiceAuthenticator extends Authenticator {}
+@ApplicationScoped
+public class ServiceAuthenticatorImpl implements ServiceAuthenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  private UserAuthenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  private ServiceAuthenticator authenticator;
+  ...
+}
+
+
+
+

However, as you can see this is again introducing additional boiler-plate code. +While the principle to separate API and implementation and strictly decouple from implementation is valuable in general, +you should always consider KISS, lean, and agile in contrast and balance pros and cons instead of blindly following dogmas.

+
+
+
+
+
+

Imports

+
+
+

Here are the import statements for the most important annotations for dependency injection

+
+
+
+
import javax.inject.Inject;
+import javax.inject.Named;
+import javax.enterprise.context.ApplicationScoped;
+// import javax.enterprise.context.RequestScoped;
+// import javax.enterprise.context.SessionScoped;
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+
+
+
+
+
+

Dependencies

+
+
+

Please note that with Jakarta EE the dependencies have changed. +When you want to start with Jakarta EE you should use these dependencies to get the annotations for dependency injection:

+
+
+
+
<!-- Basic injection annotations (JSR-330) -->
+<dependency>
+  <groupId>jakarta.inject</groupId>
+  <artifactId>jakarta.inject-api</artifactId>
+</dependency>
+<!-- Basic lifecycle and security annotations (JSR-250)-->
+<dependency>
+  <groupId>jakarta.annotation</groupId>
+  <artifactId>jakarta.annotation-api</artifactId>
+</dependency>
+<!-- Context and dependency injection API (JSR-365) -->
+<dependency>
+  <groupId>jakarta.enterprise</groupId>
+  <artifactId>jakarta.enterprise.cdi-api</artifactId>
+</dependency>
+
+
+
+

Please note that with quarkus you will get them as transitive dependencies out of the box. +The above Jakarta EE dependencies replace these JEE dependencies:

+
+
+
+
<!-- Basic injection annotations (JSR-330) -->
+<dependency>
+  <groupId>javax.inject</groupId>
+  <artifactId>javax.inject</artifactId>
+</dependency>
+<!-- Basic lifecycle and security annotations (JSR-250)-->
+<dependency>
+  <groupId>javax.annotation</groupId>
+  <artifactId>javax.annotation-api</artifactId>
+</dependency>
+<!-- Context and dependency injection API (JSR-365) -->
+<dependency>
+  <groupId>javax.enterprise</groupId>
+  <artifactId>cdi-api</artifactId>
+</dependency>
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-domain-layer.html b/docs/devon4j/1.0/guide-domain-layer.html new file mode 100644 index 00000000..0eb089b1 --- /dev/null +++ b/docs/devon4j/1.0/guide-domain-layer.html @@ -0,0 +1,436 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Domain Layer

+
+
+

The domain layer is responsible for the data-model and mapping this to a database. +The most common approach is to use a Relational Database Management System (RDBMS). In such a case, we strongly recommend to follow our JPA Guide. Some NoSQL databases are supported by spring-data so you can consider the repository guide.

+
+
+

Note: The domain layer is the replacement for the data-access layer in the modern project structure.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-dto.html b/docs/devon4j/1.0/guide-dto.html new file mode 100644 index 00000000..99ff1d32 --- /dev/null +++ b/docs/devon4j/1.0/guide-dto.html @@ -0,0 +1,427 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

DTO approach

+
+
+

As described in our modern structure guide, for application e.g. with microservices architecture where we build smaller applications compared to monoliths, we recommend keeping things as simple as possible. The same principle applies to transfer object. Instead of using different types of transfer objects for each entity such as ETO and CTO, we highly suggest using one data transfer object (DTO) named «BusinessObject»Dto.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-eto-cto.html b/docs/devon4j/1.0/guide-eto-cto.html new file mode 100644 index 00000000..0cb9eaa7 --- /dev/null +++ b/docs/devon4j/1.0/guide-eto-cto.html @@ -0,0 +1,468 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

ETO and CTO approach

+
+ +
+
+
+

ETO

+
+
+

For each persistent entity «BusinessObject»Entity we create or generate a corresponding entity transfer object (ETO) named «BusinessObject»Eto. It has the same properties except for relations.

+
+
+
+
+

BO

+
+
+

In order to centralize the properties (getters and setters with their javadoc) we create a common interface «BusinessObject» implemented both by the entity and its ETO. This also gives us compile-time safety that +bean-mapper can properly map all properties between entity and ETO.

+
+
+
+
+

CTO

+
+
+

If we need to pass an entity with its relation(s) we create a corresponding composite transfer object (CTO) named «BusinessObject»«Subset»Cto that only contains other transfer-objects or collections of them. Here «Subset» is empty for the canonical CTO that holds the ETO together with all its relations. +This is what can be generated automatically with CobiGen. +However, be careful to generate CTOs without thinking and considering design. +If there are no relations at all a CTO is pointless and shall be omitted. +However, if there are multiple relations you typically need multiple CTOs for the same «BusinessObject» that define different subsets of the related data. +These will typically be designed and implemented by hand. +E.g. you may have CustomerWithAddressCto and CustomerWithContractCto. Most CTOs correspond to a specific «BusinessObject» and therefore contain a «BusinessObject»Eto. Such CTOs should inherit from MasterCto.

+
+
+

This pattern with entities, ETOs and CTOs is illustrated by the following UML diagram from our sample application.

+
+
+
+ETOs and CTOs +
+
Figure 1. ETOs and CTOs
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-exceptions.html b/docs/devon4j/1.0/guide-exceptions.html new file mode 100644 index 00000000..4c5181f5 --- /dev/null +++ b/docs/devon4j/1.0/guide-exceptions.html @@ -0,0 +1,632 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Exception Handling

+
+ +
+
+
+

Exception Principles

+
+
+

For exceptions we follow these principles:

+
+
+
    +
  • +

    We only use exceptions for exceptional situations and not for programming control flows, etc. Creating an exception in Java is expensive and hence you should not do it just for testing if something is present, valid or permitted. In the latter case design your API to return this as a regular result.

    +
  • +
  • +

    We use unchecked exceptions (RuntimeException) [1]

    +
  • +
  • +

    We distinguish internal exceptions and user exceptions:

    +
    +
      +
    • +

      Internal exceptions have technical reasons. For unexpected and exotic situations it is sufficient to throw existing exceptions such as IllegalStateException. For common scenarios a own exception class is reasonable.

      +
    • +
    • +

      User exceptions contain a message explaining the problem for end users. Therefore we always define our own exception classes with a clear, brief but detailed message.

      +
    • +
    +
    +
  • +
  • +

    Our own exceptions derive from an exception base class supporting

    + +
  • +
+
+
+

All this is offered by mmm-util-core that we propose as solution.

+
+
+
+
+

Exception Example

+
+
+

Here is an exception class from our sample application:

+
+
+
+
public class IllegalEntityStateException extends ApplicationBusinessException {
+
+  private static final long serialVersionUID = 1L;
+
+  public IllegalEntityStateException(Object entity, Object state) {
+
+    this((Throwable) null, entity, state);
+  }
+
+
+  public IllegalEntityStateException(Object entity, Object currentState, Object newState) {
+
+    this(null, entity, currentState, newState);
+  }
+
+  public IllegalEntityStateException(Throwable cause, Object entity, Object state) {
+
+    super(cause, createBundle(NlsBundleApplicationRoot.class).errorIllegalEntityState(entity, state));
+  }
+
+  public IllegalEntityStateException(Throwable cause, Object entity, Object currentState, Object newState) {
+
+    super(cause, createBundle(NlsBundleApplicationRoot.class).errorIllegalEntityStateChange(entity, currentState,
+        newState));
+  }
+
+}
+
+
+
+

The message templates are defined in the interface NlsBundleApplicationRoot as follows:

+
+
+
+
public interface NlsBundleApplicationRoot extends NlsBundle {
+
+
+  @NlsBundleMessage("The entity {entity} is in state {state}!")
+  NlsMessage errorIllegalEntityState(@Named("entity") Object entity, @Named("state") Object state);
+
+
+  @NlsBundleMessage("The entity {entity} in state {currentState} can not be changed to state {newState}!")
+  NlsMessage errorIllegalEntityStateChange(@Named("entity") Object entity, @Named("currentState") Object currentState,
+      @Named("newState") Object newState);
+
+
+  @NlsBundleMessage("The property {property} of object {object} can not be changed!")
+  NlsMessage errorIllegalPropertyChange(@Named("object") Object object, @Named("property") Object property);
+
+  @NlsBundleMessage("There is currently no user logged in")
+  NlsMessage errorNoActiveUser();
+
+
+
+
+
+

Handling Exceptions

+
+
+

For catching and handling exceptions we follow these rules:

+
+
+
    +
  • +

    We do not catch exceptions just to wrap or to re-throw them.

    +
  • +
  • +

    If we catch an exception and throw a new one, we always have to provide the original exception as cause to the constructor of the new exception.

    +
  • +
  • +

    At the entry points of the application (e.g. a service operation) we have to catch and handle all throwables. This is done via the exception-facade-pattern via an explicit facade or aspect. The devon4j-rest module already provides ready-to-use implementations for this such as RestServiceExceptionFacade. The exception facade has to …​

    +
    +
      +
    • +

      log all errors (user errors on info and technical errors on error level)

      +
    • +
    • +

      ensure the entire exception is passed to the logger (not only the message) so that the logger can capture the entire stacktrace and the root cause is not lost.

      +
    • +
    • +

      convert the error to a result appropriable for the client and secure for Sensitive Data Exposure. Especially for security exceptions only a generic security error code or message may be revealed but the details shall only be logged but not be exposed to the client. All internal exceptions are converted to a generic error with a message like:

      +
      +
      +
      +

      An unexpected technical error has occurred. We apologize any inconvenience. Please try again later.

      +
      +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+

Common Errors

+
+
+

The following errors may occur in any devon application:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 1. Common Exceptions
CodeMessageLink

TechnicalError

An unexpected error has occurred! We apologize any inconvenience. Please try again later.

TechnicalErrorUserException.java

ServiceInvoke

«original message of the cause»

ServiceInvocationFailedException.java

+
+
+
+
+
+1. Whether to use checked exceptions or not is a controversial topic. Arguments for both sides can be found under The Trouble with Checked Exceptions, Unchecked Exceptions — The Controversy, and Checked Exceptions are Evil. The arguments in favor of unchecked exceptions tend to prevail for applications built with Devon4j. Therefore, unchecked exceptions should be used for a consistent style. +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-feature-toggle.html b/docs/devon4j/1.0/guide-feature-toggle.html new file mode 100644 index 00000000..cab2fcc8 --- /dev/null +++ b/docs/devon4j/1.0/guide-feature-toggle.html @@ -0,0 +1,672 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Feature-Toggles

+
+
+

Most software development teams use Feature-Branching to be able to work in parallel and maintain a stable main branch in the VCS. However, Feature-Branching might not be the ideal tool in every case because of big merges and isolation between development groups. In many cases, Feature-Toggles can avoid some of these problems, so these should definitely be considered to be used in the collaborative software development.

+
+
+
+
+

Implementation with the devonfw

+
+
+

To use Feature-Toggles with the devonfw, use the Framework Togglz because it has all the features generally needed and provides a great documentation.

+
+
+

For a pretty minimal working example, also see this fork.

+
+
+

Preparation

+
+

The following example takes place in the oasp-sample-core project, so the necessary dependencies have to be added to the according pom.xml file. Required are the main Togglz project including Spring support, the Togglz console to graphically change the feature state and the Spring security package to handle authentication for the Togglz console.

+
+
+
+
<!-- Feature-Toggle-Framework togglz -->
+<dependency>
+  <groupId>org.togglz</groupId>
+  <artifactId>togglz-spring-boot-starter</artifactId>
+  <version>2.3.0.RC2</version>
+</dependency>
+
+<dependency>
+  <groupId>org.togglz</groupId>
+  <artifactId>togglz-console</artifactId>
+  <version>2.3.0.RC2</version>
+</dependency>
+
+<dependency>
+  <groupId>org.togglz</groupId>
+  <artifactId>togglz-spring-security</artifactId>
+  <version>2.3.0.RC2</version>
+</dependency>
+
+
+
+

In addition to that, the following lines have to be included in the spring configuration file application.properties

+
+
+
+
# configuration for the togglz Feature-Toggle-Framework
+togglz.enabled=true
+togglz.console.secured=false
+
+
+
+
+

Small features

+
+

For small features, a simple query of the toggle state is often enough to achieve the desired functionality. To illustrate this, a simple example follows, which implements a toggle to limit the page size returned by the staffmanagement. See here for further details.

+
+
+

This is the current implementation to toggle the feature:

+
+
+
+
// Uncomment next line in order to limit the maximum page size for the staff member search
+// criteria.limitMaximumPageSize(MAXIMUM_HIT_LIMIT);
+
+
+
+

To realise this more elegantly with Togglz, first an enum is required to configure the feature-toggle.

+
+
+
+
public enum StaffmanagementFeatures implements Feature {
+  @Label("Limit the maximum page size for the staff members")
+  LIMIT_STAFF_PAGE_SIZE;
+
+  public boolean isActive() {
+    return FeatureContext.getFeatureManager().isActive(this);
+  }
+}
+
+
+
+

To familiarize the Spring framework with the enum, add the following entry to the application.properties file.

+
+
+
+
togglz.feature-enums=io.oasp.gastronomy.restaurant.staffmanagement.featuremanager.StaffmanagementFeatures
+
+
+
+

After that, the toggle can be used easily by calling the isActive() method of the enum.

+
+
+
+
if (StaffmanagementFeatures.LIMIT_STAFF_PAGE_SIZE.isActive()) {
+  criteria.limitMaximumPageSize(MAXIMUM_HIT_LIMIT);
+}
+
+
+
+

This way, you can easily switch the feature on or off by using the administration console at http://localhost:8081/devon4j-sample-server/togglz-console. If you are getting redirected to the login page, just sign in with any valid user (eg. admin).

+
+
+
+

Extensive features

+
+

When implementing extensive features, you might want to consider using the strategy design pattern to maintain the overview of your software. The following example is an implementation of a feature which adds a 25% discount to all products managed by the offermanagement.

+
+
+
Therefore there are two strategies needed:
+
    +
  1. +

    Return the offers with the normal price

    +
  2. +
  3. +

    Return the offers with a 25% discount

    +
  4. +
+
+
+

The implementation is pretty straight forward so use this as a reference. Compare this for further details.

+
+
+
+
@Override
+@RolesAllowed(PermissionConstants.FIND_OFFER)
+public PaginatedListTo<OfferEto> findOfferEtos(OfferSearchCriteriaTo criteria) {
+  criteria.limitMaximumPageSize(MAXIMUM_HIT_LIMIT);
+  PaginatedListTo<OfferEntity> offers = getOfferDao().findOffers(criteria);
+
+
+  if (OffermanagementFeatures.DISCOUNT.isActive()) {
+    return getOfferEtosDiscount(offers);
+  } else {
+    return getOfferEtosNormalPrice(offers);
+  }
+
+}
+
+
+// Strategy 1: Return the OfferEtos with the normal price
+private PaginatedListTo<OfferEto> getOfferEtosNormalPrice(PaginatedListTo<OfferEntity> offers) {
+  return mapPaginatedEntityList(offers, OfferEto.class);
+}
+
+// Strategy 2: Return the OfferEtos with the new, discounted price
+private PaginatedListTo<OfferEto> getOfferEtosDiscount(PaginatedListTo<OfferEntity> offers) {
+  offers = addDiscountToOffers(offers);
+  return mapPaginatedEntityList(offers, OfferEto.class);
+}
+
+private PaginatedListTo<OfferEntity> addDiscountToOffers(PaginatedListTo<OfferEntity> offers) {
+  for (OfferEntity oe : offers.getResult()) {
+    Double oldPrice = oe.getPrice().getValue().doubleValue();
+
+    // calculate the new price and round it to two decimal places
+    BigDecimal newPrice = new BigDecimal(oldPrice * 0.75);
+    newPrice = newPrice.setScale(2, RoundingMode.HALF_UP);
+
+    oe.setPrice(new Money(newPrice));
+  }
+
+  return offers;
+}
+
+
+
+
+
+
+

Guidelines for a successful use of feature-toggles

+
+
+

The use of feature-toggles requires a specified set of guidelines to maintain the overview on the software. The following is a collection of considerations and examples for conventions that are reasonable to use.

+
+
+

Minimize the number of toggles

+
+

When using too many toggles at the same time, it is hard to maintain a good overview of the system and things like finding bugs are getting much harder. Additionally, the management of toggles in the configuration interface gets more difficult due to the amount of toggles.

+
+
+

To prevent toggles from piling up during development, a toggle and the associated obsolete source code should be removed after the completion of the corresponding feature. In addition to that, the existing toggles should be revisited periodically to verify that these are still needed and therefore remove legacy toggles.

+
+
+
+

Consistent naming scheme

+
+

A consistent naming scheme is the key to a structured and easily maintainable set of features. This should include the naming of toggles in the source code and the appropriate naming of commit messages in the VCS. The following section contains an example for a useful naming scheme including a small example.

+
+
+

Every Feature-Toggle in the system has to get its own unique name without repeating any names of features, which were removed from the system. The chosen names should be descriptive names to simplify the association between toggles and their purpose. If the feature should be split into multiple sub-features, you might want to name the feature like the parent feature with a describing addition. If for example you want to split the DISCOUNT feature into the logic and the UI part, you might want to name the sub-features DISCOUNT_LOGIC and DISCOUNT_UI.

+
+
+

The entry in the togglz configuration enum should be named identically to the aforementioned feature name. The explicitness of feature names prevents a confusion between toggles due to using multiple enums.

+
+
+

Commit messages are very important for the use of feature-toggles and also should follow a predefined naming scheme. You might want to state the feature name at the beginning of the message, followed by the actual message, describing what the commit changes to the feature. An example commit message could look like the following:

+
+
+
+
DISCOUNT: Add the feature-toggle to the offermanagement implementation.
+
+
+
+

Mentioning the feature name in the commit message has the advantage, that you can search your git log for the feature name and get every commit belonging to the feature. An example for this using the tool grep could look like this.

+
+
+
+
$ git log | grep -C 4 DISCOUNT
+
+commit 034669a48208cb946cc6ba8a258bdab586929dd9
+Author: Florian Luediger <florian.luediger@somemail.com>
+Date:   Thu Jul 7 13:04:37 2016 +0100
+
+DISCOUNT: Add the feature-toggle to the offermanagement implementation.
+
+
+
+

To keep track of all the features in your software system, a platform like GitHub offers issues. When creating an issue for every feature, you can retrace, who created the feature and who is assigned to completing its development. When referencing the issue from commits, you also have links to all the relevant commits from the issue view.

+
+
+
+

Placement of toggle points

+
+

To maintain a clean codebase, you definitely want to avoid using the same toggle in different places in the software. There should be one single query of the toggle which should be able to toggle the whole functionality of the feature. If one single toggle point is not enough to switch the whole feature on or off, you might want to think about splitting the feature into multiple ones.

+
+
+
+

Use of fine-grained features

+
+

Bigger features in general should be split into multiple sub-features to maintain the overview on the codebase. These sub-features get their own feature-toggle and get implemented independently.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-flyway.html b/docs/devon4j/1.0/guide-flyway.html new file mode 100644 index 00000000..12aa2937 --- /dev/null +++ b/docs/devon4j/1.0/guide-flyway.html @@ -0,0 +1,543 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Flyway

+
+
+

Flyway is a tool for database migration and schema versioning. +See why for a motivation why using flyway.

+
+
+

Flyway can be used standalone e.g. via flyway-maven-plugin or can be integrated directly into your app to make sure the database migration takes place on startup. +For simplicity we recommend to integrate flyway into your app. +However, you need to be aware that therefore your app needs database access with full schema owner permissions.

+
+
+
+
+

Organizational Advice

+
+
+

A few considerations with respect to project organization will help to implement maintainable Flyway migrations.

+
+
+

At first, testing and production environments must be clearly and consistently distinguished. Use the following directory structure to achieve this distinction:

+
+
+
+
  src/main/resources/db
+  src/test/resources/db
+
+
+
+

Although this structure introduces redundancies, the benefit outweighs this disadvantage. +An even more fine-grained production directory structure which contains one sub folder per release should be implemented:

+
+
+
+
  src/main/resources/db/migration/releases/X.Y/x.sql
+
+
+
+

Emphasizing that migration scripts below the current version must never be changed will aid the second advantage of migrations: it will always be clearly reproducible in which state the database currently is. +Here, it is important to mention that, if test data is required, it must be managed separately from the migration data in the following directory:

+
+
+
+
  src/test/resources/db/migration/
+
+
+
+

The migration directory is added to aid easy usage of Flyway defaults. +Of course, test data should also be managed per release as like production data.

+
+
+

With regard to content, separation of concerns (SoC) is an important goal. SoC can be achieved by distinguishing and writing multiple scripts with respect to business components/use cases (or database tables in case of large volumes of master data [1]). Comprehensible file names aid this separation.

+
+
+

It is important to have clear responsibilities regarding the database, the persistence layer (JPA), and migrations. Therefore a dedicated database expert should be in charge of any migrations performed or she should at least be informed before any change to any of the mentioned parts is applied.

+
+
+
+
+

Technical Configuration

+
+
+

Database migrations can be SQL based or Java based.

+
+
+

To enable auto migration on startup (not recommended for productive environment) set the following property in the application.properties file for an environment.

+
+
+
+
flyway.enabled=true
+flyway.clean-on-validation-error=false
+
+
+
+

For development environment it is helpful to set both properties to true in order to simplify development. For regular environments flyway.clean-on-validation-error should be false.

+
+
+

If you want to use Flyway set the following property in any case to prevent Hibernate from doing changes on the database (pre-configured by default in devonfw):

+
+
+
+
spring.jpa.hibernate.ddl-auto=validate
+
+
+
+

The setting must be communicated to and coordinated with the customer and their needs. +In acceptance testing the same configuration as for the production environment should be enabled.

+
+
+

Since migration scripts will also be versioned the end-of-line (EOL) style must be fixated according to this issue. This is however solved in flyway 4.0+ and the latest devonfw release. +Also, the version numbers of migration scripts should not consist of simple ascending integer numbers like V0001…​, V0002…​, …​ This naming may lead to problems when merging branches. Instead the usage of timestamps as version numbers will help to avoid such problems.

+
+
+
+
+

Naming Conventions

+
+
+

Database migrations should follow this naming convention: +V<version>__<description> (e.g.: V12345__Add_new_table.sql).

+
+
+

It is also possible to use Flyway for test data. To do so place your test data migrations in src/main/resources/db/testdata/ and set property

+
+
+
+
flyway.locations=classpath:db/migration/releases,classpath:db/migration/testdata
+
+
+
+

Then Flyway scans the additional location for migrations and applies all in the order specified by their version. If migrations V0001__... and V0002__... exist and a test data migration should be applied in between you can name it V0001_1__....

+
+
+
+
+
+
+1. "Stammdaten" in German. +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-i18n.html b/docs/devon4j/1.0/guide-i18n.html new file mode 100644 index 00000000..9f439c0e --- /dev/null +++ b/docs/devon4j/1.0/guide-i18n.html @@ -0,0 +1,505 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Internationalization

+
+
+

Internationalization (I18N) is about writing code independent from locale-specific information. +For I18N of text messages we are suggesting +mmm native-language-support.

+
+
+

In devonfw we have developed a solution to manage text internationalization. devonfw solution comes into two aspects:

+
+
+
    +
  • +

    Bind locale information to the user.

    +
  • +
  • +

    Get the messages in the current user locale.

    +
  • +
+
+
+
+
+

Binding locale information to the user

+
+
+

We have defined two different points to bind locale information to user, depending on user is authenticated or not.

+
+
+
    +
  • +

    User not authenticated: devonfw intercepts the unsecured request and extracts the locale from it. At first, we try to extract a language parameter from the request and if it is not possible, we extract the locale from the Accept-Language header.

    +
  • +
  • +

    User authenticated. During the login process, application developers are responsible for filling the language parameter in the UserProfile class. This language parameter could be obtained from DB, LDAP, request, etc. In the devonfw sample we get the locale information from the database.

    +
  • +
+
+
+

This image shows the entire process:

+
+
+
+Internationalization +
+
+
+
+
+

Getting internationalized messages

+
+
+

devonfw has a bean that manages i18n message resolution, the ApplicationLocaleResolver. This bean is responsible for getting the current user, extracting the locale information from it, and reading the correct properties file to get the message.

+
+
+

The i18n properties file must be called ApplicationMessages_la_CO.properties where la=language and CO=country. This is an example of a i18n properties file for English language to translate devonfw sample user roles:

+
+
+

ApplicationMessages_en_US.properties

+
+
+
+
admin=Admin
+
+
+
+

You should define an ApplicationMessages_la_CO.properties file for every language that your application needs.

+
+
+

ApplicationLocaleResolver bean is injected in AbstractComponentFacade class so you have available this bean in logic layer so you only need to put this code to get an internationalized message:

+
+
+
+
String msg = getApplicationLocaleResolver().getMessage("mymessage");
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-jdk.html b/docs/devon4j/1.0/guide-jdk.html new file mode 100644 index 00000000..d07754e8 --- /dev/null +++ b/docs/devon4j/1.0/guide-jdk.html @@ -0,0 +1,790 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Java Development Kit

+
+
+

The Java Development Kit is an implementation of the Java platform. It provides the Java Virtual Machine (JVM) and the Java Runtime Environment (JRE).

+
+
+
+
+

Editions

+
+
+

The JDK exists in different editions:

+
+
+ +
+
+

As Java is evolving and also complex, maintaining a JVM requires a lot of energy. +Therefore many alternative JDK editions are unable to cope with this and support the latest Java versions and according compatibility. +Unfortunately OpenJDK only maintains a specific version of Java for a relatively short period of time before moving to the next major version. +In the end, this technically means that OpenJDK is a continuous beta and can not be used in production for reasonable software projects. +As OracleJDK changed its licensing model and can not be used for commercial usage even during development, things can get tricky. +You may want to use OpenJDK for development and OracleJDK only in production. +However, e.g. OpenJDK 11 never released a version that is stable enough for reasonable development (e.g. the javadoc tool is broken and fixes are not available for OpenJDK 11 - fixed in 11.0.3 which is only available as OracleJDK 11, or you need to go to OpenJDK 12+, which has other bugs) so in the end there is no working release of OpenJDK 11. +This more or less forces you to use OracleJDK which requires you to buy a subscription so you can use it for commercial development. +However, there is AdoptOpenJDK that provides forked releases of OpenJDK with bug-fixes which might be an option. +Anyhow, as you want to have your development environment close to production, the productively used JDK (most likely OracleJDK) should be preferred also for development.

+
+
+
+
+

Upgrading

+
+
+

Until Java 8, compatibility was one of the key aspects for Java version updates (after the mess on the Swing updates with Java2 many years ago). +However, Java 9 introduced a lot of breaking changes. +This documentation wants to share the experience we collected in devonfw when upgrading from Java 8 to newer versions. +First of all we separate runtime changes that you need if you want to build your software with JDK 8 but such that it can also run on newer versions (e.g. JRE 11) +from changes required to also build your software with more recent JDKs (e.g. JDK 11 or 12).

+
+
+

Runtime Changes

+
+

This section describes required changes to your software in order to make it run also with versions newer than Java 8.

+
+
+

Classes removed from JDK

+
+

The first thing that most users hit when running their software with newer Java versions is a ClassNotFoundException like this:

+
+
+
+
Caused by: java.lang.ClassNotFoundException: javax.xml.bind.JAXBException
+
+
+
+

As Java 9 introduced a module system with Jigsaw, the JDK that has been a monolithic mess is now a well-defined set of structured modules. +Some of the classes that used to come with the JDK moved to modules that were not available by default in Java 9 and have even been removed entirely in later versions of Java. +Therefore you should simply treat such code just like any other 3rd party component that you can add as a (maven) dependency. +The following table gives you the required hints to make your software work even with such classes / modules removed from the JDK (please note that the specified version is just a suggestion that worked, feel free to pick a more recent or more appropriate version):

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Dependencies for classes removed from Java 8 since 9+
ClassGroupIdArtifactIdVersion

javax.xml.bind.*

javax.xml.bind

jaxb-api

2.3.1

com.sun.xml.bind.*

org.glassfish.jaxb

jaxb-runtime

2.3.1

java.activation.*

javax.activation

javax.activation-api

1.2.0

java.transaction.*

javax.transaction

javax.transaction-api

1.2

java.xml.ws.*

javax.xml.ws

jaxws-api

2.3.1

javax.jws.*

javax.jws

javax.jws-api

1.1

javax.annotation.*

javax.annotation

javax.annotation-api

1.3.2

+
+
+

3rd Party Updates

+
+

Further, internal and unofficial APIs (e.g. sun.misc.Unsafe) have been removed. +These are typically not used by your software directly but by low-level 3rd party libraries like asm that need to be updated. +Also simple things like the Java version have changed (from 1.8.x to 9.x, 10.x, 11.x, 12.x, etc.). +Some 3rd party libraries were parsing the Java version in a very naive way making them unable to be used with Java 9+:

+
+
+
+
Caused by: java.lang.NullPointerException
+   at org.apache.maven.surefire.shade.org.apache.commons.lang3.SystemUtils.isJavaVersionAtLeast (SystemUtils.java:1626)
+
+
+
+

Therefore the following table gives an overview of common 3rd party libraries that have been affected by such breaking changes and need to be updated to at least the specified version:

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2. Minimum recommended versions of common 3rd party for Java 9+
GroupIdArtifactIdVersionIssue

org.apache.commons

commons-lang3

3.7

LANG-1365

cglib

cglib

3.2.9

102, 93, 133

org.ow2.asm

asm

7.1

2941

org.javassist

javassist

3.25.0-GA

194, 228, 246, 171

+
+
+

ResourceBundles

+
+

For internationalization (i18n) and localization (l10n) ResourceBundle is used for language and country specific texts and configurations as properties (e.g. MyResourceBundle_de.properties). With Java modules there are changes and impacts you need to know to get things working. The most important change is documented in the JavaDoc of ResourceBundle. However, instead of using ResourceBundleProvider and refactoring your entire code causing incompatibilities, you can simply put the resource bundles in a regular JAR on the classpath rather than a named module (or into the launching app). +If you want to implement (new) Java modules with i18n support, you can have a look at mmm-nls.

+
+
+
+
+

Buildtime Changes

+
+

If you also want to change your build to work with a recent JDK you also need to ensure that test frameworks and maven plugins properly support this.

+
+
+

Findbugs

+
+

Findbugs does not work with Java 9+ and is actually a dead project. +The new findbugs is SpotBugs. +For maven the new solution is spotbugs-maven-plugin:

+
+
+
+
<plugin>
+  <groupId>com.github.spotbugs</groupId>
+  <artifactId>spotbugs-maven-plugin</artifactId>
+  <version>3.1.11</version>
+</plugin>
+
+
+
+
+

Test Frameworks

+ + ++++++ + + + + + + + + + + + + + + + + +
Table 3. Minimum recommended versions of common 3rd party test frameworks for Java 9+
GroupIdArtifactIdVersionIssue

org.mockito

mockito-core

2.23.4

1419, 1696, 1607, 1594, 1577, 1482

+
+
+

Maven Plugins

+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4. Minimum recommended versions of common maven plugins for Java 9+
GroupIdArtifactId(min.) VersionIssue

org.apache.maven.plugins

maven-compiler-plugin

3.8.1

x

org.apache.maven.plugins

maven-surefire-plugin

2.22.2

SUREFIRE-1439

org.apache.maven.plugins

maven-surefire-report-plugin

2.22.2

SUREFIRE-1439

org.apache.maven.plugins

maven-archetype-plugin

3.1.0

x

org.apache.maven.plugins

maven-javadoc-plugin

3.1.0

x

org.jacoco

jacoco-maven-plugin

0.8.3

663

+
+
+

Maven Usage

+
+

With Java modules you can not run Javadoc standalone anymore or you will get this error when running mvn javadoc:javadoc:

+
+
+
+
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-javadoc-plugin:3.1.1:javadoc (default-cli) on project mmm-base: An error has occurred in Javadoc report generation:
+[ERROR] Exit code: 1 - error: module not found: io.github.mmm.base
+[ERROR]
+[ERROR] Command line was: /projects/mmm/software/java/bin/javadoc @options @packages @argfile
+
+
+
+

As a solution or workaround you need to include the compile goal into your build lifecycle so the module-path is properly configured:

+
+
+
+
mvn compile javadoc:javadoc
+
+
+
+
+
+
+
+ +
+
+

We want to give credits and say thanks to the following articles that have been there before and helped us on our way:

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-jee.html b/docs/devon4j/1.0/guide-jee.html new file mode 100644 index 00000000..327c9be2 --- /dev/null +++ b/docs/devon4j/1.0/guide-jee.html @@ -0,0 +1,539 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

JEE

+
+
+

This section is about Java Enterprise Edition (JEE). +According to our key principles we focus on open standards. +For Java this means that we consider official standards from Java Standard and Enterprise Edition as first choice for considerations. +Therefore we also decided to recommend JAX-RS over SpringMVC as the latter is proprietary. +Only if an existing Java standard is not suitable for current demands such as Java Server Faces (JSF), we do not officially recommend it (while you are still free to use it if you have good reasons to do so). +In all other cases we officially suggest the according standard and use it in our guides, code-samples, sample application, modules, templates, etc. +Examples for such standards are JPA, JAX-RS, JAX-WS, JSR330, JSR250, JAX-B, etc.

+
+
+
+
+

Application-Server

+
+
+

We designed everything based on standards to work with different technology stacks and servlet containers. +However, we strongly encourage you to use modern and lightweight frameworks such as spring or quarkus. +You are free to decide for a JEE application server but here is a list of good reasons for our decision:

+
+
+
    +
  • +

    Up-to-date

    +
    +

    With spring or quarkus you easily keep up to date with evolving technologies (microservices, reactive, NoSQL, etc.). +Most application servers put you in a jail with old legacy technology. +In many cases you are even forced to use a totally outdated version of java (JVM/JDK). +This may even cause severe IT-Security vulnerabilities but with expensive support you might get updates. +Also with lightweight open-source frameworks you need to be aware that for IT-security you need to update frequently, which can cost quite a lot of additional maintenance effort.

    +
    +
  • +
  • +

    Development speed

    +
    +

    With spring-boot you can implement and especially test your individual logic very fast. Starting the app in your IDE is very easy, fast, and realistic (close to production). You can easily write JUnit tests that startup your server application to e.g. test calls to your remote services via HTTP fast and easy. For application servers you need to bundle and deploy your app which takes more time and limits you in various ways. We are aware that this has improved in the past but also spring continuously improves and is always way ahead in this area. Further, with spring you have your configurations bundled together with the code in version control (still with ability to handle different environments) while with application servers these are configured externally and can not be easily tested during development.

    +
    +
  • +
  • +

    Documentation

    +
    +

    Spring and also quarkus have an extremely open and active community. +There is documentation for everything available for free on the web. +You will find solutions to almost any problem on platforms like stackoverflow. +If you have a problem you are only a google search away from your solution. +This is very much different for proprietary application server products.

    +
    +
  • +
  • +

    Helpful Exception Messages

    +
    +

    Especially spring is really great for developers on exception messages. +If you do something wrong you get detailed and helpful messages that guide you to the problem or even the solution. +This is not as great in application servers.

    +
    +
  • +
  • +

    Future-proof

    +
    +

    Spring has evolved really well over time. +Since its 1.0 release in 2004 spring has continuously been improved and always caught up with important trends and innovations. +Even in critical situations, when the company behind it (interface21) was sold, spring went on perfectly. +Quarkus on the other hand is relatively new. +It does not have to carry a large legacy history and is therefore most state-of-the-art for modern projects esp. in cloud environments. +JEE went through a lot of trouble and crisis. +Just look at the EJB pain stories. +This happened often in the past and also recently. +See JEE 8 in crisis.

    +
    +
  • +
  • +

    Free

    +
    +

    Spring and quarkus including their ecosystems are free and open-source. +It still perfectly integrates with commercial solutions for specific needs. +Most application servers are commercial and cost a lot of money. +As of today the ROI for this is of question.

    +
    +
  • +
  • +

    Cloud-native

    +
    +

    Quarkus is designed for cloud-native projects from the start. +With spring this is also available via spring-native. +Using an application server will effectively prevent you from going to the cloud smoothly.

    +
    +
  • +
  • +

    Fun

    +
    +

    If you go to conferences or ask developers you will see that spring or quarkus is popular and fun. +If new developers are forced to use an old application server product they will be less motivated or even get frustrated. +Especially in today’s agile projects this is a very important aspect. +In the end you will get into trouble with maintenance on the long run if you rely on a proprietary application server.

    +
    +
  • +
+
+
+

Of course the vendors of application servers will tell you a different story. +This is simply because they still make a lot of money from their products. +We do not get paid from application servers nor from spring, quarkus or any other IT product company. +We are just developers who love to build great systems. +A good reason for application servers is that they combine a set of solutions to particular aspects to one product that helps to standardize your IT. +However, devonfw fills exactly this gap for the spring and quarkus ecosystems in a very open and flexible way. +However, there is one important aspect that you need to understand and be aware of:

+
+
+

Some big companies decided for a specific application server as their IT strategy. +They may have hundreds of apps running with this application server. +All their operators and developers have learned a lot of specific skills for this product and are familiar with it. +If you are implementing yet another (small) app in this context it could make sense to stick with this application server. +However, also they have to be aware that with every additional app they increase their technical debt. +So actively help your customer and consult him to make the right choices for the future.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-jms.html b/docs/devon4j/1.0/guide-jms.html new file mode 100644 index 00000000..5ec278a0 --- /dev/null +++ b/docs/devon4j/1.0/guide-jms.html @@ -0,0 +1,504 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Messaging

+
+
+

Messaging in Java is done using the JMS standard from JEE.

+
+
+
+
+

Products

+
+
+

For messaging you need to choose a JMS provider such as:

+
+
+ +
+
+
+
+

Receiver

+
+
+

As a receiver of messages receives data from other systems, it is located in the service-layer.

+
+
+

JMS Listener

+
+

A JmsListener is a class listening and consuming JMS messages. It should carry the suffix JmsListener and implement the MessageListener interface or have its listener method annotated with @JmsListener. This is illustrated by the following example:

+
+
+
+
@Named
+@Transactional
+public class BookingJmsListener /* implements MessageListener */ {
+
+  @Inject
+  private Bookingmanagement bookingmanagement;
+
+  @Inject
+  private MessageConverter messageConverter;
+
+  @JmsListener(destination = "BOOKING_QUEUE", containerFactory = "jmsListenerContainerFactory")
+  public void onMessage(Message message) {
+    try {
+      BookingTo bookingTo = (BookingTo) this.messageConverter.fromMessage(message);
+      this.bookingmanagement.importBooking(bookingTo);
+    } catch (MessageConversionException | JMSException e) {
+      throw new InvalidMessageException(message);
+    }
+  }
+}
+
+
+
+
+
+
+

Sender

+
+
+

The sending of JMS messages is considered as any other sending of data like kafka messages or RPC calls via REST using service-client, gRPC, etc. +This will typically happen directly from a use-case in the logic-layer. +However, the technical complexity of the communication and protocols itself shall be hidden from the use-case and not be part of the logic layer. +With spring we can simply use JmsTemplate to do that.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-jmx.html b/docs/devon4j/1.0/guide-jmx.html new file mode 100644 index 00000000..1abdc210 --- /dev/null +++ b/docs/devon4j/1.0/guide-jmx.html @@ -0,0 +1,443 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

JMX

+
+
+

JMX (Java Management Extensions) is the official Java monitoring solution. +It is part of the JDK. +Your application may provide monitoring information or receive monitoring related commands via MBeans. +There is a huge amount of information about JMX available. +A good starting point might be JMX on wikipedia.

+
+
+

Traditionally JMX uses RMI for communication, which is rather a discouraged protocol that should be avoided. +In many environments HTTP(S) is preferred, so be careful on deciding if JMX is the right solution. +However, you can even expose existing JMX MBeans via HTTP(S) instead of RMI. +Traditionally JMX also allows administrators not only to read data but also to write data typically in order to re-configure the app or do other such related tasks (e.g. clear caches). +Today, configuration and monitoring are clearly separated aspects and should not be mixed. +With container technology the ability to re-configure an app as a running process has become an outdated feature. +Instead, you simply restart the container to apply changes. +With cloud-native trends and aims for simplification the importance of JMX is continuously dropping. +Instead new projects tend to use more modern and lightweight solutions.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-jpa-idref.html b/docs/devon4j/1.0/guide-jpa-idref.html new file mode 100644 index 00000000..44ded321 --- /dev/null +++ b/docs/devon4j/1.0/guide-jpa-idref.html @@ -0,0 +1,587 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

IdRef

+
+
+

IdRef can be used to reference other entities in TOs in order to make them type-safe and semantically more expressive. +It is an optional concept in devon4j for more complex applications that make intensive use of relations and foreign keys.

+
+
+
+
+

Motivation

+
+
+

Assuming you have a method signature like the following:

+
+
+
+
Long approve(Long cId, Long cuId);
+
+
+
+

So what are the parameters? What is returned?

+
+
+

IdRef is just a wrapper for a Long used as foreign key. This makes our signature much more expressive and self-explanatory:

+
+
+
+
IdRef<Contract> approve(IdRef<Contract> cId, IdRef<Customer> cuId);
+
+
+
+

Now we can easily see, that the result and the parameters are foreign-keys and which entity they are referring to via their generic type. +We can read the javadoc of these entities from the generic type and understand the context. +Finally, when passing IdRef objects to such methods, we get compile errors in case we accidentally place parameters in the wrong order.

+
+
+
+
+

IdRef and Mapping

+
+
+

In order to easily map relations from entities to transfer-objects and back, we can easily also put according getters and setters into our entities:

+
+
+
+
public class ContractEntity extends ApplicationPersistenceEntity implements Contract {
+
+  private CustomerEntity customer;
+
+  ...
+
+  @ManyToOne(fetch = FetchType.LAZY)
+  @JoinColumn(name = "CUSTOMER_ID")
+  public CustomerEntity getCustomer() {
+    return this.customer;
+  }
+
+  public void setCustomer(CustomerEntity customer) {
+    this.customer = customer;
+  }
+
+  @Transient
+  public IdRef<Customer> getCustomerId() {
+    return IdRef.of(this.customer);
+  }
+
+  public void setCustomerId(IdRef<Customer> customerId) {
+    this.customer = JpaHelper.asEntity(customerId, CustomerEntity.class);
+  }
+}
+
+
+
+

Now, ensure that you have the same getters and setters for customerId in your Eto:

+
+
+
+
public class ContractEto extends AbstractEto implements Contract {
+
+  private IdRef<Customer> customerId;
+
+  ...
+
+  public IdRef<Customer> getCustomerId() {
+    return this.customerId;
+  }
+
+  public void setCustomerId(IdRef<Customer> customerId) {
+    this.customerId = customerId;
+  }
+}
+
+
+
+

This way the bean-mapper can automatically map from your entity (ContractEntity) to your Eto (ContractEto) and vice-versa.

+
+
+
+
+

JpaHelper and EntityManager access

+
+
+

In the above example we used JpaHelper.asEntity to convert the foreign key (IdRef<Customer>) to the according entity (CustomerEntity). +This will internally use EntityManager.getReference to properly create a JPA entity. +The alternative "solution" that may be used with Long instead of IdRef is typically:

+
+
+
+
  public void setCustomerId(IdRef<Customer> customerId) {
+    Long id = null;
+    if (customerId != null) {
+      id = customerId.getId();
+    }
+    if (id == null) {
+      this.customer = null;
+    } else {
+      this.customer = new CustomerEntity();
+      this.customer.setId(id);
+    }
+  }
+
+
+
+

While this "solution" works in most cases, we discovered some more complex cases, where it fails with very strange hibernate exceptions. +When cleanly creating the entity via EntityManager.getReference instead it is working in all cases. +So how can JpaHelper.asEntity as a static method access the EntityManager? +Therefore we need to initialize this as otherwise you may see this exception:

+
+
+
+
java.lang.IllegalStateException: EntityManager has not yet been initialized!
+	at com.devonfw.module.jpa.dataaccess.api.JpaEntityManagerAccess.getEntityManager(JpaEntityManagerAccess.java:38)
+	at com.devonfw.module.jpa.dataaccess.api.JpaHelper.asEntity(JpaHelper.java:49)
+
+
+
+

For main usage in your application we assume that there is only one instance of EntityManager. +Therefore we can initialize this instance during the spring boot setup. +This is what we provide for you in JpaInitializer +when creating a devon4j app.

+
+
+

JpaHelper and spring-test

+
+

Further, you also want your code to work in integration tests. +Spring-test provides a lot of magic under the hood to make integration testing easy for you. +To boost the performance when running multiple tests, spring is smart and avoids creating the same spring-context multiple times. +Therefore it stores these contexts so that if a test-case is executed with a specific spring-configuration that has already been setup before, +the same spring-context can be reused instead of creating it again. +However, your tests may have multiple spring configurations leading to multiple spring-contexts. +Even worse these tests can run in any order leading to switching back and forth between spring-contexts. +Therefore, a static initializer during the spring boot setup can lead to strange errors as you can get the wrong EntityManager instance. +In order to fix such problems, we provide a solution pattern via DbTest ensuring for every test, +that the proper instance of EntityManager is initialized. +Therefore you should derive directly or indirectly (e.g. via ComponentDbTest and SubsystemDbTest) from DbTest or adopt your own way to apply this pattern to your tests, when using JpaHelper. +This already happens if you are extending ApplicationComponentTest or ApplicationSubsystemTest.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-jpa-performance.html b/docs/devon4j/1.0/guide-jpa-performance.html new file mode 100644 index 00000000..7c7c0f95 --- /dev/null +++ b/docs/devon4j/1.0/guide-jpa-performance.html @@ -0,0 +1,505 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

JPA Performance

+
+
+

When using JPA the developer sometimes does not see or understand where and when statements to the database are triggered.

+
+
+
+
+

Establishing expectations Developers shouldn’t expect to sprinkle magic pixie dust on POJOs in hopes they will become persistent.

+
+
+
+— Dan Allen
+https://epdf.tips/seam-in-action.html +
+
+
+

So in case you do not understand what is going on under the hood of JPA, you will easily run into performance issues due to lazy loading and other effects.

+
+
+
+
+

N plus 1 Problem

+
+
+

The most prominent phenomenon is called the N+1 Problem. +We use entities from our MTS demo app as an example to explain the problem. +There is a DishEntity that has a @ManyToMany relation to +IngredientEntity. +Now we assume that we want to iterate all ingredients for a dish like this:

+
+
+
+
DishEntity dish = dao.findDishById(dishId);
+BigDecimal priceWithAllExtras = dish.getPrice();
+for (IngredientEntity ingredient : dish.getExtras()) {
+  priceWithAllExtras = priceWithAllExtras.add(ingredient.getPrice());
+}
+
+
+
+

Now dish.getExtras() is loaded lazy. Therefore the JPA vendor will provide a list with lazy initialized instances of IngredientEntity that only contain the ID of that entity. Now with every call of ingredient.getPrice() we technically trigger an SQL query statement to load the specific IngredientEntity by its ID from the database. +Now findDishById caused 1 initial query statement and for any number N of ingredients we are causing an additional query statement. This makes a total of N+1 statements. As causing statements to the database is an expensive operation with a lot of overhead (creating connection, etc.) this ends in bad performance and is therefore a problem (the N+1 Problem).

+
+
+
+
+

Solving N plus 1 Problem

+
+
+

To solve the N+1 Problem you need to change your code to only trigger a single statement instead. This can be achieved in various ways. The most universal solution is to use FETCH JOIN in order to pre-load the nested N child entities into the first level cache of the JPA vendor implementation. This will behave very similar as if the @ManyToMany relation to IngredientEntity was having FetchType.EAGER but only for the specific query and not in general. Because changing @ManyToMany to FetchType.EAGER would cause bad performance for other usecases where only the dish but not its extra ingredients are needed. For this reason all relations, including @OneToOne should always be FetchType.LAZY. Back to our example we simply replace dao.findDishById(dishId) with dao.findDishWithExtrasById(dishId) that we implement by the following JPQL query:

+
+
+
+
SELECT dish FROM DishEntity dish
+  LEFT JOIN FETCH dish.extras
+  WHERE dish.id = :dishId
+
+
+
+

The rest of the code does not have to be changed but now dish.getExtras() will get the IngredientEntity from the first level cache where it was fetched by the initial query above.

+
+
+

Please note that if you only need the sum of the prices from the extras you can also create a query using an aggregator function:

+
+
+
+
SELECT sum(dish.extras.price) FROM DishEntity dish
+
+
+
+

As you can see you need to understand the concepts in order to get good performance.

+
+
+

There are many advanced topics such as creating database indexes or calculating statistics for the query optimizer to get the best performance. For such advanced topics we recommend to have a database expert in your team that cares about such things. However, understanding the N+1 Problem and its solutions is something that every Java developer in the team needs to understand.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-jpa-query.html b/docs/devon4j/1.0/guide-jpa-query.html new file mode 100644 index 00000000..3ac748b7 --- /dev/null +++ b/docs/devon4j/1.0/guide-jpa-query.html @@ -0,0 +1,741 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Queries

+
+
+

The Java Persistence API (JPA) defines its own query language, the java persistence query language (JPQL) (see also JPQL tutorial), which is similar to SQL but operates on entities and their attributes instead of tables and columns.

+
+
+

The simplest CRUD-Queries (e.g. find an entity by its ID) are already build in the devonfw CRUD functionality (via Repository or DAO). For other cases you need to write your own query. We distinguish between static and dynamic queries. Static queries have a fixed JPQL query string that may only use parameters to customize the query at runtime. Instead, dynamic queries can change their clauses (WHERE, ORDER BY, JOIN, etc.) at runtime depending on the given search criteria.

+
+
+
+
+

Static Queries

+
+
+

E.g. to find all DishEntries (from MTS sample app) that have a price not exceeding a given maxPrice we write the following JPQL query:

+
+
+
+
SELECT dish FROM DishEntity dish WHERE dish.price <= :maxPrice
+
+
+
+

Here dish is used as alias (variable name) for our selected DishEntity (what refers to the simple name of the Java entity class). With dish.price we are referring to the Java property price (getPrice()/setPrice(…​)) in DishEntity. A named variable provided from outside (the search criteria at runtime) is specified with a colon (:) as prefix. Here with :maxPrice we reference to a variable that needs to be set via query.setParameter("maxPrice", maxPriceValue). JPQL also supports indexed parameters (?) but they are discouraged because they easily cause confusion and mistakes.

+
+
+

Using Queries to Avoid Bidirectional Relationships

+
+

With the usage of queries it is possible to avoid exposing relationships or modelling bidirectional relationships, which have some disadvantages (see relationships). This is especially desired for relationships between entities of different business components. +So for example to get all OrderLineEntities for a specific OrderEntity without using the orderLines relation from OrderEntity the following query could be used:

+
+
+
+
SELECT line FROM OrderLineEntity line WHERE line.order.id = :orderId
+
+
+
+
+
+
+

Dynamic Queries

+
+
+

For dynamic queries we use QueryDSL. It allows to implement queries in a powerful but readable and type-safe way (unlike Criteria API). If you already know JPQL you will quickly be able to read and write QueryDSL code. It feels like JPQL but implemented in Java instead of plain text.

+
+
+

Here is an example from our sample application:

+
+
+
+
  public List<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    QDishEntity dish = QDishEntity.dishEntity;
+    JPAQuery<DishEntity> query = new JPAQuery<OrderEntity>(getEntityManager());
+    query.from(dish);
+
+    Range<BigDecimal> priceRange = criteria.getPriceRange();
+    if (priceRange != null) {
+      BigDecimal min = priceRange.getMin();
+      if (min != null) {
+        query.where(dish.price.goe(min));
+      }
+      BigDecimal max = priceRange.getMax();
+      if (max != null) {
+        query.where(dish.price.loe(max));
+      }
+    }
+    String name = criteria.getName();
+    if ((name != null) && (!name.isEmpty())) {
+      query.where(dish.name.eq(name));
+    }
+    return query.fetch();
+  }
+
+
+
+

In this example we use the so called Q-types (QDishEntity). These are classes generated at build time by the QueryDSL annotation processor from entity classes. The Q-type classes can be used as static types representative of the original entity class.

+
+
+

For spring, devon4j provides another approach that you can use for your Spring applications to implement QueryDSL logic without having to use these metaclasses. An example can be found here.

+
+
+
+
+

Using Wildcards

+
+
+

For flexible queries it is often required to allow wildcards (especially in dynamic queries). While users intuitively expect glob syntax the SQL and JPQL standards work different. Therefore a mapping is required. devonfw provides this on a lower level by LikePatternSyntax and on a high level by QueryUtil (see QueryHelper.newStringClause(…​)).

+
+
+
+
+

Pagination

+
+
+

When dealing with large amounts of data, an efficient method of retrieving the data is required. Fetching the entire data set each time would be too time consuming. Instead, Paging is used to process only small subsets of the entire data set.

+
+
+

If you are using Spring Data repositories you will get pagination support out of the box by providing the interfaces Page and Pageable:

+
+
+
repository
+
+
Page<DishEntity> findAll(Pageable pageable);
+
+
+
+

Then you can create a Pageable object and pass it to the method call as follows:

+
+
+
+
int page = criteria.getPageNumber();
+int size = criteria.getPageSize();
+Pageable pageable = PageRequest.of(page, size);
+Page<DishEntity> dishes = dishRepository.findAll(pageable);
+
+
+
+

Paging with QueryDSL

+
+

Pagination is also supported for dynamic queries with QueryDSL:

+
+
+
+
  public Page<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    QDishEntity dish = QDishEntity.dishEntity;
+    JPAQuery<DishEntity> query = new JPAQuery<DishEntity>(getEntityManager());
+    query.from(dish);
+
+    // conditions
+
+    int page = criteria.getPageNumber();
+    int size = criteria.getPageSize();
+    Pageable pageable = PageRequest.of(page, size);
+    query.offset(pageable.getOffset());
+    query.limit(pageable.getPageSize());
+
+    List<DishEntity> dishes = query.fetch();
+    return new PageImpl<>(dishes, pageable, dishes.size());
+  }
+
+
+
+
+

Pagination example

+
+

For the table entity we can make a search request by accessing the REST endpoint with pagination support like in the following examples:

+
+
+
+
POST mythaistar/services/rest/tablemanagement/v1/table/search
+{
+  "pagination": {
+    "size":2,
+    "total":true
+  }
+}
+
+//Response
+{
+    "pagination": {
+        "size": 2,
+        "page": 1,
+        "total": 11
+    },
+    "result": [
+        {
+            "id": 101,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 1,
+            "state": "OCCUPIED"
+        },
+        {
+            "id": 102,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 2,
+            "state": "FREE"
+        }
+    ]
+}
+
+
+
+ + + + + +
+ + +As we are requesting with the total property set to true the server responds with the total count of rows for the query. +
+
+
+

For retrieving a concrete page, we provide the page attribute with the desired value. Here we also left out the total property so the server doesn’t incur on the effort to calculate it:

+
+
+
+
POST mythaistar/services/rest/tablemanagement/v1/table/search
+{
+  "pagination": {
+    "size":2,
+    "page":2
+  }
+}
+
+//Response
+
+{
+    "pagination": {
+        "size": 2,
+        "page": 2,
+        "total": null
+    },
+    "result": [
+        {
+            "id": 103,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 3,
+            "state": "FREE"
+        },
+        {
+            "id": 104,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 4,
+            "state": "FREE"
+        }
+    ]
+}
+
+
+
+
+

Pagination in devon4j-spring

+
+

For spring applications, devon4j also offers its own solution for pagination. You can find an example of this here.

+
+
+
+
+
+

Query Meta-Parameters

+
+
+

Queries can have meta-parameters that are provided via SearchCriteriaTo. Besides paging (see above) we also get timeout support.

+
+
+
+
+

Advanced Queries

+
+
+

Writing queries can sometimes get rather complex. The current examples given above only showed very simple basics. Within this topic a lot of advanced features need to be considered like:

+
+
+ +
+
+

This list is just containing the most important aspects. As we can not cover all these topics here, they are linked to external documentation that can help and guide you.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-jpa.html b/docs/devon4j/1.0/guide-jpa.html new file mode 100644 index 00000000..2598d060 --- /dev/null +++ b/docs/devon4j/1.0/guide-jpa.html @@ -0,0 +1,1124 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Java Persistence API

+
+
+

For mapping java objects to a relational database we use the Java Persistence API (JPA). +As JPA implementation we recommend to use Hibernate. For general documentation about JPA and Hibernate follow the links above as we will not replicate the documentation. Here you will only find guidelines and examples how we recommend to use it properly. The following examples show how to map the data of a database to an entity. As we use JPA we abstract from SQL here. However, you will still need a DDL script for your schema and during maintenance also database migrations. Please follow our SQL guide for such artifacts.

+
+
+
+
+

Entity

+
+
+

Entities are part of the persistence layer and contain the actual data. They are POJOs (Plain Old Java Objects) on which the relational data of a database is mapped and vice versa. The mapping is configured via JPA annotations (javax.persistence). Usually an entity class corresponds to a table of a database and a property to a column of that table. A persistent entity instance then represents a row of the database table.

+
+
+

A Simple Entity

+
+

The following listing shows a simple example:

+
+
+
+
@Entity
+@Table(name="TEXTMESSAGE")
+public class MessageEntity extends ApplicationPersistenceEntity implements Message {
+
+  private String text;
+
+  public String getText() {
+    return this.text;
+  }
+
+  public void setText(String text) {
+    this.text = text;
+  }
+ }
+
+
+
+

The @Entity annotation defines that instances of this class will be entities which can be stored in the database. The @Table annotation is optional and can be used to define the name of the corresponding table in the database. If it is not specified, the simple name of the entity class is used instead.

+
+
+

In order to specify how to map the attributes to columns we annotate the corresponding getter methods (technically, private field annotation is also possible but approaches can not be mixed). +The @Id annotation specifies that a property should be used as primary key. +With the help of the @Column annotation it is possible to define the name of the column that an attribute is mapped to as well as other aspects such as nullable or unique. If no column name is specified, the name of the property is used as default.

+
+
+

Note that every entity class needs a constructor with public or protected visibility that does not have any arguments. Moreover, neither the class nor its getters and setters may be final.

+
+
+

Entities should be simple POJOs and not contain business logic.

+
+
+
+

Entities and Datatypes

+
+

Standard datatypes like Integer, BigDecimal, String, etc. are mapped automatically by JPA. Custom datatypes are mapped as serialized BLOB by default, which is typically undesired. +In order to map atomic custom datatypes (implementations of SimpleDatatype) we implement an AttributeConverter. Here is a simple example:

+
+
+
+
@Converter(autoApply = true)
+public class MoneyAttributeConverter implements AttributeConverter<Money, BigDecimal> {
+
+  public BigDecimal convertToDatabaseColumn(Money attribute) {
+    return attribute.getValue();
+  }
+
+  public Money convertToEntityAttribute(BigDecimal dbData) {
+    return new Money(dbData);
+  }
+}
+
+
+
+

The annotation @Converter is detected by the JPA vendor if the annotated class is in the packages to scan. Further, autoApply = true implies that the converter is automatically used for all properties of the handled datatype. Therefore all entities with properties of that datatype will automatically be mapped properly (in our example Money is mapped as BigDecimal).

+
+
+

In case you have a composite datatype that you need to map to multiple columns the JPA does not offer a real solution. As a workaround you can use a bean instead of a real datatype and declare it as @Embeddable. If you are using Hibernate you can implement CompositeUserType. Via the @TypeDef annotation it can be registered to Hibernate. If you want to annotate the CompositeUserType implementation itself you also need another annotation (e.g. MappedSuperclass though not technically correct) so it is found by the scan.

+
+
+

Enumerations

+
+

By default JPA maps Enums via their ordinal. Therefore the database will only contain the ordinals (0, 1, 2, etc.). So, inside the database you can not easily understand their meaning. Using @Enumerated with EnumType.STRING allows to map the enum values to their name (Enum.name()). Both approaches are fragile when it comes to code changes and refactoring (if you change the order of the enum values or rename them) after the application is deployed to production. If you want to avoid this and get a robust mapping you can define a dedicated string in each enum value for database representation that you keep untouched. Then you treat the enum just like any other custom datatype.

+
+
+
+

BLOB

+
+

If binary or character large objects (BLOB/CLOB) should be used to store the value of an attribute, e.g. to store an icon, the @Lob annotation should be used as shown in the following listing:

+
+
+
+
@Lob
+public byte[] getIcon() {
+  return this.icon;
+}
+
+
+
+ + + + + +
+ + +Using a byte array will cause problems if BLOBs get large because the entire BLOB is loaded into the RAM of the server and has to be processed by the garbage collector. For larger BLOBs the type Blob and streaming should be used. +
+
+
+
+
public Blob getAttachment() {
+  return this.attachment;
+}
+
+
+
+
+

Date and Time

+
+

To store date and time related values, the temporal annotation can be used as shown in the listing below:

+
+
+
+
@Temporal(TemporalType.TIMESTAMP)
+public java.util.Date getStart() {
+  return start;
+}
+
+
+
+

Until Java8 the java data type java.util.Date (or Jodatime) has to be used. +TemporalType defines the granularity. In this case, a precision of nanoseconds is used. If this granularity is not wanted, TemporalType.DATE can be used instead, which only has a granularity of milliseconds. +Mixing these two granularities can cause problems when comparing one value to another. This is why we only use TemporalType.TIMESTAMP.

+
+
+
+

QueryDSL and Custom Types

+
+

Using the Aliases API of QueryDSL might result in an InvalidDataAccessApiUsageException when using custom datatypes in entity properties. This can be circumvented in two steps:

+
+
+
    +
  1. +

    Ensure you have the following maven dependencies in your project (core module) to support custom types via the Aliases API:

    +
    +
    +
    <dependency>
    +  <groupId>org.ow2.asm</groupId>
    +  <artifactId>asm</artifactId>
    +</dependency>
    +<dependency>
    +  <groupId>cglib</groupId>
    +  <artifactId>cglib</artifactId>
    +</dependency>
    +
    +
    +
  2. +
  3. +

    Make sure, that all your custom types used in entities provide a non-argument constructor with at least visibility level protected.

    +
  4. +
+
+
+
+
+

Primary Keys

+
+

We only use simple Long values as primary keys (IDs). By default it is auto generated (@GeneratedValue(strategy=GenerationType.AUTO)). This is already provided by the class com.devonfw.<projectName>.general.dataaccess.api.AbstractPersistenceEntity within the classic project structure respectively com.devonfw.<projectName>.general.domain.model.AbstractPersistenceEntity within the modern project structure, that you can extend. +In case you have business oriented keys (often as String), you can define an additional property for it and declare it as unique (@Column(unique=true)). +Be sure to include "AUTO_INCREMENT" in your sql table field ID to be able to persist data (or similar for other databases).

+
+
+
+
+
+

Relationships

+
+
+

n:1 and 1:1 Relationships

+
+

Entities often do not exist independently but are in some relation to each other. For example, for every period of time one of the StaffMember’s of the restaurant example has worked, which is represented by the class WorkingTime, there is a relationship to this StaffMember.

+
+
+

The following listing shows how this can be modeled using JPA:

+
+
+
+
...
+
+@Entity
+public class WorkingTimeEntity {
+   ...
+
+   private StaffMemberEntity staffMember;
+
+   @ManyToOne
+   @JoinColumn(name="STAFFMEMBER")
+   public StaffMemberEntity getStaffMember() {
+      return this.staffMember;
+   }
+
+   public void setStaffMember(StaffMemberEntity staffMember) {
+      this.staffMember = staffMember;
+   }
+}
+
+
+
+

To represent the relationship, an attribute of the type of the corresponding entity class that is referenced has been introduced. The relationship is a n:1 relationship, because every WorkingTime belongs to exactly one StaffMember, but a StaffMember usually worked more often than once.
+This is why the @ManyToOne annotation is used here. For 1:1 relationships the @OneToOne annotation can be used which works basically the same way. To be able to save information about the relation in the database, an additional column in the corresponding table of WorkingTime is needed which contains the primary key of the referenced StaffMember. With the name element of the @JoinColumn annotation it is possible to specify the name of this column.

+
+
+
+

1:n and n:m Relationships

+
+

The relationship of the example listed above is currently a unidirectional one, as there is a getter method for retrieving the StaffMember from the WorkingTime object, but not vice versa.

+
+
+

To make it a bidirectional one, the following code has to be added to StaffMember:

+
+
+
+
  private Set<WorkingTimeEntity> workingTimes;
+
+  @OneToMany(mappedBy="staffMember")
+  public Set<WorkingTimeEntity> getWorkingTimes() {
+    return this.workingTimes;
+  }
+
+  public void setWorkingTimes(Set<WorkingTimeEntity> workingTimes) {
+    this.workingTimes = workingTimes;
+  }
+
+
+
+

To make the relationship bidirectional, the tables in the database do not have to be changed. Instead the column that corresponds to the attribute staffMember in class WorkingTime is used, which is specified by the mappedBy element of the @OneToMany annotation. Hibernate will search for corresponding WorkingTime objects automatically when a StaffMember is loaded.

+
+
+

The problem with bidirectional relationships is that if a WorkingTime object is added to the set or list workingTimes in StaffMember, this does not have any effect in the database unless +the staffMember attribute of that WorkingTime object is set. That is why the devon4j advices not to use bidirectional relationships but to use queries instead. How to do this is shown here. If a bidirectional relationship should be used nevertheless, appropriate add and remove methods must be used.

+
+
+

For 1:n and n:m relations, the devon4j demands that (unordered) Sets and no other collection types are used, as shown in the listing above. The only exception is whenever an ordering is really needed, (sorted) lists can be used.
+For example, if WorkingTime objects should be sorted by their start time, this could be done like this:

+
+
+
+
  private List<WorkingTimeEntity> workingTimes;
+
+  @OneToMany(mappedBy = "staffMember")
+  @OrderBy("startTime asc")
+  public List<WorkingTimeEntity> getWorkingTimes() {
+    return this.workingTimes;
+  }
+
+  public void setWorkingTimes(List<WorkingTimeEntity> workingTimes) {
+    this.workingTimes = workingTimes;
+  }
+
+
+
+

The value of the @OrderBy annotation consists of an attribute name of the class followed by asc (ascending) or desc (descending).

+
+
+

To store information about a n:m relationship, a separate table has to be used, as one column cannot store several values (at least if the database schema is in first normal form).
+For example if one wanted to extend the example application so that all ingredients of one FoodDrink can be saved and to model the ingredients themselves as entities (e.g. to store additional information about them), this could be modeled as follows (extract of class FoodDrink):

+
+
+
+
  private Set<IngredientEntity> ingredients;
+
+  @ManyToMany()
+  @JoinTable
+  public Set<IngredientEntity> getIngredients() {
+    return this.ingredients;
+  }
+
+  public void setIngredients(Set<IngredientEntity> ingredients) {
+    this.ingredients = ingredients;
+  }
+
+
+
+

Information about the relation is stored in a table called BILL_ORDER that has to have two columns, one for referencing the Bill, the other one for referencing the Order. Note that the @JoinTable annotation is not needed in this case because a separate table is the default solution here (same for n:m relations) unless there is a mappedBy element specified.

+
+
+

For 1:n relationships this solution has the disadvantage that more joins (in the database system) are needed to get a Bill with all the Orders it refers to. This might have a negative impact on performance so that the solution to store a reference to the Bill row/entity in the Order’s table is probably the better solution in most cases.

+
+
+

Note that bidirectional n:m relationships are not allowed for applications based on devon4j. Instead a third entity has to be introduced, which "represents" the relationship (it has two n:1 relationships).

+
+
+
+

Eager vs. Lazy Loading

+
+

Using JPA it is possible to use either lazy or eager loading. Eager loading means that for entities retrieved from the database, other entities that are referenced by these entities are also retrieved, whereas lazy loading means that this is only done when they are actually needed, i.e. when the corresponding getter method is invoked.

+
+
+

Application based on devon4j are strongly advised to always use lazy loading. The JPA defaults are:

+
+
+
    +
  • +

    @OneToMany: LAZY

    +
  • +
  • +

    @ManyToMany: LAZY

    +
  • +
  • +

    @ManyToOne: EAGER

    +
  • +
  • +

    @OneToOne: EAGER

    +
  • +
+
+
+

So at least for @ManyToOne and @OneToOne you always need to override the default by providing fetch = FetchType.LAZY.

+
+
+ + + + + +
+ + +Please read the performance guide. +
+
+
+
+

Cascading Relationships

+
+

For relations it is also possible to define whether operations are cascaded (like a recursion) to the related entity. +By default, nothing is done in these situations. This can be changed by using the cascade property of the annotation that specifies the relation type (@OneToOne, @ManyToOne, @OneToMany, @ManyToMany). This property accepts a CascadeType that offers the following options:

+
+
+
    +
  • +

    PERSIST (for EntityManager.persist, relevant to inserted transient entities into DB)

    +
  • +
  • +

    REMOVE (for EntityManager.remove to delete entity from DB)

    +
  • +
  • +

    MERGE (for EntityManager.merge)

    +
  • +
  • +

    REFRESH (for EntityManager.refresh)

    +
  • +
  • +

    DETACH (for EntityManager.detach)

    +
  • +
  • +

    ALL (cascade all of the above operations)

    +
  • +
+
+
+

See here for more information.

+
+
+
+

Typesafe Foreign Keys using IdRef

+
+

For simple usage you can use Long for all your foreign keys. +However, as an optional pattern for advanced and type-safe usage, we offer IdRef.

+
+
+
+
+
+

Embeddable

+
+
+

An embeddable Object is a way to group properties of an entity into a separate Java (child) object. Unlike with relationships, the embeddable is not a separate entity and its properties are stored (embedded) in the same table together with the entity. This is helpful to structure and reuse groups of properties.

+
+
+

The following example shows an Address implemented as an embeddable class:

+
+
+
+
@Embeddable
+public class AddressEmbeddable {
+
+  private String street;
+  private String number;
+  private Integer zipCode;
+  private String city;
+
+  @Column(name="STREETNUMBER")
+  public String getNumber() {
+    return number;
+  }
+
+  public void setNumber(String number) {
+    this.number = number;
+  }
+
+  ...  // other getter and setter methods, equals, hashCode
+}
+
+
+
+

As you can see an embeddable is similar to an entity class, but with an @Embeddable annotation instead of the @Entity annotation and without primary key or modification counter. +An Embeddable does not exist on its own but in the context of an entity. +As a simplification Embeddables do not require a separate interface and ETO as the bean-mapper will create a copy automatically when converting the owning entity to an ETO. +However, in this case the embeddable becomes part of your api module that therefore needs a dependency on the JPA.

+
+
+

In addition to that the methods equals(Object) and hashCode() need to be implemented as this is required by Hibernate (it is not required for entities because they can be unambiguously identified by their primary key). For some hints on how to implement the hashCode() method please have a look here.

+
+
+

Using this AddressEmbeddable inside an entity class can be done like this:

+
+
+
+
  private AddressEmbeddable address;
+
+  @Embedded
+  public AddressEmbeddable getAddress() {
+    return this.address;
+  }
+
+  public void setAddress(AddressEmbeddable address) {
+    this.address = address;
+  }
+}
+
+
+
+

The @Embedded annotation needs to be used for embedded attributes. Note that if in all columns of the embeddable (here Address) are null, then the embeddable object itself is also null inside the entity. This has to be considered to avoid NullPointerException’s. Further this causes some issues with primitive types in embeddable classes that can be avoided by only using object types instead.

+
+
+
+
+

Inheritance

+
+
+

Just like normal java classes, entity classes can inherit from others. The only difference is that you need to specify how to map a class hierarchy to database tables. Generic abstract super-classes for entities can simply be annotated with @MappedSuperclass.

+
+
+

For all other cases the JPA offers the annotation @Inheritance with the property strategy taking an InheritanceType that has the following options:

+
+
+
+
+
    +
  • +

    SINGLE_TABLE: This strategy uses a single table that contains all columns needed to store all entity-types of the entire inheritance hierarchy. If a column is not needed for an entity because of its type, there is a null value in this column. An additional column is introduced, which denotes the type of the entity (called dtype).

    +
  • +
  • +

    TABLE_PER_CLASS: For each concrete entity class there is a table in the database that can store such an entity with all its attributes. An entity is only saved in the table corresponding to its most concrete type. To get all entities of a super type, joins are needed.

    +
  • +
  • +

    JOINED: In this case there is a table for every entity class including abstract classes, which contains only the columns for the persistent properties of that particular class. Additionally there is a primary key column in every table. To get an entity of a class that is a subclass of another one, joins are needed.

    +
  • +
+
+
+
+
+

Each of the three approaches has its advantages and drawbacks, which are discussed in detail here. In most cases, the first one should be used, because it is usually the fastest way to do the mapping, as no joins are needed when retrieving, searching or persisting entities. Moreover it is rather simple and easy to understand. +One major disadvantage is that the first approach could lead to a table with a lot of null values, which might have a negative impact on the database size.

+
+
+

The inheritance strategy has to be annotated to the top-most entity of the class hierarchy (where @MappedSuperclass classes are not considered) like in the following example:

+
+
+
+
@Entity
+@Inheritance(strategy=InheritanceType.SINGLE_TABLE)
+public abstract class MyParentEntity extends ApplicationPersistenceEntity implements MyParent {
+  ...
+}
+
+@Entity
+public class MyChildEntity extends MyParentEntity implements MyChild {
+  ...
+}
+
+@Entity
+public class MyOtherEntity extends MyParentEntity implements MyChild {
+  ...
+}
+
+
+
+

As a best practice we advise you to avoid entity hierarchies at all where possible and otherwise to keep the hierarchy as small as possible. In order to just ensure reuse or establish a common API you can consider a shared interface, a @MappedSuperclass or an @Embeddable instead of an entity hierarchy.

+
+
+
+
+

Repositories and DAOs

+
+
+

For each entity a code unit is created that groups all database operations for that entity. We recommend to use spring-data repositories for that as it is most efficient for developers. As an alternative there is still the classic approach using DAOs.

+
+
+

Concurrency Control

+
+

The concurrency control defines the way concurrent access to the same data of a database is handled. When several users (or threads of application servers) concurrently access a database, anomalies may happen, e.g. a transaction is able to see changes from another transaction although that one did not yet commit these changes. Most of these anomalies are automatically prevented by the database system, depending on the isolation level (property hibernate.connection.isolation in the jpa.xml, see here, or quarkus.datasource.jdbc.transaction-isolation-level in the application.properties).

+
+
+

Another anomaly is when two stakeholders concurrently access a record, do some changes and write them back to the database. The JPA addresses this with different locking strategies (see here).

+
+
+

As a best practice we are using optimistic locking for regular end-user services (OLTP) and pessimistic locking for batches.

+
+
+
+

Optimistic Locking

+
+

The class com.devonfw.module.jpa.persistence.api.AbstractPersistenceEntity already provides optimistic locking via a modificationCounter with the @Version annotation. Therefore JPA takes care of optimistic locking for you. When entities are transferred to clients, modified and sent back for update you need to ensure the modificationCounter is part of the game. If you follow our guides about transfer-objects and services this will also work out of the box. +You only have to care about two things:

+
+
+
    +
  • +

    How to deal with optimistic locking in relationships?
    +Assume an entity A contains a collection of B entities. Should there be a locking conflict if one user modifies an instance of A while another user in parallel modifies an instance of B that is contained in the other instance? To address this , take a look at FeatureForceIncrementModificationCounter.

    +
  • +
  • +

    What should happen in the UI if an OptimisticLockException occurred?
    +According to KISS our recommendation is that the user gets an error displayed that tells him to do his change again on the recent data. Try to design your system and the work processing in a way to keep such conflicts rare and you are fine.

    +
  • +
+
+
+
+

Pessimistic Locking

+
+

For back-end services and especially for batches optimistic locking is not suitable. A human user shall not cause a large batch process to fail because he was editing the same entity. Therefore such use-cases use pessimistic locking what gives them a kind of priority over the human users. +In your DAO implementation you can provide methods that do pessimistic locking via EntityManager operations that take a LockModeType. Here is a simple example:

+
+
+
+
  getEntityManager().lock(entity, LockModeType.READ);
+
+
+
+

When using the lock(Object, LockModeType) method with LockModeType.READ, Hibernate will issue a SELECT …​ FOR UPDATE. This means that no one else can update the entity (see here for more information on the statement). If LockModeType.WRITE is specified, Hibernate issues a SELECT …​ FOR UPDATE NOWAIT instead, which has the same meaning as the statement above, but if there is already a lock, the program will not wait for this lock to be released. Instead, an exception is raised.
+Use one of the types if you want to modify the entity later on, for read only access no lock is required.

+
+
+

As you might have noticed, the behavior of Hibernate deviates from what one would expect by looking at the LockModeType (especially LockModeType.READ should not cause a SELECT …​ FOR UPDATE to be issued). The framework actually deviates from what is specified in the JPA for unknown reasons.

+
+
+
+
+
+

Database Auditing

+
+ +
+
+
+

Testing Data-Access

+
+
+

For testing of Entities and Repositories or DAOs see testing guide.

+
+
+
+
+

Principles

+
+
+

We strongly recommend these principles:

+
+
+
    +
  • +

    Use the JPA where ever possible and use vendor (hibernate) specific features only for situations when JPA does not provide a solution. In the latter case consider first if you really need the feature.

    +
  • +
  • +

    Create your entities as simple POJOs and use JPA to annotate the getters in order to define the mapping.

    +
  • +
  • +

    Keep your entities simple and avoid putting advanced logic into entity methods.

    +
  • +
+
+
+
+
+

Database Configuration

+
+
+

For details on the configuration of the database connection and database logging of the individual framework, please refer to the respective configuration guide.

+
+
+

For spring see here.

+
+
+

For quarkus see here.

+
+
+

Database Migration

+ +
+
+

Pooling

+
+

You typically want to pool JDBC connections to boost performance by recycling previous connections. There are many libraries available to do connection pooling. We recommend to use HikariCP. For Oracle RDBMS see here.

+
+
+
+
+
+

Security

+
+
+

SQL-Injection

+
+

A common security threat is SQL-injection. Never build queries with string concatenation or your code might be vulnerable as in the following example:

+
+
+
+
  String query = "Select op from OrderPosition op where op.comment = " + userInput;
+  return getEntityManager().createQuery(query).getResultList();
+
+
+
+

Via the parameter userInput an attacker can inject SQL (JPQL) and execute arbitrary statements in the database causing extreme damage.

+
+
+

In order to prevent such injections you have to strictly follow our rules for queries:

+
+
+ +
+
+
+

Limited Permissions for Application

+
+

We suggest that you operate your application with a database user that has limited permissions so he can not modify the SQL schema (e.g. drop tables). For initializing the schema (DDL) or to do schema migrations use a separate user that is not used by the application itself.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-json.html b/docs/devon4j/1.0/guide-json.html new file mode 100644 index 00000000..2f34d57d --- /dev/null +++ b/docs/devon4j/1.0/guide-json.html @@ -0,0 +1,586 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

JSON

+
+
+

JSON (JavaScript Object Notation) is a popular format to represent and exchange data especially for modern web-clients. For mapping Java objects to JSON and vice-versa there is no official standard API. We use the established and powerful open-source solution Jackson. +Due to problems with the wiki of fasterxml you should try this alternative link: Jackson/AltLink.

+
+
+
+
+

Configure JSON Mapping

+
+
+

In order to avoid polluting business objects with proprietary Jackson annotations (e.g. @JsonTypeInfo, @JsonSubTypes, @JsonProperty) we propose to create a separate configuration class. Every devonfw application (sample or any app created from our app-template) therefore has a class called ApplicationObjectMapperFactory that extends ObjectMapperFactory from the devon4j-rest module. It looks like this:

+
+
+
+
@Named("ApplicationObjectMapperFactory")
+public class ApplicationObjectMapperFactory extends ObjectMapperFactory {
+
+  public ApplicationObjectMapperFactory() {
+    super();
+    // JSON configuration code goes here
+  }
+}
+
+
+
+
+
+

JSON and Inheritance

+
+
+

If you are using inheritance for your objects mapped to JSON then polymorphism can not be supported out-of-the box. So in general avoid polymorphic objects in JSON mapping. However, this is not always possible. +Have a look at the following example from our sample application:

+
+
+
+inheritance class diagram +
+
Figure 1. Transfer-Objects using Inheritance
+
+
+

Now assume you have a REST service operation as Java method that takes a ProductEto as argument. As this is an abstract class the server needs to know the actual sub-class to instantiate. +We typically do not want to specify the classname in the JSON as this should be an implementation detail and not part of the public JSON format (e.g. in case of a service interface). Therefore we use a symbolic name for each polymorphic subtype that is provided as virtual attribute @type within the JSON data of the object:

+
+
+
+
{ "@type": "Drink", ... }
+
+
+
+

Therefore you add configuration code to the constructor of ApplicationObjectMapperFactory. Here you can see an example from the sample application:

+
+
+
+
setBaseClasses(ProductEto.class);
+addSubtypes(new NamedType(MealEto.class, "Meal"), new NamedType(DrinkEto.class, "Drink"),
+  new NamedType(SideDishEto.class, "SideDish"));
+
+
+
+

We use setBaseClasses to register all top-level classes of polymorphic objects. Further we declare all concrete polymorphic sub-classes together with their symbolic name for the JSON format via addSubtypes.

+
+
+
+
+

Custom Mapping

+
+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to define a custom mapping. If you create objects dedicated for the JSON mapping you can easily avoid such situations. When this is not suitable follow these instructions to define the mapping:

+
+
+
    +
  1. +

    As an example, the use of JSR354 (javax.money) is appreciated in order to process monetary amounts properly. However, without custom mapping, the default mapping of Jackson will produce the following JSON for a MonetaryAmount:

    +
    +
    +
    "currency": {"defaultFractionDigits":2, "numericCode":978, "currencyCode":"EUR"},
    +"monetaryContext": {...},
    +"number":6.99,
    +"factory": {...}
    +
    +
    +
    +

    As clearly can be seen, the JSON contains too much information and reveals implementation secrets that do not belong here. Instead the JSON output expected and desired would be:

    +
    +
    +
    +
    "currency":"EUR","amount":"6.99"
    +
    +
    +
    +

    Even worse, when we send the JSON data to the server, Jackson will see that MonetaryAmount is an interface and does not know how to instantiate it so the request will fail. +Therefore we need a customized Serializer.

    +
    +
  2. +
  3. +

    We implement MonetaryAmountJsonSerializer to define how a MonetaryAmount is serialized to JSON:

    +
    +
    +
    public final class MonetaryAmountJsonSerializer extends JsonSerializer<MonetaryAmount> {
    +
    +  public static final String NUMBER = "amount";
    +  public static final String CURRENCY = "currency";
    +
    +  public void serialize(MonetaryAmount value, JsonGenerator jgen, SerializerProvider provider) throws ... {
    +    if (value != null) {
    +      jgen.writeStartObject();
    +      jgen.writeFieldName(MonetaryAmountJsonSerializer.CURRENCY);
    +      jgen.writeString(value.getCurrency().getCurrencyCode());
    +      jgen.writeFieldName(MonetaryAmountJsonSerializer.NUMBER);
    +      jgen.writeString(value.getNumber().toString());
    +      jgen.writeEndObject();
    +    }
    +  }
    +
    +
    +
    +

    For composite datatypes it is important to wrap the info as an object (writeStartObject() and writeEndObject()). MonetaryAmount provides the information we need by the getCurrency() and getNumber(). So that we can easily write them into the JSON data.

    +
    +
  4. +
  5. +

    Next, we implement MonetaryAmountJsonDeserializer to define how a MonetaryAmount is deserialized back as Java object from JSON:

    +
    +
    +
    public final class MonetaryAmountJsonDeserializer extends AbstractJsonDeserializer<MonetaryAmount> {
    +  protected MonetaryAmount deserializeNode(JsonNode node) {
    +    BigDecimal number = getRequiredValue(node, MonetaryAmountJsonSerializer.NUMBER, BigDecimal.class);
    +    String currencyCode = getRequiredValue(node, MonetaryAmountJsonSerializer.CURRENCY, String.class);
    +    MonetaryAmount monetaryAmount =
    +        MonetaryAmounts.getAmountFactory().setNumber(number).setCurrency(currencyCode).create();
    +    return monetaryAmount;
    +  }
    +}
    +
    +
    +
    +

    For composite datatypes we extend from AbstractJsonDeserializer as this makes our task easier. So we already get a JsonNode with the parsed payload of our datatype. Based on this API it is easy to retrieve individual fields from the payload without taking care of their order, etc. +AbstractJsonDeserializer also provides methods such as getRequiredValue to read required fields and get them converted to the desired basis datatype. So we can easily read the amount and currency and construct an instance of MonetaryAmount via the official factory API.

    +
    +
  6. +
  7. +

    Finally we need to register our custom (de)serializers with the following configuration code in the constructor of ApplicationObjectMapperFactory:+

    +
  8. +
+
+
+
+
  SimpleModule module = getExtensionModule();
+  module.addDeserializer(MonetaryAmount.class, new MonetaryAmountJsonDeserializer());
+  module.addSerializer(MonetaryAmount.class, new MonetaryAmountJsonSerializer());
+
+
+
+

Now we can read and write MonetaryAmount from and to JSON as expected.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-jwt.html b/docs/devon4j/1.0/guide-jwt.html new file mode 100644 index 00000000..da6edc6a --- /dev/null +++ b/docs/devon4j/1.0/guide-jwt.html @@ -0,0 +1,439 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

JWT

+
+
+

JWT (JSON Web Token) is an open standard (see RFC 7519) for creating JSON based access tokens that assert some number of claims. +With an IT landscape divided into multiple smaller apps you want to avoid coupling all those apps or services tightly with your IAM (Identity & Access Management). +Instead your apps simply expect a JWT as bearer-token in the Authorization HTTP header field. +All they need to do for authentication is validating this JWT. +The actual authentication is done centrally by an access system (IAM) that authors those JWTs. +Therefore we recommend to use strong asymmetric cryptography to sign the JWT when it is authored. +Create a keypair per environment and keep the private key as a secret only known to the access system authorizing the JWTs. +Your apps only need to know the public key in order to validate the JWT. +Any request without a JWT or with an invalid JWT will be rejected (with status code 401).

+
+
+

When using spring check the JWT Spring-Starter. +For quarkus follow Using JWT RBAC.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-kafka.html b/docs/devon4j/1.0/guide-kafka.html new file mode 100644 index 00000000..5e43471b --- /dev/null +++ b/docs/devon4j/1.0/guide-kafka.html @@ -0,0 +1,451 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+ + + + + +
+ + +devon4j-kafka has been abandoned. Its main feature was the implementation of a retry pattern using multiple topics. This implementation has become an integral part of Spring Kafka. We recommend to use Spring Kafka's own implementation for retries. +
+
+
+

Messaging Services

+
+
+

Messaging Services provide an asynchronous communication mechanism between applications. Technically this is implemented using Apache Kafka.

+
+
+

For spring, devonfw uses Spring-Kafka as kafka framework. +For more details, check the devon4j-kafka.

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-liquibase.html b/docs/devon4j/1.0/guide-liquibase.html new file mode 100644 index 00000000..726c89f6 --- /dev/null +++ b/docs/devon4j/1.0/guide-liquibase.html @@ -0,0 +1,448 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ + +
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-log-monitoring.html b/docs/devon4j/1.0/guide-log-monitoring.html new file mode 100644 index 00000000..4ed75a53 --- /dev/null +++ b/docs/devon4j/1.0/guide-log-monitoring.html @@ -0,0 +1,501 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

Log-Monitoring

+
+
+

Log-monitoring is an aspect of monitoring with a strict focus on logging. +With trends towards IT landscapes with many but much smaller apps the classical approach to write log-files to the disc and let operators read those via SSH became entirely obsolete. +Nowadays we have up to hundreds or even thousands of apps that themselves are clustered into multiple nodes. +Therefore you should establish a centralized log monitoring system in the environment and let all your nodes log directly into that system. +This approach gives the following benefits:

+
+
+
    +
  • +

    all log information available in one place

    +
  • +
  • +

full-text search across all logfiles

    +
  • +
  • +

    ability to automatically trigger alerts from specific log patterns

    +
  • +
  • +

    ability to do data-mining on logs and visualize in dashboards

    +
  • +
+
+
+
+
+

Options for log-monitoring

+
+
+

Typical products for such a log monitoring system are:

+
+
+ +
+
+

In devonfw we are not biased for any of these products. Therefore, feel free to make your choice according to the requirements of your project.

+
+
+

For Quarkus applications, you can get an insight into the topic by reading the guide about centralized log management.

+
+
+
+
+

API for log-monitoring

+
+
+

The "API" for logging to a log-monitoring system for your app is pretty simple:

+
+
+
    +
  • +

    Write your logs to standard out.

    +
  • +
  • +

    Use JSON logging as format.

    +
  • +
+
+
+

Then the container infrastructure can automatically collect your logs from standard out and directly feed those into the log monitoring system. +As a result, your app does not need to know anything about your log monitoring system and logging becomes most simple. +Further, if you do not write log-files anymore, you might not need to write any other files and therefore may not even need write permissions on the filesystem of your container. +In such case an attacker who may find a vulnerability in your app will have less attack surface in case he can not write any file.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-logging.html b/docs/devon4j/1.0/guide-logging.html new file mode 100644 index 00000000..0a71200b --- /dev/null +++ b/docs/devon4j/1.0/guide-logging.html @@ -0,0 +1,899 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Logging

+
+
+

We recommend to use SLF4J as API for logging, which has become a de facto standard in Java as it has a much better design than java.util.logging offered by the JDK. +There are several implementations for SLF4J. For Spring applications our recommended implementation is Logback. Quarkus uses JBoss Logging which provides a JBoss Log Manager implementation for SLF4J. For more information on logging in Quarkus, see the Quarkus logging guide.

+
+
+
+
+

Logging Dependencies

+
+
+

To use Logback in your Spring application, you need to include the following dependencies:

+
+
+
+
<!-- SLF4J as logging API -->
+<dependency>
+  <groupId>org.slf4j</groupId>
+  <artifactId>slf4j-api</artifactId>
+</dependency>
+<!-- Logback as logging implementation  -->
+<dependency>
+  <groupId>ch.qos.logback</groupId>
+  <artifactId>logback-classic</artifactId>
+</dependency>
+<!-- JSON logging for cloud-native log monitoring -->
+<dependency>
+  <groupId>net.logstash.logback</groupId>
+  <artifactId>logstash-logback-encoder</artifactId>
+</dependency>
+
+
+
+

In devon4j these dependencies are provided by the devon4j-logging module.

+
+
+

In Quarkus, SLF4J and the slf4j-jboss-logmanager are directly included in the Quarkus core runtime and can be used out of the box.

+
+
+
+
+

Logger Access

+
+
+

The general pattern for accessing loggers from your code is a static logger instance per class using the following pattern:

+
+
+
+
import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class MyClass {
+  private static final Logger LOG = LoggerFactory.getLogger(MyClass.class);
+  ...
+}
+
+
+
+

For detailed documentation how to use the logger API check the SLF4j manual.

+
+
+ + + + + +
+ + +In case you are using devonfw-ide and Eclipse you can just type LOG and hit [ctrl][space] to insert the code pattern including the imports into your class. +
+
+
+

Lombok

+
+

In case you are using Lombok, you can simply use the @Slf4j annotation in your class. This causes Lombok to generate the logger instance for you.

+
+
+
+
+
+

Log-Levels

+
+
+

We use a common understanding of the log-levels as illustrated by the following table. +This helps for better maintenance and operation of the systems.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Log-levels
Log-levelDescriptionImpactActive Environments

FATAL

Only used for fatal errors that prevent the application to work at all (e.g. startup fails or shutdown/restart required)

Operator has to react immediately

all

ERROR

An abnormal error indicating that the processing failed due to technical problems.

Operator should check for known issue and otherwise inform development

all

WARNING

A situation where something worked not as expected. E.g. a business exception or user validation failure occurred.

No direct reaction required. Used for problem analysis.

all

INFO

Important information such as context, duration, success/failure of request or process

No direct reaction required. Used for analysis.

all

DEBUG

Development information that provides additional context for debugging problems.

No direct reaction required. Used for analysis.

development and testing

TRACE

Like DEBUG but exhaustive information and for code that is run very frequently. Will typically cause large log-files.

No direct reaction required. Used for problem analysis.

none (turned off by default)

+
+

Exceptions (with their stack trace) should only be logged on FATAL or ERROR level. For business exceptions typically a WARNING including the message of the exception is sufficient.

+
+
+

Configuration of Logback

+
+

The configuration of logback happens via the logback.xml file that you should place into src/main/resources of your app. +For details consult the logback configuration manual.

+
+
+ + + + + +
+ + +Logback also allows to overrule the configuration with a logback-test.xml file that you may put into src/test/resources or into a test-dependency. +
+
+
+
+

Configuration in Quarkus

+
+

There are several options you can set in the application.properties file to configure the behaviour of the logger in Quarkus. For a detailed overview, see the corresponding part of the Quarkus guide.

+
+
+
+
+
+

JSON-logging

+
+
+

For easy integration with log-monitoring, we recommend that your app logs to standard out in JSON following JSON Lines.

+
+
+

In Spring applications, this can be achieved via logstash-logback-encoder (see dependencies). In Quarkus, it can be easily achieved using the quarkus-logging-json extension (see here for more details).

+
+
+

This will produce log-lines with the following format (example formatted for readability):

+
+
+
+
{
+  "timestamp":"2000-12-31T23:59:59.999+00:00",
+  "@version":"1",
+  "message":"Processing 4 order(s) for shipment",
+  "logger_name":"com.myapp.order.logic.UcManageOrder",
+  "thread_name":"http-nio-8081-exec-6",
+  "level":"INFO",
+  "level_value":20000,
+  "appname":"myapp"
+}
+
+
+
+

Adding custom values to JSON log with Logstash

+
+

The JSON encoder even supports logging custom properties for your log-monitoring. +The trick is to use the class net.logstash.logback.argument.StructuredArguments for adding the arguments to you log message, e.g.

+
+
+
+
import static net.logstash.logback.argument.StructuredArguments.v;
+
+...
+    LOG.info("Request with {} and {} took {} ms.", v("url", url), v("status", statusCode), v("duration", millis));
+...
+
+
+
+

This will produce a JSON log-line with the following properties:

+
+
+
+
...
+  "message":"Request with url=https://api/service/v1/ordermanagement/order and status=200 took duration=251 ms",
+  "url":"https://api/service/v1/ordermanagement/order",
+  "status":"200",
+  "duration":"251",
+...
+
+
+
+

As you can quickly see besides the human readable message you also have the structured properties url, status and duration that can be extremely valuable to configure dashboards in your log-monitoring that visualize success/failure ratio as well as performance of your requests.

+
+
+
+
+
+

Classic log-files

+
+
+ + + + + +
+ + +In devon4j, we strongly recommend using JSON logging instead of classic log files. The following section refers only to devon4j Spring applications that use Logback. +
+
+
+

Even though we do not recommend anymore to write classical log-files to the local disc, here you can still find our approach for it.

+
+
+

Maven-Integration

+
+

In the pom.xml of your application add this dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java</groupId>
+  <artifactId>devon4j-logging</artifactId>
+</dependency>
+
+
+
+

The above dependency already adds transitive dependencies to SLF4J and logback. +Also it comes with configuration snippets that can be included from your logback.xml file (see configuration).

+
+
+

The logback.xml to write regular log-files can look as following:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<configuration scan="true" scanPeriod="60 seconds">
+  <property resource="com/devonfw/logging/logback/application-logging.properties" />
+  <property name="appname" value="MyApp"/>
+  <property name="logPath" value="../logs"/>
+  <include resource="com/devonfw/logging/logback/appenders-file-all.xml" />
+  <include resource="com/devonfw/logging/logback/appender-console.xml" />
+
+  <root level="DEBUG">
+    <appender-ref ref="ERROR_APPENDER"/>
+    <appender-ref ref="INFO_APPENDER"/>
+    <appender-ref ref="DEBUG_APPENDER"/>
+    <appender-ref ref="CONSOLE_APPENDER"/>
+  </root>
+
+  <logger name="org.springframework" level="INFO"/>
+</configuration>
+
+
+
+

The provided logback.xml is configured to use variables defined on the config/application.properties file. +On our example, the log files path point to ../logs/ in order to log to tomcat log directory when starting tomcat on the bin folder. +Change it according to your custom needs.

+
+
+
config/application.properties
+
+
log.dir=../logs/
+
+
+
+
+

Log Files

+
+

The classical approach uses the following log files:

+
+
+
    +
  • +

    Error Log: Includes log entries to detect errors.

    +
  • +
  • +

    Info Log: Used to analyze system status and to detect bottlenecks.

    +
  • +
  • +

    Debug Log: Detailed information for error detection.

    +
  • +
+
+
+

The log file name pattern is as follows:

+
+
+
+
«LOGTYPE»_log_«HOST»_«APPLICATION»_«TIMESTAMP».log
+
+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2. Segments of Logfilename
ElementValueDescription

«LOGTYPE»

info, error, debug

Type of log file

«HOST»

e.g. mywebserver01

Name of server, where logs are generated

«APPLICATION»

e.g. myapp

Name of application, which causes logs

«TIMESTAMP»

YYYY-MM-DD_HH00

date of log file

+
+

Example: +error_log_mywebserver01_myapp_2013-09-16_0900.log

+
+
+

Error log from mywebserver01 at application myapp on 16th September 2013 at 9 am.

+
+
+
+

Output format

+
+

We use the following output format for all log entries to ensure that searching and filtering of log entries work consistent for all logfiles:

+
+
+
+
[D: «timestamp»] [P: «priority»] [C: «NDC»][T: «thread»][L: «logger»]-[M: «message»]
+
+
+
+
    +
  • +

    D: Date (Timestamp in ISO8601 format e.g. 2013-09-05 16:40:36,464)

    +
  • +
  • +

    P: Priority (the log level)

    +
  • +
  • +

    C: Correlation ID (ID to identify users across multiple systems, needed when application is distributed)

    +
  • +
  • +

    T: Thread (Name of thread)

    +
  • +
  • +

    L: Logger name (use class name)

    +
  • +
  • +

    M: Message (log message)

    +
  • +
+
+
+

Example:

+
+
+
+
[D: 2013-09-05 16:40:36,464] [P: DEBUG] [C: 12345] [T: main] [L: my.package.MyClass]-[M: My message...]
+
+
+
+ + + + + +
+ + +When using devon4j-logging, this format is used by default. To achieve this format in Quarkus, set quarkus.log.console.format=[D: %d] [P: %p] [C: %X] [T: %t] [L: %c] [M: %m]%n in your properties. +
+
+
+
+

Correlation ID

+
+

In order to correlate separate HTTP requests to services belonging to the same user / session, we provide a servlet filter called DiagnosticContextFilter. +This filter takes a provided correlation ID from the HTTP header X-Correlation-Id. +If none was found, it will generate a new correlation id as UUID. +This correlation ID is added as MDC to the logger. +Therefore, it will then be included to any log message of the current request (thread). +Further concepts such as service invocations will pass this correlation ID to subsequent calls in the application landscape. Hence you can find all log messages related to an initial request simply via the correlation ID even in highly distributed systems.

+
+
+
+

Security

+
+

In order to prevent log forging attacks you can simply use the suggested JSON logging format. +Otherwise you can use com.devonfw.module.logging.common.impl.SingleLinePatternLayout as demonstrated here in order to prevent such attacks.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-logic-layer.html b/docs/devon4j/1.0/guide-logic-layer.html new file mode 100644 index 00000000..1a99e83d --- /dev/null +++ b/docs/devon4j/1.0/guide-logic-layer.html @@ -0,0 +1,494 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Logic Layer

+
+
+

The logic layer is the heart of the application and contains the main business logic. +According to our business architecture, we divide an application into components. +For each component, the logic layer defines different use-cases. Another approach is to define a component-facade, which we do not recommend for future applications. Especially for Quarkus applications, we want to simplify things and highly suggest omitting component-facade completely and using use-cases only. +It is very important that you follow the links to understand the concept of use-case in order to properly implement your business logic.

+
+
+
+
+

Responsibility

+
+
+

The logic layer is responsible to implement the business logic according to the specified functional demands and requirements. +Therefore, it creates the actual value of the application. The logic layer is responsible for invoking business logic in external systems. +The following additional aspects are also included in its responsibility:

+
+
+ +
+
+
+
+

Security

+
+
+

The logic layer is the heart of the application. It is also responsible for authorization and hence security is important in this current case. Every method exposed in an interface needs to be annotated with an authorization check, stating what role(s) a caller must provide in order to be allowed to make the call. The authorization concept is described here.

+
+
+

Direct Object References

+
+

Insecure Direct Object References are a well-known security threat. This simply gives you two options:

+
+
+
    +
  • +

    avoid direct object references

    +
  • +
  • +

    ensure that direct object references are secure

    +
  • +
+
+
+

Especially when using REST, direct object references via technical IDs are common sense. This implies that you have a proper authorization in place. This is especially tricky when your authorization does not only rely on the type of the data and according to static permissions but also on the data itself. Vulnerabilities for this threat can easily happen by design flaws and inadvertence. Here is an example from our sample application:

+
+
+

We have a generic use-case to manage BLOBs. In the first place, it makes sense to write a generic REST service to load and save these BLOBs. However, the permission to read or even update such BLOB depends on the business object hosting the BLOB. Therefore, such a generic REST service would open the door for this OWASP A4 vulnerability. To solve this in a secure way, you need individual services for each hosting business object to manage the linked BLOB and have to check permissions based on the parent business object. In this example the ID of the BLOB would be the direct object reference and the ID of the business object (and a BLOB property indicator) would be the indirect object reference.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-lombok.html b/docs/devon4j/1.0/guide-lombok.html new file mode 100644 index 00000000..078a417c --- /dev/null +++ b/docs/devon4j/1.0/guide-lombok.html @@ -0,0 +1,522 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Lombok

+
+
+

Lombok is a library that works with an annotation processor and will generate code for you to save you some time and reduce the amount of boilerplate code in your project. Lombok can generate getter and setter, equals methods, automate your logging variables for your classes, and more. Follow the list of all the features provided by Lombok to get an overview.

+
+
+
+
+

Lombok Dependency

+
+
+

To get access to the Lombok library just add the following dependency to the POM.xml.

+
+
+

The Lombok dependency:

+
+
+
+
<dependency>
+	<groupId>org.projectlombok</groupId>
+	<artifactId>lombok</artifactId>
+	<version>1.18.20</version>
+</dependency>
+
+
+
+

To get Lombok working with your current IDE you should also install the Lombok addon. Follow the Eclipse installation guide, there are also guides for other supported IDEs.

+
+
+
+
+

Lombok with Mapstruct

+
+
+

MapStruct takes advantage of generated getters, setters, and constructors from Lombok and uses them to +generate the mapper implementations. Lombok is also an annotation processor and since version 1.18.14 both frameworks are working together. Just add the lombok-mapstruct-binding to your POM.xml.

+
+
+

The Lombok annotation processor and the lombok-mapstruct-binding

+
+
+
+
<dependency>
+	<groupId>org.projectlombok</groupId>
+	<artifactId>lombok-mapstruct-binding</artifactId>
+	<version>0.2.0</version>
+</dependency>
+
+<plugin>
+	<groupId>org.apache.maven.plugins</groupId>
+	<artifactId>maven-compiler-plugin</artifactId>
+	<version>3.8.1</version>
+	<configuration>
+		<source>1.8</source>
+		<target>1.8</target>
+		<annotationProcessorPaths>
+			<path>
+				<groupId>org.projectlombok</groupId>
+				<artifactId>lombok</artifactId>
+				<version>1.18.4</version>
+			</path>
+			<path>
+				<groupId>org.projectlombok</groupId>
+				<artifactId>lombok-mapstruct-binding</artifactId>
+				<version>0.2.0</version>
+			</path>
+		</annotationProcessorPaths>
+	</configuration>
+</plugin>
+
+
+
+

In our quarkus reference project you can get a look into the usage of both frameworks.

+
+
+
+
+

Lombok Usage

+
+
+

Lombok can be used like any other annotation processor and will be shown in the simple example below to generate getter and setter for a Product Entity.

+
+
+
+
@Getter
+@Setter
+public class Product{
+
+    private String title;
+    private String description;
+    private BigDecimal price;
+}
+
+
+
+

For advanced Lombok usage follow the Baeldung Lombok guide or just read the Lombok javadoc

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-microservice.html b/docs/devon4j/1.0/guide-microservice.html new file mode 100644 index 00000000..7d1667ad --- /dev/null +++ b/docs/devon4j/1.0/guide-microservice.html @@ -0,0 +1,474 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Microservices in devonfw

+
+
+

The Microservices architecture is an approach for application development based on a series of small services grouped under a business domain. Each individual service runs autonomously and communicates with the others through its API. The independence between the different services makes it possible to manage (upgrade, fix, deploy, etc.) each one without affecting the rest of the system’s services. In addition, the microservices architecture allows specific services to be scaled when facing an increase in requests, so applications based on microservices are more flexible and stable, and can adapt quickly to changes in demand.

+
+
+

However, this new approach, developing apps based on microservices, presents some downsides.

+
+
+

Let’s see the main challenges when working with microservices:

+
+
+
    +
  • +

    Having the applications divided in different services we will need a component (router) to redirect each request to the related microservice. These redirection rules must implement filters to guarantee a proper functionality.

    +
  • +
  • +

    In order to manage correctly the routing process, the application will also need a catalog with all the microservices and its details: IPs and ports of each of the deployed instances of each microservice, the state of each instance and some other related information. This catalog is called Service Discovery.

    +
  • +
  • +

    With all the information of the Service Discovery the application will need to calculate and select between all the available instances of a microservice which is the suitable one. This will be figured out by the library Client Side Load Balancer.

    +
  • +
  • +

    The different microservices will be likely interconnected with each other, that means that in case of failure of one of the microservices involved in a process, the application must implement a mechanism to avoid the error propagation through the rest of the services and provide an alternative as a process result. To solve this, the pattern Circuit Breaker can be implemented in the calls between microservices.

    +
  • +
  • +

    As we have mentioned, the microservices will exchange calls and information with each other so our applications will need to provide a secured context to avoid not allowed operations or intrusions. In addition, since microservices must be able to operate in an isolated way, it is not recommended to maintain a session. To meet this need without using Spring sessions, a token-based authentication is used that exchanges information using the JSON Web Token (JWT) protocol.

    +
  • +
+
+
+

In addition to all of this we will find other issues related to this particular architecture that we will address fitting the requirements of each project.

+
+
+
    +
  • +

    Distributed databases: each instance of a microservice should have only one database.

    +
  • +
  • +

    Centralized logs: each instance of a microservice creates a log and a trace that should be centralized to allow an easier way to read all that information.

    +
  • +
  • +

    Centralized configuration: each microservice has its own configuration, so our applications should group all those configurations in only one place to ease the configuration management.

    +
  • +
  • +

    Automatized deployments: as we are managing several components (microservices, catalogs, balancers, etc.) the deployment should be automatized to avoid errors and ease this process.

    +
  • +
+
+
+

To address the above, devonfw microservices has an alternative approach Microservices based on Netflix-Tools.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-migration-oasp3-to-devon3.1.html b/docs/devon4j/1.0/guide-migration-oasp3-to-devon3.1.html new file mode 100644 index 00000000..d0948ad5 --- /dev/null +++ b/docs/devon4j/1.0/guide-migration-oasp3-to-devon3.1.html @@ -0,0 +1,881 @@ + + + + + + Migration Guide from oasp 3.0.0 to devon4j 3.1.0 migration :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+

Migration Guide from oasp 3.0.0 to devon4j 3.1.0 migration

+
+
+
+
    +
  • +

    Automatic migration with devcon doesn’t work with parent POMs; you need to migrate every single subproject on its own.

    +
  • +
  • +

    If your subprojects don’t contain the old oasp4j or devon4j version number you have to copy your parent pom file into your child pom files and then use the migrate command.

    +
  • +
  • +

    use the devon4j migration command

    +
  • +
  • +

    after migration you need to update the version tag in the pom file manually. If you’re working with a parent pom you also need to update the version tag of the parent reference +in the child pom file.

    +
  • +
  • +

    In case you are using eclipse, now you have to update and rebuild all your maven projects (alt + F5)

    +
  • +
+
+
+
+
+

JsonDeserializer:

+
+
+
    +
  1. +

    Change the super class from AbstractJsonDeserializer to JsonDeserializer

    +
  2. +
  3. +

    Implement unimplemented methods or change the method signature from Pageable deserializeNode(JsonNode node) to Pageable deserialize(JsonParser p, DeserializationContext context)

    +
  4. +
  5. +

    To get the JsonNode you need to use the following methods with the JsonParser p: JsonNode node = p.getCodec().readTree(p);

    +
  6. +
  7. +

    To get values of properties, you need to change from getRequiredValue(node, "property", String.class) to JacksonUtil.readValue(node, "property", String.class, false);

    +
  8. +
+
+
+
+
+

QueryUtil update

+
+
+

whereString() (StringSearchConfigTo) method or similar:

+
+
+
    +
  1. +

    Check the parameter type, paying attention to the source of the used class (the classes may have the same name but the one from oasp4j is obsolete)

    +
  2. +
  3. +

    Delete the old import of oasp4j (for example import io.oasp.module.beanmapping.common.api.BeanMapper) and import the new class of +devon4j (for example import com.devonfw.module.beanmapping.common.api.BeanMapper)

    +
  4. +
+
+
+
+
+

logback.xml file

+
+
+
    +
  1. +

    There are at most three changes that need to be made in the logback.xml file

    +
  2. +
  3. +

    Change the logging properties tag from +<property resource="io/oasp/logging/logback/application-logging.properties" /> to <property resource="com/devonfw/logging/logback/application-logging.properties" />

    +
  4. +
  5. +

    Change the appenders file all tag from +<include resource="io/oasp/logging/logback/appenders-file-all.xml" /> to <include resource="com/devonfw/logging/logback/appenders-file-all.xml" />

    +
  6. +
  7. +

    Change the appender console tag from <include resource="io/oasp/logging/logback/appender-console.xml" /> to <include resource="com/devonfw/logging/logback/appender-console.xml" />

    +
  8. +
+
+
+
+
+

OaspPackage:

+
+
+

If you use the OaspPackage class you can replace it with the Devon4jPackage class

+
+
+
+
+

AbstractLogic

+
+
+
    +
  1. +

    You can replace all net.sf.mmm.util imports with the appropriate com.devonfw.module imports. For example "import net.sf.mmm.util.entity.api.GenericEntity" to "import com.devonfw.module.basic.common.api.entity.GenericEntity"

    +
  2. +
  3. +

    Except the TransferObject and the AbstractTransferObject. These are replaced with the devonfw AbstractTo. +Example: "import net.sf.mmm.util.transferobject.api.AbstractTransferObject" or "import net.sf.mmm.util.transferobject.api.TransferObject" to "import com.devonfw.module.basic.common.api.to.AbstractTo".

    +
  4. +
+
+
+
+
+

BeanDozerConfig

+
+
+
    +
  1. +

    Change the @ComponentScan annotation from @ComponentScan(basePackages = { "io.oasp.module.beanmapping" }) to @ComponentScan(basePackages = { "com.devonfw.module.beanmapping" }).

    +
  2. +
  3. +

    Now you have to create a variable DOZER_MAPPING_XML with following content: static final String DOZER_MAPPING_XML = "config/app/common/dozer-mapping.xml".

    +
  4. +
  5. +

    Then you create a list beanMappings where you add the variable created in step 2.

    +
  6. +
  7. +

    To get a Mapper instance you now have to use a builder like Mapper mapper = DozerBeanMapperBuilder.create().withMappingFiles(beanMappings).build().

    +
  8. +
  9. +

    Change occurrences of io.oasp.module.beanmapping.common.impl.dozer.IdentityConverter in dozer-mappings.xml to com.devonfw.module.beanmapping.common.impl.dozer.IdentityConverter

    +
  10. +
  11. +

    Migration of dozer 5 to 6.4: +https://github.com/DozerMapper/dozer/blob/master/docs/asciidoc/migration/v6-to-v61.adoc +https://github.com/DozerMapper/dozer/blob/master/docs/asciidoc/migration/v61-to-v62.adoc +https://github.com/DozerMapper/dozer/blob/master/docs/asciidoc/migration/v62-to-v63.adoc +https://github.com/DozerMapper/dozer/blob/master/docs/asciidoc/migration/v63-to-v64.adoc

    +
  12. +
  13. +

    In addition, the semantics of <mapping type="one-way"> seems to be changed. If you for example just needed to exclude files on mapping from a to b one-way, you now have to declare an empty mapping as well from b to a one-way without any field(-extension) declarations to enable mapping from b to a at all. See also https://github.com/DozerMapper/dozer/issues/605 and https://github.com/DozerMapper/dozer/issues/451

    +
  14. +
+
+
+
+
+

pom.xml

+
+
+

In the pom.xml file you have to make some manual changes. You need to change all oasp dependencies to devonfw dependencies. Here are some examples:

+
+
+
    +
  1. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.modules</groupId>
    +      <artifactId>oasp4j-beanmapping</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.modules</groupId>
    +      <artifactId>devon4j-beanmapping</artifactId>
    +    </dependency>
    +
    +
    +
  2. +
  3. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.modules</groupId>
    +      <artifactId>oasp4j-security</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.modules</groupId>
    +      <artifactId>devon4j-security</artifactId>
    +    </dependency>
    +
    +
    +
  4. +
  5. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.modules</groupId>
    +      <artifactId>oasp4j-web</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.modules</groupId>
    +      <artifactId>devon4j-web</artifactId>
    +    </dependency>
    +
    +
    +
  6. +
  7. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.starters</groupId>
    +      <artifactId>oasp4j-starter-cxf-client-rest</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.starters</groupId>
    +      <artifactId>devon4j-starter-cxf-client-rest</artifactId>
    +    </dependency>
    +
    +
    +
  8. +
  9. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.starters</groupId>
    +      <artifactId>oasp4j-starter-cxf-client-ws</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.starters</groupId>
    +      <artifactId>devon4j-starter-cxf-client-ws</artifactId>
    +    </dependency>
    +
    +
    +
  10. +
  11. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.starters</groupId>
    +      <artifactId>oasp4j-starter-cxf-server-rest</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.starters</groupId>
    +      <artifactId>devon4j-starter-cxf-server-rest</artifactId>
    +    </dependency>
    +
    +
    +
  12. +
  13. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.starters</groupId>
    +      <artifactId>oasp4j-starter-spring-data-jpa</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.starters</groupId>
    +      <artifactId>devon4j-starter-spring-data-jpa</artifactId>
    +    </dependency>
    +
    +
    +
  14. +
  15. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.modules</groupId>
    +      <artifactId>oasp4j-batch</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.modules</groupId>
    +      <artifactId>devon4j-batch</artifactId>
    +    </dependency>
    +
    +
    +
  16. +
  17. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.modules</groupId>
    +      <artifactId>oasp4j-test</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.modules</groupId>
    +      <artifactId>devon4j-test</artifactId>
    +    </dependency>
    +
    +
    +
  18. +
  19. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.modules</groupId>
    +      <artifactId>oasp4j-logging</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.modules</groupId>
    +      <artifactId>devon4j-logging</artifactId>
    +    </dependency>
    +
    +
    +
  20. +
  21. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.modules</groupId>
    +      <artifactId>oasp4j-jpa-spring-data</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.modules</groupId>
    +      <artifactId>devon4j-jpa-spring-data</artifactId>
    +    </dependency>
    +
    +
    +
  22. +
  23. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.modules</groupId>
    +      <artifactId>oasp4j-rest</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.modules</groupId>
    +      <artifactId>devon4j-rest</artifactId>
    +    </dependency>
    +
    +
    +
  24. +
+
+
+
+
+

MutableGenericEntity

+
+
+

If you use the MutableGenericEntity<> class you have to change it to the PersistenceEntity<> class. Change the import "net.sf.mmm.util.entity.api.MutableGenericEntity" to +"import com.devonfw.module.basic.common.api.entity.PersistenceEntity".

+
+
+
+
+

CompositeTo

+
+
+

If you use the CompositeTo class you should now use the AbstractTo class. Just change the import from "import net.sf.mmm.util.transferobject.api.CompositeTo" +to "import com.devonfw.module.basic.common.api.to.AbstractTo".

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-migration-spring-quarkus.html b/docs/devon4j/1.0/guide-migration-spring-quarkus.html new file mode 100644 index 00000000..62332723 --- /dev/null +++ b/docs/devon4j/1.0/guide-migration-spring-quarkus.html @@ -0,0 +1,696 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Migrate from Spring to Quarkus

+
+
+

This guide will cover the migration process of a Spring application to a Quarkus application. There are already articles about migrating from Spring to Quarkus (e.g. https://developers.redhat.com/blog/2020/04/10/migrating-a-spring-boot-microservices-application-to-quarkus, https://dzone.com/articles/migrating-a-spring-boot-application-to-quarkus-cha). +This guide will focus more on the devon4j specific aspects. We assume that a working Spring application exists, built in the classic devon4j specific way (e.g. Jump The Queue or My Thai Star).

+
+
+
+
+

Create the Quarkus application

+
+
+

We start with an empty Quarkus project. You can create the project with Maven on the command line or use the online generator. The advantage of the online generator is that you have a pre-selection of dependencies to use in your project. +For starters, let’s select the basic dependencies needed to develop a REST service with database connectivity (you can use one of the links in the Quarkus template guide): RESTEasy JAX-RS, RESTEasy Jackson, Hibernate ORM, Spring Data JPA API, JDBC Driver (choose the type of database you need), Flyway (if you have database migration schemas), SmallRye Health (optional for Health Monitoring)

+
+
+

The list does not include all required dependencies. We will add more dependencies to the project later. So for now generate the application with these dependencies.

+
+
+

Migration Toolkit from Red Hat

+
+

Red Hat provides a migration toolkit (MTA, Migration Toolkit for Applications), that supports you migrating a Spring application to a Quarkus application. There are several versions of this toolkit (e.g., a web console, a Maven plugin, or an IDE plugin). +The MTA analyzes your existing application and generates a report with hints and instructions for migrating from Spring to Quarkus. For example, it gives you an indication of which dependencies are not supported in your project for a Quarkus application and which dependencies you need to swap them with. The analysis is rule-based, and you can also add your own rules that will be checked during analysis.

+
+
+
+
+
+

Entities

+
+
+

There is nothing special to consider when creating the entities. In most cases, you can simply take the code from your Spring application and use it for your Quarkus application. Usually, the entities extend a superclass ApplicationPersistenceEntity containing for example the id property. You can also take this class from your Spring application and reuse it.

+
+
+
+
+

Transfer objects

+
+
+

The next step is to create the appropriate transfer objects for the entities. In a devon4j Spring application, we would use CobiGen to create these classes. Since CobiGen is not usable for this purpose in Quarkus applications yet, we have to create the classes manually.

+
+
+

First we create some abstract base classes for the search criteria and DTO classes. Normally these would also be created by CobiGen.

+
+
+
AbstractSearchCriteriaTo
+
+
public abstract class AbstractSearchCriteriaTo extends AbstractTo {
+
+  private static final long serialVersionUID = 1L;
+
+  private Pageable pageable;
+
+  //getter + setter for pageable
+}
+
+
+
+
AbstractDto
+
+
public abstract class AbstractDto extends AbstractTo {
+
+  private static final long serialVersionUID = 1L;
+
+  private Long id;
+
+  private int modificationCounter;
+
+  public AbstractDto() {
+
+    super();
+  }
+
+  //getter + setter
+
+  @Override
+  protected void toString(StringBuilder buffer) {
+    ...
+  }
+}
+
+
+
+

The class AbstractTo, the other classes extend, would be provided by the devon4j-basic module in a devon4j Spring application. You can take the code from here and reuse it in your Quarkus project.

+
+
+

Now you can create your transfer objects. Most of the code of the transfer objects of your Spring application should be reusable. For Quarkus, we recommend (as mentioned here) to use *Dto instead of *Eto classes. So be sure to change the names of the classes accordingly.

+
+
+
+
+

Data Access Layer

+
+
+

In devon4j, we propose to use Spring Data JPA to build the data access layer using repositories and QueryDSL to build dynamic queries. We will also use this approach for Quarkus applications, but we need to change the implementation because the devon4j modules are based on reflection, which is not suitable for Quarkus. +In Quarkus we will use Querydsl using code generation. So for this layer, more changes are required and we can’t just take the existing code.

+
+
+

First, create a repository interface for your entity class that extends JpaRepository (see here).

+
+
+

To add QueryDSL support to your project, add the following dependencies to your pom.xml file:

+
+
+
pom.xml
+
+
<dependency>
+  <groupId>com.querydsl</groupId>
+  <artifactId>querydsl-jpa</artifactId>
+  <version>4.3.1</version>
+</dependency>
+<dependency>
+  <groupId>com.querydsl</groupId>
+  <artifactId>querydsl-apt</artifactId>
+  <scope>provided</scope>
+  <version>4.3.1</version>
+</dependency>
+
+
+
+

As mentioned above, we will use QueryDSL with code generation. For this, add the QueryDSL annotation processor to your plugins:

+
+
+
pom.xml
+
+
<plugins>
+...
+  <plugin>
+    <groupId>com.mysema.maven</groupId>
+    <artifactId>apt-maven-plugin</artifactId>
+    <version>1.1.3</version>
+    <executions>
+      <execution>
+        <phase>generate-sources</phase>
+        <goals>
+          <goal>process</goal>
+        </goals>
+        <configuration>
+          <outputDirectory>target/generated-sources/annotations</outputDirectory>
+          <processor>com.querydsl.apt.jpa.JPAAnnotationProcessor</processor>
+        </configuration>
+      </execution>
+    </executions>
+  </plugin>
+</plugins>
+
+
+
+

To implement the queries, follow the corresponding guide.

+
+
+

Set the following properties in the application.properties file to configure the connection to your database (see also here):

+
+
+
+
quarkus.datasource.db-kind=...
+quarkus.datasource.jdbc.url=...
+quarkus.datasource.username=...
+quarkus.datasource.password=...
+
+
+
+
+
+

Logic Layer

+
+
+

For the logic layer, devon4j uses a use-case approach. You can reuse the use case interfaces from the api module of the Spring application. Again, make sure to rename the transfer objects.

+
+
+

Create the appropriate class that implements the interface. Follow the implementation section of the use-case guide to implement the methods. For mapping the entities to the corresponding transfer objects, see the next section.

+
+
+
+
+

Mapping

+
+
+

For bean mapping, we need to use a completely different approach in the Quarkus application than in the Spring application. For Quarkus, we use MapStruct, which creates the mapper at build time rather than at runtime using reflection. Add the following dependencies to your pom.xml.

+
+
+
pom.xml
+
+
<dependency>
+  <groupId>org.mapstruct</groupId>
+  <artifactId>mapstruct-processor</artifactId>
+  <version>1.4.2.Final</version>
+</dependency>
+<dependency>
+  <groupId>org.mapstruct</groupId>
+  <artifactId>mapstruct</artifactId>
+  <version>1.4.2.Final</version>
+</dependency>
+
+
+
+

Then you can create the mapper as follows:

+
+
+
Mapper
+
+
@Mapper(componentModel = "cdi")
+public interface YourEntityMapper {
+  YourEntityDto map(YourEntity entity);
+
+  YourEntity map(YourEntityDto dto);
+
+  ...
+}
+
+
+
+

Inject the mapper into your use-case implementation and simply use the methods. The method implementations of the mapper are created when the application is built.

+
+
+
+
+

Service Layer

+
+
+

For the implementation of the service layer, we use JAX-RS for both Quarkus and Spring applications to create the REST services. Classic devon4j Spring applications rely on Apache CXF as the implementation of JAX-RS. +For Quarkus, we use RESTEasy. Since both are implementations of JAX-RS, much of the Spring application code can be reused.

+
+
+

Take the definition of the REST endpoints from the api module of the Spring application (make sure to rename the transfer objects), inject the use-cases from the logic layer and use them in the REST service methods as follows:

+
+
+
REST service
+
+
@Path("/path/v1")
+public class YourComponentRestService {
+
+  @Inject
+  UcFindYourEntity ucFindYourEntity;
+
+  @Inject
+  UcManageYourEntity ucManageYourEntity;
+
+  @GET
+  @Path("/yourEntity/{id}/")
+  public YourEntityDto getYourEntity(@PathParam("id") long id) {
+
+    return this.ucFindYourEntity.findYourEntity(id);
+  }
+
+  ...
+}
+
+
+
+
+
+

Summary

+
+
+

As you have seen, some parts hardly differ when migrating a Spring application to a Quarkus application, while other parts differ more. The above sections describe the parts needed for simple applications that provide REST services with a data access layer. +If you add more functionality, more customization and other frameworks/dependencies may be required. If that is the case, take a look at the corresponding guide on the topic in the devon4j documentation or check if there is a tutorial on the official Quarkus website.

+
+
+

Furthermore, we can summarize that migrating from a Spring application to a Quarkus representative is not complex. Although Quarkus is a very young framework (release 1.0 was in 2019), it brings a lot of proven standards and libraries that you can integrate into your application. +This makes it easy to migrate and reuse code from existing (Spring) applications. Also, Quarkus comes with Spring API compatibility for many Spring modules (Spring Data JPA, Spring DI, etc.), which makes it easier for developers to reuse their knowledge.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-monitoring.html b/docs/devon4j/1.0/guide-monitoring.html new file mode 100644 index 00000000..7bbb6473 --- /dev/null +++ b/docs/devon4j/1.0/guide-monitoring.html @@ -0,0 +1,506 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Monitoring

+
+
+

For monitoring a complex application landscape it is crucial to have an exact overview of which applications are up and running and which are not and why. +In devonfw we only focus on topics which are most important when developing production-ready applications. +On a high level view we strongly suggest to separate the application to be monitored from the monitoring system itself. +Therefore, your application should concentrate on providing app specific data for the monitoring. +Aspects such as aggregation, visualization, search, alerting, etc. should be addressed outside of your app by a monitoring system product. +There are many products providing such a monitoring system like checkmk, icinga, SkyWalking, etc. +Please note that there is a huge list of such products and devonfw is not biased or aims to make a choice for you. +Instead please search and find the products that fit best for your requirements and infrastructure.

+
+
+
+
+

Types of monitoring

+
+
+

As monitoring covers a lot of different aspects we separate the following types of monitoring and corresponding data:

+
+
+
    +
  • +

    Log-monitoring
    +is about collecting and monitoring the logs of all apps and containers in your IT landscape. It is suitable for events such as an HTTP request with its URL, resulting status code and duration in milliseconds. Your monitoring may not react to such data in realtime. Instead it may take a delay of one or a few seconds.

    +
  • +
  • +

    Infrastructure monitoring
    +is about monitoring the (hardware) infrastructure with measures like usage of CPU, memory, disc-space, etc. This is a pure operational task and your app should have nothing to do with this. In other words it is a waste if your app tries to monitor these aspects as existing products can do this much better and your app will only see virtual machines and is unable to see the physical infrastructure.

    +
  • +
  • +

    Health check
    +is about providing internal data about the current health of your app. Typically you provide sensors with health status per component or interface to neighbour service (database connectivity, etc.).

    +
  • +
  • +

    Application Performance Monitoring
    +is about measuring performance and tracing down performance issues.

    +
  • +
+
+
+
+
+

Health-Check

+
+
+

The idea of a health check is to provide monitoring data about the current health status of your application. +This allows to integrate this specific data into the monitoring system used for your IT landscape. +In order to keep the monitoring simple and easy to integrate consider using the following best practices:

+
+
+
    +
  • +

    Use simple and established protocols such as REST instead of JMX via RMI.

    +
  • +
  • +

    Consider using recent standards such as microprofile-health.

    +
  • +
  • +

    Consider to drop access-control for your monitoring interfaces and for security prevent external access to it in your infrastructure (loadbalancers or gateways). Monitoring is only for usage within an IT landscape internally. It does not make sense for externals and end-users to access your app for reading monitoring data from a random node decided by a loadbalancer. Further, external access can easily lead to sensitive data exposure.

    +
  • +
  • +

    Consider to define different end-points per usage-scenario. So if you want the loadbalancer to ask your app monitoring for availability of each node then create a separate service URL that only provides OK or anything else for failure (NOK, 404, 500, timeout). Do not mix this with a health-check that needs more detailed information.

    +
  • +
  • +

    Also do not forget about basic features such as providing the name and the release version of your application.

    +
  • +
  • +

    Be careful to automate decisions based on monitoring and health checks. It easily turns out to be stupid if you automatically restart your pod or container because of some monitoring indicator. In the worst case a failure of a central component will cause your health-check to report down for all apps and as a result all your containers will be restarted frequently. Instead of curing problems such decisions will cause much more harm and trouble.

    +
  • +
  • +

    Avoid causing noticeable load with your monitoring and health-check itself. In many cases it is better to use log-monitoring or to collect monitoring data from use-cases that happen in your app anyway. If you create dummy read and write requests in your monitoring implementation you will easily turn it into a DOS-attack.

    +
  • +
+
+
+

For spring you can simply integrate app monitoring and health check via spring-boot-actuator.

+
+
+

For quarkus you can simply integrate app monitoring via micrometer or smallrye-metrics and health check via smallrye-health.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-openapi.html b/docs/devon4j/1.0/guide-openapi.html new file mode 100644 index 00000000..b0904f0f --- /dev/null +++ b/docs/devon4j/1.0/guide-openapi.html @@ -0,0 +1,543 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

OpenAPI

+
+
+

The OpenAPI Specification (OAS) defines a standard for describing RESTful web services in a machine- and human-readable format. OpenAPI allows REST APIs to be defined in a uniform manner. +Technically, an OpenAPI document is written in YAML or JSON format. The specification defines the structure of a REST API by describing attributes such as path information, response codes, and return types. Some examples can be found here.

+
+
+

OpenAPI is often used in combination with Swagger. Swagger is a set of tools built around OpenAPI, that help developers to design and document their REST APIs. +The most common tool is the Swagger UI, which uses the OpenAPI specification to create a graphical interface of the REST API that you can also interact with. Check out the Swagger online editor to get a feeling for it.

+
+
+
+
+

OpenAPI generation

+
+
+

There are several extensions you can use in your project to automatically generate the OpenAPI specifications and Swagger UI from your REST API (code-first approach). devon4j recommends the following two extensions/plugins to use:

+
+
+
    +
  • +

    Smallrye OpenAPI extension

    +
  • +
  • +

    ServicedocGen maven plugin

    +
  • +
+
+
+

Smallrye OpenAPI

+
+

Quarkus provides OpenAPI support through Smallrye OpenAPI extension:

+
+
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-smallrye-openapi</artifactId>
+</dependency>
+
+
+
+

After adding the extension to your project, you can access the Swagger UI by navigating to /q/swagger-ui.

+
+
+

The OpenAPI specification can be accessed by requesting /q/openapi.

+
+
+

Smallrye OpenAPI is compliant with MicroProfile OpenAPI. You can add MicroProfile annotations to further describe your REST endpoints and extend the OpenAPI documentation. +More information for this can be found here or here.

+
+
+ + + + + +
+ + +
+

Quarkus recommends using this extension and you can document your APIs in great detail by using the MicroProfile annotations. The downside to this is that using these annotations will blow up your code and you will have some duplicate information in it. +If you don’t want to specify the REST API again with all this annotation based information, we also recommend taking a look at the ServicedocGen Maven plugin for your Quarkus applications when implementing JAX-RS APIs.

+
+
+
+
+
+

ServicedocGen Maven Plugin

+
+

The ServicedocGen maven plugin can be used within both Spring and Quarkus applications. +It works a bit differently than the Smallrye extensions mentioned above. The plugin analyzes the REST API and its JavaDoc and then generates the OpenAPI specification and the Swagger UI as static files. So no Swagger or MicroProfile annotations have to be added.

+
+
+

The plugin can be configured in the pom.xml file of your application as follows:

+
+
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>org.codehaus.mojo</groupId>
+      <artifactId>servicedocgen-maven-plugin</artifactId>
+      <version>1.0.0</version>
+      <executions>
+        <execution>
+          <goals>
+            <goal>generate</goal>
+          </goals>
+        </execution>
+      </executions>
+      <configuration>
+        <descriptor>
+          <info>
+            <title>...</title>
+            <description>...</description>
+          </info>
+          <host>...</host>
+          <port>...</port>
+          <basePath>...</basePath>
+          <schemes>
+            <scheme>...</scheme>
+          </schemes>
+        </descriptor>
+      </configuration>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+

In the configuration section you have to define additional information to generate the OpenAPI specification correctly. An example can be found in our Quarkus reference application. +When building the application, an OpenApi.yaml and a SwaggerUI.html file are created in the /target/site folder. To make the Swagger UI available in the browser, the file must be served by some servlet.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-queueing.html b/docs/devon4j/1.0/guide-queueing.html new file mode 100644 index 00000000..00833787 --- /dev/null +++ b/docs/devon4j/1.0/guide-queueing.html @@ -0,0 +1,462 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ + +
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-repository.html b/docs/devon4j/1.0/guide-repository.html new file mode 100644 index 00000000..77423dcd --- /dev/null +++ b/docs/devon4j/1.0/guide-repository.html @@ -0,0 +1,652 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Spring Data

+
+
+

Spring Data JPA is supported by both Spring and Quarkus. However, in Quarkus this approach still has some limitations. For detailed information, see the official Quarkus Spring Data guide.

+
+
+
+
+

Motivation

+
+
+

The benefits of Spring Data are (for examples and explanations see next sections):

+
+
+
    +
  • +

    All you need is one single repository interface for each entity. No need for a separate implementation or other code artifacts like XML descriptors, NamedQueries class, etc.

    +
  • +
  • +

    You have all information together in one place (the repository interface) that actually belongs together (whereas in the classic approach you have the static queries in an XML file, constants for them in the NamedQueries class and referencing usages in DAO implementation classes).

    +
  • +
  • +

    Static queries are most simple to realize as you do not need to write any method body. This means you can develop faster.

    +
  • +
  • +

    Support for paging is already built-in. Again, for static query methods there is nothing you have to do except using the paging objects in the signature.

    +
  • +
  • +

    Still you have the freedom to write custom implementations via default methods within the repository interface (e.g. for dynamic queries).

    +
  • +
+
+
+
+
+

Dependency

+
+
+

In case you want to switch to or add Spring Data support to your Spring or Quarkus application, all you need is to add the respective maven dependency:

+
+
+
spring
+
+
<dependency>
+  <groupId>org.springframework.boot</groupId>
+  <artifactId>spring-boot-starter-data-jpa</artifactId>
+</dependency>
+
+
+
+
quarkus
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-spring-data-jpa</artifactId>
+</dependency>
+
+
+
+
+
+

Repository

+
+
+

For each entity «Entity»Entity an interface is created with the name «Entity»Repository extending JpaRepository. +Such repository is the analogy to a Data-Access-Object (DAO) used in the classic approach or when Spring Data is not an option.

+
+
+
Repository
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long> {
+
+}
+
+
+
+

The Spring Data repository provides some basic implementations for accessing data, e.g. returning all instances of a type (findAll) or returning an instance by its ID (findById).

+
+
+
+
+

Custom method implementation

+
+
+

In addition, repositories can be enriched with additional functionality, e.g. to add QueryDSL functionality or to override the default implementations, by using so called repository fragments:

+
+
+

Example

+
+

The following example shows how to write such a repository:

+
+
+
Repository
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long>, ProductFragment {
+
+  @Query("SELECT product FROM ProductEntity product" //
+      + " WHERE product.title = :title")
+  List<ProductEntity> findByTitle(@Param("title") String title);
+
+  @Query("SELECT product FROM ProductEntity product" //
+      + " WHERE product.title = :title")
+  Page<ProductEntity> findByTitlePaginated(@Param("title") String title, Pageable pageable);
+}
+
+
+
+
Repository fragment
+
+
public interface ProductFragment {
+  Page<ProductEntity> findByCriteria(ProductSearchCriteriaTo criteria);
+}
+
+
+
+
Fragment implementation
+
+
public class ProductFragmentImpl implements ProductFragment {
+  @Inject
+  EntityManager entityManager;
+
+  public Page<ProductEntity> findByCriteria(ProductSearchCriteriaTo criteria) {
+    QProductEntity product = QProductEntity.productEntity;
+    JPAQuery<ProductEntity> query = new JPAQuery<ProductEntity>(this.entityManager);
+    query.from(product);
+
+    String title = criteria.getTitle();
+    if ((title != null) && !title.isEmpty()) {
+      query.where(product.title.eq(title));
+    }
+
+    List<ProductEntity> products = query.fetch();
+    return new PageImpl<>(products, PageRequest.of(criteria.getPageNumber(), criteria.getPageSize()), products.size());
+  }
+}
+
+
+
+

This ProductRepository has the following features:

+
+
+
    +
  • +

    CRUD support from Spring Data (see JavaDoc for details).

    +
  • +
  • +

    Support for QueryDSL integration, paging and more.

    +
  • +
  • +

    A static query method findByTitle to find all ProductEntity instances from DB that have the given title. Please note the @Param annotation that links the method parameter with the variable inside the query (:title).

    +
  • +
  • +

    The same with pagination support via findByTitlePaginated method.

    +
  • +
  • +

    A dynamic query method findByCriteria showing the QueryDSL and paging integration into Spring via a fragment implementation.

    +
  • +
+
+
+

You can find an implementation of this ProductRepository in our Quarkus reference application.

+
+
+ + + + + +
+ + +In Quarkus, native and named queries via the @Query annotation are currently not supported +
+
+
+
+

Integration of Spring Data in devon4j-spring

+
+

For Spring applications, devon4j offers a proprietary solution that integrates seamlessly with QueryDSL and uses default methods instead of the fragment approach. A separate guide for this can be found here.

+
+
+
+
+
+

Drawbacks

+
+
+

Spring Data also has some drawbacks:

+
+
+
    +
  • +

    Some kind of magic behind the scenes that is not so easy to understand. So in case you want to extend all your repositories without providing the implementation via a default method in a parent repository interface you need to deep-dive into Spring Data. We assume that you do not need that and hope what Spring Data and devon already provides out-of-the-box is already sufficient.

    +
  • +
  • +

    The Spring Data magic also includes guessing the query from the method name. This is not easy to understand and especially to debug. Our suggestion is not to use this feature at all and either provide a @Query annotation or an implementation via default method.

    +
  • +
+
+
+
+
+

Limitations in Quarkus

+
+
+
    +
  • +

    Native and named queries are not supported using @Query annotation. You will receive something like: Build step io.quarkus.spring.data.deployment.SpringDataJPAProcessor#build threw an exception: java.lang.IllegalArgumentException: Attribute nativeQuery of @Query is currently not supported

    +
  • +
  • +

    Customizing the base repository for all repository interfaces in the code base, which is done in Spring Data by registering a class that extends SimpleJpaRepository

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-rest-philosophy.html b/docs/devon4j/1.0/guide-rest-philosophy.html new file mode 100644 index 00000000..ce5d5fc7 --- /dev/null +++ b/docs/devon4j/1.0/guide-rest-philosophy.html @@ -0,0 +1,874 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

REST Philosophy

+
+
+

REST and RESTful often implies very strict and specific rules and conventions. +However different people will often have different opinions of such rules. +We learned that this leads to "religious discussions" (starting from PUT vs. POST and IDs in path vs. payload up to Hypermedia and HATEOAS). +These "religious discussions" waste a lot of time and money without adding real value in case of common business applications (if you publish your API on the internet to billions of users this is a different story). +Therefore we give best practices that lead to simple, easy and pragmatic "HTTP APIs" (to avoid the term "REST services" and end "religious discussions"). +Please also note that we do not want to assault anybody nor force anyone to follow our guidelines. +This guide is just an option for people who want to be pragmatic and face such "religious discussions". +Please read the following best practices carefully and be aware that they might slightly differ from what your first hit on the web will say about REST (see e.g. RESTful cookbook).

+
+
+

If you want to provide an entity with a different structure do not append further details to an element URL but create a separate collection URL as base. +So use https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity-with-details/42 instead of https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/42/with-details. +For offering a CTO simply append -cto to the collection URL (e.g. …​/myentity-cto/).

+
+
+

While REST was designed as a pragmatical approach it sometimes leads to "religious discussions" e.g. about using PUT vs. POST (see ATTENTION notice above). +As the devonfw has a strong focus on usual business applications it proposes a more "pragmatic" approach to REST services.

+
+
+

On the next table we compare the main differences between the "canonical" REST approach (or RESTful) and the devonfw proposal.

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Usage of HTTP methods
HTTP MethodRESTful Meaningdevonfw

GET

Read single element.

+

Search on an entity (with parametrized url)

Read a single element.

PUT

Replace entity data.

+

Replace entire collection (typically not supported)

Not used

POST

Create a new element in the collection

Create or update an element in the collection.

+

Search on an entity (parametrized post body)

+

Bulk deletion.

DELETE

Delete an entity.

+

Delete an entire collection (typically not supported)

Delete an entity.

+

Delete an entire collection (typically not supported)

+
+

Please consider these guidelines and rationales:

+
+
+
    +
  • +

    We use POST on the collection URL to save an entity (create if no ID provided in payload otherwise update). This avoids pointless discussions in distinctions between PUT and POST and what to do if a create contains an ID in the payload or if an update is missing the ID property or contains a different ID in payload than in URL.

    +
  • +
  • +

    Hence, we do NOT use PUT but always use POST for write operations. As we always have a technical ID for each entity, we can simply distinguish create and update by the presence of the ID property.

    +
  • +
  • +

    Please also note that for (large) bulk deletions you may be forced to use POST instead of DELETE as according to the HTTP standard DELETE must not have payload and URLs are limited in length.

    +
  • +
+
+
+
+
+

Metadata

+
+
+

devonfw has support for the following metadata in REST service invocations:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
NameDescriptionFurther information

X-Correlation-Id

HTTP header for a correlation ID that is a unique identifier to associate different requests belonging to the same session / action

Logging guide

Validation errors

Standardized format for a service to communicate validation errors to the client

Server-side validation is documented in the Validation guide.

+

The protocol to communicate these validation errors is described in REST exception handling.

Pagination

Standardized format for a service to offer paginated access to a list of entities

Server-side support for pagination is documented in the Repository Guide.

+
+
+
+

Recommendations for REST requests and responses

+
+
+

The devonfw proposes, for simplicity, a deviation from the common REST pattern:

+
+
+
    +
  • +

    Using POST for updates (instead of PUT)

    +
  • +
  • +

    Using the payload for addressing resources on POST (instead of identifier on the URL)

    +
  • +
  • +

    Using parametrized POST for searches

    +
  • +
+
+
+

This use of REST will lead to simpler code both on client and on server. We discuss this use on the next points.

+
+
+

The following table specifies how to use the HTTP methods (verbs) for collection and element URIs properly (see wikipedia).

+
+
+

Unparameterized loading of a single resource

+
+
    +
  • +

    HTTP Method: GET

    +
  • +
  • +

    URL example: /services/rest/productmanagement/v1/product/123

    +
  • +
+
+
+

For loading of a single resource, embed the identifier (e.g. 123) of the resource in the URL.

+
+
+

The response contains the resource in JSON format, using a JSON object at the top-level, for example:

+
+
+
+
{
+  "id": 123,
+  "name": "Steak",
+  "color": "brown"
+}
+
+
+
+
+

Unparameterized loading of a collection of resources

+
+
    +
  • +

    HTTP Method: GET

    +
  • +
  • +

    URL example: /services/rest/productmanagement/v1/product

    +
  • +
+
+
+

For loading of a collection of resources, make sure that the size of the collection can never exceed a reasonable maximum size. For parameterized loading (searching, pagination), see below.

+
+
+

The response contains the collection in JSON format, using a JSON object at the top-level, and the actual collection underneath a result key, for example:

+
+
+
+
{
+  "result": [
+    {
+      "id": 123,
+      "name": "Steak",
+      "color": "brown"
+    },
+    {
+      "id": 124,
+      "name": "Broccoli",
+      "color": "green"
+    }
+  ]
+}
+
+
+
+
+

Saving a resource

+
+
    +
  • +

    HTTP Method: POST

    +
  • +
  • +

    URL example: /services/rest/productmanagement/v1/product

    +
  • +
+
+
+

The resource will be passed via JSON in the request body. If updating an existing resource, include the resource’s identifier in the JSON and not in the URL, in order to avoid ambiguity.

+
+
+

If saving was successful, the updated product (e.g. with assigned ID or updated modification counter) is returned.

+
+
+

If saving was unsuccessful, refer below for the format to return errors to the client.

+
+
+
+

Parameterized loading of a resource

+
+
    +
  • +

    HTTP Method: POST

    +
  • +
  • +

    URL example: /services/rest/productmanagement/v1/product/search

    +
  • +
+
+
+

In order to differentiate from an unparameterized load, a special subpath (for example search) is introduced. The parameters are passed via JSON in the request body. An example of a simple, paginated search would be:

+
+
+
+
{
+  "status": "OPEN",
+  "pagination": {
+    "page": 2,
+    "size": 25
+  }
+}
+
+
+
+

The response contains the requested page of the collection in JSON format, using a JSON object at the top-level, the actual page underneath a result key, and additional pagination information underneath a pagination key, for example:

+
+
+
+
{
+  "pagination": {
+    "page": 2,
+    "size": 25,
+    "total": null
+  },
+  "result": [
+    {
+      "id": 123,
+      "name": "Steak",
+      "color": "brown"
+    },
+    {
+      "id": 124,
+      "name": "Broccoli",
+      "color": "green"
+    }
+  ]
+}
+
+
+
+

Compare the code needed on server side to accept this request:

+
+
+
+
  @Path("/category/search")
+  @POST
+  public PaginatedListTo<CategoryEto> findCategorysByPost(CategorySearchCriteriaTo searchCriteriaTo) {
+    return this.dishmanagement.findCategoryEtos(searchCriteriaTo);
+ }
+
+
+
+

With the equivalent code required if doing it the RESTful way by issuing a GET request:

+
+
+
+
 @Path("/category/search")
+  @GET
+  public PaginatedListTo<CategoryEto> findCategorysByPost( @Context UriInfo info) {
+
+    RequestParameters parameters = RequestParameters.fromQuery(info);
+    CategorySearchCriteriaTo criteria = new CategorySearchCriteriaTo();
+    criteria.setName(parameters.get("name", Long.class, false));
+    criteria.setDescription(parameters.get("description", OrderState.class, false));
+    criteria.setShowOrder(parameters.get("showOrder", OrderState.class, false));
+    return this.dishmanagement.findCategoryEtos(criteria);
+
+  }
+
+
+
+

Pagination details

+
+

The client can choose to request a count of the total size of the collection, for example to calculate the total number of available pages. It does so, by specifying the pagination.total property with a value of true.

+
+
+

The service is free to honour this request. If it chooses to do so, it returns the total count as the pagination.total property in the response.

+
+
+
+
+

Deletion of a resource

+
+
    +
  • +

    HTTP Method: DELETE

    +
  • +
  • +

    URL example: /services/rest/productmanagement/v1/product/123

    +
  • +
+
+
+

For deletion of a single resource, embed the identifier of the resource in the URL.

+
+
+
+

Error results

+
+

The general format for returning an error to the client is as follows:

+
+
+
+
{
+  "message": "A human-readable message describing the error",
+  "code": "A code identifying the concrete error",
+  "uuid": "An identifier (generally the correlation id) to help identify corresponding requests in logs"
+}
+
+
+
+

If the error is caused by a failed validation of the entity, the above format is extended to also include the list of individual validation errors:

+
+
+
+
{
+  "message": "A human-readable message describing the error",
+  "code": "A code identifying the concrete error",
+  "uuid": "An identifier (generally the correlation id) to help identify corresponding requests in logs",
+  "errors": {
+    "property failing validation": [
+       "First error message on this property",
+       "Second error message on this property"
+    ],
+    // ....
+  }
+}
+
+
+
+
+
+
+

REST Media Types

+
+
+

The payload of a REST service can be in any format as REST by itself does not specify this. The most established ones that the devonfw recommends are XML and JSON. Follow these links for further details and guidance how to use them properly. JAX-RS and CXF properly support these formats (MediaType.APPLICATION_JSON and MediaType.APPLICATION_XML can be specified for @Produces or @Consumes). Try to decide for a single format for all services if possible and NEVER mix different formats in a service.

+
+
+
+
+

REST Testing

+
+
+

For testing REST services in general consult the testing guide.

+
+
+

For manual testing REST services there are browser plugins:

+
+
+ +
+
+
+
+

Security

+
+
+

Your services are the major entry point to your application. Hence security considerations are important here.

+
+
+

CSRF

+
+

A common security threat is CSRF for REST services. Therefore all REST operations that are performing modifications (PUT, POST, DELETE, etc. - all except GET) have to be secured against CSRF attacks. See CSRF how to do this.

+
+
+
+

JSON top-level arrays

+
+

OWASP earlier suggested to never return JSON arrays at the top-level, to prevent attacks without rationale. +We dug deep and found anatomy-of-a-subtle-json-vulnerability. +To sum it up the attack is many years old and does not work in any recent or relevant browser. +Hence it is fine to use arrays as top-level result in a JSON REST service (means you can return List<Foo> in a Java JAX-RS service).

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-rest.html b/docs/devon4j/1.0/guide-rest.html new file mode 100644 index 00000000..5f4ddf8c --- /dev/null +++ b/docs/devon4j/1.0/guide-rest.html @@ -0,0 +1,776 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

REST

+
+
+

REST (REpresentational State Transfer) is an inter-operable protocol for services that is more lightweight than SOAP. +However, it is no real standard and can cause confusion (see REST philosophy). +Therefore we define best practices here to guide you.

+
+
+
+
+

URLs

+
+
+

URLs are not case sensitive. Hence, we follow the best practice to use only lower-case-letters-with-hyphen-to-separate-words. +For operations in REST we distinguish the following types of URLs:

+
+
+
    +
  • +

    A collection URL is built from the rest service URL by appending the name of a collection. This is typically the name of an entity. Such URL identifies the entire collection of all elements of this type. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity

    +
  • +
  • +

    An element URL is built from a collection URL by appending an element ID. It identifies a single element (entity) within the collection. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/42

    +
  • +
+
+
+

To follow KISS avoid using plural forms (…​/productmanagement/v1/products vs. …​/productmanagement/v1/product/42). Always use singular forms and avoid confusion (except for the rare cases where no singular exists).

+
+
+

The REST URL scheme fits perfect for CRUD operations. +For business operations (processing, calculation, advanced search, etc.) we simply append a collection URL with the name of the business operation. +Then we can POST the input for the business operation and get the result back. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/search

+
+
+
+
+

HTTP Methods

+
+
+

The following table defines the HTTP methods (verbs) and their meaning:

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Usage of HTTP methods
HTTP MethodMeaning

GET

Read data (stateless).

PUT

Create or update data.

POST

Process data.

DELETE

Delete an entity.

+
+

Please also note that for (large) bulk deletions you may be forced to use POST instead of DELETE as according to the HTTP standard DELETE must not have payload and URLs are limited in length.

+
+
+

For general recommendations on HTTP methods for collection and element URLs see REST@wikipedia.

+
+
+
+
+

HTTP Status Codes

+
+
+

Further we define how to use the HTTP status codes for REST services properly. In general the 4xx codes correspond to an error on the client side and the 5xx codes to an error on the server side.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2. Usage of HTTP status codes
HTTP CodeMeaningResponseComment

200

OK

requested result

Result of successful GET

204

No Content

none

Result of successful POST, DELETE, or PUT with empty result (void return)

400

Bad Request

error details

The HTTP request is invalid (parse error, validation failed)

401

Unauthorized

none

Authentication failed

403

Forbidden

none

Authorization failed

404

Not found

none

Either the service URL is wrong or the requested resource does not exist

500

Server Error

error code, UUID

Internal server error occurred, in case of an exception, see REST exception handling

+
+
+
+

JAX-RS

+
+
+

For implementing REST services we use the JAX-RS standard. +As payload encoding we recommend JSON bindings using Jackson. +To implement a REST service you simply add JAX-RS annotations. +Here is a simple example:

+
+
+
+
@ApplicationScoped
+@Path("/imagemanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public class ImagemanagementRestService {
+
+  @Inject
+  private Imagemanagement imagemanagement;
+
+  @GET
+  @Path("/image/{id}/")
+  public ImageDto getImage(@PathParam("id") long id) {
+
+    return this.imagemanagement.findImage(id);
+  }
+}
+
+
+
+

Here we can see a REST service for the business component imagemanagement. The method getImage can be accessed via HTTP GET (see @GET) under the URL path imagemanagement/image/{id} (see @Path annotations) where {id} is the ID of the requested image and will be extracted from the URL and provided as parameter id to the method getImage. It will return its result (ImageDto) as JSON (see @Produces annotation - you can also extend RestService marker interface that defines these annotations for JSON). As you can see it delegates to the logic component imagemanagement that contains the actual business logic while the service itself only exposes this logic via HTTP. The REST service implementation is a regular CDI bean that can use dependency injection.

+
+
+ + + + + +
+ + +With JAX-RS it is important to make sure that each service method is annotated with the proper HTTP method (@GET,@POST,etc.) to avoid unnecessary debugging. So you should take care not to forget to specify one of these annotations. +
+
+
+

Service-Interface

+
+

You may also separate API and implementation in case you want to reuse the API for service-client:

+
+
+
+
@Path("/imagemanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public interface ImagemanagementRestService {
+
+  @GET
+  @Path("/image/{id}/")
+  ImageEto getImage(@PathParam("id") long id);
+
+}
+
+@Named("ImagemanagementRestService")
+public class ImagemanagementRestServiceImpl implements ImagemanagementRestService {
+
+  @Override
+  public ImageEto getImage(long id) {
+
+    return this.imagemanagement.findImage(id);
+  }
+
+}
+
+
+
+
+

JAX-RS Configuration

+
+

Starting from CXF 3.0.0 it is possible to enable the auto-discovery of JAX-RS roots.

+
+
+

When the jaxrs server is instantiated all the scanned root and provider beans (beans annotated with javax.ws.rs.Path and javax.ws.rs.ext.Provider) are configured.

+
+
+
+

REST Exception Handling

+
+

For exceptions a service needs to have an exception façade that catches all exceptions and handles them by writing proper log messages and mapping them to a HTTP response with an according HTTP status code. Therefore the devonfw provides a generic solution via RestServiceExceptionFacade. You need to follow the exception guide so that it works out of the box because the façade needs to be able to distinguish between business and technical exceptions. +Now your service may throw exceptions but the façade will automatically handle them for you.

+
+
+

The general format for returning an error to the client is as follows:

+
+
+
+
{
+  "message": "A human-readable message describing the error",
+  "code": "A code identifying the concrete error",
+  "uuid": "An identifier (generally the correlation id) to help identify corresponding requests in logs"
+}
+
+
+
+
+

Pagination details

+
+

We recommend to use spring-data repositories for database access that already come with pagination support. +Therefore, when performing a search, you can include a Pageable object. +Here is a JSON example for it:

+
+
+
+
{ "pageSize": 20, "pageNumber": 0, "sort": [] }
+
+
+
+

By increasing the pageNumber the client can browse and page through the hits.

+
+
+

As a result you will receive a Page. +It is a container for your search results just like a Collection but additionally contains pagination information for the client. +Here is a JSON example:

+
+
+
+
{ "totalElements": 1022,
+  "pageable": { "pageSize": 20, "pageNumber": 0 },
+  "content": [ ... ] }
+
+
+
+

The totalElements property contains the total number of hits. +This can be used by the client to compute the total number of pages and render the pagination links accordingly. +Via the pageable property the client gets back the Pageable properties from the search request. +The actual hits for the current page are returned as array in the content property.

+
+
+
+
+
+

REST Testing

+
+
+

For testing REST services in general consult the testing guide.

+
+
+

For manual testing REST services there are browser plugins:

+
+
+ +
+
+
+
+

Security

+
+
+

Your services are the major entry point to your application. Hence security considerations are important here.

+
+
+

CSRF

+
+

A common security threat is CSRF for REST services. Therefore all REST operations that are performing modifications (PUT, POST, DELETE, etc. - all except GET) have to be secured against CSRF attacks. See CSRF how to do this.

+
+
+
+

JSON top-level arrays

+
+

OWASP earlier suggested to never return JSON arrays at the top-level, to prevent attacks without rationale. +We dug deep and found anatomy-of-a-subtle-json-vulnerability. +To sum it up the attack is many years old and does not work in any recent or relevant browser. +Hence it is fine to use arrays as top-level result in a JSON REST service (means you can return List<Foo> in a Java JAX-RS service).

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-scm.html b/docs/devon4j/1.0/guide-scm.html new file mode 100644 index 00000000..193b1c63 --- /dev/null +++ b/docs/devon4j/1.0/guide-scm.html @@ -0,0 +1,474 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ + +
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-security.html b/docs/devon4j/1.0/guide-security.html new file mode 100644 index 00000000..f91e3889 --- /dev/null +++ b/docs/devon4j/1.0/guide-security.html @@ -0,0 +1,612 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Security

+
+
+

Security is today's most important cross-cutting concern of an application and an enterprise IT-landscape. We seriously care about security and give you detailed guides to prevent pitfalls, vulnerabilities, and other disasters. While many mistakes can be avoided by following our guidelines you still have to consider security and think about it in your design and implementation. The security guide will not automatically prevent you from any harm, but will provide you hints and best practices already used in different software products.

+
+
+

An important aspect of security is proper authentication and authorization as described in access-control. In the following we discuss about potential vulnerabilities and protection to prevent them.

+
+
+
+
+

Vulnerabilities and Protection

+
+
+

Independent from classical authentication and authorization mechanisms there are many common pitfalls that can lead to vulnerabilities and security issues in your application such as XSS, CSRF, SQL-injection, log-forging, etc. A good source of information about this is the OWASP. +We address these common threats individually in security sections of our technological guides as a concrete solution to prevent an attack typically depends on the according technology. The following table illustrates common threats and contains links to the solutions and protection-mechanisms provided by the devonfw:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Security threats and protection-mechanisms
ThreatProtectionLink to details

A1 Injection

validate input, escape output, use proper frameworks

SQL Injection

A2 Broken Authentication

encrypt all channels, use a central identity management with strong password-policy

Authentication

A3 Sensitive Data Exposure

Use secured exception facade, design your data model accordingly

REST exception handling

A4 XML External Entities

Prefer JSON over XML, ensure FSP when parsing (external) XML

XML guide

A5 Broken Access Control

Ensure proper authorization for all use-cases, use @DenyAll as default to enforce

Access-control guide especially method authorization

A6 Security Misconfiguration

Use devon4j application template and guides to avoid

tutorial-newapp and sensitive configuration

A7 Cross-Site Scripting

prevent injection (see A1) for HTML, JavaScript and CSS and understand same-origin-policy

client-layer

A8 Insecure Deserialization

Use simple and established serialization formats such as JSON, prevent generic deserialization (for polymorphic types)

JSON guide especially inheritance, XML guide

A9 Using Components with Known Vulnerabilities

subscribe to security newsletters, recheck products and their versions continuously, use devonfw dependency management

CVE newsletter and dependency check

A10 Insufficient_Logging & Monitoring

Ensure to log all security related events (login, logout, errors), establish effective monitoring

Logging guide and monitoring guide

Insecure Direct Object References

Using direct object references (IDs) only with appropriate authorization

logic-layer

Cross-Site Request Forgery (CSRF)

secure mutable service operations with an explicit CSRF security token sent in HTTP header and verified on the server

CSRF guide

Log-Forging

Escape newlines in log messages

logging security

Unvalidated Redirects and Forwards

Avoid using redirects and forwards, in case you need them do a security audit on the solution.

devonfw proposes to use rich-clients (SPA/RIA). We only use redirects for login in a safe way.

+
+
+
+

Advanced Security

+
+
+

While OWASP Top 10 covers the basic aspects of application security, there are advanced standards such as ASVS. +In devonfw we address this in the +Application Security Quick Solution Guide.

+
+
+
+
+

Tools

+
+
+

Dependency Check

+
+

To address the threat Using Components with Known Vulnerabilities we recommend to use OWASP dependency check that ships with a maven plugin and can analyze your dependencies for known CVEs. +In order to run this check, you can simply call this command on any maven project:

+
+
+
+
mvn org.owasp:dependency-check-maven:6.1.5:aggregate
+
+
+
+ + + + + +
+ + +The version is just for completeness. You should check yourself for using a recent version of the plugin. +
+
+
+

If you build a devon4j spring application from our app-template you can activate the dependency check even easier with the security profile:

+
+
+
+
mvn clean install -P security
+
+
+
+

This does not run by default as it causes some overhead for the build performance. However, consider to build this in your CI at least nightly. +After the dependency check is performed, you will find the results in target/dependency-check-report.html of each module. The report will also be generated when the site is built (mvn site) even without the profile.

+
+
+
+

Penetration Testing

+
+

For penetration testing (testing for vulnerabilities) of your web application, we recommend the following tools:

+
+
+ +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-service-client.html b/docs/devon4j/1.0/guide-service-client.html new file mode 100644 index 00000000..3c28cb2e --- /dev/null +++ b/docs/devon4j/1.0/guide-service-client.html @@ -0,0 +1,459 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Service Client

+
+
+

This guide is about consuming (calling) services from other applications (micro-services). For providing services, see the Service-Layer Guide. Services can be consumed by the client or the server. As the client is typically not written in Java, you should consult the according guide for your client technology. In case you want to call a service within your Java code, this guide is the right place to get help.

+
+
+
+
+

Motivation

+
+
+

Various solutions already exist for calling services, such as RestTemplate from spring or the JAX-RS client API. Furthermore, each and every service framework offers its own API as well. These solutions might be suitable for very small and simple projects (with one or two such invocations). However, with the trend of microservices, the invocation of a service becomes a very common use-case that occurs all over the place. You typically need a solution that is very easy to use but supports flexible configuration, adding headers for authentication, mapping of errors from the server, logging success/errors with duration for performance analysis, support for synchronous and asynchronous invocations, etc. This is exactly what this devon4j service-client solution brings to you.

+
+
+
+
+

Usage

+
+
+

Spring

+
+
+

For Spring, follow the Spring rest-client guide.

+
+
+

Quarkus

+
+
+

For Quarkus, we recommend to follow the official Quarkus rest-client guide.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-service-layer.html b/docs/devon4j/1.0/guide-service-layer.html new file mode 100644 index 00000000..e52f474e --- /dev/null +++ b/docs/devon4j/1.0/guide-service-layer.html @@ -0,0 +1,563 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Service Layer

+
+
+

The service layer is responsible for exposing functionality made available by the logical layer to external consumers over a network via technical protocols.

+
+
+
+
+

Types of Services

+
+
+

Before you start creating your services you should consider some general design aspects:

+
+
+
    +
  • +

    Do you want to create a RPC service?

    +
  • +
  • +

    Or is your problem better addressed by messaging or eventing?

    +
  • +
  • +

    Who will consume your service?

    +
    +
      +
    • +

      Do you have one or multiple consumers?

      +
    • +
    • +

      Do web-browsers have to use your service?

      +
    • +
    • +

      Will apps from other vendors or parties have to consume your service that you can not influence if the service may have to change or be extended?

      +
    • +
    +
    +
  • +
+
+
+

For RPC a common choice is REST but there are also interesting alternatives like gRPC. We also have a guide for SOAP but this technology should rather be considered as legacy and is not recommended for new services.

+
+
+

When it comes to messaging in Java the typical answer will be JMS. However, a very promising alternative is Kafka.

+
+
+
+
+

Versioning

+
+
+

For RPC services consumed by other applications we use versioning to prevent incompatibilities between applications when deploying updates. This is done by the following conventions:

+
+
+
    +
  • +

    We define a version number and prefix it with v (e.g. v1).

    +
  • +
  • +

    If we support previous versions we use that version numbers as part of the Java package defining the service API (e.g. com.foo.application.component.service.api.v1)

    +
  • +
  • +

    We use the version number as part of the service name in the remote URL (e.g. https://application.foo.com/services/rest/component/v1/resource)

    +
  • +
  • +

    Whenever breaking changes are made to the API, create a separate version of the service and increment the version (e.g. v1 → v2). The implementations of the different versions of the service contain compatibility code and delegate to the same unversioned use-case of the logic layer whenever possible.

    +
  • +
  • +

    For maintenance and simplicity, avoid keeping more than one previous version.

    +
  • +
+
+
+
+
+

Interoperability

+
+
+

For services that are consumed by clients with different technology, interoperability is required. This is addressed by selecting the right protocol, following protocol-specific best practices and following our considerations especially simplicity.

+
+
+
+
+

Service Considerations

+
+
+

The term service is quite generic and therefore easily misunderstood. It is a unit exposing coherent functionality via a well-defined interface over a network. For the design of a service, we consider the following aspects:

+
+
+
    +
  • +

    self-contained
    +The entire API of the service shall be self-contained and have no dependencies on other parts of the application (other services, implementations, etc.).

    +
  • +
  • +

    idempotence
    +E.g. creation of the same master-data entity has no effect (no error)

    +
  • +
  • +

    loosely coupled
    +Service consumers have minimum knowledge and dependencies on the service provider.

    +
  • +
  • +

    normalized
    +Complete, no redundancy, minimal

    +
  • +
  • +

    coarse-grained
    +Service provides rather large operations (save entire entity or set of entities rather than individual attributes)

    +
  • +
  • +

    atomic
    +Process individual entities (for processing large sets of data, use a batch instead of a service)

    +
  • +
  • +

    simplicity
    +Avoid polymorphism, RPC methods with unique name per signature and no overloading, avoid attachments (consider separate download service), etc.

    +
  • +
+
+
+
+
+

Security

+
+
+

Your services are the major entry point to your application. Hence, security considerations are important here.

+
+
+

See REST Security.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-service-versioning.html b/docs/devon4j/1.0/guide-service-versioning.html new file mode 100644 index 00000000..97efc252 --- /dev/null +++ b/docs/devon4j/1.0/guide-service-versioning.html @@ -0,0 +1,716 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Service-Versioning

+
+
+

This guide describes the aspects and details of versioning services

+
+
+
+
+

Motivation

+
+
+

Why versioning of services? First of all, you should only care about this topic if you really have to. Service versioning is complex and requires effort (time and budget). The best way to avoid this is to be smart in the first place when designing the service API. +Further, if you are creating services where the only consumer is e.g. the web-client that you deploy together with the consumed services then you can change your service without the overhead to create new service versions and keeping old service versions for compatibility.

+
+
+

However, if the following indicators are given you typically need to do service versioning:

+
+
+
    +
  • +

    Your service is part of a complex and distributed IT landscape

    +
  • +
  • +

    Your service requires incompatible changes

    +
  • +
  • +

    There are many consumers or there is at least one (relevant) consumer that can not be updated at the same time or is entirely out of control (unknown or totally different party/company)

    +
  • +
+
+
+

What are incompatible changes?

+
+
+
    +
  • +

    Almost any change when SOAP is used (as it changes the WSDL and breaks the contract). Therefore, we recommend to use REST instead. Then, only the following changes are critical.

    +
  • +
  • +

    A change where existing properties (attributes) have to change their name

    +
  • +
  • +

    A change where existing features (properties, operations, etc.) have to change their semantics (meaning)

    +
  • +
+
+
+

What changes do not cause incompatibilities?

+
+
+
    +
  • +

    Adding new service operations is entirely uncritical with REST.

    +
  • +
  • +

    Adding new properties is only a problem in the following cases:

    +
    +
      +
    • +

      Adding new mandatory properties to the input of a service is causing incompatibilities. This problem can be avoided by contract-design.

      +
    • +
    • +

      If a consumer is using a service to read data, modify it and then save it back via a service and a property is added to the data, then this property might be lost. This is not a problem with dynamic languages such as JavaScript/TypeScript but with strictly typed languages such as Java. In Java you will typically use structured typed transfer-objects (and not Map<String, Object>) so new properties that have been added but are not known to the consumer can not be mapped to the transfer-object and will be lost. When saving that transfer-object later the property will be gone. It might be impossible to determine the difference between a lost property and a property that was removed on purpose. This is a general problem that you need to be aware of and that you have to consider by your design in such situations.

      +
    • +
    +
    +
  • +
+
+
+

Even if you hit an indicator for incompatible changes you can still think about adding a new service operation instead of changing an existing one (and deprecating the old one). Be creative to simplify and avoid extra effort.

+
+
+
+
+

Procedure

+
+
+

The procedure when rolling out incompatible changes is illustrated by the following example:

+
+
+
+
+------+  +------+
+| App1 |  | App2 |
++---+--+  +--+---+
+    |        |
+    +---+----+
+        |
++-------+--------+
+|      Sv1       |
+|                |
+|      App3      |
++----------------+
+
+
+
+

So, here we see a simple example where App3 provides a Service S in Version v1 that is consumed both by App1 and App2.

+
+
+

Now for some reason the service S has to be changed in an incompatible way to make it future-proof for demands. However, upgrading all 3 applications at the same time is not possible in this case for whatever reason. Therefore, service versioning is applied for the changes of S.

+
+
+
+
+------+  +------+
+| App1 |  | App2 |
++---+--+  +--+---+
+    |        |
+    +--------+
+    |
++---+------------+
+|  Sv1  |  Sv2   |
+|                |
+|      App3*     |
++----------------+
+
+
+
+

Now, App3 has been upgraded and the new release was deployed. A new version v2 of S has been added while v1 is still kept for compatibility reasons and that version is still used by App1 and App2.

+
+
+
+
+------+  +------+
+| App1 |  | App2*|
++---+--+  +--+---+
+    |        |
+    |        |
+    |        |
++---+--------+---+
+|  Sv1  |  Sv2   |
+|                |
+|      App3      |
++----------------+
+
+
+
+

Now, App2 has been updated and deployed and it is using the new version v2 of S.

+
+
+
+
+------+  +------+
+| App1*|  | App2 |
++---+--+  +--+---+
+    |        |
+    +--------+
+             |
++------------+---+
+|  Sv1  |  Sv2   |
+|                |
+|      App3      |
++----------------+
+
+
+
+

Now, also App1 has been updated and deployed and it is using the new version v2 of S. The version v1 of S is not used anymore. This can be verified via logging and monitoring.

+
+
+
+
+------+  +------+
+| App1 |  | App2 |
++---+--+  +--+---+
+    |        |
+    +--------+
+             |
++------------+---+
+|          Sv2   |
+|                |
+|      App3*     |
++----------------+
+
+
+
+

Finally, version v1 of the service S was removed from App3 and the new release has been deployed.

+
+
+
+
+

Versioning Schema

+
+
+

In general anything can be used to differentiate versions of a service. Possibilities are:

+
+
+
    +
  • +

    Code names (e.g. Strawberry, Blueberry, Grapefruit)

    +
  • +
  • +

    Timestamps (YYYYMMDD-HHmmSS)

    +
  • +
  • +

    Sequential version numbers (e.g. v1, v2, v3)

    +
  • +
  • +

    Composed version numbers (e.g. 1.0.48-pre-alpha-3-20171231-235959-Strawberry)

    +
  • +
+
+
+

As we are following the KISS principle (see key principles) we propose to use sequential version numbers. These are short, clear, and easy while still allowing to see what version is after another one. Especially composed version numbers (even 1.1 vs. 2.0) lead to decisions and discussions that easily waste more time than adding value. It is still very easy to maintain an Excel sheet or release-notes document that is explaining the changes for each version (v1, v2, v3) of a particular service.

+
+
+

We suggest to always add the version schema to the service URL to be prepared for service versioning even if service versioning is not (yet) actively used. For simplicity it is explicitly stated that you may even do incompatible changes to the current version (typically v1) of your service if you can update the according consumers within the same deployment.

+
+
+
+
+

Practice

+
+
+

So assuming you know that you have to do service versioning, the question is how to do it practically in the code. +The approach for your devon4j project in case of code-first should be as described below:

+
+
+
    +
  • +

    Determine which types in the code need to be changed. It is likely to be the API and implementation of the according service but it may also impact transfer objects and potentially even datatypes.

    +
  • +
  • +

    Create new packages for all these concerned types containing the current version number (e.g. v1).

    +
  • +
  • +

    Copy all these types to that new packages.

    +
  • +
  • +

    Rename these copies so they carry the version number as suffix (e.g. V1).

    +
  • +
  • +

    Increase the version of the service in the unversioned package (e.g. from v1 to v2).

    +
  • +
  • +

    Now you have two versions of the same service (e.g. v1 and v2) but so far they behave exactly the same.

    +
  • +
  • +

    You start with your actual changes and modify the original files that have been copied before.

    +
  • +
  • +

    You will also ensure the links (import statements) of the copied types point to the copies with the version number

    +
  • +
  • +

    This will cause incompatibilities (and compile errors) in the copied service. Therefore, you need to fix that service implementation to map from the old API to the new API and behavior. In some cases, this may be easy (e.g. mapping x.y.z.v1.FooTo to x.y.z.FooTo using bean-mapping with some custom mapping for the incompatible changes), in other cases this can get very complex. Be aware of this complexity from the start before you make your decision about service versioning.

    +
  • +
  • +

    As far as possible this mapping should be done in the service-layer, not to pollute your business code in the core-layer with versioning-aspects. If there is no way to handle it in the service layer, e.g. you need some data from the persistence-layer, implement the "mapping" in the core-layer then, but don’t forget to remove this code, when removing the old service version.

    +
  • +
  • +

    Finally, ensure that both the old service behaves as before as well as the new service works as planned.

    +
  • +
+
+
+

Modularization

+
+

For modularization, we also follow the KISS principle (see key principles): +we suggest to have one api module per application that will contain the most recent version of your service and get released with every release-version of the application. The compatibility code with the versioned packages will be added to the core module and therefore is not exposed via the api module (because it has already been exposed in the previous release of the app). This way, you can always determine for sure which version of a service is used by another application just by its maven dependencies.

+
+
+

The KISS approach with only a single module that may contain multiple services (e.g. one for each business component) will cause problems when you want to have mixed usages of service versions: You can not use an old version of one service and a new version of another service from the same APP as then you would need to have its API module twice as a dependency on different versions, which is not possible. However, to avoid complicated overhead we always suggest to follow this easy approach. Only if you come to the point that you really need this complexity you can still solve it (even afterwards by publishing another maven artefact). As we are all on our way to build more but smaller applications (SOA, microservices, etc.) we should always start simple and only add complexity when really needed.

+
+
+

The following example gives an idea of the structure:

+
+
+
+
/«my-app»
+├──/api
+|  └──/src/main/java/
+|     └──/«rootpackage»/«application»/«component»
+|        ├──/common/api/to
+|        |  └──FooTo
+|        └──/service/api/rest
+|           └──FooRestService
+└──/core
+   └──/src/main/java/
+      └──«rootpackage»/«application»/«component»
+         ├──/common/api/to/v1
+         |  └──FooToV1
+         └──/service
+            ├──/api/rest/v1
+            |  └──FooRestServiceV1
+            └──impl/rest
+               ├──/v1
+               |  └── FooRestServiceImplV1
+               └──FooRestServiceImpl
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-soap.html b/docs/devon4j/1.0/guide-soap.html new file mode 100644 index 00000000..99d2221b --- /dev/null +++ b/docs/devon4j/1.0/guide-soap.html @@ -0,0 +1,520 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

SOAP

+
+
+

SOAP is a common protocol for services that is rather complex and heavy. It allows to build inter-operable and well specified services (see WSDL). SOAP is transport-neutral, which is not only an advantage. We strongly recommend to use HTTPS transport and ignore additional complex standards like WS-Security and use established HTTP-Standards such as RFC2617 (and RFC5280).

+
+
+
+
+

JAX-WS

+
+
+

For building web-services with Java we use the JAX-WS standard. +There are two approaches:

+
+
+
    +
  • +

    code first

    +
  • +
  • +

    contract first

    +
  • +
+
+
+

Here is an example in case you define a code-first service.

+
+
+

Web-Service Interface

+
+

We define a regular interface to define the API of the service and annotate it with JAX-WS annotations:

+
+
+
+
@WebService
+public interface TablemanagmentWebService {
+
+  @WebMethod
+  @WebResult(name = "message")
+  TableEto getTable(@WebParam(name = "id") String id);
+
+}
+
+
+
+
+

Web-Service Implementation

+
+

And here is a simple implementation of the service:

+
+
+
+
@Named
+@WebService(endpointInterface = "com.devonfw.application.mtsj.tablemanagement.service.api.ws.TablemanagmentWebService")
+public class TablemanagementWebServiceImpl implements TablemanagmentWebService {
+
+  private Tablemanagement tableManagement;
+
+  @Override
+  public TableEto getTable(String id) {
+
+    return this.tableManagement.findTable(id);
+  }
+
+
+
+
+
+
+

SOAP Custom Mapping

+
+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to write adapters for JAXB (see XML).

+
+
+
+
+

SOAP Testing

+
+
+

For testing SOAP services in general consult the testing guide.

+
+
+

For testing SOAP services manually we strongly recommend SoapUI.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-sql.html b/docs/devon4j/1.0/guide-sql.html new file mode 100644 index 00000000..6aaeae92 --- /dev/null +++ b/docs/devon4j/1.0/guide-sql.html @@ -0,0 +1,571 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

SQL

+
+
+

For general guides on dealing or avoiding SQL, preventing SQL-injection, etc. you should study domain layer.

+
+
+
+
+

Naming Conventions

+
+
+

Here we define naming conventions that you should follow whenever you write SQL files:

+
+
+
    +
  • +

    All SQL-Keywords in UPPER CASE

    +
  • +
  • +

    Indentation should be 2 spaces as suggested by devonfw for every format.

    +
  • +
+
+
+

DDL

+
+

The naming conventions for database constructs (tables, columns, triggers, constraints, etc.) should be aligned with your database product and their operators. +However, when you have the freedom of choice and a modern case-sensitive database, you can simply use your code conventions also for database constructs to avoid explicitly mapping each and every property (e.g. RestaurantTable vs. RESTAURANT_TABLE).

+
+
+
    +
  • +

    Define columns and constraints inline in the statement to create the table

    +
  • +
  • +

    Indent column types so they all start in the same text column

    +
  • +
  • +

    Constraints should be named explicitly (to get a reasonable hint in error messages) with:

    +
    +
      +
    • +

      PK_«table» for primary key (name optional here as PK constraint are fundamental)

      +
    • +
    • +

      FK_«table»_«property» for foreign keys («table» and «property» are both on the source where the foreign key is defined)

      +
    • +
    • +

      UC_«table»_«property»[_«propertyN»]* for unique constraints

      +
    • +
    • +

      CK_«table»_«check» for check constraints («check» describes the check, if it is defined on a single property it should start with the property).

      +
    • +
    +
    +
  • +
  • +

    Old RDBMS had hard limitations for names (e.g. 30 characters). Please note that recent databases have overcome these very low length limitations. However, keep your names short but precise and try to define common abbreviations in your project for the corresponding (business) terms. Especially do not just truncate the names at the limit.

    +
  • +
  • +

    If possible add comments on tables and columns to help DBAs understand your schema. This is also honored by many tools (not only DBA-tools).

    +
  • +
+
+
+

Here is a brief example of a DDL:

+
+
+
+
CREATE SEQUENCE HIBERNATE_SEQUENCE START WITH 1000000;
+
+-- *** Table ***
+CREATE TABLE RESTAURANT_TABLE (
+  ID                   NUMBER(19) NOT NULL,
+  MODIFICATION_COUNTER INTEGER NOT NULL,
+  SEATS                INTEGER NOT NULL,
+  CONSTRAINT PK_TABLE PRIMARY KEY(ID)
+);
+COMMENT ON TABLE RESTAURANT_TABLE IS 'The physical tables inside the restaurant.';
+-- *** Order ***
+CREATE TABLE RESTAURANT_ORDER (
+  ID                   NUMBER(19) NOT NULL,
+  MODIFICATION_COUNTER INTEGER NOT NULL,
+  TABLE_ID             NUMBER(19) NOT NULL,
+  TOTAL                DECIMAL(5, 2) NOT NULL,
+  CREATION_DATE        TIMESTAMP NOT NULL,
+  PAYMENT_DATE         TIMESTAMP,
+  STATUS               VARCHAR2(10 CHAR) NOT NULL,
+  CONSTRAINT PK_ORDER PRIMARY KEY(ID),
+  CONSTRAINT FK_ORDER_TABLE_ID FOREIGN KEY(TABLE_ID) REFERENCES RESTAURANT_TABLE(ID)
+);
+COMMENT ON TABLE RESTAURANT_ORDER IS 'An order and bill at the restaurant.';
+...
+
+
+
+

ATTENTION: Please note that TABLE and ORDER are reserved keywords in SQL and you should avoid using such keywords to prevent problems.

+
+
+
+

Data

+
+

For insert, update, delete, etc. of data SQL scripts should additionally follow these guidelines:

+
+
+
    +
  • +

    Inserts always with the same order of columns in blocks for each table.

    +
  • +
  • +

    Insert column values always starting with ID, MODIFICATION_COUNTER, [DTYPE, ] …​

    +
  • +
  • +

    List columns with fixed length values (boolean, number, enums, etc.) before columns with free text to support alignment of multiple insert statements

    +
  • +
  • +

    Pro Tip: Get familiar with column mode of advanced editors such as notepad++ when editing large blocks of similar insert statements.

    +
  • +
+
+
+
+
INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (0, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (1, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (2, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (3, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (4, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (5, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (6, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (7, 1, 8);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (8, 1, 8);
+...
+
+
+
+

See also Database Migrations.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-structure-classic.html b/docs/devon4j/1.0/guide-structure-classic.html new file mode 100644 index 00000000..51b08215 --- /dev/null +++ b/docs/devon4j/1.0/guide-structure-classic.html @@ -0,0 +1,633 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Classic project structure

+
+
+

In this section we describe the classic project structure as initially proposed for Java in devonfw. +It is still valid and fully supported. +However, if you want to start a new project, please consider using the modern structure.

+
+
+
+
+

Modules

+
+
+

The structure of a devon4j application is divided into the following modules:

+
+
+
    +
  • +

    api: module containing the API of your application. The API contains the required artifacts to interact with your application via remote services. This can be REST service interfaces, transfer-objects with their interfaces and datatypes but also OpenAPI or gRPC contracts.

    +
  • +
  • +

    core: maven module containing the core of the application with service implementation, as well as the entire logic layer and dataaccess layer.

    +
  • +
  • +

    batch: optional module for batch layer

    +
  • +
  • +

    server: module that bundles the entire app (core with optional batch) typically as a bootified WAR file.

    +
  • +
+
+
+
+
+

Deployment

+
+
+
+
+

Make jar not war

+
+
+
+— Josh Long +
+
+
+

First of all it is important to understand that the above defined modules aim to make api, core, and batch reusable artifacts, that can be used as a regular maven dependency. +On the other hand to build and deploy your application you want a final artifact that contains all required 3rd party libraries. +This artifact is not reusable as a maven dependency. +That is exactly the purpose of the server module to build and package this final deployment artifact. +By default we first build a regular WAR file with maven in your server/target directory (*-server-«version».war) and in a second step create a bootified WAR out of this (*-server-bootified.war). +The bootified WAR file can then be started standalone (java -jar «filename».war). +However, it is also possible to deploy the same WAR file to a servlet container like tomcat or jetty. +As application servers and externally provided servlet containers are not recommended anymore for various reasons (see JEE), you may also want to create a bootified JAR file instead. +All you need to do in that case is to change the packaging in your server/pom.xml from war to jar.

+
+
+
+
+

Package Structure

+
+
+

The package structure of your code inside src/main/java (and src/test/java) of your modules is described in our coding conventions in the sections packages. A full mapping of the architecture and the different code elements to the packaging is described in the following section.

+
+
+
+
+

Layers

+
+
+

The package structure of your code inside src/main/java (and src/test/java) of your app is described in our coding conventions in the sections packages. +The following table describes our classic approach for packaging and layering:

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Traditional generic devon4j layers
Layer«layer»

service

service

logic

logic

data-access

dataaccess

batch (optional)

batch

client (optional)

client

common

common

+
+
+
+

Architecture Mapping

+
+
+

In order to help you to map the architecture, packaging, layering, etc. to the code and see where different code elements should be placed, +we provide this architecture mapping:

+
+
+
+
«root»
+├──.«component»
+|  ├──.common
+|  |  ├──.api[.«detail»]
+|  |  |  ├──.datatype
+|  |  |  |  └──.«Datatype» (api)
+|  |  |  └──.«BusinessObject» (api)
+|  |  └──.impl[.«detail»]
+|  |     ├──.«Aspect»ConfigProperties (core)
+|  |     ├──.«Datatype»JsonSerializer (core)
+|  |     └──.«Datatype»JsonDeserializer (core)
+|  ├──.dataaccess
+|  |  ├──.api[.«detail»]
+|  |  |  ├──.repo
+|  |  |  |  └──.«BusinessObject»Repository (core)
+|  |  |  ├──.dao (core) [alternative to repo]
+|  |  |  |  └──.«BusinessObject»Dao (core) [alternative to Repository]
+|  |  |  └──.«BusinessObject»Entity (core)
+|  |  └──.impl[.«detail»]
+|  |     ├──.dao (core) [alternative to repo]
+|  |     |  └──.«BusinessObject»DaoImpl (core) [alternative to Repository]
+|  |     └──.«Datatype»AttributeConverter (core)
+|  ├──.logic
+|  |  ├──.api
+|  |  |  ├──.[«detail».]to
+|  |  |  |   ├──.«MyCustom»To (api)
+|  |  |  |   ├──.«DataStructure»Embeddable (api)
+|  |  |  |   ├──.«BusinessObject»Eto (api)
+|  |  |  |   └──.«BusinessObject»«Subset»Cto (api)
+|  |  |  ├──.[«detail».]usecase
+|  |  |  |   ├──.UcFind«BusinessObject» (core)
+|  |  |  |   ├──.UcManage«BusinessObject» (core)
+|  |  |  |   └──.Uc«Operation»«BusinessObject» (core)
+|  |  |  └──.«Component» (core)
+|  |  ├──.base
+|  |  |  └──.[«detail».]usecase
+|  |  |     └──.Abstract«BusinessObject»Uc (core)
+|  |  └──.impl
+|  |     ├──.[«detail».]usecase
+|  |     |   ├──.UcFind«BusinessObject»Impl (core)
+|  |     |   ├──.UcManage«BusinessObject»Impl (core)
+|  |     |   └──.Uc«Operation»«BusinessObject»Impl (core)
+|  |     └──.«Component»Impl (core)
+|  └──.service
+|     ├──.api[.«detail»]
+|     |  ├──.rest
+|     |  |  └──.«Component»RestService (api)
+|     |  └──.ws
+|     |     └──.«Component»WebService (api)
+|     └──.impl[.«detail»]
+|        ├──.jms
+|        |  └──.«BusinessObject»JmsListener (core)
+|        ├──.rest
+|        |  └──.«Component»RestServiceImpl (core)
+|        └──.ws
+|           └──.«Component»WebServiceImpl (core)
+├──.general
+│  ├──.common
+│  |  ├──.api
+|  |  |  ├──.to
+|  |  |  |  ├──.AbstractSearchCriteriaTo (api)
+|  |  |  └──.ApplicationEntity
+│  |  ├──.base
+|  |  |  └──.AbstractBeanMapperSupport (core)
+│  |  └──.impl
+│  |     ├──.config
+│  |     |  └──.ApplicationObjectMapperFactory (core)
+│  |     └──.security
+│  |        └──.ApplicationWebSecurityConfig (core)
+│  ├──.dataaccess
+│  |  └──.api
+|  |     └──.ApplicationPersistenceEntity (core)
+│  ├──.logic
+│  |  └──.base
+|  |     ├──.AbstractComponentFacade (core)
+|  |     ├──.AbstractLogic (core)
+|  |     └──.AbstractUc (core)
+|  └──.service
+|     └──...
+└──.SpringBootApp (core)
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-structure-modern.html b/docs/devon4j/1.0/guide-structure-modern.html new file mode 100644 index 00000000..1d97e334 --- /dev/null +++ b/docs/devon4j/1.0/guide-structure-modern.html @@ -0,0 +1,554 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Modern project structure

+
+
+

With trends such as cloud, microservices, lean, and agile we decided on a more modern project structure that fits better to recent market trends. +When starting new projects with devonfw and especially in the context of cloud-native development we strongly recommend this modern approach over the classic structure.

+
+
+
+
+

Modules

+
+
+

Due to trends such as microservices we are building smaller apps compared to moduliths. +For simplicity we therefore do not split our app into different modules and keep everything top-level and easy.

+
+
+

In addition to java and resources we also add helm for helm templates and docker for docker scripts (e.g. Dockerfile) in src/main:

+
+
+
+
├──/src
+|  ├──/main
+|  |  ├──/docker
+|  |  ├──/helm
+|  |  ├──/java
+|  |  └──/resources
+|  └──/test
+|     ├──/java
+|     └──/resources
+└──/pom.xml
+
+
+
+
+
+

Deployment

+
+
+

For modern projects we strongly recommend that your build process is generating the final deliverable as an OCI compliant container. +Further, to go fully cloud-native you should build your app as a native image via GraalVM AOT compiler. +Therefore we recommend to use quarkus as your main framework. +In case you want to go with spring you may consider using spring-native.

+
+
+
+
+

Layers

+
+
+

The package structure of your code inside src/main/java (and src/test/java) of your app is described in our coding conventions in the sections packages. +For the modern project structure the layers are defined by the following table:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Layer«layer»Description

service

service

The service layer exposing functionality via its remote API. Typical protocol is REST. May also be any other protocol you are using such as gRPC.

domain

domain

The domain with the data-model and DB access. Use sub-package (in «detail») repository for repository and dao for DAOs. Also we recommend to put entities in model sub-package.

logic

logic

The logic layer with the functionality providing the business value.

common

common

cross-cutting code not assigned to a technical layer.

+
+
+
+

Architecture Mapping

+
+
+

In order to help you to map the architecture, packaging, layering, etc. to the code and see where different code elements should be placed, +we provide this architecture mapping:

+
+
+
+
«root»
+├──.«component»
+|  ├──.domain
+|  |  ├──.repo
+|  |  |  ├──.«BusinessObject»Repository
+|  |  |  ├──.«BusinessObject»Fragment
+|  |  |  └──.«BusinessObject»FragmentImpl
+|  |  ├──.dao [alternative to repo]
+|  |  |  ├──.«BusinessObject»Dao
+|  |  |  └──.«BusinessObject»DaoImpl
+|  |  └──.model
+|  |     └──.«BusinessObject»Entity
+|  ├──.logic
+|  |  ├──.«BusinessObject»Validator
+|  |  ├──.«BusinessObject»EventsEmitter
+|  |  └──.Uc«Operation»«BusinessObject»[Impl]
+|  └──.service
+|     └──.v1
+|        ├──.«Component»RestService
+|        ├──.mapper
+|        |     └──.«BusinessObject»Mapper
+|        └──.model
+|           └──.«BusinessObject»Dto
+└──.general
+   └──.domain
+      └──.model
+         └──.ApplicationPersistenceEntity
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-structure.html b/docs/devon4j/1.0/guide-structure.html new file mode 100644 index 00000000..573f5bef --- /dev/null +++ b/docs/devon4j/1.0/guide-structure.html @@ -0,0 +1,436 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Project structure

+
+
+

In devonfw we want to give clear structure and guidance for building applications. +This also allows tools such as CobiGen or sonar-devon4j-plugin to "understand" the code. +Also this helps developers going from one devonfw project to the next one to quickly understand the code-base. +If every developer knows where to find what, the project gets more efficient. +A long time ago maven standardized the project structure with src/main/java, etc. and turned chaos into structure. +With devonfw we experienced the same for the codebase (what is inside src/main/java).

+
+
+

We initially started devon4j based on spring and spring-boot and proposed a classic project structure. +With modern cloud-native trends we added a modern project structure, that is more lean and up-to-date with the latest market trends.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-testing-snapshots.html b/docs/devon4j/1.0/guide-testing-snapshots.html new file mode 100644 index 00000000..7d6dbfc3 --- /dev/null +++ b/docs/devon4j/1.0/guide-testing-snapshots.html @@ -0,0 +1,466 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Testing devon4j SNAPSHOT releases

+
+
+

Whenever a story in devon4j is completed by merging a PR, +our github actions will build a new SNAPSHOT release and on success deploy it to nexus on OSSRH. +You can therefore find the latest devonfw SNAPSHOT releases here.

+
+
+

In order to test a SNAPSHOT release in your project e.g. to give feedback if a bugfix or feature is working as you expect, you can do the following:

+
+
+
    +
  1. +

    Add the repository https://oss.sonatype.org/content/repositories/snapshots/. +If you are using a recent devonfw-ide simply edit the file $DEVON_IDE_HOME/conf/.m2/settings.xml and activate the devonfw-snapshots profile by changing activeByDefault to true. +All details can be found here.

    +
  2. +
  3. +

    Edit your toplevel pom.xml file and change the devon4j.version to the most recent SNAPSHOT version. To figure out the latest SNAPSHOT version of devon4j, check the maven.config or the CHANGELOG.

    +
  4. +
  5. +

    Test your application and see if the latest SNAPSHOT release fixes your issues, does not break your app and works as expected.

    +
  6. +
  7. +

    Give us feedback. We love to hear your feedback:

    +
    +
      +
    • +

      If an issue is not fixed as expected, comment on the corresponding issue on github.

      +
    • +
    • +

      If something broke or does not work as expected, please file a new issue and provide details (stacktrace, error log, etc.) but no confidential data (passwords, customer details, etc.).

      +
    • +
    • +

      If your test succeeded with the latest SNAPSHOT please also give confirming feedback to bug or feature tickets to let us know.

      +
    • +
    +
    +
  8. +
+
+
+

Thanks for your testing, support and help to make devonfw better!

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-testing.html b/docs/devon4j/1.0/guide-testing.html new file mode 100644 index 00000000..6027abe7 --- /dev/null +++ b/docs/devon4j/1.0/guide-testing.html @@ -0,0 +1,1067 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Testing

+
+ +
+
+
+

General best practices

+
+
+

For testing please follow our general best practices:

+
+
+
    +
  • +

    Tests should have a clear goal that should also be documented.

    +
  • +
  • +

    Tests have to be classified into different integration levels.

    +
  • +
  • +

    Tests should follow a clear naming convention.

    +
  • +
  • +

    Automated tests need to properly assert the result of the tested operation(s) in a reliable way. E.g. avoid stuff like assertThat(service.getAllEntities()).hasSize(42) or even worse tests that have no assertion at all.

    +
  • +
  • +

    Tests need to be independent of each other. Never write test-cases or tests (in Java @Test methods) that depend on another test to be executed before.

    +
  • +
  • +

    Use AssertJ to write good readable and maintainable tests that also provide valuable feedback in case a test fails. Do not use legacy JUnit methods like assertEquals anymore!

    +
  • +
  • +

    For easy understanding divide your test in three commented sections:

    +
    +
      +
    • +

      //given

      +
    • +
    • +

      //when

      +
    • +
    • +

      //then

      +
    • +
    +
    +
  • +
  • +

    Plan your tests and test data management properly before implementing.

    +
  • +
  • +

    Instead of having a too strong focus on test coverage better ensure you have covered your critical core functionality properly and review the code including tests.

    +
  • +
  • +

    Test code shall NOT be seen as second class code. You shall consider design, architecture and code-style also for your test code but do not over-engineer it.

    +
  • +
  • +

    Test automation is good but should be considered in relation to cost per use. Creating full coverage via automated system tests can cause a massive amount of test-code that can turn out as a huge maintenance hell. Always consider all aspects including product life-cycle, criticality of use-cases to test, and variability of the aspect to test (e.g. UI, test-data).

    +
  • +
  • +

    Use continuous integration and establish that the entire team wants to have clean builds and running tests.

    +
  • +
  • +

    Prefer delegation over inheritance for cross-cutting testing functionality. Good places to put this kind of code can be realized and reused via the JUnit @Rule mechanism.

    +
  • +
+
+
+
+
+

Test Automation Technology Stack

+
+
+

For test automation we use JUnit. However, we are strictly doing all assertions with AssertJ. For mocking we use mockito. +In order to mock remote connections we use wiremock. +For testing entire components or sub-systems we recommend to use spring-boot-starter-test as lightweight and fast testing infrastructure that is already shipped with devon4j-test.

+
+
+

In case you have to use a full blown JEE application server, we recommend to use arquillian. To get started with arquillian, look here.

+
+
+
+
+

Test Doubles

+
+
+

We use test doubles as a generic term for mocks, stubs, fakes, dummies, or spies to avoid confusion. Here is a short summary from stubs VS mocks:

+
+
+
    +
  • +

    Dummy objects specifying no logic at all. May declare data in a POJO style to be used as boiler plate code to parameter lists or even influence the control flow towards the test’s needs.

    +
  • +
  • +

    Fake objects actually have working implementations, but usually take some shortcut which makes them not suitable for production (an in memory database is a good example).

    +
  • +
  • +

    Stubs provide canned answers to calls made during the test, usually not responding at all to anything outside what’s programmed in for the test. Stubs may also record information about calls, such as an email gateway stub that remembers the messages it 'sent', or maybe only how many messages it 'sent'.

    +
  • +
  • +

    Mocks are objects pre-programmed with expectations, which form a specification of the calls they are expected to receive.

    +
  • +
+
+
+

We try to give some examples, which should make it somehow clearer:

+
+
+

Stubs

+
+

Best Practices for applications:

+
+
+
    +
  • +

    A good way to replace small to medium large boundary systems, whose impact (e.g. latency) should be ignored during load and performance tests of the application under development.

    +
  • +
  • +

    As stub implementation will rely on state-based verification, there is the threat that test developers will partially reimplement the state transitions based on the replaced code. This will immediately lead to a black maintenance hole, so better use mocks to assure the certain behavior on interface level.

    +
  • +
  • +

    Do NOT use stubs as basis of a large amount of test cases as due to state-based verification of stubs, test developers will enrich the stub implementation to become a large monster with its own hunger for maintenance efforts.

    +
  • +
+
+
+
+

Mocks

+
+

Best Practices for applications:

+
+
+
    +
  • +

    Replace not-needed dependencies of your system-under-test (SUT) to minimize the application context to start of your component framework.

    +
  • +
  • +

    Replace dependencies of your SUT to impact the control flow under test without establishing all the context parameters needed to match the control flow.

    +
  • +
  • +

    Remember: Not everything has to be mocked! Especially on lower levels of tests like isolated module tests you can be betrayed into a mocking delusion, where you end up in a hundred lines of code mocking the whole context and five lines executing the test and verifying the mocks behavior. Always keep in mind the benefit-cost ratio, when implementing tests using mocks.

    +
  • +
+
+
+
+

Wiremock

+
+

If you need to mock remote connections such as HTTP-Servers, wiremock offers easy-to-use functionality. For a full description see the homepage or the github repository. Wiremock can be used either as a JUnit Rule, in Java outside of JUnit or as a standalone process. The mocked server can be configured to respond to specific requests in a given way via a fluent Java API, JSON files and JSON over HTTP. An example of an integration with JUnit can look as follows.

+
+
+
+
import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig;
+import com.github.tomakehurst.wiremock.junit.WireMockRule;
+
+public class WireMockOfferImport{
+
+  @Rule
+  public WireMockRule mockServer = new WireMockRule(wireMockConfig().dynamicPort());
+
+  @Test
+  public void requestDataTest() throws Exception {
+  int port = this.mockServer.port();
+  ...}
+
+
+
+

This creates a server on a randomly chosen free port on the running machine. You can also specify the port to be used if wanted. Other than that there are several options to further configure the server. This includes HTTPs, proxy settings, file locations, logging and extensions.

+
+
+
+
  @Test
+  public void requestDataTest() throws Exception {
+      this.mockServer.stubFor(get(urlEqualTo("/new/offers")).withHeader("Accept", equalTo("application/json"))
+      .withHeader("Authorization", containing("Basic")).willReturn(aResponse().withStatus(200).withFixedDelay(1000)
+      .withHeader("Content-Type", "application/json").withBodyFile("/wireMockTest/jsonBodyFile.json")));
+  }
+
+
+
+

This will stub the URL localhost:port/new/offers to respond with a status 200 message containing a header (Content-Type: application/json) and a body with content given in jsonBodyFile.json if the request matches several conditions. +It has to be a GET request to ../new/offers with the two given header properties.

+
+
+

Note that by default files are located in src/test/resources/__files/. When using only one WireMock server one can omit the this.mockServer before the stubFor call (static method). +You can also add a fixed delay to the response or processing delay with WireMock.addRequestProcessingDelay(time) in order to test for timeouts.

+
+
+

WireMock can also respond with different corrupted messages to simulate faulty behaviour.

+
+
+
+
@Test(expected = ResourceAccessException.class)
+public void faultTest() {
+
+    this.mockServer.stubFor(get(urlEqualTo("/fault")).willReturn(aResponse()
+    .withFault(Fault.MALFORMED_RESPONSE_CHUNK)));
+...}
+
+
+
+

A GET request to ../fault returns an OK status header, then garbage, and then closes the connection.

+
+
+
+
+
+

Integration Levels

+
+
+

There are many discussions about the right level of integration for test automation. Sometimes it is better to focus on small, isolated modules of the system - whatever a "module" may be. In other cases it makes more sense to test integrated groups of modules. Because there is no universal answer to this question, devonfw only defines a common terminology for what could be tested. Each project must make its own decision where to put the focus of test automation. There is no worldwide accepted terminology for the integration levels of testing. In general we consider ISTQB. However, with a technical focus on test automation we want to get more precise.

+
+
+

The following picture shows a simplified view of an application based on the devonfw reference architecture. We define four integration levels that are explained in detail below. +The boxes in the picture contain parenthesized numbers. These numbers depict the lowest integration level a box belongs to. Higher integration levels also contain all boxes of lower integration levels. When writing tests for a given integration level, related boxes with a lower integration level must be replaced by test doubles or drivers.

+
+
+
+Integration Levels +
+
+
+

The main difference between the integration levels is the amount of infrastructure needed to test them. The more infrastructure you need, the more bugs you will find, but the more instable and the slower your tests will be. So each project has to make a trade-off between pros and contras of including much infrastructure in tests and has to select the integration levels that fit best to the project.

+
+
+

Consider, that more infrastructure does not automatically lead to a better bug-detection. There may be bugs in your software that are masked by bugs in the infrastructure. The best way to find those bugs is to test with very few infrastructure.

+
+
+

External systems do not belong to any of the integration levels defined here. devonfw does not recommend involving real external systems in test automation. This means, they have to be replaced by test doubles in automated tests. An exception may be external systems that are fully under control of the own development team.

+
+
+

The following chapters describe the four integration levels.

+
+
+

Level 1 Module Test

+
+

The goal of an isolated module test is to provide fast feedback to the developer. Consequently, isolated module tests must not have any interaction with the client, the database, the file system, the network, etc.

+
+
+

An isolated module test is testing a single class or at least a small set of classes in isolation. If such classes depend on other components or external resources, etc. these shall be replaced with a test double.

+
+
+
+
public class MyClassTest extends ModuleTest {
+
+  @Test
+  public void testMyClass() {
+
+    // given
+    MyClass myClass = new MyClass();
+    // when
+    String value = myClass.doSomething();
+    // then
+    assertThat(value).isEqualTo("expected value");
+  }
+
+}
+
+
+
+

For an advanced example see here.

+
+
+
+

Level 2 Component Test

+
+

A component test aims to test components or component parts as a unit. +These tests typically run with a (light-weight) infrastructure such as spring-boot-starter-test and can access resources such as a database (e.g. for DAO tests). +Further, no remote communication is intended here. Access to external systems shall be replaced by a test double.

+
+
+

With devon4j and spring you can write a component-test as easy as illustrated in the following example:

+
+
+
+
@SpringBootTest(classes = { MySpringBootApp.class }, webEnvironment = WebEnvironment.NONE)
+public class UcFindCountryTest extends ComponentTest {
+  @Inject
+  private UcFindCountry ucFindCountry;
+
+  @Test
+  public void testFindCountry() {
+
+    // given
+    String countryCode = "de";
+
+    // when
+    TestUtil.login("user", MyAccessControlConfig.FIND_COUNTRY);
+    CountryEto country = this.ucFindCountry.findCountry(countryCode);
+
+    // then
+    assertThat(country).isNotNull();
+    assertThat(country.getCountryCode()).isEqualTo(countryCode);
+    assertThat(country.getName()).isEqualTo("Germany");
+  }
+}
+
+
+
+

This test will start the entire spring-context of your app (MySpringBootApp). Within the test spring will inject according spring-beans into all your fields annotated with @Inject. In the test methods you can use these spring-beans and perform your actual tests. This pattern can be used for testing DAOs/Repositories, Use-Cases, or any other spring-bean with its entire configuration including database and transactions.

+
+
+

When you are testing use-cases your authorization will also be in place. Therefore, you have to simulate a logon in advance, which is done via the login method in the above example. The test-infrastructure will automatically do a logout for you after each test method in doTearDown.

+
+
+
+

Level 3 Subsystem Test

+
+

A subsystem test runs against the external interfaces (e.g. HTTP service) of the integrated subsystem. Subsystem tests of the client subsystem are described in the devon4ng testing guide. In devon4j the server (JEE application) is the subsystem under test. The tests act as a client (e.g. service consumer) and the server has to be integrated and started in a container.

+
+
+

With devon4j and spring you can write a subsystem-test as easy as illustrated in the following example:

+
+
+
+
@SpringBootTest(classes = { MySpringBootApp.class }, webEnvironment = WebEnvironment.RANDOM_PORT)
+public class CountryRestServiceTest extends SubsystemTest {
+
+  @Inject
+  private ServiceClientFactory serviceClientFactory;
+
+  @Test
+  public void testFindCountry() {
+
+    // given
+    String countryCode = "de";
+
+    // when
+    CountryRestService service = this.serviceClientFactory.create(CountryRestService.class);
+    CountryEto country = service.findCountry(countryCode);
+
+    // then
+    assertThat(country).isNotNull();
+    assertThat(country.getCountryCode()).isEqualTo(countryCode);
+    assertThat(country.getName()).isEqualTo("Germany");
+  }
+}
+
+
+
+

Even though not obvious on the first look this test will start your entire application as a server on a free random port (so that it works in CI with parallel builds for different branches) and tests the invocation of a (REST) service including (un)marshalling of data (e.g. as JSON) and transport via HTTP (all in the invocation of the findCountry method).

+
+
+

Do not confuse a subsystem test with a system integration test. A system integration test validates the interaction of several systems where we do not recommend test automation.

+
+
+
+

Level 4 System Test

+
+

A system test has the goal to test the system as a whole against its official interfaces such as its UI or batches. The system itself runs as a separate process in a way close to a regular deployment. Only external systems are simulated by test doubles.

+
+
+

The devonfw only gives advice for automated system tests (TODO see allure testing framework). In nearly every project there must be manual system tests, too. These manual system tests are out of scope here.

+
+
+
+

Classifying Integration-Levels

+
+

devon4j defines Category-Interfaces that shall be used as JUnit Categories. +Also devon4j provides abstract base classes that you may extend in your test-cases if you like.

+
+
+

devon4j further pre-configures the maven build to only run integration levels 1-2 by default (e.g. for fast feedback in continuous integration). It offers the profiles subsystemtest (1-3) and systemtest (1-4). In your nightly build you can simply add -Psystemtest to run all tests.

+
+
+
+
+
+

Implementation

+
+
+

This section introduces how to implement tests on the different levels with the given devonfw infrastructure and the proposed frameworks.

+
+
+

Module Test

+
+

In devon4j you can extend the abstract class ModuleTest to basically get access to assertions. In order to test classes embedded in dependencies and external services one needs to provide mocks for that. As the technology stack recommends we use the Mockito framework to offer this functionality. The following example shows how to implement Mockito into a JUnit test.

+
+
+
+
import static org.mockito.Mockito.when;
+import static org.mockito.Mockito.mock;
+...
+
+public class StaffmanagementImplTest extends ModuleTest {
+  @Rule
+  public MockitoRule rule = MockitoJUnit.rule();
+
+  @Test
+  public void testFindStaffMember() {
+  ...}
+}
+
+
+
+

Note that the test class does not use the @SpringApplicationConfiguration annotation. In a module test one does not use the whole application. +The JUnit rule is the best solution to use in order to get all needed functionality of Mockito. Static imports are a convenient option to enhance readability within Mockito tests. +You can define mocks with the @Mock annotation or the mock(*.class) call. To inject the mocked objects into your class under test you can use the @InjectMocks annotation. This automatically uses the setters of StaffmanagementImpl to inject the defined mocks into the class under test (CUT) when there is a setter available. In this case the beanMapper and the staffMemberDao are injected. Of course it is possible to do this manually if you need more control.

+
+
+
+
  @Mock
+  private BeanMapper beanMapper;
+  @Mock
+  private StaffMemberEntity staffMemberEntity;
+  @Mock
+  private StaffMemberEto staffMemberEto;
+  @Mock
+  private StaffMemberDao staffMemberDao;
+  @InjectMocks
+  StaffmanagementImpl staffmanagementImpl = new StaffmanagementImpl();
+
+
+
+

The mocked objects do not provide any functionality at the time being. To define what happens on a method call on a mocked dependency in the CUT one can use when(condition).thenReturn(result). In this case we want to test findStaffMember(Long id) in the StaffmanagementImpl.java.

+
+
+
+
public StaffMemberEto findStaffMember(Long id) {
+  return getBeanMapper().map(getStaffMemberDao().find(id), StaffMemberEto.class);
+}
+
+
+
+

In this simple example one has to stub two calls on the CUT as you can see below. For example the method call of the CUT staffMemberDao.find(id) is stubbed for returning a mock object staffMemberEntity that is also defined as mock.

+
+
+
+

Subsystem Test

+
+

devon4j provides a simple test infrastructure to aid with the implementation of subsystem tests.

+
+
+
+
//given
+long id = 1L;
+Class<StaffMemberEto> targetClass = StaffMemberEto.class;
+when(this.staffMemberDao.find(id)).thenReturn(this.staffMemberEntity);
+when(this.beanMapper.map(this.staffMemberEntity, targetClass)).thenReturn(this.staffMemberEto);
+
+//when
+StaffMemberEto resultEto = this.staffmanagementImpl.findStaffMember(id);
+
+//then
+assertThat(resultEto).isNotNull();
+assertThat(resultEto).isEqualTo(this.staffMemberEto);
+
+
+
+

After the test method call one can verify the expected results. Mockito can check whether a mocked method call was indeed called. This can be done using Mockito verify. Note that it does not generate any value if you check for method calls that are needed to reach the asserted result anyway. Call verification can be useful e.g. when you want to assure that statistics are written out without actually testing them.

+
+
+
+
+
+

Regression testing

+
+
+

When it comes to complex output (even binary) that you want to regression test by comparing with an expected result, you should consider Approval Tests using ApprovalTests.Java. +If applied for the right problems, it can be very helpful.

+
+
+
+
+

Deployment Pipeline

+
+
+

A deployment pipeline is a semi-automated process that gets software-changes from version control into production. It contains several validation steps, e.g. automated tests of all integration levels. +Because devon4j should fit to different project types - from agile to waterfall - it does not define a standard deployment pipeline. But we recommend to define such a deployment pipeline explicitly for each project and to find the right place in it for each type of test.

+
+
+

For that purpose, it is advisable to have fast running test suite that gives as much confidence as possible without needing too much time and too much infrastructure. This test suite should run in an early stage of your deployment pipeline. Maybe the developer should run it even before he/she checked in the code. Usually lower integration levels are more suitable for this test suite than higher integration levels.

+
+
+

Note, that the deployment pipeline always should contain manual validation steps, at least manual acceptance testing. There also may be manual validation steps that have to be executed for special changes only, e.g. usability testing. Management and execution processes of those manual validation steps are currently not in the scope of devonfw.

+
+
+
+
+

Test Coverage

+
+
+

We are using tools (SonarQube/Jacoco) to measure the coverage of the tests. Please always keep in mind that the only reliable message of a code coverage of X% is that (100-X)% of the code is entirely untested. It does not say anything about the quality of the tests or the software though it often relates to it.

+
+
+
+
+

Test Configuration

+
+
+

This section covers test configuration in general without focusing on integration levels as in the first chapter.

+
+
+

Configure Test Specific Beans

+
+

Sometimes it can become handy to provide other or differently configured bean implementations via CDI than those available in production. For example, when creating beans using @Bean-annotated methods they are usually configured within those methods. WebSecurityBeansConfig shows an example of such methods.

+
+
+
+
@Configuration
+public class WebSecurityBeansConfig {
+  //...
+  @Bean
+  public AccessControlSchemaProvider accessControlSchemaProvider() {
+    // actually no additional configuration is shown here
+    return new AccessControlSchemaProviderImpl();
+  }
+  //...
+}
+
+
+
+

AccessControlSchemaProvider allows to programmatically access data defined in some XML file, e.g. access-control-schema.xml. Now, one can imagine that it would be helpful if AccessControlSchemaProvider would point to some other file than the default within a test class. That file could provide content that differs from the default. +The question is: how can I change resource path of AccessControlSchemaProviderImpl within a test?

+
+
+

One very helpful solution is to use static inner classes. +Static inner classes can contain @Bean -annotated methods, and by placing them in the classes parameter in @SpringBootTest(classes = { /* place class here*/ }) annotation the beans returned by these methods are placed in the application context during test execution. Combining this feature with inheritance allows to override methods defined in other configuration classes as shown in the following listing where TempWebSecurityConfig extends WebSecurityBeansConfig. This relationship allows to override public AccessControlSchemaProvider accessControlSchemaProvider(). Here we are able to configure the instance of type AccessControlSchemaProviderImpl before returning it (and, of course, we could also have used a completely different implementation of the AccessControlSchemaProvider interface). By overriding the method the implementation of the super class is ignored, hence, only the new implementation is called at runtime. Other methods defined in WebSecurityBeansConfig which are not overridden by the subclass are still dispatched to WebSecurityBeansConfig.

+
+
+
+
//... Other testing related annotations
+@SpringBootTest(classes = { TempWebSecurityConfig.class })
+public class SomeTestClass {
+
+  public static class TempWebSecurityConfig extends WebSecurityBeansConfig {
+
+    @Override
+    @Bean
+    public AccessControlSchemaProvider accessControlSchemaProvider() {
+
+      ClassPathResource resource = new ClassPathResource(locationPrefix + "access-control-schema3.xml");
+      AccessControlSchemaProviderImpl accessControlSchemaProvider = new AccessControlSchemaProviderImpl();
+      accessControlSchemaProvider.setAccessControlSchema(resource);
+      return accessControlSchemaProvider;
+    }
+  }
+}
+
+
+
+

The following chapter of the Spring framework documentation explains the issue, but uses a slightly different way to obtain the configuration.

+
+
+
+

Test Data

+
+

It is possible to obtain test data in two different ways depending on your test’s integration level.

+
+
+
+
+
+

Debugging Tests

+
+
+

The following two sections describe two debugging approaches for tests. Tests are either run from within the IDE or from the command line using Maven.

+
+
+

Debugging with the IDE

+
+

Debugging with the IDE is as easy as always. Even if you want to execute a SubsystemTest which needs a Spring context and a server infrastructure to run properly, you just set your breakpoints and click on Debug As → JUnit Test. The test infrastructure will take care of initializing the necessary infrastructure - if everything is configured properly.

+
+
+
+

Debugging with Maven

+
+

Please refer to the following two links to find a guide for debugging tests when running them from Maven.

+
+ +
+

In essence, you first have to execute a test using the command line. Maven will halt just before the test execution and wait for your IDE to connect to the process. When receiving a connection the test will start and then pause at any breakpoint set in advance. +The first link states that tests are started through the following command:

+
+
+
+
mvn -Dmaven.surefire.debug test
+
+
+
+

Although this is correct, it will run every test class in your project and - which is time consuming and mostly unnecessary - halt before each of these tests. +To counter this problem you can simply execute a single test class through the following command (here we execute the TablemanagementRestServiceTest from the restaurant sample application):

+
+
+
+
mvn test -Dmaven.surefire.debug test -Dtest=TablemanagementRestServiceTest
+
+
+
+

It is important to notice that you first have to execute the Maven command in the according submodule, e.g. to execute the TablemanagementRestServiceTest you have first to navigate to the core module’s directory.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-text-search.html b/docs/devon4j/1.0/guide-text-search.html new file mode 100644 index 00000000..31929b97 --- /dev/null +++ b/docs/devon4j/1.0/guide-text-search.html @@ -0,0 +1,471 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ + +
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-transactions.html b/docs/devon4j/1.0/guide-transactions.html new file mode 100644 index 00000000..81a33050 --- /dev/null +++ b/docs/devon4j/1.0/guide-transactions.html @@ -0,0 +1,522 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Transaction Handling

+
+
+

For transaction handling we use AOP to add transaction control via annotations as aspect. +This is done by annotating your code with the @Transactional annotation. +You can either annotate your container bean at class level to make all methods transactional or you can annotate individual methods to make them transactional:

+
+
+
+
  @Transactional
+  public Output getData(Input input) {
+    ...
+  }
+
+
+
+
+
+

JTA Imports

+
+
+

Here are the import statements for transaction support:

+
+
+
+
import javax.transaction.Transactional;
+
+
+
+ + + + + +
+ + +Use the above import statement to follow JEE and avoid using org.springframework.transaction.annotation.Transactional. +
+
+
+
+
+

JTA Dependencies

+
+
+

Please note that with Jakarta EE the dependencies have changed. +When you want to start with Jakarta EE you should use these dependencies to get the annotations for dependency injection:

+
+
+
+
<!-- Java Transaction API (JTA) -->
+<dependency>
+  <groupId>jakarta.transaction</groupId>
+  <artifactId>jakarta.transaction-api</artifactId>
+</dependency>
+
+
+
+

Please note that with Quarkus you will get them as transitive dependencies out of the box. +The above Jakarta EE dependencies replace these JEE dependencies:

+
+
+
+
<!-- Java Transaction API (JTA) -->
+<dependency>
+  <groupId>javax.transaction</groupId>
+  <artifactId>javax.transaction-api</artifactId>
+</dependency>
+
+
+
+
+
+

Handling constraint violations

+
+
+

Using @Transactional magically wraps transaction handling around your code. +As constraints are checked by the database at the end when the transaction gets committed, a constraint violation will be thrown by this aspect outside your code. +In case you have to handle constraint violations manually, you have to do that in code outside the logic that is annotated with @Transactional. +This may be done in a service operation by catching a ConstraintViolationException (org.hibernate.exception.ConstraintViolationException for hibernate). +As a generic approach you can solve this via REST exception handling.

+
+
+
+
+

Batches

+
+
+

Transaction control for batches is a lot more complicated and is described in the batch layer.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-transferobject.html b/docs/devon4j/1.0/guide-transferobject.html new file mode 100644 index 00000000..f957c5c8 --- /dev/null +++ b/docs/devon4j/1.0/guide-transferobject.html @@ -0,0 +1,489 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Transfer-Objects

+
+
+

The technical data model is defined in form of persistent entities. +However, passing persistent entities via call-by-reference across the entire application will soon cause problems:

+
+
+
    +
  • +

Changes to a persistent entity are directly written back to the persistent store when the transaction is committed. When the entity is sent across the application, changes also tend to take place in multiple places, endangering data sovereignty and leading to inconsistency.

    +
  • +
  • +

    You want to send and receive data via services across the network and have to define what section of your data is actually transferred. If you have relations in your technical model you quickly end up loading and transferring way too much data.

    +
  • +
  • +

    Modifications to your technical data model shall not automatically have impact on your external services causing incompatibilities.

    +
  • +
+
+
+

To prevent such problems transfer-objects are used leading to a call-by-value model and decoupling changes to persistent entities.

+
+
+

In the following sections the different types of transfer-objects are explained. +You will find all according naming-conventions in the architecture-mapping

+
+
+

To structure your transfer objects, we recommend the following approaches:

+
+
+ +
+
+

Also considering the following transfer objects in specific cases:

+
+
+
+
SearchCriteriaTo
+
+

For searching we create or generate a «BusinessObject»SearchCriteriaTo representing a query to find instances of «BusinessObject».

+
+
TO
+
+

There are typically transfer-objects for data that is never persistent. +For very generic cases these just carry the suffix To.

+
+
STO
+
+

We can potentially create separate service transfer objects (STO) (if possible named «BusinessObject»Sto) to keep the service API stable and independent of the actual data-model. +However, we usually do not need this and want to keep our architecture simple. +Only create STOs if you need service versioning and support previous APIs or to provide legacy service technologies that require their own isolated data-model. +In such case you also need beanmapping between STOs and ETOs/DTOs what means extra effort and complexity that should be avoided.

+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-usecase.html b/docs/devon4j/1.0/guide-usecase.html new file mode 100644 index 00000000..dff224af --- /dev/null +++ b/docs/devon4j/1.0/guide-usecase.html @@ -0,0 +1,557 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

UseCase

+
+
+

A use-case is a small unit of the logic layer responsible for an operation on a particular entity (business object). +We leave it up to you to decide whether you want to define an interface (API) for each use-case or provide an implementation directly.

+
+
+

Following our architecture-mapping (for classic and modern project), use-cases are named Uc«Operation»«BusinessObject»[Impl]. The prefix Uc stands for use-case and allows to easily find and identify them in your IDE. The «Operation» stands for a verb that is operated on the entity identified by «BusinessObject». +For CRUD we use the standard operations Find and Manage that can be generated by CobiGen. This also separates read and write operations (e.g. if you want to do CQSR, or to configure read-only transactions for read operations).

+
+
+

In our example, we choose to define an interface for each use-case. We also use *To to refer to any type of transfer object. Please follow our guide to understand more about different types of transfer object e.g. Eto, Dto, Cto

+
+
+
+
+

Find

+
+
+

The UcFind«BusinessObject» defines all read operations to retrieve and search the «BusinessObject». +Here is an example:

+
+
+
+
public interface UcFindBooking {
+  //*To = Eto, Dto or Cto
+  Booking*To findBooking(Long id);
+}
+
+
+
+
+
+

Manage

+
+
+

The UcManage«BusinessObject» defines all CRUD write operations (create, update and delete) for the «BusinessObject». +Here is an example:

+
+
+
+
public interface UcManageBooking {
+
+  //*To = Eto, Dto or Cto
+  Booking*To saveBooking(Booking*To booking);
+
+  void deleteBooking(Long id);
+
+}
+
+
+
+
+
+

Custom

+
+
+

Any other non CRUD operation Uc«Operation»«BusinessObject» uses any other custom verb for «Operation». +Typically, such custom use-cases only define a single method. +Here is an example:

+
+
+
+
public interface UcApproveBooking {
+
+  //*To = Eto, Dto or Cto
+  void approveBooking(Booking*To booking);
+
+}
+
+
+
+
+
+

Implementation

+
+
+

The implementation should carry its own name and the suffix Impl and is annotated with @Named and @ApplicationScoped. It will need access to the persistent data which is done by injecting the corresponding repository (or DAO). Furthermore, it shall not expose persistent entities from the data access layer and has to map them to transfer objects using the bean-mapper. Please refer to our bean mapping, transfer object and dependency injection documentation for more information. +Here is an example:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcManageBookingImpl implements UcManageBooking {
+
+  @Inject
+  private BookingRepository bookingRepository;
+
+  @Override
+  public void deleteBooking(Long id) {
+
+    LOG.debug("Delete Booking with id {} from database.", id);
+    this.bookingRepository.deleteById(id);
+  }
+}
+
+
+
+

The use-cases can then be injected directly into the service.

+
+
+
+
@Named("BookingmanagementRestService")
+@Validated
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+
+  @Inject
+  private UcFindBooking ucFindBooking;
+
+  @Inject
+  private UcManageBooking ucManageBooking;
+
+  @Inject
+  private UcApproveBooking ucApproveBooking;
+}
+
+
+
+
+
+

Internal use case

+
+
+

Sometimes, a component with multiple related entities and many use-cases needs to reuse business logic internally. +Of course, this can be exposed as an official use-case API but this will imply using transfer-objects (ETOs) instead of entities. In some cases, this is undesired e.g. for better performance to prevent unnecessary mapping of entire collections of entities. +In the first place, you should try to use abstract base implementations providing reusable methods the actual use-case implementations can inherit from. +If your business logic is even more complex and you have multiple aspects of business logic to share and reuse but also run into multi-inheritance issues, you may also just create use-cases that have their interface located in the impl scope package right next to the implementation (or you may just skip the interface). In such a case, you may define methods that directly take or return entity objects. +To avoid confusion with regular use-cases, we recommend to add the Internal suffix to the type name leading to Uc«Operation»«BusinessObject»Internal[Impl].

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-validation.html b/docs/devon4j/1.0/guide-validation.html new file mode 100644 index 00000000..ad3b0b43 --- /dev/null +++ b/docs/devon4j/1.0/guide-validation.html @@ -0,0 +1,631 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Validation

+
+
+

Validation is about checking syntax and semantics of input data. Invalid data is rejected by the application. +Therefore validation is required in multiple places of an application. E.g. the GUI will do validation for usability reasons to assist the user, early feedback and to prevent unnecessary server requests. +On the server-side validation has to be done for consistency and security.

+
+
+

In general we distinguish these forms of validation:

+
+
+
    +
  • +

    stateless validation will produce the same result for given input at any time (for the same code/release).

    +
  • +
  • +

    stateful validation is dependent on other states and can consider the same input data as valid in once case and as invalid in another.

    +
  • +
+
+
+
+
+

Stateless Validation

+
+
+

For regular, stateless validation we use the JSR303 standard that is also called bean validation (BV). +Details can be found in the specification. +As implementation we recommend hibernate-validator.

+
+
+

Example

+
+

A description of how to enable BV for spring applications can be found in the relevant Spring documentation. A guide you can use to integrate validation in Quarkus applications can be found here. For a quick summary follow these steps:

+
+
+
    +
  • +

    Make sure that hibernate-validator is located in the classpath by adding a dependency to the pom.xml.

    +
  • +
+
+
+
spring
+
+
    <dependency>
+      <groupId>org.hibernate</groupId>
+      <artifactId>hibernate-validator</artifactId>
+    </dependency>
+
+
+
+
quarkus
+
+
    <dependency>
+      <groupId>io.quarkus</groupId>
+      <artifactId>quarkus-hibernate-validator</artifactId>
+    </dependency>
+
+
+
+
    +
  • +

    For methods to validate go to their declaration and add constraint annotations to the method parameters.

    +
    +

    In spring applications you can add the @Validated annotation to the implementation (spring bean) to be validated (this is an annotation of the spring framework, so it`s not available in the Quarkus context). The standard use case is to annotate the logic layer implementation, i.e. the use case implementation or component facade in case of simple logic layer pattern. Thus, the validation will be executed for service requests as well as batch processing.

    +
    +
    +
      +
    • +

      @Valid annotation to the arguments to validate (if that class itself is annotated with constraints to check).

      +
    • +
    • +

      @NotNull for required arguments.

      +
    • +
    • +

      Other constraints (e.g. @Size) for generic arguments (e.g. of type String or Integer). However, consider to create custom datatypes and avoid adding too much validation logic (especially redundant in multiple places).

      +
    • +
    +
    +
  • +
+
+
+
BookingmanagementRestServiceImpl.java
+
+
@Validated
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+  ...
+  public BookingEto saveBooking(@Valid BookingCto booking) {
+  ...
+
+
+
+
    +
  • +

    Finally add appropriate validation constraint annotations to the fields of the ETO class.

    +
  • +
+
+
+
BookingCto.java
+
+
  @Valid
+  private BookingEto booking;
+
+
+
+
BookingEto.java
+
+
  @NotNull
+  @Future
+  private Timestamp bookingDate;
+
+
+
+

A list with all bean validation constraint annotations available for hibernate-validator can be found here. In addition it is possible to configure custom constraints. Therefore it is necessary to implement an annotation and a corresponding validator. A description can also be found in the Spring documentation or with more details in the hibernate documentation.

+
+
+ + + + + +
+ + +Bean Validation in Wildfly >v8: Wildfly v8 is the first version of Wildfly implementing the JEE7 specification. It comes with bean validation based on hibernate-validator out of the box. In case someone is running Spring in Wildfly for whatever reasons, the spring based annotation @Validated would duplicate bean validation at runtime and thus should be omitted. +
+
+
+
+

GUI-Integration

+
+

TODO

+
+
+
+

Cross-Field Validation

+
+

BV has poor support for this. Best practice is to create and use beans for ranges, etc. that solve this. A bean for a range could look like so:

+
+
+
+
public class Range<V extends Comparable<V>> {
+
+  private V min;
+  private V max;
+
+  public Range(V min, V max) {
+
+    super();
+    if ((min != null) && (max != null)) {
+      int delta = min.compareTo(max);
+      if (delta > 0) {
+        throw new ValueOutOfRangeException(null, min, min, max);
+      }
+    }
+    this.min = min;
+    this.max = max;
+  }
+
+  public V getMin() ...
+  public V getMax() ...
+
+
+
+
+
+
+

Stateful Validation

+
+
+

For complex and stateful business validations we do not use BV (possible with groups and context, etc.) but follow KISS and just implement this on the server in a straightforward manner. +An example is the deletion of a table in the example application. Here the state of the table must be checked first:

+
+
+

BookingmanagementImpl.java

+
+
+
+
  private void sendConfirmationEmails(BookingEntity booking) {
+
+    if (!booking.getInvitedGuests().isEmpty()) {
+      for (InvitedGuestEntity guest : booking.getInvitedGuests()) {
+        sendInviteEmailToGuest(guest, booking);
+      }
+    }
+
+    sendConfirmationEmailToHost(booking);
+  }
+
+
+
+

Implementing this small check with BV would be a lot more effort.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/guide-xml.html b/docs/devon4j/1.0/guide-xml.html new file mode 100644 index 00000000..d0f3ab2d --- /dev/null +++ b/docs/devon4j/1.0/guide-xml.html @@ -0,0 +1,472 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

XML

+
+
+

XML (Extensible Markup Language) is a W3C standard format for structured information. It has a large eco-system of additional standards and tools.

+
+
+

In Java there are many different APIs and frameworks for accessing, producing and processing XML. For the devonfw we recommend to use JAXB for mapping Java objects to XML and vice-versa. Further there is the popular DOM API for reading and writing smaller XML documents directly. When processing large XML documents StAX is the right choice.

+
+
+
+
+

JAXB

+
+
+

We use JAXB to serialize Java objects to XML or vice-versa.

+
+
+

JAXB and Inheritance

+
+

Use @XmlSeeAlso annotation to provide sub-classes. +See section "Collective Polymorphism" described here.

+
+
+
+

JAXB Custom Mapping

+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to define a custom mapping. If you create dedicated objects for the XML mapping you can easily avoid such situations. When this is not suitable use @XmlJavaTypeAdapter and provide an XmlAdapter implementation that handles the mapping. +For details see here.

+
+
+
+
+
+

Security

+
+
+

To prevent XML External Entity attacks, follow JAXP Security Guide and enable FSP.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/index.html b/docs/devon4j/1.0/index.html new file mode 100644 index 00000000..8900137e --- /dev/null +++ b/docs/devon4j/1.0/index.html @@ -0,0 +1,544 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devon4j

+
+
+

Devon4j is the Java stack of devonfw. It allows you to build business applications (backends) using Java technology in a highly efficient and standardized way based on established best-practices. To build web-clients as frontend for a devon4j backend we recommend devon4ng.

+
+
+

Apache License, Version 2.0 +Maven Central +Build Status

+
+
+

For details see modules below.

+
+
+
+
+

Options

+
+
+ +
+
+

Tutorials

+ +
+
+
+
+

Modules

+
+
+

Here you can see a list of all provided modules with their JavaDoc and Artifacts

+
+
+
    +
  • +

    basic basic JavaDoc basic artifact

    +
  • +
  • +

    batch batch JavaDoc batch artifact

    +
  • +
  • +

    batch-tool batch-tool JavaDoc batch-tool artifact

    +
  • +
  • +

    beanmapping beanmapping JavaDoc beanmapping artifact

    +
  • +
  • +

    beanmapping-dozer beanmapping-dozer JavaDoc beanmapping-dozer artifact

    +
  • +
  • +

    beanmapping-orika beanmapping-orika JavaDoc beanmapping-orika artifact

    +
  • +
  • +

    cxf-client cxf-client JavaDoc cxf-client artifact

    +
  • +
  • +

    cxf-client-rest cxf-client-rest JavaDoc cxf-client-rest artifact

    +
  • +
  • +

    cxf-client-ws cxf-client-ws JavaDoc cxf-client-ws artifact

    +
  • +
  • +

    cxf-server cxf-server JavaDoc cxf-server artifact

    +
  • +
  • +

    cxf-server-rest cxf-server-rest JavaDoc cxf-server-rest artifact

    +
  • +
  • +

    cxf-server-ws cxf-server-ws JavaDoc cxf-server-ws artifact

    +
  • +
  • +

    jpa-basic jpa-basic JavaDoc jpa-basic artifact

    +
  • +
  • +

    jpa-dao jpa-dao JavaDoc jpa-dao artifact

    +
  • +
  • +

    jpa-envers jpa-envers JavaDoc jpa-envers artifact

    +
  • +
  • +

    jpa-spring-data jpa-spring-data JavaDoc jpa-spring-data artifact

    +
  • +
  • +

    json json JavaDoc json artifact

    +
  • +
  • +

    kafka kafka JavaDoc kafka artifact

    +
  • +
  • +

    logging logging JavaDoc logging artifact

    +
  • +
  • +

    rest rest JavaDoc rest artifact

    +
  • +
  • +

    security security JavaDoc security artifact

    +
  • +
  • +

    security-jwt security-jwt JavaDoc security-jwt artifact

    +
  • +
  • +

    security-keystore security-keystore JavaDoc security-keystore artifact

    +
  • +
  • +

    service service JavaDoc service artifact

    +
  • +
  • +

    test test JavaDoc test artifact

    +
  • +
  • +

    test-jpa test-jpa JavaDoc test-jpa artifact

    +
  • +
  • +

    web web JavaDoc web artifact

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/performance-comparision-spring-quarkus.html b/docs/devon4j/1.0/performance-comparision-spring-quarkus.html new file mode 100644 index 00000000..7848abd2 --- /dev/null +++ b/docs/devon4j/1.0/performance-comparision-spring-quarkus.html @@ -0,0 +1,562 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Performance comparison between Spring and Quarkus

+
+
+

Quarkus offers a big advantage in resource consumption compared to a Spring application. Especially in native mode, the memory footprint of a Quarkus application is extremely low, which can be a deciding factor in real-world environments. +The tables performance comparison application 1 and performance comparison application 2, which show the startup and memory consumption of two applications that are similar in their Quarkus and Spring implementations, illustrate this point. Application 1 is more complex in scope than Application 2 and uses more dependencies. +The listings above the tables show the functions/extensions of the applications and the lines of code (only Java files).

+
+
+
Application 1:
+
    +
  • +

    LOC (without automatically generated classes)

    +
    +
      +
    • +

      Quarkus: ~4600

      +
    • +
    • +

      Spring: ~7700 (separated into api and core module, as described for the classic project structure; api: ~3800, core: 3900)

      +
    • +
    +
    +
  • +
  • +

    Features

    +
    +
      +
    • +

      3 entities

      +
    • +
    • +

      REST service

      +
    • +
    • +

      Connection to a Postgres database (using Spring Data JPA and QueryDSL for the repository implementation)

      +
    • +
    • +

      Flyway for database migration

      +
    • +
    • +

      Kafka for asynchronous messaging

      +
    • +
    • +

      Avro for data serialization combined with a schema registry

      +
    • +
    +
    +
  • +
+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + +
Table 1. performance comparison application 1

Spring

Quarkus JVM Mode

Quarkus Native Mode

startup time (time until first response)

~35 seconds (+/- 1s)

~4,7 - 5,2 seconds

~0,9 seconds

memory usage

~850 - 900 MB

~550 MB

~190 MB

+
+
+
Application 2:
+
    +
  • +

    LOC

    +
    +
      +
    • +

      Quarkus: ~300

      +
    • +
    • +

      Spring: ~ 280

      +
    • +
    +
    +
  • +
  • +

    Features

    +
    +
      +
    • +

      1 entity

      +
    • +
    • +

      REST service with Postgres database connection

      +
    • +
    +
    +
  • +
+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + +
Table 2. performance comparison application 2

Spring

Quarkus JVM Mode

Quarkus Native Mode

startup time (time until first response)

~9 - 10 seconds

~3,9 seconds

~0,9 seconds

memory usage

~810 MB

~460 MB

~90 MB

+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/quarkus.html b/docs/devon4j/1.0/quarkus.html new file mode 100644 index 00000000..a48b5d9c --- /dev/null +++ b/docs/devon4j/1.0/quarkus.html @@ -0,0 +1,509 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Quarkus

+
+
+

Quarkus is a Java framework for building cloud-native apps. +It is fully supported by devonfw as an option and alternative to spring. +Additional things like extensions will be available on the devon4quarkus GitHub repository.

+
+
+
+
+

Guide to the Reader

+
+
+

Dependent on the intention you are reading this document, you might be most interested in the following chapters:

+
+
+
    +
  • +

    If you are completely new to Quarkus, you may be interested in the pros and cons of Quarkus. Also take a look at the official Quarkus website. And you might also be interested in the features that GraalVM offers.

    +
  • +
  • +

    If you are new to devon4j, also take a look at devon4j’s recommendations on general best practices. Check out the chapters on architecture design, project structuring and coding conventions. Follow the referenced links to explore a topic in more depth.

    +
  • +
  • +

    If you are an experienced Spring developer and want to get in touch with Quarkus, read our Getting started with Quarkus for Spring developers guide.

    +
  • +
  • +

    If you’re looking to build your first Quarkus application, the Quarkus website offers some good getting started guides. Also check out our Quarkus template guide, which gives you some recommendations on extensions and frameworks to use. It also provides some links to the Quarkus code generator with preselected configurations you can use to create your application.

    +
  • +
  • +

    If you want to have a Quarkus sample application using devon4j recommendations, check out our Quarkus reference application.

    +
  • +
  • +

    If you already have some experience with devon4j and Quarkus and need more information on a specific topic, check out our Quarkus guides. If you don’t find what you are looking for there, check out the general section. devon4j uses general solutions for Java, so solutions for both Quarkus and Spring are documented there.

    +
  • +
  • +

    If you want to learn how to build native images, check out this guide.

    +
  • +
+
+
+
+
+

Pros

+
+
+

Quarkus offers the following benefits:

+
+
+
    +
  • +

    fast turn-around cycles for developers
    +Save changes in your Java code and immediately test the results without restarting or waiting

    +
  • +
  • +

    faster start-up and less memory footprint
    +When building your app as native-images via GraalVM it gets highly optimized. As a result it starts up lightning fast and consumes much less memory. This is a great advantage for cloud deployment as well as for sustainability. You can find a performance comparison between Spring and Quarkus here.

    +
  • +
  • +

    clean and lean +As Quarkus was born as a cloud-native framework, it is very lightweight and does not carry much history and legacy.

    +
  • +
+
+
+
+
+

Cons

+
+
+

Quarkus has the following drawbacks:

+
+
+
    +
  • +

    less flexible
    +Quarkus is less flexible compared to spring or in other words it is more biased and coupled to specific implementations. However, the implementations just work and you have less things to choose and worry about. However, in case you want to integrate a specific or custom library you may hit limitations or lose support for native-images especially when that library is based on reflection. Therefore, check your requirements and technology stack early on when making your choice.

    +
  • +
  • +

    less established
    +Since Quarkus was born in 2019, it is modern but also less established. It will be easier to get developers for Spring, but we already consider Quarkus mature and established enough for building production-ready apps.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/quarkus/getting-started-for-spring-developers.html b/docs/devon4j/1.0/quarkus/getting-started-for-spring-developers.html new file mode 100644 index 00000000..7e7a6983 --- /dev/null +++ b/docs/devon4j/1.0/quarkus/getting-started-for-spring-developers.html @@ -0,0 +1,622 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Getting started with Quarkus for Spring developers

+
+
+

As a Spring developer, you have heard more and more about Quarkus: its pros and cons, its fast growth, etc. So, you decided to adopt/try Quarkus for your (next) project(s) and wonder where to go next and what you need to pay attention to when moving from Spring to Quarkus.

+
+
+

This guide tries to address exactly this concern. In the following, we will present you some main points you should be aware of when starting to develop with Quarkus, along with some useful sources.

+
+
+
    +
  1. +

    Quarkus is a fairly new Java toolkit. Thus, it is very well documented. It also provides a set of well-written technical guides that are a good starting point to get in touch and make the first steps with Quarkus. See here. It is an Open Source project licensed under the Apache License version 2.0. The source code is hosted on GitHub. If you have any question or concern, don’t hesitate to reach out to the Quarkus community.

    +
  2. +
  3. +

    Same as Spring Initializr, you can go to code.quarkus.io to create a new application. Also, check out our Template Quarkus Guide to have our recommendations on certain topics.

    +
  4. +
  5. +

    In Spring stack, we recommend structuring your application into multiple modules, known as our classic structure. Moving to Quarkus and the world of cloud-native, microservices where we build smaller applications compared to monoliths, we recommend keeping everything top-level and simple. Therefore, we propose the modern structure as a better fit.

    +
  6. +
  7. +

    Quarkus focuses not only on delivering top features but also on the developer experience. The Quarkus’s Live Coding feature automatically detects changes made to Java files, application configuration, static resources or even classpath dependency changes and recompiles and redeploys the changes. As that, it solves the problem of traditional Java development workflow, hence improves productivity.

    +
    +
    +
        Write Code → Compile → Deploy → Test Changes/ Refresh Browser/ etc → Repeat (traditional)
    +    Write Code → Test Changes/ Refresh Browser/ etc → Repeat (Quarkus)
    +
    +
    +
    +

    You can use this feature out of the box without any extra setup by running:

    +
    +
    +
    +
        mvn compile quarkus:dev
    +
    +
    +
    +

    Another highlight feature to speed up developing is the Quarkus’s Dev Mode with Dev Services, which can automatically provision unconfigured services in development and test mode. It means that if you include an extension and don’t configure it, Quarkus will automatically start the relevant service and wire up your application to use it, therefore will save you a lot of time setting up those services manually. In production mode, where the real configuration is provided, Dev Service will be disabled automatically.

    +
    +
    +

    Also in Dev Mode, you can access the Dev UI at /q/dev to browse endpoints offered by various extensions, conceptually similar to what a Spring Boot actuator might provide.

    +
    +
  8. +
  9. +

    Quarkus is made of a small core on which relies hundreds of extensions. In fact, the power of Quarkus is its extension mechanism. Think of these extensions as your project dependencies. You can add it per dependency manager such as maven or gradle.

    +
    +
    +
    mvn quarkus:list-extensions
    +mvn quarkus:add-extension -Dextensions="groupId:artifactId"
    +(or add it manually to pom.xml)
    +# or
    +gradle list-extensions
    +(add dependency to build.gradle)
    +
    +
    +
    +

    Like Spring Boot, Quarkus also has a vast ecosystem of extensions with commonly-used technologies.

    +
    + + ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Table 1. Example of common Quarkus extensions and the Spring Boot Starters with similar functionality (book: Quarkus for Spring Developer)
    Quarkus extensionSpring Boot Starter

    quarkus-resteasy-jackson

    spring-boot-starter-web

    +

    spring-boot-starter-webflux

    quarkus-resteasy-reactive-jackson

    spring-boot-starter-web

    +

    spring-boot-starter-webflux

    quarkus-hibernate-orm-panache

    spring-boot-starter-data-jpa

    quarkus-hibernate-orm-rest-datapanache

    spring-boot-starter-data-rest

    quarkus-hibernate-reactive-panache

    spring-boot-starter-data-r2dbc

    quarkus-mongodb-panache

    spring-boot-starter-data-mongodb

    +

    spring-boot-starter-data-mongodb-reactive

    quarkus-hibernate-validator

    spring-boot-starter-validation

    quarkus-qpid-jms

    spring-boot-starter-activemq

    quarkus-artemis-jms

    spring-boot-starter-artemis

    quarkus-cache

    spring-boot-starter-cache

    quarkus-redis-client

    spring-boot-starter-data-redis

    +

    spring-boot-starter-data-redis-reactive

    quarkus-mailer

    spring-boot-starter-mail

    quarkus-quartz

    spring-boot-starter-quartz

    quarkus-oidc

    spring-boot-starter-oauth2-resource-server

    quarkus-oidc-client

    spring-boot-starter-oauth2-client

    quarkus-smallrye-jwt

    spring-boot-starter-security

    +
    +

    A full list of all Quarkus extensions can be found here. Furthermore, you can check out the community extensions hosted by Quarkiverse Hub. Quarkus has some extensions for Spring API as well which is helpful while migrating from Spring to Quarkus.

    +
    + +
    +

    Besides extensions, which are officially maintained by Quarkus team, Quarkus allows adding external libraries too. While extensions can be integrated seamlessly into Quarkus as they can be processed at build time and be built in native mode with GraalVM, external dependencies might not work out of the box with native compilation. If that is the case, then you have to recompile them with the right GraalVM configuration to make them work.

    +
    +
  10. +
  11. +

    Quarkus’s design accounted for native compilation by default. A Quarkus native executable starts much faster and utilizes far less memory than a traditional JVM (see our performance comparison between Spring and Quarkus). To get familiar with building a native executable, configuring and running it, please check out our Native Image Guide. Be sure to test your code in both JVM and native mode.

    +
  12. +
  13. +

    Both Quarkus and Spring include testing frameworks based on JUnit and Mockito. Thus, by design, Quarkus enables test-driven development by detecting affected tests as changes are made and automatically rerun them in background. As that, it gives developer instant feedback, hence improves productivity. To use continuous testing, execute the following command:

    +
    +
    +
    mvn quarkus:dev
    +
    +
    +
  14. +
  15. +

    For the sake of performance optimization, Quarkus avoids reflection as much as possible, instead favoring static class binding. When building a native executable, it analyzes the call tree and removes all the classes/methods/fields that are not used directly. As a consequence, the elements used via reflection are not part of the call tree so they are dead code eliminated (if not called directly in other cases).

    +
    +

    A common example is the JSON libraries which typically use reflection to serialize the objects to JSON. If you use them out of the box, you might encounter some errors in native mode. So, be sure to register the elements for reflection explicitly. A How-to is provided by Quarkus Registering For Reflection with practical program snippets.

    +
    +
  16. +
+
+
+

A very good read on the topic is the e-book Quarkus for Spring Developers by Red Hat. Another good source for direct hands-on coding tutorial is Katacoda Quarkus for Spring Boot Developers

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/quarkus/getting-started-quarkus.html b/docs/devon4j/1.0/quarkus/getting-started-quarkus.html new file mode 100644 index 00000000..aecbca2f --- /dev/null +++ b/docs/devon4j/1.0/quarkus/getting-started-quarkus.html @@ -0,0 +1,843 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Quarkus Quickstart

+
+
+

This guide is a quickstart that shows how to create a Quarkus app and briefly presents the key functionalities that Quarkus provides and how to get started, whether as a beginner or an experienced developer.

+
+
+
+
+

Introduction to Quarkus

+
+
+

To get the first introduction to Quarkus you can read the Quarkus introduction guide. To get a short overview where you can find the important Quarkus related guides follow the chapter guide to the reader. +Also, there is a comparison of the advantages and disadvantages of a Quarkus application compared to the alternative framework Spring. +This comparison is supported by our performance comparison between Spring and Quarkus that shows the lower resource consumption and startup time of Quarkus applications.

+
+
+
+
+

Installation of Tools and Dependencies

+
+
+

First, we need to install some dependencies and tools before we can start programming. Our tool devonfw-ide comes with many development tools for you. +We need to install the following tools for this guide.

+
+
+
    +
  • +

    Maven

    +
  • +
  • +

    Java

    +
  • +
  • +

    any IDE (devonfw-ide supports Eclipse, Intellij and VScode)

    +
  • +
  • +

    Docker

    +
  • +
+
+
+

We recommend installing the devonfw-ide with the tools, but if you already got your system configured and the tools from above installed you can skip to Bootstrap a Quarkus Project otherwise we will show you how to setup and update your devonfw-ide.

+
+
+
devonfw-ide
+
    +
  1. +

    Install devonfw-ide
    +Follow the Setup to install the devonfw-ide with Java, Maven, Eclipse and VScode.

    +
    +
      +
    1. +

      Command to install Docker
      +devon docker setup

      +
    2. +
    +
    +
  2. +
  3. +

    Update devonfw-ide
    +We advise to update your already installed devonfw-ide and all tools because we are still working to improve devonfw-ide and there could be essential features for cloud development with Quarkus that you could be missing.

    +
  4. +
+
+
+

Use the commands devon ide update, devon ide update software and devon ide scripts to update devonfw-ide and all software that is installed.

+
+
+

Go to the main folder under workspaces of the devonfw-ide installation. +We will create the project there.

+
+
+
+
+

Bootstrap a Quarkus Project

+
+
+

Quarkus provides multiple ways to bootstrap a project. +The option to bootstrap a project via the command-line will be shown in the Quarkus getting started guide Bootstrap the project. +Quarkus also provides a project builder where you can select some extensions, the build tool for your project, and if you want some starter code. +This will deliver a project skeleton with the configured project dependencies and also contributes the information to compile the application natively. To get some recommendations on starter templates follow this guide templates recommendations.

+
+
+ + + + + +
+ + +
+

By creating a Quarkus project from command-line or with the project builder you get a different project structure and have to adapt it to the devon4j conventions shown in the next Chapter.

+
+
+
+
+

Project Structure

+
+

We provide a recommendation and guideline for a modern project structure to help organize your project into logically related modules. +You should follow the guide and also use it in your project, so you structure the project to the needs of modern cloud development and microservice architectures and also find similar modules faster in our example projects.

+
+
+
+
+
+

Introduction to Quarkus Functionality

+
+
+

Before we start programming you should have a first look at the functionality of Quarkus.

+
+
+
Quarkus functionality guides
+
    +
  1. +

    Getting started guide from Quarkus
    +The guide is a good functionality overview, it shows with a simple Greeting Service a brief introduction into the concepts like CDI, testing, dev mode, packaging and running the app.

    +
  2. +
  3. +

    From Spring to Quarkus
    +For experienced Spring developers that have already followed devon4j guidelines, you can read our guide to getting started with Quarkus for Spring developer, as it goes more into the differences that can give you a more detailed comparison to Spring.

    +
    +
      +
    1. +

      Migrate a Spring app to Quarkus
      +This guide shows, how to migrate a Spring application to a Quarkus application with devon4j conventions.

      +
    2. +
    +
    +
  4. +
+
+
+
+
+

Create a REST service

+
+
+

Now let’s create our first REST CRUD service with Quarkus. +We give you the options to use a guide and start to code the service yourself, +or just download a service that’s ready to use.

+
+
+
Options
+
    +
  1. +

    Create the service yourself
    +There is a good Quarkus guide for a simple JSON REST service that will guide you through your first application and will help you to define an endpoint with JAX-RS and an entity that will be managed by the service, and also show how to configure the JSON support.

    +
  2. +
  3. +

    Use an existing Quarkus project
    +You don’t want to code a service and just want to test some Quarkus functionalities? Just load a Quarkus sample project, provided for every existing quick start guide and the supported framework. +Our Team also provides some Quarkus applications that are working and can be loaded and tested.

    +
    +
      +
    • +

      reference project is a service that manages products. It contains the devon4j modern project structure, pagination, queries, a Postgres database, Jaeger tracing, Prometheus monitoring, SwaggerUI and support for Kubernetes deploy. +This project will be steadily improved and is used to showcase the abilities of Quarkus with devon4j.

      +
    • +
    • +

      minimal Quarkus project is just the Quarkus project from a getting started with Quarkus guide with a Greeting Service modified with the correct modern structure talked about in this chapter Project Structure

      +
    • +
    +
    +
  4. +
+
+
+
+
+

OpenAPI generation

+
+
+

We provide a guide with a short introduction to the OpenAPI specification with two plugins that are important in a Quarkus Context.

+
+ +
+

A more detailed usage guide to the Smallrye Plugin is provided by Quarkus OpenAPI and Swagger guide.

+
+
+
+
+

How to Integrate a Database

+
+
+

The next step for our REST service would be to integrate a database to store the objects of the entity.

+
+
+

With Quarkus, adding a database can be easy, because Quarkus can take over the build-up and connection process. +First, you should understand our guides to the concepts of how to work with data and then we will show how to integrate a database with Quarkus.

+
+
+
Data Principles Guides
+
    +
  1. +

    General devon4j JPA guide
    +To get an insight into the general JPA usage you should read the JPA guide which contains a general explanation of the Java Persistence API.

    +
  2. +
  3. +

    Difference to SpringData
    +If you have already worked with SpringData this is also partially supported with Quarkus, this is explained in more detail in this SpringData Guide.

    +
  4. +
+
+
+
Database Integration
+
    +
  1. +

    Quarkus zero config dev mode
    +Starting with database implementation in Quarkus, we recommend for beginners to use the DEV mode Zero Config Setup (Dev Services) this is especially great for testing the code without a database set up. +Quarkus does all the work for you and configures a database and creates the database and tables(schemas) for you.

    +
    +
      +
    1. +

      Configuration Properties
      +A list of all database configuration properties for the Dev services

      +
    2. +
    +
    +
  2. +
  3. +

    Integrate a simple Hibernate ORM database
    +The zero config setup only works with the Dev mode, it’s comfortable in the first phases of the creation of your service but if the goal is to also get a deployable version, you have to create your own database and integrate it. +This Quarkus guide shows, how to integrate a Hibernate ORM database with an example service.

    +
    +
      +
    1. +

      Configuration list for JDBC
      +A list of all configuration properties that are possible with a JDBC configuration

      +
    2. +
    +
    +
  4. +
  5. +

    Reactive CRUD application with Panache
    +Quarkus unifies reactive and imperative programming. +Reactive is an architectural principle to build robust, efficient, and concurrent applications. +For an introduction into reactive and how Quarkus enables it, follow this Quarkus reactive architecture article and also the reactive quickstart. +To get started with reactive and implement reactive methods you can follow the Quarkus reactive guide. +The reactive guide is using the Quarkus based implementation of a Hibernate ORM called Panache. +That implementation is not our first choice with devon4j and therefore not part of our recommendations, but to understand the reactive guide you can read the Hibernate ORM with Panache guide first to prevent possible problems following the guide.

    +
  6. +
+
+
+ + + + + +
+ + +
+

You need an installed Docker version for the zero config setup.

+
+
+
+
+
Database Migration
+
    +
  1. +

    Migration guide +For schema-based databases, we recommend migrating databases with Flyway. +In that case, our general migration guide can give you an overview if you are not familiar with migration.

    +
    +
      +
    1. +

      Flyway guide for Quarkus +This Quarkus guide will show how to work with the Flyway extension in a Quarkus application. +This should be used if you start your own database and do not leave the creation to quarkus.

      +
    2. +
    +
    +
  2. +
+
+
+
+
+

Testing a Quarkus Application

+
+
+

After we have built the service, we have to verify it with some tests. +We will give you some guidelines to implement some test cases.

+
+
+
Testing Guides
+
    +
  1. +

    General testing guide
    +For users that aren’t familiar with the devon4j testing principles, we created a general best practices and recommendations guide for testing.

    +
    +
      +
    1. +

      Our guide for testing with Quarkus +In addition, we also provide a guide that specifically addresses the testing of a Quarkus application.

      +
    2. +
    +
    +
  2. +
+
+
+

Most of the Quarkus applications are already equipped with a basic Test and also our reference project provides some test cases, if you want to improve and extends the tests, you can also follow the large Quarkus guide for testing.

+
+
+
+
+

Packaging of a Quarkus application and creation of a native executable

+
+
+

Quarkus applications can be packed into different types. The following link will show how to build and also give you a short explanation of the characteristics of these files.

+
+
+
Package types
+
    +
  1. +

    fast-jar

    +
  2. +
  3. +

    mutable-jar

    +
  4. +
  5. +

    uber-jar

    +
  6. +
  7. +

    native executable

    +
  8. +
+
+
+

To pack an application use the command mvn package and Quarkus will generate the output in the /target folder. For the native executables, the command needs more parameters but this is explained in the link above.

+
+
+

Configure the Output with these configuration properties

+
+
+
+
+

Create and build a Docker Image

+
+
+

Quarkus supports Jib, S2I and Docker for building images. We focus on building a Quarkus App with Docker. +You get a created Dockerfile from Quarkus in the src/main/docker folder of any project generated from Quarkus. There are multiple Dockerfiles.

+
+
+
Dockerfiles
+
    +
  1. +

    Dockerfile.jvm
    +Dockerfile for a Quarkus application in JVM mode, running in a Red Hat Universal Base Image 8 Minimal container.

    +
  2. +
  3. +

    Dockerfile.legacy-jar
    +DockerFile for Quarkus application in JVM mode with the legacy jar running in Red Hat Universal Base Image 8 Minimal Container.

    +
  4. +
  5. +

    Dockerfile.native
    +Dockerfile using the native executable running in Red Hat Universal Base Image 8 Minimal container.

    +
  6. +
  7. +

    Dockerfile.native-distroless +The native file will run in a Distroless container. Distroless images are very small containers with just the application and runtime dependencies and without the other programs coming with a Linux distribution.

    +
  8. +
+
+
+
+
+

For more information to the different executables go back to the chapter Packaging of a Quarkus application and creation of a native executable

+
+
+
+
+

To simply build and run a Docker image you can follow the instructions Quarkus provides for every Dockerfile in the comments block.

+
+
+

Docker commands example from the JVM Dockerfile of our reference project

+
+
+
+
####
+# This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode
+#
+# Before building the container image run:
+#
+# ./mvnw package
+#
+# Then, build the image with:
+#
+# docker build -f src/main/docker/Dockerfile.jvm -t quarkus/quarkus-basics-jvm .
+#
+# Then run the container using:
+#
+# docker run -i --rm -p 8080:8080 quarkus/quarkus-basics-jvm
+#
+# If you want to include the debug port into your docker image
+# you will have to expose the debug port (default 5005) like this :  EXPOSE 8080 5005
+#
+# Then run the container using :
+#
+# docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/quarkus-basics-jvm
+#
+###
+
+
+
+

Quarkus is also able to build the image while packaging the application so you don’t have to execute the command from above. +To perform Docker builds with the generated Dockerfiles from above you need to add the following extension to your project with the command mvn quarkus:add-extension -Dextensions="container-image-docker".

+
+
+

Also, you have to set quarkus.container-image.build=true; you can add this to your application.properties or just append it to the packaging command like this: ./mvnw package -Dquarkus.container-image.build=true.

+
+
+

If your needs exceed the instructions given by the file, we recommend following the Docker getting started guide to get familiar with Docker and customize the Dockerfiles according to your needs. +To specify your container build, you can use the general container image configuration properties and the Docker image configuration properties when building and running Docker images.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/quarkus/guide-authentication-quarkus.html b/docs/devon4j/1.0/quarkus/guide-authentication-quarkus.html new file mode 100644 index 00000000..7f10ccf8 --- /dev/null +++ b/docs/devon4j/1.0/quarkus/guide-authentication-quarkus.html @@ -0,0 +1,455 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Quarkus Authentication

+
+
+

Quarkus supports different authentication mechanisms through different extensions. For example:

+
+
+ +
+
+

For mix authentication, see here.

+
+
+

For further details see Quarkus - Security architecture and guides. Quarkus also provides a compatibility layer for Spring Security in the form of the spring-security extension.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/quarkus/guide-beanmapping-quarkus.html b/docs/devon4j/1.0/quarkus/guide-beanmapping-quarkus.html new file mode 100644 index 00000000..196a3dfa --- /dev/null +++ b/docs/devon4j/1.0/quarkus/guide-beanmapping-quarkus.html @@ -0,0 +1,627 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Bean mapping with Quarkus

+
+
+

This guide will show bean-mapping in particular for a Quarkus application. We recommend using MapStruct with a Quarkus application because the other bean-mapper frameworks use Java reflection. It is not supported in GraalVM right now and causes problems when building native applications. MapStruct is a code generator that greatly simplifies the implementation of mappings between Java bean types based on a convention over configuration approach. The mapping code will be generated at compile-time and uses plain method invocations and thus is fast, type-safe, and easy to understand. MapStruct has to be configured to not use Java reflection, as will be shown in this guide.

+
+
+

You can find the official +MapStruct reference guide and a general introduction to MapStruct from Baeldung.

+
+
+
+
+

MapStruct Dependency

+
+
+

To get access to MapStruct we have to add the dependency to our POM.xml:

+
+
+
+
<dependency>
+  <groupId>org.mapstruct</groupId>
+  <artifactId>mapstruct</artifactId>
+  <version>1.4.2.Final</version>
+  <scope>provided</scope>
+</dependency>
+
+
+
+

MapStruct provides an annotation processor that also has to be added to the POM.xml

+
+
+
+
<plugin>
+	<groupId>org.apache.maven.plugins</groupId>
+	<artifactId>maven-compiler-plugin</artifactId>
+	<version>3.8.1</version>
+	<configuration>
+		<source>1.8</source>
+		<target>1.8</target>
+		<annotationProcessorPaths>
+			<path>
+				<groupId>org.mapstruct</groupId>
+				<artifactId>mapstruct-processor</artifactId>
+				<version>1.4.2.Final</version>
+				</path>
+		</annotationProcessorPaths>
+	</configuration>
+</plugin>
+
+
+
+

MapStruct takes advantage of generated getters, setters, and constructors from the Lombok library, follow this Lombok with Mapstruct guide to get Lombok with Mapstruct working.

+
+
+
+
+

MapStruct Configuration

+
+
+

We already discussed the benefits of dependency injection and MapStruct supports CDI with EJB, spring, and jsr330. The default retrieving method for a mapper is a factory that uses reflections and should be avoided. The component model should be set to CDI, as this will allow us to easily inject the generated mapper implementation. The component model can be configured in multiple ways.

+
+
+

Simple Configuration

+
+

Add the attribute componentModel to the @Mapper annotation in the mapper interface.

+
+
+
+
@Mapper(componentModel = "cdi")
+public interface ProductMapper{
+  ...
+}
+
+
+
+
+

MapperConfig Configuration

+
+

Create a shared configuration that can be used for multiple mappers. Implement an interface and use the annotation @MapperConfig for the class. You can define all configurations in this interface and pass the generated MapperConfig.class with the config attribute to the mapper. The MapperConfig also defines the InjectionStrategy and MappingInheritanceStrategy; both will be explained later. +A list of all configurations can be found here.

+
+
+
+
@MapperConfig(
+  componentModel = "cdi",
+  mappingInheritanceStrategy = MappingInheritanceStrategy.AUTO_INHERIT_FROM_CONFIG,
+  injectionStrategy = InjectionStrategy.CONSTRUCTOR
+)
+public interface MapperConfig{
+}
+
+
+
+
+
@Mapper( config = MapperConfig.class )
+public interface ProductMapper{
+  ...
+}
+
+
+
+

Any attributes not given via @Mapper will be inherited from the shared configuration MapperConfig.class.

+
+
+
+

Configuration via annotation processor options

+
+

The MapStruct code generator can be configured using annotation processor options. +You can pass the options to the compiler while invoking javac directly, or add the parameters to the maven configuration in the POM.xml

+
+
+

We are also using the constructor injection strategy to avoid field injection and potential reflection; it will also simplify our tests. +The option to pass the parameter to the annotation processor in the POM.xml is used and can be inspected in our Quarkus reference project.

+
+
+

A list of all annotation processor options can be found here.

+
+
+
+
+
+

Basic Bean-Mapper Usage

+
+
+

To use the mapper we have to implement the mapper interface and the function prototypes with a @Mapper annotation.

+
+
+
+
@Mapper
+public interface ProductMapper {
+
+  ProductDto map(ProductEntity model);
+
+  ProductEntity create(NewProductDto dto);
+}
+
+
+
+

The MapStruct annotation processor will generate the implementation for us under /target/generated-sources/, we just need to tell it that we would like to have a method that accepts a ProductEntity entity and returns a new ProductDto DTO.

+
+
+

The generated mapper implementation will be marked with the @ApplicationScoped annotation and thus can be injected into fields, constructor arguments, etc. using the @Inject annotation:

+
+
+
+
public class ProductRestService{
+
+  @Inject
+  ProductMapper mapper;
+}
+
+
+
+

That is the basic usage of a Mapstruct mapper. In the next chapter, we go a bit into detail and show some more configurations.

+
+
+
+
+

Advanced Bean-Mapper Usage

+
+
+

Let´s assume our Product entity and the ProductDto has some different named property that should be mapped. Add a mapping annotation to map the property type from Product to kind from ProductDto. We define the source name of the property and the target name.

+
+
+
+
@Mapper
+public interface ProductMapper {
+  @Mapping(target = "kind", source = "type")
+  ProductDto map(ProductEntity entity);
+
+  @InheritInverseConfiguration(name = "map" )
+  ProductEntity create(ProductDto dto);
+}
+
+
+
+

For bi-directional mappings, we can indicate that a method shall inherit the inverse configuration of the corresponding method with the @InheritInverseConfiguration. You can omit the name parameter if the result type of method A is the same as the +single-source type of method B and if the single-source type of A is the same as the result type of B. If multiple methods apply, the attribute name is needed. Specific mappings from the inversed method can (optionally) be overridden, ignored, and set to constants or expressions.

+
+
+

The mappingInheritanceStrategy can be defined as shown in MapStruct Configuration; the existing options can be found here.

+
+
+

Not always a mapped attribute has the same type in the source and target objects. For instance, an attribute may be of type int in the source bean but of type Long in the target bean.

+
+
+

Another example are references to other objects which should be mapped to the corresponding types in the target model. E.g. the class ShoppingCart might have a property content of the type Product which needs to be converted into a ProductDto object when mapping a ShoppingCart object to ShoppingCartDto. For these cases, it’s useful to understand how MapStruct is converting the data types and the object references.

+
+
+

Also, the Chapter for nested bean mappings will help to configure MapStruct to map arbitrary deep object graphs.

+
+
+

You can study running MapStruct implementation examples given by MapStruct or in our Quarkus reference project

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/quarkus/guide-native-image.html b/docs/devon4j/1.0/quarkus/guide-native-image.html new file mode 100644 index 00000000..60252b1b --- /dev/null +++ b/docs/devon4j/1.0/quarkus/guide-native-image.html @@ -0,0 +1,477 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Building a native image

+
+
+

Quarkus provides the ability to create a native executable of the application called native image. +Unlike other Java-based deployments, such a native image will only run on the architecture and operating system it is compiled for. +Also, no JVM is needed to run the native image. +This improves the startup time, performance and efficiency. +A distribution of GraalVM is needed. +You can find the differences between the available distributions here.

+
+
+

To build your Quarkus app as a native image, you have two options that are described in the following sections.

+
+
+
+
+

Build a native executable with GraalVM

+
+
+

To build a Quarkus application you can install GraalVM locally on your machine as described below. +Therefore read the basic Quarkus application chapter, or clone the example project provided by devonfw. +Follow this chapter from the Quarkus Guide for building a native executable.

+
+
+

Installing GraalVM

+
+

A native image can be created locally or through a container environment. +To create a native image locally, an installed and configured version of GraalVM is needed. You can follow the installation guide from Quarkus or the guide provided by GraalVM for this.

+
+
+
+
+
+

Build a native executable with GraalVM through container environment

+
+
+

In order to make the build of native images more portable, you can also use your container environment and run the GraalVM inside a container (typically Docker). +You can simply install Docker with your devonfw-ide distribution just follow this description Docker with devonfw-ide. +Follow this chapter to build a native Linux image through container runtime.

+
+
+
+
+

Configuring the native executable

+
+
+

A list of all configuration properties for a native image can be found here.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/quarkus/guide-quarkus-configuration.html b/docs/devon4j/1.0/quarkus/guide-quarkus-configuration.html new file mode 100644 index 00000000..e5aa6601 --- /dev/null +++ b/docs/devon4j/1.0/quarkus/guide-quarkus-configuration.html @@ -0,0 +1,518 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Configuration

+
+
+

Quarkus provides a comprehensive guide to configuration here.

+
+
+
+
+

External Application Configuration

+
+
+

Database Configuration

+
+

In Quarkus, Hibernate is provided by the quarkus-hibernate-orm extension. Ensure the extension is added to your pom.xml as follows:

+
+
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-hibernate-orm</artifactId>
+</dependency>
+
+
+
+

You additionally have to add the respective JDBC driver extension to your pom.xml. There are different drivers for different database types. See Quarkus Hibernate guide.

+
+
+
+

Database System and Access

+
+

You need to configure which database type you want to use, as well as the location and credentials to access it. The defaults are configured in application.properties. The file should therefore contain the properties as in the given example:

+
+
+
+
quarkus.datasource.jdbc.url=jdbc:postgresql://database.enterprise.com/app
+quarkus.datasource.username=appuser01
+quarkus.datasource.password=************
+quarkus.datasource.db-kind=postgresql
+
+# drop and create the database at startup (use only for local development)
+quarkus.hibernate-orm.database.generation=drop-and-create
+
+
+
+
+

Database Logging

+
+

Add the following properties to application.properties to enable logging of database queries for debugging purposes.

+
+
+
+
quarkus.hibernate-orm.log.sql=true
+quarkus.hibernate-orm.log.format-sql=true
+
+#Logs SQL bind parameters. Setting it to true is obviously not recommended in production.
+quarkus.hibernate-orm.log.bind-parameters=true
+
+
+
+
+
+
+

Security

+
+
+

Password Encryption

+
+

There are also some libraries to make Jasypt work with Quarkus, such as Camel Quarkus Jasypt. Unfortunately, this feature only works in JVM mode and not in native mode.

+
+
+

Quarkus supports many credential providers with official extensions such as HashiCorp Vault.

+
+
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-vault</artifactId>
+</dependency>
+
+
+
+

A detailed guide can be found here and here.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/quarkus/quarkus-template.html b/docs/devon4j/1.0/quarkus/quarkus-template.html new file mode 100644 index 00000000..7eb7c21b --- /dev/null +++ b/docs/devon4j/1.0/quarkus/quarkus-template.html @@ -0,0 +1,564 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Quarkus template

+
+
+

Quarkus Code Generator is providing a lot of alternatives on technologies and libraries to be integrated. Detailed guides to multiple topics can be found here.

+
+
+

Thus, the large selection can make it difficult for developers to get started. +Therefore, in this guide, we aim to provide a general suggestion on basic frameworks, libraries and technologies to make it easy for developers to begin with.

+
+
+

With that said, please take this as a recommendation and not a compulsion. Depending on your project requirements, you might have to use another stack in comparison to what is listed below.

+
+
+

If you are new to Quarkus, consider checking out their getting started guide to have an overview about how to create, run, test as well as package a Quarkus application. Another recommended source to get started is the Katacoda tutorials.

+
+
+

Basic templates

+
+
    +
  1. +

    simple REST API (go to code.quarkus.io)

    +
  2. +
  3. +

    simple REST API with monitoring (go to code.quarkus.io)

    +
  4. +
+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Topic-based suggested implementation
TopicDetailSuggested implementationNote

runtime

servlet-container

Undertow

component management

dependency injection

ArC

ArC is based on JSR 365. It also provides interceptors that can be used to implement the same functionality as AOP provides

configuration

SmallRye Config

SmallRye Config is an implementation of Eclipse MicroProfile Config. It also supports YAML configuration files

persistence

OR-mapper

Hibernate ORM, Spring Data JPA

Hibernate ORM is the de facto standard JPA implementation and works perfectly in Quarkus. Quarkus also provides a compatibility layer for Spring Data JPA repositories in the form of the spring-data-jpa extension.

batch

Quarkus JBeret Extension is a non-official extension, which is hosted in the Quarkiverse Hub. It is an implementation of JSR 352.

service

REST services

RESTEasy

RESTEasy is a portable implementation of the new JCP specification JAX-RS JSR-311. It can be documented via Swagger OpenAPI.

async messaging

SmallRye Reactive Messaging, Vert.x EventBus

SmallRye Reactive Messaging is an implementation of the Eclipse MicroProfile Reactive Messaging specification 1.0. You can also utilize SmallRye Reactive Messaging in your Quarkus application to interact with Apache Kafka.

marshalling

RESTEasy Jackson, RESTEasy JSON-B, RESTEasy JAXB, RESTEasy Multipart

cloud

kubernetes

Kubernetes

deployment

Minikube, k3d

Minikube is quite popular when a Kubernetes cluster is needed for development purposes. Quarkus supports this with the quarkus-minikube extension.

logging

framework

JBoss Log Manager and the JBoss Logging facade

Internally, Quarkus uses JBoss Log Manager and the JBoss Logging facade. Logs from other supported Logging API (JBoss Logging, SLF4J, Apache Commons Logging) will be merged.

validation

framework

Hibernate Validator/Bean Validation (JSR 380)

security

authentication & authorization

JWT authentication

Quarkus supports various security mechanisms. Depending on your protocol, identity provider you can choose the necessary extensions such as quarkus-oidc quarkus-smallrye-jwt quarkus-elytron-security-oauth2.

monitoring

framework

Micrometer Metrics, SmallRye Metrics

SmallRye Metrics is an implementation of the MicroProfile Metrics specification. Quarkus also offers various extensions to customize the metrics.

health

SmallRye Health

SmallRye Health is an implementation of the MicroProfile Health specification.

fault tolerance

SmallRye Fault Tolerance

SmallRye Fault Tolerance is an implementation of the MicroProfile Fault Tolerance specification.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/spring.html b/docs/devon4j/1.0/spring.html new file mode 100644 index 00000000..554bb5d8 --- /dev/null +++ b/docs/devon4j/1.0/spring.html @@ -0,0 +1,523 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Spring

+
+
+

Spring is the most famous and established Java framework. +It is fully supported by devonfw as an option and alternative to quarkus.

+
+
+
+
+

Guide to the Reader

+
+
+

Dependent on the intention you are reading this document, you might be most interested in the following chapters:

+
+
+
    +
  • +

    If you are not yet familiar with Spring, you may be interested in pros and cons of Spring. Also take a look at the official Spring website.

    +
  • +
  • +

    If you already have experience developing with Spring but are new to devon4j, take a look at devon4j’s recommendations on general best practices. Check out the chapters on architecture design, project structuring and coding conventions. Follow the referenced links to go deeper into a topic.

    +
  • +
  • +

    If you have already developed with devon4j and Spring and need more information on a specific topic, check out the devon4j guides for Spring. If you don’t find what you are looking for there, check out the general section. devon4j uses general solutions for Java, so solutions for both Spring and Quarkus are documented there.

    +
  • +
  • +

    If you want to get started or create your first Spring application using devon4j, check out the guide about creating a new application or the Jump the Queue and My Thai Star reference applications.

    +
  • +
+
+
+
+
+

Pros

+
+
+

Spring offers the following benefits:

+
+
+
    +
  • +

    highly flexible
    +Spring is famous for its great flexibility. You can customize and integrate nearly everything.

    +
  • +
  • +

    well established
    +While JEE application servers including very expensive commercial products turned out to be a dead-end, spring has guided projects through the changing trends of IT throughout decades. It may be the framework with the longest history track and popularity. As a result you can easily find developers, experts, books, articles, etc. about spring.

    +
  • +
  • +

    non-invasive and not biased
    +Spring became famous for its non-invasive coding based on patterns instead of hard dependencies. It gives you a lot of freedom and avoids tight coupling of your (business) code.

    +
  • +
+
+
+

See Why Spring? for details.

+
+
+
+
+

Cons

+
+
+

Spring has the following drawbacks:

+
+
+
    +
  • +

    history and legacy
    +Due to the pro of its long established history, spring also carries a lot of legacy. As a result there are many ways to do the same thing while some options may be discouraged. Developers needs some guidance (e.g. via devon4j) as they may enter pitfalls and dead-ends when choosing the first solution they found on google or stackoverflow.

    +
  • +
  • +

    lost lead in cloud-native
    +While for the last decades spring was leading innovation in Java app development, it seems that with the latest trends and shift such as cloud-native, they have been overtaken by frameworks like quarkus. However, spring is trying to catch up with spring-native.

    +
  • +
+
+
+
+
+

Spring-Boot

+
+
+

Spring-boot is a project and initiative within the spring-ecosystem that brought a lot of innovation and simplification into app development on top of spring. +As of today we typically use the terms spring and spring-boot rather synonymously as we always use spring together with spring-boot.

+
+
+
+
+

Spring-Native

+
+
+

Spring-native adds cloud-native support to the spring ecosystem and allows to build a spring app as cloud-native image via GraalVM. +This feature is currently beta. +You may also consider quarkus if you are interested in building cloud-native images.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/spring/guide-authentication-spring.html b/docs/devon4j/1.0/spring/guide-authentication-spring.html new file mode 100644 index 00000000..b7f5b2a3 --- /dev/null +++ b/docs/devon4j/1.0/spring/guide-authentication-spring.html @@ -0,0 +1,502 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Spring Security

+
+
+

We use spring-security as a framework for authentication purposes.

+
+
+

Therefore, you need to provide an implementation of WebSecurityConfigurerAdapter:

+
+
+
+
@Configuration
+@EnableWebSecurity
+public class MyWebSecurityConfig extends WebSecurityConfigurerAdapter {
+
+  @Inject
+  private UserDetailsService userDetailsService;
+  ...
+  public void configure(HttpSecurity http) throws Exception {
+    http.userDetailsService(this.userDetailsService)
+        .authorizeRequests().antMatchers("/public/**").permitAll()
+        .anyRequest().authenticated().and()
+        ...
+  }
+}
+
+
+
+

As you can see, spring-security offers a fluent API for easy configuration. You can simply add invocations like formLogin().loginPage("/public/login") or httpBasic().realmName("MyApp"). Also CSRF protection can be configured by invoking csrf(). +For further details see spring Java-config for HTTP security.

+
+
+

Further, you need to provide an implementation of the UserDetailsService interface. +A good starting point comes with our application template.

+
+
+

For authentication via JSON Web Token (JWT), check the JWT Spring-Starter.

+
+
+

Mix authentication should be avoided where possible. However, when needed, you can find a solution +here.

+
+
+

Preserve original request anchors after form login redirect

+
+

Spring Security will automatically redirect any unauthorized access to the defined login-page. After successful login, the user will be redirected to the original requested URL. The only pitfall is, that anchors in the request URL will not be transmitted to server and thus cannot be restored after successful login. Therefore the devon4j-security module provides the RetainAnchorFilter, which is able to inject javascript code to the source page and to the target page of any redirection. Using javascript this filter is able to retrieve the requested anchors and store them into a cookie. Heading the target URL this cookie will be used to restore the original anchors again.

+
+
+

To enable this mechanism you have to integrate the RetainAnchorFilter as follows: +First, declare the filter with

+
+
+
    +
  • +

    storeUrlPattern: a regular expression matching the URL, where anchors should be stored

    +
  • +
  • +

    restoreUrlPattern: a regular expression matching the URL, where anchors should be restored

    +
  • +
  • +

    cookieName: the name of the cookie to save the anchors in the intermediate time

    +
  • +
+
+
+

You can easily configure this as code in your WebSecurityConfig as following:

+
+
+
+
RetainAnchorFilter filter = new RetainAnchorFilter();
+filter.setStoreUrlPattern("http://[^/]+/[^/]+/login.*");
+filter.setRestoreUrlPattern("http://[^/]+/[^/]+/.*");
+filter.setCookieName("TARGETANCHOR");
+http.addFilterBefore(filter, UsernamePasswordAuthenticationFilter.class);
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/spring/guide-beanmapping-spring.html b/docs/devon4j/1.0/spring/guide-beanmapping-spring.html new file mode 100644 index 00000000..9c824a09 --- /dev/null +++ b/docs/devon4j/1.0/spring/guide-beanmapping-spring.html @@ -0,0 +1,513 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Bean Mapping in devon4j-spring

+
+
+

We have developed a solution that uses a BeanMapper that allows to abstract from the underlying implementation. As mentioned in the general bean mapping guide, we started with Dozer a Java Bean to Java Bean mapper that recursively copies data from one object to another. Now we recommend using Orika. This guide will show an introduction to Orika and Dozer bean-mapper.

+
+
+
+
+

Bean-Mapper Dependency

+
+
+

To get access to the BeanMapper we have to use either of the below dependency in our POM:

+
+
+
Orika
+
+
<dependency>
+    <groupId>com.devonfw.java.modules</groupId>
+    <artifactId>devon4j-beanmapping-orika</artifactId>
+    <version>2020.12.002</version>
+</dependency>
+
+
+
+
Dozer
+
+
<dependency>
+    <groupId>com.devonfw.java.modules</groupId>
+    <artifactId>devon4j-beanmapping-dozer</artifactId>
+    <version>2020.12.002</version>
+</dependency>
+
+
+
+
+
+

Bean-Mapper Configuration

+
+
+

Bean-Mapper Configuration using Dozer

+
+

The BeanMapper implementation is based on an existing open-source bean-mapping framework. +In case of Dozer the mapping is configured src/main/resources/config/app/common/dozer-mapping.xml.

+
+
+

See the my-thai-star dozer-mapping.xml as an example. +Important is that you configure all your custom datatypes as <copy-by-reference> tags and have the mapping from PersistenceEntity (ApplicationPersistenceEntity) to AbstractEto configured properly:

+
+
+
+
 <mapping type="one-way">
+    <class-a>com.devonfw.module.basic.common.api.entity.PersistenceEntity</class-a>
+    <class-b>com.devonfw.module.basic.common.api.to.AbstractEto</class-b>
+    <field custom-converter="com.devonfw.module.beanmapping.common.impl.dozer.IdentityConverter">
+      <a>this</a>
+      <b is-accessible="true">persistentEntity</b>
+    </field>
+</mapping>
+
+
+
+
+
+
+

Bean-Mapper Configuration using Orika

+
+
+

Orika with devonfw is configured by default and sets some custom mappings for GenericEntity.java to GenericEntityDto.java. To specify and customize the mappings you can create the class BeansOrikaConfig.java that extends the class BaseOrikaConfig.java from the devon4j.orika package. To register a basic mapping, register a ClassMap for the mapperFactory with your custom mapping. Watch the example below and follow the basic Orika mapping configuration guide and the Orika advanced mapping guide.

+
+
+

Register Mappings:

+
+
+
+
mapperFactory.classMap(UserEntity.class, UserEto.class)
+			.field("email", "email")
+			.field("username", "name")
+			.byDefault()
+			.register();
+
+
+
+
+
+

Bean-Mapper Usage

+
+
+

Then we can get the BeanMapper via dependency-injection what we typically already provide by an abstract base class (e.g. AbstractUc). Now we can solve our problem very easy:

+
+
+
+
...
+UserEntity resultEntity = ...;
+...
+return getBeanMapper().map(resultEntity, UserEto.class);
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/spring/guide-cors-spring.html b/docs/devon4j/1.0/spring/guide-cors-spring.html new file mode 100644 index 00000000..80c95cb2 --- /dev/null +++ b/docs/devon4j/1.0/spring/guide-cors-spring.html @@ -0,0 +1,502 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

CORS configuration in Spring

+
+ +
+
+
+

Dependency

+
+
+

To enable the CORS support from the server side for your devon4j-Spring application, add the below dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-security-cors</artifactId>
+</dependency>
+
+
+
+
+
+

Configuration

+
+
+

Add the below properties in your application.properties file:

+
+
+
+
#CORS support
+security.cors.spring.allowCredentials=true
+security.cors.spring.allowedOriginPatterns=*
+security.cors.spring.allowedHeaders=*
+security.cors.spring.allowedMethods=OPTIONS,HEAD,GET,PUT,POST,DELETE,PATCH
+security.cors.pathPattern=/**
+
+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
AttributeDescriptionHTTP Header

allowCredentials

Decides whether the browser should include any cookies associated with the request (true if cookies should be included).

Access-Control-Allow-Credentials

allowedOrigins

List of allowed origins (use * to allow all origins).

Access-Control-Allow-Origin

allowedMethods

List of allowed HTTP request methods (OPTIONS, HEAD, GET, PUT, POST, DELETE, PATCH, etc.).

-

allowedHeaders

List of allowed headers that can be used during the request (use * to allow all headers requested by the client)

Access-Control-Allow-Headers

pathPattern

Ant-style pattern for the URL paths where to apply CORS. Use "/**" to match all URL paths.

+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/spring/guide-devon4j-spring-repository.html b/docs/devon4j/1.0/spring/guide-devon4j-spring-repository.html new file mode 100644 index 00000000..d666ce37 --- /dev/null +++ b/docs/devon4j/1.0/spring/guide-devon4j-spring-repository.html @@ -0,0 +1,500 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Spring Data in devon4j-spring

+
+
+

In devon4j-spring, spring-data-jpa is provided via devon4j-starter-spring-data-jpa extension, which provides advanced integration (esp. for QueryDSL).

+
+
+

Spring Data uses a fragment approach to implement custom functionality for repositories. For Spring applications, devon4j provides a solution that works without this fragment approach.

+
+
+

The repository must extend DefaultRepository, which uses GenericRepositoryImpl as implementation. The QueryUtil helper class provides methods to support pagination and query creation.

+
+
+
+
+

Dependency

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-spring-data-jpa</artifactId>
+</dependency>
+
+
+
+
+
+

Example

+
+
+

The following example shows how to write such a repository. The example has the same functionality as the example in the Spring Data guide:

+
+
+
+
public interface ExampleRepository extends DefaultRepository<ExampleEntity> {
+
+  @Query("SELECT example FROM ExampleEntity example" //
+      + " WHERE example.name = :name")
+  List<ExampleEntity> findByName(@Param("name") String name);
+
+  @Query("SELECT example FROM ExampleEntity example" //
+      + " WHERE example.name = :name")
+  Page<ExampleEntity> findByNamePaginated(@Param("name") String name, Pageable pageable);
+
+  default Page<ExampleEntity> findByCriteria(ExampleSearchCriteriaTo criteria) {
+    ExampleEntity alias = newDslAlias();
+    JPAQuery<ExampleEntity> query = newDslQuery(alias);
+    String name = criteria.getName();
+    if ((name != null) && !name.isEmpty()) {
+      QueryUtil.get().whereString(query, $(alias.getName()), name, criteria.getNameOption());
+    }
+    return QueryUtil.get().findPaginated(criteria.getPageable(), query, false);
+  }
+
+}
+
+
+
+
+
+

Further examples

+
+
+

You can also read the JUnit test-case DefaultRepositoryTest that is testing an example +FooRepository.

+
+
+
+
+

Auditing

+
+
+

In case you need auditing, you only need to extend DefaultRevisionedRepository instead of DefaultRepository. The auditing methods can be found in GenericRevisionedRepository.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/spring/guide-jwt-spring.html b/docs/devon4j/1.0/spring/guide-jwt-spring.html new file mode 100644 index 00000000..7e2473b9 --- /dev/null +++ b/docs/devon4j/1.0/spring/guide-jwt-spring.html @@ -0,0 +1,582 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

JWT Spring-Starter

+
+ +
+
+
+

Keystore

+
+
+

A KeyStore is a repository of certificates and keys (public key, private key, or secret key). They can be used for TLS transport, for encryption and decryption as well as for signing. +For demonstration you might create a keystore with openssl, with the following commands:

+
+
+
+
openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem -days 365
+openssl pkcs12 -export -in cert.pem -inkey key.pem -out example.p12
+
+
+
+

For Java tooling you may also try the following instead:

+
+
+
+
keytool -genkeypair -alias devonfw -keypass "password" -storetype PKCS12 -keyalg RSA -keysize 4096 -storepass "password" -keystore keystore.pkcs
+
+
+
+ + + + + +
+ + +Please use reasonable passwords instead of password, which should be obvious. Also for the alias the value devonfw is just an example. +
+
+
+
+
+

JWT Dependency

+
+
+

To use JWT support from devon4j with spring-boot you have to add following required dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-security-jwt</artifactId>
+</dependency>
+
+
+
+
+
+

Configuration

+
+
+

The following properties need to be configured in your application.properties file:

+
+
+
+
# location of the keystore file, can be any spring resource (such as file or classpath URIs)
+security.keystore.location=classpath:config/keystore.pkcs
+# type of keystore e.g. "PKCS12" (recommended), "JKS", or "JCEKS"
+security.keystore.type=PKCS12
+# password the keystore is secured with. Consider using password encryption as described in devon4j configuration guide
+security.keystore.password=password
+# the algorithm for encryption/decryption and signing - see io.jsonwebtoken.SignatureAlgorithm
+security.authentication.jwt.algorithm=RS256
+# alias of public/private key in keystore (for validation only public key is used, for creation private key is required)
+security.authentication.jwt.alias=devonfw
+# the following properties are used if you are validating JWTs (e.g. via JwtAuthenticationFilter)
+security.authentication.jwt.validation.expiration-required=false
+security.authentication.jwt.validation.max-validity=42h
+security.authentication.jwt.validation.not-before-required=false
+# the following properties are only used if you are issuing JWTs (e.g. via JwtLoginFilter)
+security.authentication.jwt.creation.add-issued-at=true
+security.authentication.jwt.creation.validity=4h
+security.authentication.jwt.creation.not-before-delay=1m
+# the following properties enable backward compatibility for devon4j <= 2021.04.002
+# after microprofile JWT is used by default since 2021.04.003
+#security.authentication.jwt.claims.access-controls-name=roles
+#security.authentication.jwt.claims.access-controls-array=false
+
+
+
+

See also JwtConfigProperties for details about configuration.

+
+
+
+
+

Authentication with JWT via OAuth

+
+
+

The authentication with JWT via OAuth (HTTP header), will happen via JwtAuthenticationFilter that is automatically added by devon4j-starter-security-jwt via JwtAutoConfiguration. +With the starter and auto-configuration we want to make it as easy as possible for you. +In case you would like to build a server app that e.g. wants to issue JWTs but does not allow authentication via JWT itself, you can use devon4j-security-jwt as dependency instead of the starter and do the spring config yourself (pick and choose from JwtAutoConfiguration).

+
+
+

To do this, you need to add the following changes in your BaseWebSecurityConfig:

+
+
+
+
  @Bean
+  public JwtAuthenticationFilter getJwtAuthenticationFilter() {
+    return new JwtAuthenticationFilter();
+  }
+
+  @Override
+  public void configure(HttpSecurity http) throws Exception {
+    // ...
+    // add this line to the end of this existing method
+    http.addFilterBefore(getJwtAuthenticationFilter(), UsernamePasswordAuthenticationFilter.class);
+  }
+
+
+
+
+
+

Login with Username and Password to get JWT

+
+
+

To allow a client to login with username and password to get a JWT for sub-sequent requests, you need to do the following changes in your BaseWebSecurityConfig:

+
+
+
+
  @Bean
+  public JwtLoginFilter getJwtLoginFilter() throws Exception {
+
+    JwtLoginFilter jwtLoginFilter = new JwtLoginFilter("/login");
+    jwtLoginFilter.setAuthenticationManager(authenticationManager());
+    jwtLoginFilter.setUserDetailsService(this.userDetailsService);
+    return jwtLoginFilter;
+  }
+
+  @Override
+  public void configure(HttpSecurity http) throws Exception {
+    // ...
+    // add this line to the end of this existing method
+    http.addFilterBefore(getJwtLoginFilter(), UsernamePasswordAuthenticationFilter.class);
+  }
+
+
+
+
+
+

Authentication with Kafka

+
+
+

Authentication with JWT and Kafka is explained in the Kafka guide.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/spring/guide-kafka-spring.html b/docs/devon4j/1.0/spring/guide-kafka-spring.html new file mode 100644 index 00000000..5f3ab14a --- /dev/null +++ b/docs/devon4j/1.0/spring/guide-kafka-spring.html @@ -0,0 +1,1004 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Kafka

+
+
+

This guide explains how Spring Kafka is used in devonfw applications. It focuses on aspects which are special to devonfw; if you want to learn about spring-kafka in general, you should refer to Spring's reference documentation.

+
+
+

There is an example of simple Kafka implementation in the devon4j-kafka-employeeapp.

+
+
+

The devon4j-kafka library consists of:

+
+
+
    +
  • +

    Custom message processor with retry pattern

    +
  • +
  • +

    Monitoring support

    +
  • +
  • +

    Tracing support

    +
  • +
  • +

    Logging support

    +
  • +
  • +

    Configuration support for Kafka Producers, Consumers, brave tracer and message retry processing including defaults

    +
  • +
+
+
+
+
+

How to use?

+
+
+

To use devon4j-kafka you have to add the required starter dependencies, which are "starter-kafka-sender" or "starter-kafka-receiver" from devon4j. These 2 starters are responsible for taking care of the required spring configuration. If you only want to produce messages "starter-kafka-sender" is enough. For consuming messages you need "starter-kafka-receiver" which also includes "starter-kafka-sender".

+
+
+

To use devon4j-kafka message sender add the below dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-kafka-sender</artifactId>
+</dependency>
+
+
+
+

It includes the Tracer implementations from Spring cloud sleuth.

+
+
+

To use the devon4j-kafka message receiver configurations, loggers and message retry processor for processing message, add the below dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-kafka-receiver</artifactId>
+</dependency>
+
+
+
+
+
+

Property Parameters

+
+
+

As written before kafka-producer and listener-specific configuration is done via properties classes. These classes provide useful defaults, at a minimum the following parameters have to be configured:

+
+
+
+
messaging.kafka.common.bootstrap-servers=kafka-broker:9092
+messaging.kafka.consumer.group-id=<e.g. application name>
+messaging.kafka.listener.container.concurrency=<Number of listener threads for each listener container>
+
+
+
+

All the configuration beans for devon4j-kafka are annotated with @ConfigurationProperties and use common prefixes to read the property values from application.properties or application.yml.

+
+
+

Example:

+
+
+
+
 @Bean
+  @ConfigurationProperties(prefix = "messaging.kafka.producer")
+  public KafkaProducerProperties messageKafkaProducerProperties() {
+    return new KafkaProducerProperties();
+  }
+
+
+
+

For producer and consumer the prefixes are messaging.kafka.producer…​ and message.kafka.consumer…​ and for retry the prefix is messaging.retry…​

+
+ +
+

We use the same properties defined by Apache Kafka or Spring Kafka. They are simply "mapped" to the above prefixes to allow easy access from your application properties. The java docs provided in each of the devon4j-kafka property classes explain their use and what value has to be passed.

+
+ +
+
+
+

Naming convention for topics

+
+
+

For better managing of several Kafka topics in your application portfolio we strongly advise introducing a naming scheme for your topics. The schema may depend on the actual usage pattern of Kafka. For context where Kafka is used +in a 1-to-1-communication-scheme (not publish/subscribe) the following schema has been proven useful in practice:

+
+
+
+
<application name>-<service name>-<version>-<service-operation>
+
+
+
+

To keep things easy and prevent problems we suggest to use only small letters, hyphens but no other special characters.

+
+
+
+
+

Send Messages

+
+
+

As mentioned above the 'starter-kafka-sender' is required to be added as a dependency to use MessageSender from Kafka.

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-kafka-sender</artifactId>
+</dependency>
+
+
+
+

The following example shows how to use MessageSender and its method to send messages to Kafka broker:

+
+
+

Example:

+
+
+
+
  @Inject
+  private MessageSender messageSender;
+  private ProducerRecord<K,V> producerRecord;
+
+  public void sendMessageToKafka(){
+  producerRecord=new ProducerRecord<>("topic-name","message");
+  messageSender.sendMessage(this.producerRecord);
+  //Alternative
+  messageSender.sendMessageAndWait(this.producerRecord,10);
+  }
+
+
+
+

There are multiple methods available from MessageSender of devon4j-kafka. The ProducerListener will log the message sent to the Kafka broker.

+
+
+
+
+

Receive Messages

+
+
+

To receive messages you have to define a listener. The listener is normally part of the service layer.

+
+
+
+Architecture for Kafka services +
+
Figure 1. Architecture for Kafka services
+
+
+

Import the following starter-kafka-receiver dependency to use the listener configurations and loggers from devon4j-kafka.

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-kafka-receiver</artifactId>
+</dependency>
+
+
+
+

The listener +is defined by implementing and annotating a method like in the following example:

+
+
+
+
  @KafkaListener(topics = "employeeapp-employee-v1-delete", groupId = "${messaging.kafka.consumer.groupId}", containerFactory = "kafkaListenerContainerFactory")
+  public void consumer(ConsumerRecord<Object, Object> consumerRecord, Acknowledgment acknowledgment) {
+  //user operation
+  //To acknowledge listener after processing
+  acknowledgment.acknowledge();
+  }
+
+
+
+

The group id can be mentioned in application.properties as listener properties.

+
+
+
+
messaging.kafka.consumer.groupId=default
+
+
+
+

If there are multiple topics and multiple listeners then we suggest to specify the topic names directly on each listener instead of reading from the property file. +The container factory mentioned in the @KafkaListener is provided in the KafkaListenerContainerProperties.java to create a default container factory with acknowledgement.

+
+
+

The default ack-mode is manual_immediate . It can be overridden by below example:

+
+
+
+
messaging.kafka.listener.container.ackMode=<ack-mode>
+
+
+
+

The other ack-mode values can be referred from +here.

+
+
+
+
+

Retry

+
+
+

The retry pattern in devon4j-kafka is invoked when a particular exception(described by user in application.properties file) is thrown while processing the consumed message and it is configured in application.properties file. The general idea is to separate messages which could not be processed into dedicated retry-topics to allow fine control on how processing of the messages is retried and to not block newly arriving messages. +Let us see more about handling retry in the below topics.

+
+
+
+Retry pattern in devon4j-kafka +
+
+
+

Handling retry in devon4j-kafka

+
+

The retry pattern is included in the starter dependency of "starter-kafka-receiver".

+
+
+

The retryPattern method is used by calling the method processMessageWithRetry(ConsumerRecord<K, V> consumerRecord,MessageProcessor<K, V> processor). Please find the below Example:

+
+
+
+
@Inject
+private MessageRetryOperations<K, V> messageRetryOperations;
+@Inject
+private DeleteEmployeeMessageProcessor<K, V> deleteEmployeeMessageProcessor;
+@KafkaListener(topics = "employeeapp-employee-v1-delete", groupId = "${messaging.kafka.consumer.groupId}",containerFactory = "kafkaListenerContainerFactory")
+public void consumer(ConsumerRecord<K, V> consumerRecord, Acknowledgment acknowledgment) {
+this.messageRetryOperations.processMessageWithRetry(consumerRecord, this.deleteEmployeeMessageProcessor);
+// Acknowledge the listener.
+acknowledgment.acknowledge();
+}
+
+
+
+

The implementation for MessageProcessor from devon4j-kafka is required to provide the implementation to process the ConsumedRecord from Kafka broker. The implementation for MessageProcessor interface can look as below example:

+
+
+
+
import com.devonfw.module.kafka.common.messaging.retry.api.client.MessageProcessor;
+@Named
+public class DeleteEmployeeMessageProcessor<K, V> implements MessageProcessor<K, V> {
+ @Override
+  public void processMessage(ConsumerRecord<K, V> message) {
+  //process message
+  }
+}
+
+
+
+

It works as follows:

+
+
+
    +
  • +

    The application gets a message from the topic.

    +
  • +
  • +

    During the processing of the message an error occurs, the message will be written to the redelivery topic.

    +
  • +
  • +

    The message is acknowledged in the topic.

    +
  • +
  • +

    The message will be processed from the re-delivery topic after a delay.

    +
  • +
  • +

    Processing of the message fails again. It retries until the retry count is exceeded.

    +
  • +
  • +

    When the retry fails in all the retry then the message is logged and payload in the ProducerRecord is deleted for log +compaction which is explained below.

    +
  • +
+
+
+
+

Retry configuration and naming convention of redelivery topics.

+
+

The following properties should be added in the application.properties or application.yml file.

+
+
+

The retry pattern in devon4j-kafka will perform for specific topic of a message. So its mandatory to specify the properties for each topic. Below properties are example,

+
+
+
+
# Back off policy properties for employeeapp-employee-v1-delete
+messaging.retry.back-off-policy.retryReEnqueueDelay.employeeapp-employee-v1-delete=1000
+messaging.retry.back-off-policy.retryDelay.employeeapp-employee-v1-delete=600000
+messaging.retry.back-off-policy.retryDelayMultiplier.employeeapp-employee-v1-delete=1.0
+messaging.retry.back-off-policy.retryMaxDelay.employeeapp-employee-v1-delete=600000
+messaging.retry.back-off-policy.retryCount.employeeapp-employee-v1-delete=2
+
+# Retry policy properties for employeeapp-employee-v1-delete
+messaging.retry.retry-policy.retryPeriod.employeeapp-employee-v1-delete=1800
+messaging.retry.retry-policy.retryableExceptions.employeeapp-employee-v1-delete=<Class names of exceptions for which a retry should be performed>
+messaging.retry.retry-policy.retryableExceptionsTraverseCauses.employeeapp-employee-v1-delete=true
+
+# Back off policy properties for employeeapp-employee-v1-add
+messaging.retry.back-off-policy.retryReEnqueueDelay.employeeapp-employee-v1-add=1000
+messaging.retry.back-off-policy.retryDelay.employeeapp-employee-v1-add=600000
+messaging.retry.back-off-policy.retryDelayMultiplier.employeeapp-employee-v1-add=2.0
+messaging.retry.back-off-policy.retryMaxDelay.employeeapp-employee-v1-add=600000
+messaging.retry.back-off-policy.retryCount.employeeapp-employee-v1-add=4
+
+# Retry policy properties for employeeapp-employee-v1-add
+messaging.retry.retry-policy.retryPeriod.employeeapp-employee-v1-add=3000
+messaging.retry.retry-policy.retryableExceptions.employeeapp-employee-v1-add=<Class names of exceptions for which a retry should be performed>
+messaging.retry.retry-policy.retryableExceptionsTraverseCauses.employeeapp-employee-v1-add=true
+
+
+
+

If you notice the above properties, the retry-policy and back-off-policy properties are repeated twice as we have 2 topics for the retry to be performed with different values. The topic name should be added at the end of the attribute.

+
+
+

So, the retry will be performed for each topic according to their configuration values.

+
+
+

If you want to provide the same/default values for all the topics, then it is required to add default in place of the topic name in the above properties example.

+
+
+

For example,

+
+
+
+
# Default back off policy properties
+messaging.retry.back-off-policy.retryReEnqueueDelay.default=1000
+messaging.retry.back-off-policy.retryDelay.default=600000
+messaging.retry.back-off-policy.retryDelayMultiplier.default=1.0
+messaging.retry.back-off-policy.retryMaxDelay.default=600000
+messaging.retry.back-off-policy.retryCount.default=2
+
+# Default retry policy properties
+messaging.retry.retry-policy.retryPeriod.default=1800
+messaging.retry.retry-policy.retryableExceptions.default=<Class names of exceptions for which a retry should be performed>
+messaging.retry.retry-policy.retryableExceptionsTraverseCauses.default=true
+
+
+
+

By giving properties like above, the same values will be passed for all the topics and the way of processing retry for all the topics are same.

+
+
+

All these above property values are mapped to the classes DefaultBackOffPolicyProperties.java and DefaultRetryPolicyProperties.java and configured by the class MessageDefaultRetryConfig.java.

+
+
+

The MessageRetryContext in devon kafka is used to perform the retry pattern with the properties from DefaultBackOffPolicyProperties and DefaultRetryPolicyProperties.

+
+
+

The 2 main properties of MessageRetryContext are nextRetry and retryUntil which is a Instant date format and it is calculated internally using the properties given in DefaultBackOffPolicyProperties and DefaultRetryPolicyProperties.

+
+
+

You may change the behavior of this date calculation by providing your own implementation classes for MessageBackOffPolicy.java and MessageRetryPolicy.java.

+
+
+

The naming convention for retry topic is the same topic name which you have given to publish the message and we add suffix -retry to it once it is consumed and given to process with retry.

+
+
+

If there is no topic found in the consumed record the default retry topic will be added which is default-message-retry.

+
+
+
+

Retry topics

+
+

Devon4j-kafka uses a separate retry topic for each topic where retries occur. By default this topic is named <topic name>-retry. You may change this behavior by providing your own implementation for DefaultKafkaRecordSupport which is a default implementation from devon4j-kafka for KafkaRecordSupport.

+
+
+

Devon4j-kafka enqueues a new message for each retry attempt. It is very important to configure your retry topics with log compaction enabled. More or less simplified, if log compaction is enabled Kafka keeps only one message per message key. Since each retry message has the same key, in fact only one message per retry attempt is stored. After the last retry attempt the message payload is removed from the message, so you do not keep unnecessary data in your topics.

+
+
+
+

Handling retry finally failed

+
+

Per default when the retry fails with final attempt we just log the message and delete the payload of ProducerRecord which comes to proceed the retry pattern.

+
+
+

You can change this behavior by providing the implementation class for the interface MessageRetryHandler.java +which has two methods retryTimeout and retryFailedFinal.

+
+
+
+
+
+

Tracer

+
+
+

We leverage Spring Cloud Sleuth for tracing in devon4j-kafka +This is used to trace the asynchronous process of Kafka producing and consuming. In an asynchronous process it is important to maintain an id which will be same for all asynchronous process. +However, devon uses its own correlation-id(UUID) to track the process. But devon4j-kafka uses an additional tracing protocol which is Brave Tracer.

+
+
+

This is a part of both starter dependencies starter-kafka-receiver and starter-kafka-sender.

+
+
+

There are 2 important properties which will be automatically logged which are trace-id and span-id. +The trace-id is the same for all the asynchronous process and span-id is unique for each asynchronous process.

+
+
+

How devon4j-kafka handles tracer ?

+
+

We inject the trace-id and span-id in to the ProducerRecord headers which comes to publish into the Kafka broker. +It’s injected in the headers with the key traceId for trace-id and spanId for span-id. +Along with these, the correlation-id(UUID) is also injected in the headers of record with the key correlationId.

+
+
+

So, when you consume record from Kafka broker, these values can be found in the consumed record’s headers with these keys.

+
+
+

So, it is very helpful to track the asynchronous process of consuming the messages.

+
+
+
+
+
+

Logging

+
+
+

devon4j-kafka provides multiple support classes to log the published message and the consumed message. +* The class ProducerLoggingListener which implements ProducerListener<K,V> from Spring Kafka is used to log the message as soon as it is published to the Kafka broker.

+
+
+
    +
  • +

    The aspect class MessageListenerLoggingAspect which is annotated with @Aspect and has a method logMessageprocessing which is annotated with @Around("@annotation(org.springframework.kafka.annotation.KafkaListener)&&args(kafkaRecord,..)") +is used to listen to the classes which is annotated with @KafkaListener and logs the message as soon as it is consumed.

    +
  • +
  • +

    The class MessageLoggingSupport has multiple methods to log different types of events like MessageReceived, MessageSent, MessageProcessed, MessageNotProcessed.

    +
  • +
  • +

    The class LoggingErrorHandler which implements ErrorHandler from spring-kafka which logs the message when an error occurred while consuming messages. You may change this behavior by creating your own implementation class for the ErrorHandler.

    +
  • +
+
+
+
+
+

Kafka Health check using Spring Actuator

+
+
+

The spring config class MessageCommonConfig automatically provides a spring health indicator bean for kafka if +the health endpoint is enabled. The health indicator will check for all topics listed in messaging.kafka.health.topicsToCheck +whether a leader is available. If this property is missing only the broker connection will be checked. The timeout for +the check (default 60s) may be changed via the property messaging.kafka.health.timeout. +If an application uses multiple broker(-clusters), a dedicated health indicator bean has to be +configured in the spring config for each broker(-cluster).

+
+
+

The properties for the devon kafka health check should be given like below example:

+
+
+
+
management.endpoint.health.enabled=<true or false>
+messaging.kafka.health.timeout=<the health check timeout seconds>
+messaging.kafka.health.topicsToCheck=employeeapp-employee-v1-delete,employeeapp-employee-v1-add
+
+
+
+

These properties are provided with default values except topicsToCheck, and the health check will happen only when the property management.endpoint.health.enabled=true is set.

+
+
+
+
+

Authentication

+
+
+

JSON Web Token (JWT)

+
+

devon4j-kafka supports authentication via JSON Web Tokens (JWT) out-of-the-box. +To use it add a dependency to the devon4j-starter-security-jwt:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-security-jwt</artifactId>
+</dependency>
+
+
+
+

The authentication via JWT needs some configuration, e.g. a keystore to verify the token signature. This is explained in the JWT documentation.

+
+
+

To secure a message listener with jwt add the @JwtAuthentication:

+
+
+
+
  @JwtAuthentication
+  @KafkaListener(topics = "employeeapp-employee-v1-delete", groupId = "${messaging.kafka.consumer.groupId}")
+  public void consumer(ConsumerRecord<K, V> consumerRecord, Acknowledgment acknowledgment) {
+...
+    }
+  }
+
+
+
+

With this annotation in-place each message will be checked for a valid JWT in a message header with the name Authorization. If a valid token is found the spring security context will be initialized with the user roles and "normal" authorization e.g. with @RolesAllowed may be used. This is also demonstrated in the kafka sample application.

+
+
+
+
+
+

Using Kafka for internal parallel processing

+
+
+

Apart from the use of Kafka as "communication channel", it is sometimes helpful to use Kafka internally to do parallel processing:

+
+
+
+Architecture for internal parallel processing with Kafka +
+
Figure 2. Architecture for internal parallel processing with Kafka
+
+
+

This example shows a payment service which allows submitting a list of receipt IDs for payment. +We assume that the payment itself takes a long time and should be done asynchronously and in parallel. +The general idea is to put a message for each receipt to pay into a topic. This is done in the use case implementation in a first step, if a rest call arrives. +Also part of the use case is a listener which consumes the messages. For each message (e.g. payment to do) a processor is called, which actually does the payment via the use case. +Since Kafka supports concurrency for the listeners easily the payment will also be done in parallel. +All features of devon4j-kafka, like retry handling could also be used.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/spring/guide-querydsl-spring.html b/docs/devon4j/1.0/spring/guide-querydsl-spring.html new file mode 100644 index 00000000..3b775245 --- /dev/null +++ b/docs/devon4j/1.0/spring/guide-querydsl-spring.html @@ -0,0 +1,476 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

QueryDSL in devon4j-spring

+
+
+

To implement dynamic queries, devon4j suggests the use of QueryDSL. QueryDSL uses metaclasses generated from entity classes at build time. devon4j-spring provides a way to use QueryDSL without the need for code generation. For this, devon4j provides the interface DefaultRepository that your repository needs to extend and the QueryUtil helper class to build your queries.

+
+
+
+
+

Example

+
+
+

Here is an example for using QueryDSL in devon4j-spring:

+
+
+
+
  public List<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    DishEntity dish = Alias.alias(DishEntity.class);
+    JPAQuery<DishEntity> query = newDslQuery(dish);
+    Range<BigDecimal> priceRange = criteria.getPriceRange();
+    if (priceRange != null) {
+      BigDecimal min = priceRange.getMin();
+      if (min != null) {
+        query.where(Alias.$(dish.getPrice()).ge(min));
+      }
+      BigDecimal max = priceRange.getMax();
+      if (max != null) {
+        query.where(Alias.$(dish.getPrice()).le(max));
+      }
+    }
+    String name = criteria.getName();
+    if ((name != null) && (!name.isEmpty())) {
+      // query.where(Alias.$(dish.getName()).eq(name));
+      QueryUtil.get().whereString(query, Alias.$(dish.getName()), name, criteria.getNameOption());
+    }
+    return query.fetch();
+  }
+
+
+
+
+
+

Pagination

+
+
+

Pagination for dynamic or generally handwritten queries is provided in devon4j-spring via QueryUtil.findPaginated(…​):

+
+
+
+
boolean determineTotalHitCount = ...;
+return QueryUtil.get().findPaginated(criteria.getPageable(), query, determineTotalHitCount);
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/spring/guide-service-client-spring.html b/docs/devon4j/1.0/spring/guide-service-client-spring.html new file mode 100644 index 00000000..1d66b5b8 --- /dev/null +++ b/docs/devon4j/1.0/spring/guide-service-client-spring.html @@ -0,0 +1,820 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Service Client in devon4j-spring

+
+
+

This guide is about consuming (calling) services from other applications (micro-services) in devon4j-spring.

+
+
+
+
+

Dependency

+
+
+

You need to add (at least one of) these dependencies to your application:

+
+
+
+
<!-- Starter for asynchronous consuming REST services via Java HTTP Client (Java11+) -->
+<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-http-client-rest-async</artifactId>
+</dependency>
+<!-- Starter for synchronous consuming REST services via Java HTTP Client (Java11+) -->
+<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-http-client-rest-sync</artifactId>
+</dependency>
+<!-- Starter for synchronous consuming REST services via Apache CXF (Java8+)
+  NOTE: This is an alternative to devon4j-starter-http-client-rest-sync
+  -->
+<!--
+<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-cxf-client-rest</artifactId>
+</dependency>
+-->
+<!-- Starter for synchronous consuming SOAP services via Apache CXF (Java8+) -->
+<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-cxf-client-ws</artifactId>
+</dependency>
+
+
+
+
+
+

Features

+
+
+

When invoking a service, you need to consider many cross-cutting aspects. You might not think about them in the very first place and you do not want to redundantly implement them multiple times. Therefore, you should consider using this approach. The following sub-sections list the covered features and aspects:

+
+
+

Simple usage

+
+

Assuming you already have a Java interface MyService of the service you want to invoke:

+
+
+
+
package com.company.department.foo.mycomponent.service.api.rest;
+...
+
+@Path("/myservice")
+public interface MyService extends RestService {
+
+  @POST
+  @Path("/getresult")
+  MyResult getResult(MyArgs myArgs);
+
+  @DELETE
+  @Path("/entity/{id}")
+  void deleteEntity(@PathParam("id") long id);
+}
+
+
+
+

Then, all you need to do is this:

+
+
+
+
@Named
+public class UcMyUseCaseImpl extends MyUseCaseBase implements UcMyUseCase {
+  @Inject
+  private ServiceClientFactory serviceClientFactory;
+
+  ...
+  private void callSynchronous(MyArgs myArgs) {
+    MyService myService = this.serviceClientFactory.create(MyService.class);
+    // call of service over the wire, synchronously blocking until result is received or error occurred
+    MyResult myResult = myService.myMethod(myArgs);
+    handleResult(myResult);
+  }
+
+  private void callAsynchronous(MyArgs myArgs) {
+    AsyncServiceClient<MyService> client = this.serviceClientFactory.createAsync(MyService.class);
+    // call of service over the wire, will return when the request is sent and invoke handleResult asynchronously
+    client.call(client.get().myMethod(myArgs), this::handleResult);
+  }
+
+  private void handleResult(MyResult myResult) {
+    ...
+  }
+  ...
+}
+
+
+
+

As you can see, both synchronous and asynchronous invocation of a service is very simple and type-safe. However, it is also very flexible and powerful (see following features). The actual call of myMethod will technically call the remote service over the wire (e.g. via HTTP), including marshalling the arguments (e.g. converting myArgs to JSON) and unmarshalling the result (e.g. converting the received JSON to myResult).

+
+
+

Asynchronous Invocation of void Methods

+
+

If you want to call a service method with void as the return type, the type-safe call method cannot be used as void methods do not return a result. Therefore you can use the callVoid method as following:

+
+
+
+
  private void callAsynchronousVoid(long id) {
+    AsyncServiceClient<MyService> client = this.serviceClientFactory.createAsync(MyService.class);
+    // call of service over the wire, will return when the request is sent and invoke resultHandler asynchronously
+    Consumer<Void> resultHandler = r -> { System.out.println("Response received"); };
+    client.callVoid(() -> { client.get().deleteEntity(id);}, resultHandler);
+  }
+
+
+
+

You may also provide null as resultHandler for "fire and forget". However, this will lead to the result being ignored, so even in the case of an error you will not be notified.

+
+
+
+
+

Configuration

+
+

This solution allows a very flexible configuration on the following levels:

+
+
+
    +
  1. +

    Global configuration (defaults)

    +
  2. +
  3. +

    Configuration per remote service application (microservice)

    +
  4. +
  5. +

    Configuration per invocation.

    +
  6. +
+
+
+

A configuration on a deeper level (e.g. 3) overrides the configuration from a higher level (e.g. 1).

+
+
+

The configuration on Level 1 and 2 are configured via application.properties +(see configuration guide). +For Level 1, the prefix service.client.default. is used for properties. +Further, for level 2, the prefix service.client.app.«application». is used where «application» is the +technical name of the application providing the service. This name will automatically be derived from +the java package of the service interface (e.g. foo in MyService interface before) following our +packaging conventions. +In case these conventions are not met, it will fall back to the fully qualified name of the service interface.

+
+
+

Configuration on Level 3 has to be provided as a Map argument to the method +ServiceClientFactory.create(Class<S> serviceInterface, Map<String, String> config). +The keys of this Map will not use prefixes (such as the ones above). For common configuration +parameters, a type-safe builder is offered to create such a map via ServiceClientConfigBuilder. +E.g. for testing, you may want to do:

+
+
+
+
this.serviceClientFactory.create(MyService.class,
+  new ServiceClientConfigBuilder().authBasic().userLogin(login).userPassword(password).buildMap());
+
+
+
+

Here is an example of a configuration block for your application.properties:

+
+
+
+
service.client.default.url=https://api.company.com/services/${type}
+service.client.default.timeout.connection=120
+service.client.default.timeout.response=3600
+
+service.client.app.bar.url=https://bar.company.com:8080/services/rest
+service.client.app.bar.auth=basic
+service.client.app.bar.user.login=user4711
+service.client.app.bar.user.password=ENC(jd5ZREpBqxuN9ok0IhnXabgw7V3EoG2p)
+
+service.client.app.foo.url=https://foo.company.com:8443/services/rest
+# authForward: simply forward Authorization header (e.g. with JWT) to remote service
+service.client.app.bar.auth=authForward
+
+
+
+
+

Service Discovery

+
+

You do not want to hardwire service URLs in your code, right? Therefore, different strategies might apply +to discover the URL of the invoked service. This is done internally by an implementation of the interface +ServiceDiscoverer. The default implementation simply reads the base URL from the configuration. +You can simply add this to your application.properties as in the above configuration example.

+
+
+

Assuming your service interface has the fully qualified name +com.company.department.foo.mycomponent.service.api.rest.MyService, then the URL would be resolved to +https://foo.company.com:8443/services/rest, as the «application» is foo.

+
+
+

Additionally, the URL might use the following variables that will automatically be resolved:

+
+
+
    +
  • +

    ${app} to «application» (useful for default URL)

    +
  • +
  • +

    ${type} to the type of the service. E.g. rest in case of a REST service and ws for a SOAP service.

    +
  • +
  • +

    ${local.server.port} for the port of your current Java servlet container running the JVM. Should only be used for testing with spring-boot random port mechanism (technically spring cannot resolve this variable, but we do it for you here).

    +
  • +
+
+
+

Therefore, the default URL may also be configured as:

+
+
+
+
service.client.default.url=https://api.company.com/${app}/services/${type}
+
+
+
+

As you can use any implementation of ServiceDiscoverer, you can also easily use eureka (or anything else) instead to discover your services. +However, we recommend to use istio instead, as described below.

+
+
+
+

Headers

+
+

A very common demand is to tweak (HTTP) headers in the request to invoke the service. May it be for security (authentication data) or for other cross-cutting concerns (such as the Correlation ID). This is done internally by implementations of the interface ServiceHeaderCustomizer. +We already provide several implementations such as:

+
+
+
    +
  • +

    ServiceHeaderCustomizerBasicAuth for basic authentication (auth=basic).

    +
  • +
  • +

    ServiceHeaderCustomizerOAuth for OAuth: passes a security token from security context such as a JWT via OAuth (auth=oauth).

    +
  • +
  • +

    ServiceHeaderCustomizerAuthForward forwards the Authorization HTTP header from the running request to the request to the remote service as is (auth=authForward). Be careful to avoid security pitfalls by misconfiguring this feature, as it may also contain sensitive credentials (e.g. basic auth) to the remote service. Never use as default.

    +
  • +
  • +

    ServiceHeaderCustomizerCorrelationId passes the Correlation ID to the service request.

    +
  • +
+
+
+

Additionally, you can add further custom implementations of ServiceHeaderCustomizer for your individual requirements and additional headers.

+
+
+
+

Timeouts

+
+

You can configure timeouts in a very flexible way. First of all, you can configure timeouts to establish the connection (timeout.connection) and to wait for the response (timeout.response) separately. These timeouts can be configured on all three levels as described in the configuration section above.

+
+
+
+

Error Handling

+
+

Whilst invoking a remote service, an error may occur. This solution will automatically handle such errors and map them to a higher level ServiceInvocationFailedException. In general, we separate two different types of errors:

+
+
+
    +
  • +

    Network error
    +In such a case (host not found, connection refused, time out, etc.), there is not even a response from the server. However, in advance to a low-level exception you will get a wrapped ServiceInvocationFailedException (with code ServiceInvoke) with a readable message containing the service that could not be invoked.

    +
  • +
  • +

    Service error
    +In case the service failed on the server-side, the error result will be parsed and thrown as a ServiceInvocationFailedException with the received message and code.

    +
  • +
+
+
+

This allows to catch and handle errors when a service-invocation failed. You can even distinguish business errors from the server-side from technical errors and implement retry strategies or the like. +Further, the created exception contains detailed contextual information about the service that failed (service interface class, method, URL), which makes it much easier to trace down errors. Here is an example from our tests:

+
+
+
+
While invoking the service com.devonfw.test.app.myexample.service.api.rest.MyExampleRestService#businessError[http://localhost:50178/app/services/rest/my-example/v1/business-error] the following error occurred: Test of business error. Probably the service is temporary unavailable. Please try again later. If the problem persists contact your system administrator.
+2f43b03e-685b-45c0-9aae-23ff4b220c85:BusinessErrorCode
+
+
+
+

You may even provide your own implementation of ServiceClientErrorFactory instead to provide an own exception class for this purpose.

+
+
+

Handling Errors

+
+

In case of a synchronous service invocation, an error will be immediately thrown so you can surround the call with a regular try-catch block:

+
+
+
+
  private void callSynchronous(MyArgs myArgs) {
+    MyService myService = this.serviceClientFactory.create(MyService.class);
+    // call of service over the wire, synchronously blocking until result is received or error occurred
+    try {
+      MyResult myResult = myService.myMethod(myArgs);
+      handleResult(myResult);
+    } catch (ServiceInvocationFailedException e) {
+      if (e.isTechnical()) {
+        handleTechnicalError(e);
+      } else {
+        // error code you defined in the exception on the server side of the service
+        String errorCode = e.getCode();
+        handleBusinessError(e, errorCode);
+      }
+    } catch (Throwable e) { // you may not handle this explicitly here...
+      handleTechnicalError(e);
+    }
+  }
+
+
+
+

If you are using asynchronous service invocation, an error can occur in a separate thread. Therefore, you may and should define a custom error handler:

+
+
+
+
  private void callAsynchronous(MyArgs myArgs) {
+    AsyncServiceClient<MyService> client = this.serviceClientFactory.createAsync(MyService.class);
+    Consumer<Throwable> errorHandler = this::handleError;
+    client.setErrorHandler(errorHandler);
+    // call of service over the wire, will return when the request is sent and invoke handleResult asynchronously
+    client.call(client.get().myMethod(myArgs), this::handleResult);
+  }
+
+  private void handleError(Throwable error) {
+    ...
+  }
+}
+
+
+
+

The error handler consumes Throwable, and not only RuntimeException, so you can get notified even in case of an unexpected OutOfMemoryError, NoClassDefFoundError, or other technical problems. Please note that the error handler may also be called from the thread calling the service (e.g. if already creating the request fails). The default error handler used if no custom handler is set will only log the error and do nothing else.

+
+
+
+
+

Logging

+
+

By default, this solution will log all invocations including the URL of the invoked service, success or error status flag and the duration in seconds (with decimal nano precision as available). Therefore, you can easily monitor the status and performance of the service invocations. Here is an example from our tests:

+
+
+
+
Invoking service com.devonfw.test.app.myexample.service.api.rest.MyExampleRestService#greet[http://localhost:50178/app/services/rest/my-example/v1/greet/John%20Doe%20%26%20%3F%23] took PT20.309756622S (20309756622ns) and succeded with status 200.
+
+
+
+
+

Resilience

+
+

Resilience adds a lot of complexity, which typically means that addressing this here would most probably result in not being up-to-date and not meeting all requirements. Therefore, we recommend something completely different: the sidecar approach (based on sidecar pattern). This means that you use a generic proxy app that runs as a separate process on the same host, VM, or container of your actual application. Then, in your app, you call the service via the sidecar proxy on localhost (service discovery URL is e.g. http://localhost:8081/${app}/services/${type}) that then acts as proxy to the actual remote service. Now aspects such as resilience with circuit breaking and the actual service discovery can be configured in the sidecar proxy app, independent of your actual application. Therefore, you can even share and reuse configuration and experience with such a sidecar proxy app even across different technologies (Java, .NET/C#, Node.JS, etc.). Further, you do not pollute the technology stack of your actual app with the infrastructure for resilience, throttling, etc. and can update the app and the sidecar independently when security-fixes are available.

+
+
+

Various implementations of such sidecar proxy apps are available as free open source software. +Our recommendation in devonfw is to use istio. This not only provides such a side-car, but also an entire management solution for service-mesh, making administration and maintenance much easier. Platforms like OpenShift support this out of the box.

+
+
+

However, if you are looking for details about side-car implementations for services, you can have a look at the following links:

+
+
+ +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/spring/guide-spring-configuration.html b/docs/devon4j/1.0/spring/guide-spring-configuration.html new file mode 100644 index 00000000..c963d073 --- /dev/null +++ b/docs/devon4j/1.0/spring/guide-spring-configuration.html @@ -0,0 +1,753 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Configuration

+
+ +
+
+
+

Internal Application Configuration

+
+
+

There usually is a main configuration registered with main Spring Boot App, but differing configurations to support automated test of the application can be defined using profiles (not detailed in this guide).

+
+
+

Spring Boot Application

+
+

For a complete documentation, see the Spring Boot Reference Guide.

+
+
+

With spring-boot you provide a simple main class (also called starter class) like this: +com.devonfw.mtsj.application

+
+
+
+
@SpringBootApplication(exclude = { EndpointAutoConfiguration.class })
+@EntityScan(basePackages = { "com.devonfw.mtsj.application" }, basePackageClasses = { AdvancedRevisionEntity.class })
+@EnableGlobalMethodSecurity(jsr250Enabled = true)
+@ComponentScan(basePackages = { "com.devonfw.mtsj.application.general", "com.devonfw.mtsj.application" })
+public class SpringBootApp {
+
+  /**
+   * Entry point for spring-boot based app
+   *
+   * @param args - arguments
+   */
+  public static void main(String[] args) {
+
+    SpringApplication.run(SpringBootApp.class, args);
+  }
+}
+
+
+
+

In a devonfw application this main class is always located in the <basepackage> of the application package namespace (see package-conventions). This is because a spring boot application will automatically do a classpath scan for components (spring-beans) and entities in the package where the application main class is located including all sub-packages. You can use the @ComponentScan and @EntityScan annotations to customize this behaviour.

+
+
+

If you want to map spring configuration properties into your custom code please see configuration mapping.

+
+
+
+

Standard beans configuration

+
+

For basic bean configuration we rely on spring boot using mainly configuration classes and only occasionally XML configuration files. Some key principle to understand Spring Boot auto-configuration features:

+
+
+
    +
  • +

    Spring Boot auto-configuration attempts to automatically configure your Spring application based on the jar dependencies and annotated components found in your source code.

    +
  • +
  • +

    Auto-configuration is non-invasive, at any point you can start to define your own configuration to replace specific parts of the auto-configuration by redefining your identically named bean (see also exclude attribute of @SpringBootApplication in example code above).

    +
  • +
+
+
+

Beans are configured via annotations in your java code (see dependency-injection).

+
+
+

For technical configuration you will typically write additional spring config classes annotated with @Configuration that provide bean implementations via methods annotated with @Bean. See spring @Bean documentation for further details. Like in XML you can also use @Import to make a @Configuration class include other configurations.

+
+
+

More specific configuration files (as required) reside in an adequately named subfolder of:

+
+
+

src/main/resources/app

+
+
+
+

BeanMapper Configuration

+
+

In case you are still using dozer, you will find further details in bean-mapper configuration.

+
+
+
+

Security configuration

+
+

The abstract base class BaseWebSecurityConfig should be extended to configure web application security thoroughly. +A basic and secure configuration is provided which can be overridden or extended by subclasses. +Subclasses must use the @Profile annotation to further discriminate between beans used in production and testing scenarios. See the following example:

+
+
+
How to extend BaseWebSecurityConfig for Production and Test
+
+
@Configuration
+@EnableWebSecurity
+@Profile(SpringProfileConstants.JUNIT)
+public class TestWebSecurityConfig extends BaseWebSecurityConfig {...}
+
+@Configuration
+@EnableWebSecurity
+@Profile(SpringProfileConstants.NOT_JUNIT)
+public class WebSecurityConfig extends BaseWebSecurityConfig {...}
+
+
+ +
+
+

WebSocket configuration

+
+

A websocket endpoint is configured within the business package as a Spring configuration class. The annotation @EnableWebSocketMessageBroker makes Spring Boot registering this endpoint.

+
+
+
+
package your.path.to.the.websocket.config;
+...
+@Configuration
+@EnableWebSocketMessageBroker
+public class WebSocketConfig extends AbstractWebSocketMessageBrokerConfigurer {
+...
+
+
+
+
+
+
+

External Application Configuration

+
+
+

application.properties files

+
+

Here is a list of common properties provided by the Spring framework.

+
+
+

For a general understanding of how spring-boot is loading and bootstrapping your application.properties see spring-boot external configuration.

+
+
+

The following properties files are used in devonfw application:

+
+
+
    +
  • +

    src/main/resources/application.properties providing a default configuration - bundled and deployed with the application package. It further acts as a template to derive a tailored minimal environment-specific configuration.

    +
  • +
  • +

    src/main/resources/config/application.properties providing additional properties only used at development time (for all local deployment scenarios). This property file is excluded from all packaging.

    +
  • +
  • +

    src/test/resources/config/application.properties providing additional properties only used for testing (JUnits based on spring test).

    +
  • +
+
+
+

For other environments where the software gets deployed such as test, acceptance and production you need to provide a tailored copy of application.properties. The location depends on the deployment strategy:

+
+
+
    +
  • +

    standalone run-able Spring Boot App using embedded tomcat: config/application.properties under the installation directory of the spring boot application.

    +
  • +
  • +

    dedicated tomcat (one tomcat per app): $CATALINA_BASE/lib/config/application.properties

    +
  • +
  • +

    tomcat serving a number of apps (requires expanding the wars): $CATALINA_BASE/webapps/<app>/WEB-INF/classes/config

    +
  • +
+
+
+

In this application.properties you only define the minimum properties that are environment specific and inherit everything else from the bundled src/main/resources/application.properties. In any case, make very sure that the classloader will find the file.

+
+
+
+

Database Configuration

+
+

The configuration for spring and Hibernate is already provided by devonfw in our sample application and the application template. So you only need to worry about a few things to customize.

+
+
+

Database System and Access

+
+

Obviously you need to configure which type of database you want to use as well as the location and credentials to access it. The defaults are configured in application.properties that is bundled and deployed with the release of the software. The files should therefore contain the properties as in the given example:

+
+
+
+
  database.url=jdbc:postgresql://database.enterprise.com/app
+  database.user.login=appuser01
+  database.user.password=************
+  database.hibernate.dialect = org.hibernate.dialect.PostgreSQLDialect
+  database.hibernate.hbm2ddl.auto=validate
+
+
+
+

For further details about database.hibernate.hbm2ddl.auto please see here. For production and acceptance environments we use the value validate that should be set as default. In case you want to use Oracle RDBMS you can find additional hints here.

+
+
+

If your application supports multiple database types, set spring.profiles.active=XXX in src/main/resources/config/application.properties to choose the database of your choice. Also, one has to set all the active spring profiles in this application.properties and not in any of the other application.properties.

+
+
+
+

Database Logging

+
+

Add the following properties to application.properties to enable logging of database queries for debugging purposes.

+
+
+
+
spring.jpa.properties.hibernate.show_sql=true
+spring.jpa.properties.hibernate.use_sql_comments=true
+spring.jpa.properties.hibernate.format_sql=true
+
+
+
+
+
+
+
+

Security

+
+
+

Password Encryption

+
+

In order to support encrypted passwords in spring-boot application.properties all you need to do is to add jasypt-spring-boot as dependency in your pom.xml (please check for recent version here):

+
+
+
+
<dependency>
+  <groupId>com.github.ulisesbocchio</groupId>
+  <artifactId>jasypt-spring-boot-starter</artifactId>
+  <version>3.0.3</version>
+</dependency>
+
+
+
+

This will smoothly integrate jasypt into your spring-boot application. Read this HOWTO to learn how to encrypt and decrypt passwords using jasypt.

+
+
+

Next, we give a simple example of how to encrypt and configure a secret value. +We use the algorithm PBEWITHHMACSHA512ANDAES_256 that provides strong encryption and is the default of jasypt-spring-boot-starter. +However, different algorithms can be used if preferred (e.g. PBEWITHMD5ANDTRIPLEDES).

+
+
+
+
java -cp ${M2_REPO}/org/jasypt/jasypt/1.9.3/jasypt-1.9.3.jar org.jasypt.intf.cli.JasyptPBEStringEncryptionCLI password=masterpassword algorithm=PBEWITHHMACSHA512ANDAES_256 input=secret ivGeneratorClassName=org.jasypt.iv.RandomIvGenerator
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.5+10
+
+
+
+----ARGUMENTS-------------------
+
+input: secret
+password: masterpassword
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+PoUxkNjY2juQMCyPu6ic5KJy1XfK+bX9vu2/mPj3pmcO4iydG6mhgZRZSw50z/oC
+
+
+
+

Of course the master-password (masterpassword) and the actual password to encrypt (secret) are just examples. +Please replace them with reasonable strong passwords for your environment. +Further, if you are using devonfw-ide you can make your life much easier and just type:

+
+
+
+
devon jasypt encrypt
+
+
+
+

See jasypt commandlet for details.

+
+
+

Now the entire line after the OUTPUT block is your encrypted secret. +It even contains some random salt so that multiple encryption invocations with the same parameters (ARGUMENTS) will produce a different OUTPUT.

+
+
+

The master-password can be configured on your target environment via the property jasypt.encryptor.password. As system properties given on the command-line are visible in the process list, we recommend to use a config/application.yml file only for this purpose (as we recommended to use application.properties for regular configs):

+
+
+
+
jasypt:
+    encryptor:
+        password: masterpassword
+
+
+
+

Again masterpassword is just an example that you replace with your actual master password. +Now you are able to put encrypted passwords into your application.properties and specify the algorithm.

+
+
+
+
spring.datasource.password=ENC(PoUxkNjY2juQMCyPu6ic5KJy1XfK+bX9vu2/mPj3pmcO4iydG6mhgZRZSw50z/oC)
+jasypt.encryptor.algorithm=PBEWITHHMACSHA512ANDAES_256
+
+
+
+

This application.properties file can be version controlled (git-ops) and without knowing the masterpassword nobody is able to decrypt this to get the actual secret back.

+
+
+

To prevent jasypt to throw an exception in dev or test scenarios you can simply put this in your local config (src/main/config/application.properties and same for test, see above for details):

+
+
+
+
jasypt.encryptor.password=none
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/spring/guide-spring-testing.html b/docs/devon4j/1.0/spring/guide-spring-testing.html new file mode 100644 index 00000000..0f2766a2 --- /dev/null +++ b/docs/devon4j/1.0/spring/guide-spring-testing.html @@ -0,0 +1,577 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Testing

+
+ +
+
+
+

Implementation

+
+
+

Module Test

+
+

In devon4j you can extend the abstract class ModuleTest to basically get access to assertions. In order to test classes embedded in dependencies and external services one needs to provide mocks for that. As the technology stack recommends we use the Mockito framework to offer this functionality. The following example shows how to implement Mockito into a JUnit test.

+
+
+
+
import static org.mockito.Mockito.when;
+import static org.mockito.Mockito.mock;
+...
+
+public class StaffmanagementImplTest extends ModuleTest {
+  @Rule
+  public MockitoRule rule = MockitoJUnit.rule();
+
+  @Test
+  public void testFindStaffMember() {
+  ...}
+}
+
+
+
+

Note that the test class does not use the @SpringApplicationConfiguration annotation. In a module test one does not use the whole application. +The JUnit rule is the best solution to use in order to get all needed functionality of Mockito. Static imports are a convenient option to enhance readability within Mockito tests. +You can define mocks with the @Mock annotation or the mock(*.class) call. To inject the mocked objects into your class under test you can use the @InjectMocks annotation. This automatically uses the setters of StaffmanagementImpl to inject the defined mocks into the class under test (CUT) when there is a setter available. In this case the beanMapper and the staffMemberDao are injected. Of course it is possible to do this manually if you need more control.

+
+
+
+
  @Mock
+  private BeanMapper beanMapper;
+  @Mock
+  private StaffMemberEntity staffMemberEntity;
+  @Mock
+  private StaffMemberEto staffMemberEto;
+  @Mock
+  private StaffMemberDao staffMemberDao;
+  @InjectMocks
+  StaffmanagementImpl staffmanagementImpl = new StaffmanagementImpl();
+
+
+
+

The mocked objects do not provide any functionality at the time being. To define what happens on a method call on a mocked dependency in the CUT one can use when(condition).thenReturn(result). In this case we want to test findStaffMember(Long id) in the StaffmanagementImpl.

+
+
+
+
public StaffMemberEto findStaffMember(Long id) {
+  return getBeanMapper().map(getStaffMemberDao().find(id), StaffMemberEto.class);
+}
+
+
+
+

In this simple example one has to stub two calls on the CUT as you can see below. For example the method call of the CUT staffMemberDao.find(id) is stubbed for returning a mock object staffMemberEntity that is also defined as mock.

+
+
+
+

Subsystem Test

+
+

devon4j provides a simple test infrastructure to aid with the implementation of subsystem tests. It becomes available by simply subclassing AbstractRestServiceTest.java.

+
+
+
+
//given
+long id = 1L;
+Class<StaffMemberEto> targetClass = StaffMemberEto.class;
+when(this.staffMemberDao.find(id)).thenReturn(this.staffMemberEntity);
+when(this.beanMapper.map(this.staffMemberEntity, targetClass)).thenReturn(this.staffMemberEto);
+
+//when
+StaffMemberEto resultEto = this.staffmanagementImpl.findStaffMember(id);
+
+//then
+assertThat(resultEto).isNotNull();
+assertThat(resultEto).isEqualTo(this.staffMemberEto);
+
+
+
+

After the test method call one can verify the expected results. Mockito can check whether a mocked method call was indeed called. This can be done using Mockito verify. Note that it does not add any value if you check for method calls that are needed to reach the asserted result anyway. Call verification can be useful e.g. when you want to assure that statistics are written out without actually testing them.

+
+
+
+
+
+

Configuration

+
+
+

Configure Test Specific Beans

+
+

Sometimes it can become handy to provide other or differently configured bean implementations via CDI than those available in production. For example, when creating beans using @Bean-annotated methods they are usually configured within those methods. WebSecurityBeansConfig shows an example of such methods.

+
+
+
+
@Configuration
+public class WebSecurityBeansConfig {
+  //...
+  @Bean
+  public AccessControlSchemaProvider accessControlSchemaProvider() {
+    // actually no additional configuration is shown here
+    return new AccessControlSchemaProviderImpl();
+  }
+  //...
+}
+
+
+
+

AccessControlSchemaProvider allows to programmatically access data defined in some XML file, e.g. access-control-schema.xml. Now, one can imagine that it would be helpful if AccessControlSchemaProvider would point to some other file than the default within a test class. That file could provide content that differs from the default. +The question is: how can I change resource path of AccessControlSchemaProviderImpl within a test?

+
+
+

One very helpful solution is to use static inner classes. +Static inner classes can contain @Bean -annotated methods, and by placing them in the classes parameter in @SpringBootTest(classes = { /* place class here*/ }) annotation the beans returned by these methods are placed in the application context during test execution. Combining this feature with inheritance allows to override methods defined in other configuration classes as shown in the following listing where TempWebSecurityConfig extends WebSecurityBeansConfig. This relationship allows to override public AccessControlSchemaProvider accessControlSchemaProvider(). Here we are able to configure the instance of type AccessControlSchemaProviderImpl before returning it (and, of course, we could also have used a completely different implementation of the AccessControlSchemaProvider interface). By overriding the method the implementation of the super class is ignored, hence, only the new implementation is called at runtime. Other methods defined in WebSecurityBeansConfig which are not overridden by the subclass are still dispatched to WebSecurityBeansConfig.

+
+
+
+
//... Other testing related annotations
+@SpringBootTest(classes = { TempWebSecurityConfig.class })
+public class SomeTestClass {
+
+  public static class TempWebSecurityConfig extends WebSecurityBeansConfig {
+
+    @Override
+    @Bean
+    public AccessControlSchemaProvider accessControlSchemaProvider() {
+
+      ClassPathResource resource = new ClassPathResource(locationPrefix + "access-control-schema3.xml");
+      AccessControlSchemaProviderImpl accessControlSchemaProvider = new AccessControlSchemaProviderImpl();
+      accessControlSchemaProvider.setAccessControlSchema(resource);
+      return accessControlSchemaProvider;
+    }
+  }
+}
+
+
+
+

The following chapter of the Spring framework documentation explains issue, but uses a slightly different way to obtain the configuration.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4j/1.0/tutorial-newapp.html b/docs/devon4j/1.0/tutorial-newapp.html new file mode 100644 index 00000000..d3d6a9ec --- /dev/null +++ b/docs/devon4j/1.0/tutorial-newapp.html @@ -0,0 +1,609 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Creating a new application

+
+ +
+
+
+

Running the archetype

+
+
+

In order to create a new application you must use the archetype provided by devon4j which uses the maven archetype functionality.

+
+
+

To create a new application, you should have the devonfw IDE installed. Follow the devon ide documentation to install +it. +You can choose between 2 alternatives: create it from the command line or, in a more visual manner, within Eclipse.

+
+
+

From command Line

+
+

To create a new devon4j application from command line, you can simply run the following command:

+
+
+
+
devon java create com.example.application.sampleapp
+
+
+
+

For low-level creation you can also manually call this command:

+
+
+
+
mvn -DarchetypeVersion=${devon4j.version} -DarchetypeGroupId=com.devonfw.java.templates -DarchetypeArtifactId=devon4j-template-server archetype:generate -DgroupId=com.example.application -DartifactId=sampleapp -Dversion=1.0.0-SNAPSHOT -Dpackage=com.devonfw.application.sampleapp
+
+
+
+

Attention: The archetypeVersion (first argument) should be set to the latest version of devon4j. You can easily determine the version from this badge: +latest devon4j version

+
+
+

Further providing additional properties (using -D parameter) you can customize the generated app:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 1. Options for app template
propertycommentexample

dbType

Choose the type of RDBMS to use (hana, oracle, mssql, postgresql, mariadb, mysql, etc.)

-DdbType=postgresql

batch

Option to add a batch module

-Dbatch=batch

+
+
+

From Eclipse

+
+
+
+After that, you should follow these Eclipse steps to create your application:
+
+
+
+
    +
  • +

    Create a new Maven Project.

    +
  • +
  • +

    Choose the devon4j-template-server archetype, just like the image.

    +
  • +
+
+
+
+Select archetype +
+
+
+
    +
  • +

    Fill the Group Id, Artifact Id, Version and Package for your project.

    +
  • +
+
+
+
+Configure archetype +
+
+
+
    +
  • +

    Finish the Eclipse assistant and you are ready to start your project.

    +
  • +
+
+
+
+
+
+

What is generated

+
+
+

The application template (archetype) generates a Maven multi-module project. It has the following modules:

+
+
+
    +
  • +

    api: module with the API (REST service interfaces, transferobjects, datatypes, etc.) to be imported by other apps as a maven dependency in order to invoke and consume the offered (micro)services.

    +
  • +
  • +

    core: maven module containing the core of the application.

    +
  • +
  • +

    batch: optional module for batch(es)

    +
  • +
  • +

    server: module that bundles the entire app (core with optional batch) as a WAR file.

    +
  • +
+
+
+

The toplevel pom.xml of the generated project has the following features:

+
+
+
    +
  • +

    Properties definition: Spring-boot version, Java version, etc.

    +
  • +
  • +

    Modules definition for the modules (described above)

    +
  • +
  • +

    Dependency management: define versions for dependencies of the technology stack that are recommended and work together in a compatible way.

    +
  • +
  • +

    Maven plugins with desired versions and configuration

    +
  • +
  • +

    Profiles for test stages

    +
  • +
+
+
+
+
+

How to run your app

+
+
+

Run app from IDE

+
+

To run your application from your favourite IDE, simply launch SpringBootApp as a Java application.

+
+
+
+

Run app as bootified jar or war

+
+

More details are available here.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4net/1.0/_images/images/BussinessConfiguration.png b/docs/devon4net/1.0/_images/images/BussinessConfiguration.png new file mode 100644 index 00000000..1dffe8f6 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/BussinessConfiguration.png differ diff --git a/docs/devon4net/1.0/_images/images/CobigenContextLocation.png b/docs/devon4net/1.0/_images/images/CobigenContextLocation.png new file mode 100644 index 00000000..9744124a Binary files /dev/null and b/docs/devon4net/1.0/_images/images/CobigenContextLocation.png differ diff --git a/docs/devon4net/1.0/_images/images/Image-140917-123815.636.png b/docs/devon4net/1.0/_images/images/Image-140917-123815.636.png new file mode 100644 index 00000000..6fa3b3b1 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/Image-140917-123815.636.png differ diff --git a/docs/devon4net/1.0/_images/images/OpenAPI_file_root_folder.png b/docs/devon4net/1.0/_images/images/OpenAPI_file_root_folder.png new file mode 100644 index 00000000..033bc985 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/OpenAPI_file_root_folder.png differ diff --git a/docs/devon4net/1.0/_images/images/Project_selection.PNG b/docs/devon4net/1.0/_images/images/Project_selection.PNG new file mode 100644 index 00000000..76c5b862 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/Project_selection.PNG differ diff --git a/docs/devon4net/1.0/_images/images/Repositories.PNG b/docs/devon4net/1.0/_images/images/Repositories.PNG new file mode 100644 index 00000000..fdde31be Binary files /dev/null and b/docs/devon4net/1.0/_images/images/Repositories.PNG differ diff --git a/docs/devon4net/1.0/_images/images/ServiceExample.png b/docs/devon4net/1.0/_images/images/ServiceExample.png new file mode 100644 index 00000000..551610e3 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/ServiceExample.png differ diff --git a/docs/devon4net/1.0/_images/images/Services.png 
b/docs/devon4net/1.0/_images/images/Services.png new file mode 100644 index 00000000..f9fac78f Binary files /dev/null and b/docs/devon4net/1.0/_images/images/Services.png differ diff --git a/docs/devon4net/1.0/_images/images/architecture.png b/docs/devon4net/1.0/_images/images/architecture.png new file mode 100644 index 00000000..e21753fa Binary files /dev/null and b/docs/devon4net/1.0/_images/images/architecture.png differ diff --git a/docs/devon4net/1.0/_images/images/bg.jpg b/docs/devon4net/1.0/_images/images/bg.jpg new file mode 100644 index 00000000..32e25fac Binary files /dev/null and b/docs/devon4net/1.0/_images/images/bg.jpg differ diff --git a/docs/devon4net/1.0/_images/images/bgbw.jpg b/docs/devon4net/1.0/_images/images/bgbw.jpg new file mode 100644 index 00000000..4a0d30d8 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/bgbw.jpg differ diff --git a/docs/devon4net/1.0/_images/images/bgbw2.jpg b/docs/devon4net/1.0/_images/images/bgbw2.jpg new file mode 100644 index 00000000..455c7702 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/bgbw2.jpg differ diff --git a/docs/devon4net/1.0/_images/images/bgsat.jpg b/docs/devon4net/1.0/_images/images/bgsat.jpg new file mode 100644 index 00000000..37e05ce7 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/bgsat.jpg differ diff --git a/docs/devon4net/1.0/_images/images/bundle_folder.png b/docs/devon4net/1.0/_images/images/bundle_folder.png new file mode 100644 index 00000000..ce1d69f3 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/bundle_folder.png differ diff --git a/docs/devon4net/1.0/_images/images/business_ide.png b/docs/devon4net/1.0/_images/images/business_ide.png new file mode 100644 index 00000000..e81472b3 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/business_ide.png differ diff --git a/docs/devon4net/1.0/_images/images/business_ide_ext.png b/docs/devon4net/1.0/_images/images/business_ide_ext.png new file mode 100644 index 
00000000..a7a3214f Binary files /dev/null and b/docs/devon4net/1.0/_images/images/business_ide_ext.png differ diff --git a/docs/devon4net/1.0/_images/images/cobigen_generate0.png b/docs/devon4net/1.0/_images/images/cobigen_generate0.png new file mode 100644 index 00000000..3f9ddfcb Binary files /dev/null and b/docs/devon4net/1.0/_images/images/cobigen_generate0.png differ diff --git a/docs/devon4net/1.0/_images/images/cobigen_generate1.png b/docs/devon4net/1.0/_images/images/cobigen_generate1.png new file mode 100644 index 00000000..3917e4ee Binary files /dev/null and b/docs/devon4net/1.0/_images/images/cobigen_generate1.png differ diff --git a/docs/devon4net/1.0/_images/images/code.jpg b/docs/devon4net/1.0/_images/images/code.jpg new file mode 100644 index 00000000..faac9299 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/code.jpg differ diff --git a/docs/devon4net/1.0/_images/images/codeSat.jpg b/docs/devon4net/1.0/_images/images/codeSat.jpg new file mode 100644 index 00000000..6e7a8316 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/codeSat.jpg differ diff --git a/docs/devon4net/1.0/_images/images/codeSat2.jpg b/docs/devon4net/1.0/_images/images/codeSat2.jpg new file mode 100644 index 00000000..4d9afa59 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/codeSat2.jpg differ diff --git a/docs/devon4net/1.0/_images/images/codeSat3.jpg b/docs/devon4net/1.0/_images/images/codeSat3.jpg new file mode 100644 index 00000000..32e25fac Binary files /dev/null and b/docs/devon4net/1.0/_images/images/codeSat3.jpg differ diff --git a/docs/devon4net/1.0/_images/images/coding.png b/docs/devon4net/1.0/_images/images/coding.png new file mode 100644 index 00000000..76c0c682 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/coding.png differ diff --git a/docs/devon4net/1.0/_images/images/dependency_injection.png b/docs/devon4net/1.0/_images/images/dependency_injection.png new file mode 100644 index 00000000..e743a8ad Binary 
files /dev/null and b/docs/devon4net/1.0/_images/images/dependency_injection.png differ diff --git a/docs/devon4net/1.0/_images/images/devon.png b/docs/devon4net/1.0/_images/images/devon.png new file mode 100644 index 00000000..ed4f4d8f Binary files /dev/null and b/docs/devon4net/1.0/_images/images/devon.png differ diff --git a/docs/devon4net/1.0/_images/images/devonWhite.png b/docs/devon4net/1.0/_images/images/devonWhite.png new file mode 100644 index 00000000..dd7c7849 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/devonWhite.png differ diff --git a/docs/devon4net/1.0/_images/images/doc_copy_always.png b/docs/devon4net/1.0/_images/images/doc_copy_always.png new file mode 100644 index 00000000..b72c3840 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/doc_copy_always.png differ diff --git a/docs/devon4net/1.0/_images/images/email_swagger.png b/docs/devon4net/1.0/_images/images/email_swagger.png new file mode 100644 index 00000000..d01c8246 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/email_swagger.png differ diff --git a/docs/devon4net/1.0/_images/images/environment.png b/docs/devon4net/1.0/_images/images/environment.png new file mode 100644 index 00000000..e4fa98a5 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/environment.png differ diff --git a/docs/devon4net/1.0/_images/images/functionality_stack.png b/docs/devon4net/1.0/_images/images/functionality_stack.png new file mode 100644 index 00000000..629019b8 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/functionality_stack.png differ diff --git a/docs/devon4net/1.0/_images/images/global_arch.png b/docs/devon4net/1.0/_images/images/global_arch.png new file mode 100644 index 00000000..97d79d95 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/global_arch.png differ diff --git a/docs/devon4net/1.0/_images/images/helmet_white.png b/docs/devon4net/1.0/_images/images/helmet_white.png new file mode 100644 index 
00000000..d2007a19 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/helmet_white.png differ diff --git a/docs/devon4net/1.0/_images/images/iis_1.png b/docs/devon4net/1.0/_images/images/iis_1.png new file mode 100644 index 00000000..be03aaf7 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/iis_1.png differ diff --git a/docs/devon4net/1.0/_images/images/iis_2.png b/docs/devon4net/1.0/_images/images/iis_2.png new file mode 100644 index 00000000..3aadb285 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/iis_2.png differ diff --git a/docs/devon4net/1.0/_images/images/iis_3.png b/docs/devon4net/1.0/_images/images/iis_3.png new file mode 100644 index 00000000..acbc3833 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/iis_3.png differ diff --git a/docs/devon4net/1.0/_images/images/jwt.png b/docs/devon4net/1.0/_images/images/jwt.png new file mode 100644 index 00000000..7856fa56 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/jwt.png differ diff --git a/docs/devon4net/1.0/_images/images/jwt_schema.png b/docs/devon4net/1.0/_images/images/jwt_schema.png new file mode 100644 index 00000000..5a8320d0 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/jwt_schema.png differ diff --git a/docs/devon4net/1.0/_images/images/laryer_arch_detail.png b/docs/devon4net/1.0/_images/images/laryer_arch_detail.png new file mode 100644 index 00000000..3c1c2eef Binary files /dev/null and b/docs/devon4net/1.0/_images/images/laryer_arch_detail.png differ diff --git a/docs/devon4net/1.0/_images/images/layer_arch_detail.png b/docs/devon4net/1.0/_images/images/layer_arch_detail.png new file mode 100644 index 00000000..afca0433 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/layer_arch_detail.png differ diff --git a/docs/devon4net/1.0/_images/images/layer_impl.png b/docs/devon4net/1.0/_images/images/layer_impl.png new file mode 100644 index 00000000..819979c9 Binary files /dev/null and 
b/docs/devon4net/1.0/_images/images/layer_impl.png differ diff --git a/docs/devon4net/1.0/_images/images/logo-dark.old.png b/docs/devon4net/1.0/_images/images/logo-dark.old.png new file mode 100644 index 00000000..c2a66006 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/logo-dark.old.png differ diff --git a/docs/devon4net/1.0/_images/images/logo-dark.png b/docs/devon4net/1.0/_images/images/logo-dark.png new file mode 100644 index 00000000..b6867b41 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/logo-dark.png differ diff --git a/docs/devon4net/1.0/_images/images/logo.png b/docs/devon4net/1.0/_images/images/logo.png new file mode 100644 index 00000000..9bc501dd Binary files /dev/null and b/docs/devon4net/1.0/_images/images/logo.png differ diff --git a/docs/devon4net/1.0/_images/images/mtsn_components.png b/docs/devon4net/1.0/_images/images/mtsn_components.png new file mode 100644 index 00000000..47b9342f Binary files /dev/null and b/docs/devon4net/1.0/_images/images/mtsn_components.png differ diff --git a/docs/devon4net/1.0/_images/images/onion.png b/docs/devon4net/1.0/_images/images/onion.png new file mode 100644 index 00000000..5fadda5b Binary files /dev/null and b/docs/devon4net/1.0/_images/images/onion.png differ diff --git a/docs/devon4net/1.0/_images/images/onion_architecture.png b/docs/devon4net/1.0/_images/images/onion_architecture.png new file mode 100644 index 00000000..9e2571b6 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/onion_architecture.png differ diff --git a/docs/devon4net/1.0/_images/images/onion_architecture_solution.png b/docs/devon4net/1.0/_images/images/onion_architecture_solution.png new file mode 100644 index 00000000..282a5d83 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/onion_architecture_solution.png differ diff --git a/docs/devon4net/1.0/_images/images/overlay.png b/docs/devon4net/1.0/_images/images/overlay.png new file mode 100644 index 00000000..3cd22cc9 Binary files 
/dev/null and b/docs/devon4net/1.0/_images/images/overlay.png differ diff --git a/docs/devon4net/1.0/_images/images/packages.png b/docs/devon4net/1.0/_images/images/packages.png new file mode 100644 index 00000000..6f9c727e Binary files /dev/null and b/docs/devon4net/1.0/_images/images/packages.png differ diff --git a/docs/devon4net/1.0/_images/images/pic01.jpg b/docs/devon4net/1.0/_images/images/pic01.jpg new file mode 100644 index 00000000..2c4200ca Binary files /dev/null and b/docs/devon4net/1.0/_images/images/pic01.jpg differ diff --git a/docs/devon4net/1.0/_images/images/pic02.jpg b/docs/devon4net/1.0/_images/images/pic02.jpg new file mode 100644 index 00000000..34ac5e86 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/pic02.jpg differ diff --git a/docs/devon4net/1.0/_images/images/pic03.jpg b/docs/devon4net/1.0/_images/images/pic03.jpg new file mode 100644 index 00000000..db243174 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/pic03.jpg differ diff --git a/docs/devon4net/1.0/_images/images/project_doc.png b/docs/devon4net/1.0/_images/images/project_doc.png new file mode 100644 index 00000000..ddcf8dc7 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/project_doc.png differ diff --git a/docs/devon4net/1.0/_images/images/project_modules.png b/docs/devon4net/1.0/_images/images/project_modules.png new file mode 100644 index 00000000..7c3865d2 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/project_modules.png differ diff --git a/docs/devon4net/1.0/_images/images/project_nature.png b/docs/devon4net/1.0/_images/images/project_nature.png new file mode 100644 index 00000000..573c8fbc Binary files /dev/null and b/docs/devon4net/1.0/_images/images/project_nature.png differ diff --git a/docs/devon4net/1.0/_images/images/samples.png b/docs/devon4net/1.0/_images/images/samples.png new file mode 100644 index 00000000..05f6dc4a Binary files /dev/null and b/docs/devon4net/1.0/_images/images/samples.png differ diff 
--git a/docs/devon4net/1.0/_images/images/serilog_seq.png b/docs/devon4net/1.0/_images/images/serilog_seq.png new file mode 100644 index 00000000..0bdbbe2e Binary files /dev/null and b/docs/devon4net/1.0/_images/images/serilog_seq.png differ diff --git a/docs/devon4net/1.0/_images/images/stack.png b/docs/devon4net/1.0/_images/images/stack.png new file mode 100644 index 00000000..ef19c183 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/stack.png differ diff --git a/docs/devon4net/1.0/_images/images/swaggerDoc.png b/docs/devon4net/1.0/_images/images/swaggerDoc.png new file mode 100644 index 00000000..9a822b4f Binary files /dev/null and b/docs/devon4net/1.0/_images/images/swaggerDoc.png differ diff --git a/docs/devon4net/1.0/_images/images/swaggerDocXMLCheck.png b/docs/devon4net/1.0/_images/images/swaggerDocXMLCheck.png new file mode 100644 index 00000000..7fd7fa7a Binary files /dev/null and b/docs/devon4net/1.0/_images/images/swaggerDocXMLCheck.png differ diff --git a/docs/devon4net/1.0/_images/images/templates.png b/docs/devon4net/1.0/_images/images/templates.png new file mode 100644 index 00000000..098870dc Binary files /dev/null and b/docs/devon4net/1.0/_images/images/templates.png differ diff --git a/docs/devon4net/1.0/_images/images/traiectum_white.png b/docs/devon4net/1.0/_images/images/traiectum_white.png new file mode 100644 index 00000000..c4e4bd34 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/traiectum_white.png differ diff --git a/docs/devon4net/1.0/_images/images/uow_sample.png b/docs/devon4net/1.0/_images/images/uow_sample.png new file mode 100644 index 00000000..cf521a79 Binary files /dev/null and b/docs/devon4net/1.0/_images/images/uow_sample.png differ diff --git a/docs/devon4net/1.0/_images/images/userguide.png b/docs/devon4net/1.0/_images/images/userguide.png new file mode 100644 index 00000000..ed3a222f Binary files /dev/null and b/docs/devon4net/1.0/_images/images/userguide.png differ diff --git 
a/docs/devon4net/1.0/_images/images/vscodeopenfolder.png b/docs/devon4net/1.0/_images/images/vscodeopenfolder.png new file mode 100644 index 00000000..4a98816d Binary files /dev/null and b/docs/devon4net/1.0/_images/images/vscodeopenfolder.png differ diff --git a/docs/devon4net/1.0/architecture_guide.html b/docs/devon4net/1.0/architecture_guide.html new file mode 100644 index 00000000..1c38b7d8 --- /dev/null +++ b/docs/devon4net/1.0/architecture_guide.html @@ -0,0 +1,767 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Introduction

+
+
+

The devonfw platform provides a solution to building applications which combine best-in-class frameworks and libraries as well as industry proven practices and code conventions. +It massively speeds up development, reduces risks and helps you to deliver better results.

+
+
+
+
+

Overview Onion Design

+
+
+

This guide shows the overall proposed architecture in terms of separated layers, making use of the Onion architecture pattern. Each layer represents a logical group of components and functionality. In this guide you will learn the basics of the proposed layer-based architecture in order to develop software making use of the best practices.

+
+
+
+
+

Layer specification

+
+
+
+
+

It is important to understand the distinction between layers and tiers. Layers describe the logical groupings of the functionality and components in an application; whereas tiers describe the physical distribution of the functionality and components on separate servers, computers, networks, or remote locations. Although both layers and tiers use the same set of names (presentation, business, services, and data), remember that only tiers imply a physical separation. It is quite common to locate more than one layer on the same physical machine (the same tier). You can think of the term tier as referring to physical distribution patterns such as two-tier, three-tier, and n-tier.

+
+
+
+— Layered Application Guidelines
+MSDN Microsoft +
+
+
+

The proposed architecture makes use of cooperating components called layers. To develop specific functionality each layer contains a set of components which is capable to develop such functionalities.

+
+
+

The next figure represents the different layers:

+
+
+
+technical architecture +
+
Figure 1. High level architecture representation
+
+
+

The layers are separated in physical tiers making use of interfaces. This pattern makes it possible to be flexible in different kinds of projects, maximizing performance and deployment strategies (synchronous/asynchronous access, security, component deployment in different environments, microservices…​). Another important point is to provide automated unit testing or test-driven development (TDD) facilities.

+
+
+
+
+

== Application layer

+
+
+

The Application Layer encapsulates the different .Net projects and its resource dependencies and manages the user interaction depending on the project’s nature.

+
+
+
+technical architecture +
+
Figure 2. Net application stack
+
+
+

The provided application template implements a dotnet API application. It also integrates the Swagger client by default. This provides the possibility to share the contract with external applications (angular, mobile apps, external services…​).

+
+
+
+
+

== Business layer

+
+
+

The business layer implements the core functionality of the application and encapsulates the component’s logic. +This layer provides the interface between the data transformation and the application exposition. This allow the data to be optimized and ready for different data consumers.

+
+
+

This layer may implement for each main entity the API controller, the entity related service and other classes to support the application logic.

+
+
+

In order to implement the service logic, the services class must follow the next specification:

+
+
+
+
    public class Service<TContext> : IService where TContext: DbContext
+
+
+
+

PE: devon4Net API template shows how to implement the TODOs service as follows:

+
+
+
+
    public class TodoService: Service<TodoContext>, ITodoService
+
+
+
+

Where Service is the base service class to be inherited and have full access for the Unit of work, TodoContext is the TODOs database context and ITodoService is the interface of the service, which exposes the public extended methods to be implemented.

+
+
+
+
+

== Data layer

+
+
+

The data layer orchestrates the data obtained between the Domain Layer and the Business Layer. Also transforms the data to be used more efficiently between layers.

+
+
+

So, if a service needs the help of another service or repository, the implemented Dependency Injection is the solution to accomplish the task.

+
+
+

The main aim of this layer is to implement the repository for each entity. The repository’s interface is defined in the Domain layer.

+
+
+

In order to implement the repository logic, the repository class must follow the next specification:

+
+
+
+
    Repository<T> : IRepository<T> where T : class
+
+
+
+

PE: devon4Net API template shows how to implement the TODOs repository as follows:

+
+
+
+
    public class TodoRepository : Repository<Todos>, ITodoRepository
+
+
+
+

Where Repository is the base repository class to be inherited, giving full access to the basic CRUD operations, Todos is the entity defined in the database context. ITodoRepository is the interface of the repository, which exposes the public extended methods to be implemented.

+
+
+ + + + + +
+ + +Please remember that <T> is the mapped class which references the entity from the database context. This abstraction allows writing service implementations with different database contexts +
+
+
+
+
+

== Domain layer

+
+
+

The domain layer provides access to data directly exposed from other systems. The main source is used to be a data base system. The provided template makes use of Entity Framework solution from Microsoft in order to achieve this functionality.

+
+
+

To make good use of this technology, the Repository pattern has been implemented with the help of the Unit of Work pattern. Also, the use of generic types makes this solution highly flexible.

+
+
+

Regarding the database source, each entity is mapped as a class. The Repository pattern allows using these mapped classes to access the database via Entity Framework:

+
+
+
+
 public class UnitOfWork<TContext> : IUnitOfWork<TContext> where TContext : DbContext
+
+
+
+ + + + + +
+ + +Where <T> is the mapped class which references the entity from the database. +
+
+
+

The repository and unit of work patterns create an abstraction layer between the data access layer and the business logic layer of an application.

+
+
+ + + + + +
+ + +Domain Layer has no dependencies with other layers. It contains the Entities, datasources and the Repository Interfaces. +
+
+
+
+
+

devon4Net architecture layer implementation

+
+
+

The next picture shows how the devon4Net API template implements the architecture described in previous points:

+
+
+
+devon4Net api template architecture implementation +
+
Figure 3. devon4Net architecture implementations
+
+
+
+
+

== Cross-Cutting concerns

+
+
+

Cross-cutting provides the implementation functionality that spans layers. Each functionality is implemented through components able to work stand alone. This approach provides better reusability and maintainability.

+
+
+

A common component set of cross cutting components include different types of functionality regarding to authentication, authorization, security, caching, configuration, logging, and communication.

+
+
+
+
+

Communication between Layers: Interfaces

+
+
+

The main goal of the use of interfaces is to achieve loose coupling between layers and minimize dependencies.

+
+
+

Public interfaces allow to hide implementation details of the components within the layers making use of dependency inversion.

+
+
+

In order to make this possible, we make use of Dependency Injection Pattern (implementation of dependency inversion) given by default in .Net Core.

+
+
+

The provided Data Layer contains the abstract classes to inherit from. All new repository and service classes must inherit from them, and they must also implement their own interfaces.

+
+
+
+technical architecture +
+
Figure 4. Architecture representation in deep
+
+
+
+
+

Templates

+
+ +
+
+
+

State of the art

+
+
+

The provided bundle contains the devon4Net API template based on .net core. The template allows creating a microservice solution with minimal configuration.

+
+
+

Also, the devon4Net framework can be added to third party templates such as the Amazon API template to use lambdas in serverless environments.

+
+
+

Included features:

+
+
+
    +
  • +

    Logging:

    +
  • +
  • +

    Text File

    +
  • +
  • +

    Sqlite database support

    +
  • +
  • +

    Serilog Seq Server support

    +
  • +
  • +

    Graylog integration ready through TCP/UDP/HTTP protocols

    +
  • +
  • +

    API Call params interception (simple and compose objects)

    +
  • +
  • +

    API error exception management

    +
  • +
  • +

    Swagger:

    +
  • +
  • +

    Swagger autogenerating client from comments and annotations on controller classes

    +
  • +
  • +

    Full swagger client customization (Version, Title, Description, Terms, License, Json end point definition)

    +
  • +
  • +

    Easy configuration with just one configuration node in your settings file

    +
  • +
  • +

    JWT:

    +
  • +
  • +

    Issuer, audience, token expiration customization by external file configuration

    +
  • +
  • +

    Token generation via certificate

    +
  • +
  • +

    MVC inherited classes to access JWT user properties

    +
  • +
  • +

    API method security access based on JWT Claims

    +
  • +
  • +

    CORS:

    +
  • +
  • +

    Simple CORS definition ready

    +
  • +
  • +

    Multiple CORS domain origin definition with specific headers and verbs

    +
  • +
  • +

    Headers:

    +
  • +
  • +

    Automatic header injection with middleware.

    +
  • +
  • +

    Supported header definitions: AccessControlExposeHeader, StrictTransportSecurityHeader, XFrameOptionsHeader, XssProtectionHeader, XContentTypeOptionsHeader, ContentSecurityPolicyHeader, PermittedCrossDomainPoliciesHeader, ReferrerPolicyHeader

    +
  • +
  • +

    Reporting server:

    +
  • +
  • +

    Partial implementation of reporting server based on My-FyiReporting (now runs on linux container)

    +
  • +
  • +

    Testing:

    +
  • +
  • +

    Integration test template with sqlite support

    +
  • +
  • +

    Unit test template

    +
  • +
  • +

    Moq, xunit frameworks integrated

    +
  • +
  • +

    Circuit breaker:

    +
  • +
  • +

    Integrated with HttpClient factory

    +
  • +
  • +

    Client Certificate customization

    +
  • +
  • +

    Number of retries customizable

    +
  • +
  • +

    LiteDB:

    +
  • +
  • +

    Support for LiteDB

    +
  • +
  • +

    Provided basic repository for CRUD operations

    +
  • +
  • +

    RabbitMq:

    +
  • +
  • +

    Use of EasyQNet library to perform CQRS main functions between different microservices

    +
  • +
  • +

    Send commands / Subscribe queues with one C# sentence

    +
  • +
  • +

    Events management: Handled received commands to subscribed messages

    +
  • +
  • +

    Automatic messaging backup when sent and handled (Internal database via LiteDB and database backup via Entity Framework)

    +
  • +
  • +

    MediatR:

    +
  • +
  • +

    Use of MediatR library to perform CQRS main functions in memory

    +
  • +
  • +

    Send commands / Subscribe queues with one C# sentence

    +
  • +
  • +

    Events management: Handled received commands to subscribed messages

    +
  • +
  • +

    Automatic messaging backup when sent and handled (Internal database via LiteDB and database backup via Entity Framework)

    +
  • +
  • +

    SmaxHcm:

    +
  • +
  • +

    Component to manage Microfocus SMAX for cloud infrastructure services management

    +
  • +
  • +

    CyberArk:

    +
  • +
  • +

    Manage safe credentials with CyberArk

    +
  • +
  • +

    AnsibleTower:

    +
  • +
  • +

    Ansible automates the cloud infrastructure. devon4net integrates with Ansible Tower via API consumption endpoints

    +
  • +
  • +

    gRPC+Protobuf:

    +
  • +
  • +

    Added Client + Server basic templates sample gRPC with Google’s Protobuf protocol using devon4net

    +
  • +
  • +

    Kafka:

    +
  • +
  • +

    Added Apache Kafka support for deliver/consume messages and create/delete topics as well

    +
  • +
+
+
+
+
+

Software stack

+
+
+
Technology Stack of devon4Net
+

|== == == == == == == == == == == = +|Topic|Detail|Implementation +|runtime|language & VM|.Net Core Version 3.0 +|persistence|OR-mapper| Entity Framework Core +|service|REST services|https://www.asp.net/web-api[Web API] +|service - integration to external systems - optional|SOAP services|https://msdn.microsoft.com/en-us/library/dd456779(v=vs.110).aspx[WCF] +|logging|framework|https://github.com/serilog/serilog-extensions-logging[Serilog] +|validation|framework| NewtonSoft Json, DataAnnotations +|component management|dependency injection| Unity +|security|Authentication & Authorization| JWT .Net Security - Token based, local Authentication Provider +|unit tests|framework|https://github.com/xunit/xunit[xUnit] +|Circuit breaker|framework, allows retry pattern on http calls|https://github.com/App-vNext/Polly[Polly] +|CQRS|Memory events and queue events| MediatR - EasyNetQ - Kafka +|Kafka| Kafka support for enterprise applications| Confluent.Kafka +|Fluent Validation| Fluent validation for class instances|https://fluentvalidation.net/[Fluent validation] +|== == == == == == == == == == == =

+
+
+
+
+

Target platforms

+
+
+

Thanks to the new .Net Core platform from Microsoft, the developed software can be published on Windows, Linux, OS X and Android platforms.

+
+
+
+ +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4net/1.0/cobiGen.html b/docs/devon4net/1.0/cobiGen.html new file mode 100644 index 00000000..d1f090e5 --- /dev/null +++ b/docs/devon4net/1.0/cobiGen.html @@ -0,0 +1,534 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4net Cobigen Guide

+
+ +
+
+
+

Overview

+
+
+

In this guide we will explain how to generate a new WebApi project from an OpenAPI 3.0.0 specification. This means that we are going to use a “contract first” strategy. This is going to be possible due to these type of files that contain all the information about entities, operations, etc…

+
+
+

In order to make it work we are using CobiGen, a powerful tool for generating source code. CobiGen allows users to generate all the structure and code of the components, helping to save a lot of time otherwise wasted on repetitive tasks.

+
+
+
+
+

Getting things ready

+
+ +
+
+
+

devonfw Distribution

+
+
+

The devonfw distributions can be obtained from the TeamForge releases library and are packaged in zips files that include all the needed tools, software and configurations.

+
+
+

It is not necessary to install or configure anything. Just extracting the zip content is enough to have a fully functional devonfw. The only thing you have to do is run create-or-update-workspace.bat and then update-all-workspaces.bat to set up all the needed tools.

+
+
+
+
+

devon4net Templates

+
+
+

We are going to use the template of devon4net as a base to generate all the code, so what we have to do now is to download said template using the following steps.

+
+
+

First of all you have to set up all the environment for .NET, you can do this using the following tutorial. Next we are going to create a new folder where we want to have the WebAPI project, lastly we are going to open the terminal there.

+
+
+

Type the following:

+
+
+
+
dotnet new -i Devon4Net.WebAPI.Template
+
+
+
+

and then:

+
+
+
+
dotnet new Devon4NetAPI
+
+
+
+
+
+

OpenAPI File

+
+
+

In order to let CobiGen generate all the files, we first have to make some modifications to our OpenAPI file.

+
+
+

It is obligatory to put the “x-rootpackage” tag to indicate where CobiGen will place the generated files as well as the "x-component" tags for each component; keep in mind that due to CobiGen’s limitations each component must have its own entity.

+
+
+

You can read more information about how to configure your OpenAPI file and a working example here.

+
+
+
+
+

Generating files

+
+
+

Cobigen allows us to generate the files in two different ways. One of them is using Eclipse, which can be done through its graphical interface. The other way to generate the code is using the Cobigen CLI tool.

+
+
+
+
+

Generating files through Eclipse

+
+
+

In order to generate the files using Eclipse we need to follow some simple steps.

+
+
+

First we are going to import our basic devon4net WebAPI Project into Eclipse. to do so open Eclipse with the “eclipse-main.bat” file that can be found in the devon distribution root folder. Once we are inside of Eclipse we go to File > Open projects from file system…​ and, under "Directory", search for your project.

+
+
+
+cobigen +
+
+
+

Next we copy our OpenAPI file into the root folder of the project.

+
+
+
+cobigen +
+
+
+

And then we right click on OpenAPI file and then select CobiGen > Generate…​ It will display a window like this:

+
+
+
+cobigen +
+
+
+

To select all .NET features choose CRUD devon4net Server otherwise you can select only those that interest you.

+
+
+
+cobigen +
+
+
+

Once you select all the files that you want to generate, click on the “Finish” button to generate all the source code.

+
+
+
+
+

Generating files through Cobigen CLI

+
+
+

In order to generate the files using the Cobigen CLI it is needed to do the following steps:

+
+
+
    +
  1. +

    Go to devonfw distribution folder

    +
  2. +
  3. +

    Run console.bat, this will open a console.

    +
  4. +
  5. +

    Go to the folder you downloaded the devon4net template and your yml file.

    +
  6. +
  7. +

    Run the command:

    +
    +
    +
    cobigen generate {yourOpenAPIFile}.yml
    +
    +
    +
  8. +
  9. +

    A list of increments will be printed so that you can start the generation. The CRUD devon4net Server increment has to be selected.

    +
  10. +
+
+
+
+
+

Configuration

+
+ +
+
+
+

Dependency Injection configuration

+
+
+

At this point it is needed to make some modifications in the code in order to configure correctly the server. To do so it is needed to locate the services and the repositories files that were created in Devon4Net.WebAPI.Implementation

+
+
+

Services location:

+
+
+
+cobigen +
+
+
+

Repositories location:

+
+
+
+cobigen +
+
+
+

Now, we are going to open the following file Devon4Net.WebAPI.Implementation\Configure\DevonConfiguration.cs. +In there we have to add the Dependency Injection for the services and the repositories that Cobigen has generated. The following image is an example of what is needed to add.

+
+
+
+cobigen +
+
+
+

Moreover it is needed to remove the last line in order to be able to run the application:

+
+
+
+
`throw new NotImplementedException(...);`
+
+
+
+
+
+

Configure data base

+
+
+

Cobigen generates an empty context that has to be filled in manually in order to be able to work with the database. The context can be found in [Project_Name]/Devon4Net.WebAPI.Implementation/Domain/Database/CobigenContext.cs.

+
+
+
+cobigen +
+
+
+
+
+

Configure services

+
+
+

In order to finish the configuration of the services it is needed to go to each service file of the managements generated.

+
+
+

In there we will see some "NotImplementedExceptions", so it is needed to read carefully each comment inside of each exception in order to be able to use the service. An example of a service with its NotImplementedException comments is shown below:

+
+
+
+cobigen +
+
+
+
+
+

Run the application

+
+
+

After doing all the steps defined above, open a terminal in path: [Project_Name]/Devon4Net.Application.WebAPI and then type:

+
+
+
+
dotnet run
+
+
+
+

This will deploy our application in our localhost with the port 8082, so when you click here (https://localhost:8082/swagger) you can see, in swagger, all the services and the data model.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4net/1.0/codeconvention.html b/docs/devon4net/1.0/codeconvention.html new file mode 100644 index 00000000..e8cd1260 --- /dev/null +++ b/docs/devon4net/1.0/codeconvention.html @@ -0,0 +1,616 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Code conventions

+
+
+

Introduction

+
+
+

This document covers .NET Coding Standards and is recommended to be read by team leaders/sw architects and developing teams operating in the Microsoft .NET environment.

+
+
+

“All the code in the system looks as if it was written by a single – very competent – individual” (K. Beck)

+
+
+
+
+

Capitalization Conventions

+
+
+

Terminology

+
+

Camel Case (camelCase)

+
+

Each word or abbreviation in the middle of the phrase begins with a capital letter, with no intervening spaces or punctuation.

+
+
+

The camel case convention, used only for parameter names, capitalizes the first character of each word except the first word, as shown in the following examples. As the example also shows, two-letter acronyms that begin a camel-cased identifier are both lowercase.

+
+
+

use camelCasing for parameter names.

+
+
+
+

Pascal Case (PascalCase)

+
+

The first letter of each concatenated word is capitalized. No other characters are used to separate the words, like hyphens or underscores.

+
+
+

The PascalCasing convention, used for all identifiers except parameter names, capitalizes the first character of each word (including acronyms over two letters in length).

+
+
+

use PascalCasing for all public member, type, and namespace names consisting of multiple words.

+
+
+
+

Underscore Prefix (_underScore)

+
+

For underscore ( _ ), the word after _ uses camelCase terminology.

+
+
+
+
+
+
+

General Naming Conventions

+
+
+

choose easily readable identifier names.

+
+
+

favor readability over brevity.

+
+
+
+
◦ e.g.: `GetLength` is a better name than GetInt.
+◦ Aim for the “ubiquitous language” (E. Evans): A language distilled from the domain language, which helps the team clarifying domain concepts and communicating with domain experts.
+
+
+
+

prefer adding a suffix rather than a prefix to indicate a new version of an existing API.

+
+
+

use a numeric suffix to indicate a new version of an existing API, particularly if the existing name of the API is the only name that makes sense (i.e., if it is an industry standard) and if adding any meaningful suffix (or changing the name) is not an appropriate option.

+
+
+

do not use underscores, hyphens, or any other non-alphanumeric characters.

+
+
+

do not use Hungarian notation.

+
+
+

avoid using identifiers that conflict with keywords of widely used programming languages.

+
+
+

do not use abbreviations or contractions as part of identifier names.

+
+
+

do not use any acronyms that are not widely accepted, and even if they are, only when necessary.

+
+
+

do not use the "Ex" (or a similar) suffix for an identifier to distinguish it from an earlier version of the same API.

+
+
+

do not use C# reserved words as names.

+
+
+

do not use Hungarian notation. Hungarian notation is the practice of including a prefix in identifiers to encode some metadata about the parameter, such as the data type of the identifier.

+
+
+
+
◦ `e.g.: iNumberOfClients, sClientName`
+
+
+
+
+
+

Names of Assemblies and DLLs

+
+
+

An assembly is the unit of deployment and identity for managed code programs. Although assemblies can span one or more files, typically an assembly maps one-to-one with a DLL. Therefore, this section describes only DLL naming conventions, which then can be mapped to assembly naming conventions.

+
+
+

choose names for your assembly DLLs that suggest large chunks of functionality, such as System.Data.

+
+
+

Assembly and DLL names don’t have to correspond to namespace names, but it is reasonable to follow the namespace name when naming assemblies. A good rule of thumb is to name the DLL based on the common prefix of the assemblies contained in the assembly. For example, an assembly with two namespaces, MyCompany.MyTechnology.FirstFeature and MyCompany.MyTechnology.SecondFeature, could be called MyCompany.MyTechnology.dll.

+
+
+

consider naming DLLs according to the following pattern:
+<Company>.<Component>.dll +where <Component> contains one or more dot-separated clauses.

+
+
+

For example: +Litware.Controls.dll.

+
+
+
+
+

General coding style

+
+
+
    +
  • +

    Source files: One Namespace per file and one class per file.

    +
  • +
  • +

    Braces: On new line. Always use braces when optional.

    +
  • +
  • +

    Indention: Use tabs with size of 4.

    +
  • +
  • +

    Comments: Use // for simple comment or /// for summaries. Do not /* … */ and do not flower box.

    +
  • +
  • +

    Use built-in C# native data types vs .NET CTS types (string instead of String)

    +
  • +
  • +

    Avoid changing default type in Enums.

    +
  • +
  • +

    Use base or this only in constructors or within an override.

    +
  • +
  • +

    Always check for null before invoking events.

    +
  • +
  • +

    Avoid using Finalize. Use C# Destructors and do not create Finalize() method.

    +
  • +
  • +

    Suggestion: Use blank lines, to make it much more readable by dividing it into small, easy-to-digest sections:

    +
    +
    +
    ◦ Use a single blank line to separate logical groups of code, such as control structures.
    +◦ Use two blank lines to separate method definitions
    +
    +
    +
  • +
+
+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
CaseConvention

Source File

Pascal case. Match class name and file name

Namespace

Pascal case

Class

Pascal case

Interface

Pascal case

Generics

Single capital letter (T or K)

Methods

Pascal case (use a Verb or Verb+Object)

Public field

Pascal case

Private field

Camel case with underscore (_) prefix

Static field

Pascal case

Property

Pascal case. Try to use get and set convention {get;set;}

Constant

Pascal case

Enum

Pascal case

Variable (inline)

Camel case

Param

Camel case

+
+
+
+

Use of Region guideline

+
+
+

Regions can be used to collapse code inside Visual Studio .NET. Regions are ideal candidates to hide boiler plate style code that adds little value to the reader on your code. Regions can then be expanded to provide progressive disclosure of the underlying details of the class or method.

+
+
+
    +
  • +

    Do Not regionalise entire type definitions that are of an important nature. Types such as enums (which tend to be fairly static in their nature) can be regionalised – their permissible values show up in Intellisense anyway.

    +
  • +
  • +

    Do Not regionalise an entire file. When another developer opens the file, all they will see is a single line in the code editor pane.

    +
  • +
  • +

    Do regionalise boiler plate type code.

    +
  • +
+
+
+
+
+

Use of Comment guideline

+
+
+

Code is the only completely reliable documentation: write “good code” first!

+
+
+

Avoid Unnecessary comments

+
+
    +
  • +

    Choosing good names for fields, methods, parameters, etc. “let the code speak” (K. Beck) by itself reducing the need for comments and documentation

    +
  • +
  • +

    Avoid “repeating the code” and commenting the obvious

    +
  • +
  • +

    Avoid commenting “tricky code”: rewrite it! If there’s no time at present to refactor a tricky section, mark it with a TODO and schedule time to take care of it as soon as possible.

    +
  • +
+
+
+
+

Effective comments

+
+
    +
  • +

    Use comments to summarize a section of code

    +
  • +
  • +

    Use comments to clarify sensitive pieces of code

    +
  • +
  • +

    Use comments to clarify the intent of the code

    +
  • +
  • +

    Bad written or out-of-date comments are more damaging than helpful:

    +
  • +
  • +

    Write clear and effective comments

    +
  • +
  • +

    Pay attention to pre-existing comments when modifying code or copying&pasting code

    +
  • +
+
+
+
+
+ +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4net/1.0/environment.html b/docs/devon4net/1.0/environment.html new file mode 100644 index 00000000..488d8288 --- /dev/null +++ b/docs/devon4net/1.0/environment.html @@ -0,0 +1,443 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Environment

+
+ +
+
+
+

Overview

+
+ +
+
+
+

Required software

+ +
+
+

Setting up the environment

+
+
+
    +
  1. +

    Download and install Visual Studio Code

    +
  2. +
  3. +

    Download and install .Net Core SDK

    +
  4. +
  5. +

    Install the extension Omnisharp in Visual Studio Code

    +
  6. +
+
+
+
+
+

== Hello world

+
+
+
    +
  1. +

    Open a project:

    +
    +
      +
    • +

      Open Visual Studio Code.

      +
    • +
    • +

      Click on the Explorer icon on the left menu and then click Open Folder.

      +
    • +
    • +

      Select the folder you want your C# project to be in and click Select Folder. For our example, we’ll create a folder for our project named 'HelloWorld'.

      +
    • +
    +
    +
  2. +
  3. +

    Initialize a C# project:

    +
    +
      +
    • +

      Open the Integrated Terminal from Visual Studio Code by typing CTRL+(backtick). Alternatively, you can select View > Integrated Terminal from the main menu.

      +
    • +
    • +

      In the terminal window, type dotnet new console.

      +
    • +
    • +

      This creates a Program.cs file in your folder with a simple "Hello World" program already written, along with a C# project file named HelloWorld.csproj.

      +
    • +
    +
    +
  4. +
  5. +

    Resolve the build assets:

    +
    +
      +
    • +

      For .NET Core 2.0, this step is optional. The dotnet restore command executes automatically when a new project is created.

      +
    • +
    +
    +
  6. +
  7. +

    Run the "Hello World" program:

    +
    +
      +
    • +

      Type dotnet run.

      +
    • +
    +
    +
  8. +
+
+
+
+
+

Debug

+
+
+
    +
  1. +

    Open Program.cs by clicking on it. The first time you open a C# file in Visual Studio Code, OmniSharp will load in the editor.

    +
  2. +
  3. +

    Visual Studio Code will prompt you to add the missing assets to build and debug your app. Select Yes.

    +
  4. +
  5. +

    To open the Debug view, click on the Debugging icon on the left side menu.

    +
  6. +
  7. +

    Locate the green arrow at the top of the pane. Make sure the drop-down next to it has .NET Core Launch (console) selected.

    +
  8. +
  9. +

    Add a breakpoint to your project by clicking on the editor margin (the space on the left of the line numbers in the editor).

    +
  10. +
  11. +

    Select F5 or the green arrow to start debugging. The debugger stops execution of your program when it reaches the breakpoint you set in the previous step.

    +
    +
      +
    • +

      While debugging you can view your local variables in the top left pane or use the debug console.

      +
    • +
    +
    +
  12. +
  13. +

    Select the green arrow at the top to continue debugging, or select the red square at the top to stop.

    +
  14. +
+
+
+
+
+

==

+
+
+

For more information and troubleshooting tips on .NET Core debugging with OmniSharp in Visual Studio Code, see Instructions for setting up the .NET Core debugger.

+
+
+
+ +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4net/1.0/home.html b/docs/devon4net/1.0/home.html new file mode 100644 index 00000000..4189444a --- /dev/null +++ b/docs/devon4net/1.0/home.html @@ -0,0 +1,189 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

devon4net

+
+
+

This guide describes an application architecture for API development with .net core.

+
+
+
+
+

Motivation

+
+
+

The main challenge we encounter in our projects is to bring junior and senior developers into .net core. +There are a lot of different frameworks and architectures in the market. +The idea is to define an architecture which is a compromise between, on the one hand, leveraging the best practices and latest trends. +On the other hand, providing a short onboarding time while still using an architecture that helps us scale and be productive at the same time. +Also, the architecture must be compatible with the market. +Guides, practices and naming found in the web should still be valid (e.g. a stackoverflow article for a given problem).

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4net/1.0/howto.html b/docs/devon4net/1.0/howto.html new file mode 100644 index 00000000..371bd6cd --- /dev/null +++ b/docs/devon4net/1.0/howto.html @@ -0,0 +1,1249 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Introduction

+
+
+

The aim of this document is to show how to get devon4net things done in an easy way.

+
+
+
+
+

How to

+
+ +
+
+
+

Start a new devonfw project

+
+
+

The .Net Core 3.1 template allows you to start developing an n-layer server application to provide the latest features. The template can be used in Visual Studio Code and Visual Studio 2019.

+
+
+

The application result can be deployed as a console application, microservice or web page.

+
+
+

To start developing with the devon4Net template, please follow these instructions:

+
+
+
+
+

== Using devon4Net template

+
+ +
+
+
+

== Option 1

+
+
+
+
Open your favourite terminal (Win/Linux/iOS)
+Go to future project's path
+Type dotnet new --install Devon4Net.WebAPI.Template
+Type dotnet new Devon4NetAPI
+Go to project's path
+You are ready to start developing with devon4Net
+
+
+
+
+
+

== Option 2

+
+
+
+
Create a new dotnet API project from scratch
+Add the NuGet package reference to your project:
+Install-Package Devon4Net.Application.WebAPI.Configuration
+
+
+
+

Set up your project as follows in program.cs file:

+
+
+
+
        public static void Main(string[] args)
+        {
+            // Please use
+            // Devonfw.Configure<Startup>(args);
+            // Or :
+
+            WebHost.CreateDefaultBuilder(args)
+                .UseStartup<Startup>()
+                .InitializeDevonFw()
+                .Build()
+                .Run();
+        }
+
+
+
+

Set up your project as follows in startup.cs file:

+
+
+
+
    private IConfiguration Configuration { get; }
+
+
+   public Startup(IConfiguration configuration)
+    {
+        Configuration = configuration;
+    }
+
+    public void ConfigureServices(IServiceCollection services)
+    {
+
+        services.ConfigureDevonFw(Configuration);
+        SetupDatabase(services);
+
+        ...
+    }
+
+
+    private void SetupDatabase(IServiceCollection services)
+    {
+        // Default is the database connection name in appsettings.json file
+        services.SetupDatabase<TodoContext>(Configuration, "Default", DatabaseType.InMemory);
+    }
+
+    public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
+    {
+        app.ConfigureDevonFw();
+        ...
+    }
+
+
+
+

Add the devonfw configuration options in your appsettings.json file

+
+
+
+
+

devon4net configuration files

+
+
+

To start using devon4net in your .net core application add this configuration in your appsettings.json file:

+
+
+
+
 "devonfw": {
+    "UseDetailedErrorsKey": true,
+    "UseIIS": false,
+    "UseSwagger": true,
+    "Environment": "Development",
+    "KillSwitch": {
+      "killSwitchSettingsFile": "killswitch.appsettings.json"
+    },
+    "Kestrel": {
+      "UseHttps": true,
+      "HttpProtocol": "Http2", //Http1, Http2, Http1AndHttp2, none
+      "ApplicationPort": 8082,
+      "KeepAliveTimeout": 120, //in seconds
+      "MaxConcurrentConnections": 100,
+      "MaxConcurrentUpgradedConnections": 100,
+      "MaxRequestBodySize": 28.6, //In MB. The default maximum request body size is 30,000,000 bytes, which is approximately 28.6 MB
+      "Http2MaxStreamsPerConnection": 100,
+      "Http2InitialConnectionWindowSize": 131072, // From 65,535 and less than 2^31 (2,147,483,648)
+      "Http2InitialStreamWindowSize": 98304, // From 65,535 and less than 2^31 (2,147,483,648)
+      "AllowSynchronousIO": true,
+      "SslProtocol": "Tls12", //Tls, Tls11,Tls12, Tls13, Ssl2, Ssl3, none. For Https2 Tls12 is needed
+      "ServerCertificate": {
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost"
+      },
+      "ClientCertificate": {
+        "DisableClientCertificateCheck": true,
+        "RequireClientCertificate": false,
+        "CheckCertificateRevocation": true,
+        "ClientCertificates": {
+          "Whitelist": [
+            "3A87A49460E8FE0E2A198E63D408DC58435BC501"
+          ],
+          "DisableClientCertificateCheck": false
+        }
+      }
+    },
+    "IIS": {
+      "ForwardClientCertificate": true,
+      "AutomaticAuthentication": true,
+      "AuthenticationDisplayName" : ""
+    }
+  }
+
+
+
+

Also, to start using the devon4net components, you should add the following json options in your appsettings.json or appsettings.Development.json file:

+
+
+
+
{
+  "ExtraSettingsFiles": [
+    "Put a directory path (relative/absolute/linux-like) like /run/secrets/global where there are many settings/secret files to load",
+    "Put a specific file name (with/without path) like /app-configs/app/extra-settings.json"
+  ],
+  "ConnectionStrings": {
+    "Default": "Todos",
+    "Employee": "Employee",
+    "RabbitMqBackup": "Add your database connection string here for messaging backup",
+    "MediatRBackup": "Add your database connection string here for messaging backup"
+  },
+  "Logging": {
+    "LogLevel": {
+      "Default": "Debug",
+      "System": "Information",
+      "Microsoft": "Information"
+    }
+  },
+  "Swagger": {
+    "Version": "v1",
+    "Title": "devon4net API",
+    "Description": "devon4net API Contract",
+    "Terms": "https://www.devonfw.com/terms-of-use/",
+    "Contact": {
+      "Name": "devonfw",
+      "Email": "sample@mail.com",
+      "Url": "https://www.devonfw.com"
+    },
+    "License": {
+      "Name": "devonfw - Terms of Use",
+      "Url": "https://www.devonfw.com/terms-of-use/"
+    },
+    "Endpoint": {
+      "Name": "V1 Docs",
+      "Url": "/swagger/v1/swagger.json",
+      "UrlUi": "swagger",
+      "RouteTemplate": "swagger/v1/{documentName}/swagger.json"
+    }
+  },
+  "JWT": {
+    "Audience": "devon4Net",
+    "Issuer": "devon4Net",
+    "TokenExpirationTime": 60,
+    "ValidateIssuerSigningKey": true,
+    "ValidateLifetime": true,
+    "ClockSkew": 5,
+    "Security": {
+      "SecretKeyLengthAlgorithm": "",
+      "SecretKeyEncryptionAlgorithm": "",
+      "SecretKey": "",
+      "Certificate": "",
+      "CertificatePassword": "",
+      "CertificateEncryptionAlgorithm": ""
+    }
+  },
+  "Cors": []
+  //[
+  //  {
+  //    "CorsPolicy": "CorsPolicy1",
+  //    "Origins": "http://example.com,http://www.contoso.com",
+  //    "Headers": "accept,content-type,origin,x-custom-header",
+  //    "Methods": "GET,POST,HEAD",
+  //    "AllowCredentials": true
+  //  },
+  //  {
+  //    "CorsPolicy": "CorsPolicy2",
+  //    "Origins": "http://example.com,http://www.contoso.com",
+  //    "Headers": "accept,content-type,origin,x-custom-header",
+  //    "Methods": "GET,POST,HEAD",
+  //    "AllowCredentials": true
+  //  }
+  //]
+  ,
+  "CircuitBreaker": {
+    "CheckCertificate": false,
+    "Endpoints": [
+      {
+        "Name": "AnsibleTower",
+        "BaseAddress": "PUT THE IP ADDRESS HERE",
+        "Headers": {
+        },
+        "WaitAndRetrySeconds": [
+          0.0001,
+          0.0005,
+          0.001
+        ],
+        "DurationOfBreak": 0.0005,
+        "UseCertificate": false,
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost",
+        "SslProtocol": "3072" //TLS12
+      },
+      {
+        "Name": "CyberArk",
+        "BaseAddress": "PUT THE IP ADDRESS HERE",
+        "Headers": {
+        },
+        "WaitAndRetrySeconds": [
+          0.0001,
+          0.0005,
+          0.001
+        ],
+        "DurationOfBreak": 0.0005,
+        "UseCertificate": false,
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost",
+        "SslProtocol": "3072" //TLS12
+      },
+      {
+        "Name": "SmaxHcm",
+        "BaseAddress": "PUT THE IP ADDRESS HERE",
+        "Headers": {
+        },
+        "WaitAndRetrySeconds": [
+          0.0001,
+          0.0005,
+          0.001
+        ],
+        "DurationOfBreak": 0.0005,
+        "UseCertificate": false,
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost",
+        "SslProtocol": "3072" //TLS12
+      }
+    ]
+  },
+  "Headers": {
+    "AccessControlExposeHeader": "Authorization",
+    "StrictTransportSecurityHeader": "",
+    "XFrameOptionsHeader": "DENY",
+    "XssProtectionHeader": "1;mode=block",
+    "XContentTypeOptionsHeader": "nosniff",
+    "ContentSecurityPolicyHeader": "",
+    "PermittedCrossDomainPoliciesHeader": "",
+    "ReferrerPolicyHeader": ""
+  },
+  "Log": {
+    "UseAOPTrace": false,
+    "LogLevel": "Debug",
+    "SqliteDatabase": "logs/log.db",
+    "LogFile": "logs/{0}_devonfw.log",
+    "SeqLogServerHost": "http://127.0.0.1:5341",
+    "GrayLog": {
+      "GrayLogHost": "127.0.0.1",
+      "GrayLogPort": "12201",
+      "GrayLogProtocol": "UDP",
+      "UseSecureConnection": true,
+      "UseAsyncLogging": true,
+      "RetryCount": 5,
+      "RetryIntervalMs": 15,
+      "MaxUdpMessageSize": 8192
+    }
+  },
+  "RabbitMq": {
+    "EnableRabbitMq": false,
+    "Hosts": [
+      {
+        "Host": "127.0.0.1",
+        "Port": 5672,
+        "Ssl": false,
+        "SslServerName": "localhost",
+        "SslCertPath": "localhost.pfx",
+        "SslCertPassPhrase": "localhost",
+        "SslPolicyErrors": "RemoteCertificateNotAvailable" //None, RemoteCertificateNotAvailable, RemoteCertificateNameMismatch, RemoteCertificateChainErrors
+      }
+    ],
+
+    "VirtualHost": "/",
+    "UserName": "admin",
+    "Password": "password",
+    "Product": "devon4net",
+    "RequestedHeartbeat": 10, //Set to zero for no heartbeat
+    "PrefetchCount": 50,
+    "PublisherConfirms": false,
+    "PersistentMessages": true,
+    "Platform": "localhost",
+    "Timeout": 10,
+    "Backup": {
+      "UseLocalBackup": false,
+      "DatabaseName": "devon4netMessageBackup.db"
+    }
+  },
+  "MediatR": {
+    "EnableMediatR": false,
+    "Backup": {
+      "UseLocalBackup": false,
+      "DatabaseName": "devon4netMessageBackup.db"
+    }
+  },
+  "LiteDb": {
+    "DatabaseLocation": "devon4net.db"
+  },
+  "AnsibleTower": {
+    "EnableAnsible": false,
+    "Name": "AnsibleTower",
+    "CircuitBreakerName": "AnsibleTower",
+    "ApiUrlBase": "/api/v2/?format=json",
+    "Version": "1.0.5.29",
+    "Username": "",
+    "Password": ""
+  },
+  "CyberArk": {
+    "EnableCyberArk": false,
+    "Username": "",
+    "Password": "",
+    "CircuitBreakerName": "CyberArk"
+  },
+  "SmaxHcm": {
+    "EnableSmax": false,
+    "Username": "",
+    "Password": "",
+    "TenantId": "",
+    "CircuitBreakerName": "SmaxHcm",
+    "ProviderId": ""
+  },
+  "Kafka": {
+    "EnableKafka": true,
+    "Administration": [
+      {
+        "AdminId": "Admin1",
+        "Servers": "127.0.0.1:9092"
+      }
+    ],
+    "Producers": [
+      {
+        "ProducerId": "Producer1", // devon identifier
+        "Servers": "127.0.0.1:9092", // Initial list of brokers as a CSV list of broker host or host:port. The application may also use `rd_kafka_brokers_add()` to add brokers during runtime
+        "ClientId": "client1", //Client identifier
+        "Topic": "devonfw", // topics to deliver the message
+        "MessageMaxBytes": 1000000, //Maximum Kafka protocol request message size. Due to differing framing overhead between protocol versions the producer is unable to reliably enforce a strict max message limit at produce time and may exceed the maximum size by one message in protocol ProduceRequests, the broker will enforce the topic's `max.message.bytes` limit (see Apache Kafka documentation)
+        "CompressionLevel": -1, // [0-9] for gzip; [0-12] for lz4; only 0 for snappy; -1 = codec-dependent default compression level
+        "CompressionType": "None", // None, Gzip, Snappy, Lz4, Zstd
+        "ReceiveMessageMaxBytes": 100000000,
+        "EnableSslCertificateVerification": false,
+        "CancellationDelayMaxMs": 100, // The maximum length of time (in milliseconds) before a cancellation request is acted on. Low values may result in measurably higher CPU usage
+        "Ack": "None", //Zero=Broker does not send any response/ack to client, One=The leader will write the record to its local log but will respond without awaiting full acknowledgement from all followers. All=Broker will block until message is committed by all in sync replicas (ISRs). If there are less than min.insync.replicas (broker configuration) in the ISR set the produce request will fail
+        "Debug": "", //A comma-separated list of debug contexts to enable. Detailed Producer debugging: broker,topic,msg. Consumer: consumer,cgrp,topic,fetch
+        "BrokerAddressTtl": 1000, //How long to cache the broker address resolving results (milliseconds)
+        "BatchNumMessages": 1000000, // Maximum size (in bytes) of all messages batched in one MessageSet, including protocol framing overhead. This limit is applied after the first message has been added to the batch, regardless of the first message's size, this is to ensure that messages that exceed batch.size are produced. The total MessageSet size is also limited by batch.num.messages and message.max.bytes
+        "EnableIdempotence": false, //When set to `true`, the producer will ensure that messages are successfully produced exactly once and in the original produce order. The following configuration properties are adjusted automatically (if not modified by the user) when idempotence is enabled: `max.in.flight.requests.per.connection=5` (must be less than or equal to 5), `retries=INT32_MAX` (must be greater than 0), `acks=all`, `queuing.strategy=fifo`. Producer instantation will fail if user-supplied configuration is incompatible
+        "MaxInFlight": 5,
+        "MessageSendMaxRetries": 5,
+        "BatchSize": 100000000 // Maximum size (in bytes) of all messages batched in one MessageSet, including protocol framing overhead. This limit is applied after the first message has been added to the batch, regardless of the first message's size, this is to ensure that messages that exceed batch.size are produced. The total MessageSet size is also limited by batch.num.messages and message.max.bytes
+      }
+    ],
+    "Consumers": [
+      {
+        "ConsumerId": "Consumer1", // devon identifier
+        "Servers": "127.0.0.1:9092",
+        "GroupId": "group1",
+        "Topics": "devonfw", // Comma separated topics to subscribe
+        "AutoCommit": true, //Automatically and periodically commit offsets in the background. Note: setting this to false does not prevent the consumer from fetching previously committed start offsets. To circumvent this behaviour set specific start offsets per partition in the call to assign()
+        "StatisticsIntervalMs": 0, //librdkafka statistics emit interval. The application also needs to register a stats callback using `rd_kafka_conf_set_stats_cb()`. The granularity is 1000ms. A value of 0 disables statistics
+        "SessionTimeoutMs": 10000, //Client group session and failure detection timeout. The consumer sends periodic heartbeats (heartbeat.interval.ms) to indicate its liveness to the broker. If no hearts are received by the broker for a group member within the session timeout, the broker will remove the consumer from the group and trigger a rebalance. The allowed range is configured with the **broker** configuration properties `group.min.session.timeout.ms` and `group.max.session.timeout.ms`. Also see `max.poll.interval.ms`
+        "AutoOffsetReset": "Largest", //Action to take when there is no initial offset in offset store or the desired offset is out of range: 'smallest','earliest' - automatically reset the offset to the smallest offset, 'largest','latest' - automatically reset the offset to the largest offset, 'error' - trigger an error which is retrieved by consuming messages and checking 'message-&gt;err'
+        "EnablePartitionEof": true, //Emit a partition EOF event whenever the consumer reaches the end of a partition. Note: verifying the CRC32 of consumed messages is controlled by a separate option (check.crcs)
+        "IsolationLevel": "ReadCommitted", //Controls how to read messages written transactionally: `ReadCommitted` - only return transactional messages which have been committed. `ReadUncommitted` - return all messages, even transactional messages which have been aborted.
+        "EnableSslCertificateVerification": false,
+        "Debug": "" //A comma-separated list of debug contexts to enable. Detailed Producer debugging: broker,topic,msg. Consumer: consumer,cgrp,topic,fetch
+      }
+    ]
+  }
+}
+
+
+
+
+
+

devon4net Cobigen Guide

+
+ +
+
+
+

Overview

+
+
+

In this guide we will explain how to generate a new WebApi project from an OpenAPI 3.0.0 specification. This means that we are going to use a “contract first” strategy. This is going to be possible due to these type of files that contain all the information about entities, operations, etc…

+
+
+

In order to make it work we are using CobiGen, a powerful tool for generating source code. CobiGen allows users to generate all the structure and code of the components, helping to save a lot of time otherwise wasted on repetitive tasks.

+
+
+
+
+

Getting things ready

+
+ +
+
+
+

== devonfw Distribution

+
+
+

The devonfw distributions can be obtained from here. You can find all releases in maven central.

+
+
+

It is not necessary to install nor configure anything. Just extracting the zip content is enough to have a fully functional devonfw. The only thing you have to do is run create-or-update-workspace.bat and then update-all-workspaces.bat to set up all the needed tools.

+
+
+
+
+

== devon4net Templates

+
+
+

We are going to use the template of devon4net as a base to generate all the code, so what we have to do now is to download said template using the following steps.

+
+
+

First of all you have to set up all the environment for .NET, you can do this using the following tutorial. Next we are going to create a new folder where we want to have the WebAPI project, lastly we are going to open the terminal there.

+
+
+

Type the following:

+
+
+
+
dotnet new -i Devon4Net.WebAPI.Template
+
+
+
+

and then:

+
+
+
+
dotnet new Devon4NetAPI
+
+
+
+
+
+

== OpenAPI File

+
+
+

In order to let CobiGen generate all the files, we first have to make some modifications to our OpenAPI file.

+
+
+

It is obligatory to put the “x-rootpackage” tag to indicate where CobiGen will place the generated files as well as the "x-component" tags for each component, keep in mind that due to CobiGen’s limitations each component must have its own entity.

+
+
+

You can read more information about how to configure your OpenAPI file and a working example here.

+
+
+
+
+

Generating files

+
+
+

CobiGen allows us to generate the files in two different ways. One of them is using Eclipse, which can be done through its graphical interface. The other way to generate the code is using the CobiGen CLI tool.

+
+
+
+
+

== Generating files through Eclipse

+
+
+

In order to generate the files using Eclipse we need to follow some simple steps.

+
+
+

First we are going to import our basic devon4net WebAPI Project into Eclipse. To do so, open Eclipse with the “eclipse-main.bat” file that can be found in the devon distribution root folder. Once we are inside Eclipse we go to File > Open projects from file system…​ and, under "Directory", search for your project.

+
+
+
+cobigen +
+
+
+

Next we copy our OpenAPI file into the root folder of the project.

+
+
+
+cobigen +
+
+
+

And then we right click on OpenAPI file and then select CobiGen > Generate…​ It will display a window like this:

+
+
+
+cobigen +
+
+
+

To select all .NET features choose CRUD devon4net Server otherwise you can select only those that interest you.

+
+
+
+cobigen +
+
+
+

Once you select all the files that you want to generate, click on the “Finish” button to generate all the source code.

+
+
+
+
+

== Generating files through the CobiGen CLI

+
+
+

In order to generate the files using the CobiGen CLI, the following steps are needed:

+
+
+
    +
  1. +

    Go to devonfw distribution folder

    +
  2. +
  3. +

    Run console.bat, this will open a console.

    +
  4. +
  5. +

    Go to the folder you downloaded the devon4net template and your yml file.

    +
  6. +
  7. +

    Run the command:

    +
    +
    +
    cobigen generate {yourOpenAPIFile}.yml
    +
    +
    +
  8. +
  9. +

    A list of increments will be printed so that you can start the generation. The CRUD devon4net Server increment has to be selected.

    +
  10. +
+
+
+
+
+

Configuration

+
+ +
+
+
+

== Dependency Injection configuration

+
+
+

At this point some modifications to the code are needed in order to configure the server correctly. To do so, locate the services and the repositories files that were created in Devon4Net.WebAPI.Implementation

+
+
+

Services location:

+
+
+
+cobigen +
+
+
+

Repositories location:

+
+
+
+cobigen +
+
+
+

Now, we are going to open the following file Devon4Net.WebAPI.Implementation\Configure\DevonConfiguration.cs. +In there we have to add the Dependency Injection for the services and the repositories that Cobigen has generated. The following image is an example of what is needed to add.

+
+
+
+cobigen +
+
+
+

Moreover it is needed to remove the last line in order to be able to run the application:

+
+
+
+
`throw new NotImplementedException(...);`
+
+
+
+
+
+

== Configure data base

+
+
+

CobiGen generates an empty context that has to be filled in manually in order to be able to work with the database. The context can be found in [Project_Name]/Devon4Net.WebAPI.Implementation/Domain/Database/CobigenContext.cs.

+
+
+
+cobigen +
+
+
+
+
+

== Configure services

+
+
+

In order to finish the configuration of the services it is needed to go to each service file of the managements generated.

+
+
+

In there we will see some "NotImplementedExceptions", so it is necessary to read carefully each comment inside each exception in order to be able to use the service. Below is an example of a service with its NotImplementedException comments:

+
+
+
+cobigen +
+
+
+
+
+

== Run the application

+
+
+

After doing all the steps defined above, open a terminal in path: [Project_Name]/Devon4Net.Application.WebAPI and then type:

+
+
+
+
dotnet run
+
+
+
+

This will deploy our application on localhost on port 8081, so when you click here (https://localhost:8082/swagger) you can see, in swagger, all the services and the data model.

+
+
+
+
+

Use HTTP2 protocol

+
+
+

You can specify the HTTP protocol to be used by your devon4net application by modifying some node values at the devonfw node in your appsettings configuration file.

+
+
+
+
+

HttpProtocol

+
+
+

The supported protocols are:

+
+
+

|== == == == == == == == == == == = +|Protocol|Description +|Http1| Http1 protocol +|Http2| Http2 Protocol +|Http1AndHttp2| Both supported +|== == == == == == == == == == == =

+
+
+
+
+

== SSL

+
+
+

To activate the HTTP2, the SslProtocol node must be set to Tls12 value.

+
+
+

The supported SSL protocol version values are:

+
+
+
    +
  • +

    Tls

    +
  • +
  • +

    Tls11

    +
  • +
  • +

    Tls12

    +
  • +
  • +

    Tls13

    +
  • +
  • +

    Ssl2

    +
  • +
  • +

    Ssl3

    +
  • +
+
+
+
+
+

Create a certificate for development purposes

+
+
+

In order to create a valid certificate for development purposes the OpenSSL tools are needed.

+
+
+
+
+

Certificate authority (CA)

+
+
+

Run the next commands in a shell:

+
+
+
+
1. openssl req -x509 -nodes -new -sha256 -days 1024 -newkey rsa:2048 -keyout RootCA.key -out RootCA.pem -subj "/C=ES/ST=Valencia/L=Valencia/O=Certificates/CN=localhost.local"
+
+2. openssl x509 -outform pem -in RootCA.pem -out RootCA.crt
+
+
+
+

If you want to convert your certificate run the command:

+
+
+
+
openssl pkcs12 -export -out localhost.pfx -inkey RootCA.key -in RootCA.crt
+
+
+
+
+
+

Domain name certificate

+
+
+

Run the next commands in a shell:

+
+
+
+
1. openssl req -new -nodes -newkey rsa:2048 -keyout localhost.key -out localhost.csr -subj "/C=ES/ST=Valencia/L=Valencia/O=Certificates/CN=localhost.local"
+
+2. openssl x509 -req -sha256 -days 1024 -in localhost.csr -CA RootCA.pem -CAkey RootCA.key -CAcreateserial -extfile domains.ext -out localhost.crt
+
+
+
+

Where the domains.ext file should contain:

+
+
+
+
authorityKeyIdentifier=keyid,issuer
+basicConstraints=CA:FALSE
+keyUsage = digitalSignature, nonRepudiation, keyEncipherment, dataEncipherment
+subjectAltName = @alt_names
+[alt_names]
+DNS.1 = localhost
+DNS.2 = localhost.local
+DNS.3 = 127.0.0.1
+DNS.4 = fake1.local
+DNS.5 = fake2.local
+
+
+
+

If you want to convert your certificate run the command:

+
+
+
+
openssl pkcs12 -export -out localhost.pfx -inkey localhost.key -in localhost.crt
+
+
+
+
+
+

Setup the database driver

+
+
+

Add the database connection on the SetupDatabase method at Startup.cs

+
+
+
+
       private void SetupDatabase(IServiceCollection services)
+        {
+            services.SetupDatabase<TodoContext>(Configuration, "Default", WebAPI.Configuration.Enums.DatabaseType.InMemory);
+        }
+
+
+
+

Where:

+
+
+

|== == == == == == == == == == == = +|Param|Description +|TodoContext| Is the database context definition +|Default| Is the connection string defined at ConnectionString node at the appsettings configuration file +|WebAPI.Configuration.Enums.DatabaseType.InMemory| Is the database driver selection. In this case InMemory data base is chosen +|== == == == == == == == == == == =

+
+
+

The supported databases are:

+
+
+
    +
  • +

    SqlServer

    +
  • +
  • +

    Sqlite

    +
  • +
  • +

    InMemory

    +
  • +
  • +

    Cosmos

    +
  • +
  • +

    PostgreSQL

    +
  • +
  • +

    MySql

    +
  • +
  • +

    MariaDb

    +
  • +
  • +

    FireBird

    +
  • +
  • +

    Oracle

    +
  • +
  • +

    MSAccess

    +
  • +
+
+
+
+
+

Change the JWT encryption algorithm

+
+
+

In the appsettings.json configuration file, you can use the next values on the SecretKeyLengthAlgorithm and SecretKeyEncryptionAlgorithm nodes at JWT configuration:

+
+
+

|== == == == == == == == == == == = +|Algorithm|Description +|Aes128Encryption|"http://www.w3.org/2001/04/xmlenc#aes128-cbc" +|Aes192Encryption|"http://www.w3.org/2001/04/xmlenc#aes192-cbc" +|Aes256Encryption|"http://www.w3.org/2001/04/xmlenc#aes256-cbc" +|DesEncryption|"http://www.w3.org/2001/04/xmlenc#des-cbc" +|Aes128KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes128" +|Aes192KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes192" +|Aes256KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes256" +|RsaV15KeyWrap|"http://www.w3.org/2001/04/xmlenc#rsa-1_5" +|Ripemd160Digest|"http://www.w3.org/2001/04/xmlenc#ripemd160" +|RsaOaepKeyWrap|"http://www.w3.org/2001/04/xmlenc#rsa-oaep" +|Aes128KW|"A128KW" +|Aes256KW|"A256KW" +|RsaPKCS1|"RSA1_5" +|RsaOAEP|"RSA-OAEP" +|ExclusiveC14n|"http://www.w3.org/2001/10/xml-exc-c14n#" +|ExclusiveC14nWithComments|"http://www.w3.org/2001/10/xml-exc-c14n#WithComments" +|EnvelopedSignature|"http://www.w3.org/2000/09/xmldsig#enveloped-signature" +|Sha256Digest|"http://www.w3.org/2001/04/xmlenc#sha256" +|Sha384Digest|"http://www.w3.org/2001/04/xmldsig-more#sha384" +|Sha512Digest|"http://www.w3.org/2001/04/xmlenc#sha512" +|Sha256|"SHA256" +|Sha384|"SHA384" +|Sha512|"SHA512" +|EcdsaSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha256" +|EcdsaSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha384" +|EcdsaSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha512" +|HmacSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha256" +|HmacSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha384" +|HmacSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha512" +|RsaSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha256" +|RsaSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha384" +|RsaSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha512" +|RsaSsaPssSha256Signature|"http://www.w3.org/2007/05/xmldsig-more#sha256-rsa-MGF1" 
+|RsaSsaPssSha384Signature|"http://www.w3.org/2007/05/xmldsig-more#sha384-rsa-MGF1" +|RsaSsaPssSha512Signature|"http://www.w3.org/2007/05/xmldsig-more#sha512-rsa-MGF1" +|EcdsaSha256|"ES256" +|EcdsaSha384|"ES384" +|EcdsaSha512|"ES512" +|HmacSha256|"HS256" +|HmacSha384|"HS384" +|HmacSha512|"HS512" +|None|"none" +|RsaSha256|"RS256" +|RsaSha384|"RS384" +|RsaSha512|"RS512" +|RsaSsaPssSha256|"PS256" +|RsaSsaPssSha384|"PS384" +|RsaSsaPssSha512|"PS512" +|Aes128CbcHmacSha256|"A128CBC-HS256" +|Aes192CbcHmacSha384|"A192CBC-HS384" +|Aes256CbcHmacSha512|"A256CBC-HS512" +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4net/1.0/index.html b/docs/devon4net/1.0/index.html new file mode 100644 index 00000000..d35df6b6 --- /dev/null +++ b/docs/devon4net/1.0/index.html @@ -0,0 +1,293 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

devon4net

+
+
+

This guide describes an application architecture for API development with .net core.

+
+
+
+
+

Motivation

+
+
+

The main challenge we encounter in our projects is to bring junior and senior developers into .net core. +There are a lot of different frameworks and architectures in the market. +The idea is to define an architecture which is a compromise between, on the one hand, leveraging the best practices and latest trends. +On the other hand, providing a short onboarding time while still using an architecture that helps us scale and be productive at the same time. +Also, the architecture must be compatible with the market. +Guides, practices and naming found in the web should still be valid (e.g. a stackoverflow article for a given problem).

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4net/1.0/master-devon4net.html b/docs/devon4net/1.0/master-devon4net.html new file mode 100644 index 00000000..a9ca8b80 --- /dev/null +++ b/docs/devon4net/1.0/master-devon4net.html @@ -0,0 +1,5743 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==.net

+
+
+

Architecture basics

+
+ +
+

Introduction

+
+

The devonfw platform provides a solution to building applications which combine best-in-class frameworks and libraries as well as industry proven practices and code conventions. +It massively speeds up development, reduces risks and helps you to deliver better results.

+
+
+
+

Overview Onion Design

+
+

This guide shows the overall proposed architecture in terms of separated layers, making use of the Onion architecture pattern. Each layer represents a logical group of components and functionality. In this guide you will learn the basics of the proposed layer-based architecture in order to develop software making use of the best practices.

+
+
+
+

Layer specification

+
+
+
+

It is important to understand the distinction between layers and tiers. Layers describe the logical groupings of the functionality and components in an application; whereas tiers describe the physical distribution of the functionality and components on separate servers, computers, networks, or remote locations. Although both layers and tiers use the same set of names (presentation, business, services, and data), remember that only tiers imply a physical separation. It is quite common to locate more than one layer on the same physical machine (the same tier). You can think of the term tier as referring to physical distribution patterns such as two-tier, three-tier, and n-tier.

+
+
+
+— Layered Application Guidelines
+MSDN Microsoft +
+
+
+

The proposed architecture makes use of cooperating components called layers. To develop specific functionality each layer contains a set of components which is capable to develop such functionalities.

+
+
+

The next figure represents the different layers:

+
+
+
+technical architecture +
+
Figure 1. High level architecture representation
+
+
+

The layers are separated in physical tiers making use of interfaces. This pattern makes possible to be flexible in different kind of projects maximizing performance and deployment strategies (synchronous/asynchronous access, security, component deployment in different environments, microservices…​). Another important point is to provide automated unit testing or test-driven development (TDD) facilities.

+
+
+
+

== Application layer

+
+

The Application Layer encapsulates the different .Net projects and its resource dependencies and manages the user interaction depending on the project’s nature.

+
+
+
+technical architecture +
+
Figure 2. Net application stack
+
+
+

The provided application template implements a dotnet API application. It also integrates the Swagger client by default. This provides the possibility to share the contract with external applications (angular, mobile apps, external services…​).

+
+
+
+

== Business layer

+
+

The business layer implements the core functionality of the application and encapsulates the component’s logic. +This layer provides the interface between the data transformation and the application exposition. This allow the data to be optimized and ready for different data consumers.

+
+
+

This layer may implement for each main entity the API controller, the entity related service and other classes to support the application logic.

+
+
+

In order to implement the service logic, the services class must follow the next specification:

+
+
+
+
    public class Service<TContext> : IService where TContext: DbContext
+
+
+
+

PE: devon4Net API template shows how to implement the TODOs service as follows:

+
+
+
+
    public class TodoService: Service<TodoContext>, ITodoService
+
+
+
+

Where Service is the base service class to be inherited and have full access for the Unit of work, TodoContext is the TODOs database context and ITodoService is the interface of the service, which exposes the public extended methods to be implemented.

+
+
+
+

== Data layer

+
+

The data layer orchestrates the data obtained between the Domain Layer and the Business Layer. Also transforms the data to be used more efficiently between layers.

+
+
+

So, if a service needs the help of another service or repository, the implemented Dependency Injection is the solution to accomplish the task.

+
+
+

The main aim of this layer is to implement the repository for each entity. The repository’s interface is defined in the Domain layer.

+
+
+

In order to implement the repository logic, the repository class must follow the next specification:

+
+
+
+
    Repository<T> : IRepository<T> where T : class
+
+
+
+

PE: devon4Net API template shows how to implement the TODOs repository as follows:

+
+
+
+
    public class TodoRepository : Repository<Todos>, ITodoRepository
+
+
+
+

Where Repository is the base repository class to be inherited, giving full access to the basic CRUD operations, and Todos is the entity defined in the database context. ITodoRepository is the interface of the repository, which exposes the public extended methods to be implemented.

+
+
+ + + + + +
+ + +Please remember that <T> is the mapped class which references the entity from the database context. This abstraction allows writing service implementations with different database contexts +
+
+
+
+

== Domain layer

+
+

The domain layer provides access to data directly exposed from other systems. The main source is used to be a data base system. The provided template makes use of Entity Framework solution from Microsoft in order to achieve this functionality.

+
+
+

To make good use of this technology, the Repository pattern has been implemented with the help of the Unit of Work pattern. Also, the use of generic types makes this solution the most flexible.

+
+
+

Regarding the database source, each entity is mapped as a class. The Repository pattern allows using these mapped classes to access the database via Entity Framework:

+
+
+
+
 public class UnitOfWork<TContext> : IUnitOfWork<TContext> where TContext : DbContext
+
+
+
+ + + + + +
+ + +Where <T> is the mapped class which references the entity from the database. +
+
+
+

The repository and unit of work patterns create an abstraction layer between the data access layer and the business logic layer of an application.

+
+
+ + + + + +
+ + +Domain Layer has no dependencies with other layers. It contains the Entities, datasources and the Repository Interfaces. +
+
+
+
+

devon4Net architecture layer implementation

+
+

The next picture shows how the devon4Net API template implements the architecture described in previous points:

+
+
+
+devon4Net api template architecture implementation +
+
Figure 3. devon4Net architecture implementations
+
+
+
+

== Cross-Cutting concerns

+
+

Cross-cutting provides the implementation functionality that spans layers. Each functionality is implemented through components able to work stand alone. This approach provides better reusability and maintainability.

+
+
+

A common component set of cross cutting components include different types of functionality regarding to authentication, authorization, security, caching, configuration, logging, and communication.

+
+
+
+

Communication between Layers: Interfaces

+
+

The main target of the use of interfaces is to loose coupling between layers and minimize dependencies.

+
+
+

Public interfaces allow hiding implementation details of the components within the layers, making use of dependency inversion.

+
+
+

In order to make this possible, we make use of Dependency Injection Pattern (implementation of dependency inversion) given by default in .Net Core.

+
+
+

The provided Data Layer contains the abstract classes to inherit from. All new repository and service classes must inherit from them, and they must also implement their own interfaces.

+
+
+
+technical architecture +
+
Figure 4. Architecture representation in deep
+
+
+
+

Templates

+ +
+
+

State of the art

+
+

The provided bundle contains the devon4Net API template based on .net core. The template allows to create a microservice solution with minimal configuration.

+
+
+

Also, the devon4Net framework can be added to third party templates such as the Amazon API template to use lambdas in serverless environments.

+
+
+

Included features:

+
+
+
    +
  • +

    Logging:

    +
  • +
  • +

    Text File

    +
  • +
  • +

    Sqlite database support

    +
  • +
  • +

    Serilog Seq Server support

    +
  • +
  • +

    Graylog integration ready through TCP/UDP/HTTP protocols

    +
  • +
  • +

    API Call params interception (simple and compose objects)

    +
  • +
  • +

    API error exception management

    +
  • +
  • +

    Swagger:

    +
  • +
  • +

    Swagger autogenerating client from comments and annotations on controller classes

    +
  • +
  • +

    Full swagger client customization (Version, Title, Description, Terms, License, Json end point definition)

    +
  • +
  • +

    Easy configuration with just one configuration node in your settings file

    +
  • +
  • +

    JWT:

    +
  • +
  • +

    Issuer, audience, token expiration customization by external file configuration

    +
  • +
  • +

    Token generation via certificate

    +
  • +
  • +

    MVC inherited classes to access JWT user properties

    +
  • +
  • +

    API method security access based on JWT Claims

    +
  • +
  • +

    CORS:

    +
  • +
  • +

    Simple CORS definition ready

    +
  • +
  • +

    Multiple CORS domain origin definition with specific headers and verbs

    +
  • +
  • +

    Headers:

    +
  • +
  • +

    Automatic header injection with middleware.

    +
  • +
  • +

    Supported header definitions: AccessControlExposeHeader, StrictTransportSecurityHeader, XFrameOptionsHeader, XssProtectionHeader, XContentTypeOptionsHeader, ContentSecurityPolicyHeader, PermittedCrossDomainPoliciesHeader, ReferrerPolicyHeader

    +
  • +
  • +

    Reporting server:

    +
  • +
  • +

    Partial implementation of reporting server based on My-FyiReporting (now runs on linux container)

    +
  • +
  • +

    Testing:

    +
  • +
  • +

    Integration test template with sqlite support

    +
  • +
  • +

    Unit test template

    +
  • +
  • +

    Moq, xunit frameworks integrated

    +
  • +
  • +

    Circuit breaker:

    +
  • +
  • +

    Integrated with HttpClient factory

    +
  • +
  • +

    Client Certificate customization

    +
  • +
  • +

    Number of retries customizables

    +
  • +
  • +

    LiteDB:

    +
  • +
  • +

    Support for LiteDB

    +
  • +
  • +

    Provided basic repository for CRUD operations

    +
  • +
  • +

    RabbitMq:

    +
  • +
  • +

    Use of EasyQNet library to perform CQRS main functions between different microservices

    +
  • +
  • +

    Send commands / Subscribe queues with one C# sentence

    +
  • +
  • +

    Events management: Handled received commands to subscribed messages

    +
  • +
  • +

    Automatic messaging backup when sent and handled (Internal database via LiteDB and database backup via Entity Framework)

    +
  • +
  • +

    MediatR:

    +
  • +
  • +

    Use of MediatR library to perform CQRS main functions in memory

    +
  • +
  • +

    Send commands / Subscribe queues with one C# sentence

    +
  • +
  • +

    Events management: Handled received commands to subscribed messages

    +
  • +
  • +

    Automatic messaging backup when sent and handled (Internal database via LiteDB and database backup via Entity Framework)

    +
  • +
  • +

    SmaxHcm:

    +
  • +
  • +

    Component to manage Microfocus SMAX for cloud infrastructure services management

    +
  • +
  • +

    CyberArk:

    +
  • +
  • +

    Manage safe credentials with CyberArk

    +
  • +
  • +

    AnsibleTower:

    +
  • +
  • +

    Ansible automates the cloud infrastructure. devon4net integrates with Ansible Tower via API consumption endpoints

    +
  • +
  • +

    gRPC+Protobuf:

    +
  • +
  • +

    Added Client + Server basic templates sample gRPC with Google’s Protobuf protocol using devon4net

    +
  • +
  • +

    Kafka:

    +
  • +
  • +

    Added Apache Kafka support for deliver/consume messages and create/delete topics as well

    +
  • +
+
+
+
+

Software stack

+
+
Technology Stack of devon4Net
+

|== == == == == == == == == == == = +|Topic|Detail|Implementation +|runtime|language & VM|.Net Core Version 3.0 +|persistence|OR-mapper| Entity Framework Core +|service|REST services|https://www.asp.net/web-api[Web API] +|service - integration to external systems - optional|SOAP services|https://msdn.microsoft.com/en-us/library/dd456779(v=vs.110).aspx[WCF] +|logging|framework|https://github.com/serilog/serilog-extensions-logging[Serilog] +|validation|framework| NewtonSoft Json, DataAnnotations +|component management|dependency injection| Unity +|security|Authentication & Authorization| JWT .Net Security - Token based, local Authentication Provider +|unit tests|framework|https://github.com/xunit/xunit[xUnit] +|Circuit breaker|framework, allows retry pattern on http calls|https://github.com/App-vNext/Polly[Polly] +|CQRS|Memory events and queue events| MediatR - EasyNetQ - Kafka +|Kafka| Kafka support for enterprise applications| Confluent.Kafka +|Fluent Validation| Fluent validation for class instances|https://fluentvalidation.net/[Fluent validation] +|== == == == == == == == == == == =

+
+
+
+

Target platforms

+
+

Thanks to the new .Net Core platform from Microsoft, the developed software can be published on Windows, Linux, OS X and Android platforms.

+
+
+ +
+
+
+

User guide

+
+ +
+
+technical architecture +
+
+
+

devon4net Guide

+ +
+
+

Introduction

+
+

Welcome to devon4net framework user guide. In this document you will find the information regarding how to start and deploy your project using the guidelines proposed in our solution.

+
+
+

All the guidelines shown and used in this document are a set of rules and conventions proposed and supported by Microsoft and the industry.

+
+
+
+

The package

+
+

Devon4Net package solution contains:

+
+
+

|== == == == == == == == == == == = +|File / Folder|Content +|Documentation| User documentation in HTML format +|Modules| Contains the source code of the different devon4net modules +|Samples| Different samples implemented in .NET and .NET Core. Also includes My Thai Star Devon flagship restaurant application +|Templates| Main .net Core template to start developing from scratch +|License| License agreement +|README.md| Github main page +|TERMS_OF_USE.adoc| The devon4net terms of use +|LICENSE| The devon license +|Other files| Such the code of conduct and contributing guide +|== == == == == == == == == == == =

+
+
+
+

Application templates

+
+

The application templates given in the bundle are ready to use.

+
+
+

At the moment .net Core template is supported. The template is ready to be used as a simple console Kestrel application or being deployed in a web server like IIS.

+
+
+
+

Samples

+ +
+
+

== My Thai Star

+
+

You can find My Thai Star .NET port application at Github.

+
+
+ + + + + +
+ + +As devon4net has been migrated to the latest version of .net core, the template is not finished yet. +
+
+
+
+

Cookbook

+ +
+
+

Data management

+
+

To use Entity Framework Core, install the package for the database provider(s) you want to target. This walk-through uses SQL Server.

+
+
+

For a list of available providers see Database Providers

+
+
+
    +
  • +

    Go to Tools > NuGet Package Manager > Package Manager Console

    +
  • +
  • +

    Run Install-Package Microsoft.EntityFrameworkCore.SqlServer

    +
  • +
+
+
+

We will be using some Entity Framework Tools to create a model from the database. So we will install the tools package as well:

+
+
+
    +
  • +

    Run Install-Package Microsoft.EntityFrameworkCore.Tools

    +
  • +
+
+
+

We will be using some ASP.NET Core Scaffolding tools to create controllers and views later on. So we will install this design package as well:

+
+
+
    +
  • +

    Run Install-Package Microsoft.VisualStudio.Web.CodeGeneration.Design

    +
  • +
+
+
+
+

== Entity Framework Code first

+
+

In order to design your database model from scratch, we encourage to follow the Microsoft guidelines described here.

+
+
+
+

== Entity Framework Database first

+
+
    +
  • +

    Go to Tools > NuGet Package Manager > Package Manager Console

    +
  • +
  • +

    Run the following command to create a model from the existing database:

    +
  • +
+
+
+
+
Scaffold-DbContext "Your connection string to existing database" Microsoft.EntityFrameworkCore.SqlServer -OutputDir Models
+
+
+
+

The command will create the database context and the mapped entities as well inside of Models folder.

+
+
+
+

== Register your context with dependency injection

+
+

Services are registered with dependency injection during application startup.

+
+
+

In order to register your database context (or multiple database contexts as well) you can add the following lines in the SetupDatabase method at startup.cs:

+
+
+
+
       private void SetupDatabase(IServiceCollection services)
+        {
+            services.SetupDatabase<TodoContext>(Configuration, "Default", WebAPI.Configuration.Enums.DatabaseType.InMemory);
+        }
+
+
+
+

Where:

+
+
+

|== == == == == == == == == == == = +|Param|Description +|TodoContext| Is the database context definition +|Default| Is the connection string defined at ConnectionString node at the appsettings configuration file +|WebAPI.Configuration.Enums.DatabaseType.InMemory| Is the database driver selection. In this case InMemory data base is chosen +|== == == == == == == == == == == =

+
+
+

The supported databases are:

+
+
+
    +
  • +

    SqlServer

    +
  • +
  • +

    Sqlite

    +
  • +
  • +

    InMemory

    +
  • +
  • +

    Cosmos

    +
  • +
  • +

    PostgreSQL

    +
  • +
  • +

    MySql

    +
  • +
  • +

    MariaDb

    +
  • +
  • +

    FireBird

    +
  • +
  • +

    Oracle

    +
  • +
  • +

    MSAccess

    +
  • +
+
+
+
+

Repositories and Services

+
+

Services and Repositories are an important part of the devon4net proposal. To make them work properly, first of all they must be declared and injected at Startup.cs in the DI region.

+
+
+

Services are declared in devon4net.Business.Common and injected in Controller classes when needed. Use services to build your application logic.

+
+
+
+technical architecture +
+
Figure 5. Screenshot of devon4net.Business.Common project in depth
+
+
+

For example, My Thai Star Booking controller constructor looks like this:

+
+
+
+
        public BookingController(IBookingService bookingService, IMapper mapper)
+        {
+            BookingService = bookingService;
+            Mapper = mapper;
+
+        }
+
+
+
+

Currently devon4net has a Unit of Work class in order to perform CRUD operations to database making use of your designed model context.

+
+
+

Repositories are declared at devon4net.Domain.UnitOfWork project and make use of Unit of Work class.

+
+
+

The common methods to perform CRUD operations (where <T> is an entity from your model) are:

+
+
+
    +
  • +

    Sync methods:

    +
  • +
+
+
+
+
IList<T> GetAll(Expression<Func<T, bool>> predicate = null);
+T Get(Expression<Func<T, bool>> predicate = null);
+IList<T> GetAllInclude(IList<string> include, Expression<Func<T, bool>> predicate = null);
+T Create(T entity);
+void Delete(T entity);
+void DeleteById(object id);
+void Delete(Expression<Func<T, bool>> where);
+void Edit(T entity);
+
+
+
+
    +
  • +

    Async methods:

    +
  • +
+
+
+
+
Task<IList<T>> GetAllAsync(Expression<Func<T, bool>> predicate = null);
+Task<T> GetAsync(Expression<Func<T, bool>> predicate = null);
+Task<IList<T>> GetAllIncludeAsync(IList<string> include, Expression<Func<T, bool>> predicate = null);
+
+
+
+

If you perform a Commit operation and an error happens, changes will be rolled back.

+
+
+
+

Swagger integration

+
+

The given templates allow you to specify the API contract through Swagger integration, and the controller classes are responsible for exposing methods making use of comments in the source code.

+
+
+

The next example shows how to comment the method with summaries in order to define the contract. Add (Triple Slash) XML Documentation To Swagger:

+
+
+
+
/// <summary>
+/// Method to get reservations
+/// </summary>
+/// <response code="201">Ok.</response>
+/// <response code="400">Bad request. Parser data error.</response>
+/// <response code="401">Unauthorized. Authentication fail.</response>
+/// <response code="403">Forbidden. Authorization error.</response>
+/// <response code="500">Internal Server Error. The search process ended with error.</response>
+[HttpPost]
+[Route("/mythaistar/services/rest/bookingmanagement/v1/booking/search")]
+//[Authorize(Policy = "MTSWaiterPolicy")]
+[AllowAnonymous]
+[EnableCors("CorsPolicy")]
+public async Task<IActionResult> BookingSearch([FromBody]BookingSearchDto bookingSearchDto)
+{
+
+
+
+

In order to be effective and make use of the comments to build the API contract, the project which contains the controller classes must generate the XML document file. To achieve this, the XML documentation file must be checked in project settings tab:

+
+
+
+technical architecture +
+
Figure 6. Project settings tab
+
+
+

We propose to generate the file under the XmlDocumentation folder. For example in devon4net.Domain.Entities project in My Thai Star .NET implementation the output folder is:

+
+
+
+
`XmlDocumentation\devon4net.Business.Common.xml`
+
+
+
+

The file devon4net.Business.Common.xml won’t appear until you build the project. Once the file is generated, please modify its properties as a resource and set it to be Copy always.

+
+
+
+technical architecture +
+
Figure 7. Swagger XML document file properties
+
+
+

Once you have this, the swagger user interface will show the method properties defined in your controller comments.

+
+
+

Making use of this technique, controllers are not encapsulated in the application project. Also, you can develop your controller classes in different projects to obtain code reusability.

+
+
+

Swagger comment:

+
+
+

|== == == == == == == == == == == = +|Comment|Functionality +|<summary>| Will map to the operation’s summary +|<remarks>| Will map to the operation’s description (shown as "Implementation Notes" in the UI) +|<response code="###">| Specifies the different response of the target method +|<param>| Will define the parameter(s) of the target method +| +|== == == == == == == == == == == =

+
+
+

Please check Microsoft’s site regarding to summary notations.

+
+
+
+

Logging module

+
+

An important part of life software is the need of using log and traces. devon4net has a log module pre-configured to achieve this important point.

+
+
+

By default Microsoft provides a logging module on .NET Core applications. This module is open and it can be extended. devon4net uses the Serilog implementation. This implementation provides a huge quantity of information about events and traces.

+
+
+
+

== Log file

+
+

devon4net can write the log information to a simple text file. You can configure the file name and folder at appsettings.json file (LogFile attribute) at devon4net.Application.WebApi project.

+
+
+
+

== Database log

+
+

devon4net can write the log information to a SQLite database. You can configure the file name and folder at appsettings.json file (LogDatabase attribute) at devon4net.Application.WebApi project.

+
+
+

With this method you can launch queries in order to search the information you are looking for.

+
+
+
+

== Seq log

+
+

devon4net can write the log information to a Seq server. You can configure the Seq server URL at the appsettings.json file (SeqLogServerUrl attribute) at the devon4net.Application.WebApi project.

+
+
+

With this method you can make queries via HTTP.

+
+
+
+serilog seq +
+
+
+

By default you can find the log information at Logs folder.

+
+
+
+

JWT module

+
+

JSON Web Tokens are an open, industry standard RFC 7519 method for representing claims securely between two parties allowing you to decode, verify and generate JWT.

+
+
+

You should use JWT for:

+
+
+
    +
  • +

    Authentication : allowing the user to access routes, services, and resources that are permitted with that token.

    +
  • +
  • +

    Information Exchange: JSON Web Tokens are a good way of securely transmitting information between parties. Additionally, as the signature is calculated using the header and the payload, you can also verify that the content hasn’t been tampered with.

    +
  • +
+
+
+

The JWT module is configured at Startup.cs inside devon4net.Application.WebApi project from .NET Core template. In this class you can configure the different authentication policy and JWT properties.

+
+
+

Once the user has been authenticated, the client performs the call to the backend with the attribute Bearer plus the token generated at server side.

+
+
+
+jwt +
+
+
+

On My Thai Star sample there are two predefined users: user0 and Waiter. Once they log in the application, the client (Angular/Xamarin) will manage the server call with the json web token. With this method we can manage the server authentication and authorization.

+
+
+

You can find more information about JWT at jwt.io

+
+
+
+

AOP module

+
+

AOP (Aspect Oriented Programming) tracks all information when a method is called. AOP also tracks the input and output data when a method is called.

+
+
+

By default devon4net has AOP module pre-configured and activated for controllers at Startup.cs file at devon4net.Application.WebApi:

+
+
+
+
options.Filters.Add(new Infrastructure.AOP.AopControllerAttribute(Log.Logger));
+
+options.Filters.Add(new Infrastructure.AOP.AopExceptionFilter(Log.Logger));
+
+
+
+

This configuration allows all Controller classes to be tracked. If you don’t need to track the info, comment out the lines written above.

+
+
+
+

Docker support

+
+

devon4net Core projects are ready to be integrated with docker.

+
+
+

My Thai Star application sample is ready to be used with Linux docker containers. The Readme file explains how to launch and set up the sample application.

+
+
+
    +
  • +

    angular : Angular client to support backend. Just binaries.

    +
  • +
  • +

    database : Database scripts and .bak file

    +
  • +
  • +

    mailservice: Microservice implementation to send notifications.

    +
  • +
  • +

    netcore: Server side using .net core 2.0.x.

    +
  • +
  • +

    xamarin: Xamarin client based on Excalibur framework from The Netherlands using XForms.

    +
  • +
+
+
+

Docker configuration and docker-compose files are provided.

+
+
+
+

Testing with XUnit

+
+
+
+

xUnit.net is a free, open source, community-focused unit testing tool for the .NET Framework. Written by the original inventor of NUnit v2, xUnit.net is the latest technology for unit testing C#, F#, VB.NET and other .NET languages. xUnit.net works with ReSharper, CodeRush, TestDriven.NET and Xamarin. It is part of the .NET Foundation, and operates under their code of conduct. It is licensed under Apache 2 (an OSI approved license).

+
+
+
+— About xUnit.net
+https://xunit.github.io/#documentation +
+
+
+

Facts are tests which are always true. They test invariant conditions.

+
+
+

Theories are tests which are only true for a particular set of data.

+
+
+
+

The first test

+
+
+
using Xunit;
+
+namespace MyFirstUnitTests
+{
+    public class Class1
+    {
+        [Fact]
+        public void PassingTest()
+        {
+            Assert.Equal(4, Add(2, 2));
+        }
+
+        [Fact]
+        public void FailingTest()
+        {
+            Assert.Equal(5, Add(2, 2));
+        }
+
+        int Add(int x, int y)
+        {
+            return x + y;
+        }
+    }
+}
+
+
+
+
+

The first test with theory

+
+

Theory attribute is used to create tests with input params:

+
+
+
+
[Theory]
+[InlineData(3)]
+[InlineData(5)]
+[InlineData(6)]
+public void MyFirstTheory(int value)
+{
+    Assert.True(IsOdd(value));
+}
+
+bool IsOdd(int value)
+{
+    return value % 2 ==  1;
+}
+
+
+
+
+

Cheat Sheet

+
+

|== == == == == == == == == == == = +|Operation| Example +|Test|

+
+
+
+
public void Test()
+{
+}
+|Setup|public class TestFixture {
+public TestFixture()
+{
+
+...
+
+    }
+
+}
+|Teardown|public class TestFixture : IDisposable
+
+{
+
+public void Dispose() {
+
+ ...
+ }
+
+}
+
+
+
+

|== == == == == == == == == == == =

+
+
+
+

Console runner return codes

+
+

|== == == == == == == == == == == = +|Code| Meaning +|0|The tests ran successfully. +|1|One or more of the tests failed. +|2|The help page was shown, either because it was requested, or because the user did not provide any command line arguments. +|3| There was a problem with one of the command line options passed to the runner. +|4|There was a problem loading one or more of the test assemblies (for example, if a 64-bit only assembly is run with the 32-bit test runner). +|== == == == == == == == == == == =

+
+
+
+

Publishing

+ +
+
+

== Nginx

+
+

In order to deploy your application to a Nginx server on Linux platform you can follow the instructions from Microsoft here.

+
+
+
+

== IIS

+
+

This point shows the configuration options that the .Net Core application must implement.

+
+
+

Supported operating systems:

+
+
+
    +
  • +

    Windows 7 and newer

    +
  • +
  • +

    Windows Server 2008 R2 and newer*

    +
  • +
+
+
+

WebListener server will not work in a reverse-proxy configuration with IIS. You must use the Kestrel server.

+
+
+

IIS configuration

+
+
+

Enable the Web Server (IIS) role and establish role services.

+
+
+

Windows desktop operating systems

+
+
+

Navigate to Control Panel > Programs > Programs and Features > Turn Windows features on or off (left side of the screen). Open the group for Internet Information Services and Web Management Tools. Check the box for IIS Management Console. Check the box for World Wide Web Services. Accept the default features for World Wide Web Services or customize the IIS features to suit your needs.

+
+
+
+iis 1 +
+
+
+

*Conceptually, the IIS configuration described in this document also applies to hosting ASP.NET Core applications on Nano Server IIS, but refer to ASP.NET Core with IIS on Nano Server for specific instructions.

+
+
+

Windows Server operating systems +For server operating systems, use the Add Roles and Features wizard via the Manage menu or the link in Server Manager. On the Server Roles step, check the box for Web Server (IIS).

+
+
+
+iis 2 +
+
+
+

On the Role services step, select the IIS role services you desire or accept the default role services provided.

+
+
+
+iis 3 +
+
+
+

Proceed through the Confirmation step to install the web server role and services. A server/IIS restart is not required after installing the Web Server (IIS) role.

+
+
+

Install the .NET Core Windows Server Hosting bundle

+
+
+
    +
  1. +

    Install the .NET Core Windows Server Hosting bundle on the hosting system. The bundle will install the .NET Core Runtime, .NET Core Library, and the ASP.NET Core Module. The module creates the reverse-proxy between IIS and the Kestrel server. Note: If the system doesn’t have an Internet connection, obtain and install the Microsoft Visual C++ 2015 Re-distributable before installing the .NET Core Windows Server Hosting bundle.

    +
  2. +
  3. +

    Restart the system or execute net stop was /y followed by net start w3svc from a command prompt to pick up a change to the system PATH.

    +
  4. +
+
+
+ + + + + +
+ + +If you use an IIS Shared Configuration, see ASP.NET Core Module with IIS Shared Configuration. +
+
+
+

To configure IISIntegration service options, include a service configuration for IISOptions in ConfigureServices:

+
+
+
+
services.Configure<IISOptions>(options =>
+{
+    ...
+});
+
+
+
+

|== == == == == == == == == == == = +|Option|Default|Setting +|AutomaticAuthentication| true |If true, the authentication middleware sets the HttpContext.User and responds to generic challenges. If false, the authentication middleware only provides an identity (HttpContext.User) and responds to challenges when explicitly requested by the Authentication Scheme. Windows Authentication must be enabled in IIS for AutomaticAuthentication to function. +|AuthenticationDisplayName | null| Sets the display name shown to users on login pages. +|ForwardClientCertificate |true|If true and the MS-ASPNETCORE-CLIENTCERT request header is present, the HttpContext.Connection.ClientCertificate is populated. +|== == == == == == == == == == == =

+
+
+

web.config

+
+
+

The web.config file configures the ASP.NET Core Module and provides other IIS configuration. Creating, transforming, and publishing web.config is handled by Microsoft.NET.Sdk.Web, which is included when you set your project’s SDK at the top of your .csproj file, <Project Sdk="Microsoft.NET.Sdk.Web">. To prevent the MSBuild target from transforming your web.config file, add the <IsTransformWebConfigDisabled> property to your project file with a setting of true:

+
+
+
+
<PropertyGroup>
+  <IsTransformWebConfigDisabled>true</IsTransformWebConfigDisabled>
+</PropertyGroup>
+
+
+
+
+

== Azure

+
+

In order to deploy your application to Azure platform you can follow the instructions from Microsoft:

+
+
+

Set up the development environment

+
+
+ +
+
+

Create a web app

+
+
+

In the Visual Studio Start Page, select File > New > Project…​

+
+
+
+File menu +
+
+
+

Complete the New Project dialog:

+
+
+
    +
  • +

    In the left pane, select .NET Core.

    +
  • +
  • +

    In the center pane, select ASP.NET Core Web Application.

    +
  • +
  • +

    Select OK.

    +
  • +
+
+
+
+New Project dialog +
+
+
+

In the New ASP.NET Core Web Application dialog:

+
+
+
    +
  • +

    Select Web Application.

    +
  • +
  • +

    Select Change Authentication.

    +
  • +
+
+
+
+New Project dialog +
+
+
+

The Change Authentication dialog appears.

+
+
+
    +
  • +

    Select Individual User Accounts.

    +
  • +
  • +

    Select OK to return to the New ASP.NET Core Web Application, then select OK again.

    +
  • +
+
+
+
+New ASP.NET Core Web authentication dialog +
+
+
+

Visual Studio creates the solution.

+
+
+

Run the app locally

+
+
+
    +
  • +

    Choose Debug then Start Without Debugging to run the app locally.

    +
  • +
  • +

    Click the About and Contact links to verify the web application works.

    +
  • +
+
+
+
+Web application open in Microsoft Edge on localhost +
+
+
+
    +
  • +

    Select Register and register a new user. You can use a fictitious email address. When you submit, the page displays the following error:

    +
  • +
+
+
+

"Internal Server Error: A database operation failed while processing the request. SQL exception: Cannot open the database. Applying existing migrations for Application DB context may resolve this issue."

+
+
+
    +
  • +

    Select Apply Migrations and, once the page updates, refresh the page.

    +
  • +
+
+
+
+Internal Server Error: A database operation failed while processing the request. SQL exception: Cannot open the database. Applying existing migrations for Application DB context may resolve this issue. +
+
+
+

The app displays the email used to register the new user and a Log out link.

+
+
+
+Web application open in Microsoft Edge. The Register link is replaced by the text Hello email@domain.com! +
+
+
+

Deploy the app to Azure

+
+
+

Close the web page, return to Visual Studio, and select Stop Debugging from the Debug menu.

+
+
+

Right-click on the project in Solution Explorer and select Publish…​.

+
+
+
+Contextual menu open with Publish link highlighted +
+
+
+

In the Publish dialog, select Microsoft Azure App Service and click Publish.

+
+
+
+Publish dialog +
+
+
+
    +
  • +

    Name the app a unique name.

    +
  • +
  • +

    Select a subscription.

    +
  • +
  • +

    Select New…​ for the resource group and enter a name for the new resource group.

    +
  • +
  • +

    Select New…​ for the app service plan and select a location near you. You can keep the name that is generated by default.

    +
  • +
+
+
+
+App Service dialog +
+
+
+
    +
  • +

    Select the Services tab to create a new database.

    +
  • +
  • +

    Select the green + icon to create a new SQL Database

    +
  • +
+
+
+
+New SQL Database +
+
+
+
    +
  • +

    Select New…​ on the Configure SQL Database dialog to create a new database.

    +
  • +
+
+
+
+New SQL Database and server +
+
+
+

The Configure SQL Server dialog appears.

+
+
+
    +
  • +

    Enter an administrator user name and password, and then select OK. Don’t forget the user name and password you create in this step. You can keep the default Server Name.

    +
  • +
  • +

    Enter names for the database and connection string.

    +
  • +
+
+
+
+

== Note

+
+

"admin" is not allowed as the administrator user name.

+
+
+
+Configure SQL Server dialog +
+
+
+
    +
  • +

    Select OK.

    +
  • +
+
+
+

Visual Studio returns to the Create App Service dialog.

+
+
+
    +
  • +

    Select Create on the Create App Service dialog.

    +
  • +
+
+
+
+Configure SQL Database dialog +
+
+
+
    +
  • +

    Click the Settings link in the Publish dialog.

    +
  • +
+
+
+
+Publish dialog: Connection panel +
+
+
+

On the Settings page of the Publish dialog:

+
+
+
    +
  • +

    Expand Databases and check Use this connection string at runtime.

    +
  • +
  • +

    Expand Entity Framework Migrations and check Apply this migration on publish.

    +
  • +
  • +

    Select Save. Visual Studio returns to the Publish dialog.

    +
  • +
+
+
+
+Publish dialog: Settings panel +
+
+
+

Click Publish. Visual Studio will publish your app to Azure and launch the cloud app in your browser.

+
+
+

Test your app in Azure

+
+
+
    +
  • +

    Test the About and Contact links

    +
  • +
  • +

    Register a new user

    +
  • +
+
+
+
+Web application opened in Microsoft Edge on Azure App Service +
+
+
+

Update the app

+
+
+
    +
  • +

    Edit the Pages/About.cshtml Razor page and change its contents. For example, you can modify the paragraph to say "Hello ASP.NET Core!":

    +
    +
    +
    html<button class="action copy" data-bi-name="copy">Copy</button>
    +
    +
    +
  • +
+
+
+
+
@page
+@model AboutModel
+@{
+    ViewData["Title"] = "About";
+}
+<h2>@ViewData["Title"]</h2>
+<h3>@Model.Message</h3>
+
+    <p>Hello ASP.NET Core!</p>
+
+
+
+
    +
  • +

    Right-click on the project and select Publish…​ again.

    +
  • +
+
+
+
+Contextual menu open with Publish link highlighted +
+
+
+
    +
  • +

    After the app is published, verify the changes you made are available on Azure.

    +
  • +
+
+
+
+Verify task is complete +
+
+
+

Clean up

+
+
+

When you have finished testing the app, go to the Azure portal and delete the app.

+
+
+
    +
  • +

    Select Resource groups, then select the resource group you created.

    +
  • +
+
+
+
+Azure Portal: Resource Groups in sidebar menu +
+
+
+
    +
  • +

    In the Resource groups page, select Delete.

    +
  • +
+
+
+
+Azure Portal: Resource Groups page +
+
+
+
    +
  • +

    Enter the name of the resource group and select Delete. Your app and all other resources created in this tutorial are now deleted from Azure.

    +
  • +
+
+
+ +
+
+
+

How To section

+
+ +
+

Introduction

+
+

The aim of this document is to show how to get devon4net things done in an easy way.

+
+
+
+

How to

+ +
+
+

Start a new devonfw project

+
+

The .Net Core 3.1 template allows you to start developing an n-layer server application to provide the latest features. The template can be used in Visual Studio Code and Visual Studio 2019.

+
+
+

The application result can be deployed as a console application, microservice or web page.

+
+
+

To start developing with the devon4Net template, please follow these instructions:

+
+
+
+

== Using devon4Net template

+ +
+
+

== Option 1

+
+
+
Open your favourite terminal (Win/Linux/iOS)
+Go to future project's path
+Type dotnet new --install Devon4Net.WebAPI.Template
+Type dotnet new Devon4NetAPI
+Go to project's path
+You are ready to start developing with devon4Net
+
+
+
+
+

== Option 2

+
+
+
Create a new dotnet `API` project from scratch
+Add the NuGet package reference to your project:
+Install-Package Devon4Net.Application.WebAPI.Configuration
+
+
+
+

Set up your project as follows in program.cs file:

+
+
+
+
        public static void Main(string[] args)
+        {
+            // Please use
+            // Devonfw.Configure<Startup>(args);
+            // Or :
+
+            WebHost.CreateDefaultBuilder(args)
+                .UseStartup<Startup>()
+                .InitializeDevonFw()
+                .Build()
+                .Run();
+        }
+
+
+
+

Set up your project as follows in startup.cs file:

+
+
+
+
    private IConfiguration Configuration { get; }
+
+
+   public Startup(IConfiguration configuration)
+    {
+        Configuration = configuration;
+    }
+
+    public void ConfigureServices(IServiceCollection services)
+    {
+
+        services.ConfigureDevonFw(Configuration);
+        SetupDatabase(services);
+
+        ...
+    }
+
+
+    private void SetupDatabase(IServiceCollection services)
+    {
+        // Default is the database connection name in appsettings.json file
+        services.SetupDatabase<TodoContext>(Configuration, "Default", DatabaseType.InMemory);
+    }
+
+    public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
+    {
+        app.ConfigureDevonFw();
+        ...
+    }
+
+
+
+

Add the devonfw configuration options in your appsettings.json file

+
+
+
+

devon4net configuration files

+
+

To start using devon4net in your .net core application add this configuration in your appsettings.json file:

+
+
+
+
 "devonfw": {
+    "UseDetailedErrorsKey": true,
+    "UseIIS": false,
+    "UseSwagger": true,
+    "Environment": "Development",
+    "KillSwitch": {
+      "killSwitchSettingsFile": "killswitch.appsettings.json"
+    },
+    "Kestrel": {
+      "UseHttps": true,
+      "HttpProtocol": "Http2", //Http1, Http2, Http1AndHttp2, none
+      "ApplicationPort": 8082,
+      "KeepAliveTimeout": 120, //in seconds
+      "MaxConcurrentConnections": 100,
+      "MaxConcurrentUpgradedConnections": 100,
+      "MaxRequestBodySize": 28.6, //In MB. The default maximum request body size is 30,000,000 bytes, which is approximately 28.6 MB
+      "Http2MaxStreamsPerConnection": 100,
+      "Http2InitialConnectionWindowSize": 131072, // From 65,535 and less than 2^31 (2,147,483,648)
+      "Http2InitialStreamWindowSize": 98304, // From 65,535 and less than 2^31 (2,147,483,648)
+      "AllowSynchronousIO": true,
+      "SslProtocol": "Tls12", //Tls, Tls11,Tls12, Tls13, Ssl2, Ssl3, none. For Https2 Tls12 is needed
+      "ServerCertificate": {
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost"
+      },
+      "ClientCertificate": {
+        "DisableClientCertificateCheck": true,
+        "RequireClientCertificate": false,
+        "CheckCertificateRevocation": true,
+        "ClientCertificates": {
+          "Whitelist": [
+            "3A87A49460E8FE0E2A198E63D408DC58435BC501"
+          ],
+          "DisableClientCertificateCheck": false
+        }
+      }
+    },
+    "IIS": {
+      "ForwardClientCertificate": true,
+      "AutomaticAuthentication": true,
+      "AuthenticationDisplayName" : ""
+    }
+  }
+
+
+
+

Also, for start using the devon4net components, you should add the next json options in your appsettings.json or appsettings.Development.json file:

+
+
+
+
{
+  "ExtraSettingsFiles": [
+    "Put a directory path (relative/absolute/linux-like) like /run/secrets/global where there are many settings/secret files to load",
+    "Put a specific file name (with/without path) like /app-configs/app/extra-settings.json"
+  ],
+  "ConnectionStrings": {
+    "Default": "Todos",
+    "Employee": "Employee",
+    "RabbitMqBackup": "Add your database connection string here for messaging backup",
+    "MediatRBackup": "Add your database connection string here for messaging backup"
+  },
+  "Logging": {
+    "LogLevel": {
+      "Default": "Debug",
+      "System": "Information",
+      "Microsoft": "Information"
+    }
+  },
+  "Swagger": {
+    "Version": "v1",
+    "Title": "devon4net API",
+    "Description": "devon4net API Contract",
+    "Terms": "https://www.devonfw.com/terms-of-use/",
+    "Contact": {
+      "Name": "devonfw",
+      "Email": "sample@mail.com",
+      "Url": "https://www.devonfw.com"
+    },
+    "License": {
+      "Name": "devonfw - Terms of Use",
+      "Url": "https://www.devonfw.com/terms-of-use/"
+    },
+    "Endpoint": {
+      "Name": "V1 Docs",
+      "Url": "/swagger/v1/swagger.json",
+      "UrlUi": "swagger",
+      "RouteTemplate": "swagger/v1/{documentName}/swagger.json"
+    }
+  },
+  "JWT": {
+    "Audience": "devon4Net",
+    "Issuer": "devon4Net",
+    "TokenExpirationTime": 60,
+    "ValidateIssuerSigningKey": true,
+    "ValidateLifetime": true,
+    "ClockSkew": 5,
+    "Security": {
+      "SecretKeyLengthAlgorithm": "",
+      "SecretKeyEncryptionAlgorithm": "",
+      "SecretKey": "",
+      "Certificate": "",
+      "CertificatePassword": "",
+      "CertificateEncryptionAlgorithm": ""
+    }
+  },
+  "Cors": []
+  //[
+  //  {
+  //    "CorsPolicy": "CorsPolicy1",
+  //    "Origins": "http://example.com,http://www.contoso.com",
+  //    "Headers": "accept,content-type,origin,x-custom-header",
+  //    "Methods": "GET,POST,HEAD",
+  //    "AllowCredentials": true
+  //  },
+  //  {
+  //    "CorsPolicy": "CorsPolicy2",
+  //    "Origins": "http://example.com,http://www.contoso.com",
+  //    "Headers": "accept,content-type,origin,x-custom-header",
+  //    "Methods": "GET,POST,HEAD",
+  //    "AllowCredentials": true
+  //  }
+  //]
+  ,
+  "CircuitBreaker": {
+    "CheckCertificate": false,
+    "Endpoints": [
+      {
+        "Name": "AnsibleTower",
+        "BaseAddress": "PUT THE IP ADDRESS HERE",
+        "Headers": {
+        },
+        "WaitAndRetrySeconds": [
+          0.0001,
+          0.0005,
+          0.001
+        ],
+        "DurationOfBreak": 0.0005,
+        "UseCertificate": false,
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost",
+        "SslProtocol": "3072" //TLS12
+      },
+      {
+        "Name": "CyberArk",
+        "BaseAddress": "PUT THE IP ADDRESS HERE",
+        "Headers": {
+        },
+        "WaitAndRetrySeconds": [
+          0.0001,
+          0.0005,
+          0.001
+        ],
+        "DurationOfBreak": 0.0005,
+        "UseCertificate": false,
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost",
+        "SslProtocol": "3072" //TLS12
+      },
+      {
+        "Name": "SmaxHcm",
+        "BaseAddress": "PUT THE IP ADDRESS HERE",
+        "Headers": {
+        },
+        "WaitAndRetrySeconds": [
+          0.0001,
+          0.0005,
+          0.001
+        ],
+        "DurationOfBreak": 0.0005,
+        "UseCertificate": false,
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost",
+        "SslProtocol": "3072" //TLS12
+      }
+    ]
+  },
+  "Headers": {
+    "AccessControlExposeHeader": "Authorization",
+    "StrictTransportSecurityHeader": "",
+    "XFrameOptionsHeader": "DENY",
+    "XssProtectionHeader": "1;mode=block",
+    "XContentTypeOptionsHeader": "nosniff",
+    "ContentSecurityPolicyHeader": "",
+    "PermittedCrossDomainPoliciesHeader": "",
+    "ReferrerPolicyHeader": ""
+  },
+  "Log": {
+    "UseAOPTrace": false,
+    "LogLevel": "Debug",
+    "SqliteDatabase": "logs/log.db",
+    "LogFile": "logs/{0}_devonfw.log",
+    "SeqLogServerHost": "http://127.0.0.1:5341",
+    "GrayLog": {
+      "GrayLogHost": "127.0.0.1",
+      "GrayLogPort": "12201",
+      "GrayLogProtocol": "UDP",
+      "UseSecureConnection": true,
+      "UseAsyncLogging": true,
+      "RetryCount": 5,
+      "RetryIntervalMs": 15,
+      "MaxUdpMessageSize": 8192
+    }
+  },
+  "RabbitMq": {
+    "EnableRabbitMq": false,
+    "Hosts": [
+      {
+        "Host": "127.0.0.1",
+        "Port": 5672,
+        "Ssl": false,
+        "SslServerName": "localhost",
+        "SslCertPath": "localhost.pfx",
+        "SslCertPassPhrase": "localhost",
+        "SslPolicyErrors": "RemoteCertificateNotAvailable" //None, RemoteCertificateNotAvailable, RemoteCertificateNameMismatch, RemoteCertificateChainErrors
+      }
+    ],
+
+    "VirtualHost": "/",
+    "UserName": "admin",
+    "Password": "password",
+    "Product": "devon4net",
+    "RequestedHeartbeat": 10, //Set to zero for no heartbeat
+    "PrefetchCount": 50,
+    "PublisherConfirms": false,
+    "PersistentMessages": true,
+    "Platform": "localhost",
+    "Timeout": 10,
+    "Backup": {
+      "UseLocalBackup": false,
+      "DatabaseName": "devon4netMessageBackup.db"
+    }
+  },
+  "MediatR": {
+    "EnableMediatR": false,
+    "Backup": {
+      "UseLocalBackup": false,
+      "DatabaseName": "devon4netMessageBackup.db"
+    }
+  },
+  "LiteDb": {
+    "DatabaseLocation": "devon4net.db"
+  },
+  "AnsibleTower": {
+    "EnableAnsible": false,
+    "Name": "AnsibleTower",
+    "CircuitBreakerName": "AnsibleTower",
+    "ApiUrlBase": "/api/v2/?format=json",
+    "Version": "1.0.5.29",
+    "Username": "",
+    "Password": ""
+  },
+  "CyberArk": {
+    "EnableCyberArk": false,
+    "Username": "",
+    "Password": "",
+    "CircuitBreakerName": "CyberArk"
+  },
+  "SmaxHcm": {
+    "EnableSmax": false,
+    "Username": "",
+    "Password": "",
+    "TenantId": "",
+    "CircuitBreakerName": "SmaxHcm",
+    "ProviderId": ""
+  },
+  "Kafka": {
+    "EnableKafka": true,
+    "Administration": [
+      {
+        "AdminId": "Admin1",
+        "Servers": "127.0.0.1:9092"
+      }
+    ],
+    "Producers": [
+      {
+        "ProducerId": "Producer1", // devon identifier
+        "Servers": "127.0.0.1:9092", // Initial list of brokers as a CSV list of broker host or host:port. The application may also use `rd_kafka_brokers_add()` to add brokers during runtime
+        "ClientId": "client1", //Client identifier
+        "Topic": "devonfw", // topics to deliver the message
+        "MessageMaxBytes": 1000000, //Maximum Kafka protocol request message size. Due to differing framing overhead between protocol versions the producer is unable to reliably enforce a strict max message limit at produce time and may exceed the maximum size by one message in protocol ProduceRequests, the broker will enforce the the topic's `max.message.bytes` limit (see Apache Kafka documentation)
+        "CompressionLevel": -1, // [0-9] for gzip; [0-12] for lz4; only 0 for snappy; -1 = codec-dependent default compression level
+        "CompressionType": "None", // None, Gzip, Snappy, Lz4, Zstd
+        "ReceiveMessageMaxBytes": 100000000,
+        "EnableSslCertificateVerification": false,
+        "CancellationDelayMaxMs": 100, // The maximum length of time (in milliseconds) before a cancellation request is acted on. Low values may result in measurably higher CPU usage
+        "Ack": "None", //Zero=Broker does not send any response/ack to client, One=The leader will write the record to its local log but will respond without awaiting full acknowledgement from all followers. All=Broker will block until message is committed by all in sync replicas (ISRs). If there are less than min.insync.replicas (broker configuration) in the ISR set the produce request will fail
+        "Debug": "", //A comma-separated list of debug contexts to enable. Detailed Producer debugging: broker,topic,msg. Consumer: consumer,cgrp,topic,fetch
+        "BrokerAddressTtl": 1000, //How long to cache the broker address resolving results (milliseconds)
+        "BatchNumMessages": 1000000, // Maximum size (in bytes) of all messages batched in one MessageSet, including protocol framing overhead. This limit is applied after the first message has been added to the batch, regardless of the first message's size, this is to ensure that messages that exceed batch.size are produced. The total MessageSet size is also limited by batch.num.messages and message.max.bytes
+        "EnableIdempotence": false, //When set to `true`, the producer will ensure that messages are successfully produced exactly once and in the original produce order. The following configuration properties are adjusted automatically (if not modified by the user) when idempotence is enabled: `max.in.flight.requests.per.connection=5` (must be less than or equal to 5), `retries=INT32_MAX` (must be greater than 0), `acks=all`, `queuing.strategy=fifo`. Producer instantation will fail if user-supplied configuration is incompatible
+        "MaxInFlight": 5,
+        "MessageSendMaxRetries": 5,
+        "BatchSize": 100000000 // Maximum size (in bytes) of all messages batched in one MessageSet, including protocol framing overhead. This limit is applied after the first message has been added to the batch, regardless of the first message's size, this is to ensure that messages that exceed batch.size are produced. The total MessageSet size is also limited by batch.num.messages and message.max.bytes
+      }
+    ],
+    "Consumers": [
+      {
+        "ConsumerId": "Consumer1", // devon identifier
+        "Servers": "127.0.0.1:9092",
+        "GroupId": "group1",
+        "Topics": "devonfw", // Comma separated topics to subscribe
+        "AutoCommit": true, //Automatically and periodically commit offsets in the background. Note: setting this to false does not prevent the consumer from fetching previously committed start offsets. To circumvent this behaviour set specific start offsets per partition in the call to assign()
+        "StatisticsIntervalMs": 0, //librdkafka statistics emit interval. The application also needs to register a stats callback using `rd_kafka_conf_set_stats_cb()`. The granularity is 1000ms. A value of 0 disables statistics
+        "SessionTimeoutMs": 10000, //Client group session and failure detection timeout. The consumer sends periodic heartbeats (heartbeat.interval.ms) to indicate its liveness to the broker. If no hearts are received by the broker for a group member within the session timeout, the broker will remove the consumer from the group and trigger a rebalance. The allowed range is configured with the **broker** configuration properties `group.min.session.timeout.ms` and `group.max.session.timeout.ms`. Also see `max.poll.interval.ms`
+        "AutoOffsetReset": "Largest", //Action to take when there is no initial offset in offset store or the desired offset is out of range: 'smallest','earliest' - automatically reset the offset to the smallest offset, 'largest','latest' - automatically reset the offset to the largest offset, 'error' - trigger an error which is retrieved by consuming messages and checking 'message-&gt;err'
+        "EnablePartitionEof": true, //Verify CRC32 of consumed messages, ensuring no on-the-wire or on-disk corruption to the messages occurred. This check comes at slightly increased CPU usage
+        "IsolationLevel": "ReadCommitted", //Controls how to read messages written transactionally: `ReadCommitted` - only return transactional messages which have been committed. `ReadUncommitted` - return all messages, even transactional messages which have been aborted.
+        "EnableSslCertificateVerification": false,
+        "Debug": "" //A comma-separated list of debug contexts to enable. Detailed Producer debugging: broker,topic,msg. Consumer: consumer,cgrp,topic,fetch
+      }
+    ]
+  }
+}
+
+
+
+
+

devon4net Cobigen Guide

+ +
+
+

Overview

+
+

In this guide we will explain how to generate a new WebApi project from an OpenAPI 3.0.0 specification. This means that we are going to use a “contract first” strategy. This is going to be possible due to these types of files, which contain all the information about entities, operations, etc…

+
+
+

In order to make it work we are using CobiGen, a powerful tool for generating source code. CobiGen allows users to generate all the structure and code of the components, helping to save a lot of time otherwise wasted on repetitive tasks.

+
+
+
+

Getting things ready

+ +
+
+

== devonfw Distribution

+
+

The devonfw distributions can be obtained from here. You can find all releases in maven central.

+
+
+

It is not necessary to install nor configure anything. Just extracting the zip content is enough to have a fully functional devonfw. The only thing you have to do is run create-or-update-workspace.bat and then update-all-workspaces.bat to set up all the needed tools.

+
+
+
+

== devon4net Templates

+
+

We are going to use the template of devon4net as a base to generate all the code, so what we have to do now is to download said template using the following steps.

+
+
+

First of all you have to set up all the environment for .NET, you can do this using the following tutorial. Next we are going to create a new folder where we want to have the WebAPI project, lastly we are going to open the terminal there.

+
+
+

Type the following:

+
+
+
+
dotnet new -i Devon4Net.WebAPI.Template
+
+
+
+

and then:

+
+
+
+
dotnet new Devon4NetAPI
+
+
+
+
+

== OpenAPI File

+
+

In order to let CobiGen generate all the files, we first have to make some modifications to our OpenAPI file.

+
+
+

It is obligatory to put the “x-rootpackage” tag to indicate where CobiGen will place the generated files as well as the "x-component" tags for each component, keep in mind that due to CobiGen’s limitations each component must have its own entity.

+
+
+

You can read more information about how to configure your OpenAPI file and a working example here.

+
+
+
+

Generating files

+
+

Cobigen allows us to generate the files in two different ways. One of them is using Eclipse, which can be done by using its graphical interface. The other way to generate the code is using the Cobigen CLI tool.

+
+
+
+

== Generating files through Eclipse

+
+

In order to generate the files using Eclipse we need to follow some simple steps.

+
+
+

First we are going to import our basic devon4net WebAPI Project into Eclipse. To do so, open Eclipse with the “eclipse-main.bat” file that can be found in the devon distribution root folder. Once we are inside of Eclipse we go to File > Open projects from file system…​ and, under "Directory", search for your project.

+
+
+
+cobigen +
+
+
+

Next we copy our OpenAPI file into the root folder of the project.

+
+
+
+cobigen +
+
+
+

And then we right click on OpenAPI file and then select CobiGen > Generate…​ It will display a window like this:

+
+
+
+cobigen +
+
+
+

To select all .NET features choose CRUD devon4net Server otherwise you can select only those that interest you.

+
+
+
+cobigen +
+
+
+

Once you select all the files that you want to generate, click on the “Finish” button to generate all the source code.

+
+
+
+

== Generating files through Cobigen CLI

+
+

In order to generate the files using the Cobigen CLI, it is necessary to do the following steps:

+
+
+
    +
  1. +

    Go to devonfw distribution folder

    +
  2. +
  3. +

    Run console.bat, this will open a console.

    +
  4. +
  5. +

    Go to the folder you downloaded the devon4net template and your yml file.

    +
  6. +
  7. +

    Run the command:

    +
    +
    +
    cobigen generate {yourOpenAPIFile}.yml
    +
    +
    +
  8. +
  9. +

    A list of increments will be printed so that you can start the generation. It has to be selected CRUD devon4net Server increment.

    +
  10. +
+
+
+
+

Configuration

+ +
+
+

== Dependency Injection configuration

+
+

At this point it is needed to make some modifications in the code in order to configure correctly the server. To do so it is needed to locate the services and the repositories files that were created in Devon4Net.WebAPI.Implementation

+
+
+

Services location:

+
+
+
+cobigen +
+
+
+

Repositories location:

+
+
+
+cobigen +
+
+
+

Now, we are going to open the following file Devon4Net.WebAPI.Implementation\Configure\DevonConfiguration.cs. +In there we have to add the Dependency Injection for the services and the repositories that Cobigen has generated. The following image is an example of what is needed to add.

+
+
+
+cobigen +
+
+
+

Moreover it is needed to remove the last line in order to be able to run the application:

+
+
+
+
`throw new NotImplementedException(...);`
+
+
+
+
+

== Configure data base

+
+

Cobigen generates an empty context that has to be filled in manually in order to be able to work with the database. The context can be found in [Project_Name]/Devon4Net.WebAPI.Implementation/Domain/Database/CobigenContext.cs.

+
+
+
+cobigen +
+
+
+
+

== Configure services

+
+

In order to finish the configuration of the services it is needed to go to each service file of the managements generated.

+
+
+

In there we will see some "NotImplementedExceptions", so it is necessary to read carefully each comment inside each exception in order to be able to use the service. An example of the service with its NotImplementedException comments is shown below:

+
+
+
+cobigen +
+
+
+
+

== Run the application

+
+

After doing all the steps defined above, open a terminal in path: [Project_Name]/Devon4Net.Application.WebAPI and then type:

+
+
+
+
dotnet run
+
+
+
+

This will deploy our application on localhost on port 8082, so when you click here (https://localhost:8082/swagger) you can see, in swagger, all the services and the data model.

+
+
+
+

Use HTTP2 protocol

+
+

You can specify the HTTP protocol to be used by your devon4net application by modifying some node values under the devonfw node in your appsettings configuration file.

+
+
+
+

HttpProtocol

+
+

The supported protocols are:

+
+
+

|== == == == == == == == == == == = +|Protocol|Description +|Http1| Http1 protocol +|Http2| Http2 Protocol +|Http1AndHttp2| Both supported +|== == == == == == == == == == == =

+
+
+
+

== SSL

+
+

To activate HTTP2, the SslProtocol node must be set to the Tls12 value.

+
+
+

The SSL protocol supported version values are:

+
+
+
    +
  • +

    Tls

    +
  • +
  • +

    Tls11

    +
  • +
  • +

    Tls12

    +
  • +
  • +

    Tls13

    +
  • +
  • +

    Ssl2

    +
  • +
  • +

    Ssl3

    +
  • +
+
+
+
+

Create a certificate for development purposes

+
+

In order to create a valid certificate for development purposes the OpenSSL tools are needed.

+
+
+
+

Certificate authority (CA)

+
+

Run the next commands in a shell:

+
+
+
+
1. openssl req -x509 -nodes -new -sha256 -days 1024 -newkey rsa:2048 -keyout RootCA.key -out RootCA.pem -subj "/C=ES/ST=Valencia/L=Valencia/O=Certificates/CN=localhost.local"
+
+2. openssl x509 -outform pem -in RootCA.pem -out RootCA.crt
+
+
+
+

If you want to convert your certificate run the command:

+
+
+
+
openssl pkcs12 -export -out localhost.pfx -inkey RootCA.key -in RootCA.crt
+
+
+
+
+

Domain name certificate

+
+

Run the next commands in a shell:

+
+
+
+
1. openssl req -new -nodes -newkey rsa:2048 -keyout localhost.key -out localhost.csr -subj "/C=ES/ST=Valencia/L=Valencia/O=Certificates/CN=localhost.local"
+
+2. openssl x509 -req -sha256 -days 1024 -in localhost.csr -CA RootCA.pem -CAkey RootCA.key -CAcreateserial -extfile domains.ext -out localhost.crt
+
+
+
+

Where the domains.ext file should contain:

+
+
+
+
authorityKeyIdentifier=keyid,issuer
+basicConstraints=CA:FALSE
+keyUsage = digitalSignature, nonRepudiation, keyEncipherment, dataEncipherment
+subjectAltName = @alt_names
+[alt_names]
+DNS.1 = localhost
+DNS.2 = localhost.local
+DNS.3 = 127.0.0.1
+DNS.4 = fake1.local
+DNS.5 = fake2.local
+
+
+
+

If you want to convert your certificate run the command:

+
+
+
+
openssl pkcs12 -export -out localhost.pfx -inkey localhost.key -in localhost.crt
+
+
+
+
+

Setup the database driver

+
+

Add the database connection on the SetupDatabase method at Startup.cs

+
+
+
+
       private void SetupDatabase(IServiceCollection services)
+        {
+            services.SetupDatabase<TodoContext>(Configuration, "Default", WebAPI.Configuration.Enums.DatabaseType.InMemory);
+        }
+
+
+
+

Where:

+
+
+

|== == == == == == == == == == == = +|Param|Description +|TodoContext| Is the database context definition +|Default| Is the connection string defined at ConnectionString node at the appsettings configuration file +|WebAPI.Configuration.Enums.DatabaseType.InMemory| Is the database driver selection. In this case InMemory data base is chosen +|== == == == == == == == == == == =

+
+
+

The supported databases are:

+
+
+
    +
  • +

    SqlServer

    +
  • +
  • +

    Sqlite

    +
  • +
  • +

    InMemory

    +
  • +
  • +

    Cosmos

    +
  • +
  • +

    PostgreSQL

    +
  • +
  • +

    MySql

    +
  • +
  • +

    MariaDb

    +
  • +
  • +

    FireBird

    +
  • +
  • +

    Oracle

    +
  • +
  • +

    MSAccess

    +
  • +
+
+
+
+

Change the JWT encryption algorithm

+
+

In the appsettings.json configuration file, you can use the next values on the SecretKeyLengthAlgorithm and SecretKeyEncryptionAlgorithm nodes at JWT configuration:

+
+
+

|== == == == == == == == == == == = +|Algorithm|Description +|Aes128Encryption|"http://www.w3.org/2001/04/xmlenc#aes128-cbc" +|Aes192Encryption|"http://www.w3.org/2001/04/xmlenc#aes192-cbc" +|Aes256Encryption|"http://www.w3.org/2001/04/xmlenc#aes256-cbc" +|DesEncryption|"http://www.w3.org/2001/04/xmlenc#des-cbc" +|Aes128KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes128" +|Aes192KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes192" +|Aes256KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes256" +|RsaV15KeyWrap|"http://www.w3.org/2001/04/xmlenc#rsa-1_5" +|Ripemd160Digest|"http://www.w3.org/2001/04/xmlenc#ripemd160" +|RsaOaepKeyWrap|"http://www.w3.org/2001/04/xmlenc#rsa-oaep" +|Aes128KW|"A128KW" +|Aes256KW|"A256KW" +|RsaPKCS1|"RSA1_5" +|RsaOAEP|"RSA-OAEP" +|ExclusiveC14n|"http://www.w3.org/2001/10/xml-exc-c14n#" +|ExclusiveC14nWithComments|"http://www.w3.org/2001/10/xml-exc-c14n#WithComments" +|EnvelopedSignature|"http://www.w3.org/2000/09/xmldsig#enveloped-signature" +|Sha256Digest|"http://www.w3.org/2001/04/xmlenc#sha256" +|Sha384Digest|"http://www.w3.org/2001/04/xmldsig-more#sha384" +|Sha512Digest|"http://www.w3.org/2001/04/xmlenc#sha512" +|Sha256|"SHA256" +|Sha384|"SHA384" +|Sha512|"SHA512" +|EcdsaSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha256" +|EcdsaSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha384" +|EcdsaSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha512" +|HmacSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha256" +|HmacSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha384" +|HmacSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha512" +|RsaSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha256" +|RsaSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha384" +|RsaSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha512" +|RsaSsaPssSha256Signature|"http://www.w3.org/2007/05/xmldsig-more#sha256-rsa-MGF1" 
+|RsaSsaPssSha384Signature|"http://www.w3.org/2007/05/xmldsig-more#sha384-rsa-MGF1" +|RsaSsaPssSha512Signature|"http://www.w3.org/2007/05/xmldsig-more#sha512-rsa-MGF1" +|EcdsaSha256|"ES256" +|EcdsaSha384|"ES384" +|EcdsaSha512|"ES512" +|HmacSha256|"HS256" +|HmacSha384|"HS384" +|HmacSha512|"HS512" +|None|"none" +|RsaSha256|"RS256" +|RsaSha384|"RS384" +|RsaSha512|"RS512" +|RsaSsaPssSha256|"PS256" +|RsaSsaPssSha384|"PS384" +|RsaSsaPssSha512|"PS512" +|Aes128CbcHmacSha256|"A128CBC-HS256" +|Aes192CbcHmacSha384|"A192CBC-HS384" +|Aes256CbcHmacSha512|"A256CBC-HS512" +|== == == == == == == == == == == =

+
+
+
+
+
+

Cobigen guide

+
+ +
+

devon4net Cobigen Guide

+ +
+
+

Overview

+
+

In this guide we will explain how to generate a new WebApi project from an OpenAPI 3.0.0 specification. This means that we are going to use a “contract first” strategy. This is going to be possible due to these types of files, which contain all the information about entities, operations, etc…

+
+
+

In order to make it work we are using CobiGen, a powerful tool for generating source code. CobiGen allows users to generate all the structure and code of the components, helping to save a lot of time otherwise wasted on repetitive tasks.

+
+
+
+

Getting things ready

+ +
+
+

devonfw Distribution

+
+

The devonfw distributions can be obtained from the TeamForge releases library and are packaged in zips files that include all the needed tools, software and configurations.

+
+
+

It is not necessary to install nor configure anything. Just extracting the zip content is enough to have a fully functional devonfw. The only thing you have to do is run create-or-update-workspace.bat and then update-all-workspaces.bat to set up all the needed tools.

+
+
+
+

devon4net Templates

+
+

We are going to use the template of devon4net as a base to generate all the code, so what we have to do now is to download said template using the following steps.

+
+
+

First of all you have to set up all the environment for .NET, you can do this using the following tutorial. Next we are going to create a new folder where we want to have the WebAPI project, lastly we are going to open the terminal there.

+
+
+

Type the following:

+
+
+
+
dotnet new -i Devon4Net.WebAPI.Template
+
+
+
+

and then:

+
+
+
+
dotnet new Devon4NetAPI
+
+
+
+
+

OpenAPI File

+
+

In order to let CobiGen generate all the files, we first have to make some modifications to our OpenAPI file.

+
+
+

It is obligatory to put the “x-rootpackage” tag to indicate where CobiGen will place the generated files as well as the "x-component" tags for each component, keep in mind that due to CobiGen’s limitations each component must have its own entity.

+
+
+

You can read more information about how to configure your OpenAPI file and a working example here.

+
+
+
+

Generating files

+
+

Cobigen allows us to generate the files in two different ways. One of them is using Eclipse, which can be done by using its graphical interface. The other way to generate the code is using the Cobigen CLI tool.

+
+
+
+

Generating files through Eclipse

+
+

In order to generate the files using Eclipse we need to follow some simple steps.

+
+
+

First we are going to import our basic devon4net WebAPI Project into Eclipse. To do so, open Eclipse with the “eclipse-main.bat” file that can be found in the devon distribution root folder. Once we are inside of Eclipse we go to File > Open projects from file system…​ and, under "Directory", search for your project.

+
+
+
+cobigen +
+
+
+

Next we copy our OpenAPI file into the root folder of the project.

+
+
+
+cobigen +
+
+
+

And then we right click on OpenAPI file and then select CobiGen > Generate…​ It will display a window like this:

+
+
+
+cobigen +
+
+
+

To select all .NET features choose CRUD devon4net Server otherwise you can select only those that interest you.

+
+
+
+cobigen +
+
+
+

Once you select all the files that you want to generate, click on the “Finish” button to generate all the source code.

+
+
+
+

Generating files through Cobigen CLI

+
+

In order to generate the files using the Cobigen CLI, it is necessary to do the following steps:

+
+
+
    +
  1. +

    Go to devonfw distribution folder

    +
  2. +
  3. +

    Run console.bat, this will open a console.

    +
  4. +
  5. +

    Go to the folder you downloaded the devon4net template and your yml file.

    +
  6. +
  7. +

    Run the command:

    +
    +
    +
    cobigen generate {yourOpenAPIFile}.yml
    +
    +
    +
  8. +
  9. +

    A list of increments will be printed so that you can start the generation. It has to be selected CRUD devon4net Server increment.

    +
  10. +
+
+
+
+

Configuration

+ +
+
+

Dependency Injection configuration

+
+

At this point it is needed to make some modifications in the code in order to configure correctly the server. To do so it is needed to locate the services and the repositories files that were created in Devon4Net.WebAPI.Implementation

+
+
+

Services location:

+
+
+
+cobigen +
+
+
+

Repositories location:

+
+
+
+cobigen +
+
+
+

Now, we are going to open the following file Devon4Net.WebAPI.Implementation\Configure\DevonConfiguration.cs. +In there we have to add the Dependency Injection for the services and the repositories that Cobigen has generated. The following image is an example of what is needed to add.

+
+
+
+cobigen +
+
+
+

Moreover it is needed to remove the last line in order to be able to run the application:

+
+
+
+
`throw new NotImplementedException(...);`
+
+
+
+
+

Configure data base

+
+

Cobigen generates an empty context that has to be filled in manually in order to be able to work with the database. The context can be found in [Project_Name]/Devon4Net.WebAPI.Implementation/Domain/Database/CobigenContext.cs.

+
+
+
+cobigen +
+
+
+
+

Configure services

+
+

In order to finish the configuration of the services it is needed to go to each service file of the managements generated.

+
+
+

In there we will see some "NotImplementedExceptions", so it is necessary to read carefully each comment inside each exception in order to be able to use the service. An example of the service with its NotImplementedException comments is shown below:

+
+
+
+cobigen +
+
+
+
+

Run the application

+
+

After doing all the steps defined above, open a terminal in path: [Project_Name]/Devon4Net.Application.WebAPI and then type:

+
+
+
+
dotnet run
+
+
+
+

This will deploy our application on localhost on port 8082, so when you click here (https://localhost:8082/swagger) you can see, in swagger, all the services and the data model.

+
+
+
+
+
+

Coding conventions

+
+ +
+

== Code conventions

+
+
+

Introduction

+
+

This document covers .NET Coding Standards and is recommended to be read by team leaders/sw architects and developing teams operating in the Microsoft .NET environment.

+
+
+

“All the code in the system looks as if it was written by a single – very competent – individual” (K. Beck)

+
+
+
+

Capitalization Conventions

+
+
Terminology
+
+
Camel Case (camelCase)
+
+

Each word or abbreviation in the middle of the phrase begins with a capital letter, with no intervening spaces or punctuation.

+
+
+

The camel case convention, used only for parameter names, capitalizes the first character of each word except the first word, as shown in the following examples. As the example also shows, two-letter acronyms that begin a camel-cased identifier are both lowercase.

+
+
+

use camelCasing for parameter names.

+
+
+
+
Pascal Case (PascalCase)
+
+

The first letter of each concatenated word is capitalized. No other characters are used to separate the words, like hyphens or underscores.

+
+
+

The PascalCasing convention, used for all identifiers except parameter names, capitalizes the first character of each word (including acronyms over two letters in length).

+
+
+

use PascalCasing for all public member, type, and namespace names consisting of multiple words.

+
+
+
+
Underscore Prefix (_underScore)
+
+

For the underscore prefix ( _ ), the word after _ uses camelCase terminology.

+
+
+
+
+
+

General Naming Conventions

+
+

choose easily readable identifier names.

+
+
+

favor readability over brevity.

+
+
+
+
◦ e.g.: `GetLength` is a better name than GetInt.
+◦ Aim for the “ubiquitous language” (E. Evans): A language distilled from the domain language, which helps the team clarifying domain concepts and communicating with domain experts.
+
+
+
+

prefer adding a suffix rather than a prefix to indicate a new version of an existing API.

+
+
+

use a numeric suffix to indicate a new version of an existing API, particularly if the existing name of the API is the only name that makes sense (i.e., if it is an industry standard) and if adding any meaningful suffix (or changing the name) is not an appropriate option.

+
+
+

do not use underscores, hyphens, or any other non-alphanumeric characters.

+
+
+

do not use Hungarian notation.

+
+
+

avoid using identifiers that conflict with keywords of widely used programming languages.

+
+
+

do not use abbreviations or contractions as part of identifier names.

+
+
+

do not use any acronyms that are not widely accepted, and even if they are, only when necessary.

+
+
+

do not use the "Ex" (or a similar) suffix for an identifier to distinguish it from an earlier version of the same API.

+
+
+

do not use C# reserved words as names.

+
+
+

do not use Hungarian notation. Hungarian notation is the practice of including a prefix in identifiers to encode some metadata about the parameter, such as the data type of the identifier.

+
+
+
+
◦ `e.g.: iNumberOfClients, sClientName`
+
+
+
+
+

Names of Assemblies and DLLs

+
+

An assembly is the unit of deployment and identity for managed code programs. Although assemblies can span one or more files, typically an assembly maps one-to-one with a `DLL`. Therefore, this section describes only `DLL` naming conventions, which then can be mapped to assembly naming conventions.

+
+
+

choose names for your assembly DLLs that suggest large chunks of functionality, such as System.Data.

+
+
+

Assembly and DLL names don’t have to correspond to namespace names, but it is reasonable to follow the namespace name when naming assemblies. A good rule of thumb is to name the DLL based on the common prefix of the assemblies contained in the assembly. For example, an assembly with two namespaces, MyCompany.MyTechnology.FirstFeature and MyCompany.MyTechnology.SecondFeature, could be called MyCompany.MyTechnology.dll.

+
+
+

consider naming DLLs according to the following pattern:
+<Company>.<Component>.dll +where <Component> contains one or more dot-separated clauses.

+
+
+

For example: +Litware.Controls.dll.

+
+
+
+

General coding style

+
+
    +
  • +

    Source files: One Namespace per file and one class per file.

    +
  • +
  • +

    Braces: On new line. Always use braces when optional.

    +
  • +
  • +

    Indention: Use tabs with size of 4.

    +
  • +
  • +

    Comments: Use // for simple comments or /// for summaries. Do not use /* … */ and do not use flower box comments.

    +
  • +
  • +

    Use built-in C# native data types vs .NET CTS types (string instead of String)

    +
  • +
  • +

    Avoid changing default type in Enums.

    +
  • +
  • +

    Use base or this only in constructors or within an override.

    +
  • +
  • +

    Always check for null before invoking events.

    +
  • +
  • +

    Avoid using Finalize. Use C# Destructors and do not create Finalize() method.

    +
  • +
  • +

    Suggestion: Use blank lines, to make it much more readable by dividing it into small, easy-to-digest sections:

    +
    +
    +
    ◦ Use a single blank line to separate logical groups of code, such as control structures.
    +◦ Use two blank lines to separate method definitions
    +
    +
    +
  • +
+
+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
CaseConvention

Source File

Pascal case. Match class name and file name

Namespace

Pascal case

Class

Pascal case

Interface

Pascal case

Generics

Single capital letter (T or K)

Methods

Pascal case (use a Verb or Verb+Object)

Public field

Pascal case

Private field

Camel case with underscore (_) prefix

Static field

Pascal case

Property

Pascal case. Try to use the get and set convention {get;set;}

Constant

Pascal case

Enum

Pascal case

Variable (inline)

Camel case

Param

Camel case

+
+
+

Use of Region guideline

+
+

Regions can be used to collapse code inside Visual Studio .NET. Regions are ideal candidates to hide boiler plate style code that adds little value to the reader on your code. Regions can then be expanded to provide progressive disclosure of the underlying details of the class or method.

+
+
+
    +
  • +

    Do Not regionalise entire type definitions that are of an important nature. Types such as enums (which tend to be fairly static in their nature) can be regionalised – their permissible values show up in Intellisense anyway.

    +
  • +
  • +

    Do Not regionalise an entire file. When another developer opens the file, all they will see is a single line in the code editor pane.

    +
  • +
  • +

    Do regionalise boiler plate type code.

    +
  • +
+
+
+
+

Use of Comment guideline

+
+

Code is the only completely reliable documentation: write “good code” first!

+
+
+
Avoid Unnecessary comments
+
+
    +
  • +

    Choosing good names for fields, methods, parameters, etc. “let the code speak” (K. Beck) by itself reducing the need for comments and documentation

    +
  • +
  • +

    Avoid “repeating the code” and commenting the obvious

    +
  • +
  • +

    Avoid commenting “tricky code”: rewrite it! If there’s no time at present to refactor a tricky section, mark it with a TODO and schedule time to take care of it as soon as possible.

    +
  • +
+
+
+
+
Effective comments
+
+
    +
  • +

    Use comments to summarize a section of code

    +
  • +
  • +

    Use comments to clarify sensitive pieces of code

    +
  • +
  • +

    Use comments to clarify the intent of the code

    +
  • +
  • +

    Badly written or out-of-date comments are more damaging than helpful:

    +
  • +
  • +

    Write clear and effective comments

    +
  • +
  • +

    Pay attention to pre-existing comments when modifying code or copying&pasting code

    +
  • +
+
+
+
+ +
+
+
+

Environment

+
+ +
+

Environment

+ +
+
+

Overview

+ +
+
+

Required software

+ + + +
+
+

Setting up the environment

+
+
    +
  1. +

    Download and install Visual Studio Code

    +
  2. +
  3. +

    Download and install .Net Core SDK

    +
  4. +
  5. +

    Install the extension OmniSharp in Visual Studio Code

    +
  6. +
+
+
+
+

== Hello world

+
+
    +
  1. +

    Open a project:

    +
    +
      +
    • +

      Open Visual Studio Code.

      +
    • +
    • +

      Click on the Explorer icon on the left menu and then click Open Folder.

      +
    • +
    • +

      Select the folder you want your C# project to be in and click Select Folder. For our example, we’ll create a folder for our project named 'HelloWorld'.

      +
    • +
    +
    +
  2. +
  3. +

    Initialize a C# project:

    +
    +
      +
    • +

      Open the Integrated Terminal from Visual Studio Code by typing CTRL+(backtick). Alternatively, you can select View > Integrated Terminal from the main menu.

      +
    • +
    • +

      In the terminal window, type dotnet new console.

      +
    • +
    • +

      This creates a Program.cs file in your folder with a simple "Hello World" program already written, along with a C# project file named HelloWorld.csproj.

      +
    • +
    +
    +
  4. +
  5. +

    Resolve the build assets:

    +
    +
      +
    • +

      For .NET Core 2.0, this step is optional. The dotnet restore command executes automatically when a new project is created.

      +
    • +
    +
    +
  6. +
  7. +

    Run the "Hello World" program:

    +
    +
      +
    • +

      Type dotnet run.

      +
    • +
    +
    +
  8. +
+
+
+
+

Debug

+
+
    +
  1. +

    Open Program.cs by clicking on it. The first time you open a C# file in Visual Studio Code, OmniSharp will load in the editor.

    +
  2. +
  3. +

    Visual Studio Code will prompt you to add the missing assets to build and debug your app. Select Yes.

    +
  4. +
  5. +

    To open the Debug view, click on the Debugging icon on the left side menu.

    +
  6. +
  7. +

    Locate the green arrow at the top of the pane. Make sure the drop-down next to it has .NET Core Launch (console) selected.

    +
  8. +
  9. +

    Add a breakpoint to your project by clicking on the editor margin (the space on the left of the line numbers in the editor).

    +
  10. +
  11. +

    Select F5 or the green arrow to start debugging. The debugger stops execution of your program when it reaches the breakpoint you set in the previous step.

    +
    +
      +
    • +

      While debugging you can view your local variables in the top left pane or use the debug console.

      +
    • +
    +
    +
  12. +
  13. +

    Select the green arrow at the top to continue debugging, or select the red square at the top to stop.

    +
  14. +
+
+
+
+

==

+
+

For more information and troubleshooting tips on .NET Core debugging with OmniSharp in Visual Studio Code, see Instructions for setting up the .NET Core debugger. +== ==

+
+
+ +
+
+
+

Packages

+
+ +
+

Packages

+ +
+
+

Packages overview

+
+ + + + + +
+ + +devon4Net is composed of a number of packages that increase the functionality and boost development time. Each package has its own configuration to make them work properly. In appsettings.json set up your environment. On appsettings.{environment}.json you can configure each component. +
+
+
+
+

The packages

+
+

You can get the devon4Net packages on nuget.org.

+
+
+
+

Devon4Net.Application.WebAPI.Configuration

+ +
+
+

== Description

+
+

The devon4Net web API configuration core.

+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package Devon4Net.Application.WebAPI.Configuration
    +
    +
    +
  • +
+
+
+
+

== Default configuration values

+
+
+
  "devonfw": {
+    "UseDetailedErrorsKey": true,
+    "UseIIS": false,
+    "UseSwagger": true,
+    "Environment": "Development",
+    "KillSwitch": {
+      "killSwitchSettingsFile": "killswitch.appsettings.json"
+    },
+    "Kestrel": {
+      "UseHttps": true,
+      "HttpProtocol": "Http2", //Http1, Http2, Http1AndHttp2, none
+      "ApplicationPort": 8082,
+      "KeepAliveTimeout": 120, //in seconds
+      "MaxConcurrentConnections": 100,
+      "MaxConcurrentUpgradedConnections": 100,
+      "MaxRequestBodySize": 28.6, //In MB. The default maximum request body size is 30,000,000 bytes, which is approximately 28.6 MB
+      "Http2MaxStreamsPerConnection": 100,
+      "Http2InitialConnectionWindowSize": 131072, // From 65,535 and less than 2^31 (2,147,483,648)
+      "Http2InitialStreamWindowSize": 98304, // From 65,535 and less than 2^31 (2,147,483,648)
+      "AllowSynchronousIO": true,
+      "SslProtocol": "Tls12", //Tls, Tls11,Tls12, Tls13, Ssl2, Ssl3, none. For Https2 Tls12 is needed
+      "ServerCertificate": {
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost"
+      },
+      "ClientCertificate": {
+        "DisableClientCertificateCheck": true,
+        "RequireClientCertificate": false,
+        "CheckCertificateRevocation": true,
+        "ClientCertificates": {
+          "Whitelist": [
+            "3A87A49460E8FE0E2A198E63D408DC58435BC501"
+          ],
+          "DisableClientCertificateCheck": false
+        }
+      }
+    },
+    "IIS": {
+      "ForwardClientCertificate": true,
+      "AutomaticAuthentication": true,
+      "AuthenticationDisplayName" : ""
+    }
+  }
+
+
+
+
+

Devon4Net.Infrastructure.CircuitBreaker

+ +
+
+

== Description

+
+

The Devon4Net.Infrastructure.CircuitBreaker component implements the retry pattern for HTTP/HTTPS calls.

+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package Devon4Net.Infrastructure.CircuitBreaker
    +
    +
    +
  • +
+
+
+
+

== Default configuration values

+
+
+
  "CircuitBreaker": {
+    "CheckCertificate": true,
+    "Endpoints": [
+      {
+        "Name": "SampleService",
+        "BaseAddress": "https://localhost:5001/",
+        "Headers": {
+        },
+        "WaitAndRetrySeconds": [
+          0.0001,
+          0.0005,
+          0.001
+        ],
+        "DurationOfBreak": 0.0005,
+        "UseCertificate": true,
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost",
+        "SslProtocol": "3072" //TLS12
+      }
+    ]
+  }
+
+
+
+

|== == == == == == == == == == == = +|Property|Description +|CheckCertificate| True if HTTPS is required. This is useful when developing: an API Gateway needs a secured HTTP; disabling this on development we can use communications with a valid server certificate +|Endpoints| Array with predefined sites to connect with +|Name| The name key to identify the destination URL +|Headers| Not ready yet +|WaitAndRetrySeconds| Array which determines the number of retries and the lapse period between each retry. The value is in milliseconds. +|Certificate| Certificate client to use to perform the HTTP call +|SslProtocol| The secure protocol to use on the call +|== == == == == == == == == == == =

+
+
+
+

== Protocols

+
+

|== == == == == == == == == == == = +|Protocol|Key|Description +|SSl3|48| Specifies the Secure Socket Layer (SSL) 3.0 security protocol. SSL 3.0 has been superseded by the Transport Layer Security (TLS) protocol and is provided for backward compatibility only. +|TLS|192|Specifies the Transport Layer Security (TLS) 1.0 security protocol. The TLS 1.0 protocol is defined in IETF RFC 2246. +|TLS11|768| Specifies the Transport Layer Security (TLS) 1.1 security protocol. The TLS 1.1 protocol is defined in IETF RFC 4346. On Windows systems, this value is supported starting with Windows 7. +|TLS12|3072| Specifies the Transport Layer Security (TLS) 1.2 security protocol. The TLS 1.2 protocol is defined in IETF RFC 5246. On Windows systems, this value is supported starting with Windows 7. +|TLS13|12288| Specifies the TLS 1.3 security protocol. The TLS protocol is defined in IETF RFC 8446.

+
+
+

|== == == == == == == == == == == =

+
+
+
+

== Usage

+
+

Add the circuit breaker instance via dependency injection. For example:

+
+
+
+
    public class FooService : Service<TodosContext>, ILoginService
+    {
+ public FooService(IUnitOfWork<AUTContext> uoW,  ICircuitBreakerHttpClient circuitBreakerClient,
+            ILogger<LoginService> logger) : base(uoW)
+        {
+        ...
+        }
+    }
+
+
+
+

At this point you can use the circuit breaker functionality in your code.

+
+
+

To perform a POST call you should use your circuit breaker instance as follows:

+
+
+
+
await circuitBreakerClient.PostAsync<YourOutputClass>(NameOftheService, EndPoint, InputData, MediaType.ApplicationJson).ConfigureAwait(false);
+
+
+
+

Where:

+
+
+

|== == == == == == == == == == == = +|Property|Description +|YourOutputClass| The type of the class that you are expecting to retrieve from the POST call +|NameOftheService| The key name of the endpoint provided in the appsettings.json file at Endpoints[] node +|EndPoint|Part of the url to use with the base address. PE: /validate +|InputData| Your instance of the class with values that you want to use in the POST call +|MediaType.ApplicationJson| The media type flag for the POST call +|== == == == == == == == == == == =

+
+
+
+

devon4Net.Domain.UnitOfWork

+ +
+
+

== Description

+
+

Unit of work implementation for devon4net solution. This unit of work provides the different methods to access the data layer with an atomic context. Sync and Async repository operations are provided. Customized Eager Loading method also provided for custom entity properties.

+
+
+ + + + + +
+ + +This component will move on next releases to Infrastructure instead of being part of Domain components +
+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Domain.UnitOfWork
    +
    +
    +
  • +
  • +

    Adding the database connection information:

    +
  • +
+
+
+

Add the database connection on the SetupDatabase method at Startup.cs

+
+
+
+
       private void SetupDatabase(IServiceCollection services)
+        {
+            services.SetupDatabase<TodoContext>(Configuration, "Default", WebAPI.Configuration.Enums.DatabaseType.InMemory);
+        }
+
+
+
+

Where:

+
+
+

|== == == == == == == == == == == = +|Param|Description +|TodoContext| Is the database context definition +|Default| Is the connection string defined at ConnectionString node at the appsettings configuration file +|WebAPI.Configuration.Enums.DatabaseType.InMemory| Is the database driver selection. In this case InMemory data base is chosen +|== == == == == == == == == == == =

+
+
+

The supported databases are:

+
+
+
    +
  • +

    SqlServer

    +
  • +
  • +

    Sqlite

    +
  • +
  • +

    InMemory

    +
  • +
  • +

    Cosmos

    +
  • +
  • +

    PostgreSQL

    +
  • +
  • +

    MySql

    +
  • +
  • +

    MariaDb

    +
  • +
  • +

    FireBird

    +
  • +
  • +

    Oracle

    +
  • +
  • +

    MSAccess

    +
  • +
+
+
+
+

== Notes

+
+

Now you can use the unit of work via dependency injection on your classes:

+
+
+
+UOW `DI` Sample +
+
Figure 8. Use of Unit of work via dependency injection
+
+
+

As you can see in the image, you can use Unit Of Work class with your defined ModelContext classes.

+
+
+

Predicate expression builder

+
+
+
    +
  • +

    Use this expression builder to generate lambda expressions dynamically.

    +
    +
    +
    var predicate =  PredicateBuilder.True<T>();
    +
    +
    +
  • +
+
+
+

Where T is a class. At this moment, you can build your expression and apply it to obtain your results in an efficient way and not retrieving data each time you apply an expression.

+
+
+
    +
  • +

    Example from My Thai Star .Net Core implementation:

    +
  • +
+
+
+
+
public async Task<PaginationResult<Dish>> GetpagedDishListFromFilter(int currentpage, int pageSize, bool isFav, decimal maxPrice, int minLikes, string searchBy, IList<long> categoryIdList, long userId)
+{
+    var includeList = new List<string>{"DishCategory","DishCategory.IdCategoryNavigation", "DishIngredient","DishIngredient.IdIngredientNavigation","IdImageNavigation"};
+
+    //Here we create our predicate builder
+    var dishPredicate = PredicateBuilder.True<Dish>();
+
+
+    //Now we start applying the different criteria:
+    if (!string.IsNullOrEmpty(searchBy))
+    {
+        var criteria = searchBy.ToLower();
+        dishPredicate = dishPredicate.And(d => d.Name.ToLower().Contains(criteria) || d.Description.ToLower().Contains(criteria));
+    }
+
+    if (maxPrice > 0) dishPredicate = dishPredicate.And(d=>d.Price<=maxPrice);
+
+    if (categoryIdList.Any())
+    {
+        dishPredicate = dishPredicate.And(r => r.DishCategory.Any(a => categoryIdList.Contains(a.IdCategory)));
+    }
+
+    if (isFav && userId >= 0)
+    {
+        var favourites = await UoW.Repository<UserFavourite>().GetAllAsync(w=>w.IdUser ==  userId);
+        var dishes = favourites.Select(s => s.IdDish);
+        dishPredicate = dishPredicate.And(r=> dishes.Contains(r.Id));
+    }
+
+    // Now we can use the predicate to retrieve data from database with just one call
+    return await UoW.Repository<Dish>().GetAllIncludePagedAsync(currentpage, pageSize, includeList, dishPredicate);
+
+}
+
+
+
+
+

devon4Net.Infrastructure.Extensions

+ +
+
+

== Description

+
+

Miscellaneous extension library which contains : +- Predicate expression builder +- DateTime formatter +- HttpClient +- HttpContext (Middleware support)

+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.Extensions
    +
    +
    +
  • +
+
+
+

HttpContext

+
+
+
    +
  • +

    TryAddHeader method is used on devon4Net.Infrastructure.Middleware component to add automatically response header options such authorization.

    +
  • +
+
+
+
+

devon4Net.Infrastructure.JWT

+ +
+
+

== Description

+
+
+
+

JSON Web Token (JWT) is an open standard (RFC 7519) that defines a compact and self-contained way for securely transmitting information between parties as a JSON object. This information can be verified and trusted because it is digitally signed. JWTs can be signed using a secret (with the `HMAC` algorithm) or a public/private key pair using RSA or ECDSA.

+
+
+
+— What is JSON Web Token?
+https://jwt.io/introduction/ +
+
+
+
    +
  • +

    devon4Net component to manage JWT standard to provide security to .Net API applications.

    +
  • +
+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> devon4Net.Infrastructure.JWT
    +
    +
    +
  • +
+
+
+
+

== Default configuration values

+
+
+
"JWT": {
+    "Audience": "devon4Net",
+    "Issuer": "devon4Net",
+    "TokenExpirationTime": 60,
+    "ValidateIssuerSigningKey": true,
+    "ValidateLifetime": true,
+    "ClockSkew": 5,
+    "Security": {
+      "SecretKeyLengthAlgorithm": "",
+      "SecretKeyEncryptionAlgorithm": "",
+      "SecretKey": "",
+      "Certificate": "",
+      "CertificatePassword": "",
+      "CertificateEncryptionAlgorithm": ""
+    }
+  }
+
+
+
+
    +
  • +

    ClockSkew indicates the allowed clock skew, in minutes, applied when validating the token expiration time (TokenExpirationTime indicates the token expiration time in minutes)

    +
  • +
  • +

    Certificate you can specify the name of your certificate (if it is on the same path) or the full path of the certificate. If the certificate does not exist, an exception will be raised.

    +
  • +
  • +

    SecretKeyLengthAlgorithm, SecretKeyEncryptionAlgorithm and CertificateEncryptionAlgorithm supported algorithms are:

    +
  • +
+
+
+

|== == == == == == == == == == == = +|Algorithm|Description +|Aes128Encryption|"http://www.w3.org/2001/04/xmlenc#aes128-cbc" +|Aes192Encryption|"http://www.w3.org/2001/04/xmlenc#aes192-cbc" +|Aes256Encryption|"http://www.w3.org/2001/04/xmlenc#aes256-cbc" +|DesEncryption|"http://www.w3.org/2001/04/xmlenc#des-cbc" +|Aes128KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes128" +|Aes192KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes192" +|Aes256KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes256" +|RsaV15KeyWrap|"http://www.w3.org/2001/04/xmlenc#rsa-1_5" +|Ripemd160Digest|"http://www.w3.org/2001/04/xmlenc#ripemd160" +|RsaOaepKeyWrap|"http://www.w3.org/2001/04/xmlenc#rsa-oaep" +|Aes128KW|"A128KW" +|Aes256KW|"A256KW" +|RsaPKCS1|"RSA1_5" +|RsaOAEP|"RSA-OAEP" +|ExclusiveC14n|"http://www.w3.org/2001/10/xml-exc-c14n#" +|ExclusiveC14nWithComments|"http://www.w3.org/2001/10/xml-exc-c14n#WithComments" +|EnvelopedSignature|"http://www.w3.org/2000/09/xmldsig#enveloped-signature" +|Sha256Digest|"http://www.w3.org/2001/04/xmlenc#sha256" +|Sha384Digest|"http://www.w3.org/2001/04/xmldsig-more#sha384" +|Sha512Digest|"http://www.w3.org/2001/04/xmlenc#sha512" +|Sha256|"SHA256" +|Sha384|"SHA384" +|Sha512|"SHA512" +|EcdsaSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha256" +|EcdsaSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha384" +|EcdsaSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha512" +|HmacSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha256" +|HmacSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha384" +|HmacSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha512" +|RsaSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha256" +|RsaSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha384" +|RsaSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha512" +|RsaSsaPssSha256Signature|"http://www.w3.org/2007/05/xmldsig-more#sha256-rsa-MGF1" 
+|RsaSsaPssSha384Signature|"http://www.w3.org/2007/05/xmldsig-more#sha384-rsa-MGF1" +|RsaSsaPssSha512Signature|"http://www.w3.org/2007/05/xmldsig-more#sha512-rsa-MGF1" +|EcdsaSha256|"ES256" +|EcdsaSha384|"ES384" +|EcdsaSha512|"ES512" +|HmacSha256|"HS256" +|HmacSha384|"HS384" +|HmacSha512|"HS512" +|None|"none" +|RsaSha256|"RS256" +|RsaSha384|"RS384" +|RsaSha512|"RS512" +|RsaSsaPssSha256|"PS256" +|RsaSsaPssSha384|"PS384" +|RsaSsaPssSha512|"PS512" +|Aes128CbcHmacSha256|"A128CBC-HS256" +|Aes192CbcHmacSha384|"A192CBC-HS384" +|Aes256CbcHmacSha512|"A256CBC-HS512" +|== == == == == == == == == == == =

+
+
+ + + + + +
+ + +Please check Microsoft documentation to get the latest updates on supported encryption algorithms +
+
+
+
    +
  • +

    Add this line of code (only if you use this component stand alone):

    +
  • +
+
+
+
+
services.AddBusinessCommonJwtPolicy();
+
+
+
+

On

+
+
+
+
Startup.cs
+
+
+
+

or on:

+
+
+
+
devon4Net.Application.Configuration.Startup/JwtApplicationConfiguration/ConfigureJwtPolicy method.
+
+
+
+
    +
  • +

    Inside the AddBusinessCommonJwtPolicy method you can add your JWT Policy like in My Thai Star application sample:

    +
  • +
+
+
+
+
 services.ConfigureJwtAddPolicy("MTSWaiterPolicy", "role", "waiter");
+
+
+
+
+

== Notes

+
+
    +
  • +

    The certificate will be used to generate the key to encrypt the json web token.

    +
  • +
+
+
+
+

devon4Net.Infrastructure.Middleware

+ +
+
+

== Description

+
+
    +
  • +

    devon4Net support for middleware classes.

    +
  • +
  • +

    In ASP.NET Core, middleware classes can handle an HTTP request or response. Middleware can either:

    +
    +
      +
    • +

      Handle an incoming HTTP request by generating an HTTP response.

      +
    • +
    • +

      Process an incoming HTTP request, modify it, and pass it on to another piece of middleware.

      +
    • +
    • +

      Process an outgoing HTTP response, modify it, and pass it on to either another piece of middleware, or the ASP.NET Core web server.

      +
    • +
    +
    +
  • +
  • +

    devon4Net supports the following automatic response headers:

    +
    +
      +
    • +

      AccessControlExposeHeader

      +
    • +
    • +

      StrictTransportSecurityHeader

      +
    • +
    • +

      XFrameOptionsHeader

      +
    • +
    • +

      XssProtectionHeader

      +
    • +
    • +

      XContentTypeOptionsHeader

      +
    • +
    • +

      ContentSecurityPolicyHeader

      +
    • +
    • +

      PermittedCrossDomainPoliciesHeader

      +
    • +
    • +

      ReferrerPolicyHeader:toc: macro

      +
    • +
    +
    +
  • +
+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.Middleware
    +
    +
    +
  • +
  • +

    You can configure your Middleware configuration on appsettings.{environment}.json:

    +
  • +
+
+
+
+
"Middleware": {
+    "Headers": {
+      "AccessControlExposeHeader": "Authorization",
+      "StrictTransportSecurityHeader": "",
+      "XFrameOptionsHeader": "DENY",
+      "XssProtectionHeader": "1;mode=block",
+      "XContentTypeOptionsHeader": "nosniff",
+      "ContentSecurityPolicyHeader": "",
+      "PermittedCrossDomainPoliciesHeader": "",
+      "ReferrerPolicyHeader": ""
+    }
+}
+
+
+
+
    +
  • +

    On the above sample, the server application will add to response header the AccessControlExposeHeader, XFrameOptionsHeader, XssProtectionHeader and XContentTypeOptionsHeader headers.

    +
  • +
  • +

    If the header response type does not have a value, it will not be added to the response headers.

    +
  • +
+
+
+
+

devon4Net.Infrastructure.Swagger

+ +
+
+

== Description

+
+
    +
  • +

    devon4net Swagger abstraction to provide full externalized easy configuration.

    +
  • +
  • +

    Swagger offers the easiest to use tools to take full advantage of all the capabilities of the OpenAPI Specification (OAS).

    +
  • +
+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> devon4Net.Infrastructure.Swagger
    +
    +
    +
  • +
  • +

    You can configure your Swagger configuration on appsettings.{environment}.json:

    +
  • +
+
+
+
+
"Swagger": {
+    "Version": "v1",
+    "Title": "devon4net API",
+    "Description": "devon4net API Contract",
+    "Terms": "https://www.devonfw.com/terms-of-use/",
+    "Contact": {
+      "Name": "devonfw",
+      "Email": "sample@mail.com",
+      "Url": "https://www.devonfw.com"
+    },
+    "License": {
+      "Name": "devonfw - Terms of Use",
+      "Url": "https://www.devonfw.com/terms-of-use/"
+    },
+    "Endpoint": {
+      "Name": "V1 Docs",
+      "Url": "/swagger/v1/swagger.json",
+      "UrlUi": "swagger",
+      "RouteTemplate": "swagger/v1/{documentName}/swagger.json"
+    }
+  }
+
+
+
+
    +
  • +

    Add this line of code (only if you use this component stand alone):

    +
  • +
+
+
+
+
services.ConfigureSwaggerService();
+
+
+
+

On

+
+
+
+
Startup.cs
+
+
+
+
    +
  • +

    Also add this line of code (only if you use this component stand alone):

    +
  • +
+
+
+
+
app.ConfigureSwaggerApplication();
+
+
+
+

On

+
+
+
+
Startup.cs/Configure(IApplicationBuilder app, IHostingEnvironment env)
+
+
+
+
    +
  • +

    Ensure your API actions and non-route parameters are decorated with explicit "Http" and "From" bindings.

    +
  • +
+
+
+
+

== Notes

+
+
    +
  • +

    To access the Swagger UI, launch your API project and type the url http://localhost:yourPort/swagger in your browser.

    +
  • +
  • +

    In order to generate the documentation, annotate your actions with summary, remarks and response tags:

    +
  • +
+
+
+
+
/// <summary>
+/// Method to make a reservation with potential guests. The method returns the reservation token with the format: {(CB_|GB_)}{now.Year}{now.Month:00}{now.Day:00}{_}{MD5({Host/Guest-email}{now.Year}{now.Month:00}{now.Day:00}{now.Hour:00}{now.Minute:00}{now.Second:00})}
+/// </summary>
+/// <param name="bookingDto"></param>
+/// <response code="201">Ok.</response>
+/// <response code="400">Bad request. Parser data error.</response>
+/// <response code="401">Unauthorized. Authentication fail.</response>
+/// <response code="403">Forbidden. Authorization error.</response>
+/// <response code="500">Internal Server Error. The search process ended with error.</response>
+[HttpPost]
+[HttpOptions]
+[Route("/mythaistar/services/rest/bookingmanagement/v1/booking")]
+[AllowAnonymous]
+[EnableCors("CorsPolicy")]
+public async Task<IActionResult> Booking([FromBody]BookingDto bookingDto)
+{
+    try
+    {
+
+    ...
+
+
+
+
    +
  • +

    Ensure that your project has the generateXMLdocumentationfile check active on build menu:

    +
  • +
+
+
+
+Generate documentation XML check +
+
Figure 9. Swagger documentation
+
+
+
    +
  • +

    Ensure that your XML files has the attribute copy always to true:

    +
  • +
+
+
+
+Generate documentation XML check +
+
Figure 10. Swagger documentation
+
+
+
+

devon4Net.Infrastructure.Test

+ +
+
+

== Description

+
+

devon4Net Base classes to create unit tests and integration tests with Moq and xUnit.

+
+
+
+

== Configuration

+
+
    +
  • +

    Load the template: +> dotnet new -i devon4Net.Test.Template +> dotnet new devon4NetTest

    +
  • +
+
+
+
+

== Notes

+
+
    +
  • +

    At this point you can find these classes:

    +
    +
      +
    • +

      BaseManagementTest

      +
    • +
    • +

      DatabaseManagementTest<T> (Where T is a devon4NetBaseContext class)

      +
    • +
    +
    +
  • +
  • +

    For unit testing, inherit a class from BaseManagementTest.

    +
  • +
  • +

    For integration tests, inherit a class from DatabaseManagementTest.

    +
  • +
  • +

    The recommended databases in integration test are in memory database or SQlite database.

    +
  • +
  • +

    Please check My thai Star test project.

    +
  • +
+
+
+
+

Deprecated packages

+ +
+
+

devon4Net.Domain.Context

+ +
+
+

== Description

+
+

devon4Net.Domain.Context contains the extended class devon4NetBaseContext in order to make easier the process of having a model context configured against different database engines. This configuration allows an easier testing configuration against local and in memory databases.

+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Domain.Context
    +
    +
    +
  • +
  • +

    Add to appsettings.{environment}.json file your database connections:

    +
  • +
+
+
+
+
"ConnectionStrings":
+{
+"DefaultConnection":
+"Server=localhost;Database=MyThaiStar;User Id=sa;Password=sa;MultipleActiveResultSets=True;",
+
+"AuthConnection":
+"Server=(localdb)\\mssqllocaldb;Database=aspnet-DualAuthCore-5E206A0B-D4DA-4E71-92D3-87FD6B120C5E;Trusted_Connection=True;MultipleActiveResultSets=true",
+
+"SqliteConnection": "Data Source=c:\\tmp\\membership.db;"
+}
+
+
+
+
    +
  • +

    On Startup.cs :

    +
  • +
+
+
+
+
void ConfigureServices(IServiceCollection services)
+
+
+
+
    +
  • +

    Add your database connections defined on previous point:

    +
  • +
+
+
+
+
services.ConfigureDataBase(
+new Dictionary<string, string> {
+{ConfigurationConst.DefaultConnection, Configuration.GetConnectionString(ConfigurationConst.DefaultConnection) }});
+
+
+
+
    +
  • +

    On devon4Net.Application.Configuration.Startup/DataBaseConfiguration/ConfigureDataBase configure your connections.

    +
  • +
+
+
+
+

devon4Net.Infrastructure.ApplicationUser

+ +
+
+

== Description

+
+

devon4Net Application user classes to implement Microsoft’s basic authentication in order to be used with authentication methodologies such as JSON Web Token (JWT).

+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> devon4Net.Infrastructure.ApplicationUser
    +
    +
    +
  • +
  • +

    Add the database connection string for user management on appsettings.{environment}.json:

    +
  • +
+
+
+
+
"ConnectionStrings":
+{
+"AuthConnection":
+"Server=(localdb)\\mssqllocaldb;Database=aspnet-DualAuthCore-5E206A0B-D4DA-4E71-92D3-87FD6B120C5E;Trusted_Connection=True;MultipleActiveResultSets=true"
+}
+
+
+
+
    +
  • +

    Add the following line of code

    +
  • +
+
+
+
+
services.AddApplicationUserDependencyInjection();
+
+
+
+

On

+
+
+
+
Startup.cs/ConfigureServices(IServiceCollection services)
+
+
+
+

or on:

+
+
+
+
devon4Net.Application.Configuration.Startup/DependencyInjectionConfiguration/ConfigureDependencyInjectionService method.
+
+
+
+
    +
  • +

    Add the data seeder on Configure method on start.cs class:

    +
  • +
+
+
+
+
public void Configure(IApplicationBuilder app, IHostingEnvironment env, DataSeeder seeder)
+{
+    ...
+
+    app.UseAuthentication();
+    seeder.SeedAsync().Wait();
+
+    ...
+}
+
+
+
+
+

== Notes

+
+
    +
  • +

    You can use the following methods to set up the database configuration:

    +
  • +
+
+
+
+
public static void AddApplicationUserDbContextInMemoryService(this IServiceCollection services)
+
+public static void AddApplicationUserDbContextSQliteService(this IServiceCollection services, string connectionString)
+
+public static void AddApplicationUserDbContextSQlServerService(this IServiceCollection services, string connectionString)
+
+
+
+
    +
  • +

    The method AddApplicationUserDbContextInMemoryService uses the AuthContext connection string name to set up the database.

    +
  • +
  • +

    This component is used with the components devon4Net.Infrastructure.JWT and devon4Net.Infrastructure.JWT.MVC.

    +
  • +
+
+
+
+

devon4Net.Infrastructure.Communication

+ +
+
+

== Description

+
+

Basic client classes to invoke GET/POST methods asynchronously. This component has the minimal classes to send basic data. For more complex operations please use ASP4Net.Infrastructure.Extensions.

+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> devon4Net.Infrastructure.Communication
    +
    +
    +
  • +
  • +

    Create an instance of RestManagementService class.

    +
  • +
  • +

    Use next methods to use GET/POST basic options:

    +
  • +
+
+
+
+
public Task<string> CallGetMethod(string url);
+public Task<Stream> CallGetMethodAsStream(string url);
+public Task<string> CallPostMethod<T>(string url, T dataToSend);
+public Task<string> CallPutMethod<T>(string url, T dataToSend);
+
+
+
+
+

== Notes

+
+
    +
  • +

    Example:

    +
  • +
+
+
+
+
private async Task RestManagementServiceSample(EmailDto dataToSend)
+{
+    var url = Configuration["EmailServiceUrl"];
+    var restManagementService = new RestManagementService();
+    await restManagementService.CallPostMethod(url, dataToSend);
+}
+
+
+
+
+

devon4Net.Infrastructure.JWT.MVC

+ +
+
+

== Description

+
+
    +
  • +

    devon4Net Extended controller to interact with JWT features

    +
  • +
+
+
+
+

== Configuration

+
+
    +
  • +

    Extend your Microsoft.AspNetCore.Mvc.Controller class with the devon4NetJWTController class:

    +
  • +
+
+
+
+
public class LoginController : devon4NetJWTController
+{
+    private readonly ILoginService _loginService;
+
+    public LoginController(ILoginService loginService,  SignInManager<ApplicationUser>  signInManager, UserManager<ApplicationUser> userManager, ILogger<LoginController> logger, IMapper mapper) : base(logger,mapper)
+    {
+        _loginService = loginService;
+    }
+
+    ....
+
+
+
+
+

== Notes

+
+
    +
  • +

    In order to generate a JWT, you should implement the JWT generation on user login. For example, in My Thai Star is created as follows:

    +
  • +
+
+
+
+
public async Task<IActionResult> Login([FromBody]LoginDto loginDto)
+{
+    try
+    {
+        if (loginDto ==  null) return Ok();
+        var logged = await _loginService.LoginAsync(loginDto.UserName, loginDto.Password);
+
+        if (logged)
+        {
+            var user = await _loginService.GetUserByUserNameAsync(loginDto.UserName);
+
+            var encodedJwt = new JwtClientToken().CreateClientToken(_loginService.GetUserClaimsAsync(user));
+
+            Response.Headers.Add("Access-Control-Expose-Headers", "Authorization");
+
+            Response.Headers.Add("Authorization", $"{JwtBearerDefaults.AuthenticationScheme} {encodedJwt}");
+
+            return Ok(encodedJwt);
+        }
+        else
+        {
+            Response.Headers.Clear();
+            return StatusCode((int)HttpStatusCode.Unauthorized, "Login Error");
+        }
+
+    }
+    catch (Exception ex)
+    {
+        return StatusCode((int)HttpStatusCode.InternalServerError, $"{ex.Message} : {ex.InnerException}");
+    }
+}
+
+
+
+
    +
  • +

    In My Thai Star the JWT will contain the user information such as id, roles…

    +
  • +
  • +

    Once you extend your controller with devon4NetJWTController you will have available these methods to simplify user management:

    +
  • +
+
+
+
+
    public interface Idevon4NetJWTController
+    {
+        // Gets the current user
+        JwtSecurityToken GetCurrentUser();
+
+        // Gets an specific assigned claim of current user
+        Claim GetUserClaim(string claimName, JwtSecurityToken jwtUser = null);
+
+        // Gets all the assigned claims of current user
+        IEnumerable<Claim> GetUserClaims(JwtSecurityToken jwtUser = null);
+    }
+
+
+
+
+

devon4Net.Infrastructure.MVC

+ +
+
+

== Description

+
+

Common classes to extend controller functionality on API. Also provides support for paged results in devon4Net applications and automapper injected class.

+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> devon4Net.Infrastructure.MVC
    +
    +
    +
  • +
+
+
+
+

== Notes

+
+
    +
  • +

    The generic class ResultObjectDto<T> provides a typed result object with pagination.

    +
  • +
  • +

    The extended class provides the following methods:

    +
  • +
+
+
+
+
        ResultObjectDto<T> GenerateResultDto<T>(int? page, int? size, int? total);
+        ResultObjectDto<T> GenerateResultDto<T>(List<T> result, int? page = null, int? size = null);
+
+
+
+
    +
  • +

    GenerateResultDto provides a typed ResultObjectDto object or a list of typed ResultObjectDto objects. The aim of these methods is to provide clean management of result objects and avoid repeating code through the different controller classes.

    +
  • +
  • +

    The following sample from My Thai Star shows how to use it:

    +
  • +
+
+
+
+
public async Task<IActionResult> Search([FromBody] FilterDtoSearchObject filterDto)
+{
+    if (filterDto ==  null) filterDto = new FilterDtoSearchObject();
+
+    try
+    {
+        var dishList = await _dishService.GetDishListFromFilter(false, filterDto.GetMaxPrice(), filterDto.GetMinLikes(), filterDto.GetSearchBy(),filterDto.GetCategories(), -1);
+
+
+        return new OkObjectResult(GenerateResultDto(dishList).ToJson());
+    }
+    catch (Exception ex)
+    {
+        return StatusCode((int)HttpStatusCode.InternalServerError, $"{ex.Message} : {ex.InnerException}");
+    }
+}
+
+
+
+
+

devon4Net.Infrastructure.AOP

+ +
+
+

== Description

+
+

Simple AOP Exception handler for .Net Controller classes integrated with Serilog.

+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Domain.AOP
    +
    +
    +
  • +
+
+
+

Add this line of code on ConfigureServices method on Startup.cs

+
+
+
+
services.AddAopAttributeService();
+
+
+
+
+

== Notes

+
+

Now your API methods exposed on controller classes will automatically be tracked on the methods:

+
+
+
    +
  • +

    OnActionExecuting

    +
  • +
  • +

    OnActionExecuted

    +
  • +
  • +

    OnResultExecuting

    +
  • +
  • +

    OnResultExecuted

    +
  • +
+
+
+

If an exception occurs, a message will be displayed on log with the stack trace.

+
+
+
+

devon4Net.Infrastructure.Cors

+ +
+
+

== Description

+
+

Enables CORS configuration for the devon4Net application. Multiple domains can be set up from configuration. Mandatory for web clients (e.g. Angular) that make AJAX requests to another domain.

+
+
+

Cross-Origin Resource Sharing (CORS) is a mechanism that uses additional HTTP headers to tell a browser to let a web application running at one origin (domain) have permission to access selected resources from a server at a different origin. A web application makes a cross-origin HTTP request when it requests a resource that has a different origin (domain, protocol, and port) than its own origin.

+
+
+

Please refer to this link to get more information about CORS and .Net core.

+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> devon4Net.Infrastructure.Cors
    +
    +
    +
  • +
  • +

    You can configure your Cors configuration on appsettings.{environment}.json:

    +
    +
    +
    `CorsPolicy`: indicates the name of the policy. You can use this name to add security headers on your API exposed methods.
    +
    +
    +
    +
    +
    Origins: The allowed domains
    +
    +
    +
    +
    +
    Headers: The allowed headers such accept,content-type,origin,x-custom-header
    +
    +
    +
  • +
  • +

    If you specify the cors configuration as empty array, a default cors-policy will be used with all origins enabled:

    +
  • +
+
+
+
+
  "Cors": []
+
+
+
+
    +
  • +

    On the other hand, you can specify different Cors policies in your solution as follows:

    +
  • +
+
+
+
+
"Cors": []
+[
+  {
+    "CorsPolicy": "CorsPolicy1",
+    "Origins": "http:example.com,http:www.contoso.com",
+    "Headers": "accept,content-type,origin,x-custom-header",
+    "Methods": "GET,POST,HEAD",
+    "AllowCredentials": true
+  },
+  {
+    "CorsPolicy": "CorsPolicy2",
+    "Origins": "http:example.com,http:www.contoso.com",
+    "Headers": "accept,content-type,origin,x-custom-header",
+    "Methods": "GET,POST,HEAD",
+    "AllowCredentials": true
+  }
+]
+
+
+
+
+

== Notes

+
+
    +
  • +

    To use CORS in your API methods, use the next notation:

    +
  • +
+
+
+
+
[EnableCors("YourCorsPolicy")]
+public IActionResult Index() {
+    return View();
+}
+
+
+
+
    +
  • +

    if you want to disable the CORS check use the following annotation:

    +
  • +
+
+
+
+
[DisableCors]
+public IActionResult Index() {
+    return View();
+}
+
+
+
+ +
+
+
+

Templates

+
+ +
+

== Templates

+
+
+

Overview

+
+

The given .Net Core and .Net Framework templates allow you to start coding an application with the following functionality ready to use:

+
+
+

Please refer to User guide in order to start developing.

+
+
+
+

Net Core 3.0

+
+

The .Net Core 3.0 template allows you to start developing an n-layer server application to provide the latest features. The template can be used in Visual Studio Code and Visual Studio 2019.

+
+
+

The application result can be deployed as a console application, microservice or web page.

+
+
+

To start developing with the devon4Net template, please follow these instructions:

+
+
+
Using devon4Net template
+ +
+
+
Option 1
+
+
    +
  1. +

    Open your favourite terminal (Win/Linux/iOS)

    +
  2. +
  3. +

    Go to future project’s path

    +
  4. +
  5. +

    Type dotnet new --install Devon4Net.WebAPI.Template

    +
  6. +
  7. +

    Type dotnet new Devon4NetAPI

    +
  8. +
  9. +

    Go to project’s path

    +
  10. +
  11. +

    You are ready to start developing with devon4Net

    +
  12. +
+
+
+
+
Option 2
+
+
    +
  1. +

    Create a new dotnet API project from scratch

    +
  2. +
  3. +

    Add the NuGet package reference to your project

    +
  4. +
  5. +

    Type dotnet new --install Devon4Net.WebAPI.Template

    +
  6. +
+
+
+
+
+

Net Core 2.1.x

+
+

The .Net Core 2.1.x template allows you to start developing an n-layer server application to provide the latest features. The template can be used in Visual Studio Code and Visual Studio 2017.

+
+
+

The application result can be deployed as a console application, microservice or web page.

+
+
+

To start developing with the devon4Net template, please follow these instructions:

+
+
+
Using devon4Net template
+
+
    +
  1. +

    Open your favourite terminal (Win/Linux/iOS)

    +
  2. +
  3. +

    Go to future project’s path

    +
  4. +
  5. +

    Type dotnet new --install Devon4Net.WebAPI.Template::1.0.8

    +
  6. +
  7. +

    Type dotnet new Devon4NetAPI

    +
  8. +
  9. +

    Go to project’s path

    +
  10. +
  11. +

    You are ready to start developing with devon4Net

    +
  12. +
+
+
+ + + + + +
+ + +For the latest updates on references packages, please get the sources from Github +
+
+
+
+
+ + +
+
+
+
+

Samples

+
+ +
+

Samples

+ +
+
+

My Thai Star Restaurant

+ +
+
+ +
+
+
+
+

Angular requirements

+
+ +
+
+
+

Angular client

+
+
    +
  1. +

    Install Node.js LTS version

    +
  2. +
  3. +

    Install Angular CLI from command line:

    +
    +
      +
    • +

      npm install -g @angular/cli

      +
    • +
    +
    +
  4. +
  5. +

    Install Yarn

    +
  6. +
  7. +

    Go to Angular client from command line

    +
  8. +
  9. +

    Execute : yarn install

    +
  10. +
  11. +

    Launch the app from command line: ng serve and check http://localhost:4200

    +
  12. +
  13. +

    You are ready

    +
  14. +
+
+
+
+

.Net Core server

+ +
+
+

== Basic architecture details

+
+

Following the devonfw conventions, the .Net Core 2.0 My Thai Star backend is going to be developed by dividing the application into Components and using an n-layer architecture.

+
+
+
+project modules +
+
+
+
+

== Components

+
+

The application is going to be divided in different components to encapsulate the different domains of the application functionalities.

+
+
+
+mtsn components +
+
+
+

As main components we will find:

+
+
+
    +
  • +

    BookingService: Manages the bookings part of the application. With this component the users (anonymous/logged in) can create new bookings or cancel an existing booking. The users with waiter role can see all scheduled bookings.

    +
  • +
+
+
+

OrderService: This component handles the process to order dishes (related to bookings). A user (as a host or as a guest) can create orders (that contain dishes) or cancel an existing one. The users with waiter role can see all ordered orders.

+
+
+
    +
  • +

    DishService: This component groups the logic related to the menu (dishes) view. Its main feature is to provide the client with the data of the available dishes but also can be used by other components (Ordermanagement) as a data provider in some processes.

    +
  • +
  • +

    UserService: Takes care of the User Profile management, allowing to create and update the data profiles.

    +
  • +
+
+
+

As common components (that don’t exactly represent an application’s area but provide functionalities that can be used by the main components):

+
+
+
    +
  • +

    Mailservice: with this service we will provide the functionality for sending email notifications. This is a shared service between different app components such as bookingmanagement or ordercomponent.

    +
  • +
+
+
+

Other components:

+
+
+
    +
  • +

    Security (will manage the access to the private part of the application using a jwt implementation).

    +
  • +
  • +

    Twitter integration: planned as a Microservice will provide the twitter integration needed for some specific functionalities of the application.

    +
  • +
+
+
+
+

Layers

+ +
+
+

== Introduction

+
+

The .Net Core backend for My Thai Star application is going to be based on:

+
+
+
    +
  • +

    devon4NET as the .Net Core framework

    +
  • +
  • +

    VSCode as the Development environment

    +
  • +
  • +

    TOBAGO as code generation tool

    +
  • +
+
+
+
+

== Application layer

+
+

This layer will expose the REST api to exchange information with the client applications.

+
+
+

The application will expose the services on port 8081 and it can be launched as a self host console application (microservice approach) and as a Web Api application hosted on IIS/IIS Express.

+
+
+
+

== Business layer

+
+

This layer will define the controllers which will be used on the application layer to expose the different services. Also, will define the swagger contract making use of summary comments and framework attributes.

+
+
+

This layer also includes the object response classes in order to interact with external clients.

+
+
+
+

== Service layer

+
+

The layer in charge of hosting the business logic of the application. Also orchestrates the object conversion between object response and entity objects defined in Data layer.

+
+
+
+

== Data layer

+
+

The layer to communicate with the data base.

+
+
+

Data layer makes use of Entity Framework. +The Database context is defined on DataAccessLayer assembly (ModelContext).

+
+
+

This layer makes use of the Repository pattern and Unit of work in order to encapsulate the complexity. Making use of this combined patterns we ensure an organized and easy work model.

+
+
+

As in the previous layers, the data access layer will have both interface and implementation tiers. However, in this case, the implementation will be slightly different due to the use of generics.

+
+
+
+

== Cross-Cutting concerns

+
+

The layer that makes use of transversal components such as JWT and mailing.

+
+
+
+

Jwt basics

+
+
    +
  • +

    A user will provide a username / password combination to our auth server.

    +
  • +
  • +

    The auth server will try to identify the user and, if the credentials match, will issue a token.

    +
  • +
  • +

    The user will send the token as the Authorization header to access resources on server protected by JWT Authentication.

    +
  • +
+
+
+
+jwt schema +
+
+
+
+

Jwt implementation details

+
+

The JSON Web Token pattern will be implemented based on the JWT support of the .Net Core framework that is provided by default in the devon4Net projects.

+
+
+
+

Authentication

+
+

Based on Microsoft approach, we will implement a class to define the security entry point and filters. Also, as My Thai Star is a mainly public application, we will define here the resources that won’t be secured.

+
+
+

On devon4Net.Infrastructure.JWT assembly is defined a subset of Microsoft’s authorization schema Database. It is started up the first time the application launches.

+
+
+

You can read more about Authorization on:

+
+ + +
+
+

Dependency injection

+
+

As it is explained in the Microsoft documentation we are going to implement the dependency injection pattern basing our solution on .Net Core.

+
+
+
+dependency injection +
+
+
+
    +
  • +

    Separation of API and implementation: Inside each layer we will separate the elements in different tiers: interface and implementation. The interface tier will store the interface with the methods definition and inside the implementation we will store the class that implements the interface.

    +
  • +
+
+
+
+

Layer communication method

+
+

The connection between layers, to access to the functionalities of each one, will be solved using the dependency injection.

+
+
+
+layer impl +
+
+
+

Connection BookingService - Logic

+
+
+
+
 public class BookingService : EntityService<Booking>, IBookingService
+    {
+        private readonly IBookingRepository _bookingRepository;
+        private readonly IRepository<Order> _orderRepository;
+        private readonly IRepository<InvitedGuest> _invitedGuestRepository;
+        private readonly IOrderLineRepository _orderLineRepository;
+        private readonly IUnitOfWork _unitOfWork;
+
+        public BookingService(IUnitOfWork unitOfWork,
+            IBookingRepository repository,
+            IRepository<Order> orderRepository,
+            IRepository<InvitedGuest> invitedGuestRepository,
+            IOrderLineRepository orderLineRepository) : base(unitOfWork, repository)
+        {
+            _unitOfWork = unitOfWork;
+            _bookingRepository = repository;
+            _orderRepository = orderRepository;
+            _invitedGuestRepository = invitedGuestRepository;
+            _orderLineRepository = orderLineRepository;
+        }
+}
+
+
+
+

To give service to the defined User Stories we will need to implement the following services:

+
+
+
    +
  • +

    provide all available dishes.

    +
  • +
  • +

    save a booking.

    +
  • +
  • +

    save an order.

    +
  • +
  • +

    provide a list of bookings (only for waiters) and allow filtering.

    +
  • +
  • +

    provide a list of orders (only for waiters) and allow filtering.

    +
  • +
  • +

    login service (see the Security section).

    +
  • +
  • +

    provide the current user data (see the Security section)

    +
  • +
+
+
+

Following the naming conventions proposed for devon4Net applications we will define the following end points for the listed services.

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that does not belong to the Order entity).

    +
  • +
  • +

    (POST) /mythaistar/login.

    +
  • +
  • +

    (GET) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

You can find all the details for the services implementation in the Swagger definition included in the My Thai Star project on Github.

+
+
+
+

Api Exposed

+
+

The devon4Net.Business.Controller assembly in the business layer of a component will store the definition of the service by an interface. In this definition of the service we will set up the endpoints of the service, the type of data expected and returned, the HTTP method for each endpoint of the service and other configurations if needed.

+
+
+
+
        /// <summary>
+        /// Method to make a reservation with potential guests. The method returns the reservation token with the format: {(CB_|GB_)}{now.Year}{now.Month:00}{now.Day:00}{_}{MD5({Host/Guest-email}{now.Year}{now.Month:00}{now.Day:00}{now.Hour:00}{now.Minute:00}{now.Second:00})}
+        /// </summary>
+
+        /// <param name="bookingView"></param>
+        /// <response code="201">Ok.</response>
+        /// <response code="400">Bad request. Parser data error.</response>
+        /// <response code="401">Unauthorized. Authentication fail.</response>
+        /// <response code="403">Forbidden. Authorization error.</response>
+        /// <response code="500">Internal Server Error. The search process ended with error.</response>
+        [HttpPost]
+        [HttpOptions]
+        [Route("/mythaistar/services/rest/bookingmanagement/v1/booking")]
+        [AllowAnonymous]
+        [EnableCors("CorsPolicy")]
+        public IActionResult BookingBooking([FromBody]BookingView bookingView)
+        {
+...
+
+
+
+

Using the summary annotations and attributes will tell to swagger the contract via the XML doc generated on compiling time. This doc will be stored in XmlDocumentation folder.

+
+
+

The Api methods will be exposed on the application layer.

+
+
+
+

Google Mail API Consumer

+ +
+

|== == == == == == == == == == == = +|Application| MyThaiStarEmailService.exe +|Config file| MyThaiStarEmailService.exe.Config +|Default port|8080 +|== == == == == == == == == == == =

+
+
+
+

Overview

+
+
    +
  1. +

    Execute MyThaiStarEmailService.exe.

    +
  2. +
  3. +

    The first time google will ask you for credentials +(just one time) in your default browser:

    +
    + +
    +
  4. +
  5. +

    Visit the url: http://localhost:8080/swagger

    +
  6. +
  7. +

    Your server is ready!

    +
  8. +
+
+
+
+GMail Service +
+
Figure 11. GMail Server Swagger contract page
+
+
+
+

JSON Example

+
+

This is the JSON example to test with swagger client. Please read the swagger documentation.

+
+
+
+
{
+   "EmailFrom":"mythaistarrestaurant@gmail.com",
+   "EmailAndTokenTo":{
+      "MD5Token1":" Email_Here!@gmail.com",
+      "MD5Token2":" Email_Here!@gmail.com"
+   },
+   "EmailType":0,
+   "DetailMenu":[
+      "Thai Spicy Basil Fried Rice x2",
+      "Thai green chicken curry x2"
+   ],
+   "BookingDate":"2017-05-31T12:53:39.7864723+02:00",
+   "Assistants":2,
+   "BookingToken":"MD5Booking",
+   "Price":20.0,
+   "ButtonActionList":{
+      "http://accept.url":"Accept",
+      "http://cancel.url":"Cancel"
+   },
+   "Host":{
+      " Email_Here!@gmail.com":"José Manuel"
+   }
+}
+
+
+
+
+

Configure the service port

+
+

If you want to change the default port, please edit the config file and +change the next entry in appSettings node:

+
+
+
+
<appSettings>
+   <add key="LocalListenPort" value="8080" />
+</appSettings>
+
+
+
+
+ + + +
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4net/1.0/packages.html b/docs/devon4net/1.0/packages.html new file mode 100644 index 00000000..96bbadf9 --- /dev/null +++ b/docs/devon4net/1.0/packages.html @@ -0,0 +1,2050 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Packages

+
+ +
+
+
+

Packages overview

+
+
+ + + + + +
+ + +devon4Net is composed of a number of packages that increase the functionality and boost development time. Each package has its own configuration to make it work properly. In appsettings.json set up your environment. On appsettings.{environment}.json you can configure each component. +
+
+
+
+
+

The packages

+
+
+

You can get the devon4Net packages on nuget.org.

+
+
+
+
+

Devon4Net.Application.WebAPI.Configuration

+
+ +
+
+
+

== Description

+
+
+

The devon4Net web API configuration core.

+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package Devon4Net.Application.WebAPI.Configuration
    +
    +
    +
  • +
+
+
+
+
+

== Default configuration values

+
+
+
+
  "devonfw": {
+    "UseDetailedErrorsKey": true,
+    "UseIIS": false,
+    "UseSwagger": true,
+    "Environment": "Development",
+    "KillSwitch": {
+      "killSwitchSettingsFile": "killswitch.appsettings.json"
+    },
+    "Kestrel": {
+      "UseHttps": true,
+      "HttpProtocol": "Http2", //Http1, Http2, Http1AndHttp2, none
+      "ApplicationPort": 8082,
+      "KeepAliveTimeout": 120, //in seconds
+      "MaxConcurrentConnections": 100,
+      "MaxConcurrentUpgradedConnections": 100,
+      "MaxRequestBodySize": 28.6, //In MB. The default maximum request body size is 30,000,000 bytes, which is approximately 28.6 MB
+      "Http2MaxStreamsPerConnection": 100,
+      "Http2InitialConnectionWindowSize": 131072, // From 65,535 and less than 2^31 (2,147,483,648)
+      "Http2InitialStreamWindowSize": 98304, // From 65,535 and less than 2^31 (2,147,483,648)
+      "AllowSynchronousIO": true,
+      "SslProtocol": "Tls12", //Tls, Tls11,Tls12, Tls13, Ssl2, Ssl3, none. For Https2 Tls12 is needed
+      "ServerCertificate": {
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost"
+      },
+      "ClientCertificate": {
+        "DisableClientCertificateCheck": true,
+        "RequireClientCertificate": false,
+        "CheckCertificateRevocation": true,
+        "ClientCertificates": {
+          "Whitelist": [
+            "3A87A49460E8FE0E2A198E63D408DC58435BC501"
+          ],
+          "DisableClientCertificateCheck": false
+        }
+      }
+    },
+    "IIS": {
+      "ForwardClientCertificate": true,
+      "AutomaticAuthentication": true,
+      "AuthenticationDisplayName" : ""
+    }
+  }
+
+
+
+
+
+

Devon4Net.Infrastructure.CircuitBreaker

+
+ +
+
+
+

== Description

+
+
+

The Devon4Net.Infrastructure.CircuitBreaker component implements the retry pattern for HTTP/HTTPS calls.

+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package Devon4Net.Infrastructure.CircuitBreaker
    +
    +
    +
  • +
+
+
+
+
+

== Default configuration values

+
+
+
+
  "CircuitBreaker": {
+    "CheckCertificate": true,
+    "Endpoints": [
+      {
+        "Name": "SampleService",
+        "BaseAddress": "https://localhost:5001/",
+        "Headers": {
+        },
+        "WaitAndRetrySeconds": [
+          0.0001,
+          0.0005,
+          0.001
+        ],
+        "DurationOfBreak": 0.0005,
+        "UseCertificate": true,
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost",
+        "SslProtocol": "3072" //TLS12
+      }
+    ]
+  }
+
+
+
+

|== == == == == == == == == == == = +|Property|Description +|CheckCertificate| True if HTTPS is required. This is useful when developing an API Gateway needs a secured HTTP, disabling this on development we can use communications with a valid server certificate +|Endpoints| Array with predefined sites to connect with +|Name| The name key to identify the destination URL +|Headers| Not ready yet +|WaitAndRetrySeconds| Array which determines the number of retries and the lapse period between each retry. The value is in milliseconds. +|Certificate| Client certificate to use to perform the HTTP call +|SslProtocol| The secure protocol to use on the call +|== == == == == == == == == == == =

+
+
+
+
+

== Protocols

+
+
+

|== == == == == == == == == == == = +|Protocol|Key|Description +|SSL3|48| Specifies the Secure Socket Layer (SSL) 3.0 security protocol. SSL 3.0 has been superseded by the Transport Layer Security (TLS) protocol and is provided for backward compatibility only. +|TLS|192|Specifies the Transport Layer Security (TLS) 1.0 security protocol. The TLS 1.0 protocol is defined in IETF RFC 2246. +|TLS11|768| Specifies the Transport Layer Security (TLS) 1.1 security protocol. The TLS 1.1 protocol is defined in IETF RFC 4346. On Windows systems, this value is supported starting with Windows 7. +|TLS12|3072| Specifies the Transport Layer Security (TLS) 1.2 security protocol. The TLS 1.2 protocol is defined in IETF RFC 5246. On Windows systems, this value is supported starting with Windows 7. +|TLS13|12288| Specifies the TLS 1.3 security protocol. The TLS protocol is defined in IETF RFC 8446.

+
+
+

|== == == == == == == == == == == =

+
+
+
+
+

== Usage

+
+
+

Add the circuit breaker instance via Dependency Injection, e.g.:

+
+
+
+
    public class FooService : Service<TodosContext>, ILoginService
+    {
+ public FooService(IUnitOfWork<AUTContext> uoW,  ICircuitBreakerHttpClient circuitBreakerClient,
+            ILogger<LoginService> logger) : base(uoW)
+        {
+        ...
+        }
+    }
+
+
+
+

At this point you can use the circuit breaker functionality in your code.

+
+
+

To perform a POST call you should use your circuit breaker instance as follows:

+
+
+
+
await circuitBreakerClient.PostAsync<YourOutputClass>(NameOftheService, EndPoint, InputData, MediaType.ApplicationJson).ConfigureAwait(false);
+
+
+
+

Where:

+
+
+

|== == == == == == == == == == == = +|Property|Description +|YourOutputClass| The type of the class that you are expecting to retrieve from the POST call +|NameOftheService| The key name of the endpoint provided in the appsettings.json file at Endpoints[] node +|EndPoint|Part of the url to use with the base address. PE: /validate +|InputData| Your instance of the class with values that you want to use in the POST call +|MediaType.ApplicationJson| The media type flag for the POST call +|== == == == == == == == == == == =

+
+
+
+
+

devon4Net.Domain.UnitOfWork

+
+ +
+
+
+

== Description

+
+
+

Unit of work implementation for devon4net solution. This unit of work provides the different methods to access the data layer with an atomic context. Sync and Async repository operations are provided. Customized Eager Loading method also provided for custom entity properties.

+
+
+ + + + + +
+ + +This component will move on next releases to Infrastructure instead of being part of Domain components +
+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Domain.UnitOfWork
    +
    +
    +
  • +
  • +

    Adding the database connection information:

    +
  • +
+
+
+

Add the database connection on the SetupDatabase method at Startup.cs

+
+
+
+
       private void SetupDatabase(IServiceCollection services)
+        {
+            services.SetupDatabase<TodoContext>(Configuration, "Default", WebAPI.Configuration.Enums.DatabaseType.InMemory);
+        }
+
+
+
+

Where:

+
+
+

|== == == == == == == == == == == = +|Param|Description +|TodoContext| Is the database context definition +|Default| Is the connection string defined at ConnectionString node at the appsettings configuration file +|WebAPI.Configuration.Enums.DatabaseType.InMemory| Is the database driver selection. In this case InMemory data base is chosen +|== == == == == == == == == == == =

+
+
+

The supported databases are:

+
+
+
    +
  • +

    SqlServer

    +
  • +
  • +

    Sqlite

    +
  • +
  • +

    InMemory

    +
  • +
  • +

    Cosmos

    +
  • +
  • +

    PostgreSQL

    +
  • +
  • +

    MySql

    +
  • +
  • +

    MariaDb

    +
  • +
  • +

    FireBird

    +
  • +
  • +

    Oracle

    +
  • +
  • +

    MSAccess

    +
  • +
+
+
+
+
+

== Notes

+
+
+

Now you can use the unit of work via dependency injection on your classes:

+
+
+
+UOW `DI` Sample +
+
Figure 1. Use of Unit of work via dependency injection
+
+
+

As you can see in the image, you can use Unit Of Work class with your defined ModelContext classes.

+
+
+

Predicate expression builder

+
+
+
    +
  • +

    Use this expression builder to generate lambda expressions dynamically.

    +
    +
    +
    var predicate =  PredicateBuilder.True<T>();
    +
    +
    +
  • +
+
+
+

Where T is a class. At this point, you can build your expression and apply it to obtain your results in an efficient way instead of retrieving data each time you apply an expression.

+
+
+
    +
  • +

    Example from My Thai Star .Net Core implementation:

    +
  • +
+
+
+
+
public async Task<PaginationResult<Dish>> GetpagedDishListFromFilter(int currentpage, int pageSize, bool isFav, decimal maxPrice, int minLikes, string searchBy, IList<long> categoryIdList, long userId)
+{
+    var includeList = new List<string>{"DishCategory","DishCategory.IdCategoryNavigation", "DishIngredient","DishIngredient.IdIngredientNavigation","IdImageNavigation"};
+
+    //Here we create our predicate builder
+    var dishPredicate = PredicateBuilder.True<Dish>();
+
+
+    //Now we start applying the different criteria:
+    if (!string.IsNullOrEmpty(searchBy))
+    {
+        var criteria = searchBy.ToLower();
+        dishPredicate = dishPredicate.And(d => d.Name.ToLower().Contains(criteria) || d.Description.ToLower().Contains(criteria));
+    }
+
+    if (maxPrice > 0) dishPredicate = dishPredicate.And(d=>d.Price<=maxPrice);
+
+    if (categoryIdList.Any())
+    {
+        dishPredicate = dishPredicate.And(r => r.DishCategory.Any(a => categoryIdList.Contains(a.IdCategory)));
+    }
+
+    if (isFav && userId >= 0)
+    {
+        var favourites = await UoW.Repository<UserFavourite>().GetAllAsync(w=>w.IdUser ==  userId);
+        var dishes = favourites.Select(s => s.IdDish);
+        dishPredicate = dishPredicate.And(r=> dishes.Contains(r.Id));
+    }
+
+    // Now we can use the predicate to retrieve data from database with just one call
+    return await UoW.Repository<Dish>().GetAllIncludePagedAsync(currentpage, pageSize, includeList, dishPredicate);
+
+}
+
+
+
+
+
+

devon4Net.Infrastructure.Extensions

+
+ +
+
+
+

== Description

+
+
+

Miscellaneous extension library which contains : +- Predicate expression builder +- DateTime formatter +- HttpClient +- HttpContext (Middleware support)

+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.Extensions
    +
    +
    +
  • +
+
+
+

HttpContext

+
+
+
    +
  • +

    The TryAddHeader method is used in the devon4Net.Infrastructure.Middleware component to automatically add response header options such as authorization.

    +
  • +
+
+
+
+
+

devon4Net.Infrastructure.JWT

+
+ +
+
+
+

== Description

+
+
+
+
+

JSON Web Token (JWT) is an open standard (RFC 7519) that defines a compact and self-contained way for securely transmitting information between parties as a JSON object. This information can be verified and trusted because it is digitally signed. JWTs can be signed using a secret (with the HMAC algorithm) or a public/private key pair using RSA or ECDSA.

+
+
+
+— What is JSON Web Token?
+https://jwt.io/introduction/ +
+
+
+
    +
  • +

    devon4Net component to manage JWT standard to provide security to .Net API applications.

    +
  • +
+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.JWT
    +
    +
    +
  • +
+
+
+
+
+

== Default configuration values

+
+
+
+
"JWT": {
+    "Audience": "devon4Net",
+    "Issuer": "devon4Net",
+    "TokenExpirationTime": 60,
+    "ValidateIssuerSigningKey": true,
+    "ValidateLifetime": true,
+    "ClockSkew": 5,
+    "Security": {
+      "SecretKeyLengthAlgorithm": "",
+      "SecretKeyEncryptionAlgorithm": "",
+      "SecretKey": "",
+      "Certificate": "",
+      "CertificatePassword": "",
+      "CertificateEncryptionAlgorithm": ""
+    }
+  }
+
+
+
+
    +
  • +

    ClockSkew indicates the token expiration time in minutes

    +
  • +
  • +

    Certificate: you can specify the name of your certificate (if it is on the same path) or the full path of the certificate. If the certificate does not exist, an exception will be raised.

    +
  • +
  • +

    SecretKeyLengthAlgorithm, SecretKeyEncryptionAlgorithm and CertificateEncryptionAlgorithm supported algorithms are:

    +
  • +
+
+
+

|== == == == == == == == == == == = +|Algorithm|Description +|Aes128Encryption|"http://www.w3.org/2001/04/xmlenc#aes128-cbc" +|Aes192Encryption|"http://www.w3.org/2001/04/xmlenc#aes192-cbc" +|Aes256Encryption|"http://www.w3.org/2001/04/xmlenc#aes256-cbc" +|DesEncryption|"http://www.w3.org/2001/04/xmlenc#des-cbc" +|Aes128KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes128" +|Aes192KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes192" +|Aes256KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes256" +|RsaV15KeyWrap|"http://www.w3.org/2001/04/xmlenc#rsa-1_5" +|Ripemd160Digest|"http://www.w3.org/2001/04/xmlenc#ripemd160" +|RsaOaepKeyWrap|"http://www.w3.org/2001/04/xmlenc#rsa-oaep" +|Aes128KW|"A128KW" +|Aes256KW|"A256KW" +|RsaPKCS1|"RSA1_5" +|RsaOAEP|"RSA-OAEP" +|ExclusiveC14n|"http://www.w3.org/2001/10/xml-exc-c14n#" +|ExclusiveC14nWithComments|"http://www.w3.org/2001/10/xml-exc-c14n#WithComments" +|EnvelopedSignature|"http://www.w3.org/2000/09/xmldsig#enveloped-signature" +|Sha256Digest|"http://www.w3.org/2001/04/xmlenc#sha256" +|Sha384Digest|"http://www.w3.org/2001/04/xmldsig-more#sha384" +|Sha512Digest|"http://www.w3.org/2001/04/xmlenc#sha512" +|Sha256|"SHA256" +|Sha384|"SHA384" +|Sha512|"SHA512" +|EcdsaSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha256" +|EcdsaSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha384" +|EcdsaSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha512" +|HmacSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha256" +|HmacSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha384" +|HmacSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha512" +|RsaSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha256" +|RsaSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha384" +|RsaSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha512" +|RsaSsaPssSha256Signature|"http://www.w3.org/2007/05/xmldsig-more#sha256-rsa-MGF1" 
+|RsaSsaPssSha384Signature|"http://www.w3.org/2007/05/xmldsig-more#sha384-rsa-MGF1" +|RsaSsaPssSha512Signature|"http://www.w3.org/2007/05/xmldsig-more#sha512-rsa-MGF1" +|EcdsaSha256|"ES256" +|EcdsaSha384|"ES384" +|EcdsaSha512|"ES512" +|HmacSha256|"HS256" +|HmacSha384|"HS384" +|HmacSha512|"HS512" +|None|"none" +|RsaSha256|"RS256" +|RsaSha384|"RS384" +|RsaSha512|"RS512" +|RsaSsaPssSha256|"PS256" +|RsaSsaPssSha384|"PS384" +|RsaSsaPssSha512|"PS512" +|Aes128CbcHmacSha256|"A128CBC-HS256" +|Aes192CbcHmacSha384|"A192CBC-HS384" +|Aes256CbcHmacSha512|"A256CBC-HS512" +|== == == == == == == == == == == =

+
+
+ + + + + +
+ + +Please check Microsoft documentation to get the latest updates on supported encryption algorithms +
+
+
+
    +
  • +

    Add this line of code (only if you use this component stand alone):

    +
  • +
+
+
+
+
services.AddBusinessCommonJwtPolicy();
+
+
+
+

On

+
+
+
+
Startup.cs
+
+
+
+

or on:

+
+
+
+
devon4Net.Application.Configuration.Startup/JwtApplicationConfiguration/ConfigureJwtPolicy method.
+
+
+
+
    +
  • +

    Inside the AddBusinessCommonJwtPolicy method you can add your JWT Policy like in My Thai Star application sample:

    +
  • +
+
+
+
+
 services.ConfigureJwtAddPolicy("MTSWaiterPolicy", "role", "waiter");
+
+
+
+
+
+

== Notes

+
+
+
    +
  • +

    The certificate will be used to generate the key to encrypt the json web token.

    +
  • +
+
+
+
+
+

devon4Net.Infrastructure.Middleware

+
+ +
+
+
+

== Description

+
+
+
    +
  • +

    devon4Net support for middleware classes.

    +
  • +
  • +

    In ASP.NET Core, middleware classes can handle an HTTP request or response. Middleware can either:

    +
    +
      +
    • +

      Handle an incoming HTTP request by generating an HTTP response.

      +
    • +
    • +

      Process an incoming HTTP request, modify it, and pass it on to another piece of middleware.

      +
    • +
    • +

      Process an outgoing HTTP response, modify it, and pass it on to either another piece of middleware, or the ASP.NET Core web server.

      +
    • +
    +
    +
  • +
  • +

    devon4Net supports the following automatic response headers:

    +
    +
      +
    • +

      AccessControlExposeHeader

      +
    • +
    • +

      StrictTransportSecurityHeader

      +
    • +
    • +

      XFrameOptionsHeader

      +
    • +
    • +

      XssProtectionHeader

      +
    • +
    • +

      XContentTypeOptionsHeader

      +
    • +
    • +

      ContentSecurityPolicyHeader

      +
    • +
    • +

      PermittedCrossDomainPoliciesHeader

      +
    • +
    • +

      ReferrerPolicyHeader:toc: macro

      +
    • +
    +
    +
  • +
+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.Middleware
    +
    +
    +
  • +
  • +

    You can configure your Middleware configuration on appsettings.{environment}.json:

    +
  • +
+
+
+
+
"Middleware": {
+    "Headers": {
+      "AccessControlExposeHeader": "Authorization",
+      "StrictTransportSecurityHeader": "",
+      "XFrameOptionsHeader": "DENY",
+      "XssProtectionHeader": "1;mode=block",
+      "XContentTypeOptionsHeader": "nosniff",
+      "ContentSecurityPolicyHeader": "",
+      "PermittedCrossDomainPoliciesHeader": "",
+      "ReferrerPolicyHeader": ""
+    }
+}
+
+
+
+
    +
  • +

    On the above sample, the server application will add to response header the AccessControlExposeHeader, XFrameOptionsHeader, XssProtectionHeader and XContentTypeOptionsHeader headers.

    +
  • +
  • +

    If the header response type does not have a value, it will not be added to the response headers.

    +
  • +
+
+
+
+
+

devon4Net.Infrastructure.Swagger

+
+ +
+
+
+

== Description

+
+
+
    +
  • +

    devon4net Swagger abstraction to provide full externalized easy configuration.

    +
  • +
  • +

    Swagger offers the easiest to use tools to take full advantage of all the capabilities of the OpenAPI Specification (OAS).

    +
  • +
+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.Swagger
    +
    +
    +
  • +
  • +

    You can configure your Swagger configuration on appsettings.{environment}.json:

    +
  • +
+
+
+
+
"Swagger": {
+    "Version": "v1",
+    "Title": "devon4net API",
+    "Description": "devon4net API Contract",
+    "Terms": "https://www.devonfw.com/terms-of-use/",
+    "Contact": {
+      "Name": "devonfw",
+      "Email": "sample@mail.com",
+      "Url": "https://www.devonfw.com"
+    },
+    "License": {
+      "Name": "devonfw - Terms of Use",
+      "Url": "https://www.devonfw.com/terms-of-use/"
+    },
+    "Endpoint": {
+      "Name": "V1 Docs",
+      "Url": "/swagger/v1/swagger.json",
+      "UrlUi": "swagger",
+      "RouteTemplate": "swagger/v1/{documentName}/swagger.json"
+    }
+  }
+
+
+
+
    +
  • +

    Add this line of code (only if you use this component stand alone):

    +
  • +
+
+
+
+
services.ConfigureSwaggerService();
+
+
+
+

On

+
+
+
+
Startup.cs
+
+
+
+
    +
  • +

    Also add this line of code (only if you use this component stand alone):

    +
  • +
+
+
+
+
app.ConfigureSwaggerApplication();
+
+
+
+

On

+
+
+
+
Startup.cs/Configure(IApplicationBuilder app, IHostingEnvironment env)
+
+
+
+
    +
  • +

    Ensure your API actions and non-route parameters are decorated with explicit "Http" and "From" bindings.

    +
  • +
+
+
+
+
+

== Notes

+
+
+
    +
  • +

    To access the Swagger UI, launch your API project and type the url http://localhost:yourPort/swagger in your browser.

    +
  • +
  • +

    In order to generate the documentation annotate your actions with summary, remarks and response tags:

    +
  • +
+
+
+
+
/// <summary>
+/// Method to make a reservation with potential guests. The method returns the reservation token with the format: {(CB_|GB_)}{now.Year}{now.Month:00}{now.Day:00}{_}{MD5({Host/Guest-email}{now.Year}{now.Month:00}{now.Day:00}{now.Hour:00}{now.Minute:00}{now.Second:00})}
+/// </summary>
+/// <param name="bookingDto"></param>
+/// <response code="201">Ok.</response>
+/// <response code="400">Bad request. Parser data error.</response>
+/// <response code="401">Unauthorized. Authentication fail.</response>
+/// <response code="403">Forbidden. Authorization error.</response>
+/// <response code="500">Internal Server Error. The search process ended with error.</response>
+[HttpPost]
+[HttpOptions]
+[Route("/mythaistar/services/rest/bookingmanagement/v1/booking")]
+[AllowAnonymous]
+[EnableCors("CorsPolicy")]
+public async Task<IActionResult> Booking([FromBody]BookingDto bookingDto)
+{
+    try
+    {
+
+    ...
+
+
+
+
    +
  • +

    Ensure that your project has the generateXMLdocumentationfile check active on build menu:

    +
  • +
+
+
+
+Generate documentation XML check +
+
Figure 2. Swagger documentation
+
+
+
    +
  • +

    Ensure that your XML files has the attribute copy always to true:

    +
  • +
+
+
+
+Generate documentation XML check +
+
Figure 3. Swagger documentation
+
+
+
+
+

devon4Net.Infrastructure.Test

+
+ +
+
+
+

== Description

+
+
+

devon4Net Base classes to create unit tests and integration tests with Moq and xUnit.

+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Load the template: +> dotnet new -i devon4Net.Test.Template +> dotnet new devon4NetTest

    +
  • +
+
+
+
+
+

== Notes

+
+
+
    +
  • +

    At this point you can find these classes:

    +
    +
      +
    • +

      BaseManagementTest

      +
    • +
    • +

      DatabaseManagementTest<T> (Where T is a devon4NetBaseContext class)

      +
    • +
    +
    +
  • +
  • +

    For unit testing, inherit a class from BaseManagementTest.

    +
  • +
  • +

    For integration tests, inherit a class from DatabaseManagementTest.

    +
  • +
  • +

    The recommended databases for integration tests are the in-memory database or the SQLite database.

    +
  • +
  • +

    Please check My thai Star test project.

    +
  • +
+
+
+
+
+

Deprecated packages

+
+ +
+
+
+

devon4Net.Domain.Context

+
+ +
+
+
+

== Description

+
+
+

devon4Net.Domain.Context contains the extended class devon4NetBaseContext in order to make easier the process of having a model context configured against different database engines. This configuration allows an easier testing configuration against local and in memory databases.

+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Domain.Context
    +
    +
    +
  • +
  • +

    Add to appsettings.{environment}.json file your database connections:

    +
  • +
+
+
+
+
"ConnectionStrings":
+{
+"DefaultConnection":
+"Server=localhost;Database=MyThaiStar;User Id=sa;Password=sa;MultipleActiveResultSets=True;",
+
+"AuthConnection":
+"Server=(localdb)\\mssqllocaldb;Database=aspnet-DualAuthCore-5E206A0B-D4DA-4E71-92D3-87FD6B120C5E;Trusted_Connection=True;MultipleActiveResultSets=true",
+
+"SqliteConnection": "Data Source=c:\\tmp\\membership.db;"
+}
+
+
+
+
    +
  • +

    On Startup.cs :

    +
  • +
+
+
+
+
void ConfigureServices(IServiceCollection services)
+
+
+
+
    +
  • +

    Add your database connections defined on previous point:

    +
  • +
+
+
+
+
services.ConfigureDataBase(
+new Dictionary<string, string> {
+{ConfigurationConst.DefaultConnection, Configuration.GetConnectionString(ConfigurationConst.DefaultConnection) }});
+
+
+
+
    +
  • +

    On devon4Net.Application.Configuration.Startup/DataBaseConfiguration/ConfigureDataBase configure your connections.

    +
  • +
+
+
+
+
+

devon4Net.Infrastructure.ApplicationUser

+
+ +
+
+
+

== Description

+
+
+

devon4Net Application user classes to implement Microsoft’s basic authentication in order to be used with authentication methodologies such as JSON Web Token (JWT).

+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.ApplicationUser
    +
    +
    +
  • +
  • +

    Add the database connection string for user management on appsettings.{environment}.json:

    +
  • +
+
+
+
+
"ConnectionStrings":
+{
+"AuthConnection":
+"Server=(localdb)\\mssqllocaldb;Database=aspnet-DualAuthCore-5E206A0B-D4DA-4E71-92D3-87FD6B120C5E;Trusted_Connection=True;MultipleActiveResultSets=true"
+}
+
+
+
+
    +
  • +

    Add the following line of code

    +
  • +
+
+
+
+
services.AddApplicationUserDependencyInjection();
+
+
+
+

On

+
+
+
+
Startup.cs/ConfigureServices(IServiceCollection services)
+
+
+
+

or on:

+
+
+
+
devon4Net.Application.Configuration.Startup/DependencyInjectionConfiguration/ConfigureDependencyInjectionService method.
+
+
+
+
    +
  • +

    Add the data seeder on Configure method on start.cs class:

    +
  • +
+
+
+
+
public void Configure(IApplicationBuilder app, IHostingEnvironment env, DataSeeder seeder)
+{
+    ...
+
+    app.UseAuthentication();
+    seeder.SeedAsync().Wait();
+
+    ...
+}
+
+
+
+
+
+

== Notes

+
+
+
    +
  • +

    You can use the following methods to set up the database configuration:

    +
  • +
+
+
+
+
public static void AddApplicationUserDbContextInMemoryService(this IServiceCollection services)
+
+public static void AddApplicationUserDbContextSQliteService(this IServiceCollection services, string connectionString)
+
+public static void AddApplicationUserDbContextSQlServerService(this IServiceCollection services, string connectionString)
+
+
+
+
    +
  • +

    The method AddApplicationUserDbContextInMemoryService uses the AuthContext connection string name to set up the database.

    +
  • +
  • +

    This component is used with the components devon4Net.Infrastructure.JWT and devon4Net.Infrastructure.JWT.MVC.

    +
  • +
+
+
+
+
+

devon4Net.Infrastructure.Communication

+
+ +
+
+
+

== Description

+
+
+

Basic client classes to invoke GET/POST methods asynchronously. This component has the minimal classes to send basic data. For more complex operations please use devon4Net.Infrastructure.Extensions.

+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.Communication
    +
    +
    +
  • +
  • +

    Create an instance of RestManagementService class.

    +
  • +
  • +

    Use next methods to use GET/POST basic options:

    +
  • +
+
+
+
+
public Task<string> CallGetMethod(string url);
+public Task<Stream> CallGetMethodAsStream(string url);
+public Task<string> CallPostMethod<T>(string url, T dataToSend);
+public Task<string> CallPutMethod<T>(string url, T dataToSend);
+
+
+
+
+
+

== Notes

+
+
+
    +
  • +

    Example:

    +
  • +
+
+
+
+
private async Task RestManagementServiceSample(EmailDto dataToSend)
+{
+    var url = Configuration["EmailServiceUrl"];
+    var restManagementService = new RestManagementService();
+    await restManagementService.CallPostMethod(url, dataToSend);
+}
+
+
+
+
+
+

devon4Net.Infrastructure.JWT.MVC

+
+ +
+
+
+

== Description

+
+
+
    +
  • +

    devon4Net Extended controller to interact with JWT features

    +
  • +
+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Extend your _ Microsoft.AspNetCore.Mvc.Controller_ class with devon4NetJWTController class:

    +
  • +
+
+
+
+
public class LoginController : devon4NetJWTController
+{
+    private readonly ILoginService _loginService;
+
+    public LoginController(ILoginService loginService,  SignInManager<ApplicationUser>  signInManager, UserManager<ApplicationUser> userManager, ILogger<LoginController> logger, IMapper mapper) : base(logger,mapper)
+    {
+        _loginService = loginService;
+    }
+
+    ....
+
+
+
+
+
+

== Notes

+
+
+
    +
  • +

    In order to generate a JWT, you should implement the JWT generation on user login. For example, in My Thai Star is created as follows:

    +
  • +
+
+
+
+
public async Task<IActionResult> Login([FromBody]LoginDto loginDto)
+{
+    try
+    {
+        if (loginDto ==  null) return Ok();
+        var logged = await _loginService.LoginAsync(loginDto.UserName, loginDto.Password);
+
+        if (logged)
+        {
+            var user = await _loginService.GetUserByUserNameAsync(loginDto.UserName);
+
+            var encodedJwt = new JwtClientToken().CreateClientToken(_loginService.GetUserClaimsAsync(user));
+
+            Response.Headers.Add("Access-Control-Expose-Headers", "Authorization");
+
+            Response.Headers.Add("Authorization", $"{JwtBearerDefaults.AuthenticationScheme} {encodedJwt}");
+
+            return Ok(encodedJwt);
+        }
+        else
+        {
+            Response.Headers.Clear();
+            return StatusCode((int)HttpStatusCode.Unauthorized, "Login Error");
+        }
+
+    }
+    catch (Exception ex)
+    {
+        return StatusCode((int)HttpStatusCode.InternalServerError, $"{ex.Message} : {ex.InnerException}");
+    }
+}
+
+
+
+
    +
  • +

    In My Thai Star the JWT will contain the user information such id, roles…​

    +
  • +
  • +

    Once you extend your controller with devon4NetJWTController you will have available these methods to simplify user management:

    +
  • +
+
+
+
+
    public interface Idevon4NetJWTController
+    {
+        // Gets the current user
+        JwtSecurityToken GetCurrentUser();
+
+        // Gets an specific assigned claim of current user
+        Claim GetUserClaim(string claimName, JwtSecurityToken jwtUser = null);
+
+        // Gets all the assigned claims of current user
+        IEnumerable<Claim> GetUserClaims(JwtSecurityToken jwtUser = null);
+    }
+
+
+
+
+
+

devon4Net.Infrastructure.MVC

+
+ +
+
+
+

== Description

+
+
+

Common classes to extend controller functionality on API. Also provides support for paged results in devon4Net applications and automapper injected class.

+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.MVC
    +
    +
    +
  • +
+
+
+
+
+

== Notes

+
+
+
    +
  • +

    The generic class ResultObjectDto<T> provides a typed result object with pagination.

    +
  • +
  • +

    The extended class provides the following methods:

    +
  • +
+
+
+
+
        ResultObjectDto<T> GenerateResultDto<T>(int? page, int? size, int? total);
+        ResultObjectDto<T> GenerateResultDto<T>(List<T> result, int? page = null, int? size = null);
+
+
+
+
    +
  • +

    GenerateResultDto provides typed ResultObjectDto object or a list of typed ResultObjectDto object. The aim of this methods is to provide a clean management for result objects and not repeating code through the different controller classes.

    +
  • +
  • +

    The following sample from My Thai Star shows how to use it:

    +
  • +
+
+
+
+
public async Task<IActionResult> Search([FromBody] FilterDtoSearchObject filterDto)
+{
+    if (filterDto ==  null) filterDto = new FilterDtoSearchObject();
+
+    try
+    {
+        var dishList = await _dishService.GetDishListFromFilter(false, filterDto.GetMaxPrice(), filterDto.GetMinLikes(), filterDto.GetSearchBy(),filterDto.GetCategories(), -1);
+
+
+        return new OkObjectResult(GenerateResultDto(dishList).ToJson());
+    }
+    catch (Exception ex)
+    {
+        return StatusCode((int)HttpStatusCode.InternalServerError, $"{ex.Message} : {ex.InnerException}");
+    }
+}
+
+
+
+
+
+

devon4Net.Infrastructure.AOP

+
+ +
+
+
+

== Description

+
+
+

Simple AOP Exception handler for .Net Controller classes integrated with Serilog.

+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Domain.AOP
    +
    +
    +
  • +
+
+
+

Add this line of code on ConfigureServices method on Startup.cs

+
+
+
+
services.AddAopAttributeService();
+
+
+
+
+
+

== Notes

+
+
+

Now your API methods exposed on controller classes will automatically be tracked on the methods:

+
+
+
    +
  • +

    OnActionExecuting

    +
  • +
  • +

    OnActionExecuted

    +
  • +
  • +

    OnResultExecuting

    +
  • +
  • +

    OnResultExecuted

    +
  • +
+
+
+

If an exception occurs, a message will be displayed on log with the stack trace.

+
+
+
+
+

devon4Net.Infrastructure.Cors

+
+ +
+
+
+

== Description

+
+
+

Enables CORS configuration for devon4Net applications. Multiple domains can be configured from configuration. Mandatory for web clients (e.g. Angular) to be able to make AJAX requests to another domain.

+
+
+

Cross-Origin Resource Sharing (CORS) is a mechanism that uses additional HTTP headers to tell a browser to let a web application running at one origin (domain) have permission to access selected resources from a server at a different origin. A web application makes a cross-origin HTTP request when it requests a resource that has a different origin (domain, protocol, and port) than its own origin.

+
+
+

Please refer to this link to get more information about CORS and .Net core.

+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.Cors
    +
    +
    +
  • +
  • +

    You can configure your Cors configuration on appsettings.{environment}.json:

    +
    +
    +
    `CorsPolicy`: indicates the name of the policy. You can use this name to add security headers on your API exposed methods.
    +
    +
    +
    +
    +
    Origins: The allowed domains
    +
    +
    +
    +
    +
    Headers: The allowed headers such as accept,content-type,origin,x-custom-header
    +
    +
    +
  • +
  • +

    If you specify the cors configuration as an empty array, a default cors-policy will be used with all origins enabled:

    +
  • +
+
+
+
+
  "Cors": []
+
+
+
+
    +
  • +

    On the other hand, you can specify different Cors policies in your solution as follows:

    +
  • +
+
+
+
+
"Cors":
+[
+  {
+    "CorsPolicy": "CorsPolicy1",
+    "Origins": "http://example.com,http://www.contoso.com",
+    "Headers": "accept,content-type,origin,x-custom-header",
+    "Methods": "GET,POST,HEAD",
+    "AllowCredentials": true
+  },
+  {
+    "CorsPolicy": "CorsPolicy2",
+    "Origins": "http://example.com,http://www.contoso.com",
+    "Headers": "accept,content-type,origin,x-custom-header",
+    "Methods": "GET,POST,HEAD",
+    "AllowCredentials": true
+  }
+]
+
+
+
+
+
+

== Notes

+
+
+
    +
  • +

    To use CORS in your API methods, use the following annotation:

    +
  • +
+
+
+
+
[EnableCors("YourCorsPolicy")]
+public IActionResult Index() {
+    return View();
+}
+
+
+
+
    +
  • +

    If you want to disable the CORS check, use the following annotation:

    +
  • +
+
+
+
+
[DisableCors]
+public IActionResult Index() {
+    return View();
+}
+
+
+
+
+
+

Required software

+ +
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4net/1.0/samples.html b/docs/devon4net/1.0/samples.html new file mode 100644 index 00000000..3de09ec7 --- /dev/null +++ b/docs/devon4net/1.0/samples.html @@ -0,0 +1,866 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Samples

+
+ +
+
+
+

My Thai Star Restaurant

+
+ +
+
+ +
+
+
+
+
+

Angular requirements

+
+
+ +
+
+
+
+

Angular client

+
+
+
    +
  1. +

    Install Node.js LTS version

    +
  2. +
  3. +

    Install Angular CLI from command line:

    +
    +
      +
    • +

      npm install -g @angular/cli

      +
    • +
    +
    +
  4. +
  5. +

    Install Yarn

    +
  6. +
  7. +

    Go to Angular client from command line

    +
  8. +
  9. +

    Execute : yarn install

    +
  10. +
  11. +

    Launch the app from command line: ng serve and check http://localhost:4200

    +
  12. +
  13. +

    You are ready

    +
  14. +
+
+
+
+
+

.Net Core server

+
+ +
+
+
+

== Basic architecture details

+
+
+

Following the devonfw conventions the .Net Core 2.0 My Thai Star backend is going to be developed dividing the application in Components and using a n-layer architecture.

+
+
+
+project modules +
+
+
+
+
+

== Components

+
+
+

The application is going to be divided in different components to encapsulate the different domains of the application functionalities.

+
+
+
+mtsn components +
+
+
+

As main components we will find:

+
+
+
    +
  • +

    BookingService: Manages the bookings part of the application. With this component the users (anonymous/logged in) can create new bookings or cancel an existing booking. The users with waiter role can see all scheduled bookings.

    +
  • +
+
+
+

OrderService: This component handles the process to order dishes (related to bookings). A user (as a host or as a guest) can create orders (that contain dishes) or cancel an existing one. The users with waiter role can see all ordered orders.

+
+
+
    +
  • +

    DishService: This component groups the logic related to the menu (dishes) view. Its main feature is to provide the client with the data of the available dishes but also can be used by other components (Ordermanagement) as a data provider in some processes.

    +
  • +
  • +

    UserService: Takes care of the User Profile management, allowing to create and update the data profiles.

    +
  • +
+
+
+

As common components (that don’t exactly represent an application’s area but provide functionalities that can be used by the main components):

+
+
+
    +
  • +

    Mailservice: with this service we will provide the functionality for sending email notifications. This is a shared service between different app components such as bookingmanagement or ordercomponent.

    +
  • +
+
+
+

Other components:

+
+
+
    +
  • +

    Security (will manage the access to the private part of the application using a jwt implementation).

    +
  • +
  • +

    Twitter integration: planned as a Microservice will provide the twitter integration needed for some specific functionalities of the application.

    +
  • +
+
+
+
+
+

Layers

+
+ +
+
+
+

== Introduction

+
+
+

The .Net Core backend for My Thai Star application is going to be based on:

+
+
+
    +
  • +

    devon4NET as the .Net Core framework

    +
  • +
  • +

    VSCode as the Development environment

    +
  • +
  • +

    TOBAGO as code generation tool

    +
  • +
+
+
+
+
+

== Application layer

+
+
+

This layer will expose the REST api to exchange information with the client applications.

+
+
+

The application will expose the services on port 8081 and it can be launched as a self-hosted console application (microservice approach) and as a Web Api application hosted on IIS/IIS Express.

+
+
+
+
+

== Business layer

+
+
+

This layer will define the controllers which will be used on the application layer to expose the different services. Also, will define the swagger contract making use of summary comments and framework attributes.

+
+
+

This layer also includes the object response classes in order to interact with external clients.

+
+
+
+
+

== Service layer

+
+
+

The layer in charge of hosting the business logic of the application. Also orchestrates the object conversion between object response and entity objects defined in Data layer.

+
+
+
+
+

== Data layer

+
+
+

The layer to communicate with the data base.

+
+
+

Data layer makes use of Entity Framework. +The Database context is defined on DataAccessLayer assembly (ModelContext).

+
+
+

This layer makes use of the Repository pattern and Unit of work in order to encapsulate the complexity. Making use of this combined patterns we ensure an organized and easy work model.

+
+
+

As in the previous layers, the data access layer will have both interface and implementation tiers. However, in this case, the implementation will be slightly different due to the use of generics.

+
+
+
+
+

== Cross-Cutting concerns

+
+
+

The layer to make use of transversal components such as JWT and mailing.

+
+
+
+
+

Jwt basics

+
+
+
    +
  • +

    A user will provide a username / password combination to our auth server.

    +
  • +
  • +

    The auth server will try to identify the user and, if the credentials match, will issue a token.

    +
  • +
  • +

    The user will send the token as the Authorization header to access resources on server protected by JWT Authentication.

    +
  • +
+
+
+
+jwt schema +
+
+
+
+
+

Jwt implementation details

+
+
+

The Json Web Token pattern will be implemented based on the jwt on .net core framework that is provided by default in the devon4Net projects.

+
+
+
+
+

Authentication

+
+
+

Based on Microsoft approach, we will implement a class to define the security entry point and filters. Also, as My Thai Star is a mainly public application, we will define here the resources that won’t be secured.

+
+
+

On devon4Net.Infrastructure.JWT assembly is defined a subset of Microsoft’s authorization schema Database. It is started up the first time the application launches.

+
+
+

You can read more about Authorization on:

+
+ + +
+
+
+

Dependency injection

+
+
+

As it is explained in the Microsoft documentation we are going to implement the dependency injection pattern basing our solution on .Net Core.

+
+
+
+dependency injection +
+
+
+
    +
  • +

    Separation of API and implementation: Inside each layer we will separate the elements in different tiers: interface and implementation. The interface tier will store the interface with the methods definition and inside the implementation we will store the class that implements the interface.

    +
  • +
+
+
+
+
+

Layer communication method

+
+
+

The connection between layers, to access to the functionalities of each one, will be solved using the dependency injection.

+
+
+
+layer impl +
+
+
+

Connection BookingService - Logic

+
+
+
+
 public class BookingService : EntityService<Booking>, IBookingService
+    {
+        private readonly IBookingRepository _bookingRepository;
+        private readonly IRepository<Order> _orderRepository;
+        private readonly IRepository<InvitedGuest> _invitedGuestRepository;
+        private readonly IOrderLineRepository _orderLineRepository;
+        private readonly IUnitOfWork _unitOfWork;
+
+        public BookingService(IUnitOfWork unitOfWork,
+            IBookingRepository repository,
+            IRepository<Order> orderRepository,
+            IRepository<InvitedGuest> invitedGuestRepository,
+            IOrderLineRepository orderLineRepository) : base(unitOfWork, repository)
+        {
+            _unitOfWork = unitOfWork;
+            _bookingRepository = repository;
+            _orderRepository = orderRepository;
+            _invitedGuestRepository = invitedGuestRepository;
+            _orderLineRepository = orderLineRepository;
+        }
+}
+
+
+
+

To give service to the defined User Stories we will need to implement the following services:

+
+
+
    +
  • +

    provide all available dishes.

    +
  • +
  • +

    save a booking.

    +
  • +
  • +

    save an order.

    +
  • +
  • +

    provide a list of bookings (only for waiters) and allow filtering.

    +
  • +
  • +

    provide a list of orders (only for waiters) and allow filtering.

    +
  • +
  • +

    login service (see the Security section).

    +
  • +
  • +

    provide the current user data (see the Security section)

    +
  • +
+
+
+

Following the [naming conventions] proposed for devon4Net applications we will define the following end points for the listed services.

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that do not belong to the Order entity).

    +
  • +
  • +

    (POST) /mythaistar/login.

    +
  • +
  • +

    (GET) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

You can find all the details for the services implementation in the Swagger definition included in the My Thai Star project on Github.

+
+
+
+
+

Api Exposed

+
+
+

The devon4Net.Business.Controller assembly in the business layer of a component will store the definition of the service by an interface. In this definition of the service we will set up the endpoints of the service, the type of data expected and returned, the HTTP method for each endpoint of the service and other configurations if needed.

+
+
+
+
        /// <summary>
+        /// Method to make a reservation with potential guests. The method returns the reservation token with the format: {(CB_|GB_)}{now.Year}{now.Month:00}{now.Day:00}{_}{MD5({Host/Guest-email}{now.Year}{now.Month:00}{now.Day:00}{now.Hour:00}{now.Minute:00}{now.Second:00})}
+        /// </summary>
+
+        /// <param name="bookingView"></param>
+        /// <response code="201">Ok.</response>
+        /// <response code="400">Bad request. Parser data error.</response>
+        /// <response code="401">Unauthorized. Authentication fail.</response>
+        /// <response code="403">Forbidden. Authorization error.</response>
+        /// <response code="500">Internal Server Error. The search process ended with error.</response>
+        [HttpPost]
+        [HttpOptions]
+        [Route("/mythaistar/services/rest/bookingmanagement/v1/booking")]
+        [AllowAnonymous]
+        [EnableCors("CorsPolicy")]
+        public IActionResult BookingBooking([FromBody]BookingView bookingView)
+        {
+...
+
+
+
+

Using the summary annotations and attributes will tell to swagger the contract via the XML doc generated on compiling time. This doc will be stored in XmlDocumentation folder.

+
+
+

The Api methods will be exposed on the application layer.

+
+
+
+
+

Google Mail API Consumer

+
+ +
+

|== == == == == == == == == == == = +|Application| MyThaiStarEmailService.exe +|Config file| MyThaiStarEmailService.exe.Config +|Default port|8080 +|== == == == == == == == == == == =

+
+
+
+
+

Overview

+
+
+
    +
  1. +

    Execute MyThaiStarEmailService.exe.

    +
  2. +
  3. +

    The first time google will ask you for credentials +(just one time) in your default browser:

    +
    + +
    +
  4. +
  5. +

    Visit the url: http://localhost:8080/swagger

    +
  6. +
  7. +

    Your server is ready!

    +
  8. +
+
+
+
+GMail Service +
+
Figure 1. GMail Server Swagger contract page
+
+
+
+
+

JSON Example

+
+
+

This is the JSON example to test with swagger client. Please read the swagger documentation.

+
+
+
+
{
+   "EmailFrom":"mythaistarrestaurant@gmail.com",
+   "EmailAndTokenTo":{
+      "MD5Token1":" Email_Here!@gmail.com",
+      "MD5Token2":" Email_Here!@gmail.com"
+   },
+   "EmailType":0,
+   "DetailMenu":[
+      "Thai Spicy Basil Fried Rice x2",
+      "Thai green chicken curry x2"
+   ],
+   "BookingDate":"2017-05-31T12:53:39.7864723+02:00",
+   "Assistants":2,
+   "BookingToken":"MD5Booking",
+   "Price":20.0,
+   "ButtonActionList":{
+      "http://accept.url":"Accept",
+      "http://cancel.url":"Cancel"
+   },
+   "Host":{
+      " Email_Here!@gmail.com":"José Manuel"
+   }
+}
+
+
+
+
+
+

Configure the service port

+
+
+

If you want to change the default port, please edit the config file and +change the next entry in appSettings node:

+
+
+
+
<appSettings>
+   <add key="LocalListenPort" value="8080" />
+</appSettings>
+
+
+
+
+
+ + +
+ +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4net/1.0/templates.html b/docs/devon4net/1.0/templates.html new file mode 100644 index 00000000..2e58f38d --- /dev/null +++ b/docs/devon4net/1.0/templates.html @@ -0,0 +1,416 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Templates

+
+
+

Overview

+
+
+

The given .Net Core and .Net Framework templates allow you to start coding an application with the following functionality ready to use:

+
+
+

Please refer to User guide in order to start developing.

+
+
+
+
+

Net Core 3.0

+
+
+

The .Net Core 3.0 template allows you to start developing an n-layer server application to provide the latest features. The template can be used in Visual Studio Code and Visual Studio 2019.

+
+
+

The application result can be deployed as a console application, microservice or web page.

+
+
+

To start developing with the devon4Net template, please follow these instructions:

+
+
+

Using devon4Net template

+ +
+
+

Option 1

+
+
    +
  1. +

    Open your favourite terminal (Win/Linux/iOS)

    +
  2. +
  3. +

    Go to future project’s path

    +
  4. +
  5. +

    Type dotnet new --install Devon4Net.WebAPI.Template

    +
  6. +
  7. +

    Type dotnet new Devon4NetAPI

    +
  8. +
  9. +

    Go to project’s path

    +
  10. +
  11. +

    You are ready to start developing with devon4Net

    +
  12. +
+
+
+
+

Option 2

+
+
    +
  1. +

    Create a new dotnet API project from scratch

    +
  2. +
  3. +

    Add the NuGet package reference to your project

    +
  4. +
  5. +

    Type dotnet new --install Devon4Net.WebAPI.Template

    +
  6. +
+
+
+
+
+
+

Net Core 2.1.x

+
+
+

The .Net Core 2.1.x template allows you to start developing an n-layer server application to provide the latest features. The template can be used in Visual Studio Code and Visual Studio 2017.

+
+
+

The application result can be deployed as a console application, microservice or web page.

+
+
+

To start developing with the devon4Net template, please follow these instructions:

+
+
+

Using devon4Net template

+
+
    +
  1. +

    Open your favourite terminal (Win/Linux/iOS)

    +
  2. +
  3. +

    Go to future project’s path

    +
  4. +
  5. +

    Type dotnet new --install Devon4Net.WebAPI.Template::1.0.8

    +
  6. +
  7. +

    Type dotnet new Devon4NetAPI

    +
  8. +
  9. +

    Go to project’s path

    +
  10. +
  11. +

    You are ready to start developing with devon4Net

    +
  12. +
+
+
+ + + + + +
+ + +For the latest updates on references packages, please get the sources from Github +
+
+
+
+
+
+ + +
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4net/1.0/userguide.html b/docs/devon4net/1.0/userguide.html new file mode 100644 index 00000000..d8616ac8 --- /dev/null +++ b/docs/devon4net/1.0/userguide.html @@ -0,0 +1,1529 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+
+technical architecture +
+
+
+

devon4net Guide

+
+ +
+
+
+

Introduction

+
+
+

Welcome to devon4net framework user guide. In this document you will find the information regarding how to start and deploy your project using the guidelines proposed in our solution.

+
+
+

All the guidelines shown and used in this document are a set of rules and conventions proposed and supported by Microsoft and the industry.

+
+
+
+
+

The package

+
+
+

Devon4Net package solution contains:

+
+
+

|== == == == == == == == == == == = +|File / Folder|Content +|Documentation| User documentation in HTML format +|Modules| Contains the source code of the different devon4net modules +|Samples| Different samples implemented in .NET and .NET Core. Also includes My Thai Star Devon flagship restaurant application +|Templates| Main .net Core template to start developing from scratch +|License| License agreement +|README.md| Github main page +|TERMS_OF_USE.adoc| The devon4net terms of use +|LICENSE| The devon license +|Other files| Such as the code of conduct and contributing guide +|== == == == == == == == == == == =

+
+
+
+
+

Application templates

+
+
+

The application templates given in the bundle are ready to use.

+
+
+

At the moment .net Core template is supported. The template is ready to be used as a simple console Kestrel application or being deployed in a web server like IIS.

+
+
+
+
+

Samples

+
+ +
+
+
+

== My Thai Star

+
+
+

You can find My Thai Star .NET port application at Github.

+
+
+ + + + + +
+ + +As devon4net has been migrated to the latest version of .net core, the template is not finished yet. +
+
+
+
+
+

Cookbook

+
+ +
+
+
+

Data management

+
+
+

To use Entity Framework Core, install the package for the database provider(s) you want to target. This walk-through uses SQL Server.

+
+
+

For a list of available providers see Database Providers

+
+
+
    +
  • +

    Go to Tools > NuGet Package Manager > Package Manager Console

    +
  • +
  • +

    Run Install-Package Microsoft.EntityFrameworkCore.SqlServer

    +
  • +
+
+
+

We will be using some Entity Framework Tools to create a model from the database. So we will install the tools package as well:

+
+
+
    +
  • +

    Run Install-Package Microsoft.EntityFrameworkCore.Tools

    +
  • +
+
+
+

We will be using some ASP.NET Core Scaffolding tools to create controllers and views later on. So we will install this design package as well:

+
+
+
    +
  • +

    Run Install-Package Microsoft.VisualStudio.Web.CodeGeneration.Design

    +
  • +
+
+
+
+
+

== Entity Framework Code first

+
+
+

In order to design your database model from scratch, we encourage to follow the Microsoft guidelines described here.

+
+
+
+
+

== Entity Framework Database first

+
+
+
    +
  • +

    Go to Tools > NuGet Package Manager > Package Manager Console

    +
  • +
  • +

    Run the following command to create a model from the existing database:

    +
  • +
+
+
+
+
Scaffold-DbContext "Your connection string to existing database" Microsoft.EntityFrameworkCore.SqlServer -OutputDir Models
+
+
+
+

The command will create the database context and the mapped entities as well inside of Models folder.

+
+
+
+
+

== Register your context with dependency injection

+
+
+

Services are registered with dependency injection during application startup.

+
+
+

In order to register your database context (or multiple database contexts as well) you can add the following line at the ConfigureDbService method at startup.cs:

+
+
+
+
       private void SetupDatabase(IServiceCollection services)
+        {
+            services.SetupDatabase<TodoContext>(Configuration, "Default", WebAPI.Configuration.Enums.DatabaseType.InMemory);
+        }
+
+
+
+

Where:

+
+
+

|== == == == == == == == == == == = +|Param|Description +|TodoContext| Is the database context definition +|Default| Is the connection string defined at ConnectionString node at the appsettings configuration file +|WebAPI.Configuration.Enums.DatabaseType.InMemory| Is the database driver selection. In this case InMemory data base is chosen +|== == == == == == == == == == == =

+
+
+

The supported databases are:

+
+
+
    +
  • +

    SqlServer

    +
  • +
  • +

    Sqlite

    +
  • +
  • +

    InMemory

    +
  • +
  • +

    Cosmos

    +
  • +
  • +

    PostgreSQL

    +
  • +
  • +

    MySql

    +
  • +
  • +

    MariaDb

    +
  • +
  • +

    FireBird

    +
  • +
  • +

    Oracle

    +
  • +
  • +

    MSAccess

    +
  • +
+
+
+
+
+

Repositories and Services

+
+
+

Services and Repositories are an important part of the devon4net proposal. To make them work properly, they must first be declared and injected in Startup.cs in the DI region.

+
+
+

Services are declared in devon4net.Business.Common and injected in Controller classes when needed. Use services to build your application logic.

+
+
+
+technical architecture +
+
Figure 1. Screenshot of devon4net.Business.Common project in depth
+
+
+

For example, My Thai Star Booking controller constructor looks like this:

+
+
+
+
        public BookingController(IBookingService bookingService, IMapper mapper)
+        {
+            BookingService = bookingService;
+            Mapper = mapper;
+
+        }
+
+
+
+

Currently devon4net has a Unit of Work class in order to perform CRUD operations to database making use of your designed model context.

+
+
+

Repositories are declared at devon4net.Domain.UnitOfWork project and make use of Unit of Work class.

+
+
+

The common methods to perform CRUD operations (where <T> is an entity from your model) are:

+
+
+
    +
  • +

    Sync methods:

    +
  • +
+
+
+
+
IList<T> GetAll(Expression<Func<T, bool>> predicate = null);
+T Get(Expression<Func<T, bool>> predicate = null);
+IList<T> GetAllInclude(IList<string> include, Expression<Func<T, bool>> predicate = null);
+T Create(T entity);
+void Delete(T entity);
+void DeleteById(object id);
+void Delete(Expression<Func<T, bool>> where);
+void Edit(T entity);
+
+
+
+
    +
  • +

    Async methods:

    +
  • +
+
+
+
+
Task<IList<T>> GetAllAsync(Expression<Func<T, bool>> predicate = null);
+Task<T> GetAsync(Expression<Func<T, bool>> predicate = null);
+Task<IList<T>> GetAllIncludeAsync(IList<string> include, Expression<Func<T, bool>> predicate = null);
+
+
+
+

If you perform a Commit operation and an error happens, changes will be rolled back.

+
+
+
+
+

Swagger integration

+
+
+

The given templates allow you to specify the API contract through Swagger integration, and the controller classes are responsible for exposing methods making use of comments in the source code.

+
+
+

The next example shows how to comment the method with summaries in order to define the contract. Add (Triple Slash) XML Documentation To Swagger:

+
+
+
+
/// <summary>
+/// Method to get reservations
+/// </summary>
+/// <response code="201">Ok.</response>
+/// <response code="400">Bad request. Parser data error.</response>
+/// <response code="401">Unauthorized. Authentication fail.</response>
+/// <response code="403">Forbidden. Authorization error.</response>
+/// <response code="500">Internal Server Error. The search process ended with error.</response>
+[HttpPost]
+[Route("/mythaistar/services/rest/bookingmanagement/v1/booking/search")]
+//[Authorize(Policy = "MTSWaiterPolicy")]
+[AllowAnonymous]
+[EnableCors("CorsPolicy")]
+public async Task<IActionResult> BookingSearch([FromBody]BookingSearchDto bookingSearchDto)
+{
+
+
+
+

In order to be effective and make use of the comments to build the API contract, the project which contains the controller classes must generate the XML document file. To achieve this, the XML documentation file must be checked in project settings tab:

+
+
+
+technical architecture +
+
Figure 2. Project settings tab
+
+
+

We propose to generate the file under the XmlDocumentation folder. For example in devon4net.Domain.Entities project in My Thai Star .NET implementation the output folder is:

+
+
+
+
`XmlDocumentation\devon4net.Business.Common.xml`
+
+
+
+

The file devon4net.Business.Common.xml won’t appear until you build the project. Once the file is generated, please modify its properties as a resource and set it to be Copy always .

+
+
+
+technical architecture +
+
Figure 3. Swagger XML document file properties
+
+
+

Once you have this, the swagger user interface will show the method properties defined in your controller comments.

+
+
+

Making use of this technique, controllers are not encapsulated in the application project. Also, you can develop your controller classes in different projects to obtain code reusability.

+
+
+

Swagger comment:

+
+
+

|== == == == == == == == == == == = +|Comment|Functionality +|<summary>| Will map to the operation’s summary +|<remarks>| Will map to the operation’s description (shown as "Implementation Notes" in the UI) +|<response code="###">| Specifies the different response of the target method +|<param>| Will define the parameter(s) of the target method +| +|== == == == == == == == == == == =

+
+
+

Please check Microsoft’s site regarding to summary notations.

+
+
+
+
+

Logging module

+
+
+

An important part of the life of software is the need for logs and traces. devon4net has a log module pre-configured to achieve this important point.

+
+
+

By default Microsoft provides a logging module on .NET Core applications. This module is open and can be extended. devon4net uses the Serilog implementation. This implementation provides a huge quantity of information about events and traces.

+
+
+
+
+

== Log file

+
+
+

devon4net can write the log information to a simple text file. You can configure the file name and folder at appsettings.json file (LogFile attribute) at devon4net.Application.WebApi project.

+
+
+
+
+

== Database log

+
+
+

devon4net can write the log information to a SQLite database. You can configure the file name and folder at appsettings.json file (LogDatabase attribute) at devon4net.Application.WebApi project.

+
+
+

With this method you can launch queries in order to search the information you are looking for.

+
+
+
+
+

== Seq log

+
+
+

devon4net can write the log information to a Seq server. You can configure the Seq server URL at the appsettings.json file (SeqLogServerUrl attribute) at the devon4net.Application.WebApi project.

+
+
+

With this method you can make queries via HTTP.

+
+
+
+serilog seq +
+
+
+

By default you can find the log information at Logs folder.

+
+
+
+
+

JWT module

+
+
+

JSON Web Tokens are an open, industry standard RFC 7519 method for representing claims securely between two parties allowing you to decode, verify and generate JWT.

+
+
+

You should use JWT for:

+
+
+
    +
  • +

    Authentication : allowing the user to access routes, services, and resources that are permitted with that token.

    +
  • +
  • +

    Information Exchange: JSON Web Tokens are a good way of securely transmitting information between parties. Additionally, as the signature is calculated using the header and the payload, you can also verify that the content has not been tampered with.

    +
  • +
+
+
+

The JWT module is configured at Startup.cs inside devon4net.Application.WebApi project from .NET Core template. In this class you can configure the different authentication policy and JWT properties.

+
+
+

Once the user has been authenticated, the client perform the call to the backend with the attribute Bearer plus the token generated at server side.

+
+
+
+jwt +
+
+
+

On My Thai Star sample there are two predefined users: user0 and Waiter. Once they log in the application, the client (Angular/Xamarin) will manage the server call with the json web token. With this method we can manage the server authentication and authorization.

+
+
+

You can find more information about JWT at jwt.io

+
+
+
+
+

AOP module

+
+
+

AOP (Aspect Oriented Programming) tracks all information when a method is called. AOP also tracks the input and output data when a method is called.

+
+
+

By default devon4net has AOP module pre-configured and activated for controllers at Startup.cs file at devon4net.Application.WebApi:

+
+
+
+
options.Filters.Add(new Infrastructure.AOP.AopControllerAttribute(Log.Logger));
+
+options.Filters.Add(new Infrastructure.AOP.AopExceptionFilter(Log.Logger));
+
+
+
+

This configuration allows all Controller classes to be tracked. If you don’t need to track the info comment the lines written before.

+
+
+
+
+

Docker support

+
+
+

devon4net Core projects are ready to be integrated with docker.

+
+
+

My Thai Star application sample is ready to be used with Linux docker containers. The Readme file explains how to launch and set up the sample application.

+
+
+
    +
  • +

    angular : Angular client to support backend. Just binaries.

    +
  • +
  • +

    database : Database scripts and .bak file

    +
  • +
  • +

    mailservice: Microservice implementation to send notifications.

    +
  • +
  • +

    netcore: Server side using .net core 2.0.x.

    +
  • +
  • +

    xamarin: Xamarin client based on Excalibur framework from The Netherlands using XForms.

    +
  • +
+
+
+

Docker configuration and docker-compose files are provided.

+
+
+
+
+

Testing with XUnit

+
+
+
+
+

xUnit.net is a free, open source, community-focused unit testing tool for the .NET Framework. Written by the original inventor of NUnit v2, xUnit.net is the latest technology for unit testing C#, F#, VB.NET and other .NET languages. xUnit.net works with ReSharper, CodeRush, TestDriven.NET and Xamarin. It is part of the .NET Foundation, and operates under their code of conduct. It is licensed under Apache 2 (an OSI approved license).

+
+
+
+— About xUnit.net
+https://xunit.github.io/#documentation +
+
+
+

Facts are tests which are always true. They test invariant conditions.

+
+
+

Theories are tests which are only true for a particular set of data.

+
+
+
+
+

The first test

+
+
+
+
using Xunit;
+
+namespace MyFirstUnitTests
+{
+    public class Class1
+    {
+        [Fact]
+        public void PassingTest()
+        {
+            Assert.Equal(4, Add(2, 2));
+        }
+
+        [Fact]
+        public void FailingTest()
+        {
+            Assert.Equal(5, Add(2, 2));
+        }
+
+        int Add(int x, int y)
+        {
+            return x + y;
+        }
+    }
+}
+
+
+
+
+
+

The first test with theory

+
+
+

Theory attribute is used to create tests with input params:

+
+
+
+
[Theory]
+[InlineData(3)]
+[InlineData(5)]
+[InlineData(6)]
+public void MyFirstTheory(int value)
+{
+    Assert.True(IsOdd(value));
+}
+
+bool IsOdd(int value)
+{
+    return value % 2 ==  1;
+}
+
+
+
+
+
+

Cheat Sheet

+
+
+

|== == == == == == == == == == == = +|Operation| Example +|Test|

+
+
+
+
public void Test()
+{
+}
+|Setup|public class TestFixture {
+public TestFixture()
+{
+
+...
+
+    }
+
+}
+|Teardown|public class TestFixture : IDisposable
+
+{
+
+public void Dispose() {
+
+ ...
+ }
+
+}
+
+
+
+

|== == == == == == == == == == == =

+
+
+
+
+

Console runner return codes

+
+
+

|== == == == == == == == == == == = +|Code| Meaning +|0|The tests ran successfully. +|1|One or more of the tests failed. +|2|The help page was shown, either because it was requested, or because the user did not provide any command line arguments. +|3| There was a problem with one of the command line options passed to the runner. +|4|There was a problem loading one or more of the test assemblies (for example, if a 64-bit only assembly is run with the 32-bit test runner). +|== == == == == == == == == == == =

+
+
+
+
+

Publishing

+
+ +
+
+
+

== Nginx

+
+
+

In order to deploy your application to a Nginx server on Linux platform you can follow the instructions from Microsoft here.

+
+
+
+
+

== IIS

+
+
+

This section shows the configuration options that the .NET Core application must implement.

+
+
+

Supported operating systems:

+
+
+
    +
  • +

    Windows 7 and newer

    +
  • +
  • +

    Windows Server 2008 R2 and newer*

    +
  • +
+
+
+

WebListener server will not work in a reverse-proxy configuration with IIS. You must use the Kestrel server.

+
+
+

IIS configuration

+
+
+

Enable the Web Server (IIS) role and establish role services.

+
+
+

Windows desktop operating systems

+
+
+

Navigate to Control Panel > Programs > Programs and Features > Turn Windows features on or off (left side of the screen). Open the group for Internet Information Services and Web Management Tools. Check the box for IIS Management Console. Check the box for World Wide Web Services. Accept the default features for World Wide Web Services or customize the IIS features to suit your needs.

+
+
+
+iis 1 +
+
+
+

*Conceptually, the IIS configuration described in this document also applies to hosting ASP.NET Core applications on Nano Server IIS, but refer to ASP.NET Core with IIS on Nano Server for specific instructions.

+
+
+

Windows Server operating systems +For server operating systems, use the Add Roles and Features wizard via the Manage menu or the link in Server Manager. On the Server Roles step, check the box for Web Server (IIS).

+
+
+
+iis 2 +
+
+
+

On the Role services step, select the IIS role services you need or accept the default role services provided.

+
+
+
+iis 3 +
+
+
+

Proceed through the Confirmation step to install the web server role and services. A server/IIS restart is not required after installing the Web Server (IIS) role.

+
+
+

Install the .NET Core Windows Server Hosting bundle

+
+
+
    +
  1. +

    Install the .NET Core Windows Server Hosting bundle on the hosting system. The bundle will install the .NET Core Runtime, .NET Core Library, and the ASP.NET Core Module. The module creates the reverse-proxy between IIS and the Kestrel server. Note: If the system doesn’t have an Internet connection, obtain and install the Microsoft Visual C++ 2015 Re-distributable before installing the .NET Core Windows Server Hosting bundle.

    +
  2. +
  3. +

    Restart the system or execute net stop was /y followed by net start w3svc from a command prompt to pick up a change to the system PATH.

    +
  4. +
+
+
+ + + + + +
+ + +If you use an IIS Shared Configuration, see ASP.NET Core Module with IIS Shared Configuration. +
+
+
+

To configure IISIntegration service options, include a service configuration for IISOptions in ConfigureServices:

+
+
+
+
services.Configure<IISOptions>(options =>
+{
+    ...
+});
+
+
+
+

|== == == == == == == == == == == = +|Option|Default|Setting +|AutomaticAuthentication| true |If true, the authentication middleware sets the HttpContext.User and responds to generic challenges. If false, the authentication middleware only provides an identity (HttpContext.User) and responds to challenges when explicitly requested by the Authentication Scheme. Windows Authentication must be enabled in IIS for AutomaticAuthentication to function. +|AuthenticationDisplayName | null| Sets the display name shown to users on login pages. +|ForwardClientCertificate |true|If true and the MS-ASPNETCORE-CLIENTCERT request header is present, the HttpContext.Connection.ClientCertificate is populated. +|== == == == == == == == == == == =

+
+
+

web.config

+
+
+

The web.config file configures the ASP.NET Core Module and provides other IIS configuration. Creating, transforming, and publishing web.config is handled by Microsoft.NET.Sdk.Web, which is included when you set your project’s SDK at the top of your .csproj file, <Project Sdk="Microsoft.NET.Sdk.Web">. To prevent the MSBuild target from transforming your web.config file, add the <IsTransformWebConfigDisabled> property to your project file with a setting of true:

+
+
+
+
<PropertyGroup>
+  <IsTransformWebConfigDisabled>true</IsTransformWebConfigDisabled>
+</PropertyGroup>
+
+
+
+
+
+

== Azure

+
+
+

In order to deploy your application to Azure platform you can follow the instructions from Microsoft:

+
+
+

Set up the development environment

+
+
+ +
+
+

Create a web app

+
+
+

In the Visual Studio Start Page, select File > New > Project…​

+
+
+
+File menu +
+
+
+

Complete the New Project dialog:

+
+
+
    +
  • +

    In the left pane, select .NET Core.

    +
  • +
  • +

    In the center pane, select ASP.NET Core Web Application.

    +
  • +
  • +

    Select OK.

    +
  • +
+
+
+
+New Project dialog +
+
+
+

In the New ASP.NET Core Web Application dialog:

+
+
+
    +
  • +

    Select Web Application.

    +
  • +
  • +

    Select Change Authentication.

    +
  • +
+
+
+
+New Project dialog +
+
+
+

The Change Authentication dialog appears.

+
+
+
    +
  • +

    Select Individual User Accounts.

    +
  • +
  • +

    Select OK to return to the New ASP.NET Core Web Application, then select OK again.

    +
  • +
+
+
+
+New ASP.NET Core Web authentication dialog +
+
+
+

Visual Studio creates the solution.

+
+
+

Run the app locally

+
+
+
    +
  • +

    Choose Debug then Start Without Debugging to run the app locally.

    +
  • +
  • +

    Click the About and Contact links to verify the web application works.

    +
  • +
+
+
+
+Web application open in Microsoft Edge on localhost +
+
+
+
    +
  • +

    Select Register and register a new user. You can use a fictitious email address. When you submit, the page displays the following error:

    +
  • +
+
+
+

"Internal Server Error: A database operation failed while processing the request. SQL exception: Cannot open the database. Applying existing migrations for Application DB context may resolve this issue."

+
+
+
    +
  • +

    Select Apply Migrations and, once the page updates, refresh the page.

    +
  • +
+
+
+
+Internal Server Error: A database operation failed while processing the request. SQL exception: Cannot open the database. Applying existing migrations for Application DB context may resolve this issue. +
+
+
+

The app displays the email used to register the new user and a Log out link.

+
+
+
+Web application open in Microsoft Edge. The Register link is replaced by the text Hello email@domain.com! +
+
+
+

Deploy the app to Azure

+
+
+

Close the web page, return to Visual Studio, and select Stop Debugging from the Debug menu.

+
+
+

Right-click on the project in Solution Explorer and select Publish…​.

+
+
+
+Contextual menu open with Publish link highlighted +
+
+
+

In the Publish dialog, select Microsoft Azure App Service and click Publish.

+
+
+
+Publish dialog +
+
+
+
    +
  • +

    Name the app a unique name.

    +
  • +
  • +

    Select a subscription.

    +
  • +
  • +

    Select New…​ for the resource group and enter a name for the new resource group.

    +
  • +
  • +

    Select New…​ for the app service plan and select a location near you. You can keep the name that is generated by default.

    +
  • +
+
+
+
+App Service dialog +
+
+
+
    +
  • +

    Select the Services tab to create a new database.

    +
  • +
  • +

    Select the green + icon to create a new SQL Database

    +
  • +
+
+
+
+New SQL Database +
+
+
+
    +
  • +

    Select New…​ on the Configure SQL Database dialog to create a new database.

    +
  • +
+
+
+
+New SQL Database and server +
+
+
+

The Configure SQL Server dialog appears.

+
+
+
    +
  • +

    Enter an administrator user name and password, and then select OK. Don’t forget the user name and password you create in this step. You can keep the default Server Name.

    +
  • +
  • +

    Enter names for the database and connection string.

    +
  • +
+
+
+
+
+

== Note

+
+
+

"admin" is not allowed as the administrator user name.

+
+
+
+Configure SQL Server dialog +
+
+
+
    +
  • +

    Select OK.

    +
  • +
+
+
+

Visual Studio returns to the Create App Service dialog.

+
+
+
    +
  • +

    Select Create on the Create App Service dialog.

    +
  • +
+
+
+
+Configure SQL Database dialog +
+
+
+
    +
  • +

    Click the Settings link in the Publish dialog.

    +
  • +
+
+
+
+Publish dialog: Connection panel +
+
+
+

On the Settings page of the Publish dialog:

+
+
+
    +
  • +

    Expand Databases and check Use this connection string at runtime.

    +
  • +
  • +

    Expand Entity Framework Migrations and check Apply this migration on publish.

    +
  • +
  • +

    Select Save. Visual Studio returns to the Publish dialog.

    +
  • +
+
+
+
+Publish dialog: Settings panel +
+
+
+

Click Publish. Visual Studio will publish your app to Azure and launch the cloud app in your browser.

+
+
+

Test your app in Azure

+
+
+
    +
  • +

    Test the About and Contact links

    +
  • +
  • +

    Register a new user

    +
  • +
+
+
+
+Web application opened in Microsoft Edge on Azure App Service +
+
+
+

Update the app

+
+
+
    +
  • +

    Edit the Pages/About.cshtml Razor page and change its contents. For example, you can modify the paragraph to say "Hello ASP.NET Core!":

    +
    +
    +
    html<button class="action copy" data-bi-name="copy">Copy</button>
    +
    +
    +
  • +
+
+
+
+
@page
+@model AboutModel
+@{
+    ViewData["Title"] = "About";
+}
+<h2>@ViewData["Title"]</h2>
+<h3>@Model.Message</h3>
+
+    <p>Hello ASP.NET Core!</p>
+
+
+
+
    +
  • +

    Right-click on the project and select Publish…​ again.

    +
  • +
+
+
+
+Contextual menu open with Publish link highlighted +
+
+
+
    +
  • +

    After the app is published, verify the changes you made are available on Azure.

    +
  • +
+
+
+
+Verify task is complete +
+
+
+

Clean up

+
+
+

When you have finished testing the app, go to the Azure portal and delete the app.

+
+
+
    +
  • +

    Select Resource groups, then select the resource group you created.

    +
  • +
+
+
+
+Azure Portal: Resource Groups in sidebar menu +
+
+
+
    +
  • +

    In the Resource groups page, select Delete.

    +
  • +
+
+
+
+Azure Portal: Resource Groups page +
+
+
+
    +
  • +

    Enter the name of the resource group and select Delete. Your app and all other resources created in this tutorial are now deleted from Azure.

    +
  • +
+
+
+
+ +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/_images/images/CG-architectureBackground.png b/docs/devon4ng/1.0/_images/images/CG-architectureBackground.png new file mode 100644 index 00000000..91b3d696 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/CG-architectureBackground.png differ diff --git a/docs/devon4ng/1.0/_images/images/CapgeminiLogo.png b/docs/devon4ng/1.0/_images/images/CapgeminiLogo.png new file mode 100644 index 00000000..9ce5e7e5 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/CapgeminiLogo.png differ diff --git a/docs/devon4ng/1.0/_images/images/CapgeminiLogoWhite.png b/docs/devon4ng/1.0/_images/images/CapgeminiLogoWhite.png new file mode 100644 index 00000000..3e5bf717 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/CapgeminiLogoWhite.png differ diff --git a/docs/devon4ng/1.0/_images/images/Example_Angular_Restaurant_Screen.png b/docs/devon4ng/1.0/_images/images/Example_Angular_Restaurant_Screen.png new file mode 100644 index 00000000..3d8793d1 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/Example_Angular_Restaurant_Screen.png differ diff --git a/docs/devon4ng/1.0/_images/images/IntegratedIDE.png b/docs/devon4ng/1.0/_images/images/IntegratedIDE.png new file mode 100644 index 00000000..bb2068dc Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/IntegratedIDE.png differ diff --git a/docs/devon4ng/1.0/_images/images/Logo.png b/docs/devon4ng/1.0/_images/images/Logo.png new file mode 100644 index 00000000..f6e4e645 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/Logo.png differ diff --git a/docs/devon4ng/1.0/_images/images/OASP-Layering.png b/docs/devon4ng/1.0/_images/images/OASP-Layering.png new file mode 100644 index 00000000..84a065eb Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/OASP-Layering.png differ diff --git a/docs/devon4ng/1.0/_images/images/OASP.png b/docs/devon4ng/1.0/_images/images/OASP.png new file mode 100644 index 00000000..2b0e2574 Binary files 
/dev/null and b/docs/devon4ng/1.0/_images/images/OASP.png differ diff --git a/docs/devon4ng/1.0/_images/images/OASP_dark.png b/docs/devon4ng/1.0/_images/images/OASP_dark.png new file mode 100644 index 00000000..edf59f1a Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/OASP_dark.png differ diff --git a/docs/devon4ng/1.0/_images/images/OASP_technologies_used.png b/docs/devon4ng/1.0/_images/images/OASP_technologies_used.png new file mode 100644 index 00000000..98db5b7f Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/OASP_technologies_used.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-clarity-layout/0.png b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/0.png new file mode 100644 index 00000000..3e4b31d7 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/0.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-clarity-layout/1.png b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/1.png new file mode 100644 index 00000000..a168ebfa Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/1.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-clarity-layout/10.png b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/10.png new file mode 100644 index 00000000..ee452fec Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/10.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-clarity-layout/11.png b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/11.png new file mode 100644 index 00000000..bf031376 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/11.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-clarity-layout/12.png b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/12.png new file mode 100644 index 00000000..37ecfef2 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/12.png 
differ diff --git a/docs/devon4ng/1.0/_images/images/angular-clarity-layout/13.png b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/13.png new file mode 100644 index 00000000..aa68cf4f Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/13.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-clarity-layout/14.png b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/14.png new file mode 100644 index 00000000..63c2ed55 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/14.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-clarity-layout/2.png b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/2.png new file mode 100644 index 00000000..3e4b31d7 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/2.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-clarity-layout/3.png b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/3.png new file mode 100644 index 00000000..21f206b0 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/3.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-clarity-layout/4.png b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/4.png new file mode 100644 index 00000000..c097c867 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/4.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-clarity-layout/5.png b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/5.png new file mode 100644 index 00000000..d0941916 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/5.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-clarity-layout/6.png b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/6.png new file mode 100644 index 00000000..8f7450f8 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/6.png differ diff 
--git a/docs/devon4ng/1.0/_images/images/angular-clarity-layout/7.png b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/7.png new file mode 100644 index 00000000..0244ebdb Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/7.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-clarity-layout/8.png b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/8.png new file mode 100644 index 00000000..e4ed5871 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/8.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-clarity-layout/9.png b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/9.png new file mode 100644 index 00000000..5464bac8 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-clarity-layout/9.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-library/result.png b/docs/devon4ng/1.0/_images/images/angular-library/result.png new file mode 100644 index 00000000..2fe702a8 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-library/result.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_0.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_0.png new file mode 100644 index 00000000..f4aeadca Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_0.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_1.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_1.png new file mode 100644 index 00000000..3e4b31d7 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_1.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_10.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_10.png new file mode 100644 index 00000000..d84563a7 Binary files /dev/null and 
b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_10.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_11.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_11.png new file mode 100644 index 00000000..2eeb8fdd Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_11.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_12.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_12.png new file mode 100644 index 00000000..d0e81eaa Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_12.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_13.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_13.png new file mode 100644 index 00000000..4b3b4074 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_13.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_14.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_14.png new file mode 100644 index 00000000..f1ff7d9f Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_14.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_15.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_15.png new file mode 100644 index 00000000..b00554fe Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_15.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_16.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_16.png new file mode 100644 index 00000000..4367bd60 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_16.png differ 
diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_17.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_17.png new file mode 100644 index 00000000..d3f5edfb Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_17.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_18.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_18.png new file mode 100644 index 00000000..54cb5b00 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_18.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_19.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_19.png new file mode 100644 index 00000000..dc441ee0 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_19.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_2.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_2.png new file mode 100644 index 00000000..a8f8b70f Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_2.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_20.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_20.png new file mode 100644 index 00000000..6728163f Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_20.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_21.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_21.png new file mode 100644 index 00000000..a4f23dba Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_21.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_22.png 
b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_22.png new file mode 100644 index 00000000..98a258c7 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_22.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_3.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_3.png new file mode 100644 index 00000000..625228b0 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_3.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_4.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_4.png new file mode 100644 index 00000000..97f33148 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_4.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_5.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_5.png new file mode 100644 index 00000000..32de7eee Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_5.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_6.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_6.png new file mode 100644 index 00000000..331b345f Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_6.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_7.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_7.png new file mode 100644 index 00000000..fc7a638e Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_7.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_8.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_8.png new file mode 100644 index 
00000000..db26df0a Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_8.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_9.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_9.png new file mode 100644 index 00000000..cae3b40a Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/Screenshot_9.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-primeng-layout/tablestyle.png b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/tablestyle.png new file mode 100644 index 00000000..8f8a5435 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-primeng-layout/tablestyle.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure1.png b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure1.png new file mode 100644 index 00000000..8638e11e Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure1.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure10.png b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure10.png new file mode 100644 index 00000000..b85a70bb Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure10.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure11.png b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure11.png new file mode 100644 index 00000000..b351a5eb Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure11.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure12.png b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure12.png new file mode 100644 index 00000000..7a2f4d7b Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure12.png differ diff --git 
a/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure13.png b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure13.png new file mode 100644 index 00000000..9d2e909f Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure13.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure14.png b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure14.png new file mode 100644 index 00000000..fad86295 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure14.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure15.png b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure15.png new file mode 100644 index 00000000..d7e0f1a7 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure15.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure16.png b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure16.png new file mode 100644 index 00000000..cfe9fbd9 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure16.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure17.png b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure17.png new file mode 100644 index 00000000..b60a8367 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure17.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure18.png b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure18.png new file mode 100644 index 00000000..fe6ee92b Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure18.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure19.png b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure19.png new file mode 100644 index 00000000..1fe8f608 Binary files /dev/null and 
b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure19.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure20.png b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure20.png new file mode 100644 index 00000000..54267100 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure20.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure21.png b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure21.png new file mode 100644 index 00000000..db215fce Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure21.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure3.png b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure3.png new file mode 100644 index 00000000..9fa7f617 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure3.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure4.png b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure4.png new file mode 100644 index 00000000..58c84427 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure4.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure5.png b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure5.png new file mode 100644 index 00000000..883bcb04 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure5.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure6.png b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure6.png new file mode 100644 index 00000000..3c05e35c Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure6.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure7.png b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure7.png 
new file mode 100644 index 00000000..3c3cbf0c Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure7.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure8.png b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure8.png new file mode 100644 index 00000000..29d9fc2a Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure8.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure9.png b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure9.png new file mode 100644 index 00000000..42915cc7 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular-zorro-layout/figure9.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-lazy/compile-eager.png b/docs/devon4ng/1.0/_images/images/angular/angular-lazy/compile-eager.png new file mode 100644 index 00000000..ffce0d19 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-lazy/compile-eager.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-lazy/compile-first-lazy.png b/docs/devon4ng/1.0/_images/images/angular/angular-lazy/compile-first-lazy.png new file mode 100644 index 00000000..8bd56e5d Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-lazy/compile-first-lazy.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-lazy/first-lvl-eager.png b/docs/devon4ng/1.0/_images/images/angular/angular-lazy/first-lvl-eager.png new file mode 100644 index 00000000..2480f2d6 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-lazy/first-lvl-eager.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-lazy/first-lvl-lazy.png b/docs/devon4ng/1.0/_images/images/angular/angular-lazy/first-lvl-lazy.png new file mode 100644 index 00000000..7f89915d Binary files /dev/null and 
b/docs/devon4ng/1.0/_images/images/angular/angular-lazy/first-lvl-lazy.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-lazy/first-lvl-wrong-path.png b/docs/devon4ng/1.0/_images/images/angular/angular-lazy/first-lvl-wrong-path.png new file mode 100644 index 00000000..68587c34 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-lazy/first-lvl-wrong-path.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-lazy/levels-app.png b/docs/devon4ng/1.0/_images/images/angular/angular-lazy/levels-app.png new file mode 100644 index 00000000..0a147442 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-lazy/levels-app.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-lazy/second-lvl-lazy.png b/docs/devon4ng/1.0/_images/images/angular/angular-lazy/second-lvl-lazy.png new file mode 100644 index 00000000..0afe2f5a Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-lazy/second-lvl-lazy.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-lazy/second-lvl-left-lazy.png b/docs/devon4ng/1.0/_images/images/angular/angular-lazy/second-lvl-left-lazy.png new file mode 100644 index 00000000..b2005351 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-lazy/second-lvl-left-lazy.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-lazy/second-lvl-right-eager-d.png b/docs/devon4ng/1.0/_images/images/angular/angular-lazy/second-lvl-right-eager-d.png new file mode 100644 index 00000000..47addfcc Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-lazy/second-lvl-right-eager-d.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-lazy/second-lvl-right-eager.png b/docs/devon4ng/1.0/_images/images/angular/angular-lazy/second-lvl-right-eager.png new file mode 100644 index 00000000..c55c77e2 Binary files /dev/null and 
b/docs/devon4ng/1.0/_images/images/angular/angular-lazy/second-lvl-right-eager.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-pwa/http-serve.png b/docs/devon4ng/1.0/_images/images/angular/angular-pwa/http-serve.png new file mode 100644 index 00000000..d6926625 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-pwa/http-serve.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-pwa/lighthouse-ng.png b/docs/devon4ng/1.0/_images/images/angular/angular-pwa/lighthouse-ng.png new file mode 100644 index 00000000..774a40f6 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-pwa/lighthouse-ng.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-pwa/mts-pwa-rec.png b/docs/devon4ng/1.0/_images/images/angular/angular-pwa/mts-pwa-rec.png new file mode 100644 index 00000000..8cf524e3 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-pwa/mts-pwa-rec.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-pwa/ng-serve.png b/docs/devon4ng/1.0/_images/images/angular/angular-pwa/ng-serve.png new file mode 100644 index 00000000..9f614131 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-pwa/ng-serve.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-pwa/online-offline-ng.png b/docs/devon4ng/1.0/_images/images/angular/angular-pwa/online-offline-ng.png new file mode 100644 index 00000000..0f42e5a4 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-pwa/online-offline-ng.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-pwa/pwa-nopwa-app-ng.png b/docs/devon4ng/1.0/_images/images/angular/angular-pwa/pwa-nopwa-app-ng.png new file mode 100644 index 00000000..4724098e Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-pwa/pwa-nopwa-app-ng.png differ diff --git 
a/docs/devon4ng/1.0/_images/images/angular/angular-theming/custom-dark.png b/docs/devon4ng/1.0/_images/images/angular/angular-theming/custom-dark.png new file mode 100644 index 00000000..701ccadf Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-theming/custom-dark.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-theming/custom-light.png b/docs/devon4ng/1.0/_images/images/angular/angular-theming/custom-light.png new file mode 100644 index 00000000..32aa97ef Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-theming/custom-light.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-theming/deeppurple-amber.png b/docs/devon4ng/1.0/_images/images/angular/angular-theming/deeppurple-amber.png new file mode 100644 index 00000000..26fcdd93 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-theming/deeppurple-amber.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-theming/indigo-pink.png b/docs/devon4ng/1.0/_images/images/angular/angular-theming/indigo-pink.png new file mode 100644 index 00000000..9af01630 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-theming/indigo-pink.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-theming/palette.PNG b/docs/devon4ng/1.0/_images/images/angular/angular-theming/palette.PNG new file mode 100644 index 00000000..3e2e7af4 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-theming/palette.PNG differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-theming/pink-bluegrey.png b/docs/devon4ng/1.0/_images/images/angular/angular-theming/pink-bluegrey.png new file mode 100644 index 00000000..9cc6a27d Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-theming/pink-bluegrey.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-theming/purple-green.png 
b/docs/devon4ng/1.0/_images/images/angular/angular-theming/purple-green.png new file mode 100644 index 00000000..d23d948d Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-theming/purple-green.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-theming/scss-map.png b/docs/devon4ng/1.0/_images/images/angular/angular-theming/scss-map.png new file mode 100644 index 00000000..87285543 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-theming/scss-map.png differ diff --git a/docs/devon4ng/1.0/_images/images/angular/angular-theming/theme-files-structure.png b/docs/devon4ng/1.0/_images/images/angular/angular-theming/theme-files-structure.png new file mode 100644 index 00000000..953d3eaf Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/angular/angular-theming/theme-files-structure.png differ diff --git a/docs/devon4ng/1.0/_images/images/apache_logo.png b/docs/devon4ng/1.0/_images/images/apache_logo.png new file mode 100644 index 00000000..5b5e925b Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/apache_logo.png differ diff --git a/docs/devon4ng/1.0/_images/images/app-initializer/loadExternalConfigFalse.png b/docs/devon4ng/1.0/_images/images/app-initializer/loadExternalConfigFalse.png new file mode 100644 index 00000000..f6c90dd9 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/app-initializer/loadExternalConfigFalse.png differ diff --git a/docs/devon4ng/1.0/_images/images/app-initializer/loadExternalConfigTrue.png b/docs/devon4ng/1.0/_images/images/app-initializer/loadExternalConfigTrue.png new file mode 100644 index 00000000..647ea162 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/app-initializer/loadExternalConfigTrue.png differ diff --git a/docs/devon4ng/1.0/_images/images/architecture-layers.svg b/docs/devon4ng/1.0/_images/images/architecture-layers.svg new file mode 100644 index 00000000..63c0c475 --- /dev/null +++ 
b/docs/devon4ng/1.0/_images/images/architecture-layers.svg @@ -0,0 +1,639 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + c + + + + + + + omponents + + + + + + + + s + + + + + + + ervices + + + + + + + + adapter + + + + + + + + + + module + + + + + + + + s + + + + + + + mart + + + + + + + + dumb + + + + + + + + + store + + + + + + + + model + + + + + + + + use + + + + + + + - + + + + + + + case + + + + + diff --git a/docs/devon4ng/1.0/_images/images/architecture-modules.svg b/docs/devon4ng/1.0/_images/images/architecture-modules.svg new file mode 100644 index 00000000..2ec15f98 --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/architecture-modules.svg @@ -0,0 +1,358 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + shared + + + + + + + + c + + + + + + + ore + + + + + + + + f + + + + + + + eature + + + + + + + (e.g. 
booking) + + + + + + + + a + + + + + + + pp + + + + + + + + + + + + + + + + diff --git a/docs/devon4ng/1.0/_images/images/architecture.png b/docs/devon4ng/1.0/_images/images/architecture.png new file mode 100644 index 00000000..d21165e7 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/architecture.png differ diff --git a/docs/devon4ng/1.0/_images/images/architecture_background.png b/docs/devon4ng/1.0/_images/images/architecture_background.png new file mode 100644 index 00000000..91d5af3a Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/architecture_background.png differ diff --git a/docs/devon4ng/1.0/_images/images/batch_icon.png b/docs/devon4ng/1.0/_images/images/batch_icon.png new file mode 100644 index 00000000..1ca97b15 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/batch_icon.png differ diff --git a/docs/devon4ng/1.0/_images/images/blob-streaming/folder-structure.PNG b/docs/devon4ng/1.0/_images/images/blob-streaming/folder-structure.PNG new file mode 100644 index 00000000..d4880bfd Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/blob-streaming/folder-structure.PNG differ diff --git a/docs/devon4ng/1.0/_images/images/blob-streaming/html-view-1.PNG b/docs/devon4ng/1.0/_images/images/blob-streaming/html-view-1.PNG new file mode 100644 index 00000000..adef8fff Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/blob-streaming/html-view-1.PNG differ diff --git a/docs/devon4ng/1.0/_images/images/capgemini.png b/docs/devon4ng/1.0/_images/images/capgemini.png new file mode 100644 index 00000000..e323d3dd Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/capgemini.png differ diff --git a/docs/devon4ng/1.0/_images/images/cloud_icon.png b/docs/devon4ng/1.0/_images/images/cloud_icon.png new file mode 100644 index 00000000..fc565675 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/cloud_icon.png differ diff --git a/docs/devon4ng/1.0/_images/images/code_icon.png 
b/docs/devon4ng/1.0/_images/images/code_icon.png new file mode 100644 index 00000000..72c4d880 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/code_icon.png differ diff --git a/docs/devon4ng/1.0/_images/images/comillas.png b/docs/devon4ng/1.0/_images/images/comillas.png new file mode 100644 index 00000000..01644235 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/comillas.png differ diff --git a/docs/devon4ng/1.0/_images/images/component-decomposition-example-1.svg b/docs/devon4ng/1.0/_images/images/component-decomposition-example-1.svg new file mode 100644 index 00000000..1370c66d --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/component-decomposition-example-1.svg @@ -0,0 +1,101 @@ + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + + + + diff --git a/docs/devon4ng/1.0/_images/images/component-decomposition-example-2.svg b/docs/devon4ng/1.0/_images/images/component-decomposition-example-2.svg new file mode 100644 index 00000000..747697ae --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/component-decomposition-example-2.svg @@ -0,0 +1,254 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/devon4ng/1.0/_images/images/component-decomposition-example-component-tree.svg b/docs/devon4ng/1.0/_images/images/component-decomposition-example-component-tree.svg new file mode 100644 index 00000000..3b0a7061 --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/component-decomposition-example-component-tree.svg @@ -0,0 +1,356 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + FormListpicker + + + + + + + + FilterResultTable + + + + + + + + FilterInput + + + + + + + + + + + + + + + + + + + DirectInput + + + + + + + + Listpicker + + + + + + + + ListpickerDropdown + + + + + + + + + + + + + + + diff --git 
a/docs/devon4ng/1.0/_images/images/component-tree-highlighted-subtree.svg b/docs/devon4ng/1.0/_images/images/component-tree-highlighted-subtree.svg new file mode 100644 index 00000000..d380bf5b --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/component-tree-highlighted-subtree.svg @@ -0,0 +1,950 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Overview + + + + + + + + SearchPanel + + + + + + + + QuickSearchTab + + + + + + + + Details + + + + + + + + App + + + + + + + + Toolbar + + + + + + + + DetailSearchTab + + + + + + + + CriteriaForm + + + + + + + + Header + + + + + + + + UserInfo + + + + + + + + ActionToolbar + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + LastActivitiesPanel + + + + + + + + + + + + + ResultPanel + + + + + + + + Table + + + + + + + + ResultActions + + + + + + + + LastActivitiesPanel + + + + + + + + + + + + + + + + + + + + + + + + + + + /overview + + + + + + + /details + + + + + diff --git a/docs/devon4ng/1.0/_images/images/component-tree.svg b/docs/devon4ng/1.0/_images/images/component-tree.svg new file mode 100644 index 00000000..010e562b --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/component-tree.svg @@ -0,0 +1,950 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Overview + + + + + + + + SearchPanel + + + + + + + + QuickSearchTab + + + + + + + + Details + + + + + + + + App + + + + + + + + Toolbar + + + + + + + + DetailSearchTab + + + + + + + + CriteriaForm + + + + + + + + Header + + + + + + + + UserInfo + + + + + + + + ActionToolbar + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + LastActivitiesPanel + + + + + + + + + + + + + ResultPanel + + + + + + + + Table + + + + + + + + ResultActions + + + + + + + + LastActivitiesPanel + + + + + + + + + + + + + + + + + + + + + + + + + + + /overview + + + + + + + /details + + + + + diff --git a/docs/devon4ng/1.0/_images/images/components-layer-service-layer-boundaries.svg b/docs/devon4ng/1.0/_images/images/components-layer-service-layer-boundaries.svg new file mode 100644 index 00000000..70484237 --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/components-layer-service-layer-boundaries.svg @@ -0,0 +1,355 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Smart + + + + + + + Component + + + + + + + + Use Case Service + + + + + + + + + + + + + Store + + + + + + + + + + + + subscribe() + + + + + + + action() + + + + + + + + + + Services Layer + + + + + + + Components Layer + + + + + diff --git a/docs/devon4ng/1.0/_images/images/corte_1.png b/docs/devon4ng/1.0/_images/images/corte_1.png new file mode 100644 index 00000000..e1c81c12 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/corte_1.png differ diff --git a/docs/devon4ng/1.0/_images/images/corte_2.png b/docs/devon4ng/1.0/_images/images/corte_2.png new file mode 100644 index 00000000..a0bf4db5 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/corte_2.png differ diff --git a/docs/devon4ng/1.0/_images/images/corte_3.png b/docs/devon4ng/1.0/_images/images/corte_3.png new file mode 100644 index 00000000..a1b02164 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/corte_3.png differ diff --git a/docs/devon4ng/1.0/_images/images/corte_4.png b/docs/devon4ng/1.0/_images/images/corte_4.png new file mode 100644 index 00000000..c0867326 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/corte_4.png differ diff --git 
a/docs/devon4ng/1.0/_images/images/cypress/browserTab.jpg b/docs/devon4ng/1.0/_images/images/cypress/browserTab.jpg new file mode 100644 index 00000000..9dff976e Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/cypress/browserTab.jpg differ diff --git a/docs/devon4ng/1.0/_images/images/cypress/contextImg.jpg b/docs/devon4ng/1.0/_images/images/cypress/contextImg.jpg new file mode 100644 index 00000000..c925bb34 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/cypress/contextImg.jpg differ diff --git a/docs/devon4ng/1.0/_images/images/cypress/reporter.jpg b/docs/devon4ng/1.0/_images/images/cypress/reporter.jpg new file mode 100644 index 00000000..fe821059 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/cypress/reporter.jpg differ diff --git a/docs/devon4ng/1.0/_images/images/data-box.jpg b/docs/devon4ng/1.0/_images/images/data-box.jpg new file mode 100644 index 00000000..6d38b892 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/data-box.jpg differ diff --git a/docs/devon4ng/1.0/_images/images/december.png b/docs/devon4ng/1.0/_images/images/december.png new file mode 100644 index 00000000..0c7a6800 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/december.png differ diff --git a/docs/devon4ng/1.0/_images/images/desktop_icon.png b/docs/devon4ng/1.0/_images/images/desktop_icon.png new file mode 100644 index 00000000..0fbcb96b Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/desktop_icon.png differ diff --git a/docs/devon4ng/1.0/_images/images/devon_logo - responsive.png b/docs/devon4ng/1.0/_images/images/devon_logo - responsive.png new file mode 100644 index 00000000..908cc66e Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/devon_logo - responsive.png differ diff --git a/docs/devon4ng/1.0/_images/images/devon_logo.png b/docs/devon4ng/1.0/_images/images/devon_logo.png new file mode 100644 index 00000000..908cc66e Binary files /dev/null and 
b/docs/devon4ng/1.0/_images/images/devon_logo.png differ diff --git a/docs/devon4ng/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_CompleteOverview.png b/docs/devon4ng/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_CompleteOverview.png new file mode 100644 index 00000000..8591d2cb Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_CompleteOverview.png differ diff --git a/docs/devon4ng/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogComponent.png b/docs/devon4ng/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogComponent.png new file mode 100644 index 00000000..bb81efe5 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogComponent.png differ diff --git a/docs/devon4ng/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogContainer.png b/docs/devon4ng/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogContainer.png new file mode 100644 index 00000000..1f9222f1 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogContainer.png differ diff --git a/docs/devon4ng/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogInteractions.png b/docs/devon4ng/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogInteractions.png new file mode 100644 index 00000000..6ac503fb Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogInteractions.png differ diff --git a/docs/devon4ng/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_Overview.png b/docs/devon4ng/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_Overview.png new file mode 100644 index 00000000..b694090e Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_Overview.png differ diff --git 
a/docs/devon4ng/1.0/_images/images/devonfw-oasp.png b/docs/devon4ng/1.0/_images/images/devonfw-oasp.png new file mode 100644 index 00000000..4d1171ff Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/devonfw-oasp.png differ diff --git a/docs/devon4ng/1.0/_images/images/enviroment_icon.png b/docs/devon4ng/1.0/_images/images/enviroment_icon.png new file mode 100644 index 00000000..16d91378 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/enviroment_icon.png differ diff --git a/docs/devon4ng/1.0/_images/images/examples.png b/docs/devon4ng/1.0/_images/images/examples.png new file mode 100644 index 00000000..ea8796c2 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/examples.png differ diff --git a/docs/devon4ng/1.0/_images/images/facebook.png b/docs/devon4ng/1.0/_images/images/facebook.png new file mode 100644 index 00000000..56f12068 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/facebook.png differ diff --git a/docs/devon4ng/1.0/_images/images/february.png b/docs/devon4ng/1.0/_images/images/february.png new file mode 100644 index 00000000..d5db9e90 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/february.png differ diff --git a/docs/devon4ng/1.0/_images/images/flexibility.png b/docs/devon4ng/1.0/_images/images/flexibility.png new file mode 100644 index 00000000..a9e880d8 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/flexibility.png differ diff --git a/docs/devon4ng/1.0/_images/images/generate-component.png b/docs/devon4ng/1.0/_images/images/generate-component.png new file mode 100644 index 00000000..778a70c7 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/generate-component.png differ diff --git a/docs/devon4ng/1.0/_images/images/generate-module.png b/docs/devon4ng/1.0/_images/images/generate-module.png new file mode 100644 index 00000000..a0c9ea0a Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/generate-module.png differ diff --git 
a/docs/devon4ng/1.0/_images/images/getting_started.png b/docs/devon4ng/1.0/_images/images/getting_started.png new file mode 100644 index 00000000..8f3340bf Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/getting_started.png differ diff --git a/docs/devon4ng/1.0/_images/images/github.png b/docs/devon4ng/1.0/_images/images/github.png new file mode 100644 index 00000000..602e9527 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/github.png differ diff --git a/docs/devon4ng/1.0/_images/images/help_icon.png b/docs/devon4ng/1.0/_images/images/help_icon.png new file mode 100644 index 00000000..a8c7a71e Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/help_icon.png differ diff --git a/docs/devon4ng/1.0/_images/images/high-speed.png b/docs/devon4ng/1.0/_images/images/high-speed.png new file mode 100644 index 00000000..2799a180 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/high-speed.png differ diff --git a/docs/devon4ng/1.0/_images/images/ico_flexibility.svg b/docs/devon4ng/1.0/_images/images/ico_flexibility.svg new file mode 100644 index 00000000..a4c54c4d --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/ico_flexibility.svg @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + diff --git a/docs/devon4ng/1.0/_images/images/ico_highSpeed.svg b/docs/devon4ng/1.0/_images/images/ico_highSpeed.svg new file mode 100644 index 00000000..c944092d --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/ico_highSpeed.svg @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + diff --git a/docs/devon4ng/1.0/_images/images/ico_innovation.svg b/docs/devon4ng/1.0/_images/images/ico_innovation.svg new file mode 100644 index 00000000..9f2dfda7 --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/ico_innovation.svg @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/devon4ng/1.0/_images/images/ico_quality.svg b/docs/devon4ng/1.0/_images/images/ico_quality.svg new file mode 100644 index 00000000..5e2a8375 --- /dev/null 
+++ b/docs/devon4ng/1.0/_images/images/ico_quality.svg @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + diff --git a/docs/devon4ng/1.0/_images/images/ide.png b/docs/devon4ng/1.0/_images/images/ide.png new file mode 100644 index 00000000..ec3fed60 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ide.png differ diff --git a/docs/devon4ng/1.0/_images/images/img.png b/docs/devon4ng/1.0/_images/images/img.png new file mode 100644 index 00000000..4b0bfaa8 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/img.png differ diff --git a/docs/devon4ng/1.0/_images/images/img_1.png b/docs/devon4ng/1.0/_images/images/img_1.png new file mode 100644 index 00000000..583bc83d Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/img_1.png differ diff --git a/docs/devon4ng/1.0/_images/images/innovation.png b/docs/devon4ng/1.0/_images/images/innovation.png new file mode 100644 index 00000000..a2fc9a6a Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/innovation.png differ diff --git a/docs/devon4ng/1.0/_images/images/install-cli-success.png b/docs/devon4ng/1.0/_images/images/install-cli-success.png new file mode 100644 index 00000000..b14462cd Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/install-cli-success.png differ diff --git a/docs/devon4ng/1.0/_images/images/integration_icon.png b/docs/devon4ng/1.0/_images/images/integration_icon.png new file mode 100644 index 00000000..2faf2830 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/integration_icon.png differ diff --git a/docs/devon4ng/1.0/_images/images/ionic-getting-started/ionic-blank-project.PNG b/docs/devon4ng/1.0/_images/images/ionic-getting-started/ionic-blank-project.PNG new file mode 100644 index 00000000..94b9772e Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic-getting-started/ionic-blank-project.PNG differ diff --git a/docs/devon4ng/1.0/_images/images/ionic-getting-started/ionic-start-list.png 
b/docs/devon4ng/1.0/_images/images/ionic-getting-started/ionic-start-list.png new file mode 100644 index 00000000..aca2d6d1 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic-getting-started/ionic-start-list.png differ diff --git a/docs/devon4ng/1.0/_images/images/ionic-getting-started/update-ionic-cli.PNG b/docs/devon4ng/1.0/_images/images/ionic-getting-started/update-ionic-cli.PNG new file mode 100644 index 00000000..b28e83d5 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic-getting-started/update-ionic-cli.PNG differ diff --git a/docs/devon4ng/1.0/_images/images/ionic-to-android/and-vsc-build-apk.png b/docs/devon4ng/1.0/_images/images/ionic-to-android/and-vsc-build-apk.png new file mode 100644 index 00000000..f321c1df Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic-to-android/and-vsc-build-apk.png differ diff --git a/docs/devon4ng/1.0/_images/images/ionic-to-android/and-vsc-build-run.png b/docs/devon4ng/1.0/_images/images/ionic-to-android/and-vsc-build-run.png new file mode 100644 index 00000000..aaad2c91 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic-to-android/and-vsc-build-run.png differ diff --git a/docs/devon4ng/1.0/_images/images/ionic-to-android/and-vsc-make-app.png b/docs/devon4ng/1.0/_images/images/ionic-to-android/and-vsc-make-app.png new file mode 100644 index 00000000..52fa226f Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic-to-android/and-vsc-make-app.png differ diff --git a/docs/devon4ng/1.0/_images/images/ionic-to-android/and-vsc-make.png b/docs/devon4ng/1.0/_images/images/ionic-to-android/and-vsc-make.png new file mode 100644 index 00000000..384e9079 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic-to-android/and-vsc-make.png differ diff --git a/docs/devon4ng/1.0/_images/images/ionic-to-android/config-device.png b/docs/devon4ng/1.0/_images/images/ionic-to-android/config-device.png new file mode 100644 index 00000000..d68d982b Binary 
files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic-to-android/config-device.png differ diff --git a/docs/devon4ng/1.0/_images/images/ionic-to-android/create-new-device.png b/docs/devon4ng/1.0/_images/images/ionic-to-android/create-new-device.png new file mode 100644 index 00000000..7c8a5a7c Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic-to-android/create-new-device.png differ diff --git a/docs/devon4ng/1.0/_images/images/ionic-to-android/download-so.png b/docs/devon4ng/1.0/_images/images/ionic-to-android/download-so.png new file mode 100644 index 00000000..0048db46 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic-to-android/download-so.png differ diff --git a/docs/devon4ng/1.0/_images/images/ionic-to-android/enable-developer-options1_2_3.png b/docs/devon4ng/1.0/_images/images/ionic-to-android/enable-developer-options1_2_3.png new file mode 100644 index 00000000..d17e22b9 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic-to-android/enable-developer-options1_2_3.png differ diff --git a/docs/devon4ng/1.0/_images/images/ionic-to-android/enable-developer-options4_5_6.png b/docs/devon4ng/1.0/_images/images/ionic-to-android/enable-developer-options4_5_6.png new file mode 100644 index 00000000..529fdf07 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic-to-android/enable-developer-options4_5_6.png differ diff --git a/docs/devon4ng/1.0/_images/images/ionic-to-android/environments.png b/docs/devon4ng/1.0/_images/images/ionic-to-android/environments.png new file mode 100644 index 00000000..47d7d367 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic-to-android/environments.png differ diff --git a/docs/devon4ng/1.0/_images/images/ionic-to-android/ipconfig-short.png b/docs/devon4ng/1.0/_images/images/ionic-to-android/ipconfig-short.png new file mode 100644 index 00000000..c2a77d81 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic-to-android/ipconfig-short.png 
differ diff --git a/docs/devon4ng/1.0/_images/images/ionic-to-android/locate-apk.png b/docs/devon4ng/1.0/_images/images/ionic-to-android/locate-apk.png new file mode 100644 index 00000000..e27bda40 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic-to-android/locate-apk.png differ diff --git a/docs/devon4ng/1.0/_images/images/ionic-to-android/new-backend-url.PNG b/docs/devon4ng/1.0/_images/images/ionic-to-android/new-backend-url.PNG new file mode 100644 index 00000000..7f92fa2f Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic-to-android/new-backend-url.PNG differ diff --git a/docs/devon4ng/1.0/_images/images/ionic-to-android/new-phone-created.png b/docs/devon4ng/1.0/_images/images/ionic-to-android/new-phone-created.png new file mode 100644 index 00000000..f395296a Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic-to-android/new-phone-created.png differ diff --git a/docs/devon4ng/1.0/_images/images/ionic-to-android/new-phone-nexus.png b/docs/devon4ng/1.0/_images/images/ionic-to-android/new-phone-nexus.png new file mode 100644 index 00000000..7a166ba0 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic-to-android/new-phone-nexus.png differ diff --git a/docs/devon4ng/1.0/_images/images/ionic-to-android/real-device.png b/docs/devon4ng/1.0/_images/images/ionic-to-android/real-device.png new file mode 100644 index 00000000..524038f2 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic-to-android/real-device.png differ diff --git a/docs/devon4ng/1.0/_images/images/ionic/ionic-pwa/base.png b/docs/devon4ng/1.0/_images/images/ionic/ionic-pwa/base.png new file mode 100644 index 00000000..159aa873 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic/ionic-pwa/base.png differ diff --git a/docs/devon4ng/1.0/_images/images/ionic/ionic-pwa/http-server.png b/docs/devon4ng/1.0/_images/images/ionic/ionic-pwa/http-server.png new file mode 100644 index 00000000..dc5084f3 Binary files 
/dev/null and b/docs/devon4ng/1.0/_images/images/ionic/ionic-pwa/http-server.png differ diff --git a/docs/devon4ng/1.0/_images/images/ionic/ionic-pwa/ionic-serve.png b/docs/devon4ng/1.0/_images/images/ionic/ionic-pwa/ionic-serve.png new file mode 100644 index 00000000..cad3c335 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic/ionic-pwa/ionic-serve.png differ diff --git a/docs/devon4ng/1.0/_images/images/ionic/ionic-pwa/lighthouse.png b/docs/devon4ng/1.0/_images/images/ionic/ionic-pwa/lighthouse.png new file mode 100644 index 00000000..f24e8806 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic/ionic-pwa/lighthouse.png differ diff --git a/docs/devon4ng/1.0/_images/images/ionic/ionic-pwa/online-offline.png b/docs/devon4ng/1.0/_images/images/ionic/ionic-pwa/online-offline.png new file mode 100644 index 00000000..2c44171d Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic/ionic-pwa/online-offline.png differ diff --git a/docs/devon4ng/1.0/_images/images/ionic/ionic-pwa/pwa-nopwa-app.png b/docs/devon4ng/1.0/_images/images/ionic/ionic-pwa/pwa-nopwa-app.png new file mode 100644 index 00000000..7dd4b467 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ionic/ionic-pwa/pwa-nopwa-app.png differ diff --git a/docs/devon4ng/1.0/_images/images/iwan.jpg b/docs/devon4ng/1.0/_images/images/iwan.jpg new file mode 100644 index 00000000..5c4d2af3 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/iwan.jpg differ diff --git a/docs/devon4ng/1.0/_images/images/january.png b/docs/devon4ng/1.0/_images/images/january.png new file mode 100644 index 00000000..ccc123cb Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/january.png differ diff --git a/docs/devon4ng/1.0/_images/images/java_icon.png b/docs/devon4ng/1.0/_images/images/java_icon.png new file mode 100644 index 00000000..b99f7003 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/java_icon.png differ diff --git 
a/docs/devon4ng/1.0/_images/images/javascript_icon.png b/docs/devon4ng/1.0/_images/images/javascript_icon.png new file mode 100644 index 00000000..e5aecbfc Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/javascript_icon.png differ diff --git a/docs/devon4ng/1.0/_images/images/june.png b/docs/devon4ng/1.0/_images/images/june.png new file mode 100644 index 00000000..04247755 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/june.png differ diff --git a/docs/devon4ng/1.0/_images/images/layout-angular-material/1-finished-application.png b/docs/devon4ng/1.0/_images/images/layout-angular-material/1-finished-application.png new file mode 100644 index 00000000..359cb08b Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/layout-angular-material/1-finished-application.png differ diff --git a/docs/devon4ng/1.0/_images/images/layout-angular-material/2-blank-application.png b/docs/devon4ng/1.0/_images/images/layout-angular-material/2-blank-application.png new file mode 100644 index 00000000..3e4b31d7 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/layout-angular-material/2-blank-application.png differ diff --git a/docs/devon4ng/1.0/_images/images/layout-angular-material/3-material-added.png b/docs/devon4ng/1.0/_images/images/layout-angular-material/3-material-added.png new file mode 100644 index 00000000..c33d83bd Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/layout-angular-material/3-material-added.png differ diff --git a/docs/devon4ng/1.0/_images/images/layout-angular-material/4-header.png b/docs/devon4ng/1.0/_images/images/layout-angular-material/4-header.png new file mode 100644 index 00000000..8f336afb Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/layout-angular-material/4-header.png differ diff --git a/docs/devon4ng/1.0/_images/images/layout-angular-material/5-header-layout-final.png b/docs/devon4ng/1.0/_images/images/layout-angular-material/5-header-layout-final.png new file mode 100644 
index 00000000..1d7fb776 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/layout-angular-material/5-header-layout-final.png differ diff --git a/docs/devon4ng/1.0/_images/images/layout-angular-material/6-home-page.png b/docs/devon4ng/1.0/_images/images/layout-angular-material/6-home-page.png new file mode 100644 index 00000000..8eea07fa Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/layout-angular-material/6-home-page.png differ diff --git a/docs/devon4ng/1.0/_images/images/layout-angular-material/7-data-page.png b/docs/devon4ng/1.0/_images/images/layout-angular-material/7-data-page.png new file mode 100644 index 00000000..e4fadfa5 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/layout-angular-material/7-data-page.png differ diff --git a/docs/devon4ng/1.0/_images/images/layout-angular-material/8-sidenav-started.png b/docs/devon4ng/1.0/_images/images/layout-angular-material/8-sidenav-started.png new file mode 100644 index 00000000..d7b06579 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/layout-angular-material/8-sidenav-started.png differ diff --git a/docs/devon4ng/1.0/_images/images/layout-angular-material/9-finished.png b/docs/devon4ng/1.0/_images/images/layout-angular-material/9-finished.png new file mode 100644 index 00000000..beb49f9f Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/layout-angular-material/9-finished.png differ diff --git a/docs/devon4ng/1.0/_images/images/linkedin.png b/docs/devon4ng/1.0/_images/images/linkedin.png new file mode 100644 index 00000000..0d863462 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/linkedin.png differ diff --git a/docs/devon4ng/1.0/_images/images/logo_capgemini_white.png b/docs/devon4ng/1.0/_images/images/logo_capgemini_white.png new file mode 100644 index 00000000..7e6c447f Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/logo_capgemini_white.png differ diff --git a/docs/devon4ng/1.0/_images/images/menu.svg 
b/docs/devon4ng/1.0/_images/images/menu.svg new file mode 100644 index 00000000..e22f434d --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/menu.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/docs/devon4ng/1.0/_images/images/mesteve.jpg b/docs/devon4ng/1.0/_images/images/mesteve.jpg new file mode 100644 index 00000000..f8a96dd1 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/mesteve.jpg differ diff --git a/docs/devon4ng/1.0/_images/images/mkurz.jpg b/docs/devon4ng/1.0/_images/images/mkurz.jpg new file mode 100644 index 00000000..3571aebe Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/mkurz.jpg differ diff --git a/docs/devon4ng/1.0/_images/images/mobile_icon.png b/docs/devon4ng/1.0/_images/images/mobile_icon.png new file mode 100644 index 00000000..02d47454 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/mobile_icon.png differ diff --git a/docs/devon4ng/1.0/_images/images/module-declaration.svg b/docs/devon4ng/1.0/_images/images/module-declaration.svg new file mode 100644 index 00000000..41cd8c07 --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/module-declaration.svg @@ -0,0 +1,448 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + AppRoutingModule + + + + + + + + AppModule + + + + + + + + + + + + RouterModule + + + + + + + + forRoot + + + + + + + () + + + + + + + + FeatureModule + + + + + + + + + forChild + + + + + + + () + + + + + + + + FeatureRoutingModule + + + + + + + + + + + + + diff --git a/docs/devon4ng/1.0/_images/images/net_icon.png b/docs/devon4ng/1.0/_images/images/net_icon.png new file mode 100644 index 00000000..6e65ecd0 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/net_icon.png differ diff --git a/docs/devon4ng/1.0/_images/images/next.scg.svg b/docs/devon4ng/1.0/_images/images/next.scg.svg new file mode 100644 index 00000000..aec8cd2a --- /dev/null +++ 
b/docs/devon4ng/1.0/_images/images/next.scg.svg @@ -0,0 +1,39 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/devon4ng/1.0/_images/images/ngrx-concept.svg b/docs/devon4ng/1.0/_images/images/ngrx-concept.svg new file mode 100644 index 00000000..adb2887d --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/ngrx-concept.svg @@ -0,0 +1,403 @@ + + + + + + image/svg+xml + + + + + + + + + + + + + Store + + + Component + + + Effect + + + + + + Reducer + + + Services + + dispatches + [Action] + + + + + + + + + + + selects + state + slice + + emits + updates + + Invoke + side + effect + diff --git a/docs/devon4ng/1.0/_images/images/ngrx-devtools.png b/docs/devon4ng/1.0/_images/images/ngrx-devtools.png new file mode 100644 index 00000000..965e1b27 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/ngrx-devtools.png differ diff --git a/docs/devon4ng/1.0/_images/images/nx-cli/create-nx-workspace.png b/docs/devon4ng/1.0/_images/images/nx-cli/create-nx-workspace.png new file mode 100644 index 00000000..cb9044dd Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/nx-cli/create-nx-workspace.png differ diff --git a/docs/devon4ng/1.0/_images/images/nx-cli/nx-workspace-in-vscode.png b/docs/devon4ng/1.0/_images/images/nx-cli/nx-workspace-in-vscode.png new file mode 100644 index 00000000..f42be339 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/nx-cli/nx-workspace-in-vscode.png differ diff --git a/docs/devon4ng/1.0/_images/images/oasp-logo.png b/docs/devon4ng/1.0/_images/images/oasp-logo.png new file mode 100644 index 00000000..5b20ebf4 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/oasp-logo.png differ diff --git a/docs/devon4ng/1.0/_images/images/on_the_flexible_solution.png b/docs/devon4ng/1.0/_images/images/on_the_flexible_solution.png new file mode 100644 index 00000000..e1a29757 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/on_the_flexible_solution.png differ diff 
--git a/docs/devon4ng/1.0/_images/images/prev.svg b/docs/devon4ng/1.0/_images/images/prev.svg new file mode 100644 index 00000000..cb0d1d41 --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/prev.svg @@ -0,0 +1,39 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/devon4ng/1.0/_images/images/project-structure.png b/docs/devon4ng/1.0/_images/images/project-structure.png new file mode 100644 index 00000000..75f2c617 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/project-structure.png differ diff --git a/docs/devon4ng/1.0/_images/images/quality.png b/docs/devon4ng/1.0/_images/images/quality.png new file mode 100644 index 00000000..7a6424a2 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/quality.png differ diff --git a/docs/devon4ng/1.0/_images/images/query_logo.png b/docs/devon4ng/1.0/_images/images/query_logo.png new file mode 100644 index 00000000..a1391f04 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/query_logo.png differ diff --git a/docs/devon4ng/1.0/_images/images/read_icon.png b/docs/devon4ng/1.0/_images/images/read_icon.png new file mode 100644 index 00000000..731650ab Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/read_icon.png differ diff --git a/docs/devon4ng/1.0/_images/images/rest-adapter.svg b/docs/devon4ng/1.0/_images/images/rest-adapter.svg new file mode 100644 index 00000000..bb2a7c24 --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/rest-adapter.svg @@ -0,0 +1,366 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + client + + + + + + + + Use Case Service + + + + + + + + Adapter + + + + + + + + + HttpClient + + + + + + + + + + server + + + + + + + + + HTTP + + + + + + + Endpoint + + + + + + + + + + + + + diff --git a/docs/devon4ng/1.0/_images/images/slider1.jpg b/docs/devon4ng/1.0/_images/images/slider1.jpg new file mode 100644 index 00000000..49d1c706 Binary files 
/dev/null and b/docs/devon4ng/1.0/_images/images/slider1.jpg differ diff --git a/docs/devon4ng/1.0/_images/images/slider2.jpg b/docs/devon4ng/1.0/_images/images/slider2.jpg new file mode 100644 index 00000000..f34ef1fe Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/slider2.jpg differ diff --git a/docs/devon4ng/1.0/_images/images/slider3.jpg b/docs/devon4ng/1.0/_images/images/slider3.jpg new file mode 100644 index 00000000..cabfc561 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/slider3.jpg differ diff --git a/docs/devon4ng/1.0/_images/images/slideshare.png b/docs/devon4ng/1.0/_images/images/slideshare.png new file mode 100644 index 00000000..069568fa Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/slideshare.png differ diff --git a/docs/devon4ng/1.0/_images/images/smart-component-interaction-via-services-layer.svg b/docs/devon4ng/1.0/_images/images/smart-component-interaction-via-services-layer.svg new file mode 100644 index 00000000..636e0028 --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/smart-component-interaction-via-services-layer.svg @@ -0,0 +1,724 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Store + + + + + + + + state$: Observable<State> + + + + + + + + + + + + + + + changeState + + + + + + + ( + + + + + + + args + + + + + + + ): void + + + + + + + + Smart + + + + + + + Component A + + + + + + + + Smart + + + + + + + Component B + + + + + + + + Smart + + + + + + + Component + + + + + + + C + + + + + + + + + + + + + + + + + action() + + + + + + + subscribe() + + + + + + + + UseCaseService + + + + + + + + action(): void + + + + + + + + + subscribe() + + + + + + + + + + Services Layer + + + + + + + Components Layer + + + + + diff --git a/docs/devon4ng/1.0/_images/images/smart-dumb-components-interaction.svg 
b/docs/devon4ng/1.0/_images/images/smart-dumb-components-interaction.svg new file mode 100644 index 00000000..15706ef0 --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/smart-dumb-components-interaction.svg @@ -0,0 +1,501 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Smart + + + + + + + Component + + + + + + + + Dumb + + + + + + + Component A + + + + + + + + Dumb + + + + + + + Component B + + + + + + + + Dumb + + + + + + + Component C + + + + + + + + + + + + + Data Binding + + + + + + + Data Binding + + + + + + + Data Binding + + + + + + + + Event Binding + + + + + + + + + + + + + Event Binding + + + + + diff --git a/docs/devon4ng/1.0/_images/images/smart-dumb-components.svg b/docs/devon4ng/1.0/_images/images/smart-dumb-components.svg new file mode 100644 index 00000000..df8809db --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/smart-dumb-components.svg @@ -0,0 +1,887 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Smart + + + + + + + Component + + + + + + + + Dumb + + + + + + + Component + + + + + + + A + + + + + + + + Dumb + + + + + + + Component + + + + + + + B + + + + + + + + Dumb + + + + + + + Component + + + + + + + C + + + + + + + + + + + + + + Service + + + + + + + + Data Binding + + + + + + + Data Binding + + + + + + + Data Binding + + + + + + + + Event Binding + + + + + + + + + + + + + Event Binding + + + + + + + + Store + + + + + + + + subscribe() + + + + + + + action() + + + + + diff --git a/docs/devon4ng/1.0/_images/images/smart-smart-components-example.svg b/docs/devon4ng/1.0/_images/images/smart-smart-components-example.svg new file mode 100644 index 00000000..dacb06d9 
--- /dev/null +++ b/docs/devon4ng/1.0/_images/images/smart-smart-components-example.svg @@ -0,0 +1,1456 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + FlightSearchStore + + + + + + + + state$: Observable<State> + + + + + + + + + + + + + + + setFlights + + + + + + + ( + + + + + + + Flug + + + + + + + []): void + + + + + + + + + + + + + + + clearFlights + + + + + + + (): void + + + + + + + + + + + + + + + setLoadingFlights + + + + + + + ( + + + + + + + boolean + + + + + + + ): void + + + + + + + + FlightSearchComponent + + + + + + + + FlightDetailsComponent + + + + + + + + + + + + + + + + + subscribe() + + + + + + + + FlightSearchService + + + + + + + + + + + + + + + loadFlights + + + + + + + (): void + + + + + + + + + FlightSearchState + + + + + + + + + + + + + + + isLoadingFlights + + + + + + + : + + + + + + + boolean + + + + + + + + flights: + + + + + + + Flug + + + + + + + [] + + + + + + + + c + + + + + + + riteria: + + + + + + + FlightSearchCriteria + + + + + + + + + + + loadFlights + + + + + + + () + + + + + + + + FlightSearchAdapter + + + + + + + + + + + + + + + getFlights + + + + + + + (): + + + + + + + Observable<Flight[]> + + + + + + + + + HttpClient + + + + + + + + + + + + + + + get + + + + + + + <T> + + + + + + + (): Observable<T> + + + + + + + + subscribe() + + + + + + + + FlightSearchCriteria + + + + + + diff --git a/docs/devon4ng/1.0/_images/images/smart-smart-components.svg b/docs/devon4ng/1.0/_images/images/smart-smart-components.svg new file mode 100644 index 00000000..b4fc8369 --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/smart-smart-components.svg @@ -0,0 +1,794 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Store + + + + + + + + state$: Observable<State> + + + + + + + + + + + + + + + changeState + + + + + + + ( + + + + + + + args + + + + + + + ): + + + + + + + void + + + + + + + + Smart + + + + + + + Component + + + + + + + A + + + + + + + + Smart + + + + + + + Component + + + + + + + B + + + + + + + + Smart + + + + + + + Component + + + + + + + C + + + + + + + + + + + + + + + + + action() + + + + + + + subscribe() + + + + + + + + UseCaseService + + + + + + + + + + + + + + + action(): + + + + + + + void + + + + + + + + + subscribe() + + + + + diff --git a/docs/devon4ng/1.0/_images/images/src/ngrx-concept.pptx b/docs/devon4ng/1.0/_images/images/src/ngrx-concept.pptx new file mode 100644 index 00000000..219d5cd5 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/src/ngrx-concept.pptx differ diff --git a/docs/devon4ng/1.0/_images/images/testimonials.png b/docs/devon4ng/1.0/_images/images/testimonials.png new file mode 100644 index 00000000..9835e68f Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/testimonials.png differ diff --git a/docs/devon4ng/1.0/_images/images/testing-areas.svg b/docs/devon4ng/1.0/_images/images/testing-areas.svg new file mode 100644 index 00000000..45b461ed --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/testing-areas.svg @@ -0,0 +1,1161 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Store + + + + + + + + + + + + + Smart + + + + + + + + Dumb + + + + + + + + Dumb + + + + + + + + Dumb + + + + + + + + Dumb + + + + + + + + + + + + + + + + + + + + + + + + + + + + Dumb + + + + + + + + + + + + + Dumb + + + + + + + + + + + + + Use + + 
+ + + + + Case + + + + + + + Service + + + + + + + + + + Adapter + + + + + + + + + Service + + + + + + + + + + + + + + + + + View + + + + + + + l + + + + + + + ogic + + + + + + + in + + + + + + + Smart + + + + + + + Components + + + + + + + (1) + + + + + + + State + + + + + + + t + + + + + + + ransitions + + + + + + + in Stores + + + + + + + (2) + + + + + + + Business + + + + + + + logic + + + + + + + in + + + + + + + S + + + + + + + ervices + + + + + + + (3) + + + + + diff --git a/docs/devon4ng/1.0/_images/images/triggering-navigation.svg b/docs/devon4ng/1.0/_images/images/triggering-navigation.svg new file mode 100644 index 00000000..f54fd123 --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/triggering-navigation.svg @@ -0,0 +1,422 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Smart Component + + + + + + + + Dumb Component A + + + + + + + + Dumb Component C + + + + + + + + navigationButtonClick + + + + + + + Event + + + + + + + + navigationButtonClick + + + + + + + Event + + + + + + + User clicks button to + + + + + + + trigger navigation + + + + + + + + Router + + + + + + + + + + + + + diff --git a/docs/devon4ng/1.0/_images/images/twitter.png b/docs/devon4ng/1.0/_images/images/twitter.png new file mode 100644 index 00000000..846ef2e2 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/twitter.png differ diff --git a/docs/devon4ng/1.0/_images/images/university.png b/docs/devon4ng/1.0/_images/images/university.png new file mode 100644 index 00000000..e3ebe33c Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/university.png differ diff --git a/docs/devon4ng/1.0/_images/images/use-case-service.svg b/docs/devon4ng/1.0/_images/images/use-case-service.svg new file mode 100644 index 00000000..cfabc02a --- /dev/null +++ b/docs/devon4ng/1.0/_images/images/use-case-service.svg @@ -0,0 +1,319 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + image/svg+xml + + + + + + + + + + + UseCaseService + + + + + + + + action(): void + + + + + + + + Store + + + + + + + + Adapter + + + + + + + + Business + + + + + + + Service + + + + + + + + + + + + + + + + + + + + diff --git a/docs/devon4ng/1.0/_images/images/used-technologies.jpg b/docs/devon4ng/1.0/_images/images/used-technologies.jpg new file mode 100644 index 00000000..f79fe526 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/used-technologies.jpg differ diff --git a/docs/devon4ng/1.0/_images/images/view_icon.png b/docs/devon4ng/1.0/_images/images/view_icon.png new file mode 100644 index 00000000..51257d45 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/view_icon.png differ diff --git a/docs/devon4ng/1.0/_images/images/web_icon.png b/docs/devon4ng/1.0/_images/images/web_icon.png new file mode 100644 index 00000000..0afc937e Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/web_icon.png differ diff --git a/docs/devon4ng/1.0/_images/images/youtube.png b/docs/devon4ng/1.0/_images/images/youtube.png new file mode 100644 index 00000000..b5eb06a6 Binary files /dev/null and b/docs/devon4ng/1.0/_images/images/youtube.png differ diff --git a/docs/devon4ng/1.0/architecture.html b/docs/devon4ng/1.0/architecture.html new file mode 100644 index 00000000..d706ae84 --- /dev/null +++ b/docs/devon4ng/1.0/architecture.html @@ -0,0 +1,386 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Architecture

+
+
+

The following principles and guidelines are based on Angular Style Guide - especially Angular modules (see Angular Docs). +It extends those where additional guidance is needed to define an architecture which is:

+
+
+
    +
  • +

    maintainable across applications and teams

    +
  • +
  • +

    easy to understand, especially when coming from a classic Java/.Net perspective - so whenever possible the same principles apply both to the server and the client

    +
  • +
  • +

    pattern based to solve common problems

    +
  • +
  • +

    based on best of breed solutions coming from open source and Capgemini project experiences

    +
  • +
  • +

    gives as much guidance as necessary and as little as possible

    +
  • +
+
+
+
+
+

Overview

+
+
+

When using Angular the web client architecture is driven by the framework in the way Google and the Angular community think about web client architecture. +Angular gives an opinion on how to look at architecture. +It is component based like devon4j but uses different terms which are common language in web application development. +The important term is module, which is used instead of component. The primary reason is the naming collision with the Web Components standard (see Web Components).
+To clarify this:

+
+
+
    +
  • +

    A component describes an UI element containing HTML, CSS and JavaScript - structure, design and logic encapsulated inside a reusable container called component.

    +
  • +
  • +

    A module describes an applications feature area. The application flight-app may have a module called booking.

    +
  • +
+
+
+

An application developed using Angular consists of multiple modules. +There are feature modules and special modules described by the Angular Style Guide - core and shared. +Angular or Angular Style Guide give no guidance on how to structure a module internally. +This is where this architecture comes in.

+
+
+
+
+

Layers

+
+
+

The architecture describes two layers. The terminology is based on common language in web development.

+
+
+
+Architecture - Layers +
+
Figure 1. Layers
+
+
+
    +
  • +

    Components Layer encapsulates components which present the current application state. +Components are separated into Smart and Dumb Components. +The only logic present is view logic inside Smart Components.

    +
  • +
  • +

    Services Layer is more or less what we call 'business logic layer' on the server side. +The layer defines the applications state, the transitions between state and classic business logic. +Stores contain application state over time to which Smart Components subscribe to. +Adapters are used to perform XHR, WebSocket connections, etc. +The business model is described inside the module. +Use case services perform business logic needed for use cases. +A use case services interacts with the store and adapters. +Methods of use case services are the API for Smart Components. +Those methods are Actions in reactive terminology.

    +
  • +
+
+
+
+
+

Modules

+
+
+

Angular requires a module called app which is the main entrance to an application at runtime - this module gets bootstrapped. +Angular Style Guide defines feature modules and two special modules - core and shared.

+
+
+
+Architecture - Modules +
+
Figure 2. Modules
+
+
+

A feature module is basically a vertical cut through both layers. +The shared module consists of components shared across feature modules. +The core module holds services shared across modules. +So core module is a module only having a services layer +and shared module is a module only having a components layer.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/components-layer.html b/docs/devon4ng/1.0/components-layer.html new file mode 100644 index 00000000..ebfc5366 --- /dev/null +++ b/docs/devon4ng/1.0/components-layer.html @@ -0,0 +1,470 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Components Layer

+
+
+

The components layer encapsulates all components presenting the current application view state, which means data to be shown to the user. +The term component refers to a component described by the standard Web Components. +So this layer has all Angular components, directives and pipes defined for an application. +The main challenges are:

+
+
+
    +
  • +

    how to structure the components layer (see File Structure Guide)

    +
  • +
  • +

    decompose components into maintainable chunks (see Component Decomposition Guide)

    +
  • +
  • +

    handle component interaction

    +
  • +
  • +

    manage calls to the services layer

    +
  • +
  • +

    apply a maintainable data and event flow throughout the component tree

    +
  • +
+
+
+
+
+

Smart and Dumb Components

+
+
+

The architecture applies the concept of Smart and Dumb Components (syn. Containers and Presenters). +The concept means that components are divided into Smart and Dumb Components.

+
+
+

A Smart Component typically is a top-level dialog inside the component tree.

+
+
+
    +
  • +

    a component, that can be routed to

    +
  • +
  • +

    a modal dialog

    +
  • +
  • +

    a component, which is placed inside AppComponent

    +
  • +
+
+
+

A Dumb Component can be used by one to many Smart Components. +Inside the component tree a Dumb Component is a child of a Smart Component.

+
+
+
+Component Tree +
+
Figure 1. Component tree example
+
+
+

As shown the topmost component is always the AppComponent in Angular applications. +The component tree describes the hierarchy of components starting from AppComponent. +The figure shows Smart Components in blue and Dumb Components in green. +AppComponent is a Smart Component by definition. +Inside the template of AppComponent placed components are static components inside the component tree. +So they are always displayed. +In the example OverviewComponent and DetailsComponent are rendered by Angular compiler depending on current URL the application displays. +So OverviewComponents sub-tree is displayed if the URL is /overview and DetailsComponents sub-tree is displayed if the URL is /details. +To clarify this distinction further the following table shows the main differences.

+
+
+
Smart vs Dumb Components
+

|== = +|Smart Components |Dumb Components

+
+
+

|contain the current view state +|show data via binding (@Input) and contain no view state

+
+
+

|handle events emitted by Dumb Components +|pass events up the component tree to be handled by Smart Components (@Output)

+
+
+

|call the services layer +|never call the services layer

+
+
+

|use services +|do not use services

+
+
+

|consists of n Dumb Components +|is independent of Smart Components +|== =

+
+
+
+
+

Interaction of Smart and Dumb Components

+
+
+

With the usage of the Smart and Dumb Components pattern one of the most important part is component interaction. +Angular comes with built in support for component interaction with @Input() and @Output() Decorators. +The following figure illustrates an unidirectional data flow.

+
+
+
    +
  • +

    Data always goes down the component tree - from a Smart Component down its children.

    +
  • +
  • +

    Events bubble up, to be handled by a Smart Component.

    +
  • +
+
+
+
+Smart and Dumb Components Interaction +
+
Figure 2. Smart and Dumb Component Interaction
+
+
+

As shown, a Dumb Component's role is to define a signature by declaring Input and Output Bindings.

+
+
+
    +
  • +

    @Input() defines what data is necessary for that component to work

    +
  • +
  • +

    @Output() defines which events can be listened on by the parent component

    +
  • +
+
+
+
Listing 1. Dumb Components define a signature
+
+
export class ValuePickerComponent {
+
+  @Input() columns: string[];
+  @Input() items: {}[];
+  @Input() selected: {};
+  @Input() filter: string;
+  @Input() isChunked = false;
+  @Input() showInput = true;
+  @Input() showDropdownHeader = true;
+
+  @Output() elementSelected = new EventEmitter<{}>();
+  @Output() filterChanged = new EventEmitter<string>();
+  @Output() loadNextChunk = new EventEmitter();
+  @Output() escapeKeyPressed = new EventEmitter();
+
+}
+
+
+
+

The example shows the Dumb Component ValuePickerComponent. +It describes seven input bindings with isChunked, showInput and showDropdownHeader being non-mandatory as they have a default value. +Four output bindings are present. Typically, a Dumb Component has very little to no code inside the TypeScript class.

+
+
+
Listing 2. Smart Components use the Dumb Components signature inside the template
+
+
<div>
+
+  <value-input
+    ...>
+  </value-input>
+
+  <value-picker
+    *ngIf="isValuePickerOpen"
+    [columns]="columns"
+    [items]="filteredItems"
+    [isChunked]="isChunked"
+    [filter]="filter"
+    [selected]="selectedItem"
+    [showDropdownHeader]="showDropdownHeader"
+    (loadNextChunk)="onLoadNextChunk()"
+    (elementSelected)="onElementSelected($event)"
+    (filterChanged)="onFilterChanged($event)"
+    (escapeKeyPressed)="onEscapePressedInsideChildTable()">
+  </value-picker>
+
+</div>
+
+
+
+

Inside the Smart Components template the events emitted by Dumb Components are handled. +It is a good practice to name the handlers with the prefix on* (e.g. onInputChanged()).

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/cookbook-abstract-class-store.html b/docs/devon4ng/1.0/cookbook-abstract-class-store.html new file mode 100644 index 00000000..4f77cf29 --- /dev/null +++ b/docs/devon4ng/1.0/cookbook-abstract-class-store.html @@ -0,0 +1,402 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Abstract Class Store

+
+
+

The following solution presents a base class for implementing stores which handle state and its transitions. +Working with the base class achieves:

+
+
+
    +
  • +

    common API across all stores

    +
  • +
  • +

    logging (when activated in the constructor)

    +
  • +
  • +

    state transitions are asynchronous by design - sequential order problems are avoided

    +
  • +
+
+
+
Listing 1. Usage Example
+
+
@Injectable()
+export class ModalStore extends Store<ModalState> {
+
+  constructor() {
+    super({ isOpen: false }, !environment.production);
+  }
+
+  closeDialog() {
+    this.dispatchAction('Close Dialog', (currentState) => ({...currentState, isOpen: false}));
+  }
+
+  openDialog() {
+    this.dispatchAction('Open Dialog', (currentState) => ({...currentState, isOpen: true}));
+  }
+
+}
+
+
+
+
Listing 2. Abstract Base Class Store
+
+
import { OnDestroy } from '@angular/core';
+import { BehaviorSubject } from 'rxjs/BehaviorSubject';
+import { Observable } from 'rxjs/Observable';
+import { intersection, difference } from 'lodash';
+import { map, distinctUntilChanged, observeOn } from 'rxjs/operators';
+import { Subject } from 'rxjs/Subject';
+import { queue } from 'rxjs/scheduler/queue';
+import { Subscription } from 'rxjs/Subscription';
+
+interface Action<T> {
+  name: string;
+  actionFn: (state: T) => T;
+}
+
+/** Base class for implementing stores. */
+export abstract class Store<T> implements OnDestroy {
+
+  private actionSubscription: Subscription;
+  private actionSource: Subject<Action<T>>;
+  private stateSource: BehaviorSubject<T>;
+  state$: Observable<T>;
+
+  /**
+   * Initializes a store with initial state and logging.
+   * @param initialState Initial state
+   * @param logChanges When true state transitions are logged to the console.
+   */
+  constructor(initialState: T, public logChanges = false) {
+    this.stateSource = new BehaviorSubject<T>(initialState);
+    this.state$ = this.stateSource.asObservable();
+    this.actionSource = new Subject<Action<T>>();
+
+    this.actionSubscription = this.actionSource.pipe(observeOn(queue)).subscribe(action => {
+      const currentState = this.stateSource.getValue();
+      const nextState = action.actionFn(currentState);
+
+      if (this.logChanges) {
+        this.log(action.name, currentState, nextState);
+      }
+
+      this.stateSource.next(nextState);
+    });
+  }
+
+  /**
+   * Selects a property from the stores state.
+   * Will do distinctUntilChanged() and map() with the given selector.
+   * @param selector Selector function which selects the needed property from the state.
+   * @returns Observable of return type from selector function.
+   */
+  select<TX>(selector: (state: T) => TX): Observable<TX> {
+    return this.state$.pipe(
+      map(selector),
+      distinctUntilChanged()
+    );
+  }
+
+  protected dispatchAction(name: string, action: (state: T) => T) {
+    this.actionSource.next({ name, actionFn: action });
+  }
+
+  private log(actionName: string, before: T, after: T) {
+    const result: { [key: string]: { from: any, to: any} } = {};
+    const sameProbs = intersection(Object.keys(after), Object.keys(before));
+    const newProbs = difference(Object.keys(after), Object.keys(before));
+    for (const prop of newProbs) {
+      result[prop] = { from: undefined, to: (<any>after)[prop] };
+    }
+
+    for (const prop of sameProbs) {
+      if ((<any>before)[prop] !==  (<any>after)[prop]) {
+        result[prop] = { from: (<any>before)[prop], to: (<any>after)[prop] };
+      }
+    }
+
+    console.log(this.constructor.name, actionName, result);
+  }
+
+  ngOnDestroy() {
+    this.actionSubscription.unsubscribe();
+  }
+
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-accessibility.html b/docs/devon4ng/1.0/guide-accessibility.html new file mode 100644 index 00000000..33fbe4fb --- /dev/null +++ b/docs/devon4ng/1.0/guide-accessibility.html @@ -0,0 +1,660 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Accessibility

+
+
+

Multiple studies suggest that around 15-20% of the population are living with a disability of some kind. In comparison, that number is higher than any single browser demographic currently, other than Chrome. Not considering those users when developing an application means excluding a large number of people from being able to use it comfortably or at all.

+
+
+

Some people are unable to use a mouse, view a screen, see low-contrast text, or hear dialogue or music, and some people have difficulty understanding complex language. These users need support such as keyboard support, screen reader support, high-contrast text, captions and transcripts, and plain-language support. Such disabilities may range from permanent to situational.

+
+
+
+
+

Key Concerns of Accessible Web Applications

+
+
+
    +
  • +

    Semantic Markup - Allows the application to be understood on a more general level rather than just details of whats being rendered

    +
  • +
  • +

    Keyboard Accessibility - Applications must still be usable when using only a keyboard

    +
  • +
  • +

    Visual Assistance - color contrast, focus of elements and text representations of audio and events

    +
  • +
+
+
+
+
+

Semantic Markup

+
+
+

If you’re creating custom element directives, Web Components or HTML in general, use native elements wherever possible to utilize built-in events and properties. Alternatively, use ARIA to communicate semantic meaning.

+
+
+

HTML tags have attributes that provide extra context on what’s being displayed in the browser. For example, the <img> tag’s alt attribute lets the reader know what is being shown using a short description. However, native tags don’t cover all cases. This is where ARIA fits in. ARIA attributes can provide context on what roles specific elements have in the application or on how elements within the document relate to each other.

+
+
+

A modal component can be given the role of dialog or alertdialog to let the browser know that the component is acting as a modal. The modal component template can use the ARIA attributes aria-labelledby and aria-describedby to describe to readers what the title and purpose of the modal is.

+
+
+
+
@Component({
+    selector: 'ngc2-app',
+    template: `
+      <ngc2-notification-button
+        message="Hello!"
+        label="Greeting"
+        role="button">
+      </ngc2-notification-button>
+      <ngc2-modal
+        [title]="modal.title"
+        [description]="modal.description"
+        [visible]="modal.visible"
+        (close)="modal.close()">
+      </ngc2-modal>
+    `
+})
+export class AppComponent {
+  constructor(private modal: ModalService) { }
+}
+
+
+
+

notification-button.component.ts

+
+
+
+
@Component({
+  selector: 'ngc2-modal',
+  template: `
+    <div
+      role="dialog"
+      aria-labelledby="modal-title"
+      aria-describedby="modal-description">
+      <div id="modal-title">{{title}}</div>
+      <p id="modal-description">{{description}}</p>
+      <button (click)="close.emit()">OK</button>
+    </div>
+  `
+})
+export class ModalComponent {
+  ...
+}
+
+
+
+
+
+

Keyboard Accessibility

+
+
+

Keyboard accessibility is the ability of your application to be interacted with using just a keyboard. The more streamlined the site can be used this way, the more keyboard accessible it is. Keyboard accessibility is one of the largest aspects of web accessibility since it targets:

+
+
+
    +
  • +

    those with motor disabilities who can’t use a mouse

    +
  • +
  • +

    users who rely on screen readers and other assistive technology, which require keyboard navigation

    +
  • +
  • +

    those who prefer not to use a mouse

    +
  • +
+
+
+
+
+

== Focus

+
+
+

Keyboard interaction is driven by something called focus. In web applications, only one element on a document has focus at a time, and a keypress will activate whatever function is bound to that element. +The focused element's border can be styled with CSS using the outline property, but it should not be removed. Elements can also be styled using the :focus pseudo-selector.

+
+
+
+
+

== Tabbing

+
+
+

The most common way of moving focus along the page is through the tab key. Elements will be traversed in the order they appear in the document outline - so that order must be carefully considered during development. +There is a way to change the default behavior or tab order. This can be done through the tabindex attribute. The tabindex can be given the values: +* less than zero - to let readers know that an element should be focusable but not keyboard accessible +* 0 - to let readers know that that element should be accessible by keyboard +* greater than zero - to let readers know the order in which the focusable element should be reached using the keyboard. Order is calculated from lowest to highest.

+
+
+
+
+

== Transitions

+
+
+

The majority of transitions that happen in an Angular application will not involve a page reload. This means that developers will need to carefully manage what happens to focus in these cases.

+
+
+

For example:

+
+
+
+
@Component({
+  selector: 'ngc2-modal',
+  template: `
+    <div
+      role="dialog"
+      aria-labelledby="modal-title"
+      aria-describedby="modal-description">
+      <div id="modal-title">{{title}}</div>
+      <p id="modal-description">{{description}}</p>
+      <button (click)="close.emit()">OK</button>
+    </div>
+  `,
+})
+export class ModalComponent {
+  constructor(private modal: ModalService, private element: ElementRef) { }
+
+  ngOnInit() {
+    this.modal.visible$.subscribe(visible => {
+      if(visible) {
+        setTimeout(() => {
+          this.element.nativeElement.querySelector('button').focus();
+        }, 0);
+      }
+    })
+  }
+}
+
+
+
+
+
+

Visual Assistance

+
+
+

One large category of disability is visual impairment. This includes not just the blind, but those who are color blind or partially sighted, and require some additional consideration.

+
+
+
+
+

Color Contrast

+
+
+

When choosing colors for text or elements on a website, the contrast between them needs to be considered. For WCAG 2.0 AA, this means that the contrast ratio for text or visual representations of text needs to be at least 4.5:1. There are tools online to measure the contrast ratio, such as this color contrast checker from WebAIM, or it can be checked using automation tests.

+
+
+
+
+

Visual Information

+
+
+

Color can help a user’s understanding of information, but it should never be the only way to convey information to a user. For example, a user with red/green color-blindness may have trouble discerning at a glance if an alert is informing them of success or failure.

+
+
+
+
+

Audiovisual Media

+
+
+

Audiovisual elements in the application such as video, sound effects or audio (that is, podcasts) need related textual representations such as transcripts, captions or descriptions. They also should never auto-play and playback controls should be provided to the user.

+
+
+
+
+

Accessibility with Angular Material

+
+
+

The a11y package provides a number of tools to improve accessibility. Import

+
+
+
+
import { A11yModule } from '@angular/cdk/a11y';
+
+
+
+
+
+

ListKeyManager

+
+
+

ListKeyManager manages the active option in a list of items based on keyboard interaction. Intended to be used with components that correspond to a role="menu" or role="listbox" pattern. Any component that uses a ListKeyManager will generally do three things:

+
+
+
    +
  • +

    Create a @ViewChildren query for the options being managed.

    +
  • +
  • +

    Initialize the ListKeyManager, passing in the options.

    +
  • +
  • +

    Forward keyboard events from the managed component to the ListKeyManager.

    +
  • +
+
+
+

Each option should implement the ListKeyManagerOption interface:

+
+
+
+
interface ListKeyManagerOption {
+  disabled?: boolean;
+  getLabel?(): string;
+}
+
+
+
+
+
+

== Types of ListKeyManager

+
+
+

There are two varieties of ListKeyManager, FocusKeyManager and ActiveDescendantKeyManager.

+
+
+
+
+

FocusKeyManager

+
+
+

Used when options will directly receive browser focus. Each item managed must implement the FocusableOption interface:

+
+
+
+
interface FocusableOption extends ListKeyManagerOption {
+  focus(): void;
+}
+
+
+
+
+
+

ActiveDescendantKeyManager

+
+
+

Used when options will be marked as active via aria-activedescendant. Each item managed must implement the Highlightable interface:

+
+
+
+
interface Highlightable extends ListKeyManagerOption {
+  setActiveStyles(): void;
+  setInactiveStyles(): void;
+}
+
+
+
+

Each item must also have an ID bound to the listbox’s or menu’s aria-activedescendant.

+
+
+
+
+

FocusTrap

+
+
+

The cdkTrapFocus directive traps Tab key focus within an element. This is intended to be used to create accessible experience for components like modal dialogs, where focus must be constrained. This directive is declared in A11yModule.

+
+
+

This directive will not prevent focus from moving out of the trapped region due to mouse interaction.

+
+
+

For example:

+
+
+
+
<div class="my-inner-dialog-content" cdkTrapFocus>
+  <!-- Tab and Shift + Tab will not leave this element. -->
+</div>
+
+
+
+
+
+

Regions

+
+
+

Regions can be declared explicitly with an initial focus element by using the cdkFocusRegionStart, cdkFocusRegionEnd and cdkFocusInitial DOM attributes. When using the tab key, focus will move through this region and wrap around on either end.

+
+
+

For example:

+
+
+
+
<a mat-list-item routerLink cdkFocusRegionStart>Focus region start</a>
+<a mat-list-item routerLink>Link</a>
+<a mat-list-item routerLink cdkFocusInitial>Initially focused</a>
+<a mat-list-item routerLink cdkFocusRegionEnd>Focus region end</a>
+
+
+
+
+
+

InteractivityChecker

+
+
+

InteractivityChecker is used to check the interactivity of an element, capturing disabled, visible, tabbable, and focusable states for accessibility purposes.

+
+
+
+
+

LiveAnnouncer

+
+
+

LiveAnnouncer is used to announce messages for screen-reader users using an aria-live region.

+
+
+

For example:

+
+
+
+
@Component({...})
+export class MyComponent {
+
+ constructor(liveAnnouncer: LiveAnnouncer) {
+   liveAnnouncer.announce("Hey Google");
+ }
+}
+
+
+
+
+
+

API reference for Angular CDK a11y

+ +
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-add-electron.html b/docs/devon4ng/1.0/guide-add-electron.html new file mode 100644 index 00000000..2f9e197e --- /dev/null +++ b/docs/devon4ng/1.0/guide-add-electron.html @@ -0,0 +1,848 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Add Electron to an Angular application using Angular CLI

+
+
+

This cookbook recipe explains how to integrate Electron in an Angular 10+ application. Electron is a framework for creating native applications with web technologies like JavaScript, HTML, and CSS. As an example, very well known applications as Visual Studio Code, Atom, Slack or Skype (and many more) are using Electron too.

+
+
+ + + + + +
+ + +At the moment of this writing Angular 11.2.0, Electron 11.2.3 and Electron-builder 22.9.1 were the versions available. +
+
+
+

Here are the steps to achieve this goal. Follow them in order.

+
+
+
+
+

Add Electron and other relevant dependencies

+
+
+

There are two different approaches to add the dependencies in the package.json file:

+
+
+
    +
  • +

    Writing the dependencies directly in that file.

    +
  • +
  • +

    Installing using npm install or yarn add.

    +
  • +
+
+
+ + + + + +
+ + +Please remember if the project has a package-lock.json or yarn.lock file use npm or yarn respectively. +
+
+
+

In order to add the dependencies directly in the package.json file, include the following lines in the devDependencies section:

+
+
+
+
"devDependencies": {
+...
+    "electron": "^11.2.3",
+    "electron-builder": "^22.9.1",
+...
+},
+
+
+
+

As indicated above, instead of this npm install can be used:

+
+
+
+
$ npm install -D electron electron-builder
+
+
+
+

Or with yarn:

+
+
+
+
$ yarn add -D electron electron-builder
+
+
+
+
+
+

Create the necessary typescript configurations

+
+
+

In order to initiate electron in an angular app we need to modify the tsconfig.json file and create a tsconfig.serve.json and a tsconfig.base.json in the root folder.

+
+
+
+
+

== tsconfig.json

+
+
+

This file needs to be modified to create references to ./src/tsconfig.app.json and ./src/tsconfig.spec.json to support different configurations.

+
+
+
+
{
+  "files": [],
+  "references": [
+    {
+      "path": "./src/tsconfig.app.json"
+    },
+    {
+      "path": "./src/tsconfig.spec.json"
+    }
+  ]
+}
+
+
+
+
+
+

== tsconfig.app.json

+
+
+
+
{
+  "extends": "../tsconfig.base.json",
+  "compilerOptions": {
+    "outDir": "../app",
+    "module": "es2015",
+    "baseUrl": "",
+    "types": []
+  },
+  "include": [
+    "**/*.ts",
+  ],
+  "exclude": [
+    "**/*.spec.ts"
+  ],
+  "angularCompilerOptions": {
+    "fullTemplateTypeCheck": true,
+    "strictInjectionParameters": true,
+    "preserveWhitespaces": true
+  }
+}
+
+
+
+
+
+

== tsconfig.spec.json

+
+
+
+
{
+  "extends": "../tsconfig.base.json",
+  "compilerOptions": {
+    "outDir": "../spec",
+    "module": "commonjs",
+    "types": [
+      "jasmine",
+      "node"
+    ]
+  },
+  "files": [
+    "test.ts",
+  ],
+  "include": [
+    "**/*.spec.ts",
+    "**/*.d.ts"
+  ],
+  "exclude": [
+    "dist",
+    "release",
+    "node_modules"
+  ]
+}
+
+
+
+
+
+

== tsconfig.base.json

+
+
+

This is shared between tsconfig.app.json and tsconfig.spec.json and it will be extended on each config file.

+
+
+
+
{
+  "compileOnSave": false,
+  "compilerOptions": {
+    "outDir": "./dist",
+    "sourceMap": true,
+    "declaration": false,
+    "moduleResolution": "node",
+    "emitDecoratorMetadata": true,
+    "experimentalDecorators": true,
+    "target": "es5",
+    "typeRoots": [
+      "node_modules/@types"
+    ],
+    "lib": [
+      "es2017",
+      "es2016",
+      "es2015",
+      "dom"
+    ]
+  },
+  "files": [
+    "electron-main.ts",
+    "src/polyfills.ts"
+  ],
+  "include": [
+    "src/**/*.d.ts"
+  ],
+  "exclude": [
+    "node_modules"
+  ]
+}
+
+
+
+
+
+

== tsconfig.serve.json

+
+
+

In the root, tsconfig.serve.json needs to be created. This typescript config file is going to be used when we serve electron:

+
+
+
+
{
+  "compilerOptions": {
+    "outDir": ".",
+    "sourceMap": true,
+    "declaration": false,
+    "moduleResolution": "node",
+    "emitDecoratorMetadata": true,
+    "experimentalDecorators": true,
+    "target": "es5",
+    "typeRoots": [
+      "node_modules/@types"
+    ],
+    "lib": [
+      "es2017",
+      "dom"
+    ]
+  },
+  "include": [
+    "electron-main.ts"
+  ],
+  "exclude": [
+    "node_modules",
+    "**/*.spec.ts"
+  ]
+}
+
+
+
+
+
+

Add Electron build configuration

+
+
+

In order to configure electron builds properly we need to create a new json on our application, let’s call it electron-builder.json. For more information and fine tuning please refer to the Electron Builder official documentation.

+
+
+

The contents of the file will be something similar to the following:

+
+
+
+
{
+  "productName": "devon4ngElectron",
+  "directories":{
+    "output": "./builder-release"
+  },
+  "win": {
+    "icon": "dist/assets/icons",
+    "target": [
+      "portable"
+    ]
+  },
+  "mac": {
+    "icon": "dist/assets/icons",
+    "target": [
+      "dmg"
+    ]
+  },
+  "linux": {
+    "icon": "dist/assets/icons",
+    "target": [
+      "AppImage"
+    ]
+  }
+}
+
+
+
+

There are two important things in this file:

+
+
+
    +
  1. +

    "output": this is where electron builder is going to build our application

    +
  2. +
  3. +

    "icon": in every OS possible there is an icon parameter, the route to the icon folder that will be created after building with angular needs to be used here. This will make it so the electron builder can find the icons and build.

    +
  4. +
+
+
+
+
+

Modify angular.json

+
+
+

angular.json has to be modified so the project is built inside /dist without an intermediate folder.

+
+
+
+
{
+  "architect": {
+    "build": {
+      "outputPath": "dist"
+    }
+  }
+}
+
+
+
+
+
+

Create the electron window in electron-main.ts

+
+
+

In order to use electron, a file needs to be created at the root of the application (electron-main.ts). This file will create a window with different settings checking if we are using --serve as an argument:

+
+
+
+
import { app, BrowserWindow } from 'electron';
+import * as path from 'path';
+import * as url from 'url';
+
+let win: any;
+const args: any = process.argv.slice(1);
+const serve: any = args.some((val) => val == '--serve');
+
+const createWindow:any = ()=>{
+  // Create the browser window.
+  win = new BrowserWindow({
+    fullscreen: true,
+    webPreferences: {
+      nodeIntegration: true,
+    }
+  });
+
+  if (serve) {
+    require('electron-reload')(__dirname, {
+      electron: require(`${__dirname}/node_modules/electron`)
+    });
+    win.loadURL('http://localhost:4200');
+  } else {
+    win.loadURL(
+      url.format({
+        pathname: path.join(__dirname, 'dist/index.html'),
+        protocol: 'file:',
+        slashes: true
+      })
+    );
+  }
+
+  if (serve) {
+    win.webContents.openDevTools();
+  }
+
+  // Emitted when the window is closed.
+  win.on('closed', () => {
+    // Dereference the window object, usually you would store window
+    // in an array if your app supports multi windows, this is the time
+    // when you should delete the corresponding element.
+    // tslint:disable-next-line:no-null-keyword
+    win = null;
+  });
+}
+
+try {
+  // This method will be called when Electron has finished
+  // initialization and is ready to create browser windows.
+  // Some APIs can only be used after this event occurs.
+  app.on('ready', createWindow);
+
+   // Quit when all windows are closed.
+  app.on('window-all-closed', () => {
+    // On OS X it is common for applications and their menu bar
+    // to stay active until the user quits explicitly with Cmd + Q
+    if (process.platform !==  'darwin') {
+      app.quit();
+    }
+  });
+
+   app.on('activate', () => {
+    // On OS X it's common to re-create a window in the app when the
+    // dock icon is clicked and there are no other windows open.
+    if (win == null) {
+      createWindow();
+    }
+  });
+} catch (e) {
+  // Catch Error
+  // throw e;
+}
+
+
+
+
+
+

Add the electron window and improve the package.json scripts

+
+
+

Inside package.json the electron window that will be transformed to electron-main.js when building needs to be added.

+
+
+
+
{
+  ....
+  "main": "electron-main.js",
+  "scripts": {...}
+  ....
+}
+
+
+
+

The scripts section in the package.json can be improved to avoid running too verbose commands. As a very complete example we can take a look to the My Thai Star’s scripts section and copy the lines useful in your project. In any case, at least we recommend to add the following lines:

+
+
+
+
  "scripts": {
+    "ng": "ng",
+    "start": "ng serve",
+    "build": "ng build",
+    "test": "ng test",
+    "lint": "ng lint",
+    "e2e": "ng e2e",
+    "electron:tsc": "tsc -p tsconfig.serve.json",
+    "electron:run": "npm run electron:tsc && ng build --base-href ./ && npx electron .",
+    "electron:serve": "npm run electron:tsc && npx electron . --serve",
+    "electron:pack": "npm run electron:tsc && electron-builder --dir --config electron-builder.json",
+    "electron:build": "npm run electron:tsc && electron-builder --config electron-builder.json build"
+  },
+
+
+
+

The electron: scripts do the following:

+
+
+
    +
  • +

    electron:tsc: Compiles electron TS files.

    +
  • +
  • +

    electron:run: Serves Angular app and runs electron.

    +
  • +
  • +

    electron:serve: Serves electron with an already running angular app (i.e. a ng serve command running on another terminal).

    +
  • +
  • +

    electron:pack: Packs electron app.

    +
  • +
  • +

    electron:build: Builds electron app.

    +
  • +
+
+
+
+
+

Add Electron to an Angular application using Nx CLI

+
+
+

Creating an Electron app is very easy and straight-forward if you are using Nx CLI. As a pre-requisite, you should already have an application in your Nx workspace which you want to run as a front-end in your Electron app. (You can follow this guide if you want to get started with Nx).

+
+
+

Follow the steps below to develop an Electron app in your Nx workspace:

+
+
+
+
+

Install nx-electron

+
+
+

Install nx-electron using the command:

+
+
+
+
  npm install -D nx-electron
+
+
+
+

This will add the packages electron and nx-electron as dev dependencies to your Nx workspace. This will help us generate our Electron app in the next step.

+
+
+
+
+

Generate your Electron app

+
+
+

Once you have installed nx-electron, you can generate your electron app using the command:

+
+
+
+
  nx g nx-electron:app <electron-app-name> --frontendProject=<frontend-app-name>
+
+
+
+

And that is it! You have generated your Electron app already. All the configuration files (tsconfig.*) are generated for you under <electron-app-name> in your Nx workspace.

+
+
+
+
+

Serving your app

+
+
+

You can use this command to serve your Electron app:

+
+
+
+
  nx run-many --target=serve --projects=<frontend-app-name>,<electron-app-name> --parallel
+
+
+
+

If you see a blank application, it is because the Electron app was served before the front-end was served. To avoid this, you can serve the front-end and back-end separately, (that is, serve the back-end only after the front-end is served).

+
+
+
+
+

Building your app

+
+
+

The command for building your Electron app in Nx is similar to the serve command above, you only change the target from serve to build:

+
+
+
+
  nx run-many --target=build --projects=<frontend-app-name>,<electron-app-name> --parallel
+
+
+
+
+
+

Packaging your app

+
+
+

Make sure you have built your app before you try to package it using the following command:

+
+
+
+
  nx run <electron-app-name>:package [--options]
+
+
+
+

The options that can be passed can be found here.

+
+
+

You can find a working example of an Electron app in devon4ts-samples.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-angular-elements.html b/docs/devon4ng/1.0/guide-angular-elements.html new file mode 100644 index 00000000..060bbf1c --- /dev/null +++ b/docs/devon4ng/1.0/guide-angular-elements.html @@ -0,0 +1,949 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Elements

+
+ +
+
+
+

What are Angular Elements?

+
+
+

Angular elements are Angular components packaged as custom elements, a web standard for defining new HTML elements in a framework-agnostic way.

+
+
+

Custom elements are a Web Platform feature currently supported by Chrome, Firefox, Opera, and Safari, and available in other browsers through Polyfills. A custom element extends HTML by allowing you to define a tag whose content is created and controlled by JavaScript code. The browser maintains a CustomElementRegistry of defined custom elements (also called Web Components), which maps an instantiable JavaScript class to an HTML tag.

+
+
+
+
+

Why use Angular Elements?

+
+
+

Angular Elements allows Angular to work with different frameworks by using input and output elements. This allows Angular to work with many different frameworks if needed. This is an ideal situation if a slow transformation of an application to Angular is needed or some Angular needs to be added in other web applications (for example, ASP.NET, JSP, etc.).

+
+
+
+
+

Negative points about Elements

+
+
+

Angular Elements is really powerful but, since the transition between views is going to be handled by another framework or HTML/JavaScript, using Angular Router is not possible. The view transitions have to be handled manually. This fact also eliminates the possibility of just porting an application completely.

+
+
+
+
+

How to use Angular Elements?

+
+
+

In a generalized way, a simple Angular component could be transformed to an Angular Element with these steps:

+
+
+
+
+

Installing Angular Elements

+
+
+

The first step is going to be to install the library using our preferred package manager:

+
+
+
+
+

== NPM

+
+
+
+
npm install @angular/elements
+
+
+
+
+
+

== YARN

+
+
+
+
yarn add @angular/elements
+
+
+
+
+
+

Preparing the components in the modules

+
+
+

Inside the app.module.ts, in addition to the normal declaration of the components inside declarations, the modules inside imports and the services inside providers, the components need to be added in entryComponents. If there are components that have their own module, the same logic is going to be applied for them, only adding in the app.module.ts the components that do not have their own module. Here is an example of this:

+
+
+
+
....
+@NgModule({
+  declarations: [
+    DishFormComponent,
+    DishViewComponent
+  ],
+  imports: [
+    CoreModule,  // Module containing Angular Materials
+    FormsModule
+  ],
+  entryComponents: [
+    DishFormComponent,
+    DishViewComponent
+  ],
+  providers: [DishShareService]
+})
+....
+
+
+
+

After that is done, the constructor of the module is going to be modified to use injector and bootstrap the application defining the components. This is going to allow the Angular Element to get the injections and to define a component tag that will be used later:

+
+
+
+
....
+})
+export class AppModule {
+  constructor(private injector: Injector) {
+
+  }
+
+  ngDoBootstrap() {
+    const el = createCustomElement(DishFormComponent, {injector: this.injector});
+    customElements.define('dish-form', el);
+
+    const elView = createCustomElement(DishViewComponent, {injector: this.injector});
+    customElements.define('dish-view', elView);
+  }
+}
+....
+
+
+
+
+
+

A component example

+
+
+

In order to be able to use a component, @Input() and @Output() variables are used. These variables are going to be the ones that will allow the Angular Element to communicate with the framework/JavaScript:

+
+
+

Component html

+
+
+
+
<mat-card>
+    <mat-grid-list cols="1" rowHeight="100px" rowWidth="50%">
+				<mat-grid-tile colspan="1" rowspan="1">
+					<span>{{ platename }}</span>
+				</mat-grid-tile>
+				<form (ngSubmit)="onSubmit(dishForm)" #dishForm="ngForm">
+					<mat-grid-tile colspan="1" rowspan="1">
+						<mat-form-field>
+							<input matInput placeholder="Name" name="name" [(ngModel)]="dish.name">
+						</mat-form-field>
+					</mat-grid-tile>
+					<mat-grid-tile colspan="1" rowspan="1">
+						<mat-form-field>
+							<textarea matInput placeholder="Description" name="description" [(ngModel)]="dish.description"></textarea>
+						</mat-form-field>
+					</mat-grid-tile>
+					<mat-grid-tile colspan="1" rowspan="1">
+						<button mat-raised-button color="primary" type="submit">Submit</button>
+					</mat-grid-tile>
+				</form>
+		</mat-grid-list>
+</mat-card>
+
+
+
+

Component ts

+
+
+
+
@Component({
+  templateUrl: './dish-form.component.html',
+  styleUrls: ['./dish-form.component.scss']
+})
+export class DishFormComponent implements OnInit {
+
+  @Input() platename;
+
+  @Input() platedescription;
+
+  @Output()
+  submitDishEvent = new EventEmitter();
+
+  submitted = false;
+  dish = {name: '', description: ''};
+
+  constructor(public dishShareService: DishShareService) { }
+
+  ngOnInit() {
+    this.dish.name = this.platename;
+    this.dish.description = this.platedescription;
+  }
+
+  onSubmit(dishForm: NgForm): void {
+    this.dishShareService.createDish(dishForm.value.name, dishForm.value.description);
+    this.submitDishEvent.emit('dishSubmited');
+  }
+
+}
+
+
+
+

In this file there are definitions of multiple variables that will be used as input and output. Since the input variables are going to be used directly by HTML, only lowercase and underscore strategies can be used for them. On the onSubmit(dishForm: NgForm) a service is used to pass these variables to another component. Finally, as a last thing, the selector inside @Component has been removed since a tag that will be used dynamically was already defined in the last step.

+
+
+
+
+

Solving the error

+
+
+

In order to be able to use this Angular Element, a Polyfills/Browser support related error needs to be solved. This error can be solved in two ways:

+
+
+
+
+

== Changing the target

+
+
+

One solution is to change the target in tsconfig.json to es2015. This might not be doable for every application since maybe a specific target is required.

+
+
+
+
+

== Installing Polyfills

+
+
+

Another solution is to use a polyfill. In order to do so, the library is going to be installed with a package manager:

+
+
+

Yarn

+
+
+
+
yarn add @webcomponents/webcomponentsjs
+
+
+
+

Npm

+
+
+
+
npm install @webcomponents/webcomponentsjs
+
+
+
+

After the package manager has finished, inside the src folder a new file polyfills.ts is found. To solve the error, importing the corresponding adapter (custom-elements-es5-adapter.js) is necessary:

+
+
+
+
....
+/***************************************************************************************************
+ * APPLICATION IMPORTS
+ */
+
+import '@webcomponents/webcomponentsjs/custom-elements-es5-adapter.js';
+....
+
+
+
+

If you want to learn more about polyfills in angular you can do it here

+
+
+
+
+

Building the Angular Element

+
+
+

First, before building the Angular Element, every element inside the app component except the module needs to be removed. After that, a bash script is created in the root folder. This script will allow us to put every necessary file into a single JS file.

+
+
+
+
ng build "projectName" --configuration production --output-hashing=none && cat dist/"projectName"/runtime.js dist/"projectName"/polyfills.js dist/"projectName"/scripts.js dist/"projectName"/main.js > ./dist/"projectName"/"nameWantedAngularElement".js
+
+
+
+

After executing the bash script, it will generate inside the path dist/"projectName" (or dist/apps/projectname in a Nx workspace) a JS file named "nameWantedAngularElement".js and a css file.

+
+
+
+
+ +
+
+

The library ngx-build-plus allows to add different options when building. In addition, it solves some errors that will occur when trying to use multiple angular elements in an application. In order to use it, yarn or npm can be used:

+
+
+

Yarn

+
+
+
+
yarn add ngx-build-plus
+
+
+
+

Npm

+
+
+
+
npm install ngx-build-plus
+
+
+
+

If you want to add it to a specific sub project in your projects folder, use the --project:

+
+
+
+
.... ngx-build-plus --project "project-name"
+
+
+
+

Using this library and the following command, an isolated Angular Element which won’t have conflicts with others can be generated. This Angular Element will not have a polyfill, so the project where we use it will need to include a polyfill with the Angular Element’s requirements.

+
+
+
+
ng build "projectName" --output-hashing none --single-bundle true --configuration production --bundle-styles false
+
+
+
+

This command will generate three things:

+
+
+
    +
  1. +

    The main JS bundle

    +
  2. +
  3. +

    The script JS

    +
  4. +
  5. +

    The css

    +
  6. +
+
+
+

These files will be used later instead of the single JS generated in the last step.

+
+
+
+
+

== == Extra parameters

+
+
+

Here are some extra useful parameters that ngx-build-plus provides:

+
+
+
    +
  • +

    --keep-polyfills: This parameter is going to allow us to keep the polyfills. This needs to be used with caution, avoiding using multiple different polyfills that could cause an error is necessary.

    +
  • +
  • +

    --extraWebpackConfig webpack.extra.js: This parameter allows us to create a JavaScript file inside our Angular Elements project with the name of different libraries. Using webpack these libraries will not be included in the Angular Element. This is useful to lower the size of our Angular Element by removing libraries shared. Example:

    +
  • +
+
+
+
+
const webpack = require('webpack');
+
+module.exports = {
+    "externals": {
+        "rxjs": "rxjs",
+        "@angular/core": "ng.core",
+        "@angular/common": "ng.common",
+        "@angular/common/http": "ng.common.http",
+        "@angular/platform-browser": "ng.platformBrowser",
+        "@angular/platform-browser-dynamic": "ng.platformBrowserDynamic",
+        "@angular/compiler": "ng.compiler",
+        "@angular/elements": "ng.elements",
+        "@angular/router": "ng.router",
+        "@angular/forms": "ng.forms"
+    }
+}
+
+
+
+
+
+

==

+
+
+
+
  If some libraries are excluded from the `Angular Element` you will need to add the bundled UMD files of those libraries manually.
+== ==
+
+
+
+
+
+

Using the Angular Element

+
+
+

The Angular Element that got generated in the last step can be used in almost every framework. In this case, the Angular Element is going to be used in html:

+
+
+
Listing 1. Sample index.html version without ngx-build-plus
+
+
<html>
+    <head>
+        <link rel="stylesheet" href="styles.css">
+    </head>
+    <body>
+        <div id="container">
+
+        </div>
+        <!--Use of the element non dynamically-->
+        <!--<plate-form platename="test" platedescription="test"></plate-form>-->
+        <script src="./devon4ngAngularElements.js"> </script>
+        <script>
+                var elContainer = document.getElementById('container');
+                var el= document.createElement('dish-form');
+                el.setAttribute('platename','test');
+                el.setAttribute('platedescription','test');
+                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+                elContainer.appendChild(el);
+        </script>
+    </body>
+</html>
+
+
+
+
Listing 2. Sample index.html version with ngx-build-plus
+
+
<html>
+    <head>
+        <link rel="stylesheet" href="styles.css">
+    </head>
+    <body>
+        <div id="container">
+
+        </div>
+        <!--Use of the element non dynamically-->
+        <!--<plate-form platename="test" platedescription="test"></plate-form>-->
+         <script src="./polyfills.js"> </script> <!-- Created using --keep-polyfills options -->
+        <script src="./scripts.js"> </script>
+         <script src="./main.js"> </script>
+        <script>
+                var elContainer = document.getElementById('container');
+                var el= document.createElement('dish-form');
+                el.setAttribute('platename','test');
+                el.setAttribute('platedescription','test');
+                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+                elContainer.appendChild(el);
+        </script>
+    </body>
+</html>
+
+
+
+

In this html, the css generated in the last step is going to be imported inside the <head> and then, the JavaScript element is going to be imported at the end of the body. After that is done, there are two uses of Angular Elements in the html, one directly with use of the @Input() variables as parameters commented in the html:

+
+
+
+
....
+        <!--Use of the element non dynamically-->
+        <!--<plate-form platename="test" platedescription="test"></plate-form>-->
+....
+
+
+
+

and one dynamically inside the script:

+
+
+
+
....
+        <script>
+                var elContainer = document.getElementById('container');
+                var el= document.createElement('dish-form');
+                el.setAttribute('platename','test');
+                el.setAttribute('platedescription','test');
+                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+                elContainer.appendChild(el);
+        </script>
+....
+
+
+
+

This JavaScript is an example of how to dynamically create an Angular Element, inserting attributes to fill our @Input() variables and listening to the @Output() that was defined earlier. This is done with:

+
+
+
+
                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+
+
+
+

This allows JavaScript to hook with the @Output() event emitter that was defined. When this event gets called, another component that was defined gets inserted dynamically.

+
+
+
+
+

Angular Element within another Angular project

+
+
+

In order to use an Angular Element within another Angular project the following steps need to be followed:

+
+
+
+
+

Copy bundled script and css to resources

+
+
+

First copy the generated .js and .css inside assets in the corresponding folder.

+
+
+
+
+

Add bundled script to angular.json

+
+
+

Inside angular.json both of the files that were copied in the last step are going to be included. This will be done both in test and in build. Including them in the test configuration will allow unit tests to be performed.

+
+
+
+
{
+....
+  "architect": {
+    ....
+    "build": {
+      ....
+      "styles": [
+        ....
+          "src/assets/css/devon4ngAngularElements.css"
+        ....
+      ]
+      ....
+      "scripts": [
+        "src/assets/js/devon4ngAngularElements.js"
+      ]
+      ....
+    }
+    ....
+    "test": {
+      ....
+      "styles": [
+        ....
+          "src/assets/css/devon4ngAngularElements.css"
+        ....
+      ]
+      ....
+      "scripts": [
+        "src/assets/js/devon4ngAngularElements.js"
+      ]
+      ....
+    }
+  }
+}
+
+
+
+

By declaring the files in the angular.json angular will take care of including them in a proper way.

+
+
+
+
+

==

+
+
+
+
  If you are using Nx, the configuration file `angular.json` might be named as `workspace.json`, depending on how you had setup the workspace. The structure of the file remains similar though.
+== ==
+
+
+
+
+
+

Using Angular Element

+
+
+

There are two ways that Angular Element can be used:

+
+
+
+
+

== Create component dynamically

+
+
+

In order to add the component in a dynamic way, first adding a container is necessary:

+
+
+

app.component.html

+
+
+
+
....
+<div id="container">
+</div>
+....
+
+
+
+

With this container created, inside the app.component.ts a method is going to be created. This method is going to find the container, create the dynamic element and append it into the container.

+
+
+

app.component.ts

+
+
+
+
export class AppComponent implements OnInit {
+  ....
+  ngOnInit(): void {
+    this.createComponent();
+  }
+  ....
+  createComponent(): void {
+    const container = document.getElementById('container');
+    const component = document.createElement('dish-form');
+    container.appendChild(component);
+  }
+  ....
+
+
+
+
+
+

== Using it directly

+
+
+

In order to use it directly on the templates, in the app.module.ts the CUSTOM_ELEMENTS_SCHEMA needs to be added:

+
+
+
+
....
+import { NgModule, CUSTOM_ELEMENTS_SCHEMA } from '@angular/core';
+....
+@NgModule({
+  ....
+  schemas: [ CUSTOM_ELEMENTS_SCHEMA ],
+
+
+
+

This is going to allow the use of the Angular Element in the templates directly:

+
+
+

app.component.html

+
+
+
+
....
+<div id="container">
+  <dish-form></dish-form>
+</div>
+
+
+
+

You can find a working example of Angular Elements in our devon4ts-samples repo by referring the samples named angular-elements and angular-elements-test.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-angular-lazy-loading.html b/docs/devon4ng/1.0/guide-angular-lazy-loading.html new file mode 100644 index 00000000..1d15d58b --- /dev/null +++ b/docs/devon4ng/1.0/guide-angular-lazy-loading.html @@ -0,0 +1,684 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Lazy loading

+
+
+

When the development of an application starts, it just contains a small set of features so the app usually loads fast. However, as new features are added, the overall application size grows and its loading speed decreases. It is in this context where lazy loading finds its place. +Lazy loading is a design pattern that defers initialization of objects until they are needed, so, for example, users that just access a website’s home page do not need to have other areas loaded. +Angular handles lazy loading through the routing module which redirects to requested pages. Those pages can be loaded at start or on demand.

+
+
+
+
+

An example with Angular

+
+
+

To explain how lazy loading is implemented using Angular, a basic sample app is going to be developed. This app will consist of a window named "level 1" that contains two buttons that redirect to other windows in a "second level". It is a simple example, but useful to understand the relation between Angular modules and lazy loading.

+
+
+
+Levels app structure +
+
Figure 1. Levels app structure.
+
+
+

This graphic shows that modules act as gates to access components "inside" them.

+
+
+

Because the objective of this guide is related mainly with logic, the html structure and SCSS styles are less relevant, but the complete code can be found as a sample here.

+
+
+
+
+

Implementation

+
+
+

First write in a console ng new level-app --routing, to generate a new project called level-app including an app-routing.module.ts file (--routing flag). If you are using Nx, the command would be nx generate @nrwl/angular:app level-app --routing in your Nx workspace.

+
+
+

In the file app.component.html delete all the content except the router-outlet tag.

+
+
+
Listing 1. File app.component.html
+
+
<router-outlet></router-outlet>
+
+
+
+

The next steps consist of creating feature modules.

+
+
+
    +
  • +

    run ng generate module first --routing to generate a module named first.

    +
  • +
  • +

    run ng generate module first/second-left --routing to generate a module named second-left under first.

    +
  • +
  • +

    run ng generate module first/second-right --routing to generate a module second-right under first.

    +
  • +
  • +

    run ng generate component first/first to generate a component named first inside the module first.

    +
  • +
  • +

    run ng generate component first/second-left/content to generate a component content inside the module second-left.

    +
  • +
  • +

    run ng generate component first/second-right/content to generate a component content inside the module second-right.

    +
  • +
+
+
+
+
+

==

+
+
+
+
  If you are using Nx, you have to specify the project name (level-app) along with the --project flag. For example, command for generating the first module will be `ng generate module first --project=level-app --routing`
+== ==
+
+
+
+

To move between components we have to configure the routes used:

+
+
+

In app-routing.module.ts add a path 'first' to FirstComponent and a redirection from '' to 'first'.

+
+
+
Listing 2. File app-routing.module.ts.
+
+
...
+import { FirstComponent } from './first/first/first.component';
+
+const routes: Routes = [
+  {
+    path: 'first',
+    component: FirstComponent
+  },
+  {
+    path: '',
+    redirectTo: 'first',
+    pathMatch: 'full',
+  },
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule],
+})
+export class AppRoutingModule {}
+
+
+
+

In app.module.ts import the module which includes FirstComponent.

+
+
+
Listing 3. File app.module.ts
+
+
....
+import { FirstModule } from './first/first.module';
+
+@NgModule({
+  ...
+  imports: [
+    ....
+    FirstModule
+  ],
+  ...
+})
+export class AppModule { }
+
+
+
+

In first-routing.module.ts add routes that direct to the content of SecondRightModule and SecondLeftModule. The content of both modules have the same name so, in order to avoid conflicts the name of the components are going to be changed using as ( original-name as new-name).

+
+
+
Listing 4. File first-routing.module.ts
+
+
...
+import { ContentComponent as ContentLeft} from './second-left/content/content.component';
+import { ContentComponent as ContentRight} from './second-right/content/content.component';
+import { FirstComponent } from './first/first.component';
+
+const routes: Routes = [
+  {
+    path: '',
+    component: FirstComponent
+  },
+  {
+    path: 'first/second-left',
+    component: ContentLeft
+  },
+  {
+    path: 'first/second-right',
+    component: ContentRight
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule]
+})
+export class FirstRoutingModule { }
+
+
+
+

In first.module.ts import SecondLeftModule and SecondRightModule.

+
+
+
Listing 5. File first.module.ts
+
+
...
+import { SecondLeftModule } from './second-left/second-left.module';
+import { SecondRightModule } from './second-right/second-right.module';
+
+@NgModule({
+  ...
+  imports: [
+    ...
+    SecondLeftModule,
+    SecondRightModule,
+  ]
+})
+export class FirstModule { }
+
+
+
+

Using the current configuration, we have a project that loads all the modules in an eager way. Run ng serve (with --project=level-app in an Nx workspace) to see what happens.

+
+
+

First, during the compilation we can see that just a main file is built.

+
+
+
+Compile eager +
+
Figure 2. Compile eager.
+
+
+

If we go to http://localhost:4200/first and open developer options (F12 on Chrome), it is found that a document named "first" is loaded.

+
+
+
+First level eager +
+
Figure 3. First level eager.
+
+
+

If we click on [Go to right module] a second level module opens, but there is no 'second-right' document.

+
+
+
+Second level right eager +
+
Figure 4. Second level right eager.
+
+
+

But, typing the URL directly will load 'second-right' but not 'first', even if we click on [Go back]

+
+
+
+Second level right eager +
+
Figure 5. Second level right eager direct URL.
+
+
+

Modifying an angular application to load its modules lazily is easy, you have to change the routing configuration of the desired module (for example FirstModule).

+
+
+
Listing 6. File app-routing.module.ts.
+
+
const routes: Routes = [
+  {
+    path: 'first',
+    loadChildren: () => import('./first/first.module').then(m => m.FirstModule),
+  },
+  {
+    path: '',
+    redirectTo: 'first',
+    pathMatch: 'full',
+  },
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule],
+})
+export class AppRoutingModule {}
+
+
+
+

Notice that instead of loading a component, you dynamically import it in a loadChildren attribute because modules act as gates to access components "inside" them. Updating the app to load lazily has four consequences:

+
+
+
    +
  1. +

    No component attribute.

    +
  2. +
  3. +

    No import of FirstComponent.

    +
  4. +
  5. +

    FirstModule import has to be removed from the imports array at app.module.ts.

    +
  6. +
  7. +

    Change of context.

    +
  8. +
+
+
+

If we check first-routing.module.ts again, we can see that the path for ContentLeft and ContentRight is set to 'first/second-left' and 'first/second-right' respectively, so writing http://localhost:4200/first/second-left will redirect us to ContentLeft. However, after loading a module with loadChildren setting the path to 'second-left' and 'second-right' is enough because it acquires the context set by AppRoutingModule.

+
+
+
Listing 7. File first-routing.module.ts
+
+
const routes: Routes = [
+  {
+    path: '',
+    component: FirstComponent
+  },
+  {
+    path: 'second-left',
+    component: ContentLeft
+  },
+  {
+    path: 'second-right',
+    component: ContentRight
+  }
+];
+
+
+
+

If we go to 'first' then FirstModule is situated in '/first' but also its children ContentLeft and ContentRight, so it is not necessary to write in their path 'first/second-left' and 'first/second-right', because that will situate the components on 'first/first/second-left' and 'first/first/second-right'.

+
+
+
+First level wrong path +
+
Figure 6. First level lazy wrong path.
+
+
+

When we compile an app with lazy loaded modules, files containing them will be generated

+
+
+
+First level lazy compilation +
+
Figure 7. First level lazy compilation.
+
+
+

And if we go to developer tools → network, we can find those modules loaded (if they are needed).

+
+
+
+First level lazy +
+
Figure 8. First level lazy.
+
+
+

To load the component ContentComponent of SecondLeftModule lazily, we have to load SecondLeftModule as a children of FirstModule:

+
+
+
    +
  • +

    Change component to loadChildren and reference SecondLeftModule.

    +
  • +
+
+
+
Listing 8. File first-routing.module.ts.
+
+
const routes: Routes = [
+  {
+    path: '',
+    component: FirstComponent
+  },
+  {
+    path: 'second-left',
+    loadChildren: () => import('./second-left/second-left.module').then(m => m.SecondLeftModule),
+  },
+  {
+    path: 'second-right',
+    component: ContentRight
+  }
+];
+
+
+
+
    +
  • +

    Remove SecondLeftModule at first.component.ts

    +
  • +
  • +

    Route the components inside SecondLeftModule. Without this step nothing would be displayed.

    +
  • +
+
+
+
Listing 9. File second-left-routing.module.ts.
+
+
...
+import { ContentComponent } from './content/content.component';
+
+const routes: Routes = [
+  {
+    path: '',
+    component: ContentComponent
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule]
+})
+export class SecondLeftRoutingModule { }
+
+
+
+
    +
  • +

    run ng serve to generate files containing the lazy modules.

    +
  • +
+
+
+
+Second level lazy +
+
Figure 9. Second level lazy loading compilation.
+
+
+

Clicking on [Go to left module] triggers the load of SecondLeftModule.

+
+
+
+Second level lazy network +
+
Figure 10. Second level lazy loading network.
+
+
+
+
+

Conclusion

+
+
+

Lazy loading is a pattern useful when new features are added, these features are usually identified as modules which can be loaded only if needed as shown in this document, reducing the time spent loading an application.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-angular-library.html b/docs/devon4ng/1.0/guide-angular-library.html new file mode 100644 index 00000000..1f4c5830 --- /dev/null +++ b/docs/devon4ng/1.0/guide-angular-library.html @@ -0,0 +1,566 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Library

+
+
+

Angular CLI provides us with methods that allow the creation of a library. After that, using a package manager (either npm or yarn) the library can be built and packed, which will later allow us to install/publish it.

+
+
+
+
+

What’s a library?

+
+
+

From Wikipedia: a library is a collection of non-volatile resources used by computer programs, often for software development. These may include configuration data, documentation, help data, message templates, pre-written code and subroutines, classes, values or type specifications.

+
+
+
+
+

How to build a library

+
+
+

In this section, a library is going to be built step by step. Please note, we will be explaining the steps using both Angular CLI and Nx CLI. You are free to choose either one for your development.

+
+
+
+
+

1. Creating an empty application

+
+
+

First, using Angular CLI we are going to generate an empty application which will later be filled with the generated library. In order to do so, Angular CLI allows us to add an option (--create-application) to ng new "application-name". This option is going to tell Angular CLI not to create the initial app project. This is convenient since a library is going to be generated in later steps. Using the command ng new "application-name" --create-application=false an empty project with the wanted name is created.

+
+
+
+
ng new "application-name" --create-application=false
+
+
+
+

This step is much easier and more straightforward when using Nx. Nx allows us to work in a monorepo workspace, where you can develop a project as an application, or a library, or a tool. You can follow this guide to get started with Nx. +The command for generating a library in Nx is nx generate @nrwl/angular:library library-name --publishable --importPath=library-name. This will create an empty angular application which we can modify and publish as a library.

+
+
+
+
+

2. Generating a library

+
+
+

After generating an empty application, a library is going to be generated. Inside the folder of the project, the Angular CLI command ng generate library "library-name" is going to generate the library as a project (projects/"library-name"). As an addition, the option --prefix="library-prefix-wanted" allows us to switch the default prefix that Angular generated with (lib). Using the option to change the prefix the command will look like this ng generate library "library-name" --prefix="library-prefix-wanted".

+
+
+
+
ng generate library "library-name" --prefix="library-prefix-wanted"
+
+
+
+

If you are using Nx, this step is not needed as it is already covered in step 1. In this case, the library project will be generated in the libs folder of a Nx workspace.

+
+
+
+
+

3. Modifying our library

+
+
+

In the last step we generated a library. This automatically generates a module, a service and a component inside projects/"library-name" that we can modify, adding new methods, components etc. that we want to use in other projects. We can generate other elements, using the usual Angular CLI generate commands adding the option --project="library-name"; this will allow us to generate elements within our project. An example of this is: ng generate service "name" --project="library-name".

+
+
+
+
ng generate "element" "name" --project="library-name"
+
+
+
+

You can use the same command as above in a Nx workspace.

+
+
+
+
+

4. Exporting the generated things

+
+
+

Inside the library (projects/"library-name) there’s a public_api.ts which is the file that exports the elements inside the library. (The file is named as index.ts in an Nx workspace). In case we generated other things, this file needs to be modified adding the extra exports with the generated elements. In addition, changing the library version is possible in the file package.json.

+
+
+
+
+

5. Building our library

+
+
+

Once we added the necessary exports, in order to use the library in other applications, we need to build the library. The command ng build "library-name" is going to build the library, generating the necessary files in "project-name"/dist/"library-name".

+
+
+
+
ng build "library-name"
+
+
+
+

You can use the same command in Nx as well. Only the path for the generated files will be slightly different: "project-name"/dist/libs/"library-name"

+
+
+
+
+

6. Packing the library

+
+
+

In this step we are going to pack the built library. In order to do so, we need to go inside dist/"library-name" (or dist/libs/"library-name") and then run either npm pack or yarn pack to generate a "library-name-version.tgz" file.

+
+
+
Listing 1. Packing using npm
+
+
npm pack
+
+
+
+
Listing 2. Packing using yarn
+
+
yarn pack
+
+
+
+
+
+

7. Publishing to npm repository (optional)

+
+
+
    +
  • +

    Add a README.md and LICENSE file. The text inside README.md will be used in you npm package web page as documentation.

    +
  • +
  • +

    run npm adduser if you do not have a npm account to create it, otherwise run npm login and introduce your credentials.

    +
  • +
  • +

    run npm publish inside dist/"library-name" folder.

    +
  • +
  • +

    Check that the library is published: https://npmjs.com/package/library-name

    +
  • +
+
+
+
+
+

8. Installing our library in other projects

+
+
+

In this step we are going to install/add the library in other projects.

+
+
+
+
+

== npm

+
+
+

In order to add the library in other applications, there are two ways:

+
+
+
    +
  • +

    Option 1: From inside the application where the library is going to get used, using the command npm install "path-to-tgz"/"library-name-version.tgz" allows us to install the .tgz generated in Packing the library.

    +
  • +
  • +

    Option 2: run npm install "library-name" to install it from npm repository.

    +
  • +
+
+
+
+
+

== yarn

+
+
+

To add the package using yarn:

+
+
+
    +
  • +

    Option 1: From inside the application where the library is going to get used, using the command yarn add "path-to-tgz"/"library-name-version.tgz" allows us to install the .tgz generated in Packing the library.

    +
  • +
  • +

    Option 2: run yarn add "library-name" to install it from npm repository.

    +
  • +
+
+
+
+
+

9. Using the library

+
+
+

Finally, once the library is installed with either package manager, you can start using the elements it contains as if they were normal elements inside the application. Example app.component.ts:

+
+
+
+
import { Component, OnInit } from '@angular/core';
+import { MyLibraryService } from 'my-library';
+
+@Component({
+  selector: 'app-root',
+  templateUrl: './app.component.html',
+  styleUrls: ['./app.component.scss']
+})
+export class AppComponent implements OnInit {
+
+  toUpper: string;
+
+  constructor(private myLibraryService: MyLibraryService) {}
+  title = 'devon4ng library test';
+  ngOnInit(): void {
+    this.toUpper = this.myLibraryService.firstLetterToUpper('test');
+  }
+}
+
+
+
+

Example app.component.html:

+
+
+
+
<!--The content below is only a placeholder and can be replaced.-->
+<div style="text-align:center">
+  <h1>
+    Welcome to {{ title }}!
+  </h1>
+  <img width="300" alt="Angular Logo" src="data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAyNTAgMjUwIj4KICAgIDxwYXRoIGZpbGw9IiNERDAwMzEiIGQ9Ik0xMjUgMzBMMzEuOSA2My4ybDE0LjIgMTIzLjFMMTI1IDIzMGw3OC45LTQzLjcgMTQuMi0xMjMuMXoiIC8+CiAgICA8cGF0aCBmaWxsPSIjQzMwMDJGIiBkPSJNMTI1IDMwdjIyLjItLjFWMjMwbDc4LjktNDMuNyAxNC4yLTEyMy4xTDEyNSAzMHoiIC8+CiAgICA8cGF0aCAgZmlsbD0iI0ZGRkZGRiIgZD0iTTEyNSA1Mi4xTDY2LjggMTgyLjZoMjEuN2wxMS43LTI5LjJoNDkuNGwxMS43IDI5LjJIMTgzTDEyNSA1Mi4xem0xNyA4My4zaC0zNGwxNy00MC45IDE3IDQwLjl6IiAvPgogIDwvc3ZnPg== ">
+</div>
+<h2>Here is my library service being used: {{toUpper}}</h2>
+<lib-my-library></lib-my-library>
+
+
+
+

Example app.module.ts:

+
+
+
+
import { BrowserModule } from '@angular/platform-browser';
+import { NgModule } from '@angular/core';
+
+import { AppRoutingModule } from './app-routing.module';
+import { AppComponent } from './app.component';
+
+import { MyLibraryModule } from 'my-library';
+@NgModule({
+  declarations: [
+    AppComponent
+  ],
+  imports: [
+    BrowserModule,
+    AppRoutingModule,
+    MyLibraryModule
+  ],
+  providers: [],
+  bootstrap: [AppComponent]
+})
+export class AppModule { }
+
+
+
+

The result from using the library:

+
+
+
+result +
+
+
+
+
+

devon4ng libraries

+
+
+

In devonfw/devon4ng-library you can find some useful libraries:

+
+
+
    +
  • +

    Authorization module: This devon4ng Angular module adds rights-based authorization to your Angular app.

    +
  • +
  • +

    Cache module: Use this devon4ng Angular module when you want to cache requests to server. You may configure it to store in cache only the requests you need and to set the duration you want.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-angular-mock-service.html b/docs/devon4ng/1.0/guide-angular-mock-service.html new file mode 100644 index 00000000..876d7b33 --- /dev/null +++ b/docs/devon4ng/1.0/guide-angular-mock-service.html @@ -0,0 +1,409 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Mock Service

+
+
+

We’ve all been there: A new idea comes, let’s quickly prototype it. But wait, there’s no back-end. What can we do?

+
+
+

Below you will find a solution that will get you started quickly and easily. The idea is to write a simple mock service that helps us by feeding data into our components.

+
+
+
+
+

The app we start with

+
+
+

Let’s say you have a simple boilerplate code, with your favorite styling library hooked up and you’re ready to go. The angular-material-basic-layout sample is a good starting place.

+
+
+
+
+

The Components

+
+
+

Components - are the building blocks of our application. Their main role is to enable fragments of user interfaces. They will either display data (a list, a table, a chart, etc.), or 'collect' user interaction (e.g: a form, a menu, etc.)

+
+
+

Components stay at the forefront of the application. They should also be reusable (as much as possible). Reusability is key for what we are trying to achieve - a stable, maintainable front-end where multiple people can contribute and collaborate.

+
+
+

In our project, we are at the beginning. That means we may have more ideas than plans. We are exploring possibilities. In order to code efficiently:
+1) We will not store mock data in the components.
+2) We will not fetch or save data directly in the components.

+
+ +
+
+
+

The Service

+
+
+

So, how do we get data in our app? How do we propagate the data to the components and how can we send user interaction from the components to our data "manager" logic?

+
+
+

The answer to all these questions is an Angular Service (that we will just call a service from now on).

+
+
+

A service is injectable logic that can be consumed by all the components that need it. It can carry manipulation functions and, in our case, fetch data from a provider.

+
+
+
+Service Architecture +
+
Figure 1. Angular Components & Services architecture.
+
+
+

Inside the Angular App, an Injector gives each component access to its required services. It’s good coding practice to use a distinct service for each data type you want to manipulate. The type is described in an interface.

+
+
+

Still, our ideas drive in different ways, so we have to stay flexible. We cannot use a database at the moment, but we want a way to represent data on screen, which can grow organically.

+
+ +
+
+
+

The Model

+
+
+
+Data Box +
+
Figure 2. Data box in relation to services and components.
+
+
+

Let’s consider a 'box of data' represented in JSON. Physically, this means a folder with some JSON/TS files in it. They are located in the app/mock folder. The example uses only one mock data file. The file is typed according to our data model.

+
+
+

Pro tip: separate your files based on purpose. In your source code, put the mock files in the mock folder, components in the components folder, services in the services folder and data models in the models folder.

+
+
+
+Project Structure +
+
Figure 3. Project structure.
+
+
+

Aligned with the Angular way of development, we are implementing a model-view-controller pattern.

+
+
+

The model is represented by the interfaces we make. These interfaces describe the data structures we will use in our application. In this example, there is one data model, corresponding with the 'type' of data that was mocked. In the models folder you will find the .ts script file that describes chemical elements. The corresponding mock file defines a set of chemical element objects, in accordance with our interface definition.

+
+
+
+
+

Use case

+
+
+

Enough with the theory, let’s see what we have here. The app presents 3 pages as follows:

+
+
+
    +
  • +

    A leader board with the top 3 elements

    +
  • +
  • +

    A data table with all the elements

    +
  • +
  • +

    A details page that reads a route parameter and displays the details of the element.

    +
  • +
+
+
+

There are a lot of business cases which have these requirements:

+
+
+
    +
  • +

    A leader board can be understood as "the most popular items in a set", "the latest updated items", "you favorite items" etc.

    +
  • +
  • +

    A data table with CRUD operations is very useful (in our case we only view details or delete an item, but they illustrate two important things: the details view shows how to navigate and consume a parametric route, the delete action shows how to invoke service operations over the loaded data - this means that the component is reusable and when the data comes from an API, only the service will need its implementation changed)

    +
  • +
+
+
+

Check out the angular-mock-service sample from the apps folder and easily get started with front-end development using dummy data.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-angular-pwa.html b/docs/devon4ng/1.0/guide-angular-pwa.html new file mode 100644 index 00000000..5853821e --- /dev/null +++ b/docs/devon4ng/1.0/guide-angular-pwa.html @@ -0,0 +1,816 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Progressive Web App

+
+
+

Progressive web applications (PWA) are web applications that offer a better user experience than traditional ones. In general, they solve problems related with reliability and speed:

+
+
+
    +
  • +

    Reliability: PWA are stable. In this context stability means that even with slow connections or even with no network at all, the application still works. To achieve this, some basic resources like styles, fonts, requests, …​ are stored; due to this caching, it is not possible to assure that the content is always up-to-date.

    +
  • +
  • +

    Speed: When a user opens an application, he or she will expect it to load almost immediately (almost 53% of users abandon sites that take longer than 3 seconds, source: https://developers.google.com/web/progressive-web-apps/#fast).

    +
  • +
+
+
+

A PWA uses a script called a service worker, which runs in the background and essentially acts as a proxy between the web app and the network, intercepting requests and acting depending on the network conditions.

+
+
+
+
+

Assumptions

+
+
+

This guide assumes that you already have installed:

+
+
+
    +
  • +

    NodeJS

    +
  • +
  • +

    npm package manager

    +
  • +
  • +

    Angular CLI / Nx CLI

    +
  • +
+
+
+
+
+

Sample Application

+
+
+
+My Thai Star recommendation +
+
Figure 1. Basic angular PWA.
+
+
+

To explain how to build a PWA using Angular, a basic application is going to be built. This app will be able to ask for resources and save them in the cache in order to work even offline.

+
+
+
+
+

Step 1: Create a new project

+
+
+

This step can be completed with one simple command using the Angular CLI: ng new <name>, where <name> is the name for the app. In this case, the app is going to be named basic-ng-pwa. If you are using Nx CLI, you can use the command nx generate @nrwl/angular:app <name> in your Nx workspace. You can follow this guide if you want to get started with Nx workspace.

+
+
+
+
+

Step 2: Create a service

+
+
+

Web applications usually use external resources, making it necessary to add services which can get those resources. This application gets a dish from My Thai Star’s back-end and shows it. To do so, a new service is going to be created.

+
+
+
    +
  • +

    go to project folder: cd basic-ng-pwa. If using Nx, go to the root folder of the workspace.

    +
  • +
  • +

    run ng generate service data. For Nx CLI, specify the project name with --project flag. So the command becomes ng generate service data --project=basic-ng-pwa

    +
  • +
  • +

    Modify data.service.ts, environment.ts, environment.prod.ts

    +
  • +
+
+
+

To retrieve data with this service, you have to import the module HttpClient and add it to the service’s constructor. Once added, use it to create a function getDishes() that sends an HTTP request to My Thai Star’s back-end. The URL of the back-end can be stored as an environment variable MY_THAI_STAR_DISH.

+
+
+

data.service.ts

+
+
+
+
  ...
+  import { HttpClient } from '@angular/common/http';
+  import { MY_THAI_STAR_DISH } from '../environments/environment';
+  ...
+
+  export class DataService {
+    constructor(private http: HttpClient) {}
+
+    /* Get data from Back-end */
+    getDishes() {
+      return this.http.get(MY_THAI_STAR_DISH);
+    }
+    ...
+  }
+
+
+
+

environments.ts

+
+
+
+
  ...
+  export const MY_THAI_STAR_DISH =
+  'https://mts-devonfw-core.cloud.okteto.net/api/services/rest/dishmanagement/v1/dish/1';
+  ...
+
+
+
+

environments.prod.ts

+
+
+
+
  ...
+  export const MY_THAI_STAR_DISH =
+  'https://mts-devonfw-core.cloud.okteto.net/api/services/rest/dishmanagement/v1/dish/1';
+  ...
+
+
+
+
+
+

Step 3: Use the service

+
+
+

The component AppComponent implements the interface OnInit and inside its method ngOnInit() the subscription to the services is done. When a dish arrives, it is saved and shown (app.component.html).

+
+
+
+
  ...
+  import { DataService } from './data.service';
+  export class AppComponent implements OnInit {
+  dish: { name: string; description: string } = { name: '', description: ''};
+
+  ...
+  ngOnInit() {
+    this.data
+      .getDishes()
+      .subscribe(
+        (dishToday: { dish: { name: string; description: string } }) => {
+          this.dish = {
+            name: dishToday.dish.name,
+            description: dishToday.dish.description,
+          };
+        },
+      );
+  }
+}
+
+
+
+
+
+

Step 4: Structures, styles and updates

+
+
+

This step shows interesting code inside the sample app. The complete content can be found in devon4ts-samples.

+
+
+

index.html

+
+
+

To use the Montserrat font add the following link inside the head tag of the app’s index.html file.

+
+
+
+
  <link href="https://fonts.googleapis.com/css?family=Montserrat" rel="stylesheet">
+
+
+
+

styles.scss

+
+
+
+
  body {
+    ...
+    font-family: 'Montserrat', sans-serif;
+  }
+
+
+
+

app.component.ts

+
+
+

This file is also used to reload the app if there are any changes.

+
+
+
    +
  • +

    SwUpdate: This object comes inside the @angular/pwa package and it is used to detect changes and reload the page if needed.

    +
  • +
+
+
+
+
  ...
+  import { SwUpdate } from '@angular/service-worker';
+
+  export class AppComponent implements OnInit {
+
+  ...
+    constructor(updates: SwUpdate, private data: DataService) {
+      updates.available.subscribe((event) => {
+        updates.activateUpdate().then(() => document.location.reload());
+      });
+    }
+    ...
+  }
+
+
+
+
+
+

Step 5: Make it Progressive.

+
+
+

Install Angular PWA package with ng add @angular/pwa --project=<name>. As before substitute name with basic-ng-pwa.

+
+
+

The above command completes the following actions:

+
+
+
    +
  1. +

    Adds the @angular/service-worker package to your project.

    +
  2. +
  3. +

    Enables service worker build support in the CLI.

    +
  4. +
  5. +

    Imports and registers the service worker in the app module.

    +
  6. +
  7. +

    Updates the index.html file:

    +
    +
      +
    • +

      Includes a link to add the manifest.json file.

      +
    • +
    • +

      Adds meta tags for theme-color.

      +
    • +
    • +

      Installs icon files to support the installed Progressive Web App (PWA).

      +
    • +
    • +

      Creates the service worker configuration file called ngsw-config.json, which specifies the caching behaviors and other settings.

      +
    • +
    +
    +
  8. +
+
+
+
+
+

== manifest.json

+
+
+

manifest.json is a file that allows to control how the app is displayed in places where native apps are displayed.

+
+
+

Fields

+
+
+

name: Name of the web application.

+
+
+

short_name: Short version of name.

+
+
+

theme_color: Default theme color for an application context.

+
+
+

background_color: Expected background color of the web application.

+
+
+

display: Preferred display mode.

+
+
+

scope: Navigation scope of this web application’s application context.

+
+
+

start_url: URL loaded when the user launches the web application.

+
+
+

icons: Array of icons that serve as representations of the web app.

+
+
+

Additional information can be found here.

+
+
+
+
+

== ngsw-config.json

+
+
+

ngsw-config.json specifies which files and data URLs have to be cached and updated by the Angular service worker.

+
+
+

Fields

+
+
+
    +
  • +

    index: File that serves as index page to satisfy navigation requests.

    +
  • +
  • +

    assetGroups: Resources that are part of the app version that update along with the app.

    +
    +
      +
    • +

      name: Identifies the group.

      +
    • +
    • +

      installMode: How the resources are cached (pre-fetch or lazy).

      +
    • +
    • +

      updateMode: Caching behavior when a new version of the app is found (pre-fetch or lazy).

      +
    • +
    • +

      resources: Resources to cache. There are three groups.

      +
      +
        +
      • +

        files: Lists patterns that match files in the distribution directory.

        +
      • +
      • +

        urls: URL patterns matched at runtime.

        +
      • +
      +
      +
    • +
    +
    +
  • +
  • +

    dataGroups: Useful for API requests.

    +
    +
      +
    • +

      name: Identifies the group.

      +
    • +
    • +

      urls: URL patterns matched at runtime.

      +
    • +
    • +

      version: Indicates that the resources being cached have been updated in a backwards-incompatible way.

      +
    • +
    • +

      cacheConfig: Policy by which matching requests will be cached

      +
      +
        +
      • +

        maxSize: The maximum number of entries, or responses, in the cache.

        +
      • +
      • +

        maxAge: How long responses are allowed to remain in the cache.

        +
        +
          +
        • +

          d: days. (5d = 5 days).

          +
        • +
        • +

          h: hours

          +
        • +
        • +

          m: minutes

          +
        • +
        • +

          s: seconds. (5m20s = 5 minutes and 20 seconds).

          +
        • +
        • +

          u: milliseconds

          +
        • +
        +
        +
      • +
      • +

        timeout: How long the Angular service worker will wait for the network to respond before using a cached response. Same dataformat as maxAge.

        +
      • +
      • +

        strategy: Caching strategies (performance or freshness).

        +
      • +
      +
      +
    • +
    +
    +
  • +
  • +

    navigationUrls: List of URLs that will be redirected to the index file.

    +
  • +
+
+
+

Additional information can be found here.

+
+
+
+
+

Step 6: Configure the app

+
+
+

manifest.json

+
+
+

Default configuration.

+
+
+

 

+
+
+

ngsw-config.json

+
+
+

At assetGroups → resources → urls: In this field the google fonts API is added in order to use Montserrat font even without network.

+
+
+
+
  "urls": [
+          "https://fonts.googleapis.com/**"
+        ]
+
+
+
+

At the root of the json: A data group to cache API calls.

+
+
+
+
  {
+    ...
+    "dataGroups": [{
+      "name": "mythaistar-dishes",
+      "urls": [
+        "https://mts-devonfw-core.cloud.okteto.net/api/services/rest/dishmanagement/v1/dish/1"
+      ],
+      "cacheConfig": {
+        "maxSize": 100,
+        "maxAge": "1h",
+        "timeout": "10s",
+        "strategy": "freshness"
+      }
+    }]
+  }
+
+
+
+
+
+

Step 7: Check that your app is a PWA

+
+
+

To check if an app is a PWA let’s compare its normal behavior against itself but built for production. Run in the project’s root folder the commands below:

+
+
+

ng build --prod to build the app using production settings.(nx build <name> --prod in Nx CLI)

+
+
+

npm install http-server to install an npm module that can serve your built application. Documentation here.

+
+
+

Go to the dist/basic-ng-pwa/ folder running cd dist/basic-ng-pwa. In an Nx workspace, the path will be dist/apps/basic-ng-pwa

+
+
+

http-server -o to serve your built app.

+
+
+
+Http server running +
+
Figure 2. Http server running on localhost:8081.
+
+
+

 

+
+
+

In another console instance run ng serve (or nx serve basic-ng-pwa for Nx) to open the common app (not built).

+
+
+
+.Angular server running +
+
Figure 3. Angular server running on localhost:4200.
+
+
+

 

+
+
+

The first difference can be found on Developer tools → application, here it is seen that the PWA application (left) has a service worker and the common (right) one does not.

+
+
+
+Application comparison +
+
Figure 4. Application service worker comparison.
+
+
+

 

+
+
+

If the "offline" box is checked, it will force a disconnection from the network. In situations where users do not have connectivity or have a slow one, the PWA can still be accessed and used.

+
+
+
+Online offline apps +
+
Figure 5. Offline application.
+
+
+

 

+
+
+

Finally, browser extensions like Lighthouse can be used to test whether an application is progressive or not.

+
+
+
+Lighthouse report +
+
Figure 6. Lighthouse report.
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-angular-theming.html b/docs/devon4ng/1.0/guide-angular-theming.html new file mode 100644 index 00000000..73e427a0 --- /dev/null +++ b/docs/devon4ng/1.0/guide-angular-theming.html @@ -0,0 +1,774 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Material Theming

+
+
+

The Angular Material library offers UI components for developers. Those components follow Google Material design baselines, but characteristics like colors can be modified in order to adapt them to the needs of the client: corporative colors, corporative identity, dark themes, …​

+
+
+
+
+

Theming basics

+
+
+

In Angular Material, a theme is created by mixing multiple colors. Colors and their light and dark variants form a palette. In general, a theme consists of the following palettes:

+
+
+
    +
  • +

    primary: Most used across screens and components.

    +
  • +
  • +

    accent: Floating action button and interactive elements.

    +
  • +
  • +

    warn: Error state.

    +
  • +
  • +

    foreground: Text and icons.

    +
  • +
  • +

    background: Element backgrounds.

    +
  • +
+
+
+
+Theme palette +
+
Figure 1. Palettes and variants.
+
+
+

In angular material, a palette is represented as a SCSS map.

+
+
+
+SCSS map +
+
Figure 2. SCSS map and palettes.
+
+
+ + + + + +
+ + +Some components can be forced to use primary, accent or warn palettes using the attribute color, for example: <mat-toolbar color="primary">. +
+
+
+
+
+

Pre-built themes

+
+
+

Available pre-built themes:

+
+
+
    +
  • +

    deeppurple-amber.css

    +
  • +
+
+
+
+`deeppurple-amber theme` +
+
Figure 3. deeppurple-amber theme.
+
+
+
    +
  • +

    indigo-pink.css

    +
  • +
+
+
+
+indigo-pink theme +
+
Figure 4. indigo-pink theme.
+
+
+
    +
  • +

    pink-bluegrey.css

    +
  • +
+
+
+
+` pink-bluegrey theme` +
+
Figure 5. pink-bluegrey theme.
+
+
+
    +
  • +

    purple-green.css

    +
  • +
+
+
+
+purple-green theme +
+
Figure 6. purple-green theme.
+
+
+

The pre-built themes can be added using @import.

+
+
+
+
@import '@angular/material/prebuilt-themes/deeppurple-amber.css';
+
+
+
+
+
+

Custom themes

+
+
+

Sometimes pre-built themes do not meet the needs of a project, because color schemas are too specific or do not incorporate branding colors. In those situations, custom themes can be built to offer a better solution to the client.

+
+
+

For this topic, we are going to use a basic layout project that can be found in devon4ts-samples repository.

+
+
+
+
+

Basics

+
+
+

Before starting writing custom themes, there are some necessary things that have to be mentioned:

+
+
+
    +
  • +

    Add a default theme: The project mentioned before has just one global SCSS style sheet styles.scss that includes indigo-pink.scss which will be the default theme.

    +
  • +
  • +

    Add @import '~@angular/material/theming'; at the beginning of the every style sheet to be able to use angular material pre-built color palettes and functions.

    +
  • +
  • +

    Add @include mat-core(); once per project, so if you are writing multiple themes in multiple files you could import those files from a 'central' one (for example styles.scss). This includes all common styles that are used by multiple components.

    +
  • +
+
+
+
+Theme files structure +
+
Figure 7. Theme files structure.
+
+
+
+
+

Basic custom theme

+
+
+

To create a new custom theme, the .scss file containing it has to import the Angular _theming.scss file (angular/material/theming) and include mat-core. _theming.scss includes multiple color palettes and some functions that we are going to see below. The file for this basic theme is going to be named styles-custom-dark.scss.

+
+
+

First, declare new variables for primary, accent and warn palettes. Those variables are going to store the result of the function mat-palette.

+
+
+

mat-palette accepts four arguments: base color palette, main, lighter and darker variants (See Palettes and variants.) and returns a new palette including some additional map values: default, lighter and darker ([id_scss_map]). Only the first argument is mandatory.

+
+
+
Listing 1. File styles-custom-dark.scss.
+
+
$custom-dark-theme-primary: mat-palette($mat-pink);
+$custom-dark-theme-accent: mat-palette($mat-blue);
+$custom-dark-theme-warn: mat-palette($mat-red);
+
+
+
+
+

In this example we are using colors available in _theming.scss: mat-pink, mat-blue, mat-red. If you want to use a custom color you need to define a new map, for instance:

+
+
+
Listing 2. File styles-custom-dark.scss custom pink.
+
+
$my-pink: (
+    50 : #fcf3f3,
+    100 : #f9e0e0,
+    200 : #f5cccc,
+    300 : #f0b8b8,
+    500 : #ea9999,
+    900 : #db6b6b,
+    A100 : #ffffff,
+    A200 : #ffffff,
+    A400 : #ffeaea,
+    A700 : #ffd0d0,
+    contrast: (
+        50 : #000000,
+        100 : #000000,
+        200 : #000000,
+        300 : #000000,
+        900 : #000000,
+        A100 : #000000,
+        A200 : #000000,
+        A400 : #000000,
+        A700 : #000000,
+    )
+);
+
+$custom-dark-theme-primary: mat-palette($my-pink);
+...
+
+
+
+ + + + + +
+ + +Some pages allow you to create these palettes easily, for instance: http://mcg.mbitson.com +
+
+
+

Until now, we just have defined primary, accent and warn palettes but what about foreground and background? Angular material has two functions to change both:

+
+
+
    +
  • +

    mat-light-theme: Receives as arguments primary, accent and warn palettes and return a theme whose foreground is basically black (texts, icons, …​), the background is white and the other palettes are the received ones.

    +
  • +
+
+
+
+`deeppurple-amber theme` +
+
Figure 8. Custom light theme.
+
+
+
    +
  • +

    mat-dark-theme: Similar to mat-light-theme but returns a theme whose foreground is basically white and background black.

    +
  • +
+
+
+
+`deeppurple-amber theme` +
+
Figure 9. Custom dark theme.
+
+
+

For this example we are going to use mat-dark-theme and save its result in $custom-dark-theme.

+
+
+
Listing 3. File styles-custom-dark.scss updated with mat-dark-theme.
+
+
...
+
+$custom-dark-theme: mat-dark-theme(
+  $custom-dark-theme-primary,
+  $custom-dark-theme-accent,
+  $custom-dark-theme-warn
+);
+
+
+
+

To apply the saved theme, we have to go to styles.scss and import our styles-custom-dark.scss and include a function called angular-material-theme using the theme variable as argument.

+
+
+
Listing 4. File styles.scss.
+
+
...
+@import 'styles-custom-dark.scss';
+@include angular-material-theme($custom-dark-theme);
+
+
+
+

If we have multiple themes it is necessary to add the include statement inside a css class and use it in src/index.html → app-root component.

+
+
+
Listing 5. File styles.scss updated with custom-dark-theme class.
+
+
...
+@import 'styles-custom-dark.scss';
+
+.custom-dark-theme {
+  @include angular-material-theme($custom-dark-theme);
+}
+
+
+
+
Listing 6. File src/index.html.
+
+
...
+<app-root class="custom-dark-theme"></app-root>
+...
+
+
+
+

This will apply $custom-dark-theme theme for the entire application.

+
+
+
+
+

Full custom theme

+
+
+

Sometimes it is necessary to customize different background and foreground elements. In those situations we have to create a new function similar to mat-light-theme and mat-dark-theme. Let’s focus on mat-light-theme:

+
+
+
Listing 7. Source code of mat-light-theme
+
+
@function mat-light-theme($primary, $accent, $warn: mat-palette($mat-red)) {
+  @return (
+    primary: $primary,
+    accent: $accent,
+    warn: $warn,
+    is-dark: false,
+    foreground: $mat-light-theme-foreground,
+    background: $mat-light-theme-background,
+  );
+}
+
+
+
+

As we can see, mat-light-theme takes three arguments and returns a map including them as primary, accent and warn color; but there are three more keys in that map: is-dark, foreground and background.

+
+
+
    +
  • +

    is-dark: Boolean true if it is a dark theme, false otherwise.

    +
  • +
  • +

    background: Map that stores the color for multiple background elements.

    +
  • +
  • +

    foreground: Map that stores the color for multiple foreground elements.

    +
  • +
+
+
+

To show which elements can be colored lets create a new theme in a file styles-custom-cap.scss:

+
+
+
Listing 8. File styles-custom-cap.scss: Background and foreground variables.
+
+
@import '~@angular/material/theming';
+
+// custom background and foreground palettes
+$my-cap-theme-background: (
+  status-bar: #0070ad,
+  app-bar: map_get($mat-blue, 900),
+  background: #12abdb,
+  hover: rgba(white, 0.04),
+  card: map_get($mat-red, 800),
+  dialog: map_get($mat-grey, 800),
+  disabled-button: $white-12-opacity,
+  raised-button: map-get($mat-grey, 800),
+  focused-button: $white-6-opacity,
+  selected-button: map_get($mat-grey, 900),
+  selected-disabled-button: map_get($mat-grey, 800),
+  disabled-button-toggle: black,
+  unselected-chip: map_get($mat-grey, 700),
+  disabled-list-option: black,
+);
+
+$my-cap-theme-foreground: (
+  base: yellow,
+  divider: $white-12-opacity,
+  dividers: $white-12-opacity,
+  disabled: rgba(white, 0.3),
+  disabled-button: rgba(white, 0.3),
+  disabled-text: rgba(white, 0.3),
+  hint-text: rgba(white, 0.3),
+  secondary-text: rgba(white, 0.7),
+  icon: white,
+  icons: white,
+  text: white,
+  slider-min: white,
+  slider-off: rgba(white, 0.3),
+  slider-off-active: rgba(white, 0.3),
+);
+
+
+
+

Function which uses the variables defined before to create a new theme:

+
+
+
Listing 9. File styles-custom-cap.scss: Creating a new theme function.
+
+
// instead of creating a theme with mat-light-theme or mat-dark-theme,
+// we will create our own theme-creating function that lets us apply our own foreground and background palettes.
+@function create-my-cap-theme($primary, $accent, $warn: mat-palette($mat-red)) {
+  @return (
+    primary: $primary,
+    accent: $accent,
+    warn: $warn,
+    is-dark: false,
+    foreground: $my-cap-theme-foreground,
+    background: $my-cap-theme-background
+  );
+}
+
+
+
+

Calling the new function and storing its value in $custom-cap-theme.

+
+
+
Listing 10. File styles-custom-cap.scss: Storing the new theme.
+
+
// We use create-my-cap-theme instead of mat-light-theme or mat-dark-theme
+$custom-cap-theme-primary: mat-palette($mat-green);
+$custom-cap-theme-accent: mat-palette($mat-blue);
+$custom-cap-theme-warn: mat-palette($mat-red);
+
+$custom-cap-theme: create-my-cap-theme(
+  $custom-cap-theme-primary,
+  $custom-cap-theme-accent,
+  $custom-cap-theme-warn
+);
+
+
+
+

After defining our new theme, we can import it from styles.scss.

+
+
+
Listing 11. File styles.scss updated with custom-cap-theme class.
+
+
...
+@import 'styles-custom-cap.scss';
+.custom-cap-theme {
+  @include angular-material-theme($custom-cap-theme);
+}
+
+
+
+
+
+

Multiple themes and overlay-based components

+
+
+

Certain components (e.g. menu, select, dialog, etc.) that are inside of a global overlay container require an additional step to be affected by the theme’s css class selector.

+
+
+
Listing 12. File app.module.ts
+
+
import {OverlayContainer} from '@angular/cdk/overlay';
+
+@NgModule({
+  // ...
+})
+export class AppModule {
+  constructor(overlayContainer: OverlayContainer) {
+    overlayContainer.getContainerElement().classList.add('custom-cap-theme');
+  }
+}
+
+
+
+
+ +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-app-initializer.html b/docs/devon4ng/1.0/guide-app-initializer.html new file mode 100644 index 00000000..62115d4a --- /dev/null +++ b/docs/devon4ng/1.0/guide-app-initializer.html @@ -0,0 +1,790 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

APP_INITIALIZER

+
+ +
+
+
+

What is the APP_INITIALIZER pattern

+
+
+

The APP_INITIALIZER pattern allows an application to choose which configuration is going to be used at the start of the application. This is useful because it allows setting up different configurations, for example, for docker or a remote configuration. This provides benefits since this is done at runtime, so there’s no need to recompile the whole application to switch configurations.

+
+
+
+
+

What is APP_INITIALIZER

+
+
+

APP_INITIALIZER allows to provide a service in the initialization of the application in a @NgModule. It also allows to use a factory, allowing to create a singleton in the same service. An example can be found in MyThaiStar /core/config/config.module.ts:

+
+
+
+
+

==

+
+
+

The provider expects the return of a Promise. If it is using Observables, the method toPromise() will allow a switch from Observable to Promise +== ==

+
+
+
+
import { NgModule, APP_INITIALIZER } from '@angular/core';
+import { HttpClientModule } from '@angular/common/http';
+
+import { ConfigService } from './config.service';
+
+@NgModule({
+  imports: [HttpClientModule],
+  providers: [
+    ConfigService,
+    {
+      provide: APP_INITIALIZER,
+      useFactory: ConfigService.factory,
+      deps: [ConfigService],
+      multi: true,
+    },
+  ],
+})
+export class ConfigModule {}
+
+
+
+

This is going to allow the creation of a ConfigService where, using a singleton, the service is going to load an external config depending on a route. This dependence with a route, allows to setup different configuration for docker etc. This is seen in the ConfigService of MyThaiStar:

+
+
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { environment } from '../../../environments/environment';
+import { Config, config } from './config';
+
+@Injectable()
+export class ConfigService {
+  constructor(private httpClient: HttpClient) {}
+
+  static factory(appLoadService: ConfigService) {
+    return () => appLoadService.loadExternalConfig();
+  }
+
+  // this method gets external configuration calling /config endpoint
+  //and merges into config object
+  loadExternalConfig(): Promise<any> {
+    if (!environment.loadExternalConfig) {
+      return Promise.resolve({});
+    }
+
+    const promise = this.httpClient
+      .get('/config')
+      .toPromise()
+      .then((settings) => {
+        Object.keys(settings || {}).forEach((k) => {
+          config[k] = settings[k];
+        });
+        return settings;
+      })
+      .catch((error) => {
+        return 'ok, no external configuration';
+      });
+
+    return promise;
+  }
+
+  getValues(): Config {
+    return config;
+  }
+}
+
+
+
+

As it is mentioned earlier, you can see the use of a factory to create a singleton at the start. After that, loadExternalConfig is going to look for a Boolean inside the corresponding environment file inside the path src/environments/; this Boolean loadExternalConfig is going to easily allow switching to an external config. If it is true, it generates a promise that overwrites the parameters of the local config, allowing the external config to be loaded. Finally, the last method getValues() is going to return the config file with the values (overwritten or not). The local config file from MyThaiStar can be seen here:

+
+
+
+
export enum BackendType {
+  IN_MEMORY,
+  REST,
+  GRAPHQL,
+}
+
+interface Role {
+  name: string;
+  permission: number;
+}
+
+interface Lang {
+  label: string;
+  value: string;
+}
+
+export interface Config {
+  version: string;
+  backendType: BackendType;
+  restPathRoot: string;
+  restServiceRoot: string;
+  pageSizes: number[];
+  pageSizesDialog: number[];
+  roles: Role[];
+  langs: Lang[];
+}
+
+export const config: Config = {
+  version: 'dev',
+  backendType: BackendType.REST,
+  restPathRoot: 'http://localhost:8081/mythaistar/',
+  restServiceRoot: 'http://localhost:8081/mythaistar/services/rest/',
+  pageSizes: [8, 16, 24],
+  pageSizesDialog: [4, 8, 12],
+  roles: [
+    { name: 'CUSTOMER', permission: 0 },
+    { name: 'WAITER', permission: 1 },
+  ],
+  langs: [
+    { label: 'English', value: 'en' },
+    { label: 'Deutsch', value: 'de' },
+    { label: 'Español', value: 'es' },
+    { label: 'Català', value: 'ca' },
+    { label: 'Français', value: 'fr' },
+    { label: 'Nederlands', value: 'nl' },
+    { label: 'हिन्दी', value: 'hi' },
+    { label: 'Polski', value: 'pl' },
+    { label: 'Русский', value: 'ru' },
+    { label: 'български', value: 'bg' },
+  ],
+};
+
+
+
+

Finally, inside a environment file src/environments/environment.ts the use of the Boolean loadExternalConfig is seen:

+
+
+
+
// The file contents for the current environment will overwrite these during build.
+// The build system defaults to the dev environment which uses `environment.ts`, but if you do
+// `ng build --env=prod` then `environment.prod.ts` will be used instead.
+// The list of which env maps to which file can be found in `.angular-cli.json`.
+
+export const environment: {
+  production: boolean;
+  loadExternalConfig: boolean;
+} = { production: false, loadExternalConfig: false };
+
+
+
+
+
+

Creating a APP_INITIALIZER configuration

+
+
+

This section is going to be used to create a new APP_INITIALIZER basic example. For this, a basic app with angular is going to be generated using ng new "appname", substituting appname with the desired name of the app. +If you are using Nx, the command would be nx generate @nrwl/angular:app "appname" in your Nx workspace. Click here to get started with using Nx.

+
+
+
+
+

Setting up the config files

+
+ +
+
+
+

Docker external configuration (Optional)

+
+
+

This section is only done if there is a docker configuration in the app you are setting up this type of configuration.

+
+
+

1.- Create in the root folder /docker-external-config.json. This external config is going to be used when the application is loaded with docker (if the Boolean to load the external configuration is set to true). Here you need to add all the config parameters you want to load with docker:

+
+
+
+
{
+    "version": "docker-version"
+}
+
+
+
+

2.- In the root, in the file /Dockerfile, Angular is going to copy the docker-external-config.json that was created before into the Nginx html route:

+
+
+
+
....
+COPY docker-external-config.json /usr/share/nginx/html/docker-external-config.json
+....
+
+
+
+
+
+

External json configuration

+
+
+

1.- Create a json file in the route /src/external-config.json. This external config is going to be used when the application is loaded with the start script (if the Boolean to load the external configuration is set to true). Here you need to add all the config parameters you want to load:

+
+
+
+
{
+    "version": "external-config"
+}
+
+
+
+

2.- The file named /angular.json (/workspace.json if using Nx) located at the root is going to be modified to add the file external-config.json that was just created to both "assets" inside Build and Test:

+
+
+
+
	....
+	"build": {
+          ....
+            "assets": [
+              "src/assets",
+              "src/data",
+              "src/favicon.ico",
+              "src/manifest.json",
+              "src/external-config.json"
+            ]
+	        ....
+        "test": {
+	  ....
+	   "assets": [
+              "src/assets",
+              "src/data",
+              "src/favicon.ico",
+              "src/manifest.json",
+              "src/external-config.json"
+            ]
+	  ....
+
+
+
+
+
+

Setting up the proxies

+
+
+

This step is going to setup two proxies. This is going to allow to load the config desired by the context, in case that it is using docker to load the app or in case it loads the app with angular. Loading different files is made possible by the fact that the ConfigService method loadExternalConfig() looks for the path /config.

+
+
+
+
+

Docker (Optional)

+
+
+

1.- This step is going to be for docker. Add docker-external-config.json to Nginx configuration (/nginx.conf) that is in the root of the application:

+
+
+
+
....
+  location  ~ ^/config {
+        alias /usr/share/nginx/html/docker-external-config.json;
+  }
+....
+
+
+
+
+
+

External Configuration

+
+
+

1.- Now the file /proxy.conf.json needs to be created/modified. This file can be found in the root of the application. In this file you can add the route of the external configuration in target and the name of the file in ^/config:

+
+
+
+
....
+  "/config": {
+    "target": "http://localhost:4200",
+    "secure": false,
+    "pathRewrite": {
+      "^/config": "/external-config.json"
+    }
+  }
+....
+
+
+
+

2.- The file package.json found in the root of the application is going to use the start script to load the proxy config that was just created:

+
+
+
+
  "scripts": {
+....
+    "start": "ng serve --proxy-config proxy.conf.json -o",
+....
+
+
+
+

If using Nx, you need to run the command manually:

+
+
+

nx run angular-app-initializer:serve:development --proxyConfig=proxy.conf.json --o

+
+
+
+
+

Adding the loadExternalConfig Boolean to the environments

+
+
+

In order to load an external config we need to add the loadExternalConfig Boolean to the environments. To do so, inside the folder environments/ the files are going to get modified adding this Boolean to each environment that is going to be used. In this case, only two environments are going to be modified (environment.ts and environment.prod.ts). Down below there is an example of the modification being done in the environment.prod.ts:

+
+
+
+
export const environment: {
+  production: boolean;
+  loadExternalConfig: boolean;
+} = { production: false, loadExternalConfig: false };
+
+
+
+

In the file in first instance there is the declaration of the types of the variables. After that, there is the definition of those variables. This variable loadExternalConfig is going to be used by the service, allowing to setup a external config just by switching the loadExternalConfig to true.

+
+
+
+
+

Creating core configuration service

+
+
+

In order to create the whole configuration module, three files are going to be created:

+
+
+

1.- Create in the core app/core/config/ a config.ts

+
+
+
+
  export interface Config {
+    version: string;
+  }
+
+  export const config: Config = {
+    version: 'dev'
+  };
+
+
+
+

Taking a look at this file, it creates an interface (Config) that is going to be used by the variable that it exports (export const config: Config). This variable config is going to be used by the service that is going to be created.

+
+
+

2.- Create in the core app/core/config/ a config.service.ts:

+
+
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { environment } from '../../../environments/environment';
+import { Config, config } from './config';
+
+@Injectable()
+export class ConfigService {
+  constructor(private httpClient: HttpClient) {}
+
+  static factory(appLoadService: ConfigService) {
+    return () => appLoadService.loadExternalConfig();
+  }
+
+  // this method gets external configuration calling /config endpoint
+  // and merges into config object
+  loadExternalConfig(): Promise<any> {
+    if (!environment.loadExternalConfig) {
+      return Promise.resolve({});
+    }
+
+    const promise = this.httpClient
+      .get('/config')
+      .toPromise()
+      .then((settings) => {
+        Object.keys(settings || {}).forEach((k) => {
+          config[k] = settings[k];
+        });
+        return settings;
+      })
+      .catch((error) => {
+        return 'ok, no external configuration';
+      });
+
+    return promise;
+  }
+
+  getValues(): Config {
+    return config;
+  }
+}
+
+
+
+

As it was explained in previous steps, at first, there is a factory that uses the method loadExternalConfig(), this factory is going to be used in later steps in the module. After that, the loadExternalConfig() method checks if the Boolean in the environment is false. If it is false it will return the promise resolved with the normal config. Else, it is going to load the external config in the path (/config), and overwrite the values from the external config to the config that’s going to be used by the app, this is all returned in a promise.

+
+
+

3.- Create in the core a module for the config app/core/config/ a config.module.ts:

+
+
+
+
import { NgModule, APP_INITIALIZER } from '@angular/core';
+import { HttpClientModule } from '@angular/common/http';
+
+import { ConfigService } from './config.service';
+
+@NgModule({
+  imports: [HttpClientModule],
+  providers: [
+    ConfigService,
+    {
+      provide: APP_INITIALIZER,
+      useFactory: ConfigService.factory,
+      deps: [ConfigService],
+      multi: true,
+    },
+  ],
+})
+export class ConfigModule {}
+
+
+
+

As seen earlier, the ConfigService is added to the module. In this addition, the app is initialized(provide) and it uses the factory that was created in the ConfigService loading the config with or without the external values depending on the Boolean in the config.

+
+
+
+
+

Using the Config Service

+
+
+

As a first step, in the file /app/app.module.ts the ConfigModule created earlier in the other step is going to be imported:

+
+
+
+
  imports: [
+    ....
+    ConfigModule,
+    ....
+  ]
+
+
+
+

After that, the ConfigService is going to be injected into the app.component.ts

+
+
+
+
....
+import { ConfigService } from './core/config/config.service';
+....
+export class AppComponent {
+....
+  constructor(public configService: ConfigService) { }
+....
+
+
+
+

Finally, for this demonstration app, the component app/app.component.html is going to show the version of the config it is using at that moment.

+
+
+
+
<div style="text-align:center">
+  <h1>
+    Welcome to {{ title }}!
+  </h1>
+</div>
+<h2>Here is the configuration version that is using angular right now: {{configService.getValues().version}}</h2>
+
+
+
+
+
+

Final steps

+
+
+

The script start that was created earlier in the package.json (npm start) is going to be used to start the application. After that, modifying the Boolean loadExternalConfig inside the corresponding environment file inside /app/environments/ should show the different config versions.

+
+
+
+loadExternalConfigFalse +
+
+
+
+loadExternalConfigTrue +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-blob-streaming.html b/docs/devon4ng/1.0/guide-blob-streaming.html new file mode 100644 index 00000000..6956ad30 --- /dev/null +++ b/docs/devon4ng/1.0/guide-blob-streaming.html @@ -0,0 +1,552 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Angular File Uploading

+
+
+

This sample demonstrates how to upload a file to a server. For this, we will need to use an Angular form. In this case we have chosen a simple template-driven form, as the goal of this sample is just to show the process to upload a file. You can learn more about Forms in Angular in the official documentation.

+
+
+ + + + + +
+ + +The back-end implementation for this sample is located here: +devon4j-blob-streaming +
+
+
+
+
+

FormData

+
+
+

FormData is an object where you can store key-value pairs, allowing you to send them through XMLHttpRequest. You can create a FormData object as simply as:

+
+
+
+
....
+const formData = new FormData();
+formData.append('key', value);
+....
+
+
+
+
+
+

Let’s begin

+
+
+

I assume you already have your angular application running, if not, you can have a look to our AngularBasicPWA sample +Unresolved include directive in modules/ROOT/pages/guide-blob-streaming.adoc - include::guide-angular-pwa[]

+
+
+

We are going to use Angular Material components, so it is necessary to install the dependency with the following command:

+
+
+

npm install --save @angular/material @angular/cdk @angular/animations

+
+
+
+
+

Importing necessary components

+
+
+

These are the components I am going to use for our sample; they are Material HTML components. To use the template-driven form you do not need to import any component. I am going to create a module called Core where I place the needed imports. After that, I will import the Core module in my main App module, and I will be able to use these components in any part of my application.

+
+
+
+
....
+@NgModule({
+  declarations: [],
+  imports: [CommonModule],
+  exports: [
+    MatButtonModule,
+    MatFormFieldModule,
+    MatInputModule,
+    FormsModule,
+    MatProgressBarModule,
+  ],
+})
+export class CoreModule {}
+....
+
+
+
+

FormsModule will allow us data binding between the HTML and the component.

+
+
+

The next step will be to create a component to place the uploading component: +ng generate component uploader

+
+
+

So this will be our project structure so far:

+
+
+
+folder structure +
+
+
+

Then, in the app.component.html we need to add the selector for our new component, so it will be represented there. We are not going to create any route for this sample. We can also modify the values for the toolbar.

+
+
+
+
....
+<div class="toolbar" role="banner">
+  <img
+    width="40"
+    alt="Angular Logo"
+    src="data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAyNTAgMjUwIj4KICAgIDxwYXRoIGZpbGw9IiNERDAwMzEiIGQ9Ik0xMjUgMzBMMzEuOSA2My4ybDE0LjIgMTIzLjFMMTI1IDIzMGw3OC45LTQzLjcgMTQuMi0xMjMuMXoiIC8+CiAgICA8cGF0aCBmaWxsPSIjQzMwMDJGIiBkPSJNMTI1IDMwdjIyLjItLjFWMjMwbDc4LjktNDMuNyAxNC4yLTEyMy4xTDEyNSAzMHoiIC8+CiAgICA8cGF0aCAgZmlsbD0iI0ZGRkZGRiIgZD0iTTEyNSA1Mi4xTDY2LjggMTgyLjZoMjEuN2wxMS43LTI5LjJoNDkuNGwxMS43IDI5LjJIMTgzTDEyNSA1Mi4xem0xNyA4My4zaC0zNGwxNy00MC45IDE3IDQwLjl6IiAvPgogIDwvc3ZnPg== "
+  />
+  <span>File uploader</span>
+</div>
+
+<app-uploader></app-uploader>
+
+<router-outlet></router-outlet>
+....
+
+
+
+

Now, our new component uploader will be loaded in the root page. Let’s add some code to it.

+
+
+
+
+

Uploader component

+
+
+

I will begin by editing the html file. The first thing we need is an input component, which will allow us to select the file to upload. Furthermore, I added a button which will be responsible for opening the upload file window. Apart from this, there are also two labels and a progress bar. The labels will give feedback about the file upload request, both with an if clause on the uploadSuccess and uploadFail global variables that will be in uploader.component.ts. The progress bar will show the progress of the file being uploaded.

+
+
+
+
....
+  <div class="upload">
+    <div>
+      <button mat-raised-button (click)="upload()">Upload file</button>
+    </div>
+        <label mat-label *ngIf="uploadSuccess"
+      >The file was upload succesfully!</label
+    >
+    <label mat-label *ngIf="uploadFail"
+      >There was an error uploading the file</label
+    >
+    <input
+      type="file"
+      #fileUpload
+      name="fileUpload"
+      accept="*"
+      style="display: none"
+    />
+  </div>
+    <mat-progress-bar
+    *ngIf="fileInProgress"
+    [value]="fileProgress"
+  ></mat-progress-bar>
+</div>
+....
+
+
+
+

The button will call the upload() method in our uploader.component.ts, and as we can see, I assigned an identifier to the input, #fileUpload, so we can reference it from uploader.component.ts. It accepts any file, and the display none style is there because the input will be triggered when we click the button, so it is not necessary for it to be present in the view.

+
+
+

Our html view should look something similar to this:

+
+
+
+html view 1 +
+
+
+

Let’s start in our .ts file. In order to interact with the input #fileUpload, it is necessary to declare it like this:

+
+
+
+
....
+@ViewChild('fileUpload') fileUpload: ElementRef;
+constructor() {}
+....
+
+
+
+

And then, the upload() method that the button in html is calling:

+
+
+
+
....
+ upload(): void {
+    this.fileUpload.nativeElement.click();
+
+    this.fileUpload.nativeElement.onchange = () => {
+      const file = this.fileUpload.nativeElement.files[0];
+      this.uploadFile(file);
+    };
+  }
+....
+
+
+
+

The click method at first line will open the file explorer in order to select the desired file to upload, and on change method will be called when a new file is selected, so a change is detected. Then, uploadFile(…​) method will be called.

+
+
+

Before explaining this uploadFile(…​) method, there is something still missing: a service to communicate with the back-end through HTTP. +I am going to place the service in a service folder inside our uploader component folder. +Execute the following command ng generate service data and paste the following code

+
+
+
+
....
+export class DataService {
+  SERVER_URL = 'http://localhost:8081/services/rest/binary/v1/';
+
+  constructor(private httpClient: HttpClient) {}
+
+  uploadFile(formdData: FormData): Observable<HttpEvent<BinaryObject>> {
+    const headers = new HttpHeaders({
+      'Content-Type': 'multipart/form-data',
+    });
+
+    return this.httpClient.post<BinaryObject>(
+      this.SERVER_URL + 'binaryobject',
+      formdData,
+      {
+        headers,
+        reportProgress: true,
+        observe: 'events',
+      }
+    );
+  }
+}
+....
+
+
+
+

We have declared the URL as a global variable. It is also necessary to set the content-type as multipart/form-data in the headers section; that will be the body of the request. There are also two options to talk about:

+
+
+
    +
  • +

    reportProgress: to have a feedback about the file upload so we can show percentage on the view.

    +
  • +
  • +

    observe: 'events' in order to receive this type of event information.

    +
  • +
+
+
+

In uploader.component.ts is missing uploadFile(…​) method.

+
+
+
+
....
+  uploadFile(file: File): void {
+    const formDataBody = this.getFormData(file);
+    this.dataService.uploadFile(formDataBody).subscribe(
+      (event) => {
+        if (event.type == HttpEventType.UploadProgress) {
+          this.fileProgress = Math.round((100 * event.loaded) / event.total);
+        } else if (event instanceof HttpResponse) {
+          this.fileInProgress = false;
+          this.uploadSuccess = true;
+        }
+      },
+      (err) => {
+        console.log('Could not upload the file!');
+        this.uploadFail = true;
+      }
+    );
+  }
+....
+
+
+
+

Notice that whether we have a correct response or an error response, we set the variable this.uploadSuccess or this.uploadFail to show the labels in the html giving feedback. +Once we call the service to do the HTTP request, we expect two types of response (three if we count the error): the first one is the progress of the upload, which will update the progress bar through the this.fileProgress variable. The second one is a response when the request is finished. +That is why the type of the response is checked between HttpEventType or HttpResponse.

+
+
+

Now, if you have your back-end running, you should be able to upload a file, and check in DB that all the process worked fine.

+
+
+ + + + + +
+ + +Download method is not implemented yet. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-component-decomposition.html b/docs/devon4ng/1.0/guide-component-decomposition.html new file mode 100644 index 00000000..d82b20b2 --- /dev/null +++ b/docs/devon4ng/1.0/guide-component-decomposition.html @@ -0,0 +1,504 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Component Decomposition

+
+
+

When implementing a new requirement there are a few design decisions, which need to be considered. +A decomposition in Smart and Dumb Components should be done first. +This includes the definition of state and responsibilities. +Implementing a new dialog will most likely be done by defining a new Smart Component with multiple Dumb Component children.

+
+
+

In the component tree this would translate to the definition of a new sub-tree.

+
+
+
+Component Tree With Highlighted Sub Tree +
+
Figure 1. Component Tree with highlighted sub-tree
+
+
+
+
+

Defining Components

+
+
+

The following gives an example for component decomposition. +Shown is a screenshot from a style guide to be implemented. +It is a widget called Listpicker.

+
+
+

The basic function is an input field accepting direct input. +So typing otto puts otto inside the FormControl. +With arrow down key or by clicking the icon displayed in the inputs right edge a dropdown is opened. +Inside possible values can be selected and filtered beforehand. +After pressing arrow down key the focus should move into the filter input field. +Up and down arrow keys can be used to select an element from the list. +Typing into the filter input field filters the list from which the elements can be selected. +The current selected element is highlighted with green background color.

+
+
+
+Component Decomposition Example 1v2 +
+
Figure 2. Component decomposition example before
+
+
+

What should be done, is to define small reusable Dumb Components. +This way the complexity becomes manageable. +In the example every colored box describes a component with the purple box being a Smart Component.

+
+
+
+Component Decomposition Example 2v2 +
+
Figure 3. Component decomposition example after
+
+
+

This leads to the following component tree.

+
+
+
+Component Decomposition Example component tree +
+
Figure 4. Component decomposition example component tree
+
+
+

Note the uppermost component is a Dumb Component. +It is a wrapper for the label and the component to be displayed inside a form. +The Smart Component is Listpicker. +This way the widget can be reused without a form needed.

+
+
+

A widget is a typical Smart Component to be shared across feature modules. +So the SharedModule is the place for it to be defined.

+
+
+
+
+

Defining state

+
+
+

Every UI has state. +There are different kinds of state, for example

+
+
+
    +
  • +

    View State: e.g. is a panel open, a css transition pending, etc.

    +
  • +
  • +

    Application State: e.g. is a payment pending, current URL, user info, etc.

    +
  • +
  • +

    Business Data: e.g. products loaded from back-end

    +
  • +
+
+
+

It is good practice to base the component decomposition on the state handled by a component and to define a simplified state model beforehand. +Starting with the parent - the Smart Component:

+
+
+
    +
  • +

    What overall state does the dialog have: e.g. loading, error, valid data loaded, valid input, invalid input, etc. +Every defined value should correspond to an overall appearance of the whole dialog.

    +
  • +
  • +

    What events can occur to the dialog: e.g. submitting a form, changing a filter, pressing buttons, pressing keys, etc.

    +
  • +
+
+
+

For every Dumb Component:

+
+
+
    +
  • +

    What data does a component display: e.g. a header text, user information to be displayed, a loading flag, etc.
    +This will be a slice of the overall state of the parent Smart Component. +In general a Dumb Component presents a slice of its parent Smart Components state to the user.

    +
  • +
  • +

    What events can occur: keyboard events, mouse events, etc.
    +These events are all handled by its parent Smart Component - every event is passed up the tree to be handled by a Smart Component.

    +
  • +
+
+
+

These information should be reflected inside the modeled state. +The implementation is a TypeScript type - an interface or a class describing the model.

+
+
+

So there should be a type describing all state relevant for a Smart Component. +An instance of that type is send down the component tree at runtime. +Not every Dumb Component will need the whole state. +For instance a single Dumb Component could only need a single string.

+
+
+

The state model for the previous Listpicker example is shown in the following listing.

+
+
+
Listing 1. Listpicker state model
+
+
export class ListpickerState {
+
+  items: {}[]|undefined;
+  columns = ['key', 'value'];
+  keyColumn = 'key';
+  displayValueColumn = 'value';
+  filteredItems: {}[]|undefined;
+  filter = '';
+  placeholder = '';
+  caseSensitive = true;
+  isDisabled = false;
+  isDropdownOpen = false;
+  selectedItem: {}|undefined;
+  displayValue = '';
+
+}
+
+
+
+

Listpicker holds an instance of ListpickerState which is passed down the component tree via @Input() bindings in the Dumb Components. +Events emitted by children - Dumb Components - create a new instance of ListpickerState based on the current instance and the event and its data. +So a state transition is just setting a new instance of ListpickerState. +Angular Bindings propagate the value down the tree after exchanging the state.

+
+
+
Listing 2. Listpicker State transition
+
+
export class ListpickerComponent {
+
+  // initial default values are set
+  state = new ListpickerState();
+
+  /** User changes filter */
+  onFilterChange(filter: string): void {
+    // apply filter ...
+    const filteredList = this.filterService.filter(...);
+
+    // important: A new instance is created, instead of altering the existing one.
+    //            This makes change detection easier and prevents hard to find bugs.
+    this.state = Object.assign({}, this.state, {
+      filteredItems: filteredList,
+      filter: filter
+    });
+  }
+
+}
+
+
+
+
Note:
+

It is not always necessary to define the model as independent type. +So there would be no state property and just properties for every state defined directly in the component class. +When complexity grows and state becomes larger this is usually a good idea. +If the state should be shared between Smart Components a store is to be used.

+
+
+
+
+

When are Dumb Components needed

+
+
+

Sometimes it is not necessary to perform a full decomposition. The architecture does not enforce it generally. What you should keep in mind is, that there is always a point when it becomes recommendable.

+
+
+

For example a template with 800 lines of code is:

+
+
+
    +
  • +

    not understandable

    +
  • +
  • +

    not maintainable

    +
  • +
  • +

    not testable

    +
  • +
  • +

    not reusable

    +
  • +
+
+
+

So when implementing a template with more than 50 lines of code you should think about decomposition.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-consuming-rest-services.html b/docs/devon4ng/1.0/guide-consuming-rest-services.html new file mode 100644 index 00000000..c4cc4361 --- /dev/null +++ b/docs/devon4ng/1.0/guide-consuming-rest-services.html @@ -0,0 +1,527 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Consuming REST services

+
+
+

A good introduction to working with Angular HttpClient can be found in Angular Docs

+
+
+

This guide will cover how to embed Angular HttpClient in the application architecture. +For back-end requests, a special service with the suffix Adapter needs to be defined.

+
+
+
+
+

Defining Adapters

+
+
+

It is a good practice to have an Angular service whose single responsibility is to call the back-end and parse the received value to a transfer data model (e.g. Swagger generated TOs). +Those services need to have the suffix Adapter to make them easy to recognize.

+
+
+
+Adapters handle back-end communication +
+
Figure 1. Adapters handle back-end communication
+
+
+

As illustrated in the figure a Use Case service does not use Angular HttpClient directly but uses an adapter. +A basic adapter could look like this:

+
+
+
Listing 1. Example adapter
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Observable } from 'rxjs/Observable';
+
+import { FlightTo } from './flight-to';
+
+@Injectable({
+ providedIn: 'root',
+})
+export class FlightsAdapter {
+
+  constructor(
+    private httpClient: HttpClient
+  ) {}
+
+  getFlights(): Observable<FlightTo> {
+    return this.httpClient.get<FlightTo>('/relative/url/to/flights');
+  }
+
+}
+
+
+
+

The adapters should use a well-defined transfer data model. +This could be generated from server endpoints with CobiGen, Swagger, typescript-maven-plugin, etc. +If inside the application there is a business model defined, the adapter has to parse to the transfer model. +This is illustrated in the following listing.

+
+
+
Listing 2. Example adapter mapping from business model to transfer model
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Observable } from 'rxjs/Observable';
+import { map } from 'rxjs/operators';
+
+import { FlightTo } from './flight-to';
+import { Flight } from '../../../model/flight';
+
+@Injectable({
+ providedIn: 'root',
+})
+export class FlightsAdapter {
+
+  constructor(
+    private httpClient: HttpClient
+  ) {}
+
+  updateFlight(flight: Flight): Observable<Flight> {
+    const to = this.mapFlight(flight);
+
+    return this.httpClient.post<FlightTo>('/relative/url/to/flights', to).pipe(
+      map(to => this.mapFlightTo(to))
+    );
+  }
+
+  private mapFlight(flight: Flight): FlightTo {
+    // mapping logic
+  }
+
+  private mapFlightTo(flightTo: FlightTo): Flight {
+    // mapping logic
+  }
+
+}
+
+
+
+
+
+

Token management

+
+
+

In most cases the access to back-end API is secured using well known mechanisms as CSRF, JWT or both. In these cases the front-end application must manage the tokens that are generated when the user authenticates. More concretely it must store them to include them in every request automatically. Obviously, when user logs out these tokens must be removed from localStorage, memory, etc.

+
+
+
+
+

Store security token

+
+
+

In order to make this guide simple we are going to store the token in memory. Therefore, if we consider that we already have a login mechanism implemented we would like to store the token using an auth.service.ts:

+
+
+
+
import { Injectable } from '@angular/core';
+import { Router } from '@angular/router';
+
+@Injectable({
+  providedIn: 'root',
+})
+export class AuthService {
+  private loggedIn = false;
+  private token: string;
+
+  constructor(public router: Router) {}
+
+  public isLogged(): boolean {
+    return this.loggedIn || false;
+  }
+
+  public setLogged(login: boolean): void {
+    this.loggedIn = login;
+  }
+
+  public getToken(): string {
+    return this.token;
+  }
+
+  public setToken(token: string): void {
+    this.token = token;
+  }
+}
+
+
+
+

Using the previous service we will be able to store the token obtained in the login request using the method setToken(token). Please consider that, if you want a more sophisticated approach using localStorage API, you will need to modify this service accordingly.

+
+
+
+
+

Include token in every request

+
+
+

Now that the token is available in the application it is necessary to include it in every request to a protected API endpoint. Instead of modifying all the HTTP requests in our application, Angular provides a class to intercept every request (and every response if we need to) called HttpInterceptor. Let’s create a service called http-interceptor.service.ts to implement the intercept method of this class:

+
+
+
+
import {
+  HttpEvent,
+  HttpHandler,
+  HttpInterceptor,
+  HttpRequest,
+} from '@angular/common/http';
+import { Injectable } from '@angular/core';
+import { Observable } from 'rxjs';
+import { environment } from '../../../environments/environment';
+import { AuthService } from './auth.service';
+
+@Injectable()
+export class HttpRequestInterceptorService implements HttpInterceptor {
+
+  constructor(private auth: AuthService) {}
+
+  intercept(
+    req: HttpRequest<any>,
+    next: HttpHandler,
+  ): Observable<HttpEvent<any>> {
+    // Get the auth header from the service.
+    const authHeader: string = this.auth.getToken();
+    if (authHeader) {
+      let authReq: HttpRequest<any>;
+
+      // CSRF
+      if (environment.security == 'csrf') {
+        authReq = req.clone({
+          withCredentials: true,
+          setHeaders: { 'x-csrf-token': authHeader },
+        });
+      }
+
+      // JWT
+      if (environment.security == 'jwt') {
+        authReq = req.clone({
+          setHeaders: { Authorization: authHeader },
+        });
+      }
+
+      return next.handle(authReq);
+    } else {
+      return next.handle(req);
+    }
+  }
+}
+
+
+
+

As you may notice, this service is making use of an environment field environment.security to determine if we are using JWT or CSRF in order to inject the token accordingly. In your application you can combine both if necessary.

+
+
+

Configure environment.ts file to use the CSRF/JWT.

+
+
+
+
security: 'csrf'
+
+
+
+

The authHeader used is obtained using the injected service AuthService already presented above.

+
+
+

In order to activate the interceptor we need to provide it in our app.module.ts or core.module.ts depending on the application structure. Let’s assume that we are using the latter and the interceptor file is inside a security folder:

+
+
+
+
...
+import { HttpRequestInterceptorService } from './security/http-request-interceptor.service';
+...
+
+@NgModule({
+  imports: [...],
+  exports: [...],
+  declarations: [],
+  providers: [
+    ...
+    {
+      provide: HTTP_INTERCEPTORS,
+      useClass: HttpRequestInterceptorService,
+      multi: true,
+    },
+  ],
+})
+export class CoreModule {}
+
+
+
+

Angular automatically will now modify every request and include in the header the token if it is convenient.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-creating-angular-app-with-nx-cli.html b/docs/devon4ng/1.0/guide-creating-angular-app-with-nx-cli.html new file mode 100644 index 00000000..9172412c --- /dev/null +++ b/docs/devon4ng/1.0/guide-creating-angular-app-with-nx-cli.html @@ -0,0 +1,408 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Nx CLI

+
+
+

Nx CLI provides a wrapper around Angular CLI and makes it faster, in addition to other benefits. Its computational cache significantly speeds up building and serving applications successively.

+
+
+

With Nx CLI you always get the latest tools to develop your angular applications. By default it is integrated with tools like Jest, Cypress, ESLint and many more. Though you can always configure to use other tools as per your preference.

+
+
+

One difference you will find while working with Nx CLI is that an Nx workspace follows a certain folder structure. That is because Nx strongly supports monorepo architecture, wherein you place all the different components that make up your entire application (front-end, back-end, libraries, models) into one single repository. Nx also provides the tooling between these different components, so that you can share your code across your different applications in the same repo and avoid re-writing. We will go through the folder structure later in this guide. But we might not always want to follow a monorepo architecture and it is possible to create a single application with Nx CLI.

+
+
+

In this guide we are going to learn how to create an angular app with Nx CLI. But first, let us start by installing Nx

+
+
+
+
+

Installing Nx

+
+
+

You can install Nx globally in your system using the following command:

+
+
+
+
npm install -g nx
+
+
+
+

Now let us proceed to creating an angular application using Nx.

+
+
+
+
+

Creating Angular app with Nx

+
+
+

To create an angular app with Nx, we simply create an Nx workspace with angular preset using the following command:

+
+
+
+
npx create-nx-workspace --preset=angular
+
+
+
+

The CLI will ask a series of questions and setup your workspace with an empty angular application.

+
+
+
+Creating a Nx workspace +
+
Figure 1. Creating a Nx workspace.
+
+
+

Here we have given the workspace name as nx-guide and the app name as nx-ng-app. Let us have a look at the folder structure created by Nx CLI.

+
+
+
+
+

Nx workspace folder structure

+
+
+

Every Nx workspace has the following folder structure:

+
+
+
+
myorg/
+├── apps/
+├── libs/
+├── tools/
+├── workspace.json
+├── nx.json
+├── package.json
+└── tsconfig.base.json
+
+
+
+

Nx creates such a folder structure because it strongly supports the concept of monorepo, wherein all the inter-related applications and libraries are put together in the same repository for better maintenance, code-sharing and avoiding duplication of codes.

+
+
+

As per the structure, all runnable applications should belong in the apps/ directory. Supporting applications and libraries can be put in the libs/ folder, with each library defining its own external API to differentiate between them. tools/ folder can contain scripts which act on your code like database scripts, for example. The JSON files contain various information and configuration settings about the workspace and the projects within them.

+
+
+

You will find your angular app named nx-ng-app in the apps/ folder. The folder structure within your app is similar to any Angular app created with Angular CLI.

+
+
+
+Your Nx workspace in VSCode +
+
Figure 2. Your Nx workspace in VSCode.
+
+
+

You will also notice another app named nx-ng-app-e2e automatically generated in the apps folder. This is for performing end-to-end testing with Cypress on your app.

+
+
+

Now that we have created our angular app, let us serve it so we can view the application in our browser.

+
+
+
+
+

Running your angular application

+
+
+

You can still use the ng command to serve your application from your workspace root directory as such:

+
+
+
+
ng serve nx-ng-app
+
+
+
+

Using Nx, you can use either of the commands below for the same purpose:

+
+
+
+
nx run my-app:serve
+nx serve my-app
+
+
+
+

Once your code is compiled, you can view your application at http://localhost:4200 as usual.

+
+
+
+
+

Conclusion

+
+
+

In this guide you learned how to install Nx and create an Angular application with it. Nx comes with a host of features and documentation. You can read more about using Nx for your Angular projects over here.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-cypress.html b/docs/devon4ng/1.0/guide-cypress.html new file mode 100644 index 00000000..e69c387f --- /dev/null +++ b/docs/devon4ng/1.0/guide-cypress.html @@ -0,0 +1,1064 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Testing e2e with Cypress

+
+
+

This guide will cover the basics of e2e testing using Cypress.

+
+
+

Cypress is a framework “all in one” that provides the necessary libraries to write specific e2e tests, without the need of Selenium.

+
+
+

Why Cypress?

+
+
+
    +
  • +

    Uses JavaScript

    +
  • +
  • +

    It works directly with the browser so the compatibility with the front-end framework the project uses (in this case Angular) is not a problem.

    +
  • +
  • +

    Easy cross browser testing

    +
  • +
+
+
+
+
+

Setup

+
+
+

Install +First of all we need to install it, we can use npm install:

+
+
+
+
$ npm install -D cypress
+
+
+
+

Or we can install it with yarn:

+
+
+
+
$ yarn add -D cypress
+
+
+
+

We need to run Cypress in order to get the folder tree downloaded, then create a tsconfig.json file inside cypress folder to add the typescript configuration.

+
+
+
+
$ . /node_modules/.bin/cypress open
+
+
+
+
Listing 1. tsconfig.json
+
+
{
+  "compilerOptions": {
+    "strict": true,
+    "baseUrl": "../node_modules",
+    "target": "es5",
+    "lib": ["es5", "dom"],
+    "types": ["cypress"]
+  },
+  "include": [
+    "**/*.ts"
+  ]
+}
+
+
+
+

BaseUrl

+
+
+

Let’s setup the base URL so when we run the tests cypress will "navigate" to the right place, go to cypress.json on the root of the project.

+
+
+
Listing 2. cypress.json
+
+
{
+  "baseUrl": "http://localhost:4200"
+}
+
+
+
+
+
+

Files / Structure

+
+
+
+
/cypress
+  tsconfig.json
+  /fixtures
+    - example.json
+  /integration
+    - button.spec.ts
+    - test.spec.ts
+    /examples
+  /plugins
+    - index.js
+  /support
+    - commands.js
+    - index.js
+
+
+
+

tsconfig.json for typescript configuration.

+
+
+

fixtures to store our mock data or files (images, mp3…​) to use on our tests.

+
+
+

integration is where our tests go, by default it comes with an examples folder with tested samples.

+
+
+

plugins is where the configuration files of the plugins go.

+
+
+

support to add custom commands.

+
+
+
+
+

== =

+
+
+

If you are using Nx, it automatically generates a e2e cypress project for every project that you generate. So you already get the configuration files like tsconfig.json and cypress.json and also get the folder structure described above. This helps you focus more on writing your tests rather than setting up Cypress.

+
+
+
+
+

== =

+
+ +
+
+
+

Tests

+
+
+

The structure is the same than Mocha.

+
+
+

First, we create a file, for example form.spec.ts, inside we define a context to group all our tests referred to the same subject.

+
+
+
Listing 3. form.spec.ts
+
+
context('Button page', () => {
+  beforeEach(() => {
+    cy.visit('/');
+  });
+  it('should have button',()=>{
+    cy.get('button').should('exist');
+  });
+  it('should contain PRESS',()=>{
+    cy.contains('button', 'PRESS');
+  });
+});
+
+
+
+
beforeEach
+

Visit '/' before every test.

+
+
+
it
+

Inside we write the test.

+
+
+

The result:

+
+
+
+contextImg +
+
+
+

For more info check Cypress documentation

+
+
+

On kitchensink +you can find an official cypress demo with all the commands being used.

+
+
+
+
+

Fixtures

+
+
+

We use fixtures to mock data, it can be a json, an image, video…​

+
+
+
+
{
+  "name": "Dummy name",
+  "phone": "999 99 99 99",
+  "body": "Mock data"
+}
+
+
+
+

You can store multiple mocks on the same fixture file.

+
+
+
+
{
+  "create":{"name": "e2etestBox"},
+  "boxFruit":{
+    "uuid":"3376339576e33dfb9145362426a33333",
+    "name":"e2etestBox",
+    "visibility":true,
+    "items":[
+      {"name":"apple","units":3},
+      {"name":"kiwi","units":2}
+    ]
+  }
+}
+
+
+
+

To access data we don’t need to import any file, we just call cy.fixture(filename) inside the **.spec.ts. We can name it as we want.

+
+
+
+
cy.fixture('box.json').as('fruitBox')
+
+
+
+

cy.fixture('box.json') we get access to box.json +.as('fruitBox') is used to create an alias (fruitBox) to the fixture.

+
+
+

For more info check Fixtures documentation

+
+
+
+
+

Request / Route

+
+
+

With cypress you can test your application with real data or with mocks.

+
+
+

Not using mocks guarantees that your tests are real e2e test but makes them vulnerable to external issues. +When you mock data you don’t know exactly if the data and the structure received from the backend is correct because you are forcing a mock on the response, but you can avoid external issues, run test faster and have better control on the structure and status.

+
+
+

To get more information go to Testing Strategies

+
+
+
+
+

Route

+
+
+

Cypress can intercept a XHR request and interact with it.

+
+
+
+
cy.server();
+cy.route(
+  'GET',
+  '/apiUrl/list',
+  [{"name":"apple", "units":3},{"name":"kiwi", "units":2}]
+)
+
+
+
+

cy.server(options) start a server to interact with the responses.

+
+
+
cy.route(options) intercepts a XMLHttpRequests
+
    +
  • +

    method GET

    +
  • +
  • +

    URL /apiUrl/list'

    +
  • +
  • +

    response [{"name":"apple", "units":3},{"name":"kiwi", "units":2}]

    +
  • +
+
+
+

Waits

+
+
+

Every cypress action has a default await time to avoid asynchronous issues, but this time can be short for some particular actions like API calls, for those cases we can use cy.wait().

+
+
+
+
cy.server();
+cy.route('/apiUrl/list').as('list');
+cy.visit('/boxList');
+cy.wait('@list');
+
+
+
+

You can find more information about cy.wait() here

+
+
+

To mock data with fixtures:

+
+
+
+
cy.fixture('box')
+  .then(({boxFruit}) => {
+    cy.route(
+      'GET',
+      '/apiUrl/list',
+      boxFruit
+    ).as('boxFruit');
+    cy.get('#button').click();
+    cy.wait('@journalsList');
+    cy.get('#list').contains('apple');
+  })
+
+
+
+

We get boxFruit data from the box fixture and then we mock the API call with it so now the response of the call is boxFruit object. +When the button is clicked, it waits to receive the response of the call and then checks if the list contains one of the elements of the fruitBox.

+
+
+
+
+

Request

+
+
+

Make a HTTP request.

+
+
+
+
cy.server();
+cy.request('http://localhost:4200/')
+  .its('body')
+  .should('include', '<h1>Welcome to Devon4ngAngularElementsTest!</h1>');
+
+
+
+

If we have 'http://localhost:4200' as baseUrl on cypress.json

+
+
+
+
cy.server();
+cy.request('/')
+  .its('body')
+  .should('include', '<h1>Welcome to Devon4ngAngularElementsTest!</h1>');
+// Goes to http://localhost:4200/
+
+
+
+

We can add other options, like we can send the body of a form.

+
+
+
+
cy.server();
+cy.request({
+  method: 'POST',
+  url: '/send',
+  form: true,
+  body: {
+    name: 'name task',
+    description: 'description of the task'
+  }
+});
+
+
+
+
+
+

Custom commands

+
+
+

If you see yourself writing the same test more than once (login is a common one), you can create a custom command to make things faster.

+
+
+

Cypress.Commands.add('name', ()⇒{}) to create the test.

+
+
+
Listing 4. commands.ts
+
+
Cypress.Commands.add('checkPlaceholder', (name) => {
+  cy.get(`[name='${name}']`).click();
+  cy.get('mat-form-field.mat-focused').should('exist');
+});
+
+
+
+
index.ts
+

To use the commands we need to import the files on support/index.ts

+
+
+
Listing 5. index.ts
+
+
import './commands'
+import './file1'
+import './folder/file2'
+
+
+
+

index.ts is where all our custom commands files unite so Cypress knows where to find them.

+
+
+

And as we are using typescript we need to define a namespace, interface and define our function.

+
+
+
    +
  • +

    index.d.ts

    +
  • +
+
+
+
+
declare namespace Cypress {
+  interface Chainable<Subject> {
+    checkPlaceholder(name:string):Chainable<void>
+  }
+}
+
+
+ +
+
+
+

Cross browser testing

+
+
+

By default the browser used by Cypress is Chrome, it has compatibility with its family browsers (including Microsoft Edge) and has beta support for Mozilla Firefox.

+
+
+

To change the browser on the panel we can do it by selecting the desired one on the browsers tab before running the spec file.

+
+
+

Cypress will detect and display, except electron, only the browsers that you have already installed on your machine.

+
+
+
+browserTab +
+
+
+

Once the browser is selected, you can run your tests.

+
+
+

To change the browser on the automatic test run, you can add a flag on the node command

+
+
+
+
cypress run --browser edge
+
+
+
+

Only if we use the cypress run command.

+
+
+

Or we can change the script file.

+
+
+
    +
  • +

    cypress/script.js

    +
  • +
+
+
+
+
const runTests= async ()=>{
+  ...
+  const {totalFailed} = await cypress.run({browser:'edge'});
+  ...
+};
+
+
+ +
+
+
+

Viewport

+
+
+

Cypress allow us to create tests depending on the Viewport, so we can test responsiveness.

+
+
+

There are different ways to use it:

+
+
+

Inside a test case

+
+
+
+
it('should change title when viewport is less than 320px', ()=>{
+  cy.get('.title-l').should('be.visible');
+  cy.get('.title-s').should('not.be.visible');
+  cy.viewport(320, 480);
+  cy.get('.title-l').should('not.be.visible');
+  cy.get('.title-s').should('be.visible');
+})
+
+
+
+

Passing the configuration as an option

+
+
+
+
describe('page display on medium size screen', {
+  viewportHeight: 1000,
+  viewportWidth: 400
+}, () => {
+  ...
+})
+
+
+
+

Or we can set a default

+
+
+
    +
  • +

    cypress.json

    +
  • +
+
+
+
+
...
+{
+ "viewportHeight": 1000,
+ "viewportWidth": 400
+}
+...
+
+
+ +
+
+
+

Test retries

+
+
+

We can get false negatives intermittently due external issues that can affect our tests, because of that we can add, in the configuration, a retries entry so Cypress can run again a certain failed test the selected number of times to verify that the error is real.

+
+
+

We can set retries for run or open mode.

+
+
+
    +
  • +

    cypress.json

    +
  • +
+
+
+
+
...
+"retries": {
+    "runMode": 3,
+    "openMode": 3
+  }
+...
+
+
+
+

The retries can be configured on the cypress.json or directly on a specific test.

+
+
+
+
it('should get button', {
+  retries: {
+    runMode: 2,
+    openMode: 2
+  }
+}, () => {
+  ...
+})
+
+
+
+

These retries are not shown on the test log.

+
+
+

Check more on retries documentation

+
+
+
+
+

Reporter

+
+
+

The tests results appear on the terminal, but to have a more friendly view we can add a reporter.

+
+
+
+reporter +
+
+
+
+
+

Mochawesome

+
+
+

In this case we are going to use Mochawesome, initially its a Mocha reporter but as Cypress uses Mocha it works the same.

+
+
+

Install

+
+
+

npm

+
+
+
+
npm install --save-dev mochawesome
+
+
+
+

yarn

+
+
+
+
yarn add -D mochawesome
+
+
+
+

To run the reporter:

+
+
+
+
cypress run --reporter mochawesome
+
+
+
+

Mochawesome saves by default the generated files on `./mochawesome-report/` but we can add options to change this behavior.

+
+
+

Options can be passed to the reporter in two ways

+
+
+

Using a flag

+
+
+
+
cypress run --reporter mochawesome --reporter-options reportDir=report
+
+
+
+

Or on cypress.json

+
+
+
+
{
+  "baseUrl": "http://localhost:4200",
+  "reporter": "mochawesome",
+  "reporterOptions": {
+    "overwrite": false,
+    "html": false,
+    "json": true,
+    "reportDir": "cypress/report"
+  }
+}
+
+
+
+

Overwrite:false to not overwrite every **.spec.ts test report, we want them to create a merged version later.

+
+
+

reportDir to set a custom directory.

+
+
+

html:false because we don’t need it.

+
+
+

json:true to save them on json.

+
+
+

Mochawesome only creates the html file of the last .spec.ts file that the tests run, that’s why we don’t generate html reports directly, in order to stack them all on the same final html we need to merge the reports.

+
+ +
+

mochawesome-merge

+
+
+

Mochawesome-merge is a library that helps us to merge the different json.

+
+
+

npm

+
+
+
+
npm install --save-dev mochawesome-merge
+npm install --save-dev mochawesome-report-generator
+
+
+
+

yarn

+
+
+
+
yarn add -D mochawesome-merge
+yarn add -D mochawesome-report-generator
+
+
+
+

To merge the files we execute this command:

+
+
+
+
mochawesome-merge cypress/report/*.json > cypress/reportFinal.json
+
+
+
+

reportFinal.json is the result of this merge, with that we have the data of all the spec files in one json.

+
+
+

We can also automate the test, merge and conversion to html using a script.

+
+
+
+
const cypress = require('cypress');
+const fse = require('fs-extra');
+const { merge } = require('mochawesome-merge');
+const generator = require('mochawesome-report-generator');
+const runTests= async ()=>{
+  await fse.remove('mochawesome-report');
+  await fse.remove('cypress/report');
+  const {totalFailed} = await cypress.run();
+  const reporterOptions = {
+    files: ["cypress/report/*.json"]
+  };
+  await generateReport(reporterOptions);
+  if(totalFailed !==  0){
+    process.exit(2);
+  };
+};
+const generateReport = (options)=> {
+  return merge(options).then((jsonReport)=>{
+    generator.create(jsonReport).then(()=>{
+      process.exit();
+    });
+  });
+};
+runTests();
+
+
+
+

fse.remove() to remove older reports data.

+
+
+

cypress.run() to run the tests.

+
+
+

merge(options) we merge the json output from running the tests.

+
+
+

generator.create(jsonReport) then we generate the html view of the report.

+
+ +
+

On kitchensink +you can find an official cypress demo with all the commands being used.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-error-handler.html b/docs/devon4ng/1.0/guide-error-handler.html new file mode 100644 index 00000000..f5a938ac --- /dev/null +++ b/docs/devon4ng/1.0/guide-error-handler.html @@ -0,0 +1,510 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Error Handler in angular

+
+
+

Angular allows us to set up a custom error handler that can be used to control the different errors and handle them in a correct way. Using a global error handler will avoid mistakes and provide a user-friendly interface allowing us to indicate to the user what problem is happening.

+
+
+
+
+

What is ErrorHandler

+
+
+

ErrorHandler is the class that Angular uses by default to control the errors. This means that, even if the application doesn’t have a ErrorHandler it is going to use the one setup by default in Angular. This can be tested by trying to find a page not existing in any app, instantly Angular will print the error in the console.

+
+
+
+
+

Creating your custom ErrorHandler step by step

+
+
+

In order to create a custom ErrorHandler three steps are going to be needed:

+
+
+
+
+

Creating the custom ErrorHandler class

+
+
+

In this first step the custom ErrorHandler class is going to be created inside the folder /app/core/errors/errors-handler.ts:

+
+
+
+
import { ErrorHandler, Injectable, Injector } from '@angular/core';
+import { HttpErrorResponse } from '@angular/common/http';
+
+@Injectable()
+export class ErrorsHandler implements ErrorHandler {
+
+    constructor(private injector: Injector) {}
+
+    handleError(error: Error | HttpErrorResponse) {
+      //  To do: Use injector to get the necessary services to redirect or
+      // show a message to the user
+      const classname  = error.constructor.name;
+      switch ( classname )  {
+        case 'HttpErrorResponse':
+          console.error('HttpError:' + error.message);
+          if (!navigator.onLine) {
+            console.error('There\'s no internet connection');
+            // To do: control here in internet what you wanna do if user has no internet
+          } else {
+            console.error('Server Error:' + error.message);
+            // To do: control here if the server gave an error
+          }
+          break;
+        default:
+          console.error('Error:' + error.message);
+          // To do: control here if the client/other things gave an error
+      }
+    }
+}
+
+
+
+

This class can be used to control the different type of errors. If wanted, the classname variable could be used to add more switch cases. This would allow control of more specific situations.

+
+
+
+
+

Creating a ErrorInterceptor

+
+
+

Inside the same folder created in the last step we are going to create the ErrorInterceptor(errors-handler-interceptor.ts). This ErrorInterceptor is going to retry any failed calls to the server to make sure it is not being found before showing the error:

+
+
+
+
import { HttpInterceptor, HttpRequest, HttpHandler, HttpEvent } from '@angular/common/http';
+import { Injectable } from '@angular/core';
+import { Observable, of } from 'rxjs';
+import { retryWhen, delay, take, concatMap } from 'rxjs/operators';
+
+@Injectable()
+export class ErrorsHandlerInterceptor implements HttpInterceptor {
+
+    constructor() {}
+    intercept(req: HttpRequest<any>, next: HttpHandler): Observable<HttpEvent<any>> {
+        return next.handle(req).pipe(
+            retryWhen((errors: Observable<any>) => errors.pipe(
+                delay(500),
+                take(5),
+                concatMap((error: any, retryIndex: number) => {
+                    if (++retryIndex == 5) {
+                        throw error;
+                    }
+                    return of(error);
+                })
+            ))
+        );
+    }
+}
+
+
+
+

This custom made interceptor is implementing the HttpInterceptor and inside the method intercept using the method pipe,retryWhen,delay,take and concatMap from RxJs it is going to do the next things if there is errors:

+
+
+
    +
  1. +

    With delay(500) do a delay to allow some time in between requests

    +
  2. +
  3. +

    With take(5) retry five times.

    +
  4. +
  5. +

    With concatMap if the index that take() gives is not 5 it returns the error, else, it throws the error.

    +
  6. +
+
+
+
+
+

Creating a Error Module

+
+
+

Finally, creating a module(errors-handler.module.ts) is necessary to include the interceptor and the custom error handler. In this case, the module is going to be created in the same folder as the last two:

+
+
+
+
import { NgModule, ErrorHandler } from '@angular/core';
+import { CommonModule } from '@angular/common';
+import { ErrorsHandler } from './errors-handler';
+import { HTTP_INTERCEPTORS } from '@angular/common/http';
+import { ErrorsHandlerInterceptor } from './errors-handler-interceptor';
+
+@NgModule({
+  declarations: [], // Declare here component if you want to use routing to error component
+  imports: [
+    CommonModule
+  ],
+  providers: [
+    {
+      provide: ErrorHandler,
+      useClass: ErrorsHandler,
+    },
+    {
+      provide: HTTP_INTERCEPTORS,
+      useClass: ErrorsHandlerInterceptor,
+      multi: true,
+    }
+  ]
+})
+export class ErrorsHandlerModule { }
+
+
+
+

This module simply is providing the services that are implemented by our custom classes and then telling angular to use our custom made classes instead of the default ones. After doing this, the module has to be included in the app module app.module.ts in order to be used.

+
+
+
+
....
+  imports: [
+    ErrorsHandlerModule,
+    ....
+
+
+
+
+
+

Handling Errors

+
+
+

As a final step, handling these errors is necessary. There are different ways that can be used to control the errors, here are a few:

+
+
+
    +
  • +

    Creating a custom page and using with Router to redirect to a page showing an error.

    +
  • +
  • +

    Creating a service in the server side or Backend to create a log with the error and calling it with HttpClient.

    +
  • +
  • +

    Showing a custom made SnackBar with the error message.

    +
  • +
+
+
+
+
+

== Using SnackBarService and NgZone

+
+
+

If the SnackBar is used directly, some errors can occur, this is due to SnackBar being out of the Angular zone. In order to use this service properly, NgZone is necessary. The method run() from NgZone will allow the service to be inside the Angular Zone. An example on how to use it:

+
+
+
+
import { ErrorHandler, Injectable, Injector, NgZone } from '@angular/core';
+import { HttpErrorResponse } from '@angular/common/http';
+import { MatSnackBar } from '@angular/material';
+
+@Injectable()
+export class ErrorsHandler implements ErrorHandler {
+
+    constructor(private injector: Injector, private zone: NgZone) {}
+
+    handleError(error: Error | HttpErrorResponse) {
+      // Use injector to get the necessary services to redirect or
+      const snackBar: MatSnackBar = this.injector.get(MatSnackBar);
+      const classname  = error.constructor.name;
+      let message: string;
+      switch ( classname )  {
+        case 'HttpErrorResponse':
+          message = !(navigator.onLine) ? 'There is no internet connection' : error.message;
+          break;
+        default:
+          message = error.message;
+      }
+      this.zone.run(
+        () => snackBar.open(message, 'danger', { duration : 4000})
+      );
+    }
+}
+
+
+
+

Using Injector the MatSnackBar is obtained, then the correct message is obtained inside the switch. Finally, using NgZone and run(), we open the SnackBar passing the message, and the parameters wanted.

+
+
+

You can find a working example of this guide in devon4ts-samples.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-eslint.html b/docs/devon4ng/1.0/guide-eslint.html new file mode 100644 index 00000000..7e4e3f3b --- /dev/null +++ b/docs/devon4ng/1.0/guide-eslint.html @@ -0,0 +1,385 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular ESLint support

+
+
+ + + + + +
+ + +ESLint is supported in Angular 10.1.0 onward. +
+
+
+
+
+

What about TSLint?

+
+
+

TSLint is a fantastic tool. It is a linter that was written specifically to work based on the TypeScript AST format. This has advantages and disadvantages, as with most decisions we are faced with in software engineering!

+
+
+

One advantage is there is no tooling required to reconcile differences between ESLint and TypeScript AST formats, but the major disadvantage is that the tool is therefore unable to reuse any of the previous work which has been done in the JavaScript ecosystem around linting, and it has to re-implement everything from scratch. Everything from rules to auto-fixing capabilities and more.

+
+
+

However, the backers behind TSLint announced in 2019 that they would be deprecating TSLint in favor of supporting typescript-eslint in order to benefit the community. You can read more about that here

+
+
+

The TypeScript Team themselves also announced their plans to move the TypeScript codebase from TSLint to typescript-eslint, and they have been big supporters of this project. More details at https://github.com/microsoft/TypeScript/issues/30553

+
+
+

Angular ESLint support comes from the angular-eslint tooling package. Angular documentation also links to this repository as you can check in the ng lint section of the Angular CLI documentation.

+
+
+
+
+

Quick start with Angular and ESLint

+
+
+

In order to create a brand new Angular CLI workspace which uses ESLint instead of TSLint and Codelyzer, simply run the following commands:

+
+
+
+
##Install the Angular CLI and @angular-eslint/schematics globally however you want (e.g. npm, yarn, volta etc)
+
+$ npm i -g @angular/cli @angular-devkit/core @angular-devkit/schematics @angular-eslint/schematics
+
+##Create a new Angular CLI workspace using the @angular-eslint/schematics collection (instead of the default)
+
+$ ng new --collection=@angular-eslint/schematics
+
+
+
+
+
+

Migrating an Angular CLI project from Codelyzer and TSLint

+
+ +
+
+
+

1 - Add relevant dependencies

+
+
+

The first step is to run the schematic to add @angular-eslint to your project:

+
+
+
+
$ ng add @angular-eslint/schematics
+
+
+
+

This will handle installing the latest version of all the relevant packages for you and adding them to the devDependencies of your package.json.

+
+
+
+
+

2 - Run the convert-tslint-to-eslint schematic on a project

+
+
+

The next thing to do is consider which "project" you want to migrate to use ESLint. If you have a single application in your workspace you will likely have just a single entry in the projects configuration object within your angular.json file. If you have a projects/ directory in your workspace, you will have multiple entries in your projects configuration and you will need to choose which one you want to migrate using the convert-tslint-to-eslint schematic.

+
+
+

You can run it like so:

+
+
+
+
$ ng g @angular-eslint/schematics:convert-tslint-to-eslint {{YOUR_PROJECT_NAME_GOES_HERE}}
+
+
+
+

From now on, ng lint will use ESLint!

+
+
+
+
+

3 - Remove root TSLint configuration and use only ESLint

+
+
+

Once you are happy with your ESLint setup, you simply need to remove the root-level tslint.json and potentially uninstall TSLint and any TSLint-related plugins/dependencies if your Angular CLI workspace is now no longer using TSLint at all.

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-file-structure.html b/docs/devon4ng/1.0/guide-file-structure.html new file mode 100644 index 00000000..0817914e --- /dev/null +++ b/docs/devon4ng/1.0/guide-file-structure.html @@ -0,0 +1,421 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

File Structure

+
+ +
+
+
+

Top-level

+
+
+

The top-level file structure is defined by Angular CLI. You might put this "top-level file structure" into a sub-directory to facilitate your build, but this is not relevant for this guide. So the applications file structure relevant to this guide is the folder /src/app inside the part managed by Angular CLI.

+
+
+
Listing 1. Top-level file structure shows feature modules
+
+
    /src
+    └── /app
+        ├── /account-management
+        ├── /billing
+        ├── /booking
+        ├── /core
+        ├── /shared
+        ├── /status
+        |
+        ├── app.module.ts
+        ├── app.component.spec.ts
+        ├── app.component.ts
+        └── app.routing-module.ts
+
+
+
+

Besides the definition of app module the app folder has feature modules on top-level. +The special modules shared and core are present as well.

+
+
+
+
+

Feature Modules

+
+
+

A feature module contains the modules definition and two folders representing both layers.

+
+
+
Listing 2. Feature module file structure has both layers
+
+
    /src
+    └── /app
+        └── /account-management
+            ├── /components
+            ├── /services
+            |
+            ├── account-management.module.ts
+            ├── account-management.component.spec.ts
+            ├── account-management.component.ts
+            └── account-management.routing-module.ts
+
+
+
+

Additionally an entry component is possible. This would be the case in lazy loading scenarios. +So account-management.component.ts would be only present if account-management is lazy loaded. +Otherwise, the module’s routes would be defined Component-less +(see vsavkin blog post).

+
+
+
+
+

Components Layer

+
+
+

The component layer reflects the distinction between Smart Components and Dumb Components.

+
+
+
Listing 3. Components layer file structure shows Smart Components on top-level
+
+
    /src
+    └── /app
+        └── /account-management
+            └── /components
+                ├── /account-overview
+                ├── /confirm-modal
+                ├── /create-account
+                ├── /forgot-password
+                └── /shared
+
+
+
+

Every folder inside the /components folder represents a smart component. The only exception is /shared. +/shared contains Dumb Components shared across Smart Components inside the components layer.

+
+
+
Listing 4. Smart components contain Dumb components
+
+
    /src
+    └── /app
+        └── /account-management
+            └── /components
+                └── /account-overview
+                    ├── /user-info-panel
+                    |   ├── /address-tab
+                    |   ├── /last-activities-tab
+                    |   |
+                    |   ├── user-info-panel.component.html
+                    |   ├── user-info-panel.component.scss
+                    |   ├── user-info-panel.component.spec.ts
+                    |   └── user-info-panel.component.ts
+                    |
+                    ├── /user-header
+                    ├── /user-toolbar
+                    |
+                    ├── account-overview.component.html
+                    ├── account-overview.component.scss
+                    ├── account-overview.component.spec.ts
+                    └── account-overview.component.ts
+
+
+
+

Inside the folder of a Smart Component the component is defined. +Besides that are folders containing the Dumb Components the Smart Component consists of. +This can be recursive - a Dumb Component can consist of other Dumb Components. +This is reflected by the file structure as well. This way the structure of a view becomes very readable. +As mentioned before, if a Dumb Component is used by multiple Smart Components inside the components layer +it is put inside the /shared folder inside the components layer.

+
+
+

With this way of thinking the shared module makes a lot of sense. If a Dumb Component is used by multiple Smart Components +from different feature modules, the Dumb Component is placed into the shared module.

+
+
+
Listing 5. The shared module contains Dumb Components shared across Smart Components from different feature modules
+
+
    /src
+    └── /app
+        └── /shared
+            └── /user-panel
+                |
+                ├── user-panel.component.html
+                ├── user-panel.component.scss
+                ├── user-panel.component.spec.ts
+                └── user-panel.component.ts
+
+
+
+

The layer folder /components is not necessary inside the shared module. +The shared module only contains components!

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-internationalization.html b/docs/devon4ng/1.0/guide-internationalization.html new file mode 100644 index 00000000..8a265a21 --- /dev/null +++ b/docs/devon4ng/1.0/guide-internationalization.html @@ -0,0 +1,575 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Internationalization

+
+
+

Nowadays, a common scenario in front-end applications is to have the ability to translate labels and locate numbers, dates, currency and so on when the user clicks over a language selector or similar. devon4ng and specifically Angular has a default mechanism in order to fill the gap of such features, and besides there are some wide used libraries that make even easier to translate applications.

+
+ +
+
+
+

devon4ng i18n approach

+
+
+

The official approach could be a bit complicated, therefore the recommended one is to use the library Transloco from https://github.com/ngneat/transloco/.

+
+
+
+
+

Install and configure Transloco

+
+
+

In order to include this library in your devon4ng Angular >= 7.2 project you will need to execute in a terminal:

+
+
+
+
$ ng add @ngneat/transloco
+
+
+
+

As part of the installation process you’ll be presented with questions. Once you answer them, everything you need will automatically be created for you.

+
+
+
    +
  • +

    First, Transloco creates boilerplate files for the requested translations.

    +
  • +
  • +

    Next, it will create a new file, transloco-root.module.ts which exposes an Angular’s module with a default configuration, and inject it into the AppModule.

    +
  • +
+
+
+
+
import { HttpClient } from '@angular/common/http';
+import {
+  TRANSLOCO_LOADER,
+  Translation,
+  TranslocoLoader,
+  TRANSLOCO_CONFIG,
+  translocoConfig,
+  TranslocoModule
+} from '@ngneat/transloco';
+import { Injectable, NgModule } from '@angular/core';
+import { environment } from '../environments/environment';
+
+@Injectable({ providedIn: 'root' })
+export class TranslocoHttpLoader implements TranslocoLoader {
+  constructor(private http: HttpClient) {}
+
+  getTranslation(lang: string) {
+    return this.http.get<Translation>(`/assets/i18n/${lang}.json`);
+  }
+}
+
+@NgModule({
+  exports: [ TranslocoModule ],
+  providers: [
+    {
+      provide: TRANSLOCO_CONFIG,
+      useValue: translocoConfig({
+        availableLangs: ['en', 'es'],
+        defaultLang: 'en',
+        // Remove this option if your application doesn't support changing language in runtime.
+        reRenderOnLangChange: true,
+        prodMode: environment.production,
+      })
+    },
+    { provide: TRANSLOCO_LOADER, useClass: TranslocoHttpLoader }
+  ]
+})
+export class TranslocoRootModule {}
+
+
+
+ + + + + +
+ + +As you might have noticed it also set an HttpLoader into the module’s providers. The HttpLoader is a class that implements the TranslocoLoader interface. It’s responsible for instructing Transloco how to load the translation files. It uses Angular HTTP client to fetch the files, based on the given path. +
+
+
+
+
+

Usage

+
+
+

In order to translate any label in any HTML template you will need to use the transloco pipe available:

+
+
+
+
{{ 'HELLO' | transloco }}
+
+
+
+

An optional parameter from the component TypeScript class could be included as follows:

+
+
+
+
{{ 'HELLO' | transloco: { value: dynamic } }}
+
+
+
+

It is possible to use with inputs:

+
+
+
+
<span [attr.alt]="'hello' | transloco">Attribute</span>
+<span [title]="'hello' | transloco">Property</span>
+
+
+
+

In order to change the language used you will need to create a button or selector that calls the this.translocoService.use(language: string) method from TranslocoService. For example:

+
+
+
+
export class AppComponent {
+  constructor(private translocoService: TranslocoService) {}
+
+  changeLanguage(lang) {
+      this.translocoService.setActiveLang(lang);
+  }
+}
+
+
+
+

The translations will be included in the en.json, es.json, de.json, etc. files inside the /assets/i18n folder. For example en.json would be (using the previous parameter):

+
+
+
+
{
+    "HELLO": "hello"
+}
+
+
+
+

Or with an optional parameter:

+
+
+
+
{
+    "HELLO": "hello {{value}}"
+}
+
+
+
+

Transloco understands nested JSON objects. This means that you can have a translation that looks like this:

+
+
+
+
{
+    "HOME": {
+        "HELLO": "hello {{value}}"
+    }
+}
+
+
+
+

In order to access the value, use the dot notation, in this case HOME.HELLO.

+
+
+
+
+

Using the service, pipe or directive

+
+ +
+
+
+

== Structural Directive

+
+
+

Using a structural directive is the recommended approach. It’s DRY and efficient, as it creates one subscription per template:

+
+
+
+
<ng-container *transloco="let t">
+  <p>{{ t('title') }}</p>
+
+  <comp [title]="t('title')"></comp>
+</ng-container>
+
+
+
+

Note that the t function is memoized. It means that given the same key it will return the result directly from the cache.

+
+
+

We can pass a params object as the second parameter:

+
+
+
+
<ng-container *transloco="let t">
+  <p>{{ t('name', { name: 'Transloco' }) }}</p>
+</ng-container>
+
+
+
+

We can instruct the directive to use a different language in our template:

+
+
+
+
<ng-container *transloco="let t; lang: 'es'">
+  <p>{{ t('title') }}</p>
+</ng-container>
+
+
+
+
+
+

== Pipe

+
+
+

The use of pipes can be possible too:

+
+
+

template:

+
+
+
+
<div>{{ 'HELLO' | transloco:param }}</div>
+
+
+
+

component:

+
+
+
+
param = {value: 'world'};
+
+
+
+
+
+

== Attribute Directive

+
+
+

The last option available with transloco is the attribute directive:

+
+
+
+
<div transloco="HELLO" [translocoParams]="{ value: 'world' }"></div>
+
+
+
+
+
+

== Service

+
+
+

If you need to access translations in any component or service you can do it injecting the TranslocoService into them:

+
+
+
+
// Sync translation
+translocoService.translate('HELLO', {value: 'world'});
+
+// Async translation
+translocoService.selectTranslate('HELLO', { value: 'world' }).subscribe(res => {
+    console.log(res);
+    //=> 'hello world'
+});
+
+
+
+ + + + + +
+ + +You can find a complete example at https://github.com/devonfw/devon4ng-application-template. +
+
+
+

Please, visit https://github.com/ngneat/transloco/ for more info.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-ionic-from-code-to-android.html b/docs/devon4ng/1.0/guide-ionic-from-code-to-android.html new file mode 100644 index 00000000..3f75d60c --- /dev/null +++ b/docs/devon4ng/1.0/guide-ionic-from-code-to-android.html @@ -0,0 +1,606 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Ionic to android

+
+
+

This page is written to help developers to go from the source code of an ionic application to an android one, with this in mind, topics such as: environment, commands, modifications,…​ are covered.

+
+
+
+
+

Assumptions

+
+
+

This document assumes that the reader has already:

+
+
+
    +
  • +

    Source code of an Ionic application and wants to build it on an android device,

    +
  • +
  • +

    A working installation of NodeJS

    +
  • +
  • +

    An Ionic CLI installed and up-to-date.

    +
  • +
  • +

    Android Studio and Android SDK.

    +
  • +
+
+
+
+
+

From Ionic to Android project

+
+
+

When a native application is being designed, sometimes, functionalities that uses camera, geolocation, push notification, …​ are requested. To resolve these requests, Capacitor can be used.

+
+
+

In general terms, Capacitor wraps apps made with Ionic (HTML, SCSS, Typescript) into WebViews that can be displayed in native applications (Android, IOS) and allows the developer to access native functionalities like the ones said before.

+
+
+

Installing capacitor is as easy as installing any node module, just a few commands have to be run in a console:

+
+
+
    +
  • +

    cd name-of-ionic-4-app

    +
  • +
  • +

    npm install --save @capacitor/core @capacitor/cli

    +
  • +
+
+
+

Then, it is necessary to initialize capacitor with some information: app id, name of the app and the directory where your app is stored. To fill this information, run:

+
+
+
    +
  • +

    npx cap init

    +
  • +
+
+
+
+
+

Modifications

+
+
+

Throughout the development process, usually back-end and front-end are on a local computer, so it’s a common practice to have different configuration files for each environment (commonly production and development). Ionic uses an angular.json file to store those configurations and some rules to be applied.

+
+
+

If a back-end is hosted on http://localhost:8081, and that direction is used in every environment, the application built for android will not work because computer and device do not have the same localhost. Fortunately, different configurations can be defined.

+
+
+

The Android emulator uses 10.0.2.2 as an alias for 127.0.0.1 (the computer’s localhost), so adding http://10.0.2.2:8081 in a new environment file and modifying angular.json accordingly will make it possible to connect the front-end and the back-end.

+
+
+
+Android environment and angular.json +
+
+
+
+
    "build": {
+    ...
+        "configurations": {
+            ...
+            "android": {
+                "fileReplacements": [
+                    {
+                        "replace": "src/environments/environment.ts",
+                        "with": "src/environments/environment.android.ts"
+                    }
+                ]
+            },
+        }
+    }
+
+
+
+
+
+

Build

+
+
+

Once configured, it is necessary to build the Ionic app using this new configuration:

+
+
+
    +
  • +

    ionic build --configuration=android

    +
  • +
+
+
+

The next commands copy the build application on a folder named android and open android studio.

+
+
+
    +
  • +

    npx cap add android

    +
  • +
  • +

    npx cap copy

    +
  • +
  • +

    npx cap open android

    +
  • +
+
+
+
+
+

From Android project to emulated device

+
+
+

Once Android Studio is opened, follow these steps:

+
+
+
    +
  1. +

    Click on "Build" → Make project.

    +
  2. +
  3. +

    Click on "Build" → Make Module 'app' (default name).

    +
  4. +
+
+
+

Click on make project +click on make app

+
+
+
    +
  1. +

    Click on" Build" → Build Bundle(s) / APK(s) → Build APK(s).

    +
  2. +
  3. +

    Click on run and choose a device.

    +
  4. +
+
+
+

click on build APK +click on running device

+
+
+

If there are no devices available, a new one can be created:

+
+
+
    +
  1. +

    Click on "Create new device"

    +
  2. +
  3. +

    Select hardware and click "Next". For example: Phone → Nexus 5X.

    +
  4. +
+
+
+

Create new device +Select hardware

+
+
+
    +
  1. +

    Download a system image.

    +
    +
      +
    1. +

      Click on download.

      +
    2. +
    3. +

      Wait until the installation has finished and then click "Finish".

      +
    4. +
    5. +

      Click "Next".

      +
    6. +
    +
    +
  2. +
  3. +

    Verify configuration (default configuration should be enough) and click "Next".

    +
  4. +
+
+
+

Download system image +Check configuration

+
+
+
    +
  1. +

    Check that the new device is created correctly.

    +
  2. +
+
+
+
+New created device +
+
+
+
+
+

From Android project to real device

+
+
+

To test on a real android device, an easy approach to communicate a smartphone (front-end) and computer (back-end) is to configure a WiFi hotspot and connect the computer to it. A guide about this process can be found here.

+
+
+

Once connected, run ipconfig on a console if you are using windows or ifconfig on a Linux machine to get the IP address of your machine’s Wireless LAN adapter WiFi.

+
+
+
+Result of `ipconfig` command on Windows 10 +
+
+
+

This obtained IP must be used instead of "localhost" or "10.0.2.2" at environment.android.ts.

+
+
+
+Android environment file server URL +
+
+
+

After this configuration, follow the build steps in "From Ionic to Android project" and the first three steps in "From Android project to emulated device".

+
+
+
+
+

Send APK to Android through USB

+
+
+

To send the built application to a device, you can connect computer and mobile through USB, but first, it is necessary to unlock developer options.

+
+
+
    +
  1. +

    Open "Settings" and go to "System".

    +
  2. +
  3. +

    Click on "About".

    +
  4. +
  5. +

    Click "Build number" seven times to unlock developer options.

    +
  6. +
+
+
+
+Steps to enable developer options: 1, 2, 3 +
+
+
+
    +
  1. +

    Go to "System" again and then to "Developer options"

    +
  2. +
  3. +

    Check that the options are "On".

    +
  4. +
  5. +

    Check that "USB debugging" is activated.

    +
  6. +
+
+
+
+Steps to enable developer options: 4, 5, 6 +
+
+
+

After this, do the step four in "From Android project to emulated device" and choose the connected smartphone.

+
+
+
+
+

Send APK to Android through email

+
+
+

When you build an APK, a dialog gives two options: locate or analyze. If the first one is chosen, Windows file explorer will be opened showing an APK that can be sent using email. Download the APK on your phone and click it to install.

+
+
+
+Steps to enable developer options: 4, 5, 6 +
+
+
+
+
+

Result

+
+
+

If everything goes correctly, the Ionic application will be ready to be tested.

+
+
+
+Application running on a real device +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-ionic-getting-started.html b/docs/devon4ng/1.0/guide-ionic-getting-started.html new file mode 100644 index 00000000..376f3ee4 --- /dev/null +++ b/docs/devon4ng/1.0/guide-ionic-getting-started.html @@ -0,0 +1,383 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Ionic 5 Getting started

+
+
+

Ionic is a front-end focused framework which offers different tools for developing hybrid mobile applications. The web technologies used for this purpose are CSS, Sass, HTML5 and Typescript.

+
+
+
+
+

Why Ionic?

+
+
+

Ionic is used for developing hybrid applications, which means not having to rely on a specific IDE such as Android Studio or Xcode. Furthermore, development of native apps requires learning different languages (Java/Kotlin for Android and Objective-C/Swift for Apple); with Ionic, a developer does not have to code the same functionality for multiple platforms, just use the adequate libraries and components.

+
+
+
+
+

Basic environment set up

+
+ +
+
+
+

Install Ionic CLI

+
+
+

Although the devonfw distribution comes with an already installed Ionic CLI, here are the steps to install it. The installation of Ionic is easy, just one command has to be written:

+
+
+

$ npm install -g @ionic/cli

+
+
+
+
+

Update Ionic CLI

+
+
+

If there was a previous installation of the Ionic CLI, it will need to be uninstalled due to a change in package name.

+
+
+
+
$ npm uninstall -g ionic
+$ npm install -g @ionic/cli
+
+
+
+

Basic project set up: the set up of an Ionic application is pretty immediate and can be done in one line:

+
+
+

ionic start <name> <template> --type=angular

+
+
+
    +
  • +

    ionic start: Command to create an app.

    +
  • +
  • +

    <name>: Name of the application.

    +
  • +
  • +

    <template>: Model of the application.

    +
  • +
  • +

    --type=angular: With this flag, the app produced will be based on angular.

    +
  • +
+
+
+

To create an empty project, the following command can be used:

+
+
+

ionic start MyApp blank --type=angular

+
+
+
+Ionic blank project +
+
+
+

The image above shows the directory structure generated.

+
+
+

There are more templates available that can be seen with the command +ionic start --list

+
+
+
+List of ionic templates +
+
+
+

The templates surrounded by red line are based on angular and comes with Ionic v5, while the others belong to earlier versions (before v4).

+
+
+ + + + + +
+ + +More info at https://ionicframework.com/docs. Remember to select Angular documentation, since Ionic supports React, Vue and Vanilla JS. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-ionic-pwa.html b/docs/devon4ng/1.0/guide-ionic-pwa.html new file mode 100644 index 00000000..af139049 --- /dev/null +++ b/docs/devon4ng/1.0/guide-ionic-pwa.html @@ -0,0 +1,545 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Ionic Progressive Web App

+
+
+

This guide is a continuation of the guide Angular PWAs, therefore, valid concepts explained there are still valid in this page but focused on Ionic.

+
+
+
+
+

Assumptions

+
+
+

This guide assumes that you already have installed:

+
+
+
    +
  • +

    NodeJS

    +
  • +
  • +

    npm package manager

    +
  • +
  • +

    Angular CLI / Nx CLI

    +
  • +
  • +

    Ionic 5 CLI

    +
  • +
  • +

    Capacitor

    +
  • +
+
+
+

Also, it is a good idea to read the document about PWA using Angular.

+
+
+
+
+

Sample Application

+
+
+
+Ionic 5 PWA Base +
+
Figure 1. Basic ionic PWA.
+
+
+

To explain how to build progressive web apps (PWA) using Ionic, a basic application is going to be built. This app will be able to take photos even without network using PWA elements.

+
+
+
+
+

Step 1: Create a new project

+
+
+

This step can be completed with one simple command: ionic start <name> <template>, where <name> is the name and <template> a model for the app. In this case, the app is going to be named basic-ion-pwa.

+
+
+

If you are using Nx, there is a pre-requisite to this step. And that is, you have to add the @nxtend/ionic-angular plugin to your Nx workspace. The command for that is npm install --save-dev @nxtend/ionic-angular. Once you have the plugin installed, you can generate an ionic app in your Nx workspace with the command nx generate @nxtend/ionic-angular:app basic-ion-pwa. (You can refer this guide if you want to get started with Nx).

+
+
+
+
+

Step 2: Structures and styles

+
+
+

The styles (scss) and structures (html) do not have anything specially relevant, just colors and ionic web components. The code can be found in devon4ts-samples.

+
+
+
+
+

Step 3: Add functionality

+
+
+

After this step, the app will allow users to take photos and display them in the main screen. +First we have to import three important elements:

+
+
+
    +
  • +

    DomSanitizer: Sanitizes values to be safe to use.

    +
  • +
  • +

    SafeResourceUrl: Interface for values that are safe to use as URL.

    +
  • +
  • +

    Plugins: Capacitor constant value used to access to the device’s camera and toast dialogs.

    +
  • +
+
+
+
+
  import { DomSanitizer, SafeResourceUrl } from '@angular/platform-browser';
+  import { Plugins, CameraResultType } from '@capacitor/core';
+
+
+
+

The process of taking a picture is enclosed in a takePicture() method. takePicture() calls the Camera’s getPhoto() function which returns an URL or an exception. If a photo is taken then the image displayed in the main page will be changed for the new picture, else, if the app is closed without changing it, a toast message will be displayed.

+
+
+
+
  export class HomePage {
+    image: SafeResourceUrl;
+    ...
+
+    async takePicture() {
+      try {
+        const image = await Plugins.Camera.getPhoto({
+          quality: 90,
+          allowEditing: true,
+          resultType: CameraResultType.Uri,
+        });
+
+        // Change last picture shown
+        this.image = this.sanitizer.bypassSecurityTrustResourceUrl(image.webPath);
+      } catch (e) {
+        this.show('Closing camera');
+      }
+    }
+
+    async show(message: string) {
+      await Plugins.Toast.show({
+        text: message,
+      });
+    }
+  }
+
+
+
+
+
+

Step 4: PWA Elements

+
+
+

When Ionic apps are not running natively, some resources like Camera do not work by default but can be enabled using PWA Elements. To use Capacitor’s PWA elements run npm install @ionic/pwa-elements and modify src/main.ts as shown below.

+
+
+
+
...
+
+// Import for PWA elements
+import { defineCustomElements } from '@ionic/pwa-elements/loader';
+
+if (environment.production) {
+  enableProdMode();
+}
+
+platformBrowserDynamic().bootstrapModule(AppModule)
+  .catch(err => console.log(err));
+
+// Call the element loader after the platform has been bootstrapped
+defineCustomElements(window);
+
+
+
+
+
+

Step 5: Make it Progressive.

+
+
+

Turning an Ionic 5 app into a PWA is pretty easy. The same module used to turn Angular apps into PWAs has to be added. To do so, run: ng add @angular/pwa. This command also creates an icons folder inside src/assets and contains angular icons for multiple resolutions. (Note: In an Nx workspace, you have to add it like a normal package using npm install @angular/pwa, and you have to manually add the icons). If you want to use other images, be sure that they have the same resolution, the names can be different but the file manifest.json has to be changed accordingly.

+
+
+
+
+

Step 6: Configure the app

+
+
+

manifest.json

+
+
+

Default configuration.

+
+
+

ngsw-config.json

+
+
+

At assetGroupsresources add a URLs field and a pattern to match PWA Elements scripts and other resources (images, styles, …​):

+
+
+
+
  "urls": ["https://unpkg.com/@ionic/pwa-elements@1.0.2/dist/**"]
+
+
+
+
+
+

Step 7: Check that your app is a PWA

+
+
+

To check if an app is a PWA, let's compare its normal behavior against itself built for production. Run the commands below in the project's root folder:

+
+
+

ionic build --configuration production to build the app using production settings. (nx build basic-ion-pwa --configuration production in your Nx workspace root).

+
+
+

npm install http-server to install an npm module that can serve your built application. Documentation here. A good alternative is also npm install serve. It can be checked here.

+
+
+

Go to the www folder running cd www.

+
+
+

http-server -o or serve to serve your built app.

+
+
+ + + + + +
+ + +To avoid installing anything unnecessary, npx can be used directly to serve the app, i.e. running npx serve [folder] will automatically download and run this HTTP server without installing it in the project dependencies. +
+
+
+
+Http server running +
+
Figure 2. Http server running on localhost:8081.
+
+
+

 
+In another console instance run ionic serve (nx serve basic-ion-pwa if using Nx CLI) to open the common app (not built).

+
+
+
+Ionic serve on Visual Studio Code console +
+
Figure 3. Ionic server running on localhost:8100.
+
+
+

 
+The first difference can be found on Developer tools → application, here it is seen that the PWA application (left) has a service worker and the common one does not.

+
+
+
+Application comparison +
+
Figure 4. Application service worker comparison.
+
+
+

 
+If the "offline" box is checked, it will force a disconnection from the network. In situations where users do not have connectivity or have a slow one, the PWA can still be accessed and used.

+
+
+
+Online offline apps +
+
Figure 5. Offline application.
+
+
+

 
+Finally, plugins like Lighthouse can be used to test whether an application is progressive or not.

+
+
+
+Lighthouse report +
+
Figure 6. Lighthouse report.
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-layout-with-angular-material.html b/docs/devon4ng/1.0/guide-layout-with-angular-material.html new file mode 100644 index 00000000..1d95b2db --- /dev/null +++ b/docs/devon4ng/1.0/guide-layout-with-angular-material.html @@ -0,0 +1,750 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Material Layout

+
+
+

The purpose of this guide is to get a basic understanding of creating layouts using Angular Material in a devon4ng application. We will create an application with a header containing some menu links and a sidenav with some navigation links.

+
+
+
+Finished application +
+
Figure 1. This is what the finished application will look like
+
+
+
+
+

Create a new angular application

+
+
+

We start with opening the devonfw IDE (right-click anywhere in your workspace and click "Open devonfw CMD shell here") and running the following command to start a project named devon4ng-mat-layout

+
+
+
    +
  • +

    ng new devon4ng-mat-layout --routing --style=scss. If you are using Nx, the command would be nx generate @nrwl/angular:app devon4ng-mat-layout --routing --style=scss in your Nx workspace. Click here to get started with using Nx.

    +
  • +
+
+
+

We are providing the routing flag so that a routing module is generated, and we are also setting the style sheet format to SCSS with --style=scss.

+
+
+

Once the creation process is complete, open your newly created application in Visual Studio Code. Try running the empty application by running the following command in the integrated terminal:

+
+
+
    +
  • +

    ng serve. (If you are using Nx, you have to specify the project name along with the --project flag, so the command becomes ng serve --project=devon4ng-mat-layout)

    +
  • +
+
+
+

Angular will spin up a server and you can check your application by visiting http://localhost:4200/ in your browser.

+
+
+
+Blank application +
+
Figure 2. Blank application
+
+
+
+
+

Adding Angular Material library to the project

+
+
+

Next we will add Angular Material to our application. In the integrated terminal, press Ctrl + C to terminate the running application and run the following command:

+
+
+
    +
  • +

    npm install --save @angular/material @angular/cdk @angular/animations

    +
  • +
+
+
+

You can also use Yarn to install the dependencies if you prefer that:

+
+
+
    +
  • +

    yarn add @angular/material @angular/cdk @angular/animations

    +
  • +
+
+
+

Once the dependencies are installed, we need to import the BrowserAnimationsModule in our AppModule for animations support.

+
+
+
Listing 1. Importing BrowserAnimationsModule in AppModule
+
+
import {BrowserAnimationsModule} from '@angular/platform-browser/animations';
+
+@NgModule({
+  ...
+  imports: [BrowserAnimationsModule],
+  ...
+})
+export class AppModule { }
+
+
+
+

Angular Material provides a host of components for designing our application. All the components are well structured into individual NgModules. For each component from the Angular Material library that we want to use, we have to import the respective NgModule.

+
+
+
Listing 2. We will be using the following components in our application:
+
+
import { MatIconModule, MatButtonModule, MatMenuModule, MatListModule, MatToolbarModule, MatSidenavModule } from '@angular/material';
+
+@NgModule({
+  ...
+  imports: [
+	...
+    MatIconModule,
+    MatButtonModule,
+    MatMenuModule,
+    MatListModule,
+    MatToolbarModule,
+    MatSidenavModule,
+	...
+	],
+  ...
+})
+export class AppModule { }
+
+
+
+

A better approach is to import and then export all the required components in a shared module. But for the sake of simplicity, we are importing all the required components in the AppModule itself.

+
+
+
+
+

==

+
+
+
+
  You can find a working copy of this application https://github.com/devonfw-sample/devon4ts-samples/tree/master/apps/angular-material-basic-layout[here]. The sample application is part of a Nx workspace, which means it is one of the many apps in a monorepo and capable of importing reusable code from a shared library. This guide describes the implementation by assuming a stand-alone single-repo application, but the pages and layout described in this sample app are similar to the ones used in another sample app in the monorepo (https://github.com/devonfw-sample/devon4ts-samples/tree/master/apps/angular-material-theming[angular-material-theming]), which is why we have exported the required components from a shared library and reused them in both the apps. As a result, the code in our monorepo will be slightly different. It would still help you in following this guide.
+== ==
+
+
+
+

Next, we include a theme in our application. Angular Material comes with four pre-defined themes: indigo-pink, deeppurple-amber, pink-bluegrey and purple-green. It is also possible to create our own custom theme, but that is beyond the scope of this guide. Including a theme is required to apply all of the core and theme styles to your application. +We will include the indigo-pink theme in our application by importing the indigo-pink.css file in our src/styles.scss:

+
+
+
Listing 3. In src/styles.scss:
+
+
@import "~@angular/material/prebuilt-themes/indigo-pink.css";
+
+
+
+

To use Material Design Icons along with the mat-icon component, we will load the Material Icons library in our src/index.html file

+
+
+
Listing 4. In src/index.html:
+
+
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
+
+
+
+
+
+

Development

+
+
+

Now that we have all the Angular Material related dependencies set up in our project, we can start coding. Let’s begin by adding a suitable margin and font to the body element of our single page application. We will add it in the src/styles.scss file to apply it globally:

+
+
+
Listing 5. In src/styles.scss:
+
+
body {
+  margin: 0;
+  font-family: "Segoe UI", Roboto, sans-serif;
+}
+
+
+
+

At this point, if we run our application, this is how it will look like:

+
+
+
+Angular Material added to the application +
+
Figure 3. Application with Angular Material set up
+
+
+

We will clear the app.component.html file and setup a header with a menu button and some navigational links. We will use mat-toolbar, mat-button, mat-menu, mat-icon and mat-icon-button for this:

+
+
+
Listing 6. app.component.html:
+
+
<mat-toolbar color="primary">
+  <button mat-icon-button aria-label="menu">
+    <mat-icon>menu</mat-icon>
+  </button>
+  <button mat-button [matMenuTriggerFor]="submenu">Menu 1</button>
+  <button mat-button>Menu 2</button>
+  <button mat-button>Menu 3</button>
+
+  <mat-menu #submenu="matMenu">
+    <button mat-menu-item>Sub-menu 1</button>
+    <button mat-menu-item [matMenuTriggerFor]="submenu2">Sub-menu 2</button>
+  </mat-menu>
+
+  <mat-menu #submenu2="matMenu">
+    <button mat-menu-item>Menu Item 1</button>
+    <button mat-menu-item>Menu Item 2</button>
+    <button mat-menu-item>Menu Item 3</button>
+  </mat-menu>
+
+</mat-toolbar>
+
+
+
+

The color attribute on the mat-toolbar element will give it the primary (indigo) color as defined by our theme. The color attribute works with most Angular Material components; the possible values are 'primary', 'accent' and 'warn'. +The mat-toolbar is a suitable component to represent a header. It serves as a placeholder for elements we want in our header. +Inside the mat-toolbar, we start with a button having mat-icon-button attribute, which itself contains a mat-icon element having the value menu. This will serve as a menu button which we can use to toggle the sidenav. +We follow it with some sample buttons having the mat-button attribute. Notice the first button has a property matMenuTriggerFor bound to a local reference submenu. As the property name suggests, the click of this button will display the mat-menu element with the specified local reference as a drop-down menu. The rest of the code is self explanatory.

+
+
+
+Header added to the application +
+
Figure 4. This is how our application looks with the first menu button (Menu 1) clicked.
+
+
+

We want to keep the sidenav toggling menu button on the left and move the rest to the right to make it look better. To do this we add a class to the menu icon button:

+
+
+
Listing 7. app.component.html:
+
+
...
+  <button mat-icon-button aria-label="menu" class="menu">
+    <mat-icon>menu</mat-icon>
+  </button>
+...
+
+
+
+

And in the app.component.scss file, we add the following style:

+
+
+
Listing 8. app.component.scss:
+
+
.menu {
+    margin-right: auto;
+}
+
+
+
+

The mat-toolbar element already has its display property set to flex. Setting the menu icon button's margin-right property to auto keeps itself on the left and pushes the other elements to the right.

+
+
+
+Final look of the header +
+
Figure 5. Final look of the header.
+
+
+

Next, we will create a sidenav. But before that let's create a couple of components to navigate between, the links of which we will add to the sidenav. +We will use the ng generate component (or ng g c command for short) to create Home and Data components. (Append --project=devon4ng-mat-layout to the command in an Nx workspace). We nest them in the pages sub-directory since they represent our pages.

+
+
+
    +
  • +

    ng g c pages/home

    +
  • +
  • +

    ng g c pages/data

    +
  • +
+
+
+

Let us set up the routing such that when we visit http://localhost:4200/ root url we see the HomeComponent and when we visit http://localhost:4200/data url we see the DataComponent. +We had opted for routing while creating the application, so we have the routing module app-routing.module.ts setup for us. In this file, we have the empty routes array where we set up our routes.

+
+
+
Listing 9. app-routing.module.ts:
+
+
import { HomeComponent } from './pages/home/home.component';
+import { DataComponent } from './pages/data/data.component';
+
+	const routes: Routes = [
+	  { path: '', component: HomeComponent },
+	  { path: 'data', component: DataComponent }
+	];
+
+
+
+

We need to provide a hook where the components will be loaded when their respective URLs are loaded. We do that by using the router-outlet directive in the app.component.html.

+
+
+
Listing 10. app.component.html:
+
+
...
+	</mat-toolbar>
+	<router-outlet></router-outlet>
+
+
+
+

Now when we visit the defined URLs we see the appropriate components rendered on screen.

+
+
+

Let's change the contents of the components to have something better.

+
+
+
Listing 11. home.component.html:
+
+
<h2>Home Page</h2>
+
+
+
+
Listing 12. home.component.scss:
+
+
h2 {
+    text-align: center;
+    margin-top: 50px;
+}
+
+
+
+
Listing 13. data.component.html:
+
+
<h2>Data Page</h2>
+
+
+
+
Listing 14. data.component.scss:
+
+
h2 {
+    text-align: center;
+    margin-top: 50px;
+}
+
+
+
+

The pages look somewhat better now:

+
+
+
+Home page +
+
Figure 6. Home page
+
+
+
+Data page +
+
Figure 7. Data page
+
+
+

Let us finally create the sidenav. To implement the sidenav we need to use 3 Angular Material components: mat-sidenav-container, mat-sidenav and mat-sidenav-content. +The mat-sidenav-container, as the name suggests, acts as a container for the sidenav and the associated content. So it is the parent element, and mat-sidenav and mat-sidenav-content are the children sibling elements. mat-sidenav represents the sidenav. We can put any content we want, though it is usually used to contain a list of navigational links. The mat-sidenav-content element is for containing the contents of our current page. Since we need the sidenav application-wide, we will put it in the app.component.html.

+
+
+
Listing 15. app.component.html:
+
+
...
+</mat-toolbar>
+
+<mat-sidenav-container>
+  <mat-sidenav mode="over" [disableClose]="false" #sidenav>
+    Sidenav
+  </mat-sidenav>
+  <mat-sidenav-content>
+    <router-outlet></router-outlet>
+  </mat-sidenav-content>
+</mat-sidenav-container>
+
+
+
+

The mat-sidenav has a mode property, which accepts one of the 3 values: over, push and side. It decides the behavior of the sidenav. mat-sidenav also has a disableClose property which accepts a boolean value. It toggles the behavior where we click on the backdrop or press the Esc key to close the sidenav. There are other properties which we can use to customize the appearance, behavior and position of the sidenav. You can find the properties documented online at https://material.angular.io/components/sidenav/api +We moved the router-outlet directive inside the mat-sidenav-content where it will render the routed component. +But if you check the running application in the browser, we don’t see the sidenav yet. That is because it is closed. We want to have the sidenav opened/closed at the click of the menu icon button on the left side of the header we implemented earlier. Notice we have set a local reference #sidenav on the mat-sidenav element. We can access this element and call its toggle() function to toggle open or close the sidenav.

+
+
+
Listing 16. app.component.html:
+
+
...
+  <button mat-icon-button aria-label="menu" class="menu" (click)="sidenav.toggle()">
+    <mat-icon>menu</mat-icon>
+  </button>
+...
+
+
+
+
+Sidenav works +
+
Figure 8. Sidenav is implemented
+
+
+

We can now open the sidenav by clicking the menu icon button. But it does not look right. The sidenav is only as wide as its content. Also the page does not stretch the entire viewport due to lack of content. +Let’s add the following styles to make the page fill the viewport:

+
+
+
Listing 17. app.component.scss:
+
+
...
+mat-sidenav-container {
+    position: absolute;
+    top: 64px;
+    left: 0;
+    right: 0;
+    bottom: 0;
+}
+
+
+
+

The sidenav width will be corrected when we add the navigational links to it. That is the only thing remaining to be done. Let's implement it now:

+
+
+
Listing 18. app.component.html:
+
+
...
+  <mat-sidenav [disableClose]="false" mode="over" #sidenav>
+	<mat-nav-list>
+      <a
+        id="home"
+        mat-list-item
+        [routerLink]="['./']"
+        (click)="sidenav.close()"
+        routerLinkActive="active"
+        [routerLinkActiveOptions]="{exact: true}"
+      >
+        <mat-icon matListAvatar>home</mat-icon>
+        <h3 matLine>Home</h3>
+        <p matLine>sample home page</p>
+      </a>
+      <a
+        id="sampleData"
+        mat-list-item
+        [routerLink]="['./data']"
+        (click)="sidenav.close()"
+        routerLinkActive="active"
+      >
+        <mat-icon matListAvatar>grid_on</mat-icon>
+        <h3 matLine>Data</h3>
+        <p matLine>sample data page</p>
+      </a>
+    </mat-nav-list>
+  </mat-sidenav>
+...
+
+
+
+

We use the mat-nav-list element to set a list of navigational links. We use the a tags with the mat-list-item directive. We implement a click listener on each link to close the sidenav when it is clicked. The routerLink directive is used to provide the URLs to navigate to. The routerLinkActive directive is used to provide the class name which will be added to the link when its URL is visited. Here we name the class active. To style it, let's modify the app.component.scss file:

+
+
+
Listing 19. app.component.scss:
+
+
...
+mat-sidenav-container {
+...
+	a.active {
+        background: #8e8d8d;
+        color: #fff;
+
+        p {
+            color: #4a4a4a;
+        }
+    }
+}
+
+
+
+

Now we have a working application with a basic layout: a header with some menu and a sidenav with some navigational links.

+
+
+
+Finished application +
+
Figure 9. Finished application
+
+
+
+
+

Conclusion

+
+
+

The purpose of this guide was to provide a basic understanding of creating layouts with Angular Material. The Angular Material library has a huge collection of ready to use components which can be found at https://material.angular.io/components/categories +It has provided documentation and example usage for each of its components. Going through the documentation will give a better understanding of using Angular Material components in our devon4ng applications.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-layout-with-clarity-angular.html b/docs/devon4ng/1.0/guide-layout-with-clarity-angular.html new file mode 100644 index 00000000..3c1f8cf5 --- /dev/null +++ b/docs/devon4ng/1.0/guide-layout-with-clarity-angular.html @@ -0,0 +1,675 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Clarity Layout

+
+
+

The purpose of this guide is to get a basic understanding of creating layouts using Angular Clarity in a devon4ng application. Angular Clarity is an HTML/CSS framework.

+
+
+
+1 +
+
Figure 1. This is what the finished application will look like
+
+
+
+
+

Let’s begin

+
+
+

We start with opening the console (in the Devon distribution folder) and running the following command to start a project named AngularClarityLayout.

+
+
+

devon ng new AngularClarityLayout

+
+
+

Select y when it asks whether it would like to add Angular routing and select SCSS when it asks for the style sheet format. You can also use the devonfw IDE CLI to create a new devon4ng application.

+
+
+

Once the creation process is complete, open your newly created application in Visual Studio Code. Try running the empty application by running the following command in the integrated terminal:

+
+
+

devon ng serve

+
+
+

Angular will spin up a server and you can check your application by visiting http://localhost:4200/ in your browser.

+
+
+
+2 +
+
Figure 2. Blank Application
+
+
+
+
+

Adding Angular Clarity framework to the project

+
+
+

Next we will add Angular Clarity to our application. In the integrated terminal, press CTRL + C to terminate the running application and run the following command:

+
+
+

Generate a new Angular application (if you haven’t already): +ng new my-app +Navigate to the directory: +cd my-app +Run the ng add command for Clarity: +ng add @clr/angular

+
+
+

After that we can see that the module is imported on app.module.ts

+
+
+
+
import { ClarityModule } from '@clr/angular';
+@NgModule({
+  declarations: [
+    AppComponent
+  ],
+imports: [
+    ClarityModule,
+ ],
+  providers: [],
+  bootstrap: [AppComponent]
+})
+export class AppModule { }
+
+
+
+
+3 +
+
Figure 3. ClarityModule
+
+
+
+
+

Development

+
+
+

After installing the library, we can start developing the code.

+
+
+

Adding styles in styles.css

+
+
+
+
body {
+  margin: 0;
+  font-family: "Segoe UI", Roboto, sans-serif;
+}
+
+
+
+

The first thing that we need to do is the menu.

+
+
+
+
<header class="header-6">
+  <div class="branding">
+    <a href="..." class="nav-link">
+      <clr-icon shape="vm-bug"></clr-icon>
+      <span class="title">Project Clarity</span>
+    </a>
+  </div>
+
+  <div class="header-nav">
+    <a href="..." class="active nav-link"><span class="nav-text">Dashboard</span></a>
+    <a href="..." class="nav-link"><span class="nav-text">Interactive Analytics</span></a>
+  </div>
+  <div class="header-actions">
+      <form class="search">
+        <label for="search_input">
+          <input id="search_input" type="text" placeholder="Search for keywords...">
+        </label>
+      </form>
+        <clr-dropdown>
+          <button class="nav-text" clrDropdownTrigger aria-label="open user profile">
+            devonfw@clarityangular
+            <clr-icon shape="caret down"></clr-icon>
+          </button>
+          <clr-dropdown-menu *clrIfOpen clrPosition="bottom-right">
+            <a href="..." clrDropdownItem>Settings</a>
+            <a href="..." clrDropdownItem>Log out</a>
+          </clr-dropdown-menu>
+        </clr-dropdown>
+<clr-dropdown>
+  <button class="nav-icon" clrDropdownTrigger aria-label="toggle settings menu">
+    <clr-icon shape="cog"></clr-icon>
+    <clr-icon shape="caret down"></clr-icon>
+  </button>
+  <clr-dropdown-menu *clrIfOpen clrPosition="bottom-right">
+    <a href="..." clrDropdownItem>About</a>
+    <a href="..." clrDropdownItem>Preferences</a>
+  </clr-dropdown-menu>
+</clr-dropdown>
+  </div>
+</header>
+
+
+
+
+4 +
+
Figure 4. Clarity Menu
+
+
+

The framework has its own CSS classes. +For example, the first class that we can see is header-6, which is a CSS style that changes the color of the menu. +We can also see that the framework has some icons we can choose from, where the tag is +<clr-icon shape="vm-bug"></clr-icon> +The next div in the menu will contain the navigation header. +As with everything in this framework, it has its own CSS class: <div class="header-nav"> +We can see 2 <a> tags with different CSS classes. +The first one has the active class. The difference between both of them shows like this.

+
+
+
+5 +
+
Figure 5. Difference
+
+
+

After seeing this piece of code, we can see that the other part of the menu has another CSS class: +<div class="header-actions"> +After this, all divs inside the last one are going to be aligned to the right.

+
+
+
+6 +
+
Figure 6. Search
+
+
+

To do this search bar, we just need to create a form with the class search: +<form class="search"> +To show the icon we use the tag <label for="search_input"></label> +And the normal input with the id="search_input" to match the previous label.

+
+
+
+7 +
+
Figure 7. Dropdown
+
+
+

To do the menu dropdown, we use the tag owned by the framework called +<clr-dropdown>

+
+
+
+
 <clr-dropdown>
+          <button class="nav-text" clrDropdownTrigger aria-label="open user profile">
+            devonfw@clarityangular
+            <clr-icon shape="caret down"></clr-icon>
+          </button>
+          <clr-dropdown-menu *clrIfOpen clrPosition="bottom-right">
+            <a href="..." clrDropdownItem>Settings</a>
+            <a href="..." clrDropdownItem>Log out</a>
+          </clr-dropdown-menu>
+  </clr-dropdown>
+
+
+
+
+8 +
+
Figure 8. Dropdown
+
+
+

The attribute clrDropdownTrigger is needed because otherwise we cannot use the tag <clr-dropdown-menu>, since that tag is only going to be activated if the clrDropdownTrigger is activated too. +Also, with the attribute clrPosition we can decide where the dropdown will be positioned.

+
+
+

For the other part of the menu, check the next figure.

+
+
+
+9 +
+
Figure 9. Button
+
+
+

It’s pretty much the same code, but we just change the attribute aria-label and the icons.

+
+
+
+10 +
+
Figure 10. Button Logic
+
+
+

After we have the whole menu finished, it is time to see the card.

+
+
+
+11 +
+
Figure 11. Card
+
+
+

In the first figure, all the elements are aligned to the center. +To do this, we just need to use the classes provided by the framework. +<div class="clr-main-container"> +The first CSS class is giving the style to the main container.

+
+
+

<div class="clr-row clr-justify-content-center">

+
+
+

This one is saying that it's going to be sorted by rows and all the content inside of this div will be in the center. + <div class="clr-col-lg-4"> +And the last one is the size of the div. This framework has a maximum of 12, like Bootstrap. +More examples in: +https://clarity.design/documentation/grid

+
+
+

To create the card with its border and all the properties we just call the class +<div class="card"> +To do the tooltip, check the next figure.

+
+
+
+12 +
+
Figure 12. Tooltip
+
+
+

We just need to do an <a> tag with these attributes: + <a href="…​" role="tooltip" aria-haspopup="true" class="tooltip tooltip-bottom-right"> +The class is giving us the tooltip and its position. +After that we have the: +<clr-icon shape="info-circle" size="24"></clr-icon> +That is giving us the icon and the size. +And the content of the tooltip is coming from the: +<span class="tooltip-content"> +After the tooltip is done, we just need to add an image and the text. +To do it we just need to write the following code:

+
+
+
+
<div class="card-img">
+            <img src="../assets/images/clarity.png">
+          </div>
+          <div class="card-block">
+            <p class="card-text">
+              Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard
+              dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen
+              book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially
+              unchanged. It was popularised in the 1930s with the release of Letraset sheets containing Lorem Ipsum passages, and more
+              recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum.
+            </p>
+          </div>
+
+
+
+

For the next card, check next figure

+
+
+
+13 +
+
Figure 13. Card
+
+
+

We are using the same class from the card that we used before. +But to do the numbers on the top we used: +<span class="badge"> +And to give some colors we used: +<span class="badge badge-purple"> for example

+
+
+

The next step is to do the bars with the progress; to do it we just need to create a div with the class "progress-block"

+
+
+
+
        <div class="progress-block">
+              <label>Label</label>
+                <div class="progress-static">
+                  <div class="progress-meter" data-value="25"></div>
+                </div>
+              </div>
+
+
+
+

To do the bar with that width and height we use the class "progress-static". +And finally, to change the color and the value we use the classes "progress-meter" and "progress success". +Depending on which class we are using, we will have different attributes to set the value. +If we have the progress-static we will use +<div class="progress-meter" data-value="43"></div> +If we have the progress success we need to use: +<progress value="75" max="100" data-displayval="…​%"></progress>

+
+
+

As you can see, the card has a footer. Check next picture

+
+
+
+14 +
+
Figure 14. Card
+
+
+

We just need to add a div with this class inside of the card div: +<div class="card-footer"> +And it will link to the card:

+
+
+

<a class="card-link" (click)="send()">Click to see the modal</a> +And the method send() is just a method that converts the variable basic to true when it is false, and to false when it is true:

+
+
+
+
  basic = false;
+  send(): void {
+    this.basic = !this.basic;
+  }
+
+
+
+

So in the HTML file we need to write a div with an ngIf to check if the variable is true, and create a modal with the tag <clr-modal> and the attribute clrModalOpen bound to the same name the variable has.

+
+
+
+
<div *ngIf="basic">
+        <clr-modal [(clrModalOpen)]="basic">
+
+
+
+

After this we need to create the body of the modal, to do it we will use a div with the classes from the framework

+
+
+
+
<div class="modal-body">
+            <p>But not much to say...</p>
+          </div>
+          <div class="modal-footer">
+            <button type="button" class="btn btn-primary" (click)="basic = true">OK</button>
+            <button type="button" class="btn btn-outline" (click)="basic = false">Cancel</button>          </div>
+        </clr-modal>
+
+
+
+

To create the body of the modal, we just create a div with the class +<div class="modal-body"> +And to create the footer: +<div class="modal-footer"> +We can see that the footer has 2 buttons, with different styles coming from the framework and with 2 methods setting different values: +(click)="basic = true" on the OK button — this button won't change the variable value, so it does not do anything. +(click)="basic = false" on the Cancel button — this button will change the value of the variable and will close the modal.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-layout-with-ng-zorro-layout.html b/docs/devon4ng/1.0/guide-layout-with-ng-zorro-layout.html new file mode 100644 index 00000000..fb0c0241 --- /dev/null +++ b/docs/devon4ng/1.0/guide-layout-with-ng-zorro-layout.html @@ -0,0 +1,897 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

NG ZORRO Layout

+
+
+

The purpose of this guide is to get a basic understanding of creating layouts using NG ZORRO in a devon4ng application.

+
+
+
+figure1 +
+
Figure 1. This is what the finished application will look like
+
+
+
+
+

Let’s begin

+
+
+

We start with opening the console (in the Devon distribution folder) and running the following command to start a project named AngularZorroLayout. +devon ng new AngularZorroLayout

+
+
+
    +
  • +

    devon ng new AngularZorroLayout

    +
  • +
+
+
+

Select y when it asks whether it would like to add Angular routing and select scss when it asks for the style sheet format.

+
+
+

Once the creation process is complete, open your newly created application in Visual Studio Code. Try running the empty application by running the following command in the integrated terminal:

+
+
+
    +
  • +

    devon ng serve

    +
  • +
+
+
+

Angular will spin up a server and you can check your application by visiting http://localhost:4200/ in your browser.

+
+
+
+
+

Adding Angular ZORRO library to the project

+
+
+
Blank application
+

Next we will add NG ZORRO to our application. In the integrated terminal, press CTRL + C to terminate the running application and run the following command:

+
+
+
    +
  • +

    ng add ng-zorro-antd

    +
  • +
+
+
+
+figure3 +
+
Figure 2. CLI Angular ZORRO Layout
+
+
+

Or if we would like to customize our workflow we can install it with:

+
+
+
    +
  • +

    npm install ng-zorro-antd

    +
  • +
+
+
+

After running that command, we need to import the pre-built styles in angular.json

+
+
+
Listing 1. Styles on angular.json
+
+
"styles": [
+    "src/styles.scss",
+    "node_modules/ng-zorro-antd/src/ng-zorro-antd.min.css",
+    "node_modules/ng-zorro-antd/resizable/style/index.min.css"
+],
+
+
+
+

Once the dependencies are installed, we need to import the BrowserAnimationsModule in our AppModule for animations support.

+
+
+
Listing 2. Importing BrowserAnimationsModule in AppModule
+
+
import {BrowserAnimationsModule} from '@angular/platform-browser/animations';
+
+@NgModule({
+  ...
+  imports: [BrowserAnimationsModule],
+  ...
+})
+export class AppModule { }
+
+
+
+
+
+

Internationalization

+
+
+

The default language of ng-zorro-antd is Chinese. If you want to use other languages, you can follow the instructions below. You can also set the language with ng add ng-zorro-antd when creating the project.

+
+
+

ng-zorro-antd provides several configuration tokens for global configuration of international copy and date, NZ_I18N for international copy.

+
+
+
Listing 3. Importing Configuration in App.module
+
+
import { NZ_I18N, en_US } from 'ng-zorro-antd/i18n';
+
+@NgModule({
+  ...
+  providers: [
+    { provide: NZ_I18N, useValue: en_US },
+  ...
+})
+export class AppModule { }
+
+
+
+

To finish the configuration, we need to import the icons from the Library.

+
+
+
Listing 4. Importing Icons in App.module
+
+
import * as AllIcons from '@ant-design/icons-angular/icons';
+
+const antDesignIcons = AllIcons as {
+  [key: string]: IconDefinition;
+};
+const icons: IconDefinition[] = Object.keys(antDesignIcons).map(key => antDesignIcons[key]);
+
+
+
+
+
+

Development

+
+
+

We have all the NG ZORRO related dependencies set up in our project, we can start coding.

+
+
+
Listing 5. Adding styles in styles.css
+
+
body {
+  margin: 0;
+  font-family: "Segoe UI", Roboto, sans-serif;
+}
+
+
+
+

The next step is to create a component for the header. We will create it with the following command. +We will create a components folder to follow good practices.

+
+
+

ng generate component components/header

+
+
+

In this component, we are going to create the menu.

+
+
+

First, we need to import the menu module on app.module.

+
+
+
Listing 6. Adding module in app.module
+
+
import { NzMenuModule } from 'ng-zorro-antd/menu';
+
+
+
+

And we will create the header with this code:

+
+
+
+
<ul nz-menu nzMode="horizontal" class="container">
+  <li nz-menu-item nzSelected>
+    <i nz-icon nzType="mail"></i>
+    Navigation One
+  </li>
+  <li nz-menu-item nzDisabled>
+    <i nz-icon nzType="appstore"></i>
+    Navigation Two
+  </li>
+  <li nz-submenu nzTitle="Navigation Three - Submenu" nzIcon="setting">
+    <ul>
+      <li nz-menu-group nzTitle="Modals">
+        <ul>
+             <li nz-menu-item nz-button (click)="info()"> Info</li>
+               <li nz-menu-item nz-button (click)="success()">Success</li>
+             <li nz-menu-item nz-button (click)="error()">Error</li>
+             <li nz-menu-item nz-button (click)="warning()">Warning</li>
+        </ul>
+      </li>
+      <li nz-menu-group nzTitle="Item 2">
+        <ul>
+          <li nz-menu-item>Option 3</li>
+          <li nz-submenu nzTitle="Sub Menu">
+            <ul>
+              <li nz-menu-item nzDisabled>Option 4</li>
+              <li nz-menu-item>Option 5</li>
+            </ul>
+          </li>
+          <li nz-submenu nzDisabled nzTitle="Disabled Sub Menu">
+            <ul>
+              <li nz-menu-item>Option 6</li>
+              <li nz-menu-item>Option 7</li>
+            </ul>
+          </li>
+        </ul>
+      </li>
+    </ul>
+  </li>
+  <li nz-menu-item>
+    <a href="https://ng.ant.design" target="_blank" rel="noopener noreferrer">Navigation Four - Link</a>
+  </li>
+</ul>
+
+
+
+
+figure4 +
+
Figure 3. Header component
+
+
+

Note +The menu has some properties like nzTitle, nzButton, nzDisabled or nzSelected.

+
+
+

And modify the styles on header.component.scss

+
+
+
Listing 7. Adding styles on header.scss
+
+
.container{
+  margin: auto;
+  text-align: center;
+}
+
+
+
+

The library provides enough styles and we don’t need to change too much. +It will look like this:

+
+
+
+figure5 +
+
Figure 4. Header Component
+
+
+

In the menu, we added an example of a modal

+
+
+

To use it we need to import that module on app.module.ts

+
+
+
+
import { NzModalModule } from 'ng-zorro-antd/modal';
+
+
+
+

In the HTML file we just need to create a method on (click) to call the modal.

+
+
+
+
  <li nz-submenu nzTitle="Navigation Three - Submenu" nzIcon="setting">
+    <ul>
+      <li nz-menu-group nzTitle="Modals">
+        <ul>
+             <li nz-menu-item nz-button (click)="info()"> Info</li>
+               <li nz-menu-item nz-button (click)="success()">Success</li>
+             <li nz-menu-item nz-button (click)="error()">Error</li>
+             <li nz-menu-item nz-button (click)="warning()">Warning</li>
+        </ul>
+      </li>
+
+
+
+
+figure6 +
+
Figure 5. Modal
+
+
+

And now, we just need to create those methods in the file header.component.ts. +We also need to import the modal service and use it in the constructor of the class.

+
+
+

import {NzModalService} from 'ng-zorro-antd/modal'; +constructor(private modal: NzModalService){}

+
+
+
+figure7 +
+
Figure 6. Import ModalService from ZORRO
+
+
+
+
  info(): void {
+    this.modal.info({
+      nzTitle: 'This is a notification message',
+      nzContent: '<p>some messages...some messages...</p><p>some messages...some messages...</p>',
+      nzOnOk: () => console.log('Info OK')
+    });
+  }
+
+  success(): void {
+    this.modal.success({
+      nzTitle: 'This is a success message',
+      nzContent: 'some messages...some messages...'
+    });
+  }
+
+  error(): void {
+    this.modal.error({
+      nzTitle: 'This is an error message',
+      nzContent: 'some messages...some messages...'
+    });
+  }
+
+  warning(): void {
+    this.modal.warning({
+      nzTitle: 'This is an warning message',
+      nzContent: 'some messages...some messages...'
+    });
+  }
+
+
+
+
+figure8 +
+
Figure 7. Logic on ts file looks like
+
+
+

Once the header is done, time to create the main component. In this case will be those elements.

+
+
+
+figure9 +
+
Figure 8. Main Component
+
+
+

The first element that we can see is a carousel. +To implement it in the code, we just need to do the same as we did before: import the module and import the component. +So we import the following module in app.module

+
+
+
Listing 8. Import carousel Module
+
+
import { NzCarouselModule } from 'ng-zorro-antd/carousel';
+
+
+
+

And use the label “nz-carousel” to create the Carousel, it has some attributes coming from the library.

+
+
+
+figure10 +
+
Figure 9. Import ModalService from ZORRO
+
+
+

NOTE +The loop that we are doing determines how many images we will have. +And finally, we will add some styles.

+
+
+
+
.container{
+  margin: auto;
+  text-align: center;
+  margin-top: 20px;
+}
+[nz-carousel-content] {
+        text-align: center;
+        height: 160px;
+        line-height: 160px;
+        background: #364d79;
+        color: #fff;
+        overflow: hidden;
+      }
+
+      h3 {
+        color: #fff;
+        margin-bottom: 0;
+      }
+
+nz-content{
+  padding: 0 30px 0 30px;
+}
+
+
+
+
+figure11 +
+
Figure 10. Styling
+
+
+

Next element, the cards

+
+
+
+figure12 +
+
Figure 11. Cards1
+
+
+
+figure13 +
+
Figure 12. Cards Unlocked
+
+
+

We will have a button to activate or deactivate the cards. +To do it, we will write the next code in our file html.

+
+
+
+
        <div nz-row>
+          <div nz-col [nzXs]="{ span: 5, offset: 1 }" [nzLg]="{ span: 6, offset: 2 }">
+            <nz-card nzXs="8">
+              <nz-skeleton [nzActive]="true" [nzLoading]="loading" [nzAvatar]="{ size: 'large' }">
+                <nz-card-meta [nzAvatar]="avatarTemplate" nzTitle="Card title" nzDescription="This is the description">
+                </nz-card-meta>
+              </nz-skeleton>
+            </nz-card>
+          </div>
+          <div nz-col [nzXs]="{ span: 11, offset: 1 }" [nzLg]="{ span: 6, offset: 2 }">
+            <nz-card nzXs="8">
+              <nz-skeleton [nzActive]="true" [nzLoading]="!loading" [nzAvatar]="{ size: 'small' }">
+                <nz-card-meta [nzAvatar]="avatarTemplate" nzTitle="Card title" nzDescription="This is the description">
+                </nz-card-meta>
+              </nz-skeleton>
+            </nz-card>
+          </div>
+          <div nz-col [nzXs]="{ span: 5, offset: 1 }" [nzLg]="{ span: 6, offset: 2 }">
+            <nz-card nzXs="8">
+              <nz-skeleton [nzActive]="true" [nzLoading]="loading" [nzAvatar]="{ size: 'large' }">
+                <nz-card-meta [nzAvatar]="avatarTemplate" nzTitle="Card title" nzDescription="This is the description">
+                </nz-card-meta>
+              </nz-skeleton>
+            </nz-card>
+          </div>
+        </div>
+
+
+
+
+figure14 +
+
Figure 13. Cards HTML
+
+
+

The first thing that we can see is a button to switch between showing the cards or not. +So, first, we need to import that switch.

+
+
+

import { NzSwitchModule } from 'ng-zorro-antd/switch';

+
+
+

The next step is to write the HTML code. It’s simple:

+
+
+

<nz-switch [(ngModel)]="loading"></nz-switch>

+
+
+

So now, in the ts file we just need to create a Boolean variable. +With the ngModel and the switch, each time we click the button the variable will toggle between true and false. +After creating the button, we are going to create the card.

+
+
+

We need to import the following module in app.module: +import { NzCardModule } from 'ng-zorro-antd/card'; +And after that we need to write the HTML code.

+
+
+
+figure15 +
+
Figure 14. Cards Logic
+
+
+

We will find a lot of attributes. +We can find their explanation in the API: +NG ZORRO

+
+
+

Last Element, the table

+
+
+
+figure16 +
+
Figure 15. Table
+
+
+

We need to import the module +import { NzTableModule } from 'ng-zorro-antd/table';

+
+
+

After that we can see a button, this is just to create a new row in the table. +The button only has a method to add a new value to our array

+
+
+

Table Interface

+
+
+
+
interface ItemData {
+  id: string;
+  name: string;
+  age: string;
+  address: string;
+}
+
+
+
+
+figure17 +
+
Figure 16. Table Interface
+
+
+

Add Row Method

+
+
+
+
  addRow(): void {
+    this.listOfData = [
+      ...this.listOfData,
+      {
+        id: `${this.i}`,
+        name: `Edward King ${this.i}`,
+        age: '32',
+        address: `London, Park Lane no. ${this.i}`
+      }
+    ];
+    this.i++;
+  }
+
+
+
+
+figure18 +
+
Figure 17. Add Method
+
+
+

After that we need to create the table

+
+
+
+
<nz-table #editRowTable nzBordered [nzData]="listOfData">
+          <thead>
+            <tr>
+              <th nzWidth="30%">Name</th>
+              <th>Age</th>
+              <th>Address</th>
+              <th>Action</th>
+            </tr>
+          </thead>
+          <tbody>
+            <tr *ngFor="let data of editRowTable.data" class="editable-row">
+              <td>
+                <div class="editable-cell" [hidden]="editId == data.id" (click)="startEdit(data.id)">
+                  {{ data.name }}
+                </div>
+                <input [hidden]="editId !==  data.id" type="text" nz-input [(ngModel)]="data.name" (blur)="stopEdit()" />
+              </td>
+              <td>{{ data.age }}</td>
+              <td>{{ data.address }}</td>
+              <td>
+                <a nz-popconfirm nzPopconfirmTitle="Sure to delete?" (nzOnConfirm)="deleteRow(data.id)">Delete</a>
+              </td>
+            </tr>
+          </tbody>
+        </nz-table>
+
+
+
+
+figure19 +
+
Figure 18. Table HTML Logic
+
+
+

To create the table we need to use the tag <nz-table>, and after that it is like an HTML table, with <thead> and <tbody>

+
+
+

As shown with the *ngFor, we display the data from the array created before. +In the first cell we can see that we have a method to edit the value.

+
+
+
+figure20 +
+
Figure 19. Table methods
+
+
+
+figure21 +
+
Figure 20. Table
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-layout-with-primeng-angular.html b/docs/devon4ng/1.0/guide-layout-with-primeng-angular.html new file mode 100644 index 00000000..106cc84d --- /dev/null +++ b/docs/devon4ng/1.0/guide-layout-with-primeng-angular.html @@ -0,0 +1,1721 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

PrimeNG Layout

+
+
+

The purpose of this guide is to get a basic understanding of creating layouts of PrimeNG in a devon4ng application. PrimeNG is a HTML/CSS framework.

+
+
+
+Screenshot 0 +
+
Figure 1. This is what the finished application will look like
+
+
+
+
+

Let’s begin

+
+
+

We start with opening the console (in the Devon distribution folder) and running the following command to start a project named AngularPrimeNgLayout.

+
+
+

devon ng new AngularPrimeNgLayout

+
+
+

Select y when it asks whether it would like to add Angular routing and select SCSS when it asks for the style sheet format. You can also use the devonfw IDE CLI to create a new devon4ng application.

+
+
+

Once the creation process is complete, open your newly created application in Visual Studio Code. Try running the empty application by running the following command in the integrated terminal:

+
+
+

devon ng serve

+
+
+

Angular will spin up a server and you can check your application by visiting http://localhost:4200/ in your browser.

+
+
+
+Screenshot 1 +
+
Figure 2. Blank Application
+
+
+
+
+

Adding PrimeNG to the project

+
+
+

Next we will add PrimeNG to our application. In the integrated terminal, press CTRL + C to terminate the running application and run the following command:

+
+
+

Run the ng add command for PrimeNG:

+
+
+
+
npm install primeng
+npm install primeicons --save
+
+
+
+

After that we can see that the module is imported on app.module.ts

+
+
+

The css dependencies are as follows, Prime Icons, theme of your choice and structural css of components.

+
+
+
+
 "src/styles.scss",
+ "node_modules/primeicons/primeicons.css",
+ "node_modules/primeng/resources/themes/saga-blue/theme.css",
+ "node_modules/primeng/resources/primeng.min.css"
+
+
+
+
+Screenshot 2 +
+
Figure 3. Styles on angular.json
+
+
+
+
+

Development

+
+
+

Now we need to create a component for the header. We will create it with the following command. +We will create a components folder to follow good practices.

+
+
+
+
ng generate component components/header
+
+
+
+

In this component, we are going to create the menu.

+
+
+
+Screenshot 5 +
+
Figure 4. Menu
+
+
+
+Screenshot 6 +
+
Figure 5. Menu Dropdown
+
+
+

And will create the code like:

+
+
+
+Screenshot 3 +
+
Figure 6. Header
+
+
+
+
<p-menubar [model]="items">
+  <ng-template pTemplate="start">
+    <img src="assets/images/primeng.svg" height="40" class="p-mr-2">
+  </ng-template>
+</p-menubar>
+
+
+
+

As we can see, the menu has some properties from the library.

+
+
+

<p-menubar> is the first one, with this label we can create the menu and with the <ng-template pTemplate> we decided where the menu will be aligned.

+
+
+

The [model]=items means that the menu is looking for the "items" to print.

+
+
+

The items is an array, but its type comes from PrimeNG. So we just need to import the MenuItem.

+
+
+
+
import { MenuItem } from 'primeng/api';
+
+
+
+

And give some values.

+
+
+
+
this.items = [
+      {
+        label: 'File',
+        icon: 'pi pi-fw pi-file',
+        items: [
+          {
+            label: 'New',
+            icon: 'pi pi-fw pi-plus',
+            items: [
+              {
+                label: 'Bookmark',
+                icon: 'pi pi-fw pi-bookmark'
+              },
+              {
+                label: 'Video',
+                icon: 'pi pi-fw pi-video'
+              },
+
+            ]
+          },
+          {
+            label: 'Delete',
+            icon: 'pi pi-fw pi-trash'
+          },
+          {
+            separator: true
+          },
+          {
+            label: 'Export',
+            icon: 'pi pi-fw pi-external-link'
+          }
+        ]
+      },
+      {
+        label: 'Edit',
+        icon: 'pi pi-fw pi-pencil',
+        items: [
+          {
+            label: 'Left',
+            icon: 'pi pi-fw pi-align-left'
+          },
+          {
+            label: 'Right',
+            icon: 'pi pi-fw pi-align-right'
+          },
+          {
+            label: 'Center',
+            icon: 'pi pi-fw pi-align-center'
+          },
+          {
+            label: 'Justify',
+            icon: 'pi pi-fw pi-align-justify'
+          },
+
+        ]
+      },
+      {
+        label: 'Users',
+        icon: 'pi pi-fw pi-user',
+        items: [
+          {
+            label: 'New',
+            icon: 'pi pi-fw pi-user-plus',
+
+          },
+          {
+            label: 'Delete',
+            icon: 'pi pi-fw pi-user-minus',
+
+          },
+          {
+            label: 'Search',
+            icon: 'pi pi-fw pi-users',
+            items: [
+              {
+                label: 'Filter',
+                icon: 'pi pi-fw pi-filter',
+                items: [
+                  {
+                    label: 'Print',
+                    icon: 'pi pi-fw pi-print'
+                  }
+                ]
+              },
+              {
+                icon: 'pi pi-fw pi-bars',
+                label: 'List'
+              }
+            ]
+          }
+        ]
+      },
+      {
+        label: 'Events',
+        icon: 'pi pi-fw pi-calendar',
+        items: [
+          {
+            label: 'Edit',
+            icon: 'pi pi-fw pi-pencil',
+            items: [
+              {
+                label: 'Save',
+                icon: 'pi pi-fw pi-calendar-plus'
+              },
+              {
+                label: 'Delete',
+                icon: 'pi pi-fw pi-calendar-minus'
+              },
+
+            ]
+          },
+          {
+            label: 'Archieve',
+            icon: 'pi pi-fw pi-calendar-times',
+            items: [
+              {
+                label: 'Remove',
+                icon: 'pi pi-fw pi-calendar-minus'
+              }
+            ]
+          }
+        ]
+      },
+      {
+        label: 'Quit',
+        icon: 'pi pi-fw pi-power-off'
+      }
+    ];
+  }
+
+
+
+
+Screenshot 4 +
+
Figure 7. Menu Values
+
+
+

After the menu is done, the next step is to create the main container; in this case it will be the table.

+
+
+
+Screenshot 7 +
+
Figure 8. Table
+
+
+

As this is a very complex table, we are going to explain it component by component

+
+
+
+Screenshot 8 +
+
Figure 9. Buttons
+
+
+

To create those buttons we just need to write this piece of code

+
+
+
+
<p-toolbar styleClass="p-mb-4">
+    <ng-template pTemplate="left">
+      <button pButton pRipple label="New" icon="pi pi-plus" class="p-button-success p-mr-2"
+        (click)="openNew()"></button>
+      <button pButton pRipple label="Delete" icon="pi pi-trash" class="p-button-danger"
+        (click)="deleteSelectedProducts()" [disabled]="!selectedProducts || !selectedProducts.length"></button>
+    </ng-template>
+
+    <ng-template pTemplate="right">
+      <p-fileUpload mode="basic" accept="image/*" [maxFileSize]="1000000" label="Import" chooseLabel="Import"
+        class="p-mr-2 p-d-inline-block"></p-fileUpload>
+      <button pButton pRipple label="Export" icon="pi pi-upload" class="p-button-help"></button>
+    </ng-template>
+  </p-toolbar>
+
+
+
+
+Screenshot 9 +
+
Figure 10. Buttons Code
+
+
+

We can see some labels and attributes, for example <p-toolbar>, pButton, <p-fileUpload>.

+
+
+

To use them, we need to import on app.module with the following code

+
+
+
+
import { TableModule } from 'primeng/table';
+import { ButtonModule } from 'primeng/button';
+import {ToolbarModule} from 'primeng/toolbar';
+import {FileUploadModule} from 'primeng/fileupload';
+
+
+
+

We see the first method is openNew(); when we call this method, a variable is set to true

+
+
+
+
  openNew(): any {
+    this.product = {};
+    this.submitted = false;
+    this.productDialog = true;
+  }
+
+
+
+

And when the productDialog is true, we will open a modal with the following code, and it will look like:

+
+
+
+Screenshot 11 +
+
Figure 11. Modal
+
+
+
+
<p-dialog [(visible)]="productDialog" [style]="{width: '450px'}" header="Product Details" [modal]="true"
+  styleClass="p-fluid">
+  <ng-template pTemplate="content">
+    <div class="p-field">
+      <label for="name">Name</label>
+      <input type="text" pInputText id="name" [(ngModel)]="product.name" required autofocus />
+      <small class="p-invalid" *ngIf="submitted && !product.name">Name is required.</small>
+    </div>
+    <div class="p-field">
+      <label for="description">Description</label>
+      <textarea id="description" pInputTextarea [(ngModel)]="product.description" required rows="3"
+        cols="20"></textarea>
+    </div>
+
+    <div class="p-field">
+      <label class="p-mb-3">Category</label>
+      <div class="p-formgrid p-grid">
+        <div class="p-field-radiobutton p-col-6">
+          <p-radioButton id="category1" name="category" value="Accessories" [(ngModel)]="product.category">
+          </p-radioButton>
+          <label for="category1">Accessories</label>
+        </div>
+        <div class="p-field-radiobutton p-col-6">
+          <p-radioButton id="category2" name="category" value="Clothing" [(ngModel)]="product.category"></p-radioButton>
+          <label for="category2">Clothing</label>
+        </div>
+        <div class="p-field-radiobutton p-col-6">
+          <p-radioButton id="category3" name="category" value="Electronics" [(ngModel)]="product.category">
+          </p-radioButton>
+          <label for="category3">Electronics</label>
+        </div>
+        <div class="p-field-radiobutton p-col-6">
+          <p-radioButton id="category4" name="category" value="Fitness" [(ngModel)]="product.category"></p-radioButton>
+          <label for="category4">Fitness</label>
+        </div>
+      </div>
+    </div>
+
+    <div class="p-formgrid p-grid">
+      <div class="p-field p-col">
+        <label for="price">Price</label>
+        <p-inputNumber id="price" [(ngModel)]="product.price" mode="currency" currency="USD" locale="en-US">
+        </p-inputNumber>
+      </div>
+      <div class="p-field p-col">
+        <label for="quantity">Quantity</label>
+        <p-inputNumber id="quantity" [(ngModel)]="product.quantity"></p-inputNumber>
+      </div>
+    </div>
+  </ng-template>
+
+  <ng-template pTemplate="footer">
+    <button pButton pRipple label="Cancel" icon="pi pi-times" class="p-button-text" (click)="hideDialog()"></button>
+    <button pButton pRipple label="Save" icon="pi pi-check" class="p-button-text" (click)="saveProduct()"></button>
+  </ng-template>
+</p-dialog>
+
+
+
+
+Screenshot 10 +
+
Figure 12. Modal Code
+
+
+

To start developing this, we need to import DialogModule, ConfirmDialogModule, InputTextModule, RadioButtonModule and FormsModule; to do it we just need to write in app.module

+
+
+
+
import { DialogModule } from 'primeng/dialog';
+import { ConfirmDialogModule } from 'primeng/confirmdialog';
+import {FormsModule} from '@angular/forms';
+import { RadioButtonModule } from 'primeng/radiobutton';
+import { InputTextModule } from 'primeng/inputtext';
+
+
+
+
+Screenshot 11 +
+
Figure 13. Modal Code
+
+
+

After that we can see a modal with the form, and when we click on the "Save" button, we will create a new product.

+
+
+
+
  saveProduct(): any {
+    this.submitted = true;
+
+    if (this.product.name.trim()) {
+      if (this.product.id) {
+        this.products[this.findIndexById(this.product.id)] = this.product;
+        this.messageService.add({ severity: 'success', summary: 'Successful', detail: 'Product Updated', life: 3000 });
+      }
+      else {
+        this.product.id = this.createId();
+        this.product.image = 'product-placeholder.svg';
+        this.products.push(this.product);
+        this.messageService.add({ severity: 'success', summary: 'Successful', detail: 'Product Created', life: 3000 });
+      }
+
+      this.products = [...this.products];
+      this.productDialog = false;
+      this.product = {};
+    }
+  }
+
+
+
+

After finishing the first buttons, we just need to do the rest of the table

+
+
+
+
<p-table #dt [value]="products" [rows]="10" [paginator]="true"
+    [globalFilterFields]="['name','country.name','representative.name','status']" [(selection)]="selectedProducts"
+    [rowHover]="true" dataKey="id" currentPageReportTemplate="Showing {first} to {last} of {totalRecords} entries"
+    [showCurrentPageReport]="true">
+    <ng-template pTemplate="caption">
+      <div class="p-d-flex p-ai-center p-jc-between">
+        <h5 class="p-m-0">Manage Products</h5>
+        <span class="p-input-icon-left">
+          <i class="pi pi-search"></i>
+          <input pInputText type="text" (input)="dt.filterGlobal($event.target.value, 'contains')"
+            placeholder="Search..." />
+        </span>
+      </div>
+    </ng-template>
+    <ng-template pTemplate="header">
+      <tr>
+        <th style="width: 3rem">
+          <p-tableHeaderCheckbox></p-tableHeaderCheckbox>
+        </th>
+        <th pSortableColumn="name">Name <p-sortIcon field="name"></p-sortIcon>
+        </th>
+        <th pSortableColumn="price">Price <p-sortIcon field="price"></p-sortIcon>
+        </th>
+        <th pSortableColumn="category">Category <p-sortIcon field="category"></p-sortIcon>
+        </th>
+        <th pSortableColumn="rating">Reviews <p-sortIcon field="rating"></p-sortIcon>
+        </th>
+        <th pSortableColumn="inventoryStatus">Status <p-sortIcon field="inventoryStatus"></p-sortIcon>
+        </th>
+        <th></th>
+      </tr>
+    </ng-template>
+    <ng-template pTemplate="body" let-product>
+      <tr>
+        <td>
+          <p-tableCheckbox [value]="product"></p-tableCheckbox>
+        </td>
+        <td>{{product.name}}</td>
+        <td>{{product.price | currency:'USD'}}</td>
+        <td>{{product.category}}</td>
+        <td>
+          <p-rating [ngModel]="product.rating" [readonly]="true" [cancel]="false"></p-rating>
+        </td>
+        <td><span
+            [class]="'product-badge status-' + product.inventoryStatus.toLowerCase()">{{product.inventoryStatus}}</span>
+        </td>
+        <td>
+          <button pButton pRipple icon="pi pi-pencil" class="p-button-rounded p-button-success p-mr-2"
+            (click)="editProduct(product)"></button>
+          <button pButton pRipple icon="pi pi-trash" class="p-button-rounded p-button-warning"
+            (click)="deleteProduct(product)"></button>
+        </td>
+      </tr>
+    </ng-template>
+    <ng-template pTemplate="summary">
+      <div class="p-d-flex p-ai-center p-jc-between">
+        In total there are {{products ? products.length : 0 }} products.
+      </div>
+    </ng-template>
+  </p-table>
+
+
+
+
+Screenshot 12 +
+
Figure 14. Table Code
+
+
+

After that, we need to add some styles to the code.

+
+
+
+
:host ::ng-deep {
+    .p-paginator {
+        .p-paginator-current {
+            margin-left: auto;
+        }
+    }
+
+    .p-progressbar {
+        height: .5rem;
+        background-color: #D8DADC;
+
+        .p-progressbar-value {
+            background-color: #607D8B;
+        }
+    }
+
+    .table-header {
+        display: flex;
+        justify-content: space-between;
+    }
+
+    .p-calendar .p-datepicker {
+        min-width: 25rem;
+
+        td {
+            font-weight: 400;
+        }
+    }
+
+    .p-datatable.p-datatable-customers {
+        .p-datatable-header {
+            padding: 1rem;
+            text-align: left;
+            font-size: 1.5rem;
+        }
+
+        .p-paginator {
+            padding: 1rem;
+        }
+
+        .p-datatable-thead > tr > th {
+            text-align: left;
+        }
+
+        .p-datatable-tbody > tr > td {
+            cursor: auto;
+        }
+
+        .p-dropdown-label:not(.p-placeholder) {
+            text-transform: uppercase;
+        }
+    }
+
+    /* Responsive */
+    .p-datatable-customers .p-datatable-tbody > tr > td .p-column-title {
+        display: none;
+    }
+}
+
+@media screen and (max-width: 960px) {
+    :host ::ng-deep {
+        .p-datatable {
+            &.p-datatable-customers {
+                .p-datatable-thead > tr > th,
+                .p-datatable-tfoot > tr > td {
+                    display: none !important;
+                }
+
+                .p-datatable-tbody > tr {
+                    border-bottom: 1px solid var(--layer-2);
+
+                    > td {
+                        text-align: left;
+                        display: block;
+                        border: 0 none !important;
+                        width: 100% !important;
+                        float: left;
+                        clear: left;
+                        border: 0 none;
+
+                        .p-column-title {
+                            padding: .4rem;
+                            min-width: 30%;
+                            display: inline-block;
+                            margin: -.4rem 1rem -.4rem -.4rem;
+                            font-weight: bold;
+                        }
+
+                        .p-progressbar {
+                            margin-top: .5rem;
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+}
+
+
+
+
+tablestyle +
+
Figure 15. Table CSS
+
+
+

As we can see, we have some values already loaded, like products, and some attributes that we need to import to use the table correctly.

+
+
+

All the modules need to be imported in app.module

+
+
+
+
import { BrowserModule } from '@angular/platform-browser';
+import { NgModule } from '@angular/core';
+
+import { AppRoutingModule } from './app-routing.module';
+import { AppComponent } from './app.component';
+import { HeaderComponent } from './components/header/header.component';
+
+import { MenubarModule } from 'primeng/menubar';
+import { HttpClientModule } from '@angular/common/http';
+import { TableModule } from 'primeng/table';
+import { CalendarModule } from 'primeng/calendar';
+import { SliderModule } from 'primeng/slider';
+import { DialogModule } from 'primeng/dialog';
+import { MultiSelectModule } from 'primeng/multiselect';
+import { ContextMenuModule } from 'primeng/contextmenu';
+import { ButtonModule } from 'primeng/button';
+import { ToastModule } from 'primeng/toast';
+import { InputTextModule } from 'primeng/inputtext';
+import { ProgressBarModule } from 'primeng/progressbar';
+import { DropdownModule } from 'primeng/dropdown';
+import {ToolbarModule} from 'primeng/toolbar';
+import {FileUploadModule} from 'primeng/fileupload';
+import {RatingModule} from 'primeng/rating';
+import { RadioButtonModule } from 'primeng/radiobutton';
+import { InputNumberModule } from 'primeng/inputnumber';
+import { ConfirmDialogModule } from 'primeng/confirmdialog';
+import { ConfirmationService, MessageService } from 'primeng/api';
+import { ProductService } from './services/product.service';
+import { InputTextareaModule } from 'primeng/inputtextarea';
+import {FormsModule} from '@angular/forms';
+
+import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
+import { NoopAnimationsModule } from '@angular/platform-browser/animations';
+
+
+@NgModule({
+  declarations: [AppComponent, HeaderComponent],
+  imports: [
+    BrowserModule,
+    BrowserAnimationsModule,
+    NoopAnimationsModule,
+    AppRoutingModule,
+    MenubarModule,
+    TableModule,
+    CalendarModule,
+    SliderModule,
+    DialogModule,
+    MultiSelectModule,
+    ContextMenuModule,
+    ButtonModule,
+    ToastModule,
+    InputTextModule,
+    ProgressBarModule,
+    DropdownModule,
+    ToolbarModule,
+    FileUploadModule,
+    RatingModule,
+    RadioButtonModule,
+    InputNumberModule,
+    ConfirmDialogModule,
+    InputTextareaModule,
+    FormsModule,
+    HttpClientModule,
+  ],
+
+
+
+
+Screenshot 13 +
+
Figure 16. All modules imported
+
+
+

As we can see, the first thing the table does is load all the products that we have.

+
+
+

To do it, we will create a service to get all the data.

+
+
+

To create a service we need to use the following command

+
+
+

ng generate service services/product

+
+
+

In the service we are simulating an endpoint to get data.

+
+
+

We will have our products "hardcoded" and the methods to get or to set some values.

+
+
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Product } from '../models/product';
+
+@Injectable({
+  providedIn: 'root'
+})
+export class ProductService {
+  status: string[] = ['OUTOFSTOCK', 'INSTOCK', 'LOWSTOCK'];
+
+  productNames: string[] = [
+    'Bamboo Watch',
+    'Black Watch',
+    'Blue Band',
+    'Blue T-Shirt',
+    'Bracelet',
+    'Brown Purse',
+    'Chakra Bracelet',
+    'Galaxy Earrings',
+    'Game Controller',
+    'Gaming Set',
+    'Gold Phone Case',
+    'Green Earbuds',
+    'Green T-Shirt',
+    'Grey T-Shirt',
+    'Headphones',
+    'Light Green T-Shirt',
+    'Lime Band',
+    'Mini Speakers',
+    'Painted Phone Case',
+    'Pink Band',
+    'Pink Purse',
+    'Purple Band',
+    'Purple Gemstone Necklace',
+    'Purple T-Shirt',
+    'Shoes',
+    'Sneakers',
+    'Teal T-Shirt',
+    'Yellow Earbuds',
+    'Yoga Mat',
+    'Yoga Set',
+  ];
+
+  constructor(private http: HttpClient) { }
+
+  getProductsSmall(): any {
+    return this.http.get<any>('assets/products-small.json')
+      .toPromise()
+      .then(res => res.data as Product[])
+      .then(data => data);
+  }
+
+  getProducts(): any {
+    return this.http.get<any>('assets/products.json')
+      .toPromise()
+      .then(res => res.data as Product[])
+      .then(data => data);
+  }
+
+  getProductsWithOrdersSmall(): any {
+    return this.http.get<any>('assets/products-orders-small.json')
+      .toPromise()
+      .then(res => res.data as Product[])
+      .then(data => data);
+  }
+
+  generatePrduct(): Product {
+    const product: Product = {
+      id: this.generateId(),
+      name: this.generateName(),
+      description: 'Product Description',
+      price: this.generatePrice(),
+      quantity: this.generateQuantity(),
+      category: 'Product Category',
+      inventoryStatus: this.generateStatus(),
+      rating: this.generateRating()
+    };
+
+    product.image = product.name.toLocaleLowerCase().split(/[ ,]+/).join('-') + '.jpg';
+    return product;
+  }
+
+  generateId(): string {
+    let text = '';
+    const possible = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
+
+    for (let  i = 0; i < 5; i++) {
+      text += possible.charAt(Math.floor(Math.random() * possible.length));
+    }
+
+    return text;
+  }
+
+
+  generateName(): any {
+    return this.productNames[Math.floor(Math.random() * Math.floor(30))];
+  }
+
+  generatePrice(): any {
+    return Math.floor(Math.random() * Math.floor(299) + 1);
+  }
+
+  generateQuantity(): any {
+    return Math.floor(Math.random() * Math.floor(75) + 1);
+  }
+
+  generateStatus(): any {
+    return this.status[Math.floor(Math.random() * Math.floor(3))];
+  }
+
+  generateRating(): any {
+    return Math.floor(Math.random() * Math.floor(5) + 1);
+  }
+}
+
+
+
+
+Screenshot 14 +
+
Figure 17. Product Service
+
+
+

We also create an interface for the Product, so all the products will have the same structure:

+
+
+
+
export interface Product {
+  id?: string;
+  code?: string;
+  name?: string;
+  description?: string;
+  price?: number;
+  quantity?: number;
+  inventoryStatus?: string;
+  category?: string;
+  image?: string;
+  rating?: number;
+}
+
+
+
+
+Screenshot 15 +
+
Figure 18. Product Interface
+
+
+

As we can see in the methods, we are getting the data from a hardcoded file, products.json.

+
+
+
+
{
+	"data": [
+		{
+			"id": "1000",
+			"code": "f230fh0g3",
+			"name": "Bamboo Watch",
+			"description": "Product Description",
+			"image": "bamboo-watch.jpg",
+			"price": 65,
+			"category": "Accessories",
+			"quantity": 24,
+			"inventoryStatus": "INSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1001",
+			"code": "nvklal433",
+			"name": "Black Watch",
+			"description": "Product Description",
+			"image": "black-watch.jpg",
+			"price": 72,
+			"category": "Accessories",
+			"quantity": 61,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1002",
+			"code": "zz21cz3c1",
+			"name": "Blue Band",
+			"description": "Product Description",
+			"image": "blue-band.jpg",
+			"price": 79,
+			"category": "Fitness",
+			"quantity": 2,
+			"inventoryStatus": "LOWSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1003",
+			"code": "244wgerg2",
+			"name": "Blue T-Shirt",
+			"description": "Product Description",
+			"image": "blue-t-shirt.jpg",
+			"price": 29,
+			"category": "Clothing",
+			"quantity": 25,
+			"inventoryStatus": "INSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1004",
+			"code": "h456wer53",
+			"name": "Bracelet",
+			"description": "Product Description",
+			"image": "bracelet.jpg",
+			"price": 15,
+			"category": "Accessories",
+			"quantity": 73,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1005",
+			"code": "av2231fwg",
+			"name": "Brown Purse",
+			"description": "Product Description",
+			"image": "brown-purse.jpg",
+			"price": 120,
+			"category": "Accessories",
+			"quantity": 0,
+			"inventoryStatus": "OUTOFSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1006",
+			"code": "bib36pfvm",
+			"name": "Chakra Bracelet",
+			"description": "Product Description",
+			"image": "chakra-bracelet.jpg",
+			"price": 32,
+			"category": "Accessories",
+			"quantity": 5,
+			"inventoryStatus": "LOWSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1007",
+			"code": "mbvjkgip5",
+			"name": "Galaxy Earrings",
+			"description": "Product Description",
+			"image": "galaxy-earrings.jpg",
+			"price": 34,
+			"category": "Accessories",
+			"quantity": 23,
+			"inventoryStatus": "INSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1008",
+			"code": "vbb124btr",
+			"name": "Game Controller",
+			"description": "Product Description",
+			"image": "game-controller.jpg",
+			"price": 99,
+			"category": "Electronics",
+			"quantity": 2,
+			"inventoryStatus": "LOWSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1009",
+			"code": "cm230f032",
+			"name": "Gaming Set",
+			"description": "Product Description",
+			"image": "gaming-set.jpg",
+			"price": 299,
+			"category": "Electronics",
+			"quantity": 63,
+			"inventoryStatus": "INSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1010",
+			"code": "plb34234v",
+			"name": "Gold Phone Case",
+			"description": "Product Description",
+			"image": "gold-phone-case.jpg",
+			"price": 24,
+			"category": "Accessories",
+			"quantity": 0,
+			"inventoryStatus": "OUTOFSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1011",
+			"code": "4920nnc2d",
+			"name": "Green Earbuds",
+			"description": "Product Description",
+			"image": "green-earbuds.jpg",
+			"price": 89,
+			"category": "Electronics",
+			"quantity": 23,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1012",
+			"code": "250vm23cc",
+			"name": "Green T-Shirt",
+			"description": "Product Description",
+			"image": "green-t-shirt.jpg",
+			"price": 49,
+			"category": "Clothing",
+			"quantity": 74,
+			"inventoryStatus": "INSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1013",
+			"code": "fldsmn31b",
+			"name": "Grey T-Shirt",
+			"description": "Product Description",
+			"image": "grey-t-shirt.jpg",
+			"price": 48,
+			"category": "Clothing",
+			"quantity": 0,
+			"inventoryStatus": "OUTOFSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1014",
+			"code": "waas1x2as",
+			"name": "Headphones",
+			"description": "Product Description",
+			"image": "headphones.jpg",
+			"price": 175,
+			"category": "Electronics",
+			"quantity": 8,
+			"inventoryStatus": "LOWSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1015",
+			"code": "vb34btbg5",
+			"name": "Light Green T-Shirt",
+			"description": "Product Description",
+			"image": "light-green-t-shirt.jpg",
+			"price": 49,
+			"category": "Clothing",
+			"quantity": 34,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1016",
+			"code": "k8l6j58jl",
+			"name": "Lime Band",
+			"description": "Product Description",
+			"image": "lime-band.jpg",
+			"price": 79,
+			"category": "Fitness",
+			"quantity": 12,
+			"inventoryStatus": "INSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1017",
+			"code": "v435nn85n",
+			"name": "Mini Speakers",
+			"description": "Product Description",
+			"image": "mini-speakers.jpg",
+			"price": 85,
+			"category": "Clothing",
+			"quantity": 42,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1018",
+			"code": "09zx9c0zc",
+			"name": "Painted Phone Case",
+			"description": "Product Description",
+			"image": "painted-phone-case.jpg",
+			"price": 56,
+			"category": "Accessories",
+			"quantity": 41,
+			"inventoryStatus": "INSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1019",
+			"code": "mnb5mb2m5",
+			"name": "Pink Band",
+			"description": "Product Description",
+			"image": "pink-band.jpg",
+			"price": 79,
+			"category": "Fitness",
+			"quantity": 63,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1020",
+			"code": "r23fwf2w3",
+			"name": "Pink Purse",
+			"description": "Product Description",
+			"image": "pink-purse.jpg",
+			"price": 110,
+			"category": "Accessories",
+			"quantity": 0,
+			"inventoryStatus": "OUTOFSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1021",
+			"code": "pxpzczo23",
+			"name": "Purple Band",
+			"description": "Product Description",
+			"image": "purple-band.jpg",
+			"price": 79,
+			"category": "Fitness",
+			"quantity": 6,
+			"inventoryStatus": "LOWSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1022",
+			"code": "2c42cb5cb",
+			"name": "Purple Gemstone Necklace",
+			"description": "Product Description",
+			"image": "purple-gemstone-necklace.jpg",
+			"price": 45,
+			"category": "Accessories",
+			"quantity": 62,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1023",
+			"code": "5k43kkk23",
+			"name": "Purple T-Shirt",
+			"description": "Product Description",
+			"image": "purple-t-shirt.jpg",
+			"price": 49,
+			"category": "Clothing",
+			"quantity": 2,
+			"inventoryStatus": "LOWSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1024",
+			"code": "lm2tny2k4",
+			"name": "Shoes",
+			"description": "Product Description",
+			"image": "shoes.jpg",
+			"price": 64,
+			"category": "Clothing",
+			"quantity": 0,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1025",
+			"code": "nbm5mv45n",
+			"name": "Sneakers",
+			"description": "Product Description",
+			"image": "sneakers.jpg",
+			"price": 78,
+			"category": "Clothing",
+			"quantity": 52,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1026",
+			"code": "zx23zc42c",
+			"name": "Teal T-Shirt",
+			"description": "Product Description",
+			"image": "teal-t-shirt.jpg",
+			"price": 49,
+			"category": "Clothing",
+			"quantity": 3,
+			"inventoryStatus": "LOWSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1027",
+			"code": "acvx872gc",
+			"name": "Yellow Earbuds",
+			"description": "Product Description",
+			"image": "yellow-earbuds.jpg",
+			"price": 89,
+			"category": "Electronics",
+			"quantity": 35,
+			"inventoryStatus": "INSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1028",
+			"code": "tx125ck42",
+			"name": "Yoga Mat",
+			"description": "Product Description",
+			"image": "yoga-mat.jpg",
+			"price": 20,
+			"category": "Fitness",
+			"quantity": 15,
+			"inventoryStatus": "INSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1029",
+			"code": "gwuby345v",
+			"name": "Yoga Set",
+			"description": "Product Description",
+			"image": "yoga-set.jpg",
+			"price": 20,
+			"category": "Fitness",
+			"quantity": 25,
+			"inventoryStatus": "INSTOCK",
+			"rating": 8
+		}
+	]
+}
+
+
+
+
+Screenshot 16 +
+
Figure 19. Product Json Data
+
+
+

But in our component.ts we can see in ngOnInit that we are getting the data when the component is ready. So when the component is rendered, the data will be in the table.

+
+
+

In the first lines of our table we can see some attributes and events like value, rows, paginator, globalFilterFields, selection, rowHover, dataKey, currentPageReportTemplate, showCurrentPageReport.

+
+
+

We can see more details from those attributes and events here: https://primefaces.org/primeng/showcase/#/table

+
+
+

In the first section, we can see the <ng-template>; this is where we can search for a value in the table.

+
+
+
+
<p-table #dt [value]="products" [rows]="10" [paginator]="true"
+    [globalFilterFields]="['name','country.name','representative.name','status']" [(selection)]="selectedProducts"
+    [rowHover]="true" dataKey="id" currentPageReportTemplate="Showing {first} to {last} of {totalRecords} entries"
+    [showCurrentPageReport]="true">
+    <ng-template pTemplate="caption">
+      <div class="p-d-flex p-ai-center p-jc-between">
+        <h5 class="p-m-0">Manage Products</h5>
+        <span class="p-input-icon-left">
+          <i class="pi pi-search"></i>
+          <input pInputText type="text" (input)="dt.filterGlobal($event.target.value, 'contains')"
+            placeholder="Search..." />
+        </span>
+      </div>
+    </ng-template>
+
+
+
+
+Screenshot 17 +
+
Figure 20. Search on Table
+
+
+

The next <ng-template> is the header of the table, where we can see the name of each column.

+
+
+
+
    <ng-template pTemplate="header">
+      <tr>
+        <th style="width: 3rem">
+          <p-tableHeaderCheckbox></p-tableHeaderCheckbox>
+        </th>
+        <th pSortableColumn="name">Name <p-sortIcon field="name"></p-sortIcon>
+        </th>
+        <th pSortableColumn="price">Price <p-sortIcon field="price"></p-sortIcon>
+        </th>
+        <th pSortableColumn="category">Category <p-sortIcon field="category"></p-sortIcon>
+        </th>
+        <th pSortableColumn="rating">Reviews <p-sortIcon field="rating"></p-sortIcon>
+        </th>
+        <th pSortableColumn="inventoryStatus">Status <p-sortIcon field="inventoryStatus"></p-sortIcon>
+        </th>
+        <th></th>
+      </tr>
+    </ng-template>
+
+
+
+
+Screenshot 18 +
+
Figure 21. Table Headers
+
+
+

After finishing the header, we need to create the table body. Here is where we print each row's values.

+
+
+
+
    <ng-template pTemplate="body" let-product>
+      <tr>
+        <td>
+          <p-tableCheckbox [value]="product"></p-tableCheckbox>
+        </td>
+        <td>{{product.name}}</td>
+        <td>{{product.price | currency:'USD'}}</td>
+        <td>{{product.category}}</td>
+        <td>
+          <p-rating [ngModel]="product.rating" [readonly]="true" [cancel]="false"></p-rating>
+        </td>
+        <td><span
+            [class]="'product-badge status-' + product.inventoryStatus.toLowerCase()">{{product.inventoryStatus}}</span>
+        </td>
+        <td>
+          <button pButton pRipple icon="pi pi-pencil" class="p-button-rounded p-button-success p-mr-2"
+            (click)="editProduct(product)"></button>
+          <button pButton pRipple icon="pi pi-trash" class="p-button-rounded p-button-warning"
+            (click)="deleteProduct(product)"></button>
+        </td>
+      </tr>
+    </ng-template>
+
+
+
+
+Screenshot 19 +
+
Figure 22. Table Body
+
+
+

As we can see, we have some buttons with methods

+
+
+

The first method is to edit a specific product, (click)="editProduct(product)", and the second one is to delete it, deleteProduct(product).

+
+
+
+
  editProduct(product: Product): any {
+    this.product = { ...product };
+    this.productDialog = true;
+  }
+
+  deleteProduct(product: Product): any {
+    this.confirmationService.confirm({
+      message: 'Are you sure you want to delete ' + product.name + '?',
+      header: 'Confirm',
+      icon: 'pi pi-exclamation-triangle',
+      accept: () => {
+        this.products = this.products.filter(val => val.id !==  product.id);
+        this.product = {};
+        this.messageService.add({ severity: 'success', summary: 'Successful', detail: 'Product Deleted', life: 3000 });
+      }
+    });
+  }
+
+
+
+
+Screenshot 20 +
+
Figure 23. Delete and Edit methods
+
+
+

In the last part of the table, we will have a section showing how many products we have.

+
+
+
+Screenshot 21 +
+
Figure 24. Table footer
+
+
+

To do it we just need to create another template and add the following code:

+
+
+
+
    <ng-template pTemplate="summary">
+      <div class="p-d-flex p-ai-center p-jc-between">
+        In total there are {{products ? products.length : 0 }} products.
+      </div>
+    </ng-template>
+
+
+
+
+Screenshot 22 +
+
Figure 25. Table footer code
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-ngrx-effects.html b/docs/devon4ng/1.0/guide-ngrx-effects.html new file mode 100644 index 00000000..adaeef33 --- /dev/null +++ b/docs/devon4ng/1.0/guide-ngrx-effects.html @@ -0,0 +1,448 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Side effects with NgRx/Effects

+
+
+

Reducers are pure functions, meaning they are side-effect free and deterministic. Many actions however have side effects like sending messages or displaying a toast notification. NgRx encapsulates these actions in effects.

+
+
+

Let’s build a recommended movies list so the user can add movies to their watchlist.

+
+
+
+
+

Obtaining the recommendation list from the server

+
+
+

Create a module for recommendations and add stores and states as in the previous chapter. Add EffectsModule.forRoot([]) to the imports in AppModule below StoreModule.forRoot(). Add effects to the feature module:

+
+
+
+
ng generate effect recommendation/Recommendation -m recommendation/recommendation.module.ts
+
+
+
+

We need actions for loading the movie list, success and failure cases:

+
+
+

recommendation/actions/index.ts

+
+
+
+
import { createAction, props, union } from '@ngrx/store';
+import { Movie } from 'src/app/watchlist/models/movies';
+
+export const loadRecommendedMovies = createAction('[Recommendation List] Load movies');
+export const loadRecommendedMoviesSuccess = createAction('[Recommendation API] Load movies success', props<{movies: Movie[]}>());
+export const loadRecommendedMoviesFailure = createAction('[Recommendation API] Load movies failure', props<{error: any}>());
+
+const actions = union({
+    loadRecommendedMovies,
+    loadRecommendedMoviesSuccess,
+    loadRecommendedMoviesFailure
+});
+
+export type ActionsUnion = typeof actions;
+
+
+
+

In the reducer, we use a loading flag so the UI can show a loading spinner. The store is updated with arriving data.

+
+
+

recommendation/actions/index.ts

+
+
+
+
export interface State {
+  items: Movie[];
+  loading: boolean;
+}
+
+export const initialState: State = {
+  items: [],
+  loading: false
+};
+
+export function reducer(state = initialState, action: recommendationActions.ActionsUnion): State {
+  switch (action.type) {
+    case '[Recommendation List] Load movies':
+      return {
+        ...state,
+        items: [],
+        loading: true
+      };
+
+    case '[Recommendation API] Load movies failure':
+      return {
+        ...state,
+          loading: false
+      };
+
+    case '[Recommendation API] Load movies success':
+      return {
+        ...state,
+        items: action.movies,
+        loading: false
+      };
+
+    default:
+      return state;
+  }
+}
+
+export const getAll = (state: State) => state.items;
+export const isLoading = (state: State) => state.loading;
+
+
+
+

We need an API service to talk to the server. For demonstration purposes, we simulate an answer delayed by one second:

+
+
+

recommendation/services/recommendation-api.service.ts

+
+
+
+
@Injectable({
+  providedIn: 'root'
+})
+export class RecommendationApiService {
+
+  private readonly recommendedMovies: Movie[] = [
+    {
+      id: 2,
+      title: 'The Hunger Games',
+      genre: 'sci-fi',
+      releaseYear: 2012,
+      runtimeMinutes: 144
+    },
+    {
+      id: 4,
+      title: 'Avengers: Endgame',
+      genre: 'fantasy',
+      releaseYear: 2019,
+      runtimeMinutes: 181
+    }
+  ];
+
+  loadRecommendedMovies(): Observable<Movie[]> {
+    return of(this.recommendedMovies).pipe(delay(1000));
+  }
+}
+
+
+
+

Here are the effects:

+
+
+

recommendation/services/recommendation-api.service.ts

+
+
+
+
@Injectable()
+export class RecommendationEffects {
+
+  constructor(
+    private actions$: Actions,
+    private recommendationApi: RecommendationApiService,
+  ) { }
+
+  @Effect()
+  loadBooks$ = this.actions$.pipe(
+    ofType(recommendationActions.loadRecommendedMovies.type),
+    switchMap(() => this.recommendationApi.loadRecommendedMovies().pipe(
+      map(movies => recommendationActions.loadRecommendedMoviesSuccess({ movies })),
+      catchError(error => of(recommendationActions.loadRecommendedMoviesFailure({ error })))
+    ))
+  );
+}
+
+
+
+

Effects are always observables and return actions. In this example, we consume the actions observable provided by NgRx and listen only for the loadRecommendedMovies actions by using the ofType operator. Using switchMap, we map to a new observable, one that loads movies and maps the successful result to a new loadRecommendedMoviesSuccess action or a failure to loadRecommendedMoviesFailure. In a real application we would show a notification in the error case.

+
+
+
+
+

==

+
+
+

If an effect should not dispatch another action, return an empty observable. +== ==

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-ngrx-entity.html b/docs/devon4ng/1.0/guide-ngrx-entity.html new file mode 100644 index 00000000..9209de99 --- /dev/null +++ b/docs/devon4ng/1.0/guide-ngrx-entity.html @@ -0,0 +1,376 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Simplifying CRUD with NgRx/Entity

+
+
+

Most of the time when manipulating entries in the store, we like to create, add, update, or delete entries (CRUD). NgRx/Entity provides convenience functions if each item of a collection has an id property. Luckily all our entities already have this property.

+
+
+

Let’s add functionality to add a movie to the watchlist. First, create the required action:

+
+
+

recommendation/actions/index.ts

+
+
+
+
export const addToWatchlist = createAction('[Recommendation List] Add to watchlist',
+    props<{ watchlistItemId: number, movie: Movie, addedAt: Date }>());
+
+
+
+
+
+

==

+
+
+

You may wonder why the Date object is not created inside the reducer instead, since it should always be the current time. However, remember that reducers should be deterministic state machines — State A + Action B should always result in the same State C. This makes reducers easily testable. +== ==

+
+
+

Then, rewrite the watchlistData reducer to make use of NgRx/Entity:

+
+
+

recommendation/actions/index.ts

+
+
+
+
export interface State extends EntityState<WatchlistItem> { (1)
+}
+
+export const entityAdapter = createEntityAdapter<WatchlistItem>(); (2)
+
+export const initialState: State = entityAdapter.getInitialState(); (3)
+
+const entitySelectors = entityAdapter.getSelectors();
+
+export function reducer(state = initialState, action: playbackActions.ActionsUnion | recommendationActions.ActionsUnion): State {
+  switch (action.type) {
+    case playbackActions.playbackFinished.type:
+      const itemToUpdate = entitySelectors
+      .selectAll(state) (4)
+      .find(item => item.movie.id == action.movieId);
+      if (itemToUpdate) {
+        return entityAdapter.updateOne({ (5)
+          id: itemToUpdate.id,
+          changes: { playbackMinutes: action.stoppedAtMinute } (6)
+        }, state);
+      } else {
+        return state;
+      }
+
+    case recommendationActions.addToWatchlist.type:
+      return entityAdapter.addOne({id: action.watchlistItemId, movie: action.movie, added: action.addedAt, playbackMinutes: 0}, state);
+
+    default:
+      return state;
+  }
+}
+
+
+export const getAllItems = entitySelectors.selectAll;
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + +
1NgRx/Entity requires state to extend EntityState. It provides a list of ids and a dictionary of id ⇒ entity entries
2The entity adapter provides data manipulation operations and selectors
3The state can be initialized with getInitialState(), which accepts an optional object to define any additional state beyond EntityState
4selectAll returns an array of all entities
5All adapter operations consume the state object as the last argument and produce a new state
6Update methods accept a partial change definition; you don’t have to clone the object
+
+
+

This concludes the tutorial on NgRx. If you want to learn about advanced topics such as selectors with arguments, testing, or router state, head over to the official NgRx documentation.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-ngrx-getting-started.html b/docs/devon4ng/1.0/guide-ngrx-getting-started.html new file mode 100644 index 00000000..b218415e --- /dev/null +++ b/docs/devon4ng/1.0/guide-ngrx-getting-started.html @@ -0,0 +1,408 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Introduction to NgRx

+
+
+

NgRx is a state management framework for Angular based on the Redux pattern.

+
+
+
+
+

The need for client side state management

+
+
+

You may wonder why you should bother with state management. Usually data resides in a back-end storage system, e.g. a database, and is retrieved by the client on a per-need basis. To add, update, or delete entities from this store, clients have to invoke API endpoints at the back-end. Mimicking database-like transactions on the client side may seem redundant. However, there are many use cases for which a global client-side state is appropriate:

+
+
+
    +
  • +

    the client has some kind of global state which should survive the destruction of a component, but does not warrant server side persistence, for example: volume level of media, expansion status of menus

    +
  • +
  • +

    server side data should not be retrieved every time it is needed, either because multiple components consume it, or because it should be cached, e.g. the personal watchlist in an online streaming app

    +
  • +
  • +

    the app provides a rich experience with offline functionality, e.g. a native app built with Ionic

    +
  • +
+
+
+

Saving global state inside the services it originates from results in a data flow that is hard to follow and in state becoming inconsistent due to unordered state mutations. Following the single source of truth principle, there should be a central location holding all your application’s state, just like a server side database does. State management libraries for Angular provide tools for storing, retrieving, and updating client-side state.

+
+
+
+
+

Why NgRx?

+
+
+

As stated in the introduction, devon4ng does not stipulate a particular state library, or require using one at all. However, NgRx has proven to be a robust, mature solution for this task, with good tooling and 3rd-party library support. Albeit introducing a level of indirection that requires additional effort even for simple features, the Redux concept enforces a clear separation of concerns leading to a cleaner architecture.

+
+
+

Nonetheless, you should always compare different approaches to state management and pick the best one suiting your use case. Here’s a (non-exhaustive) list of competing state management libraries:

+
+
+
    +
  • +

    Plain RxJS using the simple store described in Abstract Class Store

    +
  • +
  • +

    NgXS reduces some boilerplate of NgRx by leveraging the power of decorators and moving side effects to the store

    +
  • +
  • +

    MobX follows a more imperative approach in contrast to the functional Redux pattern

    +
  • +
  • +

    Akita also uses an imperative approach with direct setters in the store, but keeps the concept of immutable state transitions

    +
  • +
+
+
+
+
+

Setup

+
+
+

To get a quick start, use the provided template for devon4ng + NgRx.

+
+
+

To manually install the core store package together with a set of useful extensions:

+
+
+

NPM:

+
+
+
+
`npm install @ngrx/store @ngrx/effects @ngrx/entity @ngrx/store-devtools --save`
+
+
+
+

Yarn:

+
+
+
+
`yarn add @ngrx/store @ngrx/effects @ngrx/entity @ngrx/store-devtools`
+
+
+
+

We recommend to add the NgRx schematics to your project so you can create code artifacts from the command line:

+
+
+

NPM:

+
+
+
+
`npm install @ngrx/schematics --save-dev`
+
+
+
+

Yarn:

+
+
+
+
`yarn add @ngrx/schematics --dev`
+
+
+
+

Afterwards, make NgRx your default schematics provider, so you don’t have to type the qualified package name every time:

+
+
+
+
`ng config cli.defaultCollection @ngrx/schematics`
+
+
+
+

If you have custom settings for Angular schematics, you have to configure them as described here.

+
+
+
+
+

Concept

+
+
+
+NgRx Architecture +
+
Figure 1. NgRx architecture overview
+
+
+

Figure 1 gives an overview of the NgRx data flow. The single source of truth is managed as an immutable state object by the store. Components dispatch actions to trigger state changes. Actions are handed over to reducers, which take the current state and action data to compute the next state. Actions are also consumed by effects, which perform side-effects such as retrieving data from the back-end, and may dispatch new actions as a result. Components subscribe to state changes using selectors.

+
+
+

Continue with Creating a Simple Store.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-ngrx-simple-store.html b/docs/devon4ng/1.0/guide-ngrx-simple-store.html new file mode 100644 index 00000000..ae8a40de --- /dev/null +++ b/docs/devon4ng/1.0/guide-ngrx-simple-store.html @@ -0,0 +1,771 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

State, Selection and Reducers

+
+ +
+
+
+

Creating a Simple Store

+
+
+

In the following pages we use the example of an online streaming service. We will model a particular feature, a watchlist that can be populated by the user with movies she or he wants to see in the future.

+
+
+
+
+

Initializing NgRx

+
+
+

If you’re starting fresh, you first have to initialize NgRx and create a root state. The fastest way to do this is using the schematic:

+
+
+
+
`ng generate @ngrx/schematics:store State --root --module app.module.ts`
+
+
+
+

This will automatically generate a root store and register it in the app module. Next we generate a feature module for the watchlist:

+
+
+

` ng generate module watchlist`

+
+
+

and create a corresponding feature store:

+
+
+

` ng generate store watchlist/Watchlist -m watchlist.module.ts`

+
+
+

This generates a file watchlist/reducers/index.ts with the reducer function, and registers the store in the watchlist module declaration.

+
+
+
+
+

== =

+
+
+

If you’re getting an error Schematic "store" not found in collection "@schematics/angular", this means you forgot to register the NgRx schematics as default. +== == =

+
+
+

Next, add the WatchlistModule to the AppModule imports so the feature store is registered when the application starts. We also added the store devtools which we will use later, resulting in the following file:

+
+
+

app.module.ts

+
+
+
+
import { BrowserModule } from '@angular/platform-browser';
+import { NgModule } from '@angular/core';
+
+import { AppComponent } from './app.component';
+import { EffectsModule } from '@ngrx/effects';
+import { AppEffects } from './app.effects';
+import { StoreModule } from '@ngrx/store';
+import { reducers, metaReducers } from './reducers';
+import { StoreDevtoolsModule } from '@ngrx/store-devtools';
+import { environment } from '../environments/environment';
+import { WatchlistModule } from './watchlist/watchlist.module';
+
+@NgModule({
+  declarations: [
+    AppComponent
+  ],
+  imports: [
+    BrowserModule,
+    WatchlistModule,
+    StoreModule.forRoot(reducers, { metaReducers }),
+    // Instrumentation must be imported after importing StoreModule (config is optional)
+    StoreDevtoolsModule.instrument({
+      maxAge: 25, // Retains last 25 states
+      logOnly: environment.production, // Restrict extension to log-only mode
+    }),
+    !environment.production ? StoreDevtoolsModule.instrument() : []
+  ],
+  providers: [],
+  bootstrap: [AppComponent]
+})
+export class AppModule { }
+
+
+
+
+
+

Create an entity model and initial state

+
+
+

We need a simple model for our list of movies. Create a file watchlist/models/movies.ts and insert the following code:

+
+
+
+
export interface Movie {
+    id: number;
+    title: string;
+    releaseYear: number;
+    runtimeMinutes: number;
+    genre: Genre;
+}
+
+export type Genre = 'action' | 'fantasy' | 'sci-fi' | 'romantic' | 'comedy' | 'mystery';
+
+export interface WatchlistItem {
+    id: number;
+    movie: Movie;
+    added: Date;
+    playbackMinutes: number;
+}
+
+
+
+
+
+

== =

+
+
+

We discourage putting several types into the same file and do this only for the sake of keeping this tutorial brief. +== == =

+
+
+

Later we will learn how to retrieve data from the back-end using effects. For now we will create an initial state for the user with a default movie.

+
+
+

State is defined and transformed by a reducer function. Let’s create a watchlist reducer:

+
+
+
+
```
+cd watchlist/reducers
+ng g reducer WatchlistData --reducers index.ts
+```
+
+
+
+

Open the generated file watchlist-data.reducer.ts. You see three exports: The State interface defines the shape of the state. There is only one instance of a feature state in the store at all times. The initialState constant is the state at application creation time. The reducer function will later be called by the store to produce the next state instance based on the current state and an action object.

+
+
+

Let’s put a movie into the user’s watchlist:

+
+
+

watchlist-data.reducer.ts

+
+
+
+
export interface State {
+  items: WatchlistItem[];
+}
+
+export const initialState: State = {
+  items: [
+    {
+      id: 42,
+      movie: {
+        id: 1,
+        title: 'Die Hard',
+        genre: 'action',
+        releaseYear: 1988,
+        runtimeMinutes: 132
+      },
+      playbackMinutes: 0,
+      added: new Date(),
+    }
+  ]
+};
+
+
+
+
+
+

Select the current watchlist

+
+
+

State slices can be retrieved from the store using selectors.

+
+
+

Create a watchlist component:

+
+
+
+
`ng g c watchlist/Watchlist`
+
+
+
+

and add it to the exports of WatchlistModule. Also, replace app.component.html with

+
+
+
+
<app-watchlist></app-watchlist>
+
+
+
+

State observables are obtained using selectors. They are memoized by default, meaning that you don’t have to worry about performance if you use complicated calculations when deriving state — these are only performed once per state emission.

+
+
+

Add a selector to watchlist-data.reducer.ts:

+
+
+
+
`export const getAllItems = (state: State) => state.items;`
+
+
+
+

Next, we have to re-export the selector for this sub-state in the feature reducer. Modify the watchlist/reducers/index.ts like this:

+
+
+

watchlist/reducers/index.ts

+
+
+
+
import {
+  ActionReducer,
+  ActionReducerMap,
+  createFeatureSelector,
+  createSelector,
+  MetaReducer
+} from '@ngrx/store';
+import { environment } from 'src/environments/environment';
+import * as fromWatchlistData from './watchlist-data.reducer';
+import * as fromRoot from 'src/app/reducers/index';
+
+export interface WatchlistState { (1)
+  watchlistData: fromWatchlistData.State;
+}
+
+export interface State extends fromRoot.State { (2)
+  watchlist: WatchlistState;
+}
+
+export const reducers: ActionReducerMap<WatchlistState> = { (3)
+  watchlistData: fromWatchlistData.reducer,
+};
+
+export const metaReducers: MetaReducer<WatchlistState>[] = !environment.production ? [] : [];
+
+export const getFeature = createFeatureSelector<State, WatchlistState>('watchlist'); (4)
+
+export const getWatchlistData = createSelector( (5)
+  getFeature,
+  state => state.watchlistData
+);
+
+export const getAllItems = createSelector( (6)
+  getWatchlistData,
+  fromWatchlistData.getAllItems
+);
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + +
1The feature state, each member is managed by a different reducer
2Feature states are registered by the forFeature method. This interface provides a typesafe path from root to feature state.
3Tie sub-states of a feature state to the corresponding reducers
4Create a selector to access the 'watchlist' feature state
5select the watchlistData sub state
6re-export the selector
+
+
+

Note how createSelector allows to chain selectors. This is a powerful tool that also allows for selecting from multiple states.

+
+
+

You can use selectors as pipeable operators:

+
+
+

watchlist.component.ts

+
+
+
+
export class WatchlistComponent {
+  watchlistItems$: Observable<WatchlistItem[]>;
+
+  constructor(
+    private store: Store<fromWatchlist.State>
+  ) {
+    this.watchlistItems$ = this.store.pipe(select(fromWatchlist.getAllItems));
+  }
+}
+
+
+
+

watchlist.component.html

+
+
+
+
<h1>Watchlist</h1>
+<ul>
+    <li *ngFor="let item of watchlistItems$ | async">{{item.movie.title}} ({{item.movie.releaseYear}}): {{item.playbackMinutes}}/{{item.movie.runtimeMinutes}} min watched</li>
+</ul>
+
+
+
+
+
+

Dispatching an action to update watched minutes

+
+
+

We track the user’s current progress at watching a movie as the playbackMinutes property. After closing a video, the watched minutes have to be updated. In NgRx, state is updated by dispatching actions. An action is an object with a (globally unique) type discriminator and an optional payload.

+
+
+
+
+

== Creating the action

+
+
+

Create a file playback/actions/index.ts. In this example, we do not further separate the actions per sub state. Actions can be defined by using action creators:

+
+
+

playback/actions/index.ts

+
+
+
+
import { createAction, props, union } from '@ngrx/store';
+
+export const playbackFinished = createAction('[Playback] Playback finished', props<{ movieId: number, stoppedAtMinute: number }>());
+
+const actions = union({
+    playbackFinished
+});
+
+export type ActionsUnion = typeof actions;
+
+
+
+

First we specify the type, followed by a call to the payload definition function. Next, we create a union of all possible actions for this file using union, which allows us to access action payloads in the reducer in a typesafe way.

+
+
+
+
+

== =

+
+
+

Action types should follow the naming convention [Source] Event, e.g. [Recommended List] Hide Recommendation or [Auth API] Login Success. Think of actions as events rather than commands. You should never use the same action at two different places (you can still handle multiple actions the same way). This facilitates tracing the source of an action. For details see Good Action Hygiene with NgRx by Mike Ryan (video). +== == =

+
+
+
+
+

== Dispatch

+
+
+

We skip the implementation of an actual video playback page and simulate watching a movie in 10 minute segments by adding a link in the template:

+
+
+

watchlist-component.html

+
+
+
+
<li *ngFor="let item of watchlistItems$ | async">... <button (click)="stoppedPlayback(item.movie.id, item.playbackMinutes + 10)">Add 10 Minutes</button></li>
+
+
+
+

watchlist-component.ts

+
+
+
+
import * as playbackActions from 'src/app/playback/actions';
+...
+  stoppedPlayback(movieId: number, stoppedAtMinute: number) {
+    this.store.dispatch(playbackActions.playbackFinished({ movieId, stoppedAtMinute }));
+  }
+
+
+
+
+
+

== State reduction

+
+
+

Next, we handle the action inside the watchlistData reducer. Note that actions can be handled by multiple reducers and effects at the same time to update different states, for example if we’d like to show a rating modal after playback has finished.

+
+
+

watchlist-data.reducer.ts

+
+
+
+
export function reducer(state = initialState, action: playbackActions.ActionsUnion): State {
+  switch (action.type) {
+    case playbackActions.playbackFinished.type:
+      return {
+        ...state,
+        items: state.items.map(updatePlaybackMinutesMapper(action.movieId, action.stoppedAtMinute))
+      };
+
+    default:
+      return state;
+  }
+}
+
+export function updatePlaybackMinutesMapper(movieId: number, stoppedAtMinute: number) {
+  return (item: WatchlistItem) => {
+    if (item.movie.id == movieId) {
+      return {
+        ...item,
+        playbackMinutes: stoppedAtMinute
+      };
+    } else {
+      return item;
+    }
+  };
+}
+
+
+
+

Note how we changed the reducer’s function signature to reference the actions union. The switch-case handles all incoming actions to produce the next state. The default case handles all actions a reducer is not interested in by returning the state unchanged. Then we find the watchlist item corresponding to the movie with the given id and update the playback minutes. Since state is immutable, we have to clone all objects down to the one we would like to change using the object spread operator (…​).

+
+
+
+
+

== =

+
+
+

Selectors rely on object identity to decide whether the value has to be recalculated. Do not clone objects that are not on the path to the change you want to make. This is why updatePlaybackMinutesMapper returns the same item if the movie id does not match. +== == =

+
+
+
+
+

== Alternative state mapping with Immer

+
+
+

It can be hard to think in immutable changes, especially if your team has a strong background in imperative programming. In this case, you may find the Immer library convenient, which allows to produce immutable objects by manipulating a proxied draft. The same reducer can then be written as:

+
+
+

watchlist-data.reducer.ts with Immer

+
+
+
+
import { produce } from 'immer';
+...
+case playbackActions.playbackFinished.type:
+      return produce(state, draft => {
+        const itemToUpdate = draft.items.find(item => item.movie.id == action.movieId);
+        if (itemToUpdate) {
+          itemToUpdate.playbackMinutes = action.stoppedAtMinute;
+        }
+      });
+
+
+
+

Immer works out of the box with plain objects and arrays.

+
+
+
+
+

== Redux devtools

+
+
+

If the StoreDevToolsModule is instrumented as described above, you can use the browser extension Redux devtools to see all dispatched actions and the resulting state diff, as well as the current state, and even travel back in time by undoing actions.

+
+
+
+Redux Devtools +
+
Figure 1. Redux devtools
+
+
+

Continue with learning about effects

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-npm-yarn-workflow.html b/docs/devon4ng/1.0/guide-npm-yarn-workflow.html new file mode 100644 index 00000000..3ea943c1 --- /dev/null +++ b/docs/devon4ng/1.0/guide-npm-yarn-workflow.html @@ -0,0 +1,975 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Package Managers Workflow

+
+ +
+
+
+

Introduction

+
+
+

This document aims to provide you the necessary documentation and sources in order to help you understand the importance of dependencies between packages.

+
+
+

Projects in NodeJS make use of modules, chunks of reusable code made by other people or teams. These small chunks of reusable code are called packages [1]. Packages are used to solve specific problems or tasks. These relations between your project and the external packages are called dependencies.

+
+
+

For example, imagine we are doing a small program that takes your birthday as an input and tells you how many days are left until your birthday. We search in the repository if someone has published a package to retrieve the actual date and manage date types, and maybe we could search for another package to show a calendar, because we want to optimize our time, and we wish the user to click a calendar button and choose the day in the calendar instead of typing it.

+
+
+

As you can see, packages are convenient. In some cases, they may be even needed, as they can manage aspects of your program you may not be proficient in, or provide an easier use of them.

+
+
+

For more comprehensive information visit npm definition

+
+
+
+
+

Package.json

+
+
+

Dependencies in your project are stored in a file called package.json. Every package.json must contain, at least, the name and version of your project.

+
+
+

Package.json is located in the root of your project.

+
+
+ + + + + +
+ + +If package.json is not on your root directory refer to Problems you may encounter section +
+
+
+

If you wish to learn more information about package.json, click on the following links:

+
+ +
+
+
+

== Content of package.json

+
+
+

As you noticed, package.json is a really important file in your project. It contains essential information about our project, therefore you need to understand what’s inside.

+
+
+

The structure of package.json is divided in blocks, inside the first one you can find essential information of your project such as the name, version, license and optionally some [Scripts].

+
+
+
+
{
+  "name": "exampleproject",
+  "version": "0.0.0",
+  "license": "MIT",
+  "scripts": {
+    "ng": "ng",
+    "start": "ng serve",
+    "build": "ng build",
+    "test": "ng test",
+    "lint": "ng lint",
+    "e2e": "ng e2e"
+  }
+
+
+
+

The next block is called dependencies and contains the packages that project needs in order to be developed, compiled and executed.

+
+
+
+
"private": true,
+  "dependencies": {
+    "@angular/animations": "^4.2.4",
+    "@angular/common": "^4.2.4",
+    "@angular/forms": "^4.2.4",
+    ...
+    "zone.js": "^0.8.14"
+  }
+
+
+
+

After dependencies we find devDependencies, another kind of dependencies present in the development of the application but unnecessary for its execution. One example is typescript. Code is written in typescript, and then, transpiled to JavaScript. This means the application is not using typescript in execution and consequently not included in the deployment of our application.

+
+
+
+
"devDependencies": {
+    "@angular/cli": "1.4.9",
+    "@angular/compiler-cli": "^4.2.4",
+    ...
+    "@types/node": "~6.0.60",
+    "typescript": "~2.3.3"
+  }
+
+
+
+

Having a peer dependency means that your package needs a dependency that is the same exact dependency as the person installing your package

+
+
+
+
"peerDependencies": {
+    "package-123": "^2.7.18"
+  }
+
+
+
+

Optional dependencies are just that: optional. If they fail to install, Yarn will still say the install process was successful.

+
+
+
+
"optionalDependencies": {
+    "package-321": "^2.7.18"
+  }
+
+
+
+

Finally you can have bundled dependencies which are packages bundled together when publishing your package in a repository.

+
+
+
+
{
+  "bundledDependencies": [
+    "package-4"
+  ]
+}
+
+
+
+

Here is the link to an in-depth explanation of dependency types​.

+
+
+
+
+

== Scripts

+
+
+

Scripts are a great way of automating tasks related to your package, such as simple build processes or development tools.

+
+
+

For example:

+
+
+
+
{
+  "name": "exampleproject",
+  "version": "0.0.0",
+  "license": "MIT",
+  "scripts": {
+    "build-project": "node hello-world.js",
+  }
+
+
+
+

You can run that script by running the command yarn (run) script or npm run script, check the example below:

+
+
+
+
$ yarn (run) build-project    # run is optional
+$ npm run build-project
+
+
+
+

There are special reserved words for scripts, like pre-install, which will execute the script automatically +before the packages you install are installed.

+
+
+

Check different uses for scripts in the following links:

+
+ +
+

Or you can go back to +[Content of package.json]​.

+
+
+
+
+

Managing dependencies

+
+
+

In order to manage dependencies we recommend using package managers in your projects.

+
+
+

A big reason is their usability. Adding or removing a package is really easy, and by doing so, the package manager updates the package.json and copies (or removes) the package in the needed location, with a single command.

+
+
+

Another reason, closely related to the first one, is reducing human error by automating the package management process.

+
+
+

Two of the package managers you can use in NodeJS projects are "yarn" and "npm". While you can use both, we encourage you to use only one of them while working on projects. Using both may lead to different dependencies between members of the team.

+
+
+
+
+

== npm

+
+
+

We’ll start by installing npm following this small guide here.

+
+
+

As stated on the web, npm comes inside of NodeJS, and must be updated after installing NodeJS, in the same guide you used earlier are written the instructions to update npm.

+
+
+

How npm works

+
+
+

In order to explain how npm works, let’s take a command as an example:

+
+
+
+
$ npm install @angular/material @angular/cdk
+
+
+
+

This command tells npm to look for the packages @angular/material and @angular/cdk in the npm registry, download and decompress them in the folder node_modules along with their own dependencies. Additionally, npm will update package.json and create a new file called package-lock.json.

+
+
+

After initialization and installing the first package there will be a new folder called node_modules in your project. This folder is where your packages are unzipped and stored, following a tree scheme.

+
+
+

Take into consideration that both npm and yarn need a package.json in the root of your project in order to work properly. If after creating your project you don’t have it, download the package.json again from the repository or you’ll have to start again.

+
+
+

Brief overview of commands

+
+
+

If we need to create a package.json from scratch, we can use the command init. This command asks the user for basic information about the project and creates a brand new package.json.

+
+
+
+
$ npm init
+
+
+
+

Install (or i) installs all modules listed as dependencies in package.json locally. You can also specify a package, and install that package. Install can also be used with the parameter -g, which tells npm to install the [Global package].

+
+
+
+
$ npm install
+$ npm i
+$ npm install Package
+
+
+
+ + + + + +
+ + +Earlier versions of npm did not add dependencies to package.json unless it was used with the flag --save, so npm install package would be npm install --save package, you have one example below. +
+
+
+
+
$ npm install --save Package
+
+
+
+

Npm needs flags in order to know what kind of dependency you want in your project, in npm you need to put the flag -D or --save-dev to install devDependencies, for more information consult the links at the end of this section.

+
+
+
+
$ npm install -D package
+$ npm install --save-dev package
+
+
+
+

+
+
+

The next command uninstalls the module you specified in the command.

+
+
+
+
$ npm uninstall Package
+
+
+
+

ls command shows us the dependencies like a nested tree, useful if you have few packages, not so useful when you need a lot of packages.

+
+
+
+
$ npm ls
+
+
+
+
+
npm@@VERSION@ /path/to/npm
+└─┬ init-package-json@0.0.4
+  └── promzard@0.1.5
+
+
+
+
example tree
+

We recommend you to learn more about npm commands in the following link, navigating to the section CLI commands.

+
+
+

About Package-lock.json

+
+
+

Package-lock.json describes the dependency tree resulting from using package.json and npm. +Whenever you update, add or remove a package, package-lock.json is deleted and redone with +the new dependencies.

+
+
+
+
 "@angular/animations": {
+      "version": "4.4.6",
+      "resolved": "https://registry.npmjs.org/@angular/animations/-/animations-4.4.6.tgz",
+      "integrity": "sha1-+mYYmaik44y3xYPHpcl85l1ZKjU=",
+      "requires": {
+        "tslib": "1.8.0"
+      }
+
+
+
+

This lock file is checked every time the command npm i (or npm install) is used without specifying a package, +in the case it exists and it’s valid, npm will install the exact tree that was generated, such that subsequent +installs are able to generate identical dependency trees.

+
+
+ + + + + +
+ + +It is not recommended to modify this file yourself. It’s better to leave its management to npm. +
+
+
+

More information is provided by the npm team at package-lock.json

+
+
+
+
+

== Yarn

+
+
+

Yarn is an alternative to npm. If you wish to install yarn, follow the guide getting started with yarn and download the correct version for your operating system. NodeJS is also needed; you can find it here.

+
+
+

Working with yarn

+
+
+

Yarn is used like npm, with small differences in syntax, for example npm install module is changed to yarn add module.

+
+
+
+
$ yarn add @covalent
+
+
+
+

This command is going to download the required packages, modify package.json, put the package in the folder node_modules and makes a new yarn.lock with the new dependency.

+
+
+

However, unlike npm, yarn maintains a cache with packages you download inside. You don’t need to download every file every time you do a general installation. This means installations faster than npm.

+
+
+

Similarly to npm, yarn creates and maintains his own lock file, called yarn.lock. Yarn.lock gives enough information about the project for dependency tree to be reproduced.

+
+
+

yarn commands

+
+
+

Here we have a brief description of yarn’s most used commands:

+
+
+
+
$ yarn add Package
+$ yarn add --dev Package
+
+
+
+

Adds a package locally to use in your package. Adding the flags --dev or -D will add them to devDependencies instead of the default dependencies, if you need more information check the links at the end of the section.

+
+
+
+
$ yarn init
+
+
+
+

Initializes the development of a package.

+
+
+
+
$ yarn install
+
+
+
+

Installs all the dependencies defined in a package.json file, you can also write "yarn" to achieve the same effect.

+
+
+
+
$ yarn remove Package
+
+
+
+

You use it when you wish to remove a package from your project.

+
+
+
+
$ yarn global add Package
+
+
+
+

Installs the [Global package].

+
+
+

Please, refer to the documentation to learn more about yarn commands and their attributes: yarn commands

+
+
+

yarn.lock

+
+
+

This file has the same purpose as Package-lock.json, to guide the packet manager, in this case yarn, +to install the dependency tree specified in yarn.lock.

+
+
+

Yarn.lock and package.json are +essential files when collaborating in a project with more co-workers and may be a +source of errors if programmers do not use the same manager.

+
+
+

Yarn.lock follows the same structure as package-lock.json, you can find an example of dependency below:

+
+
+
+
"@angular/animations@^4.2.4":
+  version "4.4.6"
+  resolved "https://registry.yarnpkg.com/@angular/animations/-/animations-4.4.6.tgz#fa661899a8a4e38cb7c583c7a5c97ce65d592a35"
+  dependencies:
+    tslib "^1.7.1"
+
+
+
+ + + + + +
+ + +As with package-lock.json, it’s strongly not advised to modify this file. Leave its management to yarn +
+
+
+

You can learn more about yarn.lock here: yarn.lock

+
+
+
+
+

== Global package

+
+
+

Global packages are packages installed in your operating system instead of your local project; +global packages are useful for developer tooling that is not part of any individual project but instead is used for local commands.

+
+
+

A good example of global package is @angular/cli, a command line interface for angular used in our projects. You can install +a global package in npm with "npm install -g package" and "yarn global add package" with yarn, you have a npm example below:

+
+
+
Listing 1. npm global package
+
+
npm install -g @angular/cli
+
+
+ +
+
+
+

== Package version

+
+
+

Dependencies are critical to the success of a package. You must be extra careful about +which version packages are using, one package in a different version may break your code.

+
+
+

Versioning in npm and yarn, follows a semantic called semver, following the logic +MAJOR.MINOR.PATCH, like for example, @angular/animations: 4.4.6.

+
+
+

Different versions

+
+
+

Sometimes, packages are installed with a different version from the one initially installed. +This happens because package.json also contains the range of versions we allow yarn or npm to +install or update to, example:

+
+
+
+
"@angular/animations": "^4.2.4"
+
+
+
+

And here the installed one:

+
+
+
+
 "@angular/animations": {
+      "version": "4.4.6",
+      "resolved": "https://registry.npmjs.org/@angular/animations/-/animations-4.4.6.tgz",
+      "integrity": "sha1-+mYYmaik44y3xYPHpcl85l1ZKjU=",
+      "requires": {
+        "tslib": "1.8.0"
+      }
+
+
+
+

As you can see, the version we initially added is 4.2.4, and the version finally installed after +a global installation of all packages, 4.4.6.

+
+
+

Installing packages without package-lock.json or yarn.lock using their respective packet managers, will always +end with npm or yarn installing the latest version allowed by package.json.

+
+
+

"@angular/animations": "^4.2.4" contains not only the version we added, but also the range we allow npm and yarn +to update. Here are some examples:

+
+
+
+
"@angular/animations": "<4.2.4"
+
+
+
+

The version installed must be lower than 4.2.4 .

+
+
+
+
"@angular/animations": ">=4.2.4"
+
+
+
+

The version installed must be greater than or equal to 4.2.4 .

+
+
+
+
"@angular/animations": "=4.2.4"
+
+
+
+

the version installed must be equal to 4.2.4 .

+
+
+
+
"@angular/animations": "^4.2.4"
+
+
+
+

The version installed cannot modify the first non zero digit, for example in this case +it cannot surpass 5.0.0 or be lower than 4.2.4 .

+
+
+

You can learn more about this in Versions

+
+
+
+
+

Problems you may encounter

+
+
+

If you can’t find package.json, you may have deleted the one you had previously, +which means you have to download the package.json from the repository. +In the case you are creating a new project you can create a new package.json. More information +in the links below. Click on Package.json if you come from that section.

+
+ +
+ + + + + +
+ + +Using npm install or yarn without package.json in your projects will +result in compilation errors. As we mentioned earlier, +Package.json contains essential information about your project. +
+
+
+

If you have package.json, but you don’t have package-lock.json or yarn.lock the use of +command "npm install" or "yarn" may result in a different dependency tree.

+
+
+

If you are trying to import a module and Visual Studio Code is not able to find it, +this is usually caused by an error adding the package to the project; try to add the module again with yarn or npm, +and restart Visual Studio Code.

+
+
+

Be careful with the semantic versioning inside your package.json of the packages, +or you may find a new update on one of your dependencies breaking your code.

+
+
+ + + + + +
+ + +In the following link +there is a solution to a problematic update to one package. +
+
+
+

A list of common errors of npm can be found in: npm errors

+
+
+
+
+

== Recommendations

+
+
+

Use yarn or npm in your project, reach an agreement with your team in order to choose one, this will avoid +undesired situations like forgetting to upload an updated yarn.lock or package-lock.json. +Be sure to have the latest version of your project when possible.

+
+
+ + + + + +
+ + +Pull your project every time it’s updated. Erase your node_modules folder and reinstall all +dependencies. This assures you to be working with the same dependencies your team has. +
+
+
+

AD Center recommends the use of yarn.

+
+
+
+
+
+
+1. A package is a file or directory that is described by a package.json. . +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-package-managers.html b/docs/devon4ng/1.0/guide-package-managers.html new file mode 100644 index 00000000..948d2097 --- /dev/null +++ b/docs/devon4ng/1.0/guide-package-managers.html @@ -0,0 +1,502 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Package Managers

+
+
+

There are two major package managers currently used for JavaScript / TypeScript projects which leverage NodeJS as a build platform.

+
+
+
    +
  1. +

    npm

    +
  2. +
  3. +

    yarn

    +
  4. +
+
+
+

Our recommendation is to use yarn but both package managers are fine.

+
+
+ + + + + +
+ + +When using npm it is important to use a version greater 5.0 as npm 3 has major drawbacks compared to yarn. +The following guide assumes that you are using npm >= 5 or yarn. +
+
+
+

Before you start reading further, please take a look at the docs:

+
+ +
+

The following guide will describe best practices for working with yarn / npm.

+
+
+
+
+

Semantic Versioning

+
+
+

When working with package managers it is very important to understand the concept of semantic versioning.

+
+
+
Version example 1.2.3
+

|== == == = +|Version |1. |2. |3 +|Version name when incrementing |Major (2.0.0) |Minor (1.3.0) |Patch (1.2.4) +|Has breaking changes |yes |no |no +|Has features |yes |yes |no +|Has bug fixes |yes |yes |yes +|== == == =

+
+
+

The table gives an overview of the most important parts of semantic versioning. +In the header version 1.2.3 is displayed. +The first row shows the name and the resulting version when incrementing a part of the version. +The next rows show specifics of the resulting version - e.g. a major version can have breaking changes, features and bug fixes.

+
+
+

Packages from npm and yarn leverage semantic versioning and instead of selecting a fixed version one can specify a selector. +The most common selectors are:

+
+
+
    +
  • +

    ^1.2.3 +At least 1.2.3 - 1.2.4 or 1.3.0 can be used, 2.0.0 can not be used

    +
  • +
  • +

    ~1.2.3 +At least 1.2.3 - 1.2.4 can be used, 2.0.0 and 1.3.0 can not be used

    +
  • +
  • +

    >=1.2.3 +At least 1.2.3 - every version greater can also be used

    +
  • +
+
+
+

This achieves a lower number of duplicates. +To give an example:

+
+
+

If package A needs version 1.3.0 of package C and package B needs version 1.4.0 of package C one would end up with 4 packages.

+
+
+

If package A needs version ^1.3.0 of package C and package B needs version 1.4.0 of package C one would end up with 3 packages. +A would use the same version of C as B - 1.4.0.

+
+
+
+
+

Do not modify package.json and lock files by hand

+
+
+

Dependencies are always added using a yarn or npm command. +Altering the package.json, package-json.lock or yarn.lock file by hand is not recommended.

+
+
+

Always use a yarn or npm command to add a new dependency.

+
+
+

Adding the package express with yarn to dependencies.

+
+
+
+
yarn add express
+
+
+
+

Adding the package express with npm to dependencies.

+
+
+
+
npm install express
+
+
+
+
+
+

What does the lock file do

+
+
+

The purpose of files yarn.lock and package-json.lock is to freeze versions for a short time.

+
+
+

The following problem is solved:

+
+
+
    +
  • +

    Developer A upgrades the dependency express to fixed version 4.16.3.

    +
  • +
  • +

    express has sub-dependency accepts with version selector ~1.3.5

    +
  • +
  • +

    His local node_modules folder receives accepts in version 1.3.5

    +
  • +
  • +

    On his machine everything is working fine

    +
  • +
  • +

    Afterward version 1.3.6 of accepts is published - it contains a major bug

    +
  • +
  • +

    Developer B now clones the repo and loads the dependencies.

    +
  • +
  • +

    He receives version 1.3.6 of accepts and blames developer A for upgrading to a broken version.

    +
  • +
+
+
+

Both yarn.lock and package-json.lock freeze all the dependencies. +For example in yarn lock you will find.

+
+
+
Listing 1. yarn.lock example (excerp)
+
+
accepts@~1.3.5:
+  version "1.3.5"
+  resolved "[...URL to registry]"
+  dependencies:
+    mime-types "~2.1.18"
+    negotiator "0.6.1"
+
+mime-db@~1.33.0:
+  version "1.33.0"
+  resolved "[...URL to registry]"
+
+mime-types@~2.1.18:
+  version "2.1.18"
+  resolved "[...URL to registry]"
+  dependencies:
+    mime-db "~1.33.0"
+
+negotiator@0.6.1:
+  version "0.6.1"
+  resolved "[...URL to registry]"
+
+
+
+

The described problem is solved by the example yarn.lock file.

+
+
+
    +
  • +

    accepts is frozen at version ~1.3.5

    +
  • +
  • +

    All of its sub-dependencies are also frozen. +It needs mime-types at version ~2.1.18 which is frozen at 2.1.18. +mime-types needs mime-db at ~1.33.0 which is frozen at 1.33.0

    +
  • +
+
+
+

Every developer will receive the same versions of every dependency.

+
+
+ + + + + +
+ + +You have to make sure all your developers are using the same npm/yarn version - this includes the CI build. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-routing.html b/docs/devon4ng/1.0/guide-routing.html new file mode 100644 index 00000000..61ff9173 --- /dev/null +++ b/docs/devon4ng/1.0/guide-routing.html @@ -0,0 +1,666 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Routing

+
+
+

A basic introduction to the Angular Router can be found in Angular Docs.

+
+
+

This guide will show common tasks and best practices.

+
+
+
+
+

Defining Routes

+
+
+

For each feature module and the app module all routes should be defined in a separate module with the suffix RoutingModule. +This way the routing modules are the only place where routes are defined. +This pattern achieves a clear separation of concerns. +The following figure illustrates this.

+
+
+
+Routing module declaration +
+
Figure 1. Routing module declaration
+
+
+

It is important to define routes inside app routing module with .forRoot() and in feature routing modules with .forChild().

+
+
+
+
+

Example 1 - No Lazy Loading

+
+
+

In this example two modules need to be configured with routes - AppModule and FlightModule.

+
+
+

The following routes will be configured

+
+
+
    +
  • +

    / will redirect to /search

    +
  • +
  • +

    /search displays FlightSearchComponent (FlightModule)

    +
  • +
  • +

    /search/print/:flightId/:date displays FlightPrintComponent (FlightModule)

    +
  • +
  • +

    /search/details/:flightId/:date displays FlightDetailsComponent (FlightModule)

    +
  • +
  • +

    All other routes will display ErrorPage404 (AppModule)

    +
  • +
+
+
+
Listing 1. app-routing.module.ts
+
+
const routes: Routes = [
+  { path: '', redirectTo: 'search', pathMatch: 'full' },
+  { path: '**', component: ErrorPage404 }
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule]
+})
+export class AppRoutingModule { }
+
+
+
+
Listing 2. flight-search-routing.module.ts
+
+
const routes: Routes = [
+  {
+    path: 'search', children: [
+      { path: '', component: FlightSearchComponent },
+      { path: 'print/:flightId/:date', component: FlightPrintComponent },
+      { path: 'details/:flightId/:date', component: FlightDetailsComponent }
+    ]
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule],
+})
+export class FlightSearchRoutingModule { }
+
+
+
+ + + + + +
+ + +The import order inside AppModule is important. +AppRoutingModule needs to be imported after FlightModule. +
+
+
+
+
+

Example 2 - Lazy Loading

+
+
+

Lazy Loading is a good practice when the application has multiple feature areas and a user might not visit every dialog. +Or at least he might not need every dialog up front.

+
+
+

The following example will configure the same routes as example 1 but will lazy load FlightModule.

+
+
+
Listing 3. app-routing.module.ts
+
+
const routes: Routes = [
+  { path: 'search', loadChildren: 'app/flight-search/flight-search.module#FlightSearchModule' },
+  { path: '**', component: ErrorPage404 }
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule]
+})
+export class AppRoutingModule { }
+
+
+
+
Listing 4. flight-search-routing.module.ts
+
+
const routes: Routes = [
+  {
+    path: '', children: [
+      { path: '', component: FlightSearchComponent },
+      { path: 'print/:flightId/:date', component: FlightPrintComponent },
+      { path: 'details/:flightId/:date', component: FlightDetailsComponent }
+    ]
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule],
+})
+export class FlightSearchRoutingModule { }
+
+
+
+
+
+

Triggering Route Changes

+
+
+

With Angular you have two ways of triggering route changes.

+
+
+
    +
  1. +

    Declarative with bindings in component HTML templates

    +
  2. +
  3. +

    Programmatic with Angular Router service inside component classes

    +
  4. +
+
+
+

On the one hand, architecture-wise it is a much cleaner solution to trigger route changes in Smart Components. +This way you have every UI event that should trigger a navigation handled in one place - in a Smart Component. +It becomes very easy to look inside the code for every navigation, that can occur. +Refactoring is also much easier, as there are no navigation events "hidden" in the HTML templates

+
+
+

On the other hand, in terms of accessibility and SEO it is a better solution to rely on bindings in the view - e.g. by using Angular router-link directive. +This way screen readers and the Google crawler can move through the page easily.

+
+
+ + + + + +
+ + +If you do not have to support accessibility (screen readers, etc.) and to care about SEO (Google rank, etc.), +then you should aim for triggering navigation only in Smart Components. +
+
+
+
+Triggering navigation +
+
Figure 2. Triggering navigation
+
+
+
+
+

Guards

+
+
+

Guards are Angular services implemented on routes which determines whether a user can navigate to/from the route. There are examples below which will explain things better. We have the following types of Guards:

+
+
+
    +
  • +

    CanActivate: It is used to determine whether a user can visit a route. The most common scenario for this guard is to check if the user is authenticated. For example, if we want only logged in users to be able to go to a particular route, we will implement the CanActivate guard on this route.

    +
  • +
  • +

    CanActivateChild: Same as above, only implemented on child routes.

    +
  • +
  • +

    CanDeactivate: It is used to determine if a user can navigate away from a route. Most common example is when a user tries to go to a different page after filling up a form and does not save/submit the changes, we can use this guard to confirm whether the user really wants to leave the page without saving/submitting.

    +
  • +
  • +

    Resolve: For resolving dynamic data.

    +
  • +
  • +

    CanLoad: It is used to determine whether an Angular module can be loaded lazily. Example below will be helpful to understand it.

    +
  • +
+
+
+

Let’s have a look at some examples.

+
+
+
+
+

Example 1 - CanActivate and CanActivateChild guards

+
+ +
+
+
+

== CanActivate guard

+
+
+

As mentioned earlier, a guard is an Angular service and services are simply TypeScript classes. So we begin by creating a class. This class has to implement the CanActivate interface (imported from angular/router), and therefore, must have a canActivate function. The logic of this function determines whether the requested route can be navigated to or not. It returns either a Boolean value or an Observable or a Promise which resolves to a Boolean value. If it is true, the route is loaded, else not.

+
+
+
Listing 5. CanActivate example
+
+
...
+import {CanActivate} from "@angular/router";
+
+@Injectable()
+class ExampleAuthGuard implements CanActivate {
+  constructor(private authService: AuthService) {}
+
+  canActivate(route: ActivatedRouteSnapshot, state: RouterStateSnapshot) {
+	if (this.authService.isLoggedIn()) {
+      return true;
+    } else {
+	  window.alert('Please log in first');
+      return false;
+    }
+  }
+}
+
+
+
+

In the above example, let’s assume we have an AuthService which has an isLoggedIn() method which returns a Boolean value depending on whether the user is logged in. We use it to return true or false from the canActivate function. +The canActivate function accepts two parameters (provided by Angular). The first parameter of type ActivatedRouteSnapshot is the snapshot of the route the user is trying to navigate to (where the guard is implemented); we can extract the route parameters from this instance. The second parameter of type RouterStateSnapshot is a snapshot of the router state the user is trying to navigate to; we can fetch the URL from its url property.

+
+
+ + + + + +
+ + +We can also redirect the user to another page (maybe a login page) if the authService returns false. To do that, inject Router and use it’s navigate function to redirect to the appropriate page. +
+
+
+

Since it is a service, it needs to be provided in our module:

+
+
+
Listing 6. provide the guard in a module
+
+
@NgModule({
+  ...
+  providers: [
+    ...
+    ExampleAuthGuard
+  ]
+})
+
+
+
+

Now this guard is ready to use on our routes. We implement it where we define our array of routes in the application:

+
+
+
Listing 7. Implementing the guard
+
+
...
+const routes: Routes = [
+  { path: '', redirectTo: 'home', pathMatch: 'full' },
+  { path: 'home', component: HomeComponent },
+  { path: 'page1', component: Page1Component, canActivate: [ExampleAuthGuard] }
+];
+
+
+
+

As you can see, the canActivate property accepts an array of guards. So we can implement more than one guard on a route.

+
+
+
+
+

== CanActivateChild guard

+
+
+

To use the guard on nested (children) routes, we add it to the canActivateChild property like so:

+
+
+
Listing 8. Implementing the guard on child routes
+
+
...
+const routes: Routes = [
+  { path: '', redirectTo: 'home', pathMatch: 'full' },
+  { path: 'home', component: HomeComponent },
+  { path: 'page1', component: Page1Component, canActivateChild: [ExampleAuthGuard], children: [
+	{path: 'sub-page1', component: SubPageComponent},
+    {path: 'sub-page2', component: SubPageComponent}
+  ] }
+];
+
+
+
+
+
+

Example 2 - CanLoad guard

+
+
+

Similar to CanActivate, to use this guard we implement the CanLoad interface and overwrite it’s canLoad function. Again, this function returns either a Boolean value or an Observable or a Promise which resolves to a Boolean value. The fundamental difference between CanActivate and CanLoad is that CanLoad is used to determine whether an entire module can be lazily loaded or not. If the guard returns false for a module protected by CanLoad, the entire module is not loaded.

+
+
+
Listing 9. CanLoad example
+
+
...
+import {CanLoad, Route} from "@angular/router";
+
+@Injectable()
+class ExampleCanLoadGuard implements CanLoad {
+  constructor(private authService: AuthService) {}
+
+  canLoad(route: Route) {
+	if (this.authService.isLoggedIn()) {
+      return true;
+    } else {
+	  window.alert('Please log in first');
+      return false;
+    }
+  }
+}
+
+
+
+

Again, let’s assume we have an AuthService which has an isLoggedIn() method which returns a Boolean value depending on whether the user is logged in. The canLoad function accepts a parameter of type Route which we can use to fetch the path a user is trying to navigate to (using the path property of Route).

+
+
+

This guard needs to be provided in our module like any other service.

+
+
+

To implement the guard, we use the canLoad property:

+
+
+
Listing 10. Implementing the guard
+
+
...
+const routes: Routes = [
+  { path: 'home', component: HomeComponent },
+  { path: 'admin', loadChildren: 'app/admin/admin.module#AdminModule', canLoad: [ExampleCanLoadGuard] }
+];
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-testing.html b/docs/devon4ng/1.0/guide-testing.html new file mode 100644 index 00000000..aa05ebe8 --- /dev/null +++ b/docs/devon4ng/1.0/guide-testing.html @@ -0,0 +1,719 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Testing

+
+
+

This guide will cover the basics of testing logic inside your code with unit test cases. +The guide assumes that you are familiar with Angular CLI (see the guide)

+
+
+

For testing your Angular application with unit test cases there are two main strategies:

+
+
+
    +
  1. +

    Isolated unit test cases
    +Isolated unit tests examine an instance of a class all by itself without any dependence on Angular or any injected values. +The amount of code and effort needed to create such tests is minimal.

    +
  2. +
  3. +

    Angular Testing Utilities
    +Let you test components including their interaction with Angular. +The amount of code and effort needed to create such tests is a little higher.

    +
  4. +
+
+
+
+
+

Testing Concept

+
+
+

The following figure shows you an overview of the application architecture divided in testing areas.

+
+
+
+Testing Areas +
+
Figure 1. Testing Areas
+
+
+

There are three areas, which need to be covered by different testing strategies.

+
+
+
    +
  1. +

    Components:
    +Smart Components need to be tested because they contain view logic. +Also the interaction with 3rd party components needs to be tested. +When a 3rd party component changes with an upgrade a test will be failing and warn you, that there is something wrong with the new version. +Most of the time Dumb Components do not need to be tested because they mainly display data and do not contain any logic. +Smart Components are always tested with Angular Testing Utilities. +For example selectors, which select data from the store and transform it further, need to be tested.

    +
  2. +
  3. +

    Stores:
    +A store contains methods representing state transitions. +If these methods contain logic, they need to be tested. +Stores are always tested using Isolated unit tests.

    +
  4. +
  5. +

    Services:
    +Services contain Business Logic, which needs to be tested. +UseCase Services represent a whole business use case. +For instance this could be initializing a store with all the data that is needed for a dialog - loading, transforming, storing. +Often Angular Testing Utilities are the optimal solution for testing UseCase Services, because they allow for an easy stubbing of the back-end. +All other services should be tested with Isolated unit tests as they are much easier to write and maintain.

    +
  6. +
+
+
+
+
+

Testing Smart Components

+
+
+

Testing Smart Components should assure the following.

+
+
+
    +
  1. +

    Bindings are correct.

    +
  2. +
  3. +

    Selectors which load data from the store are correct.

    +
  4. +
  5. +

    Asynchronous behavior is correct (loading state, error state, "normal" state).

    +
  6. +
  7. +

    Oftentimes through testing one realizes, that important edge cases are forgotten.

    +
  8. +
  9. +

    Do these test become very complex, it is often an indicator for poor code quality in the component. +Then the implementation is to be adjusted / refactored.

    +
  10. +
  11. +

    When testing values received from the native DOM, you will test also that 3rd party libraries did not change with a version upgrade. +A failing test will show you what part of a 3rd party library has changed. +This is much better than the users doing this for you. +For example a binding might fail because the property name was changed with a newer version of a 3rd party library.

    +
  12. +
+
+
+

In the function beforeEach() the TestBed imported from Angular Testing Utilities needs to be initialized. +The goal should be to define a minimal test-module with TestBed. +The following code gives you an example.

+
+
+
Listing 1. Example test setup for Smart Components
+
+
describe('PrintFlightComponent', () => {
+
+  let fixture: ComponentFixture<PrintFlightComponent>;
+  let store: FlightStore;
+  let printServiceSpy: jasmine.SpyObj<FlightPrintService>;
+
+  beforeEach(() => {
+    const urlParam = '1337';
+    const activatedRouteStub = { params: of({ id: urlParam }) };
+    printServiceSpy = jasmine.createSpyObj('FlightPrintService', ['initializePrintDialog']);
+    TestBed.configureTestingModule({
+      imports: [
+        TranslateModule.forRoot(),
+        RouterTestingModule
+      ],
+      declarations: [
+        PrintFlightComponent,
+        PrintContentComponent,
+        GeneralInformationPrintPanelComponent,
+        PassengersPrintPanelComponent
+      ],
+      providers: [
+        FlightStore,
+        {provide: FlightPrintService, useValue: printServiceSpy},
+        {provide: ActivatedRoute, useValue: activatedRouteStub}
+      ]
+    });
+    fixture = TestBed.createComponent(PrintFlightComponent);
+    store = fixture.debugElement.injector.get(FlightStore);
+    fixture.detectChanges();
+  });
+
+  // ... test cases
+})
+
+
+
+

It is important:

+
+
+
    +
  • +

    Use RouterTestingModule instead of RouterModule

    +
  • +
  • +

    Use TranslateModule.forRoot() without translations +This way you can test language-neutral without translation marks.

    +
  • +
  • +

    Do not add a whole module from your application - in declarations add the tested Smart Component with all its Dumb Components

    +
  • +
  • +

    The store should never be stubbed. +If you need a complex test setup, just use the regular methods defined on the store.

    +
  • +
  • +

    Stub all services used by the Smart Component. +These are mostly UseCase services. +They should not be tested by these tests. +Only the correct call to their functions should be assured. +The logic inside the UseCase services is tested with separate tests.

    +
  • +
  • +

    detectChanges() performs an Angular Change Detection cycle (Angular refreshes all the bindings present in the view)

    +
  • +
  • +

    tick() performs a virtual macro task, tick(1000) is equal to the virtual passing of 1s.

    +
  • +
+
+
+

The following test cases show the testing strategy in action.

+
+
+
Listing 2. Example
+
+
it('calls initializePrintDialog for url parameter 1337', fakeAsync(() => {
+  expect(printServiceSpy.initializePrintDialog).toHaveBeenCalledWith(1337);
+}));
+
+it('creates correct loading subtitle', fakeAsync(() => {
+  store.setPrintStateLoading(123);
+  tick();
+  fixture.detectChanges();
+
+  const subtitle = fixture.debugElement.query(By.css('app-header-element .print-header-container span:last-child'));
+  expect(subtitle.nativeElement.textContent).toBe('PRINT_HEADER.FLIGHT STATE.IS_LOADING');
+}));
+
+it('creates correct subtitle for loaded flight', fakeAsync(() => {
+  store.setPrintStateLoadedSuccess({
+    id: 123,
+    description: 'Description',
+    iata: 'FRA',
+    name: 'Frankfurt',
+    // ...
+  });
+  tick();
+  fixture.detectChanges();
+
+  const subtitle = fixture.debugElement.query(By.css('app-header-element .print-header-container span:last-child'));
+  expect(subtitle.nativeElement.textContent).toBe('PRINT_HEADER.FLIGHT "FRA (Frankfurt)" (ID: 123)');
+}));
+
+
+
+

The examples show the basic testing method

+
+
+
    +
  • +

    Set the store to a well-defined state

    +
  • +
  • +

    check if the component displays the correct values

    +
  • +
  • +

    …​ via checking values inside the native DOM.

    +
  • +
+
+
+
+
+

Testing state transitions performed by stores

+
+
+

Stores are always tested with Isolated unit tests.

+
+
+

Actions triggered by dispatchAction() calls are asynchronously performed to alter the state. +A good solution to test such a state transition is to use the done callback from Jasmine.

+
+
+
Listing 3. Example for testing a store
+
+
let sut: FlightStore;
+
+beforeEach(() => {
+  sut = new FlightStore();
+});
+
+it('setPrintStateLoading sets print state to loading', (done: Function) => {
+  sut.setPrintStateLoading(4711);
+
+  sut.state$.pipe(first()).subscribe(result => {
+    expect(result.print.isLoading).toBe(true);
+    expect(result.print.loadingId).toBe(4711);
+    done();
+  });
+});
+
+it('toggleRowChecked adds flight with given id to selectedValues Property', (done: Function) => {
+  const flight: FlightTO = {
+    id: 12
+    // dummy data
+  };
+  sut.setRegisterabgleichListe([flight]);
+  sut.toggleRowChecked(12);
+
+  sut.state$.pipe(first()).subscribe(result => {
+    expect(result.selectedValues).toContain(flight);
+    done();
+  });
+});
+
+
+
+
+
+

Testing services

+
+
+

When testing services both strategies - Isolated unit tests and Angular Testing Utilities - are valid options.

+
+
+

The goal of such tests are

+
+
+
    +
  • +

    assuring the behavior for valid data.

    +
  • +
  • +

    assuring the behavior for invalid data.

    +
  • +
  • +

    documenting functionality

    +
  • +
  • +

    save performing refactoring

    +
  • +
  • +

    thinking about edge case behavior while testing

    +
  • +
+
+
+

For simple services Isolated unit tests can be written. +Writing these tests takes less effort and they can be written very fast.

+
+
+

The following listing gives an example of such tests.

+
+
+
Listing 4. Testing simple services with Isolated unit tests
+
+
let sut: IsyDatePipe;
+
+beforeEach(() => {
+  sut = new IsyDatePipe();
+});
+
+it('transform should return empty string if input value is empty', () => {
+  expect(sut.transform('')).toBe('');
+});
+
+it('transform should return empty string if input value is null', () => {
+  expect(sut.transform(undefined)).toBe('');
+});
+
+// ...more tests
+
+
+
+

For testing Use Case services the Angular Testing Utilities should be used. +The following listing gives an example.

+
+
+
Listing 5. Test setup for testing use case services with Angular Testing Utilities
+
+
let sut: FlightPrintService;
+let store: FlightStore;
+let httpController: HttpTestingController;
+let flightCalculationServiceStub: jasmine.SpyObj<FlightCalculationService>;
+const flight: FlightTo = {
+  // ... valid dummy data
+};
+
+beforeEach(() => {
+  flightCalculationServiceStub = jasmine.createSpyObj('FlightCalculationService', ['getFlightType']);
+  flightCalculationServiceStub.getFlightType.and.callFake((catalog: string, type: string, key: string) => of(`${key}_long`));
+  TestBed.configureTestingModule({
+    imports: [
+      HttpClientTestingModule,
+      RouterTestingModule,
+    ],
+    providers: [
+      FlightPrintService,
+      FlightStore,
+      FlightAdapter,
+      {provide: FlightCalculationService, useValue: flightCalculationServiceStub}
+    ]
+  });
+
+  sut = TestBed.get(FlightPrintService);
+  store = TestBed.get(FlightStore);
+  httpController = TestBed.get(HttpTestingController);
+});
+
+
+
+

When using TestBed, it is important

+
+
+
    +
  • +

    to import HttpClientTestingModule for stubbing the back-end

    +
  • +
  • +

    to import RouterTestingModule for stubbing the Angular router

    +
  • +
  • +

    not to stub stores, adapters and business services

    +
  • +
  • +

    to stub services from libraries like FlightCalculationService - the correct implementation of libraries should not be tested by these tests.

    +
  • +
+
+
+

Testing back-end communication looks like this:

+
+
+
Listing 6. Testing back-end communication with Angular HttpTestingController
+
+
it('loads flight if not present in store', fakeAsync(() => {
+  sut.initializePrintDialog(1337);
+  const processRequest = httpController.expectOne('/path/to/flight');
+  processRequest.flush(flight);
+
+  httpController.verify();
+}));
+
+it('does not load flight if present in store', fakeAsync(() => {
+  const storedFlight = {...flight, id: 4711};
+  store.setRegisterabgleich(storedFlight);
+
+  sut.initializePrintDialog(4711);
+  httpController.expectNone('/path/to/flight');
+
+  httpController.verify();
+}));
+
+
+
+

The first test assures a correct XHR request is performed if initializePrintDialog() is called and no data is in the store. +The second test assures no XHR request is performed if the needed data is already in the store.

+
+
+

The next steps are checks for the correct implementation of logic.

+
+
+
Listing 7. Example testing a Use Case service
+
+
it('creates flight destination for valid key in svz', fakeAsync(() => {
+  const flightTo: FlightTo = {
+    ...flight,
+    id: 4712,
+    profile: '77'
+  };
+  store.setFlight(flightTo);
+  let result: FlightPrintContent|undefined;
+
+  sut.initializePrintDialog(4712);
+  store.select(s => s.print.content).subscribe(content => result = content);
+  tick();
+
+  expect(result!.destination).toBe('77_long (ID: 77)');
+}));
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-update-angular-cli.html b/docs/devon4ng/1.0/guide-update-angular-cli.html new file mode 100644 index 00000000..4e251a5c --- /dev/null +++ b/docs/devon4ng/1.0/guide-update-angular-cli.html @@ -0,0 +1,346 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Update Angular CLI

+
+ +
+
+
+

Angular CLI common issues

+
+
+

There are constant updates for the official Angular framework dependencies. These dependencies are directly related with the Angular CLI package. Since this package comes installed by default inside the devonfw distribution folder for Windows OS and the distribution is updated every few months it needs to be updated in order to avoid known issues.

+
+
+
+
+

Angular CLI update guide

+
+
+

For Linux users is as easy as updating the global package:

+
+
+
+
$ npm uninstall -g @angular/cli
+$ npm install -g @angular/cli
+
+
+
+

For Windows users the process is only a bit harder. Open the devonfw bundled console and do as follows:

+
+
+
+
$ cd [devonfw_dist_folder]
+$ cd software/nodejs
+$ npm uninstall @angular/cli --no-save
+$ npm install @angular/cli --no-save
+
+
+
+

After following these steps you should have the latest Angular CLI version installed in your system. In order to check it run in the distribution console:

+
+
+ + + + + +
+ + +At the time of this writing, the Angular CLI is at 1.7.4 version. +
+
+
+
+
λ ng version
+
+     _                      _                 ____ _     ___
+    / \   _ __   __ _ _   _| | __ _ _ __     / ___| |   |_ _|
+   / △ \ | '_ \ / _` | | | | |/ _` | '__|   | |   | |    | |
+  / ___ \| | | | (_| | |_| | | (_| | |      | |___| |___ | |
+ /_/   \_\_| |_|\__, |\__,_|_|\__,_|_|       \____|_____|___|
+                |___/
+
+
+Angular CLI: 7.2.3
+Node: 10.13.0
+OS: win32 x64
+Angular:
+...
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-upgrade-devon4ng.html b/docs/devon4ng/1.0/guide-upgrade-devon4ng.html new file mode 100644 index 00000000..b07d59ef --- /dev/null +++ b/docs/devon4ng/1.0/guide-upgrade-devon4ng.html @@ -0,0 +1,441 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Upgrade devon4ng Angular and Ionic/Angular applications

+
+
+

Angular CLI provides a powerful tool to upgrade Angular based applications to the current stable release of the core framework.

+
+
+

This tool is ng update. It will not only upgrade dependencies and their related ones but also will perform some fixes in your code if available thanks to the provided schematics. It will check even if the update is not possible as there is another library or libraries that are not compatible with the versions of the upgraded dependencies. In this case it will keep your application untouched.

+
+
+ + + + + +
+ + +The repository must be in a clean state before executing a ng update. So, remember to commit your changes first. +
+
+
+
+
+

Basic usage

+
+
+

In order to perform a basic upgrade we will execute:

+
+
+
+
$ ng update @angular/cli @angular/core
+
+
+
+
+
+

Upgrade to new Angular version

+
+
+

The process will be the same, but first we need to make sure that our devon4ng application is on the latest version of Angular 8, so the ng update command can perform the upgrade not only in the dependencies but also by making code changes to reflect the new features and fixes.

+
+
+
    +
  • +

    First, upgrade to latest Angular 9 version:

    +
  • +
+
+
+
+
$ ng update @angular/cli@9 @angular/core@9
+
+
+
+

Optionally the flag -C can be added to previous command to make a commit automatically. This is also valid for the next steps.

+
+
+
    +
  • +

    Then, upgrade Angular:

    +
  • +
+
+
+
+
$ ng update @angular/cli @angular/core
+
+
+
+
    +
  • +

    In case you use Angular Material:

    +
  • +
+
+
+
+
$ ng update @angular/material
+
+
+
+
    +
  • +

    If the application depends on third party libraries, the new tool ngcc can be run to make them compatible with the new Ivy compiler. In this case it is recommended to include a postinstall script in the package.json:

    +
  • +
+
+
+
+
{
+  "scripts": {
+    "postinstall": "ngcc --properties es2015 browser module main --first-only --create-ivy-entry-points"
+  }
+}
+
+
+ +
+

Important use cases:

+
+
+
    +
  • +

    To update to the next beta or pre-release version, use the --next=true option.

    +
  • +
  • +

    To update from one major version to another, use the format ng update @angular/cli@^<major_version> @angular/core@^<major_version>.

    +
  • +
  • +

    In case your Angular application uses @angular/material include it in the first command:

    +
    +
    +
    $ ng update @angular/cli @angular/core @angular/material
    +
    +
    +
  • +
+
+
+
+
+

Ionic/Angular applications

+
+
+

Just following the same procedure we can upgrade Angular applications, but we must take care of important specific Ionic dependencies:

+
+
+
+
$ ng update @angular/cli @angular/core @ionic/angular @ionic/angular-toolkit [@ionic/...]
+
+
+
+
+
+

Other dependencies

+
+
+

Every application will make use of different dependencies. Angular CLI ng upgrade will also take care of these ones. For example, if you need to upgrade @capacitor you will perform:

+
+
+
+
$ ng update @capacitor/cli @capacitor/core [@capacitor/...]
+
+
+
+

Another example could be that you need to upgrade @ngx-translate packages. As always in this case you will execute:

+
+
+
+
$ ng update @ngx-translate/core @ngx-translate/http-loader
+
+
+
+
+
+

Angular Update Guide online tool

+
+
+

It is recommended to use the Angular Update Guide tool at https://update.angular.io/ that will provide the necessary steps to upgrade any Angular application depending on multiple criteria.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-working-with-angular-cli.html b/docs/devon4ng/1.0/guide-working-with-angular-cli.html new file mode 100644 index 00000000..48aa1b89 --- /dev/null +++ b/docs/devon4ng/1.0/guide-working-with-angular-cli.html @@ -0,0 +1,585 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Working with Angular CLI

+
+
+

Angular CLI provides a facade for building, testing, linting, debugging and generating code. +Under the hood Angular CLI uses specific tools to achieve these tasks. +The user does not need to maintain them and can rely on Angular to keep them up to date and maybe switch to other tools which come up in the future.

+
+
+

The Angular CLI provides a wiki with common tasks you encounter when working on applications with the Angular CLI. +The Angular CLI Wiki can be found here.

+
+
+

In this guide we will go through the most important tasks. +To go into more details, please visit the Angular CLI wiki.

+
+
+
+
+

Installing Angular CLI

+
+
+

Angular CLI should be added as global and local dependency. +The following commands add Angular CLI as global Dependency.

+
+
+

yarn command

+
+
+
+
yarn global add @angular/cli
+
+
+
+

npm command

+
+
+
+
npm install -g @angular/cli
+
+
+
+

You can check a successful installation with ng --version. +This should print out the version installed.

+
+
+
+Printing Angular CLI Version +
+
Figure 1. Printing Angular CLI Version
+
+
+
+
+

Running a live development server

+
+
+

The Angular CLI can be used to start a live development server. +First your application will be compiled and then the server will be started. +If you change the code of a file, the server will reload the displayed page. +Run your application with the following command:

+
+
+
+
ng serve -o
+
+
+
+
+
+

Running Unit Tests

+
+
+

All unit tests can be executed with the command:

+
+
+
+
ng test
+
+
+
+

To make a single run and create a code coverage file use the following command:

+
+
+
+
ng test -sr -cc
+
+
+
+ + + + + +
+ + +You can configure the output format for code coverage files to match your requirements in the file karma.conf.js which can be found on toplevel of your project folder. +For instance, this can be useful for exporting the results to a SonarQube. +
+
+
+
+
+

Linting the code quality

+
+
+

You can lint your files with the command

+
+
+
+
ng lint --type-check
+
+
+
+ + + + + +
+ + +You can adjust the linting rules in the file tslint.json which can be found on toplevel of your project folder. +
+
+
+
+
+

Generating Code

+
+ +
+
+
+

Creating a new Angular CLI project

+
+
+

For creating a new Angular CLI project the command ng new is used.

+
+
+

The following command creates a new application named my-app.

+
+
+
+
ng new my-app
+
+
+
+
+
+

Creating a new feature module

+
+
+

A new feature module can be created via the ng generate module command.

+
+
+

The following command generates a new feature module named todo.

+
+
+
+
ng generate module todo
+
+
+
+
+Generate a module with Angular CLI +
+
Figure 2. Generate a module with Angular CLI
+
+
+ + + + + +
+ + +The created feature module needs to be added to the AppModule by hand. +Other option would be to define a lazy route in AppRoutingModule to make this a lazy loaded module. +
+
+
+
+
+

Creating a new component

+
+
+

To create components the command ng generate component can be used.

+
+
+

The following command will generate the component todo-details inside the components layer of todo module. +It will generate a class, an HTML file, a CSS file and a test file. +Also, it will register this component as declaration inside the nearest module - this is TodoModule.

+
+
+
+
ng generate component todo/components/todo-details
+
+
+
+
+Generate a component with Angular CLI +
+
Figure 3. Generate a component with Angular CLI
+
+
+ + + + + +
+ + +If you want to export the component, you have to add the component to exports array of the module. +This would be the case if you generate a component inside shared module. +
+
+
+
+
+

Configuring an Angular CLI project

+
+
+

Inside an Angular CLI project the file .angular-cli.json can be used to configure the Angular CLI.

+
+
+

The following options are very important to understand.

+
+
+
    +
  • +

    The property defaults can be used to change the default style extension. +The following settings will make the Angular CLI generate .less files, when a new component is generated.

    +
  • +
+
+
+
+
"defaults": {
+  "styleExt": "less",
+  "component": {}
+}
+
+
+
+
    +
  • +

    The property apps contains all applications maintained with Angular CLI. +Most of the time you will have only one.

    +
    +
      +
    • +

      assets configures all the static files, that the application needs - this can be images, fonts, json files, etc. +When you add them to assets the Angular CLI will put these files to the build target and serve them while debugging. +The following will put all files in /i18n to the output folder /i18n

      +
    • +
    +
    +
  • +
+
+
+
+
"assets": [
+  { "glob": "**/*.json", "input": "./i18n", "output": "./i18n" }
+]
+
+
+
+
    +
  • +

    styles property contains all style files that will be globally available. +The Angular CLI will create a styles bundle that goes directly into index.html with it. +The following will make all styles in styles.less globally available.

    +
  • +
+
+
+
+
"styles": [
+  "styles.less"
+]
+
+
+
+
    +
  • +

    environmentSource and environments are used to configure configuration with the Angular CLI. +Inside the code always the file specified in environmentSource will be referenced. +You can define different environments - eg. production, staging, etc. - which you list in environments. +At compile time the Angular CLI will override all values in environmentSource with the values from the matching environment target. +The following code will build the application for the environment staging.

    +
  • +
+
+
+
+
ng build --environment=staging
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/guide-yarn-2-support.html b/docs/devon4ng/1.0/guide-yarn-2-support.html new file mode 100644 index 00000000..ff0f8b1f --- /dev/null +++ b/docs/devon4ng/1.0/guide-yarn-2-support.html @@ -0,0 +1,427 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Yarn 2

+
+
+

Yarn v2 is a very different software from the v1. The following list contains the main new features:

+
+ +
+

Please, read them carefully to decide if your current project is suitable to use Yarn 2 as package manager.

+
+
+ + + + + +
+ + +Some features are still experimental, so please do not use them in production environments. +
+
+
+

More info at https://yarnpkg.com/

+
+
+
+
+

Global Install

+
+
+

Installing Yarn 2.x globally is discouraged as Yarn team is moving to a per-project install strategy. We advise you to keep Yarn 1.x (Classic) as your global binary by installing it via the instructions you can find here.

+
+
+

Once you’ve followed the instructions (running yarn --version from your home directory should yield something like 1.22.0), go to the next section to see how to enable Yarn 2 on your project.

+
+
+
+
+

Per-project install

+
+
+

Follow these instructions to update your current devon4ng project to Yarn 2:

+
+
+
    +
  1. +

    Follow the global install instructions.

    +
  2. +
  3. +

    Move into your project folder:

    +
    +
    +
    cd ~/path/to/project
    +
    +
    +
  4. +
  5. +

    Run the following command:

    +
    +
    +
    yarn policies set-version berry # below v1.22
    +yarn set version berry          # on v1.22+
    +
    +
    +
  6. +
  7. +

    Since Angular CLI still is not fully supported with the new Yarn architecture as it is not compatible with PnP it is necessary to include the node-modules plugin adding the following line in the .yarnrc.yml file:

    +
    +
    +
    nodeLinker: node-modules
    +
    +
    +
  8. +
  9. +

    Commit the .yarn and .yarnrc.yml changes

    +
  10. +
  11. +

    Run again yarn install.

    +
  12. +
+
+
+ + + + + +
+ + +For more advanced migration topics please refer to https://yarnpkg.com/advanced/migration +
+
+
+
+
+

Which files should be added to gitignore file?

+
+
+

If you’re using Zero-Installs:

+
+
+
+
.yarn/*
+!.yarn/cache
+!.yarn/releases
+!.yarn/plugins
+
+
+
+

If you’re not using Zero-Installs:

+
+
+
+
.yarn/*
+!.yarn/releases
+!.yarn/plugins
+.pnp.*
+
+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/home.html b/docs/devon4ng/1.0/home.html new file mode 100644 index 00000000..bb3cba66 --- /dev/null +++ b/docs/devon4ng/1.0/home.html @@ -0,0 +1,446 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4ng

+
+
+

This guide describes an application architecture for web client development with Angular.

+
+
+
+
+

Motivation

+
+
+

Front-end development is a very difficult task since there are a lot of different frameworks, patterns and practices nowadays. For that reason, in devonfw we decided to make use of Angular since it is a full front-end framework that includes almost all the different patterns and features that any SPA may need and provides a well defined architecture to development, build and deploy.

+
+
+

The idea with devon4ng is to define an architecture which is a compromise between, on the one hand, leveraging the best practices and latest trends like reactive style development, on the other hand, providing a short on-boarding time while still using an architecture that helps us scale and be productive at the same time.

+
+
+

At the same time devon4ng aims to help developers to solve common problems that appear in many projects and provide samples and blueprints to show how to apply these solutions in real situations.

+
+
+
+
+

Contents

+
+ +
+

This section introduces in an easy way the main principles and guidelines based on Angular Style Guide.

+
+ +
+

The goal of this topic is to support the non-functional requirements for the client, i.e. mostly maintainability, scalability, efficiency and portability. As such it provides a component-oriented architecture following the same principles listed already in the devonfw architecture overview.

+
+
+
+
+

Layers

+
+
+

This section provides a condensed explanation about the different layers a good Angular application must provide.

+
+ +
+
+
+

Guides

+
+
+

This section introduces concepts to help developers with the tooling and package managers.

+
+ +
+
+
+

Angular

+
+
+

This is the main section of the documentation, where the developer will find guidelines for accessibility, how to use the Angular toolchain, how to refactor components, create libraries and, in general, maintain Angular applications. Last but not least, developers will also find solutions to common problems many of the Angular projects may have.

+
+
+ + + + + +
+ + +All the different topics are demonstrated in the samples folder with a small application. +
+
+ +
+
+
+

Ionic

+
+
+

As part of the devon4ng stack, we include a small section to explain how to develop hybrid mobile Ionic/Angular applications and create PWAs with this UI library. As the previous section, the contents are demonstrated in the samples folder.

+
+ +
+
+
+

Layouts

+
+
+

Any SPA application must have a layout. So, the purpose of this section is to explain the Angular Material approach.

+
+ +
+
+
+

NgRx

+
+
+

State Management is a big topic in big front-end applications. This section explains the fundamentals of the industry standard library NgRx, showing its main components.

+
+ +
+
+
+

Cookbook

+
+
+

The Cookbook section aims to provide solutions to cross-topic challenges that at this moment do not fit in the previous sections. As the Angular section, some of the topics are demonstrated with a sample located in the samples folder.

+
+ +
+
+
+

devon4ng templates

+
+
+

In order to support CobiGen generation tool for Angular applications, devon4ng demos realization and provide more opinionated samples, the following templates are also included in devon4ng contents:

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/index.html b/docs/devon4ng/1.0/index.html new file mode 100644 index 00000000..dcc86a2c --- /dev/null +++ b/docs/devon4ng/1.0/index.html @@ -0,0 +1,550 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4ng

+
+
+

This guide describes an application architecture for web client development with Angular.

+
+
+
+
+

Motivation

+
+
+

Front-end development is a very difficult task since there are a lot of different frameworks, patterns and practices nowadays. For that reason, in devonfw we decided to make use of Angular since it is a full front-end framework that includes almost all the different patterns and features that any SPA may need and provides a well defined architecture to development, build and deploy.

+
+
+

The idea with devon4ng is to define an architecture which is a compromise between, on the one hand, leveraging the best practices and latest trends like reactive style development, on the other hand, providing a short on-boarding time while still using an architecture that helps us scale and be productive at the same time.

+
+
+

At the same time devon4ng aims to help developers to solve common problems that appear in many projects and provide samples and blueprints to show how to apply these solutions in real situations.

+
+
+
+
+

Contents

+
+ +
+

This section introduces in an easy way the main principles and guidelines based on Angular Style Guide.

+
+ +
+

The goal of this topic is to support the non-functional requirements for the client, i.e. mostly maintainability, scalability, efficiency and portability. As such it provides a component-oriented architecture following the same principles listed already in the devonfw architecture overview.

+
+
+
+
+

Layers

+
+
+

This section provides a condensed explanation about the different layers a good Angular application must provide.

+
+ +
+
+
+

Guides

+
+
+

This section introduces concepts to help developers with the tooling and package managers.

+
+ +
+
+
+

Angular

+
+
+

This is the main section of the documentation, where the developer will find guidelines for accessibility, how to use the Angular toolchain, how to refactor components, create libraries and, in general, maintain Angular applications. Last but not least, developers will also find solutions to common problems many of the Angular projects may have.

+
+
+ + + + + +
+ + +All the different topics are demonstrated in the samples folder with a small application. +
+
+ +
+
+
+

Ionic

+
+
+

As part of the devon4ng stack, we include a small section to explain how to develop hybrid mobile Ionic/Angular applications and create PWAs with this UI library. As the previous section, the contents are demonstrated in the samples folder.

+
+ +
+
+
+

Layouts

+
+
+

Any SPA application must have a layout. So, the purpose of this section is to explain the Angular Material approach.

+
+ +
+
+
+

NgRx

+
+
+

State Management is a big topic in big front-end applications. This section explains the fundamentals of the industry standard library NgRx, showing its main components.

+
+ +
+
+
+

Cookbook

+
+
+

The Cookbook section aims to provide solutions to cross-topic challenges that at this moment do not fit in the previous sections. As the Angular section, some of the topics are demonstrated with a sample located in the samples folder.

+
+ +
+
+
+

devon4ng templates

+
+
+

In order to support CobiGen generation tool for Angular applications, devon4ng demos realization and provide more opinionated samples, the following templates are also included in devon4ng contents:

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/master-devon4ng.html b/docs/devon4ng/1.0/master-devon4ng.html new file mode 100644 index 00000000..29ae5e29 --- /dev/null +++ b/docs/devon4ng/1.0/master-devon4ng.html @@ -0,0 +1,11187 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Angular

+
+ +
+
+
+

Introduction

+
+
+

Unresolved include directive in modules/ROOT/pages/master-devon4ng.adoc - include::home.adoc[]

+
+
+
+
+

Architecture

+
+ +
+

Architecture

+
+

The following principles and guidelines are based on Angular Style Guide - especially Angular modules (see Angular Docs). +It extends those where additional guidance is needed to define an architecture which is:

+
+
+
    +
  • +

    maintainable across applications and teams

    +
  • +
  • +

    easy to understand, especially when coming from a classic Java/.Net perspective - so whenever possible the same principles apply both to the server and the client

    +
  • +
  • +

    pattern based to solve common problems

    +
  • +
  • +

    based on best of breed solutions coming from open source and Capgemini project experiences

    +
  • +
  • +

    gives as much guidance as necessary and as little as possible

    +
  • +
+
+
+
+

Overview

+
+

When using Angular the web client architecture is driven by the framework in a certain way Google and the Angular community think about web client architecture. +Angular gives an opinion on how to look at architecture. +It is a component based like devon4j but uses different terms which are common language in web application development. +The important term is module which is used instead of component. The primary reason is the naming collision with the Web Components standard (see Web Components).
+To clarify this:

+
+
+
    +
  • +

    A component describes an UI element containing HTML, CSS and JavaScript - structure, design and logic encapsulated inside a reusable container called component.

    +
  • +
  • +

    A module describes an applications feature area. The application flight-app may have a module called booking.

    +
  • +
+
+
+

An application developed using Angular consists of multiple modules. +There are feature modules and special modules described by the Angular Style Guide - core and shared. +Angular or Angular Style Guide give no guidance on how to structure a module internally. +This is where this architecture comes in.

+
+
+
+

Layers

+
+

The architecture describes two layers. The terminology is based on common language in web development.

+
+
+
+Architecture - Layers +
+
Figure 1. Layers
+
+
+
    +
  • +

    Components Layer encapsulates components which present the current application state. +Components are separated into Smart and Dumb Components. +The only logic present is view logic inside Smart Components.

    +
  • +
  • +

    Services Layer is more or less what we call 'business logic layer' on the server side. +The layer defines the applications state, the transitions between state and classic business logic. +Stores contain application state over time to which Smart Components subscribe to. +Adapters are used to perform XHR, WebSocket connections, etc. +The business model is described inside the module. +Use case services perform business logic needed for use cases. +A use case services interacts with the store and adapters. +Methods of use case services are the API for Smart Components. +Those methods are Actions in reactive terminology.

    +
  • +
+
+
+
+

Modules

+
+

Angular requires a module called app which is the main entrance to an application at runtime - this module gets bootstrapped. +Angular Style Guide defines feature modules and two special modules - core and shared.

+
+
+
+Architecture - Modules +
+
Figure 2. Modules
+
+
+

A feature module is basically a vertical cut through both layers. +The shared module consists of components shared across feature modules. +The core module holds services shared across modules. +So core module is a module only having a services layer +and shared module is a module only having a components layer.

+
+ +
+
+

Meta Architecture

+ +
+
+

Introduction

+ +
+
+

Purpose of this document

+
+

In our business applications, the client easily gets underestimated. Sometimes the client is more complex to develop and design than the server. While the server architecture is nowadays easily to agree as common sense, for clients this is not as obvious and stable especially as it typically depends on the client framework used. Finding a concrete architecture applicable for all clients may therefore be difficult to accomplish.

+
+
+

This document tries to define on a high abstract level, a reference architecture which is supposed to be a mental image and frame for orientation regarding the evaluation and appliance of different client frameworks. As such it defines terms and concepts required to be provided for in any framework and thus gives a common ground of understanding for those acquainted with the reference architecture. This allows better comparison between the various frameworks out there, each having their own terms for essentially the same concepts. It also means that for each framework we need to explicitly map how it implements the concepts defined in this document.

+
+
+

The architecture proposed herein is neither new nor was it developed from scratch. Instead it is the gathered and consolidated knowledge and best practices of various projects (s. References).

+
+
+
+

Goal of the Client Architecture

+
+

The goal of the client architecture is to support the non-functional requirements for the client, i.e. mostly maintainability, scalability, efficiency and portability. As such it provides a component-oriented architecture following the same principles listed already in the devonfw architecture overview. Furthermore it ensures a homogeneity regarding how different concrete UI technologies are being applied in the projects, solving the common requirements in the same way.

+
+
+
+

Architecture Views

+
+

As for the server we distinguish between the business and the technical architecture. Where the business architecture is different from project to project and relates to the concrete design of dialog components given concrete requirements, the technical architecture can be applied to multiple projects.

+
+
+

The focus of this document is to provide a technical reference architecture on the client on a very abstract level defining required layers and components. How the architecture is implemented has to be defined for each UI technology.

+
+
+

The technical infrastructure architecture is out of scope for this document and although it needs to be considered, the concepts of the reference architecture should work across multiple TI architecture, i.e. native or web clients.

+
+
+
+

devonfw Reference Client Architecture

+
+

The following gives a complete overview of the proposed reference architecture. It will be built up incrementally in the following sections.

+
+
+
+Complete Client Architecture Overview +
+
+
+

Figure 1 Overview

+
+
+
+

Client Architecture

+
+

On the highest level of abstraction we see the need to differentiate between dialog components and their container they are managed in, as well as the access to the application server being the back-end for the client (e.g. an devon4j instance). This section gives a summary of these components and how they relate to each other. Detailed architectures for each component will be supplied in subsequent sections

+
+
+
+Client Architecture Overview +
+
+
+

Figure 2 Overview of Client Architecture

+
+
+
+

== Dialog Component

+
+

A dialog component is a logical, self-contained part of the user interface. It accepts user input and actions and controls communication with the user. Dialog components use the services provided by the dialog container in order to execute the business logic. They are self-contained, i.e. they possess their own user interface together with the associated logic, data and states.

+
+
+
    +
  • +

    Dialog components can be composed of other dialog components forming a hierarchy

    +
  • +
  • +

    Dialog components can interact with each other. This includes communication of a parent to its children, but also between components independent of each other regarding the hierarchy.

    +
  • +
+
+
+
+

== Dialog Container

+
+

Dialog components need to be managed in their life-cycle and how they can be coupled to each other. The dialog container is responsible for this along with the following:

+
+
+
    +
  • +

    Bootstrapping the client application and environment

    +
    +
      +
    • +

      Configuration of the client

      +
    • +
    • +

      Initialization of the application server access component

      +
    • +
    +
    +
  • +
  • +

    Dialog Component Management

    +
    +
      +
    • +

      Controlling the life-cycle

      +
    • +
    • +

      Controlling the dialog flow

      +
    • +
    • +

      Providing means of interaction between the dialogs

      +
    • +
    • +

      Providing application server access

      +
    • +
    • +

      Providing services to the dialog components
      +(e.g. printing, caching, data storage)

      +
    • +
    +
    +
  • +
  • +

    Shutdown of the application

    +
  • +
+
+
+
+

== Application Server Access

+
+

Dialogs will require a back-end application server in order to execute their business logic. Typically in an devonfw application the service layer will provide interfaces for the functionality exposed to the client. These business oriented interfaces should also be present on the client backed by a proxy handling the concrete call of the server over the network. This component provides the set of interfaces as well as the proxy.

+
+
+
+

Dialog Container Architecture

+
+

The dialog container can be further structured into the following components with their respective tasks described in own sections:

+
+
+
+Dialog Container Architecture Overview +
+
+
+

Figure 3 Dialog Container Architecture

+
+
+
+

== Application

+
+

The application component represents the overall client in our architecture. It is responsible for bootstrapping all other components and connecting them with each other. As such it initializes the components below and provides an environment for them to work in.

+
+
+
+

== Configuration Management

+
+

The configuration management manages the configuration of the client, so the client can be deployed in different environments. This includes configuration of the concrete application server to be called or any other environment-specific property.

+
+
+
+

== Dialog Management

+
+

The Dialog Management component provides the means to define, create and destroy dialog components. It therefore offers basic life-cycle capabilities for a component. In addition it also allows composition of dialog components in a hierarchy. The life-cycle is then managed along the hierarchy, meaning when creating/destroying a parent dialog, this affects all child components, which are created/destroyed as well.

+
+
+
+

== Service Registry

+
+

Apart from dialog components, a client application also consists of services offered to these. A service can thereby encompass among others:

+
+
+
    +
  • +

    Access to the application server

    +
  • +
  • +

    Access to the dialog container functions for managing dialogs or accessing the configuration

    +
  • +
  • +

    Dialog independent client functionality such as Printing, Caching, Logging, Encapsulated business logic such as tax calculation

    +
  • +
  • +

    Dialog component interaction

    +
  • +
+
+
+

The service registry offers the possibility to define, register and lookup these services. Note that these services could be dependent on the dialog hierarchy, meaning different child instances could obtain different instances / implementations of a service via the service registry, depending on which service implementations are registered by the parents.

+
+
+

Services should be defined as interfaces allowing for different implementations and thus loose coupling.

+
+
+
+

Dialog Component Architecture

+
+

A dialog component has to support all or a subset of the following tasks:
+(T1) Displaying the user interface incl. internationalization
+(T2) Displaying business data incl. changes made to the data due to user interactions and localization of the data
+(T3) Accepting user input including possible conversion from e.g. entered Text to an Integer
+(T4) Displaying the dialog state
+(T5) Validation of user input
+(T6) Managing the business data incl. business logic altering it due to user interactions
+(T7) Execution of user interactions
+(T8) Managing the state of the dialog (e.g. Edit vs. View)
+(T9) Calling the application server in the course of user interactions

+
+
+

Following the principle of separation of concerns, we further structure a dialog component in an own architecture allowing us the distribute responsibility for these tasks along the defined components:

+
+
+
+Dialog Component Architecture +
+
+
+

Figure 4 Overview of dialog component architecture

+
+
+
+

== Presentation Layer

+
+

The presentation layer generates and displays the user interface, accepts user input and user actions and binds these to the dialog core layer (T1-5). The tasks of the presentation layer fall into two categories:

+
+
+
    +
  • +

    Provision of the visual representation (View component)
    +The presentation layer generates and displays the user interface and accepts user input and user actions. The logical processing of the data, actions and states is performed in the dialog core layer. The data and user interface are displayed in localized and internationalized form.

    +
  • +
  • +

    Binding of the visual representation to the dialog core layer
    +The presentation layer itself does not contain any dialog logic. The data or actions entered by the user are then processed in the dialog core layer. There are three aspects to the binding to the dialog core layer. We refer to “data binding”, “state binding” and “action binding”. Syntactical and (to a certain extent) semantic validations are performed during data binding (e.g. cross-field plausibility checks). Furthermore, the formatted, localized data in the presentation layer is converted into the presentation-independent, neutral data in the dialog core layer (parsing) and vice versa (formatting).

    +
  • +
+
+
+
+

== Dialog Core Layer

+
+

The dialog core layer contains the business logic, the control logic, and the logical state of the dialog. It therefore covers tasks T5-9:

+
+
+
    +
  • +

    Maintenance of the logical dialog state and the logical data
    +The dialog core layer maintains the logical dialog state and the logical data in a form which is independent of the presentation. The states of the presentation (e.g. individual widgets) must not be maintained in the dialog core layer, e.g. the view state could lead to multiple presentation states disabling all editable widgets on the view.

    +
  • +
  • +

    Implementation of the dialog and dialog control logic
    +The component parts in the dialog core layer implement the client specific business logic and the dialog control logic. This includes, for example, the manipulation of dialog data and dialog states as well as the opening and closing of dialogs.

    +
  • +
  • +

    Communication with the application server
    +The dialog core layer calls the interfaces of the application server via the application server access component services.

    +
  • +
+
+
+

The dialog core layer should not depend on the presentation layer enforcing a strict layering and thus minimizing dependencies.

+
+
+
+

== Interactions between dialog components

+
+

Dialog components can interact in the following ways:

+
+
+
+Dialog Interactions +
+
+
+
    +
  • +

    Embedding of dialog components
    +As already said dialog components can be hierarchically composed. This composition works by embedding on dialog component within the other. Apart from the life-cycle managed by the dialog container, the embedding needs to cope for the visual embedding of the presentation and core layer.

    +
    +
      +
    • +

      Embedding dialog presentation
      +The parent dialog needs to either integrate the embedded dialog in its layout or open it in an own model window.

      +
    • +
    • +

      Embedding dialog core
      +The parent dialog needs to be able to access the embedded instance of its children. This allows initializing and changing their data and states. On the other hand the children might require context information offered by the parent dialog by registering services in the hierarchical service registry.

      +
    • +
    +
    +
  • +
  • +

    Dialog flow
    +Apart from the embedding of dialog components representing a tight coupling, dialogs can interact with each other by passing the control of the UI, i.e. switching from one dialog to another.

    +
  • +
+
+
+

When interacting, dialog components should interact only between the same or lower layers, i.e. the dialog core should not access the presentation layer of another dialog component.

+
+
+
+

Appendix

+ +
+
+

Notes about Quasar Client

+
+

The Quasar client architecture as the consolidated knowledge of our CSD projects is the major source for the above drafted architecture. However, the above is a much simplified and more agile version thereof:

+
+
+
    +
  • +

    Quasar Client tried to abstract from the concrete UI library being used, so it could decouple the business from the technical logic of a dialog. The presentation layer should be the only one knowing the concrete UI framework used. This level of abstraction was dropped in this reference architecture, although it might of course still make sense in some projects. For fast-moving agile projects in the web however introducing such a level of abstraction takes effort with little gained benefits. With frameworks like Angular 2 we would even introduce one additional seemingly artificial and redundant layer, since it already separates the dialog core from its presentation.

    +
  • +
  • +

    In the past and in the days of Struts, JSF, etc. the concept of session handling was important for the client since part of the client was sitting on a server with a session relating it to its remote counterpart on the users PC. Quasar Client catered for this need, by very prominently differentiating between session and application in the root of the dialog component hierarchy. However, in the current days of SPA applications and the lowered importance of servers-side web clients, this prominent differentiation was dropped. When still needed the referenced documents will provide in more detail how to tailor the respective architecture to this end.

    +
  • +
+
+
+ +
+
+
+

Layers

+
+ +
+

Components Layer

+
+

The components layer encapsulates all components presenting the current application view state, which means data to be shown to the user. +The term component refers to a component described by the standard Web Components. +So this layer has all Angular components, directives and pipes defined for an application. +The main challenges are:

+
+
+
    +
  • +

    how to structure the components layer (see File Structure Guide)

    +
  • +
  • +

    decompose components into maintainable chunks (see Component Decomposition Guide)

    +
  • +
  • +

    handle component interaction

    +
  • +
  • +

    manage calls to the services layer

    +
  • +
  • +

    apply a maintainable data and event flow throughout the component tree

    +
  • +
+
+
+
+

Smart and Dumb Components

+
+

The architecture applies the concept of Smart and Dumb Components (syn. Containers and Presenters). +The concept means that components are divided into Smart and Dumb Components.

+
+
+

A Smart Component typically is a top-level dialog inside the component tree.

+
+
+
    +
  • +

    a component, that can be routed to

    +
  • +
  • +

    a modal dialog

    +
  • +
  • +

    a component, which is placed inside AppComponent

    +
  • +
+
+
+

A Dumb Component can be used by one to many Smart Components. +Inside the component tree a Dumb Component is a child of a Smart Component.

+
+
+
+Component Tree +
+
Figure 3. Component tree example
+
+
+

As shown the topmost component is always the AppComponent in Angular applications. +The component tree describes the hierarchy of components starting from AppComponent. +The figure shows Smart Components in blue and Dumb Components in green. +AppComponent is a Smart Component by definition. +Inside the template of AppComponent placed components are static components inside the component tree. +So they are always displayed. +In the example OverviewComponent and DetailsComponent are rendered by Angular compiler depending on current URL the application displays. +So OverviewComponents sub-tree is displayed if the URL is /overview and DetailsComponents sub-tree is displayed if the URL is /details. +To clarify this distinction further the following table shows the main differences.

+
+
+
Smart vs Dumb Components
+

|== = +|Smart Components |Dumb Components

+
+
+

|contain the current view state +|show data via binding (@Input) and contain no view state

+
+
+

|handle events emitted by Dumb Components +|pass events up the component tree to be handled by Smart Components (@Output)

+
+
+

|call the services layer +|never call the services layer

+
+
+

|use services +|do not use services

+
+
+

|consists of n Dumb Components +|is independent of Smart Components +|== =

+
+
+
+

Interaction of Smart and Dumb Components

+
+

With the usage of the Smart and Dumb Components pattern one of the most important part is component interaction. +Angular comes with built in support for component interaction with @Input() and @Output() Decorators. +The following figure illustrates an unidirectional data flow.

+
+
+
    +
  • +

    Data always goes down the component tree - from a Smart Component down its children.

    +
  • +
  • +

    Events bubble up, to be handled by a Smart Component.

    +
  • +
+
+
+
+Smart and Dumb Components Interaction +
+
Figure 4. Smart and Dumb Component Interaction
+
+
+

As shown a Dumb Components role is to define a signature by declaring Input and Output Bindings.

+
+
+
    +
  • +

    @Input() defines what data is necessary for that component to work

    +
  • +
  • +

    @Output() defines which events can be listened on by the parent component

    +
  • +
+
+
+
Listing 1. Dumb Components define a signature
+
+
export class ValuePickerComponent {
+
+  @Input() columns: string[];
+  @Input() items: {}[];
+  @Input() selected: {};
+  @Input() filter: string;
+  @Input() isChunked = false;
+  @Input() showInput = true;
+  @Input() showDropdownHeader = true;
+
+  @Output() elementSelected = new EventEmitter<{}>();
+  @Output() filterChanged = new EventEmitter<string>();
+  @Output() loadNextChunk = new EventEmitter();
+  @Output() escapeKeyPressed = new EventEmitter();
+
+}
+
+
+
+

The example shows the Dumb Component ValuePickerComponent. +It describes seven input bindings with isChunked, showInput and showDropdownHeader being non-mandatory as they have a default value. +Four output bindings are present. Typically, a Dumb Component has very little to no code inside the TypeScript class.

+
+
+
Listing 2. Smart Components use the Dumb Components signature inside the template
+
+
<div>
+
+  <value-input
+    ...>
+  </value-input>
+
+  <value-picker
+    *ngIf="isValuePickerOpen"
+    [columns]="columns"
+    [items]="filteredItems"
+    [isChunked]="isChunked"
+    [filter]="filter"
+    [selected]="selectedItem"
+    [showDropdownHeader]="showDropdownHeader"
+    (loadNextChunk)="onLoadNextChunk()"
+    (elementSelected)="onElementSelected($event)"
+    (filterChanged)="onFilterChanged($event)"
+    (escapeKeyPressed)="onEscapePressedInsideChildTable()">
+  </value-picker>
+
+</div>
+
+
+
+

Inside the Smart Components template the events emitted by Dumb Components are handled. +It is a good practice to name the handlers with the prefix on* (e.g. onInputChanged()).

+
+ +
+
+

Services Layer

+
+

The services layer is more or less what we call 'business logic layer' on the server side. +It is the layer where the business logic is placed. +The main challenges are:

+
+
+
    +
  • +

    Define application state and an API for the components layer to use it

    +
  • +
  • +

    Handle application state transitions

    +
  • +
  • +

    Perform back-end interaction (XHR, WebSocket, etc.)

    +
  • +
  • +

    Handle business logic in a maintainable way

    +
  • +
  • +

    Configuration management

    +
  • +
+
+
+

All parts of the services layer are described in this chapter. +An example which puts the concepts together can be found at the end Interaction of Smart Components through the services layer.

+
+
+
+

Boundaries

+
+

There are two APIs for the components layer to interact with the services layer:

+
+
+
    +
  • +

    A store can be subscribed to for receiving state updates over time

    +
  • +
  • +

    A use case service can be called to trigger an action

    +
  • +
+
+
+

To illustrate the fact the following figure shows an abstract overview.

+
+
+
+Smart and Dumb Components Interaction +
+
Figure 5. Boundaries to components layer
+
+
+
+

Store

+
+

A store is a class which defines and handles application state with its transitions over time. +Interaction with a store is always synchronous. +A basic implementation using RxJS can look like this.

+
+
+ + + + + +
+ + +A more profound implementation taken from a real-life project can be found here (Abstract Class Store). +
+
+
+
Listing 3. Store defined using RxJS
+
+
@Injectable()
+export class ProductSearchStore {
+
+  private stateSource = new BehaviorSubject<ProductSearchState>(defaultProductSearchState);
+  state$ = this.stateSource.asObservable();
+
+  setLoading(isLoading: boolean) {
+    const currentState = this.stateSource.getValue();
+    this.stateSource.next({
+      isLoading: isLoading,
+      products: currentState.products,
+      searchCriteria: currentState.searchCriteria
+    });
+  }
+
+}
+
+
+
+

In the example ProductSearchStore handles state of type ProductSearchState. +The public API is the property state$ which is an observable of type ProductSearchState. +The state can be changed with method calls. +So every desired change to the state needs to be modeled with an method. +In reactive terminology this would be an Action. +The store does not use any services. +Subscribing to the state$ observable leads to the subscribers receiving every new state.

+
+
+

This is basically the Observer Pattern:
+The store consumer registers itself to the observable via state$.subscribe() method call. +The first parameter of subscribe() is a callback function to be called when the subject changes. +This way the consumer - the observer - is registered. +When next() is called with a new state inside the store, all callback functions are called with the new value. +So every observer is notified of the state change. +This equals the Observer Pattern push type.

+
+
+

A store is the API for Smart Components to receive state from the service layer. +State transitions are handled automatically with Smart Components registering to the state$ observable.

+
+
+
+

Use Case Service

+
+

A use case service is a service which has methods to perform asynchronous state transitions. +In reactive terminology this would be an Action of Actions - a thunk (redux) or an effect (@ngrx).

+
+
+
+Use Case Service +
+
Figure 6. Use case services are the main API to trigger state transitions
+
+
+

A use case services method - an action - interacts with adapters, business services and stores. +So use case services orchestrate whole use cases. +For an example see use case service example.

+
+
+
+

Adapter

+
+

An adapter is used to communicate with the back-end. +This could be a simple XHR request, a WebSocket connection, etc. +An adapter is simple in the way that it does not add anything other than the pure network call. +So there is no caching or logging performed here. +The following listing shows an example.

+
+
+

For further information on back-end interaction see Consuming REST Services

+
+
+
Listing 4. Calling the back-end via an adapter
+
+
@Injectable()
+export class ProductsAdapter {
+
+  private baseUrl = environment.baseUrl;
+
+  constructor(private http: HttpClient) { }
+
+  getAll(): Observable<Product[]> {
+    return this.http.get<Product[]>(this.baseUrl + '/products');
+  }
+
+}
+
+
+
+
+

Interaction of Smart Components through the services layer

+
+

The interaction of smart components is a classic problem which has to be solved in every UI technology. +It is basically how one dialog tells the other something has changed.

+
+
+

An example is adding an item to the shopping basket. +With this action there need to be multiple state updates.

+
+
+
    +
  • +

    The small logo showing how many items are currently inside the basket needs to be updated from 0 to 1

    +
  • +
  • +

    The price needs to be recalculated

    +
  • +
  • +

    Shipping costs need to be checked

    +
  • +
  • +

    Discounts need to be updated

    +
  • +
  • +

    Ads need to be updated with related products

    +
  • +
  • +

    etc.

    +
  • +
+
+
+
+

Pattern

+
+

To handle this interaction in a scalable way we apply the following pattern.

+
+
+
+Interaction of Smart Components via services layer +
+
Figure 7. Smart Component interaction
+
+
+

The state of interest is encapsulated inside a store. All Smart Components interested in the state have to subscribe to the store’s API served by the public observable. Thus, with every update to the store the subscribed components receive the new value. The components basically react to state changes. Altering a store can be done directly if the desired change is synchronous. Most actions are of asynchronous nature so the UseCaseService comes into play. Its actions are void methods, which implement a use case, i.e., adding a new item to the basket. It calls asynchronous actions and can perform multiple store updates over time.

+
+
+

To put this pattern into perspective the UseCaseService is a programmatic alternative to redux-thunk or @ngrx/effects. The main motivation here is to use the full power of TypeScript --strictNullChecks and to let the learning curve not to become as steep as it would be when learning a new state management framework. This way actions are just void method calls.

+
+
+
+

Example

+
+
+Smart component interaction example +
+
Figure 8. Smart Components interaction example
+
+
+

The example shows two Smart Components sharing the FlightSearchState by using the FlightSearchStore. +The use case shown is started by an event in the Smart Component FlightSearchComponent. The action loadFlight() is called. This could be submitting a search form. +The UseCaseService is FlightSearchService, which handles the use case Load Flights.

+
+
+
UseCaseService example
+

+
+
+
+
export class FlightSearchService {
+
+  constructor(
+    private flightSearchAdapter: FlightSearchAdapter,
+    private store: FlightSearchStore
+  ) { }
+
+  loadFlights(criteria: FlightSearchCriteria): void {
+    this.store.setLoadingFlights(true);
+    this.store.clearFlights();
+
+    this.flightSearchAdapter.getFlights(criteria.departureDate,
+        {
+          from: criteria.departureAirport,
+          to: criteria.destinationAirport
+        })
+      .finally(() => this.store.setLoadingFlights(false))
+      .subscribe((result: FlightTo[]) => this.store.setFlights(result, criteria));
+  }
+
+}
+
+
+
+

First the loading flag is set to true and the current flights are cleared. This leads the Smart Component showing a spinner indicating the loading action. Then the asynchronous XHR is triggered by calling the adapter. After completion the loading flag is set to false causing the loading indication no longer to be shown. If the XHR was successful, the data would be put into the store. If the XHR was not successful, this would be the place to handle a custom error. All general network issues should be handled in a dedicated class, i.e., an interceptor. So for example the basic handling of 404 errors is not done here.

+
+
+
+
+
+

Guides

+
+ +
+

Package Managers

+
+

There are two major package managers currently used for JavaScript / TypeScript projects which leverage NodeJS as a build platform.

+
+
+
    +
  1. +

    npm

    +
  2. +
  3. +

    yarn

    +
  4. +
+
+
+

Our recommendation is to use yarn but both package managers are fine.

+
+
+ + + + + +
+ + +When using npm it is important to use a version greater than 5.0 as npm 3 has major drawbacks compared to yarn. +The following guide assumes that you are using npm >= 5 or yarn. +
+
+
+

Before you start reading further, please take a look at the docs:

+
+ +
+

The following guide will describe best practices for working with yarn / npm.

+
+
+
+

Semantic Versioning

+
+

When working with package managers it is very important to understand the concept of semantic versioning.

+
+
+
Version example 1.2.3
+

|== == == = +|Version |1. |2. |3 +|Version name when incrementing |Major (2.0.0) |Minor (1.3.0) |Patch (1.2.4) +|Has breaking changes |yes |no |no +|Has features |yes |yes |no +|Has bug fixes |yes |yes |yes +|== == == =

+
+
+

The table gives an overview of the most important parts of semantic versioning. +In the header version 1.2.3 is displayed. +The first row shows the name and the resulting version when incrementing a part of the version. +The next rows show specifics of the resulting version - e.g. a major version can have breaking changes, features and bug fixes.

+
+
+

Packages from npm and yarn leverage semantic versioning and instead of selecting a fixed version one can specify a selector. +The most common selectors are:

+
+
+
    +
  • +

    ^1.2.3 +At least 1.2.3 - 1.2.4 or 1.3.0 can be used, 2.0.0 can not be used

    +
  • +
  • +

    ~1.2.3 +At least 1.2.3 - 1.2.4 can be used, 2.0.0 and 1.3.0 can not be used

    +
  • +
  • +

    >=1.2.3 +At least 1.2.3 - every version greater can also be used

    +
  • +
+
+
+

This achieves a lower number of duplicates. +To give an example:

+
+
+

If package A needs version 1.3.0 of package C and package B needs version 1.4.0 of package C one would end up with 4 packages.

+
+
+

If package A needs version ^1.3.0 of package C and package B needs version 1.4.0 of package C one would end up with 3 packages. +A would use the same version of C as B - 1.4.0.

+
+
+
+

Do not modify package.json and lock files by hand

+
+

Dependencies are always added using a yarn or npm command. +Altering the package.json, package-lock.json or yarn.lock file by hand is not recommended.

+
+
+

Always use a yarn or npm command to add a new dependency.

+
+
+

Adding the package express with yarn to dependencies.

+
+
+
+
yarn add express
+
+
+
+

Adding the package express with npm to dependencies.

+
+
+
+
npm install express
+
+
+
+
+

What does the lock file do

+
+

The purpose of files yarn.lock and package-lock.json is to freeze versions for a short time.

+
+
+

The following problem is solved:

+
+
+
    +
  • +

    Developer A upgrades the dependency express to fixed version 4.16.3.

    +
  • +
  • +

    express has sub-dependency accepts with version selector ~1.3.5

    +
  • +
  • +

    His local node_modules folder receives accepts in version 1.3.5

    +
  • +
  • +

    On his machine everything is working fine

    +
  • +
  • +

    Afterward version 1.3.6 of accepts is published - it contains a major bug

    +
  • +
  • +

    Developer B now clones the repo and loads the dependencies.

    +
  • +
  • +

    He receives version 1.3.6 of accepts and blames developer A for upgrading to a broken version.

    +
  • +
+
+
+

Both yarn.lock and package-lock.json freeze all the dependencies. +For example in yarn.lock you will find.

+
+
+
Listing 5. yarn.lock example (excerpt)
+
+
accepts@~1.3.5:
+  version "1.3.5"
+  resolved "[...URL to registry]"
+  dependencies:
+    mime-types "~2.1.18"
+    negotiator "0.6.1"
+
+mime-db@~1.33.0:
+  version "1.33.0"
+  resolved "[...URL to registry]"
+
+mime-types@~2.1.18:
+  version "2.1.18"
+  resolved "[...URL to registry]"
+  dependencies:
+    mime-db "~1.33.0"
+
+negotiator@0.6.1:
+  version "0.6.1"
+  resolved "[...URL to registry]"
+
+
+
+

The described problem is solved by the example yarn.lock file.

+
+
+
    +
  • +

    accepts is frozen at version ~1.3.5

    +
  • +
  • +

    All of its sub-dependencies are also frozen. +It needs mime-types at version ~2.1.18 which is frozen at 2.1.18. +mime-types needs mime-db at ~1.33.0 which is frozen at 1.33.0

    +
  • +
+
+
+

Every developer will receive the same versions of every dependency.

+
+
+ + + + + +
+ + +You have to make sure all your developers are using the same npm/yarn version - this includes the CI build. +
+
+ +
+
+

Package Managers Workflow

+ +
+
+

Introduction

+
+

This document aims to provide you the necessary documentation and sources in order to help you understand the importance of dependencies between packages.

+
+
+

Projects in NodeJS make use of modules, chunks of reusable code made by other people or teams. These small chunks of reusable code are called packages [1]. Packages are used to solve specific problems or tasks. These relations between your project and the external packages are called dependencies.

+
+
+

For example, imagine we are doing a small program that takes your birthday as an input and tells you how many days are left until your birthday. We search in the repository if someone has published a package to retrieve the actual date and manage date types, and maybe we could search for another package to show a calendar, because we want to optimize our time, and we wish the user to click a calendar button and choose the day in the calendar instead of typing it.

+
+
+

As you can see, packages are convenient. In some cases, they may be even needed, as they can manage aspects of your program you may not be proficient in, or provide an easier use of them.

+
+
+

For more comprehensive information visit npm definition

+
+
+
+

Package.json

+
+

Dependencies in your project are stored in a file called package.json. Every package.json must contain, at least, the name and version of your project.

+
+
+

Package.json is located in the root of your project.

+
+
+ + + + + +
+ + +If package.json is not on your root directory refer to Problems you may encounter section +
+
+
+

If you wish to learn more information about package.json, click on the following links:

+
+ +
+
+

== Content of package.json

+
+

As you noticed, package.json is a really important file in your project. It contains essential information about our project, therefore you need to understand what’s inside.

+
+
+

The structure of package.json is divided in blocks, inside the first one you can find essential information of your project such as the name, version, license and optionally some [Scripts].

+
+
+
+
{
+  "name": "exampleproject",
+  "version": "0.0.0",
+  "license": "MIT",
+  "scripts": {
+    "ng": "ng",
+    "start": "ng serve",
+    "build": "ng build",
+    "test": "ng test",
+    "lint": "ng lint",
+    "e2e": "ng e2e"
+  }
+
+
+
+

The next block is called dependencies and contains the packages that the project needs in order to be developed, compiled and executed.

+
+
+
+
"private": true,
+  "dependencies": {
+    "@angular/animations": "^4.2.4",
+    "@angular/common": "^4.2.4",
+    "@angular/forms": "^4.2.4",
+    ...
+    "zone.js": "^0.8.14"
+  }
+
+
+
+

After dependencies we find devDependencies, another kind of dependencies present in the development of the application but unnecessary for its execution. One example is typescript. Code is written in typescript, and then, transpiled to JavaScript. This means the application is not using typescript in execution and consequently it is not included in the deployment of our application.

+
+
+
+
"devDependencies": {
+    "@angular/cli": "1.4.9",
+    "@angular/compiler-cli": "^4.2.4",
+    ...
+    "@types/node": "~6.0.60",
+    "typescript": "~2.3.3"
+  }
+
+
+
+

Having a peer dependency means that your package needs a dependency that is the same exact dependency as the person installing your package

+
+
+
+
"peerDependencies": {
+    "package-123": "^2.7.18"
+  }
+
+
+
+

Optional dependencies are just that: optional. If they fail to install, Yarn will still say the install process was successful.

+
+
+
+
"optionalDependencies": {
+    "package-321": "^2.7.18"
+  }
+
+
+
+

Finally you can have bundled dependencies which are packages bundled together when publishing your package in a repository.

+
+
+
+
{
+  "bundledDependencies": [
+    "package-4"
+  ]
+}
+
+
+
+

Here is the link to an in-depth explanation of dependency types​.

+
+
+
+

== Scripts

+
+

Scripts are a great way of automating tasks related to your package, such as simple build processes or development tools.

+
+
+

For example:

+
+
+
+
{
+  "name": "exampleproject",
+  "version": "0.0.0",
+  "license": "MIT",
+  "scripts": {
+    "build-project": "node hello-world.js",
+  }
+
+
+
+

You can run that script by running the command yarn (run) script or npm run script, check the example below:

+
+
+
+
$ yarn (run) build-project    # run is optional
+$ npm run build-project
+
+
+
+

There are special reserved words for scripts, like pre-install, which will execute the script automatically +before the packages you install are installed.

+
+
+

Check different uses for scripts in the following links:

+
+ +
+

Or you can go back to +[Content of package.json]​.

+
+
+
+

Managing dependencies

+
+

In order to manage dependencies we recommend using package managers in your projects.

+
+
+

A big reason is their usability. Adding or removing a package is really easy, and by doing so, the package manager updates the package.json and copies (or removes) the package in the needed location, with a single command.

+
+
+

Another reason, closely related to the first one, is reducing human error by automating the package management process.

+
+
+

Two of the package managers you can use in NodeJS projects are "yarn" and "npm". While you can use both, we encourage you to use only one of them while working on projects. Using both may lead to different dependencies between members of the team.

+
+
+
+

== npm

+
+

We’ll start by installing npm following this small guide here.

+
+
+

As stated on the web, npm comes inside of NodeJS, and must be updated after installing NodeJS; the instructions to update npm are written in the same guide you used earlier.

+
+
+

How npm works

+
+
+

In order to explain how npm works, let’s take a command as an example:

+
+
+
+
$ npm install @angular/material @angular/cdk
+
+
+
+

This command tells npm to look for the packages @angular/material and @angular/cdk in the npm registry, download and decompress them in the folder node_modules along with their own dependencies. Additionally, npm will update package.json and create a new file called package-lock.json.

+
+
+

After initialization and installing the first package there will be a new folder called node_modules in your project. This folder is where your packages are unzipped and stored, following a tree scheme.

+
+
+

Take into consideration that both npm and yarn need a package.json in the root of your project in order to work properly. If after creating your project you don’t have it, download the package.json again from the repository or you’ll have to start again.

+
+
+

Brief overview of commands

+
+
+

If we need to create a package.json from scratch, we can use the command init. This command asks the user for basic information about the project and creates a brand new package.json.

+
+
+
+
$ npm init
+
+
+
+

Install (or i) installs all modules listed as dependencies in package.json locally. You can also specify a package, and install that package. Install can also be used with the parameter -g, which tells npm to install the [Global package].

+
+
+
+
$ npm install
+$ npm i
+$ npm install Package
+
+
+
+ + + + + +
+ + +Earlier versions of npm did not add dependencies to package.json unless it was used with the flag --save, so npm install package would be npm install --save package, you have one example below. +
+
+
+
+
$ npm install --save Package
+
+
+
+

Npm needs flags in order to know what kind of dependency you want in your project, in npm you need to put the flag -D or --save-dev to install devDependencies, for more information consult the links at the end of this section.

+
+
+
+
$ npm install -D package
+$ npm install --save-dev package
+
+
+
+

+
+
+

The next command uninstalls the module you specified in the command.

+
+
+
+
$ npm uninstall Package
+
+
+
+

ls command shows us the dependencies like a nested tree, useful if you have few packages, not so useful when you need a lot of packages.

+
+
+
+
$ npm ls
+
+
+
+
+
npm@@VERSION@ /path/to/npm
+└─┬ init-package-json@0.0.4
+  └── promzard@0.1.5
+
+
+
+
example tree
+

We recommend you to learn more about npm commands in the following link, navigating to the section CLI commands.

+
+
+

About Package-lock.json

+
+
+

Package-lock.json describes the dependency tree resulting from using package.json and npm. +Whenever you update, add or remove a package, package-lock.json is deleted and redone with +the new dependencies.

+
+
+
+
 "@angular/animations": {
+      "version": "4.4.6",
+      "resolved": "https://registry.npmjs.org/@angular/animations/-/animations-4.4.6.tgz",
+      "integrity": "sha1-+mYYmaik44y3xYPHpcl85l1ZKjU=",
+      "requires": {
+        "tslib": "1.8.0"
+      }
+
+
+
+

This lock file is checked every time the command npm i (or npm install) is used without specifying a package, +in the case it exists and it’s valid, npm will install the exact tree that was generated, such that subsequent +installs are able to generate identical dependency trees.

+
+
+ + + + + +
+ + +It is not recommended to modify this file yourself. It’s better to leave its management to npm. +
+
+
+

More information is provided by the npm team at package-lock.json

+
+
+
+

== Yarn

+
+

Yarn is an alternative to npm, if you wish to install yarn follow the guide getting started with yarn and download the correct version for your operating system. NodeJS is also needed; you can find it here.

+
+
+

Working with yarn

+
+
+

Yarn is used like npm, with small differences in syntax, for example npm install module is changed to yarn add module.

+
+
+
+
$ yarn add @covalent
+
+
+
+

This command is going to download the required packages, modify package.json, put the package in the folder node_modules and makes a new yarn.lock with the new dependency.

+
+
+

However, unlike npm, yarn maintains a cache with packages you download inside. You don’t need to download every file every time you do a general installation. This means faster installations than with npm.

+
+
+

Similarly to npm, yarn creates and maintains its own lock file, called yarn.lock. Yarn.lock gives enough information about the project for the dependency tree to be reproduced.

+
+
+

yarn commands

+
+
+

Here we have a brief description of yarn’s most used commands:

+
+
+
+
$ yarn add Package
+$ yarn add --dev Package
+
+
+
+

Adds a package locally to use in your package. Adding the flags --dev or -D will add them to devDependencies instead of the default dependencies, if you need more information check the links at the end of the section.

+
+
+
+
$ yarn init
+
+
+
+

Initializes the development of a package.

+
+
+
+
$ yarn install
+
+
+
+

Installs all the dependencies defined in a package.json file, you can also write "yarn" to achieve the same effect.

+
+
+
+
$ yarn remove Package
+
+
+
+

You use it when you wish to remove a package from your project.

+
+
+
+
$ yarn global add Package
+
+
+
+

Installs the [Global package].

+
+
+

Please, refer to the documentation to learn more about yarn commands and their attributes: yarn commands

+
+
+

yarn.lock

+
+
+

This file has the same purpose as Package-lock.json, to guide the package manager, in this case yarn, +to install the dependency tree specified in yarn.lock.

+
+
+

Yarn.lock and package.json are +essential files when collaborating on a project with more co-workers and may be a +source of errors if programmers do not use the same manager.

+
+
+

Yarn.lock follows the same structure as package-lock.json, you can find an example of dependency below:

+
+
+
+
"@angular/animations@^4.2.4":
+  version "4.4.6"
+  resolved "https://registry.yarnpkg.com/@angular/animations/-/animations-4.4.6.tgz#fa661899a8a4e38cb7c583c7a5c97ce65d592a35"
+  dependencies:
+    tslib "^1.7.1"
+
+
+
+ + + + + +
+ + +As with package-lock.json, it’s strongly not advised to modify this file. Leave its management to yarn +
+
+
+

You can learn more about yarn.lock here: yarn.lock

+
+
+
+

== Global package

+
+

Global packages are packages installed in your operating system instead of your local project, +global packages are useful for developer tooling that is not part of any individual project but instead is used for local commands.

+
+
+

A good example of global package is @angular/cli, a command line interface for angular used in our projects. You can install +a global package in npm with "npm install -g package" and "yarn global add package" with yarn, you have a npm example below:

+
+
+
Listing 6. npm global package
+
+
npm install -g @angular/cli
+
+
+ +
+
+

== Package version

+
+

Dependencies are critical to the success of a package. You must be extra careful about +which version packages are using, one package in a different version may break your code.

+
+
+

Versioning in npm and yarn, follows a semantic called semver, following the logic +MAJOR.MINOR.PATCH, like for example, @angular/animations: 4.4.6.

+
+
+

Different versions

+
+
+

Sometimes, packages are installed with a different version from the one initially installed. +This happens because package.json also contains the range of versions we allow yarn or npm to +install or update to, example:

+
+
+
+
"@angular/animations": "^4.2.4"
+
+
+
+

And here the installed one:

+
+
+
+
 "@angular/animations": {
+      "version": "4.4.6",
+      "resolved": "https://registry.npmjs.org/@angular/animations/-/animations-4.4.6.tgz",
+      "integrity": "sha1-+mYYmaik44y3xYPHpcl85l1ZKjU=",
+      "requires": {
+        "tslib": "1.8.0"
+      }
+
+
+
+

As you can see, the version we initially added is 4.2.4, and the version finally installed after +a global installation of all packages, 4.4.6.

+
+
+

Installing packages without package-lock.json or yarn.lock using their respective packet managers, will always +end with npm or yarn installing the latest version allowed by package.json.

+
+
+

"@angular/animations": "^4.2.4" contains not only the version we added, but also the range we allow npm and yarn +to update. Here are some examples:

+
+
+
+
"@angular/animations": "<4.2.4"
+
+
+
+

The version installed must be lower than 4.2.4 .

+
+
+
+
"@angular/animations": ">=4.2.4"
+
+
+
+

The version installed must be greater than or equal to 4.2.4 .

+
+
+
+
"@angular/animations": "=4.2.4"
+
+
+
+

the version installed must be equal to 4.2.4 .

+
+
+
+
"@angular/animations": "^4.2.4"
+
+
+
+

The version installed cannot modify the first non-zero digit, for example in this case +it cannot surpass 5.0.0 or be lower than 4.2.4 .

+
+
+

You can learn more about this in Versions

+
+
+
+

Problems you may encounter

+
+

If you can’t find package.json, you may have deleted the one you had previously, +which means you have to download the package.json from the repository. +In the case you are creating a new project you can create a new package.json. More information +in the links below. Click on Package.json if you come from that section.

+
+ +
+ + + + + +
+ + +Using npm install or yarn without package.json in your projects will +result in compilation errors. As we mentioned earlier, +Package.json contains essential information about your project. +
+
+
+

If you have package.json, but you don’t have package-lock.json or yarn.lock the use of +command "npm install" or "yarn" may result in a different dependency tree.

+
+
+

If you are trying to import a module and Visual Studio Code is not able to find it, +it is usually caused by an error adding the package to the project, try to add the module again with yarn or npm, +and restart Visual Studio Code.

+
+
+

Be careful with the semantic versioning inside your package.json of the packages, +or you may find a new update on one of your dependencies breaking your code.

+
+
+ + + + + +
+ + +In the following link +there is a solution to a problematic update to one package. +
+
+
+

A list of common errors of npm can be found in: npm errors

+
+
+
+

== Recommendations

+
+

Use yarn or npm in your project, reach an agreement with your team in order to choose one, this will avoid +undesired situations like forgetting to upload an updated yarn.lock or package-lock.json. +Be sure to have the latest version of your project when possible.

+
+
+ + + + + +
+ + +Pull your project every time it’s updated. Erase your node_modules folder and reinstall all +dependencies. This assures you to be working with the same dependencies your team has. +
+
+
+

AD Center recommends the use of yarn.

+
+ +
+
+

Yarn 2

+
+

Yarn v2 is a very different software from the v1. The following list contains the main new features:

+
+ +
+

Please, read them carefully to decide if your current project is suitable to use Yarn 2 as package manager.

+
+
+ + + + + +
+ + +Some features are still experimental, so please do not use them in production environments. +
+
+
+

More info at https://yarnpkg.com/

+
+
+
+

Global Install

+
+

Installing Yarn 2.x globally is discouraged as Yarn team is moving to a per-project install strategy. We advise you to keep Yarn 1.x (Classic) as your global binary by installing it via the instructions you can find here.

+
+
+

Once you’ve followed the instructions (running yarn --version from your home directory should yield something like 1.22.0), go to the next section to see how to enable Yarn 2 on your project.

+
+
+
+

Per-project install

+
+

Follow these instructions to update your current devon4ng project to Yarn 2:

+
+
+
    +
  1. +

    Follow the global install instructions.

    +
  2. +
  3. +

    Move into your project folder:

    +
    +
    +
    cd ~/path/to/project
    +
    +
    +
  4. +
  5. +

    Run the following command:

    +
    +
    +
    yarn policies set-version berry # below v1.22
    +yarn set version berry          # on v1.22+
    +
    +
    +
  6. +
  7. +

    Since Angular CLI still is not fully supported with the new Yarn architecture as it is not compatible with PnP it is necessary to include the node-modules plugin adding the following line in the .yarnrc.yml file:

    +
    +
    +
    nodeLinker: node-modules
    +
    +
    +
  8. +
  9. +

    Commit the .yarn and .yarnrc.yml changes

    +
  10. +
  11. +

    Run again yarn install.

    +
  12. +
+
+
+ + + + + +
+ + +For more advanced migration topics please refer to https://yarnpkg.com/advanced/migration +
+
+
+
+

Which files should be added to gitignore file?

+
+

If you’re using Zero-Installs:

+
+
+
+
.yarn/*
+!.yarn/cache
+!.yarn/releases
+!.yarn/plugins
+
+
+
+

If you’re not using Zero-Installs:

+
+
+
+
.yarn/*
+!.yarn/releases
+!.yarn/plugins
+.pnp.*
+
+
+ +
+
+
+
+

Angular

+
+ +
+

Accessibility

+
+

Multiple studies suggest that around 15-20% of the population are living with a disability of some kind. In comparison, that number is higher than any single browser demographic currently, other than Chrome. Not considering those users when developing an application means excluding a large number of people from being able to use it comfortably or at all.

+
+
+

Some people are unable to use a mouse, view a screen, see low-contrast text, or hear dialogue or music, and some people have difficulty understanding complex language. These users need support such as keyboard support, screen reader support, high-contrast text, captions and transcripts, and plain-language support. A disability may range from permanent to situational.

+
+
+
+

Key Concerns of Accessible Web Applications

+
+
    +
  • +

    Semantic Markup - Allows the application to be understood on a more general level rather than just details of what's being rendered

    +
  • +
  • +

    Keyboard Accessibility - Applications must still be usable when using only a keyboard

    +
  • +
  • +

    Visual Assistance - color contrast, focus of elements and text representations of audio and events

    +
  • +
+
+
+
+

Semantic Markup

+
+

If you’re creating custom element directives, Web Components or HTML in general, use native elements wherever possible to utilize built-in events and properties. Alternatively, use ARIA to communicate semantic meaning.

+
+
+

HTML tags have attributes that provide extra context on what’s being displayed on the browser. For example, the <img> tag’s alt attribute lets the reader know what is being shown using a short description. However, native tags don’t cover all cases. This is where ARIA fits in. ARIA attributes can provide context on what roles specific elements have in the application or on how elements within the document relate to each other.

+
+
+

A modal component can be given the role of dialog or alertdialog to let the browser know that the component is acting as a modal. The modal component template can use the ARIA attributes aria-labelledby and aria-describedby to describe to readers what the title and purpose of the modal is.

+
+
+
+
@Component({
+    selector: 'ngc2-app',
+    template: `
+      <ngc2-notification-button
+        message="Hello!"
+        label="Greeting"
+        role="button">
+      </ngc2-notification-button>
+      <ngc2-modal
+        [title]="modal.title"
+        [description]="modal.description"
+        [visible]="modal.visible"
+        (close)="modal.close()">
+      </ngc2-modal>
+    `
+})
+export class AppComponent {
+  constructor(private modal: ModalService) { }
+}
+
+
+
+

notification-button.component.ts

+
+
+
+
@Component({
+  selector: 'ngc2-modal',
+  template: `
+    <div
+      role="dialog"
+      aria-labelledby="modal-title"
+      aria-describedby="modal-description">
+      <div id="modal-title">{{title}}</div>
+      <p id="modal-description">{{description}}</p>
+      <button (click)="close.emit()">OK</button>
+    </div>
+  `
+})
+export class ModalComponent {
+  ...
+}
+
+
+
+
+

Keyboard Accessibility

+
+

Keyboard accessibility is the ability of your application to be interacted with using just a keyboard. The more streamlined the site can be used this way, the more keyboard accessible it is. Keyboard accessibility is one of the largest aspects of web accessibility since it targets:

+
+
+
    +
  • +

    those with motor disabilities who can’t use a mouse

    +
  • +
  • +

    users who rely on screen readers and other assistive technology, which require keyboard navigation

    +
  • +
  • +

    those who prefer not to use a mouse

    +
  • +
+
+
+
+

== Focus

+
+

Keyboard interaction is driven by something called focus. In web applications, only one element on a document has focus at a time, and a keypress will activate whatever function is bound to that element. +The focus element border can be styled with CSS using the outline property, but it should not be removed. Elements can also be styled using the :focus pseudo-selector.

+
+
+
+

== Tabbing

+
+

The most common way of moving focus along the page is through the tab key. Elements will be traversed in the order they appear in the document outline - so that order must be carefully considered during development. +There is a way to change the default behavior or tab order. This can be done through the tabindex attribute. The tabindex can be given the values: +* less than zero - to let readers know that an element should be focusable but not keyboard accessible +* 0 - to let readers know that that element should be accessible by keyboard +* greater than zero - to let readers know the order in which the focusable element should be reached using the keyboard. Order is calculated from lowest to highest.

+
+
+
+

== Transitions

+
+

The majority of transitions that happen in an Angular application will not involve a page reload. This means that developers will need to carefully manage what happens to focus in these cases.

+
+
+

For example:

+
+
+
+
@Component({
+  selector: 'ngc2-modal',
+  template: `
+    <div
+      role="dialog"
+      aria-labelledby="modal-title"
+      aria-describedby="modal-description">
+      <div id="modal-title">{{title}}</div>
+      <p id="modal-description">{{description}}</p>
+      <button (click)="close.emit()">OK</button>
+    </div>
+  `,
+})
+export class ModalComponent {
+  constructor(private modal: ModalService, private element: ElementRef) { }
+
+  ngOnInit() {
+    this.modal.visible$.subscribe(visible => {
+      if(visible) {
+        setTimeout(() => {
+          this.element.nativeElement.querySelector('button').focus();
+        }, 0);
+      }
+    })
+  }
+}
+
+
+
+
+

Visual Assistance

+
+

One large category of disability is visual impairment. This includes not just the blind, but those who are color blind or partially sighted, and require some additional consideration.

+
+
+
+

Color Contrast

+
+

When choosing colors for text or elements on a website, the contrast between them needs to be considered. For WCAG 2.0 AA, this means that the contrast ratio for text or visual representations of text needs to be at least 4.5:1. There are tools online to measure the contrast ratio such as this color contrast checker from WebAIM or be checked with using automation tests.

+
+
+
+

Visual Information

+
+

Color can help a user’s understanding of information, but it should never be the only way to convey information to a user. For example, a user with red/green color-blindness may have trouble discerning at a glance if an alert is informing them of success or failure.

+
+
+
+

Audiovisual Media

+
+

Audiovisual elements in the application such as video, sound effects or audio (that is, podcasts) need related textual representations such as transcripts, captions or descriptions. They also should never auto-play and playback controls should be provided to the user.

+
+
+
+

Accessibility with Angular Material

+
+

The a11y package provides a number of tools to improve accessibility. Import

+
+
+
+
import { A11yModule } from '@angular/cdk/a11y';
+
+
+
+
+

ListKeyManager

+
+

ListKeyManager manages the active option in a list of items based on keyboard interaction. Intended to be used with components that correspond to a role="menu" or role="listbox" pattern . Any component that uses a ListKeyManager will generally do three things:

+
+
+
    +
  • +

    Create a @ViewChildren query for the options being managed.

    +
  • +
  • +

    Initialize the ListKeyManager, passing in the options.

    +
  • +
  • +

    Forward keyboard events from the managed component to the ListKeyManager.

    +
  • +
+
+
+

Each option should implement the ListKeyManagerOption interface:

+
+
+
+
interface ListKeyManagerOption {
+  disabled?: boolean;
+  getLabel?(): string;
+}
+
+
+
+
+

== Types of ListKeyManager

+
+

There are two varieties of ListKeyManager, FocusKeyManager and ActiveDescendantKeyManager.

+
+
+
+

FocusKeyManager

+
+

Used when options will directly receive browser focus. Each item managed must implement the FocusableOption interface:

+
+
+
+
interface FocusableOption extends ListKeyManagerOption {
+  focus(): void;
+}
+
+
+
+
+

ActiveDescendantKeyManager

+
+

Used when options will be marked as active via aria-activedescendant. Each item managed must implement the Highlightable interface:

+
+
+
+
interface Highlightable extends ListKeyManagerOption {
+  setActiveStyles(): void;
+  setInactiveStyles(): void;
+}
+
+
+
+

Each item must also have an ID bound to the listbox’s or menu’s aria-activedescendant.

+
+
+
+

FocusTrap

+
+

The cdkTrapFocus directive traps Tab key focus within an element. This is intended to be used to create accessible experience for components like modal dialogs, where focus must be constrained. This directive is declared in A11yModule.

+
+
+

This directive will not prevent focus from moving out of the trapped region due to mouse interaction.

+
+
+

For example:

+
+
+
+
<div class="my-inner-dialog-content" cdkTrapFocus>
+  <!-- Tab and Shift + Tab will not leave this element. -->
+</div>
+
+
+
+
+

Regions

+
+

Regions can be declared explicitly with an initial focus element by using the cdkFocusRegionStart, cdkFocusRegionEnd and cdkFocusInitial DOM attributes. When using the tab key, focus will move through this region and wrap around on either end.

+
+
+

For example:

+
+
+
+
<a mat-list-item routerLink cdkFocusRegionStart>Focus region start</a>
+<a mat-list-item routerLink>Link</a>
+<a mat-list-item routerLink cdkFocusInitial>Initially focused</a>
+<a mat-list-item routerLink cdkFocusRegionEnd>Focus region end</a>
+
+
+
+
+

InteractivityChecker

+
+

InteractivityChecker is used to check the interactivity of an element, capturing disabled, visible, tabbable, and focusable states for accessibility purposes.

+
+
+
+

LiveAnnouncer

+
+

LiveAnnouncer is used to announce messages for screen-reader users using an aria-live region.

+
+
+

For example:

+
+
+
+
@Component({...})
+export class MyComponent {
+
+ constructor(liveAnnouncer: LiveAnnouncer) {
+   liveAnnouncer.announce("Hey Google");
+ }
+}
+
+
+
+
+

API reference for Angular CDK a11y

+ + +
+
+

Angular Elements

+ +
+
+

What are Angular Elements?

+
+

Angular elements are Angular components packaged as custom elements, a web standard for defining new HTML elements in a framework-agnostic way.

+
+
+

Custom elements are a Web Platform feature currently supported by Chrome, Firefox, Opera, and Safari, and available in other browsers through Polyfills. A custom element extends HTML by allowing you to define a tag whose content is created and controlled by JavaScript code. The browser maintains a CustomElementRegistry of defined custom elements (also called Web Components), which maps an instantiable JavaScript class to an HTML tag.

+
+
+
+

Why use Angular Elements?

+
+

Angular Elements allows Angular to work with different frameworks by using input and output elements. This allows Angular to work with many different frameworks if needed. This is an ideal situation if a slow transformation of an application to Angular is needed or some Angular needs to be added in other web applications (for example, ASP.NET, JSP, etc.).

+
+
+
+

Negative points about Elements

+
+

Angular Elements is really powerful, but since the transition between views is going to be handled by another framework or HTML/JavaScript, using the Angular Router is not possible. The view transitions have to be handled manually. This fact also eliminates the possibility of just porting an application completely.

+
+
+
+

How to use Angular Elements?

+
+

In a generalized way, a simple Angular component could be transformed to an Angular Element with these steps:

+
+
+
+

Installing Angular Elements

+
+

The first step is to install the library using our preferred package manager:

+
+
+
+

== NPM

+
+
+
npm install @angular/elements
+
+
+
+
+

== YARN

+
+
+
yarn add @angular/elements
+
+
+
+
+

Preparing the components in the modules

+
+

Inside the app.module.ts, in addition to the normal declaration of the components inside declarations, the modules inside imports and the services inside providers, the components need to be added in entryComponents. If there are components that have their own module, the same logic is going to be applied for them, only adding in the app.module.ts the components that do not have their own module. Here is an example of this:

+
+
+
+
....
+@NgModule({
+  declarations: [
+    DishFormComponent,
+    DishViewComponent
+  ],
+  imports: [
+    CoreModule,  // Module containing Angular Materials
+    FormsModule
+  ],
+  entryComponents: [
+    DishFormComponent,
+    DishViewComponent
+  ],
+  providers: [DishShareService]
+})
+....
+
+
+
+

After that is done, the constructor of the module is going to be modified to use injector and bootstrap the application defining the components. This is going to allow the Angular Element to get the injections and to define a component tag that will be used later:

+
+
+
+
....
+})
+export class AppModule {
+  constructor(private injector: Injector) {
+
+  }
+
+  ngDoBootstrap() {
+    const el = createCustomElement(DishFormComponent, {injector: this.injector});
+    customElements.define('dish-form', el);
+
+    const elView = createCustomElement(DishViewComponent, {injector: this.injector});
+    customElements.define('dish-view', elView);
+  }
+}
+....
+
+
+
+
+

A component example

+
+

In order to be able to use a component, @Input() and @Output() variables are used. These variables are going to be the ones that will allow the Angular Element to communicate with the framework/JavaScript:

+
+
+

Component html

+
+
+
+
<mat-card>
+    <mat-grid-list cols="1" rowHeight="100px" rowWidth="50%">
+				<mat-grid-tile colspan="1" rowspan="1">
+					<span>{{ platename }}</span>
+				</mat-grid-tile>
+				<form (ngSubmit)="onSubmit(dishForm)" #dishForm="ngForm">
+					<mat-grid-tile colspan="1" rowspan="1">
+						<mat-form-field>
+							<input matInput placeholder="Name" name="name" [(ngModel)]="dish.name">
+						</mat-form-field>
+					</mat-grid-tile>
+					<mat-grid-tile colspan="1" rowspan="1">
+						<mat-form-field>
+							<textarea matInput placeholder="Description" name="description" [(ngModel)]="dish.description"></textarea>
+						</mat-form-field>
+					</mat-grid-tile>
+					<mat-grid-tile colspan="1" rowspan="1">
+						<button mat-raised-button color="primary" type="submit">Submit</button>
+					</mat-grid-tile>
+				</form>
+		</mat-grid-list>
+</mat-card>
+
+
+
+

Component ts

+
+
+
+
@Component({
+  templateUrl: './dish-form.component.html',
+  styleUrls: ['./dish-form.component.scss']
+})
+export class DishFormComponent implements OnInit {
+
+  @Input() platename;
+
+  @Input() platedescription;
+
+  @Output()
+  submitDishEvent = new EventEmitter();
+
+  submitted = false;
+  dish = {name: '', description: ''};
+
+  constructor(public dishShareService: DishShareService) { }
+
+  ngOnInit() {
+    this.dish.name = this.platename;
+    this.dish.description = this.platedescription;
+  }
+
+  onSubmit(dishForm: NgForm): void {
+    this.dishShareService.createDish(dishForm.value.name, dishForm.value.description);
+    this.submitDishEvent.emit('dishSubmited');
+  }
+
+}
+
+
+
+

In this file there are definitions of multiple variables that will be used as input and output. Since the input variables are going to be used directly by html, only lowercase and underscore strategies can be used for them. In onSubmit(dishForm: NgForm) a service is used to pass these variables to another component. Finally, as a last thing, the selector inside @Component has been removed since a tag that will be used dynamically was already defined in the last step.

+
+
+
+

Solving the error

+
+

In order to be able to use this Angular Element a Polyfills/Browser support related error needs to be solved. This error can be solved in two ways:

+
+
+
+

== Changing the target

+
+

One solution is to change the target in tsconfig.json to es2015. This might not be doable for every application since maybe a specific target is required.

+
+
+
+

== Installing Polyfills

+
+

Another solution is to use a polyfill. In order to do so, the library is going to be installed with a package manager:

+
+
+

Yarn

+
+
+
+
yarn add @webcomponents/webcomponentsjs
+
+
+
+

Npm

+
+
+
+
npm install @webcomponents/webcomponentsjs
+
+
+
+

After the packet manager has finished, inside the src folder a new file polyfills.ts is found. To solve the error, importing the corresponding adapter (custom-elements-es5-adapter.js) is necessary:

+
+
+
+
....
+/***************************************************************************************************
+ * APPLICATION IMPORTS
+ */
+
+import '@webcomponents/webcomponentsjs/custom-elements-es5-adapter.js';
+....
+
+
+
+

If you want to learn more about polyfills in angular you can do it here

+
+
+
+

Building the Angular Element

+
+

First, before building the Angular Element, every element inside the app component except the module needs to be removed. After that, a bash script is created in the root folder. This script will bundle every necessary file into a single JS file.

+
+
+
+
ng build "projectName" --configuration production --output-hashing=none && cat dist/"projectName"/runtime.js dist/"projectName"/polyfills.js dist/"projectName"/scripts.js dist/"projectName"/main.js > ./dist/"projectName"/"nameWantedAngularElement".js
+
+
+
+

After executing the bash script, it will generate inside the path dist/"projectName" (or dist/apps/projectname in a Nx workspace) a JS file named "nameWantedAngularElement".js and a css file.

+
+
+
+ +
+

The library ngx-build-plus allows to add different options when building. In addition, it solves some errors that will occur when trying to use multiple angular elements in an application. In order to use it, yarn or npm can be used:

+
+
+

Yarn

+
+
+
+
yarn add ngx-build-plus
+
+
+
+

Npm

+
+
+
+
npm install ngx-build-plus
+
+
+
+

If you want to add it to a specific sub project in your projects folder, use the --project:

+
+
+
+
.... ngx-build-plus --project "project-name"
+
+
+
+

Using this library and the following command, an isolated Angular Element which won’t conflict with others can be generated. This Angular Element will not have a polyfill, so the project where we use it will need to include a polyfill with the Angular Element requirements.

+
+
+
+
ng build "projectName" --output-hashing none --single-bundle true --configuration production --bundle-styles false
+
+
+
+

This command will generate three things:

+
+
+
    +
  1. +

    The main JS bundle

    +
  2. +
  3. +

    The script JS

    +
  4. +
  5. +

    The css

    +
  6. +
+
+
+

These files will be used later instead of the single JS generated in the last step.

+
+
+
+

== == Extra parameters

+
+

Here are some extra useful parameters that ngx-build-plus provides:

+
+
+
    +
  • +

    --keep-polyfills: This parameter is going to allow us to keep the polyfills. This needs to be used with caution, avoiding using multiple different polyfills that could cause an error is necessary.

    +
  • +
  • +

    --extraWebpackConfig webpack.extra.js: This parameter allows us to create a JavaScript file inside our Angular Elements project with the name of different libraries. Using webpack these libraries will not be included in the Angular Element. This is useful to lower the size of our Angular Element by removing libraries shared. Example:

    +
  • +
+
+
+
+
const webpack = require('webpack');
+
+module.exports = {
+    "externals": {
+        "rxjs": "rxjs",
+        "@angular/core": "ng.core",
+        "@angular/common": "ng.common",
+        "@angular/common/http": "ng.common.http",
+        "@angular/platform-browser": "ng.platformBrowser",
+        "@angular/platform-browser-dynamic": "ng.platformBrowserDynamic",
+        "@angular/compiler": "ng.compiler",
+        "@angular/elements": "ng.elements",
+        "@angular/router": "ng.router",
+        "@angular/forms": "ng.forms"
+    }
+}
+
+
+
+
+

==

+
+
+
  If some libraries are excluded from the `Angular Element` you will need to add the bundled UMD files of those libraries manually.
+== ==
+
+
+
+
+

Using the Angular Element

+
+

The Angular Element that got generated in the last step can be used in almost every framework. In this case, the Angular Element is going to be used in html:

+
+
+
Listing 7. Sample index.html version without ngx-build-plus
+
+
<html>
+    <head>
+        <link rel="stylesheet" href="styles.css">
+    </head>
+    <body>
+        <div id="container">
+
+        </div>
+        <!--Use of the element non dynamically-->
+        <!--<plate-form platename="test" platedescription="test"></plate-form>-->
+        <script src="./devon4ngAngularElements.js"> </script>
+        <script>
+                var elContainer = document.getElementById('container');
+                var el= document.createElement('dish-form');
+                el.setAttribute('platename','test');
+                el.setAttribute('platedescription','test');
+                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+                elContainer.appendChild(el);
+        </script>
+    </body>
+</html>
+
+
+
+
Listing 8. Sample index.html version with ngx-build-plus
+
+
<html>
+    <head>
+        <link rel="stylesheet" href="styles.css">
+    </head>
+    <body>
+        <div id="container">
+
+        </div>
+        <!--Use of the element non dynamically-->
+        <!--<plate-form platename="test" platedescription="test"></plate-form>-->
+         <script src="./polyfills.js"> </script> <!-- Created using --keep-polyfills options -->
+        <script src="./scripts.js"> </script>
+         <script src="./main.js"> </script>
+        <script>
+                var elContainer = document.getElementById('container');
+                var el= document.createElement('dish-form');
+                el.setAttribute('platename','test');
+                el.setAttribute('platedescription','test');
+                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+                elContainer.appendChild(el);
+        </script>
+    </body>
+</html>
+
+
+
+

In this html, the css generated in the last step is going to be imported inside the <head> and then the JavaScript element is going to be imported at the end of the body. After that is done, there are two uses of Angular Elements in the html: one directly, with use of the @Input() variables as parameters, commented in the html:

+
+
+
+
....
+        <!--Use of the element non dynamically-->
+        <!--<plate-form platename="test" platedescription="test"></plate-form>-->
+....
+
+
+
+

and one dynamically inside the script:

+
+
+
+
....
+        <script>
+                var elContainer = document.getElementById('container');
+                var el= document.createElement('dish-form');
+                el.setAttribute('platename','test');
+                el.setAttribute('platedescription','test');
+                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+                elContainer.appendChild(el);
+        </script>
+....
+
+
+
+

This JavaScript is an example of how to dynamically create an Angular Element, inserting attributes to fill our @Input() variables and listening to the @Output() that was defined earlier. This is done with:

+
+
+
+
                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+
+
+
+

This allows JavaScript to hook with the @Output() event emitter that was defined. When this event gets called, another component that was defined gets inserted dynamically.

+
+
+
+

Angular Element within another Angular project

+
+

In order to use an Angular Element within another Angular project the following steps need to be followed:

+
+
+
+

Copy bundled script and css to resources

+
+

First copy the generated .js and .css inside assets in the corresponding folder.

+
+
+
+

Add bundled script to angular.json

+
+

Inside angular.json both of the files that were copied in the last step are going to be included. This will be done both in test and in build. Including them in test will allow performing unit tests.

+
+
+
+
{
+....
+  "architect": {
+    ....
+    "build": {
+      ....
+      "styles": [
+        ....
+          "src/assets/css/devon4ngAngularElements.css"
+        ....
+      ]
+      ....
+      "scripts": [
+        "src/assets/js/devon4ngAngularElements.js"
+      ]
+      ....
+    }
+    ....
+    "test": {
+      ....
+      "styles": [
+        ....
+          "src/assets/css/devon4ngAngularElements.css"
+        ....
+      ]
+      ....
+      "scripts": [
+        "src/assets/js/devon4ngAngularElements.js"
+      ]
+      ....
+    }
+  }
+}
+
+
+
+

By declaring the files in the angular.json angular will take care of including them in a proper way.

+
+
+
+

==

+
+
+
  If you are using Nx, the configuration file `angular.json` might be named as `workspace.json`, depending on how you had setup the workspace. The structure of the file remains similar though.
+== ==
+
+
+
+
+

Using Angular Element

+
+

There are two ways that Angular Element can be used:

+
+
+
+

== Create component dynamically

+
+

In order to add the component in a dynamic way, first adding a container is necessary:

+
+
+

app.component.html

+
+
+
+
....
+<div id="container">
+</div>
+....
+
+
+
+

With this container created, inside the app.component.ts a method is going to be created. This method is going to find the container, create the dynamic element and append it into the container.

+
+
+

app.component.ts

+
+
+
+
export class AppComponent implements OnInit {
+  ....
+  ngOnInit(): void {
+    this.createComponent();
+  }
+  ....
+  createComponent(): void {
+    const container = document.getElementById('container');
+    const component = document.createElement('dish-form');
+    container.appendChild(component);
+  }
+  ....
+
+
+
+
+

== Using it directly

+
+

In order to use it directly on the templates, in the app.module.ts the CUSTOM_ELEMENTS_SCHEMA needs to be added:

+
+
+
+
....
+import { NgModule, CUSTOM_ELEMENTS_SCHEMA } from '@angular/core';
+....
+@NgModule({
+  ....
+  schemas: [ CUSTOM_ELEMENTS_SCHEMA ],
+
+
+
+

This is going to allow the use of the Angular Element in the templates directly:

+
+
+

app.component.html

+
+
+
+
....
+<div id="container">
+  <dish-form></dish-form>
+</div>
+
+
+
+

You can find a working example of Angular Elements in our devon4ts-samples repo by referring the samples named angular-elements and angular-elements-test.

+
+ +
+
+

Angular Lazy loading

+
+

When the development of an application starts, it just contains a small set of features so the app usually loads fast. However, as new features are added, the overall application size grows up and its loading speed decreases. It is in this context where Lazy loading finds its place. +Lazy loading is a design pattern that defers initialization of objects until it is needed, so, for example, users that just access to a website’s home page do not need to have other areas loaded. +Angular handles lazy loading through the routing module which redirects to requested pages. Those pages can be loaded at start or on demand.

+
+
+
+

An example with Angular

+
+

To explain how lazy loading is implemented using angular, a basic sample app is going to be developed. This app will consist in a window named "level 1" that contains two buttons that redirects to other windows in a "second level". It is a simple example, but useful to understand the relation between angular modules and lazy loading.

+
+
+
+Levels app structure +
+
Figure 9. Levels app structure.
+
+
+

This graphic shows that modules act as gates to access components "inside" them.

+
+
+

Because the objective of this guide is related mainly with logic, the html structure and SCSS styles are less relevant, but the complete code can be found as a sample here.

+
+
+
+

Implementation

+
+

First write in a console ng new level-app --routing, to generate a new project called level-app including an app-routing.module.ts file (--routing flag). If you are using Nx, the command would be nx generate @nrwl/angular:app level-app --routing in your Nx workspace.

+
+
+

In the file app.component.html delete all the content except the router-outlet tag.

+
+
+
Listing 9. File app.component.html
+
+
<router-outlet></router-outlet>
+
+
+
+

The next steps consist of creating feature modules.

+
+
+
    +
  • +

    run ng generate module first --routing to generate a module named first.

    +
  • +
  • +

    run ng generate module first/second-left --routing to generate a module named second-left under first.

    +
  • +
  • +

    run ng generate module first/second-right --routing to generate a module second-right under first.

    +
  • +
  • +

    run ng generate component first/first to generate a component named first inside the module first.

    +
  • +
  • +

    run ng generate component first/second-left/content to generate a component content inside the module second-left.

    +
  • +
  • +

    run ng generate component first/second-right/content to generate a component content inside the module second-right.

    +
  • +
+
+
+
+

==

+
+
+
  If you are using Nx, you have to specify the project name (level-app) along with the --project flag. For example, command for generating the first module will be `ng generate module first --project=level-app --routing`
+== ==
+
+
+
+

To move between components we have to configure the routes used:

+
+
+

In app-routing.module.ts add a path 'first' to FirstComponent and a redirection from '' to 'first'.

+
+
+
Listing 10. File app-routing.module.ts.
+
+
...
+import { FirstComponent } from './first/first/first.component';
+
+const routes: Routes = [
+  {
+    path: 'first',
+    component: FirstComponent
+  },
+  {
+    path: '',
+    redirectTo: 'first',
+    pathMatch: 'full',
+  },
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule],
+})
+export class AppRoutingModule {}
+
+
+
+

In app.module.ts import the module which includes FirstComponent.

+
+
+
Listing 11. File app.module.ts
+
+
....
+import { FirstModule } from './first/first.module';
+
+@NgModule({
+  ...
+  imports: [
+    ....
+    FirstModule
+  ],
+  ...
+})
+export class AppModule { }
+
+
+
+

In first-routing.module.ts add routes that direct to the content of SecondRightModule and SecondLeftModule. The content of both modules have the same name so, in order to avoid conflicts the name of the components are going to be changed using as ( original-name as new-name).

+
+
+
Listing 12. File first-routing.module.ts
+
+
...
+import { ContentComponent as ContentLeft} from './second-left/content/content.component';
+import { ContentComponent as ContentRight} from './second-right/content/content.component';
+import { FirstComponent } from './first/first.component';
+
+const routes: Routes = [
+  {
+    path: '',
+    component: FirstComponent
+  },
+  {
+    path: 'first/second-left',
+    component: ContentLeft
+  },
+  {
+    path: 'first/second-right',
+    component: ContentRight
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule]
+})
+export class FirstRoutingModule { }
+
+
+
+

In first.module.ts import SecondLeftModule and SecondRightModule.

+
+
+
Listing 13. File first.module.ts
+
+
...
+import { SecondLeftModule } from './second-left/second-left.module';
+import { SecondRightModule } from './second-right/second-right.module';
+
+@NgModule({
+  ...
+  imports: [
+    ...
+    SecondLeftModule,
+    SecondRightModule,
+  ]
+})
+export class FirstModule { }
+
+
+
+

Using the current configuration, we have a project that loads all the modules in an eager way. Run ng serve (with --project=level-app in an Nx workspace) to see what happens.

+
+
+

First, during the compilation we can see that just a main file is built.

+
+
+
+Compile eager +
+
Figure 10. Compile eager.
+
+
+

If we go to http://localhost:4200/first and open developer options (F12 on Chrome), it is found that a document named "first" is loaded.

+
+
+
+First level eager +
+
Figure 11. First level eager.
+
+
+

If we click on [Go to right module] a second level module opens, but there is no 'second-right' document.

+
+
+
+Second level right eager +
+
Figure 12. Second level right eager.
+
+
+

But, typing the URL directly will load 'second-right' but no 'first', even if we click on [Go back]

+
+
+
+Second level right eager +
+
Figure 13. Second level right eager direct URL.
+
+
+

Modifying an angular application to load its modules lazily is easy, you have to change the routing configuration of the desired module (for example FirstModule).

+
+
+
Listing 14. File app-routing.module.ts.
+
+
const routes: Routes = [
+  {
+    path: 'first',
+    loadChildren: () => import('./first/first.module').then(m => m.FirstModule),
+  },
+  {
+    path: '',
+    redirectTo: 'first',
+    pathMatch: 'full',
+  },
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule],
+})
+export class AppRoutingModule {}
+
+
+
+

Notice that instead of loading a component, you dynamically import it in a loadChildren attribute because modules act as gates to access components "inside" them. Updating the app to load lazily has four consequences:

+
+
+
    +
  1. +

    No component attribute.

    +
  2. +
  3. +

    No import of FirstComponent.

    +
  4. +
  5. +

    FirstModule import has to be removed from the imports array at app.module.ts.

    +
  6. +
  7. +

    Change of context.

    +
  8. +
+
+
+

If we check first-routing.module.ts again, we can see that the path for ContentLeft and ContentRight is set to 'first/second-left' and 'first/second-right' respectively, so writing http://localhost:4200/first/second-left will redirect us to ContentLeft. However, after loading a module with loadChildren setting the path to 'second-left' and 'second-right' is enough because it acquires the context set by AppRoutingModule.

+
+
+
Listing 15. File first-routing.module.ts
+
+
const routes: Routes = [
+  {
+    path: '',
+    component: FirstComponent
+  },
+  {
+    path: 'second-left',
+    component: ContentLeft
+  },
+  {
+    path: 'second-right',
+    component: ContentRight
+  }
+];
+
+
+
+

If we go to 'first' then FirstModule is situated in '/first' but also its children ContentLeft and ContentRight, so it is not necessary to write in their path 'first/second-left' and 'first/second-right', because that will situate the components on 'first/first/second-left' and 'first/first/second-right'.

+
+
+
+First level wrong path +
+
Figure 14. First level lazy wrong path.
+
+
+

When we compile an app with lazy loaded modules, files containing them will be generated.

+
+
+
+First level lazy compilation +
+
Figure 15. First level lazy compilation.
+
+
+

And if we go to developer tools → network, we can find those modules loaded (if they are needed).

+
+
+
+First level lazy +
+
Figure 16. First level lazy.
+
+
+

To load the component ContentComponent of SecondLeftModule lazily, we have to load SecondLeftModule as a children of FirstModule:

+
+
+
    +
  • +

    Change component to loadChildren and reference SecondLeftModule.

    +
  • +
+
+
+
Listing 16. File first-routing.module.ts.
+
+
const routes: Routes = [
+  {
+    path: '',
+    component: FirstComponent
+  },
+  {
+    path: 'second-left',
+    loadChildren: () => import('./second-left/second-left.module').then(m => m.SecondLeftModule),
+  },
+  {
+    path: 'second-right',
+    component: ContentRight
+  }
+];
+
+
+
+
    +
  • +

    Remove SecondLeftModule at first.module.ts

    +
  • +
  • +

    Route the components inside SecondLeftModule. Without this step nothing would be displayed.

    +
  • +
+
+
+
Listing 17. File second-left-routing.module.ts.
+
+
...
+import { ContentComponent } from './content/content.component';
+
+const routes: Routes = [
+  {
+    path: '',
+    component: ContentComponent
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule]
+})
+export class SecondLeftRoutingModule { }
+
+
+
+
    +
  • +

    Run ng serve to generate files containing the lazy modules.

    +
  • +
+
+
+
+Second level lazy +
+
Figure 17. Second level lazy loading compilation.
+
+
+

Clicking on [Go to left module] triggers the load of SecondLeftModule.

+
+
+
+Second level lazy network +
+
Figure 18. Second level lazy loading network.
+
+
+
+

Conclusion

+
+

Lazy loading is a pattern useful when new features are added, these features are usually identified as modules which can be loaded only if needed as shown in this document, reducing the time spent loading an application.

+
+ +
+
+

Angular Library

+
+

Angular CLI provides us with methods that allow the creation of a library. After that, using a package manager (either npm or yarn) the library can be built and packed, which will later allow us to install/publish it.

+
+
+
+

What’s a library?

+
+

From Wikipedia: a library is a collection of non-volatile resources used by computer programs, often for software development. These may include configuration data, documentation, help data, message templates, pre-written code and subroutines, classes, values or type specifications.

+
+
+
+

How to build a library

+
+

In this section, a library is going to be built step by step. Please note, we will be explaining the steps using both Angular CLI and Nx CLI. You are free to choose either one for your development.

+
+
+
+

1. Creating an empty application

+
+

First, using Angular CLI we are going to generate an empty application which will later be filled with the generated library. In order to do so, Angular CLI allows us to add to ng new "application-name" an option (--create-application). This option is going to tell Angular CLI not to create the initial app project. This is convenient since a library is going to be generated in later steps. Using this command ng new "application-name" --create-application=false an empty project with the name wanted is created.

+
+
+
+
ng new "application-name" --create-application=false
+
+
+
+

This step is much easier and more straightforward when using Nx. Nx allows us to work in a monorepo workspace, where you can develop a project as an application, or a library, or a tool. You can follow this guide to get started with Nx. +The command for generating a library in Nx is nx generate @nrwl/angular:library library-name --publishable --importPath=library-name. This will create an empty angular application which we can modify and publish as a library.

+
+
+
+

2. Generating a library

+
+

After generating an empty application, a library is going to be generated. Inside the folder of the project, the Angular CLI command ng generate library "library-name" is going to generate the library as a project (projects/"library-name"). As an addition, the option --prefix="library-prefix-wanted" allows us to switch the default prefix that Angular generated with (lib). Using the option to change the prefix the command will look like this ng generate library "library-name" --prefix="library-prefix-wanted".

+
+
+
+
ng generate library "library-name" --prefix="library-prefix-wanted"
+
+
+
+

If you are using Nx, this step is not needed as it is already covered in step 1. In this case, the library project will be generated in the libs folder of a Nx workspace.

+
+
+
+

3. Modifying our library

+
+

In the last step we generated a library. This automatically generates a module, a service and a component inside projects/"library-name" that we can modify, adding new methods, components, etc. that we want to use in other projects. We can generate other elements using the usual Angular CLI generate commands, adding the option --project="library-name"; this will allow us to generate elements within our project. An example of this is: ng generate service "name" --project="library-name".

+
+
+
+
ng generate "element" "name" --project="library-name"
+
+
+
+

You can use the same command as above in a Nx workspace.

+
+
+
+

4. Exporting the generated things

+
+

Inside the library (projects/"library-name") there’s a public_api.ts which is the file that exports the elements inside the library. (The file is named index.ts in an Nx workspace.) In case we generated other things, this file needs to be modified adding the extra exports with the generated elements. In addition, changing the library version is possible in the file package.json.

+
+
+
+

5. Building our library

+
+

Once we added the necessary exports, in order to use the library in other applications, we need to build the library. The command ng build "library-name" is going to build the library, generating the necessary files in "project-name"/dist/"library-name".

+
+
+
+
ng build "library-name"
+
+
+
+

You can use the same command in Nx as well. Only the path for the generated files will be slightly different: "project-name"/dist/libs/"library-name"

+
+
+
+

6. Packing the library

+
+

In this step we are going to pack the build library. In order to do so, we need to go inside dist/"library-name" (or dist/libs/"library-name") and then run either npm pack or yarn pack to generate a "library-name-version.tgz" file.

+
+
+
Listing 18. Packing using npm
+
+
npm pack
+
+
+
+
Listing 19. Packing using yarn
+
+
yarn pack
+
+
+
+
+

7. Publishing to npm repository (optional)

+
+
    +
  • +

    Add a README.md and LICENSE file. The text inside README.md will be used in your npm package web page as documentation.

    +
  • +
  • +

    run npm adduser to create an npm account if you do not have one; otherwise run npm login and introduce your credentials.

    +
  • +
  • +

    run npm publish inside dist/"library-name" folder.

    +
  • +
  • +

    Check that the library is published: https://npmjs.com/package/library-name

    +
  • +
+
+
+
+

8. Installing our library in other projects

+
+

In this step we are going to install/add the library on other projects.

+
+
+
+

== npm

+
+

In order to add the library in other applications, there are two ways:

+
+
+
    +
  • +

    Option 1: From inside the application where the library is going to get used, using the command npm install "path-to-tgz"/"library-name-version.tgz" allows us to install the .tgz generated in Packing the library.

    +
  • +
  • +

    Option 2: run npm install "library-name" to install it from npm repository.

    +
  • +
+
+
+
+

== yarn

+
+

To add the package using yarn:

+
+
+
    +
  • +

    Option 1: From inside the application where the library is going to get used, using the command yarn add "path-to-tgz"/"library-name-version.tgz" allows us to install the .tgz generated in Packing the library.

    +
  • +
  • +

    Option 2: run yarn add "library-name" to install it from npm repository.

    +
  • +
+
+
+
+

9. Using the library

+
+

Finally, once the library is installed with either package manager, you can start using the elements inside it as they would be used in a normal element inside the application. Example app.component.ts:

+
+
+
+
import { Component, OnInit } from '@angular/core';
+import { MyLibraryService } from 'my-library';
+
+@Component({
+  selector: 'app-root',
+  templateUrl: './app.component.html',
+  styleUrls: ['./app.component.scss']
+})
+export class AppComponent implements OnInit {
+
+  toUpper: string;
+
+  constructor(private myLibraryService: MyLibraryService) {}
+  title = 'devon4ng library test';
+  ngOnInit(): void {
+    this.toUpper = this.myLibraryService.firstLetterToUpper('test');
+  }
+}
+
+
+
+

Example app.component.html:

+
+
+
+
<!--The content below is only a placeholder and can be replaced.-->
+<div style="text-align:center">
+  <h1>
+    Welcome to {{ title }}!
+  </h1>
+  <img width="300" alt="Angular Logo" src="data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAyNTAgMjUwIj4KICAgIDxwYXRoIGZpbGw9IiNERDAwMzEiIGQ9Ik0xMjUgMzBMMzEuOSA2My4ybDE0LjIgMTIzLjFMMTI1IDIzMGw3OC45LTQzLjcgMTQuMi0xMjMuMXoiIC8+CiAgICA8cGF0aCBmaWxsPSIjQzMwMDJGIiBkPSJNMTI1IDMwdjIyLjItLjFWMjMwbDc4LjktNDMuNyAxNC4yLTEyMy4xTDEyNSAzMHoiIC8+CiAgICA8cGF0aCAgZmlsbD0iI0ZGRkZGRiIgZD0iTTEyNSA1Mi4xTDY2LjggMTgyLjZoMjEuN2wxMS43LTI5LjJoNDkuNGwxMS43IDI5LjJIMTgzTDEyNSA1Mi4xem0xNyA4My4zaC0zNGwxNy00MC45IDE3IDQwLjl6IiAvPgogIDwvc3ZnPg== ">
+</div>
+<h2>Here is my library service being used: {{toUpper}}</h2>
+<lib-my-library></lib-my-library>
+
+
+
+

Example app.module.ts:

+
+
+
+
import { BrowserModule } from '@angular/platform-browser';
+import { NgModule } from '@angular/core';
+
+import { AppRoutingModule } from './app-routing.module';
+import { AppComponent } from './app.component';
+
+import { MyLibraryModule } from 'my-library';
+@NgModule({
+  declarations: [
+    AppComponent
+  ],
+  imports: [
+    BrowserModule,
+    AppRoutingModule,
+    MyLibraryModule
+  ],
+  providers: [],
+  bootstrap: [AppComponent]
+})
+export class AppModule { }
+
+
+
+

The result from using the library:

+
+
+
+result +
+
+
+
+

devon4ng libraries

+
+

In devonfw/devon4ng-library you can find some useful libraries:

+
+
+
    +
  • +

    Authorization module: This devon4ng Angular module adds rights-based authorization to your Angular app.

    +
  • +
  • +

    Cache module: Use this devon4ng Angular module when you want to cache requests to server. You may configure it to store in cache only the requests you need and to set the duration you want.

    +
  • +
+
+ +
+
+

Angular Material Theming

+
+

Angular Material library offers UI components for developers. Those components follow Google Material design baselines, but characteristics like colors can be modified in order to adapt them to the needs of the client: corporative colors, corporative identity, dark themes, …​

+
+
+
+

Theming basics

+
+

In Angular Material, a theme is created mixing multiple colors. Colors and its light and dark variants conform a palette. In general, a theme consists of the following palettes:

+
+
+
    +
  • +

    primary: Most used across screens and components.

    +
  • +
  • +

    accent: Floating action button and interactive elements.

    +
  • +
  • +

    warn: Error state.

    +
  • +
  • +

    foreground: Text and icons.

    +
  • +
  • +

    background: Element backgrounds.

    +
  • +
+
+
+
+Theme palette +
+
Figure 19. Palettes and variants.
+
+
+

In Angular Material, a palette is represented as a SCSS map.

+
+
+
+SCSS map +
+
Figure 20. SCSS map and palettes.
+
+
+ + + + + +
+ + +Some components can be forced to use primary, accent or warn palettes using the attribute color, for example: <mat-toolbar color="primary">. +
+
+
+
+

Pre-built themes

+
+

Available pre-built themes:

+
+
+
    +
  • +

    deeppurple-amber.css

    +
  • +
+
+
+
+`deeppurple-amber theme` +
+
Figure 21. deeppurple-amber theme.
+
+
+
    +
  • +

    indigo-pink.css

    +
  • +
+
+
+
+indigo-pink theme +
+
Figure 22. indigo-pink theme.
+
+
+
    +
  • +

    pink-bluegrey.css

    +
  • +
+
+
+
+` pink-bluegrey theme` +
+
Figure 23. pink-bluegrey theme.
+
+
+
    +
  • +

    purple-green.css

    +
  • +
+
+
+
+purple-green theme +
+
Figure 24. purple-green theme.
+
+
+

The pre-built themes can be added using @import.

+
+
+
+
@import '@angular/material/prebuilt-themes/deeppurple-amber.css';
+
+
+
+
+

Custom themes

+
+

Sometimes pre-built themes do not meet the needs of a project, because color schemas are too specific or do not incorporate branding colors, in those situations custom themes can be built to offer a better solution to the client.

+
+
+

For this topic, we are going to use a basic layout project that can be found in devon4ts-samples repository.

+
+
+
+

Basics

+
+

Before starting writing custom themes, there are some necessary things that have to be mentioned:

+
+
+
    +
  • +

    Add a default theme: The project mentioned before has just one global SCSS style sheet styles.scss that includes indigo-pink.scss which will be the default theme.

    +
  • +
  • +

    Add @import '~@angular/material/theming'; at the beginning of the every style sheet to be able to use angular material pre-built color palettes and functions.

    +
  • +
  • +

    Add @include mat-core(); once per project, so if you are writing multiple themes in multiple files you could import those files from a 'central' one (for example styles.scss). This includes all common styles that are used by multiple components.

    +
  • +
+
+
+
+Theme files structure +
+
Figure 25. Theme files structure.
+
+
+
+

Basic custom theme

+
+

To create a new custom theme, the .scss file containing it has to import the angular _theming.scss file (angular/material/theming) and include mat-core. _theming.scss includes multiple color palettes and some functions that we are going to see below. The file for this basic theme is going to be named styles-custom-dark.scss.

+
+
+

First, declare new variables for primary, accent and warn palettes. Those variables are going to store the result of the function mat-palette.

+
+
+

mat-palette accepts four arguments: base color palette, main, lighter and darker variants (See Palettes and variants.) and returns a new palette including some additional map values: default, lighter and darker ([id_scss_map]). Only the first argument is mandatory.

+
+
+
Listing 20. File styles-custom-dark.scss.
+
+
$custom-dark-theme-primary: mat-palette($mat-pink);
+$custom-dark-theme-accent: mat-palette($mat-blue);
+$custom-dark-theme-warn: mat-palette($mat-red);
+
+
+
+

In this example we are using colors available in _theming.scss: mat-pink, mat-blue, mat-red. If you want to use a custom color you need to define a new map, for instance:

+
+
+
Listing 21. File styles-custom-dark.scss custom pink.
+
+
$my-pink: (
+    50 : #fcf3f3,
+    100 : #f9e0e0,
+    200 : #f5cccc,
+    300 : #f0b8b8,
+    500 : #ea9999,
+    900 : #db6b6b,
+    A100 : #ffffff,
+    A200 : #ffffff,
+    A400 : #ffeaea,
+    A700 : #ffd0d0,
+    contrast: (
+        50 : #000000,
+        100 : #000000,
+        200 : #000000,
+        300 : #000000,
+        900 : #000000,
+        A100 : #000000,
+        A200 : #000000,
+        A400 : #000000,
+        A700 : #000000,
+    )
+);
+
+$custom-dark-theme-primary: mat-palette($my-pink);
+...
+
+
+
+ + + + + +
+ + +Some pages allows to create these palettes easily, for instance: http://mcg.mbitson.com +
+
+
+

Until now, we just have defined primary, accent and warn palettes but what about foreground and background? Angular material has two functions to change both:

+
+
+
    +
  • +

    mat-light-theme: Receives as arguments primary, accent and warn palettes and return a theme whose foreground is basically black (texts, icons, …​), the background is white and the other palettes are the received ones.

    +
  • +
+
+
+
+`deeppurple-amber theme` +
+
Figure 26. Custom light theme.
+
+
+
    +
  • +

    mat-dark-theme: Similar to mat-light-theme but returns a theme whose foreground is basically white and background black.

    +
  • +
+
+
+
+`deeppurple-amber theme` +
+
Figure 27. Custom dark theme.
+
+
+

For this example we are going to use mat-dark-theme and save its result in $custom-dark-theme.

+
+
+
Listing 22. File styles-custom-dark.scss updated with mat-dark-theme.
+
+
...
+
+$custom-dark-theme: mat-dark-theme(
+  $custom-dark-theme-primary,
+  $custom-dark-theme-accent,
+  $custom-dark-theme-warn
+);
+
+
+
+

To apply the saved theme, we have to go to styles.scss and import our styles-custom-dark.scss and include a function called angular-material-theme using the theme variable as argument.

+
+
+
Listing 23. File styles.scss.
+
+
...
+@import 'styles-custom-dark.scss';
+@include angular-material-theme($custom-dark-theme);
+
+
+
+

If we have multiple themes it is necessary to add the include statement inside a css class and use it in src/index.html → app-root component.

+
+
+
Listing 24. File styles.scss updated with custom-dark-theme class.
+
+
...
+@import 'styles-custom-dark.scss';
+
+.custom-dark-theme {
+  @include angular-material-theme($custom-dark-theme);
+}
+
+
+
+
Listing 25. File src/index.html.
+
+
...
+<app-root class="custom-dark-theme"></app-root>
+...
+
+
+
+

This will apply $custom-dark-theme theme for the entire application.

+
+
+
+

Full custom theme

+
+

Sometimes it is necessary to customize individual background and foreground elements; in those situations we have to create a new function similar to mat-light-theme and mat-dark-theme. Let’s focus on mat-light-theme:

+
+
+
Listing 26. Source code of mat-light-theme
+
+
@function mat-light-theme($primary, $accent, $warn: mat-palette($mat-red)) {
+  @return (
+    primary: $primary,
+    accent: $accent,
+    warn: $warn,
+    is-dark: false,
+    foreground: $mat-light-theme-foreground,
+    background: $mat-light-theme-background,
+  );
+}
+
+
+
+

As we can see, mat-light-theme takes three arguments and returns a map including them as primary, accent and warn color; but there are three more keys in that map: is-dark, foreground and background.

+
+
+
    +
  • +

    is-dark: Boolean true if it is a dark theme, false otherwise.

    +
  • +
  • +

    background: Map that stores the color for multiple background elements.

    +
  • +
  • +

    foreground: Map that stores the color for multiple foreground elements.

    +
  • +
+
+
+

To show which elements can be colored lets create a new theme in a file styles-custom-cap.scss:

+
+
+
Listing 27. File styles-custom-cap.scss: Background and foreground variables.
+
+
@import '~@angular/material/theming';
+
+// custom background and foreground palettes
+$my-cap-theme-background: (
+  status-bar: #0070ad,
+  app-bar: map_get($mat-blue, 900),
+  background: #12abdb,
+  hover: rgba(white, 0.04),
+  card: map_get($mat-red, 800),
+  dialog: map_get($mat-grey, 800),
+  disabled-button: $white-12-opacity,
+  raised-button: map-get($mat-grey, 800),
+  focused-button: $white-6-opacity,
+  selected-button: map_get($mat-grey, 900),
+  selected-disabled-button: map_get($mat-grey, 800),
+  disabled-button-toggle: black,
+  unselected-chip: map_get($mat-grey, 700),
+  disabled-list-option: black,
+);
+
+$my-cap-theme-foreground: (
+  base: yellow,
+  divider: $white-12-opacity,
+  dividers: $white-12-opacity,
+  disabled: rgba(white, 0.3),
+  disabled-button: rgba(white, 0.3),
+  disabled-text: rgba(white, 0.3),
+  hint-text: rgba(white, 0.3),
+  secondary-text: rgba(white, 0.7),
+  icon: white,
+  icons: white,
+  text: white,
+  slider-min: white,
+  slider-off: rgba(white, 0.3),
+  slider-off-active: rgba(white, 0.3),
+);
+
+
+
+

Function which uses the variables defined before to create a new theme:

+
+
+
Listing 28. File styles-custom-cap.scss: Creating a new theme function.
+
+
// instead of creating a theme with mat-light-theme or mat-dark-theme,
+// we will create our own theme-creating function that lets us apply our own foreground and background palettes.
+@function create-my-cap-theme($primary, $accent, $warn: mat-palette($mat-red)) {
+  @return (
+    primary: $primary,
+    accent: $accent,
+    warn: $warn,
+    is-dark: false,
+    foreground: $my-cap-theme-foreground,
+    background: $my-cap-theme-background
+  );
+}
+
+
+
+

Calling the new function and storing its value in $custom-cap-theme.

+
+
+
Listing 29. File styles-custom-cap.scss: Storing the new theme.
+
+
// We use create-my-cap-theme instead of mat-light-theme or mat-dark-theme
+$custom-cap-theme-primary: mat-palette($mat-green);
+$custom-cap-theme-accent: mat-palette($mat-blue);
+$custom-cap-theme-warn: mat-palette($mat-red);
+
+$custom-cap-theme: create-my-cap-theme(
+  $custom-cap-theme-primary,
+  $custom-cap-theme-accent,
+  $custom-cap-theme-warn
+);
+
+
+
+

After defining our new theme, we can import it from styles.scss.

+
+
+
Listing 30. File styles.scss updated with custom-cap-theme class.
+
+
...
+@import 'styles-custom-cap.scss';
+.custom-cap-theme {
+  @include angular-material-theme($custom-cap-theme);
+}
+
+
+
+
+

Multiple themes and overlay-based components

+
+

Certain components (e.g. menu, select, dialog, etc.) that are inside of a global overlay container require an additional step to be affected by the theme’s css class selector.

+
+
+
Listing 31. File app.module.ts
+
+
import {OverlayContainer} from '@angular/cdk/overlay';
+
+@NgModule({
+  // ...
+})
+export class AppModule {
+  constructor(overlayContainer: OverlayContainer) {
+    overlayContainer.getContainerElement().classList.add('custom-cap-theme');
+  }
+}
+
+
+
+ +
+

Angular Progressive Web App

+
+

Progressive web applications (PWA) are web applications that offer a better user experience than traditional ones. In general, they solve problems related to reliability and speed:

+
+
+
    +
  • +

    Reliability: PWA are stable. In this context stability means that even with slow connections or even with no network at all, the application still works. To achieve this, some basic resources like styles, fonts, requests, …​ are stored; due to this caching, it is not possible to assure that the content is always up-to-date.

    +
  • +
  • +

    Speed: When a user opens an application, he or she will expect it to load almost immediately (almost 53% of users abandon sites that take longer than 3 seconds, source: https://developers.google.com/web/progressive-web-apps/#fast).

    +
  • +
+
+
+

PWA uses a script called a service worker, which runs in the background and essentially acts as a proxy between the web app and the network, intercepting requests and acting depending on the network conditions.

+
+
+
+

Assumptions

+
+

This guide assumes that you already have installed:

+
+
+
    +
  • +

    NodeJS

    +
  • +
  • +

    npm package manager

    +
  • +
  • +

    Angular CLI / Nx CLI

    +
  • +
+
+
+
+

Sample Application

+
+
+My Thai Star recommendation +
+
Figure 28. Basic angular PWA.
+
+
+

To explain how to build a PWA using Angular, a basic application is going to be built. This app will be able to ask for resources and save them in the cache in order to work even offline.

+
+
+
+

Step 1: Create a new project

+
+

This step can be completed with one simple command using the Angular CLI: ng new <name>, where <name> is the name for the app. In this case, the app is going to be named basic-ng-pwa. If you are using Nx CLI, you can use the command nx generate @nrwl/angular:app <name> in your Nx workspace. You can follow this guide if you want to get started with Nx workspace.

+
+
+
+

Step 2: Create a service

+
+

Web applications usually use external resources, making it necessary to add services which can get those resources. This application gets a dish from My Thai Star’s back-end and shows it. To do so, a new service is going to be created.

+
+
+
    +
  • +

    go to project folder: cd basic-ng-pwa. If using Nx, go to the root folder of the workspace.

    +
  • +
  • +

    run ng generate service data. For Nx CLI, specify the project name with --project flag. So the command becomes ng generate service data --project=basic-ng-pwa

    +
  • +
  • +

    Modify data.service.ts, environment.ts, environment.prod.ts

    +
  • +
+
+
+

To retrieve data with this service, you have to import the module HttpClient and add it to the service’s constructor. Once added, use it to create a function getDishes() that sends an HTTP request to My Thai Star’s back-end. The URL of the back-end can be stored as an environment variable MY_THAI_STAR_DISH.

+
+
+

data.service.ts

+
+
+
+
  ...
+  import { HttpClient } from '@angular/common/http';
+  import { MY_THAI_STAR_DISH } from '../environments/environment';
+  ...
+
+  export class DataService {
+    constructor(private http: HttpClient) {}
+
+    /* Get data from Back-end */
+    getDishes() {
+      return this.http.get(MY_THAI_STAR_DISH);
+    }
+    ...
+  }
+
+
+
+

environments.ts

+
+
+
+
  ...
+  export const MY_THAI_STAR_DISH =
+  'https://mts-devonfw-core.cloud.okteto.net/api/services/rest/dishmanagement/v1/dish/1';
+  ...
+
+
+
+

environments.prod.ts

+
+
+
+
  ...
+  export const MY_THAI_STAR_DISH =
+  'https://mts-devonfw-core.cloud.okteto.net/api/services/rest/dishmanagement/v1/dish/1';
+  ...
+
+
+
+
+

Step 3: Use the service

+
+

The component AppComponent implements the interface OnInit and inside its method ngOnInit() the subscription to the services is done. When a dish arrives, it is saved and shown (app.component.html).

+
+
+
+
  ...
+  import { DataService } from './data.service';
+  export class AppComponent implements OnInit {
+  dish: { name: string; description: string } = { name: '', description: ''};
+
+  ...
+  ngOnInit() {
+    this.data
+      .getDishes()
+      .subscribe(
+        (dishToday: { dish: { name: string; description: string } }) => {
+          this.dish = {
+            name: dishToday.dish.name,
+            description: dishToday.dish.description,
+          };
+        },
+      );
+  }
+}
+
+
+
+
+

Step 4: Structures, styles and updates

+
+

This step shows code interesting inside the sample app. The complete content can be found in devon4ts-samples.

+
+
+

index.html

+
+
+

To use the Montserrat font add the following link inside the head tag of the app’s index.html file.

+
+
+
+
  <link href="https://fonts.googleapis.com/css?family=Montserrat" rel="stylesheet">
+
+
+
+

styles.scss

+
+
+
+
  body {
+    ...
+    font-family: 'Montserrat', sans-serif;
+  }
+
+
+
+

app.component.ts

+
+
+

This file is also used to reload the app if there are any changes.

+
+
+
    +
  • +

    SwUpdate: This object comes inside the @angular/pwa package and it is used to detect changes and reload the page if needed.

    +
  • +
+
+
+
+
  ...
+  import { SwUpdate } from '@angular/service-worker';
+
+  export class AppComponent implements OnInit {
+
+  ...
+    constructor(updates: SwUpdate, private data: DataService) {
+      updates.available.subscribe((event) => {
+        updates.activateUpdate().then(() => document.location.reload());
+      });
+    }
+    ...
+  }
+
+
+
+
+

Step 5: Make it Progressive.

+
+

Install the Angular PWA package with ng add @angular/pwa --project=<name>. As before, substitute name with basic-ng-pwa.

+
+
+

The above command completes the following actions:

+
+
+
    +
  1. +

    Adds the @angular/service-worker package to your project.

    +
  2. +
  3. +

    Enables service worker build support in the CLI.

    +
  4. +
  5. +

    Imports and registers the service worker in the app module.

    +
  6. +
  7. +

    Updates the index.html file:

    +
    +
      +
    • +

      Includes a link to add the manifest.json file.

      +
    • +
    • +

      Adds meta tags for theme-color.

      +
    • +
    • +

      Installs icon files to support the installed Progressive Web App (PWA).

      +
    • +
    • +

      Creates the service worker configuration file called ngsw-config.json, which specifies the caching behaviors and other settings.

      +
    • +
    +
    +
  8. +
+
+
+
+

== manifest.json

+
+

manifest.json is a file that allows to control how the app is displayed in places where native apps are displayed.

+
+
+

Fields

+
+
+

name: Name of the web application.

+
+
+

short_name: Short version of name.

+
+
+

theme_color: Default theme color for an application context.

+
+
+

background_color: Expected background color of the web application.

+
+
+

display: Preferred display mode.

+
+
+

scope: Navigation scope of this web application’s application context.

+
+
+

start_url: URL loaded when the user launches the web application.

+
+
+

icons: Array of icons that serve as representations of the web app.

+
+
+

Additional information can be found here.

+
+
+
+

== ngsw-config.json

+
+

ngsw-config.json specifies which files and data URLs have to be cached and updated by the Angular service worker.

+
+
+

Fields

+
+
+
    +
  • +

    index: File that serves as index page to satisfy navigation requests.

    +
  • +
  • +

    assetGroups: Resources that are part of the app version that update along with the app.

    +
    +
      +
    • +

      name: Identifies the group.

      +
    • +
    • +

      installMode: How the resources are cached (pre-fetch or lazy).

      +
    • +
    • +

      updateMode: Caching behavior when a new version of the app is found (pre-fetch or lazy).

      +
    • +
    • +

      resources: Resources to cache. There are three groups.

      +
      +
        +
      • +

        files: Lists patterns that match files in the distribution directory.

        +
      • +
      • +

        urls: URL patterns matched at runtime.

        +
      • +
      +
      +
    • +
    +
    +
  • +
  • +

    dataGroups: Useful for API requests.

    +
    +
      +
    • +

      name: Identifies the group.

      +
    • +
    • +

      urls: URL patterns matched at runtime.

      +
    • +
    • +

      version: Indicates that the resources being cached have been updated in a backwards-incompatible way.

      +
    • +
    • +

      cacheConfig: Policy by which matching requests will be cached

      +
      +
        +
      • +

        maxSize: The maximum number of entries, or responses, in the cache.

        +
      • +
      • +

        maxAge: How long responses are allowed to remain in the cache.

        +
        +
          +
        • +

          d: days. (5d = 5 days).

          +
        • +
        • +

          h: hours

          +
        • +
        • +

          m: minutes

          +
        • +
        • +

          s: seconds. (5m20s = 5 minutes and 20 seconds).

          +
        • +
        • +

          u: milliseconds

          +
        • +
        +
        +
      • +
      • +

        timeout: How long the Angular service worker will wait for the network to respond before using a cached response. Same data format as maxAge.

        +
      • +
      • +

        strategy: Caching strategies (performance or freshness).

        +
      • +
      +
      +
    • +
    +
    +
  • +
  • +

    navigationUrls: List of URLs that will be redirected to the index file.

    +
  • +
+
+
+

Additional information can be found here.

+
+
+
+

Step 6: Configure the app

+
+

manifest.json

+
+
+

Default configuration.

+
+
+

 

+
+
+

ngsw-config.json

+
+
+

At assetGroups → resources → urls: In this field the google fonts API is added in order to use Montserrat font even without network.

+
+
+
+
  "urls": [
+          "https://fonts.googleapis.com/**"
+        ]
+
+
+
+

At the root of the json: A data group to cache API calls.

+
+
+
+
  {
+    ...
+    "dataGroups": [{
+      "name": "mythaistar-dishes",
+      "urls": [
+        "https://mts-devonfw-core.cloud.okteto.net/api/services/rest/dishmanagement/v1/dish/1"
+      ],
+      "cacheConfig": {
+        "maxSize": 100,
+        "maxAge": "1h",
+        "timeout": "10s",
+        "strategy": "freshness"
+      }
+    }]
+  }
+
+
+
+
+

Step 7: Check that your app is a PWA

+
+

To check if an app is a PWA let's compare its normal behavior against itself but built for production. Run in the project’s root folder the commands below:

+
+
+

ng build --prod to build the app using production settings.(nx build <name> --prod in Nx CLI)

+
+
+

npm install http-server to install an npm module that can serve your built application. Documentation here.

+
+
+

Go to the dist/basic-ng-pwa/ folder running cd dist/basic-ng-pwa. In an Nx workspace, the path will be dist/apps/basic-ng-pwa

+
+
+

http-server -o to serve your built app.

+
+
+
+Http server running +
+
Figure 29. Http server running on localhost:8081.
+
+
+

 

+
+
+

In another console instance run ng serve (or nx serve basic-ng-pwa for Nx) to open the common app (not built).

+
+
+
+.Angular server running +
+
Figure 30. Angular server running on localhost:4200.
+
+
+

 

+
+
+

The first difference can be found on Developer tools → application, here it is seen that the PWA application (left) has a service worker and the common (right) one does not.

+
+
+
+Application comparison +
+
Figure 31. Application service worker comparison.
+
+
+

 

+
+
+

If the "offline" box is checked, it will force a disconnection from network. In situations where users do not have connectivity or have a slow, one the PWA can still be accessed and used.

+
+
+
+Online offline apps +
+
Figure 32. Offline application.
+
+
+

 

+
+
+

Finally, browser extensions like Lighthouse can be used to test whether an application is progressive or not.

+
+
+
+Lighthouse report +
+
Figure 33. Lighthouse report.
+
+ +
+
+

APP_INITIALIZER

+ +
+
+

What is the APP_INITIALIZER pattern

+
+

The APP_INITIALIZER pattern allows an application to choose which configuration is going to be used at the start of the application. This is useful because it allows you to set up different configurations, for example, for Docker or a remote configuration. This provides benefits since it is done at runtime, so there’s no need to recompile the whole application to switch configurations.

+
+
+
+

What is APP_INITIALIZER

+
+

APP_INITIALIZER allows you to provide a service in the initialization of the application in a @NgModule. It also allows the use of a factory, making it possible to create a singleton in the same service. An example can be found in MyThaiStar /core/config/config.module.ts:

+
+
+
+

==

+
+

The provider expects the return of a Promise; if it is using Observables, the method toPromise() will allow a switch from Observable to Promise.

+
+
+
+
import { NgModule, APP_INITIALIZER } from '@angular/core';
+import { HttpClientModule } from '@angular/common/http';
+
+import { ConfigService } from './config.service';
+
+@NgModule({
+  imports: [HttpClientModule],
+  providers: [
+    ConfigService,
+    {
+      provide: APP_INITIALIZER,
+      useFactory: ConfigService.factory,
+      deps: [ConfigService],
+      multi: true,
+    },
+  ],
+})
+export class ConfigModule {}
+
+
+
+

This is going to allow the creation of a ConfigService where, using a singleton, the service is going to load an external config depending on a route. This dependence with a route, allows to setup different configuration for docker etc. This is seen in the ConfigService of MyThaiStar:

+
+
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Config, config } from './config';
+
+@Injectable()
+export class ConfigService {
+  constructor(private httpClient: HttpClient) {}
+
+  static factory(appLoadService: ConfigService) {
+    return () => appLoadService.loadExternalConfig();
+  }
+
+  // this method gets external configuration calling /config endpoint
+  //and merges into config object
+  loadExternalConfig(): Promise<any> {
+    if (!environment.loadExternalConfig) {
+      return Promise.resolve({});
+    }
+
+    const promise = this.httpClient
+      .get('/config')
+      .toPromise()
+      .then((settings) => {
+        Object.keys(settings || {}).forEach((k) => {
+          config[k] = settings[k];
+        });
+        return settings;
+      })
+      .catch((error) => {
+        return 'ok, no external configuration';
+      });
+
+    return promise;
+  }
+
+  getValues(): Config {
+    return config;
+  }
+}
+
+
+
+

As it is mentioned earlier, you can see the use of a factory to create a singleton at the start. After that, loadExternalConfig is going to look for a Boolean inside the corresponding environment file inside the path src/environments/, this Boolean loadExternalConfig is going to easily allow to switch to a external config. If it is true, it generates a promise that overwrites the parameters of the local config, allowing to load the external config. Finally, the last method getValues() is going to allow to return the file config with the values (overwritten or not). The local config file from MyThaiStar can be seen here:

+
+
+
+
export enum BackendType {
+  IN_MEMORY,
+  REST,
+  GRAPHQL,
+}
+
+interface Role {
+  name: string;
+  permission: number;
+}
+
+interface Lang {
+  label: string;
+  value: string;
+}
+
+export interface Config {
+  version: string;
+  backendType: BackendType;
+  restPathRoot: string;
+  restServiceRoot: string;
+  pageSizes: number[];
+  pageSizesDialog: number[];
+  roles: Role[];
+  langs: Lang[];
+}
+
+export const config: Config = {
+  version: 'dev',
+  backendType: BackendType.REST,
+  restPathRoot: 'http://localhost:8081/mythaistar/',
+  restServiceRoot: 'http://localhost:8081/mythaistar/services/rest/',
+  pageSizes: [8, 16, 24],
+  pageSizesDialog: [4, 8, 12],
+  roles: [
+    { name: 'CUSTOMER', permission: 0 },
+    { name: 'WAITER', permission: 1 },
+  ],
+  langs: [
+    { label: 'English', value: 'en' },
+    { label: 'Deutsch', value: 'de' },
+    { label: 'Español', value: 'es' },
+    { label: 'Català', value: 'ca' },
+    { label: 'Français', value: 'fr' },
+    { label: 'Nederlands', value: 'nl' },
+    { label: 'हिन्दी', value: 'hi' },
+    { label: 'Polski', value: 'pl' },
+    { label: 'Русский', value: 'ru' },
+    { label: 'български', value: 'bg' },
+  ],
+};
+
+
+
+

Finally, inside a environment file src/environments/environment.ts the use of the Boolean loadExternalConfig is seen:

+
+
+
+
// The file contents for the current environment will overwrite these during build.
+// The build system defaults to the dev environment which uses `environment.ts`, but if you do
+// `ng build --env=prod` then `environment.prod.ts` will be used instead.
+// The list of which env maps to which file can be found in `.angular-cli.json`.
+
+export const environment: {
+  production: boolean;
+  loadExternalConfig: boolean;
+} = { production: false, loadExternalConfig: false };
+
+
+
+
+

Creating a APP_INITIALIZER configuration

+
+

This section is going to be used to create a new APP_INITIALIZER basic example. For this, a basic app with angular is going to be generated using ng new "appname" substituting appname for the name of the app opted. +If you are using Nx, the command would be nx generate @nrwl/angular:app "appname" in your Nx workspace. Click here to get started with using Nx.

+
+
+
+

Setting up the config files

+ +
+
+

Docker external configuration (Optional)

+
+

This section is only done if there is a docker configuration in the app you are setting up this type of configuration.

+
+
+

1.- Create in the root folder /docker-external-config.json. This external config is going to be used when the application is loaded with docker (if the Boolean to load the external configuration is set to true). Here you need to add all the config parameters you want to load with docker:

+
+
+
+
{
+    "version": "docker-version"
+}
+
+
+
+

2.- In the root, in the file /Dockerfile angular is going to copy the docker-external-config.json that was created before into the Nginx html route:

+
+
+
+
....
+COPY docker-external-config.json /usr/share/nginx/html/docker-external-config.json
+....
+
+
+
+
+

External json configuration

+
+

1.- Create a json file in the route /src/external-config.json. This external config is going to be used when the application is loaded with the start script (if the Boolean to load the external configuration is set to true). Here you need to add all the config parameters you want to load:

+
+
+
+
{
+    "version": "external-config"
+}
+
+
+
+

2.- The file named /angular.json (/workspace.json if using Nx) located at the root is going to be modified to add the file external-config.json that was just created to both "assets" inside Build and Test:

+
+
+
+
	....
+	"build": {
+          ....
+            "assets": [
+              "src/assets",
+              "src/data",
+              "src/favicon.ico",
+              "src/manifest.json",
+              "src/external-config.json"
+            ]
+	        ....
+        "test": {
+	  ....
+	   "assets": [
+              "src/assets",
+              "src/data",
+              "src/favicon.ico",
+              "src/manifest.json",
+              "src/external-config.json"
+            ]
+	  ....
+
+
+
+
+

Setting up the proxies

+
+

This step is going to setup two proxies. This is going to allow to load the config desired by the context, in case that it is using docker to load the app or in case it loads the app with angular. Loading different files is made possible by the fact that the ConfigService method loadExternalConfig() looks for the path /config.

+
+
+
+

Docker (Optional)

+
+

1.- This step is going to be for docker. Add docker-external-config.json to Nginx configuration (/nginx.conf) that is in the root of the application:

+
+
+
+
....
+  location  ~ ^/config {
+        alias /usr/share/nginx/html/docker-external-config.json;
+  }
+....
+
+
+
+
+

External Configuration

+
+

1.- Now the file /proxy.conf.json needs to be created/modified. This file can be found in the root of the application. In this file you can add the route of the external configuration in target and the name of the file in ^/config:

+
+
+
+
....
+  "/config": {
+    "target": "http://localhost:4200",
+    "secure": false,
+    "pathRewrite": {
+      "^/config": "/external-config.json"
+    }
+  }
+....
+
+
+
+

2.- The file package.json found in the root of the application is going to use the start script to load the proxy config that was just created:

+
+
+
+
  "scripts": {
+....
+    "start": "ng serve --proxy-config proxy.conf.json -o",
+....
+
+
+
+

If using Nx, you need to run the command manually:

+
+
+

nx run angular-app-initializer:serve:development --proxyConfig=proxy.conf.json --o

+
+
+
+

Adding the loadExternalConfig Boolean to the environments

+
+

In order to load an external config we need to add the loadExternalConfig Boolean to the environments. To do so, inside the folder environments/ the files are going to get modified adding this Boolean to each environment that is going to be used. In this case, only two environments are going to be modified (environment.ts and environment.prod.ts). Down below there is an example of the modification being done in the environment.prod.ts:

+
+
+
+
export const environment: {
+  production: boolean;
+  loadExternalConfig: boolean;
+} = { production: false, loadExternalConfig: false };
+
+
+
+

In the file in first instance there is the declaration of the types of the variables. After that, there is the definition of those variables. This variable loadExternalConfig is going to be used by the service, allowing to setup a external config just by switching the loadExternalConfig to true.

+
+
+
+

Creating core configuration service

+
+

In order to create the whole configuration module, three files are going to be created:

+
+
+

1.- Create in the core app/core/config/ a config.ts

+
+
+
+
  export interface Config {
+    version: string;
+  }
+
+  export const config: Config = {
+    version: 'dev'
+  };
+
+
+
+

Taking a look at this file, it creates an interface (Config) that is going to be used by the variable that it exports (export const config: Config). This variable config is going to be used by the service that is going to be created.

+
+
+

2.- Create in the core app/core/config/ a config.service.ts:

+
+
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Config, config } from './config';
+
+@Injectable()
+export class ConfigService {
+  constructor(private httpClient: HttpClient) {}
+
+  static factory(appLoadService: ConfigService) {
+    return () => appLoadService.loadExternalConfig();
+  }
+
+  // this method gets external configuration calling /config endpoint
+  // and merges into config object
+  loadExternalConfig(): Promise<any> {
+    if (!environment.loadExternalConfig) {
+      return Promise.resolve({});
+    }
+
+    const promise = this.httpClient
+      .get('/config')
+      .toPromise()
+      .then((settings) => {
+        Object.keys(settings || {}).forEach((k) => {
+          config[k] = settings[k];
+        });
+        return settings;
+      })
+      .catch((error) => {
+        return 'ok, no external configuration';
+      });
+
+    return promise;
+  }
+
+  getValues(): Config {
+    return config;
+  }
+}
+
+
+
+

As it was explained in previous steps, at first, there is a factory that uses the method loadExternalConfig(), this factory is going to be used in later steps in the module. After that, the loadExternalConfig() method checks if the Boolean in the environment is false. If it is false it will return the promise resolved with the normal config. Else, it is going to load the external config in the path (/config), and overwrite the values from the external config to the config that’s going to be used by the app, this is all returned in a promise.

+
+
+

3.- Create in the core a module for the config app/core/config/ a config.module.ts:

+
+
+
+
import { NgModule, APP_INITIALIZER } from '@angular/core';
+import { HttpClientModule } from '@angular/common/http';
+
+import { ConfigService } from './config.service';
+
+@NgModule({
+  imports: [HttpClientModule],
+  providers: [
+    ConfigService,
+    {
+      provide: APP_INITIALIZER,
+      useFactory: ConfigService.factory,
+      deps: [ConfigService],
+      multi: true,
+    },
+  ],
+})
+export class ConfigModule {}
+
+
+
+

As seen earlier, the ConfigService is added to the module. In this addition, the app is initialized(provide) and it uses the factory that was created in the ConfigService loading the config with or without the external values depending on the Boolean in the config.

+
+
+
+

Using the Config Service

+
+

As a first step, in the file /app/app.module.ts the ConfigModule created earlier in the other step is going to be imported:

+
+
+
+
  imports: [
+    ....
+    ConfigModule,
+    ....
+  ]
+
+
+
+

After that, the ConfigService is going to be injected into the app.component.ts

+
+
+
+
....
+import { ConfigService } from './core/config/config.service';
+....
+export class AppComponent {
+....
+  constructor(public configService: ConfigService) { }
+....
+
+
+
+

Finally, for this demonstration app, the component app/app.component.html is going to show the version of the config it is using at that moment.

+
+
+
+
<div style="text-align:center">
+  <h1>
+    Welcome to {{ title }}!
+  </h1>
+</div>
+<h2>Here is the configuration version that is using angular right now: {{configService.getValues().version}}</h2>
+
+
+
+
+

Final steps

+
+

The script start that was created earlier in the package.json (npm start) is going to be used to start the application. After that, modifying the Boolean loadExternalConfig inside the corresponding environment file inside /app/environments/ should show the different config versions.

+
+
+
+loadExternalConfigFalse +
+
+
+
+loadExternalConfigTrue +
+
+ +
+
+

Component Decomposition

+
+

When implementing a new requirement there are a few design decisions, which need to be considered. +A decomposition in Smart and Dumb Components should be done first. +This includes the definition of state and responsibilities. +Implementing a new dialog will most likely be done by defining a new Smart Component with multiple Dumb Component children.

+
+
+

In the component tree this would translate to the definition of a new sub-tree.

+
+
+
+Component Tree With Highlighted Sub Tree +
+
Figure 34. Component Tree with highlighted sub-tree
+
+
+
+

Defining Components

+
+

The following gives an example for component decomposition. +Shown is a screenshot from a style guide to be implemented. +It is a widget called Listpicker.

+
+
+

The basic function is an input field accepting direct input. +So typing otto puts otto inside the FormControl. +With arrow down key or by clicking the icon displayed in the inputs right edge a dropdown is opened. +Inside possible values can be selected and filtered beforehand. +After pressing arrow down key the focus should move into the filter input field. +Up and down arrow keys can be used to select an element from the list. +Typing into the filter input field filters the list from which the elements can be selected. +The current selected element is highlighted with green background color.

+
+
+
+Component Decomposition Example 1v2 +
+
Figure 35. Component decomposition example before
+
+
+

What should be done, is to define small reusable Dumb Components. +This way the complexity becomes manageable. +In the example every colored box describes a component with the purple box being a Smart Component.

+
+
+
+Component Decomposition Example 2v2 +
+
Figure 36. Component decomposition example after
+
+
+

This leads to the following component tree.

+
+
+
+Component Decomposition Example component tree +
+
Figure 37. Component decomposition example component tree
+
+
+

Note the uppermost component is a Dumb Component. +It is a wrapper for the label and the component to be displayed inside a form. +The Smart Component is Listpicker. +This way the widget can be reused without a form needed.

+
+
+

A widget is a typical Smart Component to be shared across feature modules. +So the SharedModule is the place for it to be defined.

+
+
+
+

Defining state

+
+

Every UI has state. +There are different kinds of state, for example

+
+
+
    +
  • +

    View State: e.g. is a panel open, a css transition pending, etc.

    +
  • +
  • +

    Application State: e.g. is a payment pending, current URL, user info, etc.

    +
  • +
  • +

    Business Data: e.g. products loaded from back-end

    +
  • +
+
+
+

It is good practice to base the component decomposition on the state handled by a component and to define a simplified state model beforehand. +Starting with the parent - the Smart Component:

+
+
+
    +
  • +

    What overall state does the dialog have: e.g. loading, error, valid data loaded, valid input, invalid input, etc. +Every defined value should correspond to an overall appearance of the whole dialog.

    +
  • +
  • +

    What events can occur to the dialog: e.g. submitting a form, changing a filter, pressing buttons, pressing keys, etc.

    +
  • +
+
+
+

For every Dumb Component:

+
+
+
    +
  • +

    What data does a component display: e.g. a header text, user information to be displayed, a loading flag, etc.
    +This will be a slice of the overall state of the parent Smart Component. +In general a Dumb Component presents a slice of its parent Smart Components state to the user.

    +
  • +
  • +

    What events can occur: keyboard events, mouse events, etc.
    +These events are all handled by its parent Smart Component - every event is passed up the tree to be handled by a Smart Component.

    +
  • +
+
+
+

This information should be reflected inside the modeled state. +The implementation is a TypeScript type - an interface or a class describing the model.

+
+
+

So there should be a type describing all state relevant for a Smart Component. +An instance of that type is sent down the component tree at runtime. +Not every Dumb Component will need the whole state. +For instance a single Dumb Component could only need a single string.

+
+
+

The state model for the previous Listpicker example is shown in the following listing.

+
+
+
Listing 32. Listpicker state model
+
+
export class ListpickerState {
+
+  items: {}[]|undefined;
+  columns = ['key', 'value'];
+  keyColumn = 'key';
+  displayValueColumn = 'value';
+  filteredItems: {}[]|undefined;
+  filter = '';
+  placeholder = '';
+  caseSensitive = true;
+  isDisabled = false;
+  isDropdownOpen = false;
+  selectedItem: {}|undefined;
+  displayValue = '';
+
+}
+
+
+
+

Listpicker holds an instance of ListpickerState which is passed down the component tree via @Input() bindings in the Dumb Components. +Events emitted by children - Dumb Components - create a new instance of ListpickerState based on the current instance and the event and its data. +So a state transition is just setting a new instance of ListpickerState. +Angular Bindings propagate the value down the tree after exchanging the state.

+
+
+
Listing 33. Listpicker State transition
+
+
export class ListpickerComponent {
+
+  // initial default values are set
+  state = new ListpickerState();
+
+  /** User changes filter */
+  onFilterChange(filter: string): void {
+    // apply filter ...
+    const filteredList = this.filterService.filter(...);
+
+    // important: A new instance is created, instead of altering the existing one.
+    //            This makes change detection easier and prevents hard to find bugs.
+    this.state = Object.assign({}, this.state, {
+      filteredItems: filteredList,
+      filter: filter
+    });
+  }
+
+}
+
+
+
+
Note:
+

It is not always necessary to define the model as independent type. +So there would be no state property and just properties for every state defined directly in the component class. +When complexity grows and state becomes larger this is usually a good idea. +If the state should be shared between Smart Components a store is to be used.

+
+
+
+

When are Dumb Components needed

+
+

Sometimes it is not necessary to perform a full decomposition. The architecture does not enforce it generally. What you should keep in mind is, that there is always a point when it becomes recommendable.

+
+
+

For example a template with 800 lines of code is:

+
+
+
    +
  • +

    not understandable

    +
  • +
  • +

    not maintainable

    +
  • +
  • +

    not testable

    +
  • +
  • +

    not reusable

    +
  • +
+
+
+

So when implementing a template with more than 50 lines of code you should think about decomposition.

+
+ +
+
+

Consuming REST services

+
+

A good introduction to working with Angular HttpClient can be found in Angular Docs

+
+
+

This guide will cover, how to embed Angular HttpClient in the application architecture. +For back-end request a special service with the suffix Adapter needs to be defined.

+
+
+
+

Defining Adapters

+
+

It is a good practice to have an Angular service whose single responsibility is to call the back-end and parse the received value to a transfer data model (e.g. Swagger generated TOs). +Those services need to have the suffix Adapter to make them easy to recognize.

+
+
+
+Adapters handle back-end communication +
+
Figure 38. Adapters handle back-end communication
+
+
+

As illustrated in the figure a Use Case service does not use Angular HttpClient directly but uses an adapter. +A basic adapter could look like this:

+
+
+
Listing 34. Example adapter
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Observable } from 'rxjs/Observable';
+
+import { FlightTo } from './flight-to';
+
+@Injectable({
+ providedIn: 'root',
+})
+export class FlightsAdapter {
+
+  constructor(
+    private httpClient: HttpClient
+  ) {}
+
+  getFlights(): Observable<FlightTo> {
+    return this.httpClient.get<FlightTo>('/relative/url/to/flights');
+  }
+
+}
+
+
+
+

The adapters should use a well-defined transfer data model. +This could be generated from server endpoints with CobiGen, Swagger, typescript-maven-plugin, etc. +If inside the application there is a business model defined, the adapter has to parse to the transfer model. +This is illustrated in the following listing.

+
+
+
Listing 35. Example adapter mapping from business model to transfer model
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Observable } from 'rxjs/Observable';
+import { map } from 'rxjs/operators';
+
+import { FlightTo } from './flight-to';
+import { Flight } from '../../../model/flight';
+
+@Injectable({
+ providedIn: 'root',
+})
+export class FlightsAdapter {
+
+  constructor(
+    private httpClient: HttpClient
+  ) {}
+
+  updateFlight(flight: Flight): Observable<Flight> {
+    const to = this.mapFlight(flight);
+
+    return this.httpClient.post<FlightTo>('/relative/url/to/flights', to).pipe(
+      map(to => this.mapFlightTo(to))
+    );
+  }
+
+  private mapFlight(flight: Flight): FlightTo {
+    // mapping logic
+  }
+
+  private mapFlightTo(flightTo: FlightTo): Flight {
+    // mapping logic
+  }
+
+}
+
+
+
+
+

Token management

+
+

In most cases the access to back-end API is secured using well known mechanisms as CSRF, JWT or both. In these cases the front-end application must manage the tokens that are generated when the user authenticates. More concretely it must store them to include them in every request automatically. Obviously, when user logs out these tokens must be removed from localStorage, memory, etc.

+
+
+
+

Store security token

+
+

In order to make this guide simple we are going to store the token in memory. Therefore, if we consider that we already have a login mechanism implemented, we would like to store the token using an auth.service.ts:

+
+
+
+
import { Injectable } from '@angular/core';
+import { Router } from '@angular/router';
+
+@Injectable({
+  providedIn: 'root',
+})
+export class AuthService {
+  private loggedIn = false;
+  private token: string;
+
+  constructor(public router: Router) {}
+
+  public isLogged(): boolean {
+    return this.loggedIn || false;
+  }
+
+  public setLogged(login: boolean): void {
+    this.loggedIn = login;
+  }
+
+  public getToken(): string {
+    return this.token;
+  }
+
+  public setToken(token: string): void {
+    this.token = token;
+  }
+}
+
+
+
+

Using the previous service we will be able to store the token obtained in the login request using the method setToken(token). Please consider that, if you want a more sophisticated approach using localStorage API, you will need to modify this service accordingly.

+
+
+
+

Include token in every request

+
+

Now that the token is available in the application it is necessary to include it in every request to a protected API endpoint. Instead of modifying all the HTTP requests in our application, Angular provides a class to intercept every request (and every response if we need to) called HttpInterceptor. Let’s create a service called http-interceptor.service.ts to implement the intercept method of this class:

+
+
+
+
import {
+  HttpEvent,
+  HttpHandler,
+  HttpInterceptor,
+  HttpRequest,
+} from '@angular/common/http';
+import { Injectable } from '@angular/core';
+import { Observable } from 'rxjs';
+import { environment } from '../../../environments/environment';
+import { AuthService } from './auth.service';
+
+@Injectable()
+export class HttpRequestInterceptorService implements HttpInterceptor {
+
+  constructor(private auth: AuthService) {}
+
+  intercept(
+    req: HttpRequest<any>,
+    next: HttpHandler,
+  ): Observable<HttpEvent<any>> {
+    // Get the auth header from the service.
+    const authHeader: string = this.auth.getToken();
+    if (authHeader) {
+      let authReq: HttpRequest<any>;
+
+      // CSRF
+      if (environment.security == 'csrf') {
+        authReq = req.clone({
+          withCredentials: true,
+          setHeaders: { 'x-csrf-token': authHeader },
+        });
+      }
+
+      // JWT
+      if (environment.security == 'jwt') {
+        authReq = req.clone({
+          setHeaders: { Authorization: authHeader },
+        });
+      }
+
+      return next.handle(authReq);
+    } else {
+      return next.handle(req);
+    }
+  }
+}
+
+
+
+

As you may notice, this service is making use of an environment field environment.security to determine if we are using JWT or CSRF in order to inject the token accordingly. In your application you can combine both if necessary.

+
+
+

Configure environment.ts file to use the CSRF/JWT.

+
+
+
+
security: 'csrf'
+
+
+
+

The authHeader used is obtained using the injected service AuthService already presented above.

+
+
+

In order to activate the interceptor we need to provide it in our app.module.ts or core.module.ts depending on the application structure. Let’s assume that we are using the latter and the interceptor file is inside a security folder:

+
+
+
+
...
+import { HttpRequestInterceptorService } from './security/http-request-interceptor.service';
+...
+
+@NgModule({
+  imports: [...],
+  exports: [...],
+  declarations: [],
+  providers: [
+    ...
+    {
+      provide: HTTP_INTERCEPTORS,
+      useClass: HttpRequestInterceptorService,
+      multi: true,
+    },
+  ],
+})
+export class CoreModule {}
+
+
+
+

Angular automatically will now modify every request and include in the header the token if it is convenient.

+
+ +
+
+

Error Handler in angular

+
+

Angular allows us to set up a custom error handler that can be used to control the different errors and handle them in a correct way. Using a global error handler will avoid mistakes and provide a user-friendly interface, allowing us to indicate to the user what problem is happening.

+
+
+
+

What is ErrorHandler

+
+

ErrorHandler is the class that Angular uses by default to control the errors. This means that, even if the application doesn’t have an ErrorHandler, it is going to use the one set up by default in Angular. This can be tested by trying to navigate to a page that does not exist in any app; instantly Angular will print the error in the console.

+
+
+
+

Creating your custom ErrorHandler step by step

+
+

In order to create a custom ErrorHandler three steps are going to be needed:

+
+
+
+

Creating the custom ErrorHandler class

+
+

In this first step the custom ErrorHandler class is going to be created inside the folder /app/core/errors/errors-handler.ts:

+
+
+
+
import { ErrorHandler, Injectable, Injector } from '@angular/core';
+import { HttpErrorResponse } from '@angular/common/http';
+
+@Injectable()
+export class ErrorsHandler implements ErrorHandler {
+
+    constructor(private injector: Injector) {}
+
+    handleError(error: Error | HttpErrorResponse) {
+      //  To do: Use injector to get the necessary services to redirect or
+      // show a message to the user
+      const classname  = error.constructor.name;
+      switch ( classname )  {
+        case 'HttpErrorResponse':
+          console.error('HttpError:' + error.message);
+          if (!navigator.onLine) {
+            console.error('There\'s no internet connection');
+            // To do: control here what you want to do if the user has no internet
+          } else {
+            console.error('Server Error:' + error.message);
+            // To do: control here if the server gave an error
+          }
+          break;
+        default:
+          console.error('Error:' + error.message);
+          // To do: control here if the client/other things gave an error
+      }
+    }
+}
+
+
+
+

This class can be used to control the different types of errors. If wanted, the classname variable could be used to add more switch cases. This would allow control of more specific situations.

+
+
+
+

Creating a ErrorInterceptor

+
+

Inside the same folder created in the last step we are going to create the ErrorInterceptor(errors-handler-interceptor.ts). This ErrorInterceptor is going to retry any failed calls to the server, to make sure the failure is persistent, before showing the error:

+
+
+
+
import { HttpInterceptor, HttpRequest, HttpHandler, HttpEvent } from '@angular/common/http';
+import { Injectable } from '@angular/core';
+import { Observable, of } from 'rxjs';
+import { retryWhen, delay, take, concatMap } from 'rxjs/operators';
+
+@Injectable()
+export class ErrorsHandlerInterceptor implements HttpInterceptor {
+
+    constructor() {}
+    intercept(req: HttpRequest<any>, next: HttpHandler): Observable<HttpEvent<any>> {
+        return next.handle(req).pipe(
+            retryWhen((errors: Observable<any>) => errors.pipe(
+                delay(500),
+                take(5),
+                concatMap((error: any, retryIndex: number) => {
+                    if (++retryIndex == 5) {
+                        throw error;
+                    }
+                    return of(error);
+                })
+            ))
+        );
+    }
+}
+
+
+
+

This custom made interceptor is implementing the HttpInterceptor and, inside the method intercept, using the methods pipe, retryWhen, delay, take and concatMap from RxJS, it is going to do the following things if there are errors:

+
+
+
    +
  1. +

    With delay(500) do a delay to allow some time in between requests

    +
  2. +
  3. +

    With take(5) retry five times.

    +
  4. +
  5. +

    With concatMap if the index that take() gives is not 5 it returns the error, else, it throws the error.

    +
  6. +
+
+
+
+

Creating a Error Module

+
+

Finally, creating a module(errors-handler.module.ts) is necessary to include the interceptor and the custom error handler. In this case, the module is going to be created in the same folder as the last two:

+
+
+
+
import { NgModule, ErrorHandler } from '@angular/core';
+import { CommonModule } from '@angular/common';
+import { ErrorsHandler } from './errors-handler';
+import { HTTP_INTERCEPTORS } from '@angular/common/http';
+import { ErrorsHandlerInterceptor } from './errors-handler-interceptor';
+
+@NgModule({
+  declarations: [], // Declare here component if you want to use routing to error component
+  imports: [
+    CommonModule
+  ],
+  providers: [
+    {
+      provide: ErrorHandler,
+      useClass: ErrorsHandler,
+    },
+    {
+      provide: HTTP_INTERCEPTORS,
+      useClass: ErrorsHandlerInterceptor,
+      multi: true,
+    }
+  ]
+})
+export class ErrorsHandlerModule { }
+
+
+
+

This module simply is providing the services that are implemented by our custom classes and then telling Angular to use our custom made classes instead of the default ones. After doing this, the module has to be included in the app module app.module.ts in order to be used.

+
+
+
+
....
+  imports: [
+    ErrorsHandlerModule,
+    ....
+
+
+
+
+

Handling Errors

+
+

As a final step, handling these errors is necessary. There are different ways that can be used to control the errors, here are a few:

+
+
+
    +
  • +

    Creating a custom page and using with Router to redirect to a page showing an error.

    +
  • +
  • +

    Creating a service in the server side or Backend to create a log with the error and calling it with HttpClient.

    +
  • +
  • +

    Showing a custom made SnackBar with the error message.

    +
  • +
+
+
+
+

== Using SnackBarService and NgZone

+
+

If the SnackBar is used directly, some errors can occur; this is due to SnackBar being out of the Angular zone. In order to use this service properly, NgZone is necessary. The method run() from NgZone will allow the service to be inside the Angular Zone. An example on how to use it:

+
+
+
+
import { ErrorHandler, Injectable, Injector, NgZone } from '@angular/core';
+import { HttpErrorResponse } from '@angular/common/http';
+import { MatSnackBar } from '@angular/material';
+
+@Injectable()
+export class ErrorsHandler implements ErrorHandler {
+
+    constructor(private injector: Injector, private zone: NgZone) {}
+
+    handleError(error: Error | HttpErrorResponse) {
+      // Use injector to get the necessary services to redirect or
+      const snackBar: MatSnackBar = this.injector.get(MatSnackBar);
+      const classname  = error.constructor.name;
+      let message: string;
+      switch ( classname )  {
+        case 'HttpErrorResponse':
+          message = !(navigator.onLine) ? 'There is no internet connection' : error.message;
+          break;
+        default:
+          message = error.message;
+      }
+      this.zone.run(
+        () => snackBar.open(message, 'danger', { duration : 4000})
+      );
+    }
+}
+
+
+
+

Using Injector the MatSnackBar is obtained, then the correct message is obtained inside the switch. Finally, using NgZone and run(), we open the SnackBar passing the message, and the parameters wanted.

+
+
+

You can find a working example of this guide in devon4ts-samples.

+
+ +
+
+

File Structure

+ +
+
+

Top-level

+
+

The top-level file structure is defined by Angular CLI. You might put this "top-level file structure" into a sub-directory to facilitate your build, but this is not relevant for this guide. So the applications file structure relevant to this guide is the folder /src/app inside the part managed by Angular CLI.

+
+
+
Listing 36. Top-level file structure shows feature modules
+
+
    /src
+    └── /app
+        ├── /account-management
+        ├── /billing
+        ├── /booking
+        ├── /core
+        ├── /shared
+        ├── /status
+        |
+        ├── app.module.ts
+        ├── app.component.spec.ts
+        ├── app.component.ts
+        └── app.routing-module.ts
+
+
+
+

Besides the definition of app module the app folder has feature modules on top-level. +The special modules shared and core are present as well.

+
+
+
+

Feature Modules

+
+

A feature module contains the modules definition and two folders representing both layers.

+
+
+
Listing 37. Feature module file structure has both layers
+
+
    /src
+    └── /app
+        └── /account-management
+            ├── /components
+            ├── /services
+            |
+            ├── account-management.module.ts
+            ├── account-management.component.spec.ts
+            ├── account-management.component.ts
+            └── account-management.routing-module.ts
+
+
+
+

Additionally an entry component is possible. This would be the case in lazy loading scenarios. +So account-management.component.ts would be only present if account-management is lazy loaded. +Otherwise, the module’s routes would be defined Component-less +(see vsavkin blog post).

+
+
+
+

Components Layer

+
+

The component layer reflects the distinction between Smart Components and Dumb Components.

+
+
+
Listing 38. Components layer file structure shows Smart Components on top-level
+
+
    /src
+    └── /app
+        └── /account-management
+            └── /components
+                ├── /account-overview
+                ├── /confirm-modal
+                ├── /create-account
+                ├── /forgot-password
+                └── /shared
+
+
+
+

Every folder inside the /components folder represents a smart component. The only exception is /shared. +/shared contains Dumb Components shared across Smart Components inside the components layer.

+
+
+
Listing 39. Smart components contain Dumb components
+
+
    /src
+    └── /app
+        └── /account-management
+            └── /components
+                └── /account-overview
+                    ├── /user-info-panel
+                    |   ├── /address-tab
+                    |   ├── /last-activities-tab
+                    |   |
+                    |   ├── user-info-panel.component.html
+                    |   ├── user-info-panel.component.scss
+                    |   ├── user-info-panel.component.spec.ts
+                    |   └── user-info-panel.component.ts
+                    |
+                    ├── /user-header
+                    ├── /user-toolbar
+                    |
+                    ├── account-overview.component.html
+                    ├── account-overview.component.scss
+                    ├── account-overview.component.spec.ts
+                    └── account-overview.component.ts
+
+
+
+

Inside the folder of a Smart Component the component is defined. +Besides that are folders containing the Dumb Components the Smart Component consists of. +This can be recursive - a Dumb Component can consist of other Dumb Components. +This is reflected by the file structure as well. This way the structure of a view becomes very readable. +As mentioned before, if a Dumb Component is used by multiple Smart Components inside the components layer +it is put inside the /shared folder inside the components layer.

+
+
+

With this way of thinking the shared module makes a lot of sense. If a Dumb Component is used by multiple Smart Components +from different feature modules, the Dumb Component is placed into the shared module.

+
+
+
Listing 40. The shared module contains Dumb Components shared across Smart Components from different feature modules
+
+
    /src
+    └── /app
+        └── /shared
+            └── /user-panel
+                |
+                ├── user-panel.component.html
+                ├── user-panel.component.scss
+                ├── user-panel.component.spec.ts
+                └── user-panel.component.ts
+
+
+
+

The layer folder /components is not necessary inside the shared module. +The shared module only contains components!

+
+ +
+
+

Internationalization

+
+

Nowadays, a common scenario in front-end applications is to have the ability to translate labels and locate numbers, dates, currency and so on when the user clicks over a language selector or similar. devon4ng and specifically Angular has a default mechanism in order to fill the gap of such features, and besides there are some wide used libraries that make even easier to translate applications.

+
+ +
+
+

devon4ng i18n approach

+
+

The official approach could be a bit complicated, therefore the recommended one is to use the Transloco library from https://github.com/ngneat/transloco/.

+
+
+
+

Install and configure Transloco

+
+

In order to include this library in your devon4ng Angular >= 7.2 project you will need to execute in a terminal:

+
+
+
+
$ ng add @ngneat/transloco
+
+
+
+

As part of the installation process you’ll be presented with questions; once you answer them, everything you need will automatically be created for you.

+
+
+
    +
  • +

    First, Transloco creates boilerplate files for the requested translations.

    +
  • +
  • +

    Next, it will create a new file, transloco-root.module.ts which exposes an Angular’s module with a default configuration, and inject it into the AppModule.

    +
  • +
+
+
+
+
import { HttpClient } from '@angular/common/http';
+import {
+  TRANSLOCO_LOADER,
+  Translation,
+  TranslocoLoader,
+  TRANSLOCO_CONFIG,
+  translocoConfig,
+  TranslocoModule
+} from '@ngneat/transloco';
+import { Injectable, NgModule } from '@angular/core';
+import { environment } from '../environments/environment';
+
+@Injectable({ providedIn: 'root' })
+export class TranslocoHttpLoader implements TranslocoLoader {
+  constructor(private http: HttpClient) {}
+
+  getTranslation(lang: string) {
+    return this.http.get<Translation>(`/assets/i18n/${lang}.json`);
+  }
+}
+
+@NgModule({
+  exports: [ TranslocoModule ],
+  providers: [
+    {
+      provide: TRANSLOCO_CONFIG,
+      useValue: translocoConfig({
+        availableLangs: ['en', 'es'],
+        defaultLang: 'en',
+        // Remove this option if your application doesn't support changing language in runtime.
+        reRenderOnLangChange: true,
+        prodMode: environment.production,
+      })
+    },
+    { provide: TRANSLOCO_LOADER, useClass: TranslocoHttpLoader }
+  ]
+})
+export class TranslocoRootModule {}
+
+
+
+ + + + + +
+ + +As you might have noticed it also set an HttpLoader into the module’s providers. The HttpLoader is a class that implements the TranslocoLoader interface. It’s responsible for instructing Transloco how to load the translation files. It uses Angular HTTP client to fetch the files, based on the given path. +
+
+
+
+

Usage

+
+

In order to translate any label in any HTML template you will need to use the transloco pipe available:

+
+
+
+
{{ 'HELLO' | transloco }}
+
+
+
+

An optional parameter from the component TypeScript class could be included as follows:

+
+
+
+
{{ 'HELLO' | transloco: { value: dynamic } }}
+
+
+
+

It is possible to use with inputs:

+
+
+
+
<span [attr.alt]="'hello' | transloco">Attribute</span>
+<span [title]="'hello' | transloco">Property</span>
+
+
+
+

In order to change the language used you will need to create a button or selector that calls the this.translocoService.setActiveLang(lang: string) method from TranslocoService. For example:

+
+
+
+
export class AppComponent {
+  constructor(private translocoService: TranslocoService) {}
+
+  changeLanguage(lang) {
+      this.translocoService.setActiveLang(lang);
+  }
+}
+
+
+
+

The translations will be included in the en.json, es.json, de.json, etc. files inside the /assets/i18n folder. For example en.json would be (using the previous parameter):

+
+
+
+
{
+    "HELLO": "hello"
+}
+
+
+
+

Or with an optional parameter:

+
+
+
+
{
+    "HELLO": "hello {{value}}"
+}
+
+
+
+

Transloco understands nested JSON objects. This means that you can have a translation that looks like this:

+
+
+
+
{
+    "HOME": {
+        "HELLO": "hello {{value}}"
+    }
+}
+
+
+
+

In order to access the value, use the dot notation, in this case HOME.HELLO.

+
+
+
+

Using the service, pipe or directive

+ +
+
+

== Structural Directive

+
+

Using a structural directive is the recommended approach. It’s DRY and efficient, as it creates one subscription per template:

+
+
+
+
<ng-container *transloco="let t">
+  <p>{{ t('title') }}</p>
+
+  <comp [title]="t('title')"></comp>
+</ng-container>
+
+
+
+

Note that the t function is memoized. It means that given the same key it will return the result directly from the cache.

+
+
+

We can pass a params object as the second parameter:

+
+
+
+
<ng-container *transloco="let t">
+  <p>{{ t('name', { name: 'Transloco' }) }}</p>
+</ng-container>
+
+
+
+

We can instruct the directive to use a different language in our template:

+
+
+
+
<ng-container *transloco="let t; lang: 'es'">
+  <p>{{ t('title') }}</p>
+</ng-container>
+
+
+
+
+

== Pipe

+
+

The use of pipes can be possible too:

+
+
+

template:

+
+
+
+
<div>{{ 'HELLO' | transloco:param }}</div>
+
+
+
+

component:

+
+
+
+
param = {value: 'world'};
+
+
+
+
+

== Attribute Directive

+
+

The last option available with transloco is the attribute directive:

+
+
+
+
<div transloco="HELLO" [translocoParams]="{ value: 'world' }"></div>
+
+
+
+
+

== Service

+
+

If you need to access translations in any component or service you can do it injecting the TranslocoService into them:

+
+
+
+
// Sync translation
+translocoService.translate('HELLO', {value: 'world'});
+
+// Async translation
+translocoService.selectTranslate('HELLO', { value: 'world' }).subscribe(res => {
+    console.log(res);
+    //=> 'hello world'
+});
+
+
+
+ + + + + +
+ + +You can find a complete example at https://github.com/devonfw/devon4ng-application-template. +
+
+
+

Please, visit https://github.com/ngneat/transloco/ for more info.

+
+ +
+
+

Routing

+
+

A basic introduction to the Angular Router can be found in Angular Docs.

+
+
+

This guide will show common tasks and best practices.

+
+
+
+

Defining Routes

+
+

For each feature module and the app module all routes should be defined in a separate module with the suffix RoutingModule. +This way the routing modules are the only place where routes are defined. +This pattern achieves a clear separation of concerns. +The following figure illustrates this.

+
+
+
+Routing module declaration +
+
Figure 39. Routing module declaration
+
+
+

It is important to define routes inside app routing module with .forRoot() and in feature routing modules with .forChild().

+
+
+
+

Example 1 - No Lazy Loading

+
+

In this example two modules need to be configured with routes - AppModule and FlightModule.

+
+
+

The following routes will be configured

+
+
+
    +
  • +

    / will redirect to /search

    +
  • +
  • +

    /search displays FlightSearchComponent (FlightModule)

    +
  • +
  • +

    /search/print/:flightId/:date displays FlightPrintComponent (FlightModule)

    +
  • +
  • +

    /search/details/:flightId/:date displays FlightDetailsComponent (FlightModule)

    +
  • +
  • +

    All other routes will display ErrorPage404 (AppModule)

    +
  • +
+
+
+
Listing 41. app-routing.module.ts
+
+
const routes: Routes = [
+  { path: '', redirectTo: 'search', pathMatch: 'full' },
+  { path: '**', component: ErrorPage404 }
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule]
+})
+export class AppRoutingModule { }
+
+
+
+
Listing 42. flight-search-routing.module.ts
+
+
const routes: Routes = [
+  {
+    path: 'search', children: [
+      { path: '', component: FlightSearchComponent },
+      { path: 'print/:flightId/:date', component: FlightPrintComponent },
+      { path: 'details/:flightId/:date', component: FlightDetailsComponent }
+    ]
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule],
+})
+export class FlightSearchRoutingModule { }
+
+
+
+ + + + + +
+ + +The import order inside AppModule is important. +AppRoutingModule needs to be imported after FlightModule. +
+
+
+
+

Example 2 - Lazy Loading

+
+

Lazy Loading is a good practice when the application has multiple feature areas and a user might not visit every dialog. +Or at least he might not need every dialog up front.

+
+
+

The following example will configure the same routes as example 1 but will lazy load FlightModule.

+
+
+
Listing 43. app-routing.module.ts
+
+
const routes: Routes = [
+  { path: 'search', loadChildren: 'app/flight-search/flight-search.module#FlightSearchModule' },
+  { path: '**', component: ErrorPage404 }
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule]
+})
+export class AppRoutingModule { }
+
+
+
+
Listing 44. flight-search-routing.module.ts
+
+
const routes: Routes = [
+  {
+    path: '', children: [
+      { path: '', component: FlightSearchComponent },
+      { path: 'print/:flightId/:date', component: FlightPrintComponent },
+      { path: 'details/:flightId/:date', component: FlightDetailsComponent }
+    ]
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule],
+})
+export class FlightSearchRoutingModule { }
+
+
+
+
+

Triggering Route Changes

+
+

With Angular you have two ways of triggering route changes.

+
+
+
    +
  1. +

    Declarative with bindings in component HTML templates

    +
  2. +
  3. +

    Programmatic with Angular Router service inside component classes

    +
  4. +
+
+
+

On the one hand, architecture-wise it is a much cleaner solution to trigger route changes in Smart Components. +This way you have every UI event that should trigger a navigation handled in one place - in a Smart Component. +It becomes very easy to look inside the code for every navigation, that can occur. +Refactoring is also much easier, as there are no navigation events "hidden" in the HTML templates.

+
+
+

On the other hand, in terms of accessibility and SEO it is a better solution to rely on bindings in the view - e.g. by using Angular router-link directive. +This way screen readers and the Google crawler can move through the page easily.

+
+
+ + + + + +
+ + +If you do not have to support accessibility (screen readers, etc.) and to care about SEO (Google rank, etc.), +then you should aim for triggering navigation only in Smart Components. +
+
+
+
+Triggering navigation +
+
Figure 40. Triggering navigation
+
+
+
+

Guards

+
+

Guards are Angular services implemented on routes which determine whether a user can navigate to/from the route. There are examples below which will explain things better. We have the following types of Guards:

+
+
+
    +
  • +

    CanActivate: It is used to determine whether a user can visit a route. The most common scenario for this guard is to check if the user is authenticated. For example, if we want only logged in users to be able to go to a particular route, we will implement the CanActivate guard on this route.

    +
  • +
  • +

    CanActivateChild: Same as above, only implemented on child routes.

    +
  • +
  • +

    CanDeactivate: It is used to determine if a user can navigate away from a route. Most common example is when a user tries to go to a different page after filling up a form and does not save/submit the changes, we can use this guard to confirm whether the user really wants to leave the page without saving/submitting.

    +
  • +
  • +

    Resolve: For resolving dynamic data.

    +
  • +
  • +

    CanLoad: It is used to determine whether an Angular module can be loaded lazily. Example below will be helpful to understand it.

    +
  • +
+
+
+

Let’s have a look at some examples.

+
+
+
+

Example 1 - CanActivate and CanActivateChild guards

+ +
+
+

== CanActivate guard

+
+

As mentioned earlier, a guard is an Angular service and services are simply TypeScript classes. So we begin by creating a class. This class has to implement the CanActivate interface (imported from angular/router), and therefore, must have a canActivate function. The logic of this function determines whether the requested route can be navigated to or not. It returns either a Boolean value or an Observable or a Promise which resolves to a Boolean value. If it is true, the route is loaded, else not.

+
+
+
Listing 45. CanActivate example
+
+
...
+import {CanActivate} from "@angular/router";
+
+@Injectable()
+class ExampleAuthGuard implements CanActivate {
+  constructor(private authService: AuthService) {}
+
+  canActivate(route: ActivatedRouteSnapshot, state: RouterStateSnapshot) {
+	if (this.authService.isLoggedIn()) {
+      return true;
+    } else {
+	  window.alert('Please log in first');
+      return false;
+    }
+  }
+}
+
+
+
+

In the above example, let’s assume we have an AuthService which has an isLoggedIn() method which returns a Boolean value depending on whether the user is logged in. We use it to return true or false from the canActivate function. +The canActivate function accepts two parameters (provided by Angular). The first parameter of type ActivatedRouteSnapshot is the snapshot of the route the user is trying to navigate to (where the guard is implemented); we can extract the route parameters from this instance. The second parameter of type RouterStateSnapshot is a snapshot of the router state the user is trying to navigate to; we can fetch the URL from its url property.

+
+
+ + + + + +
+ + +We can also redirect the user to another page (maybe a login page) if the authService returns false. To do that, inject Router and use it’s navigate function to redirect to the appropriate page. +
+
+
+

Since it is a service, it needs to be provided in our module:

+
+
+
Listing 46. provide the guard in a module
+
+
@NgModule({
+  ...
+  providers: [
+    ...
+    ExampleAuthGuard
+  ]
+})
+
+
+
+

Now this guard is ready to use on our routes. We implement it where we define our array of routes in the application:

+
+
+
Listing 47. Implementing the guard
+
+
...
+const routes: Routes = [
+  { path: '', redirectTo: 'home', pathMatch: 'full' },
+  { path: 'home', component: HomeComponent },
+  { path: 'page1', component: Page1Component, canActivate: [ExampleAuthGuard] }
+];
+
+
+
+

As you can see, the canActivate property accepts an array of guards. So we can implement more than one guard on a route.

+
+
+
+

== CanActivateChild guard

+
+

To use the guard on nested (children) routes, we add it to the canActivateChild property like so:

+
+
+
Listing 48. Implementing the guard on child routes
+
+
...
+const routes: Routes = [
+  { path: '', redirectTo: 'home', pathMatch: 'full' },
+  { path: 'home', component: HomeComponent },
+  { path: 'page1', component: Page1Component, canActivateChild: [ExampleAuthGuard], children: [
+	{path: 'sub-page1', component: SubPageComponent},
+    {path: 'sub-page2', component: SubPageComponent}
+  ] }
+];
+
+
+
+
+

Example 2 - CanLoad guard

+
+

Similar to CanActivate, to use this guard we implement the CanLoad interface and override its canLoad function. Again, this function returns either a Boolean value or an Observable or a Promise which resolves to a Boolean value. The fundamental difference between CanActivate and CanLoad is that CanLoad is used to determine whether an entire module can be lazily loaded or not. If the guard returns false for a module protected by CanLoad, the entire module is not loaded.

+
+
+
Listing 49. CanLoad example
+
+
...
+import {CanLoad, Route} from "@angular/router";
+
+@Injectable()
+class ExampleCanLoadGuard implements CanLoad {
+  constructor(private authService: AuthService) {}
+
+  canLoad(route: Route) {
+	if (this.authService.isLoggedIn()) {
+      return true;
+    } else {
+	  window.alert('Please log in first');
+      return false;
+    }
+  }
+}
+
+
+
+

Again, let’s assume we have an AuthService which has an isLoggedIn() method which returns a Boolean value depending on whether the user is logged in. The canLoad function accepts a parameter of type Route which we can use to fetch the path a user is trying to navigate to (using the path property of Route).

+
+
+

This guard needs to be provided in our module like any other service.

+
+
+

To implement the guard, we use the canLoad property:

+
+
+
Listing 50. Implementing the guard
+
+
...
+const routes: Routes = [
+  { path: 'home', component: HomeComponent },
+  { path: 'admin', loadChildren: 'app/admin/admin.module#AdminModule', canLoad: [ExampleCanLoadGuard] }
+];
+
+
+ +
+
+

Testing

+
+

This guide will cover the basics of testing logic inside your code with unit test cases. +The guide assumes that you are familiar with Angular CLI (see the guide)

+
+
+

For testing your Angular application with unit test cases there are two main strategies:

+
+
+
    +
  1. +

    Isolated unit test cases
    +Isolated unit tests examine an instance of a class all by itself without any dependence on Angular or any injected values. +The amount of code and effort needed to create such tests is minimal.

    +
  2. +
  3. +

    Angular Testing Utilities
    +Let you test components including their interaction with Angular. +The amount of code and effort needed to create such tests is a little higher.

    +
  4. +
+
+
+
+

Testing Concept

+
+

The following figure shows you an overview of the application architecture divided in testing areas.

+
+
+
+Testing Areas +
+
Figure 41. Testing Areas
+
+
+

There are three areas, which need to be covered by different testing strategies.

+
+
+
    +
  1. +

    Components:
    +Smart Components need to be tested because they contain view logic. +Also the interaction with 3rd party components needs to be tested. +When a 3rd party component changes with an upgrade a test will be failing and warn you, that there is something wrong with the new version. +Most of the time Dumb Components do not need to be tested because they mainly display data and do not contain any logic. +Smart Components are always tested with Angular Testing Utilities. +For example selectors, which select data from the store and transform it further, need to be tested.

    +
  2. +
  3. +

    Stores:
    +A store contains methods representing state transitions. +If these methods contain logic, they need to be tested. +Stores are always tested using Isolated unit tests.

    +
  4. +
  5. +

    Services:
    +Services contain Business Logic, which needs to be tested. +UseCase Services represent a whole business use case. +For instance this could be initializing a store with all the data that is needed for a dialog - loading, transforming, storing. +Often Angular Testing Utilities are the optimal solution for testing UseCase Services, because they allow for an easy stubbing of the back-end. +All other services should be tested with Isolated unit tests as they are much easier to write and maintain.

    +
  6. +
+
+
+
+

Testing Smart Components

+
+

Testing Smart Components should assure the following.

+
+
+
    +
  1. +

    Bindings are correct.

    +
  2. +
  3. +

    Selectors which load data from the store are correct.

    +
  4. +
  5. +

    Asynchronous behavior is correct (loading state, error state, "normal" state).

    +
  6. +
  7. +

    Oftentimes through testing one realizes, that important edge cases are forgotten.

    +
  8. +
  9. +

    Should these tests become very complex, it is often an indicator of poor code quality in the component. +Then the implementation is to be adjusted / refactored.

    +
  10. +
  11. +

    When testing values received from the native DOM, you will test also that 3rd party libraries did not change with a version upgrade. +A failing test will show you what part of a 3rd party library has changed. +This is much better than the users doing this for you. +For example a binding might fail because the property name was changed with a newer version of a 3rd party library.

    +
  12. +
+
+
+

In the function beforeEach() the TestBed imported from Angular Testing Utilities needs to be initialized. +The goal should be to define a minimal test-module with TestBed. +The following code gives you an example.

+
+
+
Listing 51. Example test setup for Smart Components
+
+
describe('PrintFlightComponent', () => {
+
+  let fixture: ComponentFixture<PrintFlightComponent>;
+  let store: FlightStore;
+  let printServiceSpy: jasmine.SpyObj<FlightPrintService>;
+
+  beforeEach(() => {
+    const urlParam = '1337';
+    const activatedRouteStub = { params: of({ id: urlParam }) };
+    printServiceSpy = jasmine.createSpyObj('FlightPrintService', ['initializePrintDialog']);
+    TestBed.configureTestingModule({
+      imports: [
+        TranslateModule.forRoot(),
+        RouterTestingModule
+      ],
+      declarations: [
+        PrintFlightComponent,
+        PrintContentComponent,
+        GeneralInformationPrintPanelComponent,
+        PassengersPrintPanelComponent
+      ],
+      providers: [
+        FlightStore,
+        {provide: FlightPrintService, useValue: printServiceSpy},
+        {provide: ActivatedRoute, useValue: activatedRouteStub}
+      ]
+    });
+    fixture = TestBed.createComponent(PrintFlightComponent);
+    store = fixture.debugElement.injector.get(FlightStore);
+    fixture.detectChanges();
+  });
+
+  // ... test cases
+})
+
+
+
+

It is important:

+
+
+
    +
  • +

    Use RouterTestingModule instead of RouterModule

    +
  • +
  • +

    Use TranslateModule.forRoot() without translations +This way you can test language-neutral without translation marks.

    +
  • +
  • +

    Do not add a whole module from your application - in declarations add the tested Smart Component with all its Dumb Components

    +
  • +
  • +

    The store should never be stubbed. +If you need a complex test setup, just use the regular methods defined on the store.

    +
  • +
  • +

    Stub all services used by the Smart Component. +These are mostly UseCase services. +They should not be tested by these tests. +Only the correct call to their functions should be assured. +The logic inside the UseCase services is tested with separate tests.

    +
  • +
  • +

    detectChanges() performs an Angular Change Detection cycle (Angular refreshes all the bindings present in the view)

    +
  • +
  • +

    tick() performs a virtual macro task, tick(1000) is equal to the virtual passing of 1s.

    +
  • +
+
+
+

The following test cases show the testing strategy in action.

+
+
+
Listing 52. Example
+
+
it('calls initializePrintDialog for url parameter 1337', fakeAsync(() => {
+  expect(printServiceSpy.initializePrintDialog).toHaveBeenCalledWith(1337);
+}));
+
+it('creates correct loading subtitle', fakeAsync(() => {
+  store.setPrintStateLoading(123);
+  tick();
+  fixture.detectChanges();
+
+  const subtitle = fixture.debugElement.query(By.css('app-header-element .print-header-container span:last-child'));
+  expect(subtitle.nativeElement.textContent).toBe('PRINT_HEADER.FLIGHT STATE.IS_LOADING');
+}));
+
+it('creates correct subtitle for loaded flight', fakeAsync(() => {
+  store.setPrintStateLoadedSuccess({
+    id: 123,
+    description: 'Description',
+    iata: 'FRA',
+    name: 'Frankfurt',
+    // ...
+  });
+  tick();
+  fixture.detectChanges();
+
+  const subtitle = fixture.debugElement.query(By.css('app-header-element .print-header-container span:last-child'));
+  expect(subtitle.nativeElement.textContent).toBe('PRINT_HEADER.FLIGHT "FRA (Frankfurt)" (ID: 123)');
+}));
+
+
+
+

The examples show the basic testing method

+
+
+
    +
  • +

    Set the store to a well-defined state

    +
  • +
  • +

    check if the component displays the correct values

    +
  • +
  • +

    …​ via checking values inside the native DOM.

    +
  • +
+
+
+
+

Testing state transitions performed by stores

+
+

Stores are always tested with Isolated unit tests.

+
+
+

Actions triggered by dispatchAction() calls are asynchronously performed to alter the state. +A good solution to test such a state transition is to use the done callback from Jasmine.

+
+
+
Listing 53. Example for testing a store
+
+
let sut: FlightStore;
+
+beforeEach(() => {
+  sut = new FlightStore();
+});
+
+it('setPrintStateLoading sets print state to loading', (done: Function) => {
+  sut.setPrintStateLoading(4711);
+
+  sut.state$.pipe(first()).subscribe(result => {
+    expect(result.print.isLoading).toBe(true);
+    expect(result.print.loadingId).toBe(4711);
+    done();
+  });
+});
+
+it('toggleRowChecked adds flight with given id to selectedValues Property', (done: Function) => {
+  const flight: FlightTO = {
+    id: 12
+    // dummy data
+  };
+  sut.setRegisterabgleichListe([flight]);
+  sut.toggleRowChecked(12);
+
+  sut.state$.pipe(first()).subscribe(result => {
+    expect(result.selectedValues).toContain(flight);
+    done();
+  });
+});
+
+
+
+
+

Testing services

+
+

When testing services both strategies - Isolated unit tests and Angular Testing Utilities - are valid options.

+
+
+

The goals of such tests are

+
+
+
    +
  • +

    assuring the behavior for valid data.

    +
  • +
  • +

    assuring the behavior for invalid data.

    +
  • +
  • +

    documenting functionality

    +
  • +
  • +

    safely performing refactoring

    +
  • +
  • +

    thinking about edge case behavior while testing

    +
  • +
+
+
+

For simple services Isolated unit tests can be written. +Writing these tests takes less effort and they can be written very fast.

+
+
+

The following listing gives an example of such tests.

+
+
+
Listing 54. Testing a simple services with Isolated unit tests
+
+
let sut: IsyDatePipe;
+
+beforeEach(() => {
+  sut = new IsyDatePipe();
+});
+
+it('transform should return empty string if input value is empty', () => {
+  expect(sut.transform('')).toBe('');
+});
+
+it('transform should return empty string if input value is null', () => {
+  expect(sut.transform(undefined)).toBe('');
+});
+
+// ...more tests
+
+
+
+

For testing Use Case services the Angular Testing Utilities should be used. +The following listing gives an example.

+
+
+
Listing 55. Test setup for testing use case services with Angular Testing Utilities
+
+
let sut: FlightPrintService;
+let store: FlightStore;
+let httpController: HttpTestingController;
+let flightCalculationServiceStub: jasmine.SpyObj<FlightCalculationService>;
+const flight: FlightTo = {
+  // ... valid dummy data
+};
+
+beforeEach(() => {
+  flightCalculationServiceStub = jasmine.createSpyObj('FlightCalculationService', ['getFlightType']);
+  flightCalculationServiceStub.getFlightType.and.callFake((catalog: string, type: string, key: string) => of(`${key}_long`));
+  TestBed.configureTestingModule({
+    imports: [
+      HttpClientTestingModule,
+      RouterTestingModule,
+    ],
+    providers: [
+      FlightPrintService,
+      FlightStore,
+      FlightAdapter,
+      {provide: FlightCalculationService, useValue: flightCalculationServiceStub}
+    ]
+  });
+
+  sut = TestBed.get(FlightPrintService);
+  store = TestBed.get(FlightStore);
+  httpController = TestBed.get(HttpTestingController);
+});
+
+
+
+

When using TestBed, it is important

+
+
+
    +
  • +

    to import HttpClientTestingModule for stubbing the back-end

    +
  • +
  • +

    to import RouterTestingModule for stubbing the Angular router

    +
  • +
  • +

    not to stub stores, adapters and business services

    +
  • +
  • +

    to stub services from libraries like FlightCalculationService - the correct implementation of libraries should not be tested by these tests.

    +
  • +
+
+
+

Testing back-end communication looks like this:

+
+
+
Listing 56. Testing back-end communication with Angular HttpTestingController
+
+
it('loads flight if not present in store', fakeAsync(() => {
+  sut.initializePrintDialog(1337);
+  const processRequest = httpController.expectOne('/path/to/flight');
+  processRequest.flush(flight);
+
+  httpController.verify();
+}));
+
+it('does not load flight if present in store', fakeAsync(() => {
+  const flight = {...flight, id: 4711};
+  store.setRegisterabgleich(flight);
+
+  sut.initializePrintDialog(4711);
+  httpController.expectNone('/path/to/flight');
+
+  httpController.verify();
+}));
+
+
+
+

The first test assures a correct XHR request is performed if initializePrintDialog() is called and no data is in the store. +The second test assures no XHR request is performed if the needed data is already in the store.

+
+
+

The next steps are checks for the correct implementation of logic.

+
+
+
Listing 57. Example testing a Use Case service
+
+
it('creates flight destination for valid key in svz', fakeAsync(() => {
+  const flightTo: FlightTo = {
+    ...flight,
+    id: 4712,
+    profile: '77'
+  };
+  store.setFlight(flightTo);
+  let result: FlightPrintContent|undefined;
+
+  sut.initializePrintDialog(4712);
+  store.select(s => s.print.content).subscribe(content => result = content);
+  tick();
+
+  expect(result!.destination).toBe('77_long (ID: 77)');
+}));
+
+
+ +
+
+

Update Angular CLI

+ +
+
+

Angular CLI common issues

+
+

There are constant updates for the official Angular framework dependencies. These dependencies are directly related with the Angular CLI package. Since this package comes installed by default inside the devonfw distribution folder for Windows OS and the distribution is updated every few months it needs to be updated in order to avoid known issues.

+
+
+
+

Angular CLI update guide

+
+

For Linux users is as easy as updating the global package:

+
+
+
+
$ npm uninstall -g @angular/cli
+$ npm install -g @angular/cli
+
+
+
+

For Windows users the process is only a bit harder. Open the devonfw bundled console and do as follows:

+
+
+
+
$ cd [devonfw_dist_folder]
+$ cd software/nodejs
+$ npm uninstall @angular/cli --no-save
+$ npm install @angular/cli --no-save
+
+
+
+

After following these steps you should have the latest Angular CLI version installed in your system. In order to check it run in the distribution console:

+
+
+ + + + + +
+ + +At the time of this writing, the Angular CLI is at 1.7.4 version. +
+
+
+
+
λ ng version
+
+     _                      _                 ____ _     ___
+    / \   _ __   __ _ _   _| | __ _ _ __     / ___| |   |_ _|
+   / △ \ | '_ \ / _` | | | | |/ _` | '__|   | |   | |    | |
+  / ___ \| | | | (_| | |_| | | (_| | |      | |___| |___ | |
+ /_/   \_\_| |_|\__, |\__,_|_|\__,_|_|       \____|_____|___|
+                |___/
+
+
+Angular CLI: 7.2.3
+Node: 10.13.0
+OS: win32 x64
+Angular:
+...
+
+
+ +
+
+

Upgrade devon4ng Angular and Ionic/Angular applications

+
+

Angular CLI provides a powerful tool to upgrade Angular based applications to the current stable release of the core framework.

+
+
+

This tool is ng update. It will not only upgrade dependencies and their related ones but also will perform some fixes in your code if available thanks to the provided schematics. It will check even if the update is not possible as there is another library or libraries that are not compatible with the versions of the upgraded dependencies. In this case it will keep your application untouched.

+
+
+ + + + + +
+ + +The repository must be in a clean state before executing a ng update. So, remember to commit your changes first. +
+
+
+
+

Basic usage

+
+

In order to perform a basic upgrade we will execute:

+
+
+
+
$ ng update @angular/cli @angular/core
+
+
+
+
+

Upgrade to new Angular version

+
+

The process will be the same, but first we need to make sure that our devon4ng application is in the latest version of Angular 8, so the ng update command can perform the upgrade not only in the dependencies but also making code changes to reflect the new features and fixes.

+
+
+
    +
  • +

    First, upgrade to latest Angular 9 version:

    +
  • +
+
+
+
+
$ ng update @angular/cli@9 @angular/core@9
+
+
+
+

Optionally the flag -C can be added to previous command to make a commit automatically. This is also valid for the next steps.

+
+
+
    +
  • +

    Then, upgrade Angular:

    +
  • +
+
+
+
+
$ ng update @angular/cli @angular/core
+
+
+
+
    +
  • +

    In case you use Angular Material:

    +
  • +
+
+
+
+
$ ng update @angular/material
+
+
+
+
    +
  • +

    If the application depends on third party libraries, the new tool ngcc can be run to make them compatible with the new Ivy compiler. In this case it is recommended to include a postinstall script in the package.json:

    +
  • +
+
+
+
+
{
+  "scripts": {
+    "postinstall": "ngcc --properties es2015 browser module main --first-only --create-ivy-entry-points"
+  }
+}
+
+
+ +
+

Important use cases:

+
+
+
    +
  • +

    To update to the next beta or pre-release version, use the --next=true option.

    +
  • +
  • +

    To update from one major version to another, use the format ng update @angular/cli@^<major_version> @angular/core@^<major_version>.

    +
  • +
  • +

    In case your Angular application uses @angular/material include it in the first command:

    +
    +
    +
    $ ng update @angular/cli @angular/core @angular/material
    +
    +
    +
  • +
+
+
+
+

Ionic/Angular applications

+
+

Just following the same procedure we can upgrade Angular applications, but we must take care of important specific Ionic dependencies:

+
+
+
+
$ ng update @angular/cli @angular/core @ionic/angular @ionic/angular-toolkit [@ionic/...]
+
+
+
+
+

Other dependencies

+
+

Every application will make use of different dependencies. Angular CLI ng update will also take care of these ones. For example, if you need to upgrade @capacitor you will perform:

+
+
+
+
$ ng update @capacitor/cli @capacitor/core [@capacitor/...]
+
+
+
+

Another example could be that you need to upgrade @ngx-translate packages. As always in this case you will execute:

+
+
+
+
$ ng update @ngx-translate/core @ngx-translate/http-loader
+
+
+
+
+

Angular Update Guide online tool

+
+

It is recommended to use the Angular Update Guide tool at https://update.angular.io/ that will provide the necessary steps to upgrade any Angular application depending on multiple criteria.

+
+ +
+
+

Working with Angular CLI

+
+

Angular CLI provides a facade for building, testing, linting, debugging and generating code. +Under the hood Angular CLI uses specific tools to achieve these tasks. +The user does not need to maintain them and can rely on Angular to keep them up to date and maybe switch to other tools which come up in the future.

+
+
+

The Angular CLI provides a wiki with common tasks you encounter when working on applications with the Angular CLI. +The Angular CLI Wiki can be found here.

+
+
+

In this guide we will go through the most important tasks. +To go into more details, please visit the Angular CLI wiki.

+
+
+
+

Installing Angular CLI

+
+

Angular CLI should be added as global and local dependency. +The following commands add Angular CLI as global Dependency.

+
+
+

yarn command

+
+
+
+
yarn global add @angular/cli
+
+
+
+

npm command

+
+
+
+
npm install -g @angular/cli
+
+
+
+

You can check a successful installation with ng --version. +This should print out the version installed.

+
+
+
+Printing Angular CLI Version +
+
Figure 42. Printing Angular CLI Version
+
+
+
+

Running a live development server

+
+

The Angular CLI can be used to start a live development server. +First your application will be compiled and then the server will be started. +If you change the code of a file, the server will reload the displayed page. +Run your application with the following command:

+
+
+
+
ng serve -o
+
+
+
+
+

Running Unit Tests

+
+

All unit tests can be executed with the command:

+
+
+
+
ng test
+
+
+
+

To make a single run and create a code coverage file use the following command:

+
+
+
+
ng test -sr -cc
+
+
+
+ + + + + +
+ + +You can configure the output format for code coverage files to match your requirements in the file karma.conf.js which can be found on toplevel of your project folder. +For instance, this can be useful for exporting the results to a SonarQube. +
+
+
+
+

Linting the code quality

+
+

You can lint your files with the command

+
+
+
+
ng lint --type-check
+
+
+
+ + + + + +
+ + +You can adjust the linting rules in the file tslint.json which can be found on toplevel of your project folder. +
+
+
+
+

Generating Code

+ +
+
+

Creating a new Angular CLI project

+
+

For creating a new Angular CLI project the command ng new is used.

+
+
+

The following command creates a new application named my-app.

+
+
+
+
ng new my-app
+
+
+
+
+

Creating a new feature module

+
+

A new feature module can be created via the ng generate module command.

+
+
+

The following command generates a new feature module named todo.

+
+
+
+
ng generate module todo
+
+
+
+
+Generate a module with Angular CLI +
+
Figure 43. Generate a module with Angular CLI
+
+
+ + + + + +
+ + +The created feature module needs to be added to the AppModule by hand. +Another option would be to define a lazy route in AppRoutingModule to make this a lazy loaded module. +
+
+
+
+

Creating a new component

+
+

To create components the command ng generate component can be used.

+
+
+

The following command will generate the component todo-details inside the components layer of todo module. +It will generate a class, a html file, a css file and a test file. +Also, it will register this component as declaration inside the nearest module - this is TodoModule.

+
+
+
+
ng generate component todo/components/todo-details
+
+
+
+
+Generate a component with Angular CLI +
+
Figure 44. Generate a component with Angular CLI
+
+
+ + + + + +
+ + +If you want to export the component, you have to add the component to exports array of the module. +This would be the case if you generate a component inside shared module. +
+
+
+
+

Configuring an Angular CLI project

+
+

Inside an Angular CLI project the file .angular-cli.json can be used to configure the Angular CLI.

+
+
+

The following options are very important to understand.

+
+
+
    +
  • +

    The property defaults can be used to change the default style extension. +The following settings will make the Angular CLI generate .less files, when a new component is generated.

    +
  • +
+
+
+
+
"defaults": {
+  "styleExt": "less",
+  "component": {}
+}
+
+
+
+
    +
  • +

    The property apps contains all applications maintained with Angular CLI. +Most of the time you will have only one.

    +
    +
      +
    • +

      assets configures all the static files, that the application needs - this can be images, fonts, json files, etc. +When you add them to assets the Angular CLI will put these files to the build target and serve them while debugging. +The following will put all files in /i18n to the output folder /i18n

      +
    • +
    +
    +
  • +
+
+
+
+
"assets": [
+  { "glob": "**/*.json", "input": "./i18n", "output": "./i18n" }
+]
+
+
+
+
    +
  • +

    styles property contains all style files that will be globally available. +The Angular CLI will create a styles bundle that goes directly into index.html with it. +The following will make all styles in styles.less globally available.

    +
  • +
+
+
+
+
"styles": [
+  "styles.less"
+]
+
+
+
+
    +
  • +

    environmentSource and environments are used to configure configuration with the Angular CLI. +Inside the code always the file specified in environmentSource will be referenced. +You can define different environments - eg. production, staging, etc. - which you list in environments. +At compile time the Angular CLI will override all values in environmentSource with the values from the matching environment target. +The following code will build the application for the environment staging.

    +
  • +
+
+
+
+
ng build --environment=staging
+
+
+
+
+
+
+

Ionic

+
+ +
+

Ionic 5 Getting started

+
+

Ionic is a front-end focused framework which offers different tools for developing hybrid mobile applications. The web technologies used for this purpose are CSS, Sass, HTML5 and Typescript.

+
+
+
+

Why Ionic?

+
+

Ionic is used for developing hybrid applications, which means not having to rely on a specific IDE such as Android Studio or Xcode. Furthermore, development of native apps requires learning different languages (Java/Kotlin for Android and Objective-C/Swift for Apple), with Ionic, a developer does not have to code the same functionality for multiple platforms, just use the adequate libraries and components.

+
+
+
+

Basic environment set up

+ +
+
+

Install Ionic CLI

+
+

Although the devonfw distribution comes with an already installed Ionic CLI, here are the steps to install it. The installation of Ionic is easy, just one command has to be written:

+
+
+

$ npm install -g @ionic/cli

+
+
+
+

Update Ionic CLI

+
+

If there was a previous installation of the Ionic CLI, it will need to be uninstalled due to a change in package name.

+
+
+
+
$ npm uninstall -g ionic
+$ npm install -g @ionic/cli
+
+
+
+

Basic project set up +The set up of an Ionic application is pretty immediate and can be done in one line:

+
+
+

ionic start <name> <template> --type=angular

+
+
+
    +
  • +

    ionic start: Command to create an app.

    +
  • +
  • +

    <name>: Name of the application.

    +
  • +
  • +

    <template>: Model of the application.

    +
  • +
  • +

    --type=angular: With this flag, the app produced will be based on angular.

    +
  • +
+
+
+

To create an empty project, the following command can be used:

+
+
+

ionic start MyApp blank --type=angular

+
+
+
+Ionic blank project +
+
+
+

The image above shows the directory structure generated.

+
+
+

There are more templates available that can be seen with the command +ionic start --list

+
+
+
+List of ionic templates +
+
+
+

The templates surrounded by a red line are based on angular and come with Ionic v5, while the others belong to earlier versions (before v4).

+
+
+ + + + + +
+ + +More info at https://ionicframework.com/docs. Remember to select Angular documentation, since Ionic supports React, Vue and Vanilla JS. +
+
+ +
+
+

Ionic to android

+
+

This page is written to help developers to go from the source code of an ionic application to an android one, with this in mind, topics such as: environment, commands, modifications,…​ are covered.

+
+
+
+

Assumptions

+
+

This document assumes that the reader has already:

+
+
+
    +
  • +

    Source code of an Ionic application and wants to build it on an android device,

    +
  • +
  • +

    A working installation of NodeJS

    +
  • +
  • +

    An Ionic CLI installed and up-to-date.

    +
  • +
  • +

    Android Studio and Android SDK.

    +
  • +
+
+
+
+

From Ionic to Android project

+
+

When a native application is being designed, sometimes, functionalities that use camera, geolocation, push notification, …​ are requested. To resolve these requests, Capacitor can be used.

+
+
+

In general terms, Capacitor wraps apps made with Ionic (HTML, SCSS, Typescript) into WebViews that can be displayed in native applications (Android, IOS) and allows the developer to access native functionalities like the ones said before.

+
+
+

Installing capacitor is as easy as installing any node module, just a few commands have to be run in a console:

+
+
+
    +
  • +

    cd name-of-ionic-4-app

    +
  • +
  • +

    npm install --save @capacitor/core @capacitor/cli

    +
  • +
+
+
+

Then, it is necessary to initialize capacitor with some information: app id, name of the app and the directory where your app is stored. To fill this information, run:

+
+
+
    +
  • +

    npx cap init

    +
  • +
+
+
+
+

Modifications

+
+

Throughout the development process, usually back-end and front-end are on a local computer, so it’s a common practice to have different configuration files for each environment (commonly production and development). Ionic uses an angular.json file to store those configurations and some rules to be applied.

+
+
+

If a back-end is hosted on http://localhost:8081, and that direction is used in every environment, the application built for android will not work because computer and device do not have the same localhost. Fortunately, different configurations can be defined.

+
+
+

Android Studio uses 10.0.2.2 as an alias for 127.0.0.1 (computer’s localhost) so adding http://10.0.2.2:8081 in a new environment file and modifying angular.json accordingly, will make it possible to connect front-end and back-end.

+
+
+
+Android environment and angular.json +
+
+
+
+
    "build": {
+    ...
+        "configurations": {
+            ...
+            "android": {
+                "fileReplacements": [
+                    {
+                        "replace": "src/environments/environment.ts",
+                        "with": "src/environments/environment.android.ts"
+                    }
+                ]
+            },
+        }
+    }
+
+
+
+
+

Build

+
+

Once configured, it is necessary to build the Ionic app using this new configuration:

+
+
+
    +
  • +

    ionic build --configuration=android

    +
  • +
+
+
+

The next commands copy the build application on a folder named android and open android studio.

+
+
+
    +
  • +

    npx cap add android

    +
  • +
  • +

    npx cap copy

    +
  • +
  • +

    npx cap open android

    +
  • +
+
+
+
+

From Android project to emulated device

+
+

Once Android Studio is opened, follow these steps:

+
+
+
    +
  1. +

    Click on "Build" → Make project.

    +
  2. +
  3. +

    Click on "Build" → Make Module 'app' (default name).

    +
  4. +
+
+
+

Click on make project +click on make app

+
+
+
    +
  1. +

    Click on" Build" → Build Bundle(s) / APK(s) → Build APK(s).

    +
  2. +
  3. +

    Click on run and choose a device.

    +
  4. +
+
+
+

click on build APK +click on running device

+
+
+

If there are no devices available, a new one can be created:

+
+
+
    +
  1. +

    Click on "Create new device"

    +
  2. +
  3. +

    Select hardware and click "Next". For example: Phone → Nexus 5X.

    +
  4. +
+
+
+

Create new device +Select hardware

+
+
+
    +
  1. +

    Download a system image.

    +
    +
      +
    1. +

      Click on download.

      +
    2. +
    3. +

      Wait until the installation finished and then click "Finish".

      +
    4. +
    5. +

      Click "Next".

      +
    6. +
    +
    +
  2. +
  3. +

    Verify configuration (default configuration should be enough) and click "Next".

    +
  4. +
+
+
+

Download system image +Check configuration

+
+
+
    +
  1. +

    Check that the new device is created correctly.

    +
  2. +
+
+
+
+New created device +
+
+
+
+

From Android project to real device

+
+

To test on a real android device, an easy approach to communicate a smartphone (front-end) and computer (back-end) is to configure a WiFi hotspot and connect the computer to it. A guide about this process can be found here.

+
+
+

Once connected, run ipconfig on a console if you are using windows or ifconfig on a Linux machine to get the IP address of your machine’s Wireless LAN adapter WiFi.

+
+
+
+Result of `ipconfig` command on Windows 10 +
+
+
+

This obtained IP must be used instead of "localhost" or "10.0.2.2" at environment.android.ts.

+
+
+
+Android environment file server URL +
+
+
+

After this configuration, follow the build steps in "From Ionic to Android project" and the first three steps in "From Android project to emulated device".

+
+
+
+

Send APK to Android through USB

+
+

To send the built application to a device, you can connect computer and mobile through USB, but first, it is necessary to unlock developer options.

+
+
+
    +
  1. +

    Open "Settings" and go to "System".

    +
  2. +
  3. +

    Click on "About".

    +
  4. +
  5. +

    Click "Build number" seven times to unlock developer options.

    +
  6. +
+
+
+
+Steps to enable developer options: 1, 2, 3 +
+
+
+
    +
  1. +

    Go to "System" again and then to "Developer options"

    +
  2. +
  3. +

    Check that the options are "On".

    +
  4. +
  5. +

    Check that "USB debugging" is activated.

    +
  6. +
+
+
+
+Steps to enable developer options: 4, 5, 6 +
+
+
+

After this, do the step four in "From Android project to emulated device" and choose the connected smartphone.

+
+
+
+

Send APK to Android through email

+
+

When you build an APK, a dialog gives two options: locate or analyze. If the first one is chosen, Windows file explorer will be opened showing an APK that can be send using email. Download the APK on your phone and click it to install.

+
+
+
+Steps to enable developer options: 4, 5, 6 +
+
+
+
+

Result

+
+

If everything goes correctly, the Ionic application will be ready to be tested.

+
+
+
+Application running on a real device +
+
+ +
+
+

Ionic Progressive Web App

+
+

This guide is a continuation of the guide Angular PWAs, therefore, valid concepts explained there are still valid in this page but focused on Ionic.

+
+
+
+

Assumptions

+
+

This guide assumes that you already have installed:

+
+
+
    +
  • +

    NodeJS

    +
  • +
  • +

    npm package manager

    +
  • +
  • +

    Angular CLI / Nx CLI

    +
  • +
  • +

    Ionic 5 CLI

    +
  • +
  • +

    Capacitor

    +
  • +
+
+
+

Also, it is a good idea to read the document about PWA using Angular.

+
+
+
+

Sample Application

+
+
+Ionic 5 PWA Base +
+
Figure 45. Basic ionic PWA.
+
+
+

To explain how to build progressive web apps (PWA) using Ionic, a basic application is going to be built. This app will be able to take photos even without network using PWA elements.

+
+
+
+

Step 1: Create a new project

+
+

This step can be completed with one simple command: ionic start <name> <template>, where <name> is the name and <template> a model for the app. In this case, the app is going to be named basic-ion-pwa.

+
+
+

If you are using Nx, there is a pre-requisite to this step. And that is, you have to add the @nxtend/ionic-angular plugin to your Nx workspace. The command for that is npm install --save-dev @nxtend/ionic-angular. Once you have the plugin installed, you can generate an ionic app in your Nx workspace with the command nx generate @nxtend/ionic-angular:app basic-ion-pwa. (You can refer this guide if you want to get started with Nx).

+
+
+
+

Step 2: Structures and styles

+
+

The styles (scss) and structures (html) do not have anything specially relevant, just colors and ionic web components. The code can be found in devon4ts-samples.

+
+
+
+

Step 3: Add functionality

+
+

After this step, the app will allow users to take photos and display them in the main screen. +First we have to import three important elements:

+
+
+
    +
  • +

    DomSanitizer: Sanitizes values to be safe to use.

    +
  • +
  • +

    SafeResourceUrl: Interface for values that are safe to use as URL.

    +
  • +
  • +

    Plugins: Capacitor constant value used to access to the device’s camera and toast dialogs.

    +
  • +
+
+
+
+
  import { DomSanitizer, SafeResourceUrl } from '@angular/platform-browser';
+  import { Plugins, CameraResultType } from '@capacitor/core';
+
+
+
+

The process of taking a picture is enclosed in a takePicture() method. takePicture() calls the Camera’s getPhoto() function which returns an URL or an exception. If a photo is taken then the image displayed in the main page will be changed for the new picture, else, if the app is closed without changing it, a toast message will be displayed.

+
+
+
+
  export class HomePage {
+    image: SafeResourceUrl;
+    ...
+
+    async takePicture() {
+      try {
+        const image = await Plugins.Camera.getPhoto({
+          quality: 90,
+          allowEditing: true,
+          resultType: CameraResultType.Uri,
+        });
+
+        // Change last picture shown
+        this.image = this.sanitizer.bypassSecurityTrustResourceUrl(image.webPath);
+      } catch (e) {
+        this.show('Closing camera');
+      }
+    }
+
+    async show(message: string) {
+      await Plugins.Toast.show({
+        text: message,
+      });
+    }
+  }
+
+
+
+
+

Step 4: PWA Elements

+
+

When Ionic apps are not running natively, some resources like Camera do not work by default but can be enabled using PWA Elements. To use Capacitor’s PWA elements run npm install @ionic/pwa-elements and modify src/main.ts as shown below.

+
+
+
+
...
+
+// Import for PWA elements
+import { defineCustomElements } from '@ionic/pwa-elements/loader';
+
+if (environment.production) {
+  enableProdMode();
+}
+
+platformBrowserDynamic().bootstrapModule(AppModule)
+  .catch(err => console.log(err));
+
+// Call the element loader after the platform has been bootstrapped
+defineCustomElements(window);
+
+
+
+
+

Step 5: Make it Progressive.

+
+

Turning an Ionic 5 app into a PWA is pretty easy. The same module used to turn Angular apps into PWAs has to be added. To do so, run: ng add @angular/pwa. This command also creates an icons folder inside src/assets and contains angular icons for multiple resolutions. (Note: In an Nx workspace, you have to add it like a normal package using npm install @angular/pwa, and you have to manually add the icons). If you want to use other images, be sure that they have the same resolution, the names can be different but the file manifest.json has to be changed accordingly.

+
+
+
+

Step 6: Configure the app

+
+

manifest.json

+
+
+

Default configuration.

+
+
+

ngsw-config.json

+
+
+

At assetGroups → resources add a urls field and a pattern to match PWA Elements scripts and other resources (images, styles, …​):

+
+
+
+
  "urls": ["https://unpkg.com/@ionic/pwa-elements@1.0.2/dist/**"]
+
+
+
+
+

Step 7: Check that your app is a PWA

+
+

To check if an app is a PWA lets compare its normal behavior against itself but built for production. Run in the project’s root folder the commands below:

+
+
+

ionic build --configuration production to build the app using production settings. (nx build basic-ion-pwa --configuration production in your Nx workspace root).

+
+
+

npm install http-server to install an npm module that can serve your built application. Documentation here. A good alternative is also npm install serve. It can be checked here.

+
+
+

Go to the www folder running cd www.

+
+
+

http-server -o or serve to serve your built app.

+
+
+ + + + + +
+ + +In order not to install anything not necessary npx can be used directly to serve the app. i.e run npx serve [folder] will automatically download and run this HTTP server without installing it in the project dependencies. +
+
+
+
+Http server running +
+
Figure 46. Http server running on localhost:8081.
+
+
+

 
+In another console instance run ionic serve (nx serve basic-ion-pwa if using Nx CLI) to open the common app (not built).

+
+
+
+Ionic serve on Visual Studio Code console +
+
Figure 47. Ionic server running on localhost:8100.
+
+
+

 
+The first difference can be found on Developer tools → application, here it is seen that the PWA application (left) has a service worker and the common one does not.

+
+
+
+Application comparison +
+
Figure 48. Application service worker comparison.
+
+
+

 
+If the "offline" box is checked, it will force a disconnection from network. In situations where users do not have connectivity or have a slow connection, the PWA can still be accessed and used.

+
+
+
+Online offline apps +
+
Figure 49. Offline application.
+
+
+

 
+Finally, plugins like Lighthouse can be used to test whether an application is progressive or not.

+
+
+
+Lighthouse report +
+
Figure 50. Lighthouse report.
+
+
+
+
+
+

Layouts

+
+ +
+

Angular Material Layout

+
+

The purpose of this guide is to get a basic understanding of creating layouts using Angular Material in a devon4ng application. We will create an application with a header containing some menu links and a sidenav with some navigation links.

+
+
+
+Finished application +
+
Figure 51. This is what the finished application will look like
+
+
+
+

Create a new angular application

+
+

We start with opening the devonfw IDE(right-click anywhere in your workspace and click "Open devonfw CMD shell here") and running the following command to start a project named devon4ng-mat-layout

+
+
+
    +
  • +

    ng new devon4ng-mat-layout --routing --style=scss. If you are using Nx, the command would be nx generate @nrwl/angular:app devon4ng-mat-layout --routing --style=scss in your Nx workspace. Click here to get started with using Nx.

    +
  • +
+
+
+

We are providing the routing flag so that a routing module is generated, and we are also setting the style sheet format to SCSS with --style=scss.

+
+
+

Once the creation process is complete, open your newly created application in Visual Studio Code. Try running the empty application by running the following command in the integrated terminal:

+
+
+
    +
  • +

    ng serve. (If you are using Nx, you have to specify the project name along with the --project flag, so the command becomes ng serve --project=devon4ng-mat-layout)

    +
  • +
+
+
+

Angular will spin up a server and you can check your application by visiting http://localhost:4200/ in your browser.

+
+
+
+Blank application +
+
Figure 52. Blank application
+
+
+
+

Adding Angular Material library to the project

+
+

Next we will add Angular Material to our application. In the integrated terminal, press Ctrl + C to terminate the running application and run the following command:

+
+
+
    +
  • +

    npm install --save @angular/material @angular/cdk @angular/animations

    +
  • +
+
+
+

You can also use Yarn to install the dependencies if you prefer that:

+
+
+
    +
  • +

    yarn add @angular/material @angular/cdk @angular/animations

    +
  • +
+
+
+

Once the dependencies are installed, we need to import the BrowserAnimationsModule in our AppModule for animations support.

+
+
+
Listing 58. Importing BrowserAnimationsModule in AppModule
+
+
import {BrowserAnimationsModule} from '@angular/platform-browser/animations';
+
+@NgModule({
+  ...
+  imports: [BrowserAnimationsModule],
+  ...
+})
+export class AppModule { }
+
+
+
+

Angular Material provides a host of components for designing our application. All the components are well structured into individual NgModules. For each component from the Angular Material library that we want to use, we have to import the respective NgModule.

+
+
+
Listing 59. We will be using the following components in our application:
+
+
import { MatIconModule, MatButtonModule, MatMenuModule, MatListModule, MatToolbarModule, MatSidenavModule } from '@angular/material';
+
+@NgModule({
+  ...
+  imports: [
+	...
+    MatIconModule,
+    MatButtonModule,
+    MatMenuModule,
+    MatListModule,
+    MatToolbarModule,
+    MatSidenavModule,
+	...
+	],
+  ...
+})
+export class AppModule { }
+
+
+
+

A better approach is to import and then export all the required components in a shared module. But for the sake of simplicity, we are importing all the required components in the AppModule itself.

+
+
+
+

==

+
+
+
  You can find a working copy of this application https://github.com/devonfw-sample/devon4ts-samples/tree/master/apps/angular-material-basic-layout[here]. The sample application is part of a Nx workspace, which means it is one of the many apps in a monorepo and capable of importing reusable code from a shared library. This guide describes the implementation by assuming a stand-alone single-repo application, but the pages and layout described in this sample app are similar to the ones used in another sample app in the monorepo (https://github.com/devonfw-sample/devon4ts-samples/tree/master/apps/angular-material-theming[angular-material-theming]), which is why we have exported the required components from a shared library and reused them in both the apps. As a result, the code in our monorepo will be slightly different. It would still help you in following this guide.
+== ==
+
+
+
+

Next, we include a theme in our application. Angular Material comes with four pre-defined themes: indigo-pink, deeppurple-amber, pink-bluegrey and purple-green. It is also possible to create our own custom theme, but that is beyond the scope of this guide. Including a theme is required to apply all of the core and theme styles to your application. +We will include the indigo-pink theme in our application by importing the indigo-pink.css file in our src/styles.scss:

+
+
+
Listing 60. In src/styles.scss:
+
+
@import "~@angular/material/prebuilt-themes/indigo-pink.css";
+
+
+
+

To use Material Design Icons along with the mat-icon component, we will load the Material Icons library in our src/index.html file

+
+
+
Listing 61. In src/index.html:
+
+
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
+
+
+
+
+

Development

+
+

Now that we have all the Angular Material related dependencies set up in our project, we can start coding. Let’s begin by adding a suitable margin and font to the body element of our single page application. We will add it in the src/styles.scss file to apply it globally:

+
+
+
Listing 62. In src/styles.scss:
+
+
body {
+  margin: 0;
+  font-family: "Segoe UI", Roboto, sans-serif;
+}
+
+
+
+

At this point, if we run our application, this is what it will look like:

+
+
+
+Angular Material added to the application +
+
Figure 53. Application with Angular Material set up
+
+
+

We will clear the app.component.html file and setup a header with a menu button and some navigational links. We will use mat-toolbar, mat-button, mat-menu, mat-icon and mat-icon-button for this:

+
+
+
Listing 63. app.component.html:
+
+
<mat-toolbar color="primary">
+  <button mat-icon-button aria-label="menu">
+    <mat-icon>menu</mat-icon>
+  </button>
+  <button mat-button [matMenuTriggerFor]="submenu">Menu 1</button>
+  <button mat-button>Menu 2</button>
+  <button mat-button>Menu 3</button>
+
+  <mat-menu #submenu="matMenu">
+    <button mat-menu-item>Sub-menu 1</button>
+    <button mat-menu-item [matMenuTriggerFor]="submenu2">Sub-menu 2</button>
+  </mat-menu>
+
+  <mat-menu #submenu2="matMenu">
+    <button mat-menu-item>Menu Item 1</button>
+    <button mat-menu-item>Menu Item 2</button>
+    <button mat-menu-item>Menu Item 3</button>
+  </mat-menu>
+
+</mat-toolbar>
+
+
+
+

The color attribute on the mat-toolbar element will give it the primary (indigo) color as defined by our theme. The color attribute works with most Angular Material components; the possible values are 'primary', 'accent' and 'warn'. +The mat-toolbar is a suitable component to represent a header. It serves as a placeholder for elements we want in our header. +Inside the mat-toolbar, we start with a button having mat-icon-button attribute, which itself contains a mat-icon element having the value menu. This will serve as a menu button which we can use to toggle the sidenav. +We follow it with some sample buttons having the mat-button attribute. Notice the first button has a property matMenuTriggerFor bound to a local reference submenu. As the property name suggests, the click of this button will display the mat-menu element with the specified local reference as a drop-down menu. The rest of the code is self explanatory.

+
+
+
+Header added to the application +
+
Figure 54. This is how our application looks with the first menu button (Menu 1) clicked.
+
+
+

We want to keep the sidenav toggling menu button on the left and move the rest to the right to make it look better. To do this we add a class to the menu icon button:

+
+
+
Listing 64. app.component.html:
+
+
...
+  <button mat-icon-button aria-label="menu" class="menu">
+    <mat-icon>menu</mat-icon>
+  </button>
+...
+
+
+
+

And in the app.component.scss file, we add the following style:

+
+
+
Listing 65. app.component.scss:
+
+
.menu {
+    margin-right: auto;
+}
+
+
+
+

The mat-toolbar element already has its display property set to flex. Setting the menu icon button’s margin-right property to auto keeps it on the left and pushes the other elements to the right.

+
+
+
+Final look of the header +
+
Figure 55. Final look of the header.
+
+
+

Next, we will create a sidenav. But before that let's create a couple of components to navigate between, the links of which we will add to the sidenav. +We will use the ng generate component (or ng g c command for short) to create Home and Data components. (Append --project=devon4ng-mat-layout to the command in a Nx workspace). We nest them in the pages sub-directory since they represent our pages.

+
+
+
    +
  • +

    ng g c pages/home

    +
  • +
  • +

    ng g c pages/data;

    +
  • +
+
+
+

Let us set up the routing such that when we visit http://localhost:4200/ root url we see the HomeComponent and when we visit http://localhost:4200/data url we see the DataComponent. +We had opted for routing while creating the application, so we have the routing module app-routing.module.ts setup for us. In this file, we have the empty routes array where we set up our routes.

+
+
+
Listing 66. app-routing.module.ts:
+
+
import { HomeComponent } from './pages/home/home.component';
+import { DataComponent } from './pages/data/data.component';
+
+	const routes: Routes = [
+	  { path: '', component: HomeComponent },
+	  { path: 'data', component: DataComponent }
+	];
+
+
+
+

We need to provide a hook where the components will be loaded when their respective URLs are loaded. We do that by using the router-outlet directive in the app.component.html.

+
+
+
Listing 67. app.component.html:
+
+
...
+	</mat-toolbar>
+	<router-outlet></router-outlet>
+
+
+
+

Now when we visit the defined URLs we see the appropriate components rendered on screen.

+
+
+

Let's change the contents of the components to have something better.

+
+
+
Listing 68. home.component.html:
+
+
<h2>Home Page</h2>
+
+
+
+
Listing 69. home.component.scss:
+
+
h2 {
+    text-align: center;
+    margin-top: 50px;
+}
+
+
+
+
Listing 70. data.component.html:
+
+
<h2>Data Page</h2>
+
+
+
+
Listing 71. data.component.scss:
+
+
h2 {
+    text-align: center;
+    margin-top: 50px;
+}
+
+
+
+

The pages look somewhat better now:

+
+
+
+Home page +
+
Figure 56. Home page
+
+
+
+Data page +
+
Figure 57. Data page
+
+
+

Let us finally create the sidenav. To implement the sidenav we need to use 3 Angular Material components: mat-sidenav-container, mat-sidenav and mat-sidenav-content. +The mat-sidenav-container, as the name suggests, acts as a container for the sidenav and the associated content. So it is the parent element, and mat-sidenav and mat-sidenav-content are the children sibling elements. mat-sidenav represents the sidenav. We can put any content we want, though it is usually used to contain a list of navigational links. The mat-sidenav-content element is for containing the contents of our current page. Since we need the sidenav application-wide, we will put it in the app.component.html.

+
+
+
Listing 72. app.component.html:
+
+
...
+</mat-toolbar>
+
+<mat-sidenav-container>
+  <mat-sidenav mode="over" [disableClose]="false" #sidenav>
+    Sidenav
+  </mat-sidenav>
+  <mat-sidenav-content>
+    <router-outlet></router-outlet>
+  </mat-sidenav-content>
+</mat-sidenav-container>
+
+
+
+

The mat-sidenav has a mode property, which accepts one of the 3 values: over, push and side. It decides the behavior of the sidenav. mat-sidenav also has a disableClose property which accepts a boolean value. It toggles the behavior where we click on the backdrop or press the Esc key to close the sidenav. There are other properties which we can use to customize the appearance, behavior and position of the sidenav. You can find the properties documented online at https://material.angular.io/components/sidenav/api +We moved the router-outlet directive inside the mat-sidenav-content where it will render the routed component. +But if you check the running application in the browser, we don’t see the sidenav yet. That is because it is closed. We want to have the sidenav opened/closed at the click of the menu icon button on the left side of the header we implemented earlier. Notice we have set a local reference #sidenav on the mat-sidenav element. We can access this element and call its toggle() function to toggle open or close the sidenav.

+
+
+
Listing 73. app.component.html:
+
+
...
+  <button mat-icon-button aria-label="menu" class="menu" (click)="sidenav.toggle()">
+    <mat-icon>menu</mat-icon>
+  </button>
+...
+
+
+
+
+Sidenav works +
+
Figure 58. Sidenav is implemented
+
+
+

We can now open the sidenav by clicking the menu icon button. But it does not look right. The sidenav is only as wide as its content. Also the page does not stretch the entire viewport due to lack of content. +Let’s add the following styles to make the page fill the viewport:

+
+
+
Listing 74. app.component.scss:
+
+
...
+mat-sidenav-container {
+    position: absolute;
+    top: 64px;
+    left: 0;
+    right: 0;
+    bottom: 0;
+}
+
+
+
+

The sidenav width will be corrected when we add the navigational links to it. That is the only thing remaining to be done. Let's implement it now:

+
+
+
Listing 75. app.component.html:
+
+
...
+  <mat-sidenav [disableClose]="false" mode="over" #sidenav>
+	<mat-nav-list>
+      <a
+        id="home"
+        mat-list-item
+        [routerLink]="['./']"
+        (click)="sidenav.close()"
+        routerLinkActive="active"
+        [routerLinkActiveOptions]="{exact: true}"
+      >
+        <mat-icon matListAvatar>home</mat-icon>
+        <h3 matLine>Home</h3>
+        <p matLine>sample home page</p>
+      </a>
+      <a
+        id="sampleData"
+        mat-list-item
+        [routerLink]="['./data']"
+        (click)="sidenav.close()"
+        routerLinkActive="active"
+      >
+        <mat-icon matListAvatar>grid_on</mat-icon>
+        <h3 matLine>Data</h3>
+        <p matLine>sample data page</p>
+      </a>
+    </mat-nav-list>
+  </mat-sidenav>
+...
+
+
+
+

We use the mat-nav-list element to set a list of navigational links. We use the a tags with mat-list-item directive. We implement a click listener on each link to close the sidenav when it is clicked. The routerLink directive is used to provide the URLs to navigate to. The routerLinkActive directive is used to provide the class name which will be added to the link when its URL is visited. Here we name the class `active`. To style it, let's modify the app.component.scss file:

+
+
+
Listing 76. app.component.scss:
+
+
...
+mat-sidenav-container {
+...
+	a.active {
+        background: #8e8d8d;
+        color: #fff;
+
+        p {
+            color: #4a4a4a;
+        }
+    }
+}
+
+
+
+

Now we have a working application with a basic layout: a header with some menu and a sidenav with some navigational links.

+
+
+
+Finished application +
+
Figure 59. Finished application
+
+
+
+

Conclusion

+
+

The purpose of this guide was to provide a basic understanding of creating layouts with Angular Material. The Angular Material library has a huge collection of ready to use components which can be found at https://material.angular.io/components/categories +It has provided documentation and example usage for each of its components. Going through the documentation will give a better understanding of using Angular Material components in our devon4ng applications.

+
+
+
+
+
+

NgRx

+
+ +
+

Introduction to NgRx

+
+

NgRx is a state management framework for Angular based on the Redux pattern.

+
+
+
+

The need for client side state management

+
+

You may wonder why you should bother with state management. Usually data resides in a back-end storage system, e.g. a database, and is retrieved by the client on a per-need basis. To add, update, or delete entities from this store, clients have to invoke API endpoints at the back-end. Mimicking database-like transactions on the client side may seem redundant. However, there are many use cases for which a global client-side state is appropriate:

+
+
+
    +
  • +

    the client has some kind of global state which should survive the destruction of a component, but does not warrant server side persistence, for example: volume level of media, expansion status of menus

    +
  • +
  • +

    server side data should not be retrieved every time it is needed, either because multiple components consume it, or because it should be cached, e.g. the personal watchlist in an online streaming app

    +
  • +
  • +

    the app provides a rich experience with offline functionality, e.g. a native app built with Ionic

    +
  • +
+
+
+

Saving global states inside the services they originate from results in a data flow that is hard to follow and state becoming inconsistent due to unordered state mutations. Following the single source of truth principle, there should be a central location holding all your application’s state, just like a server side database does. State management libraries for Angular provide tools for storing, retrieving, and updating client-side state.

+
+
+
+

Why NgRx?

+
+

As stated in the introduction, devon4ng does not stipulate a particular state library, or require using one at all. However, NgRx has proven to be a robust, mature solution for this task, with good tooling and 3rd-party library support. Albeit introducing a level of indirection that requires additional effort even for simple features, the Redux concept enforces a clear separation of concerns leading to a cleaner architecture.

+
+
+

Nonetheless, you should always compare different approaches to state management and pick the best one suiting your use case. Here’s a (non-exhaustive) list of competing state management libraries:

+
+
+
    +
  • +

    Plain RxJS using the simple store described in Abstract Class Store

    +
  • +
  • +

    NgXS reduces some boilerplate of NgRx by leveraging the power of decorators and moving side effects to the store

    +
  • +
  • +

    MobX follows a more imperative approach in contrast to the functional Redux pattern

    +
  • +
  • +

    Akita also uses an imperative approach with direct setters in the store, but keeps the concept of immutable state transitions

    +
  • +
+
+
+
+

Setup

+
+

To get a quick start, use the provided template for devon4ng + NgRx.

+
+
+

To manually install the core store package together with a set of useful extensions:

+
+
+

NPM:

+
+
+
+
`npm install @ngrx/store @ngrx/effects @ngrx/entity @ngrx/store-devtools --save`
+
+
+
+

Yarn:

+
+
+
+
`yarn add @ngrx/store @ngrx/effects @ngrx/entity @ngrx/store-devtools`
+
+
+
+

We recommend to add the NgRx schematics to your project so you can create code artifacts from the command line:

+
+
+

NPM:

+
+
+
+
`npm install @ngrx/schematics --save-dev`
+
+
+
+

Yarn:

+
+
+
+
`yarn add @ngrx/schematics --dev`
+
+
+
+

Afterwards, make NgRx your default schematics provider, so you don’t have to type the qualified package name every time:

+
+
+
+
`ng config cli.defaultCollection @ngrx/schematics`
+
+
+
+

If you have custom settings for Angular schematics, you have to configure them as described here.

+
+
+
+

Concept

+
+
+NgRx Architecture +
+
Figure 60. NgRx architecture overview
+
+
+

Figure 1 gives an overview of the NgRx data flow. The single source of truth is managed as an immutable state object by the store. Components dispatch actions to trigger state changes. Actions are handed over to reducers, which take the current state and action data to compute the next state. Actions are also consumed by effects, which perform side-effects such as retrieving data from the back-end, and may dispatch new actions as a result. Components subscribe to state changes using selectors.

+
+
+

Continue with Creating a Simple Store.

+
+ +
+
+

State, Selection and Reducers

+ +
+
+

Creating a Simple Store

+
+

In the following pages we use the example of an online streaming service. We will model a particular feature, a watchlist that can be populated by the user with movies she or he wants to see in the future.

+
+
+
+

Initializing NgRx

+
+

If you’re starting fresh, you first have to initialize NgRx and create a root state. The fastest way to do this is using the schematic:

+
+
+
+
`ng generate @ngrx/schematics:store State --root --module app.module.ts`
+
+
+
+

This will automatically generate a root store and register it in the app module. Next we generate a feature module for the watchlist:

+
+
+

` ng generate module watchlist`

+
+
+

and create a corresponding feature store:

+
+
+

` ng generate store watchlist/Watchlist -m watchlist.module.ts`

+
+
+

This generates a file watchlist/reducers/index.ts with the reducer function, and registers the store in the watchlist module declaration.

+
+
+
+

== =

+
+

If you’re getting an error Schematic "store" not found in collection "@schematics/angular", this means you forgot to register the NgRx schematics as default. +== == =

+
+
+

Next, add the WatchlistModule to the AppModule imports so the feature store is registered when the application starts. We also added the store devtools which we will use later, resulting in the following file:

+
+
+

app.module.ts

+
+
+
+
import { BrowserModule } from '@angular/platform-browser';
+import { NgModule } from '@angular/core';
+
+import { AppComponent } from './app.component';
+import { EffectsModule } from '@ngrx/effects';
+import { AppEffects } from './app.effects';
+import { StoreModule } from '@ngrx/store';
+import { reducers, metaReducers } from './reducers';
+import { StoreDevtoolsModule } from '@ngrx/store-devtools';
+import { environment } from '../environments/environment';
+import { WatchlistModule } from './watchlist/watchlist.module';
+
+@NgModule({
+  declarations: [
+    AppComponent
+  ],
+  imports: [
+    BrowserModule,
+    WatchlistModule,
+    StoreModule.forRoot(reducers, { metaReducers }),
+    // Instrumentation must be imported after importing StoreModule (config is optional)
+    StoreDevtoolsModule.instrument({
+      maxAge: 25, // Retains last 25 states
+      logOnly: environment.production, // Restrict extension to log-only mode
+    }),
+    !environment.production ? StoreDevtoolsModule.instrument() : []
+  ],
+  providers: [],
+  bootstrap: [AppComponent]
+})
+export class AppModule { }
+
+
+
+
+

Create an entity model and initial state

+
+

We need a simple model for our list of movies. Create a file watchlist/models/movies.ts and insert the following code:

+
+
+
+
export interface Movie {
+    id: number;
+    title: string;
+    releaseYear: number;
+    runtimeMinutes: number;
+    genre: Genre;
+}
+
+export type Genre = 'action' | 'fantasy' | 'sci-fi' | 'romantic' | 'comedy' | 'mystery';
+
+export interface WatchlistItem {
+    id: number;
+    movie: Movie;
+    added: Date;
+    playbackMinutes: number;
+}
+
+
+
+
+

== =

+
+

We discourage putting several types into the same file and do this only for the sake of keeping this tutorial brief. +== == =

+
+
+

Later we will learn how to retrieve data from the back-end using effects. For now we will create an initial state for the user with a default movie.

+
+
+

State is defined and transformed by a reducer function. Let’s create a watchlist reducer:

+
+
+
+
```
+cd watchlist/reducers
+ng g reducer WatchlistData --reducers index.ts
+```
+
+
+
+

Open the generated file watchlist-data.reducer.ts. You see three exports: The State interface defines the shape of the state. There is only one instance of a feature state in the store at all times. The initialState constant is the state at application creation time. The reducer function will later be called by the store to produce the next state instance based on the current state and an action object.

+
+
+

Let’s put a movie into the user’s watchlist:

+
+
+

watchlist-data.reducer.ts

+
+
+
+
export interface State {
+  items: WatchlistItem[];
+}
+
+export const initialState: State = {
+  items: [
+    {
+      id: 42,
+      movie: {
+        id: 1,
+        title: 'Die Hard',
+        genre: 'action',
+        releaseYear: 1988,
+        runtimeMinutes: 132
+      },
+      playbackMinutes: 0,
+      added: new Date(),
+    }
+  ]
+};
+
+
+
+
+

Select the current watchlist

+
+

State slices can be retrieved from the store using selectors.

+
+
+

Create a watchlist component:

+
+
+
+
`ng g c watchlist/Watchlist`
+
+
+
+

and add it to the exports of WatchlistModule. Also, replace app.component.html with

+
+
+
+
<app-watchlist></app-watchlist>
+
+
+
+

State observables are obtained using selectors. They are memoized by default, meaning that you don’t have to worry about performance if you use complicated calculations when deriving state — these are only performed once per state emission.

+
+
+

Add a selector to watchlist-data.reducer.ts:

+
+
+
+
`export const getAllItems = (state: State) => state.items;`
+
+
+
+

Next, we have to re-export the selector for this sub-state in the feature reducer. Modify the watchlist/reducers/index.ts like this:

+
+
+

watchlist/reducers/index.ts

+
+
+
+
import {
+  ActionReducer,
+  ActionReducerMap,
+  createFeatureSelector,
+  createSelector,
+  MetaReducer
+} from '@ngrx/store';
+import { environment } from 'src/environments/environment';
+import * as fromWatchlistData from './watchlist-data.reducer';
+import * as fromRoot from 'src/app/reducers/index';
+
+export interface WatchlistState { (1)
+  watchlistData: fromWatchlistData.State;
+}
+
+export interface State extends fromRoot.State { (2)
+  watchlist: WatchlistState;
+}
+
+export const reducers: ActionReducerMap<WatchlistState> = { (3)
+  watchlistData: fromWatchlistData.reducer,
+};
+
+export const metaReducers: MetaReducer<WatchlistState>[] = !environment.production ? [] : [];
+
+export const getFeature = createFeatureSelector<State, WatchlistState>('watchlist'); (4)
+
+export const getWatchlistData = createSelector( (5)
+  getFeature,
+  state => state.watchlistData
+);
+
+export const getAllItems = createSelector( (6)
+  getWatchlistData,
+  fromWatchlistData.getAllItems
+);
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + +
1The feature state, each member is managed by a different reducer
2Feature states are registered by the forFeature method. This interface provides a typesafe path from root to feature state.
3Tie sub-states of a feature state to the corresponding reducers
4Create a selector to access the 'watchlist' feature state
5select the watchlistData sub state
6re-export the selector
+
+
+

Note how createSelector allows to chain selectors. This is a powerful tool that also allows for selecting from multiple states.

+
+
+

You can use selectors as pipeable operators:

+
+
+

watchlist.component.ts

+
+
+
+
export class WatchlistComponent {
+  watchlistItems$: Observable<WatchlistItem[]>;
+
+  constructor(
+    private store: Store<fromWatchlist.State>
+  ) {
+    this.watchlistItems$ = this.store.pipe(select(fromWatchlist.getAllItems));
+  }
+}
+
+
+
+

watchlist.component.html

+
+
+
+
<h1>Watchlist</h1>
+<ul>
+    <li *ngFor="let item of watchlistItems$ | async">{{item.movie.title}} ({{item.movie.releaseYear}}): {{item.playbackMinutes}}/{{item.movie.runtimeMinutes}} min watched</li>
+</ul>
+
+
+
+
+

Dispatching an action to update watched minutes

+
+

We track the user’s current progress at watching a movie as the playbackMinutes property. After closing a video, the watched minutes have to be updated. In NgRx, state is updated by dispatching actions. An action is an object with a (globally unique) type discriminator and an optional payload.

+
+
+
+

== Creating the action

+
+

Create a file playback/actions/index.ts. In this example, we do not further separate the actions per sub state. Actions can be defined by using action creators:

+
+
+

playback/actions/index.ts

+
+
+
+
import { createAction, props, union } from '@ngrx/store';
+
+export const playbackFinished = createAction('[Playback] Playback finished', props<{ movieId: number, stoppedAtMinute: number }>());
+
+const actions = union({
+    playbackFinished
+});
+
+export type ActionsUnion = typeof actions;
+
+
+
+

First we specify the type, followed by a call to the payload definition function. Next, we create a union of all possible actions for this file using union, which allows us to access action payloads in the reducer in a typesafe way.

+
+
+
+

== =

+
+

Action types should follow the naming convention [Source] Event, e.g. [Recommended List] Hide Recommendation or [Auth API] Login Success. Think of actions rather as events than commands. You should never use the same action at two different places (you can still handle multiple actions the same way). This facilitates tracing the source of an action. For details see Good Action Hygiene with NgRx by Mike Ryan (video). +== == =

+
+
+
+

== Dispatch

+
+

We skip the implementation of an actual video playback page and simulate watching a movie in 10 minute segments by adding a link in the template:

+
+
+

watchlist-component.html

+
+
+
+
<li *ngFor="let item of watchlistItems$ | async">... <button (click)="stoppedPlayback(item.movie.id, item.playbackMinutes + 10)">Add 10 Minutes</button></li>
+
+
+
+

watchlist-component.ts

+
+
+
+
import * as playbackActions from 'src/app/playback/actions';
+...
+  stoppedPlayback(movieId: number, stoppedAtMinute: number) {
+    this.store.dispatch(playbackActions.playbackFinished({ movieId, stoppedAtMinute }));
+  }
+
+
+
+
+

== State reduction

+
+

Next, we handle the action inside the watchlistData reducer. Note that actions can be handled by multiple reducers and effects at the same time to update different states, for example if we’d like to show a rating modal after playback has finished.

+
+
+

watchlist-data.reducer.ts

+
+
+
+
export function reducer(state = initialState, action: playbackActions.ActionsUnion): State {
+  switch (action.type) {
+    case playbackActions.playbackFinished.type:
+      return {
+        ...state,
+        items: state.items.map(updatePlaybackMinutesMapper(action.movieId, action.stoppedAtMinute))
+      };
+
+    default:
+      return state;
+  }
+}
+
+export function updatePlaybackMinutesMapper(movieId: number, stoppedAtMinute: number) {
+  return (item: WatchlistItem) => {
+    if (item.movie.id == movieId) {
+      return {
+        ...item,
+        playbackMinutes: stoppedAtMinute
+      };
+    } else {
+      return item;
+    }
+  };
+}
+
+
+
+

Note how we changed the reducer’s function signature to reference the actions union. The switch-case handles all incoming actions to produce the next state. The default case handles all actions a reducer is not interested in by returning the state unchanged. Then we find the watchlist item corresponding to the movie with the given id and update the playback minutes. Since state is immutable, we have to clone all objects down to the one we would like to change using the object spread operator (…​).

+
+
+
+

== =

+
+

Selectors rely on object identity to decide whether the value has to be recalculated. Do not clone objects that are not on the path to the change you want to make. This is why updatePlaybackMinutesMapper returns the same item if the movie id does not match. +== == =

+
+
+
+

== Alternative state mapping with Immer

+
+

It can be hard to think in immutable changes, especially if your team has a strong background in imperative programming. In this case, you may find the Immer library convenient, which allows to produce immutable objects by manipulating a proxied draft. The same reducer can then be written as:

+
+
+

watchlist-data.reducer.ts with Immer

+
+
+
+
import { produce } from 'immer';
+...
+case playbackActions.playbackFinished.type:
+      return produce(state, draft => {
+        const itemToUpdate = draft.items.find(item => item.movie.id == action.movieId);
+        if (itemToUpdate) {
+          itemToUpdate.playbackMinutes = action.stoppedAtMinute;
+        }
+      });
+
+
+
+

Immer works out of the box with plain objects and arrays.

+
+
+
+

== Redux devtools

+
+

If the StoreDevToolsModule is instrumented as described above, you can use the browser extension Redux devtools to see all dispatched actions and the resulting state diff, as well as the current state, and even travel back in time by undoing actions.

+
+
+
+Redux Devtools +
+
Figure 61. Redux devtools
+
+
+

Continue with learning about effects

+
+ +
+
+

Side effects with NgRx/Effects

+
+

Reducers are pure functions, meaning they are side-effect free and deterministic. Many actions however have side effects like sending messages or displaying a toast notification. NgRx encapsulates these actions in effects.

+
+
+

Let’s build a recommended movies list so the user can add movies to their watchlist.

+
+
+
+

Obtaining the recommendation list from the server

+
+

Create a module for recommendations and add stores and states as in the previous chapter. Add EffectsModule.forRoot([]) to the imports in AppModule below StoreModule.forRoot(). Add effects to the feature module:

+
+
+
+
ng generate effect recommendation/Recommendation -m recommendation/recommendation.module.ts
+
+
+
+

We need actions for loading the movie list, success and failure cases:

+
+
+

recommendation/actions/index.ts

+
+
+
+
import { createAction, props, union } from '@ngrx/store';
+import { Movie } from 'src/app/watchlist/models/movies';
+
+export const loadRecommendedMovies = createAction('[Recommendation List] Load movies');
+export const loadRecommendedMoviesSuccess = createAction('[Recommendation API] Load movies success', props<{movies: Movie[]}>());
+export const loadRecommendedMoviesFailure = createAction('[Recommendation API] Load movies failure', props<{error: any}>());
+
+const actions = union({
+    loadRecommendedMovies,
+    loadRecommendedMoviesSuccess,
+    loadRecommendedMoviesFailure
+});
+
+export type ActionsUnion = typeof actions;
+
+
+
+

In the reducer, we use a loading flag so the UI can show a loading spinner. The store is updated with arriving data.

+
+
+

recommendation/actions/index.ts

+
+
+
+
export interface State {
+  items: Movie[];
+  loading: boolean;
+}
+
+export const initialState: State = {
+  items: [],
+  loading: false
+};
+
+export function reducer(state = initialState, action: recommendationActions.ActionsUnion): State {
+  switch (action.type) {
+    case '[Recommendation List] Load movies':
+      return {
+        ...state,
+        items: [],
+        loading: true
+      };
+
+    case '[Recommendation API] Load movies failure':
+      return {
+        ...state,
+          loading: false
+      };
+
+    case '[Recommendation API] Load movies success':
+      return {
+        ...state,
+        items: action.movies,
+        loading: false
+      };
+
+    default:
+      return state;
+  }
+}
+
+export const getAll = (state: State) => state.items;
+export const isLoading = (state: State) => state.loading;
+
+
+
+

We need an API service to talk to the server. For demonstration purposes, we simulate an answer delayed by one second:

+
+
+

recommendation/services/recommendation-api.service.ts

+
+
+
+
@Injectable({
+  providedIn: 'root'
+})
+export class RecommendationApiService {
+
+  private readonly recommendedMovies: Movie[] = [
+    {
+      id: 2,
+      title: 'The Hunger Games',
+      genre: 'sci-fi',
+      releaseYear: 2012,
+      runtimeMinutes: 144
+    },
+    {
+      id: 4,
+      title: 'Avengers: Endgame',
+      genre: 'fantasy',
+      releaseYear: 2019,
+      runtimeMinutes: 181
+    }
+  ];
+
+  loadRecommendedMovies(): Observable<Movie[]> {
+    return of(this.recommendedMovies).pipe(delay(1000));
+  }
+}
+
+
+
+

Here are the effects:

+
+
+

recommendation/recommendation.effects.ts

+
+
+
+
@Injectable()
+export class RecommendationEffects {
+
+  constructor(
+    private actions$: Actions,
+    private recommendationApi: RecommendationApiService,
+  ) { }
+
+  @Effect()
+  loadMovies$ = this.actions$.pipe(
+    ofType(recommendationActions.loadRecommendedMovies.type),
+    switchMap(() => this.recommendationApi.loadRecommendedMovies().pipe(
+      map(movies => recommendationActions.loadRecommendedMoviesSuccess({ movies })),
+      catchError(error => of(recommendationActions.loadRecommendedMoviesFailure({ error })))
+    ))
+  );
+}
+
+
+
+

Effects are always observables and return actions. In this example, we consume the actions observable provided by NgRx and listen only for the loadRecommendedMovies actions by using the ofType operator. Using switchMap, we map to a new observable, one that loads movies and maps the successful result to a new loadRecommendedMoviesSuccess action or a failure to loadRecommendedMoviesFailure. In a real application we would show a notification in the error case.

+
+
+
+

==

+
+

If an effect should not dispatch another action, return an empty observable. +== ==

+
+ + +
+
+

Simplifying CRUD with NgRx/Entity

+
+

Most of the time when manipulating entries in the store, we like to create, add, update, or delete entries (CRUD). NgRx/Entity provides convenience functions if each item of a collection has an id property. Luckily all our entities already have this property.

+
+
+

Let’s add functionality to add a movie to the watchlist. First, create the required action:

+
+
+

recommendation/actions/index.ts

+
+
+
+
export const addToWatchlist = createAction('[Recommendation List] Add to watchlist',
+    props<{ watchlistItemId: number, movie: Movie, addedAt: Date }>());
+
+
+
+
+

==

+
+

You may wonder why the Date object is not created inside the reducer instead, since it should always be the current time. However, remember that reducers should be deterministic state machines — State A + Action B should always result in the same State C. This makes reducers easily testable. +== ==

+
+
+

Then, rewrite the watchlistData reducer to make use of NgRx/Entity:

+
+
+

recommendation/actions/index.ts

+
+
+
+
export interface State extends EntityState<WatchlistItem> { (1)
+}
+
+export const entityAdapter = createEntityAdapter<WatchlistItem>(); (2)
+
+export const initialState: State = entityAdapter.getInitialState(); (3)
+
+const entitySelectors = entityAdapter.getSelectors();
+
+export function reducer(state = initialState, action: playbackActions.ActionsUnion | recommendationActions.ActionsUnion): State {
+  switch (action.type) {
+    case playbackActions.playbackFinished.type:
+      const itemToUpdate = entitySelectors
+      .selectAll(state) (4)
+      .find(item => item.movie.id == action.movieId);
+      if (itemToUpdate) {
+        return entityAdapter.updateOne({ (5)
+          id: itemToUpdate.id,
+          changes: { playbackMinutes: action.stoppedAtMinute } (6)
+        }, state);
+      } else {
+        return state;
+      }
+
+    case recommendationActions.addToWatchlist.type:
+      return entityAdapter.addOne({id: action.watchlistItemId, movie: action.movie, added: action.addedAt, playbackMinutes: 0}, state);
+
+    default:
+      return state;
+  }
+}
+
+
+export const getAllItems = entitySelectors.selectAll;
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + +
1NgRx/Entity requires state to extend EntityState. It provides a list of ids and a dictionary of id ⇒ entity entries
2The entity adapter provides data manipulation operations and selectors
3The state can be initialized with getInitialState(), which accepts an optional object to define any additional state beyond EntityState
4selectAll returns an array of all entities
5All adapter operations consume the state object as the last argument and produce a new state
6Update methods accept a partial change definition; you don’t have to clone the object
+
+
+

This concludes the tutorial on NgRx. If you want to learn about advanced topics such as selectors with arguments, testing, or router state, head over to the official NgRx documentation.

+
+
+
+
+
+

Cookbook

+
+ +
+

Abstract Class Store

+
+

The following solution presents a base class for implementing stores which handle state and its transitions. +Working with the base class achieves:

+
+
+
    +
  • +

    common API across all stores

    +
  • +
  • +

    logging (when activated in the constructor)

    +
  • +
  • +

    state transitions are asynchronous by design - sequential order problems are avoided

    +
  • +
+
+
+
Listing 77. Usage Example
+
+
@Injectable()
+export class ModalStore extends Store<ModalState> {
+
+  constructor() {
+    super({ isOpen: false }, !environment.production);
+  }
+
+  closeDialog() {
+    this.dispatchAction('Close Dialog', (currentState) => ({...currentState, isOpen: false}));
+  }
+
+  openDialog() {
+    this.dispatchAction('Open Dialog', (currentState) => ({...currentState, isOpen: true}));
+  }
+
+}
+
+
+
+
Listing 78. Abstract Base Class Store
+
+
import { OnDestroy } from '@angular/core';
+import { BehaviorSubject } from 'rxjs/BehaviorSubject';
+import { Observable } from 'rxjs/Observable';
+import { intersection, difference } from 'lodash';
+import { map, distinctUntilChanged, observeOn } from 'rxjs/operators';
+import { Subject } from 'rxjs/Subject';
+import { queue } from 'rxjs/scheduler/queue';
+import { Subscription } from 'rxjs/Subscription';
+
+interface Action<T> {
+  name: string;
+  actionFn: (state: T) => T;
+}
+
+/** Base class for implementing stores. */
+export abstract class Store<T> implements OnDestroy {
+
+  private actionSubscription: Subscription;
+  private actionSource: Subject<Action<T>>;
+  private stateSource: BehaviorSubject<T>;
+  state$: Observable<T>;
+
+  /**
+   * Initializes a store with initial state and logging.
+   * @param initialState Initial state
+   * @param logChanges When true state transitions are logged to the console.
+   */
+  constructor(initialState: T, public logChanges = false) {
+    this.stateSource = new BehaviorSubject<T>(initialState);
+    this.state$ = this.stateSource.asObservable();
+    this.actionSource = new Subject<Action<T>>();
+
+    this.actionSubscription = this.actionSource.pipe(observeOn(queue)).subscribe(action => {
+      const currentState = this.stateSource.getValue();
+      const nextState = action.actionFn(currentState);
+
+      if (this.logChanges) {
+        this.log(action.name, currentState, nextState);
+      }
+
+      this.stateSource.next(nextState);
+    });
+  }
+
+  /**
+   * Selects a property from the stores state.
+   * Will do distinctUntilChanged() and map() with the given selector.
+   * @param selector Selector function which selects the needed property from the state.
+   * @returns Observable of return type from selector function.
+   */
+  select<TX>(selector: (state: T) => TX): Observable<TX> {
+    return this.state$.pipe(
+      map(selector),
+      distinctUntilChanged()
+    );
+  }
+
+  protected dispatchAction(name: string, action: (state: T) => T) {
+    this.actionSource.next({ name, actionFn: action });
+  }
+
+  private log(actionName: string, before: T, after: T) {
+    const result: { [key: string]: { from: any, to: any} } = {};
+    const sameProbs = intersection(Object.keys(after), Object.keys(before));
+    const newProbs = difference(Object.keys(after), Object.keys(before));
+    for (const prop of newProbs) {
+      result[prop] = { from: undefined, to: (<any>after)[prop] };
+    }
+
+    for (const prop of sameProbs) {
+      if ((<any>before)[prop] !==  (<any>after)[prop]) {
+        result[prop] = { from: (<any>before)[prop], to: (<any>after)[prop] };
+      }
+    }
+
+    console.log(this.constructor.name, actionName, result);
+  }
+
+  ngOnDestroy() {
+    this.actionSubscription.unsubscribe();
+  }
+
+}
+
+
+ +
+
+

Add Electron to an Angular application using Angular CLI

+
+

This cookbook recipe explains how to integrate Electron in an Angular 10+ application. Electron is a framework for creating native applications with web technologies like JavaScript, HTML, and CSS. As an example, very well-known applications such as Visual Studio Code, Atom, Slack or Skype (and many more) are using Electron too.

+
+
+ + + + + +
+ + +At the moment of this writing Angular 11.2.0, Electron 11.2.3 and Electron-builder 22.9.1 were the versions available. +
+
+
+

Here are the steps to achieve this goal. Follow them in order.

+
+
+
+

Add Electron and other relevant dependencies

+
+

There are two different approaches to add the dependencies in the package.json file:

+
+
+
    +
  • +

    Writing the dependencies directly in that file.

    +
  • +
  • +

    Installing using npm install or yarn add.

    +
  • +
+
+
+ + + + + +
+ + +Please remember if the project has a package-lock.json or yarn.lock file use npm or yarn respectively. +
+
+
+

In order to add the dependencies directly in the package.json file, include the following lines in the devDependencies section:

+
+
+
+
"devDependencies": {
+...
+    "electron": "^11.2.3",
+    "electron-builder": "^22.9.1",
+...
+},
+
+
+
+

As indicated above, instead of this npm install can be used:

+
+
+
+
$ npm install -D electron electron-builder
+
+
+
+

Or with yarn:

+
+
+
+
$ yarn add -D electron electron-builder
+
+
+
+
+

Create the necessary typescript configurations

+
+

In order to initiate electron in an angular app we need to modify the tsconfig.json file and create a tsconfig.serve.json and a tsconfig.base.json in the root folder.

+
+
+
+

== tsconfig.json

+
+

This file needs to be modified to create references to ./src/tsconfig.app.json and ./src/tsconfig.spec.json to support different configurations.

+
+
+
+
{
+  "files": [],
+  "references": [
+    {
+      "path": "./src/tsconfig.app.json"
+    },
+    {
+      "path": "./src/tsconfig.spec.json"
+    }
+  ]
+}
+
+
+
+
+

== tsconfig.app.json

+
+
+
{
+  "extends": "../tsconfig.base.json",
+  "compilerOptions": {
+    "outDir": "../app",
+    "module": "es2015",
+    "baseUrl": "",
+    "types": []
+  },
+  "include": [
+    "**/*.ts",
+  ],
+  "exclude": [
+    "**/*.spec.ts"
+  ],
+  "angularCompilerOptions": {
+    "fullTemplateTypeCheck": true,
+    "strictInjectionParameters": true,
+    "preserveWhitespaces": true
+  }
+}
+
+
+
+
+

== tsconfig.spec.json

+
+
+
{
+  "extends": "../tsconfig.base.json",
+  "compilerOptions": {
+    "outDir": "../spec",
+    "module": "commonjs",
+    "types": [
+      "jasmine",
+      "node"
+    ]
+  },
+  "files": [
+    "test.ts",
+  ],
+  "include": [
+    "**/*.spec.ts",
+    "**/*.d.ts"
+  ],
+  "exclude": [
+    "dist",
+    "release",
+    "node_modules"
+  ]
+}
+
+
+
+
+

== tsconfig.base.json

+
+

This is shared between tsconfig.app.json and tsconfig.spec.json and it will be extended on each config file.

+
+
+
+
{
+  "compileOnSave": false,
+  "compilerOptions": {
+    "outDir": "./dist",
+    "sourceMap": true,
+    "declaration": false,
+    "moduleResolution": "node",
+    "emitDecoratorMetadata": true,
+    "experimentalDecorators": true,
+    "target": "es5",
+    "typeRoots": [
+      "node_modules/@types"
+    ],
+    "lib": [
+      "es2017",
+      "es2016",
+      "es2015",
+      "dom"
+    ]
+  },
+  "files": [
+    "electron-main.ts",
+    "src/polyfills.ts"
+  ],
+  "include": [
+    "src/**/*.d.ts"
+  ],
+  "exclude": [
+    "node_modules"
+  ]
+}
+
+
+
+
+

== tsconfig.serve.json

+
+

In the root, tsconfig.serve.json needs to be created. This typescript config file is going to be used when we serve electron:

+
+
+
+
{
+  "compilerOptions": {
+    "outDir": ".",
+    "sourceMap": true,
+    "declaration": false,
+    "moduleResolution": "node",
+    "emitDecoratorMetadata": true,
+    "experimentalDecorators": true,
+    "target": "es5",
+    "typeRoots": [
+      "node_modules/@types"
+    ],
+    "lib": [
+      "es2017",
+      "dom"
+    ]
+  },
+  "include": [
+    "electron-main.ts"
+  ],
+  "exclude": [
+    "node_modules",
+    "**/*.spec.ts"
+  ]
+}
+
+
+
+
+

Add Electron build configuration

+
+

In order to configure electron builds properly we need to create a new JSON file in our application, let’s call it electron-builder.json. For more information and fine tuning please refer to the Electron Builder official documentation.

+
+
+

The contents of the file will be something similar to the following:

+
+
+
+
{
+  "productName": "devon4ngElectron",
+  "directories":{
+    "output": "./builder-release"
+  },
+  "win": {
+    "icon": "dist/assets/icons",
+    "target": [
+      "portable"
+    ]
+  },
+  "mac": {
+    "icon": "dist/assets/icons",
+    "target": [
+      "dmg"
+    ]
+  },
+  "linux": {
+    "icon": "dist/assets/icons",
+    "target": [
+      "AppImage"
+    ]
+  }
+}
+
+
+
+

There are two important things in this file:

+
+
+
    +
  1. +

    "output": this is where electron builder is going to build our application

    +
  2. +
  3. +

    "icon": in every OS possible there is an icon parameter, the route to the icon folder that will be created after building with angular needs to be used here. This will make it so the electron builder can find the icons and build.

    +
  4. +
+
+
+
+

Modify angular.json

+
+

angular.json has to be modified so the project is built inside /dist without an intermediate folder.

+
+
+
+
{
+  "architect": {
+    "build": {
+      "outputPath": "dist"
+    }
+  }
+}
+
+
+
+
+

Create the electron window in electron-main.ts

+
+

In order to use electron, a file needs to be created at the root of the application (electron-main.ts). This file will create a window with different settings checking if we are using --serve as an argument:

+
+
+
+
import { app, BrowserWindow } from 'electron';
+import * as path from 'path';
+import * as url from 'url';
+
+let win: any;
+const args: any = process.argv.slice(1);
+const serve: any = args.some((val) => val == '--serve');
+
+const createWindow:any = ()=>{
+  // Create the browser window.
+  win = new BrowserWindow({
+    fullscreen: true,
+    webPreferences: {
+      nodeIntegration: true,
+    }
+  });
+
+  if (serve) {
+    require('electron-reload')(__dirname, {
+      electron: require(`${__dirname}/node_modules/electron`)
+    });
+    win.loadURL('http://localhost:4200');
+  } else {
+    win.loadURL(
+      url.format({
+        pathname: path.join(__dirname, 'dist/index.html'),
+        protocol: 'file:',
+        slashes: true
+      })
+    );
+  }
+
+  if (serve) {
+    win.webContents.openDevTools();
+  }
+
+  // Emitted when the window is closed.
+  win.on('closed', () => {
+    // Dereference the window object, usually you would store window
+    // in an array if your app supports multi windows, this is the time
+    // when you should delete the corresponding element.
+    // tslint:disable-next-line:no-null-keyword
+    win = null;
+  });
+}
+
+try {
+  // This method will be called when Electron has finished
+  // initialization and is ready to create browser windows.
+  // Some APIs can only be used after this event occurs.
+  app.on('ready', createWindow);
+
+   // Quit when all windows are closed.
+  app.on('window-all-closed', () => {
+    // On OS X it is common for applications and their menu bar
+    // to stay active until the user quits explicitly with Cmd + Q
+    if (process.platform !==  'darwin') {
+      app.quit();
+    }
+  });
+
+   app.on('activate', () => {
+    // On OS X it's common to re-create a window in the app when the
+    // dock icon is clicked and there are no other windows open.
+    if (win == null) {
+      createWindow();
+    }
+  });
+} catch (e) {
+  // Catch Error
+  // throw e;
+}
+
+
+
+
+

Add the electron window and improve the package.json scripts

+
+

Inside package.json the electron window that will be transformed to electron-main.js when building needs to be added.

+
+
+
+
{
+  ....
+  "main": "electron-main.js",
+  "scripts": {...}
+  ....
+}
+
+
+
+

The scripts section in the package.json can be improved to avoid running too verbose commands. As a very complete example we can take a look at the My Thai Star scripts section and copy the lines useful in your project. In any case, we recommend adding at least the following lines:

+
+
+
+
  "scripts": {
+    "ng": "ng",
+    "start": "ng serve",
+    "build": "ng build",
+    "test": "ng test",
+    "lint": "ng lint",
+    "e2e": "ng e2e",
+    "electron:tsc": "tsc -p tsconfig.serve.json",
+    "electron:run": "npm run electron:tsc && ng build --base-href ./ && npx electron .",
+    "electron:serve": "npm run electron:tsc && npx electron . --serve",
+    "electron:pack": "npm run electron:tsc && electron-builder --dir --config electron-builder.json",
+    "electron:build": "npm run electron:tsc && electron-builder --config electron-builder.json build"
+  },
+
+
+
+

The electron: scripts do the following:

+
+
+
    +
  • +

    electron:tsc: Compiles electron TS files.

    +
  • +
  • +

    electron:run: Serves Angular app and runs electron.

    +
  • +
  • +

    electron:serve: Serves electron with an already running angular app (i.e. a ng serve command running on another terminal).

    +
  • +
  • +

    electron:pack: Packs electron app.

    +
  • +
  • +

    electron:build: Builds electron app.

    +
  • +
+
+
+
+

Add Electron to an Angular application using Nx CLI

+
+

Creating an Electron app is very easy and straight-forward if you are using Nx CLI. As a pre-requisite, you should already have an application in your Nx workspace which you want to run as a front-end in your Electron app. (You can follow this guide if you want to get started with Nx).

+
+
+

Follow the steps below to develop an Electron app in your Nx workspace:

+
+
+
+

Install nx-electron

+
+

Install nx-electron using the command:

+
+
+
+
  npm install -D nx-electron
+
+
+
+

This will add the packages electron and nx-electron as dev dependencies to your Nx workspace. This will help us generate our Electron app in the next step.

+
+
+
+

Generate your Electron app

+
+

Once you have installed nx-electron, you can generate your electron app using the command:

+
+
+
+
  nx g nx-electron:app <electron-app-name> --frontendProject=<frontend-app-name>
+
+
+
+

And that is it! You have generated your Electron app already. All the configuration files (tsconfig.*) are generated for you under <electron-app-name> in your Nx workspace.

+
+
+
+

Serving your app

+
+

You can use this command to serve your Electron app:

+
+
+
+
  nx run-many --target=serve --projects=<frontend-app-name>,<electron-app-name> --parallel
+
+
+
+

If you see a blank application, it is because the Electron app was served before the front-end was served. To avoid this, you can serve the front-end and back-end separately, (that is, serve the back-end only after the front-end is served).

+
+
+
+

Building your app

+
+

The command for building your Electron app in Nx is similar to the serve command above, you only change the target from serve to build:

+
+
+
+
  nx run-many --target=build --projects=<frontend-app-name>,<electron-app-name> --parallel
+
+
+
+
+

Packaging your app

+
+

Make sure you have built your app before you try to package it using the following command:

+
+
+
+
  nx run <electron-app-name>:package [--options]
+
+
+
+

The options that can be passed can be found here.

+
+
+

You can find a working example of an Electron app in devon4ts-samples.

+
+
+

Unresolved include directive in modules/ROOT/pages/master-devon4ng.adoc - include::guide-angular-mock-service.adoc.adoc[]

+
+ +
+
+

Testing e2e with Cypress

+
+

This guide will cover the basics of e2e testing using Cypress.

+
+
+

Cypress is a framework “all in one” that provides the necessary libraries to write specific e2e tests, without the need of Selenium.

+
+
+

Why Cypress?

+
+
+
    +
  • +

    Uses JavaScript

    +
  • +
  • +

    It works directly with the browser so the compatibility with the front-end framework the project uses (in this case Angular) is not a problem.

    +
  • +
  • +

    Easy cross browser testing

    +
  • +
+
+
+
+

Setup

+
+

Install +First of all we need to install it, we can use npm install:

+
+
+
+
$ npm install -D cypress
+
+
+
+

Or we can install it with yarn:

+
+
+
+
$ yarn add -D cypress
+
+
+
+

We need to run Cypress in order to get the folder tree downloaded, then create a tsconfig.json file inside cypress folder to add the typescript configuration.

+
+
+
+
$ ./node_modules/.bin/cypress open
+
+
+
+
Listing 79. tsconfig.json
+
+
{
+  "compilerOptions": {
+    "strict": true,
+    "baseUrl": "../node_modules",
+    "target": "es5",
+    "lib": ["es5", "dom"],
+    "types": ["cypress"]
+  },
+  "include": [
+    "**/*.ts"
+  ]
+}
+
+
+
+

BaseUrl

+
+
+

Let’s setup the base URL so when we run the tests cypress will "navigate" to the right place, go to cypress.json on the root of the project.

+
+
+
Listing 80. cypress.json
+
+
{
+  "baseUrl": "http://localhost:4200"
+}
+
+
+
+
+

Files / Structure

+
+
+
/cypress
+  tsconfig.json
+  /fixtures
+    - example.json
+  /integration
+    - button.spec.ts
+    - test.spec.ts
+    /examples
+  /plugins
+    - index.js
+  /support
+    - commands.js
+    - index.js
+
+
+
+

tsconfig.json for typescript configuration.

+
+
+

fixtures to store our mock data or files (images, mp3…​) to use on our tests.

+
+
+

integration is where our tests go, by default it comes with an examples folder with tested samples.

+
+
+

plugins is where the configuration files of the plugins go.

+
+
+

support to add custom commands.

+
+
+
+

== =

+
+

If you are using Nx, it automatically generates a e2e cypress project for every project that you generate. So you already get the configuration files like tsconfig.json and cypress.json and also get the folder structure described above. This helps you focus more on writing your tests rather than setting up Cypress.

+
+
+
+

== =

+ +
+
+

Tests

+
+

The structure is the same as Mocha's.

+
+
+

First, we create a file, for example form.spec.ts, inside we define a context to group all our tests referred to the same subject.

+
+
+
Listing 81. form.spec.ts
+
+
context('Button page', () => {
+  beforeEach(() => {
+    cy.visit('/');
+  });
+  it('should have button',()=>{
+    cy.get('button').should('exist');
+  });
+  it('should contain PRESS',()=>{
+    cy.contains('button', 'PRESS');
+  });
+});
+
+
+
+
beforeEach
+

Visit '/' before every test.

+
+
+
it
+

Inside we write the test.

+
+
+

The result:

+
+
+
+contextImg +
+
+
+

For more info check Cypress documentation

+
+
+

On kitchensink +you can find an official cypress demo with all the commands being used.

+
+
+
+

Fixtures

+
+

We use fixtures to mock data, it can be a json, an image, video…​

+
+
+
+
{
+  "name": "Dummy name",
+  "phone": "999 99 99 99",
+  "body": "Mock data"
+}
+
+
+
+

You can store multiple mocks on the same fixture file.

+
+
+
+
{
+  "create":{"name": "e2etestBox"},
+  "boxFruit":{
+    "uuid":"3376339576e33dfb9145362426a33333",
+    "name":"e2etestBox",
+    "visibility":true,
+    "items":[
+      {"name":"apple","units":3},
+      {"name":"kiwi","units":2}
+    ]
+  }
+}
+
+
+
+

To access data we don’t need to import any file, we just call cy.fixture(filename) inside the *.spec.ts. We can name it as we want.

+
+
+
+
cy.fixture('box.json').as('fruitBox')
+
+
+
+

cy.fixture('box.json') we get access to box.json +.as(fruitBox) is used to create an alias (fruitBox) to the fixture.

+
+
+

For more info check Fixtures documentation

+
+
+
+

Request / Route

+
+

With cypress you can test your application with real data or with mocks.

+
+
+

Not using mocks guarantees that your tests are real e2e test but makes them vulnerable to external issues. +When you mock data you don’t know exactly if the data and the structure received from the backend is correct because you are forcing a mock on the response, but you can avoid external issues, run test faster and have better control on the structure and status.

+
+
+

To get more information go to Testing Strategies

+
+
+
+

Route

+
+

Cypress can intercept a XHR request and interact with it.

+
+
+
+
cy.server();
+cy.route(
+  'GET',
+  '/apiUrl/list',
+  [{"name":"apple", "units":3},{"name":"kiwi", "units":2}]
+)
+
+
+
+

cy.server(options) start a server to interact with the responses.

+
+
+
cy.route(options) intercepts a XMLHttpRequests
+
    +
  • +

    method GET

    +
  • +
  • +

    URL /apiUrl/list'

    +
  • +
  • +

    response [{"name":"apple", "units":3},{"name":"kiwi", "units":2}]

    +
  • +
+
+
+

Waits

+
+
+

Every cypress action has a default await time to avoid asynchronous issues, but this time can be short for some particular actions like API calls, for those cases we can use cy.wait().

+
+
+
+
cy.server();
+cy.route('/apiUrl/list').as('list');
+cy.visit('/boxList');
+cy.wait('@list');
+
+
+
+

You can find more information about cy.wait() here

+
+
+

To mock data with fixtures:

+
+
+
+
cy.fixture('box')
+  .then(({boxFruit}) => {
+    cy.route(
+      'GET',
+      '/apiUrl/list',
+      boxFruit
+    ).as('boxFruit');
+    cy.get('#button').click();
+    cy.wait('@boxFruit');
+    cy.get('#list').contains('apple');
+  })
+
+
+
+

We get boxFruit data from the box fixture and then we mock the API call with it so now the response of the call is boxFruit object. +When the button is clicked, it waits to receive the response of the call and then checks if the list contains one of the elements of the fruitBox.

+
+
+
+

Request

+
+

Make a HTTP request.

+
+
+
+
cy.server();
+cy.request('http://localhost:4200/')
+  .its('body')
+  .should('include', '<h1>Welcome to Devon4ngAngularElementsTest!</h1>');
+
+
+
+

If we have 'http://localhost:4200' as baseUrl on cypress.json

+
+
+
+
cy.server();
+cy.request('/')
+  .its('body')
+  .should('include', '<h1>Welcome to Devon4ngAngularElementsTest!</h1>');
+// Goes to http://localhost:4200/
+
+
+
+

We can add other options, like we can send the body of a form.

+
+
+
+
cy.server();
+cy.request({
+  method: 'POST',
+  url: '/send',
+  form: true,
+  body: {
+    name: 'name task',
+    description: 'description of the task'
+  }
+});
+
+
+
+
+

Custom commands

+
+

If you see yourself writing the same test more than once (login is a common one), you can create a custom command to make things faster.

+
+
+

Cypress.Commands.add('name', () => {}) to create the test.

+
+
+
Listing 82. commands.ts
+
+
Cypress.Commands.add('checkPlaceholder', (name) => {
+  cy.get(`[name='${name}']`).click();
+  cy.get('mat-form-field.mat-focused').should('exist');
+});
+
+
+
+
index.ts
+

To use the commands we need to import the files on support/index.ts

+
+
+
Listing 83. index.ts
+
+
import './commands'
+import './file1'
+import './folder/file2'
+
+
+
+

index.ts is where all our custom commands files unite so Cypress knows where to find them.

+
+
+

And as we are using typescript we need to define a namespace, interface and define our function.

+
+
+
    +
  • +

    index.d.ts

    +
  • +
+
+
+
+
declare namespace Cypress {
+  interface Chainable<Subject> {
+    checkPlaceholder(name:string):Chainable<void>
+  }
+}
+
+
+ +
+
+

Cross browser testing

+
+

By default the browser used by Cypress is Chrome; it has compatibility with its family of browsers (including Microsoft Edge) and has beta support for Mozilla Firefox.

+
+
+

To change the browser on the panel we can do it by selecting the desired one on the browsers tab before running the spec file.

+
+
+

Apart from Electron, Cypress will detect and display only the browsers that you have already installed on your machine.

+
+
+
+browserTab +
+
+
+

Once the browser is selected, you can run your tests.

+
+
+

To change the browser on the automatic test run, you can add a flag on the node command

+
+
+
+
cypress run --browser edge
+
+
+
+

Only if we use the cypress run command.

+
+
+

Or we can change the script file.

+
+
+
    +
  • +

    cypress/script.js

    +
  • +
+
+
+
+
const runTests= async ()=>{
+  ...
+  const {totalFailed} = await cypress.run({browser:'edge'});
+  ...
+};
+
+
+ +
+
+

Viewport

+
+

Cypress allow us to create tests depending on the Viewport, so we can test responsiveness.

+
+
+

There are different ways to use it:

+
+
+

Inside a test case

+
+
+
+
it('should change title when viewport is less than 320px', ()=>{
+  cy.get('.title-l').should('be.visible');
+  cy.get('.title-s').should('not.be.visible');
+  cy.viewport(320, 480);
+  cy.get('.title-l').should('not.be.visible');
+  cy.get('.title-s').should('be.visible');
+})
+
+
+
+

Passing the configuration as an option

+
+
+
+
describe('page display on medium size screen', {
+  viewportHeight: 1000,
+  viewportWidth: 400
+}, () => {
+  ...
+})
+
+
+
+

Or we can set a default

+
+
+
    +
  • +

    cypress.json

    +
  • +
+
+
+
+
...
+{
+ "viewportHeight": 1000
+ "viewportWidth": 400,
+}
+...
+
+
+ +
+
+

Test retries

+
+

We can intermittently get false negatives due to external issues that can affect our tests. Because of that, we can add a retries entry in the configuration so Cypress can re-run a failed test the selected number of times to verify that the error is real.

+
+
+

We can set retries for run or open mode.

+
+
+
    +
  • +

    cypress.json

    +
  • +
+
+
+
+
...
+"retries": {
+    "runMode": 3,
+    "openMode": 3
+  }
+...
+
+
+
+

The retries can be configured on the cypress.json or directly on a specific test.

+
+
+
+
it('should get button', {
+  retries: {
+    runMode: 2,
+    openMode: 2
+  }
+}, () => {
+  ...
+})
+
+
+
+

These retries are not shown on the test log.

+
+
+

Check more on retries documentation

+
+
+
+

Reporter

+
+

The tests results appear on the terminal, but to have a more friendly view we can add a reporter.

+
+
+
+reporter +
+
+
+
+

Mochawesome

+
+

In this case we are going to use Mochawesome; initially it's a Mocha reporter but as Cypress uses Mocha it works the same.

+
+
+

Install

+
+
+

npm

+
+
+
+
npm install --save-dev mochawesome
+
+
+
+

yarn

+
+
+
+
yarn add -D mochawesome
+
+
+
+

To run the reporter:

+
+
+
+
cypress run --reporter mochawesome
+
+
+
+

Mochawesome saves by default the generated files on `./mochawesome-report/` but we can add options to change this behavior.

+
+
+

Options can be passed to the reporter in two ways

+
+
+

Using a flag

+
+
+
+
cypress run --reporter mochawesome --reporter-options reportDir=report
+
+
+
+

Or on cypress.json

+
+
+
+
{
+  "baseUrl": "http://localhost:4200",
+  "reporter": "mochawesome",
+  "reporterOptions": {
+    "overwrite": false,
+    "html": false,
+    "json": true,
+    "reportDir": "cypress/report"
+  }
+}
+
+
+
+

overwrite:false to not overwrite each *.spec.ts test report; we want to keep them to create a merged version later.

+
+
+

reportDir to set a custom directory.

+
+
+

html:false because we don’t need it.

+
+
+

json:true to save them on json.

+
+
+

Mochawesome only creates the html file of the last .spec.ts file that the tests run, that’s why we don’t generate html reports directly, in order to stack them all on the same final html we need to merge the reports.

+
+ +
+

mochawesome-merge

+
+
+

Mochawesome-merge is a library that helps us to merge the different JSON reports.

+
+
+

npm

+
+
+
+
npm install --save-dev mochawesome-merge
+npm install --save-dev mochawesome-report-generator
+
+
+
+

yarn

+
+
+
+
yarn add -D mochawesome-merge
+yarn add -D mochawesome-report-generator
+
+
+
+

To merge the files we execute this command:

+
+
+
+
mochawesome-merge cypress/report/*.json > cypress/reportFinal.json
+
+
+
+

reportFinal.json is the result of this merge; with that we have the data of all the spec files in one JSON file.

+
+
+

We can also automate the test, merge and conversion to html using a script.

+
+
+
+
const cypress = require('cypress');
+const fse = require('fs-extra');
+const { merge } = require('mochawesome-merge');
+const generator = require('mochawesome-report-generator');
+const runTests= async ()=>{
+  await fse.remove('mochawesome-report');
+  await fse.remove('cypress/report');
+  const {totalFailed} = await cypress.run();
+  const reporterOptions = {
+    files: ["cypress/report/*.json"]
+  };
+  await generateReport(reporterOptions);
+  if(totalFailed !==  0){
+    process.exit(2);
+  };
+};
+const generateReport = (options)=> {
+  return merge(options).then((jsonReport)=>{
+    generator.create(jsonReport).then(()=>{
+      process.exit();
+    });
+  });
+};
+runTests();
+
+
+
+

fse.remove() to remove older reports data.

+
+
+

cypress.run() to run the tests.

+
+
+

merge(options) we merge the json output from running the tests.

+
+
+

generator.create(jsonReport) then we generate the html view of the report.

+
+ +
+

On kitchensink +you can find an official cypress demo with all the commands being used.

+
+ +
+
+

Angular ESLint support

+
+ + + + + +
+ + +ESLint is supported in Angular 10.1.0 onward. +
+
+
+
+

What about TSLint?

+
+

TSLint is a fantastic tool. It is a linter that was written specifically to work based on the TypeScript AST format. This has advantages and disadvantages, as with most decisions we are faced with in software engineering!

+
+
+

One advantage is there is no tooling required to reconcile differences between ESLint and TypeScript AST formats, but the major disadvantage is that the tool is therefore unable to reuse any of the previous work which has been done in the JavaScript ecosystem around linting, and it has to re-implement everything from scratch. Everything from rules to auto-fixing capabilities and more.

+
+
+

However, the backers behind TSLint announced in 2019 that they would be deprecating TSLint in favor of supporting typescript-eslint in order to benefit the community. You can read more about that here

+
+
+

The TypeScript Team themselves also announced their plans to move the TypeScript codebase from TSLint to typescript-eslint, and they have been big supporters of this project. More details at https://github.com/microsoft/TypeScript/issues/30553

+
+
+

Angular ESLint support comes from the angular-eslint tooling package. Angular documentation also links to this repository as you can check in the ng lint section of the Angular CLI documentation.

+
+
+
+

Quick start with Angular and ESLint

+
+

In order to create a brand new Angular CLI workspace which uses ESLint instead of TSLint and Codelyzer, simply run the following commands:

+
+
+
+
##Install the Angular CLI and @angular-eslint/schematics globally however you want (e.g. npm, yarn, volta etc)
+
+$ npm i -g @angular/cli @angular-devkit/core @angular-devkit/schematics @angular-eslint/schematics
+
+##Create a new Angular CLI workspace using the @angular-eslint/schematics collection (instead of the default)
+
+$ ng new --collection=@angular-eslint/schematics
+
+
+
+
+

Migrating an Angular CLI project from Codelyzer and TSLint

+ +
+
+

1 - Add relevant dependencies

+
+

The first step is to run the schematic to add @angular-eslint to your project:

+
+
+
+
$ ng add @angular-eslint/schematics
+
+
+
+

This will handle installing the latest version of all the relevant packages for you and adding them to the devDependencies of your package.json.

+
+
+
+

2 - Run the convert-tslint-to-eslint schematic on a project

+
+

The next thing to do is consider which "project" you want to migrate to use ESLint. If you have a single application in your workspace you will likely have just a single entry in the projects configuration object within your angular.json file. If you have a projects/ directory in your workspace, you will have multiple entries in your projects configuration and you will need to choose which one you want to migrate using the convert-tslint-to-eslint schematic.

+
+
+

You can run it like so:

+
+
+
+
$ ng g @angular-eslint/schematics:convert-tslint-to-eslint {{YOUR_PROJECT_NAME_GOES_HERE}}
+
+
+
+

From now on, ng lint will use ESLint!

+
+
+
+

3 - Remove root TSLint configuration and use only ESLint

+
+

Once you are happy with your ESLint setup, you simply need to remove the root-level tslint.json and potentially uninstall TSLint and any TSLint-related plugins/dependencies if your Angular CLI workspace is now no longer using TSLint at all.

+
+ +
+
+
+
+
+
+1. A package is a file or directory that is described by a package.json. +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/meta-architecture.html b/docs/devon4ng/1.0/meta-architecture.html new file mode 100644 index 00000000..f537b7b7 --- /dev/null +++ b/docs/devon4ng/1.0/meta-architecture.html @@ -0,0 +1,675 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Meta Architecture

+
+ +
+
+
+

Introduction

+
+ +
+
+
+

Purpose of this document

+
+
+

In our business applications, the client easily gets underestimated. Sometimes the client is more complex to develop and design than the server. While the server architecture is nowadays easy to agree on as common sense, for clients this is not as obvious and stable especially as it typically depends on the client framework used. Finding a concrete architecture applicable for all clients may therefore be difficult to accomplish.

+
+
+

This document tries to define on a high abstract level, a reference architecture which is supposed to be a mental image and frame for orientation regarding the evaluation and appliance of different client frameworks. As such it defines terms and concepts required to be provided for in any framework and thus gives a common ground of understanding for those acquainted with the reference architecture. This allows better comparison between the various frameworks out there, each having their own terms for essentially the same concepts. It also means that for each framework we need to explicitly map how it implements the concepts defined in this document.

+
+
+

The architecture proposed herein is neither new nor was it developed from scratch. Instead it is the gathered and consolidated knowledge and best practices of various projects (s. References).

+
+
+
+
+

Goal of the Client Architecture

+
+
+

The goal of the client architecture is to support the non-functional requirements for the client, i.e. mostly maintainability, scalability, efficiency and portability. As such it provides a component-oriented architecture following the same principles listed already in the devonfw architecture overview. Furthermore it ensures a homogeneity regarding how different concrete UI technologies are being applied in the projects, solving the common requirements in the same way.

+
+
+
+
+

Architecture Views

+
+
+

As for the server we distinguish between the business and the technical architecture. Where the business architecture is different from project to project and relates to the concrete design of dialog components given concrete requirements, the technical architecture can be applied to multiple projects.

+
+
+

The focus of this document is to provide a technical reference architecture on the client on a very abstract level defining required layers and components. How the architecture is implemented has to be defined for each UI technology.

+
+
+

The technical infrastructure architecture is out of scope for this document and although it needs to be considered, the concepts of the reference architecture should work across multiple TI architecture, i.e. native or web clients.

+
+
+
+
+

devonfw Reference Client Architecture

+
+
+

The following gives a complete overview of the proposed reference architecture. It will be built up incrementally in the following sections.

+
+
+
+Complete Client Architecture Overview +
+
+
+

Figure 1 Overview

+
+
+
+
+

Client Architecture

+
+
+

On the highest level of abstraction we see the need to differentiate between dialog components and their container they are managed in, as well as the access to the application server being the back-end for the client (e.g. a devon4j instance). This section gives a summary of these components and how they relate to each other. Detailed architectures for each component will be supplied in subsequent sections

+
+
+
+Client Architecture Overview +
+
+
+

Figure 2 Overview of Client Architecture

+
+
+
+
+

== Dialog Component

+
+
+

A dialog component is a logical, self-contained part of the user interface. It accepts user input and actions and controls communication with the user. Dialog components use the services provided by the dialog container in order to execute the business logic. They are self-contained, i.e. they possess their own user interface together with the associated logic, data and states.

+
+
+
    +
  • +

    Dialog components can be composed of other dialog components forming a hierarchy

    +
  • +
  • +

    Dialog components can interact with each other. This includes communication of a parent to its children, but also between components independent of each other regarding the hierarchy.

    +
  • +
+
+
+
+
+

== Dialog Container

+
+
+

Dialog components need to be managed in their life-cycle and how they can be coupled to each other. The dialog container is responsible for this along with the following:

+
+
+
    +
  • +

    Bootstrapping the client application and environment

    +
    +
      +
    • +

      Configuration of the client

      +
    • +
    • +

      Initialization of the application server access component

      +
    • +
    +
    +
  • +
  • +

    Dialog Component Management

    +
    +
      +
    • +

      Controlling the life-cycle

      +
    • +
    • +

      Controlling the dialog flow

      +
    • +
    • +

      Providing means of interaction between the dialogs

      +
    • +
    • +

      Providing application server access

      +
    • +
    • +

      Providing services to the dialog components
      +(e.g. printing, caching, data storage)

      +
    • +
    +
    +
  • +
  • +

    Shutdown of the application

    +
  • +
+
+
+
+
+

== Application Server Access

+
+
+

Dialogs will require a back-end application server in order to execute their business logic. Typically in a devonfw application the service layer will provide interfaces for the functionality exposed to the client. These business oriented interfaces should also be present on the client backed by a proxy handling the concrete call of the server over the network. This component provides the set of interfaces as well as the proxy.

+
+
+
+
+

Dialog Container Architecture

+
+
+

The dialog container can be further structured into the following components with their respective tasks described in own sections:

+
+
+
+Dialog Container Architecture Overview +
+
+
+

Figure 3 Dialog Container Architecture

+
+
+
+
+

== Application

+
+
+

The application component represents the overall client in our architecture. It is responsible for bootstrapping all other components and connecting them with each other. As such it initializes the components below and provides an environment for them to work in.

+
+
+
+
+

== Configuration Management

+
+
+

The configuration management manages the configuration of the client, so the client can be deployed in different environments. This includes configuration of the concrete application server to be called or any other environment-specific property.

+
+
+
+
+

== Dialog Management

+
+
+

The Dialog Management component provides the means to define, create and destroy dialog components. It therefore offers basic life-cycle capabilities for a component. In addition it also allows composition of dialog components in a hierarchy. The life-cycle is then managed along the hierarchy, meaning when creating/destroying a parent dialog, this affects all child components, which are created/destroyed as well.

+
+
+
+
+

== Service Registry

+
+
+

Apart from dialog components, a client application also consists of services offered to these. A service can thereby encompass among others:

+
+
+
    +
  • +

    Access to the application server

    +
  • +
  • +

    Access to the dialog container functions for managing dialogs or accessing the configuration

    +
  • +
  • +

    Dialog independent client functionality such as Printing, Caching, Logging, Encapsulated business logic such as tax calculation

    +
  • +
  • +

    Dialog component interaction

    +
  • +
+
+
+

The service registry offers the possibility to define, register and lookup these services. Note that these services could be dependent on the dialog hierarchy, meaning different child instances could obtain different instances / implementations of a service via the service registry, depending on which service implementations are registered by the parents.

+
+
+

Services should be defined as interfaces allowing for different implementations and thus loose coupling.

+
+
+
+
+

Dialog Component Architecture

+
+
+

A dialog component has to support all or a subset of the following tasks:
+(T1) Displaying the user interface incl. internationalization
+(T2) Displaying business data incl. changes made to the data due to user interactions and localization of the data
+(T3) Accepting user input including possible conversion from e.g. entered Text to an Integer
+(T4) Displaying the dialog state
+(T5) Validation of user input
+(T6) Managing the business data incl. business logic altering it due to user interactions
+(T7) Execution of user interactions
+(T8) Managing the state of the dialog (e.g. Edit vs. View)
+(T9) Calling the application server in the course of user interactions

+
+
+

Following the principle of separation of concerns, we further structure a dialog component in an own architecture allowing us the distribute responsibility for these tasks along the defined components:

+
+
+
+Dialog Component Architecture +
+
+
+

Figure 4 Overview of dialog component architecture

+
+
+
+
+

== Presentation Layer

+
+
+

The presentation layer generates and displays the user interface, accepts user input and user actions and binds these to the dialog core layer (T1-5). The tasks of the presentation layer fall into two categories:

+
+
+
    +
  • +

    Provision of the visual representation (View component)
    +The presentation layer generates and displays the user interface and accepts user input and user actions. The logical processing of the data, actions and states is performed in the dialog core layer. The data and user interface are displayed in localized and internationalized form.

    +
  • +
  • +

    Binding of the visual representation to the dialog core layer
    +The presentation layer itself does not contain any dialog logic. The data or actions entered by the user are then processed in the dialog core layer. There are three aspects to the binding to the dialog core layer. We refer to “data binding”, “state binding” and “action binding”. Syntactical and (to a certain extent) semantic validations are performed during data binding (e.g. cross-field plausibility checks). Furthermore, the formatted, localized data in the presentation layer is converted into the presentation-independent, neutral data in the dialog core layer (parsing) and vice versa (formatting).

    +
  • +
+
+
+
+
+

== Dialog Core Layer

+
+
+

The dialog core layer contains the business logic, the control logic, and the logical state of the dialog. It therefore covers tasks T5-9:

+
+
+
    +
  • +

    Maintenance of the logical dialog state and the logical data
    +The dialog core layer maintains the logical dialog state and the logical data in a form which is independent of the presentation. The states of the presentation (e.g. individual widgets) must not be maintained in the dialog core layer, e.g. the view state could lead to multiple presentation states disabling all editable widgets on the view.

    +
  • +
  • +

    Implementation of the dialog and dialog control logic
    +The component parts in the dialog core layer implement the client specific business logic and the dialog control logic. This includes, for example, the manipulation of dialog data and dialog states as well as the opening and closing of dialogs.

    +
  • +
  • +

    Communication with the application server
    +The dialog core layer calls the interfaces of the application server via the application server access component services.

    +
  • +
+
+
+

The dialog core layer should not depend on the presentation layer enforcing a strict layering and thus minimizing dependencies.

+
+
+
+
+

== Interactions between dialog components

+
+
+

Dialog components can interact in the following ways:

+
+
+
+Dialog Interactions +
+
+
+
    +
  • +

    Embedding of dialog components
    +As already said, dialog components can be hierarchically composed. This composition works by embedding one dialog component within the other. Apart from the life-cycle managed by the dialog container, the embedding needs to cater for the visual embedding of the presentation and core layer.

    +
    +
      +
    • +

      Embedding dialog presentation
      +The parent dialog needs to either integrate the embedded dialog in its layout or open it in its own modal window.

      +
    • +
    • +

      Embedding dialog core
      +The parent dialog needs to be able to access the embedded instance of its children. This allows initializing and changing their data and states. On the other hand the children might require context information offered by the parent dialog by registering services in the hierarchical service registry.

      +
    • +
    +
    +
  • +
  • +

    Dialog flow
    +Apart from the embedding of dialog components representing a tight coupling, dialogs can interact with each other by passing the control of the UI, i.e. switching from one dialog to another.

    +
  • +
+
+
+

When interacting, dialog components should interact only between the same or lower layers, i.e. the dialog core should not access the presentation layer of another dialog component.

+
+
+
+
+

Appendix

+
+ +
+
+
+

Notes about Quasar Client

+
+
+

The Quasar client architecture as the consolidated knowledge of our CSD projects is the major source for the above drafted architecture. However, the above is a much simplified and more agile version thereof:

+
+
+
    +
  • +

    Quasar Client tried to abstract from the concrete UI library being used, so it could decouple the business from the technical logic of a dialog. The presentation layer should be the only one knowing the concrete UI framework used. This level of abstraction was dropped in this reference architecture, although it might of course still make sense in some projects. For fast-moving agile projects in the web however introducing such a level of abstraction takes effort with little gained benefits. With frameworks like Angular 2 we would even introduce one additional seemingly artificial and redundant layer, since it already separates the dialog core from its presentation.

    +
  • +
  • +

In the past and in the days of Struts, JSF, etc. the concept of session handling was important for the client since part of the client was sitting on a server with a session relating it to its remote counterpart on the user's PC. Quasar Client catered for this need, by very prominently differentiating between session and application in the root of the dialog component hierarchy. However, in the current days of SPA applications and the lowered importance of server-side web clients, this prominent differentiation was dropped. When still needed the referenced documents will provide in more detail how to tailor the respective architecture to this end.

    +
  • +
+
+
+
+ +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ng/1.0/services-layer.html b/docs/devon4ng/1.0/services-layer.html new file mode 100644 index 00000000..cc1f8f60 --- /dev/null +++ b/docs/devon4ng/1.0/services-layer.html @@ -0,0 +1,558 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Services Layer

+
+
+

The services layer is more or less what we call 'business logic layer' on the server side. +It is the layer where the business logic is placed. +The main challenges are:

+
+
+
    +
  • +

    Define application state and an API for the components layer to use it

    +
  • +
  • +

    Handle application state transitions

    +
  • +
  • +

    Perform back-end interaction (XHR, WebSocket, etc.)

    +
  • +
  • +

    Handle business logic in a maintainable way

    +
  • +
  • +

    Configuration management

    +
  • +
+
+
+

All parts of the services layer are described in this chapter. +An example which puts the concepts together can be found at the end Interaction of Smart Components through the services layer.

+
+
+
+
+

Boundaries

+
+
+

There are two APIs for the components layer to interact with the services layer:

+
+
+
    +
  • +

    A store can be subscribed to for receiving state updates over time

    +
  • +
  • +

    A use case service can be called to trigger an action

    +
  • +
+
+
+

To illustrate this, the following figure shows an abstract overview.

+
+
+
+Smart and Dumb Components Interaction +
+
Figure 1. Boundaries to components layer
+
+
+
+
+

Store

+
+
+

A store is a class which defines and handles application state with its transitions over time. +Interaction with a store is always synchronous. +A basic implementation using RxJS can look like this.

+
+
+ + + + + +
+ + +A more profound implementation taken from a real-life project can be found here (Abstract Class Store). +
+
+
+
Listing 1. Store defined using RxJS
+
+
@Injectable()
+export class ProductSearchStore {
+
+  private stateSource = new BehaviorSubject<ProductSearchState>(defaultProductSearchState);
+  state$ = this.stateSource.asObservable();
+
+  setLoading(isLoading: boolean) {
+    const currentState = this.stateSource.getValue();
+    this.stateSource.next({
+      isLoading: isLoading,
+      products: currentState.products,
+      searchCriteria: currentState.searchCriteria
+    });
+  }
+
+}
+
+
+
+

In the example ProductSearchStore handles state of type ProductSearchState. +The public API is the property state$ which is an observable of type ProductSearchState. +The state can be changed with method calls. +So every desired change to the state needs to be modeled with a method. +In reactive terminology this would be an Action. +The store does not use any services. +Subscribing to the state$ observable leads to the subscribers receiving every new state.

+
+
+

This is basically the Observer Pattern:
+The store consumer registers itself to the observable via state$.subscribe() method call. +The first parameter of subscribe() is a callback function to be called when the subject changes. +This way the consumer - the observer - is registered. +When next() is called with a new state inside the store, all callback functions are called with the new value. +So every observer is notified of the state change. +This equals the Observer Pattern push type.

+
+
+

A store is the API for Smart Components to receive state from the service layer. +State transitions are handled automatically with Smart Components registering to the state$ observable.

+
+
+
+
+

Use Case Service

+
+
+

A use case service is a service which has methods to perform asynchronous state transitions. +In reactive terminology this would be an Action of Actions - a thunk (redux) or an effect (@ngrx).

+
+
+
+Use Case Service +
+
Figure 2. Use case services are the main API to trigger state transitions
+
+
+

A use case services method - an action - interacts with adapters, business services and stores. +So use case services orchestrate whole use cases. +For an example see use case service example.

+
+
+
+
+

Adapter

+
+
+

An adapter is used to communicate with the back-end. +This could be a simple XHR request, a WebSocket connection, etc. +An adapter is simple in the way that it does not add anything other than the pure network call. +So there is no caching or logging performed here. +The following listing shows an example.

+
+
+

For further information on back-end interaction see Consuming REST Services

+
+
+
Listing 2. Calling the back-end via an adapter
+
+
@Injectable()
+export class ProducsAdapter {
+
+  private baseUrl = environment.baseUrl;
+
+  constructor(private http: HttpClient) { }
+
+  getAll(): Observable<Product[]> {
+    return this.http.get<Product[]>(this.baseUrl + '/products');
+  }
+
+}
+
+
+
+
+
+

Interaction of Smart Components through the services layer

+
+
+

The interaction of smart components is a classic problem which has to be solved in every UI technology. +It is basically how one dialog tells the other something has changed.

+
+
+

An example is adding an item to the shopping basket. +With this action there need to be multiple state updates.

+
+
+
    +
  • +

    The small logo showing how many items are currently inside the basket needs to be updated from 0 to 1

    +
  • +
  • +

    The price needs to be recalculated

    +
  • +
  • +

    Shipping costs need to be checked

    +
  • +
  • +

    Discounts need to be updated

    +
  • +
  • +

    Ads need to be updated with related products

    +
  • +
  • +

    etc.

    +
  • +
+
+
+
+
+

Pattern

+
+
+

To handle this interaction in a scalable way we apply the following pattern.

+
+
+
+Interaction of Smart Components via services layer +
+
Figure 3. Smart Component interaction
+
+
+

The state of interest is encapsulated inside a store. All Smart Components interested in the state have to subscribe to the store’s API served by the public observable. Thus, with every update to the store the subscribed components receive the new value. The components basically react to state changes. Altering a store can be done directly if the desired change is synchronous. Most actions are of asynchronous nature so the UseCaseService comes into play. Its actions are void methods, which implement a use case, e.g., adding a new item to the basket. It calls asynchronous actions and can perform multiple store updates over time.

+
+
+

To put this pattern into perspective the UseCaseService is a programmatic alternative to redux-thunk or @ngrx/effects. The main motivation here is to use the full power of TypeScript --strictNullChecks and to let the learning curve not to become as steep as it would be when learning a new state management framework. This way actions are just void method calls.

+
+
+
+
+

Example

+
+
+
+Smart component interaction example +
+
Figure 4. Smart Components interaction example
+
+
+

The example shows two Smart Components sharing the FlightSearchState by using the FlightSearchStore. +The use case shown is started by an event in the Smart Component FlightSearchComponent. The action loadFlight() is called. This could be submitting a search form. +The UseCaseService is FlightSearchService, which handles the use case Load Flights.

+
+
+
UseCaseService example
+

+
+
+
+
export class FlightSearchService {
+
+  constructor(
+    private flightSearchAdapter: FlightSearchAdapter,
+    private store: FlightSearchStore
+  ) { }
+
+  loadFlights(criteria: FlightSearchCriteria): void {
+    this.store.setLoadingFlights(true);
+    this.store.clearFlights();
+
+    this.flightSearchAdapter.getFlights(criteria.departureDate,
+        {
+          from: criteria.departureAirport,
+          to: criteria.destinationAirport
+        })
+      .finally(() => this.store.setLoadingFlights(false))
+      .subscribe((result: FlightTo[]) => this.store.setFlights(result, criteria));
+  }
+
+}
+
+
+
+

First the loading flag is set to true and the current flights are cleared. This leads to the Smart Component showing a spinner indicating the loading action. Then the asynchronous XHR is triggered by calling the adapter. After completion the loading flag is set to false, causing the loading indication to no longer be shown. If the XHR was successful, the data would be put into the store. If the XHR was not successful, this would be the place to handle a custom error. All general network issues should be handled in a dedicated class, i.e., an interceptor. So, for example, the basic handling of 404 errors is not done here.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/Home.html b/docs/devon4node/1.0/Home.html new file mode 100644 index 00000000..bd12f6b3 --- /dev/null +++ b/docs/devon4node/1.0/Home.html @@ -0,0 +1,269 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4node Wiki

+
+ +
+
+
+

Layers

+
+
+ +
+
+
+ +
+

devon4node applications

+ +
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/_images/images/crud-schematic.PNG b/docs/devon4node/1.0/_images/images/crud-schematic.PNG new file mode 100644 index 00000000..f1b94f8d Binary files /dev/null and b/docs/devon4node/1.0/_images/images/crud-schematic.PNG differ diff --git a/docs/devon4node/1.0/_images/images/devon4node-architechture.png b/docs/devon4node/1.0/_images/images/devon4node-architechture.png new file mode 100644 index 00000000..653b0bbe Binary files /dev/null and b/docs/devon4node/1.0/_images/images/devon4node-architechture.png differ diff --git a/docs/devon4node/1.0/_images/images/generate-interactive.jpg b/docs/devon4node/1.0/_images/images/generate-interactive.jpg new file mode 100644 index 00000000..54e97b52 Binary files /dev/null and b/docs/devon4node/1.0/_images/images/generate-interactive.jpg differ diff --git a/docs/devon4node/1.0/_images/images/insert-data.PNG b/docs/devon4node/1.0/_images/images/insert-data.PNG new file mode 100644 index 00000000..0b0dfd2a Binary files /dev/null and b/docs/devon4node/1.0/_images/images/insert-data.PNG differ diff --git a/docs/devon4node/1.0/_images/images/new-app1.jpg b/docs/devon4node/1.0/_images/images/new-app1.jpg new file mode 100644 index 00000000..40c150a0 Binary files /dev/null and b/docs/devon4node/1.0/_images/images/new-app1.jpg differ diff --git a/docs/devon4node/1.0/_images/images/new-app2.jpg b/docs/devon4node/1.0/_images/images/new-app2.jpg new file mode 100644 index 00000000..67c86f2c Binary files /dev/null and b/docs/devon4node/1.0/_images/images/new-app2.jpg differ diff --git a/docs/devon4node/1.0/_images/images/new-app3.jpg b/docs/devon4node/1.0/_images/images/new-app3.jpg new file mode 100644 index 00000000..7b3f6021 Binary files /dev/null and b/docs/devon4node/1.0/_images/images/new-app3.jpg differ diff --git a/docs/devon4node/1.0/_images/images/new-app4.jpg b/docs/devon4node/1.0/_images/images/new-app4.jpg new file mode 100644 index 00000000..27eccdef Binary files /dev/null and 
b/docs/devon4node/1.0/_images/images/new-app4.jpg differ diff --git a/docs/devon4node/1.0/_images/images/new-app5.jpg b/docs/devon4node/1.0/_images/images/new-app5.jpg new file mode 100644 index 00000000..fe34d164 Binary files /dev/null and b/docs/devon4node/1.0/_images/images/new-app5.jpg differ diff --git a/docs/devon4node/1.0/_images/images/plantuml/components.png b/docs/devon4node/1.0/_images/images/plantuml/components.png new file mode 100644 index 00000000..ec0207dd Binary files /dev/null and b/docs/devon4node/1.0/_images/images/plantuml/components.png differ diff --git a/docs/devon4node/1.0/_images/images/plantuml/dependency-injection1.png b/docs/devon4node/1.0/_images/images/plantuml/dependency-injection1.png new file mode 100644 index 00000000..e909d946 Binary files /dev/null and b/docs/devon4node/1.0/_images/images/plantuml/dependency-injection1.png differ diff --git a/docs/devon4node/1.0/_images/images/plantuml/dependency-injection2.png b/docs/devon4node/1.0/_images/images/plantuml/dependency-injection2.png new file mode 100644 index 00000000..e79d2401 Binary files /dev/null and b/docs/devon4node/1.0/_images/images/plantuml/dependency-injection2.png differ diff --git a/docs/devon4node/1.0/_images/images/plantuml/layers.png b/docs/devon4node/1.0/_images/images/plantuml/layers.png new file mode 100644 index 00000000..d464104f Binary files /dev/null and b/docs/devon4node/1.0/_images/images/plantuml/layers.png differ diff --git a/docs/devon4node/1.0/_images/images/plantuml/module2.png b/docs/devon4node/1.0/_images/images/plantuml/module2.png new file mode 100644 index 00000000..bc1f31bc Binary files /dev/null and b/docs/devon4node/1.0/_images/images/plantuml/module2.png differ diff --git a/docs/devon4node/1.0/_images/images/plantuml/modules.png b/docs/devon4node/1.0/_images/images/plantuml/modules.png new file mode 100644 index 00000000..ffb3653f Binary files /dev/null and b/docs/devon4node/1.0/_images/images/plantuml/modules.png differ diff --git 
a/docs/devon4node/1.0/_images/images/sample/employees.png b/docs/devon4node/1.0/_images/images/sample/employees.png new file mode 100644 index 00000000..434ea28c Binary files /dev/null and b/docs/devon4node/1.0/_images/images/sample/employees.png differ diff --git a/docs/devon4node/1.0/_images/images/sample/generate-migrations.png b/docs/devon4node/1.0/_images/images/sample/generate-migrations.png new file mode 100644 index 00000000..a0414e9b Binary files /dev/null and b/docs/devon4node/1.0/_images/images/sample/generate-migrations.png differ diff --git a/docs/devon4node/1.0/_images/images/sample/insert-data.png b/docs/devon4node/1.0/_images/images/sample/insert-data.png new file mode 100644 index 00000000..0b0dfd2a Binary files /dev/null and b/docs/devon4node/1.0/_images/images/sample/insert-data.png differ diff --git a/docs/devon4node/1.0/_images/images/sample/new-app.png b/docs/devon4node/1.0/_images/images/sample/new-app.png new file mode 100644 index 00000000..f2c3638c Binary files /dev/null and b/docs/devon4node/1.0/_images/images/sample/new-app.png differ diff --git a/docs/devon4node/1.0/_images/images/sample/start-app.png b/docs/devon4node/1.0/_images/images/sample/start-app.png new file mode 100644 index 00000000..e44baee0 Binary files /dev/null and b/docs/devon4node/1.0/_images/images/sample/start-app.png differ diff --git a/docs/devon4node/1.0/_images/images/sample/swagger.png b/docs/devon4node/1.0/_images/images/sample/swagger.png new file mode 100644 index 00000000..8d7aa48d Binary files /dev/null and b/docs/devon4node/1.0/_images/images/sample/swagger.png differ diff --git a/docs/devon4node/1.0/_images/images/sample/test.png b/docs/devon4node/1.0/_images/images/sample/test.png new file mode 100644 index 00000000..ba775b27 Binary files /dev/null and b/docs/devon4node/1.0/_images/images/sample/test.png differ diff --git a/docs/devon4node/1.0/_images/images/typeorm-schematic.PNG b/docs/devon4node/1.0/_images/images/typeorm-schematic.PNG new file mode 
100644 index 00000000..2a3d09b9 Binary files /dev/null and b/docs/devon4node/1.0/_images/images/typeorm-schematic.PNG differ diff --git a/docs/devon4node/1.0/devon4node-architecture.html b/docs/devon4node/1.0/devon4node-architecture.html new file mode 100644 index 00000000..d1d73dd4 --- /dev/null +++ b/docs/devon4node/1.0/devon4node-architecture.html @@ -0,0 +1,515 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4node Architecture

+
+
+

As we have mentioned in the introduction, devon4node is based on NestJS. Nest (NestJS) is a framework for building efficient, scalable Node.js server-side applications.

+
+
+
+
+

HTTP layer

+
+
+

By using NestJS, devon4node is a platform-agnostic framework. NestJS focuses only on the logical layer, and delegates the transport layer to another framework, such as ExpressJS. You can see it in the following diagram:

+
+
+
+devon4node architecture +
+
+
+

As you can see, NestJS does not listen directly for incoming requests. It has an adapter to communicate with ExpressJS, and ExpressJS is responsible for that. ExpressJS is only one of the frameworks that NestJS can work with. We also have another adapter available out-of-the-box: the Fastify adapter. With that, you can replace ExpressJS with Fastify but still use all your NestJS components. You can also create your own adapter to make NestJS work with another HTTP framework.

+
+
+

At this point, you may wonder: why is NestJS (and devon4node) using ExpressJS by default instead of Fastify? Because, as you can see in the previous diagram, there is a component that is dependent on the HTTP framework: the middleware. As ExpressJS is the most widely used framework, a lot of middleware exists for it, so, in order to reuse it in our NestJS applications, NestJS uses ExpressJS by default. Anyway, you should consider which HTTP framework best fits your requirements.

+
+
+
+
+

devon4node layers

+
+
+

As other devonfw technologies, devon4node separates the application into layers.

+
+
+

Those layers are:

+
+ +
+
+layers +
+
+
+
+
+

devon4node application structure

+
+
+

Although there are many frameworks to create backend applications in NodeJS, none of them effectively solves the main problem - Architecture. This is the main reason we have chosen NestJS for devon4node applications. Besides, NestJS is highly inspired by Angular, therefore a developer who knows Angular can use their already acquired knowledge to write devon4node applications.

+
+
+

NestJS adopts various Angular concepts, such as dependency injection, piping, interceptors and modularity, among others. By using modularity we can reuse some of our modules between applications. One example that devon4node provides is the mailer module.

+
+
+
+
+

Modules

+
+
+

Creating an application module is simple: you only need to create an empty class with the decorator Module:

+
+
+
+
@Module({})
+export class AppModule {}
+
+
+
+

In the module you can define:

+
+
+
    +
  • +

    Imports: the list of imported modules that export the providers which are required in this module

    +
  • +
  • +

    Controllers: the set of controllers defined in this module which have to be instantiated

    +
  • +
  • +

    Providers: the providers that will be instantiated by the Nest injector and that may be shared at least across this module

    +
  • +
  • +

    Exports: the subset of providers that are provided by this module and should be available in other modules which import this module

    +
  • +
+
+
+

The main difference between Angular and NestJS is that NestJS modules encapsulate providers by default. This means that it’s impossible to inject providers that are neither directly part of the current module nor exported from the imported modules. Thus, you may consider the exported providers from a module as the module’s public interface, or API. Example of modules graph:

+
+
+
+modules +
+
+
+

In devon4node we have three different kinds of modules:

+
+
+
    +
  • +

    AppModule: this is the root module. Everything that our application needs must be imported here.

    +
  • +
  • +

    Global Modules: this is a special kind of module. When you make a module global, it’s accessible to every module in your application. You can see it in the next diagram. It’s the same as the previous one, but now the CoreModule is global:

    +
    +
    +module2 +
    +
    +
    +

    One example of a global module is the CoreModule. In the CoreModule you must import every module which has providers that need to be accessible in all modules of your application

    +
    +
  • +
  • +

    Feature (or application) modules: modules which contain the logic of our application. We must import them in the AppModule.

    +
  • +
+
+
+

For more information about modules, see NestJS documentation page

+
+
+
+
+

Folder structure

+
+
+

devon4node defines a folder structure that every devon4node application must follow. The folder structure is:

+
+
+
+
├───src
+│   ├───app
+│   │   ├───core
+│   │   │   ├───auth
+│   │   │   ├───configuration
+│   │   │   ├───user
+│   │   │   └───core.module.ts
+│   │   ├───shared
+│   │   └───feature
+│   │       ├───sub-module
+│   │       │   ├───controllers
+│   │       │   ├───...
+│   │       │   ├───services
+│   │       │   └───sub-module.module.ts
+│   │       ├───controllers
+│   │       ├───interceptors
+│   │       ├───pipes
+│   │       ├───guards
+│   │       ├───filters
+│   │       ├───middlewares
+│   │       ├───model
+│   │       │   ├───dto
+│   │       │   └───entities
+│   │       ├───services
+│   │       └───feature.module.ts
+│   ├───config
+│   └───migration
+├───test
+└───package.json
+
+
+
+

devon4node schematics ensure this folder structure so, please, do not create files on your own; use the devon4node schematics.

+
+
+
+
+

NestJS components

+
+
+

NestJS provides several components that you can use in your application:

+
+
+ +
+
+

In the NestJS documentation you can find all information about each component. But, something that is missing in the documentation is the execution order. Every component can be defined in different levels: globally, in the controller or in the handler. As middleware is part of the HTTP server we can define it in a different way: globally or in the module.

+
+
+
+components +
+
+
+

It is not necessary to have components defined at every level. For example, you can have an interceptor defined globally but not have any other at the controller or handler level. If nothing is defined at some level, the request will continue to the next component.

+
+
+

As you can see in the previous image, the first component which receives the request is the globally defined middleware. Then, it sends the request to the module middleware. Each of them can return a response to the client, without passing the request to the next level.

+
+
+

Then, the request continues to the guards: first the global guard, then the controller guard and finally the handler guard. At this point, we can throw an exception in all components and the exception filter will catch it and send a proper error message to the client. We do not show the filters in the graphic in order to simplify it.

+
+
+

After the guards, it is time for the interceptors: global interceptors, controller interceptors and handler interceptors. And last, before arriving at the handler inside the controller, the request passes through the pipes.

+
+
+

When the handler has the response ready to send to the client, it does not go directly to the client. It comes back through the interceptors, so we can also intercept the response. The order this time is the reverse: handler interceptors, controller interceptors and global interceptors. After that, we can finally send the response to the client.

+
+
+

Now, with this in mind, you are able to create the components in a better way.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/devon4node-introduction.html b/docs/devon4node/1.0/devon4node-introduction.html new file mode 100644 index 00000000..579407ca --- /dev/null +++ b/docs/devon4node/1.0/devon4node-introduction.html @@ -0,0 +1,288 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

devon4node

+
+
+

devonfw is a platform which provides solutions to building business applications which combine best-in-class frameworks and libraries as well as industry proven practices and code conventions. devonfw is 100% Open Source (Apache License version 2.0) since the beginning of 2018.

+
+
+

devon4node is the NodeJS stack of devonfw. It allows you to build business applications (backends) using NodeJS technology in standardized way based on established best-practices.

+
+
+

devon4node is based on NestJS. Nest (NestJS) is a framework for building efficient, scalable Node.js server-side applications. It uses progressive TypeScript and combines elements of OOP (Object Oriented Programming), FP (Functional Programming), and FRP (Functional Reactive Programming).

+
+
+

In this wiki you can find all documentation related to devon4node. Choose a wiki page from the side-bar.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/guides-auth-jwt.html b/docs/devon4node/1.0/guides-auth-jwt.html new file mode 100644 index 00000000..f05ea32a --- /dev/null +++ b/docs/devon4node/1.0/guides-auth-jwt.html @@ -0,0 +1,363 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Auth JWT module

+
+
+

devon4node provides a way to generate a default authentication module using JWT (JSON Web Token). It uses the @nestjs/passport library described here.

+
+
+

To generate the devon4node auth-jwt module you only need to execute the command: nest generate -c @devon4node/schematics auth-jwt. We generate this module inside the applications instead of distributing a npm package because this module is prone to be modified depending on the requirements. It also generates a basic user module.

+
+
+

In this page we will explain the default implementation provided by devon4node. For more information about authentication, JWT, passport and others, you can see:

+
+
+ +
+
+
+
+

Auth JWT endpoints

+
+
+

In order to execute authentication operations, the auth-jwt module exposes the following endpoints:

+
+
+
    +
  • +

    POST /auth/login: receives a username and a password and returns the token in the header if the combination of username and password is correct.

    +
  • +
  • +

    POST /auth/register: register a new user.

    +
  • +
  • +

    GET /auth/currentuser: returns the user data if the user is authenticated.

    +
  • +
+
+
+
+
+

Protect endpoints with auth-jwt

+
+
+

In order to protect your endpoints with auth-jwt module you only need to add the AuthGuard() in the UseGuards decorator. Example:

+
+
+
+
@Get('currentuser')
+@UseGuards(AuthGuard())
+currentUser(@Request() req: UserRequest) {
+  return req.user;
+}
+
+
+
+

Now, all requests to currentuser are protected by the AuthGuard.

+
+
+
+
+

Role based Access Control

+
+
+

The auth-jwt module also provides a way to control access to some endpoints by using roles. For example, if you want to grant access to an endpoint only to admins, you only need to add the Roles decorator to those endpoints with the roles allowed. Example:

+
+
+
+
@Get('currentuser')
+@UseGuards(AuthGuard())
+@Roles(roles.ADMIN)
+currentUser(@Request() req: UserRequest) {
+  return req.user;
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/guides-cli.html b/docs/devon4node/1.0/guides-cli.html new file mode 100644 index 00000000..dcf15a18 --- /dev/null +++ b/docs/devon4node/1.0/guides-cli.html @@ -0,0 +1,1230 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4node CLI

+
+
+

devon4node CLI is a tool designed to manage devon4node applications in an easy way. Highly inspired by Nest CLI.

+
+
+

In this page we will explain all commands available and their arguments.

+
+
+
+
+

Prerequisites

+
+
+
    +
  • +

    NodeJS lts

    +
  • +
  • +

    yarn

    +
  • +
+
+
+
+
+

devon4node

+
+
+

After installing the devon4node CLI package npm i -g @devon4node/cli, the command devon4node (or d4n) must be available in your system. It has new, generate and db subcommands and also accepts the following arguments:

+
+
+

|== == +| Arguments | Description +|--help, -h | Shows help +|-v, --version | Shows version number +|== ==

+
+
+

Examples:

+
+
+
+
devon4node -h
+devon4node new -h
+
+
+
+
+
+

new

+
+
+

devon4node new allows you to create new devon4node applications. It’s an interactive command and it will ask you for everything that it needs in order to create a new application.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Show help +| --no-interactive, -n | Execute the command without ask anything to the user +| --dry-run | Allow to test changes before execute command. +| --skip-git, -g | Allow to skip git repository initialization. +| --skip-install, -s | Allow to skip package installation. +| --typeorm, -t | Allow to select the type of database. +| --config-module, -c | Allow to add config module or not. +| --swagger, -a | Allow to add swagger module or not. +| --security, -y | Allow to add security (cors + HTTP security headers) or not. +| --mailer, -m | Allow to add mailer module or not. +| --auth-jwt, -j | Allow to add Auth JWT module or not. +| --version, -v | Show version number +|== == == ==

+
+
+

Examples:

+
+
+
+
devon4node new my-app -sg
+devon4node new my-app -n
+devon4node new my-app -n -typeorm sqlite -config-module -auth-jwt
+
+
+
+
+
+

generate

+
+
+

This command allows you to generate code into your application. It receives the name of the schematic that will generate the code.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Show help +| --interactive, -i | Generate code using the interactive mode (same as new command). +| --skip-install, -s | Allow to skip package installation. +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | Path to project. +|== == == ==

+
+
+

Examples:

+
+
+
+
devon4node generate -i
+devon4node generate service --name my-service
+
+
+
+
+
+

== application

+
+
+

Create a devon4node application. It is used by the new command.

+
+
+

|== == == == +| Arguments | Description +| --dry-run, -d | Allow to test changes before execute command. +| --path, | Path to project. +| --name, -n | The name of the application. +|== == == ==

+
+
+
+
+

== angular-app

+
+
+

Create a new Angular application. Inherit from Nest CLI

+
+
+

|== == == == +| Arguments | Description +| --dry-run, -d | Allow to test changes before execute command. +| --path, | Path to project. +| --initApp | Flag to skip the angular application generation. +| --name, -n | The name of the application. +|== == == ==

+
+
+
+
+

== class

+
+
+

Create a new class. Inherit from Nest CLI

+
+
+

|== == == == +| Arguments | Description +|--dry-run, -d | Allow to test changes before execute command. +|--path, -p | The path to create the class. +|--name, -n | The name of the class. +|--flat | Flag to indicate if a directory is created. +|--spec | Specifies if a spec file is generated. +|--language | Nest class language (ts/js). +|--sourceRoot | Nest controller source root directory. +|== == == ==

+
+
+
+
+

== controller

+
+
+

Create a Nest controller.

+
+
+

|== == == == +| Arguments | Description +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the controller. +| --name, -n | The name of the controller. To create a controller with name Banana in the module fruits you need to introduce fruits/banana +| --spec | Specifies if a spec file is generated. +|== == == ==

+
+
+
+
+

== decorator

+
+
+

Create a Nest decorator. Inherit from Nest CLI

+
+
+

|== == == == +| Arguments | Description +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the decorator. +| --name, -n | The name of the decorator. +| --language | Nest decorator language (ts/js). +| --sourceRoot | Nest decorator source root directory. +| --flat | Flag to indicate if a directory is created. +|== == == ==

+
+
+
+
+

== filter

+
+
+

Create a Nest filter.

+
+
+

|== == == == +| Arguments | Description +|--dry-run, -d | Allow to test changes before execute command. +|--path, -p | The path to create the filter. +|--name, -n | The name of the filter. To create a filter with name Banana in the module fruits you need to introduce fruits/banana +|--language | Nest filter language (ts/js). +|--sourceRoot | Nest filter source root directory. +|--flat | Flag to indicate if a directory is created. +|--spec | Specifies if a spec file is generated. +|== == == ==

+
+
+
+
+

== gateway

+
+
+

Create a Nest gateway. Inherit from Nest CLI

+
+
+

|== == == == +| Arguments | Description +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the gateway. +| --name, -n | The name of the gateway. +| --language | Nest gateway language (ts/js). +| --sourceRoot | Nest gateway source root directory. +| --flat | Flag to indicate if a directory is created. +| --spec | Specifies if a spec file is generated. +|== == == ==

+
+
+
+
+

== guard

+
+
+

Create a Nest guard.

+
+
+

|== == == == +| Arguments | Description +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the guard. +| --name, -n | The name of the guard. To create a guard with name Banana in the module fruits you need to introduce fruits/banana +| --language | Nest guard language (ts/js). +| --sourceRoot | Nest guard source root directory. +| --flat | Flag to indicate if a directory is created. +| --spec | Specifies if a spec file is generated. +|== == == ==

+
+
+
+
+

== interceptor

+
+
+

Create a Nest interceptor.

+
+
+

|== == == == +| Arguments | Description +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the interceptor. +| --name, -n | The name of the interceptor. To create an interceptor with name Banana in the module fruits you need to introduce fruits/banana +| --language | Nest interceptor language (ts/js). +| --sourceRoot | Nest interceptor source root directory. +| --flat | Flag to indicate if a directory is created. +| --spec | Specifies if a spec file is generated. +|== == == ==

+
+
+
+
+

== interface

+
+
+

Create a Nest interface. Inherit from Nest CLI

+
+
+

|== == == == +| Arguments | Description +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the interface. +| --name, -n | The name of the interface. +| --sourceRoot | Nest interface source root directory +| --flat | Flag to indicate if a directory is created. +|== == == ==

+
+
+
+
+

== middleware

+
+
+

Create a Nest middleware.

+
+
+

|== == == == +| Arguments | Description +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the middleware. +| --name, -n | The name of the middleware. To create a middleware with name Banana in the module fruits you need to introduce fruits/banana +| --language | Nest middleware language (ts/js). +| --sourceRoot | Nest middleware source root directory. +| --flat | Flag to indicate if a directory is created. +| --spec | Specifies if a spec file is generated. +|== == == ==

+
+
+
+
+

== module

+
+
+

Create a Nest module.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the module. +| --name, -n | The name of the module. To create a module named module-b as a submodule of module-a, you need to introduce module-a/module-b +| --module | The path to import the module. +| --language | Nest module language (ts/js). +| --sourceRoot | Nest module source root directory. +| --skipImport | Flag to skip the module import. +|== == == ==

+
+
+
+
+

== pipe

+
+
+

Create a Nest pipe.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the pipe. +| --name, -n | The name of the pipe. To create a pipe with name Banana in the module fruits you need to introduce fruits/banana +| --language | Nest pipe language (ts/js). +| --sourceRoot | Nest pipe source root directory. +| --flat | Flag to indicate if a directory is created. +| --spec | Specifies if a spec file is generated. +|== == == ==

+
+
+
+
+

== provider

+
+
+

Create a Nest provider. Inherit from Nest CLI

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the provider. +| --name, -n | The name of the provider. +| --language | Nest provider language (ts/js). +| --sourceRoot | Nest provider source root directory. +| --flat | Flag to indicate if a directory is created. +| --spec | Specifies if a spec file is generated. +|== == == ==

+
+
+
+
+

== service

+
+
+

Create a Nest service.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the service. +| --name, -n | The name of the service. +| --spec | Specifies if a spec file is generated. To create a service with name Banana in the module fruits you need to introduce fruits/banana +|== == == ==

+
+
+
+
+

== resolver

+
+
+

Create a Nest resolver. Inherit from Nest CLI

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the resolver. +| --name, -n | The name of the resolver. +| --language | Nest resolver language (ts/js). +| --sourceRoot | Nest resolver source root directory. +| --flat | Flag to indicate if a directory is created. +| --spec | Specifies if a spec file is generated. +|== == == ==

+
+
+
+
+

== configuration

+
+
+

Create a Nest CLI configuration. Inherit from Nest CLI

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, | Path to project. +|== == == ==

+
+
+
+
+

== library

+
+
+

Create a Nest library (mono-repo). Inherit from Nest CLI

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the library. +| --name, -n | The name of the library. +| --prefix | The prefix of the library. +| --language | Nest library language. +| --rootDir | The libraries root directory. +|== == == ==

+
+
+
+
+

== sub-app

+
+
+

Create a Nest application (mono-repo). Inherit from Nest CLI

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the application. +| --name, -n | The name of the application. +| --language | Nest application language. +| --rootDir | Applications root directory. +|== == == ==

+
+
+
+
+

== typeorm

+
+
+

Initialize typeorm into your current project in a correct way.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | Path to project. +| --db | Database type. +|== == == ==

+
+
+
+
+

== entity

+
+
+

Add a TypeOrm entity to your project. Requires TypeORM installed in the project.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | Change the application folder where you will create the entity +| --name, -n | The entity name. To create a entity with name Banana in the module fruits you need to introduce fruits/banana +|== == == ==

+
+
+
+
+

== config-module

+
+
+

Add the config module to the project.

+
+
+

It will add the @devon4node/common module as a project dependency. Then, it will generate the configuration module into your project and add it in the core module. Also, it generates the config files for the most common environments.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | Path to project. +|== == == ==

+
+
+
+
+

== crud

+
+
+

Generate CRUD methods for an entity. Requires TypeORM installed in the project.

+
+
+

It will add the @nestjsx/crud module as a project dependency. Then, it generates an entity, a CRUD controller and a CRUD service. It also registers the entity, controller and service in the module.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | Change the application folder where you will create the crud +| --name, -n | The crud name. To create crud with name Banana in the module fruits you need to introduce fruits/banana +|== == == ==

+
+
+
+
+

== mailer

+
+
+

Add @devon4node/mailer module to project.

+
+
+

It will add the @devon4node/mailer module as a project dependency. Also, it will add it to the core module and it will generate some email template examples.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | Path to project. +|== == == ==

+
+
+
+
+

== swagger

+
+
+

Add swagger module to project.

+
+
+

It will add the @nestjs/swagger module as a project dependency. Also, it will update the main.ts file in order to expose the endpoint for swagger. The default endpoint is: /v1/api

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | Path to project. +|== == == ==

+
+
+
+
+

== auth-jwt

+
+
+

Add the auth JWT module to the project.

+
+
+

It will add to your project the auth-jwt and user module. Also, it will import those modules into the core module.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | Path to project. +|== == == ==

+
+
+
+
+

== all-in-one

+
+
+

Execute multiple schematics at the same time.

+
+
+

This schematic is used by the interactive mode.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | Path to config file +|== == == ==

+
+
+
+
+

== security

+
+
+

Add cors and helmet to your project.

+
+
+

It will add helmet package as project dependency and update the main.ts file in order to enable the cors and helmet in your application.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | Path to project. +|== == == ==

+
+
+
+
+

db

+
+
+

Execute a database command. This command is an alias of the typeorm command, so if you execute the command devon4node db migration:create under the hood it will execute typeorm migration:create. For more information see typeorm CLI documentation.

+
+
+
+
+

== schema:sync

+
+
+

Synchronizes your entities with database schema. It runs schema update queries on all connections you have. To run update queries on a concrete connection use -c option.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which schema synchronization needs to run. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows number version +|== == == ==

+
+
+

Examples:

+
+
+
+
devon4node db schema:sync
+
+
+
+
+
+

== schema:log

+
+
+

Shows sql to be executed by schema:sync command. It shows sql log only for your default connection. To run update queries on a concrete connection use -c option.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which schema synchronization needs to run. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows number version +|== == == ==

+
+
+
+
+

== schema:drop

+
+
+

Drops all tables in the database on your default connection. To drop table of a concrete connection’s database use -c option.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which schema synchronization needs to run. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows number version +|== == == ==

+
+
+
+
+

== query

+
+
+

Executes given SQL query on a default connection. Specify connection name to run query on a specific connection.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which schema synchronization needs to run. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows number version +|== == == ==

+
+
+
+
+

== entity:create

+
+
+

Generates a new entity.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which to run a query +| --name, -n | Name of the entity class. +| --dir | Directory where entity should be created. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows number version +|== == == ==

+
+
+
+
+

== subscriber:create

+
+
+

Generates a new subscriber.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which to run a query +| --name, -n | Name of the entity class. +| --dir | Directory where entity should be created. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows number version +|== == == ==

+
+
+
+
+

== migration:create

+
+
+

Creates a new migration file.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which to run a query +| --name, -n | Name of the entity class. +| --dir | Directory where entity should be created. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows number version +|== == == ==

+
+
+

Examples:

+
+
+
+
devon4node db migration:create -n InsertData
+
+
+
+
+
+

== migration:generate

+
+
+

Generates a new migration file with sql needs to be executed to update schema.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which to run a query +| --name, -n | Name of the entity class. +| --dir | Directory where entity should be created. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows number version +|== == == ==

+
+
+

Examples:

+
+
+
+
devon4node db migration:generate -n CreateTables
+
+
+
+
+
+

== migration:run

+
+
+

Runs all pending migrations.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which run a query. +| --transaction, -t | Indicates if transaction should be used or not for migration run. Enabled by default. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows number version +|== == == ==

+
+
+
+
+

== migration:show

+
+
+

Show all migrations and whether they have been run or not

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which run a query. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows number version +|== == == ==

+
+
+
+
+

== migration:revert

+
+
+

Reverts last executed migration.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which run a query. +| --transaction, -t | Indicates if transaction should be used or not for migration revert. Enabled by default. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows number version +|== == == ==

+
+
+
+
+

== version

+
+
+

Prints TypeORM version this project uses.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --version, -v | Shows number version +|== == == ==

+
+
+

Examples:

+
+
+
+
devon4node db version
+
+
+
+
+
+

== cache:clear

+
+
+

Clears all data stored in query runner cache.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which run a query. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows number version +|== == == ==

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/guides-code-generation.html b/docs/devon4node/1.0/guides-code-generation.html new file mode 100644 index 00000000..f7ec04cf --- /dev/null +++ b/docs/devon4node/1.0/guides-code-generation.html @@ -0,0 +1,515 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Code Generation

+
+
+

As we mention in the page key principles, one of our key principles is Productivity. In order to provide that productivity, we have some tools to generate code. These tools will help you generate the common parts of the application so that you can focus only on the specific functionality.

+
+
+

Those tools are:

+
+ +
+
+
+

Nest CLI and Devon4node schematics

+
+
+

We are going to use the Nest CLI to generate code of our application; you can learn more about the Nest CLI in the official documentation.

+
+
+
+
+

Install devon4node schematics

+
+
+

First of all, you need to install Nest CLI

+
+
+

Execute the command yarn global add @nestjs/cli. +You can also use npm: npm install -g @nestjs/cli

+
+
+

And then Devon4node schematics globally with the following command:

+
+
+

yarn global add @devon4node/schematics or npm install -g @devon4node/schematics

+
+
+
+
+

==

+
+
+

If you get an error trying to execute any devon4node schematic related to collection not found, try to reinstall devon4node/schematics on the project folder or be sure that the schematics folder is inside @devon4node in node_modules. +yarn add @devon4node/schematics +== ==

+
+
+
+
+

Generate new devon4node application

+
+
+

To start creating a devon4node application, execute the command:

+
+
+

nest g -c @devon4node/schematics application [application-name]

+
+
+

If you do not put a name, the command line will ask you for one.

+
+
+
+
+

Generate code for TypeORM

+
+
+

Initialize TypeORM into your current project in a correct way.

+
+
+

nest g -c @devon4node/schematics typeorm

+
+
+

Then, you will be asked about which DB you want to use.

+
+
+

typeorm schematic

+
+
+
+
+

Generate CRUD

+
+
+

Generate CRUD methods for an entity. Requires TypeORM installed in the project.

+
+
+

It will add the @nestjsx/crud module as a project dependency. Then, it generates an entity, a CRUD controller and a CRUD service. It also registers the entity, controller and service in the module.

+
+
+

Execute nest g -c @devon4node/schematics crud and then you will need to write a name for the crud.

+
+
+
+crud schematic +
+
+
+
+
+

Generate TypeORM entity

+
+
+

Add a TypeORM entity to your project. Requires TypeORM installed in the project.

+
+
+

Execute nest g -c @devon4node/schematics entity and you will be asked for an entity name.

+
+
+
+
+

Add config-module

+
+
+

Add the config module to the project.

+
+
+

It will add the @devon4node/common module as a project dependency. Then, it will generate the configuration module into your project and add it in the core module. Also, it generates the config files for the most common environments.

+
+
+

The command to execute will be nest g -c @devon4node/schematics config-module

+
+
+
+
+

Add mailer module

+
+
+

Add @devon4node/mailer module to project.

+
+
+

It will add the @devon4node/mailer module as a project dependency. Also, it will add it to the core module and it will generate some email template examples.

+
+
+

Write the command nest g -c @devon4node/schematics mailer

+
+
+
+
+

Add swagger module

+
+
+

Add swagger module to project.

+
+
+

It will add the @nestjs/swagger module as a project dependency. Also, it will update the main.ts file in order to expose the endpoint for swagger. The default endpoint is: /v1/api

+
+
+

Execute the command nest g -c @devon4node/schematics swagger

+
+
+
+
+

Add auth-jwt module

+
+
+

Add the auth JWT module to the project.

+
+
+

It will add to your project the auth-jwt and user module. Also, it will import those modules into the core module.

+
+
+

Execute nest g -c @devon4node/schematics auth-jwt

+
+
+
+
+

Add security

+
+
+

Add cors and helmet to your project.

+
+
+

It will add helmet package as project dependency and update the main.ts file in order to enable the cors and helmet in your application.

+
+
+

Execute nest g -c @devon4node/schematics security

+
+
+
+
+

Generate database migrations

+
+
+
    +
  1. +

    Generate database migrations

    +
    +
      +
    1. +

      In order to create migration scripts with TypeORM, you need to install ts-node: yarn global add ts-node or npm i -g ts-node

      +
    2. +
    3. +

      Generate the tables creation migration: yarn run typeorm migration:generate -n CreateTables

      +
      +
      +insert data +
      +
      +
      +

      It will connect to the database, read all entities and then it will generate a migration file with all sql queries needed to transform the current status of the database to the status defined by the entities. If the database is empty, it will generate all sql queries needed to create all tables defined in the entities. You can find an example in the todo example

      +
      +
    4. +
    +
    +
  2. +
+
+
+

As TypeORM is the tool used for the DB, you can check the official documentation for more information. +See TypeORM CLI documentation.

+
+
+
+
+

CobiGen

+
+
+

Currently, we do not have templates to generate devon4node code (we have planned to do that in the future). Instead, we have templates that read the code of a devon4node application and generate a devon4ng application. Visit the CobiGen page for more information.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/guides-coding-conventions.html b/docs/devon4node/1.0/guides-coding-conventions.html new file mode 100644 index 00000000..6ad84cc9 --- /dev/null +++ b/docs/devon4node/1.0/guides-coding-conventions.html @@ -0,0 +1,537 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Coding Conventions

+
+
+

devon4node defines some coding conventions in order to improve the readability, reduce the merge conflicts and be able to develop applications in an industrialized way.

+
+
+

In order to ensure that you are following the devon4node coding conventions, you can use the following tools:

+
+
+
    +
  • +

    ESLint: ESLint is a tool for identifying and reporting on patterns found in ECMAScript/JavaScript code, with the goal of making code more consistent and avoiding bugs. We recommend to use the ESLint VSCode extension (included in the devonfw Platform Extension Pack) in order to be able to see the linting errors while you are developing.

    +
  • +
  • +

    Prettier: Prettier is a code formatter. We recommend to use the Prettier VSCode extension (included in the devonfw Platform Extension Pack) and enable the editor.formatOnSave option.

    +
  • +
  • +

    devon4node application schematic: this tool will generate code following the devon4node coding conventions. Also, when you generate a new project using the devon4node application schematic, it generates the configuration files for TSLint and Prettier that satisfy the devon4node coding conventions.

    +
  • +
+
+
+

When you combine all tools, you can be sure that you follow the devon4node coding conventions.

+
+
+
+
+

Detailed devon4node Coding Conventions

+
+
+

Here we will detail some of the most important devon4node coding conventions. To be sure that you follow all devon4node coding conventions, use the tools described before.

+
+
+
+
+

Indentation

+
+
+

All devon4node code files must be indented using spaces. The indentation width must be 2 spaces.

+
+
+
+
+

White space

+
+
+

In order to improve the readability of your code, you must introduce whitespaces. Example:

+
+
+
+
if(condition){
+
+
+
+

must be

+
+
+
+
if (condition) {
+
+
+
+
+
+

Naming conventions

+
+ +
+
+
+

== File naming

+
+
+

The file name must follow the pattern: (name in kebab case).(kind of component).(extension) +The test file name must follow the pattern: (name in kebab case).(kind of component).spec.(extension)

+
+
+

Example:

+
+
+
+
auth-jwt.service.ts
+auth-jwt.service.spec.ts
+
+
+
+
+
+

== Interface naming

+
+
+

The interface names must be in pascal case, and must start with I. There is some controversy in starting the interface names with an I, but we decided to do it because in most cases you will have an interface and a class with the same name, so, to differentiate them, we decided to start the interfaces with I. Other devonfw stacks solve it by adding the suffix Impl in the class implementations.

+
+
+

Example:

+
+
+
+
interface ICoffee {}
+
+
+
+
+
+

== Class naming

+
+
+

The class names must be in pascal case.

+
+
+

Example:

+
+
+
+
class Coffee {}
+
+
+
+
+
+

== Variable naming

+
+
+

All variable names must be in camel case.

+
+
+
+
const coffeeList: Coffee[];
+
+
+
+
+
+

Declarations

+
+
+

For all variable declarations we must use const or let. var is forbidden. We prefer to use const when possible.

+
+
+
+
+

Programming practices

+
+ +
+
+
+

== Trailing comma

+
+
+

All statements must end with a trailing comma. Example:

+
+
+
+
{
+  one: 'one',
+  two: 'two'  // bad
+}
+{
+  one: 'one',
+  two: 'two', // good
+}
+
+
+
+
+
+

== Arrow functions

+
+
+

All anonymous functions must be defined with the arrow function notation. In most cases it’s not a problem, but sometimes, when you do not want to bind this when you define the function, you can use the other function definition. In these special cases you must disable the linter for those sentences.

+
+
+
+
+

== Comments

+
+
+

Comments must start with a whitespace. Example:

+
+
+
+
//This is a bad comment
+// This is OK
+
+
+
+
+
+

== Quotemarks

+
+
+

For string definitions, we must use single quotes.

+
+
+
+
+

== if statements

+
+
+

In all if statements you always must use brackets. Example:

+
+
+
+
// Bad if statement
+if (condition)
+  return true;
+
+// Good if statement
+if (condition) {
+  return true;
+}
+
+
+
+
+
+

Pre-commit hooks

+
+
+

In order to ensure that your new code follows the coding conventions, devon4node uses by default husky. Husky is a tool that allows you to configure git hooks easily in your project. When you make a git commit in your devon4node project, it will execute two actions:

+
+
+
    +
  • +

    Prettify the staged files

    +
  • +
  • +

    Execute the linter in the staged files

    +
  • +
+
+
+

If any action fails, you won’t be able to commit your new changes.

+
+
+ + + + + +
+ + +If you want to skip the git hooks, you can do a commit passing the --no-verify flag. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/guides-configuration-module.html b/docs/devon4node/1.0/guides-configuration-module.html new file mode 100644 index 00000000..50adb9bd --- /dev/null +++ b/docs/devon4node/1.0/guides-configuration-module.html @@ -0,0 +1,415 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Configuration Module

+
+
+

devon4node provides a way to generate a configuration module inside your application. To generate it you only need to execute the command nest g -c @devon4node/schematics config-module. This command will generate inside your application:

+
+
+
    +
  • +

    Configuration module inside the core module.

    +
  • +
  • +

    config folder where all environment configuration are stored.

    +
    +
      +
    • +

      default configuration: configuration for your local development environment.

      +
    • +
    • +

      develop environment configuration for the develop environment.

      +
    • +
    • +

      uat environment configuration for the uat environment.

      +
    • +
    • +

      production environment configuration for the production environment.

      +
    • +
    • +

      production environment configuration for the production environment.

      +
    • +
    • +

      test environment configuration used by test.

      +
    • +
    +
    +
  • +
+
+
+ + + + + +
+ + +some code generators will add some properties to this module, so, be sure that the config module is the first module that you generate in your application. +
+
+
+
+
+

Use the configuration service

+
+
+

To use the configuration service, you only need to inject it as dependency. As configuration module is defined in the core module, it will be available everywhere in your application. Example:

+
+
+
+
export class MyProvider {
+  constructor(public readonly configService: ConfigurationService) {}
+
+  myMethod() {
+    return this.configService.isDev;
+  }
+}
+
+
+
+
+
+

Choose an environment file

+
+
+

By default, when you use the configuration service it will take the properties defined in the default.ts file. If you want to change the configuration file, you only need to set the NODE_ENV environment property with the name of the desired environment. Examples: in windows execute set NODE_ENV=develop before executing the application, in linux execute NODE_ENV=develop before executing the application or NODE_ENV=develop yarn start.

+
+
+
+
+

Override configuration properties

+
+
+

Sometimes, you want to keep some configuration property secure, and you do not want to publish it to the repository, or you want to reuse some configuration file but you need to change some properties. For those scenarios, you can override configuration properties by defining an environment variable with the same name. For example, if you want to override the property host, you can do: set host="newhost". It also works with objects. For example, if you want to change the value of secret in the property jwtConfig for this example, you can set an environment variable like this: set jwtConfig="{"secret": "newsecret"}". As you can see, this environment variable has a JSON value. It will take the object and merge the jwtConfig property with the properties defined inside the environment variable. The other properties maintain their values. The behaviour is the same for the nested objects.

+
+
+
+
+

Add a configuration property

+
+
+

In order to add a new property to the configuration module, you need to follow some steps:

+
+
+
    +
  • +

    Add the property to IConfig interface in src/app/core/configuration/types.ts file. With this, we can ensure that the ConfigurationService and the environment files has those property at compiling time.

    +
  • +
  • +

    Add the new property getter to ConfigurationService. You must use the get method of ConfigurationService to ensure that the property will be loaded from the desired config file. You can also add extra logic if needed.

    +
  • +
  • +

    Add the property to all config files inside the src/config folder.

    +
  • +
+
+
+

Example:

+
+
+

We want to add the property devonfwUrl to our ConfigurationService, so:

+
+
+

We add the following code in IConfig interface:

+
+
+
+
devonfwUrl: string;
+
+
+
+

Then, we add the getter in the ConfigurationService:

+
+
+
+
get devonfwUrl(): string {
+  return this.get('devonfwUrl')!;
+}
+
+
+
+

Finally, we add the definition in all config files:

+
+
+
+
devonfwUrl: 'https://devonfw.com',
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/guides-dependency-injection.html b/docs/devon4node/1.0/guides-dependency-injection.html new file mode 100644 index 00000000..d5b21b21 --- /dev/null +++ b/docs/devon4node/1.0/guides-dependency-injection.html @@ -0,0 +1,389 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Dependency Injection

+
+
+

The dependency injection is a well-known common design pattern applied by frameworks in all languages, like Spring in Java, Angular and others. The intention of this page is not to explain how dependency injection works, but instead how it is addressed by NestJS.

+
+
+

NestJS resolves the dependency injection in its modules. When you define a provider in a module, it can be injected in all components of the module. By default, those providers are only available in the module where they are defined. The only way to export a module provider to other modules which import it is adding those providers to the exports array. You can also reexport modules.

+
+
+
+
+

Inject dependencies in NestJS

+
+
+

In order to inject a dependency in a NestJS component, you need to declare it in the component constructor. Example:

+
+
+
+
export class CoffeeController {
+  constructor(public readonly coffeeService: CoffeeService) {}
+}
+
+
+
+

NestJS can resolve all dependencies that are defined in the module as provider, and also the dependencies exported by the modules imported. Example:

+
+
+
+
@Module({
+  controllers: [CoffeeController],
+  providers: [CoffeeService],
+})
+export class CoffeeModule {}
+
+
+
+

Injecting dependencies in the constructor is the preferred choice, but sometimes it is not possible. For example, when you are extending another class and want to keep the constructor definition. In these specific cases we can inject dependencies in the class properties. Example:

+
+
+
+
export class CoffeeController {
+  @Inject(CoffeeService)
+  private readonly coffeeService: CoffeeService;
+}
+
+
+
+
+
+

Dependency Graph

+
+
+
+dependency injection1 +
+
+
+

In the previous image, the Module A can inject dependencies exported by Module B, Module E and Module F. If module B reexports Module C and Module D, they are also accessible by Module A.

+
+
+

If there is a conflict with the injection token, it resolves the provider with less distance with the module. For example: if the modules C and F exports a UserService provider, the Module A will resolve the UserService exported by the Module F, because the distance from Module A to Module F is 1, and the distance from Module A to Module C is 2.

+
+
+

When you define a module as global, the dependency injection system is the same. The only difference is that now all modules have a link to the global module. For example, if we make the Module C as global the dependency graph will be:

+
+
+
+dependency injection2 +
+
+
+
+
+

Custom providers

+
+
+

When you want to change the provider name, you can use a NestJS feature called custom providers. For example, if you want to define a provider called MockUserService with the provider token UserService you can define it like:

+
+
+
+
@Module({
+  providers: [{
+    provide: UserService,
+    useValue: MockUserService,
+  }],
+})
+
+
+
+

With this, when you want to inject UserService as a dependency, the MockUserService will be injected.

+
+
+

Custom provider token can be also a string:

+
+
+
+
@Module({
+  providers: [{
+    provide: 'USER_SERVICE',
+    useValue: MockUserService,
+  }],
+})
+
+
+
+

but now, when you want to inject it as dependency you need to use the @Inject decorator.

+
+
+
+
constructor(@Inject('USER_SERVICE') userService: any) {}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/guides-entities.html b/docs/devon4node/1.0/guides-entities.html new file mode 100644 index 00000000..9590f121 --- /dev/null +++ b/docs/devon4node/1.0/guides-entities.html @@ -0,0 +1,277 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Entities

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/guides-eslint-sonarqube-config.html b/docs/devon4node/1.0/guides-eslint-sonarqube-config.html new file mode 100644 index 00000000..753898ff --- /dev/null +++ b/docs/devon4node/1.0/guides-eslint-sonarqube-config.html @@ -0,0 +1,307 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Importing your ESLint reports into SonarQube

+
+
+

This guide covers the import of ESLint reports into SonarQube instances in CI environments, as this is the recommended way of using ESLint and SonarQube for devon4node projects. The prerequisites for this process are a CI environment, preferably a Production Line instance, and the ESLint CLI, which is already included when generating a new devon4node project.

+
+
+
+
+

Configuring the ESLint analysis

+
+
+

You can configure the ESLint analysis parameters in the .eslintrc.js file inside the top-level directory of your project. If you created your node project using the devon4node application schematic, this file will already exist. If you want to make further adjustments to it, have a look at the ESLint documentation.

+
+
+

The ESLint analysis script lint is already configured in the scripts part of your package.json. Simply add -f json > report.json, so that the output of the analysis is saved in a .json file. Additional information to customization options for the ESLint CLI can be found here.

+
+
+

To run the analysis, execute the script with npm run lint inside the base directory of your project.

+
+
+
+
+

Configuring SonarQube

+
+
+

If you haven’t already generated your CICD-related files, follow the tutorial on the devon4node schematic of our CICDGEN project, as you will need a Jenkinsfile configured in your project to proceed.

+
+
+

Inside the script for the SonarQube code analysis in your Jenkinsfile, add the parameter -Dsonar.eslint.reportPaths=report.json. Now, whenever a SonarQube analysis is triggered by your CI environment, the generated report will be loaded into your SonarQube instance. +To avoid duplicated issues, you can associate an empty TypeScript quality profile with your project in its server configurations.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/guides-grapql.html b/docs/devon4node/1.0/guides-grapql.html new file mode 100644 index 00000000..4bc81195 --- /dev/null +++ b/docs/devon4node/1.0/guides-grapql.html @@ -0,0 +1,603 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

GraphQL on Devon4Node

+
+
+

GraphQL is a query language that gets exactly the data that we ask for instead of static predefined responses.

+
+
+

For example, on a regular API a get by id method would return something like:

+
+
+
+
{
+  "location": {
+    "lon": 00.14,
+    "lat": 54.11
+  },
+  "station": "dsrEE3Sg",
+  "visibility": 5000,
+  "wind":{
+    "speed": 6.2,
+    "deg": 78
+  },
+  "logs": [...]
+  ...
+}
+
+
+
+

But if we want to get only the wind data we have to create another endpoint that returns the specified data.

+
+
+

But instead with graphQL we can get different information without creating new endpoints, in this case we only want the wind data so it would return:

+
+
+
+
{
+  "wind":{
+    "speed": 6.2,
+    "deg": 78
+  }
+}
+
+
+
+

To install it:

+
+
+
+
yarn add @nestjs/graphql graphql-tools graphql apollo-server-express
+
+
+
+
+
+

Schema first

+
+ +
+
+
+

==

+
+
+

This tutorial uses the schema first method.

+
+
+

We assume you have already a functioning TODO module / app.

+
+
+

If not you can use Devon4node GraphQL sample +== ==

+
+
+

First we need to import GraphQLModule to our app.module.ts.

+
+
+
+
...
+import { GraphQLModule } from '@nestjs/graphql';
+import { join } from 'path';
+
+@Module({
+  imports: [
+    // Your module import
+    GraphQLModule.forRoot({
+      typePaths: ['./**/*.graphql'],
+      definitions: {
+        path: join(process.cwd(), 'src/graphql.ts'),
+        outputAs: 'class',
+      },
+    }),
+  ],
+})
+export class AppModule {}
+
+
+
+

The typePaths indicates the location of the schema definition files.

+
+
+

The definitions indicates the file where the typescript definitions will automatically save, adding the outputAs: 'class' saves those definitions as classes.

+
+
+
+
+

Schema

+
+
+

Graphql is a typed language with object types, scalars, and enums.

+
+
+

We use query to define the methods we are going to use for fetching data, and mutations are used for modifying this data, similar to how GET and POST work.

+
+
+

Let’s define the elements, queries and mutations that our module is going to have.

+
+
+

For that we have to create a graphql file in our module; in this case we are going to name it "schema.graphql".

+
+
+
+
type Todo {
+  id: ID
+  task: String
+}
+
+type Query {
+  todos: [Todo]
+  todoById: Todo
+}
+
+type Mutation {
+  createTodo(task: String): Todo
+  deleteTodo(id: String): Todo
+}
+
+
+
+

For more information about Types go to the official graphQL documentation

+
+
+
+
+

Resolver

+
+
+

Resolver has the instructions to turn GraphQL orders into the data requested.

+
+
+

To create a resolver we go to our module and then create a new todo.resolver.ts file, import the decorators needed and set the resolver.

+
+
+
+
import { Resolver, Args, Mutation, Query } from '@nestjs/graphql';
+import { TodoService } from '../services/todo.service';
+import { Todo } from '../schemas/todo.schema';
+
+@Resolver()
+export class TodoResolver {
+  constructor(private readonly todoService: TodoService) {}
+
+  @Query('todos')
+  findAll(): Promise<Todo[]> {
+    return this.todoService.findAll();
+  }
+
+  @Query('todoById')
+  findOneById(@Args('id') id: string): Promise<Todo | null> {
+    return this.todoService.findOneById(id);
+  }
+
+  @Mutation()
+  createTodo(@Args('task') task: string): Promise<Todo> {
+    return this.todoService.create(task);
+  }
+
+  @Mutation()
+  deleteTodo(@Args('id') id: string): Promise<Todo | null> {
+    return this.todoService.delete(id);
+  }
+}
+
+
+
+

@Resolver() indicates that the next class is a resolver.

+
+
+

@Query is used to get data.

+
+
+

@Mutation is used to create or modify data.

+
+
+

Here we have also an argument decorator @Args which is an object with the arguments passed into the field in the query.

+
+
+

By default we can access the query or mutation using the method’s name, for example:

+
+
+

For the deleteTodo mutation.

+
+
+
+
mutation {
+  deleteTodo( id: "6f7ed2q8" ){
+    id,
+    task
+  }
+}
+
+
+
+

But if we write something different on the decorator, we change the name, for example:

+
+
+

For the findAll query, we named it todos.

+
+
+
+
{
+  todos{
+    id,
+    task
+  }
+}
+
+
+
+

Also if we go back to the schema.graphql, we will see how we define the query with todos.

+
+
+

Learn more about Resolvers, mutations and their argument decorators on the NestJS documentation.

+
+
+
+
+

Playground

+
+
+

To test our backend we can use tools such as Postman, but graphql already gives us a playground to test our Resolvers, which we can access by default at http://localhost:3000/graphql.

+
+
+

We can call a query, or several queries this way:

+
+
+
+
{
+  findAll{
+    id,
+    task
+  }
+}
+
+
+
+

And the output will look something like:

+
+
+
+
{
+  "data": {
+    "findAll": [
+      {
+        "id": "5fb54b30e686cb49500b6728",
+        "task": "clean dishes"
+      },
+      {
+        "id": "5fb54b3be686cb49500b672a",
+        "task": "burn house"
+      }
+    ]
+  }
+}
+
+
+
+

As we can see, we get a json "data" with an array of results.

+
+
+

And for our mutations it’s very similar, in this case we create a todo with task "rebuild house" and we are going to ask on the response just for the task data, we don’t want the id.

+
+
+
+
mutation{
+  createTodo (
+    task: "rebuild house"
+  ){
+    task
+  }
+}
+
+
+
+

And the output

+
+
+
+
{
+  "data": {
+    "createTodo": {
+      "task": "rebuild house"
+    }
+  }
+}
+
+
+
+

In this case we return just one item so there is no array, we also got just the task data but if we want the id too, we just have to add it on the request.

+
+
+

To make the playground unavailable we can add an option to the app.module import:

+
+
+
+
...
+GraphQLModule.forRoot({
+  ...
+  playground: false,
+}),
+...
+
+
+
+

For further information go to the official NestJS documentation

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/guides-key-principles.html b/docs/devon4node/1.0/guides-key-principles.html new file mode 100644 index 00000000..3416eb9b --- /dev/null +++ b/docs/devon4node/1.0/guides-key-principles.html @@ -0,0 +1,388 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Key Principles

+
+
+

devon4node is built following some basic principles like:

+
+
+ +
+
+

But key principles that best define devon4node (and are inherited from NestJS) are:

+
+
+
    +
  • +

    Simplicity (aka KISS)

    +
  • +
  • +

    Reusability

    +
  • +
  • +

    Productivity

    +
  • +
+
+
+
+
+

Simplicity

+
+
+

In devon4node we tried to do everything as simple as possible. Following this principle we will be able to build easy-to-maintain applications.

+
+
+

For example, in order to expose all CRUD operations for an entity, you only need to create a controller like:

+
+
+
+
@Crud({
+  model: {
+    type: Employee,
+  },
+})
+@CrudType(Employee)
+@Controller('employee/employees')
+export class EmployeeCrudController {
+  constructor(public service: EmployeeCrudService) {}
+}
+
+
+
+

You can find this code in the employee example. Only with this code you are exposing the full CRUD operations for the employee entity. As you can see, it’s an empty class with some decorators and the EmployeeCrudService injected as dependency. Simple, isn’t it? The EmployeeCrudService is also simple:

+
+
+
+
@Injectable()
+export class EmployeeCrudService extends TypeOrmCrudService<Employee> {
+  constructor(@InjectRepository(Employee) repo: Repository<Employee>) {
+    super(repo);
+  }
+}
+
+
+
+

Another empty class which extends from TypeOrmCrudService<Employee> and injects the Employee Repository as dependency. Nothing else.

+
+
+

With these examples you can get an idea of how simple it can be to code a devon4node application.

+
+
+
+
+

Reusability

+
+
+

NestJS (and devon4node) applications are designed in a modular way. This allows you to isolate some functionality in a module, and then reuse it in every application that you need. This is the same behaviour that Angular has. You can see it in the NestJS modules like TypeORM, Swagger and others. Also, in devon4node we have the Mailer module.

+
+
+

In your applications, you only need to import those modules and then you will be able to use the functionality that they implement. Example

+
+
+
+
@Module({
+  imports: [ AuthModule, ConfigurationModule ],
+})
+export class SomeModule {}
+
+
+
+
+
+

Productivity

+
+
+

devon4node is designed to create secure enterprise applications, but it also allows you to do so in a fast way. To increase productivity, devon4node provides schematics in order to generate some boilerplate code.

+
+
+

For example, to create a module you need to create a new file for a module (or copy it) and write the code, then you need to import it in the AppModule. This is an easy example, but you can introduce some errors: forget to import it in the AppModule, introduce errors with the copy/paste and so on. By using the command nest g module --name <module-name> it will do everything for you. Just a simple command. In this specific case probably you do not see any advantage, but there are other complex cases where you can generate more complex code with nest and devon4node schematics command.

+
+
+

See code generation in order to know how to increase your productivity creating devon4node applications.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/guides-logger.html b/docs/devon4node/1.0/guides-logger.html new file mode 100644 index 00000000..8f474a22 --- /dev/null +++ b/docs/devon4node/1.0/guides-logger.html @@ -0,0 +1,310 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Logger

+
+
+

When you create a new devon4node application, it already has a logger: src/app/shared/logger/winston.logger.ts. This logger provides the methods log, error and warn. All of those methods will write a log message, but with a different log level.

+
+
+

The winston logger has two transports: one to log everything inside the file logs/general.log and the other to log only the error logs inside the file logs/error.log. In addition, it uses the default NestJS logger in order to show the logs in the console.

+
+
+

As you can see, it is a simple example of how to use a logger in a devon4node application. It will be updated to a more complex one in the next versions.

+
+
+
+
+

How to use logger

+
+
+

In order to use the logger you only need to inject the logger as a dependency:

+
+
+
+
constructor(logger: WinstonLogger){}
+
+
+
+

and then use it

+
+
+
+
async getAll() {
+  this.service.getAll();
+  this.logger.log('Returning all data');
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/guides-mailer.html b/docs/devon4node/1.0/guides-mailer.html new file mode 100644 index 00000000..4cb70d48 --- /dev/null +++ b/docs/devon4node/1.0/guides-mailer.html @@ -0,0 +1,624 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Mailer Module

+
+
+

This module enables you to send emails in devon4node. It also provides a template engine using Handlebars.

+
+
+

It is a NestJS module that injects into your application a MailerService, which is responsible for sending the emails using the nodemailer library.

+
+
+
+
+

Installing

+
+
+

Execute the following command in a devon4node project:

+
+
+
+
yarn add @devon4node/mailer
+
+
+
+
+
+

Configuring

+
+
+

To configure the mailer module, you only need to import it in your application into another module. Example:

+
+
+
+
@Module({
+  ...
+  imports: [
+    MailerModule.forRoot(),
+  ],
+  ...
+})
+
+
+
+

You must pass the configuration using the forRoot or forRootAsync methods.

+
+
+
+
+

forRoot()

+
+
+

The forRoot method receives an MailerModuleOptions object as parameter. It configures the MailerModule using the input MailerModuleOptions object.

+
+
+

The structure of MailerModuleOptions is:

+
+
+
+
{
+  hbsOptions?: {
+    templatesDir: string;
+    extension?: string;
+    partialsDir?: string;
+    helpers?: IHelperFunction[];
+    compilerOptions?: ICompileOptions;
+  },
+  mailOptions?: nodemailerSmtpTransportOptions;
+  emailFrom: string;
+}
+
+
+
+

Here, you need to specify the Handlebars compile options, the nodemailer transport options and the email address which will send the emails. +Then, you need to call to forRoot function in the module imports. Example:

+
+
+
+
@Module({
+  ...
+  imports: [
+    MailerModule.forRoot({
+      mailOptions: {
+        host: 'localhost',
+        port: 1025,
+        secure: false,
+        tls: {
+          rejectUnauthorized: false,
+        },
+      },
+      emailFrom: 'noreply@capgemini.com',
+      hbsOptions: {
+        templatesDir: join(__dirname, '../..', 'templates/views'),
+        partialsDir: join(__dirname, '../..', 'templates/partials'),
+        helpers: [{
+          name: 'fullname',
+          func: person => `${person.name} ${person.surname}`,
+        }],
+      },
+    }),
+  ...
+})
+
+
+
+
+
+

forRootAsync()

+
+
+

The method forRootAsync enables you to get the mailer configuration in an asynchronous way. It is useful when you need to get the configuration using, for example, a service (e.g. ConfigurationService).

+
+
+

Example:

+
+
+
+
@Module({
+  ...
+  imports: [
+    MailerModule.forRootAsync({
+      imports: [ConfigurationModule],
+      useFactory: (config: ConfigurationService) => {
+        return config.mailerConfig;
+      },
+      inject: [ConfigurationService],
+    }),
+  ...
+})
+
+
+
+

In this example, we use the ConfigurationService in order to get the MailerModuleOptions (the same as forRoot)

+
+
+
+
+

Usage

+
+
+

In order to use it, you only need to inject the MailerService using dependency injection.

+
+
+

Example:

+
+
+
+
@Injectable()
+export class CatsService {
+  constructor(private readonly mailer: MailerService) {}
+}
+
+
+
+

Then, you only need to use the methods provided by the MailerService in your service. Take into account that you can inject it in every place that support NestJS dependency injection.

+
+
+
+
+

MailerService methods

+
+ +
+
+
+

== sendPlainMail

+
+
+

The method sendPlainMail receives the email data and sends an email.

+
+
+

The method signatures are:

+
+
+
+
sendPlainMail(emailOptions: SendMailOptions): Promise<SentMessageInfo>;
+sendPlainMail(to: string, subject: string, mail: string): Promise<SentMessageInfo>;
+
+
+
+

Examples:

+
+
+
+
this.mailer.sendPlainMail({
+  to: 'example@example.com',
+  subject: 'This is a subject',
+  html: '<h1>Hello world</h1>'
+});
+this.mailer.sendPlainMail('example@example.com', 'This is a subject', '<h1>Hello world</h1>');
+
+
+
+
+
+

== sendTemplateMail

+
+
+

The method sendTemplateMail sends an email based on a Handlebars template. The templates are registered using the templatesDir option or using the addTemplate method. +The template name is the name of the template (without extension) or the first parameter of the method addTemplate.

+
+
+

The method signatures are:

+
+
+
+
sendTemplateMail(emailOptions: SendMailOptions, templateName: string, emailData: any, hbsOptions?: RuntimeOptions): Promise<SentMessageInfo>;
+sendTemplateMail(to: string, subject: string, templateName: string, emailData: any, hbsOptions?: RuntimeOptions): Promise<SentMessageInfo>;
+
+
+
+

Examples:

+
+
+
+
this.mailer.sendTemplateMail({
+  to: 'example@example.com',
+  subject: 'This is a subject',
+  html: '<h1>Hello world</h1>'
+}, 'template1', { person: {name: 'Dario', surname: 'Rodriguez'}});
+this.mailer.sendTemplateMail('example@example.com', 'This is a subject', 'template1', { person: {name: 'Dario', surname: 'Rodriguez'}});
+
+
+
+
+
+

== addTemplate

+
+
+

Adds a new template to the MailerService.

+
+
+

Method signature:

+
+
+
+
addTemplate(name: string, template: string, options?: CompileOptions): void;
+
+
+
+

Example:

+
+
+
+
this.mailer.addTemplate('newTemplate', '<html><head></head><body>{{>partial1}}</body></html>')
+
+
+
+
+
+

== registerPartial

+
+
+

Register a new partial in Handlebars.

+
+
+

Method signature:

+
+
+
+
registerPartial(name: string, partial: Handlebars.Template<any>): void;
+
+
+
+

Example:

+
+
+
+
this.mailer.registerPartial('partial', '<h1>Hello World</h1>')
+
+
+
+
+
+

== registerHelper

+
+
+

Register a new helper in Handlebars.

+
+
+

Method signature:

+
+
+
+
registerHelper(name: string, helper: Handlebars.HelperDelegate): void;
+
+
+
+

Example:

+
+
+
+
this.mailer.registerHelper('fullname', person => `${person.name} ${person.surname}`)
+
+
+
+
+
+

Handlebars templates

+
+
+

As mentioned above, this module allows you to use Handlebars as the template engine, but it is optional. If you do not need Handlebars, you just need to keep the hbsOptions undefined.

+
+
+

In order to get the templates from the file system, you can specify the template folder, the partials folder and the helpers. +At the moment of module initialization, it will read the content of the template folder, and will register every file with the name (without extension) and the content as a Handlebars template. It will do the same for the partials.

+
+
+

You can specify the extension of template files using the extension parameter. The default value is .handlebars

+
+
+
+
+

Local development

+
+
+

If you want to work with this module but you don’t have a SMTP server, you can use the streamTransport. Example:

+
+
+
+
{
+  mailOptions: {
+    streamTransport: true,
+    newline: 'windows',
+  },
+  emailFrom: ...
+  hbsOptions: ...
+}
+
+
+
+

Then, you need to get the sendPlainMail or sendTemplateMail result, and print the email to the standard output (STDOUT). Example:

+
+
+
+
const mail = await this.mailer.sendTemplateMail(...);
+
+mail.message.pipe(process.stdout);
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/guides-serializer.html b/docs/devon4node/1.0/guides-serializer.html new file mode 100644 index 00000000..1731e134 --- /dev/null +++ b/docs/devon4node/1.0/guides-serializer.html @@ -0,0 +1,338 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Serializer

+
+
+

Serialization is the process of translating data structures or object state into a format that can be transmitted across network and reconstructed later.

+
+
+

NestJS by default serializes all data to JSON (JSON.stringify). Sometimes this is not enough. In some situations you need to exclude some property (e.g. password). Instead of doing it manually, devon4node provides an interceptor (ClassSerializerInterceptor) that will do it for you. You only need to return a class instance as always and the interceptor will transform those classes to the expected data.

+
+
+

The ClassSerializerInterceptor takes the class-transformer decorators in order to know how to transform the class and then send the result to the client.

+
+
+

Some of class-transformer decorators are:

+
+
+
    +
  • +

    Expose

    +
  • +
  • +

    Exclude

    +
  • +
  • +

    Type

    +
  • +
  • +

    Transform

    +
  • +
+
+
+

And methods to transform data:

+
+
+
    +
  • +

    plainToClass

    +
  • +
  • +

    plainToClassFromExist

    +
  • +
  • +

    classToPlain

    +
  • +
  • +

    classToClass

    +
  • +
  • +

    serialize

    +
  • +
  • +

    deserialize

    +
  • +
  • +

    deserializeArray

    +
  • +
+
+
+

See the class-transformer page for more information.

+
+
+

See NestJS serialization page for more information about ClassSerializerInterceptor.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/guides-swagger.html b/docs/devon4node/1.0/guides-swagger.html new file mode 100644 index 00000000..f4309707 --- /dev/null +++ b/docs/devon4node/1.0/guides-swagger.html @@ -0,0 +1,303 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Swagger

+
+
+

We can use swagger (OpenAPI) in order to describe the endpoints that our application exposes.

+
+
+

NestJS provides a module which will read the code of our application and will expose one endpoint where we can see the swagger.

+
+
+

Adding swagger to a devon4node application is simple: you only need to execute the command nest g -c @devon4node/schematics swagger and it will do everything for you. The next time that you start your application, you will be able to see the swagger at the /v1/api endpoint.

+
+
+

The swagger module can read your code in order to create the swagger definition, but sometimes you need to help it by decorating your handlers.

+
+
+

For more information about decorators and other behaviour about swagger module, you can see the NestJS swagger documentation page

+
+
+ + + + + +
+ + +the OpenAPI specification that this module supports is v2.0. The OpenAPI v3.0 is not available yet by using this module. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/guides-typeorm.html b/docs/devon4node/1.0/guides-typeorm.html new file mode 100644 index 00000000..23bc77b6 --- /dev/null +++ b/docs/devon4node/1.0/guides-typeorm.html @@ -0,0 +1,407 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

TypeORM

+
+
+

TypeORM is the default ORM provided by devon4node. It supports MySQL, MariaDB, Postgres, CockroachDB, SQLite, Microsoft SQL Server, Oracle, sql.js relational database and also supports MongoDB NoSQL database.

+
+
+

Adding TypeORM support to a devon4node application is very easy: you only need to execute the command nest g -c @devon4node/schematics typeorm and it will add all required dependencies to the project and also import the @nestjs/typeorm module.

+
+
+

For more information about TypeORM and the integration with NestJS you can visit TypeORM webpage, TypeORM GitHub repository and NestJS TypeORM documentation page

+
+
+
+
+

Configuration

+
+
+

When you have the configuration module, the TypeORM generator will add one property in order to be able to configure the database depending on the environment. Example:

+
+
+
+
database: {
+  type: 'sqlite',
+  database: ':memory:',
+  synchronize: false,
+  migrationsRun: true,
+  logging: true,
+  entities: ['dist/**/*.entity.js'],
+  migrations: ['dist/migration/**/*.js'],
+  subscribers: ['dist/subscriber/**/*.js'],
+  cli: {
+    entitiesDir: 'src/entity',
+    migrationsDir: 'src/migration',
+    subscribersDir: 'src/subscriber',
+  },
+},
+
+
+
+

This object is a TypeORM ConnectionOptions. For more information about it visit the TypeORM Connection Options page.

+
+
+

There is also a special case: the default configuration. As the devon4node CLI needs the database configuration when you use the devon4node db command, we also provide the ormconfig.json file. In this file you must put the configuration for your local environment. In order to avoid duplicating the configuration for the local environment, in the default config file the database property is set up like:

+
+
+
+
database: require('../../ormconfig.json'),
+
+
+
+

So, you only need to maintain the ormconfig.json file for the local environment.

+
+
+
+
+

Entity

+
+
+

Entity is a class that maps to a database table. The devon4node schematics has a generator to create new entities. You only need to execute the command nest g -c @devon4node/schematics entity <entity-name> and it will generate the entity.

+
+
+

In the entity, you must define all columns, relations, and primary keys of your database table. By default, devon4node provides a class named BaseEntity. All entities created with the devon4node schematics will extend the BaseEntity. This entity provides you some common columns:

+
+
+
    +
  • +

    id: the primary key of your table

    +
  • +
  • +

    version: the version of the entry (used for auditing purposes)

    +
  • +
  • +

    createdAt: creation date of the entry (used for auditing purposes)

    +
  • +
  • +

    updatedAt: last update date of the entry (used for auditing purposes)

    +
  • +
+
+
+

For more information about Entities, please visit the TypeORM entities page

+
+
+
+
+

Repository

+
+
+

With repositories, you can manage (insert, update, delete, load, etc.) a concrete entity. Using this pattern, we have separated the data (Entities) from the methods to manage it (Repositories).

+
+
+

To use a repository you only need to:

+
+
+
    +
  • +

    Import it in the module as follows:

    +
    +
    +
    @Module({
    +  imports: [TypeOrmModule.forFeature([Employee])],
    +})
    +
    +
    +
    + + + + + +
    + + +if you generate the entities with the devon4node schematic, this step is not necessary, devon4node schematic will do it for you. +
    +
    +
  • +
  • +

    Inject the repository as dependency in your service:

    +
    +
    +
    constructor(@InjectRepository(Employee) employeeRepository: Repository<Employee>) {}
    +
    +
    +
  • +
+
+
+

You can see more details in the NestJS database and NestJS TypeORM documentation pages.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/guides-validation.html b/docs/devon4node/1.0/guides-validation.html new file mode 100644 index 00000000..62cf51fa --- /dev/null +++ b/docs/devon4node/1.0/guides-validation.html @@ -0,0 +1,340 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Validation

+
+
+

To be sure that your application will work well, you must validate any input data. devon4node by default provides a ValidationPipe. This ValidationPipe is responsible for validating the request input and, if the input does not pass the validation process, it returns a 400 Bad Request error.

+
+
+
+
+

Defining Validators

+
+
+

The ValidationPipe needs to know how to validate the input. For that purpose we use the class-validator package. This package allows you to define the validation of a class by using decorators.

+
+
+

For example:

+
+
+
+
export class Coffee {
+  @IsDefined()
+  @IsString()
+  @MaxLength(255)
+  name: string;
+
+  @IsDefined()
+  @IsString()
+  @MaxLength(25)
+  type: string;
+
+  @IsDefined()
+  @IsNumber()
+  quantity: number;
+}
+
+
+
+

As you can see in the previous example, we used some decorators in order to define the validators for every property of the Coffee class. You can find all decorators in the class-validator github repository.

+
+
+

Now, when you want to receive a Coffee as input in some endpoint, it will execute the validations before executing the handler function.

+
+
+ + + + + +
+ + +In order to be able to use the class-validator package, you must use classes instead of interfaces. As you know, interfaces disappear at compile time, and class-validator needs to know the metadata of the properties in order to be able to validate. +
+
+
+ + + + + +
+ + +The ValidationPipe only works if you put a specific type in the handler definition. For example, if you define a handler like getCoffee(@Body() coffee: any): Coffee {} the ValidationPipe will not do anything. You must specify the type of the input: getCoffee(@Body() coffee: Coffee): Coffee {} +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/index.html b/docs/devon4node/1.0/index.html new file mode 100644 index 00000000..1d669aab --- /dev/null +++ b/docs/devon4node/1.0/index.html @@ -0,0 +1,373 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4node Wiki

+
+ +
+
+
+

Layers

+
+
+ +
+
+
+ +
+

devon4node applications

+ +
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/layer-controller.html b/docs/devon4node/1.0/layer-controller.html new file mode 100644 index 00000000..4cce3deb --- /dev/null +++ b/docs/devon4node/1.0/layer-controller.html @@ -0,0 +1,354 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Controller Layer

+
+
+

The controller layer is responsible for handling the requests/responses to the client. This layer knows everything about the endpoints exposed, the expected input (and also validate it), the response schema, the HTTP codes for the response and the HTTP errors that every endpoint can send.

+
+
+
+
+

How to implement the controller layer

+
+
+

This layer is implemented by the NestJS controllers. Let’s see how it works with an example:

+
+
+
+
@Controller('coffee/coffees')
+export class CoffeeController {
+  constructor(private readonly coffeeService: CoffeeService) {}
+
+  @Post('search')
+  @HttpCode(200)
+  async searchCoffees(@Body() search: CoffeeSearch): Promise<Array<Coffee>> {
+    try {
+      return await this.coffeeService.searchCoffees(search);
+    } catch (error) {
+      throw new BadRequestException(error.message, error);
+    }
+  }
+}
+
+
+
+

As you can see in the example, to create a controller you only need to decorate a class with the Controller decorator. This example is handling all requests to coffee/coffees.

+
+
+

Also, you have defined one handler. This handler is listening to POST request for the route coffee/coffees/search. In addition, this handler is waiting for a CoffeeSearch object and returns an array of Coffee. In order to keep it simple, that’s all that you need in order to define one route.

+
+
+

One important thing that can be observed in this example is that there is no business logic. It delegates to the service layer and return the response to the client. At this point, transformations from the value that you receive from the service layer to the desired return type are also allowed.

+
+
+

By default, every POST handler returns an HTTP 201 response with the returned value as body, but you can change it in an easy way by using decorators. As you can see in the example, the handler will return an HTTP 200 response (@HttpCode(200)).

+
+
+

Finally, if the service layer throws an error, this handler will catch it and return a HTTP 400 Bad Request response. The controller layer is the only one that knows about the answers to the client, therefore it is the only one that knows which error codes should be sent.

+
+
+
+
+

Validation

+
+
+

In order not to propagate errors from the incoming payload, we need to validate all data in the controller layer. See the validation guide for more information.

+
+
+
+
+

Error handling

+
+
+

In the previous example, we catch all errors using the try/catch statement. This is not the usual implementation. In order to catch errors properly, you must use exception filters. Example:

+
+
+
+
@Controller('coffee/coffees')
+export class CoffeeController {
+  constructor(private readonly coffeeService: CoffeeService) {}
+
+  @Post('search')
+  @HttpCode(200)
+  @UseFilters(CaffeExceptionFilter)
+  async searchCoffees(@Body() search: CoffeeSearch): Promise<Array<Coffee>> {
+    return await this.coffeeService.searchCoffees(search);
+  }
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/layer-dataaccess.html b/docs/devon4node/1.0/layer-dataaccess.html new file mode 100644 index 00000000..d9244b36 --- /dev/null +++ b/docs/devon4node/1.0/layer-dataaccess.html @@ -0,0 +1,303 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Data Access Layer

+
+
+

The data access layer is responsible for all outgoing connections to access and process data. This is mainly about accessing data from a persistent data-store but also about invoking external services.

+
+
+

This layer is implemented using providers. Those providers could be: services, repositories and others. Although services can be used for this layer, they should not be confused with the service layer. Services in this layer are responsible for data access, while services in the service layer are responsible for business logic.

+
+
+
+
+

Database

+
+
+

We strongly recommend TypeORM for database management in devon4node applications. TypeORM supports the most commonly used relational databases, like Oracle, MySQL, MariaDB, PostgreSQL, SQLite, MSSQL and others. Also, it supports non-relational databases like MongoDB.

+
+
+

TypeORM supports Active Record and Repository patterns. We recommend to use the Repository pattern. This pattern allows you to separate the data objects from the methods to manipulate the database.

+
+
+
+
+

External APIs

+
+
+

In order to manage the data in an external API, you need to create a service for that purpose. To manage the connections with the external API, we strongly recommend the NestJS HTTP module.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/layer-service.html b/docs/devon4node/1.0/layer-service.html new file mode 100644 index 00000000..5f29f224 --- /dev/null +++ b/docs/devon4node/1.0/layer-service.html @@ -0,0 +1,308 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Service Layer

+
+
+

The logic layer is the heart of the application and contains the main business logic. It knows everything about the business logic, but it does not know about the response to the client and the HTTP errors. That’s why this layer is separated from the controller layer.

+
+
+
+
+

How to implement the service layer

+
+
+

This layer is implemented by services, a specific kind of providers. Let’s see one example:

+
+
+
+
@Injectable()
+export class CoffeeService {
+  constructor(@InjectRepository(Coffee) private readonly coffeeRepository: Repository<Coffee>) {}
+
+  async searchCoffees(search: CoffeeSearch): Promise<Array<Coffee>> {
+    const coffees = await this.coffeeRepository.find();
+
+    return doSomeBusinessLogic(coffees);
+  }
+}
+
+
+
+

This is the CoffeeService that we inject in the controller layer example. As you can see, a service is a regular class with the Injectable decorator. Also, it injects the data access layer as a dependency (in this specific case, the Repository<Coffee>).

+
+
+

The services expose methods in order to transform the input from the controllers by applying some business logic. They can also request data from the data access layer. And that’s all.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/master-devon4node.html b/docs/devon4node/1.0/master-devon4node.html new file mode 100644 index 00000000..f7a1b346 --- /dev/null +++ b/docs/devon4node/1.0/master-devon4node.html @@ -0,0 +1,2768 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

NodeJS

+
+
+

devonfw is a platform which provides solutions to building business applications which combine best-in-class frameworks and libraries as well as industry proven practices and code conventions. devonfw is 100% Open Source (Apache License version 2.0) since the beginning of 2018.

+
+
+

devon4node is the NodeJS stack of devonfw. It allows you to build business applications (backends) using NodeJS technology in standardized way based on established best-practices.

+
+
+

devon4node is based on NestJS. Nest (NestJS) is a framework for building efficient, scalable Node.js server-side applications. It uses progressive TypeScript and combines elements of OOP (Object Oriented Programming), FP (Functional Programming), and FRP (Functional Reactive Programming).

+
+ +
+

devon4node Architecture

+
+

As we have mentioned in the introduction, devon4node is based on NestJS. Nest (NestJS) is a framework for building efficient, scalable Node.js server-side applications.

+
+
+
+

HTTP layer

+
+

By using NestJS, devon4node is a platform-agnostic framework. NestJS focuses only on the logical layer, and delegates the transport layer to another framework, such as ExpressJS. You can see it in the following diagram:

+
+
+
+devon4node architecture +
+
+
+

As you can see, NestJS does not listen directly for incoming requests. It has an adapter to communicate with ExpressJS, and ExpressJS is responsible for that. ExpressJS is only one of the frameworks that NestJS can work with. There is also another adapter available out-of-the-box: the Fastify adapter. With that, you can replace ExpressJS with Fastify but still use all your NestJS components. You can also create your own adapter to make NestJS work with another HTTP framework.

+
+
+

At this point, you may wonder: why is NestJS (and devon4node) using ExpressJS by default instead of Fastify? Because, as you can see in the previous diagram, there is a component that is dependent on the HTTP framework: the middleware. As ExpressJS is the most widely used framework, there exists a lot of middleware for it, so, in order to reuse it in our NestJS applications, NestJS uses ExpressJS by default. Anyway, you should consider which HTTP framework best fits your requirements.

+
+
+
+

devon4node layers

+
+

As other devonfw technologies, devon4node separates the application into layers.

+
+
+

Those layers are:

+
+ +
+
+layers +
+
+
+
+

devon4node application structure

+
+

Although there are many frameworks to create backend applications in NodeJS, none of them effectively solves the main problem: architecture. This is the main reason we have chosen NestJS for devon4node applications. Besides, NestJS is highly inspired by Angular, therefore a developer who knows Angular can use their already acquired knowledge to write devon4node applications.

+
+
+

NestJS adopts various Angular concepts, such as dependency injection, piping, interceptors and modularity, among others. By using modularity we can reuse some of our modules between applications. One example that devon4node provide is the mailer module.

+
+
+
+

Modules

+
+

Creating an application module is simple: you only need to create an empty class with the Module decorator:

+
+
+
+
@Module({})
+export class AppModule {}
+
+
+
+

In the module you can define:

+
+
+
    +
  • +

    Imports: the list of imported modules that export the providers which are required in this module

    +
  • +
  • +

    Controllers: the set of controllers defined in this module which have to be instantiated

    +
  • +
  • +

    Providers: the providers that will be instantiated by the Nest injector and that may be shared at least across this module

    +
  • +
  • +

    Exports: the subset of providers that are provided by this module and should be available in other modules which import this module

    +
  • +
+
+
+

The main difference between Angular and NestJS is NestJS modules encapsulates providers by default. This means that it’s impossible to inject providers that are neither directly part of the current module nor exported from the imported modules. Thus, you may consider the exported providers from a module as the module’s public interface, or API. Example of modules graph:

+
+
+
+modules +
+
+
+

In devon4node we have three different kinds of modules:

+
+
+
    +
  • +

    AppModule: this is the root module. Everything that our application need must be imported here.

    +
  • +
  • +

Global Modules: this is a special kind of module. When you make a module global, it’s accessible to every module in your application. You can see it in the next diagram. It’s the same as the previous one, but now the CoreModule is global:

    +
    +
    +module2 +
    +
    +
    +

One example of a global module is the CoreModule. In the CoreModule you must import every module whose providers need to be accessible in all modules of your application.

    +
    +
  • +
  • +

    Feature (or application) modules: modules which contains the logic of our application. We must import it in the AppModule.

    +
  • +
+
+
+

For more information about modules, see NestJS documentation page

+
+
+
+

Folder structure

+
+

devon4node defines a folder structure that every devon4node application must follow. The folder structure is:

+
+
+
+
├───src
+│   ├───app
+│   │   ├───core
+│   │   │   ├───auth
+│   │   │   ├───configuration
+│   │   │   ├───user
+│   │   │   └───core.module.ts
+│   │   ├───shared
+│   │   └───feature
+│   │       ├───sub-module
+│   │       │   ├───controllers
+│   │       │   ├───...
+│   │       │   ├───services
+│   │       │   └───sub-module.module.ts
+│   │       ├───controllers
+│   │       ├───interceptors
+│   │       ├───pipes
+│   │       ├───guards
+│   │       ├───filters
+│   │       ├───middlewares
+│   │       ├───model
+│   │       │   ├───dto
+│   │       │   └───entities
+│   │       ├───services
+│   │       └───feature.module.ts
+│   ├───config
+│   └───migration
+├───test
+└───package.json
+
+
+
+

The devon4node schematics ensure this folder structure, so please do not create files on your own; use the devon4node schematics.

+
+
+
+

NestJS components

+
+

NestJS provides several components that you can use in your application:

+
+
+ +
+
+

In the NestJS documentation you can find all information about each component. But, something that is missing in the documentation is the execution order. Every component can be defined in different levels: globally, in the controller or in the handler. As middleware is part of the HTTP server we can define it in a different way: globally or in the module.

+
+
+
+components +
+
+
+

It is not necessary to have components defined at every level. For example, you can have an interceptor defined globally but no other at the controller or handler level. If nothing is defined at some level, the request will continue to the next component.

+
+
+

As you can see in the previous image, the first component which receives the request is the globally defined middleware. Then, it sends the request to the module middleware. Each of them can return a response to the client without passing the request to the next level.

+
+
+

Then, the request continues to the guards: first the global guard, next the controller guard and finally the handler guard. At this point, any component can throw an exception, and the exception filter will catch it and send a proper error message to the client. We do not draw the filters in the graphic in order to simplify it.

+
+
+

After the guards, it is time for the interceptors: global interceptors, controller interceptors and handler interceptors. And last, before arriving at the handler inside the controller, the request passes through the pipes.

+
+
+

When the handler has the response ready to send to the client, it does not go directly to the client. It comes again to the interceptors, so we can also intercept the response. This time the order is reversed: handler interceptors, controller interceptors and global interceptors. After that, we can finally send the response to the client.

+
+
+

Now, with this in mind, you are able to create the components in a better way.

+
+
+
+
+
+

Layers

+
+ +
+

Controller Layer

+
+

The controller layer is responsible for handling the requests/responses to the client. This layer knows everything about the endpoints exposed, the expected input (and also validate it), the response schema, the HTTP codes for the response and the HTTP errors that every endpoint can send.

+
+
+
+

How to implement the controller layer

+
+

This layer is implemented by the NestJS controllers. Let’s see how it works with an example:

+
+
+
+
@Controller('coffee/coffees')
+export class CoffeeController {
+  constructor(private readonly coffeeService: CoffeeService) {}
+
+  @Post('search')
+  @HttpCode(200)
+  async searchCoffees(@Body() search: CoffeeSearch): Promise<Array<Coffee>> {
+    try {
+      return await this.coffeeService.searchCoffees(search);
+    } catch (error) {
+      throw new BadRequestException(error.message, error);
+    }
+  }
+}
+
+
+
+

As you can see in the example, to create a controller you only need to decorate a class with the Controller decorator. This example is handling all requests to coffee/coffees.

+
+
+

Also, you have defined one handler. This handler is listening to POST request for the route coffee/coffees/search. In addition, this handler is waiting for a CoffeeSearch object and returns an array of Coffee. In order to keep it simple, that’s all that you need in order to define one route.

+
+
+

One important thing that can be observed in this example is that there is no business logic. It delegates to the service layer and return the response to the client. At this point, transformations from the value that you receive from the service layer to the desired return type are also allowed.

+
+
+

By default, every POST handler returns an HTTP 201 response with the returned value as body, but you can change it in an easy way by using decorators. As you can see in the example, the handler will return an HTTP 200 response (@HttpCode(200)).

+
+
+

Finally, if the service layer throws an error, this handler will catch it and return a HTTP 400 Bad Request response. The controller layer is the only one that knows about the answers to the client, therefore it is the only one that knows which error codes should be sent.

+
+
+
+

Validation

+
+

In order not to propagate errors from the incoming payload, we need to validate all data in the controller layer. See the validation guide for more information.

+
+
+
+

Error handling

+
+

In the previous example, we catch all errors using the try/catch statement. This is not the usual implementation. In order to catch errors properly, you must use exception filters. Example:

+
+
+
+
@Controller('coffee/coffees')
+export class CoffeeController {
+  constructor(private readonly coffeeService: CoffeeService) {}
+
+  @Post('search')
+  @HttpCode(200)
+  @UseFilters(CaffeExceptionFilter)
+  async searchCoffees(@Body() search: CoffeeSearch): Promise<Array<Coffee>> {
+    return await this.coffeeService.searchCoffees(search);
+  }
+}
+
+
+ +
+
+

Service Layer

+
+

The logic layer is the heart of the application and contains the main business logic. It knows everything about the business logic, but it does not know about the response to the client and the HTTP errors. That’s why this layer is separated from the controller layer.

+
+
+
+

How to implement the service layer

+
+

This layer is implemented by services, a specific kind of providers. Let’s see one example:

+
+
+
+
@Injectable()
+export class CoffeeService {
+  constructor(@InjectRepository(Coffee) private readonly coffeeRepository: Repository<Coffee>) {}
+
+  async searchCoffees(search: CoffeeSearch): Promise<Array<Coffee>> {
+    const coffees = await this.coffeeRepository.find();
+
+    return doSomeBusinessLogic(coffees);
+  }
+}
+
+
+
+

This is the CoffeeService that we inject in the controller layer example. As you can see, a service is a regular class with the Injectable decorator. Also, it injects the data access layer as a dependency (in this specific case, the Repository<Coffee>).

+
+
+

The services expose methods in order to transform the input from the controllers by applying some business logic. They can also request data from the data access layer. And that’s all.

+
+ +
+
+

Data Access Layer

+
+

The data access layer is responsible for all outgoing connections to access and process data. This is mainly about accessing data from a persistent data-store but also about invoking external services.

+
+
+

This layer is implemented using providers. Those providers could be: services, repositories and others. Although services can be used for this layer, they should not be confused with the service layer. Services in this layer are responsible for data access, while services in the service layer are responsible for business logic.

+
+
+
+

Database

+
+

We strongly recommend TypeORM for database management in devon4node applications. TypeORM supports the most commonly used relational databases, like Oracle, MySQL, MariaDB, PostgreSQL, SQLite, MSSQL and others. Also, it supports non-relational databases like MongoDB.

+
+
+

TypeORM supports Active Record and Repository patterns. We recommend to use the Repository pattern. This pattern allows you to separate the data objects from the methods to manipulate the database.

+
+
+
+

External APIs

+
+

In order to manage the data in an external API, you need to create a service for that purpose. To manage the connections with the external API, we strongly recommend the NestJS HTTP module.

+
+
+
+
+
+

Guides

+
+ +
+

Key Principles

+
+

devon4node is built following some basic principles like:

+
+
+ +
+
+

But key principles that best define devon4node (and are inherited from NestJS) are:

+
+
+
    +
  • +

    Simplicity (aka KISS)

    +
  • +
  • +

    Reusability

    +
  • +
  • +

    Productivity

    +
  • +
+
+
+
+

Simplicity

+
+

In devon4node we tried to do everything as simple as possible. Following this principle we will be able to do easy to maintain applications.

+
+
+

For example, in order to expose all CRUD operations for an entity, you only need to create a controller like:

+
+
+
+
@Crud({
+  model: {
+    type: Employee,
+  },
+})
+@CrudType(Employee)
+@Controller('employee/employees')
+export class EmployeeCrudController {
+  constructor(public service: EmployeeCrudService) {}
+}
+
+
+
+

You can find this code in the employee example. Only with this code you are exposing the full CRUD operations for the employee entity. As you can see, it’s an empty class with some decorators and the EmployeeCrudService injected as a dependency. Simple, isn’t it? The EmployeeCrudService is also simple:

+
+
+
+
@Injectable()
+export class EmployeeCrudService extends TypeOrmCrudService<Employee> {
+  constructor(@InjectRepository(Employee) repo: Repository<Employee>) {
+    super(repo);
+  }
+}
+
+
+
+

Another empty class which extends from TypeOrmCrudService<Employee> and injects the Employee Repository as dependency. Nothing else.

+
+
+

With these examples you can get an idea of how simple it can be to code a devon4node application.

+
+
+
+

Reusability

+
+

NestJS (and devon4node) applications are designed in a modular way. This allows you to isolate some functionality in a module, and then reuse it in every application that you need. This is the same behaviour that Angular has. You can see it in the NestJS modules like TypeORM, Swagger and others. Also, in devon4node we have the Mailer module.

+
+
+

In your applications, you only need to import those modules and then you will be able to use the functionality that they implement. Example

+
+
+
+
@Module({
+  imports: [ AuthModule, ConfigurationModule ],
+})
+export class SomeModule {}
+
+
+
+
+

Productivity

+
+

devon4node is designed to create secure enterprise applications, but it also allows you to do it in a fast way. To increase productivity, devon4node provides schematics in order to generate some boilerplate code.

+
+
+

For example, to create a module you need to create a new file for the module (or copy it) and write the code, then you need to import it in the AppModule. This is an easy example, but you can introduce some errors: forget to import it in the AppModule, introduce errors with the copy/paste and so on. By using the command nest g module --name <module-name> it will do everything for you. Just a simple command. In this specific case you probably do not see any advantage, but there are other complex cases where you can generate more complex code with the nest and devon4node schematics commands.

+
+
+

See code generation in order to know how to increase your productivity creating devon4node applications.

+
+ +
+
+

Code Generation

+
+

As we mention in the page key principles, one of our key principles is Productivity. In order to provide that productivity, we have some tools to generate code. These tools will help you generate the common parts of the application so that you can focus only on the specific functionality.

+
+
+

Those tools are:

+
+ +
+
+

Nest CLI and Devon4node schematics

+
+

We are going to use the Nest CLI to generate the code of our application; you can learn more about the Nest CLI in the official documentation.

+
+
+
+

Install devon4node schematics

+
+

First of all, you need to install Nest CLI

+
+
+

Execute the command yarn global add @nestjs/cli. +You can also use npm: npm install -g @nestjs/cli

+
+
+

And then Devon4node schematics globally with the following command:

+
+
+

yarn global add @devon4node/schematics or npm install -g @devon4node/schematics

+
+
+
+

==

+
+

If you get an error related to a collection not being found when trying to execute any devon4node schematic, try to reinstall @devon4node/schematics in the project folder, or make sure that the schematics folder is inside @devon4node in node_modules: +yarn add @devon4node/schematics

+
+
+
+

Generate new devon4node application

+
+

To start creating a devon4node application, execute the command:

+
+
+

nest g -c @devon4node/schematics application [application-name]

+
+
+

If you do not put a name, the command line will ask you for one.

+
+
+
+

Generate code for TypeORM

+
+

Initialize TypeORM into your current project in a correct way.

+
+
+

nest g -c @devon4node/schematics typeorm

+
+
+

Then, you will be asked about which DB you want to use.

+
+
+

typeorm schematic

+
+
+
+

Generate CRUD

+
+

Generate CRUD methods for an entity. Requires TypeORM installed in the project.

+
+
+

It will add the @nestjsx/crud module as a project dependency. Then, it generates an entity, a CRUD controller and a CRUD service. It also registers the entity, controller and service in the module.

+
+
+

Execute nest g -c @devon4node/schematics crud and then you will need to write a name for the crud.

+
+
+
+crud schematic +
+
+
+
+

Generate TypeORM entity

+
+

Add a TypeORM entity to your project. Requires TypeORM installed in the project.

+
+
+

Execute nest g -c @devon4node/schematics entity and you will be asked for an entity name.

+
+
+
+

Add config-module

+
+

Add the config module to the project.

+
+
+

It will add the @devon4node/common module as a project dependency. Then, it will generate the configuration module into your project and add it in the core module. Also, it generates the config files for the most common environments.

+
+
+

The command to execute will be nest g -c @devon4node/schematics config-module

+
+
+
+

Add mailer module

+
+

Add @devon4node/mailer module to project.

+
+
+

It will add the @devon4node/mailer module as a project dependency. Also, it will add it to the core module and it will generate some email template examples.

+
+
+

Write the command nest g -c @devon4node/schematics mailer

+
+
+
+

Add swagger module

+
+

Add swagger module to project.

+
+
+

It will add the @nestjs/swagger module as a project dependency. Also, it will update the main.ts file in order to expose the endpoint for swagger. The default endpoint is: /v1/api

+
+
+

Execute the command nest g -c @devon4node/schematics swagger

+
+
+
+

Add auth-jwt module

+
+

Add the auth JWT module to the project.

+
+
+

It will add to your project the auth-jwt and user module. Also, it will import those modules into the core module.

+
+
+

Execute nest g -c @devon4node/schematics auth-jwt

+
+
+
+

Add security

+
+

Add cors and helmet to your project.

+
+
+

It will add helmet package as project dependency and update the main.ts file in order to enable the cors and helmet in your application.

+
+
+

Execute nest g -c @devon4node/schematics security

+
+
+
+

Generate database migrations

+
+
    +
  1. +

    Generate database migrations

    +
    +
      +
    1. +

      In order to create migration scripts with TypeORM, you need to install ts-node: yarn global add ts-node or npm i -g ts-node

      +
    2. +
    3. +

      Generate the tables creation migration: yarn run typeorm migration:generate -n CreateTables

      +
      +
      +insert data +
      +
      +
      +

      It will connect to the database, read all entities and then it will generate a migration file with all sql queries need to transform the current status of the database to the status defined by the entities. If the database is empty, it will generate all sql queries need to create all tables defined in the entities. You can find a example in the todo example

      +
      +
    4. +
    +
    +
  2. +
+
+
+

As TypeORM is the tool used for DB. You can check official documentation for more information. +See TypeORM CLI documentation.

+
+
+
+

CobiGen

+
+

Currently, we do not have templates to generate devon4node code (we have planned to do that in the future). Instead, we have templates that read the code of a devon4node application and generate a devon4ng application. Visit the CobiGen page for more information.

+
+ +
+
+

Coding Conventions

+
+

devon4node defines some coding conventions in order to improve the readability, reduce the merge conflicts and be able to develop applications in an industrialized way.

+
+
+

In order to ensure that you are following the devon4node coding conventions, you can use the following tools:

+
+
+
    +
  • +

    ESLint: ESLint ESLint is a tool for identifying and reporting on patterns found in ECMAScript/JavaScript code, with the goal of making code more consistent and avoiding bugs. We recommend to use the ESLint VSCode extension (included in the devonfw Platform Extension Pack) in order to be able to see the linting errors while you are developing.

    +
  • +
  • +

    Prettier: Prettier is a code formatter. We recommend to use the Prettier VSCode extension (included in the devonfw Platform Extension Pack) and enable the editor.formatOnSave option.

    +
  • +
  • +

    devon4node application schematic: this tool will generate code following the devon4node coding conventions. Also, when you generate a new project using the devon4node application schematic, it generates the configuration files for TSLint and Prettier that satisfy the devon4node coding conventions.

    +
  • +
+
+
+

When you combine all tools, you can be sure that you follow the devon4node coding conventions.

+
+
+
+

Detailed devon4node Coding Conventions

+
+

Here we will detail some of most important devon4node coding conventions. To be sure that you follows all devon4node coding conventions use the tools described before.

+
+
+
+

Indentation

+
+

All devon4node code files must be indented using spaces. The indentation width must be 2 spaces.

+
+
+
+

White space

+
+

In order to improve the readability of your code, you must introduce whitespaces. Example:

+
+
+
+
if(condition){
+
+
+
+

must be

+
+
+
+
if (condition) {
+
+
+
+
+

Naming conventions

+ +
+
+

== File naming

+
+

The file name must follow the pattern: (name in kebab case).(kind of component).(extension) +The test file name must follow the pattern: (name in kebab case).(kind of component).spec.(extension)

+
+
+

Example:

+
+
+
+
auth-jwt.service.ts
+auth-jwt.service.spec.ts
+
+
+
+
+

== Interface naming

+
+

The interface names must be in pascal case, and must start with I. There is some controversy in starting the interface names with an I, but we decided to do it because in most cases you will have an interface and a class with the same name, so, to differentiate them, we decided to start the interfaces with I. Other devonfw stacks solve it by adding the suffix Impl to the class implementations.

+
+
+

Example:

+
+
+
+
interface ICoffee {}
+
+
+
+
+

== Class naming

+
+

The class names must be in pascal case.

+
+
+

Example:

+
+
+
+
class Coffee {}
+
+
+
+
+

== Variable naming

+
+

All variable names must be in camel case.

+
+
+
+
const coffeeList: Coffee[];
+
+
+
+
+

Declarations

+
+

For all variable declarations we must use const or let. var is forbidden. We prefer to use const when possible.

+
+
+
+

Programming practices

+ +
+
+

== Trailing comma

+
+

All statements must end with a trailing comma. Example:

+
+
+
+
{
+  one: 'one',
+  two: 'two'  // bad
+}
+{
+  one: 'one',
+  two: 'two', // good
+}
+
+
+
+
+

== Arrow functions

+
+

All anonymous functions must be defined with the arrow function notation. In most cases it’s not a problem, but sometimes, when you do not want to bind this when you define the function, you can use the other function definition. In these special cases you must disable the linter for those sentences.

+
+
+
+

== Comments

+
+

Comments must start with a whitespace. Example:

+
+
+
+
//This is a bad comment
+// This is OK
+
+
+
+
+

== Quotemarks

+
+

For string definitions, we must use single quotes.

+
+
+
+

== if statements

+
+

In all if statements you always must use brackets. Example:

+
+
+
+
// Bad if statement
+if (condition)
+  return true;
+
+// Good if statement
+if (condition) {
+  return true;
+}
+
+
+
+
+

Pre-commit hooks

+
+

In order to ensure that your new code follows the coding conventions, devon4node uses by default husky. Husky is a tool that allows you to configure git hooks easily in your project. When you make a git commit in your devon4node project, it will execute two actions:

+
+
+
    +
  • +

    Prettify the staged files

    +
  • +
  • +

    Execute the linter in the staged files

    +
  • +
+
+
+

If any action fails, you won’t be able to commit your new changes.

+
+
+ + + + + +
+ + +If you want to skip the git hooks, you can do a commit passing the --no-verify flag. +
+
+ +
+
+

Dependency Injection

+
+

The dependency injection is a well-known common design pattern applied by frameworks in all languages, like Spring in Java, Angular and others. The intention of this page is not to explain how dependency injection works, but instead how it is addressed by NestJS.

+
+
+

NestJS resolves the dependency injection in its modules. When you define a provider in a module, it can be injected in all components of the module. By default, those providers are only available in the module where they are defined. The only way to export a module provider to other modules which import it is adding those providers to the exports array. You can also reexport modules.

+
+
+
+

Inject dependencies in NestJS

+
+

In order to inject a dependency in a NestJS component, you need to declare it in the component constructor. Example:

+
+
+
+
export class CoffeeController {
+  constructor(public readonly coffeeService: CoffeeService) {}
+}
+
+
+
+

NestJS can resolve all dependencies that are defined in the module as provider, and also the dependencies exported by the modules imported. Example:

+
+
+
+
@Module({
+  controllers: [CoffeeController],
+  providers: [CoffeeService],
+})
+export class CoffeeModule {}
+
+
+
+

Injecting dependencies in the constructor is the preferred choice, but sometimes it is not possible. For example, when you are extending another class and want to keep the constructor definition. In these specific cases we can inject dependencies in the class properties. Example:

+
+
+
+
export class CoffeeController {
+  @Inject(CoffeeService)
+  private readonly coffeeService: CoffeeService;
+}
+
+
+
+
+

Dependency Graph

+
+
+dependency injection1 +
+
+
+

In the previous image, the Module A can inject dependencies exported by Module B, Module E and Module F. If Module B reexports Module C and Module D, they are also accessible by Module A.

+
+
+

If there is a conflict with the injection token, it resolves the provider with the least distance to the module. For example: if the modules C and F export a UserService provider, the Module A will resolve the UserService exported by the Module F, because the distance from Module A to Module F is 1, and the distance from Module A to Module C is 2.

+
+
+

When you define a module as global, the dependency injection system is the same. The only difference is that now all modules have a link to the global module. For example, if we make the Module C global the dependency graph will be:

+
+
+
+dependency injection2 +
+
+
+
+

Custom providers

+
+

When you want to change the provider name, you can use a NestJS feature called custom providers. For example, if you want to define a provider called MockUserService with the provider token UserService you can define it like:

+
+
+
+
@Module({
+  providers: [{
+    provide: UserService,
+    useValue: MockUserService,
+  }],
+})
+
+
+
+

With this, when you want to inject UserService as a dependency, the MockUserService will be injected.

+
+
+

Custom provider token can be also a string:

+
+
+
+
@Module({
+  providers: [{
+    provide: 'USER_SERVICE',
+    useValue: MockUserService,
+  }],
+})
+
+
+
+

but now, when you want to inject it as dependency you need to use the @Inject decorator.

+
+
+
+
constructor(@Inject('USER_SERVICE') userService: any) {}
+
+
+ +
+
+

Configuration Module

+
+

devon4node provides a way to generate a configuration module inside your application. To generate it you only need to execute the command nest g -c @devon4node/schematics config-module. This command will generate inside your application:

+
+
+
    +
  • +

    Configuration module inside the core module.

    +
  • +
  • +

    config folder where all environment configuration are stored.

    +
    +
      +
    • +

      default configuration: configuration for your local development environment.

      +
    • +
    • +

      develop environment configuration for the develop environment.

      +
    • +
    • +

      uat environment configuration for the uat environment.

      +
    • +
    • +

      production environment configuration for the production environment.

      +
    • +
    • +

      production environment configuration for the production environment.

      +
    • +
    • +

      test environment configuration used by test.

      +
    • +
    +
    +
  • +
+
+
+ + + + + +
+ + +some code generators will add some properties to this module, so, be sure that the config module is the first module that you generate in your application. +
+
+
+
+

Use the configuration service

+
+

To use the configuration service, you only need to inject it as dependency. As configuration module is defined in the core module, it will be available everywhere in your application. Example:

+
+
+
+
export class MyProvider {
+  constructor(public readonly configService: ConfigurationService) {}
+
+  myMethod() {
+    return this.configService.isDev;
+  }
+}
+
+
+
+
+

Choose an environment file

+
+

By default, when you use the configuration service it will take the properties defined in the default.ts file. If you want to change the configuration file, you only need to set the NODE_ENV environment property with the name of the desired environment. Examples: in windows execute set NODE_ENV=develop before executing the application, in linux execute NODE_ENV=develop before executing the application or NODE_ENV=develop yarn start.

+
+
+
+

Override configuration properties

+
+

Sometimes, you want to keep some configuration property secure, and you do not want to publish it to the repository, or you want to reuse some configuration file but you need to change some properties. For those scenarios, you can override configuration properties by defining an environment variable with the same name. For example, if you want to override the property host, you can do: set host="newhost". It also works with objects. For example, if you want to change the value of secret in the property jwtConfig for this example, you can set an environment variable like this: set jwtConfig="{"secret": "newsecret"}". As you can see, this environment variable has a JSON value. It will take the object and merge the jwtConfig property with the properties defined inside the environment variable. The other properties maintain their values. The behaviour is the same for the nested objects.

+
+
+
+

Add a configuration property

+
+

In order to add a new property to the configuration module, you need to follow some steps:

+
+
+
    +
  • +

    Add the property to IConfig interface in src/app/core/configuration/types.ts file. With this, we can ensure that the ConfigurationService and the environment files have that property at compile time.

    +
  • +
  • +

    Add the new property getter to ConfigurationService. You must use the get method of ConfigurationService to ensure that the property will be loaded from the desired config file. You can also add extra logic if needed.

    +
  • +
  • +

    Add the property to all config files inside the src/config folder.

    +
  • +
+
+
+

Example:

+
+
+

We want to add the property devonfwUrl to our ConfigurationService, so:

+
+
+

We add the following code in IConfig interface:

+
+
+
+
devonfwUrl: string;
+
+
+
+

Then, we add the getter in the ConfigurationService:

+
+
+
+
get devonfwUrl(): string {
+  return this.get('devonfwUrl')!;
+}
+
+
+
+

Finally, we add the definition in all config files:

+
+
+
+
devonfwUrl: 'https://devonfw.com',
+
+
+ +
+
+

Auth JWT module

+
+

devon4node provides a way to generate a default authentication module using JWT (JSON Web Token). It uses the @nestjs/passport library described here.

+
+
+

To generate the devon4node auth-jwt module you only need to execute the command: nest generate -c @devon4node/schematics auth-jwt. We generate this module inside the applications instead of distributing a npm package because this module is prone to be modified depending on the requirements. It also generates a basic user module.

+
+
+

In this page we will explain the default implementation provided by devon4node. For more information about authentication, JWT, passport and other you can see:

+
+
+ +
+
+
+

Auth JWT endpoints

+
+

In order to execute authentication operations, the auth-jwt module exposes the following endpoints:

+
+
+
    +
  • +

    POST /auth/login: receives a username and a password and returns the token in the header if the combination of username and password is correct.

    +
  • +
  • +

    POST /auth/register: register a new user.

    +
  • +
  • +

    GET /auth/currentuser: return the user data if he is authenticated.

    +
  • +
+
+
+
+

Protect endpoints with auth-jwt

+
+

In order to protect your endpoints with auth-jwt module you only need to add the AuthGuard() in the UseGuards decorator. Example:

+
+
+
+
@Get('currentuser')
+@UseGuards(AuthGuard())
+currentUser(@Request() req: UserRequest) {
+  return req.user;
+}
+
+
+
+

Now, all requests to currentuser are protected by the AuthGuard.

+
+
+
+

Role based Access Control

+
+

The auth-jwt module provides also a way to control the access to some endpoints by using roles. For example, if you want to grant access to an endpoint only to admins, you only need to add the Roles decorator to those endpoints with the roles allowed. Example:

+
+
+
+
@Get('currentuser')
+@UseGuards(AuthGuard())
+@Roles(roles.ADMIN)
+currentUser(@Request() req: UserRequest) {
+  return req.user;
+}
+
+
+ +
+
+

Swagger

+
+

We can use swagger (OpenAPI) in order to describe the endpoints that our application exposes.

+
+
+

NestJS provides a module which will read the code of our application and will expose one endpoint where we can see the swagger.

+
+
+

Adding swagger to a devon4node application is simple, you only need to execute the command nest g -c @devon4node/schematics swagger and it will do everything for you. The next time that you start your application, you will be able to see the swagger at /v1/api endpoint.

+
+
+

The swagger module can read your code in order to create the swagger definition, but sometimes you need to help it by decorating your handlers.

+
+
+

For more information about decorators and other behaviour about swagger module, you can see the NestJS swagger documentation page

+
+
+ + + + + +
+ + +the OpenAPI specification that this module supports is v2.0. The OpenAPI v3.0 is not available yet by using this module. +
+
+ +
+
+

TypeORM

+
+

TypeORM is the default ORM provided by devon4node. It supports MySQL, MariaDB, Postgres, CockroachDB, SQLite, Microsoft SQL Server, Oracle, sql.js relational database and also supports MongoDB NoSQL database.

+
+
+

Adding TypeORM support to a devon4node application is very easy: you only need to execute the command nest g -c @devon4node/schematics typeorm and it will add all required dependencies to the project and also import the @nestjs/typeorm module.

+
+
+

For more information about TypeORM and the integration with NestJS you can visit TypeORM webpage, TypeORM GitHub repository and NestJS TypeORM documentation page

+
+
+
+

Configuration

+
+

When you have the configuration module, the TypeORM generator will add one property in order to be able to configure the database depending on the environment. Example:

+
+
+
+
database: {
+  type: 'sqlite',
+  database: ':memory:',
+  synchronize: false,
+  migrationsRun: true,
+  logging: true,
+  entities: ['dist/**/*.entity.js'],
+  migrations: ['dist/migration/**/*.js'],
+  subscribers: ['dist/subscriber/**/*.js'],
+  cli: {
+    entitiesDir: 'src/entity',
+    migrationsDir: 'src/migration',
+    subscribersDir: 'src/subscriber',
+  },
+},
+
+
+
+

This object is a TypeORM ConnectionOptions. For more information about it visit the TypeORM Connection Options page.

+
+
+

There is also a special case: the default configuration. As the devon4node CLI needs the database configuration when you use the devon4node db command, we also provide the ormconfig.json file. In this file you must put the configuration for your local environment. In order not to duplicate the configuration for the local environment, in the default config file the database property is set up like:

+
+
+
+
database: require('../../ormconfig.json'),
+
+
+
+

So, you only need to maintain the ormconfig.json file for the local environment.

+
+
+
+

Entity

+
+

Entity is a class that maps to a database table. The devon4node schematics has a generator to create new entities. You only need to execute the command nest g -c @devon4node/schematics entity <entity-name> and it generates the entity.

+
+
+

In the entity, you must define all columns, relations, primary keys of your database table. By default, devon4node provides a class named BaseEntity. All entities created with the devon4node schematics will extend the BaseEntity. This entity provides you some common columns:

+
+
+
    +
  • +

    id: the primary key of your table

    +
  • +
  • +

    version: the version of the entry (used for auditing purposes)

    +
  • +
  • +

    createdAt: creation date of the entry (used for auditing purposes)

    +
  • +
  • +

    updatedAt: last update date of the entry (used for auditing purposes)

    +
  • +
+
+
+

For more information about Entities, please visit the TypeORM entities page

+
+
+
+

Repository

+
+

With repositories, you can manage (insert, update, delete, load, etc.) a concrete entity. Using this pattern, we have separated the data (Entities) from the methods to manage it (Repositories).

+
+
+

To use a repository you only need to:

+
+
+
    +
  • +

    Import it in the module as follows:

    +
    +
    +
    @Module({
    +  imports: [TypeOrmModule.forFeature([Employee])],
    +})
    +
    +
    +
    + + + + + +
    + + +if you generate the entities with the devon4node schematic, this step is not necessary, devon4node schematic will do it for you. +
    +
    +
  • +
  • +

    Inject the repository as dependency in your service:

    +
    +
    +
    constructor(@InjectRepository(Employee) employeeRepository: Repository<Employee>) {}
    +
    +
    +
  • +
+
+
+

You can see more details in the NestJS database and NestJS TypeORM documentation pages.

+
+ +
+
+

Serializer

+
+

Serialization is the process of translating data structures or object state into a format that can be transmitted across network and reconstructed later.

+
+
+

NestJS by default serializes all data to JSON (JSON.stringify). Sometimes this is not enough. In some situations you need to exclude some property (e.g. password). Instead of doing it manually, devon4node provides an interceptor (ClassSerializerInterceptor) that will do it for you. You only need to return a class instance as always and the interceptor will transform those classes to the expected data.

+
+
+

The ClassSerializerInterceptor takes the class-transformer decorators in order to know how to transform the class and then send the result to the client.

+
+
+

Some of class-transformer decorators are:

+
+
+
    +
  • +

    Expose

    +
  • +
  • +

    Exclude

    +
  • +
  • +

    Type

    +
  • +
  • +

    Transform

    +
  • +
+
+
+

And methods to transform data:

+
+
+
    +
  • +

    plainToClass

    +
  • +
  • +

    plainToClassFromExist

    +
  • +
  • +

    classToPlain

    +
  • +
  • +

    classToClass

    +
  • +
  • +

    serialize

    +
  • +
  • +

    deserialize

    +
  • +
  • +

    deserializeArray

    +
  • +
+
+
+

See the class-transformer page for more information.

+
+
+

See NestJS serialization page for more information about ClassSerializerInterceptor.

+
+ +
+
+

Validation

+
+

To be sure that your application will work well, you must validate any input data. devon4node by default provides a ValidationPipe. This ValidationPipe is responsible for validating the request input and, if the input does not pass the validation process, it returns a 400 Bad Request error.

+
+
+
+

Defining Validators

+
+

The ValidationPipe needs to know how to validate the input. For that purpose we use the class-validator package. This package allows you to define the validation of a class by using decorators.

+
+
+

For example:

+
+
+
+
export class Coffee {
+  @IsDefined()
+  @IsString()
+  @MaxLength(255)
+  name: string;
+
+  @IsDefined()
+  @IsString()
+  @MaxLength(25)
+  type: string;
+
+  @IsDefined()
+  @IsNumber()
+  quantity: number;
+}
+
+
+
+

As you can see in the previous example, we used some decorators in order to define the validators for every property of the Coffee class. You can find all decorators in the class-validator github repository.

+
+
+

Now, when you want to receive a Coffee as input in some endpoint, it will execute the validations before executing the handler function.

+
+
+ + + + + +
+ + +In order to be able to use the class-validator package, you must use classes instead of interfaces. As you know interfaces disappear at compiling time, and class-validator need to know the metadata of the properties in order to be able to validate. +
+
+
+ + + + + +
+ + +The ValidationPipe only works if you put a specific type in the handler definition. For example, if you define a handler like getCoffee(@Body() coffee: any): Coffee {} the ValidationPipe will not do anything. You must specify the type of the input: getCoffee(@Body() coffee: Coffee): Coffee {} +
+
+ +
+
+

Logger

+
+

When you create a new devon4node application, it already has a logger: src/app/shared/logger/winston.logger.ts. This logger provides the methods log, error and warn. All of those methods will write a log message, but with a different log level.

+
+
+

The winston logger has two transports: one to log everything inside the file logs/general.log and the other to log only the error logs inside the file logs/error.log. In addition, it uses the default NestJS logger in order to show the logs in the console.

+
+
+

As you can see it is a simple example about how to use a logger in a devon4node application. It will be updated to a complex one in the next versions.

+
+
+
+

How to use logger

+
+

In order to use the logger you only need to inject the logger as a dependency:

+
+
+
+
constructor(logger: WinstonLogger){}
+
+
+
+

and then use it

+
+
+
+
async getAll() {
+  this.service.getAll();
+  this.logger.log('Returning all data');
+}
+
+
+ +
+
+

Mailer Module

+
+

This module enables you to send emails in devon4node. It also provides a template engine using Handlebars.

+
+
+

It is a NestJS module that injects into your application a MailerService, which is responsible for sending the emails using the nodemailer library.

+
+
+
+

Installing

+
+

Execute the following command in a devon4node project:

+
+
+
+
yarn add @devon4node/mailer
+
+
+
+
+

Configuring

+
+

To configure the mailer module, you only need to import it into another module of your application. Example:

+
+
+
+
@Module({
+  ...
+  imports: [
+    MailerModule.forRoot(),
+  ],
+  ...
+})
+
+
+
+

You must pass the configuration using the forRoot or forRootAsync methods.

+
+
+
+

forRoot()

+
+

The forRoot method receives a MailerModuleOptions object as parameter. It configures the MailerModule using the input MailerModuleOptions object.

+
+
+

The structure of MailerModuleOptions is:

+
+
+
+
{
+  hbsOptions?: {
+    templatesDir: string;
+    extension?: string;
+    partialsDir?: string;
+    helpers?: IHelperFunction[];
+    compilerOptions?: ICompileOptions;
+  },
+  mailOptions?: nodemailerSmtpTransportOptions;
+  emailFrom: string;
+}
+
+
+
+

Here, you need to specify the Handlebars compile options, the nodemailer transport options and the email address which will send the emails. +Then, you need to call the forRoot function in the module imports. Example:

+
+
+
+
@Module({
+  ...
+  imports: [
+    MailerModule.forRoot({
+      mailOptions: {
+        host: 'localhost',
+        port: 1025,
+        secure: false,
+        tls: {
+          rejectUnauthorized: false,
+        },
+      },
+      emailFrom: 'noreply@capgemini.com',
+      hbsOptions: {
+        templatesDir: join(__dirname, '../..', 'templates/views'),
+        partialsDir: join(__dirname, '../..', 'templates/partials'),
+        helpers: [{
+          name: 'fullname',
+          func: person => `${person.name} ${person.surname}`,
+        }],
+      },
+    }),
+  ...
+})
+
+
+
+
+

forRootAsync()

+
+

The method forRootAsync enables you to get the mailer configuration in an asynchronous way. It is useful when you need to get the configuration using, for example, a service (e.g. ConfigurationService).

+
+
+

Example:

+
+
+
+
@Module({
+  ...
+  imports: [
+    MailerModule.forRootAsync({
+      imports: [ConfigurationModule],
+      useFactory: (config: ConfigurationService) => {
+        return config.mailerConfig;
+      },
+      inject: [ConfigurationService],
+    }),
+  ...
+})
+
+
+
+

In this example, we use the ConfigurationService in order to get the MailerModuleOptions (the same as forRoot)

+
+
+
+

Usage

+
+

In order to use it, you only need to inject the MailerService using dependency injection.

+
+
+

Example:

+
+
+
+
@Injectable()
+export class CatsService {
+  constructor(private readonly mailer: MailerService) {}
+}
+
+
+
+

Then, you only need to use the methods provided by the MailerService in your service. Take into account that you can inject it in every place that support NestJS dependency injection.

+
+
+
+

MailerService methods

+ +
+
+

== sendPlainMail

+
+

The method sendPlainMail receives a string and sends an email.

+
+
+

The method signatures are:

+
+
+
+
sendPlainMail(emailOptions: SendMailOptions): Promise<SentMessageInfo>;
+sendPlainMail(to: string, subject: string, mail: string): Promise<SentMessageInfo>;
+
+
+
+

Examples:

+
+
+
+
this.mailer.sendPlainMail({
+  to: 'example@example.com',
+  subject: 'This is a subject',
+  html: '<h1>Hello world</h1>'
+});
+this.mailer.sendPlainMail('example@example.com', 'This is a subject', '<h1>Hello world</h1>');
+
+
+
+
+

== sendTemplateMail

+
+

The method sendTemplateMail sends an email based on a Handlebars template. The templates are registered using the templatesDir option or using the addTemplate method. +The template name is the name of the template (without extension) or the first parameter of the method addTemplate.

+
+
+

The method signatures are:

+
+
+
+
sendTemplateMail(emailOptions: SendMailOptions, templateName: string, emailData: any, hbsOptions?: RuntimeOptions): Promise<SentMessageInfo>;
+sendTemplateMail(to: string, subject: string, templateName: string, emailData: any, hbsOptions?: RuntimeOptions): Promise<SentMessageInfo>;
+
+
+
+

Examples:

+
+
+
+
this.mailer.sendTemplateMail({
+  to: 'example@example.com',
+  subject: 'This is a subject',
+  html: '<h1>Hello world</h1>'
+}, 'template1', { person: {name: 'Dario', surname: 'Rodriguez'}});
+this.mailer.sendTemplateMail('example@example.com', 'This is a subject', 'template1', { person: {name: 'Dario', surname: 'Rodriguez'}});
+
+
+
+
+

== addTemplate

+
+

Adds a new template to the MailerService.

+
+
+

Method signature:

+
+
+
+
addTemplate(name: string, template: string, options?: CompileOptions): void;
+
+
+
+

Example:

+
+
+
+
this.mailer.addTemplate('newTemplate', '<html><head></head><body>{{>partial1}}</body></html>')
+
+
+
+
+

== registerPartial

+
+

Register a new partial in Handlebars.

+
+
+

Method signature:

+
+
+
+
registerPartial(name: string, partial: Handlebars.Template<any>): void;
+
+
+
+

Example:

+
+
+
+
this.mailer.registerPartial('partial', '<h1>Hello World</h1>')
+
+
+
+
+

== registerHelper

+
+

Register a new helper in Handlebars.

+
+
+

Method signature:

+
+
+
+
registerHelper(name: string, helper: Handlebars.HelperDelegate): void;
+
+
+
+

Example:

+
+
+
+
this.mailer.registerHelper('fullname', person => `${person.name} ${person.surname}`)
+
+
+
+
+

Handlebars templates

+
+

As mentioned above, this module allows you to use Handlebars as template engine, but it is optional. If you do not need Handlebars, you just need to keep the hbsOptions undefined.

+
+
+

In order to get the templates from the file system, you can specify the template folder, the partials folder and the helpers. +At the moment of module initialization, it will read the content of the template folder, and will register every file with the name (without extension) and the content as a Handlebars template. It will do the same for the partials.

+
+
+

You can specify the extension of template files using the extension parameter. The default value is .handlebars

+
+
+
+

Local development

+
+

If you want to work with this module but you don’t have an SMTP server, you can use the streamTransport. Example:

+
+
+
+
{
+  mailOptions: {
+    streamTransport: true,
+    newline: 'windows',
+  },
+  emailFrom: ...
+  hbsOptions: ...
+}
+
+
+
+

Then, you need to get the sendPlainMail or sendTemplateMail result, and print the email to the standard output (STDOUT). Example:

+
+
+
+
const mail = await this.mailer.sendTemplateMail(...);
+
+mail.message.pipe(process.stdout);
+
+
+ +
+
+

Importing your ESLint reports into SonarQube

+
+

This guide covers the import of ESLint reports into SonarQube instances in CI environments, as this is the recommended way of using ESLint and SonarQube for devon4node projects. The prerequisites for this process are a CI environment, preferably a Production Line instance, and the ESLint CLI, which is already included when generating a new devon4node project.

+
+
+
+

Configuring the ESLint analysis

+
+

You can configure the ESLint analysis parameters in the .eslintrc.js file inside the top-level directory of your project. If you created your node project using the devon4node application schematic, this file will already exist. If you want to make further adjustments to it, have a look at the ESLint documentation.

+
+
+

The ESLint analysis script lint is already configured in the scripts part of your package.json. Simply add -f json > report.json, so that the output of the analysis is saved in a .json file. Additional information to customization options for the ESLint CLI can be found here.

+
+
+

To run the analysis, execute the script with npm run lint inside the base directory of your project.

+
+
+
+

Configuring SonarQube

+
+

If you haven’t already generated your CICD-related files, follow the tutorial on the devon4node schematic of our CICDGEN project, as you will need a Jenkinsfile configured in your project to proceed.

+
+
+

Inside the script for the SonarQube code analysis in your Jenkinsfile, add the parameter -Dsonar.eslint.reportPaths=report.json. Now, whenever a SonarQube analysis is triggered by your CI environment, the generated report will be loaded into your SonarQube instance. +To avoid duplicated issues, you can associate an empty TypeScript quality profile with your project in its server configurations.

+
+
+
+
+
+

devon4node applications

+
+ +
+

devon4node Samples

+
+

In the folder /samples, you can find some devon4node examples that could be useful for you to better understand the framework.

+
+
+

The samples are:

+
+
+ +
+
+

Also, we have another realistic example in the My Thai Star repository. This example is the implementation of the My Thai Star backend, which is compatible with the frontend made with Angular. To do that, this node implementation exposes the same API as the Java backend. Be careful with this example: since we need to follow the Java API, some components do not follow the devon4node patterns and code conventions.

+
+
+
+

Todo example

+
+

This example is the backend part of a TO-DO application. It exposes an API where you can create, read, update and delete a TO-DO list.

+
+
+

In order to start the application, run the following commands in the todo folder:

+
+
+
+
$ yarn
+$ yarn build
+$ yarn start
+
+
+
+

Now, you can access the application using the URL http://localhost:3000/v1/todo/todos. If you want to know all exposed endpoints, you can see the Swagger at: http://localhost:3000/v1/api.

+
+
+

Also, in this example we show you how to control the access to your application by implementing an authentication mechanism using JWT and a role-based strategy. In order to access the list of todos (http://localhost:3000/v1/todo/todos), first you need to call POST http://localhost:3000/v1/auth/login and in the body you need to send the user information:

+
+
+
+
{
+  "username": "user",
+  "password": "password"
+}
+
+
+
+

It will return a JWT token for the user user. The role of this user is USER, so you can only access the methods GET, POST and DELETE of the endpoint http://localhost:3000/v1/todo/todos. If you log in with the user admin/admin, you will be able to access the methods UPDATE and PATCH.

+
+
+
+

Employee example

+
+

This is an example of an employee management application. With the application you can create, read, update and delete employees.

+
+
+

In order to start the application, run the following commands in the employee folder:

+
+
+
+
$ yarn
+$ yarn build
+$ yarn start
+
+
+
+

Now, you can access the application using the URL http://localhost:8081/v1/employee/employees. If you want to know all exposed endpoints, you can see the Swagger at: http://localhost:8081/v1/api.

+
+
+

This is a simple example without authentication. With this example you can learn how to work with database migrations. You can find them in the folder /src/migrations. TypeORM is configured in ormconfig.json to execute the migrations every time you start this application, using the following flag:

+
+
+
+
"migrationsRun": true
+
+
+
+

You can also execute the migration manually by typing the command devon4node db migration:run, or revert it by executing devon4node db migration:revert. Take into account that the database that this application is using is an in-memory SQLite database, so every time you stop the application all data is lost.

+
+
+
+

Components example

+
+

This example allows you to better understand the execution order of the components of a devon4node application (guards, pipes, interceptors, filters, middleware).

+
+
+

In order to start the application, run the following commands in the components folder:

+
+
+
+
$ yarn
+$ yarn build
+$ yarn start
+
+
+
+

In order to see the execution order, you can call http://localhost:3000/v1. It will show you the execution order of all components except the filters. If you want to know the execution order while a filter is applied, call the endpoint with the following queries: ?hello=error, ?hello=controller, ?hello=global.

+
+ +
+
+

Create the employee sample step by step

+ +
+
+

Application requisites

+
+

The employee application needs:

+
+
+
    +
  • +

    A configuration module

    +
  • +
  • +

    A SQLite in memory database

    +
  • +
  • +

    Security: CORS

    +
  • +
  • +

    Swagger support

    +
  • +
  • +

    Authentication using JWT

    +
  • +
  • +

    CRUD for managing employees. The employees will have the following properties:

    +
    +
      +
    • +

      name

      +
    • +
    • +

      surname

      +
    • +
    • +

      email

      +
    • +
    +
    +
  • +
+
+
+
+

Create the application

+
+
    +
  1. +

    Install Nest CLI

    +
    +

    Execute the command yarn global add @nestjs/cli

    +
    +
  2. +
  3. +

    Install devon4node schematics

    +
  4. +
  5. +

    Execute the command yarn global add @devon4node/schematics

    +
  6. +
  7. +

    Create the new application

    +
    +

    Execute the command nest g -c @devon4node/schematics application employee

    +
    +
  8. +
  9. +

    Then, we need to add some components, go inside the project folder and execute the following commands:

    +
    +

    Go inside project folder: cd employee.

    +
    +
    +

    Config module: nest g -c @devon4node/schematics config-module.

    +
    +
    +

    TypeORM database, choose sqlite DB when asked nest g -c @devon4node/schematics typeorm.

    +
    +
    +

    Add security: nest g -c @devon4node/schematics security.

    +
    +
    +

    Swagger module: nest g -c @devon4node/schematics swagger.

    +
    +
    +

    Auth-jwt authentication: nest g -c @devon4node/schematics auth-jwt.

    +
    +
    +

    Add an application module: nest g -c @devon4node/schematics module employee.

    +
    +
    +

    Add CRUD component: nest g -c @devon4node/schematics crud employee/employee.

    +
    +
    +

    With this, you will generate the following files:

    +
    +
    +
    +
    /employee/.prettierrc
    +/employee/nest-cli.json
    +/employee/package.json
    +/employee/README.md
    +/employee/tsconfig.build.json
    +/employee/tsconfig.json
    +/employee/tslint.json
    +/employee/src/main.ts
    +/employee/test/app.e2e-spec.ts
    +/employee/test/jest-e2e.json
    +/employee/src/app/app.controller.spec.ts
    +/employee/src/app/app.controller.ts
    +/employee/src/app/app.module.ts
    +/employee/src/app/app.service.ts
    +/employee/src/app/core/core.module.ts
    +/employee/src/app/shared/logger/winston.logger.ts
    +/employee/src/app/core/configuration/configuration.module.ts
    +/employee/src/app/core/configuration/model/index.ts
    +/employee/src/app/core/configuration/model/types.ts
    +/employee/src/app/core/configuration/services/configuration.service.spec.ts
    +/employee/src/app/core/configuration/services/configuration.service.ts
    +/employee/src/app/core/configuration/services/index.ts
    +/employee/src/config/default.ts
    +/employee/src/config/develop.ts
    +/employee/src/config/production.ts
    +/employee/src/config/test.ts
    +/employee/src/config/uat.ts
    +/employee/docker-compose.yml
    +/employee/ormconfig.json
    +/employee/src/app/shared/model/entities/base-entity.entity.ts
    +/employee/src/app/core/auth/auth.module.ts
    +/employee/src/app/core/auth/controllers/auth.controller.spec.ts
    +/employee/src/app/core/auth/controllers/auth.controller.ts
    +/employee/src/app/core/auth/controllers/index.ts
    +/employee/src/app/core/auth/decorators/index.ts
    +/employee/src/app/core/auth/decorators/roles.decorator.spec.ts
    +/employee/src/app/core/auth/decorators/roles.decorator.ts
    +/employee/src/app/core/auth/guards/index.ts
    +/employee/src/app/core/auth/guards/roles.guard.spec.ts
    +/employee/src/app/core/auth/guards/roles.guard.ts
    +/employee/src/app/core/auth/model/index.ts
    +/employee/src/app/core/auth/model/roles.enum.ts
    +/employee/src/app/core/auth/model/user-request.interface.ts
    +/employee/src/app/core/auth/services/auth.service.spec.ts
    +/employee/src/app/core/auth/services/auth.service.ts
    +/employee/src/app/core/auth/services/index.ts
    +/employee/src/app/core/auth/strategies/index.ts
    +/employee/src/app/core/auth/strategies/jwt.strategy.spec.ts
    +/employee/src/app/core/auth/strategies/jwt.strategy.ts
    +/employee/src/app/core/user/user.module.ts
    +/employee/src/app/core/user/model/index.ts
    +/employee/src/app/core/user/model/dto/user-payload.dto.ts
    +/employee/src/app/core/user/model/entities/user.entity.ts
    +/employee/src/app/core/user/services/index.ts
    +/employee/src/app/core/user/services/user.service.spec.ts
    +/employee/src/app/core/user/services/user.service.ts
    +/employee/test/auth/auth.service.mock.ts
    +/employee/test/user/user.repository.mock.ts
    +/employee/src/app/employee/employee.module.ts
    +/employee/src/app/employee/model/entities/employee.entity.ts
    +/employee/src/app/employee/model/index.ts
    +/employee/src/app/employee/controllers/employee.crud.controller.ts
    +/employee/src/app/employee/services/employee.crud.service.ts
    +/employee/src/app/employee/services/index.ts
    +/employee/src/app/employee/controllers/index.ts
    +
    +
    +
  10. +
  11. +

    Open the VSCode

    +
    +

    Execute the commands:

    +
    +
    +
    +
    yarn install
    +code .
    +
    +
    +
  12. +
  13. +

    Fill in the entity: src/app/employee/model/entities/employee.entity.ts

    +
    +
      +
    1. +

      Add the columns

      +
      +
      +
      @Entity()
      +export class Employee extends BaseEntity {
      +  @Column('varchar', { length: 255, nullable: true })
      +  name?: string;
      +
      +  @Column('varchar', { length: 255, nullable: true })
      +  surname?: string;
      +
      +  @Column('varchar', { length: 255, nullable: true })
      +  email?: string;
      +}
      +
      +
      +
    2. +
    3. +

      Add the validations

      +
      +
      +
      @Entity()
      +export class Employee extends BaseEntity {
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  name?: string;
      +
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  surname?: string;
      +
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @IsEmail()
      +  @Column('varchar', { length: 255, nullable: true })
      +  email?: string;
      +}
      +
      +
      +
    4. +
    5. +

      Add the transformations

      +
      +

      In this specific case, we will not transform any property, but you can see an example in the src/app/shared/model/entities/base-entity.entity.ts file.

      +
      +
      +
      +
      export abstract class BaseEntity {
      +  @PrimaryGeneratedColumn('increment')
      +  id!: number;
      +
      +  @VersionColumn({ default: 1 })
      +  @Exclude({ toPlainOnly: true })
      +  version!: number;
      +
      +  @CreateDateColumn()
      +  @Exclude({ toPlainOnly: true })
      +  createdAt!: string;
      +
      +  @UpdateDateColumn()
      +  @Exclude({ toPlainOnly: true })
      +  updatedAt!: string;
      +}
      +
      +
      +
    6. +
    7. +

      Add swagger metadata

      +
      +
      +
      @Entity()
      +export class Employee extends BaseEntity {
      +  @ApiPropertyOptional()
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  name?: string;
      +
      +  @ApiPropertyOptional()
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  surname?: string;
      +
      +  @ApiPropertyOptional()
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @IsEmail()
      +  @Column('varchar', { length: 255, nullable: true })
      +  email?: string;
      +}
      +
      +
      +
    8. +
    +
    +
  14. +
  15. +

    Add swagger metadata to src/app/employee/controllers/employee.crud.controller.ts

    +
    +
    +
    @ApiTags('employee')
    +
    +
    +
  16. +
  17. +

    Generate database migrations

    +
    +
      +
    1. +

      Build the application: yarn build

      +
    2. +
    3. +

      In order to create migration scripts with TypeORM, you need to install ts-node: yarn global add ts-node

      +
    4. +
    5. +

      Generate the tables creation migration: yarn run typeorm migration:generate -n CreateTables

      +
      +
      +generate migrations +
      +
      +
      +

      The output will be something similar to:

      +
      +
      +
      +
      export class CreateTables1572480273012 implements MigrationInterface {
      +  name = 'CreateTables1572480273012';
      +
      +  public async up(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(
      +      `CREATE TABLE "user" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "version" integer NOT NULL DEFAULT (1), "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "updatedAt" datetime NOT NULL DEFAULT (datetime('now')), "username" varchar(255) NOT NULL, "password" varchar(255) NOT NULL, "role" integer NOT NULL DEFAULT (0))`,
      +      undefined,
      +    );
      +    await queryRunner.query(
      +      `CREATE TABLE "employee" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "version" integer NOT NULL DEFAULT (1), "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "updatedAt" datetime NOT NULL DEFAULT (datetime('now')), "name" varchar(255), "surname" varchar(255), "email" varchar(255))`,
      +      undefined,
      +    );
      +  }
      +
      +  public async down(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(`DROP TABLE "employee"`, undefined);
      +    await queryRunner.query(`DROP TABLE "user"`, undefined);
      +  }
      +}
      +
      +
      +
      +

      The number in the name is a timestamp, so may change in your application.

      +
      +
    6. +
    7. +

      Create a migration to insert data: `yarn run typeorm migration:generate -n InsertData`

      +
      +
      +insert data +
      +
      +
      +

      and fill in with the following code:

      +
      +
      +
      +
      export class InsertData1572480830290 implements MigrationInterface {
      +  public async up(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(1, 'Santiago', 'Fowler', 'Santiago.Fowler@example.com');`,
      +    );
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(2, 'Clinton', 'Thornton', 'Clinton.Thornton@example.com');`,
      +    );
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(3, 'Lisa', 'Rodriquez', 'Lisa.Rodriquez@example.com');`,
      +    );
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(4, 'Calvin', 'Becker', 'Calvin.Becker@example.com');`,
      +    );
      +    await queryRunner.query(`INSERT INTO USER(id, username, password, role) VALUES(?, ?, ?, ?);`, [
      +      1,
      +      'user',
      +      await hash('password', await genSalt(12)),
      +      roles.USER,
      +    ]);
      +    await queryRunner.query(`INSERT INTO USER(id, username, password, role) VALUES(?, ?, ?, ?);`, [
      +      2,
      +      'admin',
      +      await hash('admin', await genSalt(12)),
      +      roles.ADMIN,
      +    ]);
      +  }
      +
      +  public async down(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(`DELETE FROM EMPLOYEE`);
      +    await queryRunner.query(`DELETE FROM USER`);
      +  }
      +}
      +
      +
      +
    8. +
    +
    +
  18. +
  19. +

    Start the application: yarn start:dev

    +
    +
    +start app +
    +
    +
  20. +
  21. +

    Check the swagger endpoint: http://localhost:3000/v1/api

    +
    +
    +swagger +
    +
    +
  22. +
  23. +

    Make petitions to the employee CRUD: http://localhost:3000/v1/employee/employees

    +
    +
    +employees +
    +
    +
  24. +
  25. +

    Write the tests

    +
    +

    As we did not create any methods, only added some properties to the entity, the whole application is covered by the autogenerated tests. As we added some modules, you need to uncomment some lines in the src/app/core/configuration/services/configuration.service.spec.ts:

    +
    +
    +
    +
    describe('ConfigurationService', () => {
    +  const configService: ConfigurationService = new ConfigurationService();
    +
    +  it('should return the values of test config file', () => {
    +    expect(configService.isDev).toStrictEqual(def.isDev);
    +    expect(configService.host).toStrictEqual(def.host);
    +    expect(configService.port).toStrictEqual(def.port);
    +    expect(configService.clientUrl).toStrictEqual(def.clientUrl);
    +    expect(configService.globalPrefix).toStrictEqual(def.globalPrefix);
    +    // Remove comments if you add those modules
    +    expect(configService.database).toStrictEqual(def.database);
    +    expect(configService.swaggerConfig).toStrictEqual(def.swaggerConfig);
    +    expect(configService.jwtConfig).toStrictEqual(def.jwtConfig);
    +    // expect(configService.mailerConfig).toStrictEqual(def.mailerConfig);
    +  });
    +  it('should take the value of environment varible if defined', () => {
    +    process.env.isDev = 'true';
    +    process.env.host = 'notlocalhost';
    +    process.env.port = '123456';
    +    process.env.clientUrl = 'http://theclienturl.net';
    +    process.env.globalPrefix = 'v2';
    +    process.env.swaggerConfig = JSON.stringify({
    +      swaggerTitle: 'Test Application',
    +    });
    +    process.env.database = JSON.stringify({
    +      type: 'oracle',
    +      cli: { entitiesDir: 'src/notentitiesdir' },
    +    });
    +    process.env.jwtConfig = JSON.stringify({ secret: 'NOTSECRET' });
    +    // process.env.mailerConfig = JSON.stringify({ mailOptions: { host: 'notlocalhost' }});
    +
    +    expect(configService.isDev).toBe(true);
    +    expect(configService.host).toBe('notlocalhost');
    +    expect(configService.port).toBe(123456);
    +    expect(configService.clientUrl).toBe('http://theclienturl.net');
    +    expect(configService.globalPrefix).toBe('v2');
    +    const database: any = { ...def.database, type: 'oracle' };
    +    database.cli.entitiesDir = 'src/notentitiesdir';
    +    expect(configService.database).toStrictEqual(database);
    +    expect(configService.swaggerConfig).toStrictEqual({
    +      ...def.swaggerConfig,
    +      swaggerTitle: 'Test Application',
    +    });
    +    expect(configService.jwtConfig).toStrictEqual({
    +      ...def.jwtConfig,
    +      secret: 'NOTSECRET',
    +    });
    +    // const mail: any = { ...def.mailerConfig };
    +    // mail.mailOptions.host = 'notlocalhost';
    +    // expect(configService.mailerConfig).toStrictEqual(mail);
    +
    +    process.env.isDev = undefined;
    +    process.env.host = undefined;
    +    process.env.port = undefined;
    +    process.env.clientUrl = undefined;
    +    process.env.globalPrefix = undefined;
    +    process.env.database = undefined;
    +    process.env.swaggerConfig = undefined;
    +    process.env.jwtConfig = undefined;
    +    // process.env.mailerConfig = undefined;
    +  });
    +});
    +
    +
    +
    +

    And the output should be:

    +
    +
    +
    +test +
    +
    +
  26. +
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/samples-step-by-step.html b/docs/devon4node/1.0/samples-step-by-step.html new file mode 100644 index 00000000..2870b28c --- /dev/null +++ b/docs/devon4node/1.0/samples-step-by-step.html @@ -0,0 +1,780 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Create the employee sample step by step

+
+ +
+
+
+

Application requisites

+
+
+

The employee application needs:

+
+
+
    +
  • +

    A configuration module

    +
  • +
  • +

    A SQLite in memory database

    +
  • +
  • +

    Security: CORS

    +
  • +
  • +

    Swagger support

    +
  • +
  • +

    Authentication using JWT

    +
  • +
  • +

    CRUD for managing employees. The employees will have the following properties:

    +
    +
      +
    • +

      name

      +
    • +
    • +

      surname

      +
    • +
    • +

      email

      +
    • +
    +
    +
  • +
+
+
+
+
+

Create the application

+
+
+
    +
  1. +

    Install Nest CLI

    +
    +

    Execute the command yarn global add @nestjs/cli

    +
    +
  2. +
  3. +

    Install devon4node schematics

    +
  4. +
  5. +

    Execute the command yarn global add @devon4node/schematics

    +
  6. +
  7. +

    Create the new application

    +
    +

    Execute the command nest g -c @devon4node/schematics application employee

    +
    +
  8. +
  9. +

    Then, we need to add some components, go inside the project folder and execute the following commands:

    +
    +

    Go inside project folder: cd employee.

    +
    +
    +

    Config module: nest g -c @devon4node/schematics config-module.

    +
    +
    +

    TypeORM database, choose sqlite DB when asked nest g -c @devon4node/schematics typeorm.

    +
    +
    +

    Add security: nest g -c @devon4node/schematics security.

    +
    +
    +

    Swagger module: nest g -c @devon4node/schematics swagger.

    +
    +
    +

    Auth-jwt authentication: nest g -c @devon4node/schematics auth-jwt.

    +
    +
    +

    Add an application module: nest g -c @devon4node/schematics module employee.

    +
    +
    +

    Add CRUD component: nest g -c @devon4node/schematics crud employee/employee.

    +
    +
    +

    With this, you will generate the following files:

    +
    +
    +
    +
    /employee/.prettierrc
    +/employee/nest-cli.json
    +/employee/package.json
    +/employee/README.md
    +/employee/tsconfig.build.json
    +/employee/tsconfig.json
    +/employee/tslint.json
    +/employee/src/main.ts
    +/employee/test/app.e2e-spec.ts
    +/employee/test/jest-e2e.json
    +/employee/src/app/app.controller.spec.ts
    +/employee/src/app/app.controller.ts
    +/employee/src/app/app.module.ts
    +/employee/src/app/app.service.ts
    +/employee/src/app/core/core.module.ts
    +/employee/src/app/shared/logger/winston.logger.ts
    +/employee/src/app/core/configuration/configuration.module.ts
    +/employee/src/app/core/configuration/model/index.ts
    +/employee/src/app/core/configuration/model/types.ts
    +/employee/src/app/core/configuration/services/configuration.service.spec.ts
    +/employee/src/app/core/configuration/services/configuration.service.ts
    +/employee/src/app/core/configuration/services/index.ts
    +/employee/src/config/default.ts
    +/employee/src/config/develop.ts
    +/employee/src/config/production.ts
    +/employee/src/config/test.ts
    +/employee/src/config/uat.ts
    +/employee/docker-compose.yml
    +/employee/ormconfig.json
    +/employee/src/app/shared/model/entities/base-entity.entity.ts
    +/employee/src/app/core/auth/auth.module.ts
    +/employee/src/app/core/auth/controllers/auth.controller.spec.ts
    +/employee/src/app/core/auth/controllers/auth.controller.ts
    +/employee/src/app/core/auth/controllers/index.ts
    +/employee/src/app/core/auth/decorators/index.ts
    +/employee/src/app/core/auth/decorators/roles.decorator.spec.ts
    +/employee/src/app/core/auth/decorators/roles.decorator.ts
    +/employee/src/app/core/auth/guards/index.ts
    +/employee/src/app/core/auth/guards/roles.guard.spec.ts
    +/employee/src/app/core/auth/guards/roles.guard.ts
    +/employee/src/app/core/auth/model/index.ts
    +/employee/src/app/core/auth/model/roles.enum.ts
    +/employee/src/app/core/auth/model/user-request.interface.ts
    +/employee/src/app/core/auth/services/auth.service.spec.ts
    +/employee/src/app/core/auth/services/auth.service.ts
    +/employee/src/app/core/auth/services/index.ts
    +/employee/src/app/core/auth/strategies/index.ts
    +/employee/src/app/core/auth/strategies/jwt.strategy.spec.ts
    +/employee/src/app/core/auth/strategies/jwt.strategy.ts
    +/employee/src/app/core/user/user.module.ts
    +/employee/src/app/core/user/model/index.ts
    +/employee/src/app/core/user/model/dto/user-payload.dto.ts
    +/employee/src/app/core/user/model/entities/user.entity.ts
    +/employee/src/app/core/user/services/index.ts
    +/employee/src/app/core/user/services/user.service.spec.ts
    +/employee/src/app/core/user/services/user.service.ts
    +/employee/test/auth/auth.service.mock.ts
    +/employee/test/user/user.repository.mock.ts
    +/employee/src/app/employee/employee.module.ts
    +/employee/src/app/employee/model/entities/employee.entity.ts
    +/employee/src/app/employee/model/index.ts
    +/employee/src/app/employee/controllers/employee.crud.controller.ts
    +/employee/src/app/employee/services/employee.crud.service.ts
    +/employee/src/app/employee/services/index.ts
    +/employee/src/app/employee/controllers/index.ts
    +
    +
    +
  10. +
  11. +

    Open the VSCode

    +
    +

    Execute the commands:

    +
    +
    +
    +
    yarn install
    +code .
    +
    +
    +
  12. +
  13. +

    Fill in the entity: src/app/employee/model/entities/employee.entity.ts

    +
    +
      +
    1. +

      Add the columns

      +
      +
      +
      @Entity()
      +export class Employee extends BaseEntity {
      +  @Column('varchar', { length: 255, nullable: true })
      +  name?: string;
      +
      +  @Column('varchar', { length: 255, nullable: true })
      +  surname?: string;
      +
      +  @Column('varchar', { length: 255, nullable: true })
      +  email?: string;
      +}
      +
      +
      +
    2. +
    3. +

      Add the validations

      +
      +
      +
      @Entity()
      +export class Employee extends BaseEntity {
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  name?: string;
      +
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  surname?: string;
      +
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @IsEmail()
      +  @Column('varchar', { length: 255, nullable: true })
      +  email?: string;
      +}
      +
      +
      +
    4. +
    5. +

      Add the transformations

      +
      +

      In this specific case, we will not transform any property, but you can see an example in the src/app/shared/model/entities/base-entity.entity.ts file.

      +
      +
      +
      +
      export abstract class BaseEntity {
      +  @PrimaryGeneratedColumn('increment')
      +  id!: number;
      +
      +  @VersionColumn({ default: 1 })
      +  @Exclude({ toPlainOnly: true })
      +  version!: number;
      +
      +  @CreateDateColumn()
      +  @Exclude({ toPlainOnly: true })
      +  createdAt!: string;
      +
      +  @UpdateDateColumn()
      +  @Exclude({ toPlainOnly: true })
      +  updatedAt!: string;
      +}
      +
      +
      +
    6. +
    7. +

      Add swagger metadata

      +
      +
      +
      @Entity()
      +export class Employee extends BaseEntity {
      +  @ApiPropertyOptional()
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  name?: string;
      +
      +  @ApiPropertyOptional()
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  surname?: string;
      +
      +  @ApiPropertyOptional()
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @IsEmail()
      +  @Column('varchar', { length: 255, nullable: true })
      +  email?: string;
      +}
      +
      +
      +
    8. +
    +
    +
  14. +
  15. +

    Add swagger metadata to src/app/employee/controllers/employee.crud.controller.ts

    +
    +
    +
    @ApiTags('employee')
    +
    +
    +
  16. +
  17. +

    Generate database migrations

    +
    +
      +
    1. +

      Build the application: yarn build

      +
    2. +
    3. +

      In order to create migration scripts with TypeORM, you need to install ts-node: yarn global add ts-node

      +
    4. +
    5. +

      Generate the tables creation migration: yarn run typeorm migration:generate -n CreateTables

      +
      +
      +generate migrations +
      +
      +
      +

      The output will be something similar to:

      +
      +
      +
      +
      export class CreateTables1572480273012 implements MigrationInterface {
      +  name = 'CreateTables1572480273012';
      +
      +  public async up(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(
      +      `CREATE TABLE "user" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "version" integer NOT NULL DEFAULT (1), "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "updatedAt" datetime NOT NULL DEFAULT (datetime('now')), "username" varchar(255) NOT NULL, "password" varchar(255) NOT NULL, "role" integer NOT NULL DEFAULT (0))`,
      +      undefined,
      +    );
      +    await queryRunner.query(
      +      `CREATE TABLE "employee" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "version" integer NOT NULL DEFAULT (1), "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "updatedAt" datetime NOT NULL DEFAULT (datetime('now')), "name" varchar(255), "surname" varchar(255), "email" varchar(255))`,
      +      undefined,
      +    );
      +  }
      +
      +  public async down(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(`DROP TABLE "employee"`, undefined);
      +    await queryRunner.query(`DROP TABLE "user"`, undefined);
      +  }
      +}
      +
      +
      +
      +

      The number in the name is a timestamp, so it may change in your application.

      +
      +
    6. +
    7. +

      Create a migration to insert data: `yarn run typeorm migration:generate -n InsertData`

      +
      +
      +insert data +
      +
      +
      +

      and fill in with the following code:

      +
      +
      +
      +
      export class InsertData1572480830290 implements MigrationInterface {
      +  public async up(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(1, 'Santiago', 'Fowler', 'Santiago.Fowler@example.com');`,
      +    );
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(2, 'Clinton', 'Thornton', 'Clinton.Thornton@example.com');`,
      +    );
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(3, 'Lisa', 'Rodriquez', 'Lisa.Rodriquez@example.com');`,
      +    );
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(4, 'Calvin', 'Becker', 'Calvin.Becker@example.com');`,
      +    );
      +    await queryRunner.query(`INSERT INTO USER(id, username, password, role) VALUES(?, ?, ?, ?);`, [
      +      1,
      +      'user',
      +      await hash('password', await genSalt(12)),
      +      roles.USER,
      +    ]);
      +    await queryRunner.query(`INSERT INTO USER(id, username, password, role) VALUES(?, ?, ?, ?);`, [
      +      2,
      +      'admin',
      +      await hash('admin', await genSalt(12)),
      +      roles.ADMIN,
      +    ]);
      +  }
      +
      +  public async down(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(`DELETE FROM EMPLOYEE`);
      +    await queryRunner.query(`DELETE FROM USER`);
      +  }
      +}
      +
      +
      +
    8. +
    +
    +
  18. +
  19. +

    Start the application: yarn start:dev

    +
    +
    +start app +
    +
    +
  20. +
  21. +

    Check the swagger endpoint: http://localhost:3000/v1/api

    +
    +
    +swagger +
    +
    +
  22. +
  23. +

    Make petitions to the employee CRUD: http://localhost:3000/v1/employee/employees

    +
    +
    +employees +
    +
    +
  24. +
  25. +

    Write the tests

    +
    +

    As we did not create any methods and only added some properties to the entity, the whole application is already tested by the autogenerated code. As we added some modules, you need to uncomment some lines in the src/app/core/configuration/services/configuration.service.spec.ts:

    +
    +
    +
    +
    describe('ConfigurationService', () => {
    +  const configService: ConfigurationService = new ConfigurationService();
    +
    +  it('should return the values of test config file', () => {
    +    expect(configService.isDev).toStrictEqual(def.isDev);
    +    expect(configService.host).toStrictEqual(def.host);
    +    expect(configService.port).toStrictEqual(def.port);
    +    expect(configService.clientUrl).toStrictEqual(def.clientUrl);
    +    expect(configService.globalPrefix).toStrictEqual(def.globalPrefix);
    +    // Remove comments if you add those modules
    +    expect(configService.database).toStrictEqual(def.database);
    +    expect(configService.swaggerConfig).toStrictEqual(def.swaggerConfig);
    +    expect(configService.jwtConfig).toStrictEqual(def.jwtConfig);
    +    // expect(configService.mailerConfig).toStrictEqual(def.mailerConfig);
    +  });
    +  it('should take the value of environment varible if defined', () => {
    +    process.env.isDev = 'true';
    +    process.env.host = 'notlocalhost';
    +    process.env.port = '123456';
    +    process.env.clientUrl = 'http://theclienturl.net';
    +    process.env.globalPrefix = 'v2';
    +    process.env.swaggerConfig = JSON.stringify({
    +      swaggerTitle: 'Test Application',
    +    });
    +    process.env.database = JSON.stringify({
    +      type: 'oracle',
    +      cli: { entitiesDir: 'src/notentitiesdir' },
    +    });
    +    process.env.jwtConfig = JSON.stringify({ secret: 'NOTSECRET' });
    +    // process.env.mailerConfig = JSON.stringify({ mailOptions: { host: 'notlocalhost' }});
    +
    +    expect(configService.isDev).toBe(true);
    +    expect(configService.host).toBe('notlocalhost');
    +    expect(configService.port).toBe(123456);
    +    expect(configService.clientUrl).toBe('http://theclienturl.net');
    +    expect(configService.globalPrefix).toBe('v2');
    +    const database: any = { ...def.database, type: 'oracle' };
    +    database.cli.entitiesDir = 'src/notentitiesdir';
    +    expect(configService.database).toStrictEqual(database);
    +    expect(configService.swaggerConfig).toStrictEqual({
    +      ...def.swaggerConfig,
    +      swaggerTitle: 'Test Application',
    +    });
    +    expect(configService.jwtConfig).toStrictEqual({
    +      ...def.jwtConfig,
    +      secret: 'NOTSECRET',
    +    });
    +    // const mail: any = { ...def.mailerConfig };
    +    // mail.mailOptions.host = 'notlocalhost';
    +    // expect(configService.mailerConfig).toStrictEqual(mail);
    +
    +    process.env.isDev = undefined;
    +    process.env.host = undefined;
    +    process.env.port = undefined;
    +    process.env.clientUrl = undefined;
    +    process.env.globalPrefix = undefined;
    +    process.env.database = undefined;
    +    process.env.swaggerConfig = undefined;
    +    process.env.jwtConfig = undefined;
    +    // process.env.mailerConfig = undefined;
    +  });
    +});
    +
    +
    +
    +

    And the output should be:

    +
    +
    +
    +test +
    +
    +
  26. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4node/1.0/samples.html b/docs/devon4node/1.0/samples.html new file mode 100644 index 00000000..bcb813cb --- /dev/null +++ b/docs/devon4node/1.0/samples.html @@ -0,0 +1,386 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4node Samples

+
+
+

In the folder /samples, you can find some devon4node examples that could be useful for you in order to better understand the framework.

+
+
+

The samples are:

+
+
+ +
+
+

Also, we have another realistic example in the My Thai Star repository. This example is the implementation of the My Thai Star backend, which is compatible with the frontend made with Angular. To do that, this node implementation exposes the same API as the Java backend. Take care with this example: as we need to follow the Java API, some components do not follow the devon4node patterns and code conventions.

+
+
+
+
+

Todo example

+
+
+

This example is the backend part of a TO-DO application. It exposes an API where you can create, read, update and delete a TO-DO list.

+
+
+

In order to start the application, run the following commands in the todo folder:

+
+
+
+
$ yarn
+$ yarn build
+$ yarn start
+
+
+
+

Now, you can access the application using the URL http://localhost:3000/v1/todo/todos. If you want to know all the endpoints exposed, you can see the Swagger at: http://localhost:3000/v1/api.

+
+
+

Also, in this example we show you how to control access to your application by implementing an authentication mechanism using JWT and a role-based strategy. In order to access the list of todos (http://localhost:3000/v1/todo/todos), first you need to call POST http://localhost:3000/v1/auth/login and in the body you need to send the user information:

+
+
+
+
{
+  "username": "user",
+  "password": "password"
+}
+
+
+
+

It will return a JWT token for the user user. The role of this user is USER, so you can only access the methods GET, POST and DELETE of the endpoint http://localhost:3000/v1/todo/todos. If you log in with the user admin/admin, you will be able to access the methods UPDATE and PATCH.

+
+
+
+
+

Employee example

+
+
+

This is an example of employee management application. With the application you can create, read, update and delete employees.

+
+
+

In order to start the application, run the following commands in the employee folder:

+
+
+
+
$ yarn
+$ yarn build
+$ yarn start
+
+
+
+

Now, you can access the application using the URL http://localhost:8081/v1/employee/employees. If you want to know all the endpoints exposed, you can see the Swagger at: http://localhost:8081/v1/api.

+
+
+

This is a simple example without authentication. With this example you can learn how to work with database migrations. You can find them in the folder /src/migrations. TypeORM is configured in ormconfig.json to execute the migrations every time you start this application, with the following flag:

+
+
+
+
"migrationsRun": true
+
+
+
+

You can also execute the migrations manually by typing the command devon4node db migration:run, or revert them by executing devon4node db migration:revert. Take into account that this application uses an in-memory SQLite database, so every time you stop the application all data is lost.

+
+
+
+
+

Components example

+
+
+

This example allows you to better understand the execution order of the components of a devon4node application (guards, pipes, interceptors, filters, middleware).

+
+
+

In order to start the application, run the following commands in the components folder:

+
+
+
+
$ yarn
+$ yarn build
+$ yarn start
+
+
+
+

In order to see the execution order, you can call to http://localhost:3000/v1. It will show you the execution order of all components except the filters. If you want to know the execution order while a filter is applied, call to the endpoint with the following queries: ?hello=error, ?hello=controller, ?hello=global.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4quarkus/1.0/Home.html b/docs/devon4quarkus/1.0/Home.html new file mode 100644 index 00000000..89219253 --- /dev/null +++ b/docs/devon4quarkus/1.0/Home.html @@ -0,0 +1,177 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw for Quarkus (devon4quarkus)

+
+
+

Welcome to the Quarkus stack of devonfw. devon4quarkus is documented by a platform guide (see the side-bar of this wiki) to be used in your projects.

+
+
+
+
+

For contributors

+
+
+

Contributions and improvements to devonfw are more than welcome. Please read our contributing guide to get started.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4quarkus/1.0/devon4quarkus.html b/docs/devon4quarkus/1.0/devon4quarkus.html new file mode 100644 index 00000000..50da4f31 --- /dev/null +++ b/docs/devon4quarkus/1.0/devon4quarkus.html @@ -0,0 +1,274 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

quarkus

+
+
+

The devonfw community +${project.version}, ${buildtime}

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4quarkus/1.0/guide-coding-conventions.html b/docs/devon4quarkus/1.0/guide-coding-conventions.html new file mode 100644 index 00000000..957b563b --- /dev/null +++ b/docs/devon4quarkus/1.0/guide-coding-conventions.html @@ -0,0 +1,523 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4quarkus coding conventions

+
+
+

This guide describe the conventions for naming and structure in a devon4quarkus application. In addition to the points mentioned here, the code should follow the general Java conventions (see Oracle Naming Conventions, Google Java Style, etc.).

+
+
+
+
+

Things to discuss

+
+
+ +
+
+
+
+

Structure

+
+ +
+
+
+

Packages

+
+
+

For a devon4quarkus-based application, we recommend using the following Java package scheme:

+
+
+
+
«root».«component».«layer»[.«detail»]*
+
+
+
+

See also existing devon4j packaging that we want to simplify and modernize here.

+
+
+
Segments of package schema
+

|== == == == == == == == == == == == == == == == == == == == == == = +| Segment | Description | Example +| «root» | The root package. Typically we suggest to use «group».«artifact» for «root». However, we will not enforce anything and let you the freedom of choice if the recommendation does not fit for you.|com.devonfw +| «group» | Matches your maven groupId coordinate, basic name-space of the organization or IT project owning the code. Corresponds to domain name reversed. |com.devonfw +| «artifact» | Matches your maven artifact coordinate, converted to java package conventions(e.g. - omitted). Can be further simplified if it does not cause ambiguity (e.g. presales-order-bffpobff) | orderservice, usermgmt.. +| «component» | Only used if the service serves multiple business components. Most microservices usually address a single business component/domain. In this case it is omitted (it is already implied by the artifactid)| user, order +| «layer» | The name of the technical layer (See technical architecture) which is one of the predefined layers (domain, rest, logic) or common for code not assigned to a technical layer (datatypes, cross-cutting concerns). Additional layers can be introduced if they have clear scope e.g. batch, process.. | rest +| «detail» | You will technically have the freedom of choice to define your sub-packages. Compared to devon4j we neither enforce nor recommend anymore to use the «scope» segment in your packaging. You are still free to do so if you like it. However, we now suggest the «type» segment for further classification within «detail».|dao +| «type» | Further division based on common stereotypes based on type of component. Suggested stereotypes for rest layer are [controller, model, mapper, filter..] for domain layer: [model, dao] | dao +|== == == == == == == == == == == == == == == == == == == == == == =

+
+
+

For a typical backend microservice that provides HTTP API and data persistence via JPA in the business domain serviceorder, the structure would be something like this:

+
+
+
+
«group».«artifact»
+├──.domain
+|  ├──.dao
+|  |  ├──ServiceOrderDAO
+|  |  └──ServiceOrderItemDAO
+|  ├──.model
+|  |  ├──ServiceOrderEntity
+|  |  └──ServiceOrderItemEntity
+├──.logic
+|  ├──NewServiceOrderValidator
+|  └──ServiceOrderEventsEmitter
+└──.rest
+   ├──.controller
+   |  └──ServiceOrderRestController
+   ├──.mapper
+   |  └──ServiceOrderMapper
+   └──.model
+      ├──NewServiceOrderDTO
+      ├──ServiceOrderPageResutltDTO
+      └──ServiceOrderDTO
+
+
+
+
+
+

Layer

+
+ +
+
+
+

== Data access layer

+
+
+

When using JPA/Hibernate for data persistence, please use the following subpackages under your domain package:

+
+
+
    +
  • +

    dao: For the Data Access Objects (DAOs). The naming should be always «entity»DAO

    +
  • +
  • +

    repo: For repositories, if you use Spring Data for data access

    +
  • +
  • +

    model: For all entities, views or other objects used to read and write to DB.

    +
  • +
+
+
+
+
+

== Logic layer

+
+
+

Use the layer to provide any microservice-specific business logic and add sub-packages as needed, depending on the type and number of classes required.

+
+
+

Before introducing a new service, check whether it is really needed or whether it can be replaced by a standard/framework solution (e.g. validators can be covered by the bean validation specification in 90% of cases by using annotations on models). +Strive for clear naming, based on the scope of the class, instead of generic names. BAD: OrderService, EmailManagement, BETTER: OrderValidator, EmailSender

+
+
+
+
+

== REST layer

+
+
+

Depending on the requirements of the project, a service may provide several APIs, e.g. a fixed version, a public API that must remain strictly backward compatible, and a separate non-public API used for internal functions or operations. Often the app needs to provide multiple public API versions. +If this is the case, we suggest to introduce «version» as an intermediate package:

+
+
+
+
└──.rest
+   ├──internal
+   |  ├──.controller
+   |  |  ├──AdminOperationsRestController
+   |  |  └──EventRestController
+   |  ├──.mapper
+   |  |  └──AdminOperationMapper
+   |  └──.model
+   |     ├──EventDTO
+   |     ├──AdminOperationDTO
+   |     └──AdminOperationResultDTO
+   ├──v1
+   |  ├──.controller
+   |  |  └──ServiceOrderRestController
+   |  ├──.mapper
+   |  |  └──ServiceOrderMapper
+   |  └──.model
+   |     ├──NewServiceOrderDTO
+   |     ├──ServiceOrderPageResutltDTO
+   |     └──ServiceOrderDTO
+   └──v2
+      ├──.controller
+      |  ├──ServiceOrderItemRestController
+      |  └──ServiceOrderRestController
+      ├──.mapper
+      |  └──ServiceOrderMapper
+      ├──.filter
+      |   └──CustomPayloadFilter
+      └──.model
+         ├──NewServiceOrderDTO
+         ├──ServiceOrderItemDTO
+         ├──ServiceOrderPageResutltDTO
+         ├──ServiceOrderPatchRequestDTO
+         └──ServiceOrderDTO
+
+
+
+
+
+

Comparison with devon4j

+
+
+
    +
  • +

    service.[api|impl].[rest|ws] simply becomes rest, ws (in case someone is still using legacy SOAP), grpc, etc. Technically we can still derive that this all implies the service layer.

    +
  • +
  • +

    dataaccess becomes domain. You are not forced to follow this, and architecture validation such as our sonar-devon4j-plugin will in the future support both. However, new CobiGen templates for quarkus/cloud-native will use this new default, and you would need to adapt them if you want to change. We also suggest putting entities in the model sub-package (see «type»).

    +
  • +
  • +

    logic remains logic

    +
  • +
+
+
+
+
+

Naming conventions

+
+
+

In addition to the general Java naming conventions, the following rules should be observed

+
+
+
    +
  • +

    Names should be descriptive and concise. Always use short but speaking names (for types, methods, fields, parameters, variables, constants, etc.).

    +
  • +
  • +

    Name should indicate the type of object it represents.

    +
  • +
  • +

    Strictly avoid special characters in technical names (for files, types, fields, methods, properties, variables, database tables, columns, constraints, etc.). In other words only use Latin alphanumeric ASCII characters with the common allowed technical separators for the according context (e.g. underscore) for technical names (even excluding whitespaces).

    +
  • +
  • +

    For package segments and type names prefer singular forms (CustomerEntity instead of CustomersEntity). Only use plural forms when there is no singular or it is really semantically required (e.g. for a container that contains multiple of such objects).

    +
  • +
  • +

    Avoid having duplicate type names. The name of a class, interface, enum or annotation should be unique within your project unless this is intentionally desired in a special and reasonable situation.

    +
  • +
  • +

    All classes in a single «type» package should have the same naming structure (e.g. don't mix EntityRepo and OtherEntityDAO inside the dao package).

    +
  • +
  • +

    Avoid artificial naming constructs such as prefixes (I*) or suffixes (*IF) for interfaces.

    +
  • +
  • +

    Avoid property/field names where the second character is upper-case at all (e.g. 'aBc').

    +
  • +
  • +

    Names of Generics should be easy to understand. Where suitable follow the common rule E=Element, T=Type, K=Key, V=Value but feel free to use longer names for more specific cases such as ID, DTO or ENTITY. The capitalized naming helps to distinguish a generic type from a regular class.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4quarkus/1.0/guide-cors-support.html b/docs/devon4quarkus/1.0/guide-cors-support.html new file mode 100644 index 00000000..482c8b13 --- /dev/null +++ b/docs/devon4quarkus/1.0/guide-cors-support.html @@ -0,0 +1,389 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

CORS support

+
+
+

When you develop the JavaScript client and the server application separately, you have to deal with cross-domain issues: requests are made from an origin domain distinct from the target domain, and the browser does not allow this by default.

+
+
+

So, we need to prepare the server side to accept requests from other domains. We need to cover the following points:

+
+
+
    +
  • +

    Accept request from other domains.

    +
  • +
  • +

    Accept devonfw used headers like X-CSRF-TOKEN or correlationId.

    +
  • +
  • +

    Be prepared to receive secured request (cookies).

    +
  • +
+
+
+

It is important to note that if you are using security in your request (sending cookies) you have to set withCredentials flag to true in your client side request and deal with special IE8 characteristics.

+
+
+
+
+

Configuring CORS support

+
+
+

Quarkus comes with a CORS filter which implements the javax.servlet.Filter interface and intercepts all incoming HTTP requests. It can be enabled in the Quarkus configuration file, src/main/resources/application.properties:

+
+
+
+
quarkus.http.cors=true
+
+
+
+
+
+

Configuration with quarkus

+
+
+

Here’s an example of a full CORS filter configuration, including a regular expression defining an allowed origin:

+
+
+
+
##enable cors filter
+quarkus.http.cors=true
+##configurations cors
+quarkus.http.cors.origins=http://foo.com,http://www.bar.io,/https://([a-z0-9\\-_]+)\\.app\\.mydomain\\.com/
+quarkus.http.cors.methods=OPTIONS,HEAD,GET,PUT,POST,DELETE,PATCH
+quarkus.http.cors.headers=X-Custom
+quarkus.http.cors.exposed-headers=Content-Disposition
+quarkus.http.cors.access-control-max-age=24H
+quarkus.http.cors.access-control-allow-credentials=true
+
+
+
+

|== = +|Attribute | Default| Description |HTTP Header

+
+
+

|quarkus.http.cors.access-control-allow-credentials +|- +|Boolean value to tell the browsers to expose the response to front-end JavaScript code when the request’s credentials mode Request.credentials is “include” +|Access-Control-Allow-Credentials

+
+
+

|quarkus.http.cors.origins +|* +|The comma-separated list of origins allowed for CORS. Values starting and ending with '/'' will be treated as regular expressions. The filter allows any origin if this is not set or set to '*'. +|Access-Control-Allow-Origin

+
+
+

|quarkus.http.cors.methods +|* +|The comma-separated list of HTTP methods allowed for CORS. The filter allows any method if this is not set or set to '*'. +|Access-Control-Allow-Methods

+
+
+

|quarkus.http.cors.headers +|* +|The comma-separated list of HTTP headers allowed for CORS. The filter allows any header if this is not set or set to '*'. +|Access-Control-Allow-Headers

+
+
+

|quarkus.http.cors.exposed-headers +|* +|The comma-separated list of HTTP headers exposed in CORS. The filter allows any headers to be exposed if this is not set or set to '*'. +|Access-Control-Expose-Headers

+
+
+

|quarkus.http.cors.access-control-max-age +|- +|The duration (see note below) indicating how long the results of a pre-flight request can be cached. +|Access-Control-Max-Age

+
+
+

|== =

+
+
+
+
+

Configuration with service mesh

+
+
+

Alternatively, if you use service mesh, you can also define your CORS policy directly there. Here is an example from istio

+
+
+

More information about the CORS headers can be found here

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4quarkus/1.0/guide-logging.html b/docs/devon4quarkus/1.0/guide-logging.html new file mode 100644 index 00000000..2b0733bf --- /dev/null +++ b/docs/devon4quarkus/1.0/guide-logging.html @@ -0,0 +1,533 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Logging

+
+
+

Logging is the recording of messages during the execution of an application. The log messages provide information for developers or administrators that can be used to troubleshoot or maintain the application, such as errors and warnings, but also info messages such as runtime statistics that can be used for analysis.

+
+
+

One must distinguish between a logging API and logging implementations. A logging API provides a standardised interface, while the specific implementation is a framework that is developed against and uses the API.

+
+
+

Internally, Quarkus uses the JBoss Logging facade, an abstraction layer that provides support for multiple logging APIs and JBoss LogManager, which provides implementations for the specific APIs. The following logging APIs are supported:

+
+ +
+
+
+

Usage

+
+ +
+
+
+

Maven integration

+
+
+

We recommend using SLF4j as the logging API. Since Quarkus uses JBoss logging internally, you can use it out of the box and do not need to add any dependencies to your project to use it. JBoss LogManager will send it to the appropriate implementation.

+
+
+

Exceptional case: +If you use a dependency in your project that has dependencies on other logging libraries like SLF4j, then you need to exclude them from the dependency and use a JBoss Logging adapter. For more information, see here. +For example, if you have a dependency that uses SLF4j, you need to add the following dependency to your pom.xml file:

+
+
+
+
<dependency>
+    <groupId>org.jboss.slf4j</groupId>
+    <artifactId>slf4j-jboss-logmanager</artifactId>
+</dependency>
+
+
+
+
+
+

==

+
+
+

This is not needed for libraries that are dependencies of a Quarkus extension as the extension will take care of this for you. +== ==

+
+
+
+
+

Logger access

+
+
+

The general pattern for accessing an instance of a logger class is to use static instances. So for SLF4j, the following lines are sufficient to create a log object:

+
+
+
+
...
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class MyClass {
+    private static final Logger log = LoggerFactory.getLogger(MyClass.class);
+
+}
+
+
+
+

If you use Lombok in your project, you can simply add the @Slf4j annotation to your class. Lombok will then automatically create a logger instance that you can use in your code.

+
+
+
+
+

Using the logger

+
+
+

After you have created the logger instance, you can simply use one of the log methods of the corresponding object. Different logging APIs provide different methods for creating log messages. When using SLF4j, there are several methods such as info, warn, error that are logged depending on the log level set (see [configuration]).

+
+
+
+
...
+public void myMethod(...) {
+    log.info("your log message");
+}
+...
+
+
+
+

For detailed documentation on the SLF4j API, see here.

+
+
+
+
+

Configuration

+
+
+

There are several options you can set in the application.properties file to configure the behaviour of the logger. For example, to set the log level or the format of the log messages.

+
+
+
+
+

== Log levels

+
+
+

Quarkus supports eight different log levels (see here for an overview). Use quarkus.log.level to set the default log level of the application (default is INFO). To define more specific log levels, you can set different levels per category.

+
+
+
+
quarkus.log.level=INFO
+quarkus.log.category."org.hibernate".level=DEBUG
+
+
+
+

This would set the default log level in your application to INFO and the Hibernate log level to DEBUG.

+
+
+

To understand when to use which log level, you can take a look at the devon4j logging guide.

+
+
+
+
+

== Format

+
+
+

To configure the output format of the log messages, set the property quarkus.log.console.format. Information on the supported options can be found here.

+
+
+
+
quarkus.log.console.format=[D: %d] [P: %p] [C: %X] [T: %t] [L: %c] [M: %m]%n
+
+
+
+

Result:

+
+
+
+
[D: 2021-07-20 11:54:33,127] [P: DEBUG] [C: «MDC values»] [T: executor-thread-0] [L: my.package.MyClass] [M: log message...]
+
+
+
+
+
+

Customizing log messages

+
+
+

You can use Mapped Diagnostic Context to add custom fields to your log messages. MDC is a simple map consisting of key-value pairs to store additional useful information such as session or request ids that can be helpful when filtering log messages or debugging applications.

+
+
+
+
import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.MDC;
+
+...
+public class MyClass {
+
+    private static final Logger log = LoggerFactory.getLogger(SLF4JLoggingResource.class);
+
+    public String myMethod() {
+    	MDC.put("yourKey", "yourValue");
+    	log.info("log message ...");
+    }
+
+    ...
+}
+
+
+
+

Result:

+
+
+
+
[D: 2021-07-20 11:54:33,127] [P: DEBUG] [C: {yourKey=yourValue}] [T: executor-thread-0] [L: my.package.MyClass] [M: log message...]
+
+
+
+
+
+

JSON Logging

+
+
+

For production environments we suggest using JSON logs instead of plain text. The JSON output can be captured by external services for storage and analysis. To do this, add the quarkus-logging-json extension to your project's pom.xml file.

+
+
+
+
<dependency>
+    <groupId>io.quarkus</groupId>
+    <artifactId>quarkus-logging-json</artifactId>
+</dependency>
+
+
+
+

This will change the output format by default. Since it makes sense in development environments to have the output format in a human readable format, you can disable JSON logging for development (or test) environments by adding the following properties to your application.properties file.

+
+
+
+
%dev.quarkus.log.console.json=false
+%test.quarkus.log.console.json=false
+
+
+
+
+
+

Centralized Log Management

+
+
+

As mentioned in the section on JSON logging, in production environments it makes sense to have a service to store and analyse the logs. For this, you can use a central log management system like Graylog or Logstash in combination with Elasticsearch, which provides you with a powerful search engine.

+
+
+

For this, Quarkus provides the quarkus-logging-gelf extension to send the logs in the Graylog Extended Log Format (GELF) to your log management system.

+
+
+
+
<dependency>
+    <groupId>io.quarkus</groupId>
+    <artifactId>quarkus-logging-gelf</artifactId>
+</dependency>
+
+
+
+

You do not have to extend your code, just configure the GELF log handler to your management system.

+
+
+
+
quarkus.log.handler.gelf.enabled=true
+quarkus.log.handler.gelf.host=tcp:localhost
+quarkus.log.handler.gelf.port=12201
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4quarkus/1.0/index.html b/docs/devon4quarkus/1.0/index.html new file mode 100644 index 00000000..5b2011c6 --- /dev/null +++ b/docs/devon4quarkus/1.0/index.html @@ -0,0 +1,281 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw for Quarkus (devon4quarkus)

+
+
+

Welcome to the Quarkus stack of devonfw. devon4quarkus is documented by a platform guide (see the side-bar of this wiki) to be used in your projects.

+
+
+
+
+

For contributors

+
+
+

Contributions and improvements to devonfw are more than welcome. Please read our contributing guide to get started.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/_images/images/CG-architectureBackground.png b/docs/devon4ts/1.0/_images/images/CG-architectureBackground.png new file mode 100644 index 00000000..91b3d696 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/CG-architectureBackground.png differ diff --git a/docs/devon4ts/1.0/_images/images/CapgeminiLogo.png b/docs/devon4ts/1.0/_images/images/CapgeminiLogo.png new file mode 100644 index 00000000..9ce5e7e5 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/CapgeminiLogo.png differ diff --git a/docs/devon4ts/1.0/_images/images/CapgeminiLogoWhite.png b/docs/devon4ts/1.0/_images/images/CapgeminiLogoWhite.png new file mode 100644 index 00000000..3e5bf717 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/CapgeminiLogoWhite.png differ diff --git a/docs/devon4ts/1.0/_images/images/Example_Angular_Restaurant_Screen.png b/docs/devon4ts/1.0/_images/images/Example_Angular_Restaurant_Screen.png new file mode 100644 index 00000000..3d8793d1 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/Example_Angular_Restaurant_Screen.png differ diff --git a/docs/devon4ts/1.0/_images/images/IntegratedIDE.png b/docs/devon4ts/1.0/_images/images/IntegratedIDE.png new file mode 100644 index 00000000..bb2068dc Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/IntegratedIDE.png differ diff --git a/docs/devon4ts/1.0/_images/images/Logo.png b/docs/devon4ts/1.0/_images/images/Logo.png new file mode 100644 index 00000000..f6e4e645 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/Logo.png differ diff --git a/docs/devon4ts/1.0/_images/images/OASP-Layering.png b/docs/devon4ts/1.0/_images/images/OASP-Layering.png new file mode 100644 index 00000000..84a065eb Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/OASP-Layering.png differ diff --git a/docs/devon4ts/1.0/_images/images/OASP.png b/docs/devon4ts/1.0/_images/images/OASP.png new file mode 100644 index 00000000..2b0e2574 Binary files 
/dev/null and b/docs/devon4ts/1.0/_images/images/OASP.png differ diff --git a/docs/devon4ts/1.0/_images/images/OASP_dark.png b/docs/devon4ts/1.0/_images/images/OASP_dark.png new file mode 100644 index 00000000..edf59f1a Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/OASP_dark.png differ diff --git a/docs/devon4ts/1.0/_images/images/OASP_technologies_used.png b/docs/devon4ts/1.0/_images/images/OASP_technologies_used.png new file mode 100644 index 00000000..98db5b7f Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/OASP_technologies_used.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-clarity-layout/0.png b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/0.png new file mode 100644 index 00000000..3e4b31d7 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/0.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-clarity-layout/1.png b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/1.png new file mode 100644 index 00000000..a168ebfa Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/1.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-clarity-layout/10.png b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/10.png new file mode 100644 index 00000000..ee452fec Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/10.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-clarity-layout/11.png b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/11.png new file mode 100644 index 00000000..bf031376 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/11.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-clarity-layout/12.png b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/12.png new file mode 100644 index 00000000..37ecfef2 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/12.png 
differ diff --git a/docs/devon4ts/1.0/_images/images/angular-clarity-layout/13.png b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/13.png new file mode 100644 index 00000000..aa68cf4f Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/13.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-clarity-layout/14.png b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/14.png new file mode 100644 index 00000000..63c2ed55 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/14.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-clarity-layout/2.png b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/2.png new file mode 100644 index 00000000..3e4b31d7 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/2.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-clarity-layout/3.png b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/3.png new file mode 100644 index 00000000..21f206b0 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/3.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-clarity-layout/4.png b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/4.png new file mode 100644 index 00000000..c097c867 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/4.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-clarity-layout/5.png b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/5.png new file mode 100644 index 00000000..d0941916 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/5.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-clarity-layout/6.png b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/6.png new file mode 100644 index 00000000..8f7450f8 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/6.png differ diff 
--git a/docs/devon4ts/1.0/_images/images/angular-clarity-layout/7.png b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/7.png new file mode 100644 index 00000000..0244ebdb Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/7.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-clarity-layout/8.png b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/8.png new file mode 100644 index 00000000..e4ed5871 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/8.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-clarity-layout/9.png b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/9.png new file mode 100644 index 00000000..5464bac8 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-clarity-layout/9.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-library/result.png b/docs/devon4ts/1.0/_images/images/angular-library/result.png new file mode 100644 index 00000000..2fe702a8 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-library/result.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_0.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_0.png new file mode 100644 index 00000000..f4aeadca Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_0.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_1.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_1.png new file mode 100644 index 00000000..3e4b31d7 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_1.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_10.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_10.png new file mode 100644 index 00000000..d84563a7 Binary files /dev/null and 
b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_10.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_11.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_11.png new file mode 100644 index 00000000..2eeb8fdd Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_11.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_12.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_12.png new file mode 100644 index 00000000..d0e81eaa Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_12.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_13.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_13.png new file mode 100644 index 00000000..4b3b4074 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_13.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_14.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_14.png new file mode 100644 index 00000000..f1ff7d9f Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_14.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_15.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_15.png new file mode 100644 index 00000000..b00554fe Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_15.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_16.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_16.png new file mode 100644 index 00000000..4367bd60 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_16.png differ 
diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_17.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_17.png new file mode 100644 index 00000000..d3f5edfb Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_17.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_18.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_18.png new file mode 100644 index 00000000..54cb5b00 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_18.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_19.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_19.png new file mode 100644 index 00000000..dc441ee0 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_19.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_2.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_2.png new file mode 100644 index 00000000..a8f8b70f Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_2.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_20.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_20.png new file mode 100644 index 00000000..6728163f Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_20.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_21.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_21.png new file mode 100644 index 00000000..a4f23dba Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_21.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_22.png 
b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_22.png new file mode 100644 index 00000000..98a258c7 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_22.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_3.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_3.png new file mode 100644 index 00000000..625228b0 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_3.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_4.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_4.png new file mode 100644 index 00000000..97f33148 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_4.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_5.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_5.png new file mode 100644 index 00000000..32de7eee Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_5.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_6.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_6.png new file mode 100644 index 00000000..331b345f Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_6.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_7.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_7.png new file mode 100644 index 00000000..fc7a638e Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_7.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_8.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_8.png new file mode 100644 index 
00000000..db26df0a Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_8.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_9.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_9.png new file mode 100644 index 00000000..cae3b40a Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/Screenshot_9.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-primeng-layout/tablestyle.png b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/tablestyle.png new file mode 100644 index 00000000..8f8a5435 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-primeng-layout/tablestyle.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure1.png b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure1.png new file mode 100644 index 00000000..8638e11e Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure1.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure10.png b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure10.png new file mode 100644 index 00000000..b85a70bb Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure10.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure11.png b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure11.png new file mode 100644 index 00000000..b351a5eb Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure11.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure12.png b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure12.png new file mode 100644 index 00000000..7a2f4d7b Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure12.png differ diff --git 
a/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure13.png b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure13.png new file mode 100644 index 00000000..9d2e909f Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure13.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure14.png b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure14.png new file mode 100644 index 00000000..fad86295 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure14.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure15.png b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure15.png new file mode 100644 index 00000000..d7e0f1a7 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure15.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure16.png b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure16.png new file mode 100644 index 00000000..cfe9fbd9 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure16.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure17.png b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure17.png new file mode 100644 index 00000000..b60a8367 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure17.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure18.png b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure18.png new file mode 100644 index 00000000..fe6ee92b Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure18.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure19.png b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure19.png new file mode 100644 index 00000000..1fe8f608 Binary files /dev/null and 
b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure19.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure20.png b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure20.png new file mode 100644 index 00000000..54267100 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure20.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure21.png b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure21.png new file mode 100644 index 00000000..db215fce Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure21.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure3.png b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure3.png new file mode 100644 index 00000000..9fa7f617 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure3.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure4.png b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure4.png new file mode 100644 index 00000000..58c84427 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure4.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure5.png b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure5.png new file mode 100644 index 00000000..883bcb04 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure5.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure6.png b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure6.png new file mode 100644 index 00000000..3c05e35c Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure6.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure7.png b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure7.png 
new file mode 100644 index 00000000..3c3cbf0c Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure7.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure8.png b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure8.png new file mode 100644 index 00000000..29d9fc2a Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure8.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure9.png b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure9.png new file mode 100644 index 00000000..42915cc7 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular-zorro-layout/figure9.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-lazy/compile-eager.png b/docs/devon4ts/1.0/_images/images/angular/angular-lazy/compile-eager.png new file mode 100644 index 00000000..ffce0d19 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-lazy/compile-eager.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-lazy/compile-first-lazy.png b/docs/devon4ts/1.0/_images/images/angular/angular-lazy/compile-first-lazy.png new file mode 100644 index 00000000..8bd56e5d Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-lazy/compile-first-lazy.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-lazy/first-lvl-eager.png b/docs/devon4ts/1.0/_images/images/angular/angular-lazy/first-lvl-eager.png new file mode 100644 index 00000000..2480f2d6 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-lazy/first-lvl-eager.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-lazy/first-lvl-lazy.png b/docs/devon4ts/1.0/_images/images/angular/angular-lazy/first-lvl-lazy.png new file mode 100644 index 00000000..7f89915d Binary files /dev/null and 
b/docs/devon4ts/1.0/_images/images/angular/angular-lazy/first-lvl-lazy.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-lazy/first-lvl-wrong-path.png b/docs/devon4ts/1.0/_images/images/angular/angular-lazy/first-lvl-wrong-path.png new file mode 100644 index 00000000..68587c34 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-lazy/first-lvl-wrong-path.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-lazy/levels-app.png b/docs/devon4ts/1.0/_images/images/angular/angular-lazy/levels-app.png new file mode 100644 index 00000000..0a147442 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-lazy/levels-app.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-lazy/second-lvl-lazy.png b/docs/devon4ts/1.0/_images/images/angular/angular-lazy/second-lvl-lazy.png new file mode 100644 index 00000000..0afe2f5a Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-lazy/second-lvl-lazy.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-lazy/second-lvl-left-lazy.png b/docs/devon4ts/1.0/_images/images/angular/angular-lazy/second-lvl-left-lazy.png new file mode 100644 index 00000000..b2005351 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-lazy/second-lvl-left-lazy.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-lazy/second-lvl-right-eager-d.png b/docs/devon4ts/1.0/_images/images/angular/angular-lazy/second-lvl-right-eager-d.png new file mode 100644 index 00000000..47addfcc Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-lazy/second-lvl-right-eager-d.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-lazy/second-lvl-right-eager.png b/docs/devon4ts/1.0/_images/images/angular/angular-lazy/second-lvl-right-eager.png new file mode 100644 index 00000000..c55c77e2 Binary files /dev/null and 
b/docs/devon4ts/1.0/_images/images/angular/angular-lazy/second-lvl-right-eager.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-pwa/http-serve.png b/docs/devon4ts/1.0/_images/images/angular/angular-pwa/http-serve.png new file mode 100644 index 00000000..d6926625 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-pwa/http-serve.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-pwa/lighthouse-ng.png b/docs/devon4ts/1.0/_images/images/angular/angular-pwa/lighthouse-ng.png new file mode 100644 index 00000000..774a40f6 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-pwa/lighthouse-ng.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-pwa/mts-pwa-rec.png b/docs/devon4ts/1.0/_images/images/angular/angular-pwa/mts-pwa-rec.png new file mode 100644 index 00000000..8cf524e3 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-pwa/mts-pwa-rec.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-pwa/ng-serve.png b/docs/devon4ts/1.0/_images/images/angular/angular-pwa/ng-serve.png new file mode 100644 index 00000000..9f614131 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-pwa/ng-serve.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-pwa/online-offline-ng.png b/docs/devon4ts/1.0/_images/images/angular/angular-pwa/online-offline-ng.png new file mode 100644 index 00000000..0f42e5a4 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-pwa/online-offline-ng.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-pwa/pwa-nopwa-app-ng.png b/docs/devon4ts/1.0/_images/images/angular/angular-pwa/pwa-nopwa-app-ng.png new file mode 100644 index 00000000..4724098e Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-pwa/pwa-nopwa-app-ng.png differ diff --git 
a/docs/devon4ts/1.0/_images/images/angular/angular-theming/custom-dark.png b/docs/devon4ts/1.0/_images/images/angular/angular-theming/custom-dark.png new file mode 100644 index 00000000..701ccadf Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-theming/custom-dark.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-theming/custom-light.png b/docs/devon4ts/1.0/_images/images/angular/angular-theming/custom-light.png new file mode 100644 index 00000000..32aa97ef Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-theming/custom-light.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-theming/deeppurple-amber.png b/docs/devon4ts/1.0/_images/images/angular/angular-theming/deeppurple-amber.png new file mode 100644 index 00000000..26fcdd93 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-theming/deeppurple-amber.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-theming/indigo-pink.png b/docs/devon4ts/1.0/_images/images/angular/angular-theming/indigo-pink.png new file mode 100644 index 00000000..9af01630 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-theming/indigo-pink.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-theming/palette.PNG b/docs/devon4ts/1.0/_images/images/angular/angular-theming/palette.PNG new file mode 100644 index 00000000..3e2e7af4 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-theming/palette.PNG differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-theming/pink-bluegrey.png b/docs/devon4ts/1.0/_images/images/angular/angular-theming/pink-bluegrey.png new file mode 100644 index 00000000..9cc6a27d Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-theming/pink-bluegrey.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-theming/purple-green.png 
b/docs/devon4ts/1.0/_images/images/angular/angular-theming/purple-green.png new file mode 100644 index 00000000..d23d948d Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-theming/purple-green.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-theming/scss-map.png b/docs/devon4ts/1.0/_images/images/angular/angular-theming/scss-map.png new file mode 100644 index 00000000..87285543 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-theming/scss-map.png differ diff --git a/docs/devon4ts/1.0/_images/images/angular/angular-theming/theme-files-structure.png b/docs/devon4ts/1.0/_images/images/angular/angular-theming/theme-files-structure.png new file mode 100644 index 00000000..953d3eaf Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/angular/angular-theming/theme-files-structure.png differ diff --git a/docs/devon4ts/1.0/_images/images/apache_logo.png b/docs/devon4ts/1.0/_images/images/apache_logo.png new file mode 100644 index 00000000..5b5e925b Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/apache_logo.png differ diff --git a/docs/devon4ts/1.0/_images/images/app-initializer/loadExternalConfigFalse.png b/docs/devon4ts/1.0/_images/images/app-initializer/loadExternalConfigFalse.png new file mode 100644 index 00000000..f6c90dd9 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/app-initializer/loadExternalConfigFalse.png differ diff --git a/docs/devon4ts/1.0/_images/images/app-initializer/loadExternalConfigTrue.png b/docs/devon4ts/1.0/_images/images/app-initializer/loadExternalConfigTrue.png new file mode 100644 index 00000000..647ea162 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/app-initializer/loadExternalConfigTrue.png differ diff --git a/docs/devon4ts/1.0/_images/images/architecture-layers.svg b/docs/devon4ts/1.0/_images/images/architecture-layers.svg new file mode 100644 index 00000000..63c0c475 --- /dev/null +++ 
b/docs/devon4ts/1.0/_images/images/architecture-layers.svg @@ -0,0 +1,639 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + c + + + + + + + omponents + + + + + + + + s + + + + + + + ervices + + + + + + + + adapter + + + + + + + + + + module + + + + + + + + s + + + + + + + mart + + + + + + + + dumb + + + + + + + + + store + + + + + + + + model + + + + + + + + use + + + + + + + - + + + + + + + case + + + + + diff --git a/docs/devon4ts/1.0/_images/images/architecture-modules.svg b/docs/devon4ts/1.0/_images/images/architecture-modules.svg new file mode 100644 index 00000000..2ec15f98 --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/architecture-modules.svg @@ -0,0 +1,358 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + shared + + + + + + + + c + + + + + + + ore + + + + + + + + f + + + + + + + eature + + + + + + + (e.g. 
booking) + + + + + + + + a + + + + + + + pp + + + + + + + + + + + + + + + + diff --git a/docs/devon4ts/1.0/_images/images/architecture.png b/docs/devon4ts/1.0/_images/images/architecture.png new file mode 100644 index 00000000..d21165e7 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/architecture.png differ diff --git a/docs/devon4ts/1.0/_images/images/architecture_background.png b/docs/devon4ts/1.0/_images/images/architecture_background.png new file mode 100644 index 00000000..91d5af3a Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/architecture_background.png differ diff --git a/docs/devon4ts/1.0/_images/images/batch_icon.png b/docs/devon4ts/1.0/_images/images/batch_icon.png new file mode 100644 index 00000000..1ca97b15 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/batch_icon.png differ diff --git a/docs/devon4ts/1.0/_images/images/blob-streaming/folder-structure.PNG b/docs/devon4ts/1.0/_images/images/blob-streaming/folder-structure.PNG new file mode 100644 index 00000000..d4880bfd Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/blob-streaming/folder-structure.PNG differ diff --git a/docs/devon4ts/1.0/_images/images/blob-streaming/html-view-1.PNG b/docs/devon4ts/1.0/_images/images/blob-streaming/html-view-1.PNG new file mode 100644 index 00000000..adef8fff Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/blob-streaming/html-view-1.PNG differ diff --git a/docs/devon4ts/1.0/_images/images/capgemini.png b/docs/devon4ts/1.0/_images/images/capgemini.png new file mode 100644 index 00000000..e323d3dd Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/capgemini.png differ diff --git a/docs/devon4ts/1.0/_images/images/cloud_icon.png b/docs/devon4ts/1.0/_images/images/cloud_icon.png new file mode 100644 index 00000000..fc565675 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/cloud_icon.png differ diff --git a/docs/devon4ts/1.0/_images/images/code_icon.png 
b/docs/devon4ts/1.0/_images/images/code_icon.png new file mode 100644 index 00000000..72c4d880 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/code_icon.png differ diff --git a/docs/devon4ts/1.0/_images/images/comillas.png b/docs/devon4ts/1.0/_images/images/comillas.png new file mode 100644 index 00000000..01644235 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/comillas.png differ diff --git a/docs/devon4ts/1.0/_images/images/component-decomposition-example-1.svg b/docs/devon4ts/1.0/_images/images/component-decomposition-example-1.svg new file mode 100644 index 00000000..1370c66d --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/component-decomposition-example-1.svg @@ -0,0 +1,101 @@ + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + + + + diff --git a/docs/devon4ts/1.0/_images/images/component-decomposition-example-2.svg b/docs/devon4ts/1.0/_images/images/component-decomposition-example-2.svg new file mode 100644 index 00000000..747697ae --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/component-decomposition-example-2.svg @@ -0,0 +1,254 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/devon4ts/1.0/_images/images/component-decomposition-example-component-tree.svg b/docs/devon4ts/1.0/_images/images/component-decomposition-example-component-tree.svg new file mode 100644 index 00000000..3b0a7061 --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/component-decomposition-example-component-tree.svg @@ -0,0 +1,356 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + FormListpicker + + + + + + + + FilterResultTable + + + + + + + + FilterInput + + + + + + + + + + + + + + + + + + + DirectInput + + + + + + + + Listpicker + + + + + + + + ListpickerDropdown + + + + + + + + + + + + + + + diff --git 
a/docs/devon4ts/1.0/_images/images/component-tree-highlighted-subtree.svg b/docs/devon4ts/1.0/_images/images/component-tree-highlighted-subtree.svg new file mode 100644 index 00000000..d380bf5b --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/component-tree-highlighted-subtree.svg @@ -0,0 +1,950 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Overview + + + + + + + + SearchPanel + + + + + + + + QuickSearchTab + + + + + + + + Details + + + + + + + + App + + + + + + + + Toolbar + + + + + + + + DetailSearchTab + + + + + + + + CriteriaForm + + + + + + + + Header + + + + + + + + UserInfo + + + + + + + + ActionToolbar + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + LastActivitiesPanel + + + + + + + + + + + + + ResultPanel + + + + + + + + Table + + + + + + + + ResultActions + + + + + + + + LastActivitiesPanel + + + + + + + + + + + + + + + + + + + + + + + + + + + /overview + + + + + + + /details + + + + + diff --git a/docs/devon4ts/1.0/_images/images/component-tree.svg b/docs/devon4ts/1.0/_images/images/component-tree.svg new file mode 100644 index 00000000..010e562b --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/component-tree.svg @@ -0,0 +1,950 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Overview + + + + + + + + SearchPanel + + + + + + + + QuickSearchTab + + + + + + + + Details + + + + + + + + App + + + + + + + + Toolbar + + + + + + + + DetailSearchTab + + + + + + + + CriteriaForm + + + + + + + + Header + + + + + + + + UserInfo + + + + + + + + ActionToolbar + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + LastActivitiesPanel + + + + + + + + + + + + + ResultPanel + + + + + + + + Table + + + + + + + + ResultActions + + + + + + + + LastActivitiesPanel + + + + + + + + + + + + + + + + + + + + + + + + + + + /overview + + + + + + + /details + + + + + diff --git a/docs/devon4ts/1.0/_images/images/components-layer-service-layer-boundaries.svg b/docs/devon4ts/1.0/_images/images/components-layer-service-layer-boundaries.svg new file mode 100644 index 00000000..70484237 --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/components-layer-service-layer-boundaries.svg @@ -0,0 +1,355 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Smart + + + + + + + Component + + + + + + + + Use Case Service + + + + + + + + + + + + + Store + + + + + + + + + + + + subscribe() + + + + + + + action() + + + + + + + + + + Services Layer + + + + + + + Components Layer + + + + + diff --git a/docs/devon4ts/1.0/_images/images/corte_1.png b/docs/devon4ts/1.0/_images/images/corte_1.png new file mode 100644 index 00000000..e1c81c12 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/corte_1.png differ diff --git a/docs/devon4ts/1.0/_images/images/corte_2.png b/docs/devon4ts/1.0/_images/images/corte_2.png new file mode 100644 index 00000000..a0bf4db5 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/corte_2.png differ diff --git a/docs/devon4ts/1.0/_images/images/corte_3.png b/docs/devon4ts/1.0/_images/images/corte_3.png new file mode 100644 index 00000000..a1b02164 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/corte_3.png differ diff --git a/docs/devon4ts/1.0/_images/images/corte_4.png b/docs/devon4ts/1.0/_images/images/corte_4.png new file mode 100644 index 00000000..c0867326 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/corte_4.png differ diff --git 
a/docs/devon4ts/1.0/_images/images/crud-schematic.PNG b/docs/devon4ts/1.0/_images/images/crud-schematic.PNG new file mode 100644 index 00000000..f1b94f8d Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/crud-schematic.PNG differ diff --git a/docs/devon4ts/1.0/_images/images/cypress/browserTab.jpg b/docs/devon4ts/1.0/_images/images/cypress/browserTab.jpg new file mode 100644 index 00000000..9dff976e Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/cypress/browserTab.jpg differ diff --git a/docs/devon4ts/1.0/_images/images/cypress/contextImg.jpg b/docs/devon4ts/1.0/_images/images/cypress/contextImg.jpg new file mode 100644 index 00000000..c925bb34 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/cypress/contextImg.jpg differ diff --git a/docs/devon4ts/1.0/_images/images/cypress/reporter.jpg b/docs/devon4ts/1.0/_images/images/cypress/reporter.jpg new file mode 100644 index 00000000..fe821059 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/cypress/reporter.jpg differ diff --git a/docs/devon4ts/1.0/_images/images/data-box.jpg b/docs/devon4ts/1.0/_images/images/data-box.jpg new file mode 100644 index 00000000..6d38b892 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/data-box.jpg differ diff --git a/docs/devon4ts/1.0/_images/images/december.png b/docs/devon4ts/1.0/_images/images/december.png new file mode 100644 index 00000000..0c7a6800 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/december.png differ diff --git a/docs/devon4ts/1.0/_images/images/desktop_icon.png b/docs/devon4ts/1.0/_images/images/desktop_icon.png new file mode 100644 index 00000000..0fbcb96b Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/desktop_icon.png differ diff --git a/docs/devon4ts/1.0/_images/images/devon4node-architechture.png b/docs/devon4ts/1.0/_images/images/devon4node-architechture.png new file mode 100644 index 00000000..653b0bbe Binary files /dev/null and 
b/docs/devon4ts/1.0/_images/images/devon4node-architechture.png differ diff --git a/docs/devon4ts/1.0/_images/images/devon_logo - responsive.png b/docs/devon4ts/1.0/_images/images/devon_logo - responsive.png new file mode 100644 index 00000000..908cc66e Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/devon_logo - responsive.png differ diff --git a/docs/devon4ts/1.0/_images/images/devon_logo.png b/docs/devon4ts/1.0/_images/images/devon_logo.png new file mode 100644 index 00000000..908cc66e Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/devon_logo.png differ diff --git a/docs/devon4ts/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_CompleteOverview.png b/docs/devon4ts/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_CompleteOverview.png new file mode 100644 index 00000000..8591d2cb Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_CompleteOverview.png differ diff --git a/docs/devon4ts/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogComponent.png b/docs/devon4ts/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogComponent.png new file mode 100644 index 00000000..bb81efe5 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogComponent.png differ diff --git a/docs/devon4ts/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogContainer.png b/docs/devon4ts/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogContainer.png new file mode 100644 index 00000000..1f9222f1 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogContainer.png differ diff --git a/docs/devon4ts/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogInteractions.png b/docs/devon4ts/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogInteractions.png new file mode 100644 
index 00000000..6ac503fb Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogInteractions.png differ diff --git a/docs/devon4ts/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_Overview.png b/docs/devon4ts/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_Overview.png new file mode 100644 index 00000000..b694090e Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_Overview.png differ diff --git a/docs/devon4ts/1.0/_images/images/devonfw-oasp.png b/docs/devon4ts/1.0/_images/images/devonfw-oasp.png new file mode 100644 index 00000000..4d1171ff Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/devonfw-oasp.png differ diff --git a/docs/devon4ts/1.0/_images/images/enviroment_icon.png b/docs/devon4ts/1.0/_images/images/enviroment_icon.png new file mode 100644 index 00000000..16d91378 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/enviroment_icon.png differ diff --git a/docs/devon4ts/1.0/_images/images/examples.png b/docs/devon4ts/1.0/_images/images/examples.png new file mode 100644 index 00000000..ea8796c2 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/examples.png differ diff --git a/docs/devon4ts/1.0/_images/images/facebook.png b/docs/devon4ts/1.0/_images/images/facebook.png new file mode 100644 index 00000000..56f12068 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/facebook.png differ diff --git a/docs/devon4ts/1.0/_images/images/february.png b/docs/devon4ts/1.0/_images/images/february.png new file mode 100644 index 00000000..d5db9e90 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/february.png differ diff --git a/docs/devon4ts/1.0/_images/images/flexibility.png b/docs/devon4ts/1.0/_images/images/flexibility.png new file mode 100644 index 00000000..a9e880d8 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/flexibility.png differ diff --git 
a/docs/devon4ts/1.0/_images/images/generate-component.png b/docs/devon4ts/1.0/_images/images/generate-component.png new file mode 100644 index 00000000..778a70c7 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/generate-component.png differ diff --git a/docs/devon4ts/1.0/_images/images/generate-interactive.jpg b/docs/devon4ts/1.0/_images/images/generate-interactive.jpg new file mode 100644 index 00000000..54e97b52 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/generate-interactive.jpg differ diff --git a/docs/devon4ts/1.0/_images/images/generate-module.png b/docs/devon4ts/1.0/_images/images/generate-module.png new file mode 100644 index 00000000..a0c9ea0a Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/generate-module.png differ diff --git a/docs/devon4ts/1.0/_images/images/getting_started.png b/docs/devon4ts/1.0/_images/images/getting_started.png new file mode 100644 index 00000000..8f3340bf Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/getting_started.png differ diff --git a/docs/devon4ts/1.0/_images/images/github.png b/docs/devon4ts/1.0/_images/images/github.png new file mode 100644 index 00000000..602e9527 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/github.png differ diff --git a/docs/devon4ts/1.0/_images/images/help_icon.png b/docs/devon4ts/1.0/_images/images/help_icon.png new file mode 100644 index 00000000..a8c7a71e Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/help_icon.png differ diff --git a/docs/devon4ts/1.0/_images/images/high-speed.png b/docs/devon4ts/1.0/_images/images/high-speed.png new file mode 100644 index 00000000..2799a180 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/high-speed.png differ diff --git a/docs/devon4ts/1.0/_images/images/ico_flexibility.svg b/docs/devon4ts/1.0/_images/images/ico_flexibility.svg new file mode 100644 index 00000000..a4c54c4d --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/ico_flexibility.svg @@ -0,0 +1,24 
@@ + + + + + + + + + + + + + + + diff --git a/docs/devon4ts/1.0/_images/images/ico_highSpeed.svg b/docs/devon4ts/1.0/_images/images/ico_highSpeed.svg new file mode 100644 index 00000000..c944092d --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/ico_highSpeed.svg @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + diff --git a/docs/devon4ts/1.0/_images/images/ico_innovation.svg b/docs/devon4ts/1.0/_images/images/ico_innovation.svg new file mode 100644 index 00000000..9f2dfda7 --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/ico_innovation.svg @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/devon4ts/1.0/_images/images/ico_quality.svg b/docs/devon4ts/1.0/_images/images/ico_quality.svg new file mode 100644 index 00000000..5e2a8375 --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/ico_quality.svg @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + diff --git a/docs/devon4ts/1.0/_images/images/ide.png b/docs/devon4ts/1.0/_images/images/ide.png new file mode 100644 index 00000000..ec3fed60 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ide.png differ diff --git a/docs/devon4ts/1.0/_images/images/img.png b/docs/devon4ts/1.0/_images/images/img.png new file mode 100644 index 00000000..4b0bfaa8 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/img.png differ diff --git a/docs/devon4ts/1.0/_images/images/img_1.png b/docs/devon4ts/1.0/_images/images/img_1.png new file mode 100644 index 00000000..583bc83d Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/img_1.png differ diff --git a/docs/devon4ts/1.0/_images/images/innovation.png b/docs/devon4ts/1.0/_images/images/innovation.png new file mode 100644 index 00000000..a2fc9a6a Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/innovation.png differ diff --git a/docs/devon4ts/1.0/_images/images/insert-data.PNG b/docs/devon4ts/1.0/_images/images/insert-data.PNG new file mode 100644 index 00000000..0b0dfd2a Binary files /dev/null and 
b/docs/devon4ts/1.0/_images/images/insert-data.PNG differ diff --git a/docs/devon4ts/1.0/_images/images/install-cli-success.png b/docs/devon4ts/1.0/_images/images/install-cli-success.png new file mode 100644 index 00000000..b14462cd Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/install-cli-success.png differ diff --git a/docs/devon4ts/1.0/_images/images/integration_icon.png b/docs/devon4ts/1.0/_images/images/integration_icon.png new file mode 100644 index 00000000..2faf2830 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/integration_icon.png differ diff --git a/docs/devon4ts/1.0/_images/images/ionic-getting-started/ionic-blank-project.PNG b/docs/devon4ts/1.0/_images/images/ionic-getting-started/ionic-blank-project.PNG new file mode 100644 index 00000000..94b9772e Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic-getting-started/ionic-blank-project.PNG differ diff --git a/docs/devon4ts/1.0/_images/images/ionic-getting-started/ionic-start-list.png b/docs/devon4ts/1.0/_images/images/ionic-getting-started/ionic-start-list.png new file mode 100644 index 00000000..aca2d6d1 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic-getting-started/ionic-start-list.png differ diff --git a/docs/devon4ts/1.0/_images/images/ionic-getting-started/update-ionic-cli.PNG b/docs/devon4ts/1.0/_images/images/ionic-getting-started/update-ionic-cli.PNG new file mode 100644 index 00000000..b28e83d5 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic-getting-started/update-ionic-cli.PNG differ diff --git a/docs/devon4ts/1.0/_images/images/ionic-to-android/and-vsc-build-apk.png b/docs/devon4ts/1.0/_images/images/ionic-to-android/and-vsc-build-apk.png new file mode 100644 index 00000000..f321c1df Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic-to-android/and-vsc-build-apk.png differ diff --git a/docs/devon4ts/1.0/_images/images/ionic-to-android/and-vsc-build-run.png 
b/docs/devon4ts/1.0/_images/images/ionic-to-android/and-vsc-build-run.png new file mode 100644 index 00000000..aaad2c91 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic-to-android/and-vsc-build-run.png differ diff --git a/docs/devon4ts/1.0/_images/images/ionic-to-android/and-vsc-make-app.png b/docs/devon4ts/1.0/_images/images/ionic-to-android/and-vsc-make-app.png new file mode 100644 index 00000000..52fa226f Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic-to-android/and-vsc-make-app.png differ diff --git a/docs/devon4ts/1.0/_images/images/ionic-to-android/and-vsc-make.png b/docs/devon4ts/1.0/_images/images/ionic-to-android/and-vsc-make.png new file mode 100644 index 00000000..384e9079 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic-to-android/and-vsc-make.png differ diff --git a/docs/devon4ts/1.0/_images/images/ionic-to-android/config-device.png b/docs/devon4ts/1.0/_images/images/ionic-to-android/config-device.png new file mode 100644 index 00000000..d68d982b Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic-to-android/config-device.png differ diff --git a/docs/devon4ts/1.0/_images/images/ionic-to-android/create-new-device.png b/docs/devon4ts/1.0/_images/images/ionic-to-android/create-new-device.png new file mode 100644 index 00000000..7c8a5a7c Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic-to-android/create-new-device.png differ diff --git a/docs/devon4ts/1.0/_images/images/ionic-to-android/download-so.png b/docs/devon4ts/1.0/_images/images/ionic-to-android/download-so.png new file mode 100644 index 00000000..0048db46 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic-to-android/download-so.png differ diff --git a/docs/devon4ts/1.0/_images/images/ionic-to-android/enable-developer-options1_2_3.png b/docs/devon4ts/1.0/_images/images/ionic-to-android/enable-developer-options1_2_3.png new file mode 100644 index 00000000..d17e22b9 Binary files /dev/null 
and b/docs/devon4ts/1.0/_images/images/ionic-to-android/enable-developer-options1_2_3.png differ diff --git a/docs/devon4ts/1.0/_images/images/ionic-to-android/enable-developer-options4_5_6.png b/docs/devon4ts/1.0/_images/images/ionic-to-android/enable-developer-options4_5_6.png new file mode 100644 index 00000000..529fdf07 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic-to-android/enable-developer-options4_5_6.png differ diff --git a/docs/devon4ts/1.0/_images/images/ionic-to-android/environments.png b/docs/devon4ts/1.0/_images/images/ionic-to-android/environments.png new file mode 100644 index 00000000..47d7d367 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic-to-android/environments.png differ diff --git a/docs/devon4ts/1.0/_images/images/ionic-to-android/ipconfig-short.png b/docs/devon4ts/1.0/_images/images/ionic-to-android/ipconfig-short.png new file mode 100644 index 00000000..c2a77d81 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic-to-android/ipconfig-short.png differ diff --git a/docs/devon4ts/1.0/_images/images/ionic-to-android/locate-apk.png b/docs/devon4ts/1.0/_images/images/ionic-to-android/locate-apk.png new file mode 100644 index 00000000..e27bda40 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic-to-android/locate-apk.png differ diff --git a/docs/devon4ts/1.0/_images/images/ionic-to-android/new-backend-url.PNG b/docs/devon4ts/1.0/_images/images/ionic-to-android/new-backend-url.PNG new file mode 100644 index 00000000..7f92fa2f Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic-to-android/new-backend-url.PNG differ diff --git a/docs/devon4ts/1.0/_images/images/ionic-to-android/new-phone-created.png b/docs/devon4ts/1.0/_images/images/ionic-to-android/new-phone-created.png new file mode 100644 index 00000000..f395296a Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic-to-android/new-phone-created.png differ diff --git 
a/docs/devon4ts/1.0/_images/images/ionic-to-android/new-phone-nexus.png b/docs/devon4ts/1.0/_images/images/ionic-to-android/new-phone-nexus.png new file mode 100644 index 00000000..7a166ba0 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic-to-android/new-phone-nexus.png differ diff --git a/docs/devon4ts/1.0/_images/images/ionic-to-android/real-device.png b/docs/devon4ts/1.0/_images/images/ionic-to-android/real-device.png new file mode 100644 index 00000000..524038f2 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic-to-android/real-device.png differ diff --git a/docs/devon4ts/1.0/_images/images/ionic/ionic-pwa/base.png b/docs/devon4ts/1.0/_images/images/ionic/ionic-pwa/base.png new file mode 100644 index 00000000..159aa873 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic/ionic-pwa/base.png differ diff --git a/docs/devon4ts/1.0/_images/images/ionic/ionic-pwa/http-server.png b/docs/devon4ts/1.0/_images/images/ionic/ionic-pwa/http-server.png new file mode 100644 index 00000000..dc5084f3 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic/ionic-pwa/http-server.png differ diff --git a/docs/devon4ts/1.0/_images/images/ionic/ionic-pwa/ionic-serve.png b/docs/devon4ts/1.0/_images/images/ionic/ionic-pwa/ionic-serve.png new file mode 100644 index 00000000..cad3c335 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic/ionic-pwa/ionic-serve.png differ diff --git a/docs/devon4ts/1.0/_images/images/ionic/ionic-pwa/lighthouse.png b/docs/devon4ts/1.0/_images/images/ionic/ionic-pwa/lighthouse.png new file mode 100644 index 00000000..f24e8806 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic/ionic-pwa/lighthouse.png differ diff --git a/docs/devon4ts/1.0/_images/images/ionic/ionic-pwa/online-offline.png b/docs/devon4ts/1.0/_images/images/ionic/ionic-pwa/online-offline.png new file mode 100644 index 00000000..2c44171d Binary files /dev/null and 
b/docs/devon4ts/1.0/_images/images/ionic/ionic-pwa/online-offline.png differ diff --git a/docs/devon4ts/1.0/_images/images/ionic/ionic-pwa/pwa-nopwa-app.png b/docs/devon4ts/1.0/_images/images/ionic/ionic-pwa/pwa-nopwa-app.png new file mode 100644 index 00000000..7dd4b467 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ionic/ionic-pwa/pwa-nopwa-app.png differ diff --git a/docs/devon4ts/1.0/_images/images/iwan.jpg b/docs/devon4ts/1.0/_images/images/iwan.jpg new file mode 100644 index 00000000..5c4d2af3 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/iwan.jpg differ diff --git a/docs/devon4ts/1.0/_images/images/january.png b/docs/devon4ts/1.0/_images/images/january.png new file mode 100644 index 00000000..ccc123cb Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/january.png differ diff --git a/docs/devon4ts/1.0/_images/images/java_icon.png b/docs/devon4ts/1.0/_images/images/java_icon.png new file mode 100644 index 00000000..b99f7003 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/java_icon.png differ diff --git a/docs/devon4ts/1.0/_images/images/javascript_icon.png b/docs/devon4ts/1.0/_images/images/javascript_icon.png new file mode 100644 index 00000000..e5aecbfc Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/javascript_icon.png differ diff --git a/docs/devon4ts/1.0/_images/images/june.png b/docs/devon4ts/1.0/_images/images/june.png new file mode 100644 index 00000000..04247755 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/june.png differ diff --git a/docs/devon4ts/1.0/_images/images/layout-angular-material/1-finished-application.png b/docs/devon4ts/1.0/_images/images/layout-angular-material/1-finished-application.png new file mode 100644 index 00000000..359cb08b Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/layout-angular-material/1-finished-application.png differ diff --git a/docs/devon4ts/1.0/_images/images/layout-angular-material/2-blank-application.png 
b/docs/devon4ts/1.0/_images/images/layout-angular-material/2-blank-application.png new file mode 100644 index 00000000..3e4b31d7 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/layout-angular-material/2-blank-application.png differ diff --git a/docs/devon4ts/1.0/_images/images/layout-angular-material/3-material-added.png b/docs/devon4ts/1.0/_images/images/layout-angular-material/3-material-added.png new file mode 100644 index 00000000..c33d83bd Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/layout-angular-material/3-material-added.png differ diff --git a/docs/devon4ts/1.0/_images/images/layout-angular-material/4-header.png b/docs/devon4ts/1.0/_images/images/layout-angular-material/4-header.png new file mode 100644 index 00000000..8f336afb Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/layout-angular-material/4-header.png differ diff --git a/docs/devon4ts/1.0/_images/images/layout-angular-material/5-header-layout-final.png b/docs/devon4ts/1.0/_images/images/layout-angular-material/5-header-layout-final.png new file mode 100644 index 00000000..1d7fb776 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/layout-angular-material/5-header-layout-final.png differ diff --git a/docs/devon4ts/1.0/_images/images/layout-angular-material/6-home-page.png b/docs/devon4ts/1.0/_images/images/layout-angular-material/6-home-page.png new file mode 100644 index 00000000..8eea07fa Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/layout-angular-material/6-home-page.png differ diff --git a/docs/devon4ts/1.0/_images/images/layout-angular-material/7-data-page.png b/docs/devon4ts/1.0/_images/images/layout-angular-material/7-data-page.png new file mode 100644 index 00000000..e4fadfa5 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/layout-angular-material/7-data-page.png differ diff --git a/docs/devon4ts/1.0/_images/images/layout-angular-material/8-sidenav-started.png 
b/docs/devon4ts/1.0/_images/images/layout-angular-material/8-sidenav-started.png new file mode 100644 index 00000000..d7b06579 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/layout-angular-material/8-sidenav-started.png differ diff --git a/docs/devon4ts/1.0/_images/images/layout-angular-material/9-finished.png b/docs/devon4ts/1.0/_images/images/layout-angular-material/9-finished.png new file mode 100644 index 00000000..beb49f9f Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/layout-angular-material/9-finished.png differ diff --git a/docs/devon4ts/1.0/_images/images/linkedin.png b/docs/devon4ts/1.0/_images/images/linkedin.png new file mode 100644 index 00000000..0d863462 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/linkedin.png differ diff --git a/docs/devon4ts/1.0/_images/images/logo_capgemini_white.png b/docs/devon4ts/1.0/_images/images/logo_capgemini_white.png new file mode 100644 index 00000000..7e6c447f Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/logo_capgemini_white.png differ diff --git a/docs/devon4ts/1.0/_images/images/menu.svg b/docs/devon4ts/1.0/_images/images/menu.svg new file mode 100644 index 00000000..e22f434d --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/menu.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/docs/devon4ts/1.0/_images/images/mesteve.jpg b/docs/devon4ts/1.0/_images/images/mesteve.jpg new file mode 100644 index 00000000..f8a96dd1 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/mesteve.jpg differ diff --git a/docs/devon4ts/1.0/_images/images/mkurz.jpg b/docs/devon4ts/1.0/_images/images/mkurz.jpg new file mode 100644 index 00000000..3571aebe Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/mkurz.jpg differ diff --git a/docs/devon4ts/1.0/_images/images/mobile_icon.png b/docs/devon4ts/1.0/_images/images/mobile_icon.png new file mode 100644 index 00000000..02d47454 Binary files /dev/null and 
b/docs/devon4ts/1.0/_images/images/mobile_icon.png differ diff --git a/docs/devon4ts/1.0/_images/images/module-declaration.svg b/docs/devon4ts/1.0/_images/images/module-declaration.svg new file mode 100644 index 00000000..41cd8c07 --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/module-declaration.svg @@ -0,0 +1,448 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + AppRoutingModule + + + + + + + + AppModule + + + + + + + + + + + + RouterModule + + + + + + + + forRoot + + + + + + + () + + + + + + + + FeatureModule + + + + + + + + + forChild + + + + + + + () + + + + + + + + FeatureRoutingModule + + + + + + + + + + + + + diff --git a/docs/devon4ts/1.0/_images/images/net_icon.png b/docs/devon4ts/1.0/_images/images/net_icon.png new file mode 100644 index 00000000..6e65ecd0 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/net_icon.png differ diff --git a/docs/devon4ts/1.0/_images/images/new-app1.jpg b/docs/devon4ts/1.0/_images/images/new-app1.jpg new file mode 100644 index 00000000..40c150a0 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/new-app1.jpg differ diff --git a/docs/devon4ts/1.0/_images/images/new-app2.jpg b/docs/devon4ts/1.0/_images/images/new-app2.jpg new file mode 100644 index 00000000..67c86f2c Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/new-app2.jpg differ diff --git a/docs/devon4ts/1.0/_images/images/new-app3.jpg b/docs/devon4ts/1.0/_images/images/new-app3.jpg new file mode 100644 index 00000000..7b3f6021 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/new-app3.jpg differ diff --git a/docs/devon4ts/1.0/_images/images/new-app4.jpg b/docs/devon4ts/1.0/_images/images/new-app4.jpg new file mode 100644 index 00000000..27eccdef Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/new-app4.jpg differ diff --git a/docs/devon4ts/1.0/_images/images/new-app5.jpg 
b/docs/devon4ts/1.0/_images/images/new-app5.jpg new file mode 100644 index 00000000..fe34d164 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/new-app5.jpg differ diff --git a/docs/devon4ts/1.0/_images/images/next.scg.svg b/docs/devon4ts/1.0/_images/images/next.scg.svg new file mode 100644 index 00000000..aec8cd2a --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/next.scg.svg @@ -0,0 +1,39 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/devon4ts/1.0/_images/images/ngrx-concept.svg b/docs/devon4ts/1.0/_images/images/ngrx-concept.svg new file mode 100644 index 00000000..adb2887d --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/ngrx-concept.svg @@ -0,0 +1,403 @@ + + + + + + image/svg+xml + + + + + + + + + + + + + Store + + + Component + + + Effect + + + + + + Reducer + + + Services + + dispatches + [Action] + + + + + + + + + + + selects + state + slice + + emits + updates + + Invoke + side + effect + diff --git a/docs/devon4ts/1.0/_images/images/ngrx-devtools.png b/docs/devon4ts/1.0/_images/images/ngrx-devtools.png new file mode 100644 index 00000000..965e1b27 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/ngrx-devtools.png differ diff --git a/docs/devon4ts/1.0/_images/images/nx-cli/create-nx-workspace.png b/docs/devon4ts/1.0/_images/images/nx-cli/create-nx-workspace.png new file mode 100644 index 00000000..cb9044dd Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/nx-cli/create-nx-workspace.png differ diff --git a/docs/devon4ts/1.0/_images/images/nx-cli/nx-workspace-in-vscode.png b/docs/devon4ts/1.0/_images/images/nx-cli/nx-workspace-in-vscode.png new file mode 100644 index 00000000..f42be339 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/nx-cli/nx-workspace-in-vscode.png differ diff --git a/docs/devon4ts/1.0/_images/images/oasp-logo.png b/docs/devon4ts/1.0/_images/images/oasp-logo.png new file mode 100644 index 00000000..5b20ebf4 Binary files /dev/null and 
b/docs/devon4ts/1.0/_images/images/oasp-logo.png differ diff --git a/docs/devon4ts/1.0/_images/images/on_the_flexible_solution.png b/docs/devon4ts/1.0/_images/images/on_the_flexible_solution.png new file mode 100644 index 00000000..e1a29757 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/on_the_flexible_solution.png differ diff --git a/docs/devon4ts/1.0/_images/images/plantuml/components.png b/docs/devon4ts/1.0/_images/images/plantuml/components.png new file mode 100644 index 00000000..ec0207dd Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/plantuml/components.png differ diff --git a/docs/devon4ts/1.0/_images/images/plantuml/dependency-injection1.png b/docs/devon4ts/1.0/_images/images/plantuml/dependency-injection1.png new file mode 100644 index 00000000..e909d946 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/plantuml/dependency-injection1.png differ diff --git a/docs/devon4ts/1.0/_images/images/plantuml/dependency-injection2.png b/docs/devon4ts/1.0/_images/images/plantuml/dependency-injection2.png new file mode 100644 index 00000000..e79d2401 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/plantuml/dependency-injection2.png differ diff --git a/docs/devon4ts/1.0/_images/images/plantuml/layers.png b/docs/devon4ts/1.0/_images/images/plantuml/layers.png new file mode 100644 index 00000000..d464104f Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/plantuml/layers.png differ diff --git a/docs/devon4ts/1.0/_images/images/plantuml/module2.png b/docs/devon4ts/1.0/_images/images/plantuml/module2.png new file mode 100644 index 00000000..bc1f31bc Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/plantuml/module2.png differ diff --git a/docs/devon4ts/1.0/_images/images/plantuml/modules.png b/docs/devon4ts/1.0/_images/images/plantuml/modules.png new file mode 100644 index 00000000..ffb3653f Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/plantuml/modules.png differ diff --git 
a/docs/devon4ts/1.0/_images/images/prev.svg b/docs/devon4ts/1.0/_images/images/prev.svg new file mode 100644 index 00000000..cb0d1d41 --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/prev.svg @@ -0,0 +1,39 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/devon4ts/1.0/_images/images/project-structure.png b/docs/devon4ts/1.0/_images/images/project-structure.png new file mode 100644 index 00000000..75f2c617 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/project-structure.png differ diff --git a/docs/devon4ts/1.0/_images/images/quality.png b/docs/devon4ts/1.0/_images/images/quality.png new file mode 100644 index 00000000..7a6424a2 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/quality.png differ diff --git a/docs/devon4ts/1.0/_images/images/query_logo.png b/docs/devon4ts/1.0/_images/images/query_logo.png new file mode 100644 index 00000000..a1391f04 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/query_logo.png differ diff --git a/docs/devon4ts/1.0/_images/images/read_icon.png b/docs/devon4ts/1.0/_images/images/read_icon.png new file mode 100644 index 00000000..731650ab Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/read_icon.png differ diff --git a/docs/devon4ts/1.0/_images/images/rest-adapter.svg b/docs/devon4ts/1.0/_images/images/rest-adapter.svg new file mode 100644 index 00000000..bb2a7c24 --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/rest-adapter.svg @@ -0,0 +1,366 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + client + + + + + + + + Use Case Service + + + + + + + + Adapter + + + + + + + + + HttpClient + + + + + + + + + + server + + + + + + + + + HTTP + + + + + + + Endpoint + + + + + + + + + + + + + diff --git a/docs/devon4ts/1.0/_images/images/sample/employees.png b/docs/devon4ts/1.0/_images/images/sample/employees.png new file mode 100644 index 00000000..434ea28c Binary 
files /dev/null and b/docs/devon4ts/1.0/_images/images/sample/employees.png differ diff --git a/docs/devon4ts/1.0/_images/images/sample/generate-migrations.png b/docs/devon4ts/1.0/_images/images/sample/generate-migrations.png new file mode 100644 index 00000000..a0414e9b Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/sample/generate-migrations.png differ diff --git a/docs/devon4ts/1.0/_images/images/sample/insert-data.png b/docs/devon4ts/1.0/_images/images/sample/insert-data.png new file mode 100644 index 00000000..0b0dfd2a Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/sample/insert-data.png differ diff --git a/docs/devon4ts/1.0/_images/images/sample/new-app.png b/docs/devon4ts/1.0/_images/images/sample/new-app.png new file mode 100644 index 00000000..f2c3638c Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/sample/new-app.png differ diff --git a/docs/devon4ts/1.0/_images/images/sample/start-app.png b/docs/devon4ts/1.0/_images/images/sample/start-app.png new file mode 100644 index 00000000..e44baee0 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/sample/start-app.png differ diff --git a/docs/devon4ts/1.0/_images/images/sample/swagger.png b/docs/devon4ts/1.0/_images/images/sample/swagger.png new file mode 100644 index 00000000..8d7aa48d Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/sample/swagger.png differ diff --git a/docs/devon4ts/1.0/_images/images/sample/test.png b/docs/devon4ts/1.0/_images/images/sample/test.png new file mode 100644 index 00000000..ba775b27 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/sample/test.png differ diff --git a/docs/devon4ts/1.0/_images/images/slider1.jpg b/docs/devon4ts/1.0/_images/images/slider1.jpg new file mode 100644 index 00000000..49d1c706 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/slider1.jpg differ diff --git a/docs/devon4ts/1.0/_images/images/slider2.jpg b/docs/devon4ts/1.0/_images/images/slider2.jpg new file mode 
100644 index 00000000..f34ef1fe Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/slider2.jpg differ diff --git a/docs/devon4ts/1.0/_images/images/slider3.jpg b/docs/devon4ts/1.0/_images/images/slider3.jpg new file mode 100644 index 00000000..cabfc561 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/slider3.jpg differ diff --git a/docs/devon4ts/1.0/_images/images/slideshare.png b/docs/devon4ts/1.0/_images/images/slideshare.png new file mode 100644 index 00000000..069568fa Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/slideshare.png differ diff --git a/docs/devon4ts/1.0/_images/images/smart-component-interaction-via-services-layer.svg b/docs/devon4ts/1.0/_images/images/smart-component-interaction-via-services-layer.svg new file mode 100644 index 00000000..636e0028 --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/smart-component-interaction-via-services-layer.svg @@ -0,0 +1,724 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Store + + + + + + + + state$: Observable<State> + + + + + + + + + + + + + + + changeState + + + + + + + ( + + + + + + + args + + + + + + + ): void + + + + + + + + Smart + + + + + + + Component A + + + + + + + + Smart + + + + + + + Component B + + + + + + + + Smart + + + + + + + Component + + + + + + + C + + + + + + + + + + + + + + + + + action() + + + + + + + subscribe() + + + + + + + + UseCaseService + + + + + + + + action(): void + + + + + + + + + subscribe() + + + + + + + + + + Services Layer + + + + + + + Components Layer + + + + + diff --git a/docs/devon4ts/1.0/_images/images/smart-dumb-components-interaction.svg b/docs/devon4ts/1.0/_images/images/smart-dumb-components-interaction.svg new file mode 100644 index 00000000..15706ef0 --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/smart-dumb-components-interaction.svg @@ -0,0 +1,501 @@ + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Smart + + + + + + + Component + + + + + + + + Dumb + + + + + + + Component A + + + + + + + + Dumb + + + + + + + Component B + + + + + + + + Dumb + + + + + + + Component C + + + + + + + + + + + + + Data Binding + + + + + + + Data Binding + + + + + + + Data Binding + + + + + + + + Event Binding + + + + + + + + + + + + + Event Binding + + + + + diff --git a/docs/devon4ts/1.0/_images/images/smart-dumb-components.svg b/docs/devon4ts/1.0/_images/images/smart-dumb-components.svg new file mode 100644 index 00000000..df8809db --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/smart-dumb-components.svg @@ -0,0 +1,887 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Smart + + + + + + + Component + + + + + + + + Dumb + + + + + + + Component + + + + + + + A + + + + + + + + Dumb + + + + + + + Component + + + + + + + B + + + + + + + + Dumb + + + + + + + Component + + + + + + + C + + + + + + + + + + + + + + Service + + + + + + + + Data Binding + + + + + + + Data Binding + + + + + + + Data Binding + + + + + + + + Event Binding + + + + + + + + + + + + + Event Binding + + + + + + + + Store + + + + + + + + subscribe() + + + + + + + action() + + + + + diff --git a/docs/devon4ts/1.0/_images/images/smart-smart-components-example.svg b/docs/devon4ts/1.0/_images/images/smart-smart-components-example.svg new file mode 100644 index 00000000..dacb06d9 --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/smart-smart-components-example.svg @@ -0,0 +1,1456 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + FlightSearchStore + + + + + + + + state$: Observable<State> + + + + + + + + + + + + + + + setFlights + + + + + + + ( + + + + + + + Flug + + + + + + + []): void + + + + + + + + + + + + + + + clearFlights + + + + + + + (): void + + + + + + + + + + + + + + + setLoadingFlights + + + + + + + ( + + + + + + + boolean + + + + + + + ): void + + + + + + + + FlightSearchComponent + + + + + + + + FlightDetailsComponent + + + + + + + + + + + + + + + + + subscribe() + + + + + + + + FlightSearchService + + + + + + + + + + + + + + + loadFlights + + + + + + + (): void + + + + + + + + + FlightSearchState + + + + + + + + + + + + + + + isLoadingFlights + + + + + + + : + + + + + + + boolean + + + + + + + + flights: + + + + + + + Flug + + + + + + + [] + + + + + + + + c + + + + + + + riteria: + + + + + + + FlightSearchCriteria + + + + + + + + + + + loadFlights + + + + + + + () + + + + + + + + FlightSearchAdapter + + + + + + + + + + + + + + + getFlights + + + + + + + (): + + + + + + + Observable<Flight[]> + + + + + + + + + HttpClient + + + + + + + + + + + + + + + get + + + + + + + <T> + + + + + + + (): Observable<T> + + + + + + + + subscribe() + + + + + + + + FlightSearchCriteria + + + + + + diff --git a/docs/devon4ts/1.0/_images/images/smart-smart-components.svg b/docs/devon4ts/1.0/_images/images/smart-smart-components.svg new file mode 100644 index 00000000..b4fc8369 --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/smart-smart-components.svg @@ -0,0 +1,794 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Store + + + + + + + + state$: Observable<State> + + + + + + + + + + + + + + + 
changeState + + + + + + + ( + + + + + + + args + + + + + + + ): + + + + + + + void + + + + + + + + Smart + + + + + + + Component + + + + + + + A + + + + + + + + Smart + + + + + + + Component + + + + + + + B + + + + + + + + Smart + + + + + + + Component + + + + + + + C + + + + + + + + + + + + + + + + + action() + + + + + + + subscribe() + + + + + + + + UseCaseService + + + + + + + + + + + + + + + action(): + + + + + + + void + + + + + + + + + subscribe() + + + + + diff --git a/docs/devon4ts/1.0/_images/images/src/ngrx-concept.pptx b/docs/devon4ts/1.0/_images/images/src/ngrx-concept.pptx new file mode 100644 index 00000000..219d5cd5 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/src/ngrx-concept.pptx differ diff --git a/docs/devon4ts/1.0/_images/images/testimonials.png b/docs/devon4ts/1.0/_images/images/testimonials.png new file mode 100644 index 00000000..9835e68f Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/testimonials.png differ diff --git a/docs/devon4ts/1.0/_images/images/testing-areas.svg b/docs/devon4ts/1.0/_images/images/testing-areas.svg new file mode 100644 index 00000000..45b461ed --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/testing-areas.svg @@ -0,0 +1,1161 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Store + + + + + + + + + + + + + Smart + + + + + + + + Dumb + + + + + + + + Dumb + + + + + + + + Dumb + + + + + + + + Dumb + + + + + + + + + + + + + + + + + + + + + + + + + + + + Dumb + + + + + + + + + + + + + Dumb + + + + + + + + + + + + + Use + + + + + + + Case + + + + + + + Service + + + + + + + + + + Adapter + + + + + + + + + Service + + + + + + + + + + + + + + + + + View + + + + + + + l + + + + + + + ogic + + + + + + + in + + + + + + + Smart + + + + + + + Components + 
+ + + + + + (1) + + + + + + + State + + + + + + + t + + + + + + + ransitions + + + + + + + in Stores + + + + + + + (2) + + + + + + + Business + + + + + + + logic + + + + + + + in + + + + + + + S + + + + + + + ervices + + + + + + + (3) + + + + + diff --git a/docs/devon4ts/1.0/_images/images/triggering-navigation.svg b/docs/devon4ts/1.0/_images/images/triggering-navigation.svg new file mode 100644 index 00000000..f54fd123 --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/triggering-navigation.svg @@ -0,0 +1,422 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Smart Component + + + + + + + + Dumb Component A + + + + + + + + Dumb Component C + + + + + + + + navigationButtonClick + + + + + + + Event + + + + + + + + navigationButtonClick + + + + + + + Event + + + + + + + User clicks button to + + + + + + + trigger navigation + + + + + + + + Router + + + + + + + + + + + + + diff --git a/docs/devon4ts/1.0/_images/images/twitter.png b/docs/devon4ts/1.0/_images/images/twitter.png new file mode 100644 index 00000000..846ef2e2 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/twitter.png differ diff --git a/docs/devon4ts/1.0/_images/images/typeorm-schematic.PNG b/docs/devon4ts/1.0/_images/images/typeorm-schematic.PNG new file mode 100644 index 00000000..2a3d09b9 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/typeorm-schematic.PNG differ diff --git a/docs/devon4ts/1.0/_images/images/university.png b/docs/devon4ts/1.0/_images/images/university.png new file mode 100644 index 00000000..e3ebe33c Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/university.png differ diff --git a/docs/devon4ts/1.0/_images/images/use-case-service.svg b/docs/devon4ts/1.0/_images/images/use-case-service.svg new file mode 100644 index 00000000..cfabc02a --- /dev/null +++ b/docs/devon4ts/1.0/_images/images/use-case-service.svg @@ -0,0 +1,319 @@ + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + UseCaseService + + + + + + + + action(): void + + + + + + + + Store + + + + + + + + Adapter + + + + + + + + Business + + + + + + + Service + + + + + + + + + + + + + + + + + + + + diff --git a/docs/devon4ts/1.0/_images/images/used-technologies.jpg b/docs/devon4ts/1.0/_images/images/used-technologies.jpg new file mode 100644 index 00000000..f79fe526 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/used-technologies.jpg differ diff --git a/docs/devon4ts/1.0/_images/images/view_icon.png b/docs/devon4ts/1.0/_images/images/view_icon.png new file mode 100644 index 00000000..51257d45 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/view_icon.png differ diff --git a/docs/devon4ts/1.0/_images/images/web_icon.png b/docs/devon4ts/1.0/_images/images/web_icon.png new file mode 100644 index 00000000..0afc937e Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/web_icon.png differ diff --git a/docs/devon4ts/1.0/_images/images/youtube.png b/docs/devon4ts/1.0/_images/images/youtube.png new file mode 100644 index 00000000..b5eb06a6 Binary files /dev/null and b/docs/devon4ts/1.0/_images/images/youtube.png differ diff --git a/docs/devon4ts/1.0/angular/architecture.html b/docs/devon4ts/1.0/angular/architecture.html new file mode 100644 index 00000000..1549fe48 --- /dev/null +++ b/docs/devon4ts/1.0/angular/architecture.html @@ -0,0 +1,386 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Architecture

+
+
+

The following principles and guidelines are based on Angular Style Guide - especially Angular modules (see Angular Docs). +It extends those where additional guidance is needed to define an architecture which is:

+
+
+
    +
  • +

    maintainable across applications and teams

    +
  • +
  • +

    easy to understand, especially when coming from a classic Java/.Net perspective - so whenever possible the same principles apply both to the server and the client

    +
  • +
  • +

    pattern based to solve common problems

    +
  • +
  • +

    based on best of breed solutions coming from open source and Capgemini project experiences

    +
  • +
  • +

    gives as much guidance as necessary and as little as possible

    +
  • +
+
+
+
+
+

Overview

+
+
+

When using Angular the web client architecture is driven by the framework in a certain way Google and the Angular community think about web client architecture. +Angular gives an opinion on how to look at architecture. +It is component-based like devon4j, but uses different terms which are common language in web application development. +The important term is module which is used instead of component. The primary reason is the naming collision with the Web Components standard (see Web Components).
+To clarify this:

+
+
+
    +
  • +

    A component describes an UI element containing HTML, CSS and JavaScript - structure, design and logic encapsulated inside a reusable container called component.

    +
  • +
  • +

    A module describes an applications feature area. The application flight-app may have a module called booking.

    +
  • +
+
+
+

An application developed using Angular consists of multiple modules. +There are feature modules and special modules described by the Angular Style Guide - core and shared. +Angular or Angular Style Guide give no guidance on how to structure a module internally. +This is where this architecture comes in.

+
+
+
+
+

Layers

+
+
+

The architecture describes two layers. The terminology is based on common language in web development.

+
+
+
+Architecture - Layers +
+
Figure 1. Layers
+
+
+
    +
  • +

    Components Layer encapsulates components which present the current application state. +Components are separated into Smart and Dumb Components. +The only logic present is view logic inside Smart Components.

    +
  • +
  • +

    Services Layer is more or less what we call 'business logic layer' on the server side. +The layer defines the applications state, the transitions between state and classic business logic. +Stores contain application state over time to which Smart Components subscribe to. +Adapters are used to perform XHR, WebSocket connections, etc. +The business model is described inside the module. +Use case services perform business logic needed for use cases. +A use case services interacts with the store and adapters. +Methods of use case services are the API for Smart Components. +Those methods are Actions in reactive terminology.

    +
  • +
+
+
+
+
+

Modules

+
+
+

Angular requires a module called app which is the main entrance to an application at runtime - this module gets bootstrapped. +Angular Style Guide defines feature modules and two special modules - core and shared.

+
+
+
+Architecture - Modules +
+
Figure 2. Modules
+
+
+

A feature module is basically a vertical cut through both layers. +The shared module consists of components shared across feature modules. +The core module holds services shared across modules. +So core module is a module only having a services layer +and shared module is a module only having a components layer.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/components-layer.html b/docs/devon4ts/1.0/angular/components-layer.html new file mode 100644 index 00000000..016616fe --- /dev/null +++ b/docs/devon4ts/1.0/angular/components-layer.html @@ -0,0 +1,470 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Components Layer

+
+
+

The components layer encapsulates all components presenting the current application view state, which means data to be shown to the user. +The term component refers to a component described by the standard Web Components. +So this layer has all Angular components, directives and pipes defined for an application. +The main challenges are:

+
+
+
    +
  • +

    how to structure the components layer (see File Structure Guide)

    +
  • +
  • +

    decompose components into maintainable chunks (see Component Decomposition Guide)

    +
  • +
  • +

    handle component interaction

    +
  • +
  • +

    manage calls to the services layer

    +
  • +
  • +

    apply a maintainable data and event flow throughout the component tree

    +
  • +
+
+
+
+
+

Smart and Dumb Components

+
+
+

The architecture applies the concept of Smart and Dumb Components (syn. Containers and Presenters). +The concept means that components are divided into Smart and Dumb Components.

+
+
+

A Smart Component typically is a top-level dialog inside the component tree.

+
+
+
    +
  • +

    a component, that can be routed to

    +
  • +
  • +

    a modal dialog

    +
  • +
  • +

    a component, which is placed inside AppComponent

    +
  • +
+
+
+

A Dumb Component can be used by one to many Smart Components. +Inside the component tree a Dumb Component is a child of a Smart Component.

+
+
+
+Component Tree +
+
Figure 1. Component tree example
+
+
+

As shown the topmost component is always the AppComponent in Angular applications. +The component tree describes the hierarchy of components starting from AppComponent. +The figure shows Smart Components in blue and Dumb Components in green. +AppComponent is a Smart Component by definition. +Components placed inside the template of AppComponent are static components inside the component tree. +So they are always displayed. +In the example OverviewComponent and DetailsComponent are rendered by Angular compiler depending on current URL the application displays. +So OverviewComponent's sub-tree is displayed if the URL is /overview and DetailsComponent's sub-tree is displayed if the URL is /details. +To clarify this distinction further the following table shows the main differences.

+
+
+
Smart vs Dumb Components
+

|== = +|Smart Components |Dumb Components

+
+
+

|contain the current view state +|show data via binding (@Input) and contain no view state

+
+
+

|handle events emitted by Dumb Components +|pass events up the component tree to be handled by Smart Components (@Output)

+
+
+

|call the services layer +|never call the services layer

+
+
+

|use services +|do not use services

+
+
+

|consists of n Dumb Components +|is independent of Smart Components +|== =

+
+
+
+
+

Interaction of Smart and Dumb Components

+
+
+

With the usage of the Smart and Dumb Components pattern one of the most important part is component interaction. +Angular comes with built in support for component interaction with @Input() and @Output() Decorators. +The following figure illustrates an unidirectional data flow.

+
+
+
    +
  • +

    Data always goes down the component tree - from a Smart Component down its children.

    +
  • +
  • +

    Events bubble up, to be handled by a Smart Component.

    +
  • +
+
+
+
+Smart and Dumb Components Interaction +
+
Figure 2. Smart and Dumb Component Interaction
+
+
+

As shown, a Dumb Component's role is to define a signature by declaring Input and Output Bindings.

+
+
+
    +
  • +

    @Input() defines what data is necessary for that component to work

    +
  • +
  • +

    @Output() defines which events can be listened on by the parent component

    +
  • +
+
+
+
Listing 1. Dumb Components define a signature
+
+
export class ValuePickerComponent {
+
+  @Input() columns: string[];
+  @Input() items: {}[];
+  @Input() selected: {};
+  @Input() filter: string;
+  @Input() isChunked = false;
+  @Input() showInput = true;
+  @Input() showDropdownHeader = true;
+
+  @Output() elementSelected = new EventEmitter<{}>();
+  @Output() filterChanged = new EventEmitter<string>();
+  @Output() loadNextChunk = new EventEmitter();
+  @Output() escapeKeyPressed = new EventEmitter();
+
+}
+
+
+
+

The example shows the Dumb Component ValuePickerComponent. +It describes seven input bindings with isChunked, showInput and showDropdownHeader being non-mandatory as they have a default value. +Four output bindings are present. Typically, a Dumb Component has very little to no code inside the TypeScript class.

+
+
+
Listing 2. Smart Components use the Dumb Components signature inside the template
+
+
<div>
+
+  <value-input
+    ...>
+  </value-input>
+
+  <value-picker
+    *ngIf="isValuePickerOpen"
+    [columns]="columns"
+    [items]="filteredItems"
+    [isChunked]="isChunked"
+    [filter]="filter"
+    [selected]="selectedItem"
+    [showDropdownHeader]="showDropdownHeader"
+    (loadNextChunk)="onLoadNextChunk()"
+    (elementSelected)="onElementSelected($event)"
+    (filterChanged)="onFilterChanged($event)"
+    (escapeKeyPressed)="onEscapePressedInsideChildTable()">
+  </value-picker>
+
+</div>
+
+
+
+

Inside the Smart Components template the events emitted by Dumb Components are handled. +It is a good practice to name the handlers with the prefix on* (e.g. onInputChanged()).

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/cookbook-abstract-class-store.html b/docs/devon4ts/1.0/angular/cookbook-abstract-class-store.html new file mode 100644 index 00000000..6a19ebf2 --- /dev/null +++ b/docs/devon4ts/1.0/angular/cookbook-abstract-class-store.html @@ -0,0 +1,402 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Abstract Class Store

+
+
+

The following solution presents a base class for implementing stores which handle state and its transitions. +Working with the base class achieves:

+
+
+
    +
  • +

    common API across all stores

    +
  • +
  • +

    logging (when activated in the constructor)

    +
  • +
  • +

    state transitions are asynchronous by design - sequential order problems are avoided

    +
  • +
+
+
+
Listing 1. Usage Example
+
+
@Injectable()
+export class ModalStore extends Store<ModalState> {
+
+  constructor() {
+    super({ isOpen: false }, !environment.production);
+  }
+
+  closeDialog() {
+    this.dispatchAction('Close Dialog', (currentState) => ({...currentState, isOpen: false}));
+  }
+
+  openDialog() {
+    this.dispatchAction('Open Dialog', (currentState) => ({...currentState, isOpen: true}));
+  }
+
+}
+
+
+
+
Listing 2. Abstract Base Class Store
+
+
import { OnDestroy } from '@angular/core';
+import { BehaviorSubject } from 'rxjs/BehaviorSubject';
+import { Observable } from 'rxjs/Observable';
+import { intersection, difference } from 'lodash';
+import { map, distinctUntilChanged, observeOn } from 'rxjs/operators';
+import { Subject } from 'rxjs/Subject';
+import { queue } from 'rxjs/scheduler/queue';
+import { Subscription } from 'rxjs/Subscription';
+
+interface Action<T> {
+  name: string;
+  actionFn: (state: T) => T;
+}
+
+/** Base class for implementing stores. */
+export abstract class Store<T> implements OnDestroy {
+
+  private actionSubscription: Subscription;
+  private actionSource: Subject<Action<T>>;
+  private stateSource: BehaviorSubject<T>;
+  state$: Observable<T>;
+
+  /**
+   * Initializes a store with initial state and logging.
+   * @param initialState Initial state
+   * @param logChanges When true state transitions are logged to the console.
+   */
+  constructor(initialState: T, public logChanges = false) {
+    this.stateSource = new BehaviorSubject<T>(initialState);
+    this.state$ = this.stateSource.asObservable();
+    this.actionSource = new Subject<Action<T>>();
+
+    this.actionSubscription = this.actionSource.pipe(observeOn(queue)).subscribe(action => {
+      const currentState = this.stateSource.getValue();
+      const nextState = action.actionFn(currentState);
+
+      if (this.logChanges) {
+        this.log(action.name, currentState, nextState);
+      }
+
+      this.stateSource.next(nextState);
+    });
+  }
+
+  /**
+   * Selects a property from the stores state.
+   * Will do distinctUntilChanged() and map() with the given selector.
+   * @param selector Selector function which selects the needed property from the state.
+   * @returns Observable of return type from selector function.
+   */
+  select<TX>(selector: (state: T) => TX): Observable<TX> {
+    return this.state$.pipe(
+      map(selector),
+      distinctUntilChanged()
+    );
+  }
+
+  protected dispatchAction(name: string, action: (state: T) => T) {
+    this.actionSource.next({ name, actionFn: action });
+  }
+
+  private log(actionName: string, before: T, after: T) {
+    const result: { [key: string]: { from: any, to: any} } = {};
+    const sameProbs = intersection(Object.keys(after), Object.keys(before));
+    const newProbs = difference(Object.keys(after), Object.keys(before));
+    for (const prop of newProbs) {
+      result[prop] = { from: undefined, to: (<any>after)[prop] };
+    }
+
+    for (const prop of sameProbs) {
+      if ((<any>before)[prop] !==  (<any>after)[prop]) {
+        result[prop] = { from: (<any>before)[prop], to: (<any>after)[prop] };
+      }
+    }
+
+    console.log(this.constructor.name, actionName, result);
+  }
+
+  ngOnDestroy() {
+    this.actionSubscription.unsubscribe();
+  }
+
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-accessibility.html b/docs/devon4ts/1.0/angular/guide-accessibility.html new file mode 100644 index 00000000..08dc3997 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-accessibility.html @@ -0,0 +1,660 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Accessibility

+
+
+

Multiple studies suggest that around 15-20% of the population are living with a disability of some kind. In comparison, that number is higher than any single browser demographic currently, other than Chrome. Not considering those users when developing an application means excluding a large number of people from being able to use it comfortably or at all.

+
+
+

Some people are unable to use a mouse, view a screen, see low-contrast text, or hear dialogue or music, and some people have difficulty understanding complex language. These users need support such as keyboard support, screen reader support, high-contrast text, captions and transcripts, and plain language support. A disability may range from permanent to situational.

+
+
+
+
+

Key Concerns of Accessible Web Applications

+
+
+
    +
  • +

    Semantic Markup - Allows the application to be understood on a more general level rather than just the details of what's being rendered

    +
  • +
  • +

    Keyboard Accessibility - Applications must still be usable when using only a keyboard

    +
  • +
  • +

    Visual Assistance - color contrast, focus of elements and text representations of audio and events

    +
  • +
+
+
+
+
+

Semantic Markup

+
+
+

If you’re creating custom element directives, Web Components or HTML in general, use native elements wherever possible to utilize built-in events and properties. Alternatively, use ARIA to communicate semantic meaning.

+
+
+

HTML tags have attributes that provide extra context on what’s being displayed in the browser. For example, the <img> tag’s alt attribute lets the reader know what is being shown using a short description. However, native tags don’t cover all cases. This is where ARIA fits in. ARIA attributes can provide context on what roles specific elements have in the application or on how elements within the document relate to each other.

+
+
+

A modal component can be given the role of dialog or alertdialog to let the browser know that the component is acting as a modal. The modal component template can use the ARIA attributes aria-labelledby and aria-describedby to describe to readers what the title and purpose of the modal is.

+
+
+
+
@Component({
+    selector: 'ngc2-app',
+    template: `
+      <ngc2-notification-button
+        message="Hello!"
+        label="Greeting"
+        role="button">
+      </ngc2-notification-button>
+      <ngc2-modal
+        [title]="modal.title"
+        [description]="modal.description"
+        [visible]="modal.visible"
+        (close)="modal.close()">
+      </ngc2-modal>
+    `
+})
+export class AppComponent {
+  constructor(private modal: ModalService) { }
+}
+
+
+
+

notification-button.component.ts

+
+
+
+
@Component({
+  selector: 'ngc2-modal',
+  template: `
+    <div
+      role="dialog"
+      aria-labelledby="modal-title"
+      aria-describedby="modal-description">
+      <div id="modal-title">{{title}}</div>
+      <p id="modal-description">{{description}}</p>
+      <button (click)="close.emit()">OK</button>
+    </div>
+  `
+})
+export class ModalComponent {
+  ...
+}
+
+
+
+
+
+

Keyboard Accessibility

+
+
+

Keyboard accessibility is the ability of your application to be interacted with using just a keyboard. The more streamlined the site can be used this way, the more keyboard accessible it is. Keyboard accessibility is one of the largest aspects of web accessibility since it targets:

+
+
+
    +
  • +

    those with motor disabilities who can’t use a mouse

    +
  • +
  • +

    users who rely on screen readers and other assistive technology, which require keyboard navigation

    +
  • +
  • +

    those who prefer not to use a mouse

    +
  • +
+
+
+
+
+

== Focus

+
+
+

Keyboard interaction is driven by something called focus. In web applications, only one element on a document has focus at a time, and a keypress will activate whatever function is bound to that element. +The border of the focused element can be styled with CSS using the outline property, but it should not be removed. Elements can also be styled using the :focus pseudo-selector.

+
+
+
+
+

== Tabbing

+
+
+

The most common way of moving focus along the page is through the tab key. Elements will be traversed in the order they appear in the document outline - so that order must be carefully considered during development. +There is a way to change the default behavior or tab order. This can be done through the tabindex attribute. The tabindex can be given the values: +* less than zero - to let readers know that an element should be focusable but not keyboard accessible +* 0 - to let readers know that the element should be accessible by keyboard +* greater than zero - to let readers know the order in which the focusable element should be reached using the keyboard. Order is calculated from lowest to highest.

+
+
+
+
+

== Transitions

+
+
+

The majority of transitions that happen in an Angular application will not involve a page reload. This means that developers will need to carefully manage what happens to focus in these cases.

+
+
+

For example:

+
+
+
+
@Component({
+  selector: 'ngc2-modal',
+  template: `
+    <div
+      role="dialog"
+      aria-labelledby="modal-title"
+      aria-describedby="modal-description">
+      <div id="modal-title">{{title}}</div>
+      <p id="modal-description">{{description}}</p>
+      <button (click)="close.emit()">OK</button>
+    </div>
+  `,
+})
+export class ModalComponent {
+  constructor(private modal: ModalService, private element: ElementRef) { }
+
+  ngOnInit() {
+    this.modal.visible$.subscribe(visible => {
+      if(visible) {
+        setTimeout(() => {
+          this.element.nativeElement.querySelector('button').focus();
+        }, 0);
+      }
+    })
+  }
+}
+
+
+
+
+
+

Visual Assistance

+
+
+

One large category of disability is visual impairment. This includes not just the blind, but those who are color blind or partially sighted, and require some additional consideration.

+
+
+
+
+

Color Contrast

+
+
+

When choosing colors for text or elements on a website, the contrast between them needs to be considered. For WCAG 2.0 AA, this means that the contrast ratio for text or visual representations of text needs to be at least 4.5:1. There are tools online to measure the contrast ratio, such as the color contrast checker from WebAIM, or it can be checked using automation tests.

+
+
+
+
+

Visual Information

+
+
+

Color can help a user’s understanding of information, but it should never be the only way to convey information to a user. For example, a user with red/green color-blindness may have trouble discerning at a glance if an alert is informing them of success or failure.

+
+
+
+
+

Audiovisual Media

+
+
+

Audiovisual elements in the application such as video, sound effects or audio (that is, podcasts) need related textual representations such as transcripts, captions or descriptions. They also should never auto-play and playback controls should be provided to the user.

+
+
+
+
+

Accessibility with Angular Material

+
+
+

The a11y package provides a number of tools to improve accessibility. Import it as follows:

+
+
+
+
import { A11yModule } from '@angular/cdk/a11y';
+
+
+
+
+
+

ListKeyManager

+
+
+

ListKeyManager manages the active option in a list of items based on keyboard interaction. Intended to be used with components that correspond to a role="menu" or role="listbox" pattern. Any component that uses a ListKeyManager will generally do three things:

+
+
+
    +
  • +

    Create a @ViewChildren query for the options being managed.

    +
  • +
  • +

    Initialize the ListKeyManager, passing in the options.

    +
  • +
  • +

    Forward keyboard events from the managed component to the ListKeyManager.

    +
  • +
+
+
+

Each option should implement the ListKeyManagerOption interface:

+
+
+
+
interface ListKeyManagerOption {
+  disabled?: boolean;
+  getLabel?(): string;
+}
+
+
+
+
+
+

== Types of ListKeyManager

+
+
+

There are two varieties of ListKeyManager, FocusKeyManager and ActiveDescendantKeyManager.

+
+
+
+
+

FocusKeyManager

+
+
+

Used when options will directly receive browser focus. Each item managed must implement the FocusableOption interface:

+
+
+
+
interface FocusableOption extends ListKeyManagerOption {
+  focus(): void;
+}
+
+
+
+
+
+

ActiveDescendantKeyManager

+
+
+

Used when options will be marked as active via aria-activedescendant. Each item managed must implement the Highlightable interface:

+
+
+
+
interface Highlightable extends ListKeyManagerOption {
+  setActiveStyles(): void;
+  setInactiveStyles(): void;
+}
+
+
+
+

Each item must also have an ID bound to the listbox’s or menu’s aria-activedescendant.

+
+
+
+
+

FocusTrap

+
+
+

The cdkTrapFocus directive traps Tab key focus within an element. This is intended to be used to create accessible experience for components like modal dialogs, where focus must be constrained. This directive is declared in A11yModule.

+
+
+

This directive will not prevent focus from moving out of the trapped region due to mouse interaction.

+
+
+

For example:

+
+
+
+
<div class="my-inner-dialog-content" cdkTrapFocus>
+  <!-- Tab and Shift + Tab will not leave this element. -->
+</div>
+
+
+
+
+
+

Regions

+
+
+

Regions can be declared explicitly with an initial focus element by using the cdkFocusRegionStart, cdkFocusRegionEnd and cdkFocusInitial DOM attributes. When using the tab key, focus will move through this region and wrap around on either end.

+
+
+

For example:

+
+
+
+
<a mat-list-item routerLink cdkFocusRegionStart>Focus region start</a>
+<a mat-list-item routerLink>Link</a>
+<a mat-list-item routerLink cdkFocusInitial>Initially focused</a>
+<a mat-list-item routerLink cdkFocusRegionEnd>Focus region end</a>
+
+
+
+
+
+

InteractivityChecker

+
+
+

InteractivityChecker is used to check the interactivity of an element, capturing disabled, visible, tabbable, and focusable states for accessibility purposes.

+
+
+
+
+

LiveAnnouncer

+
+
+

LiveAnnouncer is used to announce messages for screen-reader users using an aria-live region.

+
+
+

For example:

+
+
+
+
@Component({...})
+export class MyComponent {
+
+ constructor(liveAnnouncer: LiveAnnouncer) {
+   liveAnnouncer.announce("Hey Google");
+ }
+}
+
+
+
+
+
+

API reference for Angular CDK a11y

+ +
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-add-electron.html b/docs/devon4ts/1.0/angular/guide-add-electron.html new file mode 100644 index 00000000..bcaf7bb5 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-add-electron.html @@ -0,0 +1,848 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Add Electron to an Angular application using Angular CLI

+
+
+

This cookbook recipe explains how to integrate Electron in an Angular 10+ application. Electron is a framework for creating native applications with web technologies like JavaScript, HTML, and CSS. As an example, very well known applications as Visual Studio Code, Atom, Slack or Skype (and many more) are using Electron too.

+
+
+ + + + + +
+ + +At the moment of this writing Angular 11.2.0, Electron 11.2.3 and Electron-builder 22.9.1 were the versions available. +
+
+
+

Here are the steps to achieve this goal. Follow them in order.

+
+
+
+
+

Add Electron and other relevant dependencies

+
+
+

There are two different approaches to add the dependencies in the package.json file:

+
+
+
    +
  • +

    Writing the dependencies directly in that file.

    +
  • +
  • +

    Installing using npm install or yarn add.

    +
  • +
+
+
+ + + + + +
+ + +Please remember if the project has a package-lock.json or yarn.lock file use npm or yarn respectively. +
+
+
+

In order to add the dependencies directly in the package.json file, include the following lines in the devDependencies section:

+
+
+
+
"devDependencies": {
+...
+    "electron": "^11.2.3",
+    "electron-builder": "^22.9.1",
+...
+},
+
+
+
+

As indicated above, instead of this npm install can be used:

+
+
+
+
$ npm install -D electron electron-builder
+
+
+
+

Or with yarn:

+
+
+
+
$ yarn add -D electron electron-builder
+
+
+
+
+
+

Create the necessary typescript configurations

+
+
+

In order to initiate electron in an angular app we need to modify the tsconfig.json file and create a tsconfig.serve.json and a tsconfig.base.json in the root folder.

+
+
+
+
+

== tsconfig.json

+
+
+

This file needs to be modified to create references to ./src/tsconfig.app.json and ./src/tsconfig.spec.json to support different configurations.

+
+
+
+
{
+  "files": [],
+  "references": [
+    {
+      "path": "./src/tsconfig.app.json"
+    },
+    {
+      "path": "./src/tsconfig.spec.json"
+    }
+  ]
+}
+
+
+
+
+
+

== tsconfig.app.json

+
+
+
+
{
+  "extends": "../tsconfig.base.json",
+  "compilerOptions": {
+    "outDir": "../app",
+    "module": "es2015",
+    "baseUrl": "",
+    "types": []
+  },
+  "include": [
+    "**/*.ts",
+  ],
+  "exclude": [
+    "**/*.spec.ts"
+  ],
+  "angularCompilerOptions": {
+    "fullTemplateTypeCheck": true,
+    "strictInjectionParameters": true,
+    "preserveWhitespaces": true
+  }
+}
+
+
+
+
+
+

== tsconfig.spec.json

+
+
+
+
{
+  "extends": "../tsconfig.base.json",
+  "compilerOptions": {
+    "outDir": "../spec",
+    "module": "commonjs",
+    "types": [
+      "jasmine",
+      "node"
+    ]
+  },
+  "files": [
+    "test.ts",
+  ],
+  "include": [
+    "**/*.spec.ts",
+    "**/*.d.ts"
+  ],
+  "exclude": [
+    "dist",
+    "release",
+    "node_modules"
+  ]
+}
+
+
+
+
+
+

== tsconfig.base.json

+
+
+

This is shared between tsconfig.app.json and tsconfig.spec.json and it will be extended on each config file.

+
+
+
+
{
+  "compileOnSave": false,
+  "compilerOptions": {
+    "outDir": "./dist",
+    "sourceMap": true,
+    "declaration": false,
+    "moduleResolution": "node",
+    "emitDecoratorMetadata": true,
+    "experimentalDecorators": true,
+    "target": "es5",
+    "typeRoots": [
+      "node_modules/@types"
+    ],
+    "lib": [
+      "es2017",
+      "es2016",
+      "es2015",
+      "dom"
+    ]
+  },
+  "files": [
+    "electron-main.ts",
+    "src/polyfills.ts"
+  ],
+  "include": [
+    "src/**/*.d.ts"
+  ],
+  "exclude": [
+    "node_modules"
+  ]
+}
+
+
+
+
+
+

== tsconfig.serve.json

+
+
+

In the root, tsconfig.serve.json needs to be created. This typescript config file is going to be used when we serve electron:

+
+
+
+
{
+  "compilerOptions": {
+    "outDir": ".",
+    "sourceMap": true,
+    "declaration": false,
+    "moduleResolution": "node",
+    "emitDecoratorMetadata": true,
+    "experimentalDecorators": true,
+    "target": "es5",
+    "typeRoots": [
+      "node_modules/@types"
+    ],
+    "lib": [
+      "es2017",
+      "dom"
+    ]
+  },
+  "include": [
+    "electron-main.ts"
+  ],
+  "exclude": [
+    "node_modules",
+    "**/*.spec.ts"
+  ]
+}
+
+
+
+
+
+

Add Electron build configuration

+
+
+

In order to configure electron builds properly we need to create a new json on our application, let’s call it electron-builder.json. For more information and fine tuning please refer to the Electron Builder official documentation.

+
+
+

The contents of the file will be something similar to the following:

+
+
+
+
{
+  "productName": "devon4ngElectron",
+  "directories":{
+    "output": "./builder-release"
+  },
+  "win": {
+    "icon": "dist/assets/icons",
+    "target": [
+      "portable"
+    ]
+  },
+  "mac": {
+    "icon": "dist/assets/icons",
+    "target": [
+      "dmg"
+    ]
+  },
+  "linux": {
+    "icon": "dist/assets/icons",
+    "target": [
+      "AppImage"
+    ]
+  }
+}
+
+
+
+

There are two important things in this files:

+
+
+
    +
  1. +

    "output": this is where electron builder is going to build our application

    +
  2. +
  3. +

    "icon": in every OS possible there is an icon parameter, the route to the icon folder that will be created after building with angular needs to be used here. This will make it so the electron builder can find the icons and build.

    +
  4. +
+
+
+
+
+

Modify angular.json

+
+
+

angular.json has to be modified so the project is built inside /dist without an intermediate folder.

+
+
+
+
{
+  "architect": {
+    "build": {
+      "outputPath": "dist"
+    }
+  }
+}
+
+
+
+
+
+

Create the electron window in electron-main.ts

+
+
+

In order to use electron, a file needs to be created at the root of the application (electron-main.ts). This file will create a window with different settings, checking if we are using --serve as an argument:

+
+
+
+
import { app, BrowserWindow } from 'electron';
+import * as path from 'path';
+import * as url from 'url';
+
+let win: any;
+const args: any = process.argv.slice(1);
+const serve: any = args.some((val) => val == '--serve');
+
+const createWindow:any = ()=>{
+  // Create the browser window.
+  win = new BrowserWindow({
+    fullscreen: true,
+    webPreferences: {
+      nodeIntegration: true,
+    }
+  });
+
+  if (serve) {
+    require('electron-reload')(__dirname, {
+      electron: require(`${__dirname}/node_modules/electron`)
+    });
+    win.loadURL('http://localhost:4200');
+  } else {
+    win.loadURL(
+      url.format({
+        pathname: path.join(__dirname, 'dist/index.html'),
+        protocol: 'file:',
+        slashes: true
+      })
+    );
+  }
+
+  if (serve) {
+    win.webContents.openDevTools();
+  }
+
+  // Emitted when the window is closed.
+  win.on('closed', () => {
+    // Dereference the window object, usually you would store window
+    // in an array if your app supports multi windows, this is the time
+    // when you should delete the corresponding element.
+    // tslint:disable-next-line:no-null-keyword
+    win = null;
+  });
+}
+
+try {
+  // This method will be called when Electron has finished
+  // initialization and is ready to create browser windows.
+  // Some APIs can only be used after this event occurs.
+  app.on('ready', createWindow);
+
+   // Quit when all windows are closed.
+  app.on('window-all-closed', () => {
+    // On OS X it is common for applications and their menu bar
+    // to stay active until the user quits explicitly with Cmd + Q
+    if (process.platform !==  'darwin') {
+      app.quit();
+    }
+  });
+
+   app.on('activate', () => {
+    // On OS X it's common to re-create a window in the app when the
+    // dock icon is clicked and there are no other windows open.
+    if (win == null) {
+      createWindow();
+    }
+  });
+} catch (e) {
+  // Catch Error
+  // throw e;
+}
+
+
+
+
+
+

Add the electron window and improve the package.json scripts

+
+
+

Inside package.json the electron window that will be transformed to electron-main.js when building needs to be added.

+
+
+
+
{
+  ....
+  "main": "electron-main.js",
+  "scripts": {...}
+  ....
+}
+
+
+
+

The scripts section in the package.json can be improved to avoid running too verbose commands. As a very complete example we can take a look at the My Thai Star scripts section and copy the lines useful in your project. In any case, at least we recommend adding the following lines:

+
+
+
+
  "scripts": {
+    "ng": "ng",
+    "start": "ng serve",
+    "build": "ng build",
+    "test": "ng test",
+    "lint": "ng lint",
+    "e2e": "ng e2e",
+    "electron:tsc": "tsc -p tsconfig.serve.json",
+    "electron:run": "npm run electron:tsc && ng build --base-href ./ && npx electron .",
+    "electron:serve": "npm run electron:tsc && npx electron . --serve",
+    "electron:pack": "npm run electron:tsc && electron-builder --dir --config electron-builder.json",
+    "electron:build": "npm run electron:tsc && electron-builder --config electron-builder.json build"
+  },
+
+
+
+

The electron: scripts do the following:

+
+
+
    +
  • +

    electron:tsc: Compiles electron TS files.

    +
  • +
  • +

    electron:run: Serves Angular app and runs electron.

    +
  • +
  • +

    electron:serve: Serves electron with an already running angular app (i.e. a ng serve command running on another terminal).

    +
  • +
  • +

    electron:pack: Packs electron app.

    +
  • +
  • +

    electron:build: Builds electron app.

    +
  • +
+
+
+
+
+

Add Electron to an Angular application using Nx CLI

+
+
+

Creating an Electron app is very easy and straight-forward if you are using Nx CLI. As a pre-requisite, you should already have an application in your Nx workspace which you want to run as a front-end in your Electron app. (You can follow this guide if you want to get started with Nx).

+
+
+

Follow the steps below to develop an Electron app in your Nx workspace:

+
+
+
+
+

Install nx-electron

+
+
+

Install nx-electron using the command:

+
+
+
+
  npm install -D nx-electron
+
+
+
+

This will add the packages electron and nx-electron as dev dependencies to your Nx workspace. This will help us generate our Electron app in the next step.

+
+
+
+
+

Generate your Electron app

+
+
+

Once you have installed nx-electron, you can generate your electron app using the command:

+
+
+
+
  nx g nx-electron:app <electron-app-name> --frontendProject=<frontend-app-name>
+
+
+
+

And that is it! You have generated your Electron app already. All the configuration files (tsconfig.*) are generated for you under <electron-app-name> in your Nx workspace.

+
+
+
+
+

Serving your app

+
+
+

You can use this command to serve your Electron app:

+
+
+
+
  nx run-many --target=serve --projects=<frontend-app-name>,<electron-app-name> --parallel
+
+
+
+

If you see a blank application, it is because the Electron app was served before the front-end was served. To avoid this, you can serve the front-end and back-end separately, (that is, serve the back-end only after the front-end is served).

+
+
+
+
+

Building your app

+
+
+

The command for building your Electron app in Nx is similar to the serve command above, you only change the target from serve to build:

+
+
+
+
  nx run-many --target=build --projects=<frontend-app-name>,<electron-app-name> --parallel
+
+
+
+
+
+

Packaging your app

+
+
+

Make sure you have built your app before you try to package it using the following command:

+
+
+
+
  nx run <electron-app-name>:package [--options]
+
+
+
+

The options that can be passed can be found here.

+
+
+

You can find a working example of an Electron app in devon4ts-samples.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-angular-elements.html b/docs/devon4ts/1.0/angular/guide-angular-elements.html new file mode 100644 index 00000000..2d836b78 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-angular-elements.html @@ -0,0 +1,949 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Elements

+
+ +
+
+
+

What are Angular Elements?

+
+
+

Angular elements are Angular components packaged as custom elements, a web standard for defining new HTML elements in a framework-agnostic way.

+
+
+

Custom elements are a Web Platform feature currently supported by Chrome, Firefox, Opera, and Safari, and available in other browsers through Polyfills. A custom element extends HTML by allowing you to define a tag whose content is created and controlled by JavaScript code. The browser maintains a CustomElementRegistry of defined custom elements (also called Web Components), which maps an instantiable JavaScript class to an HTML tag.

+
+
+
+
+

Why use Angular Elements?

+
+
+

Angular Elements allows Angular to work with different frameworks by using input and output elements. This is an ideal situation if a slow transformation of an application to Angular is needed or some Angular needs to be added to other web applications (for example, ASP.NET, JSP, etc.).

+
+
+
+
+

Negative points about Elements

+
+
+

Angular Elements is really powerful but, since the transition between views is going to be handled by another framework or HTML/JavaScript, using Angular Router is not possible. The view transitions have to be handled manually. This fact also eliminates the possibility of just porting an application completely.

+
+
+
+
+

How to use Angular Elements?

+
+
+

In a generalized way, a simple Angular component could be transformed to an Angular Element with these steps:

+
+
+
+
+

Installing Angular Elements

+
+
+

The first step is to install the library using our preferred package manager:

+
+
+
+
+

== NPM

+
+
+
+
npm install @angular/elements
+
+
+
+
+
+

== YARN

+
+
+
+
yarn add @angular/elements
+
+
+
+
+
+

Preparing the components in the modules

+
+
+

Inside the app.module.ts, in addition to the normal declaration of the components inside declarations, the modules inside imports and the services inside providers, the components need to be added in entryComponents. If there are components that have their own module, the same logic is going to be applied for them, only adding in the app.module.ts the components that do not have their own module. Here is an example of this:

+
+
+
+
....
+@NgModule({
+  declarations: [
+    DishFormComponent,
+    DishViewComponent
+  ],
+  imports: [
+    CoreModule,  // Module containing Angular Materials
+    FormsModule
+  ],
+  entryComponents: [
+    DishFormComponent,
+    DishViewComponent
+  ],
+  providers: [DishShareService]
+})
+....
+
+
+
+

After that is done, the constructor of the module is going to be modified to use injector and bootstrap the application defining the components. This is going to allow the Angular Element to get the injections and to define a component tag that will be used later:

+
+
+
+
....
+})
+export class AppModule {
+  constructor(private injector: Injector) {
+
+  }
+
+  ngDoBootstrap() {
+    const el = createCustomElement(DishFormComponent, {injector: this.injector});
+    customElements.define('dish-form', el);
+
+    const elView = createCustomElement(DishViewComponent, {injector: this.injector});
+    customElements.define('dish-view', elView);
+  }
+}
+....
+
+
+
+
+
+

A component example

+
+
+

In order to be able to use a component, @Input() and @Output() variables are used. These variables are going to be the ones that will allow the Angular Element to communicate with the framework/JavaScript:

+
+
+

Component html

+
+
+
+
<mat-card>
+    <mat-grid-list cols="1" rowHeight="100px" rowWidth="50%">
+				<mat-grid-tile colspan="1" rowspan="1">
+					<span>{{ platename }}</span>
+				</mat-grid-tile>
+				<form (ngSubmit)="onSubmit(dishForm)" #dishForm="ngForm">
+					<mat-grid-tile colspan="1" rowspan="1">
+						<mat-form-field>
+							<input matInput placeholder="Name" name="name" [(ngModel)]="dish.name">
+						</mat-form-field>
+					</mat-grid-tile>
+					<mat-grid-tile colspan="1" rowspan="1">
+						<mat-form-field>
+							<textarea matInput placeholder="Description" name="description" [(ngModel)]="dish.description"></textarea>
+						</mat-form-field>
+					</mat-grid-tile>
+					<mat-grid-tile colspan="1" rowspan="1">
+						<button mat-raised-button color="primary" type="submit">Submit</button>
+					</mat-grid-tile>
+				</form>
+		</mat-grid-list>
+</mat-card>
+
+
+
+

Component ts

+
+
+
+
@Component({
+  templateUrl: './dish-form.component.html',
+  styleUrls: ['./dish-form.component.scss']
+})
+export class DishFormComponent implements OnInit {
+
+  @Input() platename;
+
+  @Input() platedescription;
+
+  @Output()
+  submitDishEvent = new EventEmitter();
+
+  submitted = false;
+  dish = {name: '', description: ''};
+
+  constructor(public dishShareService: DishShareService) { }
+
+  ngOnInit() {
+    this.dish.name = this.platename;
+    this.dish.description = this.platedescription;
+  }
+
+  onSubmit(dishForm: NgForm): void {
+    this.dishShareService.createDish(dishForm.value.name, dishForm.value.description);
+    this.submitDishEvent.emit('dishSubmited');
+  }
+
+}
+
+
+
+

In this file there are definitions of multiple variables that will be used as input and output. Since the input variables are going to be used directly by html, only lowercase and underscore strategies can be used for them. On the onSubmit(dishForm: NgForm) a service is used to pass this variables to another component. Finally, as a last thing, the selector inside @Component has been removed since a tag that will be used dynamically was already defined in the last step.

+
+
+
+
+

Solving the error

+
+
+

In order to be able to use this Angular Element, a Polyfills/Browser support related error needs to be solved. This error can be solved in two ways:

+
+
+
+
+

== Changing the target

+
+
+

One solution is to change the target in tsconfig.json to es2015. This might not be doable for every application since maybe a specific target is required.

+
+
+
+
+

== Installing Polyfills

+
+
+

Another solution is to use a polyfill. In order to do so, the library is going to be installed with a package manager:

+
+
+

Yarn

+
+
+
+
yarn add @webcomponents/webcomponentsjs
+
+
+
+

Npm

+
+
+
+
npm install @webcomponents/webcomponentsjs
+
+
+
+

After the package manager has finished, inside the src folder a new file polyfills.ts is found. To solve the error, importing the corresponding adapter (custom-elements-es5-adapter.js) is necessary:

+
+
+
+
....
+/***************************************************************************************************
+ * APPLICATION IMPORTS
+ */
+
+import '@webcomponents/webcomponentsjs/custom-elements-es5-adapter.js';
+....
+
+
+
+

If you want to learn more about polyfills in angular you can do it here

+
+
+
+
+

Building the Angular Element

+
+
+

First, before building the Angular Element, every element inside the app component except the module needs to be removed. After that, a bash script is created in the root folder. This script will allow putting every necessary file into a single JS file.

+
+
+
+
ng build "projectName" --configuration production --output-hashing=none && cat dist/"projectName"/runtime.js dist/"projectName"/polyfills.js dist/"projectName"/scripts.js dist/"projectName"/main.js > ./dist/"projectName"/"nameWantedAngularElement".js
+
+
+
+

After executing the bash script, it will generate inside the path dist/"projectName" (or dist/apps/projectname in a Nx workspace) a JS file named "nameWantedAngularElement".js and a css file.

+
+
+
+
+ +
+
+

The library ngx-build-plus allows to add different options when building. In addition, it solves some errors that will occur when trying to use multiple angular elements in an application. In order to use it, yarn or npm can be used:

+
+
+

Yarn

+
+
+
+
yarn add ngx-build-plus
+
+
+
+

Npm

+
+
+
+
npm install ngx-build-plus
+
+
+
+

If you want to add it to a specific sub project in your projects folder, use the --project:

+
+
+
+
.... ngx-build-plus --project "project-name"
+
+
+
+

Using this library and the following command, an isolated Angular Element which won’t have conflicts with others can be generated. This Angular Element will not have a polyfill, so the project where we use it will need to include a polyfill with the Angular Element requirements.

+
+
+
+
ng build "projectName" --output-hashing none --single-bundle true --configuration production --bundle-styles false
+
+
+
+

This command will generate three things:

+
+
+
    +
  1. +

    The main JS bundle

    +
  2. +
  3. +

    The script JS

    +
  4. +
  5. +

    The css

    +
  6. +
+
+
+

These files will be used later instead of the single JS generated in the last step.

+
+
+
+
+

== == Extra parameters

+
+
+

Here are some extra useful parameters that ngx-build-plus provides:

+
+
+
    +
  • +

    --keep-polyfills: This parameter is going to allow us to keep the polyfills. This needs to be used with caution, avoiding using multiple different polyfills that could cause an error is necessary.

    +
  • +
  • +

    --extraWebpackConfig webpack.extra.js: This parameter allows us to create a JavaScript file inside our Angular Elements project with the name of different libraries. Using webpack these libraries will not be included in the Angular Element. This is useful to lower the size of our Angular Element by removing libraries shared. Example:

    +
  • +
+
+
+
+
const webpack = require('webpack');
+
+module.exports = {
+    "externals": {
+        "rxjs": "rxjs",
+        "@angular/core": "ng.core",
+        "@angular/common": "ng.common",
+        "@angular/common/http": "ng.common.http",
+        "@angular/platform-browser": "ng.platformBrowser",
+        "@angular/platform-browser-dynamic": "ng.platformBrowserDynamic",
+        "@angular/compiler": "ng.compiler",
+        "@angular/elements": "ng.elements",
+        "@angular/router": "ng.router",
+        "@angular/forms": "ng.forms"
+    }
+}
+
+
+
+
+
+

==

+
+
+
+
  If some libraries are excluded from the `Angular Element` you will need to add the bundled UMD files of those libraries manually.
+== ==
+
+
+
+
+
+

Using the Angular Element

+
+
+

The Angular Element that got generated in the last step can be used in almost every framework. In this case, the Angular Element is going to be used in html:

+
+
+
Listing 1. Sample index.html version without ngx-build-plus
+
+
<html>
+    <head>
+        <link rel="stylesheet" href="styles.css">
+    </head>
+    <body>
+        <div id="container">
+
+        </div>
+        <!--Use of the element non dynamically-->
+        <!--<plate-form platename="test" platedescription="test"></plate-form>-->
+        <script src="./devon4ngAngularElements.js"> </script>
+        <script>
+                var elContainer = document.getElementById('container');
+                var el= document.createElement('dish-form');
+                el.setAttribute('platename','test');
+                el.setAttribute('platedescription','test');
+                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+                elContainer.appendChild(el);
+        </script>
+    </body>
+</html>
+
+
+
+
Listing 2. Sample index.html version with ngx-build-plus
+
+
<html>
+    <head>
+        <link rel="stylesheet" href="styles.css">
+    </head>
+    <body>
+        <div id="container">
+
+        </div>
+        <!--Use of the element non dynamically-->
+        <!--<plate-form platename="test" platedescription="test"></plate-form>-->
+         <script src="./polyfills.js"> </script> <!-- Created using --keep-polyfills options -->
+        <script src="./scripts.js"> </script>
+         <script src="./main.js"> </script>
+        <script>
+                var elContainer = document.getElementById('container');
+                var el= document.createElement('dish-form');
+                el.setAttribute('platename','test');
+                el.setAttribute('platedescription','test');
+                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+                elContainer.appendChild(el);
+        </script>
+    </body>
+</html>
+
+
+
+

In this html, the css generated in the last step is going to be imported inside the <head> and then the JavaScript element is going to be imported at the end of the body. After that is done, there are two uses of Angular Elements in the html: one directly, with use of the @Input() variables as parameters, commented in the html:

+
+
+
+
....
+        <!--Use of the element non dynamically-->
+        <!--<plate-form platename="test" platedescription="test"></plate-form>-->
+....
+
+
+
+

and one dynamically inside the script:

+
+
+
+
....
+        <script>
+                var elContainer = document.getElementById('container');
+                var el= document.createElement('dish-form');
+                el.setAttribute('platename','test');
+                el.setAttribute('platedescription','test');
+                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+                elContainer.appendChild(el);
+        </script>
+....
+
+
+
+

This JavaScript is an example of how to dynamically create an Angular Element, inserting attributes to fill our @Input() variables and listening to the @Output() that was defined earlier. This is done with:

+
+
+
+
                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+
+
+
+

This allows JavaScript to hook with the @Output() event emitter that was defined. When this event gets called, another component that was defined gets inserted dynamically.

+
+
+
+
+

Angular Element within another Angular project

+
+
+

In order to use an Angular Element within another Angular project the following steps need to be followed:

+
+
+
+
+

Copy bundled script and css to resources

+
+
+

First copy the generated .js and .css inside assets in the corresponding folder.

+
+
+
+
+

Add bundled script to angular.json

+
+
+

Inside angular.json both of the files that were copied in the last step are going to be included. This will be done both in test and in build. Including them in test will allow performing unit tests.

+
+
+
+
{
+....
+  "architect": {
+    ....
+    "build": {
+      ....
+      "styles": [
+        ....
+          "src/assets/css/devon4ngAngularElements.css"
+        ....
+      ]
+      ....
+      "scripts": [
+        "src/assets/js/devon4ngAngularElements.js"
+      ]
+      ....
+    }
+    ....
+    "test": {
+      ....
+      "styles": [
+        ....
+          "src/assets/css/devon4ngAngularElements.css"
+        ....
+      ]
+      ....
+      "scripts": [
+        "src/assets/js/devon4ngAngularElements.js"
+      ]
+      ....
+    }
+  }
+}
+
+
+
+

By declaring the files in the angular.json angular will take care of including them in a proper way.

+
+
+
+
+

==

+
+
+
+
  If you are using Nx, the configuration file `angular.json` might be named as `workspace.json`, depending on how you had setup the workspace. The structure of the file remains similar though.
+== ==
+
+
+
+
+
+

Using Angular Element

+
+
+

There are two ways that Angular Element can be used:

+
+
+
+
+

== Create component dynamically

+
+
+

In order to add the component in a dynamic way, first adding a container is necessary:

+
+
+

app.component.html

+
+
+
+
....
+<div id="container">
+</div>
+....
+
+
+
+

With this container created, inside the app.component.ts a method is going to be created. This method is going to find the container, create the dynamic element and append it into the container.

+
+
+

app.component.ts

+
+
+
+
export class AppComponent implements OnInit {
+  ....
+  ngOnInit(): void {
+    this.createComponent();
+  }
+  ....
+  createComponent(): void {
+    const container = document.getElementById('container');
+    const component = document.createElement('dish-form');
+    container.appendChild(component);
+  }
+  ....
+
+
+
+
+
+

== Using it directly

+
+
+

In order to use it directly on the templates, in the app.module.ts the CUSTOM_ELEMENTS_SCHEMA needs to be added:

+
+
+
+
....
+import { NgModule, CUSTOM_ELEMENTS_SCHEMA } from '@angular/core';
+....
+@NgModule({
+  ....
+  schemas: [ CUSTOM_ELEMENTS_SCHEMA ],
+
+
+
+

This is going to allow the use of the Angular Element in the templates directly:

+
+
+

app.component.html

+
+
+
+
....
+<div id="container">
+  <dish-form></dish-form>
+</div>
+
+
+
+

You can find a working example of Angular Elements in our devon4ts-samples repo by referring the samples named angular-elements and angular-elements-test.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-angular-lazy-loading.html b/docs/devon4ts/1.0/angular/guide-angular-lazy-loading.html new file mode 100644 index 00000000..5e0b69c8 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-angular-lazy-loading.html @@ -0,0 +1,684 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Lazy loading

+
+
+

When the development of an application starts, it just contains a small set of features so the app usually loads fast. However, as new features are added, the overall application size grows and its loading speed decreases. It is in this context where Lazy loading finds its place. +Lazy loading is a design pattern that defers initialization of objects until they are needed, so, for example, users that just access a website’s home page do not need to have other areas loaded. +Angular handles lazy loading through the routing module which redirects to requested pages. Those pages can be loaded at start or on demand.

+
+
+
+
+

An example with Angular

+
+
+

To explain how lazy loading is implemented using angular, a basic sample app is going to be developed. This app will consist of a window named "level 1" that contains two buttons that redirect to other windows in a "second level". It is a simple example, but useful to understand the relation between angular modules and lazy loading.

+
+
+
+Levels app structure +
+
Figure 1. Levels app structure.
+
+
+

This graphic shows that modules act as gates to access components "inside" them.

+
+
+

Because the objective of this guide is related mainly with logic, the html structure and SCSS styles are less relevant, but the complete code can be found as a sample here.

+
+
+
+
+

Implementation

+
+
+

First write in a console ng new level-app --routing, to generate a new project called level-app including an app-routing.module.ts file (--routing flag). If you are using Nx, the command would be nx generate @nrwl/angular:app level-app --routing in your Nx workspace.

+
+
+

In the file app.component.html delete all the content except the router-outlet tag.

+
+
+
Listing 1. File app.component.html
+
+
<router-outlet></router-outlet>
+
+
+
+

The next steps consist of creating feature modules.

+
+
+
    +
  • +

    run ng generate module first --routing to generate a module named first.

    +
  • +
  • +

    run ng generate module first/second-left --routing to generate a module named second-left under first.

    +
  • +
  • +

    run ng generate module first/second-right --routing to generate a module second-right under first.

    +
  • +
  • +

    run ng generate component first/first to generate a component named first inside the module first.

    +
  • +
  • +

    run ng generate component first/second-left/content to generate a component content inside the module second-left.

    +
  • +
  • +

    run ng generate component first/second-right/content to generate a component content inside the module second-right.

    +
  • +
+
+
+
+
+

==

+
+
+
+
  If you are using Nx, you have to specify the project name (level-app) along with the --project flag. For example, command for generating the first module will be `ng generate module first --project=level-app --routing`
+== ==
+
+
+
+

To move between components we have to configure the routes used:

+
+
+

In app-routing.module.ts add a path 'first' to FirstComponent and a redirection from '' to 'first'.

+
+
+
Listing 2. File app-routing.module.ts.
+
+
...
+import { FirstComponent } from './first/first/first.component';
+
+const routes: Routes = [
+  {
+    path: 'first',
+    component: FirstComponent
+  },
+  {
+    path: '',
+    redirectTo: 'first',
+    pathMatch: 'full',
+  },
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule],
+})
+export class AppRoutingModule {}
+
+
+
+

In app.module.ts import the module which includes FirstComponent.

+
+
+
Listing 3. File app.module.ts
+
+
....
+import { FirstModule } from './first/first.module';
+
+@NgModule({
+  ...
+  imports: [
+    ....
+    FirstModule
+  ],
+  ...
+})
+export class AppModule { }
+
+
+
+

In first-routing.module.ts add routes that direct to the content of SecondRightModule and SecondLeftModule. The content of both modules has the same name, so in order to avoid conflicts the names of the components are going to be changed using as (original-name as new-name).

+
+
+
Listing 4. File first-routing.module.ts
+
+
...
+import { ContentComponent as ContentLeft} from './second-left/content/content.component';
+import { ContentComponent as ContentRight} from './second-right/content/content.component';
+import { FirstComponent } from './first/first.component';
+
+const routes: Routes = [
+  {
+    path: '',
+    component: FirstComponent
+  },
+  {
+    path: 'first/second-left',
+    component: ContentLeft
+  },
+  {
+    path: 'first/second-right',
+    component: ContentRight
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule]
+})
+export class FirstRoutingModule { }
+
+
+
+

In first.module.ts import SecondLeftModule and SecondRightModule.

+
+
+
Listing 5. File first.module.ts
+
+
...
+import { SecondLeftModule } from './second-left/second-left.module';
+import { SecondRightModule } from './second-right/second-right.module';
+
+@NgModule({
+  ...
+  imports: [
+    ...
+    SecondLeftModule,
+    SecondRightModule,
+  ]
+})
+export class FirstModule { }
+
+
+
+

Using the current configuration, we have a project that loads all the modules in an eager way. Run ng serve (with --project=level-app in an Nx workspace) to see what happens.

+
+
+

First, during the compilation we can see that just a main file is built.

+
+
+
+Compile eager +
+
Figure 2. Compile eager.
+
+
+

If we go to http://localhost:4200/first and open developer options (F12 on Chrome), it is found that a document named "first" is loaded.

+
+
+
+First level eager +
+
Figure 3. First level eager.
+
+
+

If we click on [Go to right module] a second level module opens, but there is no 'second-right' document.

+
+
+
+Second level right eager +
+
Figure 4. Second level right eager.
+
+
+

But typing the URL directly will load 'second-right' but not 'first', even if we click on [Go back]

+
+
+
+Second level right eager +
+
Figure 5. Second level right eager direct URL.
+
+
+

Modifying an angular application to load its modules lazily is easy, you have to change the routing configuration of the desired module (for example FirstModule).

+
+
+
Listing 6. File app-routing.module.ts.
+
+
const routes: Routes = [
+  {
+    path: 'first',
+    loadChildren: () => import('./first/first.module').then(m => m.FirstModule),
+  },
+  {
+    path: '',
+    redirectTo: 'first',
+    pathMatch: 'full',
+  },
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule],
+})
+export class AppRoutingModule {}
+
+
+
+

Notice that instead of loading a component, you dynamically import it in a loadChildren attribute because modules act as gates to access components "inside" them. Updating the app to load lazily has four consequences:

+
+
+
    +
  1. +

    No component attribute.

    +
  2. +
  3. +

    No import of FirstComponent.

    +
  4. +
  5. +

    FirstModule import has to be removed from the imports array at app.module.ts.

    +
  6. +
  7. +

    Change of context.

    +
  8. +
+
+
+

If we check first-routing.module.ts again, we can see that the path for ContentLeft and ContentRight is set to 'first/second-left' and 'first/second-right' respectively, so writing http://localhost:4200/first/second-left will redirect us to ContentLeft. However, after loading a module with loadChildren setting the path to 'second-left' and 'second-right' is enough because it acquires the context set by AppRoutingModule.

+
+
+
Listing 7. File first-routing.module.ts
+
+
const routes: Routes = [
+  {
+    path: '',
+    component: FirstComponent
+  },
+  {
+    path: 'second-left',
+    component: ContentLeft
+  },
+  {
+    path: 'second-right',
+    component: ContentRight
+  }
+];
+
+
+
+

If we go to 'first' then FirstModule is situated in '/first' but also its children ContentLeft and ContentRight, so it is not necessary to write in their path 'first/second-left' and 'first/second-right', because that will situate the components on 'first/first/second-left' and 'first/first/second-right'.

+
+
+
+First level wrong path +
+
Figure 6. First level lazy wrong path.
+
+
+

When we compile an app with lazy loaded modules, files containing them will be generated

+
+
+
+First level lazy compilation +
+
Figure 7. First level lazy compilation.
+
+
+

And if we go to developer tools → network, we can find those modules loaded (if they are needed).

+
+
+
+First level lazy +
+
Figure 8. First level lazy.
+
+
+

To load the component ContentComponent of SecondLeftModule lazily, we have to load SecondLeftModule as a children of FirstModule:

+
+
+
    +
  • +

    Change component to loadChildren and reference SecondLeftModule.

    +
  • +
+
+
+
Listing 8. File first-routing.module.ts.
+
+
const routes: Routes = [
+  {
+    path: '',
+    component: FirstComponent
+  },
+  {
+    path: 'second-left',
+    loadChildren: () => import('./second-left/second-left.module').then(m => m.SecondLeftModule),
+  },
+  {
+    path: 'second-right',
+    component: ContentRight
+  }
+];
+
+
+
+
    +
  • +

    Remove SecondLeftModule at first.component.ts

    +
  • +
  • +

    Route the components inside SecondLeftModule. Without this step nothing would be displayed.

    +
  • +
+
+
+
Listing 9. File second-left-routing.module.ts.
+
+
...
+import { ContentComponent } from './content/content.component';
+
+const routes: Routes = [
+  {
+    path: '',
+    component: ContentComponent
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule]
+})
+export class SecondLeftRoutingModule { }
+
+
+
+
    +
  • +

    run ng serve to generate files containing the lazy modules.

    +
  • +
+
+
+
+Second level lazy +
+
Figure 9. Second level lazy loading compilation.
+
+
+

Clicking on [Go to left module] triggers the load of SecondLeftModule.

+
+
+
+Second level lazy network +
+
Figure 10. Second level lazy loading network.
+
+
+
+
+

Conclusion

+
+
+

Lazy loading is a pattern useful when new features are added. These features are usually identified as modules which can be loaded only if needed, as shown in this document, reducing the time spent loading an application.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-angular-library.html b/docs/devon4ts/1.0/angular/guide-angular-library.html new file mode 100644 index 00000000..99c955de --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-angular-library.html @@ -0,0 +1,566 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Library

+
+
+

Angular CLI provides us with methods that allow the creation of a library. After that, using a packet manager (either npm or yarn) the library can be built and packed, which will later allow us to install/publish it.

+
+
+
+
+

What’s a library?

+
+
+

From wikipedia: a library is a collection of non-volatile resources used by computer programs, often for software development. These may include configuration data, documentation, help data, message templates, pre-written code and subroutines, classes, values or type specifications.

+
+
+
+
+

How to build a library

+
+
+

In this section, a library is going to be built step by step. Please note, we will be explaining the steps using both Angular CLI and Nx CLI. You are free to choose either one for your development.

+
+
+
+
+

1. Creating an empty application

+
+
+

First, using Angular CLI we are going to generate an empty application which will be later filled with the generated library. In order to do so, Angular CLI allows us to add to ng new "application-name" an option (--create-application). This option is going to tell Angular CLI not to create the initial app project. This is convenient since a library is going to be generated in later steps. Using this command ng new "application-name" --create-application=false an empty project with the wanted name is created.

+
+
+
+
ng new "application-name" --create-application=false
+
+
+
+

This step is much easier and more straightforward when using Nx. Nx allows us to work in a monorepo workspace, where you can develop a project as an application, or a library, or a tool. You can follow this guide to get started with Nx. +The command for generating a library in Nx is nx generate @nrwl/angular:library library-name --publishable --importPath=library-name. This will create an empty angular application which we can modify and publish as a library.

+
+
+
+
+

2. Generating a library

+
+
+

After generating an empty application, a library is going to be generated. Inside the folder of the project, the Angular CLI command ng generate library "library-name" is going to generate the library as a project (projects/"library-name"). As an addition, the option --prefix="library-prefix-wanted" allows us to switch the default prefix that Angular generated with (lib). Using the option to change the prefix the command will look like this ng generate library "library-name" --prefix="library-prefix-wanted".

+
+
+
+
ng generate library "library-name" --prefix="library-prefix-wanted"
+
+
+
+

If you are using Nx, this step is not needed as it is already covered in step 1. In this case, the library project will be generated in the libs folder of a Nx workspace.

+
+
+
+
+

3. Modifying our library

+
+
+

In the last step we generated a library. This automatically generates a module, a service and a component inside projects/"library-name" that we can modify, adding new methods, components, etc. that we want to use in other projects. We can generate other elements using the usual Angular CLI generate commands, adding the option --project="library-name"; this will allow us to generate elements within our project. An example of this is: ng generate service "name" --project="library-name".

+
+
+
+
ng generate "element" "name" --project="library-name"
+
+
+
+

You can use the same command as above in a Nx workspace.

+
+
+
+
+

4. Exporting the generated things

+
+
+

Inside the library (projects/"library-name") there’s a public_api.ts which is the file that exports the elements inside the library. (The file is named as index.ts in an Nx workspace). In case we generated other things, this file needs to be modified adding the extra exports with the generated elements. In addition, changing the library version is possible in the file package.json.

+
+
+
+
+

5. Building our library

+
+
+

Once we added the necessary exports, in order to use the library in other applications, we need to build the library. The command ng build "library-name" is going to build the library, generating the necessary files in "project-name"/dist/"library-name".

+
+
+
+
ng build "library-name"
+
+
+
+

You can use the same command in Nx as well. Only the path for the generated files will be slightly different: "project-name"/dist/libs/"library-name"

+
+
+
+
+

6. Packing the library

+
+
+

In this step we are going to pack the build library. In order to do so, we need to go inside dist/"library-name" (or dist/libs/"library-name") and then run either npm pack or yarn pack to generate a "library-name-version.tgz" file.

+
+
+
Listing 1. Packing using npm
+
+
npm pack
+
+
+
+
Listing 2. Packing using yarn
+
+
yarn pack
+
+
+
+
+
+

7. Publishing to npm repository (optional)

+
+
+
    +
  • +

    Add a README.md and LICENSE file. The text inside README.md will be used in your npm package web page as documentation.

    +
  • +
  • +

    run npm adduser if you do not have a npm account to create it, otherwise run npm login and introduce your credentials.

    +
  • +
  • +

    run npm publish inside dist/"library-name" folder.

    +
  • +
  • +

    Check that the library is published: https://npmjs.com/package/library-name

    +
  • +
+
+
+
+
+

8. Installing our library in other projects

+
+
+

In this step we are going to install/add the library on other projects.

+
+
+
+
+

== npm

+
+
+

In order to add the library in other applications, there are two ways:

+
+
+
    +
  • +

    Option 1: From inside the application where the library is going to get used, using the command npm install "path-to-tgz"/"library-name-version.tgz" allows us to install the .tgz generated in Packing the library.

    +
  • +
  • +

    Option 2: run npm install "library-name" to install it from npm repository.

    +
  • +
+
+
+
+
+

== yarn

+
+
+

To add the package using yarn:

+
+
+
    +
  • +

    Option 1: From inside the application where the library is going to get used, using the command yarn add "path-to-tgz"/"library-name-version.tgz" allows us to install the .tgz generated in Packing the library.

    +
  • +
  • +

    Option 2: run yarn add "library-name" to install it from npm repository.

    +
  • +
+
+
+
+
+

9. Using the library

+
+
+

Finally, once the library was installed with either packet manager, you can start using the elements from inside like they would be used in a normal element inside the application. Example app.component.ts:

+
+
+
+
import { Component, OnInit } from '@angular/core';
+import { MyLibraryService } from 'my-library';
+
+@Component({
+  selector: 'app-root',
+  templateUrl: './app.component.html',
+  styleUrls: ['./app.component.scss']
+})
+export class AppComponent implements OnInit {
+
+  toUpper: string;
+
+  constructor(private myLibraryService: MyLibraryService) {}
+  title = 'devon4ng library test';
+  ngOnInit(): void {
+    this.toUpper = this.myLibraryService.firstLetterToUpper('test');
+  }
+}
+
+
+
+

Example app.component.html:

+
+
+
+
<!--The content below is only a placeholder and can be replaced.-->
+<div style="text-align:center">
+  <h1>
+    Welcome to {{ title }}!
+  </h1>
+  <img width="300" alt="Angular Logo" src="data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAyNTAgMjUwIj4KICAgIDxwYXRoIGZpbGw9IiNERDAwMzEiIGQ9Ik0xMjUgMzBMMzEuOSA2My4ybDE0LjIgMTIzLjFMMTI1IDIzMGw3OC45LTQzLjcgMTQuMi0xMjMuMXoiIC8+CiAgICA8cGF0aCBmaWxsPSIjQzMwMDJGIiBkPSJNMTI1IDMwdjIyLjItLjFWMjMwbDc4LjktNDMuNyAxNC4yLTEyMy4xTDEyNSAzMHoiIC8+CiAgICA8cGF0aCAgZmlsbD0iI0ZGRkZGRiIgZD0iTTEyNSA1Mi4xTDY2LjggMTgyLjZoMjEuN2wxMS43LTI5LjJoNDkuNGwxMS43IDI5LjJIMTgzTDEyNSA1Mi4xem0xNyA4My4zaC0zNGwxNy00MC45IDE3IDQwLjl6IiAvPgogIDwvc3ZnPg== ">
+</div>
+<h2>Here is my library service being used: {{toUpper}}</h2>
+<lib-my-library></lib-my-library>
+
+
+
+

Example app.module.ts:

+
+
+
+
import { BrowserModule } from '@angular/platform-browser';
+import { NgModule } from '@angular/core';
+
+import { AppRoutingModule } from './app-routing.module';
+import { AppComponent } from './app.component';
+
+import { MyLibraryModule } from 'my-library';
+@NgModule({
+  declarations: [
+    AppComponent
+  ],
+  imports: [
+    BrowserModule,
+    AppRoutingModule,
+    MyLibraryModule
+  ],
+  providers: [],
+  bootstrap: [AppComponent]
+})
+export class AppModule { }
+
+
+
+

The result from using the library:

+
+
+
+result +
+
+
+
+
+

devon4ng libraries

+
+
+

In devonfw/devon4ng-library you can find some useful libraries:

+
+
+
    +
  • +

    Authorization module: This devon4ng Angular module adds rights-based authorization to your Angular app.

    +
  • +
  • +

    Cache module: Use this devon4ng Angular module when you want to cache requests to server. You may configure it to store in cache only the requests you need and to set the duration you want.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-angular-mock-service.html b/docs/devon4ts/1.0/angular/guide-angular-mock-service.html new file mode 100644 index 00000000..bf4c71ca --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-angular-mock-service.html @@ -0,0 +1,409 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Mock Service

+
+
+

We’ve all been there: A new idea comes, let’s quickly prototype it. But wait, there’s no back-end. What can we do?

+
+
+

Below you will find a solution that will get you started quickly and easily. The idea is to write a simple mock service that helps us by feeding data into our components.

+
+
+
+
+

The app we start with

+
+
+

Let’s say you have a simple boilerplate code, with your favorite styling library hooked up and you’re ready to go. The angular-material-basic-layout sample is a good starting place.

+
+
+
+
+

The Components

+
+
+

Components - are the building blocks of our application. Their main role is to enable fragments of user interfaces. They will either display data (a list, a table, a chart, etc.), or 'collect' user interaction (e.g: a form, a menu, etc.)

+
+
+

Components stay at the forefront of the application. They should also be reusable (as much as possible). Reusability is key for what we are trying to achieve - a stable, maintainable front-end where multiple people can contribute and collaborate.

+
+
+

In our project, we are at the beginning. That means we may have more ideas than plans. We are exploring possibilities. In order to code efficiently:
+1) We will not store mock data in the components.
+2) We will not fetch or save data directly in the components.

+
+ +
+
+
+

The Service

+
+
+

So, how do we get data in our app? How do we propagate the data to the components, and how can we send user interaction from the components to our data "manager" logic?

+
+
+

The answer to all these questions is an Angular Service (that we will just call a service from now on).

+
+
+

A service is an injectable logic that can be consumed by all the components that need it. It can carry manipulation functions and, in our case, fetch data from a provider.

+
+
+
+Service Architecture +
+
Figure 1. Angular Components & Services architecture.
+
+
+

Inside the Angular App, an Injector gives each component access to their required services. It’s good coding practice to use a distinct service for each data type you want to manipulate. The type is described in an interface.

+
+
+

Still, our ideas drive in different ways, so we have to stay flexible. We cannot use a database at the moment, but we want a way to represent data on screen, which can grow organically.

+
+ +
+
+
+

The Model

+
+
+
+Data Box +
+
Figure 2. Data box in relation to services and components.
+
+
+

Let’s consider a 'box of data' represented in JSON. Physically, this means a folder with some JSON/TS files in it. They are located in the app/mock folder. The example uses only one mock data file. The file is typed according to our data model.

+
+
+

Pro tip: separate your files based on purpose. In your source code, put the mock files in the mock folder, components in the components folder, services in the services folder and data models in the models folder.

+
+
+
+Project Structure +
+
Figure 3. Project structure.
+
+
+

Aligned with the Angular way of development, we are implementing a model-view-controller pattern.

+
+
+

The model is represented by the interfaces we make. These interfaces describe the data structures we will use in our application. In this example, there is one data model, corresponding with the 'type' of data that was mocked. In the models folder you will find the .ts script file that describes chemical elements. The corresponding mock file defines a set of chemical element objects, in accordance with our interface definition.

+
+
+
+
+

Use case

+
+
+

Enough with the theory, let’s see what we have here. The app presents 3 pages as follows:

+
+
+
    +
  • +

    A leader board with the top 3 elements

    +
  • +
  • +

    A data table with all the elements

    +
  • +
  • +

    A details page that reads a route parameter and displays the details of the element.

    +
  • +
+
+
+

There are a lot of business cases which have these requirements:

+
+
+
    +
  • +

    A leader board can be understood as "the most popular items in a set", "the latest updated items", "you favorite items" etc.

    +
  • +
  • +

    A data table with CRUD operations is very useful (in our case we only view details or delete an item, but they illustrate two important things: the details view shows how to navigate and consume a parametric route, the delete action shows how to invoke service operations over the loaded data - this means that the component is reusable and when the data comes with an API, only the service will need its implementation changed)

    +
  • +
+
+
+

Check out the angular-mock-service sample from the apps folder and easily get started with front-end development using dummy data.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-angular-pwa.html b/docs/devon4ts/1.0/angular/guide-angular-pwa.html new file mode 100644 index 00000000..94fa1806 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-angular-pwa.html @@ -0,0 +1,816 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Progressive Web App

+
+
+

Progressive web applications (PWA) are web applications that offer a better user experience than traditional ones. In general, they solve problems related to reliability and speed:

+
+
+
    +
  • +

    Reliability: PWAs are stable. In this context stability means that even with slow connections or even with no network at all, the application still works. To achieve this, some basic resources like styles, fonts, requests, … are stored; due to this caching, it is not possible to assure that the content is always up-to-date.

    +
  • +
  • +

    Speed: When a user opens an application, he or she will expect it to load almost immediately (almost 53% of users abandon sites that take longer than 3 seconds, source: https://developers.google.com/web/progressive-web-apps/#fast).

    +
  • +
+
+
+

PWA uses a script called a service worker, which runs in the background and essentially acts as a proxy between the web app and the network, intercepting requests and acting depending on the network conditions.

+
+
+
+
+

Assumptions

+
+
+

This guide assumes that you already have installed:

+
+
+
    +
  • +

    NodeJS

    +
  • +
  • +

    npm package manager

    +
  • +
  • +

    Angular CLI / Nx CLI

    +
  • +
+
+
+
+
+

Sample Application

+
+
+
+My Thai Star recommendation +
+
Figure 1. Basic angular PWA.
+
+
+

To explain how to build PWA using angular, a basic application is going to be built. This app will be able to ask for resources and save in the cache in order to work even offline.

+
+
+
+
+

Step 1: Create a new project

+
+
+

This step can be completed with one simple command using the Angular CLI: ng new <name>, where <name> is the name for the app. In this case, the app is going to be named basic-ng-pwa. If you are using Nx CLI, you can use the command nx generate @nrwl/angular:app <name> in your Nx workspace. You can follow this guide if you want to get started with Nx workspace.

+
+
+
+
+

Step 2: Create a service

+
+
+

Web applications usually use external resources, making it necessary to add services which can get those resources. This application gets a dish from My Thai Star’s back-end and shows it. To do so, a new service is going to be created.

+
+
+
    +
  • +

    go to project folder: cd basic-ng-pwa. If using Nx, go to the root folder of the workspace.

    +
  • +
  • +

    run ng generate service data. For Nx CLI, specify the project name with --project flag. So the command becomes ng generate service data --project=basic-ng-pwa

    +
  • +
  • +

    Modify data.service.ts, environment.ts, environment.prod.ts

    +
  • +
+
+
+

To retrieve data with this service, you have to import the module HttpClient and add it to the service’s constructor. Once added, use it to create a function getDishes() that sends an HTTP request to My Thai Star’s back-end. The URL of the back-end can be stored as an environment variable MY_THAI_STAR_DISH.

+
+
+

data.service.ts

+
+
+
+
  ...
+  import { HttpClient } from '@angular/common/http';
+  import { MY_THAI_STAR_DISH } from '../environments/environment';
+  ...
+
+  export class DataService {
+    constructor(private http: HttpClient) {}
+
+    /* Get data from Back-end */
+    getDishes() {
+      return this.http.get(MY_THAI_STAR_DISH);
+    }
+    ...
+  }
+
+
+
+

environments.ts

+
+
+
+
  ...
+  export const MY_THAI_STAR_DISH =
+  'https://mts-devonfw-core.cloud.okteto.net/api/services/rest/dishmanagement/v1/dish/1';
+  ...
+
+
+
+

environments.prod.ts

+
+
+
+
  ...
+  export const MY_THAI_STAR_DISH =
+  'https://mts-devonfw-core.cloud.okteto.net/api/services/rest/dishmanagement/v1/dish/1';
+  ...
+
+
+
+
+
+

Step 3: Use the service

+
+
+

The component AppComponent implements the interface OnInit and inside its method ngOnInit() the subscription to the services is done. When a dish arrives, it is saved and shown (app.component.html).

+
+
+
+
  ...
+  import { DataService } from './data.service';
+  export class AppComponent implements OnInit {
+  dish: { name: string; description: string } = { name: '', description: ''};
+
+  ...
+  ngOnInit() {
+    this.data
+      .getDishes()
+      .subscribe(
+        (dishToday: { dish: { name: string; description: string } }) => {
+          this.dish = {
+            name: dishToday.dish.name,
+            description: dishToday.dish.description,
+          };
+        },
+      );
+  }
+}
+
+
+
+
+
+

Step 4: Structures, styles and updates

+
+
+

This step shows interesting code inside the sample app. The complete content can be found in devon4ts-samples.

+
+
+

index.html

+
+
+

To use the Montserrat font add the following link inside the head tag of the app’s index.html file.

+
+
+
+
  <link href="https://fonts.googleapis.com/css?family=Montserrat" rel="stylesheet">
+
+
+
+

styles.scss

+
+
+
+
  body {
+    ...
+    font-family: 'Montserrat', sans-serif;
+  }
+
+
+
+

app.component.ts

+
+
+

This file is also used to reload the app if there are any changes.

+
+
+
    +
  • +

    SwUpdate: This object comes inside the @angular/service-worker package and it is used to detect changes and reload the page if needed.

    +
  • +
+
+
+
+
  ...
+  import { SwUpdate } from '@angular/service-worker';
+
+  export class AppComponent implements OnInit {
+
+  ...
+    constructor(updates: SwUpdate, private data: DataService) {
+      updates.available.subscribe((event) => {
+        updates.activateUpdate().then(() => document.location.reload());
+      });
+    }
+    ...
+  }
+
+
+
+
+
+

Step 5: Make it Progressive.

+
+
+

Install the Angular PWA package with ng add @angular/pwa --project=<name>. As before, substitute <name> with basic-ng-pwa.

+
+
+

The above command completes the following actions:

+
+
+
    +
  1. +

    Adds the @angular/service-worker package to your project.

    +
  2. +
  3. +

    Enables service worker build support in the CLI.

    +
  4. +
  5. +

    Imports and registers the service worker in the app module.

    +
  6. +
  7. +

    Updates the index.html file:

    +
    +
      +
    • +

      Includes a link to add the manifest.json file.

      +
    • +
    • +

      Adds meta tags for theme-color.

      +
    • +
    • +

      Installs icon files to support the installed Progressive Web App (PWA).

      +
    • +
    • +

      Creates the service worker configuration file called ngsw-config.json, which specifies the caching behaviors and other settings.

      +
    • +
    +
    +
  8. +
+
+
+
+
+

== manifest.json

+
+
+

manifest.json is a file that allows you to control how the app is displayed in places where native apps are displayed.

+
+
+

Fields

+
+
+

name: Name of the web application.

+
+
+

short_name: Short version of name.

+
+
+

theme_color: Default theme color for an application context.

+
+
+

background_color: Expected background color of the web application.

+
+
+

display: Preferred display mode.

+
+
+

scope: Navigation scope of this web application’s application context.

+
+
+

start_url: URL loaded when the user launches the web application.

+
+
+

icons: Array of icons that serve as representations of the web app.

+
+
+

Additional information can be found here.

+
+
+
+
+

== ngsw-config.json

+
+
+

ngsw-config.json specifies which files and data URLs have to be cached and updated by the Angular service worker.

+
+
+

Fields

+
+
+
    +
  • +

    index: File that serves as index page to satisfy navigation requests.

    +
  • +
  • +

    assetGroups: Resources that are part of the app version that update along with the app.

    +
    +
      +
    • +

      name: Identifies the group.

      +
    • +
    • +

      installMode: How the resources are cached (pre-fetch or lazy).

      +
    • +
    • +

      updateMode: Caching behavior when a new version of the app is found (pre-fetch or lazy).

      +
    • +
    • +

      resources: Resources to cache. There are three groups.

      +
      +
        +
      • +

        files: Lists patterns that match files in the distribution directory.

        +
      • +
      • +

        urls: URL patterns matched at runtime.

        +
      • +
      +
      +
    • +
    +
    +
  • +
  • +

    dataGroups: Useful for API requests.

    +
    +
      +
    • +

      name: Identifies the group.

      +
    • +
    • +

      urls: URL patterns matched at runtime.

      +
    • +
    • +

      version: Indicates that the resources being cached have been updated in a backwards-incompatible way.

      +
    • +
    • +

      cacheConfig: Policy by which matching requests will be cached

      +
      +
        +
      • +

        maxSize: The maximum number of entries, or responses, in the cache.

        +
      • +
      • +

        maxAge: How long responses are allowed to remain in the cache.

        +
        +
          +
        • +

          d: days. (5d = 5 days).

          +
        • +
        • +

          h: hours

          +
        • +
        • +

          m: minutes

          +
        • +
        • +

          s: seconds. (5m20s = 5 minutes and 20 seconds).

          +
        • +
        • +

          u: milliseconds

          +
        • +
        +
        +
      • +
      • +

        timeout: How long the Angular service worker will wait for the network to respond before using a cached response. Same dataformat as maxAge.

        +
      • +
      • +

        strategy: Caching strategies (performance or freshness).

        +
      • +
      +
      +
    • +
    +
    +
  • +
  • +

    navigationUrls: List of URLs that will be redirected to the index file.

    +
  • +
+
+
+

Additional information can be found here.

+
+
+
+
+

Step 6: Configure the app

+
+
+

manifest.json

+
+
+

Default configuration.

+
+
+

 

+
+
+

ngsw-config.json

+
+
+

At assetGroups → resources → urls: In this field the google fonts API is added in order to use Montserrat font even without network.

+
+
+
+
  "urls": [
+          "https://fonts.googleapis.com/**"
+        ]
+
+
+
+

At the root of the json: A data group to cache API calls.

+
+
+
+
  {
+    ...
+    "dataGroups": [{
+      "name": "mythaistar-dishes",
+      "urls": [
+        "https://mts-devonfw-core.cloud.okteto.net/api/services/rest/dishmanagement/v1/dish/1"
+      ],
+      "cacheConfig": {
+        "maxSize": 100,
+        "maxAge": "1h",
+        "timeout": "10s",
+        "strategy": "freshness"
+      }
+    }]
+  }
+
+
+
+
+
+

Step 7: Check that your app is a PWA

+
+
+

To check if an app is a PWA, let’s compare its normal behavior against itself but built for production. Run the commands below in the project’s root folder:

+
+
+

ng build --prod to build the app using production settings.(nx build <name> --prod in Nx CLI)

+
+
+

npm install http-server to install an npm module that can serve your built application. Documentation here.

+
+
+

Go to the dist/basic-ng-pwa/ folder running cd dist/basic-ng-pwa. In an Nx workspace, the path will be dist/apps/basic-ng-pwa

+
+
+

http-server -o to serve your built app.

+
+
+
+Http server running +
+
Figure 2. Http server running on localhost:8081.
+
+
+

 

+
+
+

In another console instance run ng serve (or nx serve basic-ng-pwa for Nx) to open the common app (not built).

+
+
+
+Angular server running +
+
Figure 3. Angular server running on localhost:4200.
+
+
+

 

+
+
+

The first difference can be found on Developer tools → application, here it is seen that the PWA application (left) has a service worker and the common (right) one does not.

+
+
+
+Application comparison +
+
Figure 4. Application service worker comparison.
+
+
+

 

+
+
+

If the "offline" box is checked, it will force a disconnection from the network. In situations where users do not have connectivity or have a slow one, the PWA can still be accessed and used.

+
+
+
+Online offline apps +
+
Figure 5. Offline application.
+
+
+

 

+
+
+

Finally, browser extensions like Lighthouse can be used to test whether an application is progressive or not.

+
+
+
+Lighthouse report +
+
Figure 6. Lighthouse report.
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-angular-theming.html b/docs/devon4ts/1.0/angular/guide-angular-theming.html new file mode 100644 index 00000000..ff69b746 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-angular-theming.html @@ -0,0 +1,774 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Material Theming

+
+
+

Angular Material library offers UI components for developers; those components follow Google Material design baselines, but characteristics like colors can be modified in order to adapt them to the needs of the client: corporate colors, corporate identity, dark themes, …

+
+
+
+
+

Theming basics

+
+
+

In Angular Material, a theme is created by mixing multiple colors. Colors and their light and dark variants form a palette. In general, a theme consists of the following palettes:

+
+
+
    +
  • +

    primary: Most used across screens and components.

    +
  • +
  • +

    accent: Floating action button and interactive elements.

    +
  • +
  • +

    warn: Error state.

    +
  • +
  • +

    foreground: Text and icons.

    +
  • +
  • +

    background: Element backgrounds.

    +
  • +
+
+
+
+Theme palette +
+
Figure 1. Palettes and variants.
+
+
+

In angular material, a palette is represented as a SCSS map.

+
+
+
+SCSS map +
+
Figure 2. SCSS map and palettes.
+
+
+ + + + + +
+ + +Some components can be forced to use primary, accent or warn palettes using the attribute color, for example: <mat-toolbar color="primary">. +
+
+
+
+
+

Pre-built themes

+
+
+

Available pre-built themes:

+
+
+
    +
  • +

    deeppurple-amber.css

    +
  • +
+
+
+
+`deeppurple-amber theme` +
+
Figure 3. deeppurple-amber theme.
+
+
+
    +
  • +

    indigo-pink.css

    +
  • +
+
+
+
+indigo-pink theme +
+
Figure 4. indigo-pink theme.
+
+
+
    +
  • +

    pink-bluegrey.css

    +
  • +
+
+
+
+` pink-bluegrey theme` +
+
Figure 5. pink-bluegrey theme.
+
+
+
    +
  • +

    purple-green.css

    +
  • +
+
+
+
+purple-green theme +
+
Figure 6. purple-green theme.
+
+
+

The pre-built themes can be added using @import.

+
+
+
+
@import '@angular/material/prebuilt-themes/deeppurple-amber.css';
+
+
+
+
+
+

Custom themes

+
+
+

Sometimes pre-built themes do not meet the needs of a project, because color schemas are too specific or do not incorporate branding colors, in those situations custom themes can be built to offer a better solution to the client.

+
+
+

For this topic, we are going to use a basic layout project that can be found in devon4ts-samples repository.

+
+
+
+
+

Basics

+
+
+

Before starting writing custom themes, there are some necessary things that have to be mentioned:

+
+
+
    +
  • +

    Add a default theme: The project mentioned before has just one global SCSS style sheet styles.scss that includes indigo-pink.scss which will be the default theme.

    +
  • +
  • +

    Add @import '~@angular/material/theming'; at the beginning of the every style sheet to be able to use angular material pre-built color palettes and functions.

    +
  • +
  • +

    Add @include mat-core(); once per project, so if you are writing multiple themes in multiple files you could import those files from a 'central' one (for example styles.scss). This includes all common styles that are used by multiple components.

    +
  • +
+
+
+
+Theme files structure +
+
Figure 7. Theme files structure.
+
+
+
+
+

Basic custom theme

+
+
+

To create a new custom theme, the .scss file containing it has to have imported the angular _theming.scss file (angular/material/theming) file and mat-core included. _theming.scss includes multiple color palettes and some functions that we are going to see below. The file for this basic theme is going to be named styles-custom-dark.scss.

+
+
+

First, declare new variables for primary, accent and warn palettes. Those variables are going to store the result of the function mat-palette.

+
+
+

mat-palette accepts four arguments: base color palette, main, lighter and darker variants (See Palettes and variants.) and returns a new palette including some additional map values: default, lighter and darker (see Figure 2. SCSS map and palettes). Only the first argument is mandatory.

+
+
+
Listing 1. File styles-custom-dark.scss.
+
+
$custom-dark-theme-primary: mat-palette($mat-pink);
+$custom-dark-theme-accent: mat-palette($mat-blue);
+$custom-dark-theme-warn: mat-palette($mat-red);
+
+
+
+

In this example we are using colors available in _theming.scss: mat-pink, mat-blue, mat-red. If you want to use a custom color you need to define a new map, for instance:

+
+
+
Listing 2. File styles-custom-dark.scss custom pink.
+
+
$my-pink: (
+    50 : #fcf3f3,
+    100 : #f9e0e0,
+    200 : #f5cccc,
+    300 : #f0b8b8,
+    500 : #ea9999,
+    900 : #db6b6b,
+    A100 : #ffffff,
+    A200 : #ffffff,
+    A400 : #ffeaea,
+    A700 : #ffd0d0,
+    contrast: (
+        50 : #000000,
+        100 : #000000,
+        200 : #000000,
+        300 : #000000,
+        900 : #000000,
+        A100 : #000000,
+        A200 : #000000,
+        A400 : #000000,
+        A700 : #000000,
+    )
+);
+
+$custom-dark-theme-primary: mat-palette($my-pink);
+...
+
+
+
+ + + + + +
+ + +Some pages allow you to create these palettes easily, for instance: http://mcg.mbitson.com +
+
+
+

Until now, we just have defined primary, accent and warn palettes but what about foreground and background? Angular material has two functions to change both:

+
+
+
    +
  • +

    mat-light-theme: Receives as arguments primary, accent and warn palettes and return a theme whose foreground is basically black (texts, icons, …​), the background is white and the other palettes are the received ones.

    +
  • +
+
+
+
+Custom light theme +
+
Figure 8. Custom light theme.
+
+
+
    +
  • +

    mat-dark-theme: Similar to mat-light-theme but returns a theme whose foreground is basically white and background black.

    +
  • +
+
+
+
+Custom dark theme +
+
Figure 9. Custom dark theme.
+
+
+

For this example we are going to use mat-dark-theme and save its result in $custom-dark-theme.

+
+
+
Listing 3. File styles-custom-dark.scss updated with mat-dark-theme.
+
+
...
+
+$custom-dark-theme: mat-dark-theme(
+  $custom-dark-theme-primary,
+  $custom-dark-theme-accent,
+  $custom-dark-theme-warn
+);
+
+
+
+

To apply the saved theme, we have to go to styles.scss and import our styles-custom-dark.scss and include a function called angular-material-theme using the theme variable as argument.

+
+
+
Listing 4. File styles.scss.
+
+
...
+@import 'styles-custom-dark.scss';
+@include angular-material-theme($custom-dark-theme);
+
+
+
+

If we have multiple themes it is necessary to add the include statement inside a css class and use it in src/index.html → app-root component.

+
+
+
Listing 5. File styles.scss updated with custom-dark-theme class.
+
+
...
+@import 'styles-custom-dark.scss';
+
+.custom-dark-theme {
+  @include angular-material-theme($custom-dark-theme);
+}
+
+
+
+
Listing 6. File src/index.html.
+
+
...
+<app-root class="custom-dark-theme"></app-root>
+...
+
+
+
+

This will apply $custom-dark-theme theme for the entire application.

+
+
+
+
+

Full custom theme

+
+
+

Sometimes it is necessary to customize different elements of the background and foreground; in those situations we have to create a new function similar to mat-light-theme and mat-dark-theme. Let’s focus on mat-light-theme:

+
+
+
Listing 7. Source code of mat-light-theme
+
+
@function mat-light-theme($primary, $accent, $warn: mat-palette($mat-red)) {
+  @return (
+    primary: $primary,
+    accent: $accent,
+    warn: $warn,
+    is-dark: false,
+    foreground: $mat-light-theme-foreground,
+    background: $mat-light-theme-background,
+  );
+}
+
+
+
+

As we can see, mat-light-theme takes three arguments and returns a map including them as primary, accent and warn color; but there are three more keys in that map: is-dark, foreground and background.

+
+
+
    +
  • +

    is-dark: Boolean true if it is a dark theme, false otherwise.

    +
  • +
  • +

    background: Map that stores the color for multiple background elements.

    +
  • +
  • +

    foreground: Map that stores the color for multiple foreground elements.

    +
  • +
+
+
+

To show which elements can be colored lets create a new theme in a file styles-custom-cap.scss:

+
+
+
Listing 8. File styles-custom-cap.scss: Background and foreground variables.
+
+
@import '~@angular/material/theming';
+
+// custom background and foreground palettes
+$my-cap-theme-background: (
+  status-bar: #0070ad,
+  app-bar: map_get($mat-blue, 900),
+  background: #12abdb,
+  hover: rgba(white, 0.04),
+  card: map_get($mat-red, 800),
+  dialog: map_get($mat-grey, 800),
+  disabled-button: $white-12-opacity,
+  raised-button: map-get($mat-grey, 800),
+  focused-button: $white-6-opacity,
+  selected-button: map_get($mat-grey, 900),
+  selected-disabled-button: map_get($mat-grey, 800),
+  disabled-button-toggle: black,
+  unselected-chip: map_get($mat-grey, 700),
+  disabled-list-option: black,
+);
+
+$my-cap-theme-foreground: (
+  base: yellow,
+  divider: $white-12-opacity,
+  dividers: $white-12-opacity,
+  disabled: rgba(white, 0.3),
+  disabled-button: rgba(white, 0.3),
+  disabled-text: rgba(white, 0.3),
+  hint-text: rgba(white, 0.3),
+  secondary-text: rgba(white, 0.7),
+  icon: white,
+  icons: white,
+  text: white,
+  slider-min: white,
+  slider-off: rgba(white, 0.3),
+  slider-off-active: rgba(white, 0.3),
+);
+
+
+
+

Function which uses the variables defined before to create a new theme:

+
+
+
Listing 9. File styles-custom-cap.scss: Creating a new theme function.
+
+
// instead of creating a theme with mat-light-theme or mat-dark-theme,
+// we will create our own theme-creating function that lets us apply our own foreground and background palettes.
+@function create-my-cap-theme($primary, $accent, $warn: mat-palette($mat-red)) {
+  @return (
+    primary: $primary,
+    accent: $accent,
+    warn: $warn,
+    is-dark: false,
+    foreground: $my-cap-theme-foreground,
+    background: $my-cap-theme-background
+  );
+}
+
+
+
+

Calling the new function and storing its value in $custom-cap-theme.

+
+
+
Listing 10. File styles-custom-cap.scss: Storing the new theme.
+
+
// We use create-my-cap-theme instead of mat-light-theme or mat-dark-theme
+$custom-cap-theme-primary: mat-palette($mat-green);
+$custom-cap-theme-accent: mat-palette($mat-blue);
+$custom-cap-theme-warn: mat-palette($mat-red);
+
+$custom-cap-theme: create-my-cap-theme(
+  $custom-cap-theme-primary,
+  $custom-cap-theme-accent,
+  $custom-cap-theme-warn
+);
+
+
+
+

After defining our new theme, we can import it from styles.scss.

+
+
+
Listing 11. File styles.scss updated with custom-cap-theme class.
+
+
...
+@import 'styles-custom-cap.scss';
+.custom-cap-theme {
+  @include angular-material-theme($custom-cap-theme);
+}
+
+
+
+
+
+

Multiple themes and overlay-based components

+
+
+

Certain components (e.g. menu, select, dialog, etc.) that are inside a global overlay container require an additional step to be affected by the theme’s css class selector.

+
+
+
Listing 12. File app.module.ts
+
+
import {OverlayContainer} from '@angular/cdk/overlay';
+
+@NgModule({
+  // ...
+})
+export class AppModule {
+  constructor(overlayContainer: OverlayContainer) {
+    overlayContainer.getContainerElement().classList.add('custom-cap-theme');
+  }
+}
+
+
+
+
+ +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-app-initializer.html b/docs/devon4ts/1.0/angular/guide-app-initializer.html new file mode 100644 index 00000000..0ef964fb --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-app-initializer.html @@ -0,0 +1,790 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

APP_INITIALIZER

+
+ +
+
+
+

What is the APP_INITIALIZER pattern

+
+
+

The APP_INITIALIZER pattern allows an application to choose which configuration is going to be used at the start of the application. This is useful because it allows you to set up different configurations, for example, for docker or a remote configuration. This provides benefits since this is done at runtime, so there’s no need to recompile the whole application to switch configurations.

+
+
+
+
+

What is APP_INITIALIZER

+
+
+

APP_INITIALIZER allows to provide a service in the initialization of the application in a @NgModule. It also allows to use a factory, allowing to create a singleton in the same service. An example can be found in MyThaiStar /core/config/config.module.ts:

+
+
+
+
+

==

+
+
+

The provider expects the return of a Promise. If you are using Observables, calling the method toPromise() will allow a switch from Observable to Promise.

+
+
+
+
import { NgModule, APP_INITIALIZER } from '@angular/core';
+import { HttpClientModule } from '@angular/common/http';
+
+import { ConfigService } from './config.service';
+
+@NgModule({
+  imports: [HttpClientModule],
+  providers: [
+    ConfigService,
+    {
+      provide: APP_INITIALIZER,
+      useFactory: ConfigService.factory,
+      deps: [ConfigService],
+      multi: true,
+    },
+  ],
+})
+export class ConfigModule {}
+
+
+
+

This is going to allow the creation of a ConfigService where, using a singleton, the service is going to load an external config depending on a route. This dependence with a route, allows to setup different configuration for docker etc. This is seen in the ConfigService of MyThaiStar:

+
+
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Config, config } from './config';
+
+@Injectable()
+export class ConfigService {
+  constructor(private httpClient: HttpClient) {}
+
+  static factory(appLoadService: ConfigService) {
+    return () => appLoadService.loadExternalConfig();
+  }
+
+  // this method gets external configuration calling /config endpoint
+  //and merges into config object
+  loadExternalConfig(): Promise<any> {
+    if (!environment.loadExternalConfig) {
+      return Promise.resolve({});
+    }
+
+    const promise = this.httpClient
+      .get('/config')
+      .toPromise()
+      .then((settings) => {
+        Object.keys(settings || {}).forEach((k) => {
+          config[k] = settings[k];
+        });
+        return settings;
+      })
+      .catch((error) => {
+        return 'ok, no external configuration';
+      });
+
+    return promise;
+  }
+
+  getValues(): Config {
+    return config;
+  }
+}
+
+
+
+

As it is mentioned earlier, you can see the use of a factory to create a singleton at the start. After that, loadExternalConfig is going to look for a Boolean inside the corresponding environment file inside the path src/environments/, this Boolean loadExternalConfig is going to easily allow to switch to a external config. If it is true, it generates a promise that overwrites the parameters of the local config, allowing to load the external config. Finally, the last method getValues() is going to allow to return the file config with the values (overwritten or not). The local config file from MyThaiStar can be seen here:

+
+
+
+
export enum BackendType {
+  IN_MEMORY,
+  REST,
+  GRAPHQL,
+}
+
+interface Role {
+  name: string;
+  permission: number;
+}
+
+interface Lang {
+  label: string;
+  value: string;
+}
+
+export interface Config {
+  version: string;
+  backendType: BackendType;
+  restPathRoot: string;
+  restServiceRoot: string;
+  pageSizes: number[];
+  pageSizesDialog: number[];
+  roles: Role[];
+  langs: Lang[];
+}
+
+export const config: Config = {
+  version: 'dev',
+  backendType: BackendType.REST,
+  restPathRoot: 'http://localhost:8081/mythaistar/',
+  restServiceRoot: 'http://localhost:8081/mythaistar/services/rest/',
+  pageSizes: [8, 16, 24],
+  pageSizesDialog: [4, 8, 12],
+  roles: [
+    { name: 'CUSTOMER', permission: 0 },
+    { name: 'WAITER', permission: 1 },
+  ],
+  langs: [
+    { label: 'English', value: 'en' },
+    { label: 'Deutsch', value: 'de' },
+    { label: 'Español', value: 'es' },
+    { label: 'Català', value: 'ca' },
+    { label: 'Français', value: 'fr' },
+    { label: 'Nederlands', value: 'nl' },
+    { label: 'हिन्दी', value: 'hi' },
+    { label: 'Polski', value: 'pl' },
+    { label: 'Русский', value: 'ru' },
+    { label: 'български', value: 'bg' },
+  ],
+};
+
+
+
+

Finally, inside a environment file src/environments/environment.ts the use of the Boolean loadExternalConfig is seen:

+
+
+
+
// The file contents for the current environment will overwrite these during build.
+// The build system defaults to the dev environment which uses `environment.ts`, but if you do
+// `ng build --env=prod` then `environment.prod.ts` will be used instead.
+// The list of which env maps to which file can be found in `.angular-cli.json`.
+
+export const environment: {
+  production: boolean;
+  loadExternalConfig: boolean;
+} = { production: false, loadExternalConfig: false };
+
+
+
+
+
+

Creating a APP_INITIALIZER configuration

+
+
+

This section is going to be used to create a new APP_INITIALIZER basic example. For this, a basic app with Angular is going to be generated using ng new "appname", substituting appname with the name you chose for the app. +If you are using Nx, the command would be nx generate @nrwl/angular:app "appname" in your Nx workspace. Click here to get started with using Nx.

+
+
+
+
+

Setting up the config files

+
+ +
+
+
+

Docker external configuration (Optional)

+
+
+

This section is only done if there is a docker configuration in the app you are setting up this type of configuration.

+
+
+

1.- Create in the root folder /docker-external-config.json. This external config is going to be used when the application is loaded with docker (if the Boolean to load the external configuration is set to true). Here you need to add all the config parameter you want to load with docker:

+
+
+
+
{
+    "version": "docker-version"
+}
+
+
+
+

2.- In the root, in the file /Dockerfile angular is going to copy the docker-external-config.json that was created before into the Nginx html route:

+
+
+
+
....
+COPY docker-external-config.json /usr/share/nginx/html/docker-external-config.json
+....
+
+
+
+
+
+

External json configuration

+
+
+

1.- Create a json file in the route /src/external-config.json. This external config is going to be used when the application is loaded with the start script (if the Boolean to load the external configuration is set to true). Here you need to add all the config parameter you want to load:

+
+
+
+
{
+    "version": "external-config"
+}
+
+
+
+

2.- The file named /angular.json (/workspace.json if using Nx) located at the root is going to be modified to add the file external-config.json that was just created to both "assets" inside Build and Test:

+
+
+
+
	....
+	"build": {
+          ....
+            "assets": [
+              "src/assets",
+              "src/data",
+              "src/favicon.ico",
+              "src/manifest.json",
+              "src/external-config.json"
+            ]
+	        ....
+        "test": {
+	  ....
+	   "assets": [
+              "src/assets",
+              "src/data",
+              "src/favicon.ico",
+              "src/manifest.json",
+              "src/external-config.json"
+            ]
+	  ....
+
+
+
+
+
+

Setting up the proxies

+
+
+

This step is going to setup two proxies. This is going to allow to load the config desired by the context, in case that it is using docker to load the app or in case it loads the app with angular. Loading different files is made possible by the fact that the ConfigService method loadExternalConfig() looks for the path /config.

+
+
+
+
+

Docker (Optional)

+
+
+

1.- This step is going to be for docker. Add docker-external-config.json to Nginx configuration (/nginx.conf) that is in the root of the application:

+
+
+
+
....
+  location  ~ ^/config {
+        alias /usr/share/nginx/html/docker-external-config.json;
+  }
+....
+
+
+
+
+
+

External Configuration

+
+
+

1.- Now the file /proxy.conf.json needs to be created/modified; this file can be found in the root of the application. In this file you can add the route of the external configuration in target and the name of the file in ^/config:

+
+
+
+
....
+  "/config": {
+    "target": "http://localhost:4200",
+    "secure": false,
+    "pathRewrite": {
+      "^/config": "/external-config.json"
+    }
+  }
+....
+
+
+
+

2.- The file package.json found in the root of the application is going to use the start script to load the proxy config that was just created:

+
+
+
+
  "scripts": {
+....
+    "start": "ng serve --proxy-config proxy.conf.json -o",
+....
+
+
+
+

If using Nx, you need to run the command manually:

+
+
+

nx run angular-app-initializer:serve:development --proxyConfig=proxy.conf.json --o

+
+
+
+
+

Adding the loadExternalConfig Boolean to the environments

+
+
+

In order to load an external config we need to add the loadExternalConfig Boolean to the environments. To do so, inside the folder environments/ the files are going to get modified adding this Boolean to each environment that is going to be used. In this case, only two environments are going to be modified (environment.ts and environment.prod.ts). Down below there is an example of the modification being done in the environment.prod.ts:

+
+
+
+
export const environment: {
+  production: boolean;
+  loadExternalConfig: boolean;
+} = { production: false, loadExternalConfig: false };
+
+
+
+

In the file in first instance there is the declaration of the types of the variables. After that, there is the definition of those variables. This variable loadExternalConfig is going to be used by the service, allowing to setup a external config just by switching the loadExternalConfig to true.

+
+
+
+
+

Creating core configuration service

+
+
+

In order to create the whole configuration module, three files are going to be created:

+
+
+

1.- Create in the core app/core/config/ a config.ts

+
+
+
+
  export interface Config {
+    version: string;
+  }
+
+  export const config: Config = {
+    version: 'dev'
+  };
+
+
+
+

Taking a look to this file, it creates a interface (Config) that is going to be used by the variable that exports (export const config: Config). This variable config is going to be used by the service that is going to be created.

+
+
+

2.- Create in the core app/core/config/ a config.service.ts:

+
+
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Config, config } from './config';
+
+@Injectable()
+export class ConfigService {
+  constructor(private httpClient: HttpClient) {}
+
+  static factory(appLoadService: ConfigService) {
+    return () => appLoadService.loadExternalConfig();
+  }
+
+  // this method gets external configuration calling /config endpoint
+  // and merges into config object
+  loadExternalConfig(): Promise<any> {
+    if (!environment.loadExternalConfig) {
+      return Promise.resolve({});
+    }
+
+    const promise = this.httpClient
+      .get('/config')
+      .toPromise()
+      .then((settings) => {
+        Object.keys(settings || {}).forEach((k) => {
+          config[k] = settings[k];
+        });
+        return settings;
+      })
+      .catch((error) => {
+        return 'ok, no external configuration';
+      });
+
+    return promise;
+  }
+
+  getValues(): Config {
+    return config;
+  }
+}
+
+
+
+

As it was explained in previous steps, at first, there is a factory that uses the method loadExternalConfig(), this factory is going to be used in later steps in the module. After that, the loadExternalConfig() method checks if the Boolean in the environment is false. If it is false it will return the promise resolved with the normal config. Else, it is going to load the external config in the path (/config), and overwrite the values from the external config to the config that’s going to be used by the app, this is all returned in a promise.

+
+
+

3.- Create in the core a module for the config app/core/config/ a config.module.ts:

+
+
+
+
import { NgModule, APP_INITIALIZER } from '@angular/core';
+import { HttpClientModule } from '@angular/common/http';
+
+import { ConfigService } from './config.service';
+
+@NgModule({
+  imports: [HttpClientModule],
+  providers: [
+    ConfigService,
+    {
+      provide: APP_INITIALIZER,
+      useFactory: ConfigService.factory,
+      deps: [ConfigService],
+      multi: true,
+    },
+  ],
+})
+export class ConfigModule {}
+
+
+
+

As seen earlier, the ConfigService is added to the module. In this addition, the app is initialized (provide) and it uses the factory that was created in the ConfigService, loading the config with or without the external values depending on the Boolean in the config.

+
+
+
+
+

Using the Config Service

+
+
+

As a first step, in the file /app/app.module.ts the ConfigModule created earlier in the other step is going to be imported:

+
+
+
+
  imports: [
+    ....
+    ConfigModule,
+    ....
+  ]
+
+
+
+

After that, the ConfigService is going to be injected into the app.component.ts

+
+
+
+
....
+import { ConfigService } from './core/config/config.service';
+....
+export class AppComponent {
+....
+  constructor(public configService: ConfigService) { }
+....
+
+
+
+

Finally, for this demonstration app, the component app/app.component.html is going to show the version of the config it is using at that moment.

+
+
+
+
<div style="text-align:center">
+  <h1>
+    Welcome to {{ title }}!
+  </h1>
+</div>
+<h2>Here is the configuration version that is using angular right now: {{configService.getValues().version}}</h2>
+
+
+
+
+
+

Final steps

+
+
+

The script start that was created earlier in the package.json (npm start) is going to be used to start the application. After that, modifying the Boolean loadExternalConfig inside the corresponding environment file inside /app/environments/ should show the different config versions.

+
+
+
+loadExternalConfigFalse +
+
+
+
+loadExternalConfigTrue +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-blob-streaming.html b/docs/devon4ts/1.0/angular/guide-blob-streaming.html new file mode 100644 index 00000000..bda74fd4 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-blob-streaming.html @@ -0,0 +1,552 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Angular File Uploading

+
+
+

This sample demonstrates how to upload a file to a server. For this, we will need to use an Angular form. In this case we have chosen a simple template-driven form, as the goal of this sample is just to show the process to upload a file. You can learn more about Forms in Angular in the official documentation.

+
+
+ + + + + +
+ + +The back-end implementation for this sample is located here: +devon4j-blob-streaming +
+
+
+
+
+

FormData

+
+
+

FormData is an object where you can store key-value pairs that allows you to send through XMLHttpRequest. You can create a FormData object as simply as:

+
+
+
+
....
+const formData = new FormData();
+formData.append('key', value);
+....
+
+
+
+
+
+

Let’s begin

+
+
+

I assume you already have your Angular application running; if not, you can have a look at our AngularBasicPWA sample.

+
+
+

We are going to use Angular Material components, so it is necessary to install the dependency with the following command:

+
+
+

npm install --save @angular/material @angular/cdk @angular/animations

+
+
+
+
+

Importing necessary components

+
+
+

These are the components I am going to use for our sample; they are Material HTML components. To use the template-driven form you do not need to import any component. I am going to create a module called Core where I place the needed imports. After that, I will import the Core module in my main App module, and I will be able to use these components in any part of my application.

+
+
+
+
....
+@NgModule({
+  declarations: [],
+  imports: [CommonModule],
+  exports: [
+    MatButtonModule,
+    MatFormFieldModule,
+    MatInputModule,
+    FormsModule,
+    MatProgressBarModule,
+  ],
+})
+export class CoreModule {}
+....
+
+
+
+

FormsModule Will allow us data binding through html and component.

+
+
+

The next step will be to create a component to place the uploading component: +ng generate component uploader

+
+
+

So this will be our project structure so far:

+
+
+
+folder structure +
+
+
+

Then, in the app.component.html we need to add the selector for our new component, so it will be represented there. We are not going to create any route for this sample. We can also modify the values for the toolbar.

+
+
+
+
....
+<div class="toolbar" role="banner">
+  <img
+    width="40"
+    alt="Angular Logo"
+    src="data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAyNTAgMjUwIj4KICAgIDxwYXRoIGZpbGw9IiNERDAwMzEiIGQ9Ik0xMjUgMzBMMzEuOSA2My4ybDE0LjIgMTIzLjFMMTI1IDIzMGw3OC45LTQzLjcgMTQuMi0xMjMuMXoiIC8+CiAgICA8cGF0aCBmaWxsPSIjQzMwMDJGIiBkPSJNMTI1IDMwdjIyLjItLjFWMjMwbDc4LjktNDMuNyAxNC4yLTEyMy4xTDEyNSAzMHoiIC8+CiAgICA8cGF0aCAgZmlsbD0iI0ZGRkZGRiIgZD0iTTEyNSA1Mi4xTDY2LjggMTgyLjZoMjEuN2wxMS43LTI5LjJoNDkuNGwxMS43IDI5LjJIMTgzTDEyNSA1Mi4xem0xNyA4My4zaC0zNGwxNy00MC45IDE3IDQwLjl6IiAvPgogIDwvc3ZnPg== "
+  />
+  <span>File uploader</span>
+</div>
+
+<app-uploader></app-uploader>
+
+<router-outlet></router-outlet>
+....
+
+
+
+

Now, our new component uploader will be loaded in the root page. Let’s add some code to it.

+
+
+
+
+

Uploader component

+
+
+

I will begin editing the html file. First thing we need is an input component, which will allow us to select the file to upload. Furthermore, I added a button which will be responsible for calling the upload file window. Apart from this, there are also two labels and a progress bar. Labels will give feedback about the file upload request, both with an if clause with uploadSuccess and uploadFail global variables that will be in uploader.component.ts. The progress bar will show the progress of the file being uploaded.

+
+
+
+
....
+  <div class="upload">
+    <div>
+      <button mat-raised-button (click)="upload()">Upload file</button>
+    </div>
+        <label mat-label *ngIf="uploadSuccess"
+      >The file was upload succesfully!</label
+    >
+    <label mat-label *ngIf="uploadFail"
+      >There was an error uploading the file</label
+    >
+    <input
+      type="file"
+      #fileUpload
+      name="fileUpload"
+      accept="*"
+      style="display: none"
+    />
+  </div>
+    <mat-progress-bar
+    *ngIf="fileInProgress"
+    [value]="fileProgress"
+  ></mat-progress-bar>
+</div>
+....
+
+
+
+

The button will call the upload() method in our uploader.component.ts, and as we can see, I assigned an identifier to the input, #fileUpload, so we can reference it from uploader.component.ts. It accepts any file, and the display none style is there because the input will be triggered when we click the button, so it is not necessary for it to be present in the view.

+
+
+

Our html view should look something similar to this:

+
+
+
+html view 1 +
+
+
+

Let’s start in our .ts file. In order to interact with the input #fileUpload, it is necessary to declare it like this:

+
+
+
+
....
+@ViewChild('fileUpload') fileUpload: ElementRef;
+constructor() {}
+....
+
+
+
+

And then, the upload() method that the button in html is calling:

+
+
+
+
....
+ upload(): void {
+    this.fileUpload.nativeElement.click();
+
+    this.fileUpload.nativeElement.onchange = () => {
+      const file = this.fileUpload.nativeElement.files[0];
+      this.uploadFile(file);
+    };
+  }
+....
+
+
+
+

The click method at first line will open the file explorer in order to select the desired file to upload, and on change method will be called when a new file is selected, so a change is detected. Then, uploadFile(…​) method will be called.

+
+
+

Before explain this uploadFile(…​) method, there is something still missing, a service to communicate with back-end through HTTP. +I am going to place the service in a service folder inside our uploader component folder. +Execute the following command ng generate service data and paste the following code

+
+
+
+
....
+export class DataService {
+  SERVER_URL = 'http://localhost:8081/services/rest/binary/v1/';
+
+  constructor(private httpClient: HttpClient) {}
+
+  uploadFile(formdData: FormData): Observable<HttpEvent<BinaryObject>> {
+    const headers = new HttpHeaders({
+      'Content-Type': 'multipart/form-data',
+    });
+
+    return this.httpClient.post<BinaryObject>(
+      this.SERVER_URL + 'binaryobject',
+      formdData,
+      {
+        headers,
+        reportProgress: true,
+        observe: 'events',
+      }
+    );
+  }
+}
+....
+
+
+
+

We have declared the URL as a global variable. Also is necessary to set the content-type as multipart/form-data in the headers sections, that will be the body of the request. There is also two options to talk about:

+
+
+
    +
  • +

    reportProgress: to have a feedback about the file upload so we can show percentage on the view.

    +
  • +
  • +

    observe: 'events' in order to receive this type of event information.

    +
  • +
+
+
+

In uploader.component.ts is missing uploadFile(…​) method.

+
+
+
+
....
+  uploadFile(file: File): void {
+    const formDataBody = this.getFormData(file);
+    this.dataService.uploadFile(formDataBody).subscribe(
+      (event) => {
+        if (event.type == HttpEventType.UploadProgress) {
+          this.fileProgress = Math.round((100 * event.loaded) / event.total);
+        } else if (event instanceof HttpResponse) {
+          this.fileInProgress = false;
+          this.uploadSuccess = true;
+        }
+      },
+      (err) => {
+        console.log('Could not upload the file!');
+        this.uploadFail = true;
+      }
+    );
+  }
+....
+
+
+
+

Notice that whether we have a correct response or an error response, we set the variable this.uploadSuccess or this.uploadFail to show the labels in the HTML giving feedback. +Once we call the service to do the HTTP request, we expect two types of response (three if we count the error): the first one is the progress of the upload, which will update the progress bar through the this.fileProgress variable. The second one is a response when the request is finished. +That is why the type of the response is checked against HttpEventType or HttpResponse.

+
+
+

Now, if you have your back-end running, you should be able to upload a file, and check in DB that all the process worked fine.

+
+
+ + + + + +
+ + +Download method is not implemented yet. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-component-decomposition.html b/docs/devon4ts/1.0/angular/guide-component-decomposition.html new file mode 100644 index 00000000..7401acd3 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-component-decomposition.html @@ -0,0 +1,504 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Component Decomposition

+
+
+

When implementing a new requirement there are a few design decisions, which need to be considered. +A decomposition in Smart and Dumb Components should be done first. +This includes the definition of state and responsibilities. +Implementing a new dialog will most likely be done by defining a new Smart Component with multiple Dumb Component children.

+
+
+

In the component tree this would translate to the definition of a new sub-tree.

+
+
+
+Component Tree With Highlighted Sub Tree +
+
Figure 1. Component Tree with highlighted sub-tree
+
+
+
+
+

Defining Components

+
+
+

The following gives an example for component decomposition. +Shown is a screenshot from a style guide to be implemented. +It is a widget called Listpicker.

+
+
+

The basic function is an input field accepting direct input. +So typing otto puts otto inside the FormControl. +With arrow down key or by clicking the icon displayed in the inputs right edge a dropdown is opened. +Inside possible values can be selected and filtered beforehand. +After pressing arrow down key the focus should move into the filter input field. +Up and down arrow keys can be used to select an element from the list. +Typing into the filter input field filters the list from which the elements can be selected. +The current selected element is highlighted with green background color.

+
+
+
+Component Decomposition Example 1v2 +
+
Figure 2. Component decomposition example before
+
+
+

What should be done, is to define small reusable Dumb Components. +This way the complexity becomes manageable. +In the example every colored box describes a component with the purple box being a Smart Component.

+
+
+
+Component Decomposition Example 2v2 +
+
Figure 3. Component decomposition example after
+
+
+

This leads to the following component tree.

+
+
+
+Component Decomposition Example component tree +
+
Figure 4. Component decomposition example component tree
+
+
+

Note the uppermost component is a Dumb Component. +It is a wrapper for the label and the component to be displayed inside a form. +The Smart Component is Listpicker. +This way the widget can be reused without a form needed.

+
+
+

A widget is a typical Smart Component to be shared across feature modules. +So the SharedModule is the place for it to be defined.

+
+
+
+
+

Defining state

+
+
+

Every UI has state. +There are different kinds of state, for example

+
+
+
    +
  • +

    View State: e.g. is a panel open, a css transition pending, etc.

    +
  • +
  • +

    Application State: e.g. is a payment pending, current URL, user info, etc.

    +
  • +
  • +

    Business Data: e.g. products loaded from back-end

    +
  • +
+
+
+

It is good practice to base the component decomposition on the state handled by a component and to define a simplified state model beforehand. +Starting with the parent - the Smart Component:

+
+
+
    +
  • +

    What overall state does the dialog have: e.g. loading, error, valid data loaded, valid input, invalid input, etc. +Every defined value should correspond to an overall appearance of the whole dialog.

    +
  • +
  • +

    What events can occur to the dialog: e.g. submitting a form, changing a filter, pressing buttons, pressing keys, etc.

    +
  • +
+
+
+

For every Dumb Component:

+
+
+
    +
  • +

    What data does a component display: e.g. a header text, user information to be displayed, a loading flag, etc.
    +This will be a slice of the overall state of the parent Smart Component. +In general a Dumb Component presents a slice of its parent Smart Components state to the user.

    +
  • +
  • +

    What events can occur: keyboard events, mouse events, etc.
    +These events are all handled by its parent Smart Component - every event is passed up the tree to be handled by a Smart Component.

    +
  • +
+
+
+

This information should be reflected inside the modeled state. +The implementation is a TypeScript type - an interface or a class describing the model.

+
+
+

So there should be a type describing all state relevant for a Smart Component. +An instance of that type is sent down the component tree at runtime. +Not every Dumb Component will need the whole state. +For instance a single Dumb Component could need only a single string.

+
+
+

The state model for the previous Listpicker example is shown in the following listing.

+
+
+
Listing 1. Listpicker state model
+
+
export class ListpickerState {
+
+  items: {}[]|undefined;
+  columns = ['key', 'value'];
+  keyColumn = 'key';
+  displayValueColumn = 'value';
+  filteredItems: {}[]|undefined;
+  filter = '';
+  placeholder = '';
+  caseSensitive = true;
+  isDisabled = false;
+  isDropdownOpen = false;
+  selectedItem: {}|undefined;
+  displayValue = '';
+
+}
+
+
+
+

Listpicker holds an instance of ListpickerState which is passed down the component tree via @Input() bindings in the Dumb Components. +Events emitted by children - Dumb Components - create a new instance of ListpickerState based on the current instance and the event and its data. +So a state transition is just setting a new instance of ListpickerState. +Angular Bindings propagate the value down the tree after exchanging the state.

+
+
+
Listing 2. Listpicker State transition
+
+
export class ListpickerComponent {
+
+  // initial default values are set
+  state = new ListpickerState();
+
+  /** User changes filter */
+  onFilterChange(filter: string): void {
+    // apply filter ...
+    const filteredList = this.filterService.filter(...);
+
+    // important: A new instance is created, instead of altering the existing one.
+    //            This makes change detection easier and prevents hard to find bugs.
+    this.state = Object.assign({}, this.state, {
+      filteredItems: filteredList,
+      filter: filter
+    });
+  }
+
+}
+
+
+
+
Note:
+

It is not always necessary to define the model as independent type. +So there would be no state property and just properties for every state defined directly in the component class. +When complexity grows and state becomes larger this is usually a good idea. +If the state should be shared between Smart Components a store is to be used.

+
+
+
+
+

When are Dumb Components needed

+
+
+

Sometimes it is not necessary to perform a full decomposition. The architecture does not enforce it generally. What you should keep in mind is, that there is always a point when it becomes recommendable.

+
+
+

For example a template with 800 lines of code is:

+
+
+
    +
  • +

    not understandable

    +
  • +
  • +

    not maintainable

    +
  • +
  • +

    not testable

    +
  • +
  • +

    not reusable

    +
  • +
+
+
+

So when implementing a template with more than 50 lines of code you should think about decomposition.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-consuming-rest-services.html b/docs/devon4ts/1.0/angular/guide-consuming-rest-services.html new file mode 100644 index 00000000..5236279d --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-consuming-rest-services.html @@ -0,0 +1,527 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Consuming REST services

+
+
+

A good introduction to working with Angular HttpClient can be found in Angular Docs

+
+
+

This guide will cover how to embed Angular HttpClient in the application architecture. +For back-end requests a special service with the suffix Adapter needs to be defined.

+
+
+
+
+

Defining Adapters

+
+
+

It is a good practice to have an Angular service whose single responsibility is to call the back-end and parse the received value to a transfer data model (e.g. Swagger generated TOs). +Those services need to have the suffix Adapter to make them easy to recognize.

+
+
+
+Adapters handle back-end communication +
+
Figure 1. Adapters handle back-end communication
+
+
+

As illustrated in the figure a Use Case service does not use Angular HttpClient directly but uses an adapter. +A basic adapter could look like this:

+
+
+
Listing 1. Example adapter
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Observable } from 'rxjs/Observable';
+
+import { FlightTo } from './flight-to';
+
+@Injectable({
+ providedIn: 'root',
+})
+export class FlightsAdapter {
+
+  constructor(
+    private httpClient: HttpClient
+  ) {}
+
+  getFlights(): Observable<FlightTo> {
+    return this.httpClient.get<FlightTo>('/relative/url/to/flights');
+  }
+
+}
+
+
+
+

The adapters should use a well-defined transfer data model. +This could be generated from server endpoints with CobiGen, Swagger, typescript-maven-plugin, etc. +If inside the application there is a business model defined, the adapter has to parse to the transfer model. +This is illustrated in the following listing.

+
+
+
Listing 2. Example adapter mapping from business model to transfer model
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Observable } from 'rxjs/Observable';
+import { map } from 'rxjs/operators';
+
+import { FlightTo } from './flight-to';
+import { Flight } from '../../../model/flight';
+
+@Injectable({
+ providedIn: 'root',
+})
+export class FlightsAdapter {
+
+  constructor(
+    private httpClient: HttpClient
+  ) {}
+
+  updateFlight(flight: Flight): Observable<Flight> {
+    const to = this.mapFlight(flight);
+
+    return this.httpClient.post<FlightTo>('/relative/url/to/flights', to).pipe(
+      map(to => this.mapFlightTo(to))
+    );
+  }
+
+  private mapFlight(flight: Flight): FlightTo {
+    // mapping logic
+  }
+
+  private mapFlightTo(flightTo: FlightTo): Flight {
+    // mapping logic
+  }
+
+}
+
+
+
+
+
+

Token management

+
+
+

In most cases the access to back-end API is secured using well known mechanisms as CSRF, JWT or both. In these cases the front-end application must manage the tokens that are generated when the user authenticates. More concretely it must store them to include them in every request automatically. Obviously, when user logs out these tokens must be removed from localStorage, memory, etc.

+
+
+
+
+

Store security token

+
+
+

In order to make this guide simple we are going to store the token in memory. Therefore, if we consider that we already have a login mechanism implemented, we would like to store the token using an auth.service.ts:

+
+
+
+
import { Injectable } from '@angular/core';
+import { Router } from '@angular/router';
+
+@Injectable({
+  providedIn: 'root',
+})
+export class AuthService {
+  private loggedIn = false;
+  private token: string;
+
+  constructor(public router: Router) {}
+
+  public isLogged(): boolean {
+    return this.loggedIn || false;
+  }
+
+  public setLogged(login: boolean): void {
+    this.loggedIn = login;
+  }
+
+  public getToken(): string {
+    return this.token;
+  }
+
+  public setToken(token: string): void {
+    this.token = token;
+  }
+}
+
+
+
+

Using the previous service we will be able to store the token obtained in the login request using the method setToken(token). Please consider that, if you want a more sophisticated approach using localStorage API, you will need to modify this service accordingly.

+
+
+
+
+

Include token in every request

+
+
+

Now that the token is available in the application it is necessary to include it in every request to a protected API endpoint. Instead of modifying all the HTTP requests in our application, Angular provides a class to intercept every request (and every response if we need to) called HttpInterceptor. Let’s create a service called http-interceptor.service.ts to implement the intercept method of this class:

+
+
+
+
import {
+  HttpEvent,
+  HttpHandler,
+  HttpInterceptor,
+  HttpRequest,
+} from '@angular/common/http';
+import { Injectable } from '@angular/core';
+import { Observable } from 'rxjs';
+import { environment } from '../../../environments/environment';
+import { AuthService } from './auth.service';
+
+@Injectable()
+export class HttpRequestInterceptorService implements HttpInterceptor {
+
+  constructor(private auth: AuthService) {}
+
+  intercept(
+    req: HttpRequest<any>,
+    next: HttpHandler,
+  ): Observable<HttpEvent<any>> {
+    // Get the auth header from the service.
+    const authHeader: string = this.auth.getToken();
+    if (authHeader) {
+      let authReq: HttpRequest<any>;
+
+      // CSRF
+      if (environment.security == 'csrf') {
+        authReq = req.clone({
+          withCredentials: true,
+          setHeaders: { 'x-csrf-token': authHeader },
+        });
+      }
+
+      // JWT
+      if (environment.security == 'jwt') {
+        authReq = req.clone({
+          setHeaders: { Authorization: authHeader },
+        });
+      }
+
+      return next.handle(authReq);
+    } else {
+      return next.handle(req);
+    }
+  }
+}
+
+
+
+

As you may notice, this service is making use of an environment field environment.security to determine if we are using JWT or CSRF in order to inject the token accordingly. In your application you can combine both if necessary.

+
+
+

Configure environment.ts file to use the CSRF/JWT.

+
+
+
+
security: 'csrf'
+
+
+
+

The authHeader used is obtained using the injected service AuthService already presented above.

+
+
+

In order to activate the interceptor we need to provide it in our app.module.ts or core.module.ts depending on the application structure. Let’s assume that we are using the latter and the interceptor file is inside a security folder:

+
+
+
+
...
+import { HttpRequestInterceptorService } from './security/http-request-interceptor.service';
+...
+
+@NgModule({
+  imports: [...],
+  exports: [...],
+  declarations: [],
+  providers: [
+    ...
+    {
+      provide: HTTP_INTERCEPTORS,
+      useClass: HttpRequestInterceptorService,
+      multi: true,
+    },
+  ],
+})
+export class CoreModule {}
+
+
+
+

Angular will now automatically modify every request and include the token in the header when appropriate.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-creating-angular-app-with-nx-cli.html b/docs/devon4ts/1.0/angular/guide-creating-angular-app-with-nx-cli.html new file mode 100644 index 00000000..c8638917 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-creating-angular-app-with-nx-cli.html @@ -0,0 +1,408 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Nx CLI

+
+
+

Nx CLI provides a wrapper around Angular CLI and makes it faster, in addition to other benefits. Its computational cache significantly speeds up building and serving applications successively.

+
+
+

With Nx CLI you always get the latest tools to develop your angular applications. By default it is integrated with tools like Jest, Cypress, ESLint and many more. Though you can always configure to use other tools as per your preference.

+
+
+

One difference you will find while working with Nx CLI is that an Nx workspace follows a certain folder structure. That is because Nx strongly supports monorepo architecture, wherein you place all the different components that make up your entire application (front-end, back-end, libraries, models) into one single repository. Nx also provides the tooling between these different components, so that you can share your code across your different applications in the same repo and avoid re-writing. We will go through the folder structure later in this guide. But we might not always want to follow a monorepo architecture and it is possible to create a single application with Nx CLI.

+
+
+

In this guide we are going to learn how to create an angular app with Nx CLI. But first, let us start by installing Nx

+
+
+
+
+

Installing Nx

+
+
+

You can install Nx globally in your system using the following command:

+
+
+
+
npm install -g nx
+
+
+
+

Now let us proceed to creating an angular application using Nx.

+
+
+
+
+

Creating Angular app with Nx

+
+
+

To create an angular app with Nx, we simply create an Nx workspace with angular preset using the following command:

+
+
+
+
npx create-nx-workspace --preset=angular
+
+
+
+

The CLI will ask a series of questions and setup your workspace with an empty angular application.

+
+
+
+Creating a Nx workspace +
+
Figure 1. Creating a Nx workspace.
+
+
+

Here we have given the workspace name as nx-guide and the app name as nx-ng-app. Let us have a look at the folder structure created by Nx CLI.

+
+
+
+
+

Nx workspace folder structure

+
+
+

Every Nx workspace has the following folder structure:

+
+
+
+
myorg/
+├── apps/
+├── libs/
+├── tools/
+├── workspace.json
+├── nx.json
+├── package.json
+└── tsconfig.base.json
+
+
+
+

Nx creates such a folder structure because it strongly supports the concept of monorepo, wherein all the inter-related applications and libraries are put together in the same repository for better maintenance, code-sharing and avoiding duplication of codes.

+
+
+

As per the structure, all runnable applications should belong in the apps/ directory. Supporting applications and libraries can be put in the libs/ folder, with each library defining its own external API to differentiate between them. tools/ folder can contain scripts which act on your code like database scripts, for example. The JSON files contain various information and configuration settings about the workspace and the projects within them.

+
+
+

You will find your angular app named nx-ng-app in the apps/ folder. The folder structure within your app is similar to any Angular app created with Angular CLI.

+
+
+
+Your Nx workspace in VSCode +
+
Figure 2. Your Nx workspace in VSCode.
+
+
+

You will also notice another app named nx-ng-app-e2e automatically generated in the apps folder. This is for performing end-to-end testing with Cypress on your app.

+
+
+

Now that we have created our angular app, let us serve it so we can view the application in our browser.

+
+
+
+
+

Running your angular application

+
+
+

You can still use the ng command to serve your application from your workspace root directory as such:

+
+
+
+
ng serve nx-ng-app
+
+
+
+

Using Nx, you can use either of the commands below for the same purpose:

+
+
+
+
nx run my-app:serve
+nx serve my-app
+
+
+
+

Once your code is compiled, you can view your application at http://localhost:4200 as usual.

+
+
+
+
+

Conclusion

+
+
+

In this guide you learned how to install Nx and create an Angular application with it. Nx comes with a host of features and documentation. You can read more about using Nx for your Angular projects over here.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-cypress.html b/docs/devon4ts/1.0/angular/guide-cypress.html new file mode 100644 index 00000000..eaec3c04 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-cypress.html @@ -0,0 +1,1064 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Testing e2e with Cypress

+
+
+

This guide will cover the basics of e2e testing using Cypress.

+
+
+

Cypress is a framework “all in one” that provides the necessary libraries to write specific e2e tests, without the need of Selenium.

+
+
+

Why Cypress?

+
+
+
    +
  • +

    Uses JavaScript

    +
  • +
  • +

    It works directly with the browser so the compatibility with the front-end framework the project uses (in this case Angular) is not a problem.

    +
  • +
  • +

    Easy cross browser testing

    +
  • +
+
+
+
+
+

Setup

+
+
+

Install +First of all we need to install it, we can use npm install:

+
+
+
+
$ npm install -D cypress
+
+
+
+

Or we can install it with yarn:

+
+
+
+
$ yarn add -D cypress
+
+
+
+

We need to run Cypress in order to get the folder tree downloaded, then create a tsconfig.json file inside cypress folder to add the typescript configuration.

+
+
+
+
$ . /node_modules/.bin/cypress open
+
+
+
+
Listing 1. tsconfig.json
+
+
{
+  "compilerOptions": {
+    "strict": true,
+    "baseUrl": "../node_modules",
+    "target": "es5",
+    "lib": ["es5", "dom"],
+    "types": ["cypress"]
+  },
+  "include": [
+    "**/*.ts"
+  ]
+}
+
+
+
+

BaseUrl

+
+
+

Let’s setup the base URL so when we run the tests cypress will "navigate" to the right place, go to cypress.json on the root of the project.

+
+
+
Listing 2. cypress.json
+
+
{
+  "baseUrl": "http://localhost:4200"
+}
+
+
+
+
+
+

Files / Structure

+
+
+
+
/cypress
+  tsconfig.json
+  /fixtures
+    - example.json
+  /integration
+    - button.spec.ts
+    - test.spec.ts
+    /examples
+  /plugins
+    - index.js
+  /support
+    - commands.js
+    - index.js
+
+
+
+

tsconfig.json for typescript configuration.

+
+
+

fixtures to store our mock data or files (images, mp3…​) to use on our tests.

+
+
+

integration is where our tests go, by default it comes with an examples folder with tested samples.

+
+
+

plugins is where the configuration files of the plugins go.

+
+
+

support to add custom commands.

+
+
+
+
+

== =

+
+
+

If you are using Nx, it automatically generates a e2e cypress project for every project that you generate. So you already get the configuration files like tsconfig.json and cypress.json and also get the folder structure described above. This helps you focus more on writing your tests rather than setting up Cypress.

+
+
+
+
+

== =

+
+ +
+
+
+

Tests

+
+
+

The structure is the same as Mocha's.

+
+
+

First, we create a file, for example form.spec.ts, inside we define a context to group all our tests referred to the same subject.

+
+
+
Listing 3. form.spec.ts
+
+
context('Button page', () => {
+  beforeEach(() => {
+    cy.visit('/');
+  });
+  it('should have button',()=>{
+    cy.get('button').should('exist');
+  });
+  it('should contain PRESS',()=>{
+    cy.contains('button', 'PRESS');
+  });
+});
+
+
+
+
beforeEach
+

Visit '/' before every test.

+
+
+
it
+

Inside we write the test.

+
+
+

The result:

+
+
+
+contextImg +
+
+
+

For more info check Cypress documentation

+
+
+

On kitchensink +you can find an official cypress demo with all the commands being used.

+
+
+
+
+

Fixtures

+
+
+

We use fixtures to mock data, it can be a json, an image, video…​

+
+
+
+
{
+  "name": "Dummy name",
+  "phone": "999 99 99 99",
+  "body": "Mock data"
+}
+
+
+
+

You can store multiple mocks on the same fixture file.

+
+
+
+
{
+  "create":{"name": "e2etestBox"},
+  "boxFruit":{
+    "uuid":"3376339576e33dfb9145362426a33333",
+    "name":"e2etestBox",
+    "visibility":true,
+    "items":[
+      {"name":"apple","units":3},
+      {"name":"kiwi","units":2}
+    ]
+  }
+}
+
+
+
+

To access data we don’t need to import any file, we just call cy.fixture(filename) inside the **.spec.ts. We can name it as we want.

+
+
+
+
cy.fixture('box.json').as('fruitBox')
+
+
+
+

With cy.fixture('box.json') we get access to box.json. +.as('fruitBox') is used to create an alias (fruitBox) to the fixture.

+
+
+

For more info check Fixtures documentation

+
+
+
+
+

Request / Route

+
+
+

With cypress you can test your application with real data or with mocks.

+
+
+

Not using mocks guarantees that your tests are real e2e tests but makes them vulnerable to external issues. +When you mock data you don’t know exactly if the data and the structure received from the backend is correct because you are forcing a mock on the response, but you can avoid external issues, run tests faster and have better control on the structure and status.

+
+
+

To get more information go to Testing Strategies

+
+
+
+
+

Route

+
+
+

Cypress can intercept a XHR request and interact with it.

+
+
+
+
cy.server();
+cy.route(
+  'GET',
+  '/apiUrl/list',
+  [{"name":"apple", "units":3},{"name":"kiwi", "units":2}]
+)
+
+
+
+

cy.server(options) start a server to interact with the responses.

+
+
+
cy.route(options) intercepts a XMLHttpRequests
+
    +
  • +

    method GET

    +
  • +
  • +

    URL /apiUrl/list'

    +
  • +
  • +

    response [{"name":"apple", "units":3},{"name":"kiwi", "units":2}]

    +
  • +
+
+
+

Waits

+
+
+

Every cypress action has a default await time to avoid asynchronous issues, but this time can be short for some particular actions like API calls, for those cases we can use cy.wait().

+
+
+
+
cy.server();
+cy.route('/apiUrl/list').as('list');
+cy.visit('/boxList');
+cy.wait('@list');
+
+
+
+

You can find more information about cy.wait() here

+
+
+

To mock data with fixtures:

+
+
+
+
cy.fixture('box')
+  .then(({boxFruit}) => {
+    cy.route(
+      'GET',
+      '/apiUrl/list',
+      boxFruit
+    ).as('boxFruit');
+    cy.get('#button').click();
+    cy.wait('@journalsList');
+    cy.get('#list').contains('apple');
+  })
+
+
+
+

We get boxFruit data from the box fixture and then we mock the API call with it so now the response of the call is boxFruit object. +When the button is clicked, it waits to receive the response of the call and then checks if the list contains one of the elements of the fruitBox.

+
+
+
+
+

Request

+
+
+

Make a HTTP request.

+
+
+
+
cy.server();
+cy.request('http://localhost:4200/')
+  .its('body')
+  .should('include', '<h1>Welcome to Devon4ngAngularElementsTest!</h1>');
+
+
+
+

If we have 'http://localhost:4200' as baseUrl on cypress.json

+
+
+
+
cy.server();
+cy.request('/')
+  .its('body')
+  .should('include', '<h1>Welcome to Devon4ngAngularElementsTest!</h1>');
+// Goes to http://localhost:4200/
+
+
+
+

We can add other options, like we can send the body of a form.

+
+
+
+
cy.server();
+cy.request({
+  method: 'POST',
+  url: '/send',
+  form: true,
+  body: {
+    name: 'name task',
+    description: 'description of the task'
+  }
+});
+
+
+
+
+
+

Custom commands

+
+
+

If you see yourself writing the same test more than once (login is a common one), you can create a custom command to make things faster.

+
+
+

Cypress.Commands.add('name', () => {}) to create the test.

+
+
+
Listing 4. commands.ts
+
+
Cypress.Commands.add('checkPlaceholder', (name) => {
+  cy.get(`[name='${name}']`).click();
+  cy.get('mat-form-field.mat-focused').should('exist');
+});
+
+
+
+
index.ts
+

To use the commands we need to import the files on support/index.ts

+
+
+
Listing 5. index.ts
+
+
import './commands'
+import './file1'
+import './folder/file2'
+
+
+
+

index.ts is where all our custom commands files unite so Cypress knows where to find them.

+
+
+

And as we are using typescript we need to define a namespace, interface and define our function.

+
+
+
    +
  • +

    index.d.ts

    +
  • +
+
+
+
+
declare namespace Cypress {
+  interface Chainable<Subject> {
+    checkPlaceholder(name:string):Chainable<void>
+  }
+}
+
+
+ +
+
+
+

Cross browser testing

+
+
+

By default the browser used by Cypress is Chrome, it has compatibility with it’s family browsers (including Microsoft Edge) and has beta support for Mozilla Firefox.

+
+
+

To change the browser on the panel we can do it by selecting the desired one on the browsers tab before running the spec file.

+
+
+

Cypress will detect and display, except electron, only the browsers that you have already installed on your machine.

+
+
+
+browserTab +
+
+
+

Once the browser is selected, you can run your tests.

+
+
+

To change the browser on the automatic test run, you can add a flag on the node command

+
+
+
+
cypress run --browser edge
+
+
+
+

Only if we use the cypress run command.

+
+
+

Or we can change the script file.

+
+
+
    +
  • +

    cypress/script.js

    +
  • +
+
+
+
+
const runTests= async ()=>{
+  ...
+  const {totalFailed} = await cypress.run({browser:'edge'});
+  ...
+};
+
+
+ +
+
+
+

Viewport

+
+
+

Cypress allow us to create tests depending on the Viewport, so we can test responsiveness.

+
+
+

There are different ways to use it:

+
+
+

Inside a test case

+
+
+
+
it('should change title when viewport is less than 320px', ()=>{
+  cy.get('.title-l').should('be.visible');
+  cy.get('.title-s').should('not.be.visible');
+  cy.viewport(320, 480);
+  cy.get('.title-l').should('not.be.visible');
+  cy.get('.title-s').should('be.visible');
+})
+
+
+
+

Passing the configuration as an option

+
+
+
+
describe('page display on medium size screen', {
+  viewportHeight: 1000,
+  viewportWidth: 400
+}, () => {
+  ...
+})
+
+
+
+

Or we can set a default

+
+
+
    +
  • +

    cypress.json

    +
  • +
+
+
+
+
...
+{
+ "viewportHeight": 1000
+ "viewportWidth": 400,
+}
+...
+
+
+ +
+
+
+

Test retries

+
+
+

We can get false negatives intermittently due external issues that can affect our tests, because of that we can add, in the configuration, a retries entry so Cypress can run again a certain failed test the selected number of times to verify that the error is real.

+
+
+

We can set retries for run or open mode.

+
+
+
    +
  • +

    cypress.json

    +
  • +
+
+
+
+
...
+"retries": {
+    "runMode": 3,
+    "openMode": 3
+  }
+...
+
+
+
+

The retries can be configured on the cypress.json or directly on a specific test.

+
+
+
+
it('should get button', {
+  retries: {
+    runMode: 2,
+    openMode: 2
+  }
+}, () => {
+  ...
+})
+
+
+
+

These retries are not shown on the test log.

+
+
+

Check more on retries documentation

+
+
+
+
+

Reporter

+
+
+

The tests results appear on the terminal, but to have a more friendly view we can add a reporter.

+
+
+
+reporter +
+
+
+
+
+

Mochawesome

+
+
+

In this case we are going to use Mochawesome, initially its a Mocha reporter but as Cypress uses Mocha it works the same.

+
+
+

Install

+
+
+

npm

+
+
+
+
npm install --save-dev mochawesome
+
+
+
+

yarn

+
+
+
+
yarn add -D mochawesome
+
+
+
+

To run the reporter:

+
+
+
+
cypress run --reporter mochawesome
+
+
+
+

Mochawesome saves by default the generated files on `./mochawesome-report/` but we can add options to change this behavior.

+
+
+

Options can be passed to the reporter in two ways

+
+
+

Using a flag

+
+
+
+
cypress run --reporter mochawesome --reporter-options reportDir=report
+
+
+
+

Or on cypress.json

+
+
+
+
{
+  "baseUrl": "http://localhost:4200",
+  "reporter": "mochawesome",
+  "reporterOptions": {
+    "overwrite": false,
+    "html": false,
+    "json": true,
+    "reportDir": "cypress/report"
+  }
+}
+
+
+
+

Overwrite:false to not overwrite every **.spec.ts test report; we want them kept so a merged version can be created later.

+
+
+

reportDir to set a custom directory.

+
+
+

html:false because we don’t need it.

+
+
+

json:true to save them on json.

+
+
+

Mochawesome only creates the html file of the last .spec.ts file that the tests run, that’s why we don’t generate html reports directly, in order to stack them all on the same final html we need to merge the reports.

+
+ +
+

mochawesome-merge

+
+
+

Mochawesome-merge is a library that helps us to merge the different json.

+
+
+

npm

+
+
+
+
npm install --save-dev mochawesome-merge
+npm install --save-dev mochawesome-report-generator
+
+
+
+

yarn

+
+
+
+
yarn add -D mochawesome-merge
+yarn add -D mochawesome-report-generator
+
+
+
+

To merge the files we execute this command:

+
+
+
+
mochawesome-merge cypress/report/*.json > cypress/reportFinal.json
+
+
+
+

reportFinal.json is the result of this merge; with that we have the data of all the spec files in one json.

+
+
+

We can also automate the test, merge and conversion to html using a script.

+
+
+
+
const cypress = require('cypress');
+const fse = require('fs-extra');
+const { merge } = require('mochawesome-merge');
+const generator = require('mochawesome-report-generator');
+const runTests= async ()=>{
+  await fse.remove('mochawesome-report');
+  await fse.remove('cypress/report');
+  const {totalFailed} = await cypress.run();
+  const reporterOptions = {
+    files: ["cypress/report/*.json"]
+  };
+  await generateReport(reporterOptions);
+  if(totalFailed !==  0){
+    process.exit(2);
+  };
+};
+const generateReport = (options)=> {
+  return merge(options).then((jsonReport)=>{
+    generator.create(jsonReport).then(()=>{
+      process.exit();
+    });
+  });
+};
+runTests();
+
+
+
+

fse.remove() to remove older reports data.

+
+
+

cypress.run() to run the tests.

+
+
+

merge(options) we merge the json output from running the tests.

+
+
+

generator.create(jsonReport) then we generate the html view of the report.

+
+ +
+

On kitchensink you can find an official Cypress demo with all the commands being used.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-error-handler.html b/docs/devon4ts/1.0/angular/guide-error-handler.html new file mode 100644 index 00000000..9c805c64 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-error-handler.html @@ -0,0 +1,510 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Error Handler in angular

+
+
+

Angular allows us to set up a custom error handler that can be used to catch the different errors and handle them in a correct way. Using a global error handler will avoid mistakes and provide a user-friendly interface, allowing us to indicate to the user what problem is happening.

+
+
+
+
+

What is ErrorHandler

+
+
+

ErrorHandler is the class that Angular uses by default to control the errors. This means that, even if the application doesn’t define an ErrorHandler, it is going to use the one set up by default in Angular. This can be tested by navigating to a non-existing page in any app: Angular will instantly print the error in the console.

+
+
+
+
+

Creating your custom ErrorHandler step by step

+
+
+

In order to create a custom ErrorHandler three steps are going to be needed:

+
+
+
+
+

Creating the custom ErrorHandler class

+
+
+

In this first step the custom ErrorHandler class is going to be created inside the folder /app/core/errors/errors-handler.ts:

+
+
+
+
import { ErrorHandler, Injectable, Injector } from '@angular/core';
+import { HttpErrorResponse } from '@angular/common/http';
+
+// Global error handler: Angular routes every uncaught error through handleError().
+@Injectable()
+export class ErrorsHandler implements ErrorHandler {
+
+    constructor(private injector: Injector) {}
+
+    handleError(error: Error | HttpErrorResponse) {
+      //  To do: Use injector to get the necessary services to redirect or
+      // show a message to the user
+      const classname  = error.constructor.name;
+      switch ( classname )  {
+        case 'HttpErrorResponse':
+          console.error('HttpError:' + error.message);
+          if (!navigator.onLine) {
+            // Apostrophe must be escaped inside a single-quoted string
+            // (the unescaped quote was a syntax error).
+            console.error('There\'s no internet connection');
+            // To do: control here in internet what you wanna do if user has no internet
+          } else {
+            console.error('Server Error:' + error.message);
+            // To do: control here if the server gave an error
+          }
+          break;
+        default:
+          console.error('Error:' + error.message);
+          // To do: control here if the client/other things gave an error
+      }
+    }
+}
+
+
+
+

This class can be used to control the different type of errors. If wanted, the classname variable could be used to add more switch cases. This would allow control of more specific situations.

+
+
+
+
+

Creating a ErrorInterceptor

+
+
+

Inside the same folder created in the last step we are going to create the ErrorInterceptor (errors-handler-interceptor.ts). This ErrorInterceptor is going to retry any failed call to the server, to make sure the failure is not just transient, before showing the error:

+
+
+
+
import { HttpInterceptor, HttpRequest, HttpHandler, HttpEvent } from '@angular/common/http';
+import { Injectable } from '@angular/core';
+import { Observable, of } from 'rxjs';
+import { retryWhen, delay, take, concatMap } from 'rxjs/operators';
+
+// Retries failed HTTP requests (up to 5 attempts, 500 ms apart) before
+// letting the error propagate to the global ErrorHandler.
+// Note: the original listing imported only the unused `retry` operator;
+// retryWhen, delay, take, concatMap and of must be imported as shown above.
+@Injectable()
+export class ErrorsHandlerInterceptor implements HttpInterceptor {
+
+    constructor() {}
+    intercept(req: HttpRequest<any>, next: HttpHandler): Observable<HttpEvent<any>> {
+        return next.handle(req).pipe(
+            retryWhen((errors: Observable<any>) => errors.pipe(
+                delay(500),  // wait between attempts
+                take(5),     // at most five retries
+                concatMap((error: any, retryIndex: number) => {
+                    // Re-throw on the last attempt; otherwise re-emit the
+                    // error so retryWhen triggers another retry.
+                    if (++retryIndex === 5) {
+                        throw error;
+                    }
+                    return of(error);
+                })
+            ))
+        );
+    }
+}
+
+
+
+

This custom-made interceptor implements HttpInterceptor and, inside the intercept method, uses the pipe, retryWhen, delay, take and concatMap operators from RxJS to do the following things if there are errors:

+
+
+
    +
  1. +

    With delay(500) do a delay to allow some time in between requests

    +
  2. +
  3. +

    With take(5) retry five times.

    +
  4. +
  5. +

    With concatMap if the index that take() gives is not 5 it returns the error, else, it throws the error.

    +
  6. +
+
+
+
+
+

Creating a Error Module

+
+
+

Finally, creating a module(errors-handler.module.ts) is necessary to include the interceptor and the custom error handler. In this case, the module is going to be created in the same folder as the last two:

+
+
+
+
import { NgModule, ErrorHandler } from '@angular/core';
+import { CommonModule } from '@angular/common';
+import { ErrorsHandler } from './errors-handler';
+import { HTTP_INTERCEPTORS } from '@angular/common/http';
+import { ErrorsHandlerInterceptor } from './errors-handler-interceptor';
+
+@NgModule({
+  declarations: [], // Declare here component if you want to use routing to error component
+  imports: [
+    CommonModule
+  ],
+  providers: [
+    {
+      provide: ErrorHandler,
+      useClass: ErrorsHandler,
+    },
+    {
+      provide: HTTP_INTERCEPTORS,
+      useClass: ErrorsHandlerInterceptor,
+      multi: true,
+    }
+  ]
+})
+export class ErrorsHandlerModule { }
+
+
+
+

This module simply is providing the services that are implemented by our custom classes and then telling angular to use our custom made classes instead of the default ones. After doing this, the module has to be included in the app module app.module.ts in order to be used.

+
+
+
+
....
+  imports: [
+    ErrorsHandlerModule,
+    ....
+
+
+
+
+
+

Handling Errors

+
+
+

As a final step, handling these errors is necessary. There are different ways that can be used to control the errors, here are a few:

+
+
+
    +
  • +

    Creating a custom page and using with Router to redirect to a page showing an error.

    +
  • +
  • +

    Creating a service in the server side or Backend to create a log with the error and calling it with HttpClient.

    +
  • +
  • +

    Showing a custom made SnackBar with the error message.

    +
  • +
+
+
+
+
+

== Using SnackBarService and NgZone

+
+
+

If the SnackBar is used directly, some errors can occur; this is due to the SnackBar being outside the Angular zone. In order to use this service properly, NgZone is necessary. The method run() from NgZone will allow the service to run inside the Angular zone. An example on how to use it:

+
+
+
+
import { ErrorHandler, Injectable, Injector, NgZone } from '@angular/core';
+import { HttpErrorResponse } from '@angular/common/http';
+import { MatSnackBar } from '@angular/material';
+
+@Injectable()
+export class ErrorsHandler implements ErrorHandler {
+
+    constructor(private injector: Injector, private zone: NgZone) {}
+
+    handleError(error: Error | HttpErrorResponse) {
+      // Use injector to get the necessary services to redirect or
+      const snackBar: MatSnackBar = this.injector.get(MatSnackBar);
+      const classname  = error.constructor.name;
+      let message: string;
+      switch ( classname )  {
+        case 'HttpErrorResponse':
+          message = !(navigator.onLine) ? 'There is no internet connection' : error.message;
+          break;
+        default:
+          message = error.message;
+      }
+      this.zone.run(
+        () => snackBar.open(message, 'danger', { duration : 4000})
+      );
+    }
+}
+
+
+
+

Using Injector the MatSnackBar is obtained, then the correct message is obtained inside the switch. Finally, using NgZone and run(), we open the SnackBar passing the message, and the parameters wanted.

+
+
+

You can find a working example of this guide in devon4ts-samples.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-eslint.html b/docs/devon4ts/1.0/angular/guide-eslint.html new file mode 100644 index 00000000..4033e1ec --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-eslint.html @@ -0,0 +1,385 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular ESLint support

+
+
+ + + + + +
+ + +ESLint is supported in Angular 10.1.0 onward. +
+
+
+
+
+

What about TSLint?

+
+
+

TSLint is a fantastic tool. It is a linter that was written specifically to work based on the TypeScript AST format. This has advantages and disadvantages, as with most decisions we are faced with in software engineering!

+
+
+

One advantage is there is no tooling required to reconcile differences between ESLint and TypeScript AST formats, but the major disadvantage is that the tool is therefore unable to reuse any of the previous work which has been done in the JavaScript ecosystem around linting, and it has to re-implement everything from scratch. Everything from rules to auto-fixing capabilities and more.

+
+
+

However, the backers behind TSLint announced in 2019 that they would be deprecating TSLint in favor of supporting typescript-eslint in order to benefit the community. You can read more about that here

+
+
+

The TypeScript Team themselves also announced their plans to move the TypeScript codebase from TSLint to typescript-eslint, and they have been big supporters of this project. More details at https://github.com/microsoft/TypeScript/issues/30553

+
+
+

Angular ESLint support comes from the angular-eslint tooling package. Angular documentation also links to this repository as you can check in the ng lint section of the Angular CLI documentation.

+
+
+
+
+

Quick start with Angular and ESLint

+
+
+

In order to create a brand new Angular CLI workspace which uses ESLint instead of TSLint and Codelyzer, simply run the following commands:

+
+
+
+
##Install the Angular CLI and @angular-eslint/schematics globally however you want (e.g. npm, yarn, volta etc)
+
+$ npm i -g @angular/cli @angular-devkit/core @angular-devkit/schematics @angular-eslint/schematics
+
+##Create a new Angular CLI workspace using the @angular-eslint/schematics collection (instead of the default)
+
+$ ng new --collection=@angular-eslint/schematics
+
+
+
+
+
+

Migrating an Angular CLI project from Codelyzer and TSLint

+
+ +
+
+
+

1 - Add relevant dependencies

+
+
+

The first step is to run the schematic to add @angular-eslint to your project:

+
+
+
+
$ ng add @angular-eslint/schematics
+
+
+
+

This will handle installing the latest version of all the relevant packages for you and adding them to the devDependencies of your package.json.

+
+
+
+
+

2 - Run the convert-tslint-to-eslint schematic on a project

+
+
+

The next thing to do is consider which "project" you want to migrate to use ESLint. If you have a single application in your workspace you will likely have just a single entry in the projects configuration object within your angular.json file. If you have a projects/ directory in your workspace, you will have multiple entries in your projects configuration and you will need to choose which one you want to migrate using the convert-tslint-to-eslint schematic.

+
+
+

You can run it like so:

+
+
+
+
$ ng g @angular-eslint/schematics:convert-tslint-to-eslint {{YOUR_PROJECT_NAME_GOES_HERE}}
+
+
+
+

From now on, ng lint will use ESLint!

+
+
+
+
+

3 - Remove root TSLint configuration and use only ESLint

+
+
+

Once you are happy with your ESLint setup, you simply need to remove the root-level tslint.json and potentially uninstall TSLint and any TSLint-related plugins/dependencies if your Angular CLI workspace is now no longer using TSLint at all.

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-file-structure.html b/docs/devon4ts/1.0/angular/guide-file-structure.html new file mode 100644 index 00000000..1ebbc5e7 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-file-structure.html @@ -0,0 +1,421 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

File Structure

+
+ +
+
+
+

Top-level

+
+
+

The top-level file structure is defined by Angular CLI. You might put this "top-level file structure" into a sub-directory to facilitate your build, but this is not relevant for this guide. So the applications file structure relevant to this guide is the folder /src/app inside the part managed by Angular CLI.

+
+
+
Listing 1. Top-level file structure shows feature modules
+
+
    /src
+    └── /app
+        ├── /account-management
+        ├── /billing
+        ├── /booking
+        ├── /core
+        ├── /shared
+        ├── /status
+        |
+        ├── app.module.ts
+        ├── app.component.spec.ts
+        ├── app.component.ts
+        └── app.routing-module.ts
+
+
+
+

Besides the definition of app module the app folder has feature modules on top-level. +The special modules shared and core are present as well.

+
+
+
+
+

Feature Modules

+
+
+

A feature module contains the modules definition and two folders representing both layers.

+
+
+
Listing 2. Feature module file structure has both layers
+
+
    /src
+    └── /app
+        └── /account-management
+            ├── /components
+            ├── /services
+            |
+            ├── account-management.module.ts
+            ├── account-management.component.spec.ts
+            ├── account-management.component.ts
+            └── account-management.routing-module.ts
+
+
+
+

Additionally an entry component is possible. This would be the case in lazy loading scenarios. +So account-management.component.ts would be only present if account-management is lazy loaded. +Otherwise, the module’s routes would be defined Component-less +(see vsavkin blog post).

+
+
+
+
+

Components Layer

+
+
+

The component layer reflects the distinction between Smart Components and Dumb Components.

+
+
+
Listing 3. Components layer file structure shows Smart Components on top-level
+
+
    /src
+    └── /app
+        └── /account-management
+            └── /components
+                ├── /account-overview
+                ├── /confirm-modal
+                ├── /create-account
+                ├── /forgot-password
+                └── /shared
+
+
+
+

Every folder inside the /components folder represents a smart component. The only exception is /shared. +/shared contains Dumb Components shared across Smart Components inside the components layer.

+
+
+
Listing 4. Smart components contain Dumb components
+
+
    /src
+    └── /app
+        └── /account-management
+            └── /components
+                └── /account-overview
+                    ├── /user-info-panel
+                    |   ├── /address-tab
+                    |   ├── /last-activities-tab
+                    |   |
+                    |   ├── user-info-panel.component.html
+                    |   ├── user-info-panel.component.scss
+                    |   ├── user-info-panel.component.spec.ts
+                    |   └── user-info-panel.component.ts
+                    |
+                    ├── /user-header
+                    ├── /user-toolbar
+                    |
+                    ├── account-overview.component.html
+                    ├── account-overview.component.scss
+                    ├── account-overview.component.spec.ts
+                    └── account-overview.component.ts
+
+
+
+

Inside the folder of a Smart Component the component is defined. +Besides that are folders containing the Dumb Components the Smart Component consists of. +This can be recursive - a Dumb Component can consist of other Dumb Components. +This is reflected by the file structure as well. This way the structure of a view becomes very readable. +As mentioned before, if a Dumb Component is used by multiple Smart Components inside the components layer +it is put inside the /shared folder inside the components layer.

+
+
+

With this way of thinking the shared module makes a lot of sense. If a Dumb Component is used by multiple Smart Components +from different feature modules, the Dumb Component is placed into the shared module.

+
+
+
Listing 5. The shared module contains Dumb Components shared across Smart Components from different feature modules
+
+
    /src
+    └── /app
+        └── /shared
+            └── /user-panel
+                |
+                ├── user-panel.component.html
+                ├── user-panel.component.scss
+                ├── user-panel.component.spec.ts
+                └── user-panel.component.ts
+
+
+
+

The layer folder /components is not necessary inside the shared module. +The shared module only contains components!

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-internationalization.html b/docs/devon4ts/1.0/angular/guide-internationalization.html new file mode 100644 index 00000000..cb802393 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-internationalization.html @@ -0,0 +1,575 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Internationalization

+
+
+

Nowadays, a common scenario in front-end applications is to have the ability to translate labels and locate numbers, dates, currency and so on when the user clicks over a language selector or similar. devon4ng and specifically Angular has a default mechanism in order to fill the gap of such features, and besides there are some wide used libraries that make even easier to translate applications.

+
+ +
+
+
+

devon4ng i18n approach

+
+
+

The official approach could be a bit complicated, therefore the recommended one is to use the library Transloco from https://github.com/ngneat/transloco/.

+
+
+
+
+

Install and configure Transloco

+
+
+

In order to include this library in your devon4ng Angular >= 7.2 project you will need to execute in a terminal:

+
+
+
+
$ ng add @ngneat/transloco
+
+
+
+

As part of the installation process you’ll be presented with questions; Once you answer them, everything you need will automatically be created for you.

+
+
+
    +
  • +

    First, Transloco creates boilerplate files for the requested translations.

    +
  • +
  • +

    Next, it will create a new file, transloco-root.module.ts which exposes an Angular’s module with a default configuration, and inject it into the AppModule.

    +
  • +
+
+
+
+
import { HttpClient } from '@angular/common/http';
+import {
+  TRANSLOCO_LOADER,
+  Translation,
+  TranslocoLoader,
+  TRANSLOCO_CONFIG,
+  translocoConfig,
+  TranslocoModule
+} from '@ngneat/transloco';
+import { Injectable, NgModule } from '@angular/core';
+import { environment } from '../environments/environment';
+
+@Injectable({ providedIn: 'root' })
+export class TranslocoHttpLoader implements TranslocoLoader {
+  constructor(private http: HttpClient) {}
+
+  getTranslation(lang: string) {
+    return this.http.get<Translation>(`/assets/i18n/${lang}.json`);
+  }
+}
+
+@NgModule({
+  exports: [ TranslocoModule ],
+  providers: [
+    {
+      provide: TRANSLOCO_CONFIG,
+      useValue: translocoConfig({
+        availableLangs: ['en', 'es'],
+        defaultLang: 'en',
+        // Remove this option if your application doesn't support changing language in runtime.
+        reRenderOnLangChange: true,
+        prodMode: environment.production,
+      })
+    },
+    { provide: TRANSLOCO_LOADER, useClass: TranslocoHttpLoader }
+  ]
+})
+export class TranslocoRootModule {}
+
+
+
+ + + + + +
+ + +As you might have noticed it also set an HttpLoader into the module’s providers. The HttpLoader is a class that implements the TranslocoLoader interface. It’s responsible for instructing Transloco how to load the translation files. It uses Angular HTTP client to fetch the files, based on the given path. +
+
+
+
+
+

Usage

+
+
+

In order to translate any label in any HTML template you will need to use the transloco pipe available:

+
+
+
+
{{ 'HELLO' | transloco }}
+
+
+
+

An optional parameter from the component TypeScript class could be included as follows:

+
+
+
+
{{ 'HELLO' | transloco: { value: dynamic } }}
+
+
+
+

It is possible to use with inputs:

+
+
+
+
<span [attr.alt]="'hello' | transloco">Attribute</span>
+<span [title]="'hello' | transloco">Property</span>
+
+
+
+

In order to change the language used you will need to create a button or selector that calls the this.translocoService.use(language: string) method from TranslocoService. For example:

+
+
+
+
export class AppComponent {
+  constructor(private translocoService: TranslocoService) {}
+
+  changeLanguage(lang) {
+      this.translocoService.setActiveLang(lang);
+  }
+}
+
+
+
+

The translations will be included in the en.json, es.json, de.json, etc. files inside the /assets/i18n folder. For example en.json would be (using the previous parameter):

+
+
+
+
{
+    "HELLO": "hello"
+}
+
+
+
+

Or with an optional parameter:

+
+
+
+
{
+    "HELLO": "hello {{value}}"
+}
+
+
+
+

Transloco understands nested JSON objects. This means that you can have a translation that looks like this:

+
+
+
+
{
+    "HOME": {
+        "HELLO": "hello {{value}}"
+    }
+}
+
+
+
+

In order to access the value, use the dot notation, in this case HOME.HELLO.

+
+
+
+
+

Using the service, pipe or directive

+
+ +
+
+
+

== Structural Directive

+
+
+

Using a structural directive is the recommended approach. It’s DRY and efficient, as it creates one subscription per template:

+
+
+
+
<ng-container *transloco="let t">
+  <p>{{ t('title') }}</p>
+
+  <comp [title]="t('title')"></comp>
+</ng-container>
+
+
+
+

Note that the t function is memoized. It means that given the same key it will return the result directly from the cache.

+
+
+

We can pass a params object as the second parameter:

+
+
+
+
<ng-container *transloco="let t">
+  <p>{{ t('name', { name: 'Transloco' }) }}</p>
+</ng-container>
+
+
+
+

We can instruct the directive to use a different language in our template:

+
+
+
+
<ng-container *transloco="let t; lang: 'es'">
+  <p>{{ t('title') }}</p>
+</ng-container>
+
+
+
+
+
+

== Pipe

+
+
+

The use of pipes can be possible too:

+
+
+

template:

+
+
+
+
<div>{{ 'HELLO' | transloco:param }}</div>
+
+
+
+

component:

+
+
+
+
param = {value: 'world'};
+
+
+
+
+
+

== Attribute Directive

+
+
+

The last option available with transloco is the attribute directive:

+
+
+
+
<div transloco="HELLO" [translocoParams]="{ value: 'world' }"></div>
+
+
+
+
+
+

== Service

+
+
+

If you need to access translations in any component or service you can do it injecting the TranslocoService into them:

+
+
+
+
// Sync translation
+translocoService.translate('HELLO', {value: 'world'});
+
+// Async translation
+translocoService.selectTranslate('HELLO', { value: 'world' }).subscribe(res => {
+    console.log(res);
+    //=> 'hello world'
+});
+
+
+
+ + + + + +
+ + +You can find a complete example at https://github.com/devonfw/devon4ng-application-template. +
+
+
+

Please, visit https://github.com/ngneat/transloco/ for more info.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-ionic-from-code-to-android.html b/docs/devon4ts/1.0/angular/guide-ionic-from-code-to-android.html new file mode 100644 index 00000000..a7fff3e8 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-ionic-from-code-to-android.html @@ -0,0 +1,606 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Ionic to android

+
+
+

This page is written to help developers to go from the source code of an ionic application to an android one, with this in mind, topics such as: environment, commands, modifications,…​ are covered.

+
+
+
+
+

Assumptions

+
+
+

This document assumes that the reader has already:

+
+
+
    +
  • +

    Source code of an Ionic application and wants to build it on an android device,

    +
  • +
  • +

    A working installation of NodeJS

    +
  • +
  • +

    An Ionic CLI installed and up-to-date.

    +
  • +
  • +

    Android Studio and Android SDK.

    +
  • +
+
+
+
+
+

From Ionic to Android project

+
+
+

When a native application is being designed, sometimes functionalities that use the camera, geolocation, push notifications, …​ are requested. To resolve these requests, Capacitor can be used.

+
+
+

In general terms, Capacitor wraps apps made with Ionic (HTML, SCSS, Typescript) into WebViews that can be displayed in native applications (Android, IOS) and allows the developer to access native functionalities like the ones said before.

+
+
+

Installing capacitor is as easy as installing any node module, just a few commands have to be run in a console:

+
+
+
    +
  • +

    cd name-of-ionic-4-app

    +
  • +
  • +

    npm install --save @capacitor/core @capacitor/cli

    +
  • +
+
+
+

Then, it is necessary to initialize capacitor with some information: app id, name of the app and the directory where your app is stored. To fill this information, run:

+
+
+
    +
  • +

    npx cap init

    +
  • +
+
+
+
+
+

Modifications

+
+
+

Throughout the development process, usually back-end and front-end are on a local computer, so it’s a common practice to have different configuration files for each environment (commonly production and development). Ionic uses an angular.json file to store those configurations and some rules to be applied.

+
+
+

If a back-end is hosted on http://localhost:8081, and that direction is used in every environment, the application built for android will not work because computer and device do not have the same localhost. Fortunately, different configurations can be defined.

+
+
+

Android Studio uses 10.0.2.2 as an alias for 127.0.0.1 (computer’s localhost), so adding http://10.0.2.2:8081 in a new environment file and modifying angular.json accordingly will make it possible to connect front-end and back-end.

+
+
+
+Android environment and angular.json +
+
+
+
+
    "build": {
+    ...
+        "configurations": {
+            ...
+            "android": {
+                "fileReplacements": [
+                    {
+                        "replace": "src/environments/environment.ts",
+                        "with": "src/environments/environment.android.ts"
+                    }
+                ]
+            },
+        }
+    }
+
+
+
+
+
+

Build

+
+
+

Once configured, it is necessary to build the Ionic app using this new configuration:

+
+
+
    +
  • +

    ionic build --configuration=android

    +
  • +
+
+
+

The next commands copy the built application into a folder named android and open Android Studio.

+
+
+
    +
  • +

    npx cap add android

    +
  • +
  • +

    npx cap copy

    +
  • +
  • +

    npx cap open android

    +
  • +
+
+
+
+
+

From Android project to emulated device

+
+
+

Once Android Studio is opened, follow these steps:

+
+
+
    +
  1. +

    Click on "Build" → Make project.

    +
  2. +
  3. +

    Click on "Build" → Make Module 'app' (default name).

    +
  4. +
+
+
+

Click on make project +click on make app

+
+
+
    +
  1. +

    Click on" Build" → Build Bundle(s) / APK(s) → Build APK(s).

    +
  2. +
  3. +

    Click on run and choose a device.

    +
  4. +
+
+
+

click on build APK +click on running device

+
+
+

If there are no devices available, a new one can be created:

+
+
+
    +
  1. +

    Click on "Create new device"

    +
  2. +
  3. +

    Select hardware and click "Next". For example: Phone → Nexus 5X.

    +
  4. +
+
+
+

Create new device +Select hardware

+
+
+
    +
  1. +

    Download a system image.

    +
    +
      +
    1. +

      Click on download.

      +
    2. +
    3. +

      Wait until the installation finished and then click "Finish".

      +
    4. +
    5. +

      Click "Next".

      +
    6. +
    +
    +
  2. +
  3. +

    Verify configuration (default configuration should be enough) and click "Next".

    +
  4. +
+
+
+

Download system image +Check configuration

+
+
+
    +
  1. +

    Check that the new device is created correctly.

    +
  2. +
+
+
+
+New created device +
+
+
+
+
+

From Android project to real device

+
+
+

To test on a real android device, an easy approach to communicate a smartphone (front-end) and computer (back-end) is to configure a WiFi hotspot and connect the computer to it. A guide about this process can be found here.

+
+
+

Once connected, run ipconfig on a console if you are using windows or ifconfig on a Linux machine to get the IP address of your machine’s Wireless LAN adapter WiFi.

+
+
+
+Result of `ipconfig` command on Windows 10 +
+
+
+

This obtained IP must be used instead of "localhost" or "10.0.2.2" at environment.android.ts.

+
+
+
+Android environment file server URL +
+
+
+

After this configuration, follow the build steps in "From Ionic to Android project" and the first three steps in "From Android project to emulated device".

+
+
+
+
+

Send APK to Android through USB

+
+
+

To send the built application to a device, you can connect computer and mobile through USB, but first, it is necessary to unlock developer options.

+
+
+
    +
  1. +

    Open "Settings" and go to "System".

    +
  2. +
  3. +

    Click on "About".

    +
  4. +
  5. +

    Click "Build number" seven times to unlock developer options.

    +
  6. +
+
+
+
+Steps to enable developer options: 1, 2, 3 +
+
+
+
    +
  1. +

    Go to "System" again an then to "Developer options"

    +
  2. +
  3. +

    Check that the options are "On".

    +
  4. +
  5. +

    Check that "USB debugging" is activated.

    +
  6. +
+
+
+
+Steps to enable developer options: 4, 5, 6 +
+
+
+

After this, do the step four in "From Android project to emulated device" and choose the connected smartphone.

+
+
+
+
+

Send APK to Android through email

+
+
+

When you build an APK, a dialog gives two options: locate or analyze. If the first one is chosen, Windows file explorer will be opened showing an APK that can be send using email. Download the APK on your phone and click it to install.

+
+
+
+Steps to enable developer options: 4, 5, 6 +
+
+
+
+
+

Result

+
+
+

If everything goes correctly, the Ionic application will be ready to be tested.

+
+
+
+Application running on a real device +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-ionic-getting-started.html b/docs/devon4ts/1.0/angular/guide-ionic-getting-started.html new file mode 100644 index 00000000..cedebf05 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-ionic-getting-started.html @@ -0,0 +1,383 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Ionic 5 Getting started

+
+
+

Ionic is a front-end focused framework which offers different tools for developing hybrid mobile applications. The web technologies used for this purpose are CSS, Sass, HTML5 and Typescript.

+
+
+
+
+

Why Ionic?

+
+
+

Ionic is used for developing hybrid applications, which means not having to rely on a specific IDE such as Android Studio or Xcode. Furthermore, development of native apps require learning different languages (Java/Kotlin for Android and Objective-C/Swift for Apple), with Ionic, a developer does not have to code the same functionality for multiple platforms, just use the adequate libraries and components.

+
+
+
+
+

Basic environment set up

+
+ +
+
+
+

Install Ionic CLI

+
+
+

Although the devonfw distribution comes with an already installed Ionic CLI, here are the steps to install it. The installation of Ionic is easy, just one command has to be written:

+
+
+

$ npm install -g @ionic/cli

+
+
+
+
+

Update Ionic CLI

+
+
+

If there was a previous installation of the Ionic CLI, it will need to be uninstalled due to a change in package name.

+
+
+
+
$ npm uninstall -g ionic
+$ npm install -g @ionic/cli
+
+
+
+

Basic project set up: the setup of an Ionic application is pretty immediate and can be done in one line:

+
+
+

ionic start <name> <template> --type=angular

+
+
+
    +
  • +

    ionic start: Command to create an app.

    +
  • +
  • +

    <name>: Name of the application.

    +
  • +
  • +

    <template>: Model of the application.

    +
  • +
  • +

    --type=angular: With this flag, the app produced will be based on angular.

    +
  • +
+
+
+

To create an empty project, the following command can be used:

+
+
+

ionic start MyApp blank --type=angular

+
+
+
+Ionic blank project +
+
+
+

The image above shows the directory structure generated.

+
+
+

There are more templates available that can be seen with the command +ionic start --list

+
+
+
+List of ionic templates +
+
+
+

The templates surrounded by a red line are based on Angular and come with Ionic v5, while the others belong to earlier versions (before v4).

+
+
+ + + + + +
+ + +More info at https://ionicframework.com/docs. Remember to select Angular documentation, since Ionic supports React, Vue and Vanilla JS. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-ionic-pwa.html b/docs/devon4ts/1.0/angular/guide-ionic-pwa.html new file mode 100644 index 00000000..f03cd894 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-ionic-pwa.html @@ -0,0 +1,545 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Ionic Progressive Web App

+
+
+

This guide is a continuation of the guide Angular PWAs, therefore, valid concepts explained there are still valid in this page but focused on Ionic.

+
+
+
+
+

Assumptions

+
+
+

This guide assumes that you already have installed:

+
+
+
    +
  • +

    NodeJS

    +
  • +
  • +

    npm package manager

    +
  • +
  • +

    Angular CLI / Nx CLI

    +
  • +
  • +

    Ionic 5 CLI

    +
  • +
  • +

    Capacitor

    +
  • +
+
+
+

Also, it is a good idea to read the document about PWA using Angular.

+
+
+
+
+

Sample Application

+
+
+
+Ionic 5 PWA Base +
+
Figure 1. Basic ionic PWA.
+
+
+

To explain how to build progressive web apps (PWA) using Ionic, a basic application is going to be built. This app will be able to take photos even without network using PWA elements.

+
+
+
+
+

Step 1: Create a new project

+
+
+

This step can be completed with one simple command: ionic start <name> <template>, where <name> is the name and <template> a model for the app. In this case, the app is going to be named basic-ion-pwa.

+
+
+

If you are using Nx, there is a pre-requisite to this step. And that is, you have to add the @nxtend/ionic-angular plugin to your Nx workspace. The command for that is npm install --save-dev @nxtend/ionic-angular. Once you have the plugin installed, you can generate an ionic app in your Nx workspace with the command nx generate @nxtend/ionic-angular:app basic-ion-pwa. (You can refer this guide if you want to get started with Nx).

+
+
+
+
+

Step 2: Structures and styles

+
+
+

The styles (scss) and structures (html) do not have anything specially relevant, just colors and ionic web components. The code can be found in devon4ts-samples.

+
+
+
+
+

Step 3: Add functionality

+
+
+

After this step, the app will allow users to take photos and display them in the main screen. +First we have to import three important elements:

+
+
+
    +
  • +

    DomSanitizer: Sanitizes values to be safe to use.

    +
  • +
  • +

    SafeResourceUrl: Interface for values that are safe to use as URL.

    +
  • +
  • +

    Plugins: Capacitor constant value used to access to the device’s camera and toast dialogs.

    +
  • +
+
+
+
+
  import { DomSanitizer, SafeResourceUrl } from '@angular/platform-browser';
+  import { Plugins, CameraResultType } from '@capacitor/core';
+
+
+
+

The process of taking a picture is enclosed in a takePicture() method. takePicture() calls the Camera’s getPhoto() function which returns an URL or an exception. If a photo is taken then the image displayed in the main page will be changed for the new picture, else, if the app is closed without changing it, a toast message will be displayed.

+
+
+
+
  export class HomePage {
+    image: SafeResourceUrl;
+    ...
+
+    async takePicture() {
+      try {
+        const image = await Plugins.Camera.getPhoto({
+          quality: 90,
+          allowEditing: true,
+          resultType: CameraResultType.Uri,
+        });
+
+        // Change last picture shown
+        this.image = this.sanitizer.bypassSecurityTrustResourceUrl(image.webPath);
+      } catch (e) {
+        this.show('Closing camera');
+      }
+    }
+
+    async show(message: string) {
+      await Plugins.Toast.show({
+        text: message,
+      });
+    }
+  }
+
+
+
+
+
+

Step 4: PWA Elements

+
+
+

When Ionic apps are not running natively, some resources like Camera do not work by default but can be enabled using PWA Elements. To use Capacitor’s PWA elements run npm install @ionic/pwa-elements and modify src/main.ts as shown below.

+
+
+
+
...
+
+// Import for PWA elements
+import { defineCustomElements } from '@ionic/pwa-elements/loader';
+
+if (environment.production) {
+  enableProdMode();
+}
+
+platformBrowserDynamic().bootstrapModule(AppModule)
+  .catch(err => console.log(err));
+
+// Call the element loader after the platform has been bootstrapped
+defineCustomElements(window);
+
+
+
+
+
+

Step 5: Make it Progressive.

+
+
+

Turning an Ionic 5 app into a PWA is pretty easy. The same module used to turn Angular apps into PWAs has to be added. To do so, run: ng add @angular/pwa. This command also creates an icons folder inside src/assets and contains angular icons for multiple resolutions. (Note: In an Nx workspace, you have to add it like a normal package using npm install @angular/pwa, and you have to manually add the icons). If you want to use other images, be sure that they have the same resolution, the names can be different but the file manifest.json has to be changed accordingly.

+
+
+
+
+

Step 6: Configure the app

+
+
+

manifest.json

+
+
+

Default configuration.

+
+
+

ngsw-config.json

+
+
+

At assetGroups → resources, add a "urls" field and a pattern to match PWA Elements scripts and other resources (images, styles, …):

+
+
+
+
  "urls": ["https://unpkg.com/@ionic/pwa-elements@1.0.2/dist/**"]
+
+
+
+
+
+

Step 7: Check that your app is a PWA

+
+
+

To check if an app is a PWA let's compare its normal behavior against itself but built for production. Run in the project’s root folder the commands below:

+
+
+

ionic build --configuration production to build the app using production settings. (nx build basic-ion-pwa --configuration production in your Nx workspace root).

+
+
+

npm install http-server to install an npm module that can serve your built application. Documentation here. A good alternative is also npm install serve. It can be checked here.

+
+
+

Go to the www folder running cd www.

+
+
+

http-server -o or serve to serve your built app.

+
+
+ + + + + +
+ + +In order not to install anything not necessary npx can be used directly to serve the app. i.e run npx serve [folder] will automatically download and run this HTTP server without installing it in the project dependencies. +
+
+
+
+Http server running +
+
Figure 2. Http server running on localhost:8081.
+
+
+

 
+In another console instance run ionic serve (nx serve basic-ion-pwa if using Nx CLI) to open the common app (not built).

+
+
+
+Ionic serve on Visual Studio Code console +
+
Figure 3. Ionic server running on localhost:8100.
+
+
+

 
+The first difference can be found on Developer tools → application, here it is seen that the PWA application (left) has a service worker and the common one does not.

+
+
+
+Application comparison +
+
Figure 4. Application service worker comparison.
+
+
+

 
+If the "offline" box is checked, it will force a disconnection from network. In situations where users do not have connectivity or have a slow one, the PWA can still be accessed and used.

+
+
+
+Online offline apps +
+
Figure 5. Offline application.
+
+
+

 
+Finally, plugins like Lighthouse can be used to test whether an application is progressive or not.

+
+
+
+Lighthouse report +
+
Figure 6. Lighthouse report.
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-layout-with-angular-material.html b/docs/devon4ts/1.0/angular/guide-layout-with-angular-material.html new file mode 100644 index 00000000..7490c6b3 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-layout-with-angular-material.html @@ -0,0 +1,750 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Material Layout

+
+
+

The purpose of this guide is to get a basic understanding of creating layouts using Angular Material in a devon4ng application. We will create an application with a header containing some menu links and a sidenav with some navigation links.

+
+
+
+Finished application +
+
Figure 1. This is what the finished application will look like
+
+
+
+
+

Create a new angular application

+
+
+

We start with opening the devonfw IDE(right-click anywhere in your workspace and click "Open devonfw CMD shell here") and running the following command to start a project named devon4ng-mat-layout

+
+
+
    +
  • +

    ng new devon4ng-mat-layout --routing --style=scss. If you are using Nx, the command would be nx generate @nrwl/angular:app devon4ng-mat-layout --routing --style=scss in your Nx workspace. Click here to get started with using Nx.

    +
  • +
+
+
+

We are providing the routing flag so that a routing module is generated, and we are also setting the style sheet format to SCSS with --style=scss.

+
+
+

Once the creation process is complete, open your newly created application in Visual Studio Code. Try running the empty application by running the following command in the integrated terminal:

+
+
+
    +
  • +

    ng serve. (If you are using Nx, you have to specify the project name along with the --project flag, so the command becomes ng serve --project=devon4ng-mat-layout)

    +
  • +
+
+
+

Angular will spin up a server and you can check your application by visiting http://localhost:4200/ in your browser.

+
+
+
+Blank application +
+
Figure 2. Blank application
+
+
+
+
+

Adding Angular Material library to the project

+
+
+

Next we will add Angular Material to our application. In the integrated terminal, press Ctrl + C to terminate the running application and run the following command:

+
+
+
    +
  • +

    npm install --save @angular/material @angular/cdk @angular/animations

    +
  • +
+
+
+

You can also use Yarn to install the dependencies if you prefer that:

+
+
+
    +
  • +

    yarn add @angular/material @angular/cdk @angular/animations

    +
  • +
+
+
+

Once the dependencies are installed, we need to import the BrowserAnimationsModule in our AppModule for animations support.

+
+
+
Listing 1. Importing BrowserAnimationsModule in AppModule
+
+
import {BrowserAnimationsModule} from '@angular/platform-browser/animations';
+
+@NgModule({
+  ...
+  imports: [BrowserAnimationsModule],
+  ...
+})
+export class AppModule { }
+
+
+
+

Angular Material provides a host of components for designing our application. All the components are well structured into individual NgModules. For each component from the Angular Material library that we want to use, we have to import the respective NgModule.

+
+
+
Listing 2. We will be using the following components in our application:
+
+
import { MatIconModule, MatButtonModule, MatMenuModule, MatListModule, MatToolbarModule, MatSidenavModule } from '@angular/material';
+
+@NgModule({
+  ...
+  imports: [
+	...
+    MatIconModule,
+    MatButtonModule,
+    MatMenuModule,
+    MatListModule,
+    MatToolbarModule,
+    MatSidenavModule,
+	...
+	],
+  ...
+})
+export class AppModule { }
+
+
+
+

A better approach is to import and then export all the required components in a shared module. But for the sake of simplicity, we are importing all the required components in the AppModule itself.

+
+
+
+
+

==

+
+
+
+
  You can find a working copy of this application https://github.com/devonfw-sample/devon4ts-samples/tree/master/apps/angular-material-basic-layout[here]. The sample application is part of an Nx workspace, which means it is one of the many apps in a monorepo and capable of importing reusable code from a shared library. This guide describes the implementation by assuming a stand-alone single-repo application, but the pages and layout described in this sample app are similar to the ones used in another sample app in the monorepo (https://github.com/devonfw-sample/devon4ts-samples/tree/master/apps/angular-material-theming[angular-material-theming]), which is why we have exported the required components from a shared library and reused them in both the apps. As a result, the code in our monorepo will be slightly different. It would still help you in following this guide.
+== ==
+
+
+
+

Next, we include a theme in our application. Angular Material comes with four pre-defined themes: indigo-pink, deeppurple-amber, pink-bluegrey and purple-green. It is also possible to create our own custom theme, but that is beyond the scope of this guide. Including a theme is required to apply all of the core and theme styles to your application. +We will include the indigo-pink theme in our application by importing the indigo-pink.css file in our src/styles.scss:

+
+
+
Listing 3. In src/styles.scss:
+
+
@import "~@angular/material/prebuilt-themes/indigo-pink.css";
+
+
+
+

To use Material Design Icons along with the mat-icon component, we will load the Material Icons library in our src/index.html file

+
+
+
Listing 4. In src/index.html:
+
+
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
+
+
+
+
+
+

Development

+
+
+

Now that we have all the Angular Material related dependencies set up in our project, we can start coding. Let’s begin by adding a suitable margin and font to the body element of our single page application. We will add it in the src/styles.scss file to apply it globally:

+
+
+
Listing 5. In src/styles.scss:
+
+
body {
+  margin: 0;
+  font-family: "Segoe UI", Roboto, sans-serif;
+}
+
+
+
+

At this point, if we run our application, this is how it will look like:

+
+
+
+Angular Material added to the application +
+
Figure 3. Application with Angular Material set up
+
+
+

We will clear the app.component.html file and setup a header with a menu button and some navigational links. We will use mat-toolbar, mat-button, mat-menu, mat-icon and mat-icon-button for this:

+
+
+
Listing 6. app.component.html:
+
+
<mat-toolbar color="primary">
+  <button mat-icon-button aria-label="menu">
+    <mat-icon>menu</mat-icon>
+  </button>
+  <button mat-button [matMenuTriggerFor]="submenu">Menu 1</button>
+  <button mat-button>Menu 2</button>
+  <button mat-button>Menu 3</button>
+
+  <mat-menu #submenu="matMenu">
+    <button mat-menu-item>Sub-menu 1</button>
+    <button mat-menu-item [matMenuTriggerFor]="submenu2">Sub-menu 2</button>
+  </mat-menu>
+
+  <mat-menu #submenu2="matMenu">
+    <button mat-menu-item>Menu Item 1</button>
+    <button mat-menu-item>Menu Item 2</button>
+    <button mat-menu-item>Menu Item 3</button>
+  </mat-menu>
+
+</mat-toolbar>
+
+
+
+

The color attribute on the mat-toolbar element will give it the primary (indigo) color as defined by our theme. The color attribute works with most Angular Material components; the possible values are 'primary', 'accent' and 'warn'. +The mat-toolbar is a suitable component to represent a header. It serves as a placeholder for elements we want in our header. +Inside the mat-toolbar, we start with a button having mat-icon-button attribute, which itself contains a mat-icon element having the value menu. This will serve as a menu button which we can use to toggle the sidenav. +We follow it with some sample buttons having the mat-button attribute. Notice the first button has a property matMenuTriggerFor bound to a local reference submenu. As the property name suggests, the click of this button will display the mat-menu element with the specified local reference as a drop-down menu. The rest of the code is self explanatory.

+
+
+
+Header added to the application +
+
Figure 4. This is how our application looks with the first menu button (Menu 1) clicked.
+
+
+

We want to keep the sidenav toggling menu button on the left and move the rest to the right to make it look better. To do this we add a class to the menu icon button:

+
+
+
Listing 7. app.component.html:
+
+
...
+  <button mat-icon-button aria-label="menu" class="menu">
+    <mat-icon>menu</mat-icon>
+  </button>
+...
+
+
+
+

And in the app.component.scss file, we add the following style:

+
+
+
Listing 8. app.component.scss:
+
+
.menu {
+    margin-right: auto;
+}
+
+
+
+

The mat-toolbar element already has its display property set to flex. Setting the menu icon button’s margin-right property to auto keeps itself on the left and pushes the other elements to the right.

+
+
+
+Final look of the header +
+
Figure 5. Final look of the header.
+
+
+

Next, we will create a sidenav. But before that let's create a couple of components to navigate between, the links of which we will add to the sidenav. +We will use the ng generate component (or ng g c command for short) to create Home and Data components. (Append --project=devon4ng-mat-layout to the command in an Nx workspace). We nest them in the pages sub-directory since they represent our pages.

+
+
+
    +
  • +

    ng g c pages/home

    +
  • +
  • +

    ng g c pages/data;

    +
  • +
+
+
+

Let us set up the routing such that when we visit http://localhost:4200/ root url we see the HomeComponent and when we visit http://localhost:4200/data url we see the DataComponent. +We had opted for routing while creating the application, so we have the routing module app-routing.module.ts setup for us. In this file, we have the empty routes array where we set up our routes.

+
+
+
Listing 9. app-routing.module.ts:
+
+
import { HomeComponent } from './pages/home/home.component';
+import { DataComponent } from './pages/data/data.component';
+
+	const routes: Routes = [
+	  { path: '', component: HomeComponent },
+	  { path: 'data', component: DataComponent }
+	];
+
+
+
+

We need to provide a hook where the components will be loaded when their respective URLs are loaded. We do that by using the router-outlet directive in the app.component.html.

+
+
+
Listing 10. app.component.html:
+
+
...
+	</mat-toolbar>
+	<router-outlet></router-outlet>
+
+
+
+

Now when we visit the defined URLs we see the appropriate components rendered on screen.

+
+
+

Let's change the contents of the components to have something better.

+
+
+
Listing 11. home.component.html:
+
+
<h2>Home Page</h2>
+
+
+
+
Listing 12. home.component.scss:
+
+
h2 {
+    text-align: center;
+    margin-top: 50px;
+}
+
+
+
+
Listing 13. data.component.html:
+
+
<h2>Data Page</h2>
+
+
+
+
Listing 14. data.component.scss:
+
+
h2 {
+    text-align: center;
+    margin-top: 50px;
+}
+
+
+
+

The pages look somewhat better now:

+
+
+
+Home page +
+
Figure 6. Home page
+
+
+
+Data page +
+
Figure 7. Data page
+
+
+

Let us finally create the sidenav. To implement the sidenav we need to use 3 Angular Material components: mat-sidenav-container, mat-sidenav and mat-sidenav-content. +The mat-sidenav-container, as the name suggests, acts as a container for the sidenav and the associated content. So it is the parent element, and mat-sidenav and mat-sidenav-content are the children sibling elements. mat-sidenav represents the sidenav. We can put any content we want, though it is usually used to contain a list of navigational links. The mat-sidenav-content element is for containing the contents of our current page. Since we need the sidenav application-wide, we will put it in the app.component.html.

+
+
+
Listing 15. app.component.html:
+
+
...
+</mat-toolbar>
+
+<mat-sidenav-container>
+  <mat-sidenav mode="over" [disableClose]="false" #sidenav>
+    Sidenav
+  </mat-sidenav>
+  <mat-sidenav-content>
+    <router-outlet></router-outlet>
+  </mat-sidenav-content>
+</mat-sidenav-container>
+
+
+
+

The mat-sidenav has a mode property, which accepts one of the 3 values: over, push and side. It decides the behavior of the sidenav. mat-sidenav also has a disableClose property which accepts a boolean value. It toggles the behavior where we click on the backdrop or press the Esc key to close the sidenav. There are other properties which we can use to customize the appearance, behavior and position of the sidenav. You can find the properties documented online at https://material.angular.io/components/sidenav/api +We moved the router-outlet directive inside the mat-sidenav-content where it will render the routed component. +But if you check the running application in the browser, we don’t see the sidenav yet. That is because it is closed. We want to have the sidenav opened/closed at the click of the menu icon button on the left side of the header we implemented earlier. Notice we have set a local reference #sidenav on the mat-sidenav element. We can access this element and call its toggle() function to toggle open or close the sidenav.

+
+
+
Listing 16. app.component.html:
+
+
...
+  <button mat-icon-button aria-label="menu" class="menu" (click)="sidenav.toggle()">
+    <mat-icon>menu</mat-icon>
+  </button>
+...
+
+
+
+
+Sidenav works +
+
Figure 8. Sidenav is implemented
+
+
+

We can now open the sidenav by clicking the menu icon button. But it does not look right. The sidenav is only as wide as its content. Also the page does not stretch the entire viewport due to lack of content. +Let’s add the following styles to make the page fill the viewport:

+
+
+
Listing 17. app.component.scss:
+
+
...
+mat-sidenav-container {
+    position: absolute;
+    top: 64px;
+    left: 0;
+    right: 0;
+    bottom: 0;
+}
+
+
+
+

The sidenav width will be corrected when we add the navigational links to it. That is the only thing remaining to be done. Lets implement it now:

+
+
+
Listing 18. app.component.html:
+
+
...
+  <mat-sidenav [disableClose]="false" mode="over" #sidenav>
+	<mat-nav-list>
+      <a
+        id="home"
+        mat-list-item
+        [routerLink]="['./']"
+        (click)="sidenav.close()"
+        routerLinkActive="active"
+        [routerLinkActiveOptions]="{exact: true}"
+      >
+        <mat-icon matListAvatar>home</mat-icon>
+        <h3 matLine>Home</h3>
+        <p matLine>sample home page</p>
+      </a>
+      <a
+        id="sampleData"
+        mat-list-item
+        [routerLink]="['./data']"
+        (click)="sidenav.close()"
+        routerLinkActive="active"
+      >
+        <mat-icon matListAvatar>grid_on</mat-icon>
+        <h3 matLine>Data</h3>
+        <p matLine>sample data page</p>
+      </a>
+    </mat-nav-list>
+  </mat-sidenav>
+...
+
+
+
+

We use the mat-nav-list element to set a list of navigational links. We use the a tags with mat-list-item directive. We implement a click listener on each link to close the sidenav when it is clicked. The routerLink directive is used to provide the URLs to navigate to. The routerLinkActive directive is used to provide the class name which will be added to the link when its URL is visited. Here we name the class `active`. To style it, let's modify the app.component.scss file:

+
+
+
Listing 19. app.component.scss:
+
+
...
+mat-sidenav-container {
+...
+	a.active {
+        background: #8e8d8d;
+        color: #fff;
+
+        p {
+            color: #4a4a4a;
+        }
+    }
+}
+
+
+
+

Now we have a working application with a basic layout: a header with some menu and a sidenav with some navigational links.

+
+
+
+Finished application +
+
Figure 9. Finished application
+
+
+
+
+

Conclusion

+
+
+

The purpose of this guide was to provide a basic understanding of creating layouts with Angular Material. The Angular Material library has a huge collection of ready to use components which can be found at https://material.angular.io/components/categories +It has provided documentation and example usage for each of its components. Going through the documentation will give a better understanding of using Angular Material components in our devon4ng applications.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-layout-with-clarity-angular.html b/docs/devon4ts/1.0/angular/guide-layout-with-clarity-angular.html new file mode 100644 index 00000000..ae53fe55 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-layout-with-clarity-angular.html @@ -0,0 +1,675 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Clarity Layout

+
+
+

The purpose of this guide is to get a basic understanding of creating layouts using Angular Clarity in a devon4ng application. Angular Clarity is an HTML/CSS framework.

+
+
+
+1 +
+
Figure 1. This is what the finished application will look like
+
+
+
+
+

Let’s begin

+
+
+

We start with opening the console (in the Devon distribution folder) and running the following command to start a project named AngularClarityLayout.

+
+
+

devon ng new AngularClarityLayout

+
+
+

Select y when it asks whether it would like to add Angular routing and select SCSS when it asks for the style sheet format. You can also use the devonfw IDE CLI to create a new devon4ng application.

+
+
+

Once the creation process is complete, open your newly created application in Visual Studio Code. Try running the empty application by running the following command in the integrated terminal:

+
+
+

devon ng serve

+
+
+

Angular will spin up a server and you can check your application by visiting http://localhost:4200/ in your browser.

+
+
+
+2 +
+
Figure 2. Blank Application
+
+
+
+
+

Adding Angular Clarity framework to the project

+
+
+

Next we will add Angular Clarity to our application. In the integrated terminal, press CTRL + C to terminate the running application and run the following command:

+
+
+

Generate a new Angular application (if you haven’t already): +ng new my-app +Navigate to the directory: +cd my-app +Run the ng add command for Clarity: +ng add @clr/angular

+
+
+

After that we can see that the module is imported on app.module.ts

+
+
+
+
import { ClarityModule } from '@clr/angular';
+@NgModule({
+  declarations: [
+    AppComponent
+  ],
+imports: [
+    ClarityModule,
+ ],
+  providers: [],
+  bootstrap: [AppComponent]
+})
+export class AppModule { }
+
+
+
+
+3 +
+
Figure 3. ClarityModule
+
+
+
+
+

Development

+
+
+

After installing the library we can start to develop the code.

+
+
+

Adding styles in styles.css

+
+
+
+
body {
+  margin: 0;
+  font-family: "Segoe UI", Roboto, sans-serif;
+}
+
+
+
+

First thing that we need to do is the menu

+
+
+
+
<header class="header-6">
+  <div class="branding">
+    <a href="..." class="nav-link">
+      <clr-icon shape="vm-bug"></clr-icon>
+      <span class="title">Project Clarity</span>
+    </a>
+  </div>
+
+  <div class="header-nav">
+    <a href="..." class="active nav-link"><span class="nav-text">Dashboard</span></a>
+    <a href="..." class="nav-link"><span class="nav-text">Interactive Analytics</span></a>
+  </div>
+  <div class="header-actions">
+      <form class="search">
+        <label for="search_input">
+          <input id="search_input" type="text" placeholder="Search for keywords...">
+        </label>
+      </form>
+        <clr-dropdown>
+          <button class="nav-text" clrDropdownTrigger aria-label="open user profile">
+            devonfw@clarityangular
+            <clr-icon shape="caret down"></clr-icon>
+          </button>
+          <clr-dropdown-menu *clrIfOpen clrPosition="bottom-right">
+            <a href="..." clrDropdownItem>Settings</a>
+            <a href="..." clrDropdownItem>Log out</a>
+          </clr-dropdown-menu>
+        </clr-dropdown>
+<clr-dropdown>
+  <button class="nav-icon" clrDropdownTrigger aria-label="toggle settings menu">
+    <clr-icon shape="cog"></clr-icon>
+    <clr-icon shape="caret down"></clr-icon>
+  </button>
+  <clr-dropdown-menu *clrIfOpen clrPosition="bottom-right">
+    <a href="..." clrDropdownItem>About</a>
+    <a href="..." clrDropdownItem>Preferences</a>
+  </clr-dropdown-menu>
+</clr-dropdown>
+  </div>
+</header>
+
+
+
+
+4 +
+
Figure 4. Clarity Menu
+
+
+

The framework has its own CSS classes. +For example, the first class that we can see is header-6, which is a CSS style that changes the color of the menu. +We can also see that the framework provides a set of icons to choose from, using the tag +<clr-icon shape="vm-bug"></clr-icon> +The next div in the menu contains the navigation header. +As everywhere in this framework, it has its own CSS class: <div class="header-nav"> +We can see 2 <a> tags with different CSS classes. +The first one has the active class. The difference between the two looks like this.

+
+
+
+5 +
+
Figure 5. Difference
+
+
+

After seeing this piece of code, we can see that the other part of the menu has another CSS class: +<div class="header-actions"> +After this, all divs inside that last one will be aligned to the right.

+
+
+
+6 +
+
Figure 6. Search
+
+
+

To build this search bar, we just need to create a form with the class search: +<form class="search"> +To show the icon we use the tag <label for="search_input"></label> +and a normal input with id="search_input" to match the previous label.

+
+
+
+7 +
+
Figure 7. Dropdown
+
+
+

To do the menu-dropdown , we use the tag owned by the framework called +<clr-dropdown>

+
+
+
+
 <clr-dropdown>
+          <button class="nav-text" clrDropdownTrigger aria-label="open user profile">
+            devonfw@clarityangular
+            <clr-icon shape="caret down"></clr-icon>
+          </button>
+          <clr-dropdown-menu *clrIfOpen clrPosition="bottom-right">
+            <a href="..." clrDropdownItem>Settings</a>
+            <a href="..." clrDropdownItem>Log out</a>
+          </clr-dropdown-menu>
+  </clr-dropdown>
+
+
+
+
+8 +
+
Figure 8. Dropdown
+
+
+

The attribute clrDropdownTrigger is needed because otherwise the tag <clr-dropdown-menu> cannot work: that tag is only activated when clrDropdownTrigger is activated too. +Also, with the attribute clrPosition we can decide where the dropdown will be positioned.

+
+
+

For the other part of the menu, check the next figure.

+
+
+
+9 +
+
Figure 9. Button
+
+
+

It’s pretty much the same code but we just change the attribute aria-label and the icons.

+
+
+
+10 +
+
Figure 10. Button Logic
+
+
+

After we have the whole menu finished, time to see the card.

+
+
+
+11 +
+
Figure 11. Card
+
+
+

In the first figure, all the elements are aligned to the center. +To do this, we just need to use the classes provided by the framework. +<div class="clr-main-container"> +The first CSS class gives the style to the main container.

+
+
+

<div class="clr-row clr-justify-content-center">

+
+
+

This one says that the content is going to be arranged in rows and everything inside this div will be centered. + <div class="clr-col-lg-4"> +And the last one is the size of the div. This framework has a maximum of 12 columns, like Bootstrap. +More examples in: +https://clarity.design/documentation/grid

+
+
+

To create the card with its border and all its properties we just use the class +<div class="card"> +To create the tooltip, check the next figure.

+
+
+
+12 +
+
Figure 12. Tooltip
+
+
+

We just need to create an <a> tag with these attributes: + <a href="…​" role="tooltip" aria-haspopup="true" class="tooltip tooltip-bottom-right"> +The class gives us the tooltip and its position. +After that we have: +<clr-icon shape="info-circle" size="24"></clr-icon> +which gives us the icon and its size. +The content of the tooltip comes from: +<span class="tooltip-content"> +Once the tooltip is done, we just need to add an image and the text. +To do it we just need to write the following code

+
+
+
+
<div class="card-img">
+            <img src="../assets/images/clarity.png">
+          </div>
+          <div class="card-block">
+            <p class="card-text">
+              Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard
+              dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen
+              book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially
+              unchanged. It was popularised in the 1930s with the release of Letraset sheets containing Lorem Ipsum passages, and more
+              recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum.
+            </p>
+          </div>
+
+
+
+

For the next card, check next figure

+
+
+
+13 +
+
Figure 13. Card
+
+
+

We are using the same class from the card that we used before. +But to do the numbers on the top we used : +<span class="badge"> +And to give some colors we used: +<span class="badge badge-purple"> for example

+
+
+

The next step is to create the progress bars; to do it we just need to create a div with the class “progress-block”

+
+
+
+
        <div class="progress-block">
+              <label>Label</label>
+                <div class="progress-static">
+                  <div class="progress-meter" data-value="25"></div>
+                </div>
+              </div>
+
+
+
+

To give the bar that width and height we use the class “progress-static”. +Finally, to change the color and the value we use the classes “progress-meter” and “progress success”. +Depending on which class we are using, we will have different attributes to set the value. +If we use progress-static we write: +<div class="progress-meter" data-value="43"></div> +If we use progress success we write: +<progress value="75" max="100" data-displayval="…​%"></progress>

+
+
+

As you can see, the card has a footer. Check next picture

+
+
+
+14 +
+
Figure 14. Card
+
+
+

We just need to add a div with this class inside of the card div +<div class="card-footer"> +And will link to the card:

+
+
+

<a class="card-link" (click)="send()">Click to see the modal</a> +The method send() simply toggles the variable basic: it becomes true when it is false and false when it is true:

+
+
+
+
  basic = false;
+  send(): void {
+    this.basic = !this.basic;
+  }
+
+
+
+

So in the html file we need to write a div with an ngIf to check if the variable is true, and create a modal with the tag <clr-modal> and the attribute clrModalOpen bound to the same variable.

+
+
+
+
<div *ngIf="basic">
+        <clr-modal [(clrModalOpen)]="basic">
+
+
+
+

After this we need to create the body of the modal, to do it we will use a div with the classes from the framework

+
+
+
+
<div class="modal-body">
+            <p>But not much to say...</p>
+          </div>
+          <div class="modal-footer">
+            <button type="button" class="btn btn-primary" (click)="basic = true">OK</button>
+            <button type="button" class="btn btn-outline" (click)="basic = false">Cancel</button>          </div>
+        </clr-modal>
+
+
+
+

To create the body of the modal, we just create a div with the class +<div class="modal-body"> +And to create the footer: +<div class="modal-footer"> +We can see that the footer has 2 buttons, with different styles coming from the framework and with 2 click handlers setting different values. +(click)="basic = true" on the OK button: since the variable is already true, this does not change anything. +(click)="basic = false" on the Cancel button: this changes the value of the variable and closes the modal.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-layout-with-ng-zorro-layout.html b/docs/devon4ts/1.0/angular/guide-layout-with-ng-zorro-layout.html new file mode 100644 index 00000000..2625d9a3 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-layout-with-ng-zorro-layout.html @@ -0,0 +1,897 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

NG ZORRO Layout

+
+
+

The purpose of this guide is to get a basic understanding of creating layouts using NG ZORRO in a devon4ng application.

+
+
+
+figure1 +
+
Figure 1. This is what the finished application will look like
+
+
+
+
+

Let’s begin

+
+
+

Start by opening the console (in the Devon distribution folder) and running the following command to start a project named AngularZorroLayout: +devon ng new AngularZorroLayout

+
+
+
    +
  • +

    devon ng new AngularZorroLayout

    +
  • +
+
+
+

Select y when it asks whether it would like to add Angular routing and select scss when it asks for the style sheet format.

+
+
+

Once the creation process is complete, open your newly created application in Visual Studio Code. Try running the empty application by running the following command in the integrated terminal:

+
+
+
    +
  • +

    devon ng serve

    +
  • +
+
+
+

Angular will spin up a server and you can check your application by visiting http://localhost:4200/ in your browser.

+
+
+
+
+

Adding Angular ZORRO library to the project

+
+
+
Blank application
+

Next we will add NG ZORRO to our application. In the integrated terminal, press CTRL + C to terminate the running application and run the following command:

+
+
+
    +
  • +

    ng add ng-zorro-antd

    +
  • +
+
+
+
+figure3 +
+
Figure 2. CLI Angular ZORRO Layout
+
+
+

Or if we would like to customize our workflow we can install it with:

+
+
+
    +
  • +

    npm install ng-zorro-antd

    +
  • +
+
+
+

After running that command, we need to import the pre-built styles in angular.json

+
+
+
Listing 1. Styles on angular.json
+
+
"styles": [
+    "src/styles.scss",
+    "node_modules/ng-zorro-antd/src/ng-zorro-antd.min.css",
+    "node_modules/ng-zorro-antd/resizable/style/index.min.css"
+],
+
+
+
+

Once the dependencies are installed, we need to import the BrowserAnimationsModule in our AppModule for animations support.

+
+
+
Listing 2. Importing BrowserAnimationsModule in AppModule
+
+
import {BrowserAnimationsModule} from '@angular/platform-browser/animations';
+
+@NgModule({
+  ...
+  imports: [BrowserAnimationsModule],
+  ...
+})
+export class AppModule { }
+
+
+
+
+
+

Internationalization

+
+
+

The default language of ng-zorro-antd is Chinese . If you want to use other languages, you can follow the instructions below. You can also set the language with ng add ng-zorro-antd when creating project.

+
+
+

ng-zorro-antd provides several configuration tokens for global configuration of international copy and date, NZ_I18N for international copy.

+
+
+
Listing 3. Importing Configuration in App.module
+
+
import { NZ_I18N, en_US } from 'ng-zorro-antd/i18n';
+
+@NgModule({
+  ...
+  providers: [
+    { provide: NZ_I18N, useValue: en_US },
+  ...
+})
+export class AppModule { }
+
+
+
+

To finish the configuration, we need to import the icons from the Library.

+
+
+
Listing 4. Importing Icons in App.module
+
+
import * as AllIcons from '@ant-design/icons-angular/icons';
+
+const antDesignIcons = AllIcons as {
+  [key: string]: IconDefinition;
+};
+const icons: IconDefinition[] = Object.keys(antDesignIcons).map(key => antDesignIcons[key]);
+
+
+
+
+
+

Development

+
+
+

We have all the NG ZORRO related dependencies set up in our project, we can start coding.

+
+
+
Listing 5. Adding styles in styles.css
+
+
body {
+  margin: 0;
+  font-family: "Segoe UI", Roboto, sans-serif;
+}
+
+
+
+

The next step is to create a component for the header. We will create it with the next command. +We will create a components folder to follow good practices.

+
+
+

ng generate component components/header

+
+
+

In this component, we are going to create the menu.

+
+
+

First, we need to import the menu module on app.module.

+
+
+
Listing 6. Adding module in app.module
+
+
import { NzMenuModule } from 'ng-zorro-antd/menu';
+
+
+
+

And we will create the header with this code:

+
+
+
+
<ul nz-menu nzMode="horizontal" class="container">
+  <li nz-menu-item nzSelected>
+    <i nz-icon nzType="mail"></i>
+    Navigation One
+  </li>
+  <li nz-menu-item nzDisabled>
+    <i nz-icon nzType="appstore"></i>
+    Navigation Two
+  </li>
+  <li nz-submenu nzTitle="Navigation Three - Submenu" nzIcon="setting">
+    <ul>
+      <li nz-menu-group nzTitle="Modals">
+        <ul>
+             <li nz-menu-item nz-button (click)="info()"> Info</li>
+               <li nz-menu-item nz-button (click)="success()">Success</li>
+             <li nz-menu-item nz-button (click)="error()">Error</li>
+             <li nz-menu-item nz-button (click)="warning()">Warning</li>
+        </ul>
+      </li>
+      <li nz-menu-group nzTitle="Item 2">
+        <ul>
+          <li nz-menu-item>Option 3</li>
+          <li nz-submenu nzTitle="Sub Menu">
+            <ul>
+              <li nz-menu-item nzDisabled>Option 4</li>
+              <li nz-menu-item>Option 5</li>
+            </ul>
+          </li>
+          <li nz-submenu nzDisabled nzTitle="Disabled Sub Menu">
+            <ul>
+              <li nz-menu-item>Option 6</li>
+              <li nz-menu-item>Option 7</li>
+            </ul>
+          </li>
+        </ul>
+      </li>
+    </ul>
+  </li>
+  <li nz-menu-item>
+    <a href="https://ng.ant.design" target="_blank" rel="noopener noreferrer">Navigation Four - Link</a>
+  </li>
+</ul>
+
+
+
+
+figure4 +
+
Figure 3. Header component
+
+
+

Note +The menu has some properties like nzTitle, nzButton, nzDisabled or nzSelected.

+
+
+

And modify the styles on header.component.scss

+
+
+
Listing 7. Adding styles on header.scss
+
+
.container{
+  margin: auto;
+  text-align: center;
+}
+
+
+
+

The library has enough styles and we don’t need to change too much. +It will look like this:

+
+
+
+figure5 +
+
Figure 4. Header Component
+
+
+

In the menu, we added an example of a modal

+
+
+

To use it we need to import that module on app.module.ts

+
+
+
+
import { NzModalModule } from 'ng-zorro-antd/modal';
+
+
+
+

In the HTML file we just need to create a method on (click) to call the modal.

+
+
+
+
  <li nz-submenu nzTitle="Navigation Three - Submenu" nzIcon="setting">
+    <ul>
+      <li nz-menu-group nzTitle="Modals">
+        <ul>
+             <li nz-menu-item nz-button (click)="info()"> Info</li>
+               <li nz-menu-item nz-button (click)="success()">Success</li>
+             <li nz-menu-item nz-button (click)="error()">Error</li>
+             <li nz-menu-item nz-button (click)="warning()">Warning</li>
+        </ul>
+      </li>
+
+
+
+
+figure6 +
+
Figure 5. Modal
+
+
+

And now, we just need to create those methods in the file header.component.ts +Also, need to import the modal service and we use it in the constructor of the class.

+
+
+

import {NzModalService} from 'ng-zorro-antd/modal'; +constructor(private modal: NzModalService){}

+
+
+
+figure7 +
+
Figure 6. Import ModalService from ZORRO
+
+
+
+
  info(): void {
+    this.modal.info({
+      nzTitle: 'This is a notification message',
+      nzContent: '<p>some messages...some messages...</p><p>some messages...some messages...</p>',
+      nzOnOk: () => console.log('Info OK')
+    });
+  }
+
+  success(): void {
+    this.modal.success({
+      nzTitle: 'This is a success message',
+      nzContent: 'some messages...some messages...'
+    });
+  }
+
+  error(): void {
+    this.modal.error({
+      nzTitle: 'This is an error message',
+      nzContent: 'some messages...some messages...'
+    });
+  }
+
+  warning(): void {
+    this.modal.warning({
+      nzTitle: 'This is an warning message',
+      nzContent: 'some messages...some messages...'
+    });
+  }
+
+
+
+
+figure8 +
+
Figure 7. Logic on ts file looks like
+
+
+

Once the header is done, time to create the main component. In this case will be those elements.

+
+
+
+figure9 +
+
Figure 8. Main Component
+
+
+

The first element that we can see is a carousel. +To implement it in the code, we just need to do the same as before: import the module and use the component. +So we import the next module in app.module

+
+
+
Listing 8. Import carousel Module
+
+
import { NzCarouselModule } from 'ng-zorro-antd/carousel';
+
+
+
+

And use the label “nz-carousel” to create the Carousel, it has some attributes coming from the library.

+
+
+
+figure10 +
+
Figure 9. Import ModalService from ZORRO
+
+
+

NOTE: +The loop that we are doing determines how many images we will have. +And finally, we will add some styles.

+
+
+
+
.container{
+  margin: auto;
+  text-align: center;
+  margin-top: 20px;
+}
+[nz-carousel-content] {
+        text-align: center;
+        height: 160px;
+        line-height: 160px;
+        background: #364d79;
+        color: #fff;
+        overflow: hidden;
+      }
+
+      h3 {
+        color: #fff;
+        margin-bottom: 0;
+      }
+
+nz-content{
+  padding: 0 30px 0 30px;
+}
+
+
+
+
+figure11 +
+
Figure 10. Styling
+
+
+

Next element, the cards

+
+
+
+figure12 +
+
Figure 11. Cards1
+
+
+
+figure13 +
+
Figure 12. Cards Unlocked
+
+
+

We will have a button to activate or deactivate the cards. +To do it, we will write the next code in our file html.

+
+
+
+
        <div nz-row>
+          <div nz-col [nzXs]="{ span: 5, offset: 1 }" [nzLg]="{ span: 6, offset: 2 }">
+            <nz-card nzXs="8">
+              <nz-skeleton [nzActive]="true" [nzLoading]="loading" [nzAvatar]="{ size: 'large' }">
+                <nz-card-meta [nzAvatar]="avatarTemplate" nzTitle="Card title" nzDescription="This is the description">
+                </nz-card-meta>
+              </nz-skeleton>
+            </nz-card>
+          </div>
+          <div nz-col [nzXs]="{ span: 11, offset: 1 }" [nzLg]="{ span: 6, offset: 2 }">
+            <nz-card nzXs="8">
+              <nz-skeleton [nzActive]="true" [nzLoading]="!loading" [nzAvatar]="{ size: 'small' }">
+                <nz-card-meta [nzAvatar]="avatarTemplate" nzTitle="Card title" nzDescription="This is the description">
+                </nz-card-meta>
+              </nz-skeleton>
+            </nz-card>
+          </div>
+          <div nz-col [nzXs]="{ span: 5, offset: 1 }" [nzLg]="{ span: 6, offset: 2 }">
+            <nz-card nzXs="8">
+              <nz-skeleton [nzActive]="true" [nzLoading]="loading" [nzAvatar]="{ size: 'large' }">
+                <nz-card-meta [nzAvatar]="avatarTemplate" nzTitle="Card title" nzDescription="This is the description">
+                </nz-card-meta>
+              </nz-skeleton>
+            </nz-card>
+          </div>
+        </div>
+
+
+
+
+figure14 +
+
Figure 13. Cards HTML
+
+
+

The first thing that we can see is a button to toggle the cards' visibility. +So, first, we need to import that switch module.

+
+
+

import { NzSwitchModule } from 'ng-zorro-antd/switch';

+
+
+

The next step is to write the HTML code. It’s simple:

+
+
+

<nz-switch [(ngModel)]="loading"></nz-switch>

+
+
+

So now, in the ts file we just need to create a boolean variable. +With ngModel and the switch, each time we click the button the variable will toggle between true and false. +After creating the button, we are going to create the card.

+
+
+

Need to import the following module on app.module +import { NzCardModule } from 'ng-zorro-antd/card'; +And after that we need to write the HTML code

+
+
+
+figure15 +
+
Figure 14. Cards Logic
+
+
+

We will find a lot of attributes. +We can find their explanation in the API: +NG ZORRO

+
+
+

Last Element, the table

+
+
+
+figure16 +
+
Figure 15. Table
+
+
+

We need to import the module +import { NzTableModule } from 'ng-zorro-antd/table';

+
+
+

After that we can see a button, this is just to create a new row in the table. +The button only has a method to add a new value to our array

+
+
+

Table Interface

+
+
+
+
interface ItemData {
+  id: string;
+  name: string;
+  age: string;
+  address: string;
+}
+
+
+
+
+figure17 +
+
Figure 16. Table Interface
+
+
+

Add Row Method

+
+
+
+
  addRow(): void {
+    this.listOfData = [
+      ...this.listOfData,
+      {
+        id: `${this.i}`,
+        name: `Edward King ${this.i}`,
+        age: '32',
+        address: `London, Park Lane no. ${this.i}`
+      }
+    ];
+    this.i++;
+  }
+
+
+
+
+figure18 +
+
Figure 17. Add Method
+
+
+

After that we need to create the table

+
+
+
+
<nz-table #editRowTable nzBordered [nzData]="listOfData">
+          <thead>
+            <tr>
+              <th nzWidth="30%">Name</th>
+              <th>Age</th>
+              <th>Address</th>
+              <th>Action</th>
+            </tr>
+          </thead>
+          <tbody>
+            <tr *ngFor="let data of editRowTable.data" class="editable-row">
+              <td>
+                <div class="editable-cell" [hidden]="editId == data.id" (click)="startEdit(data.id)">
+                  {{ data.name }}
+                </div>
+                <input [hidden]="editId !==  data.id" type="text" nz-input [(ngModel)]="data.name" (blur)="stopEdit()" />
+              </td>
+              <td>{{ data.age }}</td>
+              <td>{{ data.address }}</td>
+              <td>
+                <a nz-popconfirm nzPopconfirmTitle="Sure to delete?" (nzOnConfirm)="deleteRow(data.id)">Delete</a>
+              </td>
+            </tr>
+          </tbody>
+        </nz-table>
+
+
+
+
+figure19 +
+
Figure 18. Table HTML Logic
+
+
+

To create the table we need to use the tag <nz-table>, and after that it is like an HTML table, with <thead> and <tbody>

+
+
+

As shown with the *ngFor, we are displaying the data from the array created before. +In the first cell we can see that we have a method to edit the value.

+
+
+
+figure20 +
+
Figure 19. Table methods
+
+
+
+figure21 +
+
Figure 20. Table
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-layout-with-primeng-angular.html b/docs/devon4ts/1.0/angular/guide-layout-with-primeng-angular.html new file mode 100644 index 00000000..407520d1 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-layout-with-primeng-angular.html @@ -0,0 +1,1721 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

PrimeNG Layout

+
+
+

The purpose of this guide is to get a basic understanding of creating layouts with PrimeNG in a devon4ng application. PrimeNG is an HTML/CSS framework.

+
+
+
+Screenshot 0 +
+
Figure 1. This is what the finished application will look like
+
+
+
+
+

Let’s begin

+
+
+

We start by opening the console (in the Devon distribution folder) and running the following command to start a project named AngularPrimeNgLayout.

+
+
+

devon ng new AngularPrimeNgLayout

+
+
+

Select y when it asks whether it would like to add Angular routing and select SCSS when it asks for the style sheet format. You can also use the devonfw IDE CLI to create a new devon4ng application.

+
+
+

Once the creation process is complete, open your newly created application in Visual Studio Code. Try running the empty application by running the following command in the integrated terminal:

+
+
+

devon ng serve

+
+
+

Angular will spin up a server and you can check your application by visiting http://localhost:4200/ in your browser.

+
+
+
+Screenshot 1 +
+
Figure 2. Blank Application
+
+
+
+
+

Adding PrimeNG to the project

+
+
+

Next we will add PrimeNG to our application. In the integrated terminal, press CTRL + C to terminate the running application and run the following command:

+
+
+

Run the ng add command for PrimeNG:

+
+
+
+
npm install primeng
+npm install primeicons --save
+
+
+
+

After that we can see that the module is imported on app.module.ts

+
+
+

The CSS dependencies are as follows: Prime Icons, a theme of your choice, and the structural CSS of the components.

+
+
+
+
 "src/styles.scss",
+ "node_modules/primeicons/primeicons.css",
+ "node_modules/primeng/resources/themes/saga-blue/theme.css",
+ "node_modules/primeng/resources/primeng.min.css"
+
+
+
+
+Screenshot 2 +
+
Figure 3. Styles on angular.json
+
+
+
+
+

Development

+
+
+

Now we need to create a component for the header. We will create it with the following command. +We will create a components folder to follow good practices.

+
+
+
+
ng generate component components/header
+
+
+
+

In this component, we are going to create the menu.

+
+
+
+Screenshot 5 +
+
Figure 4. Menu
+
+
+
+Screenshot 6 +
+
Figure 5. Menu Dropdown
+
+
+

And will create the code like:

+
+
+
+Screenshot 3 +
+
Figure 6. Header
+
+
+
+
<p-menubar [model]="items">
+  <ng-template pTemplate="start">
+    <img src="assets/images/primeng.svg" height="40" class="p-mr-2">
+  </ng-template>
+</p-menubar>
+
+
+
+

As we can see, the menu has some properties from the library.

+
+
+

<p-menubar> is the first one, with this label we can create the menu and with the <ng-template pTemplate> we decided where the menu will be aligned.

+
+
+

The [model]=items means that the menu is looking for the "items" to print.

+
+
+

The items property is an array, but its type comes from PrimeNG. So we just need to import MenuItem.

+
+
+
+
import { MenuItem } from 'primeng/api';`
+
+
+
+

And give some values.

+
+
+
+
this.items = [
+      {
+        label: 'File',
+        icon: 'pi pi-fw pi-file',
+        items: [
+          {
+            label: 'New',
+            icon: 'pi pi-fw pi-plus',
+            items: [
+              {
+                label: 'Bookmark',
+                icon: 'pi pi-fw pi-bookmark'
+              },
+              {
+                label: 'Video',
+                icon: 'pi pi-fw pi-video'
+              },
+
+            ]
+          },
+          {
+            label: 'Delete',
+            icon: 'pi pi-fw pi-trash'
+          },
+          {
+            separator: true
+          },
+          {
+            label: 'Export',
+            icon: 'pi pi-fw pi-external-link'
+          }
+        ]
+      },
+      {
+        label: 'Edit',
+        icon: 'pi pi-fw pi-pencil',
+        items: [
+          {
+            label: 'Left',
+            icon: 'pi pi-fw pi-align-left'
+          },
+          {
+            label: 'Right',
+            icon: 'pi pi-fw pi-align-right'
+          },
+          {
+            label: 'Center',
+            icon: 'pi pi-fw pi-align-center'
+          },
+          {
+            label: 'Justify',
+            icon: 'pi pi-fw pi-align-justify'
+          },
+
+        ]
+      },
+      {
+        label: 'Users',
+        icon: 'pi pi-fw pi-user',
+        items: [
+          {
+            label: 'New',
+            icon: 'pi pi-fw pi-user-plus',
+
+          },
+          {
+            label: 'Delete',
+            icon: 'pi pi-fw pi-user-minus',
+
+          },
+          {
+            label: 'Search',
+            icon: 'pi pi-fw pi-users',
+            items: [
+              {
+                label: 'Filter',
+                icon: 'pi pi-fw pi-filter',
+                items: [
+                  {
+                    label: 'Print',
+                    icon: 'pi pi-fw pi-print'
+                  }
+                ]
+              },
+              {
+                icon: 'pi pi-fw pi-bars',
+                label: 'List'
+              }
+            ]
+          }
+        ]
+      },
+      {
+        label: 'Events',
+        icon: 'pi pi-fw pi-calendar',
+        items: [
+          {
+            label: 'Edit',
+            icon: 'pi pi-fw pi-pencil',
+            items: [
+              {
+                label: 'Save',
+                icon: 'pi pi-fw pi-calendar-plus'
+              },
+              {
+                label: 'Delete',
+                icon: 'pi pi-fw pi-calendar-minus'
+              },
+
+            ]
+          },
+          {
+            label: 'Archieve',
+            icon: 'pi pi-fw pi-calendar-times',
+            items: [
+              {
+                label: 'Remove',
+                icon: 'pi pi-fw pi-calendar-minus'
+              }
+            ]
+          }
+        ]
+      },
+      {
+        label: 'Quit',
+        icon: 'pi pi-fw pi-power-off'
+      }
+    ];
+  }
+
+
+
+
+Screenshot 4 +
+
Figure 7. Menu Values
+
+
+

After the menu is done, the next step is to create the main container, which in this case will be the table.

+
+
+
+Screenshot 7 +
+
Figure 8. Table
+
+
+

As this is a very complex table, we are going to explain it component by component.

+
+
+
+Screenshot 8 +
+
Figure 9. Buttons
+
+
+

To create those buttons we just need to write this piece of code

+
+
+
+
<p-toolbar styleClass="p-mb-4">
+    <ng-template pTemplate="left">
+      <button pButton pRipple label="New" icon="pi pi-plus" class="p-button-success p-mr-2"
+        (click)="openNew()"></button>
+      <button pButton pRipple label="Delete" icon="pi pi-trash" class="p-button-danger"
+        (click)="deleteSelectedProducts()" [disabled]="!selectedProducts || !selectedProducts.length"></button>
+    </ng-template>
+
+    <ng-template pTemplate="right">
+      <p-fileUpload mode="basic" accept="image/*" [maxFileSize]="1000000" label="Import" chooseLabel="Import"
+        class="p-mr-2 p-d-inline-block"></p-fileUpload>
+      <button pButton pRipple label="Export" icon="pi pi-upload" class="p-button-help"></button>
+    </ng-template>
+  </p-toolbar>
+
+
+
+
+Screenshot 9 +
+
Figure 10. Buttons Code
+
+
+

We can see some labels and attributes, for example <p-toolbar>, pButton, <p-fileUpload>.

+
+
+

To use them, we need to import on app.module with the following code

+
+
+
+
import { TableModule } from 'primeng/table';
+import { ButtonModule } from 'primeng/button';
+import {ToolbarModule} from 'primeng/toolbar';
+import {FileUploadModule} from 'primeng/fileupload';
+
+
+
+

The first method is openNew(). When we call this method, a variable is going to be set to true.

+
+
+
+
  openNew(): any {
+    this.product = {};
+    this.submitted = false;
+    this.productDialog = true;
+  }
+
+
+
+

And when productDialog is true, we will open a modal with the following code, which will look like this:

+
+
+
+Screenshot 11 +
+
Figure 11. Modal
+
+
+
+
<p-dialog [(visible)]="productDialog" [style]="{width: '450px'}" header="Product Details" [modal]="true"
+  styleClass="p-fluid">
+  <ng-template pTemplate="content">
+    <div class="p-field">
+      <label for="name">Name</label>
+      <input type="text" pInputText id="name" [(ngModel)]="product.name" required autofocus />
+      <small class="p-invalid" *ngIf="submitted && !product.name">Name is required.</small>
+    </div>
+    <div class="p-field">
+      <label for="description">Description</label>
+      <textarea id="description" pInputTextarea [(ngModel)]="product.description" required rows="3"
+        cols="20"></textarea>
+    </div>
+
+    <div class="p-field">
+      <label class="p-mb-3">Category</label>
+      <div class="p-formgrid p-grid">
+        <div class="p-field-radiobutton p-col-6">
+          <p-radioButton id="category1" name="category" value="Accessories" [(ngModel)]="product.category">
+          </p-radioButton>
+          <label for="category1">Accessories</label>
+        </div>
+        <div class="p-field-radiobutton p-col-6">
+          <p-radioButton id="category2" name="category" value="Clothing" [(ngModel)]="product.category"></p-radioButton>
+          <label for="category2">Clothing</label>
+        </div>
+        <div class="p-field-radiobutton p-col-6">
+          <p-radioButton id="category3" name="category" value="Electronics" [(ngModel)]="product.category">
+          </p-radioButton>
+          <label for="category3">Electronics</label>
+        </div>
+        <div class="p-field-radiobutton p-col-6">
+          <p-radioButton id="category4" name="category" value="Fitness" [(ngModel)]="product.category"></p-radioButton>
+          <label for="category4">Fitness</label>
+        </div>
+      </div>
+    </div>
+
+    <div class="p-formgrid p-grid">
+      <div class="p-field p-col">
+        <label for="price">Price</label>
+        <p-inputNumber id="price" [(ngModel)]="product.price" mode="currency" currency="USD" locale="en-US">
+        </p-inputNumber>
+      </div>
+      <div class="p-field p-col">
+        <label for="quantity">Quantity</label>
+        <p-inputNumber id="quantity" [(ngModel)]="product.quantity"></p-inputNumber>
+      </div>
+    </div>
+  </ng-template>
+
+  <ng-template pTemplate="footer">
+    <button pButton pRipple label="Cancel" icon="pi pi-times" class="p-button-text" (click)="hideDialog()"></button>
+    <button pButton pRipple label="Save" icon="pi pi-check" class="p-button-text" (click)="saveProduct()"></button>
+  </ng-template>
+</p-dialog>
+
+
+
+
+Screenshot 10 +
+
Figure 12. Modal Code
+
+
+

To start developing this, we need to import DialogModule, ConfirmDialogModule, InputTextModule, RadioButtonModule and `FormsModule`. To do it, we just need to write the following in app.module:

+
+
+
+
import { DialogModule } from 'primeng/dialog';
+import { ConfirmDialogModule } from 'primeng/confirmdialog';
+import {FormsModule} from '@angular/forms';
+import { RadioButtonModule } from 'primeng/radiobutton';
+import { InputTextModule } from 'primeng/inputtext';
+
+
+
+
+Screenshot 11 +
+
Figure 13. Modal Code
+
+
+

After that we can see a modal with the form, and when we click on the "Save" button, we will create a new product.

+
+
+
+
  saveProduct(): any {
+    this.submitted = true;
+
+    if (this.product.name.trim()) {
+      if (this.product.id) {
+        this.products[this.findIndexById(this.product.id)] = this.product;
+        this.messageService.add({ severity: 'success', summary: 'Successful', detail: 'Product Updated', life: 3000 });
+      }
+      else {
+        this.product.id = this.createId();
+        this.product.image = 'product-placeholder.svg';
+        this.products.push(this.product);
+        this.messageService.add({ severity: 'success', summary: 'Successful', detail: 'Product Created', life: 3000 });
+      }
+
+      this.products = [...this.products];
+      this.productDialog = false;
+      this.product = {};
+    }
+  }
+
+
+
+

After finishing the first buttons, we just need to do the rest of the table.

+
+
+
+
<p-table #dt [value]="products" [rows]="10" [paginator]="true"
+    [globalFilterFields]="['name','country.name','representative.name','status']" [(selection)]="selectedProducts"
+    [rowHover]="true" dataKey="id" currentPageReportTemplate="Showing {first} to {last} of {totalRecords} entries"
+    [showCurrentPageReport]="true">
+    <ng-template pTemplate="caption">
+      <div class="p-d-flex p-ai-center p-jc-between">
+        <h5 class="p-m-0">Manage Products</h5>
+        <span class="p-input-icon-left">
+          <i class="pi pi-search"></i>
+          <input pInputText type="text" (input)="dt.filterGlobal($event.target.value, 'contains')"
+            placeholder="Search..." />
+        </span>
+      </div>
+    </ng-template>
+    <ng-template pTemplate="header">
+      <tr>
+        <th style="width: 3rem">
+          <p-tableHeaderCheckbox></p-tableHeaderCheckbox>
+        </th>
+        <th pSortableColumn="name">Name <p-sortIcon field="name"></p-sortIcon>
+        </th>
+        <th pSortableColumn="price">Price <p-sortIcon field="price"></p-sortIcon>
+        </th>
+        <th pSortableColumn="category">Category <p-sortIcon field="category"></p-sortIcon>
+        </th>
+        <th pSortableColumn="rating">Reviews <p-sortIcon field="rating"></p-sortIcon>
+        </th>
+        <th pSortableColumn="inventoryStatus">Status <p-sortIcon field="inventoryStatus"></p-sortIcon>
+        </th>
+        <th></th>
+      </tr>
+    </ng-template>
+    <ng-template pTemplate="body" let-product>
+      <tr>
+        <td>
+          <p-tableCheckbox [value]="product"></p-tableCheckbox>
+        </td>
+        <td>{{product.name}}</td>
+        <td>{{product.price | currency:'USD'}}</td>
+        <td>{{product.category}}</td>
+        <td>
+          <p-rating [ngModel]="product.rating" [readonly]="true" [cancel]="false"></p-rating>
+        </td>
+        <td><span
+            [class]="'product-badge status-' + product.inventoryStatus.toLowerCase()">{{product.inventoryStatus}}</span>
+        </td>
+        <td>
+          <button pButton pRipple icon="pi pi-pencil" class="p-button-rounded p-button-success p-mr-2"
+            (click)="editProduct(product)"></button>
+          <button pButton pRipple icon="pi pi-trash" class="p-button-rounded p-button-warning"
+            (click)="deleteProduct(product)"></button>
+        </td>
+      </tr>
+    </ng-template>
+    <ng-template pTemplate="summary">
+      <div class="p-d-flex p-ai-center p-jc-between">
+        In total there are {{products ? products.length : 0 }} products.
+      </div>
+    </ng-template>
+  </p-table>
+
+
+
+
+Screenshot 12 +
+
Figure 14. Table Code
+
+
+

After that, we need to add some styles to the code.

+
+
+
+
:host ::ng-deep {
+    .p-paginator {
+        .p-paginator-current {
+            margin-left: auto;
+        }
+    }
+
+    .p-progressbar {
+        height: .5rem;
+        background-color: #D8DADC;
+
+        .p-progressbar-value {
+            background-color: #607D8B;
+        }
+    }
+
+    .table-header {
+        display: flex;
+        justify-content: space-between;
+    }
+
+    .p-calendar .p-datepicker {
+        min-width: 25rem;
+
+        td {
+            font-weight: 400;
+        }
+    }
+
+    .p-datatable.p-datatable-customers {
+        .p-datatable-header {
+            padding: 1rem;
+            text-align: left;
+            font-size: 1.5rem;
+        }
+
+        .p-paginator {
+            padding: 1rem;
+        }
+
+        .p-datatable-thead > tr > th {
+            text-align: left;
+        }
+
+        .p-datatable-tbody > tr > td {
+            cursor: auto;
+        }
+
+        .p-dropdown-label:not(.p-placeholder) {
+            text-transform: uppercase;
+        }
+    }
+
+    /* Responsive */
+    .p-datatable-customers .p-datatable-tbody > tr > td .p-column-title {
+        display: none;
+    }
+}
+
+@media screen and (max-width: 960px) {
+    :host ::ng-deep {
+        .p-datatable {
+            &.p-datatable-customers {
+                .p-datatable-thead > tr > th,
+                .p-datatable-tfoot > tr > td {
+                    display: none !important;
+                }
+
+                .p-datatable-tbody > tr {
+                    border-bottom: 1px solid var(--layer-2);
+
+                    > td {
+                        text-align: left;
+                        display: block;
+                        border: 0 none !important;
+                        width: 100% !important;
+                        float: left;
+                        clear: left;
+                        border: 0 none;
+
+                        .p-column-title {
+                            padding: .4rem;
+                            min-width: 30%;
+                            display: inline-block;
+                            margin: -.4rem 1rem -.4rem -.4rem;
+                            font-weight: bold;
+                        }
+
+                        .p-progressbar {
+                            margin-top: .5rem;
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+}
+
+
+
+
+tablestyle +
+
Figure 15. Table CSS
+
+
+

As we can see, we have some values already loaded, like products, and some attributes that we need to import to use the table correctly.

+
+
+

All the modules need to be in app.module

+
+
+
+
import { BrowserModule } from '@angular/platform-browser';
+import { NgModule } from '@angular/core';
+
+import { AppRoutingModule } from './app-routing.module';
+import { AppComponent } from './app.component';
+import { HeaderComponent } from './components/header/header.component';
+
+import { MenubarModule } from 'primeng/menubar';
+import { HttpClientModule } from '@angular/common/http';
+import { TableModule } from 'primeng/table';
+import { CalendarModule } from 'primeng/calendar';
+import { SliderModule } from 'primeng/slider';
+import { DialogModule } from 'primeng/dialog';
+import { MultiSelectModule } from 'primeng/multiselect';
+import { ContextMenuModule } from 'primeng/contextmenu';
+import { ButtonModule } from 'primeng/button';
+import { ToastModule } from 'primeng/toast';
+import { InputTextModule } from 'primeng/inputtext';
+import { ProgressBarModule } from 'primeng/progressbar';
+import { DropdownModule } from 'primeng/dropdown';
+import {ToolbarModule} from 'primeng/toolbar';
+import {FileUploadModule} from 'primeng/fileupload';
+import {RatingModule} from 'primeng/rating';
+import { RadioButtonModule } from 'primeng/radiobutton';
+import { InputNumberModule } from 'primeng/inputnumber';
+import { ConfirmDialogModule } from 'primeng/confirmdialog';
+import { ConfirmationService, MessageService } from 'primeng/api';
+import { ProductService } from './services/product.service';
+import { InputTextareaModule } from 'primeng/inputtextarea';
+import {FormsModule} from '@angular/forms';
+
+import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
+import { NoopAnimationsModule } from '@angular/platform-browser/animations';
+
+
+@NgModule({
+  declarations: [AppComponent, HeaderComponent],
+  imports: [
+    BrowserModule,
+    BrowserAnimationsModule,
+    NoopAnimationsModule,
+    AppRoutingModule,
+    MenubarModule,
+    TableModule,
+    CalendarModule,
+    SliderModule,
+    DialogModule,
+    MultiSelectModule,
+    ContextMenuModule,
+    ButtonModule,
+    ToastModule,
+    InputTextModule,
+    ProgressBarModule,
+    DropdownModule,
+    ToolbarModule,
+    FileUploadModule,
+    RatingModule,
+    RadioButtonModule,
+    InputNumberModule,
+    ConfirmDialogModule,
+    InputTextareaModule,
+    FormsModule,
+    HttpClientModule,
+  ],
+
+
+
+
+Screenshot 13 +
+
Figure 16. All modules imported
+
+
+

As we can see, the first thing that the table does is load all the products that we have.

+
+
+

To do it, we will create a service to get all the data.

+
+
+

To create a service, we need to use the following command:

+
+
+

ng generate service services/product

+
+
+

In the service we are simulating an endpoint to get data.

+
+
+

We will have our products "hardcoded" and the methods to get or to set some values.

+
+
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Product } from '../models/product';
+
+@Injectable({
+  providedIn: 'root'
+})
+export class ProductService {
+  status: string[] = ['OUTOFSTOCK', 'INSTOCK', 'LOWSTOCK'];
+
+  productNames: string[] = [
+    'Bamboo Watch',
+    'Black Watch',
+    'Blue Band',
+    'Blue T-Shirt',
+    'Bracelet',
+    'Brown Purse',
+    'Chakra Bracelet',
+    'Galaxy Earrings',
+    'Game Controller',
+    'Gaming Set',
+    'Gold Phone Case',
+    'Green Earbuds',
+    'Green T-Shirt',
+    'Grey T-Shirt',
+    'Headphones',
+    'Light Green T-Shirt',
+    'Lime Band',
+    'Mini Speakers',
+    'Painted Phone Case',
+    'Pink Band',
+    'Pink Purse',
+    'Purple Band',
+    'Purple Gemstone Necklace',
+    'Purple T-Shirt',
+    'Shoes',
+    'Sneakers',
+    'Teal T-Shirt',
+    'Yellow Earbuds',
+    'Yoga Mat',
+    'Yoga Set',
+  ];
+
+  constructor(private http: HttpClient) { }
+
+  getProductsSmall(): any {
+    return this.http.get<any>('assets/products-small.json')
+      .toPromise()
+      .then(res => res.data as Product[])
+      .then(data => data);
+  }
+
+  getProducts(): any {
+    return this.http.get<any>('assets/products.json')
+      .toPromise()
+      .then(res => res.data as Product[])
+      .then(data => data);
+  }
+
+  getProductsWithOrdersSmall(): any {
+    return this.http.get<any>('assets/products-orders-small.json')
+      .toPromise()
+      .then(res => res.data as Product[])
+      .then(data => data);
+  }
+
+  generatePrduct(): Product {
+    const product: Product = {
+      id: this.generateId(),
+      name: this.generateName(),
+      description: 'Product Description',
+      price: this.generatePrice(),
+      quantity: this.generateQuantity(),
+      category: 'Product Category',
+      inventoryStatus: this.generateStatus(),
+      rating: this.generateRating()
+    };
+
+    product.image = product.name.toLocaleLowerCase().split(/[ ,]+/).join('-') + '.jpg';
+    return product;
+  }
+
+  generateId(): string {
+    let text = '';
+    const possible = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
+
+    for (let  i = 0; i < 5; i++) {
+      text += possible.charAt(Math.floor(Math.random() * possible.length));
+    }
+
+    return text;
+  }
+
+
+  generateName(): any {
+    return this.productNames[Math.floor(Math.random() * Math.floor(30))];
+  }
+
+  generatePrice(): any {
+    return Math.floor(Math.random() * Math.floor(299) + 1);
+  }
+
+  generateQuantity(): any {
+    return Math.floor(Math.random() * Math.floor(75) + 1);
+  }
+
+  generateStatus(): any {
+    return this.status[Math.floor(Math.random() * Math.floor(3))];
+  }
+
+  generateRating(): any {
+    return Math.floor(Math.random() * Math.floor(5) + 1);
+  }
+}
+
+
+
+
+Screenshot 14 +
+
Figure 17. Product Service
+
+
+

We also create an interface for the Product, so all the products will have the same structure:

+
+
+
+
export interface Product {
+  id?: string;
+  code?: string;
+  name?: string;
+  description?: string;
+  price?: number;
+  quantity?: number;
+  inventoryStatus?: string;
+  category?: string;
+  image?: string;
+  rating?: number;
+}
+
+
+
+
+Screenshot 15 +
+
Figure 18. Product Interface
+
+
+

As we can see in the methods, we are getting the data from a hardcoded file, products.json.

+
+
+
+
{
+	"data": [
+		{
+			"id": "1000",
+			"code": "f230fh0g3",
+			"name": "Bamboo Watch",
+			"description": "Product Description",
+			"image": "bamboo-watch.jpg",
+			"price": 65,
+			"category": "Accessories",
+			"quantity": 24,
+			"inventoryStatus": "INSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1001",
+			"code": "nvklal433",
+			"name": "Black Watch",
+			"description": "Product Description",
+			"image": "black-watch.jpg",
+			"price": 72,
+			"category": "Accessories",
+			"quantity": 61,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1002",
+			"code": "zz21cz3c1",
+			"name": "Blue Band",
+			"description": "Product Description",
+			"image": "blue-band.jpg",
+			"price": 79,
+			"category": "Fitness",
+			"quantity": 2,
+			"inventoryStatus": "LOWSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1003",
+			"code": "244wgerg2",
+			"name": "Blue T-Shirt",
+			"description": "Product Description",
+			"image": "blue-t-shirt.jpg",
+			"price": 29,
+			"category": "Clothing",
+			"quantity": 25,
+			"inventoryStatus": "INSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1004",
+			"code": "h456wer53",
+			"name": "Bracelet",
+			"description": "Product Description",
+			"image": "bracelet.jpg",
+			"price": 15,
+			"category": "Accessories",
+			"quantity": 73,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1005",
+			"code": "av2231fwg",
+			"name": "Brown Purse",
+			"description": "Product Description",
+			"image": "brown-purse.jpg",
+			"price": 120,
+			"category": "Accessories",
+			"quantity": 0,
+			"inventoryStatus": "OUTOFSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1006",
+			"code": "bib36pfvm",
+			"name": "Chakra Bracelet",
+			"description": "Product Description",
+			"image": "chakra-bracelet.jpg",
+			"price": 32,
+			"category": "Accessories",
+			"quantity": 5,
+			"inventoryStatus": "LOWSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1007",
+			"code": "mbvjkgip5",
+			"name": "Galaxy Earrings",
+			"description": "Product Description",
+			"image": "galaxy-earrings.jpg",
+			"price": 34,
+			"category": "Accessories",
+			"quantity": 23,
+			"inventoryStatus": "INSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1008",
+			"code": "vbb124btr",
+			"name": "Game Controller",
+			"description": "Product Description",
+			"image": "game-controller.jpg",
+			"price": 99,
+			"category": "Electronics",
+			"quantity": 2,
+			"inventoryStatus": "LOWSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1009",
+			"code": "cm230f032",
+			"name": "Gaming Set",
+			"description": "Product Description",
+			"image": "gaming-set.jpg",
+			"price": 299,
+			"category": "Electronics",
+			"quantity": 63,
+			"inventoryStatus": "INSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1010",
+			"code": "plb34234v",
+			"name": "Gold Phone Case",
+			"description": "Product Description",
+			"image": "gold-phone-case.jpg",
+			"price": 24,
+			"category": "Accessories",
+			"quantity": 0,
+			"inventoryStatus": "OUTOFSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1011",
+			"code": "4920nnc2d",
+			"name": "Green Earbuds",
+			"description": "Product Description",
+			"image": "green-earbuds.jpg",
+			"price": 89,
+			"category": "Electronics",
+			"quantity": 23,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1012",
+			"code": "250vm23cc",
+			"name": "Green T-Shirt",
+			"description": "Product Description",
+			"image": "green-t-shirt.jpg",
+			"price": 49,
+			"category": "Clothing",
+			"quantity": 74,
+			"inventoryStatus": "INSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1013",
+			"code": "fldsmn31b",
+			"name": "Grey T-Shirt",
+			"description": "Product Description",
+			"image": "grey-t-shirt.jpg",
+			"price": 48,
+			"category": "Clothing",
+			"quantity": 0,
+			"inventoryStatus": "OUTOFSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1014",
+			"code": "waas1x2as",
+			"name": "Headphones",
+			"description": "Product Description",
+			"image": "headphones.jpg",
+			"price": 175,
+			"category": "Electronics",
+			"quantity": 8,
+			"inventoryStatus": "LOWSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1015",
+			"code": "vb34btbg5",
+			"name": "Light Green T-Shirt",
+			"description": "Product Description",
+			"image": "light-green-t-shirt.jpg",
+			"price": 49,
+			"category": "Clothing",
+			"quantity": 34,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1016",
+			"code": "k8l6j58jl",
+			"name": "Lime Band",
+			"description": "Product Description",
+			"image": "lime-band.jpg",
+			"price": 79,
+			"category": "Fitness",
+			"quantity": 12,
+			"inventoryStatus": "INSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1017",
+			"code": "v435nn85n",
+			"name": "Mini Speakers",
+			"description": "Product Description",
+			"image": "mini-speakers.jpg",
+			"price": 85,
+			"category": "Clothing",
+			"quantity": 42,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1018",
+			"code": "09zx9c0zc",
+			"name": "Painted Phone Case",
+			"description": "Product Description",
+			"image": "painted-phone-case.jpg",
+			"price": 56,
+			"category": "Accessories",
+			"quantity": 41,
+			"inventoryStatus": "INSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1019",
+			"code": "mnb5mb2m5",
+			"name": "Pink Band",
+			"description": "Product Description",
+			"image": "pink-band.jpg",
+			"price": 79,
+			"category": "Fitness",
+			"quantity": 63,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1020",
+			"code": "r23fwf2w3",
+			"name": "Pink Purse",
+			"description": "Product Description",
+			"image": "pink-purse.jpg",
+			"price": 110,
+			"category": "Accessories",
+			"quantity": 0,
+			"inventoryStatus": "OUTOFSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1021",
+			"code": "pxpzczo23",
+			"name": "Purple Band",
+			"description": "Product Description",
+			"image": "purple-band.jpg",
+			"price": 79,
+			"category": "Fitness",
+			"quantity": 6,
+			"inventoryStatus": "LOWSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1022",
+			"code": "2c42cb5cb",
+			"name": "Purple Gemstone Necklace",
+			"description": "Product Description",
+			"image": "purple-gemstone-necklace.jpg",
+			"price": 45,
+			"category": "Accessories",
+			"quantity": 62,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1023",
+			"code": "5k43kkk23",
+			"name": "Purple T-Shirt",
+			"description": "Product Description",
+			"image": "purple-t-shirt.jpg",
+			"price": 49,
+			"category": "Clothing",
+			"quantity": 2,
+			"inventoryStatus": "LOWSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1024",
+			"code": "lm2tny2k4",
+			"name": "Shoes",
+			"description": "Product Description",
+			"image": "shoes.jpg",
+			"price": 64,
+			"category": "Clothing",
+			"quantity": 0,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1025",
+			"code": "nbm5mv45n",
+			"name": "Sneakers",
+			"description": "Product Description",
+			"image": "sneakers.jpg",
+			"price": 78,
+			"category": "Clothing",
+			"quantity": 52,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1026",
+			"code": "zx23zc42c",
+			"name": "Teal T-Shirt",
+			"description": "Product Description",
+			"image": "teal-t-shirt.jpg",
+			"price": 49,
+			"category": "Clothing",
+			"quantity": 3,
+			"inventoryStatus": "LOWSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1027",
+			"code": "acvx872gc",
+			"name": "Yellow Earbuds",
+			"description": "Product Description",
+			"image": "yellow-earbuds.jpg",
+			"price": 89,
+			"category": "Electronics",
+			"quantity": 35,
+			"inventoryStatus": "INSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1028",
+			"code": "tx125ck42",
+			"name": "Yoga Mat",
+			"description": "Product Description",
+			"image": "yoga-mat.jpg",
+			"price": 20,
+			"category": "Fitness",
+			"quantity": 15,
+			"inventoryStatus": "INSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1029",
+			"code": "gwuby345v",
+			"name": "Yoga Set",
+			"description": "Product Description",
+			"image": "yoga-set.jpg",
+			"price": 20,
+			"category": "Fitness",
+			"quantity": 25,
+			"inventoryStatus": "INSTOCK",
+			"rating": 8
+		}
+	]
+}
+
+
+
+
+Screenshot 16 +
+
Figure 19. Product Json Data
+
+
+

But in our component.ts we can see in ngOnInit that we are getting the data when the component is ready, so when the component is rendered the data will be in the table.

+
+
+

In the first lines of our table, we can see some attributes and events like value, rows, paginator, globalFilterFields, selection, rowHover, dataKey, currentPageReportTemplate and showCurrentPageReport.

+
+
+

We can see more details from those attributes and events here: https://primefaces.org/primeng/showcase/#/table

+
+
+

In the first section, we can see the <ng-template>; this is where we can search for a value in the table.

+
+
+
+
<p-table #dt [value]="products" [rows]="10" [paginator]="true"
+    [globalFilterFields]="['name','country.name','representative.name','status']" [(selection)]="selectedProducts"
+    [rowHover]="true" dataKey="id" currentPageReportTemplate="Showing {first} to {last} of {totalRecords} entries"
+    [showCurrentPageReport]="true">
+    <ng-template pTemplate="caption">
+      <div class="p-d-flex p-ai-center p-jc-between">
+        <h5 class="p-m-0">Manage Products</h5>
+        <span class="p-input-icon-left">
+          <i class="pi pi-search"></i>
+          <input pInputText type="text" (input)="dt.filterGlobal($event.target.value, 'contains')"
+            placeholder="Search..." />
+        </span>
+      </div>
+    </ng-template>
+
+
+
+
+Screenshot 17 +
+
Figure 20. Search on Table
+
+
+

The next <ng-template> is the header of the table. Here we can see the name of each column.

+
+
+
+
    <ng-template pTemplate="header">
+      <tr>
+        <th style="width: 3rem">
+          <p-tableHeaderCheckbox></p-tableHeaderCheckbox>
+        </th>
+        <th pSortableColumn="name">Name <p-sortIcon field="name"></p-sortIcon>
+        </th>
+        <th pSortableColumn="price">Price <p-sortIcon field="price"></p-sortIcon>
+        </th>
+        <th pSortableColumn="category">Category <p-sortIcon field="category"></p-sortIcon>
+        </th>
+        <th pSortableColumn="rating">Reviews <p-sortIcon field="rating"></p-sortIcon>
+        </th>
+        <th pSortableColumn="inventoryStatus">Status <p-sortIcon field="inventoryStatus"></p-sortIcon>
+        </th>
+        <th></th>
+      </tr>
+    </ng-template>
+
+
+
+
+Screenshot 18 +
+
Figure 21. Table Headers
+
+
+

After finishing the header, we need to do the table body. Here is where we need to print each row's values.

+
+
+
+
    <ng-template pTemplate="body" let-product>
+      <tr>
+        <td>
+          <p-tableCheckbox [value]="product"></p-tableCheckbox>
+        </td>
+        <td>{{product.name}}</td>
+        <td>{{product.price | currency:'USD'}}</td>
+        <td>{{product.category}}</td>
+        <td>
+          <p-rating [ngModel]="product.rating" [readonly]="true" [cancel]="false"></p-rating>
+        </td>
+        <td><span
+            [class]="'product-badge status-' + product.inventoryStatus.toLowerCase()">{{product.inventoryStatus}}</span>
+        </td>
+        <td>
+          <button pButton pRipple icon="pi pi-pencil" class="p-button-rounded p-button-success p-mr-2"
+            (click)="editProduct(product)"></button>
+          <button pButton pRipple icon="pi pi-trash" class="p-button-rounded p-button-warning"
+            (click)="deleteProduct(product)"></button>
+        </td>
+      </tr>
+    </ng-template>
+
+
+
+
+Screenshot 19 +
+
Figure 22. Table Body
+
+
+

As we can see, we have some buttons with methods

+
+
+

The first method is to edit a specific product, (click)="editProduct(product)", and the second one is to delete it, deleteProduct(product)

+
+
+
+
  editProduct(product: Product): any {
+    this.product = { ...product };
+    this.productDialog = true;
+  }
+
+  deleteProduct(product: Product): any {
+    this.confirmationService.confirm({
+      message: 'Are you sure you want to delete ' + product.name + '?',
+      header: 'Confirm',
+      icon: 'pi pi-exclamation-triangle',
+      accept: () => {
+        this.products = this.products.filter(val => val.id !==  product.id);
+        this.product = {};
+        this.messageService.add({ severity: 'success', summary: 'Successful', detail: 'Product Deleted', life: 3000 });
+      }
+    });
+  }
+
+
+
+
+Screenshot 20 +
+
Figure 23. Delete and Edit methods
+
+
+

In the last part of the table, we will have a section showing how many products we have.

+
+
+
+Screenshot 21 +
+
Figure 24. Table footer
+
+
+

To do it, we just need to add another template with the following code:

+
+
+
+
    <ng-template pTemplate="summary">
+      <div class="p-d-flex p-ai-center p-jc-between">
+        In total there are {{products ? products.length : 0 }} products.
+      </div>
+    </ng-template>
+
+
+
+
+Screenshot 22 +
+
Figure 25. Table footer code
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-ngrx-effects.html b/docs/devon4ts/1.0/angular/guide-ngrx-effects.html new file mode 100644 index 00000000..578d2228 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-ngrx-effects.html @@ -0,0 +1,448 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Side effects with NgRx/Effects

+
+
+

Reducers are pure functions, meaning they are side-effect free and deterministic. Many actions however have side effects like sending messages or displaying a toast notification. NgRx encapsulates these actions in effects.

+
+
+

Let’s build a recommended movies list so the user can add movies to their watchlist.

+
+
+
+
+

Obtaining the recommendation list from the server

+
+
+

Create a module for recommendations and add stores and states as in the previous chapter. Add EffectsModule.forRoot([]) to the imports in AppModule below StoreModule.forRoot(). Add effects to the feature module:

+
+
+
+
ng generate effect recommendation/Recommendation -m recommendation/recommendation.module.ts
+
+
+
+

We need actions for loading the movie list, success and failure cases:

+
+
+

recommendation/actions/index.ts

+
+
+
+
import { createAction, props, union } from '@ngrx/store';
+import { Movie } from 'src/app/watchlist/models/movies';
+
+export const loadRecommendedMovies = createAction('[Recommendation List] Load movies');
+export const loadRecommendedMoviesSuccess = createAction('[Recommendation API] Load movies success', props<{movies: Movie[]}>());
+export const loadRecommendedMoviesFailure = createAction('[Recommendation API] Load movies failure', props<{error: any}>());
+
+const actions = union({
+    loadRecommendedMovies,
+    loadRecommendedMoviesSuccess,
+    loadRecommendedMoviesFailure
+});
+
+export type ActionsUnion = typeof actions;
+
+
+
+

In the reducer, we use a loading flag so the UI can show a loading spinner. The store is updated with arriving data.

+
+
+

recommendation/reducers/index.ts

+
+
+
+
export interface State {
+  items: Movie[];
+  loading: boolean;
+}
+
+export const initialState: State = {
+  items: [],
+  loading: false
+};
+
+export function reducer(state = initialState, action: recommendationActions.ActionsUnion): State {
+  switch (action.type) {
+    case '[Recommendation List] Load movies':
+      return {
+        ...state,
+        items: [],
+        loading: true
+      };
+
+    case '[Recommendation API] Load movies failure':
+      return {
+        ...state,
+          loading: false
+      };
+
+    case '[Recommendation API] Load movies success':
+      return {
+        ...state,
+        items: action.movies,
+        loading: false
+      };
+
+    default:
+      return state;
+  }
+}
+
+export const getAll = (state: State) => state.items;
+export const isLoading = (state: State) => state.loading;
+
+
+
+

We need an API service to talk to the server. For demonstration purposes, we simulate an answer delayed by one second:

+
+
+

recommendation/services/recommendation-api.service.ts

+
+
+
+
@Injectable({
+  providedIn: 'root'
+})
+export class RecommendationApiService {
+
+  private readonly recommendedMovies: Movie[] = [
+    {
+      id: 2,
+      title: 'The Hunger Games',
+      genre: 'sci-fi',
+      releaseYear: 2012,
+      runtimeMinutes: 144
+    },
+    {
+      id: 4,
+      title: 'Avengers: Endgame',
+      genre: 'fantasy',
+      releaseYear: 2019,
+      runtimeMinutes: 181
+    }
+  ];
+
+  loadRecommendedMovies(): Observable<Movie[]> {
+    return of(this.recommendedMovies).pipe(delay(1000));
+  }
+}
+
+
+
+

Here are the effects:

+
+
+

recommendation/recommendation.effects.ts

+
+
+
+
@Injectable()
+export class RecommendationEffects {
+
+  constructor(
+    private actions$: Actions,
+    private recommendationApi: RecommendationApiService,
+  ) { }
+
+  @Effect()
+  loadBooks$ = this.actions$.pipe(
+    ofType(recommendationActions.loadRecommendedMovies.type),
+    switchMap(() => this.recommendationApi.loadRecommendedMovies().pipe(
+      map(movies => recommendationActions.loadRecommendedMoviesSuccess({ movies })),
+      catchError(error => of(recommendationActions.loadRecommendedMoviesFailure({ error })))
+    ))
+  );
+}
+
+
+
+

Effects are always observables and return actions. In this example, we consume the actions observable provided by NgRx and listen only for the loadRecommendedMovies actions by using the ofType operator. Using switchMap, we map to a new observable, one that loads movies and maps the successful result to a new loadRecommendedMoviesSuccess action or a failure to loadRecommendedMoviesFailure. In a real application we would show a notification in the error case.

+
+
+
+
+

Note

+
+
+

If an effect should not dispatch another action, return an empty observable.

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-ngrx-entity.html b/docs/devon4ts/1.0/angular/guide-ngrx-entity.html new file mode 100644 index 00000000..f51be977 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-ngrx-entity.html @@ -0,0 +1,376 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Simplifying CRUD with NgRx/Entity

+
+
+

Most of the time when manipulating entries in the store, we like to create, add, update, or delete entries (CRUD). NgRx/Entity provides convenience functions if each item of a collection has an id property. Luckily all our entities already have this property.

+
+
+

Let’s add functionality to add a movie to the watchlist. First, create the required action:

+
+
+

recommendation/actions/index.ts

+
+
+
+
export const addToWatchlist = createAction('[Recommendation List] Add to watchlist',
+    props<{ watchlistItemId: number, movie: Movie, addedAt: Date }>());
+
+
+
+
+
+

Note

+
+
+

You may wonder why the Date object is not created inside the reducer instead, since it should always be the current time. However, remember that reducers should be deterministic state machines — State A + Action B should always result in the same State C. This makes reducers easily testable.

+
+
+

Then, rewrite the watchlistData reducer to make use of NgRx/Entity:

+
+
+

watchlist/reducers/watchlist-data.reducer.ts

+
+
+
+
export interface State extends EntityState<WatchlistItem> { (1)
+}
+
+export const entityAdapter = createEntityAdapter<WatchlistItem>(); (2)
+
+export const initialState: State = entityAdapter.getInitialState(); (3)
+
+const entitySelectors = entityAdapter.getSelectors();
+
+export function reducer(state = initialState, action: playbackActions.ActionsUnion | recommendationActions.ActionsUnion): State {
+  switch (action.type) {
+    case playbackActions.playbackFinished.type:
+      const itemToUpdate = entitySelectors
+      .selectAll(state) (4)
+      .find(item => item.movie.id == action.movieId);
+      if (itemToUpdate) {
+        return entityAdapter.updateOne({ (5)
+          id: itemToUpdate.id,
+          changes: { playbackMinutes: action.stoppedAtMinute } (6)
+        }, state);
+      } else {
+        return state;
+      }
+
+    case recommendationActions.addToWatchlist.type:
+      return entityAdapter.addOne({id: action.watchlistItemId, movie: action.movie, added: action.addedAt, playbackMinutes: 0}, state);
+
+    default:
+      return state;
+  }
+}
+
+
+export const getAllItems = entitySelectors.selectAll;
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + +
1NgRx/Entity requires state to extend EntityState. It provides a list of ids and a dictionary of id ⇒ entity entries
2The entity adapter provides data manipulation operations and selectors
3The state can be initialized with getInitialState(), which accepts an optional object to define any additional state beyond EntityState
4selectAll returns an array of all entities
5All adapter operations consume the state object as the last argument and produce a new state
6Update methods accept a partial change definition; you don’t have to clone the object
+
+
+

This concludes the tutorial on NgRx. If you want to learn about advanced topics such as selectors with arguments, testing, or router state, head over to the official NgRx documentation.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-ngrx-getting-started.html b/docs/devon4ts/1.0/angular/guide-ngrx-getting-started.html new file mode 100644 index 00000000..d1c14341 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-ngrx-getting-started.html @@ -0,0 +1,408 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Introduction to NgRx

+
+
+

NgRx is a state management framework for Angular based on the Redux pattern.

+
+
+
+
+

The need for client side state management

+
+
+

You may wonder why you should bother with state management. Usually data resides in a back-end storage system, e.g. a database, and is retrieved by the client on a per-need basis. To add, update, or delete entities from this store, clients have to invoke API endpoints at the back-end. Mimicking database-like transactions on the client side may seem redundant. However, there are many use cases for which a global client-side state is appropriate:

+
+
+
    +
  • +

    the client has some kind of global state which should survive the destruction of a component, but does not warrant server side persistence, for example: volume level of media, expansion status of menus

    +
  • +
  • +

server-side data should not be retrieved every time it is needed, either because multiple components consume it, or because it should be cached, e.g. the personal watchlist in an online streaming app

    +
  • +
  • +

    the app provides a rich experience with offline functionality, e.g. a native app built with Ionic

    +
  • +
+
+
+

Saving global states inside the services they originate from results in a data flow that is hard to follow and state becoming inconsistent due to unordered state mutations. Following the single source of truth principle, there should be a central location holding all your application’s state, just like a server side database does. State management libraries for Angular provide tools for storing, retrieving, and updating client-side state.

+
+
+
+
+

Why NgRx?

+
+
+

As stated in the introduction, devon4ng does not stipulate a particular state library, or require using one at all. However, NgRx has proven to be a robust, mature solution for this task, with good tooling and 3rd-party library support. Albeit introducing a level of indirection that requires additional effort even for simple features, the Redux concept enforces a clear separation of concerns leading to a cleaner architecture.

+
+
+

Nonetheless, you should always compare different approaches to state management and pick the best one suiting your use case. Here’s a (non-exhaustive) list of competing state management libraries:

+
+
+
    +
  • +

    Plain RxJS using the simple store described in Abstract Class Store

    +
  • +
  • +

    NgXS reduces some boilerplate of NgRx by leveraging the power of decorators and moving side effects to the store

    +
  • +
  • +

    MobX follows a more imperative approach in contrast to the functional Redux pattern

    +
  • +
  • +

    Akita also uses an imperative approach with direct setters in the store, but keeps the concept of immutable state transitions

    +
  • +
+
+
+
+
+

Setup

+
+
+

To get a quick start, use the provided template for devon4ng + NgRx.

+
+
+

To manually install the core store package together with a set of useful extensions:

+
+
+

NPM:

+
+
+
+
`npm install @ngrx/store @ngrx/effects @ngrx/entity @ngrx/store-devtools --save`
+
+
+
+

Yarn:

+
+
+
+
`yarn add @ngrx/store @ngrx/effects @ngrx/entity @ngrx/store-devtools`
+
+
+
+

We recommend to add the NgRx schematics to your project so you can create code artifacts from the command line:

+
+
+

NPM:

+
+
+
+
`npm install @ngrx/schematics --save-dev`
+
+
+
+

Yarn:

+
+
+
+
`yarn add @ngrx/schematics --dev`
+
+
+
+

Afterwards, make NgRx your default schematics provider, so you don’t have to type the qualified package name every time:

+
+
+
+
`ng config cli.defaultCollection @ngrx/schematics`
+
+
+
+

If you have custom settings for Angular schematics, you have to configure them as described here.

+
+
+
+
+

Concept

+
+
+
+NgRx Architecture +
+
Figure 1. NgRx architecture overview
+
+
+

Figure 1 gives an overview of the NgRx data flow. The single source of truth is managed as an immutable state object by the store. Components dispatch actions to trigger state changes. Actions are handed over to reducers, which take the current state and action data to compute the next state. Actions are also consumed by effects, which perform side-effects such as retrieving data from the back-end, and may dispatch new actions as a result. Components subscribe to state changes using selectors.

+
+
+

Continue with Creating a Simple Store.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-ngrx-simple-store.html b/docs/devon4ts/1.0/angular/guide-ngrx-simple-store.html new file mode 100644 index 00000000..478eb253 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-ngrx-simple-store.html @@ -0,0 +1,771 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

State, Selection and Reducers

+
+ +
+
+
+

Creating a Simple Store

+
+
+

In the following pages we use the example of an online streaming service. We will model a particular feature, a watchlist that can be populated by the user with movies she or he wants to see in the future.

+
+
+
+
+

Initializing NgRx

+
+
+

If you’re starting fresh, you first have to initialize NgRx and create a root state. The fastest way to do this is using the schematic:

+
+
+
+
`ng generate @ngrx/schematics:store State --root --module app.module.ts`
+
+
+
+

This will automatically generate a root store and register it in the app module. Next we generate a feature module for the watchlist:

+
+
+

` ng generate module watchlist`

+
+
+

and create a corresponding feature store:

+
+
+

` ng generate store watchlist/Watchlist -m watchlist.module.ts`

+
+
+

This generates a file watchlist/reducers/index.ts with the reducer function, and registers the store in the watchlist module declaration.

+
+
+
+
+

Note

+
+
+

If you’re getting an error Schematic "store" not found in collection "@schematics/angular", this means you forgot to register the NgRx schematics as default.

+
+
+

Next, add the WatchlistModule to the AppModule imports so the feature store is registered when the application starts. We also added the store devtools which we will use later, resulting in the following file:

+
+
+

app.module.ts

+
+
+
+
import { BrowserModule } from '@angular/platform-browser';
+import { NgModule } from '@angular/core';
+
+import { AppComponent } from './app.component';
+import { EffectsModule } from '@ngrx/effects';
+import { AppEffects } from './app.effects';
+import { StoreModule } from '@ngrx/store';
+import { reducers, metaReducers } from './reducers';
+import { StoreDevtoolsModule } from '@ngrx/store-devtools';
+import { environment } from '../environments/environment';
+import { WatchlistModule } from './watchlist/watchlist.module';
+
+@NgModule({
+  declarations: [
+    AppComponent
+  ],
+  imports: [
+    BrowserModule,
+    WatchlistModule,
+    StoreModule.forRoot(reducers, { metaReducers }),
+    // Instrumentation must be imported after importing StoreModule (config is optional)
+    StoreDevtoolsModule.instrument({
+      maxAge: 25, // Retains last 25 states
+      logOnly: environment.production, // Restrict extension to log-only mode
+    }),
+    !environment.production ? StoreDevtoolsModule.instrument() : []
+  ],
+  providers: [],
+  bootstrap: [AppComponent]
+})
+export class AppModule { }
+
+
+
+
+
+

Create an entity model and initial state

+
+
+

We need a simple model for our list of movies. Create a file watchlist/models/movies.ts and insert the following code:

+
+
+
+
export interface Movie {
+    id: number;
+    title: string;
+    releaseYear: number;
+    runtimeMinutes: number;
+    genre: Genre;
+}
+
+export type Genre = 'action' | 'fantasy' | 'sci-fi' | 'romantic' | 'comedy' | 'mystery';
+
+export interface WatchlistItem {
+    id: number;
+    movie: Movie;
+    added: Date;
+    playbackMinutes: number;
+}
+
+
+
+
+
+

Note

+
+
+

We discourage putting several types into the same file and do this only for the sake of keeping this tutorial brief.

+
+
+

Later we will learn how to retrieve data from the back-end using effects. For now we will create an initial state for the user with a default movie.

+
+
+

State is defined and transformed by a reducer function. Let’s create a watchlist reducer:

+
+
+
+
```
+cd watchlist/reducers
+ng g reducer WatchlistData --reducers index.ts
+```
+
+
+
+

Open the generated file watchlist-data.reducer.ts. You see three exports: The State interface defines the shape of the state. There is only one instance of a feature state in the store at all times. The initialState constant is the state at application creation time. The reducer function will later be called by the store to produce the next state instance based on the current state and an action object.

+
+
+

Let’s put a movie into the user’s watchlist:

+
+
+

watchlist-data.reducer.ts

+
+
+
+
export interface State {
+  items: WatchlistItem[];
+}
+
+export const initialState: State = {
+  items: [
+    {
+      id: 42,
+      movie: {
+        id: 1,
+        title: 'Die Hard',
+        genre: 'action',
+        releaseYear: 1988,
+        runtimeMinutes: 132
+      },
+      playbackMinutes: 0,
+      added: new Date(),
+    }
+  ]
+};
+
+
+
+
+
+

Select the current watchlist

+
+
+

State slices can be retrieved from the store using selectors.

+
+
+

Create a watchlist component:

+
+
+
+
`ng g c watchlist/Watchlist`
+
+
+
+

and add it to the exports of WatchlistModule. Also, replace app.component.html with

+
+
+
+
<app-watchlist></app-watchlist>
+
+
+
+

State observables are obtained using selectors. They are memoized by default, meaning that you don’t have to worry about performance if you use complicated calculations when deriving state — these are only performed once per state emission.

+
+
+

Add a selector to watchlist-data.reducer.ts:

+
+
+
+
`export const getAllItems = (state: State) => state.items;`
+
+
+
+

Next, we have to re-export the selector for this sub-state in the feature reducer. Modify the watchlist/reducers/index.ts like this:

+
+
+

watchlist/reducers/index.ts

+
+
+
+
import {
+  ActionReducer,
+  ActionReducerMap,
+  createFeatureSelector,
+  createSelector,
+  MetaReducer
+} from '@ngrx/store';
+import { environment } from 'src/environments/environment';
+import * as fromWatchlistData from './watchlist-data.reducer';
+import * as fromRoot from 'src/app/reducers/index';
+
+export interface WatchlistState { (1)
+  watchlistData: fromWatchlistData.State;
+}
+
+export interface State extends fromRoot.State { (2)
+  watchlist: WatchlistState;
+}
+
+export const reducers: ActionReducerMap<WatchlistState> = { (3)
+  watchlistData: fromWatchlistData.reducer,
+};
+
+export const metaReducers: MetaReducer<WatchlistState>[] = !environment.production ? [] : [];
+
+export const getFeature = createFeatureSelector<State, WatchlistState>('watchlist'); (4)
+
+export const getWatchlistData = createSelector( (5)
+  getFeature,
+  state => state.watchlistData
+);
+
+export const getAllItems = createSelector( (6)
+  getWatchlistData,
+  fromWatchlistData.getAllItems
+);
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + +
1The feature state, each member is managed by a different reducer
2Feature states are registered by the forFeature method. This interface provides a typesafe path from root to feature state.
3Tie sub-states of a feature state to the corresponding reducers
4Create a selector to access the 'watchlist' feature state
5select the watchlistData sub state
6re-export the selector
+
+
+

Note how createSelector allows to chain selectors. This is a powerful tool that also allows for selecting from multiple states.

+
+
+

You can use selectors as pipeable operators:

+
+
+

watchlist.component.ts

+
+
+
+
export class WatchlistComponent {
+  watchlistItems$: Observable<WatchlistItem[]>;
+
+  constructor(
+    private store: Store<fromWatchlist.State>
+  ) {
+    this.watchlistItems$ = this.store.pipe(select(fromWatchlist.getAllItems));
+  }
+}
+
+
+
+

watchlist.component.html

+
+
+
+
<h1>Watchlist</h1>
+<ul>
+    <li *ngFor="let item of watchlistItems$ | async">{{item.movie.title}} ({{item.movie.releaseYear}}): {{item.playbackMinutes}}/{{item.movie.runtimeMinutes}} min watched</li>
+</ul>
+
+
+
+
+
+

Dispatching an action to update watched minutes

+
+
+

We track the user’s current progress at watching a movie as the playbackMinutes property. After closing a video, the watched minutes have to be updated. In NgRx, state is being updated by dispatching actions. An action is an object with a (globally unique) type discriminator and an optional payload.

+
+
+
+
+

Creating the action

+
+
+

Create a file playback/actions/index.ts. In this example, we do not further separate the actions per sub state. Actions can be defined by using action creators:

+
+
+

playback/actions/index.ts

+
+
+
+
import { createAction, props, union } from '@ngrx/store';
+
+export const playbackFinished = createAction('[Playback] Playback finished', props<{ movieId: number, stoppedAtMinute: number }>());
+
+const actions = union({
+    playbackFinished
+});
+
+export type ActionsUnion = typeof actions;
+
+
+
+

First we specify the type, followed by a call to the payload definition function. Next, we create a union of all possible actions for this file using union, which allows us to access action payloads in the reducer in a typesafe way.

+
+
+
+
+

Note

+
+
+

Action types should follow the naming convention [Source] Event, e.g. [Recommended List] Hide Recommendation or [Auth API] Login Success. Think of actions rather as events than commands. You should never use the same action at two different places (you can still handle multiple actions the same way). This facilitates tracing the source of an action. For details see Good Action Hygiene with NgRx by Mike Ryan (video).

+
+
+
+
+

Dispatch

+
+
+

We skip the implementation of an actual video playback page and simulate watching a movie in 10 minute segments by adding a link in the template:

+
+
+

watchlist-component.html

+
+
+
+
<li *ngFor="let item of watchlistItems$ | async">... <button (click)="stoppedPlayback(item.movie.id, item.playbackMinutes + 10)">Add 10 Minutes</button></li>
+
+
+
+

watchlist-component.ts

+
+
+
+
import * as playbackActions from 'src/app/playback/actions';
+...
+  stoppedPlayback(movieId: number, stoppedAtMinute: number) {
+    this.store.dispatch(playbackActions.playbackFinished({ movieId, stoppedAtMinute }));
+  }
+
+
+
+
+
+

State reduction

+
+
+

Next, we handle the action inside the watchlistData reducer. Note that actions can be handled by multiple reducers and effects at the same time to update different states, for example if we’d like to show a rating modal after playback has finished.

+
+
+

watchlist-data.reducer.ts

+
+
+
+
export function reducer(state = initialState, action: playbackActions.ActionsUnion): State {
+  switch (action.type) {
+    case playbackActions.playbackFinished.type:
+      return {
+        ...state,
+        items: state.items.map(updatePlaybackMinutesMapper(action.movieId, action.stoppedAtMinute))
+      };
+
+    default:
+      return state;
+  }
+}
+
+export function updatePlaybackMinutesMapper(movieId: number, stoppedAtMinute: number) {
+  return (item: WatchlistItem) => {
+    if (item.movie.id == movieId) {
+      return {
+        ...item,
+        playbackMinutes: stoppedAtMinute
+      };
+    } else {
+      return item;
+    }
+  };
+}
+
+
+
+

Note how we changed the reducer’s function signature to reference the actions union. The switch-case handles all incoming actions to produce the next state. The default case handles all actions a reducer is not interested in by returning the state unchanged. Then we find the watchlist item corresponding to the movie with the given id and update the playback minutes. Since state is immutable, we have to clone all objects down to the one we would like to change using the object spread operator (…​).

+
+
+
+
+

Note

+
+
+

Selectors rely on object identity to decide whether the value has to be recalculated. Do not clone objects that are not on the path to the change you want to make. This is why updatePlaybackMinutesMapper returns the same item if the movie id does not match.

+
+
+
+
+

Alternative state mapping with Immer

+
+
+

It can be hard to think in immutable changes, especially if your team has a strong background in imperative programming. In this case, you may find the Immer library convenient, which allows to produce immutable objects by manipulating a proxied draft. The same reducer can then be written as:

+
+
+

watchlist-data.reducer.ts with Immer

+
+
+
+
import { produce } from 'immer';
+...
+case playbackActions.playbackFinished.type:
+      return produce(state, draft => {
+        const itemToUpdate = draft.items.find(item => item.movie.id == action.movieId);
+        if (itemToUpdate) {
+          itemToUpdate.playbackMinutes = action.stoppedAtMinute;
+        }
+      });
+
+
+
+

Immer works out of the box with plain objects and arrays.

+
+
+
+
+

Redux devtools

+
+
+

If the StoreDevToolsModule is instrumented as described above, you can use the browser extension Redux devtools to see all dispatched actions and the resulting state diff, as well as the current state, and even travel back in time by undoing actions.

+
+
+
+Redux Devtools +
+
Figure 1. Redux devtools
+
+
+

Continue with learning about effects

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-npm-yarn-workflow.html b/docs/devon4ts/1.0/angular/guide-npm-yarn-workflow.html new file mode 100644 index 00000000..31729a70 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-npm-yarn-workflow.html @@ -0,0 +1,975 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Package Managers Workflow

+
+ +
+
+
+

Introduction

+
+
+

This document aims to provide you the necessary documentation and sources in order to help you understand the importance of dependencies between packages.

+
+
+

Projects in NodeJS make use of modules, chunks of reusable code made by other people or teams. These small chunks of reusable code are called packages [1]. Packages are used to solve specific problems or tasks. These relations between your project and the external packages are called dependencies.

+
+
+

For example, imagine we are doing a small program that takes your birthday as an input and tells you how many days are left until your birthday. We search in the repository if someone has published a package to retrieve the current date and manage date types, and maybe we could search for another package to show a calendar, because we want to optimize our time, and we wish the user to click a calendar button and choose the day in the calendar instead of typing it.

+
+
+

As you can see, packages are convenient. In some cases, they may be even needed, as they can manage aspects of your program you may not be proficient in, or provide an easier use of them.

+
+
+

For more comprehensive information visit npm definition

+
+
+
+
+

Package.json

+
+
+

Dependencies in your project are stored in a file called package.json. Every package.json must contain, at least, the name and version of your project.

+
+
+

Package.json is located in the root of your project.

+
+
+ + + + + +
+ + +If package.json is not on your root directory refer to Problems you may encounter section +
+
+
+

If you wish to learn more information about package.json, click on the following links:

+
+ +
+
+
+

Content of package.json

+
+
+

As you noticed, package.json is a really important file in your project. It contains essential information about our project, therefore you need to understand what’s inside.

+
+
+

The structure of package.json is divided in blocks, inside the first one you can find essential information of your project such as the name, version, license and optionally some [Scripts].

+
+
+
+
{
+  "name": "exampleproject",
+  "version": "0.0.0",
+  "license": "MIT",
+  "scripts": {
+    "ng": "ng",
+    "start": "ng serve",
+    "build": "ng build",
+    "test": "ng test",
+    "lint": "ng lint",
+    "e2e": "ng e2e"
+  }
+
+
+
+

The next block is called dependencies and contains the packages that the project needs in order to be developed, compiled and executed.

+
+
+
+
"private": true,
+  "dependencies": {
+    "@angular/animations": "^4.2.4",
+    "@angular/common": "^4.2.4",
+    "@angular/forms": "^4.2.4",
+    ...
+    "zone.js": "^0.8.14"
+  }
+
+
+
+

After dependencies we find devDependencies, another kind of dependencies present in the development of the application but unnecessary for its execution. One example is typescript. Code is written in typescript, and then, transpiled to JavaScript. This means the application is not using typescript in execution and consequently not included in the deployment of our application.

+
+
+
+
"devDependencies": {
+    "@angular/cli": "1.4.9",
+    "@angular/compiler-cli": "^4.2.4",
+    ...
+    "@types/node": "~6.0.60",
+    "typescript": "~2.3.3"
+  }
+
+
+
+

Having a peer dependency means that your package needs a dependency that is the same exact dependency as the person installing your package

+
+
+
+
"peerDependencies": {
+    "package-123": "^2.7.18"
+  }
+
+
+
+

Optional dependencies are just that: optional. If they fail to install, Yarn will still say the install process was successful.

+
+
+
+
"optionalDependencies": {
+    "package-321": "^2.7.18"
+  }
+
+
+
+

Finally you can have bundled dependencies which are packages bundled together when publishing your package in a repository.

+
+
+
+
{
+  "bundledDependencies": [
+    "package-4"
+  ]
+}
+
+
+
+

Here is the link to an in-depth explanation of dependency types​.

+
+
+
+
+

Scripts

+
+
+

Scripts are a great way of automating tasks related to your package, such as simple build processes or development tools.

+
+
+

For example:

+
+
+
+
{
+  "name": "exampleproject",
+  "version": "0.0.0",
+  "license": "MIT",
+  "scripts": {
+    "build-project": "node hello-world.js",
+  }
+
+
+
+

You can run that script by running the command yarn (run) script or npm run script, check the example below:

+
+
+
+
$ yarn (run) build-project    # run is optional
+$ npm run build-project
+
+
+
+

There are special reserved words for scripts, like preinstall, which will execute the script automatically before the packages you install are installed.

+
+
+

Check different uses for scripts in the following links:

+
+ +
+

Or you can go back to Content of package.json.

+
+
+
+
+

Managing dependencies

+
+
+

In order to manage dependencies we recommend using package managers in your projects.

+
+
+

A big reason is their usability. Adding or removing a package is really easy, and by doing so, the package manager updates the package.json and copies (or removes) the package in the needed location, with a single command.

+
+
+

Another reason, closely related to the first one, is reducing human error by automating the package management process.

+
+
+

Two of the package managers you can use in NodeJS projects are "yarn" and "npm". While you can use both, we encourage you to use only one of them while working on projects. Using both may lead to different dependencies between members of the team.

+
+
+
+
+

npm

+
+
+

We’ll start by installing npm following this small guide here.

+
+
+

As stated on the web, npm comes inside of NodeJS, and must be updated after installing NodeJS, in the same guide you used earlier are written the instructions to update npm.

+
+
+

How npm works

+
+
+

In order to explain how npm works, let’s take a command as an example:

+
+
+
+
$ npm install @angular/material @angular/cdk
+
+
+
+

This command tells npm to look for the packages @angular/material and @angular/cdk in the npm registry, download and decompress them in the folder node_modules along with their own dependencies. Additionally, npm will update package.json and create a new file called package-lock.json.

+
+
+

After initialization and installing the first package there will be a new folder called node_modules in your project. This folder is where your packages are unzipped and stored, following a tree scheme.

+
+
+

Take into consideration that both npm and yarn need a package.json in the root of your project in order to work properly. If after creating your project you don’t have it, download the package.json from the repository again or you’ll have to start over.

+
+
+

Brief overview of commands

+
+
+

If we need to create a package.json from scratch, we can use the command init. This command asks the user for basic information about the project and creates a brand new package.json.

+
+
+
+
$ npm init
+
+
+
+

Install (or i) installs all modules listed as dependencies in package.json locally. You can also specify a package, and install that package. Install can also be used with the parameter -g, which tells npm to install the [Global package].

+
+
+
+
$ npm install
+$ npm i
+$ npm install Package
+
+
+
+ + + + + +
+ + +Earlier versions of npm did not add dependencies to package.json unless it was used with the flag --save, so npm install package would be npm install --save package, you have one example below. +
+
+
+
+
$ npm install --save Package
+
+
+
+

Npm needs flags in order to know what kind of dependency you want in your project, in npm you need to put the flag -D or --save-dev to install devDependencies, for more information consult the links at the end of this section.

+
+
+
+
$ npm install -D package
+$ npm install --save-dev package
+
+
+
+

+
+
+

The next command uninstalls the module you specified in the command.

+
+
+
+
$ npm uninstall Package
+
+
+
+

ls command shows us the dependencies like a nested tree, useful if you have few packages, not so useful when you need a lot of packages.

+
+
+
+
$ npm ls
+
+
+
+
+
npm@@VERSION@ /path/to/npm
+└─┬ init-package-json@0.0.4
+  └── promzard@0.1.5
+
+
+
+
example tree
+

We recommend you to learn more about npm commands in the following link, navigating to the section CLI commands.

+
+
+

About Package-lock.json

+
+
+

Package-lock.json describes the dependency tree resulting of using package.json and npm. +Whenever you update, add or remove a package, package-lock.json is deleted and redone with +the new dependencies.

+
+
+
+
 "@angular/animations": {
+      "version": "4.4.6",
+      "resolved": "https://registry.npmjs.org/@angular/animations/-/animations-4.4.6.tgz",
+      "integrity": "sha1-+mYYmaik44y3xYPHpcl85l1ZKjU=",
+      "requires": {
+        "tslib": "1.8.0"
+      }
+
+
+
+

This lock file is checked every time the command npm i (or npm install) is used without specifying a package, +in the case it exists and it’s valid, npm will install the exact tree that was generated, such that subsequent +installs are able to generate identical dependency trees.

+
+
+ + + + + +
+ + +It is not recommended to modify this file yourself. It’s better to leave its management to npm. +
+
+
+

More information is provided by the npm team at package-lock.json

+
+
+
+
+

== Yarn

+
+
+

Yarn is an alternative to npm. If you wish to install yarn, follow the guide getting started with yarn and download the correct version for your operating system. NodeJS is also needed; you can find it here.

+
+
+

Working with yarn

+
+
+

Yarn is used like npm, with small differences in syntax, for example npm install module is changed to yarn add module.

+
+
+
+
$ yarn add @covalent
+
+
+
+

This command is going to download the required packages, modify package.json, put the package in the folder node_modules and makes a new yarn.lock with the new dependency.

+
+
+

However, unlike npm, yarn maintains a cache with the packages you download inside. You don’t need to download every file every time you do a general installation. This means installations are faster than with npm.

+
+
+

Similarly to npm, yarn creates and maintains its own lock file, called yarn.lock. Yarn.lock gives enough information about the project for the dependency tree to be reproduced.

+
+
+

yarn commands

+
+
+

Here we have a brief description of yarn’s most used commands:

+
+
+
+
$ yarn add Package
+$ yarn add --dev Package
+
+
+
+

Adds a package locally to use in your package. Adding the flags --dev or -D will add them to devDependencies instead of the default dependencies, if you need more information check the links at the end of the section.

+
+
+
+
$ yarn init
+
+
+
+

Initializes the development of a package.

+
+
+
+
$ yarn install
+
+
+
+

Installs all the dependencies defined in a package.json file, you can also write "yarn" to achieve the same effect.

+
+
+
+
$ yarn remove Package
+
+
+
+

You use it when you wish to remove a package from your project.

+
+
+
+
$ yarn global add Package
+
+
+
+

Installs the [Global package].

+
+
+

Please, refer to the documentation to learn more about yarn commands and their attributes: yarn commands

+
+
+

yarn.lock

+
+
+

This file has the same purpose as Package-lock.json, to guide the package manager, in this case yarn, +to install the dependency tree specified in yarn.lock.

+
+
+

Yarn.lock and package.json are +essential files when collaborating in a project with more co-workers and may be a +source of errors if programmers do not use the same manager.

+
+
+

Yarn.lock follows the same structure as package-lock.json, you can find an example of dependency below:

+
+
+
+
"@angular/animations@^4.2.4":
+  version "4.4.6"
+  resolved "https://registry.yarnpkg.com/@angular/animations/-/animations-4.4.6.tgz#fa661899a8a4e38cb7c583c7a5c97ce65d592a35"
+  dependencies:
+    tslib "^1.7.1"
+
+
+
+ + + + + +
+ + +As with package-lock.json, it’s strongly not advised to modify this file. Leave its management to yarn +
+
+
+

You can learn more about yarn.lock here: yarn.lock

+
+
+
+
+

== Global package

+
+
+

Global packages are packages installed in your operating system instead of your local project; +global packages are useful for developer tooling that is not part of any individual project but instead is used for local commands.

+
+
+

A good example of global package is @angular/cli, a command line interface for angular used in our projects. You can install +a global package in npm with "npm install -g package" and "yarn global add package" with yarn, you have a npm example below:

+
+
+
Listing 1. npm global package
+
+
npm install -g @angular/cli
+
+
+ +
+
+
+

== Package version

+
+
+

Dependencies are critical to the success of a package. You must be extra careful about +which version packages are using, one package in a different version may break your code.

+
+
+

Versioning in npm and yarn, follows a semantic called semver, following the logic +MAJOR.MINOR.PATCH, like for example, @angular/animations: 4.4.6.

+
+
+

Different versions

+
+
+

Sometimes, packages are installed with a different version from the one initially installed. +This happens because package.json also contains the range of versions we allow yarn or npm to +install or update to, example:

+
+
+
+
"@angular/animations": "^4.2.4"
+
+
+
+

And here the installed one:

+
+
+
+
 "@angular/animations": {
+      "version": "4.4.6",
+      "resolved": "https://registry.npmjs.org/@angular/animations/-/animations-4.4.6.tgz",
+      "integrity": "sha1-+mYYmaik44y3xYPHpcl85l1ZKjU=",
+      "requires": {
+        "tslib": "1.8.0"
+      }
+
+
+
+

As you can see, the version we initially added is 4.2.4, and the version finally installed after +a global installation of all packages, 4.4.6.

+
+
+

Installing packages without package-lock.json or yarn.lock using their respective packet managers, will always +end with npm or yarn installing the latest version allowed by package.json.

+
+
+

"@angular/animations": "^4.2.4" contains not only the version we added, but also the range we allow npm and yarn +to update. Here are some examples:

+
+
+
+
"@angular/animations": "<4.2.4"
+
+
+
+

The version installed must be lower than 4.2.4 .

+
+
+
+
"@angular/animations": ">=4.2.4"
+
+
+
+

The version installed must be greater than or equal to 4.2.4 .

+
+
+
+
"@angular/animations": "=4.2.4"
+
+
+
+

The version installed must be equal to 4.2.4.

+
+
+
+
"@angular/animations": "^4.2.4"
+
+
+
+

The version installed cannot modify the first non zero digit, for example in this case +it cannot surpass 5.0.0 or be lower than 4.2.4 .

+
+
+

You can learn more about this in Versions

+
+
+
+
+

Problems you may encounter

+
+
+

If you can’t find package.json, you may have deleted the one you had previously, +which means you have to download the package.json from the repository. +In the case you are creating a new project you can create a new package.json. More information +in the links below. Click on Package.json if you come from that section.

+
+ +
+ + + + + +
+ + +Using npm install or yarn without package.json in your projects will +result in compilation errors. As we mentioned earlier, +Package.json contains essential information about your project. +
+
+
+

If you have package.json, but you don’t have package-lock.json or yarn.lock the use of +command "npm install" or "yarn" may result in a different dependency tree.

+
+
+

If you are trying to import a module and Visual Studio Code is not able to find it, +this is usually caused by an error adding the package to the project; try to add the module again with yarn or npm, +and restart Visual Studio Code.

+
+
+

Be careful with the semantic versioning inside your package.json of the packages, +or you may find a new update on one of your dependencies breaking your code.

+
+
+ + + + + +
+ + +In the following link +there is a solution to a problematic update to one package. +
+
+
+

A list of common errors of npm can be found in: npm errors

+
+
+
+
+

== Recommendations

+
+
+

Use yarn or npm in your project, reach an agreement with your team in order to choose one, this will avoid +undesired situations like forgetting to upload an updated yarn.lock or package-lock.json. +Be sure to have the latest version of your project when possible.

+
+
+ + + + + +
+ + +Pull your project every time it’s updated. Erase your node_modules folder and reinstall all +dependencies. This assures you to be working with the same dependencies your team has. +
+
+
+

AD Center recommends the use of yarn.

+
+
+
+
+
+
+1. A package is a file or directory that is described by a package.json. +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-package-managers.html b/docs/devon4ts/1.0/angular/guide-package-managers.html new file mode 100644 index 00000000..4f2de3f7 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-package-managers.html @@ -0,0 +1,502 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Package Managers

+
+
+

There are two major package managers currently used for JavaScript / TypeScript projects which leverage NodeJS as a build platform.

+
+
+
    +
  1. +

    npm

    +
  2. +
  3. +

    yarn

    +
  4. +
+
+
+

Our recommendation is to use yarn but both package managers are fine.

+
+
+ + + + + +
+ + +When using npm it is important to use a version greater 5.0 as npm 3 has major drawbacks compared to yarn. +The following guide assumes that you are using npm >= 5 or yarn. +
+
+
+

Before you start reading further, please take a look at the docs:

+
+ +
+

The following guide will describe best practices for working with yarn / npm.

+
+
+
+
+

Semantic Versioning

+
+
+

When working with package managers it is very important to understand the concept of semantic versioning.

+
+
+
Version example 1.2.3
+

|== == == = +|Version |1. |2. |3 +|Version name when incrementing |Major (2.0.0) |Minor (1.3.0) |Patch (1.2.4) +|Has breaking changes |yes |no |no +|Has features |yes |yes |no +|Has bug fixes |yes |yes |yes +|== == == =

+
+
+

The table gives an overview of the most important parts of semantic versioning. +In the header version 1.2.3 is displayed. +The first row shows the name and the resulting version when incrementing a part of the version. +The next rows show specifics of the resulting version - e.g. a major version can have breaking changes, features and bug fixes.

+
+
+

Packages from npm and yarn leverage semantic versioning and instead of selecting a fixed version one can specify a selector. +The most common selectors are:

+
+
+
    +
  • +

    ^1.2.3 +At least 1.2.3 - 1.2.4 or 1.3.0 can be used, 2.0.0 can not be used

    +
  • +
  • +

    ~1.2.3 +At least 1.2.3 - 1.2.4 can be used, 2.0.0 and 1.3.0 can not be used

    +
  • +
  • +

    >=1.2.3 +At least 1.2.3 - every version greater can also be used

    +
  • +
+
+
+

This achieves a lower number of duplicates. +To give an example:

+
+
+

If package A needs version 1.3.0 of package C and package B needs version 1.4.0 of package C one would end up with 4 packages.

+
+
+

If package A needs version ^1.3.0 of package C and package B needs version 1.4.0 of package C one would end up with 3 packages. +A would use the same version of C as B - 1.4.0.

+
+
+
+
+

Do not modify package.json and lock files by hand

+
+
+

Dependencies are always added using a yarn or npm command. +Altering the package.json, package-lock.json or yarn.lock file by hand is not recommended.

+
+
+

Always use a yarn or npm command to add a new dependency.

+
+
+

Adding the package express with yarn to dependencies.

+
+
+
+
yarn add express
+
+
+
+

Adding the package express with npm to dependencies.

+
+
+
+
npm install express
+
+
+
+
+
+

What does the lock file do

+
+
+

The purpose of the files yarn.lock and package-lock.json is to freeze versions for a short time.

+
+
+

The following problem is solved:

+
+
+
    +
  • +

    Developer A upgrades the dependency express to fixed version 4.16.3.

    +
  • +
  • +

    express has sub-dependency accepts with version selector ~1.3.5

    +
  • +
  • +

    His local node_modules folder receives accepts in version 1.3.5

    +
  • +
  • +

    On his machine everything is working fine

    +
  • +
  • +

    Afterward version 1.3.6 of accepts is published - it contains a major bug

    +
  • +
  • +

    Developer B now clones the repo and loads the dependencies.

    +
  • +
  • +

    He receives version 1.3.6 of accepts and blames developer A for upgrading to a broken version.

    +
  • +
+
+
+

Both yarn.lock and package-lock.json freeze all the dependencies. +For example in yarn.lock you will find:

+
+
+
Listing 1. yarn.lock example (excerpt)
+
+
accepts@~1.3.5:
+  version "1.3.5"
+  resolved "[...URL to registry]"
+  dependencies:
+    mime-types "~2.1.18"
+    negotiator "0.6.1"
+
+mime-db@~1.33.0:
+  version "1.33.0"
+  resolved "[...URL to registry]"
+
+mime-types@~2.1.18:
+  version "2.1.18"
+  resolved "[...URL to registry]"
+  dependencies:
+    mime-db "~1.33.0"
+
+negotiator@0.6.1:
+  version "0.6.1"
+  resolved "[...URL to registry]"
+
+
+
+

The described problem is solved by the example yarn.lock file.

+
+
+
    +
  • +

    accepts is frozen at version ~1.3.5

    +
  • +
  • +

    All of its sub-dependencies are also frozen. +It needs mime-types at version ~2.1.18 which is frozen at 2.1.18. +mime-types needs mime-db at ~1.33.0 which is frozen at 1.33.0

    +
  • +
+
+
+

Every developer will receive the same versions of every dependency.

+
+
+ + + + + +
+ + +You have to make sure all your developers are using the same npm/yarn version - this includes the CI build. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-routing.html b/docs/devon4ts/1.0/angular/guide-routing.html new file mode 100644 index 00000000..12649722 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-routing.html @@ -0,0 +1,666 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Routing

+
+
+

A basic introduction to the Angular Router can be found in Angular Docs.

+
+
+

This guide will show common tasks and best practices.

+
+
+
+
+

Defining Routes

+
+
+

For each feature module and the app module all routes should be defined in a separate module with the suffix RoutingModule. +This way the routing modules are the only place where routes are defined. +This pattern achieves a clear separation of concerns. +The following figure illustrates this.

+
+
+
+Routing module declaration +
+
Figure 1. Routing module declaration
+
+
+

It is important to define routes inside app routing module with .forRoot() and in feature routing modules with .forChild().

+
+
+
+
+

Example 1 - No Lazy Loading

+
+
+

In this example two modules need to be configured with routes - AppModule and FlightModule.

+
+
+

The following routes will be configured

+
+
+
    +
  • +

    / will redirect to /search

    +
  • +
  • +

    /search displays FlightSearchComponent (FlightModule)

    +
  • +
  • +

    /search/print/:flightId/:date displays FlightPrintComponent (FlightModule)

    +
  • +
  • +

    /search/details/:flightId/:date displays FlightDetailsComponent (FlightModule)

    +
  • +
  • +

    All other routes will display ErrorPage404 (AppModule)

    +
  • +
+
+
+
Listing 1. app-routing.module.ts
+
+
const routes: Routes = [
+  { path: '', redirectTo: 'search', pathMatch: 'full' },
+  { path: '**', component: ErrorPage404 }
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule]
+})
+export class AppRoutingModule { }
+
+
+
+
Listing 2. flight-search-routing.module.ts
+
+
const routes: Routes = [
+  {
+    path: 'search', children: [
+      { path: '', component: FlightSearchComponent },
+      { path: 'print/:flightId/:date', component: FlightPrintComponent },
+      { path: 'details/:flightId/:date', component: FlightDetailsComponent }
+    ]
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule],
+})
+export class FlightSearchRoutingModule { }
+
+
+
+ + + + + +
+ + +The import order inside AppModule is important. +AppRoutingModule needs to be imported after FlightModule. +
+
+
+
+
+

Example 2 - Lazy Loading

+
+
+

Lazy Loading is a good practice when the application has multiple feature areas and a user might not visit every dialog. +Or at least he might not need every dialog up front.

+
+
+

The following example will configure the same routes as example 1 but will lazy load FlightModule.

+
+
+
Listing 3. app-routing.module.ts
+
+
const routes: Routes = [
+  { path: '/search', loadChildren: 'app/flight-search/flight-search.module#FlightSearchModule' },
+  { path: '**', component: ErrorPage404 }
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule]
+})
+export class AppRoutingModule { }
+
+
+
+
Listing 4. flight-search-routing.module.ts
+
+
const routes: Routes = [
+  {
+    path: '', children: [
+      { path: '', component: FlightSearchComponent },
+      { path: 'print/:flightId/:date', component: FlightPrintComponent },
+      { path: 'details/:flightId/:date', component: FlightDetailsComponent }
+    ]
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule],
+})
+export class FlightSearchRoutingModule { }
+
+
+
+
+
+

Triggering Route Changes

+
+
+

With Angular you have two ways of triggering route changes.

+
+
+
    +
  1. +

    Declarative with bindings in component HTML templates

    +
  2. +
  3. +

    Programmatic with Angular Router service inside component classes

    +
  4. +
+
+
+

On the one hand, architecture-wise it is a much cleaner solution to trigger route changes in Smart Components. +This way you have every UI event that should trigger a navigation handled in one place - in a Smart Component. +It becomes very easy to look inside the code for every navigation, that can occur. +Refactoring is also much easier, as there are no navigation events "hidden" in the HTML templates

+
+
+

On the other hand, in terms of accessibility and SEO it is a better solution to rely on bindings in the view - e.g. by using Angular router-link directive. +This way screen readers and the Google crawler can move through the page easily.

+
+
+ + + + + +
+ + +If you do not have to support accessibility (screen readers, etc.) and to care about SEO (Google rank, etc.), +then you should aim for triggering navigation only in Smart Components. +
+
+
+
+Triggering navigation +
+
Figure 2. Triggering navigation
+
+
+
+
+

Guards

+
+
+

Guards are Angular services implemented on routes which determines whether a user can navigate to/from the route. There are examples below which will explain things better. We have the following types of Guards:

+
+
+
    +
  • +

    CanActivate: It is used to determine whether a user can visit a route. The most common scenario for this guard is to check if the user is authenticated. For example, if we want only logged in users to be able to go to a particular route, we will implement the CanActivate guard on this route.

    +
  • +
  • +

    CanActivateChild: Same as above, only implemented on child routes.

    +
  • +
  • +

    CanDeactivate: It is used to determine if a user can navigate away from a route. Most common example is when a user tries to go to a different page after filling up a form and does not save/submit the changes, we can use this guard to confirm whether the user really wants to leave the page without saving/submitting.

    +
  • +
  • +

    Resolve: For resolving dynamic data.

    +
  • +
  • +

    CanLoad: It is used to determine whether an Angular module can be loaded lazily. Example below will be helpful to understand it.

    +
  • +
+
+
+

Let’s have a look at some examples.

+
+
+
+
+

Example 1 - CanActivate and CanActivateChild guards

+
+ +
+
+
+

== CanActivate guard

+
+
+

As mentioned earlier, a guard is an Angular service and services are simply TypeScript classes. So we begin by creating a class. This class has to implement the CanActivate interface (imported from @angular/router), and therefore, must have a canActivate function. The logic of this function determines whether the requested route can be navigated to or not. It returns either a Boolean value or an Observable or a Promise which resolves to a Boolean value. If it is true, the route is loaded, else not.

+
+
+
Listing 5. CanActivate example
+
+
...
+import {CanActivate} from "@angular/router";
+
+@Injectable()
+class ExampleAuthGuard implements CanActivate {
+  constructor(private authService: AuthService) {}
+
+  canActivate(route: ActivatedRouteSnapshot, state: RouterStateSnapshot) {
+	if (this.authService.isLoggedIn()) {
+      return true;
+    } else {
+	  window.alert('Please log in first');
+      return false;
+    }
+  }
+}
+
+
+
+

In the above example, let’s assume we have an AuthService which has an isLoggedIn() method which returns a Boolean value depending on whether the user is logged in. We use it to return true or false from the canActivate function. +The canActivate function accepts two parameters (provided by Angular). The first parameter of type ActivatedRouteSnapshot is the snapshot of the route the user is trying to navigate to (where the guard is implemented); we can extract the route parameters from this instance. The second parameter of type RouterStateSnapshot is a snapshot of the router state the user is trying to navigate to; we can fetch the URL from its url property.

+
+
+ + + + + +
+ + +We can also redirect the user to another page (maybe a login page) if the authService returns false. To do that, inject Router and use it’s navigate function to redirect to the appropriate page. +
+
+
+

Since it is a service, it needs to be provided in our module:

+
+
+
Listing 6. provide the guard in a module
+
+
@NgModule({
+  ...
+  providers: [
+    ...
+    ExampleAuthGuard
+  ]
+})
+
+
+
+

Now this guard is ready to use on our routes. We implement it where we define our array of routes in the application:

+
+
+
Listing 7. Implementing the guard
+
+
...
+const routes: Routes = [
+  { path: '', redirectTo: 'home', pathMatch: 'full' },
+  { path: 'home', component: HomeComponent },
+  { path: 'page1', component: Page1Component, canActivate: [ExampleAuthGuard] }
+];
+
+
+
+

As you can see, the canActivate property accepts an array of guards. So we can implement more than one guard on a route.

+
+
+
+
+

== CanActivateChild guard

+
+
+

To use the guard on nested (children) routes, we add it to the canActivateChild property like so:

+
+
+
Listing 8. Implementing the guard on child routes
+
+
...
+const routes: Routes = [
+  { path: '', redirectTo: 'home', pathMatch: 'full' },
+  { path: 'home', component: HomeComponent },
+  { path: 'page1', component: Page1Component, canActivateChild: [ExampleAuthGuard], children: [
+	{path: 'sub-page1', component: SubPageComponent},
+    {path: 'sub-page2', component: SubPageComponent}
+  ] }
+];
+
+
+
+
+
+

Example 2 - CanLoad guard

+
+
+

Similar to CanActivate, to use this guard we implement the CanLoad interface and override its canLoad function. Again, this function returns either a Boolean value or an Observable or a Promise which resolves to a Boolean value. The fundamental difference between CanActivate and CanLoad is that CanLoad is used to determine whether an entire module can be lazily loaded or not. If the guard returns false for a module protected by CanLoad, the entire module is not loaded.

+
+
+
Listing 9. CanLoad example
+
+
...
+import {CanLoad, Route} from "@angular/router";
+
+@Injectable()
+class ExampleCanLoadGuard implements CanLoad {
+  constructor(private authService: AuthService) {}
+
+  canLoad(route: Route) {
+	if (this.authService.isLoggedIn()) {
+      return true;
+    } else {
+	  window.alert('Please log in first');
+      return false;
+    }
+  }
+}
+
+
+
+

Again, let’s assume we have an AuthService which has an isLoggedIn() method which returns a Boolean value depending on whether the user is logged in. The canLoad function accepts a parameter of type Route which we can use to fetch the path a user is trying to navigate to (using the path property of Route).

+
+
+

This guard needs to be provided in our module like any other service.

+
+
+

To implement the guard, we use the canLoad property:

+
+
+
Listing 10. Implementing the guard
+
+
...
+const routes: Routes = [
+  { path: 'home', component: HomeComponent },
+  { path: 'admin', loadChildren: 'app/admin/admin.module#AdminModule', canLoad: [ExampleCanLoadGuard] }
+];
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-testing.html b/docs/devon4ts/1.0/angular/guide-testing.html new file mode 100644 index 00000000..0ae68ac9 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-testing.html @@ -0,0 +1,719 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Testing

+
+
+

This guide will cover the basics of testing logic inside your code with unit test cases. +The guide assumes that you are familiar with Angular CLI (see the guide)

+
+
+

For testing your Angular application with unit test cases there are two main strategies:

+
+
+
    +
  1. +

    Isolated unit test cases
    +Isolated unit tests examine an instance of a class all by itself without any dependence on Angular or any injected values. +The amount of code and effort needed to create such tests is minimal.

    +
  2. +
  3. +

    Angular Testing Utilities
    +Let you test components including their interaction with Angular. +The amount of code and effort needed to create such tests is a little higher.

    +
  4. +
+
+
+
+
+

Testing Concept

+
+
+

The following figure shows you an overview of the application architecture divided in testing areas.

+
+
+
+Testing Areas +
+
Figure 1. Testing Areas
+
+
+

There are three areas, which need to be covered by different testing strategies.

+
+
+
    +
  1. +

    Components:
    +Smart Components need to be tested because they contain view logic. +Also the interaction with 3rd party components needs to be tested. +When a 3rd party component changes with an upgrade a test will be failing and warn you, that there is something wrong with the new version. +Most of the time Dumb Components do not need to be tested because they mainly display data and do not contain any logic. +Smart Components are always tested with Angular Testing Utilities. +For example selectors, which select data from the store and transform it further, need to be tested.

    +
  2. +
  3. +

    Stores:
    +A store contains methods representing state transitions. +If these methods contain logic, they need to be tested. +Stores are always tested using Isolated unit tests.

    +
  4. +
  5. +

    Services:
    +Services contain Business Logic, which needs to be tested. +UseCase Services represent a whole business use case. +For instance this could be initializing a store with all the data that is needed for a dialog - loading, transforming, storing. +Often Angular Testing Utilities are the optimal solution for testing UseCase Services, because they allow for an easy stubbing of the back-end. +All other services should be tested with Isolated unit tests as they are much easier to write and maintain.

    +
  6. +
+
+
+
+
+

Testing Smart Components

+
+
+

Testing Smart Components should assure the following.

+
+
+
    +
  1. +

    Bindings are correct.

    +
  2. +
  3. +

    Selectors which load data from the store are correct.

    +
  4. +
  5. +

    Asynchronous behavior is correct (loading state, error state, "normal" state).

    +
  6. +
  7. +

    Oftentimes through testing one realizes, that important edge cases are forgotten.

    +
  8. +
  9. +

    Do these test become very complex, it is often an indicator for poor code quality in the component. +Then the implementation is to be adjusted / refactored.

    +
  10. +
  11. +

    When testing values received from the native DOM, you will test also that 3rd party libraries did not change with a version upgrade. +A failing test will show you what part of a 3rd party library has changed. +This is much better than the users doing this for you. +For example a binding might fail because the property name was changed with a newer version of a 3rd party library.

    +
  12. +
+
+
+

In the function beforeEach() the TestBed imported from Angular Testing Utilities needs to be initialized. +The goal should be to define a minimal test-module with TestBed. +The following code gives you an example.

+
+
+
Listing 1. Example test setup for Smart Components
+
+
describe('PrintFlightComponent', () => {
+
+  let fixture: ComponentFixture<PrintFlightComponent>;
+  let store: FlightStore;
+  let printServiceSpy: jasmine.SpyObj<FlightPrintService>;
+
+  beforeEach(() => {
+    const urlParam = '1337';
+    const activatedRouteStub = { params: of({ id: urlParam }) };
+    printServiceSpy = jasmine.createSpyObj('FlightPrintService', ['initializePrintDialog']);
+    TestBed.configureTestingModule({
+      imports: [
+        TranslateModule.forRoot(),
+        RouterTestingModule
+      ],
+      declarations: [
+        PrintFlightComponent,
+        PrintContentComponent,
+        GeneralInformationPrintPanelComponent,
+        PassengersPrintPanelComponent
+      ],
+      providers: [
+        FlightStore,
+        {provide: FlightPrintService, useValue: printServiceSpy},
+        {provide: ActivatedRoute, useValue: activatedRouteStub}
+      ]
+    });
+    fixture = TestBed.createComponent(PrintFlightComponent);
+    store = fixture.debugElement.injector.get(FlightStore);
+    fixture.detectChanges();
+  });
+
+  // ... test cases
+})
+
+
+
+

It is important:

+
+
+
    +
  • +

    Use RouterTestingModule instead of RouterModule

    +
  • +
  • +

    Use TranslateModule.forRoot() without translations +This way you can test language-neutral without translation marks.

    +
  • +
  • +

    Do not add a whole module from your application - in declarations add the tested Smart Component with all its Dumb Components

    +
  • +
  • +

    The store should never be stubbed. +If you need a complex test setup, just use the regular methods defined on the store.

    +
  • +
  • +

    Stub all services used by the Smart Component. +These are mostly UseCase services. +They should not be tested by these tests. +Only the correct call to their functions should be assured. +The logic inside the UseCase services is tested with separate tests.

    +
  • +
  • +

    detectChanges() performs an Angular Change Detection cycle (Angular refreshes all the bindings present in the view)

    +
  • +
  • +

    tick() performs a virtual macro task, tick(1000) is equal to the virtual passing of 1s.

    +
  • +
+
+
+

The following test cases show the testing strategy in action.

+
+
+
Listing 2. Example
+
+
it('calls initializePrintDialog for url parameter 1337', fakeAsync(() => {
+  expect(printServiceSpy.initializePrintDialog).toHaveBeenCalledWith(1337);
+}));
+
+it('creates correct loading subtitle', fakeAsync(() => {
+  store.setPrintStateLoading(123);
+  tick();
+  fixture.detectChanges();
+
+  const subtitle = fixture.debugElement.query(By.css('app-header-element .print-header-container span:last-child'));
+  expect(subtitle.nativeElement.textContent).toBe('PRINT_HEADER.FLIGHT STATE.IS_LOADING');
+}));
+
+it('creates correct subtitle for loaded flight', fakeAsync(() => {
+  store.setPrintStateLoadedSuccess({
+    id: 123,
+    description: 'Description',
+    iata: 'FRA',
+    name: 'Frankfurt',
+    // ...
+  });
+  tick();
+  fixture.detectChanges();
+
+  const subtitle = fixture.debugElement.query(By.css('app-header-element .print-header-container span:last-child'));
+  expect(subtitle.nativeElement.textContent).toBe('PRINT_HEADER.FLIGHT "FRA (Frankfurt)" (ID: 123)');
+}));
+
+
+
+

The examples show the basic testing method

+
+
+
    +
  • +

    Set the store to a well-defined state

    +
  • +
  • +

    check if the component displays the correct values

    +
  • +
  • +

    …​ via checking values inside the native DOM.

    +
  • +
+
+
+
+
+

Testing state transitions performed by stores

+
+
+

Stores are always tested with Isolated unit tests.

+
+
+

Actions triggered by dispatchAction() calls are asynchronously performed to alter the state. +A good solution to test such a state transition is to use the done callback from Jasmine.

+
+
+
Listing 3. Example for testing a store
+
+
let sut: FlightStore;
+
+beforeEach(() => {
+  sut = new FlightStore();
+});
+
+it('setPrintStateLoading sets print state to loading', (done: Function) => {
+  sut.setPrintStateLoading(4711);
+
+  sut.state$.pipe(first()).subscribe(result => {
+    expect(result.print.isLoading).toBe(true);
+    expect(result.print.loadingId).toBe(4711);
+    done();
+  });
+});
+
+it('toggleRowChecked adds flight with given id to selectedValues Property', (done: Function) => {
+  const flight: FlightTO = {
+    id: 12
+    // dummy data
+  };
+  sut.setRegisterabgleichListe([flight]);
+  sut.toggleRowChecked(12);
+
+  sut.state$.pipe(first()).subscribe(result => {
+    expect(result.selectedValues).toContain(flight);
+    done();
+  });
+});
+
+
+
+
+
+

Testing services

+
+
+

When testing services both strategies - Isolated unit tests and Angular Testing Utilities - are valid options.

+
+
+

The goals of such tests are

+
+
+
    +
  • +

    assuring the behavior for valid data.

    +
  • +
  • +

    assuring the behavior for invalid data.

    +
  • +
  • +

    documenting functionality

    +
  • +
  • +

    safely performing refactoring

    +
  • +
  • +

    thinking about edge case behavior while testing

    +
  • +
+
+
+

For simple services Isolated unit tests can be written. +Writing these tests takes less effort and they can be written very fast.

+
+
+

The following listing gives an example of such tests.

+
+
+
Listing 4. Testing a simple service with Isolated unit tests
+
+
let sut: IsyDatePipe;
+
+beforeEach(() => {
+  sut = new IsyDatePipe();
+});
+
+it('transform should return empty string if input value is empty', () => {
+  expect(sut.transform('')).toBe('');
+});
+
+it('transform should return empty string if input value is null', () => {
+  expect(sut.transform(undefined)).toBe('');
+});
+
+// ...more tests
+
+
+
+

For testing Use Case services the Angular Testing Utilities should be used. +The following listing gives an example.

+
+
+
Listing 5. Test setup for testing use case services with Angular Testing Utilities
+
+
let sut: FlightPrintService;
+let store: FlightStore;
+let httpController: HttpTestingController;
+let flightCalculationServiceStub: jasmine.SpyObj<FlightCalculationService>;
+const flight: FlightTo = {
+  // ... valid dummy data
+};
+
+beforeEach(() => {
+  flightCalculationServiceStub = jasmine.createSpyObj('FlightCalculationService', ['getFlightType']);
+  flightCalculationServiceStub.getFlightType.and.callFake((catalog: string, type: string, key: string) => of(`${key}_long`));
+  TestBed.configureTestingModule({
+    imports: [
+      HttpClientTestingModule,
+      RouterTestingModule,
+    ],
+    providers: [
+      FlightPrintService,
+      FlightStore,
+      FlightAdapter,
+      {provide: FlightCalculationService, useValue: flightCalculationServiceStub}
+    ]
+  });
+
+  sut = TestBed.get(FlightPrintService);
+  store = TestBed.get(FlightStore);
+  httpController = TestBed.get(HttpTestingController);
+});
+
+
+
+

When using TestBed, it is important

+
+
+
    +
  • +

    to import HttpClientTestingModule for stubbing the back-end

    +
  • +
  • +

    to import RouterTestingModule for stubbing the Angular router

    +
  • +
  • +

    not to stub stores, adapters and business services

    +
  • +
  • +

    to stub services from libraries like FlightCalculationService - the correct implementation of libraries should not be tested by these tests.

    +
  • +
+
+
+

Testing back-end communication looks like this:

+
+
+
Listing 6. Testing back-end communication with Angular HttpTestingController
+
+
it('loads flight if not present in store', fakeAsync(() => {
+  sut.initializePrintDialog(1337);
+  const processRequest = httpController.expectOne('/path/to/flight');
+  processRequest.flush(flight);
+
+  httpController.verify();
+}));
+
+it('does not load flight if present in store', fakeAsync(() => {
+  const flight = {...flight, id: 4711};
+  store.setRegisterabgleich(flight);
+
+  sut.initializePrintDialog(4711);
+  httpController.expectNone('/path/to/flight');
+
+  httpController.verify();
+}));
+
+
+
+

The first test assures a correct XHR request is performed if initializePrintDialog() is called and no data is in the store. +The second test assures no XHR request is performed if the needed data is already in the store.

+
+
+

The next steps are checks for the correct implementation of logic.

+
+
+
Listing 7. Example testing a Use Case service
+
+
it('creates flight destination for valid key in svz', fakeAsync(() => {
+  const flightTo: FlightTo = {
+    ...flight,
+    id: 4712,
+    profile: '77'
+  };
+  store.setFlight(flightTo);
+  let result: FlightPrintContent|undefined;
+
+  sut.initializePrintDialog(4712);
+  store.select(s => s.print.content).subscribe(content => result = content);
+  tick();
+
+  expect(result!.destination).toBe('77_long (ID: 77)');
+}));
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-update-angular-cli.html b/docs/devon4ts/1.0/angular/guide-update-angular-cli.html new file mode 100644 index 00000000..1e192e3c --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-update-angular-cli.html @@ -0,0 +1,346 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Update Angular CLI

+
+ +
+
+
+

Angular CLI common issues

+
+
+

There are constant updates for the official Angular framework dependencies. These dependencies are directly related with the Angular CLI package. Since this package comes installed by default inside the devonfw distribution folder for Windows OS and the distribution is updated every few months it needs to be updated in order to avoid known issues.

+
+
+
+
+

Angular CLI update guide

+
+
+

For Linux users it is as easy as updating the global package:

+
+
+
+
$ npm uninstall -g @angular/cli
+$ npm install -g @angular/cli
+
+
+
+

For Windows users the process is only a bit harder. Open the devonfw bundled console and do as follows:

+
+
+
+
$ cd [devonfw_dist_folder]
+$ cd software/nodejs
+$ npm uninstall @angular/cli --no-save
+$ npm install @angular/cli --no-save
+
+
+
+

After following these steps you should have the latest Angular CLI version installed in your system. In order to check it run in the distribution console:

+
+
+ + + + + +
+ + +At the time of this writing, the Angular CLI is at 1.7.4 version. +
+
+
+
+
λ ng version
+
+     _                      _                 ____ _     ___
+    / \   _ __   __ _ _   _| | __ _ _ __     / ___| |   |_ _|
+   / △ \ | '_ \ / _` | | | | |/ _` | '__|   | |   | |    | |
+  / ___ \| | | | (_| | |_| | | (_| | |      | |___| |___ | |
+ /_/   \_\_| |_|\__, |\__,_|_|\__,_|_|       \____|_____|___|
+                |___/
+
+
+Angular CLI: 7.2.3
+Node: 10.13.0
+OS: win32 x64
+Angular:
+...
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-upgrade-devon4ng.html b/docs/devon4ts/1.0/angular/guide-upgrade-devon4ng.html new file mode 100644 index 00000000..d170b277 --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-upgrade-devon4ng.html @@ -0,0 +1,441 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Upgrade devon4ng Angular and Ionic/Angular applications

+
+
+

Angular CLI provides a powerful tool to upgrade Angular based applications to the current stable release of the core framework.

+
+
+

This tool is ng update. It will not only upgrade dependencies and their related ones but also will perform some fixes in your code if available thanks to the provided schematics. It will check even if the update is not possible as there is another library or libraries that are not compatible with the versions of the upgraded dependencies. In this case it will keep your application untouched.

+
+
+ + + + + +
+ + +The repository must be in a clean state before executing a ng update. So, remember to commit your changes first. +
+
+
+
+
+

Basic usage

+
+
+

In order to perform a basic upgrade we will execute:

+
+
+
+
$ ng update @angular/cli @angular/core
+
+
+
+
+
+

Upgrade to new Angular version

+
+
+

The process will be the same, but first we need to make sure that our devon4ng application is in the latest version of Angular 8, so the ng update command can perform the upgrade not only in the dependencies but also making code changes to reflect the new features and fixes.

+
+
+
    +
  • +

    First, upgrade to latest Angular 9 version:

    +
  • +
+
+
+
+
$ ng update @angular/cli@9 @angular/core@9
+
+
+
+

Optionally the flag -C can be added to previous command to make a commit automatically. This is also valid for the next steps.

+
+
+
    +
  • +

    Then, upgrade Angular:

    +
  • +
+
+
+
+
$ ng update @angular/cli @angular/core
+
+
+
+
    +
  • +

    In case you use Angular Material:

    +
  • +
+
+
+
+
$ ng update @angular/material
+
+
+
+
    +
  • +

    If the application depends on third party libraries, the new tool ngcc can be run to make them compatible with the new Ivy compiler. In this case it is recommended to include a postinstall script in the package.json:

    +
  • +
+
+
+
+
{
+  "scripts": {
+    "postinstall": "ngcc --properties es2015 browser module main --first-only --create-ivy-entry-points"
+  }
+}
+
+
+ +
+

Important use cases:

+
+
+
    +
  • +

    To update to the next beta or pre-release version, use the --next=true option.

    +
  • +
  • +

    To update from one major version to another, use the format ng update @angular/cli@^<major_version> @angular/core@^<major_version>.

    +
  • +
  • +

    In case your Angular application uses @angular/material include it in the first command:

    +
    +
    +
    $ ng update @angular/cli @angular/core @angular/material
    +
    +
    +
  • +
+
+
+
+
+

Ionic/Angular applications

+
+
+

Just following the same procedure we can upgrade Angular applications, but we must take care of important specific Ionic dependencies:

+
+
+
+
$ ng update @angular/cli @angular/core @ionic/angular @ionic/angular-toolkit [@ionic/...]
+
+
+
+
+
+

Other dependencies

+
+
+

Every application will make use of different dependencies. Angular CLI ng update will also take care of these. For example, if you need to upgrade @capacitor you will perform:

+
+
+
+
$ ng update @capacitor/cli @capacitor/core [@capacitor/...]
+
+
+
+

Another example could be that you need to upgrade @ngx-translate packages. As always in this case you will execute:

+
+
+
+
$ ng update @ngx-translate/core @ngx-translate/http-loader
+
+
+
+
+
+

Angular Update Guide online tool

+
+
+

It is recommended to use the Angular Update Guide tool at https://update.angular.io/ that will provide the necessary steps to upgrade any Angular application depending on multiple criteria.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-working-with-angular-cli.html b/docs/devon4ts/1.0/angular/guide-working-with-angular-cli.html new file mode 100644 index 00000000..0c16755f --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-working-with-angular-cli.html @@ -0,0 +1,585 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Working with Angular CLI

+
+
+

Angular CLI provides a facade for building, testing, linting, debugging and generating code. +Under the hood Angular CLI uses specific tools to achieve these tasks. +The user does not need to maintain them and can rely on Angular to keep them up to date and maybe switch to other tools which come up in the future.

+
+
+

The Angular CLI provides a wiki with common tasks you encounter when working on applications with the Angular CLI. +The Angular CLI Wiki can be found here.

+
+
+

In this guide we will go through the most important tasks. +To go into more details, please visit the Angular CLI wiki.

+
+
+
+
+

Installing Angular CLI

+
+
+

Angular CLI should be added as global and local dependency. +The following commands add Angular CLI as global Dependency.

+
+
+

yarn command

+
+
+
+
yarn global add @angular/cli
+
+
+
+

npm command

+
+
+
+
npm install -g @angular/cli
+
+
+
+

You can check a successful installation with ng --version. +This should print out the version installed.

+
+
+
+Printing Angular CLI Version +
+
Figure 1. Printing Angular CLI Version
+
+
+
+
+

Running a live development server

+
+
+

The Angular CLI can be used to start a live development server. +First your application will be compiled and then the server will be started. +If you change the code of a file, the server will reload the displayed page. +Run your application with the following command:

+
+
+
+
ng serve -o
+
+
+
+
+
+

Running Unit Tests

+
+
+

All unit tests can be executed with the command:

+
+
+
+
ng test
+
+
+
+

To make a single run and create a code coverage file use the following command:

+
+
+
+
ng test -sr -cc
+
+
+
+ + + + + +
+ + +You can configure the output format for code coverage files to match your requirements in the file karma.conf.js which can be found on toplevel of your project folder. +For instance, this can be useful for exporting the results to a SonarQube. +
+
+
+
+
+

Linting the code quality

+
+
+

You can lint your files with the command

+
+
+
+
ng lint --type-check
+
+
+
+ + + + + +
+ + +You can adjust the linting rules in the file tslint.json which can be found on toplevel of your project folder. +
+
+
+
+
+

Generating Code

+
+ +
+
+
+

Creating a new Angular CLI project

+
+
+

For creating a new Angular CLI project the command ng new is used.

+
+
+

The following command creates a new application named my-app.

+
+
+
+
ng new my-app
+
+
+
+
+
+

Creating a new feature module

+
+
+

A new feature module can be created via the ng generate module command.

+
+
+

The following command generates a new feature module named todo.

+
+
+
+
ng generate module todo
+
+
+
+
+Generate a module with Angular CLI +
+
Figure 2. Generate a module with Angular CLI
+
+
+ + + + + +
+ + +The created feature module needs to be added to the AppModule by hand. +Other option would be to define a lazy route in AppRoutingModule to make this a lazy loaded module. +
+
+
+
+
+

Creating a new component

+
+
+

To create components the command ng generate component can be used.

+
+
+

The following command will generate the component todo-details inside the components layer of the todo module. +It will generate a class, an HTML file, a CSS file and a test file. +Also, it will register this component as a declaration inside the nearest module - this is TodoModule.

+
+
+
+
ng generate component todo/components/todo-details
+
+
+
+
+Generate a component with Angular CLI +
+
Figure 3. Generate a component with Angular CLI
+
+
+ + + + + +
+ + +If you want to export the component, you have to add the component to exports array of the module. +This would be the case if you generate a component inside shared module. +
+
+
+
+
+

Configuring an Angular CLI project

+
+
+

Inside an Angular CLI project the file .angular-cli.json can be used to configure the Angular CLI.

+
+
+

The following options are very important to understand.

+
+
+
    +
  • +

    The property defaults can be used to change the default style extension. +The following settings will make the Angular CLI generate .less files, when a new component is generated.

    +
  • +
+
+
+
+
"defaults": {
+  "styleExt": "less",
+  "component": {}
+}
+
+
+
+
    +
  • +

    The property apps contains all applications maintained with Angular CLI. +Most of the time you will have only one.

    +
    +
      +
    • +

      assets configures all the static files, that the application needs - this can be images, fonts, json files, etc. +When you add them to assets the Angular CLI will put these files to the build target and serve them while debugging. +The following will put all files in /i18n to the output folder /i18n

      +
    • +
    +
    +
  • +
+
+
+
+
"assets": [
+  { "glob": "**/*.json", "input": "./i18n", "output": "./i18n" }
+]
+
+
+
+
    +
  • +

    styles property contains all style files that will be globally available. +The Angular CLI will create a styles bundle that goes directly into index.html with it. +The following will make all styles in styles.less globally available.

    +
  • +
+
+
+
+
"styles": [
+  "styles.less"
+]
+
+
+
+
    +
  • +

    environmentSource and environments are used to manage environment configuration with the Angular CLI. +Inside the code, the file specified in environmentSource is always referenced. +You can define different environments - e.g. production, staging, etc. - which you list in environments. +At compile time the Angular CLI will override all values in environmentSource with the values from the matching environment target. +The following code will build the application for the environment staging.

    +
  • +
+
+
+
+
ng build --environment=staging
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/guide-yarn-2-support.html b/docs/devon4ts/1.0/angular/guide-yarn-2-support.html new file mode 100644 index 00000000..b09c70ed --- /dev/null +++ b/docs/devon4ts/1.0/angular/guide-yarn-2-support.html @@ -0,0 +1,427 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Yarn 2

+
+
+

Yarn v2 is very different software from v1. The following list contains the main new features:

+
+ +
+

Please, read them carefully to decide if your current project is suitable to use Yarn 2 as package manager.

+
+
+ + + + + +
+ + +Some features are still experimental, so please do not use them in production environments. +
+
+
+

More info at https://yarnpkg.com/

+
+
+
+
+

Global Install

+
+
+

Installing Yarn 2.x globally is discouraged as Yarn team is moving to a per-project install strategy. We advise you to keep Yarn 1.x (Classic) as your global binary by installing it via the instructions you can find here.

+
+
+

Once you’ve followed the instructions (running yarn --version from your home directory should yield something like 1.22.0), go to the next section to see how to enable Yarn 2 on your project.

+
+
+
+
+

Per-project install

+
+
+

Follow these instructions to update your current devon4ng project to Yarn 2:

+
+
+
    +
  1. +

    Follow the global install instructions.

    +
  2. +
  3. +

    Move into your project folder:

    +
    +
    +
    cd ~/path/to/project
    +
    +
    +
  4. +
  5. +

    Run the following command:

    +
    +
    +
    yarn policies set-version berry # below v1.22
    +yarn set version berry          # on v1.22+
    +
    +
    +
  6. +
  7. +

    Since Angular CLI still is not fully supported with the new Yarn architecture as it is not compatible with PnP it is necessary to include the node-modules plugin adding the following line in the .yarnrc.yml file:

    +
    +
    +
    nodeLinker: node-modules
    +
    +
    +
  8. +
  9. +

    Commit the .yarn and .yarnrc.yml changes

    +
  10. +
  11. +

    Run again yarn install.

    +
  12. +
+
+
+ + + + + +
+ + +For more advanced migration topics please refer to https://yarnpkg.com/advanced/migration +
+
+
+
+
+

Which files should be added to gitignore file?

+
+
+

If you’re using Zero-Installs:

+
+
+
+
.yarn/*
+!.yarn/cache
+!.yarn/releases
+!.yarn/plugins
+
+
+
+

If you’re not using Zero-Installs:

+
+
+
+
.yarn/*
+!.yarn/releases
+!.yarn/plugins
+.pnp.*
+
+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/home.html b/docs/devon4ts/1.0/angular/home.html new file mode 100644 index 00000000..45963f2e --- /dev/null +++ b/docs/devon4ts/1.0/angular/home.html @@ -0,0 +1,550 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4ng

+
+
+

This guide describes an application architecture for web client development with Angular.

+
+
+
+
+

Motivation

+
+
+

Front-end development is a very difficult task since there are a lot of different frameworks, patterns and practices nowadays. For that reason, in devonfw we decided to make use of Angular since it is a full front-end framework that includes almost all the different patterns and features that any SPA may need and provides a well defined architecture to development, build and deploy.

+
+
+

The idea with devon4ng is to define an architecture which is a compromise between, on the one hand, leveraging the best practices and latest trends like reactive style development, on the other hand, providing a short on-boarding time while still using an architecture that helps us scale and be productive at the same time.

+
+
+

At the same time devon4ng aims to help developers to solve common problems that appear in many projects and provide samples and blueprints to show how to apply this solutions in real situations.

+
+
+
+
+

Contents

+
+ +
+

This section introduces in an easy way the main principles and guidelines based on Angular Style Guide.

+
+ +
+

The goal of this topic is to support the non-functional requirements for the client, i.e. mostly maintainability, scalability, efficiency and portability. As such it provides a component-oriented architecture following the same principles listed already in the devonfw architecture overview.

+
+
+
+
+

Layers

+
+
+

This section provides a condensed explanation about the different layers a good Angular application must provide.

+
+ +
+
+
+

Guides

+
+
+

This section introduces concepts to help developers with the tooling and package managers.

+
+ +
+
+
+

Angular

+
+
+

This is the main section of the documentation, where the developer will find guidelines for accessibility, how to use the Angular toolchain, how to refactor components, create libraries and, in general, maintain Angular applications. But last and not least, developers will also find solutions to common problems many of the Angular projects may have.

+
+
+ + + + + +
+ + +All the different topics are demonstrated in the samples folder with a small application. +
+
+ +
+
+
+

Ionic

+
+
+

As part of the devon4ng stack, we include a small section to explain how to develop hybrid mobile Ionic/Angular applications and create PWAs with this UI library. As the previous section, the contents are demonstrated in the samples folder.

+
+ +
+
+
+

Layouts

+
+
+

Any SPA application must have a layout. So, the purpose of this section is to explain the Angular Material approach.

+
+ +
+
+
+

NgRx

+
+
+

State Management is a big topic in big front-end application. This section explains the fundamentals of the industry standard library NgRx, showing its main components.

+
+ +
+
+
+

Cookbook

+
+
+

The Cookbook section aims to provide solutions to cross-topic challenges that at this moment do not fit in the previous sections. As the Angular section, some of the topics are demonstrated with a sample located in the samples folder.

+
+ +
+
+
+

devon4ng templates

+
+
+

In order to support CobiGen generation tool for Angular applications, devon4ng demos realization and provide more opinionated samples, the following templates are also included in devon4ng contents:

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/master-devon4ng.html b/docs/devon4ts/1.0/angular/master-devon4ng.html new file mode 100644 index 00000000..e39f61c7 --- /dev/null +++ b/docs/devon4ts/1.0/angular/master-devon4ng.html @@ -0,0 +1,11551 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Angular

+
+ +
+
+
+

Introduction

+
+ +
+

devon4ng

+
+

This guide describes an application architecture for web client development with Angular.

+
+
+
+

Motivation

+
+

Front-end development is a very difficult task since there are a lot of different frameworks, patterns and practices nowadays. For that reason, in devonfw we decided to make use of Angular since it is a full front-end framework that includes almost all the different patterns and features that any SPA may need and provides a well defined architecture to development, build and deploy.

+
+
+

The idea with devon4ng is to define an architecture which is a compromise between, on the one hand, leveraging the best practices and latest trends like reactive style development, on the other hand, providing a short on-boarding time while still using an architecture that helps us scale and be productive at the same time.

+
+
+

At the same time devon4ng aims to help developers to solve common problems that appear in many projects and provide samples and blueprints to show how to apply this solutions in real situations.

+
+
+
+

Contents

+ +
+

This section introduces in an easy way the main principles and guidelines based on Angular Style Guide.

+
+ +
+

The goal of this topic is to support the non-functional requirements for the client, i.e. mostly maintainability, scalability, efficiency and portability. As such it provides a component-oriented architecture following the same principles listed already in the devonfw architecture overview.

+
+
+
+

Layers

+
+

This section provides a condensed explanation about the different layers a good Angular application must provide.

+
+ +
+
+

Guides

+
+

This section introduces concepts to help developers with the tooling and package managers.

+
+ +
+
+

Angular

+
+

This is the main section of the documentation, where the developer will find guidelines for accessibility, how to use the Angular toolchain, how to refactor components, create libraries and, in general, maintain Angular applications. But last and not least, developers will also find solutions to common problems many of the Angular projects may have.

+
+
+ + + + + +
+ + +All the different topics are demonstrated in the samples folder with a small application. +
+
+ +
+
+

Ionic

+
+

As part of the devon4ng stack, we include a small section to explain how to develop hybrid mobile Ionic/Angular applications and create PWAs with this UI library. As the previous section, the contents are demonstrated in the samples folder.

+
+ +
+
+

Layouts

+
+

Any SPA application must have a layout. So, the purpose of this section is to explain the Angular Material approach.

+
+ +
+
+

NgRx

+
+

State Management is a big topic in big front-end application. This section explains the fundamentals of the industry standard library NgRx, showing its main components.

+
+ +
+
+

Cookbook

+
+

The Cookbook section aims to provide solutions to cross-topic challenges that at this moment do not fit in the previous sections. As the Angular section, some of the topics are demonstrated with a sample located in the samples folder.

+
+ +
+
+

devon4ng templates

+
+

In order to support CobiGen generation tool for Angular applications, devon4ng demos realization and provide more opinionated samples, the following templates are also included in devon4ng contents:

+
+
+ +
+
+
+
+
+

Architecture

+
+ +
+

Architecture

+
+

The following principles and guidelines are based on Angular Style Guide - especially Angular modules (see Angular Docs). +It extends those where additional guidance is needed to define an architecture which is:

+
+
+
    +
  • +

    maintainable across applications and teams

    +
  • +
  • +

    easy to understand, especially when coming from a classic Java/.Net perspective - so whenever possible the same principles apply both to the server and the client

    +
  • +
  • +

    pattern based to solve common problems

    +
  • +
  • +

    based on best of breed solutions coming from open source and Capgemini project experiences

    +
  • +
  • +

    gives as much guidance as necessary and as little as possible

    +
  • +
+
+
+
+

Overview

+
+

When using Angular the web client architecture is driven by the framework in a certain way Google and the Angular community think about web client architecture. +Angular gives an opinion on how to look at architecture. +It is a component based like devon4j but uses different terms which are common language in web application development. +The important term is module which is used instead of component. The primary reason is the naming collision with the Web Components standard (see Web Components).
+To clarify this:

+
+
+
    +
  • +

    A component describes an UI element containing HTML, CSS and JavaScript - structure, design and logic encapsulated inside a reusable container called component.

    +
  • +
  • +

    A module describes an applications feature area. The application flight-app may have a module called booking.

    +
  • +
+
+
+

An application developed using Angular consists of multiple modules. +There are feature modules and special modules described by the Angular Style Guide - core and shared. +Angular or Angular Style Guide give no guidance on how to structure a module internally. +This is where this architecture comes in.

+
+
+
+

Layers

+
+

The architecture describes two layers. The terminology is based on common language in web development.

+
+
+
+Architecture - Layers +
+
Figure 1. Layers
+
+
+
    +
  • +

    Components Layer encapsulates components which present the current application state. +Components are separated into Smart and Dumb Components. +The only logic present is view logic inside Smart Components.

    +
  • +
  • +

    Services Layer is more or less what we call 'business logic layer' on the server side. +The layer defines the applications state, the transitions between state and classic business logic. +Stores contain application state over time to which Smart Components subscribe to. +Adapters are used to perform XHR, WebSocket connections, etc. +The business model is described inside the module. +Use case services perform business logic needed for use cases. +A use case services interacts with the store and adapters. +Methods of use case services are the API for Smart Components. +Those methods are Actions in reactive terminology.

    +
  • +
+
+
+
+

Modules

+
+

Angular requires a module called app which is the main entrance to an application at runtime - this module gets bootstrapped. +Angular Style Guide defines feature modules and two special modules - core and shared.

+
+
+
+Architecture - Modules +
+
Figure 2. Modules
+
+
+

A feature module is basically a vertical cut through both layers. +The shared module consists of components shared across feature modules. +The core module holds services shared across modules. +So core module is a module only having a services layer +and shared module is a module only having a components layer.

+
+ +
+
+

Meta Architecture

+ +
+
+

Introduction

+ +
+
+

Purpose of this document

+
+

In our business applications, the client easily gets underestimated. Sometimes the client is more complex to develop and design than the server. While the server architecture is nowadays easily to agree as common sense, for clients this is not as obvious and stable especially as it typically depends on the client framework used. Finding a concrete architecture applicable for all clients may therefore be difficult to accomplish.

+
+
+

This document tries to define on a high abstract level, a reference architecture which is supposed to be a mental image and frame for orientation regarding the evaluation and appliance of different client frameworks. As such it defines terms and concepts required to be provided for in any framework and thus gives a common ground of understanding for those acquainted with the reference architecture. This allows better comparison between the various frameworks out there, each having their own terms for essentially the same concepts. It also means that for each framework we need to explicitly map how it implements the concepts defined in this document.

+
+
+

The architecture proposed herein is neither new nor was it developed from scratch. Instead it is the gathered and consolidated knowledge and best practices of various projects (s. References).

+
+
+
+

Goal of the Client Architecture

+
+

The goal of the client architecture is to support the non-functional requirements for the client, i.e. mostly maintainability, scalability, efficiency and portability. As such it provides a component-oriented architecture following the same principles listed already in the devonfw architecture overview. Furthermore it ensures a homogeneity regarding how different concrete UI technologies are being applied in the projects, solving the common requirements in the same way.

+
+
+
+

Architecture Views

+
+

As for the server we distinguish between the business and the technical architecture. Where the business architecture is different from project to project and relates to the concrete design of dialog components given concrete requirements, the technical architecture can be applied to multiple projects.

+
+
+

The focus of this document is to provide a technical reference architecture on the client on a very abstract level defining required layers and components. How the architecture is implemented has to be defined for each UI technology.

+
+
+

The technical infrastructure architecture is out of scope for this document and although it needs to be considered, the concepts of the reference architecture should work across multiple TI architectures, e.g. native or web clients.

+
+
+
+

devonfw Reference Client Architecture

+
+

The following gives a complete overview of the proposed reference architecture. It will be built up incrementally in the following sections.

+
+
+
+Complete Client Architecture Overview +
+
+
+

Figure 1 Overview

+
+
+
+

Client Architecture

+
+

On the highest level of abstraction we see the need to differentiate between dialog components and the container they are managed in, as well as the access to the application server being the back-end for the client (e.g. a devon4j instance). This section gives a summary of these components and how they relate to each other. Detailed architectures for each component will be supplied in subsequent sections.

+
+
+
+Client Architecture Overview +
+
+
+

Figure 2 Overview of Client Architecture

+
+
+
+

== Dialog Component

+
+

A dialog component is a logical, self-contained part of the user interface. It accepts user input and actions and controls communication with the user. Dialog components use the services provided by the dialog container in order to execute the business logic. They are self-contained, i.e. they possess their own user interface together with the associated logic, data and states.

+
+
+
    +
  • +

    Dialog components can be composed of other dialog components forming a hierarchy

    +
  • +
  • +

    Dialog components can interact with each other. This includes communication of a parent to its children, but also between components independent of each other regarding the hierarchy.

    +
  • +
+
+
+
+

== Dialog Container

+
+

Dialog components need to be managed in their life-cycle and how they can be coupled to each other. The dialog container is responsible for this along with the following:

+
+
+
    +
  • +

    Bootstrapping the client application and environment

    +
    +
      +
    • +

      Configuration of the client

      +
    • +
    • +

      Initialization of the application server access component

      +
    • +
    +
    +
  • +
  • +

    Dialog Component Management

    +
    +
      +
    • +

      Controlling the life-cycle

      +
    • +
    • +

      Controlling the dialog flow

      +
    • +
    • +

      Providing means of interaction between the dialogs

      +
    • +
    • +

      Providing application server access

      +
    • +
    • +

      Providing services to the dialog components
      +(e.g. printing, caching, data storage)

      +
    • +
    +
    +
  • +
  • +

    Shutdown of the application

    +
  • +
+
+
+
+

== Application Server Access

+
+

Dialogs will require a back-end application server in order to execute their business logic. Typically in a devonfw application the service layer will provide interfaces for the functionality exposed to the client. These business oriented interfaces should also be present on the client backed by a proxy handling the concrete call of the server over the network. This component provides the set of interfaces as well as the proxy.

+
+
+
+

Dialog Container Architecture

+
+

The dialog container can be further structured into the following components with their respective tasks described in their own sections:

+
+
+
+Dialog Container Architecture Overview +
+
+
+

Figure 3 Dialog Container Architecture

+
+
+
+

== Application

+
+

The application component represents the overall client in our architecture. It is responsible for bootstrapping all other components and connecting them with each other. As such it initializes the components below and provides an environment for them to work in.

+
+
+
+

== Configuration Management

+
+

The configuration management manages the configuration of the client, so the client can be deployed in different environments. This includes configuration of the concrete application server to be called or any other environment-specific property.

+
+
+
+

== Dialog Management

+
+

The Dialog Management component provides the means to define, create and destroy dialog components. It therefore offers basic life-cycle capabilities for a component. In addition it also allows composition of dialog components in a hierarchy. The life-cycle is then managed along the hierarchy, meaning when creating/destroying a parent dialog, this affects all child components, which are created/destroyed as well.

+
+
+
+

== Service Registry

+
+

Apart from dialog components, a client application also consists of services offered to them. A service can thereby encompass, among others:

+
+
+
    +
  • +

    Access to the application server

    +
  • +
  • +

    Access to the dialog container functions for managing dialogs or accessing the configuration

    +
  • +
  • +

    Dialog independent client functionality such as Printing, Caching, Logging, Encapsulated business logic such as tax calculation

    +
  • +
  • +

    Dialog component interaction

    +
  • +
+
+
+

The service registry offers the possibility to define, register and lookup these services. Note that these services could be dependent on the dialog hierarchy, meaning different child instances could obtain different instances / implementations of a service via the service registry, depending on which service implementations are registered by the parents.

+
+
+

Services should be defined as interfaces allowing for different implementations and thus loose coupling.

+
+
+
+

Dialog Component Architecture

+
+

A dialog component has to support all or a subset of the following tasks:
+(T1) Displaying the user interface incl. internationalization
+(T2) Displaying business data incl. changes made to the data due to user interactions and localization of the data
+(T3) Accepting user input including possible conversion from e.g. entered Text to an Integer
+(T4) Displaying the dialog state
+(T5) Validation of user input
+(T6) Managing the business data incl. business logic altering it due to user interactions
+(T7) Execution of user interactions
+(T8) Managing the state of the dialog (e.g. Edit vs. View)
+(T9) Calling the application server in the course of user interactions

+
+
+

Following the principle of separation of concerns, we further structure a dialog component in its own architecture, allowing us to distribute responsibility for these tasks across the defined components:

+
+
+
+Dialog Component Architecture +
+
+
+

Figure 4 Overview of dialog component architecture

+
+
+
+

== Presentation Layer

+
+

The presentation layer generates and displays the user interface, accepts user input and user actions and binds these to the dialog core layer (T1-5). The tasks of the presentation layer fall into two categories:

+
+
+
    +
  • +

    Provision of the visual representation (View component)
    +The presentation layer generates and displays the user interface and accepts user input and user actions. The logical processing of the data, actions and states is performed in the dialog core layer. The data and user interface are displayed in localized and internationalized form.

    +
  • +
  • +

    Binding of the visual representation to the dialog core layer
    +The presentation layer itself does not contain any dialog logic. The data or actions entered by the user are then processed in the dialog core layer. There are three aspects to the binding to the dialog core layer. We refer to “data binding”, “state binding” and “action binding”. Syntactical and (to a certain extent) semantic validations are performed during data binding (e.g. cross-field plausibility checks). Furthermore, the formatted, localized data in the presentation layer is converted into the presentation-independent, neutral data in the dialog core layer (parsing) and vice versa (formatting).

    +
  • +
+
+
+
+

== Dialog Core Layer

+
+

The dialog core layer contains the business logic, the control logic, and the logical state of the dialog. It therefore covers tasks T5-9:

+
+
+
    +
  • +

    Maintenance of the logical dialog state and the logical data
    +The dialog core layer maintains the logical dialog state and the logical data in a form which is independent of the presentation. The states of the presentation (e.g. individual widgets) must not be maintained in the dialog core layer, e.g. the view state could lead to multiple presentation states disabling all editable widgets on the view.

    +
  • +
  • +

    Implementation of the dialog and dialog control logic
    +The component parts in the dialog core layer implement the client specific business logic and the dialog control logic. This includes, for example, the manipulation of dialog data and dialog states as well as the opening and closing of dialogs.

    +
  • +
  • +

    Communication with the application server
    +The dialog core layer calls the interfaces of the application server via the application server access component services.

    +
  • +
+
+
+

The dialog core layer should not depend on the presentation layer enforcing a strict layering and thus minimizing dependencies.

+
+
+
+

== Interactions between dialog components

+
+

Dialog components can interact in the following ways:

+
+
+
+Dialog Interactions +
+
+
+
    +
  • +

    Embedding of dialog components
    +As already said, dialog components can be hierarchically composed. This composition works by embedding one dialog component within another. Apart from the life-cycle managed by the dialog container, the embedding needs to account for the visual embedding of the presentation and core layer.

    +
    +
      +
    • +

      Embedding dialog presentation
      +The parent dialog needs to either integrate the embedded dialog in its layout or open it in its own modal window.

      +
    • +
    • +

      Embedding dialog core
      +The parent dialog needs to be able to access the embedded instance of its children. This allows initializing and changing their data and states. On the other hand the children might require context information offered by the parent dialog by registering services in the hierarchical service registry.

      +
    • +
    +
    +
  • +
  • +

    Dialog flow
    +Apart from the embedding of dialog components representing a tight coupling, dialogs can interact with each other by passing the control of the UI, i.e. switching from one dialog to another.

    +
  • +
+
+
+

When interacting, dialog components should interact only between the same or lower layers, i.e. the dialog core should not access the presentation layer of another dialog component.

+
+
+
+

Appendix

+ +
+
+

Notes about Quasar Client

+
+

The Quasar client architecture as the consolidated knowledge of our CSD projects is the major source for the above drafted architecture. However, the above is a much simplified and more agile version thereof:

+
+
+
    +
  • +

    Quasar Client tried to abstract from the concrete UI library being used, so it could decouple the business from the technical logic of a dialog. The presentation layer should be the only one knowing the concrete UI framework used. This level of abstraction was dropped in this reference architecture, although it might of course still make sense in some projects. For fast-moving agile projects in the web however introducing such a level of abstraction takes effort with little gained benefits. With frameworks like Angular 2 we would even introduce one additional seemingly artificial and redundant layer, since it already separates the dialog core from its presentation.

    +
  • +
  • +

    In the past and in the days of Struts, JSF, etc. the concept of session handling was important for the client since part of the client was sitting on a server with a session relating it to its remote counterpart on the user's PC. Quasar Client catered for this need, by very prominently differentiating between session and application in the root of the dialog component hierarchy. However, in the current days of SPA applications and the lowered importance of server-side web clients, this prominent differentiation was dropped. When still needed the referenced documents will provide in more detail how to tailor the respective architecture to this end.

    +
  • +
+
+
+ +
+
+
+

Layers

+
+ +
+

Components Layer

+
+

The components layer encapsulates all components presenting the current application view state, which means data to be shown to the user. +The term component refers to a component described by the standard Web Components. +So this layer has all Angular components, directives and pipes defined for an application. +The main challenges are:

+
+
+
    +
  • +

    how to structure the components layer (see File Structure Guide)

    +
  • +
  • +

    decompose components into maintainable chunks (see Component Decomposition Guide)

    +
  • +
  • +

    handle component interaction

    +
  • +
  • +

    manage calls to the services layer

    +
  • +
  • +

    apply a maintainable data and event flow throughout the component tree

    +
  • +
+
+
+
+

Smart and Dumb Components

+
+

The architecture applies the concept of Smart and Dumb Components (syn. Containers and Presenters). +The concept means that components are divided into Smart and Dumb Components.

+
+
+

A Smart Component typically is a top-level dialog inside the component tree.

+
+
+
    +
  • +

    a component, that can be routed to

    +
  • +
  • +

    a modal dialog

    +
  • +
  • +

    a component, which is placed inside AppComponent

    +
  • +
+
+
+

A Dumb Component can be used by one to many Smart Components. +Inside the component tree a Dumb Component is a child of a Smart Component.

+
+
+
+Component Tree +
+
Figure 3. Component tree example
+
+
+

As shown, the topmost component is always the AppComponent in Angular applications. +The component tree describes the hierarchy of components starting from AppComponent. +The figure shows Smart Components in blue and Dumb Components in green. +AppComponent is a Smart Component by definition. +Components placed inside the template of AppComponent are static components inside the component tree. +So they are always displayed. +In the example, OverviewComponent and DetailsComponent are rendered by the Angular compiler depending on the current URL the application displays. +So OverviewComponent's sub-tree is displayed if the URL is /overview and DetailsComponent's sub-tree is displayed if the URL is /details. +To clarify this distinction further, the following table shows the main differences.

+
+
+
Smart vs Dumb Components
+

|== = +|Smart Components |Dumb Components

+
+
+

|contain the current view state +|show data via binding (@Input) and contain no view state

+
+
+

|handle events emitted by Dumb Components +|pass events up the component tree to be handled by Smart Components (@Output)

+
+
+

|call the services layer +|never call the services layer

+
+
+

|use services +|do not use services

+
+
+

|consists of n Dumb Components +|is independent of Smart Components +|== =

+
+
+
+

Interaction of Smart and Dumb Components

+
+

With the usage of the Smart and Dumb Components pattern one of the most important part is component interaction. +Angular comes with built in support for component interaction with @Input() and @Output() Decorators. +The following figure illustrates an unidirectional data flow.

+
+
+
    +
  • +

    Data always goes down the component tree - from a Smart Component down its children.

    +
  • +
  • +

    Events bubble up, to be handled by a Smart Component.

    +
  • +
+
+
+
+Smart and Dumb Components Interaction +
+
Figure 4. Smart and Dumb Component Interaction
+
+
+

As shown, a Dumb Component's role is to define a signature by declaring Input and Output Bindings.

+
+
+
    +
  • +

    @Input() defines what data is necessary for that component to work

    +
  • +
  • +

    @Output() defines which events can be listened on by the parent component

    +
  • +
+
+
+
Listing 1. Dumb Components define a signature
+
+
export class ValuePickerComponent {
+
+  @Input() columns: string[];
+  @Input() items: {}[];
+  @Input() selected: {};
+  @Input() filter: string;
+  @Input() isChunked = false;
+  @Input() showInput = true;
+  @Input() showDropdownHeader = true;
+
+  @Output() elementSelected = new EventEmitter<{}>();
+  @Output() filterChanged = new EventEmitter<string>();
+  @Output() loadNextChunk = new EventEmitter();
+  @Output() escapeKeyPressed = new EventEmitter();
+
+}
+
+
+
+

The example shows the Dumb Component ValuePickerComponent. +It describes seven input bindings with isChunked, showInput and showDropdownHeader being non-mandatory as they have a default value. +Four output bindings are present. Typically, a Dumb Component has very little to no code inside the TypeScript class.

+
+
+
Listing 2. Smart Components use the Dumb Components signature inside the template
+
+
<div>
+
+  <value-input
+    ...>
+  </value-input>
+
+  <value-picker
+    *ngIf="isValuePickerOpen"
+    [columns]="columns"
+    [items]="filteredItems"
+    [isChunked]="isChunked"
+    [filter]="filter"
+    [selected]="selectedItem"
+    [showDropdownHeader]="showDropdownHeader"
+    (loadNextChunk)="onLoadNextChunk()"
+    (elementSelected)="onElementSelected($event)"
+    (filterChanged)="onFilterChanged($event)"
+    (escapeKeyPressed)="onEscapePressedInsideChildTable()">
+  </value-picker>
+
+</div>
+
+
+
+

Inside the Smart Components template the events emitted by Dumb Components are handled. +It is a good practice to name the handlers with the prefix on* (e.g. onInputChanged()).

+
+ +
+
+

Services Layer

+
+

The services layer is more or less what we call 'business logic layer' on the server side. +It is the layer where the business logic is placed. +The main challenges are:

+
+
+
    +
  • +

    Define application state and an API for the components layer to use it

    +
  • +
  • +

    Handle application state transitions

    +
  • +
  • +

    Perform back-end interaction (XHR, WebSocket, etc.)

    +
  • +
  • +

    Handle business logic in a maintainable way

    +
  • +
  • +

    Configuration management

    +
  • +
+
+
+

All parts of the services layer are described in this chapter. +An example which puts the concepts together can be found at the end Interaction of Smart Components through the services layer.

+
+
+
+

Boundaries

+
+

There are two APIs for the components layer to interact with the services layer:

+
+
+
    +
  • +

    A store can be subscribed to for receiving state updates over time

    +
  • +
  • +

    A use case service can be called to trigger an action

    +
  • +
+
+
+

To illustrate this, the following figure shows an abstract overview.

+
+
+
+Smart and Dumb Components Interaction +
+
Figure 5. Boundaries to components layer
+
+
+
+

Store

+
+

A store is a class which defines and handles application state with its transitions over time. +Interaction with a store is always synchronous. +A basic implementation using RxJS can look like this.

+
+
+ + + + + +
+ + +A more profound implementation taken from a real-life project can be found here (Abstract Class Store). +
+
+
+
Listing 3. Store defined using RxJS
+
+
@Injectable()
+export class ProductSearchStore {
+
+  private stateSource = new BehaviorSubject<ProductSearchState>(defaultProductSearchState);
+  state$ = this.stateSource.asObservable();
+
+  setLoading(isLoading: boolean) {
+    const currentState = this.stateSource.getValue();
+    this.stateSource.next({
+      isLoading: isLoading,
+      products: currentState.products,
+      searchCriteria: currentState.searchCriteria
+    });
+  }
+
+}
+
+
+
+

In the example, ProductSearchStore handles state of type ProductSearchState. +The public API is the property state$ which is an observable of type ProductSearchState. +The state can be changed with method calls. +So every desired change to the state needs to be modeled with a method. +In reactive terminology this would be an Action. +The store does not use any services. +Subscribing to the state$ observable leads to the subscribers receiving every new state.

+
+
+

This is basically the Observer Pattern:
+The store consumer registers itself to the observable via state$.subscribe() method call. +The first parameter of subscribe() is a callback function to be called when the subject changes. +This way the consumer - the observer - is registered. +When next() is called with a new state inside the store, all callback functions are called with the new value. +So every observer is notified of the state change. +This equals the Observer Pattern push type.

+
+
+

A store is the API for Smart Components to receive state from the service layer. +State transitions are handled automatically with Smart Components registering to the state$ observable.

+
+
+
+

Use Case Service

+
+

A use case service is a service which has methods to perform asynchronous state transitions. +In reactive terminology this would be an Action of Actions - a thunk (redux) or an effect (@ngrx).

+
+
+
+Use Case Service +
+
Figure 6. Use case services are the main API to trigger state transitions
+
+
+

A use case service's method - an action - interacts with adapters, business services and stores. +So use case services orchestrate whole use cases. +For an example see use case service example.

+
+
+
+

Adapter

+
+

An adapter is used to communicate with the back-end. +This could be a simple XHR request, a WebSocket connection, etc. +An adapter is simple in the way that it does not add anything other than the pure network call. +So there is no caching or logging performed here. +The following listing shows an example.

+
+
+

For further information on back-end interaction see Consuming REST Services

+
+
+
Listing 4. Calling the back-end via an adapter
+
+
@Injectable()
+export class ProductsAdapter {
+
+  private baseUrl = environment.baseUrl;
+
+  constructor(private http: HttpClient) { }
+
+  getAll(): Observable<Product[]> {
+    return this.http.get<Product[]>(this.baseUrl + '/products');
+  }
+
+}
+
+
+
+
+

Interaction of Smart Components through the services layer

+
+

The interaction of smart components is a classic problem which has to be solved in every UI technology. +It is basically how one dialog tells the other something has changed.

+
+
+

An example is adding an item to the shopping basket. +With this action there need to be multiple state updates.

+
+
+
    +
  • +

    The small logo showing how many items are currently inside the basket needs to be updated from 0 to 1

    +
  • +
  • +

    The price needs to be recalculated

    +
  • +
  • +

    Shipping costs need to be checked

    +
  • +
  • +

    Discounts need to be updated

    +
  • +
  • +

    Ads need to be updated with related products

    +
  • +
  • +

    etc.

    +
  • +
+
+
+
+

Pattern

+
+

To handle this interaction in a scalable way we apply the following pattern.

+
+
+
+Interaction of Smart Components via services layer +
+
Figure 7. Smart Component interaction
+
+
+

The state of interest is encapsulated inside a store. All Smart Components interested in the state have to subscribe to the store’s API served by the public observable. Thus, with every update to the store the subscribed components receive the new value. The components basically react to state changes. Altering a store can be done directly if the desired change is synchronous. Most actions are of asynchronous nature so the UseCaseService comes into play. Its actions are void methods, which implement a use case, i.e., adding a new item to the basket. It calls asynchronous actions and can perform multiple store updates over time.

+
+
+

To put this pattern into perspective the UseCaseService is a programmatic alternative to redux-thunk or @ngrx/effects. The main motivation here is to use the full power of TypeScript --strictNullChecks and to keep the learning curve from becoming as steep as it would be when learning a new state management framework. This way actions are just void method calls.

+
+
+
+

Example

+
+
+Smart component interaction example +
+
Figure 8. Smart Components interaction example
+
+
+

The example shows two Smart Components sharing the FlightSearchState by using the FlightSearchStore. +The use case shown is started by an event in the Smart Component FlightSearchComponent. The action loadFlight() is called. This could be submitting a search form. +The UseCaseService is FlightSearchService, which handles the use case Load Flights.

+
+
+
UseCaseService example
+

+
+
+
+
export class FlightSearchService {
+
+  constructor(
+    private flightSearchAdapter: FlightSearchAdapter,
+    private store: FlightSearchStore
+  ) { }
+
+  loadFlights(criteria: FlightSearchCriteria): void {
+    this.store.setLoadingFlights(true);
+    this.store.clearFlights();
+
+    this.flightSearchAdapter.getFlights(criteria.departureDate,
+        {
+          from: criteria.departureAirport,
+          to: criteria.destinationAirport
+        })
+      .finally(() => this.store.setLoadingFlights(false))
+      .subscribe((result: FlightTo[]) => this.store.setFlights(result, criteria));
+  }
+
+}
+
+
+
+

First the loading flag is set to true and the current flights are cleared. This leads the Smart Component showing a spinner indicating the loading action. Then the asynchronous XHR is triggered by calling the adapter. After completion the loading flag is set to false causing the loading indication no longer to be shown. If the XHR was successful, the data would be put into the store. If the XHR was not successful, this would be the place to handle a custom error. All general network issues should be handled in a dedicated class, i.e., an interceptor. So for example the basic handling of 404 errors is not done here.

+
+
+
+
+
+

Guides

+
+ +
+

Package Managers

+
+

There are two major package managers currently used for JavaScript / TypeScript projects which leverage NodeJS as a build platform.

+
+
+
    +
  1. +

    npm

    +
  2. +
  3. +

    yarn

    +
  4. +
+
+
+

Our recommendation is to use yarn but both package managers are fine.

+
+
+ + + + + +
+ + +When using npm it is important to use a version greater than 5.0 as npm 3 has major drawbacks compared to yarn. +The following guide assumes that you are using npm >= 5 or yarn. +
+
+
+

Before you start reading further, please take a look at the docs:

+
+ +
+

The following guide will describe best practices for working with yarn / npm.

+
+
+
+

Semantic Versioning

+
+

When working with package managers it is very important to understand the concept of semantic versioning.

+
+
+
Version example 1.2.3
+

|== == == = +|Version |1. |2. |3 +|Version name when incrementing |Major (2.0.0) |Minor (1.3.0) |Patch (1.2.4) +|Has breaking changes |yes |no |no +|Has features |yes |yes |no +|Has bug fixes |yes |yes |yes +|== == == =

+
+
+

The table gives an overview of the most important parts of semantic versioning. +In the header version 1.2.3 is displayed. +The first row shows the name and the resulting version when incrementing a part of the version. +The next rows show specifics of the resulting version - e.g. a major version can have breaking changes, features and bug fixes.

+
+
+

Packages from npm and yarn leverage semantic versioning and instead of selecting a fixed version one can specify a selector. +The most common selectors are:

+
+
+
    +
  • +

    ^1.2.3 +At least 1.2.3 - 1.2.4 or 1.3.0 can be used, 2.0.0 can not be used

    +
  • +
  • +

    ~1.2.3 +At least 1.2.3 - 1.2.4 can be used, 2.0.0 and 1.3.0 can not be used

    +
  • +
  • +

    >=1.2.3 +At least 1.2.3 - every version greater can also be used

    +
  • +
+
+
+

This achieves a lower number of duplicates. +To give an example:

+
+
+

If package A needs version 1.3.0 of package C and package B needs version 1.4.0 of package C one would end up with 4 packages.

+
+
+

If package A needs version ^1.3.0 of package C and package B needs version 1.4.0 of package C one would end up with 3 packages. +A would use the same version of C as B - 1.4.0.

+
+
+
+

Do not modify package.json and lock files by hand

+
+

Dependencies are always added using a yarn or npm command. +Altering the package.json, package-lock.json or yarn.lock file by hand is not recommended.

+
+
+

Always use a yarn or npm command to add a new dependency.

+
+
+

Adding the package express with yarn to dependencies.

+
+
+
+
yarn add express
+
+
+
+

Adding the package express with npm to dependencies.

+
+
+
+
npm install express
+
+
+
+
+

What does the lock file do

+
+

The purpose of the files yarn.lock and package-lock.json is to freeze the resolved versions of all dependencies.

+
+
+

The following problem is solved:

+
+
+
    +
  • +

    Developer A upgrades the dependency express to fixed version 4.16.3.

    +
  • +
  • +

    express has sub-dependency accepts with version selector ~1.3.5

    +
  • +
  • +

    His local node_modules folder receives accepts in version 1.3.5

    +
  • +
  • +

    On his machine everything is working fine

    +
  • +
  • +

    Afterward version 1.3.6 of accepts is published - it contains a major bug

    +
  • +
  • +

    Developer B now clones the repo and loads the dependencies.

    +
  • +
  • +

    He receives version 1.3.6 of accepts and blames developer A for upgrading to a broken version.

    +
  • +
+
+
+

Both yarn.lock and package-lock.json freeze all the dependencies. +For example, in yarn.lock you will find:

+
+
+
Listing 5. yarn.lock example (excerpt)
+
+
accepts@~1.3.5:
+  version "1.3.5"
+  resolved "[...URL to registry]"
+  dependencies:
+    mime-types "~2.1.18"
+    negotiator "0.6.1"
+
+mime-db@~1.33.0:
+  version "1.33.0"
+  resolved "[...URL to registry]"
+
+mime-types@~2.1.18:
+  version "2.1.18"
+  resolved "[...URL to registry]"
+  dependencies:
+    mime-db "~1.33.0"
+
+negotiator@0.6.1:
+  version "0.6.1"
+  resolved "[...URL to registry]"
+
+
+
+

The described problem is solved by the example yarn.lock file.

+
+
+
    +
  • +

    accepts is frozen at version ~1.3.5

    +
  • +
  • +

    All of its sub-dependencies are also frozen. +It needs mime-types at version ~2.1.18 which is frozen at 2.1.18. +mime-types needs mime-db at ~1.33.0 which is frozen at 1.33.0

    +
  • +
+
+
+

Every developer will receive the same versions of every dependency.

+
+
+ + + + + +
+ + +You have to make sure all your developers are using the same npm/yarn version - this includes the CI build. +
+
+ +
+
+

Package Managers Workflow

+ +
+
+

Introduction

+
+

This document aims to provide you the necessary documentation and sources in order to help you understand the importance of dependencies between packages.

+
+
+

Projects in NodeJS make use of modules, chunks of reusable code made by other people or teams. These small chunks of reusable code are called packages [1]. Packages are used to solve specific problems or tasks. These relations between your project and the external packages are called dependencies.

+
+
+

For example, imagine we are doing a small program that takes your birthday as an input and tells you how many days are left until your birthday. We search in the repository if someone has published a package to retrieve the actual date and manage date types, and maybe we could search for another package to show a calendar, because we want to optimize our time, and we wish the user to click a calendar button and choose the day in the calendar instead of typing it.

+
+
+

As you can see, packages are convenient. In some cases, they may be even needed, as they can manage aspects of your program you may not be proficient in, or provide an easier use of them.

+
+
+

For more comprehensive information visit npm definition

+
+
+
+

Package.json

+
+

Dependencies in your project are stored in a file called package.json. Every package.json must contain, at least, the name and version of your project.

+
+
+

Package.json is located in the root of your project.

+
+
+ + + + + +
+ + +If package.json is not on your root directory refer to Problems you may encounter section +
+
+
+

If you wish to learn more information about package.json, click on the following links:

+
+ +
+
+

== Content of package.json

+
+

As you noticed, package.json is a really important file in your project. It contains essential information about our project, therefore you need to understand what’s inside.

+
+
+

The structure of package.json is divided in blocks, inside the first one you can find essential information of your project such as the name, version, license and optionally some [Scripts].

+
+
+
+
{
+  "name": "exampleproject",
+  "version": "0.0.0",
+  "license": "MIT",
+  "scripts": {
+    "ng": "ng",
+    "start": "ng serve",
+    "build": "ng build",
+    "test": "ng test",
+    "lint": "ng lint",
+    "e2e": "ng e2e"
+  }
+
+
+
+

The next block is called dependencies and contains the packages that project needs in order to be developed, compiled and executed.

+
+
+
+
"private": true,
+  "dependencies": {
+    "@angular/animations": "^4.2.4",
+    "@angular/common": "^4.2.4",
+    "@angular/forms": "^4.2.4",
+    ...
+    "zone.js": "^0.8.14"
+  }
+
+
+
+

After dependencies we find devDependencies, another kind of dependencies present in the development of the application but unnecessary for its execution. One example is typescript. Code is written in typescript, and then, transpiled to JavaScript. This means the application is not using typescript in execution and consequently not included in the deployment of our application.

+
+
+
+
"devDependencies": {
+    "@angular/cli": "1.4.9",
+    "@angular/compiler-cli": "^4.2.4",
+    ...
+    "@types/node": "~6.0.60",
+    "typescript": "~2.3.3"
+  }
+
+
+
+

Having a peer dependency means that your package needs a dependency that is the same exact dependency as the person installing your package

+
+
+
+
"peerDependencies": {
+    "package-123": "^2.7.18"
+  }
+
+
+
+

Optional dependencies are just that: optional. If they fail to install, Yarn will still say the install process was successful.

+
+
+
+
"optionalDependencies": {
+    "package-321": "^2.7.18"
+  }
+
+
+
+

Finally you can have bundled dependencies which are packages bundled together when publishing your package in a repository.

+
+
+
+
{
+  "bundledDependencies": [
+    "package-4"
+  ]
+}
+
+
+
+

Here is the link to an in-depth explanation of dependency types​.

+
+
+
+

== Scripts

+
+

Scripts are a great way of automating tasks related to your package, such as simple build processes or development tools.

+
+
+

For example:

+
+
+
+
{
+  "name": "exampleproject",
+  "version": "0.0.0",
+  "license": "MIT",
+  "scripts": {
+    "build-project": "node hello-world.js",
+  }
+
+
+
+

You can run that script by running the command yarn (run) script or npm run script, check the example below:

+
+
+
+
$ yarn (run) build-project    # run is optional
+$ npm run build-project
+
+
+
+

There are special reserved words for scripts, like preinstall, which will execute the script automatically before the packages you install are installed.

+
+
+

Check different uses for scripts in the following links:

+
+ +
+

Or you can go back to +[Content of package.json]​.

+
+
+
+

Managing dependencies

+
+

In order to manage dependencies we recommend using package managers in your projects.

+
+
+

A big reason is their usability. Adding or removing a package is really easy, and by doing so, the package manager updates the package.json and copies (or removes) the package in the needed location, with a single command.

+
+
+

Another reason, closely related to the first one, is reducing human error by automating the package management process.

+
+
+

Two of the package managers you can use in NodeJS projects are "yarn" and "npm". While you can use both, we encourage you to use only one of them while working on projects. Using both may lead to different dependencies between members of the team.

+
+
+
+

== npm

+
+

We’ll start by installing npm following this small guide here.

+
+
+

As stated on the web, npm comes inside of NodeJS, and must be updated after installing NodeJS, in the same guide you used earlier are written the instructions to update npm.

+
+
+

How npm works

+
+
+

In order to explain how npm works, let’s take a command as an example:

+
+
+
+
$ npm install @angular/material @angular/cdk
+
+
+
+

This command tells npm to look for the packages @angular/material and @angular/cdk in the npm registry, download and decompress them in the folder node_modules along with their own dependencies. Additionally, npm will update package.json and create a new file called package-lock.json.

+
+
+

After initialization and installing the first package there will be a new folder called node_modules in your project. This folder is where your packages are unzipped and stored, following a tree scheme.

+
+
+

Take into consideration that both npm and yarn need a package.json in the root of your project in order to work properly. If after creating your project you don’t have it, download the package.json again from the repository or you’ll have to start again.

+
+
+

Brief overview of commands

+
+
+

If we need to create a package.json from scratch, we can use the command init. This command asks the user for basic information about the project and creates a brand new package.json.

+
+
+
+
$ npm init
+
+
+
+

Install (or i) installs all modules listed as dependencies in package.json locally. You can also specify a package, and install that package. Install can also be used with the parameter -g, which tells npm to install the [Global package].

+
+
+
+
$ npm install
+$ npm i
+$ npm install Package
+
+
+
+ + + + + +
+ + +Earlier versions of npm did not add dependencies to package.json unless it was used with the flag --save, so npm install package would be npm install --save package, you have one example below. +
+
+
+
+
$ npm install --save Package
+
+
+
+

Npm needs flags in order to know what kind of dependency you want in your project, in npm you need to put the flag -D or --save-dev to install devDependencies, for more information consult the links at the end of this section.

+
+
+
+
$ npm install -D package
+$ npm install --save-dev package
+
+
+
+

+
+
+

The next command uninstalls the module you specified in the command.

+
+
+
+
$ npm uninstall Package
+
+
+
+

ls command shows us the dependencies like a nested tree, useful if you have few packages, not so useful when you need a lot of packages.

+
+
+
+
$ npm ls
+
+
+
+
+
npm@@VERSION@ /path/to/npm
+└─┬ init-package-json@0.0.4
+  └── promzard@0.1.5
+
+
+
+
example tree
+

We recommend you to learn more about npm commands in the following link, navigating to the section CLI commands.

+
+
+

About Package-lock.json

+
+
+

Package-lock.json describes the dependency tree resulting from using package.json and npm. Whenever you update, add or remove a package, package-lock.json is deleted and recreated with the new dependencies.

+
+
+
+
 "@angular/animations": {
+      "version": "4.4.6",
+      "resolved": "https://registry.npmjs.org/@angular/animations/-/animations-4.4.6.tgz",
+      "integrity": "sha1-+mYYmaik44y3xYPHpcl85l1ZKjU=",
+      "requires": {
+        "tslib": "1.8.0"
+      }
+
+
+
+

This lock file is checked every time the command npm i (or npm install) is used without specifying a package, +in the case it exists and it’s valid, npm will install the exact tree that was generated, such that subsequent +installs are able to generate identical dependency trees.

+
+
+ + + + + +
+ + +It is not recommended to modify this file yourself. It’s better to leave its management to npm. +
+
+
+

More information is provided by the npm team at package-lock.json

+
+
+
+

== Yarn

+
+

Yarn is an alternative to npm. If you wish to install yarn, follow the guide getting started with yarn and download the correct version for your operating system. NodeJS is also needed; you can find it here.

+
+
+

Working with yarn

+
+
+

Yarn is used like npm, with small differences in syntax, for example npm install module is changed to yarn add module.

+
+
+
+
$ yarn add @covalent
+
+
+
+

This command is going to download the required packages, modify package.json, put the package in the folder node_modules and makes a new yarn.lock with the new dependency.

+
+
+

However, unlike npm, yarn maintains a cache with packages you download inside. You don’t need to download every file every time you do a general installation. This means installations faster than npm.

+
+
+

Similarly to npm, yarn creates and maintains its own lock file, called yarn.lock. Yarn.lock gives enough information about the project for the dependency tree to be reproduced.

+
+
+

yarn commands

+
+
+

Here we have a brief description of yarn’s most used commands:

+
+
+
+
$ yarn add Package
+$ yarn add --dev Package
+
+
+
+

Adds a package locally to use in your package. Adding the flags --dev or -D will add them to devDependencies instead of the default dependencies, if you need more information check the links at the end of the section.

+
+
+
+
$ yarn init
+
+
+
+

Initializes the development of a package.

+
+
+
+
$ yarn install
+
+
+
+

Installs all the dependencies defined in a package.json file, you can also write "yarn" to achieve the same effect.

+
+
+
+
$ yarn remove Package
+
+
+
+

You use it when you wish to remove a package from your project.

+
+
+
+
$ yarn global add Package
+
+
+
+

Installs the [Global package].

+
+
+

Please, refer to the documentation to learn more about yarn commands and their attributes: yarn commands

+
+
+

yarn.lock

+
+
+

This file has the same purpose as package-lock.json: to guide the package manager, in this case yarn, to install the dependency tree specified in yarn.lock.

+
+
+

Yarn.lock and package.json are essential files when collaborating on a project with more co-workers and may be a source of errors if programmers do not use the same manager.

+
+
+

Yarn.lock follows the same structure as package-lock.json, you can find an example of dependency below:

+
+
+
+
"@angular/animations@^4.2.4":
+  version "4.4.6"
+  resolved "https://registry.yarnpkg.com/@angular/animations/-/animations-4.4.6.tgz#fa661899a8a4e38cb7c583c7a5c97ce65d592a35"
+  dependencies:
+    tslib "^1.7.1"
+
+
+
+ + + + + +
+ + +As with package-lock.json, it’s strongly not advised to modify this file. Leave its management to yarn +
+
+
+

You can learn more about yarn.lock here: yarn.lock

+
+
+
+

== Global package

+
+

Global packages are packages installed in your operating system instead of your local project. Global packages are useful for developer tooling that is not part of any individual project but instead is used for local commands.

+
+
+

A good example of global package is @angular/cli, a command line interface for angular used in our projects. You can install +a global package in npm with "npm install -g package" and "yarn global add package" with yarn, you have a npm example below:

+
+
+
Listing 6. npm global package
+
+
npm install -g @angular/cli
+
+
+ +
+
+

== Package version

+
+

Dependencies are critical to the success of a package. You must be extra careful about +which version packages are using, one package in a different version may break your code.

+
+
+

Versioning in npm and yarn, follows a semantic called semver, following the logic +MAJOR.MINOR.PATCH, like for example, @angular/animations: 4.4.6.

+
+
+

Different versions

+
+
+

Sometimes, packages are installed with a different version from the one initially installed. +This happens because package.json also contains the range of versions we allow yarn or npm to +install or update to, example:

+
+
+
+
"@angular/animations": "^4.2.4"
+
+
+
+

And here the installed one:

+
+
+
+
 "@angular/animations": {
+      "version": "4.4.6",
+      "resolved": "https://registry.npmjs.org/@angular/animations/-/animations-4.4.6.tgz",
+      "integrity": "sha1-+mYYmaik44y3xYPHpcl85l1ZKjU=",
+      "requires": {
+        "tslib": "1.8.0"
+      }
+
+
+
+

As you can see, the version we initially added is 4.2.4, and the version finally installed after +a global installation of all packages, 4.4.6.

+
+
+

Installing packages without package-lock.json or yarn.lock using their respective packet managers, will always +end with npm or yarn installing the latest version allowed by package.json.

+
+
+

"@angular/animations": "^4.2.4" contains not only the version we added, but also the range we allow npm and yarn +to update. Here are some examples:

+
+
+
+
"@angular/animations": "<4.2.4"
+
+
+
+

The version installed must be lower than 4.2.4 .

+
+
+
+
"@angular/animations": ">=4.2.4"
+
+
+
+

The version installed must be greater than or equal to 4.2.4 .

+
+
+
+
"@angular/animations": "=4.2.4"
+
+
+
+

the version installed must be equal to 4.2.4 .

+
+
+
+
"@angular/animations": "^4.2.4"
+
+
+
+

The version installed cannot modify the first non zero digit, for example in this case +it cannot surpass 5.0.0 or be lower than 4.2.4 .

+
+
+

You can learn more about this in Versions

+
+
+
+

Problems you may encounter

+
+

If you can’t find package.json, you may have deleted the one you had previously, +which means you have to download the package.json from the repository. +In the case you are creating a new project you can create a new package.json. More information +in the links below. Click on Package.json if you come from that section.

+
+ +
+ + + + + +
+ + +Using npm install or yarn without package.json in your projects will +result in compilation errors. As we mentioned earlier, +Package.json contains essential information about your project. +
+
+
+

If you have package.json, but you don’t have package-lock.json or yarn.lock the use of +command "npm install" or "yarn" may result in a different dependency tree.

+
+
+

If you are trying to import a module and Visual Studio Code is not able to find it, this is usually caused by an error adding the package to the project; try to add the module again with yarn or npm, and restart Visual Studio Code.

+
+
+

Be careful with the semantic versioning inside your package.json of the packages, +or you may find a new update on one of your dependencies breaking your code.

+
+
+ + + + + +
+ + +In the following link +there is a solution to a problematic update to one package. +
+
+
+

A list of common errors of npm can be found in: npm errors

+
+
+
+

== Recommendations

+
+

Use yarn or npm in your project, reach an agreement with your team in order to choose one, this will avoid +undesired situations like forgetting to upload an updated yarn.lock or package-lock.json. +Be sure to have the latest version of your project when possible.

+
+
+ + + + + +
+ + +Pull your project every time it’s updated. Erase your node_modules folder and reinstall all +dependencies. This assures you to be working with the same dependencies your team has. +
+
+
+

AD Center recommends the use of yarn.

+
+ +
+
+

Yarn 2

+
+

Yarn v2 is a very different software from the v1. The following list contains the main new features:

+
+ +
+

Please, read them carefully to decide if your current project is suitable to use Yarn 2 as package manager.

+
+
+ + + + + +
+ + +Some features are still experimental, so please do not use them in production environments. +
+
+
+

More info at https://yarnpkg.com/

+
+
+
+

Global Install

+
+

Installing Yarn 2.x globally is discouraged as Yarn team is moving to a per-project install strategy. We advise you to keep Yarn 1.x (Classic) as your global binary by installing it via the instructions you can find here.

+
+
+

Once you’ve followed the instructions (running yarn --version from your home directory should yield something like 1.22.0), go to the next section to see how to enable Yarn 2 on your project.

+
+
+
+

Per-project install

+
+

Follow these instructions to update your current devon4ng project to Yarn 2:

+
+
+
    +
  1. +

    Follow the global install instructions.

    +
  2. +
  3. +

    Move into your project folder:

    +
    +
    +
    cd ~/path/to/project
    +
    +
    +
  4. +
  5. +

    Run the following command:

    +
    +
    +
    yarn policies set-version berry # below v1.22
    +yarn set version berry          # on v1.22+
    +
    +
    +
  6. +
  7. +

    Since Angular CLI still is not fully supported with the new Yarn architecture as it is not compatible with PnP it is necessary to include the node-modules plugin adding the following line in the .yarnrc.yml file:

    +
    +
    +
    nodeLinker: node-modules
    +
    +
    +
  8. +
  9. +

    Commit the .yarn and .yarnrc.yml changes

    +
  10. +
  11. +

    Run again yarn install.

    +
  12. +
+
+
+ + + + + +
+ + +For more advanced migration topics please refer to https://yarnpkg.com/advanced/migration +
+
+
+
+

Which files should be added to gitignore file?

+
+

If you’re using Zero-Installs:

+
+
+
+
.yarn/*
+!.yarn/cache
+!.yarn/releases
+!.yarn/plugins
+
+
+
+

If you’re not using Zero-Installs:

+
+
+
+
.yarn/*
+!.yarn/releases
+!.yarn/plugins
+.pnp.*
+
+
+ +
+
+
+
+

Angular

+
+ +
+

Accessibility

+
+

Multiple studies suggest that around 15-20% of the population are living with a disability of some kind. In comparison, that number is higher than any single browser demographic currently, other than Chrome. Not considering those users when developing an application means excluding a large number of people from being able to use it comfortably or at all.

+
+
+

Some people are unable to use a mouse, view a screen, see low-contrast text, or hear dialogue or music, and some people have difficulty understanding complex language. These users need support such as keyboard support, screen reader support, high-contrast text, captions and transcripts, and plain-language support. These disabilities may range from permanent to situational.

+
+
+
+

Key Concerns of Accessible Web Applications

+
+
    +
  • +

    Semantic Markup - Allows the application to be understood on a more general level rather than just details of what's being rendered

    +
  • +
  • +

    Keyboard Accessibility - Applications must still be usable when using only a keyboard

    +
  • +
  • +

    Visual Assistance - color contrast, focus of elements and text representations of audio and events

    +
  • +
+
+
+
+

Semantic Markup

+
+

If you’re creating custom element directives, Web Components or HTML in general, use native elements wherever possible to utilize built-in events and properties. Alternatively, use ARIA to communicate semantic meaning.

+
+
+

HTML tags have attributes that provide extra context on what’s being displayed in the browser. For example, the <img> tag’s alt attribute lets the reader know what is being shown using a short description. However, native tags don’t cover all cases. This is where ARIA fits in. ARIA attributes can provide context on what roles specific elements have in the application or on how elements within the document relate to each other.

+
+
+

A modal component can be given the role of dialog or alertdialog to let the browser know that that component is acting as a modal. The modal component template can use the ARIA attributes aria-labelledby and aria-describedby to describe to readers what the title and purpose of the modal is.

+
+
+
+
@Component({
+    selector: 'ngc2-app',
+    template: `
+      <ngc2-notification-button
+        message="Hello!"
+        label="Greeting"
+        role="button">
+      </ngc2-notification-button>
+      <ngc2-modal
+        [title]="modal.title"
+        [description]="modal.description"
+        [visible]="modal.visible"
+        (close)="modal.close()">
+      </ngc2-modal>
+    `
+})
+export class AppComponent {
+  constructor(private modal: ModalService) { }
+}
+
+
+
+

notification-button.component.ts

+
+
+
+
@Component({
+  selector: 'ngc2-modal',
+  template: `
+    <div
+      role="dialog"
+      aria-labelledby="modal-title"
+      aria-describedby="modal-description">
+      <div id="modal-title">{{title}}</div>
+      <p id="modal-description">{{description}}</p>
+      <button (click)="close.emit()">OK</button>
+    </div>
+  `
+})
+export class ModalComponent {
+  ...
+}
+
+
+
+
+

Keyboard Accessibility

+
+

Keyboard accessibility is the ability of your application to be interacted with using just a keyboard. The more streamlined the site can be used this way, the more keyboard accessible it is. Keyboard accessibility is one of the largest aspects of web accessibility since it targets:

+
+
+
    +
  • +

    those with motor disabilities who can’t use a mouse

    +
  • +
  • +

    users who rely on screen readers and other assistive technology, which require keyboard navigation

    +
  • +
  • +

    those who prefer not to use a mouse

    +
  • +
+
+
+
+

== Focus

+
+

Keyboard interaction is driven by something called focus. In web applications, only one element on a document has focus at a time, and a keypress will activate whatever function is bound to that element. The focus element border can be styled with CSS using the outline property, but it should not be removed. Elements can also be styled using the :focus pseudo-selector.

+
+
+
+

== Tabbing

+
+

The most common way of moving focus along the page is through the tab key. Elements will be traversed in the order they appear in the document outline - so that order must be carefully considered during development. There is a way to change the default tab order. This can be done through the tabindex attribute. The tabindex can be given the values: +* less than zero - to let readers know that an element should be focusable but not keyboard accessible +* 0 - to let readers know that that element should be accessible by keyboard +* greater than zero - to let readers know the order in which the focusable element should be reached using the keyboard. Order is calculated from lowest to highest.

+
+
+
+

== Transitions

+
+

The majority of transitions that happen in an Angular application will not involve a page reload. This means that developers will need to carefully manage what happens to focus in these cases.

+
+
+

For example:

+
+
+
+
@Component({
+  selector: 'ngc2-modal',
+  template: `
+    <div
+      role="dialog"
+      aria-labelledby="modal-title"
+      aria-describedby="modal-description">
+      <div id="modal-title">{{title}}</div>
+      <p id="modal-description">{{description}}</p>
+      <button (click)="close.emit()">OK</button>
+    </div>
+  `,
+})
+export class ModalComponent {
+  constructor(private modal: ModalService, private element: ElementRef) { }
+
+  ngOnInit() {
+    this.modal.visible$.subscribe(visible => {
+      if(visible) {
+        setTimeout(() => {
+          this.element.nativeElement.querySelector('button').focus();
+        }, 0);
+      }
+    })
+  }
+}
+
+
+
+
+

Visual Assistance

+
+

One large category of disability is visual impairment. This includes not just the blind, but those who are color blind or partially sighted, and require some additional consideration.

+
+
+
+

Color Contrast

+
+

When choosing colors for text or elements on a website, the contrast between them needs to be considered. For WCAG 2.0 AA, this means that the contrast ratio for text or visual representations of text needs to be at least 4.5:1. There are tools online to measure the contrast ratio, such as this color contrast checker from WebAIM, or it can be checked using automated tests.

+
+
+
+

Visual Information

+
+

Color can help a user’s understanding of information, but it should never be the only way to convey information to a user. For example, a user with red/green color-blindness may have trouble discerning at a glance if an alert is informing them of success or failure.

+
+
+
+

Audiovisual Media

+
+

Audiovisual elements in the application such as video, sound effects or audio (that is, podcasts) need related textual representations such as transcripts, captions or descriptions. They also should never auto-play and playback controls should be provided to the user.

+
+
+
+

Accessibility with Angular Material

+
+

The a11y package provides a number of tools to improve accessibility. Import

+
+
+
+
import { A11yModule } from '@angular/cdk/a11y';
+
+
+
+
+

ListKeyManager

+
+

ListKeyManager manages the active option in a list of items based on keyboard interaction. Intended to be used with components that correspond to a role="menu" or role="listbox" pattern . Any component that uses a ListKeyManager will generally do three things:

+
+
+
    +
  • +

    Create a @ViewChildren query for the options being managed.

    +
  • +
  • +

    Initialize the ListKeyManager, passing in the options.

    +
  • +
  • +

    Forward keyboard events from the managed component to the ListKeyManager.

    +
  • +
+
+
+

Each option should implement the ListKeyManagerOption interface:

+
+
+
+
interface ListKeyManagerOption {
+  disabled?: boolean;
+  getLabel?(): string;
+}
+
+
+
+
+

== Types of ListKeyManager

+
+

There are two varieties of ListKeyManager, FocusKeyManager and ActiveDescendantKeyManager.

+
+
+
+

FocusKeyManager

+
+

Used when options will directly receive browser focus. Each item managed must implement the FocusableOption interface:

+
+
+
+
interface FocusableOption extends ListKeyManagerOption {
+  focus(): void;
+}
+
+
+
+
+

ActiveDescendantKeyManager

+
+

Used when options will be marked as active via aria-activedescendant. Each item managed must implement the Highlightable interface:

+
+
+
+
interface Highlightable extends ListKeyManagerOption {
+  setActiveStyles(): void;
+  setInactiveStyles(): void;
+}
+
+
+
+

Each item must also have an ID bound to the listbox’s or menu’s aria-activedescendant.

+
+
+
+

FocusTrap

+
+

The cdkTrapFocus directive traps Tab key focus within an element. This is intended to be used to create accessible experience for components like modal dialogs, where focus must be constrained. This directive is declared in A11yModule.

+
+
+

This directive will not prevent focus from moving out of the trapped region due to mouse interaction.

+
+
+

For example:

+
+
+
+
<div class="my-inner-dialog-content" cdkTrapFocus>
+  <!-- Tab and Shift + Tab will not leave this element. -->
+</div>
+
+
+
+
+

Regions

+
+

Regions can be declared explicitly with an initial focus element by using the cdkFocusRegionStart, cdkFocusRegionEnd and cdkFocusInitial DOM attributes. When using the tab key, focus will move through this region and wrap around on either end.

+
+
+

For example:

+
+
+
+
<a mat-list-item routerLink cdkFocusRegionStart>Focus region start</a>
+<a mat-list-item routerLink>Link</a>
+<a mat-list-item routerLink cdkFocusInitial>Initially focused</a>
+<a mat-list-item routerLink cdkFocusRegionEnd>Focus region end</a>
+
+
+
+
+

InteractivityChecker

+
+

InteractivityChecker is used to check the interactivity of an element, capturing disabled, visible, tabbable, and focusable states for accessibility purposes.

+
+
+
+

LiveAnnouncer

+
+

LiveAnnouncer is used to announce messages for screen-reader users using an aria-live region.

+
+
+

For example:

+
+
+
+
@Component({...})
+export class MyComponent {
+
+ constructor(liveAnnouncer: LiveAnnouncer) {
+   liveAnnouncer.announce("Hey Google");
+ }
+}
+
+
+
+
+

API reference for Angular CDK a11y

+ + +
+
+

Angular Elements

+ +
+
+

What are Angular Elements?

+
+

Angular elements are Angular components packaged as custom elements, a web standard for defining new HTML elements in a framework-agnostic way.

+
+
+

Custom elements are a Web Platform feature currently supported by Chrome, Firefox, Opera, and Safari, and available in other browsers through Polyfills. A custom element extends HTML by allowing you to define a tag whose content is created and controlled by JavaScript code. The browser maintains a CustomElementRegistry of defined custom elements (also called Web Components), which maps an instantiable JavaScript class to an HTML tag.

+
+
+
+

Why use Angular Elements?

+
+

Angular Elements allows Angular to work with different frameworks by using input and output elements. This allows Angular to work with many different frameworks if needed. This is an ideal situation if a slow transformation of an application to Angular is needed, or if some Angular needs to be added to other web applications (for example ASP.NET, JSP, etc.).

+
+
+
+

Negative points about Elements

+
+

Angular Elements is really powerful, but since the transitions between views are going to be handled by another framework or HTML/JavaScript, using the Angular Router is not possible. The view transitions have to be handled manually. This fact also eliminates the possibility of simply porting an application completely.

+
+
+
+

How to use Angular Elements?

+
+

In a generalized way, a simple Angular component could be transformed to an Angular Element with these steps:

+
+
+
+

Installing Angular Elements

+
+

The first step is going to be to install the library using our preferred package manager:

+
+
+
+

== NPM

+
+
+
npm install @angular/elements
+
+
+
+
+

== YARN

+
+
+
yarn add @angular/elements
+
+
+
+
+

Preparing the components in the modules

+
+

Inside the app.module.ts, in addition to the normal declaration of the components inside declarations, the modules inside imports and the services inside providers, the components need to be added in entryComponents. If there are components that have their own module, the same logic is going to be applied for them, only adding in the app.module.ts the components that do not have their own module. Here is an example of this:

+
+
+
+
....
+@NgModule({
+  declarations: [
+    DishFormComponent,
+    DishViewComponent
+  ],
+  imports: [
+    CoreModule,  // Module containing Angular Materials
+    FormsModule
+  ],
+  entryComponents: [
+    DishFormComponent,
+    DishViewComponent
+  ],
+  providers: [DishShareService]
+})
+....
+
+
+
+

After that is done, the constructor of the module is going to be modified to use injector and bootstrap the application defining the components. This is going to allow the Angular Element to get the injections and to define a component tag that will be used later:

+
+
+
+
....
+})
+export class AppModule {
+  constructor(private injector: Injector) {
+
+  }
+
+  ngDoBootstrap() {
+    const el = createCustomElement(DishFormComponent, {injector: this.injector});
+    customElements.define('dish-form', el);
+
+    const elView = createCustomElement(DishViewComponent, {injector: this.injector});
+    customElements.define('dish-view', elView);
+  }
+}
+....
+
+
+
+
+

A component example

+
+

In order to be able to use a component, @Input() and @Output() variables are used. These variables are going to be the ones that will allow the Angular Element to communicate with the framework/JavaScript:

+
+
+

Component html

+
+
+
+
<mat-card>
+    <mat-grid-list cols="1" rowHeight="100px" rowWidth="50%">
+				<mat-grid-tile colspan="1" rowspan="1">
+					<span>{{ platename }}</span>
+				</mat-grid-tile>
+				<form (ngSubmit)="onSubmit(dishForm)" #dishForm="ngForm">
+					<mat-grid-tile colspan="1" rowspan="1">
+						<mat-form-field>
+							<input matInput placeholder="Name" name="name" [(ngModel)]="dish.name">
+						</mat-form-field>
+					</mat-grid-tile>
+					<mat-grid-tile colspan="1" rowspan="1">
+						<mat-form-field>
+							<textarea matInput placeholder="Description" name="description" [(ngModel)]="dish.description"></textarea>
+						</mat-form-field>
+					</mat-grid-tile>
+					<mat-grid-tile colspan="1" rowspan="1">
+						<button mat-raised-button color="primary" type="submit">Submit</button>
+					</mat-grid-tile>
+				</form>
+		</mat-grid-list>
+</mat-card>
+
+
+
+

Component ts

+
+
+
+
@Component({
+  templateUrl: './dish-form.component.html',
+  styleUrls: ['./dish-form.component.scss']
+})
+export class DishFormComponent implements OnInit {
+
+  @Input() platename;
+
+  @Input() platedescription;
+
+  @Output()
+  submitDishEvent = new EventEmitter();
+
+  submitted = false;
+  dish = {name: '', description: ''};
+
+  constructor(public dishShareService: DishShareService) { }
+
+  ngOnInit() {
+    this.dish.name = this.platename;
+    this.dish.description = this.platedescription;
+  }
+
+  onSubmit(dishForm: NgForm): void {
+    this.dishShareService.createDish(dishForm.value.name, dishForm.value.description);
+    this.submitDishEvent.emit('dishSubmited');
+  }
+
+}
+
+
+
+

In this file there are definitions of multiple variables that will be used as input and output. Since the input variables are going to be used directly by html, only lowercase and underscore strategies can be used for them. On the onSubmit(dishForm: NgForm) a service is used to pass these variables to another component. Finally, the selector inside @Component has been removed since a tag that will be used dynamically was already defined in the last step.

+
+
+
+

Solving the error

+
+

In order to be able to use this Angular Element a Polyfills/Browser support related error needs to be solved. This error can be solved in two ways:

+
+
+
+

== Changing the target

+
+

One solution is to change the target in tsconfig.json to es2015. This might not be doable for every application since maybe a specific target is required.

+
+
+
+

== Installing Polyfills

+
+

Another solution is to use a polyfill. In order to do so, the library is going to be installed with a package manager:

+
+
+

Yarn

+
+
+
+
yarn add @webcomponents/webcomponentsjs
+
+
+
+

Npm

+
+
+
+
npm install @webcomponents/webcomponentsjs
+
+
+
+

After the package manager has finished, inside the src folder a new file polyfills.ts is found. To solve the error, importing the corresponding adapter (custom-elements-es5-adapter.js) is necessary:

+
+
+
+
....
+/***************************************************************************************************
+ * APPLICATION IMPORTS
+ */
+
+import '@webcomponents/webcomponentsjs/custom-elements-es5-adapter.js';
+....
+
+
+
+

If you want to learn more about polyfills in angular you can do it here

+
+
+
+

Building the Angular Element

+
+

First, before building the Angular Element, every element inside the app component except the module needs to be removed. After that, a bash script is created in the root folder. This script will bundle every necessary file into a single JS file.

+
+
+
+
ng build "projectName" --configuration production --output-hashing=none && cat dist/"projectName"/runtime.js dist/"projectName"/polyfills.js dist/"projectName"/scripts.js dist/"projectName"/main.js > ./dist/"projectName"/"nameWantedAngularElement".js
+
+
+
+

After executing the bash script, it will generate inside the path dist/"projectName" (or dist/apps/projectname in a Nx workspace) a JS file named "nameWantedAngularElement".js and a css file.

+
+
+
+ +
+

The library ngx-build-plus allows to add different options when building. In addition, it solves some errors that will occur when trying to use multiple angular elements in an application. In order to use it, yarn or npm can be used:

+
+
+

Yarn

+
+
+
+
yarn add ngx-build-plus
+
+
+
+

Npm

+
+
+
+
npm install ngx-build-plus
+
+
+
+

If you want to add it to a specific sub project in your projects folder, use the --project:

+
+
+
+
.... ngx-build-plus --project "project-name"
+
+
+
+

Using this library and the following command, an isolated Angular Element which won’t have conflicts with others can be generated. This Angular Element will not have a polyfill, so the project where we use it will need to include a polyfill with the Angular Element requirements.

+
+
+
+
ng build "projectName" --output-hashing none --single-bundle true --configuration production --bundle-styles false
+
+
+
+

This command will generate three things:

+
+
+
    +
  1. +

    The main JS bundle

    +
  2. +
  3. +

    The script JS

    +
  4. +
  5. +

    The css

    +
  6. +
+
+
+

These files will be used later instead of the single JS generated in the last step.

+
+
+
+

== == Extra parameters

+
+

Here are some extra useful parameters that ngx-build-plus provides:

+
+
+
    +
  • +

    --keep-polyfills: This parameter is going to allow us to keep the polyfills. It needs to be used with caution; avoid using multiple different polyfills, which could cause errors.

    +
  • +
  • +

    --extraWebpackConfig webpack.extra.js: This parameter allows us to create a JavaScript file inside our Angular Elements project with the name of different libraries. Using webpack these libraries will not be included in the Angular Element. This is useful to lower the size of our Angular Element by removing shared libraries. Example:

    +
  • +
+
+
+
+
const webpack = require('webpack');
+
+module.exports = {
+    "externals": {
+        "rxjs": "rxjs",
+        "@angular/core": "ng.core",
+        "@angular/common": "ng.common",
+        "@angular/common/http": "ng.common.http",
+        "@angular/platform-browser": "ng.platformBrowser",
+        "@angular/platform-browser-dynamic": "ng.platformBrowserDynamic",
+        "@angular/compiler": "ng.compiler",
+        "@angular/elements": "ng.elements",
+        "@angular/router": "ng.router",
+        "@angular/forms": "ng.forms"
+    }
+}
+
+
+
+
+

==

+
+
+
  If some libraries are excluded from the `Angular Element` you will need to add the bundled UMD files of those libraries manually.
+== ==
+
+
+
+
+

Using the Angular Element

+
+

The Angular Element that got generated in the last step can be used in almost every framework. In this case, the Angular Element is going to be used in html:

+
+
+
Listing 7. Sample index.html version without ngx-build-plus
+
+
<html>
+    <head>
+        <link rel="stylesheet" href="styles.css">
+    </head>
+    <body>
+        <div id="container">
+
+        </div>
+        <!--Use of the element non dynamically-->
+        <!--<plate-form platename="test" platedescription="test"></plate-form>-->
+        <script src="./devon4ngAngularElements.js"> </script>
+        <script>
+                var elContainer = document.getElementById('container');
+                var el= document.createElement('dish-form');
+                el.setAttribute('platename','test');
+                el.setAttribute('platedescription','test');
+                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+                elContainer.appendChild(el);
+        </script>
+    </body>
+</html>
+
+
+
+
Listing 8. Sample index.html version with ngx-build-plus
+
+
<html>
+    <head>
+        <link rel="stylesheet" href="styles.css">
+    </head>
+    <body>
+        <div id="container">
+
+        </div>
+        <!--Use of the element non dynamically-->
+        <!--<plate-form platename="test" platedescription="test"></plate-form>-->
+         <script src="./polyfills.js"> </script> <!-- Created using --keep-polyfills options -->
+        <script src="./scripts.js"> </script>
+         <script src="./main.js"> </script>
+        <script>
+                var elContainer = document.getElementById('container');
+                var el= document.createElement('dish-form');
+                el.setAttribute('platename','test');
+                el.setAttribute('platedescription','test');
+                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+                elContainer.appendChild(el);
+        </script>
+    </body>
+</html>
+
+
+
+

In this html, the css generated in the last step is going to be imported inside the <head> and then, the JavaScript element is going to be imported at the end of the body. After that is done, there are two uses of Angular Elements in the html, one directly with use of the @Input() variables as parameters commented in the html:

+
+
+
+
....
+        <!--Use of the element non dynamically-->
+        <!--<plate-form platename="test" platedescription="test"></plate-form>-->
+....
+
+
+
+

and one dynamically inside the script:

+
+
+
+
....
+        <script>
+                var elContainer = document.getElementById('container');
+                var el= document.createElement('dish-form');
+                el.setAttribute('platename','test');
+                el.setAttribute('platedescription','test');
+                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+                elContainer.appendChild(el);
+        </script>
+....
+
+
+
+

This JavaScript is an example of how to dynamically create an Angular Element, inserting attributes to fill our @Input() variables and listening to the @Output() that was defined earlier. This is done with:

+
+
+
+
                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+
+
+
+

This allows JavaScript to hook with the @Output() event emitter that was defined. When this event gets called, another component that was defined gets inserted dynamically.

+
+
+
+

Angular Element within another Angular project

+
+

In order to use an Angular Element within another Angular project the following steps need to be followed:

+
+
+
+

Copy bundled script and css to resources

+
+

First copy the generated .js and .css inside assets in the corresponding folder.

+
+
+
+

Add bundled script to angular.json

+
+

Inside angular.json both of the files that were copied in the last step are going to be included. This will be done both in test and in build. Including them in the test configuration will allow unit tests to be performed.

+
+
+
+
{
+....
+  "architect": {
+    ....
+    "build": {
+      ....
+      "styles": [
+        ....
+          "src/assets/css/devon4ngAngularElements.css"
+        ....
+      ]
+      ....
+      "scripts": [
+        "src/assets/js/devon4ngAngularElements.js"
+      ]
+      ....
+    }
+    ....
+    "test": {
+      ....
+      "styles": [
+        ....
+          "src/assets/css/devon4ngAngularElements.css"
+        ....
+      ]
+      ....
+      "scripts": [
+        "src/assets/js/devon4ngAngularElements.js"
+      ]
+      ....
+    }
+  }
+}
+
+
+
+

By declaring the files in the angular.json angular will take care of including them in a proper way.

+
+
+
+

==

+
+
+
  If you are using Nx, the configuration file `angular.json` might be named as `workspace.json`, depending on how you had setup the workspace. The structure of the file remains similar though.
+== ==
+
+
+
+
+

Using Angular Element

+
+

There are two ways that Angular Element can be used:

+
+
+
+

== Create component dynamically

+
+

In order to add the component in a dynamic way, first adding a container is necessary:

+
+
+

app.component.html

+
+
+
+
....
+<div id="container">
+</div>
+....
+
+
+
+

With this container created, inside the app.component.ts a method is going to be created. This method is going to find the container, create the dynamic element and append it into the container.

+
+
+

app.component.ts

+
+
+
+
export class AppComponent implements OnInit {
+  ....
+  ngOnInit(): void {
+    this.createComponent();
+  }
+  ....
+  createComponent(): void {
+    const container = document.getElementById('container');
+    const component = document.createElement('dish-form');
+    container.appendChild(component);
+  }
+  ....
+
+
+
+
+

== Using it directly

+
+

In order to use it directly on the templates, in the app.module.ts the CUSTOM_ELEMENTS_SCHEMA needs to be added:

+
+
+
+
....
+import { NgModule, CUSTOM_ELEMENTS_SCHEMA } from '@angular/core';
+....
+@NgModule({
+  ....
+  schemas: [ CUSTOM_ELEMENTS_SCHEMA ],
+
+
+
+

This is going to allow the use of the Angular Element in the templates directly:

+
+
+

app.component.html

+
+
+
+
....
+<div id="container">
+  <dish-form></dish-form>
+</div>
+
+
+
+

You can find a working example of Angular Elements in our devon4ts-samples repo by referring the samples named angular-elements and angular-elements-test.

+
+ +
+
+

Angular Lazy loading

+
+

When the development of an application starts, it just contains a small set of features so the app usually loads fast. However, as new features are added, the overall application size grows and its loading speed decreases. It is in this context where Lazy loading finds its place. +Lazy loading is a design pattern that defers initialization of objects until they are needed, so, for example, users that just access a website’s home page do not need to have other areas loaded. +Angular handles lazy loading through the routing module which redirects to requested pages. Those pages can be loaded at start or on demand.

+
+
+
+

An example with Angular

+
+

To explain how lazy loading is implemented using angular, a basic sample app is going to be developed. This app will consist of a window named "level 1" that contains two buttons that redirect to other windows in a "second level". It is a simple example, but useful to understand the relation between angular modules and lazy loading.

+
+
+
+Levels app structure +
+
Figure 9. Levels app structure.
+
+
+

This graphic shows that modules act as gates to access components "inside" them.

+
+
+

Because the objective of this guide is related mainly with logic, the html structure and SCSS styles are less relevant, but the complete code can be found as a sample here.

+
+
+
+

Implementation

+
+

First write in a console ng new level-app --routing, to generate a new project called level-app including an app-routing.module.ts file (--routing flag). If you are using Nx, the command would be nx generate @nrwl/angular:app level-app --routing in your Nx workspace.

+
+
+

In the file app.component.html delete all the content except the router-outlet tag.

+
+
+
Listing 9. File app.component.html
+
+
<router-outlet></router-outlet>
+
+
+
+

The next steps consist of creating feature modules.

+
+
+
    +
  • +

    run ng generate module first --routing to generate a module named first.

    +
  • +
  • +

    run ng generate module first/second-left --routing to generate a module named second-left under first.

    +
  • +
  • +

    run ng generate module first/second-right --routing to generate a module second-right under first.

    +
  • +
  • +

    run ng generate component first/first to generate a component named first inside the module first.

    +
  • +
  • +

    run ng generate component first/second-left/content to generate a component content inside the module second-left.

    +
  • +
  • +

    run ng generate component first/second-right/content to generate a component content inside the module second-right.

    +
  • +
+
+
+
+

==

+
+
+
  If you are using Nx, you have to specify the project name (level-app) along with the --project flag. For example, command for generating the first module will be `ng generate module first --project=level-app --routing`
+== ==
+
+
+
+

To move between components we have to configure the routes used:

+
+
+

In app-routing.module.ts add a path 'first' to FirstComponent and a redirection from '' to 'first'.

+
+
+
Listing 10. File app-routing.module.ts.
+
+
...
+import { FirstComponent } from './first/first/first.component';
+
+const routes: Routes = [
+  {
+    path: 'first',
+    component: FirstComponent
+  },
+  {
+    path: '',
+    redirectTo: 'first',
+    pathMatch: 'full',
+  },
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule],
+})
+export class AppRoutingModule {}
+
+
+
+

In app.module.ts import the module which includes FirstComponent.

+
+
+
Listing 11. File app.module.ts
+
+
....
+import { FirstModule } from './first/first.module';
+
+@NgModule({
+  ...
+  imports: [
+    ....
+    FirstModule
+  ],
+  ...
+})
+export class AppModule { }
+
+
+
+

In first-routing.module.ts add routes that direct to the content of SecondRightModule and SecondLeftModule. The content components of both modules have the same name so, in order to avoid conflicts, their names are going to be changed using as (original-name as new-name).

+
+
+
Listing 12. File first-routing.module.ts
+
+
...
+import { ContentComponent as ContentLeft} from './second-left/content/content.component';
+import { ContentComponent as ContentRight} from './second-right/content/content.component';
+import { FirstComponent } from './first/first.component';
+
+const routes: Routes = [
+  {
+    path: '',
+    component: FirstComponent
+  },
+  {
+    path: 'first/second-left',
+    component: ContentLeft
+  },
+  {
+    path: 'first/second-right',
+    component: ContentRight
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule]
+})
+export class FirstRoutingModule { }
+
+
+
+

In first.module.ts import SecondLeftModule and SecondRightModule.

+
+
+
Listing 13. File first.module.ts
+
+
...
+import { SecondLeftModule } from './second-left/second-left.module';
+import { SecondRightModule } from './second-right/second-right.module';
+
+@NgModule({
+  ...
+  imports: [
+    ...
+    SecondLeftModule,
+    SecondRightModule,
+  ]
+})
+export class FirstModule { }
+
+
+
+

Using the current configuration, we have a project that loads all the modules in an eager way. Run ng serve (with --project=level-app in an Nx workspace) to see what happens.

+
+
+

First, during the compilation we can see that just a main file is built.

+
+
+
+Compile eager +
+
Figure 10. Compile eager.
+
+
+

If we go to http://localhost:4200/first and open developer options (F12 on Chrome), it is found that a document named "first" is loaded.

+
+
+
+First level eager +
+
Figure 11. First level eager.
+
+
+

If we click on [Go to right module] a second level module opens, but there is no 'second-right' document.

+
+
+
+Second level right eager +
+
Figure 12. Second level right eager.
+
+
+

But, typing the URL directly will load 'second-right' but not 'first', even if we click on [Go back]

+
+
+
+Second level right eager +
+
Figure 13. Second level right eager direct URL.
+
+
+

Modifying an angular application to load its modules lazily is easy, you have to change the routing configuration of the desired module (for example FirstModule).

+
+
+
Listing 14. File app-routing.module.ts.
+
+
const routes: Routes = [
+  {
+    path: 'first',
+    loadChildren: () => import('./first/first.module').then(m => m.FirstModule),
+  },
+  {
+    path: '',
+    redirectTo: 'first',
+    pathMatch: 'full',
+  },
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule],
+})
+export class AppRoutingModule {}
+
+
+
+

Notice that instead of loading a component, you dynamically import it in a loadChildren attribute because modules act as gates to access components "inside" them. Updating the app to load lazily has four consequences:

+
+
+
    +
  1. +

    No component attribute.

    +
  2. +
  3. +

    No import of FirstComponent.

    +
  4. +
  5. +

    FirstModule import has to be removed from the imports array at app.module.ts.

    +
  6. +
  7. +

    Change of context.

    +
  8. +
+
+
+

If we check first-routing.module.ts again, we can see that the path for ContentLeft and ContentRight is set to 'first/second-left' and 'first/second-right' respectively, so writing http://localhost:4200/first/second-left will redirect us to ContentLeft. However, after loading a module with loadChildren setting the path to 'second-left' and 'second-right' is enough because it acquires the context set by AppRoutingModule.

+
+
+
Listing 15. File first-routing.module.ts
+
+
const routes: Routes = [
+  {
+    path: '',
+    component: FirstComponent
+  },
+  {
+    path: 'second-left',
+    component: ContentLeft
+  },
+  {
+    path: 'second-right',
+    component: ContentRight
+  }
+];
+
+
+
+

If we go to 'first' then FirstModule is situated in '/first' but also its children ContentLeft and ContentRight, so it is not necessary to write in their path 'first/second-left' and 'first/second-right', because that will situate the components on 'first/first/second-left' and 'first/first/second-right'.

+
+
+
+First level wrong path +
+
Figure 14. First level lazy wrong path.
+
+
+

When we compile an app with lazy loaded modules, files containing them will be generated

+
+
+
+First level lazy compilation +
+
Figure 15. First level lazy compilation.
+
+
+

And if we go to developer tools → network, we can find those modules loaded (if they are needed).

+
+
+
+First level lazy +
+
Figure 16. First level lazy.
+
+
+

To load the component ContentComponent of SecondLeftModule lazily, we have to load SecondLeftModule as a children of FirstModule:

+
+
+
    +
  • +

    Change component to loadChildren and reference SecondLeftModule.

    +
  • +
+
+
+
Listing 16. File first-routing.module.ts.
+
+
const routes: Routes = [
+  {
+    path: '',
+    component: FirstComponent
+  },
+  {
+    path: 'second-left',
+    loadChildren: () => import('./second-left/second-left.module').then(m => m.SecondLeftModule),
+  },
+  {
+    path: 'second-right',
+    component: ContentRight
+  }
+];
+
+
+
+
    +
  • +

    Remove SecondLeftModule at first.component.ts

    +
  • +
  • +

    Route the components inside SecondLeftModule. Without this step nothing would be displayed.

    +
  • +
+
+
+
Listing 17. File second-left-routing.module.ts.
+
+
...
+import { ContentComponent } from './content/content.component';
+
+const routes: Routes = [
+  {
+    path: '',
+    component: ContentComponent
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule]
+})
+export class SecondLeftRoutingModule { }
+
+
+
+
    +
  • +

    run ng serve to generate files containing the lazy modules.

    +
  • +
+
+
+
+Second level lazy +
+
Figure 17. Second level lazy loading compilation.
+
+
+

Clicking on [Go to left module] triggers the load of SecondLeftModule.

+
+
+
+Second level lazy network +
+
Figure 18. Second level lazy loading network.
+
+
+
+

Conclusion

+
+

Lazy loading is a pattern that is useful when new features are added. These features are usually identified as modules which, as shown in this document, can be loaded only when needed, reducing the time spent loading an application.

+
+ +
+
+

Angular Library

+
+

Angular CLI provides us with methods that allow the creation of a library. After that, using a package manager (either npm or yarn) the library can be built and packed, which will later allow us to install/publish it.

+
+
+
+

What is a library?

+
+

From wikipedia: a library is a collection of non-volatile resources used by computer programs, often for software development. These may include configuration data, documentation, help data, message templates, pre-written code and subroutines, classes, values or type specifications.

+
+
+
+

How to build a library

+
+

In this section, a library is going to be built step by step. Please note, we will be explaining the steps using both Angular CLI and Nx CLI. You are free to choose either one for your development.

+
+
+
+

1. Creating an empty application

+
+

First, using Angular CLI we are going to generate an empty application which will be later filled with the generated library. In order to do so, Angular CLI allows us to add to ng new "application-name" an option (--create-application). This option is going to tell Angular CLI not to create the initial app project. This is convenient since a library is going to be generated in later steps. Using this command ng new "application-name" --create-application=false an empty project with the wanted name is created.

+
+
+
+
ng new "application-name" --create-application=false
+
+
+
+

This step is much easier and more straight-forward when using Nx. Nx allows us to work in a monorepo workspace, where you can develop a project as an application, or a library, or a tool. You can follow this guide to get started with Nx. +The command for generating a library in Nx is nx generate @nrwl/angular:library library-name --publishable --importPath=library-name. This will create an empty angular application which we can modify and publish as a library.

+
+
+
+

2. Generating a library

+
+

After generating an empty application, a library is going to be generated. Inside the folder of the project, the Angular CLI command ng generate library "library-name" is going to generate the library as a project (projects/"library-name"). As an addition, the option --prefix="library-prefix-wanted" allows us to switch the default prefix that Angular generated with (lib). Using the option to change the prefix the command will look like this ng generate library "library-name" --prefix="library-prefix-wanted".

+
+
+
+
ng generate library "library-name" --prefix="library-prefix-wanted"
+
+
+
+

If you are using Nx, this step is not needed as it is already covered in step 1. In this case, the library project will be generated in the libs folder of a Nx workspace.

+
+
+
+

3. Modifying our library

+
+

In the last step we generated a library. This automatically generates a module, a service and a component inside projects/"library-name" that we can modify, adding new methods, components, etc. that we want to use in other projects. We can generate other elements using the usual Angular CLI generate commands adding the option --project="library-name", which will allow us to generate elements within our project. An example of this is: ng generate service "name" --project="library-name".

+
+
+
+
ng generate "element" "name" --project="library-name"
+
+
+
+

You can use the same command as above in a Nx workspace.

+
+
+
+

4. Exporting the generated things

+
+

Inside the library (projects/"library-name) there’s a public_api.ts which is the file that exports the elements inside the library. (The file is named as index.ts in an Nx workspace). In case we generated other things, this file needs to be modified adding the extra exports with the generated elements. In addition, changing the library version is possible in the file package.json.

+
+
+
+

5. Building our library

+
+

Once we added the necessary exports, in order to use the library in other applications, we need to build the library. The command ng build "library-name" is going to build the library, generating the necessary files in "project-name"/dist/"library-name".

+
+
+
+
ng build "library-name"
+
+
+
+

You can use the same command in Nx as well. Only the path for the generated files will be slightly different: "project-name"/dist/libs/"library-name"

+
+
+
+

6. Packing the library

+
+

In this step we are going to pack the build library. In order to do so, we need to go inside dist/"library-name" (or dist/libs/"library-name") and then run either npm pack or yarn pack to generate a "library-name-version.tgz" file.

+
+
+
Listing 18. Packing using npm
+
+
npm pack
+
+
+
+
Listing 19. Packing using yarn
+
+
yarn pack
+
+
+
+
+

7. Publishing to npm repository (optional)

+
+
    +
  • +

    Add a README.md and LICENSE file. The text inside README.md will be used in you npm package web page as documentation.

    +
  • +
  • +

    run npm adduser if you do not have a npm account to create it, otherwise run npm login and introduce your credentials.

    +
  • +
  • +

    run npm publish inside dist/"library-name" folder.

    +
  • +
  • +

    Check that the library is published: https://npmjs.com/package/library-name

    +
  • +
+
+
+
+

8. Installing our library in other projects

+
+

In this step we are going to install/add the library on other projects.

+
+
+
+

== npm

+
+

In order to add the library in other applications, there are two ways:

+
+
+
    +
  • +

    Option 1: From inside the application where the library is going to get used, using the command npm install "path-to-tgz"/"library-name-version.tgz" allows us to install the .tgz generated in Packing the library.

    +
  • +
  • +

    Option 2: run npm install "library-name" to install it from npm repository.

    +
  • +
+
+
+
+

== yarn

+
+

To add the package using yarn:

+
+
+
    +
  • +

    Option 1: From inside the application where the library is going to get used, using the command yarn add "path-to-tgz"/"library-name-version.tgz" allows us to install the .tgz generated in Packing the library.

    +
  • +
  • +

    Option 2: run yarn add "library-name" to install it from npm repository.

    +
  • +
+
+
+
+

9. Using the library

+
+

Finally, once the library has been installed with either package manager, you can start using its elements as you would use normal elements inside the application. Example app.component.ts:

+
+
+
+
import { Component, OnInit } from '@angular/core';
+import { MyLibraryService } from 'my-library';
+
+@Component({
+  selector: 'app-root',
+  templateUrl: './app.component.html',
+  styleUrls: ['./app.component.scss']
+})
+export class AppComponent implements OnInit {
+
+  toUpper: string;
+
+  constructor(private myLibraryService: MyLibraryService) {}
+  title = 'devon4ng library test';
+  ngOnInit(): void {
+    this.toUpper = this.myLibraryService.firstLetterToUpper('test');
+  }
+}
+
+
+
+

Example app.component.html:

+
+
+
+
<!--The content below is only a placeholder and can be replaced.-->
+<div style="text-align:center">
+  <h1>
+    Welcome to {{ title }}!
+  </h1>
+  <img width="300" alt="Angular Logo" src="data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAyNTAgMjUwIj4KICAgIDxwYXRoIGZpbGw9IiNERDAwMzEiIGQ9Ik0xMjUgMzBMMzEuOSA2My4ybDE0LjIgMTIzLjFMMTI1IDIzMGw3OC45LTQzLjcgMTQuMi0xMjMuMXoiIC8+CiAgICA8cGF0aCBmaWxsPSIjQzMwMDJGIiBkPSJNMTI1IDMwdjIyLjItLjFWMjMwbDc4LjktNDMuNyAxNC4yLTEyMy4xTDEyNSAzMHoiIC8+CiAgICA8cGF0aCAgZmlsbD0iI0ZGRkZGRiIgZD0iTTEyNSA1Mi4xTDY2LjggMTgyLjZoMjEuN2wxMS43LTI5LjJoNDkuNGwxMS43IDI5LjJIMTgzTDEyNSA1Mi4xem0xNyA4My4zaC0zNGwxNy00MC45IDE3IDQwLjl6IiAvPgogIDwvc3ZnPg== ">
+</div>
+<h2>Here is my library service being used: {{toUpper}}</h2>
+<lib-my-library></lib-my-library>
+
+
+
+

Example app.module.ts:

+
+
+
+
import { BrowserModule } from '@angular/platform-browser';
+import { NgModule } from '@angular/core';
+
+import { AppRoutingModule } from './app-routing.module';
+import { AppComponent } from './app.component';
+
+import { MyLibraryModule } from 'my-library';
+@NgModule({
+  declarations: [
+    AppComponent
+  ],
+  imports: [
+    BrowserModule,
+    AppRoutingModule,
+    MyLibraryModule
+  ],
+  providers: [],
+  bootstrap: [AppComponent]
+})
+export class AppModule { }
+
+
+
+

The result from using the library:

+
+
+
+result +
+
+
+
+

devon4ng libraries

+
+

In devonfw/devon4ng-library you can find some useful libraries:

+
+
+
    +
  • +

    Authorization module: This devon4ng Angular module adds rights-based authorization to your Angular app.

    +
  • +
  • +

    Cache module: Use this devon4ng Angular module when you want to cache requests to server. You may configure it to store in cache only the requests you need and to set the duration you want.

    +
  • +
+
+ +
+
+

Angular Material Theming

+
+

Angular Material library offers UI components for developers. Those components follow Google Material design baselines, but characteristics like colors can be modified in order to adapt them to the needs of the client: corporative colors, corporative identity, dark themes, …​

+
+
+
+

Theming basics

+
+

In Angular Material, a theme is created mixing multiple colors. Colors and its light and dark variants conform a palette. In general, a theme consists of the following palettes:

+
+
+
    +
  • +

    primary: Most used across screens and components.

    +
  • +
  • +

    accent: Floating action button and interactive elements.

    +
  • +
  • +

    warn: Error state.

    +
  • +
  • +

    foreground: Text and icons.

    +
  • +
  • +

    background: Element backgrounds.

    +
  • +
+
+
+
+Theme palette +
+
Figure 19. Palettes and variants.
+
+
+

In angular material, a palette is represented as a SCSS map.

+
+
+
+SCSS map +
+
Figure 20. SCSS map and palettes.
+
+
+ + + + + +
+ + +Some components can be forced to use primary, accent or warn palettes using the attribute color, for example: <mat-toolbar color="primary">. +
+
+
+
+

Pre-built themes

+
+

Available pre-built themes:

+
+
+
    +
  • +

    deeppurple-amber.css

    +
  • +
+
+
+
+`deeppurple-amber theme` +
+
Figure 21. deeppurple-amber theme.
+
+
+
    +
  • +

    indigo-pink.css

    +
  • +
+
+
+
+indigo-pink theme +
+
Figure 22. indigo-pink theme.
+
+
+
    +
  • +

    pink-bluegrey.css

    +
  • +
+
+
+
+` pink-bluegrey theme` +
+
Figure 23. pink-bluegrey theme.
+
+
+
    +
  • +

    purple-green.css

    +
  • +
+
+
+
+purple-green theme +
+
Figure 24. purple-green theme.
+
+
+

The pre-built themes can be added using @import.

+
+
+
+
@import '@angular/material/prebuilt-themes/deeppurple-amber.css';
+
+
+
+
+

Custom themes

+
+

Sometimes pre-built themes do not meet the needs of a project, because color schemas are too specific or do not incorporate branding colors, in those situations custom themes can be built to offer a better solution to the client.

+
+
+

For this topic, we are going to use a basic layout project that can be found in devon4ts-samples repository.

+
+
+
+

Basics

+
+

Before starting writing custom themes, there are some necessary things that have to be mentioned:

+
+
+
    +
  • +

    Add a default theme: The project mentioned before has just one global SCSS style sheet styles.scss that includes indigo-pink.scss which will be the default theme.

    +
  • +
  • +

    Add @import '~@angular/material/theming'; at the beginning of the every style sheet to be able to use angular material pre-built color palettes and functions.

    +
  • +
  • +

    Add @include mat-core(); once per project, so if you are writing multiple themes in multiple files you could import those files from a 'central' one (for example styles.scss). This includes all common styles that are used by multiple components.

    +
  • +
+
+
+
+Theme files structure +
+
Figure 25. Theme files structure.
+
+
+
+

Basic custom theme

+
+

To create a new custom theme, the .scss file containing it has to import the angular _theming.scss file (angular/material/theming) and include mat-core. _theming.scss includes multiple color palettes and some functions that we are going to see below. The file for this basic theme is going to be named styles-custom-dark.scss.

+
+
+

First, declare new variables for primary, accent and warn palettes. Those variables are going to store the result of the function mat-palette.

+
+
+

mat-palette accepts four arguments: base color palette, main, lighter and darker variants (See Palettes and variants.) and returns a new palette including some additional map values: default, lighter and darker ([id_scss_map]). Only the first argument is mandatory.

+
+
+
Listing 20. File styles-custom-dark.scss.
+
+
$custom-dark-theme-primary: mat-palette($mat-pink);
+$custom-dark-theme-accent: mat-palette($mat-blue);
+$custom-dark-theme-warn: mat-palette($mat-red);
+
+
+
+

In this example we are using colors available in _theming.scss: mat-pink, mat-blue, mat-red. If you want to use a custom color you need to define a new map, for instance:

+
+
+
Listing 21. File styles-custom-dark.scss custom pink.
+
+
$my-pink: (
+    50 : #fcf3f3,
+    100 : #f9e0e0,
+    200 : #f5cccc,
+    300 : #f0b8b8,
+    500 : #ea9999,
+    900 : #db6b6b,
+    A100 : #ffffff,
+    A200 : #ffffff,
+    A400 : #ffeaea,
+    A700 : #ffd0d0,
+    contrast: (
+        50 : #000000,
+        100 : #000000,
+        200 : #000000,
+        300 : #000000,
+        900 : #000000,
+        A100 : #000000,
+        A200 : #000000,
+        A400 : #000000,
+        A700 : #000000,
+    )
+);
+
+$custom-dark-theme-primary: mat-palette($my-pink);
+...
+
+
+
+ + + + + +
+ + +Some pages allow creating these palettes easily, for instance: http://mcg.mbitson.com +
+
+
+

Until now, we just have defined primary, accent and warn palettes but what about foreground and background? Angular material has two functions to change both:

+
+
+
    +
  • +

    mat-light-theme: Receives as arguments primary, accent and warn palettes and return a theme whose foreground is basically black (texts, icons, …​), the background is white and the other palettes are the received ones.

    +
  • +
+
+
+
+`deeppurple-amber theme` +
+
Figure 26. Custom light theme.
+
+
+
    +
  • +

    mat-dark-theme: Similar to mat-light-theme but returns a theme whose foreground is basically white and background black.

    +
  • +
+
+
+
+`deeppurple-amber theme` +
+
Figure 27. Custom dark theme.
+
+
+

For this example we are going to use mat-dark-theme and save its result in $custom-dark-theme.

+
+
+
Listing 22. File styles-custom-dark.scss updated with mat-dark-theme.
+
+
...
+
+$custom-dark-theme: mat-dark-theme(
+  $custom-dark-theme-primary,
+  $custom-dark-theme-accent,
+  $custom-dark-theme-warn
+);
+
+
+
+

To apply the saved theme, we have to go to styles.scss and import our styles-custom-dark.scss and include a function called angular-material-theme using the theme variable as argument.

+
+
+
Listing 23. File styles.scss.
+
+
...
+@import 'styles-custom-dark.scss';
+@include angular-material-theme($custom-dark-theme);
+
+
+
+

If we have multiple themes it is necessary to add the include statement inside a css class and use it in src/index.html → app-root component.

+
+
+
Listing 24. File styles.scss updated with custom-dark-theme class.
+
+
...
+@import 'styles-custom-dark.scss';
+
+.custom-dark-theme {
+  @include angular-material-theme($custom-dark-theme);
+}
+
+
+
+
Listing 25. File src/index.html.
+
+
...
+<app-root class="custom-dark-theme"></app-root>
+...
+
+
+
+

This will apply $custom-dark-theme theme for the entire application.

+
+
+
+

Full custom theme

+
+

Sometimes it is necessary to customize elements other than the background and foreground; in those situations we have to create a new function similar to mat-light-theme and mat-dark-theme. Let’s focus on mat-light-theme:

+
+
+
Listing 26. Source code of mat-light-theme
+
+
@function mat-light-theme($primary, $accent, $warn: mat-palette($mat-red)) {
+  @return (
+    primary: $primary,
+    accent: $accent,
+    warn: $warn,
+    is-dark: false,
+    foreground: $mat-light-theme-foreground,
+    background: $mat-light-theme-background,
+  );
+}
+
+
+
+

As we can see, mat-light-theme takes three arguments and returns a map including them as primary, accent and warn color; but there are three more keys in that map: is-dark, foreground and background.

+
+
+
    +
  • +

    is-dark: Boolean true if it is a dark theme, false otherwise.

    +
  • +
  • +

    background: Map that stores the color for multiple background elements.

    +
  • +
  • +

    foreground: Map that stores the color for multiple foreground elements.

    +
  • +
+
+
+

To show which elements can be colored lets create a new theme in a file styles-custom-cap.scss:

+
+
+
Listing 27. File styles-custom-cap.scss: Background and foreground variables.
+
+
@import '~@angular/material/theming';
+
+// custom background and foreground palettes
+$my-cap-theme-background: (
+  status-bar: #0070ad,
+  app-bar: map_get($mat-blue, 900),
+  background: #12abdb,
+  hover: rgba(white, 0.04),
+  card: map_get($mat-red, 800),
+  dialog: map_get($mat-grey, 800),
+  disabled-button: $white-12-opacity,
+  raised-button: map-get($mat-grey, 800),
+  focused-button: $white-6-opacity,
+  selected-button: map_get($mat-grey, 900),
+  selected-disabled-button: map_get($mat-grey, 800),
+  disabled-button-toggle: black,
+  unselected-chip: map_get($mat-grey, 700),
+  disabled-list-option: black,
+);
+
+$my-cap-theme-foreground: (
+  base: yellow,
+  divider: $white-12-opacity,
+  dividers: $white-12-opacity,
+  disabled: rgba(white, 0.3),
+  disabled-button: rgba(white, 0.3),
+  disabled-text: rgba(white, 0.3),
+  hint-text: rgba(white, 0.3),
+  secondary-text: rgba(white, 0.7),
+  icon: white,
+  icons: white,
+  text: white,
+  slider-min: white,
+  slider-off: rgba(white, 0.3),
+  slider-off-active: rgba(white, 0.3),
+);
+
+
+
+

Function which uses the variables defined before to create a new theme:

+
+
+
Listing 28. File styles-custom-cap.scss: Creating a new theme function.
+
+
// instead of creating a theme with mat-light-theme or mat-dark-theme,
+// we will create our own theme-creating function that lets us apply our own foreground and background palettes.
+@function create-my-cap-theme($primary, $accent, $warn: mat-palette($mat-red)) {
+  @return (
+    primary: $primary,
+    accent: $accent,
+    warn: $warn,
+    is-dark: false,
+    foreground: $my-cap-theme-foreground,
+    background: $my-cap-theme-background
+  );
+}
+
+
+
+

Calling the new function and storing its value in $custom-cap-theme.

+
+
+
Listing 29. File styles-custom-cap.scss: Storing the new theme.
+
+
// We use create-my-cap-theme instead of mat-light-theme or mat-dark-theme
+$custom-cap-theme-primary: mat-palette($mat-green);
+$custom-cap-theme-accent: mat-palette($mat-blue);
+$custom-cap-theme-warn: mat-palette($mat-red);
+
+$custom-cap-theme: create-my-cap-theme(
+  $custom-cap-theme-primary,
+  $custom-cap-theme-accent,
+  $custom-cap-theme-warn
+);
+
+
+
+

After defining our new theme, we can import it from styles.scss.

+
+
+
Listing 30. File styles.scss updated with custom-cap-theme class.
+
+
...
+@import 'styles-custom-cap.scss';
+.custom-cap-theme {
+  @include angular-material-theme($custom-cap-theme);
+}
+
+
+
+
+

Multiple themes and overlay-based components

+
+

Certain components (e.g. menu, select, dialog, etc.) that are inside of a global overlay container require an additional step to be affected by the theme’s css class selector.

+
+
+
Listing 31. File app.module.ts
+
+
import {OverlayContainer} from '@angular/cdk/overlay';
+
+@NgModule({
+  // ...
+})
+export class AppModule {
+  constructor(overlayContainer: OverlayContainer) {
+    overlayContainer.getContainerElement().classList.add('custom-cap-theme');
+  }
+}
+
+
+
+ +
+

Angular Progressive Web App

+
+

Progressive web applications (PWA) are web applications that offer a better user experience than traditional ones. In general, they solve problems related with reliability and speed:

+
+
+
    +
  • +

    Reliability: PWA are stable. In this context stability means that even with slow connections or even with no network at all, the application still works. To achieve this, some basic resources like styles, fonts, requests, …​ are stored; due to this caching, it is not possible to assure that the content is always up-to-date.

    +
  • +
  • +

    Speed: When a user opens an application, he or she will expect it to load almost immediately (almost 53% of users abandon sites that take longer than 3 seconds, source: https://developers.google.com/web/progressive-web-apps/#fast).

    +
  • +
+
+
+

PWA uses a script called service worker, which runs in the background and essentially acts as a proxy between the web app and the network, intercepting requests and acting depending on the network conditions.

+
+
+
+

Assumptions

+
+

This guide assumes that you already have installed:

+
+
+
    +
  • +

    NodeJS

    +
  • +
  • +

    npm package manager

    +
  • +
  • +

    Angular CLI / Nx CLI

    +
  • +
+
+
+
+

Sample Application

+
+
+My Thai Star recommendation +
+
Figure 28. Basic angular PWA.
+
+
+

To explain how to build PWA using angular, a basic application is going to be built. This app will be able to ask for resources and save in the cache in order to work even offline.

+
+
+
+

Step 1: Create a new project

+
+

This step can be completed with one simple command using the Angular CLI: ng new <name>, where <name> is the name for the app. In this case, the app is going to be named basic-ng-pwa. If you are using Nx CLI, you can use the command nx generate @nrwl/angular:app <name> in your Nx workspace. You can follow this guide if you want to get started with Nx workspace.

+
+
+
+

Step 2: Create a service

+
+

Web applications usually use external resources, making necessary the addition of services which can get those resources. This application gets a dish from My Thai Star’s back-end and shows it. To do so, a new service is going to be created.

+
+
+
    +
  • +

    go to project folder: cd basic-ng-pwa. If using Nx, go to the root folder of the workspace.

    +
  • +
  • +

    run ng generate service data. For Nx CLI, specify the project name with --project flag. So the command becomes ng generate service data --project=basic-ng-pwa

    +
  • +
  • +

    Modify data.service.ts, environment.ts, environment.prod.ts

    +
  • +
+
+
+

To retrieve data with this service, you have to import the module HttpClient and add it to the service’s constructor. Once added, use it to create a function getDishes() that sends an HTTP request to My Thai Star’s back-end. The URL of the back-end can be stored as an environment variable MY_THAI_STAR_DISH.

+
+
+

data.service.ts

+
+
+
+
  ...
+  import { HttpClient } from '@angular/common/http';
+  import { MY_THAI_STAR_DISH } from '../environments/environment';
+  ...
+
+  export class DataService {
+    constructor(private http: HttpClient) {}
+
+    /* Get data from Back-end */
+    getDishes() {
+      return this.http.get(MY_THAI_STAR_DISH);
+    }
+    ...
+  }
+
+
+
+

environments.ts

+
+
+
+
  ...
+  export const MY_THAI_STAR_DISH =
+  'https://mts-devonfw-core.cloud.okteto.net/api/services/rest/dishmanagement/v1/dish/1';
+  ...
+
+
+
+

environments.prod.ts

+
+
+
+
  ...
+  export const MY_THAI_STAR_DISH =
+  'https://mts-devonfw-core.cloud.okteto.net/api/services/rest/dishmanagement/v1/dish/1';
+  ...
+
+
+
+
+

Step 3: Use the service

+
+

The component AppComponent implements the interface OnInit and inside its method ngOnInit() the subscription to the services is done. When a dish arrives, it is saved and shown (app.component.html).

+
+
+
+
  ...
+  import { DataService } from './data.service';
+  export class AppComponent implements OnInit {
+  dish: { name: string; description: string } = { name: '', description: ''};
+
+  ...
+  ngOnInit() {
+    this.data
+      .getDishes()
+      .subscribe(
+        (dishToday: { dish: { name: string; description: string } }) => {
+          this.dish = {
+            name: dishToday.dish.name,
+            description: dishToday.dish.description,
+          };
+        },
+      );
+  }
+}
+
+
+
+
+

Step 4: Structures, styles and updates

+
+

This step shows interesting code inside the sample app. The complete content can be found in devon4ts-samples.

+
+
+

index.html

+
+
+

To use the Montserrat font add the following link inside the head tag of the app’s index.html file.

+
+
+
+
  <link href="https://fonts.googleapis.com/css?family=Montserrat" rel="stylesheet">
+
+
+
+

styles.scss

+
+
+
+
  body {
+    ...
+    font-family: 'Montserrat', sans-serif;
+  }
+
+
+
+

app.component.ts

+
+
+

This file is also used to reload the app if there are any changes.

+
+
+
    +
  • +

    SwUpdate: This object comes inside the @angular/pwa package and it is used to detect changes and reload the page if needed.

    +
  • +
+
+
+
+
  ...
+  import { SwUpdate } from '@angular/service-worker';
+
+  export class AppComponent implements OnInit {
+
+  ...
+    constructor(updates: SwUpdate, private data: DataService) {
+      updates.available.subscribe((event) => {
+        updates.activateUpdate().then(() => document.location.reload());
+      });
+    }
+    ...
+  }
+
+
+
+
+

Step 5: Make it Progressive.

+
+

Install Angular PWA package with ng add @angular/pwa --project=<name>. As before substitute name with basic-ng-pwa.

+
+
+

The above command completes the following actions:

+
+
+
    +
  1. +

    Adds the @angular/service-worker package to your project.

    +
  2. +
  3. +

    Enables service worker build support in the CLI.

    +
  4. +
  5. +

    Imports and registers the service worker in the app module.

    +
  6. +
  7. +

    Updates the index.html file:

    +
    +
      +
    • +

      Includes a link to add the manifest.json file.

      +
    • +
    • +

      Adds meta tags for theme-color.

      +
    • +
    • +

      Installs icon files to support the installed Progressive Web App (PWA).

      +
    • +
    • +

      Creates the service worker configuration file called ngsw-config.json, which specifies the caching behaviors and other settings.

      +
    • +
    +
    +
  8. +
+
+
+
+

== manifest.json

+
+

manifest.json is a file that allows to control how the app is displayed in places where native apps are displayed.

+
+
+

Fields

+
+
+

name: Name of the web application.

+
+
+

short_name: Short version of name.

+
+
+

theme_color: Default theme color for an application context.

+
+
+

background_color: Expected background color of the web application.

+
+
+

display: Preferred display mode.

+
+
+

scope: Navigation scope of this web application’s application context.

+
+
+

start_url: URL loaded when the user launches the web application.

+
+
+

icons: Array of icons that serve as representations of the web app.

+
+
+

Additional information can be found here.

+
+
+
+

== ngsw-config.json

+
+

ngsw-config.json specifies which files and data URLs have to be cached and updated by the Angular service worker.

+
+
+

Fields

+
+
+
    +
  • +

    index: File that serves as index page to satisfy navigation requests.

    +
  • +
  • +

    assetGroups: Resources that are part of the app version that update along with the app.

    +
    +
      +
    • +

      name: Identifies the group.

      +
    • +
    • +

      installMode: How the resources are cached (pre-fetch or lazy).

      +
    • +
    • +

      updateMode: Caching behavior when a new version of the app is found (pre-fetch or lazy).

      +
    • +
    • +

      resources: Resources to cache. There are three groups.

      +
      +
        +
      • +

        files: Lists patterns that match files in the distribution directory.

        +
      • +
      • +

        urls: URL patterns matched at runtime.

        +
      • +
      +
      +
    • +
    +
    +
  • +
  • +

    dataGroups: Useful for API requests.

    +
    +
      +
    • +

      name: Identifies the group.

      +
    • +
    • +

      urls: URL patterns matched at runtime.

      +
    • +
    • +

      version: Indicates that the resources being cached have been updated in a backwards-incompatible way.

      +
    • +
    • +

      cacheConfig: Policy by which matching requests will be cached

      +
      +
        +
      • +

        maxSize: The maximum number of entries, or responses, in the cache.

        +
      • +
      • +

        maxAge: How long responses are allowed to remain in the cache.

        +
        +
          +
        • +

          d: days. (5d = 5 days).

          +
        • +
        • +

          h: hours

          +
        • +
        • +

          m: minutes

          +
        • +
        • +

          s: seconds. (5m20s = 5 minutes and 20 seconds).

          +
        • +
        • +

          u: milliseconds

          +
        • +
        +
        +
      • +
      • +

        timeout: How long the Angular service worker will wait for the network to respond before using a cached response. Same dataformat as maxAge.

        +
      • +
      • +

        strategy: Caching strategies (performance or freshness).

        +
      • +
      +
      +
    • +
    +
    +
  • +
  • +

    navigationUrls: List of URLs that will be redirected to the index file.

    +
  • +
+
+
+

Additional information can be found here.

+
+
+
+

Step 6: Configure the app

+
+

manifest.json

+
+
+

Default configuration.

+
+
+

 

+
+
+

ngsw-config.json

+
+
+

At assetGroups → resources → urls: In this field the google fonts API is added in order to use Montserrat font even without network.

+
+
+
+
  "urls": [
+          "https://fonts.googleapis.com/**"
+        ]
+
+
+
+

At the root of the json: A data group to cache API calls.

+
+
+
+
  {
+    ...
+    "dataGroups": [{
+      "name": "mythaistar-dishes",
+      "urls": [
+        "https://mts-devonfw-core.cloud.okteto.net/api/services/rest/dishmanagement/v1/dish/1"
+      ],
+      "cacheConfig": {
+        "maxSize": 100,
+        "maxAge": "1h",
+        "timeout": "10s",
+        "strategy": "freshness"
+      }
+    }]
+  }
+
+
+
+
+

Step 7: Check that your app is a PWA

+
+

To check if an app is a PWA lets compare its normal behavior against itself but built for production. Run in the project’s root folder the commands below:

+
+
+

ng build --prod to build the app using production settings.(nx build <name> --prod in Nx CLI)

+
+
+

npm install http-server to install an npm module that can serve your built application. Documentation here.

+
+
+

Go to the dist/basic-ng-pwa/ folder running cd dist/basic-ng-pwa. In an Nx workspace, the path will be dist/apps/basic-ng-pwa

+
+
+

http-server -o to serve your built app.

+
+
+
+Http server running +
+
Figure 29. Http server running on localhost:8081.
+
+
+

 

+
+
+

In another console instance run ng serve (or nx serve basic-ng-pwa for Nx) to open the common app (not built).

+
+
+
+.Angular server running +
+
Figure 30. Angular server running on localhost:4200.
+
+
+

 

+
+
+

The first difference can be found on Developer tools → application, here it is seen that the PWA application (left) has a service worker and the common (right) one does not.

+
+
+
+Application comparison +
+
Figure 31. Application service worker comparison.
+
+
+

 

+
+
+

If the "offline" box is checked, it will force a disconnection from the network. In situations where users do not have connectivity or have a slow one, the PWA can still be accessed and used.

+
+
+
+Online offline apps +
+
Figure 32. Offline application.
+
+
+

 

+
+
+

Finally, browser extensions like Lighthouse can be used to test whether an application is progressive or not.

+
+
+
+Lighthouse report +
+
Figure 33. Lighthouse report.
+
+ +
+
+

APP_INITIALIZER

+ +
+
+

What is the APP_INITIALIZER pattern

+
+

The APP_INITIALIZER pattern allows an application to choose which configuration is going to be used in the start of the application, this is useful because it allows to setup different configurations, for example, for docker or a remote configuration. This provides benefits since this is done on runtime, so there’s no need to recompile the whole application to switch from configuration.

+
+
+
+

What is APP_INITIALIZER

+
+

APP_INITIALIZER allows to provide a service in the initialization of the application in a @NgModule. It also allows to use a factory, allowing to create a singleton in the same service. An example can be found in MyThaiStar /core/config/config.module.ts:

+
+
+
+

==

+
+

The provider expects the return of a Promise; if it is using Observables, calling the method toPromise() will allow a switch from Observable to Promise.

+
+
+
+
import { NgModule, APP_INITIALIZER } from '@angular/core';
+import { HttpClientModule } from '@angular/common/http';
+
+import { ConfigService } from './config.service';
+
+@NgModule({
+  imports: [HttpClientModule],
+  providers: [
+    ConfigService,
+    {
+      provide: APP_INITIALIZER,
+      useFactory: ConfigService.factory,
+      deps: [ConfigService],
+      multi: true,
+    },
+  ],
+})
+export class ConfigModule {}
+
+
+
+

This is going to allow the creation of a ConfigService where, using a singleton, the service is going to load an external config depending on a route. This dependence with a route, allows to setup different configuration for docker etc. This is seen in the ConfigService of MyThaiStar:

+
+
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Config, config } from './config';
+
+@Injectable()
+export class ConfigService {
+  constructor(private httpClient: HttpClient) {}
+
+  static factory(appLoadService: ConfigService) {
+    return () => appLoadService.loadExternalConfig();
+  }
+
+  // this method gets external configuration calling /config endpoint
+  //and merges into config object
+  loadExternalConfig(): Promise<any> {
+    if (!environment.loadExternalConfig) {
+      return Promise.resolve({});
+    }
+
+    const promise = this.httpClient
+      .get('/config')
+      .toPromise()
+      .then((settings) => {
+        Object.keys(settings || {}).forEach((k) => {
+          config[k] = settings[k];
+        });
+        return settings;
+      })
+      .catch((error) => {
+        return 'ok, no external configuration';
+      });
+
+    return promise;
+  }
+
+  getValues(): Config {
+    return config;
+  }
+}
+
+
+
+

As it is mentioned earlier, you can see the use of a factory to create a singleton at the start. After that, loadExternalConfig is going to look for a Boolean inside the corresponding environment file inside the path src/environments/, this Boolean loadExternalConfig is going to easily allow to switch to a external config. If it is true, it generates a promise that overwrites the parameters of the local config, allowing to load the external config. Finally, the last method getValues() is going to allow to return the file config with the values (overwritten or not). The local config file from MyThaiStar can be seen here:

+
+
+
+
export enum BackendType {
+  IN_MEMORY,
+  REST,
+  GRAPHQL,
+}
+
+interface Role {
+  name: string;
+  permission: number;
+}
+
+interface Lang {
+  label: string;
+  value: string;
+}
+
+export interface Config {
+  version: string;
+  backendType: BackendType;
+  restPathRoot: string;
+  restServiceRoot: string;
+  pageSizes: number[];
+  pageSizesDialog: number[];
+  roles: Role[];
+  langs: Lang[];
+}
+
+export const config: Config = {
+  version: 'dev',
+  backendType: BackendType.REST,
+  restPathRoot: 'http://localhost:8081/mythaistar/',
+  restServiceRoot: 'http://localhost:8081/mythaistar/services/rest/',
+  pageSizes: [8, 16, 24],
+  pageSizesDialog: [4, 8, 12],
+  roles: [
+    { name: 'CUSTOMER', permission: 0 },
+    { name: 'WAITER', permission: 1 },
+  ],
+  langs: [
+    { label: 'English', value: 'en' },
+    { label: 'Deutsch', value: 'de' },
+    { label: 'Español', value: 'es' },
+    { label: 'Català', value: 'ca' },
+    { label: 'Français', value: 'fr' },
+    { label: 'Nederlands', value: 'nl' },
+    { label: 'हिन्दी', value: 'hi' },
+    { label: 'Polski', value: 'pl' },
+    { label: 'Русский', value: 'ru' },
+    { label: 'български', value: 'bg' },
+  ],
+};
+
+
+
+

Finally, inside a environment file src/environments/environment.ts the use of the Boolean loadExternalConfig is seen:

+
+
+
+
// The file contents for the current environment will overwrite these during build.
+// The build system defaults to the dev environment which uses `environment.ts`, but if you do
+// `ng build --env=prod` then `environment.prod.ts` will be used instead.
+// The list of which env maps to which file can be found in `.angular-cli.json`.
+
+export const environment: {
+  production: boolean;
+  loadExternalConfig: boolean;
+} = { production: false, loadExternalConfig: false };
+
+
+
+
+

Creating a APP_INITIALIZER configuration

+
+

This section is going to be used to create a new APP_INITIALIZER basic example. For this, a basic app with angular is going to be generated using ng new "appname" substituting appname for the name of the app opted. +If you are using Nx, the command would be nx generate @nrwl/angular:app "appname" in your Nx workspace. Click here to get started with using Nx.

+
+
+
+

Setting up the config files

+ +
+
+

Docker external configuration (Optional)

+
+

This section is only done if there is a docker configuration in the app you are setting up this type of configuration.

+
+
+

1.- Create in the root folder /docker-external-config.json. This external config is going to be used when the application is loaded with docker (if the Boolean to load the external configuration is set to true). Here you need to add all the config parameter you want to load with docker:

+
+
+
+
{
+    "version": "docker-version"
+}
+
+
+
+

2.- In the root, in the file /Dockerfile angular is going to copy the docker-external-config.json that was created before into the Nginx html route:

+
+
+
+
....
+COPY docker-external-config.json /usr/share/nginx/html/docker-external-config.json
+....
+
+
+
+
+

External json configuration

+
+

1.- Create a json file in the route /src/external-config.json. This external config is going to be used when the application is loaded with the start script (if the Boolean to load the external configuration is set to true). Here you need to add all the config parameter you want to load:

+
+
+
+
{
+    "version": "external-config"
+}
+
+
+
+

2.- The file named /angular.json (/workspace.json if using Nx) located at the root is going to be modified to add the file external-config.json that was just created to both "assets" inside Build and Test:

+
+
+
+
	....
+	"build": {
+          ....
+            "assets": [
+              "src/assets",
+              "src/data",
+              "src/favicon.ico",
+              "src/manifest.json",
+              "src/external-config.json"
+            ]
+	        ....
+        "test": {
+	  ....
+	   "assets": [
+              "src/assets",
+              "src/data",
+              "src/favicon.ico",
+              "src/manifest.json",
+              "src/external-config.json"
+            ]
+	  ....
+
+
+
+
+

Setting up the proxies

+
+

This step is going to setup two proxies. This is going to allow to load the config desired by the context, in case that it is using docker to load the app or in case it loads the app with angular. Loading different files is made possible by the fact that the ConfigService method loadExternalConfig() looks for the path /config.

+
+
+
+

Docker (Optional)

+
+

1.- This step is going to be for docker. Add docker-external-config.json to Nginx configuration (/nginx.conf) that is in the root of the application:

+
+
+
+
....
+  location  ~ ^/config {
+        alias /usr/share/nginx/html/docker-external-config.json;
+  }
+....
+
+
+
+
+

External Configuration

+
+

1.- Now the file /proxy.conf.json needs to be created/modified. This file can be found in the root of the application. In this file you can add the route of the external configuration in target and the name of the file in ^/config:

+
+
+
+
....
+  "/config": {
+    "target": "http://localhost:4200",
+    "secure": false,
+    "pathRewrite": {
+      "^/config": "/external-config.json"
+    }
+  }
+....
+
+
+
+

2.- The file package.json found in the root of the application is going to use the start script to load the proxy config that was just created:

+
+
+
+
  "scripts": {
+....
+    "start": "ng serve --proxy-config proxy.conf.json -o",
+....
+
+
+
+

If using Nx, you need to run the command manually:

+
+
+

nx run angular-app-initializer:serve:development --proxyConfig=proxy.conf.json --o

+
+
+
+

Adding the loadExternalConfig Boolean to the environments

+
+

In order to load an external config we need to add the loadExternalConfig Boolean to the environments. To do so, inside the folder environments/ the files are going to get modified adding this Boolean to each environment that is going to be used. In this case, only two environments are going to be modified (environment.ts and environment.prod.ts). Down below there is an example of the modification being done in the environment.prod.ts:

+
+
+
+
export const environment: {
+  production: boolean;
+  loadExternalConfig: boolean;
+} = { production: false, loadExternalConfig: false };
+
+
+
+

In the file, in the first instance there is the declaration of the types of the variables. After that, there is the definition of those variables. This variable loadExternalConfig is going to be used by the service, allowing the setup of an external config just by switching loadExternalConfig to true.

+
+
+
+

Creating core configuration service

+
+

In order to create the whole configuration module, three files are going to be created:

+
+
+

1.- Create in the core app/core/config/ a config.ts

+
+
+
+
  export interface Config {
+    version: string;
+  }
+
+  export const config: Config = {
+    version: 'dev'
+  };
+
+
+
+

Taking a look at this file, it creates an interface (Config) that is going to be used by the variable that it exports (export const config: Config). This variable config is going to be used by the service that is going to be created.

+
+
+

2.- Create in the core app/core/config/ a config.service.ts:

+
+
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Config, config } from './config';
+import { environment } from '../../../environments/environment';
+
+@Injectable()
+export class ConfigService {
+  constructor(private httpClient: HttpClient) {}
+
+  static factory(appLoadService: ConfigService) {
+    return () => appLoadService.loadExternalConfig();
+  }
+
+  // this method gets external configuration calling /config endpoint
+  // and merges into config object
+  loadExternalConfig(): Promise<any> {
+    if (!environment.loadExternalConfig) {
+      return Promise.resolve({});
+    }
+
+    const promise = this.httpClient
+      .get('/config')
+      .toPromise()
+      .then((settings) => {
+        Object.keys(settings || {}).forEach((k) => {
+          config[k] = settings[k];
+        });
+        return settings;
+      })
+      .catch((error) => {
+        return 'ok, no external configuration';
+      });
+
+    return promise;
+  }
+
+  getValues(): Config {
+    return config;
+  }
+}
+
+
+
+

As it was explained in previous steps, at first, there is a factory that uses the method loadExternalConfig(), this factory is going to be used in later steps in the module. After that, the loadExternalConfig() method checks if the Boolean in the environment is false. If it is false it will return the promise resolved with the normal config. Else, it is going to load the external config in the path (/config), and overwrite the values from the external config to the config that’s going to be used by the app, this is all returned in a promise.

+
+
+

3.- Create in the core a module for the config app/core/config/ a config.module.ts:

+
+
+
+
import { NgModule, APP_INITIALIZER } from '@angular/core';
+import { HttpClientModule } from '@angular/common/http';
+
+import { ConfigService } from './config.service';
+
+@NgModule({
+  imports: [HttpClientModule],
+  providers: [
+    ConfigService,
+    {
+      provide: APP_INITIALIZER,
+      useFactory: ConfigService.factory,
+      deps: [ConfigService],
+      multi: true,
+    },
+  ],
+})
+export class ConfigModule {}
+
+
+
+

As seen earlier, the ConfigService is added to the module. In this addition, the app is initialized(provide) and it uses the factory that was created in the ConfigService loading the config with or without the external values depending on the Boolean in the config.

+
+
+
+

Using the Config Service

+
+

As a first step, in the file /app/app.module.ts the ConfigModule created earlier in the other step is going to be imported:

+
+
+
+
  imports: [
+    ....
+    ConfigModule,
+    ....
+  ]
+
+
+
+

After that, the ConfigService is going to be injected into the app.component.ts

+
+
+
+
....
+import { ConfigService } from './core/config/config.service';
+....
+export class AppComponent {
+....
+  constructor(public configService: ConfigService) { }
+....
+
+
+
+

Finally, for this demonstration app, the component app/app.component.html is going to show the version of the config it is using at that moment.

+
+
+
+
<div style="text-align:center">
+  <h1>
+    Welcome to {{ title }}!
+  </h1>
+</div>
+<h2>Here is the configuration version that is using angular right now: {{configService.getValues().version}}</h2>
+
+
+
+
+

Final steps

+
+

The script start that was created earlier in the package.json (npm start) is going to be used to start the application. After that, modifying the Boolean loadExternalConfig inside the corresponding environment file inside /app/environments/ should show the different config versions.

+
+
+
+loadExternalConfigFalse +
+
+
+
+loadExternalConfigTrue +
+
+ +
+
+

Component Decomposition

+
+

When implementing a new requirement there are a few design decisions, which need to be considered. +A decomposition in Smart and Dumb Components should be done first. +This includes the definition of state and responsibilities. +Implementing a new dialog will most likely be done by defining a new Smart Component with multiple Dumb Component children.

+
+
+

In the component tree this would translate to the definition of a new sub-tree.

+
+
+
+Component Tree With Highlighted Sub Tree +
+
Figure 34. Component Tree with highlighted sub-tree
+
+
+
+

Defining Components

+
+

The following gives an example for component decomposition. +Shown is a screenshot from a style guide to be implemented. +It is a widget called Listpicker.

+
+
+

The basic function is an input field accepting direct input. +So typing otto puts otto inside the FormControl. +With arrow down key or by clicking the icon displayed in the inputs right edge a dropdown is opened. +Inside possible values can be selected and filtered beforehand. +After pressing arrow down key the focus should move into the filter input field. +Up and down arrow keys can be used to select an element from the list. +Typing into the filter input field filters the list from which the elements can be selected. +The current selected element is highlighted with green background color.

+
+
+
+Component Decomposition Example 1v2 +
+
Figure 35. Component decomposition example before
+
+
+

What should be done, is to define small reusable Dumb Components. +This way the complexity becomes manageable. +In the example every colored box describes a component with the purple box being a Smart Component.

+
+
+
+Component Decomposition Example 2v2 +
+
Figure 36. Component decomposition example after
+
+
+

This leads to the following component tree.

+
+
+
+Component Decomposition Example component tree +
+
Figure 37. Component decomposition example component tree
+
+
+

Note the uppermost component is a Dumb Component. +It is a wrapper for the label and the component to be displayed inside a form. +The Smart Component is Listpicker. +This way the widget can be reused without a form needed.

+
+
+

A widget is a typical Smart Component to be shared across feature modules. +So the SharedModule is the place for it to be defined.

+
+
+
+

Defining state

+
+

Every UI has state. +There are different kinds of state, for example

+
+
+
    +
  • +

    View State: e.g. is a panel open, a css transition pending, etc.

    +
  • +
  • +

    Application State: e.g. is a payment pending, current URL, user info, etc.

    +
  • +
  • +

    Business Data: e.g. products loaded from back-end

    +
  • +
+
+
+

It is good practice to base the component decomposition on the state handled by a component and to define a simplified state model beforehand. +Starting with the parent - the Smart Component:

+
+
+
    +
  • +

    What overall state does the dialog have: e.g. loading, error, valid data loaded, valid input, invalid input, etc. +Every defined value should correspond to an overall appearance of the whole dialog.

    +
  • +
  • +

    What events can occur to the dialog: e.g. submitting a form, changing a filter, pressing buttons, pressing keys, etc.

    +
  • +
+
+
+

For every Dumb Component:

+
+
+
    +
  • +

    What data does a component display: e.g. a header text, user information to be displayed, a loading flag, etc.
    +This will be a slice of the overall state of the parent Smart Component. +In general a Dumb Component presents a slice of its parent Smart Components state to the user.

    +
  • +
  • +

    What events can occur: keyboard events, mouse events, etc.
    +These events are all handled by its parent Smart Component - every event is passed up the tree to be handled by a Smart Component.

    +
  • +
+
+
+

This information should be reflected inside the modeled state. +The implementation is a TypeScript type - an interface or a class describing the model.

+
+
+

So there should be a type describing all state relevant for a Smart Component. +An instance of that type is sent down the component tree at runtime. +Not every Dumb Component will need the whole state. +For instance a single Dumb Component could only need a single string.

+
+
+

The state model for the previous Listpicker example is shown in the following listing.

+
+
+
Listing 32. Listpicker state model
+
+
export class ListpickerState {
+
+  items: {}[]|undefined;
+  columns = ['key', 'value'];
+  keyColumn = 'key';
+  displayValueColumn = 'value';
+  filteredItems: {}[]|undefined;
+  filter = '';
+  placeholder = '';
+  caseSensitive = true;
+  isDisabled = false;
+  isDropdownOpen = false;
+  selectedItem: {}|undefined;
+  displayValue = '';
+
+}
+
+
+
+

Listpicker holds an instance of ListpickerState which is passed down the component tree via @Input() bindings in the Dumb Components. +Events emitted by children - Dumb Components - create a new instance of ListpickerState based on the current instance and the event and its data. +So a state transition is just setting a new instance of ListpickerState. +Angular Bindings propagate the value down the tree after exchanging the state.

+
+
+
Listing 33. Listpicker State transition
+
+
export class ListpickerComponent {
+
+  // initial default values are set
+  state = new ListpickerState();
+
+  /** User changes filter */
+  onFilterChange(filter: string): void {
+    // apply filter ...
+    const filteredList = this.filterService.filter(...);
+
+    // important: A new instance is created, instead of altering the existing one.
+    //            This makes change detection easier and prevents hard to find bugs.
+    this.state = Object.assign({}, this.state, {
+      filteredItems: filteredList,
+      filter: filter
+    });
+  }
+
+}
+
+
+
+
Note:
+

It is not always necessary to define the model as independent type. +So there would be no state property and just properties for every state defined directly in the component class. +When complexity grows and state becomes larger this is usually a good idea. +If the state should be shared between Smart Components a store is to be used.

+
+
+
+

When are Dumb Components needed

+
+

Sometimes it is not necessary to perform a full decomposition. The architecture does not enforce it generally. What you should keep in mind is, that there is always a point when it becomes recommendable.

+
+
+

For example a template with 800 lines of code is:

+
+
+
    +
  • +

    not understandable

    +
  • +
  • +

    not maintainable

    +
  • +
  • +

    not testable

    +
  • +
  • +

    not reusable

    +
  • +
+
+
+

So when implementing a template with more than 50 lines of code you should think about decomposition.

+
+ +
+
+

Consuming REST services

+
+

A good introduction to working with Angular HttpClient can be found in Angular Docs

+
+
+

This guide will cover how to embed Angular HttpClient in the application architecture. +For back-end requests a special service with the suffix Adapter needs to be defined.

+
+
+
+

Defining Adapters

+
+

It is a good practice to have an Angular service whose single responsibility is to call the back-end and parse the received value to a transfer data model (e.g. Swagger generated TOs). +Those services need to have the suffix Adapter to make them easy to recognize.

+
+
+
+Adapters handle back-end communication +
+
Figure 38. Adapters handle back-end communication
+
+
+

As illustrated in the figure a Use Case service does not use Angular HttpClient directly but uses an adapter. +A basic adapter could look like this:

+
+
+
Listing 34. Example adapter
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Observable } from 'rxjs/Observable';
+
+import { FlightTo } from './flight-to';
+
+@Injectable({
+ providedIn: 'root',
+})
+export class FlightsAdapter {
+
+  constructor(
+    private httpClient: HttpClient
+  ) {}
+
+  getFlights(): Observable<FlightTo> {
+    return this.httpClient.get<FlightTo>('/relative/url/to/flights');
+  }
+
+}
+
+
+
+

The adapters should use a well-defined transfer data model. +This could be generated from server endpoints with CobiGen, Swagger, typescript-maven-plugin, etc. +If inside the application there is a business model defined, the adapter has to parse to the transfer model. +This is illustrated in the following listing.

+
+
+
Listing 35. Example adapter mapping from business model to transfer model
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Observable } from 'rxjs/Observable';
+import { map } from 'rxjs/operators';
+
+import { FlightTo } from './flight-to';
+import { Flight } from '../../../model/flight';
+
+@Injectable({
+ providedIn: 'root',
+})
+export class FlightsAdapter {
+
+  constructor(
+    private httpClient: HttpClient
+  ) {}
+
+  updateFlight(flight: Flight): Observable<Flight> {
+    const to = this.mapFlight(flight);
+
+    return this.httpClient.post<FlightTo>('/relative/url/to/flights', to).pipe(
+      map(to => this.mapFlightTo(to))
+    );
+  }
+
+  private mapFlight(flight: Flight): FlightTo {
+    // mapping logic
+  }
+
+  private mapFlightTo(flightTo: FlightTo): Flight {
+    // mapping logic
+  }
+
+}
+
+
+
+
+

Token management

+
+

In most cases the access to back-end API is secured using well known mechanisms as CSRF, JWT or both. In these cases the front-end application must manage the tokens that are generated when the user authenticates. More concretely it must store them to include them in every request automatically. Obviously, when user logs out these tokens must be removed from localStorage, memory, etc.

+
+
+
+

Store security token

+
+

In order to make this guide simple we are going to store the token in memory. Therefore, if we consider that we already have a login mechanism implemented, we would like to store the token using an auth.service.ts:

+
+
+
+
import { Injectable } from '@angular/core';
+import { Router } from '@angular/router';
+
+@Injectable({
+  providedIn: 'root',
+})
+export class AuthService {
+  private loggedIn = false;
+  private token: string;
+
+  constructor(public router: Router) {}
+
+  public isLogged(): boolean {
+    return this.loggedIn || false;
+  }
+
+  public setLogged(login: boolean): void {
+    this.loggedIn = login;
+  }
+
+  public getToken(): string {
+    return this.token;
+  }
+
+  public setToken(token: string): void {
+    this.token = token;
+  }
+}
+
+
+
+

Using the previous service we will be able to store the token obtained in the login request using the method setToken(token). Please consider that, if you want a more sophisticated approach using localStorage API, you will need to modify this service accordingly.

+
+
+
+

Include token in every request

+
+

Now that the token is available in the application it is necessary to include it in every request to a protected API endpoint. Instead of modifying all the HTTP requests in our application, Angular provides a class to intercept every request (and every response if we need to) called HttpInterceptor. Let’s create a service called http-request-interceptor.service.ts to implement the intercept method of this class:

+
+
+
+
import {
+  HttpEvent,
+  HttpHandler,
+  HttpInterceptor,
+  HttpRequest,
+} from '@angular/common/http';
+import { Injectable } from '@angular/core';
+import { Observable } from 'rxjs';
+import { environment } from '../../../environments/environment';
+import { AuthService } from './auth.service';
+
+@Injectable()
+export class HttpRequestInterceptorService implements HttpInterceptor {
+
+  constructor(private auth: AuthService) {}
+
+  intercept(
+    req: HttpRequest<any>,
+    next: HttpHandler,
+  ): Observable<HttpEvent<any>> {
+    // Get the auth header from the service.
+    const authHeader: string = this.auth.getToken();
+    if (authHeader) {
+      let authReq: HttpRequest<any>;
+
+      // CSRF
+      if (environment.security == 'csrf') {
+        authReq = req.clone({
+          withCredentials: true,
+          setHeaders: { 'x-csrf-token': authHeader },
+        });
+      }
+
+      // JWT
+      if (environment.security == 'jwt') {
+        authReq = req.clone({
+          setHeaders: { Authorization: authHeader },
+        });
+      }
+
+      return next.handle(authReq);
+    } else {
+      return next.handle(req);
+    }
+  }
+}
+
+
+
+

As you may notice, this service is making use of an environment field environment.security to determine if we are using JWT or CSRF in order to inject the token accordingly. In your application you can combine both if necessary.

+
+
+

Configure environment.ts file to use the CSRF/JWT.

+
+
+
+
security: 'csrf'
+
+
+
+

The authHeader used is obtained using the injected service AuthService already presented above.

+
+
+

In order to activate the interceptor we need to provide it in our app.module.ts or core.module.ts depending on the application structure. Let’s assume that we are using the latter and the interceptor file is inside a security folder:

+
+
+
+
...
+import { HttpRequestInterceptorService } from './security/http-request-interceptor.service';
+...
+
+@NgModule({
+  imports: [...],
+  exports: [...],
+  declarations: [],
+  providers: [
+    ...
+    {
+      provide: HTTP_INTERCEPTORS,
+      useClass: HttpRequestInterceptorService,
+      multi: true,
+    },
+  ],
+})
+export class CoreModule {}
+
+
+
+

Angular automatically will now modify every request and include in the header the token if it is convenient.

+
+ +
+
+

Error Handler in angular

+
+

Angular allows us to set up a custom error handler that can be used to control the different errors and handle them in a correct way. Using a global error handler will avoid mistakes and provide a user-friendly interface, allowing us to indicate to the user what problem is happening.

+
+
+
+

What is ErrorHandler

+
+

ErrorHandler is the class that Angular uses by default to control the errors. This means that, even if the application doesn’t have an ErrorHandler, it is going to use the one set up by default in Angular. This can be tested by trying to find a page not existing in any app; instantly Angular will print the error in the console.

+
+
+
+

Creating your custom ErrorHandler step by step

+
+

In order to create a custom ErrorHandler three steps are going to be needed:

+
+
+
+

Creating the custom ErrorHandler class

+
+

In this first step the custom ErrorHandler class is going to be created inside the folder /app/core/errors/errors-handler.ts:

+
+
+
+
import { ErrorHandler, Injectable, Injector } from '@angular/core';
+import { HttpErrorResponse } from '@angular/common/http';
+
+@Injectable()
+export class ErrorsHandler implements ErrorHandler {
+
+    constructor(private injector: Injector) {}
+
+    handleError(error: Error | HttpErrorResponse) {
+      //  To do: Use injector to get the necessary services to redirect or
+      // show a message to the user
+      const classname  = error.constructor.name;
+      switch ( classname )  {
+        case 'HttpErrorResponse':
+          console.error('HttpError:' + error.message);
+          if (!navigator.onLine) {
+            console.error('There is no internet connection');
+            // To do: control here in internet what you wanna do if user has no internet
+          } else {
+            console.error('Server Error:' + error.message);
+            // To do: control here if the server gave an error
+          }
+          break;
+        default:
+          console.error('Error:' + error.message);
+          // To do: control here if the client/other things gave an error
+      }
+    }
+}
+
+
+
+

This class can be used to control the different type of errors. If wanted, the classname variable could be used to add more switch cases. This would allow control of more specific situations.

+
+
+
+

Creating a ErrorInterceptor

+
+

Inside the same folder created in the last step we are going to create the ErrorInterceptor (errors-handler-interceptor.ts). This ErrorInterceptor is going to retry any failed calls to the server, to make sure the failure is not transient, before showing the error:

+
+
+
+
import { HttpInterceptor, HttpRequest, HttpHandler, HttpEvent } from '@angular/common/http';
+import { Injectable } from '@angular/core';
+import { Observable, of } from 'rxjs';
+import { retryWhen, delay, take, concatMap } from 'rxjs/operators';
+
+@Injectable()
+export class ErrorsHandlerInterceptor implements HttpInterceptor {
+
+    constructor() {}
+    intercept(req: HttpRequest<any>, next: HttpHandler): Observable<HttpEvent<any>> {
+        return next.handle(req).pipe(
+            retryWhen((errors: Observable<any>) => errors.pipe(
+                delay(500),
+                take(5),
+                concatMap((error: any, retryIndex: number) => {
+                    if (++retryIndex == 5) {
+                        throw error;
+                    }
+                    return of(error);
+                })
+            ))
+        );
+    }
+}
+
+
+
+

This custom-made interceptor implements HttpInterceptor. Inside the intercept method, using the pipe, retryWhen, delay, take and concatMap operators from RxJS, it is going to do the following things if there are errors:

+
+
+
    +
  1. +

    With delay(500) do a delay to allow some time in between requests

    +
  2. +
  3. +

    With take(5) retry five times.

    +
  4. +
  5. +

    With concatMap if the index that take() gives is not 5 it returns the error, else, it throws the error.

    +
  6. +
+
+
+
+

Creating a Error Module

+
+

Finally, creating a module(errors-handler.module.ts) is necessary to include the interceptor and the custom error handler. In this case, the module is going to be created in the same folder as the last two:

+
+
+
+
import { NgModule, ErrorHandler } from '@angular/core';
+import { CommonModule } from '@angular/common';
+import { ErrorsHandler } from './errors-handler';
+import { HTTP_INTERCEPTORS } from '@angular/common/http';
+import { ErrorsHandlerInterceptor } from './errors-handler-interceptor';
+
+@NgModule({
+  declarations: [], // Declare here component if you want to use routing to error component
+  imports: [
+    CommonModule
+  ],
+  providers: [
+    {
+      provide: ErrorHandler,
+      useClass: ErrorsHandler,
+    },
+    {
+      provide: HTTP_INTERCEPTORS,
+      useClass: ErrorsHandlerInterceptor,
+      multi: true,
+    }
+  ]
+})
+export class ErrorsHandlerModule { }
+
+
+
+

This module simply is providing the services that are implemented by our custom classes and then telling angular to use our custom made classes instead of the default ones. After doing this, the module has to be included in the app module app.module.ts in order to be used.

+
+
+
+
....
+  imports: [
+    ErrorsHandlerModule,
+    ....
+
+
+
+
+

Handling Errors

+
+

As a final step, handling these errors is necessary. There are different ways that can be used to control the errors, here are a few:

+
+
+
    +
  • +

    Creating a custom page and using with Router to redirect to a page showing an error.

    +
  • +
  • +

    Creating a service in the server side or Backend to create a log with the error and calling it with HttpClient.

    +
  • +
  • +

    Showing a custom made SnackBar with the error message.

    +
  • +
+
+
+
+

Using SnackBarService and NgZone

+
+

If the SnackBar is used directly, some errors can occur; this is due to the SnackBar being outside the Angular zone. In order to use this service properly, NgZone is necessary. The method run() from NgZone will allow the service to run inside the Angular zone. An example on how to use it:

+
+
+
+
import { ErrorHandler, Injectable, Injector, NgZone } from '@angular/core';
+import { HttpErrorResponse } from '@angular/common/http';
+import { MatSnackBar } from '@angular/material';
+
+@Injectable()
+export class ErrorsHandler implements ErrorHandler {
+
+    constructor(private injector: Injector, private zone: NgZone) {}
+
+    handleError(error: Error | HttpErrorResponse) {
+      // Use injector to get the necessary services to redirect or
+      const snackBar: MatSnackBar = this.injector.get(MatSnackBar);
+      const classname  = error.constructor.name;
+      let message: string;
+      switch ( classname )  {
+        case 'HttpErrorResponse':
+          message = !(navigator.onLine) ? 'There is no internet connection' : error.message;
+          break;
+        default:
+          message = error.message;
+      }
+      this.zone.run(
+        () => snackBar.open(message, 'danger', { duration : 4000})
+      );
+    }
+}
+
+
+
+

Using Injector the MatSnackBar is obtained, then the correct message is obtained inside the switch. Finally, using NgZone and run(), we open the SnackBar passing the message, and the parameters wanted.

+
+
+

You can find a working example of this guide in devon4ts-samples.

+
+ +
+
+

File Structure

+ +
+
+

Top-level

+
+

The top-level file structure is defined by Angular CLI. You might put this "top-level file structure" into a sub-directory to facilitate your build, but this is not relevant for this guide. So the applications file structure relevant to this guide is the folder /src/app inside the part managed by Angular CLI.

+
+
+
Listing 36. Top-level file structure shows feature modules
+
+
    /src
+    └── /app
+        ├── /account-management
+        ├── /billing
+        ├── /booking
+        ├── /core
+        ├── /shared
+        ├── /status
+        |
+        ├── app.module.ts
+        ├── app.component.spec.ts
+        ├── app.component.ts
+        └── app.routing-module.ts
+
+
+
+

Besides the definition of app module the app folder has feature modules on top-level. +The special modules shared and core are present as well.

+
+
+
+

Feature Modules

+
+

A feature module contains the modules definition and two folders representing both layers.

+
+
+
Listing 37. Feature module file structure has both layers
+
+
    /src
+    └── /app
+        └── /account-management
+            ├── /components
+            ├── /services
+            |
+            ├── account-management.module.ts
+            ├── account-management.component.spec.ts
+            ├── account-management.component.ts
+            └── account-management.routing-module.ts
+
+
+
+

Additionally an entry component is possible. This would be the case in lazy loading scenarios. +So account-management.component.ts would be only present if account-management is lazy loaded. +Otherwise, the module’s routes would be defined Component-less +(see vsavkin blog post).

+
+
+
+

Components Layer

+
+

The component layer reflects the distinction between Smart Components and Dumb Components.

+
+
+
Listing 38. Components layer file structure shows Smart Components on top-level
+
+
    /src
+    └── /app
+        └── /account-management
+            └── /components
+                ├── /account-overview
+                ├── /confirm-modal
+                ├── /create-account
+                ├── /forgot-password
+                └── /shared
+
+
+
+

Every folder inside the /components folder represents a smart component. The only exception is /shared. +/shared contains Dumb Components shared across Smart Components inside the components layer.

+
+
+
Listing 39. Smart components contain Dumb components
+
+
    /src
+    └── /app
+        └── /account-management
+            └── /components
+                └── /account-overview
+                    ├── /user-info-panel
+                    |   ├── /address-tab
+                    |   ├── /last-activities-tab
+                    |   |
+                    |   ├── user-info-panel.component.html
+                    |   ├── user-info-panel.component.scss
+                    |   ├── user-info-panel.component.spec.ts
+                    |   └── user-info-panel.component.ts
+                    |
+                    ├── /user-header
+                    ├── /user-toolbar
+                    |
+                    ├── account-overview.component.html
+                    ├── account-overview.component.scss
+                    ├── account-overview.component.spec.ts
+                    └── account-overview.component.ts
+
+
+
+

Inside the folder of a Smart Component the component is defined. +Besides that are folders containing the Dumb Components the Smart Component consists of. +This can be recursive - a Dumb Component can consist of other Dumb Components. +This is reflected by the file structure as well. This way the structure of a view becomes very readable. +As mentioned before, if a Dumb Component is used by multiple Smart Components inside the components layer +it is put inside the /shared folder inside the components layer.

+
+
+

With this way of thinking the shared module makes a lot of sense. If a Dumb Component is used by multiple Smart Components +from different feature modules, the Dumb Component is placed into the shared module.

+
+
+
Listing 40. The shared module contains Dumb Components shared across Smart Components from different feature modules
+
+
    /src
+    └── /app
+        └── /shared
+            └── /user-panel
+                |
+                ├── user-panel.component.html
+                ├── user-panel.component.scss
+                ├── user-panel.component.spec.ts
+                └── user-panel.component.ts
+
+
+
+

The layer folder /components is not necessary inside the shared module. +The shared module only contains components!

+
+ +
+
+

Internationalization

+
+

Nowadays, a common scenario in front-end applications is to have the ability to translate labels and localize numbers, dates, currency and so on when the user clicks over a language selector or similar. devon4ng and specifically Angular has a default mechanism in order to fill the gap of such features, and besides there are some widely used libraries that make it even easier to translate applications.

+
+ +
+
+

devon4ng i18n approach

+
+

The official approach could be a bit complicated, therefore the recommended one is to use the library Transloco from https://github.com/ngneat/transloco/.

+
+
+
+

Install and configure Transloco

+
+

In order to include this library in your devon4ng Angular >= 7.2 project you will need to execute in a terminal:

+
+
+
+
$ ng add @ngneat/transloco
+
+
+
+

As part of the installation process you’ll be presented with questions; Once you answer them, everything you need will automatically be created for you.

+
+
+
    +
  • +

    First, Transloco creates boilerplate files for the requested translations.

    +
  • +
  • +

    Next, it will create a new file, transloco-root.module.ts which exposes an Angular’s module with a default configuration, and inject it into the AppModule.

    +
  • +
+
+
+
+
import { HttpClient } from '@angular/common/http';
+import {
+  TRANSLOCO_LOADER,
+  Translation,
+  TranslocoLoader,
+  TRANSLOCO_CONFIG,
+  translocoConfig,
+  TranslocoModule
+} from '@ngneat/transloco';
+import { Injectable, NgModule } from '@angular/core';
+import { environment } from '../environments/environment';
+
+@Injectable({ providedIn: 'root' })
+export class TranslocoHttpLoader implements TranslocoLoader {
+  constructor(private http: HttpClient) {}
+
+  getTranslation(lang: string) {
+    return this.http.get<Translation>(`/assets/i18n/${lang}.json`);
+  }
+}
+
+@NgModule({
+  exports: [ TranslocoModule ],
+  providers: [
+    {
+      provide: TRANSLOCO_CONFIG,
+      useValue: translocoConfig({
+        availableLangs: ['en', 'es'],
+        defaultLang: 'en',
+        // Remove this option if your application doesn't support changing language in runtime.
+        reRenderOnLangChange: true,
+        prodMode: environment.production,
+      })
+    },
+    { provide: TRANSLOCO_LOADER, useClass: TranslocoHttpLoader }
+  ]
+})
+export class TranslocoRootModule {}
+
+
+
+ + + + + +
+ + +As you might have noticed it also set an HttpLoader into the module’s providers. The HttpLoader is a class that implements the TranslocoLoader interface. It’s responsible for instructing Transloco how to load the translation files. It uses Angular HTTP client to fetch the files, based on the given path. +
+
+
+
+

Usage

+
+

In order to translate any label in any HTML template you will need to use the transloco pipe available:

+
+
+
+
{{ 'HELLO' | transloco }}
+
+
+
+

An optional parameter from the component TypeScript class could be included as follows:

+
+
+
+
{{ 'HELLO' | transloco: { value: dynamic } }}
+
+
+
+

It is possible to use with inputs:

+
+
+
+
<span [attr.alt]="'hello' | transloco">Attribute</span>
+<span [title]="'hello' | transloco">Property</span>
+
+
+
+

In order to change the language used you will need to create a button or selector that calls the this.translocoService.setActiveLang(lang: string) method from TranslocoService. For example:

+
+
+
+
export class AppComponent {
+  constructor(private translocoService: TranslocoService) {}
+
+  changeLanguage(lang) {
+      this.translocoService.setActiveLang(lang);
+  }
+}
+
+
+
+

The translations will be included in the en.json, es.json, de.json, etc. files inside the /assets/i18n folder. For example en.json would be (using the previous parameter):

+
+
+
+
{
+    "HELLO": "hello"
+}
+
+
+
+

Or with an optional parameter:

+
+
+
+
{
+    "HELLO": "hello {{value}}"
+}
+
+
+
+

Transloco understands nested JSON objects. This means that you can have a translation that looks like this:

+
+
+
+
{
+    "HOME": {
+        "HELLO": "hello {{value}}"
+    }
+}
+
+
+
+

In order to access the value, use the dot notation, in this case HOME.HELLO.

+
+
+
+

Using the service, pipe or directive

+ +
+
+

== Structural Directive

+
+

Using a structural directive is the recommended approach. It’s DRY and efficient, as it creates one subscription per template:

+
+
+
+
<ng-container *transloco="let t">
+  <p>{{ t('title') }}</p>
+
+  <comp [title]="t('title')"></comp>
+</ng-container>
+
+
+
+

Note that the t function is memoized. It means that given the same key it will return the result directly from the cache.

+
+
+

We can pass a params object as the second parameter:

+
+
+
+
<ng-container *transloco="let t">
+  <p>{{ t('name', { name: 'Transloco' }) }}</p>
+</ng-container>
+
+
+
+

We can instruct the directive to use a different language in our template:

+
+
+
+
<ng-container *transloco="let t; lang: 'es'">
+  <p>{{ t('title') }}</p>
+</ng-container>
+
+
+
+
+

== Pipe

+
+

The use of pipes can be possible too:

+
+
+

template:

+
+
+
+
<div>{{ 'HELLO' | transloco:param }}</div>
+
+
+
+

component:

+
+
+
+
param = {value: 'world'};
+
+
+
+
+

== Attribute Directive

+
+

The last option available with transloco is the attribute directive:

+
+
+
+
<div transloco="HELLO" [translocoParams]="{ value: 'world' }"></div>
+
+
+
+
+

== Service

+
+

If you need to access translations in any component or service you can do it injecting the TranslocoService into them:

+
+
+
+
// Sync translation
+translocoService.translate('HELLO', {value: 'world'});
+
+// Async translation
+translocoService.selectTranslate('HELLO', { value: 'world' }).subscribe(res => {
+    console.log(res);
+    //=> 'hello world'
+});
+
+
+
+ + + + + +
+ + +You can find a complete example at https://github.com/devonfw/devon4ng-application-template. +
+
+
+

Please, visit https://github.com/ngneat/transloco/ for more info.

+
+ +
+
+

Routing

+
+

A basic introduction to the Angular Router can be found in Angular Docs.

+
+
+

This guide will show common tasks and best practices.

+
+
+
+

Defining Routes

+
+

For each feature module and the app module all routes should be defined in a separate module with the suffix RoutingModule. +This way the routing modules are the only place where routes are defined. +This pattern achieves a clear separation of concerns. +The following figure illustrates this.

+
+
+
+Routing module declaration +
+
Figure 39. Routing module declaration
+
+
+

It is important to define routes inside app routing module with .forRoot() and in feature routing modules with .forChild().

+
+
+
+

Example 1 - No Lazy Loading

+
+

In this example two modules need to be configured with routes - AppModule and FlightModule.

+
+
+

The following routes will be configured

+
+
+
    +
  • +

    / will redirect to /search

    +
  • +
  • +

    /search displays FlightSearchComponent (FlightModule)

    +
  • +
  • +

    /search/print/:flightId/:date displays FlightPrintComponent (FlightModule)

    +
  • +
  • +

    /search/details/:flightId/:date displays FlightDetailsComponent (FlightModule)

    +
  • +
  • +

    All other routes will display ErrorPage404 (AppModule)

    +
  • +
+
+
+
Listing 41. app-routing.module.ts
+
+
const routes: Routes = [
+  { path: '', redirectTo: 'search', pathMatch: 'full' },
+  { path: '**', component: ErrorPage404 }
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule]
+})
+export class AppRoutingModule { }
+
+
+
+
Listing 42. flight-search-routing.module.ts
+
+
const routes: Routes = [
+  {
+    path: 'search', children: [
+      { path: '', component: FlightSearchComponent },
+      { path: 'print/:flightId/:date', component: FlightPrintComponent },
+      { path: 'details/:flightId/:date', component: FlightDetailsComponent }
+    ]
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule],
+})
+export class FlightSearchRoutingModule { }
+
+
+
+ + + + + +
+ + +The import order inside AppModule is important. +AppRoutingModule needs to be imported after FlightModule. +
+
+
+
+

Example 2 - Lazy Loading

+
+

Lazy Loading is a good practice when the application has multiple feature areas and a user might not visit every dialog. +Or at least he might not need every dialog up front.

+
+
+

The following example will configure the same routes as example 1 but will lazy load FlightModule.

+
+
+
Listing 43. app-routing.module.ts
+
+
const routes: Routes = [
+  { path: 'search', loadChildren: 'app/flight-search/flight-search.module#FlightSearchModule' },
+  { path: '**', component: ErrorPage404 }
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule]
+})
+export class AppRoutingModule { }
+
+
+
+
Listing 44. flight-search-routing.module.ts
+
+
const routes: Routes = [
+  {
+    path: '', children: [
+      { path: '', component: FlightSearchComponent },
+      { path: 'print/:flightId/:date', component: FlightPrintComponent },
+      { path: 'details/:flightId/:date', component: FlightDetailsComponent }
+    ]
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule],
+})
+export class FlightSearchRoutingModule { }
+
+
+
+
+

Triggering Route Changes

+
+

With Angular you have two ways of triggering route changes.

+
+
+
    +
  1. +

    Declarative with bindings in component HTML templates

    +
  2. +
  3. +

    Programmatic with Angular Router service inside component classes

    +
  4. +
+
+
+

On the one hand, architecture-wise it is a much cleaner solution to trigger route changes in Smart Components. +This way you have every UI event that should trigger a navigation handled in one place - in a Smart Component. +It becomes very easy to look inside the code for every navigation, that can occur. +Refactoring is also much easier, as there are no navigation events "hidden" in the HTML templates

+
+
+

On the other hand, in terms of accessibility and SEO it is a better solution to rely on bindings in the view - e.g. by using Angular router-link directive. +This way screen readers and the Google crawler can move through the page easily.

+
+
+ + + + + +
+ + +If you do not have to support accessibility (screen readers, etc.) and to care about SEO (Google rank, etc.), +then you should aim for triggering navigation only in Smart Components. +
+
+
+
+Triggering navigation +
+
Figure 40. Triggering navigation
+
+
+
+

Guards

+
+

Guards are Angular services implemented on routes which determines whether a user can navigate to/from the route. There are examples below which will explain things better. We have the following types of Guards:

+
+
+
    +
  • +

    CanActivate: It is used to determine whether a user can visit a route. The most common scenario for this guard is to check if the user is authenticated. For example, if we want only logged in users to be able to go to a particular route, we will implement the CanActivate guard on this route.

    +
  • +
  • +

    CanActivateChild: Same as above, only implemented on child routes.

    +
  • +
  • +

    CanDeactivate: It is used to determine if a user can navigate away from a route. Most common example is when a user tries to go to a different page after filling up a form and does not save/submit the changes, we can use this guard to confirm whether the user really wants to leave the page without saving/submitting.

    +
  • +
  • +

    Resolve: For resolving dynamic data.

    +
  • +
  • +

    CanLoad: It is used to determine whether an Angular module can be loaded lazily. Example below will be helpful to understand it.

    +
  • +
+
+
+

Let’s have a look at some examples.

+
+
+
+

Example 1 - CanActivate and CanActivateChild guards

+ +
+
+

== CanActivate guard

+
+

As mentioned earlier, a guard is an Angular service and services are simply TypeScript classes. So we begin by creating a class. This class has to implement the CanActivate interface (imported from angular/router), and therefore, must have a canActivate function. The logic of this function determines whether the requested route can be navigated to or not. It returns either a Boolean value or an Observable or a Promise which resolves to a Boolean value. If it is true, the route is loaded, else not.

+
+
+
Listing 45. CanActivate example
+
+
...
+import {CanActivate} from "@angular/router";
+
+@Injectable()
+class ExampleAuthGuard implements CanActivate {
+  constructor(private authService: AuthService) {}
+
+  canActivate(route: ActivatedRouteSnapshot, state: RouterStateSnapshot) {
+	if (this.authService.isLoggedIn()) {
+      return true;
+    } else {
+	  window.alert('Please log in first');
+      return false;
+    }
+  }
+}
+
+
+
+

In the above example, let’s assume we have a AuthService which has a isLoggedIn() method which returns a Boolean value depending on whether the user is logged in. We use it to return true or false from the canActivate function. +The canActivate function accepts two parameters (provided by Angular). The first parameter of type ActivatedRouteSnapshot is the snapshot of the route the user is trying to navigate to (where the guard is implemented); we can extract the route parameters from this instance. The second parameter of type RouterStateSnapshot is a snapshot of the router state the user is trying to navigate to; we can fetch the URL from its url property.

+
+
+ + + + + +
+ + +We can also redirect the user to another page (maybe a login page) if the authService returns false. To do that, inject Router and use its navigate function to redirect to the appropriate page. +
+
+
+

Since it is a service, it needs to be provided in our module:

+
+
+
Listing 46. provide the guard in a module
+
+
@NgModule({
+  ...
+  providers: [
+    ...
+    ExampleAuthGuard
+  ]
+})
+
+
+
+

Now this guard is ready to use on our routes. We implement it where we define our array of routes in the application:

+
+
+
Listing 47. Implementing the guard
+
+
...
+const routes: Routes = [
+  { path: '', redirectTo: 'home', pathMatch: 'full' },
+  { path: 'home', component: HomeComponent },
+  { path: 'page1', component: Page1Component, canActivate: [ExampleAuthGuard] }
+];
+
+
+
+

As you can see, the canActivate property accepts an array of guards. So we can implement more than one guard on a route.

+
+
+
+

== CanActivateChild guard

+
+

To use the guard on nested (children) routes, we add it to the canActivateChild property like so:

+
+
+
Listing 48. Implementing the guard on child routes
+
+
...
+const routes: Routes = [
+  { path: '', redirectTo: 'home', pathMatch: 'full' },
+  { path: 'home', component: HomeComponent },
+  { path: 'page1', component: Page1Component, canActivateChild: [ExampleAuthGuard], children: [
+	{path: 'sub-page1', component: SubPageComponent},
+    {path: 'sub-page2', component: SubPageComponent}
+  ] }
+];
+
+
+
+
+

Example 2 - CanLoad guard

+
+

Similar to CanActivate, to use this guard we implement the CanLoad interface and override its canLoad function. Again, this function returns either a Boolean value or an Observable or a Promise which resolves to a Boolean value. The fundamental difference between CanActivate and CanLoad is that CanLoad is used to determine whether an entire module can be lazily loaded or not. If the guard returns false for a module protected by CanLoad, the entire module is not loaded.

+
+
+
Listing 49. CanLoad example
+
+
...
+import {CanLoad, Route} from "@angular/router";
+
+@Injectable()
+class ExampleCanLoadGuard implements CanLoad {
+  constructor(private authService: AuthService) {}
+
+  canLoad(route: Route) {
+	if (this.authService.isLoggedIn()) {
+      return true;
+    } else {
+	  window.alert('Please log in first');
+      return false;
+    }
+  }
+}
+
+
+
+

Again, let’s assume we have a AuthService which has a isLoggedIn() method which returns a Boolean value depending on whether the user is logged in. The canLoad function accepts a parameter of type Route which we can use to fetch the path a user is trying to navigate to (using the path property of Route).

+
+
+

This guard needs to be provided in our module like any other service.

+
+
+

To implement the guard, we use the canLoad property:

+
+
+
Listing 50. Implementing the guard
+
+
...
+const routes: Routes = [
+  { path: 'home', component: HomeComponent },
+  { path: 'admin', loadChildren: 'app/admin/admin.module#AdminModule', canLoad: [ExampleCanLoadGuard] }
+];
+
+
+ +
+
+

Testing

+
+

This guide will cover the basics of testing logic inside your code with unit test cases. +The guide assumes that you are familiar with Angular CLI (see the guide)

+
+
+

For testing your Angular application with unit test cases there are two main strategies:

+
+
+
    +
  1. +

    Isolated unit test cases
    +Isolated unit tests examine an instance of a class all by itself without any dependence on Angular or any injected values. +The amount of code and effort needed to create such tests in minimal.

    +
  2. +
  3. +

    Angular Testing Utilities
    +Let you test components including their interaction with Angular. +The amount of code and effort needed to create such tests is a little higher.

    +
  4. +
+
+
+
+

Testing Concept

+
+

The following figure shows you an overview of the application architecture divided in testing areas.

+
+
+
+Testing Areas +
+
Figure 41. Testing Areas
+
+
+

There are three areas, which need to be covered by different testing strategies.

+
+
+
    +
  1. +

    Components:
    +Smart Components need to be tested because they contain view logic. +Also the interaction with 3rd party components needs to be tested. +When a 3rd party component changes with an upgrade a test will be failing and warn you, that there is something wrong with the new version. +Most of the time Dumb Components do not need to be tested because they mainly display data and do not contain any logic. +Smart Components are always tested with Angular Testing Utilities. +For example selectors, which select data from the store and transform it further, need to be tested.

    +
  2. +
  3. +

    Stores:
    +A store contains methods representing state transitions. +If these methods contain logic, they need to be tested. +Stores are always tested using Isolated unit tests.

    +
  4. +
  5. +

    Services:
    +Services contain Business Logic, which needs to be tested. +UseCase Services represent a whole business use case. +For instance this could be initializing a store with all the data that is needed for a dialog - loading, transforming, storing. +Often Angular Testing Utilities are the optimal solution for testing UseCase Services, because they allow for an easy stubbing of the back-end. +All other services should be tested with Isolated unit tests as they are much easier to write and maintain.

    +
  6. +
+
+
+
+

Testing Smart Components

+
+

Testing Smart Components should assure the following.

+
+
+
    +
  1. +

    Bindings are correct.

    +
  2. +
  3. +

    Selectors which load data from the store are correct.

    +
  4. +
  5. +

    Asynchronous behavior is correct (loading state, error state, "normal" state).

    +
  6. +
  7. +

    Oftentimes through testing one realizes, that important edge cases are forgotten.

    +
  8. +
  9. +

    Do these test become very complex, it is often an indicator for poor code quality in the component. +Then the implementation is to be adjusted / refactored.

    +
  10. +
  11. +

    When testing values received from the native DOM, you will test also that 3rd party libraries did not change with a version upgrade. +A failing test will show you what part of a 3rd party library has changed. +This is much better than the users doing this for you. +For example a binding might fail because the property name was changed with a newer version of a 3rd party library.

    +
  12. +
+
+
+

In the function beforeEach() the TestBed imported from Angular Testing Utilities needs to be initialized. +The goal should be to define a minimal test-module with TestBed. +The following code gives you an example.

+
+
+
Listing 51. Example test setup for Smart Components
+
+
describe('PrintFlightComponent', () => {
+
+  let fixture: ComponentFixture<PrintFlightComponent>;
+  let store: FlightStore;
+  let printServiceSpy: jasmine.SpyObj<FlightPrintService>;
+
+  beforeEach(() => {
+    const urlParam = '1337';
+    const activatedRouteStub = { params: of({ id: urlParam }) };
+    printServiceSpy = jasmine.createSpyObj('FlightPrintService', ['initializePrintDialog']);
+    TestBed.configureTestingModule({
+      imports: [
+        TranslateModule.forRoot(),
+        RouterTestingModule
+      ],
+      declarations: [
+        PrintFlightComponent,
+        PrintContentComponent,
+        GeneralInformationPrintPanelComponent,
+        PassengersPrintPanelComponent
+      ],
+      providers: [
+        FlightStore,
+        {provide: FlightPrintService, useValue: printServiceSpy},
+        {provide: ActivatedRoute, useValue: activatedRouteStub}
+      ]
+    });
+    fixture = TestBed.createComponent(PrintFlightComponent);
+    store = fixture.debugElement.injector.get(FlightStore);
+    fixture.detectChanges();
+  });
+
+  // ... test cases
+})
+
+
+
+

It is important:

+
+
+
    +
  • +

    Use RouterTestingModule instead of RouterModule

    +
  • +
  • +

    Use TranslateModule.forRoot() without translations +This way you can test language-neutral without translation marks.

    +
  • +
  • +

    Do not add a whole module from your application - in declarations add the tested Smart Component with all its Dumb Components

    +
  • +
  • +

    The store should never be stubbed. +If you need a complex test setup, just use the regular methods defined on the store.

    +
  • +
  • +

    Stub all services used by the Smart Component. +These are mostly UseCase services. +They should not be tested by these tests. +Only the correct call to their functions should be assured. +The logic inside the UseCase services is tested with separate tests.

    +
  • +
  • +

    detectChanges() performs an Angular Change Detection cycle (Angular refreshes all the bindings present in the view)

    +
  • +
  • +

    tick() performs a virtual macro task, tick(1000) is equal to the virtual passing of 1s.

    +
  • +
+
+
+

The following test cases show the testing strategy in action.

+
+
+
Listing 52. Example
+
+
it('calls initializePrintDialog for url parameter 1337', fakeAsync(() => {
+  expect(printServiceSpy.initializePrintDialog).toHaveBeenCalledWith(1337);
+}));
+
+it('creates correct loading subtitle', fakeAsync(() => {
+  store.setPrintStateLoading(123);
+  tick();
+  fixture.detectChanges();
+
+  const subtitle = fixture.debugElement.query(By.css('app-header-element .print-header-container span:last-child'));
+  expect(subtitle.nativeElement.textContent).toBe('PRINT_HEADER.FLIGHT STATE.IS_LOADING');
+}));
+
+it('creates correct subtitle for loaded flight', fakeAsync(() => {
+  store.setPrintStateLoadedSuccess({
+    id: 123,
+    description: 'Description',
+    iata: 'FRA',
+    name: 'Frankfurt',
+    // ...
+  });
+  tick();
+  fixture.detectChanges();
+
+  const subtitle = fixture.debugElement.query(By.css('app-header-element .print-header-container span:last-child'));
+  expect(subtitle.nativeElement.textContent).toBe('PRINT_HEADER.FLIGHT "FRA (Frankfurt)" (ID: 123)');
+}));
+
+
+
+

The examples show the basic testing method

+
+
+
    +
  • +

    Set the store to a well-defined state

    +
  • +
  • +

    check if the component displays the correct values

    +
  • +
  • +

    …​ via checking values inside the native DOM.

    +
  • +
+
+
+
+

Testing state transitions performed by stores

+
+

Stores are always tested with Isolated unit tests.

+
+
+

Actions triggered by dispatchAction() calls are asynchronously performed to alter the state. +A good solution to test such a state transition is to use the done callback from Jasmine.

+
+
+
Listing 53. Example for testing a store
+
+
let sut: FlightStore;
+
+beforeEach(() => {
+  sut = new FlightStore();
+});
+
+it('setPrintStateLoading sets print state to loading', (done: Function) => {
+  sut.setPrintStateLoading(4711);
+
+  sut.state$.pipe(first()).subscribe(result => {
+    expect(result.print.isLoading).toBe(true);
+    expect(result.print.loadingId).toBe(4711);
+    done();
+  });
+});
+
+it('toggleRowChecked adds flight with given id to selectedValues Property', (done: Function) => {
+  const flight: FlightTO = {
+    id: 12
+    // dummy data
+  };
+  sut.setRegisterabgleichListe([flight]);
+  sut.toggleRowChecked(12);
+
+  sut.state$.pipe(first()).subscribe(result => {
+    expect(result.selectedValues).toContain(flight);
+    done();
+  });
+});
+
+
+
+
+

Testing services

+
+

When testing services both strategies - Isolated unit tests and Angular Testing Utilities - are valid options.

+
+
+

The goal of such tests are

+
+
+
    +
  • +

    assuring the behavior for valid data.

    +
  • +
  • +

    assuring the behavior for invalid data.

    +
  • +
  • +

    documenting functionality

    +
  • +
  • +

    safely performing refactoring

    +
  • +
  • +

    thinking about edge case behavior while testing

    +
  • +
+
+
+

For simple services Isolated unit tests can be written. +Writing these tests takes less effort and they can be written very quickly.

+
+
+

The following listing gives an example of such tests.

+
+
+
Listing 54. Testing a simple services with Isolated unit tests
+
+
let sut: IsyDatePipe;
+
+beforeEach(() => {
+  sut = new IsyDatePipe();
+});
+
+it('transform should return empty string if input value is empty', () => {
+  expect(sut.transform('')).toBe('');
+});
+
+it('transform should return empty string if input value is null', () => {
+  expect(sut.transform(undefined)).toBe('');
+});
+
+// ...more tests
+
+
+
+

For testing Use Case services the Angular Testing Utilities should be used. +The following listing gives an example.

+
+
+
Listing 55. Test setup for testing use case services with Angular Testing Utilities
+
+
let sut: FlightPrintService;
+let store: FlightStore;
+let httpController: HttpTestingController;
+let flightCalculationServiceStub: jasmine.SpyObj<FlightCalculationService>;
+const flight: FlightTo = {
+  // ... valid dummy data
+};
+
+beforeEach(() => {
+  flightCalculationServiceStub = jasmine.createSpyObj('FlightCalculationService', ['getFlightType']);
+  flightCalculationServiceStub.getFlightType.and.callFake((catalog: string, type: string, key: string) => of(`${key}_long`));
+  TestBed.configureTestingModule({
+    imports: [
+      HttpClientTestingModule,
+      RouterTestingModule,
+    ],
+    providers: [
+      FlightPrintService,
+      FlightStore,
+      FlightAdapter,
+      {provide: FlightCalculationService, useValue: flightCalculationServiceStub}
+    ]
+  });
+
+  sut = TestBed.get(FlightPrintService);
+  store = TestBed.get(FlightStore);
+  httpController = TestBed.get(HttpTestingController);
+});
+
+
+
+

When using TestBed, it is important

+
+
+
    +
  • +

    to import HttpClientTestingModule for stubbing the back-end

    +
  • +
  • +

    to import RouterTestingModule for stubbing the Angular router

    +
  • +
  • +

    not to stub stores, adapters and business services

    +
  • +
  • +

    to stub services from libraries like FlightCalculationService - the correct implementation of libraries should not be tested by these tests.

    +
  • +
+
+
+

Testing back-end communication looks like this:

+
+
+
Listing 56. Testing back-end communication with Angular HttpTestingController
+
+
it('loads flight if not present in store', fakeAsync(() => {
+  sut.initializePrintDialog(1337);
+  const processRequest = httpController.expectOne('/path/to/flight');
+  processRequest.flush(flight);
+
+  httpController.verify();
+}));
+
+it('does not load flight if present in store', fakeAsync(() => {
+  const storedFlight = {...flight, id: 4711};
+  store.setRegisterabgleich(storedFlight);
+
+  sut.initializePrintDialog(4711);
+  httpController.expectNone('/path/to/flight');
+
+  httpController.verify();
+}));
+
+
+
+

The first test assures a correct XHR request is performed if initializePrintDialog() is called and no data is in the store. +The second test assures no XHR request is performed if the needed data is already in the store.

+
+
+

The next steps are checks for the correct implementation of logic.

+
+
+
Listing 57. Example testing a Use Case service
+
+
it('creates flight destination for valid key in svz', fakeAsync(() => {
+  const flightTo: FlightTo = {
+    ...flight,
+    id: 4712,
+    profile: '77'
+  };
+  store.setFlight(flightTo);
+  let result: FlightPrintContent|undefined;
+
+  sut.initializePrintDialog(4712);
+  store.select(s => s.print.content).subscribe(content => result = content);
+  tick();
+
+  expect(result!.destination).toBe('77_long (ID: 77)');
+}));
+
+
+ +
+
+

Update Angular CLI

+ +
+
+

Angular CLI common issues

+
+

There are constant updates for the official Angular framework dependencies. These dependencies are directly related with the Angular CLI package. Since this package comes installed by default inside the devonfw distribution folder for Windows OS and the distribution is updated every few months it needs to be updated in order to avoid known issues.

+
+
+
+

Angular CLI update guide

+
+

For Linux users it is as easy as updating the global package:

+
+
+
+
$ npm uninstall -g @angular/cli
+$ npm install -g @angular/cli
+
+
+
+

For Windows users the process is only a bit harder. Open the devonfw bundled console and do as follows:

+
+
+
+
$ cd [devonfw_dist_folder]
+$ cd software/nodejs
+$ npm uninstall @angular/cli --no-save
+$ npm install @angular/cli --no-save
+
+
+
+

After following these steps you should have the latest Angular CLI version installed in your system. In order to check it run in the distribution console:

+
+
+ + + + + +
+ + +At the time of this writing, the Angular CLI is at 1.7.4 version. +
+
+
+
+
λ ng version
+
+     _                      _                 ____ _     ___
+    / \   _ __   __ _ _   _| | __ _ _ __     / ___| |   |_ _|
+   / △ \ | '_ \ / _` | | | | |/ _` | '__|   | |   | |    | |
+  / ___ \| | | | (_| | |_| | | (_| | |      | |___| |___ | |
+ /_/   \_\_| |_|\__, |\__,_|_|\__,_|_|       \____|_____|___|
+                |___/
+
+
+Angular CLI: 7.2.3
+Node: 10.13.0
+OS: win32 x64
+Angular:
+...
+
+
+ +
+
+

Upgrade devon4ng Angular and Ionic/Angular applications

+
+

Angular CLI provides a powerful tool to upgrade Angular based applications to the current stable release of the core framework.

+
+
+

This tool is ng update. It will not only upgrade dependencies and their related ones but also will perform some fixes in your code if available thanks to the provided schematics. It will check even if the update is not possible as there is another library or libraries that are not compatible with the versions of the upgraded dependencies. In this case it will keep your application untouched.

+
+
+ + + + + +
+ + +The repository must be in a clean state before executing an ng update. So, remember to commit your changes first. +
+
+
+
+

Basic usage

+
+

In order to perform a basic upgrade we will execute:

+
+
+
+
$ ng update @angular/cli @angular/core
+
+
+
+
+

Upgrade to new Angular version

+
+

The process will be the same, but first we need to make sure that our devon4ng application is in the latest version of Angular 8, so the ng update command can perform the upgrade not only in the dependencies but also making code changes to reflect the new features and fixes.

+
+
+
    +
  • +

    First, upgrade to latest Angular 9 version:

    +
  • +
+
+
+
+
$ ng update @angular/cli@9 @angular/core@9
+
+
+
+

Optionally the flag -C can be added to previous command to make a commit automatically. This is also valid for the next steps.

+
+
+
    +
  • +

    Then, upgrade Angular:

    +
  • +
+
+
+
+
$ ng update @angular/cli @angular/core
+
+
+
+
    +
  • +

    In case you use Angular Material:

    +
  • +
+
+
+
+
$ ng update @angular/material
+
+
+
+
    +
  • +

    If the application depends on third party libraries, the new tool ngcc can be run to make them compatible with the new Ivy compiler. In this case it is recommended to include a postinstall script in the package.json:

    +
  • +
+
+
+
+
{
+  "scripts": {
+    "postinstall": "ngcc --properties es2015 browser module main --first-only --create-ivy-entry-points"
+  }
+}
+
+
+ +
+

Important use cases:

+
+
+
    +
  • +

    To update to the next beta or pre-release version, use the --next=true option.

    +
  • +
  • +

    To update from one major version to another, use the format ng update @angular/cli@^<major_version> @angular/core@^<major_version>.

    +
  • +
  • +

    In case your Angular application uses @angular/material include it in the first command:

    +
    +
    +
    $ ng update @angular/cli @angular/core @angular/material
    +
    +
    +
  • +
+
+
+
+

Ionic/Angular applications

+
+

Just following the same procedure we can upgrade Angular applications, but we must take care of important specific Ionic dependencies:

+
+
+
+
$ ng update @angular/cli @angular/core @ionic/angular @ionic/angular-toolkit [@ionic/...]
+
+
+
+
+

Other dependencies

+
+

Every application will make use of different dependencies. Angular CLI ng upgrade will also take care of these ones. For example, if you need to upgrade @capacitor you will perform:

+
+
+
+
$ ng update @capacitor/cli @capacitor/core [@capacitor/...]
+
+
+
+

Another example could be that you need to upgrade @ngx-translate packages. As always in this case you will execute:

+
+
+
+
$ ng update @ngx-translate/core @ngx-translate/http-loader
+
+
+
+
+

Angular Update Guide online tool

+
+

It is recommended to use the Angular Update Guide tool at https://update.angular.io/ that will provide the necessary steps to upgrade any Angular application depending on multiple criteria.

+
+ +
+
+

Working with Angular CLI

+
+

Angular CLI provides a facade for building, testing, linting, debugging and generating code. +Under the hood Angular CLI uses specific tools to achieve these tasks. +The user does not need to maintain them and can rely on Angular to keep them up to date and maybe switch to other tools which come up in the future.

+
+
+

The Angular CLI provides a wiki with common tasks you encounter when working on applications with the Angular CLI. +The Angular CLI Wiki can be found here.

+
+
+

In this guide we will go through the most important tasks. +To go into more details, please visit the Angular CLI wiki.

+
+
+
+

Installing Angular CLI

+
+

Angular CLI should be added as global and local dependency. +The following commands add Angular CLI as global Dependency.

+
+
+

yarn command

+
+
+
+
yarn global add @angular/cli
+
+
+
+

npm command

+
+
+
+
npm install -g @angular/cli
+
+
+
+

You can check a successful installation with ng --version. +This should print out the version installed.

+
+
+
+Printing Angular CLI Version +
+
Figure 42. Printing Angular CLI Version
+
+
+
+

Running a live development server

+
+

The Angular CLI can be used to start a live development server. +First your application will be compiled and then the server will be started. +If you change the code of a file, the server will reload the displayed page. +Run your application with the following command:

+
+
+
+
ng serve -o
+
+
+
+
+

Running Unit Tests

+
+

All unit tests can be executed with the command:

+
+
+
+
ng test
+
+
+
+

To make a single run and create a code coverage file use the following command:

+
+
+
+
ng test -sr -cc
+
+
+
+ + + + + +
+ + +You can configure the output format for code coverage files to match your requirements in the file karma.conf.js which can be found on toplevel of your project folder. +For instance, this can be useful for exporting the results to a SonarQube. +
+
+
+
+

Linting the code quality

+
+

You can lint your files with the command

+
+
+
+
ng lint --type-check
+
+
+
+ + + + + +
+ + +You can adjust the linting rules in the file tslint.json which can be found on toplevel of your project folder. +
+
+
+
+

Generating Code

+ +
+
+

Creating a new Angular CLI project

+
+

For creating a new Angular CLI project the command ng new is used.

+
+
+

The following command creates a new application named my-app.

+
+
+
+
ng new my-app
+
+
+
+
+

Creating a new feature module

+
+

A new feature module can be created via the ng generate module command.

+
+
+

The following command generates a new feature module named todo.

+
+
+
+
ng generate module todo
+
+
+
+
+Generate a module with Angular CLI +
+
Figure 43. Generate a module with Angular CLI
+
+
+ + + + + +
+ + +The created feature module needs to be added to the AppModule by hand. +Other option would be to define a lazy route in AppRoutingModule to make this a lazy loaded module. +
+
+
+
+

Creating a new component

+
+

To create components the command ng generate component can be used.

+
+
+

The following command will generate the component todo-details inside the components layer of todo module. +It will generate a class, an html file, a css file and a test file. +Also, it will register this component as a declaration inside the nearest module - this is TodoModule.

+
+
+
+
ng generate component todo/components/todo-details
+
+
+
+
+Generate a component with Angular CLI +
+
Figure 44. Generate a component with Angular CLI
+
+
+ + + + + +
+ + +If you want to export the component, you have to add the component to exports array of the module. +This would be the case if you generate a component inside shared module. +
+
+
+
+

Configuring an Angular CLI project

+
+

Inside an Angular CLI project the file .angular-cli.json can be used to configure the Angular CLI.

+
+
+

The following options are very important to understand.

+
+
+
    +
  • +

    The property defaults can be used to change the default style extension. +The following settings will make the Angular CLI generate .less files, when a new component is generated.

    +
  • +
+
+
+
+
"defaults": {
+  "styleExt": "less",
+  "component": {}
+}
+
+
+
+
    +
  • +

    The property apps contains all applications maintained with Angular CLI. +Most of the time you will have only one.

    +
    +
      +
    • +

      assets configures all the static files, that the application needs - this can be images, fonts, json files, etc. +When you add them to assets the Angular CLI will put these files to the build target and serve them while debugging. +The following will put all files in /i18n to the output folder /i18n

      +
    • +
    +
    +
  • +
+
+
+
+
"assets": [
+  { "glob": "**/*.json", "input": "./i18n", "output": "./i18n" }
+]
+
+
+
+
    +
  • +

    styles property contains all style files that will be globally available. +The Angular CLI will create a styles bundle that goes directly into index.html with it. +The following will make all styles in styles.less globally available.

    +
  • +
+
+
+
+
"styles": [
+  "styles.less"
+]
+
+
+
+
    +
  • +

    environmentSource and environments are used to manage environment configuration with the Angular CLI. +Inside the code always the file specified in environmentSource will be referenced. +You can define different environments - e.g. production, staging, etc. - which you list in environments. +At compile time the Angular CLI will override all values in environmentSource with the values from the matching environment target. +The following code will build the application for the environment staging.

    +
  • +
+
+
+
+
ng build --environment=staging
+
+
+
+
+
+
+

Ionic

+
+ +
+

Ionic 5 Getting started

+
+

Ionic is a front-end focused framework which offers different tools for developing hybrid mobile applications. The web technologies used for this purpose are CSS, Sass, HTML5 and Typescript.

+
+
+
+

Why Ionic?

+
+

Ionic is used for developing hybrid applications, which means not having to rely on a specific IDE such as Android Studio or Xcode. Furthermore, development of native apps require learning different languages (Java/Kotlin for Android and Objective-C/Swift for Apple), with Ionic, a developer does not have to code the same functionality for multiple platforms, just use the adequate libraries and components.

+
+
+
+

Basic environment set up

+ +
+
+

Install Ionic CLI

+
+

Although the devonfw distribution comes with an already installed Ionic CLI, here are the steps to install it. The installation of Ionic is easy, just one command has to be written:

+
+
+

$ npm install -g @ionic/cli

+
+
+
+

Update Ionic CLI

+
+

If there was a previous installation of the Ionic CLI, it will need to be uninstalled due to a change in package name.

+
+
+
+
$ npm uninstall -g ionic
+$ npm install -g @ionic/cli
+
+
+
+

Basic project set up +The set up of an Ionic application is pretty immediate and can be done in one line:

+
+
+

ionic start <name> <template> --type=angular

+
+
+
    +
  • +

    ionic start: Command to create an app.

    +
  • +
  • +

    <name>: Name of the application.

    +
  • +
  • +

    <template>: Model of the application.

    +
  • +
  • +

    --type=angular: With this flag, the app produced will be based on angular.

    +
  • +
+
+
+

To create an empty project, the following command can be used:

+
+
+

ionic start MyApp blank --type=angular

+
+
+
+Ionic blank project +
+
+
+

The image above shows the directory structure generated.

+
+
+

There are more templates available that can be seen with the command +ionic start --list

+
+
+
+List of ionic templates +
+
+
+

The templates surrounded by a red line are based on Angular and come with Ionic v5, while the others belong to earlier versions (before v4).

+
+
+ + + + + +
+ + +More info at https://ionicframework.com/docs. Remember to select Angular documentation, since Ionic supports React, Vue and Vanilla JS. +
+
+ +
+
+

Ionic to android

+
+

This page is written to help developers to go from the source code of an ionic application to an android one, with this in mind, topics such as: environment, commands, modifications,…​ are covered.

+
+
+
+

Assumptions

+
+

This document assumes that the reader has already:

+
+
+
    +
  • +

    Source code of an Ionic application and wants to build it on an android device,

    +
  • +
  • +

    A working installation of NodeJS

    +
  • +
  • +

    An Ionic CLI installed and up-to-date.

    +
  • +
  • +

    Android Studio and Android SDK.

    +
  • +
+
+
+
+

From Ionic to Android project

+
+

When a native application is being designed, sometimes, functionalities that use camera, geolocation, push notifications, …​ are requested. To resolve these requests, Capacitor can be used.

+
+
+

In general terms, Capacitor wraps apps made with Ionic (HTML, SCSS, Typescript) into WebViews that can be displayed in native applications (Android, IOS) and allows the developer to access native functionalities like the ones said before.

+
+
+

Installing capacitor is as easy as installing any node module, just a few commands have to be run in a console:

+
+
+
    +
  • +

    cd name-of-ionic-4-app

    +
  • +
  • +

    npm install --save @capacitor/core @capacitor/cli

    +
  • +
+
+
+

Then, it is necessary to initialize capacitor with some information: app id, name of the app and the directory where your app is stored. To fill this information, run:

+
+
+
    +
  • +

    npx cap init

    +
  • +
+
+
+
+

Modifications

+
+

Throughout the development process, usually back-end and front-end are on a local computer, so it’s a common practice to have different configuration files for each environment (commonly production and development). Ionic uses an angular.json file to store those configurations and some rules to be applied.

+
+
+

If a back-end is hosted on http://localhost:8081, and that direction is used in every environment, the application built for android will not work because computer and device do not have the same localhost. Fortunately, different configurations can be defined.

+
+
+

Android Studio uses 10.0.2.2 as an alias for 127.0.0.1 (computer’s localhost) so adding http://10.0.2.2:8081 in a new environment file and modifying angular.json accordingly, will make it possible to connect front-end and back-end.

+
+
+
+Android environment and angular.json +
+
+
+
+
    "build": {
+    ...
+        "configurations": {
+            ...
+            "android": {
+                "fileReplacements": [
+                    {
+                        "replace": "src/environments/environment.ts",
+                        "with": "src/environments/environment.android.ts"
+                    }
+                ]
+            },
+        }
+    }
+
+
+
+
+

Build

+
+

Once configured, it is necessary to build the Ionic app using this new configuration:

+
+
+
    +
  • +

    ionic build --configuration=android

    +
  • +
+
+
+

The next commands copy the build application on a folder named android and open android studio.

+
+
+
    +
  • +

    npx cap add android

    +
  • +
  • +

    npx cap copy

    +
  • +
  • +

    npx cap open android

    +
  • +
+
+
+
+

From Android project to emulated device

+
+

Once Android Studio is opened, follow these steps:

+
+
+
    +
  1. +

    Click on "Build" → Make project.

    +
  2. +
  3. +

    Click on "Build" → Make Module 'app' (default name).

    +
  4. +
+
+
+

Click on make project +click on make app

+
+
+
    +
  1. +

    Click on" Build" → Build Bundle(s) / APK(s) → Build APK(s).

    +
  2. +
  3. +

    Click on run and choose a device.

    +
  4. +
+
+
+

click on build APK +click on running device

+
+
+

If there are no devices available, a new one can be created:

+
+
+
    +
  1. +

    Click on "Create new device"

    +
  2. +
  3. +

    Select hardware and click "Next". For example: Phone → Nexus 5X.

    +
  4. +
+
+
+

Create new device +Select hardware

+
+
+
    +
  1. +

    Download a system image.

    +
    +
      +
    1. +

      Click on download.

      +
    2. +
    3. +

      Wait until the installation finished and then click "Finish".

      +
    4. +
    5. +

      Click "Next".

      +
    6. +
    +
    +
  2. +
  3. +

    Verify configuration (default configuration should be enough) and click "Next".

    +
  4. +
+
+
+

Download system image +Check configuration

+
+
+
    +
  1. +

    Check that the new device is created correctly.

    +
  2. +
+
+
+
+New created device +
+
+
+
+

From Android project to real device

+
+

To test on a real android device, an easy approach to communicate a smartphone (front-end) and computer (back-end) is to configure a WiFi hotspot and connect the computer to it. A guide about this process can be found here.

+
+
+

Once connected, run ipconfig on a console if you are using windows or ifconfig on a Linux machine to get the IP address of your machine’s Wireless LAN adapter WiFi.

+
+
+
+Result of `ipconfig` command on Windows 10 +
+
+
+

This obtained IP must be used instead of "localhost" or "10.0.2.2" at environment.android.ts.

+
+
+
+Android environment file server URL +
+
+
+

After this configuration, follow the build steps in "From Ionic to Android project" and the first three steps in "From Android project to emulated device".

+
+
+
+

Send APK to Android through USB

+
+

To send the built application to a device, you can connect computer and mobile through USB, but first, it is necessary to unlock developer options.

+
+
+
    +
  1. +

    Open "Settings" and go to "System".

    +
  2. +
  3. +

    Click on "About".

    +
  4. +
  5. +

    Click "Build number" seven times to unlock developer options.

    +
  6. +
+
+
+
+Steps to enable developer options: 1, 2, 3 +
+
+
+
    +
  1. +

    Go to "System" again and then to "Developer options"

    +
  2. +
  3. +

    Check that the options are "On".

    +
  4. +
  5. +

    Check that "USB debugging" is activated.

    +
  6. +
+
+
+
+Steps to enable developer options: 4, 5, 6 +
+
+
+

After this, do the step four in "From Android project to emulated device" and choose the connected smartphone.

+
+
+
+

Send APK to Android through email

+
+

When you build an APK, a dialog gives two options: locate or analyze. If the first one is chosen, Windows file explorer will be opened showing an APK that can be sent using email. Download the APK on your phone and click it to install.

+
+
+
+Steps to enable developer options: 4, 5, 6 +
+
+
+
+

Result

+
+

If everything goes correctly, the Ionic application will be ready to be tested.

+
+
+
+Application running on a real device +
+
+ +
+
+

Ionic Progressive Web App

+
+

This guide is a continuation of the guide Angular PWAs, therefore, valid concepts explained there are still valid in this page but focused on Ionic.

+
+
+
+

Assumptions

+
+

This guide assumes that you already have installed:

+
+
+
    +
  • +

    NodeJS

    +
  • +
  • +

    npm package manager

    +
  • +
  • +

    Angular CLI / Nx CLI

    +
  • +
  • +

    Ionic 5 CLI

    +
  • +
  • +

    Capacitor

    +
  • +
+
+
+

Also, it is a good idea to read the document about PWA using Angular.

+
+
+
+

Sample Application

+
+
+Ionic 5 PWA Base +
+
Figure 45. Basic ionic PWA.
+
+
+

To explain how to build progressive web apps (PWA) using Ionic, a basic application is going to be built. This app will be able to take photos even without network using PWA elements.

+
+
+
+

Step 1: Create a new project

+
+

This step can be completed with one simple command: ionic start <name> <template>, where <name> is the name and <template> a model for the app. In this case, the app is going to be named basic-ion-pwa.

+
+
+

If you are using Nx, there is a pre-requisite to this step. And that is, you have to add the @nxtend/ionic-angular plugin to your Nx workspace. The command for that is npm install --save-dev @nxtend/ionic-angular. Once you have the plugin installed, you can generate an ionic app in your Nx workspace with the command nx generate @nxtend/ionic-angular:app basic-ion-pwa. (You can refer to this guide if you want to get started with Nx).

+
+
+
+

Step 2: Structures and styles

+
+

The styles (scss) and structures (html) do not have anything specially relevant, just colors and ionic web components. The code can be found in devon4ts-samples.

+
+
+
+

Step 3: Add functionality

+
+

After this step, the app will allow users to take photos and display them in the main screen. +First we have to import three important elements:

+
+
+
    +
  • +

    DomSanitizer: Sanitizes values to be safe to use.

    +
  • +
  • +

    SafeResourceUrl: Interface for values that are safe to use as URL.

    +
  • +
  • +

    Plugins: Capacitor constant value used to access to the device’s camera and toast dialogs.

    +
  • +
+
+
+
+
  import { DomSanitizer, SafeResourceUrl } from '@angular/platform-browser';
+  import { Plugins, CameraResultType } from '@capacitor/core';
+
+
+
+

The process of taking a picture is enclosed in a takePicture() method. takePicture() calls the Camera’s getPhoto() function which returns an URL or an exception. If a photo is taken then the image displayed in the main page will be changed for the new picture, else, if the app is closed without changing it, a toast message will be displayed.

+
+
+
+
  export class HomePage {
+    image: SafeResourceUrl;
+    ...
+
+    async takePicture() {
+      try {
+        const image = await Plugins.Camera.getPhoto({
+          quality: 90,
+          allowEditing: true,
+          resultType: CameraResultType.Uri,
+        });
+
+        // Change last picture shown
+        this.image = this.sanitizer.bypassSecurityTrustResourceUrl(image.webPath);
+      } catch (e) {
+        this.show('Closing camera');
+      }
+    }
+
+    async show(message: string) {
+      await Plugins.Toast.show({
+        text: message,
+      });
+    }
+  }
+
+
+
+
+

Step 4: PWA Elements

+
+

When Ionic apps are not running natively, some resources like Camera do not work by default but can be enabled using PWA Elements. To use Capacitor’s PWA elements run npm install @ionic/pwa-elements and modify src/main.ts as shown below.

+
+
+
+
...
+
+// Import for PWA elements
+import { defineCustomElements } from '@ionic/pwa-elements/loader';
+
+if (environment.production) {
+  enableProdMode();
+}
+
+platformBrowserDynamic().bootstrapModule(AppModule)
+  .catch(err => console.log(err));
+
+// Call the element loader after the platform has been bootstrapped
+defineCustomElements(window);
+
+
+
+
+

Step 5: Make it Progressive.

+
+

Turning an Ionic 5 app into a PWA is pretty easy. The same module used to turn Angular apps into PWAs has to be added. To do so, run: ng add @angular/pwa. This command also creates an icons folder inside src/assets and contains angular icons for multiple resolutions. (Note: In an Nx workspace, you have to add it like a normal package using npm install @angular/pwa, and you have to manually add the icons). If you want to use other images, be sure that they have the same resolution, the names can be different but the file manifest.json has to be changed accordingly.

+
+
+
+

Step 6: Configure the app

+
+

manifest.json

+
+
+

Default configuration.

+
+
+

ngsw-config.json

+
+
+

At assetGroups → resources, add a urls field and a pattern to match PWA Elements scripts and other resources (images, styles, …​):

+
+
+
+
  "urls": ["https://unpkg.com/@ionic/pwa-elements@1.0.2/dist/**"]
+
+
+
+
+

Step 7: Check that your app is a PWA

+
+

To check if an app is a PWA lets compare its normal behavior against itself but built for production. Run in the project’s root folder the commands below:

+
+
+

ionic build --configuration production to build the app using production settings. (nx build basic-ion-pwa --configuration production in your Nx workspace root).

+
+
+

npm install http-server to install an npm module that can serve your built application. Documentation here. A good alternative is also npm install serve. It can be checked here.

+
+
+

Go to the www folder running cd www.

+
+
+

http-server -o or serve to serve your built app.

+
+
+ + + + + +
+ + +In order not to install anything not necessary npx can be used directly to serve the app. i.e run npx serve [folder] will automatically download and run this HTTP server without installing it in the project dependencies. +
+
+
+
+Http server running +
+
Figure 46. Http server running on localhost:8081.
+
+
+

 
+In another console instance run ionic serve (nx serve basic-ion-pwa if using Nx CLI) to open the common app (not built).

+
+
+
+Ionic serve on Visual Studio Code console +
+
Figure 47. Ionic server running on localhost:8100.
+
+
+

 
+The first difference can be found on Developer tools → application, here it is seen that the PWA application (left) has a service worker and the common one does not.

+
+
+
+Application comparison +
+
Figure 48. Application service worker comparison.
+
+
+

 
+If the "offline" box is checked, it will force a disconnection from the network. In situations where users do not have connectivity or have a slow one, the PWA can still be accessed and used.

+
+
+
+Online offline apps +
+
Figure 49. Offline application.
+
+
+

 
+Finally, plugins like Lighthouse can be used to test whether an application is progressive or not.

+
+
+
+Lighthouse report +
+
Figure 50. Lighthouse report.
+
+
+
+
+
+

Layouts

+
+ +
+

Angular Material Layout

+
+

The purpose of this guide is to get a basic understanding of creating layouts using Angular Material in a devon4ng application. We will create an application with a header containing some menu links and a sidenav with some navigation links.

+
+
+
+Finished application +
+
Figure 51. This is what the finished application will look like
+
+
+
+

Create a new angular application

+
+

We start with opening the devonfw IDE (right-click anywhere in your workspace and click "Open devonfw CMD shell here") and running the following command to start a project named devon4ng-mat-layout

+
+
+
    +
  • +

    ng new devon4ng-mat-layout --routing --style=scss. If you are using Nx, the command would be nx generate @nrwl/angular:app devon4ng-mat-layout --routing --style=scss in your Nx workspace. Click here to get started with using Nx.

    +
  • +
+
+
+

We are providing the routing flag so that a routing module is generated, and we are also setting the style sheet format to SCSS with --style=scss.

+
+
+

Once the creation process is complete, open your newly created application in Visual Studio Code. Try running the empty application by running the following command in the integrated terminal:

+
+
+
    +
  • +

    ng serve. (If you are using Nx, you have to specify the project name along with the --project flag, so the command becomes ng serve --project=devon4ng-mat-layout)

    +
  • +
+
+
+

Angular will spin up a server and you can check your application by visiting http://localhost:4200/ in your browser.

+
+
+
+Blank application +
+
Figure 52. Blank application
+
+
+
+

Adding Angular Material library to the project

+
+

Next we will add Angular Material to our application. In the integrated terminal, press Ctrl + C to terminate the running application and run the following command:

+
+
+
    +
  • +

    npm install --save @angular/material @angular/cdk @angular/animations

    +
  • +
+
+
+

You can also use Yarn to install the dependencies if you prefer that:

+
+
+
    +
  • +

    yarn add @angular/material @angular/cdk @angular/animations

    +
  • +
+
+
+

Once the dependencies are installed, we need to import the BrowserAnimationsModule in our AppModule for animations support.

+
+
+
Listing 58. Importing BrowserAnimationsModule in AppModule
+
+
import {BrowserAnimationsModule} from '@angular/platform-browser/animations';
+
+@NgModule({
+  ...
+  imports: [BrowserAnimationsModule],
+  ...
+})
+export class AppModule { }
+
+
+
+

Angular Material provides a host of components for designing our application. All the components are well structured into individual NgModules. For each component from the Angular Material library that we want to use, we have to import the respective NgModule.

+
+
+
Listing 59. We will be using the following components in our application:
+
+
import { MatIconModule, MatButtonModule, MatMenuModule, MatListModule, MatToolbarModule, MatSidenavModule } from '@angular/material';
+
+@NgModule({
+  ...
+  imports: [
+	...
+    MatIconModule,
+    MatButtonModule,
+    MatMenuModule,
+    MatListModule,
+    MatToolbarModule,
+    MatSidenavModule,
+	...
+	],
+  ...
+})
+export class AppModule { }
+
+
+
+

A better approach is to import and then export all the required components in a shared module. But for the sake of simplicity, we are importing all the required components in the AppModule itself.

+
+
+
+

==

+
+
+
  You can find a working copy of this application https://github.com/devonfw-sample/devon4ts-samples/tree/master/apps/angular-material-basic-layout[here]. The sample application is part of a Nx workspace, which means it is one of the many apps in a monorepo and capable of importing reusable code from a shared library. This guide describes the implementation by assuming a stand-alone single-repo application, but the pages and layout described in this sample app are similar to the ones used in another sample app in the monorepo (https://github.com/devonfw-sample/devon4ts-samples/tree/master/apps/angular-material-theming[angular-material-theming]), which is why we have exported the required components from a shared library and reused them in both the apps. As a result, the code in our monorepo will be slightly different. It would still help you in following this guide.
+== ==
+
+
+
+

Next, we include a theme in our application. Angular Material comes with four pre-defined themes: indigo-pink, deeppurple-amber, pink-bluegrey and purple-green. It is also possible to create our own custom theme, but that is beyond the scope of this guide. Including a theme is required to apply all of the core and theme styles to your application. +We will include the indigo-pink theme in our application by importing the indigo-pink.css file in our src/styles.scss:

+
+
+
Listing 60. In src/styles.scss:
+
+
@import "~@angular/material/prebuilt-themes/indigo-pink.css";
+
+
+
+

To use Material Design Icons along with the mat-icon component, we will load the Material Icons library in our src/index.html file

+
+
+
Listing 61. In src/index.html:
+
+
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
+
+
+
+
+

Development

+
+

Now that we have all the Angular Material related dependencies set up in our project, we can start coding. Let’s begin by adding a suitable margin and font to the body element of our single page application. We will add it in the src/styles.scss file to apply it globally:

+
+
+
Listing 62. In src/styles.scss:
+
+
body {
+  margin: 0;
+  font-family: "Segoe UI", Roboto, sans-serif;
+}
+
+
+
+

At this point, if we run our application, this is how it will look like:

+
+
+
+Angular Material added to the application +
+
Figure 53. Application with Angular Material set up
+
+
+

We will clear the app.component.html file and setup a header with a menu button and some navigational links. We will use mat-toolbar, mat-button, mat-menu, mat-icon and mat-icon-button for this:

+
+
+
Listing 63. app.component.html:
+
+
<mat-toolbar color="primary">
+  <button mat-icon-button aria-label="menu">
+    <mat-icon>menu</mat-icon>
+  </button>
+  <button mat-button [matMenuTriggerFor]="submenu">Menu 1</button>
+  <button mat-button>Menu 2</button>
+  <button mat-button>Menu 3</button>
+
+  <mat-menu #submenu="matMenu">
+    <button mat-menu-item>Sub-menu 1</button>
+    <button mat-menu-item [matMenuTriggerFor]="submenu2">Sub-menu 2</button>
+  </mat-menu>
+
+  <mat-menu #submenu2="matMenu">
+    <button mat-menu-item>Menu Item 1</button>
+    <button mat-menu-item>Menu Item 2</button>
+    <button mat-menu-item>Menu Item 3</button>
+  </mat-menu>
+
+</mat-toolbar>
+
+
+
+

The color attribute on the mat-toolbar element will give it the primary (indigo) color as defined by our theme. The color attribute works with most Angular Material components; the possible values are 'primary', 'accent' and 'warn'. +The mat-toolbar is a suitable component to represent a header. It serves as a placeholder for elements we want in our header. +Inside the mat-toolbar, we start with a button having mat-icon-button attribute, which itself contains a mat-icon element having the value menu. This will serve as a menu button which we can use to toggle the sidenav. +We follow it with some sample buttons having the mat-button attribute. Notice the first button has a property matMenuTriggerFor bound to a local reference submenu. As the property name suggests, the click of this button will display the mat-menu element with the specified local reference as a drop-down menu. The rest of the code is self explanatory.

+
+
+
+Header added to the application +
+
Figure 54. This is how our application looks with the first menu button (Menu 1) clicked.
+
+
+

We want to keep the sidenav toggling menu button on the left and move the rest to the right to make it look better. To do this we add a class to the menu icon button:

+
+
+
Listing 64. app.component.html:
+
+
...
+  <button mat-icon-button aria-label="menu" class="menu">
+    <mat-icon>menu</mat-icon>
+  </button>
+...
+
+
+
+

And in the app.component.scss file, we add the following style:

+
+
+
Listing 65. app.component.scss:
+
+
.menu {
+    margin-right: auto;
+}
+
+
+
+

The mat-toolbar element already has its display property set to flex. Setting the menu icon button’s margin-right property to auto keeps it on the left and pushes the other elements to the right.

+
+
+
+Final look of the header +
+
Figure 55. Final look of the header.
+
+
+

Next, we will create a sidenav. But before that let's create a couple of components to navigate between, the links of which we will add to the sidenav. +We will use the ng generate component (or ng g c command for short) to create Home and Data components. (Append --project=devon4ng-mat-layout to the command in a Nx workspace). We nest them in the pages sub-directory since they represent our pages.

+
+
+
    +
  • +

    ng g c pages/home

    +
  • +
  • +

    ng g c pages/data

    +
  • +
+
+
+

Let us set up the routing such that when we visit http://localhost:4200/ root url we see the HomeComponent and when we visit http://localhost:4200/data url we see the DataComponent. +We had opted for routing while creating the application, so we have the routing module app-routing.module.ts setup for us. In this file, we have the empty routes array where we set up our routes.

+
+
+
Listing 66. app-routing.module.ts:
+
+
import { HomeComponent } from './pages/home/home.component';
+import { DataComponent } from './pages/data/data.component';
+
+	const routes: Routes = [
+	  { path: '', component: HomeComponent },
+	  { path: 'data', component: DataComponent }
+	];
+
+
+
+

We need to provide a hook where the components will be loaded when their respective URLs are loaded. We do that by using the router-outlet directive in the app.component.html.

+
+
+
Listing 67. app.component.html:
+
+
...
+	</mat-toolbar>
+	<router-outlet></router-outlet>
+
+
+
+

Now when we visit the defined URLs we see the appropriate components rendered on screen.

+
+
+

Let's change the contents of the components to have something better.

+
+
+
Listing 68. home.component.html:
+
+
<h2>Home Page</h2>
+
+
+
+
Listing 69. home.component.scss:
+
+
h2 {
+    text-align: center;
+    margin-top: 50px;
+}
+
+
+
+
Listing 70. data.component.html:
+
+
<h2>Data Page</h2>
+
+
+
+
Listing 71. data.component.scss:
+
+
h2 {
+    text-align: center;
+    margin-top: 50px;
+}
+
+
+
+

The pages look somewhat better now:

+
+
+
+Home page +
+
Figure 56. Home page
+
+
+
+Data page +
+
Figure 57. Data page
+
+
+

Let us finally create the sidenav. To implement the sidenav we need to use 3 Angular Material components: mat-sidenav-container, mat-sidenav and mat-sidenav-content. +The mat-sidenav-container, as the name suggests, acts as a container for the sidenav and the associated content. So it is the parent element, and mat-sidenav and mat-sidenav-content are the children sibling elements. mat-sidenav represents the sidenav. We can put any content we want, though it is usually used to contain a list of navigational links. The mat-sidenav-content element is for containing the contents of our current page. Since we need the sidenav application-wide, we will put it in the app.component.html.

+
+
+
Listing 72. app.component.html:
+
+
...
+</mat-toolbar>
+
+<mat-sidenav-container>
+  <mat-sidenav mode="over" [disableClose]="false" #sidenav>
+    Sidenav
+  </mat-sidenav>
+  <mat-sidenav-content>
+    <router-outlet></router-outlet>
+  </mat-sidenav-content>
+</mat-sidenav-container>
+
+
+
+

The mat-sidenav has a mode property, which accepts one of the 3 values: over, push and side. It decides the behavior of the sidenav. mat-sidenav also has a disableClose property which accepts a boolean value. It toggles the behavior where we click on the backdrop or press the Esc key to close the sidenav. There are other properties which we can use to customize the appearance, behavior and position of the sidenav. You can find the properties documented online at https://material.angular.io/components/sidenav/api +We moved the router-outlet directive inside the mat-sidenav-content where it will render the routed component. +But if you check the running application in the browser, we don’t see the sidenav yet. That is because it is closed. We want to have the sidenav opened/closed at the click of the menu icon button on the left side of the header we implemented earlier. Notice we have set a local reference #sidenav on the mat-sidenav element. We can access this element and call its toggle() function to toggle open or close the sidenav.

+
+
+
Listing 73. app.component.html:
+
+
...
+  <button mat-icon-button aria-label="menu" class="menu" (click)="sidenav.toggle()">
+    <mat-icon>menu</mat-icon>
+  </button>
+...
+
+
+
+
+Sidenav works +
+
Figure 58. Sidenav is implemented
+
+
+

We can now open the sidenav by clicking the menu icon button. But it does not look right. The sidenav is only as wide as its content. Also the page does not stretch the entire viewport due to lack of content. +Let’s add the following styles to make the page fill the viewport:

+
+
+
Listing 74. app.component.scss:
+
+
...
+mat-sidenav-container {
+    position: absolute;
+    top: 64px;
+    left: 0;
+    right: 0;
+    bottom: 0;
+}
+
+
+
+

The sidenav width will be corrected when we add the navigational links to it. That is the only thing remaining to be done. Let's implement it now:

+
+
+
Listing 75. app.component.html:
+
+
...
+  <mat-sidenav [disableClose]="false" mode="over" #sidenav>
+	<mat-nav-list>
+      <a
+        id="home"
+        mat-list-item
+        [routerLink]="['./']"
+        (click)="sidenav.close()"
+        routerLinkActive="active"
+        [routerLinkActiveOptions]="{exact: true}"
+      >
+        <mat-icon matListAvatar>home</mat-icon>
+        <h3 matLine>Home</h3>
+        <p matLine>sample home page</p>
+      </a>
+      <a
+        id="sampleData"
+        mat-list-item
+        [routerLink]="['./data']"
+        (click)="sidenav.close()"
+        routerLinkActive="active"
+      >
+        <mat-icon matListAvatar>grid_on</mat-icon>
+        <h3 matLine>Data</h3>
+        <p matLine>sample data page</p>
+      </a>
+    </mat-nav-list>
+  </mat-sidenav>
+...
+
+
+
+

We use the mat-nav-list element to set a list of navigational links. We use the a tags with the mat-list-item directive. We implement a click listener on each link to close the sidenav when it is clicked. The routerLink directive is used to provide the URLs to navigate to. The routerLinkActive directive is used to provide the class name which will be added to the link when its URL is visited. Here we name the class `active`. To style it, let's modify the app.component.scss file:

+
+
+
Listing 76. app.component.scss:
+
+
...
+mat-sidenav-container {
+...
+	a.active {
+        background: #8e8d8d;
+        color: #fff;
+
+        p {
+            color: #4a4a4a;
+        }
+    }
+}
+
+
+
+

Now we have a working application with a basic layout: a header with some menu and a sidenav with some navigational links.

+
+
+
+Finished application +
+
Figure 59. Finished application
+
+
+
+

Conclusion

+
+

The purpose of this guide was to provide a basic understanding of creating layouts with Angular Material. The Angular Material library has a huge collection of ready to use components which can be found at https://material.angular.io/components/categories +It has provided documentation and example usage for each of its components. Going through the documentation will give a better understanding of using Angular Material components in our devon4ng applications.

+
+
+
+
+
+

NgRx

+
+ +
+

Introduction to NgRx

+
+

NgRx is a state management framework for Angular based on the Redux pattern.

+
+
+
+

The need for client side state management

+
+

You may wonder why you should bother with state management. Usually data resides in a back-end storage system, e.g. a database, and is retrieved by the client on a per-need basis. To add, update, or delete entities from this store, clients have to invoke API endpoints at the back-end. Mimicking database-like transactions on the client side may seem redundant. However, there are many use cases for which a global client-side state is appropriate:

+
+
+
    +
  • +

    the client has some kind of global state which should survive the destruction of a component, but does not warrant server side persistence, for example: volume level of media, expansion status of menus

    +
  • +
  • +

    server side data should not be retrieved every time it is needed, either because multiple components consume it, or because it should be cached, e.g. the personal watchlist in an online streaming app

    +
  • +
  • +

    the app provides a rich experience with offline functionality, e.g. a native app built with Ionic

    +
  • +
+
+
+

Saving global states inside the services they originate from results in a data flow that is hard to follow and state becoming inconsistent due to unordered state mutations. Following the single source of truth principle, there should be a central location holding all your application’s state, just like a server side database does. State management libraries for Angular provide tools for storing, retrieving, and updating client-side state.

+
+
+
+

Why NgRx?

+
+

As stated in the introduction, devon4ng does not stipulate a particular state library, or require using one at all. However, NgRx has proven to be a robust, mature solution for this task, with good tooling and 3rd-party library support. Albeit introducing a level of indirection that requires additional effort even for simple features, the Redux concept enforces a clear separation of concerns leading to a cleaner architecture.

+
+
+

Nonetheless, you should always compare different approaches to state management and pick the best one suiting your use case. Here’s a (non-exhaustive) list of competing state management libraries:

+
+
+
    +
  • +

    Plain RxJS using the simple store described in Abstract Class Store

    +
  • +
  • +

    NgXS reduces some boilerplate of NgRx by leveraging the power of decorators and moving side effects to the store

    +
  • +
  • +

    MobX follows a more imperative approach in contrast to the functional Redux pattern

    +
  • +
  • +

    Akita also uses an imperative approach with direct setters in the store, but keeps the concept of immutable state transitions

    +
  • +
+
+
+
+

Setup

+
+

To get a quick start, use the provided template for devon4ng + NgRx.

+
+
+

To manually install the core store package together with a set of useful extensions:

+
+
+

NPM:

+
+
+
+
`npm install @ngrx/store @ngrx/effects @ngrx/entity @ngrx/store-devtools --save`
+
+
+
+

Yarn:

+
+
+
+
`yarn add @ngrx/store @ngrx/effects @ngrx/entity @ngrx/store-devtools`
+
+
+
+

We recommend to add the NgRx schematics to your project so you can create code artifacts from the command line:

+
+
+

NPM:

+
+
+
+
`npm install @ngrx/schematics --save-dev`
+
+
+
+

Yarn:

+
+
+
+
`yarn add @ngrx/schematics --dev`
+
+
+
+

Afterwards, make NgRx your default schematics provider, so you don’t have to type the qualified package name every time:

+
+
+
+
`ng config cli.defaultCollection @ngrx/schematics`
+
+
+
+

If you have custom settings for Angular schematics, you have to configure them as described here.

+
+
+
+

Concept

+
+
+NgRx Architecture +
+
Figure 60. NgRx architecture overview
+
+
+

Figure 1 gives an overview of the NgRx data flow. The single source of truth is managed as an immutable state object by the store. Components dispatch actions to trigger state changes. Actions are handed over to reducers, which take the current state and action data to compute the next state. Actions are also consumed by effects, which perform side-effects such as retrieving data from the back-end, and may dispatch new actions as a result. Components subscribe to state changes using selectors.

+
+
+

Continue with Creating a Simple Store.

+
+ +
+
+

State, Selection and Reducers

+ +
+
+

Creating a Simple Store

+
+

In the following pages we use the example of an online streaming service. We will model a particular feature, a watchlist that can be populated by the user with movies she or he wants to see in the future.

+
+
+
+

Initializing NgRx

+
+

If you’re starting fresh, you first have to initialize NgRx and create a root state. The fastest way to do this is using the schematic:

+
+
+
+
`ng generate @ngrx/schematics:store State --root --module app.module.ts`
+
+
+
+

This will automatically generate a root store and register it in the app module. Next we generate a feature module for the watchlist:

+
+
+

` ng generate module watchlist`

+
+
+

and create a corresponding feature store:

+
+
+

` ng generate store watchlist/Watchlist -m watchlist.module.ts`

+
+
+

This generates a file watchlist/reducers/index.ts with the reducer function, and registers the store in the watchlist module declaration.

+
+
+
+

== =

+
+

If you’re getting an error Schematic "store" not found in collection "@schematics/angular", this means you forgot to register the NgRx schematics as default. +== == =

+
+
+

Next, add the WatchlistModule to the AppModule imports so the feature store is registered when the application starts. We also added the store devtools which we will use later, resulting in the following file:

+
+
+

app.module.ts

+
+
+
+
import { BrowserModule } from '@angular/platform-browser';
+import { NgModule } from '@angular/core';
+
+import { AppComponent } from './app.component';
+import { EffectsModule } from '@ngrx/effects';
+import { AppEffects } from './app.effects';
+import { StoreModule } from '@ngrx/store';
+import { reducers, metaReducers } from './reducers';
+import { StoreDevtoolsModule } from '@ngrx/store-devtools';
+import { environment } from '../environments/environment';
+import { WatchlistModule } from './watchlist/watchlist.module';
+
+@NgModule({
+  declarations: [
+    AppComponent
+  ],
+  imports: [
+    BrowserModule,
+    WatchlistModule,
+    StoreModule.forRoot(reducers, { metaReducers }),
+    // Instrumentation must be imported after importing StoreModule (config is optional)
+    StoreDevtoolsModule.instrument({
+      maxAge: 25, // Retains last 25 states
+      logOnly: environment.production, // Restrict extension to log-only mode
+    }),
+    !environment.production ? StoreDevtoolsModule.instrument() : []
+  ],
+  providers: [],
+  bootstrap: [AppComponent]
+})
+export class AppModule { }
+
+
+
+
+

Create an entity model and initial state

+
+

We need a simple model for our list of movies. Create a file watchlist/models/movies.ts and insert the following code:

+
+
+
+
export interface Movie {
+    id: number;
+    title: string;
+    releaseYear: number;
+    runtimeMinutes: number;
+    genre: Genre;
+}
+
+export type Genre = 'action' | 'fantasy' | 'sci-fi' | 'romantic' | 'comedy' | 'mystery';
+
+export interface WatchlistItem {
+    id: number;
+    movie: Movie;
+    added: Date;
+    playbackMinutes: number;
+}
+
+
+
+
+

== =

+
+

We discourage putting several types into the same file and do this only for the sake of keeping this tutorial brief. +== == =

+
+
+

Later we will learn how to retrieve data from the back-end using effects. For now we will create an initial state for the user with a default movie.

+
+
+

State is defined and transformed by a reducer function. Let’s create a watchlist reducer:

+
+
+
+
```
+cd watchlist/reducers
+ng g reducer WatchlistData --reducers index.ts
+```
+
+
+
+

Open the generated file watchlist-data.reducer.ts. You see three exports: The State interface defines the shape of the state. There is only one instance of a feature state in the store at all times. The initialState constant is the state at application creation time. The reducer function will later be called by the store to produce the next state instance based on the current state and an action object.

+
+
+

Let’s put a movie into the user’s watchlist:

+
+
+

watchlist-data.reducer.ts

+
+
+
+
export interface State {
+  items: WatchlistItem[];
+}
+
+export const initialState: State = {
+  items: [
+    {
+      id: 42,
+      movie: {
+        id: 1,
+        title: 'Die Hard',
+        genre: 'action',
+        releaseYear: 1988,
+        runtimeMinutes: 132
+      },
+      playbackMinutes: 0,
+      added: new Date(),
+    }
+  ]
+};
+
+
+
+
+

Select the current watchlist

+
+

State slices can be retrieved from the store using selectors.

+
+
+

Create a watchlist component:

+
+
+
+
`ng g c watchlist/Watchlist`
+
+
+
+

and add it to the exports of WatchlistModule. Also, replace app.component.html with

+
+
+
+
<app-watchlist></app-watchlist>
+
+
+
+

State observables are obtained using selectors. They are memoized by default, meaning that you don’t have to worry about performance if you use complicated calculations when deriving state — these are only performed once per state emission.

+
+
+

Add a selector to watchlist-data.reducer.ts:

+
+
+
+
`export const getAllItems = (state: State) => state.items;`
+
+
+
+

Next, we have to re-export the selector for this sub-state in the feature reducer. Modify the watchlist/reducers/index.ts like this:

+
+
+

watchlist/reducers/index.ts

+
+
+
+
import {
+  ActionReducer,
+  ActionReducerMap,
+  createFeatureSelector,
+  createSelector,
+  MetaReducer
+} from '@ngrx/store';
+import { environment } from 'src/environments/environment';
+import * as fromWatchlistData from './watchlist-data.reducer';
+import * as fromRoot from 'src/app/reducers/index';
+
+export interface WatchlistState { (1)
+  watchlistData: fromWatchlistData.State;
+}
+
+export interface State extends fromRoot.State { (2)
+  watchlist: WatchlistState;
+}
+
+export const reducers: ActionReducerMap<WatchlistState> = { (3)
+  watchlistData: fromWatchlistData.reducer,
+};
+
+export const metaReducers: MetaReducer<WatchlistState>[] = !environment.production ? [] : [];
+
+export const getFeature = createFeatureSelector<State, WatchlistState>('watchlist'); (4)
+
+export const getWatchlistData = createSelector( (5)
+  getFeature,
+  state => state.watchlistData
+);
+
+export const getAllItems = createSelector( (6)
+  getWatchlistData,
+  fromWatchlistData.getAllItems
+);
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + +
1The feature state, each member is managed by a different reducer
2Feature states are registered by the forFeature method. This interface provides a typesafe path from root to feature state.
3Tie sub-states of a feature state to the corresponding reducers
4Create a selector to access the 'watchlist' feature state
5select the watchlistData sub state
6re-export the selector
+
+
+

Note how createSelector allows to chain selectors. This is a powerful tool that also allows for selecting from multiple states.

+
+
+

You can use selectors as pipeable operators:

+
+
+

watchlist.component.ts

+
+
+
+
export class WatchlistComponent {
+  watchlistItems$: Observable<WatchlistItem[]>;
+
+  constructor(
+    private store: Store<fromWatchlist.State>
+  ) {
+    this.watchlistItems$ = this.store.pipe(select(fromWatchlist.getAllItems));
+  }
+}
+
+
+
+

watchlist.component.html

+
+
+
+
<h1>Watchlist</h1>
+<ul>
+    <li *ngFor="let item of watchlistItems$ | async">{{item.movie.title}} ({{item.movie.releaseYear}}): {{item.playbackMinutes}}/{{item.movie.runtimeMinutes}} min watched</li>
+</ul>
+
+
+
+
+

Dispatching an action to update watched minutes

+
+

We track the user’s current progress at watching a movie as the playbackMinutes property. After closing a video, the watched minutes have to be updated. In NgRx, state is being updated by dispatching actions. An action is an object with a (globally unique) type discriminator and an optional payload.

+
+
+
+

== Creating the action

+
+

Create a file playback/actions/index.ts. In this example, we do not further separate the actions per sub state. Actions can be defined by using action creators:

+
+
+

playback/actions/index.ts

+
+
+
+
import { createAction, props, union } from '@ngrx/store';
+
+export const playbackFinished = createAction('[Playback] Playback finished', props<{ movieId: number, stoppedAtMinute: number }>());
+
+const actions = union({
+    playbackFinished
+});
+
+export type ActionsUnion = typeof actions;
+
+
+
+

First we specify the type, followed by a call to the payload definition function. Next, we create a union of all possible actions for this file using union, which allows us to access action payloads in the reducer in a typesafe way.

+
+
+
+

== =

+
+

Action types should follow the naming convention [Source] Event, e.g. [Recommended List] Hide Recommendation or [Auth API] Login Success. Think of actions as events rather than commands. You should never use the same action at two different places (you can still handle multiple actions the same way). This facilitates tracing the source of an action. For details see Good Action Hygiene with NgRx by Mike Ryan (video). +== == =

+
+
+
+

== Dispatch

+
+

We skip the implementation of an actual video playback page and simulate watching a movie in 10 minute segments by adding a link in the template:

+
+
+

watchlist-component.html

+
+
+
+
<li *ngFor="let item of watchlistItems$ | async">... <button (click)="stoppedPlayback(item.movie.id, item.playbackMinutes + 10)">Add 10 Minutes</button></li>
+
+
+
+

watchlist-component.ts

+
+
+
+
import * as playbackActions from 'src/app/playback/actions';
+...
+  stoppedPlayback(movieId: number, stoppedAtMinute: number) {
+    this.store.dispatch(playbackActions.playbackFinished({ movieId, stoppedAtMinute }));
+  }
+
+
+
+
+

== State reduction

+
+

Next, we handle the action inside the watchlistData reducer. Note that actions can be handled by multiple reducers and effects at the same time to update different states, for example if we’d like to show a rating modal after playback has finished.

+
+
+

watchlist-data.reducer.ts

+
+
+
+
export function reducer(state = initialState, action: playbackActions.ActionsUnion): State {
+  switch (action.type) {
+    case playbackActions.playbackFinished.type:
+      return {
+        ...state,
+        items: state.items.map(updatePlaybackMinutesMapper(action.movieId, action.stoppedAtMinute))
+      };
+
+    default:
+      return state;
+  }
+}
+
+export function updatePlaybackMinutesMapper(movieId: number, stoppedAtMinute: number) {
+  return (item: WatchlistItem) => {
+    if (item.movie.id == movieId) {
+      return {
+        ...item,
+        playbackMinutes: stoppedAtMinute
+      };
+    } else {
+      return item;
+    }
+  };
+}
+
+
+
+

Note how we changed the reducer’s function signature to reference the actions union. The switch-case handles all incoming actions to produce the next state. The default case handles all actions a reducer is not interested in by returning the state unchanged. Then we find the watchlist item corresponding to the movie with the given id and update the playback minutes. Since state is immutable, we have to clone all objects down to the one we would like to change using the object spread operator (…​).

+
+
+
+

== =

+
+

Selectors rely on object identity to decide whether the value has to be recalculated. Do not clone objects that are not on the path to the change you want to make. This is why updatePlaybackMinutesMapper returns the same item if the movie id does not match. +== == =

+
+
+
+

== Alternative state mapping with Immer

+
+

It can be hard to think in immutable changes, especially if your team has a strong background in imperative programming. In this case, you may find the Immer library convenient, which allows to produce immutable objects by manipulating a proxied draft. The same reducer can then be written as:

+
+
+

watchlist-data.reducer.ts with Immer

+
+
+
+
import { produce } from 'immer';
+...
+case playbackActions.playbackFinished.type:
+      return produce(state, draft => {
+        const itemToUpdate = draft.items.find(item => item.movie.id == action.movieId);
+        if (itemToUpdate) {
+          itemToUpdate.playbackMinutes = action.stoppedAtMinute;
+        }
+      });
+
+
+
+

Immer works out of the box with plain objects and arrays.

+
+
+
+

== Redux devtools

+
+

If the StoreDevToolsModule is instrumented as described above, you can use the browser extension Redux devtools to see all dispatched actions and the resulting state diff, as well as the current state, and even travel back in time by undoing actions.

+
+
+
+Redux Devtools +
+
Figure 61. Redux devtools
+
+
+

Continue with learning about effects

+
+ +
+
+

Side effects with NgRx/Effects

+
+

Reducers are pure functions, meaning they are side-effect free and deterministic. Many actions however have side effects like sending messages or displaying a toast notification. NgRx encapsulates these actions in effects.

+
+
+

Let’s build a recommended movies list so the user can add movies to their watchlist.

+
+
+
+

Obtaining the recommendation list from the server

+
+

Create a module for recommendations and add stores and states as in the previous chapter. Add EffectsModule.forRoot([]) to the imports in AppModule below StoreModule.forRoot(). Add effects to the feature module:

+
+
+
+
ng generate effect recommendation/Recommendation -m recommendation/recommendation.module.ts
+
+
+
+

We need actions for loading the movie list, success and failure cases:

+
+
+

recommendation/actions/index.ts

+
+
+
+
import { createAction, props, union } from '@ngrx/store';
+import { Movie } from 'src/app/watchlist/models/movies';
+
+export const loadRecommendedMovies = createAction('[Recommendation List] Load movies');
+export const loadRecommendedMoviesSuccess = createAction('[Recommendation API] Load movies success', props<{movies: Movie[]}>());
+export const loadRecommendedMoviesFailure = createAction('[Recommendation API] Load movies failure', props<{error: any}>());
+
+const actions = union({
+    loadRecommendedMovies,
+    loadRecommendedMoviesSuccess,
+    loadRecommendedMoviesFailure
+});
+
+export type ActionsUnion = typeof actions;
+
+
+
+

In the reducer, we use a loading flag so the UI can show a loading spinner. The store is updated with arriving data.

+
+
+

recommendation/actions/index.ts

+
+
+
+
export interface State {
+  items: Movie[];
+  loading: boolean;
+}
+
+export const initialState: State = {
+  items: [],
+  loading: false
+};
+
+export function reducer(state = initialState, action: recommendationActions.ActionsUnion): State {
+  switch (action.type) {
+    case '[Recommendation List] Load movies':
+      return {
+        ...state,
+        items: [],
+        loading: true
+      };
+
+    case '[Recommendation API] Load movies failure':
+      return {
+        ...state,
+          loading: false
+      };
+
+    case '[Recommendation API] Load movies success':
+      return {
+        ...state,
+        items: action.movies,
+        loading: false
+      };
+
+    default:
+      return state;
+  }
+}
+
+export const getAll = (state: State) => state.items;
+export const isLoading = (state: State) => state.loading;
+
+
+
+

We need an API service to talk to the server. For demonstration purposes, we simulate an answer delayed by one second:

+
+
+

recommendation/services/recommendation-api.service.ts

+
+
+
+
@Injectable({
+  providedIn: 'root'
+})
+export class RecommendationApiService {
+
+  private readonly recommendedMovies: Movie[] = [
+    {
+      id: 2,
+      title: 'The Hunger Games',
+      genre: 'sci-fi',
+      releaseYear: 2012,
+      runtimeMinutes: 144
+    },
+    {
+      id: 4,
+      title: 'Avengers: Endgame',
+      genre: 'fantasy',
+      releaseYear: 2019,
+      runtimeMinutes: 181
+    }
+  ];
+
+  loadRecommendedMovies(): Observable<Movie[]> {
+    return of(this.recommendedMovies).pipe(delay(1000));
+  }
+}
+
+
+
+

Here are the effects:

+
+
+

recommendation/services/recommendation-api.service.ts

+
+
+
+
@Injectable()
+export class RecommendationEffects {
+
+  constructor(
+    private actions$: Actions,
+    private recommendationApi: RecommendationApiService,
+  ) { }
+
+  @Effect()
+  loadBooks$ = this.actions$.pipe(
+    ofType(recommendationActions.loadRecommendedMovies.type),
+    switchMap(() => this.recommendationApi.loadRecommendedMovies().pipe(
+      map(movies => recommendationActions.loadRecommendedMoviesSuccess({ movies })),
+      catchError(error => of(recommendationActions.loadRecommendedMoviesFailure({ error })))
+    ))
+  );
+}
+
+
+
+

Effects are always observables and return actions. In this example, we consume the actions observable provided by NgRx and listen only for the loadRecommendedMovies actions by using the ofType operator. Using switchMap, we map to a new observable, one that loads movies and maps the successful result to a new loadRecommendedMoviesSuccess action or a failure to loadRecommendedMoviesFailure. In a real application we would show a notification in the error case.

+
+
+
+

==

+
+

If an effect should not dispatch another action, return an empty observable. +== ==

+
+ + +
+
+

Simplifying CRUD with NgRx/Entity

+
+

Most of the time when manipulating entries in the store, we like to create, add, update, or delete entries (CRUD). NgRx/Entity provides convenience functions if each item of a collection has an id property. Luckily all our entities already have this property.

+
+
+

Let’s add functionality to add a movie to the watchlist. First, create the required action:

+
+
+

recommendation/actions/index.ts

+
+
+
+
export const addToWatchlist = createAction('[Recommendation List] Add to watchlist',
+    props<{ watchlistItemId: number, movie: Movie, addedAt: Date }>());
+
+
+
+
+

==

+
+

You may wonder why the Date object is not created inside the reducer instead, since it should always be the current time. However, remember that reducers should be deterministic state machines — State A + Action B should always result in the same State C. This makes reducers easily testable. +== ==

+
+
+

Then, rewrite the watchlistData reducer to make use of NgRx/Entity:

+
+
+

recommendation/actions/index.ts

+
+
+
+
export interface State extends EntityState<WatchlistItem> { (1)
+}
+
+export const entityAdapter = createEntityAdapter<WatchlistItem>(); (2)
+
+export const initialState: State = entityAdapter.getInitialState(); (3)
+
+const entitySelectors = entityAdapter.getSelectors();
+
+export function reducer(state = initialState, action: playbackActions.ActionsUnion | recommendationActions.ActionsUnion): State {
+  switch (action.type) {
+    case playbackActions.playbackFinished.type:
+      const itemToUpdate = entitySelectors
+      .selectAll(state) (4)
+      .find(item => item.movie.id == action.movieId);
+      if (itemToUpdate) {
+        return entityAdapter.updateOne({ (5)
+          id: itemToUpdate.id,
+          changes: { playbackMinutes: action.stoppedAtMinute } (6)
+        }, state);
+      } else {
+        return state;
+      }
+
+    case recommendationActions.addToWatchlist.type:
+      return entityAdapter.addOne({id: action.watchlistItemId, movie: action.movie, added: action.addedAt, playbackMinutes: 0}, state);
+
+    default:
+      return state;
+  }
+}
+
+
+export const getAllItems = entitySelectors.selectAll;
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + +
1NgRx/Entity requires state to extend EntityState. It provides a list of ids and a dictionary of id ⇒ entity entries
2The entity adapter provides data manipulation operations and selectors
3The state can be initialized with getInitialState(), which accepts an optional object to define any additional state beyond EntityState
4selectAll returns an array of all entities
5All adapter operations consume the state object as the last argument and produce a new state
6Update methods accept a partial change definition; you don’t have to clone the object
+
+
+

This concludes the tutorial on NgRx. If you want to learn about advanced topics such as selectors with arguments, testing, or router state, head over to the official NgRx documentation.

+
+
+
+
+
+

Cookbook

+
+ +
+

Abstract Class Store

+
+

The following solution presents a base class for implementing stores which handle state and its transitions. +Working with the base class achieves:

+
+
+
    +
  • +

    common API across all stores

    +
  • +
  • +

    logging (when activated in the constructor)

    +
  • +
  • +

    state transitions are asynchronous by design - sequential order problems are avoided

    +
  • +
+
+
+
Listing 77. Usage Example
+
+
@Injectable()
+export class ModalStore extends Store<ModalState> {
+
+  constructor() {
+    super({ isOpen: false }, !environment.production);
+  }
+
+  closeDialog() {
+    this.dispatchAction('Close Dialog', (currentState) => ({...currentState, isOpen: false}));
+  }
+
+  openDialog() {
+    this.dispatchAction('Open Dialog', (currentState) => ({...currentState, isOpen: true}));
+  }
+
+}
+
+
+
+
Listing 78. Abstract Base Class Store
+
+
import { OnDestroy } from '@angular/core';
+import { BehaviorSubject } from 'rxjs/BehaviorSubject';
+import { Observable } from 'rxjs/Observable';
+import { intersection, difference } from 'lodash';
+import { map, distinctUntilChanged, observeOn } from 'rxjs/operators';
+import { Subject } from 'rxjs/Subject';
+import { queue } from 'rxjs/scheduler/queue';
+import { Subscription } from 'rxjs/Subscription';
+
+interface Action<T> {
+  name: string;
+  actionFn: (state: T) => T;
+}
+
+/** Base class for implementing stores. */
+export abstract class Store<T> implements OnDestroy {
+
+  private actionSubscription: Subscription;
+  private actionSource: Subject<Action<T>>;
+  private stateSource: BehaviorSubject<T>;
+  state$: Observable<T>;
+
+  /**
+   * Initializes a store with initial state and logging.
+   * @param initialState Initial state
+   * @param logChanges When true state transitions are logged to the console.
+   */
+  constructor(initialState: T, public logChanges = false) {
+    this.stateSource = new BehaviorSubject<T>(initialState);
+    this.state$ = this.stateSource.asObservable();
+    this.actionSource = new Subject<Action<T>>();
+
+    this.actionSubscription = this.actionSource.pipe(observeOn(queue)).subscribe(action => {
+      const currentState = this.stateSource.getValue();
+      const nextState = action.actionFn(currentState);
+
+      if (this.logChanges) {
+        this.log(action.name, currentState, nextState);
+      }
+
+      this.stateSource.next(nextState);
+    });
+  }
+
+  /**
+   * Selects a property from the stores state.
+   * Will do distinctUntilChanged() and map() with the given selector.
+   * @param selector Selector function which selects the needed property from the state.
+   * @returns Observable of return type from selector function.
+   */
+  select<TX>(selector: (state: T) => TX): Observable<TX> {
+    return this.state$.pipe(
+      map(selector),
+      distinctUntilChanged()
+    );
+  }
+
+  protected dispatchAction(name: string, action: (state: T) => T) {
+    this.actionSource.next({ name, actionFn: action });
+  }
+
+  private log(actionName: string, before: T, after: T) {
+    const result: { [key: string]: { from: any, to: any} } = {};
+    const sameProbs = intersection(Object.keys(after), Object.keys(before));
+    const newProbs = difference(Object.keys(after), Object.keys(before));
+    for (const prop of newProbs) {
+      result[prop] = { from: undefined, to: (<any>after)[prop] };
+    }
+
+    for (const prop of sameProbs) {
+      if ((<any>before)[prop] !==  (<any>after)[prop]) {
+        result[prop] = { from: (<any>before)[prop], to: (<any>after)[prop] };
+      }
+    }
+
+    console.log(this.constructor.name, actionName, result);
+  }
+
+  ngOnDestroy() {
+    this.actionSubscription.unsubscribe();
+  }
+
+}
+
+
+ +
+
+

Add Electron to an Angular application using Angular CLI

+
+

This cookbook recipe explains how to integrate Electron in an Angular 10+ application. Electron is a framework for creating native applications with web technologies like JavaScript, HTML, and CSS. As an example, very well known applications as Visual Studio Code, Atom, Slack or Skype (and many more) are using Electron too.

+
+
+ + + + + +
+ + +At the moment of this writing Angular 11.2.0, Electron 11.2.3 and Electron-builder 22.9.1 were the versions available. +
+
+
+

Here are the steps to achieve this goal. Follow them in order.

+
+
+
+

Add Electron and other relevant dependencies

+
+

There are two different approaches to add the dependencies in the package.json file:

+
+
+
    +
  • +

    Writing the dependencies directly in that file.

    +
  • +
  • +

    Installing using npm install or yarn add.

    +
  • +
+
+
+ + + + + +
+ + +Please remember if the project has a package-lock.json or yarn.lock file use npm or yarn respectively. +
+
+
+

In order to add the dependencies directly in the package.json file, include the following lines in the devDependencies section:

+
+
+
+
"devDependencies": {
+...
+    "electron": "^11.2.3",
+    "electron-builder": "^22.9.1",
+...
+},
+
+
+
+

As indicated above, instead of this npm install can be used:

+
+
+
+
$ npm install -D electron electron-builder
+
+
+
+

Or with yarn:

+
+
+
+
$ yarn add -D electron electron-builder
+
+
+
+
+

Create the necessary typescript configurations

+
+

In order to initiate electron in an angular app we need to modify the tsconfig.json file and create a tsconfig.serve.json and a tsconfig.base.json in the root folder.

+
+
+
+

== tsconfig.json

+
+

This file needs to be modified to create references to ./src/tsconfig.app.json and ./src/tsconfig.spec.json to support different configurations.

+
+
+
+
{
+  "files": [],
+  "references": [
+    {
+      "path": "./src/tsconfig.app.json"
+    },
+    {
+      "path": "./src/tsconfig.spec.json"
+    }
+  ]
+}
+
+
+
+
+

== tsconfig.app.json

+
+
+
{
+  "extends": "../tsconfig.base.json",
+  "compilerOptions": {
+    "outDir": "../app",
+    "module": "es2015",
+    "baseUrl": "",
+    "types": []
+  },
+  "include": [
+    "**/*.ts",
+  ],
+  "exclude": [
+    "**/*.spec.ts"
+  ],
+  "angularCompilerOptions": {
+    "fullTemplateTypeCheck": true,
+    "strictInjectionParameters": true,
+    "preserveWhitespaces": true
+  }
+}
+
+
+
+
+

== tsconfig.spec.json

+
+
+
{
+  "extends": "../tsconfig.base.json",
+  "compilerOptions": {
+    "outDir": "../spec",
+    "module": "commonjs",
+    "types": [
+      "jasmine",
+      "node"
+    ]
+  },
+  "files": [
+    "test.ts",
+  ],
+  "include": [
+    "**/*.spec.ts",
+    "**/*.d.ts"
+  ],
+  "exclude": [
+    "dist",
+    "release",
+    "node_modules"
+  ]
+}
+
+
+
+
+

== tsconfig.base.json

+
+

This is shared between tsconfig.app.json and tsconfig.spec.json and it will be extended on each config file.

+
+
+
+
{
+  "compileOnSave": false,
+  "compilerOptions": {
+    "outDir": "./dist",
+    "sourceMap": true,
+    "declaration": false,
+    "moduleResolution": "node",
+    "emitDecoratorMetadata": true,
+    "experimentalDecorators": true,
+    "target": "es5",
+    "typeRoots": [
+      "node_modules/@types"
+    ],
+    "lib": [
+      "es2017",
+      "es2016",
+      "es2015",
+      "dom"
+    ]
+  },
+  "files": [
+    "electron-main.ts",
+    "src/polyfills.ts"
+  ],
+  "include": [
+    "src/**/*.d.ts"
+  ],
+  "exclude": [
+    "node_modules"
+  ]
+}
+
+
+
+
+

== tsconfig.serve.json

+
+

In the root, tsconfig.serve.json needs to be created. This typescript config file is going to be used when we serve electron:

+
+
+
+
{
+  "compilerOptions": {
+    "outDir": ".",
+    "sourceMap": true,
+    "declaration": false,
+    "moduleResolution": "node",
+    "emitDecoratorMetadata": true,
+    "experimentalDecorators": true,
+    "target": "es5",
+    "typeRoots": [
+      "node_modules/@types"
+    ],
+    "lib": [
+      "es2017",
+      "dom"
+    ]
+  },
+  "include": [
+    "electron-main.ts"
+  ],
+  "exclude": [
+    "node_modules",
+    "**/*.spec.ts"
+  ]
+}
+
+
+
+
+

Add Electron build configuration

+
+

In order to configure electron builds properly we need to create a new json on our application, let’s call it electron-builder.json. For more information and fine tuning please refer to the Electron Builder official documentation.

+
+
+

The contents of the file will be something similar to the following:

+
+
+
+
{
+  "productName": "devon4ngElectron",
+  "directories":{
+    "output": "./builder-release"
+  },
+  "win": {
+    "icon": "dist/assets/icons",
+    "target": [
+      "portable"
+    ]
+  },
+  "mac": {
+    "icon": "dist/assets/icons",
+    "target": [
+      "dmg"
+    ]
+  },
+  "linux": {
+    "icon": "dist/assets/icons",
+    "target": [
+      "AppImage"
+    ]
+  }
+}
+
+
+
+

There are two important things in this file:

+
+
+
    +
  1. +

    "output": this is where electron builder is going to build our application

    +
  2. +
  3. +

    "icon": in every OS possible there is an icon parameter, the route to the icon folder that will be created after building with angular needs to be used here. This will make it so the electron builder can find the icons and build.

    +
  4. +
+
+
+
+

Modify angular.json

+
+

angular.json has to be modified so the project is built inside /dist without an intermediate folder.

+
+
+
+
{
+  "architect": {
+    "build": {
+      "outputPath": "dist"
+    }
+  }
+}
+
+
+
+
+

Create the electron window in electron-main.ts

+
+

In order to use electron, a file needs to be created at the root of the application (electron-main.ts). This file will create a window with different settings checking if we are using --serve as an argument:

+
+
+
+
import { app, BrowserWindow } from 'electron';
+import * as path from 'path';
+import * as url from 'url';
+
+let win: any;
+const args: any = process.argv.slice(1);
+const serve: any = args.some((val) => val == '--serve');
+
+const createWindow:any = ()=>{
+  // Create the browser window.
+  win = new BrowserWindow({
+    fullscreen: true,
+    webPreferences: {
+      nodeIntegration: true,
+    }
+  });
+
+  if (serve) {
+    require('electron-reload')(__dirname, {
+      electron: require(`${__dirname}/node_modules/electron`)
+    });
+    win.loadURL('http://localhost:4200');
+  } else {
+    win.loadURL(
+      url.format({
+        pathname: path.join(__dirname, 'dist/index.html'),
+        protocol: 'file:',
+        slashes: true
+      })
+    );
+  }
+
+  if (serve) {
+    win.webContents.openDevTools();
+  }
+
+  // Emitted when the window is closed.
+  win.on('closed', () => {
+    // Dereference the window object, usually you would store window
+    // in an array if your app supports multi windows, this is the time
+    // when you should delete the corresponding element.
+    // tslint:disable-next-line:no-null-keyword
+    win = null;
+  });
+}
+
+try {
+  // This method will be called when Electron has finished
+  // initialization and is ready to create browser windows.
+  // Some APIs can only be used after this event occurs.
+  app.on('ready', createWindow);
+
+   // Quit when all windows are closed.
+  app.on('window-all-closed', () => {
+    // On OS X it is common for applications and their menu bar
+    // to stay active until the user quits explicitly with Cmd + Q
+    if (process.platform !==  'darwin') {
+      app.quit();
+    }
+  });
+
+   app.on('activate', () => {
+    // On OS X it's common to re-create a window in the app when the
+    // dock icon is clicked and there are no other windows open.
+    if (win == null) {
+      createWindow();
+    }
+  });
+} catch (e) {
+  // Catch Error
+  // throw e;
+}
+
+
+
+
+

Add the electron window and improve the package.json scripts

+
+

Inside package.json the electron window that will be transformed to electron-main.js when building needs to be added.

+
+
+
+
{
+  ....
+  "main": "electron-main.js",
+  "scripts": {...}
+  ....
+}
+
+
+
+

The scripts section in the package.json can be improved to avoid running too verbose commands. As a very complete example we can take a look at the My Thai Star scripts section and copy the lines useful in your project. In any case, we recommend adding at least the following lines:

+
+
+
+
  "scripts": {
+    "ng": "ng",
+    "start": "ng serve",
+    "build": "ng build",
+    "test": "ng test",
+    "lint": "ng lint",
+    "e2e": "ng e2e",
+    "electron:tsc": "tsc -p tsconfig.serve.json",
+    "electron:run": "npm run electron:tsc && ng build --base-href ./ && npx electron .",
+    "electron:serve": "npm run electron:tsc && npx electron . --serve",
+    "electron:pack": "npm run electron:tsc && electron-builder --dir --config electron-builder.json",
+    "electron:build": "npm run electron:tsc && electron-builder --config electron-builder.json build"
+  },
+
+
+
+

The electron: scripts do the following:

+
+
+
    +
  • +

    electron:tsc: Compiles electron TS files.

    +
  • +
  • +

    electron:run: Serves Angular app and runs electron.

    +
  • +
  • +

    electron:serve: Serves electron with an already running angular app (i.e. a ng serve command running on another terminal).

    +
  • +
  • +

    electron:pack: Packs electron app.

    +
  • +
  • +

    electron:build: Builds electron app.

    +
  • +
+
+
+
+

Add Electron to an Angular application using Nx CLI

+
+

Creating an Electron app is very easy and straight-forward if you are using Nx CLI. As a pre-requisite, you should already have an application in your Nx workspace which you want to run as a front-end in your Electron app. (You can follow this guide if you want to get started with Nx).

+
+
+

Follow the steps below to develop an Electron app in your Nx workspace:

+
+
+
+

Install nx-electron

+
+

Install nx-electron using the command:

+
+
+
+
  npm install -D nx-electron
+
+
+
+

This will add the packages electron and nx-electron as dev dependencies to your Nx workspace. This will help us generate our Electron app in the next step.

+
+
+
+

Generate your Electron app

+
+

Once you have installed nx-electron, you can generate your electron app using the command:

+
+
+
+
  nx g nx-electron:app <electron-app-name> --frontendProject=<frontend-app-name>
+
+
+
+

And that is it! You have generated your Electron app already. All the configuration files (tsconfig.*) are generated for you under <electron-app-name> in your Nx workspace.

+
+
+
+

Serving your app

+
+

You can use this command to serve your Electron app:

+
+
+
+
  nx run-many --target=serve --projects=<frontend-app-name>,<electron-app-name> --parallel
+
+
+
+

If you see a blank application, it is because the Electron app was served before the front-end was served. To avoid this, you can serve the front-end and back-end separately (that is, serve the back-end only after the front-end is served).

+
+
+
+

Building your app

+
+

The command for building your Electron app in Nx is similar to the serve command above, you only change the target from serve to build:

+
+
+
+
  nx run-many --target=build --projects=<frontend-app-name>,<electron-app-name> --parallel
+
+
+
+
+

Packaging your app

+
+

Make sure you have built your app before you try to package it using the following command:

+
+
+
+
  nx run <electron-app-name>:package [--options]
+
+
+
+

The options that can be passed can be found here.

+
+
+

You can find a working example of an Electron app in devon4ts-samples.

+
+ +
+
+

Angular Mock Service

+
+

We’ve all been there: A new idea comes, let’s quickly prototype it. But wait, there’s no back-end. What can we do?

+
+
+

Below you will find a solution that will get you started quickly and easily. The idea is to write a simple mock service that helps us by feeding data into our components.

+
+
+
+

The app we start with

+
+

Let’s say you have a simple boilerplate code, with your favorite styling library hooked up and you’re ready to go. The angular-material-basic-layout sample is a good starting place.

+
+
+
+

The Components

+
+

Components - are the building blocks of our application. Their main role is to enable fragments of user interfaces. They will either display data (a list, a table, a chart, etc.), or 'collect' user interaction (e.g: a form, a menu, etc.)

+
+
+

Components stay at the forefront of the application. They should also be reusable (as much as possible). Reusability is key for what we are trying to achieve - a stable, maintainable front-end where multiple people can contribute and collaborate.

+
+
+

In our project, we are at the beginning. That means we may have more ideas than plans. We are exploring possibilities. In order to code efficiently:
+1) We will not store mock data in the components.
+2) We will not fetch or save data directly in the components.

+
+ +
+
+

The Service

+
+

So, how do we get data in our app? How do we propagate the data to the components and how can we send user interaction from the components to the our data "manager" logic.

+
+
+

The answer to all these questions is an Angular Service (that we will just call a service from now on).

+
+
+

A service is an injectable logic that can be consumed by all the components that need it. It can carry manipulation functions and ,in our case, fetch data from a provider.

+
+
+
+Service Architecture +
+
Figure 62. Angular Components & Services architecture.
+
+
+

Inside the Angular App, an Injector gives access to each component to their required services. It’s good coding practice to use a distinct service for each data type you want to manipulate. The type is described in an interface.

+
+
+

Still, our ideas drive in different ways, so we have to stay flexible. We cannot use a database at the moment, but we want a way to represent data on screen, which can grow organically.

+
+ +
+
+

The Model

+
+
+Data Box +
+
Figure 63. Data box in relation to services and components.
+
+
+

Let’s consider a 'box of data' represented in JSON. Physically, this means a folder with some JSON/TS files in it. They are located in the app/mock folder. The example uses only one mock data file. The file is typed according to our data model.

+
+
+

Pro tip: separate your files based on purpose. In your source code, put the mock files in the mock folder, components in the components folder, services in the services folder and data models in the models folder.

+
+
+
+Project Structure +
+
Figure 64. Project structure.
+
+
+

Aligned with the Angular way of development, we are implementing a model-view-controller pattern.

+
+
+

The model is represented by the interfaces we make. These interfaces describe the data structures we will use in our application. In this example, there is one data model, corresponding with the 'type' of data that was mocked. In the models folder you will find the .ts script file that describes chemical elements. The corresponding mock file defines a set of chemical element objects, in accordance with our interface definition.

+
+
+
+

Use case

+
+

Enough with the theory, let’s see what we have here. The app presents 3 pages as follows:

+
+
+
    +
  • +

    A leader board with the top 3 elements

    +
  • +
  • +

    A data table with all the elements

    +
  • +
  • +

    A details page that reads a route parameter and displays the details of the element.

    +
  • +
+
+
+

There are a lot of business cases which have these requirements:

+
+
+
    +
  • +

    A leader board can be understood as "the most popular items in a set", "the latest updated items", "you favorite items" etc.

    +
  • +
  • +

    A data table with CRUD operations is very useful (in our case we only view details or delete an item, but they illustrate two important things: the details view shows how to navigate and consume a parametric route, the delete action shows how to invoke service operations over the loaded data - this means that the component is reusable and when the data comes with an API, only the service will need its implementation changed)

    +
  • +
+
+
+

Check out the angular-mock-service sample from the apps folder and easily get started with front-end development using dummy data.

+
+ +
+
+

Testing e2e with Cypress

+
+

This guide will cover the basics of e2e testing using Cypress.

+
+
+

Cypress is a framework “all in one” that provides the necessary libraries to write specific e2e tests, without the need of Selenium.

+
+
+

Why Cypress?

+
+
+
    +
  • +

    Uses JavaScript

    +
  • +
  • +

    It works directly with the browser so the compatibility with the front-end framework the project uses (in this case Angular) is not a problem.

    +
  • +
  • +

    Easy cross browser testing

    +
  • +
+
+
+
+

Setup

+
+

Install +First of all we need to install it, we can use npm install:

+
+
+
+
$ npm install -D cypress
+
+
+
+

Or we can install it with yarn:

+
+
+
+
$ yarn add -D cypress
+
+
+
+

We need to run Cypress in order to get the folder tree downloaded, then create a tsconfig.json file inside cypress folder to add the typescript configuration.

+
+
+
+
$ . /node_modules/.bin/cypress open
+
+
+
+
Listing 79. tsconfig.json
+
+
{
+  "compilerOptions": {
+    "strict": true,
+    "baseUrl": "../node_modules",
+    "target": "es5",
+    "lib": ["es5", "dom"],
+    "types": ["cypress"]
+  },
+  "include": [
+    "**/*.ts"
+  ]
+}
+
+
+
+

BaseUrl

+
+
+

Let’s setup the base URL so when we run the tests cypress will "navigate" to the right place, go to cypress.json on the root of the project.

+
+
+
Listing 80. cypress.json
+
+
{
+  "baseUrl": "http://localhost:4200"
+}
+
+
+
+
+

Files / Structure

+
+
+
/cypress
+  tsconfig.json
+  /fixtures
+    - example.json
+  /integration
+    - button.spec.ts
+    - test.spec.ts
+    /examples
+  /plugins
+    - index.js
+  /support
+    - commands.js
+    - index.js
+
+
+
+

tsconfig.json for typescript configuration.

+
+
+

fixtures to store our mock data or files (images, mp3…​) to use on our tests.

+
+
+

integration is where our tests go, by default it comes with an examples folder with tested samples.

+
+
+

plugins is where the configuration files of the plugins go.

+
+
+

support to add custom commands.

+
+
+
+

== =

+
+

If you are using Nx, it automatically generates a e2e cypress project for every project that you generate. So you already get the configuration files like tsconfig.json and cypress.json and also get the folder structure described above. This helps you focus more on writing your tests rather than setting up Cypress.

+
+
+
+

== =

+ +
+
+

Tests

+
+

The structure is the same as Mocha's.

+
+
+

First, we create a file, for example form.spec.ts, inside we define a context to group all our tests referred to the same subject.

+
+
+
Listing 81. form.spec.ts
+
+
context('Button page', () => {
+  beforeEach(() => {
+    cy.visit('/');
+  });
+  it('should have button',()=>{
+    cy.get('button').should('exist');
+  });
+  it('should contain PRESS',()=>{
+    cy.contains('button', 'PRESS');
+  });
+});
+
+
+
+
beforeEach
+

Visit '/' before every test.

+
+
+
it
+

Inside we write the test.

+
+
+

The result:

+
+
+
+contextImg +
+
+
+

For more info check Cypress documentation

+
+
+

On kitchensink +you can find an official cypress demo with all the commands being used.

+
+
+
+

Fixtures

+
+

We use fixtures to mock data, it can be a json, an image, video…​

+
+
+
+
{
+  "name": "Dummy name",
+  "phone": "999 99 99 99",
+  "body": "Mock data"
+}
+
+
+
+

You can store multiple mocks on the same fixture file.

+
+
+
+
{
+  "create":{"name": "e2etestBox"},
+  "boxFruit":{
+    "uuid":"3376339576e33dfb9145362426a33333",
+    "name":"e2etestBox",
+    "visibility":true,
+    "items":[
+      {"name":"apple","units":3},
+      {"name":"kiwi","units":2}
+    ]
+  }
+}
+
+
+
+

To access data we don’t need to import any file, we just call cy.fixture(filename) inside the **.spec.ts. We can name it as we want.

+
+
+
+
cy.fixture('box.json').as('fruitBox')
+
+
+
+

cy.fixture('box.json') we get access to box.json +.as(fruitBox) is used to create an alias (fruitBox) to the fixture.

+
+
+

For more info check Fixtures documentation

+
+
+
+

Request / Route

+
+

With cypress you can test your application with real data or with mocks.

+
+
+

Not using mocks guarantees that your tests are real e2e test but makes them vulnerable to external issues. +When you mock data you don’t know exactly if the data and the structure received from the backend is correct because you are forcing a mock on the response, but you can avoid external issues, run tests faster and have better control on the structure and status.

+
+
+

To get more information go to Testing Strategies

+
+
+
+

Route

+
+

Cypress can intercept a XHR request and interact with it.

+
+
+
+
cy.server();
+cy.route(
+  'GET',
+  '/apiUrl/list',
+  [{"name":"apple", "units":3},{"name":"kiwi", "units":2}]
+)
+
+
+
+

cy.server(options) start a server to interact with the responses.

+
+
+
cy.route(options) intercepts XMLHttpRequests
+
    +
  • +

    method GET

    +
  • +
  • +

    URL /apiUrl/list'

    +
  • +
  • +

    response [{"name":"apple", "units":3},{"name":"kiwi", "units":2}]

    +
  • +
+
+
+

Waits

+
+
+

Every cypress action has a default await time to avoid asynchronous issues, but this time can be short for some particular actions like API calls, for those cases we can use cy.wait().

+
+
+
+
cy.server();
+cy.route('/apiUrl/list').as('list');
+cy.visit('/boxList');
+cy.wait('@list');
+
+
+
+

You can find more information about cy.wait() here

+
+
+

To mock data with fixtures:

+
+
+
+
cy.fixture('box')
+  .then(({boxFruit}) => {
+    cy.route(
+      'GET',
+      '/apiUrl/list',
+      boxFruit
+    ).as('boxFruit');
+    cy.get('#button').click();
+    cy.wait('@boxFruit');
+    cy.get('#list').contains('apple');
+  })
+
+
+
+

We get boxFruit data from the box fixture and then we mock the API call with it so now the response of the call is boxFruit object. +When the button is clicked, it waits to receive the response of the call and then checks if the list contains one of the elements of the fruitBox.

+
+
+
+

Request

+
+

Make a HTTP request.

+
+
+
+
cy.server();
+cy.request('http://localhost:4200/')
+  .its('body')
+  .should('include', '<h1>Welcome to Devon4ngAngularElementsTest!</h1>');
+
+
+
+

If we have 'http://localhost:4200' as baseUrl on cypress.json

+
+
+
+
cy.server();
+cy.request('/')
+  .its('body')
+  .should('include', '<h1>Welcome to Devon4ngAngularElementsTest!</h1>');
+// Goes to http://localhost:4200/
+
+
+
+

We can add other options, like we can send the body of a form.

+
+
+
+
cy.server();
+cy.request({
+  method: 'POST',
+  url: '/send',
+  form: true,
+  body: {
+    name: 'name task',
+    description: 'description of the task'
+  }
+});
+
+
+
+
+

Custom commands

+
+

If you see yourself writing the same test more than once (login is a common one), you can create a custom command to make things faster.

+
+
+

Cypress.Commands.add('name', () => {}) to create the test.

+
+
+
Listing 82. commands.ts
+
+
Cypress.Commands.add('checkPlaceholder', (name) => {
+  cy.get(`[name='${name}']`).click();
+  cy.get('mat-form-field.mat-focused').should('exist');
+});
+
+
+
+
index.ts
+

To use the commands we need to import the files on support/index.ts

+
+
+
Listing 83. index.ts
+
+
import './commands'
+import './file1'
+import './folder/file2'
+
+
+
+

index.ts is where all our custom commands files unite so Cypress knows where to find them.

+
+
+

And as we are using typescript we need to define a namespace, interface and define our function.

+
+
+
    +
  • +

    index.d.ts

    +
  • +
+
+
+
+
declare namespace Cypress {
+  interface Chainable<Subject> {
+    checkPlaceholder(name:string):Chainable<void>
+  }
+}
+
+
+ +
+
+

Cross browser testing

+
+

By default the browser used by Cypress is Chrome, it has compatibility with its family of browsers (including Microsoft Edge) and has beta support for Mozilla Firefox.

+
+
+

To change the browser on the panel we can do it by selecting the desired one on the browsers tab before running the spec file.

+
+
+

Cypress will detect and display, except electron, only the browsers that you have already installed on your machine.

+
+
+
+browserTab +
+
+
+

Once the browser is selected, you can run your tests.

+
+
+

To change the browser on the automatic test run, you can add a flag on the node command

+
+
+
+
cypress run --browser edge
+
+
+
+

Only if we use the cypress run command.

+
+
+

Or we can change the script file.

+
+
+
    +
  • +

    cypress/script.js

    +
  • +
+
+
+
+
const runTests= async ()=>{
+  ...
+  const {totalFailed} = await cypress.run({browser:'edge'});
+  ...
+};
+
+
+ +
+
+

Viewport

+
+

Cypress allows us to create tests depending on the Viewport, so we can test responsiveness.

+
+
+

There are different ways to use it:

+
+
+

Inside a test case

+
+
+
+
it('should change title when viewport is less than 320px', ()=>{
+  cy.get('.title-l').should('be.visible');
+  cy.get('.title-s').should('not.be.visible');
+  cy.viewport(320, 480);
+  cy.get('.title-l').should('not.be.visible');
+  cy.get('.title-s').should('be.visible');
+})
+
+
+
+

Passing the configuration as an option

+
+
+
+
describe('page display on medium size screen', {
+  viewportHeight: 1000,
+  viewportWidth: 400
+}, () => {
+  ...
+})
+
+
+
+

Or we can set a default

+
+
+
    +
  • +

    cypress.json

    +
  • +
+
+
+
+
...
+{
+ "viewportHeight": 1000,
+ "viewportWidth": 400
+}
+...
+
+
+ +
+
+

Test retries

+
+

We can get false negatives intermittently due external issues that can affect our tests, because of that we can add, in the configuration, a retries entry so Cypress can run again a certain failed test the selected number of times to verify that the error is real.

+
+
+

We can set retries for run or open mode.

+
+
+
    +
  • +

    cypress.json

    +
  • +
+
+
+
+
...
+"retries": {
+    "runMode": 3,
+    "openMode": 3
+  }
+...
+
+
+
+

The retries can be configured on the cypress.json or directly on a specific test.

+
+
+
+
it('should get button', {
+  retries: {
+    runMode: 2,
+    openMode: 2
+  }
+}, () => {
+  ...
+})
+
+
+
+

These retries are not shown on the test log.

+
+
+

Check more on retries documentation

+
+
+
+

Reporter

+
+

The tests results appear on the terminal, but to have a more friendly view we can add a reporter.

+
+
+
+reporter +
+
+
+
+

Mochawesome

+
+

In this case we are going to use Mochawesome; initially it's a Mocha reporter but as Cypress uses Mocha it works the same.

+
+
+

Install

+
+
+

npm

+
+
+
+
npm install --save-dev mochawesome
+
+
+
+

yarn

+
+
+
+
yarn add -D mochawesome
+
+
+
+

To run the reporter:

+
+
+
+
cypress run --reporter mochawesome
+
+
+
+

Mochawesome saves by default the generated files on `./mochawesome-report/` but we can add options to change this behavior.

+
+
+

Options can be passed to the reporter in two ways

+
+
+

Using a flag

+
+
+
+
cypress run --reporter mochawesome --reporter-options reportDir=report
+
+
+
+

Or on cypress.json

+
+
+
+
{
+  "baseUrl": "http://localhost:4200",
+  "reporter": "mochawesome",
+  "reporterOptions": {
+    "overwrite": false,
+    "html": false,
+    "json": true,
+    "reportDir": "cypress/report"
+  }
+}
+
+
+
+

Overwrite:false to not overwrite every **:spec.ts test report, we want them to create a merged version later.

+
+
+

reportDir to set a custom directory.

+
+
+

html:false because we don’t need it.

+
+
+

json:true to save them on json.

+
+
+

Mochawesome only creates the html file of the last .spec.ts file that the tests run, that’s why we don’t generate html reports directly, in order to stack them all on the same final html we need to merge the reports.

+
+ +
+

mochawesome-merge

+
+
+

Mochawesome-merge is a library that helps us to merge the different json.

+
+
+

npm

+
+
+
+
npm install --save-dev mochawesome-merge
+npm install --save-dev mochawesome-report-generator
+
+
+
+

yarn

+
+
+
+
yarn add -D mochawesome-merge
+yarn add -D mochawesome-report-generator
+
+
+
+

To merge the files we execute this command:

+
+
+
+
mochawesome-merge cypress/report/*.json > cypress/reportFinal.json
+
+
+
+

reportFinal.json is the result of this merge; with that we have the data of all the spec files in one json.

+
+
+

We can also automate the test, merge and conversion to html using a script.

+
+
+
+
const cypress = require('cypress');
+const fse = require('fs-extra');
+const { merge } = require('mochawesome-merge');
+const generator = require('mochawesome-report-generator');
+const runTests= async ()=>{
+  await fse.remove('mochawesome-report');
+  await fse.remove('cypress/report');
+  const {totalFailed} = await cypress.run();
+  const reporterOptions = {
+    files: ["cypress/report/*.json"]
+  };
+  await generateReport(reporterOptions);
+  if(totalFailed !==  0){
+    process.exit(2);
+  };
+};
+const generateReport = (options)=> {
+  return merge(options).then((jsonReport)=>{
+    generator.create(jsonReport).then(()=>{
+      process.exit();
+    });
+  });
+};
+runTests();
+
+
+
+

fse.remove() to remove older reports data.

+
+
+

cypress.run() to run the tests.

+
+
+

merge(options) we merge the json output from running the tests.

+
+
+

generator.create(jsonReport) then we generate the html view of the report.

+
+ +
+

On kitchensink +you can find an official cypress demo with all the commands being used.

+
+ +
+
+

Angular ESLint support

+
+ + + + + +
+ + +ESLint is supported in Angular 10.1.0 onward. +
+
+
+
+

What about TSLint?

+
+

TSLint is a fantastic tool. It is a linter that was written specifically to work based on the TypeScript AST format. This has advantages and disadvantages, as with most decisions we are faced with in software engineering!

+
+
+

One advantage is there is no tooling required to reconcile differences between ESLint and TypeScript AST formats, but the major disadvantage is that the tool is therefore unable to reuse any of the previous work which has been done in the JavaScript ecosystem around linting, and it has to re-implement everything from scratch. Everything from rules to auto-fixing capabilities and more.

+
+
+

However, the backers behind TSLint announced in 2019 that they would be deprecating TSLint in favor of supporting typescript-eslint in order to benefit the community. You can read more about that here

+
+
+

The TypeScript Team themselves also announced their plans to move the TypeScript codebase from TSLint to typescript-eslint, and they have been big supporters of this project. More details at https://github.com/microsoft/TypeScript/issues/30553

+
+
+

Angular ESLint support comes from the angular-eslint tooling package. Angular documentation also links to this repository as you can check in the ng lint section of the Angular CLI documentation.

+
+
+
+

Quick start with Angular and ESLint

+
+

In order to create a brand new Angular CLI workspace which uses ESLint instead of TSLint and Codelyzer, simply run the following commands:

+
+
+
+
##Install the Angular CLI and @angular-eslint/schematics globally however you want (e.g. npm, yarn, volta etc)
+
+$ npm i -g @angular/cli @angular-devkit/core @angular-devkit/schematics @angular-eslint/schematics
+
+##Create a new Angular CLI workspace using the @angular-eslint/schematics collection (instead of the default)
+
+$ ng new --collection=@angular-eslint/schematics
+
+
+
+
+

Migrating an Angular CLI project from Codelyzer and TSLint

+ +
+
+

1 - Add relevant dependencies

+
+

The first step is to run the schematic to add @angular-eslint to your project:

+
+
+
+
$ ng add @angular-eslint/schematics
+
+
+
+

This will handle installing the latest version of all the relevant packages for you and adding them to the devDependencies of your package.json.

+
+
+
+

2 - Run the convert-tslint-to-eslint schematic on a project

+
+

The next thing to do is consider which "project" you want to migrate to use ESLint. If you have a single application in your workspace you will likely have just a single entry in the projects configuration object within your angular.json file. If you have a `projects/` directory in your workspace, you will have multiple entries in your projects configuration and you will need to choose which one you want to migrate using the convert-tslint-to-eslint schematic.

+
+
+

You can run it like so:

+
+
+
+
$ ng g @angular-eslint/schematics:convert-tslint-to-eslint {{YOUR_PROJECT_NAME_GOES_HERE}}
+
+
+
+

From now on, ng lint will use ESLint!

+
+
+
+

3 - Remove root TSLint configuration and use only ESLint

+
+

Once you are happy with your ESLint setup, you simply need to remove the root-level tslint.json and potentially uninstall TSLint and any TSLint-related plugins/dependencies if your Angular CLI workspace is now no longer using TSLint at all.

+
+ +
+
+
+
+
+
+1. A package is a file or directory that is described by a package.json. +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/meta-architecture.html b/docs/devon4ts/1.0/angular/meta-architecture.html new file mode 100644 index 00000000..adc483a4 --- /dev/null +++ b/docs/devon4ts/1.0/angular/meta-architecture.html @@ -0,0 +1,675 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Meta Architecture

+
+ +
+
+
+

Introduction

+
+ +
+
+
+

Purpose of this document

+
+
+

In our business applications, the client easily gets underestimated. Sometimes the client is more complex to develop and design than the server. While the server architecture is nowadays easy to agree on as common sense, for clients this is not as obvious and stable, especially as it typically depends on the client framework used. Finding a concrete architecture applicable for all clients may therefore be difficult to accomplish.

+
+
+

This document tries to define on a high abstract level, a reference architecture which is supposed to be a mental image and frame for orientation regarding the evaluation and appliance of different client frameworks. As such it defines terms and concepts required to be provided for in any framework and thus gives a common ground of understanding for those acquainted with the reference architecture. This allows better comparison between the various frameworks out there, each having their own terms for essentially the same concepts. It also means that for each framework we need to explicitly map how it implements the concepts defined in this document.

+
+
+

The architecture proposed herein is neither new nor was it developed from scratch. Instead it is the gathered and consolidated knowledge and best practices of various projects (s. References).

+
+
+
+
+

Goal of the Client Architecture

+
+
+

The goal of the client architecture is to support the non-functional requirements for the client, i.e. mostly maintainability, scalability, efficiency and portability. As such it provides a component-oriented architecture following the same principles listed already in the devonfw architecture overview. Furthermore it ensures a homogeneity regarding how different concrete UI technologies are being applied in the projects, solving the common requirements in the same way.

+
+
+
+
+

Architecture Views

+
+
+

As for the server we distinguish between the business and the technical architecture. Where the business architecture is different from project to project and relates to the concrete design of dialog components given concrete requirements, the technical architecture can be applied to multiple projects.

+
+
+

The focus of this document is to provide a technical reference architecture on the client on a very abstract level defining required layers and components. How the architecture is implemented has to be defined for each UI technology.

+
+
+

The technical infrastructure architecture is out of scope for this document and although it needs to be considered, the concepts of the reference architecture should work across multiple TI architecture, i.e. native or web clients.

+
+
+
+
+

devonfw Reference Client Architecture

+
+
+

The following gives a complete overview of the proposed reference architecture. It will be built up incrementally in the following sections.

+
+
+
+Complete Client Architecture Overview +
+
+
+

Figure 1 Overview

+
+
+
+
+

Client Architecture

+
+
+

On the highest level of abstraction we see the need to differentiate between dialog components and their container they are managed in, as well as the access to the application server being the back-end for the client (e.g. an devon4j instance). This section gives a summary of these components and how they relate to each other. Detailed architectures for each component will be supplied in subsequent sections

+
+
+
+Client Architecture Overview +
+
+
+

Figure 2 Overview of Client Architecture

+
+
+
+
+

== Dialog Component

+
+
+

A dialog component is a logical, self-contained part of the user interface. It accepts user input and actions and controls communication with the user. Dialog components use the services provided by the dialog container in order to execute the business logic. They are self-contained, i.e. they possess their own user interface together with the associated logic, data and states.

+
+
+
    +
  • +

    Dialog components can be composed of other dialog components forming a hierarchy

    +
  • +
  • +

    Dialog components can interact with each other. This includes communication of a parent to its children, but also between components independent of each other regarding the hierarchy.

    +
  • +
+
+
+
+
+

== Dialog Container

+
+
+

Dialog components need to be managed in their life-cycle and how they can be coupled to each other. The dialog container is responsible for this along with the following:

+
+
+
    +
  • +

    Bootstrapping the client application and environment

    +
    +
      +
    • +

      Configuration of the client

      +
    • +
    • +

      Initialization of the application server access component

      +
    • +
    +
    +
  • +
  • +

    Dialog Component Management

    +
    +
      +
    • +

      Controlling the life-cycle

      +
    • +
    • +

      Controlling the dialog flow

      +
    • +
    • +

      Providing means of interaction between the dialogs

      +
    • +
    • +

      Providing application server access

      +
    • +
    • +

      Providing services to the dialog components
      +(e.g. printing, caching, data storage)

      +
    • +
    +
    +
  • +
  • +

    Shutdown of the application

    +
  • +
+
+
+
+
+

== Application Server Access

+
+
+

Dialogs will require a back-end application server in order to execute their business logic. Typically in a devonfw application the service layer will provide interfaces for the functionality exposed to the client. These business oriented interfaces should also be present on the client backed by a proxy handling the concrete call of the server over the network. This component provides the set of interfaces as well as the proxy.

+
+
+
+
+

Dialog Container Architecture

+
+
+

The dialog container can be further structured into the following components with their respective tasks described in own sections:

+
+
+
+Dialog Container Architecture Overview +
+
+
+

Figure 3 Dialog Container Architecture

+
+
+
+
+

== Application

+
+
+

The application component represents the overall client in our architecture. It is responsible for bootstrapping all other components and connecting them with each other. As such it initializes the components below and provides an environment for them to work in.

+
+
+
+
+

== Configuration Management

+
+
+

The configuration management manages the configuration of the client, so the client can be deployed in different environments. This includes configuration of the concrete application server to be called or any other environment-specific property.

+
+
+
+
+

== Dialog Management

+
+
+

The Dialog Management component provides the means to define, create and destroy dialog components. It therefore offers basic life-cycle capabilities for a component. In addition it also allows composition of dialog components in a hierarchy. The life-cycle is then managed along the hierarchy, meaning when creating/destroying a parent dialog, this affects all child components, which are created/destroyed as well.

+
+
+
+
+

== Service Registry

+
+
+

Apart from dialog components, a client application also consists of services offered to these. A service can thereby encompass among others:

+
+
+
    +
  • +

    Access to the application server

    +
  • +
  • +

    Access to the dialog container functions for managing dialogs or accessing the configuration

    +
  • +
  • +

    Dialog independent client functionality such as Printing, Caching, Logging, Encapsulated business logic such as tax calculation

    +
  • +
  • +

    Dialog component interaction

    +
  • +
+
+
+

The service registry offers the possibility to define, register and lookup these services. Note that these services could be dependent on the dialog hierarchy, meaning different child instances could obtain different instances / implementations of a service via the service registry, depending on which service implementations are registered by the parents.

+
+
+

Services should be defined as interfaces allowing for different implementations and thus loose coupling.

+
+
+
+
+

Dialog Component Architecture

+
+
+

A dialog component has to support all or a subset of the following tasks:
+(T1) Displaying the user interface incl. internationalization
+(T2) Displaying business data incl. changes made to the data due to user interactions and localization of the data
+(T3) Accepting user input including possible conversion from e.g. entered Text to an Integer
+(T4) Displaying the dialog state
+(T5) Validation of user input
+(T6) Managing the business data incl. business logic altering it due to user interactions
+(T7) Execution of user interactions
+(T8) Managing the state of the dialog (e.g. Edit vs. View)
+(T9) Calling the application server in the course of user interactions

+
+
+

Following the principle of separation of concerns, we further structure a dialog component in an own architecture allowing us the distribute responsibility for these tasks along the defined components:

+
+
+
+Dialog Component Architecture +
+
+
+

Figure 4 Overview of dialog component architecture

+
+
+
+
+

== Presentation Layer

+
+
+

The presentation layer generates and displays the user interface, accepts user input and user actions and binds these to the dialog core layer (T1-5). The tasks of the presentation layer fall into two categories:

+
+
+
    +
  • +

    Provision of the visual representation (View component)
    +The presentation layer generates and displays the user interface and accepts user input and user actions. The logical processing of the data, actions and states is performed in the dialog core layer. The data and user interface are displayed in localized and internationalized form.

    +
  • +
  • +

    Binding of the visual representation to the dialog core layer
    +The presentation layer itself does not contain any dialog logic. The data or actions entered by the user are then processed in the dialog core layer. There are three aspects to the binding to the dialog core layer. We refer to “data binding”, “state binding” and “action binding”. Syntactical and (to a certain extent) semantic validations are performed during data binding (e.g. cross-field plausibility checks). Furthermore, the formatted, localized data in the presentation layer is converted into the presentation-independent, neutral data in the dialog core layer (parsing) and vice versa (formatting).

    +
  • +
+
+
+
+
+

== Dialog Core Layer

+
+
+

The dialog core layer contains the business logic, the control logic, and the logical state of the dialog. It therefore covers tasks T5-9:

+
+
+
    +
  • +

    Maintenance of the logical dialog state and the logical data
    +The dialog core layer maintains the logical dialog state and the logical data in a form which is independent of the presentation. The states of the presentation (e.g. individual widgets) must not be maintained in the dialog core layer, e.g. the view state could lead to multiple presentation states disabling all editable widgets on the view.

    +
  • +
  • +

    Implementation of the dialog and dialog control logic
    +The component parts in the dialog core layer implement the client specific business logic and the dialog control logic. This includes, for example, the manipulation of dialog data and dialog states as well as the opening and closing of dialogs.

    +
  • +
  • +

    Communication with the application server
    +The dialog core layer calls the interfaces of the application server via the application server access component services.

    +
  • +
+
+
+

The dialog core layer should not depend on the presentation layer enforcing a strict layering and thus minimizing dependencies.

+
+
+
+
+

== Interactions between dialog components

+
+
+

Dialog components can interact in the following ways:

+
+
+
+Dialog Interactions +
+
+
+
    +
  • +

    Embedding of dialog components
    +As already said dialog components can be hierarchically composed. This composition works by embedding one dialog component within the other. Apart from the life-cycle managed by the dialog container, the embedding needs to provide for the visual embedding of the presentation and core layer.

    +
    +
      +
    • +

      Embedding dialog presentation
      +The parent dialog needs to either integrate the embedded dialog in its layout or open it in its own modal window.

      +
    • +
    • +

      Embedding dialog core
      +The parent dialog needs to be able to access the embedded instance of its children. This allows initializing and changing their data and states. On the other hand the children might require context information offered by the parent dialog by registering services in the hierarchical service registry.

      +
    • +
    +
    +
  • +
  • +

    Dialog flow
    +Apart from the embedding of dialog components representing a tight coupling, dialogs can interact with each other by passing the control of the UI, i.e. switching from one dialog to another.

    +
  • +
+
+
+

When interacting, dialog components should interact only between the same or lower layers, i.e. the dialog core should not access the presentation layer of another dialog component.

+
+
+
+
+

Appendix

+
+ +
+
+
+

Notes about Quasar Client

+
+
+

The Quasar client architecture as the consolidated knowledge of our CSD projects is the major source for the above drafted architecture. However, the above is a much simplified and more agile version thereof:

+
+
+
    +
  • +

    Quasar Client tried to abstract from the concrete UI library being used, so it could decouple the business from the technical logic of a dialog. The presentation layer should be the only one knowing the concrete UI framework used. This level of abstraction was dropped in this reference architecture, although it might of course still make sense in some projects. For fast-moving agile projects in the web however introducing such a level of abstraction takes effort with little gained benefits. With frameworks like Angular 2 we would even introduce one additional seemingly artificial and redundant layer, since it already separates the dialog core from its presentation.

    +
  • +
  • +

    In the past and in the days of Struts, JSF, etc. the concept of session handling was important for the client since part of the client was sitting on a server with a session relating it to its remote counterpart on the users PC. Quasar Client catered for this need, by very prominently differentiating between session and application in the root of the dialog component hierarchy. However, in the current days of SPA applications and the lowered importance of servers-side web clients, this prominent differentiation was dropped. When still needed the referenced documents will provide in more detail how to tailor the respective architecture to this end.

    +
  • +
+
+
+
+ +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/angular/services-layer.html b/docs/devon4ts/1.0/angular/services-layer.html new file mode 100644 index 00000000..a50273c8 --- /dev/null +++ b/docs/devon4ts/1.0/angular/services-layer.html @@ -0,0 +1,558 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Services Layer

+
+
+

The services layer is more or less what we call 'business logic layer' on the server side. +It is the layer where the business logic is placed. +The main challenges are:

+
+
+
    +
  • +

    Define application state and an API for the components layer to use it

    +
  • +
  • +

    Handle application state transitions

    +
  • +
  • +

    Perform back-end interaction (XHR, WebSocket, etc.)

    +
  • +
  • +

    Handle business logic in a maintainable way

    +
  • +
  • +

    Configuration management

    +
  • +
+
+
+

All parts of the services layer are described in this chapter. +An example which puts the concepts together can be found at the end Interaction of Smart Components through the services layer.

+
+
+
+
+

Boundaries

+
+
+

There are two APIs for the components layer to interact with the services layer:

+
+
+
    +
  • +

    A store can be subscribed to for receiving state updates over time

    +
  • +
  • +

    A use case service can be called to trigger an action

    +
  • +
+
+
+

To illustrate the fact the following figure shows an abstract overview.

+
+
+
+Smart and Dumb Components Interaction +
+
Figure 1. Boundaries to components layer
+
+
+
+
+

Store

+
+
+

A store is a class which defines and handles application state with its transitions over time. +Interaction with a store is always synchronous. +A basic implementation using RxJS can look like this.

+
+
+ + + + + +
+ + +A more profound implementation taken from a real-life project can be found here (Abstract Class Store). +
+
+
+
Listing 1. Store defined using RxJS
+
+
@Injectable()
+export class ProductSearchStore {
+
+  private stateSource = new BehaviorSubject<ProductSearchState>(defaultProductSearchState);
+  state$ = this.stateSource.asObservable();
+
+  setLoading(isLoading: boolean) {
+    const currentState = this.stateSource.getValue();
+    this.stateSource.next({
+      isLoading: isLoading,
+      products: currentState.products,
+      searchCriteria: currentState.searchCriteria
+    });
+  }
+
+}
+
+
+
+

In the example ProductSearchStore handles state of type ProductSearchState. +The public API is the property state$ which is an observable of type ProductSearchState. +The state can be changed with method calls. +So every desired change to the state needs to be modeled with a method. +In reactive terminology this would be an Action. +The store does not use any services. +Subscribing to the state$ observable leads to the subscribers receiving every new state.

+
+
+

This is basically the Observer Pattern:
+The store consumer registers itself to the observable via state$.subscribe() method call. +The first parameter of subscribe() is a callback function to be called when the subject changes. +This way the consumer - the observer - is registered. +When next() is called with a new state inside the store, all callback functions are called with the new value. +So every observer is notified of the state change. +This equals the Observer Pattern push type.

+
+
+

A store is the API for Smart Components to receive state from the service layer. +State transitions are handled automatically with Smart Components registering to the state$ observable.

+
+
+
+
+

Use Case Service

+
+
+

A use case service is a service which has methods to perform asynchronous state transitions. +In reactive terminology this would be an Action of Actions - a thunk (redux) or an effect (@ngrx).

+
+
+
+Use Case Service +
+
Figure 2. Use case services are the main API to trigger state transitions
+
+
+

A use case service's method - an action - interacts with adapters, business services and stores. +So use case services orchestrate whole use cases. +For an example see use case service example.

+
+
+
+
+

Adapter

+
+
+

An adapter is used to communicate with the back-end. +This could be a simple XHR request, a WebSocket connection, etc. +An adapter is simple in the way that it does not add anything other than the pure network call. +So there is no caching or logging performed here. +The following listing shows an example.

+
+
+

For further information on back-end interaction see Consuming REST Services

+
+
+
Listing 2. Calling the back-end via an adapter
+
+
@Injectable()
+export class ProducsAdapter {
+
+  private baseUrl = environment.baseUrl;
+
+  constructor(private http: HttpClient) { }
+
+  getAll(): Observable<Product[]> {
+    return this.http.get<Product[]>(this.baseUrl + '/products');
+  }
+
+}
+
+
+
+
+
+

Interaction of Smart Components through the services layer

+
+
+

The interaction of smart components is a classic problem which has to be solved in every UI technology. +It is basically how one dialog tells the other something has changed.

+
+
+

An example is adding an item to the shopping basket. +With this action there need to be multiple state updates.

+
+
+
    +
  • +

    The small logo showing how many items are currently inside the basket needs to be updated from 0 to 1

    +
  • +
  • +

    The price needs to be recalculated

    +
  • +
  • +

    Shipping costs need to be checked

    +
  • +
  • +

    Discounts need to be updated

    +
  • +
  • +

    Ads need to be updated with related products

    +
  • +
  • +

    etc.

    +
  • +
+
+
+
+
+

Pattern

+
+
+

To handle this interaction in a scalable way we apply the following pattern.

+
+
+
+Interaction of Smart Components via services layer +
+
Figure 3. Smart Component interaction
+
+
+

The state of interest is encapsulated inside a store. All Smart Components interested in the state have to subscribe to the store’s API served by the public observable. Thus, with every update to the store the subscribed components receive the new value. The components basically react to state changes. Altering a store can be done directly if the desired change is synchronous. Most actions are of asynchronous nature so the UseCaseService comes into play. Its actions are void methods, which implement a use case, e.g., adding a new item to the basket. It calls asynchronous actions and can perform multiple store updates over time.

+
+
+

To put this pattern into perspective the UseCaseService is a programmatic alternative to redux-thunk or @ngrx/effects. The main motivation here is to use the full power of TypeScript --strictNullChecks and to let the learning curve not to become as steep as it would be when learning a new state management framework. This way actions are just void method calls.

+
+
+
+
+

Example

+
+
+
+Smart component interaction example +
+
Figure 4. Smart Components interaction example
+
+
+

The example shows two Smart Components sharing the FlightSearchState by using the FlightSearchStore. +The use case shown is started by an event in the Smart Component FlightSearchComponent. The action loadFlight() is called. This could be submitting a search form. +The UseCaseService is FlightSearchService, which handles the use case Load Flights.

+
+
+
UseCaseService example
+

+
+
+
+
export class FlightSearchService {
+
+  constructor(
+    private flightSearchAdapter: FlightSearchAdapter,
+    private store: FlightSearchStore
+  ) { }
+
+  loadFlights(criteria: FlightSearchCriteria): void {
+    this.store.setLoadingFlights(true);
+    this.store.clearFlights();
+
+    this.flightSearchAdapter.getFlights(criteria.departureDate,
+        {
+          from: criteria.departureAirport,
+          to: criteria.destinationAirport
+        })
+      .finally(() => this.store.setLoadingFlights(false))
+      .subscribe((result: FlightTo[]) => this.store.setFlights(result, criteria));
+  }
+
+}
+
+
+
+

First the loading flag is set to true and the current flights are cleared. This leads to the Smart Component showing a spinner indicating the loading action. Then the asynchronous XHR is triggered by calling the adapter. After completion the loading flag is set to false causing the loading indication no longer to be shown. If the XHR was successful, the data would be put into the store. If the XHR was not successful, this would be the place to handle a custom error. All general network issues should be handled in a dedicated class, e.g., an interceptor. So for example the basic handling of 404 errors is not done here.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/general/architecture.html b/docs/devon4ts/1.0/general/architecture.html new file mode 100644 index 00000000..1b188b5b --- /dev/null +++ b/docs/devon4ts/1.0/general/architecture.html @@ -0,0 +1,386 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Architecture

+
+
+

The following principles and guidelines are based on Angular Style Guide - especially Angular modules (see Angular Docs). +It extends those where additional guidance is needed to define an architecture which is:

+
+
+
    +
  • +

    maintainable across applications and teams

    +
  • +
  • +

    easy to understand, especially when coming from a classic Java/.Net perspective - so whenever possible the same principles apply both to the server and the client

    +
  • +
  • +

    pattern based to solve common problems

    +
  • +
  • +

    based on best of breed solutions coming from open source and Capgemini project experiences

    +
  • +
  • +

    gives as much guidance as necessary and as little as possible

    +
  • +
+
+
+
+
+

Overview

+
+
+

When using Angular the web client architecture is driven by the framework and reflects the way Google and the Angular community think about web client architecture. +Angular gives an opinion on how to look at architecture. +It is component-based like devon4j but uses different terms which are common language in web application development. +The important term is module which is used instead of component. The primary reason is the naming collision with the Web Components standard (see Web Components).
+To clarify this:

+
+
+
    +
  • +

    A component describes an UI element containing HTML, CSS and JavaScript - structure, design and logic encapsulated inside a reusable container called component.

    +
  • +
  • +

    A module describes an applications feature area. The application flight-app may have a module called booking.

    +
  • +
+
+
+

An application developed using Angular consists of multiple modules. +There are feature modules and special modules described by the Angular Style Guide - core and shared. +Angular or Angular Style Guide give no guidance on how to structure a module internally. +This is where this architecture comes in.

+
+
+
+
+

Layers

+
+
+

The architecture describes two layers. The terminology is based on common language in web development.

+
+
+
+Architecture - Layers +
+
Figure 1. Layers
+
+
+
    +
  • +

    Components Layer encapsulates components which present the current application state. +Components are separated into Smart and Dumb Components. +The only logic present is view logic inside Smart Components.

    +
  • +
  • +

    Services Layer is more or less what we call 'business logic layer' on the server side. +The layer defines the application's state, the transitions between state and classic business logic. +Stores contain application state over time, to which Smart Components subscribe. +Adapters are used to perform XHR, WebSocket connections, etc. +The business model is described inside the module. +Use case services perform business logic needed for use cases. +A use case service interacts with the store and adapters. +Methods of use case services are the API for Smart Components. +Those methods are Actions in reactive terminology.

    +
  • +
+
+
+
+
+

Modules

+
+
+

Angular requires a module called app which is the main entrance to an application at runtime - this module gets bootstrapped. +Angular Style Guide defines feature modules and two special modules - core and shared.

+
+
+
+Architecture - Modules +
+
Figure 2. Modules
+
+
+

A feature module is basically a vertical cut through both layers. +The shared module consists of components shared across feature modules. +The core module holds services shared across modules. +So core module is a module only having a services layer +and shared module is a module only having a components layer.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/general/components-layer.html b/docs/devon4ts/1.0/general/components-layer.html new file mode 100644 index 00000000..a85ee633 --- /dev/null +++ b/docs/devon4ts/1.0/general/components-layer.html @@ -0,0 +1,470 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Components Layer

+
+
+

The components layer encapsulates all components presenting the current application view state, which means data to be shown to the user. +The term component refers to a component described by the standard Web Components. +So this layer has all Angular components, directives and pipes defined for an application. +The main challenges are:

+
+
+
    +
  • +

    how to structure the components layer (see File Structure Guide)

    +
  • +
  • +

    decompose components into maintainable chunks (see Component Decomposition Guide)

    +
  • +
  • +

    handle component interaction

    +
  • +
  • +

    manage calls to the services layer

    +
  • +
  • +

    apply a maintainable data and event flow throughout the component tree

    +
  • +
+
+
+
+
+

Smart and Dumb Components

+
+
+

The architecture applies the concept of Smart and Dumb Components (syn. Containers and Presenters). +The concept means that components are divided into Smart and Dumb Components.

+
+
+

A Smart Component typically is a top-level dialog inside the component tree.

+
+
+
    +
  • +

    a component, that can be routed to

    +
  • +
  • +

    a modal dialog

    +
  • +
  • +

    a component, which is placed inside AppComponent

    +
  • +
+
+
+

A Dumb Component can be used by one to many Smart Components. +Inside the component tree a Dumb Component is a child of a Smart Component.

+
+
+
+Component Tree +
+
Figure 1. Component tree example
+
+
+

As shown the topmost component is always the AppComponent in Angular applications. +The component tree describes the hierarchy of components starting from AppComponent. +The figure shows Smart Components in blue and Dumb Components in green. +AppComponent is a Smart Component by definition. +Inside the template of AppComponent placed components are static components inside the component tree. +So they are always displayed. +In the example OverviewComponent and DetailsComponent are rendered by the Angular compiler depending on the current URL the application displays. +So OverviewComponent's sub-tree is displayed if the URL is /overview and DetailsComponent's sub-tree is displayed if the URL is /details. +To clarify this distinction further the following table shows the main differences.

+
+
+
Smart vs Dumb Components
+

|== = +|Smart Components |Dumb Components

+
+
+

|contain the current view state +|show data via binding (@Input) and contain no view state

+
+
+

|handle events emitted by Dumb Components +|pass events up the component tree to be handled by Smart Components (@Output)

+
+
+

|call the services layer +|never call the services layer

+
+
+

|use services +|do not use services

+
+
+

|consists of n Dumb Components +|is independent of Smart Components +|== =

+
+
+
+
+

Interaction of Smart and Dumb Components

+
+
+

With the usage of the Smart and Dumb Components pattern one of the most important part is component interaction. +Angular comes with built in support for component interaction with @Input() and @Output() Decorators. +The following figure illustrates an unidirectional data flow.

+
+
+
    +
  • +

    Data always goes down the component tree - from a Smart Component down its children.

    +
  • +
  • +

    Events bubble up, to be handled by a Smart Component.

    +
  • +
+
+
+
+Smart and Dumb Components Interaction +
+
Figure 2. Smart and Dumb Component Interaction
+
+
+

As shown a Dumb Component's role is to define a signature by declaring Input and Output Bindings.

+
+
+
    +
  • +

    @Input() defines what data is necessary for that component to work

    +
  • +
  • +

    @Output() defines which events can be listened on by the parent component

    +
  • +
+
+
+
Listing 1. Dumb Components define a signature
+
+
export class ValuePickerComponent {
+
+  @Input() columns: string[];
+  @Input() items: {}[];
+  @Input() selected: {};
+  @Input() filter: string;
+  @Input() isChunked = false;
+  @Input() showInput = true;
+  @Input() showDropdownHeader = true;
+
+  @Output() elementSelected = new EventEmitter<{}>();
+  @Output() filterChanged = new EventEmitter<string>();
+  @Output() loadNextChunk = new EventEmitter();
+  @Output() escapeKeyPressed = new EventEmitter();
+
+}
+
+
+
+

The example shows the Dumb Component ValuePickerComponent. +It describes seven input bindings with isChunked, showInput and showDropdownHeader being non-mandatory as they have a default value. +Four output bindings are present. Typically, a Dumb Component has very little to no code inside the TypeScript class.

+
+
+
Listing 2. Smart Components use the Dumb Components signature inside the template
+
+
<div>
+
+  <value-input
+    ...>
+  </value-input>
+
+  <value-picker
+    *ngIf="isValuePickerOpen"
+    [columns]="columns"
+    [items]="filteredItems"
+    [isChunked]="isChunked"
+    [filter]="filter"
+    [selected]="selectedItem"
+    [showDropdownHeader]="showDropdownHeader"
+    (loadNextChunk)="onLoadNextChunk()"
+    (elementSelected)="onElementSelected($event)"
+    (filterChanged)="onFilterChanged($event)"
+    (escapeKeyPressed)="onEscapePressedInsideChildTable()">
+  </value-picker>
+
+</div>
+
+
+
+

Inside the Smart Components template the events emitted by Dumb Components are handled. +It is a good practice to name the handlers with the prefix on* (e.g. onInputChanged()).

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/general/guide-angular-libraries.html b/docs/devon4ts/1.0/general/guide-angular-libraries.html new file mode 100644 index 00000000..99db5b44 --- /dev/null +++ b/docs/devon4ts/1.0/general/guide-angular-libraries.html @@ -0,0 +1,350 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+ +
+
+

We have listed some links to UI libraries and commonly requested feature components we recommend. Here is the list:

+
+
+
+ + +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/general/guide-nestjs-libraries.html b/docs/devon4ts/1.0/general/guide-nestjs-libraries.html new file mode 100644 index 00000000..47a05eee --- /dev/null +++ b/docs/devon4ts/1.0/general/guide-nestjs-libraries.html @@ -0,0 +1,366 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+ +
+
+

We have listed some links to NestJS libraries and commonly requested feature components we recommend. Here is the list:

+
+
+
+ +
+

Internationalization (i18n)

+ +
+
+

Monitoring

+
+ +
+
+
+

Logging

+
+ +
+
+
+

Microservice

+
+ +
+
+
+

RBAC (Role-based access control)

+ +
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/general/guide-npm-yarn-workflow.html b/docs/devon4ts/1.0/general/guide-npm-yarn-workflow.html new file mode 100644 index 00000000..25af1465 --- /dev/null +++ b/docs/devon4ts/1.0/general/guide-npm-yarn-workflow.html @@ -0,0 +1,975 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Package Managers Workflow

+
+ +
+
+
+

Introduction

+
+
+

This document aims to provide you the necessary documentation and sources in order to help you understand the importance of dependencies between packages.

+
+
+

Projects in NodeJS make use of modules, chunks of reusable code made by other people or teams. These small chunks of reusable code are called packages [1]. Packages are used to solve specific problems or tasks. These relations between your project and the external packages are called dependencies.

+
+
+

For example, imagine we are doing a small program that takes your birthday as an input and tells you how many days are left until your birthday. We search in the repository if someone has published a package to retrieve the actual date and manage date types, and maybe we could search for another package to show a calendar, because we want to optimize our time, and we wish the user to click a calendar button and choose the day in the calendar instead of typing it.

+
+
+

As you can see, packages are convenient. In some cases, they may be even needed, as they can manage aspects of your program you may not be proficient in, or provide an easier use of them.

+
+
+

For more comprehensive information visit npm definition

+
+
+
+
+

Package.json

+
+
+

Dependencies in your project are stored in a file called package.json. Every package.json must contain, at least, the name and version of your project.

+
+
+

Package.json is located in the root of your project.

+
+
+ + + + + +
+ + +If package.json is not on your root directory refer to Problems you may encounter section +
+
+
+

If you wish to learn more information about package.json, click on the following links:

+
+ +
+
+
+

== Content of package.json

+
+
+

As you noticed, package.json is a really important file in your project. It contains essential information about our project, therefore you need to understand what’s inside.

+
+
+

The structure of package.json is divided in blocks, inside the first one you can find essential information of your project such as the name, version, license and optionally some [Scripts].

+
+
+
+
{
+  "name": "exampleproject",
+  "version": "0.0.0",
+  "license": "MIT",
+  "scripts": {
+    "ng": "ng",
+    "start": "ng serve",
+    "build": "ng build",
+    "test": "ng test",
+    "lint": "ng lint",
+    "e2e": "ng e2e"
+  }
+
+
+
+

The next block is called dependencies and contains the packages that project needs in order to be developed, compiled and executed.

+
+
+
+
"private": true,
+  "dependencies": {
+    "@angular/animations": "^4.2.4",
+    "@angular/common": "^4.2.4",
+    "@angular/forms": "^4.2.4",
+    ...
+    "zone.js": "^0.8.14"
+  }
+
+
+
+

After dependencies we find devDependencies, another kind of dependencies present in the development of the application but unnecessary for its execution. One example is typescript. Code is written in typescript, and then, transpiled to JavaScript. This means the application is not using typescript in execution and consequently not included in the deployment of our application.

+
+
+
+
"devDependencies": {
+    "@angular/cli": "1.4.9",
+    "@angular/compiler-cli": "^4.2.4",
+    ...
+    "@types/node": "~6.0.60",
+    "typescript": "~2.3.3"
+  }
+
+
+
+

Having a peer dependency means that your package needs a dependency that is the same exact dependency as the person installing your package

+
+
+
+
"peerDependencies": {
+    "package-123": "^2.7.18"
+  }
+
+
+
+

Optional dependencies are just that: optional. If they fail to install, Yarn will still say the install process was successful.

+
+
+
+
"optionalDependencies": {
+    "package-321": "^2.7.18"
+  }
+
+
+
+

Finally you can have bundled dependencies which are packages bundled together when publishing your package in a repository.

+
+
+
+
{
+  "bundledDependencies": [
+    "package-4"
+  ]
+}
+
+
+
+

Here is the link to an in-depth explanation of dependency types​.

+
+
+
+
+

== Scripts

+
+
+

Scripts are a great way of automating tasks related to your package, such as simple build processes or development tools.

+
+
+

For example:

+
+
+
+
{
+  "name": "exampleproject",
+  "version": "0.0.0",
+  "license": "MIT",
+  "scripts": {
+    "build-project": "node hello-world.js",
+  }
+
+
+
+

You can run that script by running the command yarn (run) script or npm run script, check the example below:

+
+
+
+
$ yarn (run) build-project    # run is optional
+$ npm run build-project
+
+
+
+

There are special reserved words for scripts, like pre-install, which will execute the script automatically +before the package you install are installed.

+
+
+

Check different uses for scripts in the following links:

+
+ +
+

Or you can go back to +[Content of package.json]​.

+
+
+
+
+

Managing dependencies

+
+
+

In order to manage dependencies we recommend using package managers in your projects.

+
+
+

A big reason is their usability. Adding or removing a package is really easy, and by doing so, the package manager updates the package.json and copies (or removes) the package in the needed location, with a single command.

+
+
+

Another reason, closely related to the first one, is reducing human error by automating the package management process.

+
+
+

Two of the package managers you can use in NodeJS projects are "yarn" and "npm". While you can use both, we encourage you to use only one of them while working on projects. Using both may lead to different dependencies between members of the team.

+
+
+
+
+

== npm

+
+
+

We’ll start by installing npm following this small guide here.

+
+
+

As stated on the web, npm comes inside of NodeJS, and must be updated after installing NodeJS, in the same guide you used earlier are written the instructions to update npm.

+
+
+

How npm works

+
+
+

In order to explain how npm works, let’s take a command as an example:

+
+
+
+
$ npm install @angular/material @angular/cdk
+
+
+
+

This command tells npm to look for the packages @angular/material and @angular/cdk in the npm registry, download and decompress them in the folder node_modules along with their own dependencies. Additionally, npm will update package.json and create a new file called package-lock.json.

+
+
+

After initialization and installing the first package there will be a new folder called node_modules in your project. This folder is where your packages are unzipped and stored, following a tree scheme.

+
+
+

Take into consideration that both npm and yarn need a package.json in the root of your project in order to work properly. If you don’t have it after creating your project, download the package.json again from the repository or you’ll have to start again.

+
+
+

Brief overview of commands

+
+
+

If we need to create a package.json from scratch, we can use the command init. This command asks the user for basic information about the project and creates a brand new package.json.

+
+
+
+
$ npm init
+
+
+
+

Install (or i) installs all modules listed as dependencies in package.json locally. You can also specify a package, and install that package. Install can also be used with the parameter -g, which tells npm to install the [Global package].

+
+
+
+
$ npm install
+$ npm i
+$ npm install Package
+
+
+
+ + + + + +
+ + +Earlier versions of npm did not add dependencies to package.json unless it was used with the flag --save, so npm install package would be npm install --save package, you have one example below. +
+
+
+
+
$ npm install --save Package
+
+
+
+

Npm needs flags in order to know what kind of dependency you want in your project, in npm you need to put the flag -D or --save-dev to install devDependencies, for more information consult the links at the end of this section.

+
+
+
+
$ npm install -D package
+$ npm install --save-dev package
+
+
+
+

+
+
+

The next command uninstalls the module you specified in the command.

+
+
+
+
$ npm uninstall Package
+
+
+
+

ls command shows us the dependencies like a nested tree, useful if you have few packages, not so useful when you need a lot of packages.

+
+
+
+
$ npm ls
+
+
+
+
+
npm@@VERSION@ /path/to/npm
+└─┬ init-package-json@0.0.4
+  └── promzard@0.1.5
+
+
+
+
example tree
+

We recommend you to learn more about npm commands in the following link, navigating to the section CLI commands.

+
+
+

About Package-lock.json

+
+
+

Package-lock.json describes the dependency tree resulting of using package.json and npm. +Whenever you update, add or remove a package, package-lock.json is deleted and redone with +the new dependencies.

+
+
+
+
 "@angular/animations": {
+      "version": "4.4.6",
+      "resolved": "https://registry.npmjs.org/@angular/animations/-/animations-4.4.6.tgz",
+      "integrity": "sha1-+mYYmaik44y3xYPHpcl85l1ZKjU=",
+      "requires": {
+        "tslib": "1.8.0"
+      }
+
+
+
+

This lock file is checked every time the command npm i (or npm install) is used without specifying a package, +in the case it exists and it’s valid, npm will install the exact tree that was generated, such that subsequent +installs are able to generate identical dependency trees.

+
+
+ + + + + +
+ + +It is not recommended to modify this file yourself. It’s better to leave its management to npm. +
+
+
+

More information is provided by the npm team at package-lock.json

+
+
+
+
+

== Yarn

+
+
+

Yarn is an alternative to npm. If you wish to install yarn, follow the guide getting started with yarn and download the correct version for your operating system. NodeJS is also needed; you can find it here.

+
+
+

Working with yarn

+
+
+

Yarn is used like npm, with small differences in syntax, for example npm install module is changed to yarn add module.

+
+
+
+
$ yarn add @covalent
+
+
+
+

This command is going to download the required packages, modify package.json, put the package in the folder node_modules and make a new yarn.lock with the new dependency.

+
+
+

However, unlike npm, yarn maintains a cache with packages you download inside. You don’t need to download every file every time you do a general installation. This means installations are faster than with npm.

+
+
+

Similarly to npm, yarn creates and maintains its own lock file, called yarn.lock. Yarn.lock gives enough information about the project for the dependency tree to be reproduced.

+
+
+

yarn commands

+
+
+

Here we have a brief description of yarn’s most used commands:

+
+
+
+
$ yarn add Package
+$ yarn add --dev Package
+
+
+
+

Adds a package locally to use in your package. Adding the flags --dev or -D will add them to devDependencies instead of the default dependencies, if you need more information check the links at the end of the section.

+
+
+
+
$ yarn init
+
+
+
+

Initializes the development of a package.

+
+
+
+
$ yarn install
+
+
+
+

Installs all the dependencies defined in a package.json file, you can also write "yarn" to achieve the same effect.

+
+
+
+
$ yarn remove Package
+
+
+
+

You use it when you wish to remove a package from your project.

+
+
+
+
$ yarn global add Package
+
+
+
+

Installs the [Global package].

+
+
+

Please, refer to the documentation to learn more about yarn commands and their attributes: yarn commands

+
+
+

yarn.lock

+
+
+

This file has the same purpose as Package-lock.json, to guide the package manager, in this case yarn, +to install the dependency tree specified in yarn.lock.

+
+
+

Yarn.lock and package.json are +essential files when collaborating on a project with co-workers and may be a +source of errors if programmers do not use the same manager.

+
+
+

Yarn.lock follows the same structure as package-lock.json, you can find an example of dependency below:

+
+
+
+
"@angular/animations@^4.2.4":
+  version "4.4.6"
+  resolved "https://registry.yarnpkg.com/@angular/animations/-/animations-4.4.6.tgz#fa661899a8a4e38cb7c583c7a5c97ce65d592a35"
+  dependencies:
+    tslib "^1.7.1"
+
+
+
+ + + + + +
+ + +As with package-lock.json, it’s strongly not advised to modify this file. Leave its management to yarn +
+
+
+

You can learn more about yarn.lock here: yarn.lock

+
+
+
+
+

== Global package

+
+
+

Global packages are packages installed in your operating system instead of your local project; +global packages are useful for developer tooling that is not part of any individual project but instead is used for local commands.

+
+
+

A good example of global package is @angular/cli, a command line interface for angular used in our projects. You can install +a global package in npm with "npm install -g package" and "yarn global add package" with yarn, you have a npm example below:

+
+
+
Listing 1. npm global package
+
+
npm install -g @angular/cli
+
+
+ +
+
+
+

== Package version

+
+
+

Dependencies are critical to the success of a package. You must be extra careful about +which versions your packages are using; one package in a different version may break your code.

+
+
+

Versioning in npm and yarn, follows a semantic called semver, following the logic +MAJOR.MINOR.PATCH, like for example, @angular/animations: 4.4.6.

+
+
+

Different versions

+
+
+

Sometimes, packages are installed with a different version from the one initially installed. +This happens because package.json also contains the range of versions we allow yarn or npm to +install or update to, example:

+
+
+
+
"@angular/animations": "^4.2.4"
+
+
+
+

And here the installed one:

+
+
+
+
 "@angular/animations": {
+      "version": "4.4.6",
+      "resolved": "https://registry.npmjs.org/@angular/animations/-/animations-4.4.6.tgz",
+      "integrity": "sha1-+mYYmaik44y3xYPHpcl85l1ZKjU=",
+      "requires": {
+        "tslib": "1.8.0"
+      }
+
+
+
+

As you can see, the version we initially added is 4.2.4, and the version finally installed after +a global installation of all packages, 4.4.6.

+
+
+

Installing packages without package-lock.json or yarn.lock using their respective package managers will always +end with npm or yarn installing the latest version allowed by package.json.

+
+
+

"@angular/animations": "^4.2.4" contains not only the version we added, but also the range we allow npm and yarn +to update. Here are some examples:

+
+
+
+
"@angular/animations": "<4.2.4"
+
+
+
+

The version installed must be lower than 4.2.4 .

+
+
+
+
"@angular/animations": ">=4.2.4"
+
+
+
+

The version installed must be greater than or equal to 4.2.4 .

+
+
+
+
"@angular/animations": "=4.2.4"
+
+
+
+

the version installed must be equal to 4.2.4 .

+
+
+
+
"@angular/animations": "^4.2.4"
+
+
+
+

The version installed cannot modify the first non-zero digit, for example, in this case +it cannot surpass 5.0.0 or be lower than 4.2.4.

+
+
+

You can learn more about this in Versions

+
+
+
+
+

Problems you may encounter

+
+
+

If you can’t find package.json, you may have deleted the one you had previously, +which means you have to download the package.json from the repository. +In the case you are creating a new project you can create a new package.json. More information +in the links below. Click on Package.json if you come from that section.

+
+ +
+ + + + + +
+ + +Using npm install or yarn without package.json in your projects will +result in compilation errors. As we mentioned earlier, +Package.json contains essential information about your project. +
+
+
+

If you have package.json, but you don’t have package-lock.json or yarn.lock the use of +command "npm install" or "yarn" may result in a different dependency tree.

+
+
+

If you are trying to import a module and Visual Studio Code is not able to find it, +this is usually caused by an error adding the package to the project; try to add the module again with yarn or npm, +and restart Visual Studio Code.

+
+
+

Be careful with the semantic versioning inside your package.json of the packages, +or you may find a new update on one of your dependencies breaking your code.

+
+
+ + + + + +
+ + +In the following link +there is a solution to a problematic update to one package. +
+
+
+

A list of common errors of npm can be found in: npm errors

+
+
+
+
+

== Recommendations

+
+
+

Use yarn or npm in your project; reach an agreement with your team in order to choose one. This will avoid +undesired situations like forgetting to upload an updated yarn.lock or package-lock.json. +Be sure to have the latest version of your project when possible.

+
+
+ + + + + +
+ + +Pull your project every time it’s updated. Erase your node_modules folder and reinstall all +dependencies. This assures you to be working with the same dependencies your team has. +
+
+
+

AD Center recommends the use of yarn.

+
+
+
+
+
+
+1. A package is a file or directory that is described by a package.json. . +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/general/guide-package-managers.html b/docs/devon4ts/1.0/general/guide-package-managers.html new file mode 100644 index 00000000..648e53d8 --- /dev/null +++ b/docs/devon4ts/1.0/general/guide-package-managers.html @@ -0,0 +1,502 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Package Managers

+
+
+

There are two major package managers currently used for JavaScript / TypeScript projects which leverage NodeJS as a build platform.

+
+
+
    +
  1. +

    npm

    +
  2. +
  3. +

    yarn

    +
  4. +
+
+
+

Our recommendation is to use yarn but both package managers are fine.

+
+
+ + + + + +
+ + +When using npm it is important to use a version greater than 5.0 as npm 3 has major drawbacks compared to yarn. +The following guide assumes that you are using npm >= 5 or yarn. +
+
+
+

Before you start reading further, please take a look at the docs:

+
+ +
+

The following guide will describe best practices for working with yarn / npm.

+
+
+
+
+

Semantic Versioning

+
+
+

When working with package managers it is very important to understand the concept of semantic versioning.

+
+
+
Version example 1.2.3
+

|== == == = +|Version |1. |2. |3 +|Version name when incrementing |Major (2.0.0) |Minor (1.3.0) |Patch (1.2.4) +|Has breaking changes |yes |no |no +|Has features |yes |yes |no +|Has bug fixes |yes |yes |yes +|== == == =

+
+
+

The table gives an overview of the most important parts of semantic versioning. +In the header version 1.2.3 is displayed. +The first row shows the name and the resulting version when incrementing a part of the version. +The next rows show specifics of the resulting version - e.g. a major version can have breaking changes, features and bug fixes.

+
+
+

Packages from npm and yarn leverage semantic versioning and instead of selecting a fixed version one can specify a selector. +The most common selectors are:

+
+
+
    +
  • +

    ^1.2.3 +At least 1.2.3 - 1.2.4 or 1.3.0 can be used, 2.0.0 can not be used

    +
  • +
  • +

    ~1.2.3 +At least 1.2.3 - 1.2.4 can be used, 2.0.0 and 1.3.0 can not be used

    +
  • +
  • +

    >=1.2.3 +At least 1.2.3 - every version greater can also be used

    +
  • +
+
+
+

This achieves a lower number of duplicates. +To give an example:

+
+
+

If package A needs version 1.3.0 of package C and package B needs version 1.4.0 of package C one would end up with 4 packages.

+
+
+

If package A needs version ^1.3.0 of package C and package B needs version 1.4.0 of package C one would end up with 3 packages. +A would use the same version of C as B - 1.4.0.

+
+
+
+
+

Do not modify package.json and lock files by hand

+
+
+

Dependencies are always added using a yarn or npm command. +Altering the package.json, package-lock.json or yarn.lock file by hand is not recommended.

+
+
+

Always use a yarn or npm command to add a new dependency.

+
+
+

Adding the package express with yarn to dependencies.

+
+
+
+
yarn add express
+
+
+
+

Adding the package express with npm to dependencies.

+
+
+
+
npm install express
+
+
+
+
+
+

What does the lock file do

+
+
+

The purpose of files yarn.lock and package-lock.json is to freeze versions for a short time.

+
+
+

The following problem is solved:

+
+
+
    +
  • +

    Developer A upgrades the dependency express to fixed version 4.16.3.

    +
  • +
  • +

    express has sub-dependency accepts with version selector ~1.3.5

    +
  • +
  • +

    His local node_modules folder receives accepts in version 1.3.5

    +
  • +
  • +

    On his machine everything is working fine

    +
  • +
  • +

    Afterward version 1.3.6 of accepts is published - it contains a major bug

    +
  • +
  • +

    Developer B now clones the repo and loads the dependencies.

    +
  • +
  • +

    He receives version 1.3.6 of accepts and blames developer A for upgrading to a broken version.

    +
  • +
+
+
+

Both yarn.lock and package-lock.json freeze all the dependencies. +For example in yarn.lock you will find:

+
+
+
Listing 1. yarn.lock example (excerpt)
+
+
accepts@~1.3.5:
+  version "1.3.5"
+  resolved "[...URL to registry]"
+  dependencies:
+    mime-types "~2.1.18"
+    negotiator "0.6.1"
+
+mime-db@~1.33.0:
+  version "1.33.0"
+  resolved "[...URL to registry]"
+
+mime-types@~2.1.18:
+  version "2.1.18"
+  resolved "[...URL to registry]"
+  dependencies:
+    mime-db "~1.33.0"
+
+negotiator@0.6.1:
+  version "0.6.1"
+  resolved "[...URL to registry]"
+
+
+
+

The described problem is solved by the example yarn.lock file.

+
+
+
    +
  • +

    accepts is frozen at version ~1.3.5

    +
  • +
  • +

    All of its sub-dependencies are also frozen. +It needs mime-types at version ~2.1.18 which is frozen at 2.1.18. +mime-types needs mime-db at ~1.33.0 which is frozen at 1.33.0

    +
  • +
+
+
+

Every developer will receive the same versions of every dependency.

+
+
+ + + + + +
+ + +You have to make sure all your developers are using the same npm/yarn version - this includes the CI build. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/general/guide-yarn-2-support.html b/docs/devon4ts/1.0/general/guide-yarn-2-support.html new file mode 100644 index 00000000..ee43f553 --- /dev/null +++ b/docs/devon4ts/1.0/general/guide-yarn-2-support.html @@ -0,0 +1,427 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Yarn 2

+
+
+

Yarn v2 is a very different software from the v1. The following list contains the main new features:

+
+ +
+

Please, read them carefully to decide if your current project is suitable to use Yarn 2 as package manager.

+
+
+ + + + + +
+ + +Some features are still experimental, so please do not use them in production environments. +
+
+
+

More info at https://yarnpkg.com/

+
+
+
+
+

Global Install

+
+
+

Installing Yarn 2.x globally is discouraged as the Yarn team is moving to a per-project install strategy. We advise you to keep Yarn 1.x (Classic) as your global binary by installing it via the instructions you can find here.

+
+
+

Once you’ve followed the instructions (running yarn --version from your home directory should yield something like 1.22.0), go to the next section to see how to enable Yarn 2 on your project.

+
+
+
+
+

Per-project install

+
+
+

Follow these instructions to update your current devon4ng project to Yarn 2:

+
+
+
    +
  1. +

    Follow the global install instructions.

    +
  2. +
  3. +

    Move into your project folder:

    +
    +
    +
    cd ~/path/to/project
    +
    +
    +
  4. +
  5. +

    Run the following command:

    +
    +
    +
    yarn policies set-version berry # below v1.22
    +yarn set version berry          # on v1.22+
    +
    +
    +
  6. +
  7. +

    Since Angular CLI still is not fully supported with the new Yarn architecture as it is not compatible with PnP it is necessary to include the node-modules plugin adding the following line in the .yarnrc.yml file:

    +
    +
    +
    nodeLinker: node-modules
    +
    +
    +
  8. +
  9. +

    Commit the .yarn and .yarnrc.yml changes

    +
  10. +
  11. +

    Run again yarn install.

    +
  12. +
+
+
+ + + + + +
+ + +For more advanced migration topics please refer to https://yarnpkg.com/advanced/migration +
+
+
+
+
+

Which files should be added to gitignore file?

+
+
+

If you’re using Zero-Installs:

+
+
+
+
.yarn/*
+!.yarn/cache
+!.yarn/releases
+!.yarn/plugins
+
+
+
+

If you’re not using Zero-Installs:

+
+
+
+
.yarn/*
+!.yarn/releases
+!.yarn/plugins
+.pnp.*
+
+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/general/meta-architecture.html b/docs/devon4ts/1.0/general/meta-architecture.html new file mode 100644 index 00000000..395dd337 --- /dev/null +++ b/docs/devon4ts/1.0/general/meta-architecture.html @@ -0,0 +1,675 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Meta Architecture

+
+ +
+
+
+

Introduction

+
+ +
+
+
+

Purpose of this document

+
+
+

In our business applications, the client easily gets underestimated. Sometimes the client is more complex to develop and design than the server. While the server architecture is nowadays easily agreed upon as common sense, for clients this is not as obvious and stable, especially as it typically depends on the client framework used. Finding a concrete architecture applicable for all clients may therefore be difficult to accomplish.

+
+
+

This document tries to define on a high abstract level, a reference architecture which is supposed to be a mental image and frame for orientation regarding the evaluation and appliance of different client frameworks. As such it defines terms and concepts required to be provided for in any framework and thus gives a common ground of understanding for those acquainted with the reference architecture. This allows better comparison between the various frameworks out there, each having their own terms for essentially the same concepts. It also means that for each framework we need to explicitly map how it implements the concepts defined in this document.

+
+
+

The architecture proposed herein is neither new nor was it developed from scratch. Instead it is the gathered and consolidated knowledge and best practices of various projects (s. References).

+
+
+
+
+

Goal of the Client Architecture

+
+
+

The goal of the client architecture is to support the non-functional requirements for the client, i.e. mostly maintainability, scalability, efficiency and portability. As such it provides a component-oriented architecture following the same principles listed already in the devonfw architecture overview. Furthermore it ensures a homogeneity regarding how different concrete UI technologies are being applied in the projects, solving the common requirements in the same way.

+
+
+
+
+

Architecture Views

+
+
+

As for the server we distinguish between the business and the technical architecture. Where the business architecture is different from project to project and relates to the concrete design of dialog components given concrete requirements, the technical architecture can be applied to multiple projects.

+
+
+

The focus of this document is to provide a technical reference architecture on the client on a very abstract level defining required layers and components. How the architecture is implemented has to be defined for each UI technology.

+
+
+

The technical infrastructure architecture is out of scope for this document and although it needs to be considered, the concepts of the reference architecture should work across multiple TI architecture, i.e. native or web clients.

+
+
+
+
+

devonfw Reference Client Architecture

+
+
+

The following gives a complete overview of the proposed reference architecture. It will be built up incrementally in the following sections.

+
+
+
+Complete Client Architecture Overview +
+
+
+

Figure 1 Overview

+
+
+
+
+

Client Architecture

+
+
+

On the highest level of abstraction we see the need to differentiate between dialog components and the container they are managed in, as well as the access to the application server being the back-end for the client (e.g. a devon4j instance). This section gives a summary of these components and how they relate to each other. Detailed architectures for each component will be supplied in subsequent sections.

+
+
+
+Client Architecture Overview +
+
+
+

Figure 2 Overview of Client Architecture

+
+
+
+
+

Dialog Component

+
+
+

A dialog component is a logical, self-contained part of the user interface. It accepts user input and actions and controls communication with the user. Dialog components use the services provided by the dialog container in order to execute the business logic. They are self-contained, i.e. they possess their own user interface together with the associated logic, data and states.

+
+
+
    +
  • +

    Dialog components can be composed of other dialog components forming a hierarchy

    +
  • +
  • +

    Dialog components can interact with each other. This includes communication of a parent to its children, but also between components independent of each other regarding the hierarchy.

    +
  • +
+
+
+
+
+

Dialog Container

+
+
+

Dialog components need to be managed in their life-cycle and how they can be coupled to each other. The dialog container is responsible for this along with the following:

+
+
+
    +
  • +

    Bootstrapping the client application and environment

    +
    +
      +
    • +

      Configuration of the client

      +
    • +
    • +

      Initialization of the application server access component

      +
    • +
    +
    +
  • +
  • +

    Dialog Component Management

    +
    +
      +
    • +

      Controlling the life-cycle

      +
    • +
    • +

      Controlling the dialog flow

      +
    • +
    • +

      Providing means of interaction between the dialogs

      +
    • +
    • +

      Providing application server access

      +
    • +
    • +

      Providing services to the dialog components
      +(e.g. printing, caching, data storage)

      +
    • +
    +
    +
  • +
  • +

    Shutdown of the application

    +
  • +
+
+
+
+
+

Application Server Access

+
+
+

Dialogs will require a back-end application server in order to execute their business logic. Typically in a devonfw application the service layer will provide interfaces for the functionality exposed to the client. These business oriented interfaces should also be present on the client backed by a proxy handling the concrete call of the server over the network. This component provides the set of interfaces as well as the proxy.

+
+
+
+
+

Dialog Container Architecture

+
+
+

The dialog container can be further structured into the following components with their respective tasks described in own sections:

+
+
+
+Dialog Container Architecture Overview +
+
+
+

Figure 3 Dialog Container Architecture

+
+
+
+
+

Application

+
+
+

The application component represents the overall client in our architecture. It is responsible for bootstrapping all other components and connecting them with each other. As such it initializes the components below and provides an environment for them to work in.

+
+
+
+
+

Configuration Management

+
+
+

The configuration management manages the configuration of the client, so the client can be deployed in different environments. This includes configuration of the concrete application server to be called or any other environment-specific property.

+
+
+
+
+

Dialog Management

+
+
+

The Dialog Management component provides the means to define, create and destroy dialog components. It therefore offers basic life-cycle capabilities for a component. In addition it also allows composition of dialog components in a hierarchy. The life-cycle is then managed along the hierarchy, meaning when creating/destroying a parent dialog, this affects all child components, which are created/destroyed as well.

+
+
+
+
+

Service Registry

+
+
+

Apart from dialog components, a client application also consists of services offered to these. A service can thereby encompass among others:

+
+
+
    +
  • +

    Access to the application server

    +
  • +
  • +

    Access to the dialog container functions for managing dialogs or accessing the configuration

    +
  • +
  • +

    Dialog independent client functionality such as Printing, Caching, Logging, Encapsulated business logic such as tax calculation

    +
  • +
  • +

    Dialog component interaction

    +
  • +
+
+
+

The service registry offers the possibility to define, register and lookup these services. Note that these services could be dependent on the dialog hierarchy, meaning different child instances could obtain different instances / implementations of a service via the service registry, depending on which service implementations are registered by the parents.

+
+
+

Services should be defined as interfaces allowing for different implementations and thus loose coupling.

+
+
+
+
+

Dialog Component Architecture

+
+
+

A dialog component has to support all or a subset of the following tasks:
+(T1) Displaying the user interface incl. internationalization
+(T2) Displaying business data incl. changes made to the data due to user interactions and localization of the data
+(T3) Accepting user input including possible conversion from e.g. entered Text to an Integer
+(T4) Displaying the dialog state
+(T5) Validation of user input
+(T6) Managing the business data incl. business logic altering it due to user interactions
+(T7) Execution of user interactions
+(T8) Managing the state of the dialog (e.g. Edit vs. View)
+(T9) Calling the application server in the course of user interactions

+
+
+

Following the principle of separation of concerns, we further structure a dialog component in an own architecture allowing us the distribute responsibility for these tasks along the defined components:

+
+
+
+Dialog Component Architecture +
+
+
+

Figure 4 Overview of dialog component architecture

+
+
+
+
+

Presentation Layer

+
+
+

The presentation layer generates and displays the user interface, accepts user input and user actions and binds these to the dialog core layer (T1-5). The tasks of the presentation layer fall into two categories:

+
+
+
    +
  • +

    Provision of the visual representation (View component)
    +The presentation layer generates and displays the user interface and accepts user input and user actions. The logical processing of the data, actions and states is performed in the dialog core layer. The data and user interface are displayed in localized and internationalized form.

    +
  • +
  • +

    Binding of the visual representation to the dialog core layer
    +The presentation layer itself does not contain any dialog logic. The data or actions entered by the user are then processed in the dialog core layer. There are three aspects to the binding to the dialog core layer. We refer to “data binding”, “state binding” and “action binding”. Syntactical and (to a certain extent) semantic validations are performed during data binding (e.g. cross-field plausibility checks). Furthermore, the formatted, localized data in the presentation layer is converted into the presentation-independent, neutral data in the dialog core layer (parsing) and vice versa (formatting).

    +
  • +
+
+
+
+
+

Dialog Core Layer

+
+
+

The dialog core layer contains the business logic, the control logic, and the logical state of the dialog. It therefore covers tasks T5-9:

+
+
+
    +
  • +

    Maintenance of the logical dialog state and the logical data
    +The dialog core layer maintains the logical dialog state and the logical data in a form which is independent of the presentation. The states of the presentation (e.g. individual widgets) must not be maintained in the dialog core layer, e.g. the view state could lead to multiple presentation states disabling all editable widgets on the view.

    +
  • +
  • +

    Implementation of the dialog and dialog control logic
    +The component parts in the dialog core layer implement the client specific business logic and the dialog control logic. This includes, for example, the manipulation of dialog data and dialog states as well as the opening and closing of dialogs.

    +
  • +
  • +

    Communication with the application server
    +The dialog core layer calls the interfaces of the application server via the application server access component services.

    +
  • +
+
+
+

The dialog core layer should not depend on the presentation layer enforcing a strict layering and thus minimizing dependencies.

+
+
+
+
+

Interactions between dialog components

+
+
+

Dialog components can interact in the following ways:

+
+
+
+Dialog Interactions +
+
+
+
    +
  • +

    Embedding of dialog components
    +As already said dialog components can be hierarchically composed. This composition works by embedding one dialog component within the other. Apart from the life-cycle managed by the dialog container, the embedding needs to cater for the visual embedding of the presentation and core layer.

    +
    +
      +
    • +

      Embedding dialog presentation
      +The parent dialog needs to either integrate the embedded dialog in its layout or open it in an own modal window.

      +
    • +
    • +

      Embedding dialog core
      +The parent dialog needs to be able to access the embedded instance of its children. This allows initializing and changing their data and states. On the other hand the children might require context information offered by the parent dialog by registering services in the hierarchical service registry.

      +
    • +
    +
    +
  • +
  • +

    Dialog flow
    +Apart from the embedding of dialog components representing a tight coupling, dialogs can interact with each other by passing the control of the UI, i.e. switching from one dialog to another.

    +
  • +
+
+
+

When interacting, dialog components should interact only between the same or lower layers, i.e. the dialog core should not access the presentation layer of another dialog component.

+
+
+
+
+

Appendix

+
+ +
+
+
+

Notes about Quasar Client

+
+
+

The Quasar client architecture as the consolidated knowledge of our CSD projects is the major source for the above drafted architecture. However, the above is a much simplified and more agile version thereof:

+
+
+
    +
  • +

    Quasar Client tried to abstract from the concrete UI library being used, so it could decouple the business from the technical logic of a dialog. The presentation layer should be the only one knowing the concrete UI framework used. This level of abstraction was dropped in this reference architecture, although it might of course still make sense in some projects. For fast-moving agile projects in the web however introducing such a level of abstraction takes effort with little gained benefits. With frameworks like Angular 2 we would even introduce one additional seemingly artificial and redundant layer, since it already separates the dialog core from its presentation.

    +
  • +
  • +

In the past and in the days of Struts, JSF, etc. the concept of session handling was important for the client since part of the client was sitting on a server with a session relating it to its remote counterpart on the user's PC. Quasar Client catered for this need, by very prominently differentiating between session and application in the root of the dialog component hierarchy. However, in the current days of SPA applications and the lowered importance of server-side web clients, this prominent differentiation was dropped. When still needed the referenced documents will provide in more detail how to tailor the respective architecture to this end.

    +
  • +
+
+
+
+ +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/general/services-layer.html b/docs/devon4ts/1.0/general/services-layer.html new file mode 100644 index 00000000..dd1b5199 --- /dev/null +++ b/docs/devon4ts/1.0/general/services-layer.html @@ -0,0 +1,558 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Services Layer

+
+
+

The services layer is more or less what we call 'business logic layer' on the server side. +It is the layer where the business logic is placed. +The main challenges are:

+
+
+
    +
  • +

    Define application state and an API for the components layer to use it

    +
  • +
  • +

    Handle application state transitions

    +
  • +
  • +

    Perform back-end interaction (XHR, WebSocket, etc.)

    +
  • +
  • +

    Handle business logic in a maintainable way

    +
  • +
  • +

    Configuration management

    +
  • +
+
+
+

All parts of the services layer are described in this chapter. +An example which puts the concepts together can be found at the end Interaction of Smart Components through the services layer.

+
+
+
+
+

Boundaries

+
+
+

There are two APIs for the components layer to interact with the services layer:

+
+
+
    +
  • +

    A store can be subscribed to for receiving state updates over time

    +
  • +
  • +

    A use case service can be called to trigger an action

    +
  • +
+
+
+

To illustrate the fact the following figure shows an abstract overview.

+
+
+
+Smart and Dumb Components Interaction +
+
Figure 1. Boundaries to components layer
+
+
+
+
+

Store

+
+
+

A store is a class which defines and handles application state with its transitions over time. +Interaction with a store is always synchronous. +A basic implementation using RxJS can look like this.

+
+
+ + + + + +
+ + +A more profound implementation taken from a real-life project can be found here (Abstract Class Store). +
+
+
+
Listing 1. Store defined using RxJS
+
+
@Injectable()
+export class ProductSearchStore {
+
+  private stateSource = new BehaviorSubject<ProductSearchState>(defaultProductSearchState);
+  state$ = this.stateSource.asObservable();
+
+  setLoading(isLoading: boolean) {
+    const currentState = this.stateSource.getValue();
+    this.stateSource.next({
+      isLoading: isLoading,
+      products: currentState.products,
+      searchCriteria: currentState.searchCriteria
+    });
+  }
+
+}
+
+
+
+

In the example ProductSearchStore handles state of type ProductSearchState. +The public API is the property state$ which is an observable of type ProductSearchState. +The state can be changed with method calls. +So every desired change to the state needs to be modeled with a method. +In reactive terminology this would be an Action. +The store does not use any services. +Subscribing to the state$ observable leads to the subscribers receiving every new state.

+
+
+

This is basically the Observer Pattern:
+The store consumer registers itself to the observable via state$.subscribe() method call. +The first parameter of subscribe() is a callback function to be called when the subject changes. +This way the consumer - the observer - is registered. +When next() is called with a new state inside the store, all callback functions are called with the new value. +So every observer is notified of the state change. +This equals the Observer Pattern push type.

+
+
+

A store is the API for Smart Components to receive state from the service layer. +State transitions are handled automatically with Smart Components registering to the state$ observable.

+
+
+
+
+

Use Case Service

+
+
+

A use case service is a service which has methods to perform asynchronous state transitions. +In reactive terminology this would be an Action of Actions - a thunk (redux) or an effect (@ngrx).

+
+
+
+Use Case Service +
+
Figure 2. Use case services are the main API to trigger state transitions
+
+
+

A use case service's method - an action - interacts with adapters, business services and stores. +So use case services orchestrate whole use cases. +For an example see use case service example.

+
+
+
+
+

Adapter

+
+
+

An adapter is used to communicate with the back-end. +This could be a simple XHR request, a WebSocket connection, etc. +An adapter is simple in the way that it does not add anything other than the pure network call. +So there is no caching or logging performed here. +The following listing shows an example.

+
+
+

For further information on back-end interaction see Consuming REST Services

+
+
+
Listing 2. Calling the back-end via an adapter
+
+
@Injectable()
+export class ProductsAdapter {
+
+  private baseUrl = environment.baseUrl;
+
+  constructor(private http: HttpClient) { }
+
+  getAll(): Observable<Product[]> {
+    return this.http.get<Product[]>(this.baseUrl + '/products');
+  }
+
+}
+
+
+
+
+
+

Interaction of Smart Components through the services layer

+
+
+

The interaction of smart components is a classic problem which has to be solved in every UI technology. +It is basically how one dialog tells the other something has changed.

+
+
+

An example is adding an item to the shopping basket. +With this action there need to be multiple state updates.

+
+
+
    +
  • +

    The small logo showing how many items are currently inside the basket needs to be updated from 0 to 1

    +
  • +
  • +

    The price needs to be recalculated

    +
  • +
  • +

    Shipping costs need to be checked

    +
  • +
  • +

    Discounts need to be updated

    +
  • +
  • +

    Ads need to be updated with related products

    +
  • +
  • +

    etc.

    +
  • +
+
+
+
+
+

Pattern

+
+
+

To handle this interaction in a scalable way we apply the following pattern.

+
+
+
+Interaction of Smart Components via services layer +
+
Figure 3. Smart Component interaction
+
+
+

The state of interest is encapsulated inside a store. All Smart Components interested in the state have to subscribe to the store’s API served by the public observable. Thus, with every update to the store the subscribed components receive the new value. The components basically react to state changes. Altering a store can be done directly if the desired change is synchronous. Most actions are of asynchronous nature so the UseCaseService comes into play. Its actions are void methods, which implement a use case, i.e., adding a new item to the basket. It calls asynchronous actions and can perform multiple store updates over time.

+
+
+

To put this pattern into perspective the UseCaseService is a programmatic alternative to redux-thunk or @ngrx/effects. The main motivation here is to use the full power of TypeScript --strictNullChecks and to let the learning curve not to become as steep as it would be when learning a new state management framework. This way actions are just void method calls.

+
+
+
+
+

Example

+
+
+
+Smart component interaction example +
+
Figure 4. Smart Components interaction example
+
+
+

The example shows two Smart Components sharing the FlightSearchState by using the FlightSearchStore. +The use case shown is started by an event in the Smart Component FlightSearchComponent. The action loadFlight() is called. This could be submitting a search form. +The UseCaseService is FlightSearchService, which handles the use case Load Flights.

+
+
+
UseCaseService example
+

+
+
+
+
export class FlightSearchService {
+
+  constructor(
+    private flightSearchAdapter: FlightSearchAdapter,
+    private store: FlightSearchStore
+  ) { }
+
+  loadFlights(criteria: FlightSearchCriteria): void {
+    this.store.setLoadingFlights(true);
+    this.store.clearFlights();
+
+    this.flightSearchAdapter.getFlights(criteria.departureDate,
+        {
+          from: criteria.departureAirport,
+          to: criteria.destinationAirport
+        })
+      .finally(() => this.store.setLoadingFlights(false))
+      .subscribe((result: FlightTo[]) => this.store.setFlights(result, criteria));
+  }
+
+}
+
+
+
+

First the loading flag is set to true and the current flights are cleared. This leads to the Smart Component showing a spinner indicating the loading action. Then the asynchronous XHR is triggered by calling the adapter. After completion the loading flag is set to false, causing the loading indication to no longer be shown. If the XHR was successful, the data would be put into the store. If the XHR was not successful, this would be the place to handle a custom error. All general network issues should be handled in a dedicated class, i.e., an interceptor. So for example the basic handling of 404 errors is not done here.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/index.html b/docs/devon4ts/1.0/index.html new file mode 100644 index 00000000..dcdc503e --- /dev/null +++ b/docs/devon4ts/1.0/index.html @@ -0,0 +1,550 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4ng

+
+
+

This guide describes an application architecture for web client development with Angular.

+
+
+
+
+

Motivation

+
+
+

Front-end development is a very difficult task since there are a lot of different frameworks, patterns and practices nowadays. For that reason, in devonfw we decided to make use of Angular since it is a full front-end framework that includes almost all the different patterns and features that any SPA may need and provides a well defined architecture to development, build and deploy.

+
+
+

The idea with devon4ng is to define an architecture which is a compromise between, on the one hand, leveraging the best practices and latest trends like reactive style development, on the other hand, providing a short on-boarding time while still using an architecture that helps us scale and be productive at the same time.

+
+
+

At the same time devon4ng aims to help developers to solve common problems that appear in many projects and provide samples and blueprints to show how to apply these solutions in real situations.

+
+
+
+
+

Contents

+
+ +
+

This section introduces in an easy way the main principles and guidelines based on Angular Style Guide.

+
+ +
+

The goal of this topic is to support the non-functional requirements for the client, i.e. mostly maintainability, scalability, efficiency and portability. As such it provides a component-oriented architecture following the same principles listed already in the devonfw architecture overview.

+
+
+
+
+

Layers

+
+
+

This section provides a condensed explanation about the different layers a good Angular application must provide.

+
+ +
+
+
+

Guides

+
+
+

This section introduces concepts to help developers with the tooling and package managers.

+
+ +
+
+
+

Angular

+
+
+

This is the main section of the documentation, where the developer will find guidelines for accessibility, how to use the Angular toolchain, how to refactor components, create libraries and, in general, maintain Angular applications. But last and not least, developers will also find solutions to common problems many of the Angular projects may have.

+
+
+ + + + + +
+ + +All the different topics are demonstrated in the samples folder with a small application. +
+
+ +
+
+
+

Ionic

+
+
+

As part of the devon4ng stack, we include a small section to explain how to develop hybrid mobile Ionic/Angular applications and create PWAs with this UI library. As the previous section, the contents are demonstrated in the samples folder.

+
+ +
+
+
+

Layouts

+
+
+

Any SPA application must have a layout. So, the purpose of this section is to explain the Angular Material approach.

+
+ +
+
+
+

NgRx

+
+
+

State Management is a big topic in big front-end applications. This section explains the fundamentals of the industry standard library NgRx, showing its main components.

+
+ +
+
+
+

Cookbook

+
+
+

The Cookbook section aims to provide solutions to cross-topic challenges that at this moment do not fit in the previous sections. As the Angular section, some of the topics are demonstrated with a sample located in the samples folder.

+
+ +
+
+
+

devon4ng templates

+
+
+

In order to support CobiGen generation tool for Angular applications, devon4ng demos realization and provide more opinionated samples, the following templates are also included in devon4ng contents:

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/ionic/guide-ionic-from-code-to-android.html b/docs/devon4ts/1.0/ionic/guide-ionic-from-code-to-android.html new file mode 100644 index 00000000..c66a2b4d --- /dev/null +++ b/docs/devon4ts/1.0/ionic/guide-ionic-from-code-to-android.html @@ -0,0 +1,606 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Ionic to android

+
+
+

This page is written to help developers go from the source code of an Ionic application to an Android one; with this in mind, topics such as environment, commands, and modifications are covered.

+
+
+
+
+

Assumptions

+
+
+

This document assumes that the reader has already:

+
+
+
    +
  • +

    Source code of an Ionic application and wants to build it on an android device,

    +
  • +
  • +

    A working installation of Node.js

    +
  • +
  • +

    An Ionic CLI installed and up-to-date.

    +
  • +
  • +

    Android Studio and Android SDK.

    +
  • +
+
+
+
+
+

From Ionic to Android project

+
+
+

When a native application is being designed, sometimes, functionalities that uses camera, geolocation, push notification, …​ are requested. To resolve these requests, Capacitor can be used.

+
+
+

In general terms, Capacitor wraps apps made with Ionic (HTML, SCSS, Typescript) into WebViews that can be displayed in native applications (Android, IOS) and allows the developer to access native functionalities like the ones said before.

+
+
+

Installing capacitor is as easy as installing any node module, just a few commands have to be run in a console:

+
+
+
    +
  • +

    cd name-of-ionic-4-app

    +
  • +
  • +

    npm install --save @capacitor/core @capacitor/cli

    +
  • +
+
+
+

Then, it is necessary to initialize capacitor with some information: app id, name of the app and the directory where your app is stored. To fill this information, run:

+
+
+
    +
  • +

    npx cap init

    +
  • +
+
+
+
+
+

Modifications

+
+
+

Throughout the development process, usually back-end and front-end are on a local computer, so it’s a common practice to have different configuration files for each environment (commonly production and development). Ionic uses an angular.json file to store those configurations and some rules to be applied.

+
+
+

If a back-end is hosted on http://localhost:8081, and that direction is used in every environment, the application built for android will not work because computer and device do not have the same localhost. Fortunately, different configurations can be defined.

+
+
+

Android Studio uses 10.0.2.2 as an alias for 127.0.0.1 (the computer’s localhost), so adding http://10.0.2.2:8081 in a new environment file and modifying angular.json accordingly will make it possible to connect front-end and back-end.

+
+
+
+Android environment and angular.json +
+
+
+
+
    "build": {
+    ...
+        "configurations": {
+            ...
+            "android": {
+                "fileReplacements": [
+                    {
+                        "replace": "src/environments/environment.ts",
+                        "with": "src/environments/environment.android.ts"
+                    }
+                ]
+            },
+        }
+    }
+
+
+
+
+
+

Build

+
+
+

Once configured, it is necessary to build the Ionic app using this new configuration:

+
+
+
    +
  • +

    ionic build --configuration=android

    +
  • +
+
+
+

The next commands copy the build application on a folder named android and open android studio.

+
+
+
    +
  • +

    npx cap add android

    +
  • +
  • +

    npx cap copy

    +
  • +
  • +

    npx cap open android

    +
  • +
+
+
+
+
+

From Android project to emulated device

+
+
+

Once Android Studio is opened, follow these steps:

+
+
+
    +
  1. +

    Click on "Build" → Make project.

    +
  2. +
  3. +

    Click on "Build" → Make Module 'app' (default name).

    +
  4. +
+
+
+

Click on make project +click on make app

+
+
+
    +
  1. +

    Click on "Build" → Build Bundle(s) / APK(s) → Build APK(s).

    +
  2. +
  3. +

    Click on run and choose a device.

    +
  4. +
+
+
+

click on build APK +click on running device

+
+
+

If there are no devices available, a new one can be created:

+
+
+
    +
  1. +

    Click on "Create new device"

    +
  2. +
  3. +

    Select hardware and click "Next". For example: Phone → Nexus 5X.

    +
  4. +
+
+
+

Create new device +Select hardware

+
+
+
    +
  1. +

    Download a system image.

    +
    +
      +
    1. +

      Click on download.

      +
    2. +
    3. +

      Wait until the installation finished and then click "Finish".

      +
    4. +
    5. +

      Click "Next".

      +
    6. +
    +
    +
  2. +
  3. +

    Verify configuration (default configuration should be enough) and click "Next".

    +
  4. +
+
+
+

Download system image +Check configuration

+
+
+
    +
  1. +

    Check that the new device is created correctly.

    +
  2. +
+
+
+
+New created device +
+
+
+
+
+

From Android project to real device

+
+
+

To test on a real android device, an easy approach to communicate a smartphone (front-end) and computer (back-end) is to configure a WiFi hotspot and connect the computer to it. A guide about this process can be found at https://support.google.com/nexus/answer/9059108?hl=en

+
+
+

Once connected, run ipconfig on a console if you are using windows or ifconfig on a linux machine to get the IP address of your machine’s Wireless LAN adapter WiFi.

+
+
+
+Result of `ipconfig` command on Windows 10 +
+
+
+

This obtained IP must be used instead of "localhost" or "10.0.2.2" at environment.android.ts.

+
+
+
+Android environment file server URL +
+
+
+

After this configuration, follow the build steps in "From Ionic to Android project" and the first three steps in "From Android project to emulated device".

+
+
+
+
+

Send APK to Android through USB

+
+
+

To send the built application to a device, you can connect computer and mobile through USB, but first, it is necessary to unlock developer options.

+
+
+
    +
  1. +

    Open "Settings" and go to "System".

    +
  2. +
  3. +

    Click on "About".

    +
  4. +
  5. +

    Click "Build number" seven times to unlock developer options.

    +
  6. +
+
+
+
+Steps to enable developer options: 1, 2, 3 +
+
+
+
    +
  1. +

    Go to "System" again and then to "Developer options".

    +
  2. +
  3. +

    Check that the options are "On".

    +
  4. +
  5. +

    Check that "USB debugging" is activated.

    +
  6. +
+
+
+
+Steps to enable developer options: 4, 5, 6 +
+
+
+

After this, do the step four in "From Android project to emulated device" and choose the connected smartphone.

+
+
+
+
+

Send APK to Android through email

+
+
+

When you build an APK, a dialog gives two options: locate or analyze. If the first one is chosen, Windows file explorer will be opened showing an APK that can be sent using email. Download the APK on your phone and click it to install.

+
+
+
+Steps to enable developer options: 4, 5, 6 +
+
+
+
+
+

Result

+
+
+

If everything goes correctly, the Ionic application will be ready to be tested.

+
+
+
+Application running on a real device +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/ionic/guide-ionic-getting-started.html b/docs/devon4ts/1.0/ionic/guide-ionic-getting-started.html new file mode 100644 index 00000000..32010007 --- /dev/null +++ b/docs/devon4ts/1.0/ionic/guide-ionic-getting-started.html @@ -0,0 +1,388 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Ionic 5 Getting started

+
+
+

Ionic is a front-end focused framework which offers different tools for developing hybrid mobile applications. The web technologies used for this purpose are CSS, Sass, HTML5 and Typescript.

+
+
+
+
+

Why Ionic?

+
+
+

Ionic is used for developing hybrid applications, which means not having to rely on a specific IDE such as Android Studio or Xcode. Furthermore, development of native apps require learning different languages (Java/Kotlin for Android and Objective-C/Swift for Apple), with Ionic, a developer does not have to code the same functionality for multiple platforms, just use the adequate libraries and components.

+
+
+
+
+

Basic environment set up

+
+ +
+
+
+

Install Ionic CLI

+
+
+

Although the devonfw distribution comes with an already installed Ionic CLI, here are the steps to install it. The installation of Ionic is easy: just one command has to be written:

+
+
+

$ npm install -g @ionic/cli

+
+
+
+
+

Update Ionic CLI

+
+
+

If there was a previous installation of the Ionic CLI, it will need to be uninstalled due to a change in package name.

+
+
+
+
$ npm uninstall -g ionic
+$ npm install -g @ionic/cli
+
+
+
+
+
+

Basic project set up

+
+
+

The set up of an ionic application is pretty immediate and can be done in one line:

+
+
+

ionic start <name> <template> --type=angular

+
+
+
    +
  • +

    ionic start: Command to create an app.

    +
  • +
  • +

    <name>: Name of the application.

    +
  • +
  • +

    <template>: Model of the application.

    +
  • +
  • +

    --type=angular: With this flag, the app produced will be based on angular.

    +
  • +
+
+
+

To create an empty project, the following command can be used:

+
+
+

ionic start MyApp blank --type=angular

+
+
+
+Ionic blank project +
+
+
+

The image above shows the directory structure generated.

+
+
+

There are more templates available that can be seen with the command +ionic start --list

+
+
+
+List of ionic templates +
+
+
+

The templates surrounded by red line are based on angular and comes with Ionic v5, while the others belong to earlier versions (before v4).

+
+
+ + + + + +
+ + +More info at https://ionicframework.com/docs. Remember to select Angular documentation, since Ionic supports React, Vue and Vanilla JS. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/ionic/guide-ionic-pwa.html b/docs/devon4ts/1.0/ionic/guide-ionic-pwa.html new file mode 100644 index 00000000..76a89876 --- /dev/null +++ b/docs/devon4ts/1.0/ionic/guide-ionic-pwa.html @@ -0,0 +1,543 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Ionic Progressive Web App

+
+
+

This guide is a continuation of the guide Angular PWAs, therefore, valid concepts explained there are still valid in this page but focused on Ionic.

+
+
+
+
+

Assumptions

+
+
+

This guide assumes that you already have installed:

+
+
+
    +
  • +

    Node.js

    +
  • +
  • +

    npm package manager

    +
  • +
  • +

    Angular CLI

    +
  • +
  • +

    Ionic 5 CLI

    +
  • +
  • +

    Capacitor

    +
  • +
+
+
+

Also, it is a good idea to read the document about PWA using Angular.

+
+
+
+
+

Sample Application

+
+
+
+Ionic 5 PWA Base +
+
Figure 1. Basic ionic PWA.
+
+
+

To explain how to build progressive web apps (PWA) using Ionic, a basic application is going to be built. This app will be able to take photos even without network using PWA elements.

+
+
+
+
+

Step 1: Create a new project

+
+
+

This step can be completed with one simple command: ionic start <name> <template>, where <name> is the name and <template> a model for the app. In this case, the app is going to be named basic-ion-pwa.

+
+
+
+
+

Step 2: Structures and styles

+
+
+

The styles (scss) and structures (html) do not have anything specially relevant, just colors and ionic web components. The code can be found in devon4ng samples.

+
+
+
+
+

Step 3: Add functionality

+
+
+

After this step, the app will allow users to take photos and display them on the main screen. +First, we have to import three important elements:

+
+
+
    +
  • +

    DomSanitizer: Sanitizes values to be safe to use.

    +
  • +
  • +

    SafeResourceUrl: Interface for values that are safe to use as URL.

    +
  • +
  • +

    Plugins: Capacitor constant value used to access to the device’s camera and toast dialogs.

    +
  • +
+
+
+
+
  import { DomSanitizer, SafeResourceUrl } from '@angular/platform-browser';
+  import { Plugins, CameraResultType } from '@capacitor/core';
+  const { Camera, Toast } = Plugins;
+
+
+
+

The process of taking a picture is enclosed in a takePicture method. takePicture calls the Camera’s getPhoto function which returns an URL or an exception. If a photo is taken then the image displayed in the main page will be changed for the new picture, else, if the app is closed without changing it, a toast message will be displayed.

+
+
+
+
  export class HomePage {
+    image: SafeResourceUrl;
+    ...
+
+    async takePicture() {
+      try {
+        const image = await Camera.getPhoto({
+          quality: 90,
+          allowEditing: true,
+          resultType: CameraResultType.Uri,
+        });
+
+        // Change last picture shown
+        this.image = this.sanitizer.bypassSecurityTrustResourceUrl(image.webPath);
+      } catch (e) {
+        this.show('Closing camera');
+      }
+    }
+
+    async show(message: string) {
+      await Toast.show({
+        text: message,
+      });
+    }
+  }
+
+
+
+
+
+

Step 4: PWA Elements

+
+
+

When Ionic apps are not running natively, some resources like Camera do not work by default but can be enabled using PWA Elements. To use Capacitor’s PWA elements run npm install @ionic/pwa-elements and modify src/main.ts as shown below.

+
+
+
+
...
+
+// Import for PWA elements
+import { defineCustomElements } from '@ionic/pwa-elements/loader';
+
+if (environment.production) {
+  enableProdMode();
+}
+
+platformBrowserDynamic().bootstrapModule(AppModule)
+  .catch(err => console.log(err));
+
+// Call the element loader after the platform has been bootstrapped
+defineCustomElements(window);
+
+
+
+
+
+

Step 5: Make it Progressive.

+
+
+

Turning an Ionic 5 app into a PWA is pretty easy, the same module used to turn Angular apps into PWAs has to be added, to do so, run: ng add @angular/pwa. This command also creates an icons folder inside src/assets and contains angular icons for multiple resolutions. If you want to use other images, be sure that they have the same resolution, the names can be different but the file manifest.json has to be changed accordingly.

+
+
+
+
+

Step 6: Configure the app

+
+
+

manifest.json

+
+
+

Default configuration.

+
+
+

ngsw-config.json

+
+
+

At assetGroups → resources add a URLs field and a pattern to match PWA Elements scripts and other resources (images, styles, …​):

+
+
+
+
  "urls": ["https://unpkg.com/@ionic/pwa-elements@1.0.2/dist/**"]
+
+
+
+
+
+

Step 7: Check that your app is a PWA

+
+
+

To check if an app is a PWA lets compare its normal behaviour against itself but built for production. Run in the project’s root folder the commands below:

+
+
+

ionic build --prod to build the app using production settings.

+
+
+

npm install http-server to install an npm module that can serve your built application. Documentation here. A good alternative is also npm install serve. It can be checked here.

+
+
+

Go to the www folder running cd www.

+
+
+

http-server -o or serve to serve your built app.

+
+
+ + + + + +
+ + +In order not to install anything not necessary npx can be used directly to serve the app. i.e run npx serve [folder] will automatically download and run this HTTP server without installing it in the project dependencies. +
+
+
+
+Http server running +
+
Figure 2. Http server running on localhost:8081.
+
+
+

 
+In another console instance run ionic serve to open the common app (not built).

+
+
+
+Ionic serve on Visual Studio Code console +
+
Figure 3. Ionic server running on localhost:8100.
+
+
+

 
+The first difference can be found on Developer tools → application, here it is seen that the PWA application (left) has a service worker and the common one does not.

+
+
+
+Application comparison +
+
Figure 4. Application service worker comparison.
+
+
+

 
+If the "offline" box is checked, it will force a disconnection from the network. In situations where users do not have connectivity or have a slow one, the PWA can still be accessed and used.

+
+
+
+Online offline apps +
+
Figure 5. Offline application.
+
+
+

 
+Finally, plugins like Lighthouse can be used to test whether an application is progressive or not.

+
+
+
+Lighthouse report +
+
Figure 6. Lighthouse report.
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/Home.html b/docs/devon4ts/1.0/nest/Home.html new file mode 100644 index 00000000..599465fa --- /dev/null +++ b/docs/devon4ts/1.0/nest/Home.html @@ -0,0 +1,373 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4node Wiki

+
+ +
+
+
+

Layers

+
+
+ +
+
+
+ +
+

devon4node applications

+ +
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/devon4node-architecture.html b/docs/devon4ts/1.0/nest/devon4node-architecture.html new file mode 100644 index 00000000..ce63b6a4 --- /dev/null +++ b/docs/devon4ts/1.0/nest/devon4node-architecture.html @@ -0,0 +1,515 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4node Architecture

+
+
+

As we have mentioned in the introduction, devon4node is based on NestJS. Nest (NestJS) is a framework for building efficient, scalable Node.js server-side applications.

+
+
+
+
+

HTTP layer

+
+
+

By using NestJS, devon4node is a platform-agnostic framework. NestJS focuses only on the logical layer, and delegates the transport layer to another framework, such as ExpressJS. You can see it in the following diagram:

+
+
+
+devon4node architecture +
+
+
+

As you can see, NestJS does not listen directly for incoming requests. It has an adapter to communicate with ExpressJS, and ExpressJS is responsible for that. ExpressJS is only one of the frameworks that NestJS can work with. There is also another adapter available out-of-the-box: the Fastify adapter. With that, you can replace ExpressJS with Fastify but still use all your NestJS components. You can also create your own adapter to make NestJS work with another HTTP framework.

+
+
+

At this point, you may think: why is NestJS (and devon4node) using ExpressJS by default instead of Fastify? Because, as you can see in the previous diagram, there is a component that is dependent on the HTTP framework: the middleware. As ExpressJS is the most widely used framework, there exists a lot of middleware for it, so, in order to reuse them in our NestJS applications, NestJS uses ExpressJS by default. In any case, you should consider which HTTP framework best fits your requirements.

+
+
+
+
+

devon4node layers

+
+
+

As other devonfw technologies, devon4node separates the application into layers.

+
+
+

Those layers are:

+
+ +
+
+layers +
+
+
+
+
+

devon4node application structure

+
+
+

Although there are many frameworks to create backend applications in NodeJS, none of them effectively solve the main problem of - Architecture. This is the main reason we have chosen NestJS for the devon4node applications. Besides, NestJS is highly inspired by Angular, therefore a developer who knows Angular can use his already acquired knowledge to write devon4node applications.

+
+
+

NestJS adopts various Angular concepts, such as dependency injection, piping, interceptors and modularity, among others. By using modularity we can reuse some of our modules between applications. One example that devon4node provide is the mailer module.

+
+
+
+
+

Modules

+
+
+

Creating an application module is simple: you only need to create an empty class with the Module decorator:

+
+
+
+
@Module({})
+export class AppModule {}
+
+
+
+

In the module you can define:

+
+
+
    +
  • +

    Imports: the list of imported modules that export the providers which are required in this module

    +
  • +
  • +

    Controllers: the set of controllers defined in this module which have to be instantiated

    +
  • +
  • +

    Providers: the providers that will be instantiated by the Nest injector and that may be shared at least across this module

    +
  • +
  • +

    Exports: the subset of providers that are provided by this module and should be available in other modules which import this module

    +
  • +
+
+
+

The main difference between Angular and NestJS is NestJS modules encapsulates providers by default. This means that it’s impossible to inject providers that are neither directly part of the current module nor exported from the imported modules. Thus, you may consider the exported providers from a module as the module’s public interface, or API. Example of modules graph:

+
+
+
+modules +
+
+
+

In devon4node we have three different kinds of modules:

+
+
+
    +
  • +

    AppModule: this is the root module. Everything that our application need must be imported here.

    +
  • +
  • +

    Global Modules: this is a special kind of module. When you make a module global, it’s accessible to every module in your application. You can see it in the next diagram. It’s the same as the previous one, but now the CoreModule is global:

    +
    +
    +module2 +
    +
    +
    +

    One example of a global module is the CoreModule. In the CoreModule you must import every module which has providers that need to be accessible in all modules of your application.

    +
    +
  • +
  • +

    Feature (or application) modules: modules which contains the logic of our application. We must import it in the AppModule.

    +
  • +
+
+
+

For more information about modules, see NestJS documentation page

+
+
+
+
+

Folder structure

+
+
+

devon4node defines a folder structure that every devon4node application must follow. The folder structure is:

+
+
+
+
├───src
+│   ├───app
+│   │   ├───core
+│   │   │   ├───auth
+│   │   │   ├───configuration
+│   │   │   ├───user
+│   │   │   └───core.module.ts
+│   │   ├───shared
+│   │   └───feature
+│   │       ├───sub-module
+│   │       │   ├───controllers
+│   │       │   ├───...
+│   │       │   ├───services
+│   │       │   └───sub-module.module.ts
+│   │       ├───controllers
+│   │       ├───interceptors
+│   │       ├───pipes
+│   │       ├───guards
+│   │       ├───filters
+│   │       ├───middlewares
+│   │       ├───model
+│   │       │   ├───dto
+│   │       │   └───entities
+│   │       ├───services
+│   │       └───feature.module.ts
+│   ├───config
+│   └───migration
+├───test
+└───package.json
+
+
+
+

devon4node schematics ensures this folder structure so, please, do not create files by your own, use the devon4node schematics.

+
+
+
+
+

NestJS components

+
+
+

NestJS provides several components that you can use in your application:

+
+
+ +
+
+

In the NestJS documentation you can find all information about each component. But, something that is missing in the documentation is the execution order. Every component can be defined in different levels: globally, in the controller or in the handler. As middleware is part of the HTTP server we can define it in a different way: globally or in the module.

+
+
+
+components +
+
+
+

It is not necessary to have components defined at every level. For example, you can have an interceptor defined globally but not have any other at the controller or handler level. If nothing is defined at some level, the request will continue to the next component.

+
+
+

As you can see in the previous image, the first component which receives the request is the globally defined middleware. Then, it sends the request to the module middleware. Each of them can return a response to the client without passing the request to the next level.

+
+
+

Then, the request continues to the guards: first the global guard, next the controller guard and finally the handler guard. At this point, we can throw an exception in all components and the exception filter will catch it and send a proper error message to the client. We do not paint the filters in the graphic in order to simplify it.

+
+
+

After the guards, it is time for the interceptors: global interceptors, controller interceptors and handler interceptors. And last, before arriving at the handler inside the controller, the request passes through the pipes.

+
+
+

When the handler has the response ready to send to the client, it does not go directly to the client. It comes again to the interceptors, so we can also intercept the response. The order this time is the reverse: handler interceptors, controller interceptors and global interceptors. After that, we can finally send the response to the client.

+
+
+

Now, with this in mind, you are able to create the components in a better way.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/devon4node-introduction.html b/docs/devon4ts/1.0/nest/devon4node-introduction.html new file mode 100644 index 00000000..fb235848 --- /dev/null +++ b/docs/devon4ts/1.0/nest/devon4node-introduction.html @@ -0,0 +1,288 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

devon4node

+
+
+

devonfw is a platform which provides solutions to building business applications which combine best-in-class frameworks and libraries as well as industry proven practices and code conventions. devonfw is 100% Open Source (Apache License version 2.0) since the beginning of 2018.

+
+
+

devon4node is the NodeJS stack of devonfw. It allows you to build business applications (backends) using NodeJS technology in standardized way based on established best-practices.

+
+
+

devon4node is based on NestJS. Nest (NestJS) is a framework for building efficient, scalable Node.js server-side applications. It uses progressive TypeScript and combines elements of OOP (Object Oriented Programming), FP (Functional Programming), and FRP (Functional Reactive Programming).

+
+
+

In this wiki you can find all the documentation related to devon4node. Choose a wiki page from the side-bar.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/guides-auth-jwt.html b/docs/devon4ts/1.0/nest/guides-auth-jwt.html new file mode 100644 index 00000000..cb0924f3 --- /dev/null +++ b/docs/devon4ts/1.0/nest/guides-auth-jwt.html @@ -0,0 +1,363 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Auth JWT module

+
+
+

devon4node provides a way to generate a default authentication module using JWT (JSON Web Token). It uses the @nestjs/passport library described here.

+
+
+

To generate the devon4node auth-jwt module you only need to execute the command: nest generate -c @devon4node/schematics auth-jwt. We generate this module inside the applications instead of distributing an npm package because this module is prone to be modified depending on the requirements. It also generates a basic user module.

+
+
+

In this page we will explain the default implementation provided by devon4node. For more information about authentication, JWT, passport and other you can see:

+
+
+ +
+
+
+
+

Auth JWT endpoints

+
+
+

In order to execute authentication operations, the auth-jwt module exposes the following endpoints:

+
+
+
    +
  • +

    POST /auth/login: receives a username and a password and returns the token in the header if the combination of username and password is correct.

    +
  • +
  • +

    POST /auth/register: register a new user.

    +
  • +
  • +

    GET /auth/currentuser: return the user data if he is authenticated.

    +
  • +
+
+
+
+
+

Protect endpoints with auth-jwt

+
+
+

In order to protect your endpoints with auth-jwt module you only need to add the AuthGuard() in the UseGuards decorator. Example:

+
+
+
+
@Get('currentuser')
+@UseGuards(AuthGuard())
+currentUser(@Request() req: UserRequest) {
+  return req.user;
+}
+
+
+
+

Now, all requests to currentuser are protected by the AuthGuard.

+
+
+
+
+

Role based Access Control

+
+
+

The auth-jwt module also provides a way to control the access to some endpoints by using roles. For example, if you want to grant access to an endpoint only to admins, you only need to add the Roles decorator to those endpoints with the roles allowed. Example:

+
+
+
+
@Get('currentuser')
+@UseGuards(AuthGuard())
+@Roles(roles.ADMIN)
+currentUser(@Request() req: UserRequest) {
+  return req.user;
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/guides-code-generation.html b/docs/devon4ts/1.0/nest/guides-code-generation.html new file mode 100644 index 00000000..21697249 --- /dev/null +++ b/docs/devon4ts/1.0/nest/guides-code-generation.html @@ -0,0 +1,515 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Code Generation

+
+
+

As we mentioned in the page key principles, one of our key principles is Productivity. In order to provide that productivity, we have some tools to generate code. These tools will help you generate the common parts of the application so that you can focus only on the specific functionality.

+
+
+

Those tools are:

+
+ +
+
+
+

Nest CLI and Devon4node schematics

+
+
+

We are going to use the Nest CLI to generate the code of our application. You can learn more about the Nest CLI in the official documentation.

+
+
+
+
+

Install devon4node schematics

+
+
+

First of all, you need to install Nest CLI

+
+
+

Execute the command yarn global add @nestjs/cli. +You can also use npm: npm install -g @nestjs/cli

+
+
+

And then Devon4node schematics globally with the following command:

+
+
+

yarn global add @devon4node/schematics or npm install -g @devon4node/schematics

+
+
+
+
+

==

+
+
+

If you get an error trying to execute any devon4node schematic related to collection not found, try to reinstall devon4node/schematics on the project folder or be sure that the schematics folder is inside @devon4node in node_modules. +yarn add @devon4node/schematics +== ==

+
+
+
+
+

Generate new devon4node application

+
+
+

To start creating a devon4node application, execute the command:

+
+
+

nest g -c @devon4node/schematics application [application-name]

+
+
+

If you do not put a name, the command line will ask you for one.

+
+
+
+
+

Generate code for TypeORM

+
+
+

Initialize TypeORM into your current project in a correct way.

+
+
+

nest g -c @devon4node/schematics typeorm

+
+
+

Then, you will be asked about which DB you want to use.

+
+
+

typeorm schematic

+
+
+
+
+

Generate CRUD

+
+
+

Generate CRUD methods for an entity. Requires TypeORM installed in the project.

+
+
+

It will add the @nestjsx/crud module as a project dependency. Then, generates an entity, a CRUD controller and a CRUD service. It also register the entity, controller and service in the module.

+
+
+

Execute nest g -c @devon4node/schematics crud and then you will need to write a name for the crud.

+
+
+
+crud schematic +
+
+
+
+
+

Generate TypeORM entity

+
+
+

Add a TypeORM entity to your project. Requires TypeORM installed in the project.

+
+
+

Execute nest g -c @devon4node/schematics entity and you will be asked for an entity name.

+
+
+
+
+

Add config-module

+
+
+

Add the config module to the project.

+
+
+

It will add the @devon4node/common module as a project dependency. Then, it will generate the configuration module into your project and add it in the core module. Also, it generates the config files for the most common environments.

+
+
+

The command to execute will be nest g -c @devon4node/schematics config-module

+
+
+
+
+

Add mailer module

+
+
+

Add @devon4node/mailer module to project.

+
+
+

It will add the @devon4node/mailer module as a project dependency. Also, it will add it to the core module and it will generate some email template examples.

+
+
+

Write the command nest g -c @devon4node/schematics mailer

+
+
+
+
+

Add swagger module

+
+
+

Add swagger module to project.

+
+
+

It will add the @nestjs/swagger module as a project dependency. Also, it will update the main.ts file in order to expose the endpoint for swagger. The default endpoint is: /v1/api

+
+
+

Execute the command nest g -c @devon4node/schematics swagger

+
+
+
+
+

Add auth-jwt module

+
+
+

Add the auth JWT module to the project.

+
+
+

It will add to your project the auth-jwt and user module. Also, it will import those modules into the core module.

+
+
+

Execute nest g -c @devon4node/schematics auth-jwt

+
+
+
+
+

Add security

+
+
+

Add cors and helmet to your project.

+
+
+

It will add helmet package as project dependency and update the main.ts file in order to enable the cors and helmet in your application.

+
+
+

Execute nest g -c @devon4node/schematics security

+
+
+
+
+

Generate database migrations

+
+
+
    +
  1. +

    Generate database migrations

    +
    +
      +
    1. +

      In order to create migration scripts with TypeORM, you need to install ts-node: yarn global add ts-node or npm i -g ts-node

      +
    2. +
    3. +

      Generate the tables creation migration: yarn run typeorm migration:generate -n CreateTables

      +
      +
      +insert data +
      +
      +
      +

      It will connect to the database, read all entities and then it will generate a migration file with all SQL queries needed to transform the current status of the database to the status defined by the entities. If the database is empty, it will generate all SQL queries needed to create all tables defined in the entities. You can find an example in the todo example

      +
      +
    4. +
    +
    +
  2. +
+
+
+

As TypeORM is the tool used for DB. You can check official documentation for more information. +See TypeORM CLI documentation.

+
+
+
+
+

CobiGen

+
+
+

Currently, we do not have templates to generate devon4node code (we have planned to do that in the future). Instead, we have templates that read the code of a devon4node application and generate a devon4ng application. Visit the CobiGen page for more information.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/guides-coding-conventions.html b/docs/devon4ts/1.0/nest/guides-coding-conventions.html new file mode 100644 index 00000000..1799dc27 --- /dev/null +++ b/docs/devon4ts/1.0/nest/guides-coding-conventions.html @@ -0,0 +1,537 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Coding Conventions

+
+
+

devon4node defines some coding conventions in order to improve the readability, reduce the merge conflicts and be able to develop applications in an industrialized way.

+
+
+

In order to ensure that you are following the devon4node coding conventions, you can use the following tools:

+
+
+
    +
  • +

    ESLint: ESLint is a tool for identifying and reporting on patterns found in ECMAScript/JavaScript code, with the goal of making code more consistent and avoiding bugs. We recommend to use the ESLint VSCode extension (included in the devonfw Platform Extension Pack) in order to be able to see the linting errors while you are developing.

    +
  • +
  • +

    Prettier: Prettier is a code formatter. We recommend to use the Prettier VSCode extension (included in the devonfw Platform Extension Pack) and enable the editor.formatOnSave option.

    +
  • +
  • +

    devon4node application schematic: this tool will generate code following the devon4node coding conventions. Also, when you generate a new project using the devon4node application schematic, it generates the configuration files for TSLint and Prettier that satisfy the devon4node coding conventions.

    +
  • +
+
+
+

When you combine all tools, you can be sure that you follow the devon4node coding conventions.

+
+
+
+
+

Detailed devon4node Coding Conventions

+
+
+

Here we will detail some of the most important devon4node coding conventions. To be sure that you follow all devon4node coding conventions use the tools described before.

+
+
+
+
+

Indentation

+
+
+

All devon4node code files must be indented using spaces. The indentation width must be 2 spaces.

+
+
+
+
+

White space

+
+
+

In order to improve the readability of your code, you must introduce whitespaces. Example:

+
+
+
+
if(condition){
+
+
+
+

must be

+
+
+
+
if (condition) {
+
+
+
+
+
+

Naming conventions

+
+ +
+
+
+

== File naming

+
+
+

The file name must follow the pattern: (name in kebab case).(kind of component).(extension) +The test file name must follow the pattern: (name in kebab case).(kind of component).spec.(extension)

+
+
+

Example:

+
+
+
+
auth-jwt.service.ts
+auth-jwt.service.spec.ts
+
+
+
+
+
+

== Interface naming

+
+
+

The interface names must be in pascal case, and must start with I. There is some controversy in starting the interface names with an I, but we decided to do it because in most cases you will have an interface and a class with the same name, so, to differentiate them, we decided to start the interfaces with I. Other devonfw stacks solve it by adding the suffix Impl in the class implementations.

+
+
+

Example:

+
+
+
+
interface ICoffee {}
+
+
+
+
+
+

== Class naming

+
+
+

The class names must be in pascal case.

+
+
+

Example:

+
+
+
+
class Coffee {}
+
+
+
+
+
+

== Variable naming

+
+
+

All variable names must be in camel case.

+
+
+
+
const coffeeList: Coffe[];
+
+
+
+
+
+

Declarations

+
+
+

For all variable declarations we must use const or let. var is forbidden. We prefer to use const when possible.

+
+
+
+
+

Programming practices

+
+ +
+
+
+

== Trailing comma

+
+
+

All statements must end with a trailing comma. Example:

+
+
+
+
{
+  one: 'one',
+  two: 'two'  // bad
+}
+{
+  one: 'one',
+  two: 'two', // good
+}
+
+
+
+
+
+

== Arrow functions

+
+
+

All anonymous functions must be defined with the arrow function notation. In most cases it’s not a problem, but sometimes, when you do not want to bind this when you define the function, you can use the other function definition. In these specific cases you must disable the linter for those sentences.

+
+
+
+
+

== Comments

+
+
+

Comments must start with a whitespace. Example:

+
+
+
+
//This is a bad comment
+// This is OK
+
+
+
+
+
+

== Quotemarks

+
+
+

For string definitions, we must use single quotes.

+
+
+
+
+

== if statements

+
+
+

In all if statements you always must use brackets. Example:

+
+
+
+
// Bad if statement
+if (condition)
+  return true;
+
+// Good if statement
+if (condition) {
+  return true;
+}
+
+
+
+
+
+

Pre-commit hooks

+
+
+

In order to ensure that your new code follows the coding conventions, devon4node uses by default husky. Husky is a tool that allows you to configure git hooks easily in your project. When you make a git commit in your devon4node project, it will execute two actions:

+
+
+
    +
  • +

    Prettify the staged files

    +
  • +
  • +

    Execute the linter in the staged files

    +
  • +
+
+
+

If any action fails, you won’t be able to commit your new changes.

+
+
+ + + + + +
+ + +If you want to skip the git hooks, you can do a commit passing the --no-verify flag. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/guides-configuration-module.html b/docs/devon4ts/1.0/nest/guides-configuration-module.html new file mode 100644 index 00000000..1d59dc2e --- /dev/null +++ b/docs/devon4ts/1.0/nest/guides-configuration-module.html @@ -0,0 +1,415 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Configuration Module

+
+
+

devon4node provides a way to generate a configuration module inside your application. To generate it you only need to execute the command nest g -c @devon4node/schematics config-module. This command will generate inside your application:

+
+
+
    +
  • +

    Configuration module inside the core module.

    +
  • +
  • +

    config folder where all environment configurations are stored.

    +
    +
      +
    • +

      default configuration: configuration for your local development environment.

      +
    • +
    • +

      develop environment configuration for the develop environment.

      +
    • +
    • +

      uat environment configuration for the uat environment.

      +
    • +
    • +

      production environment configuration for the production environment.

      +
    • +
    • +

      production environment configuration for the production environment.

      +
    • +
    • +

      test environment configuration used by test.

      +
    • +
    +
    +
  • +
+
+
+ + + + + +
+ + +some code generators will add some properties to this module, so, be sure that the config module is the first module that you generate in your application. +
+
+
+
+
+

Use the configuration service

+
+
+

To use the configuration service, you only need to inject it as dependency. As configuration module is defined in the core module, it will be available everywhere in your application. Example:

+
+
+
+
export class MyProvider {
+  constructor(public readonly configService: ConfigurationService) {}
+
+  myMethod() {
+    return this.configService.isDev;
+  }
+}
+
+
+
+
+
+

Choose an environment file

+
+
+

By default, when you use the configuration service it will take the properties defined in the default.ts file. If you want to change the configuration file, you only need to set the NODE_ENV environment property with the name of the desired environment. Examples: in windows execute set NODE_ENV=develop before executing the application, in linux execute NODE_ENV=develop before executing the application or NODE_ENV=develop yarn start.

+
+
+
+
+

Override configuration properties

+
+
+

Sometimes, you want to keep some configuration property secure, and you do not want to publish it to the repository, or you want to reuse some configuration file but you need to change some properties. For those scenarios, you can override configuration properties by defining an environment variable with the same name. For example, if you want to override the property host, you can do: set host="newhost". It also works with objects. For example, if you want to change the value of secret in the property jwtConfig for this example, you can set an environment variable like this: set jwtConfig="{"secret": "newsecret"}". As you can see, this environment variable has a JSON value. It will take the object and merge the jwtConfig property with the properties defined inside the environment variable. The other properties maintain their value. The behaviour is the same for the nested objects.

+
+
+
+
+

Add a configuration property

+
+
+

In order to add a new property to the configuration module, you need to follow some steps:

+
+
+
    +
  • +

    Add the property to the IConfig interface in the src/app/core/configuration/types.ts file. With this, we can ensure that the ConfigurationService and the environment files have those properties at compiling time.

    +
  • +
  • +

    Add the new property getter to ConfigurationService. You must use the get method of ConfigurationService to ensure that the property will be loaded from the desired config file. You can also add extra logic if needed.

    +
  • +
  • +

    Add the property to all config files inside the src/config folder.

    +
  • +
+
+
+

Example:

+
+
+

We want to add the property devonfwUrl to our ConfigurationService, so:

+
+
+

We add the following code in IConfig interface:

+
+
+
+
devonfwUrl: string;
+
+
+
+

Then, we add the getter in the ConfigurationService:

+
+
+
+
get devonfwUrl(): string {
+  return this.get('devonfwUrl')!;
+}
+
+
+
+

Finally, we add the definition in all config files:

+
+
+
+
devonfwUrl: 'https://devonfw.com',
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/guides-dependency-injection.html b/docs/devon4ts/1.0/nest/guides-dependency-injection.html new file mode 100644 index 00000000..a4f3e75f --- /dev/null +++ b/docs/devon4ts/1.0/nest/guides-dependency-injection.html @@ -0,0 +1,389 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Dependency Injection

+
+
+

The dependency injection is a well-known common design pattern applied by frameworks in all languages, like Spring in Java, Angular and others. The intention of this page is not to explain how dependency injection works, but instead how it is addressed by NestJS.

+
+
+

NestJS resolves the dependency injection in its modules. When you define a provider in a module, it can be injected in all components of the module. By default, those providers are only available in the module where they are defined. The only way to export a module provider to other modules which import it is adding those providers to the export array. You can also reexport modules.

+
+
+
+
+

Inject dependencies in NestJS

+
+
+

In order to inject a dependency in a NestJS component, you need to declare it in the component constructor. Example:

+
+
+
+
export class CoffeeController {
+  constructor(public readonly conffeeService: CoffeeService) {}
+}
+
+
+
+

NestJS can resolve all dependencies that are defined in the module as provider, and also the dependencies exported by the modules imported. Example:

+
+
+
+
@Module({
+  controllers: [CoffeeController],
+  providers: [CoffeeService],
+})
+export class CoffeeModule {}
+
+
+
+

Injecting dependencies in the constructor is the preferred choice, but sometimes it is not possible. For example, when you are extending another class and want to keep the constructor definition. In these specific cases we can inject dependencies in the class properties. Example:

+
+
+
+
export class CoffeeController {
+  @Inject(CoffeeService)
+  private readonly conffeeService: CoffeeService;
+}
+
+
+
+
+
+

Dependency Graph

+
+
+
+dependency injection1 +
+
+
+

In the previous image, the Module A can inject dependencies exported by Module B, Module E and Module F. If Module B reexports Module C and Module D, they are also accessible by Module A.

+
+
+

If there is a conflict with the injection token, it resolves the provider with the least distance to the module. For example: if the modules C and F export a UserService provider, the Module A will resolve the UserService exported by the Module F, because the distance from Module A to Module F is 1, and the distance from Module A to Module C is 2.

+
+
+

When you define a module as global, the dependency injection system is the same. The only difference is that now all modules have a link to the global module. For example, if we make the Module C global the dependency graph will be:

+
+
+
+dependency injection2 +
+
+
+
+
+

Custom providers

+
+
+

When you want to change the provider name, you can use a NestJS feature called custom providers. For example, if you want to define a provider called MockUserService with the provider token UserService you can define it like:

+
+
+
+
@Module({
+  providers: [{
+    provide: UserService,
+    useValue: MockUserService,
+  }],
+})
+
+
+
+

With this, when you want to inject UserService as a dependency, the MockUserService will be injected.

+
+
+

Custom provider token can be also a string:

+
+
+
+
@Module({
+  providers: [{
+    provide: 'USER_SERVICE',
+    useValue: MockUserService,
+  }],
+})
+
+
+
+

but now, when you want to inject it as dependency you need to use the @Inject decorator.

+
+
+
+
constructor(@Inject('USER_SERVICE') userService: any) {}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/guides-eslint-sonarqube-config.html b/docs/devon4ts/1.0/nest/guides-eslint-sonarqube-config.html new file mode 100644 index 00000000..14a1f14c --- /dev/null +++ b/docs/devon4ts/1.0/nest/guides-eslint-sonarqube-config.html @@ -0,0 +1,307 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Importing your ESLint reports into SonarQube

+
+
+

This guide covers the import of ESLint reports into SonarQube instances in CI environments, as this is the recommended way of using ESLint and SonarQube for devon4node projects. The prerequisites for this process are a CI environment, preferably a Production Line instance, and the ESLint CLI, which is already included when generating a new devon4node project.

+
+
+
+
+

Configuring the ESLint analysis

+
+
+

You can configure the ESLint analysis parameters in the .eslintrc.js file inside the top-level directory of your project. If you created your node project using the devon4node application schematic, this file will already exist. If you want to make further adjustments to it, have a look at the ESLint documentation.

+
+
+

The ESLint analysis script lint is already configured in the scripts part of your package.json. Simply add -f json > report.json, so that the output of the analysis is saved in a .json file. Additional information to customization options for the ESLint CLI can be found here.

+
+
+

To run the analysis, execute the script with npm run lint inside the base directory of your project.

+
+
+
+
+

Configuring SonarQube

+
+
+

If you haven’t already generated your CICD-related files, follow the tutorial on the devon4node schematic of our CICDGEN project, as you will need a Jenkinsfile configured in your project to proceed.

+
+
+

Inside the script for the SonarQube code analysis in your Jenkinsfile, add the parameter -Dsonar.eslint.reportPaths=report.json. Now, whenever a SonarQube analysis is triggered by your CI environment, the generated report will be loaded into your SonarQube instance. +To avoid duplicated issues, you can associate an empty TypeScript quality profile with your project in its server configurations.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/guides-grapql.html b/docs/devon4ts/1.0/nest/guides-grapql.html new file mode 100644 index 00000000..03cbace0 --- /dev/null +++ b/docs/devon4ts/1.0/nest/guides-grapql.html @@ -0,0 +1,603 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

GraphQL on Devon4Node

+
+
+

GraphQL is a query language that gets exactly the data that we ask for instead of static predefined responses.

+
+
+

For example, on a regular API a get by id method would return something like:

+
+
+
+
{
+  "location": {
+    "lon": 00.14,
+    "lat": 54.11
+  },
+  "station": "dsrEE3Sg",
+  "visibility": 5000,
+  "wind":{
+    "speed": 6.2,
+    "deg": 78
+  },
+  "logs": [...]
+  ...
+}
+
+
+
+

But if we want to get only the wind data we have to create another endpoint that returns the specified data.

+
+
+

But instead with graphQL we can get different information without creating new endpoints, in this case we only want the wind data so it would return:

+
+
+
+
{
+  "wind":{
+    "speed": 6.2,
+    "deg": 78
+  }
+}
+
+
+
+

To install it:

+
+
+
+
yarn add @nestjs/graphql graphql-tools graphql apollo-server-express
+
+
+
+
+
+

Schema first

+
+ +
+
+
+

==

+
+
+

This tutorial uses the schema first method.

+
+
+

We assume you have already a functioning TODO module / app.

+
+
+

If not you can use Devon4node GraphQL sample +== ==

+
+
+

First we need to import GraphQLModule to our app.module.ts.

+
+
+
+
...
+import { GraphQLModule } from '@nestjs/graphql';
+import { join } from 'path';
+
+@Module({
+  imports: [
+    // Your module import
+    GraphQLModule.forRoot({
+      typePaths: ['./**/*.graphql'],
+      definitions: {
+        path: join(process.cwd(), 'src/graphql.ts'),
+        outputAs: 'class',
+      },
+    }),
+  ],
+})
+export class AppModule {}
+
+
+
+

The typePaths indicates the location of the schema definition files.

+
+
+

The definitions indicates the file where the typescript definitions will automatically save, adding the outputAs: 'class' saves those definitions as classes.

+
+
+
+
+

Schema

+
+
+

Graphql is a typed language with object types, scalars, and enums.

+
+
+

We use query to define the methods we are going to use for fetching data, and mutations are used for modifying this data, similar to how GET and POST work.

+
+
+

Let’s define the elements, queries and mutations that our module is going to have.

+
+
+

For that we have to create a graphql file on our module, on this case we are going to name it "schema.graphql".

+
+
+
+
type Todo {
+  id: ID
+  task: String
+}
+
+type Query {
+  todos: [Todo]
+  todoById: Todo
+}
+
+type Mutation {
+  createTodo(task: String): Todo
+  deleteTodo(id: String): Todo
+}
+
+
+
+

For more information about Types go to the official graphQL documentation

+
+
+
+
+

Resolver

+
+
+

Resolver has the instructions to turn GraphQL orders into the data requested.

+
+
+

To create a resolver we go to our module and then create a new todo.resolver.ts file, import the decorators needed and set the resolver.

+
+
+
+
import { Resolver, Args, Mutation, Query } from '@nestjs/graphql';
+import { TodoService } from '../services/todo.service';
+import { Todo } from '../schemas/todo.schema';
+
+@Resolver()
+export class TodoResolver {
+  constructor(private readonly todoService: TodoService) {}
+
+  @Query('todos')
+  findAll(): Promise<Todo[]> {
+    return this.todoService.findAll();
+  }
+
+  @Query('todoById')
+  findOneById(@Args('id') id: string): Promise<Todo | null> {
+    return this.todoService.findOneById(id);
+  }
+
+  @Mutation()
+  createTodo(@Args('task') task: string): Promise<Todo> {
+    return this.todoService.create(task);
+  }
+
+  @Mutation()
+  deleteTodo(@Args('id') id: string): Promise<Todo | null> {
+    return this.todoService.delete(id);
+  }
+}
+
+
+
+

@Resolver() indicates that the next class is a resolver.

+
+
+

@Query is used to get data.

+
+
+

@Mutation is used to create or modify data.

+
+
+

Here we have also an argument decorator @Args which is an object with the arguments passed into the field in the query.

+
+
+

By default we can access the query or mutation using the method’s name, for example:

+
+
+

For the deleteTodo mutation.

+
+
+
+
mutation {
+  deleteTodo( id: "6f7ed2q8" ){
+    id,
+    task
+  }
+}
+
+
+
+

But if we write something different on the decorator, we change the name, for example:

+
+
+

For the findAll query, we named it todos.

+
+
+
+
{
+  todos{
+    id,
+    task
+  }
+}
+
+
+
+

Also if we go back to the schema.graphql, we will see how we define the query with todos.

+
+
+

Learn more about Resolvers, mutations and their argument decorators on the NestJS documentation.

+
+
+
+
+

Playground

+
+
+

To test our backend we can use tools as Postman, but graphql already gives us a playground to test our Resolvers, we can access by default on http://localhost:3000/graphql.

+
+
+

We can call a query, or several queries this way:

+
+
+
+
{
+  findAll{
+    id,
+    task
+  }
+}
+
+
+
+

And the output will look something like:

+
+
+
+
{
+  "data": {
+    "findAll": [
+      {
+        "id": "5fb54b30e686cb49500b6728",
+        "task": "clean dishes"
+      },
+      {
+        "id": "5fb54b3be686cb49500b672a",
+        "task": "burn house"
+      }
+    ]
+  }
+}
+
+
+
+

As we can see, we get a json "data" with an array of results.

+
+
+

And for our mutations it’s very similar, in this case we create a todo with task "rebuild house" and we are going to ask on the response just for the task data, we don’t want the id.

+
+
+
+
mutation{
+  createTodo (
+    task: "rebuild house"
+  ){
+    task
+  }
+}
+
+
+
+

And the output

+
+
+
+
{
+  "data": {
+    "createTodo": {
+      "task": "rebuild house"
+    }
+  }
+}
+
+
+
+

In this case we return just one item so there is no array, we also got just the task data but if we want the id too, we just have to add it on the request.

+
+
+

To make the playground unavailable we can add an option to the app.module import:

+
+
+
+
...
+GraphQLModule.forRoot({
+  ...
+  playground: false,
+}),
+...
+
+
+
+

For further information go to the official NestJS documentation

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/guides-key-principles.html b/docs/devon4ts/1.0/nest/guides-key-principles.html new file mode 100644 index 00000000..172dc3de --- /dev/null +++ b/docs/devon4ts/1.0/nest/guides-key-principles.html @@ -0,0 +1,388 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Key Principles

+
+
+

devon4node is built following some basic principles like:

+
+
+ +
+
+

But key principles that best define devon4node (and are inherited from NestJS) are:

+
+
+
    +
  • +

    Simplicity (aka KISS)

    +
  • +
  • +

    Reusability

    +
  • +
  • +

    Productivity

    +
  • +
+
+
+
+
+

Simplicity

+
+
+

In devon4node we tried to do everything as simple as possible. Following this principle we will be able to do easy to maintain applications.

+
+
+

For example, in order to expose all CRUD operations for an entity, you only need to create a controller like:

+
+
+
+
@Crud({
+  model: {
+    type: Employee,
+  },
+})
+@CrudType(Employee)
+@Controller('employee/employees')
+export class EmployeeCrudController {
+  constructor(public service: EmployeeCrudService) {}
+}
+
+
+
+

You can find this code in the employee example. Only with this code you’re exposing the full CRUD operations for the employee entity. As you can see, it’s an empty class with some decorators and the EmployeeCrudService injected as a dependency. Simple, isn’t it? The EmployeeCrudService is also simple:

+
+
+
+
@Injectable()
+export class EmployeeCrudService extends TypeOrmCrudService<Employee> {
+  constructor(@InjectRepository(Employee) repo: Repository<Employee>) {
+    super(repo);
+  }
+}
+
+
+
+

Another empty class which extends from TypeOrmCrudService<Employee> and injects the Employee Repository as dependency. Nothing else.

+
+
+

With these examples you can get an idea of how simple it can be to code a devon4node application .

+
+
+
+
+

Reusability

+
+
+

NestJS (and devon4node) applications are designed in a modular way. This allows you to isolate some functionality in a module, and then reuse it in every application that you need. This is the same behaviour that Angular has. You can see it in the NestJS modules like TypeORM, Swagger and others. Also, in devon4node we have the Mailer module.

+
+
+

In your applications, you only need to import those modules and then you will be able to use the functionality that they implement. Example

+
+
+
+
@Module({
+  imports: [ AuthModule, ConfigurationModule ],
+})
+export class SomeModule {}
+
+
+
+
+
+

Productivity

+
+
+

devon4node is designed to create secure enterprise applications, and it also allows you to do so in a fast way. To increase productivity, devon4node provides schematics in order to generate some boilerplate code.

+
+
+

For example, to create a module you need to create a new file for a module (or copy it) and write the code, then you need to import it in the AppModule. This is an easy example, but you can introduce some errors: forget to import it in the AppModule, introduce errors with the copy/paste and so on. By using the command nest g module --name <module-name> it will do everything for you. Just a simple command. In this specific case you probably do not see any advantage, but there are other complex cases where you can generate more complex code with nest and devon4node schematics commands.

+
+
+

See code generation in order to know how to increase your productivity creating devon4node applications.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/guides-logger.html b/docs/devon4ts/1.0/nest/guides-logger.html new file mode 100644 index 00000000..baf00f4a --- /dev/null +++ b/docs/devon4ts/1.0/nest/guides-logger.html @@ -0,0 +1,310 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Logger

+
+
+

When you create a new devon4node application, it already has a logger: src/app/shared/logger/winston.logger.ts. This logger provides the methods log, error and warn. All of those methods will write a log message, but with a different log level.

+
+
+

The winston logger has two transports: one to log everything inside the file logs/general.log and the other to log only the error logs inside the file logs/error.log. In addition, it uses the default NestJS logger in order to show the logs in the console.

+
+
+

As you can see, it is a simple example of how to use a logger in a devon4node application. It will be updated to a more complex one in future versions.

+
+
+
+
+

How to use logger

+
+
+

In order to use the logger you only need to inject the logger as a dependency:

+
+
+
+
constructor(logger: WinstonLogger){}
+
+
+
+

and then use it

+
+
+
+
async getAll() {
+  this.service.getAll();
+  this.logger.log('Returning all data');
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/guides-mailer.html b/docs/devon4ts/1.0/nest/guides-mailer.html new file mode 100644 index 00000000..ac5b3779 --- /dev/null +++ b/docs/devon4ts/1.0/nest/guides-mailer.html @@ -0,0 +1,624 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Mailer Module

+
+
+

This module enables you to send emails in devon4node. It also provides a template engine using Handlebars.

+
+
+

It is a NestJS module that injects a MailerService into your application, which is responsible for sending the emails using the nodemailer library.

+
+
+
+
+

Installing

+
+
+

Execute the following command in a devon4node project:

+
+
+
+
yarn add @devon4node/mailer
+
+
+
+
+
+

Configuring

+
+
+

To configure the mailer module, you only need to import it in your application into another module. Example:

+
+
+
+
@Module({
+  ...
+  imports: [
+    MailerModule.forRoot(),
+  ],
+  ...
+})
+
+
+
+

You must pass the configuration using the forRoot or forRootAsync methods.

+
+
+
+
+

forRoot()

+
+
+

The forRoot method receives a MailerModuleOptions object as parameter. It configures the MailerModule using the input MailerModuleOptions object.

+
+
+

The structure of MailerModuleOptions is:

+
+
+
+
{
+  hbsOptions?: {
+    templatesDir: string;
+    extension?: string;
+    partialsDir?: string;
+    helpers?: IHelperFunction[];
+    compilerOptions?: ICompileOptions;
+  },
+  mailOptions?: nodemailerSmtpTransportOptions;
+  emailFrom: string;
+}
+
+
+
+

Here, you need to specify the Handlebars compile options, the nodemailer transport options and the email address which will send the emails. +Then, you need to call the forRoot function in the module imports. Example:

+
+
+
+
@Module({
+  ...
+  imports: [
+    MailerModule.forRoot({
+      mailOptions: {
+        host: 'localhost',
+        port: 1025,
+        secure: false,
+        tls: {
+          rejectUnauthorized: false,
+        },
+      },
+      emailFrom: 'noreply@capgemini.com',
+      hbsOptions: {
+        templatesDir: join(__dirname, '../..', 'templates/views'),
+        partialsDir: join(__dirname, '../..', 'templates/partials'),
+        helpers: [{
+          name: 'fullname',
+          func: person => `${person.name} ${person.surname}`,s
+        }],
+      },
+    }),
+  ...
+})
+
+
+
+
+
+

forRootAsync()

+
+
+

The method forRootAsync enables you to get the mailer configuration in an asynchronous way. It is useful when you need to get the configuration using, for example, a service (e.g. ConfigurationService).

+
+
+

Example:

+
+
+
+
@Module({
+  ...
+  imports: [
+    MailerModule.forRootAsync({
+      imports: [ConfigurationModule],
+      useFactory: (config: ConfigurationService) => {
+        return config.mailerConfig;
+      },
+      inject: [ConfigurationService],
+    }),
+  ...
+})
+
+
+
+

In this example, we use the ConfigurationService in order to get the MailerModuleOptions (the same as forRoot)

+
+
+
+
+

Usage

+
+
+

In order to use it, you only need to inject the MailerService using dependency injection.

+
+
+

Example:

+
+
+
+
@Injectable()
+export class CatsService {
+  constructor(private readonly mailer: MailerService) {}
+}
+
+
+
+

Then, you only need to use the methods provided by the MailerService in your service. Take into account that you can inject it in every place that supports NestJS dependency injection.

+
+
+
+
+

MailerService methods

+
+ +
+
+
+

== sendPlainMail

+
+
+

The method sendPlainMail receives a string and sends an email.

+
+
+

The method signatures are:

+
+
+
+
sendPlainMail(emailOptions: SendMailOptions): Promise<SentMessageInfo>;
+sendPlainMail(to: string, subject: string, mail: string): Promise<SentMessageInfo>;
+
+
+
+

Examples:

+
+
+
+
this.mailer.sendPlainMail({
+  to: 'example@example.com',
+  subject: 'This is a subject',
+  html: '<h1>Hello world</h1>'
+});
+this.mailer.sendPlainMail('example@example.com', 'This is a subject', '<h1>Hello world</h1>');
+
+
+
+
+
+

== sendTemplateMail

+
+
+

The method sendTemplateMail sends an email based on a Handlebars template. The templates are registered using the templatesDir option or using the addTemplate method. +The template name is the name of the template (without extension) or the first parameter of the method addTemplate.

+
+
+

The method signatures are:

+
+
+
+
sendTemplateMail(emailOptions: SendMailOptions, templateName: string, emailData: any, hbsOptions?: RuntimeOptions): Promise<SentMessageInfo>;
+sendTemplateMail(to: string, subject: string, templateName: string, emailData: any, hbsOptions?: RuntimeOptions): Promise<SentMessageInfo>;
+
+
+
+

Examples:

+
+
+
+
this.mailer.sendTemplateMail({
+  to: 'example@example.com',
+  subject: 'This is a subject',
+  html: '<h1>Hello world</h1>'
+}, 'template1', { person: {name: 'Dario', surname: 'Rodriguez'}});
+this.mailer.sendTemplateMail('example@example.com', 'This is a subject', 'template1', { person: {name: 'Dario', surname: 'Rodriguez'}});
+
+
+
+
+
+

== addTemplate

+
+
+

Adds a new template to the MailerService.

+
+
+

Method signature:

+
+
+
+
addTemplate(name: string, template: string, options?: CompileOptions): void;
+
+
+
+

Example:

+
+
+
+
this.mailer.addTemplate('newTemplate', '<html><head></head><body>{{>partial1}}</body></html>')
+
+
+
+
+
+

== registerPartial

+
+
+

Register a new partial in Handlebars.

+
+
+

Method signature:

+
+
+
+
registerPartial(name: string, partial: Handlebars.Template<any>): void;
+
+
+
+

Example:

+
+
+
+
this.mailer.registerPartial('partial', '<h1>Hello World</h1>')
+
+
+
+
+
+

== registerHelper

+
+
+

Register a new helper in Handlebars.

+
+
+

Method signature:

+
+
+
+
registerHelper(name: string, helper: Handlebars.HelperDelegate): void;
+
+
+
+

Example:

+
+
+
+
this.mailer.registerHelper('fullname', person => `${person.name} ${person.surname}`)
+
+
+
+
+
+

Handlebars templates

+
+
+

As mentioned above, this module allows you to use Handlebars as template engine, but it is optional. If you do not need Handlebars, you just need to keep the hbsOptions undefined.

+
+
+

In order to get the templates from the file system, you can specify the template folder, the partials folder and the helpers. +At the moment of module initialization, it will read the content of the template folder, and will register every file with the name (without extension) and the content as Handlebars template. It will do the same for the partials.

+
+
+

You can specify the extension of template files using the extension parameter. The default value is .handlebars

+
+
+
+
+

Local development

+
+
+

If you want to work with this module but you don’t have a SMTP server, you can use the streamTransport. Example:

+
+
+
+
{
+  mailOptions: {
+    streamTransport: true,
+    newline: 'windows',
+  },
+  emailFrom: ...
+  hbsOptions: ...
+}
+
+
+
+

Then, you need to get the sendPlainMail or sendTemplateMail result, and print the email to the standard output (STDOUT). Example:

+
+
+
+
const mail = await this.mailer.sendTemplateMail(...);
+
+mail.message.pipe(process.stdout);
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/guides-serializer.html b/docs/devon4ts/1.0/nest/guides-serializer.html new file mode 100644 index 00000000..24c14981 --- /dev/null +++ b/docs/devon4ts/1.0/nest/guides-serializer.html @@ -0,0 +1,338 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Serializer

+
+
+

Serialization is the process of translating data structures or object state into a format that can be transmitted across network and reconstructed later.

+
+
+

NestJS by default serializes all data to JSON (JSON.stringify). Sometimes this is not enough. In some situations you need to exclude some property (e.g password). Instead of doing it manually, devon4node provides an interceptor (ClassSerializerInterceptor) that will do it for you. You only need to return a class instance as always and the interceptor will transform those classes to the expected data.

+
+
+

The ClassSerializerInterceptor takes the class-transformer decorators in order to know how to transform the class and then send the result to the client.

+
+
+

Some of class-transformer decorators are:

+
+
+
    +
  • +

    Expose

    +
  • +
  • +

    Exclude

    +
  • +
  • +

    Type

    +
  • +
  • +

    Transform

    +
  • +
+
+
+

And methods to transform data:

+
+
+
    +
  • +

    plainToClass

    +
  • +
  • +

    plainToClassFromExist

    +
  • +
  • +

    classToPlain

    +
  • +
  • +

    classToClass

    +
  • +
  • +

    serialize

    +
  • +
  • +

    deserialize

    +
  • +
  • +

    deserializeArray

    +
  • +
+
+
+

See the class-transformer page for more information.

+
+
+

See NestJS serialization page for more information about ClassSerializerInterceptor.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/guides-swagger.html b/docs/devon4ts/1.0/nest/guides-swagger.html new file mode 100644 index 00000000..f9dd71ad --- /dev/null +++ b/docs/devon4ts/1.0/nest/guides-swagger.html @@ -0,0 +1,303 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Swagger

+
+
+

We can use swagger (OpenAPI) in order to describe the endpoints that our application exposes.

+
+
+

NestJS provides a module which will read the code of our application and will expose one endpoint where we can see the swagger.

+
+
+

Adding swagger to a devon4node application is simple, you only need to execute the command nest g -c @devon4node/schematics swagger and it will do everything for you. The next time that you start your application, you will be able to see the swagger at /v1/api endpoint.

+
+
+

The swagger module can read your code in order to create the swagger definition, but sometimes you need to help it by decorating your handlers.

+
+
+

For more information about decorators and other behaviour about swagger module, you can see the NestJS swagger documentation page

+
+
+ + + + + +
+ + +the OpenAPI specification that this module supports is v2.0. The OpenAPI v3.0 is not available yet by using this module. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/guides-typeorm.html b/docs/devon4ts/1.0/nest/guides-typeorm.html new file mode 100644 index 00000000..cbe9efdf --- /dev/null +++ b/docs/devon4ts/1.0/nest/guides-typeorm.html @@ -0,0 +1,407 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

TypeORM

+
+
+

TypeORM is the default ORM provided by devon4node. It supports MySQL, MariaDB, Postgres, CockroachDB, SQLite, Microsoft SQL Server, Oracle, sql.js relational database and also supports MongoDB NoSQL database.

+
+
+

Adding TypeORM support to a devon4node application is very easy: you only need to execute the command nest g -c @devon4node/schematics typeorm and it will add all required dependencies to the project and also imports the @nestjs/typeorm module.

+
+
+

For more information about TypeORM and the integration with NestJS you can visit TypeORM webpage, TypeORM GitHub repository and NestJS TypeORM documentation page

+
+
+
+
+

Configuration

+
+
+

When you have the configuration module, the TypeORM generator will add one property in order to be able to configure the database depending on the environment. Example:

+
+
+
+
database: {
+  type: 'sqlite',
+  database: ':memory:',
+  synchronize: false,
+  migrationsRun: true,
+  logging: true,
+  entities: ['dist/**/*.entity.js'],
+  migrations: ['dist/migration/**/*.js'],
+  subscribers: ['dist/subscriber/**/*.js'],
+  cli: {
+    entitiesDir: 'src/entity',
+    migrationsDir: 'src/migration',
+    subscribersDir: 'src/subscriber',
+  },
+},
+
+
+
+

This object is a TypeORM ConnectionOptions. For more information about it visit the TypeORM Connection Options page.

+
+
+

There is also a special case: the default configuration. As the devon4node CLI needs the database configuration when you use the devon4node db command, we also provide the ormconfig.json file. In this file you must put the configuration for your local environment. In order to avoid duplicating the configuration for the local environment, in the default config file the database property is set-up like:

+
+
+
+
database: require('../../ormconfig.json'),
+
+
+
+

So, you only need to maintain the ormconfig.json file for the local environment.

+
+
+
+
+

Entity

+
+
+

Entity is a class that maps to a database table. The devon4node schematics has a generator to create new entities. You only need to execute the command nest g -c @devon4node/schematics entity <entity-name> and it generates the entity.

+
+
+

In the entity, you must define all columns, relations, primary keys of your database table. By default, devon4node provides a class named BaseEntity. All entities created with the devon4node schematics will extend the BaseEntity. This entity provides you some common columns:

+
+
+
    +
  • +

    id: the primary key of your table

    +
  • +
  • +

    version: the version of the entry (used for auditing purposes)

    +
  • +
  • +

    createdAt: creation date of the entry (used for auditing purposes)

    +
  • +
  • +

    updatedAt: last update date of the entry (used for auditing purposes)

    +
  • +
+
+
+

For more information about Entities, please visit the TypeORM entities page

+
+
+
+
+

Repository

+
+
+

With repositories, you can manage (insert, update, delete, load, etc.) a concrete entity. Using this pattern, we have separated the data (Entities) from the methods to manage it (Repositories).

+
+
+

To use a repository you only need to:

+
+
+
    +
  • +

    Import it in the module as follows:

    +
    +
    +
    @Module({
    +  imports: [TypeOrmModule.forFeature([Employee])],
    +})
    +
    +
    +
    + + + + + +
    + + +if you generate the entities with the devon4node schematic, this step is not necessary, devon4node schematic will do it for you. +
    +
    +
  • +
  • +

    Inject the repository as dependency in your service:

    +
    +
    +
    constructor(@InjectRepository(Employee) employeeRepository: Repository<Employee>) {}
    +
    +
    +
  • +
+
+
+

You can see more details in the NestJS database and NestJS TypeORM documentation pages.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/guides-validation.html b/docs/devon4ts/1.0/nest/guides-validation.html new file mode 100644 index 00000000..ebcae33f --- /dev/null +++ b/docs/devon4ts/1.0/nest/guides-validation.html @@ -0,0 +1,340 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Validation

+
+
+

To be sure that your application works well, you must validate any input data. devon4node by default provides a ValidationPipe. This ValidationPipe is responsible for validating the request input and, if the input does not pass the validation process, it returns a 400 Bad Request error.

+
+
+
+
+

Defining Validators

+
+
+

The ValidationPipe needs to know how to validate the input. For that purpose we use the class-validator package. This package allows you to define the validation of a class by using decorators.

+
+
+

For example:

+
+
+
+
export class Coffee {
+  @IsDefined()
+  @IsString()
+  @MaxLength(255)
+  name: string;
+
+  @IsDefined()
+  @IsString()
+  @MaxLength(25)
+  type: string;
+
+  @IsDefined()
+  @IsNumber()
+  quantity: number;
+}
+
+
+
+

As you can see in the previous example, we used some decorators in order to define the validators for every property of the Coffee class. You can find all decorators in the class-validator github repository.

+
+
+

Now, when you want to receive a Coffee as input in some endpoint, it will execute the validations before executing the handler function.

+
+
+ + + + + +
+ + +In order to be able to use the class-validator package, you must use classes instead of interfaces. As you know, interfaces disappear at compile time, and class-validator needs to know the metadata of the properties in order to be able to validate. +
+
+
+ + + + + +
+ + +The ValidationPipe only works if you put a specific type in the handler definition. For example, if you define a handler like getCoffee(@Body() coffee: any): Coffee {} the ValidationPipe will not do anything. You must specify the type of the input: getCoffee(@Body() coffee: Coffee): Coffee {} +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/layer-controller.html b/docs/devon4ts/1.0/nest/layer-controller.html new file mode 100644 index 00000000..0bc5a992 --- /dev/null +++ b/docs/devon4ts/1.0/nest/layer-controller.html @@ -0,0 +1,354 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Controller Layer

+
+
+

The controller layer is responsible for handling the requests/responses to the client. This layer knows everything about the endpoints exposed, the expected input (and also validate it), the response schema, the HTTP codes for the response and the HTTP errors that every endpoint can send.

+
+
+
+
+

How to implement the controller layer

+
+
+

This layer is implemented by the NestJS controllers. Let’s see how it works with an example:

+
+
+
+
@Controller('coffee/coffees')
+export class CoffeeController {
+  constructor(private readonly coffeeService: CoffeeService) {}
+
+  @Post('search')
+  @HttpCode(200)
+  async searchCoffees(@Body() search: CoffeeSearch): Promise<Array<Coffee>> {
+    try {
+      return await this.coffeeService.searchCoffees(search);
+    } catch (error) {
+      throw new BadRequestException(error.message, error);
+    }
+  }
+}
+
+
+
+

As you can see in the example, to create a controller you only need to decorate a class with the Controller decorator. This example is handling all requests to coffee/coffees.

+
+
+

Also, you have defined one handler. This handler is listening to POST request for the route coffee/coffees/search. In addition, this handler is waiting for a CoffeeSearch object and returns an array of Coffee. In order to keep it simple, that’s all that you need in order to define one route.

+
+
+

One important thing that can be observed in this example is that there is no business logic. It delegates to the service layer and return the response to the client. At this point, transformations from the value that you receive from the service layer to the desired return type are also allowed.

+
+
+

By default, every POST handler returns an HTTP 201 response with the returned value as body, but you can change it in an easy way by using decorators. As you can see in the example, the handler will return an HTTP 200 response (@HttpCode(200)).

+
+
+

Finally, if the service layer throws an error, this handler will catch it and return a HTTP 400 Bad Request response. The controller layer is the only one that knows about the answers to the client, therefore it is the only one that knows which error codes should be sent.

+
+
+
+
+

Validation

+
+
+

In order not to propagate errors from the incoming payload, we need to validate all data in the controller layer. See the validation guide for more information.

+
+
+
+
+

Error handling

+
+
+

In the previous example, we catch all errors using the try/catch statement. This is not the usual implementation. In order to catch properly the errors you must use the exception filters. Example:

+
+
+
+
@Controller('coffee/coffees')
+export class CoffeeController {
+  constructor(private readonly coffeeService: CoffeeService) {}
+
+  @Post('search')
+  @HttpCode(200)
+  @UseFilters(CaffeExceptionFilter)
+  async searchCoffees(@Body() search: CoffeeSearch): Promise<Array<Coffee>> {
+    return await this.coffeeService.searchCoffees(search);
+  }
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/layer-dataaccess.html b/docs/devon4ts/1.0/nest/layer-dataaccess.html new file mode 100644 index 00000000..0b3b4029 --- /dev/null +++ b/docs/devon4ts/1.0/nest/layer-dataaccess.html @@ -0,0 +1,303 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Data Access Layer

+
+
+

The data access layer is responsible for all outgoing connections to access and process data. This is mainly about accessing data from a persistent data-store but also about invoking external services.

+
+
+

This layer is implemented using providers. Those providers could be: services, repositories and others. Although services can be used for this layer, they should not be confused with the service layer. Services in this layer are responsible for data access, while services in the service layer are responsible for business logic.

+
+
+
+
+

Database

+
+
+

We strongly recommend TypeORM for database management in devon4node applications. TypeORM supports the most commonly used relational databases, like Oracle, MySQL, MariaDB, PostgreSQL, SQLite, MSSQL and others. Also, it supports non-relational databases like MongoDB.

+
+
+

TypeORM supports Active Record and Repository patterns. We recommend to use the Repository pattern. This pattern allows you to separate the data objects from the methods to manipulate the database.

+
+
+
+
+

External APIs

+
+
+

In order to manage the data in an external API, you need to create a service for that purpose. In order to manage the connections with the external API, we strongly recommend the NestJS HTTP module

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/layer-service.html b/docs/devon4ts/1.0/nest/layer-service.html new file mode 100644 index 00000000..bffc9482 --- /dev/null +++ b/docs/devon4ts/1.0/nest/layer-service.html @@ -0,0 +1,308 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Service Layer

+
+
+

The logic layer is the heart of the application and contains the main business logic. It knows everything about the business logic, but it does not know about the response to the client and the HTTP errors. That’s why this layer is separated from the controller layer.

+
+
+
+
+

How to implement the service layer

+
+
+

This layer is implemented by services, a specific kind of providers. Let’s see one example:

+
+
+
+
@Injectable()
+export class CoffeeService {
+  constructor(@InjectRepository(Coffee) private readonly coffeeRepository: Repository<Coffee>) {}
+
+  async searchCoffees(search: CoffeeSearch): Promise<Array<Coffee>> {
+    const coffees = await this.coffeeRepository.find();
+
+    return doSomeBusinessLogic(coffees);
+  }
+}
+
+
+
+

This is the CoffeeService that we inject in the example of the controller layer. As you can see, a service is a regular class with the Injectable decorator. Also, it injects the data access layer (in this specific case, the Repository<Coffee>) as a dependency.

+
+
+

The services expose methods in order to transform the input from the controllers by applying some business logic. They can also request data from the data access layer. And that’s all.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/master-devon4node.html b/docs/devon4ts/1.0/nest/master-devon4node.html new file mode 100644 index 00000000..2db43b1c --- /dev/null +++ b/docs/devon4ts/1.0/nest/master-devon4node.html @@ -0,0 +1,2768 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

NodeJS

+
+
+

devonfw is a platform which provides solutions to building business applications which combine best-in-class frameworks and libraries as well as industry proven practices and code conventions. devonfw is 100% Open Source (Apache License version 2.0) since the beginning of 2018.

+
+
+

devon4node is the NodeJS stack of devonfw. It allows you to build business applications (backends) using NodeJS technology in standardized way based on established best-practices.

+
+
+

devon4node is based on NestJS. Nest (NestJS) is a framework for building efficient, scalable Node.js server-side applications. It uses progressive TypeScript and combines elements of OOP (Object Oriented Programming), FP (Functional Programming), and FRP (Functional Reactive Programming).

+
+ +
+

devon4node Architecture

+
+

As we have mentioned in the introduction, devon4node is based on NestJS. Nest (NestJS) is a framework for building efficient, scalable Node.js server-side applications.

+
+
+
+

HTTP layer

+
+

By using NestJS, devon4node is a platform-agnostic framework. NestJS focuses only on the logical layer, and delegates the transport layer to another framework, such as ExpressJS. You can see it in the following diagram:

+
+
+
+devon4node architecture +
+
+
+

As you can see, NestJS does not listen directly for incoming requests. It has an adapter to communicate with ExpressJS and ExpressJS is responsible for that. ExpressJS is only one of the frameworks that NestJS can work with. We have also another adapter available out-of-the-box: the Fastify adapter. With that, you can replace ExpressJS with Fastify, but you can still use all your NestJS components. You can also create your own adapter to make NestJS work with other HTTP frameworks.

+
+
+

At this point, you may think: why is NestJS (and devon4node) using ExpressJS by default instead of Fastify? Because, as you can see in the previous diagram, there is a component that is dependent on the HTTP framework: the middleware. As ExpressJS is the most widely used framework, there exists a lot of middleware for it, so, in order to reuse them in our NestJS applications, NestJS uses ExpressJS by default. Anyway, you should consider which HTTP framework best fits your requirements.

+
+
+
+

devon4node layers

+
+

As other devonfw technologies, devon4node separates the application into layers.

+
+
+

Those layers are:

+
+ +
+
+layers +
+
+
+
+

devon4node application structure

+
+

Although there are many frameworks to create backend applications in NodeJS, none of them effectively solve the main problem of - Architecture. This is the main reason we have chosen NestJS for the devon4node applications. Besides, NestJS is highly inspired by Angular, therefore a developer who knows Angular can use his already acquired knowledge to write devon4node applications.

+
+
+

NestJS adopts various Angular concepts, such as dependency injection, piping, interceptors and modularity, among others. By using modularity we can reuse some of our modules between applications. One example that devon4node provides is the mailer module.

+
+
+
+

Modules

+
+

Creating an application module is simple, you only need to create an empty class with the decorator Module:

+
+
+
+
@Module({})
+export class AppModule {}
+
+
+
+

In the module you can define:

+
+
+
    +
  • +

    Imports: the list of imported modules that export the providers which are required in this module

    +
  • +
  • +

    Controllers: the set of controllers defined in this module which have to be instantiated

    +
  • +
  • +

    Providers: the providers that will be instantiated by the Nest injector and that may be shared at least across this module

    +
  • +
  • +

    Exports: the subset of providers that are provided by this module and should be available in other modules which import this module

    +
  • +
+
+
+

The main difference between Angular and NestJS is NestJS modules encapsulates providers by default. This means that it’s impossible to inject providers that are neither directly part of the current module nor exported from the imported modules. Thus, you may consider the exported providers from a module as the module’s public interface, or API. Example of modules graph:

+
+
+
+modules +
+
+
+

In devon4node we have three different kinds of modules:

+
+
+
    +
  • +

    AppModule: this is the root module. Everything that our application need must be imported here.

    +
  • +
  • +

    Global Modules: this is a special kind of module. When you make a module global, it’s accessible for every module in your application. You can see it in the next diagram. It’s the same as the previous one, but now the CoreModule is global:

    +
    +
    +module2 +
    +
    +
    +

    One example of a global module is the CoreModule. In the CoreModule you must import every module which has providers that need to be accessible in all modules of your application

    +
    +
  • +
  • +

    Feature (or application) modules: modules which contains the logic of our application. We must import it in the AppModule.

    +
  • +
+
+
+

For more information about modules, see NestJS documentation page

+
+
+
+

Folder structure

+
+

devon4node defines a folder structure that every devon4node application must follow. The folder structure is:

+
+
+
+
├───src
+│   ├───app
+│   │   ├───core
+│   │   │   ├───auth
+│   │   │   ├───configuration
+│   │   │   ├───user
+│   │   │   └───core.module.ts
+│   │   ├───shared
+│   │   └───feature
+│   │       ├───sub-module
+│   │       │   ├───controllers
+│   │       │   ├───...
+│   │       │   ├───services
+│   │       │   └───sub-module.module.ts
+│   │       ├───controllers
+│   │       ├───interceptors
+│   │       ├───pipes
+│   │       ├───guards
+│   │       ├───filters
+│   │       ├───middlewares
+│   │       ├───model
+│   │       │   ├───dto
+│   │       │   └───entities
+│   │       ├───services
+│   │       └───feature.module.ts
+│   ├───config
+│   └───migration
+├───test
+└───package.json
+
+
+
+

devon4node schematics ensures this folder structure so, please, do not create files by your own, use the devon4node schematics.

+
+
+
+

NestJS components

+
+

NestJS provides several components that you can use in your application:

+
+
+ +
+
+

In the NestJS documentation you can find all information about each component. But, something that is missing in the documentation is the execution order. Every component can be defined in different levels: globally, in the controller or in the handler. As middleware is part of the HTTP server we can define it in a different way: globally or in the module.

+
+
+
+components +
+
+
+

It is not necessary to have components defined at every level. For example, you can have an interceptor defined globally but not have any other at the controller or handler level. If nothing is defined at some level, the request will continue to the next component.

+
+
+

As you can see in the previous image, the first component which receives the request is the globally defined middleware. Then, it sends the request to the module middleware. Each of them can return a response to the client, without passing the request to the next level.

+
+
+

Then, the request continue to the guards: first the global guard, next to controller guard and finally to the handler guard. At this point, we can throw an exception in all components and the exception filter will catch it and send a proper error message to the client. We do not paint the filters in the graphic in order to simplify it.

+
+
+

After the guards, it is time for the interceptors: global interceptors, controller interceptors and handler interceptors. And last, before arriving at the handler inside the controller, the request passes through the pipes.

+
+
+

When the handler has the response ready to send to the client, it does not go directly to the client. It comes back through the interceptors, so we can also intercept the response. The order this time is reversed: handler interceptors, controller interceptors and global interceptors. After that, we can finally send the response to the client.

+
+
+

Now, with this in mind, you are able to create the components in a better way.

+
+
+
+
+
+

Layers

+
+ +
+

Controller Layer

+
+

The controller layer is responsible for handling the requests/responses to the client. This layer knows everything about the endpoints exposed, the expected input (and also validate it), the response schema, the HTTP codes for the response and the HTTP errors that every endpoint can send.

+
+
+
+

How to implement the controller layer

+
+

This layer is implemented by the NestJS controllers. Let’s see how it works with an example:

+
+
+
+
@Controller('coffee/coffees')
+export class CoffeeController {
+  constructor(private readonly coffeeService: CoffeeService) {}
+
+  @Post('search')
+  @HttpCode(200)
+  async searchCoffees(@Body() search: CoffeeSearch): Promise<Array<Coffee>> {
+    try {
+      return await this.coffeeService.searchCoffees(search);
+    } catch (error) {
+      throw new BadRequestException(error.message, error);
+    }
+  }
+}
+
+
+
+

As you can see in the example, to create a controller you only need to decorate a class with the Controller decorator. This example is handling all requests to coffee/coffees.

+
+
+

Also, you have defined one handler. This handler is listening to POST request for the route coffee/coffees/search. In addition, this handler is waiting for a CoffeeSearch object and returns an array of Coffee. In order to keep it simple, that’s all that you need in order to define one route.

+
+
+

One important thing that can be observed in this example is that there is no business logic. It delegates to the service layer and return the response to the client. At this point, transformations from the value that you receive from the service layer to the desired return type are also allowed.

+
+
+

By default, every POST handler returns an HTTP 201 response with the returned value as body, but you can change it in an easy way by using decorators. As you can see in the example, the handler will return an HTTP 200 response (@HttpCode(200)).

+
+
+

Finally, if the service layer throws an error, this handler will catch it and return a HTTP 400 Bad Request response. The controller layer is the only one that knows about the answers to the client, therefore it is the only one that knows which error codes should be sent.

+
+
+
+

Validation

+
+

In order not to propagate errors in the incoming payload, we need to validate all data in the controller layer. See the validation guide for more information.

+
+
+
+

Error handling

+
+

In the previous example, we catch all errors using the try/catch statement. This is not the usual implementation. In order to catch the errors properly you must use the exception filters. Example:

+
+
+
+
@Controller('coffee/coffees')
+export class CoffeeController {
+  constructor(private readonly coffeeService: CoffeeService) {}
+
+  @Post('search')
+  @HttpCode(200)
+  @UseFilters(CaffeExceptionFilter)
+  async searchCoffees(@Body() search: CoffeeSearch): Promise<Array<Coffee>> {
+    return await this.coffeeService.searchCoffees(search);
+  }
+}
+
+
+ +
+
+

Service Layer

+
+

The logic layer is the heart of the application and contains the main business logic. It knows everything about the business logic, but it does not know about the response to the client and the HTTP errors. That’s why this layer is separated from the controller layer.

+
+
+
+

How to implement the service layer

+
+

This layer is implemented by services, a specific kind of providers. Let’s see one example:

+
+
+
+
@Injectable()
+export class CoffeeService {
+  constructor(@InjectRepository(Coffee) private readonly coffeeRepository: Repository<Coffee>) {}
+
+  async searchCoffees(search: CoffeeSearch): Promise<Array<Coffee>> {
+    const coffees = await this.coffeeRepository.find();
+
+    return doSomeBusinessLogic(coffees);
+  }
+}
+
+
+
+

This is the CoffeeService that we inject in the example of the controller layer. As you can see, a service is a regular class with the Injectable decorator. Also, it injects the data access layer as a dependency (in this specific case, the Repository<Coffee>).

+
+
+

The services expose methods in order to transform the input from the controllers by applying some business logic. They can also request data from the data access layer. And that’s all.

+
+ +
+
+

Data Access Layer

+
+

The data access layer is responsible for all outgoing connections to access and process data. This is mainly about accessing data from a persistent data-store but also about invoking external services.

+
+
+

This layer is implemented using providers. Those providers could be: services, repositories and others. Although services can be used for this layer, they should not be confused with the service layer. Services in this layer are responsible for data access, while services in the service layer are responsible for business logic.

+
+
+
+

Database

+
+

We strongly recommend TypeORM for database management in devon4node applications. TypeORM supports the most commonly used relational databases, like Oracle, MySQL, MariaDB, PostgreSQL, SQLite, MSSQL and others. Also, it supports non-relational databases like MongoDB.

+
+
+

TypeORM supports Active Record and Repository patterns. We recommend to use the Repository pattern. This pattern allows you to separate the data objects from the methods to manipulate the database.

+
+
+
+

External APIs

+
+

In order to manage the data in a external API, you need to create a service for that purpose. In order to manage the connections with the external API, we strongly recommend the NestJS HTTP module

+
+
+
+
+
+

Guides

+
+ +
+

Key Principles

+
+

devon4node is built following some basic principles like:

+
+
+ +
+
+

But key principles that best define devon4node (and are inherited from NestJS) are:

+
+
+
    +
  • +

    Simplicity (aka KISS)

    +
  • +
  • +

    Reusability

    +
  • +
  • +

    Productivity

    +
  • +
+
+
+
+

Simplicity

+
+

In devon4node we tried to do everything as simple as possible. Following this principle we will be able to do easy to maintain applications.

+
+
+

For example, in order to expose all CRUD operations for an entity, you only need to create a controller like:

+
+
+
+
@Crud({
+  model: {
+    type: Employee,
+  },
+})
+@CrudType(Employee)
+@Controller('employee/employees')
+export class EmployeeCrudController {
+  constructor(public service: EmployeeCrudService) {}
+}
+
+
+
+

You can find this code in the employee example. Only with this code you're exposing the full CRUD operations for the employee entity. As you can see, it’s an empty class with some decorators and the EmployeeCrudService injected as a dependency. Simple, isn’t it? The EmployeeCrudService is also simple:

+
+
+
+
@Injectable()
+export class EmployeeCrudService extends TypeOrmCrudService<Employee> {
+  constructor(@InjectRepository(Employee) repo: Repository<Employee>) {
+    super(repo);
+  }
+}
+
+
+
+

Another empty class which extends from TypeOrmCrudService<Employee> and injects the Employee Repository as dependency. Nothing else.

+
+
+

With these examples you can get an idea of how simple it can be to code a devon4node application .

+
+
+
+

Reusability

+
+

NestJS (and devon4node) applications are designed in a modular way. This allows you to isolate some functionality in a module, and then reuse it in every application that you need. This is the same behaviour that Angular has. You can see it in the NestJS modules like TypeORM, Swagger and others. Also, in devon4node we have the Mailer module.

+
+
+

In your applications, you only need to import those modules and then you will be able to use the functionality that they implement. Example

+
+
+
+
@Module({
+  imports: [ AuthModule, ConfigurationModule ],
+})
+export class SomeModule {}
+
+
+
+
+

Productivity

+
+

devon4node is designed to create secure enterprise applications. But also, it allows you to do it in a fast way. To increase productivity, devon4node provides schematics in order to generate some boilerplate code.

+
+
+

For example, to create a module you need to create a new file for a module (or copy it) and write the code, then you need to import it in the AppModule. This is an easy example, but you can introduce some errors: forget to import it in the AppModule, introduce errors with the copy/paste and so on. By using the command nest g module --name <module-name> it will do everything for you. Just a simple command. In this specific case you probably do not see any advantage, but there are other complex cases where you can generate more complex code with the nest and devon4node schematics commands.

+
+
+

See code generation in order to know how to increase your productivity creating devon4node applications.

+
+ +
+
+

Code Generation

+
+

As we mention in the page key principles, one of our key principles is Productivity. In order to provide that productivity, we have some tools to generate code. These tools will help you generate the common parts of the application so that you can focus only on the specific functionality.

+
+
+

Those tools are:

+
+ +
+
+

Nest CLI and Devon4node schematics

+
+

We are going to use the Nest CLI to generate the code of our application; you can learn more about the Nest CLI in the official documentation.

+
+
+
+

Install devon4node schematics

+
+

First of all, you need to install Nest CLI

+
+
+

Execute the command yarn global add @nestjs/cli. +You can also use npm: npm install -g @nestjs/cli

+
+
+

And then Devon4node schematics globally with the following command:

+
+
+

yarn global add @devon4node/schematics or npm install -g @devon4node/schematics

+
+
+
+

==

+
+

If you get an error trying to execute any devon4node schematic related to collection not found, try to reinstall devon4node/schematics in the project folder or make sure that the schematics folder is inside @devon4node in node_modules. +yarn add @devon4node/schematics +== ==

+
+
+
+

Generate new devon4node application

+
+

To start creating a devon4node application, execute the command:

+
+
+

nest g -c @devon4node/schematics application [application-name]

+
+
+

If you do not put a name, the command line will ask you for one.

+
+
+
+

Generate code for TypeORM

+
+

Initialize TypeORM into your current project in a correct way.

+
+
+

nest g -c @devon4node/schematics typeorm

+
+
+

Then, you will be asked about which DB you want to use.

+
+
+

typeorm schematic

+
+
+
+

Generate CRUD

+
+

Generate CRUD methods for an entity. Requires TypeORM installed in the project.

+
+
+

It will add the @nestjsx/crud module as a project dependency. Then, generates an entity, a CRUD controller and a CRUD service. It also register the entity, controller and service in the module.

+
+
+

Execute nest g -c @devon4node/schematics crud and then you will need to write a name for the crud.

+
+
+
+crud schematic +
+
+
+
+

Generate TypeORM entity

+
+

Add a TypeORM entity to your project. Requires TypeORM installed in the project.

+
+
+

Execute nest g -c @devon4node/schematics entity and you will be asked for an entity name.

+
+
+
+

Add config-module

+
+

Add the config module to the project.

+
+
+

It will add the @devon4node/common module as a project dependency. Then, it will generate the configuration module into your project and add it in the core module. Also, it generates the config files for the most common environments.

+
+
+

The command to execute will be nest g -c @devon4node/schematics config-module

+
+
+
+

Add mailer module

+
+

Add @devon4node/mailer module to project.

+
+
+

It will add the @devon4node/mailer module as a project dependency. Also, it will add it to the core module and it will generate some email template examples.

+
+
+

Write the command nest g -c @devon4node/schematics mailer

+
+
+
+

Add swagger module

+
+

Add swagger module to project.

+
+
+

It will add the @nestjs/swagger module as a project dependency. Also, it will update the main.ts file in order to expose the endpoint for swagger. The default endpoint is: /v1/api

+
+
+

Execute the command nest g -c @devon4node/schematics swagger

+
+
+
+

Add auth-jwt module

+
+

Add the auth JWT module to the project.

+
+
+

It will add to your project the auth-jwt and user module. Also, it will import those modules into the core module.

+
+
+

Execute nest g -c @devon4node/schematics auth-jwt

+
+
+
+

Add security

+
+

Add cors and helmet to your project.

+
+
+

It will add helmet package as project dependency and update the main.ts file in order to enable the cors and helmet in your application.

+
+
+

Execute nest g -c @devon4node/schematics security

+
+
+
+

Generate database migrations

+
+
    +
  1. +

    Generate database migrations

    +
    +
      +
    1. +

      In order to create migration scripts with TypeORM, you need to install ts-node: yarn global add ts-node or npm i -g ts-node

      +
    2. +
    3. +

      Generate the tables creation migration: yarn run typeorm migration:generate -n CreateTables

      +
      +
      +insert data +
      +
      +
      +

      It will connect to the database, read all entities and then it will generate a migration file with all sql queries needed to transform the current status of the database to the status defined by the entities. If the database is empty, it will generate all sql queries needed to create all tables defined in the entities. You can find an example in the todo example

      +
      +
    4. +
    +
    +
  2. +
+
+
+

As TypeORM is the tool used for the DB, you can check the official documentation for more information. +See TypeORM CLI documentation.

+
+
+
+

CobiGen

+
+

Currently, we do not have templates to generate devon4node code (we have planned to do that in the future). Instead, we have templates that read the code of a devon4node application and generate a devon4ng application. Visit the CobiGen page for more information.

+
+ +
+
+

Coding Conventions

+
+

devon4node defines some coding conventions in order to improve the readability, reduce the merge conflicts and be able to develop applications in an industrialized way.

+
+
+

In order to ensure that you are following the devon4node coding conventions, you can use the following tools:

+
+
+
    +
  • +

    ESLint: ESLint ESLint is a tool for identifying and reporting on patterns found in ECMAScript/JavaScript code, with the goal of making code more consistent and avoiding bugs. We recommend to use the ESLint VSCode extension (included in the devonfw Platform Extension Pack) in order to be able to see the linting errors while you are developing.

    +
  • +
  • +

    Prettier: Prettier is a code formatter. We recommend to use the Prettier VSCode extension (included in the devonfw Platform Extension Pack) and enable the editor.formatOnSave option.

    +
  • +
  • +

    devon4node application schematic: this tool will generate code following the devon4node coding conventions. Also, when you generate a new project using the devon4node application schematic, it generates the configuration files for TSLint and Prettier that satisfy the devon4node coding conventions.

    +
  • +
+
+
+

When you combine all tools, you can be sure that you follow the devon4node coding conventions.

+
+
+
+

Detailed devon4node Coding Conventions

+
+

Here we will detail some of the most important devon4node coding conventions. To be sure that you follow all devon4node coding conventions use the tools described before.

+
+
+
+

Indentation

+
+

All devon4node code files must be indented using spaces. The indentation width must be 2 spaces.

+
+
+
+

White space

+
+

In order to improve the readability of your code, you must introduce whitespaces. Example:

+
+
+
+
if(condition){
+
+
+
+

must be

+
+
+
+
if (condition) {
+
+
+
+
+

Naming conventions

+ +
+
+

== File naming

+
+

The file name must follow the pattern: (name in kebab case).(kind of component).(extension) +The test file name must follow the pattern: (name in kebab case).(kind of component).spec.(extension)

+
+
+

Example:

+
+
+
+
auth-jwt.service.ts
+auth-jwt.service.spec.ts
+
+
+
+
+

== Interface naming

+
+

The interface names must be in pascal case, and must start with I. There is some controversy in starting the interface names with an I, but we decided to do it because in most cases you will have an interface and a class with the same name, so, to differentiate them, we decided to start the interfaces with I. Other devonfw stacks solve it by adding the suffix Impl in the class implementations.

+
+
+

Example:

+
+
+
+
interface ICoffee {}
+
+
+
+
+

== Class naming

+
+

The class names must be in pascal case.

+
+
+

Example:

+
+
+
+
class Coffee {}
+
+
+
+
+

== Variable naming

+
+

All variable names must be in camel case.

+
+
+
+
const coffeeList: Coffee[];
+
+
+
+
+

Declarations

+
+

For all variable declarations we must use const or let. var is forbidden. We prefer to use const when possible.

+
+
+
+

Programming practices

+ +
+
+

== Trailing comma

+
+

All elements of multi-line object and array literals must end with a trailing comma. Example:

+
+
+
+
{
+  one: 'one',
+  two: 'two'  // bad
+}
+{
+  one: 'one',
+  two: 'two', // good
+}
+
+
+
+
+

== Arrow functions

+
+

All anonymous functions must be defined with the arrow function notation. In most cases it’s not a problem, but sometimes, when you do not want to bind this when you define the function, you can use the other function definition. In these special cases you must disable the linter for those sentences.

+
+
+
+

== Comments

+
+

Comments must start with a whitespace. Example:

+
+
+
+
//This is a bad comment
+// This is OK
+
+
+
+
+

== Quotemarks

+
+

For string definitions, we must use single quotes.

+
+
+
+

== if statements

+
+

In all if statements you always must use brackets. Example:

+
+
+
+
// Bad if statement
+if (condition)
+  return true;
+
+// Good if statement
+if (condition) {
+  return true;
+}
+
+
+
+
+

Pre-commit hooks

+
+

In order to ensure that your new code follows the coding conventions, devon4node uses by default husky. Husky is a tool that allows you to configure git hooks easily in your project. When you make a git commit in your devon4node project, it will execute two actions:

+
+
+
    +
  • +

    Prettify the staged files

    +
  • +
  • +

    Execute the linter in the staged files

    +
  • +
+
+
+

If any action fails, you won’t be able to commit your new changes.

+
+
+ + + + + +
+ + +If you want to skip the git hooks, you can do a commit passing the --no-verify flag. +
+
+ +
+
+

Dependency Injection

+
+

The dependency injection is a well-known common design pattern applied by frameworks in all languages, like Spring in Java, Angular and others. The intention of this page is not to explain how dependency injection works, but instead how it is addressed by NestJS.

+
+
+

NestJS resolves the dependency injection in its modules. When you define a provider in a module, it can be injected in all components of the module. By default, those providers are only available in the module where they are defined. The only way to export a module provider to other modules which import it is adding those providers to the exports array. You can also reexport modules.

+
+
+
+

Inject dependencies in NestJS

+
+

In order to inject a dependency in a NestJS component, you need to declare it in the component constructor. Example:

+
+
+
+
export class CoffeeController {
+  constructor(public readonly coffeeService: CoffeeService) {}
+}
+
+
+
+

NestJS can resolve all dependencies that are defined in the module as provider, and also the dependencies exported by the modules imported. Example:

+
+
+
+
@Module({
+  controllers: [CoffeeController],
+  providers: [CoffeeService],
+})
+export class CoffeeModule {}
+
+
+
+

Injecting dependencies in the constructor is the preferred choice, but sometimes it is not possible. For example, when you are extending another class and want to keep the constructor definition. In these specific cases we can inject dependencies in the class properties. Example:

+
+
+
+
export class CoffeeController {
+  @Inject(CoffeeService)
+  private readonly coffeeService: CoffeeService;
+}
+
+
+
+
+

Dependency Graph

+
+
+dependency injection1 +
+
+
+

In the previous image, Module A can inject dependencies exported by Module B, Module E and Module F. If Module B reexports Module C and Module D, they are also accessible by Module A.

+
+
+

If there is a conflict with the injection token, it resolves the provider with less distance with the module. For example: if the modules C and F exports a UserService provider, the Module A will resolve the UserService exported by the Module F, because the distance from Module A to Module F is 1, and the distance from Module A to Module C is 2.

+
+
+

When you define a module as global, the dependency injection system is the same. The only difference is that now all modules have a link to the global module. For example, if we make Module C global the dependency graph will be:

+
+
+
+dependency injection2 +
+
+
+
+

Custom providers

+
+

When you want to change the provider name, you can use a NestJS feature called custom providers. For example, if you want to register a provider class called MockUserService under the provider token UserService you can define it like:

+
+
+
+
@Module({
+  providers: [{
+    provide: UserService,
+    useClass: MockUserService,
+  }],
+})
+
+
+
+

With this, when you want to inject UserService as a dependency, the MockUserService will be injected.

+
+
+

Custom provider token can be also a string:

+
+
+
+
@Module({
+  providers: [{
+    provide: 'USER_SERVICE',
+    useClass: MockUserService,
+  }],
+})
+
+
+
+

but now, when you want to inject it as dependency you need to use the @Inject decorator.

+
+
+
+
constructor(@Inject('USER_SERVICE') userService: any) {}
+
+
+ +
+
+

Configuration Module

+
+

devon4node provides a way to generate a configuration module inside your application. To generate it you only need to execute the command nest g -c @devon4node/schematics config-module. This command will generate inside your application:

+
+
+
    +
  • +

    Configuration module inside the core module.

    +
  • +
  • +

    config folder where all environment configuration are stored.

    +
    +
      +
    • +

      default configuration: configuration for your local development environment.

      +
    • +
    • +

      develop environment configuration for the develop environment.

      +
    • +
    • +

      uat environment configuration for the uat environment.

      +
    • +
    • +

      production environment configuration for the production environment.

      +
    • +
    • +

      production environment configuration for the production environment.

      +
    • +
    • +

      test environment configuration used by test.

      +
    • +
    +
    +
  • +
+
+
+ + + + + +
+ + +some code generators will add some properties to this module, so, be sure that the config module is the first module that you generate in your application. +
+
+
+
+

Use the configuration service

+
+

To use the configuration service, you only need to inject it as dependency. As configuration module is defined in the core module, it will be available everywhere in your application. Example:

+
+
+
+
export class MyProvider {
+  constructor(public readonly configService: ConfigurationService) {}
+
+  myMethod() {
+    return this.configService.isDev;
+  }
+}
+
+
+
+
+

Choose an environment file

+
+

By default, when you use the configuration service it will take the properties defined in the default.ts file. If you want to change the configuration file, you only need to set the NODE_ENV environment property with the name of the desired environment. Examples: in windows execute set NODE_ENV=develop before executing the application, in linux execute NODE_ENV=develop before executing the application or NODE_ENV=develop yarn start.

+
+
+
+

Override configuration properties

+
+

Sometimes, you want to keep some configuration property secure, and you do not want to publish it to the repository, or you want to reuse some configuration file but you need to change some properties. For those scenarios, you can override configuration properties by defining an environment variable with the same name. For example, if you want to override the property host, you can do: set host="newhost". It also works with objects. For example, if you want to change the value of secret in the property jwtConfig for this example, you can set an environment variable like this: set jwtConfig="{"secret": "newsecret"}". As you can see, this environment variable has a JSON value. It will take the object and merge the jwtConfig property with the properties defined inside the environment variable. The other properties maintain their value. The behaviour is the same for the nested objects.

+
+
+
+

Add a configuration property

+
+

In order to add a new property to the configuration module, you need to follow some steps:

+
+
+
    +
  • +

    Add the property to the IConfig interface in the src/app/core/configuration/types.ts file. With this, we can ensure that the ConfigurationService and the environment files have this property at compile time.

    +
  • +
  • +

    Add the new property getter to ConfigurationService. You must use the get method of ConfigurationService to ensure that the property will be loaded from the desired config file. You can also add extra logic if needed.

    +
  • +
  • +

    Add the property to all config files inside the src/config folder.

    +
  • +
+
+
+

Example:

+
+
+

We want to add the property devonfwUrl to our ConfigurationService, so:

+
+
+

We add the following code in IConfig interface:

+
+
+
+
devonfwUrl: string;
+
+
+
+

Then, we add the getter in the ConfigurationService:

+
+
+
+
get devonfwUrl(): string {
+  return this.get('devonfwUrl')!;
+}
+
+
+
+

Finally, we add the definition in all config files:

+
+
+
+
devonfwUrl: 'https://devonfw.com',
+
+
+ +
+
+

Auth JWT module

+
+

devon4node provides a way to generate a default authentication module using JWT (JSON Web Token). It uses the @nestjs/passport library describe here.

+
+
+

To generate the devon4node auth-jwt module you only need to execute the command: nest generate -c @devon4node/schematics auth-jwt. We generate this module inside the applications instead of distributing an npm package because this module is prone to be modified depending on the requirements. It also generates a basic user module.

+
+
+

In this page we will explain the default implementation provided by devon4node. For more information about authentication, JWT, passport and other you can see:

+
+
+ +
+
+
+

Auth JWT endpoints

+
+

In order to execute authentication operations, the auth-jwt module exposes the following endpoints:

+
+
+
    +
  • +

    POST /auth/login: receives a username and a password and returns the token in the header if the combination of username and password is correct.

    +
  • +
  • +

    POST /auth/register: register a new user.

    +
  • +
  • +

    GET /auth/currentuser: return the user data if he is authenticated.

    +
  • +
+
+
+
+

Protect endpoints with auth-jwt

+
+

In order to protect your endpoints with auth-jwt module you only need to add the AuthGuard() in the UseGuards decorator. Example:

+
+
+
+
@Get('currentuser')
+@UseGuards(AuthGuard())
+currentUser(@Request() req: UserRequest) {
+  return req.user;
+}
+
+
+
+

Now, all requests to currentuser are protected by the AuthGuard.

+
+
+
+

Role based Access Control

+
+

The auth-jwt module also provides a way to control the access to some endpoints by using roles. For example, if you want to grant access to an endpoint only to admins, you only need to add the Roles decorator to those endpoints with the roles allowed. Example:

+
+
+
+
@Get('currentuser')
+@UseGuards(AuthGuard())
+@Roles(roles.ADMIN)
+currentUser(@Request() req: UserRequest) {
+  return req.user;
+}
+
+
+ +
+
+

Swagger

+
+

We can use swagger (OpenAPI) in order to describe the endpoints that our application exposes.

+
+
+

NestJS provides a module which will read the code of our application and will expose one endpoint where we can see the swagger.

+
+
+

Adding swagger to a devon4node application is simple, you only need to execute the command nest g -c @devon4node/schematics swagger and it will do everything for you. The next time that you start your application, you will be able to see the swagger at the /v1/api endpoint.

+
+
+

The swagger module can read your code in order to create the swagger definition, but sometimes you need to help him by decorating your handlers.

+
+
+

For more information about decorators and other behaviour about swagger module, you can see the NestJS swagger documentation page

+
+
+ + + + + +
+ + +the OpenAPI specification that this module supports is v2.0. The OpenAPI v3.0 is not available yet by using this module. +
+
+ +
+
+

TypeORM

+
+

TypeORM is the default ORM provided by devon4node. It supports MySQL, MariaDB, Postgres, CockroachDB, SQLite, Microsoft SQL Server, Oracle, sql.js relational database and also supports MongoDB NoSQL database.

+
+
+

Adding TypeORM support to a devon4node application is very easy: you only need to execute the command nest g -c @devon4node/schematics typeorm and it will add all required dependencies to the project and also import the @nestjs/typeorm module.

+
+
+

For more information about TypeORM and the integration with NestJS you can visit TypeORM webpage, TypeORM GitHub repository and NestJS TypeORM documentation page

+
+
+
+

Configuration

+
+

When you have the configuration module, the TypeORM generator will add one property in order to be able to configure the database depending on the environment. Example:

+
+
+
+
database: {
+  type: 'sqlite',
+  database: ':memory:',
+  synchronize: false,
+  migrationsRun: true,
+  logging: true,
+  entities: ['dist/**/*.entity.js'],
+  migrations: ['dist/migration/**/*.js'],
+  subscribers: ['dist/subscriber/**/*.js'],
+  cli: {
+    entitiesDir: 'src/entity',
+    migrationsDir: 'src/migration',
+    subscribersDir: 'src/subscriber',
+  },
+},
+
+
+
+

This object is a TypeORM ConnectionOptions. For more information about it visit the TypeORM Connection Options page.

+
+
+

There is also a special case: the default configuration. As the devon4node CLI needs the database configuration when you use the devon4node db command, we also provide the ormconfig.json file. In this file you must put the configuration for your local environment. In order to avoid duplicating the configuration for the local environment, in the default config file the database property is set up like:

+
+
+
+
database: require('../../ormconfig.json'),
+
+
+
+

So, you only need to maintain the ormconfig.json file for the local environment.

+
+
+
+

Entity

+
+

Entity is a class that maps to a database table. The devon4node schematics has a generator to create new entities. You only need to execute the command nest g -c @devon4node/schematics entity <entity-name> and it generates the entity.

+
+
+

In the entity, you must define all columns, relations, and primary keys of your database table. By default, devon4node provides a class named BaseEntity. All entities created with the devon4node schematics will extend the BaseEntity. This entity provides you some common columns:

+
+
+
    +
  • +

    id: the primary key of your table

    +
  • +
  • +

    version: the version of the entry (used for auditing purposes)

    +
  • +
  • +

    createdAt: creation date of the entry (used for auditing purposes)

    +
  • +
  • +

    updatedAt: last update date of the entry (used for auditing purposes)

    +
  • +
+
+
+

For more information about Entities, please visit the TypeORM entities page

+
+
+
+

Repository

+
+

With repositories, you can manage (insert, update, delete, load, etc.) a concrete entity. Using this pattern, we have separated the data (Entities) from the methods to manage it (Repositories).

+
+
+

To use a repository you only need to:

+
+
+
    +
  • +

    Import it in the module as follows:

    +
    +
    +
    @Module({
    +  imports: [TypeOrmModule.forFeature([Employee])],
    +})
    +
    +
    +
    + + + + + +
    + + +if you generate the entities with the devon4node schematic, this step is not necessary, devon4node schematic will do it for you. +
    +
    +
  • +
  • +

    Inject the repository as dependency in your service:

    +
    +
    +
    constructor(@InjectRepository(Employee) employeeRepository: Repository<Employee>) {}
    +
    +
    +
  • +
+
+
+

You can see more details in the NestJS database and NestJS TypeORM documentation pages.

+
+ +
+
+

Serializer

+
+

Serialization is the process of translating data structures or object state into a format that can be transmitted across network and reconstructed later.

+
+
+

NestJS by default serializes all data to JSON (JSON.stringify). Sometimes this is not enough. In some situations you need to exclude some property (e.g. password). Instead of doing it manually, devon4node provides an interceptor (ClassSerializerInterceptor) that will do it for you. You only need to return a class instance as always and the interceptor will transform those classes to the expected data.

+
+
+

The ClassSerializerInterceptor takes the class-transformer decorators in order to know how to transform the class and then send the result to the client.

+
+
+

Some of class-transformer decorators are:

+
+
+
    +
  • +

    Expose

    +
  • +
  • +

    Exclude

    +
  • +
  • +

    Type

    +
  • +
  • +

    Transform

    +
  • +
+
+
+

And methods to transform data:

+
+
+
    +
  • +

    plainToClass

    +
  • +
  • +

    plainToClassFromExist

    +
  • +
  • +

    classToPlain

    +
  • +
  • +

    classToClass

    +
  • +
  • +

    serialize

    +
  • +
  • +

    deserialize

    +
  • +
  • +

    deserializeArray

    +
  • +
+
+
+

See the class-transformer page for more information.

+
+
+

See NestJS serialization page for more information about ClassSerializerInterceptor.

+
+ +
+
+

Validation

+
+

To be sure that your application will work well, you must validate any input data. devon4node by default provides a ValidationPipe. This ValidationPipe is responsible for validating the request input and, if the input does not pass the validation process, it returns a 400 Bad Request error.

+
+
+
+

Defining Validators

+
+

The ValidationPipe needs to know how to validate the input. For that purpose we use the class-validator package. This package allows you to define the validation of a class by using decorators.

+
+
+

For example:

+
+
+
+
export class Coffee {
+  @IsDefined()
+  @IsString()
+  @MaxLength(255)
+  name: string;
+
+  @IsDefined()
+  @IsString()
+  @MaxLength(25)
+  type: string;
+
+  @IsDefined()
+  @IsNumber()
+  quantity: number;
+}
+
+
+
+

As you can see in the previous example, we used some decorators in order to define the validators for every property of the Coffee class. You can find all decorators in the class-validator github repository.

+
+
+

Now, when you want to receive a Coffee as input in some endpoint, it will execute the validations before executing the handler function.

+
+
+ + + + + +
+ + +In order to be able to use the class-validator package, you must use classes instead of interfaces. As you know interfaces disappear at compiling time, and class-validator need to know the metadata of the properties in order to be able to validate. +
+
+
+ + + + + +
+ + +The ValidationPipe only works if you put a specific type in the handler definition. For example, if you define a handler like getCoffee(@Body() coffee: any): Coffee {} the ValidationPipe will not do anything. You must specify the type of the input: getCoffee(@Body() coffee: Coffee): Coffee {} +
+
+ +
+
+

Logger

+
+

When you create a new devon4node application, it already has a logger: src/app/shared/logger/winston.logger.ts. This logger provides the methods log, error and warn. All of those methods will write a log message, but with a different log level.

+
+
+

The winston logger has two transports: one to log everything inside the file logs/general.log and the other to log only the error logs inside the file logs/error.log. In addition, it uses the default NestJS logger in order to show the logs in the console.

+
+
+

As you can see, it is a simple example about how to use a logger in a devon4node application. It will be updated to a more complex one in the next versions.

+
+
+
+

How to use logger

+
+

In order to use the logger you only need to inject the logger as a dependency:

+
+
+
+
constructor(logger: WinstonLogger){}
+
+
+
+

and then use it

+
+
+
+
async getAll() {
+  this.service.getAll();
+  this.logger.log('Returning all data');
+}
+
+
+ +
+
+

Mailer Module

+
+

This module enables you to send emails in devon4node. It also provides a template engine using Handlebars.

+
+
+

It is a NestJS module that injects into your application a MailerService, which is responsible for sending the emails using the nodemailer library.

+
+
+
+

Installing

+
+

Execute the following command in a devon4node project:

+
+
+
+
yarn add @devon4node/mailer
+
+
+
+
+

Configuring

+
+

To configure the mailer module, you only need to import it in your application into another module. Example:

+
+
+
+
@Module({
+  ...
+  imports: [
+    MailerModule.forRoot(),
+  ],
+  ...
+})
+
+
+
+

You must pass the configuration using the forRoot or forRootAsync methods.

+
+
+
+

forRoot()

+
+

The forRoot method receives an MailerModuleOptions object as parameter. It configures the MailerModule using the input MailerModuleOptions object.

+
+
+

The structure of MailerModuleOptions is:

+
+
+
+
{
+  hbsOptions?: {
+    templatesDir: string;
+    extension?: string;
+    partialsDir?: string;
+    helpers?: IHelperFunction[];
+    compilerOptions?: ICompileOptions;
+  },
+  mailOptions?: nodemailerSmtpTransportOptions;
+  emailFrom: string;
+}
+
+
+
+

Here, you need to specify the Handlebars compile options, the nodemailer transport options and the email address which will send the emails. +Then, you need to call to forRoot function in the module imports. Example:

+
+
+
+
@Module({
+  ...
+  imports: [
+    MailerModule.forRoot({
+      mailOptions: {
+        host: 'localhost',
+        port: 1025,
+        secure: false,
+        tls: {
+          rejectUnauthorized: false,
+        },
+      },
+      emailFrom: 'noreply@capgemini.com',
+      hbsOptions: {
+        templatesDir: join(__dirname, '../..', 'templates/views'),
+        partialsDir: join(__dirname, '../..', 'templates/partials'),
+        helpers: [{
+          name: 'fullname',
+          func: person => `${person.name} ${person.surname}`,s
+        }],
+      },
+    }),
+  ...
+})
+
+
+
+
+

forRootAsync()

+
+

The method forRootAsync enables you to get the mailer configuration in an asynchronous way. It is useful when you need to get the configuration using, for example, a service (e.g. ConfigurationService).

+
+
+

Example:

+
+
+
+
@Module({
+  ...
+  imports: [
+    MailerModule.forRootAsync({
+      imports: [ConfigurationModule],
+      useFactory: (config: ConfigurationService) => {
+        return config.mailerConfig;
+      },
+      inject: [ConfigurationService],
+    }),
+  ...
+})
+
+
+
+

In this example, we use the ConfigurationService in order to get the MailerModuleOptions (the same as forRoot)

+
+
+
+

Usage

+
+

In order to use it, you only need to inject the MailerService using dependency injection.

+
+
+

Example:

+
+
+
+
@Injectable()
+export class CatsService {
+  constructor(private readonly mailer: MailerService) {}
+}
+
+
+
+

Then, you only need to use the methods provided by the MailerService in your service. Take into account that you can inject it in every place that support NestJS dependency injection.

+
+
+
+

MailerService methods

+ +
+
+

== sendPlainMail

+
+

The method sendPlainMail receives a string and sends an email.

+
+
+

The method signatures are:

+
+
+
+
sendPlainMail(emailOptions: SendMailOptions): Promise<SentMessageInfo>;
+sendPlainMail(to: string, subject: string, mail: string): Promise<SentMessageInfo>;
+
+
+
+

Examples:

+
+
+
+
this.mailer.sendPlainMail({
+  to: 'example@example.com',
+  subject: 'This is a subject',
+  html: '<h1>Hello world</h1>'
+});
+this.mailer.sendPlainMail('example@example.com', 'This is a subject', '<h1>Hello world</h1>');
+
+
+
+
+

== sendTemplateMail

+
+

The method sendTemplateMail sends an email based on a Handlebars template. The templates are registered using the templatesDir option or using the addTemplate method. +The template name is the name of the template file (without extension) or the first parameter of the method addTemplate.

+
+
+

The method signatures are:

+
+
+
+
sendTemplateMail(emailOptions: SendMailOptions, templateName: string, emailData: any, hbsOptions?: RuntimeOptions): Promise<SentMessageInfo>;
+sendTemplateMail(to: string, subject: string, templateName: string, emailData: any, hbsOptions?: RuntimeOptions): Promise<SentMessageInfo>;
+
+
+
+

Examples:

+
+
+
+
this.mailer.sendTemplateMail({
+  to: 'example@example.com',
+  subject: 'This is a subject',
+  html: '<h1>Hello world</h1>'
+}, 'template1', { person: {name: 'Dario', surname: 'Rodriguez'}});
+this.mailer.sendTemplateMail('example@example.com', 'This is a subject', 'template1', { person: {name: 'Dario', surname: 'Rodriguez'}});
+
+
+
+
+

== addTemplate

+
+

Adds a new template to the MailerService.

+
+
+

Method signature:

+
+
+
+
addTemplate(name: string, template: string, options?: CompileOptions): void;
+
+
+
+

Example:

+
+
+
+
this.mailer.addTemplate('newTemplate', '<html><head></head><body>{{>partial1}}</body></html>')
+
+
+
+
+

== registerPartial

+
+

Register a new partial in Handlebars.

+
+
+

Method signature:

+
+
+
+
registerPartial(name: string, partial: Handlebars.Template<any>): void;
+
+
+
+

Example:

+
+
+
+
this.mailer.registerPartial('partial', '<h1>Hello World</h1>')
+
+
+
+
+

== registerHelper

+
+

Register a new helper in Handlebars.

+
+
+

Method signature:

+
+
+
+
registerHelper(name: string, helper: Handlebars.HelperDelegate): void;
+
+
+
+

Example:

+
+
+
+
this.mailer.registerHelper('fullname', person => `${person.name} ${person.surname}`)
+
+
+
+
+

Handlebars templates

+
+

As mentioned above, this module allows you to use Handlebars as template engine, but it is optional. If you do not need Handlebars, you just need to keep the hbsOptions undefined.

+
+
+

In order to get the templates from the file system, you can specify the template folder, the partials folder and the helpers. +At the moment of module initialization, it will read the content of the template folder, and will register every file with the name (without extension) and the content as a Handlebars template. It will do the same for the partials.

+
+
+

You can specify the extension of template files using the extension parameter. The default value is .handlebars

+
+
+
+

Local development

+
+

If you want to work with this module but you don’t have an SMTP server, you can use the streamTransport. Example:

+
+
+
+
{
+  mailOptions: {
+    streamTransport: true,
+    newline: 'windows',
+  },
+  emailFrom: ...
+  hbsOptions: ...
+}
+
+
+
+

Then, you need to get the sendPlainMail or sendTemplateMail result, and print the email to the standard output (STDOUT). Example:

+
+
+
+
const mail = await this.mailer.sendTemplateMail(...);
+
+mail.message.pipe(process.stdout);
+
+
+ +
+
+

Importing your ESLint reports into SonarQube

+
+

This guide covers the import of ESLint reports into SonarQube instances in CI environments, as this is the recommended way of using ESLint and SonarQube for devon4node projects. The prerequisites for this process are a CI environment, preferably a Production Line instance, and the ESLint CLI, which is already included when generating a new devon4node project.

+
+
+
+

Configuring the ESLint analysis

+
+

You can configure the ESLint analysis parameters in the .eslintrc.js file inside the top-level directory of your project. If you created your node project using the devon4node application schematic, this file will already exist. If you want to make further adjustments to it, have a look at the ESLint documentation.

+
+
+

The ESLint analysis script lint is already configured in the scripts part of your package.json. Simply add -f json > report.json, so that the output of the analysis is saved in a .json file. Additional information to customization options for the ESLint CLI can be found here.

+
+
+

To run the analysis, execute the script with npm run lint inside the base directory of your project.

+
+
+
+

Configuring SonarQube

+
+

If you haven’t already generated your CICD-related files, follow the tutorial on the devon4node schematic of our CICDGEN project, as you will need a Jenkinsfile configured in your project to proceed.

+
+
+

Inside the script for the SonarQube code analysis in your Jenkinsfile, add the parameter -Dsonar.eslint.reportPaths=report.json. Now, whenever a SonarQube analysis is triggered by your CI environment, the generated report will be loaded into your SonarQube instance. +To avoid duplicated issues, you can associate an empty TypeScript quality profile with your project in its server configurations.

+
+
+
+
+
+

devon4node applications

+
+ +
+

devon4node Samples

+
+

In the folder /samples, you can find some devon4node examples that could be useful for you in order to understand better the framework.

+
+
+

The samples are:

+
+
+ +
+
+

Also, we have another realistic example in the My Thai Star repository. This example is the implementation of My Thai Star backend, which is compatible with the frontend made with Angular. To do that, this node implementation exposes the same API as Java backend. Take care with this example, as we need to follow the Java API, some components do not follow the devon4node patterns and code conventions.

+
+
+
+

Todo example

+
+

This example is the backend part of a TO-DO application. It exposes an API where you can create, read, update and delete a TO-DO list.

+
+
+

In order to start the application, run the following commands in the todo folder:

+
+
+
+
$ yarn
+$ yarn build
+$ yarn start
+
+
+
+

Now, you can access the application using the url http://localhost:3000/v1/todo/todos. If you want to know all endpoints exposed, you can see the swagger at: http://localhost:3000/v1/api.

+
+
+

Also, in this example we show you how to control the access to your application by implementing an authentication mechanism using JWT and a role-based strategy. In order to access the list of todos (http://localhost:3000/v1/todo/todos), first you need to call POST http://localhost:3000/v1/auth/login and in the body you need to send the user information:

+
+
+
+
{
+  "username": "user",
+  "password": "password"
+}
+
+
+
+

It will return a JWT token for the user user. The role of this user is USER, so you can only access the methods GET, POST and DELETE of the endpoint http://localhost:3000/v1/todo/todos. If you log in with the user admin/admin, you will be able to access the methods UPDATE and PATCH.

+
+
+
+

Employee example

+
+

This is an example of employee management application. With the application you can create, read, update and delete employees.

+
+
+

In order to start the application, run the following commands in the todo folder:

+
+
+
+
$ yarn
+$ yarn build
+$ yarn start
+
+
+
+

Now, you can access the application using the url http://localhost:8081/v1/employee/employees. If you want to know all endpoints exposed, you can see the swagger at: http://localhost:8081/v1/api.

+
+
+

This is a simple example without authentication. With this example you can learn how to work with database migrations. You can find them in the folder /src/migrations. The TypeORM is configured in order to execute the migrations every time that you start this application at ormconfig.json with the following flag:

+
+
+
+
"migrationsRun": true
+
+
+
+

You can also execute the migration manually by typing the command devon4node db migration:run, or revert executing devon4node db migration:revert. Take into account that the database that this application is using is an in-memory sqlite, so every time that you stop the application all data is lost.

+
+
+
+

Components example

+
+

This example allows you to better understand the execution order of the components of a devon4node application (guards, pipes, interceptors, filters, middleware).

+
+
+

In order to start the application, run the following commands in the todo folder:

+
+
+
+
$ yarn
+$ yarn build
+$ yarn start
+
+
+
+

In order to see the execution order, you can call to http://localhost:3000/v1. It will show you the execution order of all components except the filters. If you want to know the execution order while a filter is applied, call to the endpoint with the following queries: ?hello=error, ?hello=controller, ?hello=global.

+
+ +
+
+

Create the employee sample step by step

+ +
+
+

Application requisites

+
+

The employee application needs:

+
+
+
    +
  • +

    A configuration module

    +
  • +
  • +

    A SQLite in memory database

    +
  • +
  • +

    Security: CORS

    +
  • +
  • +

    Swagger support

    +
  • +
  • +

    Authentication using JWT

    +
  • +
  • +

    CRUD for manage employees. The employees will have the following properties:

    +
    +
      +
    • +

      name

      +
    • +
    • +

      surname

      +
    • +
    • +

      email

      +
    • +
    +
    +
  • +
+
+
+
+

Create the application

+
+
    +
  1. +

    Install Nest CLI

    +
    +

    Execute the command yarn global add @nestjs/cli

    +
    +
  2. +
  3. +

    Install devon4node schematics

    +
  4. +
  5. +

    Execute the command yarn global add @devon4node/schematics

    +
  6. +
  7. +

    Create the new application

    +
    +

    Execute the command nest g -c @devon4node/schematics application employee

    +
    +
  8. +
  9. +

    Then, we need to add some components, go inside the project folder and execute the following commands:

    +
    +

    Go inside project folder: cd employee.

    +
    +
    +

    Config module: nest g -c @devon4node/schematics config-module.

    +
    +
    +

    TypeORM database, choose sqlite DB when asked nest g -c @devon4node/schematics typeorm.

    +
    +
    +

    Add security: nest g -c @devon4node/schematics security.

    +
    +
    +

    Swagger module: nest g -c @devon4node/schematics swagger.

    +
    +
    +

    Auth-jwt authentication: nest g -c @devon4node/schematics auth-jwt.

    +
    +
    +

    Add an application module: nest g -c @devon4node/schematics module employee.

    +
    +
    +

    Add CRUD component: nest g -c @devon4node/schematics crud employee/employee.

    +
    +
    +

    With this, you will generate the following files:

    +
    +
    +
    +
    /employee/.prettierrc
    +/employee/nest-cli.json
    +/employee/package.json
    +/employee/README.md
    +/employee/tsconfig.build.json
    +/employee/tsconfig.json
    +/employee/tslint.json
    +/employee/src/main.ts
    +/employee/test/app.e2e-spec.ts
    +/employee/test/jest-e2e.json
    +/employee/src/app/app.controller.spec.ts
    +/employee/src/app/app.controller.ts
    +/employee/src/app/app.module.ts
    +/employee/src/app/app.service.ts
    +/employee/src/app/core/core.module.ts
    +/employee/src/app/shared/logger/winston.logger.ts
    +/employee/src/app/core/configuration/configuration.module.ts
    +/employee/src/app/core/configuration/model/index.ts
    +/employee/src/app/core/configuration/model/types.ts
    +/employee/src/app/core/configuration/services/configuration.service.spec.ts
    +/employee/src/app/core/configuration/services/configuration.service.ts
    +/employee/src/app/core/configuration/services/index.ts
    +/employee/src/config/default.ts
    +/employee/src/config/develop.ts
    +/employee/src/config/production.ts
    +/employee/src/config/test.ts
    +/employee/src/config/uat.ts
    +/employee/docker-compose.yml
    +/employee/ormconfig.json
    +/employee/src/app/shared/model/entities/base-entity.entity.ts
    +/employee/src/app/core/auth/auth.module.ts
    +/employee/src/app/core/auth/controllers/auth.controller.spec.ts
    +/employee/src/app/core/auth/controllers/auth.controller.ts
    +/employee/src/app/core/auth/controllers/index.ts
    +/employee/src/app/core/auth/decorators/index.ts
    +/employee/src/app/core/auth/decorators/roles.decorator.spec.ts
    +/employee/src/app/core/auth/decorators/roles.decorator.ts
    +/employee/src/app/core/auth/guards/index.ts
    +/employee/src/app/core/auth/guards/roles.guard.spec.ts
    +/employee/src/app/core/auth/guards/roles.guard.ts
    +/employee/src/app/core/auth/model/index.ts
    +/employee/src/app/core/auth/model/roles.enum.ts
    +/employee/src/app/core/auth/model/user-request.interface.ts
    +/employee/src/app/core/auth/services/auth.service.spec.ts
    +/employee/src/app/core/auth/services/auth.service.ts
    +/employee/src/app/core/auth/services/index.ts
    +/employee/src/app/core/auth/strategies/index.ts
    +/employee/src/app/core/auth/strategies/jwt.strategy.spec.ts
    +/employee/src/app/core/auth/strategies/jwt.strategy.ts
    +/employee/src/app/core/user/user.module.ts
    +/employee/src/app/core/user/model/index.ts
    +/employee/src/app/core/user/model/dto/user-payload.dto.ts
    +/employee/src/app/core/user/model/entities/user.entity.ts
    +/employee/src/app/core/user/services/index.ts
    +/employee/src/app/core/user/services/user.service.spec.ts
    +/employee/src/app/core/user/services/user.service.ts
    +/employee/test/auth/auth.service.mock.ts
    +/employee/test/user/user.repository.mock.ts
    +/employee/src/app/employee/employee.module.ts
    +/employee/src/app/employee/model/entities/employee.entity.ts
    +/employee/src/app/employee/model/index.ts
    +/employee/src/app/employee/controllers/employee.crud.controller.ts
    +/employee/src/app/employee/services/employee.crud.service.ts
    +/employee/src/app/employee/services/index.ts
    +/employee/src/app/employee/controllers/index.ts
    +
    +
    +
  10. +
  11. +

    Open the VSCode

    +
    +

    Execute the commands:

    +
    +
    +
    +
    yarn install
    +code .
    +
    +
    +
  12. +
  13. +

    Fill in the entity: src/app/employee/model/entities/employee.entity.ts

    +
    +
      +
    1. +

      Add the columns

      +
      +
      +
      @Entity()
      +export class Employee extends BaseEntity {
      +  @Column('varchar', { length: 255, nullable: true })
      +  name?: string;
      +
      +  @Column('varchar', { length: 255, nullable: true })
      +  surname?: string;
      +
      +  @Column('varchar', { length: 255, nullable: true })
      +  email?: string;
      +}
      +
      +
      +
    2. +
    3. +

      Add the validations

      +
      +
      +
      @Entity()
      +export class Employee extends BaseEntity {
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  name?: string;
      +
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  surname?: string;
      +
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @IsEmail()
      +  @Column('varchar', { length: 255, nullable: true })
      +  email?: string;
      +}
      +
      +
      +
    4. +
    5. +

      Add the transformations

      +
      +

      In this specific case, we will not transform any property, but you can see an example in the src/app/shared/model/entities/base-entity.entity.ts file.

      +
      +
      +
      +
      export abstract class BaseEntity {
      +  @PrimaryGeneratedColumn('increment')
      +  id!: number;
      +
      +  @VersionColumn({ default: 1 })
      +  @Exclude({ toPlainOnly: true })
      +  version!: number;
      +
      +  @CreateDateColumn()
      +  @Exclude({ toPlainOnly: true })
      +  createdAt!: string;
      +
      +  @UpdateDateColumn()
      +  @Exclude({ toPlainOnly: true })
      +  updatedAt!: string;
      +}
      +
      +
      +
    6. +
    7. +

      Add swagger metadata

      +
      +
      +
      @Entity()
      +export class Employee extends BaseEntity {
      +  @ApiPropertyOptional()
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  name?: string;
      +
      +  @ApiPropertyOptional()
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  surname?: string;
      +
      +  @ApiPropertyOptional()
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @IsEmail()
      +  @Column('varchar', { length: 255, nullable: true })
      +  email?: string;
      +}
      +
      +
      +
    8. +
    +
    +
  14. +
  15. +

    Add swagger metadata to src/app/employee/controllers/employee.crud.controller.ts

    +
    +
    +
    @ApiTags('employee')
    +
    +
    +
  16. +
  17. +

    Generate database migrations

    +
    +
      +
    1. +

      Build the application: yarn build

      +
    2. +
    3. +

      In order to create migration scripts with TypeORM, you need to install ts-node: yarn global add ts-node

      +
    4. +
    5. +

      Generate the tables creation migration: yarn run typeorm migration:generate -n CreateTables

      +
      +
      +generate migrations +
      +
      +
      +

      The output will be something similar to:

      +
      +
      +
      +
      export class CreateTables1572480273012 implements MigrationInterface {
      +  name = 'CreateTables1572480273012';
      +
      +  public async up(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(
      +      `CREATE TABLE "user" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "version" integer NOT NULL DEFAULT (1), "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "updatedAt" datetime NOT NULL DEFAULT (datetime('now')), "username" varchar(255) NOT NULL, "password" varchar(255) NOT NULL, "role" integer NOT NULL DEFAULT (0))`,
      +      undefined,
      +    );
      +    await queryRunner.query(
      +      `CREATE TABLE "employee" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "version" integer NOT NULL DEFAULT (1), "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "updatedAt" datetime NOT NULL DEFAULT (datetime('now')), "name" varchar(255), "surname" varchar(255), "email" varchar(255))`,
      +      undefined,
      +    );
      +  }
      +
      +  public async down(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(`DROP TABLE "employee"`, undefined);
      +    await queryRunner.query(`DROP TABLE "user"`, undefined);
      +  }
      +}
      +
      +
      +
      +

      The number in the name is a timestamp, so may change in your application.

      +
      +
    6. +
    7. +

      Create a migration to insert data: `yarn run typeorm migration:generate -n InsertData`

      +
      +
      +insert data +
      +
      +
      +

      and fill in with the following code:

      +
      +
      +
      +
      export class InsertData1572480830290 implements MigrationInterface {
      +  public async up(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(1, 'Santiago', 'Fowler', 'Santiago.Fowler@example.com');`,
      +    );
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(2, 'Clinton', 'Thornton', 'Clinton.Thornton@example.com');`,
      +    );
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(3, 'Lisa', 'Rodriquez', 'Lisa.Rodriquez@example.com');`,
      +    );
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(4, 'Calvin', 'Becker', 'Calvin.Becker@example.com');`,
      +    );
      +    await queryRunner.query(`INSERT INTO USER(id, username, password, role) VALUES(?, ?, ?, ?);`, [
      +      1,
      +      'user',
      +      await hash('password', await genSalt(12)),
      +      roles.USER,
      +    ]);
      +    await queryRunner.query(`INSERT INTO USER(id, username, password, role) VALUES(?, ?, ?, ?);`, [
      +      2,
      +      'admin',
      +      await hash('admin', await genSalt(12)),
      +      roles.ADMIN,
      +    ]);
      +  }
      +
      +  public async down(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(`DELETE FROM EMPLOYEE`);
      +    await queryRunner.query(`DELETE FROM USER`);
      +  }
      +}
      +
      +
      +
    8. +
    +
    +
  18. +
  19. +

    Start the application: yarn start:dev

    +
    +
    +start app +
    +
    +
  20. +
  21. +

    Check the swagger endpoint: http://localhost:3000/v1/api

    +
    +
    +swagger +
    +
    +
  22. +
  23. +

    Make petitions to the employee CRUD: http://localhost:3000/v1/employee/employees

    +
    +
    +employees +
    +
    +
  24. +
  25. +

    Write the tests

    +
    +

    As we did not create any new methods — we only added some properties to the entity — the whole application is covered by the autogenerated code. Since we added some modules, you need to uncomment some lines in src/app/core/configuration/services/configuration.service.spec.ts:

    +
    +
    +
    +
    describe('ConfigurationService', () => {
    +  const configService: ConfigurationService = new ConfigurationService();
    +
    +  it('should return the values of test config file', () => {
    +    expect(configService.isDev).toStrictEqual(def.isDev);
    +    expect(configService.host).toStrictEqual(def.host);
    +    expect(configService.port).toStrictEqual(def.port);
    +    expect(configService.clientUrl).toStrictEqual(def.clientUrl);
    +    expect(configService.globalPrefix).toStrictEqual(def.globalPrefix);
    +    // Remove comments if you add those modules
    +    expect(configService.database).toStrictEqual(def.database);
    +    expect(configService.swaggerConfig).toStrictEqual(def.swaggerConfig);
    +    expect(configService.jwtConfig).toStrictEqual(def.jwtConfig);
    +    // expect(configService.mailerConfig).toStrictEqual(def.mailerConfig);
    +  });
    +  it('should take the value of environment varible if defined', () => {
    +    process.env.isDev = 'true';
    +    process.env.host = 'notlocalhost';
    +    process.env.port = '123456';
    +    process.env.clientUrl = 'http://theclienturl.net';
    +    process.env.globalPrefix = 'v2';
    +    process.env.swaggerConfig = JSON.stringify({
    +      swaggerTitle: 'Test Application',
    +    });
    +    process.env.database = JSON.stringify({
    +      type: 'oracle',
    +      cli: { entitiesDir: 'src/notentitiesdir' },
    +    });
    +    process.env.jwtConfig = JSON.stringify({ secret: 'NOTSECRET' });
    +    // process.env.mailerConfig = JSON.stringify({ mailOptions: { host: 'notlocalhost' }});
    +
    +    expect(configService.isDev).toBe(true);
    +    expect(configService.host).toBe('notlocalhost');
    +    expect(configService.port).toBe(123456);
    +    expect(configService.clientUrl).toBe('http://theclienturl.net');
    +    expect(configService.globalPrefix).toBe('v2');
    +    const database: any = { ...def.database, type: 'oracle' };
    +    database.cli.entitiesDir = 'src/notentitiesdir';
    +    expect(configService.database).toStrictEqual(database);
    +    expect(configService.swaggerConfig).toStrictEqual({
    +      ...def.swaggerConfig,
    +      swaggerTitle: 'Test Application',
    +    });
    +    expect(configService.jwtConfig).toStrictEqual({
    +      ...def.jwtConfig,
    +      secret: 'NOTSECRET',
    +    });
    +    // const mail: any = { ...def.mailerConfig };
    +    // mail.mailOptions.host = 'notlocalhost';
    +    // expect(configService.mailerConfig).toStrictEqual(mail);
    +
    +    process.env.isDev = undefined;
    +    process.env.host = undefined;
    +    process.env.port = undefined;
    +    process.env.clientUrl = undefined;
    +    process.env.globalPrefix = undefined;
    +    process.env.database = undefined;
    +    process.env.swaggerConfig = undefined;
    +    process.env.jwtConfig = undefined;
    +    // process.env.mailerConfig = undefined;
    +  });
    +});
    +
    +
    +
    +

    And the output should be:

    +
    +
    +
    +test +
    +
    +
  26. +
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/samples-step-by-step.html b/docs/devon4ts/1.0/nest/samples-step-by-step.html new file mode 100644 index 00000000..40aa2cc1 --- /dev/null +++ b/docs/devon4ts/1.0/nest/samples-step-by-step.html @@ -0,0 +1,780 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Create the employee sample step by step

+
+ +
+
+
+

Application requisites

+
+
+

The employee application needs:

+
+
+
    +
  • +

    A configuration module

    +
  • +
  • +

    A SQLite in memory database

    +
  • +
  • +

    Security: CORS

    +
  • +
  • +

    Swagger support

    +
  • +
  • +

    Authentication using JWT

    +
  • +
  • +

    CRUD for manage employees. The employees will have the following properties:

    +
    +
      +
    • +

      name

      +
    • +
    • +

      surname

      +
    • +
    • +

      email

      +
    • +
    +
    +
  • +
+
+
+
+
+

Create the application

+
+
+
    +
  1. +

    Install Nest CLI

    +
    +

    Execute the command yarn global add @nestjs/cli

    +
    +
  2. +
  3. +

    Install devon4node schematics

    +
  4. +
  5. +

    Execute the command yarn global add @devon4node/schematics

    +
  6. +
  7. +

    Create the new application

    +
    +

    Execute the command nest g -c @devon4node/schematics application employee

    +
    +
  8. +
  9. +

    Then, we need to add some components, go inside the project folder and execute the following commands:

    +
    +

    Go inside project folder: cd employee.

    +
    +
    +

    Config module: nest g -c @devon4node/schematics config-module.

    +
    +
    +

    TypeORM database, choose sqlite DB when asked nest g -c @devon4node/schematics typeorm.

    +
    +
    +

    Add security: nest g -c @devon4node/schematics security.

    +
    +
    +

    Swagger module: nest g -c @devon4node/schematics swagger.

    +
    +
    +

    Auth-jwt authentication: nest g -c @devon4node/schematics auth-jwt.

    +
    +
    +

    Add an application module: nest g -c @devon4node/schematics module employee.

    +
    +
    +

    Add CRUD component: nest g -c @devon4node/schematics crud employee/employee.

    +
    +
    +

    With this, you will generate the following files:

    +
    +
    +
    +
    /employee/.prettierrc
    +/employee/nest-cli.json
    +/employee/package.json
    +/employee/README.md
    +/employee/tsconfig.build.json
    +/employee/tsconfig.json
    +/employee/tslint.json
    +/employee/src/main.ts
    +/employee/test/app.e2e-spec.ts
    +/employee/test/jest-e2e.json
    +/employee/src/app/app.controller.spec.ts
    +/employee/src/app/app.controller.ts
    +/employee/src/app/app.module.ts
    +/employee/src/app/app.service.ts
    +/employee/src/app/core/core.module.ts
    +/employee/src/app/shared/logger/winston.logger.ts
    +/employee/src/app/core/configuration/configuration.module.ts
    +/employee/src/app/core/configuration/model/index.ts
    +/employee/src/app/core/configuration/model/types.ts
    +/employee/src/app/core/configuration/services/configuration.service.spec.ts
    +/employee/src/app/core/configuration/services/configuration.service.ts
    +/employee/src/app/core/configuration/services/index.ts
    +/employee/src/config/default.ts
    +/employee/src/config/develop.ts
    +/employee/src/config/production.ts
    +/employee/src/config/test.ts
    +/employee/src/config/uat.ts
    +/employee/docker-compose.yml
    +/employee/ormconfig.json
    +/employee/src/app/shared/model/entities/base-entity.entity.ts
    +/employee/src/app/core/auth/auth.module.ts
    +/employee/src/app/core/auth/controllers/auth.controller.spec.ts
    +/employee/src/app/core/auth/controllers/auth.controller.ts
    +/employee/src/app/core/auth/controllers/index.ts
    +/employee/src/app/core/auth/decorators/index.ts
    +/employee/src/app/core/auth/decorators/roles.decorator.spec.ts
    +/employee/src/app/core/auth/decorators/roles.decorator.ts
    +/employee/src/app/core/auth/guards/index.ts
    +/employee/src/app/core/auth/guards/roles.guard.spec.ts
    +/employee/src/app/core/auth/guards/roles.guard.ts
    +/employee/src/app/core/auth/model/index.ts
    +/employee/src/app/core/auth/model/roles.enum.ts
    +/employee/src/app/core/auth/model/user-request.interface.ts
    +/employee/src/app/core/auth/services/auth.service.spec.ts
    +/employee/src/app/core/auth/services/auth.service.ts
    +/employee/src/app/core/auth/services/index.ts
    +/employee/src/app/core/auth/strategies/index.ts
    +/employee/src/app/core/auth/strategies/jwt.strategy.spec.ts
    +/employee/src/app/core/auth/strategies/jwt.strategy.ts
    +/employee/src/app/core/user/user.module.ts
    +/employee/src/app/core/user/model/index.ts
    +/employee/src/app/core/user/model/dto/user-payload.dto.ts
    +/employee/src/app/core/user/model/entities/user.entity.ts
    +/employee/src/app/core/user/services/index.ts
    +/employee/src/app/core/user/services/user.service.spec.ts
    +/employee/src/app/core/user/services/user.service.ts
    +/employee/test/auth/auth.service.mock.ts
    +/employee/test/user/user.repository.mock.ts
    +/employee/src/app/employee/employee.module.ts
    +/employee/src/app/employee/model/entities/employee.entity.ts
    +/employee/src/app/employee/model/index.ts
    +/employee/src/app/employee/controllers/employee.crud.controller.ts
    +/employee/src/app/employee/services/employee.crud.service.ts
    +/employee/src/app/employee/services/index.ts
    +/employee/src/app/employee/controllers/index.ts
    +
    +
    +
  10. +
  11. +

    Open the VSCode

    +
    +

    Execute the commands:

    +
    +
    +
    +
    yarn install
    +code .
    +
    +
    +
  12. +
  13. +

    Fill in the entity: src/app/employee/model/entities/employee.entity.ts

    +
    +
      +
    1. +

      Add the columns

      +
      +
      +
      @Entity()
      +export class Employee extends BaseEntity {
      +  @Column('varchar', { length: 255, nullable: true })
      +  name?: string;
      +
      +  @Column('varchar', { length: 255, nullable: true })
      +  surname?: string;
      +
      +  @Column('varchar', { length: 255, nullable: true })
      +  email?: string;
      +}
      +
      +
      +
    2. +
    3. +

      Add the validations

      +
      +
      +
      @Entity()
      +export class Employee extends BaseEntity {
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  name?: string;
      +
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  surname?: string;
      +
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @IsEmail()
      +  @Column('varchar', { length: 255, nullable: true })
      +  email?: string;
      +}
      +
      +
      +
    4. +
    5. +

      Add the transformations

      +
      +

      In this specific case, we will not transform any property, but you can see an example in the src/app/shared/model/entities/base-entity.entity.ts file.

      +
      +
      +
      +
      export abstract class BaseEntity {
      +  @PrimaryGeneratedColumn('increment')
      +  id!: number;
      +
      +  @VersionColumn({ default: 1 })
      +  @Exclude({ toPlainOnly: true })
      +  version!: number;
      +
      +  @CreateDateColumn()
      +  @Exclude({ toPlainOnly: true })
      +  createdAt!: string;
      +
      +  @UpdateDateColumn()
      +  @Exclude({ toPlainOnly: true })
      +  updatedAt!: string;
      +}
      +
      +
      +
    6. +
    7. +

      Add swagger metadata

      +
      +
      +
      @Entity()
      +export class Employee extends BaseEntity {
      +  @ApiPropertyOptional()
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  name?: string;
      +
      +  @ApiPropertyOptional()
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  surname?: string;
      +
      +  @ApiPropertyOptional()
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @IsEmail()
      +  @Column('varchar', { length: 255, nullable: true })
      +  email?: string;
      +}
      +
      +
      +
    8. +
    +
    +
  14. +
  15. +

    Add swagger metadata to src/app/employee/controllers/employee.crud.controller.ts

    +
    +
    +
    @ApiTags('employee')
    +
    +
    +
  16. +
  17. +

    Generate database migrations

    +
    +
      +
    1. +

      Build the application: yarn build

      +
    2. +
    3. +

      In order to create migration scripts with TypeORM, you need to install ts-node: yarn global add ts-node

      +
    4. +
    5. +

      Generate the tables creation migration: yarn run typeorm migration:generate -n CreateTables

      +
      +
      +generate migrations +
      +
      +
      +

      The output will be something similar to:

      +
      +
      +
      +
      export class CreateTables1572480273012 implements MigrationInterface {
      +  name = 'CreateTables1572480273012';
      +
      +  public async up(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(
      +      `CREATE TABLE "user" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "version" integer NOT NULL DEFAULT (1), "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "updatedAt" datetime NOT NULL DEFAULT (datetime('now')), "username" varchar(255) NOT NULL, "password" varchar(255) NOT NULL, "role" integer NOT NULL DEFAULT (0))`,
      +      undefined,
      +    );
      +    await queryRunner.query(
      +      `CREATE TABLE "employee" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "version" integer NOT NULL DEFAULT (1), "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "updatedAt" datetime NOT NULL DEFAULT (datetime('now')), "name" varchar(255), "surname" varchar(255), "email" varchar(255))`,
      +      undefined,
      +    );
      +  }
      +
      +  public async down(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(`DROP TABLE "employee"`, undefined);
      +    await queryRunner.query(`DROP TABLE "user"`, undefined);
      +  }
      +}
      +
      +
      +
      +

      The number in the name is a timestamp, so may change in your application.

      +
      +
    6. +
    7. +

      Create a migration to insert data: `yarn run typeorm migration:generate -n InsertData`

      +
      +
      +insert data +
      +
      +
      +

      and fill in with the following code:

      +
      +
      +
      +
      export class InsertData1572480830290 implements MigrationInterface {
      +  public async up(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(1, 'Santiago', 'Fowler', 'Santiago.Fowler@example.com');`,
      +    );
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(2, 'Clinton', 'Thornton', 'Clinton.Thornton@example.com');`,
      +    );
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(3, 'Lisa', 'Rodriquez', 'Lisa.Rodriquez@example.com');`,
      +    );
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(4, 'Calvin', 'Becker', 'Calvin.Becker@example.com');`,
      +    );
      +    await queryRunner.query(`INSERT INTO USER(id, username, password, role) VALUES(?, ?, ?, ?);`, [
      +      1,
      +      'user',
      +      await hash('password', await genSalt(12)),
      +      roles.USER,
      +    ]);
      +    await queryRunner.query(`INSERT INTO USER(id, username, password, role) VALUES(?, ?, ?, ?);`, [
      +      2,
      +      'admin',
      +      await hash('admin', await genSalt(12)),
      +      roles.ADMIN,
      +    ]);
      +  }
      +
      +  public async down(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(`DELETE FROM EMPLOYEE`);
      +    await queryRunner.query(`DELETE FROM USER`);
      +  }
      +}
      +
      +
      +
    8. +
    +
    +
  18. +
  19. +

    Start the application: yarn start:dev

    +
    +
    +start app +
    +
    +
  20. +
  21. +

    Check the swagger endpoint: http://localhost:3000/v1/api

    +
    +
    +swagger +
    +
    +
  22. +
  23. +

    Make petitions to the employee CRUD: http://localhost:3000/v1/employee/employees

    +
    +
    +employees +
    +
    +
  24. +
  25. +

    Write the tests

    +
    +

    As we did not create any new methods — we only added some properties to the entity — the whole application is covered by the autogenerated code. Since we added some modules, you need to uncomment some lines in src/app/core/configuration/services/configuration.service.spec.ts:

    +
    +
    +
    +
    describe('ConfigurationService', () => {
    +  const configService: ConfigurationService = new ConfigurationService();
    +
    +  it('should return the values of test config file', () => {
    +    expect(configService.isDev).toStrictEqual(def.isDev);
    +    expect(configService.host).toStrictEqual(def.host);
    +    expect(configService.port).toStrictEqual(def.port);
    +    expect(configService.clientUrl).toStrictEqual(def.clientUrl);
    +    expect(configService.globalPrefix).toStrictEqual(def.globalPrefix);
    +    // Remove comments if you add those modules
    +    expect(configService.database).toStrictEqual(def.database);
    +    expect(configService.swaggerConfig).toStrictEqual(def.swaggerConfig);
    +    expect(configService.jwtConfig).toStrictEqual(def.jwtConfig);
    +    // expect(configService.mailerConfig).toStrictEqual(def.mailerConfig);
    +  });
    +  it('should take the value of environment varible if defined', () => {
    +    process.env.isDev = 'true';
    +    process.env.host = 'notlocalhost';
    +    process.env.port = '123456';
    +    process.env.clientUrl = 'http://theclienturl.net';
    +    process.env.globalPrefix = 'v2';
    +    process.env.swaggerConfig = JSON.stringify({
    +      swaggerTitle: 'Test Application',
    +    });
    +    process.env.database = JSON.stringify({
    +      type: 'oracle',
    +      cli: { entitiesDir: 'src/notentitiesdir' },
    +    });
    +    process.env.jwtConfig = JSON.stringify({ secret: 'NOTSECRET' });
    +    // process.env.mailerConfig = JSON.stringify({ mailOptions: { host: 'notlocalhost' }});
    +
    +    expect(configService.isDev).toBe(true);
    +    expect(configService.host).toBe('notlocalhost');
    +    expect(configService.port).toBe(123456);
    +    expect(configService.clientUrl).toBe('http://theclienturl.net');
    +    expect(configService.globalPrefix).toBe('v2');
    +    const database: any = { ...def.database, type: 'oracle' };
    +    database.cli.entitiesDir = 'src/notentitiesdir';
    +    expect(configService.database).toStrictEqual(database);
    +    expect(configService.swaggerConfig).toStrictEqual({
    +      ...def.swaggerConfig,
    +      swaggerTitle: 'Test Application',
    +    });
    +    expect(configService.jwtConfig).toStrictEqual({
    +      ...def.jwtConfig,
    +      secret: 'NOTSECRET',
    +    });
    +    // const mail: any = { ...def.mailerConfig };
    +    // mail.mailOptions.host = 'notlocalhost';
    +    // expect(configService.mailerConfig).toStrictEqual(mail);
    +
    +    process.env.isDev = undefined;
    +    process.env.host = undefined;
    +    process.env.port = undefined;
    +    process.env.clientUrl = undefined;
    +    process.env.globalPrefix = undefined;
    +    process.env.database = undefined;
    +    process.env.swaggerConfig = undefined;
    +    process.env.jwtConfig = undefined;
    +    // process.env.mailerConfig = undefined;
    +  });
    +});
    +
    +
    +
    +

    And the output should be:

    +
    +
    +
    +test +
    +
    +
  26. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devon4ts/1.0/nest/samples.html b/docs/devon4ts/1.0/nest/samples.html new file mode 100644 index 00000000..ef6d996e --- /dev/null +++ b/docs/devon4ts/1.0/nest/samples.html @@ -0,0 +1,386 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4node Samples

+
+
+

In the /samples folder, you can find some devon4node examples that may help you better understand the framework.

+
+
+

The samples are:

+
+
+ +
+
+

Also, we have another, more realistic example in the My Thai Star repository. This example is the implementation of the My Thai Star backend, which is compatible with the frontend made with Angular. To achieve that, this Node.js implementation exposes the same API as the Java backend. Take care with this example: since we need to follow the Java API, some components do not follow the devon4node patterns and code conventions.

+
+
+
+
+

Todo example

+
+
+

This example is the backend part of a TO-DO application. It exposes an API where you can create, read, update and delete a TO-DO list.

+
+
+

In order to start the application, run the following commands in the todo folder:

+
+
+
+
$ yarn
+$ yarn build
+$ yarn start
+
+
+
+

Now, you can access the application using the URL http://localhost:3000/v1/todo/todos. If you want to know all the exposed endpoints, you can see the swagger at: http://localhost:3000/v1/api.

+
+
+

Also, in this example we show you how to control access to your application by implementing an authentication mechanism using JWT and a role-based strategy. In order to access the list of todos (http://localhost:3000/v1/todo/todos), first you need to call POST http://localhost:3000/v1/auth/login and send the user information in the body:

+
+
+
+
{
+  "username": "user",
+  "password": "password"
+}
+
+
+
+

It will return a JWT token for the user user. The role of this user is USER, so you can only access the methods GET, POST and DELETE of the endpoint http://localhost:3000/v1/todo/todos. If you log in with the user admin/admin, you will be able to access the methods UPDATE and PATCH.

+
+
+
+
+

Employee example

+
+
+

This is an example of an employee management application. With this application you can create, read, update and delete employees.

+
+
+

In order to start the application, run the following commands in the employee folder:

+
+
+
+
$ yarn
+$ yarn build
+$ yarn start
+
+
+
+

Now, you can access the application using the URL http://localhost:8081/v1/employee/employees. If you want to know all the exposed endpoints, you can see the swagger at: http://localhost:8081/v1/api.

+
+
+

This is a simple example without authentication. With this example you can learn how to work with database migrations. You can find them in the /src/migrations folder. TypeORM is configured, in ormconfig.json, to execute the migrations every time you start the application, using the following flag:

+
+
+
+
"migrationsRun": true
+
+
+
+

You can also execute the migrations manually by typing the command devon4node db migration:run, or revert them by executing devon4node db migration:revert. Take into account that the database this application uses is an in-memory SQLite, so every time you stop the application all data is lost.

+
+
+
+
+

Components example

+
+
+

This example allows you to better understand the execution order of the components of a devon4node application (guards, pipes, interceptors, filters, middleware).

+
+
+

In order to start the application, run the following commands in the components folder:

+
+
+
+
$ yarn
+$ yarn build
+$ yarn start
+
+
+
+

In order to see the execution order, you can call http://localhost:3000/v1. It will show you the execution order of all components except the filters. If you want to know the execution order while a filter is applied, call the endpoint with one of the following queries: ?hello=error, ?hello=controller, ?hello=global.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/1.png b/docs/devonfw.github.io/1.0/_images/images/1.png new file mode 100644 index 00000000..ba345d97 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/2.png b/docs/devonfw.github.io/1.0/_images/images/2.png new file mode 100644 index 00000000..b39a4b14 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/3.png b/docs/devonfw.github.io/1.0/_images/images/3.png new file mode 100644 index 00000000..a078a12a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/4.png b/docs/devonfw.github.io/1.0/_images/images/4.png new file mode 100644 index 00000000..7c196534 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/4.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/6.png b/docs/devonfw.github.io/1.0/_images/images/6.png new file mode 100644 index 00000000..635d6a5c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/6.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/7.png b/docs/devonfw.github.io/1.0/_images/images/7.png new file mode 100644 index 00000000..0c0ab107 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/7.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/8.png b/docs/devonfw.github.io/1.0/_images/images/8.png new file mode 100644 index 00000000..6014c7a8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/8.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/9.png b/docs/devonfw.github.io/1.0/_images/images/9.png new file mode 100644 index 00000000..e9d6fd82 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/9.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/BussinessConfiguration.png b/docs/devonfw.github.io/1.0/_images/images/BussinessConfiguration.png new file mode 100644 index 00000000..1dffe8f6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/BussinessConfiguration.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/CG-architectureBackground.png b/docs/devonfw.github.io/1.0/_images/images/CG-architectureBackground.png new file mode 100644 index 00000000..91b3d696 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/CG-architectureBackground.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/CapgeminiLogo.png b/docs/devonfw.github.io/1.0/_images/images/CapgeminiLogo.png new file mode 100644 index 00000000..9ce5e7e5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/CapgeminiLogo.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/CapgeminiLogoWhite.png b/docs/devonfw.github.io/1.0/_images/images/CapgeminiLogoWhite.png new file mode 100644 index 00000000..3e5bf717 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/CapgeminiLogoWhite.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/Capgemini_Logo_2COL_RGB.png b/docs/devonfw.github.io/1.0/_images/images/Capgemini_Logo_2COL_RGB.png new file mode 100644 index 00000000..b04a08ab Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/Capgemini_Logo_2COL_RGB.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/Capgemini_Logo_Small.png b/docs/devonfw.github.io/1.0/_images/images/Capgemini_Logo_Small.png new file mode 100644 index 00000000..fee3a391 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/Capgemini_Logo_Small.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/CobigenContextLocation.png b/docs/devonfw.github.io/1.0/_images/images/CobigenContextLocation.png new file mode 100644 index 00000000..9744124a Binary files 
/dev/null and b/docs/devonfw.github.io/1.0/_images/images/CobigenContextLocation.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/Devcon_Form.JPG b/docs/devonfw.github.io/1.0/_images/images/Devcon_Form.JPG new file mode 100644 index 00000000..e1f667a9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/Devcon_Form.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/Devcon_homepage.JPG b/docs/devonfw.github.io/1.0/_images/images/Devcon_homepage.JPG new file mode 100644 index 00000000..d7ceaf43 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/Devcon_homepage.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/Example_Angular_Restaurant_Screen.png b/docs/devonfw.github.io/1.0/_images/images/Example_Angular_Restaurant_Screen.png new file mode 100644 index 00000000..3d8793d1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/Example_Angular_Restaurant_Screen.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/Image-140917-123815.636.png b/docs/devonfw.github.io/1.0/_images/images/Image-140917-123815.636.png new file mode 100644 index 00000000..6fa3b3b1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/Image-140917-123815.636.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/IntegratedIDE.png b/docs/devonfw.github.io/1.0/_images/images/IntegratedIDE.png new file mode 100644 index 00000000..bb2068dc Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/IntegratedIDE.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/Integrating-Spring-Data/Existing_Dataaccess_Structure.JPG b/docs/devonfw.github.io/1.0/_images/images/Integrating-Spring-Data/Existing_Dataaccess_Structure.JPG new file mode 100644 index 00000000..ec7c6626 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/Integrating-Spring-Data/Existing_Dataaccess_Structure.JPG differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/Integrating-Spring-Data/Structure_With_Spring_Data.JPG b/docs/devonfw.github.io/1.0/_images/images/Integrating-Spring-Data/Structure_With_Spring_Data.JPG new file mode 100644 index 00000000..e4ce58aa Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/Integrating-Spring-Data/Structure_With_Spring_Data.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/Integrating-Spring-Data/TableDaoImpl_Structure.JPG b/docs/devonfw.github.io/1.0/_images/images/Integrating-Spring-Data/TableDaoImpl_Structure.JPG new file mode 100644 index 00000000..05c0f8f7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/Integrating-Spring-Data/TableDaoImpl_Structure.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/OASP-Layering.png b/docs/devonfw.github.io/1.0/_images/images/OASP-Layering.png new file mode 100644 index 00000000..84a065eb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/OASP-Layering.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/OASP.png b/docs/devonfw.github.io/1.0/_images/images/OASP.png new file mode 100644 index 00000000..2b0e2574 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/OASP.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/OASP_dark.png b/docs/devonfw.github.io/1.0/_images/images/OASP_dark.png new file mode 100644 index 00000000..edf59f1a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/OASP_dark.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/OASP_technologies_used.png b/docs/devonfw.github.io/1.0/_images/images/OASP_technologies_used.png new file mode 100644 index 00000000..98db5b7f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/OASP_technologies_used.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/OpenAPI_file_root_folder.png 
b/docs/devonfw.github.io/1.0/_images/images/OpenAPI_file_root_folder.png new file mode 100644 index 00000000..033bc985 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/OpenAPI_file_root_folder.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/Pom.png b/docs/devonfw.github.io/1.0/_images/images/Pom.png new file mode 100644 index 00000000..ee6f8787 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/Pom.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/Project_selection.PNG b/docs/devonfw.github.io/1.0/_images/images/Project_selection.PNG new file mode 100644 index 00000000..76c5b862 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/Project_selection.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/Repositories.PNG b/docs/devonfw.github.io/1.0/_images/images/Repositories.PNG new file mode 100644 index 00000000..fdde31be Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/Repositories.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/SOAP_Code_Gen_CobiGen/SOAP_with_nested_data.png b/docs/devonfw.github.io/1.0/_images/images/SOAP_Code_Gen_CobiGen/SOAP_with_nested_data.png new file mode 100644 index 00000000..4d4e2803 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/SOAP_Code_Gen_CobiGen/SOAP_with_nested_data.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/SOAP_Code_Gen_CobiGen/SOAP_without_nested_data.png b/docs/devonfw.github.io/1.0/_images/images/SOAP_Code_Gen_CobiGen/SOAP_without_nested_data.png new file mode 100644 index 00000000..6785aa58 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/SOAP_Code_Gen_CobiGen/SOAP_without_nested_data.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ServiceExample.png b/docs/devonfw.github.io/1.0/_images/images/ServiceExample.png new file mode 100644 index 00000000..551610e3 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/ServiceExample.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/Services.png b/docs/devonfw.github.io/1.0/_images/images/Services.png new file mode 100644 index 00000000..f9fac78f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/Services.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/0.png b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/0.png new file mode 100644 index 00000000..3e4b31d7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/0.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/1.png b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/1.png new file mode 100644 index 00000000..a168ebfa Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/10.png b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/10.png new file mode 100644 index 00000000..ee452fec Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/10.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/11.png b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/11.png new file mode 100644 index 00000000..bf031376 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/11.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/12.png b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/12.png new file mode 100644 index 00000000..37ecfef2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/12.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/13.png 
b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/13.png new file mode 100644 index 00000000..aa68cf4f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/13.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/14.png b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/14.png new file mode 100644 index 00000000..63c2ed55 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/14.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/2.png b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/2.png new file mode 100644 index 00000000..3e4b31d7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/3.png b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/3.png new file mode 100644 index 00000000..21f206b0 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/4.png b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/4.png new file mode 100644 index 00000000..c097c867 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/4.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/5.png b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/5.png new file mode 100644 index 00000000..d0941916 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/5.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/6.png b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/6.png new file mode 100644 index 00000000..8f7450f8 Binary files 
/dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/6.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/7.png b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/7.png new file mode 100644 index 00000000..0244ebdb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/7.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/8.png b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/8.png new file mode 100644 index 00000000..e4ed5871 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/8.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/9.png b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/9.png new file mode 100644 index 00000000..5464bac8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-clarity-layout/9.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-library/result.png b/docs/devonfw.github.io/1.0/_images/images/angular-library/result.png new file mode 100644 index 00000000..2fe702a8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-library/result.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_0.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_0.png new file mode 100644 index 00000000..f4aeadca Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_0.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_1.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_1.png new file mode 100644 index 00000000..3e4b31d7 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_10.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_10.png new file mode 100644 index 00000000..d84563a7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_10.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_11.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_11.png new file mode 100644 index 00000000..2eeb8fdd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_11.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_12.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_12.png new file mode 100644 index 00000000..d0e81eaa Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_12.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_13.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_13.png new file mode 100644 index 00000000..4b3b4074 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_13.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_14.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_14.png new file mode 100644 index 00000000..f1ff7d9f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_14.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_15.png 
b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_15.png new file mode 100644 index 00000000..b00554fe Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_15.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_16.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_16.png new file mode 100644 index 00000000..4367bd60 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_16.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_17.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_17.png new file mode 100644 index 00000000..d3f5edfb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_17.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_18.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_18.png new file mode 100644 index 00000000..54cb5b00 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_18.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_19.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_19.png new file mode 100644 index 00000000..dc441ee0 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_19.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_2.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_2.png new file mode 100644 index 00000000..a8f8b70f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_2.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_20.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_20.png new file mode 100644 index 00000000..6728163f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_20.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_21.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_21.png new file mode 100644 index 00000000..a4f23dba Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_21.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_22.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_22.png new file mode 100644 index 00000000..98a258c7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_22.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_3.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_3.png new file mode 100644 index 00000000..625228b0 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_4.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_4.png new file mode 100644 index 00000000..97f33148 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_4.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_5.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_5.png new file mode 100644 index 00000000..32de7eee Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_5.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_6.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_6.png new file mode 100644 index 00000000..331b345f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_6.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_7.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_7.png new file mode 100644 index 00000000..fc7a638e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_7.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_8.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_8.png new file mode 100644 index 00000000..db26df0a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_8.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_9.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_9.png new file mode 100644 index 00000000..cae3b40a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/Screenshot_9.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/tablestyle.png b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/tablestyle.png new file mode 100644 index 00000000..8f8a5435 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-primeng-layout/tablestyle.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure1.png b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure1.png new file mode 100644 index 
00000000..8638e11e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure10.png b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure10.png new file mode 100644 index 00000000..b85a70bb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure10.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure11.png b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure11.png new file mode 100644 index 00000000..b351a5eb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure11.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure12.png b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure12.png new file mode 100644 index 00000000..7a2f4d7b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure12.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure13.png b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure13.png new file mode 100644 index 00000000..9d2e909f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure13.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure14.png b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure14.png new file mode 100644 index 00000000..fad86295 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure14.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure15.png b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure15.png new file mode 100644 index 00000000..d7e0f1a7 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure15.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure16.png b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure16.png new file mode 100644 index 00000000..cfe9fbd9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure16.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure17.png b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure17.png new file mode 100644 index 00000000..b60a8367 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure17.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure18.png b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure18.png new file mode 100644 index 00000000..fe6ee92b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure18.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure19.png b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure19.png new file mode 100644 index 00000000..1fe8f608 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure19.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure20.png b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure20.png new file mode 100644 index 00000000..54267100 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure20.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure21.png b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure21.png new file mode 100644 index 00000000..db215fce Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure21.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure3.png b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure3.png new file mode 100644 index 00000000..9fa7f617 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure4.png b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure4.png new file mode 100644 index 00000000..58c84427 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure4.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure5.png b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure5.png new file mode 100644 index 00000000..883bcb04 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure5.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure6.png b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure6.png new file mode 100644 index 00000000..3c05e35c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure6.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure7.png b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure7.png new file mode 100644 index 00000000..3c3cbf0c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure7.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure8.png b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure8.png new file mode 100644 index 00000000..29d9fc2a Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure8.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure9.png b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure9.png new file mode 100644 index 00000000..42915cc7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular-zorro-layout/figure9.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/compile-eager.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/compile-eager.png new file mode 100644 index 00000000..ffce0d19 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/compile-eager.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/compile-first-lazy.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/compile-first-lazy.png new file mode 100644 index 00000000..8bd56e5d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/compile-first-lazy.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/first-lvl-eager.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/first-lvl-eager.png new file mode 100644 index 00000000..2480f2d6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/first-lvl-eager.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/first-lvl-lazy.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/first-lvl-lazy.png new file mode 100644 index 00000000..7f89915d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/first-lvl-lazy.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/first-lvl-wrong-path.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/first-lvl-wrong-path.png new file mode 
100644 index 00000000..68587c34 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/first-lvl-wrong-path.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/levels-app.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/levels-app.png new file mode 100644 index 00000000..0a147442 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/levels-app.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/second-lvl-lazy.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/second-lvl-lazy.png new file mode 100644 index 00000000..0afe2f5a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/second-lvl-lazy.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/second-lvl-left-lazy.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/second-lvl-left-lazy.png new file mode 100644 index 00000000..b2005351 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/second-lvl-left-lazy.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/second-lvl-right-eager-d.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/second-lvl-right-eager-d.png new file mode 100644 index 00000000..47addfcc Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/second-lvl-right-eager-d.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/second-lvl-right-eager.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/second-lvl-right-eager.png new file mode 100644 index 00000000..c55c77e2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-lazy/second-lvl-right-eager.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/angular/angular-pwa/http-serve.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-pwa/http-serve.png new file mode 100644 index 00000000..d6926625 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-pwa/http-serve.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-pwa/lighthouse-ng.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-pwa/lighthouse-ng.png new file mode 100644 index 00000000..774a40f6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-pwa/lighthouse-ng.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-pwa/mts-pwa-rec.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-pwa/mts-pwa-rec.png new file mode 100644 index 00000000..8cf524e3 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-pwa/mts-pwa-rec.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-pwa/ng-serve.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-pwa/ng-serve.png new file mode 100644 index 00000000..9f614131 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-pwa/ng-serve.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-pwa/online-offline-ng.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-pwa/online-offline-ng.png new file mode 100644 index 00000000..0f42e5a4 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-pwa/online-offline-ng.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-pwa/pwa-nopwa-app-ng.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-pwa/pwa-nopwa-app-ng.png new file mode 100644 index 00000000..4724098e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-pwa/pwa-nopwa-app-ng.png 
differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/custom-dark.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/custom-dark.png new file mode 100644 index 00000000..701ccadf Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/custom-dark.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/custom-light.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/custom-light.png new file mode 100644 index 00000000..32aa97ef Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/custom-light.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/deeppurple-amber.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/deeppurple-amber.png new file mode 100644 index 00000000..26fcdd93 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/deeppurple-amber.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/indigo-pink.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/indigo-pink.png new file mode 100644 index 00000000..9af01630 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/indigo-pink.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/palette.PNG b/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/palette.PNG new file mode 100644 index 00000000..3e2e7af4 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/palette.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/pink-bluegrey.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/pink-bluegrey.png new file mode 100644 index 00000000..9cc6a27d Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/pink-bluegrey.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/purple-green.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/purple-green.png new file mode 100644 index 00000000..d23d948d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/purple-green.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/scss-map.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/scss-map.png new file mode 100644 index 00000000..87285543 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/scss-map.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/theme-files-structure.png b/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/theme-files-structure.png new file mode 100644 index 00000000..953d3eaf Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/angular-theming/theme-files-structure.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/back-end.png b/docs/devonfw.github.io/1.0/_images/images/angular/back-end.png new file mode 100644 index 00000000..097b0a0d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/back-end.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/folder_organization.png b/docs/devonfw.github.io/1.0/_images/images/angular/folder_organization.png new file mode 100644 index 00000000..2e921692 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/folder_organization.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/routes.png b/docs/devonfw.github.io/1.0/_images/images/angular/routes.png new file mode 100644 index 00000000..c218eb7a Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/angular/routes.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/angular/testing.JPG b/docs/devonfw.github.io/1.0/_images/images/angular/testing.JPG new file mode 100644 index 00000000..2c9153e6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/angular/testing.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/apache_logo.png b/docs/devonfw.github.io/1.0/_images/images/apache_logo.png new file mode 100644 index 00000000..5b5e925b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/apache_logo.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/app-initializer/loadExternalConfigFalse.png b/docs/devonfw.github.io/1.0/_images/images/app-initializer/loadExternalConfigFalse.png new file mode 100644 index 00000000..f6c90dd9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/app-initializer/loadExternalConfigFalse.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/app-initializer/loadExternalConfigTrue.png b/docs/devonfw.github.io/1.0/_images/images/app-initializer/loadExternalConfigTrue.png new file mode 100644 index 00000000..647ea162 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/app-initializer/loadExternalConfigTrue.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/architecture-layers.svg b/docs/devonfw.github.io/1.0/_images/images/architecture-layers.svg new file mode 100644 index 00000000..63c0c475 --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/architecture-layers.svg @@ -0,0 +1,639 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + c + + + + + + + omponents + + + + + + + + s + + + + + + + ervices + + + + + + + + adapter + + + + + + + + + + module + + + + + + + 
+ s + + + + + + + mart + + + + + + + + dumb + + + + + + + + + store + + + + + + + + model + + + + + + + + use + + + + + + + - + + + + + + + case + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/architecture-modules.svg b/docs/devonfw.github.io/1.0/_images/images/architecture-modules.svg new file mode 100644 index 00000000..2ec15f98 --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/architecture-modules.svg @@ -0,0 +1,358 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + shared + + + + + + + + c + + + + + + + ore + + + + + + + + f + + + + + + + eature + + + + + + + (e.g. booking) + + + + + + + + a + + + + + + + pp + + + + + + + + + + + + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/architecture.png b/docs/devonfw.github.io/1.0/_images/images/architecture.png new file mode 100644 index 00000000..e21753fa Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/architecture.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/architecture_background.png b/docs/devonfw.github.io/1.0/_images/images/architecture_background.png new file mode 100644 index 00000000..91d5af3a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/architecture_background.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/batch_icon.png b/docs/devonfw.github.io/1.0/_images/images/batch_icon.png new file mode 100644 index 00000000..1ca97b15 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/batch_icon.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/bean-mapping-using-dozer/dozer-functionality-overview.png b/docs/devonfw.github.io/1.0/_images/images/bean-mapping-using-dozer/dozer-functionality-overview.png new file mode 100644 index 00000000..ee1b324d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/bean-mapping-using-dozer/dozer-functionality-overview.png 
differ diff --git a/docs/devonfw.github.io/1.0/_images/images/bean-mapping-using-dozer/dozer1.png b/docs/devonfw.github.io/1.0/_images/images/bean-mapping-using-dozer/dozer1.png new file mode 100644 index 00000000..2f9151e7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/bean-mapping-using-dozer/dozer1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/bg.jpg b/docs/devonfw.github.io/1.0/_images/images/bg.jpg new file mode 100644 index 00000000..32e25fac Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/bg.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/bgbw.jpg b/docs/devonfw.github.io/1.0/_images/images/bgbw.jpg new file mode 100644 index 00000000..4a0d30d8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/bgbw.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/bgbw2.jpg b/docs/devonfw.github.io/1.0/_images/images/bgbw2.jpg new file mode 100644 index 00000000..455c7702 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/bgbw2.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/bgsat.jpg b/docs/devonfw.github.io/1.0/_images/images/bgsat.jpg new file mode 100644 index 00000000..37e05ce7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/bgsat.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/blob-streaming/folder-structure.PNG b/docs/devonfw.github.io/1.0/_images/images/blob-streaming/folder-structure.PNG new file mode 100644 index 00000000..d4880bfd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/blob-streaming/folder-structure.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/blob-streaming/html-view-1.PNG b/docs/devonfw.github.io/1.0/_images/images/blob-streaming/html-view-1.PNG new file mode 100644 index 00000000..adef8fff Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/blob-streaming/html-view-1.PNG differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/bundle_folder.png b/docs/devonfw.github.io/1.0/_images/images/bundle_folder.png new file mode 100644 index 00000000..ce1d69f3 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/bundle_folder.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/business_ide.png b/docs/devonfw.github.io/1.0/_images/images/business_ide.png new file mode 100644 index 00000000..e81472b3 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/business_ide.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/business_ide_ext.png b/docs/devonfw.github.io/1.0/_images/images/business_ide_ext.png new file mode 100644 index 00000000..a7a3214f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/business_ide_ext.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/capgemini.png b/docs/devonfw.github.io/1.0/_images/images/capgemini.png new file mode 100644 index 00000000..e323d3dd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/capgemini.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/capgeminiLogo.jpg b/docs/devonfw.github.io/1.0/_images/images/capgeminiLogo.jpg new file mode 100644 index 00000000..4260a548 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/capgeminiLogo.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/angular/angular_directory.png b/docs/devonfw.github.io/1.0/_images/images/ci/angular/angular_directory.png new file mode 100644 index 00000000..af8ae5f1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/angular/angular_directory.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/angular/angular_pipeline_flow.PNG b/docs/devonfw.github.io/1.0/_images/images/ci/angular/angular_pipeline_flow.PNG new file mode 100644 index 00000000..19240869 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/ci/angular/angular_pipeline_flow.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/angular/chrome_installation.png b/docs/devonfw.github.io/1.0/_images/images/ci/angular/chrome_installation.png new file mode 100644 index 00000000..7e4c1c19 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/angular/chrome_installation.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/angular/container1.png b/docs/devonfw.github.io/1.0/_images/images/ci/angular/container1.png new file mode 100644 index 00000000..e2d56614 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/angular/container1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/angular/container2.png b/docs/devonfw.github.io/1.0/_images/images/ci/angular/container2.png new file mode 100644 index 00000000..007662df Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/angular/container2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/angular/container3.png b/docs/devonfw.github.io/1.0/_images/images/ci/angular/container3.png new file mode 100644 index 00000000..743c6753 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/angular/container3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/angular/maven_tool.png b/docs/devonfw.github.io/1.0/_images/images/ci/angular/maven_tool.png new file mode 100644 index 00000000..0e74c771 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/angular/maven_tool.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/angular/nexus3_global_config.png b/docs/devonfw.github.io/1.0/_images/images/ci/angular/nexus3_global_config.png new file mode 100644 index 00000000..ea403bb3 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/angular/nexus3_global_config.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/ci/angular/nexus3_groupid.png b/docs/devonfw.github.io/1.0/_images/images/ci/angular/nexus3_groupid.png new file mode 100644 index 00000000..6407d402 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/angular/nexus3_groupid.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/angular/nexus3_id.png b/docs/devonfw.github.io/1.0/_images/images/ci/angular/nexus3_id.png new file mode 100644 index 00000000..907334c2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/angular/nexus3_id.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/angular/pipeline-config.png b/docs/devonfw.github.io/1.0/_images/images/ci/angular/pipeline-config.png new file mode 100644 index 00000000..41c2e972 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/angular/pipeline-config.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/angular/sonar-env.png b/docs/devonfw.github.io/1.0/_images/images/ci/angular/sonar-env.png new file mode 100644 index 00000000..cbac6079 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/angular/sonar-env.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/angular/sonar-scanner.png b/docs/devonfw.github.io/1.0/_images/images/ci/angular/sonar-scanner.png new file mode 100644 index 00000000..7c44f9a2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/angular/sonar-scanner.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/angular/src_directory.png b/docs/devonfw.github.io/1.0/_images/images/ci/angular/src_directory.png new file mode 100644 index 00000000..5a099a7d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/angular/src_directory.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/clientserver/clientserver_pipeline_flow.PNG 
b/docs/devonfw.github.io/1.0/_images/images/ci/clientserver/clientserver_pipeline_flow.PNG new file mode 100644 index 00000000..b0a2bcd2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/clientserver/clientserver_pipeline_flow.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/deployment/deployment_arch.png b/docs/devonfw.github.io/1.0/_images/images/ci/deployment/deployment_arch.png new file mode 100644 index 00000000..f8f050d1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/deployment/deployment_arch.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/deployment/docker-compose.PNG b/docs/devonfw.github.io/1.0/_images/images/ci/deployment/docker-compose.PNG new file mode 100644 index 00000000..9cdf6bc1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/deployment/docker-compose.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/deployment/docker.png b/docs/devonfw.github.io/1.0/_images/images/ci/deployment/docker.png new file mode 100644 index 00000000..68bd7d47 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/deployment/docker.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/deployment/dockerfile-angular.PNG b/docs/devonfw.github.io/1.0/_images/images/ci/deployment/dockerfile-angular.PNG new file mode 100644 index 00000000..d23463d5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/deployment/dockerfile-angular.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/deployment/dockerfile-java.PNG b/docs/devonfw.github.io/1.0/_images/images/ci/deployment/dockerfile-java.PNG new file mode 100644 index 00000000..13a3cc9e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/deployment/dockerfile-java.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/future/deployment_schema.PNG 
b/docs/devonfw.github.io/1.0/_images/images/ci/future/deployment_schema.PNG new file mode 100644 index 00000000..c42d555c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/future/deployment_schema.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/java/java_directory.png b/docs/devonfw.github.io/1.0/_images/images/ci/java/java_directory.png new file mode 100644 index 00000000..01a9f3c8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/java/java_directory.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/java/java_pipeline_flow.PNG b/docs/devonfw.github.io/1.0/_images/images/ci/java/java_pipeline_flow.PNG new file mode 100644 index 00000000..63b2c861 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/java/java_pipeline_flow.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/mts_folder.PNG b/docs/devonfw.github.io/1.0/_images/images/ci/mts_folder.PNG new file mode 100644 index 00000000..c09fbf79 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/mts_folder.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ci/pl_logo.png b/docs/devonfw.github.io/1.0/_images/images/ci/pl_logo.png new file mode 100644 index 00000000..923f5c24 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ci/pl_logo.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/cla/cla-dashboard-githubgist.jpg b/docs/devonfw.github.io/1.0/_images/images/cla/cla-dashboard-githubgist.jpg new file mode 100644 index 00000000..02984dd9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/cla/cla-dashboard-githubgist.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/cla/cla-dashboard-new.jpg b/docs/devonfw.github.io/1.0/_images/images/cla/cla-dashboard-new.jpg new file mode 100644 index 00000000..202fe073 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/cla/cla-dashboard-new.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/cla/cla-dashboard-webhookerror.jpg b/docs/devonfw.github.io/1.0/_images/images/cla/cla-dashboard-webhookerror.jpg new file mode 100644 index 00000000..b43dbb48 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/cla/cla-dashboard-webhookerror.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/cla/cla-diagram-admin.jpg b/docs/devonfw.github.io/1.0/_images/images/cla/cla-diagram-admin.jpg new file mode 100644 index 00000000..fb14fae1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/cla/cla-diagram-admin.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/cla/cla-diagram-user.jpg b/docs/devonfw.github.io/1.0/_images/images/cla/cla-diagram-user.jpg new file mode 100644 index 00000000..94a3bae5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/cla/cla-diagram-user.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/cla/cla-interface-01.jpg b/docs/devonfw.github.io/1.0/_images/images/cla/cla-interface-01.jpg new file mode 100644 index 00000000..7e1ebda5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/cla/cla-interface-01.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/cla/cla-interface-02.jpg b/docs/devonfw.github.io/1.0/_images/images/cla/cla-interface-02.jpg new file mode 100644 index 00000000..e52b4d32 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/cla/cla-interface-02.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/cla/cla-interface-confirmation.jpg b/docs/devonfw.github.io/1.0/_images/images/cla/cla-interface-confirmation.jpg new file mode 100644 index 00000000..af31cd86 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/cla/cla-interface-confirmation.jpg differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/cla/cla-interface-done.jpg b/docs/devonfw.github.io/1.0/_images/images/cla/cla-interface-done.jpg new file mode 100644 index 00000000..0e96d7ad Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/cla/cla-interface-done.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/cla/cla-interface.jpg b/docs/devonfw.github.io/1.0/_images/images/cla/cla-interface.jpg new file mode 100644 index 00000000..13ba3701 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/cla/cla-interface.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/cla/cla-permissions.jpg b/docs/devonfw.github.io/1.0/_images/images/cla/cla-permissions.jpg new file mode 100644 index 00000000..0227f886 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/cla/cla-permissions.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/cla/cla-pr-claassistantconditions.jpg b/docs/devonfw.github.io/1.0/_images/images/cla/cla-pr-claassistantconditions.jpg new file mode 100644 index 00000000..dea1cf23 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/cla/cla-pr-claassistantconditions.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/cla/cla-pr-claform.jpg b/docs/devonfw.github.io/1.0/_images/images/cla/cla-pr-claform.jpg new file mode 100644 index 00000000..09cd4d30 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/cla/cla-pr-claform.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/cla/cla-pr-error.jpg b/docs/devonfw.github.io/1.0/_images/images/cla/cla-pr-error.jpg new file mode 100644 index 00000000..c0149016 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/cla/cla-pr-error.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/cla/cla-pr-howtosign.jpg b/docs/devonfw.github.io/1.0/_images/images/cla/cla-pr-howtosign.jpg new file mode 100644 index 00000000..26180eda Binary files 
/dev/null and b/docs/devonfw.github.io/1.0/_images/images/cla/cla-pr-howtosign.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/cla/cla-pr-notsigned.jpg b/docs/devonfw.github.io/1.0/_images/images/cla/cla-pr-notsigned.jpg new file mode 100644 index 00000000..fcfe4b3f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/cla/cla-pr-notsigned.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/client-gui-cordova/cordova_init.png b/docs/devonfw.github.io/1.0/_images/images/client-gui-cordova/cordova_init.png new file mode 100644 index 00000000..3c0f3309 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/client-gui-cordova/cordova_init.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/client-gui-cordova/cordova_platforms.png b/docs/devonfw.github.io/1.0/_images/images/client-gui-cordova/cordova_platforms.png new file mode 100644 index 00000000..b9e5721b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/client-gui-cordova/cordova_platforms.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/client-gui-cordova/standard_build.png b/docs/devonfw.github.io/1.0/_images/images/client-gui-cordova/standard_build.png new file mode 100644 index 00000000..c9dac0e4 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/client-gui-cordova/standard_build.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/cloud_icon.png b/docs/devonfw.github.io/1.0/_images/images/cloud_icon.png new file mode 100644 index 00000000..fc565675 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/cloud_icon.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/cobigen.png b/docs/devonfw.github.io/1.0/_images/images/cobigen.png new file mode 100644 index 00000000..3e7ae247 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/cobigen.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/cobigen_generate0.png b/docs/devonfw.github.io/1.0/_images/images/cobigen_generate0.png new file mode 100644 index 00000000..3f9ddfcb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/cobigen_generate0.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/cobigen_generate1.png b/docs/devonfw.github.io/1.0/_images/images/cobigen_generate1.png new file mode 100644 index 00000000..3917e4ee Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/cobigen_generate1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/code.jpg b/docs/devonfw.github.io/1.0/_images/images/code.jpg new file mode 100644 index 00000000..faac9299 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/code.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/codeSat.jpg b/docs/devonfw.github.io/1.0/_images/images/codeSat.jpg new file mode 100644 index 00000000..6e7a8316 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/codeSat.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/codeSat2.jpg b/docs/devonfw.github.io/1.0/_images/images/codeSat2.jpg new file mode 100644 index 00000000..4d9afa59 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/codeSat2.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/codeSat3.jpg b/docs/devonfw.github.io/1.0/_images/images/codeSat3.jpg new file mode 100644 index 00000000..32e25fac Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/codeSat3.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/code_icon.png b/docs/devonfw.github.io/1.0/_images/images/code_icon.png new file mode 100644 index 00000000..72c4d880 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/code_icon.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/coding.png b/docs/devonfw.github.io/1.0/_images/images/coding.png new file mode 
100644 index 00000000..76c0c682 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/coding.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/collage.png b/docs/devonfw.github.io/1.0/_images/images/collage.png new file mode 100644 index 00000000..8d2eabd1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/collage.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/comillas.png b/docs/devonfw.github.io/1.0/_images/images/comillas.png new file mode 100644 index 00000000..01644235 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/comillas.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/compatibility-guide-for-java7/compatibility-guide-for-java7-01.png b/docs/devonfw.github.io/1.0/_images/images/compatibility-guide-for-java7/compatibility-guide-for-java7-01.png new file mode 100644 index 00000000..8c97611b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/compatibility-guide-for-java7/compatibility-guide-for-java7-01.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/compatibility-guide-for-java7/compatibility-guide-for-java7-02.png b/docs/devonfw.github.io/1.0/_images/images/compatibility-guide-for-java7/compatibility-guide-for-java7-02.png new file mode 100644 index 00000000..ec766cb7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/compatibility-guide-for-java7/compatibility-guide-for-java7-02.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/compatibility-guide-for-java7/compatibility-guide-for-java7-03.png b/docs/devonfw.github.io/1.0/_images/images/compatibility-guide-for-java7/compatibility-guide-for-java7-03.png new file mode 100644 index 00000000..adb3c604 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/compatibility-guide-for-java7/compatibility-guide-for-java7-03.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/compatibility-guide-for-java7/compatibility-guide-for-java7-04.png b/docs/devonfw.github.io/1.0/_images/images/compatibility-guide-for-java7/compatibility-guide-for-java7-04.png new file mode 100644 index 00000000..a7202229 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/compatibility-guide-for-java7/compatibility-guide-for-java7-04.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/component-decomposition-example-1.svg b/docs/devonfw.github.io/1.0/_images/images/component-decomposition-example-1.svg new file mode 100644 index 00000000..1370c66d --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/component-decomposition-example-1.svg @@ -0,0 +1,101 @@ + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/component-decomposition-example-2.svg b/docs/devonfw.github.io/1.0/_images/images/component-decomposition-example-2.svg new file mode 100644 index 00000000..747697ae --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/component-decomposition-example-2.svg @@ -0,0 +1,254 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/component-decomposition-example-component-tree.svg b/docs/devonfw.github.io/1.0/_images/images/component-decomposition-example-component-tree.svg new file mode 100644 index 00000000..3b0a7061 --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/component-decomposition-example-component-tree.svg @@ -0,0 +1,356 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + FormListpicker + + + + + + + + FilterResultTable + + + + + + + + FilterInput + + + + + + + + + + + + + + + + + + + DirectInput + + + + + + + + Listpicker + + + 
+ + + + + ListpickerDropdown + + + + + + + + + + + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/component-tree-highlighted-subtree.svg b/docs/devonfw.github.io/1.0/_images/images/component-tree-highlighted-subtree.svg new file mode 100644 index 00000000..d380bf5b --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/component-tree-highlighted-subtree.svg @@ -0,0 +1,950 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Overview + + + + + + + + SearchPanel + + + + + + + + QuickSearchTab + + + + + + + + Details + + + + + + + + App + + + + + + + + Toolbar + + + + + + + + DetailSearchTab + + + + + + + + CriteriaForm + + + + + + + + Header + + + + + + + + UserInfo + + + + + + + + ActionToolbar + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + LastActivitiesPanel + + + + + + + + + + + + + ResultPanel + + + + + + + + Table + + + + + + + + ResultActions + + + + + + + + LastActivitiesPanel + + + + + + + + + + + + + + + + + + + + + + + + + + + /overview + + + + + + + /details + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/component-tree.svg b/docs/devonfw.github.io/1.0/_images/images/component-tree.svg new file mode 100644 index 00000000..010e562b --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/component-tree.svg @@ -0,0 +1,950 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Overview + + + + + + + + SearchPanel + + + + + + + + QuickSearchTab + + + + + + + + Details + + + + + + + + App + + + + + + + + Toolbar + + + + + + + + DetailSearchTab + + + + + 
+ + + CriteriaForm + + + + + + + + Header + + + + + + + + UserInfo + + + + + + + + ActionToolbar + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + LastActivitiesPanel + + + + + + + + + + + + + ResultPanel + + + + + + + + Table + + + + + + + + ResultActions + + + + + + + + LastActivitiesPanel + + + + + + + + + + + + + + + + + + + + + + + + + + + /overview + + + + + + + /details + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/components-layer-service-layer-boundaries.svg b/docs/devonfw.github.io/1.0/_images/images/components-layer-service-layer-boundaries.svg new file mode 100644 index 00000000..70484237 --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/components-layer-service-layer-boundaries.svg @@ -0,0 +1,355 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Smart + + + + + + + Component + + + + + + + + Use Case Service + + + + + + + + + + + + + Store + + + + + + + + + + + + subscribe() + + + + + + + action() + + + + + + + + + + Services Layer + + + + + + + Components Layer + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/gitlab-new-prject-form.jpg b/docs/devonfw.github.io/1.0/_images/images/configuration/gitlab-new-prject-form.jpg new file mode 100644 index 00000000..427a1e08 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/gitlab-new-prject-form.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/gitlab-new-prject.jpg b/docs/devonfw.github.io/1.0/_images/images/configuration/gitlab-new-prject.jpg new file mode 100644 index 00000000..ff235b79 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/gitlab-new-prject.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-build-monitor-view-add.jpg 
b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-build-monitor-view-add.jpg new file mode 100644 index 00000000..5a4836cb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-build-monitor-view-add.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-build-monitor-view-configuration.jpg b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-build-monitor-view-configuration.jpg new file mode 100644 index 00000000..a40c30ff Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-build-monitor-view-configuration.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-build-monitor-view-output-config.jpg b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-build-monitor-view-output-config.jpg new file mode 100644 index 00000000..dbe0b50e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-build-monitor-view-output-config.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-build-monitor-view-output.jpg b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-build-monitor-view-output.jpg new file mode 100644 index 00000000..9ce1112b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-build-monitor-view-output.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-config-file-management.jpg b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-config-file-management.jpg new file mode 100644 index 00000000..b881b616 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-config-file-management.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-config-fp.png b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-config-fp.png new file mode 
100644 index 00000000..183527a6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-config-fp.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-edit-configuration-file.jpg b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-edit-configuration-file.jpg new file mode 100644 index 00000000..54bce934 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-edit-configuration-file.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-first-admin-user.png b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-first-admin-user.png new file mode 100644 index 00000000..c53a6c4e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-first-admin-user.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-global-maven.png b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-global-maven.png new file mode 100644 index 00000000..ba365448 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-global-maven.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-mave-tool-name.jpg b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-mave-tool-name.jpg new file mode 100644 index 00000000..5ab16f30 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-mave-tool-name.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-maven-settings.png b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-maven-settings.png new file mode 100644 index 00000000..fc38a7c5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-maven-settings.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-new-view.jpg 
b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-new-view.jpg new file mode 100644 index 00000000..a8193a4c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-new-view.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-node-tool-name.jpg b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-node-tool-name.jpg new file mode 100644 index 00000000..02c5f21f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-node-tool-name.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-url.png b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-url.png new file mode 100644 index 00000000..d4dee095 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-url.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-yarn-tool-name.jpg b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-yarn-tool-name.jpg new file mode 100644 index 00000000..bcdc71f4 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkins-yarn-tool-name.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/jenkinsfile-cicd-activity-diagram.jpg b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkinsfile-cicd-activity-diagram.jpg new file mode 100644 index 00000000..10315158 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkinsfile-cicd-activity-diagram.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/jenkinsfile-stages.jpg b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkinsfile-stages.jpg new file mode 100644 index 00000000..c1b51e30 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/jenkinsfile-stages.jpg differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-create-repository-form.png b/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-create-repository-form.png new file mode 100644 index 00000000..a3403eca Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-create-repository-form.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-create-repository.png b/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-create-repository.png new file mode 100644 index 00000000..7fafd300 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-create-repository.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-create-user-form.png b/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-create-user-form.png new file mode 100644 index 00000000..03d4b1ed Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-create-user-form.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-create-user.png b/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-create-user.png new file mode 100644 index 00000000..0c5750cb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-create-user.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-jenkins-credentials-form.png b/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-jenkins-credentials-form.png new file mode 100644 index 00000000..865731f8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-jenkins-credentials-form.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-jenkins-credentials.png b/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-jenkins-credentials.png new file mode 100644 index 00000000..2dc26974 Binary files 
/dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-jenkins-credentials.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-jenkins-global-maven-form.png b/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-jenkins-global-maven-form.png new file mode 100644 index 00000000..3b50093f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-jenkins-global-maven-form.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-stored-artifacts.png b/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-stored-artifacts.png new file mode 100644 index 00000000..0d7bd156 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/nexus-stored-artifacts.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-deployments-actions.png b/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-deployments-actions.png new file mode 100644 index 00000000..0a6b9ac2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-deployments-actions.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-deployments-menu.png b/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-deployments-menu.png new file mode 100644 index 00000000..585a4cde Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-deployments-menu.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-deployments-resource-limits.png b/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-deployments-resource-limits.png new file mode 100644 index 00000000..39709b6b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-deployments-resource-limits.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-deployments-yaml-resources.png b/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-deployments-yaml-resources.png new file mode 100644 index 00000000..89123d38 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-deployments-yaml-resources.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-jenkins-configure-environments-repo.jpg b/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-jenkins-configure-environments-repo.jpg new file mode 100644 index 00000000..82a87080 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-jenkins-configure-environments-repo.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-jenkins-plugin-name.jpg b/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-jenkins-plugin-name.jpg new file mode 100644 index 00000000..6206d30f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-jenkins-plugin-name.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-jenkins-plugin.png b/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-jenkins-plugin.png new file mode 100644 index 00000000..3c973820 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-jenkins-plugin.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-namespace-name.jpg b/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-namespace-name.jpg new file mode 100644 index 00000000..3e2925d1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-namespace-name.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-secrets-menu.jpg 
b/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-secrets-menu.jpg new file mode 100644 index 00000000..d73675c2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/openshift-secrets-menu.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/plugins-jenkins.png b/docs/devonfw.github.io/1.0/_images/images/configuration/plugins-jenkins.png new file mode 100644 index 00000000..fa61a478 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/plugins-jenkins.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/sa-secret.png b/docs/devonfw.github.io/1.0/_images/images/configuration/sa-secret.png new file mode 100644 index 00000000..8a4d915b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/sa-secret.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/sa-secret2.png b/docs/devonfw.github.io/1.0/_images/images/configuration/sa-secret2.png new file mode 100644 index 00000000..e8aeccef Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/sa-secret2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/sonarqube-administration.png b/docs/devonfw.github.io/1.0/_images/images/configuration/sonarqube-administration.png new file mode 100644 index 00000000..9f17519b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/sonarqube-administration.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/sonarqube-jenkins-scanner.png b/docs/devonfw.github.io/1.0/_images/images/configuration/sonarqube-jenkins-scanner.png new file mode 100644 index 00000000..510608fc Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/sonarqube-jenkins-scanner.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/sonarqube-jenkins-server.png 
b/docs/devonfw.github.io/1.0/_images/images/configuration/sonarqube-jenkins-server.png new file mode 100644 index 00000000..ea657b26 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/sonarqube-jenkins-server.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/sonarqube-token.png b/docs/devonfw.github.io/1.0/_images/images/configuration/sonarqube-token.png new file mode 100644 index 00000000..745dfde1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/sonarqube-token.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/configuration/sonarqube-webhook.png b/docs/devonfw.github.io/1.0/_images/images/configuration/sonarqube-webhook.png new file mode 100644 index 00000000..6a895f77 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/configuration/sonarqube-webhook.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/content.png b/docs/devonfw.github.io/1.0/_images/images/content.png new file mode 100644 index 00000000..57dde1a3 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/content.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/contributing/fork-github-1.PNG b/docs/devonfw.github.io/1.0/_images/images/contributing/fork-github-1.PNG new file mode 100644 index 00000000..c27bcac8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/contributing/fork-github-1.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/contributing/fork_repo.PNG b/docs/devonfw.github.io/1.0/_images/images/contributing/fork_repo.PNG new file mode 100644 index 00000000..ecb11a4a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/contributing/fork_repo.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/contributing/issue_list.PNG b/docs/devonfw.github.io/1.0/_images/images/contributing/issue_list.PNG new file mode 100644 index 00000000..3edb446e Binary 
files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/contributing/issue_list.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/contributing/pr_commenting.PNG b/docs/devonfw.github.io/1.0/_images/images/contributing/pr_commenting.PNG new file mode 100644 index 00000000..3bcf0c24 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/contributing/pr_commenting.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/contributing/travis_failure.png b/docs/devonfw.github.io/1.0/_images/images/contributing/travis_failure.png new file mode 100644 index 00000000..9725aeff Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/contributing/travis_failure.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/corte_1.png b/docs/devonfw.github.io/1.0/_images/images/corte_1.png new file mode 100644 index 00000000..e1c81c12 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/corte_1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/corte_2.png b/docs/devonfw.github.io/1.0/_images/images/corte_2.png new file mode 100644 index 00000000..a0bf4db5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/corte_2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/corte_3.png b/docs/devonfw.github.io/1.0/_images/images/corte_3.png new file mode 100644 index 00000000..a1b02164 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/corte_3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/corte_4.png b/docs/devonfw.github.io/1.0/_images/images/corte_4.png new file mode 100644 index 00000000..c0867326 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/corte_4.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/create-new-app/devon-guide-newapp-add-archetype.png b/docs/devonfw.github.io/1.0/_images/images/create-new-app/devon-guide-newapp-add-archetype.png new file mode 100644 index 
00000000..abd00b69 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/create-new-app/devon-guide-newapp-add-archetype.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/create-new-app/devon-guide-newapp-archetype-parameters.PNG b/docs/devonfw.github.io/1.0/_images/images/create-new-app/devon-guide-newapp-archetype-parameters.PNG new file mode 100644 index 00000000..6dfb512b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/create-new-app/devon-guide-newapp-archetype-parameters.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/create-new-app/devon-guide-newapp-archetype-parameters_new.PNG b/docs/devonfw.github.io/1.0/_images/images/create-new-app/devon-guide-newapp-archetype-parameters_new.PNG new file mode 100644 index 00000000..59aa5ec6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/create-new-app/devon-guide-newapp-archetype-parameters_new.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/create-new-app/devon-guide-newapp-choose-archetype.PNG b/docs/devonfw.github.io/1.0/_images/images/create-new-app/devon-guide-newapp-choose-archetype.PNG new file mode 100644 index 00000000..28e3b3ce Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/create-new-app/devon-guide-newapp-choose-archetype.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/crud-schematic.PNG b/docs/devonfw.github.io/1.0/_images/images/crud-schematic.PNG new file mode 100644 index 00000000..f1b94f8d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/crud-schematic.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/cypress/browserTab.jpg b/docs/devonfw.github.io/1.0/_images/images/cypress/browserTab.jpg new file mode 100644 index 00000000..9dff976e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/cypress/browserTab.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/cypress/contextImg.jpg 
b/docs/devonfw.github.io/1.0/_images/images/cypress/contextImg.jpg new file mode 100644 index 00000000..c925bb34 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/cypress/contextImg.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/cypress/reporter.jpg b/docs/devonfw.github.io/1.0/_images/images/cypress/reporter.jpg new file mode 100644 index 00000000..fe821059 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/cypress/reporter.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/dashboard-multiple-ides.png b/docs/devonfw.github.io/1.0/_images/images/dashboard-multiple-ides.png new file mode 100644 index 00000000..f5dd6648 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/dashboard-multiple-ides.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/data-box.jpg b/docs/devonfw.github.io/1.0/_images/images/data-box.jpg new file mode 100644 index 00000000..6d38b892 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/data-box.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/database-configuration/db-config1.png b/docs/devonfw.github.io/1.0/_images/images/database-configuration/db-config1.png new file mode 100644 index 00000000..be794eea Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/database-configuration/db-config1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/database-configuration/db-config2.png b/docs/devonfw.github.io/1.0/_images/images/database-configuration/db-config2.png new file mode 100644 index 00000000..ba161351 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/database-configuration/db-config2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/database-configuration/db-config3.png b/docs/devonfw.github.io/1.0/_images/images/database-configuration/db-config3.png new file mode 100644 index 00000000..6cc575b4 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/database-configuration/db-config3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/database-configuration/db-config4.png b/docs/devonfw.github.io/1.0/_images/images/database-configuration/db-config4.png new file mode 100644 index 00000000..31bc48c7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/database-configuration/db-config4.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/datamodel.drawio b/docs/devonfw.github.io/1.0/_images/images/datamodel.drawio new file mode 100644 index 00000000..d3e28c4e --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/datamodel.drawio @@ -0,0 +1 @@ +7VpLc9sgEP41PqZjPWwrx9h59JB0mrgzbU4dIrDEFIGKcGzn1xcs0APZlqO4bWaiTA5iWXZh+b6FlTzwZsn6hoM0vmMQkYE7hOuBdzlwXcd33YH6H8JNLpmMhrkg4hhqpVIwxy9IC43aEkOU1RQFY0TgtC4MGaUoFDUZ4Jyt6moLRupeUxChhmAeAtKUfsdQxLk0cCel/DPCUWw8O+PzvCcBRlmvJIsBZKuKyLsaeDPOmMifkvUMERU8E5d83PWe3mJiHFFxzIDPZ8ibO9D7ubjzw/nyPvBv47NRbuUZkKVe8BWN5JoSZTSfttiYWGQrnBBAZWu6YFTMdc9QtsMYE3gLNmyp5pIJEP4yrWnMOH6R+oDILkcKZDcXeqvdsbKGCZkxwrgUULZ1UA6aK2PaDUeZHPbVrNmxRHdgXVO8BZkwE2SEgDTDT9spq4EJ4BGmUyYES7RSM6Q6ys+IC7SuiHSIbxBLkOAbqWJ6x3q7N4YBur0q0VPoxBXkGBnQgI0K04W3BwlwQCO5gsKdN7TcOU7T3XCHOxn3mjtABOIUCDRlSwqzKpLkQ2WlpWiLr1dgbdzA2gNY3eIQ0Qz1WOuANb++977v91DLoRY0oHaRpgSHQGBGZyxJ5cb3Ce4kCc7zd8DgY6LuvIG6L4wngMiNgn2e6w45N+gT3R7IOX4Dc7SBMUThhboKV5AgEgMc2alh47gGR0YbYpAwCr/FmFoQk3614BqrGW+3GkF5ddZuGRcxixgF5KqUTiUXImnqkqCFQrGCgEzK5EKLnzRw5KRkWH4os59GpvlovKjG5brW2pjWGovKMNl6NMuUz+Ug1SjGUFhZw164ZmzJQ9R+lZYxiZA4tGW5ngrWUfm2hsUzUxdxRORh9lyvUg7g/CvD6sArKFWHuDu0mJKvVg+qFhWWHSdoMZSH44Aho8gWiwzVdE7GkUOXgT4hn+AO4PZFTgG2ZpXTJ+T/k5DN66K2hBy8i4Q8sjAejLplZNuQ61uG3kNGnvQkqZKkBviSMu7xnOnOkuBIlozfA0sczyo+x5aJrizxRpahPSyRGAObilqqFLJD9yyrdLFf0toLdA/qy4d8Bv+ess1XKj1lm5SdvCfKnnei7FsPMsdCsNORojbX/clfoqj9dqGFot74/1P0xlmmk8kQ/J5O7omIA/gFpzvqnO3nuAf1hemDVjmNkmYH4Y6ucj5kkbMTac2vhq88Clrzd7e6oVsu35klWzPzIQq2ZubRbuC9MfV6gQ2hjm91Rn6LoT2p91T4apYHTgNg27QAnoq0VYGXOfx5TpL9pz8BT
4hMZU6KuOKKyVsD11ts/0x21FeREiz61wba/aB4ndaSb/aT6WSHcm0ryrjLZvlLg3ybyt9reFd/AA== \ No newline at end of file diff --git a/docs/devonfw.github.io/1.0/_images/images/datamodel.png b/docs/devonfw.github.io/1.0/_images/images/datamodel.png new file mode 100644 index 00000000..33060517 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/datamodel.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/december.png b/docs/devonfw.github.io/1.0/_images/images/december.png new file mode 100644 index 00000000..0c7a6800 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/december.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/dependency_injection.png b/docs/devonfw.github.io/1.0/_images/images/dependency_injection.png new file mode 100644 index 00000000..e743a8ad Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/dependency_injection.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/desktop_icon.png b/docs/devonfw.github.io/1.0/_images/images/desktop_icon.png new file mode 100644 index 00000000..0fbcb96b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/desktop_icon.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-js-structure.png b/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-js-structure.png new file mode 100644 index 00000000..1162f263 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-js-structure.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-new-js-module.png b/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-new-js-module.png new file mode 100644 index 00000000..f59d034b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-new-js-module.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-new-module.png b/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-new-module.png 
new file mode 100644 index 00000000..86f498b7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-new-module.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-new-module2.png b/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-new-module2.png new file mode 100644 index 00000000..5202efd9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-new-module2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-structure-gui.png b/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-structure-gui.png new file mode 100644 index 00000000..9c134388 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-structure-gui.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-structure.png b/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-structure.png new file mode 100644 index 00000000..709b194b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-structure.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-using-custom-command-gui.png b/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-using-custom-command-gui.png new file mode 100644 index 00000000..5d0057d7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-using-custom-command-gui.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-using-custom-js-command-gui.png b/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-using-custom-js-command-gui.png new file mode 100644 index 00000000..7760a215 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devcon/devcon-using-custom-js-command-gui.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devcon/devcon.png b/docs/devonfw.github.io/1.0/_images/images/devcon/devcon.png new file mode 100644 index 00000000..94fb069e Binary files 
/dev/null and b/docs/devonfw.github.io/1.0/_images/images/devcon/devcon.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devconlogo_full.png b/docs/devonfw.github.io/1.0/_images/images/devconlogo_full.png new file mode 100644 index 00000000..83897d13 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devconlogo_full.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devconlogo_imgonly.png b/docs/devonfw.github.io/1.0/_images/images/devconlogo_imgonly.png new file mode 100644 index 00000000..12bfcf54 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devconlogo_imgonly.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/FindBugs1.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/FindBugs1.png new file mode 100644 index 00000000..67282d9f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/FindBugs1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/FindBugs2.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/FindBugs2.png new file mode 100644 index 00000000..37acb557 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/FindBugs2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/FindBugs3.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/FindBugs3.png new file mode 100644 index 00000000..c280465b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/FindBugs3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/FindBugs4.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/FindBugs4.png new file mode 100644 index 00000000..32b4ae1e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/FindBugs4.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/Sonar_add_server.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/Sonar_add_server.png 
new file mode 100644 index 00000000..c81cc35d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/Sonar_add_server.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/analyse-project.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/analyse-project.png new file mode 100644 index 00000000..0ba623ba Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/analyse-project.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/associate-sonarqube.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/associate-sonarqube.png new file mode 100644 index 00000000..d126b58e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/associate-sonarqube.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/change-link-with-project.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/change-link-with-project.png new file mode 100644 index 00000000..45c6c98b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/change-link-with-project.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/check_code_pmd.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/check_code_pmd.png new file mode 100644 index 00000000..338d29eb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/check_code_pmd.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/checkstyle.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/checkstyle.png new file mode 100644 index 00000000..b8d3db49 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/checkstyle.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/checkstyle2.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/checkstyle2.png new file mode 100644 index 00000000..3c1a46c7 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/devon-ide/checkstyle2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/checkstyle3.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/checkstyle3.png new file mode 100644 index 00000000..244ecaba Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/checkstyle3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/checkstyle4.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/checkstyle4.png new file mode 100644 index 00000000..de7c9ace Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/checkstyle4.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/checkstyle5.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/checkstyle5.png new file mode 100644 index 00000000..79be814c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/checkstyle5.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/cobigen.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/cobigen.png new file mode 100644 index 00000000..3fd75807 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/cobigen.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/eclipse-settings.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/eclipse-settings.png new file mode 100644 index 00000000..6d6c6ebc Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/eclipse-settings.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/integrated-ide.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/integrated-ide.png new file mode 100644 index 00000000..9b0361eb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/integrated-ide.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/link-with-project.png 
b/docs/devonfw.github.io/1.0/_images/images/devon-ide/link-with-project.png new file mode 100644 index 00000000..c7cfc12c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/link-with-project.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/pmd_config.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/pmd_config.png new file mode 100644 index 00000000..14bbac2c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/pmd_config.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/pmd_generate_reports.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/pmd_generate_reports.png new file mode 100644 index 00000000..d83e3104 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/pmd_generate_reports.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/pmd_run.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/pmd_run.png new file mode 100644 index 00000000..cbb06867 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/pmd_run.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/pmd_select_reports.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/pmd_select_reports.png new file mode 100644 index 00000000..4f049e53 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/pmd_select_reports.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/soap-create-new-project.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/soap-create-new-project.png new file mode 100644 index 00000000..53737101 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/soap-create-new-project.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/soap-new-project.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/soap-new-project.png new file mode 100644 index 
00000000..18043e5a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/soap-new-project.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/soap-perspective.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/soap-perspective.png new file mode 100644 index 00000000..a4f1a0f1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/soap-perspective.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/soap-preferences.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/soap-preferences.png new file mode 100644 index 00000000..d2f980cc Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/soap-preferences.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/soap-req-response.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/soap-req-response.png new file mode 100644 index 00000000..b63e6afc Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/soap-req-response.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/sonarQube-issues-view.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/sonarQube-issues-view.png new file mode 100644 index 00000000..07668543 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/sonarQube-issues-view.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/sonarqube1.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/sonarqube1.png new file mode 100644 index 00000000..69df8c4b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-ide/sonarqube1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-ide/unlink-with-project.png b/docs/devonfw.github.io/1.0/_images/images/devon-ide/unlink-with-project.png new file mode 100644 index 00000000..de95e003 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/devon-ide/unlink-with-project.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/create-project-eclipse.png b/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/create-project-eclipse.png new file mode 100644 index 00000000..0629b6a7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/create-project-eclipse.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/module.png b/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/module.png new file mode 100644 index 00000000..625a8441 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/module.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/new-maven-module2.png b/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/new-maven-module2.png new file mode 100644 index 00000000..ee85a5f7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/new-maven-module2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/new-maven-module3.png b/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/new-maven-module3.png new file mode 100644 index 00000000..c50ee118 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/new-maven-module3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/new-maven-module4.png b/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/new-maven-module4.png new file mode 100644 index 00000000..caed0554 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/new-maven-module4.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/new-maven-module5.png 
b/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/new-maven-module5.png new file mode 100644 index 00000000..12b1fdf2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/new-maven-module5.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/new-maven-project.png b/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/new-maven-project.png new file mode 100644 index 00000000..c484627a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/new-maven-project.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/new_maven_module.png b/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/new_maven_module.png new file mode 100644 index 00000000..93348844 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/new_maven_module.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/new_maven_module0.png b/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/new_maven_module0.png new file mode 100644 index 00000000..4050136c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/new_maven_module0.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/select-archetype.png b/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/select-archetype.png new file mode 100644 index 00000000..fa158148 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon-module-dev-guide/select-archetype.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon.png b/docs/devonfw.github.io/1.0/_images/images/devon.png new file mode 100644 index 00000000..ed4f4d8f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/JenkinsDeployParameters.png b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/JenkinsDeployParameters.png new file mode 100644 index 00000000..2662d3a9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/JenkinsDeployParameters.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/SonarqubeWebhook.png b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/SonarqubeWebhook.png new file mode 100644 index 00000000..b479c328 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/SonarqubeWebhook.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/allure.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/allure.JPG new file mode 100644 index 00000000..97d4f53f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/allure.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/created_token.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/created_token.JPG new file mode 100644 index 00000000..9bd39497 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/created_token.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/credential.png b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/credential.png new file mode 100644 index 00000000..865731f8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/credential.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/devonfw.png b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/devonfw.png new file mode 100644 index 00000000..a8c61b09 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/devonfw.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/jenkinsonarscanner.PNG 
b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/jenkinsonarscanner.PNG new file mode 100644 index 00000000..63c9e859 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/jenkinsonarscanner.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/job.PNG b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/job.PNG new file mode 100644 index 00000000..ac2f1046 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/job.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/mavensettings.PNG b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/mavensettings.PNG new file mode 100644 index 00000000..ba365448 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/mavensettings.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/mavensettings2.PNG b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/mavensettings2.PNG new file mode 100644 index 00000000..67de91c4 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/mavensettings2.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/nexusadmin.png b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/nexusadmin.png new file mode 100644 index 00000000..9536b077 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/nexusadmin.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/nexususer.png b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/nexususer.png new file mode 100644 index 00000000..03d4b1ed Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/nexususer.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/pen.png b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/pen.png new file mode 100644 index 00000000..a5993c09 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/pen.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/profile.png b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/profile.png new file mode 100644 index 00000000..8209c816 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/profile.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/sonaraccount.PNG b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/sonaraccount.PNG new file mode 100644 index 00000000..63018fe9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/sonaraccount.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/sonarjenkins.PNG b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/sonarjenkins.PNG new file mode 100644 index 00000000..947237a7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/sonarjenkins.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/token.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/token.JPG new file mode 100644 index 00000000..ff44568d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-mts/token.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/allure.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/allure.JPG new file mode 100644 index 00000000..97d4f53f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/allure.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/created_token.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/created_token.JPG new file mode 100644 index 00000000..9bd39497 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/created_token.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/devon4j.JPG 
b/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/devon4j.JPG new file mode 100644 index 00000000..0907afa4 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/devon4j.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/devonfw.png b/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/devonfw.png new file mode 100644 index 00000000..a8c61b09 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/devonfw.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/jenkinsfile.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/jenkinsfile.JPG new file mode 100644 index 00000000..e0467cc6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/jenkinsfile.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/maven.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/maven.JPG new file mode 100644 index 00000000..14e801ea Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/maven.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/pen.png b/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/pen.png new file mode 100644 index 00000000..a5993c09 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/pen.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/pl.png b/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/pl.png new file mode 100644 index 00000000..f788df66 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/pl.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/profile.png b/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/profile.png new file mode 100644 index 00000000..8209c816 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/profile.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/token.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/token.JPG new file mode 100644 index 00000000..ff44568d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j-pl/token.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/1.Overview/devon4j_architecture.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/1.Overview/devon4j_architecture.png new file mode 100644 index 00000000..afee7298 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/1.Overview/devon4j_architecture.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/2.Example_app/component_layers.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/2.Example_app/component_layers.png new file mode 100644 index 00000000..1d1d76dd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/2.Example_app/component_layers.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/2.Example_app/get_request.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/2.Example_app/get_request.png new file mode 100644 index 00000000..3cc519ef Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/2.Example_app/get_request.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/2.Example_app/mts.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/2.Example_app/mts.png new file mode 100644 index 00000000..2613c436 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/2.Example_app/mts.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/2.Example_app/project_components.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/2.Example_app/project_components.png new file mode 100644 index 00000000..0d689202 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/2.Example_app/project_components.png differ diff 
--git a/docs/devonfw.github.io/1.0/_images/images/devon4j/2.Example_app/project_modules.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/2.Example_app/project_modules.png new file mode 100644 index 00000000..c4607d12 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/2.Example_app/project_modules.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/2.Example_app/run.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/2.Example_app/run.png new file mode 100644 index 00000000..a678f0ab Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/2.Example_app/run.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/devcon_dbtype.PNG b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/devcon_dbtype.PNG new file mode 100644 index 00000000..dc9ef9aa Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/devcon_dbtype.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/devcon_devon4j_create.PNG b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/devcon_devon4j_create.PNG new file mode 100644 index 00000000..2ad3dbce Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/devcon_devon4j_create.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/devcon_devon4j_flyway.PNG b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/devcon_devon4j_flyway.PNG new file mode 100644 index 00000000..78bbc8af Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/devcon_devon4j_flyway.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/devcon_devon4j_project_exp.PNG b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/devcon_devon4j_project_exp.PNG new file mode 100644 index 00000000..a4728a3b 
Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/devcon_devon4j_project_exp.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/emptyapp_beanmapper.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/emptyapp_beanmapper.png new file mode 100644 index 00000000..16d02fc2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/emptyapp_beanmapper.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/emptyapp_cxfconfig.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/emptyapp_cxfconfig.png new file mode 100644 index 00000000..f2746140 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/emptyapp_cxfconfig.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/emptyapp_dbmodel.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/emptyapp_dbmodel.png new file mode 100644 index 00000000..b06f1b51 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/emptyapp_dbmodel.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/emptyapp_dbprofile.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/emptyapp_dbprofile.png new file mode 100644 index 00000000..43c30285 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/emptyapp_dbprofile.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/emptyapp_project.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/emptyapp_project.png new file mode 100644 index 00000000..9f676ce9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/emptyapp_project.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/emptyapp_security.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/emptyapp_security.png new file mode 100644 index 00000000..10be622f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/emptyapp_security.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/emptyapp_test.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/emptyapp_test.png new file mode 100644 index 00000000..38a93180 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/emptyapp_test.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/login.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/login.png new file mode 100644 index 00000000..2b5d371f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/login.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/newapp1.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/newapp1.png new file mode 100644 index 00000000..03e594fb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/newapp1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/newapp2.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/newapp2.png new file mode 100644 index 00000000..36d5685f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/newapp2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/newapp3.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/newapp3.png new file mode 100644 index 00000000..f756214e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/newapp3.png differ 
diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/newapp4.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/newapp4.png new file mode 100644 index 00000000..0c54ecf1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/newapp4.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/run.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/run.png new file mode 100644 index 00000000..6bd7f576 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/3.BuildYourOwn/run.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen0.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen0.png new file mode 100644 index 00000000..1d3e051e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen0.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen1.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen1.png new file mode 100644 index 00000000..fd16fa97 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen2_crud_dao.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen2_crud_dao.png new file mode 100644 index 00000000..452ee5d7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen2_crud_dao.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen2_crud_logic_allinone.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen2_crud_logic_allinone.png new file mode 100644 index 00000000..c93fffda Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen2_crud_logic_allinone.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen2_crud_logic_withusecases.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen2_crud_logic_withusecases.png new file mode 100644 index 00000000..2466b59b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen2_crud_logic_withusecases.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen2_crud_rest.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen2_crud_rest.png new file mode 100644 index 00000000..bce8cbc2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen2_crud_rest.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen2_entityinfrastructure.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen2_entityinfrastructure.png new file mode 100644 index 00000000..dac45edc Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen2_entityinfrastructure.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen2_tos.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen2_tos.png new file mode 100644 index 00000000..7953d0ff Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen2_tos.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen3_allpackages.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen3_allpackages.png new file mode 100644 index 00000000..6ad28a8c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen3_allpackages.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen4_review_imports.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen4_review_imports.png new file mode 100644 index 00000000..ed8b02f7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen4_review_imports.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen5_expected_errors.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen5_expected_errors.png new file mode 100644 index 00000000..96bb128a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen5_expected_errors.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen6_manual_import.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen6_manual_import.png new file mode 100644 index 00000000..f20de0d8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen6_manual_import.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen_health1.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen_health1.png new file mode 100644 index 00000000..dff4df17 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen_health1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen_health2.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen_health2.png new file mode 100644 index 00000000..98f7937a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen_health2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen_templates.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen_templates.png new file mode 100644 
index 00000000..362aa551 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/cobigen_templates.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/devon4j_architecture.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/devon4j_architecture.png new file mode 100644 index 00000000..afee7298 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/devon4j_architecture.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/jumpthequeue_emptycomponents.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/jumpthequeue_emptycomponents.png new file mode 100644 index 00000000..a84006b5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/jumpthequeue_emptycomponents.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/mythaistar_component_core1.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/mythaistar_component_core1.png new file mode 100644 index 00000000..9e2697ef Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/mythaistar_component_core1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/mythaistar_component_core2.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/mythaistar_component_core2.png new file mode 100644 index 00000000..a694b339 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/mythaistar_component_core2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/mythaistar_component_structure.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/mythaistar_component_structure.png new file mode 100644 index 00000000..c7260cb7 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/mythaistar_component_structure.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/mythaistar_components.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/mythaistar_components.png new file mode 100644 index 00000000..0d689202 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/mythaistar_components.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/visitor-accesscode.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/visitor-accesscode.png new file mode 100644 index 00000000..17b4fb30 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/visitor-accesscode.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/visitor_component_entity1.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/visitor_component_entity1.png new file mode 100644 index 00000000..d097039e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/visitor_component_entity1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/visitor_component_entity2.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/visitor_component_entity2.png new file mode 100644 index 00000000..95ec3101 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/visitor_component_entity2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/visitor_component_gettersandsetters.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/visitor_component_gettersandsetters.png new file mode 100644 index 00000000..656f6602 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/visitor_component_gettersandsetters.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/visitor_component_packages1.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/visitor_component_packages1.png new file mode 100644 index 00000000..b9fbb94b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/visitor_component_packages1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/visitor_component_packages2.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/visitor_component_packages2.png new file mode 100644 index 00000000..5e3b7046 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/4.Components/visitor_component_packages2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/dependency_injection.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/dependency_injection.png new file mode 100644 index 00000000..764eec73 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/dependency_injection.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/jumpthequeue_paginated1.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/jumpthequeue_paginated1.png new file mode 100644 index 00000000..3bc82371 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/jumpthequeue_paginated1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/jumpthequeue_paginated2.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/jumpthequeue_paginated2.png new file mode 100644 index 00000000..311996f4 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/jumpthequeue_paginated2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/jumpthequeue_saveVisitor.png 
b/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/jumpthequeue_saveVisitor.png new file mode 100644 index 00000000..62354776 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/jumpthequeue_saveVisitor.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/jumpthequeue_simpeGet1.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/jumpthequeue_simpeGet1.png new file mode 100644 index 00000000..d06ce40d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/jumpthequeue_simpeGet1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/jumpthequeue_simpeGet2.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/jumpthequeue_simpeGet2.png new file mode 100644 index 00000000..e83629bf Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/jumpthequeue_simpeGet2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/jumpthequeue_structure.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/jumpthequeue_structure.png new file mode 100644 index 00000000..0b6710b3 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/jumpthequeue_structure.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/layer_api_impl.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/layer_api_impl.png new file mode 100644 index 00000000..35f91f62 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/layer_api_impl.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/layers_impl.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/layers_impl.png new file mode 100644 index 00000000..51c6b5c4 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/5.Layers/layers_impl.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/devon4j/6.Customizations/jumpthequeue_accesscode.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/6.Customizations/jumpthequeue_accesscode.png new file mode 100644 index 00000000..a38baf36 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/6.Customizations/jumpthequeue_accesscode.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/6.Customizations/jumpthequeue_listwithcode.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/6.Customizations/jumpthequeue_listwithcode.png new file mode 100644 index 00000000..9a23626c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/6.Customizations/jumpthequeue_listwithcode.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/6.Customizations/jumpthequeue_listwithoutcode.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/6.Customizations/jumpthequeue_listwithoutcode.png new file mode 100644 index 00000000..65d0b7b7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/6.Customizations/jumpthequeue_listwithoutcode.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_name.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_name.png new file mode 100644 index 00000000..cfcd36ad Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_name.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_nullemail.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_nullemail.png new file mode 100644 index 00000000..0b4fc99a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_nullemail.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_validphone.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_validphone.png new file mode 100644 index 00000000..8b23fb71 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_validphone.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_wrongemail.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_wrongemail.png new file mode 100644 index 00000000..a5428d04 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_wrongemail.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_wrongphone.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_wrongphone.png new file mode 100644 index 00000000..f5d6b76c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_wrongphone.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_maven.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_maven.png new file mode 100644 index 00000000..ebb1b1f7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_maven.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_result.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_result.png new file mode 100644 index 00000000..959057bf Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_result.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_result2.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_result2.png new file mode 100644 index 00000000..e50ec89d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_result2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_result3.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_result3.png new file mode 100644 index 00000000..650aaf45 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_result3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_runtest.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_runtest.png new file mode 100644 index 00000000..d23f957d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_runtest.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_structure.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_structure.png new file mode 100644 index 00000000..8f994737 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_structure.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/8.Testing/mythaistar_testing_structure.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/8.Testing/mythaistar_testing_structure.png new file mode 100644 index 00000000..d514a659 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/8.Testing/mythaistar_testing_structure.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/9.Deployment/jumpthequeue_server_structure.png 
b/docs/devonfw.github.io/1.0/_images/images/devon4j/9.Deployment/jumpthequeue_server_structure.png new file mode 100644 index 00000000..36bc033b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/9.Deployment/jumpthequeue_server_structure.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/9.Deployment/jumpthequeue_simpleget1.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/9.Deployment/jumpthequeue_simpleget1.png new file mode 100644 index 00000000..cad561bd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/9.Deployment/jumpthequeue_simpleget1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4j/9.Deployment/jumpthequeue_simpleget2.png b/docs/devonfw.github.io/1.0/_images/images/devon4j/9.Deployment/jumpthequeue_simpleget2.png new file mode 100644 index 00000000..30fd1d74 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4j/9.Deployment/jumpthequeue_simpleget2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/allure.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/allure.JPG new file mode 100644 index 00000000..97d4f53f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/allure.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/created_token.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/created_token.JPG new file mode 100644 index 00000000..9bd39497 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/created_token.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/devon4ng.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/devon4ng.JPG new file mode 100644 index 00000000..cafaf809 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/devon4ng.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/devonfw.png 
b/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/devonfw.png new file mode 100644 index 00000000..a8c61b09 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/devonfw.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/jenkinsfile.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/jenkinsfile.JPG new file mode 100644 index 00000000..e0467cc6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/jenkinsfile.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/maven.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/maven.JPG new file mode 100644 index 00000000..14e801ea Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/maven.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/pen.png b/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/pen.png new file mode 100644 index 00000000..a5993c09 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/pen.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/pl.png b/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/pl.png new file mode 100644 index 00000000..f788df66 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/pl.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/profile.png b/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/profile.png new file mode 100644 index 00000000..8209c816 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/profile.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/token.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/token.JPG new file mode 100644 index 00000000..ff44568d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng-pl/token.JPG differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/devon4ng/1.Intro/architecture_overview.png b/docs/devonfw.github.io/1.0/_images/images/devon4ng/1.Intro/architecture_overview.png new file mode 100644 index 00000000..c27ad95a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/1.Intro/architecture_overview.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/1.Intro/flex_box.jpeg b/docs/devonfw.github.io/1.0/_images/images/devon4ng/1.Intro/flex_box.jpeg new file mode 100644 index 00000000..77a18166 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/1.Intro/flex_box.jpeg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/1.Intro/theming.png b/docs/devonfw.github.io/1.0/_images/images/devon4ng/1.Intro/theming.png new file mode 100644 index 00000000..6db71e98 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/1.Intro/theming.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/app_structure.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/app_structure.JPG new file mode 100644 index 00000000..9ad3caf9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/app_structure.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/authentication.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/authentication.JPG new file mode 100644 index 00000000..fa5c49b5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/authentication.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/authorization_header.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/authorization_header.JPG new file mode 100644 index 00000000..373af982 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/authorization_header.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/book_table.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/book_table.JPG new file mode 100644 index 00000000..8108dd98 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/book_table.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/menu_cards.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/menu_cards.JPG new file mode 100644 index 00000000..e2d5a170 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/menu_cards.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/price_calculator.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/price_calculator.JPG new file mode 100644 index 00000000..2deeb0e8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/price_calculator.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/project_main_files.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/project_main_files.JPG new file mode 100644 index 00000000..d920a317 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/project_main_files.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/waiter_cockpit.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/waiter_cockpit.JPG new file mode 100644 index 00000000..e8740759 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/waiter_cockpit.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/webpack.JPG 
b/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/webpack.JPG new file mode 100644 index 00000000..e94b0a8a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/2.Example_app/webpack.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/JTQ_codeview.png b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/JTQ_codeview.png new file mode 100644 index 00000000..e73c0ab7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/JTQ_codeview.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/JTQ_queue.png b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/JTQ_queue.png new file mode 100644 index 00000000..061479f5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/JTQ_queue.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/JTQ_register.png b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/JTQ_register.png new file mode 100644 index 00000000..a0af726d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/JTQ_register.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/access_form.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/access_form.JPG new file mode 100644 index 00000000..fc9f3e05 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/access_form.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/angularcli.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/angularcli.JPG new file mode 100644 index 00000000..2adc384a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/angularcli.JPG differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/appnew.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/appnew.JPG new file mode 100644 index 00000000..1ad7c9c3 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/appnew.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/code_viewer.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/code_viewer.JPG new file mode 100644 index 00000000..2b6ad3f9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/code_viewer.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/filesnew.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/filesnew.JPG new file mode 100644 index 00000000..ebff64d6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/filesnew.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/installedpackages.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/installedpackages.JPG new file mode 100644 index 00000000..8aec1baf Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/installedpackages.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/jumptheq.png b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/jumptheq.png new file mode 100644 index 00000000..6bf0881d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/jumptheq.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/login_error.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/login_error.JPG new file mode 100644 index 00000000..ff7fcc56 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/login_error.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/ngnew.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/ngnew.JPG new file mode 100644 index 00000000..01ca0269 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/ngnew.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/queue_viewer.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/queue_viewer.JPG new file mode 100644 index 00000000..ffb92b4b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/queue_viewer.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/root_header.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/root_header.JPG new file mode 100644 index 00000000..7e6667f1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/root_header.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/root_router.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/root_router.JPG new file mode 100644 index 00000000..7dbcc6b2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/3.BuildYourOwn/root_router.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/5.Angular_Services/injector.png b/docs/devonfw.github.io/1.0/_images/images/devon4ng/5.Angular_Services/injector.png new file mode 100644 index 00000000..4764c6a9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/5.Angular_Services/injector.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4ng/6.Deployment/dist_folder.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4ng/6.Deployment/dist_folder.JPG new file mode 100644 index 
00000000..31e59b9e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4ng/6.Deployment/dist_folder.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4node-architechture.png b/docs/devonfw.github.io/1.0/_images/images/devon4node-architechture.png new file mode 100644 index 00000000..653b0bbe Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4node-architechture.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/allure.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/allure.JPG new file mode 100644 index 00000000..97d4f53f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/allure.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/created_token.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/created_token.JPG new file mode 100644 index 00000000..9bd39497 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/created_token.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/devon4node.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/devon4node.JPG new file mode 100644 index 00000000..31955c18 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/devon4node.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/devonfw.png b/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/devonfw.png new file mode 100644 index 00000000..a8c61b09 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/devonfw.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/jenkinsfile.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/jenkinsfile.JPG new file mode 100644 index 00000000..e0467cc6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/jenkinsfile.JPG differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/maven.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/maven.JPG new file mode 100644 index 00000000..14e801ea Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/maven.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/pen.png b/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/pen.png new file mode 100644 index 00000000..a5993c09 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/pen.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/pl.png b/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/pl.png new file mode 100644 index 00000000..f788df66 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/pl.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/profile.png b/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/profile.png new file mode 100644 index 00000000..8209c816 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/profile.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/token.JPG b/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/token.JPG new file mode 100644 index 00000000..ff44568d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon4node-pl/token.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonWhite.png b/docs/devonfw.github.io/1.0/_images/images/devonWhite.png new file mode 100644 index 00000000..dd7c7849 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonWhite.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon_logo - responsive.png b/docs/devonfw.github.io/1.0/_images/images/devon_logo - responsive.png new file mode 100644 index 00000000..908cc66e Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/devon_logo - responsive.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devon_logo.png b/docs/devonfw.github.io/1.0/_images/images/devon_logo.png new file mode 100644 index 00000000..908cc66e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devon_logo.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen9_sencha_app_gen.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen9_sencha_app_gen.png new file mode 100644 index 00000000..df9a46e5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen9_sencha_app_gen.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen9a_sencha_app_gen.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen9a_sencha_app_gen.png new file mode 100644 index 00000000..34da3e3a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen9a_sencha_app_gen.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen9b_sencha_app_gen.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen9b_sencha_app_gen.png new file mode 100644 index 00000000..e6898c21 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen9b_sencha_app_gen.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen9c_sencha_app_gen.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen9c_sencha_app_gen.png new file mode 100644 index 00000000..2bb89041 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen9c_sencha_app_gen.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_1_context_menu.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_1_context_menu.png new file mode 100644 index 00000000..6e83b054 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_1_context_menu.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_2_template_selection.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_2_template_selection.png new file mode 100644 index 00000000..acf9ba69 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_2_template_selection.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_3_field_selection.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_3_field_selection.png new file mode 100644 index 00000000..92d873ec Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_3_field_selection.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_4_warnings.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_4_warnings.png new file mode 100644 index 00000000..84f6089c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_4_warnings.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_5_error.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_5_error.png new file mode 100644 index 00000000..67d392fb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_5_error.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_6_error.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_6_error.png new file mode 100644 index 00000000..a2729709 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_6_error.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_7.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_7.png new file mode 100644 index 00000000..568932e1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_7.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_7_error.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_7_error.png new file mode 100644 index 00000000..cd3e2970 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_7_error.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_8_permissions_template_selection.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_8_permissions_template_selection.png new file mode 100644 index 00000000..9ab87b1e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_8_permissions_template_selection.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_8a_permissions_template_selection.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_8a_permissions_template_selection.png new file mode 100644 index 00000000..820820b2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-cobigen/devon_guide_cobigen_8a_permissions_template_selection.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/tutorial_devon_add_app_websphere_liberty.PNG b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/tutorial_devon_add_app_websphere_liberty.PNG new file mode 100644 index 00000000..65b848a9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/tutorial_devon_add_app_websphere_liberty.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/tutorial_devon_add_websphere_liberty-1.PNG b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/tutorial_devon_add_websphere_liberty-1.PNG new file mode 100644 index 00000000..6e9b359b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/tutorial_devon_add_websphere_liberty-1.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/tutorial_devon_add_websphere_liberty-2.PNG b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/tutorial_devon_add_websphere_liberty-2.PNG new file mode 100644 index 00000000..b7f6d040 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/tutorial_devon_add_websphere_liberty-2.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/tutorial_devon_add_websphere_liberty-3.PNG b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/tutorial_devon_add_websphere_liberty-3.PNG new file mode 100644 index 00000000..f26c6ee7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/tutorial_devon_add_websphere_liberty-3.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/tutorial_devon_jspackaging_process.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/tutorial_devon_jspackaging_process.png new file mode 100644 index 00000000..9d3c7f99 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/tutorial_devon_jspackaging_process.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/tutorial_devon_jspackaging_process02.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/tutorial_devon_jspackaging_process02.png new file mode 100644 index 00000000..54c8cbdf Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/tutorial_devon_jspackaging_process02.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/tutorial_devon_jspackaging_process03.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/tutorial_devon_jspackaging_process03.png new file mode 100644 index 00000000..769aa3ab Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/tutorial_devon_jspackaging_process03.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_LoginPage.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_LoginPage.png new file mode 100644 index 00000000..e7498c3e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_LoginPage.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_RestServiceCall.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_RestServiceCall.png new file mode 100644 index 00000000..d06c64f7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_RestServiceCall.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_UploadDeployement.png 
b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_UploadDeployement.png new file mode 100644 index 00000000..c592df5b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_UploadDeployement.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_choose_file.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_choose_file.png new file mode 100644 index 00000000..1d77e081 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_choose_file.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_console.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_console.png new file mode 100644 index 00000000..453422e1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_console.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_console_Add.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_console_Add.png new file mode 100644 index 00000000..82e1edba Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_console_Add.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_console_start.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_console_start.png new file mode 100644 index 00000000..1f379ca1 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_console_start.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_deployment_success.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_deployment_success.png new file mode 100644 index 00000000..c8bfafb1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_deployment_success.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_verify_upload.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_verify_upload.png new file mode 100644 index 00000000..246d1e83 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_administration_verify_upload.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_standalone.PNG b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_standalone.PNG new file mode 100644 index 00000000..a9fcb959 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-deployment/wildfly/tutorial_devon_standalone.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-dist-developers-guide/node-modules-fig2-2.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-dist-developers-guide/node-modules-fig2-2.png new file mode 100644 index 00000000..36b5ecc0 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-dist-developers-guide/node-modules-fig2-2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-dist-developers-guide/node-modules-result.png 
b/docs/devonfw.github.io/1.0/_images/images/devonfw-dist-developers-guide/node-modules-result.png new file mode 100644 index 00000000..559f335b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-dist-developers-guide/node-modules-result.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-dist-developers-guide/nodejs-fig1.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-dist-developers-guide/nodejs-fig1.png new file mode 100644 index 00000000..320d51b4 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-dist-developers-guide/nodejs-fig1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-dist-developers-guide/nodejs-result.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-dist-developers-guide/nodejs-result.png new file mode 100644 index 00000000..bceb8fd5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-dist-developers-guide/nodejs-result.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-dist-developers-guide/npm-fig2.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-dist-developers-guide/npm-fig2.png new file mode 100644 index 00000000..6a2eb138 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-dist-developers-guide/npm-fig2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-dist-structure/devonfw-dist-structure-01.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-dist-structure/devonfw-dist-structure-01.png new file mode 100644 index 00000000..0b557d21 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-dist-structure/devonfw-dist-structure-01.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-dist-structure/devonfw-dist-structure-02.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-dist-structure/devonfw-dist-structure-02.png new file mode 100644 index 00000000..0748331d Binary 
files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-dist-structure/devonfw-dist-structure-02.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-i18n/ResourceBundleSync_Main.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-i18n/ResourceBundleSync_Main.png new file mode 100644 index 00000000..99fdac5d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-i18n/ResourceBundleSync_Main.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-i18n/ResourceBundleSync_argument.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-i18n/ResourceBundleSync_argument.png new file mode 100644 index 00000000..971edfc9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-i18n/ResourceBundleSync_argument.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-i18n/code_springbootapp.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-i18n/code_springbootapp.png new file mode 100644 index 00000000..b7cb2f5d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-i18n/code_springbootapp.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-i18n/conceptual_schema_with_mmm_impl.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-i18n/conceptual_schema_with_mmm_impl.png new file mode 100644 index 00000000..77ba06a7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-i18n/conceptual_schema_with_mmm_impl.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-i18n/conceptual_schema_without_mmm_impl.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-i18n/conceptual_schema_without_mmm_impl.png new file mode 100644 index 00000000..96055f86 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-i18n/conceptual_schema_without_mmm_impl.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-i18n/locale_path.png 
b/docs/devonfw.github.io/1.0/_images/images/devonfw-i18n/locale_path.png new file mode 100644 index 00000000..19bddbae Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-i18n/locale_path.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_CompleteOverview.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_CompleteOverview.png new file mode 100644 index 00000000..8591d2cb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_CompleteOverview.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogComponent.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogComponent.png new file mode 100644 index 00000000..bb81efe5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogComponent.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogContainer.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogContainer.png new file mode 100644 index 00000000..1f9222f1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogContainer.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogInteractions.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogInteractions.png new file mode 100644 index 00000000..6ac503fb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_DialogInteractions.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_Overview.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_Overview.png new file mode 100644 index 00000000..b694090e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-methodology/OASP_ClientArchitecture_Overview.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-oasp.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-oasp.png new file mode 100644 index 00000000..4d1171ff Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-oasp.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-org-old.drawio b/docs/devonfw.github.io/1.0/_images/images/devonfw-org-old.drawio new file mode 100644 index 00000000..53ce818d --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/devonfw-org-old.drawio @@ -0,0 +1 @@ +7L1Zk6tIli38a+oxy5iHR2aQmMUkXtqYQcwz6Nd/uCKyqjJPftbdt+r0zb5WkWYnJAc5sIe119ruivwLyrWHNEVDqfVp1vwFgdLjLyj/FwRBcJy8foGR82uEhtGvgWKq0q8h+O8Dj+qdfQ9C36NrlWbzb05c+r5ZquG3g0nfdVmy/GYsmqZ+/+1ped/89qpDVGQ/DDySqPlx1K/SpfwapRDy7+NyVhXlr1eGCfrrSBv9evL3k8xllPb7Pwyhwl9Qbur75etVe3BZA4z3q13ewnPL1Sa/ucewqkIi9w38y9dk4n/nI397hCnrlv/jqccSF07WSWX15vf/scXZuoXfH4G2qFm/7fX9rMv5qwGnfu3SDEwC/wVl97JasscQJeDofoXMNVYubfN9OK+ahuubfvp8Fs1x8B8Y77tFjNqqAVGkrkmVRtdVuL6be3AdNpqS79ABZ8/L1NfZP8wDfX6+5/mH8a+fa/y/aKFvS27ZtGTHP8THt8WkrG+zZTqvU76PYt8G+g5+GP1+v/89lK6I+R4s/yGO8F8Ho+/4Lf429999dL34dtN/w2XwH7iMaJZv4/zGd8S49r8e+GX+2Je5TrgMdfz94PWqAL/TbOu7fL+OT9nQz9XST9WVuN9zX7f6Nf3XyT9EyWXP5beh8Fsndn2X/S4+voeipiq6621yuS67xlngnepKYOb7QFul6SdI/ij2/h6d0O/DTKymryBLs+9Dj+/7hf504fLTogX5Fyf4z7IcDv3WdDjyB6ZD/sByfxv8l5sO/V9iOvR3UYcS/9dNh/3npruq6QBeVu2ngLOf38w8fHEAkMzRr2/y6gA2/hswqFGcNSaAqKoHABH3y9K3f4AcSz/8WJMYiBUF6r9Qk77vjC+XBXAVBpgEEfd9/+vcd9E09+uUZH9Nrisj4vXvEHWX4cSymi/kBK+qKzQunBU/Z49rnP116IqfGAcIhf0V/20k0PAPkYD8ynx+EwnQz4oE/OcSDFEU6I8zf5pRf4dLf3v/fy+5iP8luET86SxH/ueWS6O5/Bud6NelqbqLw/wq
TaDfWu4H6tL8Dpl+4DT/Tej6FSTbowDq7K97Fjd90c9/fUVb9LP8BkPobx2H/eg4FPsr9gcsBv9JjqP++/XkH9z0x7b9T13ycSYbJXXxSad/lDWfn/9a0frjKpKkHfrX6tK8OagS03cVSaMlAoXjGr+It/hxNaAh4kWPrn+jrlibaPoFRqhPKflZkIf8toogKPlrXfmHCCD/IHPJn5W49A/+/6gV7PVDGPzpdcifDW2xn6ZTf+wtfDnt18j9t9f+CXrx89z2R/2FfyFj43mBEcX/QTn0J+Ad8I8q/DsVsuXfufBPO+7n5cK/ugXwu1wQKFEUsf9Bk/4Juirwj72B9vxlKaPql3mJpn+nwz/tu5+XDv9q5fl7MS8I7E8tDb+vtn+GdPhRk77WdrgSIvtlXLNr7N8J8c967+clxP+LuhT6T3XpOmfT/EvUpVNfpb80vzR901RDP/wCTvjlipoaSNYL50Vwh8Pf9OqP9Y8QRYH4L7Rif1pIIeTvWqY4/qPU/bWt8T/S6oB/1Lr/HMj+T6Xjn2DZ4deJ/8F2SR9XRdb9G0X/abf9NBT9NcB/GstGLpSBfqJJMepPpziRH5XLt+IE0fXvZPhnPffzkuHPvHQqYAL6Iej/Z0unIPZe81/7qQCrokt0XRCwCnDu39rdfzvtly7bLy7RLVeg/RI3F6n4ycunGP17LoBgxA/RQP0BF8DRv/7qpX99PPy4gPpZT/7l14058fSbDTzY65ehWYvq3xXvn98f8fOS/H/pEu6fgeL9qJerf1e0f95jPy/W/1/UyNh/qpGXLCm766rF+QsFVnBhsH8V7BNKs/+I1yb+jytqo/+4tMn3iwm44uv1558mGq5HBa///+XzRWt5gf+/KJ8p7LfBRv8YbCTyY6z9Ovavj7X/Ldr597zzTwCs6I/auZ1+ScosqS8Y/De+/rOe+2kAi/4XFmx/D7D/Cvz8Y2T+Ywz9Dcz+iqofxESZr7eI+NnRwlUea9g7dJeKnrl+9IdbCm7BMCyLXW9FiGOe129OoPLxcwIrpazjCgyjStnTPVh6B4fZ10O86dcL+TrCvFiFYUwYvGfNay4/mxnGVszrEIVf76WEIM6HcoLpVKnhLZhlQ4muwgf7sCGluPMCbjjFcB3v34hePPnbnsriGdq34qm99PtZ35MufVRKpbkt8OKu8eyuQcVb4rXnDXkG4ruQWL7Yd0bI8gYfrf2J6TPM1k8r9+ogZJ989GSmNeJ6VnhdMzD+/dXoThUyT8kutMIgV2oA138V8smGt7OI3jrGP+2HyL5rHhEeXKweKLcVt+oGyU8H4mdlZLrC2DfFDIYn/3RYukg4cZYODr9NFh9escdySQCzYXNrMze6FaHEFr3BPevhOuSOJZv43vlom3XGbm3QibRbi+CIf7tHTGOpiU09OUFxi/qm7TT/gOq7UyQ9IzKUBa8pk5jFWQ0wW3Eik7SNLYXO7ZRsrVWoywCYwGDm9axu8ZSaF6VooZ6x/VM0il27hkXNfrajxDP7ZVL2M3alNXv5XzM2V6ILIvGBexkuYBU/0K4XWHW9XZVCGrEFeVzuJHq3ESzPxoIVTdmgSGDalTBV68Ninh9I2KbzbiCWJNfYE3dcXYf6KNKFenjr9i4h59OIxVfYhq1PRs8kSK5sFANTglFKmyd3T9Gwpbs1t+kgm1P0OdJtuGbXD0Ysvvu5ua8fVn0eQfL39xpUUn9/x0AtN7EWzzzt0RuR+ytpO0h/dHmLT8MuBI+w0s97Tr8uMchShnR2+WvlG5MwKT/H39c9mfS7MNX4jDrKCvPzCKjsMzHHPj5xzkqyik5k1B1usNBKhM1xJFyTwQ0aBtnYYtMgjZuL1vjKMXlA31sdRXyMA9/7gE8nmbuaxEkpXF3YDR3ertE7cl+BR4ICYs+Ha14Q3SEmNV4feJN5MgGyIzAvuPU7/57pyByRo7CeLTq269jor5tG3NTWqorn50abXmWYnRXKO9rIzZAjNfNOl8B61f12
i1+LZ6TTUEQG0z8U7aixnmXu794pnsJjWbC26uRn1NgwfyMf5nAQQ7wJ5JdteY17CjKjcnMXehfQsG3JsxF03UTS0aWf3YbbcaZaRenMg68d9zoDH4eEvMdxeT4fK+QE2pUO4gPkdfWZUmFvH6ve4jcdPkcwJx6LjtuDhY/n6BAadu+v0XnXTLYvDyTNN3lQyCMV2i2UlZJhvmLj1svgqZ9smwCSqHJLOx3623tsu/2859cUtZ3xFRNB6f5M5vdaOdYQI49D/I4tQ+eway5HuU5FPX1/jxBLT26glzU/SEGB7drTU1kP5f2FV0j9BevfUShEws5oTKtPqbbP/USPk9i6WoMyglyQBVK6OPfAiVDaW9vZ7OHQPh97MRnL7sw+OIHI5w43l8o4+Wt9vNj8mJUw44viuHyuZXBFRBE7ZoryZTPm+Njs/nw/0WTJ40fZMcOYGzclY+uS0rhHfyBuCzCpyjHSEITvO9UUs7ASRmOD1bvMeNTjGN/4fEpuuV3UlKbmvL10h9pNCPjwcbBfnyu2T9FgnVGi56HIO1FX1zvw1io/NuoZOczF6FmTq+nucLphtMpQNGAeZaxP3HAcjhZMwrj5AocWOWuSbO1Kj3X42W1wCL2p2bW/no3freTJM+wrk1EyfJD+Ne9tnBCJN/un+tDKXcOoU3jDdWu+qI9brwL5/rpPS+6KB2uxfIaU1yEhaN1HDNvFjktHJzd70+fg21dkZwp3Qfq+NxbENM/Ny8PVF1XWLJCK8FHVdeSE+juBR8iQ4MYPSH7/ugzBnF+eUwKSWAs9jG8J6cqW4jnZ0cYA2aKzGfDMrbgvXzPmwILo4u/5HYLsbaqIVxe6WWLZ3OHVL4TqaV2mt84puM/5d8YEJt9l/mNXI3+sq0VEHLylXqZHPF/Y0NfMKlQA/Cs3dUW6+0i9L7ljNUxGeLD58q+7/IBXIe5WdgXpvjQpFuXjKo3T8ay97Dbj2OVa8zuysitkrzgxscu0ok17OsGodnTqeRUReprq8eMNVavxjQfKdWngKw53QB9SCTf9Vj8LH/F45Q1CaLyVd1eY+S8EtXlgg30l3k7o1Qvd9lLgpunNyzFCB2jHza7ybWPDvh5f4fZrEoSly7jnLkuIU2UJ15vH5Sg3jwGbuaa1WOzyIFNNLRFfRgp5iXchfRFz/LBGAAbCd3gwOHi6QmXxSBBsMY2Jg3lx5XMBF289v7CLr4CXXircn41dMEqBGuAWavcWF5fpE/B1lCO5DQsubac9BlAFakZc0dlXrLM9c9Qmc+fIKF7Gu44vC+RYuztiIwWatfiR2RNUfyckb11GzBiBvQLpuszUS+Bi3qBecO2K0805duN2qWRQA2h2P5jiO0ssBVwjPkkyt0A12W5RdFONiQ2xpygaBgpBUgKs8knfdGl6ppAfyQri/7m3N5UtkbkqFgPd1MlcRs4b5i8w427X7XNMqn5J9OwCa9bTHfvRQz7PN7EGcElPwtv7IHL72azHvWa/bqrE6sJiRvsKfMH2uir+XGWaHMOGc7rlUon5LqUgx5ir7jj4s9Oa6UEPvRJKi1Dv65vW30e2fXuM+0SY8GIXC9AlRAx13bvcnZxGer23QJD3XRw0WPFlmsKkLFCw34E0YzYIqmqs6neVGYXeeddHwqHHYozwm+2hKNh3dhYywEVIP7r2nKwtuXN045FE4jU3X/kqEAaTg4y0ZDOPCfiBvCWODp1Ah0WRQQnk+5zrgp+zRNNJ6dTKH8YpR+edw0AxXOLy+fp+ML/PQb3qicH1Br9Ko0CO76/DdZWNkL+QqZBBzgrMG3RBcleo5PYptzBaPeH9K9gYjKGukGYZDSTbeq91+7SqY0pP1S++WBHGJp8zBHxTTaFEKo730CqCs73+gkxp/tjLCc596++jqcAVK8YBqh+aiZyTWH0hWHHXP9lrzWGqqpGrY4VtKwPW4LAvpr4qfEU/cx6fvL0IbbspuI1U/Ol1dUxE9epJ37V6+cJPBUlPyp8C
h3EfGZyUX85QWP9T3Ti5vACr7XBh0pKBjbDxeFtfDOdVEJ/Pn0/szkMuHkuKUEfRvA3SdS+P29e98Ez9hSEWJQ1oZhmI71oj2RJHwlLcF5aze8pchhJYhS4z35aC20MMXfw0hK748kHEfs1B3jukahJrDKrJCV25t75SOGJrMJe1yEOukUpasKPnIiy5utk39DNGmVw3c50y0YOqvub7A4oIo6WcK+7FcJ9/O480XHWq6yzqyQsGstB9E3yngmytl4EVVaopmJCvmIgRhpbXx+IQX5bjfCUHKSzHLhbcbusRuIA7aezXs/YSQA2Wg4pm0PRW+nwP4gF/f/ZlGWD2uyT2w17oS9mfEQjar/rBiBWwGMtSqJLbFieiz5j5ri0Qr3zwN+Lfui5IO9JXMdAZXyHK2JBaMBbHlW3dyhIPJBkl3L4DePm6qtou981xLHaM12hm8f2rHslM/TG/VNp8vUPP+PqshLcxR3zPrbryZ25xvY5wN8tBqLeFCMftCwzuTNPyLmPp/ONZvivRfRA+8yi/c7Wn7pZyWUutOuPmHfzgjopVMsrneZtK/jwvhu0ywEF1skIheX08CS5saQAxOHFYi36tOhBFomR6aop9PTe735krsTi2H/1RZ69MVgaUD0dtPa3v1BR49VOPrrtcV9jFkfmhYKWrPbB4cX36m21gXPE1T2esVnm/c71WKUdQfwcF/JWVi+Ld86c9UWTsyMCDX8dtTOhs68qUiNMiVQ1dIynrzlP211NlvgKc+A5w2h21Xh63VPBhy+6Pvz3HpUf9yyA1ad8TXFpWsTIUnRdWY0e1iv0GNIV5fWKPEw8EgVyMejQ38XbcKcRApP6rqmoZg1wGL6UDExt1UY4WQb6yjJkzdhAsltNfCik2kK4sj12Vzvc3Irx2aL8gTPXVN3InysXGT/lh892iq43al99V+1efsDUKIWdeyCigh1GEr2EX3a7Xasd/XU8u5+uiUnmxiQjy28JS6myGIPwq9aL2NVvPugqog7eanAWBurlxVdtMb2gDGorp/JW0dxZuOZcpuPDEBrSODoWf1ePZ4IgRH1RnF98ZgH1hPb6eK5cpni8iFQKRt1Nmvu4cUJtitAVGl/UkrfZmZQEKh81MX4V7UtIv+11E8AqKQoqjw1kHfpsXFbRIsjBKDtJXRe7bpcgXW9dfCco+pten6o3x/bEZd/cLQS9fSECzRNIgIEd5Iqn4gpLhKkbRzftmYJNrXrnFQPCDzGxPNFiWmCcm+UZOkL+gOrFOE+ISZY3zODrGDcbYUkzxKcx8kqsa/siXQabpBXuOxPSuwNyVMo4SclX08kbElADo1TKjI9Hifr5zjPEy4HG3pv2K6i6WX/I78lbM7qlXseibAh8W3tDKZWM0MxxfbiSKWedqm+65zno+tMbcfSk9D4LjxiCp1X08PRZfLjOhp3N/9Luv7JzC8qdzPtatPxGxyQb42c5HcBpjEwhekyZA277Ok06lN6cuLJMo/IcfaG/A9viBiH0vPO6IYsNTsWf+BU0VMgeODPH2wXIsWJ9g5Sjwxhdb2hg1X0DSkXqNZRG7fSo1wACxCFAkRZvl0cO7Be93NlFAIFxcySOB6F8R+0JH5UMDLkaT7oga0ojnhv5SGOqiMXyFBiERwQ6CC7FzqS1FoF/g4kSx14LD7th+eWG9XWKaVcigsK95dnPHKkoNylpaBaobushC8cZY5PsnaoovlsPqlJJ3qU7Or7hQDeUl7x9PIRBNy6HHBaGaYMzCEQDN3+GjB79nQEJl+KMhLi0p5/AgTxSaWNO9ve3XEyuWma4FV3DxnTjmAe4b9TE+etq5iNTLvzvCrYheiA71isFIoEMF56AERS6oL8X2ZpXrzkzAX86n7V5kyhp4LBYJCfvKIxlnABrqmjwJWPFr14LB7K98sK+UES9WfXtzsa2m/MZ8CWqD77840ls02qtGS+edRxDpxfLpqnxxCct47TDTM+2zORBqvPWBZu/AIKcrKr7G
vmFMwtfhvSwKH2MtayafKzc6awg7A1HksqXE6gxC4VQZBAmEltCGg6naqNj5BYkKer8mi+/TcosQEc6dZbyNZ7VuQ37es9WfXvMwmp6TYqIxregcGWm5E2pzIUTdI6tfDGwR3PJbsCjzKRCmN5HhTUst3AwJ4i5S+D2Wa0185SubdXF06/Km3uxl4A0IYhJ0K0Y4AIHTq8tztOhF1wSO/eaWZGqeRJY2NnZU5WlLcqUk6TRc0XGeik8ZoCrPI1G97HRRo6wzhGZ0PEieaNiztjgFLPzeJ66LyWlC33uVXOq2YV7b3WshNdUOE4jchdMBIsnMfCEM7w9uVzz9xdYDhNRA/GoLn63Z/tTjgarsJUIM0H9I8gbOFSGO1rbE2PdVW73Hpa7PV0emQeXJdaU5jEW+Y+diCGzaVLqU8C/Du4FEXsLy4hJnQdjzPZLS2yuvPEoZ1WRRd6etobGeiVeG+BetOIX5sRGMAXgnJoKKfD/UmuJ5UDKMKys+HVXRf3gSsq5R7X6vcIIKJJwwBXgYlx5HP78jA824ThT8k871/jDIcF7sgo9acnyXzaWdgkGILtS2fbXoveUu8tBhNoZWmDJSedcNry3pBTUUehCQZqlHHhH19C7G6fPj4B1RClcglsou1Gr00mgZYFjgphOGoJX6EYTyI/awW3IBVzURsVavWnxfYzhuH9ZMzCwqHk15gI7OdncC2zZldVOg+xe7eXoenTsJrFz1chOGVEM/ZK9Sw3jRhcAuMt+HaY2g4sApI1TvcQ60oKET/DuJwbZ+8YoIah9SLyR3ItAdnSi8LULuHrTm7eO2ic8ivKVuMHHzBPetVzKFhce2M5FWrz2HxBQAe8VJzdPN+LjnDyoBCFTtA9seheTqSLj0z850xkwNcFp16bzrsfmjPyPh7uyasdbNdDxKbH2bAbuXcBXySK4ubbMp9MnsmmIq7PzwRGTc7KcOMaAvSLlEnALtqgXKW+6iAF7uudkQrbMAf/vx3THZp1ptuj9aQVytHUqeZ/1IaN0jM0Nhvup9bxU4pTGmQm4qfeus7XXQNT2pqNRjINQNMXhn99p5J2sBM8uM39RNuE/3KYOJo10gK4o7+eYfopGZQwB6phNO9lUfxne8v6lfzX1NeLTbu+Jv43twi+SLzZXaqUhyMQ4E6oVkNc4xocM4VUxGySxdslZ2Py+hQuOSKq5sIBjv51y2O4qpd2jZEaoOn5FLrUxsci5oSYwtLlK8JwVNbK4T5KVmG4Om3EvFg0341mHXiWX+7k+u7bJuWOA2Gmmv6AhMJleVOp+k9jwTf0rQrjnlnlmtsDlEhuPqI0ImDtGyu0eHgZsZL4k+cie2qbWP7mPWCZDWiUzivWSPFz7kh7pRUmGlNLFH7h04ig2US1cveD4LZlDCLc1ArpU5fbNSlcpnl17MjBNCxeVJBQpShvOdkJ3rJg2TQMM7hgvLLKRuVXc3BzBIGjC6O7fUjGZm1AR1FEg1s51VOCipZ2tnwR3E4RJ2RMjPiaAQuQhZ/kLD0ukJhmZd2vkRj02r0vugbG+61BoSwwFFQyHGXFPTYeXvuqQ4F78uyR20nNJVbtKUIFQUx8HCAEyplGjQD0IMpqpbIWbevKWAI9Wbg+Vi8gk8oiXmN7P7WnzT7ByjHUZrjQ3mQcjdI0bj87sbowhHmDz5DDF9r9HR1bjCbzjx3klrU2Eyts0lV3zUnv2JKR51Jva6wDiho+W6q6+JsjRQr/24v+pAlj6MtzQItjafj8H3xcnppafW6gFYhlqHGDQqJMbMAiIkSgZn26c8IutLMeD1ZuXE2qYC5Sh+PUym7NwkVB+g/PlILMtIt5gJTOH+uWmZKdmDwGzfwBLtyYQ78eSj/CQaNMaeHH1eDER2LTK2H+KpmBefhhCinUBeFNgFmQTczUa0ybeDZTD/0tKsX9Xo0FU26FBcY4JyN1nb03Gme7Gf3mT5wkJTNDTnbjpXOPWzeqdYa77D/CUd4tnfn+lLKyU6
WCcsiK7Stnr+ghb9CQifhlMIeZ9dYER2B2sg9lIRsPORgbe71VUOIj1YQFNpP/Spsuv7GgJN1YlvMpRZxTKywoqTSzY81kAYRyjYnUEK50tO0DEDX/PqsdmUHAiOyK0eplVPmtyHs/jxCBdBu6A9F7hOZjWjmgmD3Ie0A8yHMvJBSTiqaarJR+eWIb2rxJ1krLckfY1lHu9PPwHovTWwrr0Tv2Il6MZpR1uHyMNOPgqwMLinufFMMCBHGw66Nou4nbdkuSyZeCsE2blkGws9XCvI72BlJnVSa3NTs2nP87gtre+SCcQY3Oxynz6SoGgmW7zCgI5dJ6AXOdoktgsaOcAXtQatVHNV6lxgslUwOPV48Gsc3E/0rpWmleYXIfHpfDqUl8GG8/ylWZaSt14nn18kMm6ITDZvUqULsZL6Ks4ydVjk3rtjrgL8BkQ8jky8KfngQca1A7VQro4iERMa9mzet9kn784TeXUt5EQuu3JX/jXQSSNVaA/v+GhumOX45K0jkFWyOvf2Xm5fUpnjQmTxKz1hP00L2HjGY5aTWRrpvSOmL7PcN0QMOZEESGpjb47AQRMWyZ17eFU+TuBWGI58KLRSOLX7ZoMtK2BGAdXeYlzrnqLGFlAwGVwT2ttASpKOLbACdgXDQ39VskJEIk+Khjs1VlzfxpUtw9P1lcFDt9yH5ZKKXly40Pxyv7dllwTewoF6Wy/xPEXc3Qe1FgsSCU/g27ZuHRa39/pl/yOzHOOcpuqzHjXQQdRus7w1g0iChXJr2eGVIPTumc9P61YJzSYWs4Ep01GAfcKs7GlydBq309aaANCQkZxb4tAmmQv0czaM5u0jxwVB84I2NKBZOII54Ka4+wtV7cw1I4hYA55IGx7H7zL4pqzYrte/7GCnOrEY/tomYIGM9aCnLK431Lu9iOxAU3WgVo2ANsQBcK+SMIZU4mcNFqct0hlt+V3CCshgb7L3S+KnXoyl0RDuqKAX80cr2PWF29CdZ22yjR9IBQdd82ytBIil1IPGQy99xc3bquOC8Z4uZW/r8zNpudsePqrOiSJElyn0cWJpFuYbXN6Uh4NHzzuqvg9QKSo2yk2qztaRs/yx2FOqsAgGU2T5oBgSIqFVHmdB5+sy0eP+kmcRgpYh/FBS96zbw9sW9xOJoKQVzP5FI3GeHAT4UV4KlISQmlxWiUv0leRG9GL2lsZEcYkkdtXZSRUGip5NAmmKd1Qn048H/AkQRqt/Q/D7OZQDQUbci6bGxCHknEcaA3rQIGsZhJKlBYK7HEfsF5B3IDZpsIa+8iggyi3XZ9BwrJALsFO37lx/A/frF+MlG822qMYI0KQd0hzFAFl02+lsTm+wKi98edzUN9LogV1yWTfel8WeVBAcjdZf0H1/MXyNwO+2z7KgadjBOjX4s8g4nK5qq8TmZUJlog4VmlEaKwManfk75IcANksQBwdrjTwpt5bDYid19/r3ZAehTjyoZeYY/zpDx6kZHbt1TPHwqsVZ8u7mxai2AWbPyYyXRJRfSn53YbnqB3Vkx3VCVbjEJXxTnnHmQIjomKVuPPoScZshEblA7E3kbLbK90BFFUn0bFf67MDX6kV0M2EzzVefPqgsFfDrRqvOc5L+iAEKnLL51mzdJuYSbHowDQFIoU0jJrV5v9fk5ThvsHmEtU/tBceGlblUfDe1B8xZqD5y/OutXekhNCSEO9ip4aFiy1lXJnukuMEL+DTcPFrDqCdoFw/mUqILhO1p7J1588pcOLwqzjLBEfSyPVf37nHyNBusDG42qFYmNgo4vhuSKtE01VKPuhaTUAB4ww3vw6Iw3chqxQ0fgeiqWy1JYy7VeXh26cyF/PYuO3y5o1xElFovss97zlv6govpfuj9Sd4urgwmY0FLJbIRWrqKomHgdyo9RAspKXo/jkN9YSBEXjwxoRAJYjAeRfjOq/BxWK/jTvf3T+0uU6RepnQJeh8ncTOOs2Sk+yuyg7Mwdx1rdascF7pzI9B8
1BBIuLkP/F4FxsRTIgXCnSUZhI6JKEi0Q3YNftieb2O5AlY60cgfZ0/XD8+FerKSQEa9n2Sn3C9mwPLG2W16muCYn7jq9LrTSIIZEwknghzi8XKPIvO8slNKz64zpezT/nwUu00lNw123y21xtoCVgB5s5/yzy6gyVOHxmkoggmuG9Om2JEsaHWjnctf6FvHc8MGm+pewCTkW75iPjMN0r31OBFWXRdA2Mknh675TmGWbywbhiAY8+wgX75SpECg8mi3c6SZI31Ezx8A0Cbqkl0A6HFn0l4ztt+M8Q0WNABMDzHboXkDoHqMpXFBIF15EawOYYYMVEX/PqgN1DEs6ZpOpTWPi3wsgHe8eL1HDawKuo+ctTczHzHihaFLWw3iAQd91FhZmGx+mUT6yaRL5AmXRhvXwTFb68wuinhveWqVp4l/Y6mor+sLeQvj548SLvL0vDGaNK69dxLCBRji+bRR90v2A1C+v7R9LJ7JE4/CN375WOdfjjEY09z05ivA52olg2dMSJscwA8peEjVarQM+N7OoVVRpBWGqZ1keiqe986tToPFRS7lyulveJ8cU87wCM0Zo/OokmMmrLiVpconteaNBbf2jEvAHefq0hvtvngrGSIdaE1krKq9z43KQXivbSspXt7s2O4HF7Fae7iNOTxAt+djlEiLPK5aIMrMszPi7DZkRgIZNYaT79a6Ho88HiPLwmQIk0bswXfNQHa2iT0StK6qTs8XAN59SOFBS7ubLt4CDJEJhqrA327MNWxu6Rl/dIDWLK+BGrLLm8jrAPslGt16XOKrMRvTjQMCCbMj0rI82YmZIjUVJtQRdkI7zuTUwmuZcozV6xXDVHck2WeaXvXoYuaZD3pYsxVQxBolzzJJ1SstfehQLv2nHohc80RyK+XkKht8yW0oG1f5xJyAQlCWs1qTzRqyIqQoT4h2590GrKGRYTi1VoBoClt7Bmp4/1bHzfNmyhyIb5BlHY5zXrduuDS/g+cLPR4rcjEH8pGGgaVKliy/SsfywmKtAqqQ4xUTdyKHSlJ35krYuCbeRonZhvp0iWjodjMUXdqT961bgPiTPYjoR5N4ssctY4M3gns4bXaPIJnert89tSIEDFE0CpR9wJpBDucM+k0z/IBCJWB22gEVLnDoFVvAPR9tqj04OZhKrGJfQ3GkoItgTzsR3tLl7u+P8u3ZwJbNDDaCBJcCcVUaG0PG8+iLiqyTITRVlZPuR+9KOdx1Nx4W1foIdsOSeYuKnMz3e+bO2uyl1DSE2ein+d61jNoE1N0w2WMKgAG34/VGKnI6n7WNOsYhLvjbCp5vroSTF45AwzA1cUU7i0FugtwaL42+IfKkR65Xmu+Tmvr8RmCHP/ChdU1JJWRIWBKUoXyNeTZ1bsdqvOon8HA/Pgti9NSsuNHJrD1ZFjGsLi4U75040xbl26PFyCXWi5XrEGDCYDoddDhpOwW6VWgJSpB5tUrhYTBvTCz5lGKaLXlHISy5p+5Li4tZRvmMYHbMG40HV9vOSCGWZgL+bEyEegThWA8k2j/RqtXRaVkvowHQWmwJkyBqosjEEakVOVCkcoQAVJ1ngZvBax36ldK2AIKC+EE/cB6lj3BKbLEZ4v2oLRP8iVJ2YwtRXV/Mjlx6JfI2b91tBTEJnK55WCVbLOfLfoAh9EimQyuxqXyFVY4fXkcdUAHweWji9R5pobRPbmPfjDetTMp4Nug5B2u+GMSpS8J9V7elWSp3Z++7ldWjktB6hTnaeI/ITi3sltZR6oL9F3x/SY/Xfil+mqKCBdq4B/KOVr5k7phDxUspZy+jiqFZQN8VtlS5qjfiG0LDXiC4lx9qy34SHlnnNBk+2cLeAMuQIhB2dDwLxuIhHkR5wtoCvMttwyMjM7G0PcnWGz8iIDF0T1ot4im8pxfg4Em0r4B54AnQVehwzzaPZ7KXE7J4nXeR5qH9FTWkmx628XLDPY2q6OXFsTlJTDOePWZRt+VYTbWk7x2wOgQX
B9e8LSwWKobGKX7LcsRb3UcEPQkFLHm35F49pYd4Fp5wkYo96uNTiTcSRJkLkXcpJ3QAk29CCfIFVsA3YNS7YzfQUzxmzr2hfkbXaeIKe0XhZysNPaOTIU5QJZ3zVABQVRQ8XggeSsIfN8AL+ZJ/TeUYddARqyk2lriaLp496Hl3F9aBLa685MFqeq198AMjv1qHPqGv9psh+RZYWfNn9RXQ1X2D0XcTOix73RLx1PSsLMKBlG7TVG6uT7A2ghkGG3sH1ZnZi4HfoAEnnwllhO4nlxynvm9nJ3esFY9ON+CSS8dNRZjSLN9Mw0ER7N1HEwnscHuj9P2NfrahjdFCBEojCPIxg5msQ+QwTcYy0sD1tHLpfMwL9n3DCqWwxzgQLyxosaUnhE/nefE21eNeg/6OTmvEMO4hvBqTfOK7YiqP+KonUEd3nj/7UXajapEwvRP1XivNP+O1qdibZHDwlFFJfa5bwA/+aIeqWT5Sh0cBgjUUCIHQsTfEe46vY/YdJSRqB8XLchhukRcmjrlCOJJbyvs6c9bTE6mWSCPbaVFBCAJ2l2zdnba9mEf3i+wD0O2QxO9K79YQJaDneeNnE36/6/ukx+f9s10M1J4kgJpvv7EyonCOWVMJZdaBXsjNgrexE3Pe5uQFx+/REgQclkhWJvFoXGS1lxmMJ1z6Ojq95G03qTo9bFHyO/+WfE16qCWhX+wcQ7VlCUFAZAkxvSLUIzA54BMZk/l8Ws4XueXtY7Xb/TmDr9qKB9ls/fwko9kWScgAX0xokNwZoAatKr7ble3+dt+xtSZ9HaMGS1karV5FVw8UfoLfTCvNGaFIKGBIKFmP8gPschJxeR5Zk8PPiI+SNpED7Y7LwgNLRkPyGzh2TMp5q2PR5TWR6tRSJ5uMCC+fm9jMWEBA5H51SEfjzUW/e8CE6jiNXFr78GtOoeqtFnQXuhfwVzDd517mKzWKGc6XOTqQKMekVY4mvbsLApcLzLMCA8Oy1iDxwyj1js95v1TBvq3AxYEz/dPwqztVy7lNoKarRlUCt7iEGt2VfSCmP0KY9IIOet8s/Vk5+UZhmaK6qQm+QcAB2r4QYorip3PjU9FV4DmmHq+JRN+mR/r34uVVaE0OdXl7phL9cNxOgjuWqOb1FeZPbeLxmQV9C/a4VJgS3MvB7rieQvv6OS2iZApIRWykv64JfnG+rAjkNVKxKkZr03pwes7f4dPb3qKvvo/aC/cmAcn55E7EurgcvjlTjVCrNqpwlNLZTUtDWXurlTiLQSQVEXcjPTSfbsnyru2UMMhMrShvK5LIfNHrvg1o3hGwmz0qb54CYkjSd1twozxBJzbg2n22rOvgOOqgtaXPJwCXiaZBHnBbPfHxuWDmAhrpI3KWydhPDqHzm5sut6BeGsNlMElOlAZB2rJvppBGNH05p4HGBo/KqacPtlIwDlCJMiiDzqzCzz3p0YZEXxEB1ppw3izQE3I7+UZPkEC+NDT26z3YXqEMUjB1kLiCo8gpleGWOBC3YcDgHa8R3jr6JgLROZ0+ubaMJKQrzjERXDyeVPUxl7o4zOReExglAl0mRCNYNnVWNOji01+rFr30R0GzRJqsdrxzk/W+qJCIVG++PO4pVtFB+Gmd0UVQNCKZedIiDM5zZ/wppbyzpqkNBhxZoo947mWamukRCqIwAl6Uqy53ILDlc2z2bbkwLOTWbQS45DnO0zy5PMeesi/DcjSNuT/ZqYpkPdk0JGt2/KG8yFwGW/bZqUFJfQeaSUKrIuMLSEM3NXvCQ1u4d8KOYnmXWqewPFKy0OjsmFsnn2R0jwPu7tOgYyHxNv+SH6A9yoYnVGliZS5KzliGQTodrldT6pXKu5oi7A76QBZPYC3kA8zJdg+0g0gycWFP5Ib3PlXNBbQVnIAFutUQa76W/AAl09bPVuCdmGiIaGlkejl6NnwPF3Hy38uikZ2+DAMipC4VgLW/qTAO8OecxUtOxNwWDO3sxL5I0Dlb4BYFH+Y+
rkTAg6Y9HlvKoG80UuvxW+7eSvq4ND+qvfsLLnp5TT0O7R8egDNBiwAVodnorODayNXQC5IwJtNcz5QKSWFuVbPJ04XLBqK31kvkZ6GY2K/mgXMPEsaLnKg6OSQyy6AuZcJbC+RPcqkY1pCn6114gvU3Q70id6aDUkUNaFMvWeIE4OGnxQHdPzTDHbTTZ74szSA1XzzQCY8P82dAxXrb5Jw8W5u+Ch5dreGsltZrjcsUb5bgRVjYAmsJlAL5vsGwDr3TZ4RuIpTty0QEsQn3o11ywIpcEaRuDwItqI9V4/iDaaZHs7uB09wvlz4Q1GzEgASxtIjRigxQCI239LnadD3o3rjxnMjnVoi91+rSU1hgrDdoyOH2Fowz8t2XRTB57qzCUPevVXjTCzF/pd+On2V6kD5Ks4AXG8+Nsd6PrL3zOUpnedXF7zOqfHvR0IrPXPDklMjFsRG/zjDMcvxrQR8fbdDdvz3x/nWkOoq36JVnFTDkWy9zJ19DGY7pkEPR8Ha+DEJSHCZha3NMepmMAaNHwofdm5APmtvk9javVNOe4oO0jTW7Zo8q5FaPl5hKsZptmHkEJcCIXpuirdabLe9zWtSIVmHg4apn2t2Rpfed4IWCpSwHEwVlCkKEKKsXC9fmyzmotZ/1W0LHIzRz0dbpRDKBJTmmpDcrIXi5AnmDjxxhQ278Pt4eSaRCDmOtHdwOlgI3ML1GbSVy5RReHQlsQWwT8KQ+Vh6ocdPDoIdHuBHV1E3OSLC565IkqLiONFA+AQPvnM2Buq5NOnDTX7BHJrWtEavQXLybESqAwU+ctbAQRO0NenR3+7aKLiWsGgQRXbq3i0yadiIGa5wEl1zuXv2wQxNn3B/1rc8MP16hrnci6Og9Y1xZiaaM+XLQcjNNVM2aERnhhzYpxlZa+jnFVHEl/PsrTN7HVcCRWy5Vu0ptshvxCLWQAdkY7KU+DGbuhU44nBSvmI10og1zAi26uHZwwvrr08CGLwJjuxwd9q9gxaOpk2M4HVQXjXsJEunIfuo4RN+FlO4e/YHtEltSXH2DbKeZ7aqDGOgSl/6WtVwuH7gTs08dm/Vr4nOAM4eve0aTT0bk6BynyD5qgGwfPlyxTs0odsYRtinfmSQbIQPunVwJ78eOe/KAfbl2M9kWYRY4uq+PuXK8tMEAyNYvfCdqAR3aqtLPjW/IO4FG7hlK9bOLY7BVJnvGcNFzb1pPJoxfxwFwlrmqilStAn9A6U2w6U8FWFEXhqfqUkaonhwWZlLbZNOQekw+llMqnVQz5XHSZD9Z4rlx9XJPkUfAJoNeKtiAIGt3N5cwHtL7lOhvi7tPBd/djunpDUYZIl8NtxkdzxvS5HoRnO98cper8iQWkHYXiz6PeNfNiwW879ELhmAQ551LNMabWkCH4bYdWxT07xr8bQGb+vL92kiasqyOCaWIoIf5wxHWCET9+E7jpL+RexgR7g10+qAdKE2Y4MiqiYEaDcgovq8Lv13D4lWsL+ei2a12IqzM4kZ5XwiVk5U23d17eZUTQCMRP6kCYUWK0HBV3LjNJSyHcDurK4zIlWXz6bFZF67dNazOVpb0YDkgPFApOs/cnawN7C7UCYhaAz4Ce2AuZKchwso/ioShX4TaquVyz50Vvz3JpSH0PuyjdvAj5FDSddLeucMPyPrAxN3IdhcPHkKg8+HZu43hv4TWwgnUy1ZJ98MYqGZ2PV43YKzSJFDLNNeoPRZR3N/bw6Soq/rQiYYDNrbKNaDsAR9TueOpmTls2SKNXp2ZRj51o7dlX04Uwf+/jnDx2oxt6cS8wEcN5ukBJqyYH1oCzwe0ECcLUXR4C74cJeYdpCsp0iTDFxxvQ561xBwJor4Q8VV/lpEwGrA9ZWih8xIZecs4lufpRhdG/r2ennYvEh87IU11YxI5y0gqvNXPM9HQN366kjEm3chKuKM4T9rhOqE91H6wZSSccWkZlZZ+CXDgZ1eq9msvLbGCofADo6RyMmMfh8Dq
8UzzSYE03TBYeSc2t6ZA23EKjDGKi/Eau/fB4b9B+tkPNocbPRt1+FhzmQLyN7bb6bI3UBAXuyUs11IwQYWu6gHwlJobQsMNdWk75GJ23oiK0FsYywW6CkzRbMtDv9emSboZvvarOmbpELuQeUvDbEDeyu0Ovnd+TYPZ5Jqrs4Ka1duliZDcMQtBu5EBDakN5FoCCdCyr9O2YXNQ8ni4iUY6afP87DCm+zQsSByqY3eZ8talZTBt7YnwgxY3bn96V+Sa7uQLVJKUXtej0S3Jxj3i+AWbDg/k14kUcWPmqRV0LeCbTfdsRFr8rAzdI19nB3usI7cZA9Za8oxxtVQ1Phseln5LgphNXqQP0zR3XMr/s4EW17fLioA+xzekexcVOLmjEVpSxc9aZ7CGo+NhYpd3D0eEzsSsQDlTdpbMXvc6hPuEAQ8y7xaKi9nog43hrD/hcgPkQYfl2jrlfBcpKljPThPexGjI+ahxvkWrQ4YufIUflQg0NOg4HW/MrE2wauD37kgczSUr7yMZmV8xXU4UejdVA3ZeM+qspZGC+3k8sIMTUL4oQI+gVoC1cf95QgdYZDSukmmYxoOjTGTcAkdlsU188AvFhPgBU+0SdKeBR49uoGVfaRHSD95Q+RagftE3OwGL4WKq7iVKPRQEBbhGogPmGPWnJeXIQYWFl7ahE9UfEehh58IZDzzGUDVbsT554v6mrS673CzkXoKENzuS6bfF7HMbyxJQHzAnRd/aRauyoD2T4gBjHCu/weaNC9JVrzrsRAP2CYk2D/AW+3wpfMypQr+USEhqPJ8YwYf74GQ5TWinmi4zQRvFavYCL2S8DhLtEAXAgSvSCFEuy3IgkfQiTV4q+g81dbZBJXJkqFeTOByIAnchgq9VigQGT4SrZEgZUPgCqgf9DI7gii8/IOnkAqrpeGr6W0DKyxaoy6WuadYKm1/pt+5973X3EFOnNoMft0aTKcnBB3eVqkFEXuCLI9LnK/m4iaXv4hkCbrzvwzNLn8aa+0npePMG+jrrkKLPhxkA6qYY4ytnKJ8lCYy5x35gaSZ+scsow3NnCSnNFRHMkLFuaKDJUWSseGsHC6QNDr/JEgEJRLFH3+SKtgQ3MP1DDLWITvTmxLvJ8nqaEUzRcNYE4VDyxat7TPEUfIdeWJPJgKtAOnbhYAQbJV4Lcvfac1m2i01SNo7KkgzMGbUhmqnwSp/P5yOVLpS4sUGkYXldz2aXIvk20XDxul+FDSw3bDKhtZV6PxzMTnXelPfsKgNQYIlzEt/eiZFID8vKpSKQQ2RMw2n3h8YB4U55DBwEzfOgHwVZbHcbsvRAPg4IrogcveXoc0ovsRbdRPJGRYqRkT05aIGto1QyIVjAZ97ywEoEqi6VemeC5kY18Z5Zj2xb5iLfQSSeoKXDOlR+0PpCHYLuo/SJbzL0DHijt5JU4imQEsztDfZnGYSQvMn3eMunN52p0zLcSHd2ESqM03Dj7aVGcLaE5AVEewvfsXgSOqeI7/kglTkf722eASBITZ64+Jz54hxvOzOFaSAAJkcVE9lWZ2CfGfYeURf0nfjeeqY+WPvmRzh+x2mGDFuYYm9/RiQc2fBPn3SeSABPDHqHEaS3KRHBA+Mm6HEr6hgNtlIUeWDq/MM4BbcNjm7bnsFgaYsOAkgiIuaIx+QZRVTSPRDjktm9rCe4E7QK6cZm8CoFmXHpVDigT8tKFtVFqY9p6YU2xU1X5Mmsum8tyx4tm/BMG9QwQdk3I8fB8ss95C7YBXvpIlKGVDtKG6Xrw3dmDA9yQBo4Ny5m9nos8sNo9rTgtmEkc+9sJGTrLlqHjnCN0ZTZ4Dtmr0Sv3iStY4sbHlaBQz4cGODvAHVQ4T+qk5P2M1XTRXiDfHeGTFNhXidA1X9dAO0VADFaqwRcZOaCt5YboN4VYS+4fEm+xcF5l1RBwyQeto80YzJtDgqSCvh2rPHJO/QZXVh1h72jtbH1JXp3KpRL+R7MOY5QE1+NYklq
zaJgEfX5+q6qixyFMgvrP6DnFmksya0lldZuuROBVEkyhMvhnFrheFKW9MQ8J8f4pY5u7kIwr3uuFO3GdNLCLJ2PN2AP1iXLmkzYeKfBFiwM4IWgwjF4viB6esZrB02kJlc3IdkYWHygOwCH7nU4L+lRYdr5himoNDB8iBOwGsI+5Rnu47cHIFikWRrg7sspnheXJWmUdEt9blRqdwJ9a6gmRRV6dulYdxctOt4o7jNgu4LIq3MVbYEGP5it5t5o1X/U1V6z78lYx9pkbSoJarTSdlD5XnoU4CVm0qQN1bcmnZ799F4Qy5aYxGh9bIyMR66CTfZuCcWfDZFLylAFs0lELbYVou1Ds5gkJ1zsN/Ax3DibC+dsrVlBknW7YWlwRAa+0GLbabr3UJdBwcmb9rCJMQr5XH9yZK/C2OkwsLbb3IzY8ugN5mdDWZG6YaWb6LYuEiFWgJs+12Yd7Rq72DI9WrNU4TJimBuNPFdpVDw7zIToVrE5pkiy39VmSqUREtOEx6bw4eLTpVjiyY2AhkgCr3LotG+h2qdhI1ne9oLrOya4fustlyC+wSgqD6JxwWBt9JkUjPQVKJ9IP6SK4IYiWdWitctLEecv2nAKmSuoBnRacvr8LOUM9dmuJSpeeNmwhfX/sXUVi44CWfSXcFlCcHcIO5wQ3OHrh8rrmdUsW4LVlXOuGpjyAO222jZQOMfW74K7MCoiYCnBaBoEVQ1NQNu6ZhetJlEp2jXjBYKbUK32CjAiOQ0NRmGwHqnNTQ4iLNubTjjI7WbdGbZv+9loEe+OXVBxZvzoL26X+2mxxy+t1+qg8C7UHZi6oCyL0vQWXdbYZMO5tQqStVIEwcpnFNGjTdiD0d+hentOrZPnd1HMb+KOr0pqsu3MW4lnR1BKoBXrBy3n5TyCxoxyW+sF0h5bpG1dLaBnPNi1AveWPsmNz+wFAKrwd2iBEKX5fjhIFG8I6a1qsnoTpSnnkeV2SGkAIXzUCqjDYym31KJdtBb29IFaq6qF7qYguDVFPnBTsnG+y1v5VisjLQzMklW+VJeWd/XNvdlGh71qSEPaawvTmjVsqNOiLwDCzIcVxTvwKOA+2gT9Ws8K156Ea40Xw381JpGUS8cvvHVgK/zQ40fjNcRxFjGYyoiZ06yiHtQpp+X02NN0RtpxvtXx27Z+1nNkoPlMF2CbmAvObmiFPsDJSM0P76UwtLxP6kgU3nY1EP5xciqm8UBgxhLMGhQErO/t8eGs/SCfHyqBbwZHH/MlVi9I4XFcoqhkstCwoN4wp2VB8nPZta8DzWbsVr/mN7KAIiY3RS71lQOJg32yqTwTyRVCJZbn+4/DmySQKWbpNl4AghEgQhi/q5FKAbPc1bZjYyt/Anzg4tSLJ+EvEfPRlj58oGAFh8KuzUZie9EB4rPNJhZqIHGrllD+83nud/h6wb5B4DaqCSqKHqMMp/ny0BDQbIEXrJ9oLTOIRJEOVL0m4kA3kWcgxzqEzzt+3AcVggvfV81Do26eQHWD9pu3ry8JxMnLE/ysPigU5+3qpxHjfYiD0wuFTaJUhNseoHk05iOA2/X2+1WkNRN9TfNsS7mjxg41g6tea28MY7+k/dlJmvNm3fd5Q4hW63qqIkoNTV/PLL2KNx/XsW0u7ZVDiKi/hPbmL+FexxaxGmFj2fc4bhmmUwTnKdzGb+9bS4ZG3ozoggDnunM3UyRBpOnysXcAdBPH26POco8OrCkGt59Obu5E1AQRnSJWfrVxQHHc100gsfSSAnmEQW1Ku8+jYcdHZsbJR68RKyi4pjo6/oGSXoiLU1dQk7mv0+Gh36rTFBNB3RbWqmEnBudCLlS+TZtVmfYB97Vj3CxoJxO237ikezWOIPwYJ7JHMbNJzhS/eREE2sr827oVqGfMfhmfts73Lxn03fPhLqQrkLhlSTOGcdNabtgqSgLy5HQJMCQSc5VPXzrKfLfC9GLPmcFLnq+qPR4AH2n1d7dabgjJ9V0vZLNQHgpF
dFPNOgjEmZGKFHo/aIwrvZr7OTtl2n9lfeCJMRo/HtsyI7tbttU+51V9VjkRoS1Mer9wMnZhW/UPcbFh+7UjQdEVk3QIxwsHYLKhyL+zZl0ekbi7dRadDCfPHGFpkzuJwNCY4ZIU9MzCAAlN4AkssbEJEHuSZ4Lohqkl9JXLLu+1YZFem23LeeTsT770hSTLJTLlkzHj5jgyRUjCoRFqoNU0r1iZmWZudSOYaBD1VBAQZ828d7rtSqQaouQML0mdh7h7gT4SFwAnE367CKEKsdDIyMPsguMcP7PcyuUkXN4t/63db0vhuKvaXu77CthW72FkPZzrre+CHgVybpilZoDK7AcbbfVN9o4CmVpeMhs3Id/AaDD55RJVeqqNe9HXJCUkTdS3g6NRZ7ZlNHEEOjnsbWDW+6FfU1alKbhc+bhrLHsPazje5sIAqdpH9BUXj360xD1RK3mFuTHBYsq/npM8WOHsehjYn+Rsj4RCGwVI8K9zJYXfmWj3dNqGRbjBvkuU0F3GJJ65paewVZBwIoM3Oq5jigAmSZnCgLUnsoH4DpZzoippGlKyuEmgC9vxW3fKhxRs+/kJihpxDzxO6yi8v3MzofW0p5nsM2I6QW9NIZZE4Hus/ZgksDzsxz0y8QBDYdbhpSCTZeppld1zpV4LA+yUjGypIL5RQfx5ryEMEbxikJENQjg35Yfxh+DsoLV8TJ87oDfAbTNxIzsK2eiuOSCb55nepO0pDlXOvoFwUK7od2KKPX0c6xpsDzI6OBr3enzZnHX+wBpscDhVz4FG4tSKgHIHjdUbMgLRFwz+wBBz8u3e/gbdNELsBn6MvyBQO05SohoNmPJaxF+hNlPpVi/8AG4t4DSRcabYUFcXcSdTvehChbzS00Gf2y640n6T+PJ9eD7/V10xenmXOvxzZ6S3JHqAIm0HKs5GEPIwAuzr/Ii29guB0beEfwS2MsPhxEk2O7+bdXdUMoY2HdIUGwFk3NCPNKGbV6gMJEpXicJkqRyGAaa4zfCh5W5yg2FKafQWCcsyJWUK1e8erRYBPbQJPNTkbG/H1xOONKLT1t5+Ju7G+v2e78nOm7a+9fjNtRs0Qfc9HDpvlJhuemYcvjCMvADKYpmL4z7m8c7Z/KGDWmo7z4NotLWx0uFOVeF20yHWmhKzMRm35xkdC+kiOw9KNthI1f12PUsNdXbTsE1seGXvLy2+uOFCOZcbeK7ZvMq6WDbGvlzNNpFMMOvUXMCnXxfiwvAIgHxI73qOhBDEXIfzDV+48IZ1IJYbw2lHnayoSH7HG4l9MkSAjwZJ3Dccc3kmY0N+I2UifmXMwOusVLzyGITohPZDLd5+O7uTj20tLueJlPBoT0tNTqw185aftxZfGWt/2AON1c0+vEJu0IXkvoeLFXcL1atIzbRhJmXgMUIizmCQ0keCo13wQ+h7pvN4yyIbxfdnGE83RD/16CZvKHtMvGcW9wt3qwMDIRhtDbTTzgrjNVHH6+1hZ5p+Hl4/QFN3Bh+oFg/fHOdLIb+5D1u/z8skPLm0YYgBPl/YB+9NC2HSrwuM0qI+yUD54kJM2W+qXaMw5G9clrUkoAUFFO6+0NKuqSkKus5i4QObGUNg6Q/XJeZYlURsi6+ZjWunmG5vRhPo69q0MmvkRi2TKfkHOpJicWe1vnRTTdgtOd8uxlto63NBzXE1pA7plUuy1g3qUa4t6T2Q6y7m51QMObb1/luHGCXC/pVyMXfwIVVWV/tg0HY9IFnkOHi61IP6QYU9tnfj0Oc8fauL0AlCsspG9uGWTP7kbFpPGBT6HTHqwgIF13kwhzaUwIKwRT71cwZhVk3bgJVnuEtXiumooP97eC1Sk+btX0S0c+MhHCaMG2zpRV3i5KsEgydqyX/xDeKPgn1Uwbh/g6Kv6wcrVx4PLE3ohS38slZeSeZNYgfFS8tLL8zI8dbmeePX+6VTzGWj4Ef1NLSxRtJv4+xcrq6SIf2+howTaI9idRMBYBwnjeHD
a9PeAlvemTceOw8ptwIvwO7B5lh7a6/taJhCiVz41QX1/brs+/XySRtJq8GR4ecJ+E3HwhQEHaTsm2NUKmat0JzuRLYbSs4l41SGKi4oX1xjzV54GFjBpNpol6kRQHhvG/3LdhTqt67AdajUOV4nvYx2TDJMRYkgYi4cx2Ohl6s1ahKhh+KTfB8KsR1pGQLb6D+2FAa9kQsEMjUHIekLO8w+lj1OXA0i4/yIyfJ5C249G6ek94vspGcLpFXLzN5bmBA+KFPabjzR/LJ+/CulvoAQayz6fW2jHGfJImDHfp2ZTn5G/owc4QMG+7K4aFOyJT8+goOClQ64whmAaTzRsZ0ECy9zIr30FYX9zqk4sapCEuNdwdfd9INxGi6jR0LucanhEBAQdN7u4CO8EoxnbMH2CJypFspVrqqepw/oKGpPKQnKXzJJAOnPxy7H8uwsAntWFjM48yE/Vibp6O+MOogrbx7Uqbji6Y/LrCNGjz+wmNUDthhyyVUtd8ZUwmqfoqdyCg+LTQIjK77WrftHE35C/bt+LyO3xaK9mXby5GaVZ90dkNbtkePUx/SuN8ftuQ06eH7Ra4zhFnEkq4qWfiMuj2k5KfSaCxAtY0MbMLA5/85Rxb8/57rWg+5KiIMabvwYG/WRdCCPtHn53INBsBb+jC86YzHZS5VcoHiaiYlIfyWP/z/a75q/9TqDu4FaOVRMWXvo+RVKO6EemDCqYRT6rBN2VfMj8NtRfhMQqGunF+iQGMfcIYgQ485Lv+xVU3nC5EoD79N30Dr528DYML5RPJsTM+nsRnI5AuMOu2/Nh9W8rvv50uJLrI2PUzEpqHv+q2gxGtHQ4T6dvjVMHp6YigwZJxXbB8W4yxXFcbI7neo3pTPcEoChBW6cWw1VWF7nW/c3U/7Wu0grekie7kB+1A5NN6i4YK6mP7R0M0tr/HoOuzVa5pMV6J9F0c492795YT7mejn9+f4Y0Orq7R4ctQ8i9RLejuXviMzJzlKOLyv6E3nPnxIFYCqFXsIDZR58b1oXxb+g06XV6dup3IPRoU+sp4VRx+FnLewEdIhvYrcgXWt6pF9IMIreX3maVnnPLn+NNgxPgkiRg0MaJFDAxp7ERPSwW+C7mIzceehY0rGWe/uGBtKuOa9ZGserXlLB8kC0r4GXdFWgDY98aAw2TFD5KFsBmfqofmq+2YrJr6kvGiNUIB9c4wIGJf9SbEx0deeR0FD88dQSVF+hQvu84/maUMMGOHq/usxzly+Ks8kYl75fRju52WL/HM3bmW2JdXnpY19rusS38+uPaqL18wF5GGCXZ2fVMLJuP4IySiV8SjMQ7EzjU5A1YumxLBoiC1BB5dxSW2EhR6kJDPMWDgJIeKZ1wFcgD03IVzPvr+M3ohWmy814i+tRB++j+jYXPcpeR6pfByBI9uGA8LRk65FdwKP8huKYnIpwTR1lQ3DeJruyHHmhBRXnysMBCgn1cgqdzrBLCaUt4WFz6C0tDei7Gh9UW6/9JgzQt/Zx6IbAysY1SOMUG1lBL2e9k5IM34/ko1OGNoLfGeubwO+vYNENjfqgvUsWZgoL5bv99vSDVrL/YcscoNg1lW5Y03xPT2NhLwESOckaUe/6Y9sXnVm8q+NG6wXfvAGtUrTQaDa6inpiRRErc+LFbYQ5vsNc8XwIy7oRzHHRiuJd7DKJQc8H7IkhciEz39uzz6oC72O3N2MMSxnzXduEBnx0+aj8/SANw3WgSwJ/cJJvmCSxhQxJaXlf4AvkDqo49jdSa7oz64aiT9qttpS/VroOFH/u4WSWqvZ+kJxSyEEiiyUE/yX935WbI6ADETizIyLzzS+KLgTtEXrkkKb9nkK4MN9mMwLxcJCUZrRN56XGXd7xy+O/ek9ER23mZ1yh8qD6Ccl8giT1fVxccZVBmK51cH0Bocw+bjFo2EhwLxaLcUXLgPbSVa/wSIlvFByGjC22DmyN72A7Jv5XPRG7Dv9wTXVFmJSp6CS724W2
lPZA6lcDnafD0r96p7Moprcwg6TLBX+NmFrmGRIhgalju7GLeFXwd3SH04JWgdQWJmz9qvpvtGEivzhe9iSO4+BFPEcmq7iY55Fufk3MXasEzhs5G6ezf54+PH0Ytc/vQKOaQeJXJPtd5XGEQgSqQD7WL7+/USWwVLWFXA3QXRt+dHFgO/SAiD16FMzp30r53jwQJ/gU2eO+XKRNs9TH5G9mffIMHE5T6mfkM8wGkufnYH3Za1rVm7tZ30OaMh/ouYT/3uL5+TkdERdUyyGOGIbTJZYI4ENLKYJvw7n1eGzh6AV/EJLOHYre5pfc7QyfD65af+MkbFnp0Th5/NUgIT7yh/wQf7ceG16L6TJPYd4G5OUIzHN+WQKvVkG1E0EmZnv135GrXrwf08uWoIKAHPvYQ0EeyvwsUuRW0Pg78GktU5DX1f1KJMlSdmBhv0DYKK4io6HrPQtB9g96q9ZCUGZwleEwVpHPfy2+KpJNqNmInijTpyPur+Inir/xfT1v7b9PoAy9QHw9fADvgPog59AFAXNfxzYA9IgGn68Xg1w7QeCR6qVDmLqm9RkAfRVYHG+d9sgSxIo3ZBviImzzF5cd8zj9iuUITdtpGvzNLpBksrkRmvjD/dXCSerI7qxYEksm/828bUu495bYkct49RGweWVHnkzztuDru/hzkRO5ApDk8J68RqzpjOCw3okIuqkexAN7Lgocn6L+WgAnDc2wbb3X2nPgj4qHzlpp2Vux+PoNxlKwKXHdA1hB0Uca200VRYmjkQ+QlGqhXV9Ln0GJIqD7HqLRBpPrGEUiZ3ZSR2NrRLFLwoe2HIQWoJQdy/dpoSSju8N+BOeW5T+nENZCJKh74YOClwnlL4suNfYAYoreyIP9WJD3Zo97VvvIpXSESum+KmOxArazgOISJkjgTaGLRdXvCZVOSVCE1090Mxj0fo1W2O5dlfaKvy15ZnNXF1DIhxUY4jgdjBK6BRYh4Ht2hgHCWW7SB8tH0G0hqAICK5zt8yAm+OBJ5EJAyJK9gWHYpTeILRShJkSemtHmCB9WOQUUyCnsPJeDf52N28Mi8P6uAxTKGhMQCwn9h8firmYd0UOu/M8tLSIfNvZJ45MGtsIII85N5QO6PgJ0TqOBjaAGGsU7orVAL+LVcAfWXqIeGxk+lRFG6OIeDF8nFwdvPJO4VVunKyqaKnVCt+V46/wOem0R55pSs6KntNPj48fy0B1FZBPwr3mF1C+yck/WCAy1s7r23MXvj87SeBrHFsD3Ka6fdALehAH/xxXUChivJREf+BwMu53xb449TdAIwjr9tom0U3rnMCKQntLuHb2xUOiM+ow3lUiisUM+C/AZiZtktAVZ6+v8vbw9diTRtWp3lRAL99Z9nYg256IEeDRxcgoHwxLIC4UXGnlnglrBtdKdT+TZShPxioM0b0QGkcTwt7hmqw2j72/R9UYEehBA7ky46SrnStwJH0x7t7tKzsPFuURyD3TBg1y2HmsWTNf8il5HzU9BDLlrU2yPryOVjH5ACtJaYgj8u23uFNfcsqeHkZm0W3s+jwhglR/u1mKnXbKuDNXJ16oyOYe9BCy6yNI+zaKbBpGnxpl5tGc26MhOgYPeZM8sH2OZusTAf7PmJi96DbxY/sUh+NnVdegMBSPRvl6Um6pIFipIO8TpqhKp14+03rtooshUHPpCa3l/nviq/PfPM+6/fv41oHWSepQyOuTb5QlleIuEKur9fSN0NOEgIWhrExQX3kzXSKnxeiRl3SYanaLVc+G/7+8FfOkw0KCGLZxm3F49Ht213n5FAffLr2yDw2sVq94CLyxrLFLNJqmQ9fW7OE0P3CIpfQWVV1O9HbCr2LuKfryT2ocbPOQueEbMVIIHc2YpfrsyDajmrtzQn2c+smUBzy+BZC8Fe3sFErgehpAcE74WQgLGOjx+DQMEbKaeOhjbLHTH7AdlejYg2lYHRLLuAGnNIGo4Om75ABMGNsIcGR+st5rSm/z7
eMKvozkahdgABpbr49cNXep3ZoH8SgzlpqC+SvCjKTsamQL/G+jTd9kh6XMg8aIRcRh3DzgX6bsLomQOqETQP7RL9gbpxU0LrN5wnsJbQ5Idl+8N/eaz1vYSedd4nIHUzFt3cZx1E4dymPe+tr/0YvybkD9zwVeltLeF1ikFA2KnhGICUZTnE94S/ga1W3PvGOQHLusFrclXjNIk3P/6m7Noqi0iMzULIUEFIZBqrBcsTw21CP8Fc5umyYEh5KhsnvIHKT+ORyfkizVzFQ1vYKdz0E31c0uzJyypo9PK1p++jyUoBH6aNFu6k8E7CgOxed4kYrB0j+5Y5yetORUJl/burzqMDlotEs+Sg5ffCoKPLiClO/OoSMaX8BZgTHStQ+b6pgqgcUvDjrSAvCiNwVTnGr1NksqVloDoDNTfsY/MwNt3TNAp1HWFW6y+NyiUGa3PLD+wKd9kv2yO4obhyMNzyhwXc49wovTE+YVAIwRK5+poLP3bXO/PGhF3CpCrhX/WrFnB/HTBGtq8T9U0H73ok5PRssG+bCl1HHQqpPXbzzE0e6/Cv3Ip/R6No6wogV33Pv2s4fpyM4pOFkxdy73exgiW7qL9yHB4q9mZTLg2iMGG7wdoAfoIH+xxNoB2mjpWYy+EaOvfVlD8IE04gd9x1xoMjMJwpcE6UvgUoVLwd3kI1jT+5pOWHTZskil8PyoULtXSL73+Xdf0289UT+NJeX+au/Q1sfqzHj/x5/vcCW3lyv/+LjoFx0R+j2FQ5VFpHmLcDgOC79LYZAGdTXiCxgsIRLnS3tba++F0D52VC8U0FTBBxRdf2gd73yw2Ri2Z2ajxYNeKbaKf/l6C+enHv7tzaG4mmk6HBBcc+vJSmbBxKcv2fgVxl7Eh6PgJMlpD20ZrUeJsThgPTHwsOlW5Hydg9duNpx4kAxs78mvRQ2upGasBJpeMjreb9MVLdee3jt04u7aqKtFlQL+LscRJupwWao8zMGWBxTsrkR0UD+fPHkafI4YTHyhdKfJSJ38jGI81Dz1gfzhfLZzi0UG5yQPPsKsENvh7GxCB73+9r8Gck5dA3UHXoyLAFjqSzVGZdhY7IxRPQzOqWlHakZqqzZPTsZ73BXZNG9qM35nj2qEEC+fz16+ButX53UpHokuHOOe9DzW0XxNpBq4zCh6b3ZXNkku+1dr36nfKeDdQ/8EFxGqzzZLAt1m+l75aJd6EZrseA9EJRTzK8I/ZzeuEgw4OsHsDeZwDOqb3GAY20YNdGJBy0M9Xxw1P3Y5Mna8u5X9GyrqpI5CRlSS0BSYGU4YiYSoldXER1GhPbR4X+93Sk9ZHdNjoFOx1yE577qTZCH4Jp+K9TWLarF7zoCrHUbdDiATiEd3erc/Kw5M682CMARCUdFRx+1pUTwsIozVDjoCKGYFM+FRL/hSLfrJFNtNro0ki0EMdWQPO4bHxYnonN5gY+yW9d4N/90HBJo9XqiMtxyPHeLDaY9qswwfyL/J6Y06h8vUOYAtMixlWDNv0QdBACmBuIVSiqdOfH8iECQMMYFMeQrq2KuHYax1JExw8uAeMfq09QkoQHbGtUgtVtKWqGbZU+Ftar23IcGF/PDmIbsBemOIkqPpXYobeFD/8hCrA6jcFE7HmpwTWLwmL0GaQkRqPrm0SEQbejrykn+UQHFywoXF5Qo+8SCiI/nR9hLqh/L0pwhXH+cAF+4Seo8yAlQKa5QQYJhdZuue0DKUJhlvH7a6g5oxCzRGYa8XwJr7nXrbSy+5pS73zzVMde6R8v0EGWfA5DVRhPhB9x6HOWacz3F7scsIgRAA1BcCQJTg54wQzr0ZixJ0JBVnayA7ofWpfjX5LtyxKMFuBwowwd9e6jkUAlX0PbTexTQ+G8oRfITFFRtX5qqDX46o6cH+O2STu7G+8UL9pzRNldpONBwYTsYibu6fn3O24uNOf+OR4tdypa1N+vDmKFpYH/OpKxFNaZhG9l46HmBJVuxPAxJ6tdR5N
8pepjhRQq96Dmc71NBL2GNEWspkLbfKlrlSeMNLYKbBK5/oemd6kIpcn+lN0INCmZYLqU79WLSQml0sO78NIBu2D3/FtUndOlvUMI62Rus3Gvw1c4thDNOmb0skxl6TyrBpn4s/k+apCkAa/Bm6bcPlIsLLIwvefBu5nsiQvoam8y27Sgau9UgQT4ia3/75yZHFn/ApQrPhNwocpDpVJ+oa9Kc+4aBTjFmgXiLHvOq452WzJIMIY3ygvQhfZRgvGwv7rWpuvvGxEOUK7tX7Lagh/0U4Hx4Qwsi2UskFkD8nRmcr3yFN8TnQBohpRgRb9PJ/aWhLn3i8tyWXFrAWLFtn2gKHPwaHEC7F6tRPFXkoGUSJoPHZBini7T4CP2JjAM/lDeFkzxdu1jYBrB2zzW1xkO53WHbz1kIyayNWbEnpWPNwVRkXhdR0spCg1cWK9aiMqJTK+Vb3z5ns+0m9YNM0+UJ3YXt/iUv3H5klIOX2hHruG26B1xyIoT0b6aqawQfLOsIFYMP311aVubfeHz72ij2Av25LpmGxaZYukM42+0WmDl94HNmhRnu8Aupcfugov6UTYBYisljeiuifJEMWafNcW1JZeA2pRvE7MVfSbQW6+kEzYFNHsuBeUC9VwvlnIJ43fFhtQaMPCHv4q59D/2vfz0B9Uyp+vRRnZwxCCIWlu51fBAO2d7G9pOmwgakEmW4Dr94IHw7U0nk6a7FI9Sv6AiRW+lAF/q+QlWeCjVzX8EdjZNMY5i/BtFZFENAFoqDG4rOwolRi6H7WqbCw/TuhUp70O9dSUQslKcrz5tyNPtKVOv8wBz4kdyh++Md2/QucQWWyMCyJMg5Xj6sQo5lohx9CFHg1i2rfqAGozpB24yJvcWuj8crNCs/l1pYoRKUkFjDFCg/DM9qbeIXBw3UuZdSoc7vS07Iv4TS02woU55Sj8EnIzHUJNGSvXmjTguQyJUTxR2aU4RXECcSM5NA/zvAoI5i4EXmfzcpixoKUGjRKV0sGXX+T6RnK38gj3RN6fYkcqOc/jioBe34d8+bOTRAxqkJQMUhfJF7DpINtTSp5g3CTj1fg5EZlEx45l91tGspQDjMKy0N15SzJBm718+iG/+ZsjzJ/w2BFPvnfy5XwtrDBKjBeuiQqGPtjHyFMp5oJJynDt/UoniNfutyFs25uXEJvvQO27pVLRNPsR1sUTlPNy/R4xKOGmyebKuTo6GefTNMAqZns/7gp2zdGzHeuoVGFmyht8X4v+lLo05a359ecdzgjprr82nxexV3aOEYM+TgglyHpNaTKU9xEiQPvLgu71nPS/bi7RaaWx3T4A5NquRHW7GCHh2TC5SRRv9fpVBhIRWjZ1GvRd6Eghc21HXBavVIwknuA3RyQYG+vNKcjuYXWAG5lnXDg6oFDoXBMj+xGr/G8ej8mxr/4DKMIAJaufwTLcrtdv/Ex1YHvEnb5WKGsc81iXPgDWxKfzfNGWdT8wBUyHXLJKvv1fj2wep0FdEImVy6lccI1tVZ3pRQnD1l/rPN/tlqUi5HsqThiNv3mBWFUouaT1x+JcsUdBSo+EMfxF6by166DVmg0xXIthNnqwL8n7PZ1h7kBVbrOjuUxu0FCu5oImdA19JipiGBQyf0Mk4CrbOuhRUq2wyL8sqjLxyaugs8KXV9dQRlMEI73bTZsWX6af51SwyANYtlw026zs4vgsI8eaXgj6a1JBYDqbIl4ULK55ATdU33kkOPSwVj2ci+qVn/YWYWcmmzH5NoSLKtzBSgi6MJFP81pBcRaJNEJuqMsZf52shl4zM9Qd1OjzNyTdLFuE/PwkAyxJC4Wv2NYZKpQOfd9urNrcPIuJSDg4/Rh8F1yksQ9GHqKDbEl7YmPww6O54HyWra1dwU25AuulH4gAuShgKjeto+IR3iN5mvGyf+evhU3ZL4ag89n+kKDhPjRUfqyrSv+G7QyA+OXmXUeNevT8rBeikROEIgYqn15WyQXqgiCv6p47
deysr5NQoqDJxUDVsn7qNXHAmbwvH0olxPFMWdTFPgAJuSsNHjh92CyYfB0JH+eyI4tZtN1ApIvvlMGld+mb9QR8YAh/qIBCrSNWvl3dVu9HbRBo36Mm0NIKylRryOYDZpZu51dy+WrsagnyPaky/XbdGFq5Lb4QszmpoxyDJW8clId3b7yX8yOk3zvKtajwXRdGfE+00QYTos/QNkfN5WVRpKYjRTVzLuZJlfDXMTNIzzVJNYOOZ7pp51EImmU3WWJ3mjCk5QB9KgHHzKTPgN/QjzxzRIF7xbYhKZ0vkIG+wv6z0/fzDpN7/ybbyM/TRvd0SOUYY7hysLmxKpYLRekPRDZMhsivIp7XGOycmfU79fpZJnR7zrNsvskkQxs1QNRYdBJnNxOI+VK/HGMlH5L9Yy2wreRaMN5nxdHScmCQ5F7nGwiJkGcwVG/ztT2IulxkMO1fODYr7S1tdEAhN4uCI+Pu8n79hmuCy2mFOcbwTkNdaWBLGRTE92TWmXJqFT0g0oqaDy81n0a6v41ssl1tRCZYdyxMtkuX5vdGPX6K5Rk2MAfxilY0vdpCi45qrPKijcHvQNnMx2l9Duh1xL/ArgL5jb9hgNd3tWrSHaBobSJhqfqyJ3HcdLUvjV74wnXlSQlfRIK4nvBRjEI47mmYCDqkzW0T57nq6F80gnRoWjcRdZohQ5EtHu8OTEIArfeCYutzhAnQC3Vw4h18xzYPQK9YBO/O9ChvqVZ0Xd4dzFEhNpdtzLHuXjfI4PEErnkP47zDxqTNyMCQRtv6cO6DT/t8B2DvJ2R06K1DXrE80umLWBrzU8XDffUW6pnMg4lIIoMLpC/gTj6hb3zN8l5s4Rs4xzKC9gfiiWZ65+xCgC2SlSnaVCfvK320iQbvAKJaX5JtPxAOfwGDtTRREoWe6zZZ3Lo8ZGL0hl2cKMNwFLvPQLlakrJ2Vqm1t3fpNk9SCgIQpVehukbEWQ4XrkLJjP0wQhzkUT4bbS65QnO4TY/pXoODgqKX0WFFAOR6HnMc5+MDXhSMeAmRqP1qjoKrfK1Xg8ppLq4LnyYojqTiQ91Rrw774hNbGRdj4vbtEx9+12589qUFSVEKfa1ydsXHh5oUALYfYgnQoaaXflLharZHWNq9PqISDmcdCpaaA6C0W+dFUG7CyvmuKrp0SfuBN0jZ4BD6NbTPg2PTzta1HiGW0aDpBd+KSDj1DhpS2DQLp7MfoBx42W0ERuO5xKQQRb+uZoDSb5wkFnrNhbxNda/xiAxYIMlagfXcPe285U4TeoIWBvJxS39Tm39fpcXuYWLi8ihnbCtOCxjh4gsMNg4xI/oc4UILxM0n1ga8d6S9+BsyMwWjVaP0AUAsupK/OxJk0jIPhhlosYjSiSAiVvP3ZK/pXCpzApxPWXwPo6wX44G8AwesO3PQl0K4DrzIbajJt9QNtAvIEwLDesQpb5ctEqmxs44mAqXMzjwAuOQGdYdEHvgceFBSjZ4Xte2cAC0esRHrSkY8uDJQATe+qW4e9/X78K/AKkrvbrq5xa9fbP3wPvm4W5Hn8vEK0/r+G+Ao5CoYj05Jr+cMLP9+gRc3UE16UGjxwFchNjWFXTMQgDHnLvMcxwR9rbo0cBV+1YVlslVXC2dlWfHrKGbkok0i3M2KobqMsDdJ4sUPTAAZDEMOjL1mQxq7BQS7erQXP8jcNe/H/Wa5SxX6/mB0g4TysKHdRXztO/74Bnt3NNN5Iw+KUjUXmjY85MHbalYFf01Kn/iAWxiaInd2nCE7/aAoJKzztUYJsGNsFkHVQ432x+UsRBDU3JoUyrUt6aOGQRx5OI3uAwyuiEJO2sYnMT5C2RDar8B+fQHkcqXOB8/00TGIOH4syBGDSD7L03hPh9LigtOwvsAA7GT4CsfTC7H9tXAEfnleQo3f/hfBvaAcGdrKS+2+ejMghGfVQDbSx45vO+Wv76rLL+ZYRjz+/E1n/4UJfmlsU0c/tEJhDIBq14BfWOKuhUVc
k+nM6Q2OyJvWL1Iv6gSoc7P23dkoR76Y5V1AVgk/jLOdMBlPKWUYdSWJRd6MpLVQLBCwYw39bxgGi7gE/va2VlW84KGZ5xTrr/aBO6ULMBi8AxJh2mvmohQBdmDJVSjfn6Qp2yDXxxuiHLk4D7aXLOsK1W8D3j0ikzdpwd/OWX/1DXpyGlrNFfJylWn7i1ej80YHYAgnpYjXQa2chVzb872nfTDKTgdQhl7Uk+c81Xe62rFnrHYQ5bzZT98XDy17/kPve7sE/X00bMS65jtNNwrlDhB3pBHPr5rsj+25us0Q8ebXrUkMcwcPpZXr4ko1NplJC/t/J8/79H2fZDZQYGqfAmr1ooeCT18KYpc8hurrna57a+Uge7K+PxN4b7kqFCVj6BZbME/GMR2YRdcCI1CIWEzoriTpaUZBYIEMpIJEtEucmMeTqrqLIpaZNJyUfqBteFOU4U8FiEixOagLWmPppuW1QtY0835Mcz2hyMPKTzZbFWmBjlqz+LQn1BqlK1DjbfVvAhFft6e4UuK8rWSkX4O2faG1X0Q6T3D7DNA7bv7icitBA0Pnn3HMWfNvL++ZxjESoePnccIEmvE5w9i1maR4XXJFgmpU3rlIk61kEfbjlNmtYCLw5PbY90N7zHEIDtcssAp6VV5Aph152A1SUZR0JjLit/HZq8qy0Fbcsb8r2q7aZBUhNJE2orEts3OzYftwsRZEX0pitUC/qc+50R5OhYWoZuIfFd4a3Pr039ew5vg9dq/dQhTmgCmgS8hcPogj3WPmIAUfLnf/AOumWJrc5LUuIwJ4BXa8ch7CGXPKZljGQjFHcCCg+dC+jwXERjnKtJc7CdS6YM540w581F3cTgu1maS1G1Zg6LaBvriCJaXokd6yN3V+fWGMwNj04XXAqlC7qeLvthiLDzAd/ssUfFrDsKXyKpHPIdoOmBlnugV5zvXxMsPo5qlmRA7HdDIvcZVC7VPrak4yCjA+4irHfRz0QyhKcS2gxZVHSxoER3g+zcwfExiTgEdIF1nHAPkaoJx/wLWJ/YzhPs0TAxBsdRUUPqPemXyMXi+5r/xaxq5XPIyo6WQ6E0m+yWvTWc7yXiAL0Cc7MyYzfs8mNE3JO7v0HE99PxPaMvB5nwl5g7qL0kfkW0j2KL+Uat2zQru3FL7K3qJfCaHwZt8bXZ7M+Zra8aLSLZ3mtRP5JHlFVUmUuwnXzMNu+l0lb2A7qWjPbLy3wQ6wlffYhOYsgNHDeUMjE8qWAMIVtIjgAJyp40GBumVtDiokTgEtQ/eNaP6Gr34x0M1Ca9Y3syJGJ9ahu1NzxVWwZ9RnrhTR78IZUjVLW8NEJ0agvuVah14JtgjFWw3B624DjdaDucIS26QizztYhygfT1Sm4ujXd8BB1Ps9Qoj5MMl2IocvdCnp6p3Uu0zec8qOKH4mTdj7qLlnHmniJ7gi6KR5gDV18xuLTsbbLzeW9ZnH92HctQNYWtABeCOdw2+byJDHKYNscwaP0NygDGbsZJZB6jVQsEbbBYVa2Hp4DpBPrB7eV8pvnoEzBnkGEyzjILTQFicwUl4J0ElDuJW0nyeCl2SXfZqIeecEV3D7m4Vk6zEKfiRF10WhAa1I4jX77Ombw/JyGtbBLrmGod6SYqJN39KtgoB5v3V2CA2XTK12jA31vS8tkFX69SHWllJHP20Y+7evqOL/dqZKFGcjEJWlgJc25JxH/iQcn4ZTR+MzNSyLYwNgSBZU9L5ywar0khK/aIPq+EseCc89thr1lSww/kYlDMmBzerx257lsI8yVRxQFDrAgBf5NTBS63huWELtnwROGG7VsaF6Z/f28Ww4QqyUtdCzeD8Wmc435yM8XNCvSK3pI+8+Hhflgo0YYHvRi3F/+4cFz4F4Z5+vrlHsAH6Uhm2xiBFf0vFVrGqYsi7W0rf/ludsP+H8I6yaiL5LCqyWfTnChlTdUbT+mH+FF/TbGruARbkV8wklIZdJBJ+GO2xvx24jkDPKHEqO
tIrjDiJtHCxvISFvbpFF4b0Z7q0TiHrVMOh7FEhVaAX4SVNhbTSB159j4W8F4mthsN9NIA/v+Oy76uhOl0jx2053GUOYPnc4S8RTZa24XBXKJXrTkARt60D5piK/0cjEwt7v6kNrx50jb3+La0RGAMs/HeWVtwFOFL4/4nn53I2zfe65KnkaWCqcXeLpSIleNNoKqzoN0XDpCoW3AbZ+0cY+Wj/uTuzz75ps97umzBnxwyGT6OGQXgbJG1PZafXiLkJqYizpoRBnHGfU1HYeja3HYdOA4TrVKizCSMUDlE+JdlGoXl9UbhxD+u0X1l86+BaLxsCnjULQLrvT7iM+7j//nsdHHWCbOu5gp6viiyDUy+VNfiE/3NLIx3p1bYSQawGyfqzgbUv22PvfprFK+VYUw1Z2tqjE1gu4G02x1iQgPHgFqbvIINP9qnN4AGQ2XHV27wAnZGU5p6BlUNFWavjLmgC8wgTDxLOVUGghjhxDKbLld5zHywbLf4+v1uJrOBgsHM+U4BAqzZMUyzWrod8nkjvxdHIS0+vIKItFz25ftyM77ePT9NjJ93Qnl5JDL/M8y6T+d5ZgzSIT6q99Q+ZhdcIWF5juXYaUxlwE1OxHZlpMwE24jbIwiDSM9B1ORk0pSX/XH289X62mV17LFfUDPpTAXPiD/1uoqRqP55RACQX4IY2G6j7vRKOq2foZB4dna5T7DbJiDoUzQRi9HzPx5lVUX4DCTXc3nDrAZVvFSv2ySscsPFy2ox4jPK9Y+7d7+/itRmYOEaTXrk8Jf/wroLdZeHlejxrfNHTI2pgVS8Qpx/UGTSPPxF3WfkVlgd0ozmp72SpTs3V/wTyeoPUJzBQQ9g/SOkyGbgHu+r9AgfaqXBnZa4Z3qQvCaQTynLWEPfX1cFYRGv7em6HB9lRPVeslBr0KaMuyYIv6CDYGECuq+3pjqgItWXBtv4aRgnYNqzAzp45h0M5pvpuruzdxnSSHDDVe5Viw7ZQof3OK2zxId3JVhnVlo0DLcrDhWBD0SaWAEAXq375TucoYrtKlQGUtkabjADIP2ijmP5itvkHELGuQ1DGM8hfD9NrYGiX6zr2eXkxUTuUS3H1sJmF81y424okPaRcVNqLKya3DLZOZ0YpRwfJoUHzbVH87s9H6eXnuNXyGd1d0qyG60DoJBcfhgH35ETx4cpqABpKgzn5lP0InDLOTwupHKr3sFXQ+L7rTiT/36sfOfX/fjkOaj2Mxspx77qeOCamkEvNbX8Ts7iPCInM4UWjQZjLp62hD31cgbVt/lhtnnjVViok5pvoDVPr01BxKnTByWUHwR8iK9regRE9AL4XQ0jwNIaVDBwXMUoYurb75WCxOHuBSYf/WZTPNI3Ds8eIQYJThR2tWzXJd3DdeXJU1r0IjcTLxhmshsqgdvediGASKOor02+wChGGMr9vAYbx9EOpTX2bDv86t8BCEaMLjMXTJS8cY+W+juwmgN5Co4wOg3vktUC9fJcjHSKbCpXcZMQehUWY/Lu/0V5jovhv4C1w4EJee3dkVcEss9jNjT6crkO8q0/hto4cAHxJ2Cufa/+3se9nANo0wUjH6h1x9S7uqj0kkYcIi+pezZU8JCmUeWido37udphWNAzDP+YOsm2z6nICVJlkjvtrVITqqeQ71Dv25/2438uBMQN0E5k9NEuYbcwdbU4Yogik/2enDhqiVO/A61BU1WOEXRjjYt2SeiHj7ApLjkBPy9OwCrcoWhYzXjN+m0VrqSqh3qqMpahwtv+XKxap+Y1EjKVUbyXKrS7/V11fN/u1Mn4HUqK9mLkDnwQP78yHkT4r+4+1fr9Dqk2iwaeK6S1f1opfA3nn2LeuW5vLhDcCW4Ypa1Z2fXwlCoqBfE7INBmd6Y8CrjBzb7+e0wyT7t7d7X1SbGxSrb8dkmGgWTmToN0rydQAO/0Hk8DEUJ4yA7oR+qA4oeFcei+lXPN9xBAG635NTSBE7w//tsnstAAYxeosE
UT9W3WOQiANUgu4H/VZNLb23K3fxy41EJuyWuAcGJPTbZBH4FwP3i+I9R6Zy2TEOoS1M1t9VeWZ4riqDr4wENQi54XU68fjna/rMXHQuKbw5jpI+FAKnm1/oNrNkpVRPrgdwBJxIDP+zBfaDk5jXA34DqCET9IQSipzoUHlB7AyqB8Lr8lNIiv2NqLU7Dj61MHzSXNrTkGhMIg4euRuOv2sdwFW9OBbEFbe+V6PPtJwlflBgUna7x/Ko0NjoDNJpcJFHUQ/qSEVhVqZNDcnz9dXY1CvQtu+H+vkCWPW3lv35LfOg8+NlWOl0dBz+KxMA3APTwBmniTJmY44BfQVYMw58gQgCDJwbqK3QZFRtar95sb6U1+k+Eda909+1+dMnxjofxTU4O8aX6YG3Qo3N89dsK2WVTEqA1DcOxJ2eEM6/+vc4gKGp+7AhsATXtPyJUpu2Dfd8kNwxfi2UI+G/qNfH1X/PD7FgazUDLRPxrSIyD9/W+ui/R5KpB4IJj9R7+vb4JnNIJyK91fa8G+BR4L2Lf+bpgwVaehJvt+pZenP5NzIfRiAI9mnSWzrq/z3LNyV46psRC+A56PexaSoImnaX5KC6ls6/kKedz9d/UWN4IW9LN6tGh1QsNoma6Z/TIUl8vNRuqCEpcGu0Snr2uzoflhuTwmm3NJrOb/+3x5v9oZNEZmfzZ01s+RYVf4OrZECvE+1aJxeTiLXAskIt4+bxfKvDmw1xxU7RxCiCQ0wFc5vifak/rMeeQ8mSYA3lb/Nw/fhHW246kkCautyncDsGRk8FwwFwJRMqo+YBohG24buBOZuAg3xHP1PKSv1S3p5eVADGMbLH+U2jF/VBpnbHv+/vBAPDK3ypTavf/xBupTBg9Xk0ql3kLROYNE9adSqDZZvcQWkpmxTuceEI1lCIMsBSrIeyvwl51LYCbhP6z38kurYLE16lw0iTkSPsmhU7UZMP2+8ufPF8L+NRkiQq3Cas86AFeXs+efGm5VdmneGq9ps2Wqh8mrgYjECAxpEsSQXvPi607KHY1yruVKtkhtjuKSfI9aEm5z6CHAf8WKTfOl+mhqyKcRkeWFxfMB96elPTWzjsMueJcf4IH2NFvWNoC5UjudgJV1qeCYpaRv0VI01mfRzjzivg2iIh4nN8C94v0/hTyvXo9MlWGOTRVp8ZhEpqDw57g/XaMM9VvZOAvT7NCllpqiSKVBijz6cH6gITvSeqnrTD8F6GWByvkSkEL1jomIMykAA6/p4f+NNqT9f2M0xTpqsj3upkfVpleS1VzHgk7UAEpHS9Ym6vSUpPnSKTrsbt+OfJk1hb2n2Ca+74jcUdwxTq/uz8ywEK+TLUV/kQ/+UoG79bKprH3J/iPHdRrDIf+J4QjsPCNx75+lJoi05EGA2NF+GFi/U9zcmhjL1SsbY/HgjSkqw9Gz9uyh4g+Q623wYFjn+r51M4yqrkGJere41SoBpby94pstzE7R4a7FbIFlcPi5Xqs3BBkosVpk0Jf8N0O7ji2LBQximtMebnHA+TP7JDlpm6cCekZny/u135MdXkjmHiUI16hhHmYK/f5qvM8/BtCTYOhoxUlvUhPeBz8KJ+6B/sMv/T3pVtuYks26/pte59KC/m4REEQhOSQKDpxYt5nme+/maCqtquqj52d7t6ONdlP0gIEhSxM2LHzgBpMq6dg7kq5VawphbXytLB2vNJMW5c787q9fzfiEsL9tgdDpGQishmUUvhcVzhp3sj9T0DKL99PtOUtcKW0lq8o8fEGPaHtdHBWCgACgicCkpGbUgx1ADEep8yTXkwYl/TA3TlmX5xk1LuGsEsjy52tOmLDIncSfcSlc4m6oPRUuOalRwj7tz5kikJzihvW7CaqtiABEMZFDaoXWPSPsUnh9hp61y/GlQL71PdSrtLYxAFSLzVxd05G220L0h724Z9wF9cAc3U0DZnlB88O9KUtYCdV9HGUuxxm7BZDmc84FeEaOABJfYHcxnYK7WB
tw4QNZgJl6qFrxOmYlj5kpwAc4QJUMKSotl5cLpXU9W74wJtw3BcpaFck5FCT8qGSzuXvEBhQg8PHDmiEajn6AUy3Sl2PuSAHGsgOYSic292VFduFEPXj2J93HbopO4IIHaCMMad1g2a6ivqgq7voSBR7baYfsJGv3ULcfblAG8s1p0QRN8E86j2emAk7tq0pn4eKu28RQAiizPa3VFzssbSWE6WXmSGvI26ps9bkcaQdgkSXWgRMpkldtgNDah3q1p2Qa45RUJVGatzz+HwxqxSL+RuO7UZcY9KBERWjl94l4A9EVvUg9wRLtwMoZp1Cxu9une5kA+4QvfadVu5p1sn5O5qKaRGR+QSt5M8nZjp4toGtazoKTJaKAjWeaUL4vKeN6/cfbcXp+IV7fVFeTeP7SgR97i3FxvaOeoosyJgU1Y2bIt8aSpcqayiwyyUyT4wpbLeNTdjvSP3Lijx1ud4o6rVLR3beuFRFVItNrpCUvI9BIwgGw3A8/MCm0S+ajFyl+tiLraF7ArIwIKTk+30MB1YVkB5vy1OfFtHJ3cJygI8ixEHE5DBI7dluDOMSxqcSVu8Djtt5ByV5tnzlO25ajHRCqHftu6a5v2tzQHuNJEekjmiS/NUbkv/IAZ0vK2LNjw3Mte6xRCuC5WBzyz2B5AN9+Hm2BwH0p45BMgaFcy5HLUx06xM4E/ZEH1z5aYnWayQhg89Bql3xXmVtm40sqx28koMvXlqEjc8hog6CzlpsD/ZabGf9UbOzY7wmytOYVCUBeoM9kDggg9QvMM8Nq6n51Eqm74qex02LERtLjmpfAah/3Y1NU1L8PDqakFwwXN6S2yIYJ6eK26ASULBtYwA89FKdPLQIDjLtUfqFsuxeaLHYAMf7rxESZBur85Nrg3PX4lNjyKOw0ZOm0SgGNXva+Q4ezzgNoPOCVd1Y9eHFLIE8wTV/8hFvVuG70dXW1TxbtjLqQ0HXo2hnxv62WvVhMPkE0Hbuo8gVJXW1FU7VM4umxg73+3hvJTCzPfEnN0vAD2/slh2rXj0CO+PAQ5zMGAB0YeLxRMzhGvzDSBemHUi6ZVat71K0ZlSsdpcV9ccedQUzhgQtENujgYXxgJk9NK1Gy5ymrbPiFMGPHzOA5RfK2I9bPmp6f+y8fZ8n1C5rqkJeTSQW1oTjKpOwx6BYcGlRvF43pORI5FOfi5r/EAK7E24tVMYiQ/5NM508xiA3VJCClTAAu4qLA+QsEy6CStrlJblOLHjJi4sLqaqJTX6MM1xG5z3HNsiuQLjbpa1RlMjVtZNRFcFOIvat5HpiBxtXcBXOaWadmItNDuEFrZZ6taUsNYgkBzBzCI2G+fkaoi/6Lfh9a7EvOwe6MOtLhop9k/i1TudxMFxLmi+vVbxcouI5j5v9pnfY01yVpK4GHlDn+tAwL37IeJS87SxbCjFaEm8WuLqMRcVvIK6t0w0gJafFZPIjV042jqyJraunSNK4hMGq3GhFFLwJk/FkmoRU/1u+nV3bwntuoApYHmgNuxm3NBGPAK6VPmsnO0GPzLMquClsNvr+L4+Xi9UCXLZuDXLg4pgVHjs2H3W2avcP26raRLwGU/LDiAI68DcacVgpwh1YaPAczDYki+xgBTJoA4z1VPYCUNfrFVGc/zgdjlZnnZiTKVnrcRokgaZ0+k+n+JoKNNZFHFtyg4Xy99YRnNm5C2xInujoDVOkK+suzKQrOTlUAb030p4PMR8ge3gDyL4wzTWBuzXY5ws1h3mpEuRbcCxDhmExE3HpDocSYvG2Wvg5608YmSxDORO8oiBEOnztSfttC2i/vaiWwmbMOb4UOzXdM/uS9GuFRLTsX673benDajwC80+a0R/KKHQOAbMqWT49WplmEmTdii5yu1jo+xX4jx9JG1BcyanSFt7X15rstUFPYikTrEFqDeI2gVDwPSjCsLA4vEC6Iy3hKpaWPiu2rNufGtsne0Daq7z1nydAHYs7QUijev5N8xxVFvj
C9ZrhOIydsi9KQfmmp0kfa9dsp0Wcgc6Nn27DrMVJY/U1uALZ509uMdS7rMp8G43SHEM5fyy0fPSsGs5WTRLmMGPGNPeMizGW5ESHGtxXOGNDXx7Olmktsj1EVylCK+yRwJjvspsYcG0IZuJigk7uGrjeDRqMMc7mL2soSrW7cg5aGMG1ICf611bDGtOnNqr42opWQNCgUvBItjvX8TBdh5V5AgwDxc3b7+XjLwkUsKIIqtuYFOLIXC2RiZNuPTou77cKxhS1sAOyrrLknEl01VUeBK9L5cH5jxHIM67TvqspHb3UGVt6dYIZ1gabAaBXjtjQm8T/6zr+34dHQWGqPEVo3SBub0ZTrTlrCnu7JrG3qznVQBl5/lwFUBuB3/6WQvC3uWlwzfo7gRnuOJ6TFUsbmkMTBBdoFnK650XohOuEFSNV0F7RYsDU8jq/TatEXkbcVpX0ONx9OxqEVbD4XyybZGnnDwwjJa2pVDkW6gB1rcYRUo5ve9XrROcto+MMBecm9BKreMJ5bp12baecrwJKi84Zqqq46BSN2yN5foNSh7cbToK/ngqt/S5EFVw97q87DL8GJ5lwAaXt8tt1LQ67Xdp2ee7O3KuLdFS53oq5Aqe7zjYEuEcvfMq7+5CRtLafdguGe7sH9u6PsbufZfjg4PmJy3ezwfeViRtcIshOdFb8xgiYzyA+lB0MQ71pVCuDyt1Z7OHcoNd1SLH/X458x2Zg8rXugG8m9ketS7REXRnDyua07sKTS7pjCFkBSwDkMX4M/BH7nwEgfgS50cv55iEZPk8nfujlEXSIZJEF7HKblgNq0XI4ER9vs5OQptRKTuJ41oT9nkeQge7XXRxH4p4ZIzupSdxnJ2V395gm/GkB0Vp9kro2nG9T7ItQl2RLqc26MKqT9lwYPeXyxakKtOMbolip1TdIdvNVarX1VmdvChwJYTq2utWGJVlm0mQOx/izThorCQQIh5wcR7VDC+0drxYqbzLmmdFup2tOScIV69SCK+LR0smEuegUTtAk+3l4SpulIXcXYwxsmtfH5vxKCiSosxJastI4UrnxFNW98WxTVzWCiPgh+KIcMT9AjjAQZCLXd4BA+6FrnzOQdy25mB898CX2t0l50iL9d26kaiGo2pywRJflZBE8FiCLG9jWO3bckQEgEPpRMn+9NiA6W9x8SamqO7C8ou8w17O9+EYdPLRkXLmppyANVbhcosdfW1TqeblmCHjfjQuvFQL3vZRV4gd4AeFSLfTL8DDJecyqsmTQWxlAz6lgIf1FLa81dcM8vhFkpnGOeZT1bt7zAE7Urif+oCrHmjfVPyNMGVG4eJTxIVbLcRCq0tqg6H3s1Yb6mbVTT8mKbAEdgC+JI0FOz3hrJj6x22Yt60z4oKKcPROLqj0oE8rEheISFPntW3IE+E119vwgCLdcVwknjtxtqLVjU3H9WsNuLIhbhqb9lq69UGghMxLyh3ikkTx7Yqy8jXZBkUuMyaoBNwBsrepcREG2CI2g7iyeGVKLNNKEceP2ihgKMh8W73IRv0o8MbKD+hLu8hNJU4PeieHzcX0A2WdEZNplS0suEXuUl2XKxeqpSUJPIJH+9rFF1uA3AO/STNcuAaVu7qj25W6sYjFrIyqCzh/j4LppSg463gSQ13x9HZl9vA4NT1cuTpMWnr9WAWdJJv1pNjelbKPgFmwuDxcIE/er4swUuanKKKW4Ddk7LpB4C7vhoccHpiSoE156Xy/aQhbF/xa0FVYrCa66N1c9qIQqU0E6YBn6bO6yh0rWKNxirMvBIJpd9vbfqOgumSt7gIpWs4a1DmLVBpM3aoCL0QD6cDN+GCgMAvi4p1q1hBkdWspzS6JI0R1xGSDyrwAV/AAHY0QTmfFSp+z05oL11B72OSkE0Wb8lRUIR1WPuLtjqJC06oGIqmsEeM636j5fUYj35mTvLhI9vmdpPbknTc0uTeiXglMMHNV4rTiaGo3NsFVvaCd2s8mFTz4SCTw
IgeM0zll7UhRwdC5Jfx2Cvh2PXaK0CGSAGgaGADs51U3g0egOOuNAMk7FhllKIwQ6+QYB2Z65mAPR29mrlFeNqceRO3H+Ti3g9fJIybVFRkdk/VZ9RGnWxXdzQGJDBsyH0WifSgH/sFRkEf+7wCr4Lkl3hTuVcPD1Ths75ZlUgd0F9YXQeg75LAy3CBGlLYiLGUO2WvBg7yBK2vyfkFoza7Pt6I1hFhZH/EtFlzNAYq6IUF51NrGF9x5Bgo3f7UDPcAli5HpFkXCVuuMWUJ+zldiGnr7PMEFEeQXBw16da5uvT03oeXmHlXfKynr2qCUKbsjTxLEYaum9w0gD8JaENc5P8d3RYKXyPOXzfWyt49OxRVlv48loUQia686I++T1T5vB3twKAP1TCm/ndeP7gNO8nYeiNjyWmn3qHs6k01Z7Ddh23QrGdgTEOMDlZtBXjJOjNDba8/M83ZaMxA4eZEZgDkBLn/WrzW7tk/m1g15jZCXTdWvVpJDuIlOCDK1w1cP3TTiApjnuxS2puFn+7iShiIbAg0wvNvmuDhRK1vBj3ppX7zZKhlvTWeLtgWjXeHiAC2ui6Ldq6m7M4Z15zgr4WbuT9n1kLnS3d60523YL+ejuRUCBSfOD+/ovcPYChQcRYhLgkbcDu5CkQ4XT1FT9cG954NE3p09OD1I717mnZksaxj+jAKHWUbms5D3aYKefiAqY8Bpz7vr/QjVSK+bT3yDVKxb3PqiY2sXJKF4J5gm0s13JvfSvXJPAlyb4R/eP/JTNNTkS6gdYeuhbDpr2CRT2JnXn5y9mPrd9socp9sxoAq782UqF+ATXObpIa9nf271x/MUyqwx9JiU4ipPebfPOsU5cJXfJaR1VVEce5nHnDTNY0FjgvhaEm66ZKWLFbG3Cia3hXT1uwM2FvCkR0AmPXoMkkek4m58BzAo8dTQtSV1F5eqDXwh4wtLj3bQgPD7dol9G1KoIL3ERtkQYES9cBt34EnzIo7lqS+yU+NWPI0IAioeDsyFaDZ3YZlcxUP1UABAkaU0HM8t+li7SgKWllpe9g3SpEOeuN7aXtWZACrYWiBBWbwUlYc/4IqDwlXUPYo35mkoNkQBA6uawh5RTwXRCsOClDfPFn0uVHHNPPKG3AnQqtW2399VmhqoblHVICgHusQnympaOwqhRqIO8FcIz/yzWiVzzIRcJiaNa1RM64FNdJ8efrcIR7E5DLrUYJ7K3BO92Jm77DEvN94OhmORl0A2DPM0Tsl+aTHlTRZlMN+BUeirfaMvDmpMWi3g3XdvOkB0LvuzZOH7gtK3AogdNdcx/a6sAB8pc9hAsR8MhViS7vqhqXf7DjYHCQG9TTHEBuGKPXueafWs2HhnMNGFsGQgR9asYC8i1sN33RHaJFiGbRlg6RV3z/5ypNe2Nj3lii8N3m2zdFpALMAUBll4Po43YIHnrYPL9ZkDn/X1oU1UEEsYwGA4xXR2wt2UCo+4Ta0BWKzbRffI+7y3U6YJnR3rXFE0tik7Amr+g3TrxAODIZIdxSNCyJ5MQtKye56Py0yD85HTl7trIcFFmPOqMQt5lcL7ae7CGvCJMd102C1tVjl5UEYQRc6PKHKAXbygjhR41yyoAK6n68V1BW9B61YuIJTBhVjZeo0iOBIiUUquH2cVFQmuDnPLWEFA+clWXZO1O0ql4nLjJl7NSAscu4WhAHnJ4VTACEJZD4ViLawhjwVA6l1YGdWXAvbq6CtMBqaGzZbEQC1bx9fsPVzJ4GZFU1xGsKOJM7jr1d1BTmMH4hltk0wDJWSzuS3m7sDiDBvSqpAYt/kZK/TDo7NCXM3HM/ujnmpXhM2zAfAjuDaw4kHuEPshWgpycszQSsqfV8KVgzrZaM0J09rwTSJ82oC//7dMGY6IBIAqMMLYM/epnxHeE5cZc8YUThNn4Rcidq0L6q6f80vpnxSCgNjdC2s6TohbtIAFDP+wrMzBi1wsQHQr8rQGAXpxWHgM62IbE/7m
5tITS2aeT4+VIzD+IjR0J0iP9+3J5bc34jGUwENUvvxJaUWE1ctb73Cb7PH8Z0zrRM9/fP71sdHH7CtOD5rw2LXgM3I1iJE/1+kbVSfFMtp4nvcLLkz/QeQHVkdyo3TSGm7BsFG8te4udjd6nzc70VplMfqEzvu1Tlk7/bzftAkXf8EXSS85WeLU8CGqyONTgn0cMjzeo9j8vgtAEfgYAUHmbb4TeP7j9Dj1CWHmzUY1b/JeRp8e2TidE65L9Qsnjp8vYXqNIYE9H1P4pDjwmr3abS7Z59Z0mvb+hD+uoTXixnmM3QS2M2+t6iF+bAVfElwO79cJGF9AwcuqLrPIWWRxVoItaZaCPXnAAuNXm4w48FLw1gL2dMB2HpossIyYe3yQBLYNT8N3flA7p9yw4Dm7EuAQh0k4tR34FRA4fJbWSyMJYmjDZVACOoEsMtv5QLeRzy55uI3EkLduw95xG4F8lM/wNz57467KN3L4MkgMz/nab6/NX2f5F1t3hunEx6wK6iCDn5qgTsoSsEMMP+ANK/Imlzw7+RcMd6c/sMt0Mq7KHat++Mt4fuMGPXQi/7gewa/rHNiFmzUBy06xT4GVpW4AnF1+sjJ4e5htTM+Wh9uhrG6UZdZVT0ZqPzVpAK63MuInw4LXWT3BnZ4qB4ZFDDbFLZOsdJ5QjPmUp94rXIJLFin47zWgdo0V2DOk0iqLPxJVFEp/Ir/CFc68xRXzDqyYD4MV8W1Y/Tob0d+Yr18A7aNshyIU8sp43z0pXzb+ePORb8xnZ5YHbz/7GUr/vOM+LppS/xLY08w/EfX0G+tVfpY/uXEGgPsT+X/edx8HfOZfAnyK+IMM7ANhz76xnWFZTuyURu3YT79gVAyBDvtyKQ++AnSigUzlyXYqCNifM+PPOvfD5sXzwP/4ecFi6B8z3cfNCwJ9Y7vKsZoyqIefkP/Tfvs4yL9VAd64619XUeLfrCi7fMyyBITk1omz3Cm/rCKnsn+JItg/uIhkyVexk2TeAInC3uLoeduPx9F/ozLxbRx58ZD72ZNbOvAOOwo2QQM0uVmZGKnlPFlxZkVP4HyAGfxzwURR1NdgotC3YGLeAdNHSZPEd+gR/zowod8EU5BVTyj6NGEKvodP2F1ONdVvYgeZ/v5G7DDP+vRLIHpRt75ED/kOesiPQs9bOeYNer4Jhm/C6Q0T+SaF+dJdz+hNeg9QE/+TMTal86myyiCvP4OD4LF5FsChxRacoXpcy7fc/AaOH+V3FGFfOZ56S2WId9xOUp+wD/P8jxZ0XrlvuVxyIvLG11869sNi9D+uECbe6j+eU9dB6j1VtQG+l/2T9/9Z730c7f8OBej/S4YtAUdz6t/Isc+Q/LvS6+u1ovfC7F+bXd/qX8+aFzTSVxCiiiZ7/uCpCkZn8hYCDNX/+uGzTlY7lp8Cq3vDNAZASfU8MLjQeex5zw+IK+VsuQ8KK/Cj0+NykY+CCgD211EGw9+JMvg7UYb6qChDvqen/QisxI5RpiDTgB1Kp8qa0nJ+guV3guWVEkX+7WB5KyD+GLBUTZ5nJRwA5IQcuBLS6Z9g+RORBWff4y/sXwmW/0bZEvkmf0mM/CnOLGNaTwLUI7Cf5i+MLWGL2VI28s9Pn3ePPT6f4B6fJ3x8RvAvGM5v1y/zJ5eHX7G3NdDLN/1mKfqwbey4v06tD2jSQrHXFInA3pEg2HdIEvtRJIn8DjXUSW0O9jX9GjK+LDy/Ydw3DmSnv1cRAf19Rndsz/mPJv/CnOQ7qeF5W+nEAH/tl2O9b+PHGY5Q6PgP4ealD+95jDnjPw771VVvRkKxb40EClVQtL4ZaXL6yxf/Ezj41zRWvTSXPYf4d+qMv1hgIN/qeJ1jgqj8s0X1B/jtw6QF8jtEuPekTZCu7SkaQlPaRuW/2PX3uPXLXBa/yuNvfPybif59fpA1dRyk4Oxp+kV6f6MDv5J2M9cNLAfk89Ry8rr69Izhv09UQHGU+Ao0BEK+kzHJdwjd
c/X/41HzVk38ezImDh/QUg7XL9/coK8/0eTze6F/OH9+Nzze/T5/zannP5mE/UelZeJVpCFfB/7vzcqvB6Jeh6KPTsrfoX3+JGffS87w1977w+TszUgfjYOPUjLB9ZaZ3Vh1Bi35VwkNH8xgvlYa5vfvQfyjkhbxG0Hjy+VG4j/g/odnrGei9cPhY9gtbB6xf0LnR0GHQb+JHZr6S7Hzntb5DfnqO5SlP8hr3xekKseJ4szLHkrUtBPUoOBDo5LAKrMqc+un1Kmf4F5PFI6KS5EmCO7pyyMfgtNHuRanXi+QYc+daN9aIMM+YcSfdy/7mQoKRbDEm9KY/JrWw+j49B1txK+rBDNP0k+VU7agVvhcG1X09Yx+t/T4WqvD30PER9kdw1+rbgTytmmMfK//4wdobu8a/b9REf72irabZfZ0Y6RdBmn01Gbx0+TOJQnvMV6ib0TfZ7gQHzkr6VeLS++IEuQ7DYXkD2go1AjlsmOsi846NmLKn7fUnXl3Qv7OVE2+l6pf7vZ43rA/aOJ0wEvqNn/dFxH7x0IU+IIYctxLv2CLaWxwfWBaILAned5Ul0ZaPRyDIeYLwMCb/wGhN//febfKiZ3pNljwOkvj4dO/izP8Bq6+F5LfyxUZ5B1NDH0v4f8BUWxqIIGg+LU0gQFdBtQH7vF/ \ No newline at end of file diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-org-old.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-org-old.png new file mode 100644 index 00000000..9491742a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-org-old.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-shop-floor.jpg b/docs/devonfw.github.io/1.0/_images/images/devonfw-shop-floor.jpg new file mode 100644 index 00000000..32c6fa52 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-shop-floor.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-small.jpg b/docs/devonfw.github.io/1.0/_images/images/devonfw-small.jpg new file mode 100644 index 00000000..0688fce7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-small.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw-small.png b/docs/devonfw.github.io/1.0/_images/images/devonfw-small.png new file mode 100644 index 00000000..309089cf Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw-small.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw.jpg b/docs/devonfw.github.io/1.0/_images/images/devonfw.jpg new file mode 100644 index 00000000..8179ab56 
Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/devonfw.png b/docs/devonfw.github.io/1.0/_images/images/devonfw.png new file mode 100644 index 00000000..37eeb991 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/devonfw.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/doc_copy_always.png b/docs/devonfw.github.io/1.0/_images/images/doc_copy_always.png new file mode 100644 index 00000000..b72c3840 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/doc_copy_always.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/docker-configuration/docker-configuration.png b/docs/devonfw.github.io/1.0/_images/images/docker-configuration/docker-configuration.png new file mode 100644 index 00000000..7f5ca594 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/docker-configuration/docker-configuration.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/docker-configuration/docker-configuration2.png b/docs/devonfw.github.io/1.0/_images/images/docker-configuration/docker-configuration2.png new file mode 100644 index 00000000..fff9e6e8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/docker-configuration/docker-configuration2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/docker-configuration/docker-custom-tool.png b/docs/devonfw.github.io/1.0/_images/images/docker-configuration/docker-custom-tool.png new file mode 100644 index 00000000..f4a51e95 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/docker-configuration/docker-custom-tool.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/docker-configuration/docker-env-var.png b/docs/devonfw.github.io/1.0/_images/images/docker-configuration/docker-env-var.png new file mode 100644 index 00000000..86a753d7 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/docker-configuration/docker-env-var.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/documentation_workflow.png b/docs/devonfw.github.io/1.0/_images/images/documentation_workflow.png new file mode 100644 index 00000000..869bc179 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/documentation_workflow.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/domain_model.png b/docs/devonfw.github.io/1.0/_images/images/domain_model.png new file mode 100644 index 00000000..ae30fc87 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/domain_model.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/download-install/create_update_ws.png b/docs/devonfw.github.io/1.0/_images/images/download-install/create_update_ws.png new file mode 100644 index 00000000..089064f7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/download-install/create_update_ws.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_1_create_workspace.png b/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_1_create_workspace.png new file mode 100644 index 00000000..85dcf9cd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_1_create_workspace.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_2_create_workspace.png b/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_2_create_workspace.png new file mode 100644 index 00000000..a8cbb1be Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_2_create_workspace.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_3_proxy_maven.png 
b/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_3_proxy_maven.png new file mode 100644 index 00000000..d6f37ba8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_3_proxy_maven.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_4_proxy_maven.png b/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_4_proxy_maven.png new file mode 100644 index 00000000..7f575a98 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_4_proxy_maven.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_5_proxy_sencha.png b/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_5_proxy_sencha.png new file mode 100644 index 00000000..1dba814f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_5_proxy_sencha.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_6_proxy_sencha.png b/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_6_proxy_sencha.png new file mode 100644 index 00000000..e86c6379 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_6_proxy_sencha.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_7_proxy_eclipse.png b/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_7_proxy_eclipse.png new file mode 100644 index 00000000..731f26a9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_7_proxy_eclipse.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_8_proxy_eclipse.png b/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_8_proxy_eclipse.png new file mode 100644 index 00000000..64b0c189 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_8_proxy_eclipse.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_9_proxy_eclipse.png b/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_9_proxy_eclipse.png new file mode 100644 index 00000000..a5da1b95 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/download-install/devon_guide_environment_setup_9_proxy_eclipse.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/download-install/run_env_sh.png b/docs/devonfw.github.io/1.0/_images/images/download-install/run_env_sh.png new file mode 100644 index 00000000..a4574a34 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/download-install/run_env_sh.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/dsf4docker/docker.png b/docs/devonfw.github.io/1.0/_images/images/dsf4docker/docker.png new file mode 100644 index 00000000..47f7523b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/dsf4docker/docker.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/dsf4docker/dsf-docker-arch.png b/docs/devonfw.github.io/1.0/_images/images/dsf4docker/dsf-docker-arch.png new file mode 100644 index 00000000..111bf45a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/dsf4docker/dsf-docker-arch.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/dsf4openshift/primed.jpg b/docs/devonfw.github.io/1.0/_images/images/dsf4openshift/primed.jpg new file mode 100644 index 00000000..6bd07bb3 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/dsf4openshift/primed.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/dsf4pl/pl.png b/docs/devonfw.github.io/1.0/_images/images/dsf4pl/pl.png new file mode 100644 index 00000000..dce37c09 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/dsf4pl/pl.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/eclipse-spellcheck.png b/docs/devonfw.github.io/1.0/_images/images/eclipse-spellcheck.png new file mode 100644 index 00000000..ccad7e5b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/eclipse-spellcheck.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/email_swagger.png b/docs/devonfw.github.io/1.0/_images/images/email_swagger.png new file mode 100644 index 00000000..d01c8246 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/email_swagger.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/enviroment_icon.png b/docs/devonfw.github.io/1.0/_images/images/enviroment_icon.png new file mode 100644 index 00000000..16d91378 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/enviroment_icon.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/environment.png b/docs/devonfw.github.io/1.0/_images/images/environment.png new file mode 100644 index 00000000..e4fa98a5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/environment.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example/chrome-stable.png b/docs/devonfw.github.io/1.0/_images/images/example/chrome-stable.png new file mode 100644 index 00000000..68f662c5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example/chrome-stable.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example/cicdgen-command.png b/docs/devonfw.github.io/1.0/_images/images/example/cicdgen-command.png new file mode 100644 index 00000000..31301861 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/example/cicdgen-command.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example/docker-global.png b/docs/devonfw.github.io/1.0/_images/images/example/docker-global.png new file mode 100644 index 00000000..70c99574 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example/docker-global.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example/gitlab-2.png b/docs/devonfw.github.io/1.0/_images/images/example/gitlab-2.png new file mode 100644 index 00000000..2cf98212 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example/gitlab-2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example/gitlab-webhook.png b/docs/devonfw.github.io/1.0/_images/images/example/gitlab-webhook.png new file mode 100644 index 00000000..5d12afb5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example/gitlab-webhook.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example/gitlab.png b/docs/devonfw.github.io/1.0/_images/images/example/gitlab.png new file mode 100644 index 00000000..bcf569f3 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example/gitlab.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example/global-settings-id.png b/docs/devonfw.github.io/1.0/_images/images/example/global-settings-id.png new file mode 100644 index 00000000..75aa37b9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example/global-settings-id.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example/help-1.png b/docs/devonfw.github.io/1.0/_images/images/example/help-1.png new file mode 100644 index 00000000..eb387525 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example/help-1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example/help-2.png b/docs/devonfw.github.io/1.0/_images/images/example/help-2.png new file mode 
100644 index 00000000..56edeb48 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example/help-2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example/maven-installation.png b/docs/devonfw.github.io/1.0/_images/images/example/maven-installation.png new file mode 100644 index 00000000..401d01cd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example/maven-installation.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example/new-pipeline.png b/docs/devonfw.github.io/1.0/_images/images/example/new-pipeline.png new file mode 100644 index 00000000..5c0d365e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example/new-pipeline.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example/push-code.png b/docs/devonfw.github.io/1.0/_images/images/example/push-code.png new file mode 100644 index 00000000..1a816516 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example/push-code.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example/repository-id.png b/docs/devonfw.github.io/1.0/_images/images/example/repository-id.png new file mode 100644 index 00000000..6a99f8b7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example/repository-id.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example/sonar-env.png b/docs/devonfw.github.io/1.0/_images/images/example/sonar-env.png new file mode 100644 index 00000000..47d4ba81 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example/sonar-env.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example/sonar-tool.png b/docs/devonfw.github.io/1.0/_images/images/example/sonar-tool.png new file mode 100644 index 00000000..ad15e518 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example/sonar-tool.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example/teams-1.png 
b/docs/devonfw.github.io/1.0/_images/images/example/teams-1.png new file mode 100644 index 00000000..afafd9b0 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example/teams-1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example/teams-2.png b/docs/devonfw.github.io/1.0/_images/images/example/teams-2.png new file mode 100644 index 00000000..8636fb07 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example/teams-2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example/teams-3.png b/docs/devonfw.github.io/1.0/_images/images/example/teams-3.png new file mode 100644 index 00000000..8de6fec2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example/teams-3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example1.png b/docs/devonfw.github.io/1.0/_images/images/example1.png new file mode 100644 index 00000000..c23188c3 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example10.png b/docs/devonfw.github.io/1.0/_images/images/example10.png new file mode 100644 index 00000000..67ef9c20 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example10.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example11.png b/docs/devonfw.github.io/1.0/_images/images/example11.png new file mode 100644 index 00000000..6d8903e9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example11.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example12.png b/docs/devonfw.github.io/1.0/_images/images/example12.png new file mode 100644 index 00000000..24c523ea Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example12.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example13.png b/docs/devonfw.github.io/1.0/_images/images/example13.png new file mode 100644 index 
00000000..3d13cd05 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example13.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example14.png b/docs/devonfw.github.io/1.0/_images/images/example14.png new file mode 100644 index 00000000..e34c4c64 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example14.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example15.png b/docs/devonfw.github.io/1.0/_images/images/example15.png new file mode 100644 index 00000000..2d0ccde2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example15.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example16.png b/docs/devonfw.github.io/1.0/_images/images/example16.png new file mode 100644 index 00000000..0944dfff Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example16.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example17.png b/docs/devonfw.github.io/1.0/_images/images/example17.png new file mode 100644 index 00000000..85e16336 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example17.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example18.png b/docs/devonfw.github.io/1.0/_images/images/example18.png new file mode 100644 index 00000000..ca2095d7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example18.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example19.png b/docs/devonfw.github.io/1.0/_images/images/example19.png new file mode 100644 index 00000000..6144f3e1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example19.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example2.png b/docs/devonfw.github.io/1.0/_images/images/example2.png new file mode 100644 index 00000000..f4fbb13e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example2.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/example23.png b/docs/devonfw.github.io/1.0/_images/images/example23.png new file mode 100644 index 00000000..52b0c3f4 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example23.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example24.png b/docs/devonfw.github.io/1.0/_images/images/example24.png new file mode 100644 index 00000000..3a4a0cba Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example24.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example25.png b/docs/devonfw.github.io/1.0/_images/images/example25.png new file mode 100644 index 00000000..cc7b7809 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example25.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example26.png b/docs/devonfw.github.io/1.0/_images/images/example26.png new file mode 100644 index 00000000..482acf88 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example26.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example27.png b/docs/devonfw.github.io/1.0/_images/images/example27.png new file mode 100644 index 00000000..91bb1ca6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example27.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example28.png b/docs/devonfw.github.io/1.0/_images/images/example28.png new file mode 100644 index 00000000..4a737c5b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example28.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example29.png b/docs/devonfw.github.io/1.0/_images/images/example29.png new file mode 100644 index 00000000..756ae948 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example29.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example3.png b/docs/devonfw.github.io/1.0/_images/images/example3.png new file mode 100644 index 
00000000..a7aa84b4 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example30.png b/docs/devonfw.github.io/1.0/_images/images/example30.png new file mode 100644 index 00000000..c124512c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example30.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example31.png b/docs/devonfw.github.io/1.0/_images/images/example31.png new file mode 100644 index 00000000..0b820545 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example31.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example32.png b/docs/devonfw.github.io/1.0/_images/images/example32.png new file mode 100644 index 00000000..7b8f9f6c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example32.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example33.png b/docs/devonfw.github.io/1.0/_images/images/example33.png new file mode 100644 index 00000000..d1825de2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example33.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example34.png b/docs/devonfw.github.io/1.0/_images/images/example34.png new file mode 100644 index 00000000..1ac33fd7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example34.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example35.png b/docs/devonfw.github.io/1.0/_images/images/example35.png new file mode 100644 index 00000000..a9ad830e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example35.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example36.png b/docs/devonfw.github.io/1.0/_images/images/example36.png new file mode 100644 index 00000000..57bb6987 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example36.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/example37.png b/docs/devonfw.github.io/1.0/_images/images/example37.png new file mode 100644 index 00000000..86f35ad3 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example37.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example38.png b/docs/devonfw.github.io/1.0/_images/images/example38.png new file mode 100644 index 00000000..bcdd4a90 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example38.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example39.png b/docs/devonfw.github.io/1.0/_images/images/example39.png new file mode 100644 index 00000000..ffc07f3c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example39.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example4.png b/docs/devonfw.github.io/1.0/_images/images/example4.png new file mode 100644 index 00000000..6258ae7b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example4.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example40.png b/docs/devonfw.github.io/1.0/_images/images/example40.png new file mode 100644 index 00000000..3835bb18 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example40.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example41.png b/docs/devonfw.github.io/1.0/_images/images/example41.png new file mode 100644 index 00000000..0f02aec9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example41.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example42.png b/docs/devonfw.github.io/1.0/_images/images/example42.png new file mode 100644 index 00000000..fd0594af Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example42.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example43.png b/docs/devonfw.github.io/1.0/_images/images/example43.png new file mode 100644 index 
00000000..21aad882 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example43.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example44.png b/docs/devonfw.github.io/1.0/_images/images/example44.png new file mode 100644 index 00000000..538f4fa4 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example44.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example45.png b/docs/devonfw.github.io/1.0/_images/images/example45.png new file mode 100644 index 00000000..a6fbdac9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example45.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example46.png b/docs/devonfw.github.io/1.0/_images/images/example46.png new file mode 100644 index 00000000..279a0998 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example46.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example47.png b/docs/devonfw.github.io/1.0/_images/images/example47.png new file mode 100644 index 00000000..89d52d2a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example47.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example48.png b/docs/devonfw.github.io/1.0/_images/images/example48.png new file mode 100644 index 00000000..a394b107 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example48.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example49.png b/docs/devonfw.github.io/1.0/_images/images/example49.png new file mode 100644 index 00000000..e691840a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example49.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example5.png b/docs/devonfw.github.io/1.0/_images/images/example5.png new file mode 100644 index 00000000..11496ffc Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example5.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/example50.png b/docs/devonfw.github.io/1.0/_images/images/example50.png new file mode 100644 index 00000000..3a75a6da Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example50.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example51.png b/docs/devonfw.github.io/1.0/_images/images/example51.png new file mode 100644 index 00000000..fa42b7b8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example51.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example52.png b/docs/devonfw.github.io/1.0/_images/images/example52.png new file mode 100644 index 00000000..9abdc9f6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example52.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example53.png b/docs/devonfw.github.io/1.0/_images/images/example53.png new file mode 100644 index 00000000..fa8cd2d2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example53.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example54.png b/docs/devonfw.github.io/1.0/_images/images/example54.png new file mode 100644 index 00000000..686c9e46 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example54.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example55.png b/docs/devonfw.github.io/1.0/_images/images/example55.png new file mode 100644 index 00000000..5a89533c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example55.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example56.png b/docs/devonfw.github.io/1.0/_images/images/example56.png new file mode 100644 index 00000000..85324ddd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example56.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example57.png b/docs/devonfw.github.io/1.0/_images/images/example57.png new file mode 100644 index 
00000000..8d6117fd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example57.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example6.png b/docs/devonfw.github.io/1.0/_images/images/example6.png new file mode 100644 index 00000000..d1867da1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example6.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example7.png b/docs/devonfw.github.io/1.0/_images/images/example7.png new file mode 100644 index 00000000..efa8c5ac Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example7.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example8.png b/docs/devonfw.github.io/1.0/_images/images/example8.png new file mode 100644 index 00000000..adc9b395 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example8.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/example9.png b/docs/devonfw.github.io/1.0/_images/images/example9.png new file mode 100644 index 00000000..d9e6f8f7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/example9.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/examples.png b/docs/devonfw.github.io/1.0/_images/images/examples.png new file mode 100644 index 00000000..ea8796c2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/examples.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/facebook.png b/docs/devonfw.github.io/1.0/_images/images/facebook.png new file mode 100644 index 00000000..56f12068 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/facebook.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/february.png b/docs/devonfw.github.io/1.0/_images/images/february.png new file mode 100644 index 00000000..d5db9e90 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/february.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/finder-integration.png b/docs/devonfw.github.io/1.0/_images/images/finder-integration.png new file mode 100644 index 00000000..989351aa Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/finder-integration.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/flexibility.png b/docs/devonfw.github.io/1.0/_images/images/flexibility.png new file mode 100644 index 00000000..a9e880d8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/flexibility.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/functionality_stack.png b/docs/devonfw.github.io/1.0/_images/images/functionality_stack.png new file mode 100644 index 00000000..629019b8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/functionality_stack.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/further-info/devonfw-org-old.drawio b/docs/devonfw.github.io/1.0/_images/images/further-info/devonfw-org-old.drawio new file mode 100644 index 00000000..7082aecd --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/further-info/devonfw-org-old.drawio @@ -0,0 +1 @@ 
+7LzZkrPMki34NHW5jzEPl8wgZsQkbsqYQcwz6OmbUOZfe6q2U6erdveutsrPLFMKUADuy5cv9wh9/4Jy3SnN8VjpQ5a3/4JA2fkvKP8vCIKgMHT/ASPXzwhJoT8D5VxnP0Pwnwee9Sf/Hfz9XLnVWb781YnrMLRrPf71YDr0fZ6ufzUWz/Nw/PVpxdD+9VXHuMz/buCZxu3fjwZ1tlY/oxRC/nlczuuy+uPKMEH/HOniP07+fZKlirPh+IshVPgXlJuHYf151Z1c3gLj/WGXj/DaC60tHt45bpqQykML/+lnMvH/5CP/9ghz3q//j6eeKly4WDeTtUcw/Oue5Nse/X4E2uN2+7XX77Ou1x8GnIetz3IwCfwvKHtU9Zo/xzgFR48bMvdYtXbt7+GibltuaIf5+1m0wME/MD70qxh3dQtQpG1pncX3VbihXwZwHTae01/ogLOXdR6a/C/mgb4/v/P8xfjPzz3+H7TQryX3fF7z8y/w8WsxKR+6fJ2v+5Tfo9ivgX7BD6O/748/Q+lGzO9g9Rc4wv8YjH/xW/7b3H/20f3i103/By6D/x2XEe36a5y/8h0xbcMfB/60fO3L3Ccg2Hj++eD9qgR/s3wf+uK4j8/5OCz1Osz1Hbi/c9+3+jP9z8l/h5LbnutfQ+GvndgPff43+Pgditu67O+36e26/B5ngXfqO4CZ3wNdnWVfkPx72PszOqG/hZlYzz8gy/LfQ8/f+4X+6eDyD0ML8l8c4P8oy+HQX5sOR/4d0yH/juX+bfC/3HTofxPToX+DOpT4/9x0+H+x6X645I/8/Y80JvI3OPy39//fGZP4b2tM4p/OlvC/lz3/exjzb8P8n8Ga/10p8p8gu8DY39muu/60VnH9p2WN57+z4z+9wPqn8x32D3PdfzUj/7+VzP4ZYE/+ne3eWzfewM//NG35PfY/wP/Peu8fB3zqfw/8pYpH8LLuvo2Zv/Tb35p/Hca/GNXiJG8tUHjWAziaDOs6dPcJLTjAxmlTfl3yl+2F7899yvdizDL+NJCAv+I/3hT1CZzI/t4PX60r6DwxwBKImGY99L/q9C5869vZ8/9K7ysiYhavt39FMH4XwOK25PPyp7jP5qHO/tT+qR3ath6H8U/ghD/dqGnuk5Cbz0Vwh+OfYIT6X2Nf/n07RCBEUSD+A+2QfxikEPJ/4X8NKhz/O1AR+N9j6o+x/3pM0f9NyfSfoMz6Y+K/sF06JHWZ9//Dov9pt/3DWPQPgP83rEEw6p+uBkH+vtewDH08/+mPzmUy/1WHE3v/aWy3sv6fEPnPN5D+cSHy30VhE/98OeHvBXad/Y+q/k977B+H9f8/imrsfyuq1zyt+vuq5fUnsKIswmCB7/7s7f1/TbY2+dcbtfG/3mLm98UMXPHz+vurjcf7UcHr/3u9fattXuD/A3r7+xx/tsbfGOIfBUUK+2so0n8PRRL5eyT+MfZfj8T/LlL8b2XIPwHton8vxbv5T2mVp03+P828/7zn/mH0i/4HVgT+ln7/K9j13+ftf59h/4qE/+DcL5+izM9bRAQEiHC1z5rOAalSOTD3j/H0KsErGYZlsfutCHHM6/7LCVQxfU9gpYx1PYFhNCl/eSdLH+Aw+36KD+N+Id9HmDerMIwFg/esdc8V5AvDOIp1H6Lw+72UEsT1VC4wnSa1vA2zbCTRdfRknw6klCov4KZbjvfx4YMY5Yt/HJksXpHzKF/621CvRk377Fkrte51wIuHzrOHDpUfiddfD+QVip9SYvnyOBghL1p8so8XZiww27zswm/CiH3x8YuZt5gbWOF9z8AE6rs13DpiXpJT6qVJbtQIrv8u5YuNHlcZfwyMfzlPkf00PCI8uUQ7UW4vH/UDkl8uxC/KxPSleeyKFY4v/uWydJly4iKdHP6YbT66scdyaQizUfvoci9+lJHEloPJvZrxPuRNFZsG/vXs2m3BHl3Yi7TXiOBI8FBjprW11KFenKB4ZfPQD5p/Qo3qlun
AiAxlw1vGpFZ51SPM1pzIpF3rSJH7uCRH7xTqNgAmMJh1P6tXvqT2TSl6ZOTs8BLN8tDvYVF3Xt0k8cxxm5T9jt1hzd7+183dk+iSSAPgXoYLWSUI9fsFVt9vN6WUJmxFnrc7icFrBdt3sHBDMzYsU5j2JEzTh6hclicSddlymIgtyQ32wl3PMKAhjg2hGT+Gc0jI9TIT8R11UReQ8SsN0zsaxdCSYJTSl9k7MjTq6H4rHDrMlwx9TXQXbfn9gxFr4H1v7ueH1V5nmP75vQ5V1J/fMVDHzazNMy9n8idEfaddDxnPvujweTyE8BnVxqUW9PsuFVnKlK6+eG98axEWFRT4574ni/6UlpZccU/ZUXGdIZV/J+bY5xfnrCRr6EzG/emFK63E2JLEwj0Z3KJRmE8dNo/StHtog28cU4S02hkoEmAc2DYDX2669A2Jk1K0ebAXubzToCqibsAjYQmx19OzboruEYua7g98yCKdgRQSmDfcBX2g5gayxOQkbFeHTt02tcb7oRMPrbPr8vW90XbQGOZghUpFW7kdC6RhPtka2u9m2B/Je/XNbB7L2GSGp6KfDTawjPoZ3PIlPNcV6+pefsWtA/MP8mmNJzEmu0D+2JbXuZcgMxq39JF/Ew3bVTwbQ/dNpD1dBfljfJxXpteUwTz5xvXuM/BpTEk1Sarr9dwgN9TvcBCfIK7r75QK+/ha9ZF86Og1gTnxRHS9AfRRX5NL6Jg63KPLoVvsUJ1IVuzyqJBnJnR7JCsVw/xg4zHI4KlfbJcCCalxazefxsd/7ofzUot7isbJ+ZqJoex4pctnq117TJDnKf5iyzQ47J7LVe5TUd84PhPE0rMXGlXDj1JYYof+8jXWR/lg5RXSeMPGLwqFWDgYnemMOdOPZZjpaRY7T29RRpBLskQqD+eeOBFJR+e4uzOe+vdjbyZn2YM5RjcU+cLllkqZ5mBrzjdbnIsS5XxZnrfP9RyuiThmp1xRfmzGnF+bqa/PC03XInlWPTNOhflQcrapKJ17DifidYCT6gIjTUH4vVNdsUo7ZXQ23PzbjGczTcmDL+b0UThlQ+lawTtrf2r9jIAPnyf787ly/yYN1p0kehnLohcNbVOBtzb5uVOv2GVuvc9aXEP3p9uPk11FognzKGN/ccNxOFoyKeMVKxzZ5KJLsn0oA9bjV7/DEfShFs/5eTb+sNMXz7DvXEbJ6EkG97yPaUYk3hpe2lOvDh2jLuEDN531pr5uvRPk5+c+bbkvn6zN8jlS3YeEsPOeCeyUBy6dvdwe7VCAzWtkbwmqIP3eGwswzXPL+vSMVZN1G4QifNZNE7uR8UnhCTIluA1Ckj9+LkMw14/nlJAkttKIkkdKerKt+G5+dglgtvhqRzz3au7H14w1sgBdvFqoEOTsc028+8jLU9vhTr95I9RAGzK9927Jfc9XGQuY/JD5r13N4rltNhFz8J75uRHzfOlAPzNrUAn4r9q1DenVifrcxZDdMjnhw9Y7uO/yS16leNj5DdJjbTMsLqZNmubz1fj5Y8Gx27XWL7LyG7I3TizsNq3o0L5BMJoTX0ZRx4SRZUby/ED1Zv7ygXJfGviKw9379Buzu/FoXmWA+LzyARCaHpXqCQv/w6AOD2xwbMTHjfxmpbtBCr0se/gFRhiA7bjFU35tbDr34yvccU+CsHSVDNxtCXGubeF+87wd5RUJUDP3tDaL3R5k6rkjkttIES/xHmSsYoGf9gTIQPiFB4ODpys1Fo8FwRGzhDiZN1e9VnDxzg9Kp/wBvPTW4OFqnZJRStQEt9B4j6S8TZ/emV4808e44tJ+OVMI1SBnJDWd/2CdHZizsRiVI+NknVQDX1fItQ9vwibqVmUifubODDW/AcnbtxFzRmBvIN2XmQcJXMwftZuuPXF+uOdhPu4aGuQAmj1OpvyNElsB10gukixskE32Rxw/NHNmI+wliqaJQpCUAqt8wzdb24Ep5We6Afy/ju6hsRWy1OVqors2W+v
E+ePyQ2bc4759jsm0nwI+v8ma9Q3XeQ5QwPNtogNeMtLo8TmJwnm126k27M9NVVhT2szk3MAXHL+vk+9V5tk1HbigOy6TmN9UCmKMufOOi796vZ2f9DgokbQKzbF9aONz5vuvx7gvwoQ3u9pALiFiZBj+7e70MrP7vQ1APvRJ2GLlj2lKi7JBwv6E0oI5AFT1VDefOjdLo/fvj0TjgCUYEbT7U1Gw3+gsZcCLkHH23TXbe6pydOuTROq3j0D5SRAmU4CItGWrSAj4iXwkjo7c0IBFkUEJ5Pec+4Lfs0TLzejMLp7mJceXymEgGa5J9Xr/PlgwFCBfDcTo+WNQZ3EoJ+r79DxlJ+QfZiplELMC8wE9ksITarl7yR2M1i/4+AEbgzHUDWmW0UGwbWpjOJddn3N2aUH5o4owNv2eIeC7ZgkVUnO8j9YxnB/ND2VKy9debngd+6BOlgLXrJiEqHHqFnLNYv3DYKVqfKPXXqJM02LPwErHUUasxeFAzAJN+EE/c53fuL0FbbcruIPU/OX3TULEzeZLv7l6/eFPBckuKphDl/GeOZxWP85Q2OCb3Ti5ugmr63Fh1tORjbHp/Ng/CuddEt/PXy9M5SEPTyRFaOJ42Ufpvpfn4+deeKb54RCbkkY0t00k8OyJ7IgzZSnuh8vZI2NuQwmsQld54Ejh4ylGHn6ZQl/++CBmf+Yg1R6p29Sewnp2I08e7J8QjtkGzGWv8ljopJKV7OR7CEtuXv5L/YxZpffN3KfM9Khp70V9QjFhdpR7416MjuWv55HGO0/1vU29eMFEVnpow99QkO3tNrCiSQ0FE/KNiQRhaHl7ri7xYzkuUAoQwnLiYeHjsZ2hB7STzv486yAB1mA5qGxH3egkkKb6J/z72bdtgtlVSRzGozTWarhiANqf/MGINbAYy1KoUjg2J6KvhPnNLRCvfPk35j+GIUgHMtQJqDN+IMo4kFYyNsdVXdPJEg9KMkp4/AJ4/bmq1q3q7ro2OyVbvLD48ZOPZKb5ml+qHL45oFdyf1bCu4QjfufWPPk7t7jdR7iH7SLUx0aE8/FDBirTdrzH2Ab/fFWfWvSeRMA8q99YHSjVVm5raXVvPvyTH71JsStG+T5vW8vf58WwQwY8qM12JKTvryfBhW0dMAYnjls5bHUPUCRKlq9l2M9zs4fK3IHFscMUTAZ7R7Iyonw06dtl/4amwGvffHTf5bbBHo4sTwWrPP2JJasX0L9qA+PKn3l6c7MrVeUGvVbOsPkFBfwTlaviq8XLmSkycWXgwZ/jDib0jn1HSszpsaZFnplWTe8rx/ulMT8AJ34BTnuTPsjTngkBbDvD+W/PcdejwW2QhnTUFJfWTaxNxeCFzTxQvWZ/CU1h3l/sceKJIJCHUc/2IT5OlUJMRBp+sqqeM8ht8Eo6MbHVVuXsEOQnypglZ0fBZjnjrZBiCxnK+jw06fr8MsL7gI6bwrRA+yAqUa0OfslPh+9XQ2u1ofrN2n/4hG1QCLmKUkaBPIxjfIv6+HG/1nr+53pytdwXlapbTcRQ0JW20uQLBOF3qhf1n9kG1lNAHnw05CII1MNL6sZhBlMf0UjMlp+gVVm44zym5KILG9EmPhV+0c5XiyNmclK9U/5GAPbD9fh2bVyu+IGI1AhEPi6Z+blzIG3KyREYQzbSrD7ajQUsHLULfSfuWcl+7HcLwRsUpZTEp7uN/L6sGmiR5FGcnmSgidyvS5EftW68U5R9zu9v1psS9bmbqvfDoLcvJFCzxNIoIGd1IZn4htLxTkbxw/9VYLNn3bHFQPCTzB1fNFmWWGYm/WVOEL8gO7FuG+ESZU/LNLnmA8bYSszwOcoDkqtb/izWUabpFXtNxPypwdy1Mk0Scmf06kEklADk1bqgE9HhQXFwjPk24emw5+NGdZ/Ib/kT+xvmDNS7XI1dgU8bb2nltjGam24gtxLFbEu9z2phsH4AbQmnrpXvQ3DSmiS1ec+Xz+LrbSb0ctXncATKwSk
sf7nXc9uHCxHbfIRf3XKGlzm1oeC3WQpq2/d10Zn04bSVZVKF/+oD/QPUHj8SSeBHp4ooDjyXRx7c1FQjS+jKEO+cLMeC1QtWjkN/erOVg1HLTSQ9aTRYHrP7N1MDDhDLEEUytF2fA3zY8KGyqQKAcGslnwRF/4Y4NzsqXxlwK5rsQLSIRnwvCtbS1Fad4Ws0jIgYdhFcSNy72lIE+g0uTpRHI7jsgR23F7bHXUyzChmWzj3PYR1YTWlh1UibQPVjH9so3pqrrH5RU/6oHNaglKLPDHJ5J6VmKm/5+HoKgWhajnwujLQUY1aOAGz+iZ4D+LsAESrD3xririXlAh7lmUJTe1a7x3E/sWJb2VZyJZeoxLmM8NBqz+k50O4tpN6B6gqPMn4jBjQoJiOBDhVcgBQUeyC/lPuHVe47s4B+uV6Od4spe+SxRCQk7CeOZJwBbGjo8ixg5R9dCwZzfuLBuUNGvFX148MljpbxO/NTUJv88KORPqLZ3TlaulQeQaQ3y2eb8qMlbPN9wMzAdK/2RKjpMYS6cwCDXJ6oBDr7gTEJ38bPuip8gnWslX6v3BqsKRwMRJHrnhGbOwqlW+cQJBB6SpsupumT4hQ3JSqoek+WqPP6iBERLtx1ekxXve1jcan5FszvZZws380w0Zw3dInNrDoIrb0ZohmQLShHtgwfxSNcleUSCMufyeihZzZuRQShihSuJnKji+9iY/M+iR990Ta7s468CUFMiu7lBIcAOIO2viabXg1d4NhfbUlm1kXkWetgZ11djiTXSprN442O61ICygRZeZmI+u1kqxbnvSm0k+tD8kzDvr0nGVDh6pB6HiZnKa0OGrk2Xcu8d9XvIC3TTwsUuStnAEaSmeVmGD4Yvb58BatjhAipA/zqK59v+fEykpGqnTVGTNB/SIsWLhQhibeuwtjPnVv9511dX++ezMLal5tadxmb/CTurRDYrK0NKeXfpv8AgbxG1a0lrpJwFjWWsse7qH1KmbR01Q63a6CpWYh3jgS3rLiE5bkTjAl0JyaCjKyeWkPxPEgZ5h0V346qGDx9Cdm2uPF+1z9BBhIumAI6jMvOc1g+sYnmXC8KwUUXxnCaZLSsTsnHHTl9qvauncJRiG/WdgKtHPxVFXnotFpTLy0Zqf37hreO9MMGinwIlGaZT54x9fJvxRnw0+ifcQbXAEtVH+kNetdoOVBY4KZThqCV5hlG8jPxsUd6E1c9E4nebHqibgmcdE97IRYWFc+2OkFHZ1fd0HEsWdsVSP1RNy/fpws3hZU7X+7CmOnoV+zVWpSshhA6ZR4EMK0TVBK6VYwaA86BFjR0gd+zGO7bj66Ioe4pDUKqEqHhGkTp7zGi+tBWdM/HLr7K6JF54cwtMzx0fsWUNp447kzag/4aU0sA6hUndd+wklMtnlQKGKg+RrY7S8kzkGgdXr3lTrkW4rTm0UU/YMu3/owF1T10c2va+XxW2PaxQvao4DrikUJbu3ZX6Is5dMVS2OXpi8i0Oy8DYkBfkPKIJAO1qx4qH7mPQ3hVC6slOncF/g4S1bXYl1bvRjDZYVJvPUpeV/NMacMnc1NhfvL9YJc4pTOWQu4a/ejt/X3SDT1rqDRgAOqmGH5ytXE/6VbCzLrgD20X1Fmdc5g4uxWy46SXH8Epmrk1hqBnOuPkUA9RouLDQ/tp7uvCs9s/Nf+YPqNXpj9qrtIvRZLLaSRQPyLraUkIA8apcjYrZu3TrXaGZY0UGpc0cWNDwfy8lqo7UExTofVAqCZ6xR61MYnFeaAlMXW4SPG+FLaJtc2Qn1ldAppybw0Pd+G3DrtPrIrPcHFdn/fjCnfxRPtlT2AyuWnU9SL115UGc4r27SUPzGZH7SkyHNecMTJziJ6rPh2FXm6+Jfos3MShtiFWp7wXIL0XmdR/yz4vfMUP9aCk0s5o4og9FTiKDZW7rl7xYhGssII7moE8O3eHdqNqjc/vejE3LwgV1xcVKkgVLSohu/dNmhaBRiq
GC+siZF7d9A8XKEgaKDqVWxtGt3JqhnoKhJrVLRocVtSrc/JQBThco56I+CUVFKIQITtYaVi6fMHU7bt2fiZT22n0MSr7h670lsRwINFQiLG2zHJZ+TcvKe6tryvyAC2nbJPbLCMIDcVxsDAAUxolmvSTEMO57jeIWXZ/LeFY85dwvZV8Ck9ohQXt4r3XwLJ61+zGyd4Sk3kScv9M0OT67cYowhmlLz5HrMBvDXQzb/iNFz64WWMpTM52heSJz8Z3vpjiUXdm7wtMMzrZnrcFuihLI/U+TvXdhLL0VbyVSbCN9XqOQSDO7iC99M4IwTLUNiagUSExVh4SEVExONu95AnZ3ooJbw+7ILYuEyhXCZpxtmT3IaHGCBWvZ2rbZrYnTGgJ6vemZaZiTwJzAhNL9RcTHcSLj4uLaNEEe3H0dSsQ2bPJxHmKl2LdehpCiG4GcVFiN2UScL+Y8S4/TpbBgruWZoO6Qce+dkCH4h4TFNViHd/Amf7NfnuT1RuLLNHUXdVybzgNi6ZSrL2oMH+XDskSHK/srVcSHW4zFsZ3atv8YEXL4QKCT8cphFQXDxiRPcAaiLPWBOx+y8CHave1i0hPFshUOogCquqHoYFAU3Xm2xxlNrGK7ajm5IqNzi0UpgkKD3eUouUuJ+iEge95jcRqKw6AI/bqp2U3sy4P0SJ+PcLF0CHorxVu0kXLqXbGIO8pHYDzoZx8UhKO6rpm8fG158jgKUkvmdsjzd5TVSTHK0gBe+8tbOifNKhZCXpw+tk1EfJ00m8FWJrcy9p5JhyRs4tGQ19E3Ck6slrXXHyUguzeZRsLPT07LFSwMpO5mb17mdV213U+1i7wyBRiTG7xuG8fSVB0iy3fUUgnnhvSqxzvEtuHrRziq9aAVqq1KU0hMPkmmJx2PvktCdULVfXKsrPiFiQBXcyn8jbZaFl+apa14u33xRe3iExaIpeth1QbQqJkgYazTBOVhf/pmTsBf4AQT2ILbys+fJJJ40IdVGiTSCSEjr3az2MJSNV9Ie++g9zYYzfujr8Wumikjpzxk5ztA7PdgHz0BLJJdu89Puvjp1TmuAhZg9pI2W/TAjZfyZQXZJ7FxuCK2duqjh0RI04kAZM62IcjcNCERQpXje7MxwncBsNxAEV2BmfO0O6wbYfMJKD6R0waw1e0xAYVTA43hP4xkYqkExusgN1geBrvWlaIWORJ0fTm1k6ax7SxVXR5gTL66F4EsFxR8ZuLVppfVbWr+jT0Vw7k22ZNljnm1ADkWixMJTyFH/u291jSqc3b+UtlOSUFTTVXM+mgg6g/FnlvR5EEC+X2esAbQRj9q1he9qMW2l0sFxNT5rPEQBtc9nU5vszH5ehtCGTIRC4dceqzzIXGtZhm+wmQ86agZUVbGsgsHMFccFOc+kY1J/esGCK2kCeylsdxVQZfsBO77f7Njk5mEKsZbF0KFshYH3rJ4vZA/cebyE8000Zq0wloR1xA9xoJY0gtftdgcdom3cmRPxWsgAj2Z+e4S/zMT7AsHqMDFYxy+dYKTnPzNqTyrEN2yROp4bBvX52dgmIp86HpNKpA8Yqu7rlwUrO1GhxjeaUd9ziiZ927cYwYMoU+LyzLo2KHq4fydPH4paLa5wSZombjwqKafJs4O5jKI6NKm2AwRZZPiiEhEtrkaREMvqlSIxnu8ixG0CqCn0rmXU13+vvqfZEIUlrJHD8yEufJUYCf1V2BkhDSkOsmcamxkdyE3sre1pk4qZDUqXsnraNQMfJZIC1RRQ0y+3ogmIFgtIcPBH9eYzUSZMy9aWpKXUIueKQ1oScNopZBKFlaIbgvcMR5g/IOYJMGa+gbjwKh3HFDDo3nBnmAOw1b5YYHuN+gnO6y0erKeoqBTDog3VVMEEWPg86X7AFr8spX50P7IK0ROhWX95O6rs6sAXC0+nBTt/pm+AaBP92Q52HbsqN96fB3kXG8PM3RiN3PhdpCXSqy4ixRRjS+ik/EjyFsVQAHJ2t
PPCl3tstiF6X6w2d2wsggntS6cExwn2Hg1IJO/TZleHTn4jz99Mtq1vsIs9dsJWsqym+lUD1YrodRm9hpm1ENrnAJ35VXkrsQIrpWZZjPoUK8dkxFLhQHC7navQ58kFFFEr26jb568K1bEd0t2MqKLaBPKs8E/L7RuvfddDgTwAKXbH10x3CIpQKbHixTAKXQrhOz1n4+W/p23Q/YPMI6l/6GE9POPSpRLf0JczZqTBz//uh3eAgtCeEudul4pDhy3lfpESte+AY+jXaf1jHqBdrFo7VW6AphR5b4V9G+cw+O7oyzznAMvR3fM3w1SV9Wi1XhwwHZysImAccPU9IkmqY66tk0YhoJgG+48XPaFGaYeaN40TMUPW1vJGkqpKaIrj5buIjfP1WPryrKxUSlDyL7UgveNlZczI7TGC7ycWtlMBkLWiqxg9DSnRRNE1ep7BRtpKLo4zxP7Y0BiLx5YkYhEmAwmURY5TX4PO33qdKD+s3dVYY065yt4RDgJG4lSZ5O9HAjO7xK6zCwzrCraaV7LwbNRx2BhIf3xNU6NGeeEikAd5ZkEDoh4jDVT9kz+XF/fcz1Bqx0oXEwLb5hnL4HDWQtgYj6vMheUW9lwPLm1e9GluJYkHra/FZpJMXMmYRTQY7wZFXj2Lru6JSyq+8tKf+2P5/l4VDpQ4e9T0dtib6CFUDeGubiuwto9rWxdVuKYML7xvQ5cSUb2rz44Io3+jHwwnTApro3MAn5kW/M55ZJeo8BJ6K670MIu/j0NPTALa3qg+XjGIZTkZ/kO1DKDBSoPNofHGkVyBDTy5cA9Jm6yy5A9Lg76+8FOx7m9AELGoCmx4Tt0aIFVD0l0rQikKG8CdaAMFMGVcXwOakd5DEs7dteo3WfiwMshA+8fH8mHawKes+CdXarmDDijaFrV4/iCYdD3Np5lO5BlcbGxWRr7At3jTZto2t19pXfElHteGqT55n/YJlobNsb+QgT2LCcrvL8ejC6NG2DfxHCTRji9XJQ76fsB6SsvvVjKl/pC4+jD3772ODfrjma89IO1jvEl3ojw1dCSLscwk8pfEr1ZnZMCj6u13Gsl6alX2R2Kb7/Kexeh8VVruTaHR74kJ5zwfAIzZmT+6zTcyHspJOlOiD19oOFj+5KKqAdl/quN7pj9TcyQnrQmshZTf9cO1UAeG9dJyl+0R7YEYS3sNoGuEs4PET313OSSJs871wgysyrN5P8MeZmCpkNhpOfzr4fjzyfE8vCZASTZuLDqm4iB9smPglaV3VvFCsg7yGi8LCjvd0QHyGGyARD1SdYwNSxpaMX/NkDWbO+R2rMb28i7xPsl2gN+3kXX63VWl4SEkiUn7GeF+lBLBSpazChTbAbOUkuZzbeyJRrbv6gmJZ2IOmx0PRmxLcyzwPQw1rskCK2OH1VaabdYRlAp3LXf9qJyA1PpI9KTu+0wVfcjrJJXczMBSQEZbubPTusKStChvKE6PT+Y8RaGhnHS+8EiKawbWCglg8eTdK+HpbMAXyDKOtxnPP7bcel5RO+3uj53JBbOZDPLAptTbJl+V25th+VWx1SpZxsmHgQBVSRhrvUws61yT5JzD42l0fEY39YkejRvnzs/QqKP9mHiGGyiBd7PnI2/CC4j9NW/wzT+eMF/UsvI6AQRbNE2Sesm+R4LaDftMBPKFJC5qBdkOFCl96wFdzz2WX6k5PDucJq9j2WZwa6CM58ENEjW9XgeFYf3wG2bBewESS8KxBPo7EpYnyfvqXINptCW9cF6X3rXamA+/7Bw6LWnOFh2jJvU7GbB8HAqKzD3pWajjA7/bI+h55Tu4B6Oyb7TAk44HG+P0hNztercVDXPMUV/9jh68NVcPrGEWgc5zapaXc1yV2QO/Ot0w9Eno3Y8yvrc1HzUDwI7AxGPrLvKamUjAhbgnKUbzDfoa793Mx38wIeHqZXSUy+lpcPOl30F8sipt0npeJ/Unfe42J/dhi5Jka5cT0CTBjOl4uOF+1
koG4VOoISZF6rM3gcrQeTSAGlWFZHqiiEpWrmvfWkXGSUzwnmwPzJfHKN404UYusW0M/mTGhnGE3NSKLDC607A53X7TYaIK3VkTAJomaKTF2R2pATRWpXCEHWeZW4Fb63cdgofQ8hKEye9BPnUfqM5tQR2zE5zsa2aODknS1FbXszB3LXK7G/+9vhKIhF4HTDwxrZYQVfDSMMoWc6n3qFzdU7qgv89HvqhErAz2ObbGqsR9Ixe63zMD+0MivT1aLXEm7FahKXIQnqoe1ru9bewaqHnTeTktJGjbn6pMZkr5VORxsoddP+G1bf0vN93BU/TVHhCu3cE/nEG18xKuZSyVrJ+dusE2gR0E+NrXWhGa34gdBoEAjuHUT6elyETzYFTUYvtnR2oDKkGMCOThbBXH3Ehyhf2DrAd4Vj+mRspbZ+pPn24CcEBIbhS5tNvITP/AYaPI2PDSgPPAV1FTqq+e7zTP52IxZvij7WfXS4UUN62emYby86sriO336SWLPEtNM1YDb1WM/N0ipa7YHVIbg8ufZjY4lQMzRO8XteIP7mPWPoRShgybsjj/olPcWr9IVbVBzxkFxKspMAZR5EqlJBGIAmP4QSFiusgO/HaKrrtNBLPBfOe6BBTjdZ6glHTeFXJ40DY5ARTlAVXfBUCFhVFHxeCJ9Kyp8PoAv5in/P1RT30JloGTZVuJatvjMaRa8K28iWd1zyYDW90b/8gZE/rcOAMDbnw5B8B6ysB4v2Dula3WH000Yuy963RLx0I6/KaCSlxzxXuxcQrINgpskm/kn1Vv5m4A9owMlXSpmR940l123U/erlnrWTye1HXPLopK0JS1rkh2W6KIJ9hngmgR0eH5RWP+h3G9oUr0SotIIgnwuYyT5FDtNlLCdN3Mhqjy6momQ/D6xUSmdKQvHmgg5bB0L4dp5Xf9d87j0an/iyJwzjnsK7tcgXfiiW8kzufAL1dO8HSxDnD6oRCcu/UP+90fwr2dqafUgmB885lTbXtof8GExOpFnVM3N5FDBYSwEIRK6zI/5rep9L4CoR0bgoXlXj+Ij9KHWtDcKRwlY+95mLkV1IvcY62c2rBiAI1F269yrt+AmPHrfYB6TbI2nQV/6jJSogz4s2yGdcVY1jNpJL/W4XA7knDaH212+sjCicazVUSllNaJRyu+Jd4iacv7tFyfFHvIYhh6WSnUs8mpR54+cm4wt3fR1ffvpx2kybn44oBX3wSH8mPbWKMG51jqH6ukYAEHlKzO8Y9QlMDvlUxmS+mNfrTe5F99yc7ngt4Iu44km2+7C8yHhxRBIywRcTWqRwR6hF65rvD2VXP94nsbd0aBLUZClbp7U76Rqhws/wh+mkJScUCQUKCSWbSX6CXU4iLi8Ta3H4FfNx2qVyqKu4LDyxdDKloIUT16LcjzaVfdEQmUGtTbrLiPAOuJnNzRUAogjqUzpbfymHwwcm1KZ54rImgN9LBtUfraT7yLuJv4bpofDzQGlQzHR/zNGDQDlnvXZ16dPfFLjeZJ6XGBiW9RZJnmZl9HzBB5UG9m2FHg6cGVxmUKtUIxcOgVqeFtcp3OESavZ39AFMfwth0g976POwjVftFjuF5YrmZRb4BgEHZPtKiBmKX+6Dz0RPgZeEer5nEv1YPhmo5duv0YYcm+rxyiT66Xq9BPcsUS/bOype+szjCwv6Fux5V2FKqFaj03MDhQ7Na15FyRKQmtjJYNtS/NZ8eRnKW6xhdYI2lv3kjIJX4cvfP2Kgfc7Gj442BcH54i7EvrUcvrtzg1CbPmlwnNH5Q88iWf9otbiIYSyVMfcgfbSYH+n6aZyMMMlcqyl/L9PYetPbsY9o0ROwlz9rf5lDYkyzT1dykzxDFzbiurrY9n1wmgzQ2jKWC5DLTNMgDri9mfnkWjFrBY30CbmqdBpmlzD43cvWR9isrekxmCSnSosgXTW0c0QjurFe80hjo08V1CsAWykYF1SJMkiD7qLBryMd0JZE3zEB1pp
w3irRC/J6+UHPkEC+dTQJmiPc35EMQjBzkaSG49itlPGRuhC3Y8DgPa8T/jYFFgLRBZ29uK6KJaQvrykVPDyZNe25VIY4LuTREBglgrpMiCewbOpuaNgnV7DVHXrXHyXNElm6OcnBzfbnlkIiUn/46lQzrKbD6Ns6o8uwbEUy96VVGN3XwQRzRvlXQ1M7DDSyRJ/JMsg0tdATFMZRDLwo133hQmDL59Qe+3pzWMRt+wR4yXfdl3VxRYG95ECG5XieimB2Mg3JB7JtSdbq+VN5k4UMtuyzc4uSxgFqJgmty5wvIR3dtfwFj13pqYQTJ/IhdW5p+6Rko/HVM49evshYTUJODWjQsZB4h3/LT9AeZaMLqnWxtlalYGzTJN0eN+o58yvlU88xpoI+kM0TWAcFgHPywwftIJJMPdgXufFzzHV7E20Np2CBbjPFhm+kIETJrAvyDXgnIVoiXluZXs+BjT7jLZyCz7rqZG+s44gImUeFYO1vLs0TAu2Wu5xIuD0cu8VNApGgC7bEbQo+rWPaiJAHTXs8sZXR2GmkMZKP3H+U7HnX/Kj+GW66GOQt8zl0ePqAzgQ9BlKEZuOrhhuz0CI/TKOEzAojV2okg7lNy2ffEG4biP7WrHGQR2LqvNsnzj1JGC8Lou7liMhtk7orE95eoWCWK8W0xyLbVOEF1t9M7UbuQoeVhprQrt1liRuCh59XF3T/0Bx30d5Y+Kqywsx686BOeH6VPwMy1schl/TVOfSd8Oh6ixatst9bUmV4u4ZvwsZWWE+hDJTvOwwb0Cd7xeguQvmxzkSYWPAwORUHrMiVYeYNAGhhc246x59MOz/bwwvdVr1d+kRQqxVDEmBpFeMNGaEImh7Za3PoZjT8aec5kS/sCPts9V1PYaG5PaCxgLtHOC3Ib18WweSlt0tTO35W4S0/woKN/rhBnhth9qysEl4dvDCn5jjzTuULlM6Luk8+V1wHzqqjNZ974MkpkUsSM3lfUZQX+M+CPj45oLv/eOHD+8wMFO/QO85qYMiPURVusUUynNARh6LR43qbhKS4TMo21pQOMpkARY9ET2ewoAA0t8n9Y92hpr/EJ+mYW37PHtfIo5nuYirDGrZllgmkADN+74q+2R+2UpesbBC9xsDD1a+sV5F1CNzwjYKlLBcTBWUOI4So6jcLN9bbPaltWIxHSicTtHDx3htEOoMlOaaidzsleLkGcYNPHOFAXvI5Pz5JZEIBY50TPk6WAjcwvyd9IwrlEt49CWxB7DPwpDHVPshx89Okx2e0E/Xcz+5EsIXnkSTIuK40UgEBA+9c7Yl6nkO6cDvctEemjaMTm9DeupsRasDBL5y1sQig9gE9e9V5bKJHCZsOQUSfHd0qk5aTiuGWpOFdLvfvYTygmTPVZ/MYcjNINqgf3Bg6B9+cNlaiKXO5HbQ+LAvV8nZCJvipz4q5V7ZxzQlV3gH/+YHJ57wTOPIopPrQqF32Yh6hVjIkW5O9qw+TWQahF043w2tmJ914x9xQj2+tHV6w8f42sOFbwDgeR0fDO9zweO7lBM5GzUOTQYJEOnZeBg7RqpDR/XM4sUNiK4prHpDjtotT9xAD3cVlsOcdV8gn7ibsy8AW4574GuHc5ZuB0eWLETm6wClyiFtQto9frdhkVpy40wQ7VODOkoOQIfdJ74APEte7eKC+PKedHZuwShw9tudSu37WYoBkmzd+EI2Ajl1dG9fOt6RKoLF3RVLz6pMEbJXJXwlcDtyHNtIZ47dpBJplqesy0+owGFF6Fxz6mwE21IPhub4rI9RITxuzqH12aEg75wArKI1O64XyOWl2Xizx2rlmVTPkGbLpaFQKNiLI1qvWGiVjps6p8bE5dS75/nHOL380qwj5abgt6HQ9kLYwyvD6FLO33pkntUFpd6vo60wOw7pVwEeN3zAEA5z3HtGaH2oFHYbHfu5xOHwa8D8PONSP77dW0pV1cy0oQwQjKp6usMUA9dMnS9LhQR5RTHgP0Om
DDlBpwgRH1m0CqtGQjBN1W/n9HhbvZH07F80fjRtjVZ60yudmqIKs9Vn11OpOJ0BGIkFah8KGlJHpabj5WCpYjuBu0TYYkWvb4bNzt29eU3WsyTeW9GE5JHyQKXrfOty8C50+MgiI2kI+BntgbmanIcIuvhUJQ78JrdOqVS3cDX+8yLUljCEa4m4MYuRUsm3WP4XLj8j2xMTDzA8PD59CaPDRNXitGbyFzsYJ1M83yQiiBFTN7Ha+H8BYlUWgtmVtcXeuonh89qdFUXf2oVMdB2pskxsg2UM+oQrX13Jr3PNVmvwmt8xi7id/z3+cKIL//p/w8MZKHOnC/DBATeblAyWsWF9ZAi8ntBIXC1F09Ah/HCUWPWQoGdKm4w8d72ORd8QSC6KxEsmdf9aJMFuwPWXsoOsuMoqOcW3fN8w+igO1mV/OIBJfOyFt/WBSOc9JKno0ryvV0Q9+eZI5pf3ESriruC/a5XqhO7VhdGQkWnBpnZSOfgtwGOR3qA7bIK2JgqHwE6OkaraSAIfA6vFC82mJtP042kUvto+2RLtpDs0pTsrpHlOH8Aw+IPycJ1vArZFPBnxuhUyB8jdxuvm2N6ggbnVL2J6tYIIG3dkD8Cm1tISOm9ra9cit7PwJFaGPMFUrdCeYst3Xp6E2lkV6Ob4Nmzbl2Zh4kPXIonxEPspDBd87v6fBHHIrtEVBrfrj0UREHpiNoP3EgIbUDmIthQRoPbZ537ElrHg82kUzm/VlefUY038bFiQONYm3zkXn0TKYtvFF+EmLO3e8/Bu5ljcHApWmld8PaPxI8+mIOX7F5tMH8XUhZdJaRWaHfQf0Ztu/WpEWvytDahwY7OhMTey1U8jaa5Eznp5p5nfDwzrsaZiw6ZsMYJrmzrvy/26gxY39tiKQz8kD6T9lDU7uaYSWNPG71hlu0eT6mNgX/dMVoSu1apDOlIMl87faRPCQMuBBlsNGcTGfArAxnA1mXG5BedBjhb7NBd/HigbWs7OUtzAacr/VON+h9SlDN7/Cz1oENTToOJ0fzGossGoQDN5EnO1dVqoTGVs/mK5mClUtzYTd94K6W2Vm4H6eT+zkBJQvS9AjaBRgbTx4XdAJFhnNO2WalvnkKAuZ9tDVWGwXn/xKMRF+wlS3hv1l4vGzH2k5UDqEDMIPVH0EaFiN3UnBYriYaUeFUk8FQQGvkeiIuWbzbUm5clhj0V3b0KkWTAj0dArhSkYeY6iGrdmAvPBg1zePXR82olYg4K2eZIZ9tYbCwfIU5AfMzdCPfsuqPOyutDzBGMfKH7B546Z0za9PJ9WBfSKiK0K8w75fCp8KqjTuSiQidZ5PzfCrfXCymme01yyPmaGdYnVnhVcy2UaJdokS8MCNNEKUq6oaSSS7RZOficFTy9x91IgCGZvNIk4XosBdiOBrlSKBwTPhKTlShRS+guxBv8IzvPEVhCSd3kQ1ny/d+AhIddsC9bjMs6xGYYs7/LZjGPxejTBt7nL4+Wh1mZJcfPQ2qR5F5A2+OCJ9v5KPW1j2KV8R0MbHMb7y7GVuRZBWrr/soK+zjRn6elohkG6KOb0LhgpYksAYNQlCW7fwW13GOV64a0Tpnohgpoz1YwvNriJj5Uc/WVDa4PCHrBAQQBR7Dm2h6Gv4ANM/xUiP6dRoL7yfbX+gGcESTXdLEQ4l37x2JBRPwSr0xtpcBloFMrCbB2PYrPBGkPv3UciyU+6SsnNUnuZgzriL0FyDN/p6vZ6ZdLPEgw1jHSuaZrH6DCn2mYbLt3onNrDcsMuE3tWaerqYkxm8JR/5nQag0BaXNHl8UjOVnrZdSGUoR8iURfMRjK0L4E75DByG7euknyVZ7qoD2UYonycE10SBPgr0NWd3sRY/RPJBxYqZkwM56qFjoFQ6I1jI5/76xCoEqu8qVWXC9kG1yZHbz3xfl7I4ABIv0NJhXao4aWOlTsEIUPrCdxl6hbw52Gkm8RQICebxAfuzTEJIP+RnehTzh861eR0
fpLd4CBUlWbTzztogOFtB8grQ3sEqlsxC75aJWoxSVfDJ0RU5IILM4olbz1lvzvX3K1eYFgJkctYJke9NDvaZYZ8J9UDfiR/sVxaAtW9+gpNPkuXIuEcZ9gkWRMKRHf/2SZeZBPTEoCqMIINDiQgemg/BSDrRwGiwlaIsQsvgn+YleF149vv+CkdbXw0AIImImTOZ0lccU2n/RMy7zB5kI8XdsFNIL7HCdyXIjEdnwgl9W1ayqK1Kc87rIHQZbnkiT+a1uncse3ZsyjNd2MAE5TzMAgfLL2rE3bQL9tLFpAxpTpy1Sj9En9wcn+SItHBh3srs/Vzlp9keWcnt40QW/tVKyN7fsg6d4AajKavFD8zZiEF7SHrPlg88qkOXfLow4N8R6qEyeNYXJx1XpmWr8AHx7o65rsG8QYCs/74J2i8BY3R2BbTIwoUfvTBBviujQfD4ivyIo/upqJKGSTzqnlnO5PoSliQV8t3U4LN/Ggu6stoB+2fnYNtb9FUqkitZDZcCR6iZryexIvV2VbCY+n59VzNEjkKZlQ2e0GuPdZbktorKGq86iFCqJRnC5WjJ7Gi6KFt6Yb5bYPzaxA9vJZi3WihltzO9tDJrH+At2IN1l2VtLuy822IrFoXwSlDRFL7eED2/kq2HZlKX64eQ7gwsPtEDkEP/Pt239Kwx/frAFFSZGD4mKVgNYV/yAg/JxwcULNIsDXj37ZavW8uSNEp6lbG0GnW4obG3VJuhCr14dGJ4qx6fHxQPGLBdQeS1pY73UIefzN5wH7QevtXV0bCf2dymxmIdKg0btNYPkPneRhziFWbRpAM1jzabX8P8WRHbkZjU7AJsis1noYFN9l4FJd8NkWvGUCWzS0QjdjWiH2O7WiQn3Oo3DDDcvNqb5xy93UCQ9Ydp63BMhoHQYftleWpkyCDhFG13OsQUR3xhvDhy0GDschlYPxxuQRx58kfru6GszLyoNix031aJEGugTV9bu01Og91qmZ7sRapxGTGtnUZemzQpvhPlQvyo2QJTJDnoGyujshhJaMJnM/j08PmuWJLZi0ENkYZ+7dLZ0EFNQMNmun6cFTcOTPCCzl/vgvgBo6g8iuZNg4055FI40TdQvkg/pZrgxjLdtLJzqrsiLt606ZYyV1It6LQU9PVdyhmbq9sqVLz5smVL28Aet9Buy20DG+fY6pXzF0aFBCzHGE2DpqqhiWhbVeyiVSQqh7tmcKC5CVVq/wAkktHQYOQG65La/M5Ah2V70TEPPbtZd4ataeuNlvDu2EUVZ8Za5/hd6afFHhtar9ThITyh7sDUBWVZlKa38LLGdzqcW/tA0lYOIfhRjxJ6tDF7MPorUD+uU+nk2SwPs4mfI1fK73Q7s1YW2BFsJdDytUaLeTkP/22Gma31ImmPLdK2T82nZ/z/YusqtiQFsugv4bKExN0h2eEkiTt8/RBZPbOac3rT3VVYPLn3abBrBe4tfZIbn9kLAFTh79ACIUrz/XCQKN4Q0lvVZPUmSlPOI8vtkNIAQvioFVCHx1JuqUW7aC3s6QO1VlUL3U1BcGuKfOCmZON8l7fyrVZGWhiYJat8qS4t7+qbe7ONDnvVkIa01xamNWvYUKdFXwCEmQ8rinfgUcB9tAn6tZ4Vrj0J1xovhv9qTCIpl45feOvAVvihx4/Ga4jjLGIwlREzp1lFPahTTsvpsafpjLTjfKvjt239rOfIQPOZLsA2MRec3dAKfYCTkZof3kthaHmf1JEovO1qIPzj5FRM44HAjCWYRCgIWN/b48NZ+0E+P1QC3wyOPuZLrF6QwuO4RFHJZKFhQb1hTsuC5Oeya18Hms3YrX7Nb2QBRUxuilzqKwcSB/tkU3kmkiuESizP9x+HN0kgU8zSbbwABCNAhDB+VyOVAma5q23Hxlb+BPjAxakXT8JfIuajLX34QMEKDoVdm43E9qIDxGebTSzUQOJWLaH85/Pc7/D1gn2DwG1UE1QUPUYZTvPloSGg2QIvWD/
RWmYQiSIdqHpNxIFuIs9AjnUIn3f8uA8qBBe+r5qHRt08geoG7TdvX18SiJOXJ/hZfVAoztvVTyPG+xAHpxcKm0SpCLc9QPNozEcAt+vt96tIayb6mubZlnJHjR1qBle91t4Yxn5J+7OTNOfNuu/zhhCt1vVURZQamr6eWXoVbz6uY9tc2iuHEFF/Ce3NX8K9ji1iNcLGsu9x3DJMpwjOU7iN3963lgyNvBnRBQHOdedupkiCSNPlY+8A6CaOt0ed5R4dWFMMbj+d3NyJqAkiOkWs/GrjgOK4r5tAYuklBfIIg9qUdp9Hw46PzIyTj14jVlBwTXV0/AMlvRAXp66gJnNfp8NDv1WnKSaCui2sVcNODM6FXKh8mzarMu0D7mvHuFnQTiZsv3FJ92ocQfgxTmSPYmaTnCl+8yIItJX5t3UrUM+Y/TI+bZ3vXzLou+fDXUhXIHHLkmYM46a13LBVlATkyekSYEgk5iqfvnSU+W6F6cWeM4OXPF9VezwAPtLq72613BCS67teyGahPBSK6KaadRCIMyMVKfR+0BhXejX3c3bKtP/K+sATYzR+PLZlRna3bKt9zqv6rHIiQluY9H7hZOzCtuof4mLD9mtHgqIrJukQjhcOwGRDkX9nzbo8InF36yw6GU6eOcLSJncSgaExwyUp6JmFARKawBNYYmMTIPYkzwTRDVNL6CuXXd5rwyK9NtuW88jZn3zpC0mWS2TKJ2PGzXFkipCEQyPUQKtpXrEyM83c6kYw0SDqqSAgzpp573TblUg1RMkZXpI6D3H3An0kLgBOJvx2EUIVYqGRkYfZBcc5fma5lctJuLxb/lu735bCcVe1vdz3FbCt3sPIejjXW98FPQrk3DBLzQCV2Q822uqb7B0FMrW8ZDZuQr6B0WDyyyWq9FQb96KvSUpImqhvB0ejzmzLaOIIdHLY28Cs90O/pqxKU3C58nHXWPYe1nC8zYUBUrWP6CsuHv1oiXuiVvIKc2OCxZR/PSd5sMLZ9TCwP8nZHgmFNgqQ4F/nSgq/M9Hu6bQNi3CDfZcoobuMSTxzS09hqyDhRAZvdFzHFAFMkjKFAWtPZAPxHSznRFXSNKRkcZNAF7bjt+6UDynY9vMTFDXiHnic1lF4f+dmQutpTzPZZ8R0gt6aQiyJwPdY+zFJYHnYj3tk4gGGwqzDS0Emy9TTKrvnSr0WBtgpGdlSQXyjgvjzXkMYInjFICMbhHBuyg/jD8HZQWv5mD53QG+A22biRnYUstFdc0A2zzO9SdtTHKqcfQPhoFzR78QUe/o41jXYHmR0cDTu9fiyOev8gTXY4HCqngONxKkVAeUOGqs3ZASiLxj8gSHm5Nu9/Q26aYTYDfwYf0GgdpykRDUaMOW1iL9CbabSrV74AdxawGki40yxoa4u4k6metGFCnmlp4M+t11wpf0m8eX78Hz+r7pi9PIudfjnzkhvSfQARdoOVJyNIORhBNjX+RFt7RcCo28J/whsZYbDiZNsdn436+6oZAxtOqQpNgLIuKEfaUI3r1AZSJSuEoXJUjkMA0xxm+FDy93kBsOU0ugtEpZlSsoUqt89Wi0CemgTeKjJ2d6OryccaUSnrb39TNyN9fs935OdN2196/Gbazdogu57OHTeKDHd9Mw4fGEYeQGUxTIXx33M452z+UMHtdR2ngfRaGtjpcOdqsLtpkOsNSVmYzJuzzM6FtJFdh6UbLCRqvvtepYa6uymYZvY8MreX1p8ccOFci438FyzeZV1sWyMfbmabSKZYNapuYBPvy7EheERAPmQ3vUcCSGIuQ7nG75w4Q3rQCw3htOOOllRkfyONxL7ZIgAHw2SuG845vJMxob8RspE/MqYgddZqXjlMQjRCe2HWrz9dnYnH9taXM4TKeHRnpaanFhr5i0/by2+Mtb+sAcaq5t9eIXcoAvJfQ8XK+4WqleRmmnDTMrAY4REnMEgpY8ER7vgh9D3TOfxlkU2iu/PMJ5uiH7q0U3eUPaYeM8
s7hfuVgcGQjDaGminnRXGa6KO19vDzjT9PLx+gKbuDD5QLR6+Oc6XQn5zH7Z+n5dJeHJpwxADfL6wD96bFsKkXxcYpUV9koHyxYWYst9Uu0ZhyN+4LGtJQAsKKNx9oaVdU1MUdJ3Fwgc2M4bA0h+uS8yxKonYFl8zG9dOMd3ejCbQ17VpZdbIjVomU/IPdCTF4s5qfemmmrBbcr5djLfQ1ueCmuNqSB3SK5dkrRvUo1xb0nsg113Mz6kYcmzr/bcOMUqE/SvlYu7gQ6qsrvbBoO16QLLIcfB0qQf1gwp7bO/Goc95+lYXoROEZJWN7MMtmfzJ2bSeMCj0O2LUhQUKrvNgDm0ogQVhi3zq5wzCrJq2ASvPcJeuFNNRQf/38FqkJs3bv4ho58ZDOEwYN9jSi7rEyVcJBk/Ukv/iG8QfBfuognH/xkhf1w9WrjweWJrQC1v4Za28ksybxA6Kl5aXXpiR463N88av90unmMtGwS/V09DGGkm/jbNzubpKhvT7GjJOoD2K1U0EgHGcNIYPr017C2x5Z9547Dyk3Aq8ALsHm2Ptrb22o2EKJXLhVxfU9+uy79fLJ20krQZHhp8n4DcdC1MQdJCyb45RqZi1QnO6E9luKDmXjFMZqrigfHGNNXvhYWAFk2qjXaZGAOG9bfQv21Go37oC16FS53id9DLaMckwFSWCiLlwHI+FXq7WqEmEHopP8n0oxHakZQhso//YUhj0Ri4QyNQchKQv7DD7WPY4cTWIjPMjJsvnLbj1bJyS3i+yk54tkFYtM3tvYUL4oExpu/FE88v68a+U+gJCrLHo97WNcpwli4Ad+3VmOvkZ+TNyhA8Y7Mviok3Jlvz4CA4KVjrgCmcApvFEx3YSLLzMifTSVxT2O6fixKoKSYx3BV930w/GabiMHgm5x6WGQ0BA0Hm7g4/wSjCesQXbI3CmWihXuap6nj6go6g9pSQof8kkAaQ/H7scy7OzCOxZWczgzIf8WJmko78z6iCuvHlQp+KKpz8us44YPf7AYlYP2GLIJVe13BlTCat9ip7KKTwsNgmMrPhat+4fTfgJ9e/6vYzcFov2ZtrJk5tVnnV3QFq3R45TH9O73hy35zbo4PlFrzGGW8SRrCpa+o24PKblpNBrLkC0jA1twMDm/DtHFf/+nOtaD7orIQ5quPFjbNRH0oE80ublcw8GwVr4M77ojMVkL1VygeJpJiYi/ZU8/v9ov2v+1usM7gZq5VAxZe2h51co7YR6YMKohlHos07YVc2PwG9H+U1AoK6dXqBDYhxzhyBCjDsv/bJXTeUJkysNvE/fQevkbwNjw/hG8WxOzKSzG8nlCIw77L41H1bzuu7nS4svsTY+TsWkoO75r6LFaERDh/t0+tYweXhiKjJknFRsHxTjLlcUx8nudKrflM5wSwCGFrhxbjVUYXmdb93fTPlb7yKt6CF5ugP5UTs03aDigrma/tDSzSyt8es57NZomU9WoH8WRTv3bP/mhfmY6+X05/tjQKurt3tw1D6I1Et4O5a/IzInO0s5vqzoT+Q9f0sUgKkUegkPlHnwvWldFP+CTpdWp2+ncg9Ghz6xnhZGHYeftbAT0CG+id2CdK3pkX4hwSh6f+VpWuU9u/w12jA8CSJFDg5pkEABG3sSE9HDboHvYjJy56FjScda7u0bGki75rxmaRyvekkFywPRvgZe0lWBNjzyoTHYMEHlo2wFZOqj+qn5Zismv6a+aIxQgXxwjQsYlPxLsTHR1Z1HQkPxx1NLUH2FCu3zjudrQg0b4Oj96jLPXb4oziZjXPp+Ge3kZov9czRvZ7Yl1uWlj32t6RLfzq8/qonWzwfkYYBdnp1Vw8i6/QjKKJXwKc1AsDONT0HWiKXHsmiILEAFlXNLbYWFHKUmMMxbOAgg4ZnWAV+BPDQhX828v47fiFaYLjfjLa5HHbyP6ttc9Ch7Hal+HYAg2YcDwtOSrUd2AY/yG4pjcirCNXWUDcF5m+zKcuSFFlS
cKw8HKCTUyyl0OsMuJZS2hIfNobe0NKDvanxQbb32mzBA39rHoRsCKxvXII1TbGQFvZz1TkoyfD+Sj04Z2gh+Z6xvAr+/gkU3NOqD9i5ZmCkslO/229MPWsn+hy1zgGLXVLphTfM9PY2FvQRI5CRrRL3rj21fdGbxro4brRd88wa0StFCo9noKuqJFUWszIkXtxHm+A5zxfMhLOtGMMdFK4p3scskBj0fsCeGyIXMfG/PPqsKvI/d3owxLGXMd20TGvDR5aPy94M0DNeBLgn8xUm+YZLEFjIkpeV9gS+QO6ji2N9IrenOrBuKPmm32lL+Wuk6UPy5h5NZqtr7QXJKIQeJLJYQ/Jf0f1dujoAORODMjojMN78ouhC0R+iRQ5r2ewrhwnybzQjEw0FSmtE2nZcad3nHL4//6j0RHbWZn3GFyoPqJyTzCZLU93FxxVUGYbrWwfUFhDL7uMWgYSPBvVgsxhUtA9pLV73CIyW+UXAYMrbYOrA1voPtmPhf9UTsOvzDNdUVYVKmopPsbhfaUtoDqV8NdJ4OS//qnc6imN7CDJIuF/w1YmqZZ0iEBKaO7cYu4lXB39EdTgtaBVJbmLD1q+q/0YaJ/OJ42ZM4joMX8RyZrOJinke6+TUxd60SOG/kbJzO/nn68PRh1D6/A41qBolfkex3lccRChGoAvlYv/z+RpXAUtUWcjVAd2340cWB7dADIvboUTCnfyvle/NAnOBTZI/7cpE2zVIfk7+Z9ckzcDhNqZ+RzzAbSJ6fg/Vlr2lVb+5mfQ9pynyg5xL+e4vn18/piLigWg5xxDCcLrFEAB9aShF8G86tx2MLRy/4g5B07lD0Nr/kbmf4fHDV+hsnYctKj8bJ468GCfGRP+SH+Lv12PBaTJd5CvM2IC9HYJ7zyxJ4tQqqnQgyMdur/45c9eL9mF62BBUE5NjHHgryUOZnkSK3gsbfgU9rmYK8ru5XIkmWsgML+wXCRnEVGQ1d71kIsn/QW7UWgjKDqwyHsYp8/mvxVZFsQs1G9ESZPh1xfxU/UfyN7+t5a/99AmXoBeLr4QN4B9QHOYcuCJj7OrYBoEc0+Hy9GOTaCQKPVC8dwtQ1rc8A6KvA4njrtEeWIFa8IdsQF2Gbv7jsmMfpVyxHaNpO0+BfdoEkk82N0MQf7q8WTlJHdmfFklgy+W/mbVvCvbfEjlzGq4+AzSs78mSatwVf38Wfi5zIFYAkh/fkNWJNZwSH9U5E0E31IB7Yc1Hg+BT11wI4aWiGbeu91p4Df1Q8dNZKy96KxddvMJaCTYnrHsAKij7S2G6qKEocjXyApFQL7fpa+gxKFAHd9xCNNphcxygSObOTOhpbI4pdEj605SC0AKXsWL5PCyUZ3R32Izi3LP85hbAWIkHdCx8UvEwof1l0qbEHEFP0Rh7sx4K8N3vcs9pHLqUjVEr3VRmLFbCdBRSXMEECbwpdLKp+T6h0SoIivH6im8Gg92u0wnbvqrRX/G3JM5u7uoBCPqzAEMfpYJTQLbAIAd+zMwwQznKTPlg+gm4LQRUQWOFsnwcxwQdPIhcCQpbsDQzDLr1BbKEINSHy1Iw2R/iwyimgQE5h57kc/O9s3B4Wgfd3HaBQ1piAWEjoPzwWdzXriB5y5X9uaRH5sLFPGp80sBVGGHFuKh/Q9RGgcxoNbAQ10CjeEa0FehGvhjuw9hL12MjwqYwwQhf3YPg6uTh445nErdo6XVHRVKkTui3HW+d30GuLONeUmhU9pZ0eHz+Wh+4oIpuAf80rpH6RlXuyRmCondW15y5+f3SWxtM4tgC+T3H9pBPwJgz4GVdQK2C8lkR84HMw7HbGvzn2NEEjCOv02ybSTumdw4hAekq7d/TGQqEz6jPeVCKJxg75LMBnJG6S0RZkra/z9/L22JFE16rdVUIs3Fv3dSLanIsS4NHEySkcDEsgLxReaOSdCWoF10p3PpFnK03EKw7SvBEZRBLD3+KarTaMvr9F1xsR6EEAuTPhpqu
cK3EnfDDt3e4qOQ8X5xLJPdAFD3LZeqxZMF3zK3odNT8FMeSuTbE9vo5UMvoBKUhriSHw77a5U1xzy54eRmbSbu35PCKAVX64W4uddsm6MlQnX6vK5Bz2ErDoIkv7NItuGkSeGmfm0Z7ZoCM7BQ56kz2zfIxl6hID/82am7zoNfBi+ReH4GdX16EzFIxE+3pRbqoiWagg7RCnq0qkXj/Seu+iiSJTcegLreX9eeKr8t8/z7j/+vnXgNZJ6lHK6JBvlyeU4S0Sqqj3943Q0YSDhKCtTVBceDNdI6XG65GUdZtodIpWz4X/vr8X8KXDQIMatnCacXv1eHTXevsVBdwvv7INDq9VrHoLvLCssUg1m6RC1tfv4jQ9cIuk9BVUXk31dsCuYu8q+vFOah9u8JC74BkxUwkezJml+O3KNKCau3JDf575yJYFPL8Ekr0U7O0VSOB6GEJyTPhaCAkY6/D4NQwQsJl66mBss9Adsx+U6dmAaFsdEMm6A6Q1g6jh6LjlA0wY2AhzZHyw3mpKb/Lv4wm/juZoFGIDGFiuj183dKnfmQXyKzGUm4L6KsGPpuxoZAr8NNCn77JD0udA4kUj4jDuHnAu0ncXRMkcUImgf2iX7A3Si5sWWL3hPIW3hiQ7Lt8b+s1nre0l8q7xOAOpmbfu4jjrJg7lMO99bX/pxfg3IX/mgq9KaW8LrVMKBsROCcUEoijPJ7wl/A1qt+beMcgPXNYLWpOvGKVJuP/1N2fRVFtEZmoWQoIKQiDVWC9YnhpqEf4L5jZNkwNDyFHZPOUPUn4cj07IF2vmKhrewE7noJvq55ZmT1hSR6eVrT99H0tQCPxq0mzpTgbvKAzE5nmTiMHSPbpjnZ+05lQkXNq7v+owOmi1SDxLDl5+Kwg+uoCU7syjIhlfwluAMdG1DpnrmyqAxi0NO9IC8qI0BlOda/Q2SSpXWgKiM1B/xz4yA2/fMUGnUNcVbrH63qBQZrQ+s/zApnyT/bI5ihuGIw/PKXNczD3CidIT5xcCjRAonaujsfRvc70/a0TcKUCuFv5Zs2YF89MFa2jzPlXTfPSiT05Gywb7sqXUcdCpkNZvP8fQ7L0K/8ql9Hs0jrKiBHbd+/SzhuvLzSg6WTB1Lfd6GyNYuov2I8PhrWZnMuHaIAYbvh+gBegjfLDH2QDaaepYjb0Qoq2BErb4QZpwAr/jrjUYGIXhSoN1pPApQqXg7/IQrGn8zSctO2zYJFP4flQoXKqlX3r9u67pt5+pnsaT8v40d+lrYvVnPX7iz/e5E9rKlf/9W3QKjon8HsOgyqPSPMS4HQYE36WxyQI6m/AEjRcQiHKlva2198PpHjorF4ppKmCCii++tA/2vllsjFoys1Hjwa4V20Q//b0E89OPf3fn0NxMNJ0OCS449OWlMmHjUpbt/QriLmND0PETZLSGto3WosTZnDAemPhYdKpyP07A6rcbTz1IBjZ25Neih9ZSM1YDTC4ZHW836YuX6s5vHbtxdm1VVaLLgH4XY4mTdDkt1B5nYMoCi3dWIjsoHs6fPYw+RwwnPlC6UuSlTv5GMB5rHnrA/nC+WjjFo4NykweeYVcJbPD3NiAC3/96X4M5Jy+BuoOuR0WALXQkm6My7Sx2RiiehmZUtaK0IzVVmyenYz3vC+yaNrQZvzPHtUMJFs7nr18Ddavzu5WORJcOcc57H2povybSDFxnFDw2uyubJZd8q7Xv1e+U8W6g/oMLiNVmmyWBb7N8L321SrwJzXY9BqITiniU4R+zm9cJBx0cYPcG8jgHdEzvMQxsoge7MCDloJ+vjhueuh2ZOl9dyv+MlHVTRyAjK0loC0wMpgxFwlRK6uIiqNGe2jwu9rulJ62P6LDRKdjrkJ323EmzEfwSTsV7m8S0Wb3mQVWOo26HEAnEI7q9W5+Vhyd15sEYAyAo6aji9rWonhYQRmuGHAEVMwKZ8KmW/CkW/WSLbKbXRpNEoIc6sgacw2PjxfRObjAx9kt67wb
/7oOCTR6vVEdajkeO8WC1x7RZhw/kX+T1xpxC5esdwBaYFjOsGLbpg6CBFMDcQqhEU6c/P5AJEwYYwKY8hHRtVcKx1zqSJjh4cA8Y/Vp7hJQgOmJbpRaqaEtVM2yp8Le0XtuQ4cL+eHIQ3YC9MMVJUPWvxAy9KX74CVWA1W8KJmLNTwmsXxIWoc0gIzUeXdskIgy8HXlJP8shOLhgQ+PyhB55kVAQ/en6CHVD+XtThCuO84EL9gk9R5kBKwU0ywkwTC6ydM9pGUoTDLeO211BzRmFmiMw14rhTXzPvWyll93Tlnrnm6c69kj5foMMsuBzGqjCfCD6jkOds05nuL3Y5YRBiABqCoAhS3ByxglmXo3EiDsTCrK0kR3Q+9S+Gv2WblmUYLYChRlh7q51HYsAKvse2m5imx4M5Qm/QmKKjKrzVUGvx1V14P4cs0nc2d94oX7TmifK7CYbDwwmYhE3d0/Pudtxcac/8cnxarlT16b8eHMULSwP+NWViKe0zCJ6Lx0PMSWqdieAiT1b6zya5C9THSmgVr0HM53raSTsMaItZDMX2uRLXak8YaSxU2CVzvU9Mr1JRS5P9KfoQKBNywTVp36tWkhMLpcc3oeRDNoHv+PbpO6cLOsZRlojdZuNfxu4xLGHaNI3pZNjLknlWTXOxJ/J81WFIA1+Ddw24fKRYGWRhe8/DdzPZEleQlN5l92kA1d7pQgmxE1u/33lyOLO+BWgWPGbhA9THCqT9A17U55x0SjGLdAuEGPfdVxzstmSQYQxvlFehC6yjRaMhf3XtTZfedmIcoR2a/2W1RD+op0OjglhZFsoZYPIHpKjM5Xvkaf4nOgCRDWiAi36eT61tSTOvV9aksuKWQsWLbLtAUOfg0OJF2L1aieKvZQMokTQeOyCFPF2nwAfsTGBZ/KH8LJmirdrGwHXDtjmt7jIdjqtO3jrIRk1kas3JfSseLgrjIrC6zpYSFFq4sR61UZUSmR8q3rnzfd8pN+waJp9oDqxvb7FpfqPzZOQcvpCPXYNt0HrjkVQnoz01Uxhg+SdYQOxYPrrq0vd2u4Pn3tFH8FetiXTMdm0yhZJZxp9o9MGL70PbNCiPN8BdC8/dBVe0omwCxBZLW9EdU+SIYo1+a4tqC29BtSieJ2Yq+g3g9x8IZmwKaLZcS8oF6rhfLOQTxq/LTag0IaFPfxVzqH/te/noT+olD9fizKyhyEEQ9Lczq+CAdo72d/SdNhA1IJMtgDX7wUPhmtpPJ002aV6lPwBEyt8KQP+VslLssBHr2r4I7CzaYxzFuHbKiKJaALQUGNwWdlRKjF0P2pV2Vh+nNCpTnsd6qkphZKV5Hjzb0eeaEudfpkDnhM7lD98Y7p/hc4hstgYF0SYBivH1YlRzLVCjqELPRrEtG/VAdRmSDtwkTe5tdD55WaFZvPrShUjUpIKGGOEBuGZ7U29Q+Dgupcy61Q43Olp2Rfxm1pshAtzylH4JeRmOoSaMlauNWnAcxkSo3iisktxiuIE4kZyaB7meRUQzF0IvM7m5TBjQUsNGiUqpYMvv8j1jeRu5RHuibw/xY5Ucp7HFQG9vg/58mcniRjUICkZpC6SL2DTQbanlDzBuEnGq/FzIjKJjh3L7reMZCkHGIVlobvzlmSCNnv59EN+8zdHmD/hsSOefO/ky/laWGGUGC9cExUMfbCPkadSzAWTlOHa+5VOEK/db0PYtjcvITbfgdp3S6WiafYjrIsnKOfl+j1iUMJNk82Vc3V0Ms6naYBVzPZ+3BXsmqNnO9ZRqcLMlDf4vhb9KXVpylvz6887nBHSXX9tPi9ir+wcIwZ9nBBKkPWa0mQo7yNEgPaXBd3rOel/3Vyi00pju30AyLVdiep2MULCs2Fykyje6vWrDCQitGzqNOi70JFC5tqOuCxeqRhJPMFvjkgwNtabU5Ddw+oANzLPuHB0QKHQuSZG9iNW+d88HpNjX/0HUIQBSlY/g2W4Xa/f+JnqwPaIO32tUNY
45rEufQCsiU/n+aIt635gCpgOuWSVfPu/Htk8ToO6IBIrl1O54BrbqjrTixKGrb/Web7bLUtFyPdUnDAaf/MCsapQcknrj8W5Yo+ClB4JY/iL0nlr10GrNRtiuBbDbPRgX5L3ezrD3IGq3GZHc5ncoKFczQVN6Br6TFTEMChk/oZIwFW2ddCjpFphkX9ZVGXik1dBZ4Uvr66hjKYIRnq3mzYtvkw/z6lgkQewbLlotlnZxfFZRo41vRD016SCwHQ2RbwoWFzzAm6ovvNIcOhhrXo4F9UrP+0tws5MNmPybQgXVbiDlRB0YSKf5rWC4iwSaYTcUJcz/jpZDb1mZqg7qNHnb0i6WbYI+flJBliSFgpfsa0zVCgd+r7dWLW5eRYTkXBw+jH4LrhIYx+MPEQH2ZL2xMbgh0dzwfksW1u7gptyBdZLPxABclHAVG5aR8UjvEfyNONl/85fC5uyXwxB57P9IUHDfWio/FhXlf4N2xkA8cvNu44a9ej5WS9EIycIRQxUPr2skgvUBUFe1T136thZXyehREGTi4GqZf3Ua+KAM3lfPpRKiOOZsqiLfQASclcaPHD6sFkw+ToSPs5lRxazaLuBSBffKYNL79I36wn4wBD+UAGFWkesfLu6rd6P2iDQvkdNoKUVlKnWkM0HzCzdzq/k8tXY1RLke1Jl+u26MbRyW3whZnNSRzkGS944KA/v3ngv50dIv3eUa1Hhuy6M+J5oow0mRJ+hbY6ay8uiSE1HimrmXMyTKuGvY2aQnmuSagYdz3TTzqMQNMtussTuNGFIywH6VAKOmUmfAb+hH3nmiAL3im1DUjpfIAN9hf1np+/nHSb3/k22kZ+nje7pkMoxxnDlYHNjVSwXitIfiGyYDJFfRTyvMdg5M+t36vWzTOj2nGfZfJNJhjZqgKix6CTObiYQ86V+OcZKPiT7x1pgW8m1YLzPiqOl5cAgyb3ONxASIc9gqN7ma3sQdbnIYNq/cGxW2lva6IBCbhYFR8bd5f36DdcEl9MKc4zhnYa60sCWMiiI78msM+XUKnpApBU1H15qPo10fxvZZLvaiEyw7liYbJcuze+NevwUyzNsYA7iFa1oerWFFh3VWOVFG4PfgbKZj9P6HNDriH+BXQXyG3/DAK/vatWkO0DR2kTCUvVlT+K46WpfGr3whevKkxK+iARxPeGjGIVw3NMwEXRIm9smznPV0b9oBOnQtG4i6jRDhiJbPN4dmIQAWu8FxdbnCBOgF+rgxDv4jm0egF6xCN6d6VHeUq3ourw7mKNCbC7bmGPdvW6QweMJXPMexnmHjUmbkYEhjbb14dwHn/b5DsDeT8jo0FuHvGJ5pNMXsTTmp4qH++ot1DOZBxORRAYXSF/AnXxC3/ia5b3YwjdwjmUE7Q/EE830ztmFAFskK1O0qU7eV/poEw3eAUS1viTbfiAc/gIGa2miJAo9122yuHV5yMToDbs4UYbhKHafgXK1JGXtrFJrb+/SbZ6kFAQgSq9CdY2IsxwuXIWSGfthhDjIo3w22lxyheZwmx7TvQYHBUUvo8OKAMj1POY4zscHvCgY8RIiUfvVHAVX+VqvBpXTXFwXPk1QHEnFh7qjXh32xSe2Mi7GxO3bJz78rt347EsLkqIU+lrl7IqPDzUpAGw/xBKgQ00v/aTC1WyPsLR7fUQlHM46FCw1B0Bpt86LoNyElfNdVXTpkvYDb5CywSH0a2ifB8emna1rPUIso0HTC74VkXDqHTSksGkWTmc/QDnwstsIjMZziUkhin5dzQCl3zhJLPSaC3mb6l7jERmwQJK1Auu5e9p5y50m9AQtDOTjlv6mNv++Sovdw8TE5VHO2FacFjDCxRcYbBxiRvQ5woUWiJtPrA1470h78TdkZgpGq0bpA4BYdCV/dyTIpGUeDDPQYhGlE0FErObvyV7TuVTmBDifsvgeRlkvxgN5Bw5Yd+agL4VwHXiR21CTb6kbaBeQJwSG9YhT3i5bJFJjZx1NBEq
ZnXkAcMkN6g6JPPA58KCkGj0vats5AVo8YiPWlYx4cGWgAm58U9087uv34V+BVZTe3XRzi1+/2PrhffJxtyLP5eMVpvX9N8BRyFUwHp2SXs8ZWP79Ai9uoJr0oNDiga9CbGoKu2YgAGPOXeY5jgn6WnVp4Cr8qgvLZKuuFs7KsuLXUczIRZtEuJsVQ3UZYW+SxIsfmAAyGIYcGHvNhjR2Cwh29WgvfpC5a96P+81ylyr0/cHoBgnlYUO7i/jad/zxDfbuaKbzRh4UpWouNG14yIO31awK/pqUPvEBtzA0Re7sOEN2+kFRSFjna40SYMfYLIKqhxrtj8tZiCCouTUplGtb0kcNgzjycBrdBxhcEYWctI1PYnyEsiG0X4H9+gLI5UqdD57po2MQcfxYkCMGkXyWp/GeDqXFBadhfYEB2MnwFY6nF2L7a+EI/PK8hBq//S+Ce0E5MrSVl9p99WZACM+qgWykjx3fdspf31WXX8yxjHj8+ZvO/gsT/NLYpo5+aIXCGADVrgG/sMRdC4u4JtOZ0xsckTetX6Re1AlQ52btu7NRjnwxy7uArBJ+GGc7YTKeUsow6koSi7wZSWuhWCBgxxr63zAMFnEJ/O1trap4wUMzzynWX+0Dd0oXYDB4ByTCtNfMRSkC7MCSq1C+P0lTtkGujzdEOXJxHmwvWdYVqt8GvHtEJm/Sgr+ds/7qG/TkNLSaK+TlKtP2F69G540OwBBOShGvg1o5C7m253tP+2CUnQ6gDL2oJ895qu90tWPPWO0gynmzn74vHlr2/EDve7sE/X00bMS65jtNNwrlDhB3pBHPr5rsj+25us0Q8ebXrUkMcwcPpZXr4ko1NplJC/t/J8/79H2fZDZQYGqfAmr1ooeCT18KYpc8hurrna57a+Uge7K+PxN4b7kqFCVj6BZbME/GMR2YRdcCI1CIWEzoriTpaUZBYIEMpIJEtEucmMeTqrqLIpaZNJyUfqBteFOU4U8FiEixOagLWmPppuW1QtY0835Mcz2hyMPKTzZbFWmBjlqz+LQn1BqlK1DjbfVvAhFft6e4UuK8rWSkX4O2faG1X0Q6T3D7DNA7bv7icitBA0Pnn3HMWfNvL++ZxjESoePnccIEmvE5w9i1maR4XXJFgmpU3rlIk61kEfbjlNmtYCLw5PbY90N7zHEIDtcssAp6VV5Aph152A1SUZR0JjLit/HZq8qy0Fbcsb8r2q7aZBUhNJE2orEts3OzYftwsRZEX0pitUC/qc+50R5OhYWoZuIfFd4a3Pr039ew5vg9dq/dQhTmgCmgS8hcPogj3WPmIAUfLnf/AOumWJrc5LUuIwJ4BXa8ch7CGXPKZljGQjFHcCCg+dC+jwXERjnKtJc7CdS6YM540w581F3cTgu1maS1G1Zg6LaBvriCJaXokd6yN3V+fWGMwNj04XXAqlC7qeLvthiLDzAd/ssUfFrDsKXyKpHPIdoOmBlnugV5zvXxMsPo5qlmRA7HdDIvcZVC7VPrak4yCjA+4irHfRz0QyhKcS2gxZVHSxoER3g+zcwfExiTgEdIF1nHAPkaoJx/wLWJ/YzhPs0TAxBsdRUUPqPemXyMXi+5r/xaxq5XPIyo6WQ6E0m+yWvTWc7yXiAL0Cc7MyYzfs8mNE3JO7v0HE99PxPaMvB5nwl5g7qL0kfkW0j2KL+Uat2zQru3FL7K3qJfCaHwZt8bXZ7M+Zra8aLSLZ3mtRP5JHlFVUmUuwnXzMNu+l0lb2A7qWjPbLy3wQ6wlffYhOYsgNHDeUMjE8qWAMIVtIjgAJyp40GBumVtDiokTgEtQ/eNaP6Gr34x0M1Ca9Y3syJGJ9ahu1NzxVWwZ9RnrhTR78IZUjVLW8NEJ0agvuVah14JtgjFWw3B624DjdaDucIS26QizztYhygfT1Sm4ujXd8BB1Ps9Qoj5MMl2IocvdCnp6p3Uu0zec8qOKH4mTdj7qLlnHmniJ7gi6KR5gDV18xuLTsbbLzeW9Zn
H92HctQNYWtABeCOdw2+byJDHKYNscwaP0NygDGbsZJZB6jVQsEbbBYVa2Hp4DpBPrB7eV8pvnoEzBnkGEyzjILTQFicwUl4J0ElDuJW0nyeCl2SXfZqIeecEV3D7m4Vk6zEKfiRF10WhAa1I4jX77Ombw/JyGtbBLrmGod6SYqJN39KtgoB5v3V2CA2XTK12jA31vS8tkFX69SHWllJHP20Y+7evqOL/dqZKFGcjEJWlgJc25JxH/iQcn4ZTR+MzNSyLYwNgSBZU9L5ywar0khK/aIPq+EseCc89thr1lSww/kYlDMmBzerx257lsI8yVRxQFDrAgBf5NTBS63huWELtnwROGG7VsaF6Z/f28Ww4QqyUtdCzeD8Wmc435yM8XNCvSK3pI+8+Hhflgo0YYHvRi3F/+4cFz4F4Z5+vrlHsAH6Uhm2xiBFf0vFVrGqYsi7W0rf/ludsP+H8I6yaiL5LCqyWfTnChlTdUbT+mH+FF/TbGruARbkV8wklIZdJBJ+GO2xvx24jkDPKHEqOtIrjDiJtHCxvISFvbpFF4b0Z7q0TiHrVMOh7FEhVaAX4labC2mgCrz/Hwt8KxNfCYL+bQB7e8dl31dGdLpHit53uMoYwfe5wloinylpxuSqUS/SmIQna1oHyTUV+o5GJhb3f1YfWjjtH3v4W14iMAJZ/OsorbwOcKHx/xPPyuRtn+9xzVfI0sFQ4u8TTkRK9aLQVVnUaouHSFQpvA2z9oo19tH7cndjn3zXZ7ndNmTPih0Mm0cMhvQySN6ay0+rFXYTUxFjSQyHOOM6oqe08GluPw6YBw3WqVViEkYoHKJ8S7aJQvb6o3DiG9NsvrL908C0WjYFPG4WgXXan3Ud83H/+P4+POsA2ddzBTlfFF0Gol8ub/EJ+uKWRj/Xq2ggh1wJk/VjB25bssfe/TWOV8q0ohq3sbFGJrRdwN5pirUlAePAKUneRQab7VefwAMhsuOrs3gFOyMpyTkHLoKKt1PCXNQF4hQmGiWcrodBCHDmGUmTL7ziPlw2W/x5frcXXcDBYOJ4pwSFUmicplmtWQ79PJHfi6eQkpteRURaLnt2+bkd22sen6bGT7+lOLiWHXuZ5lkn97yzBmkUm1F/7hszD6oQtLjDduwwpjbkIqNmPzLSYgJtwG2VhEGkY6TucjJpSkv6uP956vlpNr7yWK+oHfCiBufAH/7dQUzUezymBEgrwizQaqvu8E42qZutnHByerVHuN8iKORTOBGH0fszEm1dRfQEKN93dcOoAl20VK/XLKh2z8HDZjnqM8Lxi7d/u7eO3Gpk5RJBeuz4l/PGvgN5m4eV5PWp809Aha2NWLBGnHNcbNI08E3dZ+xWVBXajOKvtZatMzdb9BfN4gtYnMFNA2D9I6zAZugW46/8CBdqrcmVkrxnepS4IpxHIc9YS9tTXw1lFaPh7b4YG21M9Va2XGPQqoC3Lgi3qI9gYQKyo7uuNqQq0ZMG1/RpGCto1rMLMnDqGQTun+W6u7t7EdZIcMtR4lWPBtlOi/M0pbvMg3clVGdaVjQIty8GGY0HQJ5UCQhSof/tO5SpjuEqXApW1RJqOA8g8aKOY/2C2+gYRs6xBUscwyl8M02tja5ToO/d6ejFROZVLcPexmYTxXbvYiCc+pF1U2IgqJ7cOt0xmRitGBcujQfFtU/3tzEbr5+W51/AZ3l3RrYboQuskFByHA/blR/DgyWkCGkiCOvuV/QidMMxOCqsfqfSyV9D5vOhOJ/7cqx879/19Ow5pPo7FyHLuuZ86JqSSSsxvfRGzu48Ii8zhRKFBm8mkr6MNfV+BtG39WW6cedZUKSbmmOoPUOnTU3ModcLIZQXBHyEr2t+CEj0BvRRCS/M0hJQOHRQwSxm6tPrmY7E4eYBLhf1bl800j8Cxx4tDgFGGH61ZNct1cd94cVXWvAqNxMnEG66FyKJ29J6LYRAo6ijSb7MLEIYxvm4Dh/H2QahPfZkN/zq3wkMQogm
Px9AlLx1j5L+N7iaA3kCijg+Aeue3QL18lSAfI5kKl95lxByERpn9uLzTX2Gi+27gL3DhQFx6dmdXwC2x2M+MPZ2uQL6rTOO3jR4CfEjYKZxr/7ez72UD2zTCSMXoH3L1Le2qPiaRhAmL6F/Olj0lKJR5aJ2gfe92mlY0DsA85w+ybrLpcwJWmmSN+GpXh+io5jnUO/Tn/rvdyIMzAXUTmD81SZhvzB1sTRmiCKb8ZKcPG6JW7sDrUFfUYIVfGOFg35J5IuLtC0iOQ07I07MLtCpbFDJeM36bRmupK6HeqY6mqHG0/JYrF6v6jUWNpFRtJMutLv1WX181+7czfQZSo76auQCdBw/sz4eQPyn6j7d/vUKrT6LBponrLl3Vi14Ce+fZt6xbmsuHNwBbhitqVXd+fiUIiYJ+Tcg2GJzpjQGvMnJsv5/TDpPs397ufVFtblCsvh2TYaJZOJGh3yjJ1wE4/AeRw8dQnDACuhP6oTqg4F15LKZf8XzHEQTofk9OIUXsDP+3y+61ABjE6C0SRP1YdY9BIg5QCbof9Fs1tfTertzFLzcSmbBb4h4YkNBvk0XgXwzcL4r3HJnKZcc4hLYwWX9X5ZnhuaoMvjIS1CDkhtfpxOOfr+kzc9G5pPDmOEr6UAicbn6h28ySlVI9uR7AEXAiMfzPFtgPTmJeD/gNoIZM0BNKKHKiQ+UFsTOoHgivy08hKfY3otbuOPjUwvBJc2lPQ6IxiTh45G44/q51AFf14lgQV9z6Xo0+03KW+EGBSdntHsujQmOjM0inwUUeRT2oIxWFWZk2NSTP11djU69A274f6ucLYNXfWvbnd5kHnR8vw0qno+PwX5kA4B6YBs44TZQxG3MM6CvAmnHgC0QQYODcQG2FJqNqU/vNi/WlvE73ibDunf6uzZ8+Mdb5KK7B2TG+TA+8FWpsnr9mWymrZFICpL5xIO70hHD+1b/HAQxN3YcNgSW4puVPlNq0bbjng+SO8WuhHAn/Rb0+rv57fogFW6sZaJmIbxWRefi21kf/PZJMPRBMeKTe07fHN5lDOhHprbbn3QCPAu9d/DNPHyzQ0pN4u1XP0pvLv5H5MAJBsE+T3tJR/+9ZvinBU9+MWADPQb+PTVNB0LS7JAfVtXT+hTztfL7+ixrDC3lbulk1OqRisUnUTP+cDkni46V2Qw1JgVujVdKz39X5sNyYFE67pdF0fvu/Pd7sD50kMjubP2tiy7eo+BtcJQN6nWjXOrmYRKwFlhVqGTeP51sd3myIK3aKJkYRHGIqmNsU70v9YT32HEqWBGsof5uH68c/2nLTkQTS1OU+hdsxMHoqGA6AK5lQGTUPEI2wDd8NzNkEHOQ7+plSVuqX8vb0ogIwjpE9zm8avagPMrU7/n1/JxgYXuFLbVr9/odwK4UBq8+jUe0ib5nApHnSqlMZLNvkDkpL2aRwjwtHsIZClAGWYj2U/U3Io7YVcJvQf/4j0bVdmPAqHUaajBxh16zYiZp82H534YvnexmPkiRR4TZhnQctyNvzyYs3Lb8y6wxXtd+00ULl08TFYAQCNI5kSSp493GhZQ/FvlZxp1olM8R2TzlBrg81OfcR5DjgxyL91vkyNWRVjMvwwOL6gvnQ05ua3sJhlzlPjPNH+Bgr6h1DW6gcycVOuNLyTFDUMuqvGGky6+MYd14B1xYJEZ/jW/B+mcafUq5Hp0+2wiCPtvrMIFRSe3DYG6zXhnmu6p0E7PVpVshKUyVRpMIYfT49UBeY6D1R9aQdhvcyxOJ4jUwheMFCxxyUgQTQ8ff8wJ9We7q2n2GaMl0d8VYn69Mqy2upYsYjaQciIKXrFXN7TVJ66hSZdDVuxz9PnsTa0u4TXHPHbyzuGKZQ92fnXw5QyJehvsqH+C9H2fjdUtE85v4U57mLYpX5wPeEcBwWvvHI15dCW3QiwmhovAgvXKzvaU4OZeyVirX98UCQlmTt2fhxU/YAyXew/TYocPxbPZ/CUVYlx7hc3Wu
UAtXYWvZOkeUmbvfQYLdCtrh6WKxUn4ULklysMG1K+Bum28EVx4aFMk5pjTE/53iY/JEdsszUhTshNeP73e3Kj6kmdwwTh2rUM4wwB3v9Nl9lnodvS7BxMGSksqwP6QGfgxf1Q/9gl+npqBd8/lgpIwFOzcu2UCB74NrJmznLv+j135+knTNQY2eaX67nIeW1io11S6gbb+J5Ug/kz4OAJDIJEUSZj2GrSy7DlJMD2ELugYDPoT6U0bt6BE4eYG301DabSVt7/geWqrSe3mLPRF/g5eGXRqY1T+FQjJfhdy6U7/m5M6ddabFI2qP8e2RCBBpVqRPtOXb+gGAQBgUFalGL527rFpjmyaMfJcQO+lRVUQu3BJsex7uEpVYo3p2H0P5Wm/PDhiUHD06Tp39Sbv6nvevqchMJ1r9mz7n3YeaQwyMIhBKSkEDpxYccRA4i/PrbDdKsPZLX9u5od73XYz9ICBqo+rrqq0DjWmdVmQrYbnKemYrVzSM2SeGMB/yKEHXcp8RmZYx9a7Kp4KMDRAlmwr64wM8RUzCsvI+2gDlCByhhUVYtXDjdiz7qXXC+OmM4rlBRrkpIoSFl3aHtfZqh0KEHK47s0DOI5+gR0j8ptlulgByrwDkEon2qFlSdzxRd09ZiuZ7XaJ/dEYDtBGaM204rNNYm1B6dngJBoi7zrH+FjXasR+KgyxY+WKzZAbC+EeZSl8OKkbhDdTG0XVuouzkCEJnt0PqEGr00xvq4l/Qo0eX5ua6a9CLSGHIZA0cXmIRMJpEV1G0F4t2ilB3ga7ZnoSj0ya7hcPhgVq5lcj3v24y4ayQCLCvHj9y9z26JOepC7ggLN22wSeqRhR6ck5zJK1yhG/UwL5ztsRZSZzIWYr0mUolbSK5GDHRxaoFYVnQVGc0UBKvd3AF2eckbB+60WIp98Io22ig/GetLJxGnsLFGM9peaygzIWBTVtLOs3RsKFyuTM6rIVEme0CUynRRHfXpglw6IMSb7sLZZlMc4+5SjlyqQIrRTFNISj4FgBEknQ54fpphfZKvGHXc/jAagm0hOQAyMOLkaN4vpgPDCpjev2Rb/lKet84YhAV4EiI2JiCtS87zYKHr+9jfkZZ4aBdqx9kbmmd3vbfnilFPK4RmfnGmNO/NLQ5wp570kMwaHRvbfJ57K9Gnw3mZXYJdJXMXJ2uDabZh4JrFXgu84TKYrat1S1oDhwBeo4A+l6NmRpzkEXyVDdFUB65fyWKCVHzgMki5yHaT+OKcO5ZVt26OoUd3E4UVjyGixkJO6i+3Vpwth3wj5yRreOeKnekUZYI4g10RuOABFC8wlw3Lfj1KZdYUeaPBhoXzJZXsWN4B0388GKqqRnhwcFTf3+MpPSdmhD9MzwnXQieh4GpCgPloRhq5qhCc5S5r6hjKobGlO38GF3ceoyRwtwf7KJe6603EqkER22bP9iU6g2BUO02R9aBxn5u1GiccNjOrXMWQJRhbmP0/O6h7TPBl56ijIly0Szm24MCTLvBSXdu5l03EYfKWoC3NQxCqiEvqoK4Ke5H0jJ2vl3BeSkHiuWLKLkeAnh9YLDkUPLqGz8cAhdkYkIDowWJxzwxhbb4CxAsztyQ92ZSXZkPRiVKw6hBXlxy5VhVObxG0Ro62CgtjPtK58dQJRilNWzvEzn0ervMA068FMW3nfN/0v5+5S76JqFRTNxG51pFjXBLMZtMPuwaCBZd6DrvdkjzbEmmnu7zEV6TAHoXjpTcj4Srtx+kfHgOwG0tIhgqYzx2E8QoSlj5vwsoqpSYpTiy4nguLoz5qifUmiFPcAufdhZZITsC4s3Gp0lSH5WV1posMnGXTXM6GLXK0uQe3so1VdcuaaLIKTGw21szeYU2BIVmDmUXMZvbWURFv1MyDw0kJedlZ0atjmVVS6G3Fg7vdiq1t79F0fijC8RwRjWVaLROvwapop0Rh1vG6NsSBgHs37ZmLje3MtGAqRo3CyRjfrFNRwQuY95aJCtD
ynWIQqb4IOktDpsTcsVJEiTxCZ1UukAIKPuSpmFIpYhuv7t/u7o6hXEfQBYxX1IyddTNaDztAlwqPlZNF6511o8h4KaiXGr4s14c9lQNf1s2NfLVBMCpY1+wyqa1J6q3nRT8J+ISnZRsQhKlvLNSstWKE2rNn37Ux2JIvsYAUySAOMzbboBbaJptuGNX2/ON+a7rqljGUhjUjvYoqZHCny7S3o4FMJ+czd4nZdm96M1Ovdow8JyZko2e0ygnygXUmOpLkvBzIgP6bEY8HmCewNXwhgtf2Y83Afg3GyWJZY3Y8FtkKHGuTfkAcNUwqg440aZw9+F56kTuMzMa+XEsu0RIivTs0pBVfsnNzfMtbCbMg5PhAbKZ0wy5z0SoVEtOwZj5fXrYzEOFnqrVTiWaVw0Rj5zPbnOGnk4luRFVco+QktdaVspyIw/SR1BHNGZwiza1lfijJiyZo/lmqFUuA+QZR3WMImH5URuhY2O0BnXHHMKsWZJ6zaVgnPFaWxjY+NcR5U76MADuWlgIRh+XwDnMcVaf4iHUrIdt3NXKq8pY5JFtJW6r7ZKEG3IoODc8qg2RCyR011/nMniZX7jGWm6Q3vPMZkq0DOd3PtDTXrVKORtUYevA1xlyOCRbiF5ESbHO0nuCVBXS73ZqkOkq1DlylCK+yQXx9uMpkZEK3IRvRBhMWsGpjuzSqM+sTmL2svlHM45qz0crwqRbflYtL1k45sW+vDouxZLYIBS4FO8N+/yz058OoIkeAeTg6usulpKc5ERP6+WyWFWxq0QXOUsmoCsYufdLGSwVD8hLIQZnWSdRNZLo4Z65EL/PxitkNFohzD31+VtrUp2DDWtKxEnYwNJi1Aj21u4ieR95O05bN9LwWGKLEJ4xS+8b8qNvnOWf2dmdRVdZsOlQBlIXrwSqAfGm9/rUWhLVIc5uv0MUWznDFcZkiGx3jEIjgvIdiyQ8nXjhvcYWgSrzwLwc0WzGZvDkd+xqROxP7uoIWdp1rFaOgaFe7rWWJPGWnvq5faEsKRP4Cc4DlMUSRXI5Py8nF9rfzq0cYAs5ZYMbmeoty9TS/XFxlfRQ2vGAb8WbTtRvqiE2xVDvClAd37I+CL0/lxh4XoAruHMb7RYKvg50M2OD4uD92qlrGzSLOm3RxQnalKZqbIZ4KuIznaw62RNhrdzdJ65OQkLR6audjhtt560tZrkPntEjx1kbTrRouhwOPE5LWuVEbbem5sQ6QLmxBfCg6GId6UiCXq8lmYbGrfIYdNlmKe8144DsyBzNf0wrwbma+VutIQ9CF1U5oTqsLNNrHA4aQCZAMQBbjDcDvuN0aGOJ9mK7dlGMikuXTeOiPUkZRjUgSnYUbdsaqWClCBidqw3XWElp1Sl5LHHcxYJ/nKrCx414Tl4GIn/XO2TckjrND5rfR2arban6WG40SOFZYLqNkjlAHpE6pGToyy23Srtjlfj8HrsowzsdIsWKqrJH57CCV02K36bUocDmE6tStJxiVJLM+IbdbhbOuVVlJIETc58L0XDK8cLHC0WTDO6yxU6Tjzhx8gnBwC4Vw67AzZSKyVyq1ADTZGq8O4kwZyfVe785W6Wld1a0FRVKUwUnNGSmYaJy4TcomW18ihzWDM9BDtkY44rQHHGAlyNkirYEAl0Kd33wQNy85aN9dcFOLk2SvabE8mUcSVXF0E+2xyNtISCS4LEHmxy4olpe8QwSAQ2lLyV6/bED/N9q7PVPcLIL8M7/D7nendu3X8tqWUuaobIE0JsF4jq09dVZsjP06Qbplp+95qRTc+TWuEGvADzKRvvRvgIcl5/xckludmMs6XKWAh/EUNj6WhwTy+FGUGPou5OONe3KZFbamcC/2AFdd0Z6heDOh94zC3qOIPTcZiZla5tQMQ087tdQ3s0ndv0xSYAlsBXRJ6iO2X+Es6/vHLei3zR3igIiwc7cOiPSgTgsSF4izuhlq25Anwmsu58EKRep1N4pcp+ds2UXTZzXXTFW
gyoo4qmzcqPHcA4YSMi8ptYl9dA6PB5SVD9Hcz1KZMUAk4LSQvfWNi9DAZqHhh4XJK71j6StFHN+pnYChwPPNtSzptLXA6xPPp/eXUWooYbzSajmo9obnK9OE6EWrzGHALXL74jCeODBbmpNAI/h5WTr4aA6Qu+JncYILB79wJid0PtnMTGI0ZEY3Izh/14Lhxig4a7cVA01xtcvEaOBxm3h14MogutDTaxW0T9lM+4ztScmbMxALFuarPeTJy2kWnJVhFUXUFLyKDB3H953xSXeR1RVTEpQpL+1ORxVhy4yfCtoGBquRJrpHh90rRGwRftziSXzLrnLrAsZonGIvM4FgLov5cTlTUE0yJyeBFE17CuKcUSy1hmYWvhugvrTiBnwwMDEL7OKJqqYQZOXFVKpFFJ6RjS1GM1TmBVjBA3T0jHAaKxba4J2mXDCFuYdZStrn8yzfZkVAB4WHuIu1qND0RgWWVFaJbprONulpQCNfG316cRQt0xNJLckTr6tyo58bxTfAzN0Q2wlHU4uu8g+bPVpvmkGkgguXRAIfUsA47W1y6SjKb2snh3engLtrsO0Zbc8SAE0FDYB1q7rpPAKTs24HkLxgkU6GiRFiGq1D34h3HOzhaIzE0fP9bNsAq309H+fU8Dp5xKDqLKFDstxtPMSuJ1l9tIEjw9rEQ5HzMpB9b2UryNX/14BV8NwYrzLnoOLBpGvnJ9M0qBW6CMq9IDQ1sprojh8iyqUgTGUw2VPBhbyBy0vytEdo1Sp3x+yiC6EyXeNzzD8YLUzqBgTlUlMLH3G7ASjccGsruoUli46pR1nEFtOEGUN+zhdiHLjLNMIFEfgXG/WbzRDdukuuR8vRWW88N6fMQ4VShux0PEkQq/kmPs0AeRCmgjhN+cG+KxK8RJ7fzw77pbW2Cy7Lm2UoCTlyNpcbu+M9sliml9ZqbUpHXUNKj7vptfuAk9yFCyy2PFUuS9TZ7sgqz5az4FLVExnIExDjFZUafpozdojQ80PDDPO2rxkInDxKdMCcAJffaYeSnVpbY+4EvErI46poJhPJJpxIIwSZWuCTa970zPnQz9cxbE3Dd9Z6IrVZ0voqYHjH2Xq0pSaWgq+13Nq7g1QS3uzPdp5njHqAxQFanGbZZbmJnYXeTmvbnghHY7lNDqvEkU7W7LKbB814OJqbIDDhxHnBCT3VGFuAgCMLcElQiePKGSnSau8qm3hz5d7DQSLvDBrsF9I75WltROMSmj89w6GXkfkk4D2aoPsXRCUMOO1ucTitYTbSrYcTHyEVq0fHJqvZ0gFOKFwIhoHUw5PJjXQqnK0AazP8VftrvreGqrwP1DVsPZQNewqbZDIrcZutvRRjr54fmHX/OAbMwi48mUoFuILLMD3k6aDPuXZdTyFPKl0LSSks0ph3mqRW7BVXeHVEmocNimNv85iT+nksqIwfHnLCicestDfP7LGAzm0kHbx6hXUZPOkakEmX7vzoaqm4I18DDEo81daXnDqJ440FdCHjI1M7L6AA4f3WkXVsY5hBerONsi5Ai7rnZk7Lk8Ze7PJtkyXbyil4GhEEVFytmD1RzU7CODqIq+KaAQBBllJxPDdqQvUgCVicq2neVEgVt2nkuFNrUiYCiGBLgQRh8VhUrvqAFQeFK6jTOZwZ2zabERk0rJsY9oi6G2CtMMyPeWNn0rtsI06Zq9+QawFKtZg3y9OGplqqHhUlMMq+JvGRMulrRwHMkWxa+BbCHX/LVskc0yOXCUn9cM76emB1PvWL342CTqxWrSZVmLthTpGWLYxFcp2XM3cBzbHIS8AbBmkcxmQzNpn8KIsymO9AKPTBOtJ7G9X7XC3g3Se3P0C098udZOLLjNLmArAdJVczzSIvAB/JU9hAsWx1hRiTzvSaU6+XNWwOEnx6HmOIBcwVu3Ndw2xYsXJ3YKILQc5Ajqya/lJEzKvu6jWUiT8OLrmPxQfc2Xnjjp5aar/KFZ/rvHNJ4r6AmIEpDLzwcBy
vwwDPnfr7w40D77Tp6hJtgC1hAIPhFMNeCCdDylzi2LcGYKFmZfXV7/PuQukndLIuU0VR2SqvCZjzb6VjLa4YDJGsc9ghhOzKJCQti9t8HCcqnI+cNl4cMgkWYXaTysjkSQyfpzkJU8AnunhWY8e4mqTkSumAFdldrcgKdvGCOFLgHSOjfFhP17LDBD6CVk8cQCj9PTGxtBJFcCRAzjE5vZ5VVCRYHebGoYKA8JMt6iq5LKgNFeYzJ3JLRhrh2DEIBMhLVtsMWhDKvGYopsIU8lgApMaBkVG5z2CvjjbBZCBq2GxJtNT4YnuqtYSVDG7IaIrjM+xo4nTucHAWkNNYvrhDL1GighCymh1HQ3dgtoMNaUVAdPN0h2Xa6tpZIU6G45nlWovVA8KmSQv4EawNTHjgO8SmPY8FOVonaCGlt0q4str0MppyQl8bPkqER+vw/X/jmOGIswBQBUboGubU9zPCZ+ISffCYwrbnLPxIxA5lRp20XbrPva1CEBC7S2FKhxFxPI9gAMNfJStz8CJHI2DdsjQugYEerUYuwzrYzIDv3By7Ys4M8+laOQLjjwJds/14fZpvHX5+JK5DCTxE5dufFBdEULx9dVfHXh63P72vE93++PTLY8/P2VfsF5pw2angMXLRimdviNNnG40U8/PMdd3fcKH/Dyw/kDqS6rkdl3ALhnXi8eIsQmemNWm1EM1JEqIv6LDfxc5Luxn26zfh4m/4KGokO4nsEi6iilx/JdjrIe31O0oN32sfBIHXERBk2ObZvutdT49TrwgzbNaLYZP7Nnq/ZGN/TliXakZ2GN4uof+MIb41HJN5pNjyqjVZzPbJp4thV5fTCyze9Lehh5V9HbvyLXvYWpRteN0KbhJcDu+VERhfQMHHosyTsz1KwiQHW+IkBnvygAWG7zbpoe/G4KsJ5GmD7TwUmW/qIXf9IfItC56Grz2/tLepbsJz1jnAIQ6dcGzZ8BYQOHwSl2M98kMow7GfAzqBjBLLfqLayJtKrmojMeRebdgDtRHIs3SG3+nsTl2Fp6fwox/prv2l3t6Lv0zSz7YudMMO10nhl34CfzVAnJREYIcQ/sDr5tntVXJT8m8Y7vR/YJf+ZFyR2mZ51Zd+++L4DVQif70ewSvLFMiFG3ICphVjr76ZxI4PlJ2/mgl8PMzS+7Xl4XaYVtfzPKmLFz22XqrYB9db6OGLbsLrLF7gTi+FDc0iBpvixlGS2y8oxrymsfsOl+CSRQr+ew+oRWX61gCpuEjCZ6KKQulX8gtc4cw9rpgHsGKeBivi27D6fTaiX5mvnwHtWbJDEQp5J7zvnpRvGz9efOSd+KzEdOHjZ79M6V9X3POsKfWTwJ5m/o2op++kV3hJ+uKECQDuL+T/dd09D/jMTwJ8iviTDOyJsGfvZKebph3auV7a1stvGBVCoMO+XMqFnwCdqCBTebHsAgL218z4q8p92ry4Dfyvnxcshv450T1vXhDonewK26xyv2x/Qf4v6+15kL/PAtyp66eLKPFvRpR12iVJBEzyxQ6T1M4/jyL7sH+MIti/OIhkyXe2k2TugERh9zi6bft4HP0XMxPfxpEbtqmXvDi5DZ+wo2ATNECTk+SRHpv2ixkm5vkFnA8wg38vmCiK+hJMFHoPJuYBmJ6VmiS+Ix/x04EJ/SaY/KR4QdGXHlPwO1xhd9zHVF/FDtL//YPYYW756TdD9Jbd+hw95AP0kM9Cz3065g493wTDN+F0x0S+SWE+V9cNvVHjAmrivepdlduvhZn7afkJHASPTRMfDi1ewBmK67V8S813cHyW3lGEfad46p7KEA/UTlKv2NM0/7MkdKh/XVhL3GdzXLss/dh9KUod3Jf1i8X/Ve09j8R/Rz7n/4u/zAHjssuveMwbJP8pZ/m+8vPIaP69vvI+m3XLYEEhfQEhKquS2w8vhd/ZvbYQIKjm9x9vWa/SNr0YSN1t+zEASorbwOBCh7GHPZ9gV/JBck8yK/Cn7fVykWd
BBQD7SyuD4Q+sDP7AylDPsjLko+zYR2AltPU8Bp4G7JDbRVLlpv0LLD8Ilnd5JfIfB8t9OvBjwFJUaZrkcADgE1KgSkiOf4HlL1gWnH3EX9i/Eyz/xSQk8k3+EunpS5iYel8dAtTDt16GG8bGsGFsLOvpp5dPi+sen7Zwj089Pj4h+GcM50sQfxFm9r/sr3rF7gPStzv9ZmB5lW1oO79PrSe0XKHYe4pEYA8SCuwDksQ+iySR35HbtGOLg11Kv5uMzwD6LeHeKZDt/95ZBPTHhG5brv2HIv9MnOQD13DbltshwN/l87Eey/h6hjVMW/yBuXnrqruNMXj862G/q+puJBT71kggUAVB691IvdLfbvwv4OCnaZN6axW7mfgHccbfnGAg77NytW0Aq/yr4fQD9Pa01AL5HSm1R4lK4K6t3hpCUVp64b3J9UfU+rkvC9/58Tsdf9XRP+YHSVWGfgzOHsefufe7rO67RG3iOL5pA38em3ZaFq83DP9zSQUUR4kvQEMg5AOPST4gdLfo/+NRc59N/Gc8Jg6XW8nbw+dfjlDXrzR5+y40V+UP39rrtx/T1+B6/kgk7L/KLRPvLA353vB/r1d+PxD13hQ92yl/R+7zFzn7XnKGv9fenyZndyM9GwfPymSC680TqzLLBEry70o0PJnBfJlpGL4/gviznBbxFaPxefGQ+APcf7jHuhGtD4ePbl1gK4j1CzofBR0G/SZ2aPQB2/kI7LCfKD9TBFM8KpXBT2ktOK9fvqNr9D2NNNIofi3s/ALI5KdSL85fqvwhN/0ymYM/SkY9S+YY/j4tQyD3PULko3L/ByRlHgr9v5gy/HbJ00kSq38Ozsr9+PxyScKXXp1jEoXyRO+ygje4EE9EB06/qz48iFrJB/1j5Af0j6mEsl8w5l5jbQsx5E9z6sQ8nJA/aMvJR7b8rbn/tmG5UsX+gDfbbvy+LyI210oFuEEMWS+l37BRPza4PjAtENiCOmwqcz0urorBEOMNYODL/5h5kv7vsFthh3b/1CP4nMRh+/pzOZW/iUwwyIOkCUo9KWsyt7GTg5kyO8ePEf8ywptC/BGPcDNOH2F7vtKp9tD+fGGiHlsky74Aa1RfLVG/T3G1RS8zwAmuhuYr6vpeTX+9zeZLq4Kh+AO1PvDzbxs/XK2Papr/LbXqsVuFev5kzaLsl2QCQ+671x+Wq982fjj//3r0COxyfDOsveiI4DOz+/mv/1Kz+08XnN+1y2GPzPPfmtNGv84P7pUN21J+afsHQgXkH1T3Q6v9JyKFn8tqv8ZAv0822cQ7jo+yD5aweZSxQZ+m1z/xxNHPpdeX19gun6xX/B15xlDs+6br8yrF6KMJ+1XzDB/Z+2WffyBYf6/wf9wdY48m8lf1DUX1S+E/lC/9tznkj26j+TLThT7RWlLYn5Tlh1jLh8L8jv6Mn9wLNnqk53789GzD+xQ3hjxIOOAPctxvGz9cuY+eZPyaZTz8l83iv2gqP80sot/x3CqckOlXRWIO9SLduO3+Q07lD/L7X4qKQIg7UVH0K42hDMLSLMOyFI3fC46kXkmKYCkCQQgUxemnZem+IzC4M1Qf28P20b1qdxPve58rjgpTt1/7o9LcL+zX2s/t0C6KT1es9Kd/X8MlTMO0zX7Klvr1Cl8w5Hut7x+g++sYY19pCqVwjCJxBNji++oiRr+yFILSNEqQLE6SNxB+sV4q/koT4BcUZ3Dk93UMPhxi39He9qDY+FWl36n3Tzvg+4b/P3g44Kvwfo8iPynovjRZvJqVYT8FB1fbAmBAYiQDL5FGWOzBsiYEMCMkgiEEwgx4eWCiqVcGxXAGWBoQ7hDk7bnwH8BB/yQpLP69/SZBUcjAQ8E9/g8= \ No newline at end of file diff --git a/docs/devonfw.github.io/1.0/_images/images/further-info/devonfw-org-old.png 
b/docs/devonfw.github.io/1.0/_images/images/further-info/devonfw-org-old.png new file mode 100644 index 00000000..23bca7be Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/further-info/devonfw-org-old.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/further-info/devonfw-org.drawio b/docs/devonfw.github.io/1.0/_images/images/further-info/devonfw-org.drawio new file mode 100644 index 00000000..7082aecd --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/further-info/devonfw-org.drawio @@ -0,0 +1 @@ +7LzZkrPMki34NHW5jzEPl8wgZsQkbsqYQcwz6OmbUOZfe6q2U6erdveutsrPLFMKUADuy5cv9wh9/4Jy3SnN8VjpQ5a3/4JA2fkvKP8vCIKgMHT/ASPXzwhJoT8D5VxnP0Pwnwee9Sf/Hfz9XLnVWb781YnrMLRrPf71YDr0fZ6ufzUWz/Nw/PVpxdD+9VXHuMz/buCZxu3fjwZ1tlY/oxRC/nlczuuy+uPKMEH/HOniP07+fZKlirPh+IshVPgXlJuHYf151Z1c3gLj/WGXj/DaC60tHt45bpqQykML/+lnMvH/5CP/9ghz3q//j6eeKly4WDeTtUcw/Oue5Nse/X4E2uN2+7XX77Ou1x8GnIetz3IwCfwvKHtU9Zo/xzgFR48bMvdYtXbt7+GibltuaIf5+1m0wME/MD70qxh3dQtQpG1pncX3VbihXwZwHTae01/ogLOXdR6a/C/mgb4/v/P8xfjPzz3+H7TQryX3fF7z8y/w8WsxKR+6fJ2v+5Tfo9ivgX7BD6O/748/Q+lGzO9g9Rc4wv8YjH/xW/7b3H/20f3i103/By6D/x2XEe36a5y/8h0xbcMfB/60fO3L3Ccg2Hj++eD9qgR/s3wf+uK4j8/5OCz1Osz1Hbi/c9+3+jP9z8l/h5LbnutfQ+GvndgPff43+Pgditu67O+36e26/B5ngXfqO4CZ3wNdnWVfkPx72PszOqG/hZlYzz8gy/LfQ8/f+4X+6eDyD0ML8l8c4P8oy+HQX5sOR/4d0yH/juX+bfC/3HTofxPToX+DOpT4/9x0+H+x6X645I/8/Y80JvI3OPy39//fGZP4b2tM4p/OlvC/lz3/exjzb8P8n8Ga/10p8p8gu8DY39muu/60VnH9p2WN57+z4z+9wPqn8x32D3PdfzUj/7+VzP4ZYE/+ne3eWzfewM//NG35PfY/wP/Peu8fB3zqfw/8pYpH8LLuvo2Zv/Tb35p/Hca/GNXiJG8tUHjWAziaDOs6dPcJLTjAxmlTfl3yl+2F7899yvdizDL+NJCAv+I/3hT1CZzI/t4PX60r6DwxwBKImGY99L/q9C5869vZ8/9K7ysiYhavt39FMH4XwOK25PPyp7jP5qHO/tT+qR3ath6H8U/ghD/dqGnuk5Cbz0Vwh+OfYIT6X2Nf/n07RCBEUSD+A+2QfxikEPJ/4X8NKhz/O1AR+N9j6o+x/3pM0f9NyfSfoMz6Y+K/sF06JHWZ9//Dov9pt/3DWPQPgP83rEEw6p+uBkH+vtewDH08/+mPzmUy/1WHE3v/aWy3sv6fEPnPN5D+cSHy30VhE/98OeHvBXad/Y+q/k977B+H9f8/imrsfyuq1zyt+vuq5fUnsKIswmCB7/7s7f1/TbY2+dcbtfG/3mLm98UMXPHz+vurjcf7UcHr/3u9fattXuD/A3r7+xx/tsbfGOIfBUUK+2so
0n8PRRL5eyT+MfZfj8T/LlL8b2XIPwHton8vxbv5T2mVp03+P828/7zn/mH0i/4HVgT+ln7/K9j13+ftf59h/4qE/+DcL5+izM9bRAQEiHC1z5rOAalSOTD3j/H0KsErGYZlsfutCHHM6/7LCVQxfU9gpYx1PYFhNCl/eSdLH+Aw+36KD+N+Id9HmDerMIwFg/esdc8V5AvDOIp1H6Lw+72UEsT1VC4wnSa1vA2zbCTRdfRknw6klCov4KZbjvfx4YMY5Yt/HJksXpHzKF/621CvRk377Fkrte51wIuHzrOHDpUfiddfD+QVip9SYvnyOBghL1p8so8XZiww27zswm/CiH3x8YuZt5gbWOF9z8AE6rs13DpiXpJT6qVJbtQIrv8u5YuNHlcZfwyMfzlPkf00PCI8uUQ7UW4vH/UDkl8uxC/KxPSleeyKFY4v/uWydJly4iKdHP6YbT66scdyaQizUfvoci9+lJHEloPJvZrxPuRNFZsG/vXs2m3BHl3Yi7TXiOBI8FBjprW11KFenKB4ZfPQD5p/Qo3qlunAiAxlw1vGpFZ51SPM1pzIpF3rSJH7uCRH7xTqNgAmMJh1P6tXvqT2TSl6ZOTs8BLN8tDvYVF3Xt0k8cxxm5T9jt1hzd7+183dk+iSSAPgXoYLWSUI9fsFVt9vN6WUJmxFnrc7icFrBdt3sHBDMzYsU5j2JEzTh6hclicSddlymIgtyQ32wl3PMKAhjg2hGT+Gc0jI9TIT8R11UReQ8SsN0zsaxdCSYJTSl9k7MjTq6H4rHDrMlwx9TXQXbfn9gxFr4H1v7ueH1V5nmP75vQ5V1J/fMVDHzazNMy9n8idEfaddDxnPvujweTyE8BnVxqUW9PsuFVnKlK6+eG98axEWFRT4574ni/6UlpZccU/ZUXGdIZV/J+bY5xfnrCRr6EzG/emFK63E2JLEwj0Z3KJRmE8dNo/StHtog28cU4S02hkoEmAc2DYDX2669A2Jk1K0ebAXubzToCqibsAjYQmx19OzboruEYua7g98yCKdgRQSmDfcBX2g5gayxOQkbFeHTt02tcb7oRMPrbPr8vW90XbQGOZghUpFW7kdC6RhPtka2u9m2B/Je/XNbB7L2GSGp6KfDTawjPoZ3PIlPNcV6+pefsWtA/MP8mmNJzEmu0D+2JbXuZcgMxq39JF/Ew3bVTwbQ/dNpD1dBfljfJxXpteUwTz5xvXuM/BpTEk1Sarr9dwgN9TvcBCfIK7r75QK+/ha9ZF86Og1gTnxRHS9AfRRX5NL6Jg63KPLoVvsUJ1IVuzyqJBnJnR7JCsVw/xg4zHI4KlfbJcCCalxazefxsd/7ofzUot7isbJ+ZqJoex4pctnq117TJDnKf5iyzQ47J7LVe5TUd84PhPE0rMXGlXDj1JYYof+8jXWR/lg5RXSeMPGLwqFWDgYnemMOdOPZZjpaRY7T29RRpBLskQqD+eeOBFJR+e4uzOe+vdjbyZn2YM5RjcU+cLllkqZ5mBrzjdbnIsS5XxZnrfP9RyuiThmp1xRfmzGnF+bqa/PC03XInlWPTNOhflQcrapKJ17DifidYCT6gIjTUH4vVNdsUo7ZXQ23PzbjGczTcmDL+b0UThlQ+lawTtrf2r9jIAPnyf787ly/yYN1p0kehnLohcNbVOBtzb5uVOv2GVuvc9aXEP3p9uPk11FognzKGN/ccNxOFoyKeMVKxzZ5KJLsn0oA9bjV7/DEfShFs/5eTb+sNMXz7DvXEbJ6EkG97yPaUYk3hpe2lOvDh2jLuEDN531pr5uvRPk5+c+bbkvn6zN8jlS3YeEsPOeCeyUBy6dvdwe7VCAzWtkbwmqIP3eGwswzXPL+vSMVZN1G4QifNZNE7uR8UnhCTIluA1Ckj9+LkMw14/nlJAkttKIkkdKerKt+G5+dglgtvhqRzz3au7H14w1sgBdvFqoEOTsc028+8jLU9vhTr95I9RAGzK9927Jfc9XGQuY/JD5r13N
4rltNhFz8J75uRHzfOlAPzNrUAn4r9q1DenVifrcxZDdMjnhw9Y7uO/yS16leNj5DdJjbTMsLqZNmubz1fj5Y8Gx27XWL7LyG7I3TizsNq3o0L5BMJoTX0ZRx4SRZUby/ED1Zv7ygXJfGviKw9379Buzu/FoXmWA+LzyARCaHpXqCQv/w6AOD2xwbMTHjfxmpbtBCr0se/gFRhiA7bjFU35tbDr34yvccU+CsHSVDNxtCXGubeF+87wd5RUJUDP3tDaL3R5k6rkjkttIES/xHmSsYoGf9gTIQPiFB4ODpys1Fo8FwRGzhDiZN1e9VnDxzg9Kp/wBvPTW4OFqnZJRStQEt9B4j6S8TZ/emV4808e44tJ+OVMI1SBnJDWd/2CdHZizsRiVI+NknVQDX1fItQ9vwibqVmUifubODDW/AcnbtxFzRmBvIN2XmQcJXMwftZuuPXF+uOdhPu4aGuQAmj1OpvyNElsB10gukixskE32Rxw/NHNmI+wliqaJQpCUAqt8wzdb24Ep5We6Afy/ju6hsRWy1OVqors2W+vE+ePyQ2bc4759jsm0nwI+v8ma9Q3XeQ5QwPNtogNeMtLo8TmJwnm126k27M9NVVhT2szk3MAXHL+vk+9V5tk1HbigOy6TmN9UCmKMufOOi796vZ2f9DgokbQKzbF9aONz5vuvx7gvwoQ3u9pALiFiZBj+7e70MrP7vQ1APvRJ2GLlj2lKi7JBwv6E0oI5AFT1VDefOjdLo/fvj0TjgCUYEbT7U1Gw3+gsZcCLkHH23TXbe6pydOuTROq3j0D5SRAmU4CItGWrSAj4iXwkjo7c0IBFkUEJ5Pec+4Lfs0TLzejMLp7mJceXymEgGa5J9Xr/PlgwFCBfDcTo+WNQZ3EoJ+r79DxlJ+QfZiplELMC8wE9ksITarl7yR2M1i/4+AEbgzHUDWmW0UGwbWpjOJddn3N2aUH5o4owNv2eIeC7ZgkVUnO8j9YxnB/ND2VKy9debngd+6BOlgLXrJiEqHHqFnLNYv3DYKVqfKPXXqJM02LPwErHUUasxeFAzAJN+EE/c53fuL0FbbcruIPU/OX3TULEzeZLv7l6/eFPBckuKphDl/GeOZxWP85Q2OCb3Ti5ugmr63Fh1tORjbHp/Ng/CuddEt/PXy9M5SEPTyRFaOJ42Ufpvpfn4+deeKb54RCbkkY0t00k8OyJ7IgzZSnuh8vZI2NuQwmsQld54Ejh4ylGHn6ZQl/++CBmf+Yg1R6p29Sewnp2I08e7J8QjtkGzGWv8ljopJKV7OR7CEtuXv5L/YxZpffN3KfM9Khp70V9QjFhdpR7416MjuWv55HGO0/1vU29eMFEVnpow99QkO3tNrCiSQ0FE/KNiQRhaHl7ri7xYzkuUAoQwnLiYeHjsZ2hB7STzv486yAB1mA5qGxH3egkkKb6J/z72bdtgtlVSRzGozTWarhiANqf/MGINbAYy1KoUjg2J6KvhPnNLRCvfPk35j+GIUgHMtQJqDN+IMo4kFYyNsdVXdPJEg9KMkp4/AJ4/bmq1q3q7ro2OyVbvLD48ZOPZKb5ml+qHL45oFdyf1bCu4QjfufWPPk7t7jdR7iH7SLUx0aE8/FDBirTdrzH2Ab/fFWfWvSeRMA8q99YHSjVVm5raXVvPvyTH71JsStG+T5vW8vf58WwQwY8qM12JKTvryfBhW0dMAYnjls5bHUPUCRKlq9l2M9zs4fK3IHFscMUTAZ7R7Iyonw06dtl/4amwGvffHTf5bbBHo4sTwWrPP2JJasX0L9qA+PKn3l6c7MrVeUGvVbOsPkFBfwTlaviq8XLmSkycWXgwZ/jDib0jn1HSszpsaZFnplWTe8rx/ulMT8AJ34BTnuTPsjTngkBbDvD+W/PcdejwW2QhnTUFJfWTaxNxeCFzTxQvWZ/CU1h3l/sceKJIJCHUc/2IT5OlUJMRBp+sqqeM8ht8Eo6MbHVVuXsEOQnypglZ0fBZjnjrZBiCxnK
+jw06fr8MsL7gI6bwrRA+yAqUa0OfslPh+9XQ2u1ofrN2n/4hG1QCLmKUkaBPIxjfIv6+HG/1nr+53pytdwXlapbTcRQ0JW20uQLBOF3qhf1n9kG1lNAHnw05CII1MNL6sZhBlMf0UjMlp+gVVm44zym5KILG9EmPhV+0c5XiyNmclK9U/5GAPbD9fh2bVyu+IGI1AhEPi6Z+blzIG3KyREYQzbSrD7ajQUsHLULfSfuWcl+7HcLwRsUpZTEp7uN/L6sGmiR5FGcnmSgidyvS5EftW68U5R9zu9v1psS9bmbqvfDoLcvJFCzxNIoIGd1IZn4htLxTkbxw/9VYLNn3bHFQPCTzB1fNFmWWGYm/WVOEL8gO7FuG+ESZU/LNLnmA8bYSszwOcoDkqtb/izWUabpFXtNxPypwdy1Mk0Scmf06kEklADk1bqgE9HhQXFwjPk24emw5+NGdZ/Ib/kT+xvmDNS7XI1dgU8bb2nltjGam24gtxLFbEu9z2phsH4AbQmnrpXvQ3DSmiS1ec+Xz+LrbSb0ctXncATKwSksf7nXc9uHCxHbfIRf3XKGlzm1oeC3WQpq2/d10Zn04bSVZVKF/+oD/QPUHj8SSeBHp4ooDjyXRx7c1FQjS+jKEO+cLMeC1QtWjkN/erOVg1HLTSQ9aTRYHrP7N1MDDhDLEEUytF2fA3zY8KGyqQKAcGslnwRF/4Y4NzsqXxlwK5rsQLSIRnwvCtbS1Fad4Ws0jIgYdhFcSNy72lIE+g0uTpRHI7jsgR23F7bHXUyzChmWzj3PYR1YTWlh1UibQPVjH9so3pqrrH5RU/6oHNaglKLPDHJ5J6VmKm/5+HoKgWhajnwujLQUY1aOAGz+iZ4D+LsAESrD3xririXlAh7lmUJTe1a7x3E/sWJb2VZyJZeoxLmM8NBqz+k50O4tpN6B6gqPMn4jBjQoJiOBDhVcgBQUeyC/lPuHVe47s4B+uV6Od4spe+SxRCQk7CeOZJwBbGjo8ixg5R9dCwZzfuLBuUNGvFX148MljpbxO/NTUJv88KORPqLZ3TlaulQeQaQ3y2eb8qMlbPN9wMzAdK/2RKjpMYS6cwCDXJ6oBDr7gTEJ38bPuip8gnWslX6v3BqsKRwMRJHrnhGbOwqlW+cQJBB6SpsupumT4hQ3JSqoek+WqPP6iBERLtx1ekxXve1jcan5FszvZZws380w0Zw3dInNrDoIrb0ZohmQLShHtgwfxSNcleUSCMufyeihZzZuRQShihSuJnKji+9iY/M+iR990Ta7s468CUFMiu7lBIcAOIO2viabXg1d4NhfbUlm1kXkWetgZ11djiTXSprN442O61ICygRZeZmI+u1kqxbnvSm0k+tD8kzDvr0nGVDh6pB6HiZnKa0OGrk2Xcu8d9XvIC3TTwsUuStnAEaSmeVmGD4Yvb58BatjhAipA/zqK59v+fEykpGqnTVGTNB/SIsWLhQhibeuwtjPnVv9511dX++ezMLal5tadxmb/CTurRDYrK0NKeXfpv8AgbxG1a0lrpJwFjWWsse7qH1KmbR01Q63a6CpWYh3jgS3rLiE5bkTjAl0JyaCjKyeWkPxPEgZ5h0V346qGDx9Cdm2uPF+1z9BBhIumAI6jMvOc1g+sYnmXC8KwUUXxnCaZLSsTsnHHTl9qvauncJRiG/WdgKtHPxVFXnotFpTLy0Zqf37hreO9MMGinwIlGaZT54x9fJvxRnw0+ifcQbXAEtVH+kNetdoOVBY4KZThqCV5hlG8jPxsUd6E1c9E4nebHqibgmcdE97IRYWFc+2OkFHZ1fd0HEsWdsVSP1RNy/fpws3hZU7X+7CmOnoV+zVWpSshhA6ZR4EMK0TVBK6VYwaA86BFjR0gd+zGO7bj66Ioe4pDUKqEqHhGkTp7zGi+tBWdM/HLr7K6JF54cwtMzx0fsWUNp447kzag/4aU0sA6hUndd+wklMtnlQKGKg+RrY7S8kzkGgdXr3lTrkW4rTm
0UU/YMu3/owF1T10c2va+XxW2PaxQvao4DrikUJbu3ZX6Is5dMVS2OXpi8i0Oy8DYkBfkPKIJAO1qx4qH7mPQ3hVC6slOncF/g4S1bXYl1bvRjDZYVJvPUpeV/NMacMnc1NhfvL9YJc4pTOWQu4a/ejt/X3SDT1rqDRgAOqmGH5ytXE/6VbCzLrgD20X1Fmdc5g4uxWy46SXH8Epmrk1hqBnOuPkUA9RouLDQ/tp7uvCs9s/Nf+YPqNXpj9qrtIvRZLLaSRQPyLraUkIA8apcjYrZu3TrXaGZY0UGpc0cWNDwfy8lqo7UExTofVAqCZ6xR61MYnFeaAlMXW4SPG+FLaJtc2Qn1ldAppybw0Pd+G3DrtPrIrPcHFdn/fjCnfxRPtlT2AyuWnU9SL115UGc4r27SUPzGZH7SkyHNecMTJziJ6rPh2FXm6+Jfos3MShtiFWp7wXIL0XmdR/yz4vfMUP9aCk0s5o4og9FTiKDZW7rl7xYhGssII7moE8O3eHdqNqjc/vejE3LwgV1xcVKkgVLSohu/dNmhaBRiqGC+siZF7d9A8XKEgaKDqVWxtGt3JqhnoKhJrVLRocVtSrc/JQBThco56I+CUVFKIQITtYaVi6fMHU7bt2fiZT22n0MSr7h670lsRwINFQiLG2zHJZ+TcvKe6tryvyAC2nbJPbLCMIDcVxsDAAUxolmvSTEMO57jeIWXZ/LeFY85dwvZV8Ck9ohQXt4r3XwLJ61+zGyd4Sk3kScv9M0OT67cYowhmlLz5HrMBvDXQzb/iNFz64WWMpTM52heSJz8Z3vpjiUXdm7wtMMzrZnrcFuihLI/U+TvXdhLL0VbyVSbCN9XqOQSDO7iC99M4IwTLUNiagUSExVh4SEVExONu95AnZ3ooJbw+7ILYuEyhXCZpxtmT3IaHGCBWvZ2rbZrYnTGgJ6vemZaZiTwJzAhNL9RcTHcSLj4uLaNEEe3H0dSsQ2bPJxHmKl2LdehpCiG4GcVFiN2UScL+Y8S4/TpbBgruWZoO6Qce+dkCH4h4TFNViHd/Amf7NfnuT1RuLLNHUXdVybzgNi6ZSrL2oMH+XDskSHK/srVcSHW4zFsZ3atv8YEXL4QKCT8cphFQXDxiRPcAaiLPWBOx+y8CHave1i0hPFshUOogCquqHoYFAU3Xm2xxlNrGK7ajm5IqNzi0UpgkKD3eUouUuJ+iEge95jcRqKw6AI/bqp2U3sy4P0SJ+PcLF0CHorxVu0kXLqXbGIO8pHYDzoZx8UhKO6rpm8fG158jgKUkvmdsjzd5TVSTHK0gBe+8tbOifNKhZCXpw+tk1EfJ00m8FWJrcy9p5JhyRs4tGQ19E3Ck6slrXXHyUguzeZRsLPT07LFSwMpO5mb17mdV213U+1i7wyBRiTG7xuG8fSVB0iy3fUUgnnhvSqxzvEtuHrRziq9aAVqq1KU0hMPkmmJx2PvktCdULVfXKsrPiFiQBXcyn8jbZaFl+apa14u33xRe3iExaIpeth1QbQqJkgYazTBOVhf/pmTsBf4AQT2ILbys+fJJJ40IdVGiTSCSEjr3az2MJSNV9Ie++g9zYYzfujr8Wumikjpzxk5ztA7PdgHz0BLJJdu89Puvjp1TmuAhZg9pI2W/TAjZfyZQXZJ7FxuCK2duqjh0RI04kAZM62IcjcNCERQpXje7MxwncBsNxAEV2BmfO0O6wbYfMJKD6R0waw1e0xAYVTA43hP4xkYqkExusgN1geBrvWlaIWORJ0fTm1k6ax7SxVXR5gTL66F4EsFxR8ZuLVppfVbWr+jT0Vw7k22ZNljnm1ADkWixMJTyFH/u291jSqc3b+UtlOSUFTTVXM+mgg6g/FnlvR5EEC+X2esAbQRj9q1he9qMW2l0sFxNT5rPEQBtc9nU5vszH5ehtCGTIRC4dceqzzIXGtZhm+wmQ86agZUVbGsgsHMFccFOc+kY1J/esGCK2kCeylsdxVQZfsBO77f7Njk5mEKsZ
bF0KFshYH3rJ4vZA/cebyE8000Zq0wloR1xA9xoJY0gtftdgcdom3cmRPxWsgAj2Z+e4S/zMT7AsHqMDFYxy+dYKTnPzNqTyrEN2yROp4bBvX52dgmIp86HpNKpA8Yqu7rlwUrO1GhxjeaUd9ziiZ927cYwYMoU+LyzLo2KHq4fydPH4paLa5wSZombjwqKafJs4O5jKI6NKm2AwRZZPiiEhEtrkaREMvqlSIxnu8ixG0CqCn0rmXU13+vvqfZEIUlrJHD8yEufJUYCf1V2BkhDSkOsmcamxkdyE3sre1pk4qZDUqXsnraNQMfJZIC1RRQ0y+3ogmIFgtIcPBH9eYzUSZMy9aWpKXUIueKQ1oScNopZBKFlaIbgvcMR5g/IOYJMGa+gbjwKh3HFDDo3nBnmAOw1b5YYHuN+gnO6y0erKeoqBTDog3VVMEEWPg86X7AFr8spX50P7IK0ROhWX95O6rs6sAXC0+nBTt/pm+AaBP92Q52HbsqN96fB3kXG8PM3RiN3PhdpCXSqy4ixRRjS+ik/EjyFsVQAHJ2tPPCl3tstiF6X6w2d2wsggntS6cExwn2Hg1IJO/TZleHTn4jz99Mtq1vsIs9dsJWsqym+lUD1YrodRm9hpm1ENrnAJ35VXkrsQIrpWZZjPoUK8dkxFLhQHC7navQ58kFFFEr26jb568K1bEd0t2MqKLaBPKs8E/L7RuvfddDgTwAKXbH10x3CIpQKbHixTAKXQrhOz1n4+W/p23Q/YPMI6l/6GE9POPSpRLf0JczZqTBz//uh3eAgtCeEudul4pDhy3lfpESte+AY+jXaf1jHqBdrFo7VW6AphR5b4V9G+cw+O7oyzznAMvR3fM3w1SV9Wi1XhwwHZysImAccPU9IkmqY66tk0YhoJgG+48XPaFGaYeaN40TMUPW1vJGkqpKaIrj5buIjfP1WPryrKxUSlDyL7UgveNlZczI7TGC7ycWtlMBkLWiqxg9DSnRRNE1ep7BRtpKLo4zxP7Y0BiLx5YkYhEmAwmURY5TX4PO33qdKD+s3dVYY065yt4RDgJG4lSZ5O9HAjO7xK6zCwzrCraaV7LwbNRx2BhIf3xNU6NGeeEikAd5ZkEDoh4jDVT9kz+XF/fcz1Bqx0oXEwLb5hnL4HDWQtgYj6vMheUW9lwPLm1e9GluJYkHra/FZpJMXMmYRTQY7wZFXj2Lru6JSyq+8tKf+2P5/l4VDpQ4e9T0dtib6CFUDeGubiuwto9rWxdVuKYML7xvQ5cSUb2rz44Io3+jHwwnTApro3MAn5kW/M55ZJeo8BJ6K670MIu/j0NPTALa3qg+XjGIZTkZ/kO1DKDBSoPNofHGkVyBDTy5cA9Jm6yy5A9Lg76+8FOx7m9AELGoCmx4Tt0aIFVD0l0rQikKG8CdaAMFMGVcXwOakd5DEs7dteo3WfiwMshA+8fH8mHawKes+CdXarmDDijaFrV4/iCYdD3Np5lO5BlcbGxWRr7At3jTZto2t19pXfElHteGqT55n/YJlobNsb+QgT2LCcrvL8ejC6NG2DfxHCTRji9XJQ76fsB6SsvvVjKl/pC4+jD3772ODfrjma89IO1jvEl3ojw1dCSLscwk8pfEr1ZnZMCj6u13Gsl6alX2R2Kb7/Kexeh8VVruTaHR74kJ5zwfAIzZmT+6zTcyHspJOlOiD19oOFj+5KKqAdl/quN7pj9TcyQnrQmshZTf9cO1UAeG9dJyl+0R7YEYS3sNoGuEs4PET313OSSJs871wgysyrN5P8MeZmCpkNhpOfzr4fjzyfE8vCZASTZuLDqm4iB9smPglaV3VvFCsg7yGi8LCjvd0QHyGGyARD1SdYwNSxpaMX/NkDWbO+R2rMb28i7xPsl2gN+3kXX63VWl4SEkiUn7GeF+lBLBSpazChTbAbOUkuZzbeyJRrbv6gmJZ2IOmx0PRmxLcyzwPQw1rskCK2OH1VaabdYRlAp3LXf9qJyA1PpI9KTu+0
wVfcjrJJXczMBSQEZbubPTusKStChvKE6PT+Y8RaGhnHS+8EiKawbWCglg8eTdK+HpbMAXyDKOtxnPP7bcel5RO+3uj53JBbOZDPLAptTbJl+V25th+VWx1SpZxsmHgQBVSRhrvUws61yT5JzD42l0fEY39YkejRvnzs/QqKP9mHiGGyiBd7PnI2/CC4j9NW/wzT+eMF/UsvI6AQRbNE2Sesm+R4LaDftMBPKFJC5qBdkOFCl96wFdzz2WX6k5PDucJq9j2WZwa6CM58ENEjW9XgeFYf3wG2bBewESS8KxBPo7EpYnyfvqXINptCW9cF6X3rXamA+/7Bw6LWnOFh2jJvU7GbB8HAqKzD3pWajjA7/bI+h55Tu4B6Oyb7TAk44HG+P0hNztercVDXPMUV/9jh68NVcPrGEWgc5zapaXc1yV2QO/Ot0w9Eno3Y8yvrc1HzUDwI7AxGPrLvKamUjAhbgnKUbzDfoa793Mx38wIeHqZXSUy+lpcPOl30F8sipt0npeJ/Unfe42J/dhi5Jka5cT0CTBjOl4uOF+1koG4VOoISZF6rM3gcrQeTSAGlWFZHqiiEpWrmvfWkXGSUzwnmwPzJfHKN404UYusW0M/mTGhnGE3NSKLDC607A53X7TYaIK3VkTAJomaKTF2R2pATRWpXCEHWeZW4Fb63cdgofQ8hKEye9BPnUfqM5tQR2zE5zsa2aODknS1FbXszB3LXK7G/+9vhKIhF4HTDwxrZYQVfDSMMoWc6n3qFzdU7qgv89HvqhErAz2ObbGqsR9Ixe63zMD+0MivT1aLXEm7FahKXIQnqoe1ru9bewaqHnTeTktJGjbn6pMZkr5VORxsoddP+G1bf0vN93BU/TVHhCu3cE/nEG18xKuZSyVrJ+dusE2gR0E+NrXWhGa34gdBoEAjuHUT6elyETzYFTUYvtnR2oDKkGMCOThbBXH3Ehyhf2DrAd4Vj+mRspbZ+pPn24CcEBIbhS5tNvITP/AYaPI2PDSgPPAV1FTqq+e7zTP52IxZvij7WfXS4UUN62emYby86sriO336SWLPEtNM1YDb1WM/N0ipa7YHVIbg8ufZjY4lQMzRO8XteIP7mPWPoRShgybsjj/olPcWr9IVbVBzxkFxKspMAZR5EqlJBGIAmP4QSFiusgO/HaKrrtNBLPBfOe6BBTjdZ6glHTeFXJ40DY5ARTlAVXfBUCFhVFHxeCJ9Kyp8PoAv5in/P1RT30JloGTZVuJatvjMaRa8K28iWd1zyYDW90b/8gZE/rcOAMDbnw5B8B6ysB4v2Dula3WH000Yuy963RLx0I6/KaCSlxzxXuxcQrINgpskm/kn1Vv5m4A9owMlXSpmR940l123U/erlnrWTye1HXPLopK0JS1rkh2W6KIJ9hngmgR0eH5RWP+h3G9oUr0SotIIgnwuYyT5FDtNlLCdN3Mhqjy6momQ/D6xUSmdKQvHmgg5bB0L4dp5Xf9d87j0an/iyJwzjnsK7tcgXfiiW8kzufAL1dO8HSxDnD6oRCcu/UP+90fwr2dqafUgmB885lTbXtof8GExOpFnVM3N5FDBYSwEIRK6zI/5rep9L4CoR0bgoXlXj+Ij9KHWtDcKRwlY+95mLkV1IvcY62c2rBiAI1F269yrt+AmPHrfYB6TbI2nQV/6jJSogz4s2yGdcVY1jNpJL/W4XA7knDaH212+sjCicazVUSllNaJRyu+Jd4iacv7tFyfFHvIYhh6WSnUs8mpR54+cm4wt3fR1ffvpx2kybn44oBX3wSH8mPbWKMG51jqH6ukYAEHlKzO8Y9QlMDvlUxmS+mNfrTe5F99yc7ngt4Iu44km2+7C8yHhxRBIywRcTWqRwR6hF65rvD2VXP94nsbd0aBLUZClbp7U76Rqhws/wh+mkJScUCQUKCSWbSX6CXU4iLi8Ta3H4FfNx2qVyqKu4LDyxdDKloIUT16LcjzaVfdEQmUGtTbrLiPAOuJnNzRUAogjq
UzpbfymHwwcm1KZ54rImgN9LBtUfraT7yLuJv4bpofDzQGlQzHR/zNGDQDlnvXZ16dPfFLjeZJ6XGBiW9RZJnmZl9HzBB5UG9m2FHg6cGVxmUKtUIxcOgVqeFtcp3OESavZ39AFMfwth0g976POwjVftFjuF5YrmZRb4BgEHZPtKiBmKX+6Dz0RPgZeEer5nEv1YPhmo5duv0YYcm+rxyiT66Xq9BPcsUS/bOype+szjCwv6Fux5V2FKqFaj03MDhQ7Na15FyRKQmtjJYNtS/NZ8eRnKW6xhdYI2lv3kjIJX4cvfP2Kgfc7Gj442BcH54i7EvrUcvrtzg1CbPmlwnNH5Q88iWf9otbiIYSyVMfcgfbSYH+n6aZyMMMlcqyl/L9PYetPbsY9o0ROwlz9rf5lDYkyzT1dykzxDFzbiurrY9n1wmgzQ2jKWC5DLTNMgDri9mfnkWjFrBY30CbmqdBpmlzD43cvWR9isrekxmCSnSosgXTW0c0QjurFe80hjo08V1CsAWykYF1SJMkiD7qLBryMd0JZE3zEB1ppw3irRC/J6+UHPkEC+dTQJmiPc35EMQjBzkaSG49itlPGRuhC3Y8DgPa8T/jYFFgLRBZ29uK6KJaQvrykVPDyZNe25VIY4LuTREBglgrpMiCewbOpuaNgnV7DVHXrXHyXNElm6OcnBzfbnlkIiUn/46lQzrKbD6Ns6o8uwbEUy96VVGN3XwQRzRvlXQ1M7DDSyRJ/JMsg0tdATFMZRDLwo133hQmDL59Qe+3pzWMRt+wR4yXfdl3VxRYG95ECG5XieimB2Mg3JB7JtSdbq+VN5k4UMtuyzc4uSxgFqJgmty5wvIR3dtfwFj13pqYQTJ/IhdW5p+6Rko/HVM49evshYTUJODWjQsZB4h3/LT9AeZaMLqnWxtlalYGzTJN0eN+o58yvlU88xpoI+kM0TWAcFgHPywwftIJJMPdgXufFzzHV7E20Np2CBbjPFhm+kIETJrAvyDXgnIVoiXluZXs+BjT7jLZyCz7rqZG+s44gImUeFYO1vLs0TAu2Wu5xIuD0cu8VNApGgC7bEbQo+rWPaiJAHTXs8sZXR2GmkMZKP3H+U7HnX/Kj+GW66GOQt8zl0ePqAzgQ9BlKEZuOrhhuz0CI/TKOEzAojV2okg7lNy2ffEG4biP7WrHGQR2LqvNsnzj1JGC8Lou7liMhtk7orE95eoWCWK8W0xyLbVOEF1t9M7UbuQoeVhprQrt1liRuCh59XF3T/0Bx30d5Y+Kqywsx686BOeH6VPwMy1schl/TVOfSd8Oh6ixatst9bUmV4u4ZvwsZWWE+hDJTvOwwb0Cd7xeguQvmxzkSYWPAwORUHrMiVYeYNAGhhc246x59MOz/bwwvdVr1d+kRQqxVDEmBpFeMNGaEImh7Za3PoZjT8aec5kS/sCPts9V1PYaG5PaCxgLtHOC3Ib18WweSlt0tTO35W4S0/woKN/rhBnhth9qysEl4dvDCn5jjzTuULlM6Luk8+V1wHzqqjNZ974MkpkUsSM3lfUZQX+M+CPj45oLv/eOHD+8wMFO/QO85qYMiPURVusUUynNARh6LR43qbhKS4TMo21pQOMpkARY9ET2ewoAA0t8n9Y92hpr/EJ+mYW37PHtfIo5nuYirDGrZllgmkADN+74q+2R+2UpesbBC9xsDD1a+sV5F1CNzwjYKlLBcTBWUOI4So6jcLN9bbPaltWIxHSicTtHDx3htEOoMlOaaidzsleLkGcYNPHOFAXvI5Pz5JZEIBY50TPk6WAjcwvyd9IwrlEt49CWxB7DPwpDHVPshx89Okx2e0E/Xcz+5EsIXnkSTIuK40UgEBA+9c7Yl6nkO6cDvctEemjaMTm9DeupsRasDBL5y1sQig9gE9e9V5bKJHCZsOQUSfHd0qk5aTiuGWpOFdLvfvYTygmTPVZ/MYcjNINqgf3Bg6B9+cNlaiKXO5HbQ+LAvV8nZCJvipz4q5V7Zx
zQlV3gH/+YHJ57wTOPIopPrQqF32Yh6hVjIkW5O9qw+TWQahF043w2tmJ914x9xQj2+tHV6w8f42sOFbwDgeR0fDO9zweO7lBM5GzUOTQYJEOnZeBg7RqpDR/XM4sUNiK4prHpDjtotT9xAD3cVlsOcdV8gn7ibsy8AW4574GuHc5ZuB0eWLETm6wClyiFtQto9frdhkVpy40wQ7VODOkoOQIfdJ74APEte7eKC+PKedHZuwShw9tudSu37WYoBkmzd+EI2Ajl1dG9fOt6RKoLF3RVLz6pMEbJXJXwlcDtyHNtIZ47dpBJplqesy0+owGFF6Fxz6mwE21IPhub4rI9RITxuzqH12aEg75wArKI1O64XyOWl2Xizx2rlmVTPkGbLpaFQKNiLI1qvWGiVjps6p8bE5dS75/nHOL380qwj5abgt6HQ9kLYwyvD6FLO33pkntUFpd6vo60wOw7pVwEeN3zAEA5z3HtGaH2oFHYbHfu5xOHwa8D8PONSP77dW0pV1cy0oQwQjKp6usMUA9dMnS9LhQR5RTHgP0OmDDlBpwgRH1m0CqtGQjBN1W/n9HhbvZH07F80fjRtjVZ60yudmqIKs9Vn11OpOJ0BGIkFah8KGlJHpabj5WCpYjuBu0TYYkWvb4bNzt29eU3WsyTeW9GE5JHyQKXrfOty8C50+MgiI2kI+BntgbmanIcIuvhUJQ78JrdOqVS3cDX+8yLUljCEa4m4MYuRUsm3WP4XLj8j2xMTDzA8PD59CaPDRNXitGbyFzsYJ1M83yQiiBFTN7Ha+H8BYlUWgtmVtcXeuonh89qdFUXf2oVMdB2pskxsg2UM+oQrX13Jr3PNVmvwmt8xi7id/z3+cKIL//p/w8MZKHOnC/DBATeblAyWsWF9ZAi8ntBIXC1F09Ah/HCUWPWQoGdKm4w8d72ORd8QSC6KxEsmdf9aJMFuwPWXsoOsuMoqOcW3fN8w+igO1mV/OIBJfOyFt/WBSOc9JKno0ryvV0Q9+eZI5pf3ESriruC/a5XqhO7VhdGQkWnBpnZSOfgtwGOR3qA7bIK2JgqHwE6OkaraSAIfA6vFC82mJtP042kUvto+2RLtpDs0pTsrpHlOH8Aw+IPycJ1vArZFPBnxuhUyB8jdxuvm2N6ggbnVL2J6tYIIG3dkD8Cm1tISOm9ra9cit7PwJFaGPMFUrdCeYst3Xp6E2lkV6Ob4Nmzbl2Zh4kPXIonxEPspDBd87v6fBHHIrtEVBrfrj0UREHpiNoP3EgIbUDmIthQRoPbZ537ElrHg82kUzm/VlefUY038bFiQONYm3zkXn0TKYtvFF+EmLO3e8/Bu5ljcHApWmld8PaPxI8+mIOX7F5tMH8XUhZdJaRWaHfQf0Ztu/WpEWvytDahwY7OhMTey1U8jaa5Eznp5p5nfDwzrsaZiw6ZsMYJrmzrvy/26gxY39tiKQz8kD6T9lDU7uaYSWNPG71hlu0eT6mNgX/dMVoSu1apDOlIMl87faRPCQMuBBlsNGcTGfArAxnA1mXG5BedBjhb7NBd/HigbWs7OUtzAacr/VON+h9SlDN7/Cz1oENTToOJ0fzGossGoQDN5EnO1dVqoTGVs/mK5mClUtzYTd94K6W2Vm4H6eT+zkBJQvS9AjaBRgbTx4XdAJFhnNO2WalvnkKAuZ9tDVWGwXn/xKMRF+wlS3hv1l4vGzH2k5UDqEDMIPVH0EaFiN3UnBYriYaUeFUk8FQQGvkeiIuWbzbUm5clhj0V3b0KkWTAj0dArhSkYeY6iGrdmAvPBg1zePXR82olYg4K2eZIZ9tYbCwfIU5AfMzdCPfsuqPOyutDzBGMfKH7B546Z0za9PJ9WBfSKiK0K8w75fCp8KqjTuSiQidZ5PzfCrfXCymme01yyPmaGdYnVnhVcy2UaJdokS8MCNNEKUq6oaSSS7RZOficFTy9x91IgCGZvNIk4XosBdiOBrlSKBwTPhKTlShRS+guxBv8IzvPEV
hCSd3kQ1ny/d+AhIddsC9bjMs6xGYYs7/LZjGPxejTBt7nL4+Wh1mZJcfPQ2qR5F5A2+OCJ9v5KPW1j2KV8R0MbHMb7y7GVuRZBWrr/soK+zjRn6elohkG6KOb0LhgpYksAYNQlCW7fwW13GOV64a0Tpnohgpoz1YwvNriJj5Uc/WVDa4PCHrBAQQBR7Dm2h6Gv4ANM/xUiP6dRoL7yfbX+gGcESTXdLEQ4l37x2JBRPwSr0xtpcBloFMrCbB2PYrPBGkPv3UciyU+6SsnNUnuZgzriL0FyDN/p6vZ6ZdLPEgw1jHSuaZrH6DCn2mYbLt3onNrDcsMuE3tWaerqYkxm8JR/5nQag0BaXNHl8UjOVnrZdSGUoR8iURfMRjK0L4E75DByG7euknyVZ7qoD2UYonycE10SBPgr0NWd3sRY/RPJBxYqZkwM56qFjoFQ6I1jI5/76xCoEqu8qVWXC9kG1yZHbz3xfl7I4ABIv0NJhXao4aWOlTsEIUPrCdxl6hbw52Gkm8RQICebxAfuzTEJIP+RnehTzh861eR0fpLd4CBUlWbTzztogOFtB8grQ3sEqlsxC75aJWoxSVfDJ0RU5IILM4olbz1lvzvX3K1eYFgJkctYJke9NDvaZYZ8J9UDfiR/sVxaAtW9+gpNPkuXIuEcZ9gkWRMKRHf/2SZeZBPTEoCqMIINDiQgemg/BSDrRwGiwlaIsQsvgn+YleF149vv+CkdbXw0AIImImTOZ0lccU2n/RMy7zB5kI8XdsFNIL7HCdyXIjEdnwgl9W1ayqK1Kc87rIHQZbnkiT+a1uncse3ZsyjNd2MAE5TzMAgfLL2rE3bQL9tLFpAxpTpy1Sj9En9wcn+SItHBh3srs/Vzlp9keWcnt40QW/tVKyN7fsg6d4AajKavFD8zZiEF7SHrPlg88qkOXfLow4N8R6qEyeNYXJx1XpmWr8AHx7o65rsG8QYCs/74J2i8BY3R2BbTIwoUfvTBBviujQfD4ivyIo/upqJKGSTzqnlnO5PoSliQV8t3U4LN/Ggu6stoB+2fnYNtb9FUqkitZDZcCR6iZryexIvV2VbCY+n59VzNEjkKZlQ2e0GuPdZbktorKGq86iFCqJRnC5WjJ7Gi6KFt6Yb5bYPzaxA9vJZi3WihltzO9tDJrH+At2IN1l2VtLuy822IrFoXwSlDRFL7eED2/kq2HZlKX64eQ7gwsPtEDkEP/Pt239Kwx/frAFFSZGD4mKVgNYV/yAg/JxwcULNIsDXj37ZavW8uSNEp6lbG0GnW4obG3VJuhCr14dGJ4qx6fHxQPGLBdQeS1pY73UIefzN5wH7QevtXV0bCf2dymxmIdKg0btNYPkPneRhziFWbRpAM1jzabX8P8WRHbkZjU7AJsis1noYFN9l4FJd8NkWvGUCWzS0QjdjWiH2O7WiQn3Oo3DDDcvNqb5xy93UCQ9Ydp63BMhoHQYftleWpkyCDhFG13OsQUR3xhvDhy0GDschlYPxxuQRx58kfru6GszLyoNix031aJEGugTV9bu01Og91qmZ7sRapxGTGtnUZemzQpvhPlQvyo2QJTJDnoGyujshhJaMJnM/j08PmuWJLZi0ENkYZ+7dLZ0EFNQMNmun6cFTcOTPCCzl/vgvgBo6g8iuZNg4055FI40TdQvkg/pZrgxjLdtLJzqrsiLt606ZYyV1It6LQU9PVdyhmbq9sqVLz5smVL28Aet9Buy20DG+fY6pXzF0aFBCzHGE2DpqqhiWhbVeyiVSQqh7tmcKC5CVVq/wAkktHQYOQG65La/M5Ah2V70TEPPbtZd4ataeuNlvDu2EUVZ8Za5/hd6afFHhtar9ThITyh7sDUBWVZlKa38LLGdzqcW/tA0lYOIfhRjxJ6tDF7MPorUD+uU+nk2SwPs4mfI1fK73Q7s1YW2BFsJdDytUaLeTkP/22Gma31ImmPLdK2T82nZ/z/YusqtiQFsugv4bKExN0h2eEkiTt8/RBZ
PbOac3rT3VVYPLn3abBrBe4tfZIbn9kLAFTh79ACIUrz/XCQKN4Q0lvVZPUmSlPOI8vtkNIAQvioFVCHx1JuqUW7aC3s6QO1VlUL3U1BcGuKfOCmZON8l7fyrVZGWhiYJat8qS4t7+qbe7ONDnvVkIa01xamNWvYUKdFXwCEmQ8rinfgUcB9tAn6tZ4Vrj0J1xovhv9qTCIpl45feOvAVvihx4/Ga4jjLGIwlREzp1lFPahTTsvpsafpjLTjfKvjt239rOfIQPOZLsA2MRec3dAKfYCTkZof3kthaHmf1JEovO1qIPzj5FRM44HAjCWYRCgIWN/b48NZ+0E+P1QC3wyOPuZLrF6QwuO4RFHJZKFhQb1hTsuC5Oeya18Hms3YrX7Nb2QBRUxuilzqKwcSB/tkU3kmkiuESizP9x+HN0kgU8zSbbwABCNAhDB+VyOVAma5q23Hxlb+BPjAxakXT8JfIuajLX34QMEKDoVdm43E9qIDxGebTSzUQOJWLaH85/Pc7/D1gn2DwG1UE1QUPUYZTvPloSGg2QIvWD/RWmYQiSIdqHpNxIFuIs9AjnUIn3f8uA8qBBe+r5qHRt08geoG7TdvX18SiJOXJ/hZfVAoztvVTyPG+xAHpxcKm0SpCLc9QPNozEcAt+vt96tIayb6mubZlnJHjR1qBle91t4Yxn5J+7OTNOfNuu/zhhCt1vVURZQamr6eWXoVbz6uY9tc2iuHEFF/Ce3NX8K9ji1iNcLGsu9x3DJMpwjOU7iN3963lgyNvBnRBQHOdedupkiCSNPlY+8A6CaOt0ed5R4dWFMMbj+d3NyJqAkiOkWs/GrjgOK4r5tAYuklBfIIg9qUdp9Hw46PzIyTj14jVlBwTXV0/AMlvRAXp66gJnNfp8NDv1WnKSaCui2sVcNODM6FXKh8mzarMu0D7mvHuFnQTiZsv3FJ92ocQfgxTmSPYmaTnCl+8yIItJX5t3UrUM+Y/TI+bZ3vXzLou+fDXUhXIHHLkmYM46a13LBVlATkyekSYEgk5iqfvnSU+W6F6cWeM4OXPF9VezwAPtLq72613BCS67teyGahPBSK6KaadRCIMyMVKfR+0BhXejX3c3bKtP/K+sATYzR+PLZlRna3bKt9zqv6rHIiQluY9H7hZOzCtuof4mLD9mtHgqIrJukQjhcOwGRDkX9nzbo8InF36yw6GU6eOcLSJncSgaExwyUp6JmFARKawBNYYmMTIPYkzwTRDVNL6CuXXd5rwyK9NtuW88jZn3zpC0mWS2TKJ2PGzXFkipCEQyPUQKtpXrEyM83c6kYw0SDqqSAgzpp573TblUg1RMkZXpI6D3H3An0kLgBOJvx2EUIVYqGRkYfZBcc5fma5lctJuLxb/lu735bCcVe1vdz3FbCt3sPIejjXW98FPQrk3DBLzQCV2Q822uqb7B0FMrW8ZDZuQr6B0WDyyyWq9FQb96KvSUpImqhvB0ejzmzLaOIIdHLY28Cs90O/pqxKU3C58nHXWPYe1nC8zYUBUrWP6CsuHv1oiXuiVvIKc2OCxZR/PSd5sMLZ9TCwP8nZHgmFNgqQ4F/nSgq/M9Hu6bQNi3CDfZcoobuMSTxzS09hqyDhRAZvdFzHFAFMkjKFAWtPZAPxHSznRFXSNKRkcZNAF7bjt+6UDynY9vMTFDXiHnic1lF4f+dmQutpTzPZZ8R0gt6aQiyJwPdY+zFJYHnYj3tk4gGGwqzDS0Emy9TTKrvnSr0WBtgpGdlSQXyjgvjzXkMYInjFICMbhHBuyg/jD8HZQWv5mD53QG+A22biRnYUstFdc0A2zzO9SdtTHKqcfQPhoFzR78QUe/o41jXYHmR0cDTu9fiyOev8gTXY4HCqngONxKkVAeUOGqs3ZASiLxj8gSHm5Nu9/Q26aYTYDfwYf0GgdpykRDUaMOW1iL9CbabSrV74AdxawGki40yxoa4u4k6metGFCnmlp4M+t11wpf0m8eX78Hz+r7pi9PIu
dfjnzkhvSfQARdoOVJyNIORhBNjX+RFt7RcCo28J/whsZYbDiZNsdn436+6oZAxtOqQpNgLIuKEfaUI3r1AZSJSuEoXJUjkMA0xxm+FDy93kBsOU0ugtEpZlSsoUqt89Wi0CemgTeKjJ2d6OryccaUSnrb39TNyN9fs935OdN2196/Gbazdogu57OHTeKDHd9Mw4fGEYeQGUxTIXx33M452z+UMHtdR2ngfRaGtjpcOdqsLtpkOsNSVmYzJuzzM6FtJFdh6UbLCRqvvtepYa6uymYZvY8MreX1p8ccOFci438FyzeZV1sWyMfbmabSKZYNapuYBPvy7EheERAPmQ3vUcCSGIuQ7nG75w4Q3rQCw3htOOOllRkfyONxL7ZIgAHw2SuG845vJMxob8RspE/MqYgddZqXjlMQjRCe2HWrz9dnYnH9taXM4TKeHRnpaanFhr5i0/by2+Mtb+sAcaq5t9eIXcoAvJfQ8XK+4WqleRmmnDTMrAY4REnMEgpY8ER7vgh9D3TOfxlkU2iu/PMJ5uiH7q0U3eUPaYeM8s7hfuVgcGQjDaGminnRXGa6KO19vDzjT9PLx+gKbuDD5QLR6+Oc6XQn5zH7Z+n5dJeHJpwxADfL6wD96bFsKkXxcYpUV9koHyxYWYst9Uu0ZhyN+4LGtJQAsKKNx9oaVdU1MUdJ3Fwgc2M4bA0h+uS8yxKonYFl8zG9dOMd3ejCbQ17VpZdbIjVomU/IPdCTF4s5qfemmmrBbcr5djLfQ1ueCmuNqSB3SK5dkrRvUo1xb0nsg113Mz6kYcmzr/bcOMUqE/SvlYu7gQ6qsrvbBoO16QLLIcfB0qQf1gwp7bO/Goc95+lYXoROEZJWN7MMtmfzJ2bSeMCj0O2LUhQUKrvNgDm0ogQVhi3zq5wzCrJq2ASvPcJeuFNNRQf/38FqkJs3bv4ho58ZDOEwYN9jSi7rEyVcJBk/Ukv/iG8QfBfuognH/xkhf1w9WrjweWJrQC1v4Za28ksybxA6Kl5aXXpiR463N88av90unmMtGwS/V09DGGkm/jbNzubpKhvT7GjJOoD2K1U0EgHGcNIYPr017C2x5Z9547Dyk3Aq8ALsHm2Ptrb22o2EKJXLhVxfU9+uy79fLJ20krQZHhp8n4DcdC1MQdJCyb45RqZi1QnO6E9luKDmXjFMZqrigfHGNNXvhYWAFk2qjXaZGAOG9bfQv21Go37oC16FS53id9DLaMckwFSWCiLlwHI+FXq7WqEmEHopP8n0oxHakZQhso//YUhj0Ri4QyNQchKQv7DD7WPY4cTWIjPMjJsvnLbj1bJyS3i+yk54tkFYtM3tvYUL4oExpu/FE88v68a+U+gJCrLHo97WNcpwli4Ad+3VmOvkZ+TNyhA8Y7Mviok3Jlvz4CA4KVjrgCmcApvFEx3YSLLzMifTSVxT2O6fixKoKSYx3BV930w/GabiMHgm5x6WGQ0BA0Hm7g4/wSjCesQXbI3CmWihXuap6nj6go6g9pSQof8kkAaQ/H7scy7OzCOxZWczgzIf8WJmko78z6iCuvHlQp+KKpz8us44YPf7AYlYP2GLIJVe13BlTCat9ip7KKTwsNgmMrPhat+4fTfgJ9e/6vYzcFov2ZtrJk5tVnnV3QFq3R45TH9O73hy35zbo4PlFrzGGW8SRrCpa+o24PKblpNBrLkC0jA1twMDm/DtHFf/+nOtaD7orIQ5quPFjbNRH0oE80ublcw8GwVr4M77ojMVkL1VygeJpJiYi/ZU8/v9ov2v+1usM7gZq5VAxZe2h51co7YR6YMKohlHos07YVc2PwG9H+U1AoK6dXqBDYhxzhyBCjDsv/bJXTeUJkysNvE/fQevkbwNjw/hG8WxOzKSzG8nlCIw77L41H1bzuu7nS4svsTY+TsWkoO75r6LFaERDh/t0+tYweXhiKjJknFRsHxTjLlcUx8nudKrflM5wSwCGFrhxbjVUYXmdb93fTPlb7yKt6CF5
ugP5UTs03aDigrma/tDSzSyt8es57NZomU9WoH8WRTv3bP/mhfmY6+X05/tjQKurt3tw1D6I1Et4O5a/IzInO0s5vqzoT+Q9f0sUgKkUegkPlHnwvWldFP+CTpdWp2+ncg9Ghz6xnhZGHYeftbAT0CG+id2CdK3pkX4hwSh6f+VpWuU9u/w12jA8CSJFDg5pkEABG3sSE9HDboHvYjJy56FjScda7u0bGki75rxmaRyvekkFywPRvgZe0lWBNjzyoTHYMEHlo2wFZOqj+qn5Zismv6a+aIxQgXxwjQsYlPxLsTHR1Z1HQkPxx1NLUH2FCu3zjudrQg0b4Oj96jLPXb4oziZjXPp+Ge3kZov9czRvZ7Yl1uWlj32t6RLfzq8/qonWzwfkYYBdnp1Vw8i6/QjKKJXwKc1AsDONT0HWiKXHsmiILEAFlXNLbYWFHKUmMMxbOAgg4ZnWAV+BPDQhX828v47fiFaYLjfjLa5HHbyP6ttc9Ch7Hal+HYAg2YcDwtOSrUd2AY/yG4pjcirCNXWUDcF5m+zKcuSFFlScKw8HKCTUyyl0OsMuJZS2hIfNobe0NKDvanxQbb32mzBA39rHoRsCKxvXII1TbGQFvZz1TkoyfD+Sj04Z2gh+Z6xvAr+/gkU3NOqD9i5ZmCkslO/229MPWsn+hy1zgGLXVLphTfM9PY2FvQRI5CRrRL3rj21fdGbxro4brRd88wa0StFCo9noKuqJFUWszIkXtxHm+A5zxfMhLOtGMMdFK4p3scskBj0fsCeGyIXMfG/PPqsKvI/d3owxLGXMd20TGvDR5aPy94M0DNeBLgn8xUm+YZLEFjIkpeV9gS+QO6ji2N9IrenOrBuKPmm32lL+Wuk6UPy5h5NZqtr7QXJKIQeJLJYQ/Jf0f1dujoAORODMjojMN78ouhC0R+iRQ5r2ewrhwnybzQjEw0FSmtE2nZcad3nHL4//6j0RHbWZn3GFyoPqJyTzCZLU93FxxVUGYbrWwfUFhDL7uMWgYSPBvVgsxhUtA9pLV73CIyW+UXAYMrbYOrA1voPtmPhf9UTsOvzDNdUVYVKmopPsbhfaUtoDqV8NdJ4OS//qnc6imN7CDJIuF/w1YmqZZ0iEBKaO7cYu4lXB39EdTgtaBVJbmLD1q+q/0YaJ/OJ42ZM4joMX8RyZrOJinke6+TUxd60SOG/kbJzO/nn68PRh1D6/A41qBolfkex3lccRChGoAvlYv/z+RpXAUtUWcjVAd2340cWB7dADIvboUTCnfyvle/NAnOBTZI/7cpE2zVIfk7+Z9ckzcDhNqZ+RzzAbSJ6fg/Vlr2lVb+5mfQ9pynyg5xL+e4vn18/piLigWg5xxDCcLrFEAB9aShF8G86tx2MLRy/4g5B07lD0Nr/kbmf4fHDV+hsnYctKj8bJ468GCfGRP+SH+Lv12PBaTJd5CvM2IC9HYJ7zyxJ4tQqqnQgyMdur/45c9eL9mF62BBUE5NjHHgryUOZnkSK3gsbfgU9rmYK8ru5XIkmWsgML+wXCRnEVGQ1d71kIsn/QW7UWgjKDqwyHsYp8/mvxVZFsQs1G9ESZPh1xfxU/UfyN7+t5a/99AmXoBeLr4QN4B9QHOYcuCJj7OrYBoEc0+Hy9GOTaCQKPVC8dwtQ1rc8A6KvA4njrtEeWIFa8IdsQF2Gbv7jsmMfpVyxHaNpO0+BfdoEkk82N0MQf7q8WTlJHdmfFklgy+W/mbVvCvbfEjlzGq4+AzSs78mSatwVf38Wfi5zIFYAkh/fkNWJNZwSH9U5E0E31IB7Yc1Hg+BT11wI4aWiGbeu91p4Df1Q8dNZKy96KxddvMJaCTYnrHsAKij7S2G6qKEocjXyApFQL7fpa+gxKFAHd9xCNNphcxygSObOTOhpbI4pdEj605SC0AKXsWL5PCyUZ3R32Izi3LP85hbAWIkHdCx8UvEwof1l0qbEHEFP0Rh7sx4K8N3vcs9pHLqUjVEr3VRmLFbCdBRSX
MEECbwpdLKp+T6h0SoIivH6im8Gg92u0wnbvqrRX/G3JM5u7uoBCPqzAEMfpYJTQLbAIAd+zMwwQznKTPlg+gm4LQRUQWOFsnwcxwQdPIhcCQpbsDQzDLr1BbKEINSHy1Iw2R/iwyimgQE5h57kc/O9s3B4Wgfd3HaBQ1piAWEjoPzwWdzXriB5y5X9uaRH5sLFPGp80sBVGGHFuKh/Q9RGgcxoNbAQ10CjeEa0FehGvhjuw9hL12MjwqYwwQhf3YPg6uTh445nErdo6XVHRVKkTui3HW+d30GuLONeUmhU9pZ0eHz+Wh+4oIpuAf80rpH6RlXuyRmCondW15y5+f3SWxtM4tgC+T3H9pBPwJgz4GVdQK2C8lkR84HMw7HbGvzn2NEEjCOv02ybSTumdw4hAekq7d/TGQqEz6jPeVCKJxg75LMBnJG6S0RZkra/z9/L22JFE16rdVUIs3Fv3dSLanIsS4NHEySkcDEsgLxReaOSdCWoF10p3PpFnK03EKw7SvBEZRBLD3+KarTaMvr9F1xsR6EEAuTPhpqucK3EnfDDt3e4qOQ8X5xLJPdAFD3LZeqxZMF3zK3odNT8FMeSuTbE9vo5UMvoBKUhriSHw77a5U1xzy54eRmbSbu35PCKAVX64W4uddsm6MlQnX6vK5Bz2ErDoIkv7NItuGkSeGmfm0Z7ZoCM7BQ56kz2zfIxl6hID/82am7zoNfBi+ReH4GdX16EzFIxE+3pRbqoiWagg7RCnq0qkXj/Seu+iiSJTcegLreX9eeKr8t8/z7j/+vnXgNZJ6lHK6JBvlyeU4S0Sqqj3943Q0YSDhKCtTVBceDNdI6XG65GUdZtodIpWz4X/vr8X8KXDQIMatnCacXv1eHTXevsVBdwvv7INDq9VrHoLvLCssUg1m6RC1tfv4jQ9cIuk9BVUXk31dsCuYu8q+vFOah9u8JC74BkxUwkezJml+O3KNKCau3JDf575yJYFPL8Ekr0U7O0VSOB6GEJyTPhaCAkY6/D4NQwQsJl66mBss9Adsx+U6dmAaFsdEMm6A6Q1g6jh6LjlA0wY2AhzZHyw3mpKb/Lv4wm/juZoFGIDGFiuj183dKnfmQXyKzGUm4L6KsGPpuxoZAr8NNCn77JD0udA4kUj4jDuHnAu0ncXRMkcUImgf2iX7A3Si5sWWL3hPIW3hiQ7Lt8b+s1nre0l8q7xOAOpmbfu4jjrJg7lMO99bX/pxfg3IX/mgq9KaW8LrVMKBsROCcUEoijPJ7wl/A1qt+beMcgPXNYLWpOvGKVJuP/1N2fRVFtEZmoWQoIKQiDVWC9YnhpqEf4L5jZNkwNDyFHZPOUPUn4cj07IF2vmKhrewE7noJvq55ZmT1hSR6eVrT99H0tQCPxq0mzpTgbvKAzE5nmTiMHSPbpjnZ+05lQkXNq7v+owOmi1SDxLDl5+Kwg+uoCU7syjIhlfwluAMdG1DpnrmyqAxi0NO9IC8qI0BlOda/Q2SSpXWgKiM1B/xz4yA2/fMUGnUNcVbrH63qBQZrQ+s/zApnyT/bI5ihuGIw/PKXNczD3CidIT5xcCjRAonaujsfRvc70/a0TcKUCuFv5Zs2YF89MFa2jzPlXTfPSiT05Gywb7sqXUcdCpkNZvP8fQ7L0K/8ql9Hs0jrKiBHbd+/SzhuvLzSg6WTB1Lfd6GyNYuov2I8PhrWZnMuHaIAYbvh+gBegjfLDH2QDaaepYjb0Qoq2BErb4QZpwAr/jrjUYGIXhSoN1pPApQqXg7/IQrGn8zSctO2zYJFP4flQoXKqlX3r9u67pt5+pnsaT8v40d+lrYvVnPX7iz/e5E9rKlf/9W3QKjon8HsOgyqPSPMS4HQYE36WxyQI6m/AEjRcQiHKlva2198PpHjorF4ppKmCCii++tA/2vllsjFoys1Hjwa4V20Q//b0E89OPf3fn0NxMNJ0OCS449OWlMmHjUpbt/QriLmND0PETZLSGto3WosTZnDAemPhY
dKpyP07A6rcbTz1IBjZ25Neih9ZSM1YDTC4ZHW836YuX6s5vHbtxdm1VVaLLgH4XY4mTdDkt1B5nYMoCi3dWIjsoHs6fPYw+RwwnPlC6UuSlTv5GMB5rHnrA/nC+WjjFo4NykweeYVcJbPD3NiAC3/96X4M5Jy+BuoOuR0WALXQkm6My7Sx2RiiehmZUtaK0IzVVmyenYz3vC+yaNrQZvzPHtUMJFs7nr18Ddavzu5WORJcOcc57H2povybSDFxnFDw2uyubJZd8q7Xv1e+U8W6g/oMLiNVmmyWBb7N8L321SrwJzXY9BqITiniU4R+zm9cJBx0cYPcG8jgHdEzvMQxsoge7MCDloJ+vjhueuh2ZOl9dyv+MlHVTRyAjK0loC0wMpgxFwlRK6uIiqNGe2jwu9rulJ62P6LDRKdjrkJ323EmzEfwSTsV7m8S0Wb3mQVWOo26HEAnEI7q9W5+Vhyd15sEYAyAo6aji9rWonhYQRmuGHAEVMwKZ8KmW/CkW/WSLbKbXRpNEoIc6sgacw2PjxfRObjAx9kt67wb/7oOCTR6vVEdajkeO8WC1x7RZhw/kX+T1xpxC5esdwBaYFjOsGLbpg6CBFMDcQqhEU6c/P5AJEwYYwKY8hHRtVcKx1zqSJjh4cA8Y/Vp7hJQgOmJbpRaqaEtVM2yp8Le0XtuQ4cL+eHIQ3YC9MMVJUPWvxAy9KX74CVWA1W8KJmLNTwmsXxIWoc0gIzUeXdskIgy8HXlJP8shOLhgQ+PyhB55kVAQ/en6CHVD+XtThCuO84EL9gk9R5kBKwU0ywkwTC6ydM9pGUoTDLeO211BzRmFmiMw14rhTXzPvWyll93Tlnrnm6c69kj5foMMsuBzGqjCfCD6jkOds05nuL3Y5YRBiABqCoAhS3ByxglmXo3EiDsTCrK0kR3Q+9S+Gv2WblmUYLYChRlh7q51HYsAKvse2m5imx4M5Qm/QmKKjKrzVUGvx1V14P4cs0nc2d94oX7TmifK7CYbDwwmYhE3d0/Pudtxcac/8cnxarlT16b8eHMULSwP+NWViKe0zCJ6Lx0PMSWqdieAiT1b6zya5C9THSmgVr0HM53raSTsMaItZDMX2uRLXak8YaSxU2CVzvU9Mr1JRS5P9KfoQKBNywTVp36tWkhMLpcc3oeRDNoHv+PbpO6cLOsZRlojdZuNfxu4xLGHaNI3pZNjLknlWTXOxJ/J81WFIA1+Ddw24fKRYGWRhe8/DdzPZEleQlN5l92kA1d7pQgmxE1u/33lyOLO+BWgWPGbhA9THCqT9A17U55x0SjGLdAuEGPfdVxzstmSQYQxvlFehC6yjRaMhf3XtTZfedmIcoR2a/2W1RD+op0OjglhZFsoZYPIHpKjM5Xvkaf4nOgCRDWiAi36eT61tSTOvV9aksuKWQsWLbLtAUOfg0OJF2L1aieKvZQMokTQeOyCFPF2nwAfsTGBZ/KH8LJmirdrGwHXDtjmt7jIdjqtO3jrIRk1kas3JfSseLgrjIrC6zpYSFFq4sR61UZUSmR8q3rnzfd8pN+waJp9oDqxvb7FpfqPzZOQcvpCPXYNt0HrjkVQnoz01Uxhg+SdYQOxYPrrq0vd2u4Pn3tFH8FetiXTMdm0yhZJZxp9o9MGL70PbNCiPN8BdC8/dBVe0omwCxBZLW9EdU+SIYo1+a4tqC29BtSieJ2Yq+g3g9x8IZmwKaLZcS8oF6rhfLOQTxq/LTag0IaFPfxVzqH/te/noT+olD9fizKyhyEEQ9Lczq+CAdo72d/SdNhA1IJMtgDX7wUPhmtpPJ002aV6lPwBEyt8KQP+VslLssBHr2r4I7CzaYxzFuHbKiKJaALQUGNwWdlRKjF0P2pV2Vh+nNCpTnsd6qkphZKV5Hjzb0eeaEudfpkDnhM7lD98Y7p/hc4hstgYF0SYBivH1YlRzLVCjqELPRrEtG/VAdRmSDtwkTe5tdD55WaFZvPrShUjUpIKGGOEBuGZ7U29
Q+Dgupcy61Q43Olp2Rfxm1pshAtzylH4JeRmOoSaMlauNWnAcxkSo3iisktxiuIE4kZyaB7meRUQzF0IvM7m5TBjQUsNGiUqpYMvv8j1jeRu5RHuibw/xY5Ucp7HFQG9vg/58mcniRjUICkZpC6SL2DTQbanlDzBuEnGq/FzIjKJjh3L7reMZCkHGIVlobvzlmSCNnv59EN+8zdHmD/hsSOefO/ky/laWGGUGC9cExUMfbCPkadSzAWTlOHa+5VOEK/db0PYtjcvITbfgdp3S6WiafYjrIsnKOfl+j1iUMJNk82Vc3V0Ms6naYBVzPZ+3BXsmqNnO9ZRqcLMlDf4vhb9KXVpylvz6887nBHSXX9tPi9ir+wcIwZ9nBBKkPWa0mQo7yNEgPaXBd3rOel/3Vyi00pju30AyLVdiep2MULCs2Fykyje6vWrDCQitGzqNOi70JFC5tqOuCxeqRhJPMFvjkgwNtabU5Ddw+oANzLPuHB0QKHQuSZG9iNW+d88HpNjX/0HUIQBSlY/g2W4Xa/f+JnqwPaIO32tUNY45rEufQCsiU/n+aIt635gCpgOuWSVfPu/Htk8ToO6IBIrl1O54BrbqjrTixKGrb/Web7bLUtFyPdUnDAaf/MCsapQcknrj8W5Yo+ClB4JY/iL0nlr10GrNRtiuBbDbPRgX5L3ezrD3IGq3GZHc5ncoKFczQVN6Br6TFTEMChk/oZIwFW2ddCjpFphkX9ZVGXik1dBZ4Uvr66hjKYIRnq3mzYtvkw/z6lgkQewbLlotlnZxfFZRo41vRD016SCwHQ2RbwoWFzzAm6ovvNIcOhhrXo4F9UrP+0tws5MNmPybQgXVbiDlRB0YSKf5rWC4iwSaYTcUJcz/jpZDb1mZqg7qNHnb0i6WbYI+flJBliSFgpfsa0zVCgd+r7dWLW5eRYTkXBw+jH4LrhIYx+MPEQH2ZL2xMbgh0dzwfksW1u7gptyBdZLPxABclHAVG5aR8UjvEfyNONl/85fC5uyXwxB57P9IUHDfWio/FhXlf4N2xkA8cvNu44a9ej5WS9EIycIRQxUPr2skgvUBUFe1T136thZXyehREGTi4GqZf3Ua+KAM3lfPpRKiOOZsqiLfQASclcaPHD6sFkw+ToSPs5lRxazaLuBSBffKYNL79I36wn4wBD+UAGFWkesfLu6rd6P2iDQvkdNoKUVlKnWkM0HzCzdzq/k8tXY1RLke1Jl+u26MbRyW3whZnNSRzkGS944KA/v3ngv50dIv3eUa1Hhuy6M+J5oow0mRJ+hbY6ay8uiSE1HimrmXMyTKuGvY2aQnmuSagYdz3TTzqMQNMtussTuNGFIywH6VAKOmUmfAb+hH3nmiAL3im1DUjpfIAN9hf1np+/nHSb3/k22kZ+nje7pkMoxxnDlYHNjVSwXitIfiGyYDJFfRTyvMdg5M+t36vWzTOj2nGfZfJNJhjZqgKix6CTObiYQ86V+OcZKPiT7x1pgW8m1YLzPiqOl5cAgyb3ONxASIc9gqN7ma3sQdbnIYNq/cGxW2lva6IBCbhYFR8bd5f36DdcEl9MKc4zhnYa60sCWMiiI78msM+XUKnpApBU1H15qPo10fxvZZLvaiEyw7liYbJcuze+NevwUyzNsYA7iFa1oerWFFh3VWOVFG4PfgbKZj9P6HNDriH+BXQXyG3/DAK/vatWkO0DR2kTCUvVlT+K46WpfGr3whevKkxK+iARxPeGjGIVw3NMwEXRIm9smznPV0b9oBOnQtG4i6jRDhiJbPN4dmIQAWu8FxdbnCBOgF+rgxDv4jm0egF6xCN6d6VHeUq3ourw7mKNCbC7bmGPdvW6QweMJXPMexnmHjUmbkYEhjbb14dwHn/b5DsDeT8jo0FuHvGJ5pNMXsTTmp4qH++ot1DOZBxORRAYXSF/AnXxC3/ia5b3YwjdwjmUE7Q/EE830ztmFAFskK1O0qU7eV/poEw3eAUS1viTbfiAc/gIGa2mi
JAo9122yuHV5yMToDbs4UYbhKHafgXK1JGXtrFJrb+/SbZ6kFAQgSq9CdY2IsxwuXIWSGfthhDjIo3w22lxyheZwmx7TvQYHBUUvo8OKAMj1POY4zscHvCgY8RIiUfvVHAVX+VqvBpXTXFwXPk1QHEnFh7qjXh32xSe2Mi7GxO3bJz78rt347EsLkqIU+lrl7IqPDzUpAGw/xBKgQ00v/aTC1WyPsLR7fUQlHM46FCw1B0Bpt86LoNyElfNdVXTpkvYDb5CywSH0a2ifB8emna1rPUIso0HTC74VkXDqHTSksGkWTmc/QDnwstsIjMZziUkhin5dzQCl3zhJLPSaC3mb6l7jERmwQJK1Auu5e9p5y50m9AQtDOTjlv6mNv++Sovdw8TE5VHO2FacFjDCxRcYbBxiRvQ5woUWiJtPrA1470h78TdkZgpGq0bpA4BYdCV/dyTIpGUeDDPQYhGlE0FErObvyV7TuVTmBDifsvgeRlkvxgN5Bw5Yd+agL4VwHXiR21CTb6kbaBeQJwSG9YhT3i5bJFJjZx1NBEqZnXkAcMkN6g6JPPA58KCkGj0vats5AVo8YiPWlYx4cGWgAm58U9087uv34V+BVZTe3XRzi1+/2PrhffJxtyLP5eMVpvX9N8BRyFUwHp2SXs8ZWP79Ai9uoJr0oNDiga9CbGoKu2YgAGPOXeY5jgn6WnVp4Cr8qgvLZKuuFs7KsuLXUczIRZtEuJsVQ3UZYW+SxIsfmAAyGIYcGHvNhjR2Cwh29WgvfpC5a96P+81ylyr0/cHoBgnlYUO7i/jad/zxDfbuaKbzRh4UpWouNG14yIO31awK/pqUPvEBtzA0Re7sOEN2+kFRSFjna40SYMfYLIKqhxrtj8tZiCCouTUplGtb0kcNgzjycBrdBxhcEYWctI1PYnyEsiG0X4H9+gLI5UqdD57po2MQcfxYkCMGkXyWp/GeDqXFBadhfYEB2MnwFY6nF2L7a+EI/PK8hBq//S+Ce0E5MrSVl9p99WZACM+qgWykjx3fdspf31WXX8yxjHj8+ZvO/gsT/NLYpo5+aIXCGADVrgG/sMRdC4u4JtOZ0xsckTetX6Re1AlQ52btu7NRjnwxy7uArBJ+GGc7YTKeUsow6koSi7wZSWuhWCBgxxr63zAMFnEJ/O1trap4wUMzzynWX+0Dd0oXYDB4ByTCtNfMRSkC7MCSq1C+P0lTtkGujzdEOXJxHmwvWdYVqt8GvHtEJm/Sgr+ds/7qG/TkNLSaK+TlKtP2F69G540OwBBOShGvg1o5C7m253tP+2CUnQ6gDL2oJ895qu90tWPPWO0gynmzn74vHlr2/EDve7sE/X00bMS65jtNNwrlDhB3pBHPr5rsj+25us0Q8ebXrUkMcwcPpZXr4ko1NplJC/t/J8/79H2fZDZQYGqfAmr1ooeCT18KYpc8hurrna57a+Uge7K+PxN4b7kqFCVj6BZbME/GMR2YRdcCI1CIWEzoriTpaUZBYIEMpIJEtEucmMeTqrqLIpaZNJyUfqBteFOU4U8FiEixOagLWmPppuW1QtY0835Mcz2hyMPKTzZbFWmBjlqz+LQn1BqlK1DjbfVvAhFft6e4UuK8rWSkX4O2faG1X0Q6T3D7DNA7bv7icitBA0Pnn3HMWfNvL++ZxjESoePnccIEmvE5w9i1maR4XXJFgmpU3rlIk61kEfbjlNmtYCLw5PbY90N7zHEIDtcssAp6VV5Aph152A1SUZR0JjLit/HZq8qy0Fbcsb8r2q7aZBUhNJE2orEts3OzYftwsRZEX0pitUC/qc+50R5OhYWoZuIfFd4a3Pr039ew5vg9dq/dQhTmgCmgS8hcPogj3WPmIAUfLnf/AOumWJrc5LUuIwJ4BXa8ch7CGXPKZljGQjFHcCCg+dC+jwXERjnKtJc7CdS6YM540w581F3cTgu1maS1G1Zg6LaBvriCJaXokd6yN3V+fWGMwNj04XXAqlC7qeLvthiL
DzAd/ssUfFrDsKXyKpHPIdoOmBlnugV5zvXxMsPo5qlmRA7HdDIvcZVC7VPrak4yCjA+4irHfRz0QyhKcS2gxZVHSxoER3g+zcwfExiTgEdIF1nHAPkaoJx/wLWJ/YzhPs0TAxBsdRUUPqPemXyMXi+5r/xaxq5XPIyo6WQ6E0m+yWvTWc7yXiAL0Cc7MyYzfs8mNE3JO7v0HE99PxPaMvB5nwl5g7qL0kfkW0j2KL+Uat2zQru3FL7K3qJfCaHwZt8bXZ7M+Zra8aLSLZ3mtRP5JHlFVUmUuwnXzMNu+l0lb2A7qWjPbLy3wQ6wlffYhOYsgNHDeUMjE8qWAMIVtIjgAJyp40GBumVtDiokTgEtQ/eNaP6Gr34x0M1Ca9Y3syJGJ9ahu1NzxVWwZ9RnrhTR78IZUjVLW8NEJ0agvuVah14JtgjFWw3B624DjdaDucIS26QizztYhygfT1Sm4ujXd8BB1Ps9Qoj5MMl2IocvdCnp6p3Uu0zec8qOKH4mTdj7qLlnHmniJ7gi6KR5gDV18xuLTsbbLzeW9ZnH92HctQNYWtABeCOdw2+byJDHKYNscwaP0NygDGbsZJZB6jVQsEbbBYVa2Hp4DpBPrB7eV8pvnoEzBnkGEyzjILTQFicwUl4J0ElDuJW0nyeCl2SXfZqIeecEV3D7m4Vk6zEKfiRF10WhAa1I4jX77Ombw/JyGtbBLrmGod6SYqJN39KtgoB5v3V2CA2XTK12jA31vS8tkFX69SHWllJHP20Y+7evqOL/dqZKFGcjEJWlgJc25JxH/iQcn4ZTR+MzNSyLYwNgSBZU9L5ywar0khK/aIPq+EseCc89thr1lSww/kYlDMmBzerx257lsI8yVRxQFDrAgBf5NTBS63huWELtnwROGG7VsaF6Z/f28Ww4QqyUtdCzeD8Wmc435yM8XNCvSK3pI+8+Hhflgo0YYHvRi3F/+4cFz4F4Z5+vrlHsAH6Uhm2xiBFf0vFVrGqYsi7W0rf/ludsP+H8I6yaiL5LCqyWfTnChlTdUbT+mH+FF/TbGruARbkV8wklIZdJBJ+GO2xvx24jkDPKHEqOtIrjDiJtHCxvISFvbpFF4b0Z7q0TiHrVMOh7FEhVaAX4labC2mgCrz/Hwt8KxNfCYL+bQB7e8dl31dGdLpHit53uMoYwfe5wloinylpxuSqUS/SmIQna1oHyTUV+o5GJhb3f1YfWjjtH3v4W14iMAJZ/OsorbwOcKHx/xPPyuRtn+9xzVfI0sFQ4u8TTkRK9aLQVVnUaouHSFQpvA2z9oo19tH7cndjn3zXZ7ndNmTPih0Mm0cMhvQySN6ay0+rFXYTUxFjSQyHOOM6oqe08GluPw6YBw3WqVViEkYoHKJ8S7aJQvb6o3DiG9NsvrL908C0WjYFPG4WgXXan3Ud83H/+P4+POsA2ddzBTlfFF0Gol8ub/EJ+uKWRj/Xq2ggh1wJk/VjB25bssfe/TWOV8q0ohq3sbFGJrRdwN5pirUlAePAKUneRQab7VefwAMhsuOrs3gFOyMpyTkHLoKKt1PCXNQF4hQmGiWcrodBCHDmGUmTL7ziPlw2W/x5frcXXcDBYOJ4pwSFUmicplmtWQ79PJHfi6eQkpteRURaLnt2+bkd22sen6bGT7+lOLiWHXuZ5lkn97yzBmkUm1F/7hszD6oQtLjDduwwpjbkIqNmPzLSYgJtwG2VhEGkY6TucjJpSkv6uP956vlpNr7yWK+oHfCiBufAH/7dQUzUezymBEgrwizQaqvu8E42qZutnHByerVHuN8iKORTOBGH0fszEm1dRfQEKN93dcOoAl20VK/XLKh2z8HDZjnqM8Lxi7d/u7eO3Gpk5RJBeuz4l/PGvgN5m4eV5PWp809Aha2NWLBGnHNcbNI08E3dZ+xWVBXajOKvtZatMzdb9BfN4gtYnMFNA2D9I6zAZugW46/8CBdqrcmVkrxnepS4IpxHIc9YS9tTXw1lFaPh7b4YG
21M9Va2XGPQqoC3Lgi3qI9gYQKyo7uuNqQq0ZMG1/RpGCto1rMLMnDqGQTun+W6u7t7EdZIcMtR4lWPBtlOi/M0pbvMg3clVGdaVjQIty8GGY0HQJ5UCQhSof/tO5SpjuEqXApW1RJqOA8g8aKOY/2C2+gYRs6xBUscwyl8M02tja5ToO/d6ejFROZVLcPexmYTxXbvYiCc+pF1U2IgqJ7cOt0xmRitGBcujQfFtU/3tzEbr5+W51/AZ3l3RrYboQuskFByHA/blR/DgyWkCGkiCOvuV/QidMMxOCqsfqfSyV9D5vOhOJ/7cqx879/19Ow5pPo7FyHLuuZ86JqSSSsxvfRGzu48Ii8zhRKFBm8mkr6MNfV+BtG39WW6cedZUKSbmmOoPUOnTU3ModcLIZQXBHyEr2t+CEj0BvRRCS/M0hJQOHRQwSxm6tPrmY7E4eYBLhf1bl800j8Cxx4tDgFGGH61ZNct1cd94cVXWvAqNxMnEG66FyKJ29J6LYRAo6ijSb7MLEIYxvm4Dh/H2QahPfZkN/zq3wkMQogmPx9AlLx1j5L+N7iaA3kCijg+Aeue3QL18lSAfI5kKl95lxByERpn9uLzTX2Gi+27gL3DhQFx6dmdXwC2x2M+MPZ2uQL6rTOO3jR4CfEjYKZxr/7ez72UD2zTCSMXoH3L1Le2qPiaRhAmL6F/Olj0lKJR5aJ2gfe92mlY0DsA85w+ybrLpcwJWmmSN+GpXh+io5jnUO/Tn/rvdyIMzAXUTmD81SZhvzB1sTRmiCKb8ZKcPG6JW7sDrUFfUYIVfGOFg35J5IuLtC0iOQ07I07MLtCpbFDJeM36bRmupK6HeqY6mqHG0/JYrF6v6jUWNpFRtJMutLv1WX181+7czfQZSo76auQCdBw/sz4eQPyn6j7d/vUKrT6LBponrLl3Vi14Ce+fZt6xbmsuHNwBbhitqVXd+fiUIiYJ+Tcg2GJzpjQGvMnJsv5/TDpPs397ufVFtblCsvh2TYaJZOJGh3yjJ1wE4/AeRw8dQnDACuhP6oTqg4F15LKZf8XzHEQTofk9OIUXsDP+3y+61ABjE6C0SRP1YdY9BIg5QCbof9Fs1tfTertzFLzcSmbBb4h4YkNBvk0XgXwzcL4r3HJnKZcc4hLYwWX9X5ZnhuaoMvjIS1CDkhtfpxOOfr+kzc9G5pPDmOEr6UAicbn6h28ySlVI9uR7AEXAiMfzPFtgPTmJeD/gNoIZM0BNKKHKiQ+UFsTOoHgivy08hKfY3otbuOPjUwvBJc2lPQ6IxiTh45G44/q51AFf14lgQV9z6Xo0+03KW+EGBSdntHsujQmOjM0inwUUeRT2oIxWFWZk2NSTP11djU69A274f6ucLYNXfWvbnd5kHnR8vw0qno+PwX5kA4B6YBs44TZQxG3MM6CvAmnHgC0QQYODcQG2FJqNqU/vNi/WlvE73ibDunf6uzZ8+Mdb5KK7B2TG+TA+8FWpsnr9mWymrZFICpL5xIO70hHD+1b/HAQxN3YcNgSW4puVPlNq0bbjng+SO8WuhHAn/Rb0+rv57fogFW6sZaJmIbxWRefi21kf/PZJMPRBMeKTe07fHN5lDOhHprbbn3QCPAu9d/DNPHyzQ0pN4u1XP0pvLv5H5MAJBsE+T3tJR/+9ZvinBU9+MWADPQb+PTVNB0LS7JAfVtXT+hTztfL7+ixrDC3lbulk1OqRisUnUTP+cDkni46V2Qw1JgVujVdKz39X5sNyYFE67pdF0fvu/Pd7sD50kMjubP2tiy7eo+BtcJQN6nWjXOrmYRKwFlhVqGTeP51sd3myIK3aKJkYRHGIqmNsU70v9YT32HEqWBGsof5uH68c/2nLTkQTS1OU+hdsxMHoqGA6AK5lQGTUPEI2wDd8NzNkEHOQ7+plSVuqX8vb0ogIwjpE9zm8avagPMrU7/n1/JxgYXuFLbVr9/odwK4UBq8+jUe0ib5nApHnSqlMZLNvkDkpL2aRwjwtH
sIZClAGWYj2U/U3Io7YVcJvQf/4j0bVdmPAqHUaajBxh16zYiZp82H534YvnexmPkiRR4TZhnQctyNvzyYs3Lb8y6wxXtd+00ULl08TFYAQCNI5kSSp493GhZQ/FvlZxp1olM8R2TzlBrg81OfcR5DjgxyL91vkyNWRVjMvwwOL6gvnQ05ua3sJhlzlPjPNH+Bgr6h1DW6gcycVOuNLyTFDUMuqvGGky6+MYd14B1xYJEZ/jW/B+mcafUq5Hp0+2wiCPtvrMIFRSe3DYG6zXhnmu6p0E7PVpVshKUyVRpMIYfT49UBeY6D1R9aQdhvcyxOJ4jUwheMFCxxyUgQTQ8ff8wJ9We7q2n2GaMl0d8VYn69Mqy2upYsYjaQciIKXrFXN7TVJ66hSZdDVuxz9PnsTa0u4TXHPHbyzuGKZQ92fnXw5QyJehvsqH+C9H2fjdUtE85v4U57mLYpX5wPeEcBwWvvHI15dCW3QiwmhovAgvXKzvaU4OZeyVirX98UCQlmTt2fhxU/YAyXew/TYocPxbPZ/CUVYlx7hc3WuUAtXYWvZOkeUmbvfQYLdCtrh6WKxUn4ULklysMG1K+Bum28EVx4aFMk5pjTE/53iY/JEdsszUhTshNeP73e3Kj6kmdwwTh2rUM4wwB3v9Nl9lnodvS7BxMGSksqwP6QGfgxf1Q/9gl+npqBd8/lgpIwFOzcu2UCB74NrJmznLv+j135+knTNQY2eaX67nIeW1io11S6gbb+J5Ug/kz4OAJDIJEUSZj2GrSy7DlJMD2ELugYDPoT6U0bt6BE4eYG301DabSVt7/geWqrSe3mLPRF/g5eGXRqY1T+FQjJfhdy6U7/m5M6ddabFI2qP8e2RCBBpVqRPtOXb+gGAQBgUFalGL527rFpjmyaMfJcQO+lRVUQu3BJsex7uEpVYo3p2H0P5Wm/PDhiUHD06Tp39Sbv6nvevqchMJ1r9mz7n3YeaQwyMIhBKSkEDpxYccRA4i/PrbDdKsPZLX9u5od73XYz9ICBqo+rrqq0DjWmdVmQrYbnKemYrVzSM2SeGMB/yKEHXcp8RmZYx9a7Kp4KMDRAlmwr64wM8RUzCsvI+2gDlCByhhUVYtXDjdiz7qXXC+OmM4rlBRrkpIoSFl3aHtfZqh0KEHK47s0DOI5+gR0j8ptlulgByrwDkEon2qFlSdzxRd09ZiuZ7XaJ/dEYDtBGaM204rNNYm1B6dngJBoi7zrH+FjXasR+KgyxY+WKzZAbC+EeZSl8OKkbhDdTG0XVuouzkCEJnt0PqEGr00xvq4l/Qo0eX5ua6a9CLSGHIZA0cXmIRMJpEV1G0F4t2ilB3ga7ZnoSj0ya7hcPhgVq5lcj3v24y4ayQCLCvHj9y9z26JOepC7ggLN22wSeqRhR6ck5zJK1yhG/UwL5ztsRZSZzIWYr0mUolbSK5GDHRxaoFYVnQVGc0UBKvd3AF2eckbB+60WIp98Io22ig/GetLJxGnsLFGM9peaygzIWBTVtLOs3RsKFyuTM6rIVEme0CUynRRHfXpglw6IMSb7sLZZlMc4+5SjlyqQIrRTFNISj4FgBEknQ54fpphfZKvGHXc/jAagm0hOQAyMOLkaN4vpgPDCpjev2Rb/lKet84YhAV4EiI2JiCtS87zYKHr+9jfkZZ4aBdqx9kbmmd3vbfnilFPK4RmfnGmNO/NLQ5wp570kMwaHRvbfJ57K9Gnw3mZXYJdJXMXJ2uDabZh4JrFXgu84TKYrat1S1oDhwBeo4A+l6NmRpzkEXyVDdFUB65fyWKCVHzgMki5yHaT+OKcO5ZVt26OoUd3E4UVjyGixkJO6i+3Vpwth3wj5yRreOeKnekUZYI4g10RuOABFC8wlw3Lfj1KZdYUeaPBhoXzJZXsWN4B0388GKqqRnhwcFTf3+MpPSdmhD9MzwnXQieh4GpCgPloRhq5qhCc5S5r6hjKobGlO38GF3ceoyRwtwf7KJe6603EqkER22bP
9iU6g2BUO02R9aBxn5u1GiccNjOrXMWQJRhbmP0/O6h7TPBl56ijIly0Szm24MCTLvBSXdu5l03EYfKWoC3NQxCqiEvqoK4Ke5H0jJ2vl3BeSkHiuWLKLkeAnh9YLDkUPLqGz8cAhdkYkIDowWJxzwxhbb4CxAsztyQ92ZSXZkPRiVKw6hBXlxy5VhVObxG0Ro62CgtjPtK58dQJRilNWzvEzn0ervMA068FMW3nfN/0v5+5S76JqFRTNxG51pFjXBLMZtMPuwaCBZd6DrvdkjzbEmmnu7zEV6TAHoXjpTcj4Srtx+kfHgOwG0tIhgqYzx2E8QoSlj5vwsoqpSYpTiy4nguLoz5qifUmiFPcAufdhZZITsC4s3Gp0lSH5WV1posMnGXTXM6GLXK0uQe3so1VdcuaaLIKTGw21szeYU2BIVmDmUXMZvbWURFv1MyDw0kJedlZ0atjmVVS6G3Fg7vdiq1t79F0fijC8RwRjWVaLROvwapop0Rh1vG6NsSBgHs37ZmLje3MtGAqRo3CyRjfrFNRwQuY95aJCtDynWIQqb4IOktDpsTcsVJEiTxCZ1UukAIKPuSpmFIpYhuv7t/u7o6hXEfQBYxX1IyddTNaDztAlwqPlZNF6511o8h4KaiXGr4s14c9lQNf1s2NfLVBMCpY1+wyqa1J6q3nRT8J+ISnZRsQhKlvLNSstWKE2rNn37Ux2JIvsYAUySAOMzbboBbaJptuGNX2/ON+a7rqljGUhjUjvYoqZHCny7S3o4FMJ+czd4nZdm96M1Ovdow8JyZko2e0ygnygXUmOpLkvBzIgP6bEY8HmCewNXwhgtf2Y83Afg3GyWJZY3Y8FtkKHGuTfkAcNUwqg440aZw9+F56kTuMzMa+XEsu0RIivTs0pBVfsnNzfMtbCbMg5PhAbKZ0wy5z0SoVEtOwZj5fXrYzEOFnqrVTiWaVw0Rj5zPbnOGnk4luRFVco+QktdaVspyIw/SR1BHNGZwiza1lfijJiyZo/lmqFUuA+QZR3WMImH5URuhY2O0BnXHHMKsWZJ6zaVgnPFaWxjY+NcR5U76MADuWlgIRh+XwDnMcVaf4iHUrIdt3NXKq8pY5JFtJW6r7ZKEG3IoODc8qg2RCyR011/nMniZX7jGWm6Q3vPMZkq0DOd3PtDTXrVKORtUYevA1xlyOCRbiF5ESbHO0nuCVBXS73ZqkOkq1DlylCK+yQXx9uMpkZEK3IRvRBhMWsGpjuzSqM+sTmL2svlHM45qz0crwqRbflYtL1k45sW+vDouxZLYIBS4FO8N+/yz058OoIkeAeTg6usulpKc5ERP6+WyWFWxq0QXOUsmoCsYufdLGSwVD8hLIQZnWSdRNZLo4Z65EL/PxitkNFohzD31+VtrUp2DDWtKxEnYwNJi1Aj21u4ieR95O05bN9LwWGKLEJ4xS+8b8qNvnOWf2dmdRVdZsOlQBlIXrwSqAfGm9/rUWhLVIc5uv0MUWznDFcZkiGx3jEIjgvIdiyQ8nXjhvcYWgSrzwLwc0WzGZvDkd+xqROxP7uoIWdp1rFaOgaFe7rWWJPGWnvq5faEsKRP4Cc4DlMUSRXI5Py8nF9rfzq0cYAs5ZYMbmeoty9TS/XFxlfRQ2vGAb8WbTtRvqiE2xVDvClAd37I+CL0/lxh4XoAruHMb7RYKvg50M2OD4uD92qlrGzSLOm3RxQnalKZqbIZ4KuIznaw62RNhrdzdJ65OQkLR6audjhtt560tZrkPntEjx1kbTrRouhwOPE5LWuVEbbem5sQ6QLmxBfCg6GId6UiCXq8lmYbGrfIYdNlmKe8144DsyBzNf0wrwbma+VutIQ9CF1U5oTqsLNNrHA4aQCZAMQBbjDcDvuN0aGOJ9mK7dlGMikuXTeOiPUkZRjUgSnYUbdsaqWClCBidqw3XWElp1Sl5LHHcxYJ/nKrCx414Tl4GIn/XO2TckjrND5rfR2arban6WG40SOFZYLqNkjlAHpE6p
GToyy23Srtjlfj8HrsowzsdIsWKqrJH57CCV02K36bUocDmE6tStJxiVJLM+IbdbhbOuVVlJIETc58L0XDK8cLHC0WTDO6yxU6Tjzhx8gnBwC4Vw67AzZSKyVyq1ADTZGq8O4kwZyfVe785W6Wld1a0FRVKUwUnNGSmYaJy4TcomW18ihzWDM9BDtkY44rQHHGAlyNkirYEAl0Kd33wQNy85aN9dcFOLk2SvabE8mUcSVXF0E+2xyNtISCS4LEHmxy4olpe8QwSAQ2lLyV6/bED/N9q7PVPcLIL8M7/D7nendu3X8tqWUuaobIE0JsF4jq09dVZsjP06Qbplp+95qRTc+TWuEGvADzKRvvRvgIcl5/xckludmMs6XKWAh/EUNj6WhwTy+FGUGPou5OONe3KZFbamcC/2AFdd0Z6heDOh94zC3qOIPTcZiZla5tQMQ087tdQ3s0ndv0xSYAlsBXRJ6iO2X+Es6/vHLei3zR3igIiwc7cOiPSgTgsSF4izuhlq25Anwmsu58EKRep1N4pcp+ds2UXTZzXXTFWgyoo4qmzcqPHcA4YSMi8ptYl9dA6PB5SVD9Hcz1KZMUAk4LSQvfWNi9DAZqHhh4XJK71j6StFHN+pnYChwPPNtSzptLXA6xPPp/eXUWooYbzSajmo9obnK9OE6EWrzGHALXL74jCeODBbmpNAI/h5WTr4aA6Qu+JncYILB79wJid0PtnMTGI0ZEY3Izh/14Lhxig4a7cVA01xtcvEaOBxm3h14MogutDTaxW0T9lM+4ztScmbMxALFuarPeTJy2kWnJVhFUXUFLyKDB3H953xSXeR1RVTEpQpL+1ORxVhy4yfCtoGBquRJrpHh90rRGwRftziSXzLrnLrAsZonGIvM4FgLov5cTlTUE0yJyeBFE17CuKcUSy1hmYWvhugvrTiBnwwMDEL7OKJqqYQZOXFVKpFFJ6RjS1GM1TmBVjBA3T0jHAaKxba4J2mXDCFuYdZStrn8yzfZkVAB4WHuIu1qND0RgWWVFaJbprONulpQCNfG316cRQt0xNJLckTr6tyo58bxTfAzN0Q2wlHU4uu8g+bPVpvmkGkgguXRAIfUsA47W1y6SjKb2snh3engLtrsO0Zbc8SAE0FDYB1q7rpPAKTs24HkLxgkU6GiRFiGq1D34h3HOzhaIzE0fP9bNsAq309H+fU8Dp5xKDqLKFDstxtPMSuJ1l9tIEjw9rEQ5HzMpB9b2UryNX/14BV8NwYrzLnoOLBpGvnJ9M0qBW6CMq9IDQ1sprojh8iyqUgTGUw2VPBhbyBy0vytEdo1Sp3x+yiC6EyXeNzzD8YLUzqBgTlUlMLH3G7ASjccGsruoUli46pR1nEFtOEGUN+zhdiHLjLNMIFEfgXG/WbzRDdukuuR8vRWW88N6fMQ4VShux0PEkQq/kmPs0AeRCmgjhN+cG+KxK8RJ7fzw77pbW2Cy7Lm2UoCTlyNpcbu+M9sliml9ZqbUpHXUNKj7vptfuAk9yFCyy2PFUuS9TZ7sgqz5az4FLVExnIExDjFZUafpozdojQ80PDDPO2rxkInDxKdMCcAJffaYeSnVpbY+4EvErI46poJhPJJpxIIwSZWuCTa970zPnQz9cxbE3Dd9Z6IrVZ0voqYHjH2Xq0pSaWgq+13Nq7g1QS3uzPdp5njHqAxQFanGbZZbmJnYXeTmvbnghHY7lNDqvEkU7W7LKbB814OJqbIDDhxHnBCT3VGFuAgCMLcElQiePKGSnSau8qm3hz5d7DQSLvDBrsF9I75WltROMSmj89w6GXkfkk4D2aoPsXRCUMOO1ucTitYTbSrYcTHyEVq0fHJqvZ0gFOKFwIhoHUw5PJjXQqnK0AazP8VftrvreGqrwP1DVsPZQNewqbZDIrcZutvRRjr54fmHX/OAbMwi48mUoFuILLMD3k6aDPuXZdTyFPKl0LSSks0ph3mqRW7BVXeHVEmocNimNv85iT+nksqIwfHnLC
icestDfP7LGAzm0kHbx6hXUZPOkakEmX7vzoaqm4I18DDEo81daXnDqJ440FdCHjI1M7L6AA4f3WkXVsY5hBerONsi5Ai7rnZk7Lk8Ze7PJtkyXbyil4GhEEVFytmD1RzU7CODqIq+KaAQBBllJxPDdqQvUgCVicq2neVEgVt2nkuFNrUiYCiGBLgQRh8VhUrvqAFQeFK6jTOZwZ2zabERk0rJsY9oi6G2CtMMyPeWNn0rtsI06Zq9+QawFKtZg3y9OGplqqHhUlMMq+JvGRMulrRwHMkWxa+BbCHX/LVskc0yOXCUn9cM76emB1PvWL342CTqxWrSZVmLthTpGWLYxFcp2XM3cBzbHIS8AbBmkcxmQzNpn8KIsymO9AKPTBOtJ7G9X7XC3g3Se3P0C098udZOLLjNLmArAdJVczzSIvAB/JU9hAsWx1hRiTzvSaU6+XNWwOEnx6HmOIBcwVu3Ndw2xYsXJ3YKILQc5Ajqya/lJEzKvu6jWUiT8OLrmPxQfc2Xnjjp5aar/KFZ/rvHNJ4r6AmIEpDLzwcByvwwDPnfr7w40D77Tp6hJtgC1hAIPhFMNeCCdDylzi2LcGYKFmZfXV7/PuQukndLIuU0VR2SqvCZjzb6VjLa4YDJGsc9ghhOzKJCQti9t8HCcqnI+cNl4cMgkWYXaTysjkSQyfpzkJU8AnunhWY8e4mqTkSumAFdldrcgKdvGCOFLgHSOjfFhP17LDBD6CVk8cQCj9PTGxtBJFcCRAzjE5vZ5VVCRYHebGoYKA8JMt6iq5LKgNFeYzJ3JLRhrh2DEIBMhLVtsMWhDKvGYopsIU8lgApMaBkVG5z2CvjjbBZCBq2GxJtNT4YnuqtYSVDG7IaIrjM+xo4nTucHAWkNNYvrhDL1GighCymh1HQ3dgtoMNaUVAdPN0h2Xa6tpZIU6G45nlWovVA8KmSQv4EawNTHjgO8SmPY8FOVonaCGlt0q4str0MppyQl8bPkqER+vw/X/jmOGIswBQBUboGubU9zPCZ+ISffCYwrbnLPxIxA5lRp20XbrPva1CEBC7S2FKhxFxPI9gAMNfJStz8CJHI2DdsjQugYEerUYuwzrYzIDv3By7Ys4M8+laOQLjjwJds/14fZpvHX5+JK5DCTxE5dufFBdEULx9dVfHXh63P72vE93++PTLY8/P2VfsF5pw2angMXLRimdviNNnG40U8/PMdd3fcKH/Dyw/kDqS6rkdl3ALhnXi8eIsQmemNWm1EM1JEqIv6LDfxc5Luxn26zfh4m/4KGokO4nsEi6iilx/JdjrIe31O0oN32sfBIHXERBk2ObZvutdT49TrwgzbNaLYZP7Nnq/ZGN/TliXakZ2GN4uof+MIb41HJN5pNjyqjVZzPbJp4thV5fTCyze9Lehh5V9HbvyLXvYWpRteN0KbhJcDu+VERhfQMHHosyTsz1KwiQHW+IkBnvygAWG7zbpoe/G4KsJ5GmD7TwUmW/qIXf9IfItC56Grz2/tLepbsJz1jnAIQ6dcGzZ8BYQOHwSl2M98kMow7GfAzqBjBLLfqLayJtKrmojMeRebdgDtRHIs3SG3+nsTl2Fp6fwox/prv2l3t6Lv0zSz7YudMMO10nhl34CfzVAnJREYIcQ/sDr5tntVXJT8m8Y7vR/YJf+ZFyR2mZ51Zd+++L4DVQif70ewSvLFMiFG3ICphVjr76ZxI4PlJ2/mgl8PMzS+7Xl4XaYVtfzPKmLFz22XqrYB9db6OGLbsLrLF7gTi+FDc0iBpvixlGS2y8oxrymsfsOl+CSRQr+ew+oRWX61gCpuEjCZ6KKQulX8gtc4cw9rpgHsGKeBivi27D6fTaiX5mvnwHtWbJDEQp5J7zvnpRvGz9efOSd+KzEdOHjZ79M6V9X3POsKfWTwJ5m/o2op++kV3hJ+uKECQDuL+T/dd09D/jMTwJ8iviTDOyJsGfvZKebph3auV7a1stvGBVCoMO+XMqFnwCdqCBTebHs
AgL218z4q8p92ry4Dfyvnxcshv450T1vXhDonewK26xyv2x/Qf4v6+15kL/PAtyp66eLKPFvRpR12iVJBEzyxQ6T1M4/jyL7sH+MIti/OIhkyXe2k2TugERh9zi6bft4HP0XMxPfxpEbtqmXvDi5DZ+wo2ATNECTk+SRHpv2ixkm5vkFnA8wg38vmCiK+hJMFHoPJuYBmJ6VmiS+Ix/x04EJ/SaY/KR4QdGXHlPwO1xhd9zHVF/FDtL//YPYYW756TdD9Jbd+hw95AP0kM9Cz3065g493wTDN+F0x0S+SWE+V9cNvVHjAmrivepdlduvhZn7afkJHASPTRMfDi1ewBmK67V8S813cHyW3lGEfad46p7KEA/UTlKv2NM0/7MkdKh/XVhL3GdzXLss/dh9KUod3Jf1i8X/Ve09j8R/Rz7n/4u/zAHjssuveMwbJP8pZ/m+8vPIaP69vvI+m3XLYEEhfQEhKquS2w8vhd/ZvbYQIKjm9x9vWa/SNr0YSN1t+zEASorbwOBCh7GHPZ9gV/JBck8yK/Cn7fVykWdBBQD7SyuD4Q+sDP7AylDPsjLko+zYR2AltPU8Bp4G7JDbRVLlpv0LLD8Ilnd5JfIfB8t9OvBjwFJUaZrkcADgE1KgSkiOf4HlL1gWnH3EX9i/Eyz/xSQk8k3+EunpS5iYel8dAtTDt16GG8bGsGFsLOvpp5dPi+sen7Zwj089Pj4h+GcM50sQfxFm9r/sr3rF7gPStzv9ZmB5lW1oO79PrSe0XKHYe4pEYA8SCuwDksQ+iySR35HbtGOLg11Kv5uMzwD6LeHeKZDt/95ZBPTHhG5brv2HIv9MnOQD13DbltshwN/l87Eey/h6hjVMW/yBuXnrqruNMXj862G/q+puJBT71kggUAVB691IvdLfbvwv4OCnaZN6axW7mfgHccbfnGAg77NytW0Aq/yr4fQD9Pa01AL5HSm1R4lK4K6t3hpCUVp64b3J9UfU+rkvC9/58Tsdf9XRP+YHSVWGfgzOHsefufe7rO67RG3iOL5pA38em3ZaFq83DP9zSQUUR4kvQEMg5AOPST4gdLfo/+NRc59N/Gc8Jg6XW8nbw+dfjlDXrzR5+y40V+UP39rrtx/T1+B6/kgk7L/KLRPvLA353vB/r1d+PxD13hQ92yl/R+7zFzn7XnKGv9fenyZndyM9GwfPymSC680TqzLLBEry70o0PJnBfJlpGL4/gviznBbxFaPxefGQ+APcf7jHuhGtD4ePbl1gK4j1CzofBR0G/SZ2aPQB2/kI7LCfKD9TBFM8KpXBT2ktOK9fvqNr9D2NNNIofi3s/ALI5KdSL85fqvwhN/0ymYM/SkY9S+YY/j4tQyD3PULko3L/ByRlHgr9v5gy/HbJ00kSq38Ozsr9+PxyScKXXp1jEoXyRO+ygje4EE9EB06/qz48iFrJB/1j5Af0j6mEsl8w5l5jbQsx5E9z6sQ8nJA/aMvJR7b8rbn/tmG5UsX+gDfbbvy+LyI210oFuEEMWS+l37BRPza4PjAtENiCOmwqcz0urorBEOMNYODL/5h5kv7vsFthh3b/1CP4nMRh+/pzOZW/iUwwyIOkCUo9KWsyt7GTg5kyO8ePEf8ywptC/BGPcDNOH2F7vtKp9tD+fGGiHlsky74Aa1RfLVG/T3G1RS8zwAmuhuYr6vpeTX+9zeZLq4Kh+AO1PvDzbxs/XK2Papr/LbXqsVuFev5kzaLsl2QCQ+671x+Wq982fjj//3r0COxyfDOsveiI4DOz+/mv/1Kz+08XnN+1y2GPzPPfmtNGv84P7pUN21J+afsHQgXkH1T3Q6v9JyKFn8tqv8ZAv0822cQ7jo+yD5aweZSxQZ+m1z/xxNHPpdeX19gun6xX/B15xlDs+6br8yrF6KMJ+1XzDB/Z+2WffyBYf6/wf9wdY48m8lf1DUX1S+E/lC/9tznkj26j+TLThT7RWlLYn5Tlh1jLh8L8jv6Mn9wLNnqk5378
9GzD+xQ3hjxIOOAPctxvGz9cuY+eZPyaZTz8l83iv2gqP80sot/x3CqckOlXRWIO9SLduO3+Q07lD/L7X4qKQIg7UVH0K42hDMLSLMOyFI3fC46kXkmKYCkCQQgUxemnZem+IzC4M1Qf28P20b1qdxPve58rjgpTt1/7o9LcL+zX2s/t0C6KT1es9Kd/X8MlTMO0zX7Klvr1Cl8w5Hut7x+g++sYY19pCqVwjCJxBNji++oiRr+yFILSNEqQLE6SNxB+sV4q/koT4BcUZ3Dk93UMPhxi39He9qDY+FWl36n3Tzvg+4b/P3g44Kvwfo8iPynovjRZvJqVYT8FB1fbAmBAYiQDL5FGWOzBsiYEMCMkgiEEwgx4eWCiqVcGxXAGWBoQ7hDk7bnwH8BB/yQpLP69/SZBUcjAQ8E9/g8= \ No newline at end of file diff --git a/docs/devonfw.github.io/1.0/_images/images/further-info/devonfw-org.png b/docs/devonfw.github.io/1.0/_images/images/further-info/devonfw-org.png new file mode 100644 index 00000000..23bca7be Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/further-info/devonfw-org.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/further-info/teams.png b/docs/devonfw.github.io/1.0/_images/images/further-info/teams.png new file mode 100644 index 00000000..b486cac5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/further-info/teams.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/further-info/yammer.png b/docs/devonfw.github.io/1.0/_images/images/further-info/yammer.png new file mode 100644 index 00000000..8fb51547 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/further-info/yammer.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/generate-component.png b/docs/devonfw.github.io/1.0/_images/images/generate-component.png new file mode 100644 index 00000000..778a70c7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/generate-component.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/generate-interactive.jpg b/docs/devonfw.github.io/1.0/_images/images/generate-interactive.jpg new file mode 100644 index 00000000..54e97b52 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/generate-interactive.jpg differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/generate-module.png b/docs/devonfw.github.io/1.0/_images/images/generate-module.png new file mode 100644 index 00000000..a0c9ea0a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/generate-module.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/getting_started.png b/docs/devonfw.github.io/1.0/_images/images/getting_started.png new file mode 100644 index 00000000..8f3340bf Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/getting_started.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/github.png b/docs/devonfw.github.io/1.0/_images/images/github.png new file mode 100644 index 00000000..602e9527 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/github.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/global_arch.png b/docs/devonfw.github.io/1.0/_images/images/global_arch.png new file mode 100644 index 00000000..97d79d95 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/global_arch.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/guide/jtq-screens.png b/docs/devonfw.github.io/1.0/_images/images/guide/jtq-screens.png new file mode 100644 index 00000000..d4c93cdb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/guide/jtq-screens.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/guide/run-mythaistar.png b/docs/devonfw.github.io/1.0/_images/images/guide/run-mythaistar.png new file mode 100644 index 00000000..1a139198 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/guide/run-mythaistar.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/helmet_white.png b/docs/devonfw.github.io/1.0/_images/images/helmet_white.png new file mode 100644 index 00000000..d2007a19 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/helmet_white.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/help_icon.png b/docs/devonfw.github.io/1.0/_images/images/help_icon.png new file mode 100644 index 00000000..a8c7a71e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/help_icon.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/high-speed.png b/docs/devonfw.github.io/1.0/_images/images/high-speed.png new file mode 100644 index 00000000..2799a180 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/high-speed.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/home_page/dolwnload_latest_version.png b/docs/devonfw.github.io/1.0/_images/images/home_page/dolwnload_latest_version.png new file mode 100644 index 00000000..f9020857 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/home_page/dolwnload_latest_version.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/home_page/installation_options.png b/docs/devonfw.github.io/1.0/_images/images/home_page/installation_options.png new file mode 100644 index 00000000..5a883532 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/home_page/installation_options.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/home_page/installation_setup.png b/docs/devonfw.github.io/1.0/_images/images/home_page/installation_setup.png new file mode 100644 index 00000000..41991c42 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/home_page/installation_setup.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/home_page/installing_devonfw.png b/docs/devonfw.github.io/1.0/_images/images/home_page/installing_devonfw.png new file mode 100644 index 00000000..ca80740f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/home_page/installing_devonfw.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/home_page/installing_devonfw_dowload_completes.png 
b/docs/devonfw.github.io/1.0/_images/images/home_page/installing_devonfw_dowload_completes.png new file mode 100644 index 00000000..eace4193 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/home_page/installing_devonfw_dowload_completes.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/home_page/installing_devonfw_download_location_set.png b/docs/devonfw.github.io/1.0/_images/images/home_page/installing_devonfw_download_location_set.png new file mode 100644 index 00000000..ce329eda Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/home_page/installing_devonfw_download_location_set.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/home_page/quick_help.png b/docs/devonfw.github.io/1.0/_images/images/home_page/quick_help.png new file mode 100644 index 00000000..861c3a37 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/home_page/quick_help.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/home_page/sidebar.png b/docs/devonfw.github.io/1.0/_images/images/home_page/sidebar.png new file mode 100644 index 00000000..f9b8a23f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/home_page/sidebar.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/home_page/toolbar.png b/docs/devonfw.github.io/1.0/_images/images/home_page/toolbar.png new file mode 100644 index 00000000..1ee60cef Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/home_page/toolbar.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/home_page/toolbar_workspace.png b/docs/devonfw.github.io/1.0/_images/images/home_page/toolbar_workspace.png new file mode 100644 index 00000000..328d76bc Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/home_page/toolbar_workspace.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ht_image1.png b/docs/devonfw.github.io/1.0/_images/images/ht_image1.png new file mode 
100644 index 00000000..d2e3333c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ht_image1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ht_image2.png b/docs/devonfw.github.io/1.0/_images/images/ht_image2.png new file mode 100644 index 00000000..71cbf184 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ht_image2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ht_image3.png b/docs/devonfw.github.io/1.0/_images/images/ht_image3.png new file mode 100644 index 00000000..c9dd7b6e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ht_image3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ico_flexibility.svg b/docs/devonfw.github.io/1.0/_images/images/ico_flexibility.svg new file mode 100644 index 00000000..a4c54c4d --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/ico_flexibility.svg @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/ico_highSpeed.svg b/docs/devonfw.github.io/1.0/_images/images/ico_highSpeed.svg new file mode 100644 index 00000000..c944092d --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/ico_highSpeed.svg @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/ico_innovation.svg b/docs/devonfw.github.io/1.0/_images/images/ico_innovation.svg new file mode 100644 index 00000000..9f2dfda7 --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/ico_innovation.svg @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/ico_quality.svg b/docs/devonfw.github.io/1.0/_images/images/ico_quality.svg new file mode 100644 index 00000000..5e2a8375 --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/ico_quality.svg @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/ide.png 
b/docs/devonfw.github.io/1.0/_images/images/ide.png new file mode 100644 index 00000000..ec3fed60 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ide.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ides_page/2-ides.png b/docs/devonfw.github.io/1.0/_images/images/ides_page/2-ides.png new file mode 100644 index 00000000..0e19c52c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ides_page/2-ides.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ides_page/3-ides.png b/docs/devonfw.github.io/1.0/_images/images/ides_page/3-ides.png new file mode 100644 index 00000000..934f77c6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ides_page/3-ides.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ides_page/devonfw-instance-dropdown.png b/docs/devonfw.github.io/1.0/_images/images/ides_page/devonfw-instance-dropdown.png new file mode 100644 index 00000000..bec45d42 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ides_page/devonfw-instance-dropdown.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ides_page/ides.png b/docs/devonfw.github.io/1.0/_images/images/ides_page/ides.png new file mode 100644 index 00000000..0fbf05b2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ides_page/ides.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/iis_1.png b/docs/devonfw.github.io/1.0/_images/images/iis_1.png new file mode 100644 index 00000000..be03aaf7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/iis_1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/iis_2.png b/docs/devonfw.github.io/1.0/_images/images/iis_2.png new file mode 100644 index 00000000..3aadb285 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/iis_2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/iis_3.png 
b/docs/devonfw.github.io/1.0/_images/images/iis_3.png new file mode 100644 index 00000000..acbc3833 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/iis_3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image0.png b/docs/devonfw.github.io/1.0/_images/images/image0.png new file mode 100644 index 00000000..120800f9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image0.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image001.png b/docs/devonfw.github.io/1.0/_images/images/image001.png new file mode 100644 index 00000000..c7089828 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image001.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image00100.jpg b/docs/devonfw.github.io/1.0/_images/images/image00100.jpg new file mode 100644 index 00000000..d6e95f0f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image00100.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image00101.jpg b/docs/devonfw.github.io/1.0/_images/images/image00101.jpg new file mode 100644 index 00000000..1d9ddc71 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image00101.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image002.png b/docs/devonfw.github.io/1.0/_images/images/image002.png new file mode 100644 index 00000000..ea4eb988 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image002.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image01.png b/docs/devonfw.github.io/1.0/_images/images/image01.png new file mode 100644 index 00000000..d5b5e661 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image01.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image02.png b/docs/devonfw.github.io/1.0/_images/images/image02.png new file mode 100644 index 00000000..f1c6989e Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/image02.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image051.png b/docs/devonfw.github.io/1.0/_images/images/image051.png new file mode 100644 index 00000000..8a5504ae Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image051.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image080.png b/docs/devonfw.github.io/1.0/_images/images/image080.png new file mode 100644 index 00000000..fb531d8e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image080.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image081.png b/docs/devonfw.github.io/1.0/_images/images/image081.png new file mode 100644 index 00000000..8595ec4d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image081.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image082.png b/docs/devonfw.github.io/1.0/_images/images/image082.png new file mode 100644 index 00000000..3bb19951 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image082.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image083.png b/docs/devonfw.github.io/1.0/_images/images/image083.png new file mode 100644 index 00000000..0211d8d7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image083.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image084.png b/docs/devonfw.github.io/1.0/_images/images/image084.png new file mode 100644 index 00000000..7ea06582 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image084.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image085.png b/docs/devonfw.github.io/1.0/_images/images/image085.png new file mode 100644 index 00000000..13afb174 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image085.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image086.png 
b/docs/devonfw.github.io/1.0/_images/images/image086.png new file mode 100644 index 00000000..ea054e42 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image086.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image086_new.png b/docs/devonfw.github.io/1.0/_images/images/image086_new.png new file mode 100644 index 00000000..4d4c0d8b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image086_new.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image1.png b/docs/devonfw.github.io/1.0/_images/images/image1.png new file mode 100644 index 00000000..9111fe87 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image10.png b/docs/devonfw.github.io/1.0/_images/images/image10.png new file mode 100644 index 00000000..44dcb88e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image10.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image11.png b/docs/devonfw.github.io/1.0/_images/images/image11.png new file mode 100644 index 00000000..5d9d9ec2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image11.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image12.png b/docs/devonfw.github.io/1.0/_images/images/image12.png new file mode 100644 index 00000000..b3c0df64 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image12.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image13.png b/docs/devonfw.github.io/1.0/_images/images/image13.png new file mode 100644 index 00000000..341497d2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image13.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image14.png b/docs/devonfw.github.io/1.0/_images/images/image14.png new file mode 100644 index 00000000..2c69f519 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/image14.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image15.png b/docs/devonfw.github.io/1.0/_images/images/image15.png new file mode 100644 index 00000000..8e345e26 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image15.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image16.png b/docs/devonfw.github.io/1.0/_images/images/image16.png new file mode 100644 index 00000000..b80fb279 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image16.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image17.png b/docs/devonfw.github.io/1.0/_images/images/image17.png new file mode 100644 index 00000000..b80fb279 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image17.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image18.png b/docs/devonfw.github.io/1.0/_images/images/image18.png new file mode 100644 index 00000000..f206e104 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image18.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image19.png b/docs/devonfw.github.io/1.0/_images/images/image19.png new file mode 100644 index 00000000..f7c4ec52 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image19.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image1_new.png b/docs/devonfw.github.io/1.0/_images/images/image1_new.png new file mode 100644 index 00000000..f7a5a277 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image1_new.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image2.png b/docs/devonfw.github.io/1.0/_images/images/image2.png new file mode 100644 index 00000000..9662ca13 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image20.png 
b/docs/devonfw.github.io/1.0/_images/images/image20.png new file mode 100644 index 00000000..4dbda49c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image20.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image21.png b/docs/devonfw.github.io/1.0/_images/images/image21.png new file mode 100644 index 00000000..6d74a456 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image21.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image22.png b/docs/devonfw.github.io/1.0/_images/images/image22.png new file mode 100644 index 00000000..30eb89b7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image22.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image23.png b/docs/devonfw.github.io/1.0/_images/images/image23.png new file mode 100644 index 00000000..1ad040a8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image23.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image23_new.png b/docs/devonfw.github.io/1.0/_images/images/image23_new.png new file mode 100644 index 00000000..a060ba38 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image23_new.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image24.png b/docs/devonfw.github.io/1.0/_images/images/image24.png new file mode 100644 index 00000000..15fc8b1b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image24.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image24_new.png b/docs/devonfw.github.io/1.0/_images/images/image24_new.png new file mode 100644 index 00000000..95a26c8e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image24_new.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image25.png b/docs/devonfw.github.io/1.0/_images/images/image25.png new file mode 100644 index 00000000..ed11a270 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/image25.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image25_new.png b/docs/devonfw.github.io/1.0/_images/images/image25_new.png new file mode 100644 index 00000000..599026e5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image25_new.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image26.png b/docs/devonfw.github.io/1.0/_images/images/image26.png new file mode 100644 index 00000000..f443d230 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image26.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image26_new.png b/docs/devonfw.github.io/1.0/_images/images/image26_new.png new file mode 100644 index 00000000..ab539721 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image26_new.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image27.png b/docs/devonfw.github.io/1.0/_images/images/image27.png new file mode 100644 index 00000000..a3af5f72 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image27.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image27_new.png b/docs/devonfw.github.io/1.0/_images/images/image27_new.png new file mode 100644 index 00000000..ec386d01 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image27_new.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image28.png b/docs/devonfw.github.io/1.0/_images/images/image28.png new file mode 100644 index 00000000..9f566a26 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image28.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image29.png b/docs/devonfw.github.io/1.0/_images/images/image29.png new file mode 100644 index 00000000..eadd217b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image29.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image3.png 
b/docs/devonfw.github.io/1.0/_images/images/image3.png new file mode 100644 index 00000000..d28d33b1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image30.png b/docs/devonfw.github.io/1.0/_images/images/image30.png new file mode 100644 index 00000000..8f1780df Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image30.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image30_new.png b/docs/devonfw.github.io/1.0/_images/images/image30_new.png new file mode 100644 index 00000000..a958512f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image30_new.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image31.png b/docs/devonfw.github.io/1.0/_images/images/image31.png new file mode 100644 index 00000000..1170960f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image31.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image32.png b/docs/devonfw.github.io/1.0/_images/images/image32.png new file mode 100644 index 00000000..2dd1e936 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image32.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image32_new.png b/docs/devonfw.github.io/1.0/_images/images/image32_new.png new file mode 100644 index 00000000..580f61f1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image32_new.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image33.png b/docs/devonfw.github.io/1.0/_images/images/image33.png new file mode 100644 index 00000000..0a1ac82f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image33.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image33_new.png b/docs/devonfw.github.io/1.0/_images/images/image33_new.png new file mode 100644 index 00000000..d84bd25b Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/image33_new.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image34.png b/docs/devonfw.github.io/1.0/_images/images/image34.png new file mode 100644 index 00000000..205dc6fa Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image34.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image35.png b/docs/devonfw.github.io/1.0/_images/images/image35.png new file mode 100644 index 00000000..cf5b537e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image35.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image35_new.png b/docs/devonfw.github.io/1.0/_images/images/image35_new.png new file mode 100644 index 00000000..cf5029b0 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image35_new.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image36.png b/docs/devonfw.github.io/1.0/_images/images/image36.png new file mode 100644 index 00000000..e033bc44 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image36.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image37.png b/docs/devonfw.github.io/1.0/_images/images/image37.png new file mode 100644 index 00000000..3c79164b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image37.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image37_new.png b/docs/devonfw.github.io/1.0/_images/images/image37_new.png new file mode 100644 index 00000000..c6600d6c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image37_new.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image38.png b/docs/devonfw.github.io/1.0/_images/images/image38.png new file mode 100644 index 00000000..e9c2b411 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image38.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image38_new.png 
b/docs/devonfw.github.io/1.0/_images/images/image38_new.png new file mode 100644 index 00000000..0dea0792 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image38_new.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image39.png b/docs/devonfw.github.io/1.0/_images/images/image39.png new file mode 100644 index 00000000..2c8375c8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image39.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image39a.png b/docs/devonfw.github.io/1.0/_images/images/image39a.png new file mode 100644 index 00000000..39708505 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image39a.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image4.png b/docs/devonfw.github.io/1.0/_images/images/image4.png new file mode 100644 index 00000000..835605d1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image4.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image40.png b/docs/devonfw.github.io/1.0/_images/images/image40.png new file mode 100644 index 00000000..a41952c2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image40.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image41.png b/docs/devonfw.github.io/1.0/_images/images/image41.png new file mode 100644 index 00000000..ad2a2f01 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image41.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image42.png b/docs/devonfw.github.io/1.0/_images/images/image42.png new file mode 100644 index 00000000..00e52f61 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image42.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image43.png b/docs/devonfw.github.io/1.0/_images/images/image43.png new file mode 100644 index 00000000..09c5ae11 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/image43.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image44.png b/docs/devonfw.github.io/1.0/_images/images/image44.png new file mode 100644 index 00000000..1e9f9f8e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image44.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image45.png b/docs/devonfw.github.io/1.0/_images/images/image45.png new file mode 100644 index 00000000..cb0deb26 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image45.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image46.png b/docs/devonfw.github.io/1.0/_images/images/image46.png new file mode 100644 index 00000000..7b4931ad Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image46.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image47.png b/docs/devonfw.github.io/1.0/_images/images/image47.png new file mode 100644 index 00000000..3ba3b220 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image47.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image48.png b/docs/devonfw.github.io/1.0/_images/images/image48.png new file mode 100644 index 00000000..d81cf9b5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image48.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image49.png b/docs/devonfw.github.io/1.0/_images/images/image49.png new file mode 100644 index 00000000..c0a27f2a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image49.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image5.png b/docs/devonfw.github.io/1.0/_images/images/image5.png new file mode 100644 index 00000000..eb70eafb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image5.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image50.png b/docs/devonfw.github.io/1.0/_images/images/image50.png new 
file mode 100644 index 00000000..4ca51d44 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image50.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image51.png b/docs/devonfw.github.io/1.0/_images/images/image51.png new file mode 100644 index 00000000..7d6048dd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image51.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image52.png b/docs/devonfw.github.io/1.0/_images/images/image52.png new file mode 100644 index 00000000..3c4e4f86 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image52.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image53.png b/docs/devonfw.github.io/1.0/_images/images/image53.png new file mode 100644 index 00000000..92a8a519 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image53.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image54.png b/docs/devonfw.github.io/1.0/_images/images/image54.png new file mode 100644 index 00000000..1ac37e36 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image54.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image55.png b/docs/devonfw.github.io/1.0/_images/images/image55.png new file mode 100644 index 00000000..f64a5235 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image55.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image56.png b/docs/devonfw.github.io/1.0/_images/images/image56.png new file mode 100644 index 00000000..f526dc8c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image56.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image57.png b/docs/devonfw.github.io/1.0/_images/images/image57.png new file mode 100644 index 00000000..66620deb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image57.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/image58.png b/docs/devonfw.github.io/1.0/_images/images/image58.png new file mode 100644 index 00000000..8c74b804 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image58.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image59.png b/docs/devonfw.github.io/1.0/_images/images/image59.png new file mode 100644 index 00000000..37402f77 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image59.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image6.png b/docs/devonfw.github.io/1.0/_images/images/image6.png new file mode 100644 index 00000000..23ac54b5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image6.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image60.png b/docs/devonfw.github.io/1.0/_images/images/image60.png new file mode 100644 index 00000000..d05358b7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image60.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image61.png b/docs/devonfw.github.io/1.0/_images/images/image61.png new file mode 100644 index 00000000..aabfa444 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image61.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image62.png b/docs/devonfw.github.io/1.0/_images/images/image62.png new file mode 100644 index 00000000..83f53936 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image62.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image63.png b/docs/devonfw.github.io/1.0/_images/images/image63.png new file mode 100644 index 00000000..8650c5e5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image63.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image64.png b/docs/devonfw.github.io/1.0/_images/images/image64.png new file mode 100644 index 00000000..1ace42ba Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/image64.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image65.png b/docs/devonfw.github.io/1.0/_images/images/image65.png new file mode 100644 index 00000000..580445a5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image65.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image66.png b/docs/devonfw.github.io/1.0/_images/images/image66.png new file mode 100644 index 00000000..119e1085 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image66.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image67.png b/docs/devonfw.github.io/1.0/_images/images/image67.png new file mode 100644 index 00000000..90b504a6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image67.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image68.png b/docs/devonfw.github.io/1.0/_images/images/image68.png new file mode 100644 index 00000000..5054824e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image68.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image69.png b/docs/devonfw.github.io/1.0/_images/images/image69.png new file mode 100644 index 00000000..cf991cb0 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image69.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image7.png b/docs/devonfw.github.io/1.0/_images/images/image7.png new file mode 100644 index 00000000..f02a7489 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image7.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image70.png b/docs/devonfw.github.io/1.0/_images/images/image70.png new file mode 100644 index 00000000..2dbbff06 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image70.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image71.png b/docs/devonfw.github.io/1.0/_images/images/image71.png new 
file mode 100644 index 00000000..f78b7980 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image71.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image72.png b/docs/devonfw.github.io/1.0/_images/images/image72.png new file mode 100644 index 00000000..52104845 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image72.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image73.png b/docs/devonfw.github.io/1.0/_images/images/image73.png new file mode 100644 index 00000000..ddfc5780 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image73.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image74.png b/docs/devonfw.github.io/1.0/_images/images/image74.png new file mode 100644 index 00000000..9a6732c6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image74.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image75.png b/docs/devonfw.github.io/1.0/_images/images/image75.png new file mode 100644 index 00000000..a67016f7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image75.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image76.png b/docs/devonfw.github.io/1.0/_images/images/image76.png new file mode 100644 index 00000000..1a57181f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image76.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image77.png b/docs/devonfw.github.io/1.0/_images/images/image77.png new file mode 100644 index 00000000..9f317c22 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image77.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image78.png b/docs/devonfw.github.io/1.0/_images/images/image78.png new file mode 100644 index 00000000..d9da5acd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image78.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/image79.png b/docs/devonfw.github.io/1.0/_images/images/image79.png new file mode 100644 index 00000000..efc1744c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image79.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image8.png b/docs/devonfw.github.io/1.0/_images/images/image8.png new file mode 100644 index 00000000..cc532bab Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image8.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image80.png b/docs/devonfw.github.io/1.0/_images/images/image80.png new file mode 100644 index 00000000..a5218b33 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image80.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image81.png b/docs/devonfw.github.io/1.0/_images/images/image81.png new file mode 100644 index 00000000..29f7fe94 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image81.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image82.png b/docs/devonfw.github.io/1.0/_images/images/image82.png new file mode 100644 index 00000000..23b8aff9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image82.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image83.png b/docs/devonfw.github.io/1.0/_images/images/image83.png new file mode 100644 index 00000000..77fefd2c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image83.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image84.png b/docs/devonfw.github.io/1.0/_images/images/image84.png new file mode 100644 index 00000000..b1e6ac5e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image84.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image85.png b/docs/devonfw.github.io/1.0/_images/images/image85.png new file mode 100644 index 00000000..fcde60c8 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/image85.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image86.png b/docs/devonfw.github.io/1.0/_images/images/image86.png new file mode 100644 index 00000000..e9e7f09a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image86.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image87.png b/docs/devonfw.github.io/1.0/_images/images/image87.png new file mode 100644 index 00000000..57788f1c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image87.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image88.png b/docs/devonfw.github.io/1.0/_images/images/image88.png new file mode 100644 index 00000000..e2737d35 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image88.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image89.png b/docs/devonfw.github.io/1.0/_images/images/image89.png new file mode 100644 index 00000000..8abe43fb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image89.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image9.png b/docs/devonfw.github.io/1.0/_images/images/image9.png new file mode 100644 index 00000000..4ea5eeed Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image9.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image90.png b/docs/devonfw.github.io/1.0/_images/images/image90.png new file mode 100644 index 00000000..1573c9d9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image90.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/image91.png b/docs/devonfw.github.io/1.0/_images/images/image91.png new file mode 100644 index 00000000..7fc6ebd1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/image91.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/img.png b/docs/devonfw.github.io/1.0/_images/images/img.png new file 
mode 100644 index 00000000..4b0bfaa8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/img.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/img_1.png b/docs/devonfw.github.io/1.0/_images/images/img_1.png new file mode 100644 index 00000000..583bc83d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/img_1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/initialize-instance/create-account.png b/docs/devonfw.github.io/1.0/_images/images/initialize-instance/create-account.png new file mode 100644 index 00000000..f0807f48 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/initialize-instance/create-account.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/initialize-instance/create-account2.png b/docs/devonfw.github.io/1.0/_images/images/initialize-instance/create-account2.png new file mode 100644 index 00000000..3c75a3ac Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/initialize-instance/create-account2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/initialize-instance/create-account3.png b/docs/devonfw.github.io/1.0/_images/images/initialize-instance/create-account3.png new file mode 100644 index 00000000..60f6f1c7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/initialize-instance/create-account3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/initialize-instance/create-account4.png b/docs/devonfw.github.io/1.0/_images/images/initialize-instance/create-account4.png new file mode 100644 index 00000000..2849e99f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/initialize-instance/create-account4.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/initialize-instance/create-account5.png b/docs/devonfw.github.io/1.0/_images/images/initialize-instance/create-account5.png new file mode 100644 index 00000000..5f253399 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/initialize-instance/create-account5.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/initialize-instance/initialize-instance.png b/docs/devonfw.github.io/1.0/_images/images/initialize-instance/initialize-instance.png new file mode 100644 index 00000000..48d85ba8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/initialize-instance/initialize-instance.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/initialize-instance/initialize-instance2.png b/docs/devonfw.github.io/1.0/_images/images/initialize-instance/initialize-instance2.png new file mode 100644 index 00000000..afd44043 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/initialize-instance/initialize-instance2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/initialize-instance/maven-config.png b/docs/devonfw.github.io/1.0/_images/images/initialize-instance/maven-config.png new file mode 100644 index 00000000..73107aad Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/initialize-instance/maven-config.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/innovation.png b/docs/devonfw.github.io/1.0/_images/images/innovation.png new file mode 100644 index 00000000..a2fc9a6a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/innovation.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/insert-data.PNG b/docs/devonfw.github.io/1.0/_images/images/insert-data.PNG new file mode 100644 index 00000000..0b0dfd2a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/insert-data.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/install-cli-success.png b/docs/devonfw.github.io/1.0/_images/images/install-cli-success.png new file mode 100644 index 00000000..b14462cd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/install-cli-success.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/install-sonar-plugin/build-install-sonar-plugin.png b/docs/devonfw.github.io/1.0/_images/images/install-sonar-plugin/build-install-sonar-plugin.png new file mode 100644 index 00000000..927317ab Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/install-sonar-plugin/build-install-sonar-plugin.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/install-sonar-plugin/build-install-sonar-plugin2.png b/docs/devonfw.github.io/1.0/_images/images/install-sonar-plugin/build-install-sonar-plugin2.png new file mode 100644 index 00000000..5744ab7b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/install-sonar-plugin/build-install-sonar-plugin2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/install-sonar-plugin/sonar-plugin.png b/docs/devonfw.github.io/1.0/_images/images/install-sonar-plugin/sonar-plugin.png new file mode 100644 index 00000000..cc096944 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/install-sonar-plugin/sonar-plugin.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/install_win01.png b/docs/devonfw.github.io/1.0/_images/images/install_win01.png new file mode 100644 index 00000000..9b0420cd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/install_win01.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/install_win02.png b/docs/devonfw.github.io/1.0/_images/images/install_win02.png new file mode 100644 index 00000000..9c75140d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/install_win02.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/install_win03.png b/docs/devonfw.github.io/1.0/_images/images/install_win03.png new file mode 100644 index 00000000..331c135d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/install_win03.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/install_win04.png 
b/docs/devonfw.github.io/1.0/_images/images/install_win04.png new file mode 100644 index 00000000..1dc09128 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/install_win04.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/install_win05.png b/docs/devonfw.github.io/1.0/_images/images/install_win05.png new file mode 100644 index 00000000..4702ce5b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/install_win05.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/install_win06.png b/docs/devonfw.github.io/1.0/_images/images/install_win06.png new file mode 100644 index 00000000..f07d9c51 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/install_win06.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/install_win07.png b/docs/devonfw.github.io/1.0/_images/images/install_win07.png new file mode 100644 index 00000000..f01fcbc0 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/install_win07.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/install_win08.png b/docs/devonfw.github.io/1.0/_images/images/install_win08.png new file mode 100644 index 00000000..2abc8919 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/install_win08.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/integrating-mybatis/mybatis_devonfw_classdiagram-1.jpg b/docs/devonfw.github.io/1.0/_images/images/integrating-mybatis/mybatis_devonfw_classdiagram-1.jpg new file mode 100644 index 00000000..d2ae4c79 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/integrating-mybatis/mybatis_devonfw_classdiagram-1.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/integrating-mybatis/mybatis_devonfw_classdiagram.jpg b/docs/devonfw.github.io/1.0/_images/images/integrating-mybatis/mybatis_devonfw_classdiagram.jpg new file mode 100644 index 00000000..d2ae4c79 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/integrating-mybatis/mybatis_devonfw_classdiagram.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/integration/activemq_access.png b/docs/devonfw.github.io/1.0/_images/images/integration/activemq_access.png new file mode 100644 index 00000000..cd22fbef Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/integration/activemq_access.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/integration/activemq_client.png b/docs/devonfw.github.io/1.0/_images/images/integration/activemq_client.png new file mode 100644 index 00000000..2f8b5503 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/integration/activemq_client.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/integration/activemq_rrachannel_subscriber.png b/docs/devonfw.github.io/1.0/_images/images/integration/activemq_rrachannel_subscriber.png new file mode 100644 index 00000000..8c90c935 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/integration/activemq_rrachannel_subscriber.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/integration/activemq_rrchannel_message.png b/docs/devonfw.github.io/1.0/_images/images/integration/activemq_rrchannel_message.png new file mode 100644 index 00000000..2d06b838 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/integration/activemq_rrchannel_message.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/integration/activemq_rrchannel_subscriber.png b/docs/devonfw.github.io/1.0/_images/images/integration/activemq_rrchannel_subscriber.png new file mode 100644 index 00000000..13604b63 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/integration/activemq_rrchannel_subscriber.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/integration/activemq_simplechannel_message.png 
b/docs/devonfw.github.io/1.0/_images/images/integration/activemq_simplechannel_message.png new file mode 100644 index 00000000..3f530f6a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/integration/activemq_simplechannel_message.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/integration/activemq_simplechannel_message2.png b/docs/devonfw.github.io/1.0/_images/images/integration/activemq_simplechannel_message2.png new file mode 100644 index 00000000..ac15224d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/integration/activemq_simplechannel_message2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/integration/activemq_simplechannel_read.png b/docs/devonfw.github.io/1.0/_images/images/integration/activemq_simplechannel_read.png new file mode 100644 index 00000000..4040736a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/integration/activemq_simplechannel_read.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/integration/activemq_simplechannel_send.png b/docs/devonfw.github.io/1.0/_images/images/integration/activemq_simplechannel_send.png new file mode 100644 index 00000000..3b41ffe1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/integration/activemq_simplechannel_send.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/integration/requestreply_channel.png b/docs/devonfw.github.io/1.0/_images/images/integration/requestreply_channel.png new file mode 100644 index 00000000..fb3f83a5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/integration/requestreply_channel.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/integration/simple_channel.png b/docs/devonfw.github.io/1.0/_images/images/integration/simple_channel.png new file mode 100644 index 00000000..acfb607f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/integration/simple_channel.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/integration_icon.png b/docs/devonfw.github.io/1.0/_images/images/integration_icon.png new file mode 100644 index 00000000..2faf2830 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/integration_icon.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/Capgemini_Logo_Small.png b/docs/devonfw.github.io/1.0/_images/images/introduction/Capgemini_Logo_Small.png new file mode 100644 index 00000000..92281a7e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/Capgemini_Logo_Small.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/cobigen.png b/docs/devonfw.github.io/1.0/_images/images/introduction/cobigen.png new file mode 100644 index 00000000..0d0ecfc3 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/cobigen.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/FindBugs1.png b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/FindBugs1.png new file mode 100644 index 00000000..55229a0e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/FindBugs1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/FindBugs2.png b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/FindBugs2.png new file mode 100644 index 00000000..5133a861 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/FindBugs2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/FindBugs3.png b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/FindBugs3.png new file mode 100644 index 00000000..299a686b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/FindBugs3.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/FindBugs4.png b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/FindBugs4.png new file mode 100644 index 00000000..0c48e491 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/FindBugs4.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/Sonar_add_server.png b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/Sonar_add_server.png new file mode 100644 index 00000000..23f7b1c5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/Sonar_add_server.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/associate-sonarqube.png b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/associate-sonarqube.png new file mode 100644 index 00000000..b7afaaf8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/associate-sonarqube.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/change-link-with-project.png b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/change-link-with-project.png new file mode 100644 index 00000000..c9a4a51f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/change-link-with-project.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/checkstyle.png b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/checkstyle.png new file mode 100644 index 00000000..48cf00a3 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/checkstyle.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/checkstyle2.png b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/checkstyle2.png new file mode 100644 index 00000000..cf43b2b4 Binary files 
/dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/checkstyle2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/checkstyle3.png b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/checkstyle3.png new file mode 100644 index 00000000..2918c1a6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/checkstyle3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/checkstyle4.png b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/checkstyle4.png new file mode 100644 index 00000000..422cdbfd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/checkstyle4.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/checkstyle5.png b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/checkstyle5.png new file mode 100644 index 00000000..90b4b772 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/checkstyle5.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/cobigen.png b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/cobigen.png new file mode 100644 index 00000000..a8849d0a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/cobigen.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/eclipse-settings.png b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/eclipse-settings.png new file mode 100644 index 00000000..65cd5773 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/eclipse-settings.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/integrated-ide.png b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/integrated-ide.png new file 
mode 100644 index 00000000..bb2068dc Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/integrated-ide.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/link-with-project.png b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/link-with-project.png new file mode 100644 index 00000000..fe9e9f88 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/link-with-project.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/sonarQube-issues-view.png b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/sonarQube-issues-view.png new file mode 100644 index 00000000..27050c25 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/sonarQube-issues-view.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/unlink-with-project.png b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/unlink-with-project.png new file mode 100644 index 00000000..91b17bd7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devon-ide/unlink-with-project.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/devon_quality_agility.png b/docs/devonfw.github.io/1.0/_images/images/introduction/devon_quality_agility.png new file mode 100644 index 00000000..c10b2e61 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devon_quality_agility.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/devonfw-ide.png b/docs/devonfw.github.io/1.0/_images/images/introduction/devonfw-ide.png new file mode 100644 index 00000000..99c0db1a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devonfw-ide.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/devonfw-small.png 
b/docs/devonfw.github.io/1.0/_images/images/introduction/devonfw-small.png new file mode 100644 index 00000000..a8c61b09 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devonfw-small.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/devonfwcatalog.png b/docs/devonfw.github.io/1.0/_images/images/introduction/devonfwcatalog.png new file mode 100644 index 00000000..7bc600b0 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/devonfwcatalog.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/download-install/create_update_ws.png b/docs/devonfw.github.io/1.0/_images/images/introduction/download-install/create_update_ws.png new file mode 100644 index 00000000..92d4bce9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/download-install/create_update_ws.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/download-install/run_env_sh.png b/docs/devonfw.github.io/1.0/_images/images/introduction/download-install/run_env_sh.png new file mode 100644 index 00000000..cf29ec09 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/download-install/run_env_sh.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/download-install/setup_1.png b/docs/devonfw.github.io/1.0/_images/images/introduction/download-install/setup_1.png new file mode 100644 index 00000000..cb973e08 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/download-install/setup_1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/introduction/download-install/setup_2.png b/docs/devonfw.github.io/1.0/_images/images/introduction/download-install/setup_2.png new file mode 100644 index 00000000..fdace6d2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/download-install/setup_2.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/introduction/productivity_flexibility_impact.png b/docs/devonfw.github.io/1.0/_images/images/introduction/productivity_flexibility_impact.png new file mode 100644 index 00000000..7125dc8c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/introduction/productivity_flexibility_impact.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic-getting-started/ionic-blank-project.PNG b/docs/devonfw.github.io/1.0/_images/images/ionic-getting-started/ionic-blank-project.PNG new file mode 100644 index 00000000..94b9772e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic-getting-started/ionic-blank-project.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic-getting-started/ionic-start-list.png b/docs/devonfw.github.io/1.0/_images/images/ionic-getting-started/ionic-start-list.png new file mode 100644 index 00000000..aca2d6d1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic-getting-started/ionic-start-list.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic-getting-started/update-ionic-cli.PNG b/docs/devonfw.github.io/1.0/_images/images/ionic-getting-started/update-ionic-cli.PNG new file mode 100644 index 00000000..b28e83d5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic-getting-started/update-ionic-cli.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/and-vsc-build-apk.png b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/and-vsc-build-apk.png new file mode 100644 index 00000000..f321c1df Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/and-vsc-build-apk.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/and-vsc-build-run.png b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/and-vsc-build-run.png new file mode 100644 index 00000000..aaad2c91 Binary files 
/dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/and-vsc-build-run.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/and-vsc-make-app.png b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/and-vsc-make-app.png new file mode 100644 index 00000000..52fa226f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/and-vsc-make-app.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/and-vsc-make.png b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/and-vsc-make.png new file mode 100644 index 00000000..384e9079 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/and-vsc-make.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/config-device.png b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/config-device.png new file mode 100644 index 00000000..d68d982b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/config-device.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/create-new-device.png b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/create-new-device.png new file mode 100644 index 00000000..7c8a5a7c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/create-new-device.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/download-so.png b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/download-so.png new file mode 100644 index 00000000..0048db46 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/download-so.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/enable-developer-options1_2_3.png b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/enable-developer-options1_2_3.png new file mode 100644 index 
00000000..d17e22b9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/enable-developer-options1_2_3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/enable-developer-options4_5_6.png b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/enable-developer-options4_5_6.png new file mode 100644 index 00000000..529fdf07 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/enable-developer-options4_5_6.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/environments.png b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/environments.png new file mode 100644 index 00000000..47d7d367 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/environments.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/ipconfig-short.png b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/ipconfig-short.png new file mode 100644 index 00000000..c2a77d81 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/ipconfig-short.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/locate-apk.png b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/locate-apk.png new file mode 100644 index 00000000..e27bda40 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/locate-apk.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/new-backend-url.PNG b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/new-backend-url.PNG new file mode 100644 index 00000000..7f92fa2f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/new-backend-url.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/new-phone-created.png 
b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/new-phone-created.png new file mode 100644 index 00000000..f395296a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/new-phone-created.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/new-phone-nexus.png b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/new-phone-nexus.png new file mode 100644 index 00000000..7a166ba0 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/new-phone-nexus.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/real-device.png b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/real-device.png new file mode 100644 index 00000000..524038f2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic-to-android/real-device.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic/ionic-pwa/base.png b/docs/devonfw.github.io/1.0/_images/images/ionic/ionic-pwa/base.png new file mode 100644 index 00000000..159aa873 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic/ionic-pwa/base.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic/ionic-pwa/http-server.png b/docs/devonfw.github.io/1.0/_images/images/ionic/ionic-pwa/http-server.png new file mode 100644 index 00000000..dc5084f3 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic/ionic-pwa/http-server.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic/ionic-pwa/ionic-serve.png b/docs/devonfw.github.io/1.0/_images/images/ionic/ionic-pwa/ionic-serve.png new file mode 100644 index 00000000..cad3c335 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic/ionic-pwa/ionic-serve.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic/ionic-pwa/lighthouse.png b/docs/devonfw.github.io/1.0/_images/images/ionic/ionic-pwa/lighthouse.png 
new file mode 100644 index 00000000..f24e8806 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic/ionic-pwa/lighthouse.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic/ionic-pwa/online-offline.png b/docs/devonfw.github.io/1.0/_images/images/ionic/ionic-pwa/online-offline.png new file mode 100644 index 00000000..2c44171d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic/ionic-pwa/online-offline.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/ionic/ionic-pwa/pwa-nopwa-app.png b/docs/devonfw.github.io/1.0/_images/images/ionic/ionic-pwa/pwa-nopwa-app.png new file mode 100644 index 00000000..7dd4b467 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ionic/ionic-pwa/pwa-nopwa-app.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/iwan.jpg b/docs/devonfw.github.io/1.0/_images/images/iwan.jpg new file mode 100644 index 00000000..5c4d2af3 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/iwan.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/january.png b/docs/devonfw.github.io/1.0/_images/images/january.png new file mode 100644 index 00000000..ccc123cb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/january.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/java/component_structure.png b/docs/devonfw.github.io/1.0/_images/images/java/component_structure.png new file mode 100644 index 00000000..04af748f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/java/component_structure.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/java/dependency_injection.png b/docs/devonfw.github.io/1.0/_images/images/java/dependency_injection.png new file mode 100644 index 00000000..e743a8ad Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/java/dependency_injection.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/java/jwt_schema.png b/docs/devonfw.github.io/1.0/_images/images/java/jwt_schema.png new file mode 100644 index 00000000..5a8320d0 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/java/jwt_schema.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/java/layer_api_impl.png b/docs/devonfw.github.io/1.0/_images/images/java/layer_api_impl.png new file mode 100644 index 00000000..495c4749 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/java/layer_api_impl.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/java/layers_impl.png b/docs/devonfw.github.io/1.0/_images/images/java/layers_impl.png new file mode 100644 index 00000000..3d203a31 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/java/layers_impl.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/java/logic_layer.png b/docs/devonfw.github.io/1.0/_images/images/java/logic_layer.png new file mode 100644 index 00000000..39f53135 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/java/logic_layer.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/java/mtsj_components.png b/docs/devonfw.github.io/1.0/_images/images/java/mtsj_components.png new file mode 100644 index 00000000..9e6d7f67 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/java/mtsj_components.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/java/project_modules.png b/docs/devonfw.github.io/1.0/_images/images/java/project_modules.png new file mode 100644 index 00000000..4f66f1f7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/java/project_modules.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/java/test_results_eclipse.PNG b/docs/devonfw.github.io/1.0/_images/images/java/test_results_eclipse.PNG new file mode 100644 index 00000000..06685d6c Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/java/test_results_eclipse.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/java_icon.png b/docs/devonfw.github.io/1.0/_images/images/java_icon.png new file mode 100644 index 00000000..b99f7003 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/java_icon.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/javascript_icon.png b/docs/devonfw.github.io/1.0/_images/images/javascript_icon.png new file mode 100644 index 00000000..e5aecbfc Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/javascript_icon.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/jax-ws-webservices/web-services.png b/docs/devonfw.github.io/1.0/_images/images/jax-ws-webservices/web-services.png new file mode 100644 index 00000000..b27a73bb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/jax-ws-webservices/web-services.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/jenkins/mts-pipelines-cicd.png b/docs/devonfw.github.io/1.0/_images/images/jenkins/mts-pipelines-cicd.png new file mode 100644 index 00000000..86f0effd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/jenkins/mts-pipelines-cicd.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/jenkins/mts-pipelines.png b/docs/devonfw.github.io/1.0/_images/images/jenkins/mts-pipelines.png new file mode 100644 index 00000000..35003986 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/jenkins/mts-pipelines.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/jenkins/pipeline_output.PNG b/docs/devonfw.github.io/1.0/_images/images/jenkins/pipeline_output.PNG new file mode 100644 index 00000000..37dda415 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/jenkins/pipeline_output.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/jhipster-devon-module/created_entity.PNG 
b/docs/devonfw.github.io/1.0/_images/images/jhipster-devon-module/created_entity.PNG new file mode 100644 index 00000000..5d687237 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/jhipster-devon-module/created_entity.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/jhipster-devon-module/fields.PNG b/docs/devonfw.github.io/1.0/_images/images/jhipster-devon-module/fields.PNG new file mode 100644 index 00000000..d4fabcd9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/jhipster-devon-module/fields.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/jhipster-devon-module/update_entity.PNG b/docs/devonfw.github.io/1.0/_images/images/jhipster-devon-module/update_entity.PNG new file mode 100644 index 00000000..c27565d2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/jhipster-devon-module/update_entity.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/jhipster-devon-module/yarn_link.PNG b/docs/devonfw.github.io/1.0/_images/images/jhipster-devon-module/yarn_link.PNG new file mode 100644 index 00000000..e26ab6cb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/jhipster-devon-module/yarn_link.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/jhipster-devon-module/yarn_link_project.PNG b/docs/devonfw.github.io/1.0/_images/images/jhipster-devon-module/yarn_link_project.PNG new file mode 100644 index 00000000..26d4a127 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/jhipster-devon-module/yarn_link_project.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/jumpthequeue/event.png b/docs/devonfw.github.io/1.0/_images/images/jumpthequeue/event.png new file mode 100644 index 00000000..dcf50ff8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/jumpthequeue/event.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/jumpthequeue/flow.png 
b/docs/devonfw.github.io/1.0/_images/images/jumpthequeue/flow.png new file mode 100644 index 00000000..b558eaff Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/jumpthequeue/flow.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/jumpthequeue/mockups.png b/docs/devonfw.github.io/1.0/_images/images/jumpthequeue/mockups.png new file mode 100644 index 00000000..e45ba755 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/jumpthequeue/mockups.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/jumpthequeue/model.png b/docs/devonfw.github.io/1.0/_images/images/jumpthequeue/model.png new file mode 100644 index 00000000..11595251 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/jumpthequeue/model.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/jumpthequeue/scene.png b/docs/devonfw.github.io/1.0/_images/images/jumpthequeue/scene.png new file mode 100644 index 00000000..607f82c4 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/jumpthequeue/scene.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/june.png b/docs/devonfw.github.io/1.0/_images/images/june.png new file mode 100644 index 00000000..04247755 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/june.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/jwt.png b/docs/devonfw.github.io/1.0/_images/images/jwt.png new file mode 100644 index 00000000..7856fa56 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/jwt.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/jwt_schema.png b/docs/devonfw.github.io/1.0/_images/images/jwt_schema.png new file mode 100644 index 00000000..5a8320d0 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/jwt_schema.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/keyboard-layouts.png b/docs/devonfw.github.io/1.0/_images/images/keyboard-layouts.png 
new file mode 100644 index 00000000..3964c038 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/keyboard-layouts.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/keyboard-modifier-keys.png b/docs/devonfw.github.io/1.0/_images/images/keyboard-modifier-keys.png new file mode 100644 index 00000000..191aca29 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/keyboard-modifier-keys.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/keyboard-shortcuts.png b/docs/devonfw.github.io/1.0/_images/images/keyboard-shortcuts.png new file mode 100644 index 00000000..9b31daa5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/keyboard-shortcuts.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/landing_page/devonfw_distributions_dialog.png b/docs/devonfw.github.io/1.0/_images/images/landing_page/devonfw_distributions_dialog.png new file mode 100644 index 00000000..615e152e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/landing_page/devonfw_distributions_dialog.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/landing_page/get_started.png b/docs/devonfw.github.io/1.0/_images/images/landing_page/get_started.png new file mode 100644 index 00000000..b367fdc6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/landing_page/get_started.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/landing_page/profile_form.png b/docs/devonfw.github.io/1.0/_images/images/landing_page/profile_form.png new file mode 100644 index 00000000..9085bcd3 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/landing_page/profile_form.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/laryer_arch_detail.png b/docs/devonfw.github.io/1.0/_images/images/laryer_arch_detail.png new file mode 100644 index 00000000..3c1c2eef Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/laryer_arch_detail.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/layer_arch_detail.png b/docs/devonfw.github.io/1.0/_images/images/layer_arch_detail.png new file mode 100644 index 00000000..afca0433 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/layer_arch_detail.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/layer_impl.png b/docs/devonfw.github.io/1.0/_images/images/layer_impl.png new file mode 100644 index 00000000..819979c9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/layer_impl.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/1-finished-application.png b/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/1-finished-application.png new file mode 100644 index 00000000..359cb08b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/1-finished-application.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/2-blank-application.png b/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/2-blank-application.png new file mode 100644 index 00000000..3e4b31d7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/2-blank-application.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/3-material-added.png b/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/3-material-added.png new file mode 100644 index 00000000..c33d83bd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/3-material-added.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/4-header.png b/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/4-header.png new file mode 100644 index 00000000..8f336afb Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/4-header.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/5-header-layout-final.png b/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/5-header-layout-final.png new file mode 100644 index 00000000..1d7fb776 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/5-header-layout-final.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/6-home-page.png b/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/6-home-page.png new file mode 100644 index 00000000..8eea07fa Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/6-home-page.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/7-data-page.png b/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/7-data-page.png new file mode 100644 index 00000000..e4fadfa5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/7-data-page.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/8-sidenav-started.png b/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/8-sidenav-started.png new file mode 100644 index 00000000..d7b06579 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/8-sidenav-started.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/9-finished.png b/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/9-finished.png new file mode 100644 index 00000000..beb49f9f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/layout-angular-material/9-finished.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/linkedin.png b/docs/devonfw.github.io/1.0/_images/images/linkedin.png new file mode 100644 index 
00000000..0d863462 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/linkedin.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/logic-layer/devon-guide-logic-layer-layers.png b/docs/devonfw.github.io/1.0/_images/images/logic-layer/devon-guide-logic-layer-layers.png new file mode 100644 index 00000000..04044520 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/logic-layer/devon-guide-logic-layer-layers.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/logo-dark.old.png b/docs/devonfw.github.io/1.0/_images/images/logo-dark.old.png new file mode 100644 index 00000000..c2a66006 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/logo-dark.old.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/logo-dark.png b/docs/devonfw.github.io/1.0/_images/images/logo-dark.png new file mode 100644 index 00000000..b6867b41 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/logo-dark.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/logo.png b/docs/devonfw.github.io/1.0/_images/images/logo.png new file mode 100644 index 00000000..9bc501dd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/logo.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/logo_capgemini_white.png b/docs/devonfw.github.io/1.0/_images/images/logo_capgemini_white.png new file mode 100644 index 00000000..7e6c447f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/logo_capgemini_white.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/menu.svg b/docs/devonfw.github.io/1.0/_images/images/menu.svg new file mode 100644 index 00000000..e22f434d --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/menu.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/merge-combine-vscode.png b/docs/devonfw.github.io/1.0/_images/images/merge-combine-vscode.png new file mode 
100644 index 00000000..46871bb7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/merge-combine-vscode.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/mesteve.jpg b/docs/devonfw.github.io/1.0/_images/images/mesteve.jpg new file mode 100644 index 00000000..f8a96dd1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mesteve.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/methodology_1.png b/docs/devonfw.github.io/1.0/_images/images/methodology_1.png new file mode 100644 index 00000000..5c2b889e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/methodology_1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/methodology_2.png b/docs/devonfw.github.io/1.0/_images/images/methodology_2.png new file mode 100644 index 00000000..dec1ece4 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/methodology_2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/methodology_3.png b/docs/devonfw.github.io/1.0/_images/images/methodology_3.png new file mode 100644 index 00000000..fad41fee Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/methodology_3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/microservices/microservices.pptx b/docs/devonfw.github.io/1.0/_images/images/microservices/microservices.pptx new file mode 100644 index 00000000..a40b1565 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/microservices/microservices.pptx differ diff --git a/docs/devonfw.github.io/1.0/_images/images/microservices/microservices_01.png b/docs/devonfw.github.io/1.0/_images/images/microservices/microservices_01.png new file mode 100644 index 00000000..dd2eefbd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/microservices/microservices_01.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/microservices/microservices_02.png 
b/docs/devonfw.github.io/1.0/_images/images/microservices/microservices_02.png new file mode 100644 index 00000000..92a28967 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/microservices/microservices_02.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/microservices/microservices_03.png b/docs/devonfw.github.io/1.0/_images/images/microservices/microservices_03.png new file mode 100644 index 00000000..98081787 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/microservices/microservices_03.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/microservices/microservices_04.png b/docs/devonfw.github.io/1.0/_images/images/microservices/microservices_04.png new file mode 100644 index 00000000..d533550d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/microservices/microservices_04.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/microservices/microservices_05.png b/docs/devonfw.github.io/1.0/_images/images/microservices/microservices_05.png new file mode 100644 index 00000000..fe205492 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/microservices/microservices_05.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/microservices/microservices_06.png b/docs/devonfw.github.io/1.0/_images/images/microservices/microservices_06.png new file mode 100644 index 00000000..12cbd0b8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/microservices/microservices_06.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/microservices/microservices_07.png b/docs/devonfw.github.io/1.0/_images/images/microservices/microservices_07.png new file mode 100644 index 00000000..0d8744a7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/microservices/microservices_07.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration01.png 
b/docs/devonfw.github.io/1.0/_images/images/migration01.png new file mode 100644 index 00000000..2eca177e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration01.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration02.png b/docs/devonfw.github.io/1.0/_images/images/migration02.png new file mode 100644 index 00000000..712ca452 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration02.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration03.png b/docs/devonfw.github.io/1.0/_images/images/migration03.png new file mode 100644 index 00000000..ecf920bb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration03.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration04.png b/docs/devonfw.github.io/1.0/_images/images/migration04.png new file mode 100644 index 00000000..9b67bf7e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration04.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration05.png b/docs/devonfw.github.io/1.0/_images/images/migration05.png new file mode 100644 index 00000000..b5ec2fc2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration05.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration06.png b/docs/devonfw.github.io/1.0/_images/images/migration06.png new file mode 100644 index 00000000..e6a780ed Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration06.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration07.png b/docs/devonfw.github.io/1.0/_images/images/migration07.png new file mode 100644 index 00000000..d2598f21 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration07.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration08.png b/docs/devonfw.github.io/1.0/_images/images/migration08.png new file mode 100644 index 
00000000..8daccc9b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration08.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration09.png b/docs/devonfw.github.io/1.0/_images/images/migration09.png new file mode 100644 index 00000000..4d3b071c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration09.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration10.png b/docs/devonfw.github.io/1.0/_images/images/migration10.png new file mode 100644 index 00000000..41e8a7f9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration10.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration11.png b/docs/devonfw.github.io/1.0/_images/images/migration11.png new file mode 100644 index 00000000..4f7ec759 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration11.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration12.png b/docs/devonfw.github.io/1.0/_images/images/migration12.png new file mode 100644 index 00000000..a8897524 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration12.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration13.png b/docs/devonfw.github.io/1.0/_images/images/migration13.png new file mode 100644 index 00000000..fca0a733 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration13.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration14.png b/docs/devonfw.github.io/1.0/_images/images/migration14.png new file mode 100644 index 00000000..f5e1e07c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration14.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration15.png b/docs/devonfw.github.io/1.0/_images/images/migration15.png new file mode 100644 index 00000000..b1f27b00 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/migration15.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration16.png b/docs/devonfw.github.io/1.0/_images/images/migration16.png new file mode 100644 index 00000000..ff742019 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration16.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration17.png b/docs/devonfw.github.io/1.0/_images/images/migration17.png new file mode 100644 index 00000000..2f32f144 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration17.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration18.png b/docs/devonfw.github.io/1.0/_images/images/migration18.png new file mode 100644 index 00000000..f470a17a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration18.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration19.png b/docs/devonfw.github.io/1.0/_images/images/migration19.png new file mode 100644 index 00000000..74a14574 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration19.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration20.png b/docs/devonfw.github.io/1.0/_images/images/migration20.png new file mode 100644 index 00000000..b8436df1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration20.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration21.png b/docs/devonfw.github.io/1.0/_images/images/migration21.png new file mode 100644 index 00000000..e577b277 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration21.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration22.png b/docs/devonfw.github.io/1.0/_images/images/migration22.png new file mode 100644 index 00000000..0cf6db66 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration22.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/migration23.png b/docs/devonfw.github.io/1.0/_images/images/migration23.png new file mode 100644 index 00000000..7db55a75 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration23.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration24.png b/docs/devonfw.github.io/1.0/_images/images/migration24.png new file mode 100644 index 00000000..5921870d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration24.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration25.png b/docs/devonfw.github.io/1.0/_images/images/migration25.png new file mode 100644 index 00000000..1c6a1e24 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration25.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration26.png b/docs/devonfw.github.io/1.0/_images/images/migration26.png new file mode 100644 index 00000000..af11a30e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration26.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration27.png b/docs/devonfw.github.io/1.0/_images/images/migration27.png new file mode 100644 index 00000000..4e7cf6c2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration27.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration28.png b/docs/devonfw.github.io/1.0/_images/images/migration28.png new file mode 100644 index 00000000..cd911431 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration28.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration29.png b/docs/devonfw.github.io/1.0/_images/images/migration29.png new file mode 100644 index 00000000..a6f30bb4 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration29.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration30.png 
b/docs/devonfw.github.io/1.0/_images/images/migration30.png new file mode 100644 index 00000000..33b0a362 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration30.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration31.png b/docs/devonfw.github.io/1.0/_images/images/migration31.png new file mode 100644 index 00000000..34ebc4b4 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration31.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/migration_arrow_down.png b/docs/devonfw.github.io/1.0/_images/images/migration_arrow_down.png new file mode 100644 index 00000000..dc4f1fec Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/migration_arrow_down.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/mkurz.jpg b/docs/devonfw.github.io/1.0/_images/images/mkurz.jpg new file mode 100644 index 00000000..3571aebe Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mkurz.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/mobile_icon.png b/docs/devonfw.github.io/1.0/_images/images/mobile_icon.png new file mode 100644 index 00000000..02d47454 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mobile_icon.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/module-declaration.svg b/docs/devonfw.github.io/1.0/_images/images/module-declaration.svg new file mode 100644 index 00000000..41cd8c07 --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/module-declaration.svg @@ -0,0 +1,448 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + AppRoutingModule + + + + + + + + AppModule + + + + + + + + + + + + RouterModule + + + + + + + + forRoot + + + + + + + () + + + + + + + + FeatureModule + + + + + + + + + forChild + + + + + + + () + + + + + + + + FeatureRoutingModule + + + + + + + + + + + + + diff --git 
a/docs/devonfw.github.io/1.0/_images/images/mrchecker-mts/mythaistar1.png b/docs/devonfw.github.io/1.0/_images/images/mrchecker-mts/mythaistar1.png new file mode 100644 index 00000000..4af614a5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mrchecker-mts/mythaistar1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/mrchecker-mts/mythaistar2.png b/docs/devonfw.github.io/1.0/_images/images/mrchecker-mts/mythaistar2.png new file mode 100644 index 00000000..a6f504cc Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mrchecker-mts/mythaistar2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/mrchecker-mts/mythaistar3.png b/docs/devonfw.github.io/1.0/_images/images/mrchecker-mts/mythaistar3.png new file mode 100644 index 00000000..77c037f2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mrchecker-mts/mythaistar3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/mrchecker-mts/mythaistar4.png b/docs/devonfw.github.io/1.0/_images/images/mrchecker-mts/mythaistar4.png new file mode 100644 index 00000000..da67930d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mrchecker-mts/mythaistar4.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/mrchecker-mts/mythaistar5.png b/docs/devonfw.github.io/1.0/_images/images/mrchecker-mts/mythaistar5.png new file mode 100644 index 00000000..7a01c22b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mrchecker-mts/mythaistar5.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/mrchecker-mts/selenium_hub.png b/docs/devonfw.github.io/1.0/_images/images/mrchecker-mts/selenium_hub.png new file mode 100644 index 00000000..1a0b03ad Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mrchecker-mts/selenium_hub.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/mrchecker/allure.JPG 
b/docs/devonfw.github.io/1.0/_images/images/mrchecker/allure.JPG new file mode 100644 index 00000000..97d4f53f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mrchecker/allure.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/mrchecker/allure_report.JPG b/docs/devonfw.github.io/1.0/_images/images/mrchecker/allure_report.JPG new file mode 100644 index 00000000..eeffc793 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mrchecker/allure_report.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/mrchecker/created_token.JPG b/docs/devonfw.github.io/1.0/_images/images/mrchecker/created_token.JPG new file mode 100644 index 00000000..9bd39497 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mrchecker/created_token.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/mrchecker/maven.JPG b/docs/devonfw.github.io/1.0/_images/images/mrchecker/maven.JPG new file mode 100644 index 00000000..14e801ea Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mrchecker/maven.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/mrchecker/mrchecker&pl.JPG b/docs/devonfw.github.io/1.0/_images/images/mrchecker/mrchecker&pl.JPG new file mode 100644 index 00000000..97a787d8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mrchecker/mrchecker&pl.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/mrchecker/mrchecker.png b/docs/devonfw.github.io/1.0/_images/images/mrchecker/mrchecker.png new file mode 100644 index 00000000..e3709bfe Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mrchecker/mrchecker.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/mrchecker/pen.png b/docs/devonfw.github.io/1.0/_images/images/mrchecker/pen.png new file mode 100644 index 00000000..a5993c09 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mrchecker/pen.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/mrchecker/pipeline_script.JPG b/docs/devonfw.github.io/1.0/_images/images/mrchecker/pipeline_script.JPG new file mode 100644 index 00000000..08aa6ca7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mrchecker/pipeline_script.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/mrchecker/pl.png b/docs/devonfw.github.io/1.0/_images/images/mrchecker/pl.png new file mode 100644 index 00000000..f788df66 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mrchecker/pl.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/mrchecker/pljob.JPG b/docs/devonfw.github.io/1.0/_images/images/mrchecker/pljob.JPG new file mode 100644 index 00000000..cb505ebd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mrchecker/pljob.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/mrchecker/profile.png b/docs/devonfw.github.io/1.0/_images/images/mrchecker/profile.png new file mode 100644 index 00000000..8209c816 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mrchecker/profile.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/mrchecker/token.JPG b/docs/devonfw.github.io/1.0/_images/images/mrchecker/token.JPG new file mode 100644 index 00000000..ff44568d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mrchecker/token.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/mts_datamodel.png b/docs/devonfw.github.io/1.0/_images/images/mts_datamodel.png new file mode 100644 index 00000000..3ab83ec7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mts_datamodel.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/mts_styleguide.png b/docs/devonfw.github.io/1.0/_images/images/mts_styleguide.png new file mode 100644 index 00000000..1987880c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mts_styleguide.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/mtsn_components.png b/docs/devonfw.github.io/1.0/_images/images/mtsn_components.png new file mode 100644 index 00000000..47b9342f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/mtsn_components.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/net_icon.png b/docs/devonfw.github.io/1.0/_images/images/net_icon.png new file mode 100644 index 00000000..6e65ecd0 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/net_icon.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/new-app1.jpg b/docs/devonfw.github.io/1.0/_images/images/new-app1.jpg new file mode 100644 index 00000000..40c150a0 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/new-app1.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/new-app2.jpg b/docs/devonfw.github.io/1.0/_images/images/new-app2.jpg new file mode 100644 index 00000000..67c86f2c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/new-app2.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/new-app3.jpg b/docs/devonfw.github.io/1.0/_images/images/new-app3.jpg new file mode 100644 index 00000000..7b3f6021 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/new-app3.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/new-app4.jpg b/docs/devonfw.github.io/1.0/_images/images/new-app4.jpg new file mode 100644 index 00000000..27eccdef Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/new-app4.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/new-app5.jpg b/docs/devonfw.github.io/1.0/_images/images/new-app5.jpg new file mode 100644 index 00000000..fe34d164 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/new-app5.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/newjenkinsjob.PNG b/docs/devonfw.github.io/1.0/_images/images/newjenkinsjob.PNG new file mode 100644 
index 00000000..9ee3c1d9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/newjenkinsjob.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/next.scg.svg b/docs/devonfw.github.io/1.0/_images/images/next.scg.svg new file mode 100644 index 00000000..aec8cd2a --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/next.scg.svg @@ -0,0 +1,39 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/ngrx-concept.svg b/docs/devonfw.github.io/1.0/_images/images/ngrx-concept.svg new file mode 100644 index 00000000..adb2887d --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/ngrx-concept.svg @@ -0,0 +1,403 @@ + + + + + + image/svg+xml + + + + + + + + + + + + + Store + + + Component + + + Effect + + + + + + Reducer + + + Services + + dispatches + [Action] + + + + + + + + + + + selects + state + slice + + emits + updates + + Invoke + side + effect + diff --git a/docs/devonfw.github.io/1.0/_images/images/ngrx-devtools.png b/docs/devonfw.github.io/1.0/_images/images/ngrx-devtools.png new file mode 100644 index 00000000..965e1b27 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/ngrx-devtools.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/nodejs/dynamodb-data-model-1.4.1.png b/docs/devonfw.github.io/1.0/_images/images/nodejs/dynamodb-data-model-1.4.1.png new file mode 100644 index 00000000..36ebf82a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/nodejs/dynamodb-data-model-1.4.1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/nodejs/folder_organization.png b/docs/devonfw.github.io/1.0/_images/images/nodejs/folder_organization.png new file mode 100644 index 00000000..a690d8e1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/nodejs/folder_organization.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/nx-cli/create-nx-workspace.png 
b/docs/devonfw.github.io/1.0/_images/images/nx-cli/create-nx-workspace.png new file mode 100644 index 00000000..cb9044dd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/nx-cli/create-nx-workspace.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/nx-cli/nx-workspace-in-vscode.png b/docs/devonfw.github.io/1.0/_images/images/nx-cli/nx-workspace-in-vscode.png new file mode 100644 index 00000000..f42be339 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/nx-cli/nx-workspace-in-vscode.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/oasp-app-structure/app-structure.png b/docs/devonfw.github.io/1.0/_images/images/oasp-app-structure/app-structure.png new file mode 100644 index 00000000..c1698d7e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/oasp-app-structure/app-structure.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/oasp-app-structure/config-structure.png b/docs/devonfw.github.io/1.0/_images/images/oasp-app-structure/config-structure.png new file mode 100644 index 00000000..7e2e0b59 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/oasp-app-structure/config-structure.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/oasp-app-structure/project_modules.png b/docs/devonfw.github.io/1.0/_images/images/oasp-app-structure/project_modules.png new file mode 100644 index 00000000..c4607d12 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/oasp-app-structure/project_modules.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/oasp-logo.png b/docs/devonfw.github.io/1.0/_images/images/oasp-logo.png new file mode 100644 index 00000000..5b20ebf4 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/oasp-logo.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/on_the_flexible_solution.png b/docs/devonfw.github.io/1.0/_images/images/on_the_flexible_solution.png new 
file mode 100644 index 00000000..e1a29757 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/on_the_flexible_solution.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/onion.png b/docs/devonfw.github.io/1.0/_images/images/onion.png new file mode 100644 index 00000000..5fadda5b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/onion.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/onion_architecture.png b/docs/devonfw.github.io/1.0/_images/images/onion_architecture.png new file mode 100644 index 00000000..9e2571b6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/onion_architecture.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/onion_architecture_solution.png b/docs/devonfw.github.io/1.0/_images/images/onion_architecture_solution.png new file mode 100644 index 00000000..282a5d83 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/onion_architecture_solution.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/oomph/adding-content/01_index-structure.png b/docs/devonfw.github.io/1.0/_images/images/oomph/adding-content/01_index-structure.png new file mode 100644 index 00000000..fee93a8a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/oomph/adding-content/01_index-structure.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/oomph/installation/01_productpage.png b/docs/devonfw.github.io/1.0/_images/images/oomph/installation/01_productpage.png new file mode 100644 index 00000000..2fb6c397 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/oomph/installation/01_productpage.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/oomph/installation/02_projectpage.png b/docs/devonfw.github.io/1.0/_images/images/oomph/installation/02_projectpage.png new file mode 100644 index 00000000..9115c9d7 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/oomph/installation/02_projectpage.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/oomph/installation/03_additionaltasks.png b/docs/devonfw.github.io/1.0/_images/images/oomph/installation/03_additionaltasks.png new file mode 100644 index 00000000..e85ad591 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/oomph/installation/03_additionaltasks.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/oomph/installation/04_variablepage.png b/docs/devonfw.github.io/1.0/_images/images/oomph/installation/04_variablepage.png new file mode 100644 index 00000000..9f777491 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/oomph/installation/04_variablepage.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/oomph/installation/05_summarypage.png b/docs/devonfw.github.io/1.0/_images/images/oomph/installation/05_summarypage.png new file mode 100644 index 00000000..44d11a31 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/oomph/installation/05_summarypage.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/oomph/installation/06_installationpage.png b/docs/devonfw.github.io/1.0/_images/images/oomph/installation/06_installationpage.png new file mode 100644 index 00000000..a8f7062b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/oomph/installation/06_installationpage.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/oomph/installation/07_certificate.png b/docs/devonfw.github.io/1.0/_images/images/oomph/installation/07_certificate.png new file mode 100644 index 00000000..79f2499d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/oomph/installation/07_certificate.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/oomph/product/01_advprop.png b/docs/devonfw.github.io/1.0/_images/images/oomph/product/01_advprop.png new file mode 100644 index 
00000000..4e1d699b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/oomph/product/01_advprop.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/oomph/product/02_headlessjdk.png b/docs/devonfw.github.io/1.0/_images/images/oomph/product/02_headlessjdk.png new file mode 100644 index 00000000..da992687 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/oomph/product/02_headlessjdk.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/oomph/product/03_senchazip.png b/docs/devonfw.github.io/1.0/_images/images/oomph/product/03_senchazip.png new file mode 100644 index 00000000..40b5dc9b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/oomph/product/03_senchazip.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/oomph/project/01_new.png b/docs/devonfw.github.io/1.0/_images/images/oomph/project/01_new.png new file mode 100644 index 00000000..b72f2d4c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/oomph/project/01_new.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/oomph/project/02_creation.png b/docs/devonfw.github.io/1.0/_images/images/oomph/project/02_creation.png new file mode 100644 index 00000000..b1221660 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/oomph/project/02_creation.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/oomph/project/03_emptyfile.png b/docs/devonfw.github.io/1.0/_images/images/oomph/project/03_emptyfile.png new file mode 100644 index 00000000..5f229948 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/oomph/project/03_emptyfile.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/oomph/project/04_gitclone.png b/docs/devonfw.github.io/1.0/_images/images/oomph/project/04_gitclone.png new file mode 100644 index 00000000..b6479ef9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/oomph/project/04_gitclone.png 
differ diff --git a/docs/devonfw.github.io/1.0/_images/images/oomph/project/05_basicproject.png b/docs/devonfw.github.io/1.0/_images/images/oomph/project/05_basicproject.png new file mode 100644 index 00000000..176c0ab7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/oomph/project/05_basicproject.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/oomph/working/01_oomph-bar.png b/docs/devonfw.github.io/1.0/_images/images/oomph/working/01_oomph-bar.png new file mode 100644 index 00000000..b85ee608 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/oomph/working/01_oomph-bar.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/openshift-configuration/openshift-clusters.png b/docs/devonfw.github.io/1.0/_images/images/openshift-configuration/openshift-clusters.png new file mode 100644 index 00000000..e8e90a8c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/openshift-configuration/openshift-clusters.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/openshift-configuration/openshift-configuration.png b/docs/devonfw.github.io/1.0/_images/images/openshift-configuration/openshift-configuration.png new file mode 100644 index 00000000..73dc0d8f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/openshift-configuration/openshift-configuration.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/openshift-configuration/openshift-configuration2.png b/docs/devonfw.github.io/1.0/_images/images/openshift-configuration/openshift-configuration2.png new file mode 100644 index 00000000..e1ad83b5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/openshift-configuration/openshift-configuration2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/openshift-configuration/openshift-configuration3.png b/docs/devonfw.github.io/1.0/_images/images/openshift-configuration/openshift-configuration3.png new file mode 100644 
index 00000000..3260daae Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/openshift-configuration/openshift-configuration3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/azure-connection-strings/appservice.png b/docs/devonfw.github.io/1.0/_images/images/others/azure-connection-strings/appservice.png new file mode 100644 index 00000000..b1af7779 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/azure-connection-strings/appservice.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/azure-connection-strings/connection-string.png b/docs/devonfw.github.io/1.0/_images/images/others/azure-connection-strings/connection-string.png new file mode 100644 index 00000000..4b83c4d2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/azure-connection-strings/connection-string.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/azure-connection-strings/create.png b/docs/devonfw.github.io/1.0/_images/images/others/azure-connection-strings/create.png new file mode 100644 index 00000000..d6a9243d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/azure-connection-strings/create.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/azure-connection-strings/db.png b/docs/devonfw.github.io/1.0/_images/images/others/azure-connection-strings/db.png new file mode 100644 index 00000000..9f042dd7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/azure-connection-strings/db.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/azure-connection-strings/resource-group.png b/docs/devonfw.github.io/1.0/_images/images/others/azure-connection-strings/resource-group.png new file mode 100644 index 00000000..8baded97 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/azure-connection-strings/resource-group.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/others/azure-pipelines/configuration.png b/docs/devonfw.github.io/1.0/_images/images/others/azure-pipelines/configuration.png new file mode 100644 index 00000000..d5a9ff10 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/azure-pipelines/configuration.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/azure-pipelines/pipeline-settings.png b/docs/devonfw.github.io/1.0/_images/images/others/azure-pipelines/pipeline-settings.png new file mode 100644 index 00000000..21d62273 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/azure-pipelines/pipeline-settings.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/ServiceConnection.png b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/ServiceConnection.png new file mode 100644 index 00000000..c9c37409 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/ServiceConnection.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/custom-plugin/extension.png b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/custom-plugin/extension.png new file mode 100644 index 00000000..45b47515 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/custom-plugin/extension.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/custom-plugin/install.png b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/custom-plugin/install.png new file mode 100644 index 00000000..531b02e4 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/custom-plugin/install.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/custom-plugin/marketplace.png 
b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/custom-plugin/marketplace.png new file mode 100644 index 00000000..fa819c76 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/custom-plugin/marketplace.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/custom-plugin/share-unshare.png b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/custom-plugin/share-unshare.png new file mode 100644 index 00000000..016e736c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/custom-plugin/share-unshare.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/custom-plugin/wizard.png b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/custom-plugin/wizard.png new file mode 100644 index 00000000..66e852ba Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/custom-plugin/wizard.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/install-sonar.png b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/install-sonar.png new file mode 100644 index 00000000..a1d13c22 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/install-sonar.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/publish.png b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/publish.png new file mode 100644 index 00000000..da9105b4 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/publish.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/runAnalysis.png b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/runAnalysis.png new file mode 100644 index 00000000..91de46b3 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/runAnalysis.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/sonarprepare.png b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/sonarprepare.png new file mode 100644 index 00000000..35f3f836 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/sonarprepare.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/vm-connection.png b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/vm-connection.png new file mode 100644 index 00000000..c1187206 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/azure-sonarqube/vm-connection.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/done.png b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/done.png new file mode 100644 index 00000000..a351e314 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/done.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step0.png b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step0.png new file mode 100644 index 00000000..b8bf5e57 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step0.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step1.png b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step1.png new file mode 100644 index 00000000..a8688ca9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step2.1.png b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step2.1.png new file mode 100644 index 00000000..ce4ff1ef Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step2.1.png differ 
diff --git a/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step2.2.png b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step2.2.png new file mode 100644 index 00000000..312f1f26 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step2.2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step2.3.png b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step2.3.png new file mode 100644 index 00000000..bdfb006c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step2.3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step2.4.png b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step2.4.png new file mode 100644 index 00000000..fd0a81ff Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step2.4.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step2.5.png b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step2.5.png new file mode 100644 index 00000000..9fd4409f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step2.5.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step3a.png b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step3a.png new file mode 100644 index 00000000..2f3da533 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step3a.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step3b.png b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step3b.png new file mode 100644 index 00000000..3bf16c81 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step3b.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step4.2.png 
b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step4.2.png new file mode 100644 index 00000000..8489f86e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step4.2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step4.3.png b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step4.3.png new file mode 100644 index 00000000..fa97577f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step4.3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step4.4.png b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step4.4.png new file mode 100644 index 00000000..ce1aaf5b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step4.4.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step4.6.png b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step4.6.png new file mode 100644 index 00000000..d540cc37 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step4.6.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step4.7.png b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step4.7.png new file mode 100644 index 00000000..91711612 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/step4.7.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/xtraconfig/done.png b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/xtraconfig/done.png new file mode 100644 index 00000000..71251d2d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/xtraconfig/done.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/xtraconfig/pvc0.png b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/xtraconfig/pvc0.png new file mode 100644 index 
00000000..d3390499 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/xtraconfig/pvc0.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/xtraconfig/pvc1.png b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/xtraconfig/pvc1.png new file mode 100644 index 00000000..2aaf268c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/xtraconfig/pvc1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/xtraconfig/pvc2.png b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/xtraconfig/pvc2.png new file mode 100644 index 00000000..a3fb991a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/xtraconfig/pvc2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/xtraconfig/pvc3.png b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/xtraconfig/pvc3.png new file mode 100644 index 00000000..f8ec6ef1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/bitbucket/xtraconfig/pvc3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image1.png b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image1.png new file mode 100644 index 00000000..78a6931c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image10.png b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image10.png new file mode 100644 index 00000000..b7b107b3 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image10.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image11.png b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image11.png new file mode 100644 index 00000000..7259b5b5 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image11.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image12.png b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image12.png new file mode 100644 index 00000000..bb713cb6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image12.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image13.png b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image13.png new file mode 100644 index 00000000..72ec8d1a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image13.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image14.png b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image14.png new file mode 100644 index 00000000..d5f7dc5f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image14.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image2.png b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image2.png new file mode 100644 index 00000000..d2f459d6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image3.png b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image3.png new file mode 100644 index 00000000..4ef5e0af Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image4.png b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image4.png new file mode 100644 index 00000000..affef4e2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image4.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image5.png b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image5.png new file mode 100644 index 00000000..831e2dd6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image5.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image6.png b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image6.png new file mode 100644 index 00000000..bd9501ce Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image6.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image7.png b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image7.png new file mode 100644 index 00000000..de9eccf6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image7.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image8.png b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image8.png new file mode 100644 index 00000000..fe736e77 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image8.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image9.png b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image9.png new file mode 100644 index 00000000..bbd20e1a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/istio/media/image9.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/gitlab-access-token-generated.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/gitlab-access-token-generated.png new file mode 100644 index 00000000..62e77c6a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/gitlab-access-token-generated.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/gitlab-access-token-generation.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/gitlab-access-token-generation.png new file mode 100644 index 00000000..b62e7972 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/gitlab-access-token-generation.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/gitlab-access-token.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/gitlab-access-token.png new file mode 100644 index 00000000..7a913948 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/gitlab-access-token.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/gitlab-webhook.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/gitlab-webhook.png new file mode 100644 index 00000000..2a062ea4 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/gitlab-webhook.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-api-token-credentials-complete.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-api-token-credentials-complete.png new file mode 100644 index 00000000..ff84d342 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-api-token-credentials-complete.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-api-token-credentials-kind.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-api-token-credentials-kind.png new file mode 100644 index 00000000..8592d1cc Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-api-token-credentials-kind.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-api-token-credentials.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-api-token-credentials.png new file mode 100644 index 00000000..01d864c2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-api-token-credentials.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-connection.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-connection.png new file mode 100644 index 00000000..b5ed0122 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-connection.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-plugins.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-plugins.png new file mode 100644 index 00000000..fdc97cc1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-plugins.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-pipeline-build-trigger.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-pipeline-build-trigger.png new file mode 100644 index 00000000..c9639fdd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-pipeline-build-trigger.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-pipeline-gitlab-connection.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-pipeline-gitlab-connection.png new file mode 100644 index 00000000..73493ef5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-pipeline-gitlab-connection.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-pipeline-result.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-pipeline-result.png new file mode 100644 index 00000000..60ef3034 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-gitlab/jenkins-pipeline-result.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-nexus/jenkins-new-maven-settings-servers-credentials.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-nexus/jenkins-new-maven-settings-servers-credentials.png new file mode 100644 index 00000000..97b62441 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-nexus/jenkins-new-maven-settings-servers-credentials.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-nexus/jenkins-new-maven-settings-servers.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-nexus/jenkins-new-maven-settings-servers.png new file mode 100644 index 00000000..a70f4507 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-nexus/jenkins-new-maven-settings-servers.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-nexus/jenkins-new-maven-settings.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-nexus/jenkins-new-maven-settings.png new file mode 100644 index 00000000..ef14002a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-nexus/jenkins-new-maven-settings.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-nexus/jenkins-new-nexus-credentials-completed.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-nexus/jenkins-new-nexus-credentials-completed.png new file mode 100644 index 00000000..916dd413 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-nexus/jenkins-new-nexus-credentials-completed.png 
differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-nexus/jenkins-new-nexus-credentials-filled.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-nexus/jenkins-new-nexus-credentials-filled.png new file mode 100644 index 00000000..cf898fef Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-nexus/jenkins-new-nexus-credentials-filled.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-nexus/jenkins-new-nexus-credentials.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-nexus/jenkins-new-nexus-credentials.png new file mode 100644 index 00000000..fa518dcb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-nexus/jenkins-new-nexus-credentials.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-slave/1.jpg b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-slave/1.jpg new file mode 100644 index 00000000..72602804 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-slave/1.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-slave/2.jpg b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-slave/2.jpg new file mode 100644 index 00000000..498b915d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-slave/2.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-slave/3.jpg b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-slave/3.jpg new file mode 100644 index 00000000..d4c54a0c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-slave/3.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-slave/4.jpg b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-slave/4.jpg new file mode 100644 index 00000000..6c1b79b3 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-slave/4.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-slave/7.jpg b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-slave/7.jpg new file mode 100644 index 00000000..d68e936d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-slave/7.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-slave/root_directory.jpg b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-slave/root_directory.jpg new file mode 100644 index 00000000..12a1b455 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-slave/root_directory.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-feedback.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-feedback.png new file mode 100644 index 00000000..ad55e886 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-feedback.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-plugin.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-plugin.png new file mode 100644 index 00000000..e49da617 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-plugin.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-scanner.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-scanner.png new file mode 100644 index 00000000..6796d3be Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-scanner.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-server-setup.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-server-setup.png new file mode 100644 index 00000000..025e1bc8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-server-setup.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/sonarqube-login.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/sonarqube-login.png new file mode 100644 index 00000000..d218d386 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/sonarqube-login.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/sonarqube-project-result.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/sonarqube-project-result.png new file mode 100644 index 00000000..17c7fbe1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/sonarqube-project-result.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/sonarqube-token-done.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/sonarqube-token-done.png new file mode 100644 index 00000000..567b9214 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/sonarqube-token-done.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/sonarqube-token-generation.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/sonarqube-token-generation.png new file mode 100644 index 00000000..c8acbdba Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/sonarqube-token-generation.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/sonarqube-token-name.png b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/sonarqube-token-name.png new file mode 100644 index 00000000..072a257d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/jenkins-sonarqube/sonarqube-token-name.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/done1.png b/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/done1.png new file mode 100644 index 00000000..2d58140d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/done1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/done2.png b/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/done2.png new file mode 100644 index 00000000..c01f4420 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/done2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/step1.png b/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/step1.png new file mode 100644 index 00000000..2c16ec97 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/step1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/step2.png b/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/step2.png new file mode 100644 index 00000000..d0cd2a19 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/step2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/step3a.png b/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/step3a.png new file mode 100644 index 00000000..240da9a8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/step3a.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/step3b.png b/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/step3b.png new file mode 100644 index 00000000..e839aa41 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/step3b.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/step4.png b/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/step4.png new file mode 100644 index 00000000..10165a0b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/step4.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/step5.png b/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/step5.png new file mode 100644 index 00000000..b6fea9be Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/selenium/hub/step5.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/selenium/node/done1.png b/docs/devonfw.github.io/1.0/_images/images/others/selenium/node/done1.png new file mode 100644 index 00000000..c6c90866 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/selenium/node/done1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/selenium/node/done2.png b/docs/devonfw.github.io/1.0/_images/images/others/selenium/node/done2.png new file mode 100644 index 00000000..1ec7f6ab Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/selenium/node/done2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/selenium/node/done3.png b/docs/devonfw.github.io/1.0/_images/images/others/selenium/node/done3.png new file mode 100644 index 00000000..99705780 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/selenium/node/done3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/selenium/node/step1.png 
b/docs/devonfw.github.io/1.0/_images/images/others/selenium/node/step1.png new file mode 100644 index 00000000..2c16ec97 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/selenium/node/step1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/selenium/node/step2.png b/docs/devonfw.github.io/1.0/_images/images/others/selenium/node/step2.png new file mode 100644 index 00000000..9cd019f7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/selenium/node/step2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/others/selenium/node/step3.png b/docs/devonfw.github.io/1.0/_images/images/others/selenium/node/step3.png new file mode 100644 index 00000000..9481e1ac Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/others/selenium/node/step3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/overlay.png b/docs/devonfw.github.io/1.0/_images/images/overlay.png new file mode 100644 index 00000000..3cd22cc9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/overlay.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/packages.png b/docs/devonfw.github.io/1.0/_images/images/packages.png new file mode 100644 index 00000000..6f9c727e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/packages.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/pic01.jpg b/docs/devonfw.github.io/1.0/_images/images/pic01.jpg new file mode 100644 index 00000000..2c4200ca Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/pic01.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/pic02.jpg b/docs/devonfw.github.io/1.0/_images/images/pic02.jpg new file mode 100644 index 00000000..34ac5e86 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/pic02.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/pic03.jpg 
b/docs/devonfw.github.io/1.0/_images/images/pic03.jpg new file mode 100644 index 00000000..db243174 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/pic03.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/pipelinesettings.PNG b/docs/devonfw.github.io/1.0/_images/images/pipelinesettings.PNG new file mode 100644 index 00000000..9cc5edc7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/pipelinesettings.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/piramida.png b/docs/devonfw.github.io/1.0/_images/images/piramida.png new file mode 100644 index 00000000..efcd3c21 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/piramida.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/plantuml/components.png b/docs/devonfw.github.io/1.0/_images/images/plantuml/components.png new file mode 100644 index 00000000..ec0207dd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/plantuml/components.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/plantuml/dependency-injection1.png b/docs/devonfw.github.io/1.0/_images/images/plantuml/dependency-injection1.png new file mode 100644 index 00000000..e909d946 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/plantuml/dependency-injection1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/plantuml/dependency-injection2.png b/docs/devonfw.github.io/1.0/_images/images/plantuml/dependency-injection2.png new file mode 100644 index 00000000..e79d2401 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/plantuml/dependency-injection2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/plantuml/layers.png b/docs/devonfw.github.io/1.0/_images/images/plantuml/layers.png new file mode 100644 index 00000000..d464104f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/plantuml/layers.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/plantuml/module2.png b/docs/devonfw.github.io/1.0/_images/images/plantuml/module2.png new file mode 100644 index 00000000..bc1f31bc Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/plantuml/module2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/plantuml/modules.png b/docs/devonfw.github.io/1.0/_images/images/plantuml/modules.png new file mode 100644 index 00000000..ffb3653f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/plantuml/modules.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/prev.svg b/docs/devonfw.github.io/1.0/_images/images/prev.svg new file mode 100644 index 00000000..cb0d1d41 --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/prev.svg @@ -0,0 +1,39 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/productionline/ci-process-diagram.png b/docs/devonfw.github.io/1.0/_images/images/productionline/ci-process-diagram.png new file mode 100644 index 00000000..f58a999b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/productionline/ci-process-diagram.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/productionline/pl-pipeline.png b/docs/devonfw.github.io/1.0/_images/images/productionline/pl-pipeline.png new file mode 100644 index 00000000..f8393159 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/productionline/pl-pipeline.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/productionline/pl-pipelinetools.png b/docs/devonfw.github.io/1.0/_images/images/productionline/pl-pipelinetools.png new file mode 100644 index 00000000..89ff8a00 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/productionline/pl-pipelinetools.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/productionline/pl-schema.png 
b/docs/devonfw.github.io/1.0/_images/images/productionline/pl-schema.png new file mode 100644 index 00000000..43b930f1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/productionline/pl-schema.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/devonfw-project-distribution-management.PNG b/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/devonfw-project-distribution-management.PNG new file mode 100644 index 00000000..5664f87b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/devonfw-project-distribution-management.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/jenkins-job-custom-tools.PNG b/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/jenkins-job-custom-tools.PNG new file mode 100644 index 00000000..0c5044bb Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/jenkins-job-custom-tools.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/jenkins-job-git-repository.PNG b/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/jenkins-job-git-repository.PNG new file mode 100644 index 00000000..b2a16e97 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/jenkins-job-git-repository.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/jenkins-job-script.PNG b/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/jenkins-job-script.PNG new file mode 100644 index 00000000..2b95af36 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/jenkins-job-script.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/jenkins-job-ssh-agent.PNG 
b/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/jenkins-job-ssh-agent.PNG new file mode 100644 index 00000000..e9690df8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/jenkins-job-ssh-agent.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/jenkins-job-steps.PNG b/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/jenkins-job-steps.PNG new file mode 100644 index 00000000..c35b8112 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/jenkins-job-steps.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/jenkins-pipeline-script.PNG b/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/jenkins-pipeline-script.PNG new file mode 100644 index 00000000..51d9a65f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/jenkins-pipeline-script.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/jenkins-pipeline-steps.PNG b/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/jenkins-pipeline-steps.PNG new file mode 100644 index 00000000..b4f448de Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/jenkins-pipeline-steps.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/nexus-repository.PNG b/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/nexus-repository.PNG new file mode 100644 index 00000000..81741bb9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/productionline/practical-guide/nexus-repository.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/project-structure.png b/docs/devonfw.github.io/1.0/_images/images/project-structure.png new file mode 
100644 index 00000000..75f2c617 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/project-structure.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/project_doc.png b/docs/devonfw.github.io/1.0/_images/images/project_doc.png new file mode 100644 index 00000000..ddcf8dc7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/project_doc.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/project_modules.png b/docs/devonfw.github.io/1.0/_images/images/project_modules.png new file mode 100644 index 00000000..7c3865d2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/project_modules.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/project_nature.png b/docs/devonfw.github.io/1.0/_images/images/project_nature.png new file mode 100644 index 00000000..573c8fbc Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/project_nature.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/project_page/creation.png b/docs/devonfw.github.io/1.0/_images/images/project_page/creation.png new file mode 100644 index 00000000..cd0ad071 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/project_page/creation.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/project_page/folders.png b/docs/devonfw.github.io/1.0/_images/images/project_page/folders.png new file mode 100644 index 00000000..b95bfbdc Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/project_page/folders.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/project_page/installation.png b/docs/devonfw.github.io/1.0/_images/images/project_page/installation.png new file mode 100644 index 00000000..93ac431e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/project_page/installation.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/project_page/project_data.png 
b/docs/devonfw.github.io/1.0/_images/images/project_page/project_data.png new file mode 100644 index 00000000..9ebb2809 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/project_page/project_data.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/project_page/project_details_features.png b/docs/devonfw.github.io/1.0/_images/images/project_page/project_details_features.png new file mode 100644 index 00000000..f80af00d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/project_page/project_details_features.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/project_page/project_details_screen.png b/docs/devonfw.github.io/1.0/_images/images/project_page/project_details_screen.png new file mode 100644 index 00000000..1aa88ea7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/project_page/project_details_screen.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/project_page/project_execution.png b/docs/devonfw.github.io/1.0/_images/images/project_page/project_execution.png new file mode 100644 index 00000000..bbfd984b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/project_page/project_execution.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/project_page/project_page.png b/docs/devonfw.github.io/1.0/_images/images/project_page/project_page.png new file mode 100644 index 00000000..7529e03b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/project_page/project_page.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/project_page/project_screen.png b/docs/devonfw.github.io/1.0/_images/images/project_page/project_screen.png new file mode 100644 index 00000000..d0adef6a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/project_page/project_screen.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/project_page/project_type.png 
b/docs/devonfw.github.io/1.0/_images/images/project_page/project_type.png new file mode 100644 index 00000000..d0adef6a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/project_page/project_type.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/project_page/retry.png b/docs/devonfw.github.io/1.0/_images/images/project_page/retry.png new file mode 100644 index 00000000..a8de973a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/project_page/retry.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/quality.png b/docs/devonfw.github.io/1.0/_images/images/quality.png new file mode 100644 index 00000000..7a6424a2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/quality.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/query_logo.png b/docs/devonfw.github.io/1.0/_images/images/query_logo.png new file mode 100644 index 00000000..a1391f04 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/query_logo.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/read_icon.png b/docs/devonfw.github.io/1.0/_images/images/read_icon.png new file mode 100644 index 00000000..731650ab Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/read_icon.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/release-notes-2.4/eve.jpg b/docs/devonfw.github.io/1.0/_images/images/release-notes-2.4/eve.jpg new file mode 100644 index 00000000..c97bb725 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/release-notes-2.4/eve.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/repositories_page/repositories.png b/docs/devonfw.github.io/1.0/_images/images/repositories_page/repositories.png new file mode 100644 index 00000000..e2f70aad Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/repositories_page/repositories.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/repositories_page/repositories_search.png b/docs/devonfw.github.io/1.0/_images/images/repositories_page/repositories_search.png new file mode 100644 index 00000000..6c16b571 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/repositories_page/repositories_search.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/rest-adapter.svg b/docs/devonfw.github.io/1.0/_images/images/rest-adapter.svg new file mode 100644 index 00000000..bb2a7c24 --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/rest-adapter.svg @@ -0,0 +1,366 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + client + + + + + + + + Use Case Service + + + + + + + + Adapter + + + + + + + + + HttpClient + + + + + + + + + + server + + + + + + + + + HTTP + + + + + + + Endpoint + + + + + + + + + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/get_request.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/get_request.png new file mode 100644 index 00000000..3cc519ef Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/get_request.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/mts.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/mts.png new file mode 100644 index 00000000..2613c436 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/mts.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/my-thai-start-login.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/my-thai-start-login.png new file mode 100644 index 00000000..cfda4abe Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/my-thai-start-login.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/run.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/run.png new file mode 100644 index 00000000..a678f0ab Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/run.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_1_import_maven_prject.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_1_import_maven_prject.png new file mode 100644 index 00000000..897d3c2c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_1_import_maven_prject.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_1_new_server.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_1_new_server.png new file mode 100644 index 00000000..e220cfb0 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_1_new_server.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_1_run_embedded_server.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_1_run_embedded_server.png new file mode 100644 index 00000000..95a95112 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_1_run_embedded_server.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_2_new_server.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_2_new_server.png new file mode 100644 index 00000000..738d1235 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_2_new_server.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_2_select_folder_oasp.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_2_select_folder_oasp.png new file mode 100644 index 00000000..887dcac1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_2_select_folder_oasp.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_3_import_maven.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_3_import_maven.png new file mode 100644 index 00000000..feb9a10f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_3_import_maven.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_3_new_server.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_3_new_server.png new file mode 100644 index 00000000..b0de1800 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_3_new_server.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_4_new_server.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_4_new_server.png new file mode 100644 index 00000000..3ab63d35 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/running_sample_application_4_new_server.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/select_maven_project_1.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/select_maven_project_1.png new file mode 100644 index 00000000..24ed7358 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/select_maven_project_1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/select_maven_project_2.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/select_maven_project_2.png new file mode 100644 index 00000000..54b3ba42 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/select_maven_project_2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_1_import_maven.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_1_import_maven.png new file mode 100644 index 00000000..2bae19b0 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_1_import_maven.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_2_select_folder.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_2_select_folder.png new file mode 100644 index 00000000..880b6dba Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_2_select_folder.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_2_select_folder_oasp.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_2_select_folder_oasp.png new file mode 100644 index 00000000..eca45540 Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_2_select_folder_oasp.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_3_import_maven.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_3_import_maven.png new file mode 100644 index 00000000..e2ae5fdc Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_3_import_maven.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_3_import_maven_oasp.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_3_import_maven_oasp.png new file mode 100644 index 00000000..5d92dc5c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_3_import_maven_oasp.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_4_server_tab.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_4_server_tab.png new file mode 100644 index 00000000..fd0b7289 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_4_server_tab.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_5_add_remove.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_5_add_remove.png new file mode 100644 index 00000000..2de81d8a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_5_add_remove.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_6_services.png 
b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_6_services.png new file mode 100644 index 00000000..0e6dbdad Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/tutorial_running_sample_6_services.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/webpack.JPG b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/webpack.JPG new file mode 100644 index 00000000..e94b0a8a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/webpack.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/running-sample-application/yarn_install.png b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/yarn_install.png new file mode 100644 index 00000000..d02718cc Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/running-sample-application/yarn_install.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/sample/employees.png b/docs/devonfw.github.io/1.0/_images/images/sample/employees.png new file mode 100644 index 00000000..434ea28c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/sample/employees.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/sample/generate-migrations.png b/docs/devonfw.github.io/1.0/_images/images/sample/generate-migrations.png new file mode 100644 index 00000000..a0414e9b Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/sample/generate-migrations.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/sample/insert-data.png b/docs/devonfw.github.io/1.0/_images/images/sample/insert-data.png new file mode 100644 index 00000000..0b0dfd2a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/sample/insert-data.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/sample/new-app.png 
b/docs/devonfw.github.io/1.0/_images/images/sample/new-app.png new file mode 100644 index 00000000..f2c3638c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/sample/new-app.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/sample/start-app.png b/docs/devonfw.github.io/1.0/_images/images/sample/start-app.png new file mode 100644 index 00000000..e44baee0 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/sample/start-app.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/sample/swagger.png b/docs/devonfw.github.io/1.0/_images/images/sample/swagger.png new file mode 100644 index 00000000..8d7aa48d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/sample/swagger.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/sample/test.png b/docs/devonfw.github.io/1.0/_images/images/sample/test.png new file mode 100644 index 00000000..ba775b27 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/sample/test.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/samples.png b/docs/devonfw.github.io/1.0/_images/images/samples.png new file mode 100644 index 00000000..05f6dc4a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/samples.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/saphana/Prediction_usecase.JPG b/docs/devonfw.github.io/1.0/_images/images/saphana/Prediction_usecase.JPG new file mode 100644 index 00000000..c755a7cf Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/saphana/Prediction_usecase.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/saphana/db_screenshot_1.JPG b/docs/devonfw.github.io/1.0/_images/images/saphana/db_screenshot_1.JPG new file mode 100644 index 00000000..b6c394c6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/saphana/db_screenshot_1.JPG differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/saphana/mts_login.JPG b/docs/devonfw.github.io/1.0/_images/images/saphana/mts_login.JPG new file mode 100644 index 00000000..f2523036 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/saphana/mts_login.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/saphana/sap_hana_data_generator_output.JPG b/docs/devonfw.github.io/1.0/_images/images/saphana/sap_hana_data_generator_output.JPG new file mode 100644 index 00000000..0acd0cb6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/saphana/sap_hana_data_generator_output.JPG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/security-layer/Security-AccessControl.png b/docs/devonfw.github.io/1.0/_images/images/security-layer/Security-AccessControl.png new file mode 100644 index 00000000..4815aa74 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/security-layer/Security-AccessControl.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/security/2FA_qr_code_menu.png b/docs/devonfw.github.io/1.0/_images/images/security/2FA_qr_code_menu.png new file mode 100644 index 00000000..565c80cc Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/security/2FA_qr_code_menu.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/security/2FA_secret_menu.png b/docs/devonfw.github.io/1.0/_images/images/security/2FA_secret_menu.png new file mode 100644 index 00000000..27509b4e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/security/2FA_secret_menu.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/security/2FA_sidemenu.png b/docs/devonfw.github.io/1.0/_images/images/security/2FA_sidemenu.png new file mode 100644 index 00000000..c8303f91 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/security/2FA_sidemenu.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/security/filters_png.png 
b/docs/devonfw.github.io/1.0/_images/images/security/filters_png.png new file mode 100644 index 00000000..a0c7db85 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/security/filters_png.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/security/otp_prompt.png b/docs/devonfw.github.io/1.0/_images/images/security/otp_prompt.png new file mode 100644 index 00000000..96663e87 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/security/otp_prompt.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/security/security_cross_component.png b/docs/devonfw.github.io/1.0/_images/images/security/security_cross_component.png new file mode 100644 index 00000000..d14a7c18 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/security/security_cross_component.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/security/security_cross_component_twofactor.png b/docs/devonfw.github.io/1.0/_images/images/security/security_cross_component_twofactor.png new file mode 100644 index 00000000..99abcd32 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/security/security_cross_component_twofactor.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/serilog_seq.png b/docs/devonfw.github.io/1.0/_images/images/serilog_seq.png new file mode 100644 index 00000000..0bdbbe2e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/serilog_seq.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/serverless/folder_organization.png b/docs/devonfw.github.io/1.0/_images/images/serverless/folder_organization.png new file mode 100644 index 00000000..0dd63315 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/serverless/folder_organization.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/settings_page/account-settings.png b/docs/devonfw.github.io/1.0/_images/images/settings_page/account-settings.png new file mode 
100644 index 00000000..d18e3f31 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/settings_page/account-settings.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/settings_page/installed-versions.png b/docs/devonfw.github.io/1.0/_images/images/settings_page/installed-versions.png new file mode 100644 index 00000000..a0f8146f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/settings_page/installed-versions.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/setup-and-maintain-db-schemas-with-flyway/environments.png b/docs/devonfw.github.io/1.0/_images/images/setup-and-maintain-db-schemas-with-flyway/environments.png new file mode 100644 index 00000000..d3793e19 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/setup-and-maintain-db-schemas-with-flyway/environments.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/setup-and-maintain-db-schemas-with-flyway/how-flyway-works-1.png b/docs/devonfw.github.io/1.0/_images/images/setup-and-maintain-db-schemas-with-flyway/how-flyway-works-1.png new file mode 100644 index 00000000..a3142eb8 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/setup-and-maintain-db-schemas-with-flyway/how-flyway-works-1.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/setup-and-maintain-db-schemas-with-flyway/how-flyway-works-2.png b/docs/devonfw.github.io/1.0/_images/images/setup-and-maintain-db-schemas-with-flyway/how-flyway-works-2.png new file mode 100644 index 00000000..5253fb82 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/setup-and-maintain-db-schemas-with-flyway/how-flyway-works-2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/setup-and-maintain-db-schemas-with-flyway/how-flyway-works-3.png b/docs/devonfw.github.io/1.0/_images/images/setup-and-maintain-db-schemas-with-flyway/how-flyway-works-3.png new file mode 100644 index 00000000..a08292e9 Binary 
files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/setup-and-maintain-db-schemas-with-flyway/how-flyway-works-3.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/setup-and-maintain-db-schemas-with-flyway/schema-version-metadata-table.png b/docs/devonfw.github.io/1.0/_images/images/setup-and-maintain-db-schemas-with-flyway/schema-version-metadata-table.png new file mode 100644 index 00000000..46f1c8bd Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/setup-and-maintain-db-schemas-with-flyway/schema-version-metadata-table.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/slider1.jpg b/docs/devonfw.github.io/1.0/_images/images/slider1.jpg new file mode 100644 index 00000000..49d1c706 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/slider1.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/slider2.jpg b/docs/devonfw.github.io/1.0/_images/images/slider2.jpg new file mode 100644 index 00000000..f34ef1fe Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/slider2.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/slider3.jpg b/docs/devonfw.github.io/1.0/_images/images/slider3.jpg new file mode 100644 index 00000000..cabfc561 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/slider3.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/slideshare.png b/docs/devonfw.github.io/1.0/_images/images/slideshare.png new file mode 100644 index 00000000..069568fa Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/slideshare.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/smart-component-interaction-via-services-layer.svg b/docs/devonfw.github.io/1.0/_images/images/smart-component-interaction-via-services-layer.svg new file mode 100644 index 00000000..636e0028 --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/smart-component-interaction-via-services-layer.svg @@ -0,0 
+1,724 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Store + + + + + + + + state$: Observable<State> + + + + + + + + + + + + + + + changeState + + + + + + + ( + + + + + + + args + + + + + + + ): void + + + + + + + + Smart + + + + + + + Component A + + + + + + + + Smart + + + + + + + Component B + + + + + + + + Smart + + + + + + + Component + + + + + + + C + + + + + + + + + + + + + + + + + action() + + + + + + + subscribe() + + + + + + + + UseCaseService + + + + + + + + action(): void + + + + + + + + + subscribe() + + + + + + + + + + Services Layer + + + + + + + Components Layer + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/smart-dumb-components-interaction.svg b/docs/devonfw.github.io/1.0/_images/images/smart-dumb-components-interaction.svg new file mode 100644 index 00000000..15706ef0 --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/smart-dumb-components-interaction.svg @@ -0,0 +1,501 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Smart + + + + + + + Component + + + + + + + + Dumb + + + + + + + Component A + + + + + + + + Dumb + + + + + + + Component B + + + + + + + + Dumb + + + + + + + Component C + + + + + + + + + + + + + Data Binding + + + + + + + Data Binding + + + + + + + Data Binding + + + + + + + + Event Binding + + + + + + + + + + + + + Event Binding + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/smart-dumb-components.svg b/docs/devonfw.github.io/1.0/_images/images/smart-dumb-components.svg new file mode 100644 index 00000000..df8809db --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/smart-dumb-components.svg @@ -0,0 +1,887 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Smart + + + + + + + Component + + + + + + + + Dumb + + + + + + + Component + + + + + + + A + + + + + + + + Dumb + + + + + + + Component + + + + + + + B + + + + + + + + Dumb + + + + + + + Component + + + + + + + C + + + + + + + + + + + + + + Service + + + + + + + + Data Binding + + + + + + + Data Binding + + + + + + + Data Binding + + + + + + + + Event Binding + + + + + + + + + + + + + Event Binding + + + + + + + + Store + + + + + + + + subscribe() + + + + + + + action() + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/smart-smart-components-example.svg b/docs/devonfw.github.io/1.0/_images/images/smart-smart-components-example.svg new file mode 100644 index 00000000..dacb06d9 --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/smart-smart-components-example.svg @@ -0,0 +1,1456 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + FlightSearchStore + + + + + + + + state$: Observable<State> + + + + + + + + + + + + + + + setFlights + + + + + + + ( + + + + + + + Flug + + + + + + + []): void + + + + + + + + + + + + + + + clearFlights + + + + + + + (): void + + + + + + + + + + + + + + + setLoadingFlights + + + + + + + ( + + + + + + + boolean + + + + + + + ): void + + + + + + + + FlightSearchComponent + + + + + + + + FlightDetailsComponent + + + + + + + + + + + + + + + + + subscribe() + + + + + + + + FlightSearchService + + + + + + + + + + + + + + + loadFlights + + + + + + + (): void + + + + + + + + + FlightSearchState + + + + + + + + + + + 
+ + + + isLoadingFlights + + + + + + + : + + + + + + + boolean + + + + + + + + flights: + + + + + + + Flug + + + + + + + [] + + + + + + + + c + + + + + + + riteria: + + + + + + + FlightSearchCriteria + + + + + + + + + + + loadFlights + + + + + + + () + + + + + + + + FlightSearchAdapter + + + + + + + + + + + + + + + getFlights + + + + + + + (): + + + + + + + Observable<Flight[]> + + + + + + + + + HttpClient + + + + + + + + + + + + + + + get + + + + + + + <T> + + + + + + + (): Observable<T> + + + + + + + + subscribe() + + + + + + + + FlightSearchCriteria + + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/smart-smart-components.svg b/docs/devonfw.github.io/1.0/_images/images/smart-smart-components.svg new file mode 100644 index 00000000..b4fc8369 --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/smart-smart-components.svg @@ -0,0 +1,794 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Store + + + + + + + + state$: Observable<State> + + + + + + + + + + + + + + + changeState + + + + + + + ( + + + + + + + args + + + + + + + ): + + + + + + + void + + + + + + + + Smart + + + + + + + Component + + + + + + + A + + + + + + + + Smart + + + + + + + Component + + + + + + + B + + + + + + + + Smart + + + + + + + Component + + + + + + + C + + + + + + + + + + + + + + + + + action() + + + + + + + subscribe() + + + + + + + + UseCaseService + + + + + + + + + + + + + + + action(): + + + + + + + void + + + + + + + + + subscribe() + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/solution.png b/docs/devonfw.github.io/1.0/_images/images/solution.png new file mode 100644 index 00000000..f0d1d5d0 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/solution.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/springbootadmin/Logging.PNG 
b/docs/devonfw.github.io/1.0/_images/images/springbootadmin/Logging.PNG new file mode 100644 index 00000000..6c6c2ba1 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/springbootadmin/Logging.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/springbootadmin/Springbootclient.PNG b/docs/devonfw.github.io/1.0/_images/images/springbootadmin/Springbootclient.PNG new file mode 100644 index 00000000..982d7a9a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/springbootadmin/Springbootclient.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/springbootadmin/springbootadminserver.PNG b/docs/devonfw.github.io/1.0/_images/images/springbootadmin/springbootadminserver.PNG new file mode 100644 index 00000000..6569eed2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/springbootadmin/springbootadminserver.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/src/ngrx-concept.pptx b/docs/devonfw.github.io/1.0/_images/images/src/ngrx-concept.pptx new file mode 100644 index 00000000..219d5cd5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/src/ngrx-concept.pptx differ diff --git a/docs/devonfw.github.io/1.0/_images/images/stack.png b/docs/devonfw.github.io/1.0/_images/images/stack.png new file mode 100644 index 00000000..ef19c183 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/stack.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/swaggerDoc.png b/docs/devonfw.github.io/1.0/_images/images/swaggerDoc.png new file mode 100644 index 00000000..9a822b4f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/swaggerDoc.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/swaggerDocXMLCheck.png b/docs/devonfw.github.io/1.0/_images/images/swaggerDocXMLCheck.png new file mode 100644 index 00000000..7fd7fa7a Binary files /dev/null and 
b/docs/devonfw.github.io/1.0/_images/images/swaggerDocXMLCheck.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/templates.png b/docs/devonfw.github.io/1.0/_images/images/templates.png new file mode 100644 index 00000000..098870dc Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/templates.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/testimonials.png b/docs/devonfw.github.io/1.0/_images/images/testimonials.png new file mode 100644 index 00000000..9835e68f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/testimonials.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/testing-areas.svg b/docs/devonfw.github.io/1.0/_images/images/testing-areas.svg new file mode 100644 index 00000000..45b461ed --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/testing-areas.svg @@ -0,0 +1,1161 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Store + + + + + + + + + + + + + Smart + + + + + + + + Dumb + + + + + + + + Dumb + + + + + + + + Dumb + + + + + + + + Dumb + + + + + + + + + + + + + + + + + + + + + + + + + + + + Dumb + + + + + + + + + + + + + Dumb + + + + + + + + + + + + + Use + + + + + + + Case + + + + + + + Service + + + + + + + + + + Adapter + + + + + + + + + Service + + + + + + + + + + + + + + + + + View + + + + + + + l + + + + + + + ogic + + + + + + + in + + + + + + + Smart + + + + + + + Components + + + + + + + (1) + + + + + + + State + + + + + + + t + + + + + + + ransitions + + + + + + + in Stores + + + + + + + (2) + + + + + + + Business + + + + + + + logic + + + + + + + in + + + + + + + S + + + + + + + ervices + + + + + + + (3) + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/tools-tabs-cmd.png 
b/docs/devonfw.github.io/1.0/_images/images/tools-tabs-cmd.png new file mode 100644 index 00000000..e7e149f6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/tools-tabs-cmd.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/tools-tabs-explorer.png b/docs/devonfw.github.io/1.0/_images/images/tools-tabs-explorer.png new file mode 100644 index 00000000..c79cf21d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/tools-tabs-explorer.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/tools-tabs-firefox.png b/docs/devonfw.github.io/1.0/_images/images/tools-tabs-firefox.png new file mode 100644 index 00000000..da31539d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/tools-tabs-firefox.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/tools-tabs-ssh.png b/docs/devonfw.github.io/1.0/_images/images/tools-tabs-ssh.png new file mode 100644 index 00000000..96f6055f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/tools-tabs-ssh.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/topLeftLogo.png b/docs/devonfw.github.io/1.0/_images/images/topLeftLogo.png new file mode 100644 index 00000000..309089cf Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/topLeftLogo.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/topRightLogo.png b/docs/devonfw.github.io/1.0/_images/images/topRightLogo.png new file mode 100644 index 00000000..9c0c04f6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/topRightLogo.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/traiectum_white.png b/docs/devonfw.github.io/1.0/_images/images/traiectum_white.png new file mode 100644 index 00000000..c4e4bd34 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/traiectum_white.png differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/transfer-objects/devon-guide-transferobject.png b/docs/devonfw.github.io/1.0/_images/images/transfer-objects/devon-guide-transferobject.png new file mode 100644 index 00000000..453a5051 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/transfer-objects/devon-guide-transferobject.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/triggering-navigation.svg b/docs/devonfw.github.io/1.0/_images/images/triggering-navigation.svg new file mode 100644 index 00000000..f54fd123 --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/triggering-navigation.svg @@ -0,0 +1,422 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + Smart Component + + + + + + + + Dumb Component A + + + + + + + + Dumb Component C + + + + + + + + navigationButtonClick + + + + + + + Event + + + + + + + + navigationButtonClick + + + + + + + Event + + + + + + + User clicks button to + + + + + + + trigger navigation + + + + + + + + Router + + + + + + + + + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/twitter.png b/docs/devonfw.github.io/1.0/_images/images/twitter.png new file mode 100644 index 00000000..846ef2e2 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/twitter.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/typeorm-schematic.PNG b/docs/devonfw.github.io/1.0/_images/images/typeorm-schematic.PNG new file mode 100644 index 00000000..2a3d09b9 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/typeorm-schematic.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/university.png b/docs/devonfw.github.io/1.0/_images/images/university.png new file mode 100644 index 00000000..e3ebe33c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/university.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/uow_sample.png 
b/docs/devonfw.github.io/1.0/_images/images/uow_sample.png new file mode 100644 index 00000000..cf521a79 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/uow_sample.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/use-case-service.svg b/docs/devonfw.github.io/1.0/_images/images/use-case-service.svg new file mode 100644 index 00000000..cfabc02a --- /dev/null +++ b/docs/devonfw.github.io/1.0/_images/images/use-case-service.svg @@ -0,0 +1,319 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + UseCaseService + + + + + + + + action(): void + + + + + + + + Store + + + + + + + + Adapter + + + + + + + + Business + + + + + + + Service + + + + + + + + + + + + + + + + + + + + diff --git a/docs/devonfw.github.io/1.0/_images/images/used-technologies.jpg b/docs/devonfw.github.io/1.0/_images/images/used-technologies.jpg new file mode 100644 index 00000000..f79fe526 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/used-technologies.jpg differ diff --git a/docs/devonfw.github.io/1.0/_images/images/userguide.png b/docs/devonfw.github.io/1.0/_images/images/userguide.png new file mode 100644 index 00000000..ed3a222f Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/userguide.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/view_icon.png b/docs/devonfw.github.io/1.0/_images/images/view_icon.png new file mode 100644 index 00000000..51257d45 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/view_icon.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/vscodeopenfolder.png b/docs/devonfw.github.io/1.0/_images/images/vscodeopenfolder.png new file mode 100644 index 00000000..4a98816d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/vscodeopenfolder.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/warning.png 
b/docs/devonfw.github.io/1.0/_images/images/warning.png new file mode 100644 index 00000000..fca077d3 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/warning.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/web_icon.png b/docs/devonfw.github.io/1.0/_images/images/web_icon.png new file mode 100644 index 00000000..0afc937e Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/web_icon.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/wiki_page/wiki.png b/docs/devonfw.github.io/1.0/_images/images/wiki_page/wiki.png new file mode 100644 index 00000000..97bad35c Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/wiki_page/wiki.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/working-with-git/devon-guide-working-with-git-available-to-pull.PNG b/docs/devonfw.github.io/1.0/_images/images/working-with-git/devon-guide-working-with-git-available-to-pull.PNG new file mode 100644 index 00000000..f5d816a5 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/working-with-git/devon-guide-working-with-git-available-to-pull.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/working-with-git/devon-guide-working-with-git-diagram.PNG b/docs/devonfw.github.io/1.0/_images/images/working-with-git/devon-guide-working-with-git-diagram.PNG new file mode 100644 index 00000000..0fa09da7 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/working-with-git/devon-guide-working-with-git-diagram.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/working-with-git/devon-guide-working-with-git-fork.PNG b/docs/devonfw.github.io/1.0/_images/images/working-with-git/devon-guide-working-with-git-fork.PNG new file mode 100644 index 00000000..d2f4ad3d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/working-with-git/devon-guide-working-with-git-fork.PNG differ diff --git 
a/docs/devonfw.github.io/1.0/_images/images/working-with-git/devon-guide-working-with-git-new-pull-request-description.PNG b/docs/devonfw.github.io/1.0/_images/images/working-with-git/devon-guide-working-with-git-new-pull-request-description.PNG new file mode 100644 index 00000000..6431027d Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/working-with-git/devon-guide-working-with-git-new-pull-request-description.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/working-with-git/devon-guide-working-with-git-new-pull-request.PNG b/docs/devonfw.github.io/1.0/_images/images/working-with-git/devon-guide-working-with-git-new-pull-request.PNG new file mode 100644 index 00000000..92f6250a Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/working-with-git/devon-guide-working-with-git-new-pull-request.PNG differ diff --git a/docs/devonfw.github.io/1.0/_images/images/writing-unittest-cases/getting-started-writing-unittest-cases-TDD-failed.png b/docs/devonfw.github.io/1.0/_images/images/writing-unittest-cases/getting-started-writing-unittest-cases-TDD-failed.png new file mode 100644 index 00000000..55402266 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/writing-unittest-cases/getting-started-writing-unittest-cases-TDD-failed.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/writing-unittest-cases/getting-started-writing-unittest-cases-TDD-failed2.png b/docs/devonfw.github.io/1.0/_images/images/writing-unittest-cases/getting-started-writing-unittest-cases-TDD-failed2.png new file mode 100644 index 00000000..cd5edeae Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/writing-unittest-cases/getting-started-writing-unittest-cases-TDD-failed2.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/writing-unittest-cases/getting-started-writing-unittest-cases-TDD-ok.png 
b/docs/devonfw.github.io/1.0/_images/images/writing-unittest-cases/getting-started-writing-unittest-cases-TDD-ok.png new file mode 100644 index 00000000..4583dc67 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/writing-unittest-cases/getting-started-writing-unittest-cases-TDD-ok.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/writing-unittest-cases/getting-started-writing-unittest-cases-failed.png b/docs/devonfw.github.io/1.0/_images/images/writing-unittest-cases/getting-started-writing-unittest-cases-failed.png new file mode 100644 index 00000000..059f8cce Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/writing-unittest-cases/getting-started-writing-unittest-cases-failed.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/writing-unittest-cases/getting-started-writing-unittest-cases-ok.png b/docs/devonfw.github.io/1.0/_images/images/writing-unittest-cases/getting-started-writing-unittest-cases-ok.png new file mode 100644 index 00000000..6ee9cefe Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/writing-unittest-cases/getting-started-writing-unittest-cases-ok.png differ diff --git a/docs/devonfw.github.io/1.0/_images/images/youtube.png b/docs/devonfw.github.io/1.0/_images/images/youtube.png new file mode 100644 index 00000000..b5eb06a6 Binary files /dev/null and b/docs/devonfw.github.io/1.0/_images/images/youtube.png differ diff --git a/docs/devonfw.github.io/1.0/cicdgen.wiki/Home.html b/docs/devonfw.github.io/1.0/cicdgen.wiki/Home.html new file mode 100644 index 00000000..079e0604 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cicdgen.wiki/Home.html @@ -0,0 +1,315 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==CICDGEN

+
+
+

cicdgen is a devonfw tool to generate all code/files related to CICD. It will include/modify in your project all files that the project needs to run a Jenkins cicd pipeline, to create a docker image based on your project, etc. It’s based on angular schematics, so you can add it as a dependency into your project and generate the code using ng generate. In addition, it has its own CLI for those projects that are not angular based.

+
+
+

What is angular schematics?

+
+
+

Schematics are generators that transform an existing filesystem. They can create files, refactor existing files, or move files from one place to another.

+
+
+

What distinguishes Schematics from other generators, such as Yeoman or Yarn Create, is that schematics are purely descriptive; no changes are applied to the actual filesystem until everything is ready to be committed. There is no side effect, by design, in Schematics.

+
+
+
+
+

cicdgen CLI

+
+
+

In order to know more about how to use the cicdgen CLI, you can check the CLI page

+
+
+
+
+

cicdgen Schematics

+
+
+

In order to know more about how to use the cicdgen schematics, you can check the schematics page

+
+
+
+
+

Usage example

+
+
+

A specific page about how to use cicdgen is also available.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cicdgen.wiki/cicdgen-cli.html b/docs/devonfw.github.io/1.0/cicdgen.wiki/cicdgen-cli.html new file mode 100644 index 00000000..ca64fdb9 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cicdgen.wiki/cicdgen-cli.html @@ -0,0 +1,377 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

CICDGEN CLI

+
+
+

cicdgen is a command line interface that helps you with some CICD in a devonfw project. At this moment we can only generate files related to CICD in a project but we plan to add more functionality in the future.

+
+
+

Installation

+
+
+
$ npm i -g @devonfw/cicdgen
+
+
+
+
+

Usage

+
+

Global arguments

+
+
    +
  • +

    --version

    +
    +
    +
    Prints the cicdgen version number
    +
    +
    +
  • +
  • +

    --help

    +
    +
    +
    Shows the usage of the command
    +
    +
    +
  • +
+
+
+
+

Commands

+
+
Generate.
+
+

This command wraps the usage of the angular schematics CLI. With this we generate files in an easy way and also print better help about usage.

+
+
+

Available schematics that generate the code:

+
+
+ +
+
+
+
+

Examples

+
+
    +
  • +

    Generate all CICD files related to a devon4j project

    +
    +
    +
    $ cicdgen generate devon4j
    +
    +
    +
  • +
  • +

    Generate all CICD files related to a devon4ng project with docker deployment.

    +
    +
    +
    $ cicdgen generate devon4ng --groupid com.devonfw --docker --registryurl docker-registry-devon.s2-eu.capgemini.com
    +
    +
    +
  • +
  • +

    Generate all CICD files related to a devon4node project with OpenShift deployment.

    +
    +
    +
    $ cicdgen generate devon4ng --groupid com.devonfw --openshift --registryurl docker-registry-devon.s2-eu.capgemini.com --ocname default --ocn devonfw
    +
    +
    +
  • +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cicdgen.wiki/cicdgen-schematics.html b/docs/devonfw.github.io/1.0/cicdgen.wiki/cicdgen-schematics.html new file mode 100644 index 00000000..18c69c22 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cicdgen.wiki/cicdgen-schematics.html @@ -0,0 +1,338 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

CICDGEN SCHEMATICS

+
+
+

We use angular schematics to create and update an existing devonfw project in order to adapt it to a CICD environment. All schematics are prepared to work with Production Line, a Capgemini CICD platform, but it can also work in other environment which have the following tools:

+
+
+
    +
  • +

    Jenkins

    +
  • +
  • +

    GitLab

    +
  • +
  • +

    Nexus 3

    +
  • +
  • +

    SonarQube

    +
  • +
+
+
+

The list of available schematics are:

+
+
+ +
+
+

How to run the schematics

+
+

You can run the schematics using the schematics CLI provided by the angular team, but the easiest way to run it is using the cicdgen CLI which is a wrapper for the schematics CLI in order to use it in an easy way.

+
+
+

To generate files you only need to run the command

+
+
+
+
$ cicdgen generate <schematic-name> [arguments]
+
+
+
+

<schematic-name> is the name of the schematic that you want to execute.

+
+
+

You can find all information about arguments in the schematic section.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4j-schematic.html b/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4j-schematic.html new file mode 100644 index 00000000..b9068b0c --- /dev/null +++ b/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4j-schematic.html @@ -0,0 +1,581 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4j schematic

+
+
+

With the cicdgen generate devon4j command you will be able to generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+

devon4j schematic arguments

+
+

When you execute the cicdgen generate devon4j command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for docker (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used to register your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
  • +

    --merge

    +
    +

    If you have used cicdgen previously, you can choose what you want to do in case of file conflict. The default behavior is to throw an error and not modify any file. You can see the other strategies on their specific page.

    +
    +
  • +
  • +

    --commit

    +
    +

    If true, all changes will be committed at the end of the process (if possible). In order to send a false value, you need to write --commit=false

    +
    +
  • +
+
+
+
+

Devon4ng generated files

+
+

When you execute the generate devon4ng command, some files will be added/updated in your project.

+
+
+

Files

+
+
    +
  • +

    .gitignore

    +
    +

    Defines all files that git will ignore. e.g: compiled files, IDE configurations. It will download the content from: https://gitignore.io/api/java,maven,eclipse,intellij,intellij+all,intellij+iml,visualstudiocode

    +
    +
  • +
  • +

    pom.xml

    +
    +

    The pom.xml is modified in order to add, if needed, the distributionManagement.

    +
    +
  • +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which define the Jenkins pipeline of our project. With this we can execute the test, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        Java 11 installed in Jenkins as a global tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker to deploy:

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        • +

          A docker network called application.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift to deploy:

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkins, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options to the pipeline. By default, we add a build discarded to delete old artifacts/build of the pipeline and also we disable the concurrent builds.

        +
        +

        If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update them manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: Load some custom tools that can not be loaded in the tools section. Also set some variables depending on the git branch which you are executing. Also, we set properly the version number in all pom files. It means that if your branch is develop, your version should end with the word -SNAPSHOT; otherwise, if -SNAPSHOT is present it will be removed.

          +
        • +
        • +

          Fresh Dependency Installation: install all packages needed to build/run your java project.

          +
        • +
        • +

          Unit Tests: execute the mvn test command.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Deliver application into Nexus: build the project and send all bundle files to Nexus3.

          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
            +

            +
            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+

devon4j Docker generated files

+
+

When you generate the files for a devon4ng you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4node --groupid com.devonfw --docker --registryurl docker-registry-test.s2-eu.capgemini.com. +
+
+
+

Files

+
+
    +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
    +

    This build is using a multi-stage build. First, it uses a maven image in order to compile the source code, then it uses a java image to run the application. With the multi-stage build we keep the final image as clean as possible.

    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling the code again, it takes the compiled war from Jenkins into the image.

    +
    +
  • +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4j/devon4j-schematic.html b/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4j/devon4j-schematic.html new file mode 100644 index 00000000..2d109bd0 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4j/devon4j-schematic.html @@ -0,0 +1,557 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Devon4j schematic

+
+
+

With the cicdgen generate devon4j command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+

Devon4j schematic arguments

+
+

When you execute the cicdgen generate devon4j command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --plurl

    +
    +

    Url of Production Line. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for docker (same as --docker)

    +
    +
  • +
  • +

    --ocurl

    +
    +

    OpenShift cluster url where the application will be builded and deployed.

    +
    +
  • +
  • +

    --ocn

    +
    +

    Openshift cluster namespace

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
+
+
+
+

Devon4ng generated files

+
+

When you execute the generate devon4ng command, some files will be added/updated in your project.

+
+
+

Files

+
+
    +
  • +

    .gitignore

    +
    +

    Defines all files that git will ignore. e.g: compiled files, IDE configurations.

    +
    +
  • +
  • +

    pom.xml

    +
    +

    The pom.xml is modified in order to add the distributionManagement.

    +
    +
  • +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which define the Jenkins pipeline of our project. With this we can execute the test, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        Java installed in Jenkins as a global tool.

        +
      • +
      • +

        Google Chrome installed in Jenkins as a global custom tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker to deploy:

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        • +

          A docker network called application.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift to deploy:

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkins, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options to the pipeline. By default, we add a build discarder to delete old artifacts/builds of the pipeline and also we disable the concurrent builds.

        +
        +

        If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update them manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Setup pipeline: We set some variables depending on the git branch which you are executing. Also, we set properly the version number in all pom files. It means that if your branch is develop, your version should end with the word -SNAPSHOT; otherwise, if -SNAPSHOT is present it will be removed.

          +
        • +
        • +

          Fresh Dependency Installation: install all packages needed to build/run your java project.

          +
        • +
        • +

          Unit Tests: execute the mvn test command.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Deliver application into Nexus: build the project and send all bundle files to Nexus3.

          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+

Devon4j Docker generated files

+
+

When you generate the files for a devon4ng you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --plurl is also required. It will be used to upload the images to the Nexus3 inside Production Line. Example: if your PL url is test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4ng --groupid com.devonfw --docker --plurl test.s2-eu.capgemini.com, and it will use docker-registry-test.s2-eu.capgemini.com as docker registry. +
+
+
+

Files

+
+
    +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for you project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
    +

    This build is using a multi-stage build. First, it use a maven image in order to compile the source code, then it will use a java image to run the application. With the multi-stage build we keep the final image as clean as possible.

    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for you project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling again the code, it takes the compiled war from Jenkins to the image.

    +
    +
  • +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4net-schematic.html b/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4net-schematic.html new file mode 100644 index 00000000..51925f7b --- /dev/null +++ b/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4net-schematic.html @@ -0,0 +1,597 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4net schematic

+
+
+

With the cicdgen generate devon4net command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+

devon4net schematic arguments

+
+

When you execute the cicdgen generate devon4net command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --appname

    +
    +

    The name of your devon4net application.

    +
    +
  • +
  • +

    --appversion

    +
    +

    The initial version of your devon4net application

    +
    +
  • +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for OpenShift (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used to register your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --groupid

    +
    +

    The project groupId. This argument is required. It will be used to store the project in a maven repository at Nexus 3. Why maven? Because it is the kind of repository where we can upload/download a zip file easily. An npm repository needs a package.json file but, as we compile the angular application to static javascript and html files, the package.json is not needed anymore.

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
  • +

    --merge

    +
    +

    If you have used cicdgen previously, you can choose what you want to do in case of file conflict. The default behavior is to throw an error and not modify any file. You can see the other strategies on their specific page.

    +
    +
  • +
  • +

    --commit

    +
    +

    If true, all changes will be committed at the end of the process (if possible). In order to send a false value, you need to write --commit=false

    +
    +
  • +
+
+
+
+

devon4net generated files

+
+

When you execute the generate devon4net command, some files will be added/updated in your project.

+
+
+

Files

+
+
    +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which define the Jenkins pipeline of our project. With this we can execute the test, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        dotnet core installed in Jenkins as a global tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker :

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift :

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkinsfile, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarder to delete old artifacts/builds of the pipeline and also we disable the concurrent builds.

        +
        +

        [[jenkinsfile-teams]] +If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        tools: Here we define the global tools configurations. By default a version of nodejs is added here.

        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match with the Production Line defaults. If your Jenkins has other values, you need to update them manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: in this stage some custom tools are loaded. Also we set some variables depending on the git branch which you are executing.

          +
        • +
        • +

          Fresh Dependency Installation: install all dependencies needed to build/run your dotnet project.

          +
        • +
        • +

          Execute dotnet tests: execute the tests.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Build Application: compile the application to be ready to deploy in a web server.

          +
        • +
        • +

          Deliver application into Nexus: store all compiled files in Nexus3 as a zip file.

          +
          +

          [[jenkinsfile-docker]]

          +
          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
            +

            [[jenkinsfile-openshift]]

            +
            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+

devon4net Docker generated files

+
+

When you generate the files for devon4net you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4net --groupid com.devonfw --docker `--registryurl docker-registry-test.s2-eu.capgemini.com`. +
+
+
+

Files

+
+
    +
  • +

    .dockerignore

    +
    +

    In this file are defined the folders that will not be copied to the docker image. For more information read the official documentation.

    +
    +
  • +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling the code again, it takes all compiled files from Jenkins to the image.

    +
    +
  • +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4net.html b/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4net.html new file mode 100644 index 00000000..bcf0d6e1 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4net.html @@ -0,0 +1,578 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4net schematic

+
+
+

With the cicdgen generate devon4net command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+

devon4net schematic arguments

+
+

When you execute the cicdgen generate devon4net command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --appname

    +
    +

    The name of your devon4net application.

    +
    +
  • +
  • +

    --appversion

    +
    +

    The initial version of your devon4net application

    +
    +
  • +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for OpenShift (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used to register your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --groupid

    +
    +

    The project groupId. This argument is required. It will be used to store the project in a maven repository at Nexus 3. Why maven? Because it is the kind of repository where we can upload/download a zip file easily. An npm repository needs a package.json file but, as we compile the angular application to static javascript and html files, the package.json is not needed anymore.

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
+
+
+
+

devon4net generated files

+
+

When you execute the generate devon4net command, some files will be added/updated in your project.

+
+
+

Files

+
+
    +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which define the Jenkins pipeline of our project. With this we can execute the test, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        dotnet core installed in Jenkins as a global tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker :

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift :

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkinsfile, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarder to delete old artifacts/builds of the pipeline and also we disable the concurrent builds.

        +
        +

        If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        tools: Here we define the global tools configurations. By default a version of nodejs is added here.

        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match with the Production Line defaults. If your Jenkins has other values, you need to update them manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: in this stage some custom tools are loaded. Also we set some variables depending on the git branch which you are executing.

          +
        • +
        • +

          Fresh Dependency Installation: install all dependencies needed to build/run your dotnet project.

          +
        • +
        • +

          Execute dotnet tests: execute the tests.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Build Application: compile the application to be ready to deploy in a web server.

          +
        • +
        • +

          Deliver application into Nexus: store all compiled files in Nexus3 as a zip file.

          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+

devon4net Docker generated files

+
+

When you generate the files for devon4net you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4net --groupid com.devonfw --docker --registryurl docker-registry-test.s2-eu.capgemini.com. +
+
+
+

Files

+
+
    +
  • +

    .dockerignore

    +
    +

    In this file are defined the folders that will not be copied to the docker image. For more information read the official documentation.

    +
    +
  • +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling the code again, it takes all compiled files from Jenkins to the image.

    +
    +
  • +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4ng-schematic.html b/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4ng-schematic.html new file mode 100644 index 00000000..78610c2a --- /dev/null +++ b/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4ng-schematic.html @@ -0,0 +1,615 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4ng schematic

+
+
+

With the cicdgen generate devon4ng command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+

devon4ng schematic arguments

+
+

When you execute the cicdgen generate devon4ng command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for OpenShift (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used to register your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --groupid

    +
    +

    The project groupId. This argument is required. It will be used to store the project in a maven repository at Nexus 3. Why maven? Because it is the kind of repository where we can upload/download a zip file easily. An npm repository needs a package.json file but, as we compile the angular application to static javascript and html files, the package.json is not needed anymore.

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
  • +

    --merge

    +
    +

    If you have used cicdgen previously, you can choose what you want to do in case of file conflict. The default behavior is to throw an error and not modify any file. You can see the other strategies on their specific page.

    +
    +
  • +
  • +

    --commit

    +
    +

    If true, all changes will be committed at the end of the process (if possible). In order to send a false value, you need to write --commit=false

    +
    +
  • +
+
+
+
+

devon4ng generated files

+
+

When you execute the generate devon4ng command, some files will be added/updated in your project.

+
+
+

Files

+
+
    +
  • +

    angular.json

    +
    +

    The angular.json is modified in order to change the compiled files destination folder. Now, when you make a build of your project, the compiled files will be generated into dist folder instead of dist/<project-name> folder.

    +
    +
  • +
  • +

    package.json

    +
    +

    The package.json is modified in order to add a script for test the application using Chrome Headless instead of a regular chrome. This script is called test:ci.

    +
    +
  • +
  • +

    karma.conf.js

    +
    +

    The karma.conf.js is also modified in order to add Chrome Headless as a browser to execute tests. The coverage output folder is changed to ./coverage instead of ./coverage/<project-name>

    +
    +
  • +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which define the Jenkins pipeline of our project. With this we can execute the test, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        NodeJS installed in Jenkins as a global tool.

        +
      • +
      • +

        Google Chrome installed in Jenkins as a global custom tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker :

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        • +

          A docker network called application.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift :

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkinsfile, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarder to delete old artifacts/builds of the pipeline and also we disable the concurrent builds.

        +
        +

        [[jenkinsfile-teams]] +If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        tools: Here we define the global tools configurations. By default a version of nodejs is added here.

        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match with the Production Line defaults. If your Jenkins has other values, you need to update them manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: in this stage some custom tools are loaded. Also we set some variables depending on the git branch which you are executing.

          +
        • +
        • +

          Fresh Dependency Installation: install all packages needed to build/run your angular project.

          +
        • +
        • +

          Code Linting: execute the linter analysis.

          +
        • +
        • +

          Execute Angular tests: execute the angular test in a Chrome Headless.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Build Application: compile the application to be ready to deploy in a web server.

          +
        • +
        • +

          Deliver application into Nexus: store all compiled files in Nexus3 as a zip file.

          +
          +

          [[jenkinsfile-docker]]

          +
          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
            +

            [[jenkinsfile-openshift]]

            +
            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+

devon4ng Docker generated files

+
+

When you generate the files for a devon4ng you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4node --groupid com.devonfw --docker `--registryurl docker-registry-test.s2-eu.capgemini.com`. +
+
+
+

Files

+
+
    +
  • +

    .dockerignore

    +
    +

    In this file are defined the folders that will not be copied to the docker image. For more information read the official documentation.

    +
    +
  • +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
    +

    This build is using a multi-stage build. First, it uses a node image in order to compile the source code, then it will use a nginx image as a web server for our devon4ng application. With the multi-stage build we avoid everything related to node.js in our final image, where we only have a nginx with our application compiled.

    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling the code again, it takes all compiled files and the nginx.conf from Jenkins to the image.

    +
    +
  • +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4ng/devon4ng-schematic.html b/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4ng/devon4ng-schematic.html new file mode 100644 index 00000000..5863b9e0 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4ng/devon4ng-schematic.html @@ -0,0 +1,590 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Devon4ng schematic

+
+
+

With the cicdgen generate devon4ng command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+

Devon4ng schematic arguments

+
+

When you execute the cicdgen generate devon4ng command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --plurl

    +
    +

    Url of Production Line. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for OpenShift (same as --docker)

    +
    +
  • +
  • +

    --ocurl

    +
    +

    OpenShift cluster url where the application will be built and deployed.

    +
    +
  • +
  • +

    --ocn

    +
    +

    Openshift cluster namespace

    +
    +
  • +
  • +

    --groupid

    +
    +

    The project groupId. This argument is required. It will be used to store the project in a maven repository at Nexus 3. Why maven? Because it is the kind of repository where we can upload/download a zip file easily. An npm repository needs a package.json file but, as we compile the angular application to static javascript and html files, the package.json is not needed anymore.

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
+
+
+
+

Devon4ng generated files

+
+

When you execute the generate devon4ng command, some files will be added/updated in your project.

+
+
+

Files

+
+
    +
  • +

    angular.json

    +
    +

    The angular.json is modified in order to change the compiled files destination folder. Now, when you make a build of your project, the compiled files will be generated into dist folder instead of dist/<project-name> folder.

    +
    +
  • +
  • +

    package.json

    +
    +

    The package.json is modified in order to add a script to test the application using Chrome Headless instead of a regular Chrome. This script is called test:ci.

    +
    +
  • +
  • +

    karma.conf.js

    +
    +

    The karma.conf.js is also modified in order to add the Chrome Headless as a browser to execute tests. The coverage output folder is changed to ./coverage instead of ./coverage/<project-name>

    +
    +
  • +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which defines the Jenkins pipeline of our project. With this we can execute the tests, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        NodeJS installed in Jenkins as a global tool.

        +
      • +
      • +

        Google Chrome installed in Jenkins as a global custom tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker :

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        • +

          A docker network called application.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift :

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkins, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarder to delete old artifacts/builds of the pipeline and also we disable the concurrent builds.

        +
        +

        If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        tools: Here we define the global tools configurations. By default a version of nodejs is added here.

        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update it manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: in this stage some custom tools are loaded. Also we set some variables depending on the git branch which you are executing.

          +
        • +
        • +

          Fresh Dependency Installation: install all packages needed to build/run your angular project.

          +
        • +
        • +

          Code Linting: execute the linter analysis.

          +
        • +
        • +

          Execute Angular tests: execute the angular test in a Chrome Headless.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Build Application: compile the application to be ready to deploy in a web server.

          +
        • +
        • +

          Deliver application into Nexus: store all compiled files in Nexus3 as a zip file.

          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+

Devon4ng Docker generated files

+
+

When you generate the files for a devon4ng project you can also pass the option --docker. It will also generate some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --plurl is also required. It will be used to upload the images to the Nexus3 inside Production Line. Example: if your PL url is test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4ng --groupid com.devonfw --docker --plurl test.s2-eu.capgemini.com, and it will use docker-registry-test.s2-eu.capgemini.com as docker registry. +
+
+
+

Files

+
+
    +
  • +

    .dockerignore

    +
    +

    This file defines the folders that will not be copied to the docker image. For more information read the official documentation.

    +
    +
  • +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
    +

    This build is using a multi-stage build. First, it uses a node image in order to compile the source code, then it will use an nginx image as a web server for our devon4ng application. With the multi-stage build we avoid everything related to node.js in our final image, where we only have an nginx with our application compiled.

    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling the code again, it takes all compiled files and the nginx.conf from Jenkins to the image.

    +
    +
  • +
  • +

    nginx.conf

    +
    +

    Configuration file for our nginx server. It defines the root folder of our application where docker copies the files to. Also it defines a fallback route to the index as described in the angular deployment guide in order to enable the angular routes.

    +
    +
  • +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4node-schematic.html b/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4node-schematic.html new file mode 100644 index 00000000..a942e964 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4node-schematic.html @@ -0,0 +1,603 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4node schematic

+
+
+

With the cicdgen generate devon4node command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+

devon4node schematic arguments

+
+

When you execute the cicdgen generate devon4node command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for OpenShift (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used to register your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --groupid

    +
    +

    The project groupId. This argument is required. It will be used to store the project in a maven repository at Nexus 3. Why maven? Because it is the kind of repository where we can upload/download a zip file easily. An npm repository needs a package.json file but, as we compile the angular application to static javascript and html files, the package.json is not needed anymore.

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
  • +

    --merge

    +
    +

    If you have used cicdgen previously, you can choose what you want to do in case of file conflict. The default behavior is to throw an error and not modify any file. You can see the other strategies on their specific page.

    +
    +
  • +
  • +

    --commit

    +
    +

    If true, all changes will be committed at the end of the process (if possible). In order to send a false value, you need to write --commit=false

    +
    +
  • +
+
+
+
+

devon4node generated files

+
+

When you execute the generate devon4node command, some files will be added/updated in your project.

+
+
+

Files

+
+
    +
  • +

    package.json

    +
    +

    The package.json is modified in order to add a script to run the linter and generate the json report. This script is called lint:ci.

    +
    +
  • +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which defines the Jenkins pipeline of our project. With this we can execute the tests, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        NodeJS installed in Jenkins as a global tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker :

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift :

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkins, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarder to delete old artifacts/builds of the pipeline and also we disable the concurrent builds.

        +
        +

        [[jenkinsfile-teams]] +If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        tools: Here we define the global tools configurations. By default a version of nodejs is added here.

        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update it manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: in this stage some custom tools are loaded. Also we set some variables depending on the git branch which you are executing.

          +
        • +
        • +

          Fresh Dependency Installation: install all packages needed to build/run your node project.

          +
        • +
        • +

          Code Linting: execute the linter analysis.

          +
        • +
        • +

          Execute tests: execute the tests.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Build Application: compile the application to be ready to deploy in a web server.

          +
        • +
        • +

          Deliver application into Nexus: store all compiled files in Nexus3 as a zip file.

          +
          +

          [[jenkinsfile-docker]]

          +
          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
            +

            [[jenkinsfile-openshift]]

            +
            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+

devon4node Docker generated files

+
+

When you generate the files for a devon4node project you can also pass the option --docker. It will also generate some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4node --groupid com.devonfw --docker `--registryurl docker-registry-test.s2-eu.capgemini.com`. +
+
+
+

Files

+
+
    +
  • +

    .dockerignore

    +
    +

    This file defines the folders that will not be copied to the docker image. For more information read the official documentation.

    +
    +
  • +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
    +

    This build installs all dependencies in order to build the project and then removes all devDependencies in order to keep only the production dependencies.

    +
    +
  • +
  • +

    .dockerignore.ci

    +
    +

    Another .dockerignore. The purpose of this one is to define the file exclusions in your CI pipeline.

    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling the code again, it takes all compiled files from Jenkins to the image.

    +
    +
  • +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4node/devon4node-schematic.html b/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4node/devon4node-schematic.html new file mode 100644 index 00000000..883c5a37 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cicdgen.wiki/devon4node/devon4node-schematic.html @@ -0,0 +1,578 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Devon4node schematic

+
+
+

With the cicdgen generate devon4node command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+

Devon4node schematic arguments

+
+

When you execute the cicdgen generate devon4node command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --plurl

    +
    +

    Url of Production Line. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for OpenShift (same as --docker)

    +
    +
  • +
  • +

    --ocurl

    +
    +

    OpenShift cluster url where the application will be built and deployed.

    +
    +
  • +
  • +

    --ocn

    +
    +

    Openshift cluster namespace

    +
    +
  • +
  • +

    --groupid

    +
    +

    The project groupId. This argument is required. It will be used to store the project in a maven repository at Nexus 3. Why maven? Because it is the kind of repository where we can upload/download a zip file easily. An npm repository needs a package.json file but, as we compile the angular application to static javascript and html files, the package.json is not needed anymore.

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
+
+
+
+

Devon4node generated files

+
+

When you execute the generate devon4node command, some files will be added/updated in your project.

+
+
+

Files

+
+
    +
  • +

    package.json

    +
    +

    The package.json is modified in order to add a script to run the application in a docker container. It is necessary because we change a little bit the folder structure when we put all files in a docker image, so the script start:prod does not work.

    +
    +
  • +
  • +

    .gitignore

    +
    +

    Defines all files that git will ignore. e.g: compiled files, IDE configurations.

    +
    +
  • +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which defines the Jenkins pipeline of our project. With this we can execute the tests, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        NodeJS installed in Jenkins as a global tool.

        +
      • +
      • +

        Google Chrome installed in Jenkins as a global custom tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker :

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        • +

          A docker network called application.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift :

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkins, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarder to delete old artifacts/builds of the pipeline and also we disable the concurrent builds.

        +
        +

        If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        tools: Here we define the global tools configurations. By default a version of nodejs is added here.

        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update it manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: in this stage some custom tools are loaded. Also we set some variables depending on the git branch which you are executing.

          +
        • +
        • +

          Fresh Dependency Installation: install all packages needed to build/run your node project.

          +
        • +
        • +

          Code Linting: execute the linter analysis.

          +
        • +
        • +

          Execute tests: execute the tests.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Build Application: compile the application to be ready to deploy in a web server.

          +
        • +
        • +

          Deliver application into Nexus: store all compiled files in Nexus3 as a zip file.

          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+

Devon4node Docker generated files

+
+

When you generate the files for a devon4node project you can also pass the option --docker. It will also generate some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --plurl is also required. It will be used to upload the images to the Nexus3 inside Production Line. Example: if your PL url is test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4node --groupid com.devonfw --docker --plurl test.s2-eu.capgemini.com, and it will use docker-registry-test.s2-eu.capgemini.com as docker registry. +
+
+
+

Files

+
+
    +
  • +

    .dockerignore

    +
    +

    This file defines the folders that will not be copied to the docker image. For more information read the official documentation.

    +
    +
  • +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
    +

    This build installs all dependencies in order to build the project and then removes all devDependencies in order to keep only the production dependencies.

    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling again the code, it takes all compiled files from Jenkins to the image.

    +
    +
  • +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cicdgen.wiki/master-cicdgen.html b/docs/devonfw.github.io/1.0/cicdgen.wiki/master-cicdgen.html new file mode 100644 index 00000000..b6e349d6 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cicdgen.wiki/master-cicdgen.html @@ -0,0 +1,2090 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==cicdgen

+
+ +
+

==CICDGEN

+
+
+

cicdgen is a devonfw tool for generating all code/files related to CICD. It will include/modify into your project all files that the project needs to run a Jenkins CICD pipeline, to create a docker image based on your project, etc. It’s based on angular schematics, so you can add it as a dependency into your project and generate the code using ng generate. In addition, it has its own CLI for those projects that are not angular based.

+
+
+

What is angular schematics?

+
+

Schematics are generators that transform an existing filesystem. They can create files, refactor existing files, or move files from one place to another.

+
+
+

What distinguishes Schematics from other generators, such as Yeoman or Yarn Create, is that schematics are purely descriptive; no changes are applied to the actual filesystem until everything is ready to be committed. There is no side effect, by design, in Schematics.

+
+
+
+

cicdgen CLI

+
+

In order to know more about how to use the cicdgen CLI, you can check the CLI page

+
+
+
+

cicdgen Schematics

+
+

In order to know more about how to use the cicdgen schematics, you can check the schematics page

+
+
+
+

Usage example

+
+

A specific page about how to use cicdgen is also available.

+
+
+
+

cicdgen CLI

+
+ +
+
CICDGEN CLI
+
+

cicdgen is a command line interface that helps you with some CICD tasks in a devonfw project. At this moment we can only generate files related to CICD in a project, but we plan to add more functionality in the future.

+
+
+
Installation
+
+
+
$ npm i -g @devonfw/cicdgen
+
+
+
+
+
Usage
+
+Global arguments +
+
    +
  • +

    --version

    +
    +
    +
    Prints the cicdgen version number
    +
    +
    +
  • +
  • +

    --help

    +
    +
    +
    Shows the usage of the command
    +
    +
    +
  • +
+
+
+
+Commands +
+Generate. +
+

This command wraps the usage of the angular schematics CLI. With this we generate files in an easy way and also print better help about usage.

+
+
+

Available schematics that generate the code:

+
+
+ +
+
+
+
+Examples +
+
    +
  • +

    Generate all CICD files related to a devon4j project

    +
    +
    +
    $ cicdgen generate devon4j
    +
    +
    +
  • +
  • +

    Generate all CICD files related to a devon4ng project with docker deployment.

    +
    +
    +
    $ cicdgen generate devon4ng --groupid com.devonfw --docker --registryurl docker-registry-devon.s2-eu.capgemini.com
    +
    +
    +
  • +
  • +

    Generate all CICD files related to a devon4node project with OpenShift deployment.

    +
    +
    +
    $ cicdgen generate devon4node --groupid com.devonfw --openshift --registryurl docker-registry-devon.s2-eu.capgemini.com --ocname default --ocn devonfw
    +
    +
    +
  • +
+
+ +
+
+
+
+
cicdgen usage example
+
+

In this example we are going to show how to use cicdgen step by step in a devon4ng project.

+
+
+
    +
  1. +

    Install cicdgen

    +
    +

    cicdgen is already included in the devonfw distribution, but if you want to use it outside the devonfw console you can execute the following command:

    +
    +
    +
    +
    $ npm i -g @devonfw/cicdgen
    +
    +
    +
  2. +
  3. +

    Generate a new devon4ng project using devonfw ide.

    +
    +

    Inside a devonfw ide distribution execute the command (devon ng create <app-name>):

    +
    +
    +
    +
    $ devon ng create devon4ng
    +
    +
    +
  4. +
  5. +

    Execute cicdgen generate command

    +
    +

    As we want to send notifications to MS Teams, we need to create the connector first:

    +
    +
    +
    +
    +
      +
    • +

      Go to a channel in teams and click at the connectors button. Then click at the jenkins configure button.

      +
      +

      teams 1

      +
      +
    • +
    • +

      Put a name for the connector

      +
      +

      teams 2

      +
      +
    • +
    • +

      Copy the name and the Webhook URL, we will use it later.

      +
      +

      teams 3

      +
      +
    • +
    +
    +
    +
    +
    +

    With the values that we get in the previous steps, we will execute the cicdgen command inside the project folder. If you have any doubt you can use the help.

    +
    +
    +

    help 1

    +
    +
    +

    help 2

    +
    +
    +
    +
    $ cicdgen generate devon4ng --groupid com.devonfw --docker --dockerurl tcp://127.0.0.1:2376 `--registryurl docker-registry-devon.s2-eu.capgemini.com --teams --teamsname devon4ng --teamsurl https://outlook.office.com/webhook/...`
    +
    +
    +
    +

    cicdgen command

    +
    +
  6. +
  7. +

    Create a git repository and upload the code

    +
    +

    gitlab

    +
    +
    +

    gitlab 2

    +
    +
    +
    +
    $ git remote add origin https://devon.s2-eu.capgemini.com/gitlab/darrodri/devon4ng.git
    +$ git push -u origin master
    +
    +
    +
    +

    push code

    +
    +
    +

    As you can see, no git init or git commit is required, cicdgen does it for you.

    +
    +
  8. +
  9. +

    Create a multibranch-pipeline in Jenkins

    +
    +

    new pipeline

    +
    +
    +

    When you push the save button, it will download the repository and execute the pipeline defined in the Jenkinsfile. If you get any problem, check the environment variables defined in the Jenkinsfile. Here we show all variables related with Jenkins:

    +
    +
    +
    +
    +
      +
    • +

      chrome

      +
      +

      chrome stable

      +
      +
    • +
    • +

      sonarTool

      +
      +

      sonar tool

      +
      +
    • +
    • +

      sonarEnv

      +
      +

      sonar env

      +
      +
    • +
    • +

      repositoryId

      +
      +

      repository id

      +
      +
    • +
    • +

      globalSettingsId

      +
      +

      global settings id

      +
      +
    • +
    • +

      mavenInstallation

      +
      +

      maven installation

      +
      +
    • +
    • +

      dockerTool

      +
      +

      docker global

      +
      +
    • +
    +
    +
    +
    +
  10. +
  11. +

    Add a webhook in GitLab

    +
    +

    In order to run the pipeline every time that you push code to GitLab, you need to configure a webhook in your repository.

    +
    +
    +

    gitlab webhook

    +
    +
  12. +
+
+
+

Now your project is ready to work following a CICD strategy.

+
+
+

The last thing to take into account is the branch naming. We prepare the pipeline in order to work following the git-flow strategy. So all stages of the pipeline will be executed for the branches: develop, release/*, master. For the branches: feature/*, hotfix/*, bugfix/* only the steps related to unit testing will be executed.

+
+
+
+
+
+

cicdgen Schematics

+
+ +
+
CICDGEN SCHEMATICS
+
+

We use angular schematics to create and update an existing devonfw project in order to adapt it to a CICD environment. All schematics are prepared to work with Production Line, a Capgemini CICD platform, but they can also work in other environments which have the following tools:

+
+
+
    +
  • +

    Jenkins

    +
  • +
  • +

    GitLab

    +
  • +
  • +

    Nexus 3

    +
  • +
  • +

    SonarQube

    +
  • +
+
+
+

The list of available schematics are:

+
+
+ +
+
+
How to run the schematics
+
+

You can run the schematics using the schematics CLI provided by the angular team, but the easiest way to run them is using the cicdgen CLI, which is a wrapper for the schematics CLI in order to use it in an easy way.

+
+
+

To generate files you only need to run the command

+
+
+
+
$ cicdgen generate <schematic-name> [arguments]
+
+
+
+

<schematic-name> is the name of the schematic that you want to execute.

+
+
+

You can find all information about arguments in the schematic section.

+
+ +
+
+
Merge Strategies
+
+

When you execute cicdgen in a project, it is possible that you already have some files that cicdgen will generate. Until version 1.5 the behaviour in these cases was to throw an error and not create/modify any file. Since version 1.6 you can choose what to do in case of conflict. In this page we will explain how to choose a merge strategy and how it works.

+
+
+Choose a merge strategy +
+

To choose a merge strategy, you must pass to cicdgen the merge parameter followed by the name of the strategy. The strategies available are: error, keep, override, combine.

+
+
+

Example:

+
+
+
+
$ cicdgen generate devon4j --merge keep
+
+
+
+
+Merge strategies +
+
    +
  • +

    error: The error strategy is the same as until version 1.5: throwing an error and not creating/modifying any file. This is the default value; if you do not pass the merge parameter this value will be taken.

    +
  • +
  • +

    keep: The keep strategy will keep the actual content of your files in case of conflict. If there is no conflict, the file will be created with the new content.

    +
  • +
  • +

    override: The override strategy will override your current files, without throwing any error, and create a new ones with the new content. If there is no conflict, the file will be created with the new content.

    +
  • +
  • +

    combine: The combine strategy will create a new file combining the current content with the new content. In order to combine both files, it will apply a diff algorithm and it will show the conflicts in the same way that git does. If there is no conflict, the file will be created with the new content.

    +
    +

    By resolving the conflicts in the same way as git, you can use the same tools in order to solve them. For example, you can use VSCode:

    +
    +
    +

    merge combine vscode

    +
    +
  • +
+
+
+

Examples:

+
+
+

keep +Current file:

+
+
+
+
Line 1
+Line 2
+Line 3
+Line 4
+
+
+
+

New file:

+
+
+
+
Line 5
+Line 2
+Line 3
+Line 4
+
+
+
+

The result will be:

+
+
+
+
Line 1
+Line 2
+Line 3
+Line 4
+
+
+
+

override +Current file:

+
+
+
+
Line 1
+Line 2
+Line 3
+Line 4
+
+
+
+

New file:

+
+
+
+
Line 5
+Line 2
+Line 3
+Line 4
+
+
+
+

The result will be:

+
+
+
+
Line 5
+Line 2
+Line 3
+Line 4
+
+
+
+

combine +Current file:

+
+
+
+
Line 1
+Line 2
+Line 3
+Line 4
+
+
+
+

New file:

+
+
+
+
Line 5
+Line 2
+Line 3
+Line 4
+
+
+
+

The result will be:

+
+
+
+
<<<<<<< HEAD
+Line 1
+=======
+Line 5
+>>>>>>> new_content
+Line 2
+Line 3
+Line 4
+
+
+ +
+
+
+
+
devon4j schematic
+
+

With the cicdgen generate devon4j command you will be able to generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+
devon4j schematic arguments
+
+

When you execute the cicdgen generate devon4j command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will also be generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will also be generated. For more details see OpenShift section of Jenkinsfile and files generated for docker (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used to register your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
  • +

    --merge

    +
    +

    If you have used cicdgen previously, you can choose what you want to do in case of file conflict. The default behavior is to throw an error and not modify any file. You can see the other strategies on their specific page.

    +
    +
  • +
  • +

    --commit

    +
    +

    If true, all changes will be committed at the end of the process (if possible). In order to send a false value, you need to write --commit=false

    +
    +
  • +
+
+
+
+
Devon4ng generated files
+
+

When you execute the generate devon4j command, some files will be added/updated in your project.

+
+
+Files +
+
    +
  • +

    .gitignore

    +
    +

    Defines all files that git will ignore. e.g: compiled files, IDE configurations. It will download the content from: https://gitignore.io/api/java,maven,eclipse,intellij,intellij+all,intellij+iml,visualstudiocode

    +
    +
  • +
  • +

    pom.xml

    +
    +

    The pom.xml is modified in order to add, if needed, the distributionManagement.

    +
    +
  • +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which defines the Jenkins pipeline of our project. With this we can execute the test, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        Java 11 installed in Jenkins as a global tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker to deploy:

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        • +

          A docker network called application.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift to deploy:

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit of the syntax of the Jenkinsfile, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options to the pipeline. By default, we add a build discarded to delete old artifacts/build of the pipeline and also we disable the concurrent builds.

        +
        +

        If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update them manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: Load some custom tools that can not be loaded in the tools section. Also set some variables depending on the git branch which you are executing. Also, we set properly the version number in all pom files. It means that if your branch is develop, your version should end with the word -SNAPSHOT; otherwise, if -SNAPSHOT is present it will be removed.

          +
        • +
        • +

          Fresh Dependency Installation: install all packages needed to build/run your java project.

          +
        • +
        • +

          Unit Tests: execute the mvn test command.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Deliver application into Nexus: build the project and send all bundle files to Nexus3.

          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
            +

            +
            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+
devon4j Docker generated files
+
+

When you generate the files for a devon4j project you can also pass the option --docker. It will also generate some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4j --groupid com.devonfw --docker --registryurl docker-registry-test.s2-eu.capgemini.com. +
+
+
+Files +
+
    +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for you project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
    +

    This build is using a multi-stage build. First, it uses a maven image in order to compile the source code, then it will use a java image to run the application. With the multi-stage build we keep the final image as clean as possible.

    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling again the code, it takes the compiled war from Jenkins to the image.

    +
    +
  • +
+
+ +
+
+
+
+
devon4ng schematic
+
+

With the cicdgen generate devon4ng command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+
devon4ng schematic arguments
+
+

When you execute the cicdgen generate devon4ng command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will also be generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will also be generated. For more details see OpenShift section of Jenkinsfile and files generated for OpenShift (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used to register your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --groupid

    +
    +

    The project groupId. This argument is required. It will be used to store the project in a maven repository at Nexus 3. Why maven? Because it is the kind of repository where we can upload/download a zip file easily. An npm repository needs a package.json file but, as we compile the angular application to static javascript and html files, the package.json is not needed anymore.

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
  • +

    --merge

    +
    +

    If you have used cicdgen previously, you can choose what you want to do in case of file conflict. The default behavior is to throw an error and not modify any file. You can see the other strategies on their specific page.

    +
    +
  • +
  • +

    --commit

    +
    +

    If true, all changes will be committed at the end of the process (if possible). In order to send a false value, you need to write --commit=false

    +
    +
  • +
+
+
+
+
devon4ng generated files
+
+

When you execute the generate devon4ng command, some files will be added/updated in your project.

+
+
+Files +
+
    +
  • +

    angular.json

    +
    +

    The angular.json is modified in order to change the compiled files destination folder. Now, when you make a build of your project, the compiled files will be generated into dist folder instead of dist/<project-name> folder.

    +
    +
  • +
  • +

    package.json

    +
    +

    The package.json is modified in order to add a script to test the application using Chrome Headless instead of a regular Chrome. This script is called test:ci.

    +
    +
  • +
  • +

    karma.conf.js

    +
    +

    The karma.conf.js is also modified in order to add Chrome Headless as a browser to execute tests. The coverage output folder is changed to ./coverage instead of ./coverage/<project-name>

    +
    +
  • +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which defines the Jenkins pipeline of our project. With this we can execute the test, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        NodeJS installed in Jenkins as a global tool.

        +
      • +
      • +

        Google Chrome installed in Jenkins as a global custom tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker :

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        • +

          A docker network called application.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift :

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit of the syntax of the Jenkinsfile, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarded to delete old artifacts/build of the pipeline and also we disable the concurrent builds.

        +
        +

        [[jenkinsfile-teams]] +If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        tools: Here we define the global tools configurations. By default a version of nodejs is added here.

        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update them manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: in this stage some custom tools are loaded. Also we set some variables depending on the git branch which you are executing.

          +
        • +
        • +

          Fresh Dependency Installation: install all packages needed to build/run your angular project.

          +
        • +
        • +

          Code Linting: execute the linter analysis.

          +
        • +
        • +

          Execute Angular tests: execute the angular test in a Chrome Headless.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Build Application: compile the application to be ready to deploy in a web server.

          +
        • +
        • +

          Deliver application into Nexus: store all compiled files in Nexus3 as a zip file.

          +
          +

          [[jenkinsfile-docker]]

          +
          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
            +

            [[jenkinsfile-openshift]]

            +
            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+
devon4ng Docker generated files
+
+

When you generate the files for a devon4ng you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4ng --groupid com.devonfw --docker `--registryurl docker-registry-test.s2-eu.capgemini.com`. +
+
+
+Files +
+
    +
  • +

    .dockerignore

    +
    +

    In this file are defined the folders that will not be copied to the docker image. For more information read the official documentation.

    +
    +
  • +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
    +

    This build is using a multi-stage build. First, it uses a node image in order to compile the source code, then it uses an nginx image as a web server for our devon4ng application. With the multi-stage build we avoid everything related to node.js in our final image, where we only have an nginx with our application compiled.

    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling again the code, it takes all compiled files and the nginx.conf from Jenkins to the image.

    +
    +
  • +
+
+ +
+
+
+
+
devon4net schematic
+
+

With the cicdgen generate devon4net command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+
devon4net schematic arguments
+
+

When you execute the cicdgen generate devon4net command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --appname

    +
    +

    The name of your devon4net application.

    +
    +
  • +
  • +

    --appversion

    +
    +

    The initial version of your devon4net application

    +
    +
  • +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for OpenShift (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used to register your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --groupid

    +
    +

    The project groupId. This argument is required. It will be used for storing the project in a maven repository at Nexus 3. Why maven? Because it is the kind of repository where we can upload/download a zip file easily. Npm repository needs a package.json file but, as we compile the angular application to static javascript and html files, the package.json is no longer needed.

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
  • +

    --merge

    +
    +

    If you have used cicdgen previously, you can choose what you want to do in case of file conflict. The default behavior is to throw an error and not modify any file. You can see the other strategies on their specific page.

    +
    +
  • +
  • +

    --commit

    +
    +

    If true, all changes will be committed at the end of the process (if possible). In order to send a false value, you need to write --commit=false

    +
    +
  • +
+
+
+
+
devon4net generated files
+
+

When you execute the generate devon4net command, some files will be added/updated in your project.

+
+
+Files +
+
    +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which define the Jenkins pipeline of our project. With this we can execute the test, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        dotnet core installed in Jenkins as a global tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker :

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift :

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit of the Jenkinsfile syntax, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarded to delete old artifacts/build of the pipeline and also we disable the concurrent builds.

        +
        +

        [[jenkinsfile-teams]] +If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        tools: Here we define the global tools configurations. By default a version of nodejs is added here.

        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update it manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: in this stage some custom tools are loaded. Also we set some variables depending on the git branch which you are executing.

          +
        • +
        • +

          Fresh Dependency Installation: install all dependencies needed to build/run your dotnet project.

          +
        • +
        • +

          Execute dotnet tests: execute the tests.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Build Application: compile the application to be ready to deploy in a web server.

          +
        • +
        • +

          Deliver application into Nexus: store all compiled files in Nexus3 as a zip file.

          +
          +

          [[jenkinsfile-docker]]

          +
          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
            +

            [[jenkinsfile-openshift]]

            +
            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using an OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+
devon4net Docker generated files
+
+

When you generate the files for devon4net you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4net --groupid com.devonfw --docker `--registryurl docker-registry-test.s2-eu.capgemini.com`. +
+
+
+Files +
+
    +
  • +

    .dockerignore

    +
    +

    In this files are defined the folders that will not be copied to the docker image. Fore more information read the official documentation.

    +
    +
  • +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling again the code, it takes all compiled files from Jenkins to the image.

    +
    +
  • +
+
+ +
+
+
+
+
devon4node schematic
+
+

With the cicdgen generate devon4node command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+
devon4node schematic arguments
+
+

When you execute the cicdgen generate devon4node command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for OpenShift (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used to register your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --groupid

    +
    +

    The project groupId. This argument is required. It will be used for storing the project in a maven repository at Nexus 3. Why maven? Because it is the kind of repository where we can upload/download a zip file easily. Npm repository needs a package.json file but, as we compile the angular application to static javascript and html files, the package.json is no longer needed.

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
  • +

    --merge

    +
    +

    If you have used cicdgen previously, you can choose what you want to do in case of file conflict. The default behavior is to throw an error and not modify any file. You can see the other strategies on their specific page.

    +
    +
  • +
  • +

    --commit

    +
    +

    If true, all changes will be committed at the end of the process (if possible). In order to send a false value, you need to write --commit=false

    +
    +
  • +
+
+
+
+
devon4node generated files
+
+

When you execute the generate devon4node command, some files will be added/updated in your project.

+
+
+Files +
+
    +
  • +

    package.json

    +
    +

    The package.json is modified in order to add a script for run the linter and generate the json report. This script is called lint:ci.

    +
    +
  • +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which define the Jenkins pipeline of our project. With this we can execute the test, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        NodeJS installed in Jenkins as a global tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker :

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift :

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit of the Jenkinsfile syntax, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarded to delete old artifacts/build of the pipeline and also we disable the concurrent builds.

        +
        +

        [[jenkinsfile-teams]] +If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        tools: Here we define the global tools configurations. By default a version of nodejs is added here.

        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update it manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: in this stage some custom tools are loaded. Also we set some variables depending on the git branch which you are executing.

          +
        • +
        • +

          Fresh Dependency Installation: install all packages needed to build/run your node project.

          +
        • +
        • +

          Code Linting: execute the linter analysis.

          +
        • +
        • +

          Execute tests: execute the tests.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Build Application: compile the application to be ready to deploy in a web server.

          +
        • +
        • +

          Deliver application into Nexus: store all compiled files in Nexus3 as a zip file.

          +
          +

          [[jenkinsfile-docker]]

          +
          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
            +

            [[jenkinsfile-openshift]]

            +
            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using an OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+
devon4node Docker generated files
+
+

When you generate the files for a devon4node project you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4node --groupid com.devonfw --docker `--registryurl docker-registry-test.s2-eu.capgemini.com`. +
+
+
+Files +
+
    +
  • +

    .dockerignore

    +
    +

    In this file are defined the folders that will not be copied to the docker image. For more information read the official documentation.

    +
    +
  • +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
    +

    This build installs all dependencies in order to build the project and then removes all devDependencies in order to keep only the production dependencies.

    +
    +
  • +
  • +

    .dockerignore.ci

    +
    +

    Another .dockerignore. The purpose of this one is to define the file exclusions in your CI pipeline.

    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling again the code, it takes all compiled files from Jenkins to the image.

    +
    +
  • +
+
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cicdgen.wiki/merge-strategies.html b/docs/devonfw.github.io/1.0/cicdgen.wiki/merge-strategies.html new file mode 100644 index 00000000..3091ed13 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cicdgen.wiki/merge-strategies.html @@ -0,0 +1,432 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Merge Strategies

+
+
+

When you execute cicdgen in a project, it is possible that you already have some files that cicdgen will generate. Until version 1.5 the behaviour in these cases was to throw an error and not create/modify any file. Since version 1.6 you can choose what to do in case of conflict. In this page we will explain how to choose a merge strategy and how it works.

+
+
+

Choose a merge strategy

+
+

To choose a merge strategy, you must pass to cicdgen the merge parameter followed by the name of the strategy. The strategies available are: error, keep, override, combine.

+
+
+

Example:

+
+
+
+
$ cicdgen generate devon4j --merge keep
+
+
+
+
+

Merge strategies

+
+
    +
  • +

    error: The error strategy is the same as until version 1.5, throwing an error and not creating/modifying any file. This is the default value, if you do not pass the merge parameter this value will be taken.

    +
  • +
  • +

    keep: The keep strategy will keep the actual content of your files in case of conflict. If there is no conflict, the file will be created with the new content.

    +
  • +
  • +

    override: The override strategy will override your current files, without throwing any error, and create new ones with the new content. If there is no conflict, the file will be created with the new content.

    +
  • +
  • +

    combine: The combine strategy will create a new file combining the current content with the new content. In order to combine both files, it will apply a diff algorithm and it will show the conflicts in the same way that git does. If there is no conflict, the file will be created with the new content.

    +
    +

    By resolving the conflicts in the same way as git, you can use the same tools in order to solve them. For example, you can use VSCode:

    +
    +
    +

    merge combine vscode

    +
    +
  • +
+
+
+

Examples:

+
+
+

keep +Current file:

+
+
+
+
Line 1
+Line 2
+Line 3
+Line 4
+
+
+
+

New file:

+
+
+
+
Line 5
+Line 2
+Line 3
+Line 4
+
+
+
+

The result will be:

+
+
+
+
Line 1
+Line 2
+Line 3
+Line 4
+
+
+
+

override +Current file:

+
+
+
+
Line 1
+Line 2
+Line 3
+Line 4
+
+
+
+

New file:

+
+
+
+
Line 5
+Line 2
+Line 3
+Line 4
+
+
+
+

The result will be:

+
+
+
+
Line 5
+Line 2
+Line 3
+Line 4
+
+
+
+

combine +Current file:

+
+
+
+
Line 1
+Line 2
+Line 3
+Line 4
+
+
+
+

New file:

+
+
+
+
Line 5
+Line 2
+Line 3
+Line 4
+
+
+
+

The result will be:

+
+
+
+
<<<<<<< HEAD
+Line 1
+=======
+Line 5
+>>>>>>> new_content
+Line 2
+Line 3
+Line 4
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cicdgen.wiki/usage-example.html b/docs/devonfw.github.io/1.0/cicdgen.wiki/usage-example.html new file mode 100644 index 00000000..aa024d1f --- /dev/null +++ b/docs/devonfw.github.io/1.0/cicdgen.wiki/usage-example.html @@ -0,0 +1,449 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

cicdgen usage example

+
+
+

In this example we are going to show how to use cicdgen step by step in a devon4ng project.

+
+
+
    +
  1. +

    Install cicdgen

    +
    +

    cicdgen is already included in the devonfw distribution, but if you want to use it outside the devonfw console you can execute the following command:

    +
    +
    +
    +
    $ npm i -g cicdgen
    +
    +
    +
  2. +
  3. +

    Generate a new devon4ng project using devonfw ide.

    +
    +

    Inside a devonfw ide distribution execute the command (devon ng create <app-name>):

    +
    +
    +
    +
    $ devon ng create devon4ng
    +
    +
    +
  4. +
  5. +

    Execute cicdgen generate command

    +
    +

    As we want to send notifications to MS Teams, we need to create the connector first:

    +
    +
    +
    +
    +
      +
    • +

      Go to a channel in teams and click at the connectors button. Then click at the jenkins configure button.

      +
      +

      teams 1

      +
      +
    • +
    • +

      Put a name for the connector

      +
      +

      teams 2

      +
      +
    • +
    • +

      Copy the name and the Webhook URL, we will use it later.

      +
      +

      teams 3

      +
      +
    • +
    +
    +
    +
    +
    +

    With the values that we get in the previous steps, we will execute the cicdgen command inside the project folder. If you have any doubt you can use the help.

    +
    +
    +

    help 1

    +
    +
    +

    help 2

    +
    +
    +
    +
    $ cicdgen generate devon4ng --groupid com.devonfw --docker --dockerurl tpc://127.0.0.1:2376 `--registryurl docker-registry-devon.s2-eu.capgemini.com --teams --teamsname devon4ng --teamsurl https://outlook.office.com/webhook/...`
    +
    +
    +
    +

    cicdgen command

    +
    +
  6. +
  7. +

    Create a git repository and upload the code

    +
    +

    gitlab

    +
    +
    +

    gitlab 2

    +
    +
    +
    +
    $ git remote add origin https://devon.s2-eu.capgemini.com/gitlab/darrodri/devon4ng.git
    +$ git push -u origin master
    +
    +
    +
    +

    push code

    +
    +
    +

    As you can see, no git init or git commit is required, cicdgen does it for you.

    +
    +
  8. +
  9. +

    Create a multibranch-pipeline in Jenkins

    +
    +

    new pipeline

    +
    +
    +

    When you push the save button, it will download the repository and execute the pipeline defined in the Jenkinsfile. If you get any problem, check the environment variables defined in the Jenkinsfile. Here we show all variables related with Jenkins:

    +
    +
    +
    +
    +
      +
    • +

      chrome

      +
      +

      chrome stable

      +
      +
    • +
    • +

      sonarTool

      +
      +

      sonar tool

      +
      +
    • +
    • +

      sonarEnv

      +
      +

      sonar env

      +
      +
    • +
    • +

      repositoryId

      +
      +

      repository id

      +
      +
    • +
    • +

      globalSettingsId

      +
      +

      global settings id

      +
      +
    • +
    • +

      mavenInstallation

      +
      +

      maven installation

      +
      +
    • +
    • +

      dockerTool

      +
      +

      docker global

      +
      +
    • +
    +
    +
    +
    +
  10. +
  11. +

    Add a webhook in GitLab

    +
    +

    In order to run the pipeline every time that you push code to GitLab, you need to configure a webhook in your repository.

    +
    +
    +

    gitlab webhook

    +
    +
  12. +
+
+
+

Now your project is ready to work following a CICD strategy.

+
+
+

The last thing to take into account is the branch naming. We prepare the pipeline in order to work following the git-flow strategy. So all stages of the pipeline will be executed for the branches: develop, release/*, master. For the branches: feature/*, hotfix/*, bugfix/* only the steps related to unit testing will be executed.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/CobiGen.html b/docs/devonfw.github.io/1.0/cobigen.wiki/CobiGen.html new file mode 100644 index 00000000..86e14134 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/CobiGen.html @@ -0,0 +1,3195 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

CobiGen — Code-based incremental Generator

+
+ +
+
+
+

Document Description

+
+
+

This document contains the documentation of the CobiGen core module as well as all CobiGen plug-ins and the CobiGen eclipse integration.

+
+
+

Current versions:

+
+
+
    +
  • +

    CobiGen - Eclipse Plug-in v4.4.1

    +
  • +
  • +

    CobiGen - Maven Build Plug-in v4.1.0

    +
  • +
+
+
+
+
    +
  • +

    CobiGen v5.3.1

    +
  • +
  • +

    CobiGen - Java Plug-in v2.1.0

    +
  • +
  • +

    CobiGen - XML Plug-in v4.1.0

    +
  • +
  • +

    CobiGen - TypeScript Plug-in v2.2.0

    +
  • +
  • +

    CobiGen - Property Plug-in v2.0.0

    +
  • +
  • +

    CobiGen - Text Merger v2.0.0

    +
  • +
  • +

    CobiGen - JSON Plug-in v2.0.0

    +
  • +
  • +

    CobiGen - HTML Plug-in v2.0.1

    +
  • +
  • +

    CobiGen - Open API Plug-in v2.3.0

    +
  • +
  • +

    CobiGen - FreeMaker Template Engine v2.0.0

    +
  • +
  • +

    CobiGen - Velocity Template Engine v2.0.0

    +
  • +
+
+
+

Authors:

+
+
+
    +
  • +

    Malte Brunnlieb

    +
  • +
  • +

    Jaime Diaz Gonzalez

    +
  • +
  • +

    Steffen Holzer

    +
  • +
  • +

    Ruben Diaz Martinez

    +
  • +
  • +

    Joerg Hohwiller

    +
  • +
  • +

    Fabian Kreis

    +
  • +
  • +

    Lukas Goerlach

    +
  • +
  • +

    Krati Shah

    +
  • +
  • +

    Christian Richter

    +
  • +
  • +

    Erik Grüner

    +
  • +
  • +

    Mike Schumacher

    +
  • +
  • +

    Marco Rose

    +
  • +
+
+
+

==Guide to the Reader

+
+
+

Dependent on the intention you are reading this document, you might be most interested in the following chapters:

+
+
+
    +
  • +

    If this is your first contact with CobiGen, you will be interested in the general purpose of CobiGen, in the licensing of CobiGen, as well as in the Shared Service provided for CobiGen. Additionally, there are some general use cases, which are currently implemented and maintained to be used out of the box.

    +
  • +
  • +

    As a user of the CobiGen Eclipse integration, you should focus on the Installation and Usage chapters to get a good introduction about how to use CobiGen in eclipse.

    +
  • +
  • +

    As a user of the Maven integration, you should focus on the Maven configuration chapter, which guides you through the integration of CobiGen into your build configuration.

    +
  • +
  • +

    If you like to adapt the configuration of CobiGen, you have to step deeper into the configuration guide as well as into the plug-in configuration extensions for the Java Plug-in, XML-Plugin, Java Property Plug-in, as well as for the Text-Merger Plug-in.

    +
  • +
  • +

    Finally, if you want to develop your own templates, you will be thankful for helpful links in addition to the plug-ins documentation as referenced in the previous point.

    +
  • +
+
+
+

Unresolved include directive in modules/ROOT/pages/cobigen.wiki/CobiGen.adoc - include::Home[]

+
+ +
+

==General use cases

+
+
+

In addition to the selection of CobiGen applications introduced before, this chapter provides a more detailed overview about the currently implemented and maintained general use cases. These can be used by any project following a supported reference architecture as e.g. the devonfw or Register Factory.

+
+
+
+
+

devon4j

+
+
+

With our templates for devon4j, you can generate a whole CRUD application from a single Entity class. You save the effort for creating, DAOs, Transfer Objects, simple CRUD use cases with REST services and even the client application can be generated.

+
+
+

CRUD server application for devon4j

+
+

For the server, the required files for all architectural layers (Data access, logic, and service layer) can be created based on your Entity class. After the generation, you have CRUD functionality for the entity from bottom to top which can be accessed via a RESTful web service. Details are provided in the devonfw wiki.

+
+
+
+

CRUD client application for devon4ng

+
+

Based on the REST services on the server, you can also generate an Angular client based on devon4ng. With the help of Node.js, you have a working client application for displaying your entities within minutes!

+
+
+
+

Test data Builder for devon4j

+
+

Generating a builder pattern for POJOs to easily create test data in your tests. CobiGen is not only able to generate a plain builder pattern but rather builders, which follow a specific concept to minimize test data generation efforts in your unit tests. Take the following Person class as an example:

+
+
+
Person class
+
+
public class Person {
+
+    private String firstname;
+    private String lastname;
+    private int birthyear;
+    @NotNull
+    private Address address;
+
+    @NotNull
+    public String getFirstname() {
+        return this.firstname;
+    }
+
+    // additional default setter and getter
+}
+
+
+
+

It is a simple POJO with a validation annotation, to indicate, that firstname should never be null. Creating this object in a test would imply to call every setter, which is kind of nasty. Therefore, the Builder Pattern has been introduced for quite a long time in software engineering, allowing to easily create POJOs with a fluent API. See below.

+
+
+
Builder pattern example
+
+
Person person = new PersonBuilder()
+                .firstname("Heinz")
+                .lastname("Erhardt")
+                .birthyear(1909)
+                .address(
+                    new AddressBuilder().postcode("22222")
+                        .city("Hamburg").street("Luebecker Str. 123")
+                        .createNew())
+                .addChild(
+                    new PersonBuilder()[...].createNew()).createNew();
+
+
+
+

The Builder API generated by CobiGen allows you to set any setter accessible field of a POJO in a fluent way. But in addition, let's assume a test, which should check the birth year as a precondition for any business operation. So specifying all other fields of Person, especially firstname as it is mandatory to enter business code, would not make sense. The test behavior should just depend on the specification of the birth year and on no other data. So we would like to just provide this data to the test.

+
+
+

The Builder classes generated by CobiGen try to tackle this inconvenience by providing the ability to declare default values for any mandatory field due to validation or database constraints.

+
+
+
Builder Outline
+
+
public class PersonBuilder {
+
+    private void fillMandatoryFields() {
+        firstname("lasdjfaöskdlfja");
+        address(new AddressBuilder().createNew());
+    };
+    private void fillMandatoryFields_custom() {...};
+
+    public PersonBuilder firstname(String value);
+    public PersonBuilder lastname(String value);
+    ...
+
+    public Person createNew();
+    public Person persist(EntityManager em);
+    public List<Person> persistAndDuplicate(EntityManager em, int count);
+}
+
+
+
+

Looking at the plotted builder API generated by CobiGen, you will find two private methods. The method fillMandatoryFields will be generated by CobiGen and regenerated every time CobiGen generation will be triggered for the Person class. This method will set every automatically detected field with not null constraints to a default value. However, by implementing fillMandatoryFields_custom on your own, you can reset these values or even specify more default values for any other field of the object. Thus, running new PersonBuilder().birthyear(1909).createNew(); will create a valid object of Person, which is already pre-filled such that it does not influence the test execution besides the fact that it circumvents database and validation issues.

+
+
+

This even holds for complex data structures as indicated by address(new AddressBuilder().createNew());. Due to the use of the AddressBuilder for setting the default value for the field address, also the default values for Address will be set automatically.

+
+
+

Finally, the builder API provides different methods to create new objects.

+
+
+
    +
  • +

    createNew() just creates a new object from the builder specification and returns it.

    +
  • +
  • +

    persist(EntityManager) will create a new object from the builder specification and persists it to the database.

    +
  • +
  • +

    persistAndDuplicate(EntityManager, int) will create the given number of objects from the builder specification and persists all of these. After the initial generation of each builder, you might want to adapt the method body as you will most probably not be able to persist more than one object with the same field assignments to the database due to unique constraints. Thus, please see the generated comment in the method to adapt unique fields accordingly before persisting to the database.

    +
  • +
+
+
+

Custom Builder for Business Needs

+
+

CobiGen just generates basic builders for any POJO. However, for project needs you probably would like to have even more complex builders, which enable the easy generation of more complex test data which are encoded in a large object hierarchy. Therefore, the generated builders can just be seen as a tool to achieve this. You can define your own business driven builders in the same way as the generated builders, but explicitly focusing on your business needs. Just take this example as a demonstration of that idea:

+
+
+
+
  University uni = new ComplexUniversityBuilder()
+    .withStudents(200)
+    .withProfessors(4)
+    .withExternalStudent()
+    .createNew();
+
+
+
+

E.g. the method withExternalStudent() might create a person, which is a student and is flagged to be an external student. Basing this implementation on the generated builders will even assure that you would benefit from any default values you have set before. In addition, you can even imagine any more complex builder methods setting values driven by your reusable testing needs based on the specific business knowledge.

+
+
+
+
+
+
+

Register Factory

+
+
+

CRUD server application

+
+

Generates a CRUD application with persistence entities as inputs. This includes DAOs, TOs, use cases, as well as a CRUD JSF user interface if needed.

+
+
+
+

Test data Builder

+ +
+
+

Test documentation

+
+

Generate test documentation from test classes. The inputs are the doclet tags of several test classes, which e.g. can specify a description, a cross-reference, or a test target description. The result currently is a csv file, which lists all tests with the corresponding meta-information. Afterwards, this file might be styled and passed to the customer if needed and it will be up-to-date every time!

+
+
+
+
+
+

CobiGen

+
+ +
+

==Configuration

+
+
+

CobiGen is maintaining a home directory further referenced in this documentation as $cghome, which is used to maintain temporary or transient data. The home folder is determined with the following location fall-back:

+
+
+
    +
  1. +

    System environment variable COBIGEN_HOME (e.g. C:\project\ide\conf\cobigen-home)

    +
  2. +
  3. +

    .cobigen directory in OS user home (e.g. ~/.cobigen)

    +
  4. +
+
+
+

The actual configuration of CobiGen is maintained by a single folder or jar. The location can be configured with respect to the implemented configuration fall-back mechanism. CobiGen will search for the location of the configuration in the following order:

+
+
+
    +
  1. +

    A configuration jar or directory, which is passed to CobiGen by the Maven or Eclipse integration or any other program using the CobiGen programming interface: +1.1. the Maven integration allows to configure a jar dependency to be included in the currently running classpath (of interest for maven configuration +1.2. the Eclipse integration allows to specify a CobiGen_Templates project in the eclipse workspace

    +
  2. +
  3. +

    The file $cghome/.cobigen exists and the property templates is set to a valid configuration (e.g. templates=C:\project\ide\conf\templates or templates=C:\project\ide\conf\templates.jar) Hint: Check for log entry like Value of property templates in $cghome/.cobigen is invalid to identify an invalid configuration which is not taken up as expected

    +
  4. +
  5. +

    The folder $cghome/templates/CobiGen_Templates exists

    +
  6. +
  7. +

    The lexicographical sorted first configuration jar of the following path pattern $cghome/templates/templates-([^-]+)-(\\d+\\.?)+.jar if exists (e.g. templates-devon4j-2020.04.001)

    +
  8. +
  9. +

    CobiGen will automatically download the latest jar configuration from maven central with groupId com.devonfw.cobigen and artifactId templates-devon4j and take it like described in 4.

    +
  10. +
+
+
+

Within the configuration jar or directory you will find the following structure:

+
+
+
+
CobiGen_Templates
+ |- templateFolder1
+    |- templates.xml
+ |- templateFolder2
+    |- templates.xml
+ |- context.xml
+
+
+
+

Find some examples here.

+
+
+

Context Configuration

+
+

The context configuration (context.xml) always has the following root structure:

+
+
+
Context Configuration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<contextConfiguration xmlns="http://capgemini.com"
+                      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                      version="1.0">
+    <triggers>
+        ...
+    </triggers>
+</contextConfiguration>
+
+
+
+

The context configuration has a version attribute, which should match the XSD version the context configuration is an instance of. It should not state the version of the currently released version of CobiGen. This attribute should be maintained by the context configuration developers. If configured correctly, it will provide a better feedback for the user and thus higher user experience. Currently there is only the version v1.0. For future versions there will be a changelog later on.

+
+
+
Trigger Node
+
+

As children of the <triggers> node you can define different triggers. By defining a <trigger> you declare a mapping between special inputs and a templateFolder, which contains all templates, which are worth to be generated with the given input.

+
+
+
trigger configuration
+
+
<trigger id="..." type="..." templateFolder="..." inputCharset="UTF-8" >
+    ...
+</trigger>
+
+
+
+
    +
  • +

    The attribute id should be unique within a context configuration. It is necessary for efficient internal processing.

    +
  • +
  • +

    The attribute type declares a specific trigger interpreter, which might be provided by additional plug-ins. A trigger interpreter has to provide an input reader, which reads specific inputs and creates a template object model out of it to be processed by the FreeMarker template engine later on. Have a look at the plug-in’s documentation of your interest and see, which trigger types and thus inputs are currently supported.

    +
  • +
  • +

    The attribute templateFolder declares the relative path to the template folder, which will be used if the trigger gets activated.

    +
  • +
  • +

    The attribute inputCharset (optional) determines the charset to be used for reading any input file.

    +
  • +
+
+
+
+
Matcher Node
+
+

A trigger will be activated if its matchers hold the following formula:

+
+
+

!(NOT || …​ || NOT) && AND && …​ && AND && (OR || …​ || OR)

+
+
+

Whereas NOT/AND/OR describes the accumulationType of a matcher (see below) and e.g. NOT means 'a matcher with accumulationType NOT matches a given input'. Thus, in addition to an input reader, a trigger interpreter has to define at least one set of matchers, which are satisfiable, to be fully functional. A <matcher> node declares a specific characteristic a valid input should have.

+
+
+
Matcher Configuration
+
+
<matcher type="..." value="..." accumulationType="...">
+    ...
+</matcher>
+
+
+
+
    +
  • +

    The attribute type declares a specific type of matcher, which has to be provided by the surrounding trigger interpreter. Have a look at the plug-in’s documentation, which also provides the used trigger type for more information about valid matcher and their functionalities.

    +
  • +
  • +

    The attribute value might contain any information necessary for processing the matcher’s functionality. Have a look at the relevant plug-in’s documentation for more detail.

    +
  • +
  • +

    The attribute accumulationType (optional) specifies how the matcher will influence the trigger activation. Valid values are:

    +
    +
      +
    • +

      OR (default): if any matcher of accumulation type OR matches, the trigger will be activated as long as there are no further matchers with different accumulation types

      +
    • +
    • +

      AND: if any matcher with AND accumulation type does not match, the trigger will not be activated

      +
    • +
    • +

      NOT: if any matcher with NOT accumulation type matches, the trigger will not be activated

      +
    • +
    +
    +
  • +
+
+
+
+
Variable Assignment Node
+
+

Finally, a <matcher> node can have multiple <variableAssignment> nodes as children. Variable assignments allow to parametrize the generation by additional values, which will be added to the object model for template processing. The variables declared using variable assignments, will be made accessible in the templates.xml as well in the object model for template processing via the namespace variables.*.

+
+
+
Complete Configuration Pattern
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<contextConfiguration xmlns="http://capgemini.com"
+                      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                      version="1.0">
+    <triggers>
+        <trigger id="..." type="..." templateFolder="...">
+            <matcher type="..." value="...">
+                <variableAssignment type="..." key="..." value="..." />
+            </matcher>
+        </trigger>
+    </triggers>
+</contextConfiguration>
+
+
+
+
    +
  • +

    The attribute type declares the type of variable assignment to be processed by the trigger interpreter providing plug-in. This attribute enables variable assignments with different dynamic value resolutions.

    +
  • +
  • +

    The attribute key declares the namespace under which the resolved value will be accessible later on.

    +
  • +
  • +

    The attribute value might declare a constant value to be assigned or any hint for value resolution done by the trigger interpreter providing plug-in. For instance, if type is regex, then on value you will assign the matched group number by the regex (1, 2, 3…​)

    +
  • +
+
+
+
+
Container Matcher Node
+
+

The <containerMatcher> node is an additional matcher for matching containers of multiple input objects. +Such a container might be a package, which encloses multiple types or---more generic---a model, which encloses multiple elements. A container matcher can be declared side by side with other matchers:

+
+
+
ContainerMatcher Declaration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<contextConfiguration xmlns="http://capgemini.com"
+                      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                      version="1.0">
+    <triggers>
+        <trigger id="..." type="..." templateFolder="..." >
+            <containerMatcher type="..." value="..." retrieveObjectsRecursively="..." />
+            <matcher type="..." value="...">
+                <variableAssignment type="..." variable="..." value="..." />
+            </matcher>
+        </trigger>
+    </triggers>
+</contextConfiguration>
+
+
+
+
    +
  • +

    The attribute type declares a specific type of matcher, which has to be provided by the surrounding trigger interpreter. Have a look at the plug-in’s documentation, which also provides the used trigger type for more information about valid matcher and their functionalities.

    +
  • +
  • +

    The attribute value might contain any information necessary for processing the matcher’s functionality. Have a look at the relevant plug-in’s documentation for more detail.

    +
  • +
  • +

    The attribute retrieveObjectsRecursively (optional boolean) states, whether the children of the input should be retrieved recursively to find matching inputs for generation.

    +
  • +
+
+
+

The semantics of a container matchers are the following:

+
+
+
    +
  • +

    A <containerMatcher> does not declare any <variableAssignment> nodes

    +
  • +
  • +

    A <containerMatcher> matches an input if and only if one of its enclosed elements satisfies a set of <matcher> nodes of the same <trigger>

    +
  • +
  • +

    Inputs, which match a <containerMatcher> will cause a generation for each enclosed element

    +
  • +
+
+
+
+
+

Templates Configuration

+
+

The template configuration (templates.xml) specifies, which templates exist and under which circumstances they will be generated. There are two possible configuration styles:

+
+
+
    +
  1. +

    Configure the template meta-data for each template file by template nodes

    +
  2. +
  3. +

    (since cobigen-core-v1.2.0): Configure templateScan nodes to automatically retrieve a default configuration for all files within a configured folder and possibly modify the automatically configured templates using templateExtension nodes

    +
  4. +
+
+
+

To get an intuition of the idea, the following will initially describe the first (more extensive) configuration style. Such a configuration root structure looks as follows:

+
+
+
Extensive Templates Configuration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<templatesConfiguration xmlns="http://capgemini.com"
+                        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                        version="1.0" templateEngine="FreeMarker">
+    <templates>
+            ...
+    </templates>
+    <increments>
+            ...
+    </increments>
+</templatesConfiguration>
+
+
+
+

The root node <templatesConfiguration> specifies two attributes. The attribute version provides further usability support and will be handled analogous to the version attribute of the context configuration. The optional attribute templateEngine specifies the template engine to be used for processing the templates (since `cobigen-core-4.0.0`). By default it is set to FreeMarker.

+
+
+

The node <templatesConfiguration> allows two different grouping nodes as children. First, there is the <templates> node, which groups all declarations of templates. Second, there is the <increments> node, which groups all declarations about increments.

+
+
+
Template Node
+
+

The <templates> node groups multiple <template> declarations, which enables further generation. Each template file should be registered at least once as a template to be considered.

+
+
+
Example Template Configuration
+
+
<templates>
+    <template name="..." destinationPath="..." templateFile="..." mergeStrategy="..." targetCharset="..." />
+    ...
+</templates>
+
+
+
+

A template declaration consists of multiple pieces of information:

+
+
+
    +
  • +

    The attribute name specifies a unique ID within the templates configuration, which will later be reused in the increment definitions.

    +
  • +
  • +

    The attribute destinationPath specifies the destination path the template will be generated to. It is possible to use all variables defined by variable assignments within the path declaration using the FreeMarker syntax ${variables.*}. While resolving the variable expressions, each dot within the value will be automatically replaced by a slash. This behavior is accounted for by the transformations of Java packages to paths as CobiGen has first been developed in the context of the Java world. Furthermore, the destination path variable resolution provides the following additional built-in operators analogue to the FreeMarker syntax:

    +
    +
      +
    • +

      ?cap_first analogue to FreeMarker

      +
    • +
    • +

      ?uncap_first analogue to FreeMarker

      +
    • +
    • +

      ?lower_case analogue to FreeMarker

      +
    • +
    • +

      ?upper_case analogue to FreeMarker

      +
    • +
    • +

      ?replace(regex, replacement) - Replaces all occurrences of the regular expression regex in the variable’s value with the given replacement string. (since cobigen-core v1.1.0)

      +
    • +
    • +

      ?removeSuffix(suffix) - Removes the given suffix in the variable’s value iff the variable’s value ends with the given suffix. Otherwise nothing will happen. (since cobigen-core v1.1.0)

      +
    • +
    • +

      ?removePrefix(prefix) - Analogue to ?removeSuffix but removes the prefix of the variable’s value. (since cobigen-core v1.1.0)

      +
    • +
    +
    +
  • +
  • +

    The attribute templateFile describes the relative path dependent on the template folder specified in the trigger to the template file to be generated.

    +
  • +
  • +

    The attribute mergeStrategy (optional) can be optionally specified and declares the type of merge mechanism to be used, when the destinationPath points to an already existing file. CobiGen by itself just comes with a mergeStrategy override, which enforces file regeneration in total. Additional available merge strategies have to be obtained from the different plug-in’s documentations (see here for java, XML, properties, and text). Default: not set (means not mergeable)

    +
  • +
  • +

    The attribute targetCharset (optional) can be optionally specified and declares the encoding with which the contents will be written into the destination file. This also includes reading an existing file at the destination path for merging its contents with the newly generated ones. Default: UTF-8

    +
  • +
+
+
+

(Since version 4.1.0) It is possible to reference external template (templates defined on another trigger), thanks to using <incrementRef …​> that are explained here.

+
+
+
+
Template Scan Node
+
+

(since cobigen-core-v1.2.0)

+
+
+

The second configuration style for template meta-data is driven by initially scanning all available templates and automatically configure them with a default set of meta-data. A scanning configuration might look like this:

+
+
+
Example of Template-scan configuration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<templatesConfiguration xmlns="http://capgemini.com"
+                        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                        version="1.2">
+    <templateScans>
+        <templateScan templatePath="templates" templateNamePrefix="prefix_" destinationPath="src/main/java"/>
+    </templateScans>
+</templatesConfiguration>
+
+
+
+

You can specify multiple <templateScan …​> nodes for different templatePaths and different templateNamePrefixes.

+
+
+
    +
  • +

    The name can be specified to later on reference the templates found by a template-scan within an increment. (since cobigen-core-v2.1.)

    +
  • +
  • +

    The templatePath specifies the relative path from the templates.xml to the root folder from which the template scan should be performed.

    +
  • +
  • +

    The templateNamePrefix (optional) defines a common id prefix, which will be added to all found and automatically configured templates.

    +
  • +
  • +

    The destinationPath defines the root folder all found templates should be generated to, whereas the root folder will be a prefix for all found and automatically configured templates.

    +
  • +
+
+
+

A templateScan will result in the following default configuration of templates. For each file found, a new template will be created virtually with the following default values:

+
+
+
    +
  • +

    id: file name without .ftl extension prefixed by templateNamePrefix from template-scan

    +
  • +
  • +

    destinationPath: relative file path of the file found with the prefix defined by destinationPath from template-scan. Furthermore,

    +
    +
      +
    • +

      it is possible to use the syntax for accessing and modifying variables as described for the attribute destinationPath of the template node, besides the only difference, that due to file system restrictions you have to replace all ?-signs (for built-ins) with #-signs.

      +
    • +
    • +

      the files to be scanned, should provide their final file extension by the following file naming convention: <filename>.<extension>.ftl Thus the file extension .ftl will be removed after generation.

      +
    • +
    +
    +
  • +
  • +

    templateFile: relative path to the file found

    +
  • +
  • +

    mergeStrategy: (optional) not set means not mergeable

    +
  • +
  • +

    targetCharset: (optional) defaults to UTF-8

    +
  • +
+
+
+

(Since version 4.1.0) It is possible to reference external templateScan (templateScans defined on another trigger), thanks to using <incrementRef …​> that are explained here.

+
+
+
+
Template Extension Node
+
+

(since cobigen-core-v1.2.0)

+
+
+

Additionally to the templateScan declaration it is easily possible to rewrite specific attributes for any scanned and automatically configured template.

+
+
+
Example Configuration of a TemplateExtension
+
+
<templates>
+    <templateExtension ref="prefix_FooClass.java" mergeStrategy="javamerge" />
+</templates>
+
+<templateScans>
+    <templateScan templatePath="foo" templateNamePrefix="prefix_" destinationPath="src/main/java/foo"/>
+</templateScans>
+
+
+
+

Let's assume that the above example declares a template-scan for the folder foo, which contains a file FooClass.java.ftl at any folder depth. Thus the template scan will automatically create a virtual template declaration with id=prefix_FooClass.java and further default configuration.

+
+
+

Using the templateExtension declaration above will reference the scanned template by the attribute ref and overrides the mergeStrategy of the automatically configured template by the value javamerge. Thus we are able to minimize the needed templates configuration.

+
+
+

(Since version 4.1.0) It is possible to reference external templateExtension (templateExtensions defined on another trigger), thanks to using <incrementRef …​> that are explained here.

+
+
+
+
Increment Node
+
+

The <increments> node groups multiple <increment> nodes, which can be seen as a collection of templates to be generated. An increment will be defined by a unique id and a human readable description.

+
+
+
+
<increments>
+    <increment id="..." description="...">
+        <incrementRef ref="..." />
+        <templateRef ref="..." />
+        <templateScanRef ref="..." />
+    </increment>
+</increments>
+
+
+
+

An increment might contain multiple increments and/or templates, which will be referenced using <incrementRef …​>, <templateRef …​>, resp. <templateScanRef …​> nodes. These nodes only declare the attribute ref, which will reference an increment, a template, or a template-scan by its id or name.

+
+
+

(Since version 4.1.0) A special case of <incrementRef …​> is the external incrementsRef. By default, <incrementRef …​> are used to reference increments defined in the same templates.xml file. So for example, we could have:

+
+
+
+
<increments>
+    <increment id="incA" description="...">
+        <incrementRef ref="incB" />
+    </increment>
+    <increment id="incB" description="...">
+        <templateRef .... />
+        <templateScan .... />
+    </increment>
+</increments>
+
+
+
+

However, if we want to reference an increment that is not defined inside our templates.xml (an increment defined for another trigger), then we can use external incrementRef as shown below:

+
+
+
+
<increment name="..." description="...">
+    <incrementRef ref="trigger_id::increment_id"/>
+</increment>
+
+
+
+

The ref string is split using :: as a delimiter. The first part of the string is the trigger_id to reference. That trigger contains an increment_id. Currently, this functionality only works when both templates use the same kind of input file.

+
+
+
+
+

Java Template Logic

+
+

since cobigen-core-3.0.0 which is included in the Eclipse and Maven Plugin since version 2.0.0 +In addition, it is possible to implement more complex template logic by custom Java code. To enable this feature, you can simply import the CobiGen_Templates by clicking on Adapt Templates, turn it into a simple maven project (if it is not already) and implement any Java logic in the common maven layout (e.g. in the source folder src/main/java). Each Java class will be instantiated by CobiGen for each generation process. Thus, you can even store any state within a Java class instance during generation. However, there is currently no guarantee regarding the template processing order.

+
+
+

As a consequence, you have to implement your Java classes with a public default (non-parameter) constructor to be used by any template. Methods of the implemented Java classes can be called within templates by the simple standard FreeMarker expression for calling Bean methods: SimpleType.methodName(param1). Until now, CobiGen will shadow multiple types with the same simple name non-deterministically. So please prevent yourself from that situation.

+
+
+

Finally, if you would like to do some reflection within your Java code accessing any type of the template project or any type referenced by the input, you should load classes by making use of the classloader of the util classes. CobiGen will take care of the correct classloader building including the classpath of the input source as well as of the classpath of the template project. If you use any other classloader or build it by your own, there will be no guarantee, that generation succeeds.

+
+
+
+

Template Properties

+
+

since cobigen-core-4.0.0` +Using a configuration with `template scan, you can make use of properties in templates specified in property files named cobigen.properties next to the templates. The property files are specified as Java property files. Property files can be nested in sub-folders. Properties will be resolved including property shading. Properties defined nearest to the template to be generated will take precedence. +In addition, a cobigen.properties file can be specified in the target folder root (in eclipse plugin, this is equal to the source project root). These properties take precedence over template properties specified in the template folder.

+
+
+ + + + + +
+ + +It is not allowed to override context variables in cobigen.properties specifications as we have not found any interesting use case. This is most probably an error of the template designer, CobiGen will raise an error in this case. +
+
+
+
Multi module support or template target path redirects
+
+

since cobigen-core-4.0.0` +One special property you can specify in the template properties is the property `relocate. It will cause the current folder and its sub-folders to be relocated at destination path resolution time. Take the following example:

+
+
+
+
folder
+  - sub1
+    Template.java.ftl
+    cobigen.properties
+
+
+
+

Let the cobigen.properties file contain the line relocate=../sub2/${cwd}. Given that, the relative destination path of Template.java.ftl will be resolved to folder/sub2/Template.java. Compare template scan configuration for more information about basic path resolution. The relocate property specifies a relative path from the location of the cobigen.properties. The ${cwd} placeholder will contain the remaining relative path from the cobigen.properties location to the template file. In this basic example it just contains Template.java.ftl, but it may even be any relative path including sub-folders of sub1 and its templates. +Given the relocate feature, you can even step out of the root path, which in general is the project/maven module the input is located in. This enables template designers to even address, e.g., maven modules located next to the module the input is coming from.

+
+
+
+
+

Basic Template Model

+
+

In addition to what is served by the different model builders of the different plug-ins, CobiGen provides a minimal model based on context variables as well as CobiGen properties. The following model is independent of the input format and will be served as a template model all the time:

+
+
+ +
+
+
+

Plugin Mechanism

+
+

Since cobigen-core 4.1.0, we changed the plug-in discovery mechanism. So far it was necessary to register new plugins programmatically, which introduced the need to let every tool integration, i.e. for eclipse or maven, be dependent on every plug-in, which should be released. This made release cycles take a long time as all plug-ins have to be integrated into a final release of maven or eclipse integration.

+
+
+

Now, plug-ins are automatically discovered by the Java Service Loader mechanism from the classpath. This also affects the setup of the eclipse and maven integration to allow modular releases of CobiGen in the future. We are now able to provide faster rollouts of bug-fixes in any of the plug-ins as they can be released completely independently.

+
+
+
+
+
+

Plug-ins

+
+ +
+

==Java Plug-in +The CobiGen Java Plug-in comes with a new input reader for java artifacts, new java related trigger and matchers, as well as a merging mechanism for Java sources.

+
+
+
Trigger extension
+
+

The Java Plug-in provides a new trigger for Java related inputs. It accepts different representations as inputs (see Java input reader) and provides additional matching and variable assignment mechanisms. The configuration in the context.xml for this trigger looks like this:

+
+
+
    +
  • +

    type 'java'

    +
    +
    Example of a java trigger definition
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables Java elements as inputs.

    +
    +
  • +
+
+
+
Matcher types
+
+

With the trigger you might define matchers, which restrict the input upon specific aspects:

+
+
+
    +
  • +

    type fqn → full qualified name matching

    +
    +
    Example of a java trigger definition with a full qualified name matcher
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="fqn" value="(.+)\.persistence\.([^\.]+)\.entity\.([^\.]+)">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the full qualified name (fqn) of the declaring input class matches the given regular expression (value).

    +
    +
  • +
  • +

    type 'package' → package name of the input

    +
    +
    Example of a java trigger definition with a package name matcher
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="package" value="(.+)\.persistence\.([^\.]+)\.entity">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the package name (package) of the declaring input class matches the given regular expression (value).

    +
    +
  • +
  • +

    type 'expression'

    +
    +
    Example of a java trigger definition with a package name matcher
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="expression" value="instanceof java.lang.String">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the expression evaluates to true. Valid expressions are

    +
    +
  • +
  • +

    instanceof fqn: checks an 'is a' relation of the input type

    +
  • +
  • +

    isAbstract: checks, whether the input type is declared abstract

    +
  • +
+
+
+
+
Container Matcher types
+
+

Additionally, the java plugin provides the ability to match packages (containers) as follows:

+
+
+
    +
  • +

    type 'package'

    +
    +
    Example of a java trigger definition with a container matcher for packages
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <containerMatcher type="package" value="com\.example\.app\.component1\.persistence.entity" />
    +</trigger>
    +
    +
    +
    +

    The container matcher matches packages provided by the type com.capgemini.cobigen.javaplugin.inputreader.to.PackageFolder with a regular expression stated in the value attribute. (See containerMatcher semantics to get more information about containerMatchers itself.)

    +
    +
  • +
+
+
+
+
Variable Assignment types
+
+

Furthermore, it provides the ability to extract information from each input for further processing in the templates. The values assigned by variable assignments will be made available in template and the destinationPath of context.xml through the namespace variables.<key>. The Java Plug-in currently provides two different mechanisms:

+
+
+
    +
  • +

    type 'regex' → regular expression group

    +
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="fqn" value="(.+)\.persistence\.([^\.]+)\.entity\.([^\.]+)">
    +        <variableAssignment type="regex" key="rootPackage" value="1" />
    +        <variableAssignment type="regex" key="component" value="2" />
    +        <variableAssignment type="regex" key="pojoName" value="3" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value of the given regular expression group number to the given key.

+
+
+
    +
  • +

    type 'constant' → constant parameter

    +
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="fqn" value="(.+)\.persistence\.([^\.]+)\.entity\.([^\.]+)">
    +        <variableAssignment type="constant" key="domain" value="restaurant" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value to the key as a constant.

+
+
+
+
Java input reader
+
+

The CobiGen Java Plug-in implements an input reader for parsed java sources as well as for java Class<?> objects (loaded by reflection). So API users can pass Class<?> objects as well as JavaClass objects for generation. The latter depends on QDox, which will be used for parsing and merging java sources. For getting the right parsed java inputs you can easily use the JavaParserUtil, which provides static functionality to parse java files and get the appropriate JavaClass object.

+
+
+

Furthermore, due to restrictions on both inputs according to model building (see below), it is also possible to provide an array of length two as an input, which contains the Class<?> as well as the JavaClass object of the same class.

+
+
+Template object model +
+

No matter whether you use reflection objects or parsed java classes as input, you will get the following object model for template creation:

+
+
+
    +
  • +

    classObject ('Class' :: Class object of the Java input)

    +
  • +
  • +

    POJO

    +
    +
      +
    • +

      name ('String' :: Simple name of the input class)

      +
    • +
    • +

      package ('String' :: Package name of the input class)

      +
    • +
    • +

      canonicalName ('String' :: Full qualified name of the input class)

      +
    • +
    • +

      annotations ('Map<String, Object>' :: Annotations, which will be represented by a mapping of the full qualified type of an annotation to its value. To gain template compatibility, the key will be stored with '_' instead of '.' in the full qualified annotation type. Furthermore, the annotation might be recursively defined and thus be accessed using the same type of mapping. Example ${pojo.annotations.javax_persistence_Id})

      +
    • +
    • +

      JavaDoc ('Map<String, Object>') :: A generic way of addressing all available JavaDoc doclets and comments. The only fixed variable is comment (see below). All other provided variables depend on the doclets found while parsing. The value of a doclet can be accessed by the doclets name (e.g. ${…​JavaDoc.author}). In case of doclet tags that can be declared multiple times (currently @param and @throws), you will get a map, which you access in a specific way (see below).

      +
      +
        +
      • +

        comment ('String' :: JavaDoc comment, which does not include any doclets)

        +
      • +
      • +

        params ('Map<String,String>' :: JavaDoc parameter info. If the comment follows proper conventions, the key will be the name of the parameter and the value being its description. You can also access the parameters by their number, as in arg0, arg1 etc, following the order of declaration in the signature, not in order of JavaDoc)

        +
      • +
      • +

        throws ('Map<String,String>' :: JavaDoc exception info. If the comment follows proper conventions, the key will be the name of the thrown exception and the value being its description)

        +
      • +
      +
      +
    • +
    • +

      extendedType ('Map<String, Object>' :: The supertype, represented by a set of mappings (since cobigen-javaplugin v1.1.0)

      +
      +
        +
      • +

        name ('String' :: Simple name of the supertype)

        +
      • +
      • +

        canonicalName ('String' :: Full qualified name of the supertype)

        +
      • +
      • +

        package ('String' :: Package name of the supertype)

        +
      • +
      +
      +
    • +
    • +

      implementedTypes ('List<Map<String, Object>>' :: A list of all implementedTypes (interfaces) represented by a set of mappings (since cobigen-javaplugin v1.1.0)

      +
      +
        +
      • +

        interface ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          name ('String' :: Simple name of the interface)

          +
        • +
        • +

          canonicalName ('String' :: Full qualified name of the interface)

          +
        • +
        • +

          package ('String' :: Package name of the interface)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      fields ('List<Map<String, Object>>' :: List of fields of the input class) (renamed since cobigen-javaplugin v1.2.0; previously attributes)

      +
      +
        +
      • +

        field ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          name ('String' :: Name of the Java field)

          +
        • +
        • +

          type ('String' :: Type of the Java field)

          +
        • +
        • +

          canonicalType ('String' :: Full qualified type declaration of the Java field’s type)

          +
        • +
        • +

          'isId' (Deprecated :: boolean :: true if the Java field or its setter or its getter is annotated with the javax.persistence.Id annotation, false otherwise. Equivalent to ${pojo.attributes[i].annotations.javax_persistence_Id?has_content})

          +
        • +
        • +

          JavaDoc (see pojo.JavaDoc)

          +
        • +
        • +

          annotations (see pojo.annotations with the remark, that for fields all annotations of its setter and getter will also be collected)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      methodAccessibleFields ('List<Map<String, Object>>' :: List of fields of the input class or its inherited classes, which are accessible using setter and getter methods)

      +
      +
        +
      • +

        same as for field (but without JavaDoc!)

        +
      • +
      +
      +
    • +
    • +

      methods ('List<Map<String, Object>>' :: The list of all methods, whereas one method will be represented by a set of property mappings)

      +
      +
        +
      • +

        method ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          name ('String' :: Name of the method)

          +
        • +
        • +

          JavaDoc (see pojo.JavaDoc)

          +
        • +
        • +

          annotations (see pojo.annotations)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+

Furthermore, when providing a Class<?> object as input, the Java Plug-in will provide additional functionalities as template methods (deprecated):

+
+
+
    +
  1. +

    isAbstract(String fqn) (Checks whether the type with the given full qualified name is an abstract class. Returns a Boolean value.) (since cobigen-javaplugin v1.1.1) (deprecated)

    +
  2. +
  3. +

    isSubtypeOf(String subType, String superType) (Checks whether the subType declared by its full qualified name is a sub type of the superType declared by its full qualified name. Equals the Java expression subType instanceof superType and so also returns a Boolean value.) (since cobigen-javaplugin v1.1.1) (deprecated)

    +
  4. +
+
+
+
+Model Restrictions +
+

As stated before, both inputs (Class<?> objects and JavaClass objects) have their restrictions according to model building. In the following these restrictions are listed for both models, the ParsedJava Model which results from a JavaClass input and the ReflectedJava Model, which results from a Class<?> input.

+
+
+

It is important to understand, that these restrictions are only present if you work with either Parsed Model OR the Reflected Model. If you use the Maven Build Plug-in or Eclipse Plug-in these two models are merged together so that they can mutually compensate their weaknesses.

+
+
+Parsed Model +
+
    +
  • +

    annotations of the input’s supertype are not accessible due to restrictions in the QDox library. So pojo.methodAccessibleFields[i].annotations will always be empty for super type fields.

    +
  • +
  • +

    annotations' parameter values are available as Strings only (e.g. the Boolean value true is transformed into "true"). This also holds for the Reflected Model.

    +
  • +
  • +

    fields of "supertypes" of the input JavaClass are not available at all. So pojo.methodAccessibleFields will only contain the input type’s and the direct superclass’s fields.

    +
  • +
  • +

    [resolved, since cobigen-javaplugin 1.3.1] field types of supertypes are always canonical. So pojo.methodAccessibleFields[i].type will always provide the same value as pojo.methodAccessibleFields[i].canonicalType (e.g. java.lang.String instead of the expected String) for super type fields.

    +
  • +
+
+
+
+Reflected Model +
+
    +
  • +

    annotations' parameter values are available as Strings only (e.g. the Boolean value true is transformed into "true"). This also holds for the Parsed Model.

    +
  • +
  • +

    annotations are only available if the respective annotation has @Retention(value=RUNTIME), otherwise the annotations are to be discarded by the compiler or by the VM at run time. For more information see RetentionPolicy.

    +
  • +
  • +

    information about generic types is lost. E.g. a field’s/ methodAccessibleField’s type for List<String> can only be provided as List<?>.

    +
  • +
+
+
+
+
+
+
+
Merger extensions
+
+

The Java Plug-in provides two additional merging strategies for Java sources, which can be configured in the templates.xml:

+
+
+
    +
  • +

    Merge strategy javamerge (merges two Java resources and keeps the existing Java elements on conflicts)

    +
  • +
  • +

    Merge strategy javamerge_override (merges two Java resources and overrides the existing Java elements on conflicts)

    +
  • +
+
+
+

In general merging of two Java sources will be processed as follows:

+
+
+

Precondition of processing a merge of generated contents and existing ones is a common Java root class resp. surrounding class. If this is the case this class and all further inner classes will be merged recursively. Therefore, the following Java elements will be merged and conflicts will be resolved according to the configured merge strategy:

+
+
+
    +
  • +

    extends and implements relations of a class: Conflicts can only occur for the extends relation.

    +
  • +
  • +

    Annotations of a class: Conflicted if an annotation declaration already exists.

    +
  • +
  • +

    Fields of a class: Conflicted if there is already a field with the same name in the existing sources. (Will be replaced / ignored in total, also including annotations)

    +
  • +
  • +

    Methods of a class: Conflicted if there is already a method with the same signature in the existing sources. (Will be replaced / ignored in total, also including annotations)

    +
  • +
+
+ +
+

==Property Plug-in +The CobiGen Property Plug-in currently only provides different merge mechanisms for documents written in Java property syntax.

+
+
+
+
Merger extensions
+
+

There are two merge strategies for Java properties, which can be configured in the templates.xml:

+
+
+
    +
  • +

    Merge strategy propertymerge (merges two properties documents and keeps the existing properties on conflicts)

    +
  • +
  • +

    Merge strategy propertymerge_override (merges two properties documents and overrides the existing properties on conflicts)

    +
  • +
+
+
+

Both documents (base and patch) will be parsed using the Java 7 API and will be compared according to their keys. Conflicts will occur if a key in the patch already exists in the base document.

+
+ +
+

==XML Plug-in +The CobiGen XML Plug-in comes with an input reader for XML artifacts, XML related trigger and matchers and provides different merge mechanisms for XML result documents.

+
+
+
+
Trigger extension
+
+

(since cobigen-xmlplugin v2.0.0)

+
+
+

The XML Plug-in provides a trigger for XML related inputs. It accepts XML documents as input (see XML input reader) and provides additional matching and variable assignment mechanisms. The configuration in the context.xml for this trigger looks like this:

+
+
+
    +
  • +

    type 'xml'

    +
    +
    Example of a XML trigger definition.
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables XML documents as inputs.

    +
    +
  • +
  • +

    type xpath

    +
    +
    Example of a xpath trigger definition.
    +
    +
    <trigger id="..." type="xpath" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables XML documents as container inputs, which consists of several sub-documents.

    +
    +
  • +
+
+
+
Container Matcher type
+
+

A ContainerMatcher checks whether the input is a valid container.

+
+
+
    +
  • +

    xpath: type: xpath

    +
    +
    Example of a XML trigger definition with a node name matcher.
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <containerMatcher type="xpath" value="./uml:Model//packagedElement[@xmi:type='uml:Class']">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    Before applying any Matcher, this containerMatcher checks if the XML file contains a node uml:Model with a child node packagedElement which contains an attribute xmi:type with the value uml:Class.

    +
    +
  • +
+
+
+
+
Matcher types
+
+

With the trigger you might define matchers, which restrict the input upon specific aspects:

+
+
+
    +
  • +

    XML: type nodename → document’s root name matching

    +
    +
    Example of a XML trigger definition with a node name matcher
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <matcher type="nodename" value="\D\w*">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the root name of the declaring input document matches the given regular expression (value).

    +
    +
  • +
  • +

    xpath: type: xpath → matching a node with a xpath value

    +
    +
    Example of a xpath trigger definition with a xpath matcher.
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <matcher type="xpath" value="/packagedElement[@xmi:type='uml:Class']">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the XML file contains a node /packagedElement where the xmi:type property equals uml:Class.

    +
    +
  • +
+
+
+
+
Variable Assignment types
+
+

Furthermore, it provides the ability to extract information from each input for further processing in the templates. The values assigned by variable assignments will be made available in template and the destinationPath of context.xml through the namespace variables.<key>. The XML Plug-in currently provides only one mechanism:

+
+
+
    +
  • +

    type 'constant' → constant parameter

    +
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <matcher type="nodename" value="\D\w*">
    +        <variableAssignment type="constant" key="domain" value="restaurant" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value to the key as a constant.

+
+
+
+
XML input reader
+
+

The CobiGen XML Plug-in implements an input reader for parsed XML documents. So API users can pass org.w3c.dom.Document objects for generation. For getting the right parsed XML inputs you can easily use the xmlplugin.util.XmlUtil, which provides static functionality to parse XML files or input streams and get the appropriate Document object.

+
+
+Template object +
+

Due to the heterogeneous structure an XML document can have, the XML input reader does not always create exactly the same model structure (in contrast to the java input reader). For example the model’s depth differs strongly, according to its input document. To allow navigational access to the nodes, the model also depends on the document’s element’s node names. All child elements with unique names are directly accessible via their names. In addition it is possible to iterate over all child elements with the help of the child list Children. So it is also possible to access child elements with non-unique names.

+
+
+

The XML input reader will create the following object model for template creation (EXAMPLEROOT, EXAMPLENODE1, EXAMPLENODE2, EXAMPLEATTR1,…​ are just used here as examples. Of course they will be replaced later by the actual node or attribute names):

+
+
+
    +
  • +

    ~EXAMPLEROOT~ ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      _nodeName_ ('String' :: Simple name of the root node)

      +
    • +
    • +

      _text_ ('String' :: Concatenated text content (PCDATA) of the root node)

      +
    • +
    • +

      TextNodes ('List<String>' :: List of all the root’s text node contents)

      +
    • +
    • +

      _at_~EXAMPLEATTR1~ ('String' :: String representation of the attribute’s value)

      +
    • +
    • +

      _at_~EXAMPLEATTR2~ ('String' :: String representation of the attribute’s value)

      +
    • +
    • +

      _at_…​

      +
    • +
    • +

      Attributes ('List<Map<String, Object>>' :: List of the root’s attributes

      +
      +
        +
      • +

        at ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          _attName_ ('String' :: Name of the attribute)

          +
        • +
        • +

          _attValue_ ('String' :: String representation of the attribute’s value)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      Children ('List<Map<String, Object>>' :: List of the root’s child elements

      +
      +
        +
      • +

        child ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          …​common element sub structure…​

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      ~EXAMPLENODE1~ ('Map<String, Object>' :: One of the root’s child nodes)

      +
      +
        +
      • +

        …​common element structure…​

        +
      • +
      +
      +
    • +
    • +

      ~EXAMPLENODE2~ ('Map<String, Object>' :: One of the root’s child nodes)

      +
      +
        +
      • +

        …​common element sub structure…​

        +
      • +
      • +

        ~EXAMPLENODE21~ ('Map<String, Object>' :: One of the nodes' child nodes)

        +
        +
          +
        • +

          …​common element structure…​

          +
        • +
        +
        +
      • +
      • +

        ~EXAMPLENODE…​~

        +
      • +
      +
      +
    • +
    • +

      ~EXAMPLENODE…​~

      +
    • +
    +
    +
  • +
+
+
+

In contrast to the java input reader, this XML input reader currently does not provide any additional template methods.

+
+
+
+
+
+
Merger extensions
+
+

The XML plugin uses the LeXeMe merger library to produce semantically correct merge products. The merge strategies can be found in the MergeType enum and can be configured in the templates.xml as a mergeStrategy attribute:

+
+
+
    +
  • +

    mergeStrategy xmlmerge

    +
    +
    Example of a template using the mergeStrategy xmlmerge
    +
    +
    <templates>
    +	<template name="..." destinationPath="..." templateFile="..." mergeStrategy="xmlmerge"/>
    +</templates>
    +
    +
    +
  • +
+
+
+

Currently only the document types included in LeXeMe are supported. +On how the merger works consult the LeXeMe Wiki.

+
+ +
+

==Text Merger Plug-in +The Text Merger Plug-in enables merging result free text documents to existing free text documents. Therefore, the algorithms are also very rudimentary.

+
+
+
+
Merger extensions
+
+

There are currently three main merge strategies that apply for the whole document:

+
+
+
    +
  • +

    merge strategy textmerge_append (appends the text directly to the end of the existing document) +_Remark_: If no anchors are defined, this will simply append the patch.

    +
  • +
  • +

    merge strategy textmerge_appendWithNewLine (appends the text after adding a new line break to the existing document) +_Remark_: empty patches will not result in appending a new line any more since v1.0.1 +Remark: Only suitable if no anchors are defined, otherwise it will simply act as textmerge_append

    +
  • +
  • +

    merge strategy textmerge_override (replaces the contents of the existing file with the patch) +_Remark_: If anchors are defined, override is set as the default mergestrategy for every text block if not redefined in an anchor specification.

    +
  • +
+
+
+
+
Anchor functionality
+
+

If a template contains text that fits the definition of anchor:${documentpart}:${mergestrategy}:anchorend or more specifically the regular expression (.*)anchor:([^:]+):(newline_)?([^:]+)(_newline)?:anchorend\\s*(\\r\\n|\\r|\\n), some additional functionality becomes available about specific parts of the incoming text and the way it will be merged with the existing text. These anchors always change things about the text to come up until the next anchor; text before it is ignored.

+
+
+

If no anchors are defined, the complete patch will be appended depending on your choice for the template in the file templates.xml.

+
+
+

[[anchordef]]

+
+
+
Anchor Definition
+
+

Anchors should always be defined as a comment of the language the template results in, as you do not want them to appear in your readable version, but cannot define them as FreeMarker comments in the template, or the merger will not know about them. +Anchors will also be read when they are not comments due to the merger being able to merge multiple types of text-based languages, thus making it practically impossible to filter for the correct comment declaration. That is why anchors have to always be followed by line breaks. That way there is a universal way to filter anchors that should have anchor functionality and ones that should appear in the text. +Remark: If the resulting language has closing tags for comments, they have to appear in the next line. +Remark: If you do not put the anchor into a new line, all the text that appears before it will be added to the anchor.

+
+
+
+
Document parts
+
+

In general, ${documentpart} is an id to mark a part of the document, that way the merger knows what parts of the text to merge with which parts of the patch (e.g. if the existing text contains anchor:table:${}:anchorend that part will be merged with the part tagged anchor:table:${}:anchorend of the patch).

+
+
+

If the same documentpart is defined multiple times, it can lead to errors, so instead of defining table multiple times, use table1, table2, table3 etc.

+
+
+

If a ${documentpart} is defined in the document but not in the patch and they are in the same position, it is processed in the following way: If only the documentparts header, test and footer are defined in the document in that order, and the patch contains header, order and footer, the resulting order will be header, test, order then footer.

+
+
+

The following documentparts have default functionality:

+
+
+
    +
  1. +

    anchor:header:${mergestrategy}:anchorend marks the beginning of a header, that will be added once when the document is created, but not again. +Remark: This is only done once, if you have header in another anchor, it will be ignored

    +
  2. +
  3. +

    anchor:footer:${mergestrategy}:anchorend marks the beginning of a footer, that will be added once when the document is created, but not again. Once this is invoked, all following text will be included in the footer, including other anchors.

    +
  4. +
+
+
+

[[mergestrategies]]

+
+
+
+
Mergestrategies
+
+

Mergestrategies are only relevant in the patch, as the merger is only interested in how text in the patch should be managed, not how it was managed in the past.

+
+
+
    +
  1. +

    anchor:${documentpart}::anchorend will use the merge strategy from templates.xml, see Merger-Extensions.

    +
  2. +
  3. +

    anchor:${}:${mergestrategy}_newline:anchorend or anchor:${}:newline_${mergestrategy}:anchorend states that a new line should be appended before or after this anchors text, depending on where the newline is (before or after the mergestrategy). anchor:${documentpart}:newline:anchorend puts a new line after the anchors text. +Remark: Only works with appending strategies, not merging/replacing ones. These strategies currently include: appendbefore, append/appendafter

    +
  4. +
  5. +

    anchor:${documentpart}:override:anchorend means that the new text of this documentpart will replace the existing one completely

    +
  6. +
  7. +

    anchor:${documentpart}:appendbefore:anchorend or anchor:${documentpart}:appendafter:anchorend/anchor:${documentpart}:append:anchorend specifies whether the text of the patch should come before the existing text or after.

    +
  8. +
+
+
+
+
+
Usage Examples
+
+
General
+
+

Below you can see how a file with anchors might look like (using adoc comment tags), with examples of what you might want to use the different functions for.

+
+
+
+
// anchor:header:append:anchorend
+
+Table of contents
+Introduction/Header
+
+// anchor:part1:appendafter:anchorend
+
+Lists
+Table entries
+
+// anchor:part2:nomerge:anchorend
+
+Document Separators
+adoc table definitions
+
+// anchor:part3:override:anchorend
+
+Anything that you only want once but changes from time to time
+
+// anchor:footer:append:anchorend
+
+Copyright Info
+Imprint
+
+
+
+
+
Merging
+
+

In this section you will see a comparison on what files look like before and after merging

+
+
+override +
+
Before
+
+
// anchor:part:override:anchorend
+Lorem Ipsum
+
+
+
+
Patch
+
+
// anchor:part:override:anchorend
+Dolor Sit
+
+
+
+
After
+
+
// anchor:part:override:anchorend
+Dolor Sit
+
+
+
+
+Appending +
+
Before
+
+
// anchor:part:append:anchorend
+Lorem Ipsum
+// anchor:part2:appendafter:anchorend
+Lorem Ipsum
+// anchor:part3:appendbefore:anchorend
+Lorem Ipsum
+
+
+
+
Patch
+
+
// anchor:part:append:anchorend
+Dolor Sit
+// anchor:part2:appendafter:anchorend
+Dolor Sit
+// anchor:part3:appendbefore:anchorend
+Dolor Sit
+
+
+
+
After
+
+
// anchor:part:append:anchorend
+Lorem Ipsum
+Dolor Sit
+// anchor:part2:appendafter:anchorend
+Lorem Ipsum
+Dolor Sit
+// anchor:part3:appendbefore:anchorend
+Dolor Sit
+Lorem Ipsum
+
+
+
+
+Newline +
+
Before
+
+
// anchor:part:newline_append:anchorend
+Lorem Ipsum
+// anchor:part:append_newline:anchorend
+Lorem Ipsum
+(end of file)
+
+
+
+
Patch
+
+
// anchor:part:newline_append:anchorend
+Dolor Sit
+// anchor:part:append_newline:anchorend
+Dolor Sit
+(end of file)
+
+
+
+
After
+
+
// anchor:part:newline_append:anchorend
+Lorem Ipsum
+
+Dolor Sit
+// anchor:part:append_newline:anchorend
+Lorem Ipsum
+Dolor Sit
+
+(end of file)
+
+
+
+
+
+
+
Error List
+
+
    +
  • +

    If there are anchors in the text, but either base or patch does not start with one, the merging process will be aborted, as text might go missing this way.

    +
  • +
  • +

    Using _newline or newline_ with mergestrategies that don’t support it, like override, will abort the merging process. See <<`mergestrategies`,Merge Strategies>> →2 for details.

    +
  • +
  • +

    Using undefined mergestrategies will abort the merging process.

    +
  • +
  • +

    Wrong anchor definitions, for example anchor:${}:anchorend will abort the merging process, see <<`anchordef`,Anchor Definition>> for details.

    +
  • +
+
+ +
+

==JSON Plug-in +At the moment the plug-in can be used to merge generic JSON files depending on the merge strategy defined in the templates.

+
+
+
+
Merger extensions
+
+

There are currently these merge strategies:

+
+
+

Generic JSON Merge

+
+
+
    +
  • +

    merge strategy jsonmerge (add the new code respecting the existent in case of conflict)

    +
  • +
  • +

    merge strategy jsonmerge_override (add the new code overwriting the existent in case of conflict)

    +
    +
      +
    1. +

      JsonArray’s will be ignored / replaced in total

      +
    2. +
    3. +

      JsonObjects in conflict will be processed recursively, adding non-existent elements.

      +
    4. +
    +
    +
  • +
+
+
+
+
Merge Process
+
+
Generic JSON Merging
+
+

The merge process will be:

+
+
+
    +
  1. +

    Add non existent JSON Objects from patch file to base file.

    +
  2. +
  3. +

    For existent object in both files, will add non existent keys from patch to base object. This process will be done recursively for all existent objects.

    +
  4. +
  5. +

    For JSON Arrays existent in both files, the arrays will be just concatenated.

    +
  6. +
+
+ +
+

==TypeScript Plug-in

+
+
+

The TypeScript Plug-in enables merging result TS files to existing ones. This plug-in is used at the moment to generate an Angular2 client with all CRUD functionalities enabled. The plug-in also generates i18n functionality by appending the ES or EN suffix at the end of the word, to make the developer aware that these words must be translated to the corresponding language. Currently, the generation of the Angular2 client requires an ETO java object as input, so there is no need to implement an input reader for ts artifacts for the moment.

+
+
+
+
+
Trigger Extensions
+
+

As for the Angular2 generation the input is a java object, the trigger expressions (including matchers and variable assignments) are implemented as Java.

+
+
+
+
Merger extensions
+
+

This plugin uses the TypeScript Merger to merge files. There are currently two merge strategies:

+
+
+
    +
  • +

    merge strategy tsmerge (add the new code respecting the existing in case of conflict)

    +
  • +
  • +

    merge strategy tsmerge_override (add the new code overwriting the existent in case of conflict)

    +
  • +
+
+
+

The merge algorithm mainly handles the following AST nodes:

+
+
+
    +
  • +

    ImportDeclaration

    +
    +
      +
    • +

      Will add non existent imports whatever the merge strategy is.

      +
    • +
    • +

      For different imports from same module, the import clauses will be merged.

      +
      +
      +
      import { a } from 'b';
      +import { c } from 'b';
      +//Result
      +import { a, c } from 'b';
      +
      +
      +
    • +
    +
    +
  • +
  • +

    ClassDeclaration

    +
    +
      +
    • +

      Adds non existent base properties from patch based on the name property.

      +
    • +
    • +

      Adds non existent base methods from patch based on the name signature.

      +
    • +
    • +

      Adds non existent annotations to class, properties and methods.

      +
    • +
    +
    +
  • +
  • +

    PropertyDeclaration

    +
    +
      +
    • +

      Adds non existent decorators.

      +
    • +
    • +

      Merge existent decorators.

      +
    • +
    • +

      With override strategy, the value of the property will be replaced by the patch value.

      +
    • +
    +
    +
  • +
  • +

    MethodDeclaration

    +
    +
      +
    • +

      With override strategy, the body will be replaced.

      +
    • +
    • +

      The parameters will be merged.

      +
    • +
    +
    +
  • +
  • +

    ParameterDeclaration

    +
    +
      +
    • +

      Replace type and modifiers with override merge strategy, adding non existent from patch into base.

      +
    • +
    +
    +
  • +
  • +

    ConstructorDeclaration

    +
    +
      +
    • +

      Merged in the same way as Method is.

      +
    • +
    +
    +
  • +
  • +

    FunctionDeclaration

    +
    +
      +
    • +

      Merged in the same way as Method is.

      +
    • +
    +
    +
  • +
+
+
+
+
Input reader
+
+

The TypeScript input reader is based on the one that the TypeScript merger uses. The current extensions are additional module fields indicating from which library an entity originates. +module: null specifies a standard entity or type such as string or number.

+
+
+
Object model
+
+

To get a first impression of the created object after parsing, let us start with analyzing a small example, namely the parsing of a simple type-orm model written in TypeScript.

+
+
+
+
import {Entity, PrimaryGeneratedColumn, Column} from "typeorm";
+
+@Entity()
+export class User {
+
+    @PrimaryGeneratedColumn()
+    id: number;
+
+    @Column()
+    firstName: string;
+
+    @Column()
+    lastName: string;
+
+    @Column()
+    age: number;
+
+}
+
+
+
+

The returned object has the following structure

+
+
+
+
{
+  "importDeclarations": [
+    {
+      "module": "typeorm",
+      "named": [
+        "Entity",
+        "PrimaryGeneratedColumn",
+        "Column"
+      ],
+      "spaceBinding": true
+    }
+  ],
+  "classes": [
+    {
+      "identifier": "User",
+      "modifiers": [
+        "export"
+      ],
+      "decorators": [
+        {
+          "identifier": {
+            "name": "Entity",
+            "module": "typeorm"
+          },
+          "isCallExpression": true
+        }
+      ],
+      "properties": [
+        {
+          "identifier": "id",
+          "type": {
+            "name": "number",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "PrimaryGeneratedColumn",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        },
+        {
+          "identifier": "firstName",
+          "type": {
+            "name": "string",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "Column",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        },
+        {
+          "identifier": "lastName",
+          "type": {
+            "name": "string",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "Column",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        },
+        {
+          "identifier": "age",
+          "type": {
+            "name": "number",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "Column",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        }
+      ]
+    }
+  ]
+}
+
+
+
+

If we only consider the first level of the JSON response, we spot two lists of imports and classes, providing information about the only import statement and the only User class, respectively. Moving one level deeper we observe that:

+
+
+
    +
  • +

    Every import statement is translated to an import declaration entry in the declarations list, containing the module name, as well as a list of entities imported from the given module.

    +
  • +
  • +

    Every class entry provides besides the class identifier, its decoration(s), modifier(s), as well as a list of properties that the original class contains.

    +
  • +
+
+
+

Note that, for each given type, the module from which it is imported is also given as in

+
+
+
+
  "identifier": {
+    "name": "Column",
+    "module": "typeorm"
+  }
+
+
+
+

Returning to the general case, independently from the given TypeScript file, an object having the following structure will be created.

+
+
+
    +
  • +

    importDeclarations: A list of import statements as described above

    +
  • +
  • +

    exportDeclarations: A list of export declarations

    +
  • +
  • +

    classes: A list of classes extracted from the given file, where each entry is full of class specific fields, describing its properties and decorator for example.

    +
  • +
  • +

    interfaces: A list of interfaces.

    +
  • +
  • +

    variables: A list of variables.

    +
  • +
  • +

    functions: A list of functions.

    +
  • +
  • +

    enums: A list of enumerations.

    +
  • +
+
+ +
+

==HTML Plug-in

+
+
+

The HTML Plug-in enables merging result HTML files to existing ones. This plug-in is used at the moment to generate an Angular2 client. Currently, the generation of the Angular2 client requires an ETO java object as input, so there is no need to implement an input reader for ts artifacts for the moment.

+
+
+
+
+
Trigger Extensions
+
+

As for the Angular2 generation the input is a java object, the trigger expressions (including matchers and variable assignments) are implemented as Java.

+
+
+
+
Merger extensions
+
+

There are currently two merge strategies:

+
+
+
    +
  • +

    merge strategy html-ng* (add the new code respecting the existing in case of conflict)

    +
  • +
  • +

    merge strategy html-ng*_override (add the new code overwriting the existent in case of conflict)

    +
  • +
+
+
+

The merging of two Angular2 files will be processed as follows:

+
+
+

The merge algorithm handles the following AST nodes:

+
+
+
    +
  • +

    md-nav-list

    +
  • +
  • +

    a

    +
  • +
  • +

    form

    +
  • +
  • +

    md-input-container

    +
  • +
  • +

    input

    +
  • +
  • +

    name (for name attribute)

    +
  • +
  • +

    ngIf

    +
  • +
+
+
+ + + + + +
+ + +Be aware, that the HTML merger is not generic and only handles the described tags needed for merging code of a basic Angular client implementation. For future versions, it is planned to implement a more generic solution. +
+
+
+
+
+
+

Maven Build Integration

+
+ +
+

==Maven Build Integration

+
+
+

For maven integration of CobiGen you can include the following build plugin into your build:

+
+
+
Build integration of CobiGen
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>com.devonfw.cobigen</groupId>
+      <artifactId>cobigen-maven-plugin</artifactId>
+      <version>VERSION-YOU-LIKE</version>
+      <executions>
+        <execution>
+          <id>cobigen-generate</id>
+          <phase>generate-resources</phase>
+          <goals>
+            <goal>generate</goal>
+          </goals>
+        </execution>
+      </executions>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+

Available goals

+
+
+
    +
  • +

    generate: Generates contents configured by the standard non-compiled configuration folder. Thus generation can be controlled/configured via a location URI of the configuration and template or increment ids to be generated for a set of inputs.

    +
  • +
+
+
+

Available phases are all phases, which already provide compiled sources such that CobiGen can perform reflection on it. Thus possible phases are for example package, site.

+
+
+

Provide Template Set

+
+

For generation using the CobiGen maven plug-in, the CobiGen configuration can be provided in two different styles:

+
+
+
    +
  1. +

    By a configurationFolder, which should be available on the file system whenever you are running the generation. The value of configurationFolder should correspond to the maven file path syntax.

    +
    +
    Provide CobiGen configuration by configuration folder (file)
    +
    +
    <build>
    +  <plugins>
    +    <plugin>
    +      ...
    +      <configuration>
    +        <configurationFolder>cobigen-templates</configurationFolder>
    +      </configuration>
    +       ...
    +     </plugin>
    +  </plugins>
    +</build>
    +
    +
    +
  2. +
  3. +

    By maven dependency, whereas the maven dependency should stick on the same conventions as the configuration folder. This explicitly means that it should contain non-compiled resources as well as the context.xml on top-level.

    +
    +
    Provide CobiGen configuration by maven dependency (jar)
    +
    +
    <build>
    +  <plugins>
    +    <plugin>
    +      ...
    +      <dependencies>
    +        <dependency>
    +          <groupId>com.devonfw.cobigen</groupId>
    +          <artifactId>templates-XYZ</artifactId>
    +          <version>VERSION-YOU-LIKE</version>
    +        </dependency>
    +      </dependencies>
    +      ...
    +    </plugin>
    +  </plugins>
    +</build>
    +
    +
    +
    +

    We currently provide a generic deployed version of the templates on the devonfw-nexus for Register Factory (<artifactId>cobigen-templates-rf</artifactId>) and for the devonfw itself (<artifactId>cobigen-templates-devonfw</artifactId>).

    +
    +
  4. +
+
+
+
+

Build Configuration

+
+

Using the following configuration you will be able to customize your generation as follows:

+
+
+
    +
  • +

    <destinationRoot> specifies the root directory the relative destinationPath of CobiGen templates configuration should depend on. Default ${basedir}

    +
  • +
  • +

    <inputPackage> declares a package name to be used as input for batch generation. This refers directly to the CobiGen Java Plug-in container matchers of type package configuration.

    +
  • +
  • +

    <inputFile> declares a file to be used as input. The CobiGen maven plug-in will try to parse this file to get an appropriate input to be interpreted by any CobiGen plug-in.

    +
  • +
  • +

    <increment> specifies an increment ID to be generated. You can specify one single increment with content ALL to generate all increments matching the input(s).

    +
  • +
  • +

    <template> specifies a template ID to be generated. You can specify one single template with content ALL to generate all templates matching the input(s).

    +
  • +
  • +

    <forceOverride> specifies an overriding behavior, which enables non-mergeable resources to be completely rewritten by generated contents. For mergeable resources this flag indicates, that conflicting fragments during merge will be replaced by generated content. Default: false

    +
  • +
  • +

    <failOnNothingGenerated> specifies whether the build should fail if the execution does not generate anything.

    +
  • +
+
+
+
Example for a simple build configuration
+
+
<build>
+  <plugins>
+    <plugin>
+       ...
+      <configuration>
+        <destinationRoot>${basedir}</destinationRoot>
+        <inputPackages>
+          <inputPackage>package.to.be.used.as.input</inputPackage>
+        </inputPackages>
+        <inputFiles>
+          <inputFile>path/to/file/to/be/used/as/input</inputFile>
+        </inputFiles>
+        <increments>
+          <increment>IncrementID</increment>
+        </increments>
+        <templates>
+          <template>TemplateID</template>
+        </templates>
+        <forceOverride>false</forceOverride>
+      </configuration>
+        ...
+    </plugin>
+  </plugins>
+</build>
+
+
+
+
+

Plugin Injection Since v3

+
+

Since version 3.0.0, the plug-in mechanism has changed to support modular releases of the CobiGen plug-ins. Therefore, you need to add all plug-ins to be used for generation. Take the following example to get the idea:

+
+
+
Example of a full configuration including plugins
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>com.devonfw.cobigen</groupId>
+      <artifactId>cobigen-maven-plugin</artifactId>
+      <version>VERSION-YOU-LIKE</version>
+      <executions>
+        ...
+      </executions>
+      <configuration>
+        ...
+      </configuration>
+      <dependencies>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>templates-devon4j</artifactId>
+          <version>2.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>tempeng-freemarker</artifactId>
+          <version>1.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>javaplugin</artifactId>
+          <version>1.6.0</version>
+        </dependency>
+      </dependencies>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+
+

A full example

+
+
    +
  1. +

    A complete maven configuration example

    +
  2. +
+
+
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>com.devonfw.cobigen</groupId>
+      <artifactId>cobigen-maven-plugin</artifactId>
+      <version>6.0.0</version>
+      <executions>
+        <execution>
+          <id>generate</id>
+          <phase>package</phase>
+          <goals>
+            <goal>generate</goal>
+          </goals>
+        </execution>
+      </executions>
+      <configuration>
+        <inputFiles>
+          <inputFile>src/main/java/io/github/devonfw/cobigen/generator/dataaccess/api/InputEntity.java</inputFile>
+        </inputFiles>
+        <increments>
+          <increment>dataaccess_infrastructure</increment>
+          <increment>daos</increment>
+        </increments>
+        <failOnNothingGenerated>false</failOnNothingGenerated>
+      </configuration>
+      <dependencies>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>templates-devon4j</artifactId>
+          <version>2.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>tempeng-freemarker</artifactId>
+          <version>2.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>javaplugin</artifactId>
+          <version>1.6.0</version>
+        </dependency>
+      </dependencies>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+
+
+
+

Eclipse Integration

+
+ +
+

==Installation

+
+
+
+
+

Remark: CobiGen is preinstalled in the devonfw/devon-ide.

+
+
+
+
+

Preconditions

+
+
    +
  • +

    Eclipse 4.x

    +
  • +
  • +

    Java 7 Runtime (for starting eclipse with CobiGen). This is independent from the target version of your developed code.

    +
  • +
+
+
+
+

Installation steps

+
+
    +
  1. +

    Open the eclipse installation dialog
    +menu bar → HelpInstall new Software…​

    +
    +

    01 install new software

    +
    +
  2. +
  3. +

    Open CobiGen’s update site
    +Insert the update site of your interest into the field Work with and press Add …​
    +Unless you know what you are doing we recommend you install every plugin as shown in the picture below.

    +
    + +
    +
  4. +
  5. +

    Follow the installation wizard
    +Select CobiGen Eclipse Plug-inNextNext → accept the license → FinishOKYes

    +
  6. +
  7. +

    Once installed, a new menu entry named "CobiGen" will show up in the Package Explorer’s context menu. In the sub menu there will be the Generate…​ command, which may ask you to update the templates, and then you can start the generation wizard of CobiGen. You can adapt the templates by clicking on Adapt Templates which will give you the possibility to import the CobiGen_Templates automatically so that you can modify them.

    +
  8. +
  9. +

    Checkout (clone) your project’s templates folder or use the current templates released with CobiGen (https://github.com/devonfw/cobigen/tree/master/cobigen-templates) and then choose Import -> General -> Existing Projects into Workspace to import the templates into your workspace.

    +
  10. +
  11. +

    Now you can start generating. To get an introduction of CobiGen try the devon4j templates and work on the devon4j sample application. There you might want to start with Entity objects as a selection to run CobiGen with, which will give you a good overview of what CobiGen can be used for right out of the box in devon4j based development. If you need some more introduction in how to come up with your templates and increments, please be referred to the documentation of the context configuration and the templates configuration

    +
  12. +
+
+
+

Dependent on your context configuration menu entry Generate…​ may be gray out or not. See for more information about valid selections for generation.

+
+
+
+

Updating

+
+

In general updating CobiGen for eclipse is done via the update mechanism of eclipse directly, as shown on image below:

+
+
+

03 update software

+
+
+

Upgrading eclipse CobiGen plug-in to v3.0.0 needs some more attention of the user due to a changed plug-in architecture of CobiGen’s core module and the eclipse integration. Eventually, we were able to provide any plug-in of CobiGen separately as its own eclipse bundle (fragment), which is automatically discovered by the main CobiGen Eclipse plug-in after installation.

+
+ +
+

==Usage

+
+
+

CobiGen has two different generation modes depending on the input selected for generation. The first one is the simple mode, which will be started if the input contains only one input artifact, e.g. for Java an input artifact currently is a Java file. The second one is the batch mode, which will be started if the input contains multiple input artifacts, e.g. for Java this means a list of files. In general this means also that the batch mode might be started when selecting complex models as inputs, which contain multiple input artifacts. The latter scenario has only been covered in the research group, yet.

+
+
+
+

Simple Mode

+
+

Selecting the menu entry Generate…​ the generation wizard will be opened:

+
+
+

generate wizard page1

+
+
+

The left side of the wizard shows all available increments, which can be selected to be generated. Increments are a container like concept encompassing multiple files to be generated, which should result in a semantically closed generation output. +On the right side of the wizard all files are shown, which might be effected by the generation - dependent on the increment selection of files on the left side. The type of modification of each file will be encoded into following color scheme if the files are selected for generation:

+
+
+
    +
  • +

    green: files, which are currently non-existent in the file system. These files will be created during generation

    +
  • +
  • +

    yellow: files, which are currently existent in the file system and which are configured to be merged with generated contents.

    +
  • +
  • +

    red: files, which are currently existent in the file system. These files will be overwritten if manually selected.

    +
  • +
  • +

    no color: files, which are currently existent in the file system. Additionally files, which were deselected and thus will be ignored during generation.

    +
  • +
+
+
+

Selecting an increment on the left side will initialize the selection of all shown files to be generated on the right side, whereas green and yellow categorized files will be selected initially. A manual modification of the pre-selection can be performed by switching to the customization tree using the Customize button on the right lower corner.

+
+
+
+
+

Optional: If you want to customize the generation object model of a Java input class, you might continue with the Next > button instead of finishing the generation wizard. The next generation wizard page is currently available for Java file inputs and lists all non-static fields of the input. deselecting entries will lead to an adapted object model for generation, such that deselected fields will be removed in the object model for generation. By default all fields will be included in the object model.

+
+
+
+
+

Using the Finish button, the generation will be performed. Finally, CobiGen runs the eclipse internal organize imports and format source code for all generated sources and modified sources. Thus it is possible that — especially during organize imports — a dialog opens if some types could not be determined automatically. This dialog can be easily closed by pressing on Continue. If the generation is finished, the Success! dialog will pop up.

+
+
+
+

Batch mode

+
+

If there are multiple input elements selected, e.g., Java files, CobiGen will be started in batch mode. For the generation wizard dialog this means, that the generation preview will be constrained to the first selected input element. It does not preview the generation for each element of the selection or of a complex input. The files selected for generation will be generated for each input element analogously afterwards.

+
+
+

generate wizard page1 batch

+
+
+

Thus the color encoding differs also a little bit:

+
+
+
    +
  • +

    yellow: files, which are configured to be merged.

    +
  • +
  • +

    red: files, which are not configured with any merge strategy and thus will be created if the file does not exist or overwritten if the file already exists

    +
  • +
  • +

    no color: files, which will be ignored during generation

    +
  • +
+
+
+

Initially all possible files to be generated will be selected.

+
+
+
+

Health Check

+
+

To check whether CobiGen runs appropriately for the selected element(s) the user can perform a Health Check by activating the respective menu entry as shown below.

+
+
+

health check menu entry

+
+
+

The simple Health Check includes 3 checks. As long as any of these steps fails, the Generate menu entry is grayed out.

+
+
+

The first step is to check whether the generation configuration is available at all. If this check fails you will see the following message:

+
+
+

health check no templates

+
+
+

This indicates, that there is no Project named CobiGen_Templates available in the current workspace. To run CobiGen appropriately, it is necessary to have a configuration project named CobiGen_Templates imported into your workspace. For more information see chapter Eclipse Installation.

+
+
+

The second step is to check whether the template project includes a valid context.xml. If this check fails, you will see the following message:

+
+
+

health check invalid config

+
+
+

This means that either your context.xml

+
+
+
    +
  • +

    does not exist (or has another name)

    +
  • +
  • +

    or it is not valid one in any released version of CobiGen

    +
  • +
  • +

    or there is simply no automatic routine of upgrading your context configuration to a valid state.

    +
  • +
+
+
+

If all this is not the case, such as, there is a context.xml, which can be successfully read by CobiGen, you might get the following information:

+
+
+

health check old context

+
+
+

This means that your context.xml is available with the correct name but it is outdated (belongs to an older CobiGen version). In this case just click on Upgrade Context Configuration to get the latest version.

+
+
+
+
+

Remark: This will create a backup of your current context configuration and converts your old configuration to the new format. The upgrade will remove all comments from the file, which could be retrieved later on again from the backup. +If the creation of the backup fails, you will be asked to continue or to abort.

+
+
+
+
+

The third step checks whether there are templates for the selected element(s). If this check fails, you will see the following message:

+
+
+

health check no matching triggers

+
+
+

This indicates that no trigger has been activated which matches the current selection. The reason might be that your selection is faulty or that you imported the wrong template project (e.g. you are working on a devon4j project, but imported the Templates for the Register Factory). If you are a template developer, have a look at the trigger configuration and at the corresponding available plug-in implementations of triggers, like e.g., Java Plug-in or XML Plug-in.

+
+
+

If all the checks are passed you see the following message:

+
+
+

health check all OK

+
+
+

In this case everything is OK and the Generate button is not grayed out anymore so that you are able to trigger it and see the [simple-mode].

+
+
+

In addition to the basic check of the context configuration, you also have the opportunity to perform an Advanced Health Check, which will check all available templates configurations (templates.xml) of path-depth=1 from the configuration project root according to their compatibility.

+
+
+

health check advanced up to date

+
+
+

Analogous to the upgrade of the context configuration, the Advanced Health Check will also provide upgrade functionality for templates configurations if available.

+
+
+
+

Update Templates

+
+

Update Templates: Select an Entity file and right-click, then select CobiGen → Update Templates; after that, click on Download, and a message confirming the successful download will appear.

+
+
+
+

Adapt Templates

+
+

Adapt Templates: Select any file and right-click, then select CobiGen → Adapt Templates. If the CobiGen templates jar is not available, it is downloaded automatically. If the CobiGen templates are already present, the existing templates in the workspace will be overridden; click on OK, and a message confirming the successful import will appear.

+
+
+

Finally, please change the Java version of the project to 1.8 so that you don’t have any compilation errors.

+
+ +
+

==Logging

+
+
+

If you have any problem with the CobiGen eclipse plug-in, you might want to enable logging to provide more information for further problem analysis. This can be done easily by adding the logback.xml to the root of the CobiGen_templates configuration folder. The file should contain at least the following contents, whereas you should specify an absolute path to the target log file (at the TODO). If you are using the cobigen-templates project, you might have the contents already specified but partially commented.

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<!-- This file is for logback classic. The file contains the configuration for sl4j logging -->
+<configuration>
+    <appender name="FILE" class="ch.qos.logback.core.FileAppender">
+        <file><!-- TODO choose your log file location --></file>
+        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
+            <Pattern>%n%date %d{HH:mm:ss.SSS} [%thread] %-5level %logger - %msg%n
+            </Pattern>
+        </encoder>
+    </appender>
+    <root level="DEBUG">
+        <appender-ref ref="FILE" />
+    </root>
+</configuration>
+
+
+
+
+
+
+

Template Development

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/Guide-to-the-Reader.html b/docs/devonfw.github.io/1.0/cobigen.wiki/Guide-to-the-Reader.html new file mode 100644 index 00000000..f44f02ff --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/Guide-to-the-Reader.html @@ -0,0 +1,290 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==Guide to the Reader

+
+
+

Dependent on the intention you are reading this document, you might be most interested in the following chapters:

+
+
+
    +
  • +

    If this is your first contact with CobiGen, you will be interested in the general purpose of CobiGen, in the licensing of CobiGen, as well as in the Shared Service provided for CobiGen. Additionally, there are some general use cases, which are currently implemented and maintained to be used out of the box.

    +
  • +
  • +

    As a user of the CobiGen Eclipse integration, you should focus on the Installation and Usage chapters to get a good introduction about how to use CobiGen in eclipse.

    +
  • +
  • +

    As a user of the Maven integration, you should focus on the Maven configuration chapter, which guides you through the integration of CobiGen into your build configuration.

    +
  • +
  • +

    If you like to adapt the configuration of CobiGen, you have to step deeper into the configuration guide as well as into the plug-in configuration extensions for the Java Plug-in, XML-Plugin, Java Property Plug-in, as well as for the Text-Merger Plug-in.

    +
  • +
  • +

    Finally, if you want to develop your own templates, you will be thankful for helpful links in addition to the plug-ins documentation as referenced in the previous point.

    +
  • +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/Home.html b/docs/devonfw.github.io/1.0/cobigen.wiki/Home.html new file mode 100644 index 00000000..954376c2 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/Home.html @@ -0,0 +1,397 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==CobiGen - Code-based incremental Generator

+
+
+

Overview

+
+
+

CobiGen is a generic incremental generator for end-to-end code generation tasks, mostly used in Java projects. +Due to a template-based approach, CobiGen generates any set of text-based documents and document fragments.

+
+
+

Input (currently):

+
+
+
    +
  • +

    Java classes

    +
  • +
  • +

    XML-based files

    +
  • +
  • +

    OpenAPI documents

    +
  • +
  • +

    Possibly more inputs like WSDL, which is currently not implemented.

    +
  • +
+
+
+

Output:

+
+
+
    +
  • +

    any text-based document or document fragments specified by templates

    +
  • +
+
+
+
+
+

Architecture

+
+
+

CobiGen is built as an extensible framework for incremental code generation. It provides extension points for new input readers which allow reading new input types and converting them to an internally processed model. The model is used to process templates of different kinds to generate patches. The template processing will be done by different template engines. There is an extension point for template engines to support multiple ones as well. Finally, the patch will be structurally merged into potentially already existing code. To allow structural merge on different programming languages, the extension point for structural mergers has been introduced. Here you will see an overview of the currently available extension points and plug-ins:

+
+
+
+
+

Features and Characteristics

+
+
+
    +
  • +

    Generate fresh files across all the layers of an application - ready to run.

    +
  • +
  • +

    Add on to existing files merging code into it. E.g. generate new methods into existing java classes or adding nodes to an XML file. Merging of contents into existing files will be done using structural merge mechanisms.

    +
  • +
  • +

    Structural merge mechanisms are currently implemented for Java, XML, Java Property Syntax, JSON, Basic HTML, Text Append, TypeScript.

    +
  • +
  • +

    Conflicts can be resolved individually but automatically by former configuration for each template.

    +
  • +
  • +

    CobiGen provides an Eclipse integration as well as a Maven Integration.

    +
  • +
  • +

    CobiGen comes with an extensive documentation for users and developers.

    +
  • +
  • +

    Templates can be fully tailored to project needs - this is considered as a simple task.

    +
  • +
+
+
+
+
+

Selection of current and past CobiGen applications

+
+
+

General applications:

+
+
+
    +
  • +

    Generation of a Java CRUD application based on devonfw architecture including all software-layers on the server plus code for JS-clients (Angular). You can find details here.

    +
  • +
  • +

    Generation of a Java CRUD application according to the Register Factory architecture. Persistence entities are the input for generation.

    +
  • +
  • +

    Generation of builder classes for generating test data for JUnit-Tests. Input are the persistence entities.

    +
  • +
  • +

    Generation of an EXT JS 6 client with full CRUD operations connected to a devon4j server.

    +
  • +
  • +

    Generation of an Angular 6 client with full CRUD operations connected to a devon4j server.

    +
  • +
+
+
+

Project-specific applications in the past:

+
+
+
    +
  • +

    Generation of an additional Java type hierarchy on top of existing Java classes in combination with additional methods to be integrated in the modified classes. Hibernate entities were considered as input as well as output of the generation. The rational in this case, was to generate an additional business object hierarchy on top of an existing data model for efficient business processing.

    +
  • +
  • +

    Generation of hash- and equals-methods as well as copy constructors depending on the field types of the input Java class. Furthermore, CobiGen is able to re-generate these methods/constructors triggered by the user, i.e., when fields have been changed.

    +
  • +
  • +

    Extraction of JavaDoc of test classes and their methods for generating a csv test documentation. This test documentation has been further processed manually in Excel to provide a good overview about the currently available tests in the software system, which enables further human analysis.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-core_configuration.html b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-core_configuration.html new file mode 100644 index 00000000..b041129e --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-core_configuration.html @@ -0,0 +1,905 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==Configuration

+
+
+

CobiGen is maintaining a home directory further referenced in this documentation as $cghome, which is used to maintain temporary or transient data. The home folder is determined with the following location fall-back:

+
+
+
    +
  1. +

    System environment variable COBIGEN_HOME (e.g. C:\project\ide\conf\cobigen-home)

    +
  2. +
  3. +

    .cobigen directory in OS user home (e.g. ~/.cobigen)

    +
  4. +
+
+
+

The actual configuration of CobiGen is maintained by a single folder or jar. The location can be configured with respect to the implemented configuration fall-back mechanism. CobiGen will search for the location of the configuration in the following order:

+
+
+
    +
  1. +

    A configuration jar or directory, which is passed to CobiGen by the Maven or Eclipse integration or any other program using the CobiGen programming interface: +1.1. the Maven integration allows to configure a jar dependency to be included in the currently running classpath (of interest for maven configuration +1.2. the Eclipse integration allows to specify a CobiGen_Templates project in the eclipse workspace

    +
  2. +
  3. +

    The file $cghome/.cobigen exists and the property templates is set to a valid configuration (e.g. templates=C:\project\ide\conf\templates or templates=C:\project\ide\conf\templates.jar) Hint: Check for log entry like Value of property templates in $cghome/.cobigen is invalid to identify an invalid configuration which is not taken up as expected

    +
  4. +
  5. +

    The folder $cghome/templates/CobiGen_Templates exists

    +
  6. +
  7. +

    The lexicographical sorted first configuration jar of the following path pattern $cghome/templates/templates-([^-]+)-(\\d+\\.?)+.jar if exists (e.g. templates-devon4j-2020.04.001)

    +
  8. +
  9. +

    CobiGen will automatically download the latest jar configuration from maven central with groupId com.devonfw.cobigen and artifactId templates-devon4j and take it like described in 4.

    +
  10. +
+
+
+

Within the configuration jar or directory you will find the following structure:

+
+
+
+
CobiGen_Templates
+ |- templateFolder1
+    |- templates.xml
+ |- templateFolder2
+    |- templates.xml
+ |- context.xml
+
+
+
+

Find some examples here.

+
+
+

Context Configuration

+
+
+

The context configuration (context.xml) always has the following root structure:

+
+
+
Context Configuration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<contextConfiguration xmlns="http://capgemini.com"
+                      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                      version="1.0">
+    <triggers>
+        ...
+    </triggers>
+</contextConfiguration>
+
+
+
+

The context configuration has a version attribute, which should match the XSD version the context configuration is an instance of. It should not state the version of the currently released version of CobiGen. This attribute should be maintained by the context configuration developers. If configured correctly, it will provide a better feedback for the user and thus higher user experience. Currently there is only the version v1.0. For further version there will be a changelog later on.

+
+
+

Trigger Node

+
+

As children of the <triggers> node you can define different triggers. By defining a <trigger> you declare a mapping between special inputs and a templateFolder, which contains all templates, which are worth to be generated with the given input.

+
+
+
trigger configuration
+
+
<trigger id="..." type="..." templateFolder="..." inputCharset="UTF-8" >
+    ...
+</trigger>
+
+
+
+
    +
  • +

    The attribute id should be unique within an context configuration. It is necessary for efficient internal processing.

    +
  • +
  • +

    The attribute type declares a specific trigger interpreter, which might be provided by additional plug-ins. A trigger interpreter has to provide an input reader, which reads specific inputs and creates a template object model out of it to be processed by the FreeMarker template engine later on. Have a look at the plug-in’s documentation of your interest and see, which trigger types and thus inputs are currently supported.

    +
  • +
  • +

    The attribute templateFolder declares the relative path to the template folder, which will be used if the trigger gets activated.

    +
  • +
  • +

    The attribute inputCharset (optional) determines the charset to be used for reading any input file.

    +
  • +
+
+
+
+

Matcher Node

+
+

A trigger will be activated if its matchers hold the following formula:

+
+
+

!(NOT || …​ || NOT) && AND && …​ && AND && (OR || …​ || OR)

+
+
+

Whereas NOT/AND/OR describes the accumulationType of a matcher (see below) and e.g. NOT means 'a matcher with accumulationType NOT matches a given input'. Thus additionally to an input reader, a trigger interpreter has to define at least one set of matchers, which are satisfiable, to be fully functional. A <matcher> node declares a specific characteristics a valid input should have.

+
+
+
Matcher Configuration
+
+
<matcher type="..." value="..." accumulationType="...">
+    ...
+</matcher>
+
+
+
+
    +
  • +

    The attribute type declares a specific type of matcher, which has to be provided by the surrounding trigger interpreter. Have a look at the plug-in’s documentation, which also provides the used trigger type for more information about valid matcher and their functionalities.

    +
  • +
  • +

    The attribute value might contain any information necessary for processing the matcher’s functionality. Have a look at the relevant plug-in’s documentation for more detail.

    +
  • +
  • +

    The attribute accumulationType (optional) specifies how the matcher will influence the trigger activation. Valid values are:

    +
    +
      +
    • +

      OR (default): if any matcher of accumulation type OR matches, the trigger will be activated as long as there are no further matchers with different accumulation types

      +
    • +
    • +

      AND: if any matcher with AND accumulation type does not match, the trigger will not be activated

      +
    • +
    • +

      NOT: if any matcher with NOT accumulation type matches, the trigger will not be activated

      +
    • +
    +
    +
  • +
+
+
+
+

Variable Assignment Node

+
+

Finally, a <matcher> node can have multiple <variableAssignment> nodes as children. Variable assignments allow to parametrize the generation by additional values, which will be added to the object model for template processing. The variables declared using variable assignments, will be made accessible in the templates.xml as well in the object model for template processing via the namespace variables.*.

+
+
+
Complete Configuration Pattern
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<contextConfiguration xmlns="http://capgemini.com"
+                      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                      version="1.0">
+    <triggers>
+        <trigger id="..." type="..." templateFolder="...">
+            <matcher type="..." value="...">
+                <variableAssignment type="..." key="..." value="..." />
+            </matcher>
+        </trigger>
+    </triggers>
+</contextConfiguration>
+
+
+
+
    +
  • +

    The attribute type declares the type of variable assignment to be processed by the trigger interpreter providing plug-in. This attribute enables variable assignments with different dynamic value resolutions.

    +
  • +
  • +

    The attribute key declares the namespace under which the resolved value will be accessible later on.

    +
  • +
  • +

    The attribute value might declare a constant value to be assigned or any hint for value resolution done by the trigger interpreter providing plug-in. For instance, if type is regex, then on value you will assign the matched group number by the regex (1, 2, 3…​)

    +
  • +
+
+
+
+

Container Matcher Node

+
+

The <containerMatcher> node is an additional matcher for matching containers of multiple input objects. +Such a container might be a package, which encloses multiple types or---more generic---a model, which encloses multiple elements. A container matcher can be declared side by side with other matchers:

+
+
+
ContainerMatcher Declaration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<contextConfiguration xmlns="http://capgemini.com"
+                      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                      version="1.0">
+    <triggers>
+        <trigger id="..." type="..." templateFolder="..." >
+            <containerMatcher type="..." value="..." retrieveObjectsRecursively="..." />
+            <matcher type="..." value="...">
+                <variableAssignment type="..." variable="..." value="..." />
+            </matcher>
+        </trigger>
+    </triggers>
+</contextConfiguration>
+
+
+
+
    +
  • +

    The attribute type declares a specific type of matcher, which has to be provided by the surrounding trigger interpreter. Have a look at the plug-in’s documentation, which also provides the used trigger type for more information about valid matcher and their functionalities.

    +
  • +
  • +

    The attribute value might contain any information necessary for processing the matcher’s functionality. Have a look at the relevant plug-in’s documentation for more detail.

    +
  • +
  • +

    The attribute retrieveObjectsRecursively (optional boolean) states, whether the children of the input should be retrieved recursively to find matching inputs for generation.

    +
  • +
+
+
+

The semantics of a container matchers are the following:

+
+
+
    +
  • +

    A <containerMatcher> does not declare any <variableAssignment> nodes

    +
  • +
  • +

    A <containerMatcher> matches an input if and only if one of its enclosed elements satisfies a set of <matcher> nodes of the same <trigger>

    +
  • +
  • +

    Inputs, which match a <containerMatcher> will cause a generation for each enclosed element

    +
  • +
+
+
+
+
+
+

Templates Configuration

+
+
+

The template configuration (templates.xml) specifies, which templates exist and under which circumstances it will be generated. There are two possible configuration styles:

+
+
+
    +
  1. +

    Configure the template meta-data for each template file by template nodes

    +
  2. +
  3. +

    (since cobigen-core-v1.2.0): Configure templateScan nodes to automatically retrieve a default configuration for all files within a configured folder and possibly modify the automatically configured templates using templateExtension nodes

    +
  4. +
+
+
+

To get an intuition of the idea, the following will initially describe the first (more extensive) configuration style. Such a configuration root structure looks as follows:

+
+
+
Extensive Templates Configuration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<templatesConfiguration xmlns="http://capgemini.com"
+                        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                        version="1.0" templateEngine="FreeMarker">
+    <templates>
+            ...
+    </templates>
+    <increments>
+            ...
+    </increments>
+</templatesConfiguration>
+
+
+
+

The root node <templatesConfiguration> specifies two attributes. The attribute version provides further usability support and will be handled analogous to the version attribute of the context configuration. The optional attribute templateEngine specifies the template engine to be used for processing the templates (since `cobigen-core-4.0.0`). By default it is set to FreeMarker.

+
+
+

The node <templatesConfiguration> allows two different grouping nodes as children. First, there is the <templates> node, which groups all declarations of templates. Second, there is the <increments> node, which groups all declarations about increments.

+
+
+

Template Node

+
+

The <templates> node groups multiple <template> declarations, which enables further generation. Each template file should be registered at least once as a template to be considered.

+
+
+
Example Template Configuration
+
+
<templates>
+    <template name="..." destinationPath="..." templateFile="..." mergeStrategy="..." targetCharset="..." />
+    ...
+</templates>
+
+
+
+

A template declaration consist of multiple information:

+
+
+
    +
  • +

    The attribute name specifies an unique ID within the templates configuration, which will later be reused in the increment definitions.

    +
  • +
  • +

    The attribute destinationPath specifies the destination path the template will be generated to. It is possible to use all variables defined by variable assignments within the path declaration using the FreeMarker syntax ${variables.*}. While resolving the variable expressions, each dot within the value will be automatically replaced by a slash. This behavior is accounted for by the transformations of Java packages to paths as CobiGen has first been developed in the context of the Java world. Furthermore, the destination path variable resolution provides the following additional built-in operators analogue to the FreeMarker syntax:

    +
    +
      +
    • +

      ?cap_first analogue to FreeMarker

      +
    • +
    • +

      ?uncap_first analogue to FreeMarker

      +
    • +
    • +

      ?lower_case analogue to FreeMarker

      +
    • +
    • +

      ?upper_case analogue to FreeMarker

      +
    • +
    • +

      ?replace(regex, replacement) - Replaces all occurrences of the regular expression regex in the variable’s value with the given replacement string. (since cobigen-core v1.1.0)

      +
    • +
    • +

      ?removeSuffix(suffix) - Removes the given suffix in the variable’s value iff the variable’s value ends with the given suffix. Otherwise nothing will happen. (since cobigen-core v1.1.0)

      +
    • +
    • +

      ?removePrefix(prefix) - Analogue to ?removeSuffix but removes the prefix of the variable’s value. (since cobigen-core v1.1.0)

      +
    • +
    +
    +
  • +
  • +

    The attribute templateFile describes the relative path dependent on the template folder specified in the trigger to the template file to be generated.

    +
  • +
  • +

    The attribute mergeStrategy (optional) can be optionally specified and declares the type of merge mechanism to be used, when the destinationPath points to an already existing file. CobiGen by itself just comes with a mergeStrategy override, which enforces file regeneration in total. Additional available merge strategies have to be obtained from the different plug-in’s documentations (see here for java, XML, properties, and text). Default: not set (means not mergeable)

    +
  • +
  • +

    The attribute targetCharset (optional) can be optionally specified and declares the encoding with which the contents will be written into the destination file. This also includes reading an existing file at the destination path for merging its contents with the newly generated ones. Default: UTF-8

    +
  • +
+
+
+

(Since version 4.1.0) It is possible to reference external template (templates defined on another trigger), thanks to using <incrementRef …​> that are explained here.

+
+
+
+

Template Scan Node

+
+

(since cobigen-core-v1.2.0)

+
+
+

The second configuration style for template meta-data is driven by initially scanning all available templates and automatically configure them with a default set of meta-data. A scanning configuration might look like this:

+
+
+
Example of Template-scan configuration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<templatesConfiguration xmlns="http://capgemini.com"
+                        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                        version="1.2">
+    <templateScans>
+        <templateScan templatePath="templates" templateNamePrefix="prefix_" destinationPath="src/main/java"/>
+    </templateScans>
+</templatesConfiguration>
+
+
+
+

You can specify multiple <templateScan …​> nodes for different templatePaths and different templateNamePrefixes.

+
+
+
    +
  • +

    The name can be specified to later on reference the templates found by a template-scan within an increment. (since cobigen-core-v2.1.)

    +
  • +
  • +

    The templatePath specifies the relative path from the templates.xml to the root folder from which the template scan should be performed.

    +
  • +
  • +

    The templateNamePrefix (optional) defines a common id prefix, which will be added to all found and automatically configured templates.

    +
  • +
  • +

    The destinationPath defines the root folder all found templates should be generated to, whereas the root folder will be a prefix for all found and automatically configured templates.

    +
  • +
+
+
+

A templateScan will result in the following default configuration of templates. For each file found, new template will be created virtually with the following default values:

+
+
+
    +
  • +

    id: file name without .ftl extension prefixed by templateNamePrefix from template-scan

    +
  • +
  • +

    destinationPath: relative file path of the file found with the prefix defined by destinationPath from template-scan. Furthermore,

    +
    +
      +
    • +

      it is possible to use the syntax for accessing and modifying variables as described for the attribute destinationPath of the template node, besides the only difference, that due to file system restrictions you have to replace all ?-signs (for built-ins) with #-signs.

      +
    • +
    • +

      the files to be scanned, should provide their final file extension by the following file naming convention: <filename>.<extension>.ftl Thus the file extension .ftl will be removed after generation.

      +
    • +
    +
    +
  • +
  • +

    templateFile: relative path to the file found

    +
  • +
  • +

    mergeStrategy: (optional) not set means not mergeable

    +
  • +
  • +

    targetCharset: (optional) defaults to UTF-8

    +
  • +
+
+
+

(Since version 4.1.0) It is possible to reference external templateScan (templateScans defined on another trigger), thanks to using <incrementRef …​> that are explained here.

+
+
+
+

Template Extension Node

+
+

(since cobigen-core-v1.2.0)

+
+
+

Additionally to the templateScan declaration it is easily possible to rewrite specific attributes for any scanned and automatically configured template.

+
+
+
Example Configuration of a TemplateExtension
+
+
<templates>
+    <templateExtension ref="prefix_FooClass.java" mergeStrategy="javamerge" />
+</templates>
+
+<templateScans>
+    <templateScan templatePath="foo" templateNamePrefix="prefix_" destinationPath="src/main/java/foo"/>
+</templateScans>
+
+
+
+

Let's assume that the above example declares a template-scan for the folder foo, which contains a file FooClass.java.ftl in any folder depth. Thus the template scan will automatically create a virtual template declaration with id=prefix_FooClass.java and further default configuration.

+
+
+

Using the templateExtension declaration above will reference the scanned template by the attribute ref and overrides the mergeStrategy of the automatically configured template by the value javamerge. Thus we are able to minimize the needed templates configuration.

+
+
+

(Since version 4.1.0) It is possible to reference external templateExtension (templateExtensions defined on another trigger), thanks to using <incrementRef …​> that are explained here.

+
+
+
+

Increment Node

+
+

The <increments> node groups multiple <increment> nodes, which can be seen as a collection of templates to be generated. An increment will be defined by a unique id and a human readable description.

+
+
+
+
<increments>
+    <increment id="..." description="...">
+        <incrementRef ref="..." />
+        <templateRef ref="..." />
+        <templateScanRef ref="..." />
+    </increment>
+</increments>
+
+
+
+

An increment might contain multiple increments and/or templates, which will be referenced using <incrementRef …​>, <templateRef …​>, resp. <templateScanRef …​> nodes. These nodes only declare the attribute ref, which will reference an increment, a template, or a template-scan by its id or name.

+
+
+

(Since version 4.1.0) An special case of <incrementRef …​> is the external incrementsRef. By default, <incrementRef …​> are used to reference increments defined in the same templates.xml file. So for example, we could have:

+
+
+
+
<increments>
+    <increment id="incA" description="...">
+        <incrementRef ref="incB" />
+    </increment>
+    <increment id="incB" description="...">
+        <templateRef .... />
+        <templateScan .... />
+    </increment>
+</increments>
+
+
+
+

However, if we want to reference an increment that it is not defined inside our templates.xml (an increment defined for another trigger), then we can use external incrementRef as shown below:

+
+
+
+
<increment name="..." description="...">
+    <incrementRef ref="trigger_id::increment_id"/>
+</increment>
+
+
+
+

The ref string is split using :: as the delimiter. The first part of the string is the trigger_id to reference. That trigger contains an increment_id. Currently, this functionality only works when both templates use the same kind of input file.

+
+
+
+
+
+

Java Template Logic

+
+
+

since cobigen-core-3.0.0 which is included in the Eclipse and Maven Plugin since version 2.0.0 +In addition, it is possible to implement more complex template logic by custom Java code. To enable this feature, you can simply import the CobiGen_Templates by clicking on Adapt Templates, turn it into a simple maven project (if it is not already) and implement any Java logic in the common maven layout (e.g. in the source folder src/main/java). Each Java class will be instantiated by CobiGen for each generation process. Thus, you can even store any state within a Java class instance during generation. However, there is currently no guarantee according to the template processing order.

+
+
+

As a consequence, you have to implement your Java classes with a public default (non-parameter) constructor to be used by any template. Methods of the implemented Java classes can be called within templates by the simple standard FreeMarker expression for calling Bean methods: SimpleType.methodName(param1). Until now, CobiGen will shadow multiple types with the same simple name non-deterministically. So please prevent yourself from that situation.

+
+
+

Finally, if you would like to do some reflection within your Java code accessing any type of the template project or any type referenced by the input, you should load classes by making use of the classloader of the util classes. CobiGen will take care of the correct classloader building including the classpath of the input source as well as of the classpath of the template project. If you use any other classloader or build it by your own, there will be no guarantee, that generation succeeds.

+
+
+
+
+

Template Properties

+
+
+

since cobigen-core-4.0.0 +Using a configuration with template scan, you can make use of properties in templates specified in property files named cobigen.properties next to the templates. The property files are specified as Java property files. Property files can be nested in sub-folders. Properties will be resolved including property shading. Properties defined nearest to the template to be generated will take precedence. +In addition, a cobigen.properties file can be specified in the target folder root (in eclipse plugin, this is equal to the source project root). These properties take precedence over template properties specified in the template folder.

+
+
+ + + + + +
+ + +It is not allowed to override context variables in cobigen.properties specifications as we have not found any interesting use case. This is most probably an error of the template designer, CobiGen will raise an error in this case. +
+
+
+

Multi module support or template target path redirects

+
+

since cobigen-core-4.0.0 +One special property you can specify in the template properties is the property `relocate`. It will cause the current folder and its sub-folders to be relocated at destination path resolution time. Take the following example:

+
+
+
+
folder
+  - sub1
+    Template.java.ftl
+    cobigen.properties
+
+
+
+

Let the cobigen.properties file contain the line relocate=../sub2/${cwd}. Given that, the relative destination path of Template.java.ftl will be resolved to folder/sub2/Template.java. Compare template scan configuration for more information about basic path resolution. The relocate property specifies a relative path from the location of the cobigen.properties. The ${cwd} placeholder will contain the remaining relative path from the cobigen.properties location to the template file. In this basic example it just contains Template.java.ftl, but it may even be any relative path including sub-folders of sub1 and its templates. +Given the relocate feature, you can even step out of the root path, which in general is the project/maven module the input is located in. This enables template designers to even address, e.g., maven modules located next to the module the input is coming from.

+
+
+
+
+
+

Basic Template Model

+
+
+

In addition to what is served by the different model builders of the different plug-ins, CobiGen provides a minimal model based on context variables as well as CobiGen properties. The following model is independent of the input format and will be served as a template model all the time:

+
+
+ +
+
+
+
+

Plugin Mechanism

+
+
+

Since cobigen-core 4.1.0, we changed the plug-in discovery mechanism. So far it was necessary to register new plugins programmatically, which introduces the need to let every tool integration, i.e. for eclipse or maven, be dependent on every plug-in, which should be released. This made release cycles take long time as all plug-ins have to be integrated into a final release of maven or eclipse integration.

+
+
+

Now, plug-ins are automatically discovered by the Java Service Loader mechanism from the classpath. This also effects the setup of eclipse and maven integration to allow modular releases of CobiGen in future. We are now able to provide faster rollouts of bug-fixes in any of the plug-ins as they can be released completely independently.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-core_development.html b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-core_development.html new file mode 100644 index 00000000..1237c8a9 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-core_development.html @@ -0,0 +1,784 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==CobiGen Core Development

+
+
+

CobiGen uses Apache FreeMarker as engine for generation through FreeMarker templates.

+
+
+ + + + + +
+ + +
+ +
+
+
+
+

The core implementation is divided into three projects:

+
+
+
    +
  • +

    cobigen-core-api: Mainly composed by interfaces that will be called from the Eclipse plug-in.

    +
  • +
  • +

    cobigen-core: The implementation of the interfaces are within.

    +
  • +
  • +

    cobigen-core-test: As the name suggests, used for test purposes.

    +
  • +
+
+
+

Extension Mechanism

+
+
+

The extension package from the API project contains the interfaces to be implemented if necessary by the sub plugins:

+
+
+
    +
  • +

    `GeneratorPluginActivator.java`

    +
  • +
  • +

    `InputReader.java`

    +
  • +
  • +

    `MatcherInterpreter.java`

    +
  • +
  • +

    Merger.java

    +
  • +
  • +

    `TriggerInterpreter.java`

    +
  • +
  • +

    `ModelBuilder.java`

    +
  • +
+
+
+

The ModelBuilder is an interface for accessing the internal model builder instance. It is implemented by `ModelBuilder.java` from the model package of the implementation project, which provides the methods to call createModel() on the corresponding input reader of the corresponding trigger interpreter to create the object models for a given object.

+
+
+

The to package contains the transfer objects of the template, matcher, increment and variable assignment classes that will be used as a "communication channel" between the core and sub plug-in methods.

+
+
+
+
+

Plugin Registry

+
+
+

The core must load all the sub plugins to get their Merger, Matcher, TriggerInterpreter and InputReader. Those elements must implement their respective interfaces from the core.

+
+
+

Diagram 1

+
+
+

It is important to note that not all sub plug-ins need to implement a Matcher and/or an InputReader (advanced information here)

+
+
+

Load Plugin

+
+

The process of loading plugins to the core is done at the eclipse-plugin initialization.

+
+
+

Each sub plugin has an activator class that implements the `GeneratorPluginActivator` interface from the extension package. That class implements the methods bindMerger() and bindTriggerInterpreter().

+
+
+

Diagram 2

+
+
+

This is the class passed as argument to the loadPlugin() method of `PluginRegister.java` of the pluginmanager package.

+
+
+

This method registers the mergers and the trigger interpreter of the sub plugins to the core. +The trigger interpreter has the corresponding input reader of the plugin.

+
+
+ + + + + +
+ + + +
+
+
+
+
+
+

CobiGen Initialization

+
+
+

The CobiGen initialization must initialize the context configuration and the FreeMarker configuration.

+
+
+

FreeMarker Initialization

+
+

When a CobiGen object is instantiated, the constructor initializes the FreeMarker configuration by creating a configuration instance of the class freemarker.template.Configuration and adjusting its settings.

+
+
+
+
freeMarkerConfig = new Configuration(Configuration.VERSION_2_3_23);
+freeMarkerConfig.setObjectWrapper(new DefaultObjectWrapperBuilder(Configuration.VERSION_2_3_23).build());
+freeMarkerConfig.clearEncodingMap();
+freeMarkerConfig.setDefaultEncoding("UTF-8");
+freeMarkerConfig.setLocalizedLookup(false);
+freeMarkerConfig.setTemplateLoader(new NioFileSystemTemplateLoader(`configFolder`));
+
+
+
+

Using the FileSystemUtil from the util package the URI of the root folder containing the context.xml and all templates, configurations etc…​ is converted to a Path object passing it as argument to the ContextConfiguration constructor. +The ContextConfiguration creates a new ContextConfiguration from the config package with the contents initially loaded from the context.xml

+
+
+ + + + + +
+ + +
+

How the ContextConfiguration works explained deeply here.

+
+
+
+
+

The Configuration initialization requires the version of FreeMarker to be used, as does the ObjectWrapper initialization. +The DefaultObjectWrapperBuilder creates a DefaultObjectWrapper object that maps Java objects to the type-system of FreeMarker Template Language (FTL) with the given incompatibleImprovements specified by the version used as argument.

+
+
+

The configuration of FreeMarker requires specifying a `TemplateLoader`. A `TemplateLoader` is an interface provided by the FreeMarker library that the developer should implement to fit their needs. The `TemplateLoader` implementation in CobiGen is the class `NioFileSystemTemplateLoader.java` from the config.nio package.

+
+
+

Diagram 5

+
+
+
+

Context Configuration

+
+

The context configuration reads the context.xml file from the template project (default: CobiGen_Templates) passing the path as an argument to the constructor. In the constructor, an instance of ContextConfigurationReader.java from the config.reader package is created.

+
+
+ + + + + +
+ + +
+

Please, check the CobiGen configuration for extended information about the context.xml and templates.xml configuration.

+
+
+
+
+

That reader uses JAXB. JAXB (Java Architecture for XML Binding) provides a fast and convenient way to bind XML schemas and Java representations, making it easy for Java developers to incorporate XML data and processing functions in Java applications. As part of this process, JAXB provides methods for unmarshalling (reading) XML instance documents into Java content trees.

+
+
+

Java Architecture for XML Binding

+
+

JAXB auto-generates the Java objects within the JAXBContext specified at the xmlns attribute of the contextConfiguration element from the context.xml file

+
+
+
+
Unmarshaller unmarshaller = JAXBContext.newInstance(ContextConfiguration.class).createUnmarshaller();
+
+
+
+

That auto-generation follows the contextConfiguration.xsd schema. Each Java object follows the template specified by the <xs:complexType> element of the schema file.

+
+
+
+
<xs:complexType name="trigger">
+    <xs:sequence>
+         <xs:element name="containerMatcher" type="tns:containerMatcher" minOccurs="0" maxOccurs="unbounded"/>
+         <xs:element name="matcher" type="tns:matcher" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+    <xs:attribute name="id" use="required" type="xs:NCName"/>
+    <xs:attribute name="type" use="required" type="xs:string"/>
+    <xs:attribute name="templateFolder" use="required" type="xs:string"/>
+    <xs:attribute name="inputCharset" use="optional" type="xs:string" default="UTF-8"/>
+ </xs:complexType>
+ <xs:complexType name="matcher">
+    <xs:sequence>
+        <xs:element name="variableAssignment" type="tns:variableAssignment" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+    <xs:attribute name="type" type="xs:string" use="required"/>
+    <xs:attribute name="value" type="xs:string" use="required"/>
+    <xs:attribute name="accumulationType" type="tns:accumulationType" use="optional" default="OR"/>
+  </xs:complexType>
+
+
+
+

<code>JAXB</code>

+
+
+

The generated Java objects have the elements and attributes specified in the schema:

+
+
+
+
@XmlAccessorType(XmlAccessType.FIELD)
+@XmlType(name = "trigger", namespace = "http://capgemini.com/devonfw/cobigen/ContextConfiguration", propOrder = {
+    "containerMatcher",
+    "matcher"
+})
+public class Trigger {
+    @XmlElement(namespace = "http://capgemini.com/devonfw/cobigen/ContextConfiguration")
+    protected List<ContainerMatcher> containerMatcher;
+    @XmlElement(namespace = "http://capgemini.com/devonfw/cobigen/ContextConfiguration")
+    protected List<Matcher> matcher;
+    @XmlAttribute(name = "id", required = true)
+    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
+    @XmlSchemaType(name = "NCName")
+    protected String id;
+    @XmlAttribute(name = "type", required = true)
+    protected String type;
+    @XmlAttribute(name = "templateFolder", required = true)
+    protected String templateFolder;
+    @XmlAttribute(name = "inputCharset")
+    protected String inputCharset;
+    ...
+    ..
+    .
+}
+
+
+
+

This process is done when calling the unmarshal() method.

+
+
+
+
Object rootNode = unmarshaller.unmarshal(Files.newInputStream(contextFile));
+
+
+
+ + + + + +
+ + +
+

For extended information about JAXB check the official documentation.

+
+
+
+
+
+

Version Validation

+
+

If the version retrieved after the unmarshal process is null, an InvalidConfigurationException defined in the exceptions package will be thrown.

+
+
+

If it is not null, it will be compared, using the validate() method from `VersionValidator.java` from the config.versioning package, with the project version retrieved by `MavenMetadata.java`. The `MavenMetadata.java` file is provided by the POM while building the JAR file.

+
+
+
+
<build>
+    <plugins>
+      <!-- Inject Maven Properties in java-templates source folder -->
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>templating-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>generate-version-class</id>
+            <goals>
+              <goal>filter-sources</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      ...
+      ..
+      .
+    </plugins>
+</build>
+
+
+
+

MavenMetadata gets the current CobiGen version by reading the <version> tag inside the <project> tag of the POM file.

+
+
+
+
public class MavenMetadata {
+    /** Maven version */
+    public static final String VERSION = "${project.version}";
+}
+
+
+
+
+
<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+  xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>cobigen-core</artifactId>
+  <name>CobiGen</name>
+  <version>2.2.0-SNAPSHOT</version>
+  <packaging>jar</packaging>
+  ...
+  ..
+  .
+}
+
+
+
+

The comparison has three possibilities:

+
+
+
    +
  1. +

    Versions are equal → Valid

    +
  2. +
  3. +

    context.xml version is greater than current CobiGen version → InvalidConfigurationException

    +
  4. +
  5. +

    Current CobiGen version is greater than the context.xml version → Compatible if there does not exist a version step (breaking change) in between; otherwise, an error is thrown.

    +
  6. +
+
+
+

At this point, the configuration version and root node have been validated. Unmarshalling with schema checks is then performed to verify correctness and give the user more hints to correct any failures.

+
+
+
+
SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
+ContextConfigurationVersion latestConfigurationVersion = ContextConfigurationVersion.getLatest();
+try (
+    InputStream schemaStream = getClass().getResourceAsStream("/schema/" + latestConfigurationVersion
+                                                              + "/contextConfiguration.xsd");
+    InputStream configInputStream = Files.newInputStream(contextFile)) {
+    Schema schema = schemaFactory.newSchema(new StreamSource(schemaStream));
+    unmarshaller.setSchema(schema);
+    rootNode = unmarshaller.unmarshal(configInputStream);
+    contextNode = (ContextConfiguration) rootNode;
+}
+
+
+
+
+

Load Triggers, Matchers, container Matcher, Accumulation Types and Variable Assignments

+
+

To finish the context configuration initialization, the triggers, matchers, container matchers, accumulation types and variable assignments are retrieved from the corresponding Java objects generated by JAXB.

+
+
+
+
public Map<String, Trigger> loadTriggers()
+private List<Matcher> loadMatchers(Trigger trigger)
+private List<ContainerMatcher> loadContainerMatchers(Trigger trigger)
+private List<VariableAssignment> loadVariableAssignments(Matcher matcher)
+
+
+
+
+
+
+
+

Perform Generation

+
+
+

Depending on the input, the generation process can begin from two different generate() methods called at the CobiGenWrapper from the eclipse-plugin:

+
+
+
+
public void generate(TemplateTo template, boolean forceOverride) throws IOException, TemplateException, MergeException {
+    if (singleNonContainerInput) {
+        Map<String, Object> model = cobiGen.getModelBuilder(inputs.get(0), template.getTriggerId()).createModel();
+        adaptModel(model);
+        cobiGen.generate(inputs.get(0), template, model, forceOverride);
+    } else {
+        for (Object input : inputs) {
+            cobiGen.generate(input, template, forceOverride);
+        }
+    }
+}
+
+
+
+

Single Non Container Input

+
+

If the input is a single non container input, first step is to create the model, then allow customization by the user (adaptModel()) and finally call the generate() method from CobiGen using the input, template, model and the boolean forceOverride.

+
+
+

The generation process in this case will follow this main steps:

+
+
+
    +
  1. +

    Check if the input is not null

    +
  2. +
  3. +

    Get the trigger interpreter for the type of the trigger of the template

    +
  4. +
  5. +

    Set the root folder for the templates to use for the generation

    +
  6. +
  7. +

    Get the input reader for the trigger interpreter retrieved

    +
  8. +
  9. +

    Test if the input is a package.
    +This is only possible in the case of Java inputs. As the input is a single non container input, this check will fail and the execution will continue.

    +
  10. +
  11. +

    Check if the model parameter is null and if it is, create a new model
    +As the model has been created at the CobiGenWrapper, there is no need to create it again.

    +
  12. +
  13. +

    Get the destination file.

    +
  14. +
  15. +

    Check if the destination file already exists
    +If it exists, but the forceOverride is set to true or the merge strategy of the template is null, the file will be overwritten, not merged. Otherwise, first generate output into a writer object, get the merger and merge the original file with the writer and write the file with the merge result.

    +
  16. +
  17. +

    If the file does not exist, simply write the file.

    +
  18. +
+
+
+
+

Single Container Input or multiple files selection

+
+

In the other case — the input is a container or a multiple-file selection — the generation process will be performed for each individual file of the selection, but the model will be created at step 6 of the Single Non Container Input steps, without allowing user customization.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-documentation.html b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-documentation.html new file mode 100644 index 00000000..e0246687 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-documentation.html @@ -0,0 +1,292 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

==Wiki documentation — conventions & hints +== Conventions +* Stick to the devonfw-docgen conventions to make the generation of the PDF document from the wiki work properly. +* The source code of CobiGen should be documented completely and consistent using JavaDoc. Please check JavaDoc as well after changing any logic. +* Further documentation of more abstract and informative issues for users, template developers and CobiGen developers should be done using the GitHub Wiki + * All GitHub Wiki pages should be edited in adoc mode to ensure the PDF documentation generation possibility

+
+
+

Hints

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-eclipse_installation.html b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-eclipse_installation.html new file mode 100644 index 00000000..dd5d06b2 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-eclipse_installation.html @@ -0,0 +1,358 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

==Installation

+
+
+
+
+

Remark: CobiGen is preinstalled in the devonfw/devon-ide.

+
+
+
+
+

Preconditions

+
+
+
    +
  • +

    Eclipse 4.x

    +
  • +
  • +

    Java 7 Runtime (for starting eclipse with CobiGen). This is independent from the target version of your developed code.

    +
  • +
+
+
+
+
+

Installation steps

+
+
+
    +
  1. +

    Open the eclipse installation dialog
    +menu bar → HelpInstall new Software…​

    +
    +

    01 install new software

    +
    +
  2. +
  3. +

    Open CobiGen’s update site
    +Insert the update site of your interest into the field Work with and press Add …​
    +Unless you know what you are doing we recommend you install every plugin as shown in the picture below.

    +
    + +
    +
  4. +
  5. +

    Follow the installation wizard
    +Select CobiGen Eclipse Plug-inNextNext → accept the license → FinishOKYes

    +
  6. +
  7. +

    Once installed, a new menu entry named "CobiGen" will show up in the Package Explorer’s context menu. In the sub menu there will be the Generate…​ command, which may ask you to update the templates, and then you can start the generation wizard of CobiGen. You can adapt the templates by clicking on Adapt Templates which will give you the possibility to import the CobiGen_Templates automatically so that you can modify them.

    +
  8. +
  9. +

    Checkout (clone) your project’s templates folder or use the current templates released with CobiGen (https://github.com/devonfw/cobigen/tree/master/cobigen-templates) and then choose Import -> General -> Existing Projects into Workspace to import the templates into your workspace.

    +
  10. +
  11. +

    Now you can start generating. To get an introduction of CobiGen try the devon4j templates and work on the devon4j sample application. There you might want to start with Entity objects as a selection to run CobiGen with, which will give you a good overview of what CobiGen can be used for right out of the box in devon4j based development. If you need some more introduction in how to come up with your templates and increments, please be referred to the documentation of the context configuration and the templates configuration

    +
  12. +
+
+
+

Depending on your context configuration, the menu entry Generate…​ may be grayed out or not. See for more information about valid selections for generation.

+
+
+
+
+

Updating

+
+
+

In general updating CobiGen for eclipse is done via the update mechanism of eclipse directly, as shown on image below:

+
+
+

03 update software

+
+
+

Upgrading eclipse CobiGen plug-in to v3.0.0 needs some more attention of the user due to a changed plug-in architecture of CobiGen’s core module and the eclipse integration. Eventually, we were able to provide any plug-in of CobiGen separately as its own eclipse bundle (fragment), which is automatically discovered by the main CobiGen Eclipse plug-in after installation.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-eclipse_logging.html b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-eclipse_logging.html new file mode 100644 index 00000000..0e198a4f --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-eclipse_logging.html @@ -0,0 +1,290 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Logging

+
+
+

If you have any problem with the CobiGen eclipse plug-in, you might want to enable logging to provide more information for further problem analysis. This can be done easily by adding the logback.xml to the root of the CobiGen_templates configuration folder. The file should contain at least the following contents, whereas you should specify an absolute path to the target log file (at the TODO). If you are using the (cobigen-templates project, you might have the contents already specified but partially commented.

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<!-- This file is for logback classic. The file contains the configuration for sl4j logging -->
+<configuration>
+    <appender name="FILE" class="ch.qos.logback.core.FileAppender">
+        <file><!-- TODO choose your log file location --></file>
+        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
+            <Pattern>%n%date %d{HH:mm:ss.SSS} [%thread] %-5level %logger - %msg%n
+            </Pattern>
+        </encoder>
+    </appender>
+    <root level="DEBUG">
+        <appender-ref ref="FILE" />
+    </root>
+</configuration>
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-eclipse_usage.html b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-eclipse_usage.html new file mode 100644 index 00000000..03f839a1 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-eclipse_usage.html @@ -0,0 +1,464 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==Usage

+
+
+

CobiGen has two different generation modes depending on the input selected for generation. The first one is the simple mode, which will be started if the input contains only one input artifact, e.g. for Java an input artifact currently is a Java file. The second one is the batch mode, which will be started if the input contains multiple input artifacts, e.g. for Java this means a list of files. In general this means also that the batch mode might be started when selecting complex models as inputs, which contain multiple input artifacts. The latter scenario has only been covered in the research group, yet.

+
+
+

Simple Mode

+
+
+

Selecting the menu entry Generate…​ the generation wizard will be opened:

+
+
+

generate wizard page1

+
+
+

The left side of the wizard shows all available increments, which can be selected to be generated. Increments are a container like concept encompassing multiple files to be generated, which should result in a semantically closed generation output. +On the right side of the wizard all files are shown, which might be effected by the generation - dependent on the increment selection of files on the left side. The type of modification of each file will be encoded into following color scheme if the files are selected for generation:

+
+
+
    +
  • +

    green: files, which are currently non-existent in the file system. These files will be created during generation

    +
  • +
  • +

    yellow: files, which are currently existent in the file system and which are configured to be merged with generated contents.

    +
  • +
  • +

    red: files, which are currently existent in the file system. These files will be overwritten if manually selected.

    +
  • +
  • +

    no color: files, which are currently existent in the file system. Additionally files, which were deselected and thus will be ignored during generation.

    +
  • +
+
+
+

Selecting an increment on the left side will initialize the selection of all shown files to be generated on the right side, whereas green and yellow categorized files will be selected initially. A manual modification of the pre-selection can be performed by switching to the customization tree using the Customize button on the right lower corner.

+
+
+
+
+

Optional: If you want to customize the generation object model of a Java input class, you might continue with the Next > button instead of finishing the generation wizard. The next generation wizard page is currently available for Java file inputs and lists all non-static fields of the input. deselecting entries will lead to an adapted object model for generation, such that deselected fields will be removed in the object model for generation. By default all fields will be included in the object model.

+
+
+
+
+

Using the Finish button, the generation will be performed. Finally, CobiGen runs the eclipse internal organize imports and format source code for all generated sources and modified sources. Thus it is possible, that---especially organize imports opens a dialog if some types could not be determined automatically. This dialog can be easily closed by pressing on Continue. If the generation is finished, the Success! dialog will pop up.

+
+
+
+
+

Batch mode

+
+
+

If there are multiple input elements selected, e.g., Java files, CobiGen will be started in batch mode. For the generation wizard dialog this means, that the generation preview will be constrained to the first selected input element. It does not preview the generation for each element of the selection or of a complex input. The selection of the files to be generated will be generated for each input element analogously afterwards.

+
+
+

generate wizard page1 batch

+
+
+

Thus the color encoding differs also a little bit:

+
+
+
    +
  • +

    yellow: files, which are configured to be merged.

    +
  • +
  • +

    red: files, which are not configured with any merge strategy and thus will be created if the file does not exist or overwritten if the file already exists

    +
  • +
  • +

    no color: files, which will be ignored during generation

    +
  • +
+
+
+

Initially all possible files to be generated will be selected.

+
+
+
+
+

Health Check

+
+
+

To check whether CobiGen runs appropriately for the selected element(s) the user can perform a Health Check by activating the respective menu entry as shown below.

+
+
+

health check menu entry

+
+
+

The simple Health Check includes 3 checks. As long as any of these steps fails, the Generate menu entry is grayed out.

+
+
+

The first step is to check whether the generation configuration is available at all. If this check fails you will see the following message:

+
+
+

health check no templates

+
+
+

This indicates, that there is no Project named CobiGen_Templates available in the current workspace. To run CobiGen appropriately, it is necessary to have a configuration project named CobiGen_Templates imported into your workspace. For more information see chapter Eclipse Installation.

+
+
+

The second step is to check whether the template project includes a valid context.xml. If this check fails, you will see the following message:

+
+
+

health check invalid config

+
+
+

This means that either your context.xml

+
+
+
    +
  • +

    does not exist (or has another name)

    +
  • +
  • +

    or it is not valid one in any released version of CobiGen

    +
  • +
  • +

    or there is simply no automatic routine of upgrading your context configuration to a valid state.

    +
  • +
+
+
+

If all this is not the case, such as, there is a context.xml, which can be successfully read by CobiGen, you might get the following information:

+
+
+

health check old context

+
+
+

This means that your context.xml is available with the correct name but it is outdated (belongs to an older CobiGen version). In this case just click on Upgrade Context Configuration to get the latest version.

+
+
+
+
+

Remark: This will create a backup of your current context configuration and converts your old configuration to the new format. The upgrade will remove all comments from the file, which could be retrieved later on again from the backup. +If the creation of the backup fails, you will be asked to continue or to abort.

+
+
+
+
+

The third step checks whether there are templates for the selected element(s). If this check fails, you will see the following message:

+
+
+

health check no matching triggers

+
+
+

This indicates that no trigger matching the current selection has been activated. The reason might be that your selection is faulty or that you imported the wrong template project (e.g. you are working on a devon4j project, but imported the Templates for the Register Factory). If you are a template developer, have a look at the trigger configuration and at the corresponding available plug-in implementations of triggers, like e.g., Java Plug-in or XML Plug-in.

+
+
+

If all the checks are passed you see the following message:

+
+
+

health check all OK

+
+
+

In this case everything is OK and the Generate button is not grayed out anymore so that you are able to trigger it and see the [simple-mode].

+
+
+

In addition to the basic check of the context configuration, you also have the opportunity to perform an Advanced Health Check, which will check all available templates configurations (templates.xml) of path-depth=1 from the configuration project root according to their compatibility.

+
+
+

health check advanced up to date

+
+
+

Analogous to the upgrade of the context configuration, the Advanced Health Check will also provide upgrade functionality for templates configurations if available.

+
+
+
+
+

Update Templates

+
+
+

Update Templates: Select an entity file, right-click, then select CobiGen → Update Templates. After that, click on Download; a "download successful" message will appear.

+
+
+
+
+

Adapt Templates

+
+
+

Adapt Templates: Select any file and right-click, then select `CobiGen → Adapt Templates`. If the CobiGen templates jar is not available, it is downloaded automatically. If the CobiGen templates are already present, the existing templates in the workspace will be overridden. Click on OK; an "imported templates successfully" message will appear.

+
+
+

Finally, please change the Java version of the project to 1.8 so that you don’t have any compilation errors.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-htmlplugin.html b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-htmlplugin.html new file mode 100644 index 00000000..b22585e8 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-htmlplugin.html @@ -0,0 +1,348 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

==HTML Plug-in

+
+
+

The HTML Plug-in enables merging result HTML files into existing ones. This plug-in is currently used for generating an Angular2 client. Currently, the generation of an Angular2 client requires an ETO java object as input, so there is no need to implement an input reader for ts artifacts for the moment.

+
+
+

Trigger Extensions

+
+
+

As for the Angular2 generation the input is a java object, the trigger expressions (including matchers and variable assignments) are implemented as Java.

+
+
+
+
+

Merger extensions

+
+
+

There are currently two merge strategies:

+
+
+
    +
  • +

    merge strategy html-ng* (add the new code respecting the existing in case of conflict)

    +
  • +
  • +

    merge strategy html-ng*_override (add the new code overwriting the existent in case of conflict)

    +
  • +
+
+
+

The merging of two Angular2 files will be processed as follows:

+
+
+

The merge algorithm handles the following AST nodes:

+
+
+
    +
  • +

    md-nav-list

    +
  • +
  • +

    a

    +
  • +
  • +

    form

    +
  • +
  • +

    md-input-container

    +
  • +
  • +

    input

    +
  • +
  • +

    name (for name attribute)

    +
  • +
  • +

    ngIf

    +
  • +
+
+
+ + + + + +
+ + +Be aware, that the HTML merger is not generic and only handles the described tags needed for merging code of a basic Angular client implementation. For future versions, it is planned to implement a more generic solution. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-javaplugin.html b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-javaplugin.html new file mode 100644 index 00000000..ce6657c2 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-javaplugin.html @@ -0,0 +1,702 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==Java Plug-in +The CobiGen Java Plug-in comes with a new input reader for java artifacts, new java related trigger and matchers, as well as a merging mechanism for Java sources.

+
+
+

Trigger extension

+
+
+

The Java Plug-in provides a new trigger for Java related inputs. It accepts different representations as inputs (see Java input reader) and provides additional matching and variable assignment mechanisms. The configuration in the context.xml for this trigger looks like this:

+
+
+
    +
  • +

    type 'java'

    +
    +
    Example of a java trigger definition
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables Java elements as inputs.

    +
    +
  • +
+
+
+

Matcher types

+
+

With the trigger you might define matchers, which restrict the input upon specific aspects:

+
+
+
    +
  • +

    type fqn → full qualified name matching

    +
    +
    Example of a java trigger definition with a full qualified name matcher
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="fqn" value="(.+)\.persistence\.([^\.]+)\.entity\.([^\.]+)">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the full qualified name (fqn) of the declaring input class matches the given regular expression (value).

    +
    +
  • +
  • +

    type 'package' → package name of the input

    +
    +
    Example of a java trigger definition with a package name matcher
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="package" value="(.+)\.persistence\.([^\.]+)\.entity">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the package name (package) of the declaring input class matches the given regular expression (value).

    +
    +
  • +
  • +

    type 'expression'

    +
    +
    Example of a java trigger definition with a package name matcher
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="expression" value="instanceof java.lang.String">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the expression evaluates to true. Valid expressions are

    +
    +
  • +
  • +

    instanceof fqn: checks an 'is a' relation of the input type

    +
  • +
  • +

    isAbstract: checks, whether the input type is declared abstract

    +
  • +
+
+
+
+

Container Matcher types

+
+

Additionally, the java plugin provides the ability to match packages (containers) as follows:

+
+
+
    +
  • +

    type 'package'

    +
    +
    Example of a java trigger definition with a container matcher for packages
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <containerMatcher type="package" value="com\.example\.app\.component1\.persistence.entity" />
    +</trigger>
    +
    +
    +
    +

    The container matcher matches packages provided by the type com.capgemini.cobigen.javaplugin.inputreader.to.PackageFolder with a regular expression stated in the value attribute. (See containerMatcher semantics to get more information about containerMatchers itself.)

    +
    +
  • +
+
+
+
+

Variable Assignment types

+
+

Furthermore, it provides the ability to extract information from each input for further processing in the templates. The values assigned by variable assignments will be made available in template and the destinationPath of context.xml through the namespace variables.<key>. The Java Plug-in currently provides two different mechanisms:

+
+
+
    +
  • +

    type 'regex' → regular expression group

    +
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="fqn" value="(.+)\.persistence\.([^\.]+)\.entity\.([^\.]+)">
    +        <variableAssignment type="regex" key="rootPackage" value="1" />
    +        <variableAssignment type="regex" key="component" value="2" />
    +        <variableAssignment type="regex" key="pojoName" value="3" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value of the given regular expression group number to the given key.

+
+
+
    +
  • +

    type 'constant' → constant parameter

    +
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="fqn" value="(.+)\.persistence\.([^\.]+)\.entity\.([^\.]+)">
    +        <variableAssignment type="constant" key="domain" value="restaurant" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value to the key as a constant.

+
+
+
+

Java input reader

+
+

The CobiGen Java Plug-in implements an input reader for parsed java sources as well as for java Class<?> objects (loaded by reflection). So API user can pass Class<?> objects as well as JavaClass objects for generation. The latter depends on QDox, which will be used for parsing and merging java sources. For getting the right parsed java inputs you can easily use the JavaParserUtil, which provides static functionality to parse java files and get the appropriate JavaClass object.

+
+
+

Furthermore, due to restrictions on both inputs according to model building (see below), it is also possible to provide an array of length two as an input, which contains the Class<?> as well as the JavaClass object of the same class.

+
+
+

Template object model

+
+

No matter whether you use reflection objects or parsed java classes as input, you will get the following object model for template creation:

+
+
+
    +
  • +

    classObject ('Class' :: Class object of the Java input)

    +
  • +
  • +

    POJO

    +
    +
      +
    • +

      name ('String' :: Simple name of the input class)

      +
    • +
    • +

      package ('String' :: Package name of the input class)

      +
    • +
    • +

      canonicalName ('String' :: Full qualified name of the input class)

      +
    • +
    • +

      annotations ('Map<String, Object>' :: Annotations, which will be represented by a mapping of the full qualified type of an annotation to its value. To gain template compatibility, the key will be stored with '_' instead of '.' in the full qualified annotation type. Furthermore, the annotation might be recursively defined and thus be accessed using the same type of mapping. Example ${pojo.annotations.javax_persistence_Id})

      +
    • +
    • +

      JavaDoc ('Map<String, Object>') :: A generic way of addressing all available JavaDoc doclets and comments. The only fixed variable is comment (see below). All other provided variables depend on the doclets found while parsing. The value of a doclet can be accessed by the doclets name (e.g. ${…​JavaDoc.author}). In case of doclet tags that can be declared multiple times (currently @param and @throws), you will get a map, which you access in a specific way (see below).

      +
      +
        +
      • +

        comment ('String' :: JavaDoc comment, which does not include any doclets)

        +
      • +
      • +

        params ('Map<String,String> :: JavaDoc parameter info. If the comment follows proper conventions, the key will be the name of the parameter and the value being its description. You can also access the parameters by their number, as in arg0, arg1 etc, following the order of declaration in the signature, not in order of JavaDoc)

        +
      • +
      • +

        throws ('Map<String,String> :: JavaDoc exception info. If the comment follows proper conventions, the key will be the name of the thrown exception and the value being its description)

        +
      • +
      +
      +
    • +
    • +

      extendedType ('Map<String, Object>' :: The supertype, represented by a set of mappings (since cobigen-javaplugin v1.1.0)

      +
      +
        +
      • +

        name ('String' :: Simple name of the supertype)

        +
      • +
      • +

        canonicalName ('String' :: Full qualified name of the supertype)

        +
      • +
      • +

        package ('String' :: Package name of the supertype)

        +
      • +
      +
      +
    • +
    • +

      implementedTypes ('List<Map<String, Object>>' :: A list of all implementedTypes (interfaces) represented by a set of mappings (since cobigen-javaplugin v1.1.0)

      +
      +
        +
      • +

        interface ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          name ('String' :: Simple name of the interface)

          +
        • +
        • +

          canonicalName ('String' :: Full qualified name of the interface)

          +
        • +
        • +

          package ('String' :: Package name of the interface)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      fields ('List<Map<String, Object>>' :: List of fields of the input class) (renamed since cobigen-javaplugin v1.2.0; previously attributes)

      +
      +
        +
      • +

        field ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          name ('String' :: Name of the Java field)

          +
        • +
        • +

          type ('String' :: Type of the Java field)

          +
        • +
        • +

          canonicalType ('String' :: Full qualified type declaration of the Java field’s type)

          +
        • +
        • +

          'isId' (Deprecated :: boolean :: true if the Java field or its setter or its getter is annotated with the javax.persistence.Id annotation, false otherwise. Equivalent to ${pojo.attributes[i].annotations.javax_persistence_Id?has_content})

          +
        • +
        • +

          JavaDoc (see pojo.JavaDoc)

          +
        • +
        • +

          annotations (see pojo.annotations with the remark, that for fields all annotations of its setter and getter will also be collected)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      methodAccessibleFields ('List<Map<String, Object>>' :: List of fields of the input class or its inherited classes, which are accessible using setter and getter methods)

      +
      +
        +
      • +

        same as for field (but without JavaDoc!)

        +
      • +
      +
      +
    • +
    • +

      methods ('List<Map<String, Object>>' :: The list of all methods, whereas one method will be represented by a set of property mappings)

      +
      +
        +
      • +

        method ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          name ('String' :: Name of the method)

          +
        • +
        • +

          JavaDoc (see pojo.JavaDoc)

          +
        • +
        • +

          annotations (see pojo.annotations)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+

Furthermore, when providing a Class<?> object as input, the Java Plug-in will provide additional functionalities as template methods (deprecated):

+
+
+
    +
  1. +

    isAbstract(String fqn) (Checks whether the type with the given full qualified name is an abstract class. Returns a Boolean value.) (since cobigen-javaplugin v1.1.1) (deprecated)

    +
  2. +
  3. +

    isSubtypeOf(String subType, String superType) (Checks whether the subType declared by its full qualified name is a sub type of the superType declared by its full qualified name. Equals the Java expression subType instanceof superType and so also returns a Boolean value.) (since cobigen-javaplugin v1.1.1) (deprecated)

    +
  4. +
+
+
+
+

Model Restrictions

+
+

As stated before, both inputs (Class<?> objects and JavaClass objects) have their restrictions according to model building. In the following, these restrictions are listed for both models: the Parsed Model, which results from a JavaClass input, and the Reflected Model, which results from a Class<?> input.

+
+
+

It is important to understand, that these restrictions are only present if you work with either Parsed Model OR the Reflected Model. If you use the Maven Build Plug-in or Eclipse Plug-in these two models are merged together so that they can mutually compensate their weaknesses.

+
+
+
Parsed Model
+
+
    +
  • +

    annotations of the input’s supertype are not accessible due to restrictions in the QDox library. So pojo.methodAccessibleFields[i].annotations will always be empty for super type fields.

    +
  • +
  • +

    annotations' parameter values are available as Strings only (e.g. the Boolean value true is transformed into "true"). This also holds for the Reflected Model.

    +
  • +
  • +

    fields of "supertypes" of the input JavaClass are not available at all. So pojo.methodAccessibleFields will only contain the input type’s and the direct superclass’s fields.

    +
  • +
  • +

    [resolved, since cobigen-javaplugin 1.3.1] field types of supertypes are always canonical. So pojo.methodAccessibleFields[i].type will always provide the same value as pojo.methodAccessibleFields[i].canonicalType (e.g. java.lang.String instead of the expected String) for super type fields.

    +
  • +
+
+
+
+
Reflected Model
+
+
    +
  • +

    annotations' parameter values are available as Strings only (e.g. the Boolean value true is transformed into "true"). This also holds for the Parsed Model.

    +
  • +
  • +

    annotations are only available if the respective annotation has @Retention(value=RUNTIME), otherwise the annotations are to be discarded by the compiler or by the VM at run time. For more information see RetentionPolicy.

    +
  • +
  • +

    information about generic types is lost. E.g. a field’s/ methodAccessibleField’s type for List<String> can only be provided as List<?>.

    +
  • +
+
+
+
+
+
+
+
+

Merger extensions

+
+
+

The Java Plug-in provides two additional merging strategies for Java sources, which can be configured in the templates.xml:

+
+
+
    +
  • +

    Merge strategy javamerge (merges two Java resources and keeps the existing Java elements on conflicts)

    +
  • +
  • +

    Merge strategy javamerge_override (merges two Java resources and overrides the existing Java elements on conflicts)

    +
  • +
+
+
+

In general merging of two Java sources will be processed as follows:

+
+
+

Precondition of processing a merge of generated contents and existing ones is a common Java root class resp. surrounding class. If this is the case this class and all further inner classes will be merged recursively. Therefore, the following Java elements will be merged and conflicts will be resolved according to the configured merge strategy:

+
+
+
    +
  • +

    extends and implements relations of a class: Conflicts can only occur for the extends relation.

    +
  • +
  • +

    Annotations of a class: Conflicted if an annotation declaration already exists.

    +
  • +
  • +

    Fields of a class: Conflicted if there is already a field with the same name in the existing sources. (Will be replaced / ignored in total, also including annotations)

    +
  • +
  • +

    Methods of a class: Conflicted if there is already a method with the same signature in the existing sources. (Will be replaced / ignored in total, also including annotations)

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-jsonplugin.html b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-jsonplugin.html new file mode 100644 index 00000000..9f42b4a5 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-jsonplugin.html @@ -0,0 +1,336 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

==JSON Plug-in +At the moment the plug-in can be used for merge generic JSON files depending on the merge strategy defined at the templates.

+
+
+

Merger extensions

+
+
+

There are currently these merge strategies:

+
+
+

Generic JSON Merge

+
+
+
    +
  • +

    merge strategy jsonmerge (add the new code respecting the existent in case of conflict)

    +
  • +
  • +

    merge strategy jsonmerge_override (add the new code overwriting the existent in case of conflict)

    +
    +
      +
    1. +

      JsonArray’s will be ignored / replaced in total

      +
    2. +
    3. +

      JsonObjects in conflict will be processed recursively ignoring adding non existent elements.

      +
    4. +
    +
    +
  • +
+
+
+
+
+

Merge Process

+
+
+

Generic JSON Merging

+
+

The merge process will be:

+
+
+
    +
  1. +

    Add non existent JSON Objects from patch file to base file.

    +
  2. +
  3. +

    For objects existent in both files, non-existent keys from the patch object will be added to the base object. This process will be done recursively for all existent objects.

    +
  4. +
  5. +

    For JSON Arrays existent in both files, the arrays will be just concatenated.

    +
  6. +
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-maven_configuration.html b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-maven_configuration.html new file mode 100644 index 00000000..2d9338bb --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-maven_configuration.html @@ -0,0 +1,542 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==Maven Build Integration

+
+
+

For maven integration of CobiGen you can include the following build plugin into your build:

+
+
+
Build integration of CobiGen
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>com.devonfw.cobigen</groupId>
+      <artifactId>cobigen-maven-plugin</artifactId>
+      <version>VERSION-YOU-LIKE</version>
+      <executions>
+        <execution>
+          <id>cobigen-generate</id>
+          <phase>generate-resources</phase>
+          <goals>
+            <goal>generate</goal>
+          </goals>
+        </execution>
+      </executions>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+

Available goals

+
+
+
    +
  • +

    generate: Generates contents configured by the standard non-compiled configuration folder. Thus generation can be controlled/configured due to an location URI of the configuration and template or increment ids to be generated for a set of inputs.

    +
  • +
+
+
+

Available phases are all phases, which already provide compiled sources such that CobiGen can perform reflection on it. Thus possible phases are for example package, site.

+
+
+

Provide Template Set

+
+
+

For generation using the CobiGen maven plug-in, the CobiGen configuration can be provided in two different styles:

+
+
+
    +
  1. +

    By a configurationFolder, which should be available on the file system whenever you are running the generation. The value of configurationFolder should correspond to the maven file path syntax.

    +
    +
    Provide CobiGen configuration by configuration folder (file)
    +
    +
    <build>
    +  <plugins>
    +    <plugin>
    +      ...
    +      <configuration>
    +        <configurationFolder>cobigen-templates</configurationFolder>
    +      </configuration>
    +       ...
    +     </plugin>
    +  </plugins>
    +</build>
    +
    +
    +
  2. +
  3. +

    By maven dependency, whereas the maven dependency should stick on the same conventions as the configuration folder. This explicitly means that it should contain non-compiled resources as well as the context.xml on top-level.

    +
    +
    Provide CobiGen configuration by maven dependency (jar)
    +
    +
    <build>
    +  <plugins>
    +    <plugin>
    +      ...
    +      <dependencies>
    +        <dependency>
    +          <groupId>com.devonfw.cobigen</groupId>
    +          <artifactId>templates-XYZ</artifactId>
    +          <version>VERSION-YOU-LIKE</version>
    +        </dependency>
    +      </dependencies>
    +      ...
    +    </plugin>
    +  </plugins>
    +</build>
    +
    +
    +
    +

    We currently provide a generic deployed version of the templates on the devonfw-nexus for Register Factory (<artifactId>cobigen-templates-rf</artifactId>) and for the devonfw itself (<artifactId>cobigen-templates-devonfw</artifactId>).

    +
    +
  4. +
+
+
+
+
+

Build Configuration

+
+
+

Using the following configuration you will be able to customize your generation as follows:

+
+
+
    +
  • +

    <destinationRoot> specifies the root directory the relative destinationPath of CobiGen templates configuration should depend on. Default ${basedir}

    +
  • +
  • +

    <inputPackage> declares a package name to be used as input for batch generation. This refers directly to the CobiGen Java Plug-in container matchers of type package configuration.

    +
  • +
  • +

    <inputFile> declares a file to be used as input. The CobiGen maven plug-in will try to parse this file to get an appropriate input to be interpreted by any CobiGen plug-in.

    +
  • +
  • +

    <increment> specifies an increment ID to be generated. You can specify one single increment with content ALL to generate all increments matching the input(s).

    +
  • +
  • +

    <template> specifies a template ID to be generated. You can specify one single template with content ALL to generate all templates matching the input(s).

    +
  • +
  • +

    <forceOverride> specifies an overriding behavior, which enables non-mergeable resources to be completely rewritten by generated contents. For mergeable resources this flag indicates, that conflicting fragments during merge will be replaced by generated content. Default: false

    +
  • +
  • +

    <failOnNothingGenerated> specifies whether the build should fail if the execution does not generate anything.

    +
  • +
+
+
+
Example for a simple build configuration
+
+
<build>
+  <plugins>
+    <plugin>
+       ...
+      <configuration>
+        <destinationRoot>${basedir}</destinationRoot>
+        <inputPackages>
+          <inputPackage>package.to.be.used.as.input</inputPackage>
+        </inputPackages>
+        <inputFiles>
+          <inputFile>path/to/file/to/be/used/as/input</inputFile>
+        </inputFiles>
+        <increments>
+          <increment>IncrementID</increment>
+        </increments>
+        <templates>
+          <template>TemplateID</template>
+        </templates>
+        <forceOverride>false</forceOverride>
+      </configuration>
+        ...
+    </plugin>
+  </plugins>
+</build>
+
+
+
+
+
+

Plugin Injection Since v3

+
+
+

Since version 3.0.0, the plug-in mechanism has changed to support modular releases of the CobiGen plug-ins. Therefore, you need to add all plug-ins to be used for generation. Take the following example to get the idea:

+
+
+
Example of a full configuration including plugins
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>com.devonfw.cobigen</groupId>
+      <artifactId>cobigen-maven-plugin</artifactId>
+      <version>VERSION-YOU-LIKE</version>
+      <executions>
+        ...
+      </executions>
+      <configuration>
+        ...
+      </configuration>
+      <dependencies>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>templates-devon4j</artifactId>
+          <version>2.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>tempeng-freemarker</artifactId>
+          <version>1.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>javaplugin</artifactId>
+          <version>1.6.0</version>
+        </dependency>
+      </dependencies>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+
+
+

A full example

+
+
+
    +
  1. +

    A complete maven configuration example

    +
  2. +
+
+
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>com.devonfw.cobigen</groupId>
+      <artifactId>cobigen-maven-plugin</artifactId>
+      <version>6.0.0</version>
+      <executions>
+        <execution>
+          <id>generate</id>
+          <phase>package</phase>
+          <goals>
+            <goal>generate</goal>
+          </goals>
+        </execution>
+      </executions>
+      <configuration>
+        <inputFiles>
+          <inputFile>src/main/java/io/github/devonfw/cobigen/generator/dataaccess/api/InputEntity.java</inputFile>
+        </inputFiles>
+        <increments>
+          <increment>dataaccess_infrastructure</increment>
+          <increment>daos</increment>
+        </increments>
+        <failOnNothingGenerated>false</failOnNothingGenerated>
+      </configuration>
+      <dependencies>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>templates-devon4j</artifactId>
+          <version>2.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>tempeng-freemarker</artifactId>
+          <version>2.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>javaplugin</artifactId>
+          <version>1.6.0</version>
+        </dependency>
+      </dependencies>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-openapiplugin.html b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-openapiplugin.html new file mode 100644 index 00000000..bb1eb78a --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-openapiplugin.html @@ -0,0 +1,1084 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==OpenAPI Plug-in

+
+
+

The OpenAPI Plug-in enables the support for Swagger files that follows the OpenAPI 3.0 standard as input for CobiGen. Until now, CobiGen was thought to follow a "code first" generation, with this plugin, now it can also follow the "contract first" strategy

+
+
+
    +
  • +

    Code First

    +
    +
      +
    • +

      Generating from a file with code (Java/XML code in our case)

      +
    • +
    +
    +
  • +
  • +

    Contract First

    +
    +
      +
    • +

      Generation from a full definition file (Swagger in this case). This file contains all the information about entities, operations, etc…​

      +
    • +
    +
    +
  • +
+
+
+ + + + + +
+ + +If you are not a CobiGen developer, you will be more interested in usage. +
+
+
+

Trigger Extensions

+
+
+

The OpenAPI Plug-in provides a new trigger for Swagger OpenAPI 3.0 related inputs. It accepts different representations as inputs (see OpenAPI input reader) and provides additional matching and variable assignment mechanisms. The configuration in the context.xml for this trigger looks like this:

+
+
+
    +
  • +

    type openapi

    +
    +
    Example of a OpenAPI trigger definition
    +
    +
    <trigger id="..." type="openapi" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables OpenAPI elements as inputs.

    +
    +
  • +
+
+
+

Matcher type

+
+

With the trigger you might define matchers, which restrict the input upon specific aspects:

+
+
+
    +
  • +

    type 'element' → An object

    +
  • +
+
+
+

This trigger will be enabled if the element (Java Object) of the input file is an EntityDef (value).

+
+
+
+

Container Matcher type

+
+

Additionally, the OpenAPI plug-in provides the ability to match containers as follows:

+
+
+
    +
  • +

    type 'element'

    +
  • +
+
+
+

The container matcher matches elements as Java Objects, in this case will be always an OpenAPIFile object. (See containerMatcher semantics to get more information about containerMatchers itself.)

+
+
+
+

Variable Assignment types

+
+

Furthermore, it provides the ability to extract information from each input for further processing in the templates. The values assigned by variable assignments will be made available in template and the destinationPath of context.xml through the namespace variables.<key>. The OpenAPI Plug-in currently provides two different mechanisms:

+
+
+
    +
  • +

    type 'constant' → constant parameter

    +
    +
    +
    <trigger id="..." type="openapi" templateFolder="...">
    +    <containerMatcher type="element" value="OpenApiFile"/>
    +    <matcher type="element" value="EntityDef">
    +        <variableAssignment type="constant" key="rootPackage" value="com.capgemini.demo" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the given constant value to the given key. In this case, the constant type variableAssignment is used to specify the root package where the generator will place the generated files.

+
+
+
    +
  • +

    type 'extension' → Extraction of the info extensions and the extensions of each entity. (the tags that start with "x-…​").

    +
    +
    +
      <trigger id="..." type="openapi" templateFolder="...">
    +    <containerMatcher type="element" value="OpenApiFile"/>
    +    <matcher type="element" value="EntityDef">
    +      <variableAssignment type="extension" key="testingAttribute" value="x-test"/>
    +      <variableAssignment type="extension" key="rootPackage" value="x-rootpackage"/>
    +      <variableAssignment type="extension" key="globalVariable" value="x-global"/>
    +    </matcher>
    +  </trigger>
    +
    +
    +
  • +
+
+
+

The 'extension' variable assignment tries to find 'extensions' (tags that start with "x-…​") on the 'info' +part of your file and on the extensions of each entity. value is the extension that our plug-in will try to find on your OpenAPI file. The result will +be stored in the variable key.

+
+
+

As you will see on the figure below, there are two types of variables: The global ones, that are defined +on the 'info' part of the file, and the local ones, that are defined inside each entity.

+
+
+

Therefore, if you want to define the root package, then you will have to declare it on the 'info' part. +That way, all your entities will be generated under the same root package (e.g. com.devonfw.project).

+
+
+

Swagger at devon4j Project

+
+
+

If no extension with that name was found, then an empty string will be assigned. In the case of not defining the root package, then the code will be generated into src/main/java.

+
+
+
    +
  • +

    type 'property' → property of the Java Object

    +
    +
    +
    <trigger id="..." type="openapi" templateFolder="...">
    +    <containerMatcher type="element" value="OpenApiFile"/>
    +    <matcher type="element" value="EntityDef">
    +        <variableAssignment type="property" key="entityName" value="name" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

The 'property' variable assignment tries to find the property value of the entities defined on the schema. +The value is assigned to the key. The current properties that you will able to get are:

+
+
+
    +
  1. +

    ComponentDef component: It is an object that stores the configuration of an devon4j component. Its only +property is List<PathDef> paths which contains the paths as the ones shown here.

    +
  2. +
  3. +

    String componentName: Stores the name of the x-component tag for this entity.

    +
  4. +
  5. +

    String name: Name of this entity (as shown on the example above).

    +
  6. +
  7. +

    String description: Description of this entity.

    +
  8. +
  9. +

    List<PropertyDef> properties: List containing all the properties of this entity. PropertyDef is an object that has the next properties:

    +
    +
      +
    1. +

      String name.

      +
    2. +
    3. +

      String type.

      +
    4. +
    5. +

      String format.

      +
    6. +
    7. +

      String description.

      +
    8. +
    9. +

      Boolean isCollection.

      +
    10. +
    11. +

      Boolean isEntity.

      +
    12. +
    13. +

      Boolean required.

      +
    14. +
    15. +

      Map<String, Object> constraints

      +
    16. +
    +
    +
  10. +
+
+
+

If no property with that name was found, then it will be set to null.

+
+
+
+

Full trigger configuration

+
+
+
<trigger id="..." type="openapi" templateFolder="...">
+    <containerMatcher type="element" value="OpenApiFile">
+    <matcher type="element" value="EntityDef">
+        <variableAssignment type="constant" key="rootPackage" value="com.capgemini.demo" />
+        <variableAssignment type="property" key="component" value="componentName" />
+        <variableAssignment type="property" key="entityName" value="name" />
+    </matcher>
+</trigger>
+
+
+
+
+
+
+

Input reader

+
+
+

The CobiGen OpenAPI Plug-in implements an input reader for OpenAPI 3.0 files. The XML input reader will create the following object model for template creation:

+
+
+
    +
  • +

    model ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      header (HeaderDef :: Definition of the header found at the top of the file)

      +
    • +
    • +

      name ('String' :: Name of the current Entity)

      +
    • +
    • +

      componentName ('String' :: name of the component the entity belongs to)

      +
    • +
    • +

      component (ComponentDef :: Full definition of the component that entity belongs to)

      +
    • +
    • +

      description ('String' :: Description of the Entity)

      +
    • +
    • +

      properties (List<PropertyDef> :: List of properties the entity has)

      +
    • +
    • +

      relationShips (List<RelationShip> :: List of Relationships the entity has)

      +
    • +
    +
    +
  • +
  • +

    HeaderDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      info (InfoDef :: Definition of the info found in the header)

      +
    • +
    • +

      servers (List<ServerDef> :: List of servers the specification uses)

      +
    • +
    +
    +
  • +
  • +

    InfoDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      title ('String' :: The title of the specification)

      +
    • +
    • +

      description ('String' :: The description of the specification)

      +
    • +
    +
    +
  • +
  • +

    ServerDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      URI ('String' :: String representation of the Server location)

      +
    • +
    • +

      description ('String' :: description of the server)

      +
    • +
    +
    +
  • +
  • +

    ComponentDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      paths (List<PathDef> :: List of services for this component)

      +
    • +
    +
    +
  • +
  • +

    PropertyDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      name ('String' :: Name of the property)

      +
    • +
    • +

      type ('String' :: type of the property)

      +
    • +
    • +

      format ('String' :: format of the property (i.e. int64))

      +
    • +
    • +

      isCollection (boolean :: true if the property is a collection, false by default)

      +
    • +
    • +

      isEntity (boolean :: true if the property refers to another entity, false by default)

      +
    • +
    • +

      sameComponent (boolean :: true if the entity that the property refers to belongs to the same component, false by default)

      +
    • +
    • +

      description ('String' :: Description of the property)

      +
    • +
    • +

      required (boolean :: true if the property is set as required)

      +
    • +
    • +

      constraints ('Map<String, Object>')

      +
    • +
    +
    +
  • +
  • +

    RelationShip ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      type ('String' :: type of the relationship (OneToOne, ManyToMany, etc…​))

      +
    • +
    • +

      entity ('String' :: destination entity name)

      +
    • +
    • +

      sameComponent (boolean :: true if the destination entity belongs to the same component of the source entity, false by default)

      +
    • +
    • +

      unidirectional (boolean :: true if the relationship is unidirectional, false by default)

      +
    • +
    +
    +
  • +
  • +

    PathDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      rootComponent ('String' :: the first segment of the path)

      +
    • +
    • +

      version ('String' :: version of the service)

      +
    • +
    • +

      pathURI ('String' :: URI of the path, the segment after the version)

      +
    • +
    • +

      operations (List<OperationDef> :: List of operations for this path)

      +
    • +
    +
    +
  • +
  • +

    OperationDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      type ('String' :: type of the operation (GET, PUT, etc…​))

      +
    • +
    • +

      parameters (List<ParameterDef> :: List of parameters)

      +
    • +
    • +

      operationId ('String' :: name of the operation prototype)

      +
    • +
    • +

      description ('String' :: JavaDoc Description of the operation)

      +
    • +
    • +

      summary (List<PropertyDef> :: JavaDoc operation Summary)

      +
    • +
    • +

      tags ('List<String>' :: List of different tags)

      +
    • +
    • +

      responses (List<ResponseDef> :: Responses of the operation)

      +
    • +
    +
    +
  • +
  • +

    ParameterDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      isSearchCriteria (boolean :: true if the response is an SearchCriteria object)

      +
    • +
    • +

      inPath (boolean :: true if this parameter is contained in the request path)

      +
    • +
    • +

      inQuery (boolean :: true if this parameter is contained in a query)

      +
    • +
    • +

      isBody (boolean :: true if this parameter is a response body)

      +
    • +
    • +

      inHeader (boolean :: true if this parameter is contained in a header)

      +
    • +
    • +

      mediaType ('String' :: String representation of the media type of the parameter)

      +
    • +
    +
    +
  • +
  • +

    ResponseDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      isArray (boolean :: true if the type of the response is an Array)

      +
    • +
    • +

      isPaginated (boolean :: true if the type of the response is paginated)

      +
    • +
    • +

      isVoid (boolean :: true if there is no type/an empty type)

      +
    • +
    • +

      isEntity (boolean :: true if the type of the response is an Entity)

      +
    • +
    • +

      entityRef (EntityDef :: Incomplete EntityDef containing the name and properties of the referenced Entity)

      +
    • +
    • +

      type ('String' :: String representation of the attribute’s value)

      +
    • +
    • +

      code ('String' :: String representation of the HTTP status code)

      +
    • +
    • +

      mediaTypes ('List<String>' :: List of media types that can be returned)

      +
    • +
    • +

      description ('String' :: Description of the response)

      +
    • +
    +
    +
  • +
+
+
+
+
+

Merger extensions

+
+
+

This plugin only provides an input reader; there is no support for OpenAPI merging. Nevertheless, the files generated from an OpenAPI file will be Java, XML, JSON, TS, etc…​ so, +for each file to be generated as defined in templates.xml, the mergeStrategy for the specific language must be set (javamerge, javamerge_override, jsonmerge, etc…​)

+
+
+
+
<templates>
+    ...
+    <templateExtension ref="${variables.entityName}.java" mergeStrategy="javamerge"/>
+    ...
+    <templateExtension ref="${variables.entityName}dataGrid.component.ts" mergeStrategy="tsmerge"/>
+    ...
+    <templateExtension ref="en.json" mergeStrategy="jsonmerge"/>
+</templates>
+
+
+
+
+
+

Usage

+
+
+

Writing OpenAPI 3.0 contract file

+
+

The Swagger file must follow the OpenAPI 3.0 standard to be readable by CobiGen, otherwise an error will be thrown. +A full documentation about how to follow this standard can be found in the Swagger3 Docs.

+
+
+

The Swagger file must be at the core folder of your devon4j project, like shown below:

+
+
+

Swagger at devon4j Project

+
+
+

To be compatible with CobiGen and devon4j, it must follow some specific configurations. These configurations allow us to avoid redundant definitions, as SearchCriteria and PaginatedList objects are used in the service definitions.

+
+
+
+

Paths

+
+
    +
  • +

    Just adding the tags property at the end of the service definitions with the items `SearchCriteria` and/or paginated lets CobiGen know that a standard devon4j SearchCriteria and/or PaginateListTo object must be generated. That way, the Swagger file will be easier to write and even more understandable.

    +
  • +
  • +

    The path must start with the component name, and define an x-component tag with the component name. That way this service will be included into the component services list.

    +
  • +
+
+
+
+
  /componentnamemanagement/v1/entityname/customOperation/:
+    x-component: componentnamemanagement
+    post:
+      summary: 'Summary of the operation'
+      description: Description of the operation.
+      operationId: customOperation
+      responses:
+        '200':
+          description: Description of the response.
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: '#/components/schemas/EntityName'
+      requestBody:
+        $ref: '#/components/requestBodies/EntityName'
+      tags:
+        - searchCriteria
+        - paginated
+
+
+
+

That way, CobiGen will be able to generate the endpoint (REST service) customOperation on componentmanagement. If you do not specify the component to generate to (the x-component tag) then this service will not be taken into account for generation.

+
+
+
+

Service based generation

+
+

In previous CobiGen versions, we were able to generate code from a contract-first OpenAPI specification only when we defined components like the following:

+
+
+
+
components:
+    schemas:
+        Shop:
+          x-component: shopmanagement
+          description: Entity definition of Shop
+          type: object
+          properties:
+            shopExample:
+              type: string
+              maxLength: 100
+              minLength: 5
+              uniqueItems: true
+
+
+
+

We could not generate services without the definition of those components.

+
+
+

In our current version, we have overcome it, so that now we are able to generate all the services independently. You just need to add an x-component tag with the name of the component that will make use of that service. See here.

+
+
+

A small OpenAPI example defining only services can be found below:

+
+
+
+
openapi: 3.0.0
+servers:
+  - url: 'https://localhost:8081/server/services/rest'
+    description: Just some data
+info:
+  title: Devon Example
+  description: Example of a API definition
+  version: 1.0.0
+  x-rootpackage: com.capgemini.spoc.openapi
+paths:
+  /salemanagement/v1/sale/{saleId}:
+    x-component: salemanagement
+    get:
+      operationId: findSale
+      parameters:
+        - name: saleId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Any
+  /salemanagement/v1/sale/{bla}:
+    x-component: salemanagement
+    get:
+      operationId: findSaleBla
+      parameters:
+        - name: bla
+          in: path
+          required: true
+          schema:
+            type: integer
+            format: int64
+            minimum: 10
+            maximum: 200
+      responses:
+        '200':
+          description: Any
+
+
+
+

Then, the increment that you need to select for generating those services is Crud devon4ng Service based Angular:

+
+
+

Service based generation

+
+
+
+

Full example

+
+

This example yaml file can be downloaded from here.

+
+
+ + + + + +
+ + +As you will see in the file, "x-component" tags are obligatory if you want to generate components (entities). They have to be defined for each one. +In addition, you will find the global variable "x-rootpackage" that is explained here. +
+
+
+
+
openapi: 3.0.0
+servers:
+  - url: 'https://localhost:8081/server/services/rest'
+    description: Just some data
+info:
+  title: Devon Example
+  description: Example of a API definition
+  version: 1.0.0
+  x-rootpackage: com.devonfw.angular.test
+paths:
+  /shopmanagement/v1/shop/{shopId}:
+    x-component: shopmanagement
+    get:
+      operationId: findShop
+      parameters:
+        - name: shopId
+          in: path
+          required: true
+          schema:
+            type: integer
+            format: int64
+            minimum: 0
+            maximum: 50
+      responses:
+        '200':
+          description: Any
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/Shop'
+            text/plain:
+              schema:
+                type: string
+        '404':
+          description: Not found
+  /salemanagement/v1/sale/{saleId}:
+    x-component: salemanagement
+    get:
+      operationId: findSale
+      parameters:
+        - name: saleId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Any
+  /salemanagement/v1/sale/:
+    x-component: salemanagement
+    post:
+      responses:
+        '200':
+          description: Any
+      requestBody:
+        $ref: '#/components/requestBodies/SaleData'
+      tags:
+       - searchCriteria
+  /shopmanagement/v1/shop/new:
+    x-component: shopmanagement
+    post:
+      responses:
+       '200':
+          description: Any
+      requestBody:
+        $ref: '#/components/requestBodies/ShopData'
+components:
+    schemas:
+        Shop:
+          x-component: shopmanagement
+          description: Entity definition of Shop
+          type: object
+          properties:
+            shopExample:
+              type: string
+              maxLength: 100
+              minLength: 5
+              uniqueItems: true
+            sales:
+              type: array # Many to One relationship
+              items:
+                $ref: '#/components/schemas/Sale'
+        Sale:
+          x-component: salemanagement
+          description: Entity definition of Shop
+          type: object
+          properties:
+            saleExample:
+              type: number
+              format: int64
+              maximum: 100
+              minimum: 0
+          required:
+            - saleExample
+
+    requestBodies:
+        ShopData:
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/Shop'
+          required: true
+        SaleData:
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/Sale'
+          required: true
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-propertyplugin.html b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-propertyplugin.html new file mode 100644 index 00000000..bb11255e --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-propertyplugin.html @@ -0,0 +1,297 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Property Plug-in +The CobiGen Property Plug-in currently only provides different merge mechanisms for documents written in Java property syntax.

+
+
+

Merger extensions

+
+
+

There are two merge strategies for Java properties, which can be configured in the templates.xml:

+
+
+
    +
  • +

    Merge strategy propertymerge (merges two properties documents and keeps the existing properties on conflicts)

    +
  • +
  • +

    Merge strategy propertymerge_override (merges two properties documents and overrides the existing properties on conflicts)

    +
  • +
+
+
+

Both documents (base and patch) will be parsed using the Java 7 API and will be compared according to their keys. Conflicts will occur if a key in the patch already exists in the base document.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-templates_helpful-links.html b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-templates_helpful-links.html new file mode 100644 index 00000000..a25a13b7 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-templates_helpful-links.html @@ -0,0 +1,293 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ + +
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-textmerger.html b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-textmerger.html new file mode 100644 index 00000000..d4299008 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-textmerger.html @@ -0,0 +1,555 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

Text Merger Plug-in +The Text Merger Plug-in enables merging result free text documents to existing free text documents. Therefore, the algorithms are also very rudimentary.

+
+
+

Merger extensions

+
+
+

There are currently three main merge strategies that apply for the whole document:

+
+
+
    +
  • +

    merge strategy textmerge_append (appends the text directly to the end of the existing document) +_Remark_: If no anchors are defined, this will simply append the patch.

    +
  • +
  • +

    merge strategy textmerge_appendWithNewLine (appends the text after adding a new line break to the existing document) +_Remark_: empty patches will not result in appending a new line any more since v1.0.1 +Remark: Only suitable if no anchors are defined, otherwise it will simply act as textmerge_append

    +
  • +
  • +

    merge strategy textmerge_override (replaces the contents of the existing file with the patch) +_Remark_: If anchors are defined, override is set as the default mergestrategy for every text block if not redefined in an anchor specification.

    +
  • +
+
+
+
+
+

Anchor functionality

+
+
+

If a template contains text that fits the definition of anchor:${documentpart}:${mergestrategy}:anchorend or more specifically the regular expression (.*)anchor:([^:]+):(newline_)?([^:]+)(_newline)?:anchorend\\s*(\\r\\n|\\r|\\n), some additional functionality becomes available about specific parts of the incoming text and the way it will be merged with the existing text. These anchors always change things about the text to come up until the next anchor; text before it is ignored.

+
+
+

If no anchors are defined, the complete patch will be appended depending on your choice for the template in the file templates.xml.

+
+
+

[[anchordef]]

+
+
+

Anchor Definition

+
+

Anchors should always be defined as a comment of the language the template results in, as you do not want them to appear in your readable version, but cannot define them as FreeMarker comments in the template, or the merger will not know about them. +Anchors will also be read when they are not comments due to the merger being able to merge multiple types of text-based languages, thus making it practically impossible to filter for the correct comment declaration. That is why anchors have to always be followed by line breaks. That way there is a universal way to filter anchors that should have anchor functionality and ones that should appear in the text. +Remark: If the resulting language has closing tags for comments, they have to appear in the next line. +Remark: If you do not put the anchor into a new line, all the text that appears before it will be added to the anchor.

+
+
+
+

Document parts

+
+

In general, ${documentpart} is an id to mark a part of the document, that way the merger knows what parts of the text to merge with which parts of the patch (e.g. if the existing text contains anchor:table:${}:anchorend that part will be merged with the part tagged anchor:table:${}:anchorend of the patch).

+
+
+

If the same documentpart is defined multiple times, it can lead to errors, so instead of defining table multiple times, use table1, table2, table3 etc.

+
+
+

If a ${documentpart} is defined in the document but not in the patch and they are in the same position, it is processed in the following way: If only the documentparts header, test and footer are defined in the document in that order, and the patch contains header, order and footer, the resulting order will be header, test, order then footer.

+
+
+

The following documentparts have default functionality:

+
+
+
    +
  1. +

    anchor:header:${mergestrategy}:anchorend marks the beginning of a header, that will be added once when the document is created, but not again. +Remark: This is only done once, if you have header in another anchor, it will be ignored

    +
  2. +
  3. +

    anchor:footer:${mergestrategy}:anchorend marks the beginning of a footer, that will be added once when the document is created, but not again. Once this is invoked, all following text will be included in the footer, including other anchors.

    +
  4. +
+
+
+

[[mergestrategies]]

+
+
+
+

Mergestrategies

+
+

Mergestrategies are only relevant in the patch, as the merger is only interested in how text in the patch should be managed, not how it was managed in the past.

+
+
+
    +
  1. +

    anchor:${documentpart}::anchorend will use the merge strategy from templates.xml, see Merger-Extensions.

    +
  2. +
  3. +

    anchor:${}:${mergestrategy}_newline:anchorend or anchor:${}:newline_${mergestrategy}:anchorend states that a new line should be appended before or after this anchors text, depending on where the newline is (before or after the mergestrategy). anchor:${documentpart}:newline:anchorend puts a new line after the anchors text. +Remark: Only works with appending strategies, not merging/replacing ones. These strategies currently include: appendbefore, append/appendafter

    +
  4. +
  5. +

    anchor:${documentpart}:override:anchorend means that the new text of this documentpart will replace the existing one completely

    +
  6. +
  7. +

    anchor:${documentpart}:appendbefore:anchorend or anchor:${documentpart}:appendafter:anchorend/anchor:${documentpart}:append:anchorend specifies whether the text of the patch should come before the existing text or after.

    +
  8. +
+
+
+
+
+
+

Usage Examples

+
+
+

General

+
+

Below you can see how a file with anchors might look like (using adoc comment tags), with examples of what you might want to use the different functions for.

+
+
+
+
// anchor:header:append:anchorend
+
+Table of contents
+Introduction/Header
+
+// anchor:part1:appendafter:anchorend
+
+Lists
+Table entries
+
+// anchor:part2:nomerge:anchorend
+
+Document Separators
+adoc table definitions
+
+// anchor:part3:override:anchorend
+
+Anything that you only want once but changes from time to time
+
+// anchor:footer:append:anchorend
+
+Copyright Info
+Imprint
+
+
+
+
+

Merging

+
+

In this section you will see a comparison on what files look like before and after merging

+
+
+

override

+
+
Before
+
+
// anchor:part:override:anchorend
+Lorem Ipsum
+
+
+
+
Patch
+
+
// anchor:part:override:anchorend
+Dolor Sit
+
+
+
+
After
+
+
// anchor:part:override:anchorend
+Dolor Sit
+
+
+
+
+

Appending

+
+
Before
+
+
// anchor:part:append:anchorend
+Lorem Ipsum
+// anchor:part2:appendafter:anchorend
+Lorem Ipsum
+// anchor:part3:appendbefore:anchorend
+Lorem Ipsum
+
+
+
+
Patch
+
+
// anchor:part:append:anchorend
+Dolor Sit
+// anchor:part2:appendafter:anchorend
+Dolor Sit
+// anchor:part3:appendbefore:anchorend
+Dolor Sit
+
+
+
+
After
+
+
// anchor:part:append:anchorend
+Lorem Ipsum
+Dolor Sit
+// anchor:part2:appendafter:anchorend
+Lorem Ipsum
+Dolor Sit
+// anchor:part3:appendbefore:anchorend
+Dolor Sit
+Lorem Ipsum
+
+
+
+
+

Newline

+
+
Before
+
+
// anchor:part:newline_append:anchorend
+Lorem Ipsum
+// anchor:part:append_newline:anchorend
+Lorem Ipsum
+(end of file)
+
+
+
+
Patch
+
+
// anchor:part:newline_append:anchorend
+Dolor Sit
+// anchor:part:append_newline:anchorend
+Dolor Sit
+(end of file)
+
+
+
+
After
+
+
// anchor:part:newline_append:anchorend
+Lorem Ipsum
+
+Dolor Sit
+// anchor:part:append_newline:anchorend
+Lorem Ipsum
+Dolor Sit
+
+(end of file)
+
+
+
+
+
+
+
+

Error List

+
+
+
    +
  • +

    If there are anchors in the text, but either base or patch do not start with one, the merging process will be aborted, as text might go missing this way.

    +
  • +
  • +

    Using _newline or newline_ with mergestrategies that don’t support it, like override, will abort the merging process. See <<`mergestrategies`,Merge Strategies>> →2 for details.

    +
  • +
  • +

    Using undefined mergestrategies will abort the merging process.

    +
  • +
  • +

    Wrong anchor definitions, for example anchor:${}:anchorend will abort the merging process, see <<`anchordef`,Anchor Definition>> for details.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-tsplugin.html b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-tsplugin.html new file mode 100644 index 00000000..9e7a1d76 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-tsplugin.html @@ -0,0 +1,606 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

TypeScript Plug-in

+
+
+

The TypeScript Plug-in enables merging resulting TS files into existing ones. This plug-in is currently used to generate an Angular2 client with all CRUD functionalities enabled. The plug-in also generates i18n functionality by appending the ES or EN suffixes at the end of each word, letting the developer know that these words must be translated to the corresponding language. Currently, the generation of the Angular2 client requires an ETO Java object as input, so there is no need to implement an input reader for TS artifacts for the moment.

+
+
+

Trigger Extensions

+
+
+

As for the Angular2 generation the input is a java object, the trigger expressions (including matchers and variable assignments) are implemented as Java.

+
+
+
+
+

Merger extensions

+
+
+

This plugin uses the TypeScript Merger to merge files. There are currently two merge strategies:

+
+
+
    +
  • +

    merge strategy tsmerge (add the new code respecting the existing in case of conflict)

    +
  • +
  • +

    merge strategy tsmerge_override (add the new code overwriting the existent in case of conflict)

    +
  • +
+
+
+

The merge algorithm mainly handles the following AST nodes:

+
+
+
    +
  • +

    ImportDeclaration

    +
    +
      +
    • +

      Will add non existent imports whatever the merge strategy is.

      +
    • +
    • +

      For different imports from same module, the import clauses will be merged.

      +
      +
      +
      import { a } from 'b';
      +import { c } from 'b';
      +//Result
      +import { a, c } from 'b';
      +
      +
      +
    • +
    +
    +
  • +
  • +

    ClassDeclaration

    +
    +
      +
    • +

      Adds non existent base properties from patch based on the name property.

      +
    • +
    • +

      Adds non existent base methods from patch based on the name signature.

      +
    • +
    • +

      Adds non existent annotations to class, properties and methods.

      +
    • +
    +
    +
  • +
  • +

    PropertyDeclaration

    +
    +
      +
    • +

      Adds non existent decorators.

      +
    • +
    • +

      Merge existent decorators.

      +
    • +
    • +

      With override strategy, the value of the property will be replaced by the patch value.

      +
    • +
    +
    +
  • +
  • +

    MethodDeclaration

    +
    +
      +
    • +

      With override strategy, the body will be replaced.

      +
    • +
    • +

      The parameters will be merged.

      +
    • +
    +
    +
  • +
  • +

    ParameterDeclaration

    +
    +
      +
    • +

      Replace type and modifiers with override merge strategy, adding non existent from patch into base.

      +
    • +
    +
    +
  • +
  • +

    ConstructorDeclaration

    +
    +
      +
    • +

      Merged in the same way as Method is.

      +
    • +
    +
    +
  • +
  • +

    FunctionDeclaration

    +
    +
      +
    • +

      Merged in the same way as Method is.

      +
    • +
    +
    +
  • +
+
+
+
+
+

Input reader

+
+
+

The TypeScript input reader is based on the one that the TypeScript merger uses. The current extensions are additional module fields indicating from which library each entity originates. +module: null specifies a standard entity or type such as string or number.

+
+
+

Object model

+
+

To get a first impression of the created object after parsing, let us start with analyzing a small example, namely the parsing of a simple type-orm model written in TypeScript.

+
+
+
+
import {Entity, PrimaryGeneratedColumn, Column} from "typeorm";
+
+@Entity()
+export class User {
+
+    @PrimaryGeneratedColumn()
+    id: number;
+
+    @Column()
+    firstName: string;
+
+    @Column()
+    lastName: string;
+
+    @Column()
+    age: number;
+
+}
+
+
+
+

The returned object has the following structure

+
+
+
+
{
+  "importDeclarations": [
+    {
+      "module": "typeorm",
+      "named": [
+        "Entity",
+        "PrimaryGeneratedColumn",
+        "Column"
+      ],
+      "spaceBinding": true
+    }
+  ],
+  "classes": [
+    {
+      "identifier": "User",
+      "modifiers": [
+        "export"
+      ],
+      "decorators": [
+        {
+          "identifier": {
+            "name": "Entity",
+            "module": "typeorm"
+          },
+          "isCallExpression": true
+        }
+      ],
+      "properties": [
+        {
+          "identifier": "id",
+          "type": {
+            "name": "number",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "PrimaryGeneratedColumn",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        },
+        {
+          "identifier": "firstName",
+          "type": {
+            "name": "string",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "Column",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        },
+        {
+          "identifier": "lastName",
+          "type": {
+            "name": "string",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "Column",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        },
+        {
+          "identifier": "age",
+          "type": {
+            "name": "number",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "Column",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        }
+      ]
+    }
+  ]
+}
+
+
+
+

If we only consider the first level of the JSON response, we spot two lists of imports and classes, providing information about the only import statement and the only User class, respectively. Moving one level deeper we observe that:

+
+
+
    +
  • +

    Every import statement is translated to an import declaration entry in the declarations list, containing the module name, as well as a list of entities imported from the given module.

    +
  • +
  • +

    Every class entry provides besides the class identifier, its decoration(s), modifier(s), as well as a list of properties that the original class contains.

    +
  • +
+
+
+

Note that, for each given type, the module from which it is imported is also given as in

+
+
+
+
  "identifier": {
+    "name": "Column",
+    "module": "typeorm"
+  }
+
+
+
+

Returning to the general case, independently of the given TypeScript file, an object having the following structure will be created.

+
+
+
    +
  • +

    importDeclarations: A list of import statements as described above

    +
  • +
  • +

    exportDeclarations: A list of export declarations

    +
  • +
  • +

    classes: A list of classes extracted from the given file, where each entry is full of class specific fields, describing its properties and decorator for example.

    +
  • +
  • +

    interfaces: A list of interfaces.

    +
  • +
  • +

    variables: A list of variables.

    +
  • +
  • +

    functions: A list of functions.

    +
  • +
  • +

    enums: A list of enumerations.

    +
  • +
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-usecases.html b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-usecases.html new file mode 100644 index 00000000..6af149c4 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-usecases.html @@ -0,0 +1,445 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==General use cases

+
+
+

In addition to the selection of CobiGen applications introduced before, this chapter provides a more detailed overview about the currently implemented and maintained general use cases. These can be used by any project following a supported reference architecture as e.g. the devonfw or Register Factory.

+
+
+

devon4j

+
+
+

With our templates for devon4j, you can generate a whole CRUD application from a single Entity class. You save the effort of creating DAOs, Transfer Objects, and simple CRUD use cases with REST services — even the client application can be generated.

+
+
+

CRUD server application for devon4j

+
+

For the server, the required files for all architectural layers (Data access, logic, and service layer) can be created based on your Entity class. After the generation, you have CRUD functionality for the entity from bottom to top which can be accessed via a RESTful web service. Details are provided in the devonfw wiki.

+
+
+
+

CRUD client application for devon4ng

+
+

Based on the REST services on the server, you can also generate an Angular client based on devon4ng. With the help of Node.js, you have a working client application for displaying your entities within minutes!

+
+
+
+

Test data Builder for devon4j

+
+

Generating a builder pattern for POJOs to easily create test data in your tests. CobiGen is not only able to generate a plain builder pattern, but rather builders which follow a specific concept to minimize test data generation efforts in your unit tests. Take the following Person class as an example:

+
+
+
Person class
+
+
public class Person {
+
+    private String firstname;
+    private String lastname;
+    private int birthyear;
+    @NotNull
+    private Address address;
+
+    @NotNull
+    public String getFirstname() {
+        return this.firstname;
+    }
+
+    // additional default setter and getter
+}
+
+
+
+

It is a simple POJO with a validation annotation to indicate that firstname should never be null. Creating this object in a test would imply calling every setter, which is kind of nasty. Therefore, the Builder Pattern was introduced in software engineering quite a long time ago, allowing POJOs to be easily created with a fluent API. See below.

+
+
+
Builder pattern example
+
+
Person person = new PersonBuilder()
+                .firstname("Heinz")
+                .lastname("Erhardt")
+                .birthyear(1909)
+                .address(
+                    new AddressBuilder().postcode("22222")
+                        .city("Hamburg").street("Luebecker Str. 123")
+                        .createNew())
+                .addChild(
+                    new PersonBuilder()[...].createNew()).createNew();
+
+
+
+

The Builder API generated by CobiGen allows you to set any setter-accessible field of a POJO in a fluent way. But in addition, let's assume a test which should check the birth year as a precondition for any business operation. So specifying all other fields of Person, especially firstname as it is mandatory to enter business code, would not make sense. The test behavior should just depend on the specification of the birth year and on no other data. So we would like to just provide this data to the test.

+
+
+

The Builder classes generated by CobiGen try to tackle this inconvenience by providing the ability to declare default values for any mandatory field due to validation or database constraints.

+
+
+
Builder Outline
+
+
public class PersonBuilder {
+
+    private void fillMandatoryFields() {
+        firstname("lasdjfaöskdlfja");
+        address(new AddressBuilder().createNew());
+    };
+    private void fillMandatoryFields_custom() {...};
+
+    public PersonBuilder firstname(String value);
+    public PersonBuilder lastname(String value);
+    ...
+
+    public Person createNew();
+    public Person persist(EntityManager em);
+    public List<Person> persistAndDuplicate(EntityManager em, int count);
+}
+
+
+
+

Looking at the plotted builder API generated by CobiGen, you will find two private methods. The method fillMandatoryFields will be generated by CobiGen and regenerated every time CobiGen generation will be triggered for the Person class. This method will set every automatically detected field with not null constraints to a default value. However, by implementing fillMandatoryFields_custom on your own, you can reset these values or even specify more default values for any other field of the object. Thus, running new PersonBuilder().birthyear(1909).createNew(); will create a valid object of Person, which is already pre-filled such that it does not influence the test execution besides the fact that it circumvents database and validation issues.

+
+
+

This even holds for complex data structures as indicated by address(new AddressBuilder().createNew());. Due to the use of the AddressBuilder for setting the default value for the field address, also the default values for Address will be set automatically.

+
+
+

Finally, the builder API provides different methods to create new objects.

+
+
+
    +
  • +

    createNew() just creates a new object from the builder specification and returns it.

    +
  • +
  • +

    persist(EntityManager) will create a new object from the builder specification and persists it to the database.

    +
  • +
  • +

    persistAndDuplicate(EntityManager, int) will create the given amount of objects from the builder specification and persist all of these. After the initial generation of each builder, you might want to adapt the method body as you will most probably not be able to persist more than one object with the same field assignments to the database due to unique constraints. Thus, please see the generated comment in the method to adapt unique fields accordingly before persisting to the database.

    +
  • +
+
+
+

Custom Builder for Business Needs

+
+

CobiGen just generates basic builder for any POJO. However, for project needs you probably would like to have even more complex builders, which enable the easy generation of more complex test data which are encoded in a large object hierarchy. Therefore, the generated builders can just be seen as a tool to achieve this. You can define your own business driven builders in the same way as the generated builders, but explicitly focusing on your business needs. Just take this example as a demonstration of that idea:

+
+
+
+
  University uni = new ComplexUniversityBuilder()
+    .withStudents(200)
+    .withProfessors(4)
+    .withExternalStudent()
+    .createNew();
+
+
+
+

E.g. the method withExternalStudent() might create a person, which is a student and is flagged to be an external student. Basing this implementation on the generated builders will even assure that you would benefit from any default values you have set before. In addition, you can even imagine more complex builder methods setting values driven by your reusable testing needs based on specific business knowledge.

+
+
+
+
+
+
+

Register Factory

+
+
+

CRUD server application

+
+

Generates a CRUD application with persistence entities as inputs. This includes DAOs, TOs, use cases, as well as a CRUD JSF user interface if needed.

+
+
+
+

Test data Builder

+ +
+
+

Test documentation

+
+

Generate test documentation from test classes. The input are the doclet tags of several test classes, which e.g. can specify a description, a cross-reference, or a test target description. The result currently is a csv file, which lists all tests with the corresponding meta-information. Afterwards, this file might be styled and passed to the customer if needed and it will be up-to-date every time!

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-xmlplugin.html b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-xmlplugin.html new file mode 100644 index 00000000..4c2f9162 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/cobigen-xmlplugin.html @@ -0,0 +1,567 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==XML Plug-in +The CobiGen XML Plug-in comes with an input reader for XML artifacts, XML related trigger and matchers and provides different merge mechanisms for XML result documents.

+
+
+

Trigger extension

+
+
+

(since cobigen-xmlplugin v2.0.0)

+
+
+

The XML Plug-in provides a trigger for XML related inputs. It accepts XML documents as input (see XML input reader) and provides additional matching and variable assignment mechanisms. The configuration in the context.xml for this trigger looks like this:

+
+
+
    +
  • +

    type 'xml'

    +
    +
    Example of a XML trigger definition.
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables XML documents as inputs.

    +
    +
  • +
  • +

    type xpath

    +
    +
    Example of a xpath trigger definition.
    +
    +
    <trigger id="..." type="xpath" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables XML documents as container inputs, which consists of several sub-documents.

    +
    +
  • +
+
+
+

Container Matcher type

+
+

A ContainerMatcher checks if the input is a valid container.

+
+
+
    +
  • +

    xpath: type: xpath

    +
    +
    Example of a XML trigger definition with a node name matcher.
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <containerMatcher type="xpath" value="./uml:Model//packagedElement[@xmi:type='uml:Class']">
    +        ...
    +    </containerMatcher>
    +</trigger>
    +
    +
    +
    +

    Before applying any Matcher, this containerMatcher checks if the XML file contains a node uml:Model with a child node packagedElement which contains an attribute xmi:type with the value uml:Class.

    +
    +
  • +
+
+
+
+

Matcher types

+
+

With the trigger you might define matchers, which restrict the input upon specific aspects:

+
+
+
    +
  • +

    XML: type nodename → document’s root name matching

    +
    +
    Example of a XML trigger definition with a node name matcher
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <matcher type="nodename" value="\D\w*">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the root name of the declaring input document matches the given regular expression (value).

    +
    +
  • +
  • +

    xpath: type: xpath → matching a node with a xpath value

    +
    +
    Example of a xpath trigger definition with a xpath matcher.
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <matcher type="xpath" value="/packagedElement[@xmi:type='uml:Class']">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the XML file contains a node /packagedElement where the xmi:type property equals uml:Class.

    +
    +
  • +
+
+
+
+

Variable Assignment types

+
+

Furthermore, it provides the ability to extract information from each input for further processing in the templates. The values assigned by variable assignments will be made available in templates and the destinationPath of context.xml through the namespace variables.<key>. The XML Plug-in currently provides only one mechanism:

+
+
+
    +
  • +

    type 'constant' → constant parameter

    +
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <matcher type="nodename" value="\D\w*">
    +        <variableAssignment type="constant" key="domain" value="restaurant" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value to the key as a constant.

+
+
+
+

XML input reader

+
+

The CobiGen XML Plug-in implements an input reader for parsed XML documents. So API users can pass org.w3c.dom.Document objects for generation. For getting the right parsed XML inputs you can easily use the xmlplugin.util.XmlUtil, which provides static functionality to parse XML files or input streams and get the appropriate Document object.

+
+
+

Template object

+
+

Due to the heterogeneous structure an XML document can have, the XML input reader does not always create exactly the same model structure (in contrast to the java input reader). For example the model’s depth differs strongly, according to its input document. To allow navigational access to the nodes, the model also depends on the document’s element’s node names. All child elements with unique names are directly accessible via their names. In addition it is possible to iterate over all child elements with help of the child list Children. So it is also possible to access child elements with non unique names.

+
+
+

The XML input reader will create the following object model for template creation (EXAMPLEROOT, EXAMPLENODE1, EXAMPLENODE2, EXAMPLEATTR1,…​ are just used here as examples. Of course they will be replaced later by the actual node or attribute names):

+
+
+
    +
  • +

    ~EXAMPLEROOT~ ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      _nodeName_ ('String' :: Simple name of the root node)

      +
    • +
    • +

      _text_ ('String' :: Concatenated text content (PCDATA) of the root node)

      +
    • +
    • +

      TextNodes ('List<String>' :: List of all the root’s text node contents)

      +
    • +
    • +

      _at_~EXAMPLEATTR1~ ('String' :: String representation of the attribute’s value)

      +
    • +
    • +

      _at_~EXAMPLEATTR2~ ('String' :: String representation of the attribute’s value)

      +
    • +
    • +

      _at_…​

      +
    • +
    • +

      Attributes ('List<Map<String, Object>>' :: List of the root’s attributes

      +
      +
        +
      • +

        at ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          _attName_ ('String' :: Name of the attribute)

          +
        • +
        • +

          _attValue_ ('String' :: String representation of the attribute’s value)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      Children ('List<Map<String, Object>>' :: List of the root’s child elements

      +
      +
        +
      • +

        child ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          …​common element sub structure…​

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      ~EXAMPLENODE1~ ('Map<String, Object>' :: One of the root’s child nodes)

      +
      +
        +
      • +

        …​common element structure…​

        +
      • +
      +
      +
    • +
    • +

      ~EXAMPLENODE2~ ('Map<String, Object>' :: One of the root’s child nodes)

      +
      +
        +
      • +

        …​common element sub structure…​

        +
      • +
      • +

        ~EXAMPLENODE21~ ('Map<String, Object>' :: One of the nodes' child nodes)

        +
        +
          +
        • +

          …​common element structure…​

          +
        • +
        +
        +
      • +
      • +

        ~EXAMPLENODE…​~

        +
      • +
      +
      +
    • +
    • +

      ~EXAMPLENODE…​~

      +
    • +
    +
    +
  • +
+
+
+

In contrast to the java input reader, this XML input reader does currently not provide any additional template methods.

+
+
+
+
+
+
+

Merger extensions

+
+
+

The XML plugin uses the LeXeMe merger library to produce semantically correct merge products. The merge strategies can be found in the MergeType enum and can be configured in the templates.xml as a mergeStrategy attribute:

+
+
+
    +
  • +

    mergeStrategy xmlmerge

    +
    +
    Example of a template using the mergeStrategy xmlmerge
    +
    +
    <templates>
    +	<template name="..." destinationPath="..." templateFile="..." mergeStrategy="xmlmerge"/>
    +</templates>
    +
    +
    +
  • +
+
+
+

Currently only the document types included in LeXeMe are supported. +For details on how the merger works, consult the LeXeMe Wiki.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/eclipse-plugin_development.html b/docs/devonfw.github.io/1.0/cobigen.wiki/eclipse-plugin_development.html new file mode 100644 index 00000000..5d953fef --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/eclipse-plugin_development.html @@ -0,0 +1,793 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Eclipse Plugin Development

+
+
+

The Eclipse plugin is where all the other plugins (JavaPlugin, XMLPlugin, PropertyPlugin, TextMerger and the core) are loaded.

+
+
+

Configuration

+
+
+

Activator java

+
+

The Activator class is the starting point of the plugin. It is loaded initially and extends the AbstractUIPlugin, which tells the Eclipse Runtime that this plugin is somehow related to the Eclipse Platform UI. +An ID for the plugin is defined for configuration needs in Plugin.xml.

+
+
+
+
/**
+* The plug-in ID
+*/
+public static final String PLUGIN_ID = "com.capgemini.cobigen.eclipseplugin"; //$NON-NLS-1$
+
+
+
+

The overridden start() method starts the plugin and loads all the sub-plugins using the PluginRegistry from the core for each plug-in:

+
+
+
+
PluginRegistry.loadPlugin(PluginActivator.class);
+
+
+
+ + + + + +
+ + +
+

How the loadPlugin works is explained deeply at core development.

+
+
+
+
+

The activator has the listener `ConfigurationProjectListener.java` from the workbenchcontrol package that continuously checks for changes on the templates project

+
+
+
+

Plugin XML

+
+

The Plugin.xml file is used to initialize the plugin. It defines the commands and the handler for each command, as well as the views in which the plugin menu with the commands should be shown.

+
+
+

The command configuration:

+
+
+
+
<extension point="org.eclipse.ui.commands">
+    <command
+        id="com.capgemini.cobigen.eclipseplugin.generate"
+        name="Generate">
+    </command>
+    <command
+        id="com.capgemini.cobigen.eclipseplugin.healthy_check"
+        name="Healthy Check">
+    </command>
+</extension>
+<extension point="org.eclipse.ui.handlers">
+    <handler
+        class="com.capgemini.cobigen.eclipse.workbenchcontrol.handler.GenerateHandler"
+        commandId="com.capgemini.cobigen.eclipseplugin.generate">
+    </handler>
+    <handler
+        class="com.capgemini.cobigen.eclipse.workbenchcontrol.handler.HealthCheckHandler"
+        commandId="com.capgemini.cobigen.eclipseplugin.health_check">
+    </handler>
+</extension>
+
+
+
+

As can be seen, to define the commands, the PLUGIN_ID defined at the Activator.java is used followed of the name of the command. Then, a handler from workbenchcontrol.handler package is assigned for each command.

+
+
+

After that, the views where we want to show the CobiGen menu as a popup menu are defined. +(e.g. the Project Explorer view)

+
+
+
+
<extension point="org.eclipse.ui.menus">
+    <menuContribution
+        allPopups="false"
+        locationURI="popup:org.eclipse.ui.navigator.ProjectExplorer#PopupMenu">
+        <separator
+            name="com.capgemini.cobigen.eclipseplugin.separator3"
+            visible="true">
+        </separator>
+        <menu label="CobiGen">
+            <command
+                commandId="com.capgemini.cobigen.eclipseplugin.generate"
+                label="Generate..."
+                style="push">
+            </command>
+            <command
+                commandId="com.capgemini.cobigen.eclipseplugin.health_check"
+                label="Health Check..."
+                style="push">
+            </command>
+        </menu>
+        <separator
+            name="com.capgemini.cobigen.eclipseplugin.separator4"
+            visible="true">
+        </separator>
+    </menuContribution>
+</extension>
+
+
+
+

CobiGen Menu

+
+
+
+
+
+

Handlers

+
+
+

The workbenchcontrol package provides the plugin with the listener for the templates project, the listener for logging needs, and the handlers for the two main use cases (Generate and HealthCheck).

+
+
+

Update Templates:

+
+

Update Template: Select an entity file and right-click, then select CobiGen Update Templates. After that, click on Download; a message confirming the successful download will appear.

+
+
+
+

Adapt Templates

+
+

Adapt Template: Select an entity file and right-click, then select CobiGen Adapt Templates. If the CobiGen templates jar is not available, it is downloaded automatically. If the CobiGen templates are already present, the existing templates in the workspace will be overridden; click on OK and a message confirming that the templates were imported successfully will appear.

+
+
+
+

Generate Action Handler

+
+

The wizard launching is the responsibility of the generate handler (`GenerateHandler.java`). In case of the Generate action, and depending on the input provided for it, the handler will create a JavaGeneratorWrapper or XmlGeneratorWrapper object. +For JavaGeneratorWrapper, if the input is a package or a selection of multiple entity files, the wizard will be launched in batch mode calling the `GenerateBatchWizard.java` from the wizard.generate package. But if the input is a single entity java class file, it will be launched in normal mode calling the `GenerateWizard.java` from the same package.

+
+
+ + + + + +
+ + +
+

For both Wrapper objects, the inputs will be converted to valid inputs for FreeMarker using the `Xml/JavaInputConverter.java` from the generator.xml/java package.

+
+
+
+
+

Diagram 1

+
+
+

For XmlGeneratorWrapper, the input must be a single valid XML file. As only has a single file as input, the `GenerateWizard.java` will be called.

+
+
+

In summary, this will be the process for the Generate Action before calling the wizard:

+
+
+

diagram 2

+
+
+
+

Health Check Action Handler

+
+

In the case of the Health Check action, a success/error dialog is shown instead of a wizard. The `HealthCheckHandler.java` will call the execute method of `HealthCheck.java` from the healthcheck package. That class will first test if the templates project exists in the workspace, opening an error dialog if not by throwing and handling the custom exception `GeneratorProjectNotExistentException.java` from the common.exceptions package.

+
+
+
+
try {
+    // check configuration project existence
+    //That method will throw GeneratorProjectNotExistentException
+    generatorConfProj = ResourcesPluginUtil.getGeneratorConfigurationProject();
+    ...
+    ..
+    .
+ } catch (GeneratorProjectNotExistentException e) {
+     LOG.warn("Configuration project not found!", e);
+     healthyCheckMessage = firstStep + "NOT FOUND!\n"
+                           + "=> Please import the configuration project into your workspace as stated in the "
+                           + "documentation of CobiGen or in the one of your project.";
+     PlatformUIUtil.openErrorDialog(HEALTH_CHECK_DIALOG_TITLE, healthyCheckMessage, null);
+}
+
+
+
+

If the project exists, HealthCheck will test if the context.xml file is valid. In case of invalid, HealthCheck will throw and handle the InvalidConfigurationException from the core and check if it is possible to upgrade the version of the XML file, showing an UPGRADE button at the dialog. If the upgrade is not possible, will show a dialog message telling the user to check the context.xml file for errors.

+
+
+
+
try {
+   //The CobiGen constructor will throw the InvalidConfigurationException
+   new CobiGen(generatorConfProj.getLocationURI());
+    ...
+    ..
+    .
+} catch (InvalidConfigurationException e) {
+    healthyCheckMessage = firstStep + "OK.";
+    healthyCheckMessage += secondStep + "INVALID!";
+    if (generatorConfProj != null) {
+        Path configurationProject = Paths.get(generatorConfProj.getLocationURI());
+        ContextConfigurationVersion currentVersion = new ContextConfigurationUpgrader()
+                                                     .resolveLatestCompatibleSchemaVersion(configurationProject);
+        if (currentVersion != null) {
+            // upgrade possible
+            healthyCheckMessage += "\n\nAutomatic upgrade of the context configuration available.\n" + "Detected: "
+                                   + currentVersion + " / Currently Supported: "
+                                   + ContextConfigurationVersion.getLatest();
+            boolean upgraded = openErrorDialogWithContextUpgrade(healthyCheckMessage, configurationProject);
+            if (upgraded) {
+                // re-run Health Check
+                Display.getCurrent().asyncExec(new Runnable() {
+                    @Override
+                    public void run() {
+                        execute();
+                    }
+                });
+            }
+            return;
+        } else {
+            healthyCheckMessage += "\n\nNo automatic upgrade of the context configuration possible. "
+                                   + "Maybe just a mistake in the context configuration?";
+            healthyCheckMessage += "\n\n=> " + e.getLocalizedMessage();
+        }
+}
+
+
+
+

At this point, if all is correct, the user can choose to finish the HealthCheck process or run the Advanced Health Check by running the `AdvancedHealthCheck.java` to check the validity of template configurations. That check has three steps:

+
+
+
    +
  1. +

    Get configuration resources
    +Will get the template configuration file from the template folder corresponding to the input of the plugin provided by the triggers defined at the context.xml file for that input.

    +
  2. +
  3. +

    Determine current state
    +Will check if the template configuration file exists, if it is accessible and if the version is up-to-date allowing upgrading if not.

    +
  4. +
  5. +

    Show current status to the user
    +Will call the `AdvancedHealthCheckDialog.java` showing a dialog with the current state of each configuration template, showing an UPGRADE button if the configuration version can be upgraded.

    +
  6. +
+
+
+
+
+
+

Wizard Development

+
+
+

Starting the Wizard

+
+

To open a wizard, use the WizardDialog class from the org.eclipse.jface.wizard package. +The plugin does that at `GenerateHandler.java` as previously explained here:

+
+
+
+
if (((IStructuredSelection) sel).size() > 1 || (((IStructuredSelection) sel).size() == 1)
+     && ((IStructuredSelection) sel).getFirstElement() instanceof IPackageFragment) {
+     WizardDialog wiz = new WizardDialog(HandlerUtil.getActiveShell(event),
+                        new GenerateBatchWizard(generator));
+     wiz.setPageSize(new Point(800, 500));
+     wiz.open();
+     LOG.info("Generate Wizard (Batchmode) opened.");
+} else if (((IStructuredSelection) sel).size() == 1) {
+     WizardDialog wiz = new WizardDialog(HandlerUtil.getActiveShell(event), new GenerateWizard(generator));
+     wiz.setPageSize(new Point(800, 500));
+     wiz.open();
+     LOG.info("Generate Wizard opened.");
+}
+
+
+
+

Adapt Template: Select an entity file and right-click, then select CobiGen Adapt Templates. If the CobiGen templates jar is not available, it is downloaded automatically. If the CobiGen templates are already present, the existing templates in the workspace will be overridden; click on OK and a message confirming that the templates were imported successfully will appear. +=== Wizard and WizardPages

+
+
+

The Wizard class from the org.eclipse.jface.wizard package provides the functionality to build custom wizards. This class controls the navigation between the different pages and provides the base user interface, for example, an area for error and information messages.

+
+
+

A wizard contains one or several pages of the type WizardPage. Such a page is added to a Wizard object via the addPage() method.

+
+
+

A WizardPage must create a new Composite in its createControl() method. This new Composite must use the Composite of the method parameter as parent. It also must call the setControl() method with this new Composite as parameter. If this is omitted, Eclipse will throw an error.

+
+
+

On the CobiGen eclipse-plugin project: +Diagram 3

+
+
+

The WizardPage class defines the canFlipToNextPage() and setPageComplete() methods to control if the NEXT or the FINISH button in the wizard becomes active.

+
+
+

The Wizard class defines the canFinish() method in which you can define if the wizard can be completed. This last method is overridden at AbstractGenerateWizard.java.

+
+
+
+

Select Files Page and Select Attributes Page

+
+

In case that has been launched in batch mode, the wizard only will have the select increment and files page (initialized and configured at `SelectFilePage.java` from the package wizard.common)

+
+
+

In case of normal mode with an entity java class as input, the wizard will have an optional second page provided for `SelectAttributesPage.java` of the package wizard.generate.common for selecting attributes of the entity that will be used for the generation. The page is optional because the user can finish the wizard and perform the generation from the first page.

+
+
+

The pages of the CobiGen wizard are essentially composed of containers. The containers have a CheckBoxTreeViewer object, a content provider object and a listener (that defines the behavior of the wizard when a check box is checked or unchecked)

+
+
+

Diagram 4

+
+
+

Select Files Page

+
+

The first page (`SelectFilesPage`) is composed of two containers:

+
+
+
    +
  1. +

    Left container - Increment Selector

    +
    +
      +
    • +

      Created as a CustomizedCheckBoxTreeViewer

      +
    • +
    • +

      The content provider is a SelectIncrementContentProvider

      +
    • +
    • +

      Setting the input will upgrade the labels to show

      +
    • +
    • +

      Set CheckStateListener as listener

      +
    • +
    +
    +
  2. +
+
+
+
+
incrementSelector = new CustomizedCheckboxTreeViewer(containerLeft);
+incrementSelector.setContentProvider(new SelectIncrementContentProvider());
+incrementSelector.setInput(cobigenWrapper.getAllIncrements());
+gd = new GridData(GridData.FILL_BOTH);
+gd.grabExcessVerticalSpace = true;
+incrementSelector.getTree().setLayoutData(gd);
+CheckStateListener checkListener = new CheckStateListener(cobigenWrapper, this, batch);
+incrementSelector.addCheckStateListener(checkListener);incrementSelector.expandAll();
+
+
+
+
    +
  1. +

    Right Container - Resources to be generated

    +
    +
      +
    • +

      Created as SimulatedCheckBoxTreeViewer if the Customize button is not enabled or as CustomizedCheckBoxTreeViewer if it is.

      +
    • +
    • +

      SelectFileContentProvider as content provider.

      +
    • +
    • +

      SelectFileLabelProvider as label provider

      +
    • +
    • +

      Generation target project as input

      +
    • +
    • +

      Set CheckStateListener as listener

      +
    • +
    +
    +
  2. +
+
+
+ + + + + +
+ + +
+

To know how a content provider works check the official documentation here.

+
+
+
+
+
+

Select Attributes Page

+
+

As previously explained, this page is optional, the user can press the Finish button at the previous page. Nevertheless, this page can only be accessed in case of a single entity file as input, never on batch mode.

+
+
+

The container is composed of a single CheckBoxTableViewer with a `SelectAttributesContentProvider` as content provider and a `SelectAttributesLabelProvider` as label provider.

+
+
+
+
+
+
+

Finish and perform generation

+
+
+

When the user presses the Finish button, the generation process will begin. For that, a generation job will be created using as argument a list of templates to be generated, retrieving them from the user selection of the first page (Select Files Page). +The generate wizard will use the `GenerateSelectionJob.java` or the `GenerateBatchSelectionJob.java` for normal mode or batch mode respectively.

+
+
+

Diagram 5

+
+
+

Normal Mode

+
+
+
@Override
+protected void generateContents(ProgressMonitorDialog dialog) {
+    if (cobigenWrapper instanceof JavaGeneratorWrapper) {
+        for (String attr : page2.getUncheckedAttributes()) {
+            ((JavaGeneratorWrapper) cobigenWrapper).removeFieldFromModel(attr);
+        }
+    }
+    //Here are retrieved the templates to use for the generation selected at the first page
+    GenerateSelectionJob job = new GenerateSelectionJob(cobigenWrapper, page1.getTemplatesToBeGenerated());
+    try {
+        dialog.run(true, false, job);
+    } catch (InvocationTargetException e) {
+        LOG.error("An internal error occurred while invoking the generation job.", e);
+    } catch (InterruptedException e) {
+        LOG.warn("The working thread doing the generation job has been interrupted.", e);
+    }
+}
+
+
+
+

The dialog.run(true, false, job) method will call the performGeneration() method from `GenerateSelectionJob.java`

+
+
+

Calling the generate() method from the CobiGenWrapper will call the method with the same name from the core and the generation will begin.

+
+
+
+

Batch Mode

+
+

In batch mode, the generation job will be instantiated depending on whether the selection was a container or a multiple files selection.

+
+
+
+
@Override
+protected void generateContents(ProgressMonitorDialog dialog) {
+    List<TemplateTo> templatesToBeGenerated = page1.getTemplatesToBeGenerated();
+    List<String> templateIds = Lists.newLinkedList();
+    for (TemplateTo template : templatesToBeGenerated) {
+        templateIds.add(template.getId());
+    }
+    GenerateBatchSelectionJob job;
+    if (container == null) {
+        job = new GenerateBatchSelectionJob(cobigenWrapper, cobigenWrapper.getTemplates(templateIds),
+                  inputTypes);
+    } else {
+        job = new GenerateBatchSelectionJob(cobigenWrapper, cobigenWrapper.getTemplates(templateIds),
+                  container);
+    }
+    try {
+        dialog.run(true, false, job);
+    } catch (InvocationTargetException e) {
+        LOG.error("An internal error occurred while invoking the generation batch job.", e);
+    } catch (InterruptedException e) {
+        LOG.warn("The working thread doing the generation job has been interrupted.", e);
+    }
+}
+
+
+
+

The dialog.run(true, false, job) method will call the performGeneration() method from `GenerateBatchSelectionJob.java`

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/guide_dev_troubleshooting.html b/docs/devonfw.github.io/1.0/cobigen.wiki/guide_dev_troubleshooting.html new file mode 100644 index 00000000..bf315e1e --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/guide_dev_troubleshooting.html @@ -0,0 +1,446 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Troubleshooting for Developers

+
+
+

CobiGen-eclipse or CobiGen-eclipse-test has build errors after git clone or pull

+
+
+

This might be caused as of the fact, that the cobigen-eclipse*/lib folder is not available after initial cloning or the contents of the lib folder are not in sync with the dependencies specified in the pom.xml respectively with the classpath inclusions in the plugin.xml (tab runtime).

+
+
+

Solution

+
+

In any of these cases you can fix the issue by running a mvn clean package -Pp2-build-photon.

+
+
+
+
+
+

Getting Not authorized , Reason Phrase: Unauthorized

+
+
+

You are facing an error like

+
+
+
+
[ERROR] [ERROR] Some problems were encountered while processing the POMs:
+[ERROR] Unresolveable build extension: Plugin org.apache.maven.wagon:wagon-ftp:1.0-beta-6 or one of its dependencies could not be resolved: Failed to read artifact descriptor for org.apache.maven.wagon:wagon-ftp:jar:1.0-beta-6 @@
+[ERROR] The build could not read 1 project -> [Help 1]
+[ERROR]
+[ERROR]   The project com.capgemini:cobigen-htmlplugin:1.1.0 (D:\toolscobigen\tools-cobigen2\cobigen\cobigen\cobigen-htmlplugin\pom.xml) has 1 error
+[ERROR]     Unresolveable build extension: Plugin org.apache.maven.wagon:wagon-ftp:1.0-beta-6 or one of its dependencies could not be resolved: Failed to read a
+rtifact descriptor for org.apache.maven.wagon:wagon-ftp:jar:1.0-beta-6: Could not transfer artifact org.apache.maven.wagon:wagon-ftp:pom:1.0-beta-6 from/to publ
+ic (https://devon.s2-eu.capgemini.com/nexus/content/groups/public/): Not authorized , ReasonPhrase: Unauthorized. -> [Help 2]
+[ERROR]
+[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
+[ERROR] Re-run Maven using the -X switch to enable full debug logging.
+[ERROR]
+[ERROR] For more information about the errors and possible solutions, please read the following articles:
+[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/ProjectBuildingException
+[ERROR] [Help 2] http://cwiki.apache.org/confluence/display/MAVEN/PluginManagerException
+
+
+
+

Solution

+
+

Please note the message Not authorized , ReasonPhrase: Unauthorized. → [Help 2]!

+
+
+
    +
  1. +

    Please check, that you run the command by using the console.bat or a similar console initialized with the IDE environment variables.

    +
  2. +
  3. +

    Please check your corporate login in the variables-customized.bat to be correct (DEVON_NEXUS_USER and DEVON_NEXUS_PASSWD). Make sure, that you restart the console.bat you are working in after changing the variables-customized.bat. Same holds for eclipse instances running. Please restart to make the new values accessible.

    +
  4. +
  5. +

    Please check that special characters in your password are properly escaped.

    +
  6. +
  7. +

    Please check whether you are able to login to https://devon.s2-eu.capgemini.com and Nexus is up and running. If you cannot login, contact one of the main developers.

    +
  8. +
+
+
+
+
+
+

Testing changes on the CobiGen-core

+
+
+

To test changes implemented on the cobigen-core you have to follow the next process:

+
+
+
    +
  1. +

    Open a console and step into cobigen/cobigen-core-parent. Run mvn clean install and remember the jar version you have just installed.

    +
  2. +
  3. +

    On Eclipse, go to cobigen/cobigen-eclipse pom.xml and change the <version> of your cobigen-core.

    +
  4. +
  5. +

    Also check on the cobigen/cobigen-core-parent pom-xml that it is using the just installed version.

    +
  6. +
  7. +

    Open a console and step into cobigen/cobigen-eclipse. Run mvn clean package -Pp2-build-photon,p2-build-stable,p2-build-experimental.

    +
  8. +
  9. +

    On Eclipse, go to cobigen/cobigen-eclipse and double-click 'plugin.xml'. On the bottom, click on 'runtime' tab. On 'classpath', add a new library and choose the jars you have just installed.

    +
  10. +
  11. +

    Refresh on Eclipse, press F5 on the cobigen-eclipse inside Eclipse.

    +
  12. +
  13. +

    If you still see compilation errors: On Eclipse, right-click cobigen/cobigen-eclipse → Maven → Update projects.

    +
  14. +
+
+
+
+
+

Issues with workspace when Oomph automatic updates don’t work (especially for Indian colleagues)

+
+
+

Executing eclipse-cobigen-development.bat file will open Eclipse with all the projects automatically imported. Oomph creates 'Working Sets' and set 'Top Level Elements' pointing to that working set. For Countries where proxy restricts Oomph to execute, we see no projects imported into project explorer/Navigator. Rather than trying manual import which later can give build issues we should follow below solution. +Build Issues could be like:

+
+
+
+
[ERROR] Cannot resolve project dependencies:
+[ERROR]   You requested to install 'com.capgemini.cobigen-htmlplugin 0.0.0' but it could not be found
+[ERROR]
+[ERROR] See http://wiki.eclipse.org/Tycho/Dependency_Resolution_Troubleshooting for help.
+[ERROR] Cannot resolve dependencies of MavenProject: com.capgemini:com.capgemini.cobigen.eclipse.test:3.0.1 @
+
+
+
+

Solution

+
+

In Eclipse, you can click the small downward arrow in the upper right corner of the Navigator/Project Explorer view and go to 'Top Level Elements' and point them to 'Projects'. This should show all the projects inside Project Explorer View. Also, Each plugin should point to respective branch.

+
+
+
+
+
+

Issue when testing Eclipse plugin by Running as Eclipse Application.

+
+
+

Error message will be like:

+
+
+
+
1) Caused by: java.lang.ClassNotFoundException: An error occurred while automatically activating bundle com.devonfw.cobigen.eclipse
+2) org.osgi.framework.BundleException: Error starting module.
+3) org.eclipse.core.runtime.CoreException: Plug-in com.devonfw.cobigen.eclipse was unable to load class com.devonfw.cobigen.eclipse.workbenchcontrol.handler.XXXXHandler.
+
+
+
+

Solution

+
+

Delete or rename the runtime-EclipseApplication inside the workspaces folder. Re-run and try setting up the workspace in that environment again. It should work!

+
+
+
+
+
+
1) Caused by: java.lang.ClassNotFoundException: An error occurred while automatically activating bundle com.devonfw.cobigen.eclipse
+2) org.osgi.framework.BundleException: Error starting module.
+3) org.eclipse.core.runtime.CoreException: Plug-in com.devonfw.cobigen.eclipse was unable to load class com.devonfw.cobigen.eclipse.workbenchcontrol.handler.XXXXHandler.
+
+
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/howto-devonfw-CobiGen-OpenAPI.html b/docs/devonfw.github.io/1.0/cobigen.wiki/howto-devonfw-CobiGen-OpenAPI.html new file mode 100644 index 00000000..3981f11c --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/howto-devonfw-CobiGen-OpenAPI.html @@ -0,0 +1,937 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + + +
+

End to End POC Code generation using OpenAPI +This article helps to create a sample application using CobiGen.

+
+
+

Prerequisites

+
+
+

Download and install the devonfw IDE here.

+
+
+
+
+

Steps to create a Sample Project using Cobigen

+
+
+

The HOW_TO is divided in 2 parts:

+
+
+
    +
  1. +

    BE-Back End generator (DB + DAO + services) – CONTRACT FIRST APPROACH

    +
  2. +
  3. +

    FE-Front End generator (Web App Angular + Ionic App) – CONTRACT FIRST APPROACH

    +
  4. +
+
+
+

cobigen ionic code generation

+
+
+

So, ready to go! We’re going to start from the BE part …

+
+
+

Back End

+
+

run \devonfw-ide-scripts-3.2.4\eclipse-main.bat

+
+
+

It will open eclipse

+
+
+

create a project using below command from the command prompt

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

Import the project to eclipse as maven project +eclipse devon

+
+
+

Click FINISH

+
+
+

Now We have the following 4 projects.

+
+
+

eclipse package explorer

+
+
+

BEFORE to start to create an Entity class, remember to create the tables !

+
+
+
    +
  1. +

    Create a new SQL file (i.e: V0005__CreateTables_ItaPoc.sql) inside jwtsample-core and insert the following script:

    +
  2. +
+
+
+
+
CREATE TABLE EMPLOYEE (
+id BIGINT auto_increment, modificationCounter INTEGER NOT NULL,
+employeeid BIGINT auto_increment,
+name VARCHAR(255),
+surname VARCHAR(255),
+email VARCHAR(255),
+PRIMARY KEY (employeeid)
+);
+
+
+
+

WARNING: please note that there are 2 underscores in the name!

+
+
+

sql file

+
+
+
    +
  1. +

    Now create another SQL file (i.e: V0006__PopulateTables-ItaPoc.sql) and add following script about the INSERT in order to populate the table created before

    +
  2. +
+
+
+

WARNING: please note that there are 2 underscores in the name!

+
+
+
+
INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (1, 1, 1, 'Stefano','Rossini','stefano.rossini@capgemini.com');
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (2, 2, 2, 'Angelo','Muresu', 'angelo.muresu@capgemini.com');
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (3, 3, 3, 'Jaime','Gonzalez', 'jaime.diaz-gonzalez@capgemini.com');
+
+
+
+

sql insert

+
+
+

Let’s create the yml file for the code generation

+
+
+
    +
  1. +

    Now create a new file devonfw.yml in the root of your core folder. This will be our OpenAPI contract, like shown below. Then, copy the contents of this file into your OpenAPI. It defines some REST service endpoints and a EmployeeEntity with its properties defined.

    +
  2. +
+
+
+

Important: if you want to know how to write an OpenAPI contract compatible with CobiGen, please read this tutorial.

+
+
+

Swagger at OASP4J Project

+
+
+
    +
  1. +

    Right click devonfw.yml. CobiGen → Generate

    +
  2. +
+
+
+

It will ask you to download the templates, click on update:

+
+
+

cobigen generate

+
+
+

It will automatically download the latest version of CobiGen_Templates.

+
+
+

Attention: If you want to adapt the CobiGen_Templates (normally this is not necessary), you will find at the end of this document a tutorial on how to import them and adapt them!

+
+
+
    +
  1. +

    Click on all the option selected as below:

    +
  2. +
+
+
+

cobigen option selection

+
+
+
    +
  1. +

    Click on finish. Below Screen would be seen. Click on continue

    +
  2. +
+
+
+

cobigen finish

+
+
+

The entire BE layer structure having CRUD operation methods will be auto generated.

+
+
+

Some classes will be generated on the api part (jwtsample-api), normally it will be interfaces, as shown below:

+
+
+

be layer

+
+
+

Some other classes will be generated on the core part (jwtsample-core), normally it will be implementations as shown below:

+
+
+

core folder

+
+
+

BEFORE to generate the FE, please start the Tomcat server to check that BE Layer has been generated properly.

+
+
+

To start a server you just have to right click on SpringBootApp.java → Run As → Spring Boot App

+
+
+

Eclipse run as

+
+
+

Spring boot run

+
+
+

Spring boot run

+
+
+

BE DONE

+
+
+

Last but not least: We make a quick REST services test !

+
+
+

See in the application.properties the TCP Port and the PATH

+
+
+

application properties

+
+
+

Now compose the Rest service URL:

+
+
+

<server>/<app>/<rest service class path>/<service method path>

+
+
+
    +
  • +

    <server> refers to server with port no. (ie: localhost:8081)

    +
  • +
  • +

    <app> is in the application.properties (empty in our case, see above)

    +
  • +
  • +

    <rest service class path> refers to EmployeemanagementRestService: (i.e: /employeemanagement/v1)

    +
  • +
  • +

    <service method path>/employee/{id}  (i.e: for  getEmployee method)

    +
  • +
+
+
+

url mapping

+
+
+

URL of getEmployee for this example is:

+
+
+

For all employees

+
+
+
+
http://localhost:8081/services/rest/employeemanagement/v1/employee/search
+
+
+
+

For the specific employee

+
+
+
+
http://localhost:8081/services/rest/employeemanagement/v1/employee/1
+
+
+
+

Now download Postman to test the rest services.

+
+
+

Once done, you have to create a POST Request for the LOGIN and insert in the body the JSON containing the username and password waiter

+
+
+

postman

+
+
+

Once done with success (Status: 200 OK) …

+
+
+

postman

+
+
+

… We create a NEW POST Request and We copy the Authorization Bearer field (see above) and We paste it in the Token field (see below)

+
+
+

postman

+
+
+

and specify the JSON parameters for the pagination of the Request that We’re going to send:

+
+
+

postman

+
+
+

postman

+
+
+

Now you can click postman

+
+
+

Now you have to check that the response has Status: 200 OK and shows the below list of Employees

+
+
+

postman

+
+
+

Now that we have successfully tested the BE, it is time to create the FE!

+
+
+
+

Front End

+
+

Let’s start now with angular Web and then Ionic app.

+
+
+

Angular Web App

+
+
    +
  1. +

    To generate angular structure, download or clone *devon4ng-application-template* from

    +
    +
    +
    https://github.com/devonfw/devon4ng-application-template
    +
    +
    +
  2. +
+
+
+

devon dist folder

+
+
+
    +
  1. +

    Once done, right click on devonfw.yml again (the OpenAPI contract). CobiGen → Generate

    +
  2. +
  3. +

    Click on the selected options as seen in the screenshot:

    +
  4. +
+
+
+

eclipse generate

+
+
+
    +
  1. +

    Click on Finish

    +
  2. +
+
+
+

eclipse

+
+
+
    +
  1. +

    The entire ANGULAR structure has been auto generated. The generated code will be merged to the existing.

    +
  2. +
+
+
+

angular ee layer

+
+
+
    +
  1. +

    IMPORTANT now you have to add in the app-routing.module.ts file the next content, as a child of HomeComponent, in order to enable the route of the new generated component

    +
  2. +
+
+
+
+
,{
+path: 'employee',
+component: EmployeeGridComponent,
+canActivate: [AuthGuard],
+},
+
+
+
+

Following picture explain where to place the above content:

+
+
+

routes

+
+
+
    +
  1. +

    Open the command prompt and execute devon yarn install from the base folder, which would download all the required libraries.

    +
  2. +
+
+
+
    +
  1. +

    Check the file environment.ts if the server path is correct. (for production you will have to change also the environment.prod.ts file)

    +
  2. +
+
+
+

environment

+
+
+

In order to do that it’s important to look at the application.properties to see the values as PATH, TCP port etc …

+
+
+

configure

+
+
+

For example in this case the URL should be since the context path is empty the server URLS should be like:

+
+
+
+
export const environment = {
+production: false,
+restPathRoot: 'http://localhost:8081/',
+restServiceRoot: 'http://localhost:8081/services/rest/',
+security: 'jwt'
+};
+
+
+
+

Warning: REMEMBER to set the security field to jwt, if it is not configured already.

+
+
+
    +
  1. +

    Now run the *ng serve -o* command to run the Angular Application.

    +
  2. +
+
+
+

image44

+
+
+
    +
  1. +

    If the command execution is successful, the below screen will appear and it would be automatically redirected to the url:

    +
    +
    +
    http://localhost:4200/login
    +
    +
    +
  2. +
+
+
+

image45

+
+
+

WebApp DONE

+
+
+
+

Ionic Mobile App

+
+
    +
  1. +

    To generate Ionic structure, download or clone *devon4ng-application-template* from

    +
    +
    +
    https://github.com/devonfw/devon4ng-ionic-application-template
    +
    +
    +
  2. +
  3. +

    Once done, Right click on the devonfw.yml as you already did before in order to use CobiGen.

    +
  4. +
  5. +

    Click on the selected options as seen in the screenshot:

    +
  6. +
+
+
+

image46

+
+
+
    +
  1. +

    Click on Finish

    +
  2. +
  3. +

    The entire ionic structure will be auto generated.

    +
  4. +
+
+
+

image47

+
+
+
    +
  1. +

    Change the server url (with correct serve url) in environment.ts, environment.prod.ts and environment.android.ts files (i.e: itapoc\devon4ng-ionic-application-template\src\environments\).

    +
  2. +
+
+
+

The angular.json file inside the project has already a build configuration for android.

+
+
+

image48

+
+
+
    +
  1. +

    Run npm install in the root folder to download the dependencies

    +
  2. +
  3. +

    Run ionic serve

    +
  4. +
+
+
+

image49

+
+
+
    +
  1. +

    +
    +

    Once the execution is successful

    +
    +
  2. +
+
+
+

image50

+
+
+
    +
  • +

    Mobile App DONE*

    +
  • +
+
+
+

So: well done

+
+
+

Starting from an Entity class you’ve successfully generated the Back-End layer (REST, SOAP, DTO, Spring services, Hibernate DAO), the Angular Web App and the Ionic mobile App!

+
+
+

image51

+
+
+
Build APK
+
+

Since we’re going to create an apk, remember the following pre-conditions:

+
+
+ +
+
+
    +
  1. +

    Now, open cmd and type the path where your devon4ng-ionic-application-template project is present.

    +
  2. +
  3. +

    Run the following commands:

    +
    +
      +
    1. +

      npx cap init

      +
    2. +
    3. +

      ionic build --configuration=android

      +
    4. +
    5. +

      npx cap add android

      +
    6. +
    7. +

      npx cap copy

      +
    8. +
    9. +

      npx cap open android

      +
    10. +
    +
    +
  4. +
  5. +

    Build the APK using Android studio.

    +
  6. +
+
+
+

image52 +image53 +image54 +image55

+
+
+

You can find your apk file in

+
+
+

/devon4ng-ionic-application-template/android/app/build/outputs/apk/debug

+
+
+
+
+
+
+
+

Adapt CobiGen_Templates

+
+
+

After following this tutorial, you will have the CobiGen_Templates downloaded on your local machine. To import these templates you need to do the following:

+
+
+

Right click in any part of the package explorer, then click on CobiGen → Adapt templates

+
+
+

image56

+
+
+

Click Ok:

+
+
+

image57

+
+
+

Now the CobiGen_Templates project will be automatically imported into your workspace, as shown on the image below:

+
+
+

image58

+
+
+

image59

+
+
+

Now you just need to change the Java version of the project to JRE 1.8. Right click on the JRE system library, and then on Properties:

+
+
+

image60

+
+
+

Now change the version to Java 1.8 +image61

+
+
+

Now you have successfully imported the CobiGen templates. If you want to edit them, you will find them in the folder src/main/templates. For instance, the Java templates are located here:

+
+
+

image62

+
+
+

Now you can adapt the templates as much as you want. Documentation about this can be found on:

+
+
+
+
https://github.com/devonfw/tools-cobigen/wiki/Guide-to-the-Reader
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/howto-devonfw-CobiGen.html b/docs/devonfw.github.io/1.0/cobigen.wiki/howto-devonfw-CobiGen.html new file mode 100644 index 00000000..415f7bad --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/howto-devonfw-CobiGen.html @@ -0,0 +1,1089 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + + +
+

End to End POC Code generation using OpenAPI and Entity class +This article helps to create a sample application using CobiGen.

+
+
+

Prerequisites

+
+
+

Download and install the devonfw IDE here.

+
+
+
+
+

Steps to create a sample Project using Cobigen

+
+
+

The HOW_TO is divided in 2 parts:

+
+
+
    +
  1. +

    BE-Back End generator (DB + DAO + services) – CONTRACT FIRST APPROACH

    +
  2. +
  3. +

    FE-Front End generator (Web App Angular + Ionic App) – CONTRACT FIRST APPROACH

    +
  4. +
+
+
+

cobigen ionic code generation

+
+
+

So, ready to go! We’re going to start from the BE part …

+
+
+

Back End

+
+

create a project using below command from the command prompt

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

run +\devonfw-ide-scripts-<version>\eclipse-main.bat +to open eclipse

+
+
+

Import the project to eclipse as maven project +eclipse devon

+
+
+

Click FINISH

+
+
+

Now We have the following 4 projects.

+
+
+

eclipse package explorer

+
+
+

Remove the existing configure method from myapp-core com.example.domain.myapp.general.service.impl.config.BaseWebSecurityConfig and copy below security configuration code and paste.

+
+
+
+
@Override
+ public void configure(HttpSecurity http) throws Exception {
+   http.authorizeRequests().anyRequest().permitAll().and().csrf().disable()
+       .addFilterAfter(getSimpleRestAuthenticationFilter(), BasicAuthenticationFilter.class)
+       .addFilterAfter(getSimpleRestLogoutFilter(), LogoutFilter.class);
+   if (this.corsEnabled) {
+     http.addFilterBefore(getCorsFilter(), CsrfFilter.class);
+   }
+ }
+
+
+
+

Check resources/config/application.properties to see the values as PATH, TCP port etc … +Also make sure the below property is present.

+
+
+

security.cors.enabled=true

+
+
+

configure

+
+
+

BEFORE to start to create an Entity class, remember to create the tables !

+
+
+
    +
  • +

    Create new SQL file inside myapp-core resources/db/migration/specific/h2 and insert the following script:

    +
  • +
+
+
+

V0005__CreateTables_ItaPoc.sql (Please note 2 underscores after V0005)

+
+
+
+
CREATE TABLE EMPLOYEE (
+id BIGINT auto_increment,
+modificationCounter INT NOT NULL,
+employeeid BIGINT auto_increment,
+name VARCHAR(255),
+surname VARCHAR(255),
+email VARCHAR(255),
+PRIMARY KEY (employeeid)
+);
+
+
+
+

sql file

+
+
+
    +
  • +

    Now create another SQL file (i.e: V0006__PopulateTables-ItaPoc.sql ) and add following script about the INSERT in order to populate the table created before

    +
  • +
+
+
+
+
INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (1, 1, 1, 'Stefano','Rossini','stefano.rossini@capgemini.com');
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (2, 2, 2, 'Angelo','Muresu', 'angelo.muresu@capgemini.com');
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (3, 3, 3, 'Jaime','Gonzalez', 'jaime.diaz-gonzalez@capgemini.com');
+
+
+
+

sql insert

+
+
+
+
+
+

Back end Code Generation

+
+
+

Back end code can be generated from either of the below 2 methods

+
+
+
    +
  1. +

    OpenAPI .yml file

    +
  2. +
  3. +

    java Entity class.

    +
  4. +
+
+
+

Prepare OpenAPI .yml file

+
+
+

Create a .yml file which satisfies the OpenAPI standards or check the sample file devonfw_employee.yml and prepare.

+
+
+

Important: if you want to know how to write an OpenAPI contract compatible with CobiGen, please read this tutorial.

+
+
+

Swagger at OASP4J Project

+
+
+

Right click devonfw.yml. CobiGen → Generate

+
+
+

It will ask you to download the templates, click on update:

+
+
+

Prepare Entity class

+
+
+

Create a package com.example.domain.myapp.employeemanagement.dataaccess.api

+
+
+

under the folder myapp-core. Note: It is important to follow this naming convention for CobiGen to work properly.

+
+
+

package

+
+
+

Now create a JPA Entity class in this package

+
+
+
+
import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Column;
+@Entity
+@javax.persistence.Table(name = "EMPLOYEE")
+public class EmployeeEntity {
+ @Column(name = "EMPLOYEEID")
+ @GeneratedValue(strategy = GenerationType.IDENTITY)
+ private Long employeeId;
+ @Column(name = "NAME")
+ private String name;
+ @Column(name = "SURNAME")
+ private String surname;
+ @Column(name = "EMAIL")
+ private String email;
+}
+
+
+
+

then generate getters and setters for all attributes

+
+
+
+
Use Cobigen to generate code. Right click on EmployeeEntity. CobiGen -> Generate
+
+
+
+

It will ask you to download the templates, click on update:

+
+
+

Code Generation

+
+

cobigen generate

+
+
+

It will automatically download the latest version of CobiGen_Templates.

+
+
+

Attention: If you want to adapt the CobiGen_Templates (normally this is not necessary), you will find at the end of this document a tutorial on how to import them and adapt them!

+
+
+
    +
  • +

    Click on all the option selected as below:

    +
  • +
+
+
+

cobigen option selection

+
+
+
    +
  • +

    Click on finish. Below Screen would be seen. Click on continue

    +
  • +
+
+
+

cobigen finish

+
+
+

The entire BE layer structure having CRUD operation methods will be auto generated.

+
+
+

Some classes will be generated on the api part (jwtsample-api), normally it will be interfaces, as shown below:

+
+
+

be layer

+
+
+

Some other classes will be generated on the core part (jwtsample-core), those are the implementations as shown below:

+
+
+

core folder

+
+
+

BEFORE generating the FE, please start the Tomcat server to check that the BE Layer has been generated properly.

+
+
+

To start a server you just have to right click on SpringBootApp.java → Run As → Spring Boot App

+
+
+

Eclipse run as

+
+
+

Spring boot run

+
+
+

Spring boot run

+
+
+

BE DONE

+
+
+
+

Test the Services

+
+

Download Postman to test the rest services.

+
+
+

Get the port and path from application.properties

+
+
+

application properties

+
+
+

Now compose the Rest service URL:

+
+
+

<server>/<app>/<rest service class path>/<service method path>

+
+
+
    +
  • +

    <server> refers to server with port no. (ie: localhost:8081)

    +
  • +
  • +

    <app> is in the application.properties (empty in our case, see above)

    +
  • +
  • +

    <rest service class path> refers to EmployeemanagementRestService: (i.e: /employeemanagement/v1)

    +
  • +
  • +

    <service method path>/employee/{id}  (i.e: for  getEmployee method)

    +
  • +
+
+
+

url mapping

+
+
+

URL of getEmployee for this example is:

+
+
+

For all employees

+
+
+
+
POST
+http://localhost:8081/services/rest/employeemanagement/v1/employee/search
+Content-Type    application/json
+{"name":"Angelo"}
+
+
+
+

For the specific employee

+
+
+
+
GET
+http://localhost:8081/services/rest/employeemanagement/v1/employee/1
+
+
+
+

In postman, create a POST Request for the LOGIN and insert in the body the JSON containing the username and password admin

+
+
+

Login Test using postman

+
+
+
+
    POST
+    http://localhost:8081/services/rest/login
+    Content-Type    application/json
+    {
+    "j_username":"admin",
+    "j_password":"admin"
+     }
+
+
+
+
    +
  • +

    Set the header

    +
  • +
+
+
+

Send will return 200 OK as response.

+
+
+

postman

+
+
+

postman

+
+
+

… We create a NEW POST Request and We copy the Authorization Bearer field (see above) and We paste it in the Token field (see below)

+
+
+

postman

+
+
+

and specify the JSON parameters for the pagination of the request that we’re going to send:

+
+
+

postman

+
+
+

postman

+
+
+

Now you can click postman

+
+
+

Now you have to check that the response has Status: 200 OK and shows the below list of Employees

+
+
+

postman

+
+
+

Now that we have successfully tested the BE, it is time to create the FE!

+
+
+
+

Front End

+
+

Let’s start now with angular Web and then Ionic app.

+
+
+

Angular Web App

+
+
    +
  • +

    To generate angular structure, download or clone devon4ng-application-template from

    +
    +
    +
    https://github.com/devonfw/devon4ng-application-template
    +
    +
    +
  • +
+
+
+

devon dist folder

+
+
+

Place the files inside workspace under the folder devon4ng-application-template.

+
+
+

eg: C:\projects\devonfw-ide-scripts-2020.08.002\workspaces\main\devon4ng-application-template

+
+
+

In Devon IDE, right click on EmployeeEto.java file present under the package com.example.domain.myapp.employeemanagement.logic.api.to

+
+
+

For OpenAPI, right click on devonfw.yml again.

+
+
+
    +
  • +

    CobiGen → Generate

    +
  • +
+
+
+

Click on the selected options as seen in the screenshot:

+
+
+

eclipse generate

+
+
+
    +
  • +

    Click on Finish

    +
  • +
+
+
+

eclipse

+
+
+
    +
  • +

    The entire ANGULAR structure has been auto generated. The generated code will be merged to the existing.

    +
  • +
+
+
+

angular ee layer

+
+
+
    +
  • +

    IMPORTANT now you have to add in the app-routing.module.ts file the next content, as a child of HomeComponent, in order to enable the route of the new generated component

    +
  • +
+
+
+
+
{
+      path: 'employee',
+      loadChildren: () =>
+          import('./employee/employee.module').then(
+              m => m.EmployeeModule,
+          )
+}
+
+
+
+

The following picture explains where to place the above content; also remove any duplicate code if present.

+
+
+

routes

+
+
+
    +
  • +

    Add newly generated module to the left menu. +Modify the file app\layout\nav-bar\nav-bar.component.html, add the below code.

    +
  • +
+
+
+
+
<a id="employee" mat-list-item [routerLink]="['./employee']" (click)="close()">
+       <mat-icon matListAvatar>
+        grid_on
+       </mat-icon> <h3 matLine> {{ 'employeemanagement.Employee.navData' | transloco }} </h3>
+       <p matLine class="desc"> {{ 'employeemanagement.Employee.navDataSub' | transloco }} </p>
+</a>
+
+
+
+

nav bar

+
+
+
    +
  • +

    Check the file environment.ts if the server path is correct. (for production you will have to change also the environment.prod.ts file)

    +
  • +
+
+
+

environment

+
+
+

For example in this case the URL should be since the context path is empty the server URLS should be like:

+
+
+
+
export const environment = {
+production: false,
+restPathRoot: 'http://localhost:8081/',
+restServiceRoot: 'http://localhost:8081/services/rest/',
+security: 'jwt'
+};
+
+
+
+

Warning: REMEMBER to set the security field to jwt, if it is not configured already.

+
+
+
    +
  • +

    Open the command prompt and execute below command from the base folder, which would download all the required libraries..

    +
  • +
+
+
+
+
devon yarn install
+
+
+
+

Run the below command for the front end.

+
+
+
+
devon ng serve
+
+
+
+

image44

+
+
+
    +
  • +

    If the command execution is successful, the below screen will appear and it would be automatically redirected to the url:

    +
    +
    +
    http://localhost:4200/login
    +
    +
    +
  • +
+
+
+

image45

+
+
+

WebApp Done

+
+
+
+

Ionic Mobile App

+
+
    +
  • +

    To generate Ionic structure, download or clone *devon4ng-ionic-application-template* from

    +
    +
    +
    https://github.com/devonfw/devon4ng-ionic-application-template
    +
    +
    +
  • +
+
+
+

right click on EmployeeEto.java file present under the package com.devonfw.poc.employeemanagement.logic.api.to

+
+
+

For OpenAPI, Right click on the devonfw.yml as you already did before in order to use CobiGen.

+
+
+
    +
  • +

    Click on the selected options as seen in the screenshot:

    +
  • +
+
+
+

image46

+
+
+
    +
  • +

    Click on Finish

    +
  • +
  • +

    The entire ionic structure will be auto generated.

    +
  • +
+
+
+

image47

+
+
+
    +
  • +

    Change the server url (with correct serve url) in environment.ts, environment.prod.ts and environment.android.ts files (i.e: itapoc\devon4ng-ionic-application-template\src\environments\).

    +
  • +
+
+
+

The angular.json file inside the project has already a build configuration for android.

+
+
+

image48

+
+
+
    +
  • +

    Run npm install in the root folder to download the dependencies

    +
  • +
  • +

    Run ionic serve

    +
  • +
+
+
+

image49

+
+
+
    +
  1. +

    +
    +

    Once the execution is successful

    +
    +
  2. +
+
+
+

image50

+
+
+
    +
  • +

    Mobile App Done*

    +
  • +
+
+
+

So: well done

+
+
+

Starting from an Entity class you’ve successfully generated the Back-End layer (REST, SOAP, DTO, Spring services, Hibernate DAO), the Angular Web App and the Ionic mobile App!

+
+
+

image51

+
+
+
Build APK
+
+

Since we’re going to create an APK, remember the following pre-conditions:

+
+
+ +
+
+
    +
  1. +

    Now, open cmd and type the path where your devon4ng-ionic-application-template project is present.

    +
  2. +
  3. +

    Run the following commands:

    +
    +
      +
    1. +

      npx cap init

      +
    2. +
    3. +

      ionic build --configuration=android

      +
    4. +
    5. +

      npx cap add android

      +
    6. +
    7. +

      npx cap copy

      +
    8. +
    9. +

      npx cap open android

      +
    10. +
    +
    +
  4. +
  5. +

    Build the APK using Android studio.

    +
  6. +
+
+
+

image52 +image53 +image54 +image55

+
+
+

You can find your apk file in

+
+
+

/devon4ng-ionic-application-template/android/app/build/outputs/apk/debug

+
+
+
+
+
+
+
+

Adapt CobiGen_Templates

+
+
+

After following this tutorial, you will have the CobiGen_Templates downloaded on your local machine. To import these templates you need to do the following:

+
+
+

Right click in any part of the package explorer, then click on CobiGen → Adapt templates

+
+
+

image56

+
+
+

Click Ok:

+
+
+

image57

+
+
+

Now the CobiGen_Templates project will be automatically imported into your workspace, as shown on the image below:

+
+
+

image58

+
+
+

image59

+
+
+

Now you just need to change the Java version of the project to JRE 1.8. Right click on the JRE system library, and then on Properties:

+
+
+

image60

+
+
+

Now change the version to Java 11

+
+
+

Now you have successfully imported the CobiGen templates. If you want to edit them, you will find them in the folder src/main/templates. For instance, the Java templates are located here:

+
+
+

image62

+
+
+

Now you can adapt the templates as much as you want. Documentation about this can be found on:

+
+
+
+
https://github.com/devonfw/tools-cobigen/wiki/Guide-to-the-Reader
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/howto-devonfw-adapt_template.html b/docs/devonfw.github.io/1.0/cobigen.wiki/howto-devonfw-adapt_template.html new file mode 100644 index 00000000..2a4a8460 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/howto-devonfw-adapt_template.html @@ -0,0 +1,332 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+
+ +
+

==Adapt Templates from CobiGen

+
+
+
+
+

Adapt CobiGen_Templates

+
+
+

After following this tutorial, you will have the CobiGen_Templates downloaded on your local machine. To import these templates you need to do the following:

+
+
+

Right click in any part of the package explorer, then click on CobiGen → Adapt templates

+
+
+

image56

+
+
+

Click OK:

+
+
+

image57

+
+
+

Now the CobiGen_Templates project will be automatically imported into your workspace, as shown on the image below:

+
+
+

image58

+
+
+

image59

+
+
+

Now you just need to change the Java version of the project to JRE 1.8. Right click on the JRE system library, and then on Properties:

+
+
+

image60

+
+
+

Now change the version to Java 1.8 +image61

+
+
+

Now you have successfully imported the CobiGen templates. If you want to edit them, you will find them in the folder src/main/templates. For instance, the Java templates are located here:

+
+
+

image62

+
+
+

Now you can adapt the templates as much as you want. Documentation about this can be found on:

+
+
+
+
https://github.com/devonfw/tools-cobigen/wiki/Guide-to-the-Reader
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/howto-devonfw-ide-CobiGen-PoC-E2E.html b/docs/devonfw.github.io/1.0/cobigen.wiki/howto-devonfw-ide-CobiGen-PoC-E2E.html new file mode 100644 index 00000000..0496b303 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/howto-devonfw-ide-CobiGen-PoC-E2E.html @@ -0,0 +1,946 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==End to End POC Code generation using Entity class +This article helps to create a sample application using cobigen.

+
+
+

Prerequisites

+
+
+

Download and install the devonfw IDE here,

+
+
+
+
+

Steps to create a Sample Project using Cobigen

+
+
+

The HOW_TO is divided into 2 parts:

+
+
+
    +
  1. +

    BE-Back End generator (DB + DAO + services) – CONTRACT FIRST APPROACH

    +
  2. +
  3. +

    FE-Front End generator (Web App Angular + Ionic App) – CONTRACT FIRST APPROACH

    +
  4. +
+
+
+

cobigen ionic code generation

+
+
+

So, ready to go! We’re going to start from the BE part …

+
+
+

Back End

+
+

run \devonfw-ide-scripts-3.2.4\eclipse-main.bat

+
+
+

It will open eclipse

+
+
+

create a project using below command from the command prompt

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

Import the project to eclipse as maven project +eclipse devon

+
+
+

Click FINISH

+
+
+

Now We have the following 4 projects.

+
+
+

eclipse package explorer

+
+
+

BEFORE starting to create an Entity class, remember to create the tables!

+
+
+
    +
  1. +

    Create a new SQL file (i.e: V0005__CreateTables-ItaPoc.sql) inside myapp-core and insert the following script:

    +
  2. +
+
+
+
+
CREATE TABLE EMPLOYEE (
+id BIGINT auto_increment, modificationCounter INTEGER NOT NULL,
+employeeid BIGINT auto_increment,
+name VARCHAR(255),
+surname VARCHAR(255),
+email VARCHAR(255),
+PRIMARY KEY (employeeid)
+);
+
+
+
+

WARNING: please note that there are 2 underscores in the name!

+
+
+

sql file

+
+
+
    +
  1. +

    Now create another SQL file (i.e: V0006__PopulateTables-ItaPoc.sql) and add following script about the INSERT in order to populate the table created before

    +
  2. +
+
+
+

WARNING: please note that there are 2 underscores in the name!

+
+
+
+
INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (1, 1, 1, 'Albert','Miller','albert.miller@capgemini.com');
+INSERT INTO  EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (2, 2, 2, 'Wills','Smith', 'wills.smith@capgemini.com');
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (3, 3, 3, 'Jaime','Thomas', 'jaime.thomas@capgemini.com');
+
+
+
+

sql insert

+
+
+

Let’s create the Entity Class for the code generation

+
+
+
    +
  1. +

    Create a package employeemanagement.dataaccess.api under the folder myapp-core. Note: It is important to follow this naming convention for CobiGen to work properly.

    +
  2. +
+
+
+

package

+
+
+
    +
  1. +

    Now create a JPA Entity class in this package

    +
  2. +
+
+
+
+
import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Column;
+@Entity
+@javax.persistence.Table(name = "EMPLOYEE")
+public class EmployeeEntity {
+  @Column(name = "EMPLOYEEID")
+  @GeneratedValue(strategy = GenerationType.IDENTITY)
+  private Long employeeId;
+  @Column(name = "NAME")
+  private String name;
+  @Column(name = "SURNAME")
+  private String surname;
+  @Column(name = "EMAIL")
+  private String email;
+}
+
+
+
+

then generate getters and setters for all attributes …

+
+
+
    +
  1. +

    Use Cobigen to generate code. Right click on EmployeeEntity. CobiGen → Generate

    +
  2. +
+
+
+

It will ask you to download the templates, click on update:

+
+
+

cobigen generate

+
+
+

It will automatically download the latest version of CobiGen_Templates.

+
+
+

Attention: If you want to adapt the CobiGen_Templates (normally this is not necessary), you will find at the end of this document a tutorial on how to import them and adapt them!

+
+
+
    +
  1. +

    Click on all the option selected as below:

    +
  2. +
+
+
+

cobigen option selection

+
+
+
    +
  1. +

    Click on finish. Below Screen would be seen. Click on continue

    +
  2. +
+
+
+

cobigen finish

+
+
+

The entire BE layer structure having CRUD operation methods will be auto generated.

+
+
+

Some classes will be generated on the api part (myapp-api), normally it will be interfaces, as shown below:

+
+
+

be layer

+
+
+

Some other classes will be generated on the core part (myapp-core), normally it will be implementations as shown below:

+
+
+

core folder

+
+
+

BEFORE generating the FE, please start the Tomcat server to check that the BE Layer has been generated properly.

+
+
+

To start a server you just have to right click on SpringBootApp.java → Run As → Spring Boot App

+
+
+

Eclipse run as

+
+
+

Spring boot run

+
+
+

Spring boot run

+
+
+

BE DONE

+
+
+

Last but not least: We make a quick REST services test !

+
+
+

See in the application.properties the TCP Port and the PATH

+
+
+

application properties

+
+
+

Now compose the Rest service URL:

+
+
+

<server>/<app>/<rest service class path>/<service method path>

+
+
+
    +
  • +

    <server> refers to server with port no. (ie: localhost:8081)

    +
  • +
  • +

    <app> is in the application.properties (empty in our case, see above)

    +
  • +
  • +

    <rest service class path> refers to EmployeemanagementRestService: (i.e: /employeemanagement/v1)

    +
  • +
  • +

    <service method path>/employee/{id} (i.e: for getEmployee method)

    +
  • +
+
+
+

url mapping

+
+
+

URL of getEmployee for this example is:

+
+
+

for all employees

+
+
+
+
http://localhost:8081/services/rest/employeemanagement/v1/employee/search
+
+
+
+

for the specific employee

+
+
+
+
http://localhost:8081/services/rest/employeemanagement/v1/employee/1
+
+
+
+

Now download Postman to test the rest services.

+
+
+

Once done, you have to create a POST Request for the LOGIN and insert in the body the JSON containing the username and password waiter

+
+
+

postman

+
+
+

Once done with success (Status: 200 OK) …

+
+
+

… We create a NEW GET Request in order to get one employee

+
+
+

postman

+
+
+

Now you can click postman

+
+
+

Now you have to check that the response has Status: 200 OK and shows the below Employee

+
+
+

postman

+
+
+

Now that we have successfully tested the BE, it is time to create the FE!

+
+
+
+

Front End

+
+

Let’s start now with angular Web and then Ionic app.

+
+
+

Angular Web App

+
+
    +
  1. +

    To generate angular structure, download or clone devon4ng-application-template from

    +
    +
    +
    https://github.com/devonfw/devon4ng-application-template
    +
    +
    +
  2. +
+
+
+

devon dist folder

+
+
+
    +
  1. +

    Once done, right click on EmployeeEto.java file present under the package com.devonfw.poc.employeemanagement.logic.api.to

    +
  2. +
+
+
+

eclipse generate

+
+
+
    +
  1. +

    Click on Finish

    +
  2. +
+
+
+

eclipse

+
+
+
    +
  1. +

    The entire ANGULAR structure has been auto generated. The generated code will be merged to the existing.

    +
  2. +
+
+
+

angular ee layer

+
+
+
    +
  1. +

    IMPORTANT now you have to add in the app-routing.module.ts file the next content, as a child of HomeComponent, in order to enable the route of the new generated component

    +
  2. +
+
+
+
+
,\{
+path: 'employee',
+component: EmployeeGridComponent,
+canActivate: [AuthGuard],
+},
+
+
+
+

The following picture explains where to place the above content:

+
+
+

routes

+
+
+
    +
  1. +

    Open the command prompt and execute devon yarn install from the base folder, which would download all the required libraries..

    +
  2. +
+
+
+
    +
  1. +

    Check the file environment.ts if the server path is correct. (for production you will have to change also the environment.prod.ts file)

    +
  2. +
+
+
+

environment

+
+
+

In order to do that it’s important to look at the application.properties to see values such as the PATH, TCP port, etc.

+
+
+

configure

+
+
+

For example in this case the URL should be since the context path is empty the server URLS should be like:

+
+
+
+
export const environment = {
+production: false,
+restPathRoot: 'http://localhost:8081/',
+restServiceRoot: 'http://localhost:8081/services/rest/',
+security: 'jwt'
+};
+
+
+
+

Warning: REMEMBER to set the security field to jwt, if it is not configured already.

+
+
+
    +
  1. +

    Now run the ng serve -o command to run the Angular Application.

    +
  2. +
+
+
+

image44

+
+
+
    +
  1. +

    If the command execution is successful, the below screen will appear and it would be automatically redirected to the url:

    +
    +
    +
    http://localhost:4200/login
    +
    +
    +
  2. +
+
+
+

image45

+
+
+

WebApp DONE

+
+
+
+

Ionic Mobile App

+
+
    +
  1. +

    To generate Ionic structure, download or clone devon4ng-ionic-application-template from

    +
    +
    +
    https://github.com/devonfw/devon4ng-ionic-application-template
    +
    +
    +
  2. +
  3. +

    Once done, Right click on the EmployeeEto as you already did before in order to use CobiGen.

    +
  4. +
  5. +

    Click on the selected options as seen in the screenshot:

    +
  6. +
+
+
+

image46

+
+
+
    +
  1. +

    Click on Finish

    +
  2. +
  3. +

    The entire ionic structure will be auto generated.

    +
  4. +
+
+
+

image47

+
+
+
    +
  1. +

    Change the server url (with correct serve url) in environment.ts, environment.prod.ts and environment.android.ts files (i.e: itapoc\devon4ng-ionic-application-template\src\environments\).

    +
  2. +
+
+
+

The angular.json file inside the project has already a build configuration for android.

+
+
+

image48

+
+
+
    +
  1. +

    Run npm install in the root folder to download the dependencies

    +
  2. +
  3. +

    Run ionic serve

    +
  4. +
+
+
+

image49

+
+
+
    +
  1. +

    +
    +

    Once the execution is successful

    +
    +
  2. +
+
+
+

image50

+
+
+
    +
  • +

    Mobile App DONE*

    +
  • +
+
+
+

So: well done

+
+
+

Starting from an Entity class you’ve successfully generated the Back-End layer (REST, SOAP, DTO, Spring services, Hibernate DAO), the Angular Web App and the Ionic mobile App!

+
+
+

image51

+
+
+
Build APK
+
+

Since we’re going to create an APK, remember the following pre-conditions:

+
+
+ +
+
+
    +
  1. +

    Now, open cmd and type the path where your devon4ng-ionic-application-template project is present.

    +
  2. +
  3. +

    Run the following commands:

    +
    +
      +
    1. +

      npx cap init

      +
    2. +
    3. +

      ionic build --configuration=android

      +
    4. +
    5. +

      npx cap add android

      +
    6. +
    7. +

      npx cap copy

      +
    8. +
    9. +

      npx cap open android

      +
    10. +
    +
    +
  4. +
  5. +

    Build the APK using Android studio.

    +
  6. +
+
+
+

image52 +image53 +image54 +image55

+
+
+

You can find your apk file in

+
+
+

/devon4ng-ionic-application-template/android/app/build/outputs/apk/debug

+
+
+
+
+
+
+
+

Adapt CobiGen_Templates

+
+
+

After following this tutorial, you will have the CobiGen_Templates downloaded on your local machine. To import these templates you need to do the following:

+
+
+

Right click in any part of the package explorer, then click on CobiGen → Adapt templates

+
+
+

image56

+
+
+

Click Ok:

+
+
+

image57

+
+
+

Now the CobiGen_Templates project will be automatically imported into your workspace, as shown on the image below:

+
+
+

image58

+
+
+

image59

+
+
+

Now you just need to change the Java version of the project to JRE 1.8. Right click on the JRE system library, and then on Properties:

+
+
+

image60

+
+
+

Now change the version to Java 1.8 +image61

+
+
+

Now you have successfully imported the CobiGen templates. If you want to edit them, you will find them in the folder src/main/templates. For instance, the Java templates are located here:

+
+
+

image62

+
+
+

Now you can adapt the templates as much as you want. Documentation about this can be found on:

+
+
+
+
https://github.com/devonfw/tools-cobigen/wiki/Guide-to-the-Reader
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/howto_Cobigen-CLI-generation.html b/docs/devonfw.github.io/1.0/cobigen.wiki/howto_Cobigen-CLI-generation.html new file mode 100644 index 00000000..4644a7ab --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/howto_Cobigen-CLI-generation.html @@ -0,0 +1,449 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==CobiGen CLI

+
+
+

The command line interface (CLI) for CobiGen enables the generation of code using few commands. This feature allows us to decouple CobiGen from Eclipse.

+
+
+

Install CobiGen CLI

+
+
+

In order to install the CobiGen CLI you will need to use the devonfw/ide. In a console run devon cobigen.

+
+
+
+
+

Commands and options

+
+
+

Using the following command and option you will be able to customize your generation as follows:

+
+
+
    +
  • +

    cobigen, cg: Main entry point of the CLI. If no arguments are passed, man page will be printed.

    +
  • +
  • +

    [generate, g]: Command used for code generation.

    +
    +
      +
    • +

      InputGlob: Glob pattern of the input file or the whole path of the input file from which the code will be generated.

      +
    • +
    • +

      < --increment, -i > : Specifies an increment ID to be generated. You can also search increments by name and CobiGen will output the resultant list. If an exact match found, code generation will happen.

      +
    • +
    • +

      < --template, -t > : specifies a template ID to be generated. You can also search templates by name and CobiGen will output the resultant list.

      +
    • +
    • +

      < --outputRootPath, -out >: The project file path in which you want to generate your code. If no output path is given, CobiGen will use the project of your input file.

      +
    • +
    +
    +
  • +
  • +

    [adapt-templates, a]: Generates a new templates folder next to the CobiGen CLI and stores its location inside a configuration file. After executing this command, the CLI will attempt to use the specified Templates folder.

    +
  • +
  • +

    < --verbose, -v > : Prints debug information, verbose log.

    +
  • +
  • +

    < --help, -h > : Prints man page.

    +
  • +
  • +

    < update, u> : This command compares the artificial pom plug-in versions with the latest versions available on Maven Central, and the user can update any outdated plug-in versions.

    +
  • +
+
+
+
+
+

CLI Execution steps:

+
+
+

CobiGen CLI is installed inside your devonfw distribution. In order to execute it follow the next steps:

+
+
+
    +
  1. +

    Run console.bat, this will open a console.

    +
  2. +
  3. +

    Execute cobigen or cg and the man page should be printed.

    +
  4. +
  5. +

    Use a valid CobiGen input file and run cobigen generate <pathToInputFile>. Note: On the first execution of the CLI, CobiGen will download all the needed dependencies, please be patient.

    +
  6. +
  7. +

    A list of increments will be printed so that you can start the generation.

    +
  8. +
+
+
+

Preview of the man page for generate command:

+
+
+
+Generation path +
+
+
+
+
+

Examples

+
+
+

A selection of commands that you can use with the CLI:

+
+
+
    +
  • +

    cobigen generate foo\bar\EmployeeEntity.java: As no output path has been defined, CobiGen will try to find the pom.xml of the current project in order to set the generation root path.

    +
  • +
  • +

    cobigen generate foo\bar\*.java --out other\project: Will retrieve all the Java files on that input folder and generate the code on the path specified by --out.

    +
  • +
  • +

    cg g foo\bar\webServices.yml --increment TO: Performs a string search using TO and will print the closest increments like in the following image:

    +
  • +
+
+
+
+Generation path +
+
+
+
    +
  • +

    cg g foo\bar\webServices.yml -i 1,4,6: Directly generates increments with IDs 1, 4 and 6. CobiGen will not request you any other input.

    +
  • +
  • +

    cg a: Downloads the latest CobiGen_Templates and unpacks them next to the CLI. CobiGen will from now on use these unpacked Templates for generation.

    +
  • +
  • +

    cg a -cl C:\my\custom\location: Downloads the latest CobiGen_Templates and unpacks them in C:\my\custom\location. CobiGen will from now on use these unpacked Templates for generation.

    +
  • +
+
+
+
+
+

CLI update command

+
+
+

Example of Update Command :

+
+
+
+Generation path +
+
+
+

Select the plug-ins which you want to update like below :

+
+
+
+Generation path +
+
+
+
+
+

CLI custom templates

+
+
+

To use custom templates, it’s necessary to set up a custom configuration path as described here.

+
+
+
+
+

Troubleshooting

+
+
+

When generating code from a Java file, CobiGen makes use of Java reflection for generating templates. In order to do that, the CLI needs to find the compiled source code of your project.

+
+
+

If you find an error like Compiled class foo\bar\EmployeeEntity.java has not been found, it means you need to run mvn clean install on the input project so that a new target folder gets created with the needed compiled sources.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/howto_EA-client-generation.html b/docs/devonfw.github.io/1.0/cobigen.wiki/howto_EA-client-generation.html new file mode 100644 index 00000000..caf87e76 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/howto_EA-client-generation.html @@ -0,0 +1,353 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

==Enterprise Architect client generation

+
+
+

We are going to show you how to generate source code from an Enterprise Architect diagram +using CobiGen.

+
+
+

Prerequisites

+
+
+

If CobiGen_Templates is not already imported into your workspace, follow the next steps:

+
+
+
    +
  • +

    Click on the Eclipse’s menu File > Import > Existing Projects into Workspace and browse to select the workspaces/main/CobiGen_Templates directory.

    +
  • +
  • +

    Click Finish and you should have the CobiGen_Templates as a new project in Eclipse’s workspace.

    +
  • +
+
+
+

Also verify that you have the latest templates of CobiGen. Your templates folder must contain the crud_java_ea_uml folder. +If you do not see it, please follow the next steps:

+
+
+
    +
  • +

    Download the accumulative patch.

    +
  • +
  • +

    Open the zip file and extract its content inside the root folder of your Devonfw distribution Devon-dist_2.4.0/

    +
  • +
+
+
+

After following those steps correctly, you should have the latest version of the templates ready to use.

+
+
+
+
+

Generation

+
+
+

In this tutorial, we are going to generate the entity infrastructure using as input a class diagram, modelled with Enterprise Architect (EA). First, create a class diagram, an example is shown on figure below:

+
+
+
+Eclipse CobiGen generation +
+
+
+

When you are finished, you will have to export that UML diagram into an XMI version 2.1 file. This is the file format that CobiGen understands. See below a figure showing this process:

+
+
+
+Eclipse CobiGen generation +
+
+
+

To open that window, see this tutorial.

+
+
+

Once you have the exported file, change its extension from xmi to xml. Then create a devon4j project and import the exported file into the core of your devon4j project.

+
+
+

Now we are going to start the generation, right-click your exported file and select CobiGen > Generate, finally select the entity infrastructure increment:

+
+
+
+Eclipse CobiGen generation +
+
+
+

After following all these steps, your generated files should be inside src\main\java folder. If you want an XMI example, you will find it here.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/howto_Release-creation.html b/docs/devonfw.github.io/1.0/cobigen.wiki/howto_Release-creation.html new file mode 100644 index 00000000..32ce8835 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/howto_Release-creation.html @@ -0,0 +1,305 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==CobiGen Release creation +In this guide we explain how to create CobiGen related releases, i.e. release of a new core version using our useful release automation script.

+
+
+

Usage

+
+
+

Fire up a command prompt from the CobiGen IDE environment (using console.bat for example). Then, you will need to execute the following command:

+
+
+
+
python "<path_to_release_script_parent_folder>/create_release.py" -d -g devonfw/cobigen -r "<path_of_your_just_cloned_fork>" -k "yourcapgemini@mail.com" -c
+
+
+
+ + + + + +
+ + +The CobiGen development environment comes with all required python packages needed for the release script. However, if you encounter errors like no module named xyz found you might want to consider running the following command: +
+
+
+
+
python -m pip install -r "<path_to_release_script_parent_folder>/requirements.txt"
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/howto_angular-client-generation.html b/docs/devonfw.github.io/1.0/cobigen.wiki/howto_angular-client-generation.html new file mode 100644 index 00000000..69796eb0 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/howto_angular-client-generation.html @@ -0,0 +1,558 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Angular 8 Client Generation

+
+
+

The generation can create a full Angular 8 client using the devon4ng-application-template package located at workspaces/examples folder of the distribution. For more details about this package, please refer here.

+
+
+

Take into account that the TypeScript merging for CobiGen needs Node 6 or higher to be installed at your machine.

+
+
+ + + + + +
+ + +This is a short introduction to the Angular generation. For a deeper tutorial including the generation of the backend, we strongly recommend you to follow this document. +
+
+
+

Requisites

+
+
+

Install yarn globally:

+
+
+
+
npm install -g yarn
+
+
+
+
+
+

Angular 8 workspace

+
+
+

The output location of the generation can be defined editing the cobigen.properties file located at crud_angular_client_app/templates folder of the CobiGen_Templates project.

+
+
+
+`cobigen.properties file` +
+
+
+

By default, the output path would be into the devon4ng-application-template folder at the root of the devon4j project parent folder:

+
+
+
+
root/
+ |- devon4ng-application-template/
+ |- devon4j-project-parent/
+   |- core/
+   |- server/
+
+
+
+

However, this path can be changed, for example to src/main/client folder of the devon4j project:

+
+
+

relocate: ./src/main/client/${cwd}

+
+
+
+
root/
+ |- devon4j-project-parent/
+   |- core/
+      |- src
+        |- main
+          |- client
+   |- server/
+
+
+
+

Once the output path is chosen, copy the files of DEVON4NG-APPLICATION-TEMPLATE repository into this output path.

+
+
+
+
+

Install Node dependencies

+
+
+

Open a terminal into devon4ng-application-template copied and just run the command:

+
+
+
+
yarn
+
+
+
+

This will start the installation of all node packages needed by the project into the node_modules folder.

+
+
+
+
+

Generating

+
+
+

From an ETO object, right click, CobiGen → Generate will show the CobiGen wizard relative to client generation:

+
+
+
+CobiGen Client Generation Wizard +
+
+
+

Check all the increments relative to Angular:

+
+
+ + + + + +
+ + +
+

The Angular devon4j URL increment is only needed for the first generation. However, checking it again on the next generation will not cause any problem.

+
+
+
+
+

As we have done on other generations, we click Next to choose which fields to include in the generation, or simply click Finish to start the generation.

+
+
+
+CobiGen Client Generation Wizard 3 +
+
+
+
+
+

Routing

+
+
+

Due to the nature of the TypeScript merger, it is currently not possible to properly merge the array of path objects of the routings in the app.routing.ts file, so this modification should be done by hand in this file. However, the import related to the newly generated component is added.

+
+
+

This would be the generated app-routing.module file:

+
+
+
+
import { Routes, RouterModule } from '@angular/router';
+import { LoginComponent } from './login/login.component';
+import { AuthGuard } from './shared/security/auth-guard.service';
+import { InitialPageComponent } from './initial-page/initial-page.component';
+import { HomeComponent } from './home/home.component';
+import { SampleDataGridComponent } from './sampledata/sampledata-grid/sampledata-grid.component';
+//Routing array
+const appRoutes: Routes = [{
+    path: 'login',
+    component: LoginComponent
+}, {
+    path: 'home',
+    component: HomeComponent,
+    canActivate: [AuthGuard],
+    children: [{
+        path: '',
+        redirectTo: '/home/initialPage',
+        pathMatch: 'full',
+        canActivate: [AuthGuard]
+    }, {
+        path: 'initialPage',
+        component: InitialPageComponent,
+        canActivate: [AuthGuard]
+    }]
+}, {
+    path: '**',
+    redirectTo: '/login',
+    pathMatch: 'full'
+}];
+export const routing = RouterModule.forRoot(appRoutes);
+
+
+
+

Adding the following into the children object of home, will add into the side menu the entry for the component generated:

+
+
+
+
{
+    path: 'sampleData',
+    component: SampleDataGridComponent,
+    canActivate: [AuthGuard],
+}
+
+
+
+
+
import { Routes, RouterModule } from '@angular/router';
+import { LoginComponent } from './login/login.component';
+import { AuthGuard } from './shared/security/auth-guard.service';
+import { InitialPageComponent } from './initial-page/initial-page.component';
+import { HomeComponent } from './home/home.component';
+import { SampleDataGridComponent } from './sampledata/sampledata-grid/sampledata-grid.component';
+//Routing array
+const appRoutes: Routes = [{
+    path: 'login',
+    component: LoginComponent
+}, {
+    path: 'home',
+    component: HomeComponent,
+    canActivate: [AuthGuard],
+    children: [{
+        path: '',
+        redirectTo: '/home/initialPage',
+        pathMatch: 'full',
+        canActivate: [AuthGuard]
+    }, {
+        path: 'initialPage',
+        component: InitialPageComponent,
+        canActivate: [AuthGuard]
+    }, {
+        path: 'sampleData',
+        component: SampleDataGridComponent,
+        canActivate: [AuthGuard],
+    }]
+}, {
+    path: '**',
+    redirectTo: '/login',
+    pathMatch: 'full'
+}];
+export const routing = RouterModule.forRoot(appRoutes);
+
+
+
+
+`APP SideMenu` +
+
+
+
+
+

JWT Authentication

+
+
+

If you are using a backend server with JWT Authentication (there is a sample in workspaces/folder called sampleJwt) you have to specify the Angular application to use this kind of authentication.

+
+
+

By default, the variable is set to CSRF, but you can change it to JWT by going to environment.ts and setting security: 'jwt'.

+
+
+
+
+

Running

+
+
+

First of all, run your devon4j java server by right clicking over SpringBootApp.java Run As → Java Application. This will start to run the SpringBoot server. Once you see the Started SpringBoot in XX seconds, the backend is running.

+
+
+
+Starting `SpringBoot` +
+
+
+

Once the server is running, open a Devon console at the output directory defined previously and run:

+
+
+
+
ng serve --open
+
+
+
+

This will run the Angular 8 application at:

+
+
+
+
http://localhost:4200
+
+
+
+
+Running Angular 8 app +
+
+
+

Once finished, the browser will open automatically at the previous localhost URL showing the Angular 8 application, using the credentials set at the devon4j java server you will be able to access.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/howto_create-a-new-plugin.html b/docs/devonfw.github.io/1.0/cobigen.wiki/howto_create-a-new-plugin.html new file mode 100644 index 00000000..d7132a33 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/howto_create-a-new-plugin.html @@ -0,0 +1,615 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Implementing a new Plug-in

+
+
+

New plug-ins can implement an input reader, a merger, a matcher, a trigger interpreter, and/or a template engine as explained here.

+
+
+ + + + + +
+ + +
+

It is discouraged to have cobigen-core dependencies at runtime, except for cobigen-core-api which definitely must be present.

+
+
+
+
+

Plugin Activator

+
+
+

Each plug-in has to have a plug-in activator class implementing the interface GeneratorPluginActivator from the core-api. This class will be used to load the plug-in using the PluginRegistry as explained here. This class implements two methods:

+
+
+
    +
  1. +

    bindMerger() → returns a mapping of merge strategies and its implementation to be registered.

    +
  2. +
  3. +

    bindTriggerInterpreter()→ returns the trigger interpreters to be provided by this plug-in.

    +
  4. +
+
+
+

Both methods create and register instances of mergers and trigger interpreters to be provided by the new plug-in.

+
+
+
+
+

Adding Trigger Interpreter

+
+
+

The trigger interpreter has to implement the TriggerInterpreter interface from the core. The trigger interpreter defines the type for the new plugin and creates new InputReader and new Matcher objects.

+
+
+
+
+

Adding Input Reader

+
+
+

The input reader is responsible for reading the input object and parsing it into FreeMarker models. The input reader must be implemented for the type of the input file. If there is any existing plugin that has the same file type as input, there will be no need to add a new input reader to the new plug-in.

+
+
+

Input Reader Interface

+
+

The interface needed to add a new input reader is defined in the core. Each new sub plug-in must implement this interface if an input reader is needed for it.

+
+
+

The interface implements the basic methods that an input reader must have, +but if additional methods are required, the developer must add a new interface +that extends the original interface `InputReader.java` from the core-api +and implement that on the sub plug-in.

+
+
+

The methods to be implemented by the input reader of the new sub plugin are:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
MethodReturn TypeDescription

isValidInput(Object input)

boolean

This function will be called if matching triggers or matching templates should be retrieved for a given input object.

createModel(Object input)

Map<String, Object>

This function should create the FreeMarker object model from the given input.

combinesMultipleInputObjects(Object input)

boolean

States whether the given input object combines multiple input objects to be used for generation.

getInputObjects(Object input, Charset inputCharset)

List<Object>

Will return the set of combined input objects if the given input combines multiple input objects.

getTemplateMethods(Object input)

Map<String, Object>

This method returns available template methods from the plugins as Map. If the plugin which corresponds to the input does not provide any template methods an empty Map will be returned.

getInputObjectsRecursively(Object input, Charset inputCharset)

List<Object>

Will return the set of combined input objects if the given input combines multiple input objects.

+
+
+

Model Constants

+
+

The Input reader will create a model for FreeMarker. A FreeMarker model must +have variables to use them at the .ftl template file. Refer to Java Model to see the FreeMarker model example for java input files.

+
+
+
+

Registering the Input Reader

+
+

The input reader is an object that can be retrieved using the corresponding get method of the trigger interpreter object. The trigger interpreter object is loaded in the eclipse plug-in using the load plug-in method explained here. That way, when the core needs the input reader, it only needs to call that getInputReader method.

+
+
+
+
+
+

Adding Matcher

+
+
+

The matcher implements the MatcherInterpreter interface from the core-api. It should be implemented to provide a new input matcher. Input matchers are defined as part of a trigger and provide the ability to restrict specific inputs to a set of templates. This restriction is implemented with a MatcherType enum.

+
+
+

E.g JavaPlugin

+
+
+
+
private enum MatcherType {
+    /** Full Qualified Name Matching */
+    FQN,
+    /** Package Name Matching */
+    PACKAGE,
+    /** Expression interpretation */
+    EXPRESSION
+}
+
+
+
+

Furthermore, matchers may provide several variable assignments, which might be +dependent on any information of the matched input and thus should be resolvable +by the defined matcher.

+
+
+

E.g JavaPlugin

+
+
+
+
private enum VariableType {
+    /** Constant variable assignment */
+    CONSTANT,
+    /** Regular expression group assignment */
+    REGEX
+}
+
+
+
+
+
+

Adding Merger

+
+
+

The merger is responsible for merging the new output with the existing data in the file if it already exists. It must implement the Merger interface from the core-api. The implementation of the Merger interface must override the following methods:

+
+ +++++ + + + + + + + + + + + + + + + + + + + +
MethodReturn TypeDescription

getType()

String

Returns the type, this merger should handle.

merge(File base, String patch, String targetCharset)

String

Merges the patch into the base file.

+
+

It is important to know that any exception caused by the merger must throw a MergeException from the core-api so that the eclipse-plugin can handle it.

+
+
+
+
+

Changes since Eclipse / Maven 3.x

+
+
+

Since version 3.x the Eclipse and Maven plugins of CobiGen utilize the Java ServiceLoader mechanic to find and register plugins at runtime. To enable a new plugin to be discovered by this mechanic the following steps are needed:

+
+
+
    +
  • +

    create the file META-INF/services/com.devonfw.cobigen.api.extension.GeneratorPluginActivator containing just the full qualified name of the class implementing the GeneratorPluginActivator interface, if the plugin provides a Merger and/or a TriggerInterpreter

    +
  • +
  • +

    create the file META-INF/services/com.devonfw.cobigen.api.extension.TextTemplateEngine containing just the full qualified name of the class implementing the TextTemplateEngine interface, if provided by the plugin

    +
  • +
  • +

    include META-INF into the target bundle (i.e. the folder META-INF has to be present in the target jar file)

    +
  • +
+
+
+
+
Example: Java Plugin
+
+

The java plugin provides both a Merger and a TriggerInterpreter. It contains therefore a com.devonfw.cobigen.api.extension.GeneratorPluginActivator file with the following content:

+
+
+
+
com.devonfw.cobigen.javaplugin.JavaPluginActivator
+
+
+
+

This makes the JavaPluginActivator class discoverable by the ServiceLoader at runtime.

+
+
+
+
+
    +
  • +

    to properly include the plugin into the current system and use existing infrastructure, you need to add the plugin as a module in /cobigen/pom.xml (in case of a Merger/TriggerInterpreter providing plugin) and declare that as the plugin’s parent in it’s own pom.xml via

    +
  • +
+
+
+
+
<parent>
+    <groupId>com.devonfw</groupId>
+    <artifactId>cobigen-parent</artifactId>
+    <version>dev-SNAPSHOT</version>
+</parent>
+
+
+
+

or /cobigen/cobigen-templateengines/pom.xml (in case of a Merger/TriggerInterpreter providing plugin) and declare that as the plugin’s parent in it’s own pom.xml via

+
+
+
+
<parent>
+    <groupId>com.devonfw</groupId>
+    <artifactId>cobigen-tempeng-parent</artifactId>
+    <version>dev-SNAPSHOT</version>
+</parent>
+
+
+
+

If the plugin provides both just use the /cobigen/pom.xml.

+
+
+
    +
  • +

    The dependencies of the plugin are included in the bundle

    +
  • +
  • +

    To make the plugin available to the Eclipse plugin it must be included into the current compositeContent.xml and compositeArtifacts.xml files. Both files are located in https://github.com/devonfw/cobigen/tree/gh-pages/updatesite/{test|stable}. To do so, add an <child> entry to the <children> tag in both files and adapt the size attribute to match the new number of references. The location attribute of the new <child> tag needs to be the artifact id of the plugins pom.xml.

    +
  • +
+
+
+
+
Example: Java Plugin
+
+

In case of the Java plugin, the entry is

+
+
+
+
<child location="cobigen-javaplugin"/>
+
+
+
+
+
+

Deployment

+
+

If you want to create a test release of eclipse you need to run the command

+
+
+
+
sh deploy.sh
+
+
+
+

on the cloned CobiGen repository while making sure, that your current version of CobiGen cloned is a snapshot version. This will automatically be detected by the deploy script.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/howto_create-external-plugin.html b/docs/devonfw.github.io/1.0/cobigen.wiki/howto_create-external-plugin.html new file mode 100644 index 00000000..eb95e98b --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/howto_create-external-plugin.html @@ -0,0 +1,896 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Introduction to CobiGen external plug-ins

+
+
+

Since September of 2019, a major change on CobiGen has taken place. CobiGen is written in Java code and previously, it was very hard for developers to create new plug-ins in other languages.

+
+
+

Creating a new plug-in means:

+
+
+
    +
  • +

    Being able to parse a file in that language.

    +
  • +
  • +

    Create a human readable model that can be used to generate templates (by retrieving properties from the model).

    +
  • +
  • +

    Enable merging files, so that user’s code does not get removed.

    +
  • +
+
+
+

For the Java plug-in it was relatively easy. As you are inside the Java world, you can use multiple utilities or libraries in order to get the AST or to merge Java code. With this new feature, we wanted that behaviour to be possible for any programming language.

+
+
+

General intuition

+
+
+

Below you will find a very high level description of how CobiGen worked in previous versions:

+
+
+
+Old CobiGen +
+
+
+

Basically, when a new input file was sent to CobiGen, it called the input reader to create a model of it (see here an example of a model). That model was sent to the template engine.

+
+
+

Afterwards, the template engine generated a new file which had to be merged with the original one. All this code was implemented in Java.

+
+
+

On the new version, we have implemented a handler (ExternalProcessHandler) which connects through TCP/IP connection to a server (normally on localhost:5000). This server can be implemented in any language (.Net, Node.js, Python…​) it just needs to implement a REST API defined here. The most important services are the input reading and merging:

+
+
+
+New CobiGen +
+
+
+

CobiGen acts as a client that sends requests to the server in order to read the input file and create a model. The model is returned to the template engine so that it generates a new file. Finally, it is sent back to get merged with the original file.

+
+
+
+
+

How to create new external plug-in

+
+
+

The creation of a new plug-in consists mainly in three steps:

+
+
+
    +
  • +

    Creation of the server (external process).

    +
  • +
  • +

    Creation of a CobiGen plug-in.

    +
  • +
  • +

    Creation of templates.

    +
  • +
+
+
+

Server (external process)

+
+

The server can be programmed in any language that is able to implement REST services endpoints. The API that needs to implement is defined with this contract. You can paste the content to https://editor.swagger.io/ for a better look.

+
+
+

We have already created a NestJS server that implements the API defined above. You can find the code here which you can use as an example.

+
+
+

As you can see, the endpoints have the following naming convention: processmanagement/todoplugin/nameOfService where you will have to change todo to your plug-in name (e.g. rustplugin, pyplugin, goplugin…​)

+
+
+

When implementing service getInputModel which returns a model from the input file there are only two restrictions:

+
+
+
    +
  • +

    A path key must be added. Its value can be the full path of the input file or just the file name. It is needed because in CobiGen there is a batch mode, in which you can have multiple input objects inside the same input file. You do not need to worry about batch mode for now.

    +
  • +
  • +

    On the root of your model, for each found key that is an object (defined with brackets [{}]), CobiGen will try to use it as an input object. For example, this could be a valid model:

    +
    +
    +
    {
    +  "path": "example/path/employee.entity.ts"
    +  "classes": [
    +    {
    +      "identifier": "Employee",
    +      "modifiers": [
    +        "export"
    +      ],
    +      "decorators": [
    +        {
    +          "identifier": {
    +            "name": "Entity",
    +            "module": "typeorm"
    +          },
    +          "isCallExpression": true
    +        }
    +      ],
    +      "properties": [
    +        {
    +          "identifier": "id",
    +    ...
    +    ...
    +    ...
    +    }]
    +    "interfaces": [{
    +        ...
    +    }]
    +}
    +
    +
    +
  • +
+
+
+

For this model, CobiGen would use as input objects all the classes and interfaces defined. On the templates we would be able to do model.classes[0].identifier to get the class name. These input objects depend on the language, therefore you can use any key.

+
+
+

In order to test the server, you will have to deploy it on your local machine (localhost), default port is 5000. If that port is already in use, you can deploy it on higher port values (5001, 5002…​). Nevertheless, we explain later the testing process as you need to complete the next step before.

+
+
+ + + + + +
+ + +Your server must accept one argument when running it. The argument will be the port number (as an integer). This will be used for CobiGen in order to handle blocked ports when deploying your server. Check this code to see how we implemented that argument on our NestJS server. +
+
+
+
+

CobiGen plug-in

+
+

You will have to create a new CobiGen plug-in that connects to the server. But do not worry, you will not have to implement anything new. We have a CobiGen plug-in template available, the only changes needed are renaming files and setting some properties on the pom.xml. Please follow these steps:

+
+
+
    +
  • +

    Get the CobiGen plug-in template from here. It is a template repository (new GitHub feature), so you can click on "Use this template" as shown below:

    +
    +
    +Plugin CobiGen template +
    +
    +
  • +
  • +

    Name your repo as cobigen-name-plugin where name can be python, rust, go…​ In our case we will create a nest plug-in. It will create a repo with only one commit which contains all the needed files.

    +
  • +
  • +

    Clone your just created repo and import folder cobigen-todoplugin as a Maven project on any Java IDE, though we recommend you devonfw ;)

    +
    +
    +Import plugin +
    +
    +
  • +
  • +

    Rename all the todoplugin folders, files and class names to nameplugin. In our case nestplugin. In Eclipse you can easily rename by right clicking and then refactor → rename:

    +
  • +
+
+
+
+Rename plugin +
+
+
+ + + + + +
+ + +We recommend you to select all the checkboxes +
+
+
+
+Rename checkbox +
+
+
+
    +
  • +

    Remember to change in src/main/java and src/test/java all the package, files and class names to use your plug-in name. The final result would be:

    +
    +
    +Package structure +
    +
    +
  • +
  • +

    Now we just need to change some strings, this is needed for CobiGen to register all the different plugins (they need unique names). In class TodoPluginActivator (in our case NestPluginActivator), change all the todo to your plug-in name. See below the 3 strings that need to be changed:

    +
    +
    +Plugin activator +
    +
    +
  • +
  • +

    Finally, we will change some properties from the pom.xml of the project. These properties define the server (external process) that is going to be used:

    +
    +
      +
    1. +

      Inside pom.xml, press Ctrl + F to perform a find and replace operation. Replace all todo with your plugin name:

      +
      +
      +Pom properties +
      +
      +
    2. +
    3. +

      We are going to explain the server properties:

      +
      +
        +
      1. +

        artifactId: This is the name of your plug-in, that will be used for a future release on Maven Central.

        +
      2. +
      3. +

        plugin.name: does not need to be changed as it uses the property from the artifactId. When connecting to the server, it will send a request to localhost:5000/{plugin.name}plugin/isConnectionReady, that is why it is important to use an unique name for the plug-in.

        +
      4. +
      5. +

        server.name: This defines how the server executable (.exe) file will be named. This .exe file contains all the needed resources for deploying the server. You can use any name you want.

        +
      6. +
      7. +

        server.version: You will specify here the server version that needs to be used. The .exe file will be named as {server.name}-{server.version}.exe.

        +
      8. +
      9. +

        server.url: This will define from where to download the server. We really recommend you using NPM which is a package manager we know it works well. We explain here how to release the server on NPM. This will download the .exe file for Windows.

        +
      10. +
      11. +

        server.url.linux: Same as before, but this should download the .exe file for Linux systems. If you do not want to implement a Linux version of the plug-in, just use the same URL from Windows or MacOS.

        +
      12. +
      13. +

        server.url.macos: Same as before, but this should download the .exe file for MacOS systems. If you do not want to implement a MacOS version of the plug-in, just use the same URL from Linux or Windows.

        +
      14. +
      +
      +
    4. +
    +
    +
  • +
+
+
+
+
+
+

Testing phase

+
+
+

Now that you have finished with the implementation of the server and the creation of a new CobiGen plug-in, we are going to explain how you can test that everything works fine:

+
+
+
    +
  1. +

    Deploy the server on port 5000.

    +
  2. +
  3. +

    Run mvn clean test on the CobiGen-plugin or run the JUnit tests directly on Eclipse.

    +
    +
      +
    1. +

      If the server and the plug-in are working properly, some tests will pass and other will fail (we need to tweak them).

      +
    2. +
    3. +

      If every test fails, something is wrong in your code.

      +
    4. +
    +
    +
  4. +
  5. +

    In order to fix the failing tests, go to src/test/java. The failing tests make use of sample input files that we added in sake of example:

    +
    +
    +Pom properties +
    +
    +
  6. +
+
+
+

Replace those files (on src/test/resources/testadata/unittest/files/…​) with the correct input files for your server.

+
+
+
+
+

Releasing

+
+
+

Now that you have already tested that everything works fine, we are going to explain how to release the server and the plug-in.

+
+
+

Release the server

+
+

We are going to use NPM to store the executable of our server. Even though NPM is a package manager for JavaScript, it can be used for our purpose.

+
+
+
    +
  • +

    Get the CobiGen server template from here. It is a template repository (new GitHub feature), so you can click on "Use this template" as shown below:

    +
    +
    +Server CobiGen template +
    +
    +
  • +
  • +

    Name your repo as cobigen-name-server where name can be python, rust, go…​ In our case we will create a nest plug-in. It will create a repo with only one commit which contains all the needed files.

    +
  • +
  • +

    Clone your just created repo and go to folder cobigen-todo-server. It will just contain two files: ExternalProcessContract.yml is the OpenAPI definition which you can modify with your own server definition (this step is optional), and package.json is a file needed for NPM in order to define where to publish this package:

    +
    +
    +
    {
    +  "name": "@devonfw/cobigen-todo-server",
    +  "version": "1.0.0",
    +  "description": "Todo server to implement the input reader and merger for CobiGen",
    +  "author": "CobiGen Team",
    +  "license": "Apache"
    +}
    +
    +
    +
  • +
+
+
+

Those are the default properties. This would push a new package cobigen-todo-server on the devonfw organization, with version 1.0.0. We have no restrictions here, you can use any organization, though we always recommend devonfw.

+
+
+ + + + + +
+ + +Remember to change all the todo to your server name. +
+
+
+
    +
  • +

    Add your executable file into the cobigen-todo-server folder, just like below. As we said previously, this .exe is the server ready to be deployed.

    +
    +
    +
    cobigen-template-server/
    + |- cobigen-todo-server/
    +   |- ExternalProcessContract.yml
    +   |- package.json
    +   |- todoserver-1.0.0.exe
    +
    +
    +
  • +
  • +

    Finally, we have to publish to NPM. If you have never done it, you can follow this tutorial. Basically you need to login into NPM and run:

    +
    +
    +
    cd cobigen-todo-server/
    +npm publish --access=public
    +
    +
    +
  • +
+
+
+ + + + + +
+ + +To release Linux and MacOS versions of your plug-in, just add the suffix into the package name (e.g. @devonfw/cobigen-todo-server-linux) +
+
+
+

That’s it! You have published the first version of your server. Now you just need to modify the properties defined on the pom of your CobiGen plug-in. Please see next section for more information.

+
+
+
+

Releasing CobiGen plug-in

+
+
    +
  • +

    Change the pom.xml to define all the properties. You can see below a final example for nest:

    +
    +
    +
    ...
    +   <groupId>com.devonfw.cobigen</groupId>
    +   <artifactId>nestplugin</artifactId>
    +   <name>CobiGen - Nest Plug-in</name>
    +   <version>1.0.0</version>
    +   <packaging>jar</packaging>
    +   <description>CobiGen - nest Plug-in</description>
    +
    +   <properties>
    +      <!-- External server properties -->
    +      <plugin.name>${project.artifactId}</plugin.name>
    +      <server.name>nestserver</server.name>
    +      <server.version>1.0.0</server.version>
    +      <server.url>https\://registry.npmjs.org/@devonfw/cobigen-nest-server/-/cobigen-nest-server-${server.version}.tgz</server.url>
    +      <server.url.linux>https\://registry.npmjs.org/@devonfw/cobigen-nest-server-linux/-/cobigen-nest-server-linux-${server.version}.tgz</server.url.linux>
    +      <server.url.macos>https\://registry.npmjs.org/@devonfw/cobigen-nest-server-macos/-/cobigen-nest-server-macos-${server.version}.tgz</server.url.macos>
    +...
    +
    +
    +
  • +
  • +

    Deploy to Maven Central.

    +
  • +
+
+
+
+
+
+

Templates creation

+
+
+

After following above steps, we now have a CobiGen plug-in that connects to a server (external process) which reads your input files, returns a model and is able to merge files.

+
+
+

However, we need a key component for our plug-in to be useful. We need to define templates:

+
+
+
    +
  • +

    Fork our CobiGen main repository, from here and clone it into your PC. Stay in the master branch and import into your IDE cobigen-templates\templates-devon4j. Set the Java version of the project to 1.8 if needed.

    +
  • +
  • +

    Create a new folder on src/main/templates, this will contain all your templates. You can use any name, but please use underscores as separators. In our case, we created a folder crud_typescript_angular_client_app to generate an Angular client from a TypeORM entity (NodeJS entity).

    +
    +
    +Templates project +
    +
    +
  • +
  • +

    Inside your folder, create a templates folder. As you can see below, the folder structure of the generated files starts here (the sources). Also we need a configuration file templates.xml that should be on the same level as templates/ folder. For now, copy and paste a templates.xml file from any of the templates folder.

    +
    +
    +Templates project +
    +
    +
  • +
  • +

    Start creating your own templates. Our default templates language is Freemarker, but you can also use Velocity. Add the extension to the file (.ftl) and start developing templates! You can find useful documentation here.

    +
  • +
  • +

    After creating all the templates, you need to modify context.xml which is located on the root of src/main/templates. There you need to define a trigger, which is used for CobiGen to know when to trigger a plug-in. I recommend you to copy and paste the following trigger:

    +
    +
    +
      <trigger id="crud_typescript_angular_client_app" type="nest" templateFolder="crud_typescript_angular_client_app">
    +    <matcher type="fqn" value="([^\.]+).entity.ts">
    +      <variableAssignment type="regex" key="entityName" value="1"/>
    +      <variableAssignment type="regex" key="component" value="1"/>
    +      <variableAssignment type="constant" key="domain" value="demo"/>
    +    </matcher>
    +  </trigger>
    +
    +
    +
  • +
  • +

    Change templateFolder to your templates folder name. For id you can use any value, but it is recommended to use the same as the template folder name. type is the TRIGGER_TYPE we defined above on the NestPluginActivator class. On matcher just change the value: ([^\.]+).entity.ts means that we will only accept input files that contain anyString.entity.ts. This improves usability, so that users only generate using the correct input files. You will find more info about variableAssignment here.

    +
  • +
  • +

    Finally, it is time to configure templates.xml. It is needed for organizing templates into increments; please take a look at this documentation.

    +
  • +
+
+
+

Testing templates

+
+
    +
  • +

    When you have finished your templates you will like to test them. On the templates-devon4j pom.xml remove the SNAPSHOT from the version (in our case the version will be 3.1.8). Run mvn clean install -DskipTests on the project. We skip tests because you need special permissions to download artifacts from our Nexus. Remember the version that has just been installed:

    +
    +
    +Templates snapshot version +
    +
    +
  • +
+
+
+ + + + + +
+ + +We always recommend using the devonfw console, which already contains a working Maven version. +
+
+
+
    +
  • +

    Now we have your last version of the templates ready to be used. We need to use that latest version in CobiGen. We will use the CobiGen CLI that you will find in your cloned repo, at cobigen-cli/cli. Import the project into your IDE.

    +
  • +
  • +

    Inside the project, go to src/main/resources/pom.xml. This pom.xml is used on runtime in order to install all the CobiGen plug-ins and templates. Add there your latest templates version and the previously created plug-in:

    +
    +
    +CLI pom +
    +
    +
  • +
  • +

    Afterwards, run mvn clean install -DskipTests and CobiGen will get your plug-ins. Now you have three options to test templates:

    +
    +
      +
    1. +

      Using Eclipse run as:

      +
      +
        +
      1. +

        Inside Eclipse, you can run the CobiGen-CLI as a Java application. Right click class CobiGenCLI.java → run as → run configurations…​ and create a new Java application as shown below:

        +
        +
        +Create configuration +
        +
        +
      2. +
      3. +

        That will create a CobiGenCLI configuration where we can set arguments to the CLI. Let’s first begin with showing the CLI version, which should print a list of all plug-ins, including ours.

        +
        +
        +Run version +
        +
        +
        +
        +
         ...
        + name:= propertyplugin version = 2.0.0
        + name:= jsonplugin version = 2.0.0
        + name:= templates-devon4j version = 3.1.8
        + name:= nestplugin version = 1.0.0
        + ...
        +
        +
        +
      4. +
      5. +

        If that worked, now you can send any arguments to the CLI in order to generate with your templates. Please follow this guide that explains all the CLI commands.

        +
      6. +
      +
      +
    2. +
    3. +

      Modify the already present JUnit tests on the CLI project: They test the generation of templates from multiple plug-ins, you can add your own tests and input files.

      +
    4. +
    5. +

      Use the CLI jar to execute commands:

      +
      +
        +
      1. +

        The mvn clean install -DskipTests command will have created a Cobigen.jar inside your target folder (cobigen-cli/cli/target). Open the jar with any unzipper and extract to the current location class-loader-agent.jar, cobigen.bat and cg.bat:

        +
        +
        +Extract files +
        +
        +
      2. +
      3. +

        Now you can run any CobiGen CLI commands using a console. This guide explains all the CLI commands.

        +
        +
        +Run CLI +
        +
        +
      4. +
      +
      +
    6. +
    +
    +
  • +
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/howto_devon4net.html b/docs/devonfw.github.io/1.0/cobigen.wiki/howto_devon4net.html new file mode 100644 index 00000000..efd6a2f1 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/howto_devon4net.html @@ -0,0 +1,443 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4net CobiGen Guide

+
+
+

Overview

+
+

In this guide we will explain how to generate a new WebAPI project from an OpenAPI 3.0.0 specification. This means that we are going to use a “contract first” strategy. This is going to be possible due to these type of files that contain all the information about entities, operations, etc…

+
+
+

In order to make it work we are using CobiGen, a powerful tool for generating source code. CobiGen allows users to generate all the structure and code of the components, helping to save a lot of time otherwise wasted on repetitive tasks.

+
+
+
+

Getting things ready

+
+

devonfw-IDE

+
+

First, we will install the devonfw-IDE. It is a tool that will setup your IDE within minutes. Please follow the install guide here.

+
+
+
+

devon4net Templates

+
+

We are going to use the template of devon4net as a base to generate all the code, so what we have to do now is to download said template using the following steps.

+
+
+

First of all you have to set up all the environment for .NET, you can do this using the following tutorial. Next we are going to create a new folder where we want to have the WebAPI project, lastly we are going to open the terminal there.

+
+
+

Type the following:

+
+
+
+
`dotnet new -i Devon4Net.WebAPI.Template`
+
+
+
+

and then:

+
+
+
+
`dotnet new Devon4NetAPI`
+
+
+
+
+

OpenAPI File

+
+

In order to let CobiGen generate all the files, we first have to make some modifications to our OpenAPI file.

+
+
+

It is obligatory to put the “x-rootpackage” tag to indicate where CobiGen will place the generated files, as well as the "x-component" tags for each component. Keep in mind that due to CobiGen’s limitations each component must have its own entity.

+
+
+

You can read more information about how to configure your OpenAPI file and a working example here.

+
+
+
+
+

Generating files

+
+

CobiGen allows us to generate the files in two different ways. One of them is using Eclipse, which can be done through its graphical interface. The other way to generate the code is using the CobiGen CLI tool.

+
+
+

Generating files through Eclipse

+
+

In order to generate the files using Eclipse we need to follow some simple steps.

+
+
+

First we are going to import our basic devon4net WebAPI Project into Eclipse. To do so, open Eclipse with the “eclipse-main.bat” file that can be found in the devon distribution root folder. Once we are inside Eclipse, we go to File > Open projects from file system…​ and, under "Directory", search for your project.

+
+
+
+CobiGen +
+
+
+

Next we copy our OpenAPI file into the root folder of the project.

+
+
+
+CobiGen +
+
+
+

And then we right click on OpenAPI file and then select CobiGen > Generate…​ It will display a window like this:

+
+
+
+CobiGen +
+
+
+

To select all .NET features choose CRUD devon4net Server; otherwise you can select only those that interest you.

+
+
+
+CobiGen +
+
+
+

Once you select all the files that you want to generate, click on the “Finish” button to generate all the source code.

+
+
+
+

Generating files through CobiGen CLI

+
+

In order to generate the files using the CobiGen CLI, the following steps are needed:

+
+
+
    +
  1. +

    Go to devonfw distribution folder

    +
  2. +
  3. +

    Run console.bat, this will open a console.

    +
  4. +
  5. +

    Go to the folder you downloaded the devon4net template and your yml file.

    +
  6. +
  7. +

    Run the command:

    +
    +
    +
    `cobigen generate {yourOpenAPIFile}.yml`
    +
    +
    +
  8. +
  9. +

    A list of increments will be printed so that you can start the generation. The CRUD devon4net Server increment has to be selected.

    +
  10. +
+
+
+
+
+

Configuration

+
+

Data base

+
+

CobiGen generates an empty context that has to be filled in manually in order to be able to work with the database. The context can be found in [Project_Name]/Devon4Net.WebAPI.Implementation/Domain/Database/CobigenContext.cs.

+
+
+
+CobiGen +
+
+
+
+

Run the application

+
+

After the configuration of the database, open a terminal in path: [Project_Name]/Devon4Net.Application.WebAPI and then type:

+
+
+
+
`dotnet run`
+
+
+
+

This will deploy our application in our localhost with the port 8082, so when you click here (https://localhost:8082/swagger) you can see, in swagger, all the services and the data model.

+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/howto_enable_composite_primary_keys_in_entity.html b/docs/devonfw.github.io/1.0/cobigen.wiki/howto_enable_composite_primary_keys_in_entity.html new file mode 100644 index 00000000..bfec5bde --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/howto_enable_composite_primary_keys_in_entity.html @@ -0,0 +1,346 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Enable Composite Primary Keys in Entity

+
+
+

In order to enable Composite Primary Keys in entity in CobiGen, the below approach is suggested

+
+
+

The templates in CobiGen have been enhanced to support Composite primary keys while still supporting the default devonfw/Cobigen values with Long id.

+
+
+

Also, the current generation from Entity still holds good - right click from an Entity object, CobiGen → Generate will show the CobiGen wizard relative to the entity generation.

+
+
+

After generating, below example shows how composite primary keys can be enabled.

+
+
+
+
@Entity
+@Table(name = "employee")
+public class EmployeeEntity {
+	private CompositeEmployeeKey id;
+	private String name;
+	private String lastName;
+	@Override
+	@EmbeddedId
+	public CompositeEmployeeKey getId() {
+		return id;
+	}
+	@Override
+	public void setId(CompositeEmployeeKey id) {
+		this.id = id;
+	}
+	.
+	.
+	.
+
+
+
+
+
public class CompositeEmployeeKey implements Serializable {
+  private String companyId;
+  private String employeeId;
+
+
+
+

Once the generation is complete, implement PersistenceEntity<ID>.java in the EmployeeEntity and pass the composite primary key object which is CompositeEmployeeKey in this case as the parameter ID.

+
+
+
+
import com.devonfw.module.basic.common.api.entity.PersistenceEntity;
+@Entity
+@Table(name = "employee")
+public class EmployeeEntity implements PersistenceEntity<CompositeEmployeeKey> {
+	private CompositeEmployeeKey id;
+	private String name;
+	private String lastName;
+
+
+
+

Also, the modificationCounter methods needs to be implemented from the interface PersistenceEntity<ID>. The sample implementation of the modification counter can be referred below.

+
+
+
+
@Override
+  public int getModificationCounter() {
+    if (this.persistentEntity != null) {
+      // JPA implementations will update modification counter only after the transaction has been committed.
+      // Conversion will typically happen before and would result in the wrong (old) modification counter.
+      // Therefore we update the modification counter here (that has to be called before serialization takes
+      // place).
+      this.modificationCounter = this.persistentEntity.getModificationCounter();
+    }
+    return this.modificationCounter;
+  }
+  @Override
+  public void setModificationCounter(int version) {
+    this.modificationCounter = version;
+  }
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/howto_ionic-client-generation.html b/docs/devonfw.github.io/1.0/cobigen.wiki/howto_ionic-client-generation.html new file mode 100644 index 00000000..4ecaed28 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/howto_ionic-client-generation.html @@ -0,0 +1,520 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

==Ionic client generation

+
+
+

We are going to show you how to generate a CRUD Ionic application from an ETO +using CobiGen.

+
+
+ + + + + +
+ + +This is a short introduction to the Ionic generation. For a deeper tutorial including the generation of the backend, we strongly recommend you to follow this document. +
+
+
+

Prerequisites

+
+
+

Before starting, make sure you already have in your computer:

+
+
+
    +
  • +

    Ionic: by following the steps defined on that page. +It includes installing:

    +
    +
      +
    • +

      NodeJS: We have to use "NPM" for downloading packages.

      +
    • +
    • +

      Ionic CLI.

      +
    • +
    +
    +
  • +
  • +

    Capacitor: Necessary to access to native device features.

    +
  • +
+
+
+

If CobiGen_Templates are not already downloaded, follow the next steps:

+
+
+
    +
  • +

    Right click on any file of your workspace CobiGen > Update Templates and now you are able to start the generation.

    +
  • +
  • +

    If you want to adapt them, click Adapt Templates and you should have the CobiGen_Templates as a new project in Eclipse’s workspace.

    +
  • +
+
+
+

After following those steps correctly, you should have the latest version of the templates ready to use.

+
+
+
+
+

Generation

+
+
+

We are going to generate the CRUD into a sample application that we have developed for +testing this functionality. It is present on your workspaces/examples folder (devon4ng-ionic-application-template). If you do not see it, you can clone or download it from here.

+
+
+

After having that sample app, please create a devon4j project and then start implementing the ETO: You will find an example here.

+
+
+

As you can see, TableEto contains 3 attributes: 2 of them are Long and the third one TableState is an enum that you will find +here. +The Ionic generation works fine for any Java primitive attribute (Strings, floats, chars, boolean…​) and enums. However, if you want to use your own objects, you should +override the toString() method, as explained here.

+
+
+

The attributes explained above will be used for generating a page that shows a list. Each item of that list +will show the values of those attributes.

+
+
+

For generating the files:

+
+
+
    +
  • +

    Right click your ETO file and click on CobiGen > Generate as shown on the figure below.

    +
  • +
+
+
+
+Eclipse CobiGen generation +
+
+
+
    +
  • +

    Select the Ionic increments for generating as shown below. Increments group a set of templates for generating +different projects.

    +
    +
      +
    1. +

      Ionic List used for generating the page containing the list.

      +
    2. +
    3. +

      Ionic devon4ng environments is for stating the server path.

      +
    4. +
    5. +

      Ionic i18n used for generating the different language translations for the `translationService` (currently English and Spanish).

      +
    6. +
    7. +

      Ionic routing adds an app-routing.module.ts file to allow navigation similar to the one available in Angular.

      +
    8. +
    9. +

      Ionic theme generates the variables.scss file which contains variables to style the application.

      +
    10. +
    +
    +
  • +
+
+
+
+CobiGen Ionic Wizard +
+
+
+ + + + + +
+ + +By default, the generated files will be placed inside "devon4ng-ionic-application-template", next to the root of your project’s folder. +See the image below to know where they are generated. For changing the generation path and the name of the application go to CobiGen_Templates/crud_ionic_client_app/cobigen.properties. +
+
+
+
+Generation path +
+
+
+

Now that we have generated the files, lets start testing them:

+
+
+
    +
  • +

    First change the SERVER_URL of your application. For doing that, modify src/environments/environments.ts, also modify src/environments/environments.android.ts (android) and src/environments/environments.prod.ts (production) if you want to test in different environments.

    +
  • +
  • +

    Check that there are no duplicated imports. Sometimes there are duplicated imports in src/app/app.module.ts. +This happens because the merger of CobiGen prefers to duplicate rather than to delete.

    +
  • +
  • +

    Run npm install to install all the required dependencies.

    +
  • +
  • +

    Run ionic serve on your console.

    +
  • +
+
+
+

After following all these steps your application should start. However, remember that you will need your server to be running for access to the list page.

+
+
+
+
+

Running it on Android

+
+
+

To run the application in an android emulated device, it is necessary to have Android Studio and Android SDK. After its installation, the following commands have to be run on your console:

+
+
+
    +
  • +

    npx cap init "name-for-the-app (between quotes)" "id-for-the-app (between quotes)"

    +
  • +
  • +

    ionic build --configuration=android. To use this command, you must add an android build configuration at angular.json

    +
  • +
+
+
+
+
    "build": {
+      ...
+      "configurations": {
+        ...
+        "android": {
+          "fileReplacements": [
+            {
+              "replace": "src/environments/environment.ts",
+              "with": "src/environments/environment.android.ts"
+            }
+          ]
+        },
+      }
+    }
+
+
+
+
    +
  • +

    npx cap add android

    +
  • +
  • +

    npx cap copy

    +
  • +
  • +

    npx cap open android

    +
  • +
+
+
+

The last steps are done in Android studio: make the project, make the app, build and APK and run in a device.

+
+
+
+Click on make project +
+
+
+
+click on make app +
+
+
+
+click on build APK +
+
+
+
+click on running device +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/howto_update_CobiGen.html b/docs/devonfw.github.io/1.0/cobigen.wiki/howto_update_CobiGen.html new file mode 100644 index 00000000..1fdff820 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/howto_update_CobiGen.html @@ -0,0 +1,366 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==How to update CobiGen

+
+
+

In order to update CobiGen from our devonfw distribution, we have two options:

+
+
+
    +
  • +

    Open Eclipse, click on HelpCheck for updates

    +
  • +
+
+
+
+Check updates +
+
+
+
    +
  • +

    Select all the CobiGen plugins listed and click on Next.

    +
  • +
+
+
+
+All the updates +
+
+
+

If this option is not working properly, then you can try the second option:

+
+
+
    +
  • +

    Open Eclipse, click on HelpAbout Eclipse IDE:

    +
  • +
+
+
+
+About Eclipse +
+
+
+
    +
  • +

    Click on Installation details:

    +
  • +
+
+
+
+Installation details +
+
+
+
    +
  • +

    Select all the CobiGen plugins and click on Update:

    +
  • +
+
+
+
+All updates details +
+
+
+

After the update process finishes, remember to restart Eclipse.

+
+
+

Updating templates:

+
+
+

To update your CobiGen templates to the latest version, you just need to do one step:

+
+
+
    +
  • +

    Right click any file on your package explorer, click on CobiGenUpdate templates, then click on download:

    +
  • +
+
+
+
+Update templates +
+
+
+

Now you will have the latest templates ready!

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/master-cobigen.html b/docs/devonfw.github.io/1.0/cobigen.wiki/master-cobigen.html new file mode 100644 index 00000000..fa6f9354 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/master-cobigen.html @@ -0,0 +1,7463 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==CobiGen — Code-based incremental Generator +:title-logo-image: images/logo/cobigen_logo.png

+
+
+

Document Description

+
+
+

This document contains the documentation of the CobiGen core module as well as all CobiGen plug-ins and the CobiGen eclipse integration.

+
+
+ + + + + +
+ + +
+

DISCLAIMER: All CobiGen plugins are compatible with the latest release of Devonfw unless otherwise denoted.

+
+
+
+
+

Current versions:

+
+
+
    +
  • +

    CobiGen - Eclipse Plug-in v7.1.0

    +
  • +
  • +

    CobiGen - Maven Build Plug-in v7.1.0

    +
  • +
  • +

    CobiGen CLI v1.2.0

    +
  • +
+
+
+
+
    +
  • +

    CobiGen v7.1.0

    +
  • +
  • +

    CobiGen - Java Plug-in v7.1.0

    +
  • +
  • +

    CobiGen - XML Plug-in v7.0.0

    +
  • +
  • +

    CobiGen - TypeScript Plug-in v7.1.0

    +
  • +
  • +

    CobiGen - Property Plug-in v7.1.0

    +
  • +
  • +

    CobiGen - Text Merger v7.1.1

    +
  • +
  • +

    CobiGen - JSON Plug-in v7.0.0

    +
  • +
  • +

    CobiGen - HTML Plug-in v7.0.0

    +
  • +
  • +

    CobiGen - Open API Plug-in v7.1.0

    +
  • +
  • +

    CobiGen - FreeMarker Template Engine v7.0.0

    +
  • +
  • +

    CobiGen - Velocity Template Engine v7.0.0

    +
  • +
+
+
+

Authors:

+
+
+
+
* Malte Brunnlieb
+* Jaime Diaz Gonzalez
+* Steffen Holzer
+* Ruben Diaz Martinez
+* Joerg Hohwiller
+* Fabian Kreis
+* Lukas Goerlach
+* Krati Shah
+* Christian Richter
+* Erik Grüner
+* Mike Schumacher
+* Marco Rose
+* Mohamed Ghanmi
+
+
+
+

==Guide to the Reader

+
+
+

Dependent on the intention you are reading this document, you might be most interested in the following chapters:

+
+
+
    +
  • +

    If this is your first contact with CobiGen, you will be interested in the general purpose of CobiGen, in the licensing of CobiGen, as well as in the Shared Service provided for CobiGen. Additionally, there are some general use cases, which are currently implemented and maintained to be used out of the box.

    +
  • +
  • +

    As a user of the CobiGen Eclipse integration, you should focus on the Installation and Usage chapters to get a good introduction about how to use CobiGen in eclipse.

    +
  • +
  • +

    As a user of the Maven integration, you should focus on the Maven configuration chapter, which guides you through the integration of CobiGen into your build configuration.

    +
  • +
  • +

    If you like to adapt the configuration of CobiGen, you have to step deeper into the configuration guide as well as into the plug-in configuration extensions for the Java Plug-in, XML-Plugin, Java Property Plug-in, as well as for the Text-Merger Plug-in.

    +
  • +
  • +

    Finally, if you want to develop your own templates, you will be thankful for helpful links in addition to the plug-ins documentation as referenced in the previous point.

    +
  • +
+
+ +
+

==CobiGen - Code-based incremental Generator

+
+
+
Overview
+
+

CobiGen is a generic incremental generator for end to end code generation tasks, mostly used in Java projects. +Due to a template-based approach, CobiGen generates any set of text-based documents and document fragments.

+
+
+

Input (currently):

+
+
+
    +
  • +

    Java classes

    +
  • +
  • +

    XML-based files

    +
  • +
  • +

    OpenAPI documents

    +
  • +
  • +

    Possibly more inputs like WSDL, which is currently not implemented.

    +
  • +
+
+
+

Output:

+
+
+
    +
  • +

    any text-based document or document fragments specified by templates

    +
  • +
+
+
+
+
Architecture
+
+

CobiGen is build as an extensible framework for incremental code generation. It provides extension points for new input readers which allow reading new input types and converting them to an internally processed model. The model is used to process templates of different kinds to generate patches. The template processing will be done by different template engines. There is an extension point for template engines to support multiple ones as well. Finally, the patch will be structurally merged into potentially already existing code. To allow structural merge on different programming languages, the extension point for structural mergers has been introduced. Here you will see an overview of the currently available extension points and plug-ins:

+
+
+
+
Features and Characteristics
+
+
    +
  • +

    Generate fresh files across all the layers of an application - ready to run.

    +
  • +
  • +

    Add on to existing files merging code into it. E.g. generate new methods into existing java classes or adding nodes to an XML file. Merging of contents into existing files will be done using structural merge mechanisms.

    +
  • +
  • +

    Structural merge mechanisms are currently implemented for Java, XML, Java Property Syntax, JSON, Basic HTML, Text Append, TypeScript.

    +
  • +
  • +

    Conflicts can be resolved individually but automatically by former configuration for each template.

    +
  • +
  • +

    CobiGen provides an Eclipse integration as well as a Maven Integration.

    +
  • +
  • +

    CobiGen comes with an extensive documentation for users and developers.

    +
  • +
  • +

    Templates can be fully tailored to project needs - this is considered as a simple task.

    +
  • +
+
+
+
+
Selection of current and past CobiGen applications
+
+

General applications:

+
+
+
    +
  • +

    Generation of a Java CRUD application based on devonfw architecture including all software-layers on the server plus code for JS-clients (Angular). You can find details here.

    +
  • +
  • +

    Generation of a Java CRUD application according to the Register Factory architecture. Persistence entities are the input for generation.

    +
  • +
  • +

    Generation of builder classes for generating test data for JUnit-Tests. Input are the persistence entities.

    +
  • +
  • +

    Generation of an EXT JS 6 client with full CRUD operations connected to a devon4j server.

    +
  • +
  • +

    Generation of an Angular 6 client with full CRUD operations connected to a devon4j server.

    +
  • +
+
+
+

Project-specific applications in the past:

+
+
+
    +
  • +

    Generation of an additional Java type hierarchy on top of existing Java classes in combination with additional methods to be integrated in the modified classes. Hibernate entities were considered as input as well as output of the generation. The rational in this case, was to generate an additional business object hierarchy on top of an existing data model for efficient business processing.

    +
  • +
  • +

    Generation of hash- and equals-methods as well as copy constructors depending on the field types of the input Java class. Furthermore, CobiGen is able to re-generate these methods/constructors triggered by the user, i.e, when fields have been changed.

    +
  • +
  • +

    Extraction of JavaDoc of test classes and their methods for generating a csv test documentation. This test documentation has been further processed manually in Excel to provide a good overview about the currently available tests in the software system, which enables further human analysis.

    +
  • +
+
+ +
+

==General use cases

+
+
+

In addition to the selection of CobiGen applications introduced before, this chapter provides a more detailed overview about the currently implemented and maintained general use cases. These can be used by any project following a supported reference architecture as e.g. the devonfw or Register Factory.

+
+
+
+
devon4j
+
+

With our templates for devon4j, you can generate a whole CRUD application from a single Entity class. You save the effort for creating, DAOs, Transfer Objects, simple CRUD use cases with REST services and even the client application can be generated.

+
+
+
CRUD server application for devon4j
+
+

For the server, the required files for all architectural layers (Data access, logic, and service layer) can be created based on your Entity class. After the generation, you have CRUD functionality for the entity from bottom to top which can be accessed via a RESTful web service. Details are provided in the devonfw wiki.

+
+
+
+
CRUD client application for devon4ng
+
+

Based on the REST services on the server, you can also generate an Angular client based on devon4ng. With the help of Node.js, you have a working client application for displaying your entities within minutes!

+
+
+
+
Test data Builder for devon4j
+
+

Generating a builder pattern for POJOs to easily create test data in your tests. CobiGen is not only able to generate a plain builder pattern but rather builder, which follow a specific concept to minimize test data generation efforts in your unit tests. The following Person class as an example:

+
+
+
Person class
+
+
public class Person {
+
+    private String firstname;
+    private String lastname;
+    private int birthyear;
+    @NotNull
+    private Address address;
+
+    @NotNull
+    public String getFirstname() {
+        return this.firstname;
+    }
+
+    // additional default setter and getter
+}
+
+
+
+

It is a simple POJO with a validation annotation, to indicate, that firstname should never be null. Creating this object in a test would imply to call every setter, which is kind of nasty. Therefore, the Builder Pattern has been introduced for quite a long time in software engineering, allowing to easily create POJOs with a fluent API. See below.

+
+
+
Builder pattern example
+
+
Person person = new PersonBuilder()
+                .firstname("Heinz")
+                .lastname("Erhardt")
+                .birthyear(1909)
+                .address(
+                    new AddressBuilder().postcode("22222")
+                        .city("Hamburg").street("Luebecker Str. 123")
+                        .createNew())
+                .addChild(
+                    new PersonBuilder()[...].createNew()).createNew();
+
+
+
+

The Builder API generated by CobiGen allows you to set any setter accessible field of a POJO in a fluent way. But in addition, let's assume a test, which should check the birth year as a precondition for any business operation. So specifying all other fields of Person, especially firstname as it is mandatory to enter business code, would not make sense. The test behavior should just depend on the specification of the birth year and on no other data. So we would like to just provide this data to the test.

+
+
+

The Builder classes generated by CobiGen try to tackle this inconvenience by providing the ability to declare default values for any mandatory field due to validation or database constraints.

+
+
+
Builder Outline
+
+
public class PersonBuilder {
+
+    private void fillMandatoryFields() {
+        firstname("lasdjfaöskdlfja");
+        address(new AddressBuilder().createNew());
+    };
+    private void fillMandatoryFields_custom() {...};
+
+    public PersonBuilder firstname(String value);
+    public PersonBuilder lastname(String value);
+    ...
+
+    public Person createNew();
+    public Person persist(EntityManager em);
+    public List<Person> persistAndDuplicate(EntityManager em, int count);
+}
+
+
+
+

Looking at the plotted builder API generated by CobiGen, you will find two private methods. The method fillMandatoryFields will be generated by CobiGen and regenerated every time CobiGen generation will be triggered for the Person class. This method will set every automatically detected field with not null constraints to a default value. However, by implementing fillMandatoryFields_custom on your own, you can reset these values or even specify more default values for any other field of the object. Thus, running new PersonBuilder().birthyear(1909).createNew(); will create a valid object of Person, which is already pre-filled such that it does not influence the test execution besides the fact that it circumvents database and validation issues.

+
+
+

This even holds for complex data structures as indicated by address(new AddressBuilder().createNew());. Due to the use of the AddressBuilder for setting the default value for the field address, also the default values for Address will be set automatically.

+
+
+

Finally, the builder API provides different methods to create new objects.

+
+
+
    +
  • +

    createNew() just creates a new object from the builder specification and returns it.

    +
  • +
  • +

    persist(EntityManager) will create a new object from the builder specification and persists it to the database.

    +
  • +
  • +

    persistAndDuplicate(EntityManager, int) will create the given number of objects from the builder specification and persists all of these. After the initial generation of each builder, you might want to adapt the method body as you will most probably not be able to persist more than one object with the same field assignments to the database due to unique constraints. Thus, please see the generated comment in the method to adapt unique fields accordingly before persisting to the database.

    +
  • +
+
+
+Custom Builder for Business Needs +
+

CobiGen just generates basic builder for any POJO. However, for project needs you probably would like to have even more complex builders, which enable the easy generation of more complex test data which are encoded in a large object hierarchy. Therefore, the generated builders can just be seen as a tool to achieve this. You can define your own business driven builders in the same way as the generated builders, but explicitly focusing on your business needs. Just take this example as a demonstration of that idea:

+
+
+
+
  University uni = new ComplexUniversityBuilder()
+    .withStudents(200)
+    .withProfessors(4)
+    .withExternalStudent()
+    .createNew();
+
+
+
+

E.g. the method withExternalStudent() might create a person, which is a student and is flagged to be an external student. Basing this implementation on the generated builders will even assure that you would benefit from any default values you have set before. In addition, you can even imagine any more complex builder methods setting values driven by your reusable testing needs based on the specific business knowledge.

+
+
+
+
+
+
Register Factory
+
+
CRUD server application
+
+

Generates a CRUD application with persistence entities as inputs. This includes DAOs, TOs, use cases, as well as a CRUD JSF user interface if needed.

+
+
+
+
Test data Builder
+ +
+
+
Test documentation
+
+

Generate test documentation from test classes. The input are the doclet tags of several test classes, which e.g. can specify a description, a cross-reference, or a test target description. The result currently is a csv file, which lists all tests with the corresponding meta-information. Afterwards, this file might be styled and passed to the customer if needed and it will be up-to-date every time!

+
+
+
+
+
+
+

CobiGen

+
+ +
+

==Configuration

+
+
+

CobiGen is maintaining a home directory further referenced in this documentation as $cghome, which is used to maintain temporary or transient data. The home folder is determined with the following location fall-back:

+
+
+
    +
  1. +

    System environment variable COBIGEN_HOME (e.g. C:\project\ide\conf\cobigen-home)

    +
  2. +
  3. +

    .cobigen directory in OS user home (e.g. ~/.cobigen)

    +
  4. +
+
+
+

The actual configuration of CobiGen is maintained by a single folder or jar. The location can be configured with respect to the implemented configuration fall-back mechanism. CobiGen will search for the location of the configuration in the following order:

+
+
+
    +
  1. +

    A configuration jar or directory, which is passed to CobiGen by the Maven or Eclipse integration or any other program using the CobiGen programming interface: +1.1. the Maven integration allows to configure a jar dependency to be included in the currently running classpath (of interest for maven configuration +1.2. the Eclipse integration allows to specify a CobiGen_Templates project in the eclipse workspace

    +
  2. +
  3. +

    The file $cghome/.cobigen exists and the property templates is set to a valid configuration (e.g. templates=C:\project\ide\conf\templates or templates=C:\project\ide\conf\templates.jar) Hint: Check for log entry like Value of property templates in $cghome/.cobigen is invalid to identify an invalid configuration which is not taken up as expected

    +
  4. +
  5. +

    The folder $cghome/templates/CobiGen_Templates exists

    +
  6. +
  7. +

    The lexicographical sorted first configuration jar of the following path pattern $cghome/templates/templates-([^-]+)-(\\d+\\.?)+.jar if exists (e.g. templates-devon4j-2020.04.001)

    +
  8. +
  9. +

    CobiGen will automatically download the latest jar configuration from maven central with groupId com.devonfw.cobigen and artifactId templates-devon4j and take it as described in step 4.

    +
  10. +
+
+
+

Within the configuration jar or directory you will find the following structure:

+
+
+
+
CobiGen_Templates
+ |- templateFolder1
+    |- templates.xml
+ |- templateFolder2
+    |- templates.xml
+ |- context.xml
+
+
+
+

Find some examples here.

+
+
+
Context Configuration
+
+

The context configuration (context.xml) always has the following root structure:

+
+
+
Context Configuration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<contextConfiguration xmlns="http://capgemini.com"
+                      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                      version="1.0">
+    <triggers>
+        ...
+    </triggers>
+</contextConfiguration>
+
+
+
+

The context configuration has a version attribute, which should match the XSD version the context configuration is an instance of. It should not state the version of the currently released version of CobiGen. This attribute should be maintained by the context configuration developers. If configured correctly, it will provide better feedback for the user and thus a higher user experience. Currently there is only the version v1.0. For further versions there will be a changelog later on.

+
+
+
Trigger Node
+
+

As children of the <triggers> node you can define different triggers. By defining a <trigger> you declare a mapping between special inputs and a templateFolder, which contains all templates, which are worth to be generated with the given input.

+
+
+
trigger configuration
+
+
<trigger id="..." type="..." templateFolder="..." inputCharset="UTF-8" >
+    ...
+</trigger>
+
+
+
+
    +
  • +

    The attribute id should be unique within an context configuration. It is necessary for efficient internal processing.

    +
  • +
  • +

    The attribute type declares a specific trigger interpreter, which might be provided by additional plug-ins. A trigger interpreter has to provide an input reader, which reads specific inputs and creates a template object model out of it to be processed by the FreeMarker template engine later on. Have a look at the plug-in’s documentation of your interest and see, which trigger types and thus inputs are currently supported.

    +
  • +
  • +

    The attribute templateFolder declares the relative path to the template folder, which will be used if the trigger gets activated.

    +
  • +
  • +

    The attribute inputCharset (optional) determines the charset to be used for reading any input file.

    +
  • +
+
+
+
+
Matcher Node
+
+

A trigger will be activated if its matchers hold the following formula:

+
+
+

!(NOT || …​ || NOT) && AND && …​ && AND && (OR || …​ || OR)

+
+
+

Whereas NOT/AND/OR describes the accumulationType of a matcher (see below) and e.g. NOT means 'a matcher with accumulationType NOT matches a given input'. Thus additionally to an input reader, a trigger interpreter has to define at least one set of matchers, which are satisfiable, to be fully functional. A <matcher> node declares a specific characteristics a valid input should have.

+
+
+
Matcher Configuration
+
+
<matcher type="..." value="..." accumulationType="...">
+    ...
+</matcher>
+
+
+
+
    +
  • +

    The attribute type declares a specific type of matcher, which has to be provided by the surrounding trigger interpreter. Have a look at the plug-in’s documentation, which also provides the used trigger type for more information about valid matcher and their functionalities.

    +
  • +
  • +

    The attribute value might contain any information necessary for processing the matcher’s functionality. Have a look at the relevant plug-in’s documentation for more detail.

    +
  • +
  • +

    The attribute accumulationType (optional) specifies how the matcher will influence the trigger activation. Valid values are:

    +
    +
      +
    • +

      OR (default): if any matcher of accumulation type OR matches, the trigger will be activated as long as there are no further matchers with different accumulation types

      +
    • +
    • +

      AND: if any matcher with AND accumulation type does not match, the trigger will not be activated

      +
    • +
    • +

      NOT: if any matcher with NOT accumulation type matches, the trigger will not be activated

      +
    • +
    +
    +
  • +
+
+
+
+
Variable Assignment Node
+
+

Finally, a <matcher> node can have multiple <variableAssignment> nodes as children. Variable assignments allow parameterizing the generation by additional values, which will be added to the object model for template processing. The variables declared using variable assignments will be made accessible in the templates.xml as well as in the object model for template processing via the namespace variables.*.

+
+
+
Complete Configuration Pattern
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<contextConfiguration xmlns="http://capgemini.com"
+                      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                      version="1.0">
+    <triggers>
+        <trigger id="..." type="..." templateFolder="...">
+            <matcher type="..." value="...">
+                <variableAssignment type="..." key="..." value="..." />
+            </matcher>
+        </trigger>
+    </triggers>
+</contextConfiguration>
+
+
+
+
    +
  • +

    The attribute type declares the type of variable assignment to be processed by the trigger interpreter providing plug-in. This attribute enables variable assignments with different dynamic value resolutions.

    +
  • +
  • +

    The attribute key declares the namespace under which the resolved value will be accessible later on.

    +
  • +
  • +

    The attribute value might declare a constant value to be assigned or any hint for value resolution done by the trigger interpreter providing plug-in. For instance, if type is regex, then on value you will assign the matched group number by the regex (1, 2, 3…​)

    +
  • +
+
+
+
+
Container Matcher Node
+
+

The <containerMatcher> node is an additional matcher for matching containers of multiple input objects. +Such a container might be a package, which encloses multiple types or — more generically — a model, which encloses multiple elements. A container matcher can be declared side by side with other matchers:

+
+
+
ContainerMatcher Declaration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<contextConfiguration xmlns="http://capgemini.com"
+                      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                      version="1.0">
+    <triggers>
+        <trigger id="..." type="..." templateFolder="..." >
+            <containerMatcher type="..." value="..." retrieveObjectsRecursively="..." />
+            <matcher type="..." value="...">
+                <variableAssignment type="..." variable="..." value="..." />
+            </matcher>
+        </trigger>
+    </triggers>
+</contextConfiguration>
+
+
+
+
    +
  • +

    The attribute type declares a specific type of matcher, which has to be provided by the surrounding trigger interpreter. Have a look at the plug-in’s documentation, which also provides the used trigger type for more information about valid matcher and their functionalities.

    +
  • +
  • +

    The attribute value might contain any information necessary for processing the matcher’s functionality. Have a look at the relevant plug-in’s documentation for more detail.

    +
  • +
  • +

    The attribute retrieveObjectsRecursively (optional boolean) states, whether the children of the input should be retrieved recursively to find matching inputs for generation.

    +
  • +
+
+
+

The semantics of a container matcher are the following:

+
+
+
    +
  • +

    A <containerMatcher> does not declare any <variableAssignment> nodes

    +
  • +
  • +

    A <containerMatcher> matches an input if and only if one of its enclosed elements satisfies a set of <matcher> nodes of the same <trigger>

    +
  • +
  • +

    Inputs, which match a <containerMatcher> will cause a generation for each enclosed element

    +
  • +
+
+
+
+
+
Templates Configuration
+
+

The template configuration (templates.xml) specifies, which templates exist and under which circumstances it will be generated. There are two possible configuration styles:

+
+
+
    +
  1. +

    Configure the template meta-data for each template file by template nodes

    +
  2. +
  3. +

    (since cobigen-core-v1.2.0): Configure templateScan nodes to automatically retrieve a default configuration for all files within a configured folder and possibly modify the automatically configured templates using templateExtension nodes

    +
  4. +
+
+
+

To get an intuition of the idea, the following will initially describe the first (more extensive) configuration style. Such a configuration root structure looks as follows:

+
+
+
Extensive Templates Configuration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<templatesConfiguration xmlns="http://capgemini.com"
+                        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                        version="1.0" templateEngine="FreeMarker">
+    <templates>
+            ...
+    </templates>
+    <increments>
+            ...
+    </increments>
+</templatesConfiguration>
+
+
+
+

The root node <templatesConfiguration> specifies two attributes. The attribute version provides further usability support and will be handled analogous to the version attribute of the context configuration. The optional attribute templateEngine specifies the template engine to be used for processing the templates (since `cobigen-core-4.0.0`). By default it is set to FreeMarker.

+
+
+

The node <templatesConfiguration> allows two different grouping nodes as children. First, there is the <templates> node, which groups all declarations of templates. Second, there is the <increments> node, which groups all declarations about increments.

+
+
+
Template Node
+
+

The <templates> node groups multiple <template> declarations, which enables further generation. Each template file should be registered at least once as a template to be considered.

+
+
+
Example Template Configuration
+
+
<templates>
+    <template name="..." destinationPath="..." templateFile="..." mergeStrategy="..." targetCharset="..." />
+    ...
+</templates>
+
+
+
+

A template declaration consist of multiple information:

+
+
+
    +
  • +

    The attribute name specifies an unique ID within the templates configuration, which will later be reused in the increment definitions.

    +
  • +
  • +

    The attribute destinationPath specifies the destination path the template will be generated to. It is possible to use all variables defined by variable assignments within the path declaration using the FreeMarker syntax ${variables.*}. While resolving the variable expressions, each dot within the value will be automatically replaced by a slash. This behavior is accounted for by the transformations of Java packages to paths as CobiGen has first been developed in the context of the Java world. Furthermore, the destination path variable resolution provides the following additional built-in operators analogue to the FreeMarker syntax:

    +
    +
      +
    • +

      ?cap_first analogue to FreeMarker

      +
    • +
    • +

      ?uncap_first analogue to FreeMarker

      +
    • +
    • +

      ?lower_case analogue to FreeMarker

      +
    • +
    • +

      ?upper_case analogue to FreeMarker

      +
    • +
    • +

      ?replace(regex, replacement) - Replaces all occurrences of the regular expression regex in the variable’s value with the given replacement string. (since cobigen-core v1.1.0)

      +
    • +
    • +

      ?removeSuffix(suffix) - Removes the given suffix in the variable’s value if and only if the variable’s value ends with the given suffix. Otherwise nothing will happen. (since cobigen-core v1.1.0)

      +
    • +
    • +

      ?removePrefix(prefix) - Analogue to ?removeSuffix but removes the prefix of the variable’s value. (since cobigen-core v1.1.0)

      +
    • +
    +
    +
  • +
  • +

    The attribute templateFile describes the relative path dependent on the template folder specified in the trigger to the template file to be generated.

    +
  • +
  • +

    The attribute mergeStrategy (optional) can be optionally specified and declares the type of merge mechanism to be used, when the destinationPath points to an already existing file. CobiGen by itself just comes with a mergeStrategy override, which enforces file regeneration in total. Additional available merge strategies have to be obtained from the different plug-in’s documentations (see here for java, XML, properties, and text). Default: not set (means not mergeable)

    +
  • +
  • +

    The attribute targetCharset (optional) can be optionally specified and declares the encoding with which the contents will be written into the destination file. This also includes reading an existing file at the destination path for merging its contents with the newly generated ones. Default: UTF-8

    +
  • +
+
+
+

(Since version 4.1.0) It is possible to reference external templates (templates defined on another trigger) by using <incrementRef …​> nodes, which are explained here.

+
+
+
+
Template Scan Node
+
+

(since cobigen-core-v1.2.0)

+
+
+

The second configuration style for template meta-data is driven by initially scanning all available templates and automatically configure them with a default set of meta-data. A scanning configuration might look like this:

+
+
+
Example of Template-scan configuration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<templatesConfiguration xmlns="http://capgemini.com"
+                        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                        version="1.2">
+    <templateScans>
+        <templateScan templatePath="templates" templateNamePrefix="prefix_" destinationPath="src/main/java"/>
+    </templateScans>
+</templatesConfiguration>
+
+
+
+

You can specify multiple <templateScan …​> nodes for different templatePaths and different templateNamePrefixes.

+
+
+
    +
  • +

    The name can be specified to later on reference the templates found by a template-scan within an increment. (since cobigen-core-v2.1.)

    +
  • +
  • +

    The templatePath specifies the relative path from the templates.xml to the root folder from which the template scan should be performed.

    +
  • +
  • +

    The templateNamePrefix (optional) defines a common id prefix, which will be added to all found and automatically configured templates.

    +
  • +
  • +

    The destinationPath defines the root folder all found templates should be generated to, whereas the root folder will be a prefix for all found and automatically configured templates.

    +
  • +
+
+
+

A templateScan will result in the following default configuration of templates. For each file found, a new template will be created virtually with the following default values:

+
+
+
    +
  • +

    id: file name without .ftl extension prefixed by templateNamePrefix from template-scan

    +
  • +
  • +

    destinationPath: relative file path of the file found with the prefix defined by destinationPath from template-scan. Furthermore,

    +
    +
      +
    • +

      it is possible to use the syntax for accessing and modifying variables as described for the attribute destinationPath of the template node, besides the only difference, that due to file system restrictions you have to replace all ?-signs (for built-ins) with #-signs.

      +
    • +
    • +

      the files to be scanned, should provide their final file extension by the following file naming convention: <filename>.<extension>.ftl Thus the file extension .ftl will be removed after generation.

      +
    • +
    +
    +
  • +
  • +

    templateFile: relative path to the file found

    +
  • +
  • +

    mergeStrategy: (optional) not set means not mergeable

    +
  • +
  • +

    targetCharset: (optional) defaults to UTF-8

    +
  • +
+
+
+

(Since version 4.1.0) It is possible to reference external templateScan (templateScans defined on another trigger), thanks to using <incrementRef …​> that are explained here.

+
+
+
+
Template Extension Node
+
+

(since cobigen-core-v1.2.0)

+
+
+

Additionally to the templateScan declaration it is easily possible to rewrite specific attributes for any scanned and automatically configured template.

+
+
+
Example Configuration of a TemplateExtension
+
+
<templates>
+    <templateExtension ref="prefix_FooClass.java" mergeStrategy="javamerge" />
+</templates>
+
+<templateScans>
+    <templateScan templatePath="foo" templateNamePrefix="prefix_" destinationPath="src/main/java/foo"/>
+</templateScans>
+
+
+
+

Let's assume that the above example declares a template-scan for the folder foo, which contains a file FooClass.java.ftl in any folder depth. Thus the template scan will automatically create a virtual template declaration with id=prefix_FooClass.java and further default configuration.

+
+
+

Using the templateExtension declaration above will reference the scanned template by the attribute ref and overrides the mergeStrategy of the automatically configured template by the value javamerge. Thus we are able to minimize the needed templates configuration.

+
+
+

(Since version 4.1.0) It is possible to reference external templateExtension (templateExtensions defined on another trigger), thanks to using <incrementRef …​> that are explained here.

+
+
+
+
Increment Node
+
+

The <increments> node groups multiple <increment> nodes, which can be seen as a collection of templates to be generated. An increment will be defined by a unique id and a human readable description.

+
+
+
+
<increments>
+    <increment id="..." description="...">
+        <incrementRef ref="..." />
+        <templateRef ref="..." />
+        <templateScanRef ref="..." />
+    </increment>
+</increments>
+
+
+
+

An increment might contain multiple increments and/or templates, which will be referenced using <incrementRef …​>, <templateRef …​>, or <templateScanRef …​> nodes, respectively. These nodes only declare the attribute ref, which will reference an increment, a template, or a template-scan by its id or name.

+
+
+

(Since version 4.1.0) A special case of <incrementRef …​> is the external incrementRef. By default, <incrementRef …​> are used to reference increments defined in the same templates.xml file. So for example, we could have:

+
+
+
+
<increments>
+    <increment id="incA" description="...">
+        <incrementRef ref="incB" />
+    </increment>
+    <increment id="incB" description="...">
+        <templateRef .... />
+        <templateScan .... />
+    </increment>
+</increments>
+
+
+
+

However, if we want to reference an increment that is not defined inside our templates.xml (an increment defined for another trigger), then we can use external incrementRef as shown below:

+
+
+
+
<increment name="..." description="...">
+    <incrementRef ref="trigger_id::increment_id"/>
+</increment>
+
+
+
+

The ref string is split using as delimiter ::. The first part of the string, is the trigger_id to reference. That trigger contains an increment_id. Currently, this functionality only works when both templates use the same kind of input file.

+
+
+
+
+
Java Template Logic
+
+

since cobigen-core-3.0.0 which is included in the Eclipse and Maven Plugin since version 2.0.0 +In addition, it is possible to implement more complex template logic by custom Java code. To enable this feature, you can simply import the CobiGen_Templates by clicking on Adapt Templates, turn it into a simple maven project (if it is not already) and implement any Java logic in the common maven layout (e.g. in the source folder src/main/java). Each Java class will be instantiated by CobiGen for each generation process. Thus, you can even store any state within a Java class instance during generation. However, there is currently no guarantee regarding the template processing order.

+
+
+

As a consequence, you have to implement your Java classes with a public default (non-parameter) constructor to be used by any template. Methods of the implemented Java classes can be called within templates by the simple standard FreeMarker expression for calling Bean methods: SimpleType.methodName(param1). Until now, CobiGen will shadow multiple types with the same simple name non-deterministically. So please avoid that situation.

+
+
+

Finally, if you would like to do some reflection within your Java code accessing any type of the template project or any type referenced by the input, you should load classes by making use of the classloader of the util classes. CobiGen will take care of the correct classloader building including the classpath of the input source as well as of the classpath of the template project. If you use any other classloader or build it by your own, there will be no guarantee, that generation succeeds.

+
+
+
+
Template Properties
+
+

since cobigen-core-4.0.0 +Using a configuration with template scan, you can make use of properties in templates specified in property files named cobigen.properties next to the templates. The property files are specified as Java property files. Property files can be nested in sub-folders. Properties will be resolved including property shading. Properties defined nearest to the template to be generated will take precedence. +In addition, a cobigen.properties file can be specified in the target folder root (in eclipse plugin, this is equal to the source project root). These properties take precedence over template properties specified in the template folder.

+
+
+ + + + + +
+ + +It is not allowed to override context variables in cobigen.properties specifications as we have not found any interesting use case. This is most probably an error of the template designer, CobiGen will raise an error in this case. +
+
+
+
Multi module support or template target path redirects
+
+

since cobigen-core-4.0.0 +One special property you can specify in the template properties is the property relocate. It will cause the current folder and its sub-folders to be relocated at destination path resolution time. Take the following example:

+
+
+
+
folder
+  - sub1
+    Template.java.ftl
+    cobigen.properties
+
+
+
+

Let the cobigen.properties file contain the line relocate=../sub2/${cwd}. Given that, the relative destination path of Template.java.ftl will be resolved to folder/sub2/Template.java. Compare template scan configuration for more information about basic path resolution. The relocate property specifies a relative path from the location of the cobigen.properties. The ${cwd} placeholder will contain the remaining relative path from the cobigen.properties location to the template file. In this basic example it just contains Template.java.ftl, but it may even be any relative path including sub-folders of sub1 and its templates. +Given the relocate feature, you can even step out of the root path, which in general is the project/maven module the input is located in. This enables template designers to even address, e.g., maven modules located next to the module the input is coming from.

+
+
+
+
+
Basic Template Model
+
+

In addition to what is served by the different model builders of the different plug-ins, CobiGen provides a minimal model based on context variables as well as CobiGen properties. The following model is independent of the input format and will be served as a template model all the time:

+
+
+ +
+
+
+
Plugin Mechanism
+
+

Since cobigen-core 4.1.0, we changed the plug-in discovery mechanism. So far it was necessary to register new plugins programmatically, which introduces the need to let every tool integration, i.e. for eclipse or maven, be dependent on every plug-in, which should be released. This made release cycles take long time as all plug-ins have to be integrated into a final release of maven or eclipse integration.

+
+
+

Now, plug-ins are automatically discovered by the Java Service Loader mechanism from the classpath. This also effects the setup of eclipse and maven integration to allow modular releases of CobiGen in future. We are now able to provide faster rollouts of bug-fixes in any of the plug-ins as they can be released completely independently.

+
+
+
+

Plug-ins

+ +
+

==Java Plug-in +The CobiGen Java Plug-in comes with a new input reader for java artifacts, new java related trigger and matchers, as well as a merging mechanism for Java sources.

+
+
+
Trigger extension
+
+

The Java Plug-in provides a new trigger for Java related inputs. It accepts different representations as inputs (see Java input reader) and provides additional matching and variable assignment mechanisms. The configuration in the context.xml for this trigger looks like this:

+
+
+
    +
  • +

    type 'java'

    +
    +
    Example of a java trigger definition
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables Java elements as inputs.

    +
    +
  • +
+
+
+Matcher types +
+

With the trigger you might define matchers, which restrict the input upon specific aspects:

+
+
+
    +
  • +

    type fqn → full qualified name matching

    +
    +
    Example of a java trigger definition with a full qualified name matcher
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="fqn" value="(.+)\.persistence\.([^\.]+)\.entity\.([^\.]+)">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the full qualified name (fqn) of the declaring input class matches the given regular expression (value).

    +
    +
  • +
  • +

    type 'package' → package name of the input

    +
    +
    Example of a java trigger definition with a package name matcher
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="package" value="(.+)\.persistence\.([^\.]+)\.entity">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the package name (package) of the declaring input class matches the given regular expression (value).

    +
    +
  • +
  • +

    type 'expression'

    +
    +
    Example of a java trigger definition with a package name matcher
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="expression" value="instanceof java.lang.String">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the expression evaluates to true. Valid expressions are

    +
    +
  • +
  • +

    instanceof fqn: checks an 'is a' relation of the input type

    +
  • +
  • +

    isAbstract: checks, whether the input type is declared abstract

    +
  • +
+
+
+
+Container Matcher types +
+

Additionally, the java plugin provides the ability to match packages (containers) as follows:

+
+
+
    +
  • +

    type 'package'

    +
    +
    Example of a java trigger definition with a container matcher for packages
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <containerMatcher type="package" value="com\.example\.app\.component1\.persistence.entity" />
    +</trigger>
    +
    +
    +
    +

    The container matcher matches packages provided by the type com.capgemini.cobigen.javaplugin.inputreader.to.PackageFolder with a regular expression stated in the value attribute. (See containerMatcher semantics to get more information about containerMatchers itself.)

    +
    +
  • +
+
+
+
+Variable Assignment types +
+

Furthermore, it provides the ability to extract information from each input for further processing in the templates. The values assigned by variable assignments will be made available in template and the destinationPath of context.xml through the namespace variables.<key>. The Java Plug-in currently provides two different mechanisms:

+
+
+
    +
  • +

    type 'regex' → regular expression group

    +
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="fqn" value="(.+)\.persistence\.([^\.]+)\.entity\.([^\.]+)">
    +        <variableAssignment type="regex" key="rootPackage" value="1" />
    +        <variableAssignment type="regex" key="component" value="2" />
    +        <variableAssignment type="regex" key="pojoName" value="3" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value of the given regular expression group number to the given key.

+
+
+
    +
  • +

    type 'constant' → constant parameter

    +
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="fqn" value="(.+)\.persistence\.([^\.]+)\.entity\.([^\.]+)">
    +        <variableAssignment type="constant" key="domain" value="restaurant" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value to the key as a constant.

+
+
+
+Java input reader +
+

The CobiGen Java Plug-in implements an input reader for parsed java sources as well as for java Class<?> objects (loaded by reflection). So API user can pass Class<?> objects as well as JavaClass objects for generation. The latter depends on QDox, which will be used for parsing and merging java sources. For getting the right parsed java inputs you can easily use the JavaParserUtil, which provides static functionality to parse java files and get the appropriate JavaClass object.

+
+
+

Furthermore, due to restrictions on both inputs according to model building (see below), it is also possible to provide an array of length two as an input, which contains the Class<?> as well as the JavaClass object of the same class.

+
+
+Template object model +
+

No matter whether you use reflection objects or parsed java classes as input, you will get the following object model for template creation:

+
+
+
    +
  • +

    classObject ('Class' :: Class object of the Java input)

    +
  • +
  • +

    POJO

    +
    +
      +
    • +

      name ('String' :: Simple name of the input class)

      +
    • +
    • +

      package ('String' :: Package name of the input class)

      +
    • +
    • +

      canonicalName ('String' :: Full qualified name of the input class)

      +
    • +
    • +

      annotations ('Map<String, Object>' :: Annotations, which will be represented by a mapping of the full qualified type of an annotation to its value. To gain template compatibility, the key will be stored with '_' instead of '.' in the full qualified annotation type. Furthermore, the annotation might be recursively defined and thus be accessed using the same type of mapping. Example ${pojo.annotations.javax_persistence_Id})

      +
    • +
    • +

      JavaDoc ('Map<String, Object>') :: A generic way of addressing all available JavaDoc doclets and comments. The only fixed variable is comment (see below). All other provided variables depend on the doclets found while parsing. The value of a doclet can be accessed by the doclets name (e.g. ${…​JavaDoc.author}). In case of doclet tags that can be declared multiple times (currently @param and @throws), you will get a map, which you access in a specific way (see below).

      +
      +
        +
      • +

        comment ('String' :: JavaDoc comment, which does not include any doclets)

        +
      • +
      • +

        params ('Map<String,String>' :: JavaDoc parameter info. If the comment follows proper conventions, the key will be the name of the parameter and the value being its description. You can also access the parameters by their number, as in arg0, arg1 etc, following the order of declaration in the signature, not in order of JavaDoc)

        +
      • +
      • +

        throws ('Map<String,String>' :: JavaDoc exception info. If the comment follows proper conventions, the key will be the name of the thrown exception and the value being its description)

        +
      • +
      +
      +
    • +
    • +

      extendedType ('Map<String, Object>' :: The supertype, represented by a set of mappings (since cobigen-javaplugin v1.1.0)

      +
      +
        +
      • +

        name ('String' :: Simple name of the supertype)

        +
      • +
      • +

        canonicalName ('String' :: Full qualified name of the supertype)

        +
      • +
      • +

        package ('String' :: Package name of the supertype)

        +
      • +
      +
      +
    • +
    • +

      implementedTypes ('List<Map<String, Object>>' :: A list of all implementedTypes (interfaces) represented by a set of mappings (since cobigen-javaplugin v1.1.0)

      +
      +
        +
      • +

        interface ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          name ('String' :: Simple name of the interface)

          +
        • +
        • +

          canonicalName ('String' :: Full qualified name of the interface)

          +
        • +
        • +

          package ('String' :: Package name of the interface)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      fields ('List<Map<String, Object>>' :: List of fields of the input class) (renamed since cobigen-javaplugin v1.2.0; previously attributes)

      +
      +
        +
      • +

        field ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          name ('String' :: Name of the Java field)

          +
        • +
        • +

          type ('String' :: Type of the Java field)

          +
        • +
        • +

          canonicalType ('String' :: Full qualified type declaration of the Java field’s type)

          +
        • +
        • +

          'isId' (Deprecated :: boolean :: true if the Java field or its setter or its getter is annotated with the javax.persistence.Id annotation, false otherwise. Equivalent to ${pojo.attributes[i].annotations.javax_persistence_Id?has_content})

          +
        • +
        • +

          JavaDoc (see pojo.JavaDoc)

          +
        • +
        • +

          annotations (see pojo.annotations with the remark, that for fields all annotations of its setter and getter will also be collected)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      methodAccessibleFields ('List<Map<String, Object>>' :: List of fields of the input class or its inherited classes, which are accessible using setter and getter methods)

      +
      +
        +
      • +

        same as for field (but without JavaDoc!)

        +
      • +
      +
      +
    • +
    • +

      methods ('List<Map<String, Object>>' :: The list of all methods, whereas one method will be represented by a set of property mappings)

      +
      +
        +
      • +

        method ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          name ('String' :: Name of the method)

          +
        • +
        • +

          JavaDoc (see pojo.JavaDoc)

          +
        • +
        • +

          annotations (see pojo.annotations)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+

Furthermore, when providing a Class<?> object as input, the Java Plug-in will provide additional functionalities as template methods (deprecated):

+
+
+
    +
  1. +

    isAbstract(String fqn) (Checks whether the type with the given full qualified name is an abstract class. Returns a Boolean value.) (since cobigen-javaplugin v1.1.1) (deprecated)

    +
  2. +
  3. +

    isSubtypeOf(String subType, String superType) (Checks whether the subType declared by its full qualified name is a sub type of the superType declared by its full qualified name. Equals the Java expression subType instanceof superType and so also returns a Boolean value.) (since cobigen-javaplugin v1.1.1) (deprecated)

    +
  4. +
+
+
+
+Model Restrictions +
+

As stated before, both inputs (Class<?> objects and JavaClass objects) have their restrictions according to model building. In the following these restrictions are listed for both models, the ParsedJava Model which results from a JavaClass input and the ReflectedJava Model, which results from a Class<?> input.

+
+
+

It is important to understand, that these restrictions are only present if you work with either Parsed Model OR the Reflected Model. If you use the Maven Build Plug-in or Eclipse Plug-in these two models are merged together so that they can mutually compensate their weaknesses.

+
+
+Parsed Model +
+
    +
  • +

    annotations of the input’s supertype are not accessible due to restrictions in the QDox library. So pojo.methodAccessibleFields[i].annotations will always be empty for super type fields.

    +
  • +
  • +

    annotations' parameter values are available as Strings only (e.g. the Boolean value true is transformed into "true"). This also holds for the Reflected Model.

    +
  • +
  • +

    fields of "supertypes" of the input JavaClass are not available at all. So pojo.methodAccessibleFields will only contain the input type’s and the direct superclass’s fields.

    +
  • +
  • +

    [resolved, since cobigen-javaplugin 1.3.1] field types of supertypes are always canonical. So pojo.methodAccessibleFields[i].type will always provide the same value as pojo.methodAccessibleFields[i].canonicalType (e.g. java.lang.String instead of the expected String) for super type fields.

    +
  • +
+
+
+
+Reflected Model +
+
    +
  • +

    annotations' parameter values are available as Strings only (e.g. the Boolean value true is transformed into "true"). This also holds for the Parsed Model.

    +
  • +
  • +

    annotations are only available if the respective annotation has @Retention(value=RUNTIME), otherwise the annotations are to be discarded by the compiler or by the VM at run time. For more information see RetentionPolicy.

    +
  • +
  • +

    information about generic types is lost. E.g. a field’s/ methodAccessibleField’s type for List<String> can only be provided as List<?>.

    +
  • +
+
+
+
+
+
+
+
Merger extensions
+
+

The Java Plug-in provides two additional merging strategies for Java sources, which can be configured in the templates.xml:

+
+
+
    +
  • +

    Merge strategy javamerge (merges two Java resources and keeps the existing Java elements on conflicts)

    +
  • +
  • +

    Merge strategy javamerge_override (merges two Java resources and overrides the existing Java elements on conflicts)

    +
  • +
+
+
+

In general merging of two Java sources will be processed as follows:

+
+
+

Precondition of processing a merge of generated contents and existing ones is a common Java root class resp. surrounding class. If this is the case this class and all further inner classes will be merged recursively. Therefore, the following Java elements will be merged and conflicts will be resolved according to the configured merge strategy:

+
+
+
    +
  • +

    extends and implements relations of a class: Conflicts can only occur for the extends relation.

    +
  • +
  • +

    Annotations of a class: Conflicted if an annotation declaration already exists.

    +
  • +
  • +

    Fields of a class: Conflicted if there is already a field with the same name in the existing sources. (Will be replaced / ignored in total, also including annotations)

    +
  • +
  • +

    Methods of a class: Conflicted if there is already a method with the same signature in the existing sources. (Will be replaced / ignored in total, also including annotations)

    +
  • +
+
+ +
+

==Property Plug-in +The CobiGen Property Plug-in currently only provides different merge mechanisms for documents written in Java property syntax.

+
+
+
+
Merger extensions
+
+

There are two merge strategies for Java properties, which can be configured in the templates.xml:

+
+
+
    +
  • +

    Merge strategy propertymerge (merges two properties documents and keeps the existing properties on conflicts)

    +
  • +
  • +

    Merge strategy propertymerge_override (merges two properties documents and overrides the existing properties on conflicts)

    +
  • +
+
+
+

Both documents (base and patch) will be parsed using the Java 7 API and will be compared according their keys. Conflicts will occur if a key in the patch already exists in the base document.

+
+ +
+

==XML Plug-in +The CobiGen XML Plug-in comes with an input reader for XML artifacts, XML related trigger and matchers and provides different merge mechanisms for XML result documents.

+
+
+
+
Trigger extension
+
+

(since cobigen-xmlplugin v2.0.0)

+
+
+

The XML Plug-in provides a trigger for XML related inputs. It accepts XML documents as input (see XML input reader) and provides additional matching and variable assignment mechanisms. The configuration in the context.xml for this trigger looks like this:

+
+
+
    +
  • +

    type 'xml'

    +
    +
    Example of a XML trigger definition.
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables XML documents as inputs.

    +
    +
  • +
  • +

    type xpath

    +
    +
    Example of a xpath trigger definition.
    +
    +
    <trigger id="..." type="xpath" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables XML documents as container inputs, which consists of several sub-documents.

    +
    +
  • +
+
+
+Container Matcher type +
+

A ContainerMatcher checks whether the input is a valid container.

+
+
+
    +
  • +

    xpath: type: xpath

    +
    +
    Example of a XML trigger definition with a node name matcher.
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <containerMatcher type="xpath" value="./uml:Model//packagedElement[@xmi:type='uml:Class']">
    +        ...
    +    </containerMatcher>
    +</trigger>
    +
    +
    +
    +

    Before applying any Matcher, this containerMatcher checks if the XML file contains a node uml:Model with a child node packagedElement which contains an attribute xmi:type with the value uml:Class.

    +
    +
  • +
+
+
+
+Matcher types +
+

With the trigger you might define matchers, which restrict the input upon specific aspects:

+
+
+
    +
  • +

    XML: type nodename → document’s root name matching

    +
    +
    Example of a XML trigger definition with a node name matcher
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <matcher type="nodename" value="\D\w*">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the root name of the declaring input document matches the given regular expression (value).

    +
    +
  • +
  • +

    xpath: type: xpath → matching a node with a xpath value

    +
    +
    Example of a xpath trigger definition with a xpath matcher.
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <matcher type="xpath" value="/packagedElement[@xmi:type='uml:Class']">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the XML file contains a node /packagedElement where the xmi:type property equals uml:Class.

    +
    +
  • +
+
+
+
+Variable Assignment types +
+

Furthermore, it provides the ability to extract information from each input for further processing in the templates. The values assigned by variable assignments will be made available in template and the destinationPath of context.xml through the namespace variables.<key>. The XML Plug-in currently provides only one mechanism:

+
+
+
    +
  • +

    type 'constant' → constant parameter

    +
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <matcher type="nodename" value="\D\w*">
    +        <variableAssignment type="constant" key="domain" value="restaurant" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value to the key as a constant.

+
+
+
+XML input reader +
+

The CobiGen XML Plug-in implements an input reader for parsed XML documents. So API user can pass org.w3c.dom.Document objects for generation. For getting the right parsed XML inputs you can easily use the xmlplugin.util.XmlUtil, which provides static functionality to parse XML files or input streams and get the appropriate Document object.

+
+
+Template object +
+

Due to the heterogeneous structure an XML document can have, the XML input reader does not always create exactly the same model structure (in contrast to the java input reader). For example the model’s depth differs strongly, according to its input document. To allow navigational access to the nodes, the model also depends on the document’s element’s node names. All child elements with unique names are directly accessible via their names. In addition it is possible to iterate over all child elements with the help of the child list Children. So it is also possible to access child elements with non unique names.

+
+
+

The XML input reader will create the following object model for template creation (EXAMPLEROOT, EXAMPLENODE1, EXAMPLENODE2, EXAMPLEATTR1,…​ are just used here as examples. Of course they will be replaced later by the actual node or attribute names):

+
+
+
    +
  • +

    ~EXAMPLEROOT~ ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      _nodeName_ ('String' :: Simple name of the root node)

      +
    • +
    • +

      _text_ ('String' :: Concatenated text content (PCDATA) of the root node)

      +
    • +
    • +

      TextNodes ('List<String>' :: List of all the root’s text node contents)

      +
    • +
    • +

      _at_~EXAMPLEATTR1~ ('String' :: String representation of the attribute’s value)

      +
    • +
    • +

      _at_~EXAMPLEATTR2~ ('String' :: String representation of the attribute’s value)

      +
    • +
    • +

      _at_…​

      +
    • +
    • +

      Attributes ('List<Map<String, Object>>' :: List of the root’s attributes

      +
      +
        +
      • +

        at ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          _attName_ ('String' :: Name of the attribute)

          +
        • +
        • +

          _attValue_ ('String' :: String representation of the attribute’s value)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      Children ('List<Map<String, Object>>' :: List of the root’s child elements

      +
      +
        +
      • +

        child ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          …​common element sub structure…​

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      ~EXAMPLENODE1~ ('Map<String, Object>' :: One of the root’s child nodes)

      +
      +
        +
      • +

        …​common element structure…​

        +
      • +
      +
      +
    • +
    • +

      ~EXAMPLENODE2~ ('Map<String, Object>' :: One of the root’s child nodes)

      +
      +
        +
      • +

        …​common element sub structure…​

        +
      • +
      • +

        ~EXAMPLENODE21~ ('Map<String, Object>' :: One of the nodes' child nodes)

        +
        +
          +
        • +

          …​common element structure…​

          +
        • +
        +
        +
      • +
      • +

        ~EXAMPLENODE…​~

        +
      • +
      +
      +
    • +
    • +

      ~EXAMPLENODE…​~

      +
    • +
    +
    +
  • +
+
+
+

In contrast to the java input reader, this XML input reader does currently not provide any additional template methods.

+
+
+
+
+
+
Merger extensions
+
+

The XML plugin uses the LeXeMe merger library to produce semantically correct merge products. The merge strategies can be found in the MergeType enum and can be configured in the templates.xml as a mergeStrategy attribute:

+
+
+
    +
  • +

    mergeStrategy xmlmerge

    +
    +
    Example of a template using the mergeStrategy xmlmerge
    +
    +
    <templates>
    +	<template name="..." destinationPath="..." templateFile="..." mergeStrategy="xmlmerge"/>
    +</templates>
    +
    +
    +
  • +
+
+
+

Currently only the document types included in LeXeMe are supported. +For details on how the merger works, consult the LeXeMe Wiki.

+
+ +
+

==Text Merger Plug-in +The Text Merger Plug-in enables merging result free text documents to existing free text documents. Therefore, the algorithms are also very rudimentary.

+
+
+
+
Merger extensions
+
+

There are currently three main merge strategies that apply for the whole document:

+
+
+
    +
  • +

    merge strategy textmerge_append (appends the text directly to the end of the existing document) +_Remark_: If no anchors are defined, this will simply append the patch.

    +
  • +
  • +

    merge strategy textmerge_appendWithNewLine (appends the text after adding a new line break to the existing document) +_Remark_: empty patches will not result in appending a new line any more since v1.0.1 +Remark: Only suitable if no anchors are defined, otherwise it will simply act as textmerge_append

    +
  • +
  • +

    merge strategy textmerge_override (replaces the contents of the existing file with the patch) +_Remark_: If anchors are defined, override is set as the default mergestrategy for every text block if not redefined in an anchor specification.

    +
  • +
+
+
+
+
Anchor functionality
+
+

If a template contains text that fits the definition of anchor:${documentpart}:${mergestrategy}:anchorend or more specifically the regular expression (.*)anchor:([^:]+):(newline_)?([^:]+)(_newline)?:anchorend\\s*(\\r\\n|\\r|\\n), some additional functionality becomes available about specific parts of the incoming text and the way it will be merged with the existing text. These anchors always change things about the text to come up until the next anchor, text before it is ignored.

+
+
+

If no anchors are defined, the complete patch will be appended depending on your choice for the template in the file templates.xml.

+
+
+

[[anchordef]]

+
+
+Anchor Definition +
+

Anchors should always be defined as a comment of the language the template results in, as you do not want them to appear in your readable version, but cannot define them as FreeMarker comments in the template, or the merger will not know about them. +Anchors will also be read when they are not comments due to the merger being able to merge multiple types of text-based languages, thus making it practically impossible to filter for the correct comment declaration. That is why anchors have to always be followed by line breaks. That way there is a universal way to filter anchors that should have anchor functionality and ones that should appear in the text. +Remark: If the resulting language has closing tags for comments, they have to appear in the next line. +Remark: If you do not put the anchor into a new line, all the text that appears before it will be added to the anchor.

+
+
+
+Document parts +
+

In general, ${documentpart} is an id to mark a part of the document, that way the merger knows what parts of the text to merge with which parts of the patch (e.g. if the existing text contains anchor:table:${}:anchorend that part will be merged with the part tagged anchor:table:${}:anchorend of the patch).

+
+
+

If the same documentpart is defined multiple times, it can lead to errors, so instead of defining table multiple times, use table1, table2, table3 etc.

+
+
+

If a ${documentpart} is defined in the document but not in the patch and they are in the same position, it is processed in the following way: If only the documentparts header, test and footer are defined in the document in that order, and the patch contains header, order and footer, the resulting order will be header, test, order then footer.

+
+
+

The following documentparts have default functionality:

+
+
+
    +
  1. +

    anchor:header:${mergestrategy}:anchorend marks the beginning of a header, that will be added once when the document is created, but not again. +Remark: This is only done once, if you have header in another anchor, it will be ignored

    +
  2. +
  3. +

    anchor:footer:${mergestrategy}:anchorend marks the beginning of a footer, that will be added once when the document is created, but not again. Once this is invoked, all following text will be included in the footer, including other anchors.

    +
  4. +
+
+
+

[[mergestrategies]]

+
+
+
+Mergestrategies +
+

Mergestrategies are only relevant in the patch, as the merger is only interested in how text in the patch should be managed, not how it was managed in the past.

+
+
+
    +
  1. +

    anchor:${documentpart}::anchorend will use the merge strategy from templates.xml, see Merger-Extensions.

    +
  2. +
  3. +

    anchor:${}:${mergestrategy}_newline:anchorend or anchor:${}:newline_${mergestrategy}:anchorend states that a new line should be appended before or after this anchors text, depending on where the newline is (before or after the mergestrategy). anchor:${documentpart}:newline:anchorend puts a new line after the anchors text. +Remark: Only works with appending strategies, not merging/replacing ones. These strategies currently include: appendbefore, append/appendafter

    +
  4. +
  5. +

    anchor:${documentpart}:override:anchorend means that the new text of this documentpart will replace the existing one completely

    +
  6. +
  7. +

    anchor:${documentpart}:appendbefore:anchorend or anchor:${documentpart}:appendafter:anchorend/anchor:${documentpart}:append:anchorend specifies whether the text of the patch should come before the existing text or after.

    +
  8. +
+
+
+
+
+
Usage Examples
+
+General +
+

Below you can see how a file with anchors might look like (using adoc comment tags), with examples of what you might want to use the different functions for.

+
+
+
+
// anchor:header:append:anchorend
+
+Table of contents
+Introduction/Header
+
+// anchor:part1:appendafter:anchorend
+
+Lists
+Table entries
+
+// anchor:part2:nomerge:anchorend
+
+Document Separators
+adoc table definitions
+
+// anchor:part3:override:anchorend
+
+Anything that you only want once but changes from time to time
+
+// anchor:footer:append:anchorend
+
+Copyright Info
+Imprint
+
+
+
+
+Merging +
+

In this section you will see a comparison on what files look like before and after merging

+
+
+override +
+
Before
+
+
// anchor:part:override:anchorend
+Lorem Ipsum
+
+
+
+
Patch
+
+
// anchor:part:override:anchorend
+Dolor Sit
+
+
+
+
After
+
+
// anchor:part:override:anchorend
+Dolor Sit
+
+
+
+
+Appending +
+
Before
+
+
// anchor:part:append:anchorend
+Lorem Ipsum
+// anchor:part2:appendafter:anchorend
+Lorem Ipsum
+// anchor:part3:appendbefore:anchorend
+Lorem Ipsum
+
+
+
+
Patch
+
+
// anchor:part:append:anchorend
+Dolor Sit
+// anchor:part2:appendafter:anchorend
+Dolor Sit
+// anchor:part3:appendbefore:anchorend
+Dolor Sit
+
+
+
+
After
+
+
// anchor:part:append:anchorend
+Lorem Ipsum
+Dolor Sit
+// anchor:part2:appendafter:anchorend
+Lorem Ipsum
+Dolor Sit
+// anchor:part3:appendbefore:anchorend
+Dolor Sit
+Lorem Ipsum
+
+
+
+
+Newline +
+
Before
+
+
// anchor:part:newline_append:anchorend
+Lorem Ipsum
+// anchor:part:append_newline:anchorend
+Lorem Ipsum
+(end of file)
+
+
+
+
Patch
+
+
// anchor:part:newline_append:anchorend
+Dolor Sit
+// anchor:part:append_newline:anchorend
+Dolor Sit
+(end of file)
+
+
+
+
After
+
+
// anchor:part:newline_append:anchorend
+Lorem Ipsum
+
+Dolor Sit
+// anchor:part:append_newline:anchorend
+Lorem Ipsum
+Dolor Sit
+
+(end of file)
+
+
+
+
+
+
+
Error List
+
+
    +
  • +

    If there are anchors in the text, but either base or patch do not start with one, the merging process will be aborted, as text might go missing this way.

    +
  • +
  • +

    Using _newline or newline_ with merge strategies that don’t support it, like override, will abort the merging process. See <<`mergestrategies`,Merge Strategies>> for details.

    +
  • +
  • +

    Using undefined mergestrategies will abort the merging process.

    +
  • +
  • +

    Wrong anchor definitions, for example anchor:${}:anchorend will abort the merging process, see <<`anchordef`,Anchor Definition>> for details.

    +
  • +
+
+ +
+

==JSON Plug-in +At the moment the plug-in can be used to merge generic JSON files depending on the merge strategy defined at the templates.

+
+
+
+
Merger extensions
+
+

There are currently these merge strategies:

+
+
+

Generic JSON Merge

+
+
+
    +
  • +

    merge strategy jsonmerge (add the new code respecting the existing in case of conflict)

    +
  • +
  • +

    merge strategy jsonmerge_override (add the new code overwriting the existent in case of conflict)

    +
    +
      +
    1. +

      JsonArray’s will be ignored / replaced in total

      +
    2. +
    3. +

      JsonObjects in conflict will be processed recursively ignoring adding non existent elements.

      +
    4. +
    +
    +
  • +
+
+
+
+
Merge Process
+
+Generic JSON Merging +
+

The merge process will be:

+
+
+
    +
  1. +

    Add non existent JSON Objects from patch file to base file.

    +
  2. +
  3. +

    For an object existing in both files, non-existent keys from the patch object will be added to the base object. This process will be done recursively for all existing objects.

    +
  4. +
  5. +

    For JSON Arrays existent in both files, the arrays will be just concatenated.

    +
  6. +
+
+ +
+

==TypeScript Plug-in

+
+
+

The TypeScript Plug-in enables merging result TS files to existing ones. This plug-in is used at the moment to generate an Angular2 client with all CRUD functionalities enabled. The plug-in also generates i18n functionality by appending the ES or EN suffix at the end of each word, to indicate to the developer that these words must be translated to the corresponding language. Currently, the generation of the Angular2 client requires an ETO java object as input, so there is no need to implement an input reader for ts artifacts for the moment.

+
+
+
+
+
Trigger Extensions
+
+

As for the Angular2 generation the input is a java object, the trigger expressions (including matchers and variable assignments) are implemented as Java.

+
+
+
+
Merger extensions
+
+

This plugin uses the TypeScript Merger to merge files. There are currently two merge strategies:

+
+
+
    +
  • +

    merge strategy tsmerge (add the new code respecting the existing in case of conflict)

    +
  • +
  • +

    merge strategy tsmerge_override (add the new code overwriting the existent in case of conflict)

    +
  • +
+
+
+

The merge algorithm mainly handles the following AST nodes:

+
+
+
    +
  • +

    ImportDeclaration

    +
    +
      +
    • +

      Will add non existent imports whatever the merge strategy is.

      +
    • +
    • +

      For different imports from same module, the import clauses will be merged.

      +
      +
      +
      import { a } from 'b';
      +import { c } from 'b';
      +//Result
      +import { a, c } from 'b';
      +
      +
      +
    • +
    +
    +
  • +
  • +

    ClassDeclaration

    +
    +
      +
    • +

      Adds non existent base properties from patch based on the name property.

      +
    • +
    • +

      Adds non existent base methods from patch based on the name signature.

      +
    • +
    • +

      Adds non existent annotations to class, properties and methods.

      +
    • +
    +
    +
  • +
  • +

    PropertyDeclaration

    +
    +
      +
    • +

      Adds non existent decorators.

      +
    • +
    • +

      Merge existent decorators.

      +
    • +
    • +

      With override strategy, the value of the property will be replaced by the patch value.

      +
    • +
    +
    +
  • +
  • +

    MethodDeclaration

    +
    +
      +
    • +

      With override strategy, the body will be replaced.

      +
    • +
    • +

      The parameters will be merged.

      +
    • +
    +
    +
  • +
  • +

    ParameterDeclaration

    +
    +
      +
    • +

      Replace type and modifiers with override merge strategy, adding non existent from patch into base.

      +
    • +
    +
    +
  • +
  • +

    ConstructorDeclaration

    +
    +
      +
    • +

      Merged in the same way as Method is.

      +
    • +
    +
    +
  • +
  • +

    FunctionDeclaration

    +
    +
      +
    • +

      Merged in the same way as Method is.

      +
    • +
    +
    +
  • +
+
+
+
+
Input reader
+
+

The TypeScript input reader is based on the one that the TypeScript merger uses. The current extensions are additional module fields indicating from which library each entity originates. +module: null specifies a standard entity or type such as string or number.

+
+
+Object model +
+

To get a first impression of the created object after parsing, let us start with analyzing a small example, namely the parsing of a simple type-orm model written in TypeScript.

+
+
+
+
import {Entity, PrimaryGeneratedColumn, Column} from "typeorm";
+
+@Entity()
+export class User {
+
+    @PrimaryGeneratedColumn()
+    id: number;
+
+    @Column()
+    firstName: string;
+
+    @Column()
+    lastName: string;
+
+    @Column()
+    age: number;
+
+}
+
+
+
+

The returned object has the following structure

+
+
+
+
{
+  "importDeclarations": [
+    {
+      "module": "typeorm",
+      "named": [
+        "Entity",
+        "PrimaryGeneratedColumn",
+        "Column"
+      ],
+      "spaceBinding": true
+    }
+  ],
+  "classes": [
+    {
+      "identifier": "User",
+      "modifiers": [
+        "export"
+      ],
+      "decorators": [
+        {
+          "identifier": {
+            "name": "Entity",
+            "module": "typeorm"
+          },
+          "isCallExpression": true
+        }
+      ],
+      "properties": [
+        {
+          "identifier": "id",
+          "type": {
+            "name": "number",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "PrimaryGeneratedColumn",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        },
+        {
+          "identifier": "firstName",
+          "type": {
+            "name": "string",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "Column",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        },
+        {
+          "identifier": "lastName",
+          "type": {
+            "name": "string",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "Column",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        },
+        {
+          "identifier": "age",
+          "type": {
+            "name": "number",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "Column",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        }
+      ]
+    }
+  ]
+}
+
+
+
+

If we only consider the first level of the JSON response, we spot two lists of imports and classes, providing information about the only import statement and the only User class, respectively. Moving one level deeper we observe that:

+
+
+
    +
  • +

    Every import statement is translated to an import declaration entry in the declarations list, containing the module name, as well as a list of entities imported from the given module.

    +
  • +
  • +

    Every class entry provides besides the class identifier, its decoration(s), modifier(s), as well as a list of properties that the original class contains.

    +
  • +
+
+
+

Note that, for each given type, the module from which it is imported is also given as in

+
+
+
+
  "identifier": {
+    "name": "Column",
+    "module": "typeorm"
+  }
+
+
+
+

Returning to the general case, independently from the given TypeScript file, an object having the following Structure will be created.

+
+
+
    +
  • +

    importDeclarations: A list of import statement as described above

    +
  • +
  • +

    exportDeclarations: A list of export declarations

    +
  • +
  • +

    classes: A list of classes extracted from the given file, where each entry is full of class specific fields, describing its properties and decorator for example.

    +
  • +
  • +

    interfaces: A list of interfaces.

    +
  • +
  • +

    variables: A list of variables.

    +
  • +
  • +

    functions: A list of functions.

    +
  • +
  • +

    enums: A list of enumerations.

    +
  • +
+
+ +
+

==HTML Plug-in

+
+
+

The HTML Plug-in enables merging result HTML files to existing ones. This plug-in is used at the moment to generate an Angular2 client. Currently, the generation of the Angular2 client requires an ETO java object as input, so there is no need to implement an input reader for ts artifacts for the moment.

+
+
+
+
+
Trigger Extensions
+
+

As for the Angular2 generation the input is a java object, the trigger expressions (including matchers and variable assignments) are implemented as Java.

+
+
+
+
Merger extensions
+
+

There are currently two merge strategies:

+
+
+
    +
  • +

    merge strategy html-ng* (add the new code respecting the existing in case of conflict)

    +
  • +
  • +

    merge strategy html-ng*_override (add the new code overwriting the existent in case of conflict)

    +
  • +
+
+
+

The merging of two Angular2 files will be processed as follows:

+
+
+

The merge algorithm handles the following AST nodes:

+
+
+
    +
  • +

    md-nav-list

    +
  • +
  • +

    a

    +
  • +
  • +

    form

    +
  • +
  • +

    md-input-container

    +
  • +
  • +

    input

    +
  • +
  • +

    name (for name attribute)

    +
  • +
  • +

    ngIf

    +
  • +
+
+
+ + + + + +
+ + +Be aware, that the HTML merger is not generic and only handles the described tags needed for merging code of a basic Angular client implementation. For future versions, it is planned to implement a more generic solution. +
+
+ +
+

==OpenAPI Plug-in

+
+
+

The OpenAPI Plug-in enables the support for Swagger files that follow the OpenAPI 3.0 standard as input for CobiGen. Until now, CobiGen was thought to follow a "code first" generation; with this plugin, it can now also follow the "contract first" strategy.

+
+
+
    +
  • +

    Code First

    +
    +
      +
    • +

      Generating from a file with code (Java/XML code in our case)

      +
    • +
    +
    +
  • +
  • +

    Contract First

    +
    +
      +
    • +

      Generation from a full definition file (Swagger in this case). This file contains all the information about entities, operations, etc…​

      +
    • +
    +
    +
  • +
+
+
+ + + + + +
+ + +If you are not a CobiGen developer, you will be more interested in usage. +
+
+
+
+
Trigger Extensions
+
+

The OpenAPI Plug-in provides a new trigger for Swagger OpenAPI 3.0 related inputs. It accepts different representations as inputs (see OpenAPI input reader) and provides additional matching and variable assignment mechanisms. The configuration in the context.xml for this trigger looks like this:

+
+
+
    +
  • +

    type openapi

    +
    +
    Example of a OpenAPI trigger definition
    +
    +
    <trigger id="..." type="openapi" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables OpenAPI elements as inputs.

    +
    +
  • +
+
+
+Matcher type +
+

With the trigger you might define matchers, which restrict the input upon specific aspects:

+
+
+
    +
  • +

    type 'element' → An object

    +
  • +
+
+
+

This trigger will be enabled if the element (Java Object) of the input file is an EntityDef (value).

+
+
+
+Container Matcher type +
+

Additionally, the OpenAPI plug-in provides the ability to match packages (containers) as follows:

+
+
+
    +
  • +

    type 'element'

    +
  • +
+
+
+

The container matcher matches elements as Java Objects, in this case will be always an OpenAPIFile object. (See containerMatcher semantics to get more information about containerMatchers itself.)

+
+
+
+Variable Assignment types +
+

Furthermore, it provides the ability to extract information from each input for further processing in the templates. The values assigned by variable assignments will be made available in template and the destinationPath of context.xml through the namespace variables.<key>. The OpenAPI Plug-in currently provides two different mechanisms:

+
+
+
    +
  • +

    type 'constant' → constant parameter

    +
    +
    +
    <trigger id="..." type="openapi" templateFolder="...">
    +    <containerMatcher type="element" value="OpenApiFile"/>
    +    <matcher type="element" value="EntityDef">
    +        <variableAssignment type="constant" key="rootPackage" value="com.capgemini.demo" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value of the given regular expression group number to the given key. +In this case, the constant type variableAssignment is used to specify the root package where the generator will place the generated files.

+
+
+
    +
  • +

    type 'extension' → Extraction of the info extensions and the extensions of each entity. (the tags that start with "x-…​").

    +
    +
    +
      <trigger id="..." type="openapi" templateFolder="...">
    +    <containerMatcher type="element" value="OpenApiFile"/>
    +    <matcher type="element" value="EntityDef">
    +      <variableAssignment type="extension" key="testingAttribute" value="x-test"/>
    +      <variableAssignment type="extension" key="rootPackage" value="x-rootpackage"/>
    +      <variableAssignment type="extension" key="globalVariable" value="x-global"/>
    +    </matcher>
    +  </trigger>
    +
    +
    +
  • +
+
+
+

The 'extension' variable assignment tries to find 'extensions' (tags that start with "x-…​") on the 'info' +part of your file and on the extensions of each entity. value is the extension that our plug-in will try to find on your OpenAPI file. The result will +be stored in the variable key.

+
+
+

As you will see on the figure below, there are two types of variables: The global ones, that are defined +on the 'info' part of the file, and the local ones, that are defined inside each entity.

+
+
+

Therefore, if you want to define the root package, then you will have to declare it on the 'info' part. +That way, all your entities will be generated under the same root package (e.g. com.devonfw.project).

+
+
+

Swagger at devon4j Project

+
+
+

If no extension with that name was found, then an empty string will be assigned. In the case of not defining the root package, then the code will be generated into src/main/java.

+
+
+
    +
  • +

    type 'property' → property of the Java Object

    +
    +
    +
    <trigger id="..." type="openapi" templateFolder="...">
    +    <containerMatcher type="element" value="OpenApiFile"/>
    +    <matcher type="element" value="EntityDef">
    +        <variableAssignment type="property" key="entityName" value="name" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

The 'property' variable assignment tries to find the property value of the entities defined on the schema. +The value is assigned to the key. The current properties that you will be able to get are:

+
+
+
    +
  1. +

    ComponentDef component: It is an object that stores the configuration of a devon4j component. Its only +property is List<PathDef> paths which contains the paths as the ones shown here.

    +
  2. +
  3. +

    String componentName: Stores the name of the x-component tag for this entity.

    +
  4. +
  5. +

    String name: Name of this entity (as shown on the example above).

    +
  6. +
  7. +

    String description: Description of this entity.

    +
  8. +
  9. +

    List<PropertyDef> properties: List containing all the properties of this entity. PropertyDef is an object that has the next properties:

    +
    +
      +
    1. +

      String name.

      +
    2. +
    3. +

      String type.

      +
    4. +
    5. +

      String format.

      +
    6. +
    7. +

      String description.

      +
    8. +
    9. +

      Boolean isCollection.

      +
    10. +
    11. +

      Boolean isEntity.

      +
    12. +
    13. +

      Boolean required.

      +
    14. +
    15. +

      Map<String, Object> constraints

      +
    16. +
    +
    +
  10. +
+
+
+

If no property with that name was found, then it will be set to null.

+
+
+
+Full trigger configuration +
+
+
<trigger id="..." type="openapi" templateFolder="...">
+    <containerMatcher type="element" value="OpenApiFile">
+    <matcher type="element" value="EntityDef">
+        <variableAssignment type="constant" key="rootPackage" value="com.capgemini.demo" />
+        <variableAssignment type="property" key="component" value="componentName" />
+        <variableAssignment type="property" key="entityName" value="name" />
+    </matcher>
+</trigger>
+
+
+
+
+
+
Input reader
+
+

The CobiGen OpenAPI Plug-in implements an input reader for OpenAPI 3.0 files. The OpenAPI input reader will create the following object model for template creation:

+
+
+
    +
  • +

    model ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      header (HeaderDef :: Definition of the header found at the top of the file)

      +
    • +
    • +

      name ('String' :: Name of the current Entity)

      +
    • +
    • +

      componentName ('String' :: name of the component the entity belongs to)

      +
    • +
    • +

      component (ComponentDef :: Full definition of the component that entity belongs to)

      +
    • +
    • +

      description ('String' :: Description of the Entity)

      +
    • +
    • +

      properties (List<PropertyDef> :: List of properties the entity has)

      +
    • +
    • +

      relationShips (List<RelationShip> :: List of Relationships the entity has)

      +
    • +
    +
    +
  • +
  • +

    HeaderDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      info (InfoDef :: Definition of the info found in the header)

      +
    • +
    • +

      servers (List<ServerDef> :: List of servers the specification uses)

      +
    • +
    +
    +
  • +
  • +

    InfoDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      title ('String' :: The title of the specification)

      +
    • +
    • +

      description ('String' :: The description of the specification)

      +
    • +
    +
    +
  • +
  • +

    ServerDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      URI ('String' :: String representation of the Server location)

      +
    • +
    • +

      description ('String' :: description of the server)

      +
    • +
    +
    +
  • +
  • +

    ComponentDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      paths (List<PathDef> :: List of services for this component)

      +
    • +
    +
    +
  • +
  • +

    PropertyDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      name ('String' :: Name of the property)

      +
    • +
    • +

      type ('String' :: type of the property)

      +
    • +
    • +

      format ('String' :: format of the property (i.e. int64))

      +
    • +
    • +

      isCollection (boolean :: true if the property is a collection, false by default)

      +
    • +
    • +

      isEntity (boolean :: true if the property refers to another entity, false by default)

      +
    • +
    • +

      sameComponent (boolean :: true if the entity that the property refers to belongs to the same component, false by default)

      +
    • +
    • +

      description ('String' :: Description of the property)

      +
    • +
    • +

      required (boolean :: true if the property is set as required)

      +
    • +
    • +

      constraints ('Map<String, Object>')

      +
    • +
    +
    +
  • +
  • +

    RelationShip ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      type ('String' :: type of the relationship (OneToOne, ManyToMany, etc…​))

      +
    • +
    • +

      entity ('String' :: destination entity name)

      +
    • +
    • +

      sameComponent (boolean :: true if the destination entity belongs to the same component of the source entity, false by default)

      +
    • +
    • +

      unidirectional (boolean :: true if the relationship is unidirectional, false by default)

      +
    • +
    +
    +
  • +
  • +

    PathDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      rootComponent ('String' :: the first segment of the path)

      +
    • +
    • +

      version ('String' :: version of the service)

      +
    • +
    • +

      pathURI ('String' :: URI of the path, the segment after the version)

      +
    • +
    • +

      operations (List<OperationDef> :: List of operations for this path)

      +
    • +
    +
    +
  • +
  • +

    OperationDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      type ('String' :: type of the operation (GET, PUT, etc…​))

      +
    • +
    • +

      parameters (List<ParameterDef> :: List of parameters)

      +
    • +
    • +

      operationId ('String' :: name of the operation prototype)

      +
    • +
    • +

      description ('String' :: JavaDoc Description of the operation)

      +
    • +
    • +

      summary (List<PropertyDef> :: JavaDoc operation Summary)

      +
    • +
    • +

      tags ('List<String>' :: List of different tags)

      +
    • +
    • +

      responses (List<ResponseDef> :: Responses of the operation)

      +
    • +
    +
    +
  • +
  • +

    ParameterDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      isSearchCriteria (boolean :: true if the response is an SearchCriteria object)

      +
    • +
    • +

      inPath (boolean :: true if this parameter is contained in the request path)

      +
    • +
    • +

      inQuery (boolean :: true if this parameter is contained in a query)

      +
    • +
    • +

      isBody (boolean :: true if this parameter is a response body)

      +
    • +
    • +

      inHeader (boolean :: true if this parameter is contained in a header)

      +
    • +
    • +

      mediaType ('String' :: String representation of the media type of the parameter)

      +
    • +
    +
    +
  • +
  • +

    ResponseDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      isArray (boolean :: true if the type of the response is an Array)

      +
    • +
    • +

      isPaginated (boolean :: true if the type of the response is paginated)

      +
    • +
    • +

      isVoid (boolean :: true if there is no type/an empty type)

      +
    • +
    • +

      isEntity (boolean :: true if the type of the response is an Entity)

      +
    • +
    • +

      entityRef (EntityDef :: Incomplete EntityDef containing the name and properties of the referenced Entity)

      +
    • +
    • +

      type ('String' :: String representation of the attribute’s value)

      +
    • +
    • +

      code ('String' :: String representation of the HTTP status code)

      +
    • +
    • +

      mediaTypes ('List<String>' :: List of media types that can be returned)

      +
    • +
    • +

      description ('String' :: Description of the response)

      +
    • +
    +
    +
  • +
+
+
+
+
Merger extensions
+
+

This plugin only provides an input reader; there is no support for OpenAPI merging. Nevertheless, the files generated from an OpenAPI file will be Java, XML, JSON, TS, etc…​ so, +for each file to be generated defined at templates.xml, you must set the mergeStrategy for the specific language (javamerge, javamerge_override, jsonmerge, etc…​)

+
+
+
+
<templates>
+    ...
+    <templateExtension ref="${variables.entityName}.java" mergeStrategy="javamerge"/>
+    ...
+    <templateExtension ref="${variables.entityName}dataGrid.component.ts" mergeStrategy="tsmerge"/>
+    ...
+    <templateExtension ref="en.json" mergeStrategy="jsonmerge"/>
+</templates>
+
+
+
+
+
Usage
+
+Writing OpenAPI 3.0 contract file +
+

The Swagger file must follow the OpenAPI 3.0 standard to be readable by CobiGen, otherwise an error will be thrown. +A full documentation about how to follow this standard can be found in the Swagger3 Docs.

+
+
+

The Swagger file must be at the core folder of your devon4j project, like shown below:

+
+
+

Swagger at devon4j Project

+
+
+

To be compatible with CobiGen and devon4j, it must follow some specific configurations. These configurations allow us to avoid redundant definitions, as SearchCriteria and PaginatedList objects are used at the services definitions.

+
+
+
+Paths +
+
    +
  • +

    Just adding the tags property at the end of the service definitions with the items `SearchCriteria` and/or paginated lets CobiGen know that a standard devon4j SearchCriteria and/or PaginatedListTo object must be generated. That way, the Swagger file will be easier to write and even more understandable.

    +
  • +
  • +

    The path must start with the component name, and define an x-component tag with the component name. That way this service will be included into the component services list.

    +
  • +
+
+
+
+
  /componentnamemanagement/v1/entityname/customOperation/:
+    x-component: componentnamemanagement
+    post:
+      summary: 'Summary of the operation'
+      description: Description of the operation.
+      operationId: customOperation
+      responses:
+        '200':
+          description: Description of the response.
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: '#/components/schemas/EntityName'
+      requestBody:
+        $ref: '#/components/requestBodies/EntityName'
+      tags:
+        - searchCriteria
+        - paginated
+
+
+
+

That way, CobiGen will be able to generate the endpoint (REST service) customOperation on componentmanagement. If you do not specify the component to generate to (the x-component tag) then this service will not be taken into account for generation.

+
+
+
+Service based generation +
+

In previous CobiGen versions, we were able to generate code from a contract-first OpenAPI specification only when we defined components like the following:

+
+
+
+
components:
+    schemas:
+        Shop:
+          x-component: shopmanagement
+          description: Entity definition of Shop
+          type: object
+          properties:
+            shopExample:
+              type: string
+              maxLength: 100
+              minLength: 5
+              uniqueItems: true
+
+
+
+

We could not generate services without the definition of those components.

+
+
+

In our current version, we have overcome it, so that now we are able to generate all the services independently. You just need to add an x-component tag with the name of the component that will make use of that service. See here.

+
+
+

A small OpenAPI example defining only services can be found below:

+
+
+
+
openapi: 3.0.0
+servers:
+  - url: 'https://localhost:8081/server/services/rest'
+    description: Just some data
+info:
+  title: Devon Example
+  description: Example of a API definition
+  version: 1.0.0
+  x-rootpackage: com.capgemini.spoc.openapi
+paths:
+  /salemanagement/v1/sale/{saleId}:
+    x-component: salemanagement
+    get:
+      operationId: findSale
+      parameters:
+        - name: saleId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Any
+  /salemanagement/v1/sale/{bla}:
+    x-component: salemanagement
+    get:
+      operationId: findSaleBla
+      parameters:
+        - name: bla
+          in: path
+          required: true
+          schema:
+            type: integer
+            format: int64
+            minimum: 10
+            maximum: 200
+      responses:
+        '200':
+          description: Any
+
+
+
+

Then, the increment that you need to select for generating those services is Crud devon4ng Service based Angular:

+
+
+

Service based generation

+
+
+
+Full example +
+

This example yaml file can be downloaded from here.

+
+
+ + + + + +
+ + +As you will see in the file, "x-component" tags are obligatory if you want to generate components (entities). They have to be defined for each one. +In addition, you will find the global variable "x-rootpackage", which is explained <<,here>>. +
+
+
+
+
openapi: 3.0.0
+servers:
+  - url: 'https://localhost:8081/server/services/rest'
+    description: Just some data
+info:
+  title: Devon Example
+  description: Example of a API definition
+  version: 1.0.0
+  x-rootpackage: com.devonfw.angular.test
+paths:
+  /shopmanagement/v1/shop/{shopId}:
+    x-component: shopmanagement
+    get:
+      operationId: findShop
+      parameters:
+        - name: shopId
+          in: path
+          required: true
+          schema:
+            type: integer
+            format: int64
+            minimum: 0
+            maximum: 50
+      responses:
+        '200':
+          description: Any
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/Shop'
+            text/plain:
+              schema:
+                type: string
+        '404':
+          description: Not found
+  /salemanagement/v1/sale/{saleId}:
+    x-component: salemanagement
+    get:
+      operationId: findSale
+      parameters:
+        - name: saleId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Any
+  /salemanagement/v1/sale/:
+    x-component: salemanagement
+    post:
+      responses:
+        '200':
+          description: Any
+      requestBody:
+        $ref: '#/components/requestBodies/SaleData'
+      tags:
+       - searchCriteria
+  /shopmanagement/v1/shop/new:
+    x-component: shopmanagement
+    post:
+      responses:
+       '200':
+          description: Any
+      requestBody:
+        $ref: '#/components/requestBodies/ShopData'
+components:
+    schemas:
+        Shop:
+          x-component: shopmanagement
+          description: Entity definition of Shop
+          type: object
+          properties:
+            shopExample:
+              type: string
+              maxLength: 100
+              minLength: 5
+              uniqueItems: true
+            sales:
+              type: array # Many to One relationship
+              items:
+                $ref: '#/components/schemas/Sale'
+        Sale:
+          x-component: salemanagement
+          description: Entity definition of Sale
+          type: object
+          properties:
+            saleExample:
+              type: integer
+              format: int64
+              maximum: 100
+              minimum: 0
+          required:
+            - saleExample
+
+    requestBodies:
+        ShopData:
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/Shop'
+          required: true
+        SaleData:
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/Sale'
+          required: true
+
+
+
+
+
+
+
+
+

CobiGen CLI

+
+ +
+

==CobiGen CLI

+
+
+

The command line interface (CLI) for CobiGen enables the generation of code using few commands. This feature allows us to decouple CobiGen from Eclipse.

+
+
+
Install CobiGen CLI
+
+

In order to install the CobiGen CLI you will need to use the devonfw/ide. In a console run devon cobigen.

+
+
+
+
Commands and options
+
+

Using the following command and option you will be able to customize your generation as follows:

+
+
+
    +
  • +

    cobigen, cg: Main entry point of the CLI. If no arguments are passed, man page will be printed.

    +
  • +
  • +

    [generate, g]: Command used for code generation.

    +
    +
      +
    • +

      InputGlob: Glob pattern of the input file or the whole path of the input file from which the code will be generated.

      +
    • +
    • +

      < --increment, -i > : Specifies an increment ID to be generated. You can also search increments by name and CobiGen will output the resultant list. If an exact match found, code generation will happen.

      +
    • +
    • +

      < --template, -t > : specifies a template ID to be generated. You can also search templates by name and CobiGen will output the resultant list.

      +
    • +
    • +

      < --outputRootPath, -out >: The project file path in which you want to generate your code. If no output path is given, CobiGen will use the project of your input file.

      +
    • +
    +
    +
  • +
  • +

    [adapt-templates, a]: Generates a new templates folder next to the CobiGen CLI and stores its location inside a configuration file. After executing this command, the CLI will attempt to use the specified Templates folder.

    +
  • +
  • +

    < --verbose, -v > : Prints debug information, verbose log.

    +
  • +
  • +

    < --help, -h > : Prints man page.

    +
  • +
  • +

    < update, u> : This command compares the versions of the plug-ins in the artificial pom with the latest versions available in Maven Central, allowing the user to update any outdated plug-in versions.

    +
  • +
+
+
+
+
CLI Execution steps:
+
+

CobiGen CLI is installed inside your devonfw distribution. In order to execute it follow the next steps:

+
+
+
    +
  1. +

    Run console.bat, this will open a console.

    +
  2. +
  3. +

    Execute cobigen or cg and the man page should be printed.

    +
  4. +
  5. +

    Use a valid CobiGen input file and run cobigen generate <pathToInputFile>. Note: On the first execution of the CLI, CobiGen will download all the needed dependencies, please be patient.

    +
  6. +
  7. +

    A list of increments will be printed so that you can start the generation.

    +
  8. +
+
+
+

Preview of the man page for generate command:

+
+
+
+Generation path +
+
+
+
+
Examples
+
+

A selection of commands that you can use with the CLI:

+
+
+
    +
  • +

    cobigen generate foo\bar\EmployeeEntity.java: As no output path has been defined, CobiGen will try to find the pom.xml of the current project in order to set the generation root path.

    +
  • +
  • +

    cobigen generate foo\bar\*.java --out other\project: Will retrieve all the Java files on that input folder and generate the code on the path specified by --out.

    +
  • +
  • +

    cg g foo\bar\webServices.yml --increment TO: Performs a string search using TO and will print the closest increments like in the following image:

    +
  • +
+
+
+
+Generation path +
+
+
+
    +
  • +

    cg g foo\bar\webServices.yml -i 1,4,6: Directly generates increments with IDs 1, 4 and 6. CobiGen will not request you any other input.

    +
  • +
  • +

    cg a: Downloads the latest CobiGen_Templates and unpacks them next to the CLI. CobiGen will from now on use these unpacked Templates for generation.

    +
  • +
  • +

    cg a -cl C:\my\custom\location: Downloads the latest CobiGen_Templates and unpacks them in C:\my\custom\location. CobiGen will from now on use these unpacked Templates for generation.

    +
  • +
+
+
+
+
CLI update command
+
+

Example of Update Command :

+
+
+
+Generation path +
+
+
+

Select the plug-ins which you want to update like below :

+
+
+
+Generation path +
+
+
+
+
CLI custom templates
+
+

To use custom templates, it’s necessary to set up a custom configuration path as described here.

+
+
+
+
Troubleshooting
+
+

When generating code from a Java file, CobiGen makes use of Java reflection for generating templates. In order to do that, the CLI needs to find the compiled source code of your project.

+
+
+

If you find an error like Compiled class foo\bar\EmployeeEntity.java has not been found, it means you need to run mvn clean install on the input project so that a new target folder gets created with the needed compiled sources.

+
+
+
+
+
+

Maven Build Integration

+
+ +
+

==Maven Build Integration

+
+
+

For maven integration of CobiGen you can include the following build plugin into your build:

+
+
+
Build integration of CobiGen
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>com.devonfw.cobigen</groupId>
+      <artifactId>cobigen-maven-plugin</artifactId>
+      <version>VERSION-YOU-LIKE</version>
+      <executions>
+        <execution>
+          <id>cobigen-generate</id>
+          <phase>generate-resources</phase>
+          <goals>
+            <goal>generate</goal>
+          </goals>
+        </execution>
+      </executions>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+

Available goals

+
+
+
    +
  • +

    generate: Generates contents configured by the standard non-compiled configuration folder. Thus the generation can be controlled/configured via a location URI of the configuration and the template or increment IDs to be generated for a set of inputs.

    +
  • +
+
+
+

Available phases are all phases, which already provide compiled sources such that CobiGen can perform reflection on it. Thus possible phases are for example package, site.

+
+
+
Provide Template Set
+
+

For generation using the CobiGen maven plug-in, the CobiGen configuration can be provided in two different styles:

+
+
+
    +
  1. +

    By a configurationFolder, which should be available on the file system whenever you are running the generation. The value of configurationFolder should correspond to the maven file path syntax.

    +
    +
    Provide CobiGen configuration by configuration folder (file)
    +
    +
    <build>
    +  <plugins>
    +    <plugin>
    +      ...
    +      <configuration>
    +        <configurationFolder>cobigen-templates</configurationFolder>
    +      </configuration>
    +       ...
    +     </plugin>
    +  </plugins>
    +</build>
    +
    +
    +
  2. +
  3. +

    By maven dependency, whereas the maven dependency should stick on the same conventions as the configuration folder. This explicitly means that it should contain non-compiled resources as well as the context.xml on top-level.

    +
    +
    Provide CobiGen configuration by maven dependency (jar)
    +
    +
    <build>
    +  <plugins>
    +    <plugin>
    +      ...
    +      <dependencies>
    +        <dependency>
    +          <groupId>com.devonfw.cobigen</groupId>
    +          <artifactId>templates-XYZ</artifactId>
    +          <version>VERSION-YOU-LIKE</version>
    +        </dependency>
    +      </dependencies>
    +      ...
    +    </plugin>
    +  </plugins>
    +</build>
    +
    +
    +
    +

    We currently provide a generic deployed version of the templates on the devonfw-nexus for Register Factory (<artifactId>cobigen-templates-rf</artifactId>) and for the devonfw itself (<artifactId>cobigen-templates-devonfw</artifactId>).

    +
    +
  4. +
+
+
+
+
Build Configuration
+
+

Using the following configuration you will be able to customize your generation as follows:

+
+
+
    +
  • +

    <destinationRoot> specifies the root directory the relative destinationPath of CobiGen templates configuration should depend on. Default ${basedir}

    +
  • +
  • +

    <inputPackage> declares a package name to be used as input for batch generation. This refers directly to the CobiGen Java Plug-in container matchers of type package configuration.

    +
  • +
  • +

    <inputFile> declares a file to be used as input. The CobiGen maven plug-in will try to parse this file to get an appropriate input to be interpreted by any CobiGen plug-in.

    +
  • +
  • +

    <increment> specifies an increment ID to be generated. You can specify one single increment with content ALL to generate all increments matching the input(s).

    +
  • +
  • +

    <template> specifies a template ID to be generated. You can specify one single template with content ALL to generate all templates matching the input(s).

    +
  • +
  • +

    <forceOverride> specifies an overriding behavior, which enables non-mergeable resources to be completely rewritten by generated contents. For mergeable resources this flag indicates, that conflicting fragments during merge will be replaced by generated content. Default: false

    +
  • +
  • +

    <failOnNothingGenerated> specifies whether the build should fail if the execution does not generate anything.

    +
  • +
+
+
+
Example for a simple build configuration
+
+
<build>
+  <plugins>
+    <plugin>
+       ...
+      <configuration>
+        <destinationRoot>${basedir}</destinationRoot>
+        <inputPackages>
+          <inputPackage>package.to.be.used.as.input</inputPackage>
+        </inputPackages>
+        <inputFiles>
+          <inputFile>path/to/file/to/be/used/as/input</inputFile>
+        </inputFiles>
+        <increments>
+          <increment>IncrementID</increment>
+        </increments>
+        <templates>
+          <template>TemplateID</template>
+        </templates>
+        <forceOverride>false</forceOverride>
+      </configuration>
+        ...
+    </plugin>
+  </plugins>
+</build>
+
+
+
+
+
Plugin Injection Since v3
+
+

Since version 3.0.0, the plug-in mechanism has changed to support modular releases of the CobiGen plug-ins. Therefore, you need to add all plug-ins to be used for generation. Take the following example to get the idea:

+
+
+
Example of a full configuration including plugins
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>com.devonfw.cobigen</groupId>
+      <artifactId>cobigen-maven-plugin</artifactId>
+      <version>VERSION-YOU-LIKE</version>
+      <executions>
+        ...
+      </executions>
+      <configuration>
+        ...
+      </configuration>
+      <dependencies>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>templates-devon4j</artifactId>
+          <version>2.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>tempeng-freemarker</artifactId>
+          <version>1.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>javaplugin</artifactId>
+          <version>1.6.0</version>
+        </dependency>
+      </dependencies>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+
+
A full example
+
+
    +
  1. +

    A complete maven configuration example

    +
  2. +
+
+
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>com.devonfw.cobigen</groupId>
+      <artifactId>cobigen-maven-plugin</artifactId>
+      <version>6.0.0</version>
+      <executions>
+        <execution>
+          <id>generate</id>
+          <phase>package</phase>
+          <goals>
+            <goal>generate</goal>
+          </goals>
+        </execution>
+      </executions>
+      <configuration>
+        <inputFiles>
+          <inputFile>src/main/java/io/github/devonfw/cobigen/generator/dataaccess/api/InputEntity.java</inputFile>
+        </inputFiles>
+        <increments>
+          <increment>dataaccess_infrastructure</increment>
+          <increment>daos</increment>
+        </increments>
+        <failOnNothingGenerated>false</failOnNothingGenerated>
+      </configuration>
+      <dependencies>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>templates-devon4j</artifactId>
+          <version>2.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>tempeng-freemarker</artifactId>
+          <version>2.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>javaplugin</artifactId>
+          <version>1.6.0</version>
+        </dependency>
+      </dependencies>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+
+
+
+

Eclipse Integration

+
+ +
+

==Installation

+
+
+
+
+

Remark: CobiGen is preinstalled in the devonfw/devon-ide.

+
+
+
+
+
Preconditions
+
+
    +
  • +

    Eclipse 4.x

    +
  • +
  • +

    Java 7 Runtime (for starting eclipse with CobiGen). This is independent from the target version of your developed code.

    +
  • +
+
+
+
+
Installation steps
+
+
    +
  1. +

    Open the eclipse installation dialog
    +menu bar → HelpInstall new Software…​

    +
    +

    01 install new software

    +
    +
  2. +
  3. +

    Open CobiGen’s update site
    +Insert the update site of your interest into the field Work with and press Add …​
    +Unless you know what you are doing we recommend you install every plugin as shown in the picture below.

    +
    + +
    +
  4. +
  5. +

    Follow the installation wizard
    +Select CobiGen Eclipse Plug-inNextNext → accept the license → FinishOKYes

    +
  6. +
  7. +

    Once installed, a new menu entry named "CobiGen" will show up in the Package Explorer’s context menu. In the sub menu there will be the Generate…​ command, which may ask you to update the templates, and then you can start the generation wizard of CobiGen. You can adapt the templates by clicking on Adapt Templates which will give you the possibility to import the CobiGen_Templates automatically so that you can modify them.

    +
  8. +
  9. +

    Checkout (clone) your project’s templates folder or use the current templates released with CobiGen (https://github.com/devonfw/cobigen/tree/master/cobigen-templates) and then choose Import -> General -> Existing Projects into Workspace to import the templates into your workspace.

    +
  10. +
  11. +

    Now you can start generating. To get an introduction of CobiGen try the devon4j templates and work on the devon4j sample application. There you might want to start with Entity objects as a selection to run CobiGen with, which will give you a good overview of what CobiGen can be used for right out of the box in devon4j based development. If you need some more introduction in how to come up with your templates and increments, please be referred to the documentation of the context configuration and the templates configuration

    +
  12. +
+
+
+

Depending on your context configuration, the menu entry Generate…​ may be grayed out or not. See the documentation for more information about valid selections for generation.

+
+
+
+
Updating
+
+

In general updating CobiGen for eclipse is done via the update mechanism of eclipse directly, as shown on image below:

+
+
+

03 update software

+
+
+

Upgrading eclipse CobiGen plug-in to v3.0.0 needs some more attention of the user due to a changed plug-in architecture of CobiGen’s core module and the eclipse integration. Eventually, we were able to provide any plug-in of CobiGen separately as its own eclipse bundle (fragment), which is automatically discovered by the main CobiGen Eclipse plug-in after installation.

+
+ +
+

==Usage

+
+
+

CobiGen has two different generation modes depending on the input selected for generation. The first one is the simple mode, which will be started if the input contains only one input artifact, e.g. for Java an input artifact currently is a Java file. The second one is the batch mode, which will be started if the input contains multiple input artifacts, e.g. for Java this means a list of files. In general this means also that the batch mode might be started when selecting complex models as inputs, which contain multiple input artifacts. The latter scenario has only been covered in the research group, yet.

+
+
+
+
Simple Mode
+
+

Selecting the menu entry Generate…​ the generation wizard will be opened:

+
+
+

generate wizard page1

+
+
+

The left side of the wizard shows all available increments, which can be selected to be generated. Increments are a container like concept encompassing multiple files to be generated, which should result in a semantically closed generation output. +On the right side of the wizard all files are shown, which might be affected by the generation - dependent on the increment selection of files on the left side. The type of modification of each file will be encoded into following color scheme if the files are selected for generation:

+
+
+
    +
  • +

    green: files, which are currently non-existent in the file system. These files will be created during generation

    +
  • +
  • +

    yellow: files, which are currently existent in the file system and which are configured to be merged with generated contents.

    +
  • +
  • +

    red: files, which are currently existent in the file system. These files will be overwritten if manually selected.

    +
  • +
  • +

    no color: files, which are currently existent in the file system. Additionally files, which were deselected and thus will be ignored during generation.

    +
  • +
+
+
+

Selecting an increment on the left side will initialize the selection of all shown files to be generated on the right side, whereas green and yellow categorized files will be selected initially. A manual modification of the pre-selection can be performed by switching to the customization tree using the Customize button on the right lower corner.

+
+
+
+
+

Optional: If you want to customize the generation object model of a Java input class, you might continue with the Next > button instead of finishing the generation wizard. The next generation wizard page is currently available for Java file inputs and lists all non-static fields of the input. Deselecting entries will lead to an adapted object model for generation, such that deselected fields will be removed in the object model for generation. By default all fields will be included in the object model.

+
+
+
+
+

Using the Finish button, the generation will be performed. Finally, CobiGen runs the eclipse internal organize imports and format source code for all generated sources and modified sources. Thus it is possible that — especially during organize imports — a dialog opens if some types could not be determined automatically. This dialog can be easily closed by pressing on Continue. If the generation is finished, the Success! dialog will pop up.

+
+
+
+
Batch mode
+
+

If there are multiple input elements selected, e.g., Java files, CobiGen will be started in batch mode. For the generation wizard dialog this means, that the generation preview will be constrained to the first selected input element. It does not preview the generation for each element of the selection or of a complex input. The selection of the files to be generated will be generated for each input element analogously afterwards.

+
+
+

generate wizard page1 batch

+
+
+

Thus the color encoding differs also a little bit:

+
+
+
    +
  • +

    yellow: files, which are configured to be merged.

    +
  • +
  • +

    red: files, which are not configured with any merge strategy and thus will be created if the file does not exist or overwritten if the file already exists

    +
  • +
  • +

    no color: files, which will be ignored during generation

    +
  • +
+
+
+

Initially all possible files to be generated will be selected.

+
+
+
+
Health Check
+
+

To check whether CobiGen runs appropriately for the selected element(s) the user can perform a Health Check by activating the respective menu entry as shown below.

+
+
+

health check menu entry

+
+
+

The simple Health Check includes 3 checks. As long as any of these steps fails, the Generate menu entry is grayed out.

+
+
+

The first step is to check whether the generation configuration is available at all. If this check fails you will see the following message:

+
+
+

health check no templates

+
+
+

This indicates, that there is no Project named CobiGen_Templates available in the current workspace. To run CobiGen appropriately, it is necessary to have a configuration project named CobiGen_Templates imported into your workspace. For more information see chapter Eclipse Installation.

+
+
+

The second step is to check whether the template project includes a valid context.xml. If this check fails, you will see the following message:

+
+
+

health check invalid config

+
+
+

This means that either your context.xml

+
+
+
    +
  • +

    does not exist (or has another name)

    +
  • +
  • +

    or it is not valid one in any released version of CobiGen

    +
  • +
  • +

    or there is simply no automatic routine of upgrading your context configuration to a valid state.

    +
  • +
+
+
+

If all this is not the case, such as, there is a context.xml, which can be successfully read by CobiGen, you might get the following information:

+
+
+

health check old context

+
+
+

This means that your context.xml is available with the correct name but it is outdated (belongs to an older CobiGen version). In this case just click on Upgrade Context Configuration to get the latest version.

+
+
+
+
+

Remark: This will create a backup of your current context configuration and converts your old configuration to the new format. The upgrade will remove all comments from the file, which could be retrieved later on again from the backup. +If the creation of the backup fails, you will be asked to continue or to abort.

+
+
+
+
+

The third step checks whether there are templates for the selected element(s). If this check fails, you will see the following message:

+
+
+

health check no matching triggers

+
+
+

This indicates that no trigger has been activated which matches the current selection. The reason might be that your selection is faulty or that you imported the wrong template project (e.g. you are working on a devon4j project, but imported the Templates for the Register Factory). If you are a template developer, have a look at the trigger configuration and at the corresponding available plug-in implementations of triggers, like e.g., Java Plug-in or XML Plug-in.

+
+
+

If all the checks are passed you see the following message:

+
+
+

health check all OK

+
+
+

In this case everything is OK and the Generate button is not grayed out anymore so that you are able to trigger it and see the [simple-mode].

+
+
+

In addition to the basic check of the context configuration, you also have the opportunity to perform an Advanced Health Check, which will check all available templates configurations (templates.xml) of path-depth=1 from the configuration project root according to their compatibility.

+
+
+

health check advanced up to date

+
+
+

Analogous to the upgrade of the context configuration, the Advanced Health Check will also provide upgrade functionality for templates configurations if available.

+
+
+
+
Update Templates
+
+

Update Templates: Select an entity file, right-click, and select CobiGen → Update Templates. After that, click Download; a message confirming the successful download will appear.

+
+
+
+
Adapt Templates
+
+

Adapt Templates: Select any file, right-click, then select `cobigen → Adapt Templates`. If the CobiGen templates jar is not available, the templates will be downloaded automatically. If the CobiGen templates are already present, the existing templates in the workspace will be overridden. Click OK; a message confirming the successful template import will appear.

+
+
+

Finally, please change the Java version of the project to 1.8 so that you don’t have any compilation errors.

+
+ +
+

==Logging

+
+
+

If you have any problem with the CobiGen eclipse plug-in, you might want to enable logging to provide more information for further problem analysis. This can be done easily by adding the logback.xml to the root of the CobiGen_templates configuration folder. The file should contain at least the following contents, whereas you should specify an absolute path to the target log file (at the TODO). If you are using the cobigen-templates project, you might have the contents already specified but partially commented.

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<!-- This file is for logback classic. The file contains the configuration for sl4j logging -->
+<configuration>
+    <appender name="FILE" class="ch.qos.logback.core.FileAppender">
+        <file><!-- TODO choose your log file location --></file>
+        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
+            <Pattern>%n%date %d{HH:mm:ss.SSS} [%thread] %-5level %logger - %msg%n
+            </Pattern>
+        </encoder>
+    </appender>
+    <root level="DEBUG">
+        <appender-ref ref="FILE" />
+    </root>
+</configuration>
+
+
+
+
+
+
+

How to

+
+ +
+

==Enterprise Architect client generation

+
+
+

We are going to show you how to generate source code from an Enterprise Architect diagram +using CobiGen.

+
+
+
Prerequisites
+
+

If CobiGen_Templates is not already imported into your workspace, follow the next steps:

+
+
+
    +
  • +

    Click on the Eclipse’s menu File > Import > Existing Projects into Workspace and browse to select the workspaces/main/CobiGen_Templates directory.

    +
  • +
  • +

    Click Finish and you should have the CobiGen_Templates as a new project in Eclipse’s workspace.

    +
  • +
+
+
+

Also verify that you have the latest templates of CobiGen. Your templates folder must contain the crud_java_ea_uml folder. +If you do not see it, please follow the next steps:

+
+
+
    +
  • +

    Download the accumulative patch.

    +
  • +
  • +

    Open the zip file and extract its content inside the root folder of your Devonfw distribution Devon-dist_2.4.0/

    +
  • +
+
+
+

After following those steps correctly, you should have the latest version of the templates ready to use.

+
+
+
+
Generation
+
+

In this tutorial, we are going to generate the entity infrastructure using as input a class diagram, modelled with Enterprise Architect (EA). First, create a class diagram, an example is shown on figure below:

+
+
+
+Eclipse CobiGen generation +
+
+
+

When you are finished, you will have to export that UML diagram into an XMI version 2.1 file. This is the file format that CobiGen understands. See below a figure showing this process:

+
+
+
+Eclipse CobiGen generation +
+
+
+

To open that window, see this tutorial.

+
+
+

After having that exported file, change its extension from xmi to xml. Then create a devon4j project and import the exported file into the core of your devon4j project.

+
+
+

Now we are going to start the generation, right-click your exported file and select CobiGen > Generate, finally select the entity infrastructure increment:

+
+
+
+Eclipse CobiGen generation +
+
+
+

After following all these steps, your generated files should be inside src\main\java folder. If you want an XMI example, you will find it here.

+
+ +
+

==Angular 8 Client Generation

+
+
+

The generation can create a full Angular 8 client using the devon4ng-application-template package located at workspaces/examples folder of the distribution. For more details about this package, please refer here.

+
+
+

Take into account that the TypeScript merging for CobiGen needs Node 6 or higher to be installed at your machine.

+
+
+ + + + + +
+ + +This is a short introduction to the Angular generation. For a deeper tutorial including the generation of the backend, we strongly recommend you to follow this document. +
+
+
+
+
Requisites
+
+

Install yarn globally:

+
+
+
+
npm install -g yarn
+
+
+
+
+
Angular 8 workspace
+
+

The output location of the generation can be defined editing the cobigen.properties file located at crud_angular_client_app/templates folder of the CobiGen_Templates project.

+
+
+
+`cobigen.properties file` +
+
+
+

By default, the output path would be into the devon4ng-application-template folder at the root of the devon4j project parent folder:

+
+
+
+
root/
+ |- devon4ng-application-template/
+ |- devon4j-project-parent/
+   |- core/
+   |- server/
+
+
+
+

However, this path can be changed, for example to src/main/client folder of the devon4j project:

+
+
+

relocate: ./src/main/client/${cwd}

+
+
+
+
root/
+ |- devon4j-project-parent/
+   |- core/
+      |- src
+        |- main
+          |- client
+   |- server/
+
+
+
+

Once the output path is chosen, copy the files of DEVON4NG-APPLICATION-TEMPLATE repository into this output path.

+
+
+
+
Install Node dependencies
+
+

Open a terminal into devon4ng-application-template copied and just run the command:

+
+
+
+
yarn
+
+
+
+

This will start the installation of all node packages needed by the project into the node_modules folder.

+
+
+
+
Generating
+
+

From an ETO object, right click, CobiGen → Generate will show the CobiGen wizard relative to client generation:

+
+
+
+CobiGen Client Generation Wizard +
+
+
+

Check all the increments relative to Angular:

+
+
+ + + + + +
+ + +
+

The Angular devon4j URL increment is only needed for the first generation; however, checking it again on the next generation will not cause any problem.

+
+
+
+
+

As we have done on other generations, we click Next to choose which fields to include in the generation, or simply click Finish to start the generation.

+
+
+
+CobiGen Client Generation Wizard 3 +
+
+
+
+
Routing
+
+

Due to the nature of the TypeScript merger, it is currently not possible to properly merge the array of path objects of the routings in the app.routing.ts file, so this modification should be done by hand in this file. However, the import related to the newly generated component is added.

+
+
+

This would be the generated app-routing.module file:

+
+
+
+
import { Routes, RouterModule } from '@angular/router';
+import { LoginComponent } from './login/login.component';
+import { AuthGuard } from './shared/security/auth-guard.service';
+import { InitialPageComponent } from './initial-page/initial-page.component';
+import { HomeComponent } from './home/home.component';
+import { SampleDataGridComponent } from './sampledata/sampledata-grid/sampledata-grid.component';
+//Routing array
+const appRoutes: Routes = [{
+    path: 'login',
+    component: LoginComponent
+}, {
+    path: 'home',
+    component: HomeComponent,
+    canActivate: [AuthGuard],
+    children: [{
+        path: '',
+        redirectTo: '/home/initialPage',
+        pathMatch: 'full',
+        canActivate: [AuthGuard]
+    }, {
+        path: 'initialPage',
+        component: InitialPageComponent,
+        canActivate: [AuthGuard]
+    }]
+}, {
+    path: '**',
+    redirectTo: '/login',
+    pathMatch: 'full'
+}];
+export const routing = RouterModule.forRoot(appRoutes);
+
+
+
+

Adding the following into the children object of home, will add into the side menu the entry for the component generated:

+
+
+
+
{
+    path: 'sampleData',
+    component: SampleDataGridComponent,
+    canActivate: [AuthGuard],
+}
+
+
+
+
+
import { Routes, RouterModule } from '@angular/router';
+import { LoginComponent } from './login/login.component';
+import { AuthGuard } from './shared/security/auth-guard.service';
+import { InitialPageComponent } from './initial-page/initial-page.component';
+import { HomeComponent } from './home/home.component';
+import { SampleDataGridComponent } from './sampledata/sampledata-grid/sampledata-grid.component';
+//Routing array
+const appRoutes: Routes = [{
+    path: 'login',
+    component: LoginComponent
+}, {
+    path: 'home',
+    component: HomeComponent,
+    canActivate: [AuthGuard],
+    children: [{
+        path: '',
+        redirectTo: '/home/initialPage',
+        pathMatch: 'full',
+        canActivate: [AuthGuard]
+    }, {
+        path: 'initialPage',
+        component: InitialPageComponent,
+        canActivate: [AuthGuard]
+    }, {
+        path: 'sampleData',
+        component: SampleDataGridComponent,
+        canActivate: [AuthGuard],
+    }]
+}, {
+    path: '**',
+    redirectTo: '/login',
+    pathMatch: 'full'
+}];
+export const routing = RouterModule.forRoot(appRoutes);
+
+
+
+
+`APP SideMenu` +
+
+
+
+
JWT Authentication
+
+

If you are using a backend server with JWT Authentication (there is a sample in the workspaces folder called sampleJwt) you have to specify that the Angular application should use this kind of authentication.

+
+
+

By default the variable is set to CSRF but you can change it to JWT by going to the Environment.ts and setting security: 'jwt'.

+
+
+
+
Running
+
+

First of all, run your devon4j java server by right clicking over SpringBootApp.java Run As → Java Application. This will start to run the SpringBoot server. Once you see the Started SpringBoot in XX seconds, the backend is running.

+
+
+
+Starting `SpringBoot` +
+
+
+

Once the server is running, open a Devon console at the output directory defined previously and run:

+
+
+
+
ng serve --open
+
+
+
+

This will run the Angular 8 application at:

+
+
+
+
http://localhost:4200
+
+
+
+
+Running Angular 8 app +
+
+
+

Once finished, the browser will open automatically at the previous localhost URL showing the Angular 8 application, using the credentials set at the devon4j java server you will be able to access.

+
+ +
+

==Ionic client generation

+
+
+

We are going to show you how to generate a CRUD Ionic application from an ETO +using CobiGen.

+
+
+ + + + + +
+ + +This is a short introduction to the Ionic generation. For a deeper tutorial including the generation of the backend, we strongly recommend you to follow this document. +
+
+
+
+
Prerequisites
+
+

Before starting, make sure you already have in your computer:

+
+
+
    +
  • +

    Ionic: by following the steps defined on that page. +It includes installing:

    +
    +
      +
    • +

      NodeJS: We have to use "NPM" for downloading packages.

      +
    • +
    • +

      Ionic CLI.

      +
    • +
    +
    +
  • +
  • +

    Capacitor: Necessary to access to native device features.

    +
  • +
+
+
+

If CobiGen_Templates are not already downloaded, follow the next steps:

+
+
+
    +
  • +

    Right click on any file of your workspace CobiGen > Update Templates and now you are able to start the generation.

    +
  • +
  • +

    If you want to adapt them, click Adapt Templates and you should have the CobiGen_Templates as a new project in Eclipse’s workspace.

    +
  • +
+
+
+

After following those steps correctly, you should have the latest version of the templates ready to use.

+
+
+
+
Generation
+
+

We are going to generate the CRUD into a sample application that we have developed for +testing this functionality. It is present on your workspaces/examples folder (devon4ng-ionic-application-template). If you do not see it, you can clone or download it from here.

+
+
+

After having that sample app, please create a devon4j project and then start implementing the ETO: You will find an example here.

+
+
+

As you can see, TableEto contains 3 attributes: 2 of them are Long and the third one TableState is an enum that you will find +here. +The Ionic generation works fine for any Java primitive attribute (Strings, floats, chars, boolean…​) and enums. However, if you want to use your own objects, you should +override the toString() method, as explained here.

+
+
+

The attributes explained above will be used for generating a page that shows a list. Each item of that list +will show the values of those attributes.

+
+
+

For generating the files:

+
+
+
    +
  • +

    Right click your ETO file and click on CobiGen > Generate as shown on the figure below.

    +
  • +
+
+
+
+Eclipse CobiGen generation +
+
+
+
    +
  • +

    Select the Ionic increments for generating as shown below. Increments group a set of templates for generating +different projects.

    +
    +
      +
    1. +

      Ionic List used for generating the page containing the list.

      +
    2. +
    3. +

      Ionic devon4ng environments is for stating the server path.

      +
    4. +
    5. +

      Ionic i18n used for generating the different language translations for the `translationService` (currently English and Spanish).

      +
    6. +
    7. +

      Ionic routing adds an app-routing.module.ts file to allow navigation similar to the one available in Angular.

      +
    8. +
    9. +

      Ionic theme generates the variables.scss file which contains variables to style the application.

      +
    10. +
    +
    +
  • +
+
+
+
+CobiGen Ionic Wizard +
+
+
+ + + + + +
+ + +By default, the generated files will be placed inside "devon4ng-ionic-application-template", next to the root of your project’s folder. +See the image below to know where they are generated. For changing the generation path and the name of the application go to CobiGen_Templates/crud_ionic_client_app/cobigen.properties. +
+
+
+
+Generation path +
+
+
+

Now that we have generated the files, lets start testing them:

+
+
+
    +
  • +

    First change the SERVER_URL of your application. For doing that, modify src/environments/environments.ts, also modify src/environments/environments.android.ts (android) and src/environments/environments.prod.ts (production) if you want to test in different environments.

    +
  • +
  • +

    Check that there are no duplicated imports. Sometimes there are duplicated imports in src/app/app.module.ts. +This happens because the merger of CobiGen prefers to duplicate rather than to delete.

    +
  • +
  • +

    Run npm install to install all the required dependencies.

    +
  • +
  • +

    Run ionic serve on your console.

    +
  • +
+
+
+

After following all these steps your application should start. However, remember that you will need your server to be running to access the list page.

+
+
+
+
Running it on Android
+
+

To run the application in an android emulated device, it is necessary to have Android Studio and Android SDK. After its installation, the following commands have to be run on your console:

+
+
+
    +
  • +

    npx cap init "name-for-the-app (between quotes)" "id-for-the-app (between quotes)"

    +
  • +
  • +

    ionic build --configuration=android. To use this command, you must add an android build configuration at angular.json

    +
  • +
+
+
+
+
    "build": {
+      ...
+      "configurations": {
+        ...
+        "android": {
+          "fileReplacements": [
+            {
+              "replace": "src/environments/environment.ts",
+              "with": "src/environments/environment.android.ts"
+            }
+          ]
+        },
+      }
+    }
+
+
+
+
    +
  • +

    npx cap add android

    +
  • +
  • +

    npx cap copy

    +
  • +
  • +

    npx cap open android

    +
  • +
+
+
+

The last steps are done in Android Studio: make the project, make the app, build an APK and run it on a device.

+
+
+
+Click on make project +
+
+
+
+click on make app +
+
+
+
+click on build APK +
+
+
+
+click on running device +
+
+ +
+

==Implementing a new Plug-in

+
+
+

New plug-ins can implement an input reader, a merger, a matcher, a trigger interpreter, and/or a template engine as explained here.

+
+
+ + + + + +
+ + +
+

It is discouraged to have cobigen-core dependencies at runtime, except for cobigen-core-api which definitely must be present.

+
+
+
+
+
+
Plugin Activator
+
+

Each plug-in has to have a plug-in activator class implementing the interface GeneratorPluginActivator from the core-api. This class will be used to load the plug-in using the PluginRegistry as explained here. This class implements two methods:

+
+
+
    +
  1. +

    bindMerger() → returns a mapping of merge strategies and its implementation to be registered.

    +
  2. +
  3. +

    bindTriggerInterpreter()→ returns the trigger interpreters to be provided by this plug-in.

    +
  4. +
+
+
+

Both methods create and register instances of mergers and trigger interpreters to be provided by the new plug-in.

+
+
+
+
Adding Trigger Interpreter
+
+

The trigger interpreter has to implement the TriggerInterpreter interface from the core. The trigger interpreter defines the type for the new plugin and creates new InputReader and new Matcher objects.

+
+
+
+
Adding Input Reader
+
+

The input reader is responsible for reading the input object and parsing it into FreeMarker models. The input reader must be implemented for the type of the input file. If there is an existing plugin that has the same file type as input, there will be no need to add a new input reader to the new plug-in.

+
+
+
Input Reader Interface
+
+

The interface needed to add a new input reader is defined at the core. Each new sub plug-in must implement this interface if an input reader is needed for it.

+
+
+

The interface implements the basic methods that an input reader must have, +but if additional methods are required, the developer must add a new interface +that extends the original interface `InputReader.java` from the core-api +and implement that on the sub plug-in.

+
+
+

The methods to be implemented by the input reader of the new sub plugin are:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
MethodReturn TypeDescription

isValidInput(Object input)

boolean

This function will be called if matching triggers or matching templates should be retrieved for a given input object.

createModel(Object input)

Map<String, Object>

This function should create the FreeMarker object model from the given input.

combinesMultipleInputObjects(Object input)

boolean

States whether the given input object combines multiple input objects to be used for generation.

getInputObjects(Object input, Charset inputCharset)

List<Object>

Will return the set of combined input objects if the given input combines multiple input objects.

getTemplateMethods(Object input)

Map<String, Object>

This method returns available template methods from the plugins as Map. If the plugin which corresponds to the input does not provide any template methods an empty Map will be returned.

getInputObjectsRecursively(Object input, Charset inputCharset)

List<Object>

Will return the set of combined input objects if the given input combines multiple input objects.

+
+
+
Model Constants
+
+

The Input reader will create a model for FreeMarker. A FreeMarker model must +have variables to use them at the .ftl template file. Refer to Java Model to see the FreeMarker model example for java input files.

+
+
+
+
Registering the Input Reader
+
+

The input reader is an object that can be retrieved using the correspondent get + method of the trigger interpreter object. The trigger interpreter object is + loaded at the eclipse plug-in using the load plug-in method explained + here. + That way, when the core needs the input reader, only needs to call that getInputReader method.

+
+
+
+
+
Adding Matcher
+
+

The matcher implements the MatcherInterpreter interface from the core-api. It should be implemented to provide a new input matcher. Input matchers are defined as part of a trigger and provide the ability to restrict specific inputs to a set of templates. This restriction is implemented with a MatcherType enum.

+
+
+

E.g JavaPlugin

+
+
+
+
private enum MatcherType {
+    /** Full Qualified Name Matching */
+    FQN,
+    /** Package Name Matching */
+    PACKAGE,
+    /** Expression interpretation */
+    EXPRESSION
+}
+
+
+
+

Furthermore, matchers may provide several variable assignments, which might be +dependent on any information of the matched input and thus should be resolvable +by the defined matcher.

+
+
+

E.g JavaPlugin

+
+
+
+
private enum VariableType {
+    /** Constant variable assignment */
+    CONSTANT,
+    /** Regular expression group assignment */
+    REGEX
+}
+
+
+
+
+
Adding Merger
+
+

The merger is responsible for merging the new output with the existing data in the file if it already exists. It must implement the Merger interface from the core-api. The implementation of the Merger interface must override the following methods:

+
+ +++++ + + + + + + + + + + + + + + + + + + + +
MethodReturn TypeDescription

getType()

String

Returns the type, this merger should handle.

merge(File base, String patch, String targetCharset)

String

Merges the patch into the base file.

+
+

It is important to know that any exception caused by the merger must be thrown as a MergeException from the core-api so that the eclipse-plugin can handle it.

+
+
+
+
Changes since Eclipse / Maven 3.x
+
+

Since version 3.x the Eclipse and Maven plugins of CobiGen utilize the Java ServiceLoader mechanic to find and register plugins at runtime. To enable a new plugin to be discovered by this mechanic the following steps are needed:

+
+
+
    +
  • +

    create the file META-INF/services/com.devonfw.cobigen.api.extension.GeneratorPluginActivator containing just the fully qualified name of the class implementing the GeneratorPluginActivator interface, if the plugin provides a Merger and/or a TriggerInterpreter

    +
  • +
  • +

    create the file META-INF/services/com.devonfw.cobigen.api.extension.TextTemplateEngine containing just the fully qualified name of the class implementing the TextTemplateEngine interface, if provided by the plugin

    +
  • +
  • +

    include META-INF into the target bundle (i.e. the folder META-INF has to be present in the target jar file)

    +
  • +
+
+
+
+
Example: Java Plugin
+
+

The Java plugin provides both a Merger and a TriggerInterpreter. It therefore contains a com.devonfw.cobigen.api.extension.GeneratorPluginActivator file with the following content:

+
+
+
+
com.devonfw.cobigen.javaplugin.JavaPluginActivator
+
+
+
+

This makes the JavaPluginActivator class discoverable by the ServiceLoader at runtime.

+
+
+
+
+
    +
  • +

    to properly include the plugin into the current system and use existing infrastructure, you need to add the plugin as a module in /cobigen/pom.xml (in case of a Merger/TriggerInterpreter providing plugin) and declare that as the plugin’s parent in it’s own pom.xml via

    +
  • +
+
+
+
+
<parent>
+    <groupId>com.devonfw</groupId>
+    <artifactId>cobigen-parent</artifactId>
+    <version>dev-SNAPSHOT</version>
+</parent>
+
+
+
+

or /cobigen/cobigen-templateengines/pom.xml (in case of a Merger/TriggerInterpreter providing plugin) and declare that as the plugin’s parent in it’s own pom.xml via

+
+
+
+
<parent>
+    <groupId>com.devonfw</groupId>
+    <artifactId>cobigen-tempeng-parent</artifactId>
+    <version>dev-SNAPSHOT</version>
+</parent>
+
+
+
+

If the plugin provides both just use the /cobigen/pom.xml.

+
+
+
    +
  • +

    The dependencies of the plugin are included in the bundle

    +
  • +
  • +

    To make the plugin available to the Eclipse plugin it must be included into the current compositeContent.xml and compositeArtifacts.xml files. Both files are located in https://github.com/devonfw/cobigen/tree/gh-pages/updatesite/{test|stable}. To do so, add an <child> entry to the <children> tag in both files and adapt the size attribute to match the new number of references. The location attribute of the new <child> tag needs to be the artifact id of the plugins pom.xml.

    +
  • +
+
+
+
+
Example: Java Plugin
+
+

In case of the Java plugin, the entry is

+
+
+
+
<child location="cobigen-javaplugin"/>
+
+
+
+
+
+
Deployment
+
+

If you want to create a test release of eclipse you need to run the command

+
+
+
+
sh deploy.sh
+
+
+
+

on the cloned CobiGen repository while making sure, that your current version of CobiGen cloned is a snapshot version. This will automatically be detected by the deploy script.

+
+ +
+

==Introduction to CobiGen external plug-ins

+
+
+

Since September of 2019, a major change on CobiGen has taken place. CobiGen is written in Java code and previously, it was very hard for developers to create new plug-ins in other languages.

+
+
+

Creating a new plug-in means:

+
+
+
    +
  • +

    Being able to parse a file in that language.

    +
  • +
  • +

    Create a human readable model that can be used to generate templates (by retrieving properties from the model).

    +
  • +
  • +

    Enable merging files, so that user’s code does not get removed.

    +
  • +
+
+
+

For the Java plug-in it was relatively easy. As you are inside the Java world, you can use multiple utilities or libraries in order to get the AST or to merge Java code. With this new feature, we wanted that behaviour to be possible for any programming language.

+
+
+
+
+
General intuition
+
+

Below you will find a very high level description of how CobiGen worked in previous versions:

+
+
+
+Old CobiGen +
+
+
+

Basically, when a new input file was sent to CobiGen, it called the input reader to create a model of it (see here an example of a model). That model was sent to the template engine.

+
+
+

Afterwards, the template engine generated a new file which had to be merged with the original one. All this code was implemented in Java.

+
+
+

On the new version, we have implemented a handler (ExternalProcessHandler) which connects through TCP/IP connection to a server (normally on localhost:5000). This server can be implemented in any language (.Net, Node.js, Python…​) it just needs to implement a REST API defined here. The most important services are the input reading and merging:

+
+
+
+New CobiGen +
+
+
+

CobiGen acts as a client that sends requests to the server in order to read the input file and create a model. The model is returned to the template engine so that it generates a new file. Finally, it is sent back to get merged with the original file.

+
+
+
+
How to create new external plug-in
+
+

The creation of a new plug-in consists mainly in three steps:

+
+
+
    +
  • +

    Creation of the server (external process).

    +
  • +
  • +

    Creation of a CobiGen plug-in.

    +
  • +
  • +

    Creation of templates.

    +
  • +
+
+
+
Server (external process)
+
+

The server can be programmed in any language that is able to implement REST service endpoints. The API that needs to be implemented is defined with this contract. You can paste the content to https://editor.swagger.io/ for a better look.

+
+
+

We have already created a NestJS server that implements the API defined above. You can find the code here which you can use as an example.

+
+
+

As you can see, the endpoints have the following naming convention: processmanagement/todoplugin/nameOfService where you will have to change todo to your plug-in name (e.g. rustplugin, pyplugin, goplugin…​)

+
+
+

When implementing service getInputModel which returns a model from the input file there are only two restrictions:

+
+
+
    +
  • +

    A path key must be added. Its value can be the full path of the input file or just the file name. It is needed because in CobiGen there is a batch mode, in which you can have multiple input objects inside the same input file. You do not need to worry about batch mode for now.

    +
  • +
  • +

    On the root of your model, for each found key that is an object (defined with brackets [{}]), CobiGen will try to use it as an input object. For example, this could be a valid model:

    +
    +
    +
    {
    +  "path": "example/path/employee.entity.ts",
    +  "classes": [
    +    {
    +      "identifier": "Employee",
    +      "modifiers": [
    +        "export"
    +      ],
    +      "decorators": [
    +        {
    +          "identifier": {
    +            "name": "Entity",
    +            "module": "typeorm"
    +          },
    +          "isCallExpression": true
    +        }
    +      ],
    +      "properties": [
    +        {
    +          "identifier": "id",
    +    ...
    +    ...
    +    ...
    +    }],
    +    "interfaces": [{
    +        ...
    +    }]
    +}
    +
    +
    +
  • +
+
+
+

For this model, CobiGen would use as input objects all the classes and interfaces defined. On the templates we would be able to do model.classes[0].identifier to get the class name. These input objects depend on the language, therefore you can use any key.

+
+
+

In order to test the server, you will have to deploy it on your local machine (localhost), default port is 5000. If that port is already in use, you can deploy it on higher port values (5001, 5002…​). Nevertheless, we explain later the testing process as you need to complete the next step before.

+
+
+ + + + + +
+ + +Your server must accept one argument when running it. The argument will be the port number (as an integer). This will be used for CobiGen in order to handle blocked ports when deploying your server. Check this code to see how we implemented that argument on our NestJS server. +
+
+
+
+
CobiGen plug-in
+
+

You will have to create a new CobiGen plug-in that connects to the server. But do not worry, you will not have to implement anything new. We have a CobiGen plug-in template available, the only changes needed are renaming files and setting some properties on the pom.xml. Please follow these steps:

+
+
+
    +
  • +

    Get the CobiGen plug-in template from here. It is a template repository (new GitHub feature), so you can click on "Use this template" as shown below:

    +
    +
    +Plugin CobiGen template +
    +
    +
  • +
  • +

    Name your repo as cobigen-name-plugin where name can be python, rust, go…​ In our case we will create a nest plug-in. It will create a repo with only one commit which contains all the needed files.

    +
  • +
  • +

    Clone your just created repo and import folder cobigen-todoplugin as a Maven project on any Java IDE, though we recommend you devonfw ;)

    +
    +
    +Import plugin +
    +
    +
  • +
  • +

    Rename all the todoplugin folders, files and class names to nameplugin. In our case nestplugin. In Eclipse you can easily rename by right clicking and then refactor → rename:

    +
  • +
+
+
+
+Rename plugin +
+
+
+ + + + + +
+ + +We recommend you to select all the checkboxes +
+
+
+
+Rename checkbox +
+
+
+
    +
  • +

    Remember to change in src/main/java and src/test/java all the package, files and class names to use your plug-in name. The final result would be:

    +
    +
    +Package structure +
    +
    +
  • +
  • +

    Now we just need to change some strings, this is needed for CobiGen to register all the different plugins (they need unique names). In class TodoPluginActivator (in our case NestPluginActivator), change all the todo to your plug-in name. See below the 3 strings that need to be changed:

    +
    +
    +Plugin activator +
    +
    +
  • +
  • +

    Finally, we will change some properties from the pom.xml of the project. These properties define the server (external process) that is going to be used:

    +
    +
      +
    1. +

      Inside pom.xml, press Ctrl + F to perform a find and replace operation. Replace all todo with your plugin name:

      +
      +
      +Pom properties +
      +
      +
    2. +
    3. +

      We are going to explain the server properties:

      +
      +
        +
      1. +

        artifactId: This is the name of your plug-in, that will be used for a future release on Maven Central.

        +
      2. +
      3. +

        plugin.name: does not need to be changed as it uses the property from the artifactId. When connecting to the server, it will send a request to localhost:5000/{plugin.name}plugin/isConnectionReady, that is why it is important to use an unique name for the plug-in.

        +
      4. +
      5. +

        server.name: This defines how the server executable (.exe) file will be named. This .exe file contains all the needed resources for deploying the server. You can use any name you want.

        +
      6. +
      7. +

        server.version: You will specify here the server version that needs to be used. The .exe file will be named as {server.name}-{server.version}.exe.

        +
      8. +
      9. +

        server.url: This will define from where to download the server. We really recommend you using NPM which is a package manager we know it works well. We explain here how to release the server on NPM. This will download the .exe file for Windows.

        +
      10. +
      11. +

        server.url.linux: Same as before, but this should download the .exe file for Linux systems. If you do not want to implement a Linux version of the plug-in, just use the same URL from Windows or MacOS.

        +
      12. +
      13. +

        server.url.macos: Same as before, but this should download the .exe file for MacOS systems. If you do not want to implement a MacOS version of the plug-in, just use the same URL from Linux or Windows.

        +
      14. +
      +
      +
    4. +
    +
    +
  • +
+
+
+
+
+
Testing phase
+
+

Now that you have finished with the implementation of the server and the creation of a new CobiGen plug-in, we are going to explain how you can test that everything works fine:

+
+
+
    +
  1. +

    Deploy the server on port 5000.

    +
  2. +
  3. +

    Run mvn clean test on the CobiGen-plugin or run the JUnit tests directly on Eclipse.

    +
    +
      +
    1. +

      If the server and the plug-in are working properly, some tests will pass and other will fail (we need to tweak them).

      +
    2. +
    3. +

      If every test fails, something is wrong in your code.

      +
    4. +
    +
    +
  4. +
  5. +

    In order to fix the failing tests, go to src/test/java. The failing tests make use of sample input files that we added in sake of example:

    +
    +
    +Pom properties +
    +
    +
  6. +
+
+
+

Replace those files (on src/test/resources/testdata/unittest/files/…​) with the correct input files for your server.

+
+
+
+
Releasing
+
+

Now that you have already tested that everything works fine, we are going to explain how to release the server and the plug-in.

+
+
+
Release the server
+
+

We are going to use NPM to store the executable of our server. Even though NPM is a package manager for JavaScript, it can be used for our purpose.

+
+
+
    +
  • +

    Get the CobiGen server template from here. It is a template repository (new GitHub feature), so you can click on "Use this template" as shown below:

    +
    +
    +Server CobiGen template +
    +
    +
  • +
  • +

    Name your repo as cobigen-name-server where name can be python, rust, go…​ In our case we will create a nest plug-in. It will create a repo with only one commit which contains all the needed files.

    +
  • +
  • +

    Clone your just created repo and go to folder cobigen-todo-server. It will just contain two files: ExternalProcessContract.yml is the OpenAPI definition which you can modify with your own server definition (this step is optional), and package.json is a file needed for NPM in order to define where to publish this package:

    +
    +
    +
    {
    +  "name": "@devonfw/cobigen-todo-server",
    +  "version": "1.0.0",
    +  "description": "Todo server to implement the input reader and merger for CobiGen",
    +  "author": "CobiGen Team",
    +  "license": "Apache"
    +}
    +
    +
    +
  • +
+
+
+

Those are the default properties. This would push a new package cobigen-todo-server on the devonfw organization, with version 1.0.0. We have no restrictions here, you can use any organization, though we always recommend devonfw.

+
+
+ + + + + +
+ + +Remember to change all the todo to your server name. +
+
+
+
    +
  • +

    Add your executable file into the cobigen-todo-server folder, just like below. As we said previously, this .exe is the server ready to be deployed.

    +
    +
    +
    cobigen-template-server/
    + |- cobigen-todo-server/
    +   |- ExternalProcessContract.yml
    +   |- package.json
    +   |- todoserver-1.0.0.exe
    +
    +
    +
  • +
  • +

    Finally, we have to publish to NPM. If you have never done it, you can follow this tutorial. Basically you need to login into NPM and run:

    +
    +
    +
    cd cobigen-todo-server/
    +npm publish --access=public
    +
    +
    +
  • +
+
+
+ + + + + +
+ + +To release Linux and MacOS versions of your plug-in, just add the suffix into the package name (e.g. @devonfw/cobigen-todo-server-linux) +
+
+
+

That’s it! You have published the first version of your server. Now you just need to modify the properties defined on the pom of your CobiGen plug-in. Please see next section for more information.

+
+
+
+
Releasing CobiGen plug-in
+
+
    +
  • +

    Change the pom.xml to define all the properties. You can see below a final example for nest:

    +
    +
    +
    ...
    +   <groupId>com.devonfw.cobigen</groupId>
    +   <artifactId>nestplugin</artifactId>
    +   <name>CobiGen - Nest Plug-in</name>
    +   <version>1.0.0</version>
    +   <packaging>jar</packaging>
    +   <description>CobiGen - nest Plug-in</description>
    +
    +   <properties>
    +      <!-- External server properties -->
    +      <plugin.name>${project.artifactId}</plugin.name>
    +      <server.name>nestserver</server.name>
    +      <server.version>1.0.0</server.version>
    +      <server.url>https\://registry.npmjs.org/@devonfw/cobigen-nest-server/-/cobigen-nest-server-${server.version}.tgz</server.url>
    +      <server.url.linux>https\://registry.npmjs.org/@devonfw/cobigen-nest-server-linux/-/cobigen-nest-server-linux-${server.version}.tgz</server.url.linux>
    +      <server.url.macos>https\://registry.npmjs.org/@devonfw/cobigen-nest-server-macos/-/cobigen-nest-server-macos-${server.version}.tgz</server.url.macos>
    +...
    +
    +
    +
  • +
  • +

    Deploy to Maven Central.

    +
  • +
+
+
+
+
+
Templates creation
+
+

After following above steps, we now have a CobiGen plug-in that connects to a server (external process) which reads your input files, returns a model and is able to merge files.

+
+
+

However, we need a key component for our plug-in to be useful. We need to define templates:

+
+
+
    +
  • +

    Fork our CobiGen main repository, from here and clone it into your PC. Stay in the master branch and import into your IDE cobigen-templates\templates-devon4j. Set the Java version of the project to 1.8 if needed.

    +
  • +
  • +

    Create a new folder on src/main/templates, this will contain all your templates. You can use any name, but please use underscores as separators. In our case, we created a folder crud_typescript_angular_client_app to generate an Angular client from a TypeORM entity (NodeJS entity).

    +
    +
    +Templates project +
    +
    +
  • +
  • +

    Inside your folder, create a templates folder. As you can see below, the folder structure of the generated files starts here (the sources). Also we need a configuration file templates.xml that should be on the same level as templates/ folder. For now, copy and paste a templates.xml file from any of the templates folder.

    +
    +
    +Templates project +
    +
    +
  • +
  • +

    Start creating your own templates. Our default templates language is Freemarker, but you can also use Velocity. Add the extension to the file (.ftl) and start developing templates! You can find useful documentation here.

    +
  • +
  • +

    After creating all the templates, you need to modify context.xml which is located on the root of src/main/templates. There you need to define a trigger, which is used for CobiGen to know when to trigger a plug-in. I recommend you to copy and paste the following trigger:

    +
    +
    +
      <trigger id="crud_typescript_angular_client_app" type="nest" templateFolder="crud_typescript_angular_client_app">
    +    <matcher type="fqn" value="([^\.]+).entity.ts">
    +      <variableAssignment type="regex" key="entityName" value="1"/>
    +      <variableAssignment type="regex" key="component" value="1"/>
    +      <variableAssignment type="constant" key="domain" value="demo"/>
    +    </matcher>
    +  </trigger>
    +
    +
    +
  • +
  • +

    Change templateFolder to your templates folder name. id you can use any, but it is recommendable to use the same as the template folder name. type is the TRIGGER_TYPE we defined above on the NestPluginActivator class. On matcher just change the value: ([^\.]+).entity.ts means that we will only accept input files that contain anyString.entity.ts. This improves usability, so that users only generate using the correct input files. You will find more info about variableAssignment here.

    +
  • +
  • +

    Finally, it is time to configure templates.xml. It is needed for organizing templates into increments, please take a look into this documentation.

    +
  • +
+
+
+
Testing templates
+
+
    +
  • +

    When you have finished your templates you will like to test them. On the templates-devon4j pom.xml remove the SNAPSHOT from the version (in our case the version will be 3.1.8). Run mvn clean install -DskipTests on the project. We skip tests because you need special permissions to download artifacts from our Nexus. Remember the version that has just been installed:

    +
    +
    +Templates snapshot version +
    +
    +
  • +
+
+
+ + + + + +
+ + +We always recommend using the devonfw console, which already contains a working Maven version. +
+
+
+
    +
  • +

    Now we have your last version of the templates ready to be used. We need to use that latest version in CobiGen. We will use the CobiGen CLI that you will find in your cloned repo, at cobigen-cli/cli. Import the project into your IDE.

    +
  • +
  • +

    Inside the project, go to src/main/resources/pom.xml. This pom.xml is used on runtime in order to install all the CobiGen plug-ins and templates. Add there your latest templates version and the previously created plug-in:

    +
    +
    +CLI pom +
    +
    +
  • +
  • +

    Afterwards, run mvn clean install -DskipTests and CobiGen will get your plug-ins. Now you have three options to test templates:

    +
    +
      +
    1. +

      Using Eclipse run as:

      +
      +
        +
      1. +

        Inside Eclipse, you can run the CobiGen-CLI as a Java application. Right click class CobiGenCLI.java → run as → run configurations…​ and create a new Java application as shown below:

        +
        +
        +Create configuration +
        +
        +
      2. +
      3. +

        That will create a CobiGenCLI configuration where we can set arguments to the CLI. Let’s first begin with showing the CLI version, which should print a list of all plug-ins, including ours.

        +
        +
        +Run version +
        +
        +
        +
        +
         ...
        + name:= propertyplugin version = 2.0.0
        + name:= jsonplugin version = 2.0.0
        + name:= templates-devon4j version = 3.1.8
        + name:= nestplugin version = 1.0.0
        + ...
        +
        +
        +
      4. +
      5. +

        If that worked, now you can send any arguments to the CLI in order to generate with your templates. Please follow this guide that explains all the CLI commands.

        +
      6. +
      +
      +
    2. +
    3. +

      Modify the already present JUnit tests on the CLI project: They test the generation of templates from multiple plug-ins, you can add your own tests and input files.

      +
    4. +
    5. +

      Use the CLI jar to execute commands:

      +
      +
        +
      1. +

        The mvn clean install -DskipTests command will have created a Cobigen.jar inside your target folder (cobigen-cli/cli/target). Open the jar with any unzipper and extract to the current location class-loader-agent.jar, cobigen.bat and cg.bat:

        +
        +
        +Extract files +
        +
        +
      2. +
      3. +

        Now you can run any CobiGen CLI commands using a console. This guide explains all the CLI commands.

        +
        +
        +Run CLI +
        +
        +
      4. +
      +
      +
    6. +
    +
    +
  • +
+
+ +
+
+
+
devon4net CobiGen Guide
+
+
Overview
+
+

In this guide we will explain how to generate a new WebAPI project from an OpenAPI 3.0.0 specification. This means that we are going to use a “contract first” strategy. This is going to be possible due to these types of files that contain all the information about entities, operations, etc…

+
+
+

In order to make it work we are using CobiGen, a powerful tool for generating source code. CobiGen allows users to generate all the structure and code of the components, helping to save a lot of time otherwise wasted on repetitive tasks.

+
+
+
+
Getting things ready
+
+devonfw-IDE +
+

First, we will install the devonfw-IDE. It is a tool that will setup your IDE within minutes. Please follow the install guide here.

+
+
+
+devon4net Templates +
+

We are going to use the template of devon4net as a base to generate all the code, so what we have to do now is to download said template using the following steps.

+
+
+

First of all you have to set up all the environment for .NET, you can do this using the following tutorial. Next we are going to create a new folder where we want to have the WebAPI project, lastly we are going to open the terminal there.

+
+
+

Type the following:

+
+
+
+
`dotnet new -i Devon4Net.WebAPI.Template`
+
+
+
+

and then:

+
+
+
+
`dotnet new Devon4NetAPI`
+
+
+
+
+OpenAPI File +
+

In order to let CobiGen generate all the files, we first have to make some modifications to our OpenAPI file.

+
+
+

It is obligatory to put the “x-rootpackage” tag to indicate where CobiGen will place the generated files as well as the "x-component" tags for each component, keep in mind that due to CobiGen’s limitations each component must have its own entity.

+
+
+

You can read more information about how to configure your OpenAPI file and a working example here.

+
+
+
+
+
Generating files
+
+

CobiGen allows us to generate the files in two different ways. One of them is using Eclipse, which can be done through its graphical interface. The other way to generate the code is using the CobiGen CLI tool.

+
+
+Generating files through Eclipse +
+

In order to generate the files using Eclipse we need to follow some simple steps.

+
+
+

First we are going to import our basic devon4net WebAPI Project into Eclipse. To do so, open Eclipse with the “eclipse-main.bat” file that can be found in the devon distribution root folder. Once we are inside Eclipse, we go to File > Open projects from file system…​ and, under "Directory", search for your project.

+
+
+
+CobiGen +
+
+
+

Next we copy our OpenAPI file into the root folder of the project.

+
+
+
+CobiGen +
+
+
+

And then we right click on OpenAPI file and then select CobiGen > Generate…​ It will display a window like this:

+
+
+
+CobiGen +
+
+
+

To select all .NET features choose CRUD devon4net Server otherwise you can select only those that interest you.

+
+
+
+CobiGen +
+
+
+

Once you select all the files that you want to generate, click on the “Finish” button to generate all the source code.

+
+
+
+Generating files through CobiGen CLI +
+

In order to generate the files using the CobiGen CLI it is needed to do the following steps:

+
+
+
    +
  1. +

    Go to devonfw distribution folder

    +
  2. +
  3. +

    Run console.bat, this will open a console.

    +
  4. +
  5. +

    Go to the folder you downloaded the devon4net template and your yml file.

    +
  6. +
  7. +

    Run the command:

    +
    +
    +
    `cobigen generate {yourOpenAPIFile}.yml`
    +
    +
    +
  8. +
  9. +

    A list of increments will be printed so that you can start the generation. The CRUD devon4net Server increment has to be selected.

    +
  10. +
+
+
+
+
+
Configuration
+
+Data base +
+

CobiGen generates an empty context that has to be filled in manually in order to be able to work with the database. The context can be found in [Project_Name]/Devon4Net.WebAPI.Implementation/Domain/Database/CobigenContext.cs.

+
+
+
+CobiGen +
+
+
+
+Run the application +
+

After the configuration of the database, open a terminal in path: [Project_Name]/Devon4Net.Application.WebAPI and then type:

+
+
+
+
`dotnet run`
+
+
+
+

This will deploy our application in our localhost with the port 8082, so when you click here (https://localhost:8082/swagger) you can see, in swagger, all the services and the data model.

+
+ +
+

==How to update CobiGen

+
+
+

In order to update CobiGen from our devonfw distribution, we have two options:

+
+
+
    +
  • +

    Open Eclipse, click on Help → Check for updates

    +
  • +
+
+
+
+Check updates +
+
+
+
    +
  • +

    Select all the CobiGen plugins listed and click on Next.

    +
  • +
+
+
+
+All the updates +
+
+
+

If this option is not working properly, then you can try the second option:

+
+
+
    +
  • +

    Open Eclipse, click on Help → About Eclipse IDE:

    +
  • +
+
+
+
+About Eclipse +
+
+
+
    +
  • +

    Click on Installation details:

    +
  • +
+
+
+
+Installation details +
+
+
+
    +
  • +

    Select all the CobiGen plugins and click on Update:

    +
  • +
+
+
+
+All updates details +
+
+
+

After the update process finishes, remember to restart Eclipse.

+
+
+
+
+
+
Updating templates:
+
+

To update your CobiGen templates to the latest version, you just need to do one step:

+
+
+
    +
  • +

    Right click any file on your package explorer, click on CobiGen → Update templates, then click on download:

    +
  • +
+
+
+
+Update templates +
+
+
+

Now you will have the latest templates ready!

+
+
+

Unresolved include directive in modules/ROOT/pages/cobigen.wiki/master-cobigen.adoc - include::howto-Cobigen-CLI-generation.adoc[]

+
+ +
+

==End to End POC Code generation using Entity class +This article helps to create a sample application using cobigen.

+
+
+
+
Prerequisites
+
+

Download and install devonfw IDE here.

+
+
+
+
Steps to create a Sample Project using Cobigen
+
+

The HOW_TO is divided in 2 parts:

+
+
+
    +
  1. +

    BE-Back End generator (DB + DAO + services) – CONTRACT FIRST APPROACH

    +
  2. +
  3. +

    FE-Front End generator (Web App Angular + Ionic App) – CONTRACT FIRST APPROACH

    +
  4. +
+
+
+

cobigen ionic code generation

+
+
+

So, ready to go! We’re going to start from the BE part …

+
+
+
Back End
+
+

run \devonfw-ide-scripts-3.2.4\eclipse-main.bat

+
+
+

It will open eclipse

+
+
+

create a project using below command from the command prompt

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

Import the project to eclipse as maven project +eclipse devon

+
+
+

Click FINISH

+
+
+

Now We have the following 4 projects.

+
+
+

eclipse package explorer

+
+
+

BEFORE starting to create an Entity class, remember to create the tables !

+
+
+
    +
  1. +

    Create a new SQL file (i.e: V0005__CreateTables-ItaPoc.sql) inside myapp-core and insert the following script:

    +
  2. +
+
+
+
+
CREATE TABLE EMPLOYEE (
+id BIGINT auto_increment, modificationCounter INTEGER NOT NULL,
+employeeid BIGINT auto_increment,
+name VARCHAR(255),
+surname VARCHAR(255),
+email VARCHAR(255),
+PRIMARY KEY (employeeid)
+);
+
+
+
+

WARNING: please note that there are 2 underscores in the name !

+
+
+

sql file

+
+
+
    +
  1. +

    Now create another SQL file (i.e: V0006__PopulateTables-ItaPoc.sql) and add following script about the INSERT in order to populate the table created before

    +
  2. +
+
+
+

WARNING: please note that there are 2 underscores in the name !

+
+
+
+
INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (1, 1, 1, 'Albert','Miller','albert.miller@capgemini.com');
+INSERT INTO  EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (2, 2, 2, 'Wills','Smith', 'wills.smith@capgemini.com');
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (3, 3, 3, 'Jaime','Thomas', 'jaime.thomas@capgemini.com');
+
+
+
+

sql insert

+
+
+

Let’s create the Entity Class for the code generation

+
+
+
    +
  1. +

    Create a package employeemanagement.dataaccess.api under the folder myapp-core. Note: It is important to follow this naming convention for CobiGen to work properly.

    +
  2. +
+
+
+

package

+
+
+
    +
  1. +

    Now create a JPA Entity class in this package

    +
  2. +
+
+
+
+
import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Column;
+@Entity
+@javax.persistence.Table(name = "EMPLOYEE")
+public class EmployeeEntity {
+  @Column(name = "EMPLOYEEID")
+  @GeneratedValue(strategy = GenerationType.IDENTITY)
+  private Long employeeId;
+  @Column(name = "NAME")
+  private String name;
+  @Column(name = "SURNAME")
+  private String surname;
+  @Column(name = "EMAIL")
+  private String email;
+}
+
+
+
+

then generate getters and setters for all attributes …

+
+
+
    +
  1. +

    Use Cobigen to generate code. Right click on EmployeeEntity. CobiGen → Generate

    +
  2. +
+
+
+

It will ask you to download the templates, click on update:

+
+
+

cobigen generate

+
+
+

It will automatically download the latest version of CobiGen_Templates.

+
+
+

Attention: If you want to adapt the CobiGen_Templates (normally this is not necessary), you will find at the end of this document a tutorial on how to import them and adapt them!

+
+
+
    +
  1. +

    Click on all the option selected as below:

    +
  2. +
+
+
+

cobigen option selection

+
+
+
    +
  1. +

    Click on finish. Below Screen would be seen. Click on continue

    +
  2. +
+
+
+

cobigen finish

+
+
+

The entire BE layer structure having CRUD operation methods will be auto generated.

+
+
+

Some classes will be generated on the api part (myapp-api), normally it will be interfaces, as shown below:

+
+
+

be layer

+
+
+

Some other classes will be generated on the core part (myapp-core), normally it will be implementations as shown below:

+
+
+

core folder

+
+
+

BEFORE generating the FE, please start the Tomcat server to check that the BE Layer has been generated properly.

+
+
+

To start a server you just have to right click on SpringBootApp.java → run as → Spring Boot app

+
+
+

Eclipse run as

+
+
+

Spring boot run

+
+
+

Spring boot run

+
+
+

BE DONE

+
+
+

Last but not least: We make a quick REST services test !

+
+
+

See in the application.properties the TCP Port and the PATH

+
+
+

application properties

+
+
+

Now compose the Rest service URL:

+
+
+

<server>/<app>/<rest service class path>/<service method path>

+
+
+
    +
  • +

    <server> refers to server with port no. (ie: localhost:8081)

    +
  • +
  • +

    <app> is in the application.properties (empty in our case, see above)

    +
  • +
  • +

    <rest service class path> refers to EmployeemanagementRestService: (i.e: /employeemanagement/v1)

    +
  • +
  • +

    <service method path>/employee/{id} (i.e: for getEmployee method)

    +
  • +
+
+
+

url mapping

+
+
+

URL of getEmployee for this example is:

+
+
+

for all employees

+
+
+
+
http://localhost:8081/services/rest/employeemanagement/v1/employee/search
+
+
+
+

for the specific employee

+
+
+
+
http://localhost:8081/services/rest/employeemanagement/v1/employee/1
+
+
+
+

Now download Postman to test the rest services.

+
+
+

Once done, you have to create a POST Request for the LOGIN and insert in the body the JSON containing the username and password waiter

+
+
+

postman

+
+
+

Once done with success (Status: 200 OK) …

+
+
+

… We create a NEW GET Request in order to get one employee

+
+
+

postman

+
+
+

Now you can click postman

+
+
+

Now you have to check that the response has got Status: 200 OK and to see the below Employee

+
+
+

postman

+
+
+

Now that we have successfully tested the BE, it is time to create the FE !

+
+
+
+
Front End
+
+

Let’s start now with angular Web and then Ionic app.

+
+
+Angular Web App +
+
    +
  1. +

    To generate angular structure, download or clone devon4ng-application-template from

    +
    +
    +
    https://github.com/devonfw/devon4ng-application-template
    +
    +
    +
  2. +
+
+
+

devon dist folder

+
+
+
    +
  1. +

    Once done, right click on EmployeeEto.java file present under the package com.devonfw.poc.employeemanagement.logic.api.to

    +
  2. +
+
+
+

eclipse generate

+
+
+
    +
  1. +

    Click on Finish

    +
  2. +
+
+
+

eclipse

+
+
+
    +
  1. +

    The entire ANGULAR structure has been auto generated. The generated code will be merged to the existing.

    +
  2. +
+
+
+

angular ee layer

+
+
+
    +
  1. +

    IMPORTANT now you have to add in the app-routing.module.ts file the next content, as a child of HomeComponent, in order to enable the route of the new generated component

    +
  2. +
+
+
+
+
,\{
+path: 'employee',
+component: EmployeeGridComponent,
+canActivate: [AuthGuard],
+},
+
+
+
+

Following picture explain where to place the above content:

+
+
+

routes

+
+
+
    +
  1. +

    Open the command prompt and execute devon yarn install from the base folder, which would download all the required libraries.

    +
  2. +
+
+
+
    +
  1. +

    Check the file environment.ts if the server path is correct. (for production you will have to change also the environment.prod.ts file)

    +
  2. +
+
+
+

environment

+
+
+

In order to do that it’s important to look at the application.properties to see the values as PATH, TCP port etc …

+
+
+

configure

+
+
+

For example in this case the URL should be since the context path is empty the server URLS should be like:

+
+
+
+
export const environment = {
+production: false,
+restPathRoot: 'http://localhost:8081/',
+restServiceRoot: 'http://localhost:8081/services/rest/',
+security: 'jwt'
+};
+
+
+
+

Warning: REMEMBER to set the security field to jwt, if it is not configured already.

+
+
+
    +
  1. +

    Now run the ng serve -o command to run the Angular Application.

    +
  2. +
+
+
+

image44

+
+
+
    +
  1. +

    If the command execution is successful, the below screen will appear and it would be automatically redirected to the url:

    +
    +
    +
    http://localhost:4200/login
    +
    +
    +
  2. +
+
+
+

image45

+
+
+

WebApp DONE

+
+
+
+Ionic Mobile App +
+
    +
  1. +

    To generate Ionic structure, download or clone devon4ng-application-template from

    +
    +
    +
    https://github.com/devonfw/devon4ng-ionic-application-template
    +
    +
    +
  2. +
  3. +

    Once done, Right click on the EmployeeEto as you already did before in order to use CobiGen.

    +
  4. +
  5. +

    Click on the selected options as seen in the screenshot:

    +
  6. +
+
+
+

image46

+
+
+
    +
  1. +

    Click on Finish

    +
  2. +
  3. +

    The entire ionic structure will be auto generated.

    +
  4. +
+
+
+

image47

+
+
+
    +
  1. +

    Change the server url (with correct serve url) in environment.ts, environment.prod.ts and environment.android.ts files (i.e: itapoc\devon4ng-ionic-application-template\src\environments\).

    +
  2. +
+
+
+

The angular.json file inside the project has already a build configuration for android.

+
+
+

image48

+
+
+
    +
  1. +

    Run npm install in the root folder to download the dependencies

    +
  2. +
  3. +

    Run ionic serve

    +
  4. +
+
+
+

image49

+
+
+
    +
  1. +

    +
    +

    Once the execution is successful

    +
    +
  2. +
+
+
+

image50

+
+
+
    +
  • +

    Mobile App DONE*

    +
  • +
+
+
+

So: well done

+
+
+

Starting from an Entity class you’ve successfully generated the Back-End layer (REST, SOAP, DTO, Spring services, Hibernate DAO), the Angular Web App and the Ionic mobile App!

+
+
+

image51

+
+
+Build APK +
+

Since we’re going to create an APK, remember the following pre-conditions:

+
+
+ +
+
+
    +
  1. +

    Now, open cmd and type the path where your devon4ng-ionic-application-template project is present.

    +
  2. +
  3. +

    Run the following commands:

    +
    +
      +
    1. +

      npx cap init

      +
    2. +
    3. +

      ionic build --configuration=android

      +
    4. +
    5. +

      npx cap add android

      +
    6. +
    7. +

      npx cap copy

      +
    8. +
    9. +

      npx cap open android

      +
    10. +
    +
    +
  4. +
  5. +

    Build the APK using Android studio.

    +
  6. +
+
+
+

image52 +image53 +image54 +image55

+
+
+

You can find your apk file in

+
+
+

/devon4ng-ionic-application-template/android/app/build/outputs/apk/debug

+
+
+
+
+
+
+
Adapt CobiGen_Templates
+
+

After following this tutorial, you will have the CobiGen_Templates downloaded on your local machine. To import these templates you need to do the following:

+
+
+

Right click in any part of the package explorer, then click on CobiGen → Adapt templates

+
+
+

image56

+
+
+

Click Ok:

+
+
+

image57

+
+
+

Now the CobiGen_Templates project will be automatically imported into your workspace, as shown on the image below:

+
+
+

image58

+
+
+

image59

+
+
+

Now you just need to change the Java version of the project to JRE 1.8. Right click on the JRE system library, and then on Properties:

+
+
+

image60

+
+
+

Now change the version to Java 1.8 +image61

+
+
+

Now you have successfully imported the CobiGen templates. If you want to edit them, you will find them in the folder src/main/templates. For instance, the Java templates are located here:

+
+
+

image62

+
+
+

Now you can adapt the templates as much as you want. Documentation about this can be found on:

+
+
+
+
https://github.com/devonfw/tools-cobigen/wiki/Guide-to-the-Reader
+
+
+ + +
+

==End to End POC Code generation using OpenAPI +This article helps to create a sample application using cobigen.

+
+
+
+
Prerequisites
+
+

Download and install devonfw IDE here.

+
+
+
+
Steps to create a Sample Project using Cobigen
+
+

The HOW_TO is divided in 2 parts:

+
+
+
    +
  1. +

    BE-Back End generator (DB + DAO + services) – CONTRACT FIRST APPROACH

    +
  2. +
  3. +

    FE-Front End generator (Web App Angular + Ionic App) – CONTRACT FIRST APPROACH

    +
  4. +
+
+
+

cobigen ionic code generation

+
+
+

So, ready to go! We’re going to start from the BE part …

+
+
+
Back End
+
+

run \devonfw-ide-scripts-3.2.4\eclipse-main.bat

+
+
+

It will open eclipse

+
+
+

create a project using below command from the command prompt

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

Import the project to eclipse as maven project +eclipse devon

+
+
+

Click FINISH

+
+
+

Now We have the following 4 projects.

+
+
+

eclipse package explorer

+
+
+

BEFORE starting to create an Entity class, remember to create the tables !

+
+
+
    +
  1. +

    Create a new SQL file (i.e: V0005__CreateTables_ItaPoc.sql) inside jwtsample-core and insert the following script:

    +
  2. +
+
+
+
+
CREATE TABLE EMPLOYEE (
+id BIGINT auto_increment, modificationCounter *INTEGER* *NOT* *NULL*,
+employeeid BIGINT auto_increment,
+name VARCHAR(255),
+surname VARCHAR(255),
+email VARCHAR(255),
+PRIMARY KEY (employeeid)
+);
+
+
+
+

WARNING: please note that there are 2 underscores in the name !

+
+
+

sql file

+
+
+
    +
  1. +

    Now create another SQL file (i.e: V0006__PopulateTables-ItaPoc.sql) and add following script about the INSERT in order to populate the table created before

    +
  2. +
+
+
+

WARNING: please note that there are 2 underscores in the name !

+
+
+
+
INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (1, 1, 1, 'Stefano','Rossini','stefano.rossini@capgemini.com');
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (2, 2, 2, 'Angelo','Muresu', 'angelo.muresu@capgemini.com');
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (3, 3, 3, 'Jaime','Gonzalez', 'jaime.diaz-gonzalez@capgemini.com');
+
+
+
+

sql insert

+
+
+

Let’s create the yml file for the code generation

+
+
+
    +
  1. +

    Now create a new file devonfw.yml in the root of your core folder. This will be our OpenAPI contract, like shown below. Then, copy the contents of this file into your OpenAPI. It defines some REST service endpoints and a EmployeeEntity with its properties defined.

    +
  2. +
+
+
+

Important: if you want to know how to write an OpenAPI contract compatible with CobiGen, please read this tutorial.

+
+
+

Swagger at OASP4J Project

+
+
+
    +
  1. +

    Right click devonfw.yml. CobiGen → Generate

    +
  2. +
+
+
+

It will ask you to download the templates, click on update:

+
+
+

cobigen generate

+
+
+

It will automatically download the latest version of CobiGen_Templates.

+
+
+

Attention: If you want to adapt the CobiGen_Templates (normally this is not necessary), you will find at the end of this document a tutorial on how to import them and adapt them!

+
+
+
    +
  1. +

    Click on all the option selected as below:

    +
  2. +
+
+
+

cobigen option selection

+
+
+
    +
  1. +

    Click on finish. Below Screen would be seen. Click on continue

    +
  2. +
+
+
+

cobigen finish

+
+
+

The entire BE layer structure having CRUD operation methods will be auto generated.

+
+
+

Some classes will be generated on the api part (jwtsample-api), normally it will be interfaces, as shown below:

+
+
+

be layer

+
+
+

Some other classes will be generated on the core part (jwtsample-core), normally it will be implementations as shown below:

+
+
+

core folder

+
+
+

BEFORE generating the FE, please start the Tomcat server to check that the BE Layer has been generated properly.

+
+
+

To start a server you just have to right click on SpringBootApp.java → run as → Spring Boot app

+
+
+

Eclipse run as

+
+
+

Spring boot run

+
+
+

Spring boot run

+
+
+

BE DONE

+
+
+

Last but not least: We make a quick REST services test !

+
+
+

See in the application.properties the TCP Port and the PATH

+
+
+

application properties

+
+
+

Now compose the Rest service URL:

+
+
+

<server>/<app>/<rest service class path>/<service method path>

+
+
+
    +
  • +

    <server> refers to server with port no. (ie: localhost:8081)

    +
  • +
  • +

    <app> is in the application.properties (empty in our case, see above)

    +
  • +
  • +

    <rest service class path> refers to EmployeemanagementRestService: (i.e: /employeemanagement/v1)

    +
  • +
  • +

    <service method path>/employee/{id}  (i.e: for  getEmployee method)

    +
  • +
+
+
+

url mapping

+
+
+

URL of getEmployee for this example is:

+
+
+

For all employees

+
+
+
+
http://localhost:8081/services/rest/employeemanagement/v1/employee/search
+
+
+
+

For the specific employee

+
+
+
+
http://localhost:8081/services/rest/employeemanagement/v1/employee/1
+
+
+
+

Now download Postman to test the rest services.

+
+
+

Once done, you have to create a POST Request for the LOGIN and insert in the body the JSON containing the username and password waiter

+
+
+

postman

+
+
+

Once done with success (Status: 200 OK) …

+
+
+

postman

+
+
+

… We create a NEW POST Request and We copy the Authorization Bearer field (see above) and We paste it in the Token field (see below)

+
+
+

postman

+
+
+

and specific the JSON parameters for the pagination of the Request that We’re going to send:

+
+
+

postman

+
+
+

postman

+
+
+

Now you can click postman

+
+
+

Now you have to check that the response has got Status: 200 OK and to see the below list of Employee

+
+
+

postman

+
+
+

Now that we have successfully tested the BE, it is time to create the FE !

+
+
+
+
Front End
+
+

Let’s start now with angular Web and then Ionic app.

+
+
+Angular Web App +
+
    +
  1. +

    To generate angular structure, download or clone *devon4ng-application-template* from

    +
    +
    +
    https://github.com/devonfw/devon4ng-application-template
    +
    +
    +
  2. +
+
+
+

devon dist folder

+
+
+
    +
  1. +

    Once done, right click on devonfw.yml again (the OpenAPI contract). CobiGen → Generate

    +
  2. +
  3. +

    Click on the selected options as seen in the screenshot:

    +
  4. +
+
+
+

eclipse generate

+
+
+
    +
  1. +

    Click on Finish

    +
  2. +
+
+
+

eclipse

+
+
+
    +
  1. +

    The entire ANGULAR structure has been auto generated. The generated code will be merged to the existing.

    +
  2. +
+
+
+

angular ee layer

+
+
+
    +
  1. +

    IMPORTANT now you have to add in the app-routing.module.ts file the next content, as a child of HomeComponent, in order to enable the route of the new generated component

    +
  2. +
+
+
+
+
,\{
+path: 'employee',
+component: EmployeeGridComponent,
+canActivate: [AuthGuard],
+},
+
+
+
+

Following picture explain where to place the above content:

+
+
+

routes

+
+
+
    +
  1. +

    Open the command prompt and execute devon yarn install from the base folder, which would download all the required libraries.

    +
  2. +
+
+
+
    +
  1. +

    Check the file environment.ts if the server path is correct. (for production you will have to change also the environment.prod.ts file)

    +
  2. +
+
+
+

environment

+
+
+

In order to do that it’s important to look at the application.properties to see the values as PATH, TCP port etc …

+
+
+

configure

+
+
+

For example in this case the URL should be since the context path is empty the server URLS should be like:

+
+
+
+
export const environment = {
+production: false,
+restPathRoot: 'http://localhost:8081/',
+restServiceRoot: 'http://localhost:8081/services/rest/',
+security: 'jwt'
+};
+
+
+
+

Warning: REMEMBER to set the security field to jwt, if it is not configured already.

+
+
+
    +
  1. +

    Now run the *ng serve -o* command to run the Angular Application.

    +
  2. +
+
+
+

image44

+
+
+
    +
  1. +

    If the command execution is successful, the below screen will appear and it would be automatically redirected to the url:

    +
    +
    +
    http://localhost:4200/login
    +
    +
    +
  2. +
+
+
+

image45

+
+
+

WebApp DONE

+
+
+
+Ionic Mobile App +
+
    +
  1. +

    To generate Ionic structure, download or clone *devon4ng-application-template* from

    +
    +
    +
    https://github.com/devonfw/devon4ng-ionic-application-template
    +
    +
    +
  2. +
  3. +

    Once done, Right click on the devonfw.yml as you already did before in order to use CobiGen.

    +
  4. +
  5. +

    Click on the selected options as seen in the screenshot:

    +
  6. +
+
+
+

image46

+
+
+
    +
  1. +

    Click on Finish

    +
  2. +
  3. +

    The entire ionic structure will be auto generated.

    +
  4. +
+
+
+

image47

+
+
+
    +
  1. +

    Change the server url (with correct serve url) in environment.ts, environment.prod.ts and environment.android.ts files (i.e: itapoc\devon4ng-ionic-application-template\src\environments\).

    +
  2. +
+
+
+

The angular.json file inside the project has already a build configuration for android.

+
+
+

image48

+
+
+
    +
  1. +

    Run npm install in the root folder to download the dependencies

    +
  2. +
  3. +

    Run ionic serve

    +
  4. +
+
+
+

image49

+
+
+
    +
  1. +

    +
    +

    Once the execution is successful

    +
    +
  2. +
+
+
+

image50

+
+
+
    +
  • +

    Mobile App DONE*

    +
  • +
+
+
+

So: well done

+
+
+

Starting from an Entity class you’ve successfully generated the Back-End layer (REST, SOAP, DTO, Spring services, Hibernate DAO), the Angular Web App and the Ionic mobile App!

+
+
+

image51

+
+
+Build APK +
+

Since we’re going to create an APK, remember the following pre-conditions:

+
+
+ +
+
+
    +
  1. +

    Now, open cmd and type the path where your devon4ng-ionic-application-template project is present.

    +
  2. +
  3. +

    Run the following commands:

    +
    +
      +
    1. +

      npx cap init

      +
    2. +
    3. +

      ionic build --configuration=android

      +
    4. +
    5. +

      npx cap add android

      +
    6. +
    7. +

      npx cap copy

      +
    8. +
    9. +

      npx cap open android

      +
    10. +
    +
    +
  4. +
  5. +

    Build the APK using Android studio.

    +
  6. +
+
+
+

image52 +image53 +image54 +image55

+
+
+

You can find your apk file in

+
+
+

/devon4ng-ionic-application-template/android/app/build/outputs/apk/debug

+
+
+
+
+
+
+
Adapt CobiGen_Templates
+
+

After following this tutorial, you will have the CobiGen_Templates downloaded on your local machine. To import these templates you need to do the following:

+
+
+

Right click in any part of the package explorer, then click on CobiGen → Adapt templates

+
+
+

image56

+
+
+

Click Ok:

+
+
+

image57

+
+
+

Now the CobiGen_Templates project will be automatically imported into your workspace, as shown on the image below:

+
+
+

image58

+
+
+

image59

+
+
+

Now you just need to change the Java version of the project to JRE 1.8. Right click on the JRE system library, and then on Properties:

+
+
+

image60

+
+
+

Now change the version to Java 1.8 +image61

+
+
+

Now you have successfully imported the CobiGen templates. If you want to edit them, you will find them in the folder src/main/templates. For instance, the Java templates are located here:

+
+
+

image62

+
+
+

Now you can adapt the templates as much as you want. Documentation about this can be found on:

+
+
+
+
https://github.com/devonfw/tools-cobigen/wiki/Guide-to-the-Reader
+
+
+ +
+

==Adapt Templates from CobiGen

+
+
+
+
Adapt CobiGen_Templates
+
+

After following this tutorial, you will have the CobiGen_Templates downloaded on your local machine. To import these templates you need to do the following:

+
+
+

Right click in any part of the package explorer, then click on CobiGen → Adapt templates

+
+
+

image56

+
+
+

Click OK:

+
+
+

image57

+
+
+

Now the CobiGen_Templates project will be automatically imported into your workspace, as shown on the image below:

+
+
+

image58

+
+
+

image59

+
+
+

Now you just need to change the Java version of the project to JRE 1.8. Right click on the JRE system library, and then on Properties:

+
+
+

image60

+
+
+

Now change the version to Java 1.8 +image61

+
+
+

Now you have successfully imported the CobiGen templates. If you want to edit them, you will find them in the folder src/main/templates. For instance, the Java templates are located here:

+
+
+

image62

+
+
+

Now you can adapt the templates as much as you want. Documentation about this can be found on:

+
+
+
+
https://github.com/devonfw/tools-cobigen/wiki/Guide-to-the-Reader
+
+
+ +
+

==Enable Composite Primary Keys in Entity

+
+
+

In order to enable Composite Primary Keys in entity in CobiGen, the below approach is suggested

+
+
+

The templates in CobiGen have been enhanced to support Composite primary keys while still supporting the default devonfw/Cobigen values with Long id.

+
+
+

Also, the current generation from Entity still holds good - right click from an Entity object, CobiGen → Generate will show the CobiGen wizard relative to the entity generation.

+
+
+

After generating, below example shows how composite primary keys can be enabled.

+
+
+
+
@Entity
+@Table(name = "employee")
+public class EmployeeEntity {
+	private CompositeEmployeeKey id;
+	private String name;
+	private String lastName;
+	@Override
+	@EmbeddedId
+	public CompositeEmployeeKey getId() {
+		return id;
+	}
+	@Override
+	public void setId(CompositeEmployeeKey id) {
+		this.id = id;
+	}
+	.
+	.
+	.
+
+
+
+
+
public class CompositeEmployeeKey implements Serializable {
+  private String companyId;
+  private String employeeId;
+
+
+
+

Once the generation is complete, implement PersistenceEntity<ID>.java in the EmployeeEntity and pass the composite primary key object which is CompositeEmployeeKey in this case as the parameter ID.

+
+
+
+
import com.devonfw.module.basic.common.api.entity.PersistenceEntity;
+@Entity
+@Table(name = "employee")
+public class EmployeeEntity implements PersistenceEntity<CompositeEmployeeKey> {
+	private CompositeEmployeeKey id;
+	private String name;
+	private String lastName;
+
+
+
+

Also, the modificationCounter methods need to be implemented from the interface PersistenceEntity<ID>. A sample implementation of the modification counter can be found below.

+
+
+
+
@Override
+  public int getModificationCounter() {
+    if (this.persistentEntity != null) {
+      // JPA implementations will update modification counter only after the transaction has been committed.
+      // Conversion will typically happen before and would result in the wrong (old) modification counter.
+      // Therefore we update the modification counter here (that has to be called before serialization takes
+      // place).
+      this.modificationCounter = this.persistentEntity.getModificationCounter();
+    }
+    return this.modificationCounter;
+  }
+  @Override
+  public void setModificationCounter(int version) {
+    this.modificationCounter = version;
+  }
+
+
+
+
+
+
+

Template Development

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/mgmt__release_and_deployment_process.html b/docs/devonfw.github.io/1.0/cobigen.wiki/mgmt__release_and_deployment_process.html new file mode 100644 index 00000000..2c238a5d --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/mgmt__release_and_deployment_process.html @@ -0,0 +1,536 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==Release and Deployment Process

+
+
+

Updated: This explains the manual process of releasing. We now have an automatic script that enables a fast and easy release. Please check it here

+
+
+

Create a new issue with the following markdown contents for each release of any plugin/module containing the following tasks to be performed on release:

+
+
+
Template for release tasks (markdown)
+
+
 **Release tasks:**
+***1. Preparation***
+* [ ] Check running maven build on the development branch `mvn clean install`
+  * especially for eclipse plugin release run `mvn clean install -Pp2-build-mars,p2-build-stable` in cobigen-eclipse folder to run SWTBot UI tests locally. **Be patient, do not touch mouse and keyboard.**
+* [ ] Check if all tests are green and if there are no ignored ones left. If there are ignored ones, check whether they can be removed or should only be temporarily ignored. Potentially fix them.
+* [ ] Check/Update documentation according to changelog to be released
+  * [ ] especially update version number of module to be released [here](https://github.com/devonfw/cobigen/wiki/CobiGen)
+  * [ ] Update the wiki submodule and commit the latest version to target the updated release version of the wiki
+    \```
+    cd cobigen-documentation/cobigen.wiki
+    git pull origin master
+    cd ..
+    git add cobigen.wiki
+    git commit -m"#<releaseIssueNo> update docs"
+    git push
+    \```
+  * [ ] Check branch build to not fail in production line https://devon.s2-eu.capgemini.com/
+
+***2. Merging / Review***
+* [ ] **Locally** merge development branch to master branch
+  * [ ] Check for changed maven dependencies and document them. _As dependencies have been changed:_
+    * [ ] check new dependencies with legal (in case of not yet used licenses).
+    * **If there are any issues with the dependencies. Abort, get in contact.**
+    * [ ] document the changes in the [`ChangeLog` of the dependency tracking](https://github.com/devonfw/cobigen/wiki/mgmt_dependency-and-license-tracking).
+    * [ ] create a new licensing document
+  * [ ] Perform final review of merged contents
+    * [ ] Are there any changes in a different module not corresponding to the current development branch? Try to find the cause and potentially discuss it with the responsible developer.
+    * [ ] Any major issues, which would prevent from merging? Missing files, changes?
+    * if ok - commit (if not yet done) **but do not push** to master branch
+    * if not - abort merge, cleanup working copy, and fix on dev branch
+
+***3. Testing / Consolidation***
+* [ ] Higher component version number to release version
+* [ ] Fix snapshot versions of dependencies of all components to be released to its release versions
+* [ ] Install components locally and/or deploy to experimental update site
+* [ ] Perform a final manual test of all issues resolved in the milestone to be released.
+* [ ] Perform integration tests
+  * especially for cobigen-eclipse if cobigen internal dependencies have been changed
+
+***4. Deployment***
+* [ ] Close eclipse IDE
+* [ ] In case of non-eclipse component (for cobigen-core, just execute first line):
+  \```
+  mvn clean package bundle:bundle -Pp2-bundle -Dmaven.test.skip=true
+  mvn install bundle:bundle -Pp2-bundle p2:site -Dmaven.test.skip=true
+  mvn deploy -Pp2-upload-stable -Dmaven.test.skip=true -Dp2.upload=stable
+  \```
+* [ ] In case of eclipse plug-in release:
+  \```
+  cd cobigen-eclipse
+  mvn clean deploy -Pp2-build-stable,p2-upload-stable,p2-build-mars -Dp2.upload=stable
+  \```
+* [ ] Check the update site `http://de-mucevolve02/files/cobigen/updatesite/stable/` by installing/updating it once to an eclipse distribution.
+* [ ] Assure, that everything is committed and the working copy is clean
+* [ ] Create a tag according to the naming conventions
+* [ ] Push
+* [ ] Close milestone and create new release with binaries on GitHub
+
+***5. Follow-up***
+* [ ] Merge master branch back to corresponding dev_ branch
+* [ ] Create new Milestone (minor version update)
+* [ ] increase version on dev branch to next minor version + SNAPSHOT
+* [ ] Push
+
+
+
+

Testing process

+
+
+

In this section, the testing process of certain CobiGen features will be described. This should be used as a quality assurance document to follow up before releasing these features:

+
+
+

Update templates feature

+
+

Starting from a clean devonfw 3.0.0 distribution, follow the next steps to test the new feature for updating templates:

+
+
+
    +
  • +

    Open the devonfw distribution, right click on a Java entity. Click on CobiGen → Health Check. It should:

    +
    +
      +
    1. +

      Throw message stating that there are no templates. It asks you to download them. If you cancel it, nothing happens. If you accept, it should say "Templates downloaded successfully".

      +
    2. +
    3. +

      After downloading the templates, you should see two OK values on CobiGen_Templates and on context.xml.

      +
      +
        +
      1. +

        If you click on Advanced Health Check everything should be green.

        +
      2. +
      +
      +
    4. +
    5. +

      Now, right click again on CobiGen → Generate. As you have already downloaded the templates, it should directly start loading them, without asking to download them again.

      +
    6. +
    7. +

      Try to generate something. The generated files should be visible after generating.

      +
    8. +
    +
    +
  • +
+
+
+
+
**Quality assurance plan Update Templates feature:**
+
+***1. Preparation***
+* [ ] Follow [this tutorial](https://github.com/devonfw/cobigen/wiki/mgmt__release_and_deployment_process#update-templates-feature) to start the testing phase
+
+***2. Testing scenarios***
+* [ ] A message is thrown informing that there are no templates.
+* [ ] It asks you to download templates.
+* [ ] If you cancel it, nothing happens.
+* [ ] If you accept it, a new window is shown with: Templates downloaded successfully.
+* [ ] You should see two OK values.
+* [ ] If you press on Advanced Health check, everything should be green.
+* [ ] If you try to generate, it directly reads the templates.
+* [ ] You are able to generate and you see the generated files.
+
+
+***3. Deployment***
+
+If all of these test scenarios are checked, then the release process can continue.
+
+
+
+
+

Ionic and Angular

+
+

To properly test the Ionic and Angular templates we need to follow the next steps:

+
+
+
    +
  • +

    Copy the jwtsample project from the workspaces/examples folder and paste it into the workspaces/main folder, then import it into your workspace.

    +
  • +
  • +

    Add to the database of the project the following SQL script, so that we can test the retrieval of data.

    +
  • +
+
+
+
+
CREATE TABLE EMPLOYEE (
+
+  id BIGINT auto_increment ,
+
+  modificationCounter INTEGER NOT NULL,
+
+  employeeid BIGINT auto_increment,
+
+  name VARCHAR(255),
+
+  surname VARCHAR(255),
+
+  email VARCHAR(255),
+
+  PRIMARY KEY (employeeid)
+
+);
+
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (1, 1, 1, 'Mister','Boss','mister.boss@capgemini.com');
+
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (2, 2, 2, 'Intern','Student', 'intern.student@capgemini.com');
+
+
+
+
    +
  • +

    Create a Hibernate entity to map the data of the previous SQL script.

    +
  • +
+
+
+
+
import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Column;
+
+
+@Entity
+@javax.persistence.Table(name = "EMPLOYEE")
+
+public class EmployeeEntity {
+
+  @Column(name = "EMPLOYEEID")
+
+  @GeneratedValue(strategy = GenerationType.IDENTITY)
+
+  private Long employeeId;
+
+  @Column(name = "NAME")
+
+  private String name;
+
+  @Column(name = "SURNAME")
+
+  private String surname;
+
+  @Column(name = "EMAIL")
+
+  private String email;
+
+}
+
+
+
+
    +
  • +

    Using the EmployeeEntity, generate increments CRUD DAO’S, CRUD REST services, CRUD SOAP services, CRUD logic (all in one), Entity infrastructure and TO’s. After generating, follow first the following tutorial related to Ionic Client Generation and afterwards the Angular tutorial.

    +
  • +
  • +

    The final step before releasing should be creating an issue with the following Markdown template. If every test scenario is completed, then testing phase is over and you can release.

    +
  • +
+
+
+
+
**Quality assurance plan Ionic and Angular:**
+
+***1. Preparation***
+* [ ] Follow [this tutorial](https://github.com/devonfw/cobigen/wiki/mgmt__release_and_deployment_process#ionic-and-angular) to start the testing phase
+
+***2. Testing scenarios***
+* [ ] You are able to log-in into both Ionic and Angular apps using JWT authentication.
+* [ ] You are able to log-in into Angular using CSRF authentication.
+* [ ] You are able to retrieve all the employees in both Ionic and Angular.
+* [ ] You are able to create an employee in both Ionic and Angular.
+* [ ] You are able to find an employee by any of its fields in both Ionic and Angular.
+* [ ] You are able to update an employee by any of its fields in both Ionic and Angular.
+* [ ] You are able to use [swipe functionality](https://ionicframework.com/docs/api/components/item/ItemSliding/) to update or delete an employee in Ionic.
+* [ ] You are able to use the [Ionic refresher](https://ionicframework.com/docs/api/components/refresher/Refresher/).
+
+
+***3. Deployment***
+
+If all of these test scenarios are checked, then the release process can continue.
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/mgmt_dependency-and-license-tracking.html b/docs/devonfw.github.io/1.0/cobigen.wiki/mgmt_dependency-and-license-tracking.html new file mode 100644 index 00000000..93f8fae0 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/mgmt_dependency-and-license-tracking.html @@ -0,0 +1,2540 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==License Tracking of Dependencies

+
+
+

Current Releases

+
+
+

CobiGen-core v1.0.0

+ ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Dependency NameVersionLicenseURL

SLF4J

1.7.7

MIT

http://www.slf4j.org/license.html

Guava

17.0

Apache License 2.0

http://code.google.com/p/guava-libraries/

Reflections

0.9.9-RC2

WTFPL

https://code.google.com/p/reflections/

FreeMarker

2.3.20

BSD-style

http://freemarker.org/docs/app_license.html

Jaxen

1.1.4

"Apache-style open source license"

http://jaxen.codehaus.org/license.html

Apache Commons IO

2.4

Apache License 2.0

http://commons.apache.org/proper/commons-io/

`Apache Commons Lang `

3.1

Apache License 2.0

http://commons.apache.org/proper/commons-lang/

Apache Commons JXPath

1.3

Apache License 2.0

http://commons.apache.org/proper/commons-jxpath/

JDOM

1.1.3

"Apache-style open source license"

http://www.jdom.org/docs/faq.html#a0030

+
+
+

CobiGen-Java plugin v1.0.0

+ ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Dependency NameVersionLicenseURL

cobigen-core

v1.0.0

SLF4J

1.7.7

MIT

http://www.slf4j.org/license.html

QDox

2.0-M2

Apache License 2.0

http://qdox.codehaus.org/license.html

+
+
+

CobiGen-property plugin v1.0.0

+ ++++++ + + + + + + + + + + + + + + + + + + + + + + +
Dependency NameVersionLicenseURL

cobigen-core

v1.0.0

SLF4J

1.7.7

MIT

http://www.slf4j.org/license.html

+
+
+

CobiGen-XML plugin v1.0.0

+ ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Dependency NameVersionLicenseURL

cobigen-core

v1.0.0

SLF4J

1.7.7

MIT

http://www.slf4j.org/license.html

XMLMerge

3.1

LGPL 2.0

http://geonetwork.tv/xmlmerge/License.txt http://el4j.sourceforge.net/license.html

atinject

1

Apache License 2.0

https://code.google.com/p/atinject/

+
+
+

CobiGen-text merger v1.0.0

+ ++++++ + + + + + + + + + + + + + + + + + + + + + + +
Dependency NameVersionLicenseURL

cobigen-core

v1.0.0

SLF4J

1.7.7

MIT

http://www.slf4j.org/license.html

+
+
+

CobiGen-eclipse v1.0.0

+ ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Dependency NameVersionLicenseURL

cobigen-core

v1.0.0

cobigen-javaplugin

v1.0.0

cobigen-propertyplugin

v1.0.0

cobigen-xmlplugin

v1.0.0

cobigen-textmerger

v1.0.0

+
+
+
+
+

Changelog

+
+
+

CobiGen-core v1.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

removed

JDOM

+
+
+

CobiGen-Java plugin v1.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

` cobigen-core `

v1.1.0

+
+
+

CobiGen-XML plugin v1.0.1

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

JDOM

1.1.3

"Apache-style open source license"

http://www.jdom.org/docs/faq.html#a0030

+
+
+

CobiGen-eclipse v1.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

1.1.0

updated

cobigen-javaplugin

1.1.1

updated

cobigen-xmlplugin

1.0.1

+
+
+

CobiGen-XML plugin v2.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

1.2.0

+
+
+

CobiGen-Java plugin v1.2.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

mmm-util-core

5.0.0

Apache License 2.0

https://github.com/m-m-m/mmm/wiki/FAQ#will-mmm-ever-change-its-license-in-later-releases

updated

cobigen-core

1.2.0

+
+
+

CobiGen-eclipse v1.2.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

1.2.0

updated

cobigen-javaplugin

1.2.0

updated

cobigen-xmlplugin

2.0.0

+
+
+

CobiGen-eclipse v1.2.1

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-javaplugin

1.2.1

+
+
+

CobiGen-Java plugin v1.3.0

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

2.0.0

+
+
+

CobiGen-maven v1.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

maven-core

3.0

Apache License 2.0

http://maven.apache.org/ref/3.0/maven-core/

added

maven-compat

3.0

Apache License 2.0

http://maven.apache.org/ref/3.0/maven-compat/

added

maven-plugin-api

3.0

Apache License 2.0

http://maven.apache.org/ref/3.0/maven-plugin-api/

added

cobigen-core

2.0.0

added

cobigen-xmlplugin

2.1.0

added

cobigen-javaplugin

1.3.0

added

cobigen-propertyplugin

1.0.0

added

cobigen-textmerger

1.0.1

+
+
+

CobiGen-eclipse v1.3.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

changed

cobigen-core

2.0.0

changed

cobigen-xmlplugin

2.1.0

changed

cobigen-javaplugin

1.3.0

changed

cobigen-textmerger

1.0.1

+
+
+

CobiGen-core v2.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

Dozer

5.5.1

Apache License 2.0

http://dozer.sourceforge.net/license.html

+
+
+

CobiGen-Java plugin v1.3.1

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

QDox

2.0-M3

+
+
+

CobiGen-eclipse v1.4.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

2.1.0

updated

cobigen-javaplugin

1.3.1

+
+
+

CobiGen-maven v1.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

2.1.0

updated

cobigen-javaplugin

1.3.1

+
+
+

CobiGen-core v2.1.1

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

FreeMarker

2.3.23

Apache License 2.0

http://freemarker.org/LICENSE.txt

+
+
+

CobiGen-eclipse v1.4.1

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

2.1.1

updated

cobigen-javaplugin

1.3.2

added

ant

1.9.6

Apache License 2.0

http://www.apache.org/licenses/LICENSE-2.0.html

+
+
+

CobiGen-Java plugin v1.4.0

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

3.0.0

+
+
+

CobiGen-JSON plugin v1.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

cobigen-core

3.0.0

added

` mmm-util-core`

5.0.0

Apache License 2.0

added

json

20160810

MIT

https://github.com/stleary/JSON-java

added

gson

2.7

Apache License 2.0

https://github.com/google/gson

+
+
+

CobiGen-XML plugin v3.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

3.0.0

removed

XMLMerge

removed

module-xml_merge-common

removed

javax.inject

removed

JDOM

added

LeXeMe

1.0.0

Apache License 2.0

https://github.com/maybeec/lexeme

+
+
+

CobiGen-maven v2.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

3.0.0

updated

cobigen-javaplugin

1.4.0

updated

cobigen-xmlplugin

3.0.0

updated

cobigen-propertyplugin

1.1.0

updated

cobigen-textmerger

1.1.0

added

cobigen-jsonplugin

1.0.0

+
+
+

CobiGen-maven v2.0.1

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-propertyplugin

1.1.1

+
+
+

CobiGen-eclipse v2.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

3.0.0

updated

cobigen-javaplugin

1.4.0

updated

cobigen-xmlplugin

3.0.0

updated

cobigen-propertyplugin

1.1.0

updated

cobigen-textmerger

1.1.1

added

cobigen-jsonplugin

1.0.0

+
+
+

CobiGen-HTML merger v1.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

cobigen-core

4.0.0

added

jsoup

1.10.2

MIT

https://jsoup.org/

+
+
+

CobiGen-JSON plugin v1.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

removed

mmm-util-core

+
+
+

CobiGen-core v4.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

mmm-util-core

7.4.0

Apache Software License 2.0

https://github.com/m-m-m/mmm/wiki/License

removed

FreeMarker

2.3.23

+
+
+

CobiGen-Java plugin v1.5.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

removed

mmm-util-core

added

mmm-util-pojo

7.4.0

Apache Software License 2.0

https://github.com/m-m-m/mmm/wiki/License

+
+
+

CobiGen-tempeng-velocity-plugin v1.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

velocity

1.7

Apache Software License 2.0

http://velocity.apache.org/engine/1.7/license.html

+
+
+

CobiGen-TS plugin v1.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

cobigen-core

4.0.0

added

ts-merger

1.0.0

Apache Public License 2.0

https://github.com/devonfw/ts-merger

added

js-beautifier

1.6.14

MIT

https://github.com/beautify-web/js-beautify

added

rhino

1.7R4

Mozilla Public License 2.0

https://github.com/mozilla/rhino/blob/master/LICENSE.txt

+
+
+

CobiGen-eclipse v2.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

4.0.0

updated

cobigen-javaplugin

1.5.0

updated

cobigen-jsonplugin

1.1.0

added

cobigen-tsplugin

1.0.0

added

cobigen-htmlplugin

1.0.0

added

cobigen-tempeng-freemarkerplugin

1.0.0-SNAPSHOT

+
+
+

CobiGen-maven v2.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

4.0.0

added

cobigen-core-test

4.0.0

updated

cobigen-javaplugin

1.5.0

updated

cobigen-jsonplugin

1.1.0

added

cobigen-tsplugin

1.0.0

added

cobigen-htmlplugin

1.0.0

added

cobigen-tempeng-freemarkerplugin

1.0.0-SNAPSHOT

+
+
+

CobiGen-TS plugin v1.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

removed

cobigen-core

updated

cobigen-core-api

v4.1.0

updated

ts-merger

2.0.0

updated

beautify

1.6.14

removed

rhino

+
+
+

CobiGen-tempeng-FreeMarker-plugin v1.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

cobigen-core-api

4.1.0

added

FreeMarker

2.3.23

Apache Software License 2.0

http://freemarker.org/docs/app_license.html

added

Jaxen

1.1.4

"Apache-style open source license"

http://jaxen.codehaus.org/license.html

+
+
+

CobiGen-eclipse v3.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

4.1.0

added

`cobigen-java`plugin-model

1.0.0

removed

cobigen-jsonplugin

removed

cobigen-javaplugin

removed

cobigen-htmlplugin

removed

cobigen-propertyplugin

removed

cobigen-textmerger

removed

`cobigen-tsplugin `

removed

cobigen-xmlplugin

removed

cobigen-tempeng-freemarkerplugin

+
+
+

CobiGen-XML plugin v3.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

removed

cobigen-core

updated

cobigen-core-api

v4.1.0

+
+
+

CobiGen-maven v3.0.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

4.1.0

removed

cobigen-jsonplugin

removed

cobigen-javaplugin

removed

cobigen-htmlplugin

removed

cobigen-propertyplugin

removed

cobigen-textmerger

removed

cobigen-tsplugin

removed

cobigen-xmlplugin

removed

cobigen-tempeng-freemarkerplugin

+
+
+

CobiGen-property plugin v1.2.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

removed

cobigen-core

updated

cobigen-core-api

v4.1.0

+
+
+

CobiGen-text merger v1.2.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

removed

cobigen-core

updated

cobigen-core-api

v4.1.0

+
+
+

CobiGen-HTML plugin v1.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

removed

cobigen-core

updated

cobigen-core-api

v4.1.0

added

commons-io

2.4

Apache License 2.0

https://commons.apache.org/proper/commons-io/

+
+
+

CobiGen-JSON plugin v1.2.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

removed

cobigen-core

updated

cobigen-core-api

v4.1.0

+
+
+

CobiGen-OpenAPI plugin v1.0.1

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

added

cobigen-core-api

v4.1.0

added

kaizen.openapi-parser

v0.0.1.201709142043

EPL v1.0

KaiZen Open API parser

+
+
+

CobiGen-OpenAPI plugin v1.1.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

changed

kaizen.openapi-parser

v0.0.3.201803041924

EPL v1.0

KaiZen Open API parser

added

json-path

2.4.0

Apache License 2.0

https://github.com/json-path/JsonPath/blob/master/LICENSE

+
+
+

CobiGen-JSON plugin v1.2.1

+ +++++++ + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

removed

json

20160810

MIT

https://github.com/stleary/JSON-java

+
+
+

CobiGen-maven v3.2.0

+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ActionDependency NameVersionLicenseURL

updated

cobigen-core

4.2.1

added

cobigen-core-api

4.2.1

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/mgmt_ide-setup-oomph.html b/docs/devonfw.github.io/1.0/cobigen.wiki/mgmt_ide-setup-oomph.html new file mode 100644 index 00000000..9dd601a1 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/mgmt_ide-setup-oomph.html @@ -0,0 +1,552 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

IDE Setup with the Oomph Installer

+
+
+ + + + + +
+ + +
+

This page is still under construction.

+
+
+
+
+

As an alternative and faster way to set up Eclipse for CobiGen development, we also provide a customized Eclipse Installer and Oomph setups.

+
+
+
    +
  1. +

    The installer can be downloaded from within the corp network.

    +
  2. +
  3. +

    Unarchive it in a folder of your choice (e.g. %home%\Eclipse Installer Capgemini\ if you want to use the installer frequently)

    +
  4. +
  5. +

    Run eclipse-inst.exe or eclipse-inst on linux

    +
  6. +
+
+
+ + + + + +
+ + +
+

Before starting the installation make sure to

+
+
+
    +
  1. +

    have git configured for your preferred github authentication method

    +
  2. +
  3. +

    have git configured to handle long file names (e.g. by setting git config --system core.longpaths true)

    +
  4. +
  5. +

    have read access to http://de-mucevolve02/ in the corp network

    +
  6. +
+
+
+
+
+

Quick start guide

+
+
+
    +
  1. +

    On the Product page choose CobiGen IDE

    +
  2. +
  3. +

    On the Project page choose CobiGen

    +
    +
      +
    1. +

      master clones only the master branch from the specified origin

      +
    2. +
    3. +

      development clones all development branches and the master branch from the specified origin into %installation location%/workspaces/cobigen-development and the master branch from the devonfw repository into %installation location%/workspaces/cobigen-master

      +
    4. +
    +
    +
  4. +
+
+
+
+
+

Detailed Walkthrough

+
+
+

Clean (with Eclipse installation)

+
+

On the first installer page you need to choose what Eclipse bundle you want to use. The Product page (picture below) displays the possible choices. +Product page of the installer

+
+
+
    +
  1. +

    the current Product Catalog. Each entry represents a pre-configured Eclipse bundle. In case of doubt choose CobiGen IDE

    +
  2. +
  3. +

    the Eclipse version to be installed.

    +
  4. +
  5. +

    the bitness of the Eclipse version. Be sure to choose the bitness of your OS

    +
  6. +
  7. +

    the Java VM used during installation.

    +
  8. +
  9. +

    the bundle pool. If activated Eclipse will create a p2 pool. This can be helpful if you want to create multiple installations of eclipse. This option is hidden and deactivated by default. You can make it visible by removing the -Doomph.p2.pool=@none line in the installers eclipse-inst.ini

    +
  10. +
  11. +

    the update indicator. If those arrows spin you can update the installer or any of its components by clicking on this button

    +
  12. +
  13. +

    Chooses the selected product and continues with the installation

    +
  14. +
+
+
+

The next installer page lets you choose a project to be checked out during installation. +Project page of the installer

+
+
+
    +
  1. +

    the current Project Catalog. Select CobiGen

    +
  2. +
  3. +

    the project stream. In case of CobiGen:

    +
    +
      +
    1. +

      master: Only the master branch of Cobigen will be checked out

      +
    2. +
    3. +

      development: the master branch and ALL development branches will be checked out.

      +
    4. +
    5. +

      In each case you can specify an own fork as git origin

      +
    6. +
    +
    +
  4. +
+
+
+

After choosing a project the installer fetches additional Oomph tasks. You need to accept the installation of said tasks in order to proceed.

+
+
+

Installation of external Oomph tasks

+
+
+

The installer then restarts and opens at the Project page again. Simply repeat the instructions for the Project page. Installation and restart are only done the first time a new task is requested by a product or project configuration.

+
+
+

By proceeding with the Next button the installer opens the Variables page. On this page the installation and configuration of the Eclipse bundle and the chosen projects is done by setting the variables presented.

+
+
+

Variable page of the installer

+
+
+
    +
  1. +

    the folder into which Eclipse will be installed. It is recommended to use the Browse…​ button to locate the folder. A direct input into the text field is possible, but due to a randomly occurring bug in the installer the input is only partially parsed.

    +
  2. +
  3. +

    the User name to access the Devon Maven Nexus. Typically your corp user name. This value will be stored in variables-customized(.bat)

    +
  4. +
  5. +

    the password to access the Devon Maven Nexus. Typically your corp password. This value will be stored (PLAIN!) in variables-customized(.bat)

    +
  6. +
  7. +

    the User name to access the iCSD Fileserver. This value will be stored in variables-customized(.bat). If no credentials were provided insert anything.

    +
  8. +
  9. +

    the password to access the iCSD Fileserver. This value will be stored (PLAIN!) in variables-customized(.bat). If no credentials were provided insert anything.

    +
  10. +
  11. +

    the Github remote URI for cloning the devonfw repository of CobiGen. Target of this URI is %installation location%/workspaces/cobigen-master if the chosen stream is development.

    +
    +
      +
    1. +

      SSH: The remote URI to access the repository via ssh. Make sure to have your git configured to work with a ssh client and have this client running.

      +
    2. +
    3. +

      HTTPS: The remote URI to access the repository via https. Activates the Github user ID and Github user Password variables. User id and password are stored in the cloning scripts in plain text.

      +
    4. +
    5. +

      Two-Factor Authentication isn’t supported and probably won’t be in the future.

      +
    6. +
    +
    +
  12. +
  13. +

    the Github remote URI for cloning a CobiGen repository.

    +
    +
      +
    1. +

      Existing own fork (SSH): Same as above. The Github user ID is used in the remote URI instead of devonfw. Activates and requires the Github user ID variable.

      +
    2. +
    3. +

      Existing own fork (HTTPS): Same as above. The Github user ID is used in the remote URI instead of devonfw.

      +
    4. +
    5. +

      devonfw repository: Uses the remote URI from above.

      +
    6. +
    +
    +
  14. +
  15. +

    The Eclipse version you want to develop cobigen for. This is not the Eclipse version to be installed. When running integration tests for the CobiGen Eclipse Plugin this Eclipse version is launched.

    +
  16. +
  17. +

    Your Github user id.

    +
  18. +
  19. +

    Your Github user password. Be aware that this is stored in plain text! Moreover, if you use special characters as for example ! or % in your password, you need to escape them in the batch file. See http://www.robvanderwoude.com/escapechars.php for further information.

    +
  20. +
  21. +

    Reveals all variables that can be set. Activated by default. If not activated preset variables and variables with default values are hidden.

    +
  22. +
+
+
+

The Next button can only be used if all variables are set. Proceeding the installer opens the Confirmation page. All tasks needed for installation are shown here with all variables resolved. Only the tasks needed for the installation are activated. Tasks like Project import are triggered at first startup of Eclipse.

+
+
+

Confirmation page

+
+
+

The Finish button triggers the installation process. Once started the installation proceeds automatically.

+
+
+

Progress page

+
+
+
    +
  1. +

    indicates the task that is currently executed

    +
  2. +
  3. +

    the task output. Provides progress and debugging information

    +
  4. +
  5. +

    if activated the installer exits after successful installation

    +
  6. +
  7. +

    stops the installation

    +
  8. +
+
+
+
+

Into an existing Eclipse installation

+
+

The following instructions only hold for OASP4J-like Eclipse installations. Furthermore you need to install Oomph Setup from the Oomph Update site. When Oomph is installed activate the Oomph tool bar via the Show tool bar contributions check box.

+
+
+

Oomph preferences page

+
+
+

The tool bar looks like this: Oomph tool bar

+
+
+
+
+
+

Configuration of the CobiGen Oomph Setup

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/mgmt_ide-setup.html b/docs/devonfw.github.io/1.0/cobigen.wiki/mgmt_ide-setup.html new file mode 100644 index 00000000..ae923556 --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/mgmt_ide-setup.html @@ -0,0 +1,396 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

IDE setup using devonfw IDE

+
+
+

General Installation Process

+
+
+
    +
  1. +

    First of all you will have to setup the devonfw IDE. Once the download is complete and you started the installation process via the setup command (or setup.bat), you will be prompted for a settings URL. You can just press enter as no special settings are used. Following that, you will simply have to wait and follow any instructions given.

    +
    +

    After you have executed the steps mentioned above, your install location should look like the picture below.

    +
    +
    +
    +Install directory after executing setup.bat +
    +
    +
  2. +
  3. +

    As devonfw IDE by default does not install eclipse anymore, we need to setup eclipse first by executing devon eclipse

    +
  4. +
  5. +

    The next step is to open a console and execute the following command: devon project setup cobigen +This downloads all the necessary files for CobiGen development.

    +
  6. +
  7. +

    Build the project by running the build.sh located in the workspaces/main/cobigen folder within git bash bash build.sh parallel.

    +
  8. +
  9. +

    Optional: If you are making use of SSH private key authentication working with Git on GitHub, you can change the HTTPS default setup git remote URL by executing git remote set-url origin git@github.com:devonfw/cobigen.git (possibly with git@github.com:<your user>/cobigen.git in case you want to work on your fork)

    +
  10. +
  11. +

    Now open eclipse using the eclipse-main.bat file or by executing devon eclipse on the console and import the CobiGen projects you want to work on

    +
  12. +
  13. +

    Switch to the "Project Explorer" view (Window→Show View→Project Explorer). This extra step is required because an import from the default view "Package Explorer" doesn’t work properly.

    +
  14. +
  15. +

    Click on File→import…​→Maven→Existing Maven Project and enter {Install directory}/workspaces/main/cobigen (should be the default location when clicking on "Browse…​")

    +
  16. +
  17. +

    After you have finished your installation run a maven update. To do so right click on a project, select maven and afterwards update project. Select all projects and the checkbox Force Update of Snapshot/Releases. +You might be asked to install some Tycho-plugins. You need those, if you want to debug eclipse-plugins.

    +
  18. +
+
+
+
+
+

Eclipse Plugin Installation

+
+
+

For some parts of CobiGen, you will have to have additional plugins installed.

+
+
+

Plugin development

+
+

If you want to develop CobiGen plugins (OpenAPI plugin, Java plugin etc.) you need to have the eclipse PDE plugin available. +It is not strictly necessary to install this manually as Eclipse should prompt you for installation once you try to build a relevant project.

+
+
+

To install the plugin manually, open a console in your IDE Install location and execute the command devon eclipse add-plugin eclipsepde

+
+
+

When using the default devonfw IDE, you should get an error here that stems from parts of the plugin being installed with the devonfw IDE by default.

+
+
+
+

Eclipse Testing

+
+

To properly test CobiGen in an Eclipse environment we use the Eclipse SWTBot which can automate eclipse interactions in a new Eclipse instance. +It is not strictly necessary to install this manually as Eclipse should prompt you for installation once you try to build a relevant project.

+
+
+

To install the plugin manually, open a console in your IDE Install location and execute the command devon eclipse add-plugin swtbot in a console

+
+
+

When using the default devonfw IDE, you should get an error here that stems from parts of the plugin being installed with the devonfw IDE by default.

+
+
+
+

Optional

+
+

Template Development

+
+

Since CobiGen is a template-based code generator, we have to develop templates. We do this using the template language FreeMarker. +It is not necessary to install any plugin though for easier usage we recommend you install an IDE into Eclipse if you do not want to use another platform.

+
+
+

To install an IDE for FreeMarker, open a console in your IDE Install location and execute the command devon eclipse add-plugin freemarker in a console

+
+
+
+

Script Development

+
+

There are some scripts used in CobiGen development that are written in Python. +You may use any platform to write in python you want, but if you want to work in Eclipse, we recommend to install pydev.

+
+
+

You can do this by opening a console in your IDE Install location and executing the command devon eclipse add-plugin pydev in a console

+
+
+
+
+

Contributing

+
+

If you want to contribute to CobiGen you should fork CobiGen and change the origin of the local repository to your fork. +You can check your remote settings by entering workspaces/main/cobigen and run the command: git remote -v. +Now let us change the URL to your Fork: git remote set-url origin <Fork url> +You can use the CobiGen repository as another remote, to get the latest changes. check out the following tutorial to do so. +https://devonfw.com/website/pages/docs/CONTRIBUTING.adoc.html#contributing.asciidoc_forking

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/cobigen.wiki/setup-jre.html b/docs/devonfw.github.io/1.0/cobigen.wiki/setup-jre.html new file mode 100644 index 00000000..9886a3aa --- /dev/null +++ b/docs/devonfw.github.io/1.0/cobigen.wiki/setup-jre.html @@ -0,0 +1,292 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Set up JRE for development

+
+
+

CobiGen is supposed to support both java 8 and java 11 even though we are moving to 11. Here is a short description of how to setup the execution environment for developing so that you can test both environments.

+
+
+

By default, CobiGen development tools come with some installed JREs in /software/java (11) and /software/java/additionalJdk (7 and 8)

+
+
+

installed jre

+
+
+

In CobiGen, there is a fixed setup of JAVASE-1.8 in maven, which leads to the odd situation that no matter which Java is currently used, eclipse keeps showing JAVASE-1.8. A temporary reconfiguration of the JRE in the build path will also be overwritten by a maven update.

+
+
+

java 11

+
+
+

Eclipse has a fixed list of execution environments, which is automatically matched with the current most suitable installed JRE, in our case JDK-8 by default. The matching JRE is the actual one, which is used to compile no matter which name eclipse shows

+
+
+

execution environments

+
+
+

Given that, to move to 11, just set the matching JRE of JAVASE-1.8 to 11 or any version you need

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/dashboard.wiki/home-page.html b/docs/devonfw.github.io/1.0/dashboard.wiki/home-page.html new file mode 100644 index 00000000..8cfc5967 --- /dev/null +++ b/docs/devonfw.github.io/1.0/dashboard.wiki/home-page.html @@ -0,0 +1,429 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Home page

+
+
+

This is the main page that you will find after your profile creation and the page where you will start from henceforth.

+
+
+

It contains three sections as below:

+
+
+
    +
  1. +

    Toolbar

    +
  2. +
  3. +

    Sidebar

    +
  4. +
  5. +

    Content

    +
  6. +
+
+
+

Topbar

+
+
+

This section is at the top of the page, it contains devonfw instance dropdown to select devonfw-ide that can be used as a base for the projects.

+
+
+
+Toolbar +
+
+
+

Next to the devonfw instance dropdown, there is a quick help icon, clicking on it will open a popup which gives some tips for how to use Devon Dashboard IDE.

+
+
+
+Quick Help +
+
+
+
+
+ +
+
+

The sidebar is divided into two sections:

+
+
+
    +
  1. +

    User Profile - Users can see their picture, name, and role.

    +
  2. +
  3. +

    Links to access to different sections of the dashboard.

    +
  4. +
+
+
+
+Sidebar +
+
+
+
+
+

Content Section

+
+
+

The Content section is also divided into three sections:

+
+
+
    +
  1. +

    A small introduction about the devonfw IDE

    +
  2. +
  3. +

    A button to Download latest version of devonfw IDE

    +
  4. +
  5. +

    A "Project" block which shows the total number of Projects which are available in different devonfw IDE

    +
  6. +
+
+
+
+
+

Steps to download and Install devonfw IDE

+
+
+

Step 1: Click on Download latest version button which is in the Content section. Check the below screen for the reference.

+
+
+
+Download Latest Version +
+
+
+

Step 2: By clicking Download latest version button, Installing devonfw popup will open.

+
+
+
+Installing Devonfw +
+
+
+

Step 3: Installing devonfw popup will automatically trigger one more popup to specify the location for downloading Devonfw IDE. Specify the location and click the Save button to download.

+
+
+
+Download location popup +
+
+
+

Step 3: Once the download completes successfully, the Next button will be enabled for the further installation process.

+
+
+
+Download Devonfw Completed +
+
+
+

Step 4: By Clicking Next button in the Installing devonfw pop up, two options are shown:
+1: Select the Git url for the installation setup.
+2: Skip this process.

+
+
+
+Installation Options +
+
+
+

Step 5: Select one of the above options.

+
+
+
    +
  • +

    If the selection is Git URL, then the configuration file URL should be entered in the input box; click the Next button to start the further installation process.

    +
  • +
  • +

    In case the user doesn’t have Git url, then simply Skip the process and click the Next button to start the further installation process.

    +
  • +
+
+
+

Step 6: Click on the Next button for the final installation process. Wait for some time to complete the installation setup. Once the installation setup completes, the Close button will appear. Just click on it and go to the specified folder location.

+
+
+
+Installation Setup +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/dashboard.wiki/home.html b/docs/devonfw.github.io/1.0/dashboard.wiki/home.html new file mode 100644 index 00000000..ac0b8ba7 --- /dev/null +++ b/docs/devonfw.github.io/1.0/dashboard.wiki/home.html @@ -0,0 +1,358 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

dashboard

+
+
+

Welcome to dashboard! This document will guide you in using the dashboard as per your needs. But before proceeding to how, let us understand what is it and why has it been developed.

+
+
+

Overview

+
+
+

The dashboard is your one stop destination for all your devonfw-ide needs. It serves as a UI on top of devonfw-ide.

+
+
+
+dashboard +
+
Figure 1. dashboard
+
+
+

Are you new to devonfw? You can download and setup the latest devonfw-ide from dashboard and get started. Or if you prefer, you can also download one of our older devonfw-ide versions available from our maven repository. The dashboard will be a fantastic introduction to devonfw as it encapsulates all that devonfw offers.

+
+
+

If you are already using devonfw-ide, you can update its settings and software to the latest versions available.

+
+
+

If you are an existing user, you will find all your workspaces spread across your different devonfw-ide versions (2020.04.003 or higher) all in one place in the dashboard. You can also create new (devon4ng, devon4j and devon4node) projects from within the dashboard and manage it from there itself.

+
+
+

You can launch any of your most used IDE’s (VS Code or Eclipse) from your active devonfw-ide instance.

+
+
+

You can also go through the list of devonfw repositories, open any one of them up in the browser or copy their URL for cloning locally.

+
+
+

You also have the devonfw wiki to know more about devonfw right from within the dashboard.

+
+
+
+
+

Motivation

+
+
+

The devon-ide is a collections of tools and software which you can configure and customize as per your requirements. It ships with a number of command line tools under an umbrella devon command.

+
+
+

The main motivation behind dashboard is to provide a user interface on top of this collection of command line tools. So it can do (almost) everything that the devonfw-ide could and more.

+
+
+

It was also developed to serve as a single point of contact for the most common devonfw tasks. You can have multiple devonfw-ide of different versions, each holding multiple projects and softwares configured differently. With dashboard you can manage all your projects from the same place.

+
+
+
+Handle multiple 'devon-ide’s +
+
Figure 2. Handle multiple 'devon-ide’s
+
+
+
+
+

Features

+
+
+
    +
  • +

    More user friendly than a command line tool

    +
  • +
  • +

    Saves time getting to know devonfw and facilitates its usage

    +
  • +
  • +

    Enhanced visibility of your projects, IDEs and devonfw-ide instances

    +
  • +
  • +

    Better version control of all your devonfw-ide

    +
  • +
  • +

    Project inventory management

    +
  • +
  • +

    Connected to devonfw docs

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/dashboard.wiki/ides-page.html b/docs/devonfw.github.io/1.0/dashboard.wiki/ides-page.html new file mode 100644 index 00000000..f83adcf0 --- /dev/null +++ b/docs/devonfw.github.io/1.0/dashboard.wiki/ides-page.html @@ -0,0 +1,299 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

IDE’s

+
+
+

This page allows you to open the IDE of your choice.

+
+
+
+IDE’s +
+
Figure 1. IDE’s
+
+
+

It currently lists only 2 IDEs: Eclipse and VS Code.

+
+
+

It will open the IDE from the devonfw instance that you have selected in the top bar:

+
+
+
+Choose your devonfw instance +
+
Figure 2. Choose your devonfw instance
+
+
+

Click on OPEN to launch the IDE of your choice. By default, the IDE will display projects from your main workspace in the selected devonfw instance.

+
+
+
+Open an IDE +
+
Figure 3. Open an IDE
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/dashboard.wiki/landing-page.html b/docs/devonfw.github.io/1.0/dashboard.wiki/landing-page.html new file mode 100644 index 00000000..6c764f9c --- /dev/null +++ b/docs/devonfw.github.io/1.0/dashboard.wiki/landing-page.html @@ -0,0 +1,315 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Landing page

+
+
+

This is the entry point of the devonfw dashboard. Click on GET STARTED NOW to start using it.

+
+
+
+Get Started +
+
Figure 1. Get Started
+
+
+

Your devonfw distributions

+
+
+

The first time you open the application you will get a dialog with all the devonfw distributions found on your machine. Click on OK GOT IT to continue.

+
+
+
+devon-ide distributions +
+
Figure 2. devon-ide distributions
+
+
+
+
+

Profile form

+
+
+

Here you will find a screen that allows you to create a profile. This is just for the purpose of customizing your dashboard.

+
+
+
+Profile +
+
Figure 3. Profile
+
+
+

Fill the data and click on CREATE MY PROFILE if you want to create the profile at the moment or click WILL DO IT LATER to skip the creation.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/dashboard.wiki/master-dashboard.html b/docs/devonfw.github.io/1.0/dashboard.wiki/master-dashboard.html new file mode 100644 index 00000000..b682208f --- /dev/null +++ b/docs/devonfw.github.io/1.0/dashboard.wiki/master-dashboard.html @@ -0,0 +1,761 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw dashboard

+
+ +
+
+
+

Landing page

+
+ +
+

Landing page

+
+
+

This is the entry point of the devonfw dashboard. Click on GET STARTED NOW to start using it.

+
+
+
+Get Started +
+
Figure 1. Get Started
+
+
+
Your devonfw distributions
+
+

The first time you open the application you will get a dialog with all the devonfw distributions found on your machine. Click on OK GOT IT to continue.

+
+
+
+devon-ide distributions +
+
Figure 2. devon-ide distributions
+
+
+
+
Profile form
+
+

Here you will find a screen that allows you to create a profile. This is just for the purpose of customizing your dashboard.

+
+
+
+Profile +
+
Figure 3. Profile
+
+
+

Fill the data and click on CREATE MY PROFILE if you want to create the profile at the moment or click WILL DO IT LATER to skip the creation.

+
+
+
+
+
+

Home

+
+ +
+

Home page

+
+
+

This is the main page that you will find after your profile creation and the page where you will start from henceforth.

+
+
+

It contains three sections as below:

+
+
+
    +
  1. +

    Toolbar

    +
  2. +
  3. +

    Sidebar

    +
  4. +
  5. +

    Content

    +
  6. +
+
+
+
Topbar
+
+

This section is at the top of the page, it contains devonfw instance dropdown to select devonfw-ide that can be used as a base for the projects.

+
+
+
+Toolbar +
+
+
+

Next to the devonfw instance dropdown, there is a quick help icon, clicking on it will open a popup which gives some tips for how to use Devon Dashboard IDE.

+
+
+
+Quick Help +
+
+
+
+ +
+

The sidebar is divided into two sections:

+
+
+
    +
  1. +

    User Profile - Users can see their picture, name, and role.

    +
  2. +
  3. +

    Links to access to different sections of the dashboard.

    +
  4. +
+
+
+
+Sidebar +
+
+
+
+
Content Section
+
+

The Content section is also divided into three sections:

+
+
+
    +
  1. +

    A small introduction about the devonfw IDE

    +
  2. +
  3. +

    A button to Download latest version of devonfw IDE

    +
  4. +
  5. +

    A "Project" block which shows the total number of Projects which are available in different devonfw IDE

    +
  6. +
+
+
+
+
Steps to download and Install devonfw IDE
+
+

Step 1: Click on Download latest version button which is in the Content section. Check the below screen for the reference.

+
+
+
+Download Latest Version +
+
+
+

Step 2: By clicking Download latest version button, Installing devonfw popup will open.

+
+
+
+Installing Devonfw +
+
+
+

Step 3: Installing devonfw popup will automatically trigger one more popup to specify the location for downloading Devonfw IDE. Specify the location and click the Save button to download.

+
+
+
+Download location popup +
+
+
+

Step 3: Once the download completes successfully, the Next button will be enabled for the further installation process.

+
+
+
+Download Devonfw Completed +
+
+
+

Step 4: By Clicking Next button in the Installing devonfw pop up, two options are shown:
+1: Select the Git url for the installation setup.
+2: Skip this process.

+
+
+
+Installation Options +
+
+
+

Step 5: Select one of the above options.

+
+
+
    +
  • +

    If the selection is Git URL, then the configuration file URL should be entered in the input box; click the Next button to start the further installation process.

    +
  • +
  • +

    In case the user doesn’t have Git url, then simply Skip the process and click the Next button to start the further installation process.

    +
  • +
+
+
+

Step 6: Click on the Next button for the final installation process. Wait for some time to complete the installation setup. Once the installation setup completes, the Close button will appear. Just click on it and go to the specified folder location.

+
+
+
+Installation Setup +
+
+
+
+
+
+

Projects

+
+ +
+
Introduction to project management in the dashboard
+
+
    +
  • +

    The dashboard manages multiple projects in multiple workspaces that include Angular, JAVA, and Node.

    +
  • +
  • +

    The dashboard provides rich UI for creating multiple projects, abstracting all the functionality which is usually required while creating an application like opening a command terminal, specifying workspace, and executing commands.

    +
  • +
  • +

    The dashboard makes it easy to see all the projects which are in different devonfw-ide workspace, just by changing the "devonfw Instance" dropdown.

    +
  • +
  • +

    The dashboard makes it very easy to open a project in a different IDE like Visual Studio or Eclipse respectively just by right click on the Project folder and open option.

    +
  • +
  • +

    The dashboard also makes it easy to delete the project, explore the project location.

    +
  • +
+
+
+
+
Projects
+
+

Click on the Projects link on the sidebar to navigate to the project’s screen. The screen displays all the projects in the currently selected devonfw-ide, grouped by the workspaces in which they exist.
+Note: Currently it only displays projects created through the dashboard.

+
+
+
+Project Screen +
+
+
+
    +
  • +

    It shows the total number of projects available in each devonfw-ide.

    +
  • +
  • +

    Filtering and searching the projects.

    +
  • +
  • +

    Add New Project - For creating a Project.

    +
  • +
  • +

    Project folder which gives information about the project like which technology the project belongs to, the name of the project, and when it was created.

    +
  • +
  • +

    There are many operations that are available on right-clicking the Project folder; they are:

    +
    +
      +
    1. +

      Opening a project in different IDE ( Visual Studio or Eclipse )

      +
    2. +
    3. +

      Enclosing Folder, and

      +
    4. +
    5. +

      Deleting the project.

      +
    6. +
    +
    +
  • +
  • +

    Users can see projects of different devonfw-ide workspace just by changing the option in the devonfw instance dropdown which is set globally at the top of the screen.

    +
  • +
+
+
+

Click on Add New Project to start creating a new project.

+
+
+
+
How to create a project
+
+

Three main steps are involved in creating any devonfw project. They are:

+
+
+

Step 1. Project Type

+
+
+

In this first step the user has to choose the language technology to start the project with, e.g. Angular, Java or Node, and click the Next button to continue to the next step.

+
+
+
+Project Type +
+
+
+

Step 2. Project Data

+
+
+

After the Project type selection, the second screen will appear for the user to fill up all the required fields. User can select the workspace in the active devonfw-ide for the project in this step. Once the user enters all the required fields, the Next button will be enabled for the final step.

+
+
+
+Project Data +
+
+
+

User can change the devonfw-ide workspace where the project is going to generate, just by changing the option in the devonfw instance dropdown which is set globally in the header of the dashboard.

+
+
+
+Toolbar +
+
+
+

Step 3. Execution

+
+
+

The execution step takes all the user entered data from the Project Data step and executes the respective commands to generate the project.

+
+
+

Execution is divided into two sections:
+- Creation
+- Setup Installation

+
+
+3.1 Creation +
+
    +
  • +

    Creates only the source code and notifies the user whether the project creation succeeds or fails.

    +
  • +
+
+
+
+Creation +
+
+
+
    +
  • +

    In case any network issue or technical issue and the user wants to re-run the Project execution process, then the Retry button will help to start the process again.

    +
  • +
+
+
+
+Retry +
+
+
+
+3.2 Setup installation +
+

Allows the user to install the dependencies of the application (maven modules for java, node modules for node, angular) by clicking the Proceed button.

+
+
+

The installation can be skipped by clicking the Cancel button.

+
+
+
+Installation +
+
+
+

Step 4. Click on Finish button to go to Project Details Screen.

+
+
+
+
+
+
+

Repositories

+
+ +
+

==Repositories

+
+
+

This page lists the different repositories under devonfw organization.

+
+
+
+Repositories +
+
Figure 4. Repositories
+
+
+

The list updates as you type in the search bar.

+
+
+
+Search Repositories +
+
Figure 5. Search Repositories
+
+
+
    +
  • +

    You can click COPY GITHUB URL for any of the repository list item to copy its github URL to your clipboard and clone it locally.

    +
  • +
  • +

    You can also click the OPEN REPOSITORY button to view its github repository page in your default browser.

    +
  • +
+
+
+
+
+

Wiki

+
+ +
+

==Wiki page.

+
+
+

This page displays the documentation of devonfw. You can also find it at https://devonfw.com/

+
+
+
+Wiki +
+
Figure 6. Wiki
+
+
+
+
+

Settings

+
+ +
+

==Settings

+
+
+
Account settings
+
+

Here you get a screen that allows you to create a profile. This is the same screen which you see during the initial setup of the dashboard. It is completely optional.

+
+
+
+Account settings +
+
Figure 7. Account settings
+
+
+

Fill the data and click on Save if you want to create the profile.

+
+
+
+
Installed versions
+
+

The installed versions subsection allows you to manage the different versions of devonfw-ide available.

+
+
+
+Installed versions +
+
Figure 8. Installed versions
+
+
+
    +
  • +

    It lists the devonfw-ide you have installed in your system, along with the ones available for download from our maven repository

    +
  • +
  • +

    If you want to install specific version, you can search it here and DOWNLOAD it

    +
  • +
  • +

    To check the release notes for a version, simply click on Consolidated list of features

    +
  • +
  • +

    For the installed versions:

    +
    +
      +
    • +

      Hovering over the eye icon shows you the path for the devonfw-ide in a tooltip

      +
    • +
    • +

      You can view it in your system explorer by clicking the eye icon

      +
    • +
    • +

      You can update its settings and softwares by clicking on UPDATE

      +
    • +
    • +

      You can also UNINSTALL an installed version, after which the dashboard will no longer keep track of the projects and IDEs belonging to that devonfw-ide

      +
    • +
    +
    +
  • +
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/dashboard.wiki/projects-page.html b/docs/devonfw.github.io/1.0/dashboard.wiki/projects-page.html new file mode 100644 index 00000000..a05bd01e --- /dev/null +++ b/docs/devonfw.github.io/1.0/dashboard.wiki/projects-page.html @@ -0,0 +1,447 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Introduction to project management in the dashboard

+
+
+
    +
  • +

    The dashboard manages multiple projects in multiple workspaces that include Angular, JAVA, and Node.

    +
  • +
  • +

    The dashboard provides rich UI for creating multiple projects, abstracting all the functionality which is usually required while creating an application like opening a command terminal, specifying workspace, and executing commands.

    +
  • +
  • +

    The dashboard makes it easy to see all the projects which are in different devonfw-ide workspace, just by changing the "devonfw Instance" dropdown.

    +
  • +
  • +

    The dashboard makes it very easy to open a project in a different IDE like Visual Studio or Eclipse, just by right-clicking on the Project folder and choosing the open option.

    +
  • +
  • +

    The dashboard also makes it easy to delete the project, explore the project location.

    +
  • +
+
+
+
+
+

Projects

+
+
+

Click on the Projects link on the sidebar to navigate to the project’s screen. The screen displays all the projects in the currently selected devonfw-ide, grouped by the workspaces in which they exist.
+Note: Currently it only displays projects created through the dashboard.

+
+
+
+Project Screen +
+
+
+
    +
  • +

    It shows the total number of projects available in each devonfw-ide.

    +
  • +
  • +

    Filtering and searching the projects.

    +
  • +
  • +

    Add New Project - For creating a Project.

    +
  • +
  • +

    Project folder which gives information about the project like which technology the project belongs to, the name of the project, and when it was created.

    +
  • +
  • +

    There are many operations that are available on right-clicking the Project folder; they are:

    +
    +
      +
    1. +

      Opening a project in different IDE ( Visual Studio or Eclipse )

      +
    2. +
    3. +

      Enclosing Folder, and

      +
    4. +
    5. +

      Deleting the project.

      +
    6. +
    +
    +
  • +
  • +

    Users can see projects of different devonfw-ide workspace just by changing the option in the devonfw instance dropdown which is set globally at the top of the screen.

    +
  • +
+
+
+

Click on Add New Project to start creating a new project.

+
+
+
+
+

How to create a project

+
+
+

Three main steps are involved in creating any devonfw project. They are:

+
+
+

Step 1. Project Type

+
+
+

In this first step the user has to choose the language technology to start the project with, e.g. Angular, Java or Node, and click the Next button to continue to the next step.

+
+
+
+Project Type +
+
+
+

Step 2. Project Data

+
+
+

After the Project type selection, the second screen will appear for the user to fill up all the required fields. User can select the workspace in the active devonfw-ide for the project in this step. Once the user enters all the required fields, the Next button will be enabled for the final step.

+
+
+
+Project Data +
+
+
+

User can change the devonfw-ide workspace where the project is going to generate, just by changing the option in the devonfw instance dropdown which is set globally in the header of the dashboard.

+
+
+
+Toolbar +
+
+
+

Step 3. Execution

+
+
+

The execution step takes all the user entered data from the Project Data step and executes the respective commands to generate the project.

+
+
+

Execution is divided into two sections:
+- Creation
+- Setup Installation

+
+
+

3.1 Creation

+
+
    +
  • +

    Creates only the source code and notifies the user whether the project creation succeeds or fails.

    +
  • +
+
+
+
+Creation +
+
+
+
    +
  • +

    In case any network issue or technical issue and the user wants to re-run the Project execution process, then the Retry button will help to start the process again.

    +
  • +
+
+
+
+Retry +
+
+
+
+

3.2 Setup installation

+
+

Allows the user to install the dependencies of the application (maven modules for java, node modules for node, angular) by clicking the Proceed button.

+
+
+

The installation can be skipped by clicking the Cancel button.

+
+
+
+Installation +
+
+
+

Step 4. Click on Finish button to go to Project Details Screen.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/dashboard.wiki/repositories-page.html b/docs/devonfw.github.io/1.0/dashboard.wiki/repositories-page.html new file mode 100644 index 00000000..587f64a9 --- /dev/null +++ b/docs/devonfw.github.io/1.0/dashboard.wiki/repositories-page.html @@ -0,0 +1,297 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Repositories

+
+
+

This page lists the different repositories under devonfw organization.

+
+
+
+Repositories +
+
Figure 1. Repositories
+
+
+

The list updates as you type in the search bar.

+
+
+
+Search Repositories +
+
Figure 2. Search Repositories
+
+
+
    +
  • +

    You can click COPY GITHUB URL for any of the repository list item to copy its github URL to your clipboard and clone it locally.

    +
  • +
  • +

    You can also click the OPEN REPOSITORY button to view its github repository page in your default browser.

    +
  • +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/dashboard.wiki/settings-page.html b/docs/devonfw.github.io/1.0/dashboard.wiki/settings-page.html new file mode 100644 index 00000000..1d756926 --- /dev/null +++ b/docs/devonfw.github.io/1.0/dashboard.wiki/settings-page.html @@ -0,0 +1,338 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Settings

+
+
+

Account settings

+
+
+

Here you get a screen that allows you to create a profile. This is the same screen which you see during the initial setup of the dashboard. It is completely optional.

+
+
+
+Account settings +
+
Figure 1. Account settings
+
+
+

Fill the data and click on Save if you want to create the profile.

+
+
+
+
+

Installed versions

+
+
+

The installed versions subsection allows you to manage the different versions of devonfw-ide available.

+
+
+
+Installed versions +
+
Figure 2. Installed versions
+
+
+
    +
  • +

    It lists the devonfw-ide you have installed in your system, along with the ones available for download from our maven repository

    +
  • +
  • +

    If you want to install specific version, you can search it here and DOWNLOAD it

    +
  • +
  • +

    To check the release notes for a version, simply click on Consolidated list of features

    +
  • +
  • +

    For the installed versions:

    +
    +
      +
    • +

      Hovering over the eye icon shows you the path for the devonfw-ide in a tooltip

      +
    • +
    • +

      You can view it in your system explorer by clicking the eye icon

      +
    • +
    • +

      You can update its settings and softwares by clicking on UPDATE

      +
    • +
    • +

      You can also UNINSTALL an installed version, after which the dashboard will no longer keep track of the projects and IDEs belonging to that devonfw-ide

      +
    • +
    +
    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/dashboard.wiki/wiki-page.html b/docs/devonfw.github.io/1.0/dashboard.wiki/wiki-page.html new file mode 100644 index 00000000..04a35728 --- /dev/null +++ b/docs/devonfw.github.io/1.0/dashboard.wiki/wiki-page.html @@ -0,0 +1,278 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Wiki page.

+
+
+

This page displays the documentation of devonfw. You can also find it at https://devonfw.com/

+
+
+
+Wiki +
+
Figure 1. Wiki
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/DB-Integration-MSSQL-Server-2008.html b/docs/devonfw.github.io/1.0/devon4j.wiki/DB-Integration-MSSQL-Server-2008.html new file mode 100644 index 00000000..ee95dd49 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/DB-Integration-MSSQL-Server-2008.html @@ -0,0 +1,608 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

##Guide for DBIntegration of MS SQL Server 2008

+
+
+

devon4j is by default configured with the H2 Database.

+
+
+

MSSQL Installation and Configuration using Docker

+
+
+

We can now use SQL Server in Linux and run it easily using docker executing:

+
+
+
+
docker run --name mssql -e 'ACCEPT_EULA=Y' -e 'SA_PASSWORD=Passw0rd' -p 1433:1433 -d microsoft/mssql-server-linux
+
+
+
+

This makes MSSQL available on the docker-machine host on port 1433. If using docker on windows with docker toolbox it usually means that MSSQL will be on 192.168.99.100 (please check the IP of your docker machine)

+
+
+

So the configuration for the datasource url string will be:

+
+
+

jdbc:sqlserver://192.168.99.100:1433;databaseName=restaurant

+
+
+

There are no client tools on this image to test and connect to the MSSQL but we can create a connection within eclipse using a generic JDBC connection

+
+
+
+
+

Installing mssql ojdbc driver dependency

+
+
+

The maven dependency required for the MS SQL Server JDBC driver is not available in Maven Central, so a manual install may be required.

+
+
+

In order to do so, you can manually download the driver from:

+
+ +
+

And then install using the command line:

+
+
+
+
mvn install:install-file -Dfile=sqljdbc4-4.0.jar -DgroupId=com.microsoft.sqlserver -DartifactId=sqljdbc4 -Dversion=4.0  -Dpackaging=jar
+
+
+
+
+
+

Installing MSSQL Server on Windows

+
+
+

Following are the steps with screen shots to configure the MS SQL Server 2008 in windows.

+
+
+

Note : One can ignore the following section if they are well versed with installation process of the MS SQL Server 2008. +MSSQL Server 2008 Installation and Configuration

+
+
+
    +
  • +

    In ‘Server Configuration’ step, specify the “Service Accounts” as shown in the screenshot. Click NEXT Button.

    +
  • +
+
+
+
+serviceconfig +
+
+
+
    +
  • +

    In ‘Database Engine Configuration’ step, specify the “Authentication Mode” as shown in the screenshot. Click NEXT Button.

    +
  • +
+
+
+
+databaseconfig +
+
+
+
    +
  • +

    In ‘Analysis Services Configuration’ step, specify the “Account Provisioning” as shown in the screenshot. Click NEXT Button.

    +
  • +
+
+
+
+servicesconfig +
+
+
+
    +
  • +

    In ‘Reporting Services Configuration’ step, specify the “reporting service configuration mode” as shown in the screenshot. Click NEXT Button.

    +
  • +
+
+
+
+reportingconfig +
+
+
+
    +
  • +

    In ‘Error and Usage Reporting’ step, check if you want to automatically send information to the server, as shown in screenshot. Click NEXT Button.

    +
  • +
+
+
+
+reportingconfig +
+
+
+
    +
  • +

    Alternatively, you can select the default configuration for above steps and complete the installation.

    +
  • +
+
+
+
+
+

Enabling MSSQL Server 2008 and disabling H2 Database

+
+
+
    +
  • +

    Assuming the MS SQL database that is created is restaurant, execute the following script to create Flyway MetaData Table schema_version in the database restaurant

    +
  • +
+
+
+
+
USE [restaurant]
+GO
+
+/****** Object:  Table [dbo].[schema_version]    Script Date: 12/02/2016 15:48:34 ******/
+SET ANSI_NULLS ON
+GO
+
+SET QUOTED_IDENTIFIER ON
+GO
+
+CREATE TABLE [dbo].[schema_version](
+	[version_rank] [int] NOT NULL,
+	[installed_rank] [int] NOT NULL,
+	[version] [nvarchar](50) NOT NULL,
+	[description] [nvarchar](200) NULL,
+	[type] [nvarchar](20) NOT NULL,
+	[script] [nvarchar](1000) NOT NULL,
+	[checksum] [int] NULL,
+	[installed_by] [nvarchar](100) NOT NULL,
+	[installed_on] [datetime] NOT NULL,
+	[execution_time] [int] NOT NULL,
+	[success] [bit] NOT NULL,
+ CONSTRAINT [schema_version_pk] PRIMARY KEY CLUSTERED
+(
+	[version] ASC
+)WITH (PAD_INDEX  = OFF, STATISTICS_NORECOMPUTE  = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS  = ON, ALLOW_PAGE_LOCKS  = ON) ON [PRIMARY]
+) ON [PRIMARY]
+
+GO
+
+ALTER TABLE [dbo].[schema_version] ADD  DEFAULT (getdate()) FOR [installed_on]
+GO
+
+
+
+
    +
  • +

    Add the dependency for MSSQLServer 2008 jdbc driver in devon4j core module pom.xml.

    +
  • +
+
+
+
+
<dependency>
+    <groupId>com.microsoft.sqlserver</groupId>
+    <artifactId>sqljdbc4</artifactId>
+    <version>4.0</version>
+</dependency>
+
+
+
+
    +
  • +

    Uncomment the query to retrieve id’s from Bill table for payed=1 in devon4j core module src/main/resources/META-INF/orm.xml and comment the one for H2 Database. Uncomment code below:

    +
  • +
+
+
+
+
<named-native-query name="get.all.ids.of.payed.bills">
+    <query><![CDATA[SELECT id FROM Bill WHERE payed = 1]]></query>
+</named-native-query>
+
+
+
+
    +
  • +

    Change the value of following property ‘spring.datasource.url’ in following file ‘devon4j-sample-core/src/main/resources/config/application-mssql.properties’. Accordingly, change the following properties:

    +
    +
      +
    • +

      Hostname

      +
    • +
    • +

      Port

      +
    • +
    • +

      Database Name

      +
    • +
    • +

      spring.datasource.username

      +
    • +
    • +

      spring.datasource.password

      +
    • +
    +
    +
  • +
  • +

    Run the script core/src/test/setup/mssqldb.bat for Windows Environment and the script core/src/test/setup/mssqldb.sh for Unix/Linux Environments.

    +
  • +
  • +

    Comment the spring active profile h2mem and uncomment the spring active profile mssql in devon4j core module src/main/resources/config/application.properties.

    +
  • +
  • +

    Uncomment the line that has spring active profile junit and mssql separated by comma and comment the line has spring active profiles junit in the file devon4j-sample-core/src/test/resources/config/application.properties.

    +
  • +
+
+
+

Note: Make sure that JUNIT Test cases run successfully for devon4j Project using the command ‘mvn clean install’.

+
+
+

Assuming that devon4j is integrated with MS SQL Server 2008, following are the steps to enable H2 Database.

+
+
+
+
+

Disabling MSSQL Server 2008 and enabling H2 Database

+
+
+
    +
  • +

    Comment the dependency for MSSQLServer 2008 jdbc driver in devon4j core module pom.xml.

    +
  • +
+
+
+
+
<!--
+<dependency>
+   <groupId>com.microsoft.sqlserver</groupId>
+   <artifactId>sqljdbc4</artifactId>
+   <version>4.0</version>
+</dependency>
+-->
+
+
+
+
    +
  • +

    Uncomment the query to retrieve id’s from Bill table for payed=true in devon4j-sample-core/src/main/resources/META-INF/orm.xml and comment the one that exists for MS SQL Server.

    +
  • +
+
+
+
+
<named-native-query name="get.all.ids.of.payed.bills">
+   <query><![CDATA[SELECT id FROM Bill WHERE payed = true]]></query>
+</named-native-query>
+
+
+
+
    +
  • +

    Run the script core/src/test/setup/disablemssqldb.bat for Windows Environment and the script core/src/test/setup/disablemssqldb.sh for Unix/Linux Environments.

    +
  • +
  • +

    Uncomment the spring active profile h2mem and comment the spring active profile mssql in devon4j-sample-core/src/main/resources/config/application.properties

    +
  • +
  • +

    Uncomment the line that has spring active profile junit and comment the line has spring active profiles junit and mssql separated by comma in the file devon4j-sample-core/src/test/resources/config/application.properties

    +
  • +
+
+
+

Note: Make sure that JUNIT Test cases run successfully for devon4j Project using the command ‘mvn clean install’.

+
+
+
+
+

Run the sample application with the Angular JS Client

+
+
+
    +
  • +

    Follow the steps mentioned here

    +
  • +
+
+
+
+
+

Run the sample application with the Sencha Client

+
+
+
    +
  • +

    Follow the steps mentioned here

    +
  • +
+
+
+

Note : One has to recompile devon4j project by executing the command mvn clean install in devon4j project after doing the changes mentioned in the above said instructions.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/DB-Integration-MariaDB-10.0.27.html b/docs/devonfw.github.io/1.0/devon4j.wiki/DB-Integration-MariaDB-10.0.27.html new file mode 100644 index 00000000..e94c5694 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/DB-Integration-MariaDB-10.0.27.html @@ -0,0 +1,475 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

##Guide for DBIntegration of MariaDB

+
+
+

devon4j is by default configured with the H2 Database.

+
+
+

To integrate devon4j with MariaDB 10.0.27, as a first step, the MariaDB 10.0.27 Database has to be installed. Follow the link here (https://mariadb.com/kb/en/mariadb/installing-mariadb-msi-packages-on-windows/) to install MariaDB 10.0.27

+
+
+

Using MariaDB with docker

+
+
+

We can provision a MariaDB with docker by running the following line:

+
+
+
+
docker run --name mariadb -p 3306:3306 -e MYSQL_ROOT_PASSWORD=password -d mariadb:10.0.28 --lower-case-table-names=1
+
+
+
+

And access it using mysql console using also a docker process

+
+
+
+
docker run -it --rm --link mariadb:mariadb mariadb:10.0.28 sh -c 'exec mysql -hmariadb -P3306 -uroot -ppassword'
+
+
+
+

Also, when configuring the connection url value, take into account the address of the docker machine (in windows it usually points to 192.168.99.100)

+
+
+
+
spring.datasource.url=jdbc:mariadb://192.168.99.100:3306/restaurant?user=root&password=password
+
+
+
+
+
+

Enabling MariaDB and disabling h2 Database

+
+
+
    +
  • +

    Assuming the MariaDB database that is created is RESTAURANT , execute the following script to create Flyway MetaData Table schema_version in the database RESTAURANT

    +
  • +
+
+
+
+
CREATE TABLE `schema_version` (
+	`version_rank` INT(11) NOT NULL,
+	`installed_rank` INT(11) NOT NULL,
+	`version` VARCHAR(50) NOT NULL,
+	`description` VARCHAR(200) NOT NULL,
+	`type` VARCHAR(20) NOT NULL,
+	`script` VARCHAR(1000) NOT NULL,
+	`checksum` INT(11) NULL DEFAULT NULL,
+	`installed_by` VARCHAR(100) NOT NULL,
+	`installed_on` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+	`execution_time` INT(11) NOT NULL,
+	`success` TINYINT(1) NOT NULL,
+	PRIMARY KEY (`version`),
+	INDEX `schema_version_vr_idx` (`version_rank`),
+	INDEX `schema_version_ir_idx` (`installed_rank`),
+	INDEX `schema_version_s_idx` (`success`)
+)
+COLLATE='latin1_swedish_ci'
+ENGINE=InnoDB
+;
+
+
+
+
    +
  • +

    Add the dependency for MariaDB 10.0.27 Java Connector in devon4j core module pom.xml. Dependency for MariaDB 10.0.27 is as follows :

    +
  • +
+
+
+
+
<dependency>
+   <groupId>org.mariadb.jdbc</groupId>
+   <artifactId>mariadb-java-client</artifactId>
+   <version>1.5.4</version>
+</dependency>
+
+
+
+
    +
  • +

    Change the value of following property ‘spring.datasource.url’ in devon4j core module file ‘src/main/resources/config/application-mysql.properties’. Accordingly, change the following properties

    +
    +
      +
    • +

      Hostname

      +
    • +
    • +

      Port

      +
    • +
    • +

      Database Name

      +
    • +
    • +

      spring.datasource.username

      +
    • +
    • +

      spring.datasource.password

      +
    • +
    +
    +
  • +
  • +

    Comment the spring active profile h2mem and uncomment the spring active profile mysql in core module /src/main/resources/config/application.properties

    +
  • +
  • +

    Comment the line that has spring active profile junit and uncomment the line that has spring active profiles junit and mysql separated by comma in the core module src/test/resources/config/application.properties

    +
  • +
  • +

    Run the script core/src/test/setup/mariadb.bat for Windows Environment and the script core/src/test/setup/mariadb.sh for Unix/Linux Environments.

    +
  • +
  • +

    Open /devon4j-sample-core/src/test/java/io/oasp/gastronomy/restaurant/tablemanagement/service/impl/rest/TablemanagementRestServiceTest.java. In test testSaveTable() & testFindTablesByPost() change the waiterId from 2L to 3L.

    +
  • +
+
+
+

Note: Make sure that JUNIT Test cases run successfully for devon4j Project using the command ‘mvn clean install’.

+
+
+

Assuming that devon4j is integrated with MariaDB 10.0.27, following are the steps to enable H2 Database

+
+
+
+
+

Disabling MariaDB and enabling H2 Database

+
+
+
    +
  • +

    Comment the dependency for MariaDB 10.0.27 Java Connector in devon4j core module pom.xml. Dependency for MariaDB 10.0.27 is as follows

    +
  • +
+
+
+
+
<!--
+<dependency>
+<groupId>org.mariadb.jdbc</groupId>
+      	<artifactId>mariadb-java-client</artifactId>
+      	<version>1.5.4</version>
+</dependency>
+-->
+
+
+
+
    +
  • +

    Comment the spring active profile mysql and uncomment the spring active profile h2mem in core module src/main/resources/config/application.properties

    +
  • +
  • +

    Uncomment the line that has spring active profile junit and comment the line that has spring active profiles junit and mysql separated by comma in the file core module src/test/resources/config/application.properties.

    +
  • +
  • +

    Run the script core/src/test/setup/disablemariadb.bat for Windows Environment and the script core/src/test/setup/disablemariadb.sh for Unix/Linux Environments.

    +
  • +
+
+
+

Note: Make sure that JUNIT Test cases run successfully for devon4j Project using the command ‘mvn clean install’.

+
+
+
+
+

Run the sample application with the Angular JS Client

+
+
+
    +
  • +

    Follow the steps mentioned here

    +
  • +
+
+
+
+
+

Run the sample application with the Sencha Client

+
+
+
    +
  • +

    Follow the steps mentioned here

    +
  • +
+
+
+

Note : One has to recompile devon4j project by executing the command mvn clean install in devon4j project after doing the changes mentioned in the above said instructions.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/DB-Integration-Oracle11G.html b/docs/devonfw.github.io/1.0/devon4j.wiki/DB-Integration-Oracle11G.html new file mode 100644 index 00000000..a5b6208c --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/DB-Integration-Oracle11G.html @@ -0,0 +1,694 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

##Guide for DBIntegration of Oracle 11g

+
+
+

devon4j is by default configured with the H2 Database.

+
+
+

To integrate devon4j with Oracle 11g, as a first step, the Oracle 11g Database has to be installed. Follow the link to install Oracle 11g.

+
+
+

Installing Oracle JDBC Driver

+
+
+

To install the Oracle JDBC Driver, run the following command

+
+
+
+
mvn install:install-file -Dfile=C:\app\vkiran\product\11.2.0\dbhome_1\jdbc\lib\ojdbc.jar -DgroupId=com.oracle -DartifactId=ojdbc6 -Dversion=11.2.0 -Dpackaging=jar
+
+
+
+

Note: Location of ojdbc.jar might differ based on the path that is selected at the time of installation of the Oracle 11g.

+
+
+
+
+

Enabling Oracle 11g and disabling h2 Database

+
+
+
    +
  • +

    Assuming the Oracle database that is created is devon4j , execute the following script to create Flyway MetaData Table schema_version in the database devon4j

    +
  • +
+
+
+
+
--------------------------------------------------------
+--  File created - Friday-December-02-2016
+--------------------------------------------------------
+--------------------------------------------------------
+--  DDL for Table schema_version
+--------------------------------------------------------
+
+  CREATE TABLE "devon4j"."schema_version"
+   (	"version_rank" NUMBER(*,0),
+	"installed_rank" NUMBER(*,0),
+	"version" VARCHAR2(50 BYTE),
+	"description" VARCHAR2(200 BYTE),
+	"type" VARCHAR2(20 BYTE),
+	"script" VARCHAR2(1000 BYTE),
+	"checksum" NUMBER(*,0),
+	"installed_by" VARCHAR2(100 BYTE),
+	"installed_on" TIMESTAMP (6) DEFAULT CURRENT_TIMESTAMP,
+	"execution_time" NUMBER(*,0),
+	"success" NUMBER(1,0)
+   ) SEGMENT CREATION IMMEDIATE
+  PCTFREE 10 PCTUSED 40 INITRANS 1 MAXTRANS 255 NOCOMPRESS LOGGING
+  STORAGE(INITIAL 65536 NEXT 1048576 MINEXTENTS 1 MAXEXTENTS 2147483645
+  PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1 BUFFER_POOL DEFAULT FLASH_CACHE DEFAULT CELL_FLASH_CACHE DEFAULT)
+  TABLESPACE "USERS" ;
+--------------------------------------------------------
+--  DDL for Index schema_version_pk
+--------------------------------------------------------
+
+  CREATE UNIQUE INDEX "devon4j"."schema_version_pk" ON "devon4j"."schema_version" ("version")
+  PCTFREE 10 INITRANS 2 MAXTRANS 255 COMPUTE STATISTICS
+  STORAGE(INITIAL 65536 NEXT 1048576 MINEXTENTS 1 MAXEXTENTS 2147483645
+  PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1 BUFFER_POOL DEFAULT FLASH_CACHE DEFAULT CELL_FLASH_CACHE DEFAULT)
+  TABLESPACE "USERS" ;
+--------------------------------------------------------
+--  DDL for Index schema_version_vr_idx
+--------------------------------------------------------
+
+  CREATE INDEX "devon4j"."schema_version_vr_idx" ON "devon4j"."schema_version" ("version_rank")
+  PCTFREE 10 INITRANS 2 MAXTRANS 255 COMPUTE STATISTICS
+  STORAGE(INITIAL 65536 NEXT 1048576 MINEXTENTS 1 MAXEXTENTS 2147483645
+  PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1 BUFFER_POOL DEFAULT FLASH_CACHE DEFAULT CELL_FLASH_CACHE DEFAULT)
+  TABLESPACE "USERS" ;
+--------------------------------------------------------
+--  DDL for Index schema_version_ir_idx
+--------------------------------------------------------
+
+  CREATE INDEX "devon4j"."schema_version_ir_idx" ON "devon4j"."schema_version" ("installed_rank")
+  PCTFREE 10 INITRANS 2 MAXTRANS 255 COMPUTE STATISTICS
+  STORAGE(INITIAL 65536 NEXT 1048576 MINEXTENTS 1 MAXEXTENTS 2147483645
+  PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1 BUFFER_POOL DEFAULT FLASH_CACHE DEFAULT CELL_FLASH_CACHE DEFAULT)
+  TABLESPACE "USERS" ;
+--------------------------------------------------------
+--  DDL for Index schema_version_s_idx
+--------------------------------------------------------
+
+  CREATE INDEX "devon4j"."schema_version_s_idx" ON "devon4j"."schema_version" ("success")
+  PCTFREE 10 INITRANS 2 MAXTRANS 255 COMPUTE STATISTICS
+  STORAGE(INITIAL 65536 NEXT 1048576 MINEXTENTS 1 MAXEXTENTS 2147483645
+  PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1 BUFFER_POOL DEFAULT FLASH_CACHE DEFAULT CELL_FLASH_CACHE DEFAULT)
+  TABLESPACE "USERS" ;
+--------------------------------------------------------
+--  Constraints for Table schema_version
+--------------------------------------------------------
+
+  ALTER TABLE "devon4j"."schema_version" MODIFY ("version_rank" NOT NULL ENABLE);
+
+  ALTER TABLE "devon4j"."schema_version" MODIFY ("installed_rank" NOT NULL ENABLE);
+
+  ALTER TABLE "devon4j"."schema_version" MODIFY ("version" NOT NULL ENABLE);
+
+  ALTER TABLE "devon4j"."schema_version" MODIFY ("description" NOT NULL ENABLE);
+
+  ALTER TABLE "devon4j"."schema_version" MODIFY ("type" NOT NULL ENABLE);
+
+  ALTER TABLE "devon4j"."schema_version" MODIFY ("script" NOT NULL ENABLE);
+
+  ALTER TABLE "devon4j"."schema_version" MODIFY ("installed_by" NOT NULL ENABLE);
+
+  ALTER TABLE "devon4j"."schema_version" MODIFY ("installed_on" NOT NULL ENABLE);
+
+  ALTER TABLE "devon4j"."schema_version" MODIFY ("execution_time" NOT NULL ENABLE);
+
+  ALTER TABLE "devon4j"."schema_version" MODIFY ("success" NOT NULL ENABLE);
+
+  ALTER TABLE "devon4j"."schema_version" ADD CONSTRAINT "schema_version_pk" PRIMARY KEY ("version")
+  USING INDEX PCTFREE 10 INITRANS 2 MAXTRANS 255 COMPUTE STATISTICS
+  STORAGE(INITIAL 65536 NEXT 1048576 MINEXTENTS 1 MAXEXTENTS 2147483645
+  PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1 BUFFER_POOL DEFAULT FLASH_CACHE DEFAULT CELL_FLASH_CACHE DEFAULT)
+  TABLESPACE "USERS"  ENABLE;
+
+
+
+
    +
  • +

    Uncomment column annotation for ‘number’ attribute in devon4j-sample-core/src/main/java/io/gastronomy/restaurant/offermanagement/dataaccess/api/OfferEntity.java. Below is the uncommented code for reference.

    +
  • +
+
+
+
+
/** Uncomment the following Column annotation if the database used is Oracle 11g and comment the Column annotation just before @Override annotation **/
+
+@Column(name = "\"number\"", unique = true)
+//@Column(name = "number", unique = true)
+
+@Override
+
+public Long getNumber() {
+return this.number;
+}
+
+
+
+
    +
  • +

    Uncomment column annotation for ‘comment’ attribute in devon4j-sample-core/src/main/java/io/gastronomy/restaurant/offermanagement/dataaccess/api/OrderPositionEntity.java. Below is the uncommented code for reference

    +
  • +
+
+
+
+
@Override
+  /*
+   * Uncomment the following Column annotation if the database used is Oracle 11g
+   */
+
+@Column(name = "\"comment\"")
+public String getComment() {
+
+return this.comment;
+}
+
+
+
+
    +
  • +

    Uncomment column annotation for ‘comment’ attribute in devon4j-sample-core/src/main/java/io/gastronomy/restaurant/offermanagement/dataaccess/api/TableEntity.java. Below is the uncommented code for reference

    +
  • +
+
+
+
+
@Override
+  /*
+   * Uncomment the following Column annotation if the database used is Oracle 11g and comment the Column annotation just
+   * before @Override annotation
+   */
+
+@Column(name = "\"number\"", unique = true)
+//@Column (unique = true)
+
+  	public Long getNumber() {
+
+    		return this.number;
+}
+
+
+
+
    +
  • +

    Uncomment the dependency for the Oracle 11g jdbc driver in devon4j-sample-core/pom.xml. Dependency for Oracle 11g is as follows :

    +
  • +
+
+
+
+
<dependency>
+   <groupId>com.oracle</groupId>
+   <artifactId>ojdbc6</artifactId>
+   <version>11.2.0</version>
+</dependency>
+
+
+
+
    +
  • +

    Uncomment the named native query for oracle in /devon4j-sample-core/src/main/resources/META-INF/orm.xml shown below :

    +
  • +
+
+
+
+
<named-native-query name="get.all.ids.of.payed.bills">
+    <query><![CDATA[SELECT id FROM Bill WHERE payed = 1]]></query>
+</named-native-query>
+
+
+
+

And comment out the named native query for H2 shown below

+
+
+
+
<named-native-query name="get.all.ids.of.payed.bills">
+    <query><![CDATA[SELECT id FROM Bill WHERE payed = true]]></query>
+  </named-native-query>
+
+
+
+
    +
  • +

    Rename file bills.csv at following path devon4j-sample-core/src/test/resources/BillExportJobTest/expected/ to bills_h2.csv

    +
  • +
  • +

    Rename the file bills_orcl.csv in devon4j-sample-core/src/test/resources/BillExportJobTest/expected/ to bills.csv

    +
  • +
  • +

    Change the value of following property ‘spring.datasource.url’ in this file ‘devon4j-sample-core/src/main/resources/config/application-orcl.properties’. Accordingly, change the following properties:

    +
    +
      +
    • +

      Hostname

      +
    • +
    • +

      Port

      +
    • +
    • +

      Database Name

      +
    • +
    • +

      spring.datasource.username

      +
    • +
    • +

      spring.datasource.password

      +
    • +
    +
    +
  • +
  • +

    Comment the spring active profile h2mem and uncomment the spring active profile orcl in devon4j-sample-core/src/main/resources/config/application.properties.

    +
  • +
  • +

    Comment the line that has spring active profile junit and uncomment the line that has spring active profiles junit and orcl separated by comma in the file devon4j-sample-core/src/test/resources/config/application.properties.

    +
  • +
  • +

    Run the script core/src/test/setup/oracledb.bat for Windows Environment and the script core/src/test/setup/oracledb.sh for Unix/Linux Environments.

    +
  • +
+
+
+

Note: Make sure that JUNIT Test cases run successfully for devon4j Project using the command ‘mvn clean install’. +Assuming that devon4j is integrated with Oracle 11g, following are the steps to enable H2 Database

+
+
+
+
+

Disabling Oracle 11g and enabling H2 Database

+
+
+
    +
  • +

    Comment column annotation for ‘number’ attribute in devon4j-sample-core/src/main/java/io/gastronomy/restaurant/offermanagement/dataaccess/api/OfferEntity.java. Below is the commented code for reference.

    +
  • +
+
+
+
+
/** Uncomment the following Column annotation if the database used is Oracle 11g and comment the Column annotation just before @Override annotation **/
+
+//@Column(name = "\"number\"", unique = true)
+@Column(name = "number", unique = true)
+
+@Override
+
+public Long getNumber() {
+return this.number;
+}
+
+
+
+
    +
  • +

    Comment column annotation for ‘comment’ attribute in devon4j-sample-core/src/main/java/io/gastronomy/restaurant/offermanagement/dataaccess/api/OrderPositionEntity.java. Below is the commented code for reference

    +
  • +
+
+
+
+
@Override
+  /*
+   * Uncomment the following Column annotation if the database used is Oracle 11g
+   */
+
+//@Column(name = "\"comment\"")
+public String getComment() {
+
+return this.comment;
+}
+
+
+
+
    +
  • +

    Comment column annotation for ‘comment’ attribute in devon4j-sample-core/src/main/java/io/gastronomy/restaurant/offermanagement/dataaccess/api/TableEntity.java. Below is the commented code for reference

    +
  • +
+
+
+
+
@Override
+  /*
+   * Uncomment the following Column annotation if the database used is Oracle 11g and comment the Column annotation just
+   * before @Override annotation
+   */
+
+//@Column(name = "\"number\"", unique = true)
+@Column (unique = true)
+
+  	public Long getNumber() {
+
+    		return this.number;
+}
+
+
+
+
    +
  • +

    Comment the dependency for the Oracle 11g jdbc driver in devon4j-sample-core/pom.xml. Dependency for Oracle 11g is as follows :

    +
  • +
+
+
+
+
<!--
+<dependency>
+   <groupId>com.oracle</groupId>
+   <artifactId>ojdbc6</artifactId>
+   <version>11.2.0</version>
+</dependency>
+-->
+
+
+
+
    +
  • +

    Comment the spring active profile orcl and uncomment the spring active profile h2mem in devon4j-sample-core/src/main/resources/config/application.properties.

    +
  • +
  • +

    Uncomment the line that has spring active profile junit and comment the line that has spring active profiles junit and orcl separated by comma in the file devon4j-sample-core/src/test/resources/config/application.properties.

    +
  • +
  • +

    Run the script core/src/test/setup/disableoracledb.bat for Windows Environment and the script core/src/test/setup/disableoracledb.sh for Unix/Linux Environments.

    +
  • +
  • +

    Make a copy of bills.csv at following path devon4j-sample-core/src/test/resources/BillExportJobTest/expected/ and rename it to bills_orcl.csv.

    +
  • +
  • +

    Rename bills_h2.csv in devon4j-sample-core/src/test/resources/BillExportJobTest/expected/ to bills.csv

    +
  • +
+
+
+

Note: Make sure that JUNIT Test cases run successfully for devon4j Project using the command ‘mvn clean install’.

+
+
+
+
+

Run the sample application with the Angular JS Client

+
+
+
    +
  • +

    Follow the steps mentioned here

    +
  • +
+
+
+
+
+

Run the sample application with the Sencha Client

+
+
+
    +
  • +

    Follow the steps mentioned here

    +
  • +
+
+
+

Note : One has to recompile devon4j project by executing the command mvn clean install in devon4j project after doing the changes mentioned in the above said instructions.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/DB-Integration-PostGres-Server-9.5.4.html b/docs/devonfw.github.io/1.0/devon4j.wiki/DB-Integration-PostGres-Server-9.5.4.html new file mode 100644 index 00000000..d08dbe08 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/DB-Integration-PostGres-Server-9.5.4.html @@ -0,0 +1,683 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

##Guide for DBIntegration of PostGres Server

+
+
+

devon4j is by default configured with the H2 Database.

+
+
+

As a first step to integrate devon4j with the PostGres 9.5.4, PostGres 9.5.4 has to be installed. Following are the snapshots of the configuration chosen during various stages of installation .

+
+
+

Note : One can ignore the following section if they are well versed with the installation process of PostGres 9.5.4.

+
+
+

PostGres Installation and Configuration using Docker

+
+
+

In order to have a Postgres up and running with docker we can execute

+
+
+
+
docker run --name postgres -p 5432:5432 -e POSTGRES_PASSWORD=mysecretpassword -d postgres:9.5.4
+
+
+
+

This makes Postgres available on the docker-machine host on port 5432. If using docker on windows with docker toolbox it usually means that Postgres will be on 192.168.99.100 (please check the IP of your docker machine)

+
+
+

So the configuration for the datasource url string will be:

+
+
+

jdbc:postgresql://192.168.99.100:5432/mydb?currentSchema=devon4j

+
+
+

To check the installation or to have an interactive query tool with Postgres we can run another docker process like this:

+
+
+
+
docker run -it --rm --link postgres:postgres postgres psql -h postgres -U postgres
+
+
+
+

Now we can create the database and schema by running on the psql console

+
+
+
+
create database mydb;
+create schema devon4j;
+
+
+
+
+
+

PostGres Installation and Configuration

+
+
+
    +
  • +

    Download PostGres 9.5.4 for Windows 64 bit Operating System from here. Screenshot of the download page below.

    +
  • +
+
+
+
+download postgre +
+
+
+
    +
  • +

    Once the installable for PostGres 9.5.4 is downloaded, click on the installable to start the installation process. It is shown in the below screenshot.

    +
  • +
+
+
+
+downloaded postgre +
+
+
+
    +
  • +

    The ‘Setup’ Wizard starts with screen shown below. Click Next button.

    +
  • +
+
+
+
+setup postgre +
+
+
+
    +
  • +

    In the next step, select installation directory path and click Next button

    +
  • +
+
+
+
+setup installation directory +
+
+
+
    +
  • +

    In the next step, select data directory path and click Next button.

    +
  • +
+
+
+
+setup data directory +
+
+
+
    +
  • +

    In the next step, enter the password for PostGres and click Next button.

    +
  • +
+
+
+
+setup password +
+
+
+
    +
  • +

    In the next step, enter the port for PostGres and click Next button.

    +
  • +
+
+
+
+setup port +
+
+
+
    +
  • +

    In the next step, select the Locale for PostGres and click Next button.

    +
  • +
+
+
+
+setup advanced options +
+
+
+
    +
  • +

    In the next step, select the check box for launching the Stack Builder if needed and click Finish button.

    +
  • +
+
+
+
+setup completing postgre +
+
+
+
+
+

Enabling PostGres and disabling H2 Database

+
+
+
    +
  • +

    Add an entry similar to the following entry in 'IPv4 local connections' section in pg_hba.conf file that is located inside 'data' directory of PostGres installation. For instance , if the installation path of PostGres is D:\installations\PostGres9.5.4 , path of pg_hba.conf will be D:\installations\PostGres9.5.4\data\pg_hba.conf

    +
  • +
+
+
+
+
host    all             postgres        10.102.114.142/32       trust
+
+
+
+

In the above entry , replace the IP details with details of your machine.

+
+
+
    +
  • +

    Assuming the schema created under PostGres database mydb is devon4j, execute the following script to create Flyway MetaData Table schema_version in the schema devon4j.

    +
  • +
+
+
+
+
-- Table: devon4j.schema_version
+
+-- DROP TABLE devon4j.schema_version;
+
+CREATE TABLE devon4j.schema_version
+(
+  version_rank integer NOT NULL,
+  installed_rank integer NOT NULL,
+  version character varying(50) NOT NULL,
+  description character varying(200) NOT NULL,
+  type character varying(20) NOT NULL,
+  script character varying(1000) NOT NULL,
+  checksum integer,
+  installed_by character varying(100) NOT NULL,
+  installed_on timestamp without time zone NOT NULL DEFAULT now(),
+  execution_time integer NOT NULL,
+  success boolean NOT NULL,
+  CONSTRAINT schema_version_pk PRIMARY KEY (version)
+)
+WITH (
+  OIDS=FALSE
+);
+ALTER TABLE devon4j.schema_version
+  OWNER TO postgres;
+
+-- Index: devon4j.schema_version_ir_idx
+
+-- DROP INDEX devon4j.schema_version_ir_idx;
+
+CREATE INDEX schema_version_ir_idx
+  ON devon4j.schema_version
+  USING btree
+  (installed_rank);
+
+-- Index: devon4j.schema_version_s_idx
+
+-- DROP INDEX devon4j.schema_version_s_idx;
+
+CREATE INDEX schema_version_s_idx
+  ON devon4j.schema_version
+  USING btree
+  (success);
+
+-- Index: devon4j.schema_version_vr_idx
+
+-- DROP INDEX devon4j.schema_version_vr_idx;
+
+CREATE INDEX schema_version_vr_idx
+  ON devon4j.schema_version
+  USING btree
+  (version_rank);
+
+
+
+
    +
  • +

    Uncomment Type annotation for ‘data’ attribute in devon4j-sample-core/src/main/java/io/oasp/gastronomy/restaurant/general/dataacess/api/BinaryObjectEntity.java

    +
  • +
+
+
+
+
@Type(type = "org.hibernate.type.BinaryType")
+public Blob getData() {
+
+
+
+
    +
  • +

    Uncomment the dependency for the PostGres 9.5.4 jdbc driver in devon4j-sample-core/pom.xml. Dependency for PostGres 9.5.4 is as follows :

    +
  • +
+
+
+
+
<dependency>
+      <groupId>org.postgresql</groupId>
+      <artifactId>postgresql</artifactId>
+      <version>9.4.1211.jre7</version>
+</dependency>
+
+
+
+
    +
  • +

    Change the value of following property ‘spring.datasource.url’ in following file ‘devon4j-sample-core/src/main/resources/config/application-postgre.properties’. Accordingly, change the following properties:

    +
    +
      +
    • +

      Hostname

      +
    • +
    • +

      Port

      +
    • +
    • +

      Database Name

      +
    • +
    • +

      spring.datasource.username

      +
    • +
    • +

      spring.datasource.password

      +
    • +
    +
    +
  • +
  • +

    Run the script core/src/test/setup/postgresdb.bat for Windows Environment and the script core/src/test/setup/postgresdb.sh for Unix/Linux Environments.

    +
  • +
  • +

    Make a copy of bills.csv at the following path devon4j-sample-core/src/test/resources/BillExportJobTest/expected/ and rename it to bills_h2.csv

    +
  • +
  • +

    Rename the file bills_pg.csv in devon4j-sample-core/src/test/resources/BillExportJobTest/expected/ to bills.csv

    +
  • +
  • +

    Comment the spring active profile h2mem and uncomment the spring active profile postgre in devon4j-sample-core/src/main/resources/config/application.properties.

    +
  • +
  • +

    Comment the line that has spring active profile junit and uncomment the line that has spring active profiles junit and postgre separated by comma in the file devon4j-sample-core/src/test/resources/config/application.properties.

    +
  • +
+
+
+

Note : Make sure that JUNIT Test cases run successfully for devon4j Project using the command ‘mvn clean install’.

+
+
+

Assuming that devon4j is integrated with the PostGres 9.5.4, following are the steps to enable H2 Database.

+
+
+
+
+

Disabling PostGres and enabling H2 Database

+
+
+
    +
  • +

    Comment Type annotation for ‘data’ attribute in devon4j-sample-core/src/main/java/io/oasp/gastronomy/restaurant/general/dataacess/api/BinaryObjectEntity.java

    +
  • +
+
+
+
+
//@Type(type = "org.hibernate.type.BinaryType")
+public Blob getData() {
+
+
+
+
    +
  • +

    Comment the dependency for the PostGres 9.5.4 jdbc driver in devon4j-sample-core/pom.xml. Commented code below.

    +
  • +
+
+
+
+
<!--
+    <dependency>
+      	<groupId>org.postgresql</groupId>
+      	<artifactId>postgresql</artifactId>
+      	<version>9.4.1211.jre7</version>
+    </dependency>
+-->
+
+
+
+
    +
  • +

    Run the script core/src/test/setup/disablepostgresdb.bat for Windows Environment and the script core/src/test/setup/disablepostgresdb.sh for Unix/Linux Environments.

    +
  • +
  • +

    Make a copy of bills.csv at following path devon4j-sample-core/src/test/resources/BillExportJobTest/expected/ and rename it to bills_pg.csv

    +
  • +
  • +

    Rename bills_h2.csv in devon4j-sample-core/src/test/resources/BillExportJobTest/expected/ to bills.csv

    +
  • +
  • +

    Uncomment the spring active profile h2mem and comment the spring active profile postgre in devon4j-sample-core/src/main/resources/config/application.properties

    +
  • +
  • +

    Uncomment the line that has spring active profile junit and comment the line that has spring active profiles junit and postgre separated by comma in the file devon4j-sample-core/src/test/resources/config/application.properties

    +
  • +
+
+
+

Note: Make sure that JUNIT Test cases run successfully for devon4j Project using the command ‘mvn clean install’.

+
+
+
+
+

Run the sample application with the Angular JS Client

+
+
+
    +
  • +

    Follow the steps mentioned here

    +
  • +
+
+
+
+
+

Run the sample application with the Sencha Client

+
+
+
    +
  • +

    Follow the steps mentioned here

    +
  • +
+
+
+

Note : One has to recompile devon4j project by executing the command mvn clean install in devon4j project after doing the changes mentioned in the above said instructions.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/Home.html b/docs/devonfw.github.io/1.0/devon4j.wiki/Home.html new file mode 100644 index 00000000..d1aa699e --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/Home.html @@ -0,0 +1,290 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==devonfw for Java (devon4j)

+
+
+

Welcome to the Java edition of devonfw. devon4j is documented by a platform guide (see the side-bar of this wiki) to be used in your projects.

+
+
+

You will find the latest stable versions of documents generated from this wiki here:

+
+
+ +
+
+

For contributors

+
+
+

Contributions and improvements to devonfw are more than welcome. Please read our contributing guide to get started.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/How-to-steer-an-devonfw-project-(a-guide-for-Product-Owners).html b/docs/devonfw.github.io/1.0/devon4j.wiki/How-to-steer-an-devonfw-project-(a-guide-for-Product-Owners).html new file mode 100644 index 00000000..28f48887 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/How-to-steer-an-devonfw-project-(a-guide-for-Product-Owners).html @@ -0,0 +1,506 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Introduction

+
+
+

This short document describes the outline of how to manage an devonfw project. Its purpose is to be descriptive rather than prescriptive as each Product Owner should find his or her own way on the route towards efficient steering (“management”) of an Open Source project. An exception is when the word MUST (thus capitalized) is being used. In that case it is an absolute requirement to comply with the stated directive.

+
+
+

==Prerequisites

+
+
+

It is critical that you and your contributors know Git and Github. An important distinction as they are not synonymous.

+
+ +
+

Pro Git is an excellent and free online book https://git-scm.com/book/en/v2

+
+ +
+

License. Every devonfw project MUST be licensed under the Apache License version 2. For the short explanation of what this entails see: https://tldrlegal.com/license/apache-license-2.0-(apache-2.0) and for a more elaborate description see: https://www.whitesourcesoftware.com/whitesource-blog/top-10-apache-license-questions-answered/

+
+
+

Documentation: every devonfw project MUST use adoc as text document format for its own documentation. This is done in order to unify on a common documentation tool chain. See: http://adoc.org/

+
+
+

Every project MUST have a README file (see down) OR use the Wiki for its documentation.

+
+
+

Every project MUST have a CONTRIBUTING file and MUST have a Code of Conduct (see down). We maintain the “official” Covenant Code of Conduct that must be present in every devonfw or devonfw project at the root folder as CODE_OF_CONDUCT.adoc or CODE_OF_CONDUCT.md here:

+
+ +
+

(private repo)

+
+
+

==Steering the Project

+
+
+

The project needs steering or managing: planning out goals, maintaining and improving documentation. This requires certain practices and tools which GitHub supports and provides out of the box:

+
+
+

Project Management

+
+
+

Leverage features like milestones and projects to group issues into related buckets of work. Projects should be managed in sprints (in agile terms), with one or more projects making up a milestone. This allows the Product Owner to communicate the roadmap clearly and can help direct contributors to where help is most immediately needed. It also helps establish feature / bug delivery expectations.

+
+
+
+
+

Release Management

+
+
+

Whenever a Git tag is pushed to your repository, GitHub will create a link to a page for that tag, thus enabling the easy creation of release notes for every tag. This is useful for tracking progress over time and to keep a record of features and bugs completed.

+
+
+
+
+

Workflow and branches

+
+
+

We recommend using Git Flow as the main workflow or paradigm. It is simple, well known and has great support in Github.

+
+ +
+

Important the contributors MUST be using sensible working practices which fit the community’s and projects working model. A good description you can find on https://github.com/devonfw/devon/wiki/devon-guide-working-with-git-and-github

+
+
+

(private devonfw repository)

+
+
+
+
+

Issues

+
+
+

The central foundation or building stone of Github is the “Issue”. These are items which allow you to keep track of tasks, enhancements, and bugs for your projects. They serve as the central communication medium within the team and between the team and the world outside. GitHub’s tracker is called Issues, and has its own section in every repository.

+
+ + +
+

Note that the issues and especially the comments should not be used for generic communication. Diverting from the main thread is a common anti-pattern which should be guarded against.

+
+
+
+
+

Wiki

+
+
+

The wiki is the place to organize and manage supplemental information related with the product. Rather than pack everything into the README, the wiki should be used to establish separate documentation for onboarding, migration guides, API docs and more.

+
+
+
+
+

Communication: Gitter/Slack/Yammer

+
+
+

Critical in any OSS project is communication between all participants. The issues are too limited for this purpose. Gitter and Slack are chat platforms that integrate well with GitHub and tools like Jenkins. They allow you to foster a real time community around your project, assist others or let the community jump in and help. Yammer is a more traditional “messageboard” style service. Advantage of the latter is that it is fully supported by Capgemini whereas the former two are tolerated and just barely.

+
+
+
+
+

Contributors

+
+
+

The essence of any Open Source project is not the code but the people who contribute to the project. These people, ideally a “community”, are vital to an open source project. An active and supportive community is the heart of the project.

+
+
+

See How to Build an OSS Community: http://oss-watch.ac.uk/resources/howtobuildcommunity

+
+
+
+
+

CONTRIBUTING file

+
+
+

The root of the project MUST contain a CONTRIBUTING file. It should explain how a participant should do things like:

+
+
+
    +
  • +

    How to report bugs

    +
  • +
  • +

    How to suggest improvements / new features

    +
  • +
  • +

    How to contribute code

    +
  • +
  • +

    format code

    +
  • +
  • +

    test fixes

    +
  • +
  • +

    submit patches.

    +
  • +
+
+
+

And more. From the Product Owner's point of view, the document succinctly communicates how best to collaborate. And for a contributor, one quick check of this file verifies their submission follows the project's guidelines.

+
+
+

The document can consist of a series of pointers to external references and standards. But it should be concise and clear.

+
+
+
+
+

Code of Conduct

+
+
+

Every project should foster and apply a code of conduct which defines standards for how to engage in a community. It signals an inclusive environment that respects all contributions. It also outlines procedures for addressing problems between members of the project’s community. For more information on why a code of conduct defines standards and expectations for how to engage in a community, see the Open Source Guide.: https://opensource.guide/code-of-conduct/

+
+
+

As previously stated, for devonfw and devonfw projects there is a standard CoC available.

+
+
+
+
+

Contributors; the community

+
+
+

In any OSS project the issue is never how to bind and bring enthusiasm to the core contributors. Steering them is not an easy task by itself but typically they have an internal drive which explains their higher level of contribution. And that drive makes it easier to manage their activities.

+
+
+

The challenge is how to find and attract casual users. Those bring important contributions by themselves but they are also the most important source of new, future, “hard-core” members of the community. How to manage casual contributors to open source projects: https://opensource.com/article/17/10/managing-casual-contributors

+
+
+
+
+

Documentation

+
+
+

Every project should contain documentation, either as a coherent README or in the wiki. If the wiki is used, it is better to point to the wiki from the README in order to avoid duplication of information.

+
+
+

The documentation minimally should contain:

+
+
+
    +
  • +

    Present the project (purpose)

    +
  • +
  • +

    Step-by-step install and config instruction (how to get running)

    +
  • +
  • +

    Status of the project (Build/info/date)

    +
  • +
  • +

    Basic Use cases & examples

    +
  • +
  • +

    Contact info

    +
  • +
+
+
+

==Cooperation within the devonfw Platform (devonfw core team)

+
+
+

The Product Owners and contributors are principally working on their projects without any commitment that their contributions merit financial compensation. From within Capgemini it can be decided to have work done within working hours. And to, effectively, financially compensate for activities. Quite a few people are supported by budget provided for by either the SBU or their local BU.

+
+
+

Apart from direct support there is a permanent support team available, the devonfw core team. The product owner can ask for support for particular issues. The devonfw (platform) Product Owner decides if an issue merits support by the devonfw core team. Following this, the devonfw core team Team Leader can assign resources to the issue.

+
+
+

The Product Owner should directly communicate and work together with the assigned resource. The PO is responsible for a clear definition of User Story, task description and Definition of Done/Acceptance criteria. The PO communicates with the devonfw core team lead about progression and high-level task status.

+
+
+

In brief: the Product Owner is responsible for the content of the task. The devonfw core team lead guards against overrun of the assigned task time estimate.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/Managing-Secrets.html b/docs/devonfw.github.io/1.0/devon4j.wiki/Managing-Secrets.html new file mode 100644 index 00000000..0664364d --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/Managing-Secrets.html @@ -0,0 +1,539 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

This page contains content which is intended to be merged in the overall documentation at a later time.

+
+ +
+

==Introduction +Secrets are parts of an application’s configuration (typically Externalized Environment Configuration) that are specific in the following aspects:

+
+
+
    +
  • +

    They need to be protected from unauthorized access

    +
  • +
  • +

    They can be dynamic in that sense that values can change during the runtime of your application process, e.g. if a secret is an access token with an expiry time

    +
  • +
+
+
+

Protecting secrets is typically done by encryption and/or managing them by a service that performs access control. This creates a typical Chicken and Eggs problem: To get access to a secret, your application will need another secret. This can be a key that allows you to decrypt the secret and/or an access token that your application provides to a secrets management service.

+
+
+

If you pass e.g. a key for decryption via "JAVA_OPTS" to your application but have the key in plain text in a startup script, you may just end up with Security by Obscurity. If an attacker got full access to your environment it is just a question of time until your encrypted secrets are accessible.

+
+
+

Of course there is a way to avoid the Chicken and Eggs problem: Don’t store secrets in your application context but let a runtime "infrastructure" pass them to your application. +If you let the infrastructure manage your secrets, it can handle also the renewal of expiring secrets by sending your application a signal that causes your application to accept an updated value for the secret.

+
+
+

==Base Mechanisms +The following sections describe variants for this approach. As two examples for the "infrastructrue" we use Kubernetes and the Hashicorp tool stack ("Hashistack").

+
+
+

Passing Secrets via Environment Variables

+
+
+

In this approach the infrastructure directly passes the secret to your application via an environment variable.

+
+
+

This approach works best if you need only access to a very few secrets: +Passing a "master secret" to an application that is then used to access the actual secrets is such a use case.

+
+
+

Kubernetes

+
+

When Kubernetes launches a Pod, it can pass the value of a secret maintained by the Kubernetes Secrets system as an environment variable. The drawback of this approach with Kubernetes is, that it stores the secrets in the Pod definition - so you have to rely on access control for the Pod definition.

+
+
+

See Secrets Concept description and in particular section Using Secrets as Environment Variables.

+
+
+
+

Hashistack (Nomad and Vault)

+
+

When Nomad is used to launch your application, it can pull a token from Vault and pass it via an environment variable named VAULT_TOKEN. The job definition allows a job task to specify the particular token that it requires from Vault. Nomad will automatically retrieve a Vault token for the task and handle token renewal for the task. See Nomad Job Specification and in detail the Vault Stanza. In this case the secret does not appear in the Job definition as it is stored by Vault.

+
+
+
+
+
+

Passing Secrets via a Specific File System

+
+
+

In this approach the "infrastructure" passes the secrets via a file system that is accessible to the application. The storage is temporary: Together with the termination of the application it also removes the file system. During the existence of the temporary file system, protection of your secrets depends on the level of access control that the infrastructure provides. If this is not sufficient for your protection needs, you may consider encryption as an additional means but then you are back at square one.

+
+
+

Kubernetes

+
+

Kubernetes supports this approach by mounting secrets that are mapped to a path as a volume in a Pod. See Using Secrets as Files from a Pod.

+
+
+
+

Hashistack (Nomad and Vault)

+
+

Nomad makes a file system available to tasks which contains a "secrets" directory. This directory is private to each task, not accessible via the "nomad fs" command or filesystem APIs and where possible backed by an in-memory filesystem. It can be used to store secret data that should not be visible outside the task.

+
+
+

Nomad stores a token that it pulls from Vault in the secrets directory.

+
+
+
+
+
+

Usage of a Secrets Management Service

+
+
+

If your application needs access to multiple secrets and you don’t want to store them as part of your application configuration (e.g. because you want to avoid encryption) you can use a Secrets Management Service that the infrastructure provides. Access to the API of these services is typically controlled using Access Tokens. This means that this approach needs to be combined with one of the basic secret passing mechanisms described above to provide the access token to your application.

+
+
+

Examples

+ +
+

==Practical Implementations

+
+
+
+
+
+

Encrypted Application Properties

+
+
+

TODO: This section duplicates content from ./guide-configuration#password-encryption and also slightly differs. We need to align our approach and avoid such redundancies.

+
+
+

A simple but reasonable approach is to configure the passwords encrypted with a master-password. +The master-password should be a strong secret that is specific for each environment. It must never be committed to version-control. +Instead let the "infrastructure" pass it to your application via an environment variable. +In order to support encrypted passwords in spring-boot application.properties all you need to do is to add jasypt-spring-boot as dependency in your pom.xml (please check for the most recent version):

+
+
+
+
<dependency>
+  <groupId>com.github.ulisesbocchio</groupId>
+  <artifactId>jasypt-spring-boot-starter</artifactId>
+  <version>1.17</version>
+</dependency>
+
+
+
+

This will smoothly integrate jasypt into your spring-boot application. Read this HOWTO to learn how to encrypt and decrypt passwords using jasypt. Here is a simple example output of an encrypted password (of course you have to use strong passwords instead of secret and postgres - this is only an example):

+
+
+
+
----ARGUMENTS-------------------
+
+input: postgres
+password: secret
+
+----OUTPUT----------------------
+
+jd5ZREpBqxuN9ok0IhnXabgw7V3EoG2p
+
+
+
+

The master-password can be configured as "JAVA_OPTS" on your target environment via -Djasypt.encryptor.password=secret (of course you will replace secret with an expansion of the respective environment variable). +Now you are able to put encrypted passwords into your application.properties

+
+
+
+
spring.datasource.password=ENC(jd5ZREpBqxuN9ok0IhnXabgw7V3EoG2p)
+
+
+
+

To prevent jasypt to throw an exception in dev or test scenarios simply put this in your local config (src/main/config/application.properties and same for test, see above for details):

+
+
+
+
jasypt.encryptor.password=none
+
+
+
+
+
+

Spring Boot and Hashistack

+
+
+

Spring Cloud Vault provides support for externalized Spring configuration in a distributed system using Hashicorp Vault.

+
+
+

See the Quick Start section for details how to use it in your application.

+
+
+

Authentication

+
+

Vault requires an authentication mechanism to authorize client requests. Spring Cloud Vault Config supports multiple authentication mechanisms to authenticate applications with Vault - Token Authentication is the default mechanism.

+
+
+

The Spring Cloud Vault Config documentation provides examples like this to configure the authentication token in your bootstrap.yml file.

+
+
+
+
spring.cloud.vault:
+    token: 19aefa97-cccc-bbbb-aaaa-225940e63d76
+
+
+
+

If you use Nomad in combination with Vault, you will use instead the Vault token passing mechanism of Nomad described above.

+
+
+
+
spring.cloud.vault:
+    token: ${VAULT_TOKEN}
+
+
+
+

As an alternative you can consider using one of the advanced authentication methods of Vault: If you are using AWS you can use AWS-EC2 authentication that does not require first-deploying, or provisioning security-sensitive credentials.

+
+
+
+

Renewal of Secrets

+
+

With every secret, Vault creates a lease: metadata containing information such as a time duration, renewability, and more. Spring Cloud Vault maintains a lease lifecycle beyond the creation of login tokens and secrets. That said, login tokens and secrets associated with a lease are scheduled for renewal just before the lease expires until terminal expiry. +See section Lease lifecycle management of Spring Cloud Vault documentation for details.

+
+
+
+
+
+

Spring Boot and Kubernetes Secrets

+
+
+

The Spring Cloud Kubernetes project provides the +Secrets PropertySource feature which allows sharing secrets with containers via mounted volumes.

+
+
+

There is a blog of "Red Hat developers" that describes +Configuring Spring Boot on Kubernetes With Secrets. +It uses the Environment Variables / File System approaches described above.

+
+
+

It is Part-II of an article series where Part-I described how to use ConfigMaps in configuring a spring boot application on Kubernetes. +The announced Part-III does not seem to be released yet. The author says that it will describe how to use the spring-cloud-kubernetes spring module in more detail.

+
+
+

A similar text can be found in a Red Hat documentation Integrate Spring Boot with Kubernetes.

+
+
+

At the same time multiple projects are working on an integration of Vault with Kubernetes. The most prominent of them is the collaboration of Google and Hashicorp: one of the goals is "Using HashiCorp Vault with Google Cloud and Kubernetes" - see the announcement on the Google Cloud Platform blog.

+
+
+
+
+

How the Hashistack solves the "Chicken and Eggs" Problem

+
+
+

To access a secret managed by Vault requires an access token. To obtain an access token you need another secret for authentication.

+
+
+

The approach to let Nomad pass a required token to the application, traces back to the question how Nomad gets access to these tokens. For such purposes Vault offers an "auth method" called AppRole. +Auth methods are the components in Vault that perform authentication and are responsible for assigning identity and a set of policies to a user. The AppRole auth method allows machines or apps to authenticate with Vault-defined roles. The role represents a set of policies that define to which secrets Nomad has access.

+
+
+

In a productive system Nomad will operate as a high available clustered service. The credentials required for a successful authentication of Nomad for its AppRole authentication with Vault are passed during the bootstrapping of the cluster. If the access control mechanisms of your platform to protect these bootstrap credentials don’t match your needs you may want to delegate the protection and provisioning to a human user.

+
+
+

This is related to the bootstrap process of Vault: Starting a productive Vault includes a workflow for unsealing the Vault. Unsealing is the process of constructing the key to decrypt the data, allowing access to the Vault. Instead of distributing this master key as a single key to an operator, Vault uses an algorithm known as Shamir’s Secret Sharing to split the key into shards. A certain threshold of shards (e.g. 3 out of 5) is required to reconstruct the master key.

+
+
+

The unseal process can be executed via Vault’s API. This process is stateful: each key can be provided by processes on multiple computers. In theory this means that the bootstrap process could be automated and still have enhanced security by storing each shard of the master key on a distinct machine. +In practice Hashicorp at the moment recommends a manual workflow for unsealing. The human users who keep the Vault master key shards will also keep credentials to log on, access an authentication token for Nomad and provide this for the bootstrapping of the Nomad cluster.

+
+
+

If really a fully automated cold boot of a Hashistack cluster is required, a possible workflow that meets also high security needs could look like this:

+
+
+
    +
  • +

    Store the shards of the Vault master key on different machines

    +
  • +
  • +

    Protect the shard with the access control mechanisms of the file system and allow access only to system users of system processes that perform the unseal process when the cluster machines boot. (Use encryption to protect the shards? Back to square one!)

    +
  • +
  • +

    Split the authentication token into shards using the same "Shamir’s Secret Sharing" approach and protect them the same way as the shards of the Vault master key

    +
  • +
  • +

    The distributed system processes that collaborate for the unsealing of the Vault as well collaborate to construct the first authentication token

    +
  • +
  • +

    Using this authentication token the bootstrap processes can provide credentials to the bootstrapping of the Nomad servers of the cluster that allow the Nomad servers to authenticate with their Vault AppRole.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/OWASP-Top-10-security-vulnerabilities.html b/docs/devonfw.github.io/1.0/devon4j.wiki/OWASP-Top-10-security-vulnerabilities.html new file mode 100644 index 00000000..4242b599 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/OWASP-Top-10-security-vulnerabilities.html @@ -0,0 +1,669 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

This document compares the current devonfw recommendations and sample with the OWASP Top 10 security vulnerabilities.

+
+
+

==A1 Injection

+
+
+

Injection flaws, such as SQL, OS, and LDAP injection occur when untrusted data is sent to an interpreter as part of a command or query. The attacker’s hostile data can trick the interpreter into executing unintended commands or accessing data without proper authorization.

+
+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

CH

OWASP ASVS 2.0

devonfw

OK?

Comment

V5.10 L1

Verify that the runtime environment is not susceptible to SQL Injection, or that security controls prevent SQL Injection.

devonfw4J V1.0.0, 3.4.3.1 SQL-Injection: Prevents 100% injections in static SQLs, gives advises how to handle dynamic SQLs

yes

V5.11 L1

Verify that the runtime environment is not susceptible to LDAP Injection, or that security controls prevent LDAP Injection.

-

no

Spring Security with its ldap query builder could be already immune to this one. Example is missing.

V5.12 L1

Verify that the runtime environment is not susceptible to OS Command Injection, or that security controls prevent OS Command Injection.

-

no

We could probably handle this one quite easily using static code analysis (preventing the usage of the class Runtime?).

V5.14 L1

Verify that the runtime environment is not susceptible to XML Injections or that security controls prevents XML Injections.

-

no

This is primarily about the XPath injection. Could be handled with a good encoder (https://github.com/ESAPI/esapi-java)

+
+

==A2 Broken Authentication and Session Management

+
+
+

Application functions related to authentication and session management are often not implemented correctly, allowing attackers to compromise passwords, keys, or session tokens, or to exploit other implementation flaws to assume other users’ identities.

+
+
+

You may be vulnerable if:

+
+
+
    +
  1. +

    User authentication credentials aren’t protected when stored using hashing or encryption.

    +
  2. +
  3. +

    Credentials can be guessed or overwritten through weak account management functions (e.g., account creation, change password, recover password, weak session IDs).

    +
  4. +
  5. +

    Session IDs are exposed in the URL (e.g., URL rewriting).

    +
  6. +
  7. +

    Session IDs are vulnerable to session fixation attacks.

    +
  8. +
  9. +

    Session IDs don’t timeout, or user sessions or authentication tokens, particularly single sign-on (SSO) tokens, aren’t properly invalidated during logout.

    +
  10. +
  11. +

    Session IDs aren’t rotated after successful login.

    +
  12. +
  13. +

    Passwords, session IDs, and other credentials are sent over unencrypted connections. See A6.

    +
  14. +
+
+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

V2.1 L1

Verify all pages and resources require authentication except those specifically intended to be public (Principle of complete mediation).

encrypt all channels, use a central identity management with strong password-policy

yes

This point is handled well in the documentation.

V2.16 L1

Verify that credentials, and all other identity information handled by the application(s), do not traverse unencrypted or weakly encrypted links.

no

No TLS in the example application present. Need for TLS not stated in the documentation.

V2.17 L1

Verify that the forgotten password function and other recovery paths do not reveal the current password and that the new password is not sent in clear text to the user.

well…

One of the many points that shows that OWASP Top 10 compliance is not only about a secure framework. This one is more about possible business logic flaws. It might not really belong in the devonfw documentation.

V2.18 L1

Verify that username enumeration is not possible via login, password reset, or forgot account functionality

yes

Spring security does that automatically for us as long as we depend on it.

V3.1 L1

Verify that the framework’s default session management control implementation is used by the application.

yes

Spring security does that automatically for us as long as we depend on it.

V3.2 L1

Verify that sessions are invalidated when the user logs out.

yes

Spring security does that automatically for us as long as we depend on it.

V3.14 L1

Verify that authenticated session tokens using cookies sent via HTTP, are protected by the use of "HttpOnly".

yes

Nice secure default of the tomcat container.

V3.15 L1

Verify that authenticated session tokens using cookies are protected with the "secure" attribute and a strict transport security header (such as StrictTransport-Security: max-age=60000; includeSubDomains) are present.

no

No TLS = no secure flag. HSTS is another topic where good examples could be helpful.

V2.12 L2

Verify that all authentication decisions are logged. This should include requests with missing required information, needed for security investigations.

no

These things are a bit less common than the others, but they show that authentication and session management issues can go deep.

V2.20 L2

Verify that a resource governor is in place to protect against vertical (a single account tested against all possible passwords) and horizontal brute forcing (all accounts tested with the same password e.g. “Password1”). A correct credential entry should incur no delay. Both these governor mechanisms should be active simultaneously to protect against diagonal and distributed attacks.

no

V2.25 L2

Verify that the system can be configured to disallow the use of a configurable number of previous passwords.

no

+
+

==A3 Cross-Site Scripting (XSS)

+
+
+

XSS flaws occur whenever an application takes untrusted data and sends it to a web browser without proper validation or escaping. XSS allows attackers to execute scripts in the victim’s browser which can hijack user sessions, deface web sites, or redirect the user to malicious sites.

+
+ +++++++ + + + + + + + + + +

V5.16 L1

Verify that all untrusted data that are output to HTML (including HTML elements, HTML attributes, JavaScript data values, CSS blocks, and URI attributes) are properly escaped for the applicable context

-

no

AngularJS makes it hard for developers to make XSS mistakes. Still possibilities exist: https://code.google.com/p/mustache-security/wiki/AngularJS. JQuery can also lead to problems. The security we have is probably pretty good. Yet at least a list of dos and don’ts is missing.

+
+

==A4 Insecure Direct Object References

+
+
+

A direct object reference occurs when a developer exposes a reference to an internal implementation object, such as a file, directory, or database key. Without an access control check or other protection, attackers can manipulate these references to access unauthorized data.

+
+ +++++++ + + + + + + + + + +

V4.4 L1

Verify that direct object references are protected, such that only authorized objects or data are accessible to each user (for example, protect against direct object reference tampering).

-

no

The topic is not well covered in the documentation but still we will not have problems at this point. We usually have secure direct object references which are ok.

+
+

==A5 Security Misconfiguration

+
+
+

Good security requires having a secure configuration defined and deployed for the application, frameworks, application server, web server, database server, and platform. Secure settings should be defined, implemented, and maintained, as defaults are often insecure. Additionally, software should be kept up to date.

+
+ +++++++ + + + + + + + + + +

V19.1 L1 (v3.0)

All components should be up to date with proper security configuration(s) and version(s). This should include unneeded configurations and folders (sample applications).

Use devonfw application template and guides to avoid

No

Using some kind of application template is not enough. This is a hard feature for architects to deal with, because it’s more about ITSec than AppSec. This point is about server hardening. Look at this to get a bigger picture: https://benchmarks.cisecurity.org/tools2/apache/CIS_Apache_Tomcat_Benchmark_v1.0.0.pdf

+
+

==A6 Sensitive Data Exposure

+
+
+

Many web applications do not properly protect sensitive data, such as credit cards, tax IDs, and authentication credentials. Attackers may steal or modify such weakly protected data to conduct credit card fraud, identity theft, or other crimes. Sensitive data deserves extra protection such as encryption at rest or in transit, as well as special precautions when exchanged with the browser.

+
+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

V2.16 L1

Verify that credentials, and all other identity information handled by the application(s), do not traverse unencrypted or weakly encrypted links.

-

No

The example application is not using TLS. The documentation does not describe the need for TLS. Spring Security should be configured to always redirect the connection to a TLS secured one.

V10.3 L1

Verify that TLS is used for all connections (including both external and backend connections) that are authenticated or that involve sensitive data or functions.

-

No

V2.21 L2

Verify that all authentication credentials for accessing services external to the application are encrypted and stored in a protected location (not in source code)

-

No

There is a lot of discussion going on between security officers and architects about this one. Still it is a common security requirement to find.

V2.13 L2

Verify that account passwords are salted using a salt that is unique to that account (e.g., internal user ID, account creation) and use bcrypt, scrypt or PBKDF2 before storing the password.

-

No

This is an elementary solution for local user authentication. Good code examples are necessary. The example application could handle this one as well.

+
+

==A7 Missing Function Level Access Control

+
+
+

Most web applications verify function level access rights before making that functionality visible in the UI. However, applications need to perform the same access control checks on the server when each function is accessed. If requests are not verified, attackers will be able to forge requests in order to access functionality without proper authorization.

+
+ +++++++ + + + + + + + + + + + + + + + + + + + + + + + +

V4.1 L1

Verify that users can only access secured functions or services for which they possess specific authorization.

Ensure proper authorization for all use-cases, use @DenyAll als default to enforce

yes

V4.2 L1

Verify that users can only access secured URLs for which they possess specific authorization.

yes

V4.3 L1

Verify that users can only access secured data files for which they possess specific authorization.

no

I wouldn’t know how to handle this one based on the documentation and examples.

+
+

==A8 Cross-Site Request Forgery (CSRF)

+
+
+

A CSRF attack forces a logged-on victim’s browser to send a forged HTTP request, including the victim’s session cookie and any other automatically included authentication information, to a vulnerable web application. This allows the attacker to force the victim’s browser to generate requests the vulnerable application thinks are legitimate requests from the victim.

+
+ +++++++ + + + + + + + + + +

V4.16 L1

Verify that the application or framework generates strong random anti-CSRF tokens unique to the user as part of all high value transactions or accessing sensitive data, and that the application verifies the presence of this token with the proper value for the current user when processing these requests.

Short chapter 3.2.6. Beautiful implementation in the example application for SPA/RIA.

yes

Does it make sense to create another example for a non-SPA application or an application that cannot use JavaScript?

+
+

==A9 Using Components with Known Vulnerabilities

+
+
+

Components, such as libraries, frameworks, and other software modules, almost always run with full privileges. If a vulnerable component is exploited, such an attack can facilitate serious data loss or server takeover. Applications using components with known vulnerabilities may undermine application defenses and enable a range of possible attacks and impacts.

+
+ +++++++ + + + + + + + + + +

V19.1 L1 (v3.0)

All components should be up to date with proper security configuration(s) and version(s). This should include unneeded configurations and folders (sample applications).

subscribe to security newsletters, recheck products and their versions continuously, use devonfw dependency management

no

Redirecting people to CSV lists does not solve the problem here. Automated solutions like integration with Victims or OWASP Dependency Check is needed.

+
+

==A10 Unvalidated Redirects and Forwards

+
+
+

Web applications frequently redirect and forward users to other pages and websites, and use untrusted data to determine the destination pages. Without proper validation, attackers can redirect victims to phishing or malware sites, or use forwards to access unauthorized pages.

+
+ +++++++ + + + + + + + + + +

V16.1

Verify that URL redirects and forwards do not include unvalidated data.

"devonfw proposes to use richclients (SPA/RIA). We only use redirects for login in a safe way"

yes

We don’t usually need this kind of functionality.

+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/Spring-native-vs-Quarkus.html b/docs/devonfw.github.io/1.0/devon4j.wiki/Spring-native-vs-Quarkus.html new file mode 100644 index 00000000..8f782b16 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/Spring-native-vs-Quarkus.html @@ -0,0 +1,496 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Spring Native vs Quarkus

+
+
+

Nowadays, it is very common to write an application and deploy it to a cloud. +Serverless computing and Function-as-a-Service (FaaS) have become +very popular. +While many challenges arise when deploying a Java application into the latest cloud environment, the biggest challenges facing developers are memory footprint and the startup time required +for the Java application, as these keep the host’s costs high in public clouds and Kubernetes clusters. With the introduction of frameworks like micronaut and microprofile, Java processes are getting faster and more lightweight. In a similar context, Spring has introduced +Spring Native which aims to solve the big memory footprint of Spring and its slow startup time to potentially rival the new framework called Quarkus, by Red Hat. This document briefly discusses both of these two frameworks and their potential suitability with devonfw.

+
+
+

Quarkus

+
+
+

Quarkus is a full-stack, Kubernetes-native Java framework made for JVMs. With its container-first-philosophy and its native compilation with GraalVM, Quarkus optimizes Java for containers with low memory usage and fast startup times.

+
+
+

Quarkus achieves this in the following ways:

+
+
+
    +
  • +

    First Class Support for GraalVM

    +
  • +
  • +

    Build Time Metadata Processing: As much processing as possible is +done at build time, so your application will only contain the classes +that are actually needed at runtime. This results in less memory usage, +and also faster startup time, as all metadata processing has already been +done.

    +
  • +
  • +

    Reduction in Reflection Usage: Quarkus tries to avoid reflection as much as possible in order to reduce startup time and memory usage.

    +
  • +
  • +

    Native Image Pre Boot: When running in a native image, Quarkus +pre-boots as much of the framework as possible during the native image +build process. This means that the resulting native image has already +run most of the startup code and serialized the result into the +executable, resulting in an even faster startup-time.

    +
  • +
+
+
+

This gives Quarkus the potential for a great platform for serverless cloud and Kubernetes environments. For more information about Quarkus and its support for devonfw please refer to the Quarkus introduction guide.

+
+
+
+
+

Spring Native

+
+
+
+
+

The current version of Spring Native 0.10.5 is designed to be used with Spring Boot 2.5.6

+
+
+
+
+

Like Quarkus, Spring Native provides support for compiling Spring applications to native executables using the GraalVM native-image compiler, designed to be packaged in lightweight containers.

+
+
+

Spring Native is composed of the following modules:

+
+
+
    +
  • +

    spring-native: runtime dependency required for running Spring Native, provides also Native hints API.

    +
  • +
  • +

    spring-native-configuration: configuration hints for Spring classes used by Spring AOT plugins, including various Spring Boot auto-configurations.

    +
  • +
  • +

    spring-native-docs: reference guide, in adoc format.

    +
  • +
  • +

    spring-native-tools: tools used for reviewing image building configuration and output.

    +
  • +
  • +

    spring-aot: AOT transformation infrastructure common to Maven and Gradle plugins.

    +
  • +
  • +

    spring-aot-test: Test-specific AOT transformation infrastructure.

    +
  • +
  • +

    spring-aot-gradle-plugin: Gradle plugin that invokes AOT transformations.

    +
  • +
  • +

    spring-aot-maven-plugin: Maven plugin that invokes AOT transformations.

    +
  • +
  • +

    samples: contains various samples that demonstrate features usage and are used as integration tests.

    +
  • +
+
+
+
+
+

Native compilation with GraalVM

+
+
+

Quarkus and Spring Native both use GraalVM for native compilation. Using a native image provides some key advantages, such as instant startup, instant peak performance, and reduced memory consumption. However, there are also some drawbacks: Creating a native image is a heavy process that is slower than a regular application. A native image also has fewer runtime optimizations after its warmup. Furthermore, it is less mature than the JVM and comes with some different behaviors.

+
+
+

Key characteristics:

+
+
+
    +
  • +

    Static analysis of the application from the main entry point is +performed at build time.

    +
  • +
  • +

    Unused parts are removed at build time.

    +
  • +
  • +

    Configuration required for reflection, resources, and dynamic proxies.

    +
  • +
  • +

    Classpath is fixed at build time.

    +
  • +
  • +

    No class lazy loading: everything shipped in the executables will be loaded in memory on startup.

    +
  • +
  • +

    Some code will run at build time.

    +
  • +
+
+
+

There are limitations around some aspects of Java applications that are not fully supported

+
+
+
+
+

Build time and start time for apps

+
+ +++++ + + + + + + + + + + + + + + + + + + + +
Frameworkbuild timestart time

Spring Native

19.615s

2.913s

Quarkus Native executable

52.818s

0.802s

+
+
+
+

Memory footprints

+
+ ++++ + + + + + + + + + + + + + + + + +
Frameworkmemory footprint

Spring Native

109 MB

Quarkus Native executable

75 MB

+
+
+
+

Considering devonfw best practices

+
+
+

As of now, devonfw actively supports Spring but not Spring Native. +Although Quarkus only reached a stable release in early 2021, it has already been used successfully in multiple big projects, showing its potential to implement cloud native services with low resource consumption matching the needs of scalability and resilience in cloud native environments. +With major stakeholders behind the open source community like Red Hat, its development and growth from its kickoff to the current state is very impressive and really shows the market needs and focus. +Another big advantage of Quarkus is that it started on a green field and therefore did not need to circumvent main pillars of the spring framework like reflection, being able to take clean and up-to-date design decisions not needing to cope with legacy issues. +Nonetheless, there is also experimental support for some spring libraries already available in Quarkus, which makes switching from spring to Quarkus much easier if needed. +We also provide a guide +for Spring developers who want to adopt or try Quarkus for their +(next) projects as it really has some gamechanging advantages over +Spring.

+
+
+
+
+

General recommendations and conclusion

+
+
+

Quarkus and Spring Native both have their own use cases. Under the consideration of the limitations of GraalVM to be used for native images built by Quarkus and Spring Native, there is a strong recommendation towards Quarkus from devonfw. +One essential differentiation has to be made on the decision for native or against native applications - the foreseen performance optimization of the JIT compiler of the JVM, which is not available anymore in a native image deployment. +For sure, both component frameworks will also run on a JVM getting advantage again from JIT compilation, but depending on the overall landscape then, it is recommended to stay with the knowledge of the available teams, e.g. continue making use of devon4j based on spring or even if already in that state also here make use of Quarkus on JVM.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/alternative-microservice-netflix.html b/docs/devonfw.github.io/1.0/devon4j.wiki/alternative-microservice-netflix.html new file mode 100644 index 00000000..c5cb8499 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/alternative-microservice-netflix.html @@ -0,0 +1,1256 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Microservices based on Netflix-Tools

+
+
+

Devonfw microservices approach is based on Spring Cloud Netflix, that provides all the main components for microservices integrated within Spring Boot context.

+
+
+

In the following schema we can see an overview of the structure of components in a Devon application based on the Spring Cloud Netflix solution for microservices.

+
+
+
+microservices 01 +
+
+
+

Let’s explain each component

+
+
+

Service Discovery - Eureka

+
+

Eureka is a server to register and locate the microservices. The main function for Eureka is to register the different instances of the microservices, its location, its state and other metadata.

+
+
+

It works in a simple way, during the start of each microservice, this communicates with the Eureka server to notify its availability and to send the metadata. The microservice will continue to notify its status to the Eureka server every 30 seconds (default time on Eureka server properties). This value can be changed in the configuration of the component.

+
+
+

If after 3 periods, Eureka does not receive notification of any of the microservices, it will be considered as unavailable and will eliminate its registration.

+
+
+

In addition, it serves as a catalog to locate a specific microservice when routing a request to it.

+
+
+
+

Circuit Breaker - Hystrix

+
+

Hystrix is a library that implements the Circuit Breaker pattern. Its main functionality is to improve the reliability of the system, isolating the entry points of the microservices, preventing the cascading failure from the lower levels of the application all the way up to the user.

+
+
+

In addition to that, it allows developers to provide a fallback in case of error. Hystrix manages the requests to a service, and in case that the microservice doesn’t respond, it allows implementing an alternative to the request.

+
+
+
+

Client Side Load Balancer - Ribbon

+
+

Ribbon is a library designed as client side load balancer. Its main feature is to integrate with Eureka to discover the instances of the microservices and their metadata. In that way the Ribbon is able to calculate which of the available instances of a microservice is the most appropriate for the client, when facing a request.

+
+
+
+

REST Client - Feign

+
+

Feign is a REST client to make calls to other microservices. The strength of Feign is that it integrates seamlessly with Ribbon and Hystrix, and its implementation is through annotations, which greatly facilitates this task to the developer.

+
+
+

Using annotations, Spring-cloud generates, automatically, a fully configured REST client.

+
+
+
+

Router and Filter - Zuul

+
+

Zuul is the entry point of the apps based on Spring-cloud microservices. It allows dynamic routing, load balancing, monitoring and securing of requests. By default Zuul uses Ribbon to locate, through Eureka, the instances of the microservice that it wants to invoke and sends the requests within a Hystrix Command, taking advantage of its functionality.

+
+
+
+

How to create microservices in devonfw?

+
+

Follow the instructions in the link below to set up devonfw distribution

+
+ +
+

Next, install devonfw modules and dependencies

+
+
+

Step 1: Open the console

+
+

Open the devonfw console by executing the batch file console.bat from the devonfw distribution. It is a pre-configured console which automatically uses the software and configuration provided by the devonfw distribution.

+
+
+
+

Step 2: Change the directory

+
+

Run the following command in the console to change directory to devonfw module

+
+
+
+
cd workspaces\examples\devonfw
+
+
+
+
+

Step 3: Install

+
+

To install modules and dependencies, you need to execute the following command:

+
+
+
+
mvn --projects bom,modules/microservices/microservices,modules/microservices/microservice-archetype,modules/microservices/microservice-infra-archetype  --also-make install
+
+
+
+ + + + + +
+ + +In case installation fails, try running the command again as it is often due to a hitch in the network. +
+
+
+

Now, you can use the Microservices archetype given below to create Microservices.

+
+
+

In order to generate microservices in a devonfw project we can choose between two approaches:

+
+
+
    +
  • +

    generate a new devon4j application and implement one by one all the needed components (based on Spring Cloud).

    +
  • +
  • +

    generate a new devon4j application through the custom microservice archetype included in the devonfw distributions.

    +
  • +
+
+
+

That second approach, using the devonfw microservices archetype, will generate automatically all the basic structure and components to start developing the microservices based application.

+
+
+
+
+

devonfw archetypes

+
+

To simplify starting with projects based on microservices, devonfw includes two archetypes to generate pre-configured projects that include all the basic components of the Spring Cloud implementation.

+
+
+
    +
  • +

    archetypes-microservices-infra: generates a project with the needed infrastructure services to manage microservices. Includes the Eureka service, Zuul service and the authentication service.

    +
  • +
  • +

    archetypes-microservices: generates a simple project pre-configured to work as a microservice.

    +
  • +
+
+
+
+

Create New Microservices infrastructure application

+
+

To generate a new microservices infrastructure application through the devonfw archetype you only need to open a devonfw console (console.bat script) and follow the same steps described in getting started creating new devonfw devon4j application. But, instead of using the standard archetype, we must provide the special infrastructure archetype archetype-microservice-infra. Remember to provide your own values for DgroupId, DartifactId, Dversion and Dpackage parameters, Also provide the -DarchetypeVersion with latest value:

+
+
+
+
mvn -DarchetypeVersion=2.4.0 -DarchetypeGroupId=com.devonfw.microservices -DarchetypeArtifactId=microservices-infra-archetype archetype:generate -DgroupId=com.capgemini -DartifactId=sampleinfra -Dversion=0.1-SNAPSHOT -Dpackage=com.capgemini.sampleinfra
+
+
+
+

Once the Maven command has finished an application with the following modules should be created:

+
+
+
+microservices 02 +
+
+
+

service-eureka module

+
+

This module contains the needed classes and configuration to start a Eureka server.

+
+
+

This service runs by default on port 8761 although it can be changed in the application.properties file of the project.

+
+
+
+

service-zuul module

+
+

This module contains all the needed classes and configuration to start a Zuul server, that will be in charge of the routing and filter of the requests.

+
+
+

This service by default runs on port 8081 but, as we already mentioned, it can be changed through the file application.properties of the project.

+
+
+
+

service-auth module

+
+

This module runs an authentication and authorization mock microservice that allows to generate a security token to make calls to the rest of microservices. This module is only providing a basic structure, the security measures must be implemented fitting the requirements of each project (authentication through DB, SSO, LDAP, OAuth,…​)

+
+
+

This service runs by default on port 9999, although, as in previous services, it can be edited in the application.properties file.

+
+
+
+
+

Create New Microservices Application

+
+

To generate a new microservice project through the devonfw archetype, as in previous archetype example, you can follow the instructions explained in getting started creating new devonfw devon4j application. But, instead of using the standard archetype, we must provide the special microservices archetype archetype-microservices. Open a devonfw console (console.bat script) and launch a Maven command like the following (provide your own values for DgroupId, DartifactId, Dversion and Dpackage parameters, also provide the -DarchetypeVersion with latest value):

+
+
+
+
mvn -DarchetypeVersion=2.4.0 -DarchetypeGroupId=com.devonfw.microservices -DarchetypeArtifactId=microservices-archetype archetype:generate -DgroupId=com.capgemini -DartifactId=sampleapp1 -Dversion=0.1-SNAPSHOT -Dpackage=com.capgemini.sampleapp1
+
+
+
+

That command generates a simple application containing the source code for the microservice. By default, the pom.xml includes the devon-microservices module, that contains the security configuration, jwt interceptors, Hystrix, Ribbon and FeignClient configuration and some properties common to all microservices.

+
+
+

The created microservice runs by default on port 9001 and has the context-path with the same name as the project. These parameters can be changed through the 'application.properties' file of the project.

+
+
+
+

How to use microservices in devonfw

+
+

In the following sections we are going to provide some patterns to manage microservices in devonfw using the archetype, alongside the options that each of the available modules offer.

+
+
+

Eureka service

+
+

We are going to review the general options for the Eureka service. If you are interested in getting more details you can visit the official site for Spring Cloud Eureka clients.

+
+
+

To create an Eureka server you only need to create a new Spring Boot application and add the @EnableEurekaServer to the main class.

+
+
+ + + + + +
+ + +
+

The provided archetype archetype-microservices-infra already provides that annotated class.

+
+
+
+
+
+
@Configuration
+@EnableEurekaServer
+@EnableAutoConfiguration
+@SpringBootApplication
+public class EurekaBootApp {
+
+  public static void main(String[] args) {
+
+    new SpringApplicationBuilder(EurekaBootApp.class).web(true).run(args);
+  }
+}
+
+
+
+

The basic properties that must be configured for Eureka server are:

+
+
+
    +
  • +

    port: in which port the service will run. The default port is the 8761 and you have to keep in mind that the connection to this port is especially critical as all the microservices must be able to connect to this host:port. Remember that Eureka generates and manages the microservices catalog, so it's crucial to allow the microservices to register in this component.

    +
  • +
  • +

    url: which URL it manages as zone (the defaultZone the clients register against).

    +
  • +
+
+
+
+
eureka.instance.hostname=localhost
+eureka.instance.port=8761
+
+server.port=${eureka.instance.port}
+
+eureka.client.serviceUrl.defaultZone=http://${eureka.instance.hostname}:${eureka.instance.port}/eureka/
+
+
+
+

The way to connect a microservice to Eureka server is really simple. You only will need to specify the host:port where the server is located and annotate the Spring Boot class with @EnableMicroservices annotation.

+
+
+ + + + + +
+ + +
+

Instead of using that @EnableMicroservices annotation, you can use the equivalent Spring annotations @EnableDiscoveryClient or @EnableEurekaClient.

+
+
+
+
+
+
@Configuration
+@EnableMicroservices
+@SpringBootApplication
+public class MicroserviceBootApp {
+  public static void main(String[] args) {
+
+    SpringApplication.run(MicroserviceBootApp.class, args);
+  }
+}
+
+
+
+
+
eureka.instance.hostname=localhost
+eureka.instance.port=8761
+
+eureka.client.serviceUrl.defaultZone=http://${eureka.instance.hostname}:${eureka.instance.port}/eureka/
+
+
+
+

With this the application will register automatically in Eureka and will be validated each 30 seconds. This value can be changed editing the property eureka.instance.leaseRenewalIntervalInSeconds in application.properties file. It must be taken into account that each Eureka client will maintain a cache of Eureka records to avoid calling the service every time it is necessary to access another microservice. This cache is reloaded every 30 seconds, this value can also be edited through property eureka.client.registryFetchIntervalSeconds in application.properties file.

+
+
+
+

Zuul service

+
+

We are going to show an overview to the options of the Zuul service, if you want to know more details about this particular service visit the official site of Spring Cloud.

+
+
+

Zuul is the component in charge of routing and filtering the requests to the microservices system. It works as a gateway that, through a rule engine, redirects the requests to the suitable microservice. In addition, it can be used as a security filter as it can implement PRE-Filters and POST-Filters.

+
+
+

To create a basic Zuul server you only need to create a new Spring Boot application and add the @EnableZuulProxy annotation.

+
+
+
+
@EnableAutoConfiguration
+@EnableEurekaClient
+@EnableZuulProxy
+@SpringBootApplication
+public class ZuulBootApp {
+  public static void main(String[] args) {
+
+    SpringApplication.run(ZuulBootApp.class, args);
+  }
+}
+
+
+
+

To allow Zuul to redirect the requests we need to connect Zuul with the previously created Eureka service, to allow it to register and access the catalog of microservices created by Eureka.

+
+
+

Also, if we are going to use the Zuul service from a web browser, we must configure the CORS filter to allow connections from any source. This is really easy to implement by adding the following Java Bean to our ZuulBootApp class:

+
+
+
+
@Bean
+public CorsFilter corsFilter() {
+    final UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource();
+    final CorsConfiguration config = new CorsConfiguration();
+    config.setAllowCredentials(true);
+    config.addAllowedOrigin("*");
+    config.addAllowedHeader("*");
+    config.addAllowedMethod("OPTIONS");
+    config.addAllowedMethod("HEAD");
+    config.addAllowedMethod("GET");
+    config.addAllowedMethod("PUT");
+    config.addAllowedMethod("POST");
+    config.addAllowedMethod("DELETE");
+    config.addAllowedMethod("PATCH");
+    source.registerCorsConfiguration("/**", config);
+    return new CorsFilter(source);
+}
+
+
+
+

To configure the Zuul service we need to define a series of properties that we will describe below:

+
+
+
+
server.port=8081
+spring.application.name=zuulserver
+
+eureka.instance.hostname=localhost
+eureka.instance.port=8761
+eureka.client.serviceUrl.defaultZone=http://${eureka.instance.hostname}:${eureka.instance.port}/eureka/
+
+
+microservices.context-path=/demo
+
+zuul.routes.security.path=${microservices.context-path}/services/rest/security/**
+zuul.routes.security.serviceId=AUTH
+zuul.routes.security.stripPrefix=false
+
+zuul.routes.login.path=${microservices.context-path}/services/rest/login
+zuul.routes.login.serviceId=AUTH
+zuul.routes.login.stripPrefix=false
+
+
+zuul.ignoredServices='*'
+zuul.sensitive-headers=
+
+ribbon.eureka.enabled=true
+hystrix.command.default.execution.timeout.enabled=false
+
+
+
+
    +
  • +

    server.port: Is the port where the Zuul service is listening.

    +
  • +
  • +

    spring.application.name: The name of the service that will be sent to Eureka.

    +
  • +
  • +

    eureka.*: The properties for the register of the Eureka client.

    +
  • +
  • +

    zuul.routes.XXXXX: The configuration of a concrete route.

    +
  • +
  • +

    zuul.routes.XXXXX.path: The path used for a redirection.

    +
  • +
  • +

    zuul.routes.XXXXX.serviceId: ID of the service where the request will be redirected. It must match the property spring.application.name in the microservice.

    +
  • +
  • +

    zuul.routes.XXXXX.stripPrefix: by default set to false. With this property we configure if the part of the route that has matched the request must be cut out. i.e., if the path is /sample/services/rest/foomanagement/∗∗ and the property is set to true it will redirect to the microservice but it will only send the path **, the root /sample/services/rest/foomanagement/ will be removed.

    +
  • +
  • +

    zuul.ignoredServices: Configures which services, not matching any of the defined routes, must be ignored.

    +
  • +
  • +

    zuul.sensitive-headers: Configures which headers must be ignored. This property must be set to empty, otherwise Zuul will ignore security authorization headers and the json web token will not work.

    +
  • +
  • +

    ribbon.eureka.enabled: Configures if the Ribbon should be used to route the requests.

    +
  • +
  • +

    hystrix.command.default.execution.timeout.enabled: Enables or disables the timeout parameter to consider a microservices as unavailable. By default the value for this property is 1 second. Any request that takes more than this will be consider failed. By default in the archetype this property is disabled.

    +
  • +
+
+
+

Having an Eureka client activated, the Zuul service will refresh its content every 30 seconds, so a just registered service may still have not been cached in Zuul. On the contrary, if a service is unavailable, 3 cycles of 30 seconds must pass before Eureka sets its register as dead, and other 30 seconds for Zuul to refresh its cache.

+
+
+
+

Security, Authentication and authorization

+
+

The most commonly used authentication in micro-service environments is authentication based on json web tokens, since the server does not need to store any type of user information (stateless) and therefore favors the scalability of the microservices.

+
+
+ + + + + +
+ + +
+

The service-auth module is useful only if the authentication and authorization needs to be done by a remote service (e.g. to have a common auth. service to be used by several microservices).

+
+
+

Otherwise, the authentication and authorization can happen in the main application, that will perform the authentication and will generate the JWT.

+
+
+
+
+
Security in the monolith application
+
+

In this case, the main microservice or application will perform the authentication and generate the JWT, without using service-auth.

+
+
+

It works as follows:

+
+
+
    +
  • +

    The user is authenticated in our application, either through a user / password access, or through a third provider.

    +
  • +
  • +

    This authentication request is launched against the Zuul server which will redirect it to an instance of the microservice.

    +
  • +
  • +

    The microservice will check the user, retrieve their roles and metadata and generate two tokens: one with user access information and another needed to refresh the access token. This information will be returned to the client.

    +
  • +
  • +

    The client is now able to call the microservice, adding the authorization token to the header of the request.

    +
  • +
+
+
+
+microservices 07 +
+
+
+
+
Security in external service (service-auth)
+
+

It works as follows:

+
+
+
    +
  • +

    The user is authenticated in our application, either through a user / password access, or through a third provider.

    +
  • +
  • +

    This authentication request is launched against the Zuul server which will redirect it to an instance of the Auth microservice.

    +
  • +
  • +

    The Auth microservice will check the user, retrieve their roles and metadata and generate two tokens: one with user access information and another needed to refresh the access token. This information will be returned to the client.

    +
  • +
+
+
+
+microservices 03 +
+
+
+

The service-auth service is already prepared to listen to the /login path and generate the two mentioned tokens. To do so we can use the JsonWebTokenUtility class that is implemented in devonfw

+
+
+
+
      UserDetailsJsonWebTokenAbstract clientTo = new UserDetailsJsonWebTokenTo();
+      clientTo.setId(1L);
+      clientTo.setUsername("demo");
+      clientTo.setRoles(new ArrayList<>(Arrays.asList("DEMO")));
+      clientTo.setExpirationDate(buildExpirationDate(this.expirationTime * 60 * 1000L));
+
+      return new ResponseEntity<>(new JwtHeaderTo(this.jsonWebTokenUtility.createJsonWebTokenAccess(clientTo),
+          this.jsonWebTokenUtility.createJsonWebTokenRefresh(clientTo),
+          this.expirationTime * 60 * 1000L), HttpStatus.OK);
+
+
+
+ + + + + +
+ + +
+

In our example you can make a POST request to:

+
+
+

http://localhost:8081/service-auth/services/rest/login
+     HEADER     Content-Type : application/json
+     BODY        { "j_username" : "xxx", "j_password" : "xxx"}

+
+
+
+
+

This will generate a response like the following

+
+
+
+
{
+  "accessToken": "eyJhbGciOiJIUzUxMiJ9.eyJzdWIiOiJkZW1vIiwiZmlyc3ROYW1lIjoiZGVtbyIsImxhc3ROYW1lIjoiZGVtbyIsImV4cCI6MTQ4Nzg3NTAyMSwicm9sZXMiOlsiREVNTyJdfQ.aEdJWEpyvRlO8nF_rpSMSM7NXjRIyeJF425HRt8imCTsq4iGiWbmi1FFZ6pydMwKjd-Uw1-ZGf2WF58qjWc4xg",
+  "refreshToken": "eyJhbGciOiJIUzUxMiJ9.eyJzdWIiOiJkZW1vIiwiZmlyc3ROYW1lIjoiZGVtbyIsImxhc3ROYW1lIjoiZGVtbyIsImV4cCI6MTQ4Nzg3NTAyMSwicm9sZXMiOlsiUkVGUkVTSF9KV1QiXX0.YtK8Bh07O-h1GTsyTK36YHxkGniyiTlxnazZXi8tT-RtUxxW8We8cdiYJn6tw0RoFkOyr1F5EzvkGyU0HNoLyw",
+  "expirationTime": 900000,
+  "accessHeaderName": "Authorization",
+  "refreshHeaderName": "Authorization-Refresh"
+}
+
+
+
+

The client now should store, in the header defined in accessHeaderName, the token included as accessToken.

+
+
+ + + + + +
+ + +
+

When using service-auth (or any other external authorization service), we must secure not only +the communication between the Client and Zuul, but also between Zuul and the service-auth.

+
+
+

There is very sensitive information being sent (username and password) between the different services that +anyone could read if the channel is not properly secured.

+
+
+
+
+

When configuring the service-auth module it is very important to take into account the following aspects:

+
+
+
    +
  • +

    The expiration date of the token can be configured in the properties file with the property jwt.expirationTime (will appear in minutes).

    +
  • +
  • +

    The key for the token generation can be configured also in the properties file using the property jwt.encodedKey which will have a Base64 encoded value.

    +
  • +
  • +

    The roles inserted in the token should be the list of the access roles of the user. Doing this we avoid that each microservice has to look for the roles that belong to a profile.

    +
  • +
  • +

    If you want to use a specific UserDetails for the project, with new fields, you must extend the behavior as explained in here.

    +
  • +
+
+
+

From now on, the client will be able to make calls to the microservices, sending the access token in the header of the request.

+
+
+
+microservices 04 +
+
+
+

Once the request reaches the microservice, the app must validate the token and register the user in the security context. These operations will be automatic as long as the microservice has enabled the security inherited from the JsonWebTokenSecurityConfig class. This is done using the following code:

+
+
+
+
@Configuration
+@EnableWebSecurity
+public class WebSecurityConfig extends JsonWebTokenSecurityConfig {
+
+  @Override
+  public JsonWebTokenUtility getJsonWebTokenUtility() {
+
+    return new JsonWebTokenUtility();
+  }
+
+  @Override
+  protected void setupAuthorization(HttpSecurity http) throws Exception {
+
+    http.authorizeRequests()
+        // authenticate all other requests
+        .anyRequest().authenticated();
+  }
+
+}
+
+
+
+

In addition, devonfw has already implemented the needed interceptors and filters to resend the security header each time that a microservice calls other microservice of the ecosystem.

+
+
+

When validating the token, it is also checked its expiration date, so it is highly recommended that the client refresh from time to time the token, in order to update its expiration date. This is done by launching a request to /refresh_jwt within the service-auth module and sending both the access token and the refresh token in the header.

+
+
+
+microservices 05 +
+
+
+

If for any reason an attempt is made to access a business operation without having a valid token, or without sufficient role level permission to execute that operation, the microservice response will be Forbidden.

+
+
+
+microservices 06 +
+
+
+
+
+
+

How to modify the UserDetails information

+
+

In order to modify the UserDetails information we will need to accomplish two steps: modify the authentication service to generate the authentication token with the custom attributes embedded, and modify the pre-authentication filter of the microservices to convert the token into an Object with the custom attributes available.

+
+
+

Modify the authentication service to generate a new token

+
+

We must modify the service-auth that is in charge of logging the user and generate the security token.

+
+
+

The first thing to do is to create a UserDetails class that contains the required attributes and custom attributes. In the code sample we will call this class UserDetailsJsonWebTokenCustomTo, and must either implement the generic UserDetailsJsonWebTokenAbstract interface or extend it from the current UserDetailsJsonWebTokenTo class, since the services are prepared to work with it. In the example, we will add two new attributes firstName and lastName.

+
+
+
+
public class UserDetailsJsonWebTokenCustomTo extends UserDetailsJsonWebTokenTo {
+
+  private String firstName;
+  private String lastName;
+
+  public String getFirstName() {
+    return this.firstName;
+  }
+
+  public String getLastName() {
+    return this.lastName;
+  }
+
+  public void setFirstName(String firstName) {
+    this.firstName = firstName;
+  }
+
+  public void setLastName(String lastName) {
+    this.lastName = lastName;
+  }
+}
+
+
+
+

In case that the UserDetailsJsonWebTokenAbstract interface is implemented, in addition to the new attributes the rest of the interface must be implemented.

+
+
+

The next step would be to override the component that performs the conversions Token→UserDetails and UserDetails→Token. This component is the JsonWebTokenUtility, so you should create a new class that extends from this, in the example we will call it JsonWebTokenUtilityCustom. In this new class, you must overwrite the only two methods that are allowed to perform the conversions, to add writing and reading operations for the new custom attributes.

+
+
+
+
public class JsonWebTokenUtilityCustom extends JsonWebTokenUtility {
+
+  @Override
+  protected UserDetailsJsonWebTokenAbstract addCustomPropertiesClaimsToUserDetails(Claims claims) {
+
+    UserDetailsJsonWebTokenCustomTo userDetails = new UserDetailsJsonWebTokenCustomTo();
+
+    userDetails.setFirstName(claims.get("firstName", String.class));
+    userDetails.setLastName(claims.get("lastName", String.class));
+
+    return userDetails;
+  }
+
+  @Override
+  protected void addCustomPropertiesUserDetailsToJwt(UserDetailsJsonWebTokenAbstract authTokenDetailsDTO, JwtBuilder jBuilder) {
+
+    UserDetailsJsonWebTokenCustomTo userDetails = (UserDetailsJsonWebTokenCustomTo) authTokenDetailsDTO;
+
+    jBuilder.claim("firstName", userDetails.getFirstName());
+    jBuilder.claim("lastName", userDetails.getLastName());
+  }
+}
+
+
+
+

Now you should enable that new converter to replace the default one. In the WebSecurityConfig class you must change the related @Bean to start using this new class

+
+
+
+
@Configuration
+@EnableWebSecurity
+public class WebSecurityConfig extends WebSecurityConfigurerAdapter {
+
+...
+
+  @Bean
+  public JsonWebTokenUtility getJsonWebTokenUtility() {
+    return new JsonWebTokenUtilityCustom();
+  }
+
+...
+
+}
+
+
+
+

Finally, in the login process the new attributes should be filled in when creating the user. In our example in the class SecuritymanagementRestServiceImpl.

+
+
+
+
      UserDetailsJsonWebTokenCustomTo clientTo = new UserDetailsJsonWebTokenCustomTo();
+      clientTo.setId(1L);
+      clientTo.setUsername("demo");
+      clientTo.setRoles(new ArrayList<>(Arrays.asList("DEMO")));
+      clientTo.setExpirationDate(buildExpirationDate(this.expirationTime * 60 * 1000L));
+
+      clientTo.setFirstName("firstName");
+      clientTo.setLastName("lastName");
+
+
+      return new ResponseEntity<>(new JwtHeaderTo(this.jsonWebTokenUtility.createJsonWebTokenAccess(clientTo),
+          this.jsonWebTokenUtility.createJsonWebTokenRefresh(clientTo), //
+          this.expirationTime * 60 * 1000L), HttpStatus.OK);
+
+
+
+
+

Modify the pre-authentication filter to read the new token

+
+

Once a token with custom attributes has been obtained, the steps to read it and put it in the security context are very simple. The changes shown in this point should be reproduced in those microservices where you want to use the new custom attributes. The steps to follow are those:

+
+
+
    +
  • +

    Create a UserDetailsJsonWebTokenCustomTo class that contains the new attributes, as was done in the previous section. The ideal would be to reuse the same class.

    +
  • +
  • +

    Create a JsonWebTokenUtilityCustom class that extends the implementation of the token generator, just as it was done in the previous section. Again, the ideal would be to reuse the same class.

    +
  • +
  • +

    Configure the creation of this new @Bean in the WebSecurityConfig class just like in the previous section.

    +
  • +
+
+
+

With these three steps you can use the new security object with the custom attributes. One way to use it could be as follows:

+
+
+
+
   UserDetailsJsonWebToken principal = (UserDetailsJsonWebToken) SecurityContextHolder.getContext().getAuthentication().getPrincipal();
+
+   UserDetailsJsonWebTokenCustomTo userDetails = (UserDetailsJsonWebTokenCustomTo) principal.getUserDetailsJsonWebTokenAbstract();
+
+   userDetails.getFirstName();
+
+
+
+
+
+

How to start with a microservice

+
+

Once the microservice has been created through its archetype, you need to have a series of points in mind to configure it correctly:

+
+
+
    +
  • +

    The microservice must have the microservices starter in its pom.xml configuration to be able to use the interceptors and the generic configuration.

    +
  • +
+
+
+
+
<dependency>
+      <groupId>com.devonfw.starter</groupId>
+      <artifactId>devonfw-microservices-starter</artifactId>
+      <version>${devonfw.version}</version>
+</dependency>
+
+
+
+
    +
  • +

    It should be annotated in its initial class with @EnableMicroservices, this will activate the annotations for Eureka client, CircuitBreaker and the client Feign. All of this is configured in the properties file.

    +
  • +
  • +

    This is a bootified application so in the pom.xml file you will have to define which one is the boot class.

    +
  • +
  • +

    You must consider the boot configuration: port and context-path. In development, each microservice must have a different port, to avoid colliding with other microservices, while the context-path is recommended to be the same, to simplify the Zuul configurations and calls between microservices.

    +
  • +
  • +

    You can use @RolesAllowed annotations in the services methods to secure them, as long as the Web security inherited from JsonWebTokenSecurityConfig has been enabled, since it is the responsible for putting the UserDetails generated from the token into the security context.

    +
  • +
  • +

    All microservices must share the security key to encrypt and decrypt the token. And, specially, it should be the same as the service-auth, which will be responsible for generating the initial token.

    +
  • +
  • +

    In the Zuul module, the routes must be well configured to be able to route certain URLs to the new created microservices. So, if we have added a sampleapp1 with server.context-path=/sampleapp1 we will need to map that service in the Zuul’s application.properties file adding

    +
  • +
+
+
+
+
zuul.routes.sampleapp1.path=/sampleapp1/services/rest/**
+zuul.routes.sampleapp1.serviceId=sampleapp1
+zuul.routes.sampleapp1.stripPrefix=false
+
+
+
+

The rest will be treated as if it were a normal Web application, which exposes some services through a REST API.

+
+
+
+

Calls between microservices

+
+

In order to invoke a microservice manually, you would need to implement the following steps:

+
+
+
    +
  • +

    Obtain the instances of the microservice you want to invoke.

    +
  • +
  • +

    Choose which of all instances is the most optimal for the client.

    +
  • +
  • +

    Retrieve the security token from the source request.

    +
  • +
  • +

    Create a REST client that invokes the instance by passing the generated security token.

    +
  • +
  • +

    Intercept the response in case it causes an error, to avoid a cascade propagation.

    +
  • +
+
+
+

Thanks to the combination of Feign, Hystrix, Ribbon, Eureka and devonfw it is possible to make a call to another microservice in a declarative, very simple and almost automatic way.

+
+
+

You only need to create an interface with the methods that need to be invoked. This interface must be annotated with @FeignClient and each of the methods created must have a path and a method in the @RequestMapping annotation. An example interface might be as follows:

+
+
+
+
@FeignClient(value = "foo")
+public interface FooClient {
+
+  @RequestMapping(method = RequestMethod.GET, value = "/${server.context-path}/services/rest/foomanagement/v1/foo")
+  FooMessageTo foo();
+
+}
+
+
+
+

It is important to highlight the following aspects:

+
+
+
    +
  • +

    The @FeignClient annotation comes along with the name of the microservice to be invoked. The correct and optimal would be to use the name of the microservice, but it is also possible to launch the request to the Zuul server. In the latter case it would be the server itself that would perform the load balancing and self-discovery of the most appropriate microservice, but keep in mind that, doing this, the proxy server is also overloaded with unnecessary requests.

    +
  • +
  • +

    The @RequestMapping annotation must have the same method and path as expected on the target, otherwise the request will fail and no response will be found.

    +
  • +
  • +

    The input and output parameters will be mapped to json, so they may not be exactly the same classes in both destination and source. It will depend on how you want to send and retrieve the information.

    +
  • +
+
+
+

Once the interface is created and annotated, in order to use the calls, it would be enough to inject the component into the object from which we want to use it and invoke any of its methods. Spring Cloud will automatically generate the required bean.

+
+
+
+
...
+
+  @Inject
+  FooClient fooClient;
+
+  public FooMessageTo invokeFooClient() {
+    return this.fooClient.foo();
+  }
+
+...
+
+
+
+

With these two annotations, almost all the functionality is covered automatically: search in Eureka, choice of the best instance through Ribbon, registration of the token and creation of the REST client. It would only be necessary to control the response in case of failure. The idea is that, in case of failure or outage of the invoked microservice, an alternative plan is executed from the origin of the invocation. This is as simple as activating the fallback in the @FeignClient annotation and assigning a class that will be invoked in case the REST client response fails.

+
+
+
+
@FeignClient(value = "foo", fallback = FooClientHystrixFallback.class)
+public interface FooClient {
+
+  @RequestMapping(method = RequestMethod.GET, value = "/${server.context-path}/services/rest/foomanagement/v1/foo")
+  FooMessageTo foo();
+
+}
+
+
+
+

Finally, you will need to create a class annotated with @Component that implements the interface of the Feign client. Within this implementation you can add the desired functionality in case the invocation to the REST client fails.

+
+
+
+
@Component
+public class FooClientHystrixFallback implements FooClient {
+
+  @Override
+  public FooMessageTo foo() {
+    return new FooMessageTo("Fail Message");
+  }
+
+}
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/architecture.html b/docs/devonfw.github.io/1.0/devon4j.wiki/architecture.html new file mode 100644 index 00000000..d7b06d1a --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/architecture.html @@ -0,0 +1,605 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Architecture

+
+
+

There are many different views that are summarized by the term architecture. First, we will introduce the key principles and architecture principles of devonfw. Then, we will go into details of the architecture of an application.

+
+
+

Key Principles

+
+
+

For devonfw we follow these fundamental key principles for all decisions about architecture, design, or choosing standards, libraries, and frameworks:

+
+
+
    +
  • +

    KISS
    +Keep it small and simple

    +
  • +
  • +

    Open
    +Commitment to open standards and solutions (no required dependencies to commercial or vendor-specific standards or solutions)

    +
  • +
  • +

    Patterns
    +We concentrate on providing patterns, best-practices and examples rather than writing framework code.

    +
  • +
  • +

    Solid
    +We pick solutions that are established and have been proven to be solid and robust in real-life (business) projects.

    +
  • +
+
+
+
+
+

Architecture Principles

+
+
+

Additionally we define the following principles that our architecture is based on:

+
+
+
    +
  • +

    Component Oriented Design
    +We follow a strictly component oriented design to address the following sub-principles:

    +
    +
      +
    • +

      Separation of Concerns

      +
    • +
    • +

      Reusability and avoiding redundant code

      +
    • +
    • +

      Information Hiding via component API and its exchangeable implementation treated as secret.

      +
    • +
    • +

      Design by Contract for self-contained, descriptive, and stable component APIs.

      +
    • +
    • +

      Layering as well as separation of business logic from technical code for better maintenance.

      +
    • +
    • +

      Data Sovereignty (and high cohesion with low coupling) says that a component is responsible for its data and changes to this data shall only happen via the component. Otherwise, maintenance problems will arise to ensure that data remains consistent. Therefore, interfaces of a component that may be used by other components are designed call-by-value and not call-by-reference.

      +
    • +
    +
    +
  • +
  • +

    Homogeneity
    +Solve similar problems in similar ways and establish a uniform code-style.

    +
  • +
+
+
+

As an architect you should be prepared for the future by reading the TechnoVision.

+
+
+
+
+

Application Architecture

+
+
+

For the architecture of an application we distinguish the following views:

+
+
+
    +
  • +

    The Business Architecture describes an application from the business perspective. It divides the application into business components and with full abstraction of technical aspects.

    +
  • +
  • +

    The Technical Architecture describes an application from the technical implementation perspective. It divides the application into technical layers and defines which technical products and frameworks are used to support these layers.

    +
  • +
  • +

    The Infrastructure Architecture describes an application from the operational infrastructure perspective. It defines the nodes used to run the application including clustering, load-balancing and networking. This view is not explored further in this guide.

    +
  • +
+
+
+

Business Architecture

+
+

The business architecture divides the application into business components. A business component has a well-defined responsibility that it encapsulates. All aspects related to that responsibility have to be implemented within that business component. Further, the business architecture defines the dependencies between the business components. These dependencies need to be free of cycles. A business component exports its functionality via well-defined interfaces as a self-contained API. A business component may use another business component via its API and compliant with the dependencies defined by the business architecture.

+
+
+

As the business domain and logic of an application can be totally different, the devonfw can not define a standardized business architecture. Depending on the business domain it has to be defined from scratch or from a domain reference architecture template. For very small systems it may be suitable to define just a single business component containing all the code.

+
+
+
+

Technical Architecture

+
+

The technical architecture divides the application into technical layers based on the multilayered architecture. A layer is a unit of code with the same category such as a service or presentation logic. So, a layer is often supported by a technical framework. Each business component can therefore be split into component parts for each layer. However, a business component may not have component parts for every layer (e.g. only a presentation part that utilized logic from other components).

+
+
+

An overview of the technical reference architecture of the devonfw is given by figure "Technical Reference Architecture". +It defines the following layers visualized as horizontal boxes:

+
+
+ +
+
+

Also, you can see the (business) components as vertical boxes (e.g. A and X) and how they are composed out of component parts each one assigned to one of the technical layers.

+
+
+

Further, there are technical components for cross-cutting aspects grouped by the gray box on the left. Here is a complete list:

+
+ +
+
+devonfw architecture blueprint +
+
Figure 1. Technical Reference Architecture
+
+
+

Please click on the architecture image to open it as SVG and click on the layers and cross-cutting topics to open the according documentation guide.

+
+
+

We reflect this architecture in our code as described in our coding conventions allowing a traceability of business components, use-cases, layers, etc. into the code and giving +developers a sound orientation within the project.

+
+
+

Further, the architecture diagram shows the allowed dependencies illustrated by the dark green connectors. +Within a business component a component part can call the next component part on the layer directly below via a dependency on its API (vertical connectors). +While this is natural and obvious, it is generally forbidden to have dependencies upwards the layers +or to skip a layer by a direct dependency on a component part two or more layers below. +The general dependencies allowed between business components are defined by the business architecture. +In our reference architecture diagram we assume that the business component A1 is allowed to depend +on component A2. Therefore, a use-case within the logic component part of A1 is allowed to call a +use-case from A2 via a dependency on the component API. The same applies for dialogs on the client layer. +This is illustrated by the horizontal connectors. Please note that persistence entities are part of the API of the data-access component part so only the logic component part of the same +business component may depend on them.

+
+
+

The technical architecture has to address non-functional requirements:

+
+
+
    +
  • +

    scalability
    +is established by keeping state in the client and making the server state-less (except for login session). Via load-balancers new server nodes can be added to improve performance (horizontal scaling).

    +
  • +
  • +

    availability and reliability
    +are addressed by clustering with redundant nodes avoiding any single-point-of failure. If one node fails the system is still available. Further, the software has to be robust so there are no dead-locks or other bad effects that can make the system unavailable or not reliable.

    +
  • +
  • +

    security
    +is achieved in devonfw by the right templates and best-practices that avoid vulnerabilities. See security guidelines for further details.

    +
  • +
  • +

    performance
    +is obtained by choosing the right products and proper configurations. While the actual implementation of the application matters for performance a proper design is important as it is the key to allow performance-optimizations (see e.g. caching).

    +
  • +
+
+
+

Technology Stack

+
+

The technology stack of the devonfw is illustrated by the following table.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Technology Stack of devonfw
TopicDetailStandardSuggested implementation

runtime

language & VM

Java

Oracle JDK

runtime

servlet-container

JEE

tomcat

component management

dependency injection

JSR330 & JSR250

spring

configuration

framework

-

spring-boot

persistence

OR-mapper

JPA

hibernate

batch

framework

JSR352

spring-batch

service

SOAP services

JAX-WS

CXF

service

REST services

JAX-RS

CXF

logging

framework

slf4j

logback

validation

framework

beanvalidation/JSR303

hibernate-validator

security

Authentication & Authorization

JAAS

spring-security

monitoring

framework

JMX

spring

monitoring

HTTP Bridge

HTTP & JSON

jolokia

AOP

framework

dynamic proxies

spring AOP

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/coding-conventions.html b/docs/devonfw.github.io/1.0/devon4j.wiki/coding-conventions.html new file mode 100644 index 00000000..fe118bf4 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/coding-conventions.html @@ -0,0 +1,865 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Coding Conventions

+
+
+

The code should follow general conventions for Java (see Oracle Naming Conventions, Google Java Style, etc.). We consider this as common sense and provide configurations for SonarQube and related tools such as Checkstyle instead of repeating this here.

+
+
+

Naming

+
+
+

Besides general Java naming conventions, we follow the additional rules listed here explicitly:

+
+
+
    +
  • +

    Always use short but speaking names (for types, methods, fields, parameters, variables, constants, etc.).

    +
  • +
  • +

    Strictly avoid special characters in technical names (for files, types, fields, methods, properties, variables, database tables, columns, constraints, etc.). In other words only use Latin alphanumeric ASCII characters with the common allowed technical separators for the according context (e.g. underscore) for technical names (even excluding whitespaces).

    +
  • +
  • +

    For package segments and type names prefer singular forms (CustomerEntity instead of CustomersEntity). Only use plural forms when there is no singular or it is really semantically required (e.g. for a container that contains multiple of such objects).

    +
  • +
  • +

    Avoid having duplicate type names. The name of a class, interface, enum or annotation should be unique within your project unless this is intentionally desired in a special and reasonable situation.

    +
  • +
  • +

    Avoid artificial naming constructs such as prefixes (I*) or suffixes (*IF) for interfaces.

    +
  • +
  • +

    Use CamelCase even for abbreviations (XmlUtil instead of XMLUtil)

    +
  • +
  • +

    Avoid property/field names where the second character is upper-case at all (e.g. 'aBc'). See #1095 for details.

    +
  • +
  • +

    Names of Generics should be easy to understand. Where suitable follow the common rule E=Element, T=Type, K=Key, V=Value but feel free to use longer names for more specific cases such as ID, DTO or ENTITY. The capitalized naming helps to distinguish a generic type from a regular class.

    +
  • +
+
+
+
+
+

Packages

+
+
+

Java Packages are the most important element to structure your code. We use a strict packaging convention to map technical layers and business components (slices) to the code (See technical architecture for further details). By using the same names in documentation and code we create a strong link that gives orientation and makes it easy to find from business requirements, specifications or story tickets into the code and back.

+
+
+

For a devon4j based application we use the following Java-Package schema:

+
+
+
+
«root».«component».«layer»[.«detail»]
+
+
+
+

E.g. in our example application we find the Spring Data repositories for the ordermanagement component in the package com.devonfw.application.mtsj.ordermanagement.dataaccess.api.repo

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Segments of package schema
SegmentDescriptionExample

«root»

Is the basic Java Package name-space of your app. Typically we suggest to use «group».«artifact» where «group» is your maven/gradle groupId corresponding to your organization or IT project owning the code following common Java Package conventions. The segment «artifact» is your maven/gradle artifactId and is typically the technical name of your app.

com.devonfw.application.mtsj

«component»

The (business) component the code belongs to. It is defined by the business architecture and uses terms from the business domain. Use the implicit component general for code not belonging to a specific component (foundation code).

salesmanagement

«layer»

The name of the technical layer (See technical architecture). Details are described for the modern project structure and for the classic project structure.

logic

«detail»

Here you are free to further divide your code into sub-components and other concerns according to the size of your component part. If you want to strictly separate API from implementation you should start «detail» with «scope» that is explained below.

dao

«scope»

The scope which is one of api (official API to be used by other layers or components), base (basic code to be reused by other implementations) and impl (implementation that should never be imported from outside). This segment was initially mandatory but due to trends such as microservices, lean, and agile we decided to make it optional and do not force anybody to use it.

api

+
+

Please note that devon4j library modules for spring use com.devonfw.module as «root» and the name of the module as «component». E.g. the API of our beanmapping module can be found in the package com.devonfw.module.beanmapping.common.api.

+
+
+
+
+

Code Tasks

+
+
+

Code spots that need some rework can be marked with the following task tags. These are already properly pre-configured in your development environment for auto completion and to view tasks you are responsible for. It is important to keep the number of code tasks low. Therefore, every member of the team should be responsible for the overall code quality. So if you change a piece of code and hit a code task that you can resolve in a reliable way, please do this as part of your change and remove the according tag.

+
+
+

TODO

+
+

Used to mark a piece of code that is not yet complete (typically because it can not be completed due to a dependency on something that is not ready).

+
+
+
+
 // TODO «author» «description»
+
+
+
+

A TODO tag is added by the author of the code who is also responsible for completing this task.

+
+
+
+

FIXME

+
+
+
 // FIXME «author» «description»
+
+
+
+

A FIXME tag is added by the author of the code or someone who found a bug he can not fix right now. The «author» who added the FIXME is also responsible for completing this task. This is very similar to a TODO but with a higher priority. FIXME tags indicate problems that should be resolved before a release is completed while TODO tags might have to stay for a longer time.

+
+
+
+

REVIEW

+
+
+
 // REVIEW «responsible» («reviewer») «description»
+
+
+
+

A REVIEW tag is added by a reviewer during a code review. Here the original author of the code is responsible to resolve the REVIEW tag and the reviewer is assigning this task to him. This is important for feedback and learning and has to be aligned with a review "process" where people talk to each other and get into discussion. In smaller or local teams a peer-review is preferable but this does not scale for large or even distributed teams.

+
+
+
+
+
+

Code-Documentation

+
+
+

As a general goal, the code should be easy to read and understand. Besides clear naming, the documentation is important. We follow these rules:

+
+
+
    +
  • +

    APIs (especially component interfaces) are properly documented with JavaDoc.

    +
  • +
  • +

    JavaDoc shall provide actual value - we do not write JavaDoc to satisfy tools such as checkstyle but to express information not already available in the signature.

    +
  • +
  • +

    We make use of {@link} tags in JavaDoc to make it more expressive.

    +
  • +
  • +

    JavaDoc of APIs describes how to use the type or method and not how the implementation internally works.

    +
  • +
  • +

    To document implementation details, we use code comments (e.g. // we have to flush explicitly to ensure version is up-to-date). This is only needed for complex logic.

    +
  • +
  • +

    Avoid the pointless {@inheritDoc} as since Java 1.5 there is the @Override annotation for overridden methods and your JavaDoc is inherited automatically even without any JavaDoc comment at all.

    +
  • +
+
+
+
+
+

Code-Style

+
+
+

This section gives you best practices to write better code and avoid pitfalls and mistakes.

+
+
+

BLOBs

+
+

Avoid using byte[] for BLOBs as this will load them entirely into your memory. This will cause performance issues or out of memory errors. Instead, use streams when dealing with BLOBs. For further details see BLOB support.

+
+
+
+

Stateless Programming

+
+

When implementing logic as components or beans of your container using dependency injection, we strongly encourage stateless programming. +This is not about data objects like an entity or transfer-object that are stateful by design. +Instead this applies to all classes annotated with @Named, @ApplicationScoped, @Stateless, etc. and all their super-classes. +These classes especially include your repositories, use-cases, and REST services. +Such classes shall never be modified after initialization. +Methods called at runtime (after initialization via the container) must not assign fields (member variables of your class) or mutate the object stored in a field. +This allows your component or bean to be stateless and thread-safe. +Therefore it can be initialized as a singleton so only one instance is created and shared across all threads of the application. +Here is an example:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcApproveContractImpl implements UcApproveContract {
+
+  // bad
+  private String contractOwner;
+
+  private MyState state;
+
+  @Override
+  public void approve(Contract contract) {
+    this.contractOwner = contract.getOwner();
+    this.contractOwner = this.contractOwner.toLowerCase(Locale.US);
+    this.state.setAdmin(this.contractOwner.endsWith("admin"));
+    if (this.state.isAdmin()) {
+      ...
+    } else {
+      ...
+    }
+  }
+
+  // fine
+  @Override
+  public void approveContract(Contract contract) {
+    String contractOwner = contract.getOwner().toLowerCase(Locale.US);
+    if (contractOwner.endsWith("admin")) {
+      ...
+    } else {
+      ...
+    }
+  }
+}
+
+
+
+

As you can see in the bad code, fields of the class are assigned when the method approve is called. +So multiple users and therefore threads calling this method concurrently can interfere and overwrite this state causing side-effects on parallel threads. +This will lead to nasty bugs and errors that are hard to trace down. +They will not occur in simple tests but for sure in production with real users. +Therefore never do this and implement your functionality stateless. +That is keeping all state in local variables and strictly avoid modifying fields or their value as illustrated in the fine code. +If you find yourself passing many parameters between methods that all represent state, you can easily create a separate class that encapsulates this state. +However, then you need to create this state object in your method as local variable and pass it between methods as parameter:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcApproveContractImpl implements UcApproveContract {
+
+  // fine
+  @Override
+  public void approveContract(Contract contract) {
+    String contractOwner = contract.getOwner().toLowerCase(Locale.US);
+    MyState state = new MyState();
+    state.setAdmin(contractOwner.endsWith("admin"));
+    doApproveContract(contract, state);
+  }
+}
+
+
+
+
+

Closing Resources

+
+

Resources such as streams (InputStream, OutputStream, Reader, Writer) or transactions need to be handled properly. Therefore, it is important to follow these rules:

+
+
+
    +
  • +

    Each resource has to be closed properly, otherwise you will get out of file handles, TX sessions, memory leaks or the like

    +
  • +
  • +

    Where possible avoid to deal with such resources manually. That is why we are recommending @Transactional for transactions in devonfw (see Transaction Handling).

    +
  • +
  • +

    In case you have to deal with resources manually (e.g. binary streams) ensure to close them properly. See the example below for details.

    +
  • +
+
+
+

Closing streams and other such resources is error prone. Have a look at the following example:

+
+
+
+
// bad
+try {
+  InputStream in = new FileInputStream(file);
+  readData(in);
+  in.close();
+} catch (IOException e) {
+  throw new IllegalStateException("Failed to read data.", e);
+}
+
+
+
+

The code above is wrong as in case of an IOException the InputStream is not properly closed. In a server application such mistakes can cause severe errors that typically will only occur in production. As such resources implement the AutoCloseable interface you can use the try-with-resource syntax to write correct code. The following code shows a correct version of the example:

+
+
+
+
// fine
+try (InputStream in = new FileInputStream(file)) {
+  readData(in);
+} catch (IOException e) {
+  throw new IllegalStateException("Failed to read data.", e);
+}
+
+
+
+
+

Catching and handling Exceptions

+
+

When catching exceptions always ensure the following:

+
+
+
    +
  • +

    Never call printStackTrace() method on an exception

    +
  • +
  • +

    Either log or wrap and re-throw the entire caught exception. Be aware that the cause(s) of an exception is very valuable information. If you lose such information by improper exception-handling you may be unable to properly analyse production problems, which can cause severe issues.

    +
    +
      +
    • +

      If you wrap and re-throw an exception ensure that the caught exception is passed as cause to the newly created and thrown exception.

      +
    • +
    • +

      If you log an exception ensure that the entire exception is passed as argument to the logger (and not only the result of getMessage() or toString() on the exception).

      +
    • +
    +
    +
  • +
  • +

    See exception handling

    +
  • +
+
+
+
+

Lambdas and Streams

+
+

With Java8 you have cool new features like lambdas and monads (like Stream, CompletableFuture, Optional, etc.). +However, these new features can also be misused or lead to code that is hard to read or debug. To avoid pain, we give you the following best practices:

+
+
+
    +
  1. +

    Learn how to use the new features properly before using them. Developers are often keen on using cool new features. When you do your first experiments in your project code you will cause deep pain and might be ashamed afterwards. Please study the features properly. Even Java8 experts still write for loops to iterate over collections, so only use these features where it really makes sense.

    +
  2. +
  3. +

    Streams shall only be used in fluent API calls as a Stream can not be forked or reused.

    +
  4. +
  5. +

    Each stream has to have exactly one terminal operation.

    +
  6. +
  7. +

    Do not write multiple statements into lambda code:

    +
    +
    +
    // bad
    +collection.stream().map(x -> {
    +Foo foo = doSomething(x);
    +...
    +return foo;
    +}).collect(Collectors.toList());
    +
    +
    +
    +

    This style makes the code hard to read and debug. Never do that! Instead, extract the lambda body to a private method with a meaningful name:

    +
    +
    +
    +
    // fine
    +collection.stream().map(this::convertToFoo).collect(Collectors.toList());
    +
    +
    +
  8. +
  9. +

    Do not use parallelStream() in general code (that will run on server side) unless you know exactly what you are doing and what is going on under the hood. Some developers might think that using parallel streams is a good idea as it will make the code faster. However, if you want to do performance optimizations talk to your technical lead (architect). Many features such as security and transactions will rely on contextual information that is associated with the current thread. Hence, using parallel streams will most probably cause serious bugs. Only use them for standalone (CLI) applications or for code that is just processing large amounts of data.

    +
  10. +
  11. +

    Do not perform operations on a sub-stream inside a lambda:

    +
    +
    +
    set.stream().flatMap(x -> x.getChildren().stream().filter(this::isSpecial)).collect(Collectors.toList()); // bad
    +set.stream().flatMap(x -> x.getChildren().stream()).filter(this::isSpecial).collect(Collectors.toList()); // fine
    +
    +
    +
  12. +
  13. +

    Only use collect at the end of the stream:

    +
    +
    +
    set.stream().collect(Collectors.toList()).forEach(...) // bad
    +set.stream().peek(...).collect(Collectors.toList()) // fine
    +
    +
    +
  14. +
  15. +

    Lambda parameters with Types inference

    +
    +
    +
    (String a, Float b, Byte[] c) -> a.toString() + Float.toString(b) + Arrays.toString(c)  // bad
    +(a,b,c)  -> a.toString() + Float.toString(b) + Arrays.toString(c)  // fine
    +
    +Collections.sort(personList, (Person p1, Person p2) -> p1.getSurName().compareTo(p2.getSurName()));  // bad
    +Collections.sort(personList, (p1, p2) -> p1.getSurName().compareTo(p2.getSurName()));  // fine
    +
    +
    +
  16. +
  17. +

    Avoid Return Braces and Statement

    +
    +
    +
     a ->  { return a.toString(); } // bad
    + a ->  a.toString();   // fine
    +
    +
    +
  18. +
  19. +

    Avoid Parentheses with Single Parameter

    +
    +
    +
    (a) -> a.toString(); // bad
    + a -> a.toString();  // fine
    +
    +
    +
  20. +
  21. +

    Avoid if/else inside foreach method. Use Filter method & comprehension

    +
    +
    +
    // bad
    +static public Iterator<String> TwitterHandles(Iterator<Author> authors, string company) {
    +    final List result = new ArrayList<String> ();
    +    foreach (Author a : authors) {
    +      if (a.Company.equals(company)) {
    +        String handle = a.TwitterHandle;
    +        if (handle != null)
    +          result.Add(handle);
    +      }
    +    }
    +    return result;
    +  }
    +
    +
    +
    +
    +
    // fine
    +public List<String> twitterHandles(List<Author> authors, String company) {
    +    return authors.stream()
    +            .filter(a -> null != a && a.getCompany().equals(company))
    +            .map(a -> a.getTwitterHandle())
    +            .collect(toList());
    +  }
    +
    +
    +
  22. +
+
+
+
+

Optionals

+
+

With Optional you can wrap values to avoid a NullPointerException (NPE). However, it is not a good code-style to use Optional for every parameter or result to express that it may be null. For such cases use @Nullable or, even better, annotate @NotNull where null is not acceptable.

+
+
+

However, Optional can be used to prevent NPEs in fluent calls (due to the lack of the elvis operator):

+
+
+
+
Long id;
+id = fooCto.getBar().getBar().getId(); // may cause NPE
+id = Optional.ofNullable(fooCto).map(FooCto::getBar).map(BarCto::getBar).map(BarEto::getId).orElse(null); // null-safe
+
+
+
+
+

Encoding

+
+

Encoding (esp. Unicode with combining characters and surrogates) is a complex topic. Please study this topic if you have to deal with encodings and processing of special characters. For the basics follow these recommendations:

+
+
+
    +
  • +

    Whenever possible prefer unicode (UTF-8 or better) as encoding. This especially impacts your databases and has to be defined upfront as it typically can not be changed (easily) afterwards.

    +
  • +
  • +

    Do not cast from byte to char (unicode characters can be composed of multiple bytes, such cast may only work for ASCII characters)

    +
  • +
  • +

    Never convert the case of a String using the default locale (esp. when writing generic code like in devonfw). E.g. if you do "HI".toLowerCase() and your system locale is Turkish, then the output will be "hı" instead of "hi", which can lead to wrong assumptions and serious problems. If you want to do a "universal" case conversion always explicitly use an according western locale (e.g. toLowerCase(Locale.US)). Consider using a helper class (see e.g. CaseHelper) or create your own little static utility for that in your project.

    +
  • +
  • +

    Write your code independent from the default encoding (system property file.encoding) - this will most likely differ in JUnit from production environment

    +
    +
      +
    • +

      Always provide an encoding when you create a String from byte[]: new String(bytes, encoding)

      +
    • +
    • +

      Always provide an encoding when you create a Reader or Writer : new InputStreamReader(inStream, encoding)

      +
    • +
    +
    +
  • +
+
+
+
+

Prefer general API

+
+

Avoid unnecessary strong bindings:

+
+
+
    +
  • +

    Do not bind your code to implementations such as Vector or ArrayList instead of List

    +
  • +
  • +

    In APIs for input (=parameters) always consider to make little assumptions:

    +
    +
      +
    • +

      prefer Collection over List or Set where the difference does not matter (e.g. only use Set when you require uniqueness or highly efficient contains)

      +
    • +
    • +

      consider preferring Collection<? extends Foo> over Collection<Foo> when Foo is an interface or super-class

      +
    • +
    +
    +
  • +
+
+
+
+

Prefer primitive boolean

+
+

Unless in rare cases you need to allow a flag to be null, avoid using the object type Boolean.

+
+
+
+
// bad
+public Boolean isEmpty() {
+  return size() == 0;
+}
+
+
+
+

Instead always use the primitive boolean type:

+
+
+
+
// fine
+public boolean isEmpty() {
+  return size() == 0;
+}
+
+
+
+

The only known excuse is for flags in embeddable types due to limitations of hibernate.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/coding-tools.html b/docs/devonfw.github.io/1.0/devon4j.wiki/coding-tools.html new file mode 100644 index 00000000..dcdd6876 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/coding-tools.html @@ -0,0 +1,331 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Tools

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Development Tools used for devon4j
TopicDetailSuggested Tool

build-management

*

maven

IDE

IDE

Eclipse

IDE

setup & update

devonfw-ide

IDE

code generation

CobiGen

Testing

Unit-Testing

JUnit

Testing

Mocking

Mockito & WireMock

Testing

Integration-Testing

spring-test (arquillian for JEE)

Testing

End-to-end

MrChecker

Quality

Code-Analysis

SonarQube

+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/decision-between-Spring-and-Quarkus.html b/docs/devonfw.github.io/1.0/devon4j.wiki/decision-between-Spring-and-Quarkus.html new file mode 100644 index 00000000..fdc33384 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/decision-between-Spring-and-Quarkus.html @@ -0,0 +1,419 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Decision between Spring and Quarkus

+
+
+

Spring

+
+
+

Pros

+
+
    +
  • +

    highly flexible: +Spring is famous for its great flexibility. You can customize and integrate nearly everything.

    +
  • +
  • +

    well established: +While JEE application servers including very expensive commercial products turned out to be a dead-end, spring has guided projects through the changing trends of IT throughout decades. +It may be the framework with the longest history track and popularity. +As a result you can easily find a lot of developers, experts, books, articles, etc. about spring.

    +
  • +
  • +

    non-invasive and not biased: +Spring became famous for its non-invasive coding based on patterns instead of hard dependencies. It gives you a lot of freedom and avoids tight coupling of your (business) code.

    +
  • +
+
+
+
+

Cons

+
+
    +
  • +

    history and legacy: +Due to its long established history, spring carries a lot of legacy. +As a result there are many ways to do the same thing, which can be encouraging or confusing at first. +Spring Developers need some guidance (e.g. via devon4j) as they may enter pitfalls and dead-ends when choosing the first solution they found on google or stackoverflow.

    +
  • +
  • +

    lost lead in cloud-native: +While for the last decades spring was leading innovation in Java app development, it seems that with the latest trends and shift such as cloud-native, they have been overtaken by frameworks like Quarkus. +However, spring is trying to catch up with spring-native.

    +
  • +
+
+
+
+
+
+

Quarkus

+
+
+

Quarkus main information:

+
+

Quarkus is a full-stack, Kubernetes-native Java framework made for JVMs. +With its container-first-philosophy and its native compilation with GraalVM, Quarkus optimizes Java for containers with low memory usage and fast startup times.

+
+
+

Quarkus achieves this in the following ways:

+
+
+
    +
  • +

    First Class Support for GraalVM

    +
  • +
  • +

    Build Time Metadata Processing: As much processing as possible is done at build time, so your application will only contain the classes that are actually needed at runtime. This results in less memory usage, and also faster startup time, as all metadata processing has already been done.

    +
  • +
  • +

    Reduction in Reflection Usage: Quarkus tries to avoid reflection as much as possible in order to reduce startup time and memory usage.

    +
  • +
  • +

    Native Image Pre Boot: When running in a native image, Quarkus pre-boots as much of the framework as possible during the native image build process. +This means that the resulting native image has already run most of the startup code and serialized the result into the executable, resulting in an even faster startup-time.

    +
  • +
+
+
+

This gives Quarkus the potential for a great platform for serverless cloud and Kubernetes environments. +For more information about Quarkus and its support for devonfw please refer to the Quarkus introduction guide.

+
+
+

Although Quarkus has been released to a stable release in early 2021, it has been already used in multiple big projects successfully showing its potential to implement cloud native services with low resource consumption matching the needs of scalability and resilience in cloud native environments. +With major stakeholders behind the open source community like Red Hat, its development and growth from its kickoff to the current state is very impressive and really shows the market needs and focus. +Another big advantage of Quarkus is that it started on a green field and therefore did not need to circumvent main pillars of the spring framework like reflection, being able to take clean and up-to-date design decisions not needing to cope with legacy issues. +Nonetheless, there is experimental support also for some spring libraries already available in Quarkus, which makes switching from spring to Quarkus much easier if needed. +We also provide a guide for Spring developers who want to adopt or try Quarkus for their (next) projects as it really has some gamechanging advantages over Spring.

+
+
+
+

Pros:

+
+
    +
  • +

    fast turn-around cycles for developers: Save changes in your Java code and immediately test the results without restarting or waiting

    +
  • +
  • +

    faster start-up and less memory footprint: +When building your app as native-images via GraalVM it gets highly optimized. As a result it starts up lightning fast and consumes much less memory. +This is a great advantage for cloud deployment as well as for sustainability. +You can find a performance comparison between Spring and Quarkus here.

    +
  • +
  • +

    clean and lean: As Quarkus was born as cloud-native framework it is very light-weight and does not carry much history and legacy.

    +
  • +
+
+
+
+

Cons:

+
+
    +
  • +

    less flexible: +Quarkus is less flexible compared to spring or in other words it is more biased and coupled to specific implementations. However, the implementations just work and you have fewer things to choose and worry about. +However, in case you want to integrate a specific or custom library you may hit limitations or lose support for native-images especially when that library is based on reflection. +Therefore, check your requirements and technology stack early on when making your choice.

    +
  • +
  • +

    less established: +Since Quarkus was born in 2019 it is modern but also less established. It will be easier to get developers for spring but we already consider Quarkus mature and established enough for building production ready apps.

    +
  • +
+
+
+
+
+
+

General Recommendation

+
+
+

One essential differentiation has to be made on the decision for native or against native applications - the foreseen performance optimization of the JIT compiler of the JVM, which is not available anymore in a native image deployment. +Depending on the overall landscape, it is recommended to stay with the knowledge of the available teams, e.g. continue making use of devon4j based on spring or even if already in that state, make use of Quarkus on JVM.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/decision-service-framework.html b/docs/devonfw.github.io/1.0/devon4j.wiki/decision-service-framework.html new file mode 100644 index 00000000..4660c4c1 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/decision-service-framework.html @@ -0,0 +1,337 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

==Decision Sheet for Choosing a Service Framework

+
+
+

We need to choose which framework(s) we want to use for building services. For the devonfw, we focus on a standard API, if available. However, we also want to recommend an implementation. While projects would still be able to choose whatever they want, we want to suggest the best, most robust, and established solution. This way, projects do not have to worry about the decision and can rely on a production-ready framework without running into any trouble. Also, besides the standard, the configuration of the implementation framework differs, so we want to give instructions in the documentation and by our sample application. This is why, in the end, the implementation also matters. If a project has a customer demand to use something else, the project has to take care of it. We will always suggest and "support" ONE solution.

+
+
+

REST Services

+
+
+

For REST services, devonfw relies on the JAX-RS standard (and NOT on spring-mvc with its proprietary annotations). JAX-RS (Jakarta RESTful Web Services) is a Java programming language API to develop web services following the Representational State Transfer (REST) architectural pattern. +For Apache CXF, the spring container was the first choice, but container abstraction has been properly introduced by design, so it can be used in JEE application servers. Apache CXF is a services framework that helps to build and develop services using frontend programming APIs, such as JAX-RS. Everything works smoothly in our sample application, and in addition, we collected feedback from various projects utilizing CXF, either with XML or JSON, with reported success in production. Therefore, we decided to use Apache CXF for Spring. +For Quarkus applications, devon4j recommends to use RESTEasy, which is a JAX-RS implementation aimed at providing productivity frameworks for developing client and server RESTful applications and services in Java.

+
+
+
+
+

WebServices

+
+
+

For WebServices we rely on the JAX-WS standard. On our short list we have Metro2 and Apache CXF. Here a collection of facts and considerations:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 1. Decision for JAX-WS implementation
Metro2Apache CXF

Pro

- reference implementation
+- proven in many projects
+- standard in RF

- supports both JAX-WS and JAX-RS therefore consistent configuration, single integration into servlet-container and spring
+- proven in a lot of projects
+- already chosen by devonfw for JAX-RS (so we already have a JAX-WS implementation on board).

Contra

- We expect trouble if we use the planned URL path scheme <app>/services/(rest|ws)/... as CXF and Metro2 would both occupy services/*
+- ugly endorsed trouble and small spring-integration issues with WSDL/XSD link resolution (input from Martin Girschik)

- IMHO currently used in less projects than metro2 so less existing experience

+
+

See also +http://predic8.de/axis2-cxf-jax-ws-vergleich.htm +We also had an evaluation at CSD research on CXF vs. Axis2 vs. Metro that suggested CXF.

+
+
+

BTW: Axis(2) is definitely out of discussion for devonfw.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/devon4j-architecture.html b/docs/devonfw.github.io/1.0/devon4j.wiki/devon4j-architecture.html new file mode 100644 index 00000000..c0d41f68 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/devon4j-architecture.html @@ -0,0 +1,641 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==Architecture

+
+ +
+

==Architecture

+
+
+

There are many different views that are summarized by the term architecture. First, we will introduce the key principles and architecture principles of devonfw. Then, we will go into details of the architecture of an application.

+
+
+

Key Principles

+
+

For devonfw we follow these fundamental key principles for all decisions about architecture, design, or choosing standards, libraries, and frameworks:

+
+
+
    +
  • +

    KISS
    +Keep it small and simple

    +
  • +
  • +

    Open
    +Commitment to open standards and solutions (no required dependencies to commercial or vendor-specific standards or solutions)

    +
  • +
  • +

    Patterns
    +We concentrate on providing patterns, best-practices and examples rather than writing framework code.

    +
  • +
  • +

    Solid
    +We pick solutions that are established and have been proven to be solid and robust in real-life (business) projects.

    +
  • +
+
+
+
+

Architecture Principles

+
+

Additionally we define the following principles that our architecture is based on:

+
+
+
    +
  • +

    Component Oriented Design
    +We follow a strictly component oriented design to address the following sub-principles:

    +
    +
      +
    • +

      Separation of Concerns

      +
    • +
    • +

      Reusability and avoiding redundant code

      +
    • +
    • +

      Information Hiding via component API and its exchangeable implementation treated as secret.

      +
    • +
    • +

      Design by Contract for self-contained, descriptive, and stable component APIs.

      +
    • +
    • +

      Layering as well as separation of business logic from technical code for better maintenance.

      +
    • +
    • +

      Data Sovereignty (and high cohesion with low coupling) says that a component is responsible for its data and changes to this data shall only happen via the component. Otherwise, maintenance problems will arise to ensure that data remains consistent. Therefore, interfaces of a component that may be used by other components are designed call-by-value and not call-by-reference.

      +
    • +
    +
    +
  • +
  • +

    Homogeneity
    +Solve similar problems in similar ways and establish a uniform code-style.

    +
  • +
+
+
+

As an architect you should be prepared for the future by reading the TechnoVision.

+
+
+
+

Application Architecture

+
+

For the architecture of an application we distinguish the following views:

+
+
+
    +
  • +

    The Business Architecture describes an application from the business perspective. It divides the application into business components and with full abstraction of technical aspects.

    +
  • +
  • +

    The Technical Architecture describes an application from the technical implementation perspective. It divides the application into technical layers and defines which technical products and frameworks are used to support these layers.

    +
  • +
  • +

    The Infrastructure Architecture describes an application from the operational infrastructure perspective. It defines the nodes used to run the application including clustering, load-balancing and networking. This view is not explored further in this guide.

    +
  • +
+
+
+

Business Architecture

+
+

The business architecture divides the application into business components. A business component has a well-defined responsibility that it encapsulates. All aspects related to that responsibility have to be implemented within that business component. Further, the business architecture defines the dependencies between the business components. These dependencies need to be free of cycles. A business component exports its functionality via well-defined interfaces as a self-contained API. A business component may use another business component via its API and compliant with the dependencies defined by the business architecture.

+
+
+

As the business domain and logic of an application can be totally different, the devonfw can not define a standardized business architecture. Depending on the business domain it has to be defined from scratch or from a domain reference architecture template. For very small systems it may be suitable to define just a single business component containing all the code.

+
+
+
+

Technical Architecture

+
+

The technical architecture divides the application into technical layers based on the multilayered architecture. A layer is a unit of code with the same category such as a service or presentation logic. So, a layer is often supported by a technical framework. Each business component can therefore be split into component parts for each layer. However, a business component may not have component parts for every layer (e.g. only a presentation part that utilizes logic from other components).

+
+
+

An overview of the technical reference architecture of the devonfw is given by figure "Technical Reference Architecture". +It defines the following layers visualized as horizontal boxes:

+
+
+ +
+
+

Also, you can see the (business) components as vertical boxes (e.g. A and X) and how they are composed out of component parts each one assigned to one of the technical layers.

+
+
+

Further, there are technical components for cross-cutting aspects grouped by the gray box on the left. Here is a complete list:

+
+ +
+
+devonfw architecture blueprint +
+
Figure 1. Technical Reference Architecture
+
+
+

Please click on the architecture image to open it as SVG and click on the layers and cross-cutting topics to open the according documentation guide.

+
+
+

We reflect this architecture in our code as described in our coding conventions allowing a traceability of business components, use-cases, layers, etc. into the code and giving +developers a sound orientation within the project.

+
+
+

Further, the architecture diagram shows the allowed dependencies illustrated by the dark green connectors. +Within a business component a component part can call the next component part on the layer directly below via a dependency on its API (vertical connectors). +While this is natural and obvious, it is generally forbidden to have dependencies upwards the layers +or to skip a layer by a direct dependency on a component part two or more layers below. +The general dependencies allowed between business components are defined by the business architecture. +In our reference architecture diagram we assume that the business component A1 is allowed to depend +on component A2. Therefore, a use-case within the logic component part of A1 is allowed to call a +use-case from A2 via a dependency on the component API. The same applies for dialogs on the client layer. +This is illustrated by the horizontal connectors. Please note that persistence entities are part of the API of the data-access component part so only the logic component part of the same +business component may depend on them.

+
+
+

The technical architecture has to address non-functional requirements:

+
+
+
    +
  • +

    scalability
    +is established by keeping state in the client and making the server state-less (except for login session). Via load-balancers new server nodes can be added to improve performance (horizontal scaling).

    +
  • +
  • +

    availability and reliability
    +are addressed by clustering with redundant nodes avoiding any single-point-of failure. If one node fails the system is still available. Further, the software has to be robust so there are no dead-locks or other bad effects that can make the system unavailable or not reliable.

    +
  • +
  • +

    security
    +is achieved in the devonfw by the right templates and best-practices that avoid vulnerabilities. See security guidelines for further details.

    +
  • +
  • +

    performance
    +is obtained by choosing the right products and proper configurations. While the actual implementation of the application matters for performance a proper design is important as it is the key to allow performance-optimizations (see e.g. caching).

    +
  • +
+
+
+
Technology Stack
+
+

The technology stack of the devonfw is illustrated by the following table.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Technology Stack of devonfw
TopicDetailStandardSuggested implementation

runtime

language & VM

Java

Oracle JDK

runtime

servlet-container

JEE

tomcat

component management

dependency injection

JSR330 & JSR250

spring

configuration

framework

-

spring-boot

persistence

OR-mapper

JPA

hibernate

batch

framework

JSR352

spring-batch

service

SOAP services

JAX-WS

CXF

service

REST services

JAX-RS

CXF

logging

framework

slf4j

logback

validation

framework

beanvalidation/JSR303

hibernate-validator

security

Authentication & Authorization

JAAS

spring-security

monitoring

framework

JMX

spring

monitoring

HTTP Bridge

HTTP & JSON

jolokia

AOP

framework

dynamic proxies

spring AOP

+
+ +
+

==Components

+
+
+

Following separation-of-concerns we divide an application into components using our package-conventions and project structure. +As described by the architecture each component is divided into layers as described in the project structure. +Please note that a component will only have the required layers. +So a component may have any number from one to all layers.

+
+
+
+
+

General Component

+
+

Cross-cutting aspects belong to the implicit component general. It contains technical configurations and very general code that is not business specific. Such code shall not have any dependencies to other components and therefore business related code.

+
+
+
+

Business Component

+
+

The business-architecture defines the business components with their allowed dependencies. A small application (microservice) may just have one component and no dependencies making it simple while the same architecture can scale up to large and complex applications (from bigger microservice up to modulith). +Tailoring a business domain into applications and applications into components is a tricky task that needs the skills of an experienced architect. +Also, the tailoring should follow the business and not split by technical reasons or only by size. +Size is only an indicator but not a driver of tailoring. +Whatever hypes like microservices are telling you, never get misled in this regard: +If your system grows and reaches MAX+1 lines of code, it is not the right motivation to split it into two microservices of ~MAX/2 lines of code - such approaches will waste huge amounts of money and lead to chaos.

+
+
+
+

App Component

+
+

Only in case you need cross-cutting code that aggregates another component you may introduce the component app. +It is allowed to depend on all other components but no other component may depend on it. +With the modularity and flexibility of spring you typically do not need this. +However, when you need to have a class that registers all services or component-facades using direct code dependencies, you can introduce this component.

+
+
+
+

Component Example

+
+

The following class diagram illustrates an example of the business component Staffmanagement:

+
+
+
+logic layer component pattern +
+
+
+

In this scheme, you can see the structure and flow from the service-layer (REST service call) via the logic-layer to the dataaccess-layer (and back).

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/devon4j-coding.html b/docs/devonfw.github.io/1.0/devon4j.wiki/devon4j-coding.html new file mode 100644 index 00000000..5f1449fe --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/devon4j-coding.html @@ -0,0 +1,832 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==Coding Conventions

+
+ +
+

==Coding Conventions

+
+
+

The code should follow general conventions for Java (see Oracle Naming Conventions, Google Java Style, etc.). We consider this as common sense and provide configurations for SonarQube and related tools such as Checkstyle instead of repeating this here.

+
+
+

Naming

+
+

Besides general Java naming conventions, we follow the additional rules listed here explicitly:

+
+
+
    +
  • +

    Always use short but speaking names (for types, methods, fields, parameters, variables, constants, etc.).

    +
  • +
  • +

    Strictly avoid special characters in technical names (for files, types, fields, methods, properties, variables, database tables, columns, constraints, etc.). In other words only use Latin alphanumeric ASCII characters with the common allowed technical separators for the according context (e.g. underscore) for technical names (even excluding whitespaces).

    +
  • +
  • +

    For package segments and type names prefer singular forms (CustomerEntity instead of CustomersEntity). Only use plural forms when there is no singular or it is really semantically required (e.g. for a container that contains multiple of such objects).

    +
  • +
  • +

    Avoid having duplicate type names. The name of a class, interface, enum or annotation should be unique within your project unless this is intentionally desired in a special and reasonable situation.

    +
  • +
  • +

    Avoid artificial naming constructs such as prefixes (I*) or suffixes (*IF) for interfaces.

    +
  • +
  • +

    Use CamelCase even for abbreviations (XmlUtil instead of XMLUtil)

    +
  • +
  • +

    Avoid property/field names where the second character is upper-case at all (e.g. 'aBc'). See #1095 for details.

    +
  • +
  • +

    Names of Generics should be easy to understand. Where suitable follow the common rule E=Element, T=Type, K=Key, V=Value but feel free to use longer names for more specific cases such as ID, DTO or ENTITY. The capitalized naming helps to distinguish a generic type from a regular class.

    +
  • +
+
+
+
+

Packages

+
+

Java Packages are the most important element to structure your code. We use a strict packaging convention to map technical layers and business components (slices) to the code (See technical architecture for further details). By using the same names in documentation and code we create a strong link that gives orientation and makes it easy to find from business requirements, specifications or story tickets into the code and back.

+
+
+

For a devon4j based application we use the following Java-Package schema:

+
+
+
+
«root».«component».«layer»[.«detail»]
+
+
+
+

E.g. in our example application we find the Spring Data repositories for the ordermanagement component in the package com.devonfw.application.mtsj.ordermanagement.dataaccess.api.repo

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Segments of package schema
SegmentDescriptionExample

«root»

Is the basic Java Package name-space of your app. Typically we suggest to use «group».«artifact» where «group» is your maven/gradle groupId corresponding to your organization or IT project owning the code following common Java Package conventions. The segment «artifact» is your maven/gradle artifactId and is typically the technical name of your app.

com.devonfw.application.mtsj

«component»

The (business) component the code belongs to. It is defined by the business architecture and uses terms from the business domain. Use the implicit component general for code not belonging to a specific component (foundation code).

salesmanagement

«layer»

The name of the technical layer (See technical architecture). Details are described for the modern project structure and for the classic project structure.

logic

«detail»

Here you are free to further divide your code into sub-components and other concerns according to the size of your component part. If you want to strictly separate API from implementation you should start «detail» with «scope» that is explained below.

dao

«scope»

The scope which is one of api (official API to be used by other layers or components), base (basic code to be reused by other implementations) and impl (implementation that should never be imported from outside). This segment was initially mandatory but due to trends such as microservices, lean, and agile we decided to make it optional and do not force anybody to use it.

api

+
+

Please note that devon4j library modules for spring use com.devonfw.module as «root» and the name of the module as «component». E.g. the API of our beanmapping module can be found in the package com.devonfw.module.beanmapping.common.api.

+
+
+
+

Code Tasks

+
+

Code spots that need some rework can be marked with the following task tags. These are already properly pre-configured in your development environment for auto completion and to view tasks you are responsible for. It is important to keep the number of code tasks low. Therefore, every member of the team should be responsible for the overall code quality. So if you change a piece of code and hit a code task that you can resolve in a reliable way, please do this as part of your change and remove the according tag.

+
+
+

TODO

+
+

Used to mark a piece of code that is not yet complete (typically because it can not be completed due to a dependency on something that is not ready).

+
+
+
+
 // TODO «author» «description»
+
+
+
+

A TODO tag is added by the author of the code who is also responsible for completing this task.

+
+
+
+

FIXME

+
+
+
 // FIXME «author» «description»
+
+
+
+

A FIXME tag is added by the author of the code or someone who found a bug he can not fix right now. The «author» who added the FIXME is also responsible for completing this task. This is very similar to a TODO but with a higher priority. FIXME tags indicate problems that should be resolved before a release is completed while TODO tags might have to stay for a longer time.

+
+
+
+

REVIEW

+
+
+
 // REVIEW «responsible» («reviewer») «description»
+
+
+
+

A REVIEW tag is added by a reviewer during a code review. Here the original author of the code is responsible to resolve the REVIEW tag and the reviewer is assigning this task to him. This is important for feedback and learning and has to be aligned with a review "process" where people talk to each other and get into discussion. In smaller or local teams a peer-review is preferable but this does not scale for large or even distributed teams.

+
+
+
+
+

Code-Documentation

+
+

As a general goal, the code should be easy to read and understand. Besides clear naming, the documentation is important. We follow these rules:

+
+
+
    +
  • +

    APIs (especially component interfaces) are properly documented with JavaDoc.

    +
  • +
  • +

    JavaDoc shall provide actual value - we do not write JavaDoc to satisfy tools such as checkstyle but to express information not already available in the signature.

    +
  • +
  • +

    We make use of {@link} tags in JavaDoc to make it more expressive.

    +
  • +
  • +

    JavaDoc of APIs describes how to use the type or method and not how the implementation internally works.

    +
  • +
  • +

    To document implementation details, we use code comments (e.g. // we have to flush explicitly to ensure version is up-to-date). This is only needed for complex logic.

    +
  • +
  • +

    Avoid the pointless {@inheritDoc} as since Java 1.5 there is the @Override annotation for overridden methods and your JavaDoc is inherited automatically even without any JavaDoc comment at all.

    +
  • +
+
+
+
+

Code-Style

+
+

This section gives you best practices to write better code and avoid pitfalls and mistakes.

+
+
+

BLOBs

+
+

Avoid using byte[] for BLOBs as this will load them entirely into your memory. This will cause performance issues or out of memory errors. Instead, use streams when dealing with BLOBs. For further details see BLOB support.

+
+
+
+

Stateless Programming

+
+

When implementing logic as components or beans of your container using dependency injection, we strongly encourage stateless programming. +This is not about data objects like an entity or transfer-object that are stateful by design. +Instead this applies to all classes annotated with @Named, @ApplicationScoped, @Stateless, etc. and all their super-classes. +These classes especially include your repositories, use-cases, and REST services. +Such classes shall never be modified after initialization. +Methods called at runtime (after initialization via the container) do not assign fields (member variables of your class) or mutate the object stored in a field. +This allows your component or bean to be stateless and thread-safe. +Therefore it can be initialized as a singleton so only one instance is created and shared across all threads of the application. +Here is an example:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcApproveContractImpl implements UcApproveContract {
+
+  // bad
+  private String contractOwner;
+
+  private MyState state;
+
+  @Override
+  public void approve(Contract contract) {
+    this.contractOwner = contract.getOwner();
+    this.contractOwner = this.contractOwner.toLowerCase(Locale.US);
+    this.state.setAdmin(this.contractOwner.endsWith("admin"));
+    if (this.state.isAdmin()) {
+      ...
+    } else {
+      ...
+    }
+  }
+
+  // fine
+  @Override
+  public void approveContract(Contract contract) {
+    String contractOwner = contract.getOwner().toLowerCase(Locale.US);
+    if (contractOwner.endsWith("admin")) {
+      ...
+    } else {
+      ...
+    }
+  }
+}
+
+
+
+

As you can see in the bad code, fields of the class are assigned when the method approve is called. +So multiple users and therefore threads calling this method concurrently can interfere and override this state causing side-effects on parallel threads. +This will lead to nasty bugs and errors that are hard to trace down. +They will not occur in simple tests but for sure in production with real users. +Therefore never do this and implement your functionality stateless. +That is keeping all state in local variables and strictly avoid modifying fields or their value as illustrated in the fine code. +If you find yourself passing many parameters between methods that all represent state, you can easily create a separate class that encapsulates this state. +However, then you need to create this state object in your method as local variable and pass it between methods as parameter:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcApproveContractImpl implements UcApproveContract {
+
+  // fine
+  @Override
+  public void approveContract(Contract contract) {
+    String contractOwner = contract.getOwner().toLowerCase(Locale.US);
+    MyState state = new MyState();
+    state.setAdmin(contractOwner.endsWith("admin"));
+    doApproveContract(contract, state);
+  }
+}
+
+
+
+
+

Closing Resources

+
+

Resources such as streams (InputStream, OutputStream, Reader, Writer) or transactions need to be handled properly. Therefore, it is important to follow these rules:

+
+
+
    +
  • +

    Each resource has to be closed properly, otherwise you will get out of file handles, TX sessions, memory leaks or the like

    +
  • +
  • +

    Where possible avoid to deal with such resources manually. That is why we are recommending @Transactional for transactions in devonfw (see Transaction Handling).

    +
  • +
  • +

    In case you have to deal with resources manually (e.g. binary streams) ensure to close them properly. See the example below for details.

    +
  • +
+
+
+

Closing streams and other such resources is error prone. Have a look at the following example:

+
+
+
+
// bad
+try {
+  InputStream in = new FileInputStream(file);
+  readData(in);
+  in.close();
+} catch (IOException e) {
+  throw new IllegalStateException("Failed to read data.", e);
+}
+
+
+
+

The code above is wrong as in case of an IOException the InputStream is not properly closed. In a server application such mistakes can cause severe errors that typically will only occur in production. As such resources implement the AutoCloseable interface you can use the try-with-resource syntax to write correct code. The following code shows a correct version of the example:

+
+
+
+
// fine
+try (InputStream in = new FileInputStream(file)) {
+  readData(in);
+} catch (IOException e) {
+  throw new IllegalStateException("Failed to read data.", e);
+}
+
+
+
+
+

Catching and handling Exceptions

+
+

When catching exceptions always ensure the following:

+
+
+
    +
  • +

    Never call printStackTrace() method on an exception

    +
  • +
  • +

    Either log or wrap and re-throw the entire caught exception. Be aware that the cause(s) of an exception is very valuable information. If you lose such information by improper exception-handling you may be unable to properly analyse production problems, which can cause severe issues.

    +
    +
      +
    • +

      If you wrap and re-throw an exception ensure that the caught exception is passed as cause to the newly created and thrown exception.

      +
    • +
    • +

      If you log an exception ensure that the entire exception is passed as argument to the logger (and not only the result of getMessage() or toString() on the exception).

      +
    • +
    +
    +
  • +
  • +

    See exception handling

    +
  • +
+
+
+
+

Lambdas and Streams

+
+

With Java8 you have cool new features like lambdas and monads (like Stream, CompletableFuture, Optional, etc.). +However, these new features can also be misused or lead to code that is hard to read or debug. To avoid pain, we give you the following best practices:

+
+
+
    +
  1. +

    Learn how to use the new features properly before using. Developers are often keen on using cool new features. When you do your first experiments in your project code you will cause deep pain and might be ashamed afterwards. Please study the features properly. Even Java8 experts still write for loops to iterate over collections, so only use these features where it really makes sense.

    +
  2. +
  3. +

    Streams shall only be used in fluent API calls as a Stream can not be forked or reused.

    +
  4. +
  5. +

    Each stream has to have exactly one terminal operation.

    +
  6. +
  7. +

    Do not write multiple statements into lambda code:

    +
    +
    +
    // bad
    +collection.stream().map(x -> {
    +Foo foo = doSomething(x);
    +...
    +return foo;
    +}).collect(Collectors.toList());
    +
    +
    +
    +

    This style makes the code hard to read and debug. Never do that! Instead, extract the lambda body to a private method with a meaningful name:

    +
    +
    +
    +
    // fine
    +collection.stream().map(this::convertToFoo).collect(Collectors.toList());
    +
    +
    +
  8. +
  9. +

    Do not use parallelStream() in general code (that will run on server side) unless you know exactly what you are doing and what is going on under the hood. Some developers might think that using parallel streams is a good idea as it will make the code faster. However, if you want to do performance optimizations talk to your technical lead (architect). Many features such as security and transactions will rely on contextual information that is associated with the current thread. Hence, using parallel streams will most probably cause serious bugs. Only use them for standalone (CLI) applications or for code that is just processing large amounts of data.

    +
  10. +
  11. +

    Do not perform operations on a sub-stream inside a lambda:

    +
    +
    +
    set.stream().flatMap(x -> x.getChildren().stream().filter(this::isSpecial)).collect(Collectors.toList()); // bad
    +set.stream().flatMap(x -> x.getChildren().stream()).filter(this::isSpecial).collect(Collectors.toList()); // fine
    +
    +
    +
  12. +
  13. +

    Only use collect at the end of the stream:

    +
    +
    +
    set.stream().collect(Collectors.toList()).forEach(...) // bad
    +set.stream().peek(...).collect(Collectors.toList()) // fine
    +
    +
    +
  14. +
  15. +

    Lambda parameters with Types inference

    +
    +
    +
    (String a, Float b, Byte[] c) -> a.toString() + Float.toString(b) + Arrays.toString(c)  // bad
    +(a,b,c)  -> a.toString() + Float.toString(b) + Arrays.toString(c)  // fine
    +
    +Collections.sort(personList, (Person p1, Person p2) -> p1.getSurName().compareTo(p2.getSurName()));  // bad
    +Collections.sort(personList, (p1, p2) -> p1.getSurName().compareTo(p2.getSurName()));  // fine
    +
    +
    +
  16. +
  17. +

    Avoid Return Braces and Statement

    +
    +
    +
     a ->  { return a.toString(); } // bad
    + a ->  a.toString();   // fine
    +
    +
    +
  18. +
  19. +

    Avoid Parentheses with Single Parameter

    +
    +
    +
    (a) -> a.toString(); // bad
    + a -> a.toString();  // fine
    +
    +
    +
  20. +
  21. +

    Avoid if/else inside foreach method. Use Filter method & comprehension

    +
    +
    +
    // bad
    +public static List<String> twitterHandles(List<Author> authors, String company) {
    +    final List<String> result = new ArrayList<>();
    +    for (Author a : authors) {
    +      if (a.getCompany().equals(company)) {
    +        String handle = a.getTwitterHandle();
    +        if (handle != null)
    +          result.add(handle);
    +      }
    +    }
    +    return result;
    +  }
    +
    +
    +
    +
    +
    // fine
    +public List<String> twitterHandles(List<Author> authors, String company) {
    +    return authors.stream()
    +            .filter(a -> null != a && a.getCompany().equals(company))
    +            .map(a -> a.getTwitterHandle())
    +            .collect(toList());
    +  }
    +
    +
    +
  22. +
+
+
+
+

Optionals

+
+

With Optional you can wrap values to avoid a NullPointerException (NPE). However, it is not a good code-style to use Optional for every parameter or result to express that it may be null. For such cases use @Nullable or, even better, annotate @NotNull where null is not acceptable.

+
+
+

However, Optional can be used to prevent NPEs in fluent calls (due to the lack of the elvis operator):

+
+
+
+
Long id;
+id = fooCto.getBar().getBar().getId(); // may cause NPE
+id = Optional.ofNullable(fooCto).map(FooCto::getBar).map(BarCto::getBar).map(BarEto::getId).orElse(null); // null-safe
+
+
+
+
+

Encoding

+
+

Encoding (esp. Unicode with combining characters and surrogates) is a complex topic. Please study this topic if you have to deal with encodings and processing of special characters. For the basics follow these recommendations:

+
+
+
    +
  • +

    Whenever possible prefer unicode (UTF-8 or better) as encoding. This especially impacts your databases and has to be defined upfront as it typically can not be changed (easily) afterwards.

    +
  • +
  • +

    Do not cast from byte to char (unicode characters can be composed of multiple bytes, such cast may only work for ASCII characters)

    +
  • +
  • +

    Never convert the case of a String using the default locale (esp. when writing generic code like in devonfw). E.g. if you do "HI".toLowerCase() and your system locale is Turkish, then the output will be "hı" instead of "hi", which can lead to wrong assumptions and serious problems. If you want to do a "universal" case conversion always explicitly use an according western locale (e.g. toLowerCase(Locale.US)). Consider using a helper class (see e.g. CaseHelper) or create your own little static utility for that in your project.

    +
  • +
  • +

    Write your code independent from the default encoding (system property file.encoding) - this will most likely differ in JUnit from production environment

    +
    +
      +
    • +

      Always provide an encoding when you create a String from byte[]: new String(bytes, encoding)

      +
    • +
    • +

      Always provide an encoding when you create a Reader or Writer : new InputStreamReader(inStream, encoding)

      +
    • +
    +
    +
  • +
+
+
+
+

Prefer general API

+
+

Avoid unnecessary strong bindings:

+
+
+
    +
  • +

    Do not bind your code to implementations such as Vector or ArrayList instead of List

    +
  • +
  • +

    In APIs for input (=parameters) always consider to make little assumptions:

    +
    +
      +
    • +

      prefer Collection over List or Set where the difference does not matter (e.g. only use Set when you require uniqueness or highly efficient contains)

      +
    • +
    • +

      consider preferring Collection<? extends Foo> over Collection<Foo> when Foo is an interface or super-class

      +
    • +
    +
    +
  • +
+
+
+
+

Prefer primitive boolean

+
+

Unless in rare cases where you need to allow a flag being null avoid using the object type Boolean.

+
+
+
+
// bad
+public Boolean isEmpty() {
+  return size() == 0;
+}
+
+
+
+

Instead always use the primitive boolean type:

+
+
+
+
// fine
+public boolean isEmpty() {
+  return size() == 0;
+}
+
+
+
+

The only known excuse is for flags in embeddable types due to limitations of hibernate.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/devon4j-doc.html b/docs/devonfw.github.io/1.0/devon4j.wiki/devon4j-doc.html new file mode 100644 index 00000000..5f251ae4 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/devon4j-doc.html @@ -0,0 +1,12708 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==Java

+
+
+

The devonfw community +${project.version}, ${buildtime}

+
+
+

devonfw provides a solution to building applications which combine best-in-class frameworks and libraries as well as industry proven practices and code conventions. +It massively speeds up development, reduces risks and helps you to deliver better results.

+
+
+

The following sections contain the complete compendium of devon4j, the Java stack of devonfw. +With devon4j we support both spring and quarkus as major frameworks. +However, the general coding patterns are based on common Java standards mainly from Jakarta EE and therefore do not differ between those frameworks. +Therefore, the general section contains all the documentation that is universal to Java and does not differ between the two frameworks. +Only the sections spring and quarkus contain documentation that is specific to the respective approach.

+
+
+

If you’re trying to decide which of the two frameworks to use, have a look at this guide.

+
+
+

You can also read the latest version of this documentation online at the following sources:

+
+ +
+

1. General

+
+
+

Here you will find documentation and code-patterns for developing with Java in general, independent of the framework you choose.

+
+ +
+

==Architecture

+
+
+

There are many different views that are summarized by the term architecture. First, we will introduce the key principles and architecture principles of devonfw. Then, we will go into details of the architecture of an application.

+
+
+

1.1. Key Principles

+
+

For devonfw we follow these fundamental key principles for all decisions about architecture, design, or choosing standards, libraries, and frameworks:

+
+
+
    +
  • +

    KISS
    +Keep it small and simple

    +
  • +
  • +

    Open
    +Commitment to open standards and solutions (no required dependencies to commercial or vendor-specific standards or solutions)

    +
  • +
  • +

    Patterns
    +We concentrate on providing patterns, best-practices and examples rather than writing framework code.

    +
  • +
  • +

    Solid
    +We pick solutions that are established and have been proven to be solid and robust in real-life (business) projects.

    +
  • +
+
+
+
+

1.2. Architecture Principles

+
+

Additionally we define the following principles that our architecture is based on:

+
+
+
    +
  • +

    Component Oriented Design
    +We follow a strictly component oriented design to address the following sub-principles:

    +
    +
      +
    • +

      Separation of Concerns

      +
    • +
    • +

      Reusability and avoiding redundant code

      +
    • +
    • +

      Information Hiding via component API and its exchangeable implementation treated as secret.

      +
    • +
    • +

      Design by Contract for self-contained, descriptive, and stable component APIs.

      +
    • +
    • +

      Layering as well as separation of business logic from technical code for better maintenance.

      +
    • +
    • +

      Data Sovereignty (and high cohesion with low coupling) says that a component is responsible for its data and changes to this data shall only happen via the component. Otherwise, maintenance problems will arise to ensure that data remains consistent. Therefore, interfaces of a component that may be used by other components are designed call-by-value and not call-by-reference.

      +
    • +
    +
    +
  • +
  • +

    Homogeneity
    +Solve similar problems in similar ways and establish a uniform code-style.

    +
  • +
+
+
+

As an architect you should be prepared for the future by reading the TechnoVision.

+
+
+
+

1.3. Application Architecture

+
+

For the architecture of an application we distinguish the following views:

+
+
+
    +
  • +

    The Business Architecture describes an application from the business perspective. It divides the application into business components and with full abstraction of technical aspects.

    +
  • +
  • +

    The Technical Architecture describes an application from the technical implementation perspective. It divides the application into technical layers and defines which technical products and frameworks are used to support these layers.

    +
  • +
  • +

    The Infrastructure Architecture describes an application from the operational infrastructure perspective. It defines the nodes used to run the application including clustering, load-balancing and networking. This view is not explored further in this guide.

    +
  • +
+
+
+
Business Architecture
+
+

The business architecture divides the application into business components. A business component has a well-defined responsibility that it encapsulates. All aspects related to that responsibility have to be implemented within that business component. Further, the business architecture defines the dependencies between the business components. These dependencies need to be free of cycles. A business component exports its functionality via well-defined interfaces as a self-contained API. A business component may use another business component via its API and compliant with the dependencies defined by the business architecture.

+
+
+

As the business domain and logic of an application can be totally different, the devonfw can not define a standardized business architecture. Depending on the business domain it has to be defined from scratch or from a domain reference architecture template. For very small systems it may be suitable to define just a single business component containing all the code.

+
+
+
+
Technical Architecture
+
+

The technical architecture divides the application into technical layers based on the multilayered architecture. A layer is a unit of code with the same category such as a service or presentation logic. So, a layer is often supported by a technical framework. Each business component can therefore be split into component parts for each layer. However, a business component may not have component parts for every layer (e.g. only a presentation part that utilized logic from other components).

+
+
+

An overview of the technical reference architecture of the devonfw is given by figure "Technical Reference Architecture". +It defines the following layers visualized as horizontal boxes:

+
+
+ +
+
+

Also, you can see the (business) components as vertical boxes (e.g. A and X) and how they are composed out of component parts each one assigned to one of the technical layers.

+
+
+

Further, there are technical components for cross-cutting aspects grouped by the gray box on the left. Here is a complete list:

+
+ +
+
+devonfw architecture blueprint +
+
Figure 1. Technical Reference Architecture
+
+
+

Please click on the architecture image to open it as SVG and click on the layers and cross-cutting topics to open the according documentation guide.

+
+
+

We reflect this architecture in our code as described in our coding conventions allowing a traceability of business components, use-cases, layers, etc. into the code and giving +developers a sound orientation within the project.

+
+
+

Further, the architecture diagram shows the allowed dependencies illustrated by the dark green connectors. +Within a business component a component part can call the next component part on the layer directly below via a dependency on its API (vertical connectors). +While this is natural and obvious, it is generally forbidden to have dependencies upwards the layers +or to skip a layer by a direct dependency on a component part two or more layers below. +The general dependencies allowed between business components are defined by the business architecture. +In our reference architecture diagram we assume that the business component A1 is allowed to depend +on component A2. Therefore, a use-case within the logic component part of A1 is allowed to call a +use-case from A2 via a dependency on the component API. The same applies for dialogs on the client layer. +This is illustrated by the horizontal connectors. Please note that persistence entities are part of the API of the data-access component part so only the logic component part of the same +business component may depend on them.

+
+
+

The technical architecture has to address non-functional requirements:

+
+
+
    +
  • +

    scalability
    +is established by keeping state in the client and making the server state-less (except for login session). Via load-balancers new server nodes can be added to improve performance (horizontal scaling).

    +
  • +
  • +

    availability and reliability
    +are addressed by clustering with redundant nodes avoiding any single-point-of failure. If one node fails the system is still available. Further, the software has to be robust so there are no dead-locks or other bad effects that can make the system unavailable or not reliable.

    +
  • +
  • +

    security
    +is achieved in the devonfw by the right templates and best-practices that avoid vulnerabilities. See security guidelines for further details.

    +
  • +
  • +

    performance
    +is obtained by choosing the right products and proper configurations. While the actual implementation of the application matters for performance a proper design is important as it is the key to allow performance-optimizations (see e.g. caching).

    +
  • +
+
+
+
Technology Stack
+
+

The technology stack of the devonfw is illustrated by the following table.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Technology Stack of devonfw
TopicDetailStandardSuggested implementation

runtime

language & VM

Java

Oracle JDK

runtime

servlet-container

JEE

tomcat

component management

dependency injection

JSR330 & JSR250

spring

configuration

framework

-

spring-boot

persistence

OR-mapper

JPA

hibernate

batch

framework

JSR352

spring-batch

service

SOAP services

JAX-WS

CXF

service

REST services

JAX-RS

CXF

logging

framework

slf4j

logback

validation

framework

beanvalidation/JSR303

hibernate-validator

security

Authentication & Authorization

JAAS

spring-security

monitoring

framework

JMX

spring

monitoring

HTTP Bridge

HTTP & JSON

jolokia

AOP

framework

dynamic proxies

spring AOP

+
+ +
+

==Configuration

+
+
+

An application needs to be configurable in order to allow internal setup (like CDI) but also to allow externalized configuration of a deployed package (e.g. integration into runtime environment). We rely on a comprehensive configuration approach following a "convention over configuration" pattern. This guide adds on to this by detailed instructions and best-practices how to deal with configurations.

+
+
+

In general we distinguish the following kinds of configuration that are explained in the following sections:

+
+
+ +
+
+
+
+
+

1.4. Internal Application Configuration

+
+

The application configuration contains all internal settings and wirings of the application (bean wiring, database mappings, etc.) and is maintained by the application developers at development time.

+
+
+

For more detail of Spring stack, see here

+
+
+
+

1.5. Externalized Configuration

+
+

Externalized configuration is a configuration that is provided separately to a deployment package and can be maintained undisturbed by re-deployments.

+
+
+
Environment Configuration
+
+

The environment configuration contains configuration parameters (typically port numbers, host names, passwords, logins, timeouts, certificates, etc.) specific for the different environments. These are under the control of the operators responsible for the application.

+
+
+

The environment configuration is maintained in application.properties files, defining various properties. +These properties are explained in the corresponding configuration sections of the guides for each topic:

+
+
+ +
+
+

Make sure your properties are thoroughly documented by providing a comment to each property. This inline documentation is most valuable for your operating department.

+
+
+

More about structuring your application.properties files can be read here for Spring.

+
+
+

For Quarkus, please refer to Quarkus Config Reference for more details.

+
+
+
+
Business Configuration
+
+

Often applications do not need business configuration. In case they do it should typically be editable by administrators via the GUI. The business configuration values should therefore be stored in the database in key/value pairs.

+
+
+

Therefore we suggest to create a dedicated table with (at least) the following columns:

+
+
+
    +
  • +

    ID

    +
  • +
  • +

    Property name

    +
  • +
  • +

    Property type (Boolean, Integer, String)

    +
  • +
  • +

    Property value

    +
  • +
  • +

    Description

    +
  • +
+
+
+

According to the entries in this table, an administrative GUI may show a generic form to modify business configuration. Boolean values should be shown as checkboxes, integer and string values as text fields. The values should be validated according to their type so an error is raised if you try to save a string in an integer property for example.

+
+
+

We recommend the following base layout for the hierarchical business configuration:

+
+
+

component.[subcomponent].[subcomponent].propertyname

+
+
+
+
+

1.6. Security

+
+

Often you need to have passwords (for databases, third-party services, etc.) as part of your configuration. These are typically environment specific (see above). However, with DevOps and continuous-deployment you might be tempted to commit such configurations into your version-control (e.g. git). Doing that with plain text passwords is a severe problem especially for production systems. Never do that! Instead we offer some suggestions how to deal with sensible configurations:

+
+
+
Password Encryption
+
+

A simple but reasonable approach is to configure the passwords encrypted with a master-password. The master-password should be a strong secret that is specific for each environment. It must never be committed to version-control.

+
+
+

For Spring, we use jasypt-spring-boot. For more details, see here

+
+
+

For Quarkus, see here

+
+
+
Is this Security by Obscurity?
+
+
    +
  • +

    Yes, from the point of view to protect the passwords on the target environment this is nothing but security by obscurity. If an attacker somehow got full access to the machine this will only cause him to spend some more time.

    +
  • +
  • +

    No, if someone only gets the configuration file. So all your developers might have access to the version-control where the config is stored. Others might have access to the software releases that include this configs. But without the master-password that should only be known to specific operators none else can decrypt the password (except with brute-force what will take a very long time, see jasypt for details).

    +
  • +
+
+
+ +
+

==Coding Conventions

+
+
+

The code should follow general conventions for Java (see Oracle Naming Conventions, Google Java Style, etc.). We consider this as common sense and provide configurations for SonarQube and related tools such as Checkstyle instead of repeating this here.

+
+
+
+
+
+

1.7. Naming

+
+

Besides general Java naming conventions, we follow the additional rules listed here explicitly:

+
+
+
    +
  • +

    Always use short but speaking names (for types, methods, fields, parameters, variables, constants, etc.).

    +
  • +
  • +

    Strictly avoid special characters in technical names (for files, types, fields, methods, properties, variables, database tables, columns, constraints, etc.). In other words only use Latin alpahnumeric ASCII characters with the common allowed technical separators for the accordign context (e.g. underscore) for technical names (even excluding whitespaces).

    +
  • +
  • +

    For package segments and type names prefer singular forms (CustomerEntity instead of CustomersEntity). Only use plural forms when there is no singular or it is really semantically required (e.g. for a container that contains multiple of such objects).

    +
  • +
  • +

    Avoid having duplicate type names. The name of a class, interface, enum or annotation should be unique within your project unless this is intentionally desired in a special and reasonable situation.

    +
  • +
  • +

    Avoid artificial naming constructs such as prefixes (I*) or suffixes (*IF) for interfaces.

    +
  • +
  • +

    Use CamelCase even for abbreviations (XmlUtil instead of XMLUtil)

    +
  • +
  • +

    Avoid property/field names where the second character is upper-case at all (e.g. 'aBc'). See #1095 for details.

    +
  • +
  • +

    Names of Generics should be easy to understand. Where suitable follow the common rule E=Element, T=Type, K=Key, V=Value but feel free to use longer names for more specific cases such as ID, DTO or ENTITY. The capitalized naming helps to distinguish a generic type from a regular class.

    +
  • +
+
+
+
+

1.8. Packages

+
+

Java Packages are the most important element to structure your code. We use a strict packaging convention to map technical layers and business components (slices) to the code (See technical architecture for further details). By using the same names in documentation and code we create a strong link that gives orientation and makes it easy to find from business requirements, specifications or story tickets into the code and back.

+
+
+

For a devon4j-based application we use the following Java-Package schema:

+
+
+
+
«root».«component».«layer»[.«detail»]
+
+
+
+

E.g. in our example application we find the Spring Data repositories for the ordermanagement component in the package com.devonfw.application.mtsj.ordermanagement.dataaccess.api.repo

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2. Segments of package schema
SegmentDescriptionExample

«root»

Is the basic Java Package name-space of your app. Typically we suggest to use «group».«artifact» where «group» is your maven/gradle groupId corresponding to your organization or IT project owning the code following common Java Package conventions. The segment «artifact» is your maven/gradle artifactId and is typically the technical name of your app.

com.devonfw.application.mtsj

«component»

The (business) component the code belongs to. It is defined by the business architecture and uses terms from the business domain. Use the implicit component general for code not belonging to a specific component (foundation code).

salesmanagement

«layer»

The name of the technical layer (See technical architecture). Details are described for the modern project structure and for the classic project structure.

logic

«detail»

Here you are free to further divide your code into sub-components and other concerns according to the size of your component part. If you want to strictly separate API from implementation you should start «detail» with «scope» that is explained below.

dao

«scope»

The scope which is one of api (official API to be used by other layers or components), base (basic code to be reused by other implementations) and impl (implementation that should never be imported from outside). This segment was initially mandatory but due to trends such as microservices, lean, and agile we decided to make it optional and do not force anybody to use it.

api

+
+

Please note that devon4j library modules for spring use com.devonfw.module as «root» and the name of the module as «component». E.g. the API of our beanmapping module can be found in the package com.devonfw.module.beanmapping.common.api.

+
+
+
+

1.9. Code Tasks

+
+

Code spots that need some rework can be marked with the following tasks tags. These are already properly pre-configured in your development environment for auto completion and to view tasks you are responsible for. It is important to keep the number of code tasks low. Therefore, every member of the team should be responsible for the overall code quality. So if you change a piece of code and hit a code task that you can resolve in a reliable way, please do this as part of your change and remove the according tag.

+
+
+
TODO
+
+

Used to mark a piece of code that is not yet complete (typically because it can not be completed due to a dependency on something that is not ready).

+
+
+
+
 // TODO «author» «description»
+
+
+
+

A TODO tag is added by the author of the code who is also responsible for completing this task.

+
+
+
+
FIXME
+
+
+
 // FIXME «author» «description»
+
+
+
+

A FIXME tag is added by the author of the code or someone who found a bug he can not fix right now. The «author» who added the FIXME is also responsible for completing this task. This is very similar to a TODO but with a higher priority. FIXME tags indicate problems that should be resolved before a release is completed while TODO tags might have to stay for a longer time.

+
+
+
+
REVIEW
+
+
+
 // REVIEW «responsible» («reviewer») «description»
+
+
+
+

A REVIEW tag is added by a reviewer during a code review. Here the original author of the code is responsible to resolve the REVIEW tag and the reviewer is assigning this task to him. This is important for feedback and learning and has to be aligned with a review "process" where people talk to each other and get into discussion. In smaller or local teams a peer-review is preferable but this does not scale for large or even distributed teams.

+
+
+
+
+

1.10. Code-Documentation

+
+

As a general goal, the code should be easy to read and understand. Besides clear naming, the documentation is important. We follow these rules:

+
+
+
    +
  • +

    APIs (especially component interfaces) are properly documented with JavaDoc.

    +
  • +
  • +

    JavaDoc shall provide actual value - we do not write JavaDoc to satisfy tools such as checkstyle but to express information not already available in the signature.

    +
  • +
  • +

    We make use of {@link} tags in JavaDoc to make it more expressive.

    +
  • +
  • +

    JavaDoc of APIs describes how to use the type or method and not how the implementation internally works.

    +
  • +
  • +

    To document implementation details, we use code comments (e.g. // we have to flush explicitly to ensure version is up-to-date). This is only needed for complex logic.

    +
  • +
  • +

    Avoid the pointless {@inheritDoc} as since Java 1.5 there is the @Override annotation for overridden methods and your JavaDoc is inherited automatically even without any JavaDoc comment at all.

    +
  • +
+
+
+
+

1.11. Code-Style

+
+

This section gives you best practices to write better code and avoid pitfalls and mistakes.

+
+
+
BLOBs
+
+

Avoid using byte[] for BLOBs as this will load them entirely into your memory. This will cause performance issues or out of memory errors. Instead, use streams when dealing with BLOBs. For further details see BLOB support.

+
+
+
+
Stateless Programming
+
+

When implementing logic as components or beans of your container using dependency injection, we strongly encourage stateless programming. +This is not about data objects like an entity or transfer-object that are stateful by design. +Instead this applies to all classes annotated with @Named, @ApplicationScoped, @Stateless, etc. and all their super-classes. +These classes especially include your repositories, use-cases, and REST services. +Such classes shall never be modified after initialization. +Methods called at runtime (after initialization via the container) do not assign fields (member variables of your class) or mutate the object stored in a field. +This allows your component or bean to be stateless and thread-safe. +Therefore it can be initialized as a singleton so only one instance is created and shared across all threads of the application. +Here is an example:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcApproveContractImpl implements UcApproveContract {
+
+  // bad
+  private String contractOwner;
+
+  private MyState state;
+
+  @Override
+  public void approve(Contract contract) {
+    this.contractOwner = contract.getOwner();
+    this.contractOwner = this.contractOwner.toLowerCase(Locale.US);
+    this.state.setAdmin(this.contractOwner.endsWith("admin"));
+    if (this.state.isAdmin()) {
+      ...
+    } else {
+      ...
+    }
+  }
+
+  // fine
+  @Override
+  public void approveContract(Contract contract) {
+    String contractOwner = contract.getOwner().toLowerCase(Locale.US);
+    if (contractOwner.endsWith("admin")) {
+      ...
+    } else {
+      ...
+    }
+  }
+}
+
+
+
+

As you can see in the bad code fields of the class are assigned when the method approve is called. +So multiple users and therefore threads calling this method concurrently can interfere and override this state causing side-effects on parallel threads. +This will lead to nasty bugs and errors that are hard to trace down. +They will not occur in simple tests but for sure in production with real users. +Therefore never do this and implement your functionality stateless. +That is keeping all state in local variables and strictly avoid modifying fields or their value as illustrated in the fine code. +If you find yourself passing many parameters between methods that all represent state, you can easily create a separate class that encapsulates this state. +However, then you need to create this state object in your method as local variable and pass it between methods as parameter:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcApproveContractImpl implements UcApproveContract {
+
+  // fine
+  @Override
+  public void approveContract(Contract contract) {
+    String contractOwner = contract.getOwner().toLowerCase(Locale.US);
+    MyState state = new MyState();
+    state.setAdmin(contractOwner.endsWith("admin"));
+    doApproveContract(contract, state);
+  }
+}
+
+
+
+
+
Closing Resources
+
+

Resources such as streams (InputStream, OutputStream, Reader, Writer) or transactions need to be handled properly. Therefore, it is important to follow these rules:

+
+
+
    +
  • +

    Each resource has to be closed properly, otherwise you will get out of file handles, TX sessions, memory leaks or the like

    +
  • +
  • +

    Where possible avoid to deal with such resources manually. That is why we are recommending @Transactional for transactions in devonfw (see Transaction Handling).

    +
  • +
  • +

    In case you have to deal with resources manually (e.g. binary streams) ensure to close them properly. See the example below for details.

    +
  • +
+
+
+

Closing streams and other such resources is error prone. Have a look at the following example:

+
+
+
+
// bad
+try {
+  InputStream in = new FileInputStream(file);
+  readData(in);
+  in.close();
+} catch (IOException e) {
+  throw new IllegalStateException("Failed to read data.", e);
+}
+
+
+
+

The code above is wrong as in case of an IOException the InputStream is not properly closed. In a server application such mistakes can cause severe errors that typically will only occur in production. As such resources implement the AutoCloseable interface you can use the try-with-resource syntax to write correct code. The following code shows a correct version of the example:

+
+
+
+
// fine
+try (InputStream in = new FileInputStream(file)) {
+  readData(in);
+} catch (IOException e) {
+  throw new IllegalStateException("Failed to read data.", e);
+}
+
+
+
+
+
Catching and handling Exceptions
+
+

When catching exceptions always ensure the following:

+
+
+
    +
  • +

    Never call printStackTrace() method on an exception

    +
  • +
  • +

    Either log or wrap and re-throw the entire caught exception. Be aware that the cause(s) of an exception is very valuable information. If you lose such information by improper exception-handling you may be unable to properly analyse production problems which can cause severe issues.

    +
    +
      +
    • +

      If you wrap and re-throw an exception ensure that the caught exception is passed as cause to the newly created and thrown exception.

      +
    • +
    • +

      If you log an exception ensure that the entire exception is passed as argument to the logger (and not only the result of getMessage() or toString() on the exception).

      +
    • +
    +
    +
  • +
  • +

    See exception handling

    +
  • +
+
+
+
+
Lambdas and Streams
+
+

With Java8 you have cool new features like lambdas and monads like (Stream, CompletableFuture, Optional, etc.). +However, these new features can also be misused or lead to code that is hard to read or debug. To avoid pain, we give you the following best practices:

+
+
+
    +
  1. +

    Learn how to use the new features properly before using. Developers are often keen on using cool new features. When you do your first experiments in your project code you will cause deep pain and might be ashamed afterwards. Please study the features properly. Even Java8 experts still write for loops to iterate over collections, so only use these features where it really makes sense.

    +
  2. +
  3. +

    Streams shall only be used in fluent API calls as a Stream can not be forked or reused.

    +
  4. +
  5. +

    Each stream has to have exactly one terminal operation.

    +
  6. +
  7. +

    Do not write multiple statements into lambda code:

    +
    +
    +
    // bad
    +collection.stream().map(x -> {
    +Foo foo = doSomething(x);
    +...
    +return foo;
    +}).collect(Collectors.toList());
    +
    +
    +
    +

    This style makes the code hard to read and debug. Never do that! Instead, extract the lambda body to a private method with a meaningful name:

    +
    +
    +
    +
    // fine
    +collection.stream().map(this::convertToFoo).collect(Collectors.toList());
    +
    +
    +
  8. +
  9. +

    Do not use parallelStream() in general code (that will run on server side) unless you know exactly what you are doing and what is going on under the hood. Some developers might think that using parallel streams is a good idea as it will make the code faster. However, if you want to do performance optimizations talk to your technical lead (architect). Many features such as security and transactions will rely on contextual information that is associated with the current thread. Hence, using parallel streams will most probably cause serious bugs. Only use them for standalone (CLI) applications or for code that is just processing large amounts of data.

    +
  10. +
  11. +

    Do not perform operations on a sub-stream inside a lambda:

    +
    +
    +
    set.stream().flatMap(x -> x.getChildren().stream().filter(this::isSpecial)).collect(Collectors.toList()); // bad
    +set.stream().flatMap(x -> x.getChildren().stream()).filter(this::isSpecial).collect(Collectors.toList()); // fine
    +
    +
    +
  12. +
  13. +

    Only use collect at the end of the stream:

    +
    +
    +
    set.stream().collect(Collectors.toList()).forEach(...) // bad
    +set.stream().peek(...).collect(Collectors.toList()) // fine
    +
    +
    +
  14. +
  15. +

    Lambda parameters with Types inference

    +
    +
    +
    (String a, Float b, Byte[] c) -> a.toString() + Float.toString(b) + Arrays.toString(c)  // bad
    +(a,b,c)  -> a.toString() + Float.toString(b) + Arrays.toString(c)  // fine
    +
    +Collections.sort(personList, (Person p1, Person p2) -> p1.getSurName().compareTo(p2.getSurName()));  // bad
    +Collections.sort(personList, (p1, p2) -> p1.getSurName().compareTo(p2.getSurName()));  // fine
    +
    +
    +
  16. +
  17. +

    Avoid Return Braces and Statement

    +
    +
    +
     a ->  { return a.toString(); } // bad
    + a ->  a.toString();   // fine
    +
    +
    +
  18. +
  19. +

    Avoid Parentheses with Single Parameter

    +
    +
    +
    (a) -> a.toString(); // bad
    + a -> a.toString();  // fine
    +
    +
    +
  20. +
  21. +

    Avoid if/else inside foreach method. Use Filter method & comprehension

    +
    +
    +
    // bad
    +static public List<String> twitterHandles(List<Author> authors, String company) {
    +    final List<String> result = new ArrayList<String>();
    +    for (Author a : authors) {
    +      if (a.getCompany().equals(company)) {
    +        String handle = a.getTwitterHandle();
    +        if (handle != null)
    +          result.add(handle);
    +      }
    +    }
    +    return result;
    +  }
    +
    +
    +
    +
    +
    // fine
    +public List<String> twitterHandles(List<Author> authors, String company) {
    +    return authors.stream()
    +            .filter(a -> null != a && a.getCompany().equals(company))
    +            .map(a -> a.getTwitterHandle())
    +            .collect(toList());
    +  }
    +
    +
    +
  22. +
+
+
+
+
Optionals
+
+

With Optional you can wrap values to avoid a NullPointerException (NPE). However, it is not a good code-style to use Optional for every parameter or result to express that it may be null. For such cases use @Nullable, or even better, annotate @NotNull where null is not acceptable.

+
+
+

However, Optional can be used to prevent NPEs in fluent calls (due to the lack of the elvis operator):

+
+
+
+
Long id;
+id = fooCto.getBar().getBar().getId(); // may cause NPE
+id = Optional.ofNullable(fooCto).map(FooCto::getBar).map(BarCto::getBar).map(BarEto::getId).orElse(null); // null-safe
+
+
+
+
+
Encoding
+
+

Encoding (esp. Unicode with combining characters and surrogates) is a complex topic. Please study this topic if you have to deal with encodings and processing of special characters. For the basics follow these recommendations:

+
+
+
    +
  • +

    Whenever possible prefer unicode (UTF-8 or better) as encoding. This especially impacts your databases and has to be defined upfront as it typically can not be changed (easily) afterwards.

    +
  • +
  • +

    Do not cast from byte to char (unicode characters can be composed of multiple bytes, such cast may only work for ASCII characters)

    +
  • +
  • +

    Never convert the case of a String using the default locale (esp. when writing generic code like in devonfw). E.g. if you do "HI".toLowerCase() and your system locale is Turkish, then the output will be "hı" instead of "hi", which can lead to wrong assumptions and serious problems. If you want to do a "universal" case conversion always explicitly use an according western locale (e.g. toLowerCase(Locale.US)). Consider using a helper class (see e.g. CaseHelper) or create your own little static utility for that in your project.

    +
  • +
  • +

    Write your code independent from the default encoding (system property file.encoding) - this will most likely differ in JUnit from production environment

    +
    +
      +
    • +

      Always provide an encoding when you create a String from byte[]: new String(bytes, encoding)

      +
    • +
    • +

      Always provide an encoding when you create a Reader or Writer : new InputStreamReader(inStream, encoding)

      +
    • +
    +
    +
  • +
+
+
+
+
Prefer general API
+
+

Avoid unnecessary strong bindings:

+
+
+
    +
  • +

    Do not bind your code to implementations such as Vector or ArrayList instead of List

    +
  • +
  • +

    In APIs for input (=parameters) always consider to make little assumptions:

    +
    +
      +
    • +

      prefer Collection over List or Set where the difference does not matter (e.g. only use Set when you require uniqueness or highly efficient contains)

      +
    • +
    • +

      consider preferring Collection<? extends Foo> over Collection<Foo> when Foo is an interface or super-class

      +
    • +
    +
    +
  • +
+
+
+
+
Prefer primitive boolean
+
+

Unless in rare cases where you need to allow a flag being null avoid using the object type Boolean.

+
+
+
+
// bad
+public Boolean isEmpty() {
+  return size() == 0;
+}
+
+
+
+

Instead always use the primitive boolean type:

+
+
+
+
// fine
+public boolean isEmpty() {
+  return size() == 0;
+}
+
+
+
+

The only known excuse is for flags in embeddable types due to limitations of hibernate.

+
+ +
+

==Project structure

+
+
+

In devonfw we want to give clear structure and guidance for building applications. +This also allows tools such as CobiGen or sonar-devon4j-plugin to "understand" the code. +Also this helps developers going from one devonfw project to the next one to quickly understand the code-base. +If every developer knows where to find what, the project gets more efficient. +A long time ago maven standardized the project structure with src/main/java, etc. and turned chaos into structure. +With devonfw we experienced the same for the codebase (what is inside src/main/java).

+
+
+

We initially started devon4j based on spring and spring-boot and proposed a classic project structure. +With modern cloud-native trends we added a modern project structure, that is more lean and up-to-date with the latest market trends.

+
+ +
+

==Dependency Injection +Dependency injection is one of the most important design patterns and is a key principle to a modular and component based architecture. +The Java Standard for dependency injection is javax.inject (JSR330) that we use in combination with JSR250. +Additionally, for scoping you can use CDI (Context and Dependency Injection) from JSR365.

+
+
+

There are many frameworks which support this standard including all recent Java EE application servers. +Therefore in devonfw we rely on these open standards and can propagate patterns and code examples that work independent from the underlying frameworks.

+
+
+
+
+

1.12. Key Principles

+
+

Within dependency injection a bean is typically a reusable unit of your application providing an encapsulated functionality. +This bean can be injected into other beans and it should in general be replaceable. +As an example we can think of a use-case, a repository, etc. +As best practice we use the following principles:

+
+
+
    +
  • +

    Stateless implementation
    +By default such beans shall be implemented stateless. If you store state information in member variables you can easily run into concurrency problems and nasty bugs. This is easy to avoid by using local variables and separate state classes for complex state-information. Try to avoid stateful beans wherever possible. Only add state if you are fully aware of what you are doing and properly document this as a warning in your JavaDoc.

    +
  • +
  • +

    Usage of Java standards
    +We use common standards (see above) that makes our code portable. Therefore we use standardized annotations like @Inject (javax.inject.Inject) instead of proprietary annotations such as @Autowired. Generally we avoid proprietary annotations in business code (logic layer).

    +
  • +
  • +

    Simple injection-style
    +In general you can choose between constructor, setter or field injection. For simplicity we recommend to do private field injection as it is very compact and easy to maintain. We believe that constructor injection is bad for maintenance especially in case of inheritance (if you change the dependencies you need to refactor all sub-classes). Private field injection and public setter injection are very similar but setter injection is much more verbose (often you are even forced to have javadoc for all public methods). If you are writing re-usable library code setter injection will make sense as it is more flexible. In a business application you typically do not need that and can save a lot of boiler-plate code if you use private field injection instead. Nowadays you are using container infrastructure also for your tests (see testing) so there is no need to inject manually (what would require a public setter).

    +
  • +
  • +

    KISS
    +To follow the KISS (keep it small and simple) principle we avoid advanced features (e.g. custom AOP, non-singleton beans) and only use them where necessary.

    +
  • +
  • +

    Separation of API and implementation
    +For important components we should separate a self-contained API documented with JavaDoc from its implementation. Code from other components that wants to use the implementation shall only rely on the API. However, for things that will never be exchanged no API as interface is required you can skip such separation.

    +
  • +
+
+
+
+

1.13. Example Bean

+
+

Here you can see the implementation of an example bean using dependency injection:

+
+
+
+
@ApplicationScoped
+@Named("MyComponent")
+public class MyComponentImpl implements MyComponent {
+  @Inject
+  private MyOtherComponent myOtherComponent;
+
+  @PostConstruct
+  public void init() {
+    // initialization if required (otherwise omit this method)
+  }
+
+  @PreDestroy
+  public void dispose() {
+    // shutdown bean, free resources if required (otherwise omit this method)
+  }
+
+  ...
+}
+
+
+
+

Here MyComponentImpl depends on MyOtherComponent that is injected into the field myOtherComponent because of the @Inject annotation. +To make this work there must be exactly one bean in the container (e.g. spring or quarkus) that is an instance of MyOtherComponent. +In order to put a bean into the container, we can use @ApplicationScoped in case of CDI (required for quarkus) for a stateless bean. +In spring we can omit a CDI annotation and the @Named annotation is already sufficient as a bean is stateless by default in spring. +If we always use @ApplicationScoped we can make this more explicit and more portable across different frameworks. +So in our example we put MyComponentImpl into the container. +That bean will be called MyComponent as we specified in the @Named annotation but we can also omit the name to use the classname as fallback. +Now our bean can be injected into other beans using @Inject annotation either via MyComponent interface (recommended when interface is present) or even directly via MyComponentImpl. +In case you omit the interface, you should also omit the Impl suffix or instead use Bean as suffix.

+
+
+
+

1.14. Multiple bean implementations

+
+

In some cases you might have multiple implementations as beans for the same interface. +The following sub-sections handle the different scenarios to give you guidance.

+
+
+
Only one implementation in container
+
+

In some cases you still have only one implementation active as bean in the container at runtime. +A typical example is that you have different implementations for test and main usage. +This case is easy, as @Inject will always be unique. +The only thing you need to care about is how to configure your framework (spring, quarkus, etc.) to know which implementation to put in the container depending on specific configuration. +In spring this can be achieved via the proprietary @Profile annotation.

+
+
+
+
Injecting all of multiple implementations
+
+

In some situations you may have an interface that defines a kind of "plugin". +You can have multiple implementations in your container and want to have all of them injected. +Then you can request a list with all the bean implementations via the interface as in the following example:

+
+
+
+
  @Inject
+  private List<MyConverter> converters;
+
+
+
+

Your code may iterate over all plugins (converters) and apply them sequentially. +Please note that the injection will fail (at least in spring), when there is no bean available to inject. +So you do not get an empty list injected but will get an exception on startup.

+
+
+
+
Injecting one of multiple implementations
+
+

Another scenario is that you have multiple implementations in your container coexisting, but for injection you may want to choose a specific implementation. +Here you could use the @Named annotation to specify a unique identifier for each implementation what is called qualified injection:

+
+
+
+
@ApplicationScoped
+@Named("UserAuthenticator")
+public class UserAuthenticator implements Authenticator {
+  ...
+}
+@ApplicationScoped
+@Named("ServiceAuthenticator")
+public class ServiceAuthenticator implements Authenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  @Named("UserAuthenticator")
+  private Authenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  @Named("ServiceAuthenticator")
+  private Authenticator authenticator;
+  ...
+}
+
+
+
+

However, we discovered that this pattern is not so great: +The identifiers in the @Named annotation are just strings that could easily break. +You could use constants instead but still this is not the best solution.

+
+
+

In the end you can very much simplify this by just directly injecting the implementation instead:

+
+
+
+
@ApplicationScoped
+public class UserAuthenticator implements Authenticator {
+  ...
+}
+@ApplicationScoped
+public class ServiceAuthenticator implements Authenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  private UserAuthenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  private ServiceAuthenticator authenticator;
+  ...
+}
+
+
+
+

In case you want to strictly decouple from implementations, you can still create dedicated interfaces:

+
+
+
+
public interface UserAuthenticator extends Authenticator {}
+@ApplicationScoped
+public class UserAuthenticatorImpl implements UserAuthenticator {
+  ...
+}
+public interface ServiceAuthenticator extends Authenticator {}
+@ApplicationScoped
+public class ServiceAuthenticatorImpl implements ServiceAuthenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  private UserAuthenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  private ServiceAuthenticator authenticator;
+  ...
+}
+
+
+
+

However, as you can see this is again introducing additional boiler-plate code. +While the principle to separate API and implementation and strictly decouple from implementation is valuable in general, +you should always consider KISS, lean, and agile in contrast and balance pros and cons instead of blindly following dogmas.

+
+
+
+
+

1.15. Imports

+
+

Here are the import statements for the most important annotations for dependency injection

+
+
+
+
import javax.inject.Inject;
+import javax.inject.Named;
+import javax.enterprise.context.ApplicationScoped;
+// import javax.enterprise.context.RequestScoped;
+// import javax.enterprise.context.SessionScoped;
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+
+
+
+
+

1.16. Dependencies

+
+

Please note that with Jakarta EE the dependencies have changed. +When you want to start with Jakarta EE you should use these dependencies to get the annotations for dependency injection:

+
+
+
+
<!-- Basic injection annotations (JSR-330) -->
+<dependency>
+  <groupId>jakarta.inject</groupId>
+  <artifactId>jakarta.inject-api</artifactId>
+</dependency>
+<!-- Basic lifecycle and security annotations (JSR-250)-->
+<dependency>
+  <groupId>jakarta.annotation</groupId>
+  <artifactId>jakarta.annotation-api</artifactId>
+</dependency>
+<!-- Context and dependency injection API (JSR-365) -->
+<dependency>
+  <groupId>jakarta.enterprise</groupId>
+  <artifactId>jakarta.enterprise.cdi-api</artifactId>
+</dependency>
+
+
+
+

Please note that with quarkus you will get them as transitive dependencies out of the box. +The above Jakarta EE dependencies replace these JEE dependencies:

+
+
+
+
<!-- Basic injection annotations (JSR-330) -->
+<dependency>
+  <groupId>javax.inject</groupId>
+  <artifactId>javax.inject</artifactId>
+</dependency>
+<!-- Basic lifecycle and security annotations (JSR-250)-->
+<dependency>
+  <groupId>javax.annotation</groupId>
+  <artifactId>javax.annotation-api</artifactId>
+</dependency>
+<!-- Context and dependency injection API (JSR-365) -->
+<dependency>
+  <groupId>jakarta.enterprise</groupId>
+  <artifactId>jakarta.enterprise.cdi-api</artifactId>
+</dependency>
+
+
+ +
+

==BLOB support

+
+
+

BLOB stands for Binary Large Object. A BLOB may be an image, an office document, ZIP archive or any other multimedia object. +Often these BLOBs are large. If this is the case you need to take care that you do not copy all the blob data into your application heap, e.g. when providing them via a REST service. +This could easily lead to performance problems or out of memory errors. +The solution for that problem is "streaming" those BLOBs directly from the database to the client. To demonstrate how this can be accomplished, devonfw provides an example.

+
+
+
+

1.17. Further Reading

+ +
+ +
+

==Common

+
+
+

In our coding-conventions we define a clear packaging and layering. +However, there is always cross-cutting code that does not belong to a specific layer such as generic helpers, general code for configuration or integration, etc. +Therefore, we define a package segment common that can be used as «layer» for such cross-cutting code. +Code from any other layer is allowed to access such common code (at least within the same component).

+
+
+ +
+

==Java Persistence API

+
+
+

For mapping java objects to a relational database we use the Java Persistence API (JPA). +As JPA implementation we recommend to use Hibernate. For general documentation about JPA and Hibernate follow the links above as we will not replicate the documentation. Here you will only find guidelines and examples how we recommend to use it properly. The following examples show how to map the data of a database to an entity. As we use JPA we abstract from SQL here. However, you will still need a DDL script for your schema and during maintenance also database migrations. Please follow our SQL guide for such artifacts.

+
+
+
+

1.18. Entity

+
+

Entities are part of the persistence layer and contain the actual data. They are POJOs (Plain Old Java Objects) on which the relational data of a database is mapped and vice versa. The mapping is configured via JPA annotations (javax.persistence). Usually an entity class corresponds to a table of a database and a property to a column of that table. A persistent entity instance then represents a row of the database table.

+
+
+
A Simple Entity
+
+

The following listing shows a simple example:

+
+
+
+
@Entity
+@Table(name="TEXTMESSAGE")
+public class MessageEntity extends ApplicationPersistenceEntity implements Message {
+
+  private String text;
+
+  public String getText() {
+    return this.text;
+  }
+
+  public void setText(String text) {
+    this.text = text;
+  }
+ }
+
+
+
+

The @Entity annotation defines that instances of this class will be entities which can be stored in the database. The @Table annotation is optional and can be used to define the name of the corresponding table in the database. If it is not specified, the simple name of the entity class is used instead.

+
+
+

In order to specify how to map the attributes to columns we annotate the corresponding getter methods (technically private field annotation is also possible but approaches can not be mixed). +The @Id annotation specifies that a property should be used as primary key. +With the help of the @Column annotation it is possible to define the name of the column that an attribute is mapped to as well as other aspects such as nullable or unique. If no column name is specified, the name of the property is used as default.

+
+
+

Note that every entity class needs a constructor with public or protected visibility that does not have any arguments. Moreover, neither the class nor its getters and setters may be final.

+
+
+

Entities should be simple POJOs and not contain business logic.

+
+
+
+
Entities and Datatypes
+
+

Standard datatypes like Integer, BigDecimal, String, etc. are mapped automatically by JPA. Custom datatypes are mapped as serialized BLOB by default, which is typically undesired. +In order to map atomic custom datatypes (implementations of SimpleDatatype) we implement an AttributeConverter. Here is a simple example:

+
+
+
+
@Converter(autoApply = true)
+public class MoneyAttributeConverter implements AttributeConverter<Money, BigDecimal> {
+
+  public BigDecimal convertToDatabaseColumn(Money attribute) {
+    return attribute.getValue();
+  }
+
+  public Money convertToEntityAttribute(BigDecimal dbData) {
+    return new Money(dbData);
+  }
+}
+
+
+
+

The annotation @Converter is detected by the JPA vendor if the annotated class is in the packages to scan. Further, autoApply = true implies that the converter is automatically used for all properties of the handled datatype. Therefore all entities with properties of that datatype will automatically be mapped properly (in our example Money is mapped as BigDecimal).

+
+
+

In case you have a composite datatype that you need to map to multiple columns the JPA does not offer a real solution. As a workaround you can use a bean instead of a real datatype and declare it as @Embeddable. If you are using Hibernate you can implement CompositeUserType. Via the @TypeDef annotation it can be registered to Hibernate. If you want to annotate the CompositeUserType implementation itself you also need another annotation (e.g. MappedSuperclass though not technically correct) so it is found by the scan.

+
+
+
Enumerations
+
+

By default JPA maps Enums via their ordinal. Therefore the database will only contain the ordinals (0, 1, 2, etc.). So, inside the database you cannot easily understand their meaning. Using @Enumerated with EnumType.STRING allows to map the enum values to their name (Enum.name()). Both approaches are fragile when it comes to code changes and refactoring (if you change the order of the enum values or rename them) after the application is deployed to production. If you want to avoid this and get a robust mapping you can define a dedicated string in each enum value for database representation that you keep untouched. Then you treat the enum just like any other custom datatype.

+
+
+
+
BLOB
+
+

If binary or character large objects (BLOB/CLOB) should be used to store the value of an attribute, e.g. to store an icon, the @Lob annotation should be used as shown in the following listing:

+
+
+
+
@Lob
+public byte[] getIcon() {
+  return this.icon;
+}
+
+
+
+ + + + + +
+ + +Using a byte array will cause problems if BLOBs get large because the entire BLOB is loaded into the RAM of the server and has to be processed by the garbage collector. For larger BLOBs the type Blob and streaming should be used. +
+
+
+
+
public Blob getAttachment() {
+  return this.attachment;
+}
+
+
+
+
+
Date and Time
+
+

To store date and time related values, the temporal annotation can be used as shown in the listing below:

+
+
+
+
@Temporal(TemporalType.TIMESTAMP)
+public java.util.Date getStart() {
+  return start;
+}
+
+
+
+

Until Java8 the java data type java.util.Date (or Jodatime) has to be used. +TemporalType defines the granularity. In this case, a precision of nanoseconds is used. If this granularity is not wanted, TemporalType.DATE can be used instead, which only has a granularity of milliseconds. +Mixing these two granularities can cause problems when comparing one value to another. This is why we only use TemporalType.TIMESTAMP.

+
+
+
+
QueryDSL and Custom Types
+
+

Using the Aliases API of QueryDSL might result in an InvalidDataAccessApiUsageException when using custom datatypes in entity properties. This can be circumvented in two steps:

+
+
+
    +
  1. +

    Ensure you have the following maven dependencies in your project (core module) to support custom types via the Aliases API:

    +
    +
    +
    <dependency>
    +  <groupId>org.ow2.asm</groupId>
    +  <artifactId>asm</artifactId>
    +</dependency>
    +<dependency>
    +  <groupId>cglib</groupId>
    +  <artifactId>cglib</artifactId>
    +</dependency>
    +
    +
    +
  2. +
  3. +

    Make sure, that all your custom types used in entities provide a non-argument constructor with at least visibility level protected.

    +
  4. +
+
+
+
+
+
Primary Keys
+
+

We only use simple Long values as primary keys (IDs). +By default it is auto generated (@GeneratedValue(strategy=GenerationType.AUTO)). +This is already provided by the class com.devonfw.<projectName>.general.dataaccess.api.AbstractPersistenceEntity within the classic project structure respectively com.devonfw.<projectName>.general.domain.model.AbstractPersistenceEntity within the modern project structure, that you can extend.

+
+
+

The reason for this recommendation is simply because using a number (Long) is the most efficient representation for the database. +You may also consider to use other types like String or UUID or even composite custom datatypes and this is technically possible. +However, please consider that the primary key is used to lookup the row from the database table, also in foreign keys and thus in JOINs. +Please note that your project sooner or later may reach some complexity where performance really matters. +Working on big data and performing JOINs when using types such as String (VARCHAR[2]) as primary and foreign keys will kill your performance. +You are still free to make a different choice and devonfw only gives recommendations but does not want to dictate you what to do. +However, you have been warned about the consequences. +If you are well aware of what you are doing, you can still use different types of primary keys. +In such case, create your own entity not extending AbstractPersistenceEntity or create your own copy of AbstractPersistenceEntity with a different name and a different type of primary key.

+
+
+

In case you have business oriented keys (often as String), you can define an additional property for it and declare it as unique (@Column(unique=true)). +Be sure to include "AUTO_INCREMENT" in your sql table field ID to be able to persist data (or similar for other databases).

+
+
+
+
+

1.19. Relationships

+
+
n:1 and 1:1 Relationships
+
+

Entities often do not exist independently but are in some relation to each other. For example, for every period of time one of the StaffMember’s of the restaurant example has worked, which is represented by the class WorkingTime, there is a relationship to this StaffMember.

+
+
+

The following listing shows how this can be modeled using JPA:

+
+
+
+
...
+
+@Entity
+public class WorkingTimeEntity {
+   ...
+
+   private StaffMemberEntity staffMember;
+
+   @ManyToOne
+   @JoinColumn(name="STAFFMEMBER")
+   public StaffMemberEntity getStaffMember() {
+      return this.staffMember;
+   }
+
+   public void setStaffMember(StaffMemberEntity staffMember) {
+      this.staffMember = staffMember;
+   }
+}
+
+
+
+

To represent the relationship, an attribute of the type of the corresponding entity class that is referenced has been introduced. The relationship is a n:1 relationship, because every WorkingTime belongs to exactly one StaffMember, but a StaffMember usually worked more often than once.
+This is why the @ManyToOne annotation is used here. For 1:1 relationships the @OneToOne annotation can be used which works basically the same way. To be able to save information about the relation in the database, an additional column in the corresponding table of WorkingTime is needed which contains the primary key of the referenced StaffMember. With the name element of the @JoinColumn annotation it is possible to specify the name of this column.

+
+
+
+
1:n and n:m Relationships
+
+

The relationship of the example listed above is currently a unidirectional one, as there is a getter method for retrieving the StaffMember from the WorkingTime object, but not vice versa.

+
+
+

To make it a bidirectional one, the following code has to be added to StaffMember:

+
+
+
+
  private Set<WorkingTimeEntity> workingTimes;
+
+  @OneToMany(mappedBy="staffMember")
+  public Set<WorkingTimeEntity> getWorkingTimes() {
+    return this.workingTimes;
+  }
+
+  public void setWorkingTimes(Set<WorkingTimeEntity> workingTimes) {
+    this.workingTimes = workingTimes;
+  }
+
+
+
+

To make the relationship bidirectional, the tables in the database do not have to be changed. Instead the column that corresponds to the attribute staffMember in class WorkingTime is used, which is specified by the mappedBy element of the @OneToMany annotation. Hibernate will search for corresponding WorkingTime objects automatically when a StaffMember is loaded.

+
+
+

The problem with bidirectional relationships is that if a WorkingTime object is added to the set or list workingTimes in StaffMember, this does not have any effect in the database unless +the staffMember attribute of that WorkingTime object is set. That is why devon4j advises not to use bidirectional relationships but to use queries instead. How to do this is shown here. If a bidirectional relationship should be used nevertheless, appropriate add and remove methods must be used.

+
+
+

For 1:n and n:m relations, devon4j demands that (unordered) Sets and no other collection types are used, as shown in the listing above. The only exception is whenever an ordering is really needed, (sorted) lists can be used.
+For example, if WorkingTime objects should be sorted by their start time, this could be done like this:

+
+
+
+
  private List<WorkingTimeEntity> workingTimes;
+
+  @OneToMany(mappedBy = "staffMember")
+  @OrderBy("startTime asc")
+  public List<WorkingTimeEntity> getWorkingTimes() {
+    return this.workingTimes;
+  }
+
+  public void setWorkingTimes(List<WorkingTimeEntity> workingTimes) {
+    this.workingTimes = workingTimes;
+  }
+
+
+
+

The value of the @OrderBy annotation consists of an attribute name of the class followed by asc (ascending) or desc (descending).

+
+
+

To store information about a n:m relationship, a separate table has to be used, as one column cannot store several values (at least if the database schema is in first normal form).
+For example if one wanted to extend the example application so that all ingredients of one FoodDrink can be saved and to model the ingredients themselves as entities (e.g. to store additional information about them), this could be modeled as follows (extract of class FoodDrink):

+
+
+
+
  private Set<IngredientEntity> ingredients;
+
+  @ManyToMany()
+  @JoinTable
+  public Set<IngredientEntity> getIngredients() {
+    return this.ingredients;
+  }
+
+  public void setIngredients(Set<IngredientEntity> ingredients) {
+    this.ingredients = ingredients;
+  }
+
+
+
+

Information about the relation is stored in a table called BILL_ORDER that has to have two columns, one for referencing the Bill, the other one for referencing the Order. Note that the @JoinTable annotation is not needed in this case because a separate table is the default solution here (same for n:m relations) unless there is a mappedBy element specified.

+
+
+

For 1:n relationships this solution has the disadvantage that more joins (in the database system) are needed to get a Bill with all the Orders it refers to. This might have a negative impact on performance so that the solution to store a reference to the Bill row/entity in the Order’s table is probably the better solution in most cases.

+
+
+

Note that bidirectional n:m relationships are not allowed for applications based on devon4j. Instead a third entity has to be introduced, which "represents" the relationship (it has two n:1 relationships).

+
+
+
+
Eager vs. Lazy Loading
+
+

Using JPA it is possible to use either lazy or eager loading. Eager loading means that for entities retrieved from the database, other entities that are referenced by these entities are also retrieved, whereas lazy loading means that this is only done when they are actually needed, i.e. when the corresponding getter method is invoked.

+
+
+

Applications based on devon4j are strongly advised to always use lazy loading. The JPA defaults are:

+
+
+
    +
  • +

    @OneToMany: LAZY

    +
  • +
  • +

    @ManyToMany: LAZY

    +
  • +
  • +

    @ManyToOne: EAGER

    +
  • +
  • +

    @OneToOne: EAGER

    +
  • +
+
+
+

So at least for @ManyToOne and @OneToOne you always need to override the default by providing fetch = FetchType.LAZY.

+
+
+ + + + + +
+ + +Please read the performance guide. +
+
+
+
+
Cascading Relationships
+
+

For relations it is also possible to define whether operations are cascaded (like a recursion) to the related entity. +By default, nothing is done in these situations. This can be changed by using the cascade property of the annotation that specifies the relation type (@OneToOne, @ManyToOne, @OneToMany, @ManyToMany). This property accepts a CascadeType that offers the following options:

+
+
+
    +
  • +

    PERSIST (for EntityManager.persist, relevant to inserted transient entities into DB)

    +
  • +
  • +

    REMOVE (for EntityManager.remove to delete entity from DB)

    +
  • +
  • +

    MERGE (for EntityManager.merge)

    +
  • +
  • +

    REFRESH (for EntityManager.refresh)

    +
  • +
  • +

    DETACH (for EntityManager.detach)

    +
  • +
  • +

    ALL (cascade all of the above operations)

    +
  • +
+
+
+

See here for more information.

+
+
+
+
Typesafe Foreign Keys using IdRef
+
+

For simple usage you can use Long for all your foreign keys. +However, as an optional pattern for advanced and type-safe usage, we offer IdRef.

+
+
+
+
+

1.20. Embeddable

+
+

An embeddable Object is a way to group properties of an entity into a separate Java (child) object. Unlike with relationships, the embeddable is not a separate entity and its properties are stored (embedded) in the same table together with the entity. This is helpful to structure and reuse groups of properties.

+
+
+

The following example shows an Address implemented as an embeddable class:

+
+
+
+
@Embeddable
+public class AddressEmbeddable {
+
+  private String street;
+  private String number;
+  private Integer zipCode;
+  private String city;
+
+  @Column(name="STREETNUMBER")
+  public String getNumber() {
+    return number;
+  }
+
+  public void setNumber(String number) {
+    this.number = number;
+  }
+
+  ...  // other getter and setter methods, equals, hashCode
+}
+
+
+
+

As you can see an embeddable is similar to an entity class, but with an @Embeddable annotation instead of the @Entity annotation and without primary key or modification counter. +An Embeddable does not exist on its own but in the context of an entity. +As a simplification Embeddables do not require a separate interface and ETO as the bean-mapper will create a copy automatically when converting the owning entity to an ETO. +However, in this case the embeddable becomes part of your api module that therefore needs a dependency on the JPA.

+
+
+

In addition to that the methods equals(Object) and hashCode() need to be implemented as this is required by Hibernate (it is not required for entities because they can be unambiguously identified by their primary key). For some hints on how to implement the hashCode() method please have a look here.

+
+
+

Using this AddressEmbeddable inside an entity class can be done like this:

+
+
+
+
  private AddressEmbeddable address;
+
+  @Embedded
+  public AddressEmbeddable getAddress() {
+    return this.address;
+  }
+
+  public void setAddress(AddressEmbeddable address) {
+    this.address = address;
+  }
+}
+
+
+
+

The @Embedded annotation needs to be used for embedded attributes. Note that if in all columns of the embeddable (here Address) are null, then the embeddable object itself is also null inside the entity. This has to be considered to avoid NullPointerException’s. Further this causes some issues with primitive types in embeddable classes that can be avoided by only using object types instead.

+
+
+
+

1.21. Inheritance

+
+

Just like normal java classes, entity classes can inherit from others. The only difference is that you need to specify how to map a class hierarchy to database tables. Generic abstract super-classes for entities can simply be annotated with @MappedSuperclass.

+
+
+

For all other cases the JPA offers the annotation @Inheritance with the property strategy taking an InheritanceType that has the following options:

+
+
+
+
+
    +
  • +

    SINGLE_TABLE: This strategy uses a single table that contains all columns needed to store all entity-types of the entire inheritance hierarchy. If a column is not needed for an entity because of its type, there is a null value in this column. An additional column is introduced, which denotes the type of the entity (called dtype).

    +
  • +
  • +

    TABLE_PER_CLASS: For each concrete entity class there is a table in the database that can store such an entity with all its attributes. An entity is only saved in the table corresponding to its most concrete type. To get all entities of a super type, joins are needed.

    +
  • +
  • +

    JOINED: In this case there is a table for every entity class including abstract classes, which contains only the columns for the persistent properties of that particular class. Additionally there is a primary key column in every table. To get an entity of a class that is a subclass of another one, joins are needed.

    +
  • +
+
+
+
+
+

Each of the three approaches has its advantages and drawbacks, which are discussed in detail here. In most cases, the first one should be used, because it is usually the fastest way to do the mapping, as no joins are needed when retrieving, searching or persisting entities. Moreover it is rather simple and easy to understand. +One major disadvantage is that the first approach could lead to a table with a lot of null values, which might have a negative impact on the database size.

+
+
+

The inheritance strategy has to be annotated to the top-most entity of the class hierarchy (where @MappedSuperclass classes are not considered) like in the following example:

+
+
+
+
@Entity
+@Inheritance(strategy=InheritanceType.SINGLE_TABLE)
+public abstract class MyParentEntity extends ApplicationPersistenceEntity implements MyParent {
+  ...
+}
+
+@Entity
+public class MyChildEntity extends MyParentEntity implements MyChild {
+  ...
+}
+
+@Entity
+public class MyOtherEntity extends MyParentEntity implements MyChild {
+  ...
+}
+
+
+
+

As a best practice we advise you to avoid entity hierarchies at all where possible and otherwise to keep the hierarchy as small as possible. In order to just ensure reuse or establish a common API you can consider a shared interface, a @MappedSuperclass or an @Embeddable instead of an entity hierarchy.

+
+
+
+

1.22. Repositories and DAOs

+
+

For each entity a code unit is created that groups all database operations for that entity. We recommend to use spring-data repositories for that as it is most efficient for developers. As an alternative there is still the classic approach using DAOs.

+
+
+
Concurrency Control
+
+

The concurrency control defines the way concurrent access to the same data of a database is handled. When several users (or threads of application servers) concurrently access a database, anomalies may happen, e.g. a transaction is able to see changes from another transaction although that one did not yet commit these changes. Most of these anomalies are automatically prevented by the database system, depending on the isolation level (property hibernate.connection.isolation in the jpa.xml, see here, or quarkus.datasource.jdbc.transaction-isolation-level in the application.properties).

+
+
+

Another anomaly is when two stakeholders concurrently access a record, do some changes and write them back to the database. The JPA addresses this with different locking strategies (see here).

+
+
+

As a best practice we are using optimistic locking for regular end-user services (OLTP) and pessimistic locking for batches.

+
+
+
+
Optimistic Locking
+
+

The class com.devonfw.module.jpa.persistence.api.AbstractPersistenceEntity already provides optimistic locking via a modificationCounter with the @Version annotation. Therefore JPA takes care of optimistic locking for you. When entities are transferred to clients, modified and sent back for update you need to ensure the modificationCounter is part of the game. If you follow our guides about transfer-objects and services this will also work out of the box. +You only have to care about two things:

+
+
+
    +
  • +

    How to deal with optimistic locking in relationships?
    +Assume an entity A contains a collection of B entities. Should there be a locking conflict if one user modifies an instance of A while another user in parallel modifies an instance of B that is contained in the other instance? To address this, take a look at FeatureForceIncrementModificationCounter.

    +
  • +
  • +

    What should happen in the UI if an OptimisticLockException occurred?
    +According to KISS our recommendation is that the user gets an error displayed that tells him to do his change again on the recent data. Try to design your system and the work processing in a way to keep such conflicts rare and you are fine.

    +
  • +
+
+
+
+
Pessimistic Locking
+
+

For back-end services and especially for batches optimistic locking is not suitable. A human user shall not cause a large batch process to fail because he was editing the same entity. Therefore such use-cases use pessimistic locking, which gives them a kind of priority over the human users. +In your DAO implementation you can provide methods that do pessimistic locking via EntityManager operations that take a LockModeType. Here is a simple example:

+
+
+
+
  getEntityManager().lock(entity, LockModeType.READ);
+
+
+
+

When using the lock(Object, LockModeType) method with LockModeType.READ, Hibernate will issue a SELECT … FOR UPDATE. This means that no one else can update the entity (see here for more information on the statement). If LockModeType.WRITE is specified, Hibernate issues a SELECT … FOR UPDATE NOWAIT instead, which has the same meaning as the statement above, but if there is already a lock, the program will not wait for this lock to be released. Instead, an exception is raised.
+Use one of the types if you want to modify the entity later on, for read only access no lock is required.

+
+
+

As you might have noticed, the behavior of Hibernate deviates from what one would expect by looking at the LockModeType (especially LockModeType.READ should not cause a SELECT …​ FOR UPDATE to be issued). The framework actually deviates from what is specified in the JPA for unknown reasons.

+
+
+
+
+

1.23. Database Auditing

+ +
+
+

1.24. Testing Data-Access

+
+

For testing of Entities and Repositories or DAOs see testing guide.

+
+
+
+

1.25. Principles

+
+

We strongly recommend these principles:

+
+
+
    +
  • +

    Use the JPA where ever possible and use vendor (hibernate) specific features only for situations when JPA does not provide a solution. In the latter case consider first if you really need the feature.

    +
  • +
  • +

    Create your entities as simple POJOs and use JPA to annotate the getters in order to define the mapping.

    +
  • +
  • +

    Keep your entities simple and avoid putting advanced logic into entity methods.

    +
  • +
+
+
+
+

1.26. Database Configuration

+
+

For details on the configuration of the database connection and database logging of the individual framework, please refer to the respective configuration guide.

+
+
+

For spring see here.

+
+
+

For quarkus see here.

+
+
+
Database Migration
+ +
+
+
Pooling
+
+

You typically want to pool JDBC connections to boost performance by recycling previous connections. There are many libraries available to do connection pooling. We recommend to use HikariCP. For Oracle RDBMS see here.

+
+
+
+
+

1.27. Security

+
+
SQL-Injection
+
+

A common security threat is SQL-injection. Never build queries with string concatenation or your code might be vulnerable as in the following example:

+
+
+
+
  String query = "Select op from OrderPosition op where op.comment = " + userInput;
+  return getEntityManager().createQuery(query).getResultList();
+
+
+
+

Via the parameter userInput an attacker can inject SQL (JPQL) and execute arbitrary statements in the database causing extreme damage.

+
+
+

In order to prevent such injections you have to strictly follow our rules for queries:

+
+
+ +
+
+
+
Limited Permissions for Application
+
+

We suggest that you operate your application with a database user that has limited permissions so he can not modify the SQL schema (e.g. drop tables). For initializing the schema (DDL) or to do schema migrations use a separate user that is not used by the application itself.

+
+ +
+

==Queries +The Java Persistence API (JPA) defines its own query language, the java persistence query language (JPQL) (see also JPQL tutorial), which is similar to SQL but operates on entities and their attributes instead of tables and columns.

+
+
+

The simplest CRUD-Queries (e.g. find an entity by its ID) are already built into the devonfw CRUD functionality (via Repository or DAO). For other cases you need to write your own query. We distinguish between static and dynamic queries. Static queries have a fixed JPQL query string that may only use parameters to customize the query at runtime. Instead, dynamic queries can change their clauses (WHERE, ORDER BY, JOIN, etc.) at runtime depending on the given search criteria.

+
+
+
+
Static Queries
+
+

E.g. to find all DishEntries (from MTS sample app) that have a price not exceeding a given maxPrice we write the following JPQL query:

+
+
+
+
SELECT dish FROM DishEntity dish WHERE dish.price <= :maxPrice
+
+
+
+

Here dish is used as alias (variable name) for our selected DishEntity (what refers to the simple name of the Java entity class). With dish.price we are referring to the Java property price (getPrice()/setPrice(…​)) in DishEntity. A named variable provided from outside (the search criteria at runtime) is specified with a colon (:) as prefix. Here with :maxPrice we reference to a variable that needs to be set via query.setParameter("maxPrice", maxPriceValue). JPQL also supports indexed parameters (?) but they are discouraged because they easily cause confusion and mistakes.

+
+
+
Using Queries to Avoid Bidirectional Relationships
+
+

With the usage of queries it is possible to avoid exposing relationships or modelling bidirectional relationships, which have some disadvantages (see relationships). This is especially desired for relationships between entities of different business components. +So for example to get all OrderLineEntities for a specific OrderEntity without using the orderLines relation from OrderEntity the following query could be used:

+
+
+
+
SELECT line FROM OrderLineEntity line WHERE line.order.id = :orderId
+
+
+
+
+
+
Dynamic Queries
+
+

For dynamic queries, we use the JPA module for Querydsl. Querydsl also supports other modules such as MongoDB, and Apache Lucene. It allows to implement queries in a powerful but readable and type-safe way (unlike Criteria API). If you already know JPQL, you will quickly be able to read and write Querydsl code. It feels like JPQL but implemented in Java instead of plain text.

+
+
+

To use Querydsl in your Maven project, add the following dependencies:

+
+
+
+
<dependencies>
+
+    <dependency>
+        <groupId>com.querydsl</groupId>
+        <artifactId>querydsl-apt</artifactId>
+        <version>${querydsl.version}</version>
+        <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+        <groupId>com.querydsl</groupId>
+        <artifactId>querydsl-jpa</artifactId>
+        <version>${querydsl.version}</version>
+    </dependency>
+
+</dependencies>
+
+
+
+

Next, configure the annotation processing tool (APT) plugin:

+
+
+
+
<project>
+  <build>
+    <plugins>
+      ...
+      <plugin>
+        <groupId>com.mysema.maven</groupId>
+        <artifactId>apt-maven-plugin</artifactId>
+        <version>1.1.3</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>process</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>target/generated-sources/java</outputDirectory>
+              <processor>com.querydsl.apt.jpa.JPAAnnotationProcessor</processor>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      ...
+    </plugins>
+  </build>
+</project>
+
+
+
+

Here is an example from our sample application:

+
+
+
+
  public List<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    QDishEntity dish = QDishEntity.dishEntity;
+    JPAQuery<DishEntity> query = new JPAQuery<DishEntity>(getEntityManager());
+    query.from(dish);
+
+    Range<BigDecimal> priceRange = criteria.getPriceRange();
+    if (priceRange != null) {
+      BigDecimal min = priceRange.getMin();
+      if (min != null) {
+        query.where(dish.price.goe(min));
+      }
+      BigDecimal max = priceRange.getMax();
+      if (max != null) {
+        query.where(dish.price.loe(max));
+      }
+    }
+    String name = criteria.getName();
+    if ((name != null) && (!name.isEmpty())) {
+      query.where(dish.name.eq(name));
+    }
+    query.orderBy(dish.price.asc(), dish.name.asc());
+    return query.fetch();
+  }
+
+
+
+

In this example, we use the so called Q-types (QDishEntity). These are classes generated at build time by the Querydsl annotation processor from entity classes. The Q-type classes can be used as static types representative of the original entity class.

+
+
+

The query.from(dish) method call defines the query source, in this case the dish table. The where method defines a filter. For example, the first call uses the goe operator to filter out any dishes that are not greater or equal to the minimal price. Further operators can be found here.

+
+
+

The orderBy method is used to sort the query results according to certain criteria. Here, we sort the results first by their price and then by their name, both in ascending order. To sort in descending order, use .desc(). To partition query results into groups of rows, see the groupBy method.

+
+
+

For spring, devon4j provides another approach that you can use for your Spring applications to implement Querydsl logic without having to use these metaclasses. An example can be found here.

+
+
+
+
Native Queries
+
+

Spring Data supports the use of native queries. Native queries use simple native SQL syntax that is not parsed in JPQL. This allows you to use all the features that your database supports. +The downside to this is that database portability is lost due to the absence of an abstraction layer. Therefore, the queries may not work with another database because it may use a different syntax.

+
+
+

You can implement a native query using @Query annotation with the nativeQuery attribute set to true:

+
+
+
+
@Query(value="...", nativeQuery=true)
+
+
+
+ + + + + +
+ + +This will not work with Quarkus because Quarkus does not support native queries by using the @Query annotation (see here). +
+
+
+

You can also implement native queries directly using the EntityManager API and the createNativeQuery method. +This approach also works with Quarkus.

+
+
+
+
Query query = entityManager.createNativeQuery("SELECT * FROM Product", ProductEntity.class);
+List<ProductEntity> products = query.getResultList();
+
+
+
+ + + + + +
+ + +Be sure to use the name of the table when using native queries, while you must use the entity name when implementing queries with JPQL. +
+
+
+
+
Using Wildcards
+
+

For flexible queries it is often required to allow wildcards (especially in dynamic queries). While users intuitively expect glob syntax, the SQL and JPQL standards work differently. Therefore, a mapping is required. devonfw provides this on a lower level with LikePatternSyntax and on a higher level with QueryUtil (see QueryHelper.newStringClause(…​)).

+
+
+
+
Pagination
+
+

When dealing with large amounts of data, an efficient method of retrieving the data is required. Fetching the entire data set each time would be too time consuming. Instead, Paging is used to process only small subsets of the entire data set.

+
+
+

If you are using Spring Data repositories you will get pagination support out of the box by providing the interfaces Page and Pageable:

+
+
+
Listing 1. repository
+
+
Page<DishEntity> findAll(Pageable pageable);
+
+
+
+

Then you can create a Pageable object and pass it to the method call as follows:

+
+
+
+
int page = criteria.getPageNumber();
+int size = criteria.getPageSize();
+Pageable pageable = PageRequest.of(page, size);
+Page<DishEntity> dishes = dishRepository.findAll(pageable);
+
+
+
+
Paging with Querydsl
+
+

Pagination is also supported for dynamic queries with Querydsl:

+
+
+
+
  public Page<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    QDishEntity dish = QDishEntity.dishEntity;
+    JPAQuery<DishEntity> query = new JPAQuery<DishEntity>(getEntityManager());
+    query.from(dish);
+
+    // conditions
+
+    int page = criteria.getPageNumber();
+    int size = criteria.getPageSize();
+    Pageable pageable = PageRequest.of(page, size);
+    query.offset(pageable.getOffset());
+    query.limit(pageable.getPageSize());
+
+    List<DishEntity> dishes = query.fetch();
+    return new PageImpl<>(dishes, pageable, dishes.size());
+  }
+
+
+
+
+
Pagination example
+
+

For the table entity we can make a search request by accessing the REST endpoint with pagination support like in the following examples:

+
+
+
+
POST mythaistar/services/rest/tablemanagement/v1/table/search
+{
+  "pagination": {
+    "size":2,
+    "total":true
+  }
+}
+
+//Response
+{
+    "pagination": {
+        "size": 2,
+        "page": 1,
+        "total": 11
+    },
+    "result": [
+        {
+            "id": 101,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 1,
+            "state": "OCCUPIED"
+        },
+        {
+            "id": 102,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 2,
+            "state": "FREE"
+        }
+    ]
+}
+
+
+
+ + + + + +
+ + +As we are requesting with the total property set to true the server responds with the total count of rows for the query. +
+
+
+

For retrieving a concrete page, we provide the page attribute with the desired value. Here we also left out the total property so the server doesn’t incur the effort to calculate it:

+
+
+
+
POST mythaistar/services/rest/tablemanagement/v1/table/search
+{
+  "pagination": {
+    "size":2,
+    "page":2
+  }
+}
+
+//Response
+
+{
+    "pagination": {
+        "size": 2,
+        "page": 2,
+        "total": null
+    },
+    "result": [
+        {
+            "id": 103,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 3,
+            "state": "FREE"
+        },
+        {
+            "id": 104,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 4,
+            "state": "FREE"
+        }
+    ]
+}
+
+
+
+
+
Pagination in devon4j-spring
+
+

For spring applications, devon4j also offers its own solution for pagination. You can find an example of this here.

+
+
+
+
+
Query Meta-Parameters
+
+

Queries can have meta-parameters that are provided via SearchCriteriaTo. Besides paging (see above) we also get timeout support.

+
+
+
+
Advanced Queries
+
+

Writing queries can sometimes get rather complex. The current examples given above only showed very simple basics. Within this topic a lot of advanced features need to be considered like:

+
+
+ +
+
+

This list is just containing the most important aspects. As we can not cover all these topics here, they are linked to external documentation that can help and guide you.

+
+ +
+

==Spring Data +Spring Data JPA is supported by both Spring and Quarkus. However, in Quarkus this approach still has some limitations. For detailed information, see the official Quarkus Spring Data guide.

+
+
+
+
Motivation
+
+

The benefits of Spring Data are (for examples and explanations see next sections):

+
+
+
    +
  • +

    All you need is one single repository interface for each entity. No need for a separate implementation or other code artifacts like XML descriptors, NamedQueries class, etc.

    +
  • +
  • +

    You have all information together in one place (the repository interface) that actually belongs together (whereas in the classic approach you have the static queries in an XML file, constants for them in NamedQueries class and referencing usages in DAO implementation classes).

    +
  • +
  • +

    Static queries are most simple to realize as you do not need to write any method body. This means you can develop faster.

    +
  • +
  • +

    Support for paging is already built-in. Again, for static query methods there is nothing you have to do except using the paging objects in the signature.

    +
  • +
  • +

    Still you have the freedom to write custom implementations via default methods within the repository interface (e.g. for dynamic queries).

    +
  • +
+
+
+
+
Dependency
+
+

In case you want to switch to or add Spring Data support to your Spring or Quarkus application, all you need is to add the respective maven dependency:

+
+
+
Listing 2. spring
+
+
<dependency>
+  <groupId>org.springframework.boot</groupId>
+  <artifactId>spring-boot-starter-data-jpa</artifactId>
+</dependency>
+
+
+
+
Listing 3. quarkus
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-spring-data-jpa</artifactId>
+</dependency>
+
+
+
+
+
Repository
+
+

For each entity «Entity»Entity an interface is created with the name «Entity»Repository extending JpaRepository. +Such repository is the analogy to a Data-Access-Object (DAO) used in the classic approach or when Spring Data is not an option.

+
+
+
Listing 4. Repository
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long> {
+
+}
+
+
+
+

The Spring Data repository provides some basic implementations for accessing data, e.g. returning all instances of a type (findAll) or returning an instance by its ID (findById).

+
+
+
+
Custom method implementation
+
+

In addition, repositories can be enriched with additional functionality, e.g. to add QueryDSL functionality or to override the default implementations, by using so called repository fragments:

+
+
+
Example
+
+

The following example shows how to write such a repository:

+
+
+
Listing 5. Repository
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long>, ProductFragment {
+
+  @Query("SELECT product FROM ProductEntity product" //
+      + " WHERE product.title = :title")
+  List<ProductEntity> findByTitle(@Param("title") String title);
+
+  @Query("SELECT product FROM ProductEntity product" //
+      + " WHERE product.title = :title")
+  Page<ProductEntity> findByTitlePaginated(@Param("title") String title, Pageable pageable);
+}
+
+
+
+
Listing 6. Repository fragment
+
+
public interface ProductFragment {
+  Page<ProductEntity> findByCriteria(ProductSearchCriteriaTo criteria);
+}
+
+
+
+
Listing 7. Fragment implementation
+
+
public class ProductFragmentImpl implements ProductFragment {
+  @Inject
+  EntityManager entityManager;
+
+  public Page<ProductEntity> findByCriteria(ProductSearchCriteriaTo criteria) {
+    QProductEntity product = QProductEntity.productEntity;
+    JPAQuery<ProductEntity> query = new JPAQuery<ProductEntity>(this.entityManager);
+    query.from(product);
+
+    String title = criteria.getTitle();
+    if ((title != null) && !title.isEmpty()) {
+      query.where(product.title.eq(title));
+    }
+
+    List<ProductEntity> products = query.fetch();
+    return new PageImpl<>(products, PageRequest.of(criteria.getPageNumber(), criteria.getPageSize()), products.size());
+  }
+}
+
+
+
+

This ProductRepository has the following features:

+
+
+
    +
  • +

    CRUD support from Spring Data (see JavaDoc for details).

    +
  • +
  • +

    Support for QueryDSL integration, paging and more.

    +
  • +
  • +

    A static query method findByTitle to find all ProductEntity instances from DB that have the given title. Please note the @Param annotation that links the method parameter with the variable inside the query (:title).

    +
  • +
  • +

    The same with pagination support via findByTitlePaginated method.

    +
  • +
  • +

    A dynamic query method findByCriteria showing the QueryDSL and paging integration into Spring via a fragment implementation.

    +
  • +
+
+
+

You can find an implementation of this ProductRepository in our Quarkus reference application.

+
+
+ + + + + +
+ + +In Quarkus, native and named queries via the @Query annotation are currently not supported +
+
+
+
+
Integration of Spring Data in devon4j-spring
+
+

For Spring applications, devon4j offers a proprietary solution that integrates seamlessly with QueryDSL and uses default methods instead of the fragment approach. A separate guide for this can be found here.

+
+
+
+
Custom methods without fragment approach
+
+

The fragment approach is a bit laborious, as three types (repository interface, fragment interface and fragment implementation) are always needed to implement custom methods. +We cannot simply use default methods within the repository because we cannot inject the EntityManager directly into the repository interface.

+
+
+

As a workaround, you can create a GenericRepository interface, as is done in the devon4j jpa-spring-data module.

+
+
+
+
public interface GenericRepository<E> {
+
+  EntityManager getEntityManager();
+
+  ...
+}
+
+
+
+
+
public class GenericRepositoryImpl<E> implements GenericRepository<E> {
+
+  @Inject
+  EntityManager entityManager;
+
+  @Override
+  public EntityManager getEntityManager() {
+
+    return this.entityManager;
+  }
+
+  ...
+}
+
+
+
+

Then, all your repository interfaces can extend the GenericRepository and you can implement queries directly in the repository interface using default methods:

+
+
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long>, GenericRepository<ProductEntity> {
+
+  default Page<ProductEntity> findByTitle(Title title) {
+
+    EntityManager entityManager = getEntityManager();
+    Query query = entityManager.createNativeQuery("select * from Product where title = :title", ProductEntity.class);
+    query.setParameter("title", title);
+    List<ProductEntity> products = query.getResultList();
+    return new PageImpl<>(products);
+  }
+
+  ...
+}
+
+
+
+
+
+
Drawbacks
+
+

Spring Data also has some drawbacks:

+
+
+
    +
  • +

    Some kind of magic behind the scenes that is not so easy to understand. So in case you want to extend all your repositories without providing the implementation via a default method in a parent repository interface you need to deep-dive into Spring Data. We assume that you do not need that and hope that what Spring Data and devon already provide out-of-the-box is sufficient.

    +
  • +
  • +

    The Spring Data magic also includes guessing the query from the method name. This is not easy to understand and especially to debug. Our suggestion is not to use this feature at all and either provide a @Query annotation or an implementation via default method.

    +
  • +
+
+
+
+
Limitations in Quarkus
+
+
    +
  • +

    Native and named queries are not supported using @Query annotation. You will receive something like: Build step io.quarkus.spring.data.deployment.SpringDataJPAProcessor#build threw an exception: java.lang.IllegalArgumentException: Attribute nativeQuery of @Query is currently not supported

    +
  • +
  • +

    Customizing the base repository for all repository interfaces in the code base, which is done in Spring Data by registering a class that extends SimpleJpaRepository

    +
  • +
+
+ +
+

==Data Access Object

+
+
+

The Data Access Objects (DAOs) are part of the persistence layer. +They are responsible for a specific entity and should be named «Entity»Dao and «Entity»DaoImpl. +The DAO offers the so called CRUD-functionalities (create, retrieve, update, delete) for the corresponding entity. +Additionally a DAO may offer advanced operations such as query or locking methods.

+
+
+
+
DAO Interface
+
+

For each DAO there is an interface named «Entity»Dao that defines the API. For CRUD support and common naming we derive it from the ApplicationDao interface that comes with the devon application template:

+
+
+
+
public interface MyEntityDao extends ApplicationDao<MyEntity> {
+  List<MyEntity> findByCriteria(MyEntitySearchCriteria criteria);
+}
+
+
+
+

All CRUD operations are inherited from ApplicationDao so you only have to declare the additional methods.

+
+
+
+
DAO Implementation
+
+

Implementing a DAO is quite simple. We create a class named «Entity»DaoImpl that extends ApplicationDaoImpl and implements your «Entity»Dao interface:

+
+
+
+
public class MyEntityDaoImpl extends ApplicationDaoImpl<MyEntity> implements MyEntityDao {
+
+  public List<MyEntity> findByCriteria(MyEntitySearchCriteria criteria) {
+    TypedQuery<MyEntity> query = createQuery(criteria, getEntityManager());
+    return query.getResultList();
+  }
+  ...
+}
+
+
+
+

Again you only need to implement the additional non-CRUD methods that you have declared in your «Entity»Dao interface. +In the DAO implementation you can use the method getEntityManager() to access the EntityManager from the JPA. You will need the EntityManager to create and execute queries.

+
+
+
Static queries for DAO Implementation
+
+

All static queries are declared in the file src\main\resources\META-INF\orm.xml:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<entity-mappings version="1.0" xmlns="http://java.sun.com/xml/ns/persistence/orm" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://java.sun.com/xml/ns/persistence/orm http://java.sun.com/xml/ns/persistence/orm_1_0.xsd">
+  <named-query name="find.dish.with.max.price">
+    <query><![CDATA[SELECT dish FROM DishEntity dish WHERE dish.price <= :maxPrice]]></query>
+  </named-query>
+  ...
+</entity-mappings>
+
+
+
+

When your application is started, all these static queries will be created as prepared statements. This allows better performance and also ensures that you get errors for invalid JPQL queries when you start your app rather than later when the query is used.

+
+
+

To avoid redundant occurrences of the query name (find.dish.with.max.price) we define a constant for each named query:

+
+
+
+
public class NamedQueries {
+  public static final String FIND_DISH_WITH_MAX_PRICE = "find.dish.with.max.price";
+}
+
+
+
+

Note that changing the name of the java constant (FIND_DISH_WITH_MAX_PRICE) can be done easily with refactoring. Further you can trace where the query is used by searching the references of the constant.

+
+
+

The following listing shows how to use this query:

+
+
+
+
public List<DishEntity> findDishByMaxPrice(BigDecimal maxPrice) {
+  Query query = getEntityManager().createNamedQuery(NamedQueries.FIND_DISH_WITH_MAX_PRICE);
+  query.setParameter("maxPrice", maxPrice);
+  return query.getResultList();
+}
+
+
+
+

Via EntityManager.createNamedQuery(String) we create an instance of Query for our predefined static query. +Next we use setParameter(String, Object) to provide a parameter (maxPrice) to the query. This has to be done for all parameters of the query.

+
+
+

Note that using the createQuery(String) method, which takes the entire query as string (that may already contain the parameter) is not allowed to avoid SQL injection vulnerabilities. +When the method getResultList() is invoked, the query is executed and the result is delivered as List. As an alternative, there is a method called getSingleResult(), which returns the entity if the query returned exactly one and throws an exception otherwise.

+
+ +
+

==JPA Performance +When using JPA the developer sometimes does not see or understand where and when statements to the database are triggered.

+
+
+
+
+

Establishing expectations Developers shouldn’t expect to sprinkle magic pixie dust on POJOs in hopes they will become persistent.

+
+
+
+— Dan Allen
+https://epdf.tips/seam-in-action.html +
+
+
+

So in case you do not understand what is going on under the hood of JPA, you will easily run into performance issues due to lazy loading and other effects.

+
+
+
+
+
N plus 1 Problem
+
+

The most prominent phenomenon is called the N+1 Problem. +We use entities from our MTS demo app as an example to explain the problem. +There is a DishEntity that has a @ManyToMany relation to +IngredientEntity. +Now we assume that we want to iterate all ingredients for a dish like this:

+
+
+
+
DishEntity dish = dao.findDishById(dishId);
+BigDecimal priceWithAllExtras = dish.getPrice();
+for (IngredientEntity ingredient : dish.getExtras()) {
+  priceWithAllExtras = priceWithAllExtras.add(ingredient.getPrice());
+}
+
+
+
+

Now dish.getExtras() is loaded lazy. Therefore the JPA vendor will provide a list with lazy initialized instances of IngredientEntity that only contain the ID of that entity. Now with every call of ingredient.getPrice() we technically trigger an SQL query statement to load the specific IngredientEntity by its ID from the database. +Now findDishById caused 1 initial query statement and for any number N of ingredients we are causing an additional query statement. This makes a total of N+1 statements. As causing statements to the database is an expensive operation with a lot of overhead (creating connection, etc.) this ends in bad performance and is therefore a problem (the N+1 Problem).

+
+
+
+
Solving N plus 1 Problem
+
+

To solve the N+1 Problem you need to change your code to only trigger a single statement instead. This can be achieved in various ways. The most universal solution is to use FETCH JOIN in order to pre-load the nested N child entities into the first level cache of the JPA vendor implementation. This will behave very similar as if the @ManyToMany relation to IngredientEntity was having FetchType.EAGER but only for the specific query and not in general. Because changing @ManyToMany to FetchType.EAGER would cause bad performance for other usecases where only the dish but not its extra ingredients are needed. For this reason all relations, including @OneToOne should always be FetchType.LAZY. Back to our example we simply replace dao.findDishById(dishId) with dao.findDishWithExtrasById(dishId) that we implement by the following JPQL query:

+
+
+
+
SELECT dish FROM DishEntity dish
+  LEFT JOIN FETCH dish.extras
+  WHERE dish.id = :dishId
+
+
+
+

The rest of the code does not have to be changed but now dish.getExtras() will get the IngredientEntity from the first level cache where it was fetched by the initial query above.

+
+
+

Please note that if you only need the sum of the prices from the extras you can also create a query using an aggregator function:

+
+
+
+
SELECT sum(dish.extras.price) FROM DishEntity dish
+
+
+
+

As you can see you need to understand the concepts in order to get good performance.

+
+
+

There are many advanced topics such as creating database indexes or calculating statistics for the query optimizer to get the best performance. For such advanced topics we recommend to have a database expert in your team that cares about such things. However, understanding the N+1 Problem and its solutions is something that every Java developer in the team needs to understand.

+
+ +
+

==IdRef

+
+
+

IdRef can be used to reference other entities in TOs in order to make them type-safe and semantically more expressive. +It is an optional concept in devon4j for more complex applications that make intensive use of relations and foreign keys.

+
+
+
+
Motivation
+
+

Assuming you have a method signature like the following:

+
+
+
+
Long approve(Long cId, Long cuId);
+
+
+
+

So what are the parameters? What is returned?

+
+
+

IdRef is just a wrapper for a Long used as foreign key. This makes our signature much more expressive and self-explanatory:

+
+
+
+
IdRef<Contract> approve(IdRef<Contract> cId, IdRef<Customer> cuId);
+
+
+
+

Now we can easily see, that the result and the parameters are foreign-keys and which entity they are referring to via their generic type. +We can read the javadoc of these entities from the generic type and understand the context. +Finally, when passing IdRef objects to such methods, we get compile errors in case we accidentally place parameters in the wrong order.

+
+
+
+
IdRef and Mapping
+
+

In order to easily map relations from entities to transfer-objects and back, we can easily also put according getters and setters into our entities:

+
+
+
+
public class ContractEntity extends ApplicationPersistenceEntity implements Contract {
+
+  private CustomerEntity customer;
+
+  ...
+
+  @ManyToOne(fetch = FetchType.LAZY)
+  @JoinColumn(name = "CUSTOMER_ID")
+  public CustomerEntity getCustomer() {
+    return this.customer;
+  }
+
+  public void setCustomer(CustomerEntity customer) {
+    this.customer = customer;
+  }
+
+  @Transient
+  public IdRef<Customer> getCustomerId() {
+    return IdRef.of(this.customer);
+  }
+
+  public void setCustomerId(IdRef<Customer> customerId) {
+    this.customer = JpaHelper.asEntity(customerId, CustomerEntity.class);
+  }
+}
+
+
+
+

Now, ensure that you have the same getters and setters for customerId in your Eto:

+
+
+
+
public class ContractEto extends AbstractEto implements Contract {
+
+  private IdRef<Customer> customerId;
+
+  ...
+
+  public IdRef<Customer> getCustomerId() {
+    return this.customerId;
+  }
+
+  public void setCustomerId(IdRef<Customer> customerId) {
+    this.customerId = customerId;
+  }
+}
+
+
+
+

This way the bean-mapper can automatically map from your entity (ContractEntity) to your Eto (ContractEto) and vice-versa.

+
+
+
+
JpaHelper and EntityManager access
+
+

In the above example we used JpaHelper.asEntity to convert the foreign key (IdRef<Customer>) to the according entity (CustomerEntity). +This will internally use EntityManager.getReference to properly create a JPA entity. +The alternative "solution" that may be used with Long instead of IdRef is typically:

+
+
+
+
  public void setCustomerId(IdRef<Customer> customerId) {
+    Long id = null;
+    if (customerId != null) {
+      id = customerId.getId();
+    }
+    if (id == null) {
+      this.customer = null;
+    } else {
+      this.customer = new CustomerEntity();
+      this.customer.setId(id);
+    }
+  }
+
+
+
+

While this "solution" works is most cases, we discovered some more complex cases, where it fails with very strange hibernate exceptions. +When cleanly creating the entity via EntityManager.getReference instead it is working in all cases. +So how can JpaHelper.asEntity as a static method access the EntityManager? +Therefore we need to initialize this as otherwise you may see this exception:

+
+
+
+
java.lang.IllegalStateException: EntityManager has not yet been initialized!
+	at com.devonfw.module.jpa.dataaccess.api.JpaEntityManagerAccess.getEntityManager(JpaEntityManagerAccess.java:38)
+	at com.devonfw.module.jpa.dataaccess.api.JpaHelper.asEntity(JpaHelper.java:49)
+
+
+
+

For main usage in your application we assume that there is only one instance of EntityManager. +Therefore we can initialize this instance during the spring boot setup. +This is what we provide for you in JpaInitializer +when creating a devon4j app.

+
+
+
JpaHelper and spring-test
+
+

Further, you also want your code to work in integration tests. +Spring-test provides a lot of magic under the hood to make integration testing easy for you. +To boost the performance when running multiple tests, spring is smart and avoids creating the same spring-context multiple times. +Therefore it stores these contexts so that if a test-case is executed with a specific spring-configuration that has already been set up before, +the same spring-context can be reused instead of creating it again. +However, your tests may have multiple spring configurations leading to multiple spring-contexts. +Even worse these tests can run in any order leading to switching between spring-contexts forth and back. +Therefore, a static initializer during the spring boot setup can lead to strange errors as you can get the wrong EntityManager instance. +In order to fix such problems, we provide a solution pattern via DbTest ensuring for every test, +that the proper instance of EntityManager is initialized. +Therefore you should derive directly or indirectly (e.g. via ComponentDbTest and SubsystemDbTest) from DbTest or adopt your own way to apply this pattern to your tests, when using JpaHelper. +This already happens if you are extending ApplicationComponentTest or ApplicationSubsystemTest.

+
+
+ +
+

==Transaction Handling

+
+
+

For transaction handling we use AOP to add transaction control via annotations as aspect. +This is done by annotating your code with the @Transactional annotation. +You can either annotate your container bean at class level to make all methods transactional or you can annotate individual methods to make them transactional:

+
+
+
+
  @Transactional
+  public Output getData(Input input) {
+    ...
+  }
+
+
+
+
+
+
JTA Imports
+
+

Here are the import statements for transaction support:

+
+
+
+
import javax.transaction.Transactional;
+
+
+
+ + + + + +
+ + +Use the above import statement to follow JEE and avoid using org.springframework.transaction.annotation.Transactional. +
+
+
+
+
JTA Dependencies
+
+

Please note that with Jakarta EE the dependencies have changed. +When you want to start with Jakarta EE you should use these dependencies to get the annotations for dependency injection:

+
+
+
+
<!-- Java Transaction API (JTA) -->
+<dependency>
+  <groupId>jakarta.transaction</groupId>
+  <artifactId>jakarta.transaction-api</artifactId>
+</dependency>
+
+
+
+

Please note that with Quarkus you will get them as transitive dependencies out of the box. +The above Jakarta EE dependencies replace these JEE dependencies:

+
+
+
+
<!-- Java Transaction API (JTA) -->
+<dependency>
+  <groupId>javax.transaction</groupId>
+  <artifactId>javax.transaction-api</artifactId>
+</dependency>
+
+
+
+
+
Handling constraint violations
+
+

Using @Transactional magically wraps transaction handling around your code. +As constraints are checked by the database at the end when the transaction gets committed, a constraint violation will be thrown by this aspect outside your code. +In case you have to handle constraint violations manually, you have to do that in code outside the logic that is annotated with @Transactional. +This may be done in a service operation by catching a ConstraintViolationException (org.hibernate.exception.ConstraintViolationException for hibernate). +As a generic approach you can solve this via REST exception handling.

+
+
+
+
Batches
+
+

Transaction control for batches is a lot more complicated and is described in the batch layer.

+
+
+ +
+

==SQL

+
+
+

For general guides on dealing or avoiding SQL, preventing SQL-injection, etc. you should study domain layer.

+
+
+
+
+

1.28. Naming Conventions

+
+

Here we define naming conventions that you should follow whenever you write SQL files:

+
+
+
    +
  • +

    All SQL-Keywords in UPPER CASE

    +
  • +
  • +

    Indentation should be 2 spaces as suggested by devonfw for every format.

    +
  • +
+
+
+
DDL
+
+

The naming conventions for database constructs (tables, columns, triggers, constraints, etc.) should be aligned with your database product and their operators. +However, when you have the freedom of choice and a modern case-sensitive database, you can simply use your code conventions also for database constructs to avoid explicitly mapping each and every property (e.g. RestaurantTable vs. RESTAURANT_TABLE).

+
+
+
    +
  • +

    Define columns and constraints inline in the statement to create the table

    +
  • +
  • +

    Indent column types so they all start in the same text column

    +
  • +
  • +

    Constraints should be named explicitly (to get a reasonable hint error messages) with:

    +
    +
      +
    • +

      PK_«table» for primary key (name optional here as PK constraint are fundamental)

      +
    • +
    • +

      FK_«table»_«property» for foreign keys («table» and «property» are both on the source where the foreign key is defined)

      +
    • +
    • +

      UC_«table»_«property»[_«propertyN»]* for unique constraints

      +
    • +
    • +

      CK_«table»_«check» for check constraints («check» describes the check, if it is defined on a single property it should start with the property).

      +
    • +
    +
    +
  • +
  • +

    Old RDBMS had hard limitations for names (e.g. 30 characters). Please note that recent databases have overcome these very low length limitations. However, keep your names short but precise and try to define common abbreviations in your project for according (business) terms. Especially do not just truncate the names at the limit.

    +
  • +
  • +

    If possible add comments on table and columns to help DBAs understanding your schema. This is also honored by many tools (not only DBA-tools).

    +
  • +
+
+
+

Here is a brief example of a DDL:

+
+
+
+
CREATE SEQUENCE HIBERNATE_SEQUENCE START WITH 1000000;
+
+-- *** Table ***
+CREATE TABLE RESTAURANT_TABLE (
+  ID                   NUMBER(19) NOT NULL,
+  MODIFICATION_COUNTER INTEGER NOT NULL,
+  SEATS                INTEGER NOT NULL,
+  CONSTRAINT PK_TABLE PRIMARY KEY(ID)
+);
+COMMENT ON TABLE RESTAURANT_TABLE IS 'The physical tables inside the restaurant.';
+-- *** Order ***
+CREATE TABLE RESTAURANT_ORDER (
+  ID                   NUMBER(19) NOT NULL,
+  MODIFICATION_COUNTER INTEGER NOT NULL,
+  TABLE_ID             NUMBER(19) NOT NULL,
+  TOTAL                DECIMAL(5, 2) NOT NULL,
+  CREATION_DATE        TIMESTAMP NOT NULL,
+  PAYMENT_DATE         TIMESTAMP,
+  STATUS               VARCHAR2(10 CHAR) NOT NULL,
+  CONSTRAINT PK_ORDER PRIMARY KEY(ID),
+  CONSTRAINT FK_ORDER_TABLE_ID FOREIGN KEY(TABLE_ID) REFERENCES RESTAURANT_TABLE(ID)
+);
+COMMENT ON TABLE RESTAURANT_ORDER IS 'An order and bill at the restaurant.';
+...
+
+
+
+

ATTENTION: Please note that TABLE and ORDER are reserved keywords in SQL and you should avoid using such keywords to prevent problems.

+
+
+
+
Data
+
+

For insert, update, delete, etc. of data SQL scripts should additionally follow these guidelines:

+
+
+
    +
  • +

    Inserts always with the same order of columns in blocks for each table.

    +
  • +
  • +

    Insert column values always starting with ID, MODIFICATION_COUNTER, [DTYPE, ] …​

    +
  • +
  • +

    List columns with fixed length values (boolean, number, enums, etc.) before columns with free text to support alignment of multiple insert statements

    +
  • +
  • +

    Pro Tip: Get familiar with column mode of advanced editors such as notepad++ when editing large blocks of similar insert statements.

    +
  • +
+
+
+
+
INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (0, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (1, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (2, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (3, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (4, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (5, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (6, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (7, 1, 8);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (8, 1, 8);
+...
+
+
+
+

See also Database Migrations.

+
+
+ +
+

==Database Migration

+
+
+

When you have a schema-based database, +you need a solution for schema versioning and migration for your database. +A specific release of your app requires a corresponding version of the schema in the database to run. +As you want simple and continuous deployment you should automate the schema versioning and database migration.

+
+
+

The general idea is that your software product contains "scripts" to migrate the database from schema version X to version X+1. +When you begin your project you start with version 1 and with every increment of your app that needs a change to the database schema (e.g. a new table, a new column to an existing table, a new index, etc.) you add another "script" that migrates from the current to the next version. +For simplicity these versions are just sequential numbers or timestamps. +Now, the solution you choose will automatically manage the schema version in a separate metadata table in your database that stores the current schema version. +When your app is started, it will check the current version inside the database from that metadata table. +As long as there are "scripts" that migrate from there to a higher version, they will be automatically applied to the database and this process is recorded in the metadata table in your database, which also updates the current schema version there. +Using this approach, you can start with an empty database, which will result in all "scripts" being applied sequentially. +Also any version of your database schema can be present and you will always end up in a controlled migration to the latest schema version.

+
+
+
+
+

1.29. Options for database migration

+
+

For database migration you can choose between the following options:

+
+
+
    +
  • +

    flyway (KISS based approach with migrations as SQL)

    +
  • +
  • +

    liquibase (more complex approach with database abstraction)

    +
  • +
+
+ +
+

==Flyway

+
+
+

Flyway is a tool for database migration and schema versioning. +See why for a motivation why using flyway.

+
+
+

Flyway can be used standalone e.g. via flyway-maven-plugin or can be integrated directly into your app to make sure the database migration takes place on startup. +For simplicity we recommend to integrate flyway into your app. +However, you need to be aware that therefore your app needs database access with full schema owner permissions.

+
+
+
Organizational Advice
+
+

A few considerations with respect to project organization will help to implement maintainable Flyway migrations.

+
+
+

At first, testing and production environments must be clearly and consistently distinguished. Use the following directory structure to achieve this distinction:

+
+
+
+
  src/main/resources/db
+  src/test/resources/db
+
+
+
+

Although this structure introduces redundancies, the benefit outweighs this disadvantage. +An even more fine-grained production directory structure which contains one sub folder per release should be implemented:

+
+
+
+
  src/main/resources/db/migration/releases/X.Y/x.sql
+
+
+
+

Emphasizing that migration scripts below the current version must never be changed will aid the second advantage of migrations: it will always be clearly reproducible in which state the database currently is. +Here, it is important to mention that, if test data is required, it must be managed separately from the migration data in the following directory:

+
+
+
+
  src/test/resources/db/migration/
+
+
+
+

The migration directory is added to aid easy usage of Flyway defaults. +Of course, test data should also be managed per release just like production data.

+
+
+

With regard to content, separation of concerns (SoC) is an important goal. SoC can be achieved by distinguishing and writing multiple scripts with respect to business components/use cases (or database tables in case of large volumes of master data [1]. Comprehensible file names aid this separation.

+
+
+

It is important to have clear responsibilities regarding the database, the persistence layer (JPA), and migrations. Therefore a dedicated database expert should be in charge of any migrations performed or she should at least be informed before any change to any of the mentioned parts is applied.

+
+
+
+
Technical Configuration
+
+

Database migrations can be SQL based or Java based.

+
+
+

To enable auto migration on startup (not recommended for productive environment) set the following property in the application.properties file for an environment.

+
+
+
+
flyway.enabled=true
+flyway.clean-on-validation-error=false
+
+
+
+

For development environment it is helpful to set both properties to true in order to simplify development. For regular environments flyway.clean-on-validation-error should be false.

+
+
+

If you want to use Flyway set the following property in any case to prevent Hibernate from doing changes on the database (pre-configured by default in devonfw):

+
+
+
+
spring.jpa.hibernate.ddl-auto=validate
+
+
+
+

The setting must be communicated to and coordinated with the customer and their needs. +In acceptance testing the same configuration as for the production environment should be enabled.

+
+
+

Since migration scripts will also be versioned the end-of-line (EOL) style must be fixated according to this issue. This is however solved in flyway 4.0+ and the latest devonfw release. +Also, the version numbers of migration scripts should not consist of simple ascending integer numbers like V0001…​, V0002…​, …​ This naming may lead to problems when merging branches. Instead the usage of timestamps as version numbers will help to avoid such problems.

+
+
+
+
Naming Conventions
+
+

Database migrations should follow this naming convention: +V<version>__<description> (e.g.: V12345__Add_new_table.sql).

+
+
+

It is also possible to use Flyway for test data. To do so place your test data migrations in src/main/resources/db/testdata/ and set property

+
+
+
+
flyway.locations=classpath:db/migration/releases,classpath:db/migration/testdata
+
+
+
+

Then Flyway scans the additional location for migrations and applies all in the order specified by their version. If migrations V0001__... and V0002__... exist and a test data migration should be applied in between you can name it V0001_1__....

+
+ +
+

==Liquibase

+
+ +
+

See devon4j#303 for details and status.

+
+
+
+
Spring-boot usage
+
+

For using liquibase in spring see Using Liquibase with Spring Boot.

+
+
+
+
Quarkus usage
+
+

For using liquibase in quarkus see Using Liquibase.

+
+
+ +
+

==REST +REST (REpresentational State Transfer) is an inter-operable protocol for services that is more lightweight than SOAP. +However, it is no real standard and can cause confusion (see REST philosophy). +Therefore we define best practices here to guide you.

+
+
+
+
+

1.30. URLs

+
+

URLs are not case sensitive. Hence, we follow the best practice to use only lower-case-letters-with-hyphen-to-separate-words. +For operations in REST we distinguish the following types of URLs:

+
+
+
    +
  • +

    A collection URL is built from the rest service URL by appending the name of a collection. This is typically the name of an entity. Such a URL identifies the entire collection of all elements of this type. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity

    +
  • +
  • +

    An element URL is built from a collection URL by appending an element ID. It identifies a single element (entity) within the collection. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/42

    +
  • +
+
+
+

To follow KISS avoid using plural forms (…​/productmanagement/v1/products vs. …​/productmanagement/v1/product/42). Always use singular forms and avoid confusions (except for the rare cases where no singular exists).

+
+
+

The REST URL scheme fits perfectly for CRUD operations. +For business operations (processing, calculation, advanced search, etc.) we simply append a collection URL with the name of the business operation. +Then we can POST the input for the business operation and get the result back. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/search

+
+
+
+

1.31. HTTP Methods

+
+

The following table defines the HTTP methods (verbs) and their meaning:

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3. Usage of HTTP methods
HTTP MethodMeaning

GET

Read data (stateless).

PUT

Create or update data.

POST

Process data.

DELETE

Delete an entity.

+
+

Please also note that for (large) bulk deletions you may be forced to use POST instead of DELETE as according to the HTTP standard DELETE must not have payload and URLs are limited in length.

+
+
+

For general recommendations on HTTP methods for collection and element URLs see REST@wikipedia.

+
+
+
+

1.32. HTTP Status Codes

+
+

Further we define how to use the HTTP status codes for REST services properly. In general the 4xx codes correspond to an error on the client side and the 5xx codes to an error on the server side.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4. Usage of HTTP status codes
HTTP CodeMeaningResponseComment

200

OK

requested result

Result of successful GET

204

No Content

none

Result of successful POST, DELETE, or PUT with empty result (void return)

400

Bad Request

error details

The HTTP request is invalid (parse error, validation failed)

401

Unauthorized

none

Authentication failed

403

Forbidden

none

Authorization failed

404

Not found

none

Either the service URL is wrong or the requested resource does not exist

500

Server Error

error code, UUID

Internal server error occurred, in case of an exception, see REST exception handling

+
+
+

1.33. JAX-RS

+
+

For implementing REST services we use the JAX-RS standard. +As payload encoding we recommend JSON bindings using Jackson. +To implement a REST service you simply add JAX-RS annotations. +Here is a simple example:

+
+
+
+
@ApplicationScoped
+@Path("/imagemanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public class ImagemanagementRestService {
+
+  @Inject
+  private Imagemanagement imagemanagement;
+
+  @GET
+  @Path("/image/{id}/")
+  public ImageDto getImage(@PathParam("id") long id) {
+
+    return this.imagemanagement.findImage(id);
+  }
+}
+
+
+
+

Here we can see a REST service for the business component imagemanagement. The method getImage can be accessed via HTTP GET (see @GET) under the URL path imagemanagement/image/{id} (see @Path annotations) where {id} is the ID of the requested image and will be extracted from the URL and provided as parameter id to the method getImage. It will return its result (ImageDto) as JSON (see @Produces annotation - you can also extend RestService marker interface that defines these annotations for JSON). As you can see it delegates to the logic component imagemanagement that contains the actual business logic while the service itself only exposes this logic via HTTP. The REST service implementation is a regular CDI bean that can use dependency injection.

+
+
+ + + + + +
+ + +With JAX-RS it is important to make sure that each service method is annotated with the proper HTTP method (@GET,@POST,etc.) to avoid unnecessary debugging. So you should take care not to forget to specify one of these annotations. +
+
+
+
Service-Interface
+
+

You may also separate API and implementation in case you want to reuse the API for service-client:

+
+
+
+
@Path("/imagemanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public interface ImagemanagementRestService {
+
+  @GET
+  @Path("/image/{id}/")
+  ImageEto getImage(@PathParam("id") long id);
+
+}
+
+@Named("ImagemanagementRestService")
+public class ImagemanagementRestServiceImpl implements ImagemanagementRestService {
+
+  @Override
+  public ImageEto getImage(long id) {
+
+    return this.imagemanagement.findImage(id);
+  }
+
+}
+
+
+
+
+
JAX-RS Configuration
+
+

Starting from CXF 3.0.0 it is possible to enable the auto-discovery of JAX-RS roots.

+
+
+

When the JAX-RS server is instantiated, all the scanned root and provider beans (beans annotated with javax.ws.rs.Path and javax.ws.rs.ext.Provider) are configured.

+
+
+
+
REST Exception Handling
+
+

For exceptions, a service needs to have an exception facade that catches all exceptions and handles them by writing proper log messages and mapping them to an HTTP response with a corresponding HTTP status code. +For this, devon4j provides a generic solution via RestServiceExceptionFacade that you can use within your Spring applications. You need to follow the exception guide in order for it to work out of the box because the facade needs to be able to distinguish between business and technical exceptions. +To implement a generic exception facade in Quarkus, follow the Quarkus exception guide.

+
+
+

Now your service may throw exceptions, but the facade will automatically handle them for you.

+
+
+

The general format for returning an error to the client is as follows:

+
+
+
+
{
+  "message": "A human-readable message describing the error",
+  "code": "A code identifying the concrete error",
+  "uuid": "An identifier (generally the correlation id) to help identify corresponding requests in logs"
+}
+
+
+
+
+
Pagination details
+
+

We recommend to use spring-data repositories for database access that already comes with pagination support. +Therefore, when performing a search, you can include a Pageable object. +Here is a JSON example for it:

+
+
+
+
{ "pageSize": 20, "pageNumber": 0, "sort": [] }
+
+
+
+

By increasing the pageNumber the client can browse and page through the hits.

+
+
+

As a result you will receive a Page. +It is a container for your search results just like a Collection but additionally contains pagination information for the client. +Here is a JSON example:

+
+
+
+
{ "totalElements": 1022,
+  "pageable": { "pageSize": 20, "pageNumber": 0 },
+  "content": [ ... ] }
+
+
+
+

The totalElements property contains the total number of hits. +This can be used by the client to compute the total number of pages and render the pagination links accordingly. +Via the pageable property the client gets back the Pageable properties from the search request. +The actual hits for the current page are returned as array in the content property.

+
+
+
+
+

1.34. REST Testing

+
+

For testing REST services in general consult the testing guide.

+
+
+

For manual testing REST services there are browser plugins:

+
+
+ +
+
+
+

1.35. Security

+
+

Your services are the major entry point to your application. Hence security considerations are important here.

+
+
+
CSRF
+
+

A common security threat is CSRF for REST services. Therefore all REST operations that are performing modifications (PUT, POST, DELETE, etc. - all except GET) have to be secured against CSRF attacks. See CSRF how to do this.

+
+
+
+
JSON top-level arrays
+
+

OWASP earlier suggested to never return JSON arrays at the top-level, to prevent attacks without rationale. +We dug deep and found anatomy-of-a-subtle-json-vulnerability. +To sum it up the attack is many years old and does not work in any recent or relevant browser. +Hence it is fine to use arrays as top-level result in a JSON REST service (means you can return List<Foo> in a Java JAX-RS service).

+
+
+ +
+

==JSON

+
+
+

JSON (JavaScript Object Notation) is a popular format to represent and exchange data especially for modern web-clients. For mapping Java objects to JSON and vice-versa there is no official standard API. We use the established and powerful open-source solution Jackson. +Due to problems with the wiki of fasterxml you should try this alternative link: Jackson/AltLink.

+
+
+
+
+

1.36. Configure JSON Mapping

+
+

In order to avoid polluting business objects with proprietary Jackson annotations (e.g. @JsonTypeInfo, @JsonSubTypes, @JsonProperty) we propose to create a separate configuration class. Every devonfw application (sample or any app created from our app-template) therefore has a class called ApplicationObjectMapperFactory that extends ObjectMapperFactory from the devon4j-rest module. It looks like this:

+
+
+
+
@Named("ApplicationObjectMapperFactory")
+public class ApplicationObjectMapperFactory extends ObjectMapperFactory {
+
+  public ApplicationObjectMapperFactory() {
+    super();
+    // JSON configuration code goes here
+  }
+}
+
+
+
+
+

1.37. JSON and Inheritance

+
+

If you are using inheritance for your objects mapped to JSON then polymorphism can not be supported out-of-the box. So in general avoid polymorphic objects in JSON mapping. However, this is not always possible. +Have a look at the following example from our sample application:

+
+
+
+inheritance class diagram +
+
Figure 2. Transfer-Objects using Inheritance
+
+
+

Now assume you have a REST service operation as Java method that takes a ProductEto as argument. As this is an abstract class the server needs to know the actual sub-class to instantiate. +We typically do not want to specify the classname in the JSON as this should be an implementation detail and not part of the public JSON format (e.g. in case of a service interface). Therefore we use a symbolic name for each polymorphic subtype that is provided as virtual attribute @type within the JSON data of the object:

+
+
+
+
{ "@type": "Drink", ... }
+
+
+
+

Therefore you add configuration code to the constructor of ApplicationObjectMapperFactory. Here you can see an example from the sample application:

+
+
+
+
setBaseClasses(ProductEto.class);
+addSubtypes(new NamedType(MealEto.class, "Meal"), new NamedType(DrinkEto.class, "Drink"),
+  new NamedType(SideDishEto.class, "SideDish"));
+
+
+
+

We use setBaseClasses to register all top-level classes of polymorphic objects. Further we declare all concrete polymorphic sub-classes together with their symbolic name for the JSON format via addSubtypes.

+
+
+
+

1.38. Custom Mapping

+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to define a custom mapping. If you create objects dedicated for the JSON mapping you can easily avoid such situations. When this is not suitable follow these instructions to define the mapping:

+
+
+
    +
  1. +

    As an example, the use of JSR354 (javax.money) is appreciated in order to process monetary amounts properly. However, without custom mapping, the default mapping of Jackson will produce the following JSON for a MonetaryAmount:

    +
    +
    +
    "currency": {"defaultFractionDigits":2, "numericCode":978, "currencyCode":"EUR"},
    +"monetaryContext": {...},
    +"number":6.99,
    +"factory": {...}
    +
    +
    +
    +

    As clearly can be seen, the JSON contains too much information and reveals implementation secrets that do not belong here. Instead the JSON output expected and desired would be:

    +
    +
    +
    +
    "currency":"EUR","amount":"6.99"
    +
    +
    +
    +

    Even worse, when we send the JSON data to the server, Jackson will see that MonetaryAmount is an interface and does not know how to instantiate it so the request will fail. +Therefore we need a customized Serializer.

    +
    +
  2. +
  3. +

    We implement MonetaryAmountJsonSerializer to define how a MonetaryAmount is serialized to JSON:

    +
    +
    +
    public final class MonetaryAmountJsonSerializer extends JsonSerializer<MonetaryAmount> {
    +
    +  public static final String NUMBER = "amount";
    +  public static final String CURRENCY = "currency";
    +
    +  public void serialize(MonetaryAmount value, JsonGenerator jgen, SerializerProvider provider) throws ... {
    +    if (value != null) {
    +      jgen.writeStartObject();
    +      jgen.writeFieldName(MonetaryAmountJsonSerializer.CURRENCY);
    +      jgen.writeString(value.getCurrency().getCurrencyCode());
    +      jgen.writeFieldName(MonetaryAmountJsonSerializer.NUMBER);
    +      jgen.writeString(value.getNumber().toString());
    +      jgen.writeEndObject();
    +    }
    +  }
    +
    +
    +
    +

    For composite datatypes it is important to wrap the info as an object (writeStartObject() and writeEndObject()). MonetaryAmount provides the information we need by the getCurrency() and getNumber(). So that we can easily write them into the JSON data.

    +
    +
  4. +
  5. +

    Next, we implement MonetaryAmountJsonDeserializer to define how a MonetaryAmount is deserialized back as Java object from JSON:

    +
    +
    +
    public final class MonetaryAmountJsonDeserializer extends AbstractJsonDeserializer<MonetaryAmount> {
    +  protected MonetaryAmount deserializeNode(JsonNode node) {
    +    BigDecimal number = getRequiredValue(node, MonetaryAmountJsonSerializer.NUMBER, BigDecimal.class);
    +    String currencyCode = getRequiredValue(node, MonetaryAmountJsonSerializer.CURRENCY, String.class);
    +    MonetaryAmount monetaryAmount =
    +        MonetaryAmounts.getAmountFactory().setNumber(number).setCurrency(currencyCode).create();
    +    return monetaryAmount;
    +  }
    +}
    +
    +
    +
    +

    For composite datatypes we extend from AbstractJsonDeserializer as this makes our task easier. So we already get a JsonNode with the parsed payload of our datatype. Based on this API it is easy to retrieve individual fields from the payload without taking care of their order, etc. +AbstractJsonDeserializer also provides methods such as getRequiredValue to read required fields and get them converted to the desired basis datatype. So we can easily read the amount and currency and construct an instance of MonetaryAmount via the official factory API.

    +
    +
  6. +
  7. +

    Finally we need to register our custom (de)serializers with the following configuration code in the constructor of ApplicationObjectMapperFactory:+

    +
  8. +
+
+
+
+
  SimpleModule module = getExtensionModule();
+  module.addDeserializer(MonetaryAmount.class, new MonetaryAmountJsonDeserializer());
+  module.addSerializer(MonetaryAmount.class, new MonetaryAmountJsonSerializer());
+
+
+
+

Now we can read and write MonetaryAmount from and to JSON as expected.

+
+
+ +
+

==XML

+
+
+

XML (Extensible Markup Language) is a W3C standard format for structured information. It has a large eco-system of additional standards and tools.

+
+
+

In Java there are many different APIs and frameworks for accessing, producing and processing XML. For the devonfw we recommend to use JAXB for mapping Java objects to XML and vice-versa. Further there is the popular DOM API for reading and writing smaller XML documents directly. When processing large XML documents StAX is the right choice.

+
+
+
+

1.39. JAXB

+
+

We use JAXB to serialize Java objects to XML or vice-versa.

+
+
+
JAXB and Inheritance
+
+

Use @XmlSeeAlso annotation to provide sub-classes. +See section "Collective Polymorphism" described here.

+
+
+
+
JAXB Custom Mapping
+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to define a custom mapping. If you create dedicated objects for the XML mapping you can easily avoid such situations. When this is not suitable use @XmlJavaTypeAdapter and provide an XmlAdapter implementation that handles the mapping. +For details see here.

+
+
+
+
+

1.40. Security

+
+

To prevent XML External Entity attacks, follow JAXP Security Guide and enable FSP.

+
+
+ +
+

==SOAP +SOAP is a common protocol for services that is rather complex and heavy. It allows to build inter-operable and well specified services (see WSDL). SOAP is transport neutral what is not only an advantage. We strongly recommend to use HTTPS transport and ignore additional complex standards like WS-Security and use established HTTP-Standards such as RFC2617 (and RFC5280).

+
+
+
+

1.41. JAX-WS

+
+

For building web-services with Java we use the JAX-WS standard. +There are two approaches:

+
+
+
    +
  • +

    code first

    +
  • +
  • +

    contract first

    +
  • +
+
+
+

Here is an example in case you define a code-first service.

+
+
+
Web-Service Interface
+
+

We define a regular interface to define the API of the service and annotate it with JAX-WS annotations:

+
+
+
+
@WebService
+public interface TablemanagmentWebService {
+
+  @WebMethod
+  @WebResult(name = "message")
+  TableEto getTable(@WebParam(name = "id") String id);
+
+}
+
+
+
+
+
Web-Service Implementation
+
+

And here is a simple implementation of the service:

+
+
+
+
@Named
+@WebService(endpointInterface = "com.devonfw.application.mtsj.tablemanagement.service.api.ws.TablemanagmentWebService")
+public class TablemanagementWebServiceImpl implements TablemanagmentWebService {
+
+  private Tablemanagement tableManagement;
+
+  @Override
+  public TableEto getTable(String id) {
+
+    return this.tableManagement.findTable(id);
+  }
+
+
+
+
+
+

1.42. SOAP Custom Mapping

+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to write adapters for JAXB (see XML).

+
+
+
+

1.43. SOAP Testing

+
+

For testing SOAP services in general consult the testing guide.

+
+
+

For testing SOAP services manually we strongly recommend SoapUI.

+
+
+ +
+

==Logging

+
+
+

We recommend to use SLF4J as API for logging, that has become a de facto standard in Java as it has a much better design than java.util.logging offered by the JDK. +There are several implementations for SLF4J. For Spring applications our recommended implementation is Logback. Quarkus uses JBoss Logging which provides a JBoss Log Manager implementation for SLF4J. For more information on logging in Quarkus, see the Quarkus logging guide.

+
+
+
+

1.44. Logging Dependencies

+
+

To use Logback in your Spring application, you need to include the following dependencies:

+
+
+
+
<!-- SLF4J as logging API -->
+<dependency>
+  <groupId>org.slf4j</groupId>
+  <artifactId>slf4j-api</artifactId>
+</dependency>
+<!-- Logback as logging implementation  -->
+<dependency>
+  <groupId>ch.qos.logback</groupId>
+  <artifactId>logback-classic</artifactId>
+</dependency>
+<!-- JSON logging for cloud-native log monitoring -->
+<dependency>
+  <groupId>net.logstash.logback</groupId>
+  <artifactId>logstash-logback-encoder</artifactId>
+</dependency>
+
+
+
+

In devon4j these dependencies are provided by the devon4j-logging module.

+
+
+

In Quarkus, SLF4J and the slf4j-jboss-logmanager are directly included in the Quarkus core runtime and can be used out of the box.

+
+
+
+

1.45. Logger Access

+
+

The general pattern for accessing loggers from your code is a static logger instance per class using the following pattern:

+
+
+
+
import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class MyClass {
+  private static final Logger LOG = LoggerFactory.getLogger(MyClass.class);
+  ...
+}
+
+
+
+

For detailed documentation how to use the logger API check the SLF4j manual.

+
+
+ + + + + +
+ + +In case you are using devonfw-ide and Eclipse you can just type LOG and hit [ctrl][space] to insert the code pattern including the imports into your class. +
+
+
+
Lombok
+
+

In case you are using Lombok, you can simply use the @Slf4j annotation in your class. This causes Lombok to generate the logger instance for you.

+
+
+
+
+

1.46. Log-Levels

+
+

We use a common understanding of the log-levels as illustrated by the following table. +This helps for better maintenance and operation of the systems.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 5. Log-levels
Log-levelDescriptionImpactActive Environments

FATAL

Only used for fatal errors that prevent the application from working at all (e.g. startup fails or shutdown/restart required)

Operator has to react immediately

all

ERROR

An abnormal error indicating that the processing failed due to technical problems.

Operator should check for known issue and otherwise inform development

all

WARNING

A situation where something worked not as expected. E.g. a business exception or user validation failure occurred.

No direct reaction required. Used for problem analysis.

all

INFO

Important information such as context, duration, success/failure of request or process

No direct reaction required. Used for analysis.

all

DEBUG

Development information that provides additional context for debugging problems.

No direct reaction required. Used for analysis.

development and testing

TRACE

Like DEBUG but exhaustive information and for code that is run very frequently. Will typically cause large log-files.

No direct reaction required. Used for problem analysis.

none (turned off by default)

+
+

Exceptions (with their stack trace) should only be logged on FATAL or ERROR level. For business exceptions typically a WARNING including the message of the exception is sufficient.

+
+
+
Configuration of Logback
+
+

The configuration of logback happens via the logback.xml file that you should place into src/main/resources of your app. +For details consult the logback configuration manual.

+
+
+ + + + + +
+ + +Logback also allows to overrule the configuration with a logback-test.xml file that you may put into src/test/resources or into a test-dependency. +
+
+
+
+
Configuration in Quarkus
+
+

There are several options you can set in the application.properties file to configure the behaviour of the logger in Quarkus. For a detailed overview, see the corresponding part of the Quarkus guide.

+
+
+
+
+

1.47. JSON-logging

+
+

For easy integration with log-monitoring, we recommend that your app logs to standard out in JSON following JSON Lines.

+
+
+

In Spring applications, this can be achieved via logstash-logback-encoder (see dependencies). In Quarkus, it can be easily achieved using the quarkus-logging-json extension (see here for more details).

+
+
+

This will produce log-lines with the following format (example formatted for readability):

+
+
+
+
{
+  "timestamp":"2000-12-31T23:59:59.999+00:00",
+  "@version":"1",
+  "message":"Processing 4 order(s) for shipment",
+  "logger_name":"com.myapp.order.logic.UcManageOrder",
+  "thread_name":"http-nio-8081-exec-6",
+  "level":"INFO",
+  "level_value":20000,
+  "appname":"myapp",
+}
+
+
+
+
Adding custom values to JSON log with Logstash
+
+

The JSON encoder even supports logging custom properties for your log-monitoring. +The trick is to use the class net.logstash.logback.argument.StructuredArguments for adding the arguments to you log message, e.g.

+
+
+
+
import static net.logstash.logback.argument.StructuredArguments.v;
+
+...
+    LOG.info("Request with {} and {} took {} ms.", v("url", url), v("status", statusCode), v("duration", millis));
+...
+
+
+
+

This will produce a JSON log-line with the following properties:

+
+
+
+
...
+  "message":"Request with url=https://api/service/v1/ordermanagement/order and status=200 took duration=251 ms",
+  "url":"https://api/service/v1/ordermanagement/order",
+  "status":"200",
+  "duration":"251",
+...
+
+
+
+

As you can quickly see besides the human readable message you also have the structured properties url, status and duration that can be extremely valuable to configure dashboards in your log-monitoring that visualize success/failure ratio as well as performance of your requests.

+
+
+
+
+

1.48. Classic log-files

+
+ + + + + +
+ + +In devon4j, we strongly recommend using JSON logging instead of classic log files. The following section refers only to devon4j Spring applications that use Logback. +
+
+
+

Even though we do not recommend anymore to write classical log-files to the local disc, here you can still find our approach for it.

+
+
+
Maven-Integration
+
+

In the pom.xml of your application add this dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java</groupId>
+  <artifactId>devon4j-logging</artifactId>
+</dependency>
+
+
+
+

The above dependency already adds transitive dependencies to SLF4J and logback. +Also it comes with configuration snippets that can be included from your logback.xml file (see configuration).

+
+
+

The logback.xml to write regular log-files can look as following:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<configuration scan="true" scanPeriod="60 seconds">
+  <property resource="com/devonfw/logging/logback/application-logging.properties" />
+  <property name="appname" value="MyApp"/>
+  <property name="logPath" value="../logs"/>
+  <include resource="com/devonfw/logging/logback/appenders-file-all.xml" />
+  <include resource="com/devonfw/logging/logback/appender-console.xml" />
+
+  <root level="DEBUG">
+    <appender-ref ref="ERROR_APPENDER"/>
+    <appender-ref ref="INFO_APPENDER"/>
+    <appender-ref ref="DEBUG_APPENDER"/>
+    <appender-ref ref="CONSOLE_APPENDER"/>
+  </root>
+
+  <logger name="org.springframework" level="INFO"/>
+</configuration>
+
+
+
+

The provided logback.xml is configured to use variables defined in the config/application.properties file. +In our example, the log file path points to ../logs/ in order to log to the tomcat log directory when starting tomcat from the bin folder. +Change it according to your custom needs.

+
+
+
Listing 8. config/application.properties
+
+
log.dir=../logs/
+
+
+
+
+
Log Files
+
+

The classical approach uses the following log files:

+
+
+
    +
  • +

    Error Log: Includes log entries to detect errors.

    +
  • +
  • +

    Info Log: Used to analyze system status and to detect bottlenecks.

    +
  • +
  • +

    Debug Log: Detailed information for error detection.

    +
  • +
+
+
+

The log file name pattern is as follows:

+
+
+
+
«LOGTYPE»_log_«HOST»_«APPLICATION»_«TIMESTAMP».log
+
+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 6. Segments of Logfilename
ElementValueDescription

«LOGTYPE»

info, error, debug

Type of log file

«HOST»

e.g. mywebserver01

Name of server, where logs are generated

«APPLICATION»

e.g. myapp

Name of application, which causes logs

«TIMESTAMP»

YYYY-MM-DD_HH00

date of log file

+
+

Example: +error_log_mywebserver01_myapp_2013-09-16_0900.log

+
+
+

Error log from mywebserver01 at application myapp at 16th September 2013 9am.

+
+
+
+
Output format
+
+

We use the following output format for all log entries to ensure that searching and filtering of log entries work consistent for all logfiles:

+
+
+
+
[D: «timestamp»] [P: «priority»] [C: «NDC»][T: «thread»][L: «logger»]-[M: «message»]
+
+
+
+
    +
  • +

    D: Date (Timestamp in ISO8601 format e.g. 2013-09-05 16:40:36,464)

    +
  • +
  • +

    P: Priority (the log level)

    +
  • +
  • +

    C: Correlation ID (ID to identify users across multiple systems, needed when application is distributed)

    +
  • +
  • +

    T: Thread (Name of thread)

    +
  • +
  • +

    L: Logger name (use class name)

    +
  • +
  • +

    M: Message (log message)

    +
  • +
+
+
+

Example:

+
+
+
+
[D: 2013-09-05 16:40:36,464] [P: DEBUG] [C: 12345] [T: main] [L: my.package.MyClass]-[M: My message...]
+
+
+
+ + + + + +
+ + +When using devon4j-logging, this format is used by default. To achieve this format in Quarkus, set quarkus.log.console.format=[D: %d] [P: %p] [C: %X] [T: %t] [L: %c] [M: %m]%n in your properties. +
+
+
+
+
Correlation ID
+
+

In order to correlate separate HTTP requests to services belonging to the same user / session, we provide a servlet filter called DiagnosticContextFilter. +This filter takes a provided correlation ID from the HTTP header X-Correlation-Id. +If none was found, it will generate a new correlation id as UUID. +This correlation ID is added as MDC to the logger. +Therefore, it will then be included to any log message of the current request (thread). +Further concepts such as service invocations will pass this correlation ID to subsequent calls in the application landscape. Hence you can find all log messages related to an initial request simply via the correlation ID even in highly distributed systems.

+
+
+
+
Security
+
+

In order to prevent log forging attacks you can simply use the suggested JSON logging format. +Otherwise you can use com.devonfw.module.logging.common.impl.SingleLinePatternLayout as demonstrated here in order to prevent such attacks.

+
+
+ +
+

==Monitoring

+
+
+

For monitoring a complex application landscape it is crucial to have an exact overview which applications are up and running and which are not and why. +In devonfw we only focus on topics which are most important when developing production-ready applications. +On a high level view we strongly suggest to separate the application to be monitored from the monitoring system itself. +Therefore, your application should concentrate on providing app specific data for the monitoring. +Aspects such as aggregation, visualization, search, alerting, etc. should be addressed outside of your app by a monitoring system product. +There are many products providing such a monitoring system like checkmk, icinga, SkyWalking, etc. +Please note that there is a huge list of such products and devonfw is not biased or aims to make a choice for you. +Instead please search and find the products that fit best for your requirements and infrastructure.

+
+
+
+
+

1.49. Types of monitoring

+
+

As monitoring covers a lot of different aspects we separate the following types of monitoring and corresponding data:

+
+
+
    +
  • +

    Log-monitoring
    +is about collecting and monitoring the logs of all apps and containers in your IT landscape. It is suitable for events such as an HTTP request with its URL, resulting status code and duration in milliseconds. Your monitoring may not react to such data in realtime. Instead it may take a delay of one or a few seconds.

    +
  • +
  • +

    Infrastructure monitoring
    +is about monitoring the (hardware) infrastructure with measures like usage of CPU, memory, disc-space, etc. This is a pure operational task and your app should have nothing to do with this. In other words it is a waste if your app tries to monitor these aspects as existing products can do this much better and your app will only see virtual machines and is unable to see the physical infrastructure.

    +
  • +
  • +

    Health check
    +is about providing internal data about the current health of your app. Typically you provide sensors with health status per component or interface to neighbour service (database connectivity, etc.).

    +
  • +
  • +

    Application Performance Monitoring
    +is about measuring performance and tracing down performance issues.

    +
  • +
+
+
+
+

1.50. Health-Check

+
+

The idea of a health check is to provide monitoring data about the current health status of your application. +This allows to integrate this specific data into the monitoring system used for your IT landscape. +In order to keep the monitoring simple and easy to integrate consider using the following best practices:

+
+
+
    +
  • +

    Use simple and established protocols such as REST instead of JMX via RMI.

    +
  • +
  • +

    Consider using recent standards such as microprofile-health.

    +
  • +
  • +

    Consider to drop access-control for your monitoring interfaces and for security prevent external access to it in your infrastructure (loadbalancers or gateways). Monitoring is only for usage within an IT landscape internally. It does not make sense for externals and end-users to access your app for reading monitoring data from a random node decided by a loadbalancer. Further, external access can easily lead to sensitive data exposure.

    +
  • +
  • +

    Consider to define different end-points per usage-scenario. So if you want the loadbalancer to ask your app monitoring for availability of each node then create a separate service URL that only provides OK or anything else for failure (NOK, 404, 500, timeout). Do not mix this with a health-check that needs more detailed information.

    +
  • +
  • +

    Also do not forget about basic features such as providing the name and the release version of your application.

    +
  • +
  • +

    Be careful to automate decisions based on monitoring and health checks. It easily turns out to be stupid if you automatically restart your pod or container because of some monitoring indicator. In the worst case a failure of a central component will cause your health-check to report down for all apps and as a result all your containers will be restarted frequently. Instead of curing problems such decisions will cause much more harm and trouble.

    +
  • +
  • +

    Avoid causing noticeable load with your monitoring and health-check itself. In many cases it is better to use log-monitoring or to collect monitoring data from use-cases that happen in your app anyway. If you create dummy read and write requests in your monitoring implementation you will easily turn it into a DOS-attack.

    +
  • +
+
+
+

For spring you can simply integrate app monitoring and health check via spring-boot-actuator.

+
+
+

For quarkus you can simply integrate app monitoring via micrometer or smallrye-metrics and health check via smallrye-health.

+
+ +
+

==Log-Monitoring

+
+
+

Log-monitoring is an aspect of monitoring with a strict focus on logging. +With trends towards IT landscapes with many but much smaller apps the classical approach to write log-files to the disc and let operators read those via SSH became entirely obsolete. +Nowadays we have up to hundreds or even thousands of apps that themselves are clustered into multiple nodes. +Therefore you should establish a centralized log monitoring system in the environment and let all your nodes log directly into that system. +This approach gives the following benefits:

+
+
+
    +
  • +

    all log information available in one place

    +
  • +
  • +

    full-text search across all logfiles

    +
  • +
  • +

    ability to automatically trigger alerts from specific log patterns

    +
  • +
  • +

    ability to do data-mining on logs and visualize in dashboards

    +
  • +
+
+
+
+

1.51. Options for log-monitoring

+
+

Typical products for such a log monitoring system are:

+
+
+ +
+
+

In devonfw we are not biased for any of these products. Therefore, feel free to make your choice according to the requirements of your project.

+
+
+

For Quarkus applications, you can get an insight into the topic by reading the guide about centralized log management.

+
+
+
+

1.52. API for log-monitoring

+
+

The "API" for logging to a log-monitoring system for your app is pretty simple:

+
+
+
    +
  • +

    Write your logs to standard out.

    +
  • +
  • +

    Use JSON logging as format.

    +
  • +
+
+
+

Then the container infrastructure can automatically collect your logs from standard out and directly feed those into the log monitoring system. +As a result, your app does not need to know anything about your log monitoring system and logging becomes most simple. +Further, if you do not write log-files anymore, you might not need to write any other files and therefore may not even need write permissions on the filesystem of your container. +In such case an attacker who may find a vulnerability in your app will have less attack surface in case he can not write any file.

+
+ +
+

==Application Performance Management

+
+
+

This guide gives hints how to manage, monitor and analyse performance of Java applications.

+
+
+
+

1.53. Temporary Analysis

+
+

If you are facing performance issues and want to do a punctual analysis we recommend you to use glowroot. It is ideal in cases where monitoring in your local development environment is suitable. However, it is also possible to use it in your test environment. It is entirely free and open-source. Still it is very powerful and helps to trace down bottlenecks. To get a first impression of the tool take a look at the demo.

+
+
+
JEE/WTP
+
+

In case you are forced to use a JEE application server and want to do a temporary analysis you can double click your server instance from the servers view in Eclipse and click on the link Open launch configuration in order to add the -javaagent JVM option.

+
+
+
+
+

1.54. Regular Analysis

+
+

In case you want to manage application performance regularly we recommend to use JavaMelody that can be integrated into your application. More information on javamelody is available on the JavaMelody Wiki

+
+
+
+

1.55. Alternatives

+
+ +
+
+ +
+

==Security +Security is todays most important cross-cutting concern of an application and an enterprise IT-landscape. We seriously care about security and give you detailed guides to prevent pitfalls, vulnerabilities, and other disasters. While many mistakes can be avoided by following our guidelines you still have to consider security and think about it in your design and implementation. The security guide will not only automatically prevent you from any harm, but will provide you hints and best practices already used in different software products.

+
+
+

An important aspect of security is proper authentication and authorization as described in access-control. In the following we discuss about potential vulnerabilities and protection to prevent them.

+
+
+
+

1.56. Vulnerabilities and Protection

+
+

Independent from classical authentication and authorization mechanisms there are many common pitfalls that can lead to vulnerabilities and security issues in your application such as XSS, CSRF, SQL-injection, log-forging, etc. A good source of information about this is the OWASP. +We address these common threats individually in security sections of our technological guides as a concrete solution to prevent an attack typically depends on the according technology. The following table illustrates common threats and contains links to the solutions and protection-mechanisms provided by the devonfw:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 7. Security threats and protection-mechanisms
ThreatProtectionLink to details

A1 Injection

validate input, escape output, use proper frameworks

SQL Injection

A2 Broken Authentication

encrypt all channels, use a central identity management with strong password-policy

Authentication

A3 Sensitive Data Exposure

Use secured exception facade, design your data model accordingly

REST exception handling

A4 XML External Entities

Prefer JSON over XML, ensure FSP when parsing (external) XML

XML guide

A5 Broken Access Control

Ensure proper authorization for all use-cases, use @DenyAll as default to enforce

Access-control guide especially method authorization

A6 Security Misconfiguration

Use devon4j application template and guides to avoid

tutorial-newapp and sensitive configuration

A7 Cross-Site Scripting

prevent injection (see A1) for HTML, JavaScript and CSS and understand same-origin-policy

client-layer

A8 Insecure Deserialization

Use simple and established serialization formats such as JSON, prevent generic deserialization (for polymorphic types)

JSON guide especially inheritance, XML guide

A9 Using Components with Known Vulnerabilities

subscribe to security newsletters, recheck products and their versions continuously, use devonfw dependency management

CVE newsletter and dependency check

A10 Insufficient_Logging & Monitoring

Ensure to log all security related events (login, logout, errors), establish effective monitoring

Logging guide and monitoring guide

Insecure Direct Object References

Using direct object references (IDs) only with appropriate authorization

logic-layer

Cross-Site Request Forgery (CSRF)

secure mutable service operations with an explicit CSRF security token sent in HTTP header and verified on the server

CSRF guide

Log-Forging

Escape newlines in log messages

logging security

Unvalidated Redirects and Forwards

Avoid using redirects and forwards, in case you need them do a security audit on the solution.

devonfw proposes to use rich-clients (SPA/RIA). We only use redirects for login in a safe way.

+
+
+

1.57. Advanced Security

+
+

While OWASP Top 10 covers the basic aspects of application security, there are advanced standards such as AVS. +In devonfw we address this in the +Application Security Quick Solution Guide.

+
+
+
+

1.58. Tools

+
+
Dependency Check
+
+

To address the threat Using Components with Known Vulnerabilities we recommend to use OWASP dependency check that ships with a maven plugin and can analyze your dependencies for known CVEs. +In order to run this check, you can simply call this command on any maven project:

+
+
+
+
mvn org.owasp:dependency-check-maven:6.1.5:aggregate
+
+
+
+ + + + + +
+ + +The version is just for completeness. You should check yourself for using a recent version of the plugin. +
+
+
+

If you build a devon4j spring application from our app-template you can activate the dependency check even easier with the security profile:

+
+
+
+
mvn clean install -P security
+
+
+
+

This does not run by default as it causes some overhead for the build performance. However, consider to build this in your CI at least nightly. +After the dependency check is performed, you will find the results in target/dependency-check-report.html of each module. The report will also be generated when the site is built (mvn site) even without the profile.

+
+
+
+
Penetration Testing
+
+

For penetration testing (testing for vulnerabilities) of your web application, we recommend the following tools:

+
+
+ +
+
+ +
+

==CORS support

+
+
+

When you are developing Javascript client and server application separately, you have to deal with cross domain issues. We have to request from an origin domain distinct to the target domain and the browser does not allow this.

+
+
+

So, we need to prepare the server side to accept requests from other domains. We need to cover the following points:

+
+
+
    +
  • +

    Accept request from other domains.

    +
  • +
  • +

    Accept devonfw used headers like X-CSRF-TOKEN or correlationId.

    +
  • +
  • +

    Be prepared to receive secured request (cookies).

    +
  • +
+
+
+

It is important to note that if you are using security in your request (sending cookies) you have to set withCredentials flag to true in your client side request and deal with special IE8 characteristics.

+
+
+

For more information about CORS see here. Information about the CORS headers can be found here.

+
+
+
+
+

1.59. Configuring CORS support

+
+

To enable CORS support for your application, see the advanced guides. For Spring applications see here. For Quarkus follow the official Quarkus guide.

+
+
+
+

1.60. Configuration with service mesh

+
+

If you are using a service mesh, you can also define your CORS policy directly there. Here is an example from Istio.

+
+
+ +
+

==Java Development Kit

+
+
+

The Java Development Kit is an implementation of the Java platform. It provides the Java Virtual Machine (JVM) and the Java Runtime Environment (JRE).

+
+
+
+

1.61. Editions

+
+

The JDK exists in different editions:

+
+
+ +
+
+

As Java is evolving and also complex, maintaining a JVM requires a lot of energy. +Therefore many alternative JDK editions are unable to cope with this and support latest Java versions and according compatibility. +Unfortunately OpenJDK only maintains a specific version of Java for a relatively short period of time before moving to the next major version. +In the end, this technically means that OpenJDK is continuous beta and can not be used in production for reasonable software projects. +As OracleJDK changed its licensing model and can not be used for commercial usage even during development, things can get tricky. +You may want to use OpenJDK for development and OracleJDK only in production. +However, e.g. OpenJDK 11 never released a version that is stable enough for reasonable development (e.g. javadoc tool is broken and fixes are not available for OpenJDK 11 - fixed in 11.0.3 which is only available as OracleJDK 11 or you need to go to OpenJDK 12+, which has other bugs) so in the end there is no working release of OpenJDK 11. +This more or less forces you to use OracleJDK, which requires you to buy a subscription so you can use it for commercial development. +However, there is AdoptOpenJDK that provides forked releases of OpenJDK with bug-fixes, which might be an option. +Anyhow, as you want to have your development environment close to production, the productively used JDK (most likely OracleJDK) should be preferred also for development.

+
+
+
+

1.62. Upgrading

+
+

Until Java 8 compatibility was one of the key aspects for Java version updates (after the mess on the Swing updates with Java2 many years ago). +However, Java 9 introduced a lot of breaking changes. +This documentation wants to share the experience we collected in devonfw when upgrading from Java 8 to newer versions. +First of all we separate runtime changes that you need if you want to build your software with JDK 8 but such that it can also run on newer versions (e.g. JRE 11) +from changes required to also build your software with more recent JDKs (e.g. JDK 11 or 12).

+
+
+
Runtime Changes
+
+

This section describes required changes to your software in order to make it run also with versions newer than Java 8.

+
+
+
Classes removed from JDK
+
+

The first thing that most users hit when running their software with newer Java versions is a ClassNotFoundException like this:

+
+
+
+
Caused by: java.lang.ClassNotFoundException: javax.xml.bind.JAXBException
+
+
+
+

As Java 9 introduced a module system with Jigsaw, the JDK that has been a monolithic mess is now a well-defined set of structured modules. +Some of the classes that used to come with the JDK moved to modules that where not available by default in Java 9 and have even been removed entirely in later versions of Java. +Therefore you should simply treat such code just like any other 3rd party component that you can add as a (maven) dependency. +The following table gives you the required hints to make your software work even with such classes / modules removed from the JDK (please note that the specified version is just a suggestion that worked, feel free to pick a more recent or more appropriate version):

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 8. Dependencies for classes removed from Java 8 since 9+
ClassGroupIdArtifactIdVersion

javax.xml.bind.*

javax.xml.bind

jaxb-api

2.3.1

com.sun.xml.bind.*

org.glassfish.jaxb

jaxb-runtime

2.3.1

java.activation.*

javax.activation

javax.activation-api

1.2.0

java.transaction.*

javax.transaction

javax.transaction-api

1.2

java.xml.ws.*

javax.xml.ws

jaxws-api

2.3.1

javax.jws.*

javax.jws

javax.jws-api

1.1

javax.annotation.*

javax.annotation

javax.annotation-api

1.3.2

+
+
+
3rd Party Updates
+
+

Further, internal and unofficial APIs (e.g. sun.misc.Unsafe) have been removed. +These are typically not used by your software directly but by low-level 3rd party libraries like asm that need to be updated. +Also simple things like the Java version have changed (from 1.8.x to 9.x, 10.x, 11.x, 12.x, etc.). +Some 3rd party libraries were parsing the Java version in a very naive way making them unable to be used with Java 9+:

+
+
+
+
Caused by: java.lang.NullPointerException
+   at org.apache.maven.surefire.shade.org.apache.commons.lang3.SystemUtils.isJavaVersionAtLeast (SystemUtils.java:1626)
+
+
+
+

Therefore the following table gives an overview of common 3rd party libraries that have been affected by such breaking changes and need to be updated to at least the specified version:

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 9. Minimum recommended versions of common 3rd party for Java 9+
GroupIdArtifactIdVersionIssue

org.apache.commons

commons-lang3

3.7

LANG-1365

cglib

cglib

3.2.9

102, 93, 133

org.ow2.asm

asm

7.1

2941

org.javassist

javassist

3.25.0-GA

194, 228, 246, 171

+
+
+
ResourceBundles
+
+

For internationalization (i18n) and localization (l10n) ResourceBundle is used for language and country specific texts and configurations as properties (e.g. MyResourceBundle_de.properties). With Java modules there are changes and impacts you need to know to get things working. The most important change is documented in the JavaDoc of ResourceBundle. However, instead of using ResourceBundleProvider and refactoring your entire code causing incompatibilities, you can simply put the resource bundles in a regular JAR on the classpath rather than a named module (or into the launching app). +If you want to implement (new) Java modules with i18n support, you can have a look at mmm-nls.

+
+
+
+
+
Buildtime Changes
+
+

If you also want to change your build to work with a recent JDK you also need to ensure that test frameworks and maven plugins properly support this.

+
+
+
Findbugs
+
+

Findbugs does not work with Java 9+ and is actually a dead project. +The new findbugs is SpotBugs. +For maven the new solution is spotbugs-maven-plugin:

+
+
+
+
<plugin>
+  <groupId>com.github.spotbugs</groupId>
+  <artifactId>spotbugs-maven-plugin</artifactId>
+  <version>3.1.11</version>
+</plugin>
+
+
+
+
+
Test Frameworks
+ + ++++++ + + + + + + + + + + + + + + + + +
Table 10. Minimum recommended versions of common 3rd party test frameworks for Java 9+
GroupIdArtifactIdVersionIssue

org.mockito

mockito-core

2.23.4

1419, 1696, 1607, 1594, 1577, 1482

+
+
+
Maven Plugins
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 11. Minimum recommended versions of common maven plugins for Java 9+
GroupIdArtifactId(min.) VersionIssue

org.apache.maven.plugins

maven-compiler-plugin

3.8.1

x

org.apache.maven.plugins

maven-surefire-plugin

2.22.2

SUREFIRE-1439

org.apache.maven.plugins

maven-surefire-report-plugin

2.22.2

SUREFIRE-1439

org.apache.maven.plugins

maven-archetype-plugin

3.1.0

x

org.apache.maven.plugins

maven-javadoc-plugin

3.1.0

x

org.jacoco

jacoco-maven-plugin

0.8.3

663

+
+
+
Maven Usage
+
+

With Java modules you can not run Javadoc standalone anymore or you will get this error when running mvn javadoc:javadoc:

+
+
+
+
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-javadoc-plugin:3.1.1:javadoc (default-cli) on project mmm-base: An error has occurred in Javadoc report generation:
+[ERROR] Exit code: 1 - error: module not found: io.github.mmm.base
+[ERROR]
+[ERROR] Command line was: /projects/mmm/software/java/bin/javadoc @options @packages @argfile
+
+
+
+

As a solution or workaround you need to include the compile goal into your build lifecycle so the module-path is properly configured:

+
+
+
+
mvn compile javadoc:javadoc
+
+
+
+
+
+
+ +
+

We want to give credits and say thanks to the following articles that have been there before and helped us on our way:

+
+ +
+ +
+

==JEE

+
+
+

This section is about Java Enterprise Edition (JEE). +Regarding to our key principles we focus on open standards. +For Java this means that we consider official standards from Java Standard and Enterprise Edition as first choice for considerations. +Therefore we also decided to recommend JAX-RS over SpringMVC as the latter is proprietary. +Only if an existing Java standard is not suitable for current demands such as Java Server Faces (JSF), we do not officially recommend it (while you are still free to use it if you have good reasons to do so). +In all other cases we officially suggest the according standard and use it in our guides, code-samples, sample application, modules, templates, etc. +Examples for such standards are JPA, JAX-RS, JAX-WS, JSR330, JSR250, JAX-B, etc.

+
+
+
+

1.64. Application-Server

+
+

We designed everything based on standards to work with different technology stacks and servlet containers. +However, we strongly encourage to use modern and lightweight frameworks such as spring or quarkus. +You are free to decide for a JEE application server but here is a list of good reasons for our decision:

+
+
+
    +
  • +

    Up-to-date

    +
    +

    With spring or quarkus you easily keep up to date with evolving technologies (microservices, reactive, NoSQL, etc.). +Most application servers put you in a jail with old legacy technology. +In many cases you are even forced to use a totally outdated version of java (JVM/JDK). +This may even cause severe IT-Security vulnerabilities but with expensive support you might get updates. +Also with lightweight open-source frameworks you need to be aware that for IT-security you need to update regularly, which can cost quite a lot of additional maintenance effort.

    +
    +
  • +
  • +

    Development speed

    +
    +

    With spring-boot you can implement and especially test your individual logic very fast. Starting the app in your IDE is very easy, fast, and realistic (close to production). You can easily write JUnit tests that startup your server application to e.g. test calls to your remote services via HTTP fast and easy. For application servers you need to bundle and deploy your app what takes more time and limits you in various ways. We are aware that this has improved in the past but also spring continuously improves and is always way ahead in this area. Further, with spring you have your configurations bundled together with the code in version control (still with ability to handle different environments) while with application servers these are configured externally and can not be easily tested during development.

    +
    +
  • +
  • +

    Documentation

    +
    +

    Spring and also quarkus have an extremely open and active community. +There is documentation for everything available for free on the web. +You will find solutions to almost any problem on platforms like stackoverflow. +If you have a problem you are only a google search away from your solution. +This is very much different for proprietary application server products.

    +
    +
  • +
  • +

    Helpful Exception Messages

    +
    +

    Especially spring is really great for developers on exception messages. +If you do something wrong you get detailed and helpful messages that guide you to the problem or even the solution. +This is not as great in application servers.

    +
    +
  • +
  • +

    Future-proof

    +
    +

    Spring has evolved really awesome over time. +Since its 1.0 release in 2004 spring has continuously been improved and always caught up with important trends and innovations. +Even in critical situations, when the company behind it (interface21) was sold, spring went on perfectly. +Quarkus on the other hand is relatively new. +It does not have to carry a large legacy history and is therefore most state-of-the-art for modern projects esp. in cloud environments. +JEE went through a lot of trouble and crisis. +Just look at the EJB pain stories. +This happened often in the past and also recent. +See JEE 8 in crisis.

    +
    +
  • +
  • +

    Free

    +
    +

    Spring and quarkus including their ecosystems are free and open-source. +It still perfectly integrates with commercial solutions for specific needs. +Most application servers are commercial and cost a lot of money. +As of today the ROI for this is in question.

    +
    +
  • +
  • +

    Cloud-native

    +
    +

    Quarkus is designed for cloud-native projects from the start. +With spring this is also available via spring-native. +Using an application server will effectively prevent you from going to the cloud smoothly.

    +
    +
  • +
  • +

    Fun

    +
    +

    If you go to conferences or ask developers you will see that spring or quarkus is popular and fun. +If new developers are forced to use an old application server product they will be less motivated or even get frustrated. +Especially in today’s agile projects this is a very important aspect. +In the end you will get into trouble with maintenance on the long run if you rely on a proprietary application server.

    +
    +
  • +
+
+
+

Of course the vendors of application servers will tell you a different story. +This is simply because they still make a lot of money from their products. +We do not get paid from application servers nor from spring, quarkus or any other IT product company. +We are just developers who love to build great systems. +A good reason for application servers is that they combine a set of solutions to particular aspects into one product that helps to standardize your IT. +However, devonfw fills exactly this gap for the spring and quarkus ecosystems in a very open and flexible way. +That said, there is one important aspect that you need to understand and be aware of:

+
+
+

Some big companies decided for a specific application server as their IT strategy. +They may have hundreds of apps running with this application server. +All their operators and developers have learned a lot of specific skills for this product and are familiar with it. +If you are implementing yet another (small) app in this context it could make sense to stick with this application server. +However, also they have to be aware that with every additional app they increase their technical debt. +So actively help your customer and consult them to make the right choices for the future.

+
+
+ +
+

==Validation

+
+
+

Validation is about checking syntax and semantics of input data. Invalid data is rejected by the application. +Therefore validation is required in multiple places of an application. E.g. the GUI will do validation for usability reasons to assist the user, early feedback and to prevent unnecessary server requests. +On the server-side validation has to be done for consistency and security.

+
+
+

In general we distinguish these forms of validation:

+
+
+
    +
  • +

    stateless validation will produce the same result for given input at any time (for the same code/release).

    +
  • +
  • +

    stateful validation is dependent on other states and can consider the same input data as valid in one case and as invalid in another.

    +
  • +
+
+
+
+

1.65. Stateless Validation

+
+

For regular, stateless validation we use the JSR303 standard that is also called bean validation (BV). +Details can be found in the specification. +As implementation we recommend hibernate-validator.

+
+
+
Example
+
+

A description of how to enable BV for spring applications can be found in the relevant Spring documentation. A guide you can use to integrate validation in Quarkus applications can be found here. For a quick summary follow these steps:

+
+
+
    +
  • +

    Make sure that hibernate-validator is located in the classpath by adding a dependency to the pom.xml.

    +
  • +
+
+
+
Listing 9. spring
+
+
    <dependency>
+      <groupId>org.hibernate</groupId>
+      <artifactId>hibernate-validator</artifactId>
+    </dependency>
+
+
+
+
Listing 10. quarkus
+
+
    <dependency>
+      <groupId>io.quarkus</groupId>
+      <artifactId>quarkus-hibernate-validator</artifactId>
+    </dependency>
+
+
+
+
    +
  • +

    For methods to validate go to their declaration and add constraint annotations to the method parameters.

    +
    +

    In spring applications you can add the @Validated annotation to the implementation (spring bean) to be validated (this is an annotation of the spring framework, so it's not available in the Quarkus context). The standard use case is to annotate the logic layer implementation, i.e. the use case implementation or component facade in case of simple logic layer pattern. Thus, the validation will be executed for service requests as well as batch processing.

    +
    +
    +
      +
    • +

      @Valid annotation to the arguments to validate (if that class itself is annotated with constraints to check).

      +
    • +
    • +

      @NotNull for required arguments.

      +
    • +
    • +

      Other constraints (e.g. @Size) for generic arguments (e.g. of type String or Integer). However, consider to create custom datatypes and avoid adding too much validation logic (especially redundant in multiple places).

      +
    • +
    +
    +
  • +
+
+
+
Listing 11. BookingmanagementRestServiceImpl.java
+
+
@Validated
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+  ...
+  public BookingEto saveBooking(@Valid BookingCto booking) {
+  ...
+
+
+
+
    +
  • +

    Finally add appropriate validation constraint annotations to the fields of the ETO class.

    +
  • +
+
+
+
Listing 12. BookingCto.java
+
+
  @Valid
+  private BookingEto booking;
+
+
+
+
Listing 13. BookingEto.java
+
+
  @NotNull
+  @Future
+  private Timestamp bookingDate;
+
+
+
+

A list with all bean validation constraint annotations available for hibernate-validator can be found here. In addition it is possible to configure custom constraints. Therefore it is necessary to implement an annotation and a corresponding validator. A description can also be found in the Spring documentation or with more details in the hibernate documentation.

+
+
+ + + + + +
+ + +Bean Validation in Wildfly >v8: Wildfly v8 is the first version of Wildfly implementing the JEE7 specification. It comes with bean validation based on hibernate-validator out of the box. In case someone is running Spring in Wildfly for whatever reasons, the spring based annotation @Validated would duplicate bean validation at runtime and thus should be omitted. +
+
+
+
+
GUI-Integration
+
+

TODO

+
+
+
+
Cross-Field Validation
+
+

BV has poor support for this. Best practice is to create and use beans for ranges, etc. that solve this. A bean for a range could look like so:

+
+
+
+
public class Range<V extends Comparable<V>> {
+
+  private V min;
+  private V max;
+
+  public Range(V min, V max) {
+
+    super();
+    if ((min != null) && (max != null)) {
+      int delta = min.compareTo(max);
+      if (delta > 0) {
+        throw new ValueOutOfRangeException(null, min, min, max);
+      }
+    }
+    this.min = min;
+    this.max = max;
+  }
+
+  public V getMin() ...
+  public V getMax() ...
+
+
+
+
+
+

1.66. Stateful Validation

+
+

For complex and stateful business validations we do not use BV (possible with groups and context, etc.) but follow KISS and just implement this on the server in a straightforward manner. +An example is the deletion of a table in the example application. Here the state of the table must be checked first:

+
+
+

BookingmanagementImpl.java

+
+
+
+
  private void sendConfirmationEmails(BookingEntity booking) {
+
+    if (!booking.getInvitedGuests().isEmpty()) {
+      for (InvitedGuestEntity guest : booking.getInvitedGuests()) {
+        sendInviteEmailToGuest(guest, booking);
+      }
+    }
+
+    sendConfirmationEmailToHost(booking);
+  }
+
+
+
+

Implementing this small check with BV would be a lot more effort.

+
+
+ +
+

==Bean-Mapping

+
+
+

For decoupling, you sometimes need to create separate objects (beans) for a different view. E.g. for an external service, you will use a transfer-object instead of the persistence entity so internal changes to the entity do not implicitly change or break the service.

+
+
+

Therefore you have the need to map similar objects, which creates a copy. This also has the benefit that modifications to the copy have no side-effect on the original source object. However, to implement such mapping code by hand is very tedious and error-prone (if new properties are added to beans but not to mapping code):

+
+
+
+
public UserEto mapUser(UserEntity source) {
+  UserEto target = new UserEto();
+  target.setUsername(source.getUsername());
+  target.setEmail(source.getEmail());
+  ...
+  return target;
+}
+
+
+
+

Therefore we are using a BeanMapper for this purpose that makes our lives a lot easier. +There are several bean mapping frameworks with different approaches.

+
+
+

For a devon4j-spring application we recommend Orika, follow Spring Bean-Mapping for an introduction to Orika and Dozer in a devon4j-spring context application.

+
+
+ + + + + +
+ + +devon4j started with Dozer as framework for Spring applications and still supports it. However, we now recommend Orika (for new projects) as it is much faster (see Performance of Java Mapping Frameworks). +
+
+
+

For a Quarkus application we recommend Mapstruct, follow Quarkus Bean-Mapping for an introduction to Mapstruct in a quarkus context application.

+
+ +
+

==Lombok

+
+
+

Lombok is a library that works with an annotation processor and will generate code for you to save you some time and reduce the amount of boilerplate code in your project. Lombok can generate getter and setter, equals methods, automate your logging variables for your classes, and more. Follow the list of all the features provided by Lombok to get an overview.

+
+
+
+

1.67. Lombok Dependency

+
+

To get access to the Lombok library just add the following dependency to the POM.xml.

+
+
+

The Lombok dependency:

+
+
+
+
<dependency>
+	<groupId>org.projectlombok</groupId>
+	<artifactId>lombok</artifactId>
+	<version>1.18.20</version>
+</dependency>
+
+
+
+

To get Lombok working with your current IDE you should also install the Lombok addon. Follow the Eclipse installation guide, there are also guides for other supported IDEs.

+
+
+
+

1.68. Lombok with Mapstruct

+
+

MapStruct takes advantage of generated getters, setters, and constructors from Lombok and uses them to +generate the mapper implementations. Lombok is also an annotation processor and since version 1.18.14 both frameworks are working together. Just add the lombok-mapstruct-binding to your POM.xml.

+
+
+

The Lombok annotation processor and the lombok-mapstruct-binding

+
+
+
+
<dependency>
+	<groupId>org.projectlombok</groupId>
+	<artifactId>lombok-mapstruct-binding</artifactId>
+	<version>0.2.0</version>
+</dependency>
+
+<plugin>
+	<groupId>org.apache.maven.plugins</groupId>
+	<artifactId>maven-compiler-plugin</artifactId>
+	<version>3.8.1</version>
+	<configuration>
+		<source>1.8</source>
+		<target>1.8</target>
+		<annotationProcessorPaths>
+			<path>
+				<groupId>org.projectlombok</groupId>
+				<artifactId>lombok</artifactId>
+				<version>1.18.4</version>
+			</path>
+			<path>
+				<groupId>org.projectlombok</groupId>
+				<artifactId>lombok-mapstruct-binding</artifactId>
+				<version>0.2.0</version>
+			</path>
+		</annotationProcessorPaths>
+	</configuration>
+</plugin>
+
+
+
+

In our quarkus reference project you can get a look into the usage of both frameworks.

+
+
+
+

1.69. Lombok Usage

+
+

Lombok can be used like any other annotation processor and will be shown in the simple example below to generate getter and setter for a Product Entity.

+
+
+
+
@Getter
+@Setter
+public class Product{
+
+    private String title;
+    private String description;
+    private BigDecimal price;
+}
+
+
+
+

For advanced Lombok usage follow the Baeldung Lombok guide or just read the Lombok javadoc

+
+
+ +
+

==OpenAPI

+
+
+

The OpenAPI Specification (OAS) defines a standard for describing RESTful web services in a machine- and human-readable format. OpenAPI allows REST APIs to be defined in a uniform manner. +Technically, an OpenAPI document is written in YAML or JSON format. The specification defines the structure of a REST API by describing attributes such as path information, response codes, and return types. Some examples can be found here. +Apart from documenting the API, this schema then also acts as a contract between provider and consumers, guaranteeing interoperability between various technologies.

+
+
+

OpenAPI is often used in combination with Swagger. Swagger is a set of tools build around OpenAPI, that help developers to design and document their REST APIs. +The most common tool is the Swagger UI, which uses the OpenAPI specification to create a graphical interface of the REST API that you can also interact with. Check out the Swagger online editor to get a feeling for it.

+
+
+ + + + + +
+ + +
+

Swagger and OpenAPI: Swagger is a former specification, based on which the OpenAPI was created. Swagger 2.0 is still commonly used for describing APIs. OpenAPI is an open-source collaboration and it started from version 3.0.0 (semver).

+
+
+
+
+

There are many tools that work with OpenAPI: code generators, documentation tools, validators etc.

+
+
+
+

1.70. OpenAPI generation

+
+

There are several extensions you can use in your project to automatically generate the OpenAPI specifications and Swagger UI from your REST API (code-first approach). devon4j recommends the following two extensions/plugins to use:

+
+
+
    +
  • +

    Smallrye OpenAPI extension

    +
  • +
  • +

    ServicedocGen maven plugin

    +
  • +
+
+
+
Smallrye OpenAPI
+
+

Quarkus provides OpenAPI support through Smallrye OpenAPI extension:

+
+
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-smallrye-openapi</artifactId>
+</dependency>
+
+
+
+

After adding the extension to your project, you can access the Swagger UI by navigating to /q/swagger-ui.

+
+
+

The OpenAPI specification can be accessed by requesting /q/openapi.

+
+
+

Smallrye OpenAPI is compliant with MicroProfile OpenAPI. You can add MicroProfile annotations to further describe your REST endpoints and extend the OpenAPI documentation. +More information for this can be found here or here.

+
+
+ + + + + +
+ + +
+

Quarkus recommends using this extension and you can document your APIs in great detail by using the MicroProfile annotations. The downside to this is that using these annotations will blow up your code and you will have some duplicate information in it. +If you don’t want to specify the REST API again with all this annotation based information, we also recommend taking a look at the ServicedocGen Maven plugin for your Quarkus applications when implementing JAX-RS APIs.

+
+
+
+
+
+
ServicedocGen Maven Plugin
+
+

The ServicedocGen maven plugin can be used within both Spring and Quarkus applications. +It works a bit differently than the Smallrye extensions mentioned above. The plugin analyzes the REST API and its JavaDoc and then generates the OpenAPI specification and the Swagger UI as static files. So no Swagger or MicroProfile annotations have to be added.

+
+
+

The plugin can be configured in the pom.xml file of your application as follows:

+
+
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>org.codehaus.mojo</groupId>
+      <artifactId>servicedocgen-maven-plugin</artifactId>
+      <version>1.0.0</version>
+      <executions>
+        <execution>
+          <goals>
+            <goal>generate</goal>
+          </goals>
+        </execution>
+      </executions>
+      <configuration>
+        <descriptor>
+          <info>
+            <title>...</title>
+            <description>...</description>
+          </info>
+          <host>...</host>
+          <port>...</port>
+          <basePath>...</basePath>
+          <schemes>
+            <scheme>...</scheme>
+          </schemes>
+        </descriptor>
+      </configuration>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+

In the configuration section you have to define additional information to generate the OpenAPI specification correctly. An example can be found in our Quarkus reference application. +When building the application, an OpenApi.yaml and a SwaggerUI.html file are created in the /target/site folder. To make the Swagger UI available in the browser, the file must be served by some servlet.

+
+
+ +
+

==Spring

+
+
+

Spring is the most famous and established Java framework. +It is fully supported by devonfw as an option and alternative to quarkus.

+
+
+
+
+

1.71. Guide to the Reader

+
+

Dependent on the intention you are reading this document, you might be most interested in the following chapters:

+
+
+
    +
  • +

    If you are not yet familiar with Spring, you may be interested in pros and cons of Spring. Also take a look at the official Spring website.

    +
  • +
  • +

    If you already have experience developing with Spring but are new to devon4j, take a look at devon4j’s recommendations on general best practices. Check out the chapters on architecture design, project structuring and coding conventions. Follow the referenced links to go deeper into a topic.

    +
  • +
  • +

    If you have already developed with devon4j and Spring and need more information on a specific topic, check out the devon4j guides for Spring. If you don’t find what you are looking for there, check out the general section. devon4j uses general solutions for Java, so solutions for both Spring and Quarkus are documented there.

    +
  • +
  • +

    If you want to get started or create your first Spring application using devon4j, check out the guide about creating a new application or the Jump the Queue and My Thai Star reference applications.

    +
  • +
+
+
+
+

1.72. Pros

+
+

Spring offers the following benefits:

+
+
+
    +
  • +

    highly flexible
    +Spring is famous for its great flexibility. You can customize and integrate nearly everything.

    +
  • +
  • +

    well established
    +While JEE application servers including very expensive commercial products turned out to be a dead-end, spring has guided projects through the changing trends of IT throughout decades. It may be the framework with the longest history track and popularity. As a result you can easily find developers, experts, books, articles, etc. about spring.

    +
  • +
  • +

    non-invasive and not biased
    +Spring became famous for its non-invasive coding based on patterns instead of hard dependencies. It gives you a lot of freedom and avoids tight coupling of your (business) code.

    +
  • +
+
+
+

See Why Spring? for details.

+
+
+
+

1.73. Cons

+
+

Spring has the following drawbacks:

+
+
+
    +
  • +

    history and legacy
    +Due to the pro of its long established history, spring also carries a lot of legacy. As a result there are many ways to do the same thing while some options may be discouraged. Developers needs some guidance (e.g. via devon4j) as they may enter pitfalls and dead-ends when choosing the first solution they found on google or stackoverflow.

    +
  • +
  • +

    lost lead in cloud-native
    +While for the last decades spring was leading innovation in Java app development, it seems that with the latest trends and shifts such as cloud-native, it has been overtaken by frameworks like quarkus. However, spring is trying to catch up with spring-native.

    +
  • +
+
+
+
+

1.74. Spring-Boot

+
+

Spring-boot is a project and initiative within the spring-ecosystem that brought a lot of innovation and simplification into app development on top of spring. +As of today we typically use the terms spring and spring-boot rather synonymously as we always use spring together with spring-boot.

+
+
+
+

1.75. Spring-Native

+
+

Spring-native adds cloud-native support to the spring ecosystem and allows to build a spring app as cloud-native image via GraalVM. +You may also consider Quarkus if you are interested in building cloud-native images. For a comparison of both Spring Native and Quarkus, you may refer to our Spring Native vs. Quarkus guide.

+
+ +
+

==Components

+
+
+

Following separation-of-concerns we divide an application into components using our package-conventions and project structure. +As described by the architecture each component is divided into layers as described in the project structure. +Please note that a component will only have the required layers. +So a component may have any number from one to all layers.

+
+
+

1.75.1. General Component

+
+

Cross-cutting aspects belong to the implicit component general. It contains technical configurations and very general code that is not business specific. Such code shall not have any dependencies to other components and therefore business related code.

+
+
+
+

1.75.2. Business Component

+
+

The business-architecture defines the business components with their allowed dependencies. A small application (microservice) may just have one component and no dependencies making it simple while the same architecture can scale up to large and complex applications (from bigger microservice up to modulith). +Tailoring a business domain into applications and applications into components is a tricky task that needs the skills of an experienced architect. +Also, the tailoring should follow the business and not split by technical reasons or only by size. +Size is only an indicator but not a driver of tailoring. +Whatever hypes like microservices are telling you, never get misled in this regard: +If your system grows and reaches MAX+1 lines of code, it is not the right motivation to split it into two microservices of ~MAX/2 lines of code - such approaches will waste huge amounts of money and lead to chaos.

+
+
+
+

1.75.3. App Component

+
+

Only in case you need cross-cutting code that aggregates another component you may introduce the component app. +It is allowed to depend on all other components but no other component may depend on it. +With the modularity and flexibility of spring you typically do not need this. +However, when you need to have a class that registers all services or component-facades using direct code dependencies, you can introduce this component.

+
+
+
+

1.75.4. Component Example

+
+

The following class diagram illustrates an example of the business component Staffmanagement:

+
+
+
+logic layer component pattern +
+
+
+

In this scheme, you can see the structure and flow from the service-layer (REST service call) via the logic-layer to the dataaccess-layer (and back).

+
+ +
+

==Classic project structure

+
+
+

In this section we describe the classic project structure as initially proposed for Java in devonfw. +It is still valid and fully supported. +However, if you want to start a new project, please consider using the modern structure.

+
+
+
+

1.75.5. Modules

+
+

The structure of a devon4j application is divided into the following modules:

+
+
+
    +
  • +

    api: module containing the API of your application. The API contains the required artifacts to interact with your application via remote services. This can be REST service interfaces, transfer-objects with their interfaces and datatypes but also OpenAPI or gRPC contracts.

    +
  • +
  • +

    core: maven module containing the core of the application with service implementation, as well as entire logic layer and dataaccess layer.

    +
  • +
  • +

    batch: optional module for batch layer

    +
  • +
  • +

    server: module that bundles the entire app (core with optional batch) typically as a bootified WAR file.

    +
  • +
+
+
+
+

1.75.6. Deployment

+
+
+
+

Make jar not war

+
+
+
+— Josh Long +
+
+
+

First of all it is important to understand that the above defined modules aim to make api, core, and batch reusable artifacts, that can be used as a regular maven dependency. +On the other hand to build and deploy your application you want a final artifact that is containing all required 3rd party libraries. +This artifact is not reusable as a maven dependency. +That is exactly the purpose of the server module to build and package this final deployment artifact. +By default we first build a regular WAR file with maven in your server/target directory (*-server-«version».war) and in a second step create a bootified WAR out of this (*-server-bootified.war). +The bootified WAR file can then be started standalone (java -jar «filename».war). +However, it is also possible to deploy the same WAR file to a servlet container like tomcat or jetty. +As application servers and externally provided servlet containers are not recommended anymore for various reasons (see JEE), you may also want to create a bootified JAR file instead. +All you need to do in that case is to change the packaging in your server/pom.xml from war to jar.

+
+
+
+

1.75.7. Package Structure

+
+

The package structure of your code inside src/main/java (and src/test/java) of your modules is described in our coding conventions in the sections packages. A full mapping of the architecture and the different code elements to the packaging is described in the following section.

+
+
+
+

1.75.8. Layers

+
+

The package structure of your code inside src/main/java (and src/test/java) of your app is described in our coding conventions in the sections packages. +The following table describes our classic approach for packaging and layering:

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 12. Traditional generic devon4j layers
Layer«layer»

service

service

logic

logic

data-access

dataaccess

batch (optional)

batch

client (optional)

client

common

common

+
+
+

1.75.9. Architecture Mapping

+
+

In order to help you to map the architecture, packaging, layering, etc. to the code and see where different code elements should be placed, +we provide this architecture mapping:

+
+
+
+
«root»
+├──.«component»
+|  ├──.common
+|  |  ├──.api[.«detail»]
+|  |  |  ├──.datatype
+|  |  |  |  └──.«Datatype» (api)
+|  |  |  └──.«BusinessObject» (api)
+|  |  └──.impl[.«detail»]
+|  |     ├──.«Aspect»ConfigProperties (core)
+|  |     ├──.«Datatype»JsonSerializer (core)
+|  |     └──.«Datatype»JsonDeserializer (core)
+|  ├──.dataaccess
+|  |  ├──.api[.«detail»]
+|  |  |  ├──.repo
+|  |  |  |  └──.«BusinessObject»Repository (core)
+|  |  |  ├──.dao (core) [alternative to repo]
+|  |  |  |  └──.«BusinessObject»Dao (core) [alternative to Repository]
+|  |  |  └──.«BusinessObject»Entity (core)
+|  |  └──.impl[.«detail»]
+|  |     ├──.dao (core) [alternative to repo]
+|  |     |  └──.«BusinessObject»DaoImpl (core) [alternative to Repository]
+|  |     └──.«Datatype»AttributeConverter (core)
+|  ├──.logic
+|  |  ├──.api
+|  |  |  ├──.[«detail».]to
+|  |  |  |   ├──.«MyCustom»«To (api)
+|  |  |  |   ├──.«DataStructure»Embeddable (api)
+|  |  |  |   ├──.«BusinessObject»Eto (api)
+|  |  |  |   └──.«BusinessObject»«Subset»Cto (api)
+|  |  |  ├──.[«detail».]usecase
+|  |  |  |   ├──.UcFind«BusinessObject» (core)
+|  |  |  |   ├──.UcManage«BusinessObject» (core)
+|  |  |  |   └──.Uc«Operation»«BusinessObject» (core)
+|  |  |  └──.«Component» (core)
+|  |  ├──.base
+|  |  |  └──.[«detail».]usecase
+|  |  |     └──.Abstract«BusinessObject»Uc (core)
+|  |  └──.impl
+|  |     ├──.[«detail».]usecase
+|  |     |   ├──.UcFind«BusinessObject»Impl (core)
+|  |     |   ├──.UcManage«BusinessObject»Impl (core)
+|  |     |   └──.Uc«Operation»«BusinessObject»Impl (core)
+|  |     └──.«Component»Impl (core)
+|  └──.service
+|     ├──.api[.«detail»]
+|     |  ├──.rest
+|     |  |  └──.«Component»RestService (api)
+|     |  └──.ws
+|     |     └──.«Component»WebService (api)
+|     └──.impl[.«detail»]
+|        ├──.jms
+|        |  └──.«BusinessObject»JmsListener (core)
+|        ├──.rest
+|        |  └──.«Component»RestServiceImpl (core)
+|        └──.ws
+|           └──.«Component»WebServiceImpl (core)
+├──.general
+│  ├──.common
+│  |  ├──.api
+|  |  |  ├──.to
+|  |  |  |  ├──.AbstractSearchCriteriaTo (api)
+|  |  |  └──.ApplicationEntity
+│  |  ├──.base
+|  |  |  └──.AbstractBeanMapperSupport (core)
+│  |  └──.impl
+│  |     ├──.config
+│  |     |  └──.ApplicationObjectMapperFactory (core)
+│  |     └──.security
+│  |        └──.ApplicationWebSecurityConfig (core)
+│  ├──.dataaccess
+│  |  └──.api
+|  |     └──.ApplicationPersistenceEntity (core)
+│  ├──.logic
+│  |  └──.base
+|  |     ├──.AbstractComponentFacade (core)
+|  |     ├──.AbstractLogic (core)
+|  |     └──.AbstractUc (core)
+|  └──.service
+|     └──...
+└──.SpringBootApp (core)
+
+
+
+
+
+
+

1.76. Layers

+ +
+

==Client Layer

+
+
+

There are various technical approaches to building GUI clients. The devonfw proposes rich clients that connect to the server via data-oriented services (e.g. using REST with JSON). +In general, we have to distinguish among the following types of clients:

+
+
+
    +
  • +

    web clients

    +
  • +
  • +

    native desktop clients

    +
  • +
  • +

    (native) mobile clients

    +
  • +
+
+
+

Our main focus is on web-clients. In our sample application my-thai-star we offer a responsive web-client based on Angular following devon4ng that integrates seamlessly with the back ends of my-thai-star available for Java using devon4j as well as .NET/C# using devon4net. For building angular clients read the separate devon4ng guide.

+
+
+
JavaScript for Java Developers
+
+

In order to get started with client development as a Java developer we give you some hints to get started. Also if you are an experienced JavaScript developer and want to learn Java this can be helpful. First, you need to understand that the JavaScript ecosystem is as large as the Java ecosystem and developing a modern web client requires a lot of knowledge. The following table helps you as experienced developer to get an overview of the tools, configuration-files, and other related aspects from the new world to learn. Also it helps you to map concepts between the ecosystems. Please note that we list the tools recommended by devonfw here (and we know that there are alternatives not listed here such as gradle, grunt, bower, etc.).

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 13. Aspects in JavaScript and Java ecosystem
TopicAspectJavaScriptJava

Programming

Language

TypeScript (extends JavaScript)

Java

Runtime

VM

nodejs (or web-browser)

jvm

Build- & Dependency-Management

Tool

npm or yarn

maven

Config

package.json

pom.xml

Repository

npm repo

maven central (repo search)

Build cmd

ng build or npm run build (goals are not standardized in npm)

mvn install (see lifecycle)

Test cmd

ng test

mvn test

Testing

Test-Tool

jasmine

junit

Test-Runner

karma

junit / surefire

E2E Testing

Protractor

Selenium

Code Analysis

Code Coverage

ng test --no-watch --code-coverage

JaCoCo

Development

IDE

MS VS Code or IntelliJ

Eclipse or IntelliJ

Framework

Angular (etc.)

Spring or Quarkus

+
+ +
+

==Service Layer

+
+
+

The service layer is responsible for exposing functionality made available by the logic layer to external consumers over a network via technical protocols.

+
+
+
+
Types of Services
+
+

Before you start creating your services you should consider some general design aspects:

+
+
+
    +
  • +

    Do you want to create a RPC service?

    +
  • +
  • +

    Or is your problem better addressed by messaging or eventing?

    +
  • +
  • +

    Who will consume your service?

    +
    +
      +
    • +

      Do you have one or multiple consumers?

      +
    • +
    • +

      Do web-browsers have to use your service?

      +
    • +
    • +

      Will apps from other vendors or parties have to consume your service that you can not influence if the service may have to change or be extended?

      +
    • +
    +
    +
  • +
+
+
+

For RPC a common choice is REST but there are also interesting alternatives like gRPC. We also have a guide for SOAP but this technology should rather be considered as legacy and is not recommended for new services.

+
+
+

When it comes to messaging in Java the typical answer will be JMS. However, a very promising alternative is Kafka.

+
+
+
+
Versioning
+
+

For RPC services consumed by other applications we use versioning to prevent incompatibilities between applications when deploying updates. This is done by the following conventions:

+
+
+
    +
  • +

    We define a version number and prefix it with v (e.g. v1).

    +
  • +
  • +

    If we support previous versions we use that version numbers as part of the Java package defining the service API (e.g. com.foo.application.component.service.api.v1)

    +
  • +
  • +

    We use the version number as part of the service name in the remote URL (e.g. https://application.foo.com/services/rest/component/v1/resource)

    +
  • +
  • +

    Whenever breaking changes are made to the API, create a separate version of the service and increment the version (e.g. v1 → v2). The implementations of the different versions of the service contain compatibility code and delegate to the same unversioned use-case of the logic layer whenever possible.

    +
  • +
  • +

    For maintenance and simplicity, avoid keeping more than one previous version.

    +
  • +
+
+
+
+
Interoperability
+
+

For services that are consumed by clients with different technology, interoperability is required. This is addressed by selecting the right protocol, following protocol-specific best practices and following our considerations especially simplicity.

+
+
+
+
Service Considerations
+
+

The term service is quite generic and therefore easily misunderstood. It is a unit exposing coherent functionality via a well-defined interface over a network. For the design of a service, we consider the following aspects:

+
+
+
    +
  • +

    self-contained
    +The entire API of the service shall be self-contained and have no dependencies on other parts of the application (other services, implementations, etc.).

    +
  • +
  • +

    idempotence
    +E.g. creation of the same master-data entity has no effect (no error)

    +
  • +
  • +

    loosely coupled
    +Service consumers have minimum knowledge and dependencies on the service provider.

    +
  • +
  • +

    normalized
    +Complete, no redundancy, minimal

    +
  • +
  • +

    coarse-grained
    +Service provides rather large operations (save entire entity or set of entities rather than individual attributes)

    +
  • +
  • +

    atomic
    +Process individual entities (for processing large sets of data, use a batch instead of a service)

    +
  • +
  • +

    simplicity
    +Avoid polymorphism, RPC methods with unique name per signature and no overloading, avoid attachments (consider separate download service), etc.

    +
  • +
+
+
+
+
Security
+
+

Your services are the major entry point to your application. Hence, security considerations are important here.

+
+
+

See REST Security.

+
+ +
+

==Service-Versioning

+
+
+

This guide describes the aspect and details about versioning of services

+
+
+
+
Motivation
+
+

Why versioning of services? First of all, you should only care about this topic if you really have to. Service versioning is complex and requires effort (time and budget). The best way to avoid this is to be smart in the first place when designing the service API. +Further, if you are creating services where the only consumer is e.g. the web-client that you deploy together with the consumed services then you can change your service without the overhead to create new service versions and keeping old service versions for compatibility.

+
+
+

However, if the following indicators are given you typically need to do service versioning:

+
+
+
    +
  • +

    Your service is part of a complex and distributed IT landscape

    +
  • +
  • +

    Your service requires incompatible changes

    +
  • +
  • +

    There are many consumers or there is at least one (relevant) consumer that can not be updated at the same time or is entirely out of control (unknown or totally different party/company)

    +
  • +
+
+
+

What are incompatible changes?

+
+
+
    +
  • +

    Almost any change when SOAP is used (as it changes the WSDL and breaks the contract). Therefore, we recommend to use REST instead. Then, only the following changes are critical.

    +
  • +
  • +

    A change where existing properties (attributes) have to change their name

    +
  • +
  • +

    A change where existing features (properties, operations, etc.) have to change their semantics (meaning)

    +
  • +
+
+
+

What changes do not cause incompatibilities?

+
+
+
    +
  • +

    Adding new service operations is entirely uncritical with REST.

    +
  • +
  • +

    Adding new properties is only a problem in the following cases:

    +
    +
      +
    • +

      Adding new mandatory properties to the input of a service is causing incompatibilities. This problem can be avoided by contract-design.

      +
    • +
    • +

      If a consumer is using a service to read data, modify it and then save it back via a service and a property is added to the data, then this property might be lost. This is not a problem with dynamic languages such as JavaScript/TypeScript but with strictly typed languages such as Java. In Java you will typically use structured typed transfer-objects (and not Map<String, Object>) so new properties that have been added but are not known to the consumer can not be mapped to the transfer-object and will be lost. When saving that transfer-object later the property will be gone. It might be impossible to determine the difference between a lost property and a property that was removed on purpose. This is a general problem that you need to be aware of and that you have to consider by your design in such situations.

      +
    • +
    +
    +
  • +
+
+
+

Even if you hit an indicator for incompatible changes you can still think about adding a new service operation instead of changing an existing one (and deprecating the old one). Be creative to simplify and avoid extra effort.

+
+
+
+
Procedure
+
+

The procedure when rolling out incompatible changes is illustrated by the following example:

+
+
+
+
+------+  +------+
+| App1 |  | App2 |
++---+--+  +--+---+
+    |        |
+    +---+----+
+        |
++-------+--------+
+|      Sv1       |
+|                |
+|      App3      |
++----------------+
+
+
+
+

So, here we see a simple example where App3 provides a Service S in Version v1 that is consumed both by App1 and App2.

+
+
+

Now for some reason the service S has to be changed in an incompatible way to make it future-proof for demands. However, upgrading all 3 applications at the same time is not possible in this case for whatever reason. Therefore, service versioning is applied for the changes of S.

+
+
+
+
+------+  +------+
+| App1 |  | App2 |
++---+--+  +--+---+
+    |        |
+    +--------+
+    |
++---+------------+
+|  Sv1  |  Sv2   |
+|                |
+|      App3*     |
++----------------+
+
+
+
+

Now, App3 has been upgraded and the new release was deployed. A new version v2 of S has been added while v1 is still kept for compatibility reasons and that version is still used by App1 and App2.

+
+
+
+
+------+  +------+
+| App1 |  | App2*|
++---+--+  +--+---+
+    |        |
+    |        |
+    |        |
++---+--------+---+
+|  Sv1  |  Sv2   |
+|                |
+|      App3      |
++----------------+
+
+
+
+

Now, App2 has been updated and deployed and it is using the new version v2 of S.

+
+
+
+
+------+  +------+
+| App1*|  | App2 |
++---+--+  +--+---+
+    |        |
+    +--------+
+             |
++------------+---+
+|  Sv1  |  Sv2   |
+|                |
+|      App3      |
++----------------+
+
+
+
+

Now, also App1 has been updated and deployed and it is using the new version v2 of S. The version v1 of S is not used anymore. This can be verified via logging and monitoring.

+
+
+
+
+------+  +------+
+| App1 |  | App2 |
++---+--+  +--+---+
+    |        |
+    +--------+
+             |
++------------+---+
+|          Sv2   |
+|                |
+|      App3*     |
++----------------+
+
+
+
+

Finally, version v1 of the service S was removed from App3 and the new release has been deployed.

+
+
+
+
Versioning Schema
+
+

In general anything can be used to differentiate versions of a service. Possibilities are:

+
+
+
    +
  • +

    Code names (e.g. Strawberry, Blueberry, Grapefruit)

    +
  • +
  • +

    Timestamps (YYYYMMDD-HHmmSS)

    +
  • +
  • +

    Sequential version numbers (e.g. v1, v2, v3)

    +
  • +
  • +

    Composed version numbers (e.g. 1.0.48-pre-alpha-3-20171231-235959-Strawberry)

    +
  • +
+
+
+

As we are following the KISS principle (see key principles) we propose to use sequential version numbers. These are short, clear, and easy while still allowing to see what version is after another one. Especially composed version numbers (even 1.1 vs. 2.0) lead to decisions and discussions that easily waste more time than adding value. It is still very easy to maintain an Excel sheet or release-notes document that is explaining the changes for each version (v1, v2, v3) of a particular service.

+
+
+

We suggest to always add the version schema to the service URL to be prepared for service versioning even if service versioning is not (yet) actively used. For simplicity it is explicitly stated that you may even do incompatible changes to the current version (typically v1) of your service if you can update the according consumers within the same deployment.

+
+
+
+
Practice
+
+

So assuming you know that you have to do service versioning, the question is how to do it practically in the code. +The approach for your devon4j project in case of code-first should be as described below:

+
+
+
    +
  • +

    Determine which types in the code need to be changed. It is likely to be the API and implementation of the according service but it may also impact transfer objects and potentially even datatypes.

    +
  • +
  • +

    Create new packages for all these concerned types containing the current version number (e.g. v1).

    +
  • +
  • +

    Copy all these types to these new packages.

    +
  • +
  • +

    Rename these copies so they carry the version number as suffix (e.g. V1).

    +
  • +
  • +

    Increase the version of the service in the unversioned package (e.g. from v1 to v2).

    +
  • +
  • +

    Now you have two versions of the same service (e.g. v1 and v2) but so far they behave exactly the same.

    +
  • +
  • +

    You start with your actual changes and modify the original files that have been copied before.

    +
  • +
  • +

    You will also ensure the links (import statements) of the copied types point to the copies with the version number

    +
  • +
  • +

    This will cause incompatibilities (and compile errors) in the copied service. Therefore, you need to fix that service implementation to map from the old API to the new API and behavior. In some cases, this may be easy (e.g. mapping x.y.z.v1.FooTo to x.y.z.FooTo using bean-mapping with some custom mapping for the incompatible changes), in other cases this can get very complex. Be aware of this complexity from the start before you make your decision about service versioning.

    +
  • +
  • +

    As far as possible this mapping should be done in the service-layer, not to pollute your business code in the core-layer with versioning-aspects. If there is no way to handle it in the service layer, e.g. you need some data from the persistence-layer, implement the "mapping" in the core-layer then, but don’t forget to remove this code, when removing the old service version.

    +
  • +
  • +

    Finally, ensure that both the old service behaves as before as well as the new service works as planned.

    +
  • +
+
+
+
Modularization
+
+

For modularization, we also follow the KISS principle (see key principles): +we suggest to have one api module per application that will contain the most recent version of your service and get released with every release-version of the application. The compatibility code with the versioned packages will be added to the core module and therefore is not exposed via the api module (because it has already been exposed in the previous release of the app). This way, you can always determine for sure which version of a service is used by another application just by its maven dependencies.

+
+
+

The KISS approach with only a single module that may contain multiple services (e.g. one for each business component) will cause problems when you want to have mixed usages of service versions: You can not use an old version of one service and a new version of another service from the same APP as then you would need to have its API module twice as a dependency on different versions, which is not possible. However, to avoid complicated overhead we always suggest to follow this easy approach. Only if you come to the point that you really need this complexity you can still solve it (even afterwards by publishing another maven artefact). As we are all on our way to build more but smaller applications (SOA, microservices, etc.) we should always start simple and only add complexity when really needed.

+
+
+

The following example gives an idea of the structure:

+
+
+
+
/«my-app»
+├──/api
+|  └──/src/main/java/
+|     └──/«rootpackage»/«application»/«component»
+|        ├──/common/api/to
+|        |  └──FooTo
+|        └──/service/api/rest
+|           └──FooRestService
+└──/core
+   └──/src/main/java/
+      └──«rootpackage»/«application»/«component»
+         ├──/common/api/to/v1
+         |  └──FooToV1
+         └──/service
+            ├──/api/rest/v1
+            |  └──FooRestServiceV1
+            └──impl/rest
+               ├──/v1
+               |  └── FooRestServiceImplV1
+               └──FooRestServiceImpl
+
+
+
+ +
+

==Logic Layer

+
+
+

The logic layer is the heart of the application and contains the main business logic. +According to our business architecture, we divide an application into components. +For each component, the logic layer defines different use-cases. Another approach is to define a component-facade, which we do not recommend for future applications. Especially for Quarkus applications, we want to simplify things and highly suggest omitting component-facade completely and using use-cases only. +It is very important that you follow the links to understand the concept of use-case in order to properly implement your business logic.

+
+
+
+
+
Responsibility
+
+

The logic layer is responsible to implement the business logic according to the specified functional demands and requirements. +Therefore, it creates the actual value of the application. The logic layer is responsible for invoking business logic in external systems. +The following additional aspects are also included in its responsibility:

+
+
+ +
+
+
+
Security
+
+

The logic layer is the heart of the application. It is also responsible for authorization and hence security is important in this current case. Every method exposed in an interface needs to be annotated with an authorization check, stating what role(s) a caller must provide in order to be allowed to make the call. The authorization concept is described here.

+
+
+
Direct Object References
+
+

Insecure Direct Object References are a common security threat. This simply gives you two options:

+
+
+
    +
  • +

    avoid direct object references

    +
  • +
  • +

    ensure that direct object references are secure

    +
  • +
+
+
+

Especially when using REST, direct object references via technical IDs are common sense. This implies that you have a proper authorization in place. This is especially tricky when your authorization does not only rely on the type of the data and according to static permissions but also on the data itself. Vulnerabilities for this threat can easily happen by design flaws and inadvertence. Here is an example from our sample application:

+
+
+

We have a generic use-case to manage BLOBs. In the first place, it makes sense to write a generic REST service to load and save these BLOBs. However, the permission to read or even update such a BLOB depends on the business object hosting the BLOB. Therefore, such a generic REST service would open the door for this OWASP A4 vulnerability. To solve this in a secure way, you need individual services for each hosting business object to manage the linked BLOB and have to check permissions based on the parent business object. In this example the ID of the BLOB would be the direct object reference and the ID of the business object (and a BLOB property indicator) would be the indirect object reference.

+
+ +
+

==Component Facade

+
+
+ + + + + +
+ + +Our recommended approach for implementing the logic layer is use-cases +
+
+
+

For each component of the application, the logic layer defines a component facade. +This is an interface defining all business operations of the component. +It carries the name of the component («Component») and has an implementation named «Component»Impl (see implementation).

+
+
+
+
API
+
+

The component facade interface defines the logic API of the component and has to be business oriented. +This means that all parameters and return types of all methods from this API have to be business transfer-objects, datatypes (String, Integer, MyCustomerNumber, etc.), or collections of these. +The API may also only access objects of other business components listed in the (transitive) dependencies of the business-architecture.

+
+
+

Here is an example how such an API may look like:

+
+
+
+
public interface Bookingmanagement {
+
+  BookingEto findBooking(Long id);
+
+  BookingCto findBookingCto(Long id);
+
+  Page<BookingEto> findBookingEtos(BookingSearchCriteriaTo criteria);
+
+  void approveBooking(BookingEto booking);
+
+}
+
+
+
+
+
Implementation
+
+

The implementation of an interface from the logic layer (a component facade or a use-case) carries the name of that interface with the suffix Impl and is annotated with @Named. +An implementation typically needs access to the persistent data. +This is done by injecting the corresponding repository (or DAO). +According to data-sovereignty, only repositories of the same business component may be accessed directly. +For accessing data from other components the implementation has to use the corresponding API of the logic layer (the component facade). Further, it shall not expose persistent entities from the domain layer and has to map them to transfer objects using the bean-mapper.

+
+
+
+
@Named
+@Transactional
+public class BookingmanagementImpl extends AbstractComponentFacade implements Bookingmanagement {
+
+  @Inject
+  private BookingRepository bookingRepository;
+
+  @Override
+  public BookingEto findBooking(Long id) {
+
+    LOG.debug("Get Booking with id {} from database.", id);
+    BookingEntity entity = this.bookingRepository.findOne(id);
+    return getBeanMapper().map(entity, BookingEto.class));
+  }
+}
+
+
+
+

As you can see, entities (BookingEntity) are mapped to corresponding ETOs (BookingEto). +Further details about this can be found in bean-mapping.

+
+ +
+

==UseCase +A use-case is a small unit of the logic layer responsible for an operation on a particular entity (business object). +We leave it up to you to decide whether you want to define an interface (API) for each use-case or provide an implementation directly.

+
+
+

Following our architecture-mapping (for classic and modern projects), use-cases are named Uc«Operation»«BusinessObject»[Impl]. The prefix Uc stands for use-case and allows to easily find and identify them in your IDE. The «Operation» stands for a verb that is operated on the entity identified by «BusinessObject». +For CRUD we use the standard operations Find and Manage that can be generated by CobiGen. This also separates read and write operations (e.g. if you want to do CQRS, or to configure read-only transactions for read operations).

+
+
+

In our example, we choose to define an interface for each use-case. We also use *To to refer to any type of transfer object. Please follow our guide to understand more about different types of transfer object e.g. Eto, Dto, Cto

+
+
+
+
Find
+
+

The UcFind«BusinessObject» defines all read operations to retrieve and search the «BusinessObject». +Here is an example:

+
+
+
+
public interface UcFindBooking {
+  //*To = Eto, Dto or Cto
+  Booking*To findBooking(Long id);
+}
+
+
+
+
+
Manage
+
+

The UcManage«BusinessObject» defines all CRUD write operations (create, update and delete) for the «BusinessObject». +Here is an example:

+
+
+
+
public interface UcManageBooking {
+
+  //*To = Eto, Dto or Cto
+  Booking*To saveBooking(Booking*To booking);
+
+  void deleteBooking(Long id);
+
+}
+
+
+
+
+
Custom
+
+

Any other non CRUD operation Uc«Operation»«BusinessObject» uses any other custom verb for «Operation». +Typically, such custom use-cases only define a single method. +Here is an example:

+
+
+
+
public interface UcApproveBooking {
+
+  //*To = Eto, Dto or Cto
+  void approveBooking(Booking*To booking);
+
+}
+
+
+
+
+
Implementation
+
+

The implementation should carry its own name and the suffix Impl and is annotated with @Named and @ApplicationScoped. It will need access to the persistent data which is done by injecting the corresponding repository (or DAO). Furthermore, it shall not expose persistent entities from the data access layer and has to map them to transfer objects using the bean-mapper. Please refer to our bean mapping, transfer object and dependency injection documentation for more information. +Here is an example:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcManageBookingImpl implements UcManageBooking {
+
+  @Inject
+  private BookingRepository bookingRepository;
+
+  @Override
+  public void deleteBooking(Long id) {
+
+    LOG.debug("Delete Booking with id {} from database.", id);
+    this.bookingRepository.deleteById(id);
+  }
+}
+
+
+
+

The use-cases can then be injected directly into the service.

+
+
+
+
@Named("BookingmanagementRestService")
+@Validated
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+
+  @Inject
+  private UcFindBooking ucFindBooking;
+
+  @Inject
+  private UcManageBooking ucManageBooking;
+
+  @Inject
+  private UcApproveBooking ucApproveBooking;
+}
+
+
+
+
+
Internal use case
+
+

Sometimes, a component with multiple related entities and many use-cases needs to reuse business logic internally. +Of course, this can be exposed as an official use-case API but this will imply using transfer-objects (ETOs) instead of entities. In some cases, this is undesired e.g. for better performance to prevent unnecessary mapping of entire collections of entities. +In the first place, you should try to use abstract base implementations providing reusable methods the actual use-case implementations can inherit from. +If your business logic is even more complex and you have multiple aspects of business logic to share and reuse but also run into multi-inheritance issues, you may also just create use-cases that have their interface located in the impl scope package right next to the implementation (or you may just skip the interface). In such a case, you may define methods that directly take or return entity objects. +To avoid confusion with regular use-cases, we recommend to add the Internal suffix to the type name leading to Uc«Operation»«BusinessObject»Internal[Impl].

+
+
+ +
+

==Data-Access Layer

+
+
+

The data-access layer is responsible for all outgoing connections to access and process data. This is mainly about accessing data from a persistent data-store. External system could also be accessed from the data-access layer if they match this definition, e.g. a mongo-db via rest services.

+
+
+

Note: In the modern project structure, this layer is replaced by the domain layer.

+
+
+
+
+
Database
+
+

You need to make your choice for a database. Options are documented here.

+
+
+

The classical approach is to use a Relational Database Management System (RDBMS). In such a case, we strongly recommend to follow our JPA Guide. Some NoSQL databases are supported by spring-data so you can consider the repository guide.

+
+
+ +
+

==Batch Layer

+
+
+

We understand batch processing as a bulk-oriented, non-interactive, typically long running execution of tasks. For simplicity, we use the term "batch" or "batch job" for such tasks in the following documentation.

+
+
+

devonfw uses Spring Batch as a batch framework.

+
+
+

This guide explains how Spring Batch is used in devonfw applications. It focuses on aspects which are special to devonfw. If you want to learn about spring-batch you should refer to Spring's reference documentation.

+
+
+

There is an example of a simple batch implementation in the my-thai-star batch module.

+
+
+

In this chapter, we will describe the overall architecture (especially concerning layering) and how to administer batches.

+
+
+
+
Layering
+
+

Batches are implemented in the batch layer. The batch layer is responsible for batch processes, whereas the business logic is implemented in the logic layer. Compared to the service layer, you may understand the batch layer just as a different way of accessing the business logic. +From a component point of view, each batch is implemented as a subcomponent in the corresponding business component. +The business component is defined by the business architecture.

+
+
+

Let’s make an example for that. The sample application implements a batch for exporting ingredients. This ingredientExportJob belongs to the dishmanagement business component. +So the ingredientExportJob is implemented in the following package:

+
+
+
+
<basepackage>.dishmanagement.batch.impl.*
+
+
+
+

Batches should invoke use cases in the logic layer for doing their work. +Only "batch specific" technical aspects should be implemented in the batch layer.

+
+
+
+
+

Example: +For a batch, which imports product data from a CSV file, this means that all code for actually reading and parsing the CSV input file is implemented in the batch layer. +The batch calls the use case "create product" in the logic layer for actually creating the products for each line read from the CSV input file.

+
+
+
+
+
Directly accessing data access layer
+
+

In practice, it is not always appropriate to create use cases for every bit of work a batch should do. Instead, the data access layer can be used directly. +An example for that is a typical batch for data retention which deletes out-of-time data. +Often, deleting out-dated data is done by invoking a single SQL statement. It is appropriate to implement that SQL in a Repository or DAO method and call this method directly from the batch. +But be careful: this pattern is a simplification which could lead to business logic cluttered in different layers, which reduces the maintainability of your application. +It is a typical design decision you have to make when designing your specific batches.

+
+
+
+
+
Project structure and packaging
+
+

Batches will be implemented in a separate Maven module to keep the application core free of batch dependencies. The batch module includes a dependency on the application core-module to allow the reuse of the use cases, DAOs etc. +Additionally the batch module has dependencies on the required spring batch jars:

+
+
+
+
  <dependencies>
+
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>mtsj-core</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter-batch</artifactId>
+    </dependency>
+
+  </dependencies>
+
+
+
+

To allow an easy start of the batches from the command line it is advised to create a bootified jar for the batch module by adding the following to the pom.xml of the batch module:

+
+
+
+
  <build>
+    <resources>
+      <resource>
+        <directory>src/main/resources</directory>
+        <filtering>true</filtering>
+      </resource>
+    </resources>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <configuration>
+          <excludes>
+            <exclude>config/application.properties</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+      <!-- Create bootified jar for batch execution via command line.
+           Your applications spring boot app is used as main-class.
+       -->
+      <plugin>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-maven-plugin</artifactId>
+        <configuration>
+          <mainClass>com.devonfw.application.mtsj.SpringBootApp</mainClass>
+          <classifier>bootified</classifier>
+        </configuration>
+        <executions>
+          <execution>
+            <goals>
+              <goal>repackage</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+
+
+
+
Implementation
+
+

Most of the details about implementation of batches is described in the spring batch documentation. +There is nothing special about implementing batches in devonfw. You will find an easy example in my-thai-star.

+
+
+
+
Starting from command line
+
+

Devonfw advises to start batches via command line. This is most common to many ops teams and allows easy integration in existing schedulers. In general batches are started with the following command:

+
+
+
+
java -jar <app>-batch-<version>-bootified.jar --spring.main.web-application-type=none --spring.batch.job.enabled=true --spring.batch.job.names=<myJob> <params>
+
+
+ ++++ + + + + + + + + + + + + + + + + + + + + +
ParameterExplanation

--spring.main.web-application-type=none

This disables the web app (e.g. Tomcat)

--spring.batch.job.names=<myJob>

This specifies the name of the job to run. If you leave this out, ALL jobs will be executed, which probably does not make too much sense.

<params>

(Optional) additional parameters which are passed to your job

+
+

This will launch your normal spring boot app, disables the web application part and runs the designated job via Spring Boots org.springframework.boot.autoconfigure.batch.JobLauncherCommandLineRunner.

+
+
+
+
Scheduling
+
+

In the real world, scheduling of batches is not as simple as it might first look.

+
+
+
    +
  • +

    Multiple batches have to be executed in order to achieve complex tasks. If one of those batches fails the further execution has to be stopped and operations should be notified for example.

    +
  • +
  • +

    Input files or those created by batches have to be copied from one node to another.

    +
  • +
  • +

    Scheduling batch executing could get complex easily (quarterly jobs, run job on first workday of a month, …​)

    +
  • +
+
+
+

For devonfw we propose the batches themselves should not mess around with details of scheduling. +Likewise your application should not do so. This complexity should be externalized to a dedicated batch administration service or scheduler. +This service could be a complex product or a simple tool like cron. We propose Rundeck as an open source job scheduler.

+
+
+

This gives full control to operations to choose the solution which fits best into existing administration procedures.

+
+
+
+
Handling restarts
+
+

If you start a job with the same parameters set after a failed run (BatchStatus.FAILED) a restart will occur. +In many cases your batch should then not reprocess all items it processed in the previous runs. +For that you need some logic to start at the desired offset. There are different ways to implement such logic:

+
+
+
    +
  • +

    Marking processed items in the database in a dedicated column

    +
  • +
  • +

    Write all IDs of items to process in a separate table as an initialization step of your batch. You can then delete IDs of already processed items from that table during the batch execution.

    +
  • +
  • +

    Storing restart information in springs ExecutionContext (see below)

    +
  • +
+
+
+
Using spring batch ExecutionContext for restarts
+
+

By implementing the ItemStream interface in your ItemReader or ItemWriter you may store information about the batch progress in the ExecutionContext. You will find an example for that in the CountJob in My Thai Star.

+
+
+

Additional hint: It is important that the bean definition methods of your ItemReader/ItemWriter return types implementing ItemStream (and not just ItemReader or ItemWriter alone). For that, the ItemStreamReader and ItemStreamWriter interfaces are provided.

+
+
+
+
+
Exit codes
+
+

Your batches should create a meaningful exit code to allow reaction to batch errors e.g. in a scheduler. +For that spring batch automatically registers an org.springframework.boot.autoconfigure.batch.JobExecutionExitCodeGenerator. To make this mechanism work your spring boot app main class has to propagate this exit code to the JVM:

+
+
+
+
@SpringBootApplication
+public class SpringBootApp {
+
+  public static void main(String[] args) {
+    if (Arrays.stream(args).anyMatch((String e) -> e.contains("--spring.batch.job.names"))) {
+      // if executing batch job, explicitly exit jvm to report error code from batch
+      System.exit(SpringApplication.exit(SpringApplication.run(SpringBootApp.class, args)));
+    } else {
+      // normal web application start
+      SpringApplication.run(SpringBootApp.class, args);
+    }
+  }
+}
+
+
+
+
+
Stop batches and manage batch status
+
+

Spring batch uses several database tables to store the status of batch executions. +Each execution may have different status. +You may use this mechanism to gracefully stop batches. +Additionally in some edge cases (batch process crashed) the execution status may be in an undesired state. +E.g. the state will be running, despite the process crashed sometime ago. +For that cases you have to change the status of the execution in the database.

+
+
+
CLI-Tool
+
+

Devonfw provides an easy-to-use CLI tool to manage the execution status of your jobs. +The tool is implemented in the devonfw module devon4j-batch-tool. It will provide a runnable jar, which may be used as follows:

+
+
+
+
List names of all previous executed jobs
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar jobs list

+
+
Stop job named 'countJob'
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar jobs stop countJob

+
+
Show help
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar

+
+
+
+
+

As you can see, each invocation includes the JDBC connection string to your database. +This means that you have to make sure that the corresponding DB driver is in the classpath (the prepared jar only contains H2).

+
+
+
+
+
Authentication
+
+

Most business applications incorporate authentication and authorization. +Your spring boot application will implement some kind of security, e.g. integrated login with username+password or in many cases authentication via an existing IAM. +For security reasons your batch should also implement an authentication mechanism and obey the authorization implemented in your application (e.g. via @RolesAllowed).

+
+
+

Since there are many different authentication mechanism we cannot provide an out-of-the-box solution in devonfw, but we describe a pattern how this can be implemented in devonfw batches.

+
+
+

We suggest to implement the authentication in a Spring Batch tasklet, which runs as the first step in your batch. This tasklet will do all of the work which is required to authenticate the batch. A simple example which authenticates the batch "locally" via username and password could be implemented like this:

+
+
+
+
@Named
+public class SimpleAuthenticationTasklet implements Tasklet {
+
+  @Override
+  public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
+
+    String username = chunkContext.getStepContext().getStepExecution().getJobParameters().getString("username");
+    String password = chunkContext.getStepContext().getStepExecution().getJobParameters().getString("password");
+    Authentication authentication = new UsernamePasswordAuthenticationToken(username, password);
+
+    SecurityContextHolder.getContext().setAuthentication(authentication);
+    return RepeatStatus.FINISHED;
+  }
+
+}
+
+
+
+

The username and password have to be supplied via two cli parameters -username and -password. This implementation creates an "authenticated" Authentication and sets it in the Spring Security context. This is just for demonstration; normally you should not provide passwords via the command line. The actual authentication will be done automatically via Spring Security as in your "normal" application. +If you have a more complex authentication mechanism in your application e.g. via OpenID Connect, just call this in the tasklet. Naturally you may read authentication parameters (e.g. secrets) from the command line or more securely from a configuration file.

+
+
+

In your Job Configuration set this tasklet as the first step:

+
+
+
+
@Configuration
+@EnableBatchProcessing
+public class BookingsExportBatchConfig {
+  @Inject
+  private JobBuilderFactory jobBuilderFactory;
+
+  @Inject
+  private StepBuilderFactory stepBuilderFactory;
+
+  @Bean
+  public Job myBatchJob() {
+    return this.jobBuilderFactory.get("myJob").start(myAuthenticationStep()).next(...).build();
+  }
+
+  @Bean
+  public Step myAuthenticationStep() {
+    return this.stepBuilderFactory.get("myAuthenticationStep").tasklet(myAuthenticatonTasklet()).build();
+  }
+
+  @Bean
+  public Tasklet myAuthenticatonTasklet() {
+    return new SimpleAuthenticationTasklet();
+  }
+...
+
+
+
+
+
Tipps & tricks
+
+
Identifying job parameters
+
+

Spring uses a job's parameters to identify job executions. Parameters starting with "-" are not considered for identifying a job execution.

+
+
+
+
+
+
+

1.77. Guides

+ +
+

==Configuration

+
+
+
Internal Application Configuration
+
+

There usually is a main configuration registered with main Spring Boot App, but differing configurations to support automated test of the application can be defined using profiles (not detailed in this guide).

+
+
+
Spring Boot Application
+
+

For a complete documentation, see the Spring Boot Reference Guide.

+
+
+

With spring-boot you provide a simple main class (also called starter class) like this: +com.devonfw.mtsj.application

+
+
+
+
@SpringBootApplication(exclude = { EndpointAutoConfiguration.class })
+@EntityScan(basePackages = { "com.devonfw.mtsj.application" }, basePackageClasses = { AdvancedRevisionEntity.class })
+@EnableGlobalMethodSecurity(jsr250Enabled = true)
+@ComponentScan(basePackages = { "com.devonfw.mtsj.application.general", "com.devonfw.mtsj.application" })
+public class SpringBootApp {
+
+  /**
+   * Entry point for spring-boot based app
+   *
+   * @param args - arguments
+   */
+  public static void main(String[] args) {
+
+    SpringApplication.run(SpringBootApp.class, args);
+  }
+}
+
+
+
+

In a devonfw application this main class is always located in the <basepackage> of the application package namespace (see package-conventions). This is because a spring boot application will automatically do a classpath scan for components (spring-beans) and entities in the package where the application main class is located including all sub-packages. You can use the @ComponentScan and @EntityScan annotations to customize this behaviour.

+
+
+

If you want to map spring configuration properties into your custom code please see configuration mapping.

+
+
+
+
Standard beans configuration
+
+

For basic bean configuration we rely on spring boot using mainly configuration classes and only occasionally XML configuration files. Some key principles to understand Spring Boot auto-configuration features:

+
+
+
    +
  • +

    Spring Boot auto-configuration attempts to automatically configure your Spring application based on the jar dependencies and annotated components found in your source code.

    +
  • +
  • +

    Auto-configuration is non-invasive, at any point you can start to define your own configuration to replace specific parts of the auto-configuration by redefining your identically named bean (see also exclude attribute of @SpringBootApplication in example code above).

    +
  • +
+
+
+

Beans are configured via annotations in your java code (see dependency-injection).

+
+
+

For technical configuration you will typically write additional spring config classes annotated with @Configuration that provide bean implementations via methods annotated with @Bean. See spring @Bean documentation for further details. Like in XML you can also use @Import to make a @Configuration class include other configurations.

+
+
+

More specific configuration files (as required) reside in an adequately named subfolder of:

+
+
+

src/main/resources/app

+
+
+
+
BeanMapper Configuration
+
+

In case you are still using dozer, you will find further details in bean-mapper configuration.

+
+
+
+
Security configuration
+
+

The abstract base class BaseWebSecurityConfig should be extended to configure web application security thoroughly. +A basic and secure configuration is provided which can be overridden or extended by subclasses. +Subclasses must use the @Profile annotation to further discriminate between beans used in production and testing scenarios. See the following example:

+
+
+
Listing 14. How to extend BaseWebSecurityConfig for Production and Test
+
+
@Configuration
+@EnableWebSecurity
+@Profile(SpringProfileConstants.JUNIT)
+public class TestWebSecurityConfig extends BaseWebSecurityConfig {...}
+
+@Configuration
+@EnableWebSecurity
+@Profile(SpringProfileConstants.NOT_JUNIT)
+public class WebSecurityConfig extends BaseWebSecurityConfig {...}
+
+
+ +
+
+
WebSocket configuration
+
+

A websocket endpoint is configured within the business package as a Spring configuration class. The annotation @EnableWebSocketMessageBroker makes Spring Boot registering this endpoint.

+
+
+
+
package your.path.to.the.websocket.config;
+...
+@Configuration
+@EnableWebSocketMessageBroker
+public class WebSocketConfig extends AbstractWebSocketMessageBrokerConfigurer {
+...
+
+
+
+
+
+
External Application Configuration
+
+
application.properties files
+
+

Here is a list of common properties provided by the Spring framework.

+
+
+

For a general understanding of how spring-boot is loading and bootstrapping your application.properties see spring-boot external configuration.

+
+
+

The following properties files are used in devonfw application:

+
+
+
    +
  • +

    src/main/resources/application.properties providing a default configuration - bundled and deployed with the application package. It further acts as a template to derive a tailored minimal environment-specific configuration.

    +
  • +
  • +

    src/main/resources/config/application.properties providing additional properties only used at development time (for all local deployment scenarios). This property file is excluded from all packaging.

    +
  • +
  • +

    src/test/resources/config/application.properties providing additional properties only used for testing (JUnits based on spring test).

    +
  • +
+
+
+

For other environments where the software gets deployed such as test, acceptance and production you need to provide a tailored copy of application.properties. The location depends on the deployment strategy:

+
+
+
    +
  • +

    standalone run-able Spring Boot App using embedded tomcat: config/application.properties under the installation directory of the spring boot application.

    +
  • +
  • +

    dedicated tomcat (one tomcat per app): $CATALINA_BASE/lib/config/application.properties

    +
  • +
  • +

    tomcat serving a number of apps (requires expanding the wars): $CATALINA_BASE/webapps/<app>/WEB-INF/classes/config

    +
  • +
+
+
+

In this application.properties you only define the minimum properties that are environment specific and inherit everything else from the bundled src/main/resources/application.properties. In any case, make very sure that the classloader will find the file.

+
+
+
+
Database Configuration
+
+

The configuration for spring and Hibernate is already provided by devonfw in our sample application and the application template. So you only need to worry about a few things to customize.

+
+
+Database System and Access +
+

Obviously you need to configure which type of database you want to use as well as the location and credentials to access it. The defaults are configured in application.properties that is bundled and deployed with the release of the software. The files should therefore contain the properties as in the given example:

+
+
+
+
  database.url=jdbc:postgresql://database.enterprise.com/app
+  database.user.login=appuser01
+  database.user.password=************
+  database.hibernate.dialect = org.hibernate.dialect.PostgreSQLDialect
+  database.hibernate.hbm2ddl.auto=validate
+
+
+
+

For further details about database.hibernate.hbm2ddl.auto please see here. For production and acceptance environments we use the value validate that should be set as default. In case you want to use Oracle RDBMS you can find additional hints here.

+
+
+

If your application supports multiple database types, set spring.profiles.active=XXX in src/main/resources/config/application.properties to choose the database of your choice. Also, one has to set all the active spring profiles in this application.properties and not in any of the other application.properties.

+
+
+
+Database Logging +
+

Add the following properties to application.properties to enable logging of database queries for debugging purposes.

+
+
+
+
spring.jpa.properties.hibernate.show_sql=true
+spring.jpa.properties.hibernate.use_sql_comments=true
+spring.jpa.properties.hibernate.format_sql=true
+
+
+
+
+
+
+
Security
+
+
Password Encryption
+
+

In order to support encrypted passwords in spring-boot application.properties all you need to do is to add jasypt-spring-boot as dependency in your pom.xml (please check for recent version here):

+
+
+
+
<dependency>
+  <groupId>com.github.ulisesbocchio</groupId>
+  <artifactId>jasypt-spring-boot-starter</artifactId>
+  <version>3.0.3</version>
+</dependency>
+
+
+
+

This will smoothly integrate jasypt into your spring-boot application. Read this HOWTO to learn how to encrypt and decrypt passwords using jasypt.

+
+
+

Next, we give a simple example of how to encrypt and configure a secret value. +We use the algorithm PBEWITHHMACSHA512ANDAES_256 that provides strong encryption and is the default of jasypt-spring-boot-starter. +However, different algorithms can be used if preferred (e.g. PBEWITHMD5ANDTRIPLEDES).

+
+
+
+
java -cp ${M2_REPO}/org/jasypt/jasypt/1.9.3/jasypt-1.9.3.jar org.jasypt.intf.cli.JasyptPBEStringEncryptionCLI password=masterpassword algorithm=PBEWITHHMACSHA512ANDAES_256 input=secret ivGeneratorClassName=org.jasypt.iv.RandomIvGenerator
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.5+10
+
+
+
+----ARGUMENTS-------------------
+
+input: secret
+password: masterpassword
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+PoUxkNjY2juQMCyPu6ic5KJy1XfK+bX9vu2/mPj3pmcO4iydG6mhgZRZSw50z/oC
+
+
+
+

Of course the master-password (masterpassword) and the actual password to encrypt (secret) are just examples. +Please replace them with reasonable strong passwords for your environment. +Further, if you are using devonfw-ide you can make your life much easier and just type:

+
+
+
+
devon jasypt encrypt
+
+
+
+

See jasypt commandlet for details.

+
+
+

Now the entire line after the OUTPUT block is your encrypted secret. +It even contains some random salt so that multiple encryption invocations with the same parameters (ARGUMENTS) will produce a different OUTPUT.

+
+
+

The master-password can be configured on your target environment via the property jasypt.encryptor.password. As system properties given on the command-line are visible in the process list, we recommend to use a config/application.yml file only for this purpose (as we recommended to use application.properties for regular configs):

+
+
+
+
jasypt:
+    encryptor:
+        password: masterpassword
+
+
+
+

Again masterpassword is just an example that you replace with your actual master password. +Now you are able to put encrypted passwords into your application.properties and specify the algorithm.

+
+
+
+
spring.datasource.password=ENC(PoUxkNjY2juQMCyPu6ic5KJy1XfK+bX9vu2/mPj3pmcO4iydG6mhgZRZSw50z/oC)
+jasypt.encryptor.algorithm=PBEWITHHMACSHA512ANDAES_256
+
+
+
+

This application.properties file can be version controlled (git-ops) and without knowing the masterpassword nobody is able to decrypt this to get the actual secret back.

+
+
+

To prevent jasypt from throwing an exception in dev or test scenarios you can simply put this in your local config (src/main/resources/config/application.properties and same for test, see above for details):

+
+
+
+
jasypt.encryptor.password=none
+
+
+ +
+

==Mapping configuration to your code

+
+
+

If you are using spring-boot as suggested by devon4j your application can be configured by application.properties file as described in configuration. +To get a single configuration option into your code for flexibility, you can use

+
+
+
+
@Value("${my.property.name}")
+private String myConfigurableField;
+
+
+
+

Now, in your application.properties you can add the property:

+
+
+
+
my.property.name=my-property-value
+
+
+
+

You may even use @Value("${my.property.name:my-default-value}") to make the property optional.

+
+
+
+
Naming conventions for configuration properties
+
+

As a best practice your configuration properties should follow these naming conventions:

+
+
+
    +
  • +

    build the property-name as a path of segments separated by the dot character (.)

    +
  • +
  • +

    segments should get more specific from left to right

    +
  • +
  • +

    a property-name should either be a leaf value or a tree node (prefix of other property-names) but never both! So never have something like foo.bar=value and foo.bar.child=value2.

    +
  • +
  • +

    start with a segment namespace unique to your context or application

    +
  • +
  • +

    a good example would be «myapp».billing.service.email.sender for the sender address of billing service emails sent by «myapp».

    +
  • +
+
+
+
+
Mapping advanced configuration
+
+

However, in many scenarios you will have features that require more than just one property. +Injecting those via @Value is not leading to good code quality. +Instead we create a class with the suffix ConfigProperties containing all configuration properties for our aspect that is annotated with @ConfigurationProperties:

+
+
+
+
@ConfigurationProperties(prefix = "myapp.billing.service")
+public class BillingServiceConfigProperties {
+
+  private final Email email = new Email();
+  private final Smtp smtp = new Smtp();
+
+  public Email getEmail() { return this.email; }
+  public Email getSmtp() { return this.smtp; }
+
+  public static class Email {
+
+    private String sender;
+    private String subject;
+
+    public String getSender() { return this.sender; }
+    public void setSender(String sender) { this.sender = sender; }
+    public String getSubject() { return this.subject; }
+    public void setSubject(String subject) { this.subject = subject; }
+  }
+
+  public static class Smtp {
+
+    private String host;
+    private int port = 25;
+
+    public String getHost() { return this.host; }
+    public void setHost(String host) { this.host = host; }
+    public int getPort() { return this.port; }
+    public void setPort(int port) { this.port = port; }
+  }
+
+}
+
+
+
+

Of course this is just an example to demonstrate this feature of spring-boot. +In order to send emails you would typically use the existing spring-email feature. +But as you can see this allows us to define and access our configuration in a very structured and comfortable way. +The annotation @ConfigurationProperties(prefix = "myapp.billing.service") will automatically map spring configuration properties starting with myapp.billing.service via the according getters and setters into our BillingServiceConfigProperties. +We can easily define defaults (e.g. 25 as default value for myapp.billing.service.smtp.port). +Also Email or Smtp could be top-level classes to be reused in multiple configurations. +Of course you would also add helpful JavaDoc comments to the getters and classes to document your configuration options. +Further to access this configuration, we can use standard dependency-injection:

+
+
+
+
@Inject
+private BillingServiceConfigProperties config;
+
+
+
+

For very generic cases you may also use Map<String, String> to map any kind of property in an untyped way. +An example for generic configuration from devon4j can be found in +ServiceConfigProperties.

+
+
+

For further details about this feature also consult Guide to @ConfigurationProperties in Spring Boot.

+
+
+
+
Generate configuration metadata
+
+

You should further add this dependency to your module containing the *ConfigProperties:

+
+
+
+
    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-configuration-processor</artifactId>
+      <optional>true</optional>
+    </dependency>
+
+
+
+

This will generate configuration metadata so projects using your code can benefit from autocompletion and getting your JavaDoc as a tooltip when editing application.properties, which makes this approach very powerful. +For further details about this please read A Guide to Spring Boot Configuration Metadata.

+
+
+ +
+

==Auditing

+
+
+

For database auditing we use hibernate envers. If you want to use auditing ensure you have the following dependency in your pom.xml:

+
+
+
Listing 15. spring
+
+
<dependency>
+  <groupId>com.devonfw.java.modules</groupId>
+  <artifactId>devon4j-jpa-envers</artifactId>
+</dependency>
+
+
+
+
Listing 16. quarkus
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-hibernate-envers</artifactId>
+</dependency>
+
+
+
+ + + + + +
+ + +The following part applies only to spring applications. At this point, the Quarkus extension does not provide any additional configurations. For Quarkus applications, simply use the @Audited annotation to enable auditing for an entity class, as described a few lines below or seen here. +
+
+
+

Make sure that the entity manager also scans the package from the devon4j-jpa[-envers] module in order to work properly. And make sure that the correct Repository Factory Bean Class is chosen.

+
+
+
+
@EntityScan(basePackages = { "«my.base.package»" }, basePackageClasses = { AdvancedRevisionEntity.class })
+...
+@EnableJpaRepositories(repositoryFactoryBeanClass = GenericRevisionedRepositoryFactoryBean.class)
+...
+public class SpringBootApp {
+  ...
+}
+
+
+
+

Now let your [Entity]Repository extend from DefaultRevisionedRepository instead of DefaultRepository.

+
+
+

The repository now has a method getRevisionHistoryMetadata(id) and getRevisionHistoryMetadata(id, boolean lazy) available to get a list of revisions for a given entity and a method find(id, revision) to load a specific revision of an entity with the given ID or getLastRevisionHistoryMetadata(id) to load the last revision. +To enable auditing for an entity simply place the @Audited annotation on your entity and all entity classes it extends from.

+
+
+
+
@Entity(name = "Drink")
+@Audited
+public class DrinkEntity extends ProductEntity implements Drink {
+...
+
+
+
+

When auditing is enabled for an entity an additional database table is used to store all changes to the entity table and a corresponding revision number. This table is called <ENTITY_NAME>_AUD per default. Another table called REVINFO is used to store all revisions. Make sure that these tables are available. They can be generated by hibernate with the following property (only for development environments).

+
+
+
+
  database.hibernate.hbm2ddl.auto=create
+
+
+
+

Another possibility is to put them in your database migration scripts like so.

+
+
+
+
CREATE CACHED TABLE PUBLIC.REVINFO(
+  id BIGINT NOT NULL generated by default as identity (start with 1),
+  timestamp BIGINT NOT NULL,
+  user VARCHAR(255)
+);
+...
+CREATE CACHED TABLE PUBLIC.<TABLE_NAME>_AUD(
+    <ALL_TABLE_ATTRIBUTES>,
+    revtype TINYINT,
+    rev BIGINT NOT NULL
+);
+
+
+
+ +
+

==Access-Control +Access-Control is a central and important aspect of Security. It consists of two major aspects:

+
+
+ +
+
+
+
+
Authentication
+
+

Definition:

+
+
+
+
+

Authentication is the verification that somebody interacting with the system is the actual subject for whom he claims to be.

+
+
+
+
+

The one authenticated is properly called subject or principal. There are two forms of principals you need to distinguish while designing your authentication: human users and autonomous systems. While e.g. a Kerberos/SPNEGO Single-Sign-On makes sense for human users, it is pointless for authenticating autonomous systems. For simplicity, we use the common term user to refer to any principal even though it may not be a human (e.g. in case of a service call from an external system).

+
+
+

To prove the authenticity, the user provides some secret called credentials. The most simple form of credentials is a password.

+
+
+
Implementations
+
+ + + + + +
+ + +Please never implement your own authentication mechanism or credential store. You have to be aware of implicit demands such as salting and hashing credentials, password life-cycle with recovery, expiry, and renewal including email notification confirmation tokens, central password policies, etc. This is the domain of access managers and identity management systems. In a business context you will typically already find a system for this purpose that you have to integrate (e.g. via LDAP). Otherwise you should consider establishing such a system e.g. using keycloak. +
+
+
+

We recommend using JWT when possible. For KISS, also try to avoid combining multiple authentication mechanisms (form based, basic-auth, SAMLv2, OAuth, etc.) within the same application (for different URLs).

+
+
+

For spring, check the Spring Security

+
+
+

For quarkus, check the Quarkus Authentication

+
+
+
+
+
Authorization
+
+

Definition:

+
+
+
+
+

Authorization is the verification that an authenticated user is allowed to perform the operation he intends to invoke.

+
+
+
+
+
Clarification of terms
+
+

For clarification we also want to give a common understanding of related terms that have no unique definition and consistent usage in the wild.

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 14. Security terms related to authorization
TermMeaning and comment

Permission

A permission is an object that allows a principal to perform an operation in the system. This permission can be granted (given) or revoked (taken away). Sometimes people also use the term right, which is actually wrong, as a right (such as the right to be free) cannot be revoked.

Group

We use the term group in this context for an object that contains permissions. A group may also contain other groups. Then the group represents the set of all recursively contained permissions.

Role

We consider a role as a specific form of group that also contains permissions. A role identifies a specific function of a principal. A user can act in a role.

+

For simple scenarios a principal has a single role associated. In more complex situations a principal can have multiple roles but has only one active role at a time that he can choose out of his assigned roles. For KISS it is sometimes sufficient to avoid this by creating multiple accounts for the few users with multiple roles. Otherwise at least avoid switching roles at run-time in clients as this may cause problems with related states. Simply restart the client with the new role as parameter in case the user wants to switch his role.

Access Control

Any permission, group, role, etc., which declares a control for access management.

+
+
+
Suggestions on the access model
+
+

For the access model we give the following suggestions:

+
+
+
    +
  • +

    Each Access Control (permission, group, role, …​) is uniquely identified by a human readable string.

    +
  • +
  • +

    We create a unique permission for each use-case.

    +
  • +
  • +

    We define groups that combine permissions to typical and useful sets for the users.

    +
  • +
  • +

    We define roles as specific groups as required by our business demands.

    +
  • +
  • +

    We allow to associate users with a list of Access Controls.

    +
  • +
  • +

    For authorization of an implemented use case we determine the required permission. Furthermore, we determine the current user and verify that the required permission is contained in the tree spanned by all his associated Access Controls. If the user does not have the permission we throw a security exception and thus abort the operation and transaction.

    +
  • +
  • +

    We avoid negative permissions, that is a user has no permission by default and only those granted to him explicitly give him additional permission for specific things. Permissions granted can not be reduced by other permissions.

    +
  • +
  • +

    Technically we consider permissions as a secret of the application. Administrators shall not fiddle with individual permissions but grant them via groups. So the access management provides a list of strings identifying the Access Controls of a user. The individual application itself contains these Access Controls in a structured way, whereas each group forms a permission tree.

    +
  • +
+
+
+
+
Naming conventions
+
+

As stated above each Access Control is uniquely identified by a human readable string. This string should follow the naming convention:

+
+
+
+
«app-id».«local-name»
+
+
+
+

For Access Control Permissions the «local-name» again follows the convention:

+
+
+
+
«verb»«object»
+
+
+
+

The segments are defined by the following table:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 15. Segments of Access Control Permission ID
SegmentDescriptionExample

«app-id»

Is a unique technical but human readable string of the application (or microservice). It shall not contain special characters and especially no dot or whitespace. We recommend to use lower-train-case-ascii-syntax. The identity and access management should be organized on enterprise level rather than application level. Therefore permissions of different apps might easily clash (e.g. two apps might both define a group ReadMasterData but some user shall get this group for only one of these two apps). Using the «app-id». prefix is a simple but powerful namespacing concept that allows you to scale and grow. You may also reserve specific «app-id»s for cross-cutting concerns that do not actually reflect a single app e.g to grant access to a geographic region.

shop

«verb»

The action that is to be performed on «object». We use Find for searching and reading data. Save shall be used both for create and update. Only if you really have demands to separate these two you may use Create in addition to Save. Finally, Delete is used for deletions. For non CRUD actions you are free to use additional verbs such as Approve or Reject.

Find

«object»

The affected object or entity. Shall be named according to your data-model

Product

+
+

So as an example shop.FindProduct will reflect the permission to search and retrieve a Product in the shop application. The group shop.ReadMasterData may combine all permissions to read master-data from the shop. However, also a group shop.Admin may exist for the Admin role of the shop application. Here the «local-name» is Admin that does not follow the «verb»«object» schema.

+
+
+
+
devon4j-security
+
+

The module devon4j-security provides ready-to-use code based on spring-security that makes your life a lot easier.

+
+
+
+access-control +
+
Figure 3. devon4j Security Model
+
+
+

The diagram shows the model of devon4j-security that separates two different aspects:

+
+
+
    +
  • +

    The Identity- and Access-Management is provided by according products and typically already available in the enterprise landscape (e.g. an active directory). It provides a hierarchy of primary access control objects (roles and groups) of a user. An administrator can grant and revoke permissions (indirectly) via this way.

    +
  • +
  • +

    The application security defines a hierarchy of secondary access control objects (groups and permissions). This is done by configuration owned by the application (see following section). The "API" is defined by the IDs of the primary access control objects that will be referenced from the Identity- and Access-Management.

    +
  • +
+
+
+
+
Access Control Config
+
+

In your application simply extend AccessControlConfig to configure your access control objects as code and reference it from your use-cases. An example config may look like this:

+
+
+
+
@Named
+public class ApplicationAccessControlConfig extends AccessControlConfig {
+
+  public static final String APP_ID = "MyApp";
+
+  private static final String PREFIX = APP_ID + ".";
+
+  public static final String PERMISSION_FIND_OFFER = PREFIX + "FindOffer";
+
+  public static final String PERMISSION_SAVE_OFFER = PREFIX + "SaveOffer";
+
+  public static final String PERMISSION_DELETE_OFFER = PREFIX + "DeleteOffer";
+
+  public static final String PERMISSION_FIND_PRODUCT = PREFIX + "FindProduct";
+
+  public static final String PERMISSION_SAVE_PRODUCT = PREFIX + "SaveProduct";
+
+  public static final String PERMISSION_DELETE_PRODUCT = PREFIX + "DeleteProduct";
+
+  public static final String GROUP_READ_MASTER_DATA = PREFIX + "ReadMasterData";
+
+  public static final String GROUP_MANAGER = PREFIX + "Manager";
+
+  public static final String GROUP_ADMIN = PREFIX + "Admin";
+
+  public ApplicationAccessControlConfig() {
+
+    super();
+    AccessControlGroup readMasterData = group(GROUP_READ_MASTER_DATA, PERMISSION_FIND_OFFER, PERMISSION_FIND_PRODUCT);
+    AccessControlGroup manager = group(GROUP_MANAGER, readMasterData, PERMISSION_SAVE_OFFER, PERMISSION_SAVE_PRODUCT);
+    AccessControlGroup admin = group(GROUP_ADMIN, manager, PERMISSION_DELETE_OFFER, PERMISSION_DELETE_PRODUCT);
+  }
+}
+
+
+
+
+
Configuration on Java Method level
+
+

In your use-case you can now reference a permission like this:

+
+
+
+
@Named
+public class UcSaveOfferImpl extends ApplicationUc implements UcSaveOffer {
+
+  @Override
+  @RolesAllowed(ApplicationAccessControlConfig.PERMISSION_SAVE_OFFER)
+  public OfferEto save(OfferEto offer) { ... }
+  ...
+}
+
+
+
+
+
JEE Standard
+
+

Role-based Access Control (RBAC) is commonly used for authorization. +JSR 250 defines a number of common annotations to secure your application.

+
+
+
    +
  • +

    javax.annotation.security.PermitAll specifies that no access control is required to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.DenyAll specifies that no access controls are allowed to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.RolesAllowed specifies that only a list of access controls are allowed to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.DeclareRoles defines roles for security checking.

    +
  • +
  • +

    javax.annotation.security.RunAs specifies the RunAs role for the given components.

    +
  • +
+
+
+

@PermitAll, @DenyAll, and @RolesAllowed annotations can be applied to both class and method. +A method-level annotation will override the behaviour of class-level annotation. Using multiple annotations of those 3 is not valid.

+
+
+
+
// invalid
+@PermitAll
+@DenyAll
+public String foo()
+
+// invalid and compilation fails
+@RolesAllowed("admin")
+@RolesAllowed("user")
+public String bar()
+
+// OK
+@RolesAllowed("admin", "user")
+public String bar()
+
+
+
+

Please note that when specifying multiple arguments to @RolesAllowed those are combined with OR (and not with AND). +So if the user has any of the specified access controls, he will be able to access the method.

+
+
+

As a best practice avoid specifying string literals to @RolesAllowed. +Instead define a class with all access controls as constants and reference them from there. +This class is typically called ApplicationAccessControlConfig in devonfw.

+
+
+

In many complicated cases where @PermitAll @DenyAll @RolesAllowed are insufficient e.g. a method should be accessed by a user in role A and not in role B at the same time, you have to verify the user role directly in the method. You can use SecurityContext class to get further needed information.

+
+
+Spring +
+

Spring Security also supports authorization on method level. To use it, you need to add the spring-security-config dependency. If you use Spring Boot, the dependency spring-boot-starter-security already includes spring-security-config. Then you can configure as follows:

+
+
+
    +
  • +

    prePostEnabled property enables Spring Security pre/post annotations. @PreAuthorize and @PostAuthorize annotations provide expression-based access control. See more here

    +
  • +
  • +

    securedEnabled property determines if the @Secured annotation should be enabled. @Secured can be used similarly to @RolesAllowed.

    +
  • +
  • +

    jsr250Enabled property allows us to use the JSR-250 annotations such as @RolesAllowed.

    +
  • +
+
+
+
+
@Configuration
+@EnableGlobalMethodSecurity(
+  prePostEnabled = true,
+  securedEnabled = true,
+  jsr250Enabled = true)
+public class MethodSecurityConfig
+  extends GlobalMethodSecurityConfiguration {
+}
+
+
+
+

A further read about the whole concept of Spring Security Authorization can be found here.

+
+
+
+Quarkus +
+

Quarkus comes with built-in security to allow for RBAC based on the common security annotations @RolesAllowed, @DenyAll, @PermitAll on REST endpoints and CDI beans. Quarkus also provides the io.quarkus.security.Authenticated annotation that will permit any authenticated user to access the resource (equivalent to @RolesAllowed("**")).

+
+
+
+
+
Data-based Permissions
+ +
+
+
Access Control Schema (deprecated)
+
+

The access-control-schema.xml approach is deprecated. The documentation can still be found in access control schema.

+
+
+ +
+

==Data-permissions

+
+
+

In some projects there are demands for permissions and authorization that is dependent on the processed data. E.g. a user may only be allowed to read or write data for a specific region. This is adding some additional complexity to your authorization. If you can avoid this it is always best to keep things simple. However, in various cases this is a requirement. Therefore the following sections give you guidance and patterns how to solve this properly.

+
+
+
+
Structuring your data
+
+

For all your business objects (entities) that have to be secured regarding to data permissions we recommend that you create a separate interface that provides access to the relevant data required to decide about the permission. Here is a simple example:

+
+
+
+
public interface SecurityDataPermissionCountry {
+
+  /**
+   * @return the 2-letter ISO code of the country this object is associated with. Users need
+   *         a data-permission for this country in order to read and write this object.
+   */
+  String getCountry();
+}
+
+
+
+

Now related business objects (entities) can implement this interface. Often such data-permissions have to be applied to an entire object-hierarchy. For security reasons we recommend that also all child-objects implement this interface. For performance reasons we recommend that the child-objects redundantly store the data-permission properties (such as country in the example above) and this gets simply propagated from the parent, when a child object is created.

+
+
+
+
Permissions for processing data
+
+

When saving or processing objects with a data-permission, we recommend to provide dedicated methods to verify the permission in an abstract base-class such as AbstractUc and simply call this explicitly from your business code. This makes it easy to understand and debug the code. Here is a simple example:

+
+
+
+
protected void verifyPermission(SecurityDataPermissionCountry entity) throws AccessDeniedException;
+
+
+
+Beware of AOP +
+

For simple but cross-cutting data-permissions you may also use AOP. This leads to programming aspects that reflectively scan method arguments and magically decide what to do. Be aware that this quickly gets tricky:

+
+
+
    +
  • +

    What if multiple of your method arguments have data-permissions (e.g. implement SecurityDataPermission*)?

    +
  • +
  • +

    What if the object to authorize is only provided as reference (e.g. Long or IdRef) and only loaded and processed inside the implementation where the AOP aspect does not apply?

    +
  • +
  • +

    How to express advanced data-permissions in annotations?

    +
  • +
+
+
+

What we have learned is that annotations like @PreAuthorize from spring-security easily lead to the "programming in string literals" anti-pattern. We strongly discourage to use this anti-pattern. In such case writing your own verifyPermission methods that you manually call in the right places of your business-logic is much better to understand, debug and maintain.

+
+
+
+
+
Permissions for reading data
+
+

When it comes to restrictions on the data to read it becomes even more tricky. In the context of a user only entities shall be loaded from the database he is permitted to read. This is simple for loading a single entity (e.g. by its ID) as you can load it and then if not permitted throw an exception to secure your code. But what if the user is performing a search query to find many entities? For performance reasons we should only find data the user is permitted to read and filter all the rest already via the database query. But what if this is not a requirement for a single query but needs to be applied cross-cutting to tons of queries? Therefore we have the following pattern that solves your problem:

+
+
+

For each data-permission attribute (or set of such) we create an abstract base entity:

+
+
+
+
@MappedSuperclass
+@EntityListeners(PermissionCheckListener.class)
+@FilterDef(name = "country", parameters = {@ParamDef(name = "countries", type = "string")})
+@Filter(name = "country", condition = "country in (:countries)")
+public abstract class SecurityDataPermissionCountryEntity extends ApplicationPersistenceEntity
+    implements SecurityDataPermissionCountry {
+
+  private String country;
+
+  @Override
+  public String getCountry() {
+    return this.country;
+  }
+
+  public void setCountry(String country) {
+    this.country = country;
+  }
+}
+
+
+
+

There are some special hibernate annotations @EntityListeners, @FilterDef, and @Filter used here allowing to apply a filter on the country for any (non-native) query performed by hibernate. The entity listener may look like this:

+
+
+
+
public class PermissionCheckListener {
+
+  @PostLoad
+  public void read(SecurityDataPermissionCountryEntity entity) {
+    PermissionChecker.getInstance().requireReadPermission(entity);
+  }
+
+  @PrePersist
+  @PreUpdate
+  public void write(SecurityDataPermissionCountryEntity entity) {
+    PermissionChecker.getInstance().requireWritePermission(entity);
+  }
+}
+
+
+
+

This will ensure that hibernate implicitly will call these checks for every such entity when it is read from or written to the database. Further to avoid reading entities from the database the user is not permitted to (and ending up with exceptions), we create an AOP aspect that automatically activates the above declared hibernate filter:

+
+
+
+
@Named
+public class PermissionCheckerAdvice implements MethodBeforeAdvice {
+
+  @Inject
+  private PermissionChecker permissionChecker;
+
+  @PersistenceContext
+  private EntityManager entityManager;
+
+  @Override
+  public void before(Method method, Object[] args, Object target) {
+
+    Collection<String> permittedCountries = this.permissionChecker.getPermittedCountriesForReading();
+    if (permittedCountries != null) { // null is returned for admins that may access all countries
+      if (permittedCountries.isEmpty()) {
+        throw new AccessDeniedException("Not permitted for any country!");
+      }
+      Session session = this.entityManager.unwrap(Session.class);
+      session.enableFilter("country").setParameterList("countries", permittedCountries.toArray());
+    }
+  }
+}
+
+
+
+

Finally to apply this aspect to all Repositories (can easily be changed to DAOs) implement the following advisor:

+
+
+
+
@Named
+public class PermissionCheckerAdvisor implements PointcutAdvisor, Pointcut, ClassFilter, MethodMatcher {
+
+  @Inject
+  private PermissionCheckerAdvice advice;
+
+  @Override
+  public Advice getAdvice() {
+    return this.advice;
+  }
+
+  @Override
+  public boolean isPerInstance() {
+    return false;
+  }
+
+  @Override
+  public Pointcut getPointcut() {
+    return this;
+  }
+
+  @Override
+  public ClassFilter getClassFilter() {
+    return this;
+  }
+
+  @Override
+  public MethodMatcher getMethodMatcher() {
+    return this;
+  }
+
+  @Override
+  public boolean matches(Method method, Class<?> targetClass) {
+    return true; // apply to all methods
+  }
+
+  @Override
+  public boolean isRuntime() {
+    return false;
+  }
+
+  @Override
+  public boolean matches(Method method, Class<?> targetClass, Object... args) {
+    throw new IllegalStateException("isRuntime()==false");
+  }
+
+  @Override
+  public boolean matches(Class<?> clazz) {
+    // when using DAOs simply change to some class like ApplicationDao
+    return DefaultRepository.class.isAssignableFrom(clazz);
+  }
+}
+
+
+
+
+
Managing and granting the data-permissions
+
+

Following our authorization guide we can simply create a permission for each country. We might simply reserve a prefix (as virtual «app-id») for each data-permission to allow granting data-permissions to end-users across all applications of the IT landscape. In our example we could create access controls country.DE, country.US, country.ES, etc. and assign those to the users. The method permissionChecker.getPermittedCountriesForReading() would then scan for these access controls and only return the 2-letter country code from it.

+
+
+ + + + + +
+ + +Before you make your decisions how to design your access controls please clarify the following questions: +
+
+
+
    +
  • +

    Do you need to separate data-permissions independent of the functional permissions? E.g. may it be required to express that a user can read data from the countries ES and PL but is only permitted to modify data from PL? In such case a single assignment of "country-permissions" to users is insufficient.

    +
  • +
  • +

    Do you want to grant data-permissions individually for each application (higher flexibility and complexity) or for the entire application landscape (simplicity, better maintenance for administrators)? In case of the first approach you would rather have access controls like app1.country.GB and app2.country.GB.

    +
  • +
  • +

    Do your data-permissions depend on objects that can be created dynamically inside your application?

    +
  • +
  • +

    If you want to grant data-permissions on other business objects (entities), how do you want to reference them (primary keys, business keys, etc.)? What reference is most stable? Which is most readable?

    +
  • +
+
+
+ +
+

==JWT

+
+
+

JWT (JSON Web Token) is an open standard (see RFC 7519) for creating JSON based access tokens that assert some number of claims. +With an IT landscape divided into multiple smaller apps you want to avoid coupling all those apps or services tightly with your IAM (Identity & Access Management). +Instead your apps simply expects a JWT as bearer-token in the Authorization HTTP header field. +All it needs to do for authentication is validating this JWT. +The actual authentication is done centrally by an access system (IAM) that authors those JWTs. +Therefore we recommend to use strong asymmetric cryptography to sign the JWT when it is authored. +Create a keypair per environment and keep the private key as a secret only known to the access system authorizing the JWTs. +Your apps only need to know the public key in order to validate the JWT. +Any request without a JWT or with an invalid JWT will be rejected (with status code 401).

+
+
+

When using spring check the JWT Spring-Starter. +For quarkus follow Using JWT RBAC.

+
+
+ +
+

==Cross-site request forgery (CSRF)

+
+
+

CSRF is a type of malicious exploit of a web application that allows an attacker to induce users to perform actions that they do not intend to perform.

+
+
+
+csrf +
+
+
+

More details about csrf can be found at https://owasp.org/www-community/attacks/csrf.

+
+
+
+
+
Secure devon4j server against CSRF
+
+

In case your devon4j server application is not accessed by browsers or the web-client is using JWT based authentication, you are already safe regarding CSRF. +However, if your application is accessed from a browser and you are using form based authentication (with session cookie) or basic authentication, you need to enable CSRF protection. +This guide will tell you how to do this.

+
+
+
Dependency
+
+

To secure your devon4j application against CSRF attacks, you only need to add the following dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-security-csrf</artifactId>
+</dependency>
+
+
+
+

Starting with devon4j version 2020.12.001 application template, this is all you need to do. +However, if you have started from an older version or you want to understand more, please read on.

+
+
+
+
Pluggable web-security
+
+

To enable pluggable security via devon4j security starters you need to apply WebSecurityConfigurer to your BaseWebSecurityConfig (your class extending spring-boot’s WebSecurityConfigurerAdapter) as following:

+
+
+
+
  @Inject
+  private WebSecurityConfigurer webSecurityConfigurer;
+
+  public void configure(HttpSecurity http) throws Exception {
+    // disable CSRF protection by default, use csrf starter to override.
+	  http = http.csrf().disable();
+	  // apply pluggable web-security from devon4j security starters
+    http = this.webSecurityConfigurer.configure(http);
+    .....
+  }
+
+
+
+
+
Custom CsrfRequestMatcher
+
+

If you want to customize which HTTP requests will require a CSRF token, you can implement your own CsrfRequestMatcher and provide it to the devon4j CSRF protection via qualified injection as following:

+
+
+
+
@Named("CsrfRequestMatcher")
+public class CsrfRequestMatcher implements RequestMatcher {
+  @Override
+  public boolean matches(HttpServletRequest request) {
+    .....
+  }
+}
+
+
+
+

Please note that the exact name (@Named("CsrfRequestMatcher")) is required here to ensure your custom implementation will be injected properly.

+
+
+
+
CsrfRestService
+
+

With the devon4j-starter-security-csrf the CsrfRestService gets integrated into your app. +It provides an operation to get the CSRF token via an HTTP GET request. +The URL path to retrieve this CSRF token is services/rest/csrf/v1/token. +As a result you will get a JSON like the following:

+
+
+
+
{
+  "token":"3a8a5f66-c9eb-4494-81e1-7cc58bc3a519",
+  "parameterName":"_csrf",
+  "headerName":"X-CSRF-TOKEN"
+}
+
+
+
+

The token value is a strong random value that will differ for each user session. +It has to be sent with subsequent HTTP requests (when method is other than GET) in the specified header (X-CSRF-TOKEN).

+
+
+
+
How it works
+
+

Putting it all together, a browser client should call the CsrfRestService after successful login to receive the current CSRF token. +With every subsequent HTTP request (other than GET) the client has to send this token in the corresponding HTTP header. +Otherwise the server will reject the request to prevent CSRF attacks. +For example, an attacker might make your browser perform HTTP requests towards your devon4j application backend via <image> elements, <iframes>, etc. +Your browser will then still include your session cookie if you are already logged in (e.g. from another tab). +However, in case he wants to trigger DELETE or POST requests to make your browser perform changes in the application (delete or update data, etc.), this will fail without the CSRF token. +The attacker may make your browser retrieve the CSRF token but he will not be able to retrieve the result and put it into the header of other requests due to the same-origin-policy. +This way your application will be secured against CSRF attacks.

+
+
+
+
+
Configure devon4ng client for CSRF
+
+

Devon4ng client configuration for CSRF is described here

+
+
+ +
+

==Aspect Oriented Programming (AOP)

+
+
+

AOP is a powerful feature for cross-cutting concerns. However, if used extensively and for the wrong things, an application can become unmaintainable. Therefore we give you the best practices where and how to use AOP properly.

+
+
+
+
AOP Key Principles
+
+

We follow these principles:

+
+
+
    +
  • +

    We use spring AOP based on dynamic proxies (and fallback to cglib).

    +
  • +
  • +

    We avoid AspectJ and other mighty and complex AOP frameworks whenever possible

    +
  • +
  • +

    We only use AOP where we consider it as necessary (see below).

    +
  • +
+
+
+
+
AOP Usage
+
+

We recommend to use AOP with care but we consider it established for the following cross cutting concerns:

+
+
+ +
+
+
+
AOP Debugging
+
+

When using AOP with dynamic proxies the debugging of your code can get nasty. As you can see by the red boxes in the call stack in the debugger there is a lot of magic happening while you often just want to step directly into the implementation skipping all the AOP clutter. When using Eclipse this can easily be achieved by enabling step filters. To do so, you have to enable the feature in the Eclipse tool bar (highlighted in red).

+
+
+
+AOP debugging +
+
+
+

In order to properly make this work you need to ensure that the step filters are properly configured:

+
+
+
+Step Filter Configuration +
+
+
+

Ensure you have at least the following step-filters configured and active:

+
+
+
+
ch.qos.logback.*
+com.devonfw.module.security.*
+java.lang.reflect.*
+java.security.*
+javax.persistence.*
+org.apache.commons.logging.*
+org.apache.cxf.jaxrs.client.*
+org.apache.tomcat.*
+org.h2.*
+org.springframework.*
+
+
+
+ +
+

==Exception Handling

+
+
+
+
Exception Principles
+
+

For exceptions we follow these principles:

+
+
+
    +
  • +

    We only use exceptions for exceptional situations and not for programming control flows, etc. Creating an exception in Java is expensive and hence should not be done for simply testing whether something is present, valid or permitted. In the latter case design your API to return this as a regular result.

    +
  • +
  • +

    We use unchecked exceptions (RuntimeException) [2]

    +
  • +
  • +

    We distinguish internal exceptions and user exceptions:

    +
    +
      +
    • +

      Internal exceptions have technical reasons. For unexpected and exotic situations, it is sufficient to throw existing exceptions such as IllegalStateException. For common scenarios a own exception class is reasonable.

      +
    • +
    • +

      User exceptions contain a message explaining the problem for end users. Therefore, we always define our own exception classes with a clear, brief, but detailed message.

      +
    • +
    +
    +
  • +
  • +

    Our own exceptions derive from an exception base class supporting

    + +
  • +
+
+
+

All this is offered by mmm-util-core, which we propose as a solution. +If you use the devon4j-rest module, this is already included. For Quarkus applications, you need to add the dependency manually.

+
+
+

If you want to avoid additional dependencies, you can implement your own solution for this by creating an abstract exception class ApplicationBusinessException extending from RuntimeException. For an example of this, see our Quarkus reference application.

+
+
+
+
Exception Example
+
+

Here is an exception class from our sample application:

+
+
+
+
public class IllegalEntityStateException extends ApplicationBusinessException {
+
+  private static final long serialVersionUID = 1L;
+
+  public IllegalEntityStateException(Object entity, Object state) {
+
+    this((Throwable) null, entity, state);
+  }
+
+
+  public IllegalEntityStateException(Object entity, Object currentState, Object newState) {
+
+    this(null, entity, currentState, newState);
+  }
+
+  public IllegalEntityStateException(Throwable cause, Object entity, Object state) {
+
+    super(cause, createBundle(NlsBundleApplicationRoot.class).errorIllegalEntityState(entity, state));
+  }
+
+  public IllegalEntityStateException(Throwable cause, Object entity, Object currentState, Object newState) {
+
+    super(cause, createBundle(NlsBundleApplicationRoot.class).errorIllegalEntityStateChange(entity, currentState,
+        newState));
+  }
+
+}
+
+
+
+

The message templates are defined in the interface NlsBundleRestaurantRoot as following:

+
+
+
+
public interface NlsBundleApplicationRoot extends NlsBundle {
+
+
+  @NlsBundleMessage("The entity {entity} is in state {state}!")
+  NlsMessage errorIllegalEntityState(@Named("entity") Object entity, @Named("state") Object state);
+
+
+  @NlsBundleMessage("The entity {entity} in state {currentState} can not be changed to state {newState}!")
+  NlsMessage errorIllegalEntityStateChange(@Named("entity") Object entity, @Named("currentState") Object currentState,
+      @Named("newState") Object newState);
+
+
+  @NlsBundleMessage("The property {property} of object {object} can not be changed!")
+  NlsMessage errorIllegalPropertyChange(@Named("object") Object object, @Named("property") Object property);
+
+  @NlsBundleMessage("There is currently no user logged in")
+  NlsMessage errorNoActiveUser();
+
+
+
+
+
Handling Exceptions
+
+

For catching and handling exceptions we follow these rules:

+
+
+
    +
  • +

    We do not catch exceptions just to wrap or to re-throw them.

    +
  • +
  • +

    If we catch an exception and throw a new one, we always have to provide the original exception as cause to the constructor of the new exception.

    +
  • +
  • +

    At the entry points of the application (e.g. a service operation) we have to catch and handle all throwables. This is done via the exception-facade-pattern via an explicit facade or aspect. The devon4j-rest module already provides ready-to-use implementations for this such as RestServiceExceptionFacade that you can use in your Spring application. For Quarkus, follow the Quarkus guide on exception handling.
    +The exception facade has to …​

    +
    +
      +
    • +

      log all errors (user errors on info and technical errors on error level)

      +
    • +
    • +

      ensure that the entire exception is passed to the logger (not only the message) so that the logger can capture the entire stacktrace and the root cause is not lost.

      +
    • +
    • +

      convert the error to a result appropriable for the client and secure for Sensitive Data Exposure. Especially for security exceptions only a generic security error code or message may be revealed but the details shall only be logged but not be exposed to the client. All internal exceptions are converted to a generic error with a message like:

      +
      +
      +
      +

      An unexpected technical error has occurred. We apologize any inconvenience. Please try again later.

      +
      +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
Common Errors
+
+

The following errors may occur in any devon application:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 16. Common Exceptions
CodeMessageLink

TechnicalError

An unexpected error has occurred! We apologize any inconvenience. Please try again later.

TechnicalErrorUserException.java

ServiceInvoke

«original message of the cause»

ServiceInvocationFailedException.java

+
+ +
+

==Internationalization +Internationalization (I18N) is about writing code independent from locale-specific information. +For I18N of text messages we are suggesting +mmm native-language-support.

+
+
+

In devonfw we have developed a solution to manage text internationalization. devonfw solution comes into two aspects:

+
+
+
    +
  • +

    Bind locale information to the user.

    +
  • +
  • +

    Get the messages in the current user locale.

    +
  • +
+
+
+
+
Binding locale information to the user
+
+

We have defined two different points to bind locale information to the user, depending on whether the user is authenticated or not.

+
+
+
    +
  • +

    User not authenticated: devonfw intercepts the unsecured request and extracts the locale from it. At first, we try to extract a language parameter from the request and if that is not possible, we extract the locale from the `Accept-Language` header.

    +
  • +
  • +

    User authenticated. During the login process, application developers are responsible for filling the language parameter in the UserProfile class. This language parameter could be obtained from DB, LDAP, request, etc. In the devonfw sample we get the locale information from the database.

    +
  • +
+
+
+

This image shows the entire process:

+
+
+
+Internationalization +
+
+
+
+
Getting internationalizated messages
+
+

devonfw has a bean that manages i18n message resolution, the ApplicationLocaleResolver. This bean is responsible for getting the current user, extracting the locale information from it, and reading the correct properties file to get the message.

+
+
+

The i18n properties file must be called ApplicationMessages_la_CO.properties where la=language and CO=country. This is an example of an i18n properties file for the English language to translate devonfw sample user roles:

+
+
+

ApplicationMessages_en_US.properties

+
+
+
+
admin=Admin
+
+
+
+

You should define an ApplicationMessages_la_CO.properties file for every language that your application needs.

+
+
+

The ApplicationLocaleResolver bean is injected in the AbstractComponentFacade class, so it is available in the logic layer and you only need to use this code to get an internationalized message:

+
+
+
+
String msg = getApplicationLocaleResolver().getMessage("mymessage");
+
+
+
+ +
+

==Service Client

+
+
+

This guide is about consuming (calling) services from other applications (micro-services). For providing services, see the Service-Layer Guide. Services can be consumed by the client or the server. As the client is typically not written in Java, you should consult the according guide for your client technology. In case you want to call a service within your Java code, this guide is the right place to get help.

+
+
+
+
Motivation
+
+

Various solutions already exist for calling services, such as RestTemplate from spring or the JAX-RS client API. Furthermore, each and every service framework offers its own API as well. These solutions might be suitable for very small and simple projects (with one or two such invocations). However, with the trend of microservices, the invocation of a service becomes a very common use-case that occurs all over the place. You typically need a solution that is very easy to use but supports flexible configuration, adding headers for authentication, mapping of errors from the server, logging success/errors with duration for performance analysis, support for synchronous and asynchronous invocations, etc. This is exactly what this devon4j service-client solution brings to you.

+
+
+
+
Usage
+
+

Spring

+
+
+

For Spring, follow the Spring rest-client guide.

+
+
+

Quarkus

+
+
+

For Quarkus, we recommend to follow the official Quarkus rest-client guide

+
+
+ +
+

==Testing

+
+
+
+
General best practices
+
+

For testing please follow our general best practices:

+
+
+
    +
  • +

    Tests should have a clear goal that should also be documented.

    +
  • +
  • +

    Tests have to be classified into different integration levels.

    +
  • +
  • +

    Tests should follow a clear naming convention.

    +
  • +
  • +

    Automated tests need to properly assert the result of the tested operation(s) in a reliable way. E.g. avoid stuff like assertThat(service.getAllEntities()).hasSize(42) or even worse tests that have no assertion at all.

    +
  • +
  • +

    Tests need to be independent of each other. Never write test-cases or tests (in Java @Test methods) that depend on another test to be executed before.

    +
  • +
  • +

    Use AssertJ to write good readable and maintainable tests that also provide valuable feedback in case a test fails. Do not use legacy JUnit methods like assertEquals anymore!

    +
  • +
  • +

    For easy understanding divide your test in three commented sections:

    +
    +
      +
    • +

      //given

      +
    • +
    • +

      //when

      +
    • +
    • +

      //then

      +
    • +
    +
    +
  • +
  • +

    Plan your tests and test data management properly before implementing.

    +
  • +
  • +

    Instead of having a too strong focus on test coverage better ensure you have covered your critical core functionality properly and review the code including tests.

    +
  • +
  • +

    Test code shall NOT be seen as second class code. You shall consider design, architecture and code-style also for your test code but do not over-engineer it.

    +
  • +
  • +

    Test automation is good but should be considered in relation to cost per use. Creating full coverage via automated system tests can cause a massive amount of test-code that can turn out as a huge maintenance hell. Always consider all aspects including product life-cycle, criticality of use-cases to test, and variability of the aspect to test (e.g. UI, test-data).

    +
  • +
  • +

    Use continuous integration and establish that the entire team wants to have clean builds and running tests.

    +
  • +
  • +

    Prefer delegation over inheritance for cross-cutting testing functionality. Good places to put this kind of code can be realized and reused via the JUnit @Rule mechanism.

    +
  • +
+
+
+
+
Test Automation Technology Stack
+
+

For test automation we use JUnit. However, we are strictly doing all assertions with AssertJ. For mocking we use Mockito. +In order to mock remote connections we use WireMock.

+
+
+

For testing entire components or sub-systems we recommend to use for Spring stack spring-boot-starter-test as lightweight and fast testing infrastructure that is already shipped with devon4j-test. For Quarkus, you can add the necessary extensions manually such as quarkus-junit5, quarkus-junit5-mockito, assertj-core etc.

+
+
+

In case you have to use a full blown JEE application server, we recommend to use arquillian. To get started with arquillian, look here.

+
+
+
+
Test Doubles
+
+

We use test doubles as generic term for mocks, stubs, fakes, dummies, or spys to avoid confusion. Here is a short summary from stubs VS mocks:

+
+
+
    +
  • +

    Dummy objects specifying no logic at all. May declare data in a POJO style to be used as boiler plate code to parameter lists or even influence the control flow towards the test’s needs.

    +
  • +
  • +

    Fake objects actually have working implementations, but usually take some shortcut which makes them not suitable for production (an in memory database is a good example).

    +
  • +
  • +

    Stubs provide canned answers to calls made during the test, usually not responding at all to anything outside what’s programmed in for the test. Stubs may also record information about calls, such as an email gateway stub that remembers the messages it 'sent', or maybe only how many messages it 'sent'.

    +
  • +
  • +

    Mocks are objects pre-programmed with expectations, which form a specification of the calls they are expected to receive.

    +
  • +
+
+
+

We try to give some examples, which should make it somehow clearer:

+
+
+
Stubs
+
+

Best Practices for applications:

+
+
+
    +
  • +

    A good way to replace small to medium large boundary systems, whose impact (e.g. latency) should be ignored during load and performance tests of the application under development.

    +
  • +
  • +

    As stub implementations rely on state-based verification, there is the threat that test developers will partially reimplement the state transitions based on the replaced code. This will immediately lead to a black maintenance hole, so better use mocks to assure the expected behavior on interface level.

    +
  • +
  • +

    Do NOT use stubs as basis of a large amount of test cases as due to state-based verification of stubs, test developers will enrich the stub implementation to become a large monster with its own hunger after maintenance efforts.

    +
  • +
+
+
+
+
Mocks
+
+

Best Practices for applications:

+
+
+
    +
  • +

    Replace not-needed dependencies of your system-under-test (SUT) to minimize the application context to start of your component framework.

    +
  • +
  • +

    Replace dependencies of your SUT to impact the control flow under test without establishing all the context parameters needed to match the control flow.

    +
  • +
  • +

    Remember: Not everything has to be mocked! Especially on lower levels of tests like isolated module tests you can be betrayed into a mocking delusion, where you end up in a hundred lines of code mocking the whole context and five lines executing the test and verifying the mocks behavior. Always keep in mind the benefit-cost ratio, when implementing tests using mocks.

    +
  • +
+
+
+
+
WireMock
+
+

If you need to mock remote connections such as HTTP-Servers, WireMock offers easy to use functionality. For a full description see the homepage or the github repository. Wiremock can be used either as a JUnit Rule, in Java outside of JUnit or as a standalone process. The mocked server can be configured to respond to specific requests in a given way via a fluent Java API, JSON files and JSON over HTTP. An example as an integration to JUnit can look as follows.

+
+
+
+
import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig;
+import com.github.tomakehurst.wiremock.junit.WireMockRule;
+
+public class WireMockOfferImport{
+
+  @Rule
+  public WireMockRule mockServer = new WireMockRule(wireMockConfig().dynamicPort());
+
+  @Test
+  public void requestDataTest() throws Exception {
+  int port = this.mockServer.port();
+  ...}
+
+
+
+

This creates a server on a randomly chosen free port on the running machine. You can also specify the port to be used if wanted. Other than that there are several options to further configure the server. This includes HTTPs, proxy settings, file locations, logging and extensions.

+
+
+
+
  @Test
+  public void requestDataTest() throws Exception {
+      this.mockServer.stubFor(get(urlEqualTo("/new/offers")).withHeader("Accept", equalTo("application/json"))
+      .withHeader("Authorization", containing("Basic")).willReturn(aResponse().withStatus(200).withFixedDelay(1000)
+      .withHeader("Content-Type", "application/json").withBodyFile("/wireMockTest/jsonBodyFile.json")));
+  }
+
+
+
+

This will stub the URL localhost:port/new/offers to respond with a status 200 message containing a header (Content-Type: application/json) and a body with content given in jsonBodyFile.json if the request matches several conditions. +It has to be a GET request to ../new/offers with the two given header properties.

+
+
+

Note that by default files are located in src/test/resources/__files/. When using only one WireMock server one can omit the this.mockServer before the stubFor call (static method). +You can also add a fixed delay to the response or processing delay with WireMock.addRequestProcessingDelay(time) in order to test for timeouts.

+
+
+

WireMock can also respond with different corrupted messages to simulate faulty behaviour.

+
+
+
+
@Test(expected = ResourceAccessException.class)
+public void faultTest() {
+
+    this.mockServer.stubFor(get(urlEqualTo("/fault")).willReturn(aResponse()
+    .withFault(Fault.MALFORMED_RESPONSE_CHUNK)));
+...}
+
+
+
+

A GET request to ../fault returns an OK status header, then garbage, and then closes the connection.

+
+
+
+
+
Integration Levels
+
+

There are many discussions about the right level of integration for test automation. Sometimes it is better to focus on small, isolated modules of the system - whatever a "module" may be. In other cases it makes more sense to test integrated groups of modules. Because there is no universal answer to this question, devonfw only defines a common terminology for what could be tested. Each project must make its own decision where to put the focus of test automation. There is no worldwide accepted terminology for the integration levels of testing. In general we consider ISTQB. However, with a technical focus on test automation we want to get more precise.

+
+
+

The following picture shows a simplified view of an application based on the devonfw reference architecture. We define four integration levels that are explained in detail below. +The boxes in the picture contain parenthesized numbers. These numbers depict the lowest integration level, a box belongs to. Higher integration levels also contain all boxes of lower integration levels. When writing tests for a given integration level, related boxes with a lower integration level must be replaced by test doubles or drivers.

+
+
+
+Integration Levels +
+
+
+

The main difference between the integration levels is the amount of infrastructure needed to test them. The more infrastructure you need, the more bugs you will find, but the more unstable and the slower your tests will be. So each project has to make a trade-off between the pros and cons of including much infrastructure in tests and has to select the integration levels that fit best to the project.

+
+
+

Consider, that more infrastructure does not automatically lead to a better bug-detection. There may be bugs in your software that are masked by bugs in the infrastructure. The best way to find those bugs is to test with very few infrastructure.

+
+
+

External systems do not belong to any of the integration levels defined here. devonfw does not recommend involving real external systems in test automation. This means, they have to be replaced by test doubles in automated tests. An exception may be external systems that are fully under control of the own development team.

+
+
+

The following chapters describe the four integration levels.

+
+
+
Level 1 Module Test
+
+

The goal of an isolated module test is to provide fast feedback to the developer. Consequently, isolated module tests must not have any interaction with the client, the database, the file system, the network, etc.

+
+
+

An isolated module test is testing a single class or at least a small set of classes in isolation. If such classes depend on other components, external resources, etc., these shall be replaced with a test double.

+
+
+
+
public class MyClassTest extends ModuleTest {
+
+  @Test
+  public void testMyClass() {
+
+    // given
+    MyClass myClass = new MyClass();
+    // when
+    String value = myClass.doSomething();
+    // then
+    assertThat(value).isEqualTo("expected value");
+  }
+
+}
+
+
+
+

For an advanced example see here.

+
+
+
+
Level 2 Component Test
+
+

A component test aims to test components or component parts as a unit. +These tests can access resources such as a database (e.g. for DAO tests). +Further, no remote communication is intended here. Access to external systems shall be replaced by a test double.

+
+
+
    +
  • +

    For Spring stack, they are typically run with a (light-weight) infrastructure such as spring-boot-starter-test. A component-test is illustrated in the following example:

    +
    +
    +
    @SpringBootTest(classes = { MySpringBootApp.class }, webEnvironment = WebEnvironment.NONE)
    +public class UcFindCountryTest extends ComponentTest {
    +  @Inject
    +  private UcFindCountry ucFindCountry;
    +
    +  @Test
    +  public void testFindCountry() {
    +
    +    // given
    +    String countryCode = "de";
    +
    +    // when
    +    TestUtil.login("user", MyAccessControlConfig.FIND_COUNTRY);
    +    CountryEto country = this.ucFindCountry.findCountry(countryCode);
    +
    +    // then
    +    assertThat(country).isNotNull();
    +    assertThat(country.getCountryCode()).isEqualTo(countryCode);
    +    assertThat(country.getName()).isEqualTo("Germany");
    +  }
    +}
    +
    +
    +
    +

    This test will start the entire spring-context of your app (MySpringBootApp). Within the test spring will inject according spring-beans into all your fields annotated with @Inject. In the test methods you can use these spring-beans and perform your actual tests. This pattern can be used for testing DAOs/Repositories, Use-Cases, or any other spring-bean with its entire configuration including database and transactions.

    +
    +
  • +
  • +

    For Quarkus, you can similarly inject the CDI beans and perform tests. An example is shown below:

    +
    +
    +
    @QuarkusTest
    +public class UcFindCountryTest {
    +  @Inject
    +  private UcFindCountry ucFindCountry;
    +  ...
    +
    +
    +
  • +
+
+
+

When you are testing use-cases your authorization will also be in place. Therefore, you have to simulate a logon in advance, which is done via the login method in the above Spring example. The test-infrastructure will automatically do a logout for you after each test method in doTearDown.

+
+
+
+
Level 3 Subsystem Test
+
+

A subsystem test runs against the external interfaces (e.g. HTTP service) of the integrated subsystem. Subsystem tests of the client subsystem are described in the devon4ng testing guide. In devon4j the server (JEE application) is the subsystem under test. The tests act as a client (e.g. service consumer) and the server has to be integrated and started in a container.

+
+
+
    +
  • +

    With devon4j and Spring you can write a subsystem-test as easy as illustrated in the following example:

    +
    +
    +
    @SpringBootTest(classes = { MySpringBootApp.class }, webEnvironment = WebEnvironment.RANDOM_PORT)
    +public class CountryRestServiceTest extends SubsystemTest {
    +
    +  @Inject
    +  private ServiceClientFactory serviceClientFactory;
    +
    +  @Test
    +  public void testFindCountry() {
    +
    +    // given
    +    String countryCode = "de";
    +
    +    // when
    +    CountryRestService service = this.serviceClientFactory.create(CountryRestService.class);
    +    CountryEto country = service.findCountry(countryCode);
    +
    +    // then
    +    assertThat(country).isNotNull();
    +    assertThat(country.getCountryCode()).isEqualTo(countryCode);
    +    assertThat(country.getName()).isEqualTo("Germany");
    +  }
    +}
    +
    +
    +
    +

    Even though not obvious on the first look this test will start your entire application as a server on a free random port (so that it works in CI with parallel builds for different branches) and tests the invocation of a (REST) service including (un)marshalling of data (e.g. as JSON) and transport via HTTP (all in the invocation of the findCountry method).

    +
    +
  • +
+
+
+

Do not confuse a subsystem test with a system integration test. A system integration test validates the interaction of several systems where we do not recommend test automation.

+
+
+
+
Level 4 System Test
+
+

A system test has the goal to test the system as a whole against its official interfaces such as its UI or batches. The system itself runs as a separate process in a way close to a regular deployment. Only external systems are simulated by test doubles.

+
+
+

The devonfw only gives advice for automated system test (TODO see allure testing framework). In nearly every project there must be manual system tests, too. This manual system tests are out of scope here.

+
+
+
+
Classifying Integration-Levels
+
+

For Spring stack, devon4j defines Category-Interfaces that shall be used as JUnit Categories. +Also devon4j provides abstract base classes that you may extend in your test-cases if you like.

+
+
+

devon4j further pre-configures the maven build to only run integration levels 1-2 by default (e.g. for fast feedback in continuous integration). It offers the profiles subsystemtest (1-3) and systemtest (1-4). In your nightly build you can simply add -Psystemtest to run all tests.

+
+
+
+
+
Implementation
+
+

This section introduces how to implement tests on the different levels with the given devonfw infrastructure and the proposed frameworks. +For Spring, see Spring Test Implementation

+
+
+
+
Regression testing
+
+

When it comes to complex output (even binary) that you want to regression test by comparing with an expected result, you should consider Approval Tests using ApprovalTests.Java. +If applied for the right problems, it can be very helpful.

+
+
+
+
Deployment Pipeline
+
+

A deployment pipeline is a semi-automated process that gets software-changes from version control into production. It contains several validation steps, e.g. automated tests of all integration levels. +Because devon4j should fit to different project types - from agile to waterfall - it does not define a standard deployment pipeline. But we recommend to define such a deployment pipeline explicitly for each project and to find the right place in it for each type of test.

+
+
+

For that purpose, it is advisable to have fast running test suite that gives as much confidence as possible without needing too much time and too much infrastructure. This test suite should run in an early stage of your deployment pipeline. Maybe the developer should run it even before he/she checked in the code. Usually lower integration levels are more suitable for this test suite than higher integration levels.

+
+
+

Note, that the deployment pipeline always should contain manual validation steps, at least manual acceptance testing. There also may be manual validation steps that have to be executed for special changes only, e.g. usability testing. Management and execution processes of those manual validation steps are currently not in the scope of devonfw.

+
+
+
+
Test Coverage
+
+

We are using tools (SonarQube/Jacoco) to measure the coverage of the tests. Please always keep in mind that the only reliable message of a code coverage of X% is that (100-X)% of the code is entirely untested. It does not say anything about the quality of the tests or the software though it often relates to it.

+
+
+
+
Test Configuration
+
+

This section covers test configuration in general without focusing on integration levels as in the first chapter.

+
+
+ +
+
+
Configure Test Specific Beans
+
+

Sometimes it can become handy to provide other or differently configured bean implementations via CDI than those available in production. For example, when creating beans using @Bean-annotated methods they are usually configured within those methods. WebSecurityBeansConfig shows an example of such methods.

+
+
+
+
@Configuration
+public class WebSecurityBeansConfig {
+  //...
+  @Bean
+  public AccessControlSchemaProvider accessControlSchemaProvider() {
+    // actually no additional configuration is shown here
+    return new AccessControlSchemaProviderImpl();
+  }
+  //...
+}
+
+
+
+

AccessControlSchemaProvider allows to programmatically access data defined in some XML file, e.g. access-control-schema.xml. Now, one can imagine that it would be helpful if AccessControlSchemaProvider would point to some other file than the default within a test class. That file could provide content that differs from the default. +The question is: how can I change resource path of AccessControlSchemaProviderImpl within a test?

+
+
+

One very helpful solution is to use static inner classes. +Static inner classes can contain @Bean -annotated methods, and by placing them in the classes parameter in @SpringBootTest(classes = { /* place class here*/ }) annotation the beans returned by these methods are placed in the application context during test execution. Combining this feature with inheritance allows to override methods defined in other configuration classes as shown in the following listing where TempWebSecurityConfig extends WebSecurityBeansConfig. This relationship allows to override public AccessControlSchemaProvider accessControlSchemaProvider(). Here we are able to configure the instance of type AccessControlSchemaProviderImpl before returning it (and, of course, we could also have used a completely different implementation of the AccessControlSchemaProvider interface). By overriding the method the implementation of the super class is ignored, hence, only the new implementation is called at runtime. Other methods defined in WebSecurityBeansConfig which are not overridden by the subclass are still dispatched to WebSecurityBeansConfig.

+
+
+
+
//... Other testing related annotations
+@SpringBootTest(classes = { TempWebSecurityConfig.class })
+public class SomeTestClass {
+
+  public static class TempWebSecurityConfig extends WebSecurityBeansConfig {
+
+    @Override
+    @Bean
+    public AccessControlSchemaProvider accessControlSchemaProvider() {
+
+      ClassPathResource resource = new ClassPathResource(locationPrefix + "access-control-schema3.xml");
+      AccessControlSchemaProviderImpl accessControlSchemaProvider = new AccessControlSchemaProviderImpl();
+      accessControlSchemaProvider.setAccessControlSchema(resource);
+      return accessControlSchemaProvider;
+    }
+  }
+}
+
+
+
+

The following chapter of the Spring framework documentation explains the issue, but uses a slightly different way to obtain the configuration.

+
+
+
+
Test Data
+
+

It is possible to obtain test data in two different ways depending on your test’s integration level.

+
+
+
+
+
Debugging Tests
+
+

The following two sections describe two debugging approaches for tests. Tests are either run from within the IDE or from the command line using Maven.

+
+
+
Debugging with the IDE
+
+

Debugging with the IDE is as easy as always. Even if you want to execute a SubsystemTest which needs a Spring context and a server infrastructure to run properly, you just set your breakpoints and click on Debug As → JUnit Test. The test infrastructure will take care of initializing the necessary infrastructure - if everything is configured properly.

+
+
+
+
Debugging with Maven
+
+

Please refer to the following two links to find a guide for debugging tests when running them from Maven.

+
+ +
+

In essence, you first have to execute a test using the command line. Maven will halt just before the test execution and wait for your IDE to connect to the process. When receiving a connection the test will start and then pause at any breakpoint set in advance. +The first link states that tests are started through the following command:

+
+
+
+
mvn -Dmaven.surefire.debug test
+
+
+
+

Although this is correct, it will run every test class in your project and - which is time consuming and mostly unnecessary - halt before each of these tests. +To counter this problem you can simply execute a single test class through the following command (here we execute the TablemanagementRestServiceTest from the restaurant sample application):

+
+
+
+
mvn test -Dmaven.surefire.debug test -Dtest=TablemanagementRestServiceTest
+
+
+
+

It is important to notice that you first have to execute the Maven command in the according submodule, e.g. to execute the TablemanagementRestServiceTest you have first to navigate to the core module’s directory.

+
+
+ +
+

==Transfer-Objects

+
+
+

The technical data model is defined in form of persistent entities. +However, passing persistent entities via call-by-reference across the entire application will soon cause problems:

+
+
+
    +
  • +

    Changes to a persistent entity are directly written back to the persistent store when the transaction is committed. When the entity is sent across the application, changes also tend to take place in multiple places, endangering data sovereignty and leading to inconsistency.

    +
  • +
  • +

    You want to send and receive data via services across the network and have to define what section of your data is actually transferred. If you have relations in your technical model you quickly end up loading and transferring way too much data.

    +
  • +
  • +

    Modifications to your technical data model shall not automatically have impact on your external services causing incompatibilities.

    +
  • +
+
+
+

To prevent such problems transfer-objects are used leading to a call-by-value model and decoupling changes to persistent entities.

+
+
+

In the following sections the different types of transfer-objects are explained. +You will find all according naming-conventions in the architecture-mapping

+
+
+

To structure your transfer objects, we recommend the following approaches:

+
+
+ +
+
+

Also considering the following transfer objects in specific cases:

+
+
+
+
SearchCriteriaTo
+
+

For searching we create or generate a «BusinessObject»SearchCriteriaTo representing a query to find instances of «BusinessObject».

+
+
TO
+
+

There are typically transfer-objects for data that is never persistent. +For very generic cases these just carry the suffix To.

+
+
STO
+
+

We can potentially create separate service transfer objects (STO) (if possible named «BusinessObject»Sto) to keep the service API stable and independent of the actual data-model. +However, we usually do not need this and want to keep our architecture simple. +Only create STOs if you need service versioning and support previous APIs or to provide legacy service technologies that require their own isolated data-model. +In such a case you also need beanmapping between STOs and ETOs/DTOs, which means extra effort and complexity that should be avoided.

+
+
+
+
+
+

==Bean Mapping in devon4j-spring

+
+
+

We have developed a solution that uses a BeanMapper that allows to abstract from the underlying implementation. As mentioned in the general bean mapping guide, we started with Dozer a Java Bean to Java Bean mapper that recursively copies data from one object to another. Now we recommend using Orika. This guide will show an introduction to Orika and Dozer bean-mapper.

+
+
+
+
+
Bean-Mapper Dependency
+
+

To get access to the BeanMapper we have to use either of the below dependency in our POM:

+
+
+
Listing 17. Orika
+
+
<dependency>
+    <groupId>com.devonfw.java.modules</groupId>
+    <artifactId>devon4j-beanmapping-orika</artifactId>
+    <version>2020.12.002</version>
+</dependency>
+
+
+
+
Listing 18. Dozer
+
+
<dependency>
+    <groupId>com.devonfw.java.modules</groupId>
+    <artifactId>devon4j-beanmapping-dozer</artifactId>
+    <version>2020.12.002</version>
+</dependency>
+
+
+
+
+
Bean-Mapper Configuration
+
+
Bean-Mapper Configuration using Dozer
+
+

The BeanMapper implementation is based on an existing open-source bean-mapping framework. +In case of Dozer the mapping is configured src/main/resources/config/app/common/dozer-mapping.xml.

+
+
+

See the my-thai-star dozer-mapping.xml as an example. +Important is that you configure all your custom datatypes as <copy-by-reference> tags and have the mapping from PersistenceEntity (ApplicationPersistenceEntity) to AbstractEto configured properly:

+
+
+
+
 <mapping type="one-way">
+    <class-a>com.devonfw.module.basic.common.api.entity.PersistenceEntity</class-a>
+    <class-b>com.devonfw.module.basic.common.api.to.AbstractEto</class-b>
+    <field custom-converter="com.devonfw.module.beanmapping.common.impl.dozer.IdentityConverter">
+      <a>this</a>
+      <b is-accessible="true">persistentEntity</b>
+    </field>
+</mapping>
+
+
+
+
+
+
Bean-Mapper Configuration using Orika
+
+

Orika with devonfw is configured by default and sets some custom mappings for GenericEntity.java to GenericEntityDto.java. To specify and customize the mappings you can create the class BeansOrikaConfig.java that extends the class BaseOrikaConfig.java from the devon4j.orika package. To register a basic mapping, register a ClassMap for the mapperFactory with your custom mapping. Watch the example below and follow the basic Orika mapping configuration guide and the Orika advanced mapping guide.

+
+
+

Register Mappings:

+
+
+
+
mapperFactory.classMap(UserEntity.class, UserEto.class)
+			.field("email", "email")
+			.field("username", "name")
+			.byDefault()
+			.register();
+
+
+
+
+
Bean-Mapper Usage
+
+

Then we can get the BeanMapper via dependency-injection what we typically already provide by an abstract base class (e.g. AbstractUc). Now we can solve our problem very easy:

+
+
+
+
...
+UserEntity resultEntity = ...;
+...
+return getBeanMapper().map(resultEntity, UserEto.class);
+
+
+
+ +
+

==Datatypes

+
+
+
+
+

A datatype is an object representing a value of a specific type with the following aspects:

+
+
+
    +
  • +

    It has a technical or business specific semantic.

    +
  • +
  • +

    Its JavaDoc explains the meaning and semantic of the value.

    +
  • +
  • +

    It is immutable and therefore stateless (its value assigned at construction time and can not be modified).

    +
  • +
  • +

    It is serializable.

    +
  • +
  • +

    It properly implements #equals(Object) and #hashCode() (two different instances with the same value are equal and have the same hash).

    +
  • +
  • +

    It shall ensure syntactical validation so it is NOT possible to create an instance with an invalid value.

    +
  • +
  • +

    It is responsible for formatting its value to a string representation suitable for sinks such as UI, loggers, etc. Also consider cases like a Datatype representing a password where toString() should return something like "**" instead of the actual password to prevent security accidents.

    +
  • +
  • +

    It is responsible for parsing the value from other representations such as a string (as needed).

    +
  • +
  • +

    It shall provide required logical operations on the value to prevent redundancies. Due to the immutable attribute all manipulative operations have to return a new Datatype instance (see e.g. BigDecimal.add(java.math.BigDecimal)).

    +
  • +
  • +

    It should implement Comparable if a natural order is defined.

    +
  • +
+
+
+

Based on the Datatype a presentation layer can decide how to view and how to edit the value. Therefore a structured data model should make use of custom datatypes in order to be expressive. +Common generic datatypes are String, Boolean, Number and its subclasses, Currency, etc. +Please note that both Date and Calendar are mutable and have very confusing APIs. Therefore, use JSR-310 or jodatime instead. +Even if a datatype is technically nothing but a String or a Number but logically something special it is worth to define it as a dedicated datatype class already for the purpose of having a central javadoc to explain it. On the other side avoid to introduce technical datatypes like String32 for a String with a maximum length of 32 characters as this is not adding value in the sense of a real Datatype. +It is suitable and in most cases also recommended to use the class implementing the datatype as API omitting a dedicated interface.

+
+
+
+— mmm project
+datatype javadoc +
+
+ +
+
+
Datatype Packaging
+
+

For the devonfw we use a common packaging schema. +The specifics for datatypes are as following:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
SegmentValueExplanation

<component>

*

Here we use the (business) component defining the datatype or general for generic datatypes.

<layer>

common

Datatypes are used across all layers and are not assigned to a dedicated layer.

<scope>

api

Datatypes are always used directly as API even though they may contain (simple) implementation logic. Most datatypes are simple wrappers for generic Java types (e.g. String) but make these explicit and might add some validation.

+
+
+
Technical Concerns
+
+

Many technologies like Dozer and QueryDSL’s (alias API) are heavily based on reflection. For them to work properly with custom datatypes, the frameworks must be able to instantiate custom datatypes with no-argument constructors. It is therefore recommended to implement a no-argument constructor for each datatype of at least protected visibility.

+
+
+
+
Datatypes in Entities
+
+

The usage of custom datatypes in entities is explained in the persistence layer guide.

+
+
+
+
Datatypes in Transfer-Objects
+
+
XML
+
+

For mapping datatypes with JAXB see XML guide.

+
+
+
+
JSON
+
+

For mapping datatypes from and to JSON see JSON custom mapping.

+
+
+ +
+

==CORS configuration in Spring

+
+
+
+
+
Dependency
+
+

To enable the CORS support from the server side for your devon4j-Spring application, add the below dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-security-cors</artifactId>
+</dependency>
+
+
+
+
+
Configuration
+
+

Add the below properties in your application.properties file:

+
+
+
+
#CORS support
+security.cors.spring.allowCredentials=true
+security.cors.spring.allowedOriginPatterns=*
+security.cors.spring.allowedHeaders=*
+security.cors.spring.allowedMethods=OPTIONS,HEAD,GET,PUT,POST,DELETE,PATCH
+security.cors.pathPattern=/**
+
+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
AttributeDescriptionHTTP Header

allowCredentials

Decides whether the browser should include any cookies associated with the request (true if cookies should be included).

Access-Control-Allow-Credentials

allowedOrigins

List of allowed origins (use * to allow all origins).

Access-Control-Allow-Origin

allowedMethods

List of allowed HTTP request methods (OPTIONS, HEAD, GET, PUT, POST, DELETE, PATCH, etc.).

-

allowedHeaders

List of allowed headers that can be used during the request (use * to allow all headers requested by the client)

Access-Control-Allow-Headers

pathPattern

Ant-style pattern for the URL paths where to apply CORS. Use "/**" to match all URL paths.

+
+ +
+

==Microservices in devonfw

+
+
+

The Microservices architecture is an approach for application development based on a series of small services grouped under a business domain. Each individual service runs autonomously, communicating with the others through its API. That independence between the different services makes it possible to manage (upgrade, fix, deploy, etc.) each one without affecting the rest of the system’s services. In addition to that the microservices architecture makes it possible to scale specific services when facing an increment of the requests, so the applications based on microservices are more flexible and stable, and can be adapted quickly to demand changes.

+
+
+

However, this new approach, developing apps based on microservices, presents some downsides.

+
+
+

Let’s see the main challenges when working with microservices:

+
+
+
    +
  • +

    Having the applications divided in different services we will need a component (router) to redirect each request to the related microservice. These redirection rules must implement filters to guarantee a proper functionality.

    +
  • +
  • +

    In order to manage correctly the routing process, the application will also need a catalog with all the microservices and its details: IPs and ports of each of the deployed instances of each microservice, the state of each instance and some other related information. This catalog is called Service Discovery.

    +
  • +
  • +

    With all the information of the Service Discovery the application will need to calculate and select between all the available instances of a microservice which is the suitable one. This will be figured out by the library Client Side Load Balancer.

    +
  • +
  • +

    The different microservices will be likely interconnected with each other, that means that in case of failure of one of the microservices involved in a process, the application must implement a mechanism to avoid the error propagation through the rest of the services and provide an alternative as a process result. To solve this, the pattern Circuit Breaker can be implemented in the calls between microservices.

    +
  • +
  • +

    As we have mentioned, the microservices will exchange calls and information with each other so our applications will need to provide a secured context to avoid not allowed operations or intrusions. In addition, since microservices must be able to operate in an isolated way, it is not recommended to maintain a session. To meet this need without using Spring sessions, a token-based authentication is used that exchanges information using the json web token (JWT) protocol.

    +
  • +
+
+
+

In addition to all of this we will find other issues related to this particular architecture that we will address fitting the requirements of each project.

+
+
+
    +
  • +

    Distributed data bases: each instance of a microservice should have only one data base.

    +
  • +
  • +

    Centralized logs: each instance of a microservice creates a log and a trace that should be centralized to allow an easier way to read all that information.

    +
  • +
  • +

    Centralized configuration: each microservice has its own configuration, so our applications should group all those configurations in only one place to ease the configuration management.

    +
  • +
  • +

    Automatized deployments: as we are managing several components (microservices, catalogs, balancers, etc.) the deployment should be automatized to avoid errors and ease this process.

    +
  • +
+
+
+

To address the above, devonfw microservices has an alternative approach Microservices based on Netflix-Tools.

+
+
+ +
+

==Caching +Caching is a technical approach to improve performance. While it may appear easy at first sight, it is an advanced topic. In general, try to use caching only when required for performance reasons. If you come to the point that you need caching, first think about:

+
+
+
    +
  • +

    What to cache?
    +Be sure about what you want to cache. Is it static data? How often will it change? What will happen if the data changes but due to caching you might receive "old" values? Can this be tolerated? For how long? This is not a technical question but a business requirement.

    +
  • +
  • +

    Where to cache?
    +Will you cache data on client or server? Where exactly?

    +
  • +
  • +

    How to cache?
    +Is a local cache sufficient or do you need a shared cache?

    +
  • +
+
+
+
+
Local Cache
+ +
+
+
Shared Cache
+
+
Distributed Cache
+ +
+
+ +
+
Caching of Web-Resources
+ +
+ +
+

==Feature-Toggles

+
+
+

Most software development teams use Feature-Branching to be able to work in parallel and maintain a stable main branch in the VCS. However, Feature-Branching might not be the ideal tool in every case because of big merges and isolation between development groups. In many cases, Feature-Toggles can avoid some of these problems, so these should definitely be considered to be used in the collaborative software development.

+
+
+
+
Implementation with the devonfw
+
+

To use Feature-Toggles with the devonfw, use the Framework Togglz because it has all the features generally needed and provides a great documentation.

+
+
+

For a pretty minimal working example, also see this fork.

+
+
+
Preparation
+
+

The following example takes place in the oasp-sample-core project, so the necessary dependencies have to be added to the according pom.xml file. Required are the main Togglz project including Spring support, the Togglz console to graphically change the feature state and the Spring security package to handle authentication for the Togglz console.

+
+
+
+
<!-- Feature-Toggle-Framework togglz -->
+<dependency>
+  <groupId>org.togglz</groupId>
+  <artifactId>togglz-spring-boot-starter</artifactId>
+  <version>2.3.0.RC2</version>
+</dependency>
+
+<dependency>
+  <groupId>org.togglz</groupId>
+  <artifactId>togglz-console</artifactId>
+  <version>2.3.0.RC2</version>
+</dependency>
+
+<dependency>
+  <groupId>org.togglz</groupId>
+  <artifactId>togglz-spring-security</artifactId>
+  <version>2.3.0.RC2</version>
+</dependency>
+
+
+
+

In addition to that, the following lines have to be included in the spring configuration file application.properties

+
+
+
+
##configuration for the togglz Feature-Toggle-Framework
+togglz.enabled=true
+togglz.console.secured=false
+
+
+
+
+
Small features
+
+

For small features, a simple query of the toggle state is often enough to achieve the desired functionality. To illustrate this, a simple example follows, which implements a toggle to limit the page size returned by the staffmanagement. See here for further details.

+
+
+

This is the current implementation to toggle the feature:

+
+
+
+
// Uncomment next line in order to limit the maximum page size for the staff member search
+// criteria.limitMaximumPageSize(MAXIMUM_HIT_LIMIT);
+
+
+
+

To realise this more elegantly with Togglz, first an enum is required to configure the feature-toggle.

+
+
+
+
public enum StaffmanagementFeatures implements Feature {
+  @Label("Limit the maximum page size for the staff members")
+  LIMIT_STAFF_PAGE_SIZE;
+
+  public boolean isActive() {
+    return FeatureContext.getFeatureManager().isActive(this);
+  }
+}
+
+
+
+

To familiarize the Spring framework with the enum, add the following entry to the application.properties file.

+
+
+
+
togglz.feature-enums=io.oasp.gastronomy.restaurant.staffmanagement.featuremanager.StaffmanagementFeatures
+
+
+
+

After that, the toggle can be used easily by calling the isActive() method of the enum.

+
+
+
+
if (StaffmanagementFeatures.LIMIT_STAFF_PAGE_SIZE.isActive()) {
+  criteria.limitMaximumPageSize(MAXIMUM_HIT_LIMIT);
+}
+
+
+
+

This way, you can easily switch the feature on or off by using the administration console at http://localhost:8081/devon4j-sample-server/togglz-console. If you are getting redirected to the login page, just sign in with any valid user (e.g. admin).

+
+
+
+
Extensive features
+
+

When implementing extensive features, you might want to consider using the strategy design pattern to maintain the overview of your software. The following example is an implementation of a feature which adds a 25% discount to all products managed by the offermanagement.

+
+
+
Therefore there are two strategies needed:
+
    +
  1. +

    Return the offers with the normal price

    +
  2. +
  3. +

    Return the offers with a 25% discount

    +
  4. +
+
+
+

The implementation is pretty straight forward so use this as a reference. Compare this for further details.

+
+
+
+
@Override
+@RolesAllowed(PermissionConstants.FIND_OFFER)
+public PaginatedListTo<OfferEto> findOfferEtos(OfferSearchCriteriaTo criteria) {
+  criteria.limitMaximumPageSize(MAXIMUM_HIT_LIMIT);
+  PaginatedListTo<OfferEntity> offers = getOfferDao().findOffers(criteria);
+
+
+  if (OffermanagementFeatures.DISCOUNT.isActive()) {
+    return getOfferEtosDiscount(offers);
+  } else {
+    return getOfferEtosNormalPrice(offers);
+  }
+
+}
+
+
+// Strategy 1: Return the OfferEtos with the normal price
+private PaginatedListTo<OfferEto> getOfferEtosNormalPrice(PaginatedListTo<OfferEntity> offers) {
+  return mapPaginatedEntityList(offers, OfferEto.class);
+}
+
+// Strategy 2: Return the OfferEtos with the new, discounted price
+private PaginatedListTo<OfferEto> getOfferEtosDiscount(PaginatedListTo<OfferEntity> offers) {
+  offers = addDiscountToOffers(offers);
+  return mapPaginatedEntityList(offers, OfferEto.class);
+}
+
+private PaginatedListTo<OfferEntity> addDiscountToOffers(PaginatedListTo<OfferEntity> offers) {
+  for (OfferEntity oe : offers.getResult()) {
+    Double oldPrice = oe.getPrice().getValue().doubleValue();
+
+    // calculate the new price and round it to two decimal places
+    BigDecimal newPrice = new BigDecimal(oldPrice * 0.75);
+    newPrice = newPrice.setScale(2, RoundingMode.HALF_UP);
+
+    oe.setPrice(new Money(newPrice));
+  }
+
+  return offers;
+}
+
+
+
+
+
+
Guidelines for a successful use of feature-toggles
+
+

The use of feature-toggles requires a specified set of guidelines to maintain the overview on the software. The following is a collection of considerations and examples for conventions that are reasonable to use.

+
+
+
Minimize the number of toggles
+
+

When using too many toggles at the same time, it is hard to maintain a good overview of the system and things like finding bugs are getting much harder. Additionally, the management of toggles in the configuration interface gets more difficult due to the amount of toggles.

+
+
+

To prevent toggles from piling up during development, a toggle and the associated obsolete source code should be removed after the completion of the corresponding feature. In addition to that, the existing toggles should be revisited periodically to verify that these are still needed and therefore remove legacy toggles.

+
+
+
+
Consistent naming scheme
+
+

A consistent naming scheme is the key to a structured and easily maintainable set of features. This should include the naming of toggles in the source code and the appropriate naming of commit messages in the VCS. The following section contains an example for a useful naming scheme including a small example.

+
+
+

Every Feature-Toggle in the system has to get its own unique name without repeating any names of features, which were removed from the system. The chosen names should be descriptive names to simplify the association between toggles and their purpose. If the feature should be split into multiple sub-features, you might want to name the feature like the parent feature with a describing addition. If for example you want to split the DISCOUNT feature into the logic and the UI part, you might want to name the sub-features DISCOUNT_LOGIC and DISCOUNT_UI.

+
+
+

The entry in the togglz configuration enum should be named identically to the aforementioned feature name. The explicitness of feature names prevents a confusion between toggles due to using multiple enums.

+
+
+

Commit messages are very important for the use of feature-toggles and also should follow a predefined naming scheme. You might want to state the feature name at the beginning of the message, followed by the actual message, describing what the commit changes to the feature. An example commit message could look like the following:

+
+
+
+
DISCOUNT: Add the feature-toggle to the offermanagement implementation.
+
+
+
+

Mentioning the feature name in the commit message has the advantage, that you can search your git log for the feature name and get every commit belonging to the feature. An example for this using the tool grep could look like this.

+
+
+
+
$ git log | grep -C 4 DISCOUNT
+
+commit 034669a48208cb946cc6ba8a258bdab586929dd9
+Author: Florian Luediger <florian.luediger@somemail.com>
+Date:   Thu Jul 7 13:04:37 2016 +0100
+
+DISCOUNT: Add the feature-toggle to the offermanagement implementation.
+
+
+
+

To keep track of all the features in your software system, a platform like GitHub offers issues. When creating an issue for every feature, you can retrace, who created the feature and who is assigned to completing its development. When referencing the issue from commits, you also have links to all the relevant commits from the issue view.

+
+
+
+
Placement of toggle points
+
+

To maintain a clean codebase, you definitely want to avoid using the same toggle in different places in the software. There should be one single query of the toggle which should be able to toggle the whole functionality of the feature. If one single toggle point is not enough to switch the whole feature on or off, you might want to think about splitting the feature into multiple ones.

+
+
+
+
Use of fine-grained features
+
+

Bigger features in general should be split into multiple sub-features to maintain the overview on the codebase. These sub-features get their own feature-toggle and get implemented independently.

+
+
+ +
+

==Accessibility

+
+
+

TODO

+
+ + + +
+ +
+ + + + + +
+ + +devon4j-kafka has been abandoned. Its main feature was the implementation of a retry pattern using multiple topics. This implementation has become an integral part of Spring Kafka. We recommend using Spring Kafka's own implementation for retries. +
+
+
+

==Messaging Services

+
+
+

Messaging Services provide an asynchronous communication mechanism between applications. Technically this is implemented using Apache Kafka.

+
+
+

For spring, devonfw uses Spring-Kafka as kafka framework. +For more details, check the devon4j-kafka.

+
+ +
+ +
+

==Messaging

+
+
+

Messaging in Java is done using the JMS standard from JEE.

+
+
+
+
+
Products
+
+

For messaging you need to choose a JMS provider such as:

+
+
+ +
+
+
+
Receiver
+
+

Since a receiver of messages receives data from other systems, it is located in the service-layer.

+
+
+
JMS Listener
+
+

A JmsListener is a class listening and consuming JMS messages. It should carry the suffix JmsListener and implement the MessageListener interface or have its listener method annotated with @JmsListener. This is illustrated by the following example:

+
+
+
+
@Named
+@Transactional
+public class BookingJmsListener /* implements MessageListener */ {
+
+  @Inject
+  private Bookingmanagement bookingmanagement;
+
+  @Inject
+  private MessageConverter messageConverter;
+
+  @JmsListener(destination = "BOOKING_QUEUE", containerFactory = "jmsListenerContainerFactory")
+  public void onMessage(Message message) {
+    try {
+      BookingTo bookingTo = (BookingTo) this.messageConverter.fromMessage(message);
+      this.bookingmanagement.importBooking(bookingTo);
+    } catch (MessageConversionException | JMSException e) {
+      throw new InvalidMessageException(message);
+    }
+  }
+}
+
+
+
+
+
+
Sender
+
+

The sending of JMS messages is considered as any other sending of data like kafka messages or RPC calls via REST using service-client, gRPC, etc. +This will typically happen directly from a use-case in the logic-layer. +However, the technical complexity of the communication and protocols itself shall be hidden from the use-case and not be part of the logic layer. +With spring we can simply use JmsTemplate to do that.

+
+
+ +
+

==Full Text Search

+
+
+

If you want to offer your users fast and simple searches with just a single search field (like in google), you need full text indexing and search support.

+
+
+
+
Solutions
+
+ +
+
+

Maybe you also want to use native features of your database

+
+ +
+
+
Best Practices
+
+

TODO

+
+
+
+
+

1.78. Tutorials

+ +
+

==Creating a new application

+
+
+
Running the archetype
+
+

In order to create a new application you must use the archetype provided by devon4j which uses the maven archetype functionality.

+
+
+

To create a new application, you should have installed devonfw IDE. Follow the devon ide documentation to install +it. +You can choose between 2 alternatives: create it from the command line or, in a more visual manner, within Eclipse.

+
+
+
From command Line
+
+

To create a new devon4j application from command line, you can simply run the following command:

+
+
+
+
devon java create com.example.application.sampleapp
+
+
+
+

For low-level creation you can also manually call this command:

+
+
+
+
mvn -DarchetypeVersion=${devon4j.version} -DarchetypeGroupId=com.devonfw.java.templates -DarchetypeArtifactId=devon4j-template-server archetype:generate -DgroupId=com.example.application -DartifactId=sampleapp -Dversion=1.0.0-SNAPSHOT -Dpackage=com.devonfw.application.sampleapp
+
+
+
+

Attention: The archetypeVersion (first argument) should be set to the latest version of devon4j. You can easily determine the version from this badge: +latest devon4j version

+
+
+

Further providing additional properties (using -D parameter) you can customize the generated app:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 17. Options for app template
propertycommentexample

dbType

Choose the type of RDBMS to use (hana, oracle, mssql, postgresql, mariadb, mysql, etc.)

-DdbType=postgresql

batch

Option to add a batch module

-Dbatch=batch

+
+
+
From Eclipse
+
+
+
After that, you should follow this Eclipse steps to create your application:
+
+
+
+
    +
  • +

    Create a new Maven Project.

    +
  • +
  • +

    Choose the devon4j-template-server archetype, just like the image.

    +
  • +
+
+
+
+Select archetype +
+
+
+
    +
  • +

    Fill the Group Id, Artifact Id, Version and Package for your project.

    +
  • +
+
+
+
+Configure archetype +
+
+
+
    +
  • +

    Finish the Eclipse assistant and you are ready to start your project.

    +
  • +
+
+
+
+
+
What is generated
+
+

The application template (archetype) generates a Maven multi-module project. It has the following modules:

+
+
+
    +
  • +

    api: module with the API (REST service interfaces, transferobjects, datatypes, etc.) to be imported by other apps as a maven dependency in order to invoke and consume the offered (micro)services.

    +
  • +
  • +

    core: maven module containing the core of the application.

    +
  • +
  • +

    batch: optional module for batch(es)

    +
  • +
  • +

    server: module that bundles the entire app (core with optional batch) as a WAR file.

    +
  • +
+
+
+

The toplevel pom.xml of the generated project has the following features:

+
+
+
    +
  • +

    Properties definition: Spring-boot version, Java version, etc.

    +
  • +
  • +

    Modules definition for the modules (described above)

    +
  • +
  • +

    Dependency management: define versions for dependencies of the technology stack that are recommended and work together in a compatible way.

    +
  • +
  • +

    Maven plugins with desired versions and configuration

    +
  • +
  • +

    Profiles for test stages

    +
  • +
+
+
+
+
How to run your app
+
+
Run app from IDE
+
+

To run your application from your favourite IDE, simply launch SpringBootApp as java application.

+
+
+
+
Run app as bootified jar or war
+
+

More details are available here.

+
+ +
+

==Quarkus

+
+
+

Quarkus is a Java framework for building cloud-native apps. +It is fully supported by devonfw as an option and alternative to spring. +Additional things like extensions will be available on the devon4quarkus GitHub repository.

+
+
+
+
+
+

1.79. Guide to the Reader

+
+

Depending on your intention of reading this document, you might be more interested in the following chapters:

+
+
+
    +
  • +

    If you are completely new to Quarkus, you may be interested in the pros and cons of Quarkus. Also, take a look at the official Quarkus website. You might also be interested in the features that GraalVM offers.

    +
  • +
  • +

    If you are new to devon4j, take a look at devon4j’s recommendations on general best practices. Check out the chapters on architecture design, project structuring, and coding conventions. Follow the referenced links to explore a topic in more depth.

    +
  • +
  • +

    If you are an experienced Spring developer and want to get in touch with Quarkus, read our Getting started with Quarkus for Spring developers guide.

    +
  • +
  • +

    If you’re looking to build your first Quarkus application, the Quarkus website offers some good getting started guides. Also, check out our Quarkus template guide, which gives you some recommendations on extensions and frameworks to use. It also provides some links to the Quarkus code generator with preselected configurations you can use to create your application.

    +
  • +
  • +

    If you want to have a Quarkus sample application using devon4j recommendations, check out our Quarkus reference application.

    +
  • +
  • +

    If you have a Spring application and want to migrate it to Quarkus, take a look at our migration guide.

    +
  • +
  • +

    If you already have some experience with devon4j and Quarkus and need more information on a specific topic, check out our Quarkus guides. If you don’t find what you are looking for there, check out the general section. devon4j uses general solutions for Java, so solutions for both Quarkus and Spring are documented there.

    +
  • +
  • +

    If you want to learn how to build native images, check out this guide.

    +
  • +
+
+
+
+

1.80. Pros

+
+

Quarkus offers the following benefits:

+
+
+
    +
  • +

    fast turn-around cycles for developers
    +Save changes in your Java code and immediately test the results without restarting or waiting

    +
  • +
  • +

    faster start-up and less memory footprint
    +When building your app as native-images via GraalVM, it gets highly optimized. As a result, it starts up lightning fast and consumes much less memory. This is a great advantage for cloud deployment as well as for sustainability. You can find a performance comparison between Spring and Quarkus here.

    +
  • +
  • +

    clean and lean +As quarkus was born as a cloud-native framework, it is very light-weight and does not carry much history and legacy.

    +
  • +
+
+
+
+

1.81. Cons

+
+

Quarkus has the following drawbacks:

+
+
+
    +
  • +

    less flexible
+Quarkus is less flexible compared to spring, or in other words, it is more biased and coupled to specific implementations. However, the implementations work and you have fewer things to choose and worry about. That said, in case you want to integrate a specific or custom library, you may hit limitations or lose support for native-images, especially when that library is based on reflection. Therefore, check your requirements and technology stack early on when making your choice.

    +
  • +
  • +

    less established
    +Since quarkus was born in 2019, it is modern but also less established. It will be easier to get developers for spring, but we already consider quarkus mature and established enough for building production-ready apps.

    +
  • +
+
+
+

==Quarkus Quickstart

+
+
+

This guide serves as a quickstart on how to create a Quarkus app, briefly presenting the key functionalities that Quarkus provides, both for beginners and experienced developers.

+
+
+

1.81.1. Introduction to Quarkus

+
+

To get a first introduction to Quarkus, you can read the Quarkus introduction guide. To get a brief overview of where you can find the important Quarkus related guides, follow the chapter guide to the reader. +Also, see the comparison of the advantages and disadvantages of a Quarkus application compared to the alternative framework Spring. +This comparison will be supported by our performance comparison between Spring and Quarkus, which demonstrates the lower resource consumption and startup time of Quarkus applications.

+
+
+
+

1.81.2. Installation of Tools and Dependencies

+
+

First, we need to install some dependencies and tools before we can start programming. Our tool devonfw-ide comes with many development tools for you. +We need to install the following tools for this guide:

+
+
+
    +
  • +

    Maven

    +
  • +
  • +

    Java

    +
  • +
  • +

    any IDE (devonfw-ide supports Eclipse, Intellij and VScode)

    +
  • +
  • +

    Docker

    +
  • +
+
+
+

We recommend installing the devonfw-ide with the tools, but if you already have your system configured and the tools above installed, you can skip to Bootstrap a Quarkus Project, otherwise we will show you how to set up and update your devonfw-ide.

+
+
+
devonfw-ide
+
    +
  1. +

    Install devonfw-ide
    +Follow the Setup to install the devonfw-ide with Java, Maven, Eclipse and VScode.

    +
    +
      +
    1. +

      Command to install Docker
      +devon docker setup

      +
    2. +
    +
    +
  2. +
  3. +

    Update devonfw-ide
    +As we are still working on improving devonfw-ide, we recommend to update your already installed devonfw-ide and tools in order to include essential features for cloud development with Quarkus that you could be missing.

    +
  4. +
+
+
+

Use the commands devon ide update, devon ide update software, and devon ide scripts to update devonfw-ide and all installed software.

+
+
+

Go to the main folder under workspaces of the devonfw-ide installation. +We will create the project there.

+
+
+
+

1.81.3. Bootstrap a Quarkus Project

+
+

Quarkus provides multiple ways to bootstrap a project. +The option to bootstrap a project via the command-line is shown in the Quarkus getting started guide Bootstrap the project. +Quarkus also provides a project builder where you can select some extensions, the build tool for your project, and if you want, some starter code. +This will deliver a project skeleton with the configured project dependencies and also contributes the information to compile the application natively. To get some recommendations on starter templates, follow the guide on: template recommendations.

+
+
+ + + + + +
+ + +
+

By creating a Quarkus project from the command-line or with the project builder, you get a different project structure and have to adapt it to the devon4j conventions shown in the next Chapter.

+
+
+
+
+
Project Structure
+
+

We provide a recommendation and guideline for a modern project structure to help organize your project into logically related modules. +In order to comply with the requirements of modern cloud development and microservice architectures, follow the guide and apply the modern project structure to your project. You can also find similar modules in our example projects.

+
+
+
+
+

1.81.4. Introduction to Quarkus Functionality

+
+

Before we start programming, you should first have a look at the functionality of Quarkus.

+
+
+
Quarkus functionality guides
+
    +
  1. +

    Getting started guide from Quarkus
    +This guide presents a good overview of the functionality of Quarkus. The simple Greeting Service gives a brief introduction into concepts like CDI, testing, dev mode, packaging, and running the app.

    +
  2. +
  3. +

    From Spring to Quarkus
    +For experienced Spring developers that have already followed devon4j guidelines, you can read our guide to getting started with Quarkus for Spring developer, as it goes more into the differences that can give you a more detailed comparison to Spring.

    +
    +
      +
    1. +

      Migrate a Spring app to Quarkus
      +This guide shows how to migrate a Spring application to a Quarkus application with devon4j conventions.

      +
    2. +
    +
    +
  4. +
+
+
+
+

1.81.5. Create a REST service

+
+

Now let’s create our first REST CRUD service with Quarkus. +We give you the options of using a guide to start to code the service yourself or to just download a service that’s ready to use.

+
+
+
Options
+
    +
  1. +

    Create the service yourself
+There is a good Quarkus guide for a simple JSON REST service that will guide you through your first application and help you implement the definition of endpoints with JAX-RS and an Entity that will be managed by the service, and also show how to configure the JSON support.

    +
  2. +
  3. +

    Use an existing Quarkus project
    +You don’t want to code a service and just want to test some Quarkus functionalities? Just load a Quarkus sample project provided for every existing quickstart guide and the supported framework. +Our Team also provides some Quarkus applications that are working and can be loaded and tested.

    +
    +
      +
    • +

      reference project is a service that manages products. It contains the devon4j modern project structure, pagination, queries, a Postgres database, SwaggerUI, and support for Kubernetes deploy. To add OpenTelemetry support, see the following guide. +This project will be steadily improved and is used to showcase the abilities of Quarkus with devon4j.

      +
    • +
    • +

      minimal Quarkus project is just the Quarkus project from a getting started with Quarkus guide with a Greeting Services modified with the correct modern structure mentioned in the chapter Project Structure

      +
    • +
    +
    +
  4. +
+
+
+
+

1.81.6. OpenAPI generation

+
+

We provide a guide with a short introduction to the OpenAPI specification with two plugins that are important in a Quarkus Context.

+
+ +
+

A more detailed usage guide to the Smallrye Plugin is provided by Quarkus OpenAPI and Swagger guide.

+
+
+
+

1.81.7. How to Integrate a Database

+
+

The next step for our REST service would be to integrate a database to store the objects of the entity.

+
+
+

With Quarkus, adding a database can be easy, because Quarkus can take over the build-up and connection process. +First, you should understand our guides on the concepts of working with data. Then, we will show how to integrate a database with Quarkus.

+
+
+
Data Principles Guides
+
    +
  1. +

    General devon4j JPA guide
    +To get an insight into the general JPA usage, read the JPA guide containing a general explanation of the Java Persistence API.

    +
  2. +
  3. +

    Difference to SpringData
    +If you have already worked with SpringData, this is also partially supported with Quarkus. This is explained in more detail in this SpringData Guide.

    +
  4. +
+
+
+
Database Integration
+
    +
  1. +

    Quarkus zero config dev mode
    +Starting with the database implementation in Quarkus, we recommend for beginners to use the DEV mode Zero Config Setup (Dev Services). This is especially great for testing the code without a database set up. +Quarkus does all the work for you and configures a database and creates the database and tables (schemas) for you.

    +
    +
      +
    1. +

      Configuration Properties
      +A list of all database configuration properties for the Dev services

      +
    2. +
    +
    +
  2. +
  3. +

    Integrate a simple Hibernate ORM database
    +The zero config setup only works with the Dev mode, it’s comfortable in the first phases of the creation of your service but if the goal is to also get a deployable version, you have to create your own database and integrate it. +This Quarkus guide shows, how to integrate a Hibernate ORM database with an example service.

    +
    +
      +
    1. +

      Configuration list for JDBC
      +A list of all possible JDBC configuration properties

      +
    2. +
    +
    +
  4. +
  5. +

    Reactive CRUD application with Panache
    +Quarkus unifies reactive and imperative programming. +Reactive is an architectural principle to build robust, efficient, and concurrent applications. +For an introduction into reactive and how Quarkus enables it, follow this Quarkus reactive architecture article and also the reactive quickstart. +To get started with reactive and implement reactive methods, you can follow the Quarkus reactive guide. +The reactive guide uses the Quarkus based implementation of a Hibernate ORM called Panache. +The implementation is not our first choice with devon4j and therefore not part of our recommendations, but to understand the reactive guide you can read the Hibernate ORM with Panache guide first to prevent possible problems following the guide.

    +
  6. +
+
+
+ + + + + +
+ + +
+

You need an installed Docker version for the zero config setup.

+
+
+
+
+
Database Migration
+

For schema-based databases, we recommend migrating databases with Flyway. +In that case, our general migration guide can give you an overview if you are not familiar with migration. +.. Flyway guide for Quarkus +This Quarkus guide will show how to work with the Flyway extension in a Quarkus application. +This should be used if you start your own database and do not leave the creation to Quarkus.

+
+
+
+

1.81.8. Testing a Quarkus Application

+
+

After we have built the service, we have to verify it with some tests. +We will give you some guidelines to implement some test cases.

+
+
+
Testing Guides
+
    +
  1. +

    General testing guide
    +For users that aren’t familiar with the devon4j testing principles, we created a general best practices and recommendations guide for testing.

    +
    +
      +
    1. +

      Our guide for testing with Quarkus +In addition, we also provide a guide that specifically addresses the testing of a Quarkus application.

      +
    2. +
    +
    +
  2. +
+
+
+

Most of the Quarkus applications are already equipped with a basic test and our reference project provides some further test cases. If you want to improve and extend the tests, you can also follow the large Quarkus guide for testing.

+
+
+
+

1.81.9. Packaging of a Quarkus application and creation of a native executable

+
+

Quarkus applications can be packaged into different file types. The following link will show you how to build them and give you a short explanation of the characteristics of these files.

+
+
+
Package types
+
    +
  1. +

    fast-jar

    +
  2. +
  3. +

    mutable-jar

    +
  4. +
  5. +

    uber-jar

    +
  6. +
  7. +

    native executable

    +
  8. +
+
+
+

To package an application, use the command mvn package and Quarkus will generate the output in the /target folder. For the native executables, the command needs more parameters, which is explained in the link above.

+
+
+

Configure the Output with these configuration properties

+
+
+
+

1.81.10. Create and build a Docker Image

+
+

Quarkus supports Jib, S2I and Docker for building images. We focus on building a Quarkus App with Docker. +You get a generated Dockerfile from Quarkus in the src/main/docker folder of any project generated from Quarkus. There are multiple Dockerfiles.

+
+
+
Dockerfiles
+
    +
  1. +

    Dockerfile.jvm
    +Dockerfile for a Quarkus application in JVM mode, running in a Red Hat Universal Base Image 8 Minimal Container.

    +
  2. +
  3. +

    Dockerfile.legacy-jar
    +Dockerfile for a Quarkus application in JVM mode with the legacy jar, running in a Red Hat Universal Base Image 8 Minimal Container.

    +
  4. +
  5. +

    Dockerfile.native
    +Dockerfile using the native executable running in Red Hat Universal Base Image 8 Minimal container.

    +
  6. +
  7. +

    Dockerfile.native-distroless +The native file will run in a Distroless container. Distroless images are very small containers with just the application and runtime dependencies and without the other programs that come with a Linux distribution.

    +
  8. +
+
+
+
+
+

For more information to the different executables go back to the chapter Packaging of a Quarkus application and creation of a native executable

+
+
+
+
+

To simply build and run a Docker image, you can follow the instructions Quarkus provides for every Dockerfile in the comments block.

+
+
+

Docker commands example for the JVM Dockerfile from our reference project

+
+
+
+
####
+##This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode
+#
+##Before building the container image run:
+#
+##./mvnw package
+#
+##Then, build the image with:
+#
+##docker build -f src/main/docker/Dockerfile.jvm -t quarkus/quarkus-basics-jvm .
+#
+##Then run the container using:
+#
+##docker run -i --rm -p 8080:8080 quarkus/quarkus-basics-jvm
+#
+##If you want to include the debug port into your docker image
+##you will have to expose the debug port (default 5005) like this :  EXPOSE 8080 5005
+#
+##Then run the container using :
+#
+##docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/quarkus-basics-jvm
+#
+###
+
+
+
+

Quarkus is also able to build the image while packaging the application, so you don’t have to execute the command from above. +To perform Docker builds with the generated Dockerfiles from above, you need to add the following extension to your project with the command mvn quarkus:add-extension -Dextensions="container-image-docker".

+
+
+

You also have to set the quarkus.container-image.build=true. You can add this to your application.properties or just append it to the packaging command like this: ./mvnw package -Dquarkus.container-image.build=true.

+
+
+

If your needs exceed the instructions given by the file, we recommend to follow the Docker getting started guide to get familiar with Docker and customize the Dockerfiles according to your needs. +To specify your container build, you can use the general container image configurations properties and the Docker image configurations properties when building and running Docker images.

+
+ +
+

==Migrate from Spring to Quarkus

+
+
+

This guide will cover the migration process of a Spring application to a Quarkus application. There are already articles about migrating from Spring to Quarkus (e.g. https://developers.redhat.com/blog/2020/04/10/migrating-a-spring-boot-microservices-application-to-quarkus, https://dzone.com/articles/migrating-a-spring-boot-application-to-quarkus-cha). +This guide will focus more on the devon4j specific aspects. We assume that a working Spring application exists, built in the classic devon4j specific way (e.g. Jump The Queue or My Thai Star).

+
+
+
+

1.81.11. Create the Quarkus application

+
+

We start with an empty Quarkus project. You can create the project with Maven on the command line or use the online generator. The advantage of the online generator is that you have a pre-selection of dependencies to use in your project. +For starters, let’s select the basic dependencies required to develop a REST service with database connectivity (you can use one of the links in the Quarkus template guide): RESTEasy JAX-RS, RESTEasy Jackson, Hibernate ORM, Spring Data JPA API, JDBC Driver (choose the type of database you need), Flyway (if you have database migration schemas), SmallRye Health (optional for Health Monitoring)

+
+
+

The list does not include all required dependencies. We will add more dependencies to the project later. For now, generate the application with these dependencies.

+
+
+
Migration Toolkit from Red Hat
+
+

Red Hat provides a migration toolkit (MTA, Migration Toolkit for Applications), that supports migration of a Spring to a Quarkus application. There are several versions of this toolkit (e.g., a web console, a Maven plugin, or an IDE plugin). +The MTA analyzes your existing application and generates a report with hints and instructions for migrating from Spring to Quarkus. For example, it gives you an indication of which dependencies are not supported in your project for a Quarkus application and which dependencies you need to swap them with. The analysis is rule-based, and you can also add your own rules that will be checked during analysis.

+
+
+
+
+

1.81.12. Entities

+
+

There is nothing special to consider when creating the entities. In most cases, you can simply take the code from your Spring application and use it for your Quarkus application. Usually, the entities extend a superclass ApplicationPersistenceEntity containing, for example, the id property. You can also take this class from your Spring application and reuse it.

+
+
+
+

1.81.13. Transfer objects

+
+

The next step is to create the appropriate transfer objects for the entities. In a devon4j Spring application, we would use CobiGen to create these classes. Since CobiGen is not usable for this purpose in Quarkus applications yet, we have to create the classes manually.

+
+
+

First, we create some abstract base classes for the search criteria and DTO classes. Normally, these would also be created by CobiGen.

+
+
+
Listing 19. AbstractSearchCriteriaTo
+
+
public abstract class AbstractSearchCriteriaTo extends AbstractTo {
+
+  private static final long serialVersionUID = 1L;
+
+  private Pageable pageable;
+
+  //getter + setter for pageable
+}
+
+
+
+
Listing 20. AbstractDto
+
+
public abstract class AbstractDto extends AbstractTo {
+
+  private static final long serialVersionUID = 1L;
+
+  private Long id;
+
+  private int modificationCounter;
+
+  public AbstractDto() {
+
+    super();
+  }
+
+  //getter + setter
+
+  @Override
+  protected void toString(StringBuilder buffer) {
+    ...
+  }
+}
+
+
+
+

The class AbstractTo, extended by other classes, would be provided by the devon4j-basic module in a devon4j Spring application. You can take the code from here and reuse it in your Quarkus project.

+
+
+

Now you can create your transfer objects. Most of the code of the transfer objects of your Spring application should be reusable. For Quarkus, we recommend (as mentioned here) to use *Dto instead of *Eto classes. Be sure to change the names of the classes accordingly.

+
+
+
+

1.81.14. Data Access Layer

+
+

In devon4j, we propose to use Spring Data JPA to build the data access layer using repositories and Querydsl to build dynamic queries. We will also use this approach for Quarkus applications, but we need to change the implementation because the devon4j modules are based on reflection, which is not suitable for Quarkus. +In Quarkus we will use Querydsl using code generation. So for this layer, more changes are required and we can’t just take the existing code.

+
+
+

First, create a repository interface for your entity class that extends JpaRepository (see here).

+
+
+

To add QueryDSL support to your project, add the following dependencies to your pom.xml file:

+
+
+
Listing 21. pom.xml
+
+
<dependency>
+  <groupId>com.querydsl</groupId>
+  <artifactId>querydsl-jpa</artifactId>
+  <version>4.3.1</version>
+</dependency>
+<dependency>
+  <groupId>com.querydsl</groupId>
+  <artifactId>querydsl-apt</artifactId>
+  <scope>provided</scope>
+  <version>4.3.1</version>
+</dependency>
+
+
+
+

As mentioned above, we will use QueryDSL with code generation. For this, add the QueryDSL annotation processor to your plugins:

+
+
+
Listing 22. pom.xml
+
+
<plugins>
+...
+  <plugin>
+    <groupId>com.mysema.maven</groupId>
+    <artifactId>apt-maven-plugin</artifactId>
+    <version>1.1.3</version>
+    <executions>
+      <execution>
+        <phase>generate-sources</phase>
+        <goals>
+          <goal>process</goal>
+        </goals>
+        <configuration>
+          <outputDirectory>target/generated-sources/annotations</outputDirectory>
+          <processor>com.querydsl.apt.jpa.JPAAnnotationProcessor</processor>
+        </configuration>
+      </execution>
+    </executions>
+  </plugin>
+</plugins>
+
+
+
+

To implement the queries, follow the corresponding guide.

+
+
+

Set the following properties in the application.properties file to configure the connection to your database (see also here):

+
+
+
+
quarkus.datasource.db-kind=...
+quarkus.datasource.jdbc.url=...
+quarkus.datasource.username=...
+quarkus.datasource.password=...
+
+
+
+
+

1.81.15. Logic Layer

+
+

For the logic layer, devon4j uses a use-case approach. You can reuse the use case interfaces from the api module of the Spring application. Again, make sure to rename the transfer objects.

+
+
+

Create the appropriate class that implements the interface. Follow the implementation section of the use-case guide to implement the methods. For mapping the entities to the corresponding transfer objects, see the next section.

+
+
+
+

1.81.16. Mapping

+
+

For bean mapping, we need to use a completely different approach in the Quarkus application than in the Spring application. For Quarkus, we use MapStruct, which creates the mapper at build time rather than at runtime using reflection. Add the following dependencies to your pom.xml.

+
+
+
Listing 23. pom.xml
+
+
<dependency>
+  <groupId>org.mapstruct</groupId>
+  <artifactId>mapstruct-processor</artifactId>
+  <version>1.4.2.Final</version>
+</dependency>
+<dependency>
+  <groupId>org.mapstruct</groupId>
+  <artifactId>mapstruct</artifactId>
+  <version>1.4.2.Final</version>
+</dependency>
+
+
+
+

Then you can create the mapper as follows:

+
+
+
Listing 24. Mapper
+
+
@Mapper(componentModel = "cdi")
+public interface YourEntityMapper {
+  YourEntityDto map(YourEntity entity);
+
+  YourEntity map(YourEntityDto dto);
+
+  ...
+}
+
+
+
+

Inject the mapper into your use-case implementation and simply use the methods. The method implementations of the mapper are created when the application is built.

+
+
+
+

1.81.17. Service Layer

+
+

For the implementation of the service layer, we use the JAX-RS for both Quarkus and Spring applications to create the REST services. Classic devon4j Spring applications rely on Apache CXF as the implementation of JAX-RS. +For Quarkus, we use RESTEasy. Since both are implementations of JAX-RS, much of the Spring application code can be reused.

+
+
+

Take the definition of the REST endpoints from the api module of the Spring application (make sure to rename the transfer objects), inject the use-cases from the logic layer and use them in the REST service methods as follows:

+
+
+
Listing 25. REST service
+
+
@Path("/path/v1")
+public class YourComponentRestService {
+
+  @Inject
+  UcFindYourEntity ucFindYourEntity;
+
+  @Inject
+  UcManageYourEntity ucManageYourEntity;
+
+  @GET
+  @Path("/yourEntity/{id}/")
+  public YourEntityDto getYourEntity(@PathParam("id") long id) {
+
+    return this.ucFindYourEntity.findYourEntity(id);
+  }
+
+  ...
+}
+
+
+
+
+

1.81.18. Summary

+
+

As you have seen, some parts hardly differ when migrating a Spring application to a Quarkus application, while other parts differ more. The above sections describe the parts needed for simple applications that provide REST services with a data access layer. +If you add more functionality, more customization and other frameworks, dependencies may be required. If that is the case, take a look at the corresponding guide on the topic in the devon4j documentation or check if there is a tutorial on the official Quarkus website.

+
+
+

Furthermore, we can summarize that migrating from a Spring application to a Quarkus application is not complex. Although Quarkus is a very young framework (release 1.0 was in 2019), it brings a lot of proven standards and libraries that you can integrate into your application. +This makes it easy to migrate and reuse code from existing (Spring) applications. Also, Quarkus comes with Spring API compatibility for many Spring modules (Spring Data JPA, Spring DI, etc.), which makes it easier for developers to reuse their knowledge.

+
+ +
+

==Spring Native vs Quarkus

+
+
+

Nowadays, it is very common to write an application and deploy it to a cloud. +Serverless computing and Function-as-a-Service (FaaS) have become +very popular. +While many challenges arise when deploying a Java application into the latest cloud environment, the biggest challenges facing developers are the memory footprint and the startup time required +for the Java application, as both of these keep the host’s costs high in public clouds and Kubernetes clusters. With the introduction of frameworks like Micronaut and MicroProfile, Java processes are getting faster and more lightweight. In a similar context, Spring has introduced +Spring Native which aims to solve the big memory footprint of Spring and its slow startup time to potentially rival the new framework called Quarkus, by Red Hat. This document briefly discusses both of these frameworks and their potential suitability with devonfw.

+
+
+
+

1.81.19. Quarkus

+
+

Quarkus is a full-stack, Kubernetes-native Java framework made for JVMs. With its container-first-philosophy and its native compilation with GraalVM, Quarkus optimizes Java for containers with low memory usage and fast startup times.

+
+
+

Quarkus achieves this in the following ways:

+
+
+
    +
  • +

    First Class Support for GraalVM

    +
  • +
  • +

    Build Time Metadata Processing: As much processing as possible is +done at build time, so your application will only contain the classes +that are actually needed at runtime. This results in less memory usage, +and also faster startup time, as all metadata processing has already been +done.

    +
  • +
  • +

    Reduction in Reflection Usage: Quarkus tries to avoid reflection as much as possible in order to reduce startup time and memory usage.

    +
  • +
  • +

    Native Image Pre Boot: When running in a native image, Quarkus +pre-boots as much of the framework as possible during the native image +build process. This means that the resulting native image has already +run most of the startup code and serialized the result into the +executable, resulting in an even faster startup-time.

    +
  • +
+
+
+

This gives Quarkus the potential for a great platform for serverless cloud and Kubernetes environments. For more information about Quarkus and its support for devonfw please refer to the Quarkus introduction guide.

+
+
+
+

1.81.20. Spring Native

+
+
+
+

The current version of Spring Native 0.10.5 is designed to be used with Spring Boot 2.5.6

+
+
+
+
+

Like Quarkus, Spring Native provides support for compiling Spring applications to native executables using the GraalVM native-image compiler, designed to be packaged in lightweight containers.

+
+
+

Spring Native is composed of the following modules:

+
+
+
    +
  • +

    spring-native: runtime dependency required for running Spring Native, provides also Native hints API.

    +
  • +
  • +

    spring-native-configuration: configuration hints for Spring classes used by Spring AOT plugins, including various Spring Boot auto-configurations.

    +
  • +
  • +

    spring-native-docs: reference guide, in adoc format.

    +
  • +
  • +

    spring-native-tools: tools used for reviewing image building configuration and output.

    +
  • +
  • +

    spring-aot: AOT transformation infrastructure common to Maven and Gradle plugins.

    +
  • +
  • +

    spring-aot-test: Test-specific AOT transformation infrastructure.

    +
  • +
  • +

    spring-aot-gradle-plugin: Gradle plugin that invokes AOT transformations.

    +
  • +
  • +

    spring-aot-maven-plugin: Maven plugin that invokes AOT transformations.

    +
  • +
  • +

    samples: contains various samples that demonstrate features usage and are used as integration tests.

    +
  • +
+
+
+
+

1.81.21. Native compilation with GraalVM

+
+

Quarkus and Spring Native both use GraalVM for native compilation. Using a native image provides some key advantages, such as instant startup, instant peak performance, and reduced memory consumption. However, there are also some drawbacks: Creating a native image is a heavy process that is slower than a regular application. A native image also has fewer runtime optimizations after its warmup. Furthermore, it is less mature than the JVM and comes with some different behaviors.

+
+
+

Key characteristics:

+
+
+
    +
  • +

    Static analysis of the application from the main entry point is +performed at build time.

    +
  • +
  • +

    Unused parts are removed at build time.

    +
  • +
  • +

    Configuration required for reflection, resources, and dynamic proxies.

    +
  • +
  • +

    Classpath is fixed at build time.

    +
  • +
  • +

    No class lazy loading: everything shipped in the executables will be loaded in memory on startup.

    +
  • +
  • +

    Some code will run at build time.

    +
  • +
+
+
+

There are limitations around some aspects of Java applications that are not fully supported

+
+
+
+

1.81.22. Build time and start time for apps

+ +++++ + + + + + + + + + + + + + + + + + + + +
Frameworkbuild timestart time

Spring Native

19.615s

2.913s

Quarkus Native executable

52.818s

0.802s

+
+
+

1.81.23. Memory footprints

+ ++++ + + + + + + + + + + + + + + + + +
Frameworkmemory footprint

Spring Native

109 MB

Quarkus Native executable

75 MB

+
+
+

1.81.24. Considering devonfw best practices

+
+

As of now, devonfw actively supports Spring but not Spring Native. +Although Quarkus only reached its first stable release in early 2021, it has already been used successfully in multiple big projects, showing its potential to implement cloud native services with low resource consumption matching the needs of scalability and resilience in cloud native environments. +With major stakeholders behind the open source community like Red Hat, its development and growth from its kickoff to the current state is very impressive and really shows the market needs and focus. +Another big advantage of Quarkus is that it started on a green field and therefore did not need to circumvent main pillars of the Spring framework like reflection, being able to take clean and up-to-date design decisions not needing to cope with legacy issues. +Nonetheless, there is also experimental support for some Spring libraries already available in Quarkus, which makes switching from Spring to Quarkus much easier if needed. +We also provide a guide +for Spring developers who want to adopt or try Quarkus for their +(next) projects as it really has some game-changing advantages over +Spring.

+
+
+
+

1.81.25. General recommendations and conclusion

+
+

Quarkus and Spring Native both have their own use cases. Under the consideration of the limitations of GraalVM to be used for native images built by Quarkus and Spring Native, there is a strong recommendation towards Quarkus from devonfw. +One essential differentiation has to be made on the decision for native or against native applications - the foreseen performance optimization of the JIT compiler of the JVM, which is not available anymore in a native image deployment. +For sure, both component frameworks will also run on a JVM getting advantage again from JIT compilation, but depending on the overall landscape then, it is recommended to stay with the knowledge of the available teams, e.g. continue making use of devon4j based on spring or even if already in that state also here make use of Quarkus on JVM.

+
+ +
+

==Modern project structure

+
+
+

With trends such as cloud, microservices, lean, and agile, we decided for a more modern project structure that fits better to recent market trends. +When starting new projects with devonfw, and especially in the context of cloud-native development, we strongly recommend this modern approach over the classic structure.

+
+
+
+

1.81.26. Modules

+
+

Due to trends such as microservices, we are building smaller apps compared to moduliths. +For simplicity, we therefore do not split our app into different modules and keep everything top-level and easy.

+
+
+

In addition to java and resources, we also add helm for helm templates and docker for docker scripts (e.g. Dockerfile) in src/main:

+
+
+
+
├──/src
+|  ├──/main
+|  |  ├──/docker
+|  |  ├──/helm
+|  |  ├──/java
+|  |  └──/resources
+|  └──/test
+|     ├──/java
+|     └──/resources
+└──/pom.xml
+
+
+
+
+

1.81.27. Deployment

+
+

For modern projects, we strongly recommend that your build process generates the final deliverable as an OCI compliant container. +Further, to go fully cloud-native, you should build your app as a native image via GraalVM AOT compiler. +Therefore, we recommend using Quarkus as your main framework. +In case you want to go with Spring, you may consider using spring-native.

+
+
+
+

1.81.28. Layers

+
+

The package structure of your code inside src/main/java (and src/test/java) of your app is described in our coding conventions in the sections packages. +For the modern project structure, the layers are defined by the following table:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Layer«layer»Description

service

service

The service layer exposing functionality via its remote API. Typical protocol is REST. May also be any other protocol you are using such as gRPC.

domain

domain

The domain with the data-model and DB access. Use sub-package (in «detail») repository for repository and dao for DAOs. Also we recommend to put entities in model sub-package.

logic

logic

The logic layer with the functionality providing the business value.

common

common

cross-cutting code not assigned to a technical layer.

+
+
+

1.81.29. Architecture Mapping

+
+

In order to help you to map the architecture, packaging, layering, etc. to the code and see where different code elements should be placed, +we provide this architecture mapping:

+
+
+
+
«root»
+├──.«component»
+|  ├──.domain
+|  |  ├──.repo
+|  |  |  ├──.«BusinessObject»Repository
+|  |  |  ├──.«BusinessObject»Fragment
+|  |  |  └──.«BusinessObject»FragmentImpl
+|  |  ├──.dao [alternative to repo]
+|  |  |  ├──.«BusinessObject»Dao
+|  |  |  └──.«BusinessObject»DaoImpl
+|  |  └──.model
+|  |     └──.«BusinessObject»Entity
+|  ├──.logic
+|  |  ├──«BusinessObject»Validator
+|  |  └──«BusinessObject»EventsEmitter
+|   |  └──.Uc«Operation»«BusinessObject»[Impl]
+|  └──.rest
+|     └──.v1
+|        ├──.«Component»RestService
+|        ├──.mapper
+|        |     └──.«BusinessObject»Mapper
+|        └──.model
+|           └──.«BusinessObject»Dto
+└──.general
+   └──.domain
+      └──.model
+         └──.ApplicationPersistenceEntity
+
+
+ +
+

==Domain Layer

+
+
+

The domain layer is responsible for the data-model and mapping it to a database. +The most common approach is to use a Relational Database Management System (RDBMS). In such a case, we strongly recommend to follow our JPA Guide. Some NoSQL databases are supported by spring-data, so you can consider the repository guide.

+
+
+

Note: The domain layer is the replacement for the data-access layer in the modern project structure.

+
+
+
+
+
+

1.82. Guides

+ +
+

==Getting started with Quarkus for Spring developers

+
+
+

As a Spring developer, you have heard more and more about Quarkus: its pros and cons, its fast growth etc. So, you decided to adopt/try Quarkus for your (next) project(s) and are wondering where to go next and what you need to pay attention to when moving from Spring to Quarkus.

+
+
+

This guide tries to address this exact concern. In the following, we will present you some main points you should be aware of when starting to develop with Quarkus, along with some useful sources.

+
+
+
    +
  1. +

    Quarkus is a fairly new Java toolkit. Thus, it is very well documented. It also provides a set of well-written technical guides that are a good starting point to get in touch and make the first steps with Quarkus. See here. It is an Open Source project licensed under the Apache License version 2.0. The source code is hosted in GitHub. If you have any questions or concerns, don’t hesitate to reach out to the Quarkus community.

    +
  2. +
  3. +

    Same as Spring Initializr, you can go to code.quarkus.io to create a new application. Also, check out our Template Quarkus Guide to see our recommendations on certain topics.

    +
  4. +
  5. +

    In Spring stack, we recommend structuring your application into multiple modules, known as our classic structure. Moving to Quarkus and the world of cloud-native microservices, where we build smaller applications compared to monoliths, we recommend keeping everything top-level and simple. Therefore, we propose the modern structure as a better fit.

    +
  6. +
  7. +

    Quarkus focuses not only on delivering top features, but also on the developer experience. The Quarkus’s Live Coding feature automatically detects changes made to Java files, application configuration, static resources, or even classpath dependency changes and recompiles and redeploys the changes. As that, it solves the problem of traditional Java development workflow, hence improves productivity.

    +
    +
    +
        Write Code → Compile → Deploy → Test Changes/ Refresh Browser/ etc → Repeat (traditional)
    +    Write Code → Test Changes/ Refresh Browser/ etc → Repeat (Quarkus)
    +
    +
    +
    +

    You can use this feature out of the box without any extra setup by running:

    +
    +
    +
    +
        mvn compile quarkus:dev
    +
    +
    +
    +

    Another highlight feature to speed up developing is the Quarkus’s Dev Mode with Dev Services, which can automatically provision unconfigured services in development and test mode. This means that if you include an extension and don’t configure it, Quarkus will automatically start the relevant service and wire up your application to use it, therefore saving you a lot of time setting up those services manually. In production mode, where the real configuration is provided, Dev Services will be disabled automatically.

    +
    +
    +

    Additionally, you can access the Dev UI at /q/dev in Dev Mode to browse endpoints offered by various extensions, conceptually similar to what a Spring Boot actuator might provide.

    +
    +
  8. +
  9. +

    Quarkus is made of a small core on which hundreds of extensions rely. In fact, the power of Quarkus is its extension mechanism. Think of these extensions as your project dependencies. You can add it per dependency manager such as maven or gradle.

    +
    +
    +
    mvn quarkus:list-extensions
    +mvn quarkus:add-extension -Dextensions="groupId:artifactId"
    +(or add it manually to pom.xml)
    +##or
    +gradle list-extensions
    +(add dependency to build.gradle)
    +
    +
    +
    +

    Like Spring Boot, Quarkus also has a vast ecosystem of extensions with commonly-used technologies.

    +
    + + ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Table 18. Example of common Quarkus extensions and the Spring Boot Starters with similar functionality (book: Quarkus for Spring Developer)
    Quarkus extensionSpring Boot Starter

    quarkus-resteasy-jackson

    spring-boot-starter-web

    +

    spring-boot-starter-webflux

    quarkus-resteasy-reactive-jackson

    spring-boot-starter-web

    +

    spring-boot-starter-webflux

    quarkus-hibernate-orm-panache

    spring-boot-starter-data-jpa

    quarkus-hibernate-orm-rest-datapanache

    spring-boot-starter-data-rest

    quarkus-hibernate-reactive-panache

    spring-boot-starter-data-r2dbc

    quarkus-mongodb-panache

    spring-boot-starter-data-mongodb

    +

    spring-boot-starter-data-mongodb-reactive

    quarkus-hibernate-validator

    spring-boot-starter-validation

    quarkus-qpid-jms

    spring-boot-starter-activemq

    quarkus-artemis-jms

    spring-boot-starter-artemis

    quarkus-cache

    spring-boot-starter-cache

    quarkus-redis-client

    spring-boot-starter-data-redis

    +

    spring-boot-starter-data-redis-reactive

    quarkus-mailer

    spring-boot-starter-mail

    quarkus-quartz

    spring-boot-starter-quartz

    quarkus-oidc

    spring-boot-starter-oauth2-resource-server

    quarkus-oidc-client

    spring-boot-starter-oauth2-client

    quarkus-smallrye-jwt

    spring-boot-starter-security

    +
    +

    A full list of all Quarkus extensions can be found here. Furthermore, you can check out the community extensions hosted by Quarkiverse Hub. Quarkus has some extensions for Spring API as well, which is helpful when migrating from Spring to Quarkus.

    +
    + +
    +

    Besides extensions, which are officially maintained by Quarkus team, Quarkus allows adding external libraries too. While extensions can be integrated seamlessly into Quarkus, as they can be processed at build time and be built in native mode with GraalVM, external dependencies might not work out of the box with native compilation. If that is the case, you have to recompile them with the right GraalVM configuration to make them work.

    +
    +
  10. +
  11. +

    Quarkus' design accounted for native compilation by default. A Quarkus native executable starts much faster and utilizes far less memory than a traditional JVM (see our performance comparison between Spring and Quarkus). To get familiar with building native executable, configuring and running it, please check out our Native Image Guide. Be sure to test your code in both JVM and native mode.

    +
  12. +
  13. +

    Both Quarkus and Spring include testing frameworks based on JUnit and Mockito. By design, Quarkus enables test-driven development by detecting affected tests as changes are made and automatically rerunning them in the background. This gives developers instant feedback and thus improves productivity. To use continuous testing, execute the following command:

    +
    +
    +
    mvn quarkus:dev
    +
    +
    +
  14. +
  15. +

    For the sake of performance optimization, Quarkus avoids reflection as much as possible, favoring static class binding instead. When building a native executable, it analyzes the call tree and removes all the classes/methods/fields that are not used directly. As a consequence, the elements used via reflection are not part of the call tree so they are dead code eliminated (if not called directly in other cases).

    +
    +

    A common example are JSON libraries, which typically use reflection to serialize objects to JSON. If you use them out of the box, you might encounter some errors in native mode. So, be sure to register the elements for reflection explicitly. A How-to is provided by Quarkus Registering For Reflection with practical program snippets.

    +
    +
  16. +
+
+
+

A very good read on the topic is the e-book Quarkus for Spring Developers by Red Hat. Another good source for direct hands-on coding tutorial is Katacoda Quarkus for Spring Boot Developers

+
+ +
+

==Configuration

+
+
+

Quarkus provides a comprehensive guide on configuration here.

+
+
+
External Application Configuration
+
+
Database Configuration
+
+

In Quarkus, Hibernate is provided by the quarkus-hibernate-orm extension. Ensure the extension is added to your pom.xml as follows:

+
+
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-hibernate-orm</artifactId>
+</dependency>
+
+
+
+

Additionally, you have to add the respective JDBC driver extension to your pom.xml. There are different drivers for different database types. See Quarkus Hibernate guide.

+
+
+
+
Database System and Access
+
+

You need to configure which database type you want to use, as well as the location and credentials to access it. The defaults are configured in application.properties. The file should therefore contain the properties as in the given example:

+
+
+
+
quarkus.datasource.jdbc.url=jdbc:postgresql://database.enterprise.com/app
+quarkus.datasource.username=appuser01
+quarkus.datasource.password=************
+quarkus.datasource.db-kind=postgresql
+
+##drop and create the database at startup (use only for local development)
+quarkus.hibernate-orm.database.generation=drop-and-create
+
+
+
+
+
Database Logging
+
+

Add the following properties to application.properties to enable logging of database queries for debugging purposes.

+
+
+
+
quarkus.hibernate-orm.log.sql=true
+quarkus.hibernate-orm.log.format-sql=true
+
+#Logs SQL bind parameters. Setting it to true is obviously not recommended in production.
+quarkus.hibernate-orm.log.bind-parameters=true
+
+
+
+
+
+
Secrets and environment specific configurations
+
+
Environment variables
+
+

There are also some libraries to make Jasypt work with Quarkus, such as Camel Quarkus Jasypt. Unfortunately, this feature only works in JVM mode and not in native mode.

+
+
+

Quarkus supports many credential providers with official extensions, such as HashiCorp Vault.

+
+
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-vault</artifactId>
+</dependency>
+
+
+
+

Quarkus reads configuration values from several locations, ordered by a certain priority. An overview of these can be found at the official Quarkus config guide.

+
+
+

Environment variables have a higher ordinal number and are therefore higher prioritized than e.g. the application.properties file. +So instead of storing secrets in plain text in the configuration files, it is better to use environment variables for critical values to configure the application.

+
+
+

Environment variables also have the advantage that they can be easily integrated into a containerized environment. +When using Kubernetes, the secrets can be stored as Kubernetes secret and then passed to the containers as an environment variable.

+
+
+
+
Custom config sources
+
+

Quarkus provides the possibility to add custom config sources, which can be used to retrieve configuration values from custom locations. +For a description of this feature, see the corresponding Quarkus guide.

+
+
+Config interceptors +
+

Quarkus also allows with the concept of interceptors to hook into the resolution of configuration values. This can be useful when configuration values are encrypted or need to be extracted. +To do this, you have to implement a ConfigSourceInterceptor.

+
+
+
+
public class CustomConfigInterceptor implements ConfigSourceInterceptor {
+
+  @Override
+  public ConfigValue getValue(ConfigSourceInterceptorContext context, String name) {
+
+    ConfigValue configValue = context.proceed(name);
+    if (name.equals("config-value-to-resolve")) {
+      configValue = new ConfigValue.ConfigValueBuilder()
+          .withName(name)
+          .withValue(resolveConfigurationValue(name))
+          .build();
+    }
+
+    return configValue;
+  }
+
+  private String resolveConfigurationValue(String name) {
+    ...
+  }
+}
+
+
+
+

To use the Interceptor, you must register it. To do this, create a file io.smallrye.config.ConfigSourceInterceptor in the folder src/main/resources/META-INF/services and register the interceptor by writing the fully qualified class name to this file.

+
+
+
+
+
Credential encryption
+
+

As for Spring, there are also some libraries that let Jasypt work with Quarkus such as Camel Quarkus Jasypt. Unfortunately, this feature only works in JVM mode and not in native mode, so it is not a suitable approach.

+
+
+

If you want to store usernames or passwords in encrypted form or retrieve them from a custom store, you can use a custom CredentialsProvider for this purpose. +Consider the use case where you want to store your database credentials in encrypted form rather than in plain text. Then you can implement a credentials provider as follows:

+
+
+
+
@ApplicationScoped
+@Unremovable
+public class DatabaseCredentialsProvider implements CredentialsProvider {
+
+  @Override
+  public Map<String, String> getCredentials(String credentialsProviderName) {
+
+    Map<String, String> properties = new HashMap<>();
+    properties.put(USER_PROPERTY_NAME, decryptUsername());
+    properties.put(PASSWORD_PROPERTY_NAME, decryptPassword());
+    return properties;
+  }
+}
+
+
+
+

In the application.properties file you need to set quarkus.datasource.credentials-provider=custom. +For more information about the credentials provider, see the official Quarkus guide.

+
+
+
+
HashiCorp Vault
+
+

For centralized management of secrets and other critical configuration values, you can use HashiCorp Vault as external management tool.

+
+
+

For detailed instructions on how to integrate Vault into your Quarkus application, see the official Quarkus guide.

+
+ +
+

==Quarkus template

+
+
+

The Quarkus Code Generator provides many alternative technologies and libraries that can be integrated into a project. Detailed guides on multiple topics can be found here.

+
+
+

Due to the large selection, getting started can be difficult for developers. +In this guide we aim to provide a general suggestion on basic frameworks, libraries, and technologies to make it easy for developers to begin with.

+
+
+

With that said, please take this as a recommendation and not as a compulsion. Depending on your project requirements, you might have to use another stack compared to what is listed below.

+
+
+

If you are new to Quarkus, consider checking out their getting started guide to get an overview of how to create, run, test, as well as package a Quarkus application. Another recommended source to get started is the Katacoda tutorials.

+
+
+
+
Basic templates
+
+
    +
  1. +

    simple REST API (go to code.quarkus.io)

    +
  2. +
  3. +

    simple REST API with monitoring (go to code.quarkus.io)

    +
  4. +
+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 19. Topic-based suggested implementation
TopicDetailSuggested implementationNote

runtime

servlet-container

Undertow

component management

dependency injection

ArC

ArC is based on JSR 365. It also provides interceptors that can be used to implement the same functionality as AOP provides

configuration

SmallRye Config

SmallRye Config is an implementation of Eclipse MicroProfile Config. It also supports YAML configuration files

persistence

OR-mapper

Hibernate ORM, Spring Data JPA

Hibernate ORM is the de facto standard JPA implementation and works perfectly in Quarkus. Quarkus also provides a compatibility layer for Spring Data JPA repositories in the form of the spring-data-jpa extension.

batch

Quarkus JBeret Extension is a non-official extension, which is hosted in the Quarkiverse Hub. It is an implementation of JSR 352.

service

REST services

RESTEasy

RESTEasy is a portable implementation of the new JCP specification JAX-RS JSR-311. It can be documented via Swagger OpenAPI.

async messaging

SmallRye Reactive Messaging, Vert.x EventBus

SmallRye Reactive Messaging is an implementation of the Eclipse MicroProfile Reactive Messaging specification 1.0. You can also utilize SmallRye Reactive Messaging in your Quarkus application to interact with Apache Kafka.

marshalling

RESTEasy Jackson, RESTEasy JSON-B, RESTEasy JAXB, RESTEasy Multipart

cloud

kubernetes

Kubernetes

deployment

Minikube, k3d

Minikube is quite popular when a Kubernetes cluster is needed for development purposes. Quarkus supports this with the quarkus-minikube extension.

logging

framework

JBoss Log Manager and the JBoss Logging facade

Internally, Quarkus uses JBoss Log Manager and the JBoss Logging facade. Logs from other supported Logging API (JBoss Logging, SLF4J, Apache Commons Logging) will be merged.

validation

framework

Hibernate Validator/Bean Validation (JSR 380)

security

authentication & authorization

JWT authentication

Quarkus supports various security mechanisms. Depending on your protocol, identity provider you can choose the necessary extensions such as quarkus-oidc quarkus-smallrye-jwt quarkus-elytron-security-oauth2.

monitoring

framework

Micrometer Metrics, SmallRye Metrics

SmallRye Metrics is an implementation of the MicroProfile Metrics specification. Quarkus also offers various extensions to customize the metrics.

health

SmallRye Health

SmallRye Health is an implementation of the MicroProfile Health specification.

fault tolerance

SmallRye Fault Tolerance

SmallRye Fault Tolerance is an implementation of the MicroProfile Fault Tolerance specification.

+ +
+

==Building a native image

+
+
+

Quarkus provides the ability to create a native executable of the application called native image. +Unlike other Java based deployments, a native image will only run on the architecture and operating system it is compiled for. +Also, no JVM is needed to run the native-image. +This improves the startup time, performance, and efficiency. +A distribution of GraalVM is needed. +You can find the differences between the available distributions here.

+
+
+

To build your quarkus app as a native-image, you have two options that are described in the following sections.

+
+
+
+
+
Build a native executable with GraalVM
+
+

To build a Quarkus application, you can install GraalVM locally on your machine, as described below. +Therefore, read the basic Quarkus application chapter, or clone the example project provided by devonfw. +Follow this chapter from the Quarkus Guide for building a native executable.

+
+
+
Installing GraalVM
+
+

A native image can be created locally or through a container environment. +To create a native image locally, an installed and configured version of GraalVM is needed. You can follow the installation guide from Quarkus or the guide provided by GraalVM for this.

+
+
+
+
+
Build a native executable with GraalVM through container environment
+
+

In order to make the build of native images more portable, you can also use your container environment and run the GraalVM inside a container (typically Docker). +You can simply install Docker with your devonfw-ide distribution, just follow this description Docker with devonfw-ide. +Follow this chapter to build a native Linux image through container runtime.

+
+
+
+
Configuring the native executable
+
+

A list of all configuration properties for a native image can be found here.

+
+ +
+

==Bean mapping with Quarkus

+
+
+

This guide will show bean-mapping, in particular for a Quarkus application. We recommend using MapStruct with a Quarkus application because the other bean-mapper frameworks use Java reflections. They are not supported in GraalVM right now and cause problems when building native applications. MapStruct is a code generator that greatly simplifies the implementation of mappings between Java bean types based on a convention over configuration approach. The mapping code will be generated at compile-time and uses plain method invocations and is thus fast, type-safe, and easy to understand. MapStruct has to be configured to not use Java reflections, which will be shown in this guide.

+
+
+

You can find the official +MapStruct reference guide and a general introduction to MapStruct from Baeldung.

+
+
+
+
MapStruct Dependency
+
+

To get access to MapStruct, we have to add the dependency to our POM.xml:

+
+
+
+
<dependency>
+  <groupId>org.mapstruct</groupId>
+  <artifactId>mapstruct</artifactId>
+  <version>1.4.2.Final</version>
+  <scope>provided</scope>
+</dependency>
+
+
+
+

MapStruct provides an annotation processor that also has to be added to the POM.xml

+
+
+
+
<plugin>
+	<groupId>org.apache.maven.plugins</groupId>
+	<artifactId>maven-compiler-plugin</artifactId>
+	<version>3.8.1</version>
+	<configuration>
+		<source>1.8</source>
+		<target>1.8</target>
+		<annotationProcessorPaths>
+			<path>
+				<groupId>org.mapstruct</groupId>
+				<artifactId>mapstruct-processor</artifactId>
+				<version>1.4.2.Final</version>
+			</path>
+		</annotationProcessorPaths>
+	</configuration>
+</plugin>
+
+
+
+

MapStruct takes advantage of generated getters, setters, and constructors from the Lombok library, follow this Lombok with Mapstruct guide to get Lombok with Mapstruct working.

+
+
+
+
MapStruct Configuration
+
+

We already discussed the benefits of dependency injection. MapStruct supports CDI with EJB, spring, and jsr330. The default retrieving method for a mapper is a factory that uses reflections, which should be avoided. The component model should be set to CDI, as this will allow us to easily inject the generated mapper implementation. The component model can be configured in multiple ways.

+
+
+
Simple Configuration
+
+

Add the attribute componentModel to the @Mapper annotation in the mapper interface.

+
+
+
+
@Mapper(componentModel = "cdi")
+public interface ProductMapper{
+  ...
+}
+
+
+
+
+
MapperConfig Configuration
+
+

Create a shared configuration that can be used for multiple mappers. Implement an interface and use the annotation @MapperConfig for the class. You can define all configurations in this interface and pass the generated MapperConfig.class with the config attribute to the mapper. The MapperConfig also defines the InjectionStrategy and MappingInheritanceStrategy, both of which will be explained later. +A list of all configurations can be found here.

+
+
+
+
@MapperConfig(
+  componentModel = "cdi",
+  mappingInheritanceStrategy = MappingInheritanceStrategy.AUTO_INHERIT_FROM_CONFIG,
+  injectionStrategy = InjectionStrategy.CONSTRUCTOR
+)
+public interface MapperConfig{
+}
+
+
+
+
+
@Mapper( config = MapperConfig.class )
+public interface ProductMapper{
+  ...
+}
+
+
+
+

Any attributes not given via @Mapper will be inherited from the shared configuration MapperConfig.class.

+
+
+
+
Configuration via annotation processor options
+
+

The MapStruct code generator can be configured using annotation processor options. +You can pass the options to the compiler while invoking javac directly, or add the parameters to the maven configuration in the POM.xml

+
+
+

We also use the constructor injection strategy to avoid field injections and potential reflections. This will also simplify our tests.

+
+
+

The option to pass the parameter to the annotation processor in the POM.xml is used and can be inspected in our quarkus reference project.

+
+
+

A list of all annotation processor options can be found here.

+
+
+
+
+
Basic Bean-Mapper Usage
+
+

To use the mapper, we have to implement the mapper interface and the function prototypes with a @Mapper annotation.

+
+
+
+
@Mapper
+public interface ProductMapper {
+
+  ProductDto map(ProductEntity model);
+
+  ProductEntity create(NewProductDto dto);
+}
+
+
+
+

The MapStruct annotation processor will generate the implementation for us under /target/generated-sources/, we just need to tell it that we would like to have a method that accepts a ProductEntity entity and returns a new ProductDto DTO.

+
+
+

The generated mapper implementation will be marked with the @ApplicationScoped annotation and can thus be injected into fields, constructor arguments, etc. using the @Inject annotation:

+
+
+
+
public class ProductRestService{
+
+  @Inject
+  ProductMapper mapper;
+}
+
+
+
+

That is the basic usage of a Mapstruct mapper. In the next chapter, we’ll go into a bit more detail and show some more configurations.

+
+
+
+
Advanced Bean-Mapper Usage
+
+

Let´s assume that our Product entity and the ProductDto have some differently named properties that should be mapped. Add a mapping annotation to map the property type from Product to kind from ProductDto. We define the source name of the property and the target name.

+
+
+
+
@Mapper
+public interface ProductMapper {
+  @Mapping(target = "kind", source = "type")
+  ProductDto map(ProductEntity entity);
+
+  @InheritInverseConfiguration(name = "map" )
+  ProductEntity create(ProductDto dto);
+}
+
+
+
+

For bi-directional mappings, we can indicate that a method shall inherit the inverse configuration of the corresponding method with the @InheritInverseConfiguration. You can omit the name parameter if the result type of method A is the same as the +single-source type of method B and if the single-source type of A is the same as the result type of B. If multiple apply, the attribute name is needed. Specific mappings from the inverse method can (optionally) be overridden, ignored, or set to constants or expressions.

+
+
+

The mappingInheritanceStrategy can be defined as shown in MapStruct Configuration. The existing options can be found here.

+
+
+

A mapped attribute does not always have the same type in the source and target objects. For instance, an attribute may be of type int in the source bean but of type Long in the target bean.

+
+
+

Another example are references to other objects which should be mapped to the corresponding types in the target model. E.g. the class ShoppingCart might have a property content of the type Product which needs to be converted into a ProductDto object when mapping a ShoppingCart object to ShoppingCartDto. For these cases, it’s useful to understand how Mapstruct converts the data types and the object references.

+
+
+

Also, the Chapter for nested bean mappings will help to configure MapStruct to map arbitrarily deep object graphs.

+
+
+

You can study running MapStruct implementation examples given by MapStruct or in our Quarkus reference project

+
+
+
+
+
+
+
+
+1. "Stammdaten" in German. +
+
+2. Whether to use checked exceptions or not is a controversial topic. Arguments for both sides can be found under The Trouble with Checked Exceptions, Unchecked Exceptions — The Controversy, and Checked Exceptions are Evil. The arguments in favor of unchecked exceptions tend to prevail for applications built with devon4j. Therefore, unchecked exceptions should be used for a consistent style. +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/devon4j-guides.html b/docs/devonfw.github.io/1.0/devon4j.wiki/devon4j-guides.html new file mode 100644 index 00000000..18c2aa08 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/devon4j-guides.html @@ -0,0 +1,6297 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==Guides

+
+ +
+

==Dependency Injection +Dependency injection is one of the most important design patterns and is a key principle to a modular and component based architecture. +The Java Standard for dependency injection is javax.inject (JSR330) that we use in combination with JSR250. +Additionally, for scoping you can use CDI (Context and Dependency Injection) from JSR365.

+
+
+

There are many frameworks which support this standard including all recent Java EE application servers. +Therefore in devonfw we rely on these open standards and can propagate patterns and code examples that work independent from the underlying frameworks.

+
+
+

Key Principles

+
+

Within dependency injection a bean is typically a reusable unit of your application providing an encapsulated functionality. +This bean can be injected into other beans and it should in general be replaceable. +As an example we can think of a use-case, a repository, etc. +As best practice we use the following principles:

+
+
+
    +
  • +

    Stateless implementation
    +By default such beans shall be implemented stateless. If you store state information in member variables you can easily run into concurrency problems and nasty bugs. This is easy to avoid by using local variables and separate state classes for complex state-information. Try to avoid stateful beans wherever possible. Only add state if you are fully aware of what you are doing and properly document this as a warning in your JavaDoc.

    +
  • +
  • +

    Usage of Java standards
    +We use common standards (see above) that makes our code portable. Therefore we use standardized annotations like @Inject (javax.inject.Inject) instead of proprietary annotations such as @Autowired. Generally we avoid proprietary annotations in business code (logic layer).

    +
  • +
  • +

    Simple injection-style
    +In general you can choose between constructor, setter or field injection. For simplicity we recommend to do private field injection as it is very compact and easy to maintain. We believe that constructor injection is bad for maintenance especially in case of inheritance (if you change the dependencies you need to refactor all sub-classes). Private field injection and public setter injection are very similar but setter injection is much more verbose (often you are even forced to have javadoc for all public methods). If you are writing re-usable library code setter injection will make sense as it is more flexible. In a business application you typically do not need that and can save a lot of boiler-plate code if you use private field injection instead. Nowadays you are using container infrastructure also for your tests (see testing) so there is no need to inject manually (what would require a public setter).

    +
  • +
  • +

    KISS
    +To follow the KISS (keep it small and simple) principle we avoid advanced features (e.g. custom AOP, non-singleton beans) and only use them where necessary.

    +
  • +
  • +

    Separation of API and implementation
    +For important components we should separate a self-contained API documented with JavaDoc from its implementation. Code from other components that wants to use the implementation shall only rely on the API. However, for things that will never be exchanged no API as interface is required you can skip such separation.

    +
  • +
+
+
+
+

Example Bean

+
+

Here you can see the implementation of an example bean using dependency injection:

+
+
+
+
@ApplicationScoped
+@Named("MyComponent")
+public class MyComponentImpl implements MyComponent {
+  @Inject
+  private MyOtherComponent myOtherComponent;
+
+  @PostConstruct
+  public void init() {
+    // initialization if required (otherwise omit this method)
+  }
+
+  @PreDestroy
+  public void dispose() {
+    // shutdown bean, free resources if required (otherwise omit this method)
+  }
+
+  ...
+}
+
+
+
+

Here MyComponentImpl depends on MyOtherComponent that is injected into the field myOtherComponent because of the @Inject annotation. +To make this work there must be exactly one bean in the container (e.g. spring or quarkus) that is an instance of MyOtherComponent. +In order to put a bean into the container, we can use @ApplicationScoped in case of CDI (required for quarkus) for a stateless bean. +In spring we can omit a CDI annotation and the @Named annotation is already sufficient as a bean is stateless by default in spring. +If we always use @ApplicationScoped we can make this more explicit and more portable across different frameworks. +So in our example we put MyComponentImpl into the container. +That bean will be called MyComponent as we specified in the @Named annotation but we can also omit the name to use the classname as fallback. +Now our bean can be injected into other beans using @Inject annotation either via MyComponent interface (recommended when interface is present) or even directly via MyComponentImpl. +In case you omit the interface, you should also omit the Impl suffix or instead use Bean as suffix.

+
+
+
+

Multiple bean implementations

+
+

In some cases you might have multiple implementations as beans for the same interface. +The following sub-sections handle the different scenarios to give you guidance.

+
+
+
Only one implementation in container
+
+

In some cases you still have only one implementation active as bean in the container at runtime. +A typical example is that you have different implementations for test and main usage. +This case is easy, as @Inject will always be unique. +The only thing you need to care about is how to configure your framework (spring, quarkus, etc.) to know which implementation to put in the container depending on specific configuration. +In spring this can be achieved via the proprietary @Profile annotation.

+
+
+
+
Injecting all of multiple implementations
+
+

In some situations you may have an interface that defines a kind of "plugin". +You can have multiple implementations in your container and want to have all of them injected. +Then you can request a list with all the bean implementations via the interface as in the following example:

+
+
+
+
  @Inject
+  private List<MyConverter> converters;
+
+
+
+

Your code may iterate over all plugins (converters) and apply them sequentially. +Please note that the injection will fail (at least in spring), when there is no bean available to inject. +So you do not get an empty list injected but will get an exception on startup.

+
+
+
+
Injecting one of multiple implementations
+
+

Another scenario is that you have multiple implementations in your container coexisting, but for injection you may want to choose a specific implementation. +Here you could use the @Named annotation to specify a unique identifier for each implementation what is called qualified injection:

+
+
+
+
@ApplicationScoped
+@Named("UserAuthenticator")
+public class UserAuthenticator implements Authenticator {
+  ...
+}
+@ApplicationScoped
+@Named("ServiceAuthenticator")
+public class ServiceAuthenticator implements Authenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  @Named("UserAuthenticator")
+  private Authenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  @Named("ServiceAuthenticator")
+  private Authenticator authenticator;
+  ...
+}
+
+
+
+

However, we discovered that this pattern is not so great: +The identifiers in the @Named annotation are just strings that could easily break. +You could use constants instead but still this is not the best solution.

+
+
+

In the end you can very much simplify this by just directly injecting the implementation instead:

+
+
+
+
@ApplicationScoped
+public class UserAuthenticator implements Authenticator {
+  ...
+}
+@ApplicationScoped
+public class ServiceAuthenticator implements Authenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  private UserAuthenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  private ServiceAuthenticator authenticator;
+  ...
+}
+
+
+
+

In case you want to strictly decouple from implementations, you can still create dedicated interfaces:

+
+
+
+
public interface UserAuthenticator extends Authenticator {}
+@ApplicationScoped
+public class UserAuthenticatorImpl implements UserAuthenticator {
+  ...
+}
+public interface ServiceAuthenticator extends Authenticator {}
+@ApplicationScoped
+public class ServiceAuthenticatorImpl implements ServiceAuthenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  private UserAuthenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  private ServiceAuthenticator authenticator;
+  ...
+}
+
+
+
+

However, as you can see this is again introducing additional boiler-plate code. +While the principle to separate API and implementation and strictly decouple from implementation is valuable in general, +you should always consider KISS, lean, and agile in contrast and balance pros and cons instead of blindly following dogmas.

+
+
+
+
+

Imports

+
+

Here are the import statements for the most important annotations for dependency injection

+
+
+
+
import javax.inject.Inject;
+import javax.inject.Named;
+import javax.enterprise.context.ApplicationScoped;
+// import javax.enterprise.context.RequestScoped;
+// import javax.enterprise.context.SessionScoped;
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+
+
+
+
+

Dependencies

+
+

Please note that with Jakarta EE the dependencies have changed. +When you want to start with Jakarta EE you should use these dependencies to get the annotations for dependency injection:

+
+
+
+
<!-- Basic injection annotations (JSR-330) -->
+<dependency>
+  <groupId>jakarta.inject</groupId>
+  <artifactId>jakarta.inject-api</artifactId>
+</dependency>
+<!-- Basic lifecycle and security annotations (JSR-250)-->
+<dependency>
+  <groupId>jakarta.annotation</groupId>
+  <artifactId>jakarta.annotation-api</artifactId>
+</dependency>
+<!-- Context and dependency injection API (JSR-365) -->
+<dependency>
+  <groupId>jakarta.enterprise</groupId>
+  <artifactId>jakarta.enterprise.cdi-api</artifactId>
+</dependency>
+
+
+
+

Please note that with quarkus you will get them as transitive dependencies out of the box. +The above Jakarta EE dependencies replace these JEE dependencies:

+
+
+
+
<!-- Basic injection annotations (JSR-330) -->
+<dependency>
+  <groupId>javax.inject</groupId>
+  <artifactId>javax.inject</artifactId>
+</dependency>
+<!-- Basic lifecycle and security annotations (JSR-250)-->
+<dependency>
+  <groupId>javax.annotation</groupId>
+  <artifactId>javax.annotation-api</artifactId>
+</dependency>
+<!-- Context and dependency injection API (JSR-365) -->
+<dependency>
+  <groupId>jakarta.enterprise</groupId>
+  <artifactId>jakarta.enterprise.cdi-api</artifactId>
+</dependency>
+
+
+
+ +
+

==Configuration

+
+
+

An application needs to be configurable in order to allow internal setup (like CDI) but also to allow externalized configuration of a deployed package (e.g. integration into runtime environment). We rely on a comprehensive configuration approach following a "convention over configuration" pattern. This guide adds on to this by detailed instructions and best-practices how to deal with configurations.

+
+
+

In general we distinguish the following kinds of configuration that are explained in the following sections:

+
+
+ +
+
+
+

Internal Application Configuration

+
+

The application configuration contains all internal settings and wirings of the application (bean wiring, database mappings, etc.) and is maintained by the application developers at development time.

+
+
+

For more detail of Spring stack, see here

+
+
+
+

Externalized Configuration

+
+

Externalized configuration is a configuration that is provided separately to a deployment package and can be maintained undisturbed by re-deployments.

+
+
+
Environment Configuration
+
+

The environment configuration contains configuration parameters (typically port numbers, host names, passwords, logins, timeouts, certificates, etc.) specific for the different environments. These are under the control of the operators responsible for the application.

+
+
+

The environment configuration is maintained in application.properties files, defining various properties. +These properties are explained in the corresponding configuration sections of the guides for each topic:

+
+
+ +
+
+

Make sure your properties are thoroughly documented by providing a comment to each property. This inline documentation is most valuable for your operating department.

+
+
+

More about structuring your application.properties files can be read here for Spring.

+
+
+

For Quarkus, please refer to Quarkus Config Reference for more details.

+
+
+
+
Business Configuration
+
+

Often applications do not need business configuration. In case they do it should typically be editable by administrators via the GUI. The business configuration values should therefore be stored in the database in key/value pairs.

+
+
+

Therefore we suggest to create a dedicated table with (at least) the following columns:

+
+
+
    +
  • +

    ID

    +
  • +
  • +

    Property name

    +
  • +
  • +

    Property type (Boolean, Integer, String)

    +
  • +
  • +

    Property value

    +
  • +
  • +

    Description

    +
  • +
+
+
+

According to the entries in this table, an administrative GUI may show a generic form to modify business configuration. Boolean values should be shown as checkboxes, integer and string values as text fields. The values should be validated according to their type so an error is raised if you try to save a string in an integer property for example.

+
+
+

We recommend the following base layout for the hierarchical business configuration:

+
+
+

component.[subcomponent].[subcomponent].propertyname

+
+
+
+
+

Security

+
+

Often you need to have passwords (for databases, third-party services, etc.) as part of your configuration. These are typically environment specific (see above). However, with DevOps and continuous-deployment you might be tempted to commit such configurations into your version-control (e.g. git). Doing that with plain text passwords is a severe problem especially for production systems. Never do that! Instead we offer some suggestions how to deal with sensible configurations:

+
+
+
Password Encryption
+
+

A simple but reasonable approach is to configure the passwords encrypted with a master-password. The master-password should be a strong secret that is specific for each environment. It must never be committed to version-control.

+
+
+

For Spring, we use jasypt-spring-boot. For more details, see here

+
+
+

For Quarkus, see here

+
+
+
Is this Security by Obscurity?
+
+
    +
  • +

    Yes, from the point of view to protect the passwords on the target environment this is nothing but security by obscurity. If an attacker somehow got full access to the machine this will only cause him to spend some more time.

    +
  • +
  • +

    No, if someone only gets the configuration file. So all your developers might have access to the version-control where the config is stored. Others might have access to the software releases that include these configs. But without the master-password that should only be known to specific operators no one else can decrypt the password (except with brute-force, which will take a very long time, see jasypt for details).

    +
  • +
+
+ +
+

==Mapping configuration to your code

+
+
+

If you are using spring-boot as suggested by devon4j your application can be configured by application.properties file as described in configuration. +To get a single configuration option into your code for flexibility, you can use

+
+
+
+
@Value("${my.property.name}")
+private String myConfigurableField;
+
+
+
+

Now, in your application.properties you can add the property:

+
+
+
+
my.property.name=my-property-value
+
+
+
+

You may even use @Value("${my.property.name:my-default-value}") to make the property optional.

+
+
+
+
+
+

Naming conventions for configuration properties

+
+

As a best practice your configuration properties should follow these naming conventions:

+
+
+
    +
  • +

    build the property-name as a path of segments separated by the dot character (.)

    +
  • +
  • +

    segments should get more specific from left to right

    +
  • +
  • +

    a property-name should either be a leaf value or a tree node (prefix of other property-names) but never both! So never have something like foo.bar=value and foo.bar.child=value2.

    +
  • +
  • +

    start with a segment namespace unique to your context or application

    +
  • +
  • +

    a good example would be «myapp».billing.service.email.sender for the sender address of billing service emails send by «myapp».

    +
  • +
+
+
+
+

Mapping advanced configuration

+
+

However, in many scenarios you will have features that require more than just one property. +Injecting those via @Value is not leading to good code quality. +Instead we create a class with the suffix ConfigProperties containing all configuration properties for our aspect that is annotated with @ConfigurationProperties:

+
+
+
+
@ConfigurationProperties(prefix = "myapp.billing.service")
+public class BillingServiceConfigProperties {
+
+  private final Email email = new Email();
+  private final Smtp smtp = new Smtp();
+
+  public Email getEmail() { return this.email; }
+  public Email getSmtp() { return this.smtp; }
+
+  public static class Email {
+
+    private String sender;
+    private String subject;
+
+    public String getSender() { return this.sender; }
+    public void setSender(String sender) { this.sender = sender; }
+    public String getSubject() { return this.subject; }
+    public void setSubject(String subject) { this.subject = subject; }
+  }
+
+  public static class Smtp {
+
+    private String host;
+    private int port = 25;
+
+    public String getHost() { return this.host; }
+    public void setHost(String host) { this.host = host; }
+    public int getPort() { return this.port; }
+    public void setPort(int port) { this.port = port; }
+  }
+
+}
+
+
+
+

Of course this is just an example to demonstrate this feature of spring-boot. +In order to send emails you would typically use the existing spring-email feature. +But as you can see this allows us to define and access our configuration in a very structured and comfortable way. +The annotation @ConfigurationProperties(prefix = "myapp.billing.service") will automatically map spring configuration properties starting with myapp.billing.service via the according getters and setters into our BillingServiceConfigProperties. +We can easily define defaults (e.g. 25 as default value for myapp.billing.service.smtp.port). +Also Email or Smtp could be top-level classes to be reused in multiple configurations. +Of course you would also add helpful JavaDoc comments to the getters and classes to document your configuration options. +Further to access this configuration, we can use standard dependency-injection:

+
+
+
+
@Inject
+private BillingServiceConfigProperties config;
+
+
+
+

For very generic cases you may also use Map<String, String> to map any kind of property in an untyped way. +An example for generic configuration from devon4j can be found in +ServiceConfigProperties.

+
+
+

For further details about this feature also consult Guide to @ConfigurationProperties in Spring Boot.

+
+
+
+

Generate configuration metadata

+
+

You should further add this dependency to your module containing the *ConfigProperties:

+
+
+
+
    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-configuration-processor</artifactId>
+      <optional>true</optional>
+    </dependency>
+
+
+
+

This will generate configuration metadata so projects using your code can benefit from autocompletion and getting your JavaDoc as tooltip when editing application.properties, which makes this approach very powerful. +For further details about this please read A Guide to Spring Boot Configuration Metadata.

+
+
+ +
+

==Java Persistence API

+
+
+

For mapping java objects to a relational database we use the Java Persistence API (JPA). +As JPA implementation we recommend to use Hibernate. For general documentation about JPA and Hibernate follow the links above as we will not replicate the documentation. Here you will only find guidelines and examples how we recommend to use it properly. The following examples show how to map the data of a database to an entity. As we use JPA we abstract from SQL here. However, you will still need a DDL script for your schema and during maintenance also database migrations. Please follow our SQL guide for such artifacts.

+
+
+
+

Entity

+
+

Entities are part of the persistence layer and contain the actual data. They are POJOs (Plain Old Java Objects) on which the relational data of a database is mapped and vice versa. The mapping is configured via JPA annotations (javax.persistence). Usually an entity class corresponds to a table of a database and a property to a column of that table. A persistent entity instance then represents a row of the database table.

+
+
+
A Simple Entity
+
+

The following listing shows a simple example:

+
+
+
+
@Entity
+@Table(name="TEXTMESSAGE")
+public class MessageEntity extends ApplicationPersistenceEntity implements Message {
+
+  private String text;
+
+  public String getText() {
+    return this.text;
+  }
+
+  public void setText(String text) {
+    this.text = text;
+  }
+ }
+
+
+
+

The @Entity annotation defines that instances of this class will be entities which can be stored in the database. The @Table annotation is optional and can be used to define the name of the corresponding table in the database. If it is not specified, the simple name of the entity class is used instead.

+
+
+

In order to specify how to map the attributes to columns we annotate the corresponding getter methods (technically annotating private fields is also possible but the two approaches can not be mixed). +The @Id annotation specifies that a property should be used as primary key. +With the help of the @Column annotation it is possible to define the name of the column that an attribute is mapped to as well as other aspects such as nullable or unique. If no column name is specified, the name of the property is used as default.

+
+
+

Note that every entity class needs a constructor with public or protected visibility that does not have any arguments. Moreover, neither the class nor its getters and setters may be final.

+
+
+

Entities should be simple POJOs and not contain business logic.

+
+
+
+
Entities and Datatypes
+
+

Standard datatypes like Integer, BigDecimal, String, etc. are mapped automatically by JPA. Custom datatypes are mapped as serialized BLOB by default, which is typically undesired. +In order to map atomic custom datatypes (implementations of SimpleDatatype) we implement an AttributeConverter. Here is a simple example:

+
+
+
+
@Converter(autoApply = true)
+public class MoneyAttributeConverter implements AttributeConverter<Money, BigDecimal> {
+
+  public BigDecimal convertToDatabaseColumn(Money attribute) {
+    return attribute.getValue();
+  }
+
+  public Money convertToEntityAttribute(BigDecimal dbData) {
+    return new Money(dbData);
+  }
+}
+
+
+
+

The annotation @Converter is detected by the JPA vendor if the annotated class is in the packages to scan. Further, autoApply = true implies that the converter is automatically used for all properties of the handled datatype. Therefore all entities with properties of that datatype will automatically be mapped properly (in our example Money is mapped as BigDecimal).

+
+
+

In case you have a composite datatype that you need to map to multiple columns the JPA does not offer a real solution. As a workaround you can use a bean instead of a real datatype and declare it as @Embeddable. If you are using Hibernate you can implement CompositeUserType. Via the @TypeDef annotation it can be registered to Hibernate. If you want to annotate the CompositeUserType implementation itself you also need another annotation (e.g. MappedSuperclass, though not technically correct) so it is found by the scan.

+
+
+
Enumerations
+
+

By default JPA maps Enums via their ordinal. Therefore the database will only contain the ordinals (0, 1, 2, etc.). So, inside the database you can not easily understand their meaning. Using @Enumerated with EnumType.STRING allows to map the enum values to their name (Enum.name()). Both approaches are fragile when it comes to code changes and refactoring (if you change the order of the enum values or rename them) after the application is deployed to production. If you want to avoid this and get a robust mapping you can define a dedicated string in each enum value for database representation that you keep untouched. Then you treat the enum just like any other custom datatype.

+
+
+
+
BLOB
+
+

If binary or character large objects (BLOB/CLOB) should be used to store the value of an attribute, e.g. to store an icon, the @Lob annotation should be used as shown in the following listing:

+
+
+
+
@Lob
+public byte[] getIcon() {
+  return this.icon;
+}
+
+
+
+ + + + + +
+ + +Using a byte array will cause problems if BLOBs get large because the entire BLOB is loaded into the RAM of the server and has to be processed by the garbage collector. For larger BLOBs the type Blob and streaming should be used. +
+
+
+
+
public Blob getAttachment() {
+  return this.attachment;
+}
+
+
+
+
+
Date and Time
+
+

To store date and time related values, the temporal annotation can be used as shown in the listing below:

+
+
+
+
@Temporal(TemporalType.TIMESTAMP)
+public java.util.Date getStart() {
+  return start;
+}
+
+
+
+

Until Java 8 the java data type java.util.Date (or Jodatime) has to be used. +TemporalType defines the granularity. In this case, a precision of nanoseconds is used. If this granularity is not wanted, TemporalType.DATE can be used instead, which only has a granularity of days. +Mixing these two granularities can cause problems when comparing one value to another. This is why we only use TemporalType.TIMESTAMP.

+
+
+
+
QueryDSL and Custom Types
+
+

Using the Aliases API of QueryDSL might result in an InvalidDataAccessApiUsageException when using custom datatypes in entity properties. This can be circumvented in two steps:

+
+
+
    +
  1. +

    Ensure you have the following maven dependencies in your project (core module) to support custom types via the Aliases API:

    +
    +
    +
    <dependency>
    +  <groupId>org.ow2.asm</groupId>
    +  <artifactId>asm</artifactId>
    +</dependency>
    +<dependency>
    +  <groupId>cglib</groupId>
    +  <artifactId>cglib</artifactId>
    +</dependency>
    +
    +
    +
  2. +
  3. +

    Make sure, that all your custom types used in entities provide a non-argument constructor with at least visibility level protected.

    +
  4. +
+
+
+
+
+
Primary Keys
+
+

We only use simple Long values as primary keys (IDs). +By default it is auto generated (@GeneratedValue(strategy=GenerationType.AUTO)). +This is already provided by the class com.devonfw.<projectName>.general.dataaccess.api.AbstractPersistenceEntity within the classic project structure respectively com.devonfw.<projectName>.general.domain.model.AbstractPersistenceEntity within the modern project structure, that you can extend.

+
+
+

The reason for this recommendation is simply because using a number (Long) is the most efficient representation for the database. +You may also consider to use other types like String or UUID or even composite custom datatypes and this is technically possible. +However, please consider that the primary key is used to lookup the row from the database table, also in foreign keys and thus in JOINs. +Please note that your project sooner or later may reach some complexity where performance really matters. +Working on big data and performing JOINs when using types such as String (VARCHAR[2]) as primary and foreign keys will kill your performance. +You are still free to make a different choice and devonfw only gives recommendations but does not want to dictate you what to do. +However, you have been warned about the consequences. +If you are well aware of what you are doing, you can still use different types of primary keys. +In such case, create your own entity not extending AbstractPersistenceEntity or create your own copy of AbstractPersistenceEntity with a different name and a different type of primary key.

+
+
+

In case you have business oriented keys (often as String), you can define an additional property for it and declare it as unique (@Column(unique=true)). +Be sure to include "AUTO_INCREMENT" in your sql table field ID to be able to persist data (or similar for other databases).

+
+
+
+
+

Relationships

+
+
n:1 and 1:1 Relationships
+
+

Entities often do not exist independently but are in some relation to each other. For example, for every period of time that one of the StaffMembers of the restaurant example has worked, which is represented by the class WorkingTime, there is a relationship to this StaffMember.

+
+
+

The following listing shows how this can be modeled using JPA:

+
+
+
+
...
+
+@Entity
+public class WorkingTimeEntity {
+   ...
+
+   private StaffMemberEntity staffMember;
+
+   @ManyToOne
+   @JoinColumn(name="STAFFMEMBER")
+   public StaffMemberEntity getStaffMember() {
+      return this.staffMember;
+   }
+
+   public void setStaffMember(StaffMemberEntity staffMember) {
+      this.staffMember = staffMember;
+   }
+}
+
+
+
+

To represent the relationship, an attribute of the type of the corresponding entity class that is referenced has been introduced. The relationship is a n:1 relationship, because every WorkingTime belongs to exactly one StaffMember, but a StaffMember has usually worked more than once.
+This is why the @ManyToOne annotation is used here. For 1:1 relationships the @OneToOne annotation can be used which works basically the same way. To be able to save information about the relation in the database, an additional column in the corresponding table of WorkingTime is needed which contains the primary key of the referenced StaffMember. With the name element of the @JoinColumn annotation it is possible to specify the name of this column.

+
+
+
+
1:n and n:m Relationships
+
+

The relationship of the example listed above is currently an unidirectional one, as there is a getter method for retrieving the StaffMember from the WorkingTime object, but not vice versa.

+
+
+

To make it a bidirectional one, the following code has to be added to StaffMember:

+
+
+
+
  private Set<WorkingTimeEntity> workingTimes;
+
+  @OneToMany(mappedBy="staffMember")
+  public Set<WorkingTimeEntity> getWorkingTimes() {
+    return this.workingTimes;
+  }
+
+  public void setWorkingTimes(Set<WorkingTimeEntity> workingTimes) {
+    this.workingTimes = workingTimes;
+  }
+
+
+
+

To make the relationship bidirectional, the tables in the database do not have to be changed. Instead the column that corresponds to the attribute staffMember in class WorkingTime is used, which is specified by the mappedBy element of the @OneToMany annotation. Hibernate will search for corresponding WorkingTime objects automatically when a StaffMember is loaded.

+
+
+

The problem with bidirectional relationships is that if a WorkingTime object is added to the set or list workingTimes in StaffMember, this does not have any effect in the database unless +the staffMember attribute of that WorkingTime object is set. That is why the devon4j advices not to use bidirectional relationships but to use queries instead. How to do this is shown here. If a bidirectional relationship should be used nevertheless, appropriate add and remove methods must be used.

+
+
+

For 1:n and n:m relations, the devon4j demands that (unordered) Sets and no other collection types are used, as shown in the listing above. The only exception is whenever an ordering is really needed, (sorted) lists can be used.
+For example, if WorkingTime objects should be sorted by their start time, this could be done like this:

+
+
+
+
  private List<WorkingTimeEntity> workingTimes;
+
+  @OneToMany(mappedBy = "staffMember")
+  @OrderBy("startTime asc")
+  public List<WorkingTimeEntity> getWorkingTimes() {
+    return this.workingTimes;
+  }
+
+  public void setWorkingTimes(List<WorkingTimeEntity> workingTimes) {
+    this.workingTimes = workingTimes;
+  }
+
+
+
+

The value of the @OrderBy annotation consists of an attribute name of the class followed by asc (ascending) or desc (descending).

+
+
+

To store information about a n:m relationship, a separate table has to be used, as one column cannot store several values (at least if the database schema is in first normal form).
+For example if one wanted to extend the example application so that all ingredients of one FoodDrink can be saved and to model the ingredients themselves as entities (e.g. to store additional information about them), this could be modeled as follows (extract of class FoodDrink):

+
+
+
+
  private Set<IngredientEntity> ingredients;
+
+  @ManyToMany()
+  @JoinTable
+  public Set<IngredientEntity> getIngredients() {
+    return this.ingredients;
+  }
+
+  public void setOrders(Set<IngredientEntity> ingredients) {
+    this.ingredients = ingredients;
+  }
+
+
+
+

Information about the relation is stored in a separate table called FOODDRINK_INGREDIENT that has to have two columns, one for referencing the FoodDrink, the other one for referencing the Ingredient. Note that the @JoinTable annotation is not needed in this case because a separate table is the default solution here (same for n:m relations) unless there is a mappedBy element specified.

+
+
+

For 1:n relationships this solution has the disadvantage that more joins (in the database system) are needed to get a Bill with all the Orders it refers to. This might have a negative impact on performance so that the solution to store a reference to the Bill row/entity in the Order’s table is probably the better solution in most cases.

+
+
+

Note that bidirectional n:m relationships are not allowed for applications based on devon4j. Instead a third entity has to be introduced, which "represents" the relationship (it has two n:1 relationships).

+
+
+
+
Eager vs. Lazy Loading
+
+

Using JPA it is possible to use either lazy or eager loading. Eager loading means that for entities retrieved from the database, other entities that are referenced by these entities are also retrieved, whereas lazy loading means that this is only done when they are actually needed, i.e. when the corresponding getter method is invoked.

+
+
+

Application based on devon4j are strongly advised to always use lazy loading. The JPA defaults are:

+
+
+
    +
  • +

    @OneToMany: LAZY

    +
  • +
  • +

    @ManyToMany: LAZY

    +
  • +
  • +

    @ManyToOne: EAGER

    +
  • +
  • +

    @OneToOne: EAGER

    +
  • +
+
+
+

So at least for @ManyToOne and @OneToOne you always need to override the default by providing fetch = FetchType.LAZY.

+
+
+ + + + + +
+ + +Please read the performance guide. +
+
+
+
+
Cascading Relationships
+
+

For relations it is also possible to define whether operations are cascaded (like a recursion) to the related entity. +By default, nothing is done in these situations. This can be changed by using the cascade property of the annotation that specifies the relation type (@OneToOne, @ManyToOne, @OneToMany, @ManyToMany). This property accepts a CascadeType that offers the following options:

+
+
+
    +
  • +

    PERSIST (for EntityManager.persist, relevant for inserting transient entities into the DB)

    +
  • +
  • +

    REMOVE (for EntityManager.remove to delete entity from DB)

    +
  • +
  • +

    MERGE (for EntityManager.merge)

    +
  • +
  • +

    REFRESH (for EntityManager.refresh)

    +
  • +
  • +

    DETACH (for EntityManager.detach)

    +
  • +
  • +

    ALL (cascade all of the above operations)

    +
  • +
+
+
+

See here for more information.

+
+
+
+
Typesafe Foreign Keys using IdRef
+
+

For simple usage you can use Long for all your foreign keys. +However, as an optional pattern for advanced and type-safe usage, we offer IdRef.

+
+
+
+
+

Embeddable

+
+

An embeddable Object is a way to group properties of an entity into a separate Java (child) object. Unlike with relationships between entities the embeddable is not a separate entity and its properties are stored (embedded) in the same table together with the entity. This is helpful to structure and reuse groups of properties.

+
+
+

The following example shows an Address implemented as an embeddable class:

+
+
+
+
@Embeddable
+public class AddressEmbeddable {
+
+  private String street;
+  private String number;
+  private Integer zipCode;
+  private String city;
+
+  @Column(name="STREETNUMBER")
+  public String getNumber() {
+    return number;
+  }
+
+  public void setNumber(String number) {
+    this.number = number;
+  }
+
+  ...  // other getter and setter methods, equals, hashCode
+}
+
+
+
+

As you can see an embeddable is similar to an entity class, but with an @Embeddable annotation instead of the @Entity annotation and without primary key or modification counter. +An Embeddable does not exist on its own but in the context of an entity. +As a simplification Embeddables do not require a separate interface and ETO as the bean-mapper will create a copy automatically when converting the owning entity to an ETO. +However, in this case the embeddable becomes part of your api module that therefore needs a dependency on the JPA.

+
+
+

In addition to that the methods equals(Object) and hashCode() need to be implemented as this is required by Hibernate (it is not required for entities because they can be unambiguously identified by their primary key). For some hints on how to implement the hashCode() method please have a look here.

+
+
+

Using this AddressEmbeddable inside an entity class can be done like this:

+
+
+
+
  private AddressEmbeddable address;
+
+  @Embedded
+  public AddressEmbeddable getAddress() {
+    return this.address;
+  }
+
+  public void setAddress(AddressEmbeddable address) {
+    this.address = address;
+  }
+}
+
+
+
+

The @Embedded annotation needs to be used for embedded attributes. Note that if all columns of the embeddable (here Address) are null, then the embeddable object itself is also null inside the entity. This has to be considered to avoid NullPointerExceptions. Further this causes some issues with primitive types in embeddable classes that can be avoided by only using object types instead.

+
+
+
+

Inheritance

+
+

Just like normal java classes, entity classes can inherit from others. The only difference is that you need to specify how to map a class hierarchy to database tables. Generic abstract super-classes for entities can simply be annotated with @MappedSuperclass.

+
+
+

For all other cases the JPA offers the annotation @Inheritance with the property strategy taking an InheritanceType that has the following options:

+
+
+
+
+
    +
  • +

    SINGLE_TABLE: This strategy uses a single table that contains all columns needed to store all entity-types of the entire inheritance hierarchy. If a column is not needed for an entity because of its type, there is a null value in this column. An additional column is introduced, which denotes the type of the entity (called dtype).

    +
  • +
  • +

    TABLE_PER_CLASS: For each concrete entity class there is a table in the database that can store such an entity with all its attributes. An entity is only saved in the table corresponding to its most concrete type. To get all entities of a super type, joins are needed.

    +
  • +
  • +

    JOINED: In this case there is a table for every entity class including abstract classes, which contains only the columns for the persistent properties of that particular class. Additionally there is a primary key column in every table. To get an entity of a class that is a subclass of another one, joins are needed.

    +
  • +
+
+
+
+
+

Each of the three approaches has its advantages and drawbacks, which are discussed in detail here. In most cases, the first one should be used, because it is usually the fastest way to do the mapping, as no joins are needed when retrieving, searching or persisting entities. Moreover it is rather simple and easy to understand. +One major disadvantage is that the first approach could lead to a table with a lot of null values, which might have a negative impact on the database size.

+
+
+

The inheritance strategy has to be annotated to the top-most entity of the class hierarchy (where @MappedSuperclass classes are not considered) like in the following example:

+
+
+
+
@Entity
+@Inheritance(strategy=InheritanceType.SINGLE_TABLE)
+public abstract class MyParentEntity extends ApplicationPersistenceEntity implements MyParent {
+  ...
+}
+
+@Entity
+public class MyChildEntity extends MyParentEntity implements MyChild {
+  ...
+}
+
+@Entity
+public class MyOtherEntity extends MyParentEntity implements MyChild {
+  ...
+}
+
+
+
+

As a best practice we advise you to avoid entity hierarchies at all where possible and otherwise to keep the hierarchy as small as possible. In order to just ensure reuse or establish a common API you can consider a shared interface, a @MappedSuperclass or an @Embeddable instead of an entity hierarchy.

+
+
+
+

Repositories and DAOs

+
+

For each entity a code unit is created that groups all database operations for that entity. We recommend to use spring-data repositories for that as it is most efficient for developers. As an alternative there is still the classic approach using DAOs.

+
+
+
Concurrency Control
+
+

The concurrency control defines the way concurrent access to the same data of a database is handled. When several users (or threads of application servers) concurrently access a database, anomalies may happen, e.g. a transaction is able to see changes from another transaction although that one did not yet commit these changes. Most of these anomalies are automatically prevented by the database system, depending on the isolation level (property hibernate.connection.isolation in the jpa.xml, see here, or quarkus.datasource.jdbc.transaction-isolation-level in the application.properties).

+
+
+

Another anomaly is when two stakeholders concurrently access a record, do some changes and write them back to the database. The JPA addresses this with different locking strategies (see here).

+
+
+

As a best practice we are using optimistic locking for regular end-user services (OLTP) and pessimistic locking for batches.

+
+
+
+
Optimistic Locking
+
+

The class com.devonfw.module.jpa.persistence.api.AbstractPersistenceEntity already provides optimistic locking via a modificationCounter with the @Version annotation. Therefore JPA takes care of optimistic locking for you. When entities are transferred to clients, modified and sent back for update you need to ensure the modificationCounter is part of the game. If you follow our guides about transfer-objects and services this will also work out of the box. +You only have to care about two things:

+
+
+
    +
  • +

    How to deal with optimistic locking in relationships?
    +Assume an entity A contains a collection of B entities. Should there be a locking conflict if one user modifies an instance of A while another user in parallel modifies an instance of B that is contained in the other instance? To address this, take a look at FeatureForceIncrementModificationCounter.

    +
  • +
  • +

    What should happen in the UI if an OptimisticLockException occurred?
    +According to KISS our recommendation is that the user gets an error displayed that tells him to do his change again on the recent data. Try to design your system and the work processing in a way to keep such conflicts rare and you are fine.

    +
  • +
+
+
+
+
Pessimistic Locking
+
+

For back-end services and especially for batches optimistic locking is not suitable. A human user shall not cause a large batch process to fail because he was editing the same entity. Therefore such use-cases use pessimistic locking, which gives them a kind of priority over the human users. +In your DAO implementation you can provide methods that do pessimistic locking via EntityManager operations that take a LockModeType. Here is a simple example:

+
+
+
+
  getEntityManager().lock(entity, LockModeType.READ);
+
+
+
+

When using the lock(Object, LockModeType) method with LockModeType.READ, Hibernate will issue a SELECT …​ FOR UPDATE. This means that no one else can update the entity (see here for more information on the statement). If LockModeType.WRITE is specified, Hibernate issues a SELECT …​ FOR UPDATE NOWAIT instead, which has the same meaning as the statement above, but if there is already a lock, the program will not wait for this lock to be released. Instead, an exception is raised.
+Use one of the types if you want to modify the entity later on, for read only access no lock is required.

+
+
+

As you might have noticed, the behavior of Hibernate deviates from what one would expect by looking at the LockModeType (especially LockModeType.READ should not cause a SELECT …​ FOR UPDATE to be issued). The framework actually deviates from what is specified in the JPA for unknown reasons.

+
+
+
+
+

Database Auditing

+ +
+
+

Testing Data-Access

+
+

For testing of Entities and Repositories or DAOs see testing guide.

+
+
+
+

Principles

+
+

We strongly recommend these principles:

+
+
+
    +
  • +

    Use the JPA where ever possible and use vendor (hibernate) specific features only for situations when JPA does not provide a solution. In the latter case consider first if you really need the feature.

    +
  • +
  • +

    Create your entities as simple POJOs and use JPA to annotate the getters in order to define the mapping.

    +
  • +
  • +

    Keep your entities simple and avoid putting advanced logic into entity methods.

    +
  • +
+
+
+
+

Database Configuration

+
+

For details on the configuration of the database connection and database logging of the individual framework, please refer to the respective configuration guide.

+
+
+

For spring see here.

+
+
+

For quarkus see here.

+
+
+
Database Migration
+ +
+
+
Pooling
+
+

You typically want to pool JDBC connections to boost performance by recycling previous connections. There are many libraries available to do connection pooling. We recommend to use HikariCP. For Oracle RDBMS see here.

+
+
+
+
+

Security

+
+
SQL-Injection
+
+

A common security threat is SQL-injection. Never build queries with string concatenation or your code might be vulnerable as in the following example:

+
+
+
+
  String query = "Select op from OrderPosition op where op.comment = " + userInput;
+  return getEntityManager().createQuery(query).getResultList();
+
+
+
+

Via the parameter userInput an attacker can inject SQL (JPQL) and execute arbitrary statements in the database causing extreme damage.

+
+
+

In order to prevent such injections you have to strictly follow our rules for queries:

+
+
+ +
+
+
+
Limited Permissions for Application
+
+

We suggest that you operate your application with a database user that has limited permissions so he can not modify the SQL schema (e.g. drop tables). For initializing the schema (DDL) or to do schema migrations use a separate user that is not used by the application itself.

+
+ +
+

==Queries +The Java Persistence API (JPA) defines its own query language, the java persistence query language (JPQL) (see also JPQL tutorial), which is similar to SQL but operates on entities and their attributes instead of tables and columns.

+
+
+

The simplest CRUD-Queries (e.g. find an entity by its ID) are already built into the devonfw CRUD functionality (via Repository or DAO). For other cases you need to write your own query. We distinguish between static and dynamic queries. Static queries have a fixed JPQL query string that may only use parameters to customize the query at runtime. Instead, dynamic queries can change their clauses (WHERE, ORDER BY, JOIN, etc.) at runtime depending on the given search criteria.

+
+
+
+
Static Queries
+
+

E.g. to find all DishEntries (from MTS sample app) that have a price not exceeding a given maxPrice we write the following JPQL query:

+
+
+
+
SELECT dish FROM DishEntity dish WHERE dish.price <= :maxPrice
+
+
+
+

Here dish is used as alias (variable name) for our selected DishEntity (what refers to the simple name of the Java entity class). With dish.price we are referring to the Java property price (getPrice()/setPrice(…​)) in DishEntity. A named variable provided from outside (the search criteria at runtime) is specified with a colon (:) as prefix. Here with :maxPrice we reference to a variable that needs to be set via query.setParameter("maxPrice", maxPriceValue). JPQL also supports indexed parameters (?) but they are discouraged because they easily cause confusion and mistakes.

+
+
+
Using Queries to Avoid Bidirectional Relationships
+
+

With the usage of queries it is possible to avoid exposing relationships or modelling bidirectional relationships, which have some disadvantages (see relationships). This is especially desired for relationships between entities of different business components. +So for example to get all OrderLineEntities for a specific OrderEntity without using the orderLines relation from OrderEntity the following query could be used:

+
+
+
+
SELECT line FROM OrderLineEntity line WHERE line.order.id = :orderId
+
+
+
+
+
+
Dynamic Queries
+
+

For dynamic queries, we use the JPA module for Querydsl. Querydsl also supports other modules such as MongoDB and Apache Lucene. It allows you to implement queries in a powerful but readable and type-safe way (unlike Criteria API). If you already know JPQL, you will quickly be able to read and write Querydsl code. It feels like JPQL but implemented in Java instead of plain text.

+
+
+

To use Querydsl in your Maven project, add the following dependencies:

+
+
+
+
<dependencies>
+
+    <dependency>
+        <groupId>com.querydsl</groupId>
+        <artifactId>querydsl-apt</artifactId>
+        <version>${querydsl.version}</version>
+        <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+        <groupId>com.querydsl</groupId>
+        <artifactId>querydsl-jpa</artifactId>
+        <version>${querydsl.version}</version>
+    </dependency>
+
+</dependencies>
+
+
+
+

Next, configure the annotation processing tool (APT) plugin:

+
+
+
+
<project>
+  <build>
+    <plugins>
+      ...
+      <plugin>
+        <groupId>com.mysema.maven</groupId>
+        <artifactId>apt-maven-plugin</artifactId>
+        <version>1.1.3</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>process</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>target/generated-sources/java</outputDirectory>
+              <processor>com.querydsl.apt.jpa.JPAAnnotationProcessor</processor>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      ...
+    </plugins>
+  </build>
+</project>
+
+
+
+

Here is an example from our sample application:

+
+
+
+
  public List<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    QDishEntity dish = QDishEntity.dishEntity;
+    JPAQuery<DishEntity> query = new JPAQuery<DishEntity>(getEntityManager());
+    query.from(dish);
+
+    Range<BigDecimal> priceRange = criteria.getPriceRange();
+    if (priceRange != null) {
+      BigDecimal min = priceRange.getMin();
+      if (min != null) {
+        query.where(dish.price.goe(min));
+      }
+      BigDecimal max = priceRange.getMax();
+      if (max != null) {
+        query.where(dish.price.loe(max));
+      }
+    }
+    String name = criteria.getName();
+    if ((name != null) && (!name.isEmpty())) {
+      query.where(dish.name.eq(name));
+    }
+    query.orderBy(dish.price.asc(), dish.name.asc());
+    return query.fetch();
+  }
+
+
+
+

In this example, we use the so called Q-types (QDishEntity). These are classes generated at build time by the Querydsl annotation processor from entity classes. The Q-type classes can be used as static types representative of the original entity class.

+
+
+

The query.from(dish) method call defines the query source, in this case the dish table. The where method defines a filter. For example, The first call uses the goe operator to filter out any dishes that are not greater or equal to the minimal price. Further operators can be found here.

+
+
+

The orderBy method is used to sort the query results according to certain criteria. Here, we sort the results first by their price and then by their name, both in ascending order. To sort in descending order, use .desc(). To partition query results into groups of rows, see the groupBy method.

+
+
+

For spring, devon4j provides another approach that you can use for your Spring applications to implement Querydsl logic without having to use these metaclasses. An example can be found here.

+
+
+
+
Native Queries
+
+

Spring Data supports the use of native queries. Native queries use simple native SQL syntax that is not parsed in JPQL. This allows you to use all the features that your database supports. +The downside to this is that database portability is lost due to the absence of an abstraction layer. Therefore, the queries may not work with another database because it may use a different syntax.

+
+
+

You can implement a native query using @Query annotation with the nativeQuery attribute set to true:

+
+
+
+
@Query(value="...", nativeQuery=true)
+
+
+
+ + + + + +
+ + +This will not work with Quarkus because Quarkus does not support native queries by using the @Query annotation (see here). +
+
+
+

You can also implement native queries directly using the EntityManager API and the createNativeQuery method. +This approach also works with Quarkus.

+
+
+
+
Query query = entityManager.createNativeQuery("SELECT * FROM Product", ProductEntity.class);
+List<ProductEntity> products = query.getResultList();
+
+
+
+ + + + + +
+ + +Be sure to use the name of the table when using native queries, while you must use the entity name when implementing queries with JPQL. +
+
+
+
+
Using Wildcards
+
+

For flexible queries it is often required to allow wildcards (especially in dynamic queries). While users intuitively expect glob syntax, the SQL and JPQL standards work differently. Therefore, a mapping is required. devonfw provides this on a lower level with LikePatternSyntax and on a higher level with QueryUtil (see QueryHelper.newStringClause(…​)).

+
+
+
+
Pagination
+
+

When dealing with large amounts of data, an efficient method of retrieving the data is required. Fetching the entire data set each time would be too time consuming. Instead, Paging is used to process only small subsets of the entire data set.

+
+
+

If you are using Spring Data repositories you will get pagination support out of the box by providing the interfaces Page and Pageable:

+
+
+
repository
+
+
Page<DishEntity> findAll(Pageable pageable);
+
+
+
+

Then you can create a Pageable object and pass it to the method call as follows:

+
+
+
+
int page = criteria.getPageNumber();
+int size = criteria.getPageSize();
+Pageable pageable = PageRequest.of(page, size);
+Page<DishEntity> dishes = dishRepository.findAll(pageable);
+
+
+
+
Paging with Querydsl
+
+

Pagination is also supported for dynamic queries with Querydsl:

+
+
+
+
  public Page<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    QDishEntity dish = QDishEntity.dishEntity;
+    JPAQuery<DishEntity> query = new JPAQuery<DishEntity>(getEntityManager());
+    query.from(dish);
+
+    // conditions
+
+    int page = criteria.getPageNumber();
+    int size = criteria.getPageSize();
+    Pageable pageable = PageRequest.of(page, size);
+    query.offset(pageable.getOffset());
+    query.limit(pageable.getPageSize());
+
+    List<DishEntity> dishes = query.fetch();
+    return new PageImpl<>(dishes, pageable, dishes.size());
+  }
+
+
+
+
+
Pagination example
+
+

For the table entity we can make a search request by accessing the REST endpoint with pagination support like in the following examples:

+
+
+
+
POST mythaistar/services/rest/tablemanagement/v1/table/search
+{
+  "pagination": {
+    "size":2,
+    "total":true
+  }
+}
+
+//Response
+{
+    "pagination": {
+        "size": 2,
+        "page": 1,
+        "total": 11
+    },
+    "result": [
+        {
+            "id": 101,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 1,
+            "state": "OCCUPIED"
+        },
+        {
+            "id": 102,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 2,
+            "state": "FREE"
+        }
+    ]
+}
+
+
+
+ + + + + +
+ + +As we are requesting with the total property set to true the server responds with the total count of rows for the query. +
+
+
+

For retrieving a concrete page, we provide the page attribute with the desired value. Here we also left out the total property so the server doesn’t incur the effort of calculating it:

+
+
+
+
POST mythaistar/services/rest/tablemanagement/v1/table/search
+{
+  "pagination": {
+    "size":2,
+    "page":2
+  }
+}
+
+//Response
+
+{
+    "pagination": {
+        "size": 2,
+        "page": 2,
+        "total": null
+    },
+    "result": [
+        {
+            "id": 103,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 3,
+            "state": "FREE"
+        },
+        {
+            "id": 104,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 4,
+            "state": "FREE"
+        }
+    ]
+}
+
+
+
+
+
Pagination in devon4j-spring
+
+

For spring applications, devon4j also offers its own solution for pagination. You can find an example of this here.

+
+
+
+
+
Query Meta-Parameters
+
+

Queries can have meta-parameters that are provided via SearchCriteriaTo. Besides paging (see above) we also get timeout support.

+
+
+
+
Advanced Queries
+
+

Writing queries can sometimes get rather complex. The current examples given above only showed very simple basics. Within this topic a lot of advanced features need to be considered like:

+
+
+ +
+
+

This list is just containing the most important aspects. As we can not cover all these topics here, they are linked to external documentation that can help and guide you.

+
+ +
+

==Spring Data +Spring Data JPA is supported by both Spring and Quarkus. However, in Quarkus this approach still has some limitations. For detailed information, see the official Quarkus Spring Data guide.

+
+
+
+
Motivation
+
+

The benefits of Spring Data are (for examples and explanations see next sections):

+
+
+
    +
  • +

    All you need is one single repository interface for each entity. No need for a separate implementation or other code artifacts like XML descriptors, NamedQueries class, etc.

    +
  • +
  • +

    You have all information together in one place (the repository interface) that actually belongs together (whereas in the classic approach you have the static queries in an XML file, constants for them in NamedQueries class and referencing usages in DAO implementation classes).

    +
  • +
  • +

    Static queries are most simple to realize as you do not need to write any method body. This means you can develop faster.

    +
  • +
  • +

    Support for paging is already built-in. Again, for static query methods there is nothing you have to do except using the paging objects in the signature.

    +
  • +
  • +

    Still you have the freedom to write custom implementations via default methods within the repository interface (e.g. for dynamic queries).

    +
  • +
+
+
+
+
Dependency
+
+

In case you want to switch to or add Spring Data support to your Spring or Quarkus application, all you need is to add the respective maven dependency:

+
+
+
spring
+
+
<dependency>
+  <groupId>org.springframework.boot</groupId>
+  <artifactId>spring-boot-starter-data-jpa</artifactId>
+</dependency>
+
+
+
+
quarkus
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-spring-data-jpa</artifactId>
+</dependency>
+
+
+
+
+
Repository
+
+

For each entity «Entity»Entity an interface is created with the name «Entity»Repository extending JpaRepository. +Such repository is the analogy to a Data-Access-Object (DAO) used in the classic approach or when Spring Data is not an option.

+
+
+
Repository
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long> {
+
+}
+
+
+
+

The Spring Data repository provides some basic implementations for accessing data, e.g. returning all instances of a type (findAll) or returning an instance by its ID (findById).

+
+
+
+
Custom method implementation
+
+

In addition, repositories can be enriched with additional functionality, e.g. to add QueryDSL functionality or to override the default implementations, by using so called repository fragments:

+
+
+
Example
+
+

The following example shows how to write such a repository:

+
+
+
Repository
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long>, ProductFragment {
+
+  @Query("SELECT product FROM ProductEntity product" //
+      + " WHERE product.title = :title")
+  List<ProductEntity> findByTitle(@Param("title") String title);
+
+  @Query("SELECT product FROM ProductEntity product" //
+      + " WHERE product.title = :title")
+  Page<ProductEntity> findByTitlePaginated(@Param("title") String title, Pageable pageable);
+}
+
+
+
+
Repository fragment
+
+
public interface ProductFragment {
+  Page<ProductEntity> findByCriteria(ProductSearchCriteriaTo criteria);
+}
+
+
+
+
Fragment implementation
+
+
public class ProductFragmentImpl implements ProductFragment {
+  @Inject
+  EntityManager entityManager;
+
+  public Page<ProductEntity> findByCriteria(ProductSearchCriteriaTo criteria) {
+    QProductEntity product = QProductEntity.productEntity;
+    JPAQuery<ProductEntity> query = new JPAQuery<ProductEntity>(this.entityManager);
+    query.from(product);
+
+    String title = criteria.getTitle();
+    if ((title != null) && !title.isEmpty()) {
+      query.where(product.title.eq(title));
+    }
+
+    List<ProductEntity> products = query.fetch();
+    return new PageImpl<>(products, PageRequest.of(criteria.getPageNumber(), criteria.getPageSize()), products.size());
+  }
+}
+
+
+
+

This ProductRepository has the following features:

+
+
+
    +
  • +

    CRUD support from Spring Data (see JavaDoc for details).

    +
  • +
  • +

    Support for QueryDSL integration, paging and more.

    +
  • +
  • +

    A static query method findByTitle to find all ProductEntity instances from DB that have the given title. Please note the @Param annotation that links the method parameter with the variable inside the query (:title).

    +
  • +
  • +

    The same with pagination support via findByTitlePaginated method.

    +
  • +
  • +

    A dynamic query method findByCriteria showing the QueryDSL and paging integration into Spring via a fragment implementation.

    +
  • +
+
+
+

You can find an implementation of this ProductRepository in our Quarkus reference application.

+
+
+ + + + + +
+ + +In Quarkus, native and named queries via the @Query annotation are currently not supported +
+
+
+
+
Integration of Spring Data in devon4j-spring
+
+

For Spring applications, devon4j offers a proprietary solution that integrates seamlessly with QueryDSL and uses default methods instead of the fragment approach. A separate guide for this can be found here.

+
+
+
+
Custom methods without fragment approach
+
+

The fragment approach is a bit laborious, as three types (repository interface, fragment interface and fragment implementation) are always needed to implement custom methods. +We cannot simply use default methods within the repository because we cannot inject the EntityManager directly into the repository interface.

+
+
+

As a workaround, you can create a GenericRepository interface, as is done in the devon4j jpa-spring-data module.

+
+
+
+
public interface GenericRepository<E> {
+
+  EntityManager getEntityManager();
+
+  ...
+}
+
+
+
+
+
public class GenericRepositoryImpl<E> implements GenericRepository<E> {
+
+  @Inject
+  EntityManager entityManager;
+
+  @Override
+  public EntityManager getEntityManager() {
+
+    return this.entityManager;
+  }
+
+  ...
+}
+
+
+
+

Then, all your repository interfaces can extend the GenericRepository and you can implement queries directly in the repository interface using default methods:

+
+
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long>, GenericRepository<ProductEntity> {
+
+  default Page<ProductEntity> findByTitle(Title title) {
+
+    EntityManager entityManager = getEntityManager();
+    Query query = entityManager.createNativeQuery("select * from Product where title = :title", ProductEntity.class);
+    query.setParameter("title", title);
+    List<ProductEntity> products = query.getResultList();
+    return new PageImpl<>(products);
+  }
+
+  ...
+}
+
+
+
+
+
+
Drawbacks
+
+

Spring Data also has some drawbacks:

+
+
+
    +
  • +

    Some kind of magic behind the scenes that is not so easy to understand. So in case you want to extend all your repositories without providing the implementation via a default method in a parent repository interface you need to deep-dive into Spring Data. We assume that you do not need that and hope what Spring Data and devon already provide out-of-the-box is already sufficient.

    +
  • +
  • +

    The Spring Data magic also includes guessing the query from the method name. This is not easy to understand and especially to debug. Our suggestion is not to use this feature at all and either provide a @Query annotation or an implementation via default method.

    +
  • +
+
+
+
+
Limitations in Quarkus
+
+
    +
  • +

    Native and named queries are not supported using @Query annotation. You will receive something like: Build step io.quarkus.spring.data.deployment.SpringDataJPAProcessor#build threw an exception: java.lang.IllegalArgumentException: Attribute nativeQuery of @Query is currently not supported

    +
  • +
  • +

    Customizing the base repository for all repository interfaces in the code base, which is done in Spring Data by registering a class that extends SimpleJpaRepository

    +
  • +
+
+ +
+

==Data Access Object

+
+
+

The Data Access Objects (DAOs) are part of the persistence layer. +They are responsible for a specific entity and should be named «Entity»Dao and «Entity»DaoImpl. +The DAO offers the so called CRUD-functionalities (create, retrieve, update, delete) for the corresponding entity. +Additionally a DAO may offer advanced operations such as query or locking methods.

+
+
+
+
DAO Interface
+
+

For each DAO there is an interface named «Entity»Dao that defines the API. For CRUD support and common naming we derive it from the ApplicationDao interface that comes with the devon application template:

+
+
+
+
public interface MyEntityDao extends ApplicationDao<MyEntity> {
+  List<MyEntity> findByCriteria(MyEntitySearchCriteria criteria);
+}
+
+
+
+

All CRUD operations are inherited from ApplicationDao so you only have to declare the additional methods.

+
+
+
+
DAO Implementation
+
+

Implementing a DAO is quite simple. We create a class named «Entity»DaoImpl that extends ApplicationDaoImpl and implements your «Entity»Dao interface:

+
+
+
+
public class MyEntityDaoImpl extends ApplicationDaoImpl<MyEntity> implements MyEntityDao {
+
+  public List<MyEntity> findByCriteria(MyEntitySearchCriteria criteria) {
+    TypedQuery<MyEntity> query = createQuery(criteria, getEntityManager());
+    return query.getResultList();
+  }
+  ...
+}
+
+
+
+

Again you only need to implement the additional non-CRUD methods that you have declared in your «Entity»Dao interface. +In the DAO implementation you can use the method getEntityManager() to access the EntityManager from the JPA. You will need the EntityManager to create and execute queries.

+
+
+
Static queries for DAO Implementation
+
+

All static queries are declared in the file src\main\resources\META-INF\orm.xml:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<entity-mappings version="1.0" xmlns="http://java.sun.com/xml/ns/persistence/orm" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://java.sun.com/xml/ns/persistence/orm http://java.sun.com/xml/ns/persistence/orm_1_0.xsd">
+  <named-query name="find.dish.with.max.price">
+    <query><![CDATA[SELECT dish FROM DishEntity dish WHERE dish.price <= :maxPrice]]></query>
+  </named-query>
+  ...
+</entity-mappings>
+
+
+
+

When your application is started, all these static queries will be created as prepared statements. This allows better performance and also ensures that you get errors for invalid JPQL queries when you start your app rather than later when the query is used.

+
+
+

To avoid redundant occurrences of the query name (get.open.order.positions.for.order) we define a constant for each named query:

+
+
+
+
public class NamedQueries {
+  public static final String FIND_DISH_WITH_MAX_PRICE = "find.dish.with.max.price";
+}
+
+
+
+

Note that changing the name of the java constant (FIND_DISH_WITH_MAX_PRICE) can be done easily with refactoring. Further you can trace where the query is used by searching the references of the constant.

+
+
+

The following listing shows how to use this query:

+
+
+
+
public List<DishEntity> findDishByMaxPrice(BigDecimal maxPrice) {
+  Query query = getEntityManager().createNamedQuery(NamedQueries.FIND_DISH_WITH_MAX_PRICE);
+  query.setParameter("maxPrice", maxPrice);
+  return query.getResultList();
+}
+
+
+
+

Via EntityManager.createNamedQuery(String) we create an instance of Query for our predefined static query. +Next we use setParameter(String, Object) to provide a parameter (maxPrice) to the query. This has to be done for all parameters of the query.

+
+
+

Note that using the createQuery(String) method, which takes the entire query as string (that may already contain the parameter) is not allowed to avoid SQL injection vulnerabilities. +When the method getResultList() is invoked, the query is executed and the result is delivered as List. As an alternative, there is a method called getSingleResult(), which returns the entity if the query returned exactly one and throws an exception otherwise.

+
+ +
+

==JPA Performance +When using JPA the developer sometimes does not see or understand where and when statements to the database are triggered.

+
+
+
+
+

Establishing expectations Developers shouldn’t expect to sprinkle magic pixie dust on POJOs in hopes they will become persistent.

+
+
+
+— Dan Allen
+https://epdf.tips/seam-in-action.html +
+
+
+

So in case you do not understand what is going on under the hood of JPA, you will easily run into performance issues due to lazy loading and other effects.

+
+
+
+
+
N plus 1 Problem
+
+

The most prominent phenomenon is called the N+1 Problem. +We use entities from our MTS demo app as an example to explain the problem. +There is a DishEntity that has a @ManyToMany relation to +IngredientEntity. +Now we assume that we want to iterate all ingredients for a dish like this:

+
+
+
+
DishEntity dish = dao.findDishById(dishId);
+BigDecimal priceWithAllExtras = dish.getPrice();
+for (IngredientEntity ingredient : dish.getExtras()) {
+  priceWithAllExtras = priceWithAllExtras.add(ingredient.getPrice());
+}
+
+
+
+

Now dish.getExtras() is loaded lazy. Therefore the JPA vendor will provide a list with lazy initialized instances of IngredientEntity that only contain the ID of that entity. Now with every call of ingredient.getPrice() we technically trigger an SQL query statement to load the specific IngredientEntity by its ID from the database. +Now findDishById caused 1 initial query statement and for any number N of ingredients we are causing an additional query statement. This makes a total of N+1 statements. As causing statements to the database is an expensive operation with a lot of overhead (creating connection, etc.) this ends in bad performance and is therefore a problem (the N+1 Problem).

+
+
+
+
Solving N plus 1 Problem
+
+

To solve the N+1 Problem you need to change your code to only trigger a single statement instead. This can be achieved in various ways. The most universal solution is to use FETCH JOIN in order to pre-load the nested N child entities into the first level cache of the JPA vendor implementation. This will behave very similar as if the @ManyToMany relation to IngredientEntity was having FetchType.EAGER but only for the specific query and not in general. Because changing @ManyToMany to FetchType.EAGER would cause bad performance for other use cases where only the dish but not its extra ingredients are needed. For this reason all relations, including @OneToOne should always be FetchType.LAZY. Back to our example we simply replace dao.findDishById(dishId) with dao.findDishWithExtrasById(dishId) that we implement by the following JPQL query:

+
+
+
+
SELECT dish FROM DishEntity dish
+  LEFT JOIN FETCH dish.extras
+  WHERE dish.id = :dishId
+
+
+
+

The rest of the code does not have to be changed but now dish.getExtras() will get the IngredientEntity from the first level cache where it was fetched by the initial query above.

+
+
+

Please note that if you only need the sum of the prices from the extras you can also create a query using an aggregator function:

+
+
+
+
SELECT SUM(extra.price) FROM DishEntity dish JOIN dish.extras extra WHERE dish.id = :dishId
+
+
+
+

As you can see you need to understand the concepts in order to get good performance.

+
+
+

There are many advanced topics such as creating database indexes or calculating statistics for the query optimizer to get the best performance. For such advanced topics we recommend to have a database expert in your team that cares about such things. However, understanding the N+1 Problem and its solutions is something that every Java developer in the team needs to understand.

+
+
+ +
+

==Auditing

+
+
+

For database auditing we use hibernate envers. If you want to use auditing ensure you have the following dependency in your pom.xml:

+
+
+
spring
+
+
<dependency>
+  <groupId>com.devonfw.java.modules</groupId>
+  <artifactId>devon4j-jpa-envers</artifactId>
+</dependency>
+
+
+
+
quarkus
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-hibernate-envers</artifactId>
+</dependency>
+
+
+
+ + + + + +
+ + +The following part applies only to spring applications. At this point, the Quarkus extension does not provide any additional configurations. For Quarkus applications, simply use the @Audited annotation to enable auditing for an entity class, as described a few lines below or seen here. +
+
+
+

Make sure that the entity manager also scans the package from the devon4j-jpa[-envers] module in order to work properly. And make sure that the correct Repository Factory Bean Class is chosen.

+
+
+
+
@EntityScan(basePackages = { "«my.base.package»" }, basePackageClasses = { AdvancedRevisionEntity.class })
+...
+@EnableJpaRepositories(repositoryFactoryBeanClass = GenericRevisionedRepositoryFactoryBean.class)
+...
+public class SpringBootApp {
+  ...
+}
+
+
+
+

Now let your [Entity]Repository extend from DefaultRevisionedRepository instead of DefaultRepository.

+
+
+

The repository now has a method getRevisionHistoryMetadata(id) and getRevisionHistoryMetadata(id, boolean lazy) available to get a list of revisions for a given entity and a method find(id, revision) to load a specific revision of an entity with the given ID or getLastRevisionHistoryMetadata(id) to load the last revision. +To enable auditing for an entity simply place the @Audited annotation on your entity and all entity classes it extends from.

+
+
+
+
@Entity(name = "Drink")
+@Audited
+public class DrinkEntity extends ProductEntity implements Drink {
+...
+
+
+
+

When auditing is enabled for an entity an additional database table is used to store all changes to the entity table and a corresponding revision number. This table is called <ENTITY_NAME>_AUD per default. Another table called REVINFO is used to store all revisions. Make sure that these tables are available. They can be generated by hibernate with the following property (only for development environments).

+
+
+
+
  database.hibernate.hbm2ddl.auto=create
+
+
+
+

Another possibility is to put them in your database migration scripts like so.

+
+
+
+
CREATE CACHED TABLE PUBLIC.REVINFO(
+  id BIGINT NOT NULL generated by default as identity (start with 1),
+  timestamp BIGINT NOT NULL,
+  user VARCHAR(255)
+);
+...
+CREATE CACHED TABLE PUBLIC.<TABLE_NAME>_AUD(
+    <ALL_TABLE_ATTRIBUTES>,
+    revtype TINYINT,
+    rev BIGINT NOT NULL
+);
+
+
+
+ +
+

==Transaction Handling

+
+
+

For transaction handling we use AOP to add transaction control via annotations as aspect. +This is done by annotating your code with the @Transactional annotation. +You can either annotate your container bean at class level to make all methods transactional or you can annotate individual methods to make them transactional:

+
+
+
+
  @Transactional
+  public Output getData(Input input) {
+    ...
+  }
+
+
+
+
+
+

JTA Imports

+
+

Here are the import statements for transaction support:

+
+
+
+
import javax.transaction.Transactional;
+
+
+
+ + + + + +
+ + +Use the above import statement to follow JEE and avoid using org.springframework.transaction.annotation.Transactional. +
+
+
+
+

JTA Dependencies

+
+

Please note that with Jakarta EE the dependencies have changed. +When you want to start with Jakarta EE you should use these dependencies to get the annotations for dependency injection:

+
+
+
+
<!-- Java Transaction API (JTA) -->
+<dependency>
+  <groupId>jakarta.transaction</groupId>
+  <artifactId>jakarta.transaction-api</artifactId>
+</dependency>
+
+
+
+

Please note that with quarkus you will get them as transitive dependencies out of the box. +The above Jakarta EE dependencies replace these JEE dependencies:

+
+
+
+
<!-- Java Transaction API (JTA) -->
+<dependency>
+  <groupId>javax.transaction</groupId>
+  <artifactId>javax.transaction-api</artifactId>
+</dependency>
+
+
+
+
+

Handling constraint violations

+
+

Using @Transactional magically wraps transaction handling around your code. +As constraints are checked by the database at the end when the transaction gets committed, a constraint violation will be thrown by this aspect outside your code. +In case you have to handle constraint violations manually, you have to do that in code outside the logic that is annotated with @Transactional. +This may be done in a service operation by catching a ConstraintViolationException (org.hibernate.exception.ConstraintViolationException for hibernate). +As a generic approach you can solve this via REST exception handling.

+
+
+
+

Batches

+
+

Transaction control for batches is a lot more complicated and is described in the batch layer.

+
+
+ +
+

==SQL

+
+
+

For general guides on dealing or avoiding SQL, preventing SQL-injection, etc. you should study domain layer.

+
+
+
+

Naming Conventions

+
+

Here we define naming conventions that you should follow whenever you write SQL files:

+
+
+
    +
  • +

    All SQL-Keywords in UPPER CASE

    +
  • +
  • +

    Indentation should be 2 spaces as suggested by devonfw for every format.

    +
  • +
+
+
+
DDL
+
+

The naming conventions for database constructs (tables, columns, triggers, constraints, etc.) should be aligned with your database product and their operators. +However, when you have the freedom of choice and a modern case-sensitive database, you can simply use your code conventions also for database constructs to avoid explicitly mapping each and every property (e.g. RestaurantTable vs. RESTAURANT_TABLE).

+
+
+
    +
  • +

    Define columns and constraints inline in the statement to create the table

    +
  • +
  • +

    Indent column types so they all start in the same text column

    +
  • +
  • +

    Constraints should be named explicitly (to get a reasonable hint error messages) with:

    +
    +
      +
    • +

      PK_«table» for primary key (name optional here as PK constraint are fundamental)

      +
    • +
    • +

      FK_«table»_«property» for foreign keys («table» and «property» are both on the source where the foreign key is defined)

      +
    • +
    • +

      UC_«table»_«property»[_«propertyN»]* for unique constraints

      +
    • +
    • +

      CK_«table»_«check» for check constraints («check» describes the check, if it is defined on a single property it should start with the property).

      +
    • +
    +
    +
  • +
  • +

    Old RDBMS had hard limitations for names (e.g. 30 characters). Please note that recent databases have overcome this very low length limitations. However, keep your names short but precise and try to define common abbreviations in your project for according (business) terms. Especially do not just truncate the names at the limit.

    +
  • +
  • +

    If possible add comments on table and columns to help DBAs understanding your schema. This is also honored by many tools (not only DBA-tools).

    +
  • +
+
+
+

Here is a brief example of a DDL:

+
+
+
+
CREATE SEQUENCE HIBERNATE_SEQUENCE START WITH 1000000;
+
+-- *** Table ***
+CREATE TABLE RESTAURANT_TABLE (
+  ID                   NUMBER(19) NOT NULL,
+  MODIFICATION_COUNTER INTEGER NOT NULL,
+  SEATS                INTEGER NOT NULL,
+  CONSTRAINT PK_TABLE PRIMARY KEY(ID)
+);
+COMMENT ON TABLE RESTAURANT_TABLE IS 'The physical tables inside the restaurant.';
+-- *** Order ***
+CREATE TABLE RESTAURANT_ORDER (
+  ID                   NUMBER(19) NOT NULL,
+  MODIFICATION_COUNTER INTEGER NOT NULL,
+  TABLE_ID             NUMBER(19) NOT NULL,
+  TOTAL                DECIMAL(5, 2) NOT NULL,
+  CREATION_DATE        TIMESTAMP NOT NULL,
+  PAYMENT_DATE         TIMESTAMP,
+  STATUS               VARCHAR2(10 CHAR) NOT NULL,
+  CONSTRAINT PK_ORDER PRIMARY KEY(ID),
+  CONSTRAINT FK_ORDER_TABLE_ID FOREIGN KEY(TABLE_ID) REFERENCES RESTAURANT_TABLE(ID)
+);
+COMMENT ON TABLE RESTAURANT_ORDER IS 'An order and bill at the restaurant.';
+...
+
+
+
+

ATTENTION: Please note that TABLE and ORDER are reserved keywords in SQL and you should avoid using such keywords to prevent problems.

+
+
+
+
Data
+
+

For insert, update, delete, etc. of data SQL scripts should additionally follow these guidelines:

+
+
+
    +
  • +

    Inserts always with the same order of columns in blocks for each table.

    +
  • +
  • +

    Insert column values always starting with ID, MODIFICATION_COUNTER, [DTYPE, ] …​

    +
  • +
  • +

    List columns with fixed length values (boolean, number, enums, etc.) before columns with free text to support alignment of multiple insert statements

    +
  • +
  • +

    Pro Tip: Get familiar with column mode of advanced editors such as notepad++ when editing large blocks of similar insert statements.

    +
  • +
+
+
+
+
INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (0, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (1, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (2, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (3, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (4, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (5, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (6, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (7, 1, 8);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (8, 1, 8);
+...
+
+
+
+

See also Database Migrations.

+
+
+ +
+

==Database Migration

+
+
+

When you have a schema-based database, +you need a solution for schema versioning and migration for your database. +A specific release of your app requires a corresponding version of the schema in the database to run. +As you want simple and continuous deployment you should automate the schema versioning and database migration.

+
+
+

The general idea is that your software product contains "scripts" to migrate the database from schema version X to version X+1. +When you begin your project you start with version 1 and with every increment of your app that needs a change to the database schema (e.g. a new table, a new column to an existing table, a new index, etc.) you add another "script" that migrates from the current to the next version. +For simplicity these versions are just sequential numbers or timestamps. +Now, the solution you choose will automatically manage the schema version in a separate metadata table in your database that stores the current schema version. +When your app is started, it will check the current version inside the database from that metadata table. +As long as there are "scripts" that migrate from there to a higher version, they will be automatically applied to the database and this process is recorded in the metadata table in your database which also updates the current schema version there. +Using this approach, you can start with an empty database which will result in all "scripts" being applied sequentially. +Also any version of your database schema can be present and you will always end up in a controlled migration to the latest schema version.

+
+
+
+
+

Options for database migration

+
+

For database migration you can choose between the following options:

+
+
+
    +
  • +

    flyway (KISS based approach with migrations as SQL)

    +
  • +
  • +

    liquibase (more complex approach with database abstraction)

    +
  • +
+
+
+ +
+

==Logging

+
+
+

We recommend to use SLF4J as API for logging, that has become a de facto standard in Java as it has a much better design than java.util.logging offered by the JDK. +There are several implementations for SLF4J. For Spring applications our recommended implementation is Logback. Quarkus uses JBoss Logging which provides a JBoss Log Manager implementation for SLF4J. For more information on logging in Quarkus, see the Quarkus logging guide.

+
+
+
+

Logging Dependencies

+
+

To use Logback in your Spring application, you need to include the following dependencies:

+
+
+
+
<!-- SLF4J as logging API -->
+<dependency>
+  <groupId>org.slf4j</groupId>
+  <artifactId>slf4j-api</artifactId>
+</dependency>
+<!-- Logback as logging implementation  -->
+<dependency>
+  <groupId>ch.qos.logback</groupId>
+  <artifactId>logback-classic</artifactId>
+</dependency>
+<!-- JSON logging for cloud-native log monitoring -->
+<dependency>
+  <groupId>net.logstash.logback</groupId>
+  <artifactId>logstash-logback-encoder</artifactId>
+</dependency>
+
+
+
+

In devon4j these dependencies are provided by the devon4j-logging module.

+
+
+

In Quarkus, SLF4J and the slf4j-jboss-logmanager are directly included in the Quarkus core runtime and can be used out of the box.

+
+
+
+

Logger Access

+
+

The general pattern for accessing loggers from your code is a static logger instance per class using the following pattern:

+
+
+
+
import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class MyClass {
+  private static final Logger LOG = LoggerFactory.getLogger(MyClass.class);
+  ...
+}
+
+
+
+

For detailed documentation how to use the logger API check the SLF4j manual.

+
+
+ + + + + +
+ + +In case you are using devonfw-ide and Eclipse you can just type LOG and hit [ctrl][space] to insert the code pattern including the imports into your class. +
+
+
+
Lombok
+
+

In case you are using Lombok, you can simply use the @Slf4j annotation in your class. This causes Lombok to generate the logger instance for you.

+
+
+
+
+

Log-Levels

+
+

We use a common understanding of the log-levels as illustrated by the following table. +This helps for better maintenance and operation of the systems.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Log-levels
Log-levelDescriptionImpactActive Environments

FATAL

Only used for fatal errors that prevent the application to work at all (e.g. startup fails or shutdown/restart required)

Operator has to react immediately

all

ERROR

An abnormal error indicating that the processing failed due to technical problems.

Operator should check for known issue and otherwise inform development

all

WARNING

A situation where something worked not as expected. E.g. a business exception or user validation failure occurred.

No direct reaction required. Used for problem analysis.

all

INFO

Important information such as context, duration, success/failure of request or process

No direct reaction required. Used for analysis.

all

DEBUG

Development information that provides additional context for debugging problems.

No direct reaction required. Used for analysis.

development and testing

TRACE

Like DEBUG but exhaustive information and for code that is run very frequently. Will typically cause large log-files.

No direct reaction required. Used for problem analysis.

none (turned off by default)

+
+

Exceptions (with their stack trace) should only be logged on FATAL or ERROR level. For business exceptions typically a WARNING including the message of the exception is sufficient.

+
+
+
Configuration of Logback
+
+

The configuration of logback happens via the logback.xml file that you should place into src/main/resources of your app. +For details consult the logback configuration manual.

+
+
+ + + + + +
+ + +Logback also allows to overrule the configuration with a logback-test.xml file that you may put into src/test/resources or into a test-dependency. +
+
+
+
+
Configuration in Quarkus
+
+

There are several options you can set in the application.properties file to configure the behaviour of the logger in Quarkus. For a detailed overview, see the corresponding part of the Quarkus guide.

+
+
+
+
+

JSON-logging

+
+

For easy integration with log-monitoring, we recommend that your app logs to standard out in JSON following JSON Lines.

+
+
+

In Spring applications, this can be achieved via logstash-logback-encoder (see dependencies). In Quarkus, it can be easily achieved using the quarkus-logging-json extension (see here for more details).

+
+
+

This will produce log-lines with the following format (example formatted for readability):

+
+
+
+
{
+  "timestamp":"2000-12-31T23:59:59.999+00:00",
+  "@version":"1",
+  "message":"Processing 4 order(s) for shipment",
+  "logger_name":"com.myapp.order.logic.UcManageOrder",
+  "thread_name":"http-nio-8081-exec-6",
+  "level":"INFO",
+  "level_value":20000,
+  "appname":"myapp"
+}
+
+
+
+
Adding custom values to JSON log with Logstash
+
+

The JSON encoder even supports logging custom properties for your log-monitoring. +The trick is to use the class net.logstash.logback.argument.StructuredArguments for adding the arguments to you log message, e.g.

+
+
+
+
import static net.logstash.logback.argument.StructuredArguments.v;
+
+...
+    LOG.info("Request with {} and {} took {} ms.", v("url", url), v("status", statusCode), v("duration", millis));
+...
+
+
+
+

This will produce a JSON log-line with the following properties:

+
+
+
+
...
+  "message":"Request with url=https://api/service/v1/ordermanagement/order and status=200 took duration=251 ms",
+  "url":"https://api/service/v1/ordermanagement/order",
+  "status":"200",
+  "duration":"251",
+...
+
+
+
+

As you can quickly see besides the human readable message you also have the structured properties url, status and duration that can be extremely valuable to configure dashboards in your log-monitoring that visualize success/failure ratio as well as performance of your requests.

+
+
+
+
+

Classic log-files

+
+ + + + + +
+ + +In devon4j, we strongly recommend using JSON logging instead of classic log files. The following section refers only to devon4j Spring applications that use Logback. +
+
+
+

Even though we do not recommend anymore to write classical log-files to the local disc, here you can still find our approach for it.

+
+
+
Maven-Integration
+
+

In the pom.xml of your application add this dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java</groupId>
+  <artifactId>devon4j-logging</artifactId>
+</dependency>
+
+
+
+

The above dependency already adds transitive dependencies to SLF4J and logback. +Also it comes with configuration snippets that can be included from your logback.xml file (see configuration).

+
+
+

The logback.xml to write regular log-files can look as following:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<configuration scan="true" scanPeriod="60 seconds">
+  <property resource="com/devonfw/logging/logback/application-logging.properties" />
+  <property name="appname" value="MyApp"/>
+  <property name="logPath" value="../logs"/>
+  <include resource="com/devonfw/logging/logback/appenders-file-all.xml" />
+  <include resource="com/devonfw/logging/logback/appender-console.xml" />
+
+  <root level="DEBUG">
+    <appender-ref ref="ERROR_APPENDER"/>
+    <appender-ref ref="INFO_APPENDER"/>
+    <appender-ref ref="DEBUG_APPENDER"/>
+    <appender-ref ref="CONSOLE_APPENDER"/>
+  </root>
+
+  <logger name="org.springframework" level="INFO"/>
+</configuration>
+
+
+
+

The provided logback.xml is configured to use variables defined in the config/application.properties file. +In our example, the log file path points to ../logs/ in order to log to the tomcat log directory when starting tomcat from the bin folder. +Change it according to your custom needs.

+
+
+
config/application.properties
+
+
log.dir=../logs/
+
+
+
+
+
Log Files
+
+

The classical approach uses the following log files:

+
+
+
    +
  • +

    Error Log: Includes log entries to detect errors.

    +
  • +
  • +

    Info Log: Used to analyze system status and to detect bottlenecks.

    +
  • +
  • +

    Debug Log: Detailed information for error detection.

    +
  • +
+
+
+

The log file name pattern is as follows:

+
+
+
+
«LOGTYPE»_log_«HOST»_«APPLICATION»_«TIMESTAMP».log
+
+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2. Segments of Logfilename
ElementValueDescription

«LOGTYPE»

info, error, debug

Type of log file

«HOST»

e.g. mywebserver01

Name of server, where logs are generated

«APPLICATION»

e.g. myapp

Name of application, which causes logs

«TIMESTAMP»

YYYY-MM-DD_HH00

date of log file

+
+

Example: +error_log_mywebserver01_myapp_2013-09-16_0900.log

+
+
+

Error log from mywebserver01 at application myapp at 16th September 2013 9am.

+
+
+
+
Output format
+
+

We use the following output format for all log entries to ensure that searching and filtering of log entries work consistent for all logfiles:

+
+
+
+
[D: «timestamp»] [P: «priority»] [C: «NDC»][T: «thread»][L: «logger»]-[M: «message»]
+
+
+
+
    +
  • +

    D: Date (Timestamp in ISO8601 format e.g. 2013-09-05 16:40:36,464)

    +
  • +
  • +

    P: Priority (the log level)

    +
  • +
  • +

    C: Correlation ID (ID to identify users across multiple systems, needed when application is distributed)

    +
  • +
  • +

    T: Thread (Name of thread)

    +
  • +
  • +

    L: Logger name (use class name)

    +
  • +
  • +

    M: Message (log message)

    +
  • +
+
+
+

Example:

+
+
+
+
[D: 2013-09-05 16:40:36,464] [P: DEBUG] [C: 12345] [T: main] [L: my.package.MyClass]-[M: My message...]
+
+
+
+ + + + + +
+ + +When using devon4j-logging, this format is used by default. To achieve this format in Quarkus, set quarkus.log.console.format=[D: %d] [P: %p] [C: %X] [T: %t] [L: %c] [M: %m]%n in your properties. +
+
+
+
+
Correlation ID
+
+

In order to correlate separate HTTP requests to services belonging to the same user / session, we provide a servlet filter called DiagnosticContextFilter. +This filter takes a provided correlation ID from the HTTP header X-Correlation-Id. +If none was found, it will generate a new correlation id as UUID. +This correlation ID is added as MDC to the logger. +Therefore, it will then be included to any log message of the current request (thread). +Further concepts such as service invocations will pass this correlation ID to subsequent calls in the application landscape. Hence you can find all log messages related to an initial request simply via the correlation ID even in highly distributed systems.

+
+
+
+
Security
+
+

In order to prevent log forging attacks you can simply use the suggested JSON logging format. +Otherwise you can use com.devonfw.module.logging.common.impl.SingleLinePatternLayout as demonstrated here in order to prevent such attacks.

+
+
+ +
+

==Security +Security is today's most important cross-cutting concern of an application and an enterprise IT-landscape. We seriously care about security and give you detailed guides to prevent pitfalls, vulnerabilities, and other disasters. While many mistakes can be avoided by following our guidelines you still have to consider security and think about it in your design and implementation. The security guide will not only automatically prevent you from any harm, but will provide you hints and best practices already used in different software products.

+
+
+

An important aspect of security is proper authentication and authorization as described in access-control. In the following we discuss about potential vulnerabilities and protection to prevent them.

+
+
+
+
+

Vulnerabilities and Protection

+
+

Independent from classical authentication and authorization mechanisms there are many common pitfalls that can lead to vulnerabilities and security issues in your application such as XSS, CSRF, SQL-injection, log-forging, etc. A good source of information about this is the OWASP. +We address these common threats individually in security sections of our technological guides as a concrete solution to prevent an attack typically depends on the according technology. The following table illustrates common threats and contains links to the solutions and protection-mechanisms provided by the devonfw:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3. Security threats and protection-mechanisms
ThreatProtectionLink to details

A1 Injection

validate input, escape output, use proper frameworks

SQL Injection

A2 Broken Authentication

encrypt all channels, use a central identity management with strong password-policy

Authentication

A3 Sensitive Data Exposure

Use secured exception facade, design your data model accordingly

REST exception handling

A4 XML External Entities

Prefer JSON over XML, ensure FSP when parsing (external) XML

XML guide

A5 Broken Access Control

Ensure proper authorization for all use-cases, use @DenyAll as default to enforce

Access-control guide especially method authorization

A6 Security Misconfiguration

Use devon4j application template and guides to avoid

tutorial-newapp and sensitive configuration

A7 Cross-Site Scripting

prevent injection (see A1) for HTML, JavaScript and CSS and understand same-origin-policy

client-layer

A8 Insecure Deserialization

Use simple and established serialization formats such as JSON, prevent generic deserialization (for polymorphic types)

JSON guide especially inheritence, XML guide

A9 Using Components with Known Vulnerabilities

subscribe to security newsletters, recheck products and their versions continuously, use devonfw dependency management

CVE newsletter and dependency check

A10 Insufficient_Logging & Monitoring

Ensure to log all security related events (login, logout, errors), establish effective monitoring

Logging guide and monitoring guide

Insecure Direct Object References

Using direct object references (IDs) only with appropriate authorization

logic-layer

Cross-Site Request Forgery (CSRF)

secure mutable service operations with an explicit CSRF security token sent in HTTP header and verified on the server

CSRF guide

Log-Forging

Escape newlines in log messages

logging security

Unvalidated Redirects and Forwards

Avoid using redirects and forwards, in case you need them do a security audit on the solution.

devonfw proposes to use rich-clients (SPA/RIA). We only use redirects for login in a safe way.

+
+
+

Advanced Security

+
+

While OWASP Top 10 covers the basic aspects of application security, there are advanced standards such as OWASP ASVS. +In devonfw we address this in the +Application Security Quick Solution Guide.

+
+
+
+

Tools

+
+
Dependency Check
+
+

To address the threat Using Components with Known Vulnerabilities we recommend to use OWASP dependency check that ships with a maven plugin and can analyze your dependencies for known CVEs. +In order to run this check, you can simply call this command on any maven project:

+
+
+
+
mvn org.owasp:dependency-check-maven:6.1.5:aggregate
+
+
+
+ + + + + +
+ + +The version is just for completeness. You should check yourself for using a recent version of the plugin. +
+
+
+

If you build an devon4j spring application from our app-template you can activate the dependency check even easier with the security profile:

+
+
+
+
mvn clean install -P security
+
+
+
+

This does not run by default as it causes some overhead for the build performance. However, consider to build this in your CI at least nightly. +After the dependency check is performed, you will find the results in target/dependency-check-report.html of each module. The report will also be generated when the site is build (mvn site) even without the profile.

+
+
+
+
Penetration Testing
+
+

For penetration testing (testing for vulnerabilities) of your web application, we recommend the following tools:

+
+
+ +
+
+ +
+

==Access-Control +Access-Control is a central and important aspect of Security. It consists of two major aspects:

+
+
+ +
+
+
+
+

Authentication

+
+

Definition:

+
+
+
+
+

Authentication is the verification that somebody interacting with the system is the actual subject he claims to be.

+
+
+
+
+

The one authenticated is properly called subject or principal. There are two forms of principals you need to distinguish while designing your authentication: human users and autonomous systems. While e.g. a Kerberos/SPNEGO Single-Sign-On makes sense for human users, it is pointless for authenticating autonomous systems. For simplicity, we use the common term user to refer to any principal even though it may not be a human (e.g. in case of a service call from an external system).

+
+
+

To prove the authenticity, the user provides some secret called credentials. The most simple form of credentials is a password.

+
+
+
Implementations
+
+ + + + + +
+ + +Please never implement your own authentication mechanism or credential store. You have to be aware of implicit demands such as salting and hashing credentials, password life-cycle with recovery, expiry, and renewal including email notification confirmation tokens, central password policies, etc. This is the domain of access managers and identity management systems. In a business context you will typically already find a system for this purpose that you have to integrate (e.g. via LDAP). Otherwise you should consider establishing such a system e.g. using keycloak. +
+
+
+

We recommend using JWT when possible. For KISS, also try to avoid combining multiple authentication mechanisms (form based, basic-auth, SAMLv2, OAuth, etc.) within the same application (for different URLs).

+
+
+

For spring, check the Spring Security

+
+
+

For quarkus, check the Quarkus Authentication

+
+
+
+
+

Authorization

+
+

Definition:

+
+
+
+
+

Authorization is the verification that an authenticated user is allowed to perform the operation he intends to invoke.

+
+
+
+
+
Clarification of terms
+
+

For clarification we also want to give a common understanding of related terms that have no unique definition and consistent usage in the wild.

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4. Security terms related to authorization
TermMeaning and comment

Permission

A permission is an object that allows a principal to perform an operation in the system. This permission can be granted (given) or revoked (taken away). Sometimes people also use the term right, which is actually wrong, as a right (such as the right to be free) cannot be revoked.

Group

We use the term group in this context for an object that contains permissions. A group may also contain other groups. Then the group represents the set of all recursively contained permissions.

Role

We consider a role as a specific form of group that also contains permissions. A role identifies a specific function of a principal. A user can act in a role.

+

For simple scenarios a principal has a single role associated. In more complex situations a principal can have multiple roles but has only one active role at a time that he can choose out of his assigned roles. For KISS it is sometimes sufficient to avoid this by creating multiple accounts for the few users with multiple roles. Otherwise at least avoid switching roles at run-time in clients as this may cause problems with related states. Simply restart the client with the new role as parameter in case the user wants to switch his role.

Access Control

Any permission, group, role, etc., which declares a control for access management.

+
+
+
Suggestions on the access model
+
+

For the access model we give the following suggestions:

+
+
+
    +
  • +

    Each Access Control (permission, group, role, …​) is uniquely identified by a human readable string.

    +
  • +
  • +

    We create a unique permission for each use-case.

    +
  • +
  • +

    We define groups that combine permissions to typical and useful sets for the users.

    +
  • +
  • +

    We define roles as specific groups as required by our business demands.

    +
  • +
  • +

    We allow to associate users with a list of Access Controls.

    +
  • +
  • +

    For authorization of an implemented use case we determine the required permission. Furthermore, we determine the current user and verify that the required permission is contained in the tree spanned by all his associated Access Controls. If the user does not have the permission we throw a security exception and thus abort the operation and transaction.

    +
  • +
  • +

    We avoid negative permissions, that is a user has no permission by default and only those granted to him explicitly give him additional permission for specific things. Permissions granted can not be reduced by other permissions.

    +
  • +
  • +

    Technically we consider permissions as a secret of the application. Administrators shall not fiddle with individual permissions but grant them via groups. So the access management provides a list of strings identifying the Access Controls of a user. The individual application itself contains these Access Controls in a structured way, whereas each group forms a permission tree.

    +
  • +
+
+
+
+
Naming conventions
+
+

As stated above each Access Control is uniquely identified by a human readable string. This string should follow the naming convention:

+
+
+
+
«app-id».«local-name»
+
+
+
+

For Access Control Permissions the «local-name» again follows the convention:

+
+
+
+
«verb»«object»
+
+
+
+

The segments are defined by the following table:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 5. Segments of Access Control Permission ID
SegmentDescriptionExample

«app-id»

Is a unique technical but human readable string of the application (or microservice). It shall not contain special characters and especially no dot or whitespace. We recommend to use lower-train-case-ascii-syntax. The identity and access management should be organized on enterprise level rather than application level. Therefore permissions of different apps might easily clash (e.g. two apps might both define a group ReadMasterData but some user shall get this group for only one of these two apps). Using the «app-id». prefix is a simple but powerful namespacing concept that allows you to scale and grow. You may also reserve specific «app-id»s for cross-cutting concerns that do not actually reflect a single app e.g. to grant access to a geographic region.

shop

«verb»

The action that is to be performed on «object». We use Find for searching and reading data. Save shall be used both for create and update. Only if you really have demands to separate these two you may use Create in addition to Save. Finally, Delete is used for deletions. For non CRUD actions you are free to use additional verbs such as Approve or Reject.

Find

«object»

The affected object or entity. Shall be named according to your data-model

Product

+
+

So as an example shop.FindProduct will reflect the permission to search and retrieve a Product in the shop application. The group shop.ReadMasterData may combine all permissions to read master-data from the shop. However, also a group shop.Admin may exist for the Admin role of the shop application. Here the «local-name» is Admin that does not follow the «verb»«object» schema.

+
+
+
+
devon4j-security
+
+

The module devon4j-security provides ready-to-use code based on spring-security that makes your life a lot easier.

+
+
+
+access-control +
+
Figure 1. devon4j Security Model
+
+
+

The diagram shows the model of devon4j-security that separates two different aspects:

+
+
+
    +
  • +

    The Identity- and Access-Management is provided by according products and typically already available in the enterprise landscape (e.g. an active directory). It provides a hierarchy of primary access control objects (roles and groups) of a user. An administrator can grant and revoke permissions (indirectly) via this way.

    +
  • +
  • +

    The application security defines a hierarchy of secondary access control objects (groups and permissions). This is done by configuration owned by the application (see following section). The "API" is defined by the IDs of the primary access control objects that will be referenced from the Identity- and Access-Management.

    +
  • +
+
+
+
+
Access Control Config
+
+

In your application simply extend AccessControlConfig to configure your access control objects as code and reference it from your use-cases. An example config may look like this:

+
+
+
+
@Named
+public class ApplicationAccessControlConfig extends AccessControlConfig {
+
+  public static final String APP_ID = "MyApp";
+
+  private static final String PREFIX = APP_ID + ".";
+
+  public static final String PERMISSION_FIND_OFFER = PREFIX + "FindOffer";
+
+  public static final String PERMISSION_SAVE_OFFER = PREFIX + "SaveOffer";
+
+  public static final String PERMISSION_DELETE_OFFER = PREFIX + "DeleteOffer";
+
+  public static final String PERMISSION_FIND_PRODUCT = PREFIX + "FindProduct";
+
+  public static final String PERMISSION_SAVE_PRODUCT = PREFIX + "SaveProduct";
+
+  public static final String PERMISSION_DELETE_PRODUCT = PREFIX + "DeleteProduct";
+
+  public static final String GROUP_READ_MASTER_DATA = PREFIX + "ReadMasterData";
+
+  public static final String GROUP_MANAGER = PREFIX + "Manager";
+
+  public static final String GROUP_ADMIN = PREFIX + "Admin";
+
+  public ApplicationAccessControlConfig() {
+
+    super();
+    AccessControlGroup readMasterData = group(GROUP_READ_MASTER_DATA, PERMISSION_FIND_OFFER, PERMISSION_FIND_PRODUCT);
+    AccessControlGroup manager = group(GROUP_MANAGER, readMasterData, PERMISSION_SAVE_OFFER, PERMISSION_SAVE_PRODUCT);
+    AccessControlGroup admin = group(GROUP_ADMIN, manager, PERMISSION_DELETE_OFFER, PERMISSION_DELETE_PRODUCT);
+  }
+}
+
+
+
+
+
Configuration on Java Method level
+
+

In your use-case you can now reference a permission like this:

+
+
+
+
@Named
+public class UcSafeOfferImpl extends ApplicationUc implements UcSafeOffer {
+
+  @Override
+  @RolesAllowed(ApplicationAccessControlConfig.PERMISSION_SAVE_OFFER)
+  public OfferEto save(OfferEto offer) { ... }
+  ...
+}
+
+
+
+
+
JEE Standard
+
+

Role-based Access Control (RBAC) is commonly used for authorization. +JSR 250 defines a number of common annotations to secure your application.

+
+
+
    +
  • +

    javax.annotation.security.PermitAll specifies that no access control is required to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.DenyAll specifies that no access controls are allowed to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.RolesAllowed specifies that only a list of access controls are allowed to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.DeclareRoles defines roles for security checking.

    +
  • +
  • +

    javax.annotation.security.RunAs specifies the RunAs role for the given components.

    +
  • +
+
+
+

@PermitAll, @DenyAll, and @RolesAllowed annotations can be applied to both class and method. +A method-level annotation will override the behaviour of class-level annotation. Using multiple annotations of those 3 is not valid.

+
+
+
+
// invalid
+@PermitAll
+@DenyAll
+public String foo()
+
+// invalid and compilation fails
+@RolesAllowed("admin")
+@RolesAllowed("user")
+public String bar()
+
+// OK
+@RolesAllowed({"admin", "user"})
+public String bar()
+
+
+
+

Please note that when specifying multiple arguments to @RolesAllowed those are combined with OR (and not with AND). +So if the user has any of the specified access controls, he will be able to access the method.

+
+
+

As a best practice avoid specifying string literals to @RolesAllowed. +Instead define a class with all access controls as constants and reference them from there. +This class is typically called ApplicationAccessControlConfig in devonfw.

+
+
+

In many complicated cases where @PermitAll @DenyAll @RolesAllowed are insufficient e.g. a method should be accessed by a user in role A and not in role B at the same time, you have to verify the user role directly in the method. You can use SecurityContext class to get further needed information.

+
+
+
Spring
+
+

Spring Security also supports authorization on method level. To use it, you need to add the spring-security-config dependency. If you use Spring Boot, the dependency spring-boot-starter-security already includes spring-security-config. Then you can configure as follows:

+
+
+
    +
  • +

    prePostEnabled property enables Spring Security pre/post annotations. @PreAuthorize and @PostAuthorize annotations provide expression-based access control. See more here

    +
  • +
  • +

    securedEnabled property determines if the @Secured annotation should be enabled. @Secured can be used similarly as @RolesAllowed.

    +
  • +
  • +

    jsr250Enabled property allows us to use the JSR-250 annotations such as @RolesAllowed.

    +
  • +
+
+
+
+
@Configuration
+@EnableGlobalMethodSecurity(
+  prePostEnabled = true,
+  securedEnabled = true,
+  jsr250Enabled = true)
+public class MethodSecurityConfig
+  extends GlobalMethodSecurityConfiguration {
+}
+
+
+
+

A further read about the whole concept of Spring Security Authorization can be found here.

+
+
+
+
Quarkus
+
+

Quarkus comes with built-in security to allow for RBAC based on the common security annotations @RolesAllowed, @DenyAll, @PermitAll on REST endpoints and CDI beans. Quarkus also provides the io.quarkus.security.Authenticated annotation that will permit any authenticated user to access the resource (equivalent to @RolesAllowed("**")).

+
+
+
+
+
Data-based Permissions
+ +
+
+
Access Control Schema (deprecated)
+
+

The access-control-schema.xml approach is deprecated. The documentation can still be found in access control schema.

+
+
+ +
+

==Data-permissions

+
+
+

In some projects there are demands for permissions and authorization that is dependent on the processed data. E.g. a user may only be allowed to read or write data for a specific region. This is adding some additional complexity to your authorization. If you can avoid this it is always best to keep things simple. However, in various cases this is a requirement. Therefore the following sections give you guidance and patterns how to solve this properly.

+
+
+
+
+

Structuring your data

+
+

For all your business objects (entities) that have to be secured regarding to data permissions we recommend that you create a separate interface that provides access to the relevant data required to decide about the permission. Here is a simple example:

+
+
+
+
public interface SecurityDataPermissionCountry {
+
+  /**
+   * @return the 2-letter ISO code of the country this object is associated with. Users need
+   *         a data-permission for this country in order to read and write this object.
+   */
+  String getCountry();
+}
+
+
+
+

Now related business objects (entities) can implement this interface. Often such data-permissions have to be applied to an entire object-hierarchy. For security reasons we recommend that also all child-objects implement this interface. For performance reasons we recommend that the child-objects redundantly store the data-permission properties (such as country in the example above) and this gets simply propagated from the parent, when a child object is created.

+
+
+
+

Permissions for processing data

+
+

When saving or processing objects with a data-permission, we recommend to provide dedicated methods to verify the permission in an abstract base-class such as AbstractUc and simply call this explicitly from your business code. This makes it easy to understand and debug the code. Here is a simple example:

+
+
+
+
protected void verifyPermission(SecurityDataPermissionCountry entity) throws AccessDeniedException;
+
+
+
+
Beware of AOP
+
+

For simple but cross-cutting data-permissions you may also use AOP. This leads to programming aspects that reflectively scan method arguments and magically decide what to do. Be aware that this quickly gets tricky:

+
+
+
    +
  • +

    What if multiple of your method arguments have data-permissions (e.g. implement SecurityDataPermission*)?

    +
  • +
  • +

    What if the object to authorize is only provided as reference (e.g. Long or IdRef) and only loaded and processed inside the implementation where the AOP aspect does not apply?

    +
  • +
  • +

    How to express advanced data-permissions in annotations?

    +
  • +
+
+
+

What we have learned is that annotations like @PreAuthorize from spring-security easily lead to the "programming in string literals" anti-pattern. We strongly discourage to use this anti-pattern. In such case writing your own verifyPermission methods that you manually call in the right places of your business-logic is much better to understand, debug and maintain.

+
+
+
+
+

Permissions for reading data

+
+

When it comes to restrictions on the data to read it becomes even more tricky. In the context of a user only entities shall be loaded from the database he is permitted to read. This is simple for loading a single entity (e.g. by its ID) as you can load it and then if not permitted throw an exception to secure your code. But what if the user is performing a search query to find many entities? For performance reasons we should only find data the user is permitted to read and filter all the rest already via the database query. But what if this is not a requirement for a single query but needs to be applied cross-cutting to tons of queries? Therefore we have the following pattern that solves your problem:

+
+
+

For each data-permission attribute (or set of such) we create an abstract base entity:

+
+
+
+
@MappedSuperclass
+@EntityListeners(PermissionCheckListener.class)
+@FilterDef(name = "country", parameters = {@ParamDef(name = "countries", type = "string")})
+@Filter(name = "country", condition = "country in (:countries)")
+public abstract class SecurityDataPermissionCountryEntity extends ApplicationPersistenceEntity
+    implements SecurityDataPermissionCountry {
+
+  private String country;
+
+  @Override
+  public String getCountry() {
+    return this.country;
+  }
+
+  public void setCountry(String country) {
+    this.country = country;
+  }
+}
+
+
+
+

There are some special hibernate annotations @EntityListeners, @FilterDef, and @Filter used here allowing to apply a filter on the country for any (non-native) query performed by hibernate. The entity listener may look like this:

+
+
+
+
public class PermissionCheckListener {
+
+  @PostLoad
+  public void read(SecurityDataPermissionCountryEntity entity) {
+    PermissionChecker.getInstance().requireReadPermission(entity);
+  }
+
+  @PrePersist
+  @PreUpdate
+  public void write(SecurityDataPermissionCountryEntity entity) {
+    PermissionChecker.getInstance().requireWritePermission(entity);
+  }
+}
+
+
+
+

This will ensure that hibernate implicitly will call these checks for every such entity when it is read from or written to the database. Further to avoid reading entities from the database the user is not permitted to (and ending up with exceptions), we create an AOP aspect that automatically activates the above declared hibernate filter:

+
+
+
+
@Named
+public class PermissionCheckerAdvice implements MethodBeforeAdvice {
+
+  @Inject
+  private PermissionChecker permissionChecker;
+
+  @PersistenceContext
+  private EntityManager entityManager;
+
+  @Override
+  public void before(Method method, Object[] args, Object target) {
+
+    Collection<String> permittedCountries = this.permissionChecker.getPermittedCountriesForReading();
+    if (permittedCountries != null) { // null is returned for admins that may access all countries
+      if (permittedCountries.isEmpty()) {
+        throw new AccessDeniedException("Not permitted for any country!");
+      }
+      Session session = this.entityManager.unwrap(Session.class);
+      session.enableFilter("country").setParameterList("countries", permittedCountries.toArray());
+    }
+  }
+}
+
+
+
+

Finally to apply this aspect to all Repositories (can easily be changed to DAOs) implement the following advisor:

+
+
+
+
@Named
+public class PermissionCheckerAdvisor implements PointcutAdvisor, Pointcut, ClassFilter, MethodMatcher {
+
+  @Inject
+  private PermissionCheckerAdvice advice;
+
+  @Override
+  public Advice getAdvice() {
+    return this.advice;
+  }
+
+  @Override
+  public boolean isPerInstance() {
+    return false;
+  }
+
+  @Override
+  public Pointcut getPointcut() {
+    return this;
+  }
+
+  @Override
+  public ClassFilter getClassFilter() {
+    return this;
+  }
+
+  @Override
+  public MethodMatcher getMethodMatcher() {
+    return this;
+  }
+
+  @Override
+  public boolean matches(Method method, Class<?> targetClass) {
+    return true; // apply to all methods
+  }
+
+  @Override
+  public boolean isRuntime() {
+    return false;
+  }
+
+  @Override
+  public boolean matches(Method method, Class<?> targetClass, Object... args) {
+    throw new IllegalStateException("isRuntime()==false");
+  }
+
+  @Override
+  public boolean matches(Class<?> clazz) {
+    // when using DAOs simply change to some class like ApplicationDao
+    return DefaultRepository.class.isAssignableFrom(clazz);
+  }
+}
+
+
+
+
+

Managing and granting the data-permissions

+
+

Following our authorization guide we can simply create a permission for each country. We might simply reserve a prefix (as virtual «app-id») for each data-permission to allow granting data-permissions to end-users across all applications of the IT landscape. In our example we could create access controls country.DE, country.US, country.ES, etc. and assign those to the users. The method permissionChecker.getPermittedCountriesForReading() would then scan for these access controls and only return the 2-letter country code from it.

+
+
+ + + + + +
+ + +Before you make your decisions how to design your access controls please clarify the following questions: +
+
+
+
    +
  • +

    Do you need to separate data-permissions independent of the functional permissions? E.g. may it be required to express that a user can read data from the countries ES and PL but is only permitted to modify data from PL? In such case a single assignment of "country-permissions" to users is insufficient.

    +
  • +
  • +

    Do you want to grant data-permissions individually for each application (higher flexibility and complexity) or for the entire application landscape (simplicity, better maintenance for administrators)? In case of the first approach you would rather have access controls like app1.country.GB and app2.country.GB.

    +
  • +
  • +

    Do your data-permissions depend on objects that can be created dynamically inside your application?

    +
  • +
  • +

    If you want to grant data-permissions on other business objects (entities), how do you want to reference them (primary keys, business keys, etc.)? What reference is most stable? Which is most readable?

    +
  • +
+
+
+ +
+

==Validation

+
+
+

Validation is about checking syntax and semantics of input data. Invalid data is rejected by the application. +Therefore validation is required in multiple places of an application. E.g. the GUI will do validation for usability reasons to assist the user, early feedback and to prevent unnecessary server requests. +On the server-side validation has to be done for consistency and security.

+
+
+

In general we distinguish these forms of validation:

+
+
+
    +
  • +

    stateless validation will produce the same result for given input at any time (for the same code/release).

    +
  • +
  • +

    stateful validation is dependent on other states and can consider the same input data as valid in one case and as invalid in another.

    +
  • +
+
+
+
+

Stateless Validation

+
+

For regular, stateless validation we use the JSR303 standard that is also called bean validation (BV). +Details can be found in the specification. +As implementation we recommend hibernate-validator.

+
+
+
Example
+
+

A description of how to enable BV for spring applications can be found in the relevant Spring documentation. A guide you can use to integrate validation in Quarkus applications can be found here. For a quick summary follow these steps:

+
+
+
    +
  • +

    Make sure that hibernate-validator is located in the classpath by adding a dependency to the pom.xml.

    +
  • +
+
+
+
spring
+
+
    <dependency>
+      <groupId>org.hibernate</groupId>
+      <artifactId>hibernate-validator</artifactId>
+    </dependency>
+
+
+
+
quarkus
+
+
    <dependency>
+      <groupId>io.quarkus</groupId>
+      <artifactId>quarkus-hibernate-validator</artifactId>
+    </dependency>
+
+
+
+
    +
  • +

    For methods to validate go to their declaration and add constraint annotations to the method parameters.

    +
    +

    In spring applications you can add the @Validated annotation to the implementation (spring bean) to be validated (this is an annotation of the spring framework, so it's not available in the Quarkus context). The standard use case is to annotate the logic layer implementation, i.e. the use case implementation or component facade in case of simple logic layer pattern. Thus, the validation will be executed for service requests as well as batch processing.

    +
    +
    +
      +
    • +

      @Valid annotation to the arguments to validate (if that class itself is annotated with constraints to check).

      +
    • +
    • +

      @NotNull for required arguments.

      +
    • +
    • +

      Other constraints (e.g. @Size) for generic arguments (e.g. of type String or Integer). However, consider to create custom datatypes and avoid adding too much validation logic (especially redundant in multiple places).

      +
    • +
    +
    +
  • +
+
+
+
BookingmanagementRestServiceImpl.java
+
+
@Validated
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+  ...
+  public BookingEto saveBooking(@Valid BookingCto booking) {
+  ...
+
+
+
+
    +
  • +

    Finally add appropriate validation constraint annotations to the fields of the ETO class.

    +
  • +
+
+
+
BookingCto.java
+
+
  @Valid
+  private BookingEto booking;
+
+
+
+
BookingEto.java
+
+
  @NotNull
+  @Future
+  private Timestamp bookingDate;
+
+
+
+

A list with all bean validation constraint annotations available for hibernate-validator can be found here. In addition it is possible to configure custom constraints. Therefore it is necessary to implement an annotation and a corresponding validator. A description can also be found in the Spring documentation or with more details in the hibernate documentation.

+
+
+ + + + + +
+ + +Bean Validation in Wildfly >v8: Wildfly v8 is the first version of Wildfly implementing the JEE7 specification. It comes with bean validation based on hibernate-validator out of the box. In case someone is running Spring in Wildfly for whatever reasons, the spring based annotation @Validated would duplicate bean validation at runtime and thus should be omitted. +
+
+
+
+
GUI-Integration
+
+

TODO

+
+
+
+
Cross-Field Validation
+
+

BV has poor support for this. Best practice is to create and use beans for ranges, etc. that solve this. A bean for a range could look like so:

+
+
+
+
public class Range<V extends Comparable<V>> {
+
+  private V min;
+  private V max;
+
+  public Range(V min, V max) {
+
+    super();
+    if ((min != null) && (max != null)) {
+      int delta = min.compareTo(max);
+      if (delta > 0) {
+        throw new ValueOutOfRangeException(null, min, min, max);
+      }
+    }
+    this.min = min;
+    this.max = max;
+  }
+
+  public V getMin() ...
+  public V getMax() ...
+
+
+
+
+
+

Stateful Validation

+
+

For complex and stateful business validations we do not use BV (possible with groups and context, etc.) but follow KISS and just implement this on the server in a straight forward manner. +An example is the deletion of a table in the example application. Here the state of the table must be checked first:

+
+
+

BookingmanagementImpl.java

+
+
+
+
  private void sendConfirmationEmails(BookingEntity booking) {
+
+    if (!booking.getInvitedGuests().isEmpty()) {
+      for (InvitedGuestEntity guest : booking.getInvitedGuests()) {
+        sendInviteEmailToGuest(guest, booking);
+      }
+    }
+
+    sendConfirmationEmailToHost(booking);
+  }
+
+
+
+

Implementing this small check with BV would be a lot more effort.

+
+
+ +
+

==Aspect Oriented Programming (AOP)

+
+
+

AOP is a powerful feature for cross-cutting concerns. However, if used extensive and for the wrong things an application can get unmaintainable. Therefore we give you the best practices where and how to use AOP properly.

+
+
+
+

AOP Key Principles

+
+

We follow these principles:

+
+
+
    +
  • +

    We use spring AOP based on dynamic proxies (and fallback to cglib).

    +
  • +
  • +

    We avoid AspectJ and other mighty and complex AOP frameworks whenever possible

    +
  • +
  • +

    We only use AOP where we consider it as necessary (see below).

    +
  • +
+
+
+
+

AOP Usage

+
+

We recommend to use AOP with care but we consider it established for the following cross cutting concerns:

+
+
+ +
+
+
+

AOP Debugging

+
+

When using AOP with dynamic proxies the debugging of your code can get nasty. As you can see by the red boxes in the call stack in the debugger there is a lot of magic happening while you often just want to step directly into the implementation skipping all the AOP clutter. When using Eclipse this can easily be achieved by enabling step filters. Therefore you have to enable the feature in the Eclipse tool bar (highlighted in red).

+
+
+
+AOP debugging +
+
+
+

In order to properly make this work you need to ensure that the step filters are properly configured:

+
+
+
+Step Filter Configuration +
+
+
+

Ensure you have at least the following step-filters configured and active:

+
+
+
+
ch.qos.logback.*
+com.devonfw.module.security.*
+java.lang.reflect.*
+java.security.*
+javax.persistence.*
+org.apache.commons.logging.*
+org.apache.cxf.jaxrs.client.*
+org.apache.tomcat.*
+org.h2.*
+org.springframework.*
+
+
+
+ +
+

==Exception Handling

+
+
+
+

Exception Principles

+
+

For exceptions we follow these principles:

+
+
+
    +
  • +

    We only use exceptions for exceptional situations and not for programming control flows, etc. Creating an exception in Java is expensive and hence should not be done for simply testing whether something is present, valid or permitted. In the latter case design your API to return this as a regular result.

    +
  • +
  • +

    We use unchecked exceptions (RuntimeException) [1]

    +
  • +
  • +

    We distinguish internal exceptions and user exceptions:

    +
    +
      +
    • +

      Internal exceptions have technical reasons. For unexpected and exotic situations, it is sufficient to throw existing exceptions such as IllegalStateException. For common scenarios a own exception class is reasonable.

      +
    • +
    • +

      User exceptions contain a message explaining the problem for end users. Therefore, we always define our own exception classes with a clear, brief, but detailed message.

      +
    • +
    +
    +
  • +
  • +

    Our own exceptions derive from an exception base class supporting

    + +
  • +
+
+
+

All this is offered by mmm-util-core, which we propose as a solution. +If you use the devon4j-rest module, this is already included. For Quarkus applications, you need to add the dependency manually.

+
+
+

If you want to avoid additional dependencies, you can implement your own solution for this by creating an abstract exception class ApplicationBusinessException extending from RuntimeException. For an example of this, see our Quarkus reference application.

+
+
+
+

Exception Example

+
+

Here is an exception class from our sample application:

+
+
+
+
public class IllegalEntityStateException extends ApplicationBusinessException {
+
+  private static final long serialVersionUID = 1L;
+
+  public IllegalEntityStateException(Object entity, Object state) {
+
+    this((Throwable) null, entity, state);
+  }
+
+
+  public IllegalEntityStateException(Object entity, Object currentState, Object newState) {
+
+    this(null, entity, currentState, newState);
+  }
+
+  public IllegalEntityStateException(Throwable cause, Object entity, Object state) {
+
+    super(cause, createBundle(NlsBundleApplicationRoot.class).errorIllegalEntityState(entity, state));
+  }
+
+  public IllegalEntityStateException(Throwable cause, Object entity, Object currentState, Object newState) {
+
+    super(cause, createBundle(NlsBundleApplicationRoot.class).errorIllegalEntityStateChange(entity, currentState,
+        newState));
+  }
+
+}
+
+
+
+

The message templates are defined in the interface NlsBundleRestaurantRoot as following:

+
+
+
+
public interface NlsBundleApplicationRoot extends NlsBundle {
+
+
+  @NlsBundleMessage("The entity {entity} is in state {state}!")
+  NlsMessage errorIllegalEntityState(@Named("entity") Object entity, @Named("state") Object state);
+
+
+  @NlsBundleMessage("The entity {entity} in state {currentState} can not be changed to state {newState}!")
+  NlsMessage errorIllegalEntityStateChange(@Named("entity") Object entity, @Named("currentState") Object currentState,
+      @Named("newState") Object newState);
+
+
+  @NlsBundleMessage("The property {property} of object {object} can not be changed!")
+  NlsMessage errorIllegalPropertyChange(@Named("object") Object object, @Named("property") Object property);
+
+  @NlsBundleMessage("There is currently no user logged in")
+  NlsMessage errorNoActiveUser();
+
+
+
+
+

Handling Exceptions

+
+

For catching and handling exceptions we follow these rules:

+
+
+
    +
  • +

    We do not catch exceptions just to wrap or to re-throw them.

    +
  • +
  • +

    If we catch an exception and throw a new one, we always have to provide the original exception as cause to the constructor of the new exception.

    +
  • +
  • +

    At the entry points of the application (e.g. a service operation) we have to catch and handle all throwables. This is done via the exception-facade-pattern via an explicit facade or aspect. The devon4j-rest module already provides ready-to-use implementations for this such as RestServiceExceptionFacade that you can use in your Spring application. For Quarkus, follow the Quarkus guide on exception handling.
    +The exception facade has to …​

    +
    +
      +
    • +

      log all errors (user errors on info and technical errors on error level)

      +
    • +
    • +

      ensure that the entire exception is passed to the logger (not only the message) so that the logger can capture the entire stacktrace and the root cause is not lost.

      +
    • +
    • +

      convert the error to a result appropriable for the client and secure for Sensitive Data Exposure. Especially for security exceptions only a generic security error code or message may be revealed but the details shall only be logged but not be exposed to the client. All internal exceptions are converted to a generic error with a message like:

      +
      +
      +
      +

      An unexpected technical error has occurred. We apologize any inconvenience. Please try again later.

      +
      +
      +
      +
    • +
    +
    +
  • +
+
+
+
+

Common Errors

+
+

The following errors may occur in any devon application:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 6. Common Exceptions
CodeMessageLink

TechnicalError

An unexpected error has occurred! We apologize any inconvenience. Please try again later.

TechnicalErrorUserException.java

ServiceInvoke

«original message of the cause»

ServiceInvocationFailedException.java

+
+ +
+

==Internationalization +Internationalization (I18N) is about writing code independent from locale-specific information. +For I18N of text messages we are suggesting +mmm native-language-support.

+
+
+

In devonfw we have developed a solution to manage text internationalization. The devonfw solution comes in two aspects:

+
+
+
    +
  • +

    Bind locale information to the user.

    +
  • +
  • +

    Get the messages in the current user locale.

    +
  • +
+
+
+
+

Binding locale information to the user

+
+

We have defined two different points to bind locale information to the user, depending on whether the user is authenticated or not.

+
+
+
    +
  • +

    User not authenticated: devonfw intercepts the unsecured request and extracts the locale from it. First, we try to extract a language parameter from the request and, if that is not possible, we extract the locale from the `Accept-Language` header.

    +
  • +
  • +

    User authenticated: during the login process, application developers are responsible for filling the language parameter in the UserProfile class. This language parameter could be obtained from the DB, LDAP, the request, etc. In the devonfw sample we get the locale information from the database.

    +
  • +
+
+
+

This image shows the entire process:

+
+
+
+Internationalization +
+
+
+
+

Getting internationalized messages

+
+

devonfw has a bean that manages i18n message resolution, the ApplicationLocaleResolver. This bean is responsible for getting the current user, extracting the locale information from it, and reading the correct properties file to get the message.

+
+
+

The i18n properties file must be called ApplicationMessages_la_CO.properties where la=language and CO=country. This is an example of an i18n properties file for the English language to translate devonfw sample user roles:

+
+
+

ApplicationMessages_en_US.properties

+
+
+
+
admin=Admin
+
+
+
+

You should define an ApplicationMessages_la_CO.properties file for every language that your application needs.

+
+
+

The ApplicationLocaleResolver bean is injected into the AbstractComponentFacade class, so this bean is available in the logic layer and you only need the following code to get an internationalized message:

+
+
+
+
String msg = getApplicationLocaleResolver().getMessage("mymessage");
+
+
+
+ +
+

==XML

+
+
+

XML (Extensible Markup Language) is a W3C standard format for structured information. It has a large eco-system of additional standards and tools.

+
+
+

In Java there are many different APIs and frameworks for accessing, producing and processing XML. For the devonfw we recommend to use JAXB for mapping Java objects to XML and vice-versa. Further there is the popular DOM API for reading and writing smaller XML documents directly. When processing large XML documents StAX is the right choice.

+
+
+
+

JAXB

+
+

We use JAXB to serialize Java objects to XML or vice-versa.

+
+
+
JAXB and Inheritance
+
+

Use @XmlSeeAlso annotation to provide sub-classes. +See section "Collective Polymorphism" described here.

+
+
+
+
JAXB Custom Mapping
+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to define a custom mapping. If you create dedicated objects for the XML mapping you can easily avoid such situations. When this is not suitable use @XmlJavaTypeAdapter and provide an XmlAdapter implementation that handles the mapping. +For details see here.

+
+
+
+
+

Security

+
+

To prevent XML External Entity attacks, follow JAXP Security Guide and enable FSP.

+
+
+ +
+

==JSON

+
+
+

JSON (JavaScript Object Notation) is a popular format to represent and exchange data especially for modern web-clients. For mapping Java objects to JSON and vice-versa there is no official standard API. We use the established and powerful open-source solution Jackson. +Due to problems with the wiki of fasterxml you should try this alternative link: Jackson/AltLink.

+
+
+
+

Configure JSON Mapping

+
+

In order to avoid polluting business objects with proprietary Jackson annotations (e.g. @JsonTypeInfo, @JsonSubTypes, @JsonProperty) we propose to create a separate configuration class. Every devonfw application (sample or any app created from our app-template) therefore has a class called ApplicationObjectMapperFactory that extends ObjectMapperFactory from the devon4j-rest module. It looks like this:

+
+
+
+
@Named("ApplicationObjectMapperFactory")
+public class ApplicationObjectMapperFactory extends ObjectMapperFactory {
+
+  public RestaurantObjectMapperFactory() {
+    super();
+    // JSON configuration code goes here
+  }
+}
+
+
+
+
+

JSON and Inheritance

+
+

If you are using inheritance for your objects mapped to JSON then polymorphism can not be supported out-of-the box. So in general avoid polymorphic objects in JSON mapping. However, this is not always possible. +Have a look at the following example from our sample application:

+
+
+
+inheritance class diagram +
+
Figure 2. Transfer-Objects using Inheritance
+
+
+

Now assume you have a REST service operation as Java method that takes a ProductEto as argument. As this is an abstract class the server needs to know the actual sub-class to instantiate. +We typically do not want to specify the classname in the JSON as this should be an implementation detail and not part of the public JSON format (e.g. in case of a service interface). Therefore we use a symbolic name for each polymorphic subtype that is provided as virtual attribute @type within the JSON data of the object:

+
+
+
+
{ "@type": "Drink", ... }
+
+
+
+

Therefore you add configuration code to the constructor of ApplicationObjectMapperFactory. Here you can see an example from the sample application:

+
+
+
+
setBaseClasses(ProductEto.class);
+addSubtypes(new NamedType(MealEto.class, "Meal"), new NamedType(DrinkEto.class, "Drink"),
+  new NamedType(SideDishEto.class, "SideDish"));
+
+
+
+

We use setBaseClasses to register all top-level classes of polymorphic objects. Further we declare all concrete polymorphic sub-classes together with their symbolic name for the JSON format via addSubtypes.

+
+
+
+

Custom Mapping

+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to define a custom mapping. If you create objects dedicated for the JSON mapping you can easily avoid such situations. When this is not suitable follow these instructions to define the mapping:

+
+
+
    +
  1. +

    As an example, the use of JSR354 (javax.money) is appreciated in order to process monetary amounts properly. However, without custom mapping, the default mapping of Jackson will produce the following JSON for a MonetaryAmount:

    +
    +
    +
    "currency": {"defaultFractionDigits":2, "numericCode":978, "currencyCode":"EUR"},
    +"monetaryContext": {...},
    +"number":6.99,
    +"factory": {...}
    +
    +
    +
    +

    As clearly can be seen, the JSON contains too much information and reveals implementation secrets that do not belong here. Instead the JSON output expected and desired would be:

    +
    +
    +
    +
    "currency":"EUR","amount":"6.99"
    +
    +
    +
    +

    Even worse, when we send the JSON data to the server, Jackson will see that MonetaryAmount is an interface and does not know how to instantiate it so the request will fail. +Therefore we need a customized Serializer.

    +
    +
  2. +
  3. +

    We implement MonetaryAmountJsonSerializer to define how a MonetaryAmount is serialized to JSON:

    +
    +
    +
    public final class MonetaryAmountJsonSerializer extends JsonSerializer<MonetaryAmount> {
    +
    +  public static final String NUMBER = "amount";
    +  public static final String CURRENCY = "currency";
    +
    +  public void serialize(MonetaryAmount value, JsonGenerator jgen, SerializerProvider provider) throws ... {
    +    if (value != null) {
    +      jgen.writeStartObject();
    +      jgen.writeFieldName(MonetaryAmountJsonSerializer.CURRENCY);
    +      jgen.writeString(value.getCurrency().getCurrencyCode());
    +      jgen.writeFieldName(MonetaryAmountJsonSerializer.NUMBER);
    +      jgen.writeString(value.getNumber().toString());
    +      jgen.writeEndObject();
    +    }
    +  }
    +
    +
    +
    +

    For composite datatypes it is important to wrap the info as an object (writeStartObject() and writeEndObject()). MonetaryAmount provides the information we need by the getCurrency() and getNumber(). So that we can easily write them into the JSON data.

    +
    +
  4. +
  5. +

    Next, we implement MonetaryAmountJsonDeserializer to define how a MonetaryAmount is deserialized back as Java object from JSON:

    +
    +
    +
    public final class MonetaryAmountJsonDeserializer extends AbstractJsonDeserializer<MonetaryAmount> {
    +  protected MonetaryAmount deserializeNode(JsonNode node) {
    +    BigDecimal number = getRequiredValue(node, MonetaryAmountJsonSerializer.NUMBER, BigDecimal.class);
    +    String currencyCode = getRequiredValue(node, MonetaryAmountJsonSerializer.CURRENCY, String.class);
    +    MonetaryAmount monetaryAmount =
    +        MonetaryAmounts.getAmountFactory().setNumber(number).setCurrency(currencyCode).create();
    +    return monetaryAmount;
    +  }
    +}
    +
    +
    +
    +

    For composite datatypes we extend from AbstractJsonDeserializer as this makes our task easier. So we already get a JsonNode with the parsed payload of our datatype. Based on this API it is easy to retrieve individual fields from the payload without taking care of their order, etc. +AbstractJsonDeserializer also provides methods such as getRequiredValue to read required fields and get them converted to the desired basis datatype. So we can easily read the amount and currency and construct an instance of MonetaryAmount via the official factory API.

    +
    +
  6. +
  7. +

    Finally we need to register our custom (de)serializers with the following configuration code in the constructor of ApplicationObjectMapperFactory:+

    +
  8. +
+
+
+
+
  SimpleModule module = getExtensionModule();
+  module.addDeserializer(MonetaryAmount.class, new MonetaryAmountJsonDeserializer());
+  module.addSerializer(MonetaryAmount.class, new MonetaryAmountJsonSerializer());
+
+
+
+

Now we can read and write MonetaryAmount from and to JSON as expected.

+
+
+ +
+

==REST +REST (REpresentational State Transfer) is an inter-operable protocol for services that is more lightweight than SOAP. +However, it is no real standard and can cause confusion (see REST philosophy). +Therefore we define best practices here to guide you.

+
+
+
+

URLs

+
+

URLs are not case sensitive. Hence, we follow the best practice to use only lower-case-letters-with-hyphen-to-separate-words. +For operations in REST we distinguish the following types of URLs:

+
+
+
    +
  • +

    A collection URL is built from the REST service URL by appending the name of a collection. This is typically the name of an entity. Such a URL identifies the entire collection of all elements of this type. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity

    +
  • +
  • +

    An element URL is built from a collection URL by appending an element ID. It identifies a single element (entity) within the collection. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/42

    +
  • +
+
+
+

To follow KISS avoid using plural forms (…​/productmanagement/v1/products vs. …​/productmanagement/v1/product/42). Always use singular forms and avoid confusions (except for the rare cases where no singular exists).

+
+
+

The REST URL scheme fits perfect for CRUD operations. +For business operations (processing, calculation, advanced search, etc.) we simply append a collection URL with the name of the business operation. +Then we can POST the input for the business operation and get the result back. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/search

+
+
+
+

HTTP Methods

+
+

The following table defines the HTTP methods (verbs) and their meaning:

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 7. Usage of HTTP methods
HTTP MethodMeaning

GET

Read data (stateless).

PUT

Create or update data.

POST

Process data.

DELETE

Delete an entity.

+
+

Please also note that for (large) bulk deletions you may be forced to use POST instead of DELETE, as according to the HTTP standard DELETE must not have a payload and URLs are limited in length.

+
+
+

For general recommendations on HTTP methods for collection and element URLs see REST@wikipedia.

+
+
+
+

HTTP Status Codes

+
+

Further we define how to use the HTTP status codes for REST services properly. In general the 4xx codes correspond to an error on the client side and the 5xx codes to an error on the server side.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 8. Usage of HTTP status codes
HTTP CodeMeaningResponseComment

200

OK

requested result

Result of successful GET

204

No Content

none

Result of successful POST, DELETE, or PUT with empty result (void return)

400

Bad Request

error details

The HTTP request is invalid (parse error, validation failed)

401

Unauthorized

none

Authentication failed

403

Forbidden

none

Authorization failed

404

Not found

none

Either the service URL is wrong or the requested resource does not exist

500

Server Error

error code, UUID

Internal server error occurred, in case of an exception, see REST exception handling

+
+
+

JAX-RS

+
+

For implementing REST services we use the JAX-RS standard. +As payload encoding we recommend JSON bindings using Jackson. +To implement a REST service you simply add JAX-RS annotations. +Here is a simple example:

+
+
+
+
@ApplicationScoped
+@Path("/imagemanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public class ImagemanagementRestService {
+
+  @Inject
+  private Imagemanagement imagemanagement;
+
+  @GET
+  @Path("/image/{id}/")
+  public ImageDto getImage(@PathParam("id") long id) {
+
+    return this.imagemanagement.findImage(id);
+  }
+}
+
+
+
+

Here we can see a REST service for the business component imagemanagement. The method getImage can be accessed via HTTP GET (see @GET) under the URL path imagemanagement/image/{id} (see @Path annotations) where {id} is the ID of the requested image and will be extracted from the URL and provided as parameter id to the method getImage. It will return its result (ImageDto) as JSON (see @Produces annotation - you can also extend RestService marker interface that defines these annotations for JSON). As you can see it delegates to the logic component imagemanagement that contains the actual business logic while the service itself only exposes this logic via HTTP. The REST service implementation is a regular CDI bean that can use dependency injection.

+
+
+ + + + + +
+ + +With JAX-RS it is important to make sure that each service method is annotated with the proper HTTP method (@GET,@POST,etc.) to avoid unnecessary debugging. So you should take care not to forget to specify one of these annotations. +
+
+
+
Service-Interface
+
+

You may also separate API and implementation in case you want to reuse the API for service-client:

+
+
+
+
@Path("/imagemanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public interface ImagemanagementRestService {
+
+  @GET
+  @Path("/image/{id}/")
+  ImageEto getImage(@PathParam("id") long id);
+
+}
+
+@Named("ImagemanagementRestService")
+public class ImagemanagementRestServiceImpl implements ImagemanagementRestService {
+
+  @Override
+  public ImageEto getImage(long id) {
+
+    return this.imagemanagement.findImage(id);
+  }
+
+}
+
+
+
+
+
JAX-RS Configuration
+
+

Starting from CXF 3.0.0 it is possible to enable the auto-discovery of JAX-RS roots.

+
+
+

When the JAX-RS server is instantiated, all the scanned root and provider beans (beans annotated with javax.ws.rs.Path and javax.ws.rs.ext.Provider) are configured.

+
+
+
+
REST Exception Handling
+
+

For exceptions, a service needs to have an exception facade that catches all exceptions and handles them by writing proper log messages and mapping them to an HTTP response with a corresponding HTTP status code. +For this, devon4j provides a generic solution via RestServiceExceptionFacade that you can use within your Spring applications. You need to follow the exception guide in order for it to work out of the box because the facade needs to be able to distinguish between business and technical exceptions. +To implement a generic exception facade in Quarkus, follow the Quarkus exception guide.

+
+
+

Now your service may throw exceptions, but the facade will automatically handle them for you.

+
+
+

The general format for returning an error to the client is as follows:

+
+
+
+
{
+  "message": "A human-readable message describing the error",
+  "code": "A code identifying the concrete error",
+  "uuid": "An identifier (generally the correlation id) to help identify corresponding requests in logs"
+}
+
+
+
+
+
Pagination details
+
+

We recommend to use spring-data repositories for database access that already comes with pagination support. +Therefore, when performing a search, you can include a Pageable object. +Here is a JSON example for it:

+
+
+
+
{ "pageSize": 20, "pageNumber": 0, "sort": [] }
+
+
+
+

By increasing the pageNumber the client can browse and page through the hits.

+
+
+

As a result you will receive a Page. +It is a container for your search results just like a Collection but additionally contains pagination information for the client. +Here is a JSON example:

+
+
+
+
{ "totalElements": 1022,
+  pageable: { "pageSize": 20, "pageNumber": 0 },
+  content: [ ... ] }
+
+
+
+

The totalElements property contains the total number of hits. +This can be used by the client to compute the total number of pages and render the pagination links accordingly. +Via the pageable property the client gets back the Pageable properties from the search request. +The actual hits for the current page are returned as array in the content property.

+
+
+
+
+

REST Testing

+
+

For testing REST services in general consult the testing guide.

+
+
+

For manual testing REST services there are browser plugins:

+
+
+ +
+
+
+

Security

+
+

Your services are the major entry point to your application. Hence security considerations are important here.

+
+
+
CSRF
+
+

A common security threat is CSRF for REST services. Therefore all REST operations that are performing modifications (PUT, POST, DELETE, etc. - all except GET) have to be secured against CSRF attacks. See CSRF how to do this.

+
+
+
+
JSON top-level arrays
+
+

OWASP earlier suggested to never return JSON arrays at the top-level, to prevent attacks without rationale. +We digged deep and found anatomy-of-a-subtle-json-vulnerability. +To sum it up the attack is many years old and does not work in any recent or relevant browser. +Hence it is fine to use arrays as top-level result in a JSON REST service (means you can return List<Foo> in a Java JAX-RS service).

+
+
+ +
+

==SOAP +SOAP is a common protocol for services that is rather complex and heavy. It allows to build inter-operable and well specified services (see WSDL). SOAP is transport-neutral, which is not only an advantage. We strongly recommend to use HTTPS transport and ignore additional complex standards like WS-Security and use established HTTP-Standards such as RFC2617 (and RFC5280).

+
+
+
+
+

JAX-WS

+
+

For building web-services with Java we use the JAX-WS standard. +There are two approaches:

+
+
+
    +
  • +

    code first

    +
  • +
  • +

    contract first

    +
  • +
+
+
+

Here is an example in case you define a code-first service.

+
+
+
Web-Service Interface
+
+

We define a regular interface to define the API of the service and annotate it with JAX-WS annotations:

+
+
+
+
@WebService
+public interface TablemanagmentWebService {
+
+  @WebMethod
+  @WebResult(name = "message")
+  TableEto getTable(@WebParam(name = "id") String id);
+
+}
+
+
+
+
+
Web-Service Implementation
+
+

And here is a simple implementation of the service:

+
+
+
+
@Named
+@WebService(endpointInterface = "com.devonfw.application.mtsj.tablemanagement.service.api.ws.TablemanagmentWebService")
+public class TablemanagementWebServiceImpl implements TablemanagmentWebService {
+
+  private Tablemanagement tableManagement;
+
+  @Override
+  public TableEto getTable(String id) {
+
+    return this.tableManagement.findTable(id);
+  }
+
+
+
+
+
+

SOAP Custom Mapping

+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to write adapters for JAXB (see XML).

+
+
+
+

SOAP Testing

+
+

For testing SOAP services in general consult the testing guide.

+
+
+

For testing SOAP services manually we strongly recommend SoapUI.

+
+
+ +
+

==Service Client

+
+
+

This guide is about consuming (calling) services from other applications (micro-services). For providing services, see the Service-Layer Guide. Services can be consumed by the client or the server. As the client is typically not written in Java, you should consult the according guide for your client technology. In case you want to call a service within your Java code, this guide is the right place to get help.

+
+
+
+

Motivation

+
+

Various solutions already exist for calling services, such as RestTemplate from spring or the JAX-RS client API. Furthermore, each and every service framework offers its own API as well. These solutions might be suitable for very small and simple projects (with one or two such invocations). However, with the trend of microservices, the invocation of a service becomes a very common use-case that occurs all over the place. You typically need a solution that is very easy to use but supports flexible configuration, adding headers for authentication, mapping of errors from the server, logging success/errors with duration for performance analysis, support for synchronous and asynchronous invocations, etc. This is exactly what this devon4j service-client solution brings to you.

+
+
+
+

Usage

+
+

Spring

+
+
+

For Spring, follow the Spring rest-client guide.

+
+
+

Quarkus

+
+
+

For Quarkus, we recommend to follow the official Quarkus rest-client guide

+
+
+ +
+

==Testing

+
+
+
+

General best practices

+
+

For testing please follow our general best practices:

+
+
+
    +
  • +

    Tests should have a clear goal that should also be documented.

    +
  • +
  • +

    Tests have to be classified into different integration levels.

    +
  • +
  • +

    Tests should follow a clear naming convention.

    +
  • +
  • +

    Automated tests need to properly assert the result of the tested operation(s) in a reliable way. E.g. avoid stuff like assertThat(service.getAllEntities()).hasSize(42) or even worse tests that have no assertion at all.

    +
  • +
  • +

    Tests need to be independent of each other. Never write test-cases or tests (in Java @Test methods) that depend on another test to be executed before.

    +
  • +
  • +

    Use AssertJ to write good readable and maintainable tests that also provide valuable feedback in case a test fails. Do not use legacy JUnit methods like assertEquals anymore!

    +
  • +
  • +

    For easy understanding divide your test in three commented sections:

    +
    +
      +
    • +

      //given

      +
    • +
    • +

      //when

      +
    • +
    • +

      //then

      +
    • +
    +
    +
  • +
  • +

    Plan your tests and test data management properly before implementing.

    +
  • +
  • +

    Instead of having a too strong focus on test coverage better ensure you have covered your critical core functionality properly and review the code including tests.

    +
  • +
  • +

    Test code shall NOT be seen as second class code. You shall consider design, architecture and code-style also for your test code but do not over-engineer it.

    +
  • +
  • +

    Test automation is good but should be considered in relation to cost per use. Creating full coverage via automated system tests can cause a massive amount of test-code that can turn out as a huge maintenance hell. Always consider all aspects including product life-cycle, criticality of use-cases to test, and variability of the aspect to test (e.g. UI, test-data).

    +
  • +
  • +

    Use continuous integration and establish that the entire team wants to have clean builds and running tests.

    +
  • +
  • +

    Prefer delegation over inheritance for cross-cutting testing functionality. Good places to put this kind of code can be realized and reused via the JUnit @Rule mechanism.

    +
  • +
+
+
+
+

Test Automation Technology Stack

+
+

For test automation we use JUnit. However, we are strictly doing all assertions with AssertJ. For mocking we use Mockito. +In order to mock remote connections we use WireMock.

+
+
+

For testing entire components or sub-systems we recommend to use for Spring stack spring-boot-starter-test as lightweight and fast testing infrastructure that is already shipped with devon4j-test. For Quarkus, you can add the necessary extensions manually such as quarkus-junit5, quarkus-junit5-mockito, assertj-core etc.

+
+
+

In case you have to use a full blown JEE application server, we recommend to use arquillian. To get started with arquillian, look here.

+
+
+
+

Test Doubles

+
+

We use test doubles as a generic term for mocks, stubs, fakes, dummies, or spies to avoid confusion. Here is a short summary from stubs VS mocks:

+
+
+
    +
  • +

    Dummy objects specifying no logic at all. May declare data in a POJO style to be used as boiler plate code to parameter lists or even influence the control flow towards the test’s needs.

    +
  • +
  • +

    Fake objects actually have working implementations, but usually take some shortcut which makes them not suitable for production (an in memory database is a good example).

    +
  • +
  • +

    Stubs provide canned answers to calls made during the test, usually not responding at all to anything outside what’s programmed in for the test. Stubs may also record information about calls, such as an email gateway stub that remembers the messages it 'sent', or maybe only how many messages it 'sent'.

    +
  • +
  • +

    Mocks are objects pre-programmed with expectations, which form a specification of the calls they are expected to receive.

    +
  • +
+
+
+

We try to give some examples, which should make it somehow clearer:

+
+
+
Stubs
+
+

Best Practices for applications:

+
+
+
    +
  • +

    A good way to replace small to medium large boundary systems, whose impact (e.g. latency) should be ignored during load and performance tests of the application under development.

    +
  • +
  • +

    As the stub implementation will rely on state-based verification, there is the threat that test developers will partially reimplement the state transitions based on the replaced code. This will immediately lead to a maintenance black hole, so better use mocks to assure the expected behavior on the interface level.

    +
  • +
  • +

    Do NOT use stubs as the basis of a large amount of test cases as, due to the state-based verification of stubs, test developers will enrich the stub implementation until it becomes a large monster with its own hunger for maintenance efforts.

    +
  • +
+
+
+
+
Mocks
+
+

Best Practices for applications:

+
+
+
    +
  • +

    Replace not-needed dependencies of your system-under-test (SUT) to minimize the application context to start of your component framework.

    +
  • +
  • +

    Replace dependencies of your SUT to impact the control flow under test without establishing all the context parameters needed to match the control flow.

    +
  • +
  • +

    Remember: Not everything has to be mocked! Especially on lower levels of tests like isolated module tests you can be betrayed into a mocking delusion, where you end up in a hundred lines of code mocking the whole context and five lines executing the test and verifying the mocks behavior. Always keep in mind the benefit-cost ratio, when implementing tests using mocks.

    +
  • +
+
+
+
+
WireMock
+
+

If you need to mock remote connections such as HTTP-Servers, WireMock offers easy to use functionality. For a full description see the homepage or the github repository. Wiremock can be used either as a JUnit Rule, in Java outside of JUnit or as a standalone process. The mocked server can be configured to respond to specific requests in a given way via a fluent Java API, JSON files and JSON over HTTP. An example as an integration to JUnit can look as follows.

+
+
+
+
import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig;
+import com.github.tomakehurst.wiremock.junit.WireMockRule;
+
+public class WireMockOfferImport{
+
+  @Rule
+  public WireMockRule mockServer = new WireMockRule(wireMockConfig().dynamicPort());
+
+  @Test
+  public void requestDataTest() throws Exception {
+  int port = this.mockServer.port();
+  ...}
+
+
+
+

This creates a server on a randomly chosen free port on the running machine. You can also specify the port to be used if wanted. Other than that there are several options to further configure the server. This includes HTTPs, proxy settings, file locations, logging and extensions.

+
+
+
+
  @Test
+  public void requestDataTest() throws Exception {
+      this.mockServer.stubFor(get(urlEqualTo("/new/offers")).withHeader("Accept", equalTo("application/json"))
+      .withHeader("Authorization", containing("Basic")).willReturn(aResponse().withStatus(200).withFixedDelay(1000)
+      .withHeader("Content-Type", "application/json").withBodyFile("/wireMockTest/jsonBodyFile.json")));
+  }
+
+
+
+

This will stub the URL localhost:port/new/offers to respond with a status 200 message containing a header (Content-Type: application/json) and a body with content given in jsonBodyFile.json if the request matches several conditions. +It has to be a GET request to ../new/offers with the two given header properties.

+
+
+

Note that by default files are located in src/test/resources/__files/. When using only one WireMock server one can omit the this.mockServer before the stubFor call (static method). +You can also add a fixed delay to the response or a processing delay with WireMock.addRequestProcessingDelay(time) in order to test for timeouts.

+
+
+

WireMock can also respond with different corrupted messages to simulate faulty behaviour.

+
+
+
+
@Test(expected = ResourceAccessException.class)
+public void faultTest() {
+
+    this.mockServer.stubFor(get(urlEqualTo("/fault")).willReturn(aResponse()
+    .withFault(Fault.MALFORMED_RESPONSE_CHUNK)));
+...}
+
+
+
+

A GET request to ../fault returns an OK status header, then garbage, and then closes the connection.

+
+
+
+
+

Integration Levels

+
+

There are many discussions about the right level of integration for test automation. Sometimes it is better to focus on small, isolated modules of the system - whatever a "module" may be. In other cases it makes more sense to test integrated groups of modules. Because there is no universal answer to this question, devonfw only defines a common terminology for what could be tested. Each project must make its own decision where to put the focus of test automation. There is no worldwide accepted terminology for the integration levels of testing. In general we consider ISTQB. However, with a technical focus on test automation we want to get more precise.

+
+
+

The following picture shows a simplified view of an application based on the devonfw reference architecture. We define four integration levels that are explained in detail below. +The boxes in the picture contain parenthesized numbers. These numbers depict the lowest integration level, a box belongs to. Higher integration levels also contain all boxes of lower integration levels. When writing tests for a given integration level, related boxes with a lower integration level must be replaced by test doubles or drivers.

+
+
+
+Integration Levels +
+
+
+

The main difference between the integration levels is the amount of infrastructure needed to test them. The more infrastructure you need, the more bugs you will find, but the more unstable and the slower your tests will be. So each project has to make a trade-off between the pros and cons of including much infrastructure in tests and has to select the integration levels that fit best to the project.

+
+
+

Consider that more infrastructure does not automatically lead to better bug-detection. There may be bugs in your software that are masked by bugs in the infrastructure. The best way to find those bugs is to test with very little infrastructure.

+
+
+

External systems do not belong to any of the integration levels defined here. devonfw does not recommend involving real external systems in test automation. This means, they have to be replaced by test doubles in automated tests. An exception may be external systems that are fully under control of the own development team.

+
+
+

The following chapters describe the four integration levels.

+
+
+
Level 1 Module Test
+
+

The goal of an isolated module test is to provide fast feedback to the developer. Consequently, isolated module tests must not have any interaction with the client, the database, the file system, the network, etc.

+
+
+

An isolated module test is testing a single class or at least a small set of classes in isolation. If such classes depend on other components or external resources, etc., these shall be replaced with a test double.

+
+
+
+
public class MyClassTest extends ModuleTest {
+
+  @Test
+  public void testMyClass() {
+
+    // given
+    MyClass myClass = new MyClass();
+    // when
+    String value = myClass.doSomething();
+    // then
+    assertThat(value).isEqualTo("expected value");
+  }
+
+}
+
+
+
+

For an advanced example see here.

+
+
+
+
Level 2 Component Test
+
+

A component test aims to test components or component parts as a unit. +These tests can access resources such as a database (e.g. for DAO tests). +Further, no remote communication is intended here. Access to external systems shall be replaced by a test double.

+
+
+
    +
  • +

    For Spring stack, they are typically run with a (light-weight) infrastructure such as spring-boot-starter-test. A component-test is illustrated in the following example:

    +
    +
    +
    @SpringBootTest(classes = { MySpringBootApp.class }, webEnvironment = WebEnvironment.NONE)
    +public class UcFindCountryTest extends ComponentTest {
    +  @Inject
    +  private UcFindCountry ucFindCountry;
    +
    +  @Test
    +  public void testFindCountry() {
    +
    +    // given
    +    String countryCode = "de";
    +
    +    // when
    +    TestUtil.login("user", MyAccessControlConfig.FIND_COUNTRY);
    +    CountryEto country = this.ucFindCountry.findCountry(countryCode);
    +
    +    // then
    +    assertThat(country).isNotNull();
    +    assertThat(country.getCountryCode()).isEqualTo(countryCode);
    +    assertThat(country.getName()).isEqualTo("Germany");
    +  }
    +}
    +
    +
    +
    +

    This test will start the entire spring-context of your app (MySpringBootApp). Within the test spring will inject according spring-beans into all your fields annotated with @Inject. In the test methods you can use these spring-beans and perform your actual tests. This pattern can be used for testing DAOs/Repositories, Use-Cases, or any other spring-bean with its entire configuration including database and transactions.

    +
    +
  • +
  • +

    For Quarkus, you can similarly inject the CDI beans and perform tests. An example is shown below:

    +
    +
    +
    @QuarkusTest
    +public class UcFindCountryTest {
    +  @Inject
    +  private UcFindCountry ucFindCountry;
    +  ...
    +
    +
    +
  • +
+
+
+

When you are testing use-cases your authorization will also be in place. Therefore, you have to simulate a logon in advance, which is done via the login method in the above Spring example. The test-infrastructure will automatically do a logout for you after each test method in doTearDown.

+
+
+
+
Level 3 Subsystem Test
+
+

A subsystem test runs against the external interfaces (e.g. HTTP service) of the integrated subsystem. Subsystem tests of the client subsystem are described in the devon4ng testing guide. In devon4j the server (JEE application) is the subsystem under test. The tests act as a client (e.g. service consumer) and the server has to be integrated and started in a container.

+
+
+
    +
  • +

    With devon4j and Spring you can write a subsystem-test as easy as illustrated in the following example:

    +
    +
    +
    @SpringBootTest(classes = { MySpringBootApp.class }, webEnvironment = WebEnvironment.RANDOM_PORT)
    +public class CountryRestServiceTest extends SubsystemTest {
    +
    +  @Inject
    +  private ServiceClientFactory serviceClientFactory;
    +
    +  @Test
    +  public void testFindCountry() {
    +
    +    // given
    +    String countryCode = "de";
    +
    +    // when
    +    CountryRestService service = this.serviceClientFactory.create(CountryRestService.class);
    +    CountryEto country = service.findCountry(countryCode);
    +
    +    // then
    +    assertThat(country).isNotNull();
    +    assertThat(country.getCountryCode()).isEqualTo(countryCode);
    +    assertThat(country.getName()).isEqualTo("Germany");
    +  }
    +}
    +
    +
    +
    +

    Even though not obvious on the first look this test will start your entire application as a server on a free random port (so that it works in CI with parallel builds for different branches) and tests the invocation of a (REST) service including (un)marshalling of data (e.g. as JSON) and transport via HTTP (all in the invocation of the findCountry method).

    +
    +
  • +
+
+
+

Do not confuse a subsystem test with a system integration test. A system integration test validates the interaction of several systems where we do not recommend test automation.

+
+
+
+
Level 4 System Test
+
+

A system test has the goal to test the system as a whole against its official interfaces such as its UI or batches. The system itself runs as a separate process in a way close to a regular deployment. Only external systems are simulated by test doubles.

+
+
+

The devonfw only gives advice for automated system tests (TODO see allure testing framework). In nearly every project there must be manual system tests, too. These manual system tests are out of scope here.

+
+
+
+
Classifying Integration-Levels
+
+

For Spring stack, devon4j defines Category-Interfaces that shall be used as JUnit Categories. +Also devon4j provides abstract base classes that you may extend in your test-cases if you like.

+
+
+

devon4j further pre-configures the maven build to only run integration levels 1-2 by default (e.g. for fast feedback in continuous integration). It offers the profiles subsystemtest (1-3) and systemtest (1-4). In your nightly build you can simply add -Psystemtest to run all tests.

+
+
+
+
+

Implementation

+
+

This section introduces how to implement tests on the different levels with the given devonfw infrastructure and the proposed frameworks. +For Spring, see Spring Test Implementation

+
+
+
+

Regression testing

+
+

When it comes to complex output (even binary) that you want to regression test by comparing with an expected result, you should consider Approval Tests using ApprovalTests.Java. +If applied for the right problems, it can be very helpful.

+
+
+
+

Deployment Pipeline

+
+

A deployment pipeline is a semi-automated process that gets software-changes from version control into production. It contains several validation steps, e.g. automated tests of all integration levels. +Because devon4j should fit to different project types - from agile to waterfall - it does not define a standard deployment pipeline. But we recommend to define such a deployment pipeline explicitly for each project and to find the right place in it for each type of test.

+
+
+

For that purpose, it is advisable to have fast running test suite that gives as much confidence as possible without needing too much time and too much infrastructure. This test suite should run in an early stage of your deployment pipeline. Maybe the developer should run it even before he/she checked in the code. Usually lower integration levels are more suitable for this test suite than higher integration levels.

+
+
+

Note, that the deployment pipeline always should contain manual validation steps, at least manual acceptance testing. There also may be manual validation steps that have to be executed for special changes only, e.g. usability testing. Management and execution processes of those manual validation steps are currently not in the scope of devonfw.

+
+
+
+

Test Coverage

+
+

We are using tools (SonarQube/Jacoco) to measure the coverage of the tests. Please always keep in mind that the only reliable message of a code coverage of X% is that (100-X)% of the code is entirely untested. It does not say anything about the quality of the tests or the software though it often relates to it.

+
+
+
+

Test Configuration

+
+

This section covers test configuration in general without focusing on integration levels as in the first chapter.

+
+
+ +
+
+
Configure Test Specific Beans
+
+

Sometimes it can become handy to provide other or differently configured bean implementations via CDI than those available in production. For example, when creating beans using @Bean-annotated methods they are usually configured within those methods. WebSecurityBeansConfig shows an example of such methods.

+
+
+
+
@Configuration
+public class WebSecurityBeansConfig {
+  //...
+  @Bean
+  public AccessControlSchemaProvider accessControlSchemaProvider() {
+    // actually no additional configuration is shown here
+    return new AccessControlSchemaProviderImpl();
+  }
+  //...
+}
+
+
+
+

AccessControlSchemaProvider allows to programmatically access data defined in some XML file, e.g. access-control-schema.xml. Now, one can imagine that it would be helpful if AccessControlSchemaProvider would point to some other file than the default within a test class. That file could provide content that differs from the default. +The question is: how can I change resource path of AccessControlSchemaProviderImpl within a test?

+
+
+

One very helpful solution is to use static inner classes. +Static inner classes can contain @Bean -annotated methods, and by placing them in the classes parameter in @SpringBootTest(classes = { /* place class here*/ }) annotation the beans returned by these methods are placed in the application context during test execution. Combining this feature with inheritance allows to override methods defined in other configuration classes as shown in the following listing where TempWebSecurityConfig extends WebSecurityBeansConfig. This relationship allows to override public AccessControlSchemaProvider accessControlSchemaProvider(). Here we are able to configure the instance of type AccessControlSchemaProviderImpl before returning it (and, of course, we could also have used a completely different implementation of the AccessControlSchemaProvider interface). By overriding the method the implementation of the super class is ignored, hence, only the new implementation is called at runtime. Other methods defined in WebSecurityBeansConfig which are not overridden by the subclass are still dispatched to WebSecurityBeansConfig.

+
+
+
+
//... Other testing related annotations
+@SpringBootTest(classes = { TempWebSecurityConfig.class })
+public class SomeTestClass {
+
+  public static class TempWebSecurityConfig extends WebSecurityBeansConfig {
+
+    @Override
+    @Bean
+    public AccessControlSchemaProvider accessControlSchemaProvider() {
+
+      ClassPathResource resource = new ClassPathResource(locationPrefix + "access-control-schema3.xml");
+      AccessControlSchemaProviderImpl accessControlSchemaProvider = new AccessControlSchemaProviderImpl();
+      accessControlSchemaProvider.setAccessControlSchema(resource);
+      return accessControlSchemaProvider;
+    }
+  }
+}
+
+
+
+

The following chapter of the Spring framework documentation explains the issue, but uses a slightly different way to obtain the configuration.

+
+
+
+
Test Data
+
+

It is possible to obtain test data in two different ways depending on your test’s integration level.

+
+
+
+
+

Debugging Tests

+
+

The following two sections describe two debugging approaches for tests. Tests are either run from within the IDE or from the command line using Maven.

+
+
+
Debugging with the IDE
+
+

Debugging with the IDE is as easy as always. Even if you want to execute a SubsystemTest which needs a Spring context and a server infrastructure to run properly, you just set your breakpoints and click on Debug As → JUnit Test. The test infrastructure will take care of initializing the necessary infrastructure - if everything is configured properly.

+
+
+
+
Debugging with Maven
+
+

Please refer to the following two links to find a guide for debugging tests when running them from Maven.

+
+ +
+

In essence, you first have to execute a test using the command line. Maven will halt just before the test execution and wait for your IDE to connect to the process. When receiving a connection the test will start and then pause at any breakpoint set in advance. +The first link states that tests are started through the following command:

+
+
+
+
mvn -Dmaven.surefire.debug test
+
+
+
+

Although this is correct, it will run every test class in your project and - which is time consuming and mostly unnecessary - halt before each of these tests. +To counter this problem you can simply execute a single test class through the following command (here we execute the TablemanagementRestServiceTest from the restaurant sample application):

+
+
+
+
mvn test -Dmaven.surefire.debug test -Dtest=TablemanagementRestServiceTest
+
+
+
+

It is important to notice that you first have to execute the Maven command in the according submodule, e.g. to execute the TablemanagementRestServiceTest you have first to navigate to the core module’s directory.

+
+
+ +
+

==Transfer-Objects

+
+
+

The technical data model is defined in form of persistent entities. +However, passing persistent entities via call-by-reference across the entire application will soon cause problems:

+
+
+
    +
  • +

    Changes to a persistent entity are directly written back to the persistent store when the transaction is committed. When the entity is sent across the application, changes also tend to take place in multiple places, endangering data sovereignty and leading to inconsistency.

    +
  • +
  • +

    You want to send and receive data via services across the network and have to define what section of your data is actually transferred. If you have relations in your technical model you quickly end up loading and transferring way too much data.

    +
  • +
  • +

    Modifications to your technical data model shall not automatically have impact on your external services causing incompatibilities.

    +
  • +
+
+
+

To prevent such problems transfer-objects are used leading to a call-by-value model and decoupling changes to persistent entities.

+
+
+

In the following sections the different types of transfer-objects are explained. +You will find all according naming-conventions in the architecture-mapping

+
+
+

To structure your transfer objects, we recommend the following approaches:

+
+
+ +
+
+

Also considering the following transfer objects in specific cases:

+
+
+
+
SearchCriteriaTo
+
+

For searching we create or generate a «BusinessObject»SearchCriteriaTo representing a query to find instances of «BusinessObject».

+
+
TO
+
+

There are typically transfer-objects for data that is never persistent. +For very generic cases these just carry the suffix To.

+
+
STO
+
+

We can potentially create separate service transfer objects (STO) (if possible named «BusinessObject»Sto) to keep the service API stable and independent of the actual data-model. +However, we usually do not need this and want to keep our architecture simple. +Only create STOs if you need service versioning and support previous APIs or to provide legacy service technologies that require their own isolated data-model. +In such case you also need beanmapping between STOs and ETOs/DTOs what means extra effort and complexity that should be avoided.

+
+
+
+
+ +
+

==Bean-Mapping

+
+
+

For decoupling, you sometimes need to create separate objects (beans) for a different view. E.g. for an external service, you will use a transfer-object instead of the persistence entity so internal changes to the entity do not implicitly change or break the service.

+
+
+

Therefore you need to map similar objects, which creates a copy. This also has the benefit that modifications to the copy have no side-effect on the original source object. However, implementing such mapping code by hand is very tedious and error-prone (if new properties are added to beans but not to mapping code):

+
+
+
+
public UserEto mapUser(UserEntity source) {
+  UserEto target = new UserEto();
+  target.setUsername(source.getUsername());
+  target.setEmail(source.getEmail());
+  ...
+  return target;
+}
+
+
+
+

Therefore we are using a BeanMapper for this purpose that makes our lives a lot easier. +There are several bean mapping frameworks with different approaches.

+
+
+

For a devon4j-spring application we recommend Orika, follow Spring Bean-Mapping for an introduction to Orika and Dozer in a devon4j-spring context application.

+
+
+ + + + + +
+ + +devon4j started with Dozer as framework for Spring applications and still supports it. However, we now recommend Orika (for new projects) as it is much faster (see Performance of Java Mapping Frameworks). +
+
+
+

For a Quarkus application we recommend Mapstruct, follow Quarkus Bean-Mapping for an introduction to Mapstruct in a quarkus context application.

+
+
+ +
+

==Datatypes

+
+
+
+
+

A datatype is an object representing a value of a specific type with the following aspects:

+
+
+
    +
  • +

    It has a technical or business specific semantic.

    +
  • +
  • +

    Its JavaDoc explains the meaning and semantic of the value.

    +
  • +
  • +

    It is immutable and therefore stateless (its value assigned at construction time and can not be modified).

    +
  • +
  • +

    It is serializable.

    +
  • +
  • +

    It properly implements #equals(Object) and #hashCode() (two different instances with the same value are equal and have the same hash).

    +
  • +
  • +

    It shall ensure syntactical validation so it is NOT possible to create an instance with an invalid value.

    +
  • +
  • +

    It is responsible for formatting its value to a string representation suitable for sinks such as UI, loggers, etc. Also consider cases like a Datatype representing a password where toString() should return something like "**" instead of the actual password to prevent security accidents.

    +
  • +
  • +

    It is responsible for parsing the value from other representations such as a string (as needed).

    +
  • +
  • +

    It shall provide required logical operations on the value to prevent redundancies. Due to the immutable attribute all manipulative operations have to return a new Datatype instance (see e.g. BigDecimal.add(java.math.BigDecimal)).

    +
  • +
  • +

    It should implement Comparable if a natural order is defined.

    +
  • +
+
+
+

Based on the Datatype a presentation layer can decide how to view and how to edit the value. Therefore a structured data model should make use of custom datatypes in order to be expressive. +Common generic datatypes are String, Boolean, Number and its subclasses, Currency, etc. +Please note that both Date and Calendar are mutable and have very confusing APIs. Therefore, use JSR-310 or jodatime instead. +Even if a datatype is technically nothing but a String or a Number but logically something special it is worth to define it as a dedicated datatype class already for the purpose of having a central javadoc to explain it. On the other side avoid to introduce technical datatypes like String32 for a String with a maximum length of 32 characters as this is not adding value in the sense of a real Datatype. +It is suitable and in most cases also recommended to use the class implementing the datatype as API omitting a dedicated interface.

+
+
+
+— mmm project
+datatype javadoc +
+
+ +
+
+
+

Datatype Packaging

+
+

For the devonfw we use a common packaging schema. +The specifics for datatypes are as following:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
SegmentValueExplanation

<component>

*

Here we use the (business) component defining the datatype or general for generic datatypes.

<layer>

common

Datatypes are used across all layers and are not assigned to a dedicated layer.

<scope>

api

Datatypes are always used directly as API even though they may contain (simple) implementation logic. Most datatypes are simple wrappers for generic Java types (e.g. String) but make these explicit and might add some validation.

+
+
+

Technical Concerns

+
+

Many technologies like Dozer and QueryDSL’s (alias API) are heavily based on reflection. For them to work properly with custom datatypes, the frameworks must be able to instantiate custom datatypes with no-argument constructors. It is therefore recommended to implement a no-argument constructor for each datatype of at least protected visibility.

+
+
+
+

Datatypes in Entities

+
+

The usage of custom datatypes in entities is explained in the persistence layer guide.

+
+
+
+

Datatypes in Transfer-Objects

+
+
XML
+
+

For mapping datatypes with JAXB see XML guide.

+
+
+
+
JSON
+
+

For mapping datatypes from and to JSON see JSON custom mapping.

+
+
+ +
+

==Accessibility

+
+
+

TODO

+
+ + + +
+ +
+

==CORS support

+
+
+

When you are developing Javascript client and server applications separately, you have to deal with cross-domain issues. We have to request from an origin domain distinct from the target domain, and the browser does not allow this.

+
+
+

So, we need to prepare the server side to accept requests from other domains. We need to cover the following points:

+
+
+
    +
  • +

    Accept request from other domains.

    +
  • +
  • +

    Accept devonfw used headers like X-CSRF-TOKEN or correlationId.

    +
  • +
  • +

    Be prepared to receive secured request (cookies).

    +
  • +
+
+
+

It is important to note that if you are using security in your request (sending cookies) you have to set withCredentials flag to true in your client side request and deal with special IE8 characteristics.

+
+
+

For more information about CORS see here. Information about the CORS headers can be found here.

+
+
+
+
+

Configuring CORS support

+
+

To enable CORS support for your application, see the advanced guides. For Spring applications see here. For Quarkus follow the official Quarkus guide.

+
+
+
+

Configuration with service mesh

+
+

If you are using a service mesh, you can also define your CORS policy directly there. Here is an example from Istio.

+
+
+ +
+

==BLOB support

+
+
+

BLOB stands for Binary Large Object. A BLOB may be an image, an office document, a ZIP archive or any other multimedia object. +Often these BLOBs are large. If this is the case, you need to take care that you do not copy all the BLOB data into your application heap, e.g. when providing them via a REST service. +This could easily lead to performance problems or out-of-memory errors. +A solution for that problem is "streaming" those BLOBs directly from the database to the client. To demonstrate how this can be accomplished, devonfw provides an example.

+
+
+
+

Further Reading

+ +
+ +
+

==Java Development Kit

+
+
+

The Java Development Kit is an implementation of the Java platform. It provides the Java Virtual Machine (JVM) and the Java Runtime Environment (JRE).

+
+
+
+

Editions

+
+

The JDK exists in different editions:

+
+
+ +
+
+

As Java is evolving and also complex maintaining a JVM requires a lot of energy. +Therefore many alternative JDK editions are unable to cope with this and support latest Java versions and according compatibility. +Unfortunately OpenJDK only maintains a specific version of Java for a relative short period of time before moving to the next major version. +In the end, this technically means that OpenJDK is continuous beta and can not be used in production for reasonable software projects. +As OracleJDK changed its licensing model and can not be used for commercial usage even during development, things can get tricky. +You may want to use OpenJDK for development and OracleJDK only in production. +However, e.g. OpenJDK 11 never released a version that is stable enough for reasonable development (e.g. javadoc tool is broken and fixes are not available of OpenJDK 11 - fixed in 11.0.3 what is only available as OracleJDK 11 or you need to go to OpenJDK 12+, what has other bugs) so in the end there is no working release of OpenJDK 11. +This more or less forces you to use OracleJDK what requires you to buy a subscription so you can use it for commercial development. +However, there is AdoptOpenJDK that provides forked releases of OpenJDK with bug-fixes what might be an option. +Anyhow, as you want to have your development environment close to production, the productively used JDK (most likely OracleJDK) should be preferred also for development.

+
+
+
+

Upgrading

+
+

Until Java 8 compatibility was one of the key aspects for Java version updates (after the mess on the Swing updates with Java2 many years ago). +However, Java 9 introduced a lot of breaking changes. +This documentation wants to share the experience we collected in devonfw when upgrading from Java 8 to newer versions. +First of all we separate runtime changes that you need if you want to build your software with JDK 8 but such that it can also run on newer versions (e.g. JRE 11) +from changes required to also build your software with more recent JDKs (e.g. JDK 11 or 12).

+
+
+
Runtime Changes
+
+

This section describes required changes to your software in order to make it run also with versions newer than Java 8.

+
+
+
Classes removed from JDK
+
+

The first thing that most users hit when running their software with newer Java versions is a ClassNotFoundException like this:

+
+
+
+
Caused by: java.lang.ClassNotFoundException: javax.xml.bind.JAXBException
+
+
+
+

As Java 9 introduced a module system with Jigsaw, the JDK that has been a monolithic mess is now a well-defined set of structured modules. +Some of the classes that used to come with the JDK moved to modules that were not available by default in Java 9 and have even been removed entirely in later versions of Java. +Therefore you should simply treat such code just like any other 3rd party component that you can add as a (maven) dependency. +The following table gives you the required hints to make your software work even with such classes / modules removed from the JDK (please note that the specified version is just a suggestion that worked, feel free to pick a more recent or more appropriate version):

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 9. Dependencies for classes removed from Java 8 since 9+
ClassGroupIdArtifactIdVersion

javax.xml.bind.*

javax.xml.bind

jaxb-api

2.3.1

com.sun.xml.bind.*

org.glassfish.jaxb

jaxb-runtime

2.3.1

java.activation.*

javax.activation

javax.activation-api

1.2.0

java.transaction.*

javax.transaction

javax.transaction-api

1.2

java.xml.ws.*

javax.xml.ws

jaxws-api

2.3.1

javax.jws.*

javax.jws

javax.jws-api

1.1

javax.annotation.*

javax.annotation

javax.annotation-api

1.3.2

+
+
+
3rd Party Updates
+
+

Further, internal and unofficial APIs (e.g. sun.misc.Unsafe) have been removed. +These are typically not used by your software directly but by low-level 3rd party libraries like asm that need to be updated. +Also simple things like the Java version have changed (from 1.8.x to 9.x, 10.x, 11.x, 12.x, etc.). +Some 3rd party libraries were parsing the Java version in a very naive way, making them unable to be used with Java 9+:

+
+
+
+
Caused by: java.lang.NullPointerException
+   at org.apache.maven.surefire.shade.org.apache.commons.lang3.SystemUtils.isJavaVersionAtLeast (SystemUtils.java:1626)
+
+
+
+

Therefore the following table gives an overview of common 3rd party libraries that have been affected by such breaking changes and need to be updated to at least the specified version:

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 10. Minimum recommended versions of common 3rd party for Java 9+
GroupIdArtifactIdVersionIssue

org.apache.commons

commons-lang3

3.7

LANG-1365

cglib

cglib

3.2.9

102, 93, 133

org.ow2.asm

asm

7.1

2941

org.javassist

javassist

3.25.0-GA

194, 228, 246, 171

+
+
+
ResourceBundles
+
+

For internationalization (i18n) and localization (l10n) ResourceBundle is used for language and country specific texts and configurations as properties (e.g. MyResourceBundle_de.properties). With Java modules there are changes and impacts you need to know to get things working. The most important change is documented in the JavaDoc of ResourceBundle. However, instead of using ResourceBundleProvider and refactoring your entire code causing incompatibilities, you can simply put the resource bundles in a regular JAR on the classpath rather than a named module (or into the launching app). +If you want to implement (new) Java modules with i18n support, you can have a look at mmm-nls.

+
+
+
+
+
Buildtime Changes
+
+

If you also want to change your build to work with a recent JDK you also need to ensure that test frameworks and maven plugins properly support this.

+
+
+
Findbugs
+
+

Findbugs does not work with Java 9+ and is actually a dead project. +The new findbugs is SpotBugs. +For maven the new solution is spotbugs-maven-plugin:

+
+
+
+
<plugin>
+  <groupId>com.github.spotbugs</groupId>
+  <artifactId>spotbugs-maven-plugin</artifactId>
+  <version>3.1.11</version>
+</plugin>
+
+
+
+
+
Test Frameworks
+ + ++++++ + + + + + + + + + + + + + + + + +
Table 11. Minimum recommended versions of common 3rd party test frameworks for Java 9+
GroupIdArtifactIdVersionIssue

org.mockito

mockito-core

2.23.4

1419, 1696, 1607, 1594, 1577, 1482

+
+
+
Maven Plugins
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 12. Minimum recommended versions of common maven plugins for Java 9+
GroupIdArtifactId(min.) VersionIssue

org.apache.maven.plugins

maven-compiler-plugin

3.8.1

x

org.apache.maven.plugins

maven-surefire-plugin

2.22.2

SUREFIRE-1439

org.apache.maven.plugins

maven-surefire-report-plugin

2.22.2

SUREFIRE-1439

org.apache.maven.plugins

maven-archetype-plugin

3.1.0

x

org.apache.maven.plugins

maven-javadoc-plugin

3.1.0

x

org.jacoco

jacoco-maven-plugin

0.8.3

663

+
+
+
Maven Usage
+
+

With Java modules you can not run Javadoc standalone anymore or you will get this error when running mvn javadoc:javadoc:

+
+
+
+
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-javadoc-plugin:3.1.1:javadoc (default-cli) on project mmm-base: An error has occurred in Javadoc report generation:
+[ERROR] Exit code: 1 - error: module not found: io.github.mmm.base
+[ERROR]
+[ERROR] Command line was: /projects/mmm/software/java/bin/javadoc @options @packages @argfile
+
+
+
+

As a solution or workaround you need to include the compile goal into your build lifecycle so the module-path is properly configured:

+
+
+
+
mvn compile javadoc:javadoc
+
+
+
+
+
+
+ +
+

We want to give credits and say thanks to the following articles that have been there before and helped us on our way:

+
+ +
+
+
+
+1. Whether to use checked exceptions or not is a controversial topic. Arguments for both sides can be found under The Trouble with Checked Exceptions, Unchecked Exceptions — The Controversy, and Checked Exceptions are Evil. The arguments in favor of unchecked exceptions tend to prevail for applications built with devon4j. Therefore, unchecked exceptions should be used for a consistent style. +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/devon4j-layers.html b/docs/devonfw.github.io/1.0/devon4j.wiki/devon4j-layers.html new file mode 100644 index 00000000..5fa109ac --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/devon4j-layers.html @@ -0,0 +1,1164 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==Layers

+
+ +
+

==Client Layer

+
+
+

There are various technical approaches to building GUI clients. The devonfw proposes rich clients that connect to the server via data-oriented services (e.g. using REST with JSON). +In general, we have to distinguish among the following types of clients:

+
+
+
    +
  • +

    web clients

    +
  • +
  • +

    native desktop clients

    +
  • +
  • +

    (native) mobile clients

    +
  • +
+
+
+

Our main focus is on web-clients. In our sample application my-thai-star we offer a responsive web-client based on Angular following devon4ng that integrates seamlessly with the back ends of my-thai-star available for Java using devon4j as well as .NET/C# using devon4net. For building angular clients read the separate devon4ng guide.

+
+
+

JavaScript for Java Developers

+
+

In order to get started with client development as a Java developer we give you some hints to get started. Also if you are an experienced JavaScript developer and want to learn Java this can be helpful. First, you need to understand that the JavaScript ecosystem is as large as the Java ecosystem and developing a modern web client requires a lot of knowledge. The following table helps you as experienced developer to get an overview of the tools, configuration-files, and other related aspects from the new world to learn. Also it helps you to map concepts between the ecosystems. Please note that we list the tools recommended by devonfw here (and we know that there are alternatives not listed here such as gradle, grunt, bower, etc.).

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Aspects in JavaScript and Java ecosystem
TopicAspectJavaScriptJava

Programming

Language

TypeScript (extends JavaScript)

Java

Runtime

VM

nodejs (or web-browser)

jvm

Build- & Dependency-Management

Tool

npm or yarn

maven

Config

package.json

pom.xml

Repository

npm repo

maven central (repo search)

Build cmd

ng build or npm run build (goals are not standardized in npm)

mvn install (see lifecycle)

Test cmd

ng test

mvn test

Testing

Test-Tool

jasmine

junit

Test-Runner

karma

junit / surefire

E2E Testing

Protractor

Selenium

Code Analysis

Code Coverage

ng test --no-watch --code-coverage

JaCoCo

Development

IDE

MS VS Code or IntelliJ

Eclipse or IntelliJ

Framework

Angular (etc.)

Spring or Quarkus

+
+ +
+

==Service Layer

+
+
+

The service layer is responsible for exposing functionality made available by the logical layer to external consumers over a network via technical protocols.

+
+
+
+

Types of Services

+
+

Before you start creating your services you should consider some general design aspects:

+
+
+
    +
  • +

    Do you want to create a RPC service?

    +
  • +
  • +

    Or is your problem better addressed by messaging or eventing?

    +
  • +
  • +

    Who will consume your service?

    +
    +
      +
    • +

      Do you have one or multiple consumers?

      +
    • +
    • +

      Do web-browsers have to use your service?

      +
    • +
    • +

      Will apps from other vendors or parties have to consume your service that you can not influence if the service may have to change or be extended?

      +
    • +
    +
    +
  • +
+
+
+

For RPC a common choice is REST but there are also interesting alternatives like gRPC. We also have a guide for SOAP but this technology should rather be considered as legacy and is not recommended for new services.

+
+
+

When it comes to messaging in Java the typical answer will be JMS. However, a very promising alternative is Kafka.

+
+
+
+

Versioning

+
+

For RPC services consumed by other applications we use versioning to prevent incompatibilities between applications when deploying updates. This is done by the following conventions:

+
+
+
    +
  • +

    We define a version number and prefix it with v (e.g. v1).

    +
  • +
  • +

    If we support previous versions we use that version numbers as part of the Java package defining the service API (e.g. com.foo.application.component.service.api.v1)

    +
  • +
  • +

    We use the version number as part of the service name in the remote URL (e.g. https://application.foo.com/services/rest/component/v1/resource)

    +
  • +
  • +

    Whenever breaking changes are made to the API, create a separate version of the service and increment the version (e.g. v1 → v2). The implementations of the different versions of the service contain compatibility code and delegate to the same unversioned use-case of the logic layer whenever possible.

    +
  • +
  • +

    For maintenance and simplicity, avoid keeping more than one previous version.

    +
  • +
+
+
+
+

Interoperability

+
+

For services that are consumed by clients with different technology, interoperability is required. This is addressed by selecting the right protocol, following protocol-specific best practices and following our considerations especially simplicity.

+
+
+
+

Service Considerations

+
+

The term service is quite generic and therefore easily misunderstood. It is a unit exposing coherent functionality via a well-defined interface over a network. For the design of a service, we consider the following aspects:

+
+
+
    +
  • +

    self-contained
    +The entire API of the service shall be self-contained and have no dependencies on other parts of the application (other services, implementations, etc.).

    +
  • +
  • +

    idempotence
    +E.g. creation of the same master-data entity has no effect (no error)

    +
  • +
  • +

    loosely coupled
    +Service consumers have minimum knowledge and dependencies on the service provider.

    +
  • +
  • +

    normalized
    +Complete, no redundancy, minimal

    +
  • +
  • +

    coarse-grained
    +Service provides rather large operations (save entire entity or set of entities rather than individual attributes)

    +
  • +
  • +

    atomic
    +Process individual entities (for processing large sets of data, use a batch instead of a service)

    +
  • +
  • +

    simplicity
    +Avoid polymorphism, RPC methods with unique name per signature and no overloading, avoid attachments (consider separate download service), etc.

    +
  • +
+
+
+
+

Security

+
+

Your services are the major entry point to your application. Hence, security considerations are important here.

+
+
+

See REST Security.

+
+
+ +
+

==Logic Layer

+
+
+

The logic layer is the heart of the application and contains the main business logic. +According to our business architecture, we divide an application into components. +For each component, the logic layer defines different use-cases. Another approach is to define a component-facade, which we do not recommend for future applications. Especially for quarkus applications, we want to simplify things and highly suggest omitting component-facade completely and using use-cases only. +It is very important that you follow the links to understand the concept of use-case in order to properly implement your business logic.

+
+
+
+

Responsibility

+
+

The logic layer is responsible to implement the business logic according to the specified functional demands and requirements. +Therefore, it creates the actual value of the application. The logic layer is responsible for invoking business logic in external systems. +The following additional aspects are also included in its responsibility:

+
+
+ +
+
+
+

Security

+
+

The logic layer is the heart of the application. It is also responsible for authorization and hence security is important in this current case. Every method exposed in an interface needs to be annotated with an authorization check, stating what role(s) a caller must provide in order to be allowed to make the call. The authorization concept is described here.

+
+
+
Direct Object References
+
+

A security threat are Insecure Direct Object References. This simply gives you two options:

+
+
+
    +
  • +

    avoid direct object references

    +
  • +
  • +

    ensure that direct object references are secure

    +
  • +
+
+
+

Especially when using REST, direct object references via technical IDs are common sense. This implies that you have a proper authorization in place. This is especially tricky when your authorization does not only rely on the type of the data and according to static permissions but also on the data itself. Vulnerabilities for this threat can easily happen by design flaws and inadvertence. Here is an example from our sample application:

+
+
+

We have a generic use-case to manage BLOBs. In the first place, it makes sense to write a generic REST service to load and save these BLOBs. However, the permission to read or even update such BLOB depends on the business object hosting the BLOB. Therefore, such a generic REST service would open the door for this OWASP A4 vulnerability. To solve this in a secure way, you need individual services for each hosting business object to manage the linked BLOB and have to check permissions based on the parent business object. In this example the ID of the BLOB would be the direct object reference and the ID of the business object (and a BLOB property indicator) would be the indirect object reference.

+
+ +
+

==Component Facade

+
+
+ + + + + +
+ + +Our recommended approach for implementing the logic layer is use-cases +
+
+
+

For each component of the application, the logic layer defines a component facade. +This is an interface defining all business operations of the component. +It carries the name of the component («Component») and has an implementation named «Component»Impl (see implementation).

+
+
+
+
API
+
+

The component facade interface defines the logic API of the component and has to be business oriented. +This means that all parameters and return types of all methods from this API have to be business transfer-objects, datatypes (String, Integer, MyCustomerNumber, etc.), or collections of these. +The API may also only access objects of other business components listed in the (transitive) dependencies of the business-architecture.

+
+
+

Here is an example how such an API may look like:

+
+
+
+
public interface Bookingmanagement {
+
+  BookingEto findBooking(Long id);
+
+  BookingCto findBookingCto(Long id);
+
+  Page<BookingEto> findBookingEtos(BookingSearchCriteriaTo criteria);
+
+  void approveBooking(BookingEto booking);
+
+}
+
+
+
+
+
Implementation
+
+

The implementation of an interface from the logic layer (a component facade or a use-case) carries the name of that interface with the suffix Impl and is annotated with @Named. +An implementation typically needs access to the persistent data. +This is done by injecting the corresponding repository (or DAO). +According to data-sovereignty, only repositories of the same business component may be accessed directly. +For accessing data from other components the implementation has to use the corresponding API of the logic layer (the component facade). Further, it shall not expose persistent entities from the domain layer and has to map them to transfer objects using the bean-mapper.

+
+
+
+
@Named
+@Transactional
+public class BookingmanagementImpl extends AbstractComponentFacade implements Bookingmanagement {
+
+  @Inject
+  private BookingRepository bookingRepository;
+
+  @Override
+  public BookingEto findBooking(Long id) {
+
+    LOG.debug("Get Booking with id {} from database.", id);
+    BookingEntity entity = this.bookingRepository.findOne(id);
+    return getBeanMapper().map(entity, BookingEto.class);
+  }
+}
+
+
+
+

As you can see, entities (BookingEntity) are mapped to corresponding ETOs (BookingEto). +Further details about this can be found in bean-mapping.

+
+ +
+

==UseCase +A use-case is a small unit of the logic layer responsible for an operation on a particular entity (business object). +We leave it up to you to decide whether you want to define an interface (API) for each use-case or provide an implementation directly.

+
+
+

Following our architecture-mapping (for classic and modern project), use-cases are named Uc«Operation»«BusinessObject»[Impl]. The prefix Uc stands for use-case and allows to easily find and identify them in your IDE. The «Operation» stands for a verb that is operated on the entity identified by «BusinessObject». +For CRUD we use the standard operations Find and Manage that can be generated by CobiGen. This also separates read and write operations (e.g. if you want to do CQRS, or to configure read-only transactions for read operations).

+
+
+

In our example, we choose to define an interface for each use-case. We also use *To to refer to any type of transfer object. Please follow our guide to understand more about different types of transfer object e.g. Eto, Dto, Cto

+
+
+
+
Find
+
+

The UcFind«BusinessObject» defines all read operations to retrieve and search the «BusinessObject». +Here is an example:

+
+
+
+
public interface UcFindBooking {
+  //*To = Eto, Dto or Cto
+  Booking*To findBooking(Long id);
+}
+
+
+
+
+
Manage
+
+

The UcManage«BusinessObject» defines all CRUD write operations (create, update and delete) for the «BusinessObject». +Here is an example:

+
+
+
+
public interface UcManageBooking {
+
+  //*To = Eto, Dto or Cto
+  Booking*To saveBooking(Booking*To booking);
+
+  void deleteBooking(Long id);
+
+}
+
+
+
+
+
Custom
+
+

Any other non CRUD operation Uc«Operation»«BusinessObject» uses any other custom verb for «Operation». +Typically, such custom use-cases only define a single method. +Here is an example:

+
+
+
+
public interface UcApproveBooking {
+
+  //*To = Eto, Dto or Cto
+  void approveBooking(Booking*To booking);
+
+}
+
+
+
+
+
Implementation
+
+

The implementation should carry its own name and the suffix Impl and is annotated with @Named and @ApplicationScoped. It will need access to the persistent data which is done by injecting the corresponding repository (or DAO). Furthermore, it shall not expose persistent entities from the data access layer and has to map them to transfer objects using the bean-mapper. Please refer to our bean mapping, transfer object and dependency injection documentation for more information. +Here is an example:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcManageBookingImpl implements UcManageBooking {
+
+  @Inject
+  private BookingRepository bookingRepository;
+
+  @Override
+  public void deleteBooking(Long id) {
+
+    LOG.debug("Delete Booking with id {} from database.", id);
+    this.bookingRepository.deleteById(id);
+  }
+}
+
+
+
+

The use-cases can then be injected directly into the service.

+
+
+
+
@Named("BookingmanagementRestService")
+@Validated
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+
+  @Inject
+  private UcFindBooking ucFindBooking;
+
+  @Inject
+  private UcManageBooking ucManageBooking;
+
+  @Inject
+  private UcApproveBooking ucApproveBooking;
+}
+
+
+
+
+
Internal use case
+
+

Sometimes, a component with multiple related entities and many use-cases needs to reuse business logic internally. +Of course, this can be exposed as an official use-case API but this will imply using transfer-objects (ETOs) instead of entities. In some cases, this is undesired e.g. for better performance to prevent unnecessary mapping of entire collections of entities. +In the first place, you should try to use abstract base implementations providing reusable methods the actual use-case implementations can inherit from. +If your business logic is even more complex and you have multiple aspects of business logic to share and reuse but also run into multi-inheritance issues, you may also just create use-cases that have their interface located in the impl scope package right next to the implementation (or you may just skip the interface). In such a case, you may define methods that directly take or return entity objects. +To avoid confusion with regular use-cases, we recommend to add the Internal suffix to the type name leading to Uc«Operation»«BusinessObject»Internal[Impl].

+
+
+ +
+

==Data-Access Layer

+
+
+

The data-access layer is responsible for all outgoing connections to access and process data. This is mainly about accessing data from a persistent data-store. External system could also be accessed from the data-access layer if they match this definition, e.g. a mongo-db via rest services.

+
+
+

Note: In the modern project structure, this layer is replaced by the domain layer.

+
+
+
+
+

Database

+
+

You need to make your choice for a database. Options are documented here.

+
+
+

The classical approach is to use a Relational Database Management System (RDMS). In such a case, we strongly recommend to follow our JPA Guide. Some NoSQL databases are supported by spring-data so you can consider the repository guide.

+
+
+ +
+

==Batch Layer

+
+
+

We understand batch processing as a bulk-oriented, non-interactive, typically long running execution of tasks. For simplicity, we use the term "batch" or "batch job" for such tasks in the following documentation.

+
+
+

devonfw uses Spring Batch as a batch framework.

+
+
+

This guide explains how Spring Batch is used in devonfw applications. It focuses on aspects which are special to devonfw. If you want to learn about spring-batch you should adhere to springs references documentation.

+
+
+

There is an example of a simple batch implementation in the my-thai-star batch module.

+
+
+

In this chapter, we will describe the overall architecture (especially concerning layering) and how to administer batches.

+
+
+
+

Layering

+
+

Batches are implemented in the batch layer. The batch layer is responsible for batch processes, whereas the business logic is implemented in the logic layer. Compared to the service layer, you may understand the batch layer just as a different way of accessing the business logic. +From a component point of view, each batch is implemented as a subcomponent in the corresponding business component. +The business component is defined by the business architecture.

+
+
+

Let’s make an example for that. The sample application implements a batch for exporting ingredients. This ingredientExportJob belongs to the dishmanagement business component. +So the ingredientExportJob is implemented in the following package:

+
+
+
+
<basepackage>.dishmanagement.batch.impl.*
+
+
+
+

Batches should invoke use cases in the logic layer for doing their work. +Only "batch specific" technical aspects should be implemented in the batch layer.

+
+
+
+
+

Example: +For a batch, which imports product data from a CSV file, this means that all code for actually reading and parsing the CSV input file is implemented in the batch layer. +The batch calls the use case "create product" in the logic layer for actually creating the products for each line read from the CSV input file.

+
+
+
+
+
Directly accessing data access layer
+
+

In practice, it is not always appropriate to create use cases for every bit of work a batch should do. Instead, the data access layer can be used directly. +An example for that is a typical batch for data retention which deletes out-of-time data. +Often, deleting out-dated data is done by invoking a single SQL statement. It is appropriate to implement that SQL in a Repository or DAO method and call this method directly from the batch. +But be careful: this pattern is a simplification which could lead to business logic cluttered in different layers, which reduces the maintainability of your application. +It is a typical design decision you have to make when designing your specific batches.

+
+
+
+
+

Project structure and packaging

+
+

Batches will be implemented in a separate Maven module to keep the application core free of batch dependencies. The batch module includes a dependency on the application core-module to allow the reuse of the use cases, DAOs etc. +Additionally the batch module has dependencies on the required spring batch jars:

+
+
+
+
  <dependencies>
+
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>mtsj-core</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter-batch</artifactId>
+    </dependency>
+
+  </dependencies>
+
+
+
+

To allow an easy start of the batches from the command line it is advised to create a bootified jar for the batch module by adding the following to the pom.xml of the batch module:

+
+
+
+
  <build>
+    <resources>
+      <resource>
+        <directory>src/main/resources</directory>
+        <filtering>true</filtering>
+      </resource>
+    </resources>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <configuration>
+          <excludes>
+            <exclude>config/application.properties</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+      <!-- Create bootified jar for batch execution via command line.
+           Your applications spring boot app is used as main-class.
+       -->
+      <plugin>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-maven-plugin</artifactId>
+        <configuration>
+          <mainClass>com.devonfw.application.mtsj.SpringBootApp</mainClass>
+          <classifier>bootified</classifier>
+        </configuration>
+        <executions>
+          <execution>
+            <goals>
+              <goal>repackage</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+
+
+
+

Implementation

+
+

Most of the details about the implementation of batches are described in the spring batch documentation. +There is nothing special about implementing batches in devonfw. You will find an easy example in my-thai-star.

+
+
+
+

Starting from command line

+
+

Devonfw advises to start batches via command line. This is most common to many ops teams and allows easy integration in existing schedulers. In general batches are started with the following command:

+
+
+
+
java -jar <app>-batch-<version>-bootified.jar --spring.main.web-application-type=none --spring.batch.job.enabled=true --spring.batch.job.names=<myJob> <params>
+
+
+ ++++ + + + + + + + + + + + + + + + + + + + + +
ParameterExplanation

--spring.main.web-application-type=none

This disables the web app (e.g. Tomcat)

--spring.batch.job.names=<myJob>

This specifies the name of the job to run. If you leave this out ALL jobs will be executed, which probably does not make too much sense.

<params>

(Optional) additional parameters which are passed to your job

+
+

This will launch your normal spring boot app, disables the web application part and runs the designated job via Spring Boots org.springframework.boot.autoconfigure.batch.JobLauncherCommandLineRunner.

+
+
+
+

Scheduling

+
+

In the real world, scheduling of batches is not as simple as it might look at first.

+
+
+
    +
  • +

    Multiple batches have to be executed in order to achieve complex tasks. If one of those batches fails the further execution has to be stopped and operations should be notified for example.

    +
  • +
  • +

    Input files or those created by batches have to be copied from one node to another.

    +
  • +
  • +

    Scheduling batch executing could get complex easily (quarterly jobs, run job on first workday of a month, …​)

    +
  • +
+
+
+

For devonfw we propose the batches themselves should not mess around with details of scheduling. +Likewise your application should not do so. This complexity should be externalized to a dedicated batch administration service or scheduler. +This service could be a complex product or a simple tool like cron. We propose Rundeck as an open source job scheduler.

+
+
+

This gives full control to operations to choose the solution which fits best into existing administration procedures.

+
+
+
+

Handling restarts

+
+

If you start a job with the same parameters set after a failed run (BatchStatus.FAILED) a restart will occur. +In many cases your batch should then not reprocess all items it processed in the previous runs. +For that you need some logic to start at the desired offset. There are different ways to implement such logic:

+
+
+
    +
  • +

    Marking processed items in the database in a dedicated column

    +
  • +
  • +

    Write all IDs of items to process in a separate table as an initialization step of your batch. You can then delete IDs of already processed items from that table during the batch execution.

    +
  • +
  • +

    Storing restart information in springs ExecutionContext (see below)

    +
  • +
+
+
+
Using spring batch ExecutionContext for restarts
+
+

By implementing the ItemStream interface in your ItemReader or ItemWriter you may store information about the batch progress in the ExecutionContext. You will find an example for that in the CountJob in My Thai Star.

+
+
+

Additional hint: It is important that the bean definition methods of your ItemReader/ItemWriter return types implementing ItemStream (and not just ItemReader or ItemWriter alone). For that, the ItemStreamReader and ItemStreamWriter interfaces are provided.

+
+
+
+
+

Exit codes

+
+

Your batches should create a meaningful exit code to allow reaction to batch errors e.g. in a scheduler. +For that spring batch automatically registers an org.springframework.boot.autoconfigure.batch.JobExecutionExitCodeGenerator. To make this mechanism work your spring boot app main class has to populate this exit code to the JVM:

+
+
+
+
@SpringBootApplication
+public class SpringBootApp {
+
+  public static void main(String[] args) {
+    if (Arrays.stream(args).anyMatch((String e) -> e.contains("--spring.batch.job.names"))) {
+      // if executing batch job, explicitly exit jvm to report error code from batch
+      System.exit(SpringApplication.exit(SpringApplication.run(SpringBootApp.class, args)));
+    } else {
+      // normal web application start
+      SpringApplication.run(SpringBootApp.class, args);
+    }
+  }
+}
+
+
+
+
+

Stop batches and manage batch status

+
+

Spring batch uses several database tables to store the status of batch executions. +Each execution may have different status. +You may use this mechanism to gracefully stop batches. +Additionally in some edge cases (batch process crashed) the execution status may be in an undesired state. +E.g. the state will be running, despite the process crashed sometime ago. +For that cases you have to change the status of the execution in the database.

+
+
+
CLI-Tool
+
+

Devonfw provides an easy-to-use CLI tool to manage the execution status of your jobs. +The tool is implemented in the devonfw module devon4j-batch-tool. It will provide a runnable jar, which may be used as follows:

+
+
+
+
List names of all previous executed jobs
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar jobs list

+
+
Stop job named 'countJob'
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar jobs stop countJob

+
+
Show help
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar

+
+
+
+
+

As you can see, each invocation includes the JDBC connection string to your database. +This means that you have to make sure that the corresponding DB driver is in the classpath (the prepared jar only contains H2).

+
+
+
+
+

Authentication

+
+

Most business applications incorporate authentication and authorization. +Your spring boot application will implement some kind of security, e.g. integrated login with username+password or in many cases authentication via an existing IAM. +For security reasons your batch should also implement an authentication mechanism and obey the authorization implemented in your application (e.g. via @RolesAllowed).

+
+
+

Since there are many different authentication mechanism we cannot provide an out-of-the-box solution in devonfw, but we describe a pattern how this can be implemented in devonfw batches.

+
+
+

We suggest to implement the authentication in a Spring Batch tasklet, which runs as the first step in your batch. This tasklet will do all of the work which is required to authenticate the batch. A simple example which authenticates the batch "locally" via username and password could be implemented like this:

+
+
+
+
@Named
+public class SimpleAuthenticationTasklet implements Tasklet {
+
+  @Override
+  public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
+
+    String username = chunkContext.getStepContext().getStepExecution().getJobParameters().getString("username");
+    String password = chunkContext.getStepContext().getStepExecution().getJobParameters().getString("password");
+    Authentication authentication = new UsernamePasswordAuthenticationToken(username, password);
+
+    SecurityContextHolder.getContext().setAuthentication(authentication);
+    return RepeatStatus.FINISHED;
+  }
+
+}
+
+
+
+

The username and password have to be supplied via two CLI parameters -username and -password. This implementation creates an "authenticated" Authentication and sets it in the Spring Security context. This is just for demonstration; normally, you should not provide passwords via the command line. The actual authentication will be done automatically via Spring Security as in your "normal" application. +If you have a more complex authentication mechanism in your application, e.g. via OpenID Connect, just call this in the tasklet. Naturally you may read authentication parameters (e.g. secrets) from the command line or, more securely, from a configuration file.

+
+
+

In your Job Configuration set this tasklet as the first step:

+
+
+
+
@Configuration
+@EnableBatchProcessing
+public class BookingsExportBatchConfig {
+  @Inject
+  private JobBuilderFactory jobBuilderFactory;
+
+  @Inject
+  private StepBuilderFactory stepBuilderFactory;
+
+  @Bean
+  public Job myBatchJob() {
+    return this.jobBuilderFactory.get("myJob").start(myAuthenticationStep()).next(...).build();
+  }
+
+  @Bean
+  public Step myAuthenticationStep() {
+    return this.stepBuilderFactory.get("myAuthenticationStep").tasklet(myAuthenticatonTasklet()).build();
+  }
+
+  @Bean
+  public Tasklet myAuthenticatonTasklet() {
+    return new SimpleAuthenticationTasklet();
+  }
+...
+
+
+
+
+

Tips & tricks

+
+
Identifying job parameters
+
+

Spring uses a job's parameters to identify job executions. Parameters starting with "-" are not considered for identifying a job execution.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/devon4j-tutorials.html b/docs/devonfw.github.io/1.0/devon4j.wiki/devon4j-tutorials.html new file mode 100644 index 00000000..5eb28f7e --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/devon4j-tutorials.html @@ -0,0 +1,437 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==Tutorials

+
+ +
+

==Creating a new application

+
+
+

Running the archetype

+
+

In order to create a new application you must use the archetype provided by devon4j which uses the maven archetype functionality.

+
+
+

To create a new application, you should have installed devonfw IDE. Follow the devon ide documentation to install +it. +You can choose between 2 alternatives: create it from the command line or, in a more visual manner, within Eclipse.

+
+
+
From command Line
+
+

To create a new devon4j application from command line, you can simply run the following command:

+
+
+
+
devon java create com.example.application.sampleapp
+
+
+
+

For low-level creation you can also manually call this command:

+
+
+
+
mvn -DarchetypeVersion=${devon4j.version} -DarchetypeGroupId=com.devonfw.java.templates -DarchetypeArtifactId=devon4j-template-server archetype:generate -DgroupId=com.example.application -DartifactId=sampleapp -Dversion=1.0.0-SNAPSHOT -Dpackage=com.devonfw.application.sampleapp
+
+
+
+

Attention: The archetypeVersion (first argument) should be set to the latest version of devon4j. You can easily determine the version from this badge: +latest devon4j version

+
+
+

Further providing additional properties (using -D parameter) you can customize the generated app:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 1. Options for app template
propertycommentexample

dbType

Choose the type of RDBMS to use (hana, oracle, mssql, postgresql, mariadb, mysql, etc.)

-DdbType=postgresql

batch

Option to add a batch module

-Dbatch=batch

+
+
+
From Eclipse
+
+
+
+After that, you should follow these Eclipse steps to create your application:
+
+
+
+
    +
  • +

    Create a new Maven Project.

    +
  • +
  • +

    Choose the devon4j-template-server archetype, just like the image.

    +
  • +
+
+
+
+Select archetype +
+
+
+
    +
  • +

    Fill the Group Id, Artifact Id, Version and Package for your project.

    +
  • +
+
+
+
+Configure archetype +
+
+
+
    +
  • +

    Finish the Eclipse assistant and you are ready to start your project.

    +
  • +
+
+
+
+
+

What is generated

+
+

The application template (archetype) generates a Maven multi-module project. It has the following modules:

+
+
+
    +
  • +

    api: module with the API (REST service interfaces, transferobjects, datatypes, etc.) to be imported by other apps as a maven dependency in order to invoke and consume the offered (micro)services.

    +
  • +
  • +

    core: maven module containing the core of the application.

    +
  • +
  • +

    batch: optional module for batch(es)

    +
  • +
  • +

    server: module that bundles the entire app (core with optional batch) as a WAR file.

    +
  • +
+
+
+

The toplevel pom.xml of the generated project has the following features:

+
+
+
    +
  • +

    Properties definition: Spring-boot version, Java version, etc.

    +
  • +
  • +

    Modules definition for the modules (described above)

    +
  • +
  • +

    Dependency management: define versions for dependencies of the technology stack that are recommended and work together in a compatible way.

    +
  • +
  • +

    Maven plugins with desired versions and configuration

    +
  • +
  • +

    Profiles for test stages

    +
  • +
+
+
+
+

How to run your app

+
+
Run app from IDE
+
+

To run your application from your favourite IDE, simply launch SpringBootApp as a Java application.

+
+
+
+
Run app as bootified jar or war
+
+

More details are available here.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/devon4j.html b/docs/devonfw.github.io/1.0/devon4j.wiki/devon4j.html new file mode 100644 index 00000000..42f4d796 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/devon4j.html @@ -0,0 +1,12708 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==Java

+
+
+

The devonfw community +${project.version}, ${buildtime}

+
+
+

devonfw provides a solution to building applications which combine best-in-class frameworks and libraries as well as industry proven practices and code conventions. +It massively speeds up development, reduces risks and helps you to deliver better results.

+
+
+

The following sections contain the complete compendium of devon4j, the Java stack of devonfw. +With devon4j we support both spring and quarkus as major frameworks. +However, the general coding patterns are based on common Java standards mainly from Jakarta EE and therefore do not differ between those frameworks. +Therefore, the general section contains all the documentation that is universal to Java and does not differ between the two frameworks. +Only the sections spring and quarkus contain documentation that is specific to the respective approach.

+
+
+

If you’re trying to decide which of the two frameworks to use, have a look at this guide.

+
+
+

You can also read the latest version of this documentation online at the following sources:

+
+ +
+

1. General

+
+
+

Here you will find documentation and code-patterns for developing with Java in general, independent of the framework you choose.

+
+ +
+

==Architecture

+
+
+

There are many different views that are summarized by the term architecture. First, we will introduce the key principles and architecture principles of devonfw. Then, we will go into details of the architecture of an application.

+
+
+

1.1. Key Principles

+
+

For devonfw we follow these fundamental key principles for all decisions about architecture, design, or choosing standards, libraries, and frameworks:

+
+
+
    +
  • +

    KISS
    +Keep it small and simple

    +
  • +
  • +

    Open
    +Commitment to open standards and solutions (no required dependencies to commercial or vendor-specific standards or solutions)

    +
  • +
  • +

    Patterns
    +We concentrate on providing patterns, best-practices and examples rather than writing framework code.

    +
  • +
  • +

    Solid
    +We pick solutions that are established and have been proven to be solid and robust in real-life (business) projects.

    +
  • +
+
+
+
+

1.2. Architecture Principles

+
+

Additionally we define the following principles that our architecture is based on:

+
+
+
    +
  • +

    Component Oriented Design
    +We follow a strictly component oriented design to address the following sub-principles:

    +
    +
      +
    • +

      Separation of Concerns

      +
    • +
    • +

      Reusability and avoiding redundant code

      +
    • +
    • +

      Information Hiding via component API and its exchangeable implementation treated as secret.

      +
    • +
    • +

      Design by Contract for self-contained, descriptive, and stable component APIs.

      +
    • +
    • +

      Layering as well as separation of business logic from technical code for better maintenance.

      +
    • +
    • +

      Data Sovereignty (and high cohesion with low coupling) says that a component is responsible for its data and changes to this data shall only happen via the component. Otherwise, maintenance problems will arise to ensure that data remains consistent. Therefore, interfaces of a component that may be used by other components are designed call-by-value and not call-by-reference.

      +
    • +
    +
    +
  • +
  • +

    Homogeneity
    +Solve similar problems in similar ways and establish a uniform code-style.

    +
  • +
+
+
+

As an architect you should be prepared for the future by reading the TechnoVision.

+
+
+
+

1.3. Application Architecture

+
+

For the architecture of an application we distinguish the following views:

+
+
+
    +
  • +

    The Business Architecture describes an application from the business perspective. It divides the application into business components and with full abstraction of technical aspects.

    +
  • +
  • +

    The Technical Architecture describes an application from the technical implementation perspective. It divides the application into technical layers and defines which technical products and frameworks are used to support these layers.

    +
  • +
  • +

    The Infrastructure Architecture describes an application from the operational infrastructure perspective. It defines the nodes used to run the application including clustering, load-balancing and networking. This view is not explored further in this guide.

    +
  • +
+
+
+
Business Architecture
+
+

The business architecture divides the application into business components. A business component has a well-defined responsibility that it encapsulates. All aspects related to that responsibility have to be implemented within that business component. Further, the business architecture defines the dependencies between the business components. These dependencies need to be free of cycles. A business component exports its functionality via well-defined interfaces as a self-contained API. A business component may use another business component via its API and compliant with the dependencies defined by the business architecture.

+
+
+

As the business domain and logic of an application can be totally different, the devonfw can not define a standardized business architecture. Depending on the business domain it has to be defined from scratch or from a domain reference architecture template. For very small systems it may be suitable to define just a single business component containing all the code.

+
+
+
+
Technical Architecture
+
+

The technical architecture divides the application into technical layers based on the multilayered architecture. A layer is a unit of code with the same category such as a service or presentation logic. So, a layer is often supported by a technical framework. Each business component can therefore be split into component parts for each layer. However, a business component may not have component parts for every layer (e.g. only a presentation part that utilized logic from other components).

+
+
+

An overview of the technical reference architecture of the devonfw is given by figure "Technical Reference Architecture". +It defines the following layers visualized as horizontal boxes:

+
+
+ +
+
+

Also, you can see the (business) components as vertical boxes (e.g. A and X) and how they are composed out of component parts each one assigned to one of the technical layers.

+
+
+

Further, there are technical components for cross-cutting aspects grouped by the gray box on the left. Here is a complete list:

+
+ +
+
+devonfw architecture blueprint +
+
Figure 1. Technical Reference Architecture
+
+
+

Please click on the architecture image to open it as SVG and click on the layers and cross-cutting topics to open the according documentation guide.

+
+
+

We reflect this architecture in our code as described in our coding conventions allowing a traceability of business components, use-cases, layers, etc. into the code and giving +developers a sound orientation within the project.

+
+
+

Further, the architecture diagram shows the allowed dependencies illustrated by the dark green connectors. +Within a business component a component part can call the next component part on the layer directly below via a dependency on its API (vertical connectors). +While this is natural and obvious, it is generally forbidden to have dependencies upwards the layers +or to skip a layer by a direct dependency on a component part two or more layers below. +The general dependencies allowed between business components are defined by the business architecture. +In our reference architecture diagram we assume that the business component A1 is allowed to depend +on component A2. Therefore, a use-case within the logic component part of A1 is allowed to call a +use-case from A2 via a dependency on the component API. The same applies for dialogs on the client layer. +This is illustrated by the horizontal connectors. Please note that persistence entities are part of the API of the data-access component part so only the logic component part of the same +business component may depend on them.

+
+
+

The technical architecture has to address non-functional requirements:

+
+
+
    +
  • +

    scalability
    +is established by keeping state in the client and making the server state-less (except for login session). Via load-balancers new server nodes can be added to improve performance (horizontal scaling).

    +
  • +
  • +

    availability and reliability
    +are addressed by clustering with redundant nodes avoiding any single-point-of failure. If one node fails the system is still available. Further, the software has to be robust so there are no dead-locks or other bad effects that can make the system unavailable or not reliable.

    +
  • +
  • +

    security
    +is archived in the devonfw by the right templates and best-practices that avoid vulnerabilities. See security guidelines for further details.

    +
  • +
  • +

    performance
    +is obtained by choosing the right products and proper configurations. While the actual implementation of the application matters for performance a proper design is important as it is the key to allow performance-optimizations (see e.g. caching).

    +
  • +
+
+
+
Technology Stack
+
+

The technology stack of the devonfw is illustrated by the following table.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Technology Stack of devonfw
TopicDetailStandardSuggested implementation

runtime

language & VM

Java

Oracle JDK

runtime

servlet-container

JEE

tomcat

component management

dependency injection

JSR330 & JSR250

spring

configuration

framework

-

spring-boot

persistence

OR-mapper

JPA

hibernate

batch

framework

JSR352

spring-batch

service

SOAP services

JAX-WS

CXF

service

REST services

JAX-RS

CXF

logging

framework

slf4j

logback

validation

framework

beanvalidation/JSR303

hibernate-validator

security

Authentication & Authorization

JAAS

spring-security

monitoring

framework

JMX

spring

monitoring

HTTP Bridge

HTTP & JSON

jolokia

AOP

framework

dynamic proxies

spring AOP

+
+ +
+

==Configuration

+
+
+

An application needs to be configurable in order to allow internal setup (like CDI) but also to allow externalized configuration of a deployed package (e.g. integration into runtime environment). We rely on a comprehensive configuration approach following a "convention over configuration" pattern. This guide adds on to this by detailed instructions and best-practices how to deal with configurations.

+
+
+

In general we distinguish the following kinds of configuration that are explained in the following sections:

+
+
+ +
+
+
+
+
+

1.4. Internal Application Configuration

+
+

The application configuration contains all internal settings and wirings of the application (bean wiring, database mappings, etc.) and is maintained by the application developers at development time.

+
+
+

For more detail of Spring stack, see here

+
+
+
+

1.5. Externalized Configuration

+
+

Externalized configuration is a configuration that is provided separately to a deployment package and can be maintained undisturbed by re-deployments.

+
+
+
Environment Configuration
+
+

The environment configuration contains configuration parameters (typically port numbers, host names, passwords, logins, timeouts, certificates, etc.) specific for the different environments. These are under the control of the operators responsible for the application.

+
+
+

The environment configuration is maintained in application.properties files, defining various properties. +These properties are explained in the corresponding configuration sections of the guides for each topic:

+
+
+ +
+
+

Make sure your properties are thoroughly documented by providing a comment to each property. This inline documentation is most valuable for your operating department.

+
+
+

More about structuring your application.properties files can be read here for Spring.

+
+
+

For Quarkus, please refer to Quarkus Config Reference for more details.

+
+
+
+
Business Configuration
+
+

Often applications do not need business configuration. In case they do, it should typically be editable by administrators via the GUI. The business configuration values should therefore be stored in the database in key/value pairs.

+
+
+

Therefore we suggest to create a dedicated table with (at least) the following columns:

+
+
+
    +
  • +

    ID

    +
  • +
  • +

    Property name

    +
  • +
  • +

    Property type (Boolean, Integer, String)

    +
  • +
  • +

    Property value

    +
  • +
  • +

    Description

    +
  • +
+
+
+

According to the entries in this table, an administrative GUI may show a generic form to modify business configuration. Boolean values should be shown as checkboxes, integer and string values as text fields. The values should be validated according to their type so an error is raised if you try to save a string in an integer property for example.

+
+
+

We recommend the following base layout for the hierarchical business configuration:

+
+
+

component.[subcomponent].[subcomponent].propertyname

+
+
+
+
+

1.6. Security

+
+

Often you need to have passwords (for databases, third-party services, etc.) as part of your configuration. These are typically environment specific (see above). However, with DevOps and continuous-deployment you might be tempted to commit such configurations into your version-control (e.g. git). Doing that with plain text passwords is a severe problem especially for production systems. Never do that! Instead we offer some suggestions on how to deal with sensitive configurations:

+
+
+
Password Encryption
+
+

A simple but reasonable approach is to configure the passwords encrypted with a master-password. The master-password should be a strong secret that is specific for each environment. It must never be committed to version-control.

+
+
+

For Spring, we use jasypt-spring-boot. For more details, see here

+
+
+

For Quarkus, see here

+
+
+
Is this Security by Obscurity?
+
+
    +
  • +

    Yes, from the point of view to protect the passwords on the target environment this is nothing but security by obscurity. If an attacker somehow got full access to the machine this will only cause him to spend some more time.

    +
  • +
  • +

    No, if someone only gets the configuration file. So all your developers might have access to the version-control where the config is stored. Others might have access to the software releases that include this configs. But without the master-password that should only be known to specific operators none else can decrypt the password (except with brute-force what will take a very long time, see jasypt for details).

    +
  • +
+
+
+ +
+

==Coding Conventions

+
+
+

The code should follow general conventions for Java (see Oracle Naming Conventions, Google Java Style, etc.). We consider this common sense and provide configurations for SonarQube and related tools such as Checkstyle instead of repeating this here.

+
+
+
+
+
+

1.7. Naming

+
+

Besides general Java naming conventions, we follow the additional rules listed here explicitly:

+
+
+
    +
  • +

    Always use short but speaking names (for types, methods, fields, parameters, variables, constants, etc.).

    +
  • +
  • +

    Strictly avoid special characters in technical names (for files, types, fields, methods, properties, variables, database tables, columns, constraints, etc.). In other words, only use Latin alphanumeric ASCII characters with the common allowed technical separators for the according context (e.g. underscore) for technical names (even excluding whitespaces).

    +
  • +
  • +

    For package segments and type names prefer singular forms (CustomerEntity instead of CustomersEntity). Only use plural forms when there is no singular or it is really semantically required (e.g. for a container that contains multiple of such objects).

    +
  • +
  • +

    Avoid having duplicate type names. The name of a class, interface, enum or annotation should be unique within your project unless this is intentionally desired in a special and reasonable situation.

    +
  • +
  • +

    Avoid artificial naming constructs such as prefixes (I*) or suffixes (*IF) for interfaces.

    +
  • +
  • +

    Use CamelCase even for abbreviations (XmlUtil instead of XMLUtil)

    +
  • +
  • +

    Avoid property/field names where the second character is upper-case at all (e.g. 'aBc'). See #1095 for details.

    +
  • +
  • +

    Names of Generics should be easy to understand. Where suitable follow the common rule E=Element, T=Type, K=Key, V=Value but feel free to use longer names for more specific cases such as ID, DTO or ENTITY. The capitalized naming helps to distinguish a generic type from a regular class.

    +
  • +
+
+
+
+

1.8. Packages

+
+

Java Packages are the most important element to structure your code. We use a strict packaging convention to map technical layers and business components (slices) to the code (See technical architecture for further details). By using the same names in documentation and code we create a strong link that gives orientation and makes it easy to find from business requirements, specifications or story tickets into the code and back.

+
+
+

For a devon4j-based application we use the following Java-Package schema:

+
+
+
+
«root».«component».«layer»[.«detail»]
+
+
+
+

E.g. in our example application we find the Spring Data repositories for the ordermanagement component in the package com.devonfw.application.mtsj.ordermanagement.dataaccess.api.repo

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2. Segments of package schema
SegmentDescriptionExample

«root»

Is the basic Java Package name-space of your app. Typically we suggest to use «group».«artifact» where «group» is your maven/gradle groupId corresponding to your organization or IT project owning the code following common Java Package conventions. The segment «artifact» is your maven/gradle artifactId and is typically the technical name of your app.

com.devonfw.application.mtsj

«component»

The (business) component the code belongs to. It is defined by the business architecture and uses terms from the business domain. Use the implicit component general for code not belonging to a specific component (foundation code).

salesmanagement

«layer»

The name of the technical layer (See technical architecture). Details are described for the modern project structure and for the classic project structure.

logic

«detail»

Here you are free to further divide your code into sub-components and other concerns according to the size of your component part. If you want to strictly separate API from implementation you should start «detail» with «scope» that is explained below.

dao

«scope»

The scope which is one of api (official API to be used by other layers or components), base (basic code to be reused by other implementations) and impl (implementation that should never be imported from outside). This segment was initially mandatory but due to trends such as microservices, lean, and agile we decided to make it optional and do not force anybody to use it.

api

+
+

Please note that devon4j library modules for spring use com.devonfw.module as «root» and the name of the module as «component». E.g. the API of our beanmapping module can be found in the package com.devonfw.module.beanmapping.common.api.

+
+
+
+

1.9. Code Tasks

+
+

Code spots that need some rework can be marked with the following tasks tags. These are already properly pre-configured in your development environment for auto completion and to view tasks you are responsible for. It is important to keep the number of code tasks low. Therefore, every member of the team should be responsible for the overall code quality. So if you change a piece of code and hit a code task that you can resolve in a reliable way, please do this as part of your change and remove the according tag.

+
+
+
TODO
+
+

Used to mark a piece of code that is not yet complete (typically because it can not be completed due to a dependency on something that is not ready).

+
+
+
+
 // TODO «author» «description»
+
+
+
+

A TODO tag is added by the author of the code who is also responsible for completing this task.

+
+
+
+
FIXME
+
+
+
 // FIXME «author» «description»
+
+
+
+

A FIXME tag is added by the author of the code or someone who found a bug he can not fix right now. The «author» who added the FIXME is also responsible for completing this task. This is very similar to a TODO but with a higher priority. FIXME tags indicate problems that should be resolved before a release is completed while TODO tags might have to stay for a longer time.

+
+
+
+
REVIEW
+
+
+
 // REVIEW «responsible» («reviewer») «description»
+
+
+
+

A REVIEW tag is added by a reviewer during a code review. Here the original author of the code is responsible to resolve the REVIEW tag and the reviewer is assigning this task to him. This is important for feedback and learning and has to be aligned with a review "process" where people talk to each other and get into discussion. In smaller or local teams a peer-review is preferable but this does not scale for large or even distributed teams.

+
+
+
+
+

1.10. Code-Documentation

+
+

As a general goal, the code should be easy to read and understand. Besides clear naming, documentation is important. We follow these rules:

+
+
+
    +
  • +

    APIs (especially component interfaces) are properly documented with JavaDoc.

    +
  • +
  • +

    JavaDoc shall provide actual value - we do not write JavaDoc to satisfy tools such as checkstyle but to express information not already available in the signature.

    +
  • +
  • +

    We make use of {@link} tags in JavaDoc to make it more expressive.

    +
  • +
  • +

    JavaDoc of APIs describes how to use the type or method and not how the implementation internally works.

    +
  • +
  • +

    To document implementation details, we use code comments (e.g. // we have to flush explicitly to ensure version is up-to-date). This is only needed for complex logic.

    +
  • +
  • +

    Avoid the pointless {@inheritDoc} as since Java 1.5 there is the @Override annotation for overridden methods and your JavaDoc is inherited automatically even without any JavaDoc comment at all.

    +
  • +
+
+
+
+

1.11. Code-Style

+
+

This section gives you best practices to write better code and avoid pitfalls and mistakes.

+
+
+
BLOBs
+
+

Avoid using byte[] for BLOBs as this will load them entirely into your memory. This will cause performance issues or out of memory errors. Instead, use streams when dealing with BLOBs. For further details see BLOB support.

+
+
+
+
Stateless Programming
+
+

When implementing logic as components or beans of your container using dependency injection, we strongly encourage stateless programming. +This is not about data objects like an entity or transfer-object that are stateful by design. +Instead this applies to all classes annotated with @Named, @ApplicationScoped, @Stateless, etc. and all their super-classes. +These classes especially include your repositories, use-cases, and REST services. +Such classes shall never be modified after initialization. +Methods called at runtime (after initialization via the container) do not assign fields (member variables of your class) or mutate the object stored in a field. +This allows your component or bean to be stateless and thread-safe. +Therefore it can be initialized as a singleton so only one instance is created and shared across all threads of the application. +Here is an example:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcApproveContractImpl implements UcApproveContract {
+
+  // bad
+  private String contractOwner;
+
+  private MyState state;
+
+  @Override
+  public void approve(Contract contract) {
+    this.contractOwner = contract.getOwner();
+    this.contractOwner = this.contractOwner.toLowerCase(Locale.US);
+    this.state.setAdmin(this.contractOwner.endsWith("admin"));
+    if (this.state.isAdmin()) {
+      ...
+    } else {
+      ...
+    }
+  }
+
+  // fine
+  @Override
+  public void approveContract(Contract contract) {
+    String contractOwner = contract.getOwner().toLowerCase(Locale.US);
+    if (contractOwner.endsWith("admin")) {
+      ...
+    } else {
+      ...
+    }
+  }
+}
+
+
+
+

As you can see in the bad code, fields of the class are assigned when the method approve is called. +So multiple users and therefore threads calling this method concurrently can interfere and override this state causing side-effects on parallel threads. +This will lead to nasty bugs and errors that are hard to trace down. +They will not occur in simple tests but for sure in production with real users. +Therefore never do this and implement your functionality stateless. +That is, keeping all state in local variables and strictly avoiding modifying fields or their value as illustrated in the fine code. +If you find yourself passing many parameters between methods that all represent state, you can easily create a separate class that encapsulates this state. +However, then you need to create this state object in your method as a local variable and pass it between methods as a parameter:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcApproveContractImpl implements UcApproveContract {
+
+  // fine
+  @Override
+  public void approveContract(Contract contract) {
+    String contractOwner = contract.getOwner().toLowerCase(Locale.US);
+    MyState state = new MyState();
+    state.setAdmin(contractOwner.endsWith("admin"));
+    doApproveContract(contract, state);
+  }
+}
+
+
+
+
+
Closing Resources
+
+

Resources such as streams (InputStream, OutputStream, Reader, Writer) or transactions need to be handled properly. Therefore, it is important to follow these rules:

+
+
+
    +
  • +

    Each resource has to be closed properly, otherwise you will get out of file handles, TX sessions, memory leaks or the like

    +
  • +
  • +

    Where possible avoid to deal with such resources manually. That is why we are recommending @Transactional for transactions in devonfw (see Transaction Handling).

    +
  • +
  • +

    In case you have to deal with resources manually (e.g. binary streams) ensure to close them properly. See the example below for details.

    +
  • +
+
+
+

Closing streams and other such resources is error prone. Have a look at the following example:

+
+
+
+
// bad
+try {
+  InputStream in = new FileInputStream(file);
+  readData(in);
+  in.close();
+} catch (IOException e) {
+  throw new IllegalStateException("Failed to read data.", e);
+}
+
+
+
+

The code above is wrong as in case of an IOException the InputStream is not properly closed. In a server application such mistakes can cause severe errors that typically will only occur in production. As such resources implement the AutoCloseable interface you can use the try-with-resource syntax to write correct code. The following code shows a correct version of the example:

+
+
+
+
// fine
+try (InputStream in = new FileInputStream(file)) {
+  readData(in);
+} catch (IOException e) {
+  throw new IllegalStateException("Failed to read data.", e);
+}
+
+
+
+
+
Catching and handling Exceptions
+
+

When catching exceptions always ensure the following:

+
+
+
    +
  • +

    Never call printStackTrace() method on an exception

    +
  • +
  • +

    Either log or wrap and re-throw the entire caught exception. Be aware that the cause(s) of an exception is very valuable information. If you lose such information by improper exception-handling you may be unable to properly analyse production problems which can cause severe issues.

    +
    +
      +
    • +

      If you wrap and re-throw an exception ensure that the caught exception is passed as cause to the newly created and thrown exception.

      +
    • +
    • +

      If you log an exception ensure that the entire exception is passed as argument to the logger (and not only the result of getMessage() or toString() on the exception).

      +
    • +
    +
    +
  • +
  • +

    See exception handling

    +
  • +
+
+
+
+
Lambdas and Streams
+
+

With Java8 you have cool new features like lambdas and monads (Stream, CompletableFuture, Optional, etc.). +However, these new features can also be misused or lead to code that is hard to read or debug. To avoid pain, we give you the following best practices:

+
+
+
    +
  1. +

    Learn how to use the new features properly before using. Developers are often keen on using cool new features. When you do your first experiments in your project code you will cause deep pain and might be ashamed afterwards. Please study the features properly. Even Java8 experts still write for loops to iterate over collections, so only use these features where it really makes sense.

    +
  2. +
  3. +

    Streams shall only be used in fluent API calls as a Stream can not be forked or reused.

    +
  4. +
  5. +

    Each stream has to have exactly one terminal operation.

    +
  6. +
  7. +

    Do not write multiple statements into lambda code:

    +
    +
    +
    // bad
    +collection.stream().map(x -> {
    +Foo foo = doSomething(x);
    +...
    +return foo;
    +}).collect(Collectors.toList());
    +
    +
    +
    +

    This style makes the code hard to read and debug. Never do that! Instead, extract the lambda body to a private method with a meaningful name:

    +
    +
    +
    +
    // fine
    +collection.stream().map(this::convertToFoo).collect(Collectors.toList());
    +
    +
    +
  8. +
  9. +

    Do not use parallelStream() in general code (that will run on server side) unless you know exactly what you are doing and what is going on under the hood. Some developers might think that using parallel streams is a good idea as it will make the code faster. However, if you want to do performance optimizations talk to your technical lead (architect). Many features such as security and transactions will rely on contextual information that is associated with the current thread. Hence, using parallel streams will most probably cause serious bugs. Only use them for standalone (CLI) applications or for code that is just processing large amounts of data.

    +
  10. +
  11. +

    Do not perform operations on a sub-stream inside a lambda:

    +
    +
    +
    set.stream().flatMap(x -> x.getChildren().stream().filter(this::isSpecial)).collect(Collectors.toList()); // bad
    +set.stream().flatMap(x -> x.getChildren().stream()).filter(this::isSpecial).collect(Collectors.toList()); // fine
    +
    +
    +
  12. +
  13. +

    Only use collect at the end of the stream:

    +
    +
    +
    set.stream().collect(Collectors.toList()).forEach(...) // bad
    +set.stream().peek(...).collect(Collectors.toList()) // fine
    +
    +
    +
  14. +
  15. +

    Lambda parameters with Types inference

    +
    +
    +
    (String a, Float b, Byte[] c) -> a.toString() + Float.toString(b) + Arrays.toString(c)  // bad
    +(a,b,c)  -> a.toString() + Float.toString(b) + Arrays.toString(c)  // fine
    +
    +Collections.sort(personList, (Person p1, Person p2) -> p1.getSurName().compareTo(p2.getSurName()));  // bad
    +Collections.sort(personList, (p1, p2) -> p1.getSurName().compareTo(p2.getSurName()));  // fine
    +
    +
    +
  16. +
  17. +

    Avoid Return Braces and Statement

    +
    +
    +
     a ->  { return a.toString(); } // bad
    + a ->  a.toString();   // fine
    +
    +
    +
  18. +
  19. +

    Avoid Parentheses with Single Parameter

    +
    +
    +
    (a) -> a.toString(); // bad
    + a -> a.toString();  // fine
    +
    +
    +
  20. +
  21. +

    Avoid if/else inside foreach method. Use Filter method & comprehension

    +
    +
    +
    // bad
    +public static List<String> twitterHandles(List<Author> authors, String company) {
    +    final List<String> result = new ArrayList<>();
    +    for (Author a : authors) {
    +      if (a.getCompany().equals(company)) {
    +        String handle = a.getTwitterHandle();
    +        if (handle != null)
    +          result.add(handle);
    +      }
    +    }
    +    return result;
    +  }
    +
    +
    +
    +
    +
    // fine
    +public List<String> twitterHandles(List<Author> authors, String company) {
    +    return authors.stream()
    +            .filter(a -> null != a && a.getCompany().equals(company))
    +            .map(a -> a.getTwitterHandle())
    +            .collect(toList());
    +  }
    +
    +
    +
  22. +
+
+
+
+
Optionals
+
+

With Optional you can wrap values to avoid a NullPointerException (NPE). However, it is not a good code-style to use Optional for every parameter or result to express that it may be null. For such case use @Nullable or even better instead annotate @NotNull where null is not acceptable.

+
+
+

However, Optional can be used to prevent NPEs in fluent calls (due to the lack of the elvis operator):

+
+
+
+
Long id;
+id = fooCto.getBar().getBar().getId(); // may cause NPE
+id = Optional.ofNullable(fooCto).map(FooCto::getBar).map(BarCto::getBar).map(BarEto::getId).orElse(null); // null-safe
+
+
+
+
+
Encoding
+
+

Encoding (esp. Unicode with combining characters and surrogates) is a complex topic. Please study this topic if you have to deal with encodings and processing of special characters. For the basics follow these recommendations:

+
+
+
    +
  • +

    Whenever possible prefer unicode (UTF-8 or better) as encoding. This especially impacts your databases and has to be defined upfront as it typically can not be changed (easily) afterwards.

    +
  • +
  • +

    Do not cast from byte to char (unicode characters can be composed of multiple bytes, such cast may only work for ASCII characters)

    +
  • +
  • +

    Never convert the case of a String using the default locale (esp. when writing generic code like in devonfw). E.g. if you do "HI".toLowerCase() and your system locale is Turkish, then the output will be "hı" instead of "hi", which can lead to wrong assumptions and serious problems. If you want to do a "universal" case conversion always explicitly use an according western locale (e.g. toLowerCase(Locale.US)). Consider using a helper class (see e.g. CaseHelper) or create your own little static utility for that in your project.

    +
  • +
  • +

    Write your code independent from the default encoding (system property file.encoding) - this will most likely differ in JUnit from production environment

    +
    +
      +
    • +

      Always provide an encoding when you create a String from byte[]: new String(bytes, encoding)

      +
    • +
    • +

      Always provide an encoding when you create a Reader or Writer: new InputStreamReader(inStream, encoding)

      +
    • +
    +
    +
  • +
+
+
+
+
Prefer general API
+
+

Avoid unnecessary strong bindings:

+
+
+
    +
  • +

    Do not bind your code to implementations such as Vector or ArrayList instead of List

    +
  • +
  • +

    In APIs for input (=parameters) always consider to make little assumptions:

    +
    +
      +
    • +

      prefer Collection over List or Set where the difference does not matter (e.g. only use Set when you require uniqueness or highly efficient contains)

      +
    • +
    • +

      consider preferring Collection<? extends Foo> over Collection<Foo> when Foo is an interface or super-class

      +
    • +
    +
    +
  • +
+
+
+
+
Prefer primitive boolean
+
+

Unless in rare cases where you need to allow a flag being null avoid using the object type Boolean.

+
+
+
+
// bad
+public Boolean isEmpty() {
+  return size() == 0;
+}
+
+
+
+

Instead always use the primitive boolean type:

+
+
+
+
// fine
+public boolean isEmpty() {
+  return size() == 0;
+}
+
+
+
+

The only known excuse is for flags in embeddable types due to limitations of hibernate.

+
+ +
+

==Project structure

+
+
+

In devonfw we want to give clear structure and guidance for building applications. +This also allows tools such as CobiGen or sonar-devon4j-plugin to "understand" the code. +Also this helps developers going from one devonfw project to the next one to quickly understand the code-base. +If every developer knows where to find what, the project gets more efficient. +A long time ago maven standardized the project structure with src/main/java, etc. and turned chaos into structure. +With devonfw we experienced the same for the codebase (what is inside src/main/java).

+
+
+

We initially started devon4j based on spring and spring-boot and proposed a classic project structure. +With modern cloud-native trends we added a modern project structure, that is more lean and up-to-date with the latest market trends.

+
+ +
+

==Dependency Injection +Dependency injection is one of the most important design patterns and is a key principle to a modular and component based architecture. +The Java Standard for dependency injection is javax.inject (JSR330) that we use in combination with JSR250. +Additionally, for scoping you can use CDI (Context and Dependency Injection) from JSR365.

+
+
+

There are many frameworks which support this standard including all recent Java EE application servers. +Therefore in devonfw we rely on these open standards and can propagate patterns and code examples that work independent from the underlying frameworks.

+
+
+
+
+

1.12. Key Principles

+
+

Within dependency injection a bean is typically a reusable unit of your application providing an encapsulated functionality. +This bean can be injected into other beans and it should in general be replaceable. +As an example we can think of a use-case, a repository, etc. +As best practice we use the following principles:

+
+
+
    +
  • +

    Stateless implementation
    +By default such beans shall be implemented stateless. If you store state information in member variables you can easily run into concurrency problems and nasty bugs. This is easy to avoid by using local variables and separate state classes for complex state-information. Try to avoid stateful beans wherever possible. Only add state if you are fully aware of what you are doing and properly document this as a warning in your JavaDoc.

    +
  • +
  • +

    Usage of Java standards
    +We use common standards (see above) that makes our code portable. Therefore we use standardized annotations like @Inject (javax.inject.Inject) instead of proprietary annotations such as @Autowired. Generally we avoid proprietary annotations in business code (logic layer).

    +
  • +
  • +

    Simple injection-style
    +In general you can choose between constructor, setter or field injection. For simplicity we recommend to do private field injection as it is very compact and easy to maintain. We believe that constructor injection is bad for maintenance especially in case of inheritance (if you change the dependencies you need to refactor all sub-classes). Private field injection and public setter injection are very similar but setter injection is much more verbose (often you are even forced to have javadoc for all public methods). If you are writing re-usable library code setter injection will make sense as it is more flexible. In a business application you typically do not need that and can save a lot of boiler-plate code if you use private field injection instead. Nowadays you are using container infrastructure also for your tests (see testing) so there is no need to inject manually (what would require a public setter).

    +
  • +
  • +

    KISS
    +To follow the KISS (keep it small and simple) principle we avoid advanced features (e.g. custom AOP, non-singleton beans) and only use them where necessary.

    +
  • +
  • +

    Separation of API and implementation
    +For important components we should separate a self-contained API documented with JavaDoc from its implementation. Code from other components that wants to use the implementation shall only rely on the API. However, for things that will never be exchanged, no API as interface is required and you can skip such separation.

    +
  • +
+
+
+
+

1.13. Example Bean

+
+

Here you can see the implementation of an example bean using dependency injection:

+
+
+
+
@ApplicationScoped
+@Named("MyComponent")
+public class MyComponentImpl implements MyComponent {
+  @Inject
+  private MyOtherComponent myOtherComponent;
+
+  @PostConstruct
+  public void init() {
+    // initialization if required (otherwise omit this method)
+  }
+
+  @PreDestroy
+  public void dispose() {
+    // shutdown bean, free resources if required (otherwise omit this method)
+  }
+
+  ...
+}
+
+
+
+

Here MyComponentImpl depends on MyOtherComponent that is injected into the field myOtherComponent because of the @Inject annotation. +To make this work there must be exactly one bean in the container (e.g. spring or quarkus) that is an instance of MyOtherComponent. +In order to put a bean into the container, we can use @ApplicationScoped in case of CDI (required for quarkus) for a stateless bean. +In spring we can omit a CDI annotation and the @Named annotation is already sufficient as a bean is stateless by default in spring. +If we always use @ApplicationScoped we can make this more explicit and more portable across different frameworks. +So in our example we put MyComponentImpl into the container. +That bean will be called MyComponent as we specified in the @Named annotation but we can also omit the name to use the classname as fallback. +Now our bean can be injected into other beans using @Inject annotation either via MyComponent interface (recommended when interface is present) or even directly via MyComponentImpl. +In case you omit the interface, you should also omit the Impl suffix or instead use Bean as suffix.

+
+
+
+

1.14. Multiple bean implementations

+
+

In some cases you might have multiple implementations as beans for the same interface. +The following sub-sections handle the different scenarios to give you guidance.

+
+
+
Only one implementation in container
+
+

In some cases you still have only one implementation active as bean in the container at runtime. +A typical example is that you have different implementations for test and main usage. +This case is easy, as @Inject will always be unique. +The only thing you need to care about is how to configure your framework (spring, quarkus, etc.) to know which implementation to put in the container depending on specific configuration. +In spring this can be achieved via the proprietary @Profile annotation.

+
+
+
+
Injecting all of multiple implementations
+
+

In some situations you may have an interface that defines a kind of "plugin". +You can have multiple implementations in your container and want to have all of them injected. +Then you can request a list with all the bean implementations via the interface as in the following example:

+
+
+
+
  @Inject
+  private List<MyConverter> converters;
+
+
+
+

Your code may iterate over all plugins (converters) and apply them sequentially. +Please note that the injection will fail (at least in spring), when there is no bean available to inject. +So you do not get an empty list injected but will get an exception on startup.

+
+
+
+
Injecting one of multiple implementations
+
+

Another scenario is that you have multiple implementations in your container coexisting, but for injection you may want to choose a specific implementation. +Here you could use the @Named annotation to specify a unique identifier for each implementation what is called qualified injection:

+
+
+
+
@ApplicationScoped
+@Named("UserAuthenticator")
+public class UserAuthenticator implements Authenticator {
+  ...
+}
+@ApplicationScoped
+@Named("ServiceAuthenticator")
+public class ServiceAuthenticator implements Authenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  @Named("UserAuthenticator")
+  private Authenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  @Named("ServiceAuthenticator")
+  private Authenticator authenticator;
+  ...
+}
+
+
+
+

However, we discovered that this pattern is not so great: +The identifiers in the @Named annotation are just strings that could easily break. +You could use constants instead but still this is not the best solution.

+
+
+

In the end you can very much simplify this by just directly injecting the implementation instead:

+
+
+
+
@ApplicationScoped
+public class UserAuthenticator implements Authenticator {
+  ...
+}
+@ApplicationScoped
+public class ServiceAuthenticator implements Authenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  private UserAuthenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  private ServiceAuthenticator authenticator;
+  ...
+}
+
+
+
+

In case you want to strictly decouple from implementations, you can still create dedicated interfaces:

+
+
+
+
public interface UserAuthenticator extends Authenticator {}
+@ApplicationScoped
+public class UserAuthenticatorImpl implements UserAuthenticator {
+  ...
+}
+public interface ServiceAuthenticator extends Authenticator {}
+@ApplicationScoped
+public class ServiceAuthenticatorImpl implements ServiceAuthenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  private UserAuthenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  private ServiceAuthenticator authenticator;
+  ...
+}
+
+
+
+

However, as you can see this is again introducing additional boiler-plate code. +While the principle to separate API and implementation and strictly decouple from implementation is valuable in general, +you should always consider KISS, lean, and agile in contrast and balance pros and cons instead of blindly following dogmas.

+
+
+
+
+

1.15. Imports

+
+

Here are the import statements for the most important annotations for dependency injection

+
+
+
+
import javax.inject.Inject;
+import javax.inject.Named;
+import javax.enterprise.context.ApplicationScoped;
+// import javax.enterprise.context.RequestScoped;
+// import javax.enterprise.context.SessionScoped;
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+
+
+
+
+

1.16. Dependencies

+
+

Please note that with Jakarta EE the dependencies have changed. +When you want to start with Jakarta EE you should use these dependencies to get the annotations for dependency injection:

+
+
+
+
<!-- Basic injection annotations (JSR-330) -->
+<dependency>
+  <groupId>jakarta.inject</groupId>
+  <artifactId>jakarta.inject-api</artifactId>
+</dependency>
+<!-- Basic lifecycle and security annotations (JSR-250)-->
+<dependency>
+  <groupId>jakarta.annotation</groupId>
+  <artifactId>jakarta.annotation-api</artifactId>
+</dependency>
+<!-- Context and dependency injection API (JSR-365) -->
+<dependency>
+  <groupId>jakarta.enterprise</groupId>
+  <artifactId>jakarta.enterprise.cdi-api</artifactId>
+</dependency>
+
+
+
+

Please note that with quarkus you will get them as transitive dependencies out of the box. +The above Jakarta EE dependencies replace these JEE dependencies:

+
+
+
+
<!-- Basic injection annotations (JSR-330) -->
+<dependency>
+  <groupId>javax.inject</groupId>
+  <artifactId>javax.inject</artifactId>
+</dependency>
+<!-- Basic lifecycle and security annotations (JSR-250)-->
+<dependency>
+  <groupId>javax.annotation</groupId>
+  <artifactId>javax.annotation-api</artifactId>
+</dependency>
+<!-- Context and dependency injection API (JSR-365) -->
+<dependency>
+  <groupId>jakarta.enterprise</groupId>
+  <artifactId>jakarta.enterprise.cdi-api</artifactId>
+</dependency>
+
+
+ +
+

==BLOB support

+
+
+

BLOB stands for Binary Large Object. A BLOB may be an image, an office document, ZIP archive or any other multimedia object. +Often these BLOBs are large. If this is the case you need to take care that you do not copy all the blob data into your application heap, e.g. when providing them via a REST service. +This could easily lead to performance problems or out of memory errors. +The solution for that problem is "streaming" those BLOBs directly from the database to the client. To demonstrate how this can be accomplished, devonfw provides an example.

+
+
+
+

1.17. Further Reading

+ +
+ +
+

==Common

+
+
+

In our coding-conventions we define a clear packaging and layering. +However, there is always cross-cutting code that does not belong to a specific layer such as generic helpers, general code for configuration or integration, etc. +Therefore, we define a package segment common that can be used as «layer» for such cross-cutting code. +Code from any other layer is allowed to access such common code (at least within the same component).

+
+
+ +
+

==Java Persistence API

+
+
+

For mapping java objects to a relational database we use the Java Persistence API (JPA). +As JPA implementation we recommend to use Hibernate. For general documentation about JPA and Hibernate follow the links above as we will not replicate the documentation. Here you will only find guidelines and examples how we recommend to use it properly. The following examples show how to map the data of a database to an entity. As we use JPA we abstract from SQL here. However, you will still need a DDL script for your schema and during maintenance also database migrations. Please follow our SQL guide for such artifacts.

+
+
+
+

1.18. Entity

+
+

Entities are part of the persistence layer and contain the actual data. They are POJOs (Plain Old Java Objects) on which the relational data of a database is mapped and vice versa. The mapping is configured via JPA annotations (javax.persistence). Usually an entity class corresponds to a table of a database and a property to a column of that table. A persistent entity instance then represents a row of the database table.

+
+
+
A Simple Entity
+
+

The following listing shows a simple example:

+
+
+
+
@Entity
+@Table(name="TEXTMESSAGE")
+public class MessageEntity extends ApplicationPersistenceEntity implements Message {
+
+  private String text;
+
+  public String getText() {
+    return this.text;
+  }
+
+  public void setText(String text) {
+    this.text = text;
+  }
+ }
+
+
+
+

The @Entity annotation defines that instances of this class will be entities which can be stored in the database. The @Table annotation is optional and can be used to define the name of the corresponding table in the database. If it is not specified, the simple name of the entity class is used instead.

+
+
+

In order to specify how to map the attributes to columns we annotate the corresponding getter methods (technically private field annotation is also possible but approaches can not be mixed). +The @Id annotation specifies that a property should be used as primary key. +With the help of the @Column annotation it is possible to define the name of the column that an attribute is mapped to as well as other aspects such as nullable or unique. If no column name is specified, the name of the property is used as default.

+
+
+

Note that every entity class needs a constructor with public or protected visibility that does not have any arguments. Moreover, neither the class nor its getters and setters may be final.

+
+
+

Entities should be simple POJOs and not contain business logic.

+
+
+
+
Entities and Datatypes
+
+

Standard datatypes like Integer, BigDecimal, String, etc. are mapped automatically by JPA. Custom datatypes are mapped as serialized BLOB by default, which is typically undesired. +In order to map atomic custom datatypes (implementations of SimpleDatatype) we implement an AttributeConverter. Here is a simple example:

+
+
+
+
@Converter(autoApply = true)
+public class MoneyAttributeConverter implements AttributeConverter<Money, BigDecimal> {
+
+  public BigDecimal convertToDatabaseColumn(Money attribute) {
+    return attribute.getValue();
+  }
+
+  public Money convertToEntityAttribute(BigDecimal dbData) {
+    return new Money(dbData);
+  }
+}
+
+
+
+

The annotation @Converter is detected by the JPA vendor if the annotated class is in the packages to scan. Further, autoApply = true implies that the converter is automatically used for all properties of the handled datatype. Therefore all entities with properties of that datatype will automatically be mapped properly (in our example Money is mapped as BigDecimal).

+
+
+

In case you have a composite datatype that you need to map to multiple columns the JPA does not offer a real solution. As a workaround you can use a bean instead of a real datatype and declare it as @Embeddable. If you are using Hibernate you can implement CompositeUserType. Via the @TypeDef annotation it can be registered to Hibernate. If you want to annotate the CompositeUserType implementation itself you also need another annotation (e.g. MappedSuperclass though not technically correct) so it is found by the scan.

+
+
+
Enumerations
+
+

By default JPA maps Enums via their ordinal. Therefore the database will only contain the ordinals (0, 1, 2, etc.). So, inside the database you can not easily understand their meaning. Using @Enumerated with EnumType.STRING allows to map the enum values to their name (Enum.name()). Both approaches are fragile when it comes to code changes and refactoring (if you change the order of the enum values or rename them) after the application is deployed to production. If you want to avoid this and get a robust mapping you can define a dedicated string in each enum value for database representation that you keep untouched. Then you treat the enum just like any other custom datatype.

+
+
+
+
BLOB
+
+

If binary or character large objects (BLOB/CLOB) should be used to store the value of an attribute, e.g. to store an icon, the @Lob annotation should be used as shown in the following listing:

+
+
+
+
@Lob
+public byte[] getIcon() {
+  return this.icon;
+}
+
+
+
+ + + + + +
+ + +Using a byte array will cause problems if BLOBs get large because the entire BLOB is loaded into the RAM of the server and has to be processed by the garbage collector. For larger BLOBs the type Blob and streaming should be used. +
+
+
+
+
public Blob getAttachment() {
+  return this.attachment;
+}
+
+
+
+
+
Date and Time
+
+

To store date and time related values, the temporal annotation can be used as shown in the listing below:

+
+
+
+
@Temporal(TemporalType.TIMESTAMP)
+public java.util.Date getStart() {
+  return start;
+}
+
+
+
+

Until Java 8 the Java data type java.util.Date (or Joda-Time) has to be used. +TemporalType defines the granularity. In this case, a precision of nanoseconds is used. If this granularity is not wanted, TemporalType.DATE can be used instead, which only has a granularity of milliseconds. +Mixing these two granularities can cause problems when comparing one value to another. This is why we only use TemporalType.TIMESTAMP.

+
+
+
+
QueryDSL and Custom Types
+
+

Using the Aliases API of QueryDSL might result in an InvalidDataAccessApiUsageException when using custom datatypes in entity properties. This can be circumvented in two steps:

+
+
+
    +
  1. +

    Ensure you have the following maven dependencies in your project (core module) to support custom types via the Aliases API:

    +
    +
    +
    <dependency>
    +  <groupId>org.ow2.asm</groupId>
    +  <artifactId>asm</artifactId>
    +</dependency>
    +<dependency>
    +  <groupId>cglib</groupId>
    +  <artifactId>cglib</artifactId>
    +</dependency>
    +
    +
    +
  2. +
  3. +

    Make sure, that all your custom types used in entities provide a non-argument constructor with at least visibility level protected.

    +
  4. +
+
+
+
+
+
Primary Keys
+
+

We only use simple Long values as primary keys (IDs). +By default it is auto generated (@GeneratedValue(strategy=GenerationType.AUTO)). +This is already provided by the class com.devonfw.<projectName>.general.dataaccess.api.AbstractPersistenceEntity within the classic project structure respectively com.devonfw.<projectName>.general.domain.model.AbstractPersistenceEntity within the modern project structure, that you can extend.

+
+
+

The reason for this recommendation is simply because using a number (Long) is the most efficient representation for the database. +You may also consider using other types like String or UUID or even composite custom datatypes and this is technically possible. +However, please consider that the primary key is used to lookup the row from the database table, also in foreign keys and thus in JOINs. +Please note that your project sooner or later may reach some complexity where performance really matters. +Working on big data and performing JOINs when using types such as String (VARCHAR[2]) as primary and foreign keys will kill your performance. +You are still free to make a different choice and devonfw only gives recommendations but does not want to dictate you what to do. +However, you have been warned about the consequences. +If you are well aware of what you are doing, you can still use different types of primary keys. +In such case, create your own entity not extending AbstractPersistenceEntity or create your own copy of AbstractPersistenceEntity with a different name and a different type of primary key.

+
+
+

In case you have business oriented keys (often as String), you can define an additional property for it and declare it as unique (@Column(unique=true)). +Be sure to include "AUTO_INCREMENT" in your sql table field ID to be able to persist data (or similar for other databases).

+
+
+
+
+

1.19. Relationships

+
+
n:1 and 1:1 Relationships
+
+

Entities often do not exist independently but are in some relation to each other. For example, for every period of time one of the StaffMember’s of the restaurant example has worked, which is represented by the class WorkingTime, there is a relationship to this StaffMember.

+
+
+

The following listing shows how this can be modeled using JPA:

+
+
+
+
...
+
+@Entity
+public class WorkingTimeEntity {
+   ...
+
+   private StaffMemberEntity staffMember;
+
+   @ManyToOne
+   @JoinColumn(name="STAFFMEMBER")
+   public StaffMemberEntity getStaffMember() {
+      return this.staffMember;
+   }
+
+   public void setStaffMember(StaffMemberEntity staffMember) {
+      this.staffMember = staffMember;
+   }
+}
+
+
+
+

To represent the relationship, an attribute of the type of the corresponding entity class that is referenced has been introduced. The relationship is a n:1 relationship, because every WorkingTime belongs to exactly one StaffMember, but a StaffMember usually worked more often than once.
+This is why the @ManyToOne annotation is used here. For 1:1 relationships the @OneToOne annotation can be used which works basically the same way. To be able to save information about the relation in the database, an additional column in the corresponding table of WorkingTime is needed which contains the primary key of the referenced StaffMember. With the name element of the @JoinColumn annotation it is possible to specify the name of this column.

+
+
+
+
1:n and n:m Relationships
+
+

The relationship of the example listed above is currently a unidirectional one, as there is a getter method for retrieving the StaffMember from the WorkingTime object, but not vice versa.

+
+
+

To make it a bidirectional one, the following code has to be added to StaffMember:

+
+
+
+
  private Set<WorkingTimeEntity> workingTimes;
+
+  @OneToMany(mappedBy="staffMember")
+  public Set<WorkingTimeEntity> getWorkingTimes() {
+    return this.workingTimes;
+  }
+
+  public void setWorkingTimes(Set<WorkingTimeEntity> workingTimes) {
+    this.workingTimes = workingTimes;
+  }
+
+
+
+

To make the relationship bidirectional, the tables in the database do not have to be changed. Instead the column that corresponds to the attribute staffMember in class WorkingTime is used, which is specified by the mappedBy element of the @OneToMany annotation. Hibernate will search for corresponding WorkingTime objects automatically when a StaffMember is loaded.

+
+
+

The problem with bidirectional relationships is that if a WorkingTime object is added to the set or list workingTimes in StaffMember, this does not have any effect in the database unless +the staffMember attribute of that WorkingTime object is set. That is why devon4j advises not to use bidirectional relationships but to use queries instead. How to do this is shown here. If a bidirectional relationship should be used nevertheless, appropriate add and remove methods must be used.

+
+
+

For 1:n and n:m relations, devon4j demands that (unordered) Sets and no other collection types are used, as shown in the listing above. The only exception is when an ordering is really needed; in that case (sorted) lists can be used.
+For example, if WorkingTime objects should be sorted by their start time, this could be done like this:

+
+
+
+
  private List<WorkingTimeEntity> workingTimes;
+
+  @OneToMany(mappedBy = "staffMember")
+  @OrderBy("startTime asc")
+  public List<WorkingTimeEntity> getWorkingTimes() {
+    return this.workingTimes;
+  }
+
+  public void setWorkingTimes(List<WorkingTimeEntity> workingTimes) {
+    this.workingTimes = workingTimes;
+  }
+
+
+
+

The value of the @OrderBy annotation consists of an attribute name of the class followed by asc (ascending) or desc (descending).

+
+
+

To store information about a n:m relationship, a separate table has to be used, as one column cannot store several values (at least if the database schema is in first normal form).
+For example if one wanted to extend the example application so that all ingredients of one FoodDrink can be saved and to model the ingredients themselves as entities (e.g. to store additional information about them), this could be modeled as follows (extract of class FoodDrink):

+
+
+
+
  private Set<IngredientEntity> ingredients;
+
+  @ManyToMany()
+  @JoinTable
+  public Set<IngredientEntity> getIngredients() {
+    return this.ingredients;
+  }
+
+  public void setOrders(Set<IngredientEntity> ingredients) {
+    this.ingredients = ingredients;
+  }
+
+
+
+

Information about the relation is stored in a table called BILL_ORDER that has to have two columns, one for referencing the Bill, the other one for referencing the Order. Note that the @JoinTable annotation is not needed in this case because a separate table is the default solution here (same for n:m relations) unless there is a mappedBy element specified.

+
+
+

For 1:n relationships this solution has the disadvantage that more joins (in the database system) are needed to get a Bill with all the Orders it refers to. This might have a negative impact on performance so that the solution to store a reference to the Bill row/entity in the Order’s table is probably the better solution in most cases.

+
+
+

Note that bidirectional n:m relationships are not allowed for applications based on devon4j. Instead a third entity has to be introduced, which "represents" the relationship (it has two n:1 relationships).

+
+
+
+
Eager vs. Lazy Loading
+
+

Using JPA it is possible to use either lazy or eager loading. Eager loading means that for entities retrieved from the database, other entities that are referenced by these entities are also retrieved, whereas lazy loading means that this is only done when they are actually needed, i.e. when the corresponding getter method is invoked.

+
+
+

Applications based on devon4j are strongly advised to always use lazy loading. The JPA defaults are:

+
+
+
    +
  • +

    @OneToMany: LAZY

    +
  • +
  • +

    @ManyToMany: LAZY

    +
  • +
  • +

    @ManyToOne: EAGER

    +
  • +
  • +

    @OneToOne: EAGER

    +
  • +
+
+
+

So at least for @ManyToOne and @OneToOne you always need to override the default by providing fetch = FetchType.LAZY.

+
+
+ + + + + +
+ + +Please read the performance guide. +
+
+
+
+
Cascading Relationships
+
+

For relations it is also possible to define whether operations are cascaded (like a recursion) to the related entity. +By default, nothing is done in these situations. This can be changed by using the cascade property of the annotation that specifies the relation type (@OneToOne, @ManyToOne, @OneToMany, @ManyToMany). This property accepts a CascadeType that offers the following options:

+
+
+
    +
  • +

    PERSIST (for EntityManager.persist, relevant for inserting transient entities into the DB)

    +
  • +
  • +

    REMOVE (for EntityManager.remove to delete entity from DB)

    +
  • +
  • +

    MERGE (for EntityManager.merge)

    +
  • +
  • +

    REFRESH (for EntityManager.refresh)

    +
  • +
  • +

    DETACH (for EntityManager.detach)

    +
  • +
  • +

    ALL (cascade all of the above operations)

    +
  • +
+
+
+

See here for more information.

+
+
+
+
Typesafe Foreign Keys using IdRef
+
+

For simple usage you can use Long for all your foreign keys. +However, as an optional pattern for advanced and type-safe usage, we offer IdRef.

+
+
+
+
+

1.20. Embeddable

+
+

An embeddable Object is a way to group properties of an entity into a separate Java (child) object. Unlike with relationships, the embeddable is not a separate entity and its properties are stored (embedded) in the same table together with the entity. This is helpful to structure and reuse groups of properties.

+
+
+

The following example shows an Address implemented as an embeddable class:

+
+
+
+
@Embeddable
+public class AddressEmbeddable {
+
+  private String street;
+  private String number;
+  private Integer zipCode;
+  private String city;
+
+  @Column(name="STREETNUMBER")
+  public String getNumber() {
+    return number;
+  }
+
+  public void setNumber(String number) {
+    this.number = number;
+  }
+
+  ...  // other getter and setter methods, equals, hashCode
+}
+
+
+
+

As you can see an embeddable is similar to an entity class, but with an @Embeddable annotation instead of the @Entity annotation and without primary key or modification counter. +An Embeddable does not exist on its own but in the context of an entity. +As a simplification Embeddables do not require a separate interface and ETO as the bean-mapper will create a copy automatically when converting the owning entity to an ETO. +However, in this case the embeddable becomes part of your api module that therefore needs a dependency on the JPA.

+
+
+

In addition to that the methods equals(Object) and hashCode() need to be implemented as this is required by Hibernate (it is not required for entities because they can be unambiguously identified by their primary key). For some hints on how to implement the hashCode() method please have a look here.

+
+
+

Using this AddressEmbeddable inside an entity class can be done like this:

+
+
+
+
  private AddressEmbeddable address;
+
+  @Embedded
+  public AddressEmbeddable getAddress() {
+    return this.address;
+  }
+
+  public void setAddress(AddressEmbeddable address) {
+    this.address = address;
+  }
+}
+
+
+
+

The @Embedded annotation needs to be used for embedded attributes. Note that if in all columns of the embeddable (here Address) are null, then the embeddable object itself is also null inside the entity. This has to be considered to avoid NullPointerException’s. Further this causes some issues with primitive types in embeddable classes that can be avoided by only using object types instead.

+
+
+
+

1.21. Inheritance

+
+

Just like normal java classes, entity classes can inherit from others. The only difference is that you need to specify how to map a class hierarchy to database tables. Generic abstract super-classes for entities can simply be annotated with @MappedSuperclass.

+
+
+

For all other cases the JPA offers the annotation @Inheritance with the property strategy taking an InheritanceType that has the following options:

+
+
+
+
+
    +
  • +

    SINGLE_TABLE: This strategy uses a single table that contains all columns needed to store all entity-types of the entire inheritance hierarchy. If a column is not needed for an entity because of its type, there is a null value in this column. An additional column is introduced, which denotes the type of the entity (called dtype).

    +
  • +
  • +

    TABLE_PER_CLASS: For each concrete entity class there is a table in the database that can store such an entity with all its attributes. An entity is only saved in the table corresponding to its most concrete type. To get all entities of a super type, joins are needed.

    +
  • +
  • +

    JOINED: In this case there is a table for every entity class including abstract classes, which contains only the columns for the persistent properties of that particular class. Additionally there is a primary key column in every table. To get an entity of a class that is a subclass of another one, joins are needed.

    +
  • +
+
+
+
+
+

Each of the three approaches has its advantages and drawbacks, which are discussed in detail here. In most cases, the first one should be used, because it is usually the fastest way to do the mapping, as no joins are needed when retrieving, searching or persisting entities. Moreover it is rather simple and easy to understand. +One major disadvantage is that the first approach could lead to a table with a lot of null values, which might have a negative impact on the database size.

+
+
+

The inheritance strategy has to be annotated to the top-most entity of the class hierarchy (where @MappedSuperclass classes are not considered) like in the following example:

+
+
+
+
@Entity
+@Inheritance(strategy=InheritanceType.SINGLE_TABLE)
+public abstract class MyParentEntity extends ApplicationPersistenceEntity implements MyParent {
+  ...
+}
+
+@Entity
+public class MyChildEntity extends MyParentEntity implements MyChild {
+  ...
+}
+
+@Entity
+public class MyOtherEntity extends MyParentEntity implements MyChild {
+  ...
+}
+
+
+
+

As a best practice we advise you to avoid entity hierarchies at all where possible and otherwise to keep the hierarchy as small as possible. In order to just ensure reuse or establish a common API you can consider a shared interface, a @MappedSuperclass or an @Embeddable instead of an entity hierarchy.

+
+
+
+

1.22. Repositories and DAOs

+
+

For each entity a code unit is created that groups all database operations for that entity. We recommend to use spring-data repositories for that as it is most efficient for developers. As an alternative there is still the classic approach using DAOs.

+
+
+
Concurrency Control
+
+

The concurrency control defines the way concurrent access to the same data of a database is handled. When several users (or threads of application servers) concurrently access a database, anomalies may happen, e.g. a transaction is able to see changes from another transaction although that one did not yet commit these changes. Most of these anomalies are automatically prevented by the database system, depending on the isolation level (property hibernate.connection.isolation in the jpa.xml, see here, or quarkus.datasource.jdbc.transaction-isolation-level in the application.properties).

+
+
+

Another anomaly is when two stakeholders concurrently access a record, do some changes and write them back to the database. The JPA addresses this with different locking strategies (see here).

+
+
+

As a best practice we are using optimistic locking for regular end-user services (OLTP) and pessimistic locking for batches.

+
+
+
+
Optimistic Locking
+
+

The class com.devonfw.module.jpa.persistence.api.AbstractPersistenceEntity already provides optimistic locking via a modificationCounter with the @Version annotation. Therefore JPA takes care of optimistic locking for you. When entities are transferred to clients, modified and sent back for update you need to ensure the modificationCounter is part of the game. If you follow our guides about transfer-objects and services this will also work out of the box. +You only have to care about two things:

+
+
+
    +
  • +

    How to deal with optimistic locking in relationships?
    +Assume an entity A contains a collection of B entities. Should there be a locking conflict if one user modifies an instance of A while another user in parallel modifies an instance of B that is contained in the other instance? To address this, take a look at FeatureForceIncrementModificationCounter.

    +
  • +
  • +

    What should happen in the UI if an OptimisticLockException occurred?
    +According to KISS our recommendation is that the user gets an error displayed that tells him to do his change again on the recent data. Try to design your system and the work processing in a way to keep such conflicts rare and you are fine.

    +
  • +
+
+
+
+
Pessimistic Locking
+
+

For back-end services and especially for batches optimistic locking is not suitable. A human user shall not cause a large batch process to fail because he was editing the same entity. Therefore such use-cases use pessimistic locking, which gives them a kind of priority over the human users. +In your DAO implementation you can provide methods that do pessimistic locking via EntityManager operations that take a LockModeType. Here is a simple example:

+
+
+
+
  getEntityManager().lock(entity, LockModeType.READ);
+
+
+
+

When using the lock(Object, LockModeType) method with LockModeType.READ, Hibernate will issue a SELECT …​ FOR UPDATE. This means that no one else can update the entity (see here for more information on the statement). If LockModeType.WRITE is specified, Hibernate issues a SELECT …​ FOR UPDATE NOWAIT instead, which has the same meaning as the statement above, but if there is already a lock, the program will not wait for this lock to be released. Instead, an exception is raised.
+Use one of the types if you want to modify the entity later on, for read only access no lock is required.

+
+
+

As you might have noticed, the behavior of Hibernate deviates from what one would expect by looking at the LockModeType (especially LockModeType.READ should not cause a SELECT …​ FOR UPDATE to be issued). The framework actually deviates from what is specified in the JPA for unknown reasons.

+
+
+
+
+

1.23. Database Auditing

+ +
+
+

1.24. Testing Data-Access

+
+

For testing of Entities and Repositories or DAOs see testing guide.

+
+
+
+

1.25. Principles

+
+

We strongly recommend these principles:

+
+
+
    +
  • +

    Use the JPA where ever possible and use vendor (hibernate) specific features only for situations when JPA does not provide a solution. In the latter case consider first if you really need the feature.

    +
  • +
  • +

    Create your entities as simple POJOs and use JPA to annotate the getters in order to define the mapping.

    +
  • +
  • +

    Keep your entities simple and avoid putting advanced logic into entity methods.

    +
  • +
+
+
+
+

1.26. Database Configuration

+
+

For details on the configuration of the database connection and database logging of the individual framework, please refer to the respective configuration guide.

+
+
+

For spring see here.

+
+
+

For quarkus see here.

+
+
+
Database Migration
+ +
+
+
Pooling
+
+

You typically want to pool JDBC connections to boost performance by recycling previous connections. There are many libraries available to do connection pooling. We recommend to use HikariCP. For Oracle RDBMS see here.

+
+
+
+
+

1.27. Security

+
+
SQL-Injection
+
+

A common security threat is SQL-injection. Never build queries with string concatenation or your code might be vulnerable as in the following example:

+
+
+
+
  String query = "Select op from OrderPosition op where op.comment = " + userInput;
+  return getEntityManager().createQuery(query).getResultList();
+
+
+
+

Via the parameter userInput an attacker can inject SQL (JPQL) and execute arbitrary statements in the database causing extreme damage.

+
+
+

In order to prevent such injections you have to strictly follow our rules for queries:

+
+
+ +
+
+
+
Limited Permissions for Application
+
+

We suggest that you operate your application with a database user that has limited permissions, so that it cannot modify the SQL schema (e.g. drop tables). For initializing the schema (DDL) or to do schema migrations use a separate user that is not used by the application itself.

+
+ +
+

Queries

The Java Persistence API (JPA) defines its own query language, the Java Persistence Query Language (JPQL) (see also JPQL tutorial), which is similar to SQL but operates on entities and their attributes instead of tables and columns.

+
+
+

The simplest CRUD-Queries (e.g. find an entity by its ID) are already build in the devonfw CRUD functionality (via Repository or DAO). For other cases you need to write your own query. We distinguish between static and dynamic queries. Static queries have a fixed JPQL query string that may only use parameters to customize the query at runtime. Instead, dynamic queries can change their clauses (WHERE, ORDER BY, JOIN, etc.) at runtime depending on the given search criteria.

+
+
+
+
Static Queries
+
+

E.g. to find all DishEntries (from MTS sample app) that have a price not exceeding a given maxPrice we write the following JPQL query:

+
+
+
+
SELECT dish FROM DishEntity dish WHERE dish.price <= :maxPrice
+
+
+
+

Here dish is used as alias (variable name) for our selected DishEntity (what refers to the simple name of the Java entity class). With dish.price we are referring to the Java property price (getPrice()/setPrice(…​)) in DishEntity. A named variable provided from outside (the search criteria at runtime) is specified with a colon (:) as prefix. Here with :maxPrice we reference to a variable that needs to be set via query.setParameter("maxPrice", maxPriceValue). JPQL also supports indexed parameters (?) but they are discouraged because they easily cause confusion and mistakes.

+
+
+
Using Queries to Avoid Bidirectional Relationships
+
+

With the usage of queries it is possible to avoid exposing relationships or modelling bidirectional relationships, which have some disadvantages (see relationships). This is especially desired for relationships between entities of different business components. +So for example to get all OrderLineEntities for a specific OrderEntity without using the orderLines relation from OrderEntity the following query could be used:

+
+
+
+
SELECT line FROM OrderLineEntity line WHERE line.order.id = :orderId
+
+
+
+
+
+
Dynamic Queries
+
+

For dynamic queries, we use the JPA module for Querydsl. Querydsl also supports other modules such as MongoDB and Apache Lucene. It allows to implement queries in a powerful but readable and type-safe way (unlike Criteria API). If you already know JPQL, you will quickly be able to read and write Querydsl code. It feels like JPQL but implemented in Java instead of plain text.

+
+
+

To use Querydsl in your Maven project, add the following dependencies:

+
+
+
+
<dependencies>
+
+    <dependency>
+        <groupId>com.querydsl</groupId>
+        <artifactId>querydsl-apt</artifactId>
+        <version>${querydsl.version}</version>
+        <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+        <groupId>com.querydsl</groupId>
+        <artifactId>querydsl-jpa</artifactId>
+        <version>${querydsl.version}</version>
+    </dependency>
+
+</dependencies>
+
+
+
+

Next, configure the annotation processing tool (APT) plugin:

+
+
+
+
<project>
+  <build>
+    <plugins>
+      ...
+      <plugin>
+        <groupId>com.mysema.maven</groupId>
+        <artifactId>apt-maven-plugin</artifactId>
+        <version>1.1.3</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>process</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>target/generated-sources/java</outputDirectory>
+              <processor>com.querydsl.apt.jpa.JPAAnnotationProcessor</processor>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      ...
+    </plugins>
+  </build>
+</project>
+
+
+
+

Here is an example from our sample application:

+
+
+
+
  public List<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    QDishEntity dish = QDishEntity.dishEntity;
+    JPAQuery<DishEntity> query = new JPAQuery<OrderEntity>(getEntityManager());
+    query.from(dish);
+
+    Range<BigDecimal> priceRange = criteria.getPriceRange();
+    if (priceRange != null) {
+      BigDecimal min = priceRange.getMin();
+      if (min != null) {
+        query.where(dish.price.goe(min));
+      }
+      BigDecimal max = priceRange.getMax();
+      if (max != null) {
+        query.where(dish.price.loe(max));
+      }
+    }
+    String name = criteria.getName();
+    if ((name != null) && (!name.isEmpty())) {
+      query.where(dish.name.eq(name));
+    }
+    query.orderBy(dish.price.asc(), dish.name.asc());
+    return query.fetch();
+  }
+
+
+
+

In this example, we use the so called Q-types (QDishEntity). These are classes generated at build time by the Querydsl annotation processor from entity classes. The Q-type classes can be used as static types representative of the original entity class.

+
+
+

The query.from(dish) method call defines the query source, in this case the dish table. The where method defines a filter. For example, the first call uses the goe operator to filter out any dishes that are not greater or equal to the minimal price. Further operators can be found here.

+
+
+

The orderBy method is used to sort the query results according to certain criteria. Here, we sort the results first by their price and then by their name, both in ascending order. To sort in descending order, use .desc(). To partition query results into groups of rows, see the groupBy method.

+
+
+

For spring, devon4j provides another approach that you can use for your Spring applications to implement Querydsl logic without having to use these metaclasses. An example can be found here.

+
+
+
+
Native Queries
+
+

Spring Data supports the use of native queries. Native queries use simple native SQL syntax that is not parsed in JPQL. This allows you to use all the features that your database supports. +The downside to this is that database portability is lost due to the absence of an abstraction layer. Therefore, the queries may not work with another database because it may use a different syntax.

+
+
+

You can implement a native query using @Query annotation with the nativeQuery attribute set to true:

+
+
+
+
@Query(value="...", nativeQuery=true)
+
+
+
+ + + + + +
+ + +This will not work with Quarkus because Quarkus does not support native queries by using the @Query annotation (see here). +
+
+
+

You can also implement native queries directly using the EntityManager API and the createNativeQuery method. +This approach also works with Quarkus.

+
+
+
+
Query query = entityManager.createNativeQuery("SELECT * FROM Product", ProductEntity.class);
+List<ProductEntity> products = query.getResultList();
+
+
+
+ + + + + +
+ + +Be sure to use the name of the table when using native queries, while you must use the entity name when implementing queries with JPQL. +
+
+
+
+
Using Wildcards
+
+

For flexible queries it is often required to allow wildcards (especially in dynamic queries). While users intuitively expect glob syntax, the SQL and JPQL standards work differently. Therefore, a mapping is required. devonfw provides this on a lower level with LikePatternSyntax and on a higher level with QueryUtil (see QueryHelper.newStringClause(…​)).

+
+
+
+
Pagination
+
+

When dealing with large amounts of data, an efficient method of retrieving the data is required. Fetching the entire data set each time would be too time consuming. Instead, Paging is used to process only small subsets of the entire data set.

+
+
+

If you are using Spring Data repositories you will get pagination support out of the box by providing the interfaces Page and Pageable:

+
+
+
Listing 1. repository
+
+
Page<DishEntity> findAll(Pageable pageable);
+
+
+
+

Then you can create a Pageable object and pass it to the method call as follows:

+
+
+
+
int page = criteria.getPageNumber();
+int size = criteria.getPageSize();
+Pageable pageable = PageRequest.of(page, size);
+Page<DishEntity> dishes = dishRepository.findAll(pageable);
+
+
+
+
Paging with Querydsl
+
+

Pagination is also supported for dynamic queries with Querydsl:

+
+
+
+
  public Page<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    QDishEntity dish = QDishEntity.dishEntity;
+    JPAQuery<DishEntity> query = new JPAQuery<DishEntity>(getEntityManager());
+    query.from(dish);
+
+    // conditions
+
+    int page = criteria.getPageNumber();
+    int size = criteria.getPageSize();
+    Pageable pageable = PageRequest.of(page, size);
+    query.offset(pageable.getOffset());
+    query.limit(pageable.getPageSize());
+
+    List<DishEntity> dishes = query.fetch();
+    return new PageImpl<>(dishes, pageable, dishes.size());
+  }
+
+
+
+
+
Pagination example
+
+

For the table entity we can make a search request by accessing the REST endpoint with pagination support like in the following examples:

+
+
+
+
POST mythaistar/services/rest/tablemanagement/v1/table/search
+{
+  "pagination": {
+    "size":2,
+    "total":true
+  }
+}
+
+//Response
+{
+    "pagination": {
+        "size": 2,
+        "page": 1,
+        "total": 11
+    },
+    "result": [
+        {
+            "id": 101,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 1,
+            "state": "OCCUPIED"
+        },
+        {
+            "id": 102,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 2,
+            "state": "FREE"
+        }
+    ]
+}
+
+
+
+ + + + + +
+ + +As we are requesting with the total property set to true the server responds with the total count of rows for the query. +
+
+
+

For retrieving a concrete page, we provide the page attribute with the desired value. Here we also left out the total property so the server doesn’t incur on the effort to calculate it:

+
+
+
+
POST mythaistar/services/rest/tablemanagement/v1/table/search
+{
+  "pagination": {
+    "size":2,
+    "page":2
+  }
+}
+
+//Response
+
+{
+    "pagination": {
+        "size": 2,
+        "page": 2,
+        "total": null
+    },
+    "result": [
+        {
+            "id": 103,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 3,
+            "state": "FREE"
+        },
+        {
+            "id": 104,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 4,
+            "state": "FREE"
+        }
+    ]
+}
+
+
+
+
+
Pagination in devon4j-spring
+
+

For spring applications, devon4j also offers its own solution for pagination. You can find an example of this here.

+
+
+
+
+
Query Meta-Parameters
+
+

Queries can have meta-parameters that are provided via SearchCriteriaTo. Besides paging (see above) we also get timeout support.

+
+
+
+
Advanced Queries
+
+

Writing queries can sometimes get rather complex. The current examples given above only showed very simple basics. Within this topic a lot of advanced features need to be considered like:

+
+
+ +
+
+

This list is just containing the most important aspects. As we can not cover all these topics here, they are linked to external documentation that can help and guide you.

+
+ +
+

==Spring Data +Spring Data JPA is supported by both Spring and Quarkus. However, in Quarkus this approach still has some limitations. For detailed information, see the official Quarkus Spring Data guide.

+
+
+
+
Motivation
+
+

The benefits of Spring Data are (for examples and explanations see next sections):

+
+
+
    +
  • +

    All you need is one single repository interface for each entity. No need for a separate implementation or other code artifacts like XML descriptors, NamedQueries class, etc.

    +
  • +
  • +

    You have all information together in one place (the repository interface) that actually belongs together (whereas in the classic approach you have the static queries in an XML file, constants to them in NamedQueries class and referencing usages in DAO implementation classes).

    +
  • +
  • +

    Static queries are most simple to realize as you do not need to write any method body. This means you can develop faster.

    +
  • +
  • +

    Support for paging is already built-in. Again, for static query methods there is nothing you have to do except using the paging objects in the signature.

    +
  • +
  • +

    Still you have the freedom to write custom implementations via default methods within the repository interface (e.g. for dynamic queries).

    +
  • +
+
+
+
+
Dependency
+
+

In case you want to switch to or add Spring Data support to your Spring or Quarkus application, all you need is to add the respective maven dependency:

+
+
+
Listing 2. spring
+
+
<dependency>
+  <groupId>org.springframework.boot</groupId>
+  <artifactId>spring-boot-starter-data-jpa</artifactId>
+</dependency>
+
+
+
+
Listing 3. quarkus
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-spring-data-jpa</artifactId>
+</dependency>
+
+
+
+
+
Repository
+
+

For each entity «Entity»Entity an interface is created with the name «Entity»Repository extending JpaRepository. +Such repository is the analogy to a Data-Access-Object (DAO) used in the classic approach or when Spring Data is not an option.

+
+
+
Listing 4. Repository
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long> {
+
+}
+
+
+
+

The Spring Data repository provides some basic implementations for accessing data, e.g. returning all instances of a type (findAll) or returning an instance by its ID (findById).

+
+
+
+
Custom method implementation
+
+

In addition, repositories can be enriched with additional functionality, e.g. to add QueryDSL functionality or to override the default implementations, by using so called repository fragments:

+
+
+
Example
+
+

The following example shows how to write such a repository:

+
+
+
Listing 5. Repository
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long>, ProductFragment {
+
+  @Query("SELECT product FROM ProductEntity product" //
+      + " WHERE product.title = :title")
+  List<ProductEntity> findByTitle(@Param("title") String title);
+
+  @Query("SELECT product FROM ProductEntity product" //
+      + " WHERE product.title = :title")
+  Page<ProductEntity> findByTitlePaginated(@Param("title") String title, Pageable pageable);
+}
+
+
+
+
Listing 6. Repository fragment
+
+
public interface ProductFragment {
+  Page<ProductEntity> findByCriteria(ProductSearchCriteriaTo criteria);
+}
+
+
+
+
Listing 7. Fragment implementation
+
+
public class ProductFragmentImpl implements ProductFragment {
+  @Inject
+  EntityManager entityManager;
+
+  public Page<ProductEntity> findByCriteria(ProductSearchCriteriaTo criteria) {
+    QProductEntity product = QProductEntity.productEntity;
+    JPAQuery<ProductEntity> query = new JPAQuery<ProductEntity>(this.entityManager);
+    query.from(product);
+
+    String title = criteria.getTitle();
+    if ((title != null) && !title.isEmpty()) {
+      query.where(product.title.eq(title));
+    }
+
+    List<ProductEntity> products = query.fetch();
+    return new PageImpl<>(products, PageRequest.of(criteria.getPageNumber(), criteria.getPageSize()), products.size());
+  }
+}
+
+
+
+

This ProductRepository has the following features:

+
+
+
    +
  • +

    CRUD support from Spring Data (see JavaDoc for details).

    +
  • +
  • +

    Support for QueryDSL integration, paging and more.

    +
  • +
  • +

    A static query method findByTitle to find all ProductEntity instances from DB that have the given title. Please note the @Param annotation that links the method parameter with the variable inside the query (:title).

    +
  • +
  • +

    The same with pagination support via findByTitlePaginated method.

    +
  • +
  • +

    A dynamic query method findByCriteria showing the QueryDSL and paging integration into Spring via a fragment implementation.

    +
  • +
+
+
+

You can find an implementation of this ProductRepository in our Quarkus reference application.

+
+
+ + + + + +
+ + +In Quarkus, native and named queries via the @Query annotation are currently not supported +
+
+
+
+
Integration of Spring Data in devon4j-spring
+
+

For Spring applications, devon4j offers a proprietary solution that integrates seamlessly with QueryDSL and uses default methods instead of the fragment approach. A separate guide for this can be found here.

+
+
+
+
Custom methods without fragment approach
+
+

The fragment approach is a bit laborious, as three types (repository interface, fragment interface and fragment implementation) are always needed to implement custom methods. +We cannot simply use default methods within the repository because we cannot inject the EntityManager directly into the repository interface.

+
+
+

As a workaround, you can create a GenericRepository interface, as is done in the devon4j jpa-spring-data module.

+
+
+
+
public interface GenericRepository<E> {
+
+  EntityManager getEntityManager();
+
+  ...
+}
+
+
+
+
+
public class GenericRepositoryImpl<E> implements GenericRepository<E> {
+
+  @Inject
+  EntityManager entityManager;
+
+  @Override
+  public EntityManager getEntityManager() {
+
+    return this.entityManager;
+  }
+
+  ...
+}
+
+
+
+

Then, all your repository interfaces can extend the GenericRepository and you can implement queries directly in the repository interface using default methods:

+
+
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long>, GenericRepository<ProductEntity> {
+
+  default Page<ProductEntity> findByTitle(Title title) {
+
+    EntityManager entityManager = getEntityManager();
+    Query query = entityManager.createNativeQuery("select * from Product where title = :title", ProductEntity.class);
+    query.setParameter("title", title);
+    List<ProductEntity> products = query.getResultList();
+    return new PageImpl<>(products);
+  }
+
+  ...
+}
+
+
+
+
+
+
Drawbacks
+
+

Spring Data also has some drawbacks:

+
+
+
    +
  • +

    Some kind of magic behind the scenes that are not so easy to understand. So in case you want to extend all your repositories without providing the implementation via a default method in a parent repository interface you need to deep-dive into Spring Data. We assume that you do not need that and hope what Spring Data and devon already provides out-of-the-box is already sufficient.

    +
  • +
  • +

    The Spring Data magic also includes guessing the query from the method name. This is not easy to understand and especially to debug. Our suggestion is not to use this feature at all and either provide a @Query annotation or an implementation via default method.

    +
  • +
+
+
+
+
Limitations in Quarkus
+
+
    +
  • +

    Native and named queries are not supported using @Query annotation. You will receive something like: Build step io.quarkus.spring.data.deployment.SpringDataJPAProcessor#build threw an exception: java.lang.IllegalArgumentException: Attribute nativeQuery of @Query is currently not supported

    +
  • +
  • +

    Customizing the base repository for all repository interfaces in the code base, which is done in Spring Data by registering a class that extends SimpleJpaRepository

    +
  • +
+
+ +
+

==Data Access Object

+
+
+

The Data Access Objects (DAOs) are part of the persistence layer. +They are responsible for a specific entity and should be named «Entity»Dao and «Entity»DaoImpl. +The DAO offers the so called CRUD-functionalities (create, retrieve, update, delete) for the corresponding entity. +Additionally a DAO may offer advanced operations such as query or locking methods.

+
+
+
+
DAO Interface
+
+

For each DAO there is an interface named «Entity»Dao that defines the API. For CRUD support and common naming we derive it from the ApplicationDao interface that comes with the devon application template:

+
+
+
+
public interface MyEntityDao extends ApplicationDao<MyEntity> {
+  List<MyEntity> findByCriteria(MyEntitySearchCriteria criteria);
+}
+
+
+
+

All CRUD operations are inherited from ApplicationDao so you only have to declare the additional methods.

+
+
+
+
DAO Implementation
+
+

Implementing a DAO is quite simple. We create a class named «Entity»DaoImpl that extends ApplicationDaoImpl and implements your «Entity»Dao interface:

+
+
+
+
public class MyEntityDaoImpl extends ApplicationDaoImpl<MyEntity> implements MyEntityDao {
+
+  public List<MyEntity> findByCriteria(MyEntitySearchCriteria criteria) {
+    TypedQuery<MyEntity> query = createQuery(criteria, getEntityManager());
+    return query.getResultList();
+  }
+  ...
+}
+
+
+
+

Again you only need to implement the additional non-CRUD methods that you have declared in your «Entity»Dao interface. +In the DAO implementation you can use the method getEntityManager() to access the EntityManager from the JPA. You will need the EntityManager to create and execute queries.

+
+
+
Static queries for DAO Implementation
+
+

All static queries are declared in the file src\main\resources\META-INF\orm.xml:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<entity-mappings version="1.0" xmlns="http://java.sun.com/xml/ns/persistence/orm" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://java.sun.com/xml/ns/persistence/orm http://java.sun.com/xml/ns/persistence/orm_1_0.xsd">
+  <named-query name="find.dish.with.max.price">
+    <query><![CDATA[SELECT dish FROM DishEntity dish WHERE dish.price <= :maxPrice]]></query>
+  </named-query>
+  ...
+</entity-mappings>
+
+
+
+

When your application is started, all these static queries will be created as prepared statements. This allows better performance and also ensures that you get errors for invalid JPQL queries when you start your app rather than later when the query is used.

+
+
+

To avoid redundant occurrences of the query name (get.open.order.positions.for.order) we define a constant for each named query:

+
+
+
+
public class NamedQueries {
+  public static final String FIND_DISH_WITH_MAX_PRICE = "find.dish.with.max.price";
+}
+
+
+
+

Note that changing the name of the java constant (FIND_DISH_WITH_MAX_PRICE) can be done easily with refactoring. Further you can trace where the query is used by searching the references of the constant.

+
+
+

The following listing shows how to use this query:

+
+
+
+
public List<DishEntity> findDishByMaxPrice(BigDecimal maxPrice) {
+  Query query = getEntityManager().createNamedQuery(NamedQueries.FIND_DISH_WITH_MAX_PRICE);
+  query.setParameter("maxPrice", maxPrice);
+  return query.getResultList();
+}
+
+
+
+

Via EntityManager.createNamedQuery(String) we create an instance of Query for our predefined static query. +Next we use setParameter(String, Object) to provide a parameter (maxPrice) to the query. This has to be done for all parameters of the query.

+
+
+

Note that using the createQuery(String) method, which takes the entire query as string (that may already contain the parameter) is not allowed to avoid SQL injection vulnerabilities. +When the method getResultList() is invoked, the query is executed and the result is delivered as List. As an alternative, there is a method called getSingleResult(), which returns the entity if the query returned exactly one and throws an exception otherwise.

+
+ +
+

==JPA Performance +When using JPA the developer sometimes does not see or understand where and when statements to the database are triggered.

+
+
+
+
+

Establishing expectations Developers shouldn’t expect to sprinkle magic pixie dust on POJOs in hopes they will become persistent.

+
+
+
+— Dan Allen
+https://epdf.tips/seam-in-action.html +
+
+
+

So in case you do not understand what is going on under the hood of JPA, you will easily run into performance issues due to lazy loading and other effects.

+
+
+
+
+
N plus 1 Problem
+
+

The most prominent phenomenon is called the N+1 Problem. +We use entities from our MTS demo app as an example to explain the problem. +There is a DishEntity that has a @ManyToMany relation to +IngredientEntity. +Now we assume that we want to iterate all ingredients for a dish like this:

+
+
+
+
DishEntity dish = dao.findDishById(dishId);
+BigDecimal priceWithAllExtras = dish.getPrice();
+for (IngredientEntity ingredient : dish.getExtras()) {
+  priceWithAllExtras = priceWithAllExtras.add(ingredient.getPrice());
+}
+
+
+
+

Now dish.getExtras() is loaded lazy. Therefore the JPA vendor will provide a list with lazy initialized instances of IngredientEntity that only contain the ID of that entity. Now with every call of ingredient.getPrice() we technically trigger an SQL query statement to load the specific IngredientEntity by its ID from the database. +Now findDishById caused 1 initial query statement and for any number N of ingredients we are causing an additional query statement. This makes a total of N+1 statements. As causing statements to the database is an expensive operation with a lot of overhead (creating connection, etc.) this ends in bad performance and is therefore a problem (the N+1 Problem).

+
+
+
+
Solving N plus 1 Problem
+
+

To solve the N+1 Problem you need to change your code to only trigger a single statement instead. This can be achieved in various ways. The most universal solution is to use FETCH JOIN in order to pre-load the nested N child entities into the first level cache of the JPA vendor implementation. This will behave very similar as if the @ManyToMany relation to IngredientEntity was having FetchType.EAGER but only for the specific query and not in general. Because changing @ManyToMany to FetchType.EAGER would cause bad performance for other usecases where only the dish but not its extra ingredients are needed. For this reason all relations, including @OneToOne should always be FetchType.LAZY. Back to our example we simply replace dao.findDishById(dishId) with dao.findDishWithExtrasById(dishId) that we implement by the following JPQL query:

+
+
+
+
SELECT dish FROM DishEntity dish
+  LEFT JOIN FETCH dish.extras
+  WHERE dish.id = :dishId
+
+
+
+

The rest of the code does not have to be changed but now dish.getExtras() will get the IngredientEntity from the first level cache where it was fetched by the initial query above.

+
+
+

Please note that if you only need the sum of the prices from the extras you can also create a query using an aggregator function:

+
+
+
+
SELECT sum(dish.extras.price) FROM DishEntity dish
+
+
+
+

As you can see you need to understand the concepts in order to get good performance.

+
+
+

There are many advanced topics such as creating database indexes or calculating statistics for the query optimizer to get the best performance. For such advanced topics we recommend to have a database expert in your team that cares about such things. However, understanding the N+1 Problem and its solutions is something that every Java developer in the team needs to understand.

+
+ +
+

==IdRef

+
+
+

IdRef can be used to reference other entities in TOs in order to make them type-safe and semantically more expressive. +It is an optional concept in devon4j for more complex applications that make intensive use of relations and foreign keys.

+
+
+
+
Motivation
+
+

Assuming you have a method signature like the following:

+
+
+
+
Long approve(Long cId, Long cuId);
+
+
+
+

So what are the parameters? What is returned?

+
+
+

IdRef is just a wrapper for a Long used as foreign key. This makes our signature much more expressive and self-explanatory:

+
+
+
+
IdRef<Contract> approve(IdRef<Contract> cId, IdRef<Customer> cuId);
+
+
+
+

Now we can easily see, that the result and the parameters are foreign-keys and which entity they are referring to via their generic type. +We can read the javadoc of these entities from the generic type and understand the context. +Finally, when passing IdRef objects to such methods, we get compile errors in case we accidentally place parameters in the wrong order.

+
+
+
+
IdRef and Mapping
+
+

In order to easily map relations from entities to transfer-objects and back, we can easily also put according getters and setters into our entities:

+
+
+
+
public class ContractEntity extends ApplicationPersistenceEntity implements Contract {
+
+  private CustomerEntity customer;
+
+  ...
+
+  @ManyToOne(fetch = FetchType.LAZY)
+  @JoinColumn(name = "CUSTOMER_ID")
+  public CustomerEntity getCustomer() {
+    return this.customer;
+  }
+
+  public void setCustomer(CustomerEntity customer) {
+    this.customer = customer;
+  }
+
+  @Transient
+  public IdRef<Customer> getCustomerId() {
+    return IdRef.of(this.customer);
+  }
+
+  public void setCustomerId(IdRef<Customer> customerId) {
+    this.customer = JpaHelper.asEntity(customerId, CustomerEntity.class);
+  }
+}
+
+
+
+

Now, ensure that you have the same getters and setters for customerId in your Eto:

+
+
+
+
public class ContractEto extends AbstractEto implements Contract {
+
+  private IdRef<Customer> customerId;
+
+  ...
+
+  public IdRef<Customer> getCustomerId() {
+    return this.customerId;
+  }
+
+  public void setCustomerId(IdRef<Customer> customerId) {
+    this.customerId = customerId;
+  }
+}
+
+
+
+

This way the bean-mapper can automatically map from your entity (ContractEntity) to your Eto (ContractEto) and vice-versa.

+
+
+
+
JpaHelper and EntityManager access
+
+

In the above example we used JpaHelper.asEntity to convert the foreign key (IdRef<Customer>) to the according entity (CustomerEntity). +This will internally use EntityManager.getReference to properly create a JPA entity. +The alternative "solution" that may be used with Long instead of IdRef is typically:

+
+
+
+
  public void setCustomerId(IdRef<Customer> customerId) {
+    Long id = null;
+    if (customerId != null) {
+      id = customerId.getId();
+    }
+    if (id == null) {
+      this.customer = null;
+    } else {
+      this.customer = new CustomerEntity();
+      this.customer.setId(id);
+    }
+  }
+
+
+
+

While this "solution" works in most cases, we discovered some more complex cases, where it fails with very strange hibernate exceptions. +When cleanly creating the entity via EntityManager.getReference instead it is working in all cases. +So how can JpaHelper.asEntity as a static method access the EntityManager? +Therefore we need to initialize this as otherwise you may see this exception:

+
+
+
+
java.lang.IllegalStateException: EntityManager has not yet been initialized!
+	at com.devonfw.module.jpa.dataaccess.api.JpaEntityManagerAccess.getEntityManager(JpaEntityManagerAccess.java:38)
+	at com.devonfw.module.jpa.dataaccess.api.JpaHelper.asEntity(JpaHelper.java:49)
+
+
+
+

For main usage in your application we assume that there is only one instance of EntityManager. +Therefore we can initialize this instance during the spring boot setup. +This is what we provide for you in JpaInitializer for you +when creating a devon4j app.

+
+
+
JpaHelper and spring-test
+
+

Further, you also want your code to work in integration tests. +Spring-test provides a lot of magic under the hood to make integration testing easy for you. +To boost the performance when running multiple tests, spring is smart and avoids creating the same spring-context multiple times. +Therefore it stores these contexts so that if a test-case is executed with a specific spring-configuration that has already been set up before, +the same spring-context can be reused instead of creating it again. +However, your tests may have multiple spring configurations leading to multiple spring-contexts. +Even worse these tests can run in any order leading to switching between spring-contexts forth and back. +Therefore, a static initializer during the spring boot setup can lead to strange errors as you can get the wrong EntityManager instance. +In order to fix such problems, we provide a solution pattern via DbTest ensuring for every test, +that the proper instance of EntityManager is initialized. +Therefore you should derive directly or indirectly (e.g. via ComponentDbTest and SubsystemDbTest) from DbTest or adopt your own way to apply this pattern to your tests, when using JpaHelper. +This already happens if you are extending ApplicationComponentTest or ApplicationSubsystemTest.

+
+
+ +
+

==Transaction Handling

+
+
+

For transaction handling we use AOP to add transaction control via annotations as aspect. +This is done by annotating your code with the @Transactional annotation. +You can either annotate your container bean at class level to make all methods transactional or you can annotate individual methods to make them transactional:

+
+
+
+
  @Transactional
+  public Output getData(Input input) {
+    ...
+  }
+
+
+
+
+
+
JTA Imports
+
+

Here are the import statements for transaction support:

+
+
+
+
import javax.transaction.Transactional;
+
+
+
+ + + + + +
+ + +Use the above import statement to follow JEE and avoid using org.springframework.transaction.annotation.Transactional. +
+
+
+
+
JTA Dependencies
+
+

Please note that with Jakarta EE the dependencies have changed. +When you want to start with Jakarta EE you should use these dependencies to get the annotations for dependency injection:

+
+
+
+
<!-- Java Transaction API (JTA) -->
+<dependency>
+  <groupId>jakarta.transaction</groupId>
+  <artifactId>jakarta.transaction-api</artifactId>
+</dependency>
+
+
+
+

Please note that with Quarkus you will get them as transitive dependencies out of the box. +The above Jakarta EE dependencies replace these JEE dependencies:

+
+
+
+
<!-- Java Transaction API (JTA) -->
+<dependency>
+  <groupId>javax.transaction</groupId>
+  <artifactId>javax.transaction-api</artifactId>
+</dependency>
+
+
+
+
+
Handling constraint violations
+
+

Using @Transactional magically wraps transaction handling around your code. +As constraints are checked by the database at the end when the transaction gets committed, a constraint violation will be thrown by this aspect outside your code. +In case you have to handle constraint violations manually, you have to do that in code outside the logic that is annotated with @Transactional. +This may be done in a service operation by catching a ConstraintViolationException (org.hibernate.exception.ConstraintViolationException for hibernate). +As a generic approach you can solve this via REST exception handling.

+
+
+
+
Batches
+
+

Transaction control for batches is a lot more complicated and is described in the batch layer.

+
+
+ +
+

==SQL

+
+
+

For general guides on dealing or avoiding SQL, preventing SQL-injection, etc. you should study domain layer.

+
+
+
+
+

1.28. Naming Conventions

+
+

Here we define naming conventions that you should follow whenever you write SQL files:

+
+
+
    +
  • +

    All SQL-Keywords in UPPER CASE

    +
  • +
  • +

    Indentation should be 2 spaces as suggested by devonfw for every format.

    +
  • +
+
+
+
DDL
+
+

The naming conventions for database constructs (tables, columns, triggers, constraints, etc.) should be aligned with your database product and their operators. +However, when you have the freedom of choice and a modern case-sensitive database, you can simply use your code conventions also for database constructs to avoid explicitly mapping each and every property (e.g. RestaurantTable vs. RESTAURANT_TABLE).

+
+
+
    +
  • +

    Define columns and constraints inline in the statement to create the table

    +
  • +
  • +

    Indent column types so they all start in the same text column

    +
  • +
  • +

    Constraints should be named explicitly (to get a reasonable hint in error messages) with:

    +
    +
      +
    • +

      PK_«table» for primary key (name optional here as PK constraint are fundamental)

      +
    • +
    • +

      FK_«table»_«property» for foreign keys («table» and «property» are both on the source where the foreign key is defined)

      +
    • +
    • +

      UC_«table»_«property»[_«propertyN»]* for unique constraints

      +
    • +
    • +

      CK_«table»_«check» for check constraints («check» describes the check, if it is defined on a single property it should start with the property).

      +
    • +
    +
    +
  • +
  • +

    Old RDBMS had hard limitations for names (e.g. 30 characters). Please note that recent databases have overcome this very low length limitations. However, keep your names short but precise and try to define common abbreviations in your project for according (business) terms. Especially do not just truncate the names at the limit.

    +
  • +
  • +

    If possible add comments on table and columns to help DBAs understanding your schema. This is also honored by many tools (not only DBA-tools).

    +
  • +
+
+
+

Here is a brief example of a DDL:

+
+
+
+
CREATE SEQUENCE HIBERNATE_SEQUENCE START WITH 1000000;
+
+-- *** Table ***
+CREATE TABLE RESTAURANT_TABLE (
+  ID                   NUMBER(19) NOT NULL,
+  MODIFICATION_COUNTER INTEGER NOT NULL,
+  SEATS                INTEGER NOT NULL,
+  CONSTRAINT PK_TABLE PRIMARY KEY(ID)
+);
+COMMENT ON TABLE RESTAURANT_TABLE IS 'The physical tables inside the restaurant.';
+-- *** Order ***
+CREATE TABLE RESTAURANT_ORDER (
+  ID                   NUMBER(19) NOT NULL,
+  MODIFICATION_COUNTER INTEGER NOT NULL,
+  TABLE_ID             NUMBER(19) NOT NULL,
+  TOTAL                DECIMAL(5, 2) NOT NULL,
+  CREATION_DATE        TIMESTAMP NOT NULL,
+  PAYMENT_DATE         TIMESTAMP,
+  STATUS               VARCHAR2(10 CHAR) NOT NULL,
+  CONSTRAINT PK_ORDER PRIMARY KEY(ID),
+  CONSTRAINT FK_ORDER_TABLE_ID FOREIGN KEY(TABLE_ID) REFERENCES RESTAURANT_TABLE(ID)
+);
+COMMENT ON TABLE RESTAURANT_ORDER IS 'An order and bill at the restaurant.';
+...
+
+
+
+

ATTENTION: Please note that TABLE and ORDER are reserved keywords in SQL and you should avoid using such keywords to prevent problems.

+
+
+
+
Data
+
+

For insert, update, delete, etc. of data SQL scripts should additionally follow these guidelines:

+
+
+
    +
  • +

    Inserts always with the same order of columns in blocks for each table.

    +
  • +
  • +

    Insert column values always starting with ID, MODIFICATION_COUNTER, [DTYPE, ] …​

    +
  • +
  • +

    List columns with fixed length values (boolean, number, enums, etc.) before columns with free text to support alignment of multiple insert statements

    +
  • +
  • +

    Pro Tip: Get familiar with column mode of advanced editors such as notepad++ when editing large blocks of similar insert statements.

    +
  • +
+
+
+
+
INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (0, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (1, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (2, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (3, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (4, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (5, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (6, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (7, 1, 8);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (8, 1, 8);
+...
+
+
+
+

See also Database Migrations.

+
+
+ +
+

==Database Migration

+
+
+

When you have a schema-based database, +you need a solution for schema versioning and migration for your database. +A specific release of your app requires a corresponding version of the schema in the database to run. +As you want simple and continuous deployment you should automate the schema versioning and database migration.

+
+
+

The general idea is that your software product contains "scripts" to migrate the database from schema version X to version X+1. +When you begin your project you start with version 1 and with every increment of your app that needs a change to the database schema (e.g. a new table, a new column to an existing table, a new index, etc.) you add another "script" that migrates from the current to the next version. +For simplicity these versions are just sequential numbers or timestamps. +Now, the solution you choose will automatically manage the schema version in a separate metadata table in your database that stores the current schema version. +When your app is started, it will check the current version inside the database from that metadata table. +As long as there are "scripts" that migrate from there to a higher version, they will be automatically applied to the database and this process is protocolled to the metadata table in your database what also updates the current schema version there. +Using this approach, you can start with an empty database what will result in all "scripts" being applied sequentially. +Also any version of your database schema can be present and you will always end up in a controlled migration to the latest schema version.

+
+
+
+
+

1.29. Options for database migration

+
+

For database migration you can choose between the following options:

+
+
+
    +
  • +

    flyway (KISS based approach with migrations as SQL)

    +
  • +
  • +

    liquibase (more complex approach with database abstraction)

    +
  • +
+
+ +
+

==Flyway

+
+
+

Flyway is a tool for database migration and schema versioning. +See why for a motivation why using flyway.

+
+
+

Flyway can be used standalone e.g. via flyway-maven-plugin or can be integrated directly into your app to make sure the database migration takes place on startup. +For simplicity we recommend to integrate flyway into your app. +However, you need to be aware that therefore your app needs database access with full schema owner permissions.

+
+
+
Organizational Advice
+
+

A few considerations with respect to project organization will help to implement maintainable Flyway migrations.

+
+
+

At first, testing and production environments must be clearly and consistently distinguished. Use the following directory structure to achieve this distinction:

+
+
+
+
  src/main/resources/db
+  src/test/resources/db
+
+
+
+

Although this structure introduces redundancies, the benefit outweighs this disadvantage. +An even more fine-grained production directory structure which contains one sub folder per release should be implemented:

+
+
+
+
  src/main/resources/db/migration/releases/X.Y/x.sql
+
+
+
+

Emphasizing that migration scripts below the current version must never be changed will aid the second advantage of migrations: it will always be clearly reproducible in which state the database currently is. +Here, it is important to mention that, if test data is required, it must be managed separately from the migration data in the following directory:

+
+
+
+
  src/test/resources/db/migration/
+
+
+
+

The migration directory is added to aid easy usage of Flyway defaults. +Of course, test data should also be managed per release just like production data.

+
+
+

With regard to content, separation of concerns (SoC) is an important goal. SoC can be achieved by distinguishing and writing multiple scripts with respect to business components/use cases (or database tables in case of large volumes of master data [1]). Comprehensible file names aid this separation.

+
+
+

It is important to have clear responsibilities regarding the database, the persistence layer (JPA), and migrations. Therefore a dedicated database expert should be in charge of any migrations performed or she should at least be informed before any change to any of the mentioned parts is applied.

+
+
+
+
Technical Configuration
+
+

Database migrations can be SQL based or Java based.

+
+
+

To enable auto migration on startup (not recommended for productive environment) set the following property in the application.properties file for an environment.

+
+
+
+
flyway.enabled=true
+flyway.clean-on-validation-error=false
+
+
+
+

For development environment it is helpful to set both properties to true in order to simplify development. For regular environments flyway.clean-on-validation-error should be false.

+
+
+

If you want to use Flyway set the following property in any case to prevent Hibernate from doing changes on the database (pre-configured by default in devonfw):

+
+
+
+
spring.jpa.hibernate.ddl-auto=validate
+
+
+
+

The setting must be communicated to and coordinated with the customer and their needs. +In acceptance testing the same configuration as for the production environment should be enabled.

+
+
+

Since migration scripts will also be versioned the end-of-line (EOL) style must be fixated according to this issue. This is however solved in flyway 4.0+ and the latest devonfw release. +Also, the version numbers of migration scripts should not consist of simple ascending integer numbers like V0001…​, V0002…​, …​ This naming may lead to problems when merging branches. Instead the usage of timestamps as version numbers will help to avoid such problems.

+
+
+
+
Naming Conventions
+
+

Database migrations should follow this naming convention: +V<version>__<description> (e.g.: V12345__Add_new_table.sql).

+
+
+

It is also possible to use Flyway for test data. To do so place your test data migrations in src/main/resources/db/testdata/ and set property

+
+
+
+
flyway.locations=classpath:db/migration/releases,classpath:db/migration/testdata
+
+
+
+

Then Flyway scans the additional location for migrations and applies all in the order specified by their version. If migrations V0001__... and V0002__... exist and a test data migration should be applied in between you can name it V0001_1__....

+
+ +
+

==Liquibase

+
+ +
+

See devon4j#303 for details and status.

+
+
+
+
Spring-boot usage
+
+

For using liquibase in spring see Using Liquibase with Spring Boot.

+
+
+
+
Quarkus usage
+
+

For uisng liquibase in quarkus see Using Liquibase.

+
+
+ +
+

==REST +REST (REpresentational State Transfer) is an inter-operable protocol for services that is more lightweight than SOAP. +However, it is no real standard and can cause confusion (see REST philosophy). +Therefore we define best practices here to guide you.

+
+
+
+
+

1.30. URLs

+
+

URLs are not case sensitive. Hence, we follow the best practice to use only lower-case-letters-with-hyphen-to-separate-words. +For operations in REST we distinguish the following types of URLs:

+
+
+
    +
  • +

    A collection URL is built from the rest service URL by appending the name of a collection. This is typically the name of an entity. Such URL identifies the entire collection of all elements of this type. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity

    +
  • +
  • +

    An element URL is built from a collection URL by appending an element ID. It identifies a single element (entity) within the collection. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/42

    +
  • +
+
+
+

To follow KISS avoid using plural forms (…​/productmanagement/v1/products vs. …​/productmanagement/v1/product/42). Always use singular forms and avoid confusions (except for the rare cases where no singular exists).

+
+
+

The REST URL scheme fits perfectly for CRUD operations. +For business operations (processing, calculation, advanced search, etc.) we simply append a collection URL with the name of the business operation. +Then we can POST the input for the business operation and get the result back. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/search

+
+
+
+

1.31. HTTP Methods

+
+

The following table defines the HTTP methods (verbs) and their meaning:

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3. Usage of HTTP methods
HTTP MethodMeaning

GET

Read data (stateless).

PUT

Create or update data.

POST

Process data.

DELETE

Delete an entity.

+
+

Please also note that for (large) bulk deletions you may be forced to use POST instead of DELETE as according to the HTTP standard DELETE must not have payload and URLs are limited in length.

+
+
+

For general recommendations on HTTP methods for collection and element URLs see REST@wikipedia.

+
+
+
+

1.32. HTTP Status Codes

+
+

Further we define how to use the HTTP status codes for REST services properly. In general the 4xx codes correspond to an error on the client side and the 5xx codes to an error on the server side.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4. Usage of HTTP status codes
HTTP CodeMeaningResponseComment

200

OK

requested result

Result of successful GET

204

No Content

none

Result of successful POST, DELETE, or PUT with empty result (void return)

400

Bad Request

error details

The HTTP request is invalid (parse error, validation failed)

401

Unauthorized

none

Authentication failed

403

Forbidden

none

Authorization failed

404

Not found

none

Either the service URL is wrong or the requested resource does not exist

500

Server Error

error code, UUID

Internal server error occurred, in case of an exception, see REST exception handling

+
+
+

1.33. JAX-RS

+
+

For implementing REST services we use the JAX-RS standard. +As payload encoding we recommend JSON bindings using Jackson. +To implement a REST service you simply add JAX-RS annotations. +Here is a simple example:

+
+
+
+
@ApplicationScoped
+@Path("/imagemanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public class ImagemanagementRestService {
+
+  @Inject
+  private Imagemanagement imagemanagement;
+
+  @GET
+  @Path("/image/{id}/")
+  public ImageDto getImage(@PathParam("id") long id) {
+
+    return this.imagemanagement.findImage(id);
+  }
+}
+
+
+
+

Here we can see a REST service for the business component imagemanagement. The method getImage can be accessed via HTTP GET (see @GET) under the URL path imagemanagement/v1/image/{id} (see @Path annotations) where {id} is the ID of the requested image and will be extracted from the URL and provided as parameter id to the method getImage. It will return its result (ImageDto) as JSON (see @Produces annotation - you can also extend RestService marker interface that defines these annotations for JSON). As you can see it delegates to the logic component imagemanagement that contains the actual business logic while the service itself only exposes this logic via HTTP. The REST service implementation is a regular CDI bean that can use dependency injection.

+
+
+ + + + + +
+ + +With JAX-RS it is important to make sure that each service method is annotated with the proper HTTP method (@GET,@POST,etc.) to avoid unnecessary debugging. So you should take care not to forget to specify one of these annotations. +
+
+
+
Service-Interface
+
+

You may also separate API and implementation in case you want to reuse the API for service-client:

+
+
+
+
@Path("/imagemanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public interface ImagemanagementRestService {
+
+  @GET
+  @Path("/image/{id}/")
+  ImageEto getImage(@PathParam("id") long id);
+
+}
+
+@Named("ImagemanagementRestService")
+public class ImagemanagementRestServiceImpl implements ImagemanagementRestService {
+
+  @Override
+  public ImageEto getImage(long id) {
+
+    return this.imagemanagement.findImage(id);
+  }
+
+}
+
+
+
+
+
JAX-RS Configuration
+
+

Starting from CXF 3.0.0 it is possible to enable the auto-discovery of JAX-RS roots.

+
+
+

When the JAX-RS server is instantiated, all the scanned root and provider beans (beans annotated with javax.ws.rs.Path and javax.ws.rs.ext.Provider) are configured.

+
+
+
+
REST Exception Handling
+
+

For exceptions, a service needs to have an exception facade that catches all exceptions and handles them by writing proper log messages and mapping them to a HTTP response with a corresponding HTTP status code. +For this, devon4j provides a generic solution via RestServiceExceptionFacade that you can use within your Spring applications. You need to follow the exception guide in order for it to work out of the box because the facade needs to be able to distinguish between business and technical exceptions. +To implement a generic exception facade in Quarkus, follow the Quarkus exception guide.

+
+
+

Now your service may throw exceptions, but the facade will automatically handle them for you.

+
+
+

The general format for returning an error to the client is as follows:

+
+
+
+
{
+  "message": "A human-readable message describing the error",
+  "code": "A code identifying the concrete error",
+  "uuid": "An identifier (generally the correlation id) to help identify corresponding requests in logs"
+}
+
+
+
+
+
Pagination details
+
+

We recommend to use spring-data repositories for database access that already comes with pagination support. +Therefore, when performing a search, you can include a Pageable object. +Here is a JSON example for it:

+
+
+
+
{ "pageSize": 20, "pageNumber": 0, "sort": [] }
+
+
+
+

By increasing the pageNumber the client can browse and page through the hits.

+
+
+

As a result you will receive a Page. +It is a container for your search results just like a Collection but additionally contains pagination information for the client. +Here is a JSON example:

+
+
+
+
{ "totalElements": 1022,
+  "pageable": { "pageSize": 20, "pageNumber": 0 },
+  "content": [ ... ] }
+
+
+
+

The totalElements property contains the total number of hits. +This can be used by the client to compute the total number of pages and render the pagination links accordingly. +Via the pageable property the client gets back the Pageable properties from the search request. +The actual hits for the current page are returned as array in the content property.

+
+
+
+
+

1.34. REST Testing

+
+

For testing REST services in general consult the testing guide.

+
+
+

For manual testing REST services there are browser plugins:

+
+
+ +
+
+
+

1.35. Security

+
+

Your services are the major entry point to your application. Hence security considerations are important here.

+
+
+
CSRF
+
+

A common security threat is CSRF for REST services. Therefore all REST operations that are performing modifications (PUT, POST, DELETE, etc. - all except GET) have to be secured against CSRF attacks. See CSRF how to do this.

+
+
+
+
JSON top-level arrays
+
+

OWASP earlier suggested to never return JSON arrays at the top-level, to prevent attacks without rationale. +We dug deep and found anatomy-of-a-subtle-json-vulnerability. +To sum it up the attack is many years old and does not work in any recent or relevant browser. +Hence it is fine to use arrays as top-level result in a JSON REST service (means you can return List<Foo> in a Java JAX-RS service).

+
+
+ +
+

==JSON

+
+
+

JSON (JavaScript Object Notation) is a popular format to represent and exchange data especially for modern web-clients. For mapping Java objects to JSON and vice-versa there is no official standard API. We use the established and powerful open-source solution Jackson. +Due to problems with the wiki of fasterxml you should try this alternative link: Jackson/AltLink.

+
+
+
+
+

1.36. Configure JSON Mapping

+
+

In order to avoid polluting business objects with proprietary Jackson annotations (e.g. @JsonTypeInfo, @JsonSubTypes, @JsonProperty) we propose to create a separate configuration class. Every devonfw application (sample or any app created from our app-template) therefore has a class called ApplicationObjectMapperFactory that extends ObjectMapperFactory from the devon4j-rest module. It looks like this:

+
+
+
+
@Named("ApplicationObjectMapperFactory")
+public class ApplicationObjectMapperFactory extends ObjectMapperFactory {
+
+  public ApplicationObjectMapperFactory() {
+    super();
+    // JSON configuration code goes here
+  }
+}
+
+
+
+
+

1.37. JSON and Inheritance

+
+

If you are using inheritance for your objects mapped to JSON then polymorphism can not be supported out-of-the-box. So in general avoid polymorphic objects in JSON mapping. However, this is not always possible. +Have a look at the following example from our sample application:

+
+
+
+inheritance class diagram +
+
Figure 2. Transfer-Objects using Inheritance
+
+
+

Now assume you have a REST service operation as Java method that takes a ProductEto as argument. As this is an abstract class the server needs to know the actual sub-class to instantiate. +We typically do not want to specify the classname in the JSON as this should be an implementation detail and not part of the public JSON format (e.g. in case of a service interface). Therefore we use a symbolic name for each polymorphic subtype that is provided as virtual attribute @type within the JSON data of the object:

+
+
+
+
{ "@type": "Drink", ... }
+
+
+
+

Therefore you add configuration code to the constructor of ApplicationObjectMapperFactory. Here you can see an example from the sample application:

+
+
+
+
setBaseClasses(ProductEto.class);
+addSubtypes(new NamedType(MealEto.class, "Meal"), new NamedType(DrinkEto.class, "Drink"),
+  new NamedType(SideDishEto.class, "SideDish"));
+
+
+
+

We use setBaseClasses to register all top-level classes of polymorphic objects. Further we declare all concrete polymorphic sub-classes together with their symbolic name for the JSON format via addSubtypes.

+
+
+
+

1.38. Custom Mapping

+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to define a custom mapping. If you create objects dedicated for the JSON mapping you can easily avoid such situations. When this is not suitable follow these instructions to define the mapping:

+
+
+
    +
  1. +

    As an example, the use of JSR354 (javax.money) is appreciated in order to process monetary amounts properly. However, without custom mapping, the default mapping of Jackson will produce the following JSON for a MonetaryAmount:

    +
    +
    +
    "currency": {"defaultFractionDigits":2, "numericCode":978, "currencyCode":"EUR"},
    +"monetaryContext": {...},
    +"number":6.99,
    +"factory": {...}
    +
    +
    +
    +

    As clearly can be seen, the JSON contains too much information and reveals implementation secrets that do not belong here. Instead the JSON output expected and desired would be:

    +
    +
    +
    +
    "currency":"EUR","amount":"6.99"
    +
    +
    +
    +

    Even worse, when we send the JSON data to the server, Jackson will see that MonetaryAmount is an interface and does not know how to instantiate it so the request will fail. +Therefore we need a customized Serializer.

    +
    +
  2. +
  3. +

    We implement MonetaryAmountJsonSerializer to define how a MonetaryAmount is serialized to JSON:

    +
    +
    +
    public final class MonetaryAmountJsonSerializer extends JsonSerializer<MonetaryAmount> {
    +
    +  public static final String NUMBER = "amount";
    +  public static final String CURRENCY = "currency";
    +
    +  public void serialize(MonetaryAmount value, JsonGenerator jgen, SerializerProvider provider) throws ... {
    +    if (value != null) {
    +      jgen.writeStartObject();
    +      jgen.writeFieldName(MonetaryAmountJsonSerializer.CURRENCY);
    +      jgen.writeString(value.getCurrency().getCurrencyCode());
    +      jgen.writeFieldName(MonetaryAmountJsonSerializer.NUMBER);
    +      jgen.writeString(value.getNumber().toString());
    +      jgen.writeEndObject();
    +    }
    +  }
    +
    +
    +
    +

    For composite datatypes it is important to wrap the info as an object (writeStartObject() and writeEndObject()). MonetaryAmount provides the information we need via its getCurrency() and getNumber() methods, so we can easily write them into the JSON data.

    +
    +
  4. +
  5. +

    Next, we implement MonetaryAmountJsonDeserializer to define how a MonetaryAmount is deserialized back as Java object from JSON:

    +
    +
    +
    public final class MonetaryAmountJsonDeserializer extends AbstractJsonDeserializer<MonetaryAmount> {
    +  protected MonetaryAmount deserializeNode(JsonNode node) {
    +    BigDecimal number = getRequiredValue(node, MonetaryAmountJsonSerializer.NUMBER, BigDecimal.class);
    +    String currencyCode = getRequiredValue(node, MonetaryAmountJsonSerializer.CURRENCY, String.class);
    +    MonetaryAmount monetaryAmount =
    +        MonetaryAmounts.getAmountFactory().setNumber(number).setCurrency(currencyCode).create();
    +    return monetaryAmount;
    +  }
    +}
    +
    +
    +
    +

    For composite datatypes we extend from AbstractJsonDeserializer as this makes our task easier. So we already get a JsonNode with the parsed payload of our datatype. Based on this API it is easy to retrieve individual fields from the payload without taking care of their order, etc. +AbstractJsonDeserializer also provides methods such as getRequiredValue to read required fields and get them converted to the desired basis datatype. So we can easily read the amount and currency and construct an instance of MonetaryAmount via the official factory API.

    +
    +
  6. +
  7. +

    Finally we need to register our custom (de)serializers with the following configuration code in the constructor of ApplicationObjectMapperFactory:+

    +
  8. +
+
+
+
+
  SimpleModule module = getExtensionModule();
+  module.addDeserializer(MonetaryAmount.class, new MonetaryAmountJsonDeserializer());
+  module.addSerializer(MonetaryAmount.class, new MonetaryAmountJsonSerializer());
+
+
+
+

Now we can read and write MonetaryAmount from and to JSON as expected.

+
+
+ +
+

==XML

+
+
+

XML (Extensible Markup Language) is a W3C standard format for structured information. It has a large eco-system of additional standards and tools.

+
+
+

In Java there are many different APIs and frameworks for accessing, producing and processing XML. For the devonfw we recommend to use JAXB for mapping Java objects to XML and vice-versa. Further there is the popular DOM API for reading and writing smaller XML documents directly. When processing large XML documents StAX is the right choice.

+
+
+
+

1.39. JAXB

+
+

We use JAXB to serialize Java objects to XML or vice-versa.

+
+
+
JAXB and Inheritance
+
+

Use @XmlSeeAlso annotation to provide sub-classes. +See section "Collective Polymorphism" described here.

+
+
+
+
JAXB Custom Mapping
+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to define a custom mapping. If you create dedicated objects for the XML mapping you can easily avoid such situations. When this is not suitable use @XmlJavaTypeAdapter and provide an XmlAdapter implementation that handles the mapping. +For details see here.

+
+
+
+
+

1.40. Security

+
+

To prevent XML External Entity attacks, follow JAXP Security Guide and enable FSP.

+
+
+ +
+

==SOAP +SOAP is a common protocol for services that is rather complex and heavy. It allows to build inter-operable and well specified services (see WSDL). SOAP is transport neutral, which is not only an advantage. We strongly recommend to use HTTPS transport and ignore additional complex standards like WS-Security and use established HTTP-Standards such as RFC2617 (and RFC5280).

+
+
+
+

1.41. JAX-WS

+
+

For building web-services with Java we use the JAX-WS standard. +There are two approaches:

+
+
+
    +
  • +

    code first

    +
  • +
  • +

    contract first

    +
  • +
+
+
+

Here is an example in case you define a code-first service.

+
+
+
Web-Service Interface
+
+

We define a regular interface to define the API of the service and annotate it with JAX-WS annotations:

+
+
+
+
@WebService
+public interface TablemanagmentWebService {
+
+  @WebMethod
+  @WebResult(name = "message")
+  TableEto getTable(@WebParam(name = "id") String id);
+
+}
+
+
+
+
+
Web-Service Implementation
+
+

And here is a simple implementation of the service:

+
+
+
+
@Named
+@WebService(endpointInterface = "com.devonfw.application.mtsj.tablemanagement.service.api.ws.TablemanagmentWebService")
+public class TablemanagementWebServiceImpl implements TablemanagmentWebService {
+
+  private Tablemanagement tableManagement;
+
+  @Override
+  public TableEto getTable(String id) {
+
+    return this.tableManagement.findTable(id);
+  }
+}
+
+
+
+
+
+

1.42. SOAP Custom Mapping

+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to write adapters for JAXB (see XML).

+
+
+
+

1.43. SOAP Testing

+
+

For testing SOAP services in general consult the testing guide.

+
+
+

For testing SOAP services manually we strongly recommend SoapUI.

+
+
+ +
+

==Logging

+
+
+

We recommend to use SLF4J as API for logging, that has become a de facto standard in Java as it has a much better design than java.util.logging offered by the JDK. +There are several implementations for SLF4J. For Spring applications our recommended implementation is Logback. Quarkus uses JBoss Logging which provides a JBoss Log Manager implementation for SLF4J. For more information on logging in Quarkus, see the Quarkus logging guide.

+
+
+
+

1.44. Logging Dependencies

+
+

To use Logback in your Spring application, you need to include the following dependencies:

+
+
+
+
<!-- SLF4J as logging API -->
+<dependency>
+  <groupId>org.slf4j</groupId>
+  <artifactId>slf4j-api</artifactId>
+</dependency>
+<!-- Logback as logging implementation  -->
+<dependency>
+  <groupId>ch.qos.logback</groupId>
+  <artifactId>logback-classic</artifactId>
+</dependency>
+<!-- JSON logging for cloud-native log monitoring -->
+<dependency>
+  <groupId>net.logstash.logback</groupId>
+  <artifactId>logstash-logback-encoder</artifactId>
+</dependency>
+
+
+
+

In devon4j these dependencies are provided by the devon4j-logging module.

+
+
+

In Quarkus, SLF4J and the slf4j-jboss-logmanager are directly included in the Quarkus core runtime and can be used out of the box.

+
+
+
+

1.45. Logger Access

+
+

The general pattern for accessing loggers from your code is a static logger instance per class using the following pattern:

+
+
+
+
import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class MyClass {
+  private static final Logger LOG = LoggerFactory.getLogger(MyClass.class);
+  ...
+}
+
+
+
+

For detailed documentation how to use the logger API check the SLF4j manual.

+
+
+ + + + + +
+ + +In case you are using devonfw-ide and Eclipse you can just type LOG and hit [ctrl][space] to insert the code pattern including the imports into your class. +
+
+
+
Lombok
+
+

In case you are using Lombok, you can simply use the @Slf4j annotation in your class. This causes Lombok to generate the logger instance for you.

+
+
+
+
+

1.46. Log-Levels

+
+

We use a common understanding of the log-levels as illustrated by the following table. +This helps for better maintenance and operation of the systems.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 5. Log-levels
Log-levelDescriptionImpactActive Environments

FATAL

Only used for fatal errors that prevent the application to work at all (e.g. startup fails or shutdown/restart required)

Operator has to react immediately

all

ERROR

An abnormal error indicating that the processing failed due to technical problems.

Operator should check for known issue and otherwise inform development

all

WARNING

A situation where something worked not as expected. E.g. a business exception or user validation failure occurred.

No direct reaction required. Used for problem analysis.

all

INFO

Important information such as context, duration, success/failure of request or process

No direct reaction required. Used for analysis.

all

DEBUG

Development information that provides additional context for debugging problems.

No direct reaction required. Used for analysis.

development and testing

TRACE

Like DEBUG but exhaustive information and for code that is run very frequently. Will typically cause large log-files.

No direct reaction required. Used for problem analysis.

none (turned off by default)

+
+

Exceptions (with their stack trace) should only be logged on FATAL or ERROR level. For business exceptions typically a WARNING including the message of the exception is sufficient.

+
+
+
Configuration of Logback
+
+

The configuration of logback happens via the logback.xml file that you should place into src/main/resources of your app. +For details consult the logback configuration manual.

+
+
+ + + + + +
+ + +Logback also allows to overrule the configuration with a logback-test.xml file that you may put into src/test/resources or into a test-dependency. +
+
+
+
+
Configuration in Quarkus
+
+

There are several options you can set in the application.properties file to configure the behaviour of the logger in Quarkus. For a detailed overview, see the corresponding part of the Quarkus guide.

+
+
+
+
+

1.47. JSON-logging

+
+

For easy integration with log-monitoring, we recommend that your app logs to standard out in JSON following JSON Lines.

+
+
+

In Spring applications, this can be achieved via logstash-logback-encoder (see dependencies). In Quarkus, it can be easily achieved using the quarkus-logging-json extension (see here for more details).

+
+
+

This will produce log-lines with the following format (example formatted for readability):

+
+
+
+
{
+  "timestamp":"2000-12-31T23:59:59.999+00:00",
+  "@version":"1",
+  "message":"Processing 4 order(s) for shipment",
+  "logger_name":"com.myapp.order.logic.UcManageOrder",
+  "thread_name":"http-nio-8081-exec-6",
+  "level":"INFO",
+  "level_value":20000,
+  "appname":"myapp"
+}
+
+
+
+
Adding custom values to JSON log with Logstash
+
+

The JSON encoder even supports logging custom properties for your log-monitoring. +The trick is to use the class net.logstash.logback.argument.StructuredArguments for adding the arguments to you log message, e.g.

+
+
+
+
import static net.logstash.logback.argument.StructuredArguments.v;
+
+...
+    LOG.info("Request with {} and {} took {} ms.", v("url", url), v("status", statusCode), v("duration", millis));
+...
+
+
+
+

This will produce a JSON log-line with the following properties:

+
+
+
+
...
+  "message":"Request with url=https://api/service/v1/ordermanagement/order and status=200 took duration=251 ms",
+  "url":"https://api/service/v1/ordermanagement/order",
+  "status":"200",
+  "duration":"251",
+...
+
+
+
+

As you can quickly see besides the human readable message you also have the structured properties url, status and duration that can be extremly valuable to configure dashboards in your log-monitoring that visualize success/failure ratio as well as performance of your requests.

+
+
+
+
+

1.48. Classic log-files

+
+ + + + + +
+ + +In devon4j, we strongly recommend using JSON logging instead of classic log files. The following section refers only to devon4j Spring applications that use Logback. +
+
+
+

Even though we do not recommend anymore to write classical log-files to the local disc, here you can still find our approach for it.

+
+
+
Maven-Integration
+
+

In the pom.xml of your application add this dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java</groupId>
+  <artifactId>devon4j-logging</artifactId>
+</dependency>
+
+
+
+

The above dependency already adds transitive dependencies to SLF4J and logback. +Also it comes with configuration snippets that can be included from your logback.xml file (see configuration).

+
+
+

The logback.xml to write regular log-files can look as following:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<configuration scan="true" scanPeriod="60 seconds">
+  <property resource="com/devonfw/logging/logback/application-logging.properties" />
+  <property name="appname" value="MyApp"/>
+  <property name="logPath" value="../logs"/>
+  <include resource="com/devonfw/logging/logback/appenders-file-all.xml" />
+  <include resource="com/devonfw/logging/logback/appender-console.xml" />
+
+  <root level="DEBUG">
+    <appender-ref ref="ERROR_APPENDER"/>
+    <appender-ref ref="INFO_APPENDER"/>
+    <appender-ref ref="DEBUG_APPENDER"/>
+    <appender-ref ref="CONSOLE_APPENDER"/>
+  </root>
+
+  <logger name="org.springframework" level="INFO"/>
+</configuration>
+
+
+
+

The provided logback.xml is configured to use variables defined on the config/application.properties file. +In our example, the log file path points to ../logs/ in order to log to the tomcat log directory when starting tomcat from the bin folder. +Change it according to your custom needs.

+
+
+
Listing 8. config/application.properties
+
+
log.dir=../logs/
+
+
+
+
+
Log Files
+
+

The classical approach uses the following log files:

+
+
+
    +
  • +

    Error Log: Includes log entries to detect errors.

    +
  • +
  • +

    Info Log: Used to analyze system status and to detect bottlenecks.

    +
  • +
  • +

    Debug Log: Detailed information for error detection.

    +
  • +
+
+
+

The log file name pattern is as follows:

+
+
+
+
«LOGTYPE»_log_«HOST»_«APPLICATION»_«TIMESTAMP».log
+
+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 6. Segments of Logfilename
ElementValueDescription

«LOGTYPE»

info, error, debug

Type of log file

«HOST»

e.g. mywebserver01

Name of server, where logs are generated

«APPLICATION»

e.g. myapp

Name of application, which causes logs

«TIMESTAMP»

YYYY-MM-DD_HH00

date of log file

+
+

Example: +error_log_mywebserver01_myapp_2013-09-16_0900.log

+
+
+

Error log from mywebserver01 at application myapp at 16th September 2013 9am.

+
+
+
+
Output format
+
+

We use the following output format for all log entries to ensure that searching and filtering of log entries work consistently for all logfiles:

+
+
+
+
[D: «timestamp»] [P: «priority»] [C: «NDC»][T: «thread»][L: «logger»]-[M: «message»]
+
+
+
+
    +
  • +

    D: Date (Timestamp in ISO8601 format e.g. 2013-09-05 16:40:36,464)

    +
  • +
  • +

    P: Priority (the log level)

    +
  • +
  • +

    C: Correlation ID (ID to identify users across multiple systems, needed when application is distributed)

    +
  • +
  • +

    T: Thread (Name of thread)

    +
  • +
  • +

    L: Logger name (use class name)

    +
  • +
  • +

    M: Message (log message)

    +
  • +
+
+
+

Example:

+
+
+
+
[D: 2013-09-05 16:40:36,464] [P: DEBUG] [C: 12345] [T: main] [L: my.package.MyClass]-[M: My message...]
+
+
+
+ + + + + +
+ + +When using devon4j-logging, this format is used by default. To achieve this format in Quarkus, set quarkus.log.console.format=[D: %d] [P: %p] [C: %X] [T: %t] [L: %c] [M: %m]%n in your properties. +
+
+
+
+
Correlation ID
+
+

In order to correlate separate HTTP requests to services belonging to the same user / session, we provide a servlet filter called DiagnosticContextFilter. +This filter takes a provided correlation ID from the HTTP header X-Correlation-Id. +If none was found, it will generate a new correlation id as UUID. +This correlation ID is added as MDC to the logger. +Therefore, it will then be included to any log message of the current request (thread). +Further concepts such as service invocations will pass this correlation ID to subsequent calls in the application landscape. Hence you can find all log messages related to an initial request simply via the correlation ID even in highly distributed systems.

+
+
+
+
Security
+
+

In order to prevent log forging attacks you can simply use the suggested JSON logging format. +Otherwise you can use com.devonfw.module.logging.common.impl.SingleLinePatternLayout as demonstrated here in order to prevent such attacks.

+
+
+ +
+

==Monitoring

+
+
+

For monitoring a complex application landscape it is crucial to have an exact overview which applications are up and running and which are not and why. +In devonfw we only focus on topics which are most important when developing production-ready applications. +On a high level view we strongly suggest to separate the application to be monitored from the monitoring system itself. +Therefore, your application should concentrate on providing app specific data for the monitoring. +Aspects such as aggregation, visualization, search, alerting, etc. should be addressed outside of your app by a monitoring system product. +There are many products providing such a monitoring system like checkmk, icinga, SkyWalking, etc. +Please note that there is a huge list of such products and devonfw is not biased or aims to make a choice for you. +Instead please search and find the products that fit best for your requirements and infrastructure.

+
+
+
+
+

1.49. Types of monitoring

+
+

As monitoring covers a lot of different aspects, we separate the following types of monitoring and the corresponding data:

+
+
+
    +
  • +

    Log-monitoring
    +is about collecting and monitoring the logs of all apps and containers in your IT landscape. It is suitable for events such as an HTTP request with its URL, resulting status code and duration in milliseconds. Your monitoring may not react to such data in realtime. Instead it may take a delay of one or a few seconds.

    +
  • +
  • +

    Infrastructure monitoring
    +is about monitoring the (hardware) infrastructure with measures like usage of CPU, memory, disc-space, etc. This is a pure operational task and your app should have nothing to do with this. In other words it is a waste if your app tries to monitor these aspects as existing products can do this much better and your app will only see virtual machines and is unable to see the physical infrastructure.

    +
  • +
  • +

    Health check
    +is about providing internal data about the current health of your app. Typically you provide sensors with health status per component or interface to neighbour service (database connectivity, etc.).

    +
  • +
  • +

    Application Performance Monitoring
    +is about measuring performance and tracing down performance issues.

    +
  • +
+
+
+
+

1.50. Health-Check

+
+

The idea of a health check is to provide monitoring data about the current health status of your application. +This allows to integrate this specific data into the monitoring system used for your IT landscape. +In order to keep the monitoring simple and easy to integrate consider using the following best practices:

+
+
+
    +
  • +

    Use simple and established protocols such as REST instead of JMX via RMI.

    +
  • +
  • +

    Consider using recent standards such as microprofile-health.

    +
  • +
  • +

    Consider to drop access-control for your monitoring interfaces and for security prevent external access to it in your infrastructure (loadbalancers or gateways). Monitoring is only for usage within an IT landscape internally. It does not make sense for externals and end-users to access your app for reading monitoring data from a random node decided by a loadbalancer. Further, external access can easily lead to sensitive data exposure.

    +
  • +
  • +

    Consider to define different end-points per usage-scenario. So if you want the loadbalancer to ask your app monitoring for availability of each node then create a separate service URL that only provides OK or anything else for failure (NOK, 404, 500, timeout). Do not mix this with a health-check that needs more detailed information.

    +
  • +
  • +

    Also do not forget about basic features such as providing the name and the release version of your application.

    +
  • +
  • +

    Be careful to automate decisions based on monitoring and health checks. It easily turns out to be stupid if you automatically restart your pod or container because of some monitoring indicator. In the worst case a failure of a central component will cause your health-check to report down for all apps and as a result all your containers will be restarted frequently. Instead of curing problems, such decisions will cause much more harm and trouble.

    +
  • +
  • +

    Avoid causing unreasonable load with your monitoring and health-check itself. In many cases it is better to use log-monitoring or to collect monitoring data from use-cases that happen in your app anyway. If you create dummy read and write requests in your monitoring implementation you will easily turn it into a DOS-attack.

    +
  • +
+
+
+

For spring you can simply integrate app monitoring and health check via spring-boot-actuator.

+
+
+

For quarkus you can simply integrate app monitoring via micrometer or smallrye-metrics and health check via smallrye-health.

+
+ +
+

==Log-Monitoring

+
+
+

Log-monitoring is an aspect of monitoring with a strict focus on logging. +With trends towards IT landscapes with many but much smaller apps the classical approach to write log-files to the disc and let operators read those via SSH became entirely obsolete. +Nowadays we have up to hundreds or even thousands of apps that themselves are clustered into multiple nodes. +Therefore you should establish a centralized log monitoring system in the environment and let all your nodes log directly into that system. +This approach gives the following benefits:

+
+
+
    +
  • +

    all log information available in one place

    +
  • +
  • +

    full-text search across all logfiles

    +
  • +
  • +

    ability to automatically trigger alerts from specific log patterns

    +
  • +
  • +

    ability to do data-mining on logs and visualize in dashboards

    +
  • +
+
+
+
+

1.51. Options for log-monitoring

+
+

Typical products for such a log monitoring system are:

+
+
+ +
+
+

In devonfw we are not biased for any of these products. Therefore, feel free to make your choice according to the requirements of your project.

+
+
+

For Quarkus applications, you can get an insight into the topic by reading the guide about centralized log management.

+
+
+
+

1.52. API for log-monitoring

+
+

The "API" for logging to a log-monitoring system for your app is pretty simple:

+
+
+
    +
  • +

    Write your logs to standard out.

    +
  • +
  • +

    Use JSON logging as format.

    +
  • +
+
+
+

Then the container infrastructure can automatically collect your logs from standard out and directly feed those into the log monitoring system. +As a result, your app does not need to know anything about your log monitoring system and logging becomes most simple. +Further, if you do not write log-files anymore, you might not need to write any other files and therefore may not even need write permissions on the filesystem of your container. +In such case an attacker who may find a vulnerability in your app will have less attack surface in case he can not write any file.

+
+ +
+

==Application Performance Management

+
+
+

This guide gives hints how to manage, monitor and analyse performance of Java applications.

+
+
+
+

1.53. Temporary Analysis

+
+

If you are facing performance issues and want to do a punctual analysis we recommend you to use glowroot. It is ideal in cases where monitoring in your local development environment is suitable. However, it is also possible to use it in your test environment. It is entirely free and open-source. Still it is very powerful and helps to trace down bottlenecks. To get a first impression of the tool take a look at the demo.

+
+
+
JEE/WTP
+
+

In case you are forced to use an JEE application server and want to do a temporary analysis you can double click your server instance from the servers view in Eclipse and click on the link Open launch configuration in order to add the -javaagent JVM option.

+
+
+
+
+

1.54. Regular Analysis

+
+

In case you want to manage application performance regularly we recommend to use JavaMelody that can be integrated into your application. More information on javamelody is available on the JavaMelody Wiki

+
+
+
+

1.55. Alternatives

+
+ +
+
+ +
+

==Security +Security is todays most important cross-cutting concern of an application and an enterprise IT-landscape. We seriously care about security and give you detailed guides to prevent pitfalls, vulnerabilities, and other disasters. While many mistakes can be avoided by following our guidelines you still have to consider security and think about it in your design and implementation. The security guide will not only automatically prevent you from any harm, but will provide you hints and best practices already used in different software products.

+
+
+

An important aspect of security is proper authentication and authorization as described in access-control. In the following we discuss about potential vulnerabilities and protection to prevent them.

+
+
+
+

1.56. Vulnerabilities and Protection

+
+

Independent from classical authentication and authorization mechanisms there are many common pitfalls that can lead to vulnerabilities and security issues in your application such as XSS, CSRF, SQL-injection, log-forging, etc. A good source of information about this is the OWASP. +We address these common threats individually in security sections of our technological guides as a concrete solution to prevent an attack typically depends on the according technology. The following table illustrates common threats and contains links to the solutions and protection-mechanisms provided by the devonfw:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 7. Security threats and protection-mechanisms
ThreatProtectionLink to details

A1 Injection

validate input, escape output, use proper frameworks

SQL Injection

A2 Broken Authentication

encrypt all channels, use a central identity management with strong password-policy

Authentication

A3 Sensitive Data Exposure

Use secured exception facade, design your data model accordingly

REST exception handling

A4 XML External Entities

Prefer JSON over XML, ensure FSP when parsing (external) XML

XML guide

A5 Broken Access Control

Ensure proper authorization for all use-cases, use @DenyAll as default to enforce

Access-control guide especially method authorization

A6 Security Misconfiguration

Use devon4j application template and guides to avoid

tutorial-newapp and sensitive configuration

A7 Cross-Site Scripting

prevent injection (see A1) for HTML, JavaScript and CSS and understand same-origin-policy

client-layer

A8 Insecure Deserialization

Use simple and established serialization formats such as JSON, prevent generic deserialization (for polymorphic types)

JSON guide especially inheritance, XML guide

A9 Using Components with Known Vulnerabilities

subscribe to security newsletters, recheck products and their versions continuously, use devonfw dependency management

CVE newsletter and dependency check

A10 Insufficient_Logging & Monitoring

Ensure to log all security related events (login, logout, errors), establish effective monitoring

Logging guide and monitoring guide

Insecure Direct Object References

Using direct object references (IDs) only with appropriate authorization

logic-layer

Cross-Site Request Forgery (CSRF)

secure mutable service operations with an explicit CSRF security token sent in HTTP header and verified on the server

CSRF guide

Log-Forging

Escape newlines in log messages

logging security

Unvalidated Redirects and Forwards

Avoid using redirects and forwards, in case you need them do a security audit on the solution.

devonfw proposes to use rich-clients (SPA/RIA). We only use redirects for login in a safe way.

+
+
+

1.57. Advanced Security

+
+

While OWASP Top 10 covers the basic aspects of application security, there are advanced standards such as AVS. +In devonfw we address this in the +Application Security Quick Solution Guide.

+
+
+
+

1.58. Tools

+
+
Dependency Check
+
+

To address the threat Using Components with Known Vulnerabilities we recommend to use OWASP dependency check that ships with a maven plugin and can analyze your dependencies for known CVEs. +In order to run this check, you can simply call this command on any maven project:

+
+
+
+
mvn org.owasp:dependency-check-maven:6.1.5:aggregate
+
+
+
+ + + + + +
+ + +The version is just for completeness. You should check yourself for using a recent version of the plugin. +
+
+
+

If you build a devon4j spring application from our app-template you can activate the dependency check even easier with the security profile:

+
+
+
+
mvn clean install -P security
+
+
+
+

This does not run by default as it causes some overhead for the build performance. However, consider to build this in your CI at least nightly. +After the dependency check is performed, you will find the results in target/dependency-check-report.html of each module. The report will also be generated when the site is built (mvn site) even without the profile.

+
+
+
+
Penetration Testing
+
+

For penetration testing (testing for vulnerabilities) of your web application, we recommend the following tools:

+
+
+ +
+
+ +
+

==CORS support

+
+
+

When you are developing Javascript client and server application separately, you have to deal with cross domain issues. We have to request from an origin domain distinct from the target domain and the browser does not allow this.

+
+
+

So, we need to prepare the server side to accept requests from other domains. We need to cover the following points:

+
+
+
    +
  • +

    Accept request from other domains.

    +
  • +
  • +

    Accept devonfw used headers like X-CSRF-TOKEN or correlationId.

    +
  • +
  • +

    Be prepared to receive secured request (cookies).

    +
  • +
+
+
+

It is important to note that if you are using security in your request (sending cookies) you have to set withCredentials flag to true in your client side request and deal with special IE8 characteristics.

+
+
+

For more information about CORS see here. Information about the CORS headers can be found here.

+
+
+
+
+

1.59. Configuring CORS support

+
+

To enable CORS support for your application, see the advanced guides. For Spring applications see here. For Quarkus follow the official Quarkus guide.

+
+
+
+

1.60. Configuration with service mesh

+
+

If you are using a service mesh, you can also define your CORS policy directly there. Here is an example from Istio.

+
+
+ +
+

==Java Development Kit

+
+
+

The Java Development Kit is an implementation of the Java platform. It provides the Java Virtual Machine (JVM) and the Java Runtime Environment (JRE).

+
+
+
+

1.61. Editions

+
+

The JDK exists in different editions:

+
+
+ +
+
+

As Java is evolving and also complex maintaining a JVM requires a lot of energy. +Therefore many alternative JDK editions are unable to cope with this and support latest Java versions and according compatibility. +Unfortunately OpenJDK only maintains a specific version of Java for a relative short period of time before moving to the next major version. +In the end, this technically means that OpenJDK is continuous beta and can not be used in production for reasonable software projects. +As OracleJDK changed its licensing model and can not be used for commercial usage even during development, things can get tricky. +You may want to use OpenJDK for development and OracleJDK only in production. +However, e.g. OpenJDK 11 never released a version that is stable enough for reasonable development (e.g. javadoc tool is broken and fixes are not available of OpenJDK 11 - fixed in 11.0.3 what is only available as OracleJDK 11 or you need to go to OpenJDK 12+, what has other bugs) so in the end there is no working release of OpenJDK 11. +This more or less forces you to use OracleJDK what requires you to buy a subscription so you can use it for commercial development. +However, there is AdoptOpenJDK that provides forked releases of OpenJDK with bug-fixes what might be an option. +Anyhow, as you want to have your development environment close to production, the productively used JDK (most likely OracleJDK) should be preferred also for development.

+
+
+
+

1.62. Upgrading

+
+

Until Java 8 compatibility was one of the key aspects for Java version updates (after the mess on the Swing updates with Java2 many years ago). +However, Java 9 introduced a lot of breaking changes. +This documentation wants to share the experience we collected in devonfw when upgrading from Java 8 to newer versions. +First of all we separate runtime changes that you need if you want to build your software with JDK 8 but such that it can also run on newer versions (e.g. JRE 11) +from changes required to also build your software with more recent JDKs (e.g. JDK 11 or 12).

+
+
+
Runtime Changes
+
+

This section describes required changes to your software in order to make it run also with versions newer than Java 8.

+
+
+
Classes removed from JDK
+
+

The first thing that most users hit when running their software with newer Java versions is a ClassNotFoundException like this:

+
+
+
+
Caused by: java.lang.ClassNotFoundException: javax.xml.bind.JAXBException
+
+
+
+

As Java 9 introduced a module system with Jigsaw, the JDK that has been a monolithic mess is now a well-defined set of structured modules. +Some of the classes that used to come with the JDK moved to modules that where not available by default in Java 9 and have even been removed entirely in later versions of Java. +Therefore you should simply treat such code just like any other 3rd party component that you can add as a (maven) dependency. +The following table gives you the required hints to make your software work even with such classes / modules removed from the JDK (please note that the specified version is just a suggestion that worked, feel free to pick a more recent or more appropriate version):

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 8. Dependencies for classes removed from Java 8 since 9+
ClassGroupIdArtifactIdVersion

javax.xml.bind.*

javax.xml.bind

jaxb-api

2.3.1

com.sun.xml.bind.*

org.glassfish.jaxb

jaxb-runtime

2.3.1

java.activation.*

javax.activation

javax.activation-api

1.2.0

java.transaction.*

javax.transaction

javax.transaction-api

1.2

java.xml.ws.*

javax.xml.ws

jaxws-api

2.3.1

javax.jws.*

javax.jws

javax.jws-api

1.1

javax.annotation.*

javax.annotation

javax.annotation-api

1.3.2

+
+
+
3rd Party Updates
+
+

Further, internal and unofficial APIs (e.g. sun.misc.Unsafe) have been removed. +These are typically not used by your software directly but by low-level 3rd party libraries like asm that need to be updated. +Also simple things like the Java version have changed (from 1.8.x to 9.x, 10.x, 11.x, 12.x, etc.). +Some 3rd party libraries were parsing the Java version in a very naive way making them unable to be used with Java 9+:

+
+
+
+
Caused by: java.lang.NullPointerException
+   at org.apache.maven.surefire.shade.org.apache.commons.lang3.SystemUtils.isJavaVersionAtLeast (SystemUtils.java:1626)
+
+
+
+

Therefore the following table gives an overview of common 3rd party libraries that have been affected by such breaking changes and need to be updated to at least the specified version:

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 9. Minimum recommended versions of common 3rd party for Java 9+
GroupIdArtifactIdVersionIssue

org.apache.commons

commons-lang3

3.7

LANG-1365

cglib

cglib

3.2.9

102, 93, 133

org.ow2.asm

asm

7.1

2941

org.javassist

javassist

3.25.0-GA

194, 228, 246, 171

+
+
+
ResourceBundles
+
+

For internationalization (i18n) and localization (l10n) ResourceBundle is used for language and country specific texts and configurations as properties (e.g. MyResourceBundle_de.properties). With Java modules there are changes and impacts you need to know to get things working. The most important change is documented in the JavaDoc of ResourceBundle. However, instead of using ResourceBundleProvider and refactoring your entire code causing incompatibilities, you can simply put the resource bundles in a regular JAR on the classpath rather than a named module (or into the launching app). +If you want to implement (new) Java modules with i18n support, you can have a look at mmm-nls.

+
+
+
+
+
Buildtime Changes
+
+

If you also want to change your build to work with a recent JDK you also need to ensure that test frameworks and maven plugins properly support this.

+
+
+
Findbugs
+
+

Findbugs does not work with Java 9+ and is actually a dead project. +The new findbugs is SpotBugs. +For maven the new solution is spotbugs-maven-plugin:

+
+
+
+
<plugin>
+  <groupId>com.github.spotbugs</groupId>
+  <artifactId>spotbugs-maven-plugin</artifactId>
+  <version>3.1.11</version>
+</plugin>
+
+
+
+
+
Test Frameworks
+ + ++++++ + + + + + + + + + + + + + + + + +
Table 10. Minimum recommended versions of common 3rd party test frameworks for Java 9+
GroupIdArtifactIdVersionIssue

org.mockito

mockito-core

2.23.4

1419, 1696, 1607, 1594, 1577, 1482

+
+
+
Maven Plugins
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 11. Minimum recommended versions of common maven plugins for Java 9+
GroupIdArtifactId(min.) VersionIssue

org.apache.maven.plugins

maven-compiler-plugin

3.8.1

x

org.apache.maven.plugins

maven-surefire-plugin

2.22.2

SUREFIRE-1439

org.apache.maven.plugins

maven-surefire-report-plugin

2.22.2

SUREFIRE-1439

org.apache.maven.plugins

maven-archetype-plugin

3.1.0

x

org.apache.maven.plugins

maven-javadoc-plugin

3.1.0

x

org.jacoco

jacoco-maven-plugin

0.8.3

663

+
+
+
Maven Usage
+
+

With Java modules you can not run Javadoc standalone anymore or you will get this error when running mvn javadoc:javadoc:

+
+
+
+
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-javadoc-plugin:3.1.1:javadoc (default-cli) on project mmm-base: An error has occurred in Javadoc report generation:
+[ERROR] Exit code: 1 - error: module not found: io.github.mmm.base
+[ERROR]
+[ERROR] Command line was: /projects/mmm/software/java/bin/javadoc @options @packages @argfile
+
+
+
+

As a solution or workaround you need to include the compile goal into your build lifecycle so the module-path is properly configured:

+
+
+
+
mvn compile javadoc:javadoc
+
+
+
+
+
+
+ +
+

We want to give credits and say thanks to the following articles that have been there before and helped us on our way:

+
+ +
+ +
+

==JEE

+
+
+

This section is about Java Enterprise Edition (JEE). +According to our key principles, we focus on open standards. +For Java this means that we consider official standards from Java Standard and Enterprise Edition as first choice for considerations. +Therefore we also decided to recommend JAX-RS over SpringMVC as the latter is proprietary. +Only if an existing Java standard is not suitable for current demands such as Java Server Faces (JSF), we do not officially recommend it (while you are still free to use it if you have good reasons to do so). +In all other cases we officially suggest the according standard and use it in our guides, code-samples, sample application, modules, templates, etc. +Examples for such standards are JPA, JAX-RS, JAX-WS, JSR330, JSR250, JAX-B, etc.

+
+
+
+

1.64. Application-Server

+
+

We designed everything based on standards to work with different technology stacks and servlet containers. +However, we strongly encourage to use modern and lightweight frameworks such as spring or quarkus. +You are free to decide for a JEE application server but here is a list of good reasons for our decision:

+
+
+
    +
  • +

    Up-to-date

    +
    +

    With spring or quarkus you easily keep up to date with evolving technologies (microservices, reactive, NoSQL, etc.). +Most application servers put you in a jail with old legacy technology. +In many cases you are even forced to use a totally outdated version of java (JVM/JDK). +This may even cause severe IT-Security vulnerabilities but with expensive support you might get updates. +Also with lightweight open-source frameworks you need to be aware that for IT-security you need to update frequently, which can cost quite a lot of additional maintenance effort.

    +
    +
  • +
  • +

    Development speed

    +
    +

    With spring-boot you can implement and especially test your individual logic very fast. Starting the app in your IDE is very easy, fast, and realistic (close to production). You can easily write JUnit tests that startup your server application to e.g. test calls to your remote services via HTTP fast and easy. For application servers you need to bundle and deploy your app what takes more time and limits you in various ways. We are aware that this has improved in the past but also spring continuously improves and is always way ahead in this area. Further, with spring you have your configurations bundled together with the code in version control (still with ability to handle different environments) while with application servers these are configured externally and can not be easily tested during development.

    +
    +
  • +
  • +

    Documentation

    +
    +

    Spring and also quarkus have an extremely open and active community. +There is documentation for everything available for free on the web. +You will find solutions to almost any problem on platforms like stackoverflow. +If you have a problem you are only a google search away from your solution. +This is very much different for proprietary application server products.

    +
    +
  • +
  • +

    Helpful Exception Messages

    +
    +

    Especially spring is really great for developers on exception messages. +If you do something wrong you get detailed and helpful messages that guide you to the problem or even the solution. +This is not as great in application servers.

    +
    +
  • +
  • +

    Future-proof

    +
    +

    Spring has evolved really awesome over time. +Since its 1.0 release in 2004 spring has continuously been improved and always caught up with important trends and innovations. +Even in critical situations, when the company behind it (interface21) was sold, spring went on perfectly. +Quarkus on the other hand is relatively new. +It does not have to carry a large legacy history and is therefore most state-of-the-art for modern projects esp. in cloud environments. +JEE went through a lot of trouble and crisis. +Just look at the EJB pain stories. +This happened often in the past and also recent. +See JEE 8 in crisis.

    +
    +
  • +
  • +

    Free

    +
    +

    Spring and quarkus including their ecosystems are free and open-source. +It still perfectly integrates with commercial solutions for specific needs. +Most application servers are commercial and cost a lot of money. +As of today the ROI for this is of question.

    +
    +
  • +
  • +

    Cloud-native

    +
    +

    Quarkus is designed for cloud-native projects from the start. +With spring this is also available via spring-native. +Using an application server will effectively prevent you from going to the cloud smoothly.

    +
    +
  • +
  • +

    Fun

    +
    +

    If you go to conferences or ask developers you will see that spring or quarkus is popular and fun. +If new developers are forced to use an old application server product they will be less motivated or even get frustrated. +Especially in today’s agile projects this is a very important aspect. +In the end you will get into trouble with maintenance on the long run if you rely on a proprietary application server.

    +
    +
  • +
+
+
+

Of course the vendors of application servers will tell you a different story. +This is simply because they still make a lot of money from their products. +We do not get paid from application servers nor from spring, quarkus or any other IT product company. +We are just developers who love to build great systems. +A good reason for application servers is that they combine a set of solutions to particular aspects to one product that helps to standardize your IT. +However, devonfw fills exactly this gap for the spring and quarkus ecosystems in a very open and flexible way. +However, there is one important aspect that you need to understand and be aware of:

+
+
+

Some big companies decided for a specific application server as their IT strategy. +They may have hundreds of apps running with this application server. +All their operators and developers have learned a lot of specific skills for this product and are familiar with it. +If you are implementing yet another (small) app in this context it could make sense to stick with this application server. +However, they also have to be aware that with every additional app they increase their technical debt. +So actively help your customer and consult them to make the right choices for the future.

+
+
+ +
+

==Validation

+
+
+

Validation is about checking syntax and semantics of input data. Invalid data is rejected by the application. +Therefore validation is required in multiple places of an application. E.g. the GUI will do validation for usability reasons to assist the user, early feedback and to prevent unnecessary server requests. +On the server-side validation has to be done for consistency and security.

+
+
+

In general we distinguish these forms of validation:

+
+
+
    +
  • +

    stateless validation will produce the same result for given input at any time (for the same code/release).

    +
  • +
  • +

    stateful validation is dependent on other states and can consider the same input data as valid in one case and as invalid in another.

    +
  • +
+
+
+
+

1.65. Stateless Validation

+
+

For regular, stateless validation we use the JSR303 standard that is also called bean validation (BV). +Details can be found in the specification. +As implementation we recommend hibernate-validator.

+
+
+
Example
+
+

A description of how to enable BV for spring applications can be found in the relevant Spring documentation. A guide you can use to integrate validation in Quarkus applications can be found here. For a quick summary follow these steps:

+
+
+
    +
  • +

    Make sure that hibernate-validator is located in the classpath by adding a dependency to the pom.xml.

    +
  • +
+
+
+
Listing 9. spring
+
+
    <dependency>
+      <groupId>org.hibernate</groupId>
+      <artifactId>hibernate-validator</artifactId>
+    </dependency>
+
+
+
+
Listing 10. quarkus
+
+
    <dependency>
+      <groupId>io.quarkus</groupId>
+      <artifactId>quarkus-hibernate-validator</artifactId>
+    </dependency>
+
+
+
+
    +
  • +

    For methods to validate go to their declaration and add constraint annotations to the method parameters.

    +
    +

    In spring applications you can add the @Validated annotation to the implementation (spring bean) to be validated (this is an annotation of the spring framework, so it's not available in the Quarkus context). The standard use case is to annotate the logic layer implementation, i.e. the use case implementation or component facade in case of simple logic layer pattern. Thus, the validation will be executed for service requests as well as batch processing.

    +
    +
    +
      +
    • +

      @Valid annotation to the arguments to validate (if that class itself is annotated with constraints to check).

      +
    • +
    • +

      @NotNull for required arguments.

      +
    • +
    • +

      Other constraints (e.g. @Size) for generic arguments (e.g. of type String or Integer). However, consider to create custom datatypes and avoid adding too much validation logic (especially redundant in multiple places).

      +
    • +
    +
    +
  • +
+
+
+
Listing 11. BookingmanagementRestServiceImpl.java
+
+
@Validated
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+  ...
+  public BookingEto saveBooking(@Valid BookingCto booking) {
+  ...
+
+
+
+
    +
  • +

    Finally add appropriate validation constraint annotations to the fields of the ETO class.

    +
  • +
+
+
+
Listing 12. BookingCto.java
+
+
  @Valid
+  private BookingEto booking;
+
+
+
+
Listing 13. BookingEto.java
+
+
  @NotNull
+  @Future
+  private Timestamp bookingDate;
+
+
+
+

A list with all bean validation constraint annotations available for hibernate-validator can be found here. In addition it is possible to configure custom constraints. Therefore it is necessary to implement an annotation and a corresponding validator. A description can also be found in the Spring documentation or with more details in the hibernate documentation.

+
+
+ + + + + +
+ + +Bean Validation in Wildfly >v8: Wildfly v8 is the first version of Wildfly implementing the JEE7 specification. It comes with bean validation based on hibernate-validator out of the box. In case someone is running Spring in Wildfly for whatever reasons, the spring based annotation @Validated would duplicate bean validation at runtime and thus should be omitted. +
+
+
+
+
GUI-Integration
+
+

TODO

+
+
+
+
Cross-Field Validation
+
+

BV has poor support for this. Best practice is to create and use beans for ranges, etc. that solve this. A bean for a range could look like so:

+
+
+
+
public class Range<V extends Comparable<V>> {
+
+  private V min;
+  private V max;
+
+  public Range(V min, V max) {
+
+    super();
+    if ((min != null) && (max != null)) {
+      int delta = min.compareTo(max);
+      if (delta > 0) {
+        throw new ValueOutOfRangeException(null, min, min, max);
+      }
+    }
+    this.min = min;
+    this.max = max;
+  }
+
+  public V getMin() ...
+  public V getMax() ...
+
+
+
+
+
+

1.66. Stateful Validation

+
+

For complex and stateful business validations we do not use BV (possible with groups and context, etc.) but follow KISS and just implement this on the server in a straightforward manner. +An example is the deletion of a table in the example application. Here the state of the table must be checked first:

+
+
+

BookingmanagementImpl.java

+
+
+
+
  private void sendConfirmationEmails(BookingEntity booking) {
+
+    if (!booking.getInvitedGuests().isEmpty()) {
+      for (InvitedGuestEntity guest : booking.getInvitedGuests()) {
+        sendInviteEmailToGuest(guest, booking);
+      }
+    }
+
+    sendConfirmationEmailToHost(booking);
+  }
+
+
+
+

Implementing this small check with BV would be a lot more effort.

+
+
+ +
+

==Bean-Mapping

+
+
+

For decoupling, you sometimes need to create separate objects (beans) for a different view. E.g. for an external service, you will use a transfer-object instead of the persistence entity so internal changes to the entity do not implicitly change or break the service.

+
+
+

Therefore you have the need to map similar objects which creates a copy. This also has the benefit that modifications to the copy have no side-effect on the original source object. However, to implement such mapping code by hand is very tedious and error-prone (if new properties are added to beans but not to mapping code):

+
+
+
+
public UserEto mapUser(UserEntity source) {
+  UserEto target = new UserEto();
+  target.setUsername(source.getUsername());
+  target.setEmail(source.getEmail());
+  ...
+  return target;
+}
+
+
+
+

Therefore we are using a BeanMapper for this purpose that makes our lives a lot easier. +There are several bean mapping frameworks with different approaches.

+
+
+

For a devon4j-spring application we recommend Orika, follow Spring Bean-Mapping for an introduction to Orika and Dozer in a devon4j-spring context application.

+
+
+ + + + + +
+ + +devon4j started with Dozer as framework for Spring applications and still supports it. However, we now recommend Orika (for new projects) as it is much faster (see Performance of Java Mapping Frameworks). +
+
+
+

For a Quarkus application we recommend Mapstruct, follow Quarkus Bean-Mapping for an introduction to Mapstruct in a quarkus context application.

+
+ +
+

==Lombok

+
+
+

Lombok is a library that works with an annotation processor and will generate code for you to save you some time and reduce the amount of boilerplate code in your project. Lombok can generate getter and setter, equals methods, automate your logging variables for your classes, and more. Follow the list of all the features provided by Lombok to get an overview.

+
+
+
+

1.67. Lombok Dependency

+
+

To get access to the Lombok library just add the following dependency to the POM.xml.

+
+
+

The Lombok dependency:

+
+
+
+
<dependency>
+	<groupId>org.projectlombok</groupId>
+	<artifactId>lombok</artifactId>
+	<version>1.18.20</version>
+</dependency>
+
+
+
+

To get Lombok working with your current IDE you should also install the Lombok addon. Follow the Eclipse installation guide, there are also guides for other supported IDEs.

+
+
+
+

1.68. Lombok with Mapstruct

+
+

MapStruct takes advantage of generated getters, setters, and constructors from Lombok and uses them to +generate the mapper implementations. Lombok is also an annotation processor and since version 1.18.14 both frameworks are working together. Just add the lombok-mapstruct-binding to your POM.xml.

+
+
+

The Lombok annotation processor and the lombok-mapstruct-binding

+
+
+
+
<dependency>
+	<groupId>org.projectlombok</groupId>
+	<artifactId>lombok-mapstruct-binding</artifactId>
+	<version>0.2.0</version>
+</dependency>
+
+<plugin>
+	<groupId>org.apache.maven.plugins</groupId>
+	<artifactId>maven-compiler-plugin</artifactId>
+	<version>3.8.1</version>
+	<configuration>
+		<source>1.8</source>
+		<target>1.8</target>
+		<annotationProcessorPaths>
+			<path>
+				<groupId>org.projectlombok</groupId>
+				<artifactId>lombok</artifactId>
+				<version>1.18.4</version>
+			</path>
+			<path>
+				<groupId>org.projectlombok</groupId>
+				<artifactId>lombok-mapstruct-binding</artifactId>
+				<version>0.2.0</version>
+			</path>
+		</annotationProcessorPaths>
+	</configuration>
+</plugin>
+
+
+
+

In our quarkus reference project you can get a look into the usage of both frameworks.

+
+
+
+

1.69. Lombok Usage

+
+

Lombok can be used like any other annotation processor and will be shown in the simple example below to generate getter and setter for a Product Entity.

+
+
+
+
@Getter
+@Setter
+public class Product{
+
+    private String title;
+    private String description;
+    private BigDecimal price;
+}
+
+
+
+

For advanced Lombok usage follow the Baeldung Lombok guide or just read the Lombok javadoc

+
+
+ +
+

==OpenAPI

+
+
+

The OpenAPI Specification (OAS) defines a standard for describing RESTful web services in a machine- and human-readable format. OpenAPI allows REST APIs to be defined in a uniform manner. +Technically, an OpenAPI document is written in YAML or JSON format. The specification defines the structure of a REST API by describing attributes such as path information, response codes, and return types. Some examples can be found here. +Apart from documenting the API, this schema then also acts as a contract between provider and consumers, guaranteeing interoperability between various technologies.

+
+
+

OpenAPI is often used in combination with Swagger. Swagger is a set of tools built around OpenAPI that help developers to design and document their REST APIs. +The most common tool is the Swagger UI, which uses the OpenAPI specification to create a graphical interface of the REST API that you can also interact with. Check out the Swagger online editor to get a feeling for it.

+
+
+ + + + + +
+ + +
+

Swagger and OpenAPI: Swagger is a former specification, based on which the OpenAPI was created. Swagger 2.0 is still commonly used for describing APIs. OpenAPI is an open-source collaboration and it started from version 3.0.0 (semver).

+
+
+
+
+

There are many tools that work with OpenAPI: code generators, documentation tools, validators etc.

+
+
+
+

1.70. OpenAPI generation

+
+

There are several extensions you can use in your project to automatically generate the OpenAPI specifications and Swagger UI from your REST API (code-first approach). devon4j recommends the following two extensions/plugins to use:

+
+
+
    +
  • +

    Smallrye OpenAPI extension

    +
  • +
  • +

    ServicedocGen maven plugin

    +
  • +
+
+
+
Smallrye OpenAPI
+
+

Quarkus provides OpenAPI support through Smallrye OpenAPI extension:

+
+
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-smallrye-openapi</artifactId>
+</dependency>
+
+
+
+

After adding the extension to your project, you can access the Swagger UI by navigating to /q/swagger-ui.

+
+
+

The OpenAPI specification can be accessed by requesting /q/openapi.

+
+
+

Smallrye OpenAPI is compliant with MicroProfile OpenAPI. You can add MicroProfile annotations to further describe your REST endpoints and extend the OpenAPI documentation. +More information for this can be found here or here.

+
+
+ + + + + +
+ + +
+

Quarkus recommends using this extension and you can document your APIs in great detail by using the MicroProfile annotations. The downside to this is that using these annotations will blow up your code and you will have some duplicate information in it. +If you don’t want to specify the REST API again with all this annotation based information, we also recommend taking a look at the ServicedocGen Maven plugin for your Quarkus applications when implementing JAX-RS APIs.

+
+
+
+
+
+
ServicedocGen Maven Plugin
+
+

The ServicedocGen maven plugin can be used within both Spring and Quarkus applications. +It works a bit differently than the Smallrye extensions mentioned above. The plugin analyzes the REST API and its JavaDoc and then generates the OpenAPI specification and the Swagger UI as static files. So no Swagger or MicroProfile annotations have to be added.

+
+
+

The plugin can be configured in the pom.xml file of your application as follows:

+
+
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>org.codehaus.mojo</groupId>
+      <artifactId>servicedocgen-maven-plugin</artifactId>
+      <version>1.0.0</version>
+      <executions>
+        <execution>
+          <goals>
+            <goal>generate</goal>
+          </goals>
+        </execution>
+      </executions>
+      <configuration>
+        <descriptor>
+          <info>
+            <title>...</title>
+            <description>...</description>
+          </info>
+          <host>...</host>
+          <port>...</port>
+          <basePath>...</basePath>
+          <schemes>
+            <scheme>...</scheme>
+          </schemes>
+        </descriptor>
+      </configuration>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+

In the configuration section you have to define additional information to generate the OpenAPI specification correctly. An example can be found in our Quarkus reference application. +When building the application, an OpenApi.yaml and a SwaggerUI.html file are created in the /target/site folder. To make the Swagger UI available in the browser, the file must be served by some servlet.

+
+
+ +
+

==Spring

+
+
+

Spring is the most famous and established Java framework. +It is fully supported by devonfw as an option and alternative to quarkus.

+
+
+
+
+

1.71. Guide to the Reader

+
+

Dependent on the intention you are reading this document, you might be most interested in the following chapters:

+
+
+
    +
  • +

    If you are not yet familiar with Spring, you may be interested in pros and cons of Spring. Also take a look at the official Spring website.

    +
  • +
  • +

    If you already have experience developing with Spring but are new to devon4j, take a look at devon4j’s recommendations on general best practices. Check out the chapters on architecture design, project structuring and coding conventions. Follow the referenced links to go deeper into a topic.

    +
  • +
  • +

    If you have already developed with devon4j and Spring and need more information on a specific topic, check out the devon4j guides for Spring. If you don’t find what you are looking for there, check out the general section. devon4j uses general solutions for Java, so solutions for both Spring and Quarkus are documented there.

    +
  • +
  • +

    If you want to get started or create your first Spring application using devon4j, check out the guide about creating a new application or the Jump the Queue and My Thai Star reference applications.

    +
  • +
+
+
+
+

1.72. Pros

+
+

Spring offers the following benefits:

+
+
+
    +
  • +

    highly flexible
    +Spring is famous for its great flexibility. You can customize and integrate nearly everything.

    +
  • +
  • +

    well established
    +While JEE application servers including very expensive commercial products turned out to be a dead-end, spring has guided projects through the changing trends of IT throughout decades. It may be the framework with the longest history track and popularity. As a result you can easily find developers, experts, books, articles, etc. about spring.

    +
  • +
  • +

    non-invasive and not biased
    +Spring became famous for its non-invasive coding based on patterns instead of hard dependencies. It gives you a lot of freedom and avoids tight coupling of your (business) code.

    +
  • +
+
+
+

See Why Spring? for details.

+
+
+
+

1.73. Cons

+
+

Spring has the following drawbacks:

+
+
+
    +
  • +

    history and legacy
    +Due to the pro of its long established history, spring also carries a lot of legacy. As a result there are many ways to do the same thing while some options may be discouraged. Developers needs some guidance (e.g. via devon4j) as they may enter pitfalls and dead-ends when choosing the first solution they found on google or stackoverflow.

    +
  • +
  • +

    lost lead in cloud-native
    +While for the last decades spring was leading innovation in Java app development, it seems that with the latest trends and shift such as cloud-native, they have been overtaken by frameworks like quarkus. However, spring is trying to catch up with spring-native.

    +
  • +
+
+
+
+

1.74. Spring-Boot

+
+

Spring-boot is a project and initiative within the spring-ecosystem that brought a lot of innovation and simplification into app development on top of spring. +As of today we typically use the terms spring and spring-boot rather synonymously as we always use spring together with spring-boot.

+
+
+
+

1.75. Spring-Native

+
+

Spring-native adds cloud-native support to the spring ecosystem and allows to build a spring app as cloud-native image via GraalVM. +You may also consider Quarkus if you are interested in building cloud-native images. For a comparison of both Spring Native and Quarkus, you may refer to our Spring Native vs. Quarkus guide.

+
+ +
+

==Components

+
+
+

Following separation-of-concerns we divide an application into components using our package-conventions and project structure. +As described by the architecture each component is divided into layers as described in the project structure. +Please note that a component will only have the required layers. +So a component may have any number from one to all layers.

+
+
+

1.75.1. General Component

+
+

Cross-cutting aspects belong to the implicit component general. It contains technical configurations and very general code that is not business specific. Such code shall not have any dependencies to other components and therefore business related code.

+
+
+
+

1.75.2. Business Component

+
+

The business-architecture defines the business components with their allowed dependencies. A small application (microservice) may just have one component and no dependencies making it simple while the same architecture can scale up to large and complex applications (from bigger microservice up to modulith). +Tailoring a business domain into applications and applications into components is a tricky task that needs the skills of an experienced architect. +Also, the tailoring should follow the business and not be split for technical reasons or only by size. +Size is only an indicator but not a driver of tailoring. +Whatever hypes like microservices are telling you, never get misled in this regard: +If your system grows and reaches MAX+1 lines of code, it is not the right motivation to split it into two microservices of ~MAX/2 lines of code - such approaches will waste huge amounts of money and lead to chaos.

+
+
+
+

1.75.3. App Component

+
+

Only in case you need cross-cutting code that aggregates another component you may introduce the component app. +It is allowed to depend on all other components but no other component may depend on it. +With the modularity and flexibility of spring you typically do not need this. +However, when you need to have a class that registers all services or component-facades using direct code dependencies, you can introduce this component.

+
+
+
+

1.75.4. Component Example

+
+

The following class diagram illustrates an example of the business component Staffmanagement:

+
+
+
+logic layer component pattern +
+
+
+

In this scheme, you can see the structure and flow from the service-layer (REST service call) via the logic-layer to the dataaccess-layer (and back).

+
+ +
+

==Classic project structure

+
+
+

In this section we describe the classic project structure as initially proposed for Java in devonfw. +It is still valid and fully supported. +However, if you want to start a new project, please consider using the modern structure.

+
+
+
+

1.75.5. Modules

+
+

The structure of a devon4j application is divided into the following modules:

+
+
+
    +
  • +

    api: module containing the API of your application. The API contains the required artifacts to interact with your application via remote services. This can be REST service interfaces, transfer-objects with their interfaces and datatypes but also OpenAPI or gRPC contracts.

    +
  • +
  • +

    core: maven module containing the core of the application with service implementation, as well as entire logic layer and dataaccess layer.

    +
  • +
  • +

    batch: optional module for batch layer

    +
  • +
  • +

    server: module that bundles the entire app (core with optional batch) typically as a bootified WAR file.

    +
  • +
+
+
+
+

1.75.6. Deployment

+
+
+
+

Make jar not war

+
+
+
+— Josh Long +
+
+
+

First of all it is important to understand that the above defined modules aim to make api, core, and batch reusable artifacts, that can be used as a regular maven dependency. +On the other hand to build and deploy your application you want a final artifact that contains all required 3rd party libraries. +This artifact is not reusable as a maven dependency. +That is exactly the purpose of the server module to build and package this final deployment artifact. +By default we first build a regular WAR file with maven in your server/target directory (*-server-«version».war) and in a second step create a bootified WAR out of this (*-server-bootified.war). +The bootified WAR file can then be started standalone (java -jar «filename».war). +However, it is also possible to deploy the same WAR file to a servlet container like tomcat or jetty. +As application servers and externally provided servlet containers are not recommended anymore for various reasons (see JEE), you may also want to create a bootified JAR file instead. +All you need to do in that case is to change the packaging in your server/pom.xml from war to jar.

+
+
+
+

1.75.7. Package Structure

+
+

The package structure of your code inside src/main/java (and src/test/java) of your modules is described in our coding conventions in the sections packages. A full mapping of the architecture and the different code elements to the packaging is described in the following section.

+
+
+
+

1.75.8. Layers

+
+

The package structure of your code inside src/main/java (and src/test/java) of your app is described in our coding conventions in the sections packages. +The following table describes our classic approach for packaging and layering:

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 12. Traditional generic devon4j layers
Layer«layer»

service

service

logic

logic

data-access

dataaccess

batch (optional)

batch

client (optional)

client

common

common

+
+
+

1.75.9. Architecture Mapping

+
+

In order to help you to map the architecture, packaging, layering, etc. to the code and see where different code elements should be placed, +we provide this architecture mapping:

+
+
+
+
«root»
+├──.«component»
+|  ├──.common
+|  |  ├──.api[.«detail»]
+|  |  |  ├──.datatype
+|  |  |  |  └──.«Datatype» (api)
+|  |  |  └──.«BusinessObject» (api)
+|  |  └──.impl[.«detail»]
+|  |     ├──.«Aspect»ConfigProperties (core)
+|  |     ├──.«Datatype»JsonSerializer (core)
+|  |     └──.«Datatype»JsonDeserializer (core)
+|  ├──.dataaccess
+|  |  ├──.api[.«detail»]
+|  |  |  ├──.repo
+|  |  |  |  └──.«BusinessObject»Repository (core)
+|  |  |  ├──.dao (core) [alternative to repo]
+|  |  |  |  └──.«BusinessObject»Dao (core) [alternative to Repository]
+|  |  |  └──.«BusinessObject»Entity (core)
+|  |  └──.impl[.«detail»]
+|  |     ├──.dao (core) [alternative to repo]
+|  |     |  └──.«BusinessObject»DaoImpl (core) [alternative to Repository]
+|  |     └──.«Datatype»AttributeConverter (core)
+|  ├──.logic
+|  |  ├──.api
+|  |  |  ├──.[«detail».]to
+|  |  |  |   ├──.«MyCustom»«To (api)
+|  |  |  |   ├──.«DataStructure»Embeddable (api)
+|  |  |  |   ├──.«BusinessObject»Eto (api)
+|  |  |  |   └──.«BusinessObject»«Subset»Cto (api)
+|  |  |  ├──.[«detail».]usecase
+|  |  |  |   ├──.UcFind«BusinessObject» (core)
+|  |  |  |   ├──.UcManage«BusinessObject» (core)
+|  |  |  |   └──.Uc«Operation»«BusinessObject» (core)
+|  |  |  └──.«Component» (core)
+|  |  ├──.base
+|  |  |  └──.[«detail».]usecase
+|  |  |     └──.Abstract«BusinessObject»Uc (core)
+|  |  └──.impl
+|  |     ├──.[«detail».]usecase
+|  |     |   ├──.UcFind«BusinessObject»Impl (core)
+|  |     |   ├──.UcManage«BusinessObject»Impl (core)
+|  |     |   └──.Uc«Operation»«BusinessObject»Impl (core)
+|  |     └──.«Component»Impl (core)
+|  └──.service
+|     ├──.api[.«detail»]
+|     |  ├──.rest
+|     |  |  └──.«Component»RestService (api)
+|     |  └──.ws
+|     |     └──.«Component»WebService (api)
+|     └──.impl[.«detail»]
+|        ├──.jms
+|        |  └──.«BusinessObject»JmsListener (core)
+|        ├──.rest
+|        |  └──.«Component»RestServiceImpl (core)
+|        └──.ws
+|           └──.«Component»WebServiceImpl (core)
+├──.general
+│  ├──.common
+│  |  ├──.api
+|  |  |  ├──.to
+|  |  |  |  ├──.AbstractSearchCriteriaTo (api)
+|  |  |  └──.ApplicationEntity
+│  |  ├──.base
+|  |  |  └──.AbstractBeanMapperSupport (core)
+│  |  └──.impl
+│  |     ├──.config
+│  |     |  └──.ApplicationObjectMapperFactory (core)
+│  |     └──.security
+│  |        └──.ApplicationWebSecurityConfig (core)
+│  ├──.dataaccess
+│  |  └──.api
+|  |     └──.ApplicationPersistenceEntity (core)
+│  ├──.logic
+│  |  └──.base
+|  |     ├──.AbstractComponentFacade (core)
+|  |     ├──.AbstractLogic (core)
+|  |     └──.AbstractUc (core)
+|  └──.service
+|     └──...
+└──.SpringBootApp (core)
+
+
+
+
+
+
+

1.76. Layers

+ +
+

==Client Layer

+
+
+

There are various technical approaches to building GUI clients. The devonfw proposes rich clients that connect to the server via data-oriented services (e.g. using REST with JSON). +In general, we have to distinguish among the following types of clients:

+
+
+
    +
  • +

    web clients

    +
  • +
  • +

    native desktop clients

    +
  • +
  • +

    (native) mobile clients

    +
  • +
+
+
+

Our main focus is on web-clients. In our sample application my-thai-star we offer a responsive web-client based on Angular following devon4ng that integrates seamlessly with the back ends of my-thai-star available for Java using devon4j as well as .NET/C# using devon4net. For building angular clients read the separate devon4ng guide.

+
+
+
JavaScript for Java Developers
+
+

In order to get started with client development as a Java developer we give you some hints to get started. Also if you are an experienced JavaScript developer and want to learn Java this can be helpful. First, you need to understand that the JavaScript ecosystem is as large as the Java ecosystem and developing a modern web client requires a lot of knowledge. The following table helps you as experienced developer to get an overview of the tools, configuration-files, and other related aspects from the new world to learn. Also it helps you to map concepts between the ecosystems. Please note that we list the tools recommended by devonfw here (and we know that there are alternatives not listed here such as gradle, grunt, bower, etc.).

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 13. Aspects in JavaScript and Java ecosystem
TopicAspectJavaScriptJava

Programming

Language

TypeScript (extends JavaScript)

Java

Runtime

VM

nodejs (or web-browser)

jvm

Build- & Dependency-Management

Tool

npm or yarn

maven

Config

package.json

pom.xml

Repository

npm repo

maven central (repo search)

Build cmd

ng build or npm run build (goals are not standardized in npm)

mvn install (see lifecycle)

Test cmd

ng test

mvn test

Testing

Test-Tool

jasmine

junit

Test-Runner

karma

junit / surefire

E2E Testing

Protractor

Selenium

Code Analysis

Code Coverage

ng test --no-watch --code-coverage

JaCoCo

Development

IDE

MS VS Code or IntelliJ

Eclipse or IntelliJ

Framework

Angular (etc.)

Spring or Quarkus

+
+ +
+

==Service Layer

+
+
+

The service layer is responsible for exposing functionality made available by the logic layer to external consumers over a network via technical protocols.

+
+
+
+
Types of Services
+
+

Before you start creating your services you should consider some general design aspects:

+
+
+
    +
  • +

    Do you want to create a RPC service?

    +
  • +
  • +

    Or is your problem better addressed by messaging or eventing?

    +
  • +
  • +

    Who will consume your service?

    +
    +
      +
    • +

      Do you have one or multiple consumers?

      +
    • +
    • +

      Do web-browsers have to use your service?

      +
    • +
    • +

      Will apps from other vendors or parties have to consume your service that you can not influence if the service may have to change or be extended?

      +
    • +
    +
    +
  • +
+
+
+

For RPC a common choice is REST but there are also interesting alternatives like gRPC. We also have a guide for SOAP but this technology should rather be considered as legacy and is not recommended for new services.

+
+
+

When it comes to messaging in Java the typical answer will be JMS. However, a very promising alternative is Kafka.

+
+
+
+
Versioning
+
+

For RPC services consumed by other applications we use versioning to prevent incompatibilities between applications when deploying updates. This is done by the following conventions:

+
+
+
    +
  • +

    We define a version number and prefix it with v (e.g. v1).

    +
  • +
  • +

    If we support previous versions we use that version numbers as part of the Java package defining the service API (e.g. com.foo.application.component.service.api.v1)

    +
  • +
  • +

    We use the version number as part of the service name in the remote URL (e.g. https://application.foo.com/services/rest/component/v1/resource)

    +
  • +
  • +

    Whenever breaking changes are made to the API, create a separate version of the service and increment the version (e.g. v1 → v2). The implementations of the different versions of the service contain compatibility code and delegate to the same unversioned use-case of the logic layer whenever possible.

    +
  • +
  • +

    For maintenance and simplicity, avoid keeping more than one previous version.

    +
  • +
+
+
+
+
Interoperability
+
+

For services that are consumed by clients with different technology, interoperability is required. This is addressed by selecting the right protocol, following protocol-specific best practices and following our considerations especially simplicity.

+
+
+
+
Service Considerations
+
+

The term service is quite generic and therefore easily misunderstood. It is a unit exposing coherent functionality via a well-defined interface over a network. For the design of a service, we consider the following aspects:

+
+
+
    +
  • +

    self-contained
    +The entire API of the service shall be self-contained and have no dependencies on other parts of the application (other services, implementations, etc.).

    +
  • +
  • +

    idempotence
    +E.g. creation of the same master-data entity has no effect (no error)

    +
  • +
  • +

    loosely coupled
    +Service consumers have minimum knowledge and dependencies on the service provider.

    +
  • +
  • +

    normalized
    +Complete, no redundancy, minimal

    +
  • +
  • +

    coarse-grained
    +Service provides rather large operations (save entire entity or set of entities rather than individual attributes)

    +
  • +
  • +

    atomic
    +Process individual entities (for processing large sets of data, use a batch instead of a service)

    +
  • +
  • +

    simplicity
    +Avoid polymorphism, RPC methods with unique name per signature and no overloading, avoid attachments (consider separate download service), etc.

    +
  • +
+
+
+
+
Security
+
+

Your services are the major entry point to your application. Hence, security considerations are important here.

+
+
+

See REST Security.

+
+ +
+

==Service-Versioning

+
+
+

This guide describes the aspect and details about versioning of services

+
+
+
+
Motivation
+
+

Why versioning of services? First of all, you should only care about this topic if you really have to. Service versioning is complex and requires effort (time and budget). The best way to avoid this is to be smart in the first place when designing the service API. +Further, if you are creating services where the only consumer is e.g. the web-client that you deploy together with the consumed services then you can change your service without the overhead to create new service versions and keeping old service versions for compatibility.

+
+
+

However, if the following indicators are given you typically need to do service versioning:

+
+
+
    +
  • +

    Your service is part of a complex and distributed IT landscape

    +
  • +
  • +

    Your service requires incompatible changes

    +
  • +
  • +

    There are many consumers or there is at least one (relevant) consumer that can not be updated at the same time or is entirely out of control (unknown or totally different party/company)

    +
  • +
+
+
+

What are incompatible changes?

+
+
+
    +
  • +

    Almost any change when SOAP is used (as it changes the WSDL and breaks the contract). Therefore, we recommend to use REST instead. Then, only the following changes are critical.

    +
  • +
  • +

    A change where existing properties (attributes) have to change their name

    +
  • +
  • +

    A change where existing features (properties, operations, etc.) have to change their semantics (meaning)

    +
  • +
+
+
+

What changes do not cause incompatibilities?

+
+
+
    +
  • +

    Adding new service operations is entirely uncritical with REST.

    +
  • +
  • +

    Adding new properties is only a problem in the following cases:

    +
    +
      +
    • +

      Adding new mandatory properties to the input of a service is causing incompatibilities. This problem can be avoided by contract-design.

      +
    • +
    • +

      If a consumer is using a service to read data, modify it and then save it back via a service and a property is added to the data, then this property might be lost. This is not a problem with dynamic languages such as JavaScript/TypeScript but with strictly typed languages such as Java. In Java you will typically use structured typed transfer-objects (and not Map<String, Object>) so new properties that have been added but are not known to the consumer can not be mapped to the transfer-object and will be lost. When saving that transfer-object later the property will be gone. It might be impossible to determine the difference between a lost property and a property that was removed on purpose. This is a general problem that you need to be aware of and that you have to consider by your design in such situations.

      +
    • +
    +
    +
  • +
+
+
+

Even if you hit an indicator for incompatible changes you can still think about adding a new service operation instead of changing an existing one (and deprecating the old one). Be creative to simplify and avoid extra effort.

+
+
+
+
Procedure
+
+

The procedure when rolling out incompatible changes is illustrated by the following example:

+
+
+
+
+------+  +------+
+| App1 |  | App2 |
++---+--+  +--+---+
+    |        |
+    +---+----+
+        |
++-------+--------+
+|      Sv1       |
+|                |
+|      App3      |
++----------------+
+
+
+
+

So, here we see a simple example where App3 provides a Service S in Version v1 that is consumed both by App1 and App2.

+
+
+

Now for some reason the service S has to be changed in an incompatible way to make it future-proof for demands. However, upgrading all 3 applications at the same time is not possible in this case for whatever reason. Therefore, service versioning is applied for the changes of S.

+
+
+
+
+------+  +------+
+| App1 |  | App2 |
++---+--+  +--+---+
+    |        |
+    +--------+
+    |
++---+------------+
+|  Sv1  |  Sv2   |
+|                |
+|      App3*     |
++----------------+
+
+
+
+

Now, App3 has been upgraded and the new release was deployed. A new version v2 of S has been added while v1 is still kept for compatibility reasons and that version is still used by App1 and App2.

+
+
+
+
+------+  +------+
+| App1 |  | App2*|
++---+--+  +--+---+
+    |        |
+    |        |
+    |        |
++---+--------+---+
+|  Sv1  |  Sv2   |
+|                |
+|      App3      |
++----------------+
+
+
+
+

Now, App2 has been updated and deployed and it is using the new version v2 of S.

+
+
+
+
+------+  +------+
+| App1*|  | App2 |
++---+--+  +--+---+
+    |        |
+    +--------+
+             |
++------------+---+
+|  Sv1  |  Sv2   |
+|                |
+|      App3      |
++----------------+
+
+
+
+

Now, also App1 has been updated and deployed and it is using the new version v2 of S. The version v1 of S is not used anymore. This can be verified via logging and monitoring.

+
+
+
+
+------+  +------+
+| App1 |  | App2 |
++---+--+  +--+---+
+    |        |
+    +--------+
+             |
++------------+---+
+|          Sv2   |
+|                |
+|      App3*     |
++----------------+
+
+
+
+

Finally, version v1 of the service S was removed from App3 and the new release has been deployed.

+
+
+
+
Versioning Schema
+
+

In general anything can be used to differentiate versions of a service. Possibilities are:

+
+
+
    +
  • +

    Code names (e.g. Strawberry, Blueberry, Grapefruit)

    +
  • +
  • +

    Timestamps (YYYYMMDD-HHmmSS)

    +
  • +
  • +

    Sequential version numbers (e.g. v1, v2, v3)

    +
  • +
  • +

    Composed version numbers (e.g. 1.0.48-pre-alpha-3-20171231-235959-Strawberry)

    +
  • +
+
+
+

As we are following the KISS principle (see key principles) we propose to use sequential version numbers. These are short, clear, and easy while still allowing to see what version is after another one. Especially composed version numbers (even 1.1 vs. 2.0) lead to decisions and discussions that easily waste more time than adding value. It is still very easy to maintain an Excel sheet or release-notes document that is explaining the changes for each version (v1, v2, v3) of a particular service.

+
+
+

We suggest to always add the version schema to the service URL to be prepared for service versioning even if service versioning is not (yet) actively used. For simplicity it is explicitly stated that you may even do incompatible changes to the current version (typically v1) of your service if you can update the according consumers within the same deployment.

+
+
+
+
Practice
+
+

So assuming you know that you have to do service versioning, the question is how to do it practically in the code. +The approach for your devon4j project in case of code-first should be as described below:

+
+
+
    +
  • +

    Determine which types in the code need to be changed. It is likely to be the API and implementation of the according service but it may also impact transfer objects and potentially even datatypes.

    +
  • +
  • +

    Create new packages for all these concerned types containing the current version number (e.g. v1).

    +
  • +
  • +

    Copy all these types to that new packages.

    +
  • +
  • +

    Rename these copies so they carry the version number as suffix (e.g. V1).

    +
  • +
  • +

    Increase the version of the service in the unversioned package (e.g. from v1 to v2).

    +
  • +
  • +

    Now you have two versions of the same service (e.g. v1 and v2) but so far they behave exactly the same.

    +
  • +
  • +

    You start with your actual changes and modify the original files that have been copied before.

    +
  • +
  • +

    You will also ensure the links (import statements) of the copied types point to the copies with the version number

    +
  • +
  • +

    This will cause incompatibilities (and compile errors) in the copied service. Therefore, you need to fix that service implementation to map from the old API to the new API and behavior. In some cases, this may be easy (e.g. mapping x.y.z.v1.FooTo to x.y.z.FooTo using bean-mapping with some custom mapping for the incompatible changes), in other cases this can get very complex. Be aware of this complexity from the start before you make your decision about service versioning.

    +
  • +
  • +

    As far as possible this mapping should be done in the service-layer, not to pollute your business code in the core-layer with versioning-aspects. If there is no way to handle it in the service layer, e.g. you need some data from the persistence-layer, implement the "mapping" in the core-layer then, but don’t forget to remove this code, when removing the old service version.

    +
  • +
  • +

    Finally, ensure that both the old service behaves as before as well as the new service works as planned.

    +
  • +
+
+
+
Modularization
+
+

For modularization, we also follow the KISS principle (see key principles): +we suggest to have one api module per application that will contain the most recent version of your service and get released with every release-version of the application. The compatibility code with the versioned packages will be added to the core module and therefore is not exposed via the api module (because it has already been exposed in the previous release of the app). This way, you can always determine for sure which version of a service is used by another application just by its maven dependencies.

+
+
+

The KISS approach with only a single module that may contain multiple services (e.g. one for each business component) will cause problems when you want to have mixed usages of service versions: You can not use an old version of one service and a new version of another service from the same APP as then you would need to have its API module twice as a dependency on different versions, which is not possible. However, to avoid complicated overhead we always suggest to follow this easy approach. Only if you come to the point that you really need this complexity you can still solve it (even afterwards by publishing another maven artefact). As we are all on our way to build more but smaller applications (SOA, microservices, etc.) we should always start simple and only add complexity when really needed.

+
+
+

The following example gives an idea of the structure:

+
+
+
+
/«my-app»
+├──/api
+|  └──/src/main/java/
+|     └──/«rootpackage»/«application»/«component»
+|        ├──/common/api/to
+|        |  └──FooTo
+|        └──/service/api/rest
+|           └──FooRestService
+└──/core
+   └──/src/main/java/
+      └──«rootpackage»/«application»/«component»
+         ├──/common/api/to/v1
+         |  └──FooToV1
+         └──/service
+            ├──/api/rest/v1
+            |  └──FooRestServiceV1
+            └──impl/rest
+               ├──/v1
+               |  └── FooRestServiceImplV1
+               └──FooRestServiceImpl
+
+
+
+ +
+

==Logic Layer

+
+
+

The logic layer is the heart of the application and contains the main business logic. +According to our business architecture, we divide an application into components. +For each component, the logic layer defines different use-cases. Another approach is to define a component-facade, which we do not recommend for future applications. Especially for Quarkus applications, we want to simplify things and highly suggest omitting component-facade completely and using use-cases only. +It is very important that you follow the links to understand the concept of use-case in order to properly implement your business logic.

+
+
+
+
+
Responsibility
+
+

The logic layer is responsible to implement the business logic according to the specified functional demands and requirements. +Therefore, it creates the actual value of the application. The logic layer is responsible for invoking business logic in external systems. +The following additional aspects are also included in its responsibility:

+
+
+ +
+
+
+
Security
+
+

The logic layer is the heart of the application. It is also responsible for authorization and hence security is important here. Every method exposed in an interface needs to be annotated with an authorization check, stating what role(s) a caller must provide in order to be allowed to make the call. The authorization concept is described here.

+
+
+
Direct Object References
+
+

Insecure Direct Object References are a security threat. This simply gives you two options:

+
+
+
    +
  • +

    avoid direct object references

    +
  • +
  • +

    ensure that direct object references are secure

    +
  • +
+
+
+

Especially when using REST, direct object references via technical IDs are common sense. This implies that you have a proper authorization in place. This is especially tricky when your authorization does not only rely on the type of the data and according to static permissions but also on the data itself. Vulnerabilities for this threat can easily happen by design flaws and inadvertence. Here is an example from our sample application:

+
+
+

We have a generic use-case to manage BLOBs. In the first place, it makes sense to write a generic REST service to load and save these BLOBs. However, the permission to read or even update such BLOB depends on the business object hosting the BLOB. Therefore, such a generic REST service would open the door for this OWASP A4 vulnerability. To solve this in a secure way, you need individual services for each hosting business object to manage the linked BLOB and have to check permissions based on the parent business object. In this example the ID of the BLOB would be the direct object reference and the ID of the business object (and a BLOB property indicator) would be the indirect object reference.

+
+ +
+

==Component Facade

+
+
+ + + + + +
+ + +Our recommended approach for implementing the logic layer is use-cases +
+
+
+

For each component of the application, the logic layer defines a component facade. +This is an interface defining all business operations of the component. +It carries the name of the component («Component») and has an implementation named «Component»Impl (see implementation).

+
+
+
+
API
+
+

The component facade interface defines the logic API of the component and has to be business oriented. +This means that all parameters and return types of all methods from this API have to be business transfer-objects, datatypes (String, Integer, MyCustomerNumber, etc.), or collections of these. +The API may also only access objects of other business components listed in the (transitive) dependencies of the business-architecture.

+
+
+

Here is an example how such an API may look like:

+
+
+
+
public interface Bookingmanagement {
+
+  BookingEto findBooking(Long id);
+
+  BookingCto findBookingCto(Long id);
+
+  Page<BookingEto> findBookingEtos(BookingSearchCriteriaTo criteria);
+
+  void approveBooking(BookingEto booking);
+
+}
+
+
+
+
+
Implementation
+
+

The implementation of an interface from the logic layer (a component facade or a use-case) carries the name of that interface with the suffix Impl and is annotated with @Named. +An implementation typically needs access to the persistent data. +This is done by injecting the corresponding repository (or DAO). +According to data-sovereignty, only repositories of the same business component may be accessed directly. +For accessing data from other components the implementation has to use the corresponding API of the logic layer (the component facade). Further, it shall not expose persistent entities from the domain layer and has to map them to transfer objects using the bean-mapper.

+
+
+
+
@Named
+@Transactional
+public class BookingmanagementImpl extends AbstractComponentFacade implements Bookingmanagement {
+
+  @Inject
+  private BookingRepository bookingRepository;
+
+  @Override
+  public BookingEto findBooking(Long id) {
+
+    LOG.debug("Get Booking with id {} from database.", id);
+    BookingEntity entity = this.bookingRepository.findOne(id);
+    return getBeanMapper().map(entity, BookingEto.class));
+  }
+}
+
+
+
+

As you can see, entities (BookingEntity) are mapped to corresponding ETOs (BookingEto). +Further details about this can be found in bean-mapping.

+
+ +
+

==UseCase +A use-case is a small unit of the logic layer responsible for an operation on a particular entity (business object). +We leave it up to you to decide whether you want to define an interface (API) for each use-case or provide an implementation directly.

+
+
+

Following our architecture-mapping (for classic and modern projects), use-cases are named Uc«Operation»«BusinessObject»[Impl]. The prefix Uc stands for use-case and allows you to easily find and identify them in your IDE. The «Operation» stands for a verb that is operated on the entity identified by «BusinessObject». +For CRUD we use the standard operations Find and Manage that can be generated by CobiGen. This also separates read and write operations (e.g. if you want to do CQRS, or to configure read-only transactions for read operations).

+
+
+

In our example, we choose to define an interface for each use-case. We also use *To to refer to any type of transfer object. Please follow our guide to understand more about different types of transfer object e.g. Eto, Dto, Cto

+
+
+
+
Find
+
+

The UcFind«BusinessObject» defines all read operations to retrieve and search the «BusinessObject». +Here is an example:

+
+
+
+
public interface UcFindBooking {
+  //*To = Eto, Dto or Cto
+  Booking*To findBooking(Long id);
+}
+
+
+
+
+
Manage
+
+

The UcManage«BusinessObject» defines all CRUD write operations (create, update and delete) for the «BusinessObject». +Here is an example:

+
+
+
+
public interface UcManageBooking {
+
+  //*To = Eto, Dto or Cto
+  Booking*To saveBooking(Booking*To booking);
+
+  void deleteBooking(Long id);
+
+}
+
+
+
+
+
Custom
+
+

Any other non CRUD operation Uc«Operation»«BusinessObject» uses any other custom verb for «Operation». +Typically, such custom use-cases only define a single method. +Here is an example:

+
+
+
+
public interface UcApproveBooking {
+
+  //*To = Eto, Dto or Cto
+  void approveBooking(Booking*To booking);
+
+}
+
+
+
+
+
Implementation
+
+

The implementation should carry its own name and the suffix Impl and is annotated with @Named and @ApplicationScoped. It will need access to the persistent data which is done by injecting the corresponding repository (or DAO). Furthermore, it shall not expose persistent entities from the data access layer and has to map them to transfer objects using the bean-mapper. Please refer to our bean mapping, transfer object and dependency injection documentation for more information. +Here is an example:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcManageBookingImpl implements UcManageBooking {
+
+  @Inject
+  private BookingRepository bookingRepository;
+
+  @Override
+  public void deleteBooking(Long id) {
+
+    LOG.debug("Delete Booking with id {} from database.", id);
+    this.bookingRepository.deleteById(id);
+  }
+}
+
+
+
+

The use-cases can then be injected directly into the service.

+
+
+
+
@Named("BookingmanagementRestService")
+@Validated
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+
+  @Inject
+  private UcFindBooking ucFindBooking;
+
+  @Inject
+  private UcManageBooking ucManageBooking;
+
+  @Inject
+  private UcApproveBooking ucApproveBooking;
+}
+
+
+
+
+
Internal use case
+
+

Sometimes, a component with multiple related entities and many use-cases needs to reuse business logic internally. +Of course, this can be exposed as an official use-case API but this will imply using transfer-objects (ETOs) instead of entities. In some cases, this is undesired e.g. for better performance to prevent unnecessary mapping of entire collections of entities. +In the first place, you should try to use abstract base implementations providing reusable methods the actual use-case implementations can inherit from. +If your business logic is even more complex and you have multiple aspects of business logic to share and reuse but also run into multi-inheritance issues, you may also just create use-cases that have their interface located in the impl scope package right next to the implementation (or you may just skip the interface). In such a case, you may define methods that directly take or return entity objects. +To avoid confusion with regular use-cases, we recommend to add the Internal suffix to the type name leading to Uc«Operation»«BusinessObject»Internal[Impl].

+
+
+ +
+

==Data-Access Layer

+
+
+

The data-access layer is responsible for all outgoing connections to access and process data. This is mainly about accessing data from a persistent data-store. External system could also be accessed from the data-access layer if they match this definition, e.g. a mongo-db via rest services.

+
+
+

Note: In the modern project structure, this layer is replaced by the domain layer.

+
+
+
+
+
Database
+
+

You need to make your choice for a database. Options are documented here.

+
+
+

The classical approach is to use a Relational Database Management System (RDMS). In such a case, we strongly recommend to follow our JPA Guide. Some NoSQL databases are supported by spring-data so you can consider the repository guide.

+
+
+ +
+

==Batch Layer

+
+
+

We understand batch processing as a bulk-oriented, non-interactive, typically long running execution of tasks. For simplicity, we use the term "batch" or "batch job" for such tasks in the following documentation.

+
+
+

devonfw uses Spring Batch as a batch framework.

+
+
+

This guide explains how Spring Batch is used in devonfw applications. It focuses on aspects which are special to devonfw. If you want to learn about spring-batch you should adhere to springs references documentation.

+
+
+

There is an example of a simple batch implementation in the my-thai-star batch module.

+
+
+

In this chapter, we will describe the overall architecture (especially concerning layering) and how to administer batches.

+
+
+
+
Layering
+
+

Batches are implemented in the batch layer. The batch layer is responsible for batch processes, whereas the business logic is implemented in the logic layer. Compared to the service layer, you may understand the batch layer just as a different way of accessing the business logic. +From a component point of view, each batch is implemented as a subcomponent in the corresponding business component. +The business component is defined by the business architecture.

+
+
+

Let’s make an example for that. The sample application implements a batch for exporting ingredients. This ingredientExportJob belongs to the dishmanagement business component. +So the ingredientExportJob is implemented in the following package:

+
+
+
+
<basepackage>.dishmanagement.batch.impl.*
+
+
+
+

Batches should invoke use cases in the logic layer for doing their work. +Only "batch specific" technical aspects should be implemented in the batch layer.

+
+
+
+
+

Example: +For a batch, which imports product data from a CSV file, this means that all code for actually reading and parsing the CSV input file is implemented in the batch layer. +The batch calls the use case "create product" in the logic layer for actually creating the products for each line read from the CSV input file.

+
+
+
+
+
Directly accessing data access layer
+
+

In practice, it is not always appropriate to create use cases for every bit of work a batch should do. Instead, the data access layer can be used directly. +An example for that is a typical batch for data retention which deletes out-of-time data. +Often, deleting out-dated data is done by invoking a single SQL statement. It is appropriate to implement that SQL in a Repository or DAO method and call this method directly from the batch. +But be careful: this pattern is a simplification which could lead to business logic cluttered in different layers, which reduces the maintainability of your application. +It is a typical design decision you have to make when designing your specific batches.

+
+
+
+
+
Project structure and packaging
+
+

Batches will be implemented in a separate Maven module to keep the application core free of batch dependencies. The batch module includes a dependency on the application core-module to allow the reuse of the use cases, DAOs etc. +Additionally the batch module has dependencies on the required spring batch jars:

+
+
+
+
  <dependencies>
+
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>mtsj-core</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter-batch</artifactId>
+    </dependency>
+
+  </dependencies>
+
+
+
+

To allow an easy start of the batches from the command line it is advised to create a bootified jar for the batch module by adding the following to the pom.xml of the batch module:

+
+
+
+
  <build>
+    <resources>
+      <resource>
+        <directory>src/main/resources</directory>
+        <filtering>true</filtering>
+      </resource>
+    </resources>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <configuration>
+          <excludes>
+            <exclude>config/application.properties</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+      <!-- Create bootified jar for batch execution via command line.
+           Your applications spring boot app is used as main-class.
+       -->
+      <plugin>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-maven-plugin</artifactId>
+        <configuration>
+          <mainClass>com.devonfw.application.mtsj.SpringBootApp</mainClass>
+          <classifier>bootified</classifier>
+        </configuration>
+        <executions>
+          <execution>
+            <goals>
+              <goal>repackage</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+
+
+
+
Implementation
+
+

Most of the details about implementation of batches is described in the spring batch documentation. +There is nothing special about implementing batches in devonfw. You will find an easy example in my-thai-star.

+
+
+
+
Starting from command line
+
+

Devonfw advises starting batches via the command line. This is most familiar to many ops teams and allows easy integration into existing schedulers. In general, batches are started with the following command:

+
+
+
+
java -jar <app>-batch-<version>-bootified.jar --spring.main.web-application-type=none --spring.batch.job.enabled=true --spring.batch.job.names=<myJob> <params>
+
+
+ ++++ + + + + + + + + + + + + + + + + + + + + +
ParameterExplanation

--spring.main.web-application-type=none

This disables the web app (e.g. Tomcat)

--spring.batch.job.names=<myJob>

This specifies the name of the job to run. If you leave this out, ALL jobs will be executed, which probably does not make too much sense.

<params>

(Optional) additional parameters which are passed to your job

+
+

This will launch your normal Spring Boot app, disable the web application part and run the designated job via Spring Boot's org.springframework.boot.autoconfigure.batch.JobLauncherCommandLineRunner.

+
+
+
+
Scheduling
+
+

In the real world, scheduling of batches is not as simple as it might first appear.

+
+
+
    +
  • +

    Multiple batches have to be executed in order to achieve complex tasks. If one of those batches fails the further execution has to be stopped and operations should be notified for example.

    +
  • +
  • +

    Input files or those created by batches have to be copied from one node to another.

    +
  • +
  • +

    Scheduling batch executing could get complex easily (quarterly jobs, run job on first workday of a month, …​)

    +
  • +
+
+
+

For devonfw we propose the batches themselves should not mess around with details of scheduling. +Likewise your application should not do so. This complexity should be externalized to a dedicated batch administration service or scheduler. +This service could be a complex product or a simple tool like cron. We propose Rundeck as an open source job scheduler.

+
+
+

This gives full control to operations to choose the solution which fits best into existing administration procedures.

+
+
+
+
Handling restarts
+
+

If you start a job with the same parameters set after a failed run (BatchStatus.FAILED) a restart will occur. +In many cases your batch should then not reprocess all items it processed in the previous runs. +For that you need some logic to start at the desired offset. There are different ways to implement such logic:

+
+
+
    +
  • +

    Marking processed items in the database in a dedicated column

    +
  • +
  • +

    Write all IDs of items to process in a separate table as an initialization step of your batch. You can then delete IDs of already processed items from that table during the batch execution.

    +
  • +
  • +

    Storing restart information in springs ExecutionContext (see below)

    +
  • +
+
+
+
Using spring batch ExecutionContext for restarts
+
+

By implementing the ItemStream interface in your ItemReader or ItemWriter you may store information about the batch progress in the ExecutionContext. You will find an example for that in the CountJob in My Thai Star.

+
+
+

Additional hint: It is important that the bean definition methods of your ItemReader/ItemWriter return types implementing ItemStream (and not just ItemReader or ItemWriter alone). For that, the ItemStreamReader and ItemStreamWriter interfaces are provided.

+
+
+
+
+
Exit codes
+
+

Your batches should create a meaningful exit code to allow reaction to batch errors e.g. in a scheduler. +For that, spring batch automatically registers an org.springframework.boot.autoconfigure.batch.JobExecutionExitCodeGenerator. To make this mechanism work, your spring boot app main class has to populate this exit code to the JVM:

+
+
+
+
@SpringBootApplication
+public class SpringBootApp {
+
+  public static void main(String[] args) {
+    if (Arrays.stream(args).anyMatch((String e) -> e.contains("--spring.batch.job.names"))) {
+      // if executing batch job, explicitly exit jvm to report error code from batch
+      System.exit(SpringApplication.exit(SpringApplication.run(SpringBootApp.class, args)));
+    } else {
+      // normal web application start
+      SpringApplication.run(SpringBootApp.class, args);
+    }
+  }
+}
+
+
+
+
+
Stop batches and manage batch status
+
+

Spring batch uses several database tables to store the status of batch executions. +Each execution may have different status. +You may use this mechanism to gracefully stop batches. +Additionally in some edge cases (batch process crashed) the execution status may be in an undesired state. +E.g. the state will be running, despite the process crashed sometime ago. +For that cases you have to change the status of the execution in the database.

+
+
+
CLI-Tool
+
+

Devonfw provides an easy-to-use CLI tool to manage the execution status of your jobs. +The tool is implemented in the devonfw module devon4j-batch-tool. It will provide a runnable jar, which may be used as follows:

+
+
+
+
List names of all previous executed jobs
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar jobs list

+
+
Stop job named 'countJob'
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar jobs stop countJob

+
+
Show help
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar

+
+
+
+
+

As you can see, each invocation includes the JDBC connection string to your database. +This means that you have to make sure that the corresponding DB driver is in the classpath (the prepared jar only contains H2).

+
+
+
+
+
Authentication
+
+

Most business applications incorporate authentication and authorization. +Your spring boot application will implement some kind of security, e.g. integrated login with username+password or in many cases authentication via an existing IAM. +For security reasons your batch should also implement an authentication mechanism and obey the authorization implemented in your application (e.g. via @RolesAllowed).

+
+
+

Since there are many different authentication mechanisms we cannot provide an out-of-the-box solution in devonfw, but we describe a pattern how this can be implemented in devonfw batches.

+
+
+

We suggest to implement the authentication in a Spring Batch tasklet, which runs as the first step in your batch. This tasklet will do all of the work which is required to authenticate the batch. A simple example which authenticates the batch "locally" via username and password could be implemented like this:

+
+
+
+
@Named
+public class SimpleAuthenticationTasklet implements Tasklet {
+
+  @Override
+  public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
+
+    String username = chunkContext.getStepContext().getStepExecution().getJobParameters().getString("username");
+    String password = chunkContext.getStepContext().getStepExecution().getJobParameters().getString("password");
+    Authentication authentication = new UsernamePasswordAuthenticationToken(username, password);
+
+    SecurityContextHolder.getContext().setAuthentication(authentication);
+    return RepeatStatus.FINISHED;
+  }
+
+}
+
+
+
+

The username and password have to be supplied via two cli parameters -username and -password. This implementation creates an "authenticated" Authentication and sets it in the Spring Security context. This is just for demonstration; normally you should not provide passwords via command line. The actual authentication will be done automatically via Spring Security as in your "normal" application. +If you have a more complex authentication mechanism in your application, e.g. via OpenID Connect, just call this in the tasklet. Naturally you may read authentication parameters (e.g. secrets) from the command line or more securely from a configuration file.

+
+
+

In your Job Configuration set this tasklet as the first step:

+
+
+
+
@Configuration
+@EnableBatchProcessing
+public class BookingsExportBatchConfig {
+  @Inject
+  private JobBuilderFactory jobBuilderFactory;
+
+  @Inject
+  private StepBuilderFactory stepBuilderFactory;
+
+  @Bean
+  public Job myBatchJob() {
+    return this.jobBuilderFactory.get("myJob").start(myAuthenticationStep()).next(...).build();
+  }
+
+  @Bean
+  public Step myAuthenticationStep() {
+    return this.stepBuilderFactory.get("myAuthenticationStep").tasklet(myAuthenticatonTasklet()).build();
+  }
+
+  @Bean
+  public Tasklet myAuthenticatonTasklet() {
+    return new SimpleAuthenticationTasklet();
+  }
+...
+
+
+
+
+
Tipps & tricks
+
+
Identifying job parameters
+
+

Spring uses a job's parameters to identify job executions. Parameters starting with "-" are not considered for identifying a job execution.

+
+
+
+
+
+
+

1.77. Guides

+ +
+

==Configuration

+
+
+
Internal Application Configuration
+
+

There usually is a main configuration registered with main Spring Boot App, but differing configurations to support automated test of the application can be defined using profiles (not detailed in this guide).

+
+
+
Spring Boot Application
+
+

For a complete documentation, see the Spring Boot Reference Guide.

+
+
+

With spring-boot you provide a simple main class (also called starter class) like this: +com.devonfw.mtsj.application

+
+
+
+
@SpringBootApplication(exclude = { EndpointAutoConfiguration.class })
+@EntityScan(basePackages = { "com.devonfw.mtsj.application" }, basePackageClasses = { AdvancedRevisionEntity.class })
+@EnableGlobalMethodSecurity(jsr250Enabled = true)
+@ComponentScan(basePackages = { "com.devonfw.mtsj.application.general", "com.devonfw.mtsj.application" })
+public class SpringBootApp {
+
+  /**
+   * Entry point for spring-boot based app
+   *
+   * @param args - arguments
+   */
+  public static void main(String[] args) {
+
+    SpringApplication.run(SpringBootApp.class, args);
+  }
+}
+
+
+
+

In a devonfw application this main class is always located in the <basepackage> of the application package namespace (see package-conventions). This is because a spring boot application will automatically do a classpath scan for components (spring-beans) and entities in the package where the application main class is located including all sub-packages. You can use the @ComponentScan and @EntityScan annotations to customize this behaviour.

+
+
+

If you want to map spring configuration properties into your custom code please see configuration mapping.

+
+
+
+
Standard beans configuration
+
+

For basic bean configuration we rely on spring boot using mainly configuration classes and only occasionally XML configuration files. Some key principle to understand Spring Boot auto-configuration features:

+
+
+
    +
  • +

    Spring Boot auto-configuration attempts to automatically configure your Spring application based on the jar dependencies and annotated components found in your source code.

    +
  • +
  • +

    Auto-configuration is non-invasive, at any point you can start to define your own configuration to replace specific parts of the auto-configuration by redefining your identically named bean (see also exclude attribute of @SpringBootApplication in example code above).

    +
  • +
+
+
+

Beans are configured via annotations in your java code (see dependency-injection).

+
+
+

For technical configuration you will typically write additional spring config classes annotated with @Configuration that provide bean implementations via methods annotated with @Bean. See spring @Bean documentation for further details. Like in XML you can also use @Import to make a @Configuration class include other configurations.

+
+
+

More specific configuration files (as required) reside in an adequately named subfolder of:

+
+
+

src/main/resources/app

+
+
+
+
BeanMapper Configuration
+
+

In case you are still using dozer, you will find further details in bean-mapper configuration.

+
+
+
+
Security configuration
+
+

The abstract base class BaseWebSecurityConfig should be extended to configure web application security thoroughly. +A basic and secure configuration is provided which can be overridden or extended by subclasses. +Subclasses must use the @Profile annotation to further discriminate between beans used in production and testing scenarios. See the following example:

+
+
+
Listing 14. How to extend BaseWebSecurityConfig for Production and Test
+
+
@Configuration
+@EnableWebSecurity
+@Profile(SpringProfileConstants.JUNIT)
+public class TestWebSecurityConfig extends BaseWebSecurityConfig {...}
+
+@Configuration
+@EnableWebSecurity
+@Profile(SpringProfileConstants.NOT_JUNIT)
+public class WebSecurityConfig extends BaseWebSecurityConfig {...}
+
+
+ +
+
+
WebSocket configuration
+
+

A websocket endpoint is configured within the business package as a Spring configuration class. The annotation @EnableWebSocketMessageBroker makes Spring Boot registering this endpoint.

+
+
+
+
package your.path.to.the.websocket.config;
+...
+@Configuration
+@EnableWebSocketMessageBroker
+public class WebSocketConfig extends AbstractWebSocketMessageBrokerConfigurer {
+...
+
+
+
+
+
+
External Application Configuration
+
+
application.properties files
+
+

Here is a list of common properties provided by the Spring framework.

+
+
+

For a general understanding of how spring-boot is loading and bootstrapping your application.properties see spring-boot external configuration.

+
+
+

The following properties files are used in devonfw application:

+
+
+
    +
  • +

    src/main/resources/application.properties providing a default configuration - bundled and deployed with the application package. It further acts as a template to derive a tailored minimal environment-specific configuration.

    +
  • +
  • +

    src/main/resources/config/application.properties providing additional properties only used at development time (for all local deployment scenarios). This property file is excluded from all packaging.

    +
  • +
  • +

    src/test/resources/config/application.properties providing additional properties only used for testing (JUnits based on spring test).

    +
  • +
+
+
+

For other environments where the software gets deployed such as test, acceptance and production you need to provide a tailored copy of application.properties. The location depends on the deployment strategy:

+
+
+
    +
  • +

    standalone run-able Spring Boot App using embedded tomcat: config/application.properties under the installation directory of the spring boot application.

    +
  • +
  • +

    dedicated tomcat (one tomcat per app): $CATALINA_BASE/lib/config/application.properties

    +
  • +
  • +

    tomcat serving a number of apps (requires expanding the wars): $CATALINA_BASE/webapps/<app>/WEB-INF/classes/config

    +
  • +
+
+
+

In this application.properties you only define the minimum properties that are environment specific and inherit everything else from the bundled src/main/resources/application.properties. In any case, make very sure that the classloader will find the file.

+
+
+
+
Database Configuration
+
+

The configuration for spring and Hibernate is already provided by devonfw in our sample application and the application template. So you only need to worry about a few things to customize.

+
+
+Database System and Access +
+

Obviously you need to configure which type of database you want to use as well as the location and credentials to access it. The defaults are configured in application.properties that is bundled and deployed with the release of the software. The files should therefore contain the properties as in the given example:

+
+
+
+
  database.url=jdbc:postgresql://database.enterprise.com/app
+  database.user.login=appuser01
+  database.user.password=************
+  database.hibernate.dialect = org.hibernate.dialect.PostgreSQLDialect
+  database.hibernate.hbm2ddl.auto=validate
+
+
+
+

For further details about database.hibernate.hbm2ddl.auto please see here. For production and acceptance environments we use the value validate that should be set as default. In case you want to use Oracle RDBMS you can find additional hints here.

+
+
+

If your application supports multiple database types, set spring.profiles.active=XXX in src/main/resources/config/application.properties to choose the database of your choice. Also, one has to set all the active spring profiles in this application.properties and not in any of the other application.properties.

+
+
+
+Database Logging +
+

Add the following properties to application.properties to enable logging of database queries for debugging purposes.

+
+
+
+
spring.jpa.properties.hibernate.show_sql=true
+spring.jpa.properties.hibernate.use_sql_comments=true
+spring.jpa.properties.hibernate.format_sql=true
+
+
+
+
+
+
+
Security
+
+
Password Encryption
+
+

In order to support encrypted passwords in spring-boot application.properties all you need to do is to add jasypt-spring-boot as dependency in your pom.xml (please check for recent version here):

+
+
+
+
<dependency>
+  <groupId>com.github.ulisesbocchio</groupId>
+  <artifactId>jasypt-spring-boot-starter</artifactId>
+  <version>3.0.3</version>
+</dependency>
+
+
+
+

This will smoothly integrate jasypt into your spring-boot application. Read this HOWTO to learn how to encrypt and decrypt passwords using jasypt.

+
+
+

Next, we give a simple example how to encrypt and configure a secret value. +We use the algorithm PBEWITHHMACSHA512ANDAES_256 that provides strong encryption and is the default of jasypt-spring-boot-starter. +However, different algorithms can be used if preferred (e.g. PBEWITHMD5ANDTRIPLEDES).

+
+
+
+
java -cp ${M2_REPO}/org/jasypt/jasypt/1.9.3/jasypt-1.9.3.jar org.jasypt.intf.cli.JasyptPBEStringEncryptionCLI password=masterpassword algorithm=PBEWITHHMACSHA512ANDAES_256 input=secret ivGeneratorClassName=org.jasypt.iv.RandomIvGenerator
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.5+10
+
+
+
+----ARGUMENTS-------------------
+
+input: secret
+password: masterpassword
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+PoUxkNjY2juQMCyPu6ic5KJy1XfK+bX9vu2/mPj3pmcO4iydG6mhgZRZSw50z/oC
+
+
+
+

Of course the master-password (masterpassword) and the actual password to encrypt (secret) are just examples. +Please replace them with reasonable strong passwords for your environment. +Further, if you are using devonfw-ide you can make your life much easier and just type:

+
+
+
+
devon jasypt encrypt
+
+
+
+

See jasypt commandlet for details.

+
+
+

Now the entire line after the OUTPUT block is your encrypted secret. +It even contains some random salt so that multiple encryption invocations with the same parameters (ARGUMENTS) will produce a different OUTPUT.

+
+
+

The master-password can be configured on your target environment via the property jasypt.encryptor.password. As system properties given on the command-line are visible in the process list, we recommend to use a config/application.yml file only for this purpose (as we recommended to use application.properties for regular configs):

+
+
+
+
jasypt:
+    encryptor:
+        password: masterpassword
+
+
+
+

Again masterpassword is just an example that you replace with your actual master password. +Now you are able to put encrypted passwords into your application.properties and specify the algorithm.

+
+
+
+
spring.datasource.password=ENC(PoUxkNjY2juQMCyPu6ic5KJy1XfK+bX9vu2/mPj3pmcO4iydG6mhgZRZSw50z/oC)
+jasypt.encryptor.algorithm=PBEWITHHMACSHA512ANDAES_256
+
+
+
+

This application.properties file can be version controlled (git-ops) and without knowing the masterpassword nobody is able to decrypt this to get the actual secret back.

+
+
+

To prevent jasypt to throw an exception in dev or test scenarios you can simply put this in your local config (src/main/config/application.properties and same for test, see above for details):

+
+
+
+
jasypt.encryptor.password=none
+
+
+ +
+

==Mapping configuration to your code

+
+
+

If you are using spring-boot as suggested by devon4j your application can be configured by application.properties file as described in configuration. +To get a single configuration option into your code for flexibility, you can use

+
+
+
+
@Value("${my.property.name}")
+private String myConfigurableField;
+
+
+
+

Now, in your application.properties you can add the property:

+
+
+
+
my.property.name=my-property-value
+
+
+
+

You may even use @Value("${my.property.name:my-default-value}") to make the property optional.

+
+
+
+
Naming conventions for configuration properties
+
+

As a best practice your configuration properties should follow these naming conventions:

+
+
+
    +
  • +

    build the property-name as a path of segments separated by the dot character (.)

    +
  • +
  • +

    segments should get more specific from left to right

    +
  • +
  • +

    a property-name should either be a leaf value or a tree node (prefix of other property-names) but never both! So never have something like foo.bar=value and foo.bar.child=value2.

    +
  • +
  • +

    start with a segment namespace unique to your context or application

    +
  • +
  • +

    a good example would be «myapp».billing.service.email.sender for the sender address of billing service emails send by «myapp».

    +
  • +
+
+
+
+
Mapping advanced configuration
+
+

However, in many scenarios you will have features that require more than just one property. +Injecting those via @Value is not leading to good code quality. +Instead we create a class with the suffix ConfigProperties containing all configuration properties for our aspect that is annotated with @ConfigurationProperties:

+
+
+
+
@ConfigurationProperties(prefix = "myapp.billing.service")
+public class BillingServiceConfigProperties {
+
+  private final Email email = new Email();
+  private final Smtp smtp = new Smtp();
+
+  public Email getEmail() { return this.email; }
+  public Smtp getSmtp() { return this.smtp; }
+
+  public static class Email {
+
+    private String sender;
+    private String subject;
+
+    public String getSender() { return this.sender; }
+    public void setSender(String sender) { this.sender = sender; }
+    public String getSubject() { return this.subject; }
+    public void setSubject(String subject) { this.subject = subject; }
+  }
+
+  public static class Smtp {
+
+    private String host;
+    private int port = 25;
+
+    public String getHost() { return this.host; }
+    public void setHost(String host) { this.host = host; }
+    public int getPort() { return this.port; }
+    public void setPort(int port) { this.port = port; }
+  }
+
+}
+
+
+
+

Of course this is just an example to demonstrate this feature of spring-boot. +In order to send emails you would typically use the existing spring-email feature. +But as you can see this allows us to define and access our configuration in a very structured and comfortable way. +The annotation @ConfigurationProperties(prefix = "myapp.billing.service") will automatically map spring configuration properties starting with myapp.billing.service via the according getters and setters into our BillingServiceConfigProperties. +We can easily define defaults (e.g. 25 as default value for myapp.billing.service.smtp.port). +Also Email or Smtp could be top-level classes to be reused in multiple configurations. +Of course you would also add helpful JavaDoc comments to the getters and classes to document your configuration options. +Further to access this configuration, we can use standard dependency-injection:

+
+
+
+
@Inject
+private BillingServiceConfigProperties config;
+
+
+
+

For very generic cases you may also use Map<String, String> to map any kind of property in an untyped way. +An example for generic configuration from devon4j can be found in +ServiceConfigProperties.

+
+
+

For further details about this feature also consult Guide to @ConfigurationProperties in Spring Boot.

+
+
+
+
Generate configuration metadata
+
+

You should further add this dependency to your module containing the *ConfigProperties:

+
+
+
+
    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-configuration-processor</artifactId>
+      <optional>true</optional>
+    </dependency>
+
+
+
+

This will generate configuration metadata so projects using your code can benefit from autocompletion and getting your JavaDoc as tooltip when editing application.properties, which makes this approach very powerful. +For further details about this please read A Guide to Spring Boot Configuration Metadata.

+
+
+ +
+

==Auditing

+
+
+

For database auditing we use hibernate envers. If you want to use auditing ensure you have the following dependency in your pom.xml:

+
+
+
Listing 15. spring
+
+
<dependency>
+  <groupId>com.devonfw.java.modules</groupId>
+  <artifactId>devon4j-jpa-envers</artifactId>
+</dependency>
+
+
+
+
Listing 16. quarkus
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-hibernate-envers</artifactId>
+</dependency>
+
+
+
+ + + + + +
+ + +The following part applies only to spring applications. At this point, the Quarkus extension does not provide any additional configurations. For Quarkus applications, simply use the @Audited annotation to enable auditing for an entity class, as described a few lines below or seen here. +
+
+
+

Make sure that entity manager also scans the package from the devon4j-jpa[-envers] module in order to work properly. And make sure that correct Repository Factory Bean Class is chosen.

+
+
+
+
@EntityScan(basePackages = { "«my.base.package»" }, basePackageClasses = { AdvancedRevisionEntity.class })
+...
+@EnableJpaRepositories(repositoryFactoryBeanClass = GenericRevisionedRepositoryFactoryBean.class)
+...
+public class SpringBootApp {
+  ...
+}
+
+
+
+

Now let your [Entity]Repository extend from DefaultRevisionedRepository instead of DefaultRepository.

+
+
+

The repository now has a method getRevisionHistoryMetadata(id) and getRevisionHistoryMetadata(id, boolean lazy) available to get a list of revisions for a given entity and a method find(id, revision) to load a specific revision of an entity with the given ID or getLastRevisionHistoryMetadata(id) to load last revision. +To enable auditing for an entity simply place the @Audited annotation to your entity and all entity classes it extends from.

+
+
+
+
@Entity(name = "Drink")
+@Audited
+public class DrinkEntity extends ProductEntity implements Drink {
+...
+
+
+
+

When auditing is enabled for an entity an additional database table is used to store all changes to the entity table and a corresponding revision number. This table is called <ENTITY_NAME>_AUD per default. Another table called REVINFO is used to store all revisions. Make sure that these tables are available. They can be generated by hibernate with the following property (only for development environments).

+
+
+
+
  database.hibernate.hbm2ddl.auto=create
+
+
+
+

Another possibility is to put them in your database migration scripts like so.

+
+
+
+
CREATE CACHED TABLE PUBLIC.REVINFO(
+  id BIGINT NOT NULL generated by default as identity (start with 1),
+  timestamp BIGINT NOT NULL,
+  user VARCHAR(255)
+);
+...
+CREATE CACHED TABLE PUBLIC.<TABLE_NAME>_AUD(
+    <ALL_TABLE_ATTRIBUTES>,
+    revtype TINYINT,
+    rev BIGINT NOT NULL
+);
+
+
+
+ +
+

==Access-Control +Access-Control is a central and important aspect of Security. It consists of two major aspects:

+
+
+ +
+
+
+
+
Authentication
+
+

Definition:

+
+
+
+
+

Authentication is the verification that somebody interacting with the system is the actual subject for whom he claims to be.

+
+
+
+
+

The one authenticated is properly called subject or principal. There are two forms of principals you need to distinguish while designing your authentication: human users and autonomous systems. While e.g. a Kerberos/SPNEGO Single-Sign-On makes sense for human users, it is pointless for authenticating autonomous systems. For simplicity, we use the common term user to refer to any principal even though it may not be a human (e.g. in case of a service call from an external system).

+
+
+

To prove the authenticity, the user provides some secret called credentials. The most simple form of credentials is a password.

+
+
+
Implementations
+
+ + + + + +
+ + +Please never implement your own authentication mechanism or credential store. You have to be aware of implicit demands such as salting and hashing credentials, password life-cycle with recovery, expiry, and renewal including email notification confirmation tokens, central password policies, etc. This is the domain of access managers and identity management systems. In a business context you will typically already find a system for this purpose that you have to integrate (e.g. via LDAP). Otherwise you should consider establishing such a system e.g. using keycloak. +
+
+
+

We recommend using JWT when possible. For KISS, also try to avoid combining multiple authentication mechanisms (form based, basic-auth, SAMLv2, OAuth, etc.) within the same application (for different URLs).

+
+
+

For spring, check the Spring Security

+
+
+

For quarkus, check the Quarkus Authentication

+
+
+
+
+
Authorization
+
+

Definition:

+
+
+
+
+

Authorization is the verification that an authenticated user is allowed to perform the operation he intends to invoke.

+
+
+
+
+
Clarification of terms
+
+

For clarification we also want to give a common understanding of related terms that have no unique definition and consistent usage in the wild.

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 14. Security terms related to authorization
TermMeaning and comment

Permission

A permission is an object that allows a principal to perform an operation in the system. This permission can be granted (give) or revoked (taken away). Sometimes people also use the term right what is actually wrong as a right (such as the right to be free) can not be revoked.

Group

We use the term group in this context for an object that contains permissions. A group may also contain other groups. Then the group represents the set of all recursively contained permissions.

Role

We consider a role as a specific form of group that also contains permissions. A role identifies a specific function of a principal. A user can act in a role.

+

For simple scenarios a principal has a single role associated. In more complex situations a principal can have multiple roles but has only one active role at a time that he can choose out of his assigned roles. For KISS it is sometimes sufficient to avoid this by creating multiple accounts for the few users with multiple roles. Otherwise at least avoid switching roles at run-time in clients as this may cause problems with related states. Simply restart the client with the new role as parameter in case the user wants to switch his role.

Access Control

Any permission, group, role, etc., which declares a control for access management.

+
+
+
Suggestions on the access model
+
+

For the access model we give the following suggestions:

+
+
+
    +
  • +

    Each Access Control (permission, group, role, …​) is uniquely identified by a human readable string.

    +
  • +
  • +

    We create a unique permission for each use-case.

    +
  • +
  • +

    We define groups that combine permissions to typical and useful sets for the users.

    +
  • +
  • +

    We define roles as specific groups as required by our business demands.

    +
  • +
  • +

    We allow to associate users with a list of Access Controls.

    +
  • +
  • +

    For authorization of an implemented use case we determine the required permission. Furthermore, we determine the current user and verify that the required permission is contained in the tree spanned by all his associated Access Controls. If the user does not have the permission we throw a security exception and thus abort the operation and transaction.

    +
  • +
  • +

    We avoid negative permissions, that is a user has no permission by default and only those granted to him explicitly give him additional permission for specific things. Permissions granted can not be reduced by other permissions.

    +
  • +
  • +

    Technically we consider permissions as a secret of the application. Administrators shall not fiddle with individual permissions but grant them via groups. So the access management provides a list of strings identifying the Access Controls of a user. The individual application itself contains these Access Controls in a structured way, whereas each group forms a permission tree.

    +
  • +
+
+
+
+
Naming conventions
+
+

As stated above each Access Control is uniquely identified by a human readable string. This string should follow the naming convention:

+
+
+
+
«app-id».«local-name»
+
+
+
+

For Access Control Permissions the «local-name» again follows the convention:

+
+
+
+
«verb»«object»
+
+
+
+

The segments are defined by the following table:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 15. Segments of Access Control Permission ID
SegmentDescriptionExample

«app-id»

Is a unique technical but human readable string of the application (or microservice). It shall not contain special characters and especially no dot or whitespace. We recommend to use lower-train-case-ascii-syntax. The identity and access management should be organized on enterprise level rather than application level. Therefore permissions of different apps might easily clash (e.g. two apps might both define a group ReadMasterData but some user shall get this group for only one of these two apps). Using the «app-id». prefix is a simple but powerful namespacing concept that allows you to scale and grow. You may also reserve specific «app-id»s for cross-cutting concerns that do not actually reflect a single app e.g to grant access to a geographic region.

shop

«verb»

The action that is to be performed on «object». We use Find for searching and reading data. Save shall be used both for create and update. Only if you really have demands to separate these two you may use Create in addition to Save. Finally, Delete is used for deletions. For non CRUD actions you are free to use additional verbs such as Approve or Reject.

Find

«object»

The affected object or entity. Shall be named according to your data-model

Product

+
+

So as an example shop.FindProduct will reflect the permission to search and retrieve a Product in the shop application. The group shop.ReadMasterData may combine all permissions to read master-data from the shop. However, also a group shop.Admin may exist for the Admin role of the shop application. Here the «local-name» is Admin that does not follow the «verb»«object» schema.

+
+
+
+
devon4j-security
+
+

The module devon4j-security provides ready-to-use code based on spring-security that makes your life a lot easier.

+
+
+
+access-control +
+
Figure 3. devon4j Security Model
+
+
+

The diagram shows the model of devon4j-security that separates two different aspects:

+
+
+
    +
  • +

    The Identity- and Access-Management is provided by according products and typically already available in the enterprise landscape (e.g. an active directory). It provides a hierarchy of primary access control objects (roles and groups) of a user. An administrator can grant and revoke permissions (indirectly) via this way.

    +
  • +
  • +

    The application security defines a hierarchy of secondary access control objects (groups and permissions). This is done by configuration owned by the application (see following section). The "API" is defined by the IDs of the primary access control objects that will be referenced from the Identity- and Access-Management.

    +
  • +
+
+
+
+
Access Control Config
+
+

In your application simply extend AccessControlConfig to configure your access control objects as code and reference it from your use-cases. An example config may look like this:

+
+
+
+
@Named
+public class ApplicationAccessControlConfig extends AccessControlConfig {
+
+  public static final String APP_ID = "MyApp";
+
+  private static final String PREFIX = APP_ID + ".";
+
+  public static final String PERMISSION_FIND_OFFER = PREFIX + "FindOffer";
+
+  public static final String PERMISSION_SAVE_OFFER = PREFIX + "SaveOffer";
+
+  public static final String PERMISSION_DELETE_OFFER = PREFIX + "DeleteOffer";
+
+  public static final String PERMISSION_FIND_PRODUCT = PREFIX + "FindProduct";
+
+  public static final String PERMISSION_SAVE_PRODUCT = PREFIX + "SaveProduct";
+
+  public static final String PERMISSION_DELETE_PRODUCT = PREFIX + "DeleteProduct";
+
+  public static final String GROUP_READ_MASTER_DATA = PREFIX + "ReadMasterData";
+
+  public static final String GROUP_MANAGER = PREFIX + "Manager";
+
+  public static final String GROUP_ADMIN = PREFIX + "Admin";
+
+  public ApplicationAccessControlConfig() {
+
+    super();
+    AccessControlGroup readMasterData = group(GROUP_READ_MASTER_DATA, PERMISSION_FIND_OFFER, PERMISSION_FIND_PRODUCT);
+    AccessControlGroup manager = group(GROUP_MANAGER, readMasterData, PERMISSION_SAVE_OFFER, PERMISSION_SAVE_PRODUCT);
+    AccessControlGroup admin = group(GROUP_ADMIN, manager, PERMISSION_DELETE_OFFER, PERMISSION_DELETE_PRODUCT);
+  }
+}
+
+
+
+
+
Configuration on Java Method level
+
+

In your use-case you can now reference a permission like this:

+
+
+
+
@Named
+public class UcSafeOfferImpl extends ApplicationUc implements UcSafeOffer {
+
+  @Override
+  @RolesAllowed(ApplicationAccessControlConfig.PERMISSION_SAVE_OFFER)
+  public OfferEto save(OfferEto offer) { ... }
+  ...
+}
+
+
+
+
+
JEE Standard
+
+

Role-based Access Control (RBAC) is commonly used for authorization. +JSR 250 defines a number of common annotations to secure your application.

+
+
+
    +
  • +

    javax.annotation.security.PermitAll specifies that no access control is required to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.DenyAll specifies that no access controls are allowed to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.RolesAllowed specifies that only a list of access controls are allowed to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.DeclareRoles defines roles for security checking.

    +
  • +
  • +

    javax.annotation.security.RunAs specifies the RunAs role for the given components.

    +
  • +
+
+
+

@PermitAll, @DenyAll, and @RolesAllowed annotations can be applied to both class and method. +A method-level annotation will override the behaviour of a class-level annotation. Using multiple annotations of those 3 is not valid.

+
+
+
+
// invalid
+@PermitAll
+@DenyAll
+public String foo()
+
+// invalid and compilation fails
+@RolesAllowed("admin")
+@RolesAllowed("user")
+public String bar()
+
+// OK
+@RolesAllowed("admin", "user")
+public String bar()
+
+
+
+

Please note that when specifying multiple arguments to @RolesAllowed those are combined with OR (and not with AND). +So if the user has any of the specified access controls, he will be able to access the method.

+
+
+

As a best practice avoid specifying string literals to @RolesAllowed. +Instead define a class with all access controls as constants and reference them from there. +This class is typically called ApplicationAccessControlConfig in devonfw.

+
+
+

In many complicated cases where @PermitAll @DenyAll @RolesAllowed are insufficient e.g. a method should be accessed by a user in role A and not in role B at the same time, you have to verify the user role directly in the method. You can use SecurityContext class to get further needed information.

+
+
+Spring +
+

Spring Security also supports authorization on method level. To use it, you need to add the spring-security-config dependency. If you use Spring Boot, the dependency spring-boot-starter-security already includes spring-security-config. Then you can configure as follows:

+
+
+
    +
  • +

    prePostEnabled property enables Spring Security pre/post annotations. @PreAuthorize and @PostAuthorize annotations provide expression-based access control. See more here

    +
  • +
  • +

    securedEnabled property determines if the @Secured annotation should be enabled. @Secured can be used similarly to @RolesAllowed.

    +
  • +
  • +

    jsr250Enabled property allows us to use the JSR-250 annotations such as @RolesAllowed.

    +
  • +
+
+
+
+
@Configuration
+@EnableGlobalMethodSecurity(
+  prePostEnabled = true,
+  securedEnabled = true,
+  jsr250Enabled = true)
+public class MethodSecurityConfig
+  extends GlobalMethodSecurityConfiguration {
+}
+
+
+
+

A further read about the whole concept of Spring Security Authorization can be found here.

+
+
+
+Quarkus +
+

Quarkus comes with built-in security to allow for RBAC based on the common security annotations @RolesAllowed, @DenyAll, @PermitAll on REST endpoints and CDI beans. Quarkus also provides the io.quarkus.security.Authenticated annotation that will permit any authenticated user to access the resource (equivalent to @RolesAllowed("**")).

+
+
+
+
+
Data-based Permissions
+ +
+
+
Access Control Schema (deprecated)
+
+

The access-control-schema.xml approach is deprecated. The documentation can still be found in access control schema.

+
+
+ +
+

==Data-permissions

+
+
+

In some projects there are demands for permissions and authorization that is dependent on the processed data. E.g. a user may only be allowed to read or write data for a specific region. This is adding some additional complexity to your authorization. If you can avoid this it is always best to keep things simple. However, in various cases this is a requirement. Therefore the following sections give you guidance and patterns how to solve this properly.

+
+
+
+
Structuring your data
+
+

For all your business objects (entities) that have to be secured regarding to data permissions we recommend that you create a separate interface that provides access to the relevant data required to decide about the permission. Here is a simple example:

+
+
+
+
public interface SecurityDataPermissionCountry {
+
+  /**
+   * @return the 2-letter ISO code of the country this object is associated with. Users need
+   *         a data-permission for this country in order to read and write this object.
+   */
+  String getCountry();
+}
+
+
+
+

Now related business objects (entities) can implement this interface. Often such data-permissions have to be applied to an entire object-hierarchy. For security reasons we recommend that also all child-objects implement this interface. For performance reasons we recommend that the child-objects redundantly store the data-permission properties (such as country in the example above) and this gets simply propagated from the parent, when a child object is created.

+
+
+
+
Permissions for processing data
+
+

When saving or processing objects with a data-permission, we recommend to provide dedicated methods to verify the permission in an abstract base-class such as AbstractUc and simply call this explicitly from your business code. This makes it easy to understand and debug the code. Here is a simple example:

+
+
+
+
protected void verifyPermission(SecurityDataPermissionCountry entity) throws AccessDeniedException;
+
+
+
+Beware of AOP +
+

For simple but cross-cutting data-permissions you may also use AOP. This leads to programming aspects that reflectively scan method arguments and magically decide what to do. Be aware that this quickly gets tricky:

+
+
+
    +
  • +

    What if multiple of your method arguments have data-permissions (e.g. implement SecurityDataPermission*)?

    +
  • +
  • +

    What if the object to authorize is only provided as reference (e.g. Long or IdRef) and only loaded and processed inside the implementation where the AOP aspect does not apply?

    +
  • +
  • +

    How to express advanced data-permissions in annotations?

    +
  • +
+
+
+

What we have learned is that annotations like @PreAuthorize from spring-security easily lead to the "programming in string literals" anti-pattern. We strongly discourage to use this anti-pattern. In such case writing your own verifyPermission methods that you manually call in the right places of your business-logic is much better to understand, debug and maintain.

+
+
+
+
+
Permissions for reading data
+
+

When it comes to restrictions on the data to read it becomes even more tricky. In the context of a user only entities shall be loaded from the database he is permitted to read. This is simple for loading a single entity (e.g. by its ID) as you can load it and then if not permitted throw an exception to secure your code. But what if the user is performing a search query to find many entities? For performance reasons we should only find data the user is permitted to read and filter all the rest already via the database query. But what if this is not a requirement for a single query but needs to be applied cross-cutting to tons of queries? Therefore we have the following pattern that solves your problem:

+
+
+

For each data-permission attribute (or set of such) we create an abstract base entity:

+
+
+
+
@MappedSuperclass
+@EntityListeners(PermissionCheckListener.class)
+@FilterDef(name = "country", parameters = {@ParamDef(name = "countries", type = "string")})
+@Filter(name = "country", condition = "country in (:countries)")
+public abstract class SecurityDataPermissionCountryEntity extends ApplicationPersistenceEntity
+    implements SecurityDataPermissionCountry {
+
+  private String country;
+
+  @Override
+  public String getCountry() {
+    return this.country;
+  }
+
+  public void setCountry(String country) {
+    this.country = country;
+  }
+}
+
+
+
+

There are some special hibernate annotations @EntityListeners, @FilterDef, and @Filter used here allowing to apply a filter on the country for any (non-native) query performed by hibernate. The entity listener may look like this:

+
+
+
+
public class PermissionCheckListener {
+
+  @PostLoad
+  public void read(SecurityDataPermissionCountryEntity entity) {
+    PermissionChecker.getInstance().requireReadPermission(entity);
+  }
+
+  @PrePersist
+  @PreUpdate
+  public void write(SecurityDataPermissionCountryEntity entity) {
+    PermissionChecker.getInstance().requireWritePermission(entity);
+  }
+}
+
+
+
+

This will ensure that hibernate implicitly will call these checks for every such entity when it is read from or written to the database. Further to avoid reading entities from the database the user is not permitted to (and ending up with exceptions), we create an AOP aspect that automatically activates the above declared hibernate filter:

+
+
+
+
@Named
+public class PermissionCheckerAdvice implements MethodBeforeAdvice {
+
+  @Inject
+  private PermissionChecker permissionChecker;
+
+  @PersistenceContext
+  private EntityManager entityManager;
+
+  @Override
+  public void before(Method method, Object[] args, Object target) {
+
+    Collection<String> permittedCountries = this.permissionChecker.getPermittedCountriesForReading();
+    if (permittedCountries != null) { // null is returned for admins that may access all countries
+      if (permittedCountries.isEmpty()) {
+        throw new AccessDeniedException("Not permitted for any country!");
+      }
+      Session session = this.entityManager.unwrap(Session.class);
+      session.enableFilter("country").setParameterList("countries", permittedCountries.toArray());
+    }
+  }
+}
+
+
+
+

Finally to apply this aspect to all Repositories (can easily be changed to DAOs) implement the following advisor:

+
+
+
+
@Named
+public class PermissionCheckerAdvisor implements PointcutAdvisor, Pointcut, ClassFilter, MethodMatcher {
+
+  @Inject
+  private PermissionCheckerAdvice advice;
+
+  @Override
+  public Advice getAdvice() {
+    return this.advice;
+  }
+
+  @Override
+  public boolean isPerInstance() {
+    return false;
+  }
+
+  @Override
+  public Pointcut getPointcut() {
+    return this;
+  }
+
+  @Override
+  public ClassFilter getClassFilter() {
+    return this;
+  }
+
+  @Override
+  public MethodMatcher getMethodMatcher() {
+    return this;
+  }
+
+  @Override
+  public boolean matches(Method method, Class<?> targetClass) {
+    return true; // apply to all methods
+  }
+
+  @Override
+  public boolean isRuntime() {
+    return false;
+  }
+
+  @Override
+  public boolean matches(Method method, Class<?> targetClass, Object... args) {
+    throw new IllegalStateException("isRuntime()==false");
+  }
+
+  @Override
+  public boolean matches(Class<?> clazz) {
+    // when using DAOs simply change to some class like ApplicationDao
+    return DefaultRepository.class.isAssignableFrom(clazz);
+  }
+}
+
+
+
+
+
Managing and granting the data-permissions
+
+

Following our authorization guide we can simply create a permission for each country. We might simply reserve a prefix (as virtual «app-id») for each data-permission to allow granting data-permissions to end-users across all applications of the IT landscape. In our example we could create access controls country.DE, country.US, country.ES, etc. and assign those to the users. The method permissionChecker.getPermittedCountriesForReading() would then scan for these access controls and only return the 2-letter country code from it.

+
+
+ + + + + +
+ + +Before you make your decisions how to design your access controls please clarify the following questions: +
+
+
+
    +
  • +

    Do you need to separate data-permissions independent of the functional permissions? E.g. may it be required to express that a user can read data from the countries ES and PL but is only permitted to modify data from PL? In such case a single assignment of "country-permissions" to users is insufficient.

    +
  • +
  • +

    Do you want to grant data-permissions individually for each application (higher flexibility and complexity) or for the entire application landscape (simplicity, better maintenance for administrators)? In case of the first approach you would rather have access controls like app1.country.GB and app2.country.GB.

    +
  • +
  • +

    Do your data-permissions depend on objects that can be created dynamically inside your application?

    +
  • +
  • +

    If you want to grant data-permissions on other business objects (entities), how do you want to reference them (primary keys, business keys, etc.)? What reference is most stable? Which is most readable?

    +
  • +
+
+
+ +
+

==JWT

+
+
+

JWT (JSON Web Token) is an open standard (see RFC 7519) for creating JSON based access tokens that assert some number of claims. +With an IT landscape divided into multiple smaller apps you want to avoid coupling all those apps or services tightly with your IAM (Identity & Access Management). +Instead your apps simply expect a JWT as bearer-token in the Authorization HTTP header field. +All they need to do for authentication is validate this JWT. +The actual authentication is done centrally by an access system (IAM) that authors those JWTs. +Therefore we recommend to use strong asymmetric cryptography to sign the JWT when it is authored. +Create a keypair per environment and keep the private key as a secret only known to the access system authorizing the JWTs. +Your apps only need to know the public key in order to validate the JWT. +Any request without a JWT or with an invalid JWT will be rejected (with status code 401).

+
+
+

When using spring check the JWT Spring-Starter. +For quarkus follow Using JWT RBAC.

+
+
+ +
+

==Cross-site request forgery (CSRF)

+
+
+

CSRF is a type of malicious exploit of a web application that allows an attacker to induce users to perform actions that they do not intend to perform.

+
+
+
+csrf +
+
+
+

More details about csrf can be found at https://owasp.org/www-community/attacks/csrf.

+
+
+
+
+
Secure devon4j server against CSRF
+
+

In case your devon4j server application is not accessed by browsers or the web-client is using JWT based authentication, you are already safe regarding CSRF. +However, if your application is accessed from a browser and you are using form based authentication (with session cookie) or basic authentication, you need to enable CSRF protection. +This guide will tell you how to do this.

+
+
+
Dependency
+
+

To secure your devon4j application against CSRF attacks, you only need to add the following dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-security-csrf</artifactId>
+</dependency>
+
+
+
+

Starting with devon4j version 2020.12.001 application template, this is all you need to do. +However, if you have started from an older version or you want to understand more, please read on.

+
+
+
+
Pluggable web-security
+
+

To enable pluggable security via devon4j security starters you need to apply WebSecurityConfigurer to your BaseWebSecurityConfig (your class extending spring-boot’s WebSecurityConfigurerAdapter) as following:

+
+
+
+
  @Inject
+  private WebSecurityConfigurer webSecurityConfigurer;
+
+  public void configure(HttpSecurity http) throws Exception {
+    // disable CSRF protection by default, use csrf starter to override.
+	  http = http.csrf().disable();
+	  // apply pluggable web-security from devon4j security starters
+    http = this.webSecurityConfigurer.configure(http);
+    .....
+  }
+
+
+
+
+
Custom CsrfRequestMatcher
+
+

If you want to customize which HTTP requests will require a CSRF token, you can implement your own CsrfRequestMatcher and provide it to the devon4j CSRF protection via qualified injection as following:

+
+
+
+
@Named("CsrfRequestMatcher")
+public class CsrfRequestMatcher implements RequestMatcher {
+  @Override
+  public boolean matches(HttpServletRequest request) {
+    .....
+  }
+}
+
+
+
+

Please note that the exact name (@Named("CsrfRequestMatcher")) is required here to ensure your custom implementation will be injected properly.

+
+
+
+
CsrfRestService
+
+

With the devon4j-starter-security-csrf the CsrfRestService gets integrated into your app. +It provides an operation to get the CSRF token via an HTTP GET request. +The URL path to retrieve this CSRF token is services/rest/csrf/v1/token. +As a result you will get a JSON like the following:

+
+
+
+
{
+  "token":"3a8a5f66-c9eb-4494-81e1-7cc58bc3a519",
+  "parameterName":"_csrf",
+  "headerName":"X-CSRF-TOKEN"
+}
+
+
+
+

The token value is a strong random value that will differ for each user session. +It has to be sent with subsequent HTTP requests (when method is other than GET) in the specified header (X-CSRF-TOKEN).

+
+
+
+
How it works
+
+

Putting it all together, a browser client should call the CsrfRestService after successful login to receive the current CSRF token. +With every subsequent HTTP request (other than GET) the client has to send this token in the according HTTP header. +Otherwise the server will reject the request to prevent CSRF attacks. +Therefore, an attacker might make your browser perform HTTP requests towards your devon4j application backend via <image> elements, <iframes>, etc. +Your browser will then still include your session cookie if you are already logged in (e.g. from another tab). +However, in case he wants to trigger DELETE or POST requests trying your browser to make changes in the application (delete or update data, etc.) this will fail without CSRF token. +The attacker may make your browser retrieve the CSRF token but he will not be able to retrieve the result and put it into the header of other requests due to the same-origin-policy. +This way your application will be secured against CSRF attacks.

+
+
+
+
+
Configure devon4ng client for CSRF
+
+

Devon4ng client configuration for CSRF is described here

+
+
+ +
+

==Aspect Oriented Programming (AOP)

+
+
+

AOP is a powerful feature for cross-cutting concerns. However, if used extensive and for the wrong things an application can get unmaintainable. Therefore we give you the best practices where and how to use AOP properly.

+
+
+
+
AOP Key Principles
+
+

We follow these principles:

+
+
+
    +
  • +

    We use spring AOP based on dynamic proxies (and fallback to cglib).

    +
  • +
  • +

    We avoid AspectJ and other mighty and complex AOP frameworks whenever possible

    +
  • +
  • +

    We only use AOP where we consider it as necessary (see below).

    +
  • +
+
+
+
+
AOP Usage
+
+

We recommend to use AOP with care but we consider it established for the following cross cutting concerns:

+
+
+ +
+
+
+
AOP Debugging
+
+

When using AOP with dynamic proxies the debugging of your code can get nasty. As you can see by the red boxes in the call stack in the debugger there is a lot of magic happening while you often just want to step directly into the implementation skipping all the AOP clutter. When using Eclipse this can easily be achieved by enabling step filters. Therefore you have to enable the feature in the Eclipse tool bar (highlighted in red).

+
+
+
+AOP debugging +
+
+
+

In order to properly make this work you need to ensure that the step filters are properly configured:

+
+
+
+Step Filter Configuration +
+
+
+

Ensure you have at least the following step-filters configured and active:

+
+
+
+
ch.qos.logback.*
+com.devonfw.module.security.*
+java.lang.reflect.*
+java.security.*
+javax.persistence.*
+org.apache.commons.logging.*
+org.apache.cxf.jaxrs.client.*
+org.apache.tomcat.*
+org.h2.*
+org.springframework.*
+
+
+
+ +
+

==Exception Handling

+
+
+
+
Exception Principles
+
+

For exceptions we follow these principles:

+
+
+
    +
  • +

    We only use exceptions for exceptional situations and not for programming control flows, etc. Creating an exception in Java is expensive and hence should not be done for simply testing whether something is present, valid or permitted. In the latter case design your API to return this as a regular result.

    +
  • +
  • +

    We use unchecked exceptions (RuntimeException) [2]

    +
  • +
  • +

    We distinguish internal exceptions and user exceptions:

    +
    +
      +
    • +

      Internal exceptions have technical reasons. For unexpected and exotic situations, it is sufficient to throw existing exceptions such as IllegalStateException. For common scenarios a own exception class is reasonable.

      +
    • +
    • +

      User exceptions contain a message explaining the problem for end users. Therefore, we always define our own exception classes with a clear, brief, but detailed message.

      +
    • +
    +
    +
  • +
  • +

    Our own exceptions derive from an exception base class supporting

    + +
  • +
+
+
+

All this is offered by mmm-util-core, which we propose as a solution. +If you use the devon4j-rest module, this is already included. For Quarkus applications, you need to add the dependency manually.

+
+
+

If you want to avoid additional dependencies, you can implement your own solution for this by creating an abstract exception class ApplicationBusinessException extending from RuntimeException. For an example of this, see our Quarkus reference application.

+
+
+
+
Exception Example
+
+

Here is an exception class from our sample application:

+
+
+
+
public class IllegalEntityStateException extends ApplicationBusinessException {
+
+  private static final long serialVersionUID = 1L;
+
+  public IllegalEntityStateException(Object entity, Object state) {
+
+    this((Throwable) null, entity, state);
+  }
+
+
+  public IllegalEntityStateException(Object entity, Object currentState, Object newState) {
+
+    this(null, entity, currentState, newState);
+  }
+
+  public IllegalEntityStateException(Throwable cause, Object entity, Object state) {
+
+    super(cause, createBundle(NlsBundleApplicationRoot.class).errorIllegalEntityState(entity, state));
+  }
+
+  public IllegalEntityStateException(Throwable cause, Object entity, Object currentState, Object newState) {
+
+    super(cause, createBundle(NlsBundleApplicationRoot.class).errorIllegalEntityStateChange(entity, currentState,
+        newState));
+  }
+
+}
+
+
+
+

The message templates are defined in the interface NlsBundleApplicationRoot as follows:

+
+
+
+
public interface NlsBundleApplicationRoot extends NlsBundle {
+
+
+  @NlsBundleMessage("The entity {entity} is in state {state}!")
+  NlsMessage errorIllegalEntityState(@Named("entity") Object entity, @Named("state") Object state);
+
+
+  @NlsBundleMessage("The entity {entity} in state {currentState} can not be changed to state {newState}!")
+  NlsMessage errorIllegalEntityStateChange(@Named("entity") Object entity, @Named("currentState") Object currentState,
+      @Named("newState") Object newState);
+
+
+  @NlsBundleMessage("The property {property} of object {object} can not be changed!")
+  NlsMessage errorIllegalPropertyChange(@Named("object") Object object, @Named("property") Object property);
+
+  @NlsBundleMessage("There is currently no user logged in")
+  NlsMessage errorNoActiveUser();
+
+
+
+
+
Handling Exceptions
+
+

For catching and handling exceptions we follow these rules:

+
+
+
    +
  • +

    We do not catch exceptions just to wrap or to re-throw them.

    +
  • +
  • +

    If we catch an exception and throw a new one, we always have to provide the original exception as cause to the constructor of the new exception.

    +
  • +
  • +

    At the entry points of the application (e.g. a service operation) we have to catch and handle all throwables. This is done via the exception-facade-pattern via an explicit facade or aspect. The devon4j-rest module already provides ready-to-use implementations for this such as RestServiceExceptionFacade that you can use in your Spring application. For Quarkus, follow the Quarkus guide on exception handling.
    +The exception facade has to …​

    +
    +
      +
    • +

      log all errors (user errors on info and technical errors on error level)

      +
    • +
    • +

      ensure that the entire exception is passed to the logger (not only the message) so that the logger can capture the entire stacktrace and the root cause is not lost.

      +
    • +
    • +

      convert the error to a result appropriable for the client and secure for Sensitive Data Exposure. Especially for security exceptions only a generic security error code or message may be revealed but the details shall only be logged but not be exposed to the client. All internal exceptions are converted to a generic error with a message like:

      +
      +
      +
      +

      An unexpected technical error has occurred. We apologize any inconvenience. Please try again later.

      +
      +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
Common Errors
+
+

The following errors may occur in any devon application:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 16. Common Exceptions
CodeMessageLink

TechnicalError

An unexpected error has occurred! We apologize any inconvenience. Please try again later.

TechnicalErrorUserException.java

ServiceInvoke

«original message of the cause»

ServiceInvocationFailedException.java

+
+ +
+

==Internationalization +Internationalization (I18N) is about writing code independent from locale-specific information. +For I18N of text messages we are suggesting +mmm native-language-support.

+
+
+

In devonfw we have developed a solution to manage text internationalization. devonfw solution comes into two aspects:

+
+
+
    +
  • +

    Bind locale information to the user.

    +
  • +
  • +

    Get the messages in the current user locale.

    +
  • +
+
+
+
+
Binding locale information to the user
+
+

We have defined two different points to bind locale information to user, depending on user is authenticated or not.

+
+
+
    +
  • +

    User not authenticated: devonfw intercepts unsecured requests and extracts the locale from them. At first, we try to extract a language parameter from the request and if it is not possible, we extract the locale from the Accept-Language header.

    +
  • +
  • +

    User authenticated: during the login process, application developers are responsible for filling the language parameter in the UserProfile class. This language parameter could be obtained from DB, LDAP, request, etc. In the devonfw sample we get the locale information from the database.

    +
  • +
+
+
+

This image shows the entire process:

+
+
+
+Internationalization +
+
+
+
+
Getting internationalizated messages
+
+

devonfw has a bean that manages i18n message resolution, the ApplicationLocaleResolver. This bean is responsible for getting the current user, extracting the locale information from it and reading the correct properties file to get the message.

+
+
+

The i18n properties file must be called ApplicationMessages_la_CO.properties where la=language and CO=country. This is an example of an i18n properties file for the English language to translate devonfw sample user roles:

+
+
+

ApplicationMessages_en_US.properties

+
+
+
+
admin=Admin
+
+
+
+

You should define an ApplicationMessages_la_CO.properties file for every language that your application needs.

+
+
+

The ApplicationLocaleResolver bean is injected in the AbstractComponentFacade class, so this bean is available in the logic layer and you only need the following code to get an internationalized message:

+
+
+
+
String msg = getApplicationLocaleResolver().getMessage("mymessage");
+
+
+
+ +
+

==Service Client

+
+
+

This guide is about consuming (calling) services from other applications (micro-services). For providing services, see the Service-Layer Guide. Services can be consumed by the client or the server. As the client is typically not written in Java, you should consult the according guide for your client technology. In case you want to call a service within your Java code, this guide is the right place to get help.

+
+
+
+
Motivation
+
+

Various solutions already exist for calling services, such as RestTemplate from spring or the JAX-RS client API. Furthermore, each and every service framework offers its own API as well. These solutions might be suitable for very small and simple projects (with one or two such invocations). However, with the trend of microservices, the invocation of a service becomes a very common use-case that occurs all over the place. You typically need a solution that is very easy to use but supports flexible configuration, adding headers for authentication, mapping of errors from the server, logging success/errors with duration for performance analysis, support for synchronous and asynchronous invocations, etc. This is exactly what this devon4j service-client solution brings to you.

+
+
+
+
Usage
+
+

Spring

+
+
+

For Spring, follow the Spring rest-client guide.

+
+
+

Quarkus

+
+
+

For Quarkus, we recommend to follow the official Quarkus rest-client guide

+
+
+ +
+

==Testing

+
+
+
+
General best practices
+
+

For testing please follow our general best practices:

+
+
+
    +
  • +

    Tests should have a clear goal that should also be documented.

    +
  • +
  • +

    Tests have to be classified into different integration levels.

    +
  • +
  • +

    Tests should follow a clear naming convention.

    +
  • +
  • +

    Automated tests need to properly assert the result of the tested operation(s) in a reliable way. E.g. avoid stuff like assertThat(service.getAllEntities()).hasSize(42) or even worse tests that have no assertion at all.

    +
  • +
  • +

    Tests need to be independent of each other. Never write test-cases or tests (in Java @Test methods) that depend on another test to be executed before.

    +
  • +
  • +

    Use AssertJ to write good readable and maintainable tests that also provide valuable feedback in case a test fails. Do not use legacy JUnit methods like assertEquals anymore!

    +
  • +
  • +

    For easy understanding divide your test in three commented sections:

    +
    +
      +
    • +

      //given

      +
    • +
    • +

      //when

      +
    • +
    • +

      //then

      +
    • +
    +
    +
  • +
  • +

    Plan your tests and test data management properly before implementing.

    +
  • +
  • +

    Instead of having a too strong focus on test coverage better ensure you have covered your critical core functionality properly and review the code including tests.

    +
  • +
  • +

    Test code shall NOT be seen as second class code. You shall consider design, architecture and code-style also for your test code but do not over-engineer it.

    +
  • +
  • +

    Test automation is good but should be considered in relation to cost per use. Creating full coverage via automated system tests can cause a massive amount of test-code that can turn out as a huge maintenance hell. Always consider all aspects including product life-cycle, criticality of use-cases to test, and variability of the aspect to test (e.g. UI, test-data).

    +
  • +
  • +

    Use continuous integration and establish that the entire team wants to have clean builds and running tests.

    +
  • +
  • +

    Prefer delegation over inheritance for cross-cutting testing functionality. Good places to put this kind of code can be realized and reused via the JUnit @Rule mechanism.

    +
  • +
+
+
+
+
Test Automation Technology Stack
+
+

For test automation we use JUnit. However, we are strictly doing all assertions with AssertJ. For mocking we use Mockito. +In order to mock remote connections we use WireMock.

+
+
+

For testing entire components or sub-systems we recommend to use for Spring stack spring-boot-starter-test as lightweight and fast testing infrastructure that is already shipped with devon4j-test. For Quarkus, you can add the necessary extensions manually such as quarkus-junit5, quarkus-junit5-mockito, assertj-core etc.

+
+
+

In case you have to use a full blown JEE application server, we recommend to use arquillian. To get started with arquillian, look here.

+
+
+
+
Test Doubles
+
+

We use test doubles as a generic term for mocks, stubs, fakes, dummies, or spies to avoid confusion. Here is a short summary from stubs VS mocks:

+
+
+
    +
  • +

    Dummy objects specifying no logic at all. May declare data in a POJO style to be used as boiler plate code to parameter lists or even influence the control flow towards the test’s needs.

    +
  • +
  • +

    Fake objects actually have working implementations, but usually take some shortcut which makes them not suitable for production (an in memory database is a good example).

    +
  • +
  • +

    Stubs provide canned answers to calls made during the test, usually not responding at all to anything outside what’s programmed in for the test. Stubs may also record information about calls, such as an email gateway stub that remembers the messages it 'sent', or maybe only how many messages it 'sent'.

    +
  • +
  • +

    Mocks are objects pre-programmed with expectations, which form a specification of the calls they are expected to receive.

    +
  • +
+
+
+

We try to give some examples, which should make it somehow clearer:

+
+
+
Stubs
+
+

Best Practices for applications:

+
+
+
    +
  • +

    A good way to replace small to medium large boundary systems, whose impact (e.g. latency) should be ignored during load and performance tests of the application under development.

    +
  • +
  • +

    As stub implementations rely on state-based verification, there is the threat that test developers will partially reimplement the state transitions based on the replaced code. This will immediately lead to a black maintenance hole, so it is better to use mocks to ensure the expected behavior on the interface level.

    +
  • +
  • +

    Do NOT use stubs as basis of a large amount of test cases as due to state-based verification of stubs, test developers will enrich the stub implementation to become a large monster with its own hunger after maintenance efforts.

    +
  • +
+
+
+
+
Mocks
+
+

Best Practices for applications:

+
+
+
    +
  • +

    Replace not-needed dependencies of your system-under-test (SUT) to minimize the application context to start of your component framework.

    +
  • +
  • +

    Replace dependencies of your SUT to impact the control flow under test without establishing all the context parameters needed to match the control flow.

    +
  • +
  • +

    Remember: Not everything has to be mocked! Especially on lower levels of tests like isolated module tests you can be betrayed into a mocking delusion, where you end up in a hundred lines of code mocking the whole context and five lines executing the test and verifying the mocks behavior. Always keep in mind the benefit-cost ratio, when implementing tests using mocks.

    +
  • +
+
+
+
+
WireMock
+
+

If you need to mock remote connections such as HTTP-Servers, WireMock offers easy to use functionality. For a full description see the homepage or the github repository. Wiremock can be used either as a JUnit Rule, in Java outside of JUnit or as a standalone process. The mocked server can be configured to respond to specific requests in a given way via a fluent Java API, JSON files and JSON over HTTP. An example as an integration to JUnit can look as follows.

+
+
+
+
import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig;
+import com.github.tomakehurst.wiremock.junit.WireMockRule;
+
+public class WireMockOfferImport{
+
+  @Rule
+  public WireMockRule mockServer = new WireMockRule(wireMockConfig().dynamicPort());
+
+  @Test
+  public void requestDataTest() throws Exception {
+  int port = this.mockServer.port();
+  ...}
+
+
+
+

This creates a server on a randomly chosen free port on the running machine. You can also specify the port to be used if wanted. Other than that there are several options to further configure the server. This includes HTTPs, proxy settings, file locations, logging and extensions.

+
+
+
+
  @Test
+  public void requestDataTest() throws Exception {
+      this.mockServer.stubFor(get(urlEqualTo("/new/offers")).withHeader("Accept", equalTo("application/json"))
+      .withHeader("Authorization", containing("Basic")).willReturn(aResponse().withStatus(200).withFixedDelay(1000)
+      .withHeader("Content-Type", "application/json").withBodyFile("/wireMockTest/jsonBodyFile.json")));
+  }
+
+
+
+

This will stub the URL localhost:port/new/offers to respond with a status 200 message containing a header (Content-Type: application/json) and a body with content given in jsonBodyFile.json if the request matches several conditions. +It has to be a GET request to ../new/offers with the two given header properties.

+
+
+

Note that by default files are located in src/test/resources/__files/. When using only one WireMock server one can omit the this.mockServer before the stubFor call (static method). +You can also add a fixed delay to the response or processing delay with WireMock.addRequestProcessingDelay(time) in order to test for timeouts.

+
+
+

WireMock can also respond with different corrupted messages to simulate faulty behaviour.

+
+
+
+
@Test(expected = ResourceAccessException.class)
+public void faultTest() {
+
+    this.mockServer.stubFor(get(urlEqualTo("/fault")).willReturn(aResponse()
+    .withFault(Fault.MALFORMED_RESPONSE_CHUNK)));
+...}
+
+
+
+

A GET request to ../fault returns an OK status header, then garbage, and then closes the connection.

+
+
+
+
+
Integration Levels
+
+

There are many discussions about the right level of integration for test automation. Sometimes it is better to focus on small, isolated modules of the system - whatever a "module" may be. In other cases it makes more sense to test integrated groups of modules. Because there is no universal answer to this question, devonfw only defines a common terminology for what could be tested. Each project must make its own decision where to put the focus of test automation. There is no worldwide accepted terminology for the integration levels of testing. In general we consider ISTQB. However, with a technical focus on test automation we want to get more precise.

+
+
+

The following picture shows a simplified view of an application based on the devonfw reference architecture. We define four integration levels that are explained in detail below. +The boxes in the picture contain parenthesized numbers. These numbers depict the lowest integration level, a box belongs to. Higher integration levels also contain all boxes of lower integration levels. When writing tests for a given integration level, related boxes with a lower integration level must be replaced by test doubles or drivers.

+
+
+
+Integration Levels +
+
+
+

The main difference between the integration levels is the amount of infrastructure needed to test them. The more infrastructure you need, the more bugs you will find, but the more instable and the slower your tests will be. So each project has to make a trade-off between pros and contras of including much infrastructure in tests and has to select the integration levels that fit best to the project.

+
+
+

Consider, that more infrastructure does not automatically lead to a better bug-detection. There may be bugs in your software that are masked by bugs in the infrastructure. The best way to find those bugs is to test with very few infrastructure.

+
+
+

External systems do not belong to any of the integration levels defined here. devonfw does not recommend involving real external systems in test automation. This means, they have to be replaced by test doubles in automated tests. An exception may be external systems that are fully under control of the own development team.

+
+
+

The following chapters describe the four integration levels.

+
+
+
Level 1 Module Test
+
+

The goal of an isolated module test is to provide fast feedback to the developer. Consequently, isolated module tests must not have any interaction with the client, the database, the file system, the network, etc.

+
+
+

An isolated module test tests a single class or at least a small set of classes in isolation. If such classes depend on other components or external resources, etc. these shall be replaced with a test double.

+
+
+
+
public class MyClassTest extends ModuleTest {
+
+  @Test
+  public void testMyClass() {
+
+    // given
+    MyClass myClass = new MyClass();
+    // when
+    String value = myClass.doSomething();
+    // then
+    assertThat(value).isEqualTo("expected value");
+  }
+
+}
+
+
+
+

For an advanced example see here.

+
+
+
+
Level 2 Component Test
+
+

A component test aims to test components or component parts as a unit. +These tests can access resources such as a database (e.g. for DAO tests). +Further, no remote communication is intended here. Access to external systems shall be replaced by a test double.

+
+
+
    +
  • +

    For Spring stack, they are typically run with a (light-weight) infrastructure such as spring-boot-starter-test. A component-test is illustrated in the following example:

    +
    +
    +
    @SpringBootTest(classes = { MySpringBootApp.class }, webEnvironment = WebEnvironment.NONE)
    +public class UcFindCountryTest extends ComponentTest {
    +  @Inject
    +  private UcFindCountry ucFindCountry;
    +
    +  @Test
    +  public void testFindCountry() {
    +
    +    // given
    +    String countryCode = "de";
    +
    +    // when
    +    TestUtil.login("user", MyAccessControlConfig.FIND_COUNTRY);
    +    CountryEto country = this.ucFindCountry.findCountry(countryCode);
    +
    +    // then
    +    assertThat(country).isNotNull();
    +    assertThat(country.getCountryCode()).isEqualTo(countryCode);
    +    assertThat(country.getName()).isEqualTo("Germany");
    +  }
    +}
    +
    +
    +
    +

    This test will start the entire spring-context of your app (MySpringBootApp). Within the test spring will inject according spring-beans into all your fields annotated with @Inject. In the test methods you can use these spring-beans and perform your actual tests. This pattern can be used for testing DAOs/Repositories, Use-Cases, or any other spring-bean with its entire configuration including database and transactions.

    +
    +
  • +
  • +

    For Quarkus, you can similarly inject the CDI beans and perform tests. An example is shown below:

    +
    +
    +
    @QuarkusTest
    +public class UcFindCountryTest {
    +  @Inject
    +  private UcFindCountry ucFindCountry;
    +  ...
    +
    +
    +
  • +
+
+
+

When you are testing use-cases your authorization will also be in place. Therefore, you have to simulate a logon in advance, which is done via the login method in the above Spring example. The test-infrastructure will automatically do a logout for you after each test method in doTearDown.

+
+
+
+
Level 3 Subsystem Test
+
+

A subsystem test runs against the external interfaces (e.g. HTTP service) of the integrated subsystem. Subsystem tests of the client subsystem are described in the devon4ng testing guide. In devon4j the server (JEE application) is the subsystem under test. The tests act as a client (e.g. service consumer) and the server has to be integrated and started in a container.

+
+
+
    +
  • +

    With devon4j and Spring you can write a subsystem-test as easy as illustrated in the following example:

    +
    +
    +
    @SpringBootTest(classes = { MySpringBootApp.class }, webEnvironment = WebEnvironment.RANDOM_PORT)
    +public class CountryRestServiceTest extends SubsystemTest {
    +
    +  @Inject
    +  private ServiceClientFactory serviceClientFactory;
    +
    +  @Test
    +  public void testFindCountry() {
    +
    +    // given
    +    String countryCode = "de";
    +
    +    // when
    +    CountryRestService service = this.serviceClientFactory.create(CountryRestService.class);
    +    CountryEto country = service.findCountry(countryCode);
    +
    +    // then
    +    assertThat(country).isNotNull();
    +    assertThat(country.getCountryCode()).isEqualTo(countryCode);
    +    assertThat(country.getName()).isEqualTo("Germany");
    +  }
    +}
    +
    +
    +
    +

    Even though not obvious on the first look this test will start your entire application as a server on a free random port (so that it works in CI with parallel builds for different branches) and tests the invocation of a (REST) service including (un)marshalling of data (e.g. as JSON) and transport via HTTP (all in the invocation of the findCountry method).

    +
    +
  • +
+
+
+

Do not confuse a subsystem test with a system integration test. A system integration test validates the interaction of several systems where we do not recommend test automation.

+
+
+
+
Level 4 System Test
+
+

A system test has the goal to test the system as a whole against its official interfaces such as its UI or batches. The system itself runs as a separate process in a way close to a regular deployment. Only external systems are simulated by test doubles.

+
+
+

The devonfw only gives advice for automated system tests (TODO see allure testing framework). In nearly every project there must be manual system tests, too. These manual system tests are out of scope here.

+
+
+
+
Classifying Integration-Levels
+
+

For Spring stack, devon4j defines Category-Interfaces that shall be used as JUnit Categories. +Also devon4j provides abstract base classes that you may extend in your test-cases if you like.

+
+
+

devon4j further pre-configures the maven build to only run integration levels 1-2 by default (e.g. for fast feedback in continuous integration). It offers the profiles subsystemtest (1-3) and systemtest (1-4). In your nightly build you can simply add -Psystemtest to run all tests.

+
+
+
+
+
Implementation
+
+

This section introduces how to implement tests on the different levels with the given devonfw infrastructure and the proposed frameworks. +For Spring, see Spring Test Implementation

+
+
+
+
Regression testing
+
+

When it comes to complex output (even binary) that you want to regression test by comparing with an expected result, you should consider Approval Tests using ApprovalTests.Java. +If applied to the right problems, it can be very helpful.

+
+
+
+
Deployment Pipeline
+
+

A deployment pipeline is a semi-automated process that gets software-changes from version control into production. It contains several validation steps, e.g. automated tests of all integration levels. +Because devon4j should fit to different project types - from agile to waterfall - it does not define a standard deployment pipeline. But we recommend to define such a deployment pipeline explicitly for each project and to find the right place in it for each type of test.

+
+
+

For that purpose, it is advisable to have fast running test suite that gives as much confidence as possible without needing too much time and too much infrastructure. This test suite should run in an early stage of your deployment pipeline. Maybe the developer should run it even before he/she checked in the code. Usually lower integration levels are more suitable for this test suite than higher integration levels.

+
+
+

Note, that the deployment pipeline always should contain manual validation steps, at least manual acceptance testing. There also may be manual validation steps that have to be executed for special changes only, e.g. usability testing. Management and execution processes of those manual validation steps are currently not in the scope of devonfw.

+
+
+
+
Test Coverage
+
+

We are using tools (SonarQube/Jacoco) to measure the coverage of the tests. Please always keep in mind that the only reliable message of a code coverage of X% is that (100-X)% of the code is entirely untested. It does not say anything about the quality of the tests or the software though it often relates to it.

+
+
+
+
Test Configuration
+
+

This section covers test configuration in general without focusing on integration levels as in the first chapter.

+
+
+ +
+
+
Configure Test Specific Beans
+
+

Sometimes it can become handy to provide other or differently configured bean implementations via CDI than those available in production. For example, when creating beans using @Bean-annotated methods they are usually configured within those methods. WebSecurityBeansConfig shows an example of such methods.

+
+
+
+
@Configuration
+public class WebSecurityBeansConfig {
+  //...
+  @Bean
+  public AccessControlSchemaProvider accessControlSchemaProvider() {
+    // actually no additional configuration is shown here
+    return new AccessControlSchemaProviderImpl();
+  }
+  //...
+}
+
+
+
+

AccessControlSchemaProvider allows to programmatically access data defined in some XML file, e.g. access-control-schema.xml. Now, one can imagine that it would be helpful if AccessControlSchemaProvider would point to some other file than the default within a test class. That file could provide content that differs from the default. +The question is: how can I change resource path of AccessControlSchemaProviderImpl within a test?

+
+
+

One very helpful solution is to use static inner classes. +Static inner classes can contain @Bean -annotated methods, and by placing them in the classes parameter in @SpringBootTest(classes = { /* place class here*/ }) annotation the beans returned by these methods are placed in the application context during test execution. Combining this feature with inheritance allows to override methods defined in other configuration classes as shown in the following listing where TempWebSecurityConfig extends WebSecurityBeansConfig. This relationship allows to override public AccessControlSchemaProvider accessControlSchemaProvider(). Here we are able to configure the instance of type AccessControlSchemaProviderImpl before returning it (and, of course, we could also have used a completely different implementation of the AccessControlSchemaProvider interface). By overriding the method the implementation of the super class is ignored, hence, only the new implementation is called at runtime. Other methods defined in WebSecurityBeansConfig which are not overridden by the subclass are still dispatched to WebSecurityBeansConfig.

+
+
+
+
//... Other testing related annotations
+@SpringBootTest(classes = { TempWebSecurityConfig.class })
+public class SomeTestClass {
+
+  public static class TempWebSecurityConfig extends WebSecurityBeansConfig {
+
+    @Override
+    @Bean
+    public AccessControlSchemaProvider accessControlSchemaProvider() {
+
+      ClassPathResource resource = new ClassPathResource(locationPrefix + "access-control-schema3.xml");
+      AccessControlSchemaProviderImpl accessControlSchemaProvider = new AccessControlSchemaProviderImpl();
+      accessControlSchemaProvider.setAccessControlSchema(resource);
+      return accessControlSchemaProvider;
+    }
+  }
+}
+
+
+
+

The following chapter of the Spring framework documentation explains the issue, but uses a slightly different way to obtain the configuration.

+
+
+
+
Test Data
+
+

It is possible to obtain test data in two different ways depending on your test’s integration level.

+
+
+
+
+
Debugging Tests
+
+

The following two sections describe two debugging approaches for tests. Tests are either run from within the IDE or from the command line using Maven.

+
+
+
Debugging with the IDE
+
+

Debugging with the IDE is as easy as always. Even if you want to execute a SubsystemTest which needs a Spring context and a server infrastructure to run properly, you just set your breakpoints and click on Debug As → JUnit Test. The test infrastructure will take care of initializing the necessary infrastructure - if everything is configured properly.

+
+
+
+
Debugging with Maven
+
+

Please refer to the following two links to find a guide for debugging tests when running them from Maven.

+
+ +
+

In essence, you first have to execute a test using the command line. Maven will halt just before the test execution and wait for your IDE to connect to the process. When receiving a connection the test will start and then pause at any breakpoint set in advance. +The first link states that tests are started through the following command:

+
+
+
+
mvn -Dmaven.surefire.debug test
+
+
+
+

Although this is correct, it will run every test class in your project and - which is time consuming and mostly unnecessary - halt before each of these tests. +To counter this problem you can simply execute a single test class through the following command (here we execute the TablemanagementRestServiceTest from the restaurant sample application):

+
+
+
+
mvn test -Dmaven.surefire.debug test -Dtest=TablemanagementRestServiceTest
+
+
+
+

It is important to notice that you first have to execute the Maven command in the according submodule, e.g. to execute the TablemanagementRestServiceTest you have first to navigate to the core module’s directory.

+
+
+ +
+

==Transfer-Objects

+
+
+

The technical data model is defined in form of persistent entities. +However, passing persistent entities via call-by-reference across the entire application will soon cause problems:

+
+
+
    +
  • +

    Changes to a persistent entity are directly written back to the persistent store when the transaction is committed. When the entity is sent across the application, changes also tend to take place in multiple places, endangering data sovereignty and leading to inconsistency.

    +
  • +
  • +

    You want to send and receive data via services across the network and have to define what section of your data is actually transferred. If you have relations in your technical model you quickly end up loading and transferring way too much data.

    +
  • +
  • +

    Modifications to your technical data model shall not automatically have impact on your external services causing incompatibilities.

    +
  • +
+
+
+

To prevent such problems transfer-objects are used leading to a call-by-value model and decoupling changes to persistent entities.

+
+
+

In the following sections the different types of transfer-objects are explained. +You will find all according naming-conventions in the architecture-mapping

+
+
+

To structure your transfer objects, we recommend the following approaches:

+
+
+ +
+
+

Also considering the following transfer objects in specific cases:

+
+
+
+
SearchCriteriaTo
+
+

For searching we create or generate a «BusinessObject»SearchCriteriaTo representing a query to find instances of «BusinessObject».

+
+
TO
+
+

There are typically transfer-objects for data that is never persistent. +For very generic cases these just carry the suffix To.

+
+
STO
+
+

We can potentially create separate service transfer objects (STO) (if possible named «BusinessObject»Sto) to keep the service API stable and independent of the actual data-model. +However, we usually do not need this and want to keep our architecture simple. +Only create STOs if you need service versioning and support previous APIs or to provide legacy service technologies that require their own isolated data-model. +In such case you also need beanmapping between STOs and ETOs/DTOs what means extra effort and complexity that should be avoided.

+
+
+
+
+
+

==Bean Mapping in devon4j-spring

+
+
+

We have developed a solution that uses a BeanMapper that allows to abstract from the underlying implementation. As mentioned in the general bean mapping guide, we started with Dozer a Java Bean to Java Bean mapper that recursively copies data from one object to another. Now we recommend using Orika. This guide will show an introduction to Orika and Dozer bean-mapper.

+
+
+
+
+
Bean-Mapper Dependency
+
+

To get access to the BeanMapper we have to use either of the below dependency in our POM:

+
+
+
Listing 17. Orika
+
+
<dependency>
+    <groupId>com.devonfw.java.modules</groupId>
+    <artifactId>devon4j-beanmapping-orika</artifactId>
+    <version>2020.12.002</version>
+</dependency>
+
+
+
+
Listing 18. Dozer
+
+
<dependency>
+    <groupId>com.devonfw.java.modules</groupId>
+    <artifactId>devon4j-beanmapping-dozer</artifactId>
+    <version>2020.12.002</version>
+</dependency>
+
+
+
+
+
Bean-Mapper Configuration
+
+
Bean-Mapper Configuration using Dozer
+
+

The BeanMapper implementation is based on an existing open-source bean-mapping framework. +In case of Dozer the mapping is configured src/main/resources/config/app/common/dozer-mapping.xml.

+
+
+

See the my-thai-star dozer-mapping.xml as an example. +Important is that you configure all your custom datatypes as <copy-by-reference> tags and have the mapping from PersistenceEntity (ApplicationPersistenceEntity) to AbstractEto configured properly:

+
+
+
+
 <mapping type="one-way">
+    <class-a>com.devonfw.module.basic.common.api.entity.PersistenceEntity</class-a>
+    <class-b>com.devonfw.module.basic.common.api.to.AbstractEto</class-b>
+    <field custom-converter="com.devonfw.module.beanmapping.common.impl.dozer.IdentityConverter">
+      <a>this</a>
+      <b is-accessible="true">persistentEntity</b>
+    </field>
+</mapping>
+
+
+
+
+
+
Bean-Mapper Configuration using Orika
+
+

Orika with devonfw is configured by default and sets some custom mappings for GenericEntity.java to GenericEntityDto.java. To specify and customize the mappings you can create the class BeansOrikaConfig.java that extends the class BaseOrikaConfig.java from the devon4j.orika package. To register a basic mapping, register a ClassMap for the mapperFactory with your custom mapping. Watch the example below and follow the basic Orika mapping configuration guide and the Orika advanced mapping guide.

+
+
+

Register Mappings:

+
+
+
+
mapperFactory.classMap(UserEntity.class, UserEto.class)
+			.field("email", "email")
+			.field("username", "name")
+			.byDefault()
+			.register();
+
+
+
+
+
Bean-Mapper Usage
+
+

Then we can get the BeanMapper via dependency-injection, which we typically already provide by an abstract base class (e.g. AbstractUc). Now we can solve our problem very easily:

+
+
+
+
...
+UserEntity resultEntity = ...;
+...
+return getBeanMapper().map(resultEntity, UserEto.class);
+
+
+
+ +
+

==Datatypes

+
+
+
+
+

A datatype is an object representing a value of a specific type with the following aspects:

+
+
+
    +
  • +

    It has a technical or business specific semantic.

    +
  • +
  • +

    Its JavaDoc explains the meaning and semantic of the value.

    +
  • +
  • +

    It is immutable and therefore stateless (its value assigned at construction time and can not be modified).

    +
  • +
  • +

    It is serializable.

    +
  • +
  • +

    It properly implements #equals(Object) and #hashCode() (two different instances with the same value are equal and have the same hash).

    +
  • +
  • +

    It shall ensure syntactical validation so it is NOT possible to create an instance with an invalid value.

    +
  • +
  • +

    It is responsible for formatting its value to a string representation suitable for sinks such as UI, loggers, etc. Also consider cases like a Datatype representing a password where toString() should return something like "**" instead of the actual password to prevent security accidents.

    +
  • +
  • +

    It is responsible for parsing the value from other representations such as a string (as needed).

    +
  • +
  • +

    It shall provide required logical operations on the value to prevent redundancies. Due to the immutable attribute all manipulative operations have to return a new Datatype instance (see e.g. BigDecimal.add(java.math.BigDecimal)).

    +
  • +
  • +

    It should implement Comparable if a natural order is defined.

    +
  • +
+
+
+

Based on the Datatype a presentation layer can decide how to view and how to edit the value. Therefore a structured data model should make use of custom datatypes in order to be expressive. +Common generic datatypes are String, Boolean, Number and its subclasses, Currency, etc. +Please note that both Date and Calendar are mutable and have very confusing APIs. Therefore, use JSR-310 or jodatime instead. +Even if a datatype is technically nothing but a String or a Number but logically something special it is worth to define it as a dedicated datatype class already for the purpose of having a central javadoc to explain it. On the other side avoid to introduce technical datatypes like String32 for a String with a maximum length of 32 characters as this is not adding value in the sense of a real Datatype. +It is suitable and in most cases also recommended to use the class implementing the datatype as API omitting a dedicated interface.

+
+
+
+— mmm project
+datatype javadoc +
+
+ +
+
+
Datatype Packaging
+
+

For the devonfw we use a common packaging schema. +The specifics for datatypes are as following:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
SegmentValueExplanation

<component>

*

Here we use the (business) component defining the datatype or general for generic datatypes.

<layer>

common

Datatypes are used across all layers and are not assigned to a dedicated layer.

<scope>

api

Datatypes are always used directly as API even though they may contain (simple) implementation logic. Most datatypes are simple wrappers for generic Java types (e.g. String) but make these explicit and might add some validation.

+
+
+
Technical Concerns
+
+

Many technologies like Dozer and QueryDSL’s (alias API) are heavily based on reflection. For them to work properly with custom datatypes, the frameworks must be able to instantiate custom datatypes with no-argument constructors. It is therefore recommended to implement a no-argument constructor for each datatype of at least protected visibility.

+
+
+
+
Datatypes in Entities
+
+

The usage of custom datatypes in entities is explained in the persistence layer guide.

+
+
+
+
Datatypes in Transfer-Objects
+
+
XML
+
+

For mapping datatypes with JAXB see XML guide.

+
+
+
+
JSON
+
+

For mapping datatypes from and to JSON see JSON custom mapping.

+
+
+ +
+

==CORS configuration in Spring

+
+
+
+
+
Dependency
+
+

To enable the CORS support from the server side for your devon4j-Spring application, add the below dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-security-cors</artifactId>
+</dependency>
+
+
+
+
+
Configuration
+
+

Add the below properties in your application.properties file:

+
+
+
+
#CORS support
+security.cors.spring.allowCredentials=true
+security.cors.spring.allowedOriginPatterns=*
+security.cors.spring.allowedHeaders=*
+security.cors.spring.allowedMethods=OPTIONS,HEAD,GET,PUT,POST,DELETE,PATCH
+security.cors.pathPattern=/**
+
+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
AttributeDescriptionHTTP Header

allowCredentials

Decides whether the browser should include any cookies associated with the request (true if cookies should be included).

Access-Control-Allow-Credentials

allowedOrigins

List of allowed origins (use * to allow all origins).

Access-Control-Allow-Origin

allowedMethods

List of allowed HTTP request methods (OPTIONS, HEAD, GET, PUT, POST, DELETE, PATCH, etc.).

-

allowedHeaders

List of allowed headers that can be used during the request (use * to allow all headers requested by the client)

Access-Control-Allow-Headers

pathPattern

Ant-style pattern for the URL paths where to apply CORS. Use "/**" to match all URL paths.

+
+ +
+

==Microservices in devonfw

+
+
+

The Microservices architecture is an approach for application development based on a series of small services grouped under a business domain. Each individual service runs autonomously and communicates with the other services through their APIs. That independence between the different services makes it possible to manage (upgrade, fix, deploy, etc.) each one without affecting the rest of the system’s services. In addition to that, the microservices architecture makes it possible to scale specific services when facing an increment of the requests, so the applications based on microservices are more flexible and stable, and can be adapted quickly to demand changes.

+
+
+

However, this new approach, developing apps based on microservices, presents some downsides.

+
+
+

Let’s see the main challenges when working with microservices:

+
+
+
    +
  • +

    Having the applications divided in different services we will need a component (router) to redirect each request to the related microservice. These redirection rules must implement filters to guarantee a proper functionality.

    +
  • +
  • +

    In order to manage correctly the routing process, the application will also need a catalog with all the microservices and its details: IPs and ports of each of the deployed instances of each microservice, the state of each instance and some other related information. This catalog is called Service Discovery.

    +
  • +
  • +

    With all the information of the Service Discovery the application will need to calculate and select between all the available instances of a microservice which is the suitable one. This will be figured out by the library Client Side Load Balancer.

    +
  • +
  • +

    The different microservices will be likely interconnected with each other, that means that in case of failure of one of the microservices involved in a process, the application must implement a mechanism to avoid the error propagation through the rest of the services and provide an alternative as a process result. To solve this, the pattern Circuit Breaker can be implemented in the calls between microservices.

    +
  • +
  • +

    As we have mentioned, the microservices will exchange calls and information with each other so our applications will need to provide a secured context to avoid not allowed operations or intrusions. In addition, since microservices must be able to operate in an isolated way, it is not recommended to maintain a session. To meet this need without using Spring sessions, a token-based authentication is used that exchanges information using the json web token (JWT) protocol.

    +
  • +
+
+
+

In addition to all of this we will find other issues related to this particular architecture that we will address fitting the requirements of each project.

+
+
+
    +
  • +

    Distributed data bases: each instance of a microservice should have only one data base.

    +
  • +
  • +

    Centralized logs: each instance of a microservice creates a log and a trace that should be centralized to allow an easier way to read all that information.

    +
  • +
  • +

    Centralized configuration: each microservice has its own configuration, so our applications should group all those configurations in only one place to ease the configuration management.

    +
  • +
  • +

    Automatized deployments: as we are managing several components (microservices, catalogs, balancers, etc.) the deployment should be automatized to avoid errors and ease this process.

    +
  • +
+
+
+

To address the above, devonfw microservices has an alternative approach Microservices based on Netflix-Tools.

+
+
+ +
+

==Caching +Caching is a technical approach to improve performance. While it may appear easy on the first sight it is an advanced topic. In general, try to use caching only when required for performance reasons. If you come to the point that you need caching first think about:

+
+
+
    +
  • +

    What to cache?
    +Be sure about what you want to cache. Is it static data? How often will it change? What will happen if the data changes but due to caching you might receive "old" values? Can this be tolerated? For how long? This is not a technical question but a business requirement.

    +
  • +
  • +

    Where to cache?
    +Will you cache data on client or server? Where exactly?

    +
  • +
  • +

    How to cache?
    +Is a local cache sufficient or do you need a shared cache?

    +
  • +
+
+
+
+
Local Cache
+ +
+
+
Shared Cache
+
+
Distributed Cache
+ +
+
+ +
+
Caching of Web-Resources
+ +
+ +
+

==Feature-Toggles

+
+
+

Most software development teams use Feature-Branching to be able to work in parallel and maintain a stable main branch in the VCS. However, Feature-Branching might not be the ideal tool in every case because of big merges and isolation between development groups. In many cases, Feature-Toggles can avoid some of these problems, so these should definitely be considered to be used in collaborative software development.

+
+
+
+
Implementation with the devonfw
+
+

To use Feature-Toggles with the devonfw, use the Framework Togglz because it has all the features generally needed and provides a great documentation.

+
+
+

For a pretty minimal working example, also see this fork.

+
+
+
Preparation
+
+

The following example takes place in the oasp-sample-core project, so the necessary dependencies have to be added to the according pom.xml file. Required are the main Togglz project including Spring support, the Togglz console to graphically change the feature state and the Spring security package to handle authentication for the Togglz console.

+
+
+
+
<!-- Feature-Toggle-Framework togglz -->
+<dependency>
+  <groupId>org.togglz</groupId>
+  <artifactId>togglz-spring-boot-starter</artifactId>
+  <version>2.3.0.RC2</version>
+</dependency>
+
+<dependency>
+  <groupId>org.togglz</groupId>
+  <artifactId>togglz-console</artifactId>
+  <version>2.3.0.RC2</version>
+</dependency>
+
+<dependency>
+  <groupId>org.togglz</groupId>
+  <artifactId>togglz-spring-security</artifactId>
+  <version>2.3.0.RC2</version>
+</dependency>
+
+
+
+

In addition to that, the following lines have to be included in the spring configuration file application.properties

+
+
+
+
##configuration for the togglz Feature-Toggle-Framework
+togglz.enabled=true
+togglz.console.secured=false
+
+
+
+
+
Small features
+
+

For small features, a simple query of the toggle state is often enough to achieve the desired functionality. To illustrate this, a simple example follows, which implements a toggle to limit the page size returned by the staffmanagement. See here for further details.

+
+
+

This is the current implementation to toggle the feature:

+
+
+
+
// Uncomment next line in order to limit the maximum page size for the staff member search
+// criteria.limitMaximumPageSize(MAXIMUM_HIT_LIMIT);
+
+
+
+

To realise this more elegantly with Togglz, first an enum is required to configure the feature-toggle.

+
+
+
+
public enum StaffmanagementFeatures implements Feature {
+  @Label("Limit the maximum page size for the staff members")
+  LIMIT_STAFF_PAGE_SIZE;
+
+  public boolean isActive() {
+    return FeatureContext.getFeatureManager().isActive(this);
+  }
+}
+
+
+
+

To familiarize the Spring framework with the enum, add the following entry to the application.properties file.

+
+
+
+
togglz.feature-enums=io.oasp.gastronomy.restaurant.staffmanagement.featuremanager.StaffmanagementFeatures
+
+
+
+

After that, the toggle can be used easily by calling the isActive() method of the enum.

+
+
+
+
if (StaffmanagementFeatures.LIMIT_STAFF_PAGE_SIZE.isActive()) {
+  criteria.limitMaximumPageSize(MAXIMUM_HIT_LIMIT);
+}
+
+
+
+

This way, you can easily switch the feature on or off by using the administration console at http://localhost:8081/devon4j-sample-server/togglz-console. If you are getting redirected to the login page, just sign in with any valid user (eg. admin).

+
+
+
+
Extensive features
+
+

When implementing extensive features, you might want to consider using the strategy design pattern to maintain the overview of your software. The following example is an implementation of a feature which adds a 25% discount to all products managed by the offermanagement.

+
+
+
Therefore there are two strategies needed:
+
    +
  1. +

    Return the offers with the normal price

    +
  2. +
  3. +

    Return the offers with a 25% discount

    +
  4. +
+
+
+

The implementation is pretty straightforward, so use this as a reference. Compare this for further details.

+
+
+
+
@Override
+@RolesAllowed(PermissionConstants.FIND_OFFER)
+public PaginatedListTo<OfferEto> findOfferEtos(OfferSearchCriteriaTo criteria) {
+  criteria.limitMaximumPageSize(MAXIMUM_HIT_LIMIT);
+  PaginatedListTo<OfferEntity> offers = getOfferDao().findOffers(criteria);
+
+
+  if (OffermanagementFeatures.DISCOUNT.isActive()) {
+    return getOfferEtosDiscount(offers);
+  } else {
+    return getOfferEtosNormalPrice(offers);
+  }
+
+}
+
+
+// Strategy 1: Return the OfferEtos with the normal price
+private PaginatedListTo<OfferEto> getOfferEtosNormalPrice(PaginatedListTo<OfferEntity> offers) {
+  return mapPaginatedEntityList(offers, OfferEto.class);
+}
+
+// Strategy 2: Return the OfferEtos with the new, discounted price
+private PaginatedListTo<OfferEto> getOfferEtosDiscount(PaginatedListTo<OfferEntity> offers) {
+  offers = addDiscountToOffers(offers);
+  return mapPaginatedEntityList(offers, OfferEto.class);
+}
+
+private PaginatedListTo<OfferEntity> addDiscountToOffers(PaginatedListTo<OfferEntity> offers) {
+  for (OfferEntity oe : offers.getResult()) {
+    Double oldPrice = oe.getPrice().getValue().doubleValue();
+
+    // calculate the new price and round it to two decimal places
+    BigDecimal newPrice = new BigDecimal(oldPrice * 0.75);
+    newPrice = newPrice.setScale(2, RoundingMode.HALF_UP);
+
+    oe.setPrice(new Money(newPrice));
+  }
+
+  return offers;
+}
+
+
+
+
+
+
Guidelines for a successful use of feature-toggles
+
+

The use of feature-toggles requires a specified set of guidelines to maintain the overview on the software. The following is a collection of considerations and examples for conventions that are reasonable to use.

+
+
+
Minimize the number of toggles
+
+

When using too many toggles at the same time, it is hard to maintain a good overview of the system and things like finding bugs are getting much harder. Additionally, the management of toggles in the configuration interface gets more difficult due to the amount of toggles.

+
+
+

To prevent toggles from piling up during development, a toggle and the associated obsolete source code should be removed after the completion of the corresponding feature. In addition to that, the existing toggles should be revisited periodically to verify that these are still needed and therefore remove legacy toggles.

+
+
+
+
Consistent naming scheme
+
+

A consistent naming scheme is the key to a structured and easily maintainable set of features. This should include the naming of toggles in the source code and the appropriate naming of commit messages in the VCS. The following section contains an example for a useful naming scheme including a small example.

+
+
+

Every Feature-Toggle in the system has to get its own unique name without repeating any names of features, which were removed from the system. The chosen names should be descriptive names to simplify the association between toggles and their purpose. If the feature should be split into multiple sub-features, you might want to name the feature like the parent feature with a describing addition. If for example you want to split the DISCOUNT feature into the logic and the UI part, you might want to name the sub-features DISCOUNT_LOGIC and DISCOUNT_UI.

+
+
+

The entry in the togglz configuration enum should be named identically to the aforementioned feature name. The explicitness of feature names prevents a confusion between toggles due to using multiple enums.

+
+
+

Commit messages are very important for the use of feature-toggles and also should follow a predefined naming scheme. You might want to state the feature name at the beginning of the message, followed by the actual message, describing what the commit changes to the feature. An example commit message could look like the following:

+
+
+
+
DISCOUNT: Add the feature-toggle to the offermanagement implementation.
+
+
+
+

Mentioning the feature name in the commit message has the advantage, that you can search your git log for the feature name and get every commit belonging to the feature. An example for this using the tool grep could look like this.

+
+
+
+
$ git log | grep -C 4 DISCOUNT
+
+commit 034669a48208cb946cc6ba8a258bdab586929dd9
+Author: Florian Luediger <florian.luediger@somemail.com>
+Date:   Thu Jul 7 13:04:37 2016 +0100
+
+DISCOUNT: Add the feature-toggle to the offermanagement implementation.
+
+
+
+

To keep track of all the features in your software system, a platform like GitHub offers issues. When creating an issue for every feature, you can retrace, who created the feature and who is assigned to completing its development. When referencing the issue from commits, you also have links to all the relevant commits from the issue view.

+
+
+
+
Placement of toggle points
+
+

To maintain a clean codebase, you definitely want to avoid using the same toggle in different places in the software. There should be one single query of the toggle which should be able to toggle the whole functionality of the feature. If one single toggle point is not enough to switch the whole feature on or off, you might want to think about splitting the feature into multiple ones.

+
+
+
+
Use of fine-grained features
+
+

Bigger features in general should be split into multiple sub-features to maintain the overview on the codebase. These sub-features get their own feature-toggle and get implemented independently.

+
+
+ +
+

==Accessibility

+
+
+

TODO

+
+ + + +
+ +
+ + + + + +
+ + +devon4j-kafka has been abandoned. Its main feature was the implementation of a retry pattern using multiple topics. This implementation has become an integral part of Spring Kafka. We recommend to use Spring Kafka's own implementation for retries. +
+
+
+

==Messaging Services

+
+
+

Messaging Services provide an asynchronous communication mechanism between applications. Technically this is implemented using Apache Kafka .

+
+
+

For spring, devonfw uses Spring-Kafka as kafka framework. +For more details, check the devon4j-kafka.

+
+ +
+ +
+

==Messaging

+
+
+

Messaging in Java is done using the JMS standard from JEE.

+
+
+
+
+
Products
+
+

For messaging you need to choose a JMS provider such as:

+
+
+ +
+
+
+
Receiver
+
+

As a receiver of messages is receiving data from other systems it is located in the service-layer.

+
+
+
JMS Listener
+
+

A JmsListener is a class listening and consuming JMS messages. It should carry the suffix JmsListener and implement the MessageListener interface or have its listener method annotated with @JmsListener. This is illustrated by the following example:

+
+
+
+
@Named
+@Transactional
+public class BookingJmsListener /* implements MessageListener */ {
+
+  @Inject
+  private Bookingmanagement bookingmanagement;
+
+  @Inject
+  private MessageConverter messageConverter;
+
+  @JmsListener(destination = "BOOKING_QUEUE", containerFactory = "jmsListenerContainerFactory")
+  public void onMessage(Message message) {
+    try {
+      BookingTo bookingTo = (BookingTo) this.messageConverter.fromMessage(message);
+      this.bookingmanagement.importBooking(bookingTo);
+    } catch (MessageConversionException | JMSException e) {
+      throw new InvalidMessageException(message);
+    }
+  }
+}
+
+
+
+
+
+
Sender
+
+

The sending of JMS messages is considered as any other sending of data like kafka messages or RPC calls via REST using service-client, gRPC, etc. +This will typically happen directly from a use-case in the logic-layer. +However, the technical complexity of the communication and protocols itself shall be hidden from the use-case and not be part of the logic layer. +With spring we can simply use JmsTemplate to do that.

+
+
+ +
+

==Full Text Search

+
+
+

If you want to offer all your users fast and simple searches with just a single search field (like in google), you need full text indexing and search support.

+
+
+
+
Solutions
+
+ +
+
+

Maybe you also want to use native features of your database

+
+ +
+
+
Best Practices
+
+

TODO

+
+
+
+
+

1.78. Tutorials

+ +
+

==Creating a new application

+
+
+
Running the archetype
+
+

In order to create a new application you must use the archetype provided by devon4j which uses the maven archetype functionality.

+
+
+

To create a new application, you should have devonfw IDE installed. Follow the devon ide documentation to install +it. +You can choose between 2 alternatives: create it from the command line or, in a more visual manner, within Eclipse.

+
+
+
From command Line
+
+

To create a new devon4j application from command line, you can simply run the following command:

+
+
+
+
devon java create com.example.application.sampleapp
+
+
+
+

For low-level creation you can also manually call this command:

+
+
+
+
mvn -DarchetypeVersion=${devon4j.version} -DarchetypeGroupId=com.devonfw.java.templates -DarchetypeArtifactId=devon4j-template-server archetype:generate -DgroupId=com.example.application -DartifactId=sampleapp -Dversion=1.0.0-SNAPSHOT -Dpackage=com.devonfw.application.sampleapp
+
+
+
+

Attention: The archetypeVersion (first argument) should be set to the latest version of devon4j. You can easily determine the version from this badge: +latest devon4j version

+
+
+

Further providing additional properties (using -D parameter) you can customize the generated app:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 17. Options for app template
propertycommentexample

dbType

Choose the type of RDBMS to use (hana, oracle, mssql, postgresql, mariadb, mysql, etc.)

-DdbType=postgresql

batch

Option to add a batch module

-Dbatch=batch

+
+
+
From Eclipse
+
+
+
After that, you should follow this Eclipse steps to create your application:
+
+
+
+
    +
  • +

    Create a new Maven Project.

    +
  • +
  • +

    Choose the devon4j-template-server archetype, just like the image.

    +
  • +
+
+
+
+Select archetype +
+
+
+
    +
  • +

    Fill the Group Id, Artifact Id, Version and Package for your project.

    +
  • +
+
+
+
+Configure archetype +
+
+
+
    +
  • +

    Finish the Eclipse assistant and you are ready to start your project.

    +
  • +
+
+
+
+
+
What is generated
+
+

The application template (archetype) generates a Maven multi-module project. It has the following modules:

+
+
+
    +
  • +

    api: module with the API (REST service interfaces, transferobjects, datatypes, etc.) to be imported by other apps as a maven dependency in order to invoke and consume the offered (micro)services.

    +
  • +
  • +

    core: maven module containing the core of the application.

    +
  • +
  • +

    batch: optional module for batch(es)

    +
  • +
  • +

    server: module that bundles the entire app (core with optional batch) as a WAR file.

    +
  • +
+
+
+

The toplevel pom.xml of the generated project has the following features:

+
+
+
    +
  • +

    Properties definition: Spring-boot version, Java version, etc.

    +
  • +
  • +

    Modules definition for the modules (described above)

    +
  • +
  • +

    Dependency management: define versions for dependencies of the technology stack that are recommended and work together in a compatible way.

    +
  • +
  • +

    Maven plugins with desired versions and configuration

    +
  • +
  • +

    Profiles for test stages

    +
  • +
+
+
+
+
How to run your app
+
+
Run app from IDE
+
+

To run your application from your favourite IDE, simply launch SpringBootApp as java application.

+
+
+
+
Run app as bootified jar or war
+
+

More details are available here.

+
+ +
+

==Quarkus

+
+
+

Quarkus is a Java framework for building cloud-native apps. +It is fully supported by devonfw as an option and alternative to spring. +Additional things like extensions will be available on the devon4quarkus GitHub repository.

+
+
+
+
+
+

1.79. Guide to the Reader

+
+

Depending on your intention of reading this document, you might be more interested in the following chapters:

+
+
+
    +
  • +

    If you are completely new to Quarkus, you may be interested in the pros and cons of Quarkus. Also, take a look at the official Quarkus website. You might also be interested in the features that GraalVM offers.

    +
  • +
  • +

    If you are new to devon4j, take a look at devon4j’s recommendations on general best practices. Check out the chapters on architecture design, project structuring, and coding conventions. Follow the referenced links to explore a topic in more depth.

    +
  • +
  • +

    If you are an experienced Spring developer and want to get in touch with Quarkus, read our Getting started with Quarkus for Spring developers guide.

    +
  • +
  • +

    If you’re looking to build your first Quarkus application, the Quarkus website offers some good getting started guides. Also, check out our Quarkus template guide, which gives you some recommendations on extensions and frameworks to use. It also provides some links to the Quarkus code generator with preselected configurations you can use to create your application.

    +
  • +
  • +

    If you want to have a Quarkus sample application using devon4j recommendations, check out our Quarkus reference application.

    +
  • +
  • +

    If you have a Spring application and want to migrate it to Quarkus, take a look at our migration guide.

    +
  • +
  • +

    If you already have some experience with devon4j and Quarkus and need more information on a specific topic, check out our Quarkus guides. If you don’t find what you are looking for there, check out the general section. devon4j uses general solutions for Java, so solutions for both Quarkus and Spring are documented there.

    +
  • +
  • +

    If you want to learn how to build native images, check out this guide.

    +
  • +
+
+
+
+

1.80. Pros

+
+

Quarkus offers the following benefits:

+
+
+
    +
  • +

    fast turn-around cycles for developers
    +Save changes in your Java code and immediately test the results without restarting or waiting

    +
  • +
  • +

    faster start-up and less memory footprint
    +When building your app as native-images via GraalVM, it gets highly optimized. As a result, it starts up lightning fast and consumes much less memory. This is a great advantage for cloud deployment as well as for sustainability. You can find a performance comparison between Spring and Quarkus here.

    +
  • +
  • +

    clean and lean +As Quarkus was born as a cloud-native framework, it is very lightweight and does not carry much history or legacy.

    +
  • +
+
+
+
+

1.81. Cons

+
+

Quarkus has the following drawbacks:

+
+
+
    +
  • +

    less flexible
    +Quarkus is less flexible compared to spring, or in other words, it is more biased and coupled to specific implementations. However, the implementations work and you have less things to choose and worry about. However, in case you want to integrate a specific or custom library, you may hit limitations or lose support for native-images, especially when that library is based on reflection. Therefore, check your requirements and technology stack early on when making your choice.

    +
  • +
  • +

    less established
    +Since Quarkus was born in 2019, it is modern but also less established. It will be easier to get developers for Spring, but we already consider Quarkus mature and established enough for building production-ready apps.

    +
  • +
+
+
+

==Quarkus Quickstart

+
+
+

This guide serves as a quickstart on how to create a Quarkus app, briefly presenting the key functionalities that Quarkus provides, both for beginners and experienced developers.

+
+
+

1.81.1. Introduction to Quarkus

+
+

To get a first introduction to Quarkus, you can read the Quarkus introduction guide. To get a brief overview of where you can find the important Quarkus related guides, follow the chapter guide to the reader. +Also, see the comparison of the advantages and disadvantages of a Quarkus application compared to the alternative framework Spring. +This comparison will be supported by our performance comparison between Spring and Quarkus, which demonstrates the lower resource consumption and startup time of Quarkus applications.

+
+
+
+

1.81.2. Installation of Tools and Dependencies

+
+

First, we need to install some dependencies and tools before we can start programming. Our tool devonfw-ide comes with many development tools for you. +We need to install the following tools for this guide:

+
+
+
    +
  • +

    Maven

    +
  • +
  • +

    Java

    +
  • +
  • +

    any IDE (devonfw-ide supports Eclipse, Intellij and VScode)

    +
  • +
  • +

    Docker

    +
  • +
+
+
+

We recommend installing the devonfw-ide with the tools, but if you already have your system configured and the tools above installed, you can skip to Bootstrap a Quarkus Project, otherwise we will show you how to set up and update your devonfw-ide.

+
+
+
devonfw-ide
+
    +
  1. +

    Install devonfw-ide
    +Follow the Setup to install the devonfw-ide with Java, Maven, Eclipse and VScode.

    +
    +
      +
    1. +

      Command to install Docker
      +devon docker setup

      +
    2. +
    +
    +
  2. +
  3. +

    Update devonfw-ide
    +As we are still working on improving devonfw-ide, we recommend to update your already installed devonfw-ide and tools in order to include essential features for cloud development with Quarkus that you could be missing.

    +
  4. +
+
+
+

Use the commands devon ide update, devon ide update software, and devon ide scripts to update devonfw-ide and all installed software.

+
+
+

Go to the main folder under workspaces of the devonfw-ide installation. +We will create the project there.

+
+
+
+

1.81.3. Bootstrap a Quarkus Project

+
+

Quarkus provides multiple ways to bootstrap a project. +The option to bootstrap a project via the command-line is shown in the Quarkus getting started guide Bootstrap the project. +Quarkus also provides a project builder where you can select some extensions, the build tool for your project, and if you want, some starter code. +This will deliver a project skeleton with the configured project dependencies and also contributes the information to compile the application natively. To get some recommendations on starter templates, follow the guide on: template recommendations.

+
+
+ + + + + +
+ + +
+

By creating a Quarkus project from the command-line or with the project builder, you get a different project structure and have to adapt it to the devon4j conventions shown in the next Chapter.

+
+
+
+
+
Project Structure
+
+

We provide a recommendation and guideline for a modern project structure to help organize your project into logically related modules. +In order to comply with the requirements of modern cloud development and microservice architectures, follow the guide and apply the modern project structure to your project. You can also find similar modules in our example projects.

+
+
+
+
+

1.81.4. Introduction to Quarkus Functionality

+
+

Before we start programming, you should first have a look at the functionality of Quarkus.

+
+
+
Quarkus functionality guides
+
    +
  1. +

    Getting started guide from Quarkus
    +This guide presents a good overview of the functionality of Quarkus. The simple Greeting Service gives a brief introduction into concepts like CDI, testing, dev mode, packaging, and running the app.

    +
  2. +
  3. +

    From Spring to Quarkus
    +For experienced Spring developers that have already followed devon4j guidelines, you can read our guide to getting started with Quarkus for Spring developer, as it goes more into the differences that can give you a more detailed comparison to Spring.

    +
    +
      +
    1. +

      Migrate a Spring app to Quarkus
      +This guide shows how to migrate a Spring application to a Quarkus application with devon4j conventions.

      +
    2. +
    +
    +
  4. +
+
+
+
+

1.81.5. Create a REST service

+
+

Now let’s create our first REST CRUD service with Quarkus. +We give you the options of using a guide to start to code the service yourself or to just download a service that’s ready to use.

+
+
+
Options
+
    +
  1. +

    Create the service yourself
    +There is a good Quarkus guide for a simple JSON REST service that will guide you through your first application and help you implement the definition of endpoints with JAX-RS and an Entity that will be managed by the service, and also how to configure the JSON support.

    +
  2. +
  3. +

    Use an existing Quarkus project
    +You don’t want to code a service and just want to test some Quarkus functionalities? Just load a Quarkus sample project provided for every existing quickstart guide and the supported framework. +Our Team also provides some Quarkus applications that are working and can be loaded and tested.

    +
    +
      +
    • +

      reference project is a service that manages products. It contains the devon4j modern project structure, pagination, queries, a Postgres database, SwaggerUI, and support for Kubernetes deploy. To add OpenTelemetry support, see the following guide. +This project will be steadily improved and is used to showcase the abilities of Quarkus with devon4j.

      +
    • +
    • +

      minimal Quarkus project is just the Quarkus project from a getting started with Quarkus guide with a Greeting Services modified with the correct modern structure mentioned in the chapter Project Structure

      +
    • +
    +
    +
  4. +
+
+
+
+

1.81.6. OpenAPI generation

+
+

We provide a guide with a short introduction to the OpenAPI specification with two plugins that are important in a Quarkus Context.

+
+ +
+

A more detailed usage guide to the Smallrye Plugin is provided by Quarkus OpenAPI and Swagger guide.

+
+
+
+

1.81.7. How to Integrate a Database

+
+

The next step for our REST service would be to integrate a database to store the objects of the entity.

+
+
+

With Quarkus, adding a database can be easy, because Quarkus can take over the build-up and connection process. +First, you should understand our guides on the concepts of working with data. Then, we will show how to integrate a database with Quarkus.

+
+
+
Data Principles Guides
+
    +
  1. +

    General devon4j JPA guide
    +To get an insight into the general JPA usage, read the JPA guide containing a general explanation of the Java Persistence API.

    +
  2. +
  3. +

    Difference to SpringData
    +If you have already worked with SpringData, this is also partially supported with Quarkus. This is explained in more detail in this SpringData Guide.

    +
  4. +
+
+
+
Database Integration
+
    +
  1. +

    Quarkus zero config dev mode
    +Starting with the database implementation in Quarkus, we recommend for beginners to use the DEV mode Zero Config Setup (Dev Services). This is especially great for testing the code without a database set up. +Quarkus does all the work for you and configures a database and creates the database and tables (schemas) for you.

    +
    +
      +
    1. +

      Configuration Properties
      +A list of all database configuration properties for the Dev services

      +
    2. +
    +
    +
  2. +
  3. +

    Integrate a simple Hibernate ORM database
    +The zero config setup only works in Dev mode; it’s convenient in the first phases of creating your service, but if the goal is also to get a deployable version, you have to create your own database and integrate it. +This Quarkus guide shows how to integrate a Hibernate ORM database with an example service.

    +
    +
      +
    1. +

      Configuration list for JDBC
      +A list of all possible JDBC configuration properties

      +
    2. +
    +
    +
  4. +
  5. +

    Reactive CRUD application with Panache
    +Quarkus unifies reactive and imperative programming. +Reactive is an architectural principle to build robust, efficient, and concurrent applications. +For an introduction into reactive and how Quarkus enables it, follow this Quarkus reactive architecture article and also the reactive quickstart. +To get started with reactive and implement reactive methods, you can follow the Quarkus reactive guide. +The reactive guide uses the Quarkus based implementation of a Hibernate ORM called Panache. +The implementation is not our first choice with devon4j and therefore not part of our recommendations, but to understand the reactive guide you can read the Hibernate ORM with Panache guide first to prevent possible problems following the guide.

    +
  6. +
+
+
+ + + + + +
+ + +
+

You need an installed Docker version for the zero config setup.

+
+
+
+
+
Database Migration
+

For schema-based databases, we recommend migrating databases with Flyway. +In that case, our general migration guide can give you an overview if you are not familiar with migration. +Flyway guide for Quarkus: +This Quarkus guide will show how to work with the Flyway extension in a Quarkus application. +This should be used if you start your own database and do not leave the creation to Quarkus.

+
+
+
+

1.81.8. Testing a Quarkus Application

+
+

After we have built the service, we have to verify it with some tests. +We will give you some guidelines to implement some test cases.

+
+
+
Testing Guides
+
    +
  1. +

    General testing guide
    +For users that aren’t familiar with the devon4j testing principles, we created a general best practices and recommendations guide for testing.

    +
    +
      +
    1. +

      Our guide for testing with Quarkus +In addition, we also provide a guide that specifically addresses the testing of a Quarkus application.

      +
    2. +
    +
    +
  2. +
+
+
+

Most of the Quarkus applications are already equipped with a basic test and our reference project provides some further test cases. If you want to improve and extend the tests, you can also follow the large Quarkus guide for testing.

+
+
+
+

1.81.9. Packaging of a Quarkus application and creation of a native executable

+
+

Quarkus applications can be packaged into different file types. The following link will show you how to build them and give you a short explanation of the characteristics of these files.

+
+
+
Package types
+
    +
  1. +

    fast-jar

    +
  2. +
  3. +

    mutable-jar

    +
  4. +
  5. +

    uber-jar

    +
  6. +
  7. +

    native executable

    +
  8. +
+
+
+

To package an application, use the command mvn package and Quarkus will generate the output in the /target folder. For the native executables, the command needs more parameters, which is explained in the link above.

+
+
+

Configure the Output with these configuration properties

+
+
+
+

1.81.10. Create and build a Docker Image

+
+

Quarkus supports Jib, S2I and Docker for building images. We focus on building a Quarkus App with Docker. +You get a generated Dockerfile from Quarkus in the src/main/docker folder of any project generated from Quarkus. There are multiple Dockerfiles.

+
+
+
Dockerfiles
+
    +
  1. +

    Dockerfile.jvm
    +Dockerfile for a Quarkus application in JVM mode, running in a Red Hat Universal Base Image 8 Minimal container.

    +
  2. +
  3. +

    Dockerfile.legacy-jar
    +DockerFile for Quarkus application in JVM mode with the legacy jar running in Red Hat Universal Base Image 8 Minimal Container.

    +
  4. +
  5. +

    Dockerfile.native
    +Dockerfile using the native executable running in Red Hat Universal Base Image 8 Minimal container.

    +
  6. +
  7. +

    Dockerfile.native-distroless +The native file will run in a Distroless container. Distroless images are very small containers with just the application and runtime dependencies and without the other programs that come with a Linux distribution.

    +
  8. +
+
+
+
+
+

For more information to the different executables go back to the chapter Packaging of a Quarkus application and creation of a native executable

+
+
+
+
+

To simply build and run a Docker image, you can follow the instructions Quarkus provides for every Dockerfile in the comments block.

+
+
+

Docker commands example for the JVM Dockerfile from our reference project

+
+
+
+
####
+##This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode
+#
+##Before building the container image run:
+#
+##./mvnw package
+#
+##Then, build the image with:
+#
+##docker build -f src/main/docker/Dockerfile.jvm -t quarkus/quarkus-basics-jvm .
+#
+##Then run the container using:
+#
+##docker run -i --rm -p 8080:8080 quarkus/quarkus-basics-jvm
+#
+##If you want to include the debug port into your docker image
+##you will have to expose the debug port (default 5005) like this :  EXPOSE 8080 5005
+#
+##Then run the container using :
+#
+##docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/quarkus-basics-jvm
+#
+###
+
+
+
+

Quarkus is also able to build the image while packaging the application, so you don’t have to execute the command from above. +To perform Docker builds with the generated Dockerfiles from above, you need to add the following extension to your project with the command mvn quarkus:add-extension -Dextensions="container-image-docker".

+
+
+

You also have to set quarkus.container-image.build=true. You can add this to your application.properties or just append it to the packaging command like this: mvn package -Dquarkus.container-image.build=true.

+
+
+

If your needs exceed the instructions given by the file, we recommend following the Docker getting started guide to get familiar with Docker and customize the Dockerfiles according to your needs. +To specify your container build, you can use the general container image configuration properties and the Docker image configuration properties when building and running Docker images.

+
+ +
+

==Migrate from Spring to Quarkus

+
+
+

This guide will cover the migration process of a Spring application to a Quarkus application. There are already articles about migrating from Spring to Quarkus (e.g. https://developers.redhat.com/blog/2020/04/10/migrating-a-spring-boot-microservices-application-to-quarkus, https://dzone.com/articles/migrating-a-spring-boot-application-to-quarkus-cha). +This guide will focus more on the devon4j specific aspects. We assume that a working Spring application exists, built in the classic devon4j specific way (e.g. Jump The Queue or My Thai Star).

+
+
+
+

1.81.11. Create the Quarkus application

+
+

We start with an empty Quarkus project. You can create the project with Maven on the command line or use the online generator. The advantage of the online generator is that you have a pre-selection of dependencies to use in your project. +For starters, let’s select the basic dependencies required to develop a REST service with database connectivity (you can use one of the links in the Quarkus template guide): RESTEasy JAX-RS, RESTEasy Jackson, Hibernate ORM, Spring Data JPA API, JDBC Driver (choose the type of database you need), Flyway (if you have database migration schemas), SmallRye Health (optional for Health Monitoring)

+
+
+

The list does not include all required dependencies. We will add more dependencies to the project later. For now, generate the application with these dependencies.

+
+
+
Migration Toolkit from Red Hat
+
+

Red Hat provides a migration toolkit (MTA, Migration Toolkit for Applications), that supports migration of a Spring to a Quarkus application. There are several versions of this toolkit (e.g., a web console, a Maven plugin, or an IDE plugin). +The MTA analyzes your existing application and generates a report with hints and instructions for migrating from Spring to Quarkus. For example, it gives you an indication of which dependencies are not supported in your project for a Quarkus application and which dependencies you need to swap them with. The analysis is rule-based, and you can also add your own rules that will be checked during analysis.

+
+
+
+
+

1.81.12. Entities

+
+

There is nothing special to consider when creating the entities. In most cases, you can simply take the code from your Spring application and use it for your Quarkus application. Usually, the entities extend a superclass ApplicationPersistenceEntity containing, for example, the id property. You can also take this class from your Spring application and reuse it.

+
+
+
+

1.81.13. Transfer objects

+
+

The next step is to create the appropriate transfer objects for the entities. In a devon4j Spring application, we would use CobiGen to create these classes. Since CobiGen is not usable for this purpose in Quarkus applications yet, we have to create the classes manually.

+
+
+

First, we create some abstract base classes for the search criteria and DTO classes. Normally, these would also be created by CobiGen.

+
+
+
Listing 19. AbstractSearchCriteriaTo
+
+
public abstract class AbstractSearchCriteriaTo extends AbstractTo {
+
+  private static final long serialVersionUID = 1L;
+
+  private Pageable pageable;
+
+  //getter + setter for pageable
+}
+
+
+
+
Listing 20. AbstractDto
+
+
public abstract class AbstractDto extends AbstractTo {
+
+  private static final long serialVersionUID = 1L;
+
+  private Long id;
+
+  private int modificationCounter;
+
+  public AbstractDto() {
+
+    super();
+  }
+
+  //getter + setter
+
+  @Override
+  protected void toString(StringBuilder buffer) {
+    ...
+  }
+}
+
+
+
+

The class AbstractTo, extended by other classes, would be provided by the devon4j-basic module in a devon4j Spring application. You can take the code from here and reuse it in your Quarkus project.

+
+
+

Now you can create your transfer objects. Most of the code of the transfer objects of your Spring application should be reusable. For Quarkus, we recommend (as mentioned here) to use *Dto instead of *Eto classes. Be sure to change the names of the classes accordingly.

+
+
+
+

1.81.14. Data Access Layer

+
+

In devon4j, we propose to use Spring Data JPA to build the data access layer using repositories and Querydsl to build dynamic queries. We will also use this approach for Quarkus applications, but we need to change the implementation because the devon4j modules are based on reflection, which is not suitable for Quarkus. +In Quarkus we will use Querydsl using code generation. So for this layer, more changes are required and we can’t just take the existing code.

+
+
+

First, create a repository interface for your entity class that extends JpaRepository (see here).

+
+
+

To add QueryDSL support to your project, add the following dependencies to your pom.xml file:

+
+
+
Listing 21. pom.xml
+
+
<dependency>
+  <groupId>com.querydsl</groupId>
+  <artifactId>querydsl-jpa</artifactId>
+  <version>4.3.1</version>
+</dependency>
+<dependency>
+  <groupId>com.querydsl</groupId>
+  <artifactId>querydsl-apt</artifactId>
+  <scope>provided</scope>
+  <version>4.3.1</version>
+</dependency>
+
+
+
+

As mentioned above, we will use QueryDSL with code generation. For this, add the QueryDSL annotation processor to your plugins:

+
+
+
Listing 22. pom.xml
+
+
<plugins>
+...
+  <plugin>
+    <groupId>com.mysema.maven</groupId>
+    <artifactId>apt-maven-plugin</artifactId>
+    <version>1.1.3</version>
+    <executions>
+      <execution>
+        <phase>generate-sources</phase>
+        <goals>
+          <goal>process</goal>
+        </goals>
+        <configuration>
+          <outputDirectory>target/generated-sources/annotations</outputDirectory>
+          <processor>com.querydsl.apt.jpa.JPAAnnotationProcessor</processor>
+        </configuration>
+      </execution>
+    </executions>
+  </plugin>
+</plugins>
+
+
+
+

To implement the queries, follow the corresponding guide.

+
+
+

Set the following properties in the application.properties file to configure the connection to your database (see also here):

+
+
+
+
quarkus.datasource.db-kind=...
+quarkus.datasource.jdbc.url=...
+quarkus.datasource.username=...
+quarkus.datasource.password=...
+
+
+
+
+

1.81.15. Logic Layer

+
+

For the logic layer, devon4j uses a use-case approach. You can reuse the use case interfaces from the api module of the Spring application. Again, make sure to rename the transfer objects.

+
+
+

Create the appropriate class that implements the interface. Follow the implementation section of the use-case guide to implement the methods. For mapping the entities to the corresponding transfer objects, see the next section.

+
+
+
+

1.81.16. Mapping

+
+

For bean mapping, we need to use a completely different approach in the Quarkus application than in the Spring application. For Quarkus, we use MapStruct, which creates the mapper at build time rather than at runtime using reflection. Add the following dependencies to your pom.xml.

+
+
+
Listing 23. pom.xml
+
+
<dependency>
+  <groupId>org.mapstruct</groupId>
+  <artifactId>mapstruct-processor</artifactId>
+  <version>1.4.2.Final</version>
+</dependency>
+<dependency>
+  <groupId>org.mapstruct</groupId>
+  <artifactId>mapstruct</artifactId>
+  <version>1.4.2.Final</version>
+</dependency>
+
+
+
+

Then you can create the mapper as follows:

+
+
+
Listing 24. Mapper
+
+
@Mapper(componentModel = "cdi")
+public interface YourEntityMapper {
+  YourEntityDto map(YourEntity entity);
+
+  YourEntity map(YourEntityDto dto);
+
+  ...
+}
+
+
+
+

Inject the mapper into your use-case implementation and simply use the methods. The method implementations of the mapper are created when the application is built.

+
+
+
+

1.81.17. Service Layer

+
+

For the implementation of the service layer, we use JAX-RS for both Quarkus and Spring applications to create the REST services. Classic devon4j Spring applications rely on Apache CXF as the implementation of JAX-RS. +For Quarkus, we use RESTEasy. Since both are implementations of JAX-RS, much of the Spring application code can be reused.

+
+
+

Take the definition of the REST endpoints from the api module of the Spring application (make sure to rename the transfer objects), inject the use-cases from the logic layer and use them in the REST service methods as follows:

+
+
+
Listing 25. REST service
+
+
@Path("/path/v1")
+public class YourComponentRestService {
+
+  @Inject
+  UcFindYourEntity ucFindYourEntity;
+
+  @Inject
+  UcManageYourEntity ucManageYourEntity;
+
+  @GET
+  @Path("/yourEntity/{id}/")
+  public YourEntityDto getYourEntity(@PathParam("id") long id) {
+
+    return this.ucFindYourEntity.findYourEntity(id);
+  }
+
+  ...
+}
+
+
+
+
+

1.81.18. Summary

+
+

As you have seen, some parts hardly differ when migrating a Spring application to a Quarkus application, while other parts differ more. The above sections describe the parts needed for simple applications that provide REST services with a data access layer. +If you add more functionality, more customization and other frameworks, dependencies may be required. If that is the case, take a look at the corresponding guide on the topic in the devon4j documentation or check if there is a tutorial on the official Quarkus website.

+
+
+

Furthermore, we can summarize that migrating from a Spring application to a Quarkus application is not complex. Although Quarkus is a very young framework (release 1.0 was in 2019), it brings a lot of proven standards and libraries that you can integrate into your application. +This makes it easy to migrate and reuse code from existing (Spring) applications. Also, Quarkus comes with Spring API compatibility for many Spring modules (Spring Data JPA, Spring DI, etc.), which makes it easier for developers to reuse their knowledge.

+
+ +
+

==Spring Native vs Quarkus

+
+
+

Nowadays, it is very common to write an application and deploy it to a cloud. +Serverless computing and Function-as-a-Service (FaaS) have become +very popular. +While many challenges arise when deploying a Java application into the latest cloud environment, the biggest challenges facing developers are memory footprint and the startup time required +for the Java application, as both of these keep the host’s costs high in public clouds and Kubernetes clusters. With the introduction of frameworks like Micronaut and MicroProfile, Java processes are getting faster and more lightweight. In a similar context, Spring has introduced +Spring Native which aims to solve the big memory footprint of Spring and its slow startup time to potentially rival the new framework called Quarkus, by Red Hat. This document briefly discusses both of these frameworks and their potential suitability with devonfw.

+
+
+
+

1.81.19. Quarkus

+
+

Quarkus is a full-stack, Kubernetes-native Java framework made for JVMs. With its container-first-philosophy and its native compilation with GraalVM, Quarkus optimizes Java for containers with low memory usage and fast startup times.

+
+
+

Quarkus achieves this in the following ways:

+
+
+
    +
  • +

    First Class Support for GraalVM

    +
  • +
  • +

    Build Time Metadata Processing: As much processing as possible is +done at build time, so your application will only contain the classes +that are actually needed at runtime. This results in less memory usage, +and also faster startup time, as all metadata processing has already been +done.

    +
  • +
  • +

    Reduction in Reflection Usage: Quarkus tries to avoid reflection as much as possible in order to reduce startup time and memory usage.

    +
  • +
  • +

    Native Image Pre Boot: When running in a native image, Quarkus +pre-boots as much of the framework as possible during the native image +build process. This means that the resulting native image has already +run most of the startup code and serialized the result into the +executable, resulting in an even faster startup-time.

    +
  • +
+
+
+

This gives Quarkus the potential for a great platform for serverless cloud and Kubernetes environments. For more information about Quarkus and its support for devonfw please refer to the Quarkus introduction guide.

+
+
+
+

1.81.20. Spring Native

+
+
+
+

The current version of Spring Native 0.10.5 is designed to be used with Spring Boot 2.5.6

+
+
+
+
+

Like Quarkus, Spring Native provides support for compiling Spring applications to native executables using the GraalVM native-image compiler, designed to be packaged in lightweight containers.

+
+
+

Spring Native is composed of the following modules:

+
+
+
    +
  • +

    spring-native: runtime dependency required for running Spring Native, provides also Native hints API.

    +
  • +
  • +

    spring-native-configuration: configuration hints for Spring classes used by Spring AOT plugins, including various Spring Boot auto-configurations.

    +
  • +
  • +

    spring-native-docs: reference guide, in adoc format.

    +
  • +
  • +

    spring-native-tools: tools used for reviewing image building configuration and output.

    +
  • +
  • +

    spring-aot: AOT transformation infrastructure common to Maven and Gradle plugins.

    +
  • +
  • +

    spring-aot-test: Test-specific AOT transformation infrastructure.

    +
  • +
  • +

    spring-aot-gradle-plugin: Gradle plugin that invokes AOT transformations.

    +
  • +
  • +

    spring-aot-maven-plugin: Maven plugin that invokes AOT transformations.

    +
  • +
  • +

    samples: contains various samples that demonstrate features usage and are used as integration tests.

    +
  • +
+
+
+
+

1.81.21. Native compilation with GraalVM

+
+

Quarkus and Spring Native both use GraalVM for native compilation. Using a native image provides some key advantages, such as instant startup, instant peak performance, and reduced memory consumption. However, there are also some drawbacks: Creating a native image is a heavy process that is slower than a regular application. A native image also has fewer runtime optimizations after its warmup. Furthermore, it is less mature than the JVM and comes with some different behaviors.

+
+
+

Key characteristics:

+
+
+
    +
  • +

    Static analysis of the application from the main entry point is +performed at build time.

    +
  • +
  • +

    Unused parts are removed at build time.

    +
  • +
  • +

    Configuration required for reflection, resources, and dynamic proxies.

    +
  • +
  • +

    Classpath is fixed at build time.

    +
  • +
  • +

    No class lazy loading: everything shipped in the executables will be loaded in memory on startup.

    +
  • +
  • +

    Some code will run at build time.

    +
  • +
+
+
+

There are limitations around some aspects of Java applications that are not fully supported

+
+
+
+

1.81.22. Build time and start time for apps

+ +++++ + + + + + + + + + + + + + + + + + + + +
Frameworkbuild timestart time

Spring Native

19.615s

2.913s

Quarkus Native executable

52.818s

0.802s

+
+
+

1.81.23. Memory footprints

+ ++++ + + + + + + + + + + + + + + + + +
Frameworkmemory footprint

Spring Native

109 MB

Quarkus Native executable

75 MB

+
+
+

1.81.24. Considering devonfw best practices

+
+

As of now, devonfw actively supports Spring but not Spring Native. +Although Quarkus only reached a stable release in early 2021, it has already been used successfully in multiple big projects, showing its potential to implement cloud native services with low resource consumption matching the needs of scalability and resilience in cloud native environments. +With major stakeholders behind the open source community like Red Hat, its development and growth from its kickoff to the current state is very impressive and really shows the market needs and focus. +Another big advantage of Quarkus is that it started on a green field and therefore did not need to circumvent main pillars of the Spring framework like reflection, being able to take clean and up-to-date design decisions not needing to cope with legacy issues. +Nonetheless, experimental support for some Spring libraries is already available in Quarkus, which makes switching from Spring to Quarkus much easier if needed. +We also provide a guide +for Spring developers who want to adopt or try Quarkus for their +(next) projects as it really has some game-changing advantages over +Spring.

+
+
+
+

1.81.25. General recommendations and conclusion

+
+

Quarkus and Spring Native both have their own use cases. Under the consideration of the limitations of GraalVM to be used for native images built by Quarkus and Spring Native, there is a strong recommendation towards Quarkus from devonfw. +One essential differentiation has to be made on the decision for native or against native applications - the foreseen performance optimization of the JIT compiler of the JVM, which is not available anymore in a native image deployment. +For sure, both component frameworks will also run on a JVM getting advantage again from JIT compilation, but depending on the overall landscape then, it is recommended to stay with the knowledge of the available teams, e.g. continue making use of devon4j based on spring or even if already in that state also here make use of Quarkus on JVM.

+
+ +
+

==Modern project structure

+
+
+

With trends such as cloud, microservices, lean, and agile, we decided for a more modern project structure that fits better to recent market trends. +When starting new projects with devonfw, and especially in the context of cloud-native development, we strongly recommend this modern approach over the classic structure.

+
+
+
+

1.81.26. Modules

+
+

Due to trends such as microservices, we are building smaller apps compared to moduliths. +For simplicity, we therefore do not split our app into different modules and keep everything top-level and easy.

+
+
+

In addition to java and resources, we also add helm for helm templates and docker for docker scripts (e.g. Dockerfile) in src/main:

+
+
+
+
├──/src
+|  ├──/main
+|  |  ├──/docker
+|  |  ├──/helm
+|  |  ├──/java
+|  |  └──/resources
+|  └──/test
+|     ├──/java
+|     └──/resources
+└──/pom.xml
+
+
+
+
+

1.81.27. Deployment

+
+

For modern projects, we strongly recommend that your build process generates the final deliverable as an OCI compliant container. +Further, to go fully cloud-native, you should build your app as a native image via the GraalVM AOT compiler. +Therefore, we recommend using Quarkus as your main framework. +In case you want to go with Spring, you may consider using Spring Native.

+
+
+
+

1.81.28. Layers

+
+

The package structure of your code inside src/main/java (and src/test/java) of your app is described in our coding conventions in the sections packages. +For the modern project structure, the layers are defined by the following table:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Layer«layer»Description

service

service

The service layer exposing functionality via its remote API. Typical protocol is REST. May also be any other protocol you are using such as gRPC.

domain

domain

The domain with the data-model and DB access. Use sub-package (in «detail») repository for repository and dao for DAOs. Also we recommend to put entities in model sub-package.

logic

logic

The logic layer with the functionality providing the business value.

common

common

cross-cutting code not assigned to a technical layer.

+
+
+

1.81.29. Architecture Mapping

+
+

In order to help you to map the architecture, packaging, layering, etc. to the code and see where different code elements should be placed, +we provide this architecture mapping:

+
+
+
+
«root»
+├──.«component»
+|  ├──.domain
+|  |  ├──.repo
+|  |  |  ├──.«BusinessObject»Repository
+|  |  |  ├──.«BusinessObject»Fragment
+|  |  |  └──.«BusinessObject»FragmentImpl
+|  |  ├──.dao [alternative to repo]
+|  |  |  ├──.«BusinessObject»Dao
+|  |  |  └──.«BusinessObject»DaoImpl
+|  |  └──.model
+|  |     └──.«BusinessObject»Entity
+|  ├──.logic
+|  |  ├──.«BusinessObject»Validator
+|  |  ├──.«BusinessObject»EventsEmitter
+|  |  └──.Uc«Operation»«BusinessObject»[Impl]
+|  └──.rest
+|     └──.v1
+|        ├──.«Component»RestService
+|        ├──.mapper
+|        |     └──.«BusinessObject»Mapper
+|        └──.model
+|           └──.«BusinessObject»Dto
+└──.general
+   └──.domain
+      └──.model
+         └──.ApplicationPersistenceEntity
+
+
+ +
+

==Domain Layer

+
+
+

The domain layer is responsible for the data-model and mapping it to a database. +The most common approach is to use a Relational Database Management System (RDMS). In such a case, we strongly recommend to follow our JPA Guide. Some NoSQL databases are supported by spring-data, so you can consider the repository guide.

+
+
+

Note: The domain layer is the replacement for the data-access layer in the modern project structure.

+
+
+
+
+
+

1.82. Guides

+ +
+

==Getting started with Quarkus for Spring developers

+
+
+

As a Spring developer, you have heard more and more about Quarkus: its pros and cons, its fast growth etc. So, you decided to adopt/try Quarkus for your (next) project(s) and are wondering where to go next and what you need to pay attention to when moving from Spring to Quarkus.

+
+
+

This guide tries to address this exact concern. In the following, we will present you some main points you should be aware of when starting to develop with Quarkus, along with some useful sources.

+
+
+
    +
  1. +

    Quarkus is a fairly new Java toolkit. Thus, it is very well documented. It also provides a set of well-written technical guides that are a good starting point to get in touch and make the first steps with Quarkus. See here. It is an Open Source project licensed under the Apache License version 2.0. The source code is hosted in GitHub. If you have any questions or concerns, don’t hesitate to reach out to the Quarkus community.

    +
  2. +
  3. +

    Same as Spring Initializr, you can go to code.quarkus.io to create a new application. Also, check out our Template Quarkus Guide to see our recommendations on certain topics.

    +
  4. +
  5. +

    In Spring stack, we recommend structuring your application into multiple modules, known as our classic structure. Moving to Quarkus and the world of cloud-native microservices, where we build smaller applications compared to monoliths, we recommend keeping everything top-level and simple. Therefore, we propose the modern structure as a better fit.

    +
  6. +
  7. +

    Quarkus focuses not only on delivering top features, but also on the developer experience. The Quarkus’s Live Coding feature automatically detects changes made to Java files, application configuration, static resources, or even classpath dependency changes and recompiles and redeploys the changes. As that, it solves the problem of traditional Java development workflow, hence improves productivity.

    +
    +
    +
        Write Code → Compile → Deploy → Test Changes/ Refresh Browser/ etc → Repeat (traditional)
    +    Write Code → Test Changes/ Refresh Browser/ etc → Repeat (Quarkus)
    +
    +
    +
    +

    You can use this feature out of the box without any extra setup by running:

    +
    +
    +
    +
        mvn compile quarkus:dev
    +
    +
    +
    +

    Another highlight feature to speed up developing is the Quarkus’s Dev Mode with Dev Services, which can automatically provision unconfigured services in development and test mode. This means that if you include an extension and don’t configure it, Quarkus will automatically start the relevant service and wire up your application to use it, therefore saving you a lot of time setting up those services manually. In production mode, where the real configuration is provided, Dev Services will be disabled automatically.

    +
    +
    +

    Additionally, you can access the Dev UI at /q/dev in Dev Mode to browse endpoints offered by various extensions, conceptually similar to what a Spring Boot actuator might provide.

    +
    +
  8. +
  9. +

    Quarkus is made of a small core on which hundreds of extensions rely. In fact, the power of Quarkus is its extension mechanism. Think of these extensions as your project dependencies. You can add it per dependency manager such as maven or gradle.

    +
    +
    +
    mvn quarkus:list-extensions
    +mvn quarkus:add-extension -Dextensions="groupId:artifactId"
    +(or add it manually to pom.xml)
    +##or
    +gradle list-extensions
    +(add dependency to build.gradle)
    +
    +
    +
    +

    Like Spring Boot, Quarkus also has a vast ecosystem of extensions with commonly-used technologies.

    +
    + + ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Table 18. Example of common Quarkus extensions and the Spring Boot Starters with similar functionality (book: Quarkus for Spring Developer)
    Quarkus extensionSpring Boot Starter

    quarkus-resteasy-jackson

    spring-boot-starter-web

    +

    spring-boot-starter-webflux

    quarkus-resteasy-reactive-jackson

    spring-boot-starter-web

    +

    spring-boot-starter-webflux

    quarkus-hibernate-orm-panache

    spring-boot-starter-data-jpa

    quarkus-hibernate-orm-rest-datapanache

    spring-boot-starter-data-rest

    quarkus-hibernate-reactive-panache

    spring-boot-starter-data-r2dbc

    quarkus-mongodb-panache

    spring-boot-starter-data-mongodb

    +

    spring-boot-starter-data-mongodb-reactive

    quarkus-hibernate-validator

    spring-boot-starter-validation

    quarkus-qpid-jms

    spring-boot-starter-activemq

    quarkus-artemis-jms

    spring-boot-starter-artemis

    quarkus-cache

    spring-boot-starter-cache

    quarkus-redis-client

    spring-boot-starter-data-redis

    +

    spring-boot-starter-data-redis-reactive

    quarkus-mailer

    spring-boot-starter-mail

    quarkus-quartz

    spring-boot-starter-quartz

    quarkus-oidc

    spring-boot-starter-oauth2-resource-server

    quarkus-oidc-client

    spring-boot-starter-oauth2-client

    quarkus-smallrye-jwt

    spring-boot-starter-security

    +
    +

    A full list of all Quarkus extensions can be found here. Furthermore, you can check out the community extensions hosted by Quarkiverse Hub. Quarkus has some extensions for Spring API as well, which is helpful when migrating from Spring to Quarkus.

    +
    + +
    +

    Besides extensions, which are officially maintained by Quarkus team, Quarkus allows adding external libraries too. While extensions can be integrated seamlessly into Quarkus, as they can be processed at build time and be built in native mode with GraalVM, external dependencies might not work out of the box with native compilation. If that is the case, you have to recompile them with the right GraalVM configuration to make them work.

    +
    +
  10. +
  11. +

    Quarkus' design accounted for native compilation by default. A Quarkus native executable starts much faster and utilizes far less memory than a traditional JVM (see our performance comparison between Spring and Quarkus). To get familiar with building native executable, configuring and running it, please check out our Native Image Guide. Be sure to test your code in both JVM and native mode.

    +
  12. +
  13. +

    Both Quarkus and Spring include testing frameworks based on JUnit and Mockito. Thus, by design, Quarkus enables test-driven development by detecting affected tests as changes are made and automatically rerunning them in the background. As such, it gives developers instant feedback, hence improving productivity. To use continuous testing, execute the following command:

    +
    +
    +
    mvn quarkus:dev
    +
    +
    +
  14. +
  15. +

    For the sake of performance optimization, Quarkus avoids reflection as much as possible, favoring static class binding instead. When building a native executable, it analyzes the call tree and removes all the classes/methods/fields that are not used directly. As a consequence, the elements used via reflection are not part of the call tree so they are dead code eliminated (if not called directly in other cases).

    +
    +

    A common example is the JSON library, which typically uses reflection to serialize objects to JSON. If you use it out of the box, you might encounter some errors in native mode. So, be sure to register the elements for reflection explicitly. A How-to is provided by Quarkus Registering For Reflection with practical program snippets.

    +
    +
  16. +
+
+
+

A very good read on the topic is the e-book Quarkus for Spring Developers by Red Hat. Another good source for direct hands-on coding tutorial is Katacoda Quarkus for Spring Boot Developers

+
+ +
+

==Configuration

+
+
+

Quarkus provides a comprehensive guide on configuration here.

+
+
+
External Application Configuration
+
+
Database Configuration
+
+

In Quarkus, Hibernate is provided by the quarkus-hibernate-orm extension. Ensure the extension is added to your pom.xml as follows:

+
+
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-hibernate-orm</artifactId>
+</dependency>
+
+
+
+

Additionally, you have to add the respective JDBC driver extension to your pom.xml. There are different drivers for different database types. See Quarkus Hibernate guide.

+
+
+
+
Database System and Access
+
+

You need to configure which database type you want to use, as well as the location and credentials to access it. The defaults are configured in application.properties. The file should therefore contain the properties as in the given example:

+
+
+
+
quarkus.datasource.jdbc.url=jdbc:postgresql://database.enterprise.com/app
+quarkus.datasource.username=appuser01
+quarkus.datasource.password=************
+quarkus.datasource.db-kind=postgresql
+
+##drop and create the database at startup (use only for local development)
+quarkus.hibernate-orm.database.generation=drop-and-create
+
+
+
+
+
Database Logging
+
+

Add the following properties to application.properties to enable logging of database queries for debugging purposes.

+
+
+
+
quarkus.hibernate-orm.log.sql=true
+quarkus.hibernate-orm.log.format-sql=true
+
+#Logs SQL bind parameters. Setting it to true is obviously not recommended in production.
+quarkus.hibernate-orm.log.bind-parameters=true
+
+
+
+
+
+
Secrets and environment specific configurations
+
+
Environment variables
+
+

There are also some libraries to make Jasypt work with Quarkus, such as Camel Quarkus Jasypt. Unfortunately, this feature only works in JVM mode and not in native mode.

+
+
+

Quarkus supports many credential providers with official extensions, such as HashiCorp Vault.

+
+
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-vault</artifactId>
+</dependency>
+
+
+
+

Quarkus reads configuration values from several locations, ordered by a certain priority. An overview of these can be found at the official Quarkus config guide.

+
+
+

Environment variables have a higher ordinal number and are therefore higher prioritized than e.g. the application.properties file. +So instead of storing secrets in plain text in the configuration files, it is better to use environment variables for critical values to configure the application.

+
+
+

Environment variables also have the advantage that they can be easily integrated into a containerized environment. +When using Kubernetes, the secrets can be stored as Kubernetes secret and then passed to the containers as an environment variable.

+
+
+
+
Custom config sources
+
+

Quarkus provides the possibility to add custom config sources, which can be used to retrieve configuration values from custom locations. +For a description of this feature, see the corresponding Quarkus guide.

+
+
+Config interceptors +
+

Quarkus also allows with the concept of interceptors to hook into the resolution of configuration values. This can be useful when configuration values are encrypted or need to be extracted. +To do this, you have to implement a ConfigSourceInterceptor.

+
+
+
+
public class CustomConfigInterceptor implements ConfigSourceInterceptor {
+
+  @Override
+  public ConfigValue getValue(ConfigSourceInterceptorContext context, String name) {
+
+    ConfigValue configValue = context.proceed(name);
+    if (name.equals("config-value-to-resolve")) {
+      configValue = new ConfigValue.ConfigValueBuilder()
+          .withName(name)
+          .withValue(resolveConfigurationValue(name))
+          .build();
+    }
+
+    return configValue;
+  }
+
+  private String resolveConfigurationValue(String name) {
+    ...
+  }
+}
+
+
+
+

To use the Interceptor, you must register it. To do this, create a file io.smallrye.config.ConfigSourceInterceptor in the folder src/main/resources/META-INF/services and register the interceptor by writing the fully qualified class name to this file.

+
+
+
+
+
Credential encryption
+
+

As for Spring, there are also some libraries that let Jasypt work with Quarkus such as Camel Quarkus Jasypt. Unfortunately, this feature only works in JVM mode and not in native mode, so it is not a suitable approach.

+
+
+

If you want to store usernames or passwords in encrypted form or retrieve them from a custom store, you can use a custom CredentialsProvider for this purpose. +Consider the use case where you want to store your database credentials in encrypted form rather than in plain text. Then you can implement a credentials provider as follows:

+
+
+
+
@ApplicationScoped
+@Unremovable
+public class DatabaseCredentialsProvider implements CredentialsProvider {
+
+  @Override
+  public Map<String, String> getCredentials(String credentialsProviderName) {
+
+    Map<String, String> properties = new HashMap<>();
+    properties.put(USER_PROPERTY_NAME, decryptUsername());
+    properties.put(PASSWORD_PROPERTY_NAME, decryptPassword());
+    return properties;
+  }
+}
+
+
+
+

In the application.properties file you need to set quarkus.datasource.credentials-provider=custom. +For more information about the credentials provider, see the official Quarkus guide.

+
+
+
+
HashiCorp Vault
+
+

For centralized management of secrets and other critical configuration values, you can use HashiCorp Vault as external management tool.

+
+
+

For detailed instructions on how to integrate Vault into your Quarkus application, see the official Quarkus guide.

+
+ +
+

==Quarkus template

+
+
+

Quarkus Code Generator provides many alternative technologies and libraries that can be integrated into a project. Detailed guides on multiple topics can be found here.

+
+
+

Due to the large selection, getting started can be difficult for developers. +In this guide we aim to provide a general suggestion on basic frameworks, libraries, and technologies to make it easy for developers to begin with.

+
+
+

With that said, please take this as a recommendation and not as a compulsion. Depending on your project requirements, you might have to use another stack compared to what is listed below.

+
+
+

If you are new to Quarkus, consider checking out their getting started guide to get an overview of how to create, run, test, as well as package a Quarkus application. Another recommended source to get started is the Katacoda tutorials.

+
+
+
+
Basic templates
+
+
    +
  1. +

    simple REST API (go to code.quarkus.io)

    +
  2. +
  3. +

    simple REST API with monitoring (go to code.quarkus.io)

    +
  4. +
+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 19. Topic-based suggested implementation
TopicDetailSuggested implementationNote

runtime

servlet-container

Undertow

component management

dependency injection

ArC

ArC is based on JSR 365. It also provides interceptors that can be used to implement the same functionality as AOP provides

configuration

SmallRye Config

SmallRye Config is an implementation of Eclipse MicroProfile Config. It also supports YAML configuration files

persistence

OR-mapper

Hibernate ORM, Spring Data JPA

Hibernate ORM is the de facto standard JPA implementation and works perfectly in Quarkus. Quarkus also provides a compatibility layer for Spring Data JPA repositories in the form of the spring-data-jpa extension.

batch

Quarkus JBeret Extension is a non-official extension, which is hosted in the Quarkiverse Hub. It is an implementation of JSR 352.

service

REST services

RESTEasy

RESTEasy is a portable implementation of the new JCP specification JAX-RS JSR-311. It can be documented via Swagger OpenAPI.

async messaging

SmallRye Reactive Messaging, Vert.x EventBus

SmallRye Reactive Messaging is an implementation of the Eclipse MicroProfile Reactive Messaging specification 1.0. You can also utilize SmallRye Reactive Messaging in your Quarkus application to interact with Apache Kafka.

marshalling

RESTEasy Jackson, RESTEasy JSON-B, RESTEasy JAXB, RESTEasy Multipart

cloud

kubernetes

Kubernetes

deployment

Minikube, k3d

Minikube is quite popular when a Kubernetes cluster is needed for development purposes. Quarkus supports this with the quarkus-minikube extension.

logging

framework

JBoss Log Manager and the JBoss Logging facade

Internally, Quarkus uses JBoss Log Manager and the JBoss Logging facade. Logs from other supported Logging API (JBoss Logging, SLF4J, Apache Commons Logging) will be merged.

validation

framework

Hibernate Validator/Bean Validation (JSR 380)

security

authentication & authorization

JWT authentication

Quarkus supports various security mechanisms. Depending on your protocol, identity provider you can choose the necessary extensions such as quarkus-oidc quarkus-smallrye-jwt quarkus-elytron-security-oauth2.

monitoring

framework

Micrometer Metrics, SmallRye Metrics

SmallRye Metrics is an implementation of the MicroProfile Metrics specification. Quarkus also offers various extensions to customize the metrics.

health

SmallRye Health

SmallRye Health is an implementation of the MicroProfile Health specification.

fault tolerance

SmallRye Fault Tolerance

SmallRye Fault Tolerance is an implementation of the MicroProfile Fault Tolerance specification.

+ +
+

==Building a native image

+
+
+

Quarkus provides the ability to create a native executable of the application called native image. +Unlike other Java based deployments, a native image will only run on the architecture and operating system it is compiled for. +Also, no JVM is needed to run the native-image. +This improves the startup time, performance, and efficiency. +A distribution of GraalVM is needed. +You can find the differences between the available distributions here.

+
+
+

To build your quarkus app as a native-image, you have two options that are described in the following sections.

+
+
+
+
+
Build a native executable with GraalVM
+
+

To build a Quarkus application, you can install GraalVM locally on your machine, as described below. +Therefore, read the basic Quarkus application chapter, or clone the example project provided by devonfw. +Follow this chapter from the Quarkus Guide for building a native executable.

+
+
+
Installing GraalVM
+
+

A native image can be created locally or through a container environment. +To create a native image locally, an installed and configured version of GraalVM is needed. You can follow the installation guide from Quarkus or the guide provided by GraalVM for this.

+
+
+
+
+
Build a native executable with GraalVM through container environment
+
+

In order to make the build of native images more portable, you can also use your container environment and run the GraalVM inside a container (typically Docker). +You can simply install Docker with your devonfw-ide distribution, just follow this description Docker with devonfw-ide. +Follow this chapter to build a native Linux image through container runtime.

+
+
+
+
Configuring the native executable
+
+

A list of all configuration properties for a native image can be found here.

+
+ +
+

==Bean mapping with Quarkus

+
+
+

This guide will show bean-mapping, in particular for a Quarkus application. We recommend using MapStruct with a Quarkus application because the other bean-mapper frameworks use Java reflections. They are not supported in GraalVM right now and cause problems when building native applications. MapStruct is a code generator that greatly simplifies the implementation of mappings between Java bean types based on a convention over configuration approach. The mapping code will be generated at compile-time and uses plain method invocations and is thus fast, type-safe, and easy to understand. MapStruct has to be configured to not use Java reflections, which will be shown in this guide.

+
+
+

You can find the official +MapStruct reference guide and a general introduction to MapStruct from Baeldung.

+
+
+
+
MapStruct Dependency
+
+

To get access to MapStruct, we have to add the dependency to our POM.xml:

+
+
+
+
<dependency>
+  <groupId>org.mapstruct</groupId>
+  <artifactId>mapstruct</artifactId>
+  <version>1.4.2.Final</version>
+  <scope>provided</scope>
+</dependency>
+
+
+
+

MapStruct provides an annotation processor that also has to be added to the POM.xml

+
+
+
+
<plugin>
+	<groupId>org.apache.maven.plugins</groupId>
+	<artifactId>maven-compiler-plugin</artifactId>
+	<version>3.8.1</version>
+	<configuration>
+		<source>1.8</source>
+		<target>1.8</target>
+		<annotationProcessorPaths>
+			<path>
+				<groupId>org.mapstruct</groupId>
+				<artifactId>mapstruct-processor</artifactId>
+				<version>1.4.2.Final</version>
+			</path>
+		</annotationProcessorPaths>
+	</configuration>
+</plugin>
+
+
+
+

MapStruct takes advantage of generated getters, setters, and constructors from the Lombok library, follow this Lombok with Mapstruct guide to get Lombok with Mapstruct working.

+
+
+
+
MapStruct Configuration
+
+

We already discussed the benefits of dependency injection. MapStruct supports CDI with EJB, spring, and jsr330. The default retrieving method for a mapper is a factory that uses reflections, which should be avoided. The component model should be set to CDI, as this will allow us to easily inject the generated mapper implementation. The component model can be configured in multiple ways.

+
+
+
Simple Configuration
+
+

Add the attribute componentModel to the @Mapper annotation in the mapper interface.

+
+
+
+
@Mapper(componentModel = "cdi")
+public interface ProductMapper{
+  ...
+}
+
+
+
+
+
MapperConfig Configuration
+
+

Create a shared configuration that can be used for multiple mappers. Implement an interface and use the annotation @MapperConfig for the class. You can define all configurations in this interface and pass the generated MapperConfig.class with the config attribute to the mapper. The MapperConfig also defines the InjectionStrategy and MappingInheritanceStrategy, both of which will be explained later. +A list of all configurations can be found here.

+
+
+
+
@MapperConfig(
+  componentModel = "cdi",
+  mappingInheritanceStrategy = MappingInheritanceStrategy.AUTO_INHERIT_FROM_CONFIG,
+  injectionStrategy = InjectionStrategy.CONSTRUCTOR
+)
+public interface MapperConfig{
+}
+
+
+
+
+
@Mapper( config = MapperConfig.class )
+public interface ProductMapper{
+  ...
+}
+
+
+
+

Any attributes not given via @Mapper will be inherited from the shared configuration MapperConfig.class.

+
+
+
+
Configuration via annotation processor options
+
+

The MapStruct code generator can be configured using annotation processor options. +You can pass the options to the compiler while invoking javac directly, or add the parameters to the maven configuration in the POM.xml

+
+
+

We also use the constructor injection strategy to avoid field injections and potential reflections. This will also simplify our tests.

+
+
+

The option to pass the parameter to the annotation processor in the POM.xml is used and can be inspected in our quarkus reference project.

+
+
+

A list of all annotation processor options can be found here.

+
+
+
+
+
Basic Bean-Mapper Usage
+
+

To use the mapper, we have to implement the mapper interface and the function prototypes with a @Mapper annotation.

+
+
+
+
@Mapper
+public interface ProductMapper {
+
+  ProductDto map(ProductEntity model);
+
+  ProductEntity create(NewProductDto dto);
+}
+
+
+
+

The MapStruct annotation processor will generate the implementation for us under /target/generated-sources/, we just need to tell it that we would like to have a method that accepts a ProductEntity entity and returns a new ProductDto DTO.

+
+
+

The generated mapper implementation will be marked with the @ApplicationScoped annotation and can thus be injected into fields, constructor arguments, etc. using the @Inject annotation:

+
+
+
+
public class ProductRestService{
+
+  @Inject
+  ProductMapper mapper;
+}
+
+
+
+

That is the basic usage of a Mapstruct mapper. In the next chapter, we’ll go into a bit more detail and show some more configurations.

+
+
+
+
Advanced Bean-Mapper Usage
+
+

Let’s assume that our Product entity and the ProductDto have some differently named properties that should be mapped. Add a mapping annotation to map the property type from Product to kind from ProductDto. We define the source name of the property and the target name.

+
+
+
+
@Mapper
+public interface ProductMapper {
+  @Mapping(target = "kind", source = "type")
+  ProductDto map(ProductEntity entity);
+
+  @InheritInverseConfiguration(name = "map" )
+  ProductEntity create(ProductDto dto);
+}
+
+
+
+

For bi-directional mappings, we can indicate that a method shall inherit the inverse configuration of the corresponding method with the @InheritInverseConfiguration. You can omit the name parameter if the result type of method A is the same as the +single-source type of method B and if the single-source type of A is the same as the result type of B. If multiple apply, the attribute name is needed. Specific mappings from the inverse method can (optionally) be overridden, ignored, or set to constants or expressions.

+
+
+

The mappingInheritanceStrategy can be defined as shown in MapStruct Configuration. The existing options can be found here.

+
+
+

A mapped attribute does not always have the same type in the source and target objects. For instance, an attribute may be of type int in the source bean but of type Long in the target bean.

+
+
+

Another example are references to other objects which should be mapped to the corresponding types in the target model. E.g. the class ShoppingCart might have a property content of the type Product which needs to be converted into a ProductDto object when mapping a ShoppingCart object to ShoppingCartDto. For these cases, it’s useful to understand how Mapstruct converts the data types and the object references.

+
+
+

Also, the Chapter for nested bean mappings will help to configure MapStruct to map arbitrarily deep object graphs.

+
+
+

You can study running MapStruct implementation examples given by MapStruct or in our Quarkus reference project

+
+
+
+
+
+
+
+
+1. "Stammdaten" in German. +
+
+2. Whether to use checked exceptions or not is a controversial topic. Arguments for both sides can be found under The Trouble with Checked Exceptions, Unchecked Exceptions — The Controversy, and Checked Exceptions are Evil. The arguments in favor of unchecked exceptions tend to prevail for applications built with devon4j. Therefore, unchecked exceptions should be used for a consistent style. +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-Deploy-&-Run-devonfw-locally.html b/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-Deploy-&-Run-devonfw-locally.html new file mode 100644 index 00000000..3c719b1f --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-Deploy-&-Run-devonfw-locally.html @@ -0,0 +1,416 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Deploy & Run devonfw sample app with Tomcat +This section describes the different ways to run the provided sample application.

+
+
+

Deploy locally inside Eclipse

+
+
+

There are two ways to deploy your application within Eclipse during development time.

+
+
+

Deploy standalone server with embedded Tomcat 8

+
+

The preferred way is the standalone execution via the spring boot framework within eclipse. In this scenario, the spring boot framework runs the sample application using an embedded tomcat server.

+
+
+

Open eclipse and find class com.devonfw.application.mtsj.SpringBootApp. This class configures the application and contains the main-method to start the sample application.

+
+
+

Select the class and click the right mouse button. In the subsequently opened context menu select the entry 'Run as ⇒ Java Application' (or 'Debug as …​').

+
+
+
+eclipse run as +
+
+
+

The application should start. Additional configuration settings are not necessary.

+
+
+

Once started, the devonfw server application runs on http://localhost:8081 with /mythaistar/ as context-root.

+
+ +
+

The context-path and/or server port can be changed by setting the corresponding properties in the application.properties file:

+
+
+
+
server.port=8081
+server.context-path=/mythaistar
+
+
+
+

It is also possible to start the application without eclipse with the following maven command '..\samples\core\>mvn spring-boot:run', but this is not the preferred way. The command must be executed within the core project, otherwise the maven 'spring boot' plugin is not available.

+
+
+

Important: If you are always and ever working with the embedded Tomcat deployment, you may remove the ServletInitializer Configuration from your application. This will speed up startup time by a factor of 2.

+
+
+
+

Deployment to Tomcat 8 server

+
+

Another way uses an internal eclipse Tomcat 8 instance that is provided by an eclipse plugin (see xref:"devonfw-Deploy-&-Run-devonfw-locally"#tomcat-8-eclipse-plugin[Tomcat 8 plugin]). An external Tomcat 8 server is needed to provide a runtime environment (it is contained in the IDE-distributions). All configuration files of the external server remain untouched.

+
+
+

This way of deployment is not at all recommended and therefore only briefly described.

+
+
+

Startup Eclipse and go to the servers view. Add a Tomcat 8 and configure its port to 8081. Add mythaistar to the tomcat. Startup the tomcat. Try to access the service list via:

+
+ +
+
+
+
+

Deployment outside of Eclipse

+
+
+

Furthermore the sample application could be started/tested outside of Eclipse. This approach is usually not preferred because of the higher deployment time and the additional configuration effort, but may be interesting before staging to a test-environment.

+
+
+

Deploy standalone server with embedded Tomcat 8

+
+

Start a commandline window, go to your workspace and into the project devon4j-sample and build the devon4j-sample-core.jar using maven:

+
+
+
+
mvn clean install
+...
+
+
+
+

Find the jar file in the target folder of the oasp-sample-core. This jar is executable. Copy the jar to a folder where you want to run the test (recommended is a test folder inside the devonfw distribution package). Run the server by executing the jar.

+
+
+
+
java -jar devon4j-sample-core.jar
+
+
+
+
+

Deployment to Tomcat 8 server

+
+

You may also deploy your server into an external Tomcat 8 server instance.

+
+
+

Start a commandline window, go to your workspace and into the project devon4j-sample and build the devon4j-sample-server.war using maven:

+
+
+
+
mvn clean install
+
+
+
+

Find the war file in the target folder of the oasp-sample-server. Copy the file to your external tomcat webapps folder. +Run the tomcat (catalina bat).

+
+
+
+
+
+

Tomcat 7 support

+
+
+

Spring Boot 3 is preconfigured with embedded Tomcat 8 (with Java 7). You may however also use Tomcat 7. To switch to Tomcat 7 a tomcat.version property in the pom.xml is not sufficient (this is due to the fact, as oasp.pom does not derive from, but embed spring-boot.pom). It is required to define exclusions of Tomcat 8 jars in the pom.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-code-contribution.html b/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-code-contribution.html new file mode 100644 index 00000000..a7723d8d --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-code-contribution.html @@ -0,0 +1,348 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Code contribution

+
+
+

We are looking forward to your contribution to devon4j. This page describes the few conventions to follow. Please note that this is an open and international project and all content has to be in (American) English language.

+
+
+

For contributions to the code please consider:

+
+
+
    +
  • +

    We are working issue-based so check if there is already an issue in our tracker for the task you want to work on or create a new issue for it.

    +
  • +
  • +

    In case of more complex issues please get involved with the community and ensure that there is a common understanding of what and how to do it. You do not want to invest into something that will later be rejected by the community.

    +
  • +
  • +

    Before you get started ensure that you comment the issue accordingly and you are the person assigned to the issue. If there is already someone else assigned get in contact with him if you still want to contribute to the same issue. You do not want to invest into something that is already done by someone else.

    +
  • +
  • +

    Create a fork of the repository on github to your private github space.

    +
  • +
  • +

    Clone this fork.

    +
  • +
  • +

    Before doing any change choose the branch you want to add your feature to. In most cases this will be the develop branch to add new features. However, if you want to fix a bug, check if an according maintenance branch develop-x.y already exists and switch to that one before.

    +
  • +
  • +

    Then the first step is to create a local feature branch (named by the feature you are planning so `feature/«issue-id»-«keyword») and checkout this branch.

    +
  • +
  • +

    Start your modifications.

    +
  • +
  • +

    Ensure to stick to our coding-conventions.

    +
  • +
  • +

    Check in features or fixes as individual commits associated with an issue using the commit message format:

    +
    +
    +
    #<issueId>: <describe your change>
    +
    +
    +
    +

    Then github will automatically link the commit in the issue. In case you worked on an issue from a different repository (e.g. change in devon4j-sample due to issue in devon4j) we use this commit message format:

    +
    +
    +
    +
    devonfw/<repository>#<issueId>: <describe your change>
    +
    +
    +
    +

    So as an example:

    +
    +
    +
    +
    devonfw/devon4j#1: added REST service for tablemanagement
    +
    +
    +
  • +
  • +

    If you completed your feature (bug-fix, improvement, etc.) use a pull request to give it back to the community.

    +
  • +
  • +

    Your pull request will automatically be checked if it builds correctly (no compile or test errors), can be merged without conflicts, and CLA has been signed. Please ensure to do the required tasks and reworks unless all checks are satisfied.

    +
  • +
  • +

    From here a reviewer should take over and give feedback. In the best case, your contribution gets merged and everything is completed.

    +
  • +
  • +

    In case you should not get feedback for weeks, do not hesitate to ask the community.

    +
  • +
  • +

    If one (typically the reviewer) has to change the base branch (because the wrong develop branch was used, see above) onto which the changes will be merged, one can do the same by following the instructions at here.

    +
  • +
  • +

    see also the documentation guidelines.

    +
  • +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-code-contributions.html b/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-code-contributions.html new file mode 100644 index 00000000..a713a263 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-code-contributions.html @@ -0,0 +1,348 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Code contribution

+
+
+

We are looking forward to your contribution to devon4j. This page describes the few conventions to follow. Please note that this is an open and international project and all content has to be in (American) English language.

+
+
+

For contributions to the code please consider:

+
+
+
    +
  • +

    We are working issue-based so check if there is already an issue in our tracker for the task you want to work on or create a new issue for it.

    +
  • +
  • +

    In case of more complex issues please get involved with the community and ensure that there is a common understanding of what and how to do it. You do not want to invest into something that will later be rejected by the community.

    +
  • +
  • +

    Before you get started ensure that you comment the issue accordingly and you are the person assigned to the issue. If there is already someone else assigned get in contact with him if you still want to contribute to the same issue. You do not want to invest into something that is already done by someone else.

    +
  • +
  • +

    Create a fork of the repository on github to your private github space.

    +
  • +
  • +

    Clone this fork.

    +
  • +
  • +

    Before doing any change choose the branch you want to add your feature to. In most cases this will be the develop branch to add new features. However, if you want to fix a bug, check if an according maintenance branch develop-x.y already exists and switch to that one before.

    +
  • +
  • +

    Then the first step is to create a local feature branch (named by the feature you are planning so `feature-«issue-id»-«keyword»`) and checkout this branch.

    +
  • +
  • +

    Start your modifications.

    +
  • +
  • +

    Ensure to stick to our coding-conventions.

    +
  • +
  • +

    Check in features or fixes as individual commits associated with an issue using the commit message format:

    +
    +
    +
    #<issueId>: <describe your change>
    +
    +
    +
    +

    Then github will automatically link the commit in the issue. In case you worked on an issue from a different repository (e.g. change in devon4j-sample due to issue in devon4j) we use this commit message format:

    +
    +
    +
    +
    devonfw/<repository>#<issueId>: <describe your change>
    +
    +
    +
    +

    So as an example:

    +
    +
    +
    +
    devonfw/devon4j#1: added REST service for tablemanagement
    +
    +
    +
  • +
  • +

    If you completed your feature (bugfix, improvement, etc.) use a pull request to give it back to the community.

    +
  • +
  • +

    Your pull request will automatically be checked if it builds correctly (no compile or test errors), can be merged without conflicts, and CLA has been signed. Please ensure to do the required tasks and reworks until all checks are satisfied.

    +
  • +
  • +

    From here a reviewer should take over and give feedback. In the best case, your contribution gets merged and everything is completed.

    +
  • +
  • +

    In case you should not get feedback for weeks, do not hesitate to ask the community.

    +
  • +
  • +

    If one (typically the reviewer) has to change the base branch (because the wrong develop branch was used, see above) onto which the changes will be merged, one can do the same by following the instructions here.

    +
  • +
  • +

    see also the documentation guidelines.

    +
  • +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-documentation.html b/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-documentation.html new file mode 100644 index 00000000..31af166a --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-documentation.html @@ -0,0 +1,304 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==devonfw Documentation +We are using the github adoc feature to create and maintain the documentation for devonfw. Also the documentation PDFs are generated from these adoc files.

+
+
+

The source of the documentation is always located in the documentation folder of the main git repository (see Code tab and then click on documentation). These files are automatically synchronized to the wiki. This is for pure usability reasons as people typically go to the Wiki tab on github repositories to look for documentation. However, the wiki is a read-only copy of the documentation folder from the Code repo.

+
+
+

Contribution to devon4j documentation

+
+
+

Contributions and improvements to the documentation are welcome. However, you should be aware of the following aspects:

+
+
+
    +
  • +

    Your contributions will become part of the devon4j documentation and is licensed under creative commons (see footer).

    +
  • +
  • +

    If you want to contribute larger changes (beyond fixing a typo or a link) please consider to get in contact with the community (by creating an issue) before getting started. You do not want to write complete chapters and then get your work rejected afterwards.

    +
  • +
  • +

    Please consult the DocGen manual as we are using DocGen +to generate the documentation starting from devon4j-doc.

    +
  • +
+
+
+

If you consider all the aspects above you can start editing the documentation if you have a github-account. For small and simple changes just go to the adoc file in the documentation folder. Then click on the pencil-icon (you have to be signed in). Now github will allow you to edit the raw adoc text. Do your changes and preview them (using the Preview tab). Once complete, commit the changes as a new branch. From there click on compare and pull-request and finally confirm your pull-request. +For larger changes, you should create a fork just as for code-contributions. Often larger changes imply changes to documentation and code.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-ide-setup.html b/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-ide-setup.html new file mode 100644 index 00000000..9dcc5c8e --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-ide-setup.html @@ -0,0 +1,368 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==IDE Setup

+
+
+

This Tutorial explains how to set up the development environment to work on and contribute to devon4j with your Windows computer.

+
+
+

We are using a pre-configured devon-ide for development. To get started follow these steps:

+
+
+
    +
  1. +

    Get a Git client. For Windows use:

    +
    + +
    +
  2. +
  3. +

    Download the IDE

    +
    +
      +
    • +

      If you are a member of Capgemini: download devonfw ide package or the higher integrated devonfw distribution (for devonfw please find the setup guide within the devon-dist).

      +
    • +
    • +

      If you are not a member of Capgemini: We cannot distribute the package. Please consult devon-ide to set up and configure the IDE manually. If you need help, please get in touch.

      +
    • +
    +
    +
  4. +
  5. +

    Choose a project location for your project (e.g. C:\projects\devonfw, referred to with $projectLoc in this setup guides following steps). Avoid long paths and white spaces to prevent trouble. Extract the downloaded ZIP files via Extract Here (e.g. using 7-Zip). Do not use the Windows native ZIP tool to extract as this is not working properly on long paths and filenames.

    +
  6. +
  7. +

    Run the script update-all-workspaces.bat in $projectLoc.

    +
    +
    +update +
    +
    +
    +

    Hint: You can use update-all-workspaces.bat whenever you created a new folder in workspaces to separate different workspaces. This update will create new Eclipse start batches allowing to run a number of Eclipse instances using different workspaces in parallel.

    +
    +
    +

    You should end up having a structure like this in $projectLoc

    +
    +
    +
    +folder structure +
    +
    +
  8. +
  9. +

    Open console.bat and check out the git repositories you need to work on into workspaces\main with the following commands:

    +
    +
    +
    cd workspaces/main
    +git clone --recursive https://github.com/devonfw/my-thai-star.git
    +
    +
    +
    +

    Do another check whether there are files in folder workspaces\main\my-thai-star\!

    +
    +
  10. +
  11. +

    Run the script eclipse-main.bat to start the Eclipse IDE.

    +
  12. +
  13. +

    In Eclipse select File > Import > Maven > Existing Maven Projects and then choose the cloned projects from your workspace by clicking the Browse button and select the folder structure (workspaces\main\my-thai-star\java\MTSJ).

    +
  14. +
  15. +

    Execute the application by starting the SpringBootApp class. Select the class and click the right mouse button. In the context menu select the entry Run as ⇒ Java Application (or Debug as …​). The application starts up and creates log entries in the Eclipse Console Tab.

    +
    +
    +eclipse run as +
    +
    +
    +

    Once started, the backend part of the application runs on http://localhost:8081/mythaistar. This is protected by Spring Security, so an additional frontend needs to be started, that will be able to get the needed access tokens.

    +
    +
  16. +
  17. +

    Now switch within command line to workspaces\main\my-thai-star\angular and run yarn install followed by yarn start. Finally +login with waiter/waiter at http://localhost:4200/restaurant/.

    +
  18. +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-issue-work.html b/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-issue-work.html new file mode 100644 index 00000000..dc429b8a --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-issue-work.html @@ -0,0 +1,373 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Issue creation and resolution

+
+
+

Issue creation

+
+
+

You can create an issue here. Please consider the following points:

+
+
+
    +
  • +

    If your issue is related to a specific building block (like e.g. devon4ng), open an issue on that specific issue tracker. If you’re unsure which building block is causing your problem open an issue on this repository.

    +
  • +
  • +

    Put a label on the issue to mark whether you suggest an enhancement, report an error or something else.

    +
  • +
+
+
+

When reporting errors:

+
+
+
    +
  • +

    Include the version of devon4j you are using.

    +
  • +
  • +

    Include screenshots, stack traces.

    +
  • +
  • +

    Include the behavior you expected.

    +
  • +
  • +

    Using a debugger you might be able to find the cause of the problem and you could be the one to contribute a bug-fix.

    +
  • +
+
+
+
+
+

Preparation for issue resolution

+
+
+

Before you get started working on an issue, check out the following points:

+
+
+
    +
  • +

    try to complete all other issues you are working on before. Only postpone issues where you are stuck and consider giving them back in the queue (backlog).

    +
  • +
  • +

    check that no-one else is already assigned or working on the issue

    +
  • +
  • +

    read through the issue and check that you understand the task completely. Collect any remaining questions and clarify them with the one responsible for the topic.

    +
  • +
  • +

    ensure that you are aware on which branch the issue shall be fixed and start your work in the corresponding workspace.

    +
  • +
  • +

    if you are using git perform your changes on a feature branch.

    +
  • +
+
+
+
+
+

Definition of Done

+
+
+
    +
  • +

    actual issue is implemented (bug fixed, new feature implemented, etc.)

    +
  • +
  • +

    new situation is covered by tests (according to test strategy of the project e.g. for bugs create a unit test first proving the bug and running red, then fix the bug and check that the test gets green, for new essential features create new tests, for GUI features do manual testing)

    +
  • +
  • +

    check the code-style with sonar-qube in eclipse. If there are anomalies in the new or modified code, please rework.

    +
  • +
  • +

    check out the latest code from the branch you are working on (svn update, git pull after git commit)

    +
  • +
  • +

    test that all builds and tests are working (mvn clean install)

    +
  • +
  • +

    commit your code (svn commit, git push) - for all your commits ensure you stick to the conventions for code contributions (see code contribution) and provide proper comments (see coding conventions).

    +
  • +
  • +

    if no milestone was assigned please assign suitable milestone

    +
  • +
  • +

    set the issue as done

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-release-error-fix.html b/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-release-error-fix.html new file mode 100644 index 00000000..d5627f34 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-release-error-fix.html @@ -0,0 +1,325 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Errors & Fix

+
+
+

Unable to find valid certification path to requested target

+
+
+
+ssl certificate error +
+
+
+

Analysis: Possible cause of error due to missing SSL certificate for Maven OSSRH portal in distribution’s JRE.

+
+
+

Fix

+
+

Add SSL certificates for Maven repository in distribution’s JRE.

+
+
+
    +
  1. +

    Open the links below in browser and save their SSL certificates into a file.

    +
  2. +
+
+ + +
+
+save certificate +
+
+
+
    +
  1. +

    Follow the guide below to install certificates to the JRE keystore located at <distribution_root>/software/java/jre/lib/security/cacerts

    +
  2. +
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-release.html b/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-release.html new file mode 100644 index 00000000..013d508d --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/devonfw-release.html @@ -0,0 +1,642 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Creating a Release

+
+
+

This page documents how to create and publish a release of devon4j.

+
+
+

For each release there is a milestone that contains an issue for creating the release itself (the github issue number of that issue is referred to as «issue»). The release version is referred to as «x.y.z».

+
+
+

Releasing the code

+
+
+

To release the code follow these steps.

+
+
+
    +
  • +

    Create a clean clone of the repository:

    +
    +
    +
    git clone https://github.com/devonfw/devon4j.git
    +
    +
    +
  • +
  • +

    In case you want to build a (bug fix) release from a different branch, switch to that branch:

    +
    +
    +
    git checkout -b develop-«x.y» origin/develop-«x.y»
    +
    +
    +
  • +
  • +

    Ensure your branch is up-to-date:

    +
    +
    +
    git pull
    +
    +
    +
  • +
  • +

    Ensure that the result is what you want to release (mvn clean install).

    +
  • +
  • +

    Bump the release version by removing the -SNAPSHOT from devon4j.version property in top-level pom.xml.

    +
  • +
  • +

    Create an annotated tag for your release:

    +
    +
    +
    git tag -a release/x.y.z -m "#«issue»: tagged x.y.z"
    +
    +
    +
    +

    e.g For release 2.5.0 the command would look like

    +
    +
    +
    +
    git tag -a release/2.5.0 -m "#618: tagged 2.5.0"
    +
    +
    +
    +

    where #618 is the issue number created for release itself under release milestone. +You can confirm if the tag is created by listing out the tags with the following command

    +
    +
    +
    +
    git tag
    +
    +
    +
  • +
+
+
+

Configure OSSRH

+
+

For publishing artifacts to OSSRH, we need an OSSRH account with necessary rights for publishing and managing staging repositories. And configure this account in devonfw distribution to create connection and deploy to OSSRH.

+
+
+
    +
  • +

    If you do not already have an account on OSSRH, create an account on the link below +https://issues.sonatype.org/secure/Signup!default.jspa

    +
  • +
  • +

    You need manager access to deploy artifacts to OSSRH. To get it, contact the devonfw administrators for OSSRH.

    +
  • +
  • +

    Open file conf/.m2/settings.xml in your devon distribution (devon-ide) and add a new server with the following details

    +
    +
    +
    <server>
    +   <id>ossrh</id>
    +   <username>«ossrh_username»</username>
    +   <password>«ossrh_password»</password>
    +</server>
    +
    +
    +
    +

    Here «ossrh_username» and «ossrh_password» are the account details used to log in to OSSRH and should have rights to publish artifacts to OSSRH for groupId name com.devonfw (and its children). +Please use password encryption and prevent +storing passwords in plain text. +The id ossrh points to the OSSRH repository for snapshot and release declared in the <distributionManagement> section of the devon4j/pom.xml.

    +
    +
  • +
  • +

    Optionally you may want to explicitly define PGP key via the associated email-address:

    +
    +
    +
    <profile>
    +  <id>devon.ossrh</id>
    +  <activation>
    +    <activeByDefault>true</activeByDefault>
    +  </activation>
    +  <properties>
    +    <gpg.keyname>your.email@address.com</gpg.keyname>
    +  </properties>
    +</profile>
    +
    +
    +
  • +
+
+
+
+

Configure PGP

+
+

Artifacts should be PGP signed before they can be deployed to OSSRH. Artifacts can be signed either by using the command line tool GnuPG or the GUI based tool Gpg4win Kleopatra (preferred). Follow the steps below to sign artifacts using either of the two tools.

+
+
+ +
+
+ + + + + +
+ + +
+

Remember the passphrase set for PGP keys as it will be used later for authentication during signing of artifacts by maven.

+
+
+
+
+

Using GnuPg follow either of the link below

+
+ + +
+

Using Kleopatra follow the link below

+
+ +
+

Exporting PGP key to public key-server

+
+ +
+

Using Kleopatra, click on the certificate entry you want to publish to OpenPGP certificate servers and select File > Publish on Server as shown below. These instructions are as per Kleopatra 3.0.1-gpg4win-3.0.2, for latest versions there might be some variation.

+
+
+
+pgp key publish +
+
+
+
+

Deploy to OSSRH

+
+
    +
  • +

    Go to the root of devon4j project and run following command. Make sure there are no spaces between comma separated profiles.

    +
    +
    +
    mvn clean deploy -P deploy
    +
    +
    +
  • +
  • +

    A pop-up will appear asking for the passphrase for the PGP key. Enter the passphrase and press "OK".

    +
  • +
+
+
+
+pgpkey passphrase +
+
+
+ + + + + +
+ + +
+

If you face the error below, contact one of the people who have access to the repository for access rights.

+
+
+
+
+
+ossrh publish error forbidden +
+
+
+
    +
  • +

    Open OSSRH, login and open staging repositories.

    +
  • +
  • +

    Find your deployment repository as comdevonfw-NNNN and check its Content.

    +
  • +
  • +

    Then click on Close to close the repository and wait a minute.

    +
  • +
  • +

    Refresh the repository and copy the URL.

    +
  • +
  • +

    Create a vote for the release and paste the URL of the staging repository.

    +
  • +
  • +

    After the vote has passed with success go back to OSSRH and click on Release to publish the release and stage to maven central.

    +
  • +
  • +

    Edit the top-level pom.xml and change devon4j.version property to the next planned release version including the -SNAPSHOT suffix.

    +
  • +
  • +

    Commit and push the changes:

    +
    +
    +
    git commit -m "#«issue»: open next snapshot version"
    +git push
    +
    +
    +
  • +
  • +

    In case you build the release from a branch other that develop ensure to follow the next steps. Otherwise you are done here and can continue to the next section. To merge the changes (bug fixes) onto develop do:

    +
    +
    +
    git checkout develop
    +git merge develop-«x.y»
    +
    +
    +
  • +
  • +

    You most probably will have a conflict in the top-level pom.xml. Then resolve this conflict. In any case edit this pom.xml and ensure that it is still pointing to the latest planned SNAPSHOT for the develop branch.

    +
  • +
  • +

    If there are local changes to the top-level pom.xml, commit them:

    +
    +
    +
    git commit -m "#«issue»: open next snapshot version"
    +
    +
    +
  • +
  • +

    Push the changes of your develop branch:

    +
    +
    +
    git push
    +
    +
    +
  • +
+
+
+
+
+
+

Releasing the maven-site

+
+
+
    +
  • +

    Create a new folder for your version in your checkout of devonfw.github.io/devon4j (as «x.y.z»).

    +
  • +
  • +

    Copy the just generated devon4j-doc.pdf into the new release version folder.

    +
  • +
  • +

    Copy the index.html from the previous release to the new release version folder.

    +
  • +
  • +

    Edit the new copy of index.html and replace all occurrences of the version to the new release as well as the release date.

    +
  • +
  • +

    Generate the maven site from the devon4j release checkout (see code release):

    +
    +
    +
    mvn site
    +mvn site:deploy
    +
    +
    +
  • +
  • +

    Review that the maven site is intact and copy it to the new release version folder (from devon4j/target/devon4j/maven to devonfw.github.io/devon4j/«x.y.z»/maven).

    +
  • +
  • +

    Update the link in the devon4j/index.html to the latest stable documentation.

    +
  • +
  • +

    Add, commit and push the new release version folder.

    +
    +
    +
    git add «x.y.z»
    +git commit -m "devonfw/devon4j#«issue»: released documentation"
    +git push
    +
    +
    +
  • +
+
+
+
+
+

Finalize the Release

+
+
+
    +
  • +

    Close the issue of the release.

    +
  • +
  • +

    Close the milestone of the release (if necessary correct the release date).

    +
  • +
  • +

    Ensure that the new release is available in maven central.

    +
  • +
  • +

    Write an announcement for the new release.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-access-control-schema.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-access-control-schema.html new file mode 100644 index 00000000..59b521ce --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-access-control-schema.html @@ -0,0 +1,400 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Access Control Schema

+
+
+

With release 3.0.0 the access-control-schema.xml has been deprecated. You may still use it and find the documentation in this section. However, for new devonfw applications always start with the new approach described in access control config.

+
+
+

Legacy Access Control Schema Documentation

+
+
+

The file access-control-schema.xml is used to define the mapping from groups to permissions (see example from sample app). The general terms discussed above can be mapped to the implementation as follows:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. General security terms related to devon4j access control schema
Termdevon4j-security implementationComment

Permission

AccessControlPermission

Group

AccessControlGroup

When considering different levels of groups of different meanings, declare type attribute, e.g. as "group".

Role

AccessControlGroup

With type="role".

Access Control

AccessControl

Super type that represents a tree of AccessControlGroups and AccessControlPermissions. If a principal "has" a AccessControl he also "has" all AccessControls with according permissions in the spanned sub-tree.

+
+
Example access-control-schema.xml
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<access-control-schema>
+  <group id="ReadMasterData" type="group">
+    <permissions>
+      <permission id="OfferManagement_GetOffer"/>
+      <permission id="OfferManagement_GetProduct"/>
+      <permission id="TableManagement_GetTable"/>
+      <permission id="StaffManagement_GetStaffMember"/>
+    </permissions>
+  </group>
+
+  <group id="Waiter" type="role">
+    <inherits>
+      <group-ref>Barkeeper</group-ref>
+    </inherits>
+    <permissions>
+      <permission id="TableManagement_ChangeTable"/>
+    </permissions>
+  </group>
+  ...
+</access-control-schema>
+
+
+
+

This example access-control-schema.xml declares

+
+
+
    +
  • +

    a group named ReadMasterData, which grants four different permissions, e.g., OfferManagement_GetOffer

    +
  • +
  • +

    a group named Waiter, which

    +
    +
      +
    • +

      also grants all permissions from the group Barkeeper

      +
    • +
    • +

      in addition grants the permission TableManagement_ChangeTable

      +
    • +
    • +

      is marked to be a role for further application needs.

      +
    • +
    +
    +
  • +
+
+
+

The devon4j-security module automatically validates the schema configuration and will throw an exception if invalid.

+
+
+

Unfortunately, Spring Security does not provide differentiated interfaces for authentication and authorization. Thus we have to provide an AuthenticationProvider, which is provided by Spring Security as an interface for authentication and authorization simultaneously. +To integrate the devon4j-security provided access control schema, you can simply inherit your own implementation from the devon4j-security provided abstract class AbstractAccessControlBasedAuthenticationProvider and register your ApplicationAuthenticationProvider as an AuthenticationManager. Doing so, you also have to declare the two Beans AccessControlProvider and AccessControlSchemaProvider, which are a precondition for the AbstractAccessControlBasedAuthenticationProvider.

+
+
+

As state of the art devon4j will focus on role-based authorization to cope with authorization for executing use case of an application. +We will use the JSR250 annotations, mainly @RolesAllowed, for authorizing method calls against the permissions defined in the annotation body. This has to be done for each use-case method in logic layer. Here is an example:

+
+
+
+
public class OrdermanagementImpl extends AbstractComponentFacade implements Ordermanagement {
+
+  @RolesAllowed(Roles.WAITER)
+  public PaginatedListTo<OrderCto> findOrdersByPost(OrderSearchCriteriaTo criteria) {
+
+    return findOrderCtos(criteria);
+  }
+}
+
+
+
+

Now this method can only be called if a user is logged-in that has the role WAITER (as referenced via Roles.WAITER in the example above).

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-access-control.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-access-control.html new file mode 100644 index 00000000..2a2b4e5b --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-access-control.html @@ -0,0 +1,688 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Access-Control +Access-Control is a central and important aspect of Security. It consists of two major aspects:

+
+
+ +
+
+

Authentication

+
+
+

Definition:

+
+
+
+
+

Authentication is the verification that somebody interacting with the system is the actual subject for whom he claims to be.

+
+
+
+
+

The one authenticated is properly called subject or principal. There are two forms of principals you need to distinguish while designing your authentication: human users and autonomous systems. While e.g. a Kerberos/SPNEGO Single-Sign-On makes sense for human users, it is pointless for authenticating autonomous systems. For simplicity, we use the common term user to refer to any principal even though it may not be a human (e.g. in case of a service call from an external system).

+
+
+

To prove the authenticity, the user provides some secret called credentials. The most simple form of credentials is a password.

+
+
+

Implementations

+
+ + + + + +
+ + +Please never implement your own authentication mechanism or credential store. You have to be aware of implicit demands such as salting and hashing credentials, password life-cycle with recovery, expiry, and renewal including email notification confirmation tokens, central password policies, etc. This is the domain of access managers and identity management systems. In a business context you will typically already find a system for this purpose that you have to integrate (e.g. via LDAP). Otherwise you should consider establishing such a system e.g. using keycloak. +
+
+
+

We recommend using JWT when possible. For KISS, also try to avoid combining multiple authentication mechanisms (form based, basic-auth, SAMLv2, OAuth, etc.) within the same application (for different URLs).

+
+
+

For spring, check the Spring Security

+
+
+

For quarkus, check the Quarkus Authentication

+
+
+
+
+
+

Authorization

+
+
+

Definition:

+
+
+
+
+

Authorization is the verification that an authenticated user is allowed to perform the operation he intends to invoke.

+
+
+
+
+

Clarification of terms

+
+

For clarification we also want to give a common understanding of related terms that have no unique definition and consistent usage in the wild.

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Security terms related to authorization
TermMeaning and comment

Permission

A permission is an object that allows a principal to perform an operation in the system. This permission can be granted (give) or revoked (taken away). Sometimes people also use the term right what is actually wrong as a right (such as the right to be free) can not be revoked.

Group

We use the term group in this context for an object that contains permissions. A group may also contain other groups. Then the group represents the set of all recursively contained permissions.

Role

We consider a role as a specific form of group that also contains permissions. A role identifies a specific function of a principal. A user can act in a role.

+

For simple scenarios a principal has a single role associated. In more complex situations a principal can have multiple roles but has only one active role at a time that he can choose out of his assigned roles. For KISS it is sometimes sufficient to avoid this by creating multiple accounts for the few users with multiple roles. Otherwise at least avoid switching roles at run-time in clients as this may cause problems with related states. Simply restart the client with the new role as parameter in case the user wants to switch his role.

Access Control

Any permission, group, role, etc., which declares a control for access management.

+
+
+

Suggestions on the access model

+
+

For the access model we give the following suggestions:

+
+
+
    +
  • +

    Each Access Control (permission, group, role, …​) is uniquely identified by a human readable string.

    +
  • +
  • +

    We create a unique permission for each use-case.

    +
  • +
  • +

    We define groups that combine permissions to typical and useful sets for the users.

    +
  • +
  • +

    We define roles as specific groups as required by our business demands.

    +
  • +
  • +

    We allow to associate users with a list of Access Controls.

    +
  • +
  • +

    For authorization of an implemented use case we determine the required permission. Furthermore, we determine the current user and verify that the required permission is contained in the tree spanned by all his associated Access Controls. If the user does not have the permission we throw a security exception and thus abort the operation and transaction.

    +
  • +
  • +

    We avoid negative permissions, that is a user has no permission by default and only those granted to him explicitly give him additional permission for specific things. Permissions granted can not be reduced by other permissions.

    +
  • +
  • +

    Technically we consider permissions as a secret of the application. Administrators shall not fiddle with individual permissions but grant them via groups. So the access management provides a list of strings identifying the Access Controls of a user. The individual application itself contains these Access Controls in a structured way, whereas each group forms a permission tree.

    +
  • +
+
+
+
+

Naming conventions

+
+

As stated above each Access Control is uniquely identified by a human readable string. This string should follow the naming convention:

+
+
+
+
«app-id».«local-name»
+
+
+
+

For Access Control Permissions the «local-name» again follows the convention:

+
+
+
+
«verb»«object»
+
+
+
+

The segments are defined by the following table:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2. Segments of Access Control Permission ID
SegmentDescriptionExample

«app-id»

Is a unique technical but human readable string of the application (or microservice). It shall not contain special characters and especially no dot or whitespace. We recommend to use lower-train-case-ascii-syntax. The identity and access management should be organized on enterprise level rather than application level. Therefore permissions of different apps might easily clash (e.g. two apps might both define a group ReadMasterData but some user shall get this group for only one of these two apps). Using the «app-id». prefix is a simple but powerful namespacing concept that allows you to scale and grow. You may also reserve specific «app-id»s for cross-cutting concerns that do not actually reflect a single app e.g to grant access to a geographic region.

shop

«verb»

The action that is to be performed on «object». We use Find for searching and reading data. Save shall be used both for create and update. Only if you really have demands to separate these two you may use Create in addition to Save. Finally, Delete is used for deletions. For non CRUD actions you are free to use additional verbs such as Approve or Reject.

Find

«object»

The affected object or entity. Shall be named according to your data-model

Product

+
+

So as an example shop.FindProduct will reflect the permission to search and retrieve a Product in the shop application. The group shop.ReadMasterData may combine all permissions to read master-data from the shop. However, also a group shop.Admin may exist for the Admin role of the shop application. Here the «local-name» is Admin that does not follow the «verb»«object» schema.

+
+
+
+

devon4j-security

+
+

The module devon4j-security provides ready-to-use code based on spring-security that makes your life a lot easier.

+
+
+
+access-control +
+
Figure 1. devon4j Security Model
+
+
+

The diagram shows the model of devon4j-security that separates two different aspects:

+
+
+
    +
  • +

    The Identity- and Access-Management is provided by according products and typically already available in the enterprise landscape (e.g. an active directory). It provides a hierarchy of primary access control objects (roles and groups) of a user. An administrator can grant and revoke permissions (indirectly) via this way.

    +
  • +
  • +

    The application security defines a hierarchy of secondary access control objects (groups and permissions). This is done by configuration owned by the application (see following section). The "API" is defined by the IDs of the primary access control objects that will be referenced from the Identity- and Access-Management.

    +
  • +
+
+
+
+

Access Control Config

+
+

In your application simply extend AccessControlConfig to configure your access control objects as code and reference it from your use-cases. An example config may look like this:

+
+
+
+
@Named
+public class ApplicationAccessControlConfig extends AccessControlConfig {
+
+  public static final String APP_ID = "MyApp";
+
+  private static final String PREFIX = APP_ID + ".";
+
+  public static final String PERMISSION_FIND_OFFER = PREFIX + "FindOffer";
+
+  public static final String PERMISSION_SAVE_OFFER = PREFIX + "SaveOffer";
+
+  public static final String PERMISSION_DELETE_OFFER = PREFIX + "DeleteOffer";
+
+  public static final String PERMISSION_FIND_PRODUCT = PREFIX + "FindProduct";
+
+  public static final String PERMISSION_SAVE_PRODUCT = PREFIX + "SaveProduct";
+
+  public static final String PERMISSION_DELETE_PRODUCT = PREFIX + "DeleteProduct";
+
+  public static final String GROUP_READ_MASTER_DATA = PREFIX + "ReadMasterData";
+
+  public static final String GROUP_MANAGER = PREFIX + "Manager";
+
+  public static final String GROUP_ADMIN = PREFIX + "Admin";
+
+  public ApplicationAccessControlConfig() {
+
+    super();
+    AccessControlGroup readMasterData = group(GROUP_READ_MASTER_DATA, PERMISSION_FIND_OFFER, PERMISSION_FIND_PRODUCT);
+    AccessControlGroup manager = group(GROUP_MANAGER, readMasterData, PERMISSION_SAVE_OFFER, PERMISSION_SAVE_PRODUCT);
+    AccessControlGroup admin = group(GROUP_ADMIN, manager, PERMISSION_DELETE_OFFER, PERMISSION_DELETE_PRODUCT);
+  }
+}
+
+
+
+
+

Configuration on Java Method level

+
+

In your use-case you can now reference a permission like this:

+
+
+
+
@Named
+public class UcSafeOfferImpl extends ApplicationUc implements UcSafeOffer {
+
+  @Override
+  @RolesAllowed(ApplicationAccessControlConfig.PERMISSION_SAVE_OFFER)
+  public OfferEto save(OfferEto offer) { ... }
+  ...
+}
+
+
+
+
+

JEE Standard

+
+

Role-based Access Control (RBAC) is commonly used for authorization. +JSR 250 defines a number of common annotations to secure your application.

+
+
+
    +
  • +

    javax.annotation.security.PermitAll specifies that no access control is required to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.DenyAll specifies that no access controls are allowed to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.RolesAllowed specifies that only a list of access controls are allowed to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.DeclareRoles defines roles for security checking.

    +
  • +
  • +

    javax.annotation.security.RunAs specifies the RunAs role for the given components.

    +
  • +
+
+
+

@PermitAll, @DenyAll, and @RolesAllowed annotations can be applied to both class and method. +A method-level annotation will override the behaviour of a class-level annotation. Using multiple annotations of those 3 is not valid.

+
+
+
+
// invalid
+@PermitAll
+@DenyAll
+public String foo()
+
+// invalid and compilation fails
+@RolesAllowed("admin")
+@RolesAllowed("user")
+public String bar()
+
+// OK
+@RolesAllowed("admin", "user")
+public String bar()
+
+
+
+

Please note that when specifying multiple arguments to @RolesAllowed those are combined with OR (and not with AND). +So if the user has any of the specified access controls, he will be able to access the method.

+
+
+

As a best practice avoid specifying string literals to @RolesAllowed. +Instead define a class with all access controls as constants and reference them from there. +This class is typically called ApplicationAccessControlConfig in devonfw.

+
+
+

In many complicated cases where @PermitAll @DenyAll @RolesAllowed are insufficient e.g. a method should be accessed by a user in role A and not in role B at the same time, you have to verify the user role directly in the method. You can use SecurityContext class to get further needed information.

+
+
+

Spring

+
+

Spring Security also supports authorization on method level. To use it, you need to add the spring-security-config dependency. If you use Spring Boot, the dependency spring-boot-starter-security already includes spring-security-config. Then you can configure as follows:

+
+
+
    +
  • +

    prePostEnabled property enables Spring Security pre/post annotations. @PreAuthorize and @PostAuthorize annotations provide expression-based access control. See more here

    +
  • +
  • +

    securedEnabled property determines if the @Secured annotation should be enabled. @Secured can be used similarly to @RolesAllowed.

    +
  • +
  • +

    jsr250Enabled property allows us to use the JSR-250 annotations such as @RolesAllowed.

    +
  • +
+
+
+
+
@Configuration
+@EnableGlobalMethodSecurity(
+  prePostEnabled = true,
+  securedEnabled = true,
+  jsr250Enabled = true)
+public class MethodSecurityConfig
+  extends GlobalMethodSecurityConfiguration {
+}
+
+
+
+

A further read about the whole concept of Spring Security Authorization can be found here.

+
+
+
+

Quarkus

+
+

Quarkus comes with built-in security to allow for RBAC based on the common security annotations @RolesAllowed, @DenyAll, @PermitAll on REST endpoints and CDI beans. Quarkus also provides the io.quarkus.security.Authenticated annotation that will permit any authenticated user to access the resource (equivalent to @RolesAllowed("**")).

+
+
+
+
+

Data-based Permissions

+ +
+
+

Access Control Schema (deprecated)

+
+

The access-control-schema.xml approach is deprecated. The documentation can still be found in access control schema.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-accessibility.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-accessibility.html new file mode 100644 index 00000000..1cc0464c --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-accessibility.html @@ -0,0 +1,281 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ + +
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-aop.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-aop.html new file mode 100644 index 00000000..7686b6ee --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-aop.html @@ -0,0 +1,365 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Aspect Oriented Programming (AOP)

+
+
+

AOP is a powerful feature for cross-cutting concerns. However, if used extensive and for the wrong things an application can get unmaintainable. Therefore we give you the best practices where and how to use AOP properly.

+
+
+

AOP Key Principles

+
+
+

We follow these principles:

+
+
+
    +
  • +

    We use spring AOP based on dynamic proxies (and fallback to cglib).

    +
  • +
  • +

    We avoid AspectJ and other mighty and complex AOP frameworks whenever possible

    +
  • +
  • +

    We only use AOP where we consider it as necessary (see below).

    +
  • +
+
+
+
+
+

AOP Usage

+
+
+

We recommend to use AOP with care but we consider it established for the following cross cutting concerns:

+
+
+ +
+
+
+
+

AOP Debugging

+
+
+

When using AOP with dynamic proxies the debugging of your code can get nasty. As you can see by the red boxes in the call stack in the debugger there is a lot of magic happening while you often just want to step directly into the implementation skipping all the AOP clutter. When using Eclipse this can easily be achieved by enabling step filters. Therefore you have to enable the feature in the Eclipse tool bar (highlighted in red).

+
+
+
+AOP debugging +
+
+
+

In order to properly make this work you need to ensure that the step filters are properly configured:

+
+
+
+Step Filter Configuration +
+
+
+

Ensure you have at least the following step-filters configured and active:

+
+
+
+
ch.qos.logback.*
+com.devonfw.module.security.*
+java.lang.reflect.*
+java.security.*
+javax.persistence.*
+org.apache.commons.logging.*
+org.apache.cxf.jaxrs.client.*
+org.apache.tomcat.*
+org.h2.*
+org.springframework.*
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-api-first.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-api-first.html new file mode 100644 index 00000000..de17472e --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-api-first.html @@ -0,0 +1,474 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==API first development guide

+
+
+

Cloud native promotes the use of microservices, which are loosely coupled and self-contained. These services communicate with each other using a well-defined interface or API, and no consumer needs to be aware of any implementation details of the provider. There could even be multiple providers of the same API, or we can decide to swap existing implementation for a new one, without disrupting our clients - all because we adhere to a well defined API. +This guide focuses on HTTP interfaces following RESTful design principles.

+
+
+

API first strategy

+
+
+

API first strategy treats APIs as first class citizens in the delivery model. +The APIs are modeled/designed first (usually as OpenAPI specification), often in a collaborative process between providers and consumers, and only once the APIs are defined do we start with development of the provider service. +This requires a bit more time upfront, but also provides opportunity to think about the behaviour of the system before it gets implemented. Several other advantages of this approach:

+
+
+
    +
  • +

    provider and client implementation can run in parallel - we have the contract, client does not need to wait for provider to finish implementation

    +
  • +
  • +

    support for automation and great ecosystem - we can easily generate stubs and clients for most languages/frameworks, generate documentation and API catalogues using one of the many opensource and commercial tools

    +
  • +
+
+
+
+
+

API first provider

+
+
+

After careful planning, we defined our future API. Now we would like to implement a provider of this API. +We could manually create all the JAX-RS endpoints based on the schema and then test whether the actually provided interface conforms with the API schema. However, it is an error prone and laborious process, especially if the API gets big. +Luckily, we can use great open source tooling to generate JAX-RS interfaces that conform to 100% with the schema, and then we simply implement them.

+
+
+

There are many open source tools, that allow code generation from OpenAPI schema files, for example OpenAPI Generator or Swagger codegen. Both are based on Java and even provide maven plugins to integrate them in our build.

+
+
+

OpenAPI Generator maven plugin

+
+

If we already have our backend maven project, using the maven plugin will be easiest for us. Lets define a new maven profile and add the OpenAPI generator plugin:

+
+
+
+
<profile>
+  <id>apigen</id>
+  <activation><activeByDefault>true</activeByDefault></activation>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.openapitools</groupId>
+        <artifactId>openapi-generator-maven-plugin</artifactId>
+        <version>5.1.0</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>generate</goal>
+            </goals>
+            <configuration>
+              <!-- input spec can be a url, or a local file -->
+              <inputSpec>https://raw.githubusercontent.com/OAI/OpenAPI-Specification/main/examples/v3.0/petstore.yaml</inputSpec>
+              <!-- <inputSpec>${project.basedir}/src/main/resources/api.yaml</inputSpec> -->
+              <generatorName>jaxrs-spec</generatorName>
+              <apiPackage>com.petclinic.api</apiPackage>
+              <modelPackage>com.petclinic.api.model</modelPackage>
+              <!-- <library>quarkus</library> -->
+              <configOptions>
+                <sourceFolder>src/gen/java/main</sourceFolder>
+                <useSwaggerAnnotations>false</useSwaggerAnnotations>
+                <useTags>true</useTags>
+                <interfaceOnly>true</interfaceOnly>
+                <generatePom>false</generatePom>
+              </configOptions>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</profile>
+
+
+
+

OpenAPI generator project includes support for many languages and frameworks - in our case we picked jaxrs-spec server generator. +Many of the generator templates have further sub-generators, for example for your specific framework. We could, for example use quarkus sub-generator to bootstrap a full working Quarkus maven project, but in our case, we want to have more control over the process, therefore we opt for standard jaxrs template and pass some additional configuration:

+
+
+
    +
  • +

    apiPackage = generated package name for our endpoint interfaces

    +
  • +
  • +

    modelPackage = generated package name for our API models (DTOs)

    +
  • +
  • +

    sourceFolder = where do we want to output the generated files, relative to the output folder - if we regenerate the files with every build, then it is recommended to put them in ${project.build.directory}/generated-sources/openapi (default, can be changed via output plugin attribute). Final location of our generated Java files will then be ${project.build.directory}/generated-sources/openapi/src/gen/java/main

    +
  • +
  • +

    useTags = group the generated endpoints by schema tag attribute

    +
  • +
  • +

    interfaceOnly = we want to implement the endpoint logic ourselves, so we only want JAX-RS interfaces

    +
  • +
  • +

    generatePom = we are generating the files into an existing project, so we don't want to generate an additional maven pom

    +
  • +
+
+
+

Let’s run the plugin now using mvn clean compile. If everything went well, we end up with something like this:

+
+
+
+Generated sources by OpenAPI generator +
+
Figure 1. Generated sources by OpenAPI generator
+
+
+

Depending on the configuration of your code generator, you might need to add some additional dependencies to your pom.xml, for example for bean validation or Jackson annotations.

+
+
+

The OpenAPI generator has many more configuration options that are outside the scope of this guide. You can find full documentation of the codegen plugin in the projects GitHub repository.

+
+
+
+

Generate once vs generate always

+
+

Depending on our needs, we may either want to generate the interfaces and models once and afterwards copy them to our project as general source code files, or treat them as immutable generated assets, that we generate anew with every build. +Both scenarios have their pros and cons, and you’ll need to find out what best suits your project. In the example above, we use a profile with activeByDefault=true, which will cause the generator to run with every build. The generated files will be included as sources in our project, so we can import them in any other java class without issues.

+
+
+

In case you only want to generate your API resources once and version them afterwards in SCM, simply run the generator, outputting to some temporary location, then, copy them to src/main/java and you are done. Be careful if you manually modify the generated files afterwards and you want to re-generate them after an API schema update, you will lose any manual changes.

+
+
+
+

Implement the generated interfaces

+
+

To implement the generated interfaces, we simply create an impl class - rest controller bean - that implements the interface from our gen package:

+
+
+
+
package org.acme.rest.controller;
+
+import java.util.List;
+
+import com.petclinic.api.PetsApi;
+import com.petclinic.api.model.Pet;
+
+public class PetClinicController implements PetsApi {
+
+    @Override
+    public void createPets() {
+        // TODO Auto-generated method stub
+
+    }
+
+    @Override
+    public List<Pet> listPets(Integer limit) {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pet showPetById(String petId) {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+}
+
+
+
+

And now we can invoke our API endpoint as usual: http://localhost:8080/pets - because /pets is the @Path annotation value in the generated PetsApi interface.

+
+
+
+

Serving API docs / Swagger-UI

+
+

A common requirement is that our backend API provider should also provide an endpoint with the schema or a Swagger-UI application with that schema. +In our example, we decided to generate the JAX-RS interface without Swagger/OpenAPI annotations, therefore the schema can not be re-constructed 1:1 from our code (missing method documentation, error handling, etc.).

+
+
+

When having a Quarkus application and using the Smallrye OpenAPI extension, we can tell Quarkus to serve a static version of the API as our openapi schema (the same file we used to generate the interfaces and models) and to disable the auto-generating of the schema. Follow the Quarkus OpenAPI documentation for more info.

+
+
+
+

Advanced topics

+
+

In some cases, we may have specific requirements or API extensions that are not supported by the existing generators. OpenAPI generator project allows us to define a custom generator, or to extend the existing generator templates. We can also selectively generate a subset of the models or API endpoints, generate test code and much more.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-apm.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-apm.html new file mode 100644 index 00000000..a1780684 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-apm.html @@ -0,0 +1,326 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Application Performance Management

+
+
+

This guide gives hints how to manage, monitor and analyse performance of Java applications.

+
+
+

Temporary Analysis

+
+
+

If you are facing performance issues and want to do a punctual analysis we recommend you to use glowroot. It is ideal in cases where monitoring in your local development environment is suitable. However, it is also possible to use it in your test environment. It is entirely free and open-source. Still it is very powerful and helps to trace down bottlenecks. To get a first impression of the tool take a look at the demo.

+
+
+

JEE/WTP

+
+

In case you are forced to use a JEE application server and want to do a temporary analysis you can double click your server instance from the servers view in Eclipse and click on the link Open launch configuration in order to add the -javaagent JVM option.

+
+
+
+
+
+

Regular Analysis

+
+
+

In case you want to manage application performance regularly we recommend to use JavaMelody that can be integrated into your application. More information on javamelody is available on the JavaMelody Wiki

+
+
+
+
+

Alternatives

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-auditing.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-auditing.html new file mode 100644 index 00000000..6e664761 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-auditing.html @@ -0,0 +1,357 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Auditing

+
+
+

For database auditing we use hibernate envers. If you want to use auditing ensure you have the following dependency in your pom.xml:

+
+
+
spring
+
+
<dependency>
+  <groupId>com.devonfw.java.modules</groupId>
+  <artifactId>devon4j-jpa-envers</artifactId>
+</dependency>
+
+
+
+
quarkus
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-hibernate-envers</artifactId>
+</dependency>
+
+
+
+ + + + + +
+ + +The following part applies only to spring applications. At this point, the Quarkus extension does not provide any additional configurations. For Quarkus applications, simply use the @Audited annotation to enable auditing for an entity class, as described a few lines below or seen here. +
+
+
+

Make sure that entity manager also scans the package from the devon4j-jpa[-envers] module in order to work properly. And make sure that correct Repository Factory Bean Class is chosen.

+
+
+
+
@EntityScan(basePackages = { "«my.base.package»" }, basePackageClasses = { AdvancedRevisionEntity.class })
+...
+@EnableJpaRepositories(repositoryFactoryBeanClass = GenericRevisionedRepositoryFactoryBean.class)
+...
+public class SpringBootApp {
+  ...
+}
+
+
+
+

Now let your [Entity]Repository extend from DefaultRevisionedRepository instead of DefaultRepository.

+
+
+

The repository now has a method getRevisionHistoryMetadata(id) and getRevisionHistoryMetadata(id, boolean lazy) available to get a list of revisions for a given entity and a method find(id, revision) to load a specific revision of an entity with the given ID or getLastRevisionHistoryMetadata(id) to load the last revision. +To enable auditing for an entity simply place the @Audited annotation on your entity and all entity classes it extends from.

+
+
+
+
@Entity(name = "Drink")
+@Audited
+public class DrinkEntity extends ProductEntity implements Drink {
+...
+
+
+
+

When auditing is enabled for an entity an additional database table is used to store all changes to the entity table and a corresponding revision number. This table is called <ENTITY_NAME>_AUD per default. Another table called REVINFO is used to store all revisions. Make sure that these tables are available. They can be generated by hibernate with the following property (only for development environments).

+
+
+
+
  database.hibernate.hbm2ddl.auto=create
+
+
+
+

Another possibility is to put them in your database migration scripts like so.

+
+
+
+
CREATE CACHED TABLE PUBLIC.REVINFO(
+  id BIGINT NOT NULL generated by default as identity (start with 1),
+  timestamp BIGINT NOT NULL,
+  user VARCHAR(255)
+);
+...
+CREATE CACHED TABLE PUBLIC.<TABLE_NAME>_AUD(
+    <ALL_TABLE_ATTRIBUTES>,
+    revtype TINYINT,
+    rev BIGINT NOT NULL
+);
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-batch-layer.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-batch-layer.html new file mode 100644 index 00000000..303f13ab --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-batch-layer.html @@ -0,0 +1,686 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Batch Layer

+
+
+

We understand batch processing as a bulk-oriented, non-interactive, typically long running execution of tasks. For simplicity, we use the term "batch" or "batch job" for such tasks in the following documentation.

+
+
+

devonfw uses Spring Batch as a batch framework.

+
+
+

This guide explains how Spring Batch is used in devonfw applications. It focuses on aspects which are special to devonfw. If you want to learn about Spring Batch, you should refer to Spring's reference documentation.

+
+
+

There is an example of a simple batch implementation in the my-thai-star batch module.

+
+
+

In this chapter, we will describe the overall architecture (especially concerning layering) and how to administer batches.

+
+
+

Layering

+
+
+

Batches are implemented in the batch layer. The batch layer is responsible for batch processes, whereas the business logic is implemented in the logic layer. Compared to the service layer, you may understand the batch layer just as a different way of accessing the business logic. +From a component point of view, each batch is implemented as a subcomponent in the corresponding business component. +The business component is defined by the business architecture.

+
+
+

Let’s make an example for that. The sample application implements a batch for exporting ingredients. This ingredientExportJob belongs to the dishmanagement business component. +So the ingredientExportJob is implemented in the following package:

+
+
+
+
<basepackage>.dishmanagement.batch.impl.*
+
+
+
+

Batches should invoke use cases in the logic layer for doing their work. +Only "batch specific" technical aspects should be implemented in the batch layer.

+
+
+
+
+

Example: +For a batch, which imports product data from a CSV file, this means that all code for actually reading and parsing the CSV input file is implemented in the batch layer. +The batch calls the use case "create product" in the logic layer for actually creating the products for each line read from the CSV input file.

+
+
+
+
+

Directly accessing data access layer

+
+

In practice, it is not always appropriate to create use cases for every bit of work a batch should do. Instead, the data access layer can be used directly. +An example for that is a typical batch for data retention which deletes out-of-time data. +Often, deleting outdated data is done by invoking a single SQL statement. It is appropriate to implement that SQL in a Repository or DAO method and call this method directly from the batch. +But be careful: this pattern is a simplification which could lead to business logic scattered across different layers, which reduces the maintainability of your application. +It is a typical design decision you have to make when designing your specific batches.

+
+
+
+
+
+

Project structure and packaging

+
+
+

Batches will be implemented in a separate Maven module to keep the application core free of batch dependencies. The batch module includes a dependency on the application core-module to allow the reuse of the use cases, DAOs etc. +Additionally the batch module has dependencies on the required spring batch jars:

+
+
+
+
  <dependencies>
+
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>mtsj-core</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter-batch</artifactId>
+    </dependency>
+
+  </dependencies>
+
+
+
+

To allow an easy start of the batches from the command line it is advised to create a bootified jar for the batch module by adding the following to the pom.xml of the batch module:

+
+
+
+
  <build>
+    <resources>
+      <resource>
+        <directory>src/main/resources</directory>
+        <filtering>true</filtering>
+      </resource>
+    </resources>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <configuration>
+          <excludes>
+            <exclude>config/application.properties</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+      <!-- Create bootified jar for batch execution via command line.
+           Your applications spring boot app is used as main-class.
+       -->
+      <plugin>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-maven-plugin</artifactId>
+        <configuration>
+          <mainClass>com.devonfw.application.mtsj.SpringBootApp</mainClass>
+          <classifier>bootified</classifier>
+        </configuration>
+        <executions>
+          <execution>
+            <goals>
+              <goal>repackage</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+
+
+
+
+

Implementation

+
+
+

Most of the details about implementation of batches is described in the spring batch documentation. +There is nothing special about implementing batches in devonfw. You will find an easy example in my-thai-star.

+
+
+
+
+

Starting from command line

+
+
+

Devonfw advises to start batches via command line. This is most common to many ops teams and allows easy integration in existing schedulers. In general batches are started with the following command:

+
+
+
+
java -jar <app>-batch-<version>-bootified.jar --spring.main.web-application-type=none --spring.batch.job.enabled=true --spring.batch.job.names=<myJob> <params>
+
+
+ ++++ + + + + + + + + + + + + + + + + + + + + +
ParameterExplanation

--spring.main.web-application-type=none

This disables the web app (e.g. Tomcat)

--spring.batch.job.names=<myJob>

This specifies the name of the job to run. If you leave this out, ALL jobs will be executed, which probably does not make too much sense.

<params>

(Optional) additional parameters which are passed to your job

+
+

This will launch your normal spring boot app, disables the web application part and runs the designated job via Spring Boots org.springframework.boot.autoconfigure.batch.JobLauncherCommandLineRunner.

+
+
+
+
+

Scheduling

+
+
+

In the real world, scheduling of batches is not as simple as it might look at first.

+
+
+
    +
  • +

    Multiple batches have to be executed in order to achieve complex tasks. If one of those batches fails the further execution has to be stopped and operations should be notified for example.

    +
  • +
  • +

    Input files or those created by batches have to be copied from one node to another.

    +
  • +
  • +

    Scheduling batch executing could get complex easily (quarterly jobs, run job on first workday of a month, …​)

    +
  • +
+
+
+

For devonfw we propose the batches themselves should not mess around with details of scheduling. +Likewise your application should not do so. This complexity should be externalized to a dedicated batch administration service or scheduler. +This service could be a complex product or a simple tool like cron. We propose Rundeck as an open source job scheduler.

+
+
+

This gives full control to operations to choose the solution which fits best into existing administration procedures.

+
+
+
+
+

Handling restarts

+
+
+

If you start a job with the same parameters set after a failed run (BatchStatus.FAILED) a restart will occur. +In many cases your batch should then not reprocess all items it processed in the previous runs. +For that you need some logic to start at the desired offset. There are different ways to implement such logic:

+
+
+
    +
  • +

    Marking processed items in the database in a dedicated column

    +
  • +
  • +

    Write all IDs of items to process in a separate table as an initialization step of your batch. You can then delete IDs of already processed items from that table during the batch execution.

    +
  • +
  • +

    Storing restart information in springs ExecutionContext (see below)

    +
  • +
+
+
+

Using spring batch ExecutionContext for restarts

+
+

By implementing the ItemStream interface in your ItemReader or ItemWriter you may store information about the batch progress in the ExecutionContext. You will find an example for that in the CountJob in My Thai Star.

+
+
+

Additional hint: It is important that the bean definition methods of your ItemReader/ItemWriter have return types implementing ItemStream (and not just ItemReader or ItemWriter alone). For that, the ItemStreamReader and ItemStreamWriter interfaces are provided.

+
+
+
+
+
+

Exit codes

+
+
+

Your batches should create a meaningful exit code to allow reaction to batch errors e.g. in a scheduler. +For that, spring batch automatically registers an org.springframework.boot.autoconfigure.batch.JobExecutionExitCodeGenerator. To make this mechanism work, the main class of your spring boot app has to propagate this exit code to the JVM:

+
+
+
+
@SpringBootApplication
+public class SpringBootApp {
+
+  public static void main(String[] args) {
+    if (Arrays.stream(args).anyMatch((String e) -> e.contains("--spring.batch.job.names"))) {
+      // if executing batch job, explicitly exit jvm to report error code from batch
+      System.exit(SpringApplication.exit(SpringApplication.run(SpringBootApp.class, args)));
+    } else {
+      // normal web application start
+      SpringApplication.run(SpringBootApp.class, args);
+    }
+  }
+}
+
+
+
+
+
+

Stop batches and manage batch status

+
+
+

Spring batch uses several database tables to store the status of batch executions. +Each execution may have a different status. +You may use this mechanism to gracefully stop batches. +Additionally, in some edge cases (batch process crashed) the execution status may be in an undesired state. +E.g. the state will be running, even though the process crashed some time ago. +For those cases you have to change the status of the execution in the database.

+
+
+

CLI-Tool

+
+

Devonfw provides an easy-to-use CLI tool to manage the execution status of your jobs. +The tool is implemented in the devonfw module devon4j-batch-tool. It will provide a runnable jar, which may be used as follows:

+
+
+
+
List names of all previous executed jobs
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar jobs list

+
+
Stop job named 'countJob'
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar jobs stop countJob

+
+
Show help
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar

+
+
+
+
+

As you can see, each invocation includes the JDBC connection string to your database. +This means that you have to make sure that the corresponding DB driver is in the classpath (the prepared jar only contains H2).

+
+
+
+
+
+

Authentication

+
+
+

Most business applications incorporate authentication and authorization. +Your spring boot application will implement some kind of security, e.g. integrated login with username+password or in many cases authentication via an existing IAM. +For security reasons your batch should also implement an authentication mechanism and obey the authorization implemented in your application (e.g. via @RolesAllowed).

+
+
+

Since there are many different authentication mechanism we cannot provide an out-of-the-box solution in devonfw, but we describe a pattern how this can be implemented in devonfw batches.

+
+
+

We suggest to implement the authentication in a Spring Batch tasklet, which runs as the first step in your batch. This tasklet will do all of the work which is required to authenticate the batch. A simple example which authenticates the batch "locally" via username and password could be implemented like this:

+
+
+
+
@Named
+public class SimpleAuthenticationTasklet implements Tasklet {
+
+  @Override
+  public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
+
+    String username = chunkContext.getStepContext().getStepExecution().getJobParameters().getString("username");
+    String password = chunkContext.getStepContext().getStepExecution().getJobParameters().getString("password");
+    Authentication authentication = new UsernamePasswordAuthenticationToken(username, password);
+
+    SecurityContextHolder.getContext().setAuthentication(authentication);
+    return RepeatStatus.FINISHED;
+  }
+
+}
+
+
+
+

The username and password have to be supplied via two cli parameters -username and -password. This implementation creates an "authenticated" Authentication and sets it in the Spring Security context. This is just for demonstration; normally you should not provide passwords via the command line. The actual authentication will be done automatically via Spring Security as in your "normal" application. +If you have a more complex authentication mechanism in your application, e.g. via OpenID Connect, just call this in the tasklet. Naturally you may read authentication parameters (e.g. secrets) from the command line or more securely from a configuration file.

+
+
+

In your Job Configuration set this tasklet as the first step:

+
+
+
+
@Configuration
+@EnableBatchProcessing
+public class BookingsExportBatchConfig {
+  @Inject
+  private JobBuilderFactory jobBuilderFactory;
+
+  @Inject
+  private StepBuilderFactory stepBuilderFactory;
+
+  @Bean
+  public Job myBatchJob() {
+    return this.jobBuilderFactory.get("myJob").start(myAuthenticationStep()).next(...).build();
+  }
+
+  @Bean
+  public Step myAuthenticationStep() {
+    return this.stepBuilderFactory.get("myAuthenticationStep").tasklet(myAuthenticatonTasklet()).build();
+  }
+
+  @Bean
+  public Tasklet myAuthenticatonTasklet() {
+    return new SimpleAuthenticationTasklet();
+  }
+...
+
+
+
+
+
+

Tipps & tricks

+
+
+

Identifying job parameters

+
+

Spring uses a job's parameters to identify job executions. Parameters starting with "-" are not considered for identifying a job execution.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-beanmapping.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-beanmapping.html new file mode 100644 index 00000000..96b63c01 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-beanmapping.html @@ -0,0 +1,308 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Bean-Mapping

+
+
+

For decoupling, you sometimes need to create separate objects (beans) for a different view. E.g. for an external service, you will use a transfer-object instead of the persistence entity so internal changes to the entity do not implicitly change or break the service.

+
+
+

Therefore, you need to map similar objects, which creates a copy. This also has the benefit that modifications to the copy have no side-effect on the original source object. However, implementing such mapping code by hand is very tedious and error-prone (if new properties are added to beans but not to mapping code):

+
+
+
+
public UserEto mapUser(UserEntity source) {
+  UserEto target = new UserEto();
+  target.setUsername(source.getUsername());
+  target.setEmail(source.getEmail());
+  ...
+  return target;
+}
+
+
+
+

Therefore we are using a BeanMapper for this purpose that makes our lives a lot easier. +There are several bean mapping frameworks with different approaches.

+
+
+

For a devon4j-spring application we recommend Orika, follow Spring Bean-Mapping for an introduction to Orika and Dozer in a devon4j-spring context application.

+
+
+ + + + + +
+ + +devon4j started with Dozer as framework for Spring applications and still supports it. However, we now recommend Orika (for new projects) as it is much faster (see Performance of Java Mapping Frameworks). +
+
+
+

For a Quarkus application we recommend Mapstruct, follow Quarkus Bean-Mapping for an introduction to Mapstruct in a quarkus context application.

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-blob-support.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-blob-support.html new file mode 100644 index 00000000..45b00f53 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-blob-support.html @@ -0,0 +1,295 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==BLOB support

+
+
+

BLOB stands for Binary Large Object. A BLOB may be an image, an office document, ZIP archive or any other multimedia object. +Often these BLOBs are large. If this is the case, you need to take care that you do not copy all the BLOB data into your application heap, e.g. when providing them via a REST service. +This could easily lead to performance problems or out-of-memory errors. +The solution for that problem is "streaming" those BLOBs directly from the database to the client. To demonstrate how this can be accomplished, devonfw provides an example.

+
+ +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-caching.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-caching.html new file mode 100644 index 00000000..fd279100 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-caching.html @@ -0,0 +1,352 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Caching +Caching is a technical approach to improve performance. While it may appear easy on the first sight it is an advanced topic. In general, try to use caching only when required for performance reasons. If you come to the point that you need caching first think about:

+
+
+
    +
  • +

    What to cache?
    +Be sure about what you want to cache. Is it static data? How often will it change? What will happen if the data changes but due to caching you might receive "old" values? Can this be tolerated? For how long? This is not a technical question but a business requirement.

    +
  • +
  • +

    Where to cache?
    +Will you cache data on client or server? Where exactly?

    +
  • +
  • +

    How to cache?
    +Is a local cache sufficient or do you need a shared cache?

    +
  • +
+
+
+

Local Cache

+
+ +
+
+
+

Shared Cache

+
+
+

Distributed Cache

+ +
+
+
+ + +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-client-layer.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-client-layer.html new file mode 100644 index 00000000..73c1af5c --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-client-layer.html @@ -0,0 +1,392 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Client Layer

+
+
+

There are various technical approaches to building GUI clients. The devonfw proposes rich clients that connect to the server via data-oriented services (e.g. using REST with JSON). +In general, we have to distinguish among the following types of clients:

+
+
+
    +
  • +

    web clients

    +
  • +
  • +

    native desktop clients

    +
  • +
  • +

    (native) mobile clients

    +
  • +
+
+
+

Our main focus is on web-clients. In our sample application my-thai-star we offer a responsive web-client based on Angular following devon4ng that integrates seamlessly with the back ends of my-thai-star available for Java using devon4j as well as .NET/C# using devon4net. For building angular clients read the separate devon4ng guide.

+
+
+

JavaScript for Java Developers

+
+
+

In order to get started with client development as a Java developer we give you some hints to get started. Also if you are an experienced JavaScript developer and want to learn Java this can be helpful. First, you need to understand that the JavaScript ecosystem is as large as the Java ecosystem and developing a modern web client requires a lot of knowledge. The following table helps you as experienced developer to get an overview of the tools, configuration-files, and other related aspects from the new world to learn. Also it helps you to map concepts between the ecosystems. Please note that we list the tools recommended by devonfw here (and we know that there are alternatives not listed here such as gradle, grunt, bower, etc.).

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Aspects in JavaScript and Java ecosystem
TopicAspectJavaScriptJava

Programming

Language

TypeScript (extends JavaScript)

Java

Runtime

VM

nodejs (or web-browser)

jvm

Build- & Dependency-Management

Tool

npm or yarn

maven

Config

package.json

pom.xml

Repository

npm repo

maven central (repo search)

Build cmd

ng build or npm run build (goals are not standardized in npm)

mvn install (see lifecycle)

Test cmd

ng test

mvn test

Testing

Test-Tool

jasmine

junit

Test-Runner

karma

junit / surefire

E2E Testing

Protractor

Selenium

Code Analysis

Code Coverage

ng test --no-watch --code-coverage

JaCoCo

Development

IDE

MS VS Code or IntelliJ

Eclipse or IntelliJ

Framework

Angular (etc.)

Spring or Quarkus

+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-common.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-common.html new file mode 100644 index 00000000..749171ab --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-common.html @@ -0,0 +1,275 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Common

+
+
+

In our coding-conventions we define a clear packaging and layering. +However, there is always cross-cutting code that does not belong to a specific layer such as generic helpers, general code for configuration or integration, etc. +Therefore, we define a package segment common that can be used as «layer» for such cross-cutting code. +Code from any other layer is allowed to access such common code (at least within the same component).

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-component-facade.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-component-facade.html new file mode 100644 index 00000000..5974bb5e --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-component-facade.html @@ -0,0 +1,355 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Component Facade

+
+
+ + + + + +
+ + +Our recommended approach for implementing the logic layer is use-cases +
+
+
+

For each component of the application, the logic layer defines a component facade. +This is an interface defining all business operations of the component. +It carries the name of the component («Component») and has an implementation named «Component»Impl (see implementation).

+
+
+

API

+
+
+

The component facade interface defines the logic API of the component and has to be business oriented. +This means that all parameters and return types of all methods from this API have to be business transfer-objects, datatypes (String, Integer, MyCustomerNumber, etc.), or collections of these. +The API may also only access objects of other business components listed in the (transitive) dependencies of the business-architecture.

+
+
+

Here is an example how such an API may look like:

+
+
+
+
public interface Bookingmanagement {
+
+  BookingEto findBooking(Long id);
+
+  BookingCto findBookingCto(Long id);
+
+  Page<BookingEto> findBookingEtos(BookingSearchCriteriaTo criteria);
+
+  void approveBooking(BookingEto booking);
+
+}
+
+
+
+
+
+

Implementation

+
+
+

The implementation of an interface from the logic layer (a component facade or a use-case) carries the name of that interface with the suffix Impl and is annotated with @Named. +An implementation typically needs access to the persistent data. +This is done by injecting the corresponding repository (or DAO). +According to data-sovereignty, only repositories of the same business component may be accessed directly. +For accessing data from other components the implementation has to use the corresponding API of the logic layer (the component facade). Further, it shall not expose persistent entities from the domain layer and has to map them to transfer objects using the bean-mapper.

+
+
+
+
@Named
+@Transactional
+public class BookingmanagementImpl extends AbstractComponentFacade implements Bookingmanagement {
+
+  @Inject
+  private BookingRepository bookingRepository;
+
+  @Override
+  public BookingEto findBooking(Long id) {
+
+    LOG.debug("Get Booking with id {} from database.", id);
+    BookingEntity entity = this.bookingRepository.findOne(id);
+    return getBeanMapper().map(entity, BookingEto.class));
+  }
+}
+
+
+
+

As you can see, entities (BookingEntity) are mapped to corresponding ETOs (BookingEto). +Further details about this can be found in bean-mapping.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-component.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-component.html new file mode 100644 index 00000000..dbe7d9a7 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-component.html @@ -0,0 +1,331 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Components

+
+
+

Following separation-of-concerns we divide an application into components using our package-conventions and project structure. +As described by the architecture each component is divided into layers as described in the project structure. +Please note that a component will only have the required layers. +So a component may have any number from one to all layers.

+
+
+

General Component

+
+
+

Cross-cutting aspects belong to the implicit component general. It contains technical configurations and very general code that is not business specific. Such code shall not have any dependencies to other components and therefore business related code.

+
+
+
+
+

Business Component

+
+
+

The business-architecture defines the business components with their allowed dependencies. A small application (microservice) may just have one component and no dependencies, making it simple, while the same architecture can scale up to large and complex applications (from a bigger microservice up to a modulith). +Tailoring a business domain into applications and applications into components is a tricky task that needs the skills of an experienced architect. +Also, the tailoring should follow the business and not be split for technical reasons or only by size. +Size is only an indicator but not a driver of tailoring. +Whatever hypes like microservices are telling you, never get misled in this regard: +If your system grows and reaches MAX+1 lines of code, it is not the right motivation to split it into two microservices of ~MAX/2 lines of code - such approaches will waste huge amounts of money and lead to chaos.

+
+
+
+
+

App Component

+
+
+

Only in case you need cross-cutting code that aggregates another component you may introduce the component app. +It is allowed to depend on all other components but no other component may depend on it. +With the modularity and flexibility of spring you typically do not need this. +However, when you need to have a class that registers all services or component-facades using direct code dependencies, you can introduce this component.

+
+
+
+
+

Component Example

+
+
+

The following class diagram illustrates an example of the business component Staffmanagement:

+
+
+
+logic layer component pattern +
+
+
+

In this scheme, you can see the structure and flow from the service-layer (REST service call) via the logic-layer to the dataaccess-layer (and back).

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-configuration-mapping.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-configuration-mapping.html new file mode 100644 index 00000000..257af152 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-configuration-mapping.html @@ -0,0 +1,415 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Mapping configuration to your code

+
+
+

If you are using spring-boot as suggested by devon4j your application can be configured by application.properties file as described in configuration. +To get a single configuration option into your code for flexibility, you can use

+
+
+
+
@Value("${my.property.name}")
+private String myConfigurableField;
+
+
+
+

Now, in your application.properties you can add the property:

+
+
+
+
my.property.name=my-property-value
+
+
+
+

You may even use @Value("${my.property.name:my-default-value}") to make the property optional.

+
+
+

Naming conventions for configuration properties

+
+
+

As a best practice, your configuration properties should follow these naming conventions:

+
+
+
    +
  • +

    build the property-name as a path of segments separated by the dot character (.)

    +
  • +
  • +

    segments should get more specific from left to right

    +
  • +
  • +

    a property-name should either be a leaf value or a tree node (prefix of other property-names) but never both! So never have something like foo.bar=value and foo.bar.child=value2.

    +
  • +
  • +

    start with a segment namespace unique to your context or application

    +
  • +
  • +

    a good example would be «myapp».billing.service.email.sender for the sender address of billing service emails send by «myapp».

    +
  • +
+
+
+
+
+

Mapping advanced configuration

+
+
+

However, in many scenarios you will have features that require more than just one property. +Injecting those via @Value is not leading to good code quality. +Instead we create a class with the suffix ConfigProperties containing all configuration properties for our aspect that is annotated with @ConfigurationProperties:

+
+
+
+
@ConfigurationProperties(prefix = "myapp.billing.service")
+public class BillingServiceConfigProperties {
+
+  private final Email email = new Email();
+  private final Smtp smtp = new Smtp();
+
+  public Email getEmail() { return this.email; }
+  public Smtp getSmtp() { return this.smtp; }
+
+  public static class Email {
+
+    private String sender;
+    private String subject;
+
+    public String getSender() { return this.sender; }
+    public void setSender(String sender) { this.sender = sender; }
+    public String getSubject() { return this.subject; }
+    public void setSubject(String subject) { this.subject = subject; }
+  }
+
+  public static class Smtp {
+
+    private String host;
+    private int port = 25;
+
+    public String getHost() { return this.host; }
+    public void setHost(String host) { this.host = host; }
+    public int getPort() { return this.port; }
+    public void setPort(int port) { this.port = port; }
+  }
+
+}
+
+
+
+

Of course this is just an example to demonstrate this feature of spring-boot. +In order to send emails you would typically use the existing spring-email feature. +But as you can see this allows us to define and access our configuration in a very structured and comfortable way. +The annotation @ConfigurationProperties(prefix = "myapp.billing.service") will automatically map spring configuration properties starting with myapp.billing.service via the according getters and setters into our BillingServiceConfigProperties. +We can easily define defaults (e.g. 25 as default value for myapp.billing.service.smtp.port). +Also Email or Smtp could be top-level classes to be reused in multiple configurations. +Of course you would also add helpful JavaDoc comments to the getters and classes to document your configuration options. +Further to access this configuration, we can use standard dependency-injection:

+
+
+
+
@Inject
+private BillingServiceConfigProperties config;
+
+
+
+

For very generic cases you may also use Map<String, String> to map any kind of property in an untyped way. +An example for generic configuration from devon4j can be found in +ServiceConfigProperties.

+
+
+

For further details about this feature also consult Guide to @ConfigurationProperties in Spring Boot.

+
+
+
+
+

Generate configuration metadata

+
+
+

You should further add this dependency to your module containing the *ConfigProperties:

+
+
+
+
    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-configuration-processor</artifactId>
+      <optional>true</optional>
+    </dependency>
+
+
+
+

This will generate configuration metadata so projects using your code can benefit from autocompletion and getting your JavaDoc as tooltip when editing application.properties, which makes this approach very powerful. +For further details about this please read A Guide to Spring Boot Configuration Metadata.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-configuration.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-configuration.html new file mode 100644 index 00000000..44b356f8 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-configuration.html @@ -0,0 +1,425 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Configuration

+
+
+

An application needs to be configurable in order to allow internal setup (like CDI) but also to allow externalized configuration of a deployed package (e.g. integration into runtime environment). We rely on a comprehensive configuration approach following a "convention over configuration" pattern. This guide adds on to this by detailed instructions and best-practices how to deal with configurations.

+
+
+

In general we distinguish the following kinds of configuration that are explained in the following sections:

+
+
+ +
+
+

Internal Application Configuration

+
+
+

The application configuration contains all internal settings and wirings of the application (bean wiring, database mappings, etc.) and is maintained by the application developers at development time.

+
+
+

For more detail of Spring stack, see here

+
+
+
+
+

Externalized Configuration

+
+
+

Externalized configuration is a configuration that is provided separately to a deployment package and can be maintained undisturbed by re-deployments.

+
+
+

Environment Configuration

+
+

The environment configuration contains configuration parameters (typically port numbers, host names, passwords, logins, timeouts, certificates, etc.) specific for the different environments. These are under the control of the operators responsible for the application.

+
+
+

The environment configuration is maintained in application.properties files, defining various properties. +These properties are explained in the corresponding configuration sections of the guides for each topic:

+
+
+ +
+
+

Make sure your properties are thoroughly documented by providing a comment to each property. This inline documentation is most valuable for your operating department.

+
+
+

More about structuring your application.properties files can be read here for Spring.

+
+
+

For Quarkus, please refer to Quarkus Config Reference for more details.

+
+
+
+

Business Configuration

+
+

Often applications do not need business configuration. In case they do it should typically be editable by administrators via the GUI. The business configuration values should therefore be stored in the database in key/value pairs.

+
+
+

Therefore we suggest to create a dedicated table with (at least) the following columns:

+
+
+
    +
  • +

    ID

    +
  • +
  • +

    Property name

    +
  • +
  • +

    Property type (Boolean, Integer, String)

    +
  • +
  • +

    Property value

    +
  • +
  • +

    Description

    +
  • +
+
+
+

According to the entries in this table, an administrative GUI may show a generic form to modify business configuration. Boolean values should be shown as checkboxes, integer and string values as text fields. The values should be validated according to their type so an error is raised if you try to save a string in an integer property for example.

+
+
+

We recommend the following base layout for the hierarchical business configuration:

+
+
+

component.[subcomponent].[subcomponent].propertyname

+
+
+
+
+
+

Security

+
+
+

Often you need to have passwords (for databases, third-party services, etc.) as part of your configuration. These are typically environment specific (see above). However, with DevOps and continuous-deployment you might be tempted to commit such configurations into your version-control (e.g. git). Doing that with plain text passwords is a severe problem especially for production systems. Never do that! Instead we offer some suggestions how to deal with sensible configurations:

+
+
+

Password Encryption

+
+

A simple but reasonable approach is to configure the passwords encrypted with a master-password. The master-password should be a strong secret that is specific for each environment. It must never be committed to version-control.

+
+
+

For Spring, we use jasypt-spring-boot. For more details, see here

+
+
+

For Quarkus, see here

+
+
+

Is this Security by Obscurity?

+
+
    +
  • +

    Yes, from the point of view to protect the passwords on the target environment this is nothing but security by obscurity. If an attacker somehow got full access to the machine this will only cause him to spend some more time.

    +
  • +
  • +

    No, if someone only gets the configuration file. So all your developers might have access to the version-control where the config is stored. Others might have access to the software releases that include these configs. But without the master-password that should only be known to specific operators, no one else can decrypt the password (except with brute-force, which will take a very long time, see jasypt for details).

    +
  • +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-cors-support.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-cors-support.html new file mode 100644 index 00000000..7592c2e6 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-cors-support.html @@ -0,0 +1,316 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==CORS support

+
+
+

When you are developing a Javascript client and a server application separately, you have to deal with cross domain issues. We have to request from an origin domain distinct from the target domain and the browser does not allow this.

+
+
+

So, we need to prepare the server side to accept requests from other domains. We need to cover the following points:

+
+
+
    +
  • +

    Accept request from other domains.

    +
  • +
  • +

    Accept devonfw used headers like X-CSRF-TOKEN or correlationId.

    +
  • +
  • +

    Be prepared to receive secured request (cookies).

    +
  • +
+
+
+

It is important to note that if you are using security in your request (sending cookies) you have to set withCredentials flag to true in your client side request and deal with special IE8 characteristics.

+
+
+

For more information about CORS see here. Information about the CORS headers can be found here.

+
+
+

Configuring CORS support

+
+
+

To enable CORS support for your application, see the advanced guides. For Spring applications see here. For Quarkus follow the official Quarkus guide.

+
+
+
+
+

Configuration with service mesh

+
+
+

If you are using a service mesh, you can also define your CORS policy directly there. Here is an example from Istio.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-csrf.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-csrf.html new file mode 100644 index 00000000..5e694fe2 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-csrf.html @@ -0,0 +1,405 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Cross-site request forgery (CSRF)

+
+
+

CSRF is a type of malicious exploit of a web application that allows an attacker to induce users to perform actions that they do not intend to perform.

+
+
+
+csrf +
+
+
+

More details about csrf can be found at https://owasp.org/www-community/attacks/csrf.

+
+
+

Secure devon4j server against CSRF

+
+
+

In case your devon4j server application is not accessed by browsers or the web-client is using JWT based authentication, you are already safe regarding CSRF. +However, if your application is accessed from a browser and you are using form based authentication (with session cookie) or basic authentication, you need to enable CSRF protection. +This guide will tell you how to do this.

+
+
+

Dependency

+
+

To secure your devon4j application against CSRF attacks, you only need to add the following dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-security-csrf</artifactId>
+</dependency>
+
+
+
+

Starting with devon4j version 2020.12.001 application template, this is all you need to do. +However, if you have started from an older version or you want to understand more, please read on.

+
+
+
+

Pluggable web-security

+
+

To enable pluggable security via devon4j security starters you need to apply WebSecurityConfigurer to your BaseWebSecurityConfig (your class extending spring-boot’s WebSecurityConfigurerAdapter) as following:

+
+
+
+
  @Inject
+  private WebSecurityConfigurer webSecurityConfigurer;
+
+  public void configure(HttpSecurity http) throws Exception {
+    // disable CSRF protection by default, use csrf starter to override.
+	  http = http.csrf().disable();
+	  // apply pluggable web-security from devon4j security starters
+    http = this.webSecurityConfigurer.configure(http);
+    .....
+  }
+
+
+
+
+

Custom CsrfRequestMatcher

+
+

If you want to customize which HTTP requests will require a CSRF token, you can implement your own CsrfRequestMatcher and provide it to the devon4j CSRF protection via qualified injection as following:

+
+
+
+
@Named("CsrfRequestMatcher")
+public class CsrfRequestMatcher implements RequestMatcher {
+  @Override
+  public boolean matches(HttpServletRequest request) {
+    .....
+  }
+}
+
+
+
+

Please note that the exact name (@Named("CsrfRequestMatcher")) is required here to ensure your custom implementation will be injected properly.

+
+
+
+

CsrfRestService

+
+

With the devon4j-starter-security-csrf the CsrfRestService gets integrated into your app. +It provides an operation to get the CSRF token via an HTTP GET request. +The URL path to retrieve this CSRF token is services/rest/csrf/v1/token. +As a result you will get a JSON like the following:

+
+
+
+
{
+  "token":"3a8a5f66-c9eb-4494-81e1-7cc58bc3a519",
+  "parameterName":"_csrf",
+  "headerName":"X-CSRF-TOKEN"
+}
+
+
+
+

The token value is a strong random value that will differ for each user session. +It has to be send with subsequent HTTP requests (when method is other than GET) in the specified header (X-CSRF-TOKEN).

+
+
+
+

How it works

+
+

Putting it all together, a browser client should call the CsrfRestService after successful login to receive the current CSRF token. +With every subsequent HTTP request (other than GET) the client has to send this token in the according HTTP header. +Otherwise the server will reject the request to prevent CSRF attacks. +Therefore, an attacker might make your browser perform HTTP requests towards your devon4j application backend via <image> elements, <iframes>, etc. +Your browser will then still include your session cookie if you are already logged in (e.g. from another tab). +However, in case he wants to trigger DELETE or POST requests trying your browser to make changes in the application (delete or update data, etc.) this will fail without CSRF token. +The attacker may make your browser retrieve the CSRF token but he will not be able to retrieve the result and put it into the header of other requests due to the same-origin-policy. +This way your application will be secured against CSRF attacks.

+
+
+
+
+
+

Configure devon4ng client for CSRF

+
+
+

Devon4ng client configuration for CSRF is described here

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-dao.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-dao.html new file mode 100644 index 00000000..db348022 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-dao.html @@ -0,0 +1,381 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Data Access Object

+
+
+

The Data Access Objects (DAOs) are part of the persistence layer. +They are responsible for a specific entity and should be named «Entity»Dao and «Entity»DaoImpl. +The DAO offers the so called CRUD-functionalities (create, retrieve, update, delete) for the corresponding entity. +Additionally a DAO may offer advanced operations such as query or locking methods.

+
+
+

DAO Interface

+
+
+

For each DAO there is an interface named «Entity»Dao that defines the API. For CRUD support and common naming we derive it from the ApplicationDao interface that comes with the devon application template:

+
+
+
+
public interface MyEntityDao extends ApplicationDao<MyEntity> {
+  List<MyEntity> findByCriteria(MyEntitySearchCriteria criteria);
+}
+
+
+
+

All CRUD operations are inherited from ApplicationDao so you only have to declare the additional methods.

+
+
+
+
+

DAO Implementation

+
+
+

Implementing a DAO is quite simple. We create a class named «Entity»DaoImpl that extends ApplicationDaoImpl and implements your «Entity»Dao interface:

+
+
+
+
public class MyEntityDaoImpl extends ApplicationDaoImpl<MyEntity> implements MyEntityDao {
+
+  public List<MyEntity> findByCriteria(MyEntitySearchCriteria criteria) {
+    TypedQuery<MyEntity> query = createQuery(criteria, getEntityManager());
+    return query.getResultList();
+  }
+  ...
+}
+
+
+
+

Again you only need to implement the additional non-CRUD methods that you have declared in your «Entity»Dao interface. +In the DAO implementation you can use the method getEntityManager() to access the EntityManager from the JPA. You will need the EntityManager to create and execute queries.

+
+
+

Static queries for DAO Implementation

+
+

All static queries are declared in the file src\main\resources\META-INF\orm.xml:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<entity-mappings version="1.0" xmlns="http://java.sun.com/xml/ns/persistence/orm" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://java.sun.com/xml/ns/persistence/orm http://java.sun.com/xml/ns/persistence/orm_1_0.xsd">
+  <named-query name="find.dish.with.max.price">
+    <query><![CDATA[SELECT dish FROM DishEntity dish WHERE dish.price <= :maxPrice]]></query>
+  </named-query>
+  ...
+</entity-mappings>
+
+
+
+

When your application is started, all these static queries will be created as prepared statements. This allows better performance and also ensures that you get errors for invalid JPQL queries when you start your app rather than later when the query is used.

+
+
+

To avoid redundant occurrences of the query name (get.open.order.positions.for.order) we define a constant for each named query:

+
+
+
+
public class NamedQueries {
+  public static final String FIND_DISH_WITH_MAX_PRICE = "find.dish.with.max.price";
+}
+
+
+
+

Note that changing the name of the java constant (FIND_DISH_WITH_MAX_PRICE) can be done easily with refactoring. Further you can trace where the query is used by searching the references of the constant.

+
+
+

The following listing shows how to use this query:

+
+
+
+
public List<DishEntity> findDishByMaxPrice(BigDecimal maxPrice) {
+  Query query = getEntityManager().createNamedQuery(NamedQueries.FIND_DISH_WITH_MAX_PRICE);
+  query.setParameter("maxPrice", maxPrice);
+  return query.getResultList();
+}
+
+
+
+

Via EntityManager.createNamedQuery(String) we create an instance of Query for our predefined static query. +Next we use setParameter(String, Object) to provide a parameter (maxPrice) to the query. This has to be done for all parameters of the query.

+
+
+

Note that using the createQuery(String) method, which takes the entire query as string (that may already contain the parameter) is not allowed to avoid SQL injection vulnerabilities. +When the method getResultList() is invoked, the query is executed and the result is delivered as List. As an alternative, there is a method called getSingleResult(), which returns the entity if the query returned exactly one and throws an exception otherwise.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-data-permission.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-data-permission.html new file mode 100644 index 00000000..3501d1f5 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-data-permission.html @@ -0,0 +1,519 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Data-permissions

+
+
+

In some projects there are demands for permissions and authorization that is dependent on the processed data. E.g. a user may only be allowed to read or write data for a specific region. This is adding some additional complexity to your authorization. If you can avoid this it is always best to keep things simple. However, in various cases this is a requirement. Therefore the following sections give you guidance and patterns how to solve this properly.

+
+
+

Structuring your data

+
+
+

For all your business objects (entities) that have to be secured regarding to data permissions we recommend that you create a separate interface that provides access to the relevant data required to decide about the permission. Here is a simple example:

+
+
+
+
public interface SecurityDataPermissionCountry {
+
+  /**
+   * @return the 2-letter ISO code of the country this object is associated with. Users need
+   *         a data-permission for this country in order to read and write this object.
+   */
+  String getCountry();
+}
+
+
+
+

Now related business objects (entities) can implement this interface. Often such data-permissions have to be applied to an entire object-hierarchy. For security reasons we recommend that also all child-objects implement this interface. For performance reasons we recommend that the child-objects redundantly store the data-permission properties (such as country in the example above) and this gets simply propagated from the parent, when a child object is created.

+
+
+
+
+

Permissions for processing data

+
+
+

When saving or processing objects with a data-permission, we recommend to provide dedicated methods to verify the permission in an abstract base-class such as AbstractUc and simply call this explicitly from your business code. This makes it easy to understand and debug the code. Here is a simple example:

+
+
+
+
protected void verifyPermission(SecurityDataPermissionCountry entity) throws AccessDeniedException;
+
+
+
+

Beware of AOP

+
+

For simple but cross-cutting data-permissions you may also use AOP. This leads to programming aspects that reflectively scan method arguments and magically decide what to do. Be aware that this quickly gets tricky:

+
+
+
    +
  • +

    What if multiple of your method arguments have data-permissions (e.g. implement SecurityDataPermission*)?

    +
  • +
  • +

    What if the object to authorize is only provided as reference (e.g. Long or IdRef) and only loaded and processed inside the implementation where the AOP aspect does not apply?

    +
  • +
  • +

    How to express advanced data-permissions in annotations?

    +
  • +
+
+
+

What we have learned is that annotations like @PreAuthorize from spring-security easily lead to the "programming in string literals" anti-pattern. We strongly discourage to use this anti-pattern. In such case writing your own verifyPermission methods that you manually call in the right places of your business-logic is much better to understand, debug and maintain.

+
+
+
+
+
+

Permissions for reading data

+
+
+

When it comes to restrictions on the data to read it becomes even more tricky. In the context of a user only entities shall be loaded from the database he is permitted to read. This is simple for loading a single entity (e.g. by its ID) as you can load it and then if not permitted throw an exception to secure your code. But what if the user is performing a search query to find many entities? For performance reasons we should only find data the user is permitted to read and filter all the rest already via the database query. But what if this is not a requirement for a single query but needs to be applied cross-cutting to tons of queries? Therefore we have the following pattern that solves your problem:

+
+
+

For each data-permission attribute (or set of such) we create an abstract base entity:

+
+
+
+
@MappedSuperclass
+@EntityListeners(PermissionCheckListener.class)
+@FilterDef(name = "country", parameters = {@ParamDef(name = "countries", type = "string")})
+@Filter(name = "country", condition = "country in (:countries)")
+public abstract class SecurityDataPermissionCountryEntity extends ApplicationPersistenceEntity
+    implements SecurityDataPermissionCountry {
+
+  private String country;
+
+  @Override
+  public String getCountry() {
+    return this.country;
+  }
+
+  public void setCountry(String country) {
+    this.country = country;
+  }
+}
+
+
+
+

There are some special hibernate annotations @EntityListeners, @FilterDef, and @Filter used here allowing to apply a filter on the country for any (non-native) query performed by hibernate. The entity listener may look like this:

+
+
+
+
public class PermissionCheckListener {
+
+  @PostLoad
+  public void read(SecurityDataPermissionCountryEntity entity) {
+    PermissionChecker.getInstance().requireReadPermission(entity);
+  }
+
+  @PrePersist
+  @PreUpdate
+  public void write(SecurityDataPermissionCountryEntity entity) {
+    PermissionChecker.getInstance().requireWritePermission(entity);
+  }
+}
+
+
+
+

This will ensure that hibernate implicitly will call these checks for every such entity when it is read from or written to the database. Further to avoid reading entities from the database the user is not permitted to (and ending up with exceptions), we create an AOP aspect that automatically activates the above declared hibernate filter:

+
+
+
+
@Named
+public class PermissionCheckerAdvice implements MethodBeforeAdvice {
+
+  @Inject
+  private PermissionChecker permissionChecker;
+
+  @PersistenceContext
+  private EntityManager entityManager;
+
+  @Override
+  public void before(Method method, Object[] args, Object target) {
+
+    Collection<String> permittedCountries = this.permissionChecker.getPermittedCountriesForReading();
+    if (permittedCountries != null) { // null is returned for admins that may access all countries
+      if (permittedCountries.isEmpty()) {
+        throw new AccessDeniedException("Not permitted for any country!");
+      }
+      Session session = this.entityManager.unwrap(Session.class);
+      session.enableFilter("country").setParameterList("countries", permittedCountries.toArray());
+    }
+  }
+}
+
+
+
+

Finally to apply this aspect to all Repositories (can easily be changed to DAOs) implement the following advisor:

+
+
+
+
@Named
+public class PermissionCheckerAdvisor implements PointcutAdvisor, Pointcut, ClassFilter, MethodMatcher {
+
+  @Inject
+  private PermissionCheckerAdvice advice;
+
+  @Override
+  public Advice getAdvice() {
+    return this.advice;
+  }
+
+  @Override
+  public boolean isPerInstance() {
+    return false;
+  }
+
+  @Override
+  public Pointcut getPointcut() {
+    return this;
+  }
+
+  @Override
+  public ClassFilter getClassFilter() {
+    return this;
+  }
+
+  @Override
+  public MethodMatcher getMethodMatcher() {
+    return this;
+  }
+
+  @Override
+  public boolean matches(Method method, Class<?> targetClass) {
+    return true; // apply to all methods
+  }
+
+  @Override
+  public boolean isRuntime() {
+    return false;
+  }
+
+  @Override
+  public boolean matches(Method method, Class<?> targetClass, Object... args) {
+    throw new IllegalStateException("isRuntime()==false");
+  }
+
+  @Override
+  public boolean matches(Class<?> clazz) {
+    // when using DAOs simply change to some class like ApplicationDao
+    return DefaultRepository.class.isAssignableFrom(clazz);
+  }
+}
+
+
+
+
+
+

Managing and granting the data-permissions

+
+
+

Following our authorization guide we can simply create a permission for each country. We might simply reserve a prefix (as virtual «app-id») for each data-permission to allow granting data-permissions to end-users across all applications of the IT landscape. In our example we could create access controls country.DE, country.US, country.ES, etc. and assign those to the users. The method permissionChecker.getPermittedCountriesForReading() would then scan for these access controls and only return the 2-letter country code from it.

+
+
+ + + + + +
+ + +Before you make your decisions how to design your access controls please clarify the following questions: +
+
+
+
    +
  • +

    Do you need to separate data-permissions independent of the functional permissions? E.g. may it be required to express that a user can read data from the countries ES and PL but is only permitted to modify data from PL? In such case a single assignment of "country-permissions" to users is insufficient.

    +
  • +
  • +

    Do you want to grant data-permissions individually for each application (higher flexibility and complexity) or for the entire application landscape (simplicity, better maintenance for administrators)? In case of the first approach you would rather have access controls like app1.country.GB and app2.country.GB.

    +
  • +
  • +

    Do your data-permissions depend on objects that can be created dynamically inside your application?

    +
  • +
  • +

    If you want to grant data-permissions on other business objects (entities), how do you want to reference them (primary keys, business keys, etc.)? What reference is most stable? Which is most readable?

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-dataaccess-layer.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-dataaccess-layer.html new file mode 100644 index 00000000..d9bb35f1 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-dataaccess-layer.html @@ -0,0 +1,291 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Data-Access Layer

+
+
+

The data-access layer is responsible for all outgoing connections to access and process data. This is mainly about accessing data from a persistent data-store. External systems could also be accessed from the data-access layer if they match this definition, e.g. a mongo-db via rest services.

+
+
+

Note: In the modern project structure, this layer is replaced by the domain layer.

+
+
+

Database

+
+
+

You need to make your choice for a database. Options are documented here.

+
+
+

The classical approach is to use a Relational Database Management System (RDMS). In such a case, we strongly recommend to follow our JPA Guide. Some NoSQL databases are supported by spring-data so you can consider the repository guide.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-database-migration.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-database-migration.html new file mode 100644 index 00000000..682ee872 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-database-migration.html @@ -0,0 +1,308 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Database Migration

+
+
+

When you have a schema-based database, +you need a solution for schema versioning and migration for your database. +A specific release of your app requires a corresponding version of the schema in the database to run. +As you want simple and continuous deployment you should automate the schema versioning and database migration.

+
+
+

The general idea is that your software product contains "scripts" to migrate the database from schema version X to version X+1. +When you begin your project you start with version 1 and with every increment of your app that needs a change to the database schema (e.g. a new table, a new column to an existing table, a new index, etc.) you add another "script" that migrates from the current to the next version. +For simplicity these versions are just sequential numbers or timestamps. +Now, the solution you choose will automatically manage the schema version in a separate metadata table in your database that stores the current schema version. +When your app is started, it will check the current version inside the database from that metadata table. +As long as there are "scripts" that migrate from there to a higher version, they will be automatically applied to the database and this process is recorded in the metadata table in your database, which also updates the current schema version there. +Using this approach, you can start with an empty database, which will result in all "scripts" being applied sequentially. +Also any version of your database schema can be present and you will always end up in a controlled migration to the latest schema version.

+
+
+

Options for database migration

+
+
+

For database migration you can choose between the following options:

+
+
+
    +
  • +

    flyway (KISS based approach with migrations as SQL)

    +
  • +
  • +

    liquibase (more complex approach with database abstraction)

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-datatype.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-datatype.html new file mode 100644 index 00000000..f3892192 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-datatype.html @@ -0,0 +1,410 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Datatypes

+
+
+
+
+

A datatype is an object representing a value of a specific type with the following aspects:

+
+
+
    +
  • +

    It has a technical or business specific semantic.

    +
  • +
  • +

    Its JavaDoc explains the meaning and semantic of the value.

    +
  • +
  • +

    It is immutable and therefore stateless (its value assigned at construction time and can not be modified).

    +
  • +
  • +

    It is serializable.

    +
  • +
  • +

    It properly implements #equals(Object) and #hashCode() (two different instances with the same value are equal and have the same hash).

    +
  • +
  • +

    It shall ensure syntactical validation so it is NOT possible to create an instance with an invalid value.

    +
  • +
  • +

    It is responsible for formatting its value to a string representation suitable for sinks such as UI, loggers, etc. Also consider cases like a Datatype representing a password where toString() should return something like "**" instead of the actual password to prevent security accidents.

    +
  • +
  • +

    It is responsible for parsing the value from other representations such as a string (as needed).

    +
  • +
  • +

    It shall provide required logical operations on the value to prevent redundancies. Due to the immutable attribute all manipulative operations have to return a new Datatype instance (see e.g. BigDecimal.add(java.math.BigDecimal)).

    +
  • +
  • +

    It should implement Comparable if a natural order is defined.

    +
  • +
+
+
+

Based on the Datatype a presentation layer can decide how to view and how to edit the value. Therefore a structured data model should make use of custom datatypes in order to be expressive. +Common generic datatypes are String, Boolean, Number and its subclasses, Currency, etc. +Please note that both Date and Calendar are mutable and have very confusing APIs. Therefore, use JSR-310 or jodatime instead. +Even if a datatype is technically nothing but a String or a Number but logically something special it is worth defining it as a dedicated datatype class already for the purpose of having a central javadoc to explain it. On the other hand, avoid introducing technical datatypes like String32 for a String with a maximum length of 32 characters as this is not adding value in the sense of a real Datatype. +It is suitable and in most cases also recommended to use the class implementing the datatype as API omitting a dedicated interface.

+
+
+
+— mmm project
+datatype javadoc +
+
+ +
+

Datatype Packaging

+
+
+

For the devonfw we use a common packaging schema. +The specifics for datatypes are as follows:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
SegmentValueExplanation

<component>

*

Here we use the (business) component defining the datatype or general for generic datatypes.

<layer>

common

Datatypes are used across all layers and are not assigned to a dedicated layer.

<scope>

api

Datatypes are always used directly as API even though they may contain (simple) implementation logic. Most datatypes are simple wrappers for generic Java types (e.g. String) but make these explicit and might add some validation.

+
+
+
+

Technical Concerns

+
+
+

Many technologies like Dozer and QueryDSL’s (alias API) are heavily based on reflection. For them to work properly with custom datatypes, the frameworks must be able to instantiate custom datatypes with no-argument constructors. It is therefore recommended to implement a no-argument constructor for each datatype of at least protected visibility.

+
+
+
+
+

Datatypes in Entities

+
+
+

The usage of custom datatypes in entities is explained in the persistence layer guide.

+
+
+
+
+

Datatypes in Transfer-Objects

+
+
+

XML

+
+

For mapping datatypes with JAXB see XML guide.

+
+
+
+

JSON

+
+

For mapping datatypes from and to JSON see JSON custom mapping.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-dependency-injection.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-dependency-injection.html new file mode 100644 index 00000000..939f430f --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-dependency-injection.html @@ -0,0 +1,570 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Dependency Injection +Dependency injection is one of the most important design patterns and is a key principle to a modular and component based architecture. +The Java Standard for dependency injection is javax.inject (JSR330) that we use in combination with JSR250. +Additionally, for scoping you can use CDI (Context and Dependency Injection) from JSR365.

+
+
+

There are many frameworks which support this standard including all recent Java EE application servers. +Therefore in devonfw we rely on these open standards and can propagate patterns and code examples that work independent from the underlying frameworks.

+
+
+

Key Principles

+
+
+

Within dependency injection a bean is typically a reusable unit of your application providing an encapsulated functionality. +This bean can be injected into other beans and it should in general be replaceable. +As an example we can think of a use-case, a repository, etc. +As best practice we use the following principles:

+
+
+
    +
  • +

    Stateless implementation
    +By default such beans shall be implemented stateless. If you store state information in member variables you can easily run into concurrency problems and nasty bugs. This is easy to avoid by using local variables and separate state classes for complex state-information. Try to avoid stateful beans wherever possible. Only add state if you are fully aware of what you are doing and properly document this as a warning in your JavaDoc.

    +
  • +
  • +

    Usage of Java standards
    +We use common standards (see above) that makes our code portable. Therefore we use standardized annotations like @Inject (javax.inject.Inject) instead of proprietary annotations such as @Autowired. Generally we avoid proprietary annotations in business code (logic layer).

    +
  • +
  • +

    Simple injection-style
    +In general you can choose between constructor, setter or field injection. For simplicity we recommend to do private field injection as it is very compact and easy to maintain. We believe that constructor injection is bad for maintenance especially in case of inheritance (if you change the dependencies you need to refactor all sub-classes). Private field injection and public setter injection are very similar but setter injection is much more verbose (often you are even forced to have javadoc for all public methods). If you are writing re-usable library code setter injection will make sense as it is more flexible. In a business application you typically do not need that and can save a lot of boiler-plate code if you use private field injection instead. Nowadays you are using container infrastructure also for your tests (see testing) so there is no need to inject manually (what would require a public setter).

    +
  • +
  • +

    KISS
    +To follow the KISS (keep it small and simple) principle we avoid advanced features (e.g. custom AOP, non-singleton beans) and only use them where necessary.

    +
  • +
  • +

    Separation of API and implementation
    +For important components we should separate a self-contained API documented with JavaDoc from its implementation. Code from other components that wants to use the implementation shall only rely on the API. However, for things that will never be exchanged no API as interface is required you can skip such separation.

    +
  • +
+
+
+
+
+

Example Bean

+
+
+

Here you can see the implementation of an example bean using dependency injection:

+
+
+
+
@ApplicationScoped
+@Named("MyComponent")
+public class MyComponentImpl implements MyComponent {
+  @Inject
+  private MyOtherComponent myOtherComponent;
+
+  @PostConstruct
+  public void init() {
+    // initialization if required (otherwise omit this method)
+  }
+
+  @PreDestroy
+  public void dispose() {
+    // shutdown bean, free resources if required (otherwise omit this method)
+  }
+
+  ...
+}
+
+
+
+

Here MyComponentImpl depends on MyOtherComponent that is injected into the field myOtherComponent because of the @Inject annotation. +To make this work there must be exactly one bean in the container (e.g. spring or quarkus) that is an instance of MyOtherComponent. +In order to put a bean into the container, we can use @ApplicationScoped in case of CDI (required for quarkus) for a stateless bean. +In spring we can omit a CDI annotation and the @Named annotation is already sufficient as a bean is stateless by default in spring. +If we always use @ApplicationScoped we can make this more explicit and more portable across different frameworks. +So in our example we put MyComponentImpl into the container. +That bean will be called MyComponent as we specified in the @Named annotation but we can also omit the name to use the classname as fallback. +Now our bean can be injected into other beans using @Inject annotation either via MyComponent interface (recommended when interface is present) or even directly via MyComponentImpl. +In case you omit the interface, you should also omit the Impl suffix or instead use Bean as suffix.

+
+
+
+
+

Multiple bean implementations

+
+
+

In some cases you might have multiple implementations as beans for the same interface. +The following sub-sections handle the different scenarios to give you guidance.

+
+
+

Only one implementation in container

+
+

In some cases you still have only one implementation active as bean in the container at runtime. +A typical example is that you have different implementations for test and main usage. +This case is easy, as @Inject will always be unique. +The only thing you need to care about is how to configure your framework (spring, quarkus, etc.) to know which implementation to put in the container depending on specific configuration. +In spring this can be achieved via the proprietary @Profile annotation.

+
+
+
+

Injecting all of multiple implementations

+
+

In some situations you may have an interface that defines a kind of "plugin". +You can have multiple implementations in your container and want to have all of them injected. +Then you can request a list with all the bean implementations via the interface as in the following example:

+
+
+
+
  @Inject
+  private List<MyConverter> converters;
+
+
+
+

Your code may iterate over all plugins (converters) and apply them sequentially. +Please note that the injection will fail (at least in spring), when there is no bean available to inject. +So you do not get an empty list injected but will get an exception on startup.

+
+
+
+

Injecting one of multiple implementations

+
+

Another scenario is that you have multiple implementations in your container coexisting, but for injection you may want to choose a specific implementation. +Here you could use the @Named annotation to specify a unique identifier for each implementation, which is called qualified injection:

+
+
+
+
@ApplicationScoped
+@Named("UserAuthenticator")
+public class UserAuthenticator implements Authenticator {
+  ...
+}
+@ApplicationScoped
+@Named("ServiceAuthenticator")
+public class ServiceAuthenticator implements Authenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  @Named("UserAuthenticator")
+  private Authenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  @Named("ServiceAuthenticator")
+  private Authenticator authenticator;
+  ...
+}
+
+
+
+

However, we discovered that this pattern is not so great: +The identifiers in the @Named annotation are just strings that could easily break. +You could use constants instead but still this is not the best solution.

+
+
+

In the end you can very much simplify this by just directly injecting the implementation instead:

+
+
+
+
@ApplicationScoped
+public class UserAuthenticator implements Authenticator {
+  ...
+}
+@ApplicationScoped
+public class ServiceAuthenticator implements Authenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  private UserAuthenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  private ServiceAuthenticator authenticator;
+  ...
+}
+
+
+
+

In case you want to strictly decouple from implementations, you can still create dedicated interfaces:

+
+
+
+
public interface UserAuthenticator extends Authenticator {}
+@ApplicationScoped
+public class UserAuthenticatorImpl implements UserAuthenticator {
+  ...
+}
+public interface ServiceAuthenticator extends Authenticator {}
+@ApplicationScoped
+public class ServiceAuthenticatorImpl implements ServiceAuthenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  private UserAuthenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  private ServiceAuthenticator authenticator;
+  ...
+}
+
+
+
+

However, as you can see this is again introducing additional boiler-plate code. +While the principle to separate API and implementation and strictly decouple from implementation is valuable in general, +you should always consider KISS, lean, and agile in contrast and balance pros and cons instead of blindly following dogmas.

+
+
+
+
+
+

Imports

+
+
+

Here are the import statements for the most important annotations for dependency injection

+
+
+
+
import javax.inject.Inject;
+import javax.inject.Named;
+import javax.enterprise.context.ApplicationScoped;
+// import javax.enterprise.context.RequestScoped;
+// import javax.enterprise.context.SessionScoped;
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+
+
+
+
+
+

Dependencies

+
+
+

Please note that with Jakarta EE the dependencies have changed. +When you want to start with Jakarta EE you should use these dependencies to get the annotations for dependency injection:

+
+
+
+
<!-- Basic injection annotations (JSR-330) -->
+<dependency>
+  <groupId>jakarta.inject</groupId>
+  <artifactId>jakarta.inject-api</artifactId>
+</dependency>
+<!-- Basic lifecycle and security annotations (JSR-250)-->
+<dependency>
+  <groupId>jakarta.annotation</groupId>
+  <artifactId>jakarta.annotation-api</artifactId>
+</dependency>
+<!-- Context and dependency injection API (JSR-365) -->
+<dependency>
+  <groupId>jakarta.enterprise</groupId>
+  <artifactId>jakarta.enterprise.cdi-api</artifactId>
+</dependency>
+
+
+
+

Please note that with quarkus you will get them as transitive dependencies out of the box. +The above Jakarta EE dependencies replace these JEE dependencies:

+
+
+
+
<!-- Basic injection annotations (JSR-330) -->
+<dependency>
+  <groupId>javax.inject</groupId>
+  <artifactId>javax.inject</artifactId>
+</dependency>
+<!-- Basic lifecycle and security annotations (JSR-250)-->
+<dependency>
+  <groupId>javax.annotation</groupId>
+  <artifactId>javax.annotation-api</artifactId>
+</dependency>
+<!-- Context and dependency injection API (JSR-365) -->
+<dependency>
+  <groupId>javax.enterprise</groupId>
+  <artifactId>cdi-api</artifactId>
+</dependency>
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-domain-layer.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-domain-layer.html new file mode 100644 index 00000000..74eaf747 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-domain-layer.html @@ -0,0 +1,276 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Domain Layer

+
+
+

The domain layer is responsible for the data-model and mapping it to a database. +The most common approach is to use a Relational Database Management System (RDMS). In such a case, we strongly recommend to follow our JPA Guide. Some NoSQL databases are supported by spring-data, so you can consider the repository guide.

+
+
+

Note: The domain layer is the replacement for the data-access layer in the modern project structure.

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-dto.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-dto.html new file mode 100644 index 00000000..a1f2f24e --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-dto.html @@ -0,0 +1,272 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==DTO approach

+
+
+

As described in our modern structure guide, for applications e.g. with microservices architecture where we build smaller applications compared to monoliths, we recommend keeping things as simple as possible. The same principle applies to transfer objects. Instead of using different types of transfer objects for each entity such as ETO and CTO, we highly suggest using one data transfer object (DTO) named «BusinessObject»Dto.

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-eto-cto.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-eto-cto.html new file mode 100644 index 00000000..6e03be54 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-eto-cto.html @@ -0,0 +1,316 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==ETO and CTO approach

+
+
+

ETO

+
+
+

For each persistent entity «BusinessObject»Entity we create or generate a corresponding entity transfer object (ETO) named «BusinessObject»Eto. It has the same properties except for relations.

+
+
+
+
+

BO

+
+
+

In order to centralize the properties (getters and setters with their javadoc) we create a common interface «BusinessObject» implemented both by the entity and its ETO. This also gives us compile-time safety that +bean-mapper can properly map all properties between entity and ETO.

+
+
+
+
+

CTO

+
+
+

If we need to pass an entity with its relation(s) we create a corresponding composite transfer object (CTO) named «BusinessObject»«Subset»Cto that only contains other transfer-objects or collections of them. Here «Subset» is empty for the canonical CTO that holds the ETO together with all its relations. +This is what can be generated automatically with CobiGen. +However, be careful to generate CTOs without thinking and considering design. +If there are no relations at all a CTO is pointless and shall be omitted. +However, if there are multiple relations you typically need multiple CTOs for the same «BusinessObject» that define different subsets of the related data. +These will typically be designed and implemented by hand. +E.g. you may have CustomerWithAddressCto and CustomerWithContractCto. Most CTOs correspond to a specific «BusinessObject» and therefore contain a «BusinessObject»Eto. Such CTOs should inherit from MasterCto.

+
+
+

This pattern with entities, ETOs and CTOs is illustrated by the following UML diagram from our sample application.

+
+
+
+ETOs and CTOs +
+
Figure 1. ETOs and CTOs
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-exceptions.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-exceptions.html new file mode 100644 index 00000000..db7fd649 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-exceptions.html @@ -0,0 +1,480 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Exception Handling

+
+
+

Exception Principles

+
+
+

For exceptions we follow these principles:

+
+
+
    +
  • +

    We only use exceptions for exceptional situations and not for programming control flows, etc. Creating an exception in Java is expensive and hence should not be done for simply testing whether something is present, valid or permitted. In the latter case design your API to return this as a regular result.

    +
  • +
  • +

    We use unchecked exceptions (RuntimeException) [1]

    +
  • +
  • +

    We distinguish internal exceptions and user exceptions:

    +
    +
      +
    • +

      Internal exceptions have technical reasons. For unexpected and exotic situations, it is sufficient to throw existing exceptions such as IllegalStateException. For common scenarios a own exception class is reasonable.

      +
    • +
    • +

      User exceptions contain a message explaining the problem for end users. Therefore, we always define our own exception classes with a clear, brief, but detailed message.

      +
    • +
    +
    +
  • +
  • +

    Our own exceptions derive from an exception base class supporting

    + +
  • +
+
+
+

All this is offered by mmm-util-core, which we propose as a solution. +If you use the devon4j-rest module, this is already included. For Quarkus applications, you need to add the dependency manually.

+
+
+

If you want to avoid additional dependencies, you can implement your own solution for this by creating an abstract exception class ApplicationBusinessException extending from RuntimeException. For an example of this, see our Quarkus reference application.

+
+
+
+
+

Exception Example

+
+
+

Here is an exception class from our sample application:

+
+
+
+
public class IllegalEntityStateException extends ApplicationBusinessException {
+
+  private static final long serialVersionUID = 1L;
+
+  public IllegalEntityStateException(Object entity, Object state) {
+
+    this((Throwable) null, entity, state);
+  }
+
+
+  public IllegalEntityStateException(Object entity, Object currentState, Object newState) {
+
+    this(null, entity, currentState, newState);
+  }
+
+  public IllegalEntityStateException(Throwable cause, Object entity, Object state) {
+
+    super(cause, createBundle(NlsBundleApplicationRoot.class).errorIllegalEntityState(entity, state));
+  }
+
+  public IllegalEntityStateException(Throwable cause, Object entity, Object currentState, Object newState) {
+
+    super(cause, createBundle(NlsBundleApplicationRoot.class).errorIllegalEntityStateChange(entity, currentState,
+        newState));
+  }
+
+}
+
+
+
+

The message templates are defined in the interface NlsBundleRestaurantRoot as following:

+
+
+
+
public interface NlsBundleApplicationRoot extends NlsBundle {
+
+
+  @NlsBundleMessage("The entity {entity} is in state {state}!")
+  NlsMessage errorIllegalEntityState(@Named("entity") Object entity, @Named("state") Object state);
+
+
+  @NlsBundleMessage("The entity {entity} in state {currentState} can not be changed to state {newState}!")
+  NlsMessage errorIllegalEntityStateChange(@Named("entity") Object entity, @Named("currentState") Object currentState,
+      @Named("newState") Object newState);
+
+
+  @NlsBundleMessage("The property {property} of object {object} can not be changed!")
+  NlsMessage errorIllegalPropertyChange(@Named("object") Object object, @Named("property") Object property);
+
+  @NlsBundleMessage("There is currently no user logged in")
+  NlsMessage errorNoActiveUser();
+
+
+
+
+
+

Handling Exceptions

+
+
+

For catching and handling exceptions we follow these rules:

+
+
+
    +
  • +

    We do not catch exceptions just to wrap or to re-throw them.

    +
  • +
  • +

    If we catch an exception and throw a new one, we always have to provide the original exception as cause to the constructor of the new exception.

    +
  • +
  • +

    At the entry points of the application (e.g. a service operation) we have to catch and handle all throwables. This is done via the exception-facade-pattern via an explicit facade or aspect. The devon4j-rest module already provides ready-to-use implementations for this such as RestServiceExceptionFacade that you can use in your Spring application. For Quarkus, follow the Quarkus guide on exception handling.
    +The exception facade has to …​

    +
    +
      +
    • +

      log all errors (user errors on info and technical errors on error level)

      +
    • +
    • +

      ensure that the entire exception is passed to the logger (not only the message) so that the logger can capture the entire stacktrace and the root cause is not lost.

      +
    • +
    • +

      convert the error to a result appropriable for the client and secure for Sensitive Data Exposure. Especially for security exceptions only a generic security error code or message may be revealed but the details shall only be logged but not be exposed to the client. All internal exceptions are converted to a generic error with a message like:

      +
      +
      +
      +

      An unexpected technical error has occurred. We apologize any inconvenience. Please try again later.

      +
      +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+

Common Errors

+
+
+

The following errors may occur in any devon application:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 1. Common Exceptions
CodeMessageLink

TechnicalError

An unexpected error has occurred! We apologize any inconvenience. Please try again later.

TechnicalErrorUserException.java

ServiceInvoke

«original message of the cause»

ServiceInvocationFailedException.java

+
+
+
+
+
+1. Whether to use checked exceptions or not is a controversial topic. Arguments for both sides can be found under The Trouble with Checked Exceptions, Unchecked Exceptions — The Controversy, and Checked Exceptions are Evil. The arguments in favor of unchecked exceptions tend to prevail for applications built with devon4j. Therefore, unchecked exceptions should be used for a consistent style. +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-feature-toggle.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-feature-toggle.html new file mode 100644 index 00000000..abd1f56e --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-feature-toggle.html @@ -0,0 +1,521 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Feature-Toggles

+
+
+

Most software development teams use Feature-Branching to be able to work in parallel and maintain a stable main branch in the VCS. However, Feature-Branching might not be the ideal tool in every case because of big merges and isolation between development groups. In many cases, Feature-Toggles can avoid some of these problems, so they should definitely be considered for use in collaborative software development.

+
+
+

Implementation with the devonfw

+
+
+

To use Feature-Toggles with the devonfw, use the Framework Togglz because it has all the features generally needed and provides great documentation.

+
+
+

For a pretty minimal working example, also see this fork.

+
+
+

Preparation

+
+

The following example takes place in the oasp-sample-core project, so the necessary dependencies have to be added to the according pom.xml file. Required are the main Togglz project including Spring support, the Togglz console to graphically change the feature state and the Spring security package to handle authentication for the Togglz console.

+
+
+
+
<!-- Feature-Toggle-Framework togglz -->
+<dependency>
+  <groupId>org.togglz</groupId>
+  <artifactId>togglz-spring-boot-starter</artifactId>
+  <version>2.3.0.RC2</version>
+</dependency>
+
+<dependency>
+  <groupId>org.togglz</groupId>
+  <artifactId>togglz-console</artifactId>
+  <version>2.3.0.RC2</version>
+</dependency>
+
+<dependency>
+  <groupId>org.togglz</groupId>
+  <artifactId>togglz-spring-security</artifactId>
+  <version>2.3.0.RC2</version>
+</dependency>
+
+
+
+

In addition to that, the following lines have to be included in the spring configuration file application.properties

+
+
+
+
##configuration for the togglz Feature-Toggle-Framework
+togglz.enabled=true
+togglz.console.secured=false
+
+
+
+
+

Small features

+
+

For small features, a simple query of the toggle state is often enough to achieve the desired functionality. To illustrate this, a simple example follows, which implements a toggle to limit the page size returned by the staffmanagement. See here for further details.

+
+
+

This is the current implementation to toggle the feature:

+
+
+
+
// Uncomment next line in order to limit the maximum page size for the staff member search
+// criteria.limitMaximumPageSize(MAXIMUM_HIT_LIMIT);
+
+
+
+

To realise this more elegantly with Togglz, first an enum is required to configure the feature-toggle.

+
+
+
+
public enum StaffmanagementFeatures implements Feature {
+  @Label("Limit the maximum page size for the staff members")
+  LIMIT_STAFF_PAGE_SIZE;
+
+  public boolean isActive() {
+    return FeatureContext.getFeatureManager().isActive(this);
+  }
+}
+
+
+
+

To familiarize the Spring framework with the enum, add the following entry to the application.properties file.

+
+
+
+
togglz.feature-enums=io.oasp.gastronomy.restaurant.staffmanagement.featuremanager.StaffmanagementFeatures
+
+
+
+

After that, the toggle can be used easily by calling the isActive() method of the enum.

+
+
+
+
if (StaffmanagementFeatures.LIMIT_STAFF_PAGE_SIZE.isActive()) {
+  criteria.limitMaximumPageSize(MAXIMUM_HIT_LIMIT);
+}
+
+
+
+

This way, you can easily switch the feature on or off by using the administration console at http://localhost:8081/devon4j-sample-server/togglz-console. If you get redirected to the login page, just sign in with any valid user (e.g. admin).

+
+
+
+

Extensive features

+
+

When implementing extensive features, you might want to consider using the strategy design pattern to maintain the overview of your software. The following example is an implementation of a feature which adds a 25% discount to all products managed by the offermanagement.

+
+
+
Therefore there are two strategies needed:
+
    +
  1. +

    Return the offers with the normal price

    +
  2. +
  3. +

    Return the offers with a 25% discount

    +
  4. +
+
+
+

The implementation is pretty straight forward so use this as a reference. Compare this for further details.

+
+
+
+
@Override
+@RolesAllowed(PermissionConstants.FIND_OFFER)
+public PaginatedListTo<OfferEto> findOfferEtos(OfferSearchCriteriaTo criteria) {
+  criteria.limitMaximumPageSize(MAXIMUM_HIT_LIMIT);
+  PaginatedListTo<OfferEntity> offers = getOfferDao().findOffers(criteria);
+
+
+  if (OffermanagementFeatures.DISCOUNT.isActive()) {
+    return getOfferEtosDiscount(offers);
+  } else {
+    return getOfferEtosNormalPrice(offers);
+  }
+
+}
+
+
+// Strategy 1: Return the OfferEtos with the normal price
+private PaginatedListTo<OfferEto> getOfferEtosNormalPrice(PaginatedListTo<OfferEntity> offers) {
+  return mapPaginatedEntityList(offers, OfferEto.class);
+}
+
+// Strategy 2: Return the OfferEtos with the new, discounted price
+private PaginatedListTo<OfferEto> getOfferEtosDiscount(PaginatedListTo<OfferEntity> offers) {
+  offers = addDiscountToOffers(offers);
+  return mapPaginatedEntityList(offers, OfferEto.class);
+}
+
+private PaginatedListTo<OfferEntity> addDiscountToOffers(PaginatedListTo<OfferEntity> offers) {
+  for (OfferEntity oe : offers.getResult()) {
+    Double oldPrice = oe.getPrice().getValue().doubleValue();
+
+    // calculate the new price and round it to two decimal places
+    BigDecimal newPrice = new BigDecimal(oldPrice * 0.75);
+    newPrice = newPrice.setScale(2, RoundingMode.HALF_UP);
+
+    oe.setPrice(new Money(newPrice));
+  }
+
+  return offers;
+}
+
+
+
+
+
+
+

Guidelines for a successful use of feature-toggles

+
+
+

The use of feature-toggles requires a specified set of guidelines to maintain the overview on the software. The following is a collection of considerations and examples for conventions that are reasonable to use.

+
+
+

Minimize the number of toggles

+
+

When using too many toggles at the same time, it is hard to maintain a good overview of the system and things like finding bugs are getting much harder. Additionally, the management of toggles in the configuration interface gets more difficult due to the amount of toggles.

+
+
+

To prevent toggles from piling up during development, a toggle and the associated obsolete source code should be removed after the completion of the corresponding feature. In addition to that, the existing toggles should be revisited periodically to verify that these are still needed and therefore remove legacy toggles.

+
+
+
+

Consistent naming scheme

+
+

A consistent naming scheme is the key to a structured and easily maintainable set of features. This should include the naming of toggles in the source code and the appropriate naming of commit messages in the VCS. The following section contains an example for a useful naming scheme including a small example.

+
+
+

Every Feature-Toggle in the system has to get its own unique name without repeating any names of features, which were removed from the system. The chosen names should be descriptive names to simplify the association between toggles and their purpose. If the feature should be split into multiple sub-features, you might want to name the feature like the parent feature with a describing addition. If for example you want to split the DISCOUNT feature into the logic and the UI part, you might want to name the sub-features DISCOUNT_LOGIC and DISCOUNT_UI.

+
+
+

The entry in the togglz configuration enum should be named identically to the aforementioned feature name. The explicitness of feature names prevents a confusion between toggles due to using multiple enums.

+
+
+

Commit messages are very important for the use of feature-toggles and also should follow a predefined naming scheme. You might want to state the feature name at the beginning of the message, followed by the actual message, describing what the commit changes to the feature. An example commit message could look like the following:

+
+
+
+
DISCOUNT: Add the feature-toggle to the offermanagement implementation.
+
+
+
+

Mentioning the feature name in the commit message has the advantage, that you can search your git log for the feature name and get every commit belonging to the feature. An example for this using the tool grep could look like this.

+
+
+
+
$ git log | grep -C 4 DISCOUNT
+
+commit 034669a48208cb946cc6ba8a258bdab586929dd9
+Author: Florian Luediger <florian.luediger@somemail.com>
+Date:   Thu Jul 7 13:04:37 2016 +0100
+
+DISCOUNT: Add the feature-toggle to the offermanagement implementation.
+
+
+
+

To keep track of all the features in your software system, a platform like GitHub offers issues. When creating an issue for every feature, you can retrace who created the feature and who is assigned to complete its development. When referencing the issue from commits, you also have links to all the relevant commits from the issue view.

+
+
+
+

Placement of toggle points

+
+

To maintain a clean codebase, you definitely want to avoid using the same toggle in different places in the software. There should be one single query of the toggle which should be able to toggle the whole functionality of the feature. If one single toggle point is not enough to switch the whole feature on or off, you might want to think about splitting the feature into multiple ones.

+
+
+
+

Use of fine-grained features

+
+

Bigger features in general should be split into multiple sub-features to maintain the overview on the codebase. These sub-features get their own feature-toggle and get implemented independently.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-flyway.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-flyway.html new file mode 100644 index 00000000..69a7502d --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-flyway.html @@ -0,0 +1,392 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Flyway

+
+
+

Flyway is a tool for database migration and schema versioning. +See why for a motivation for using flyway.

+
+
+

Flyway can be used standalone e.g. via flyway-maven-plugin or can be integrated directly into your app to make sure the database migration takes place on startup. +For simplicity we recommend integrating flyway into your app. +However, you need to be aware that your app then needs database access with full schema owner permissions.

+
+
+

Organizational Advice

+
+
+

A few considerations with respect to project organization will help to implement maintainable Flyway migrations.

+
+
+

At first, testing and production environments must be clearly and consistently distinguished. Use the following directory structure to achieve this distinction:

+
+
+
+
  src/main/resources/db
+  src/test/resources/db
+
+
+
+

Although this structure introduces redundancies, the benefit outweighs this disadvantage. +An even more fine-grained production directory structure which contains one sub folder per release should be implemented:

+
+
+
+
  src/main/resources/db/migration/releases/X.Y/x.sql
+
+
+
+

Emphasizing that migration scripts below the current version must never be changed will aid the second advantage of migrations: it will always be clearly reproducible in which state the database currently is. +Here, it is important to mention that, if test data is required, it must be managed separately from the migration data in the following directory:

+
+
+
+
  src/test/resources/db/migration/
+
+
+
+

The migration directory is added to aid easy usage of Flyway defaults. +Of course, test data should also be managed per release, just like production data.

+
+
+

With regard to content, separation of concerns (SoC) is an important goal. SoC can be achieved by distinguishing and writing multiple scripts with respect to business components/use cases (or database tables in case of large volumes of master data [1]). Comprehensible file names aid this separation.

+
+
+

It is important to have clear responsibilities regarding the database, the persistence layer (JPA), and migrations. Therefore a dedicated database expert should be in charge of any migrations performed or she should at least be informed before any change to any of the mentioned parts is applied.

+
+
+
+
+

Technical Configuration

+
+
+

Database migrations can be SQL based or Java based.

+
+
+

To enable auto migration on startup (not recommended for productive environment) set the following property in the application.properties file for an environment.

+
+
+
+
flyway.enabled=true
+flyway.clean-on-validation-error=false
+
+
+
+

For development environment it is helpful to set both properties to true in order to simplify development. For regular environments flyway.clean-on-validation-error should be false.

+
+
+

If you want to use Flyway set the following property in any case to prevent Hibernate from doing changes on the database (pre-configured by default in devonfw):

+
+
+
+
spring.jpa.hibernate.ddl-auto=validate
+
+
+
+

The setting must be communicated to and coordinated with the customer and their needs. +In acceptance testing the same configuration as for the production environment should be enabled.

+
+
+

Since migration scripts will also be versioned the end-of-line (EOL) style must be fixated according to this issue. This is however solved in flyway 4.0+ and the latest devonfw release. +Also, the version numbers of migration scripts should not consist of simple ascending integer numbers like V0001…​, V0002…​, …​ This naming may lead to problems when merging branches. Instead the usage of timestamps as version numbers will help to avoid such problems.

+
+
+
+
+

Naming Conventions

+
+
+

Database migrations should follow this naming convention: +V<version>__<description> (e.g.: V12345__Add_new_table.sql).

+
+
+

It is also possible to use Flyway for test data. To do so place your test data migrations in src/main/resources/db/testdata/ and set property

+
+
+
+
flyway.locations=classpath:db/migration/releases,classpath:db/migration/testdata
+
+
+
+

Then Flyway scans the additional location for migrations and applies all in the order specified by their version. If migrations V0001__... and V0002__... exist and a test data migration should be applied in between you can name it V0001_1__....

+
+
+
+
+
+
+1. "Stammdaten" in German. +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-hana.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-hana.html new file mode 100644 index 00000000..3f9b7506 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-hana.html @@ -0,0 +1,366 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==SAP HANA

+
+
+

This section contains hints for those who use SAP HANA, a very powerful and fast RDBMS. If you have chosen a different persistence technology on purpose you can simply ignore this guide. Besides general hints about the driver there are tips for more tight integration with other SAP features or products.

+
+
+

Driver

+
+
+

The HANA JDBC driver is available in Maven Central, which makes your life very easy. All you need is the following Maven dependency:

+
+
+
+
<dependency>
+  <groupId>com.sap.cloud.db.jdbc</groupId>
+  <artifactId>ngdbc</artifactId>
+  <version>${hana.driver.version}</version>
+</dependency>
+
+
+
+

The variable hana.driver.version may be 2.3.55, but check yourself at http://central.maven.org/maven2/com/sap/cloud/db/jdbc/ngdbc/ for the proper or most recent version.

+
+
+
+
+

Developer Usage

+
+
+

For your local development environment you will love the free SAP HANA, Express Edition.

+
+
+

You can run HANA in several ways:

+
+
+ +
+
+

To get started with SAP HANA, Express Edition you can check out the tutorials at the SAP Developer Center.

+
+
+
+
+

Pooling

+
+
+

TODO

+
+
+
+ +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-i18n.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-i18n.html new file mode 100644 index 00000000..4015cd51 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-i18n.html @@ -0,0 +1,347 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Internationalization +Internationalization (I18N) is about writing code independent from locale-specific information. +For I18N of text messages we are suggesting +mmm native-language-support.

+
+
+

In devonfw we have developed a solution to manage text internationalization. devonfw solution comes into two aspects:

+
+
+
    +
  • +

    Bind locale information to the user.

    +
  • +
  • +

    Get the messages in the current user locale.

    +
  • +
+
+
+

Binding locale information to the user

+
+
+

We have defined two different points to bind locale information to the user, depending on whether the user is authenticated or not.

+
+
+
    +
  • +

    User not authenticated: devonfw intercepts the unsecured request and extracts the locale from it. At first, we try to extract a language parameter from the request and, if that is not possible, we extract the locale from the Accept-Language header.

    +
  • +
  • +

    User authenticated. During the login process, application developers are responsible for filling the language parameter in the UserProfile class. This language parameter could be obtained from DB, LDAP, the request, etc. In the devonfw sample we get the locale information from the database.

    +
  • +
+
+
+

This image shows the entire process:

+
+
+
+Internationalization +
+
+
+
+
+

Getting internationalized messages

+
+
+

devonfw has a bean that manages i18n message resolution, the ApplicationLocaleResolver. This bean is responsible for getting the current user, extracting locale information from it, and reading the correct properties file to get the message.

+
+
+

The i18n properties file must be called ApplicationMessages_la_CO.properties where la=language and CO=country. This is an example of an i18n properties file for the English language to translate devonfw sample user roles:

+
+
+

ApplicationMessages_en_US.properties

+
+
+
+
admin=Admin
+
+
+
+

You should define an ApplicationMessages_la_CO.properties file for every language that your application needs.

+
+
+

ApplicationLocaleResolver bean is injected in AbstractComponentFacade class so you have available this bean in logic layer so you only need to put this code to get an internationalized message:

+
+
+
+
String msg = getApplicationLocaleResolver().getMessage("mymessage");
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jdk.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jdk.html new file mode 100644 index 00000000..5bf0a1f9 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jdk.html @@ -0,0 +1,639 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Java Development Kit

+
+
+

The Java Development Kit is an implementation of the Java platform. It provides the Java Virtual Machine (JVM) and the Java Runtime Environment (JRE).

+
+
+

Editions

+
+
+

The JDK exists in different editions:

+
+
+ +
+
+

As Java is evolving and also complex maintaining a JVM requires a lot of energy. +Therefore many alternative JDK editions are unable to cope with this and support latest Java versions and according compatibility. +Unfortunately OpenJDK only maintains a specific version of Java for a relative short period of time before moving to the next major version. +In the end, this technically means that OpenJDK is continuous beta and can not be used in production for reasonable software projects. +As OracleJDK changed its licensing model and can not be used for commercial usage even during development, things can get tricky. +You may want to use OpenJDK for development and OracleJDK only in production. +However, e.g. OpenJDK 11 never released a version that is stable enough for reasonable development (e.g. javadoc tool is broken and fixes are not available of OpenJDK 11 - fixed in 11.0.3 what is only available as OracleJDK 11 or you need to go to OpenJDK 12+, what has other bugs) so in the end there is no working release of OpenJDK 11. +This more or less forces you to use OracleJDK what requires you to buy a subscription so you can use it for commercial development. +However, there is AdoptOpenJDK that provides forked releases of OpenJDK with bug-fixes what might be an option. +Anyhow, as you want to have your development environment close to production, the productively used JDK (most likely OracleJDK) should be preferred also for development.

+
+
+
+
+

Upgrading

+
+
+

Until Java 8 compatibility was one of the key aspects for Java version updates (after the mess on the Swing updates with Java2 many years ago). +However, Java 9 introduced a lot of breaking changes. +This documentation wants to share the experience we collected in devonfw when upgrading from Java 8 to newer versions. +First of all we separate runtime changes that you need if you want to build your software with JDK 8 but such that it can also run on newer versions (e.g. JRE 11) +from changes required to also build your software with more recent JDKs (e.g. JDK 11 or 12).

+
+
+

Runtime Changes

+
+

This section describes required changes to your software in order to make it run also with versions newer than Java 8.

+
+
+

Classes removed from JDK

+
+

The first thing that most users hit when running their software with newer Java versions is a ClassNotFoundException like this:

+
+
+
+
Caused by: java.lang.ClassNotFoundException: javax.xml.bind.JAXBException
+
+
+
+

As Java 9 introduced a module system with Jigsaw, the JDK that has been a monolithic mess is now a well-defined set of structured modules. +Some of the classes that used to come with the JDK moved to modules that were not available by default in Java 9 and have even been removed entirely in later versions of Java. +Therefore you should simply treat such code just like any other 3rd party component that you can add as a (maven) dependency. +The following table gives you the required hints to make your software work even with such classes / modules removed from the JDK (please note that the specified version is just a suggestion that worked, feel free to pick a more recent or more appropriate version):

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Dependencies for classes removed from Java 8 since 9+
ClassGroupIdArtifactIdVersion

javax.xml.bind.*

javax.xml.bind

jaxb-api

2.3.1

com.sun.xml.bind.*

org.glassfish.jaxb

jaxb-runtime

2.3.1

java.activation.*

javax.activation

javax.activation-api

1.2.0

java.transaction.*

javax.transaction

javax.transaction-api

1.2

java.xml.ws.*

javax.xml.ws

jaxws-api

2.3.1

javax.jws.*

javax.jws

javax.jws-api

1.1

javax.annotation.*

javax.annotation

javax.annotation-api

1.3.2

+
+
+

3rd Party Updates

+
+

Further, internal and unofficial APIs (e.g. sun.misc.Unsafe) have been removed. +These are typically not used by your software directly but by low-level 3rd party libraries like asm that need to be updated. +Also simple things like the Java version have changed (from 1.8.x to 9.x, 10.x, 11.x, 12.x, etc.). +Some 3rd party libraries were parsing the Java version in a very naive way, making them unable to be used with Java 9+:

+
+
+
+
Caused by: java.lang.NullPointerException
+   at org.apache.maven.surefire.shade.org.apache.commons.lang3.SystemUtils.isJavaVersionAtLeast (SystemUtils.java:1626)
+
+
+
+

Therefore the following table gives an overview of common 3rd party libraries that have been affected by such breaking changes and need to be updated to at least the specified version:

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2. Minimum recommended versions of common 3rd party for Java 9+
GroupIdArtifactIdVersionIssue

org.apache.commons

commons-lang3

3.7

LANG-1365

cglib

cglib

3.2.9

102, 93, 133

org.ow2.asm

asm

7.1

2941

org.javassist

javassist

3.25.0-GA

194, 228, 246, 171

+
+
+

ResourceBundles

+
+

For internationalization (i18n) and localization (l10n) ResourceBundle is used for language and country specific texts and configurations as properties (e.g. MyResourceBundle_de.properties). With Java modules there are changes and impacts you need to know to get things working. The most important change is documented in the JavaDoc of ResourceBundle. However, instead of using ResourceBundleProvider and refactoring your entire code causing incompatibilities, you can simply put the resource bundles in a regular JAR on the classpath rather than a named module (or into the lauching app). +If you want to implement (new) Java modules with i18n support, you can have a look at mmm-nls.

+
+
+
+
+

Buildtime Changes

+
+

If you also want to change your build to work with a recent JDK you also need to ensure that test frameworks and maven plugins properly support this.

+
+
+

Findbugs

+
+

Findbugs does not work with Java 9+ and is actually a dead project. +The new findbugs is SpotBugs. +For maven the new solution is spotbugs-maven-plugin:

+
+
+
+
<plugin>
+  <groupId>com.github.spotbugs</groupId>
+  <artifactId>spotbugs-maven-plugin</artifactId>
+  <version>3.1.11</version>
+</plugin>
+
+
+
+
+

Test Frameworks

+ + ++++++ + + + + + + + + + + + + + + + + +
Table 3. Minimum recommended versions of common 3rd party test frameworks for Java 9+
GroupIdArtifactIdVersionIssue

org.mockito

mockito-core

2.23.4

1419, 1696, 1607, 1594, 1577, 1482

+
+
+

Maven Plugins

+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4. Minimum recommended versions of common maven plugins for Java 9+
GroupIdArtifactId(min.) VersionIssue

org.apache.maven.plugins

maven-compiler-plugin

3.8.1

x

org.apache.maven.plugins

maven-surefire-plugin

2.22.2

SUREFIRE-1439

org.apache.maven.plugins

maven-surefire-report-plugin

2.22.2

SUREFIRE-1439

org.apache.maven.plugins

maven-archetype-plugin

3.1.0

x

org.apache.maven.plugins

maven-javadoc-plugin

3.1.0

x

org.jacoco

jacoco-maven-plugin

0.8.3

663

+
+
+

Maven Usage

+
+

With Java modules you can not run Javadoc standalone anymore or you will get this error when running mvn javadoc:javadoc:

+
+
+
+
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-javadoc-plugin:3.1.1:javadoc (default-cli) on project mmm-base: An error has occurred in Javadoc report generation:
+[ERROR] Exit code: 1 - error: module not found: io.github.mmm.base
+[ERROR]
+[ERROR] Command line was: /projects/mmm/software/java/bin/javadoc @options @packages @argfile
+
+
+
+

As a solution or workaround you need to include the compile goal into your build lifecycle so the module-path is properly configured:

+
+
+
+
mvn compile javadoc:javadoc
+
+
+
+
+
+
+
+ +
+
+

We want to give credits and say thanks to the following articles that have been there before and helped us on our way:

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jee.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jee.html new file mode 100644 index 00000000..78d7f4e0 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jee.html @@ -0,0 +1,388 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==JEE

+
+
+

This section is about Java Enterprise Edition (JEE). +According to our key principles we focus on open standards. +For Java this means that we consider official standards from Java Standard and Enterprise Edition as first choice for considerations. +Therefore we also decided to recommend JAX-RS over SpringMVC as the latter is proprietary. +Only if an existing Java standard is not suitable for current demands such as Java Server Faces (JSF), we do not officially recommend it (while you are still free to use it if you have good reasons to do so). +In all other cases we officially suggest the according standard and use it in our guides, code-samples, sample application, modules, templates, etc. +Examples for such standards are JPA, JAX-RS, JAX-WS, JSR330, JSR250, JAX-B, etc.

+
+
+

Application-Server

+
+
+

We designed everything based on standards to work with different technology stacks and servlet containers. +However, we strongly encourage you to use modern and lightweight frameworks such as spring or quarkus. +You are free to decide for a JEE application server but here is a list of good reasons for our decision:

+
+
+
    +
  • +

    Up-to-date

    +
    +

    With spring or quarkus you easily keep up to date with evolving technologies (microservices, reactive, NoSQL, etc.). +Most application servers put you in a jail with old legacy technology. +In many cases you are even forced to use a totally outdated version of java (JVM/JDK). +This may even cause severe IT-Security vulnerabilities but with expensive support you might get updates. +Also with lightweight open-source frameworks you need to be aware that for IT-security you need to update frequently, which can cost quite a lot of additional maintenance effort.

    +
    +
  • +
  • +

    Development speed

    +
    +

    With spring-boot you can implement and especially test your individual logic very fast. Starting the app in your IDE is very easy, fast, and realistic (close to production). You can easily write JUnit tests that start up your server application to e.g. test calls to your remote services via HTTP fast and easy. For application servers you need to bundle and deploy your app, which takes more time and limits you in various ways. We are aware that this has improved in the past but also spring continuously improves and is always way ahead in this area. Further, with spring you have your configurations bundled together with the code in version control (still with ability to handle different environments) while with application servers these are configured externally and can not be easily tested during development.

    +
    +
  • +
  • +

    Documentation

    +
    +

    Spring and also quarkus have an extremely open and active community. +There is documentation for everything available for free on the web. +You will find solutions to almost any problem on platforms like stackoverflow. +If you have a problem you are only a google search away from your solution. +This is very much different for proprietary application server products.

    +
    +
  • +
  • +

    Helpful Exception Messages

    +
    +

    Especially spring is really great for developers on exception messages. +If you do something wrong you get detailed and helpful messages that guide you to the problem or even the solution. +This is not as great in application servers.

    +
    +
  • +
  • +

    Future-proof

    +
    +

    Spring has evolved really awesome over time. +Since its 1.0 release in 2004 spring has continuously been improved and always caught up with important trends and innovations. +Even in critical situations, when the company behind it (interface21) was sold, spring went on perfectly. +Quarkus on the other hand is relatively new. +It does not have to carry a large legacy history and is therefore most state-of-the-art for modern projects esp. in cloud environments. +JEE went through a lot of trouble and crisis. +Just look at the EJB pain stories. +This happened often in the past and also recently. +See JEE 8 in crisis.

    +
    +
  • +
  • +

    Free

    +
    +

    Spring and quarkus including their ecosystems are free and open-source. +It still perfectly integrates with commercial solutions for specific needs. +Most application servers are commercial and cost a lot of money. +As of today the ROI for this is questionable.

    +
    +
  • +
  • +

    Cloud-native

    +
    +

    Quarkus is designed for cloud-native projects from the start. +With spring this is also available via spring-native. +Using an application server will effectively prevent you from going to the cloud smoothly.

    +
    +
  • +
  • +

    Fun

    +
    +

    If you go to conferences or ask developers you will see that spring or quarkus is popular and fun. +If new developers are forced to use an old application server product they will be less motivated or even get frustrated. +Especially in today’s agile projects this is a very important aspect. +In the end you will get into trouble with maintenance on the long run if you rely on a proprietary application server.

    +
    +
  • +
+
+
+

Of course the vendors of application servers will tell you a different story. +This is simply because they still make a lot of money from their products. +We do not get paid from application servers nor from spring, quarkus or any other IT product company. +We are just developers who love to build great systems. +A good reason for application servers is that they combine a set of solutions to particular aspects to one product that helps to standardize your IT. +However, devonfw fills exactly this gap for the spring and quarkus ecosystems in a very open and flexible way. +However, there is one important aspect that you need to understand and be aware of:

+
+
+

Some big companies decided for a specific application server as their IT strategy. +They may have hundreds of apps running with this application server. +All their operators and developers have learned a lot of specific skills for this product and are familiar with it. +If you are implementing yet another (small) app in this context it could make sense to stick with this application server. +However, also they have to be aware that with every additional app they increase their technical debt. +So actively help your customer and consult him to make the right choices for the future.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jms.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jms.html new file mode 100644 index 00000000..c4df76ff --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jms.html @@ -0,0 +1,353 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Messaging

+
+
+

Messaging in Java is done using the JMS standard from JEE.

+
+
+

Products

+
+
+

For messaging you need to choose a JMS provider such as:

+
+
+ +
+
+
+
+

Receiver

+
+
+

As a receiver of messages receives data from other systems, it is located in the service-layer.

+
+
+

JMS Listener

+
+

A JmsListener is a class listening and consuming JMS messages. It should carry the suffix JmsListener and implement the MessageListener interface or have its listener method annotated with @JmsListener. This is illustrated by the following example:

+
+
+
+
@Named
+@Transactional
+public class BookingJmsListener /* implements MessageListener */ {
+
+  @Inject
+  private Bookingmanagement bookingmanagement;
+
+  @Inject
+  private MessageConverter messageConverter;
+
+  @JmsListener(destination = "BOOKING_QUEUE", containerFactory = "jmsListenerContainerFactory")
+  public void onMessage(Message message) {
+    try {
+      BookingTo bookingTo = (BookingTo) this.messageConverter.fromMessage(message);
+      this.bookingmanagement.importBooking(bookingTo);
+    } catch (MessageConversionException | JMSException e) {
+      throw new InvalidMessageException(message);
+    }
+  }
+}
+
+
+
+
+
+
+

Sender

+
+
+

The sending of JMS messages is considered as any other sending of data like kafka messages or RPC calls via REST using service-client, gRPC, etc. +This will typically happen directly from a use-case in the logic-layer. +However, the technical complexity of the communication and protocols itself shall be hidden from the use-case and not be part of the logic layer. +With spring we can simply use JmsTemplate to do that.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jmx.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jmx.html new file mode 100644 index 00000000..194c7f37 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jmx.html @@ -0,0 +1,287 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==JMX

+
+
+

JMX (Java Management Extensions) is the official Java monitoring solution. +It is part of the JDK. +Your application may provide monitoring information or receive monitoring related commands via MBeans. +There is a huge amount of information about JMX available. +A good starting point might be JMX on wikipedia.

+
+
+

Traditionally JMX uses RMI for communication, which is rather a discouraged protocol that should be avoided. +In many environments HTTP(S) is preferred, so be careful on deciding if JMX is the right solution. +However, you can even expose existing JMX MBeans via HTTP(S) instead of RMI. +Traditionally JMX also allows administrators not only to read data but also to write data typically in order to re-configure the app or do other such related tasks (e.g. clear caches). +Today, configuration and monitoring are clearly separated aspects and should not be mixed. +With container technology the ability to re-configure an app as a running process has become an outdated feature. +Instead, you simply restart the container to apply changes. +With cloud-native trends and aims for simplification the importance of JMX is continuously dropping. +Instead new projects tend to use more modern and lightweight solutions.

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jpa-idref.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jpa-idref.html new file mode 100644 index 00000000..050e2574 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jpa-idref.html @@ -0,0 +1,436 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==IdRef

+
+
+

IdRef can be used to reference other entities in TOs in order to make them type-safe and semantically more expressive. +It is an optional concept in devon4j for more complex applications that make intensive use of relations and foreign keys.

+
+
+

Motivation

+
+
+

Assuming you have a method signature like the following:

+
+
+
+
Long approve(Long cId, Long cuId);
+
+
+
+

So what are the parameters? What is returned?

+
+
+

IdRef is just a wrapper for a Long used as foreign key. This makes our signature much more expressive and self-explanatory:

+
+
+
+
IdRef<Contract> approve(IdRef<Contract> cId, IdRef<Customer> cuId);
+
+
+
+

Now we can easily see, that the result and the parameters are foreign-keys and which entity they are referring to via their generic type. +We can read the javadoc of these entities from the generic type and understand the context. +Finally, when passing IdRef objects to such methods, we get compile errors in case we accidentally place parameters in the wrong order.

+
+
+
+
+

IdRef and Mapping

+
+
+

In order to easily map relations from entities to transfer-objects and back, we can easily also put according getters and setters into our entities:

+
+
+
+
public class ContractEntity extends ApplicationPersistenceEntity implements Contract {
+
+  private CustomerEntity customer;
+
+  ...
+
+  @ManyToOne(fetch = FetchType.LAZY)
+  @JoinColumn(name = "CUSTOMER_ID")
+  public CustomerEntity getCustomer() {
+    return this.customer;
+  }
+
+  public void setCustomer(CustomerEntity customer) {
+    this.customer = customer;
+  }
+
+  @Transient
+  public IdRef<Customer> getCustomerId() {
+    return IdRef.of(this.customer);
+  }
+
+  public void setCustomerId(IdRef<Customer> customerId) {
+    this.customer = JpaHelper.asEntity(customerId, CustomerEntity.class);
+  }
+}
+
+
+
+

Now, ensure that you have the same getters and setters for customerId in your Eto:

+
+
+
+
public class ContractEto extends AbstractEto implements Contract {
+
+  private IdRef<Customer> customerId;
+
+  ...
+
+  public IdRef<Customer> getCustomerId() {
+    return this.customerId;
+  }
+
+  public void setCustomerId(IdRef<Customer> customerId) {
+    this.customerId = customerId;
+  }
+}
+
+
+
+

This way the bean-mapper can automatically map from your entity (ContractEntity) to your Eto (ContractEto) and vice-versa.

+
+
+
+
+

JpaHelper and EntityManager access

+
+
+

In the above example we used JpaHelper.asEntity to convert the foreign key (IdRef<Customer>) to the according entity (CustomerEntity). +This will internally use EntityManager.getReference to properly create a JPA entity. +The alternative "solution" that may be used with Long instead of IdRef is typically:

+
+
+
+
  public void setCustomerId(IdRef<Customer> customerId) {
+    Long id = null;
+    if (customerId != null) {
+      id = customerId.getId();
+    }
+    if (id == null) {
+      this.customer = null;
+    } else {
+      this.customer = new CustomerEntity();
+      this.customer.setId(id);
+    }
+  }
+
+
+
+

While this "solution" works in most cases, we discovered some more complex cases, where it fails with very strange hibernate exceptions. +When cleanly creating the entity via EntityManager.getReference instead it is working in all cases. +So how can JpaHelper.asEntity as a static method access the EntityManager? +Therefore we need to initialize this as otherwise you may see this exception:

+
+
+
+
java.lang.IllegalStateException: EntityManager has not yet been initialized!
+	at com.devonfw.module.jpa.dataaccess.api.JpaEntityManagerAccess.getEntityManager(JpaEntityManagerAccess.java:38)
+	at com.devonfw.module.jpa.dataaccess.api.JpaHelper.asEntity(JpaHelper.java:49)
+
+
+
+

For main usage in your application we assume that there is only one instance of EntityManager. +Therefore we can initialize this instance during the spring boot setup. +This is what we provide for you in JpaInitializer +when creating a devon4j app.

+
+
+

JpaHelper and spring-test

+
+

Further, you also want your code to work in integration tests. +Spring-test provides a lot of magic under the hood to make integration testing easy for you. +To boost the performance when running multiple tests, spring is smart and avoids creating the same spring-context multiple times. +Therefore it stores these contexts so that if a test-case is executed with a specific spring-configuration that has already been setup before, +the same spring-context can be reused instead of creating it again. +However, your tests may have multiple spring configurations leading to multiple spring-contexts. +Even worse these tests can run in any order leading to switching between spring-contexts back and forth. +Therefore, a static initializer during the spring boot setup can lead to strange errors as you can get the wrong EntityManager instance. +In order to fix such problems, we provide a solution pattern via DbTest ensuring for every test, +that the proper instance of EntityManager is initialized. +Therefore you should derive directly or indirectly (e.g. via ComponentDbTest and SubsystemDbTest) from DbTest or adopt your own way to apply this pattern to your tests, when using JpaHelper. +This already happens if you are extending ApplicationComponentTest or ApplicationSubsystemTest.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jpa-performance.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jpa-performance.html new file mode 100644 index 00000000..af9303a6 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jpa-performance.html @@ -0,0 +1,347 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==JPA Performance +When using JPA the developer sometimes does not see or understand where and when statements to the database are triggered.

+
+
+
+
+

Establishing expectations Developers shouldn’t expect to sprinkle magic pixie dust on POJOs in hopes they will become persistent.

+
+
+
+— Dan Allen
+https://epdf.tips/seam-in-action.html +
+
+
+

So in case you do not understand what is going on under the hood of JPA, you will easily run into performance issues due to lazy loading and other effects.

+
+
+

N plus 1 Problem

+
+
+

The most prominent phenomenon is called the N+1 Problem. +We use entities from our MTS demo app as an example to explain the problem. +There is a DishEntity that has a @ManyToMany relation to +IngredientEntity. +Now we assume that we want to iterate all ingredients for a dish like this:

+
+
+
+
DishEntity dish = dao.findDishById(dishId);
+BigDecimal priceWithAllExtras = dish.getPrice();
+for (IngredientEntity ingredient : dish.getExtras()) {
+  priceWithAllExtras = priceWithAllExtras.add(ingredient.getPrice());
+}
+
+
+
+

Now dish.getExtras() is loaded lazy. Therefore the JPA vendor will provide a list with lazy initialized instances of IngredientEntity that only contain the ID of that entity. Now with every call of ingredient.getPrice() we technically trigger an SQL query statement to load the specific IngredientEntity by its ID from the database. +Now findDishById caused 1 initial query statement and for any number N of ingredients we are causing an additional query statement. This makes a total of N+1 statements. As causing statements to the database is an expensive operation with a lot of overhead (creating connection, etc.) this ends in bad performance and is therefore a problem (the N+1 Problem).

+
+
+
+
+

Solving N plus 1 Problem

+
+
+

To solve the N+1 Problem you need to change your code to only trigger a single statement instead. This can be achieved in various ways. The most universal solution is to use FETCH JOIN in order to pre-load the nested N child entities into the first level cache of the JPA vendor implementation. This will behave very similar as if the @ManyToMany relation to IngredientEntity was having FetchType.EAGER but only for the specific query and not in general. Because changing @ManyToMany to FetchType.EAGER would cause bad performance for other usecases where only the dish but not its extra ingredients are needed. For this reason all relations, including @OneToOne should always be FetchType.LAZY. Back to our example we simply replace dao.findDishById(dishId) with dao.findDishWithExtrasById(dishId) that we implement by the following JPQL query:

+
+
+
+
SELECT dish FROM DishEntity dish
+  LEFT JOIN FETCH dish.extras
+  WHERE dish.id = :dishId
+
+
+
+

The rest of the code does not have to be changed but now dish.getExtras() will get the IngredientEntity from the first level cache where it was fetched by the initial query above.

+
+
+

Please note that if you only need the sum of the prices from the extras you can also create a query using an aggregator function:

+
+
+
+
SELECT sum(dish.extras.price) FROM DishEntity dish
+
+
+
+

As you can see you need to understand the concepts in order to get good performance.

+
+
+

There are many advanced topics such as creating database indexes or calculating statistics for the query optimizer to get the best performance. For such advanced topics we recommend to have a database expert in your team that cares about such things. However, understanding the N+1 Problem and its solutions is something that every Java developer in the team needs to understand.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jpa-query.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jpa-query.html new file mode 100644 index 00000000..2ada4e38 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jpa-query.html @@ -0,0 +1,696 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Queries +The Java Persistence API (JPA) defines its own query language, the java persistence query language (JPQL) (see also JPQL tutorial), which is similar to SQL but operates on entities and their attributes instead of tables and columns.

+
+
+

The simplest CRUD-Queries (e.g. find an entity by its ID) are already build in the devonfw CRUD functionality (via Repository or DAO). For other cases you need to write your own query. We distinguish between static and dynamic queries. Static queries have a fixed JPQL query string that may only use parameters to customize the query at runtime. Instead, dynamic queries can change their clauses (WHERE, ORDER BY, JOIN, etc.) at runtime depending on the given search criteria.

+
+
+

Static Queries

+
+
+

E.g. to find all DishEntries (from MTS sample app) that have a price not exceeding a given maxPrice we write the following JPQL query:

+
+
+
+
SELECT dish FROM DishEntity dish WHERE dish.price <= :maxPrice
+
+
+
+

Here dish is used as alias (variable name) for our selected DishEntity (what refers to the simple name of the Java entity class). With dish.price we are referring to the Java property price (getPrice()/setPrice(…​)) in DishEntity. A named variable provided from outside (the search criteria at runtime) is specified with a colon (:) as prefix. Here with :maxPrice we reference to a variable that needs to be set via query.setParameter("maxPrice", maxPriceValue). JPQL also supports indexed parameters (?) but they are discouraged because they easily cause confusion and mistakes.

+
+
+

Using Queries to Avoid Bidirectional Relationships

+
+

With the usage of queries it is possible to avoid exposing relationships or modelling bidirectional relationships, which have some disadvantages (see relationships). This is especially desired for relationships between entities of different business components. +So for example to get all OrderLineEntities for a specific OrderEntity without using the orderLines relation from OrderEntity the following query could be used:

+
+
+
+
SELECT line FROM OrderLineEntity line WHERE line.order.id = :orderId
+
+
+
+
+
+
+

Dynamic Queries

+
+
+

For dynamic queries, we use the JPA module for Querydsl. Querydsl also supports other modules such as MongoDB, and Apache Lucene. It allows to implement queries in a powerful but readable and type-safe way (unlike Criteria API). If you already know JPQL, you will quickly be able to read and write Querydsl code. It feels like JPQL but implemented in Java instead of plain text.

+
+
+

To use Querydsl in your Maven project, add the following dependencies:

+
+
+
+
<dependencies>
+
+    <dependency>
+        <groupId>com.querydsl</groupId>
+        <artifactId>querydsl-apt</artifactId>
+        <version>${querydsl.version}</version>
+        <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+        <groupId>com.querydsl</groupId>
+        <artifactId>querydsl-jpa</artifactId>
+        <version>${querydsl.version}</version>
+    </dependency>
+
+</dependencies>
+
+
+
+

Next, configure the annotation processing tool (APT) plugin:

+
+
+
+
<project>
+  <build>
+    <plugins>
+      ...
+      <plugin>
+        <groupId>com.mysema.maven</groupId>
+        <artifactId>apt-maven-plugin</artifactId>
+        <version>1.1.3</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>process</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>target/generated-sources/java</outputDirectory>
+              <processor>com.querydsl.apt.jpa.JPAAnnotationProcessor</processor>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      ...
+    </plugins>
+  </build>
+</project>
+
+
+
+

Here is an example from our sample application:

+
+
+
+
  public List<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    QDishEntity dish = QDishEntity.dishEntity;
+    JPAQuery<DishEntity> query = new JPAQuery<OrderEntity>(getEntityManager());
+    query.from(dish);
+
+    Range<BigDecimal> priceRange = criteria.getPriceRange();
+    if (priceRange != null) {
+      BigDecimal min = priceRange.getMin();
+      if (min != null) {
+        query.where(dish.price.goe(min));
+      }
+      BigDecimal max = priceRange.getMax();
+      if (max != null) {
+        query.where(dish.price.loe(max));
+      }
+    }
+    String name = criteria.getName();
+    if ((name != null) && (!name.isEmpty())) {
+      query.where(dish.name.eq(name));
+    }
+    query.orderBy(dish.price.asc(), dish.name.asc());
+    return query.fetch();
+  }
+
+
+
+

In this example, we use the so called Q-types (QDishEntity). These are classes generated at build time by the Querydsl annotation processor from entity classes. The Q-type classes can be used as static types representative of the original entity class.

+
+
+

The query.from(dish) method call defines the query source, in this case the dish table. The where method defines a filter. For example, the first call uses the goe operator to filter out any dishes that are not greater or equal to the minimal price. Further operators can be found here.

+
+
+

The orderBy method is used to sort the query results according to certain criteria. Here, we sort the results first by their price and then by their name, both in ascending order. To sort in descending order, use .desc(). To partition query results into groups of rows, see the groupBy method.

+
+
+

For spring, devon4j provides another approach that you can use for your Spring applications to implement Querydsl logic without having to use these metaclasses. An example can be found here.

+
+
+
+
+

Native Queries

+
+
+

Spring Data supports the use of native queries. Native queries use simple native SQL syntax that is not parsed in JPQL. This allows you to use all the features that your database supports. +The downside to this is that database portability is lost due to the absence of an abstraction layer. Therefore, the queries may not work with another database because it may use a different syntax.

+
+
+

You can implement a native query using @Query annotation with the nativeQuery attribute set to true:

+
+
+
+
@Query(value="...", nativeQuery=true)
+
+
+
+ + + + + +
+ + +This will not work with Quarkus because Quarkus does not support native queries by using the @Query annotation (see here). +
+
+
+

You can also implement native queries directly using the EntityManager API and the createNativeQuery method. +This approach also works with Quarkus.

+
+
+
+
Query query = entityManager.createNativeQuery("SELECT * FROM Product", ProductEntity.class);
+List<ProductEntity> products = query.getResultList();
+
+
+
+ + + + + +
+ + +Be sure to use the name of the table when using native queries, while you must use the entity name when implementing queries with JPQL. +
+
+
+
+
+

Using Wildcards

+
+
+

For flexible queries it is often required to allow wildcards (especially in dynamic queries). While users intuitively expect glob syntax, the SQL and JPQL standards work differently. Therefore, a mapping is required. devonfw provides this on a lower level with LikePatternSyntax and on a higher level with QueryUtil (see QueryHelper.newStringClause(…​)).

+
+
+
+
+

Pagination

+
+
+

When dealing with large amounts of data, an efficient method of retrieving the data is required. Fetching the entire data set each time would be too time consuming. Instead, Paging is used to process only small subsets of the entire data set.

+
+
+

If you are using Spring Data repositories you will get pagination support out of the box by providing the interfaces Page and Pageable:

+
+
+
repository
+
+
Page<DishEntity> findAll(Pageable pageable);
+
+
+
+

Then you can create a Pageable object and pass it to the method call as follows:

+
+
+
+
int page = criteria.getPageNumber();
+int size = criteria.getPageSize();
+Pageable pageable = PageRequest.of(page, size);
+Page<DishEntity> dishes = dishRepository.findAll(pageable);
+
+
+
+

Paging with Querydsl

+
+

Pagination is also supported for dynamic queries with Querydsl:

+
+
+
+
  public Page<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    QDishEntity dish = QDishEntity.dishEntity;
+    JPAQuery<DishEntity> query = new JPAQuery<OrderEntity>(getEntityManager());
+    query.from(dish);
+
+    // conditions
+
+    int page = criteria.getPageNumber();
+    int size = criteria.getPageSize();
+    Pageable pageable = PageRequest.of(page, size);
+    query.offset(pageable.getOffset());
+    query.limit(pageable.getPageSize());
+
+    List<DishEntity> dishes = query.fetch();
+    return new PageImpl<>(dishes, pageable, dishes.size());
+  }
+
+
+
+
+

Pagination example

+
+

For the table entity we can make a search request by accessing the REST endpoint with pagination support like in the following examples:

+
+
+
+
POST mythaistar/services/rest/tablemanagement/v1/table/search
+{
+  "pagination": {
+    "size":2,
+    "total":true
+  }
+}
+
+//Response
+{
+    "pagination": {
+        "size": 2,
+        "page": 1,
+        "total": 11
+    },
+    "result": [
+        {
+            "id": 101,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 1,
+            "state": "OCCUPIED"
+        },
+        {
+            "id": 102,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 2,
+            "state": "FREE"
+        }
+    ]
+}
+
+
+
+ + + + + +
+ + +As we are requesting with the total property set to true the server responds with the total count of rows for the query. +
+
+
+

For retrieving a concrete page, we provide the page attribute with the desired value. Here we also left out the total property so the server doesn’t incur the effort to calculate it:

+
+
+
+
POST mythaistar/services/rest/tablemanagement/v1/table/search
+{
+  "pagination": {
+    "size":2,
+    "page":2
+  }
+}
+
+//Response
+
+{
+    "pagination": {
+        "size": 2,
+        "page": 2,
+        "total": null
+    },
+    "result": [
+        {
+            "id": 103,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 3,
+            "state": "FREE"
+        },
+        {
+            "id": 104,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 4,
+            "state": "FREE"
+        }
+    ]
+}
+
+
+
+
+

Pagination in devon4j-spring

+
+

For spring applications, devon4j also offers its own solution for pagination. You can find an example of this here.

+
+
+
+
+
+

Query Meta-Parameters

+
+
+

Queries can have meta-parameters and that are provided via SearchCriteriaTo. Besides paging (see above) we also get timeout support.

+
+
+
+
+

Advanced Queries

+
+
+

Writing queries can sometimes get rather complex. The current examples given above only showed very simple basics. Within this topic a lot of advanced features need to be considered like:

+
+
+ +
+
+

This list is just containing the most important aspects. As we can not cover all these topics here, they are linked to external documentation that can help and guide you.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jpa.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jpa.html new file mode 100644 index 00000000..351fa390 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jpa.html @@ -0,0 +1,983 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Java Persistence API

+
+
+

For mapping java objects to a relational database we use the Java Persistence API (JPA). +As JPA implementation we recommend to use Hibernate. For general documentation about JPA and Hibernate follow the links above as we will not replicate the documentation. Here you will only find guidelines and examples how we recommend to use it properly. The following examples show how to map the data of a database to an entity. As we use JPA we abstract from SQL here. However, you will still need a DDL script for your schema and during maintenance also database migrations. Please follow our SQL guide for such artifacts.

+
+
+

Entity

+
+
+

Entities are part of the persistence layer and contain the actual data. They are POJOs (Plain Old Java Objects) on which the relational data of a database is mapped and vice versa. The mapping is configured via JPA annotations (javax.persistence). Usually an entity class corresponds to a table of a database and a property to a column of that table. A persistent entity instance then represents a row of the database table.

+
+
+

A Simple Entity

+
+

The following listing shows a simple example:

+
+
+
+
@Entity
+@Table(name="TEXTMESSAGE")
+public class MessageEntity extends ApplicationPersistenceEntity implements Message {
+
+  private String text;
+
+  public String getText() {
+    return this.text;
+  }
+
+  public void setText(String text) {
+    this.text = text;
+  }
+ }
+
+
+
+

The @Entity annotation defines that instances of this class will be entities which can be stored in the database. The @Table annotation is optional and can be used to define the name of the corresponding table in the database. If it is not specified, the simple name of the entity class is used instead.

+
+
+

In order to specify how to map the attributes to columns we annotate the corresponding getter methods (technically, private field annotation is also possible but the approaches can not be mixed). +The @Id annotation specifies that a property should be used as primary key. +With the help of the @Column annotation it is possible to define the name of the column that an attribute is mapped to as well as other aspects such as nullable or unique. If no column name is specified, the name of the property is used as default.

+
+
+

Note that every entity class needs a constructor with public or protected visibility that does not have any arguments. Moreover, neither the class nor its getters and setters may be final.

+
+
+

Entities should be simple POJOs and not contain business logic.

+
+
+
+

Entities and Datatypes

+
+

Standard datatypes like Integer, BigDecimal, String, etc. are mapped automatically by JPA. Custom datatypes are mapped as serialized BLOB by default, which is typically undesired. +In order to map atomic custom datatypes (implementations of SimpleDatatype) we implement an AttributeConverter. Here is a simple example:

+
+
+
+
@Converter(autoApply = true)
+public class MoneyAttributeConverter implements AttributeConverter<Money, BigDecimal> {
+
+  public BigDecimal convertToDatabaseColumn(Money attribute) {
+    return attribute.getValue();
+  }
+
+  public Money convertToEntityAttribute(BigDecimal dbData) {
+    return new Money(dbData);
+  }
+}
+
+
+
+

The annotation @Converter is detected by the JPA vendor if the annotated class is in the packages to scan. Further, autoApply = true implies that the converter is automatically used for all properties of the handled datatype. Therefore all entities with properties of that datatype will automatically be mapped properly (in our example Money is mapped as BigDecimal).

+
+
+

In case you have a composite datatype that you need to map to multiple columns the JPA does not offer a real solution. As a workaround you can use a bean instead of a real datatype and declare it as @Embeddable. If you are using Hibernate you can implement CompositeUserType. Via the @TypeDef annotation it can be registered to Hibernate. If you want to annotate the CompositeUserType implementation itself you also need another annotation (e.g. MappedSuperclass, though not technically correct) so it is found by the scan.

+
+
+

Enumerations

+
+

By default JPA maps Enums via their ordinal. Therefore the database will only contain the ordinals (0, 1, 2, etc.). So, inside the database you cannot easily understand their meaning. Using @Enumerated with EnumType.STRING allows mapping the enum values to their name (Enum.name()). Both approaches are fragile when it comes to code changes and refactoring (if you change the order of the enum values or rename them) after the application is deployed to production. If you want to avoid this and get a robust mapping you can define a dedicated string in each enum value for database representation that you keep untouched. Then you treat the enum just like any other custom datatype.

+
+
+
+

BLOB

+
+

If binary or character large objects (BLOB/CLOB) should be used to store the value of an attribute, e.g. to store an icon, the @Lob annotation should be used as shown in the following listing:

+
+
+
+
@Lob
+public byte[] getIcon() {
+  return this.icon;
+}
+
+
+
+ + + + + +
+ + +Using a byte array will cause problems if BLOBs get large because the entire BLOB is loaded into the RAM of the server and has to be processed by the garbage collector. For larger BLOBs the type Blob and streaming should be used. +
+
+
+
+
public Blob getAttachment() {
+  return this.attachment;
+}
+
+
+
+
+

Date and Time

+
+

To store date and time related values, the temporal annotation can be used as shown in the listing below:

+
+
+
+
@Temporal(TemporalType.TIMESTAMP)
+public java.util.Date getStart() {
+  return start;
+}
+
+
+
+

Until Java 8 the Java data type java.util.Date (or Joda-Time) has to be used. +TemporalType defines the granularity. In this case, a precision of nanoseconds is used. If this granularity is not wanted, TemporalType.DATE can be used instead, which only has a granularity of days. +Mixing these two granularities can cause problems when comparing one value to another. This is why we only use TemporalType.TIMESTAMP.

+
+
+
+

QueryDSL and Custom Types

+
+

Using the Aliases API of QueryDSL might result in an InvalidDataAccessApiUsageException when using custom datatypes in entity properties. This can be circumvented in two steps:

+
+
+
    +
  1. +

    Ensure you have the following maven dependencies in your project (core module) to support custom types via the Aliases API:

    +
    +
    +
    <dependency>
    +  <groupId>org.ow2.asm</groupId>
    +  <artifactId>asm</artifactId>
    +</dependency>
    +<dependency>
    +  <groupId>cglib</groupId>
    +  <artifactId>cglib</artifactId>
    +</dependency>
    +
    +
    +
  2. +
  3. +

    Make sure, that all your custom types used in entities provide a non-argument constructor with at least visibility level protected.

    +
  4. +
+
+
+
+
+

Primary Keys

+
+

We only use simple Long values as primary keys (IDs). +By default it is auto generated (@GeneratedValue(strategy=GenerationType.AUTO)). +This is already provided by the class com.devonfw.<projectName>.general.dataaccess.api.AbstractPersistenceEntity within the classic project structure respectively com.devonfw.<projectName>.general.domain.model.AbstractPersistenceEntity within the modern project structure, that you can extend.

+
+
+

The reason for this recommendation is simply because using a number (Long) is the most efficient representation for the database. +You may also consider to use other types like String or UUID or even composite custom datatypes and this is technically possible. +However, please consider that the primary key is used to lookup the row from the database table, also in foreign keys and thus in JOINs. +Please note that your project sooner or later may reach some complexity where performance really matters. +Working on big data and performing JOINs when using types such as String (VARCHAR[2]) as primary and foreign keys will kill your performance. +You are still free to make a different choice and devonfw only gives recommendations but does not want to dictate you what to do. +However, you have been warned about the consequences. +If you are well aware of what you are doing, you can still use different types of primary keys. +In such case, create your own entity not extending AbstractPersistenceEntity or create your own copy of AbstractPersistenceEntity with a different name and a different type of primary key.

+
+
+

In case you have business oriented keys (often as String), you can define an additional property for it and declare it as unique (@Column(unique=true)). +Be sure to include "AUTO_INCREMENT" in your sql table field ID to be able to persist data (or similar for other databases).

+
+
+
+
+
+

Relationships

+
+
+

n:1 and 1:1 Relationships

+
+

Entities often do not exist independently but are in some relation to each other. For example, for every period of time one of the StaffMembers of the restaurant example has worked, which is represented by the class WorkingTime, there is a relationship to this StaffMember.

+
+
+

The following listing shows how this can be modeled using JPA:

+
+
+
+
...
+
+@Entity
+public class WorkingTimeEntity {
+   ...
+
+   private StaffMemberEntity staffMember;
+
+   @ManyToOne
+   @JoinColumn(name="STAFFMEMBER")
+   public StaffMemberEntity getStaffMember() {
+      return this.staffMember;
+   }
+
+   public void setStaffMember(StaffMemberEntity staffMember) {
+      this.staffMember = staffMember;
+   }
+}
+
+
+
+

To represent the relationship, an attribute of the type of the corresponding entity class that is referenced has been introduced. The relationship is a n:1 relationship, because every WorkingTime belongs to exactly one StaffMember, but a StaffMember usually worked more often than once.
+This is why the @ManyToOne annotation is used here. For 1:1 relationships the @OneToOne annotation can be used which works basically the same way. To be able to save information about the relation in the database, an additional column in the corresponding table of WorkingTime is needed which contains the primary key of the referenced StaffMember. With the name element of the @JoinColumn annotation it is possible to specify the name of this column.

+
+
+
+

1:n and n:m Relationships

+
+

The relationship of the example listed above is currently a unidirectional one, as there is a getter method for retrieving the StaffMember from the WorkingTime object, but not vice versa.

+
+
+

To make it a bidirectional one, the following code has to be added to StaffMember:

+
+
+
+
  private Set<WorkingTimeEntity> workingTimes;
+
+  @OneToMany(mappedBy="staffMember")
+  public Set<WorkingTimeEntity> getWorkingTimes() {
+    return this.workingTimes;
+  }
+
+  public void setWorkingTimes(Set<WorkingTimeEntity> workingTimes) {
+    this.workingTimes = workingTimes;
+  }
+
+
+
+

To make the relationship bidirectional, the tables in the database do not have to be changed. Instead the column that corresponds to the attribute staffMember in class WorkingTime is used, which is specified by the mappedBy element of the @OneToMany annotation. Hibernate will search for corresponding WorkingTime objects automatically when a StaffMember is loaded.

+
+
+

The problem with bidirectional relationships is that if a WorkingTime object is added to the set or list workingTimes in StaffMember, this does not have any effect in the database unless +the staffMember attribute of that WorkingTime object is set. That is why devon4j advises not to use bidirectional relationships but to use queries instead. How to do this is shown here. If a bidirectional relationship should be used nevertheless, appropriate add and remove methods must be used.

+
+
+

For 1:n and n:m relations, the devon4j demands that (unordered) Sets and no other collection types are used, as shown in the listing above. The only exception is whenever an ordering is really needed, (sorted) lists can be used.
+For example, if WorkingTime objects should be sorted by their start time, this could be done like this:

+
+
+
+
  private List<WorkingTimeEntity> workingTimes;
+
+  @OneToMany(mappedBy = "staffMember")
+  @OrderBy("startTime asc")
+  public List<WorkingTimeEntity> getWorkingTimes() {
+    return this.workingTimes;
+  }
+
+  public void setWorkingTimes(List<WorkingTimeEntity> workingTimes) {
+    this.workingTimes = workingTimes;
+  }
+
+
+
+

The value of the @OrderBy annotation consists of an attribute name of the class followed by asc (ascending) or desc (descending).

+
+
+

To store information about a n:m relationship, a separate table has to be used, as one column cannot store several values (at least if the database schema is in first normal form).
+For example if one wanted to extend the example application so that all ingredients of one FoodDrink can be saved and to model the ingredients themselves as entities (e.g. to store additional information about them), this could be modeled as follows (extract of class FoodDrink):

+
+
+
+
  private Set<IngredientEntity> ingredients;
+
+  @ManyToMany()
+  @JoinTable
+  public Set<IngredientEntity> getIngredients() {
+    return this.ingredients;
+  }
+
+  public void setOrders(Set<IngredientEntity> ingredients) {
+    this.ingredients = ingredients;
+  }
+
+
+
+

Information about the relation is stored in a table called BILL_ORDER that has to have two columns, one for referencing the Bill, the other one for referencing the Order. Note that the @JoinTable annotation is not needed in this case because a separate table is the default solution here (same for n:m relations) unless there is a mappedBy element specified.

+
+
+

For 1:n relationships this solution has the disadvantage that more joins (in the database system) are needed to get a Bill with all the Orders it refers to. This might have a negative impact on performance so that the solution to store a reference to the Bill row/entity in the Order’s table is probably the better solution in most cases.

+
+
+

Note that bidirectional n:m relationships are not allowed for applications based on devon4j. Instead a third entity has to be introduced, which "represents" the relationship (it has two n:1 relationships).

+
+
+
+

Eager vs. Lazy Loading

+
+

Using JPA it is possible to use either lazy or eager loading. Eager loading means that for entities retrieved from the database, other entities that are referenced by these entities are also retrieved, whereas lazy loading means that this is only done when they are actually needed, i.e. when the corresponding getter method is invoked.

+
+
+

Applications based on devon4j are strongly advised to always use lazy loading. The JPA defaults are:

+
+
+
    +
  • +

    @OneToMany: LAZY

    +
  • +
  • +

    @ManyToMany: LAZY

    +
  • +
  • +

    @ManyToOne: EAGER

    +
  • +
  • +

    @OneToOne: EAGER

    +
  • +
+
+
+

So at least for @ManyToOne and @OneToOne you always need to override the default by providing fetch = FetchType.LAZY.

+
+
+ + + + + +
+ + +Please read the performance guide. +
+
+
+
+

Cascading Relationships

+
+

For relations it is also possible to define whether operations are cascaded (like a recursion) to the related entity. +By default, nothing is done in these situations. This can be changed by using the cascade property of the annotation that specifies the relation type (@OneToOne, @ManyToOne, @OneToMany, @ManyToMany). This property accepts a CascadeType that offers the following options:

+
+
+
    +
  • +

    PERSIST (for EntityManager.persist, relevant for inserting transient entities into the DB)

    +
  • +
  • +

    REMOVE (for EntityManager.remove to delete entity from DB)

    +
  • +
  • +

    MERGE (for EntityManager.merge)

    +
  • +
  • +

    REFRESH (for EntityManager.refresh)

    +
  • +
  • +

    DETACH (for EntityManager.detach)

    +
  • +
  • +

    ALL (cascade all of the above operations)

    +
  • +
+
+
+

See here for more information.

+
+
+
+

Typesafe Foreign Keys using IdRef

+
+

For simple usage you can use Long for all your foreign keys. +However, as an optional pattern for advanced and type-safe usage, we offer IdRef.

+
+
+
+
+
+

Embeddable

+
+
+

An embeddable Object is a way to group properties of an entity into a separate Java (child) object. Unlike with relationships, the embeddable is not a separate entity and its properties are stored (embedded) in the same table together with the entity. This is helpful to structure and reuse groups of properties.

+
+
+

The following example shows an Address implemented as an embeddable class:

+
+
+
+
@Embeddable
+public class AddressEmbeddable {
+
+  private String street;
+  private String number;
+  private Integer zipCode;
+  private String city;
+
+  @Column(name="STREETNUMBER")
+  public String getNumber() {
+    return number;
+  }
+
+  public void setNumber(String number) {
+    this.number = number;
+  }
+
+  ...  // other getter and setter methods, equals, hashCode
+}
+
+
+
+

As you can see an embeddable is similar to an entity class, but with an @Embeddable annotation instead of the @Entity annotation and without primary key or modification counter. +An Embeddable does not exist on its own but in the context of an entity. +As a simplification Embeddables do not require a separate interface and ETO as the bean-mapper will create a copy automatically when converting the owning entity to an ETO. +However, in this case the embeddable becomes part of your api module that therefore needs a dependency on the JPA.

+
+
+

In addition to that the methods equals(Object) and hashCode() need to be implemented as this is required by Hibernate (it is not required for entities because they can be unambiguously identified by their primary key). For some hints on how to implement the hashCode() method please have a look here.

+
+
+

Using this AddressEmbeddable inside an entity class can be done like this:

+
+
+
+
  private AddressEmbeddable address;
+
+  @Embedded
+  public AddressEmbeddable getAddress() {
+    return this.address;
+  }
+
+  public void setAddress(AddressEmbeddable address) {
+    this.address = address;
+  }
+}
+
+
+
+

The @Embedded annotation needs to be used for embedded attributes. Note that if all columns of the embeddable (here Address) are null, then the embeddable object itself is also null inside the entity. This has to be considered to avoid NullPointerExceptions. Further this causes some issues with primitive types in embeddable classes that can be avoided by only using object types instead.

+
+
+
+
+

Inheritance

+
+
+

Just like normal java classes, entity classes can inherit from others. The only difference is that you need to specify how to map a class hierarchy to database tables. Generic abstract super-classes for entities can simply be annotated with @MappedSuperclass.

+
+
+

For all other cases the JPA offers the annotation @Inheritance with the property strategy taking an InheritanceType that has the following options:

+
+
+
+
+
    +
  • +

    SINGLE_TABLE: This strategy uses a single table that contains all columns needed to store all entity-types of the entire inheritance hierarchy. If a column is not needed for an entity because of its type, there is a null value in this column. An additional column is introduced, which denotes the type of the entity (called dtype).

    +
  • +
  • +

    TABLE_PER_CLASS: For each concrete entity class there is a table in the database that can store such an entity with all its attributes. An entity is only saved in the table corresponding to its most concrete type. To get all entities of a super type, joins are needed.

    +
  • +
  • +

    JOINED: In this case there is a table for every entity class including abstract classes, which contains only the columns for the persistent properties of that particular class. Additionally there is a primary key column in every table. To get an entity of a class that is a subclass of another one, joins are needed.

    +
  • +
+
+
+
+
+

Each of the three approaches has its advantages and drawbacks, which are discussed in detail here. In most cases, the first one should be used, because it is usually the fastest way to do the mapping, as no joins are needed when retrieving, searching or persisting entities. Moreover it is rather simple and easy to understand. +One major disadvantage is that the first approach could lead to a table with a lot of null values, which might have a negative impact on the database size.

+
+
+

The inheritance strategy has to be annotated to the top-most entity of the class hierarchy (where @MappedSuperclass classes are not considered) like in the following example:

+
+
+
+
@Entity
+@Inheritance(strategy=InheritanceType.SINGLE_TABLE)
+public abstract class MyParentEntity extends ApplicationPersistenceEntity implements MyParent {
+  ...
+}
+
+@Entity
+public class MyChildEntity extends MyParentEntity implements MyChild {
+  ...
+}
+
+@Entity
+public class MyOtherEntity extends MyParentEntity implements MyChild {
+  ...
+}
+
+
+
+

As a best practice we advise you to avoid entity hierarchies at all where possible and otherwise to keep the hierarchy as small as possible. In order to just ensure reuse or establish a common API you can consider a shared interface, a @MappedSuperclass or an @Embeddable instead of an entity hierarchy.

+
+
+
+
+

Repositories and DAOs

+
+
+

For each entity a code unit is created that groups all database operations for that entity. We recommend to use spring-data repositories for that as it is most efficient for developers. As an alternative there is still the classic approach using DAOs.

+
+
+

Concurrency Control

+
+

The concurrency control defines the way concurrent access to the same data of a database is handled. When several users (or threads of application servers) concurrently access a database, anomalies may happen, e.g. a transaction is able to see changes from another transaction although that one did not yet commit these changes. Most of these anomalies are automatically prevented by the database system, depending on the isolation level (property hibernate.connection.isolation in the jpa.xml, see here, or quarkus.datasource.jdbc.transaction-isolation-level in the application.properties).

+
+
+

Another anomaly is when two stakeholders concurrently access a record, do some changes and write them back to the database. The JPA addresses this with different locking strategies (see here).

+
+
+

As a best practice we are using optimistic locking for regular end-user services (OLTP) and pessimistic locking for batches.

+
+
+
+

Optimistic Locking

+
+

The class com.devonfw.module.jpa.persistence.api.AbstractPersistenceEntity already provides optimistic locking via a modificationCounter with the @Version annotation. Therefore JPA takes care of optimistic locking for you. When entities are transferred to clients, modified and sent back for update you need to ensure the modificationCounter is part of the game. If you follow our guides about transfer-objects and services this will also work out of the box. +You only have to care about two things:

+
+
+
    +
  • +

    How to deal with optimistic locking in relationships?
    +Assume an entity A contains a collection of B entities. Should there be a locking conflict if one user modifies an instance of A while another user in parallel modifies an instance of B that is contained in the other instance? To address this, take a look at FeatureForceIncrementModificationCounter.

    +
  • +
  • +

    What should happen in the UI if an OptimisticLockException occurred?
    +According to KISS our recommendation is that the user gets an error displayed that tells him to do his change again on the recent data. Try to design your system and the work processing in a way to keep such conflicts rare and you are fine.

    +
  • +
+
+
+
+

Pessimistic Locking

+
+

For back-end services and especially for batches optimistic locking is not suitable. A human user shall not cause a large batch process to fail because he was editing the same entity. Therefore such use-cases use pessimistic locking, which gives them a kind of priority over the human users. +In your DAO implementation you can provide methods that do pessimistic locking via EntityManager operations that take a LockModeType. Here is a simple example:

+
+
+
+
  getEntityManager().lock(entity, LockModeType.READ);
+
+
+
+

When using the lock(Object, LockModeType) method with LockModeType.READ, Hibernate will issue a SELECT … FOR UPDATE. This means that no one else can update the entity (see here for more information on the statement). If LockModeType.WRITE is specified, Hibernate issues a SELECT … FOR UPDATE NOWAIT instead, which has the same meaning as the statement above, but if there is already a lock, the program will not wait for this lock to be released. Instead, an exception is raised.
+Use one of the types if you want to modify the entity later on, for read only access no lock is required.

+
+
+

As you might have noticed, the behavior of Hibernate deviates from what one would expect by looking at the LockModeType (especially LockModeType.READ should not cause a SELECT …​ FOR UPDATE to be issued). The framework actually deviates from what is specified in the JPA for unknown reasons.

+
+
+
+
+
+

Database Auditing

+
+ +
+
+
+

Testing Data-Access

+
+
+

For testing of Entities and Repositories or DAOs see testing guide.

+
+
+
+
+

Principles

+
+
+

We strongly recommend these principles:

+
+
+
    +
  • +

    Use the JPA where ever possible and use vendor (hibernate) specific features only for situations when JPA does not provide a solution. In the latter case consider first if you really need the feature.

    +
  • +
  • +

    Create your entities as simple POJOs and use JPA to annotate the getters in order to define the mapping.

    +
  • +
  • +

    Keep your entities simple and avoid putting advanced logic into entity methods.

    +
  • +
+
+
+
+
+

Database Configuration

+
+
+

For details on the configuration of the database connection and database logging of the individual framework, please refer to the respective configuration guide.

+
+
+

For spring see here.

+
+
+

For quarkus see here.

+
+
+

Database Migration

+ +
+
+

Pooling

+
+

You typically want to pool JDBC connections to boost performance by recycling previous connections. There are many libraries available to do connection pooling. We recommend to use HikariCP. For Oracle RDBMS see here.

+
+
+
+
+
+

Security

+
+
+

SQL-Injection

+
+

A common security threat is SQL-injection. Never build queries with string concatenation or your code might be vulnerable as in the following example:

+
+
+
+
  String query = "Select op from OrderPosition op where op.comment = " + userInput;
+  return getEntityManager().createQuery(query).getResultList();
+
+
+
+

Via the parameter userInput an attacker can inject SQL (JPQL) and execute arbitrary statements in the database causing extreme damage.

+
+
+

In order to prevent such injections you have to strictly follow our rules for queries:

+
+
+ +
+
+
+

Limited Permissions for Application

+
+

We suggest that you operate your application with a database user that has limited permissions so that it cannot modify the SQL schema (e.g. drop tables). For initializing the schema (DDL) or to do schema migrations use a separate user that is not used by the application itself.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-json.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-json.html new file mode 100644 index 00000000..15c6e3d8 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-json.html @@ -0,0 +1,430 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==JSON

+
+
+

JSON (JavaScript Object Notation) is a popular format to represent and exchange data especially for modern web-clients. For mapping Java objects to JSON and vice-versa there is no official standard API. We use the established and powerful open-source solution Jackson. +Due to problems with the wiki of fasterxml you should try this alternative link: Jackson/AltLink.

+
+
+

Configure JSON Mapping

+
+
+

In order to avoid polluting business objects with proprietary Jackson annotations (e.g. @JsonTypeInfo, @JsonSubTypes, @JsonProperty) we propose to create a separate configuration class. Every devonfw application (sample or any app created from our app-template) therefore has a class called ApplicationObjectMapperFactory that extends ObjectMapperFactory from the devon4j-rest module. It looks like this:

+
+
+
+
@Named("ApplicationObjectMapperFactory")
+public class ApplicationObjectMapperFactory extends ObjectMapperFactory {
+
+  public ApplicationObjectMapperFactory() {
+    super();
+    // JSON configuration code goes here
+  }
+}
+
+
+
+
+
+

JSON and Inheritance

+
+
+

If you are using inheritance for your objects mapped to JSON then polymorphism cannot be supported out of the box. So in general avoid polymorphic objects in JSON mapping. However, this is not always possible. +Have a look at the following example from our sample application:

+
+
+
+inheritance class diagram +
+
Figure 1. Transfer-Objects using Inheritance
+
+
+

Now assume you have a REST service operation as Java method that takes a ProductEto as argument. As this is an abstract class the server needs to know the actual sub-class to instantiate. +We typically do not want to specify the classname in the JSON as this should be an implementation detail and not part of the public JSON format (e.g. in case of a service interface). Therefore we use a symbolic name for each polymorphic subtype that is provided as virtual attribute @type within the JSON data of the object:

+
+
+
+
{ "@type": "Drink", ... }
+
+
+
+

Therefore you add configuration code to the constructor of ApplicationObjectMapperFactory. Here you can see an example from the sample application:

+
+
+
+
setBaseClasses(ProductEto.class);
+addSubtypes(new NamedType(MealEto.class, "Meal"), new NamedType(DrinkEto.class, "Drink"),
+  new NamedType(SideDishEto.class, "SideDish"));
+
+
+
+

We use setBaseClasses to register all top-level classes of polymorphic objects. Further we declare all concrete polymorphic sub-classes together with their symbolic name for the JSON format via addSubtypes.

+
+
+
+
+

Custom Mapping

+
+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to define a custom mapping. If you create objects dedicated for the JSON mapping you can easily avoid such situations. When this is not suitable follow these instructions to define the mapping:

+
+
+
    +
  1. +

    As an example, the use of JSR354 (javax.money) is appreciated in order to process monetary amounts properly. However, without custom mapping, the default mapping of Jackson will produce the following JSON for a MonetaryAmount:

    +
    +
    +
    "currency": {"defaultFractionDigits":2, "numericCode":978, "currencyCode":"EUR"},
    +"monetaryContext": {...},
    +"number":6.99,
    +"factory": {...}
    +
    +
    +
    +

    As clearly can be seen, the JSON contains too much information and reveals implementation secrets that do not belong here. Instead the JSON output expected and desired would be:

    +
    +
    +
    +
    "currency":"EUR","amount":"6.99"
    +
    +
    +
    +

    Even worse, when we send the JSON data to the server, Jackson will see that MonetaryAmount is an interface and does not know how to instantiate it so the request will fail. +Therefore we need a customized Serializer.

    +
    +
  2. +
  3. +

    We implement MonetaryAmountJsonSerializer to define how a MonetaryAmount is serialized to JSON:

    +
    +
    +
    public final class MonetaryAmountJsonSerializer extends JsonSerializer<MonetaryAmount> {
    +
    +  public static final String NUMBER = "amount";
    +  public static final String CURRENCY = "currency";
    +
    +  public void serialize(MonetaryAmount value, JsonGenerator jgen, SerializerProvider provider) throws ... {
    +    if (value != null) {
    +      jgen.writeStartObject();
    +      jgen.writeFieldName(MonetaryAmountJsonSerializer.CURRENCY);
    +      jgen.writeString(value.getCurrency().getCurrencyCode());
    +      jgen.writeFieldName(MonetaryAmountJsonSerializer.NUMBER);
    +      jgen.writeString(value.getNumber().toString());
    +      jgen.writeEndObject();
    +    }
    +  }
    +
    +
    +
    +

    For composite datatypes it is important to wrap the info as an object (writeStartObject() and writeEndObject()). MonetaryAmount provides the information we need via getCurrency() and getNumber(), so that we can easily write it into the JSON data.

    +
    +
  4. +
  5. +

    Next, we implement MonetaryAmountJsonDeserializer to define how a MonetaryAmount is deserialized back as Java object from JSON:

    +
    +
    +
    public final class MonetaryAmountJsonDeserializer extends AbstractJsonDeserializer<MonetaryAmount> {
    +  protected MonetaryAmount deserializeNode(JsonNode node) {
    +    BigDecimal number = getRequiredValue(node, MonetaryAmountJsonSerializer.NUMBER, BigDecimal.class);
    +    String currencyCode = getRequiredValue(node, MonetaryAmountJsonSerializer.CURRENCY, String.class);
    +    MonetaryAmount monetaryAmount =
    +        MonetaryAmounts.getAmountFactory().setNumber(number).setCurrency(currencyCode).create();
    +    return monetaryAmount;
    +  }
    +}
    +
    +
    +
    +

    For composite datatypes we extend from AbstractJsonDeserializer as this makes our task easier. So we already get a JsonNode with the parsed payload of our datatype. Based on this API it is easy to retrieve individual fields from the payload without taking care of their order, etc. +AbstractJsonDeserializer also provides methods such as getRequiredValue to read required fields and get them converted to the desired basic datatype. So we can easily read the amount and currency and construct an instance of MonetaryAmount via the official factory API.

    +
    +
  6. +
  7. +

    Finally we need to register our custom (de)serializers with the following configuration code in the constructor of ApplicationObjectMapperFactory:+

    +
  8. +
+
+
+
+
  SimpleModule module = getExtensionModule();
+  module.addDeserializer(MonetaryAmount.class, new MonetaryAmountJsonDeserializer());
+  module.addSerializer(MonetaryAmount.class, new MonetaryAmountJsonSerializer());
+
+
+
+

Now we can read and write MonetaryAmount from and to JSON as expected.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jwt.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jwt.html new file mode 100644 index 00000000..047e72ae --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-jwt.html @@ -0,0 +1,284 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==JWT

+
+
+

JWT (JSON Web Token) is an open standard (see RFC 7519) for creating JSON based access tokens that assert some number of claims. +With an IT landscape divided into multiple smaller apps you want to avoid coupling all those apps or services tightly with your IAM (Identity & Access Management). +Instead your apps simply expect a JWT as bearer-token in the Authorization HTTP header field. +All an app needs to do for authentication is validating this JWT. +The actual authentication is done centrally by an access system (IAM) that authors those JWTs. +Therefore we recommend to use strong asymmetric cryptography to sign the JWT when it is authored. +Create a keypair per environment and keep the private key as a secret only known to the access system authorizing the JWTs. +Your apps only need to know the public key in order to validate the JWT. +Any request without a JWT or with an invalid JWT will be rejected (with status code 401).

+
+
+

When using spring check the JWT Spring-Starter. +For quarkus follow Using JWT RBAC.

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-kafka.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-kafka.html new file mode 100644 index 00000000..2a3f0210 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-kafka.html @@ -0,0 +1,291 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+ + + + + +
+ + +devon4j-kafka has been abandoned. Its main feature was the implementation of a retry pattern using multiple topics. This implementation has become an integral part of Spring Kafka. We recommend using Spring Kafka's own implementation for retries. +
+
+
+

==Messaging Services

+
+
+

Messaging Services provide an asynchronous communication mechanism between applications. Technically this is implemented using Apache Kafka.

+
+
+

For spring, devonfw uses Spring-Kafka as kafka framework. +For more details, check the devon4j-kafka.

+
+ +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-liquibase.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-liquibase.html new file mode 100644 index 00000000..f76e5f3f --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-liquibase.html @@ -0,0 +1,297 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ + +
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-log-monitoring.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-log-monitoring.html new file mode 100644 index 00000000..f5d79a7a --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-log-monitoring.html @@ -0,0 +1,350 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==Log-Monitoring

+
+
+

Log-monitoring is an aspect of monitoring with a strict focus on logging. +With trends towards IT landscapes with many but much smaller apps the classical approach of writing log-files to the disk and letting operators read those via SSH became entirely obsolete. +Nowadays we have up to hundreds or even thousands of apps that themselves are clustered into multiple nodes. +Therefore you should establish a centralized log monitoring system in the environment and let all your nodes log directly into that system. +This approach gives the following benefits:

+
+
+
    +
  • +

    all log information available in one place

    +
  • +
  • +

    full-text search across all logfiles

    +
  • +
  • +

    ability to automatically trigger alerts from specific log patterns

    +
  • +
  • +

    ability to do data-mining on logs and visualize in dashboards

    +
  • +
+
+
+

Options for log-monitoring

+
+
+

Typical products for such a log monitoring system are:

+
+
+ +
+
+

In devonfw we are not biased for any of these products. Therefore, feel free to make your choice according to the requirements of your project.

+
+
+

For Quarkus applications, you can get an insight into the topic by reading the guide about centralized log management.

+
+
+
+
+

API for log-monitoring

+
+
+

The "API" for logging to a log-monitoring system for your app is pretty simple:

+
+
+
    +
  • +

    Write your logs to standard out.

    +
  • +
  • +

    Use JSON logging as format.

    +
  • +
+
+
+

Then the container infrastructure can automatically collect your logs from standard out and directly feed those into the log monitoring system. +As a result, your app does not need to know anything about your log monitoring system and logging becomes most simple. +Further, if you do not write log-files anymore, you might not need to write any other files and therefore may not even need write permissions on the filesystem of your container. +In such a case an attacker who may find a vulnerability in your app will have less attack surface, as they cannot write any files.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-logging.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-logging.html new file mode 100644 index 00000000..20b9aa16 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-logging.html @@ -0,0 +1,743 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Logging

+
+
+

We recommend to use SLF4J as API for logging, which has become a de facto standard in Java as it has a much better design than java.util.logging offered by the JDK. +There are several implementations for SLF4J. For Spring applications our recommended implementation is Logback. Quarkus uses JBoss Logging which provides a JBoss Log Manager implementation for SLF4J. For more information on logging in Quarkus, see the Quarkus logging guide.

+
+
+

Logging Dependencies

+
+
+

To use Logback in your Spring application, you need to include the following dependencies:

+
+
+
+
<!-- SLF4J as logging API -->
+<dependency>
+  <groupId>org.slf4j</groupId>
+  <artifactId>slf4j-api</artifactId>
+</dependency>
+<!-- Logback as logging implementation  -->
+<dependency>
+  <groupId>ch.qos.logback</groupId>
+  <artifactId>logback-classic</artifactId>
+</dependency>
+<!-- JSON logging for cloud-native log monitoring -->
+<dependency>
+  <groupId>net.logstash.logback</groupId>
+  <artifactId>logstash-logback-encoder</artifactId>
+</dependency>
+
+
+
+

In devon4j these dependencies are provided by the devon4j-logging module.

+
+
+

In Quarkus, SLF4J and the slf4j-jboss-logmanager are directly included in the Quarkus core runtime and can be used out of the box.

+
+
+
+
+

Logger Access

+
+
+

The general pattern for accessing loggers from your code is a static logger instance per class using the following pattern:

+
+
+
+
import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class MyClass {
+  private static final Logger LOG = LoggerFactory.getLogger(MyClass.class);
+  ...
+}
+
+
+
+

For detailed documentation how to use the logger API check the SLF4j manual.

+
+
+ + + + + +
+ + +In case you are using devonfw-ide and Eclipse you can just type LOG and hit [ctrl][space] to insert the code pattern including the imports into your class. +
+
+
+

Lombok

+
+

In case you are using Lombok, you can simply use the @Slf4j annotation in your class. This causes Lombok to generate the logger instance for you.

+
+
+
+
+
+

Log-Levels

+
+
+

We use a common understanding of the log-levels as illustrated by the following table. +This helps for better maintenance and operation of the systems.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Log-levels
Log-levelDescriptionImpactActive Environments

FATAL

Only used for fatal errors that prevent the application to work at all (e.g. startup fails or shutdown/restart required)

Operator has to react immediately

all

ERROR

An abnormal error indicating that the processing failed due to technical problems.

Operator should check for known issue and otherwise inform development

all

WARNING

A situation where something worked not as expected. E.g. a business exception or user validation failure occurred.

No direct reaction required. Used for problem analysis.

all

INFO

Important information such as context, duration, success/failure of request or process

No direct reaction required. Used for analysis.

all

DEBUG

Development information that provides additional context for debugging problems.

No direct reaction required. Used for analysis.

development and testing

TRACE

Like DEBUG but exhaustive information and for code that is run very frequently. Will typically cause large log-files.

No direct reaction required. Used for problem analysis.

none (turned off by default)

+
+

Exceptions (with their stack trace) should only be logged on FATAL or ERROR level. For business exceptions typically a WARNING including the message of the exception is sufficient.

+
+
+

Configuration of Logback

+
+

The configuration of logback happens via the logback.xml file that you should place into src/main/resources of your app. +For details consult the logback configuration manual.

+
+
+ + + + + +
+ + +Logback also allows to overrule the configuration with a logback-test.xml file that you may put into src/test/resources or into a test-dependency. +
+
+
+
+

Configuration in Quarkus

+
+

There are several options you can set in the application.properties file to configure the behaviour of the logger in Quarkus. For a detailed overview, see the corresponding part of the Quarkus guide.

+
+
+
+
+
+

JSON-logging

+
+
+

For easy integration with log-monitoring, we recommend that your app logs to standard out in JSON following JSON Lines.

+
+
+

In Spring applications, this can be achieved via logstash-logback-encoder (see dependencies). In Quarkus, it can be easily achieved using the quarkus-logging-json extension (see here for more details).

+
+
+

This will produce log-lines with the following format (example formatted for readability):

+
+
+
+
{
+  "timestamp":"2000-12-31T23:59:59.999+00:00",
+  "@version":"1",
+  "message":"Processing 4 order(s) for shipment",
+  "logger_name":"com.myapp.order.logic.UcManageOrder",
+  "thread_name":"http-nio-8081-exec-6",
+  "level":"INFO",
+  "level_value":20000,
+  "appname":"myapp"
+}
+
+
+
+

Adding custom values to JSON log with Logstash

+
+

The JSON encoder even supports logging custom properties for your log-monitoring. +The trick is to use the class net.logstash.logback.argument.StructuredArguments for adding the arguments to you log message, e.g.

+
+
+
+
import static net.logstash.logback.argument.StructuredArguments.v;
+
+...
+    LOG.info("Request with {} and {} took {} ms.", v("url", url), v("status", statusCode), v("duration", millis));
+...
+
+
+
+

This will produce a JSON log-line with the following properties:

+
+
+
+
...
+  "message":"Request with url=https://api/service/v1/ordermanagement/order and status=200 took duration=251 ms",
+  "url":"https://api/service/v1/ordermanagement/order",
+  "status":"200",
+  "duration":"251",
+...
+
+
+
+

As you can quickly see, besides the human readable message you also have the structured properties url, status and duration that can be extremely valuable to configure dashboards in your log-monitoring that visualize success/failure ratio as well as performance of your requests.

+
+
+
+
+
+

Classic log-files

+
+
+ + + + + +
+ + +In devon4j, we strongly recommend using JSON logging instead of classic log files. The following section refers only to devon4j Spring applications that use Logback. +
+
+
+

Even though we do not recommend anymore to write classical log-files to the local disc, here you can still find our approach for it.

+
+
+

Maven-Integration

+
+

In the pom.xml of your application add this dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java</groupId>
+  <artifactId>devon4j-logging</artifactId>
+</dependency>
+
+
+
+

The above dependency already adds transitive dependencies to SLF4J and logback. +Also it comes with configuration snippets that can be included from your logback.xml file (see configuration).

+
+
+

The logback.xml to write regular log-files can look as following:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<configuration scan="true" scanPeriod="60 seconds">
+  <property resource="com/devonfw/logging/logback/application-logging.properties" />
+  <property name="appname" value="MyApp"/>
+  <property name="logPath" value="../logs"/>
+  <include resource="com/devonfw/logging/logback/appenders-file-all.xml" />
+  <include resource="com/devonfw/logging/logback/appender-console.xml" />
+
+  <root level="DEBUG">
+    <appender-ref ref="ERROR_APPENDER"/>
+    <appender-ref ref="INFO_APPENDER"/>
+    <appender-ref ref="DEBUG_APPENDER"/>
+    <appender-ref ref="CONSOLE_APPENDER"/>
+  </root>
+
+  <logger name="org.springframework" level="INFO"/>
+</configuration>
+
+
+
+

The provided logback.xml is configured to use variables defined in the config/application.properties file. +In our example, the log file path points to ../logs/ in order to log to the tomcat log directory when starting tomcat from the bin folder. +Change it according to your custom needs.

+
+
+
config/application.properties
+
+
log.dir=../logs/
+
+
+
+
+

Log Files

+
+

The classical approach uses the following log files:

+
+
+
    +
  • +

    Error Log: Includes log entries to detect errors.

    +
  • +
  • +

    Info Log: Used to analyze system status and to detect bottlenecks.

    +
  • +
  • +

    Debug Log: Detailed information for error detection.

    +
  • +
+
+
+

The log file name pattern is as follows:

+
+
+
+
«LOGTYPE»_log_«HOST»_«APPLICATION»_«TIMESTAMP».log
+
+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2. Segments of Logfilename
ElementValueDescription

«LOGTYPE»

info, error, debug

Type of log file

«HOST»

e.g. mywebserver01

Name of server, where logs are generated

«APPLICATION»

e.g. myapp

Name of application, which causes logs

«TIMESTAMP»

YYYY-MM-DD_HH00

date of log file

+
+

Example: +error_log_mywebserver01_myapp_2013-09-16_0900.log

+
+
+

Error log from mywebserver01 at application myapp on 16th September 2013 at 9 a.m.

+
+
+
+

Output format

+
+

We use the following output format for all log entries to ensure that searching and filtering of log entries work consistent for all logfiles:

+
+
+
+
[D: «timestamp»] [P: «priority»] [C: «NDC»][T: «thread»][L: «logger»]-[M: «message»]
+
+
+
+
    +
  • +

    D: Date (Timestamp in ISO8601 format e.g. 2013-09-05 16:40:36,464)

    +
  • +
  • +

    P: Priority (the log level)

    +
  • +
  • +

    C: Correlation ID (ID to identify users across multiple systems, needed when application is distributed)

    +
  • +
  • +

    T: Thread (Name of thread)

    +
  • +
  • +

    L: Logger name (use class name)

    +
  • +
  • +

    M: Message (log message)

    +
  • +
+
+
+

Example:

+
+
+
+
[D: 2013-09-05 16:40:36,464] [P: DEBUG] [C: 12345] [T: main] [L: my.package.MyClass]-[M: My message...]
+
+
+
+ + + + + +
+ + +When using devon4j-logging, this format is used by default. To achieve this format in Quarkus, set quarkus.log.console.format=[D: %d] [P: %p] [C: %X] [T: %t] [L: %c] [M: %m]%n in your properties. +
+
+
+
+

Correlation ID

+
+

In order to correlate separate HTTP requests to services belonging to the same user / session, we provide a servlet filter called DiagnosticContextFilter. +This filter takes a provided correlation ID from the HTTP header X-Correlation-Id. +If none was found, it will generate a new correlation id as UUID. +This correlation ID is added as MDC to the logger. +Therefore, it will then be included to any log message of the current request (thread). +Further concepts such as service invocations will pass this correlation ID to subsequent calls in the application landscape. Hence you can find all log messages related to an initial request simply via the correlation ID even in highly distributed systems.

+
+
+
+

Security

+
+

In order to prevent log forging attacks you can simply use the suggested JSON logging format. +Otherwise you can use com.devonfw.module.logging.common.impl.SingleLinePatternLayout as demonstrated here in order to prevent such attacks.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-logic-layer.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-logic-layer.html new file mode 100644 index 00000000..7b842c5b --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-logic-layer.html @@ -0,0 +1,338 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Logic Layer

+
+
+

The logic layer is the heart of the application and contains the main business logic. +According to our business architecture, we divide an application into components. +For each component, the logic layer defines different use-cases. Another approach is to define a component-facade, which we do not recommend for future applications. Especially for Quarkus applications, we want to simplify things and highly suggest omitting component-facades completely and using use-cases only. +It is very important that you follow the links to understand the concept of use-case in order to properly implement your business logic.

+
+
+

Responsibility

+
+
+

The logic layer is responsible to implement the business logic according to the specified functional demands and requirements. +Therefore, it creates the actual value of the application. The logic layer is responsible for invoking business logic in external systems. +The following additional aspects are also included in its responsibility:

+
+
+ +
+
+
+
+

Security

+
+
+

The logic layer is the heart of the application. It is also responsible for authorization and hence security is important in this current case. Every method exposed in an interface needs to be annotated with an authorization check, stating what role(s) a caller must provide in order to be allowed to make the call. The authorization concept is described here.

+
+
+

Direct Object References

+
+

A well-known security threat is Insecure Direct Object References. This simply gives you two options:

+
+
+
    +
  • +

    avoid direct object references

    +
  • +
  • +

    ensure that direct object references are secure

    +
  • +
+
+
+

Especially when using REST, direct object references via technical IDs are common practice. This implies that you have a proper authorization in place. This is especially tricky when your authorization does not only rely on the type of the data and according to static permissions but also on the data itself. Vulnerabilities for this threat can easily happen by design flaws and inadvertence. Here is an example from our sample application:

+
+
+

We have a generic use-case to manage BLOBs. In the first place, it makes sense to write a generic REST service to load and save these BLOBs. However, the permission to read or even update such BLOB depends on the business object hosting the BLOB. Therefore, such a generic REST service would open the door for this OWASP A4 vulnerability. To solve this in a secure way, you need individual services for each hosting business object to manage the linked BLOB and have to check permissions based on the parent business object. In this example the ID of the BLOB would be the direct object reference and the ID of the business object (and a BLOB property indicator) would be the indirect object reference.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-lombok.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-lombok.html new file mode 100644 index 00000000..1532a2bd --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-lombok.html @@ -0,0 +1,371 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Lombok

+
+
+

Lombok is a library that works with an annotation processor and will generate code for you to save you some time and reduce the amount of boilerplate code in your project. Lombok can generate getter and setter, equals methods, automate your logging variables for your classes, and more. Follow the list of all the features provided by Lombok to get an overview.

+
+
+

Lombok Dependency

+
+
+

To get access to the Lombok library just add the following dependency to the POM.xml.

+
+
+

The Lombok dependency:

+
+
+
+
<dependency>
+	<groupId>org.projectlombok</groupId>
+	<artifactId>lombok</artifactId>
+	<version>1.18.20</version>
+</dependency>
+
+
+
+

To get Lombok working with your current IDE you should also install the Lombok addon. Follow the Eclipse installation guide, there are also guides for other supported IDEs.

+
+
+
+
+

Lombok with Mapstruct

+
+
+

MapStruct takes advantage of generated getters, setters, and constructors from Lombok and uses them to +generate the mapper implementations. Lombok is also an annotation processor and since version 1.18.14 both frameworks are working together. Just add the lombok-mapstruct-binding to your POM.xml.

+
+
+

The Lombok annotation processor and the lombok-mapstruct-binding

+
+
+
+
<dependency>
+	<groupId>org.projectlombok</groupId>
+	<artifactId>lombok-mapstruct-binding</artifactId>
+	<version>0.2.0</version>
+</dependency>
+
+<plugin>
+	<groupId>org.apache.maven.plugins</groupId>
+	<artifactId>maven-compiler-plugin</artifactId>
+	<version>3.8.1</version>
+	<configuration>
+		<source>1.8</source>
+		<target>1.8</target>
+		<annotationProcessorPaths>
+			<path>
+				<groupId>org.projectlombok</groupId>
+				<artifactId>lombok</artifactId>
+				<version>1.18.20</version>
+			</path>
+			<path>
+				<groupId>org.projectlombok</groupId>
+				<artifactId>lombok-mapstruct-binding</artifactId>
+				<version>0.2.0</version>
+			</path>
+		</annotationProcessorPaths>
+	</configuration>
+</plugin>
+
+
+
+

In our quarkus reference project you can get a look into the usage of both frameworks.

+
+
+
+
+

Lombok Usage

+
+
+

Lombok can be used like any other annotation processor and will be shown in the simple example below to generate getter and setter for a Product Entity.

+
+
+
+
@Getter
+@Setter
+public class Product{
+
+    private String title;
+    private String description;
+    private BigDecimal price;
+}
+
+
+
+

For advanced Lombok usage follow the Baeldung Lombok guide or just read the Lombok javadoc

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-microservice.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-microservice.html new file mode 100644 index 00000000..ee383f0c --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-microservice.html @@ -0,0 +1,319 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Microservices in devonfw

+
+
+

The Microservices architecture is an approach for application development based on a series of small services grouped under a business domain. Each individual service runs autonomously, communicating with the others through its API. That independence between the different services makes it possible to manage (upgrade, fix, deploy, etc.) each one without affecting the rest of the system’s services. In addition, the microservices architecture allows specific services to be scaled when facing an increase in requests, so applications based on microservices are more flexible and stable, and can adapt quickly to changes in demand.

+
+
+

However, this new approach, developing apps based on microservices, presents some downsides.

+
+
+

Let’s see the main challenges when working with microservices:

+
+
+
    +
  • +

    Having the applications divided in different services we will need a component (router) to redirect each request to the related microservice. These redirection rules must implement filters to guarantee a proper functionality.

    +
  • +
  • +

    In order to manage correctly the routing process, the application will also need a catalog with all the microservices and its details: IPs and ports of each of the deployed instances of each microservice, the state of each instance and some other related information. This catalog is called Service Discovery.

    +
  • +
  • +

    With all the information of the Service Discovery the application will need to calculate and select between all the available instances of a microservice which is the suitable one. This will be figured out by the library Client Side Load Balancer.

    +
  • +
  • +

    The different microservices will be likely interconnected with each other, that means that in case of failure of one of the microservices involved in a process, the application must implement a mechanism to avoid the error propagation through the rest of the services and provide an alternative as a process result. To solve this, the pattern Circuit Breaker can be implemented in the calls between microservices.

    +
  • +
  • +

    As we have mentioned, the microservices will exchange calls and information with each other so our applications will need to provide a secured context to avoid not allowed operations or intrusions. In addition, since microservices must be able to operate in an isolated way, it is not recommended to maintain a session. To meet this need without using Spring sessions, a token-based authentication is used that exchanges information using the json web token (JWT) protocol.

    +
  • +
+
+
+

In addition to all of this we will find other issues related to this particular architecture that we will address fitting the requirements of each project.

+
+
+
    +
  • +

    Distributed data bases: each instance of a microservice should have only one data base.

    +
  • +
  • +

    Centralized logs: each instance of a microservice creates a log and a trace that should be centralized to allow an easier way to read all that information.

    +
  • +
  • +

    Centralized configuration: each microservice has its own configuration, so our applications should group all those configurations in only one place to ease the configuration management.

    +
  • +
  • +

    Automatized deployments: as we are managing several components (microservices, catalogs, balancers, etc.) the deployment should be automatized to avoid errors and ease this process.

    +
  • +
+
+
+

To address the above, devonfw microservices has an alternative approach Microservices based on Netflix-Tools.

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-migration-oasp3-to-devon3.1.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-migration-oasp3-to-devon3.1.html new file mode 100644 index 00000000..c28f64df --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-migration-oasp3-to-devon3.1.html @@ -0,0 +1,727 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Migration Guide from oasp 3.0.0 to devon4j 3.1.0 migration

+
+
+
    +
  • +

    Automatic migration with devcon doesn’t work with parent POMs; you need to migrate every single subproject on its own.

    +
  • +
  • +

    If your subprojects don’t contain the old oasp4j or devon4j version number, you have to copy your parent pom file into your child pom files and then use the migrate command.

    +
  • +
  • +

    use the devon4j migration command

    +
  • +
  • +

    after migration you need to update the version tag in the pom file manually. If you’re working with a parent pom you also need to update the version tag of the parent reference +in the child pom file.

    +
  • +
  • +

    In case you are using eclipse, now you have to update and rebuild all your maven projects (alt + F5)

    +
  • +
+
+
+

JsonDeserializer:

+
+
+
    +
  1. +

    Change the super class from AbstractJsonDeserializer to JsonDeserializer

    +
  2. +
  3. +

    Implement unimplemented methods or change the method signature from Pageable deserializeNode(JsonNode node) to Pageable deserialize(JsonParser p, DeserializationContext context)

    +
  4. +
  5. +

    To get the JsonNode you need to use the following methods with the JsonParser p: JsonNode node = p.getCodec().readTree(p);

    +
  6. +
  7. +

    To get values of properties, you need to change from getRequiredValue(node, "property", String.class) to JacksonUtil.readValue(node, "property", String.class, false);

    +
  8. +
+
+
+
+
+

QueryUtil update

+
+
+

whereString() (StringSearchConfigTo) method or similar:

+
+
+
    +
  1. +

    Check the parameter type with attention to the source of the used class (the classes may have the same name, but the one from oasp4j is obsolete)

    +
  2. +
  3. +

    Delete the old import of oasp4j (for example import io.oasp.module.beanmapping.common.api.BeanMapper) and import the new class of +devon4j (for example import com.devonfw.module.beanmapping.common.api.BeanMapper)

    +
  4. +
+
+
+
+
+

logback.xml file

+
+
+
    +
  1. +

    There are at most three changes that need to be made in the logback.xml file

    +
  2. +
  3. +

    Change the logging properties tag from +<property resource="io/oasp/logging/logback/application-logging.properties" /> to <property resource="com/devonfw/logging/logback/application-logging.properties" />

    +
  4. +
  5. +

    Change the appenders file all tag from +<include resource="io/oasp/logging/logback/appenders-file-all.xml" /> to <include resource="com/devonfw/logging/logback/appenders-file-all.xml" />

    +
  6. +
  7. +

    Change the appender console tag from <include resource="io/oasp/logging/logback/appender-console.xml" /> to <include resource="com/devonfw/logging/logback/appender-console.xml" />

    +
  8. +
+
+
+
+
+

OaspPackage:

+
+
+

If you use the OaspPackage class you can replace it with the Devon4jPackage class

+
+
+
+
+

AbstractLogic

+
+
+
    +
  1. +

    You can replace all net.sf.mmm.util imports with the appropriate com.devonfw.module imports. For example "import net.sf.mmm.util.entity.api.GenericEntity" to "import com.devonfw.module.basic.common.api.entity.GenericEntity"

    +
  2. +
  3. +

    Except the TransferObject and the AbstractTransferObject. These are replaced with the devonfw AbstractTo. +Example: "import net.sf.mmm.util.transferobject.api.AbstractTransferObject" or "import net.sf.mmm.util.transferobject.api.TransferObject" to "import com.devonfw.module.basic.common.api.to.AbstractTo".

    +
  4. +
+
+
+
+
+

BeanDozerConfig

+
+
+
    +
  1. +

    Change the @ComponentScan annotation from @ComponentScan(basePackages = { "io.oasp.module.beanmapping" }) to @ComponentScan(basePackages = { "com.devonfw.module.beanmapping" }).

    +
  2. +
  3. +

    Now you have to create a variable DOZER_MAPPING_XML with following content: static final String DOZER_MAPPING_XML = "config/app/common/dozer-mapping.xml".

    +
  4. +
  5. +

    Then you create a list beanMappings where you add the variable created in step 2.

    +
  6. +
  7. +

    To get a Mapper instance you now have to use a builder like Mapper mapper = DozerBeanMapperBuilder.create().withMappingFiles(beanMappings).build().

    +
  8. +
  9. +

    Change occurrences of io.oasp.module.beanmapping.common.impl.dozer.IdentityConverter in dozer-mappings.xml to com.devonfw.module.beanmapping.common.impl.dozer.IdentityConverter

    +
  10. +
  11. +

    Migration of dozer 5 to 6.4: +https://github.com/DozerMapper/dozer/blob/master/docs/adoc/migration/v6-to-v61.adoc +https://github.com/DozerMapper/dozer/blob/master/docs/adoc/migration/v61-to-v62.adoc +https://github.com/DozerMapper/dozer/blob/master/docs/adoc/migration/v62-to-v63.adoc +https://github.com/DozerMapper/dozer/blob/master/docs/adoc/migration/v63-to-v64.adoc

    +
  12. +
  13. +

    In addition, the semantics of <mapping type="one-way"> seems to be changed. If you for example just needed to exclude files on mapping from a to b one-way, you now have to declare an empty mapping as well from b to a one-way without any field(-extension) declarations to enable mapping from b to a at all. See also https://github.com/DozerMapper/dozer/issues/605 and https://github.com/DozerMapper/dozer/issues/451

    +
  14. +
+
+
+
+
+

pom.xml

+
+
+

In the pom.xml file you have to make some manual changes. You need to change all oasp dependencies to devonfw dependencies. Here are some examples:

+
+
+
    +
  1. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.modules</groupId>
    +      <artifactId>oasp4j-beanmapping</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.modules</groupId>
    +      <artifactId>devon4j-beanmapping</artifactId>
    +    </dependency>
    +
    +
    +
  2. +
  3. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.modules</groupId>
    +      <artifactId>oasp4j-security</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.modules</groupId>
    +      <artifactId>devon4j-security</artifactId>
    +    </dependency>
    +
    +
    +
  4. +
  5. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.modules</groupId>
    +      <artifactId>oasp4j-web</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.modules</groupId>
    +      <artifactId>devon4j-web</artifactId>
    +    </dependency>
    +
    +
    +
  6. +
  7. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.starters</groupId>
    +      <artifactId>oasp4j-starter-cxf-client-rest</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.starters</groupId>
    +      <artifactId>devon4j-starter-cxf-client-rest</artifactId>
    +    </dependency>
    +
    +
    +
  8. +
  9. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.starters</groupId>
    +      <artifactId>oasp4j-starter-cxf-client-ws</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.starters</groupId>
    +      <artifactId>devon4j-starter-cxf-client-ws</artifactId>
    +    </dependency>
    +
    +
    +
  10. +
  11. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.starters</groupId>
    +      <artifactId>oasp4j-starter-cxf-server-rest</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.starters</groupId>
    +      <artifactId>devon4j-starter-cxf-server-rest</artifactId>
    +    </dependency>
    +
    +
    +
  12. +
  13. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.starters</groupId>
    +      <artifactId>oasp4j-starter-spring-data-jpa</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.starters</groupId>
    +      <artifactId>devon4j-starter-spring-data-jpa</artifactId>
    +    </dependency>
    +
    +
    +
  14. +
  15. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.modules</groupId>
    +      <artifactId>oasp4j-batch</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.modules</groupId>
    +      <artifactId>devon4j-batch</artifactId>
    +    </dependency>
    +
    +
    +
  16. +
  17. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.modules</groupId>
    +      <artifactId>oasp4j-test</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.modules</groupId>
    +      <artifactId>devon4j-test</artifactId>
    +    </dependency>
    +
    +
    +
  18. +
  19. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.modules</groupId>
    +      <artifactId>oasp4j-logging</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.modules</groupId>
    +      <artifactId>devon4j-logging</artifactId>
    +    </dependency>
    +
    +
    +
  20. +
  21. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.modules</groupId>
    +      <artifactId>oasp4j-jpa-spring-data</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.modules</groupId>
    +      <artifactId>devon4j-jpa-spring-data</artifactId>
    +    </dependency>
    +
    +
    +
  22. +
  23. +

    from

    +
    +
    +
        <dependency>
    +      <groupId>io.oasp.java.modules</groupId>
    +      <artifactId>oasp4j-rest</artifactId>
    +	</dependency>
    +
    +
    +
    +
    +
    to
    +
    +
    +
    +
    +
    	<dependency>
    +      <groupId>com.devonfw.java.modules</groupId>
    +      <artifactId>devon4j-rest</artifactId>
    +    </dependency>
    +
    +
    +
  24. +
+
+
+
+
+

MutableGenericEntity

+
+
+

If you use the MutableGenericEntity<> class you have to change it to the PersistenceEntity<> class. Change the import "net.sf.mmm.util.entity.api.MutableGenericEntity" to +"import com.devonfw.module.basic.common.api.entity.PersistenceEntity".

+
+
+
+
+

CompositeTo

+
+
+

If you use the CompositeTo class you should now use the AbstractTo class. Just change the import from "import net.sf.mmm.util.transferobject.api.CompositeTo" +to "import com.devonfw.module.basic.common.api.to.AbstractTo".

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-migration-spring-quarkus.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-migration-spring-quarkus.html new file mode 100644 index 00000000..ab95e070 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-migration-spring-quarkus.html @@ -0,0 +1,545 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Migrate from Spring to Quarkus

+
+
+

This guide will cover the migration process of a Spring application to a Quarkus application. There are already articles about migrating from Spring to Quarkus (e.g. https://developers.redhat.com/blog/2020/04/10/migrating-a-spring-boot-microservices-application-to-quarkus, https://dzone.com/articles/migrating-a-spring-boot-application-to-quarkus-cha). +This guide will focus more on the devon4j specific aspects. We assume that a working Spring application exists, built in the classic devon4j specific way (e.g. Jump The Queue or My Thai Star).

+
+
+

Create the Quarkus application

+
+
+

We start with an empty Quarkus project. You can create the project with Maven on the command line or use the online generator. The advantage of the online generator is that you have a pre-selection of dependencies to use in your project. +For starters, let’s select the basic dependencies required to develop a REST service with database connectivity (you can use one of the links in the Quarkus template guide): RESTEasy JAX-RS, RESTEasy Jackson, Hibernate ORM, Spring Data JPA API, JDBC Driver (choose the type of database you need), Flyway (if you have database migration schemas), SmallRye Health (optional for Health Monitoring)

+
+
+

The list does not include all required dependencies. We will add more dependencies to the project later. For now, generate the application with these dependencies.

+
+
+

Migration Toolkit from Red Hat

+
+

Red Hat provides a migration toolkit (MTA, Migration Toolkit for Applications), that supports migration of a Spring to a Quarkus application. There are several versions of this toolkit (e.g., a web console, a Maven plugin, or an IDE plugin). +The MTA analyzes your existing application and generates a report with hints and instructions for migrating from Spring to Quarkus. For example, it gives you an indication of which dependencies are not supported in your project for a Quarkus application and which dependencies you need to swap them with. The analysis is rule-based, and you can also add your own rules that will be checked during analysis.

+
+
+
+
+
+

Entities

+
+
+

There is nothing special to consider when creating the entities. In most cases, you can simply take the code from your Spring application and use it for your Quarkus application. Usually, the entities extend a superclass ApplicationPersistenceEntity containing, for example, the id property. You can also take this class from your Spring application and reuse it.

+
+
+
+
+

Transfer objects

+
+
+

The next step is to create the appropriate transfer objects for the entities. In a devon4j Spring application, we would use CobiGen to create these classes. Since CobiGen is not usable for this purpose in Quarkus applications yet, we have to create the classes manually.

+
+
+

First, we create some abstract base classes for the search criteria and DTO classes. Normally, these would also be created by CobiGen.

+
+
+
AbstractSearchCriteriaTo
+
+
public abstract class AbstractSearchCriteriaTo extends AbstractTo {
+
+  private static final long serialVersionUID = 1L;
+
+  private Pageable pageable;
+
+  //getter + setter for pageable
+}
+
+
+
+
AbstractDto
+
+
public abstract class AbstractDto extends AbstractTo {
+
+  private static final long serialVersionUID = 1L;
+
+  private Long id;
+
+  private int modificationCounter;
+
+  public AbstractDto() {
+
+    super();
+  }
+
+  //getter + setter
+
+  @Override
+  protected void toString(StringBuilder buffer) {
+    ...
+  }
+}
+
+
+
+

The class AbstractTo, extended by other classes, would be provided by the devon4j-basic module in a devon4j Spring application. You can take the code from here and reuse it in your Quarkus project.

+
+
+

Now you can create your transfer objects. Most of the code of the transfer objects of your Spring application should be reusable. For Quarkus, we recommend (as mentioned here) to use *Dto instead of *Eto classes. Be sure to change the names of the classes accordingly.

+
+
+
+
+

Data Access Layer

+
+
+

In devon4j, we propose to use Spring Data JPA to build the data access layer using repositories and Querydsl to build dynamic queries. We will also use this approach for Quarkus applications, but we need to change the implementation because the devon4j modules are based on reflection, which is not suitable for Quarkus. +In Quarkus we will use Querydsl using code generation. So for this layer, more changes are required and we can’t just take the existing code.

+
+
+

First, create a repository interface for your entity class that extends JpaRepository (see here).

+
+
+

To add QueryDSL support to your project, add the following dependencies to your pom.xml file:

+
+
+
pom.xml
+
+
<dependency>
+  <groupId>com.querydsl</groupId>
+  <artifactId>querydsl-jpa</artifactId>
+  <version>4.3.1</version>
+</dependency>
+<dependency>
+  <groupId>com.querydsl</groupId>
+  <artifactId>querydsl-apt</artifactId>
+  <scope>provided</scope>
+  <version>4.3.1</version>
+</dependency>
+
+
+
+

As mentioned above, we will use QueryDSL with code generation. For this, add the QueryDSL annotation processor to your plugins:

+
+
+
pom.xml
+
+
<plugins>
+...
+  <plugin>
+    <groupId>com.mysema.maven</groupId>
+    <artifactId>apt-maven-plugin</artifactId>
+    <version>1.1.3</version>
+    <executions>
+      <execution>
+        <phase>generate-sources</phase>
+        <goals>
+          <goal>process</goal>
+        </goals>
+        <configuration>
+          <outputDirectory>target/generated-sources/annotations</outputDirectory>
+          <processor>com.querydsl.apt.jpa.JPAAnnotationProcessor</processor>
+        </configuration>
+      </execution>
+    </executions>
+  </plugin>
+</plugins>
+
+
+
+

To implement the queries, follow the corresponding guide.

+
+
+

Set the following properties in the application.properties file to configure the connection to your database (see also here):

+
+
+
+
quarkus.datasource.db-kind=...
+quarkus.datasource.jdbc.url=...
+quarkus.datasource.username=...
+quarkus.datasource.password=...
+
+
+
+
+
+

Logic Layer

+
+
+

For the logic layer, devon4j uses a use-case approach. You can reuse the use case interfaces from the api module of the Spring application. Again, make sure to rename the transfer objects.

+
+
+

Create the appropriate class that implements the interface. Follow the implementation section of the use-case guide to implement the methods. For mapping the entities to the corresponding transfer objects, see the next section.

+
+
+
+
+

Mapping

+
+
+

For bean mapping, we need to use a completely different approach in the Quarkus application than in the Spring application. For Quarkus, we use MapStruct, which creates the mapper at build time rather than at runtime using reflection. Add the following dependencies to your pom.xml.

+
+
+
pom.xml
+
+
<dependency>
+  <groupId>org.mapstruct</groupId>
+  <artifactId>mapstruct-processor</artifactId>
+  <version>1.4.2.Final</version>
+</dependency>
+<dependency>
+  <groupId>org.mapstruct</groupId>
+  <artifactId>mapstruct</artifactId>
+  <version>1.4.2.Final</version>
+</dependency>
+
+
+
+

Then you can create the mapper as follows:

+
+
+
Mapper
+
+
@Mapper(componentModel = "cdi")
+public interface YourEntityMapper {
+  YourEntityDto map(YourEntity entity);
+
+  YourEntity map(YourEntityDto dto);
+
+  ...
+}
+
+
+
+

Inject the mapper into your use-case implementation and simply use the methods. The method implementations of the mapper are created when the application is built.

+
+
+
+
+

Service Layer

+
+
+

For the implementation of the service layer, we use JAX-RS for both Quarkus and Spring applications to create the REST services. Classic devon4j Spring applications rely on Apache CXF as the implementation of JAX-RS. +For Quarkus, we use RESTEasy. Since both are implementations of JAX-RS, much of the Spring application code can be reused.

+
+
+

Take the definition of the REST endpoints from the api module of the Spring application (make sure to rename the transfer objects), inject the use-cases from the logic layer and use them in the REST service methods as follows:

+
+
+
REST service
+
+
@Path("/path/v1")
+public class YourComponentRestService {
+
+  @Inject
+  UcFindYourEntity ucFindYourEntity;
+
+  @Inject
+  UcManageYourEntity ucManageYourEntity;
+
+  @GET
+  @Path("/yourEntity/{id}/")
+  public YourEntityDto getYourEntity(@PathParam("id") long id) {
+
+    return this.ucFindYourEntity.findYourEntity(id);
+  }
+
+  ...
+}
+
+
+
+
+
+

Summary

+
+
+

As you have seen, some parts hardly differ when migrating a Spring application to a Quarkus application, while other parts differ more. The above sections describe the parts needed for simple applications that provide REST services with a data access layer. +If you add more functionality, more customization and other frameworks, dependencies may be required. If that is the case, take a look at the corresponding guide on the topic in the devon4j documentation or check if there is a tutorial on the official Quarkus website.

+
+
+

Furthermore, we can summarize that migrating from a Spring application to a Quarkus representative is not complex. Although Quarkus is a very young framework (release 1.0 was in 2019), it brings a lot of proven standards and libraries that you can integrate into your application. +This makes it easy to migrate and reuse code from existing (Spring) applications. Also, Quarkus comes with Spring API compatibility for many Spring modules (Spring Data JPA, Spring DI, etc.), which makes it easier for developers to reuse their knowledge.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-monitoring.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-monitoring.html new file mode 100644 index 00000000..8c4ee678 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-monitoring.html @@ -0,0 +1,355 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

==Monitoring

+
+
+

For monitoring a complex application landscape it is crucial to have an exact overview which applications are up and running and which are not and why. +In devonfw we only focus on topics which are most important when developing production-ready applications. +On a high level view we strongly suggest to separate the application to be monitored from the monitoring system itself. +Therefore, your application should concentrate on providing app specific data for the monitoring. +Aspects such as aggregation, visualization, search, alerting, etc. should be addressed outside of your app by a monitoring system product. +There are many products providing such a monitoring system like checkmk, icinga, SkyWalking, etc. +Please note that there is a huge list of such products and devonfw is not biased or aims to make a choice for you. +Instead please search and find the products that fit best for your requirements and infrastructure.

+
+
+

Types of monitoring

+
+
+

As monitoring covers a lot of different aspects we separate the following types of monitoring and corresponding data:

+
+
+
    +
  • +

    Log-monitoring
    +is about collecting and monitoring the logs of all apps and containers in your IT landscape. It is suitable for events such as an HTTP request with its URL, resulting status code and duration in milliseconds. Your monitoring may not react to such data in realtime. Instead it may take a delay of one or a few seconds.

    +
  • +
  • +

    Infrastructure monitoring
    +is about monitoring the (hardware) infrastructure with measures like usage of CPU, memory, disc-space, etc. This is a pure operational task and your app should have nothing to do with this. In other words it is a waste if your app tries to monitor these aspects as existing products can do this much better and your app will only see virtual machines and is unable to see the physical infrastructure.

    +
  • +
  • +

    Health check
    +is about providing internal data about the current health of your app. Typically you provide sensors with health status per component or interface to neighbour service (database connectivity, etc.).

    +
  • +
  • +

    Application Performance Monitoring
    +is about measuring performance and tracing down performance issues.

    +
  • +
+
+
+
+
+

Health-Check

+
+
+

The idea of a health check is to provide monitoring data about the current health status of your application. +This allows to integrate this specific data into the monitoring system used for your IT landscape. +In order to keep the monitoring simple and easy to integrate consider using the following best practices:

+
+
+
    +
  • +

    Use simple and established protocols such as REST instead of JMX via RMI.

    +
  • +
  • +

    Consider using recent standards such as microprofile-health.

    +
  • +
  • +

    Consider to drop access-control for your monitoring interfaces and for security prevent external access to it in your infrastructure (loadbalancers or gateways). Monitoring is only for usage within an IT landscape internally. It does not make sense for externals and end-users to access your app for reading monitoring data from a random node decided by a loadbalancer. Further, external access can easily lead to sensitive data exposure.

    +
  • +
  • +

    Consider to define different end-points per usage-scenario. So if you want the loadbalancer to ask your app monitoring for availability of each node then create a separate service URL that only provides OK or anything else for failure (NOK, 404, 500, timeout). Do not mix this with a health-check that needs more detailed information.

    +
  • +
  • +

    Also do not forget about basic features such as providing the name and the release version of your application.

    +
  • +
  • +

    Be careful to automate decisions based on monitoring and health checks. It easily turns out to be stupid if you automatically restart your pod or container because of some monitoring indicator. In the worst case a failure of a central component will cause your health-check to report down for all apps and as a result all your containers will be restarted frequently. Instead of curing problems such decisions will cause much more harm and trouble.

    +
  • +
  • +

    Avoid causing reasonable load with your monitoring and health-check itself. In many cases it is better to use log-monitoring or to collect monitoring data from use-cases that happen in your app anyway. If you create dummy read and write requests in your monitoring implementation you will easily turn it into a DOS-attack.

    +
  • +
+
+
+

For spring you can simply integrate app monitoring and health check via spring-boot-actuator.

+
+
+

For quarkus you can simply integrate app monitoring via micrometer or smallrye-metrics and health check via smallrye-health.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-openapi.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-openapi.html new file mode 100644 index 00000000..8fe3f328 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-openapi.html @@ -0,0 +1,410 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==OpenAPI

+
+
+

The OpenAPI Specification (OAS) defines a standard for describing RESTful web services in a machine- and human-readable format. OpenAPI allows REST APIs to be defined in a uniform manner. +Technically, an OpenAPI document is written in YAML or JSON format. The specification defines the structure of a REST API by describing attributes such as path information, response codes, and return types. Some examples can be found here. +Apart from documenting the API, this schema then also acts as a contract between provider and consumers, guaranteeing interoperability between various technologies.

+
+
+

OpenAPI is often used in combination with Swagger. Swagger is a set of tools built around OpenAPI that help developers to design and document their REST APIs. +The most common tool is the Swagger UI, which uses the OpenAPI specification to create a graphical interface of the REST API that you can also interact with. Check out the Swagger online editor to get a feeling for it.

+
+
+ + + + + +
+ + +
+

Swagger and OpenAPI: Swagger is a former specification, based on which the OpenAPI was created. Swagger 2.0 is still commonly used for describing APIs. OpenAPI is an open-source collaboration and it started from version 3.0.0 (semver).

+
+
+
+
+

There are many tools that work with OpenAPI: code generators, documentation tools, validators etc.

+
+
+

OpenAPI generation

+
+
+

There are several extensions you can use in your project to automatically generate the OpenAPI specifications and Swagger UI from your REST API (code-first approach). devon4j recommends the following two extensions/plugins to use:

+
+
+
    +
  • +

    Smallrye OpenAPI extension

    +
  • +
  • +

    ServicedocGen maven plugin

    +
  • +
+
+
+

Smallrye OpenAPI

+
+

Quarkus provides OpenAPI support through Smallrye OpenAPI extension:

+
+
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-smallrye-openapi</artifactId>
+</dependency>
+
+
+
+

After adding the extension to your project, you can access the Swagger UI by navigating to /q/swagger-ui.

+
+
+

The OpenAPI specification can be accessed by requesting /q/openapi.

+
+
+

Smallrye OpenAPI is compliant with MicroProfile OpenAPI. You can add MicroProfile annotations to further describe your REST endpoints and extend the OpenAPI documentation. +More information for this can be found here or here.

+
+
+ + + + + +
+ + +
+

Quarkus recommends using this extension and you can document your APIs in great detail by using the MicroProfile annotations. The downside to this is that using these annotations will blow up your code and you will have some duplicate information in it. +If you don’t want to specify the REST API again with all this annotation based information, we also recommend taking a look at the ServicedocGen Maven plugin for your Quarkus applications when implementing JAX-RS APIs.

+
+
+
+
+
+

ServicedocGen Maven Plugin

+
+

The ServicedocGen maven plugin can be used within both Spring and Quarkus applications. +It works a bit differently than the Smallrye extensions mentioned above. The plugin analyzes the REST API and its JavaDoc and then generates the OpenAPI specification and the Swagger UI as static files. So no Swagger or MicroProfile annotations have to be added.

+
+
+

The plugin can be configured in the pom.xml file of your application as follows:

+
+
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>org.codehaus.mojo</groupId>
+      <artifactId>servicedocgen-maven-plugin</artifactId>
+      <version>1.0.0</version>
+      <executions>
+        <execution>
+          <goals>
+            <goal>generate</goal>
+          </goals>
+        </execution>
+      </executions>
+      <configuration>
+        <descriptor>
+          <info>
+            <title>...</title>
+            <description>...</description>
+          </info>
+          <host>...</host>
+          <port>...</port>
+          <basePath>...</basePath>
+          <schemes>
+            <scheme>...</scheme>
+          </schemes>
+        </descriptor>
+      </configuration>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+

In the configuration section you have to define additional information to generate the OpenAPI specification correctly. An example can be found in our Quarkus reference application. +When building the application, an OpenApi.yaml and a SwaggerUI.html file are created in the /target/site folder. To make the Swagger UI available in the browser, the file must be served by some servlet.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-oracle.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-oracle.html new file mode 100644 index 00000000..6b7fdacb --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-oracle.html @@ -0,0 +1,410 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Oracle RDBMS

+
+
+

This section contains hints for those who use Oracle RDBMS. If you use a different persistence technology you can simply ignore it. Besides general hints about the driver there are tips for more tight integration with other Oracle features or products. However, if you work for a project where Oracle RDBMS is settled and not going to be replaced (you are in a vendor lock-in anyway), you might want to use even more from Oracle technology to take advantage from a closer integration.

+
+
+

XE

+
+
+

For local development you should setup Oracle XE (eXpress Edition). +You need an oracle account, then you can download it from here.

+
+
+

The most comfortable way to run it as needed is using docker. You can build your own docker image from the downloaded RPM using the instructions and dockerfile from oracle. (In case the build of the docker-image fails reproducibly and you want to give up with the Dockerfiles from Oracle you can also try this inofficial docker-oracle-xe solution).

+
+
+

To connect to your local XE database you need to use xe as the SID of your main database that cannot be changed. The hostname should be localhost and the port is by default 1521 if you did not remap it with docker to something else. However, starting with XE 18c you need to be aware that oracle introduced a multi-tenant architecture. Hence xe refers to the root CDB while you typically want to connect to the PDB (pluggable database) and XE ships with exactly one of these called xepdb1. To connect with SQL Developer switch Connection Type from Basic to Advanced and enter the Custom JDBC URL like e.g.

+
+
+
+
jdbc:oracle:thin:@//localhost:1521/xepdb1
+
+
+
+

The same way you can also connect from your devon4j app via JDBC.

+
+
+
+
+

Driver

+
+
+

The oracle JDBC driver is not available in maven central. Depending on the Oracle DB version and the Java version, you can use either the 11g/ojdbc6, 12c/ojdbc7, or 12c/ojdbc8 version of the driver. Oracle JDBC drivers usually are backward and forward compatible so you should be able to use the 12c/ojdbc8 driver with an 11g DB etc. As a rule of thumb, use the 12c/ojdbc8 driver unless you must use Java7. All JDBC drivers can be downloaded without registration: 11g/ojdbc6, 12c/ojdbc7, and 12c/ojdbc8. Your project should use a maven repository server such as nexus or artifactory. +Your dependency for the oracle driver should look as follows (use artifactId "ojdbc6" or "ojdbc7" for the older drivers):

+
+
+
+
<dependency>
+  <groupId>com.oracle</groupId>
+  <artifactId>ojdbc8</artifactId>
+  <version>${oracle.driver.version}</version>
+</dependency>
+
+
+
+

oracle.driver.version being 11.2.0.4 for 11g/ojdbc6, or 12.1.0.1 for 12c/ojdbc7, or 12.2.0.1 for 12c/ojdbc8 or newer

+
+
+
+
+

Pooling

+
+
+

In order to boost performance JDBC connections should be pooled and reused. If you are using Oracle RDBMS and do not plan to change that you can use the Oracle specific connection pool "Universal Connection Pool (UCP)" that is perfectly integrated with the Oracle driver. According to the documentation, UCP can even be used to manage third party data sources. The 11g version of UCP can be downloaded without registration here, the 12c version of UCP is available at the same download locations as the 12c JDBC driver (see above). As a rule of thumb, use the version that is the same as the JDBC driver version. +Again, you have to upload the artefact manually to your maven repository. The dependency should look like this:

+
+
+
+
<dependency>
+  <groupId>com.oracle</groupId>
+  <artifactId>ucp</artifactId>
+  <version>${oracle.ucp.version}</version>
+</dependency>
+
+
+
+

with oracle.ucp.version being 11.2.0.4 or 12.2.0.1 or newer.

+
+
+

Configuration is done via application.properties like this (example):

+
+
+
+
#Oracle UCP
+##Datasource for accessing the database
+spring.datasource.url=jdbc:oracle:thin:@192.168.58.2:1521:xe
+spring.jpa.database-platform=org.hibernate.dialect.Oracle12cDialect
+spring.datasource.user=MyUser
+spring.datasource.password=ThisIsMyPassword
+spring.datasource.driver-class-name=oracle.jdbc.OracleDriver
+spring.datasource.schema=MySchema
+
+spring.datasource.type=oracle.ucp.jdbc.PoolDataSourceImpl
+spring.datasource.factory=oracle.ucp.jdbc.PoolDataSourceFactory
+spring.datasource.factory-method=getPoolDataSource
+spring.datasource.connectionFactoryClassName=oracle.jdbc.pool.OracleDataSource
+spring.datasource.validateConnectionOnBorrow=true
+spring.datasource.connectionPoolName=MyPool
+spring.datasource.jmx-enabled=true
+
+##Optional: Set the log level to INTERNAL_ERROR, SEVERE, WARNING, INFO, CONFIG, FINE, TRACE_10, FINER, TRACE_20, TRACE_30, or FINEST
+##logging.level.oracle.ucp=INTERNAL_ERROR
+##Optional: activate tracing
+##logging.level.oracle.ucp.jdbc.oracle.OracleUniversalPooledConnection=TRACE
+
+#Optional: Configures pool size manually
+#spring.datasource.minPoolSize=10
+#spring.datasource.maxPoolSize=40
+#spring.datasource.initialPoolSize=20
+
+
+
+

Resources: FAQ, developer’s guide, Java API Reference. For an in-depth discussion on how to use JDBC and UCP, see the Oracle documentation Connection Management Strategies for Java Applications using JDBC and UCP.

+
+
+

Note: there is a bug in UCP 12.1.0.2 that results in the creation of thousands of java.lang.Timer threads over hours or days of system uptime (see article on stackoverflow). Also, Oracle has a strange bug fixing / patching policy: instead of producing a fixed version 12.1.0.3 or 12.1.0.2.x, Oracle publishes collections of *.class files that must be manually patched into the ucp.jar! Therefore, use the newest versions only.

+
+
+
+
+

Messaging

+
+
+

In case you want to do messaging based on JMS you might consider the Oracle JMS also called Oracle Streams Advanced Queuing, or Oracle Advanced Queuing, or OAQ or AQ for short. OAQ is a JMS provider based on the Oracle RDBMS and included in the DB product for no extra fee. OAQ has some features that exceed the JMS standard like a retention time (i.e. a built-in backup mechanism that allows to make messages "unread" within a configurable period of time so that these messages do not have to be resent by the sending application). Also, OAQ messages are stored in relational tables so they can easily be process messages within the same technical transaction. They also might be observed by a test driver in a system test scenario.

+
+
+

This is possible only due to the fact that OAQ queues and RDBMS tables actually reside in the same database and quite straightforward to setup if you are using an Oracle connection pool.

+
+
+

If you are working with an application server or use a different connection pool, things get more complicated. Spring offers a (now deprecated) project, the Spring Data JDBC Extension, in order to process OAQ messages within the same technical transaction even when using an application server.

+
+
+
+
+

General Notes on the use of Oracle products

+
+
+

Oracle sells commercial products and receives licence fees for them. This includes access to a support organization. Therefore, at an early stage of your project, prepare for contacting oracle support in case of technical problems. You will need the Oracle support ID of your customer [i.e. the legal entity who pays the licence fee and runs the RDBMS] and your customer must grant you permission to use it in a service request - it is not legal to use your own support ID in a customer-related project. Your customer pays for that service anyway, so use it in case of a problem!

+
+
+

Software components like the JDBC driver or the UCP may be available without a registration or fee but they are protected by the Oracle Technology Network (OTN) License Agreement. The most important aspect of this licence agreement is the fact that an IT service provider is not allowed to simply download the Oracle software component, bundle it in a software artefact and deliver it to the customer. Instead, the Oracle software component must be (from a legal point of view) provided by the owner of the Oracle DB licence (i.e. your customer). This can be achieved in two ways: Advise your customer to install the Oracle software component in the application server as a library that can be used by your custom built system. Or, in cases where this is not feasible, e.g. in a OpenShift environment where the IT service provider delivers complete Docker images, you must advise your customer to (legally, i.e. documented in a written form) provide the Oracle software component to you, i.e. you don’t download the software component from the Oracle site but receive it from your customer.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-queueing.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-queueing.html new file mode 100644 index 00000000..9888a293 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-queueing.html @@ -0,0 +1,310 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ + +
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-repository.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-repository.html new file mode 100644 index 00000000..9e671bba --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-repository.html @@ -0,0 +1,552 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Spring Data +Spring Data JPA is supported by both Spring and Quarkus. However, in Quarkus this approach still has some limitations. For detailed information, see the official Quarkus Spring Data guide.

+
+
+

Motivation

+
+
+

The benefits of Spring Data are (for examples and explanations see next sections):

+
+
+
    +
  • +

    All you need is one single repository interface for each entity. No need for a separate implementation or other code artifacts like XML descriptors, NamedQueries class, etc.

    +
  • +
  • +

    You have all information together in one place (the repository interface) that actually belongs together (whereas in the classic approach you have the static queries in an XML file, constants to them in the NamedQueries class and referencing usages in DAO implementation classes).

    +
  • +
  • +

    Static queries are most simple to realize as you do not need to write any method body. This means you can develop faster.

    +
  • +
  • +

    Support for paging is already built-in. Again, for static query methods there is nothing you have to do except using the paging objects in the signature.

    +
  • +
  • +

    Still you have the freedom to write custom implementations via default methods within the repository interface (e.g. for dynamic queries).

    +
  • +
+
+
+
+
+

Dependency

+
+
+

In case you want to switch to or add Spring Data support to your Spring or Quarkus application, all you need is to add the respective maven dependency:

+
+
+
spring
+
+
<dependency>
+  <groupId>org.springframework.boot</groupId>
+  <artifactId>spring-boot-starter-data-jpa</artifactId>
+</dependency>
+
+
+
+
quarkus
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-spring-data-jpa</artifactId>
+</dependency>
+
+
+
+
+
+

Repository

+
+
+

For each entity «Entity»Entity an interface is created with the name «Entity»Repository extending JpaRepository. +Such repository is the analogy to a Data-Access-Object (DAO) used in the classic approach or when Spring Data is not an option.

+
+
+
Repository
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long> {
+
+}
+
+
+
+

The Spring Data repository provides some basic implementations for accessing data, e.g. returning all instances of a type (findAll) or returning an instance by its ID (findById).

+
+
+
+
+

Custom method implementation

+
+
+

In addition, repositories can be enriched with additional functionality, e.g. to add QueryDSL functionality or to override the default implementations, by using so called repository fragments:

+
+
+

Example

+
+

The following example shows how to write such a repository:

+
+
+
Repository
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long>, ProductFragment {
+
+  @Query("SELECT product FROM ProductEntity product" //
+      + " WHERE product.title = :title")
+  List<ProductEntity> findByTitle(@Param("title") String title);
+
+  @Query("SELECT product FROM ProductEntity product" //
+      + " WHERE product.title = :title")
+  Page<ProductEntity> findByTitlePaginated(@Param("title") String title, Pageable pageable);
+}
+
+
+
+
Repository fragment
+
+
public interface ProductFragment {
+  Page<ProductEntity> findByCriteria(ProductSearchCriteriaTo criteria);
+}
+
+
+
+
Fragment implementation
+
+
public class ProductFragmentImpl implements ProductFragment {
+  @Inject
+  EntityManager entityManager;
+
+  public Page<ProductEntity> findByCriteria(ProductSearchCriteriaTo criteria) {
+    QProductEntity product = QProductEntity.productEntity;
+    JPAQuery<ProductEntity> query = new JPAQuery<ProductEntity>(this.entityManager);
+    query.from(product);
+
+    String title = criteria.getTitle();
+    if ((title != null) && !title.isEmpty()) {
+      query.where(product.title.eq(title));
+    }
+
+    List<ProductEntity> products = query.fetch();
+    return new PageImpl<>(products, PageRequest.of(criteria.getPageNumber(), criteria.getPageSize()), products.size());
+  }
+}
+
+
+
+

This ProductRepository has the following features:

+
+
+
    +
  • +

    CRUD support from Spring Data (see JavaDoc for details).

    +
  • +
  • +

    Support for QueryDSL integration, paging and more.

    +
  • +
  • +

    A static query method findByTitle to find all ProductEntity instances from DB that have the given title. Please note the @Param annotation that links the method parameter with the variable inside the query (:title).

    +
  • +
  • +

    The same with pagination support via findByTitlePaginated method.

    +
  • +
  • +

    A dynamic query method findByCriteria showing the QueryDSL and paging integration into Spring via a fragment implementation.

    +
  • +
+
+
+

You can find an implementation of this ProductRepository in our Quarkus reference application.

+
+
+ + + + + +
+ + +In Quarkus, native and named queries via the @Query annotation are currently not supported +
+
+
+
+

Integration of Spring Data in devon4j-spring

+
+

For Spring applications, devon4j offers a proprietary solution that integrates seamlessly with QueryDSL and uses default methods instead of the fragment approach. A separate guide for this can be found here.

+
+
+
+

Custom methods without fragment approach

+
+

The fragment approach is a bit laborious, as three types (repository interface, fragment interface and fragment implementation) are always needed to implement custom methods. +We cannot simply use default methods within the repository because we cannot inject the EntityManager directly into the repository interface.

+
+
+

As a workaround, you can create a GenericRepository interface, as is done in the devon4j jpa-spring-data module.

+
+
+
+
public interface GenericRepository<E> {
+
+  EntityManager getEntityManager();
+
+  ...
+}
+
+
+
+
+
public class GenericRepositoryImpl<E> implements GenericRepository<E> {
+
+  @Inject
+  EntityManager entityManager;
+
+  @Override
+  public EntityManager getEntityManager() {
+
+    return this.entityManager;
+  }
+
+  ...
+}
+
+
+
+

Then, all your repository interfaces can extend the GenericRepository and you can implement queries directly in the repository interface using default methods:

+
+
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long>, GenericRepository<ProductEntity> {
+
+  default Page<ProductEntity> findByTitle(Title title) {
+
+    EntityManager entityManager = getEntityManager();
+    Query query = entityManager.createNativeQuery("select * from Product where title = :title", ProductEntity.class);
+    query.setParameter("title", title);
+    List<ProductEntity> products = query.getResultList();
+    return new PageImpl<>(products);
+  }
+
+  ...
+}
+
+
+
+
+
+
+

Drawbacks

+
+
+

Spring Data also has some drawbacks:

+
+
+
    +
  • +

    Some kind of magic behind the scenes that is not so easy to understand. So in case you want to extend all your repositories without providing the implementation via a default method in a parent repository interface you need to deep-dive into Spring Data. We assume that you do not need that and hope what Spring Data and devon already provide out-of-the-box is already sufficient.

    +
  • +
  • +

    The Spring Data magic also includes guessing the query from the method name. This is not easy to understand and especially to debug. Our suggestion is not to use this feature at all and either provide a @Query annotation or an implementation via default method.

    +
  • +
+
+
+
+
+

Limitations in Quarkus

+
+
+
    +
  • +

    Native and named queries are not supported using @Query annotation. You will receive something like: Build step io.quarkus.spring.data.deployment.SpringDataJPAProcessor#build threw an exception: java.lang.IllegalArgumentException: Attribute nativeQuery of @Query is currently not supported

    +
  • +
  • +

    Customizing the base repository for all repository interfaces in the code base, which is done in Spring Data by registering a class that extends SimpleJpaRepository

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-rest-philosophy.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-rest-philosophy.html new file mode 100644 index 00000000..17cf0a28 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-rest-philosophy.html @@ -0,0 +1,723 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==REST Philosophy

+
+
+

REST and RESTful often implies very strict and specific rules and conventions. +However different people will often have different opinions of such rules. +We learned that this leads to "religious discussions" (starting from PUT vs. POST and IDs in path vs. payload up to Hypermedia and HATEOAS). +These "religious discussions" waste a lot of time and money without adding real value in case of common business applications (if you publish your API on the internet to billions of users this is a different story). +Therefore we give best practices that lead to simple, easy and pragmatic "HTTP APIs" (to avoid the term "REST services" and end "religious discussions"). +Please also note that we do not want to assault anybody nor force anyone to follow our guidelines. +This guide is just an option for people who want to be pragmatic and face such "religious discussions". +Please read the following best practices carefully and be aware that they might slightly differ from what your first hit on the web will say about REST (see e.g. RESTful cookbook).

+
+
+

If you want to provide an entity with a different structure do not append further details to an element URL but create a separate collection URL as base. +So use https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity-with-details/42 instead of https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/42/with-details. +For offering a CTO simply append -cto to the collection URL (e.g. …​/myentity-cto/).

+
+
+

While REST was designed as a pragmatic approach it sometimes leads to "religious discussions" e.g. about using PUT vs. POST (see ATTENTION notice above). +As the devonfw has a strong focus on usual business applications it proposes a more "pragmatic" approach to REST services.

+
+
+

On the next table we compare the main differences between the "canonical" REST approach (or RESTful) and the devonfw proposal.

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Usage of HTTP methods
HTTP MethodRESTful Meaningdevonfw

GET

Read single element.

+

Search on an entity (with parametrized url)

Read a single element.

PUT

Replace entity data.

+

Replace entire collection (typically not supported)

Not used

POST

Create a new element in the collection

Create or update an element in the collection.

+

Search on an entity (parametrized post body)

+

Bulk deletion.

DELETE

Delete an entity.

+

Delete an entire collection (typically not supported)

Delete an entity.

+

Delete an entire collection (typically not supported)

+
+

Please consider these guidelines and rationales:

+
+
+
    +
  • +

    We use POST on the collection URL to save an entity (create if no ID provided in payload otherwise update). This avoids pointless discussions in distinctions between PUT and POST and what to do if a create contains an ID in the payload or if an update is missing the ID property or contains a different ID in payload than in URL.

    +
  • +
  • +

    Hence, we do NOT use PUT but always use POST for write operations. As we always have a technical ID for each entity, we can simply distinguish create and update by the presence of the ID property.

    +
  • +
  • +

    Please also note that for (large) bulk deletions you may be forced to use POST instead of DELETE as according to the HTTP standard DELETE must not have payload and URLs are limited in length.

    +
  • +
+
+
+

Metadata

+
+
+

devonfw has support for the following metadata in REST service invocations:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
NameDescriptionFurther information

X-Correlation-Id

HTTP header for a correlation ID that is a unique identifier to associate different requests belonging to the same session / action

Logging guide

Validation errors

Standardized format for a service to communicate validation errors to the client

Server-side validation is documented in the Validation guide.

+

The protocol to communicate these validation errors is described in REST exception handling.

Pagination

Standardized format for a service to offer paginated access to a list of entities

Server-side support for pagination is documented in the Repository Guide.

+
+
+
+

Recommendations for REST requests and responses

+
+
+

The devonfw proposes, for simplicity, a deviation from the common REST pattern:

+
+
+
    +
  • +

    Using POST for updates (instead of PUT)

    +
  • +
  • +

    Using the payload for addressing resources on POST (instead of identifier on the URL)

    +
  • +
  • +

    Using parametrized POST for searches

    +
  • +
+
+
+

This use of REST will lead to simpler code both on client and on server. We discuss this use on the next points.

+
+
+

The following table specifies how to use the HTTP methods (verbs) for collection and element URIs properly (see wikipedia).

+
+
+

Unparameterized loading of a single resource

+
+
    +
  • +

    HTTP Method: GET

    +
  • +
  • +

    URL example: /services/rest/productmanagement/v1/product/123

    +
  • +
+
+
+

For loading of a single resource, embed the identifier (e.g. 123) of the resource in the URL.

+
+
+

The response contains the resource in JSON format, using a JSON object at the top-level, for example:

+
+
+
+
{
+  "id": 123,
+  "name": "Steak",
+  "color": "brown"
+}
+
+
+
+
+

Unparameterized loading of a collection of resources

+
+
    +
  • +

    HTTP Method: GET

    +
  • +
  • +

    URL example: /services/rest/productmanagement/v1/product

    +
  • +
+
+
+

For loading of a collection of resources, make sure that the size of the collection can never exceed a reasonable maximum size. For parameterized loading (searching, pagination), see below.

+
+
+

The response contains the collection in JSON format, using a JSON object at the top-level, and the actual collection underneath a result key, for example:

+
+
+
+
{
+  "result": [
+    {
+      "id": 123,
+      "name": "Steak",
+      "color": "brown"
+    },
+    {
+      "id": 124,
+      "name": "Broccoli",
+      "color": "green"
+    }
+  ]
+}
+
+
+
+
+

Saving a resource

+
+
    +
  • +

    HTTP Method: POST

    +
  • +
  • +

    URL example: /services/rest/productmanagement/v1/product

    +
  • +
+
+
+

The resource will be passed via JSON in the request body. If updating an existing resource, include the resource’s identifier in the JSON and not in the URL, in order to avoid ambiguity.

+
+
+

If saving was successful, the updated product (e.g. with assigned ID or updated modification counter) is returned.

+
+
+

If saving was unsuccessful, refer below for the format to return errors to the client.

+
+
+
+

Parameterized loading of a resource

+
+
    +
  • +

    HTTP Method: POST

    +
  • +
  • +

    URL example: /services/rest/productmanagement/v1/product/search

    +
  • +
+
+
+

In order to differentiate from an unparameterized load, a special subpath (for example search) is introduced. The parameters are passed via JSON in the request body. An example of a simple, paginated search would be:

+
+
+
+
{
+  "status": "OPEN",
+  "pagination": {
+    "page": 2,
+    "size": 25
+  }
+}
+
+
+
+

The response contains the requested page of the collection in JSON format, using a JSON object at the top-level, the actual page underneath a result key, and additional pagination information underneath a pagination key, for example:

+
+
+
+
{
+  "pagination": {
+    "page": 2,
+    "size": 25,
+    "total": null
+  },
+  "result": [
+    {
+      "id": 123,
+      "name": "Steak",
+      "color": "brown"
+    },
+    {
+      "id": 124,
+      "name": "Broccoli",
+      "color": "green"
+    }
+  ]
+}
+
+
+
+

Compare the code needed on server side to accept this request:

+
+
+
+
  @Path("/category/search")
+  @POST
+  public PaginatedListTo<CategoryEto> findCategorysByPost(CategorySearchCriteriaTo searchCriteriaTo) {
+    return this.dishmanagement.findCategoryEtos(searchCriteriaTo);
+ }
+
+
+
+

With the equivalent code required if doing it the RESTful way by issuing a GET request:

+
+
+
+
 @Path("/category/search")
+  @POST @Path("/order")
+  @GET
+  public PaginatedListTo<CategoryEto> findCategorysByPost( @Context UriInfo info) {
+
+    RequestParameters parameters = RequestParameters.fromQuery(info);
+    CategorySearchCriteriaTo criteria = new CategorySearchCriteriaTo();
+    criteria.setName(parameters.get("name", Long.class, false));
+    criteria.setDescription(parameters.get("description", OrderState.class, false));
+    criteria.setShowOrder(parameters.get("showOrder", OrderState.class, false));
+    return this.dishmanagement.findCategoryEtos(criteria);
+
+  }
+
+
+
+

Pagination details

+
+

The client can choose to request a count of the total size of the collection, for example to calculate the total number of available pages. It does so, by specifying the pagination.total property with a value of true.

+
+
+

The service is free to honour this request. If it chooses to do so, it returns the total count as the pagination.total property in the response.

+
+
+
+
+

Deletion of a resource

+
+
    +
  • +

    HTTP Method: DELETE

    +
  • +
  • +

    URL example: /services/rest/productmanagement/v1/product/123

    +
  • +
+
+
+

For deletion of a single resource, embed the identifier of the resource in the URL.

+
+
+
+

Error results

+
+

The general format for returning an error to the client is as follows:

+
+
+
+
{
+  "message": "A human-readable message describing the error",
+  "code": "A code identifying the concrete error",
+  "uuid": "An identifier (generally the correlation id) to help identify corresponding requests in logs"
+}
+
+
+
+

If the error is caused by a failed validation of the entity, the above format is extended to also include the list of individual validation errors:

+
+
+
+
{
+  "message": "A human-readable message describing the error",
+  "code": "A code identifying the concrete error",
+  "uuid": "An identifier (generally the correlation id) to help identify corresponding requests in logs",
+  "errors": {
+    "property failing validation": [
+       "First error message on this property",
+       "Second error message on this property"
+    ],
+    // ....
+  }
+}
+
+
+
+
+
+
+

REST Media Types

+
+
+

The payload of a REST service can be in any format as REST by itself does not specify this. The most established ones that the devonfw recommends are XML and JSON. Follow these links for further details and guidance how to use them properly. JAX-RS and CXF properly support these formats (MediaType.APPLICATION_JSON and MediaType.APPLICATION_XML can be specified for @Produces or @Consumes). Try to decide for a single format for all services if possible and NEVER mix different formats in a service.

+
+
+
+
+

REST Testing

+
+
+

For testing REST services in general consult the testing guide.

+
+
+

For manual testing REST services there are browser plugins:

+
+
+ +
+
+
+
+

Security

+
+
+

Your services are the major entry point to your application. Hence security considerations are important here.

+
+
+

CSRF

+
+

A common security threat is CSRF for REST services. Therefore all REST operations that are performing modifications (PUT, POST, DELETE, etc. - all except GET) have to be secured against CSRF attacks. See CSRF how to do this.

+
+
+
+

JSON top-level arrays

+
+

OWASP earlier suggested to never return JSON arrays at the top-level, to prevent attacks without rationale. +We dug deep and found anatomy-of-a-subtle-json-vulnerability. +To sum it up the attack is many years old and does not work in any recent or relevant browser. +Hence it is fine to use arrays as top-level result in a JSON REST service (means you can return List<Foo> in a Java JAX-RS service).

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-rest.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-rest.html new file mode 100644 index 00000000..5a5272bd --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-rest.html @@ -0,0 +1,622 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==REST +REST (REpresentational State Transfer) is an inter-operable protocol for services that is more lightweight than SOAP. +However, it is no real standard and can cause confusion (see REST philosophy). +Therefore we define best practices here to guide you.

+
+
+

URLs

+
+
+

URLs are not case sensitive. Hence, we follow the best practice to use only lower-case-letters-with-hyphen-to-separate-words. +For operations in REST we distinguish the following types of URLs:

+
+
+
    +
  • +

    A collection URL is built from the rest service URL by appending the name of a collection. This is typically the name of an entity. Such URL identifies the entire collection of all elements of this type. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity

    +
  • +
  • +

    An element URL is built from a collection URL by appending an element ID. It identifies a single element (entity) within the collection. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/42

    +
  • +
+
+
+

To follow KISS avoid using plural forms (…​/productmanagement/v1/products vs. …​/productmanagement/v1/product/42). Always use singular forms and avoid confusions (except for the rare cases where no singular exists).

+
+
+

The REST URL scheme fits perfectly for CRUD operations. +For business operations (processing, calculation, advanced search, etc.) we simply append a collection URL with the name of the business operation. +Then we can POST the input for the business operation and get the result back. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/search

+
+
+
+
+

HTTP Methods

+
+
+

The following table defines the HTTP methods (verbs) and their meaning:

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Usage of HTTP methods
HTTP MethodMeaning

GET

Read data (stateless).

PUT

Create or update data.

POST

Process data.

DELETE

Delete an entity.

+
+

Please also note that for (large) bulk deletions you may be forced to use POST instead of DELETE as according to the HTTP standard DELETE must not have payload and URLs are limited in length.

+
+
+

For general recommendations on HTTP methods for collection and element URLs see REST@wikipedia.

+
+
+
+
+

HTTP Status Codes

+
+
+

Further we define how to use the HTTP status codes for REST services properly. In general the 4xx codes correspond to an error on the client side and the 5xx codes to an error on the server side.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2. Usage of HTTP status codes
HTTP CodeMeaningResponseComment

200

OK

requested result

Result of successful GET

204

No Content

none

Result of successful POST, DELETE, or PUT with empty result (void return)

400

Bad Request

error details

The HTTP request is invalid (parse error, validation failed)

401

Unauthorized

none

Authentication failed

403

Forbidden

none

Authorization failed

404

Not found

none

Either the service URL is wrong or the requested resource does not exist

500

Server Error

error code, UUID

Internal server error occurred, in case of an exception, see REST exception handling

+
+
+
+

JAX-RS

+
+
+

For implementing REST services we use the JAX-RS standard. +As payload encoding we recommend JSON bindings using Jackson. +To implement a REST service you simply add JAX-RS annotations. +Here is a simple example:

+
+
+
+
@ApplicationScoped
+@Path("/imagemanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public class ImagemanagementRestService {
+
+  @Inject
+  private Imagemanagement imagemanagement;
+
+  @GET
+  @Path("/image/{id}/")
+  public ImageDto getImage(@PathParam("id") long id) {
+
+    return this.imagemanagement.findImage(id);
+  }
+}
+
+
+
+

Here we can see a REST service for the business component imagemanagement. The method getImage can be accessed via HTTP GET (see @GET) under the URL path imagemanagement/image/{id} (see @Path annotations) where {id} is the ID of the requested image and will be extracted from the URL and provided as parameter id to the method getImage. It will return its result (ImageDto) as JSON (see @Produces annotation - you can also extend RestService marker interface that defines these annotations for JSON). As you can see it delegates to the logic component imagemanagement that contains the actual business logic while the service itself only exposes this logic via HTTP. The REST service implementation is a regular CDI bean that can use dependency injection.

+
+
+ + + + + +
+ + +With JAX-RS it is important to make sure that each service method is annotated with the proper HTTP method (@GET,@POST,etc.) to avoid unnecessary debugging. So you should take care not to forget to specify one of these annotations. +
+
+
+

Service-Interface

+
+

You may also separate API and implementation in case you want to reuse the API for service-client:

+
+
+
+
@Path("/imagemanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public interface ImagemanagementRestService {
+
+  @GET
+  @Path("/image/{id}/")
+  ImageEto getImage(@PathParam("id") long id);
+
+}
+
+@Named("ImagemanagementRestService")
+public class ImagemanagementRestServiceImpl implements ImagemanagementRestService {
+
+  @Override
+  public ImageEto getImage(long id) {
+
+    return this.imagemanagement.findImage(id);
+  }
+
+}
+
+
+
+
+

JAX-RS Configuration

+
+

Starting from CXF 3.0.0 it is possible to enable the auto-discovery of JAX-RS roots.

+
+
+

When the JAX-RS server is instantiated, all the scanned root and provider beans (beans annotated with javax.ws.rs.Path and javax.ws.rs.ext.Provider) are configured.

+
+
+
+

REST Exception Handling

+
+

For exceptions, a service needs to have an exception facade that catches all exceptions and handles them by writing proper log messages and mapping them to an HTTP response with a corresponding HTTP status code. +For this, devon4j provides a generic solution via RestServiceExceptionFacade that you can use within your Spring applications. You need to follow the exception guide in order for it to work out of the box because the facade needs to be able to distinguish between business and technical exceptions. +To implement a generic exception facade in Quarkus, follow the Quarkus exception guide.

+
+
+

Now your service may throw exceptions, but the facade will automatically handle them for you.

+
+
+

The general format for returning an error to the client is as follows:

+
+
+
+
{
+  "message": "A human-readable message describing the error",
+  "code": "A code identifying the concrete error",
+  "uuid": "An identifier (generally the correlation id) to help identify corresponding requests in logs"
+}
+
+
+
+
+

Pagination details

+
+

We recommend to use spring-data repositories for database access that already comes with pagination support. +Therefore, when performing a search, you can include a Pageable object. +Here is a JSON example for it:

+
+
+
+
{ "pageSize": 20, "pageNumber": 0, "sort": [] }
+
+
+
+

By increasing the pageNumber the client can browse and page through the hits.

+
+
+

As a result you will receive a Page. +It is a container for your search results just like a Collection but additionally contains pagination information for the client. +Here is a JSON example:

+
+
+
+
{ "totalElements": 1022,
+  pageable: { "pageSize": 20, "pageNumber": 0 },
+  content: [ ... ] }
+
+
+
+

The totalElements property contains the total number of hits. +This can be used by the client to compute the total number of pages and render the pagination links accordingly. +Via the pageable property the client gets back the Pageable properties from the search request. +The actual hits for the current page are returned as array in the content property.

+
+
+
+
+
+

REST Testing

+
+
+

For testing REST services in general consult the testing guide.

+
+
+

For manual testing REST services there are browser plugins:

+
+
+ +
+
+
+
+

Security

+
+
+

Your services are the major entry point to your application. Hence security considerations are important here.

+
+
+

CSRF

+
+

A common security threat is CSRF for REST services. Therefore all REST operations that are performing modifications (PUT, POST, DELETE, etc. - all except GET) have to be secured against CSRF attacks. See CSRF how to do this.

+
+
+
+

JSON top-level arrays

+
+

OWASP earlier suggested to never return JSON arrays at the top-level, to prevent attacks without rationale. +We dug deep and found anatomy-of-a-subtle-json-vulnerability. +To sum it up the attack is many years old and does not work in any recent or relevant browser. +Hence it is fine to use arrays as top-level result in a JSON REST service (means you can return List<Foo> in a Java JAX-RS service).

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-scm.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-scm.html new file mode 100644 index 00000000..277e8dfb --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-scm.html @@ -0,0 +1,323 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ + +
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-security.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-security.html new file mode 100644 index 00000000..4540667c --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-security.html @@ -0,0 +1,454 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Security +Security is today's most important cross-cutting concern of an application and an enterprise IT-landscape. We seriously care about security and give you detailed guides to prevent pitfalls, vulnerabilities, and other disasters. While many mistakes can be avoided by following our guidelines you still have to consider security and think about it in your design and implementation. The security guide will not only automatically prevent you from any harm, but will provide you hints and best practices already used in different software products.

+
+
+

An important aspect of security is proper authentication and authorization as described in access-control. In the following we discuss about potential vulnerabilities and protection to prevent them.

+
+
+

Vulnerabilities and Protection

+
+
+

Independent from classical authentication and authorization mechanisms there are many common pitfalls that can lead to vulnerabilities and security issues in your application such as XSS, CSRF, SQL-injection, log-forging, etc. A good source of information about this is the OWASP. +We address these common threats individually in security sections of our technological guides as a concrete solution to prevent an attack typically depends on the respective technology. The following table illustrates common threats and contains links to the solutions and protection-mechanisms provided by the devonfw:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Security threats and protection-mechanisms
ThreatProtectionLink to details

A1 Injection

validate input, escape output, use proper frameworks

SQL Injection

A2 Broken Authentication

encrypt all channels, use a central identity management with strong password-policy

Authentication

A3 Sensitive Data Exposure

Use secured exception facade, design your data model accordingly

REST exception handling

A4 XML External Entities

Prefer JSON over XML, ensure FSP when parsing (external) XML

XML guide

A5 Broken Access Control

Ensure proper authorization for all use-cases, use @DenyAll as default to enforce

Access-control guide especially method authorization

A6 Security Misconfiguration

Use devon4j application template and guides to avoid

tutorial-newapp and sensitive configuration

A7 Cross-Site Scripting

prevent injection (see A1) for HTML, JavaScript and CSS and understand same-origin-policy

client-layer

A8 Insecure Deserialization

Use simple and established serialization formats such as JSON, prevent generic deserialization (for polymorphic types)

JSON guide especially inheritance, XML guide

A9 Using Components with Known Vulnerabilities

subscribe to security newsletters, recheck products and their versions continuously, use devonfw dependency management

CVE newsletter and dependency check

A10 Insufficient_Logging & Monitoring

Ensure to log all security related events (login, logout, errors), establish effective monitoring

Logging guide and monitoring guide

Insecure Direct Object References

Using direct object references (IDs) only with appropriate authorization

logic-layer

Cross-Site Request Forgery (CSRF)

secure mutable service operations with an explicit CSRF security token sent in HTTP header and verified on the server

CSRF guide

Log-Forging

Escape newlines in log messages

logging security

Unvalidated Redirects and Forwards

Avoid using redirects and forwards, in case you need them do a security audit on the solution.

devonfw proposes to use rich-clients (SPA/RIA). We only use redirects for login in a safe way.

+
+
+
+

Advanced Security

+
+
+

While OWASP Top 10 covers the basic aspects of application security, there are advanced standards such as ASVS. +In devonfw we address this in the +Application Security Quick Solution Guide.

+
+
+
+
+

Tools

+
+
+

Dependency Check

+
+

To address the threat Using Components with Known Vulnerabilities we recommend to use OWASP dependency check that ships with a maven plugin and can analyze your dependencies for known CVEs. +In order to run this check, you can simply call this command on any maven project:

+
+
+
+
mvn org.owasp:dependency-check-maven:6.1.5:aggregate
+
+
+
+ + + + + +
+ + +The version is just for completeness. You should check yourself for using a recent version of the plugin. +
+
+
+

If you build an devon4j spring application from our app-template you can activate the dependency check even easier with the security profile:

+
+
+
+
mvn clean install -P security
+
+
+
+

This does not run by default as it causes some overhead for the build performance. However, consider building this in your CI at least nightly. +After the dependency check is performed, you will find the results in target/dependency-check-report.html of each module. The report will also be generated when the site is built (mvn site) even without the profile.

+
+
+
+

Penetration Testing

+
+

For penetration testing (testing for vulnerabilities) of your web application, we recommend the following tools:

+
+
+ +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-service-client.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-service-client.html new file mode 100644 index 00000000..4d2e4eae --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-service-client.html @@ -0,0 +1,303 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Service Client

+
+
+

This guide is about consuming (calling) services from other applications (micro-services). For providing services, see the Service-Layer Guide. Services can be consumed by the client or the server. As the client is typically not written in Java, you should consult the according guide for your client technology. In case you want to call a service within your Java code, this guide is the right place to get help.

+
+
+

Motivation

+
+
+

Various solutions already exist for calling services, such as RestTemplate from spring or the JAX-RS client API. Furthermore, each and every service framework offers its own API as well. These solutions might be suitable for very small and simple projects (with one or two such invocations). However, with the trend of microservices, the invocation of a service becomes a very common use-case that occurs all over the place. You typically need a solution that is very easy to use but supports flexible configuration, adding headers for authentication, mapping of errors from the server, logging success/errors with duration for performance analysis, support for synchronous and asynchronous invocations, etc. This is exactly what this devon4j service-client solution brings to you.

+
+
+
+
+

Usage

+
+
+

Spring

+
+
+

For Spring, follow the Spring rest-client guide.

+
+
+

Quarkus

+
+
+

For Quarkus, we recommend to follow the official Quarkus rest-client guide

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-service-layer.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-service-layer.html new file mode 100644 index 00000000..f24c9b5e --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-service-layer.html @@ -0,0 +1,407 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Service Layer

+
+
+

The service layer is responsible for exposing functionality made available by the logical layer to external consumers over a network via technical protocols.

+
+
+

Types of Services

+
+
+

Before you start creating your services you should consider some general design aspects:

+
+
+
    +
  • +

    Do you want to create a RPC service?

    +
  • +
  • +

    Or is your problem better addressed by messaging or eventing?

    +
  • +
  • +

    Who will consume your service?

    +
    +
      +
    • +

      Do you have one or multiple consumers?

      +
    • +
    • +

      Do web-browsers have to use your service?

      +
    • +
    • +

      Will apps from other vendors or parties have to consume your service that you can not influence if the service may have to change or be extended?

      +
    • +
    +
    +
  • +
+
+
+

For RPC a common choice is REST but there are also interesting alternatives like gRPC. We also have a guide for SOAP but this technology should rather be considered as legacy and is not recommended for new services.

+
+
+

When it comes to messaging in Java the typical answer will be JMS. However, a very promising alternative is Kafka.

+
+
+
+
+

Versioning

+
+
+

For RPC services consumed by other applications we use versioning to prevent incompatibilities between applications when deploying updates. This is done by the following conventions:

+
+
+
    +
  • +

    We define a version number and prefix it with v (e.g. v1).

    +
  • +
  • +

    If we support previous versions we use that version numbers as part of the Java package defining the service API (e.g. com.foo.application.component.service.api.v1)

    +
  • +
  • +

    We use the version number as part of the service name in the remote URL (e.g. https://application.foo.com/services/rest/component/v1/resource)

    +
  • +
  • +

    Whenever breaking changes are made to the API, create a separate version of the service and increment the version (e.g. v1 → v2). The implementations of the different versions of the service contain compatibility code and delegate to the same unversioned use-case of the logic layer whenever possible.

    +
  • +
  • +

    For maintenance and simplicity, avoid keeping more than one previous version.

    +
  • +
+
+
+
+
+

Interoperability

+
+
+

For services that are consumed by clients with different technology, interoperability is required. This is addressed by selecting the right protocol, following protocol-specific best practices and following our considerations especially simplicity.

+
+
+
+
+

Service Considerations

+
+
+

The term service is quite generic and therefore easily misunderstood. It is a unit exposing coherent functionality via a well-defined interface over a network. For the design of a service, we consider the following aspects:

+
+
+
    +
  • +

    self-contained
    +The entire API of the service shall be self-contained and have no dependencies on other parts of the application (other services, implementations, etc.).

    +
  • +
  • +

    idempotence
    +E.g. creation of the same master-data entity has no effect (no error)

    +
  • +
  • +

    loosely coupled
    +Service consumers have minimum knowledge and dependencies on the service provider.

    +
  • +
  • +

    normalized
    +Complete, no redundancy, minimal

    +
  • +
  • +

    coarse-grained
    +Service provides rather large operations (save entire entity or set of entities rather than individual attributes)

    +
  • +
  • +

    atomic
    +Process individual entities (for processing large sets of data, use a batch instead of a service)

    +
  • +
  • +

    simplicity
    +Avoid polymorphism, RPC methods with unique name per signature and no overloading, avoid attachments (consider separate download service), etc.

    +
  • +
+
+
+
+
+

Security

+
+
+

Your services are the major entry point to your application. Hence, security considerations are important here.

+
+
+

See REST Security.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-service-versioning.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-service-versioning.html new file mode 100644 index 00000000..3d04df04 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-service-versioning.html @@ -0,0 +1,565 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Service-Versioning

+
+
+

This guide describes the aspect and details about versioning of services

+
+
+

Motivation

+
+
+

Why versioning of services? First of all, you should only care about this topic if you really have to. Service versioning is complex and requires effort (time and budget). The best way to avoid this is to be smart in the first place when designing the service API. +Further, if you are creating services where the only consumer is e.g. the web-client that you deploy together with the consumed services then you can change your service without the overhead to create new service versions and keeping old service versions for compatibility.

+
+
+

However, if the following indicators are given you typically need to do service versioning:

+
+
+
    +
  • +

    Your service is part of a complex and distributed IT landscape

    +
  • +
  • +

    Your service requires incompatible changes

    +
  • +
  • +

    There are many consumers or there is at least one (relevant) consumer that can not be updated at the same time or is entirely out of control (unknown or totally different party/company)

    +
  • +
+
+
+

What are incompatible changes?

+
+
+
    +
  • +

    Almost any change when SOAP is used (as it changes the WSDL and breaks the contract). Therefore, we recommend to use REST instead. Then, only the following changes are critical.

    +
  • +
  • +

    A change where existing properties (attributes) have to change their name

    +
  • +
  • +

    A change where existing features (properties, operations, etc.) have to change their semantics (meaning)

    +
  • +
+
+
+

What changes do not cause incompatibilities?

+
+
+
    +
  • +

    Adding new service operations is entirely uncritical with REST.

    +
  • +
  • +

    Adding new properties is only a problem in the following cases:

    +
    +
      +
    • +

      Adding new mandatory properties to the input of a service is causing incompatibilities. This problem can be avoided by contract-design.

      +
    • +
    • +

      If a consumer is using a service to read data, modify it and then save it back via a service and a property is added to the data, then this property might be lost. This is not a problem with dynamic languages such as JavaScript/TypeScript but with strictly typed languages such as Java. In Java you will typically use structured typed transfer-objects (and not Map<String, Object>) so new properties that have been added but are not known to the consumer can not be mapped to the transfer-object and will be lost. When saving that transfer-object later the property will be gone. It might be impossible to determine the difference between a lost property and a property that was removed on purpose. This is a general problem that you need to be aware of and that you have to consider by your design in such situations.

      +
    • +
    +
    +
  • +
+
+
+

Even if you hit an indicator for incompatible changes you can still think about adding a new service operation instead of changing an existing one (and deprecating the old one). Be creative to simplify and avoid extra effort.

+
+
+
+
+

Procedure

+
+
+

The procedure when rolling out incompatible changes is illustrated by the following example:

+
+
+
+
+------+  +------+
+| App1 |  | App2 |
++---+--+  +--+---+
+    |        |
+    +---+----+
+        |
++-------+--------+
+|      Sv1       |
+|                |
+|      App3      |
++----------------+
+
+
+
+

So, here we see a simple example where App3 provides a Service S in Version v1 that is consumed both by App1 and App2.

+
+
+

Now for some reason the service S has to be changed in an incompatible way to make it future-proof for demands. However, upgrading all 3 applications at the same time is not possible in this case for whatever reason. Therefore, service versioning is applied for the changes of S.

+
+
+
+
+------+  +------+
+| App1 |  | App2 |
++---+--+  +--+---+
+    |        |
+    +--------+
+    |
++---+------------+
+|  Sv1  |  Sv2   |
+|                |
+|      App3*     |
++----------------+
+
+
+
+

Now, App3 has been upgraded and the new release was deployed. A new version v2 of S has been added while v1 is still kept for compatibility reasons and that version is still used by App1 and App2.

+
+
+
+
+------+  +------+
+| App1 |  | App2*|
++---+--+  +--+---+
+    |        |
+    |        |
+    |        |
++---+--------+---+
+|  Sv1  |  Sv2   |
+|                |
+|      App3      |
++----------------+
+
+
+
+

Now, App2 has been updated and deployed and it is using the new version v2 of S.

+
+
+
+
+------+  +------+
+| App1*|  | App2 |
++---+--+  +--+---+
+    |        |
+    +--------+
+             |
++------------+---+
+|  Sv1  |  Sv2   |
+|                |
+|      App3      |
++----------------+
+
+
+
+

Now, also App1 has been updated and deployed and it is using the new version v2 of S. The version v1 of S is not used anymore. This can be verified via logging and monitoring.

+
+
+
+
+------+  +------+
+| App1 |  | App2 |
++---+--+  +--+---+
+    |        |
+    +--------+
+             |
++------------+---+
+|          Sv2   |
+|                |
+|      App3*     |
++----------------+
+
+
+
+

Finally, version v1 of the service S was removed from App3 and the new release has been deployed.

+
+
+
+
+

Versioning Schema

+
+
+

In general anything can be used to differentiate versions of a service. Possibilities are:

+
+
+
    +
  • +

    Code names (e.g. Strawberry, Blueberry, Grapefruit)

    +
  • +
  • +

    Timestamps (YYYYMMDD-HHmmSS)

    +
  • +
  • +

    Sequential version numbers (e.g. v1, v2, v3)

    +
  • +
  • +

    Composed version numbers (e.g. 1.0.48-pre-alpha-3-20171231-235959-Strawberry)

    +
  • +
+
+
+

As we are following the KISS principle (see key principles) we propose to use sequential version numbers. These are short, clear, and easy while still allowing to see what version is after another one. Especially composed version numbers (even 1.1 vs. 2.0) lead to decisions and discussions that easily waste more time than adding value. It is still very easy to maintain an Excel sheet or release-notes document that is explaining the changes for each version (v1, v2, v3) of a particular service.

+
+
+

We suggest to always add the version schema to the service URL to be prepared for service versioning even if service versioning is not (yet) actively used. For simplicity it is explicitly stated that you may even do incompatible changes to the current version (typically v1) of your service if you can update the according consumers within the same deployment.

+
+
+
+
+

Practice

+
+
+

So assuming you know that you have to do service versioning, the question is how to do it practically in the code. +The approach for your devon4j project in case of code-first should be as described below:

+
+
+
    +
  • +

    Determine which types in the code need to be changed. It is likely to be the API and implementation of the according service but it may also impact transfer objects and potentially even datatypes.

    +
  • +
  • +

    Create new packages for all these concerned types containing the current version number (e.g. v1).

    +
  • +
  • +

    Copy all these types to that new packages.

    +
  • +
  • +

    Rename these copies so they carry the version number as suffix (e.g. V1).

    +
  • +
  • +

    Increase the version of the service in the unversioned package (e.g. from v1 to v2).

    +
  • +
  • +

    Now you have two versions of the same service (e.g. v1 and v2) but so far they behave exactly the same.

    +
  • +
  • +

    You start with your actual changes and modify the original files that have been copied before.

    +
  • +
  • +

    You will also ensure the links (import statements) of the copied types point to the copies with the version number

    +
  • +
  • +

    This will cause incompatibilities (and compile errors) in the copied service. Therefore, you need to fix that service implementation to map from the old API to the new API and behavior. In some cases, this may be easy (e.g. mapping x.y.z.v1.FooTo to x.y.z.FooTo using bean-mapping with some custom mapping for the incompatible changes), in other cases this can get very complex. Be aware of this complexity from the start before you make your decision about service versioning.

    +
  • +
  • +

    As far as possible this mapping should be done in the service-layer, not to pollute your business code in the core-layer with versioning-aspects. If there is no way to handle it in the service layer, e.g. you need some data from the persistence-layer, implement the "mapping" in the core-layer then, but don’t forget to remove this code, when removing the old service version.

    +
  • +
  • +

    Finally, ensure that both the old service behaves as before as well as the new service works as planned.

    +
  • +
+
+
+

Modularization

+
+

For modularization, we also follow the KISS principle (see key principles): +we suggest to have one api module per application that will contain the most recent version of your service and get released with every release-version of the application. The compatibility code with the versioned packages will be added to the core module and therefore is not exposed via the api module (because it has already been exposed in the previous release of the app). This way, you can always determine for sure which version of a service is used by another application just by its maven dependencies.

+
+
+

The KISS approach with only a single module that may contain multiple services (e.g. one for each business component) will cause problems when you want to have mixed usages of service versions: You can not use an old version of one service and a new version of another service from the same APP as then you would need to have its API module twice as a dependency on different versions, which is not possible. However, to avoid complicated overhead we always suggest to follow this easy approach. Only if you come to the point that you really need this complexity you can still solve it (even afterwards by publishing another maven artefact). As we are all on our way to build more but smaller applications (SOA, microservices, etc.) we should always start simple and only add complexity when really needed.

+
+
+

The following example gives an idea of the structure:

+
+
+
+
/«my-app»
+├──/api
+|  └──/src/main/java/
+|     └──/«rootpackage»/«application»/«component»
+|        ├──/common/api/to
+|        |  └──FooTo
+|        └──/service/api/rest
+|           └──FooRestService
+└──/core
+   └──/src/main/java/
+      └──«rootpackage»/«application»/«component»
+         ├──/common/api/to/v1
+         |  └──FooToV1
+         └──/service
+            ├──/api/rest/v1
+            |  └──FooRestServiceV1
+            └──impl/rest
+               ├──/v1
+               |  └── FooRestServiceImplV1
+               └──FooRestServiceImpl
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-soap.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-soap.html new file mode 100644 index 00000000..2cbd8528 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-soap.html @@ -0,0 +1,362 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==SOAP +SOAP is a common protocol for services that is rather complex and heavy. It allows to build inter-operable and well specified services (see WSDL). SOAP is transport neutral what is not only an advantage. We strongly recommend to use HTTPS transport and ignore additional complex standards like WS-Security and use established HTTP-Standards such as RFC2617 (and RFC5280).

+
+
+

JAX-WS

+
+
+

For building web-services with Java we use the JAX-WS standard. +There are two approaches:

+
+
+
    +
  • +

    code first

    +
  • +
  • +

    contract first

    +
  • +
+
+
+

Here is an example in case you define a code-first service.

+
+
+

Web-Service Interface

+
+

We define a regular interface to define the API of the service and annotate it with JAX-WS annotations:

+
+
+
+
@WebService
+public interface TablemanagmentWebService {
+
+  @WebMethod
+  @WebResult(name = "message")
+  TableEto getTable(@WebParam(name = "id") String id);
+
+}
+
+
+
+
+

Web-Service Implementation

+
+

And here is a simple implementation of the service:

+
+
+
+
@Named
+@WebService(endpointInterface = "com.devonfw.application.mtsj.tablemanagement.service.api.ws.TablemanagmentWebService")
+public class TablemanagementWebServiceImpl implements TablemanagmentWebService {
+
+  private Tablemanagement tableManagement;
+
+  @Override
+  public TableEto getTable(String id) {
+
+    return this.tableManagement.findTable(id);
+  }
+
+
+
+
+
+
+

SOAP Custom Mapping

+
+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to write adapters for JAXB (see XML).

+
+
+
+
+

SOAP Testing

+
+
+

For testing SOAP services in general consult the testing guide.

+
+
+

For testing SOAP services manually we strongly recommend SoapUI.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-sql.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-sql.html new file mode 100644 index 00000000..327ac9a5 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-sql.html @@ -0,0 +1,415 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==SQL

+
+
+

For general guides on dealing or avoiding SQL, preventing SQL-injection, etc. you should study domain layer.

+
+
+

Naming Conventions

+
+
+

Here we define naming conventions that you should follow whenever you write SQL files:

+
+
+
    +
  • +

    All SQL-Keywords in UPPER CASE

    +
  • +
  • +

    Indentation should be 2 spaces as suggested by devonfw for every format.

    +
  • +
+
+
+

DDL

+
+

The naming conventions for database constructs (tables, columns, triggers, constraints, etc.) should be aligned with your database product and their operators. +However, when you have the freedom of choice and a modern case-sensitive database, you can simply use your code conventions also for database constructs to avoid explicitly mapping each and every property (e.g. RestaurantTable vs. RESTAURANT_TABLE).

+
+
+
    +
  • +

    Define columns and constraints inline in the statement to create the table

    +
  • +
  • +

    Indent column types so they all start in the same text column

    +
  • +
  • +

    Constraints should be named explicitly (to get a reasonable hint in error messages) with:

    +
    +
      +
    • +

      PK_«table» for primary key (name optional here as PK constraint are fundamental)

      +
    • +
    • +

      FK_«table»_«property» for foreign keys («table» and «property» are both on the source where the foreign key is defined)

      +
    • +
    • +

      UC_«table»_«property»[_«propertyN»]* for unique constraints

      +
    • +
    • +

      CK_«table»_«check» for check constraints («check» describes the check, if it is defined on a single property it should start with the property).

      +
    • +
    +
    +
  • +
  • +

    Old RDBMS had hard limitations for names (e.g. 30 characters). Please note that recent databases have overcome these very low length limitations. However, keep your names short but precise and try to define common abbreviations in your project for according (business) terms. Especially do not just truncate the names at the limit.

    +
  • +
  • +

    If possible add comments on table and columns to help DBAs understanding your schema. This is also honored by many tools (not only DBA-tools).

    +
  • +
+
+
+

Here is a brief example of a DDL:

+
+
+
+
CREATE SEQUENCE HIBERNATE_SEQUENCE START WITH 1000000;
+
+-- *** Table ***
+CREATE TABLE RESTAURANT_TABLE (
+  ID                   NUMBER(19) NOT NULL,
+  MODIFICATION_COUNTER INTEGER NOT NULL,
+  SEATS                INTEGER NOT NULL,
+  CONSTRAINT PK_TABLE PRIMARY KEY(ID)
+);
+COMMENT ON TABLE RESTAURANT_TABLE IS 'The physical tables inside the restaurant.';
+-- *** Order ***
+CREATE TABLE RESTAURANT_ORDER (
+  ID                   NUMBER(19) NOT NULL,
+  MODIFICATION_COUNTER INTEGER NOT NULL,
+  TABLE_ID             NUMBER(19) NOT NULL,
+  TOTAL                DECIMAL(5, 2) NOT NULL,
+  CREATION_DATE        TIMESTAMP NOT NULL,
+  PAYMENT_DATE         TIMESTAMP,
+  STATUS               VARCHAR2(10 CHAR) NOT NULL,
+  CONSTRAINT PK_ORDER PRIMARY KEY(ID),
+  CONSTRAINT FK_ORDER_TABLE_ID FOREIGN KEY(TABLE_ID) REFERENCES RESTAURANT_TABLE(ID)
+);
+COMMENT ON TABLE RESTAURANT_ORDER IS 'An order and bill at the restaurant.';
+...
+
+
+
+

ATTENTION: Please note that TABLE and ORDER are reserved keywords in SQL and you should avoid using such keywords to prevent problems.

+
+
+
+

Data

+
+

For insert, update, delete, etc. of data SQL scripts should additionally follow these guidelines:

+
+
+
    +
  • +

    Inserts always with the same order of columns in blocks for each table.

    +
  • +
  • +

    Insert column values always starting with ID, MODIFICATION_COUNTER, [DTYPE, ] …​

    +
  • +
  • +

    List columns with fixed length values (boolean, number, enums, etc.) before columns with free text to support alignment of multiple insert statements

    +
  • +
  • +

    Pro Tip: Get familiar with column mode of advanced editors such as notepad++ when editing large blocks of similar insert statements.

    +
  • +
+
+
+
+
INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (0, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (1, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (2, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (3, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (4, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (5, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (6, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (7, 1, 8);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (8, 1, 8);
+...
+
+
+
+

See also Database Migrations.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-structure-classic.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-structure-classic.html new file mode 100644 index 00000000..86fddb25 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-structure-classic.html @@ -0,0 +1,482 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Classic project structure

+
+
+

In this section we describe the classic project structure as initially proposed for Java in devonfw. +It is still valid and fully supported. +However, if you want to start a new project, please consider using the modern structure.

+
+
+

Modules

+
+
+

The structure of a devon4j application is divided into the following modules:

+
+
+
    +
  • +

    api: module containing the API of your application. The API contains the required artifacts to interact with your application via remote services. This can be REST service interfaces, transfer-objects with their interfaces and datatypes but also OpenAPI or gRPC contracts.

    +
  • +
  • +

    core: maven module containing the core of the application with service implementation, as well as entire logic layer and dataaccess layer.

    +
  • +
  • +

    batch: optional module for batch layer

    +
  • +
  • +

    server: module that bundles the entire app (core with optional batch) typically as a bootified WAR file.

    +
  • +
+
+
+
+
+

Deployment

+
+
+
+
+

Make jar not war

+
+
+
+— Josh Long +
+
+
+

First of all it is important to understand that the above defined modules aim to make api, core, and batch reusable artifacts, that can be used as a regular maven dependency. +On the other hand to build and deploy your application you want a final artifact that is containing all required 3rd party libraries. +This artifact is not reusable as a maven dependency. +That is exactly the purpose of the server module to build and package this final deployment artifact. +By default we first build a regular WAR file with maven in your server/target directory (*-server-«version».war) and in a second step create a bootified WAR out of this (*-server-bootified.war). +The bootified WAR file can then be started standalone (java -jar «filename».war). +However, it is also possible to deploy the same WAR file to a servlet container like tomcat or jetty. +As application servers and externally provided servlet containers are not recommended anymore for various reasons (see JEE), you may also want to create a bootified JAR file instead. +All you need to do in that case is to change the packaging in your server/pom.xml from war to jar.

+
+
+
+
+

Package Structure

+
+
+

The package structure of your code inside src/main/java (and src/test/java) of your modules is described in our coding conventions in the sections packages. A full mapping of the architecture and the different code elements to the packaging is described in the following section.

+
+
+
+
+

Layers

+
+
+

The package structure of your code inside src/main/java (and src/test/java) of your app is described in our coding conventions in the sections packages. +The following table describes our classic approach for packaging and layering:

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Traditional generic devon4j layers
Layer«layer»

service

service

logic

logic

data-access

dataaccess

batch (optional)

batch

client (optional)

client

common

common

+
+
+
+

Architecture Mapping

+
+
+

In order to help you to map the architecture, packaging, layering, etc. to the code and see where different code elements should be placed, +we provide this architecture mapping:

+
+
+
+
«root»
+├──.«component»
+|  ├──.common
+|  |  ├──.api[.«detail»]
+|  |  |  ├──.datatype
+|  |  |  |  └──.«Datatype» (api)
+|  |  |  └──.«BusinessObject» (api)
+|  |  └──.impl[.«detail»]
+|  |     ├──.«Aspect»ConfigProperties (core)
+|  |     ├──.«Datatype»JsonSerializer (core)
+|  |     └──.«Datatype»JsonDeserializer (core)
+|  ├──.dataaccess
+|  |  ├──.api[.«detail»]
+|  |  |  ├──.repo
+|  |  |  |  └──.«BusinessObject»Repository (core)
+|  |  |  ├──.dao (core) [alternative to repo]
+|  |  |  |  └──.«BusinessObject»Dao (core) [alternative to Repository]
+|  |  |  └──.«BusinessObject»Entity (core)
+|  |  └──.impl[.«detail»]
+|  |     ├──.dao (core) [alternative to repo]
+|  |     |  └──.«BusinessObject»DaoImpl (core) [alternative to Repository]
+|  |     └──.«Datatype»AttributeConverter (core)
+|  ├──.logic
+|  |  ├──.api
+|  |  |  ├──.[«detail».]to
+|  |  |  |   ├──.«MyCustom»«To (api)
+|  |  |  |   ├──.«DataStructure»Embeddable (api)
+|  |  |  |   ├──.«BusinessObject»Eto (api)
+|  |  |  |   └──.«BusinessObject»«Subset»Cto (api)
+|  |  |  ├──.[«detail».]usecase
+|  |  |  |   ├──.UcFind«BusinessObject» (core)
+|  |  |  |   ├──.UcManage«BusinessObject» (core)
+|  |  |  |   └──.Uc«Operation»«BusinessObject» (core)
+|  |  |  └──.«Component» (core)
+|  |  ├──.base
+|  |  |  └──.[«detail».]usecase
+|  |  |     └──.Abstract«BusinessObject»Uc (core)
+|  |  └──.impl
+|  |     ├──.[«detail».]usecase
+|  |     |   ├──.UcFind«BusinessObject»Impl (core)
+|  |     |   ├──.UcManage«BusinessObject»Impl (core)
+|  |     |   └──.Uc«Operation»«BusinessObject»Impl (core)
+|  |     └──.«Component»Impl (core)
+|  └──.service
+|     ├──.api[.«detail»]
+|     |  ├──.rest
+|     |  |  └──.«Component»RestService (api)
+|     |  └──.ws
+|     |     └──.«Component»WebService (api)
+|     └──.impl[.«detail»]
+|        ├──.jms
+|        |  └──.«BusinessObject»JmsListener (core)
+|        ├──.rest
+|        |  └──.«Component»RestServiceImpl (core)
+|        └──.ws
+|           └──.«Component»WebServiceImpl (core)
+├──.general
+│  ├──.common
+│  |  ├──.api
+|  |  |  ├──.to
+|  |  |  |  ├──.AbstractSearchCriteriaTo (api)
+|  |  |  └──.ApplicationEntity
+│  |  ├──.base
+|  |  |  └──.AbstractBeanMapperSupport (core)
+│  |  └──.impl
+│  |     ├──.config
+│  |     |  └──.ApplicationObjectMapperFactory (core)
+│  |     └──.security
+│  |        └──.ApplicationWebSecurityConfig (core)
+│  ├──.dataaccess
+│  |  └──.api
+|  |     └──.ApplicationPersistenceEntity (core)
+│  ├──.logic
+│  |  └──.base
+|  |     ├──.AbstractComponentFacade (core)
+|  |     ├──.AbstractLogic (core)
+|  |     └──.AbstractUc (core)
+|  └──.service
+|     └──...
+└──.SpringBootApp (core)
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-structure-modern.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-structure-modern.html new file mode 100644 index 00000000..0155b5e7 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-structure-modern.html @@ -0,0 +1,403 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Modern project structure

+
+
+

With trends such as cloud, microservices, lean, and agile, we decided for a more modern project structure that fits better to recent market trends. +When starting new projects with devonfw, and especially in the context of cloud-native development, we strongly recommend this modern approach over the classic structure.

+
+
+

Modules

+
+
+

Due to trends such as microservices, we are building smaller apps compared to moduliths. +For simplicity, we therefore do not split our app into different modules and keep everything top-level and easy.

+
+
+

In addition to java and resources, we also add helm for helm templates and docker for docker scripts (e.g. Dockerfile) in src/main:

+
+
+
+
├──/src
+|  ├──/main
+|  |  ├──/docker
+|  |  ├──/helm
+|  |  ├──/java
+|  |  └──/resources
+|  └──/test
+|     ├──/java
+|     └──/resources
+└──/pom.xml
+
+
+
+
+
+

Deployment

+
+
+

For modern projects, we strongly recommend that your build process generates the final deliverable as an OCI compliant container. +Further, to go fully cloud-native, you should build your app as a native image via GraalVM AOT compiler. +Therefore, we recommend to use quarkus as your main framework. +In case you want to go with spring, you may consider using spring-native.

+
+
+
+
+

Layers

+
+
+

The package structure of your code inside src/main/java (and src/test/java) of your app is described in our coding conventions in the sections packages. +For the modern project structure, the layers are defined by the following table:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Layer«layer»Description

service

service

The service layer exposing functionality via its remote API. Typical protocol is REST. May also be any other protocol you are using such as gRPC.

domain

domain

The domain with the data-model and DB access. Use sub-package (in «detail») repository for repository and dao for DAOs. Also we recommend to put entities in model sub-package.

logic

logic


The logic layer with the functionality providing the business value.

common

common

cross-cutting code not assigned to a technical layer.

+
+
+
+

Architecture Mapping

+
+
+

In order to help you to map the architecture, packaging, layering, etc. to the code and see where different code elements should be placed, +we provide this architecture mapping:

+
+
+
+
«root»
+├──.«component»
+|  ├──.domain
+|  |  ├──.repo
+|  |  |  ├──.«BusinessObject»Repository
+|  |  |  ├──.«BusinessObject»Fragment
+|  |  |  └──.«BusinessObject»FragmentImpl
+|  |  ├──.dao [alternative to repo]
+|  |  |  ├──.«BusinessObject»Dao
+|  |  |  └──.«BusinessObject»DaoImpl
+|  |  └──.model
+|  |     └──.«BusinessObject»Entity
+|  ├──.logic
+|  |  ├──«BusinessObject»Validator
+|  |  └──«BusinessObject»EventsEmitter
+|   |  └──.Uc«Operation»«BusinessObject»[Impl]
+|  └──.rest
+|     └──.v1
+|        ├──.«Component»RestService
+|        ├──.mapper
+|        |     └──.«BusinessObject»Mapper
+|        └──.model
+|           └──.«BusinessObject»Dto
+└──.general
+   └──.domain
+      └──.model
+         └──.ApplicationPersistenceEntity
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-structure.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-structure.html new file mode 100644 index 00000000..f3a9af88 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-structure.html @@ -0,0 +1,281 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Project structure

+
+
+

In devonfw we want to give clear structure and guidance for building applications. +This also allows tools such as CobiGen or sonar-devon4j-plugin to "understand" the code. +Also this helps developers going from one devonfw project to the next one to quickly understand the code-base. +If every developer knows where to find what, the project gets more efficient. +A long time ago maven standardized the project structure with src/main/java, etc. and turned chaos into structure. +With devonfw we experienced the same for the codebase (what is inside src/main/java).

+
+
+

We initially started devon4j based on spring and spring-boot and proposed a classic project structure. +With modern cloud-native trends we added a modern project structure, that is more lean and up-to-date with the latest market trends.

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-testing-snapshots.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-testing-snapshots.html new file mode 100644 index 00000000..71327768 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-testing-snapshots.html @@ -0,0 +1,311 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Testing devon4j SNAPSHOT releases

+
+
+

Whenever a story in devon4j is completed by merging a PR, +our github actions will build a new SNAPSHOT release and on success deploy it to nexus on OSSRH. +You can therefore find the latest devonfw SNAPSHOT releases here.

+
+
+

In order to test a SNAPSHOT release in your project e.g. to give feedback if a bugfix or feature is working as you expect, you can do the following:

+
+
+
    +
  1. +

    Add the repository https://oss.sonatype.org/content/repositories/snapshots/. +If you are using a recent devonfw-ide simply edit the file $DEVON_IDE_HOME/conf/.m2/settings.xml and activate the devonfw-snapshots profile by changing activeByDefault to true. +All details can be found here.

    +
  2. +
  3. +


    Edit your toplevel pom.xml file and change the devon4j.version to the most recent SNAPSHOT version. To figure out the latest SNAPSHOT version of devon4j, check the maven.config or the CHANGELOG.

    +
  4. +
  5. +


    Test your application and see if the latest SNAPSHOT release fixes your issues, does not break your app and works as expected.

    +
  6. +
  7. +

    Give us feedback. We love to hear your feedback:

    +
    +
      +
    • +


      If an issue is not fixed as expected, comment the according issue on github.

      +
    • +
    • +


      If something broke or does not work as expected, please file a new issue and provide details (stacktrace, error log, etc.) but no confidential data (passwords, customer details, etc.).

      +
    • +
    • +

      If your test succeeded with the latest SNAPSHOT please also give confirming feedback to bug or feature tickets to let us know.

      +
    • +
    +
    +
  8. +
+
+
+

Thanks for your testing, support and help to make devonfw better!

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-testing.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-testing.html new file mode 100644 index 00000000..97ad800f --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-testing.html @@ -0,0 +1,859 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Testing

+
+
+

General best practices

+
+
+

For testing please follow our general best practices:

+
+
+
    +
  • +

    Tests should have a clear goal that should also be documented.

    +
  • +
  • +

    Tests have to be classified into different integration levels.

    +
  • +
  • +

    Tests should follow a clear naming convention.

    +
  • +
  • +

    Automated tests need to properly assert the result of the tested operation(s) in a reliable way. E.g. avoid stuff like assertThat(service.getAllEntities()).hasSize(42) or even worse tests that have no assertion at all.

    +
  • +
  • +

    Tests need to be independent of each other. Never write test-cases or tests (in Java @Test methods) that depend on another test to be executed before.

    +
  • +
  • +

    Use AssertJ to write good readable and maintainable tests that also provide valuable feedback in case a test fails. Do not use legacy JUnit methods like assertEquals anymore!

    +
  • +
  • +

    For easy understanding divide your test in three commented sections:

    +
    +
      +
    • +

      //given

      +
    • +
    • +

      //when

      +
    • +
    • +

      //then

      +
    • +
    +
    +
  • +
  • +

    Plan your tests and test data management properly before implementing.

    +
  • +
  • +

    Instead of having a too strong focus on test coverage better ensure you have covered your critical core functionality properly and review the code including tests.

    +
  • +
  • +

    Test code shall NOT be seen as second class code. You shall consider design, architecture and code-style also for your test code but do not over-engineer it.

    +
  • +
  • +

    Test automation is good but should be considered in relation to cost per use. Creating full coverage via automated system tests can cause a massive amount of test-code that can turn out as a huge maintenance hell. Always consider all aspects including product life-cycle, criticality of use-cases to test, and variability of the aspect to test (e.g. UI, test-data).

    +
  • +
  • +

    Use continuous integration and establish that the entire team wants to have clean builds and running tests.

    +
  • +
  • +

    Prefer delegation over inheritance for cross-cutting testing functionality. Good places to put this kind of code can be realized and reused via the JUnit @Rule mechanism.

    +
  • +
+
+
+
+
+

Test Automation Technology Stack

+
+
+

For test automation we use JUnit. However, we are strictly doing all assertions with AssertJ. For mocking we use Mockito. +In order to mock remote connections we use WireMock.

+
+
+

For testing entire components or sub-systems we recommend to use for Spring stack spring-boot-starter-test as lightweight and fast testing infrastructure that is already shipped with devon4j-test. For Quarkus, you can add the necessary extensions manually such as quarkus-junit5, quarkus-junit5-mockito, assertj-core etc.

+
+
+

In case you have to use a full blown JEE application server, we recommend to use arquillian. To get started with arquillian, look here.

+
+
+
+
+

Test Doubles

+
+
+

We use test doubles as generic term for mocks, stubs, fakes, dummies, or spys to avoid confusion. Here is a short summary from stubs VS mocks:

+
+
+
    +
  • +

    Dummy objects specifying no logic at all. May declare data in a POJO style to be used as boiler plate code to parameter lists or even influence the control flow towards the test’s needs.

    +
  • +
  • +

    Fake objects actually have working implementations, but usually take some shortcut which makes them not suitable for production (an in memory database is a good example).

    +
  • +
  • +

    Stubs provide canned answers to calls made during the test, usually not responding at all to anything outside what’s programmed in for the test. Stubs may also record information about calls, such as an email gateway stub that remembers the messages it 'sent', or maybe only how many messages it 'sent'.

    +
  • +
  • +

    Mocks are objects pre-programmed with expectations, which form a specification of the calls they are expected to receive.

    +
  • +
+
+
+

We try to give some examples, which should make it somehow clearer:

+
+
+

Stubs

+
+

Best Practices for applications:

+
+
+
    +
  • +

    A good way to replace small to medium large boundary systems, whose impact (e.g. latency) should be ignored during load and performance tests of the application under development.

    +
  • +
  • +


    As stub implementation will rely on state-based verification, there is the threat, that test developers will partially reimplement the state transitions based on the replaced code. This will immediately lead to a black maintenance hole, so better use mocks to assure the certain behavior on interface level.

    +
  • +
  • +

    Do NOT use stubs as basis of a large amount of test cases as due to state-based verification of stubs, test developers will enrich the stub implementation to become a large monster with its own hunger after maintenance efforts.

    +
  • +
+
+
+
+

Mocks

+
+

Best Practices for applications:

+
+
+
    +
  • +

    Replace not-needed dependencies of your system-under-test (SUT) to minimize the application context to start of your component framework.

    +
  • +
  • +

    Replace dependencies of your SUT to impact the control flow under test without establishing all the context parameters needed to match the control flow.

    +
  • +
  • +

    Remember: Not everything has to be mocked! Especially on lower levels of tests like isolated module tests you can be betrayed into a mocking delusion, where you end up in a hundred lines of code mocking the whole context and five lines executing the test and verifying the mocks behavior. Always keep in mind the benefit-cost ratio, when implementing tests using mocks.

    +
  • +
+
+
+
+

WireMock

+
+

If you need to mock remote connections such as HTTP-Servers, WireMock offers easy to use functionality. For a full description see the homepage or the github repository. Wiremock can be used either as a JUnit Rule, in Java outside of JUnit or as a standalone process. The mocked server can be configured to respond to specific requests in a given way via a fluent Java API, JSON files and JSON over HTTP. An example as an integration to JUnit can look as follows.

+
+
+
+
import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig;
+import com.github.tomakehurst.wiremock.junit.WireMockRule;
+
+public class WireMockOfferImport{
+
+  @Rule
+  public WireMockRule mockServer = new WireMockRule(wireMockConfig().dynamicPort());
+
+  @Test
+  public void requestDataTest() throws Exception {
+  int port = this.mockServer.port();
+  ...}
+
+
+
+

This creates a server on a randomly chosen free port on the running machine. You can also specify the port to be used if wanted. Other than that there are several options to further configure the server. This includes HTTPs, proxy settings, file locations, logging and extensions.

+
+
+
+
  @Test
+  public void requestDataTest() throws Exception {
+      this.mockServer.stubFor(get(urlEqualTo("/new/offers")).withHeader("Accept", equalTo("application/json"))
+      .withHeader("Authorization", containing("Basic")).willReturn(aResponse().withStatus(200).withFixedDelay(1000)
+      .withHeader("Content-Type", "application/json").withBodyFile("/wireMockTest/jsonBodyFile.json")));
+  }
+
+
+
+

This will stub the URL localhost:port/new/offers to respond with a status 200 message containing a header (Content-Type: application/json) and a body with content given in jsonBodyFile.json if the request matches several conditions. +It has to be a GET request to ../new/offers with the two given header properties.

+
+
+


Note that by default files are located in src/test/resources/__files/. When using only one WireMock server one can omit the this.mockServer before the stubFor call (static method). +You can also add a fixed delay to the response or processing delay with WireMock.addRequestProcessingDelay(time) in order to test for timeouts.

+
+
+

WireMock can also respond with different corrupted messages to simulate faulty behaviour.

+
+
+
+
@Test(expected = ResourceAccessException.class)
+public void faultTest() {
+
+    this.mockServer.stubFor(get(urlEqualTo("/fault")).willReturn(aResponse()
+    .withFault(Fault.MALFORMED_RESPONSE_CHUNK)));
+...}
+
+
+
+

A GET request to ../fault returns an OK status header, then garbage, and then closes the connection.

+
+
+
+
+
+

Integration Levels

+
+
+

There are many discussions about the right level of integration for test automation. Sometimes it is better to focus on small, isolated modules of the system - whatever a "module" may be. In other cases it makes more sense to test integrated groups of modules. Because there is no universal answer to this question, devonfw only defines a common terminology for what could be tested. Each project must make its own decision where to put the focus of test automation. There is no worldwide accepted terminology for the integration levels of testing. In general we consider ISTQB. However, with a technical focus on test automation we want to get more precise.

+
+
+

The following picture shows a simplified view of an application based on the devonfw reference architecture. We define four integration levels that are explained in detail below. +The boxes in the picture contain parenthesized numbers. These numbers depict the lowest integration level, a box belongs to. Higher integration levels also contain all boxes of lower integration levels. When writing tests for a given integration level, related boxes with a lower integration level must be replaced by test doubles or drivers.

+
+
+
+Integration Levels +
+
+
+

The main difference between the integration levels is the amount of infrastructure needed to test them. The more infrastructure you need, the more bugs you will find, but the more instable and the slower your tests will be. So each project has to make a trade-off between pros and contras of including much infrastructure in tests and has to select the integration levels that fit best to the project.

+
+
+

Consider, that more infrastructure does not automatically lead to a better bug-detection. There may be bugs in your software that are masked by bugs in the infrastructure. The best way to find those bugs is to test with very few infrastructure.

+
+
+

External systems do not belong to any of the integration levels defined here. devonfw does not recommend involving real external systems in test automation. This means, they have to be replaced by test doubles in automated tests. An exception may be external systems that are fully under control of the own development team.

+
+
+

The following chapters describe the four integration levels.

+
+
+

Level 1 Module Test

+
+


The goal of an isolated module test is to provide fast feedback to the developer. Consequently, isolated module tests must not have any interaction with the client, the database, the file system, the network, etc.

+
+
+


An isolated module test is testing a single class or at least a small set of classes in isolation. If such classes depend on other components or external resources, etc. these shall be replaced with a test double.

+
+
+
+
public class MyClassTest extends ModuleTest {
+
+  @Test
+  public void testMyClass() {
+
+    // given
+    MyClass myClass = new MyClass();
+    // when
+    String value = myClass.doSomething();
+    // then
+    assertThat(value).isEqualTo("expected value");
+  }
+
+}
+
+
+
+

For an advanced example see here.

+
+
+
+

Level 2 Component Test

+
+

A component test aims to test components or component parts as a unit. +These tests can access resources such as a database (e.g. for DAO tests). +Further, no remote communication is intended here. Access to external systems shall be replaced by a test double.

+
+
+
    +
  • +

    For Spring stack, they are typically run with a (light-weight) infrastructure such as spring-boot-starter-test. A component-test is illustrated in the following example:

    +
    +
    +
    @SpringBootTest(classes = { MySpringBootApp.class }, webEnvironment = WebEnvironment.NONE)
    +public class UcFindCountryTest extends ComponentTest {
    +  @Inject
    +  private UcFindCountry ucFindCountry;
    +
    +  @Test
    +  public void testFindCountry() {
    +
    +    // given
    +    String countryCode = "de";
    +
    +    // when
    +    TestUtil.login("user", MyAccessControlConfig.FIND_COUNTRY);
    +    CountryEto country = this.ucFindCountry.findCountry(countryCode);
    +
    +    // then
    +    assertThat(country).isNotNull();
    +    assertThat(country.getCountryCode()).isEqualTo(countryCode);
    +    assertThat(country.getName()).isEqualTo("Germany");
    +  }
    +}
    +
    +
    +
    +

    This test will start the entire spring-context of your app (MySpringBootApp). Within the test spring will inject according spring-beans into all your fields annotated with @Inject. In the test methods you can use these spring-beans and perform your actual tests. This pattern can be used for testing DAOs/Repositories, Use-Cases, or any other spring-bean with its entire configuration including database and transactions.

    +
    +
  • +
  • +

    For Quarkus, you can similarly inject the CDI beans and perform tests. An example is shown below:

    +
    +
    +
    @QuarkusTest
    +public class UcFindCountryTest {
    +  @Inject
    +  private UcFindCountry ucFindCountry;
    +  ...
    +
    +
    +
  • +
+
+
+


When you are testing use-cases your authorization will also be in place. Therefore, you have to simulate a logon in advance, which is done via the login method in the above Spring example. The test-infrastructure will automatically do a logout for you after each test method in doTearDown.

+
+
+
+

Level 3 Subsystem Test

+
+

A subsystem test runs against the external interfaces (e.g. HTTP service) of the integrated subsystem. Subsystem tests of the client subsystem are described in the devon4ng testing guide. In devon4j the server (JEE application) is the subsystem under test. The tests act as a client (e.g. service consumer) and the server has to be integrated and started in a container.

+
+
+
    +
  • +

    With devon4j and Spring you can write a subsystem-test as easy as illustrated in the following example:

    +
    +
    +
    @SpringBootTest(classes = { MySpringBootApp.class }, webEnvironment = WebEnvironment.RANDOM_PORT)
    +public class CountryRestServiceTest extends SubsystemTest {
    +
    +  @Inject
    +  private ServiceClientFactory serviceClientFactory;
    +
    +  @Test
    +  public void testFindCountry() {
    +
    +    // given
    +    String countryCode = "de";
    +
    +    // when
    +    CountryRestService service = this.serviceClientFactory.create(CountryRestService.class);
    +    CountryEto country = service.findCountry(countryCode);
    +
    +    // then
    +    assertThat(country).isNotNull();
    +    assertThat(country.getCountryCode()).isEqualTo(countryCode);
    +    assertThat(country.getName()).isEqualTo("Germany");
    +  }
    +}
    +
    +
    +
    +

    Even though not obvious on the first look this test will start your entire application as a server on a free random port (so that it works in CI with parallel builds for different branches) and tests the invocation of a (REST) service including (un)marshalling of data (e.g. as JSON) and transport via HTTP (all in the invocation of the findCountry method).

    +
    +
  • +
+
+
+

Do not confuse a subsystem test with a system integration test. A system integration test validates the interaction of several systems where we do not recommend test automation.

+
+
+
+

Level 4 System Test

+
+

A system test has the goal to test the system as a whole against its official interfaces such as its UI or batches. The system itself runs as a separate process in a way close to a regular deployment. Only external systems are simulated by test doubles.

+
+
+


The devonfw only gives advice for automated system test (TODO see allure testing framework). In nearly every project there must be manual system tests, too. These manual system tests are out of scope here.

+
+
+
+

Classifying Integration-Levels

+
+

For Spring stack, devon4j defines Category-Interfaces that shall be used as JUnit Categories. +Also devon4j provides abstract base classes that you may extend in your test-cases if you like.

+
+
+

devon4j further pre-configures the maven build to only run integration levels 1-2 by default (e.g. for fast feedback in continuous integration). It offers the profiles subsystemtest (1-3) and systemtest (1-4). In your nightly build you can simply add -Psystemtest to run all tests.

+
+
+
+
+
+

Implementation

+
+
+

This section introduces how to implement tests on the different levels with the given devonfw infrastructure and the proposed frameworks. +For Spring, see Spring Test Implementation

+
+
+
+
+

Regression testing

+
+
+


When it comes to complex output (even binary) that you want to regression test by comparing with an expected result, you should consider Approval Tests using ApprovalTests.Java. +If applied for the right problems, it can be very helpful.

+
+
+
+
+

Deployment Pipeline

+
+
+

A deployment pipeline is a semi-automated process that gets software-changes from version control into production. It contains several validation steps, e.g. automated tests of all integration levels. +Because devon4j should fit to different project types - from agile to waterfall - it does not define a standard deployment pipeline. But we recommend to define such a deployment pipeline explicitly for each project and to find the right place in it for each type of test.

+
+
+

For that purpose, it is advisable to have fast running test suite that gives as much confidence as possible without needing too much time and too much infrastructure. This test suite should run in an early stage of your deployment pipeline. Maybe the developer should run it even before he/she checked in the code. Usually lower integration levels are more suitable for this test suite than higher integration levels.

+
+
+

Note, that the deployment pipeline always should contain manual validation steps, at least manual acceptance testing. There also may be manual validation steps that have to be executed for special changes only, e.g. usability testing. Management and execution processes of those manual validation steps are currently not in the scope of devonfw.

+
+
+
+
+

Test Coverage

+
+
+

We are using tools (SonarQube/Jacoco) to measure the coverage of the tests. Please always keep in mind that the only reliable message of a code coverage of X% is that (100-X)% of the code is entirely untested. It does not say anything about the quality of the tests or the software though it often relates to it.

+
+
+
+
+

Test Configuration

+
+
+

This section covers test configuration in general without focusing on integration levels as in the first chapter.

+
+
+ +
+
+

Configure Test Specific Beans

+
+

Sometimes it can become handy to provide other or differently configured bean implementations via CDI than those available in production. For example, when creating beans using @Bean-annotated methods they are usually configured within those methods. WebSecurityBeansConfig shows an example of such methods.

+
+
+
+
@Configuration
+public class WebSecurityBeansConfig {
+  //...
+  @Bean
+  public AccessControlSchemaProvider accessControlSchemaProvider() {
+    // actually no additional configuration is shown here
+    return new AccessControlSchemaProviderImpl();
+  }
+  //...
+}
+
+
+
+

AccessControlSchemaProvider allows to programmatically access data defined in some XML file, e.g. access-control-schema.xml. Now, one can imagine that it would be helpful if AccessControlSchemaProvider would point to some other file than the default within a test class. That file could provide content that differs from the default. +The question is: how can I change resource path of AccessControlSchemaProviderImpl within a test?

+
+
+

One very helpful solution is to use static inner classes. +Static inner classes can contain @Bean -annotated methods, and by placing them in the classes parameter in @SpringBootTest(classes = { /* place class here*/ }) annotation the beans returned by these methods are placed in the application context during test execution. Combining this feature with inheritance allows to override methods defined in other configuration classes as shown in the following listing where TempWebSecurityConfig extends WebSecurityBeansConfig. This relationship allows to override public AccessControlSchemaProvider accessControlSchemaProvider(). Here we are able to configure the instance of type AccessControlSchemaProviderImpl before returning it (and, of course, we could also have used a completely different implementation of the AccessControlSchemaProvider interface). By overriding the method the implementation of the super class is ignored, hence, only the new implementation is called at runtime. Other methods defined in WebSecurityBeansConfig which are not overridden by the subclass are still dispatched to WebSecurityBeansConfig.

+
+
+
+
//... Other testing related annotations
+@SpringBootTest(classes = { TempWebSecurityConfig.class })
+public class SomeTestClass {
+
+  public static class TempWebSecurityConfig extends WebSecurityBeansConfig {
+
+    @Override
+    @Bean
+    public AccessControlSchemaProvider accessControlSchemaProvider() {
+
+      ClassPathResource resource = new ClassPathResource(locationPrefix + "access-control-schema3.xml");
+      AccessControlSchemaProviderImpl accessControlSchemaProvider = new AccessControlSchemaProviderImpl();
+      accessControlSchemaProvider.setAccessControlSchema(resource);
+      return accessControlSchemaProvider;
+    }
+  }
+}
+
+
+
+


The following chapter of the Spring framework documentation explains the issue, but uses a slightly different way to obtain the configuration.

+
+
+
+

Test Data

+
+

It is possible to obtain test data in two different ways depending on your test’s integration level.

+
+
+
+
+
+

Debugging Tests

+
+
+

The following two sections describe two debugging approaches for tests. Tests are either run from within the IDE or from the command line using Maven.

+
+
+

Debugging with the IDE

+
+

Debugging with the IDE is as easy as always. Even if you want to execute a SubsystemTest which needs a Spring context and a server infrastructure to run properly, you just set your breakpoints and click on Debug As → JUnit Test. The test infrastructure will take care of initializing the necessary infrastructure - if everything is configured properly.

+
+
+
+

Debugging with Maven

+
+

Please refer to the following two links to find a guide for debugging tests when running them from Maven.

+
+ +
+

In essence, you first have to start execute a test using the command line. Maven will halt just before the test execution and wait for your IDE to connect to the process. When receiving a connection the test will start and then pause at any breakpoint set in advance. +The first link states that tests are started through the following command:

+
+
+
+
mvn -Dmaven.surefire.debug test
+
+
+
+

Although this is correct, it will run every test class in your project and - which is time consuming and mostly unnecessary - halt before each of these tests. +To counter this problem you can simply execute a single test class through the following command (here we execute the TablemanagementRestServiceTest from the restaurant sample application):

+
+
+
+
mvn test -Dmaven.surefire.debug test -Dtest=TablemanagementRestServiceTest
+
+
+
+

It is important to notice that you first have to execute the Maven command in the according submodule, e.g. to execute the TablemanagementRestServiceTest you have first to navigate to the core module’s directory.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-text-search.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-text-search.html new file mode 100644 index 00000000..cbe426de --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-text-search.html @@ -0,0 +1,320 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Full Text Search

+
+
+


If you want to allow all your users fast and simple searches with just a single search field (like in google), you need full text indexing and search support.

+
+
+

Solutions

+
+
+ +
+
+

Maybe you also want to use native features of your database

+
+ +
+
+
+

Best Practices

+
+
+

TODO

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-transactions.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-transactions.html new file mode 100644 index 00000000..bcb4a205 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-transactions.html @@ -0,0 +1,366 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Transaction Handling

+
+
+


For transaction handling we use AOP to add transaction control via annotations as aspect. +This is done by annotating your code with the @Transactional annotation. +You can either annotate your container bean at class level to make all methods transactional or you can annotate individual methods to make them transactional:

+
+
+
+
  @Transactional
+  public Output getData(Input input) {
+    ...
+  }
+
+
+
+

JTA Imports

+
+
+

Here are the import statements for transaction support:

+
+
+
+
import javax.transaction.Transactional;
+
+
+
+ + + + + +
+ + +Use the above import statement to follow JEE and avoid using org.springframework.transaction.annotation.Transactional. +
+
+
+
+
+

JTA Dependencies

+
+
+

Please note that with Jakarta EE the dependencies have changed. +When you want to start with Jakarta EE you should use these dependencies to get the annotations for dependency injection:

+
+
+
+
<!-- Java Transaction API (JTA) -->
+<dependency>
+  <groupId>jakarta.transaction</groupId>
+  <artifactId>jakarta.transaction-api</artifactId>
+</dependency>
+
+
+
+

Please note that with quarkus you will get them as transitive dependencies out of the box. +The above Jakarta EE dependencies replace these JEE dependencies:

+
+
+
+
<!-- Java Transaction API (JTA) -->
+<dependency>
+  <groupId>javax.transaction</groupId>
+  <artifactId>javax.transaction-api</artifactId>
+</dependency>
+
+
+
+
+
+

Handling constraint violations

+
+
+

Using @Transactional magically wraps transaction handling around your code. +As constraints are checked by the database at the end when the transaction gets committed, a constraint violation will be thrown by this aspect outside your code. +In case you have to handle constraint violations manually, you have to do that in code outside the logic that is annotated with @Transactional. +This may be done in a service operation by catching a ConstraintViolationException (org.hibernate.exception.ConstraintViolationException for hibernate). +As a generic approach you can solve this via REST exception handling.

+
+
+
+
+

Batches

+
+
+

Transaction control for batches is a lot more complicated and is described in the batch layer.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-transferobject.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-transferobject.html new file mode 100644 index 00000000..cd65fae4 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-transferobject.html @@ -0,0 +1,329 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Transfer-Objects

+
+
+

The technical data model is defined in form of persistent entities. +However, passing persistent entities via call-by-reference across the entire application will soon cause problems:

+
+
+
    +
  • +

    Changes to a persistent entity are directly written back to the persistent store when the transaction is committed. When the entity is send across the application also changes tend to take place in multiple places endangering data sovereignty and leading to inconsistency.

    +
  • +
  • +

    You want to send and receive data via services across the network and have to define what section of your data is actually transferred. If you have relations in your technical model you quickly end up loading and transferring way too much data.

    +
  • +
  • +

    Modifications to your technical data model shall not automatically have impact on your external services causing incompatibilities.

    +
  • +
+
+
+

To prevent such problems transfer-objects are used leading to a call-by-value model and decoupling changes to persistent entities.

+
+
+

In the following sections the different types of transfer-objects are explained. +You will find all according naming-conventions in the architecture-mapping

+
+
+

To structure your transfer objects, we recommend the following approaches:

+
+
+ +
+
+

Also considering the following transfer objects in specific cases:

+
+
+
+
SearchCriteriaTo
+
+

For searching we create or generate a «BusinessObject»SearchCriteriaTo representing a query to find instances of «BusinessObject».

+
+
TO
+
+

There are typically transfer-objects for data that is never persistent. +For very generic cases these just carry the suffix To.

+
+
STO
+
+

We can potentially create separate service transfer objects (STO) (if possible named «BusinessObject»Sto) to keep the service API stable and independent of the actual data-model. +However, we usually do not need this and want to keep our architecture simple. +Only create STOs if you need service versioning and support previous APIs or to provide legacy service technologies that require their own isolated data-model. +In such a case you also need beanmapping between STOs and ETOs/DTOs which means extra effort and complexity that should be avoided.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-usecase.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-usecase.html new file mode 100644 index 00000000..b1dcc9da --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-usecase.html @@ -0,0 +1,404 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==UseCase +A use-case is a small unit of the logic layer responsible for an operation on a particular entity (business object). +We leave it up to you to decide whether you want to define an interface (API) for each use-case or provide an implementation directly.

+
+
+

Following our architecture-mapping (for classic and modern project), use-cases are named Uc«Operation»«BusinessObject»[Impl]. The prefix Uc stands for use-case and allows to easily find and identify them in your IDE. The «Operation» stands for a verb that is operated on the entity identified by «BusinessObject». +For CRUD we use the standard operations Find and Manage that can be generated by CobiGen. This also separates read and write operations (e.g. if you want to do CQRS, or to configure read-only transactions for read operations).

+
+
+

In our example, we choose to define an interface for each use-case. We also use *To to refer to any type of transfer object. Please follow our guide to understand more about different types of transfer object e.g. Eto, Dto, Cto

+
+
+

Find

+
+
+

The UcFind«BusinessObject» defines all read operations to retrieve and search the «BusinessObject». +Here is an example:

+
+
+
+
public interface UcFindBooking {
+  //*To = Eto, Dto or Cto
+  Booking*To findBooking(Long id);
+}
+
+
+
+
+
+

Manage

+
+
+

The UcManage«BusinessObject» defines all CRUD write operations (create, update and delete) for the «BusinessObject». +Here is an example:

+
+
+
+
public interface UcManageBooking {
+
+  //*To = Eto, Dto or Cto
+  Booking*To saveBooking(Booking*To booking);
+
+  void deleteBooking(Long id);
+
+}
+
+
+
+
+
+

Custom

+
+
+

Any other non CRUD operation Uc«Operation»«BusinessObject» uses any other custom verb for «Operation». +Typically, such custom use-cases only define a single method. +Here is an example:

+
+
+
+
public interface UcApproveBooking {
+
+  //*To = Eto, Dto or Cto
+  void approveBooking(Booking*To booking);
+
+}
+
+
+
+
+
+

Implementation

+
+
+

The implementation should carry its own name and the suffix Impl and is annotated with @Named and @ApplicationScoped. It will need access to the persistent data which is done by injecting the corresponding repository (or DAO). Furthermore, it shall not expose persistent entities from the data access layer and has to map them to transfer objects using the bean-mapper. Please refer to our bean mapping, transfer object and dependency injection documentation for more information. +Here is an example:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcManageBookingImpl implements UcManageBooking {
+
+  @Inject
+  private BookingRepository bookingRepository;
+
+  @Override
+  public void deleteBooking(Long id) {
+
+    LOG.debug("Delete Booking with id {} from database.", id);
+    this.bookingRepository.deleteById(id);
+  }
+}
+
+
+
+

The use-cases can then be injected directly into the service.

+
+
+
+
@Named("BookingmanagementRestService")
+@Validated
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+
+  @Inject
+  private UcFindBooking ucFindBooking;
+
+  @Inject
+  private UcManageBooking ucManageBooking;
+
+  @Inject
+  private UcApproveBooking ucApproveBooking;
+}
+
+
+
+
+
+

Internal use case

+
+
+

Sometimes, a component with multiple related entities and many use-cases needs to reuse business logic internally. +Of course, this can be exposed as an official use-case API but this will imply using transfer-objects (ETOs) instead of entities. In some cases, this is undesired e.g. for better performance to prevent unnecessary mapping of entire collections of entities. +In the first place, you should try to use abstract base implementations providing reusable methods the actual use-case implementations can inherit from. +If your business logic is even more complex and you have multiple aspects of business logic to share and reuse but also run into multi-inheritance issues, you may also just create use-cases that have their interface located in the impl scope package right next to the implementation (or you may just skip the interface). In such a case, you may define methods that directly take or return entity objects. +To avoid confusion with regular use-cases, we recommend to add the Internal suffix to the type name leading to Uc«Operation»«BusinessObject»Internal[Impl].

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-validation.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-validation.html new file mode 100644 index 00000000..f5138fc0 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-validation.html @@ -0,0 +1,475 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Validation

+
+
+

Validation is about checking syntax and semantics of input data. Invalid data is rejected by the application. +Therefore validation is required in multiple places of an application. E.g. the GUI will do validation for usability reasons to assist the user, early feedback and to prevent unnecessary server requests. +On the server-side validation has to be done for consistency and security.

+
+
+

In general we distinguish these forms of validation:

+
+
+
    +
  • +

    stateless validation will produce the same result for given input at any time (for the same code/release).

    +
  • +
  • +

    stateful validation is dependent on other states and can consider the same input data as valid in once case and as invalid in another.

    +
  • +
+
+
+

Stateless Validation

+
+
+

For regular, stateless validation we use the JSR303 standard that is also called bean validation (BV). +Details can be found in the specification. +As implementation we recommend hibernate-validator.

+
+
+

Example

+
+

A description of how to enable BV for spring applications can be found in the relevant Spring documentation. A guide you can use to integrate validation in Quarkus applications can be found here. For a quick summary follow these steps:

+
+
+
    +
  • +

    Make sure that hibernate-validator is located in the classpath by adding a dependency to the pom.xml.

    +
  • +
+
+
+
spring
+
+
    <dependency>
+      <groupId>org.hibernate</groupId>
+      <artifactId>hibernate-validator</artifactId>
+    </dependency>
+
+
+
+
quarkus
+
+
    <dependency>
+      <groupId>io.quarkus</groupId>
+      <artifactId>quarkus-hibernate-validator</artifactId>
+    </dependency>
+
+
+
+
    +
  • +

    For methods to validate go to their declaration and add constraint annotations to the method parameters.

    +
    +

    In spring applications you can add the @Validated annotation to the implementation (spring bean) to be validated (this is an annotation of the spring framework, so it's not available in the Quarkus context). The standard use case is to annotate the logic layer implementation, i.e. the use case implementation or component facade in case of simple logic layer pattern. Thus, the validation will be executed for service requests as well as batch processing.

    +
    +
    +
      +
    • +

      @Valid annotation to the arguments to validate (if that class itself is annotated with constraints to check).

      +
    • +
    • +

      @NotNull for required arguments.

      +
    • +
    • +

      Other constraints (e.g. @Size) for generic arguments (e.g. of type String or Integer). However, consider to create custom datatypes and avoid adding too much validation logic (especially redundant in multiple places).

      +
    • +
    +
    +
  • +
+
+
+
BookingmanagementRestServiceImpl.java
+
+
@Validated
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+  ...
+  public BookingEto saveBooking(@Valid BookingCto booking) {
+  ...
+
+
+
+
    +
  • +

    Finally add appropriate validation constraint annotations to the fields of the ETO class.

    +
  • +
+
+
+
BookingCto.java
+
+
  @Valid
+  private BookingEto booking;
+
+
+
+
BookingEto.java
+
+
  @NotNull
+  @Future
+  private Timestamp bookingDate;
+
+
+
+

A list with all bean validation constraint annotations available for hibernate-validator can be found here. In addition it is possible to configure custom constraints. Therefore it is necessary to implement an annotation and a corresponding validator. A description can also be found in the Spring documentation or with more details in the hibernate documentation.

+
+
+ + + + + +
+ + +Bean Validation in Wildfly >v8: Wildfly v8 is the first version of Wildfly implementing the JEE7 specification. It comes with bean validation based on hibernate-validator out of the box. In case someone is running Spring in Wildfly for whatever reasons, the spring based annotation @Validated would duplicate bean validation at runtime and thus should be omitted. +
+
+
+
+

GUI-Integration

+
+

TODO

+
+
+
+

Cross-Field Validation

+
+

BV has poor support for this. Best practice is to create and use beans for ranges, etc. that solve this. A bean for a range could look like so:

+
+
+
+
public class Range<V extends Comparable<V>> {
+
+  private V min;
+  private V max;
+
+  public Range(V min, V max) {
+
+    super();
+    if ((min != null) && (max != null)) {
+      int delta = min.compareTo(max);
+      if (delta > 0) {
+        throw new ValueOutOfRangeException(null, min, min, max);
+      }
+    }
+    this.min = min;
+    this.max = max;
+  }
+
+  public V getMin() ...
+  public V getMax() ...
+
+
+
+
+
+
+

Stateful Validation

+
+
+

For complex and stateful business validations we do not use BV (possible with groups and context, etc.) but follow KISS and just implement this on the server in a straight forward manner. +An example is the deletion of a table in the example application. Here the state of the table must be checked first:

+
+
+

BookingmanagementImpl.java

+
+
+
+
  private void sendConfirmationEmails(BookingEntity booking) {
+
+    if (!booking.getInvitedGuests().isEmpty()) {
+      for (InvitedGuestEntity guest : booking.getInvitedGuests()) {
+        sendInviteEmailToGuest(guest, booking);
+      }
+    }
+
+    sendConfirmationEmailToHost(booking);
+  }
+
+
+
+

Implementing this small check with BV would be a lot more effort.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/guide-xml.html b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-xml.html new file mode 100644 index 00000000..0f139b5c --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/guide-xml.html @@ -0,0 +1,316 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==XML

+
+
+

XML (Extensible Markup Language) is a W3C standard format for structured information. It has a large eco-system of additional standards and tools.

+
+
+

In Java there are many different APIs and frameworks for accessing, producing and processing XML. For the devonfw we recommend to use JAXB for mapping Java objects to XML and vice-versa. Further there is the popular DOM API for reading and writing smaller XML documents directly. When processing large XML documents StAX is the right choice.

+
+
+

JAXB

+
+
+

We use JAXB to serialize Java objects to XML or vice-versa.

+
+
+

JAXB and Inheritance

+
+

Use @XmlSeeAlso annotation to provide sub-classes. +See section "Collective Polymorphism" described here.

+
+
+
+

JAXB Custom Mapping

+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to define a custom mapping. If you create dedicated objects for the XML mapping you can easily avoid such situations. When this is not suitable use @XmlJavaTypeAdapter and provide an XmlAdapter implementation that handles the mapping. +For details see here.

+
+
+
+
+
+

Security

+
+
+

To prevent XML External Entity attacks, follow JAXP Security Guide and enable FSP.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/master-devon4j.html b/docs/devonfw.github.io/1.0/devon4j.wiki/master-devon4j.html new file mode 100644 index 00000000..4fa67d05 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/master-devon4j.html @@ -0,0 +1,8328 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==devon4j

+
+
+

devonfw provides a solution to building applications which combine best-in-class frameworks and libraries as well as industry proven practices and code conventions. +It massively speeds up development, reduces risks and helps you to deliver better results.

+
+
+

The following sections contain the complete compendium of devon4j, the Java stack of devonfw. +You can also read the latest version of this documentation online in the devon4j wiki +or at devon4j on devonfw.com.

+
+ +
+

==Architecture

+
+
+

There are many different views that are summarized by the term architecture. First, we will introduce the key principles and architecture principles of devonfw. Then, we will go into details of the architecture of an application.

+
+
+

Key Principles

+
+

For devonfw we follow these fundamental key principles for all decisions about architecture, design, or choosing standards, libraries, and frameworks:

+
+
+
    +
  • +

    KISS
    +Keep it small and simple

    +
  • +
  • +

    Open
    +Commitment to open standards and solutions (no required dependencies to commercial or vendor-specific standards or solutions)

    +
  • +
  • +

    Patterns
    +We concentrate on providing patterns, best-practices and examples rather than writing framework code.

    +
  • +
  • +

    Solid
    +We pick solutions that are established and have been proven to be solid and robust in real-life (business) projects.

    +
  • +
+
+
+
+

Architecture Principles

+
+

Additionally we define the following principles that our architecture is based on:

+
+
+
    +
  • +

    Component Oriented Design
    +We follow a strictly component oriented design to address the following sub-principles:

    +
    +
      +
    • +

      Separation of Concerns

      +
    • +
    • +

      Reusability and avoiding redundant code

      +
    • +
    • +

      Information Hiding via component API and its exchangeable implementation treated as secret.

      +
    • +
    • +

      Design by Contract for self-contained, descriptive, and stable component APIs.

      +
    • +
    • +

      Layering as well as separation of business logic from technical code for better maintenance.

      +
    • +
    • +

      Data Sovereignty (and high cohesion with low coupling) says that a component is responsible for its data and changes to this data shall only happen via the component. Otherwise, maintenance problems will arise to ensure that data remains consistent. Therefore, interfaces of a component that may be used by other components are designed call-by-value and not call-by-reference.

      +
    • +
    +
    +
  • +
  • +

    Homogeneity
    +Solve similar problems in similar ways and establish a uniform code-style.

    +
  • +
+
+
+

As an architect you should be prepared for the future by reading the TechnoVision.

+
+
+
+

Application Architecture

+
+

For the architecture of an application we distinguish the following views:

+
+
+
    +
  • +

    The Business Architecture describes an application from the business perspective. It divides the application into business components and with full abstraction of technical aspects.

    +
  • +
  • +

    The Technical Architecture describes an application from the technical implementation perspective. It divides the application into technical layers and defines which technical products and frameworks are used to support these layers.

    +
  • +
  • +

    The Infrastructure Architecture describes an application from the operational infrastructure perspective. It defines the nodes used to run the application including clustering, load-balancing and networking. This view is not explored further in this guide.

    +
  • +
+
+
+

Business Architecture

+
+

The business architecture divides the application into business components. A business component has a well-defined responsibility that it encapsulates. All aspects related to that responsibility have to be implemented within that business component. Further, the business architecture defines the dependencies between the business components. These dependencies need to be free of cycles. A business component exports its functionality via well-defined interfaces as a self-contained API. A business component may use another business component via its API and compliant with the dependencies defined by the business architecture.

+
+
+

As the business domain and logic of an application can be totally different, the devonfw can not define a standardized business architecture. Depending on the business domain it has to be defined from scratch or from a domain reference architecture template. For very small systems it may be suitable to define just a single business component containing all the code.

+
+
+
+

Technical Architecture

+
+

The technical architecture divides the application into technical layers based on the multilayered architecture. A layer is a unit of code with the same category such as a service or presentation logic. So, a layer is often supported by a technical framework. Each business component can therefore be split into component parts for each layer. However, a business component may not have component parts for every layer (e.g. only a presentation part that utilized logic from other components).

+
+
+

An overview of the technical reference architecture of the devonfw is given by figure "Technical Reference Architecture". +It defines the following layers visualized as horizontal boxes:

+
+
+ +
+
+

Also, you can see the (business) components as vertical boxes (e.g. A and X) and how they are composed out of component parts each one assigned to one of the technical layers.

+
+
+

Further, there are technical components for cross-cutting aspects grouped by the gray box on the left. Here is a complete list:

+
+ +
+
+devonfw architecture blueprint +
+
Figure 1. Technical Reference Architecture
+
+
+

Please click on the architecture image to open it as SVG and click on the layers and cross-cutting topics to open the according documentation guide.

+
+
+

We reflect this architecture in our code as described in our coding conventions allowing a traceability of business components, use-cases, layers, etc. into the code and giving +developers a sound orientation within the project.

+
+
+

Further, the architecture diagram shows the allowed dependencies illustrated by the dark green connectors. +Within a business component a component part can call the next component part on the layer directly below via a dependency on its API (vertical connectors). +While this is natural and obvious, it is generally forbidden to have dependencies upwards the layers +or to skip a layer by a direct dependency on a component part two or more layers below. +The general dependencies allowed between business components are defined by the business architecture. +In our reference architecture diagram we assume that the business component A1 is allowed to depend +on component A2. Therefore, a use-case within the logic component part of A1 is allowed to call a +use-case from A2 via a dependency on the component API. The same applies for dialogs on the client layer. +This is illustrated by the horizontal connectors. Please note that persistence entities are part of the API of the data-access component part so only the logic component part of the same +business component may depend on them.

+
+
+

The technical architecture has to address non-functional requirements:

+
+
+
    +
  • +

    scalability
    +is established by keeping state in the client and making the server state-less (except for login session). Via load-balancers new server nodes can be added to improve performance (horizontal scaling).

    +
  • +
  • +

    availability and reliability
    +are addressed by clustering with redundant nodes avoiding any single-point-of failure. If one node fails the system is still available. Further, the software has to be robust so there are no dead-locks or other bad effects that can make the system unavailable or not reliable.

    +
  • +
  • +

    security
    +is achieved in the devonfw by the right templates and best-practices that avoid vulnerabilities. See security guidelines for further details.

    +
  • +
  • +

    performance
    +is obtained by choosing the right products and proper configurations. While the actual implementation of the application matters for performance a proper design is important as it is the key to allow performance-optimizations (see e.g. caching).

    +
  • +
+
+
+
Technology Stack
+
+

The technology stack of the devonfw is illustrated by the following table.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Technology Stack of devonfw
TopicDetailStandardSuggested implementation

runtime

language & VM

Java

Oracle JDK

runtime

servlet-container

JEE

tomcat

component management

dependency injection

JSR330 & JSR250

spring

configuration

framework

-

spring-boot

persistence

OR-mapper

JPA

hibernate

batch

framework

JSR352

spring-batch

service

SOAP services

JAX-WS

CXF

service

REST services

JAX-RS

CXF

logging

framework

slf4j

logback

validation

framework

beanvalidation/JSR303

hibernate-validator

security

Authentication & Authorization

JAAS

spring-security

monitoring

framework

JMX

spring

monitoring

HTTP Bridge

HTTP & JSON

jolokia

AOP

framework

dynamic proxies

spring AOP

+
+ +
+

==Components

+
+
+

Following separation-of-concerns we divide an application into components using our package-conventions and project structure. +As described by the architecture each component is divided into layers as described in the project structure. +Please note that a component will only have the required layers. +So a component may have any number from one to all layers.

+
+
+
+
+

General Component

+
+

Cross-cutting aspects belong to the implicit component general. It contains technical configurations and very general code that is not business specific. Such code shall not have any dependencies to other components and therefore business related code.

+
+
+
+

Business Component

+
+

The business-architecture defines the business components with their allowed dependencies. A small application (microservice) may just have one component and no dependencies making it simple while the same architecture can scale up to large and complex applications (from bigger microservice up to modulith). +Tailoring a business domain into applications and applications into components is a tricky task that needs the skills of an experienced architect. +Also, the tailoring should follow the business and not split by technical reasons or only by size. +Size is only an indicator but not a driver of tailoring. +Whatever hypes like microservices are telling you, never get misled in this regard: +If your system grows and reaches MAX+1 lines of code, it is not the right motivation to split it into two microservices of ~MAX/2 lines of code - such approaches will waste huge amounts of money and lead to chaos.

+
+
+
+

App Component

+
+

Only in case you need cross-cutting code that aggregates another component you may introduce the component app. +It is allowed to depend on all other components but no other component may depend on it. +With the modularity and flexibility of spring you typically do not need this. +However, when you need to have a class that registers all services or component-facades using direct code dependencies, you can introduce this component.

+
+
+
+

Component Example

+
+

The following class diagram illustrates an example of the business component Staffmanagement:

+
+
+
+logic layer component pattern +
+
+
+

In this scheme, you can see the structure and flow from the service-layer (REST service call) via the logic-layer to the dataaccess-layer (and back).

+
+
+
+
+

Coding

+
+ +
+

==Coding Conventions

+
+
+

The code should follow general conventions for Java (see Oracle Naming Conventions, Google Java Style, etc.). We consider this as common sense and provide configurations for SonarQube and related tools such as Checkstyle instead of repeating this here.

+
+
+

Naming

+
+

Besides general Java naming conventions, we follow the additional rules listed here explicitly:

+
+
+
    +
  • +

    Always use short but speaking names (for types, methods, fields, parameters, variables, constants, etc.).

    +
  • +
  • +

    Strictly avoid special characters in technical names (for files, types, fields, methods, properties, variables, database tables, columns, constraints, etc.). In other words only use Latin alphanumeric ASCII characters with the common allowed technical separators for the according context (e.g. underscore) for technical names (even excluding whitespaces).

    +
  • +
  • +

    For package segments and type names prefer singular forms (CustomerEntity instead of CustomersEntity). Only use plural forms when there is no singular or it is really semantically required (e.g. for a container that contains multiple of such objects).

    +
  • +
  • +

    Avoid having duplicate type names. The name of a class, interface, enum or annotation should be unique within your project unless this is intentionally desired in a special and reasonable situation.

    +
  • +
  • +

    Avoid artificial naming constructs such as prefixes (I*) or suffixes (*IF) for interfaces.

    +
  • +
  • +

    Use CamelCase even for abbreviations (XmlUtil instead of XMLUtil)

    +
  • +
  • +

    Avoid property/field names where the second character is upper-case at all (e.g. 'aBc'). See #1095 for details.

    +
  • +
  • +

    Names of Generics should be easy to understand. Where suitable follow the common rule E=Element, T=Type, K=Key, V=Value but feel free to use longer names for more specific cases such as ID, DTO or ENTITY. The capitalized naming helps to distinguish a generic type from a regular class.

    +
  • +
+
+
+
+

Packages

+
+

Java Packages are the most important element to structure your code. We use a strict packaging convention to map technical layers and business components (slices) to the code (See technical architecture for further details). By using the same names in documentation and code we create a strong link that gives orientation and makes it easy to find from business requirements, specifications or story tickets into the code and back.

+
+
+

For a devon4j based application we use the following Java-Package schema:

+
+
+
+
«root».«component».«layer»[.«detail»]
+
+
+
+

E.g. in our example application we find the Spring Data repositories for the ordermanagement component in the package com.devonfw.application.mtsj.ordermanagement.dataaccess.api.repo

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2. Segments of package schema
SegmentDescriptionExample

«root»

Is the basic Java Package name-space of your app. Typically we suggest to use «group».«artifact» where «group» is your maven/gradle groupId corresponding to your organization or IT project owning the code following common Java Package conventions. The segment «artifact» is your maven/gradle artifactId and is typically the technical name of your app.

com.devonfw.application.mtsj

«component»

The (business) component the code belongs to. It is defined by the business architecture and uses terms from the business domain. Use the implicit component general for code not belonging to a specific component (foundation code).

salesmanagement

«layer»

The name of the technical layer (See technical architecture). Details are described for the modern project structure and for the classic project structure.

logic

«detail»

Here you are free to further divide your code into sub-components and other concerns according to the size of your component part. If you want to strictly separate API from implementation you should start «detail» with «scope» that is explained below.

dao

«scope»

The scope which is one of api (official API to be used by other layers or components), base (basic code to be reused by other implementations) and impl (implementation that should never be imported from outside). This segment was initially mandatory but due to trends such as microservices, lean, and agile we decided to make it optional and do not force anybody to use it.

api

+
+

Please note that devon4j library modules for spring use com.devonfw.module as «root» and the name of the module as «component». E.g. the API of our beanmapping module can be found in the package com.devonfw.module.beanmapping.common.api.

+
+
+
+

Code Tasks

+
+

Code spots that need some rework can be marked with the following task tags. These are already properly pre-configured in your development environment for auto completion and to view tasks you are responsible for. It is important to keep the number of code tasks low. Therefore, every member of the team should be responsible for the overall code quality. So if you change a piece of code and hit a code task that you can resolve in a reliable way, please do this as part of your change and remove the according tag.

+
+
+
TODO
+
+

Used to mark a piece of code that is not yet complete (typically because it can not be completed due to a dependency on something that is not ready).

+
+
+
+
 // TODO «author» «description»
+
+
+
+

A TODO tag is added by the author of the code who is also responsible for completing this task.

+
+
+
+
FIXME
+
+
+
 // FIXME «author» «description»
+
+
+
+

A FIXME tag is added by the author of the code or someone who found a bug he can not fix right now. The «author» who added the FIXME is also responsible for completing this task. This is very similar to a TODO but with a higher priority. FIXME tags indicate problems that should be resolved before a release is completed while TODO tags might have to stay for a longer time.

+
+
+
+
REVIEW
+
+
+
 // REVIEW «responsible» («reviewer») «description»
+
+
+
+

A REVIEW tag is added by a reviewer during a code review. Here the original author of the code is responsible to resolve the REVIEW tag and the reviewer is assigning this task to him. This is important for feedback and learning and has to be aligned with a review "process" where people talk to each other and get into discussion. In smaller or local teams a peer-review is preferable but this does not scale for large or even distributed teams.

+
+
+
+
+

Code-Documentation

+
+

As a general goal, the code should be easy to read and understand. Besides clear naming, the documentation is important. We follow these rules:

+
+
+
    +
  • +

    APIs (especially component interfaces) are properly documented with JavaDoc.

    +
  • +
  • +

    JavaDoc shall provide actual value - we do not write JavaDoc to satisfy tools such as checkstyle but to express information not already available in the signature.

    +
  • +
  • +

    We make use of {@link} tags in JavaDoc to make it more expressive.

    +
  • +
  • +

    JavaDoc of APIs describes how to use the type or method and not how the implementation internally works.

    +
  • +
  • +

    To document implementation details, we use code comments (e.g. // we have to flush explicitly to ensure version is up-to-date). This is only needed for complex logic.

    +
  • +
  • +

    Avoid the pointless {@inheritDoc} as since Java 1.5 there is the @Override annotation for overridden methods and your JavaDoc is inherited automatically even without any JavaDoc comment at all.

    +
  • +
+
+
+
+

Code-Style

+
+

This section gives you best practices to write better code and avoid pitfalls and mistakes.

+
+
+
BLOBs
+
+

Avoid using byte[] for BLOBs as this will load them entirely into your memory. This will cause performance issues or out of memory errors. Instead, use streams when dealing with BLOBs. For further details see BLOB support.

+
+
+
+
Stateless Programming
+
+

When implementing logic as components or beans of your container using dependency injection, we strongly encourage stateless programming. +This is not about data objects like an entity or transfer-object that are stateful by design. +Instead this applies to all classes annotated with @Named, @ApplicationScoped, @Stateless, etc. and all their super-classes. +These classes especially include your repositories, use-cases, and REST services. +Such classes shall never be modified after initialization. +Methods called at runtime (after initialization via the container) do not assign fields (member variables of your class) or mutate the object stored in a field. +This allows your component or bean to be stateless and thread-safe. +Therefore it can be initialized as a singleton so only one instance is created and shared across all threads of the application. +Here is an example:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcApproveContractImpl implements UcApproveContract {
+
+  // bad
+  private String contractOwner;
+
+  private MyState state;
+
+  @Override
+  public void approve(Contract contract) {
+    this.contractOwner = contract.getOwner();
+    this.contractOwner = this.contractOwner.toLowerCase(Locale.US);
+    this.state.setAdmin(this.contractOwner.endsWith("admin"));
+    if (this.state.isAdmin()) {
+      ...
+    } else {
+      ...
+    }
+  }
+
+  // fine
+  @Override
+  public void approveContract(Contract contract) {
+    String contractOwner = contract.getOwner().toLowerCase(Locale.US);
+    if (contractOwner.endsWith("admin")) {
+      ...
+    } else {
+      ...
+    }
+  }
+}
+
+
+
+

As you can see in the bad code, fields of the class are assigned when the method approve is called. +So multiple users and therefore threads calling this method concurrently can interfere and override this state causing side-effects on parallel threads. +This will lead to nasty bugs and errors that are hard to trace down. +They will not occur in simple tests but for sure in production with real users. +Therefore never do this and implement your functionality stateless. +That is keeping all state in local variables and strictly avoid modifying fields or their value as illustrated in the fine code. +If you find yourself passing many parameters between methods that all represent state, you can easily create a separate class that encapsulates this state. +However, then you need to create this state object in your method as local variable and pass it between methods as parameter:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcApproveContractImpl implements UcApproveContract {
+
+  // fine
+  @Override
+  public void approveContract(Contract contract) {
+    String contractOwner = contract.getOwner().toLowerCase(Locale.US);
+    MyState state = new MyState();
+    state.setAdmin(contractOwner.endsWith("admin"));
+    doApproveContract(contract, state);
+  }
+}
+
+
+
+
+
Closing Resources
+
+

Resources such as streams (InputStream, OutputStream, Reader, Writer) or transactions need to be handled properly. Therefore, it is important to follow these rules:

+
+
+
    +
  • +

    Each resource has to be closed properly, otherwise you will get out of file handles, TX sessions, memory leaks or the like

    +
  • +
  • +

    Where possible avoid to deal with such resources manually. That is why we are recommending @Transactional for transactions in devonfw (see Transaction Handling).

    +
  • +
  • +

    In case you have to deal with resources manually (e.g. binary streams) ensure to close them properly. See the example below for details.

    +
  • +
+
+
+

Closing streams and other such resources is error prone. Have a look at the following example:

+
+
+
+
// bad
+try {
+  InputStream in = new FileInputStream(file);
+  readData(in);
+  in.close();
+} catch (IOException e) {
+  throw new IllegalStateException("Failed to read data.", e);
+}
+
+
+
+

The code above is wrong as in case of an IOException the InputStream is not properly closed. In a server application such mistakes can cause severe errors that typically will only occur in production. As such resources implement the AutoCloseable interface you can use the try-with-resource syntax to write correct code. The following code shows a correct version of the example:

+
+
+
+
// fine
+try (InputStream in = new FileInputStream(file)) {
+  readData(in);
+} catch (IOException e) {
+  throw new IllegalStateException("Failed to read data.", e);
+}
+
+
+
+
+
Catching and handling Exceptions
+
+

When catching exceptions always ensure the following:

+
+
+
    +
  • +

    Never call printStackTrace() method on an exception

    +
  • +
  • +

    Either log or wrap and re-throw the entire caught exception. Be aware that the cause(s) of an exception is very valuable information. If you lose such information by improper exception-handling you may be unable to properly analyse production problems, which can cause severe issues.

    +
    +
      +
    • +

      If you wrap and re-throw an exception ensure that the caught exception is passed as cause to the newly created and thrown exception.

      +
    • +
    • +

      If you log an exception ensure that the entire exception is passed as argument to the logger (and not only the result of getMessage() or toString() on the exception).

      +
    • +
    +
    +
  • +
  • +

    See exception handling

    +
  • +
+
+
+
+
Lambdas and Streams
+
+

With Java8 you have cool new features like lambdas and monads like (Stream, CompletableFuture, Optional, etc.). +However, these new features can also be misused or lead to code that is hard to read or debug. To avoid pain, we give you the following best practices:

+
+
+
    +
  1. +

    Learn how to use the new features properly before using. Developers are often keen on using cool new features. When you do your first experiments in your project code you will cause deep pain and might be ashamed afterwards. Please study the features properly. Even Java8 experts still write for loops to iterate over collections, so only use these features where it really makes sense.

    +
  2. +
  3. +

    Streams shall only be used in fluent API calls as a Stream can not be forked or reused.

    +
  4. +
  5. +

    Each stream has to have exactly one terminal operation.

    +
  6. +
  7. +

    Do not write multiple statements into lambda code:

    +
    +
    +
    // bad
    +collection.stream().map(x -> {
    +Foo foo = doSomething(x);
    +...
    +return foo;
    +}).collect(Collectors.toList());
    +
    +
    +
    +

    This style makes the code hard to read and debug. Never do that! Instead, extract the lambda body to a private method with a meaningful name:

    +
    +
    +
    +
    // fine
    +collection.stream().map(this::convertToFoo).collect(Collectors.toList());
    +
    +
    +
  8. +
  9. +

    Do not use parallelStream() in general code (that will run on server side) unless you know exactly what you are doing and what is going on under the hood. Some developers might think that using parallel streams is a good idea as it will make the code faster. However, if you want to do performance optimizations talk to your technical lead (architect). Many features such as security and transactions will rely on contextual information that is associated with the current thread. Hence, using parallel streams will most probably cause serious bugs. Only use them for standalone (CLI) applications or for code that is just processing large amounts of data.

    +
  10. +
  11. +

    Do not perform operations on a sub-stream inside a lambda:

    +
    +
    +
    set.stream().flatMap(x -> x.getChildren().stream().filter(this::isSpecial)).collect(Collectors.toList()); // bad
    +set.stream().flatMap(x -> x.getChildren().stream()).filter(this::isSpecial).collect(Collectors.toList()); // fine
    +
    +
    +
  12. +
  13. +

    Only use collect at the end of the stream:

    +
    +
    +
    set.stream().collect(Collectors.toList()).forEach(...) // bad
    +set.stream().peek(...).collect(Collectors.toList()) // fine
    +
    +
    +
  14. +
  15. +

    Lambda parameters with Types inference

    +
    +
    +
    (String a, Float b, Byte[] c) -> a.toString() + Float.toString(b) + Arrays.toString(c)  // bad
    +(a,b,c)  -> a.toString() + Float.toString(b) + Arrays.toString(c)  // fine
    +
    +Collections.sort(personList, (Person p1, Person p2) -> p1.getSurName().compareTo(p2.getSurName()));  // bad
    +Collections.sort(personList, (p1, p2) -> p1.getSurName().compareTo(p2.getSurName()));  // fine
    +
    +
    +
  16. +
  17. +

    Avoid Return Braces and Statement

    +
    +
    +
     a ->  { return a.toString(); } // bad
    + a ->  a.toString();   // fine
    +
    +
    +
  18. +
  19. +

    Avoid Parentheses with Single Parameter

    +
    +
    +
    (a) -> a.toString(); // bad
    + a -> a.toString();  // fine
    +
    +
    +
  20. +
  21. +

    Avoid if/else inside foreach method. Use Filter method & comprehension

    +
    +
    +
    // bad
    +static public Iterator<String> TwitterHandles(Iterator<Author> authors, string company) {
    +    final List result = new ArrayList<String> ();
    +    foreach (Author a : authors) {
    +      if (a.Company.equals(company)) {
    +        String handle = a.TwitterHandle;
    +        if (handle != null)
    +          result.Add(handle);
    +      }
    +    }
    +    return result;
    +  }
    +
    +
    +
    +
    +
    // fine
    +public List<String> twitterHandles(List<Author> authors, String company) {
    +    return authors.stream()
    +            .filter(a -> null != a && a.getCompany().equals(company))
    +            .map(a -> a.getTwitterHandle())
    +            .collect(toList());
    +  }
    +
    +
    +
  22. +
+
+
+
+
Optionals
+
+

With Optional you can wrap values to avoid a NullPointerException (NPE). However, it is not a good code-style to use Optional for every parameter or result to express that it may be null. For such case use @Nullable or even better instead annotate @NotNull where null is not acceptable.

+
+
+

However, Optional can be used to prevent NPEs in fluent calls (due to the lack of the elvis operator):

+
+
+
+
Long id;
+id = fooCto.getBar().getBar().getId(); // may cause NPE
+id = Optional.ofNullable(fooCto).map(FooCto::getBar).map(BarCto::getBar).map(BarEto::getId).orElse(null); // null-safe
+
+
+
+
+
Encoding
+
+

Encoding (esp. Unicode with combining characters and surrogates) is a complex topic. Please study this topic if you have to deal with encodings and processing of special characters. For the basics follow these recommendations:

+
+
+
    +
  • +

    Whenever possible prefer unicode (UTF-8 or better) as encoding. This especially impacts your databases and has to be defined upfront as it typically can not be changed (easily) afterwards.

    +
  • +
  • +

    Do not cast from byte to char (unicode characters can be composed of multiple bytes, such cast may only work for ASCII characters)

    +
  • +
  • +

    Never convert the case of a String using the default locale (esp. when writing generic code like in devonfw). E.g. if you do "HI".toLowerCase() and your system locale is Turkish, then the output will be "hı" instead of "hi", which can lead to wrong assumptions and serious problems. If you want to do a "universal" case conversion always explicitly use an according western locale (e.g. toLowerCase(Locale.US)). Consider using a helper class (see e.g. CaseHelper) or create your own little static utility for that in your project.

    +
  • +
  • +

    Write your code independent from the default encoding (system property file.encoding) - this will most likely differ in JUnit from production environment

    +
    +
      +
    • +

      Always provide an encoding when you create a String from byte[]: new String(bytes, encoding)

      +
    • +
    • +

      Always provide an encoding when you create a Reader or Writer : new InputStreamReader(inStream, encoding)

      +
    • +
    +
    +
  • +
+
+
+
+
Prefer general API
+
+

Avoid unnecessary strong bindings:

+
+
+
    +
  • +

    Do not bind your code to implementations such as Vector or ArrayList instead of List

    +
  • +
  • +

    In APIs for input (=parameters) always consider to make little assumptions:

    +
    +
      +
    • +

      prefer Collection over List or Set where the difference does not matter (e.g. only use Set when you require uniqueness or highly efficient contains)

      +
    • +
    • +

      consider preferring Collection<? extends Foo> over Collection<Foo> when Foo is an interface or super-class

      +
    • +
    +
    +
  • +
+
+
+
+
Prefer primitive boolean
+
+

Unless in rare cases where you need to allow a flag being null, avoid using the object type Boolean.

+
+
+
+
// bad
+public Boolean isEmpty() {
+  return size() == 0;
+}
+
+
+
+

Instead always use the primitive boolean type:

+
+
+
+
// fine
+public boolean isEmpty() {
+  return size() == 0;
+}
+
+
+
+

The only known excuse is for flags in embeddable types due to limitations of hibernate.

+
+
+
+
+
+
+
+

Layers

+
+ +
+

==Client Layer

+
+
+

There are various technical approaches to building GUI clients. The devonfw proposes rich clients that connect to the server via data-oriented services (e.g. using REST with JSON). +In general, we have to distinguish among the following types of clients:

+
+
+
    +
  • +

    web clients

    +
  • +
  • +

    native desktop clients

    +
  • +
  • +

    (native) mobile clients

    +
  • +
+
+
+

Our main focus is on web-clients. In our sample application my-thai-star we offer a responsive web-client based on Angular following devon4ng that integrates seamlessly with the back ends of my-thai-star available for Java using devon4j as well as .NET/C# using devon4net. For building angular clients read the separate devon4ng guide.

+
+
+

JavaScript for Java Developers

+
+

In order to get started with client development as a Java developer we give you some hints to get started. Also if you are an experienced JavaScript developer and want to learn Java this can be helpful. First, you need to understand that the JavaScript ecosystem is as large as the Java ecosystem and developing a modern web client requires a lot of knowledge. The following table helps you as experienced developer to get an overview of the tools, configuration-files, and other related aspects from the new world to learn. Also it helps you to map concepts between the ecosystems. Please note that we list the tools recommended by devonfw here (and we know that there are alternatives not listed here such as gradle, grunt, bower, etc.).

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3. Aspects in JavaScript and Java ecosystem
TopicAspectJavaScriptJava

Programming

Language

TypeScript (extends JavaScript)

Java

Runtime

VM

nodejs (or web-browser)

jvm

Build- & Dependency-Management

Tool

npm or yarn

maven

Config

package.json

pom.xml

Repository

npm repo

maven central (repo search)

Build cmd

ng build or npm run build (goals are not standardized in npm)

mvn install (see lifecycle)

Test cmd

ng test

mvn test

Testing

Test-Tool

jasmine

junit

Test-Runner

karma

junit / surefire

E2E Testing

Protractor

Selenium

Code Analysis

Code Coverage

ng test --no-watch --code-coverage

JaCoCo

Development

IDE

MS VS Code or IntelliJ

Eclipse or IntelliJ

Framework

Angular (etc.)

Spring or Quarkus

+
+ +
+

==Service Layer

+
+
+

The service layer is responsible for exposing functionality made available by the logical layer to external consumers over a network via technical protocols.

+
+
+
+

Types of Services

+
+

Before you start creating your services you should consider some general design aspects:

+
+
+
    +
  • +

    Do you want to create a RPC service?

    +
  • +
  • +

    Or is your problem better addressed by messaging or eventing?

    +
  • +
  • +

    Who will consume your service?

    +
    +
      +
    • +

      Do you have one or multiple consumers?

      +
    • +
    • +

      Do web-browsers have to use your service?

      +
    • +
    • +

      Will apps from other vendors or parties have to consume your service that you can not influence if the service may have to change or be extended?

      +
    • +
    +
    +
  • +
+
+
+

For RPC a common choice is REST but there are also interesting alternatives like gRPC. We also have a guide for SOAP but this technology should rather be considered as legacy and is not recommended for new services.

+
+
+

When it comes to messaging in Java the typical answer will be JMS. However, a very promising alternative is Kafka.

+
+
+
+

Versioning

+
+

For RPC services consumed by other applications we use versioning to prevent incompatibilities between applications when deploying updates. This is done by the following conventions:

+
+
+
    +
  • +

    We define a version number and prefix it with v (e.g. v1).

    +
  • +
  • +

    If we support previous versions we use those version numbers as part of the Java package defining the service API (e.g. com.foo.application.component.service.api.v1)

    +
  • +
  • +

    We use the version number as part of the service name in the remote URL (e.g. https://application.foo.com/services/rest/component/v1/resource)

    +
  • +
  • +

    Whenever breaking changes are made to the API, create a separate version of the service and increment the version (e.g. v1 → v2). The implementations of the different versions of the service contain compatibility code and delegate to the same unversioned use-case of the logic layer whenever possible.

    +
  • +
  • +

    For maintenance and simplicity, avoid keeping more than one previous version.

    +
  • +
+
+
+
+

Interoperability

+
+

For services that are consumed by clients with different technology, interoperability is required. This is addressed by selecting the right protocol, following protocol-specific best practices and following our considerations especially simplicity.

+
+
+
+

Service Considerations

+
+

The term service is quite generic and therefore easily misunderstood. It is a unit exposing coherent functionality via a well-defined interface over a network. For the design of a service, we consider the following aspects:

+
+
+
    +
  • +

    self-contained
    +The entire API of the service shall be self-contained and have no dependencies on other parts of the application (other services, implementations, etc.).

    +
  • +
  • +

    idempotence
    +E.g. creation of the same master-data entity has no effect (no error)

    +
  • +
  • +

    loosely coupled
    +Service consumers have minimum knowledge and dependencies on the service provider.

    +
  • +
  • +

    normalized
    +Complete, no redundancy, minimal

    +
  • +
  • +

    coarse-grained
    +Service provides rather large operations (save entire entity or set of entities rather than individual attributes)

    +
  • +
  • +

    atomic
    +Process individual entities (for processing large sets of data, use a batch instead of a service)

    +
  • +
  • +

    simplicity
    +Avoid polymorphism, RPC methods with unique name per signature and no overloading, avoid attachments (consider separate download service), etc.

    +
  • +
+
+
+
+

Security

+
+

Your services are the major entry point to your application. Hence, security considerations are important here.

+
+
+

See REST Security.

+
+
+ +
+

==Logic Layer

+
+
+

The logic layer is the heart of the application and contains the main business logic. +According to our business architecture, we divide an application into components. +For each component, the logic layer defines different use-cases. Another approach is to define a component-facade, which we do not recommend for future applications. Especially for Quarkus applications, we want to simplify things and highly suggest omitting component-facade completely and using use-cases only. +It is very important that you follow the links to understand the concept of use-case in order to properly implement your business logic.

+
+
+
+

Responsibility

+
+

The logic layer is responsible for implementing the business logic according to the specified functional demands and requirements. +Therefore, it creates the actual value of the application. The logic layer is responsible for invoking business logic in external systems. +The following additional aspects are also included in its responsibility:

+
+
+ +
+
+
+

Security

+
+

The logic layer is the heart of the application. It is also responsible for authorization and hence security is important in this current case. Every method exposed in an interface needs to be annotated with an authorization check, stating what role(s) a caller must provide in order to be allowed to make the call. The authorization concept is described here.

+
+
+
Direct Object References
+
+

A security threat is posed by Insecure Direct Object References. This simply gives you two options:

+
+
+
    +
  • +

    avoid direct object references

    +
  • +
  • +

    ensure that direct object references are secure

    +
  • +
+
+
+

Especially when using REST, direct object references via technical IDs are common sense. This implies that you have a proper authorization in place. This is especially tricky when your authorization does not only rely on the type of the data and according to static permissions but also on the data itself. Vulnerabilities for this threat can easily happen by design flaws and inadvertence. Here is an example from our sample application:

+
+
+

We have a generic use-case to manage BLOBs. In the first place, it makes sense to write a generic REST service to load and save these BLOBs. However, the permission to read or even update such BLOB depends on the business object hosting the BLOB. Therefore, such a generic REST service would open the door for this OWASP A4 vulnerability. To solve this in a secure way, you need individual services for each hosting business object to manage the linked BLOB and have to check permissions based on the parent business object. In this example the ID of the BLOB would be the direct object reference and the ID of the business object (and a BLOB property indicator) would be the indirect object reference.

+
+ +
+

==Component Facade

+
+
+ + + + + +
+ + +Our recommended approach for implementing the logic layer is use-cases +
+
+
+

For each component of the application, the logic layer defines a component facade. +This is an interface defining all business operations of the component. +It carries the name of the component («Component») and has an implementation named «Component»Impl (see implementation).

+
+
+
+
API
+
+

The component facade interface defines the logic API of the component and has to be business oriented. +This means that all parameters and return types of all methods from this API have to be business transfer-objects, datatypes (String, Integer, MyCustomerNumber, etc.), or collections of these. +The API may also only access objects of other business components listed in the (transitive) dependencies of the business-architecture.

+
+
+

Here is an example how such an API may look like:

+
+
+
+
public interface Bookingmanagement {
+
+  BookingEto findBooking(Long id);
+
+  BookingCto findBookingCto(Long id);
+
+  Page<BookingEto> findBookingEtos(BookingSearchCriteriaTo criteria);
+
+  void approveBooking(BookingEto booking);
+
+}
+
+
+
+
+
Implementation
+
+

The implementation of an interface from the logic layer (a component facade or a use-case) carries the name of that interface with the suffix Impl and is annotated with @Named. +An implementation typically needs access to the persistent data. +This is done by injecting the corresponding repository (or DAO). +According to data-sovereignty, only repositories of the same business component may be accessed directly. +For accessing data from other components the implementation has to use the corresponding API of the logic layer (the component facade). Further, it shall not expose persistent entities from the domain layer and has to map them to transfer objects using the bean-mapper.

+
+
+
+
@Named
+@Transactional
+public class BookingmanagementImpl extends AbstractComponentFacade implements Bookingmanagement {
+
+  @Inject
+  private BookingRepository bookingRepository;
+
+  @Override
+  public BookingEto findBooking(Long id) {
+
+    LOG.debug("Get Booking with id {} from database.", id);
+    BookingEntity entity = this.bookingRepository.findOne(id);
+    return getBeanMapper().map(entity, BookingEto.class);
+  }
+}
+
+
+
+

As you can see, entities (BookingEntity) are mapped to corresponding ETOs (BookingEto). +Further details about this can be found in bean-mapping.

+
+ +
+

==UseCase +A use-case is a small unit of the logic layer responsible for an operation on a particular entity (business object). +We leave it up to you to decide whether you want to define an interface (API) for each use-case or provide an implementation directly.

+
+
+

Following our architecture-mapping (for classic and modern projects), use-cases are named Uc«Operation»«BusinessObject»[Impl]. The prefix Uc stands for use-case and allows to easily find and identify them in your IDE. The «Operation» stands for a verb that is operated on the entity identified by «BusinessObject». +For CRUD we use the standard operations Find and Manage that can be generated by CobiGen. This also separates read and write operations (e.g. if you want to do CQRS, or to configure read-only transactions for read operations).

+
+
+

In our example, we choose to define an interface for each use-case. We also use *To to refer to any type of transfer object. Please follow our guide to understand more about different types of transfer object e.g. Eto, Dto, Cto

+
+
+
+
Find
+
+

The UcFind«BusinessObject» defines all read operations to retrieve and search the «BusinessObject». +Here is an example:

+
+
+
+
public interface UcFindBooking {
+  //*To = Eto, Dto or Cto
+  Booking*To findBooking(Long id);
+}
+
+
+
+
+
Manage
+
+

The UcManage«BusinessObject» defines all CRUD write operations (create, update and delete) for the «BusinessObject». +Here is an example:

+
+
+
+
public interface UcManageBooking {
+
+  //*To = Eto, Dto or Cto
+  Booking*To saveBooking(Booking*To booking);
+
+  void deleteBooking(Long id);
+
+}
+
+
+
+
+
Custom
+
+

Any other non CRUD operation Uc«Operation»«BusinessObject» uses any other custom verb for «Operation». +Typically, such custom use-cases only define a single method. +Here is an example:

+
+
+
+
public interface UcApproveBooking {
+
+  //*To = Eto, Dto or Cto
+  void approveBooking(Booking*To booking);
+
+}
+
+
+
+
+
Implementation
+
+

The implementation should carry its own name and the suffix Impl and is annotated with @Named and @ApplicationScoped. It will need access to the persistent data which is done by injecting the corresponding repository (or DAO). Furthermore, it shall not expose persistent entities from the data access layer and has to map them to transfer objects using the bean-mapper. Please refer to our bean mapping, transfer object and dependency injection documentation for more information. +Here is an example:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcManageBookingImpl implements UcManageBooking {
+
+  @Inject
+  private BookingRepository bookingRepository;
+
+  @Override
+  public void deleteBooking(Long id) {
+
+    LOG.debug("Delete Booking with id {} from database.", id);
+    this.bookingRepository.deleteById(id);
+  }
+}
+
+
+
+

The use-cases can then be injected directly into the service.

+
+
+
+
@Named("BookingmanagementRestService")
+@Validated
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+
+  @Inject
+  private UcFindBooking ucFindBooking;
+
+  @Inject
+  private UcManageBooking ucManageBooking;
+
+  @Inject
+  private UcApproveBooking ucApproveBooking;
+}
+
+
+
+
+
Internal use case
+
+

Sometimes, a component with multiple related entities and many use-cases needs to reuse business logic internally. +Of course, this can be exposed as an official use-case API but this will imply using transfer-objects (ETOs) instead of entities. In some cases, this is undesired e.g. for better performance to prevent unnecessary mapping of entire collections of entities. +In the first place, you should try to use abstract base implementations providing reusable methods the actual use-case implementations can inherit from. +If your business logic is even more complex and you have multiple aspects of business logic to share and reuse but also run into multi-inheritance issues, you may also just create use-cases that have their interface located in the impl scope package right next to the implementation (or you may just skip the interface). In such a case, you may define methods that directly take or return entity objects. +To avoid confusion with regular use-cases, we recommend to add the Internal suffix to the type name leading to Uc«Operation»«BusinessObject»Internal[Impl].

+
+
+ +
+

==Data-Access Layer

+
+
+

The data-access layer is responsible for all outgoing connections to access and process data. This is mainly about accessing data from a persistent data-store. External system could also be accessed from the data-access layer if they match this definition, e.g. a mongo-db via rest services.

+
+
+

Note: In the modern project structure, this layer is replaced by the domain layer.

+
+
+
+
+

Database

+
+

You need to make your choice for a database. Options are documented here.

+
+
+

The classical approach is to use a Relational Database Management System (RDMS). In such a case, we strongly recommend to follow our JPA Guide. Some NoSQL databases are supported by spring-data so you can consider the repository guide.

+
+
+ +
+

==Batch Layer

+
+
+

We understand batch processing as a bulk-oriented, non-interactive, typically long running execution of tasks. For simplicity, we use the term "batch" or "batch job" for such tasks in the following documentation.

+
+
+

devonfw uses Spring Batch as a batch framework.

+
+
+

This guide explains how Spring Batch is used in devonfw applications. It focuses on aspects which are special to devonfw. If you want to learn about spring-batch you should adhere to springs references documentation.

+
+
+

There is an example of a simple batch implementation in the my-thai-star batch module.

+
+
+

In this chapter, we will describe the overall architecture (especially concerning layering) and how to administer batches.

+
+
+
+

Layering

+
+

Batches are implemented in the batch layer. The batch layer is responsible for batch processes, whereas the business logic is implemented in the logic layer. Compared to the service layer, you may understand the batch layer just as a different way of accessing the business logic. +From a component point of view, each batch is implemented as a subcomponent in the corresponding business component. +The business component is defined by the business architecture.

+
+
+

Let’s make an example for that. The sample application implements a batch for exporting ingredients. This ingredientExportJob belongs to the dishmanagement business component. +So the ingredientExportJob is implemented in the following package:

+
+
+
+
<basepackage>.dishmanagement.batch.impl.*
+
+
+
+

Batches should invoke use cases in the logic layer for doing their work. +Only "batch specific" technical aspects should be implemented in the batch layer.

+
+
+
+
+

Example: +For a batch, which imports product data from a CSV file, this means that all code for actually reading and parsing the CSV input file is implemented in the batch layer. +The batch calls the use case "create product" in the logic layer for actually creating the products for each line read from the CSV input file.

+
+
+
+
+
Directly accessing data access layer
+
+

In practice, it is not always appropriate to create use cases for every bit of work a batch should do. Instead, the data access layer can be used directly. +An example for that is a typical batch for data retention which deletes out-of-time data. +Often deleting, out-dated data is done by invoking a single SQL statement. It is appropriate to implement that SQL in a Repository or DAO method and call this method directly from the batch. +But be careful: this pattern is a simplification which could lead to business logic cluttered in different layers, which reduces the maintainability of your application. +It is a typical design decision you have to make when designing your specific batches.

+
+
+
+
+

Project structure and packaging

+
+

Batches will be implemented in a separate Maven module to keep the application core free of batch dependencies. The batch module includes a dependency on the application core-module to allow the reuse of the use cases, DAOs etc. +Additionally the batch module has dependencies on the required spring batch jars:

+
+
+
+
  <dependencies>
+
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>mtsj-core</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter-batch</artifactId>
+    </dependency>
+
+  </dependencies>
+
+
+
+

To allow an easy start of the batches from the command line it is advised to create a bootified jar for the batch module by adding the following to the pom.xml of the batch module:

+
+
+
+
  <build>
+    <resources>
+      <resource>
+        <directory>src/main/resources</directory>
+        <filtering>true</filtering>
+      </resource>
+    </resources>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <configuration>
+          <excludes>
+            <exclude>config/application.properties</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+      <!-- Create bootified jar for batch execution via command line.
+           Your applications spring boot app is used as main-class.
+       -->
+      <plugin>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-maven-plugin</artifactId>
+        <configuration>
+          <mainClass>com.devonfw.application.mtsj.SpringBootApp</mainClass>
+          <classifier>bootified</classifier>
+        </configuration>
+        <executions>
+          <execution>
+            <goals>
+              <goal>repackage</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+
+
+
+

Implementation

+
+

Most of the details about implementation of batches is described in the spring batch documentation. +There is nothing special about implementing batches in devonfw. You will find an easy example in my-thai-star.

+
+
+
+

Starting from command line

+
+

Devonfw advises to start batches via command line. This is most common to many ops teams and allows easy integration in existing schedulers. In general batches are started with the following command:

+
+
+
+
java -jar <app>-batch-<version>-bootified.jar --spring.main.web-application-type=none --spring.batch.job.enabled=true --spring.batch.job.names=<myJob> <params>
+
+
+ ++++ + + + + + + + + + + + + + + + + + + + + +
ParameterExplanation

--spring.main.web-application-type=none

This disables the web app (e.g. Tomcat)

--spring.batch.job.names=<myJob>

This specifies the name of the job to run. If you leave this out, ALL jobs will be executed, which probably does not make too much sense.

<params>

(Optional) additional parameters which are passed to your job

+
+

This will launch your normal Spring Boot app, disable the web application part and run the designated job via Spring Boot's org.springframework.boot.autoconfigure.batch.JobLauncherCommandLineRunner.

+
+
+
+

Scheduling

+
+

In real world scheduling of batches is not as simple as it first might look like.

+
+
+
    +
  • +

    Multiple batches have to be executed in order to achieve complex tasks. If one of those batches fails the further execution has to be stopped and operations should be notified for example.

    +
  • +
  • +

    Input files or those created by batches have to be copied from one node to another.

    +
  • +
  • +

    Scheduling batch executing could get complex easily (quarterly jobs, run job on first workday of a month, …​)

    +
  • +
+
+
+

For devonfw we propose the batches themselves should not mess around with details of scheduling. +Likewise your application should not do so. This complexity should be externalized to a dedicated batch administration service or scheduler. +This service could be a complex product or a simple tool like cron. We propose Rundeck as an open source job scheduler.

+
+
+

This gives full control to operations to choose the solution which fits best into existing administration procedures.

+
+
+
+

Handling restarts

+
+

If you start a job with the same parameter set after a failed run (BatchStatus.FAILED), a restart will occur. +In many cases your batch should then not reprocess all items it processed in the previous runs. +For that you need some logic to start at the desired offset. There are different ways to implement such logic:

+
+
+
    +
  • +

    Marking processed items in the database in a dedicated column

    +
  • +
  • +

    Write all IDs of items to process in a separate table as an initialization step of your batch. You can then delete IDs of already processed items from that table during the batch execution.

    +
  • +
  • +

    Storing restart information in springs ExecutionContext (see below)

    +
  • +
+
+
+
Using spring batch ExecutionContext for restarts
+
+

By implementing the ItemStream interface in your ItemReader or ItemWriter you may store information about the batch progress in the ExecutionContext. You will find an example for that in the CountJob in My Thai Star.

+
+
+

Additional hint: It is important that the bean definition methods of your ItemReader/ItemWriter return types implementing ItemStream (and not just ItemReader or ItemWriter alone). For that, the ItemStreamReader and ItemStreamWriter interfaces are provided.

+
+
+
+
+

Exit codes

+
+

Your batches should create a meaningful exit code to allow reaction to batch errors e.g. in a scheduler. +For that spring batch automatically registers an org.springframework.boot.autoconfigure.batch.JobExecutionExitCodeGenerator. To make this mechanism work your spring boot app main class has to propagate this exit code to the JVM:

+
+
+
+
@SpringBootApplication
+public class SpringBootApp {
+
+  public static void main(String[] args) {
+    if (Arrays.stream(args).anyMatch((String e) -> e.contains("--spring.batch.job.names"))) {
+      // if executing batch job, explicitly exit jvm to report error code from batch
+      System.exit(SpringApplication.exit(SpringApplication.run(SpringBootApp.class, args)));
+    } else {
+      // normal web application start
+      SpringApplication.run(SpringBootApp.class, args);
+    }
+  }
+}
+
+
+
+
+

Stop batches and manage batch status

+
+

Spring batch uses several database tables to store the status of batch executions. +Each execution may have different status. +You may use this mechanism to gracefully stop batches. +Additionally in some edge cases (batch process crashed) the execution status may be in an undesired state. +E.g. the state will be running, despite the process crashed sometime ago. +For that cases you have to change the status of the execution in the database.

+
+
+
CLI-Tool
+
+

Devonfw provides an easy-to-use CLI tool to manage the execution status of your jobs. +The tool is implemented in the devonfw module devon4j-batch-tool. It will provide a runnable jar, which may be used as follows:

+
+
+
+
List names of all previous executed jobs
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar jobs list

+
+
Stop job named 'countJob'
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar jobs stop countJob

+
+
Show help
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar

+
+
+
+
+

As you can see, each invocation includes the JDBC connection string to your database. +This means that you have to make sure that the corresponding DB driver is in the classpath (the prepared jar only contains H2).

+
+
+
+
+

Authentication

+
+

Most business applications incorporate authentication and authorization. +Your spring boot application will implement some kind of security, e.g. integrated login with username+password or in many cases authentication via an existing IAM. +For security reasons your batch should also implement an authentication mechanism and obey the authorization implemented in your application (e.g. via @RolesAllowed).

+
+
+

Since there are many different authentication mechanism we cannot provide an out-of-the-box solution in devonfw, but we describe a pattern how this can be implemented in devonfw batches.

+
+
+

We suggest to implement the authentication in a Spring Batch tasklet, which runs as the first step in your batch. This tasklet will do all of the work which is required to authenticate the batch. A simple example which authenticates the batch "locally" via username and password could be implemented like this:

+
+
+
+
@Named
+public class SimpleAuthenticationTasklet implements Tasklet {
+
+  @Override
+  public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
+
+    String username = chunkContext.getStepContext().getStepExecution().getJobParameters().getString("username");
+    String password = chunkContext.getStepContext().getStepExecution().getJobParameters().getString("password");
+    Authentication authentication = new UsernamePasswordAuthenticationToken(username, password);
+
+    SecurityContextHolder.getContext().setAuthentication(authentication);
+    return RepeatStatus.FINISHED;
+  }
+
+}
+
+
+
+

The username and password have to be supplied via two cli parameters -username and -password. This implementation creates an "authenticated" Authentication and sets it in the Spring Security context. This is just for demonstration; normally you should not provide passwords via the command line. The actual authentication will be done automatically via Spring Security as in your "normal" application. +If you have a more complex authentication mechanism in your application e.g. via OpenID connect just call this in the tasklet. Naturally you may read authentication parameters (e.g. secrets) from the command line or more securely from a configuration file.

+
+
+

In your Job Configuration set this tasklet as the first step:

+
+
+
+
@Configuration
+@EnableBatchProcessing
+public class BookingsExportBatchConfig {
+  @Inject
+  private JobBuilderFactory jobBuilderFactory;
+
+  @Inject
+  private StepBuilderFactory stepBuilderFactory;
+
+  @Bean
+  public Job myBatchJob() {
+    return this.jobBuilderFactory.get("myJob").start(myAuthenticationStep()).next(...).build();
+  }
+
+  @Bean
+  public Step myAuthenticationStep() {
+    return this.stepBuilderFactory.get("myAuthenticationStep").tasklet(myAuthenticatonTasklet()).build();
+  }
+
+  @Bean
+  public Tasklet myAuthenticatonTasklet() {
+    return new SimpleAuthenticationTasklet();
+  }
+...
+
+
+
+
+

Tips & tricks

+
+
Identifying job parameters
+
+

Spring uses a job's parameters to identify job executions. Parameters starting with "-" are not considered for identifying a job execution.

+
+
+
+
+
+
+

Guides

+
+ +
+

==Dependency Injection +Dependency injection is one of the most important design patterns and is a key principle to a modular and component based architecture. +The Java Standard for dependency injection is javax.inject (JSR330) that we use in combination with JSR250. +Additionally, for scoping you can use CDI (Context and Dependency Injection) from JSR365.

+
+
+

There are many frameworks which support this standard including all recent Java EE application servers. +Therefore in devonfw we rely on these open standards and can propagate patterns and code examples that work independent from the underlying frameworks.

+
+
+

Key Principles

+
+

Within dependency injection a bean is typically a reusable unit of your application providing an encapsulated functionality. +This bean can be injected into other beans and it should in general be replaceable. +As an example we can think of a use-case, a repository, etc. +As best practice we use the following principles:

+
+
+
    +
  • +

    Stateless implementation
    +By default such beans shall be implemented stateless. If you store state information in member variables you can easily run into concurrency problems and nasty bugs. This is easy to avoid by using local variables and separate state classes for complex state-information. Try to avoid stateful beans wherever possible. Only add state if you are fully aware of what you are doing and properly document this as a warning in your JavaDoc.

    +
  • +
  • +

    Usage of Java standards
    +We use common standards (see above) that makes our code portable. Therefore we use standardized annotations like @Inject (javax.inject.Inject) instead of proprietary annotations such as @Autowired. Generally we avoid proprietary annotations in business code (logic layer).

    +
  • +
  • +

    Simple injection-style
    +In general you can choose between constructor, setter or field injection. For simplicity we recommend to do private field injection as it is very compact and easy to maintain. We believe that constructor injection is bad for maintenance especially in case of inheritance (if you change the dependencies you need to refactor all sub-classes). Private field injection and public setter injection are very similar but setter injection is much more verbose (often you are even forced to have javadoc for all public methods). If you are writing re-usable library code setter injection will make sense as it is more flexible. In a business application you typically do not need that and can save a lot of boiler-plate code if you use private field injection instead. Nowadays you are using container infrastructure also for your tests (see testing) so there is no need to inject manually (what would require a public setter).

    +
  • +
  • +

    KISS
    +To follow the KISS (keep it small and simple) principle we avoid advanced features (e.g. custom AOP, non-singleton beans) and only use them where necessary.

    +
  • +
  • +

    Separation of API and implementation
    +For important components we should separate a self-contained API documented with JavaDoc from its implementation. Code from other components that wants to use the implementation shall only rely on the API. However, for things that will never be exchanged no API as interface is required you can skip such separation.

    +
  • +
+
+
+
+

Example Bean

+
+

Here you can see the implementation of an example bean using dependency injection:

+
+
+
+
@ApplicationScoped
+@Named("MyComponent")
+public class MyComponentImpl implements MyComponent {
+  @Inject
+  private MyOtherComponent myOtherComponent;
+
+  @PostConstruct
+  public void init() {
+    // initialization if required (otherwise omit this method)
+  }
+
+  @PreDestroy
+  public void dispose() {
+    // shutdown bean, free resources if required (otherwise omit this method)
+  }
+
+  ...
+}
+
+
+
+

Here MyComponentImpl depends on MyOtherComponent that is injected into the field myOtherComponent because of the @Inject annotation. +To make this work there must be exactly one bean in the container (e.g. spring or quarkus) that is an instance of MyOtherComponent. +In order to put a bean into the container, we can use @ApplicationScoped in case of CDI (required for quarkus) for a stateless bean. +In spring we can omit a CDI annotation and the @Named annotation is already sufficient as a bean is stateless by default in spring. +If we always use @ApplicationScoped we can make this more explicit and more portable across different frameworks. +So in our example we put MyComponentImpl into the container. +That bean will be called MyComponent as we specified in the @Named annotation but we can also omit the name to use the classname as fallback. +Now our bean can be injected into other beans using @Inject annotation either via MyComponent interface (recommended when interface is present) or even directly via MyComponentImpl. +In case you omit the interface, you should also omit the Impl suffix or instead use Bean as suffix.

+
+
+
+

Multiple bean implementations

+
+

In some cases you might have multiple implementations as beans for the same interface. +The following sub-sections handle the different scenarios to give you guidance.

+
+
+
Only one implementation in container
+
+

In some cases you still have only one implementation active as bean in the container at runtime. +A typical example is that you have different implementations for test and main usage. +This case is easy, as @Inject will always be unique. +The only thing you need to care about is how to configure your framework (spring, quarkus, etc.) to know which implementation to put in the container depending on specific configuration. +In spring this can be achieved via the proprietary @Profile annotation.

+
+
+
+
Injecting all of multiple implementations
+
+

In some situations you may have an interface that defines a kind of "plugin". +You can have multiple implementations in your container and want to have all of them injected. +Then you can request a list with all the bean implementations via the interface as in the following example:

+
+
+
+
  @Inject
+  private List<MyConverter> converters;
+
+
+
+

Your code may iterate over all plugins (converters) and apply them sequentially. +Please note that the injection will fail (at least in spring), when there is no bean available to inject. +So you do not get an empty list injected but will get an exception on startup.

+
+
+
+
Injecting one of multiple implementations
+
+

Another scenario is that you have multiple implementations in your container coexisting, but for injection you may want to choose a specific implementation. +Here you could use the @Named annotation to specify a unique identifier for each implementation what is called qualified injection:

+
+
+
+
@ApplicationScoped
+@Named("UserAuthenticator")
+public class UserAuthenticator implements Authenticator {
+  ...
+}
+@ApplicationScoped
+@Named("ServiceAuthenticator")
+public class ServiceAuthenticator implements Authenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  @Named("UserAuthenticator")
+  private Authenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  @Named("ServiceAuthenticator")
+  private Authenticator authenticator;
+  ...
+}
+
+
+
+

However, we discovered that this pattern is not so great: +The identifiers in the @Named annotation are just strings that could easily break. +You could use constants instead but still this is not the best solution.

+
+
+

In the end you can very much simplify this by just directly injecting the implementation instead:

+
+
+
+
@ApplicationScoped
+public class UserAuthenticator implements Authenticator {
+  ...
+}
+@ApplicationScoped
+public class ServiceAuthenticator implements Authenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  private UserAuthenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  private ServiceAuthenticator authenticator;
+  ...
+}
+
+
+
+

In case you want to strictly decouple from implementations, you can still create dedicated interfaces:

+
+
+
+
public interface UserAuthenticator extends Authenticator {}
+@ApplicationScoped
+public class UserAuthenticatorImpl implements UserAuthenticator {
+  ...
+}
+public interface ServiceAuthenticator extends Authenticator {}
+@ApplicationScoped
+public class ServiceAuthenticatorImpl implements ServiceAuthenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  private UserAuthenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  private ServiceAuthenticator authenticator;
+  ...
+}
+
+
+
+

However, as you can see this is again introducing additional boiler-plate code. +While the principle to separate API and implementation and strictly decouple from implementation is valuable in general, +you should always consider KISS, lean, and agile in contrast and balance pros and cons instead of blindly following dogmas.

+
+
+
+
+

Imports

+
+

Here are the import statements for the most important annotations for dependency injection

+
+
+
+
import javax.inject.Inject;
+import javax.inject.Named;
+import javax.enterprise.context.ApplicationScoped;
+// import javax.enterprise.context.RequestScoped;
+// import javax.enterprise.context.SessionScoped;
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+
+
+
+
+

Dependencies

+
+

Please note that with Jakarta EE the dependencies have changed. +When you want to start with Jakarta EE you should use these dependencies to get the annotations for dependency injection:

+
+
+
+
<!-- Basic injection annotations (JSR-330) -->
+<dependency>
+  <groupId>jakarta.inject</groupId>
+  <artifactId>jakarta.inject-api</artifactId>
+</dependency>
+<!-- Basic lifecycle and security annotations (JSR-250)-->
+<dependency>
+  <groupId>jakarta.annotation</groupId>
+  <artifactId>jakarta.annotation-api</artifactId>
+</dependency>
+<!-- Context and dependency injection API (JSR-365) -->
+<dependency>
+  <groupId>jakarta.enterprise</groupId>
+  <artifactId>jakarta.enterprise.cdi-api</artifactId>
+</dependency>
+
+
+
+

Please note that with quarkus you will get them as transitive dependencies out of the box. +The above Jakarta EE dependencies replace these JEE dependencies:

+
+
+
+
<!-- Basic injection annotations (JSR-330) -->
+<dependency>
+  <groupId>javax.inject</groupId>
+  <artifactId>javax.inject</artifactId>
+</dependency>
+<!-- Basic lifecycle and security annotations (JSR-250)-->
+<dependency>
+  <groupId>javax.annotation</groupId>
+  <artifactId>javax.annotation-api</artifactId>
+</dependency>
+<!-- Context and dependency injection API (JSR-365) -->
+<dependency>
+  <groupId>jakarta.enterprise</groupId>
+  <artifactId>jakarta.enterprise.cdi-api</artifactId>
+</dependency>
+
+
+
+ +
+

==Configuration

+
+
+

An application needs to be configurable in order to allow internal setup (like CDI) but also to allow externalized configuration of a deployed package (e.g. integration into runtime environment). We rely on a comprehensive configuration approach following a "convention over configuration" pattern. This guide adds on to this by detailed instructions and best-practices how to deal with configurations.

+
+
+

In general we distinguish the following kinds of configuration that are explained in the following sections:

+
+
+ +
+
+
+

Internal Application Configuration

+
+

The application configuration contains all internal settings and wirings of the application (bean wiring, database mappings, etc.) and is maintained by the application developers at development time.

+
+
+

For more detail of Spring stack, see here

+
+
+
+

Externalized Configuration

+
+

Externalized configuration is a configuration that is provided separately to a deployment package and can be maintained undisturbed by re-deployments.

+
+
+
Environment Configuration
+
+

The environment configuration contains configuration parameters (typically port numbers, host names, passwords, logins, timeouts, certificates, etc.) specific for the different environments. These are under the control of the operators responsible for the application.

+
+
+

The environment configuration is maintained in application.properties files, defining various properties. +These properties are explained in the corresponding configuration sections of the guides for each topic:

+
+
+ +
+
+

Make sure your properties are thoroughly documented by providing a comment to each property. This inline documentation is most valuable for your operating department.

+
+
+

More about structuring your application.properties files can be read here for Spring.

+
+
+

For Quarkus, please refer to Quarkus Config Reference for more details.

+
+
+
+
Business Configuration
+
+

Often applications do not need business configuration. In case they do it should typically be editable by administrators via the GUI. The business configuration values should therefore be stored in the database in key/value pairs.

+
+
+

Therefore we suggest to create a dedicated table with (at least) the following columns:

+
+
+
    +
  • +

    ID

    +
  • +
  • +

    Property name

    +
  • +
  • +

    Property type (Boolean, Integer, String)

    +
  • +
  • +

    Property value

    +
  • +
  • +

    Description

    +
  • +
+
+
+

According to the entries in this table, an administrative GUI may show a generic form to modify business configuration. Boolean values should be shown as checkboxes, integer and string values as text fields. The values should be validated according to their type so an error is raised if you try to save a string in an integer property for example.

+
+
+

We recommend the following base layout for the hierarchical business configuration:

+
+
+

component.[subcomponent].[subcomponent].propertyname

+
+
+
+
+

Security

+
+

Often you need to have passwords (for databases, third-party services, etc.) as part of your configuration. These are typically environment specific (see above). However, with DevOps and continuous-deployment you might be tempted to commit such configurations into your version-control (e.g. git). Doing that with plain text passwords is a severe problem especially for production systems. Never do that! Instead we offer some suggestions how to deal with sensible configurations:

+
+
+
Password Encryption
+
+

A simple but reasonable approach is to configure the passwords encrypted with a master-password. The master-password should be a strong secret that is specific for each environment. It must never be committed to version-control.

+
+
+

For Spring, we use jasypt-spring-boot. For more details, see here

+
+
+

For Quarkus, see here

+
+
+
Is this Security by Obscurity?
+
+
    +
  • +

    Yes, from the point of view to protect the passwords on the target environment this is nothing but security by obscurity. If an attacker somehow got full access to the machine this will only cause him to spend some more time.

    +
  • +
  • +

    No, if someone only gets the configuration file. So all your developers might have access to the version-control where the config is stored. Others might have access to the software releases that include this configs. But without the master-password that should only be known to specific operators none else can decrypt the password (except with brute-force what will take a very long time, see jasypt for details).

    +
  • +
+
+ +
+

==Mapping configuration to your code

+
+
+

If you are using spring-boot as suggested by devon4j your application can be configured by application.properties file as described in configuration. +To get a single configuration option into your code for flexibility, you can use

+
+
+
+
@Value("${my.property.name}")
+private String myConfigurableField;
+
+
+
+

Now, in your application.properties you can add the property:

+
+
+
+
my.property.name=my-property-value
+
+
+
+

You may even use @Value("${my.property.name:my-default-value}") to make the property optional.

+
+
+
+
+
+

Naming conventions for configuration properties

+
+

As a best practice, your configuration properties should follow these naming conventions:

+
+
+
    +
  • +

    build the property-name as a path of segments separated by the dot character (.)

    +
  • +
  • +

    segments should get more specific from left to right

    +
  • +
  • +

    a property-name should either be a leaf value or a tree node (prefix of other property-names) but never both! So never have something like foo.bar=value and foo.bar.child=value2.

    +
  • +
  • +

    start with a segment namespace unique to your context or application

    +
  • +
  • +

    a good example would be «myapp».billing.service.email.sender for the sender address of billing service emails send by «myapp».

    +
  • +
+
+
+
+

Mapping advanced configuration

+
+

However, in many scenarios you will have features that require more than just one property. +Injecting those via @Value is not leading to good code quality. +Instead we create a class with the suffix ConfigProperties containing all configuration properties for our aspect that is annotated with @ConfigurationProperties:

+
+
+
+
@ConfigurationProperties(prefix = "myapp.billing.service")
+public class BillingServiceConfigProperties {
+
+  private final Email email = new Email();
+  private final Smtp smtp = new Smtp();
+
+  public Email getEmail() { return this.email; }
+  public Smtp getSmtp() { return this.smtp; }
+
+  public static class Email {
+
+    private String sender;
+    private String subject;
+
+    public String getSender() { return this.sender; }
+    public void setSender(String sender) { this.sender = sender; }
+    public String getSubject() { return this.subject; }
+    public void setSubject(String subject) { this.subject = subject; }
+  }
+
+  public static class Smtp {
+
+    private String host;
+    private int port = 25;
+
+    public String getHost() { return this.host; }
+    public void setHost(String host) { this.host = host; }
+    public int getPort() { return this.port; }
+    public void setPort(int port) { this.port = port; }
+  }
+
+}
+
+
+
+

Of course this is just an example to demonstrate this feature of spring-boot. +In order to send emails you would typically use the existing spring-email feature. +But as you can see this allows us to define and access our configuration in a very structured and comfortable way. +The annotation @ConfigurationProperties(prefix = "myapp.billing.service") will automatically map spring configuration properties starting with myapp.billing.service via the according getters and setters into our BillingServiceConfigProperties. +We can easily define defaults (e.g. 25 as default value for myapp.billing.service.smtp.port). +Also Email or Smtp could be top-level classes to be reused in multiple configurations. +Of course you would also add helpful JavaDoc comments to the getters and classes to document your configuration options. +Further to access this configuration, we can use standard dependency-injection:

+
+
+
+
@Inject
+private BillingServiceConfigProperties config;
+
+
+
+

For very generic cases you may also use Map<String, String> to map any kind of property in an untyped way. +An example for generic configuration from devon4j can be found in +ServiceConfigProperties.

+
+
+

For further details about this feature also consult Guide to @ConfigurationProperties in Spring Boot.

+
+
+
+

Generate configuration metadata

+
+

You should further add this dependency to your module containing the *ConfigProperties:

+
+
+
+
    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-configuration-processor</artifactId>
+      <optional>true</optional>
+    </dependency>
+
+
+
+

This will generate configuration metadata so projects using your code can benefit from autocompletion and getting your JavaDoc as tooltip when editing application.properties, which makes this approach very powerful. +For further details about this please read A Guide to Spring Boot Configuration Metadata.

+
+
+ +
+

==Java Persistence API

+
+
+

For mapping java objects to a relational database we use the Java Persistence API (JPA). +As JPA implementation we recommend to use Hibernate. For general documentation about JPA and Hibernate follow the links above as we will not replicate the documentation. Here you will only find guidelines and examples how we recommend to use it properly. The following examples show how to map the data of a database to an entity. As we use JPA we abstract from SQL here. However, you will still need a DDL script for your schema and during maintenance also database migrations. Please follow our SQL guide for such artifacts.

+
+
+
+

Entity

+
+

Entities are part of the persistence layer and contain the actual data. They are POJOs (Plain Old Java Objects) on which the relational data of a database is mapped and vice versa. The mapping is configured via JPA annotations (javax.persistence). Usually an entity class corresponds to a table of a database and a property to a column of that table. A persistent entity instance then represents a row of the database table.

+
+
+
A Simple Entity
+
+

The following listing shows a simple example:

+
+
+
+
@Entity
+@Table(name="TEXTMESSAGE")
+public class MessageEntity extends ApplicationPersistenceEntity implements Message {
+
+  private String text;
+
+  public String getText() {
+    return this.text;
+  }
+
+  public void setText(String text) {
+    this.text = text;
+  }
+ }
+
+
+
+

The @Entity annotation defines that instances of this class will be entities which can be stored in the database. The @Table annotation is optional and can be used to define the name of the corresponding table in the database. If it is not specified, the simple name of the entity class is used instead.

+
+
+

In order to specify how to map the attributes to columns we annotate the corresponding getter methods (technically, annotating private fields is also possible, but the approaches can not be mixed). +The @Id annotation specifies that a property should be used as primary key. +With the help of the @Column annotation it is possible to define the name of the column that an attribute is mapped to as well as other aspects such as nullable or unique. If no column name is specified, the name of the property is used as default.

+
+
+

Note that every entity class needs a constructor with public or protected visibility that does not have any arguments. Moreover, neither the class nor its getters and setters may be final.

+
+
+

Entities should be simple POJOs and not contain business logic.

+
+
+
+
Entities and Datatypes
+
+

Standard datatypes like Integer, BigDecimal, String, etc. are mapped automatically by JPA. Custom datatypes are mapped as serialized BLOB by default, which is typically undesired. +In order to map atomic custom datatypes (implementations of `SimpleDatatype`) we implement an AttributeConverter. Here is a simple example:

+
+
+
+
@Converter(autoApply = true)
+public class MoneyAttributeConverter implements AttributeConverter<Money, BigDecimal> {
+
+  public BigDecimal convertToDatabaseColumn(Money attribute) {
+    return attribute.getValue();
+  }
+
+  public Money convertToEntityAttribute(BigDecimal dbData) {
+    return new Money(dbData);
+  }
+}
+
+
+
+

The annotation @Converter is detected by the JPA vendor if the annotated class is in the packages to scan. Further, autoApply = true implies that the converter is automatically used for all properties of the handled datatype. Therefore all entities with properties of that datatype will automatically be mapped properly (in our example Money is mapped as BigDecimal).

+
+
+

In case you have a composite datatype that you need to map to multiple columns the JPA does not offer a real solution. As a workaround you can use a bean instead of a real datatype and declare it as @Embeddable. If you are using Hibernate you can implement CompositeUserType. Via the @TypeDef annotation it can be registered to Hibernate. If you want to annotate the CompositeUserType implementation itself you also need another annotation (e.g. MappedSuperclass, though not technically correct) so it is found by the scan.

+
+
+
Enumerations
+
+

By default JPA maps Enums via their ordinal. Therefore the database will only contain the ordinals (0, 1, 2, etc.). So, inside the database you can not easily understand their meaning. Using @Enumerated with EnumType.STRING allows to map the enum values to their name (Enum.name()). Both approaches are fragile when it comes to code changes and refactoring (if you change the order of the enum values or rename them) after the application is deployed to production. If you want to avoid this and get a robust mapping you can define a dedicated string in each enum value for database representation that you keep untouched. Then you treat the enum just like any other custom datatype.

+
+
+
+
BLOB
+
+

If binary or character large objects (BLOB/CLOB) should be used to store the value of an attribute, e.g. to store an icon, the @Lob annotation should be used as shown in the following listing:

+
+
+
+
@Lob
+public byte[] getIcon() {
+  return this.icon;
+}
+
+
+
+ + + + + +
+ + +Using a byte array will cause problems if BLOBs get large because the entire BLOB is loaded into the RAM of the server and has to be processed by the garbage collector. For larger BLOBs the type Blob and streaming should be used. +
+
+
+
+
public Blob getAttachment() {
+  return this.attachment;
+}
+
+
+
+
+
Date and Time
+
+

To store date and time related values, the temporal annotation can be used as shown in the listing below:

+
+
+
+
@Temporal(TemporalType.TIMESTAMP)
+public java.util.Date getStart() {
+  return start;
+}
+
+
+
+

Until Java8 the java data type java.util.Date (or Jodatime) has to be used. +TemporalType defines the granularity. In this case, a precision of nanoseconds is used. If this granularity is not wanted, TemporalType.DATE can be used instead, which only has a granularity of milliseconds. +Mixing these two granularities can cause problems when comparing one value to another. This is why we only use TemporalType.TIMESTAMP.

+
+
+
+
QueryDSL and Custom Types
+
+

Using the Aliases API of QueryDSL might result in an InvalidDataAccessApiUsageException when using custom datatypes in entity properties. This can be circumvented in two steps:

+
+
+
    +
  1. +

    Ensure you have the following maven dependencies in your project (core module) to support custom types via the Aliases API:

    +
    +
    +
    <dependency>
    +  <groupId>org.ow2.asm</groupId>
    +  <artifactId>asm</artifactId>
    +</dependency>
    +<dependency>
    +  <groupId>cglib</groupId>
    +  <artifactId>cglib</artifactId>
    +</dependency>
    +
    +
    +
  2. +
  3. +

    Make sure, that all your custom types used in entities provide a non-argument constructor with at least visibility level protected.

    +
  4. +
+
+
+
+
+
Primary Keys
+
+

We only use simple Long values as primary keys (IDs). +By default it is auto generated (@GeneratedValue(strategy=GenerationType.AUTO)). +This is already provided by the class com.devonfw.<projectName>.general.dataaccess.api.AbstractPersistenceEntity within the classic project structure respectively com.devonfw.<projectName>.general.domain.model.AbstractPersistenceEntity within the modern project structure, that you can extend.

+
+
+

The reason for this recommendation is simply because using a number (Long) is the most efficient representation for the database. +You may also consider to use other types like String or UUID or even composite custom datatypes and this is technically possible. +However, please consider that the primary key is used to lookup the row from the database table, also in foreign keys and thus in JOINs. +Please note that your project sooner or later may reach some complexity where performance really matters. +Working on big data and performing JOINs when using types such as String (VARCHAR[2]) as primary and foreign keys will kill your performance. +You are still free to make a different choice and devonfw only gives recommendations but does not want to dictate what to do. +However, you have been warned about the consequences. +If you are well aware of what you are doing, you can still use different types of primary keys. +In such case, create your own entity not extending AbstractPersistenceEntity or create your own copy of AbstractPersistenceEntity with a different name and a different type of primary key.

+
+
+

In case you have business oriented keys (often as String), you can define an additional property for it and declare it as unique (@Column(unique=true)). +Be sure to include "AUTO_INCREMENT" in your sql table field ID to be able to persist data (or similar for other databases).

+
+
+
+
+

Relationships

+
+
n:1 and 1:1 Relationships
+
+

Entities often do not exist independently but are in some relation to each other. For example, for every period of time one of the StaffMembers of the restaurant example has worked, which is represented by the class WorkingTime, there is a relationship to this StaffMember.

+
+
+

The following listing shows how this can be modeled using JPA:

+
+
+
+
...
+
+@Entity
+public class WorkingTimeEntity {
+   ...
+
+   private StaffMemberEntity staffMember;
+
+   @ManyToOne
+   @JoinColumn(name="STAFFMEMBER")
+   public StaffMemberEntity getStaffMember() {
+      return this.staffMember;
+   }
+
+   public void setStaffMember(StaffMemberEntity staffMember) {
+      this.staffMember = staffMember;
+   }
+}
+
+
+
+

To represent the relationship, an attribute of the type of the corresponding entity class that is referenced has been introduced. The relationship is a n:1 relationship, because every WorkingTime belongs to exactly one StaffMember, but a StaffMember usually worked more often than once.
+This is why the @ManyToOne annotation is used here. For 1:1 relationships the @OneToOne annotation can be used which works basically the same way. To be able to save information about the relation in the database, an additional column in the corresponding table of WorkingTime is needed which contains the primary key of the referenced StaffMember. With the name element of the @JoinColumn annotation it is possible to specify the name of this column.

+
+
+
+
1:n and n:m Relationships
+
+

The relationship of the example listed above is currently a unidirectional one, as there is a getter method for retrieving the StaffMember from the WorkingTime object, but not vice versa.

+
+
+

To make it a bidirectional one, the following code has to be added to StaffMember:

+
+
+
+
  private Set<WorkingTimeEntity> workingTimes;
+
+  @OneToMany(mappedBy="staffMember")
+  public Set<WorkingTimeEntity> getWorkingTimes() {
+    return this.workingTimes;
+  }
+
+  public void setWorkingTimes(Set<WorkingTimeEntity> workingTimes) {
+    this.workingTimes = workingTimes;
+  }
+
+
+
+

To make the relationship bidirectional, the tables in the database do not have to be changed. Instead the column that corresponds to the attribute staffMember in class WorkingTime is used, which is specified by the mappedBy element of the @OneToMany annotation. Hibernate will search for corresponding WorkingTime objects automatically when a StaffMember is loaded.

+
+
+

The problem with bidirectional relationships is that if a WorkingTime object is added to the set or list workingTimes in StaffMember, this does not have any effect in the database unless +the staffMember attribute of that WorkingTime object is set. That is why the devon4j advices not to use bidirectional relationships but to use queries instead. How to do this is shown here. If a bidirectional relationship should be used nevertheless, appropriate add and remove methods must be used.

+
+
+

For 1:n and n:m relations, devon4j demands that (unordered) Sets and no other collection types are used, as shown in the listing above. The only exception is when an ordering is really needed; then (sorted) lists can be used.
+For example, if WorkingTime objects should be sorted by their start time, this could be done like this:

+
+
+
+
  private List<WorkingTimeEntity> workingTimes;
+
+  @OneToMany(mappedBy = "staffMember")
+  @OrderBy("startTime asc")
+  public List<WorkingTimeEntity> getWorkingTimes() {
+    return this.workingTimes;
+  }
+
+  public void setWorkingTimes(List<WorkingTimeEntity> workingTimes) {
+    this.workingTimes = workingTimes;
+  }
+
+
+
+

The value of the @OrderBy annotation consists of an attribute name of the class followed by asc (ascending) or desc (descending).

+
+
+

To store information about a n:m relationship, a separate table has to be used, as one column cannot store several values (at least if the database schema is in first normal form).
+For example if one wanted to extend the example application so that all ingredients of one FoodDrink can be saved and to model the ingredients themselves as entities (e.g. to store additional information about them), this could be modeled as follows (extract of class FoodDrink):

+
+
+
+
  private Set<IngredientEntity> ingredients;
+
+  @ManyToMany()
+  @JoinTable
+  public Set<IngredientEntity> getIngredients() {
+    return this.ingredients;
+  }
+
+  public void setOrders(Set<IngredientEntity> ingredients) {
+    this.ingredients = ingredients;
+  }
+
+
+
+

Information about the relation is stored in a table called BILL_ORDER that has to have two columns, one for referencing the Bill, the other one for referencing the Order. Note that the @JoinTable annotation is not needed in this case because a separate table is the default solution here (same for n:m relations) unless there is a mappedBy element specified.

+
+
+

For 1:n relationships this solution has the disadvantage that more joins (in the database system) are needed to get a Bill with all the Orders it refers to. This might have a negative impact on performance so that the solution to store a reference to the Bill row/entity in the Order’s table is probably the better solution in most cases.

+
+
+

Note that bidirectional n:m relationships are not allowed for applications based on devon4j. Instead a third entity has to be introduced, which "represents" the relationship (it has two n:1 relationships).

+
+
+
+
Eager vs. Lazy Loading
+
+

Using JPA it is possible to use either lazy or eager loading. Eager loading means that for entities retrieved from the database, other entities that are referenced by these entities are also retrieved, whereas lazy loading means that this is only done when they are actually needed, i.e. when the corresponding getter method is invoked.

+
+
+

Application based on devon4j are strongly advised to always use lazy loading. The JPA defaults are:

+
+
+
    +
  • +

    @OneToMany: LAZY

    +
  • +
  • +

    @ManyToMany: LAZY

    +
  • +
  • +

    @ManyToOne: EAGER

    +
  • +
  • +

    @OneToOne: EAGER

    +
  • +
+
+
+

So at least for @ManyToOne and @OneToOne you always need to override the default by providing fetch = FetchType.LAZY.

+
+
+ + + + + +
+ + +Please read the performance guide. +
+
+
+
+
Cascading Relationships
+
+

For relations it is also possible to define whether operations are cascaded (like a recursion) to the related entity. +By default, nothing is done in these situations. This can be changed by using the cascade property of the annotation that specifies the relation type (@OneToOne, @ManyToOne, @OneToMany, @ManyToMany). This property accepts a CascadeType that offers the following options:

+
+
+
    +
  • +

    PERSIST (for EntityManager.persist, relevant for inserting transient entities into the DB)

    +
  • +
  • +

    REMOVE (for EntityManager.remove to delete entity from DB)

    +
  • +
  • +

    MERGE (for EntityManager.merge)

    +
  • +
  • +

    REFRESH (for EntityManager.refresh)

    +
  • +
  • +

    DETACH (for EntityManager.detach)

    +
  • +
  • +

    ALL (cascade all of the above operations)

    +
  • +
+
+
+

See here for more information.

+
+
+
+
Typesafe Foreign Keys using IdRef
+
+

For simple usage you can use Long for all your foreign keys. +However, as an optional pattern for advanced and type-safe usage, we offer IdRef.

+
+
+
+
+

Embeddable

+
+

An embeddable Object is a way to group properties of an entity into a separate Java (child) object. Unlike with relationships, the embeddable is not a separate entity and its properties are stored (embedded) in the same table together with the entity. This is helpful to structure and reuse groups of properties.

+
+
+

The following example shows an Address implemented as an embeddable class:

+
+
+
+
@Embeddable
+public class AddressEmbeddable {
+
+  private String street;
+  private String number;
+  private Integer zipCode;
+  private String city;
+
+  @Column(name="STREETNUMBER")
+  public String getNumber() {
+    return number;
+  }
+
+  public void setNumber(String number) {
+    this.number = number;
+  }
+
+  ...  // other getter and setter methods, equals, hashCode
+}
+
+
+
+

As you can see an embeddable is similar to an entity class, but with an @Embeddable annotation instead of the @Entity annotation and without primary key or modification counter. +An Embeddable does not exist on its own but in the context of an entity. +As a simplification Embeddables do not require a separate interface and ETO as the bean-mapper will create a copy automatically when converting the owning entity to an ETO. +However, in this case the embeddable becomes part of your api module that therefore needs a dependency on the JPA.

+
+
+

In addition to that the methods equals(Object) and hashCode() need to be implemented as this is required by Hibernate (it is not required for entities because they can be unambiguously identified by their primary key). For some hints on how to implement the hashCode() method please have a look here.

+
+
+

Using this AddressEmbeddable inside an entity class can be done like this:

+
+
+
+
  private AddressEmbeddable address;
+
+  @Embedded
+  public AddressEmbeddable getAddress() {
+    return this.address;
+  }
+
+  public void setAddress(AddressEmbeddable address) {
+    this.address = address;
+  }
+}
+
+
+
+

The @Embedded annotation needs to be used for embedded attributes. Note that if in all columns of the embeddable (here Address) are null, then the embeddable object itself is also null inside the entity. This has to be considered to avoid NullPointerException’s. Further this causes some issues with primitive types in embeddable classes that can be avoided by only using object types instead.

+
+
+
+

Inheritance

+
+

Just like normal java classes, entity classes can inherit from others. The only difference is that you need to specify how to map a class hierarchy to database tables. Generic abstract super-classes for entities can simply be annotated with @MappedSuperclass.

+
+
+

For all other cases the JPA offers the annotation @Inheritance with the property strategy taking an InheritanceType that has the following options:

+
+
+
+
+
    +
  • +

    SINGLE_TABLE: This strategy uses a single table that contains all columns needed to store all entity-types of the entire inheritance hierarchy. If a column is not needed for an entity because of its type, there is a null value in this column. An additional column is introduced, which denotes the type of the entity (called dtype).

    +
  • +
  • +

    TABLE_PER_CLASS: For each concrete entity class there is a table in the database that can store such an entity with all its attributes. An entity is only saved in the table corresponding to its most concrete type. To get all entities of a super type, joins are needed.

    +
  • +
  • +

    JOINED: In this case there is a table for every entity class including abstract classes, which contains only the columns for the persistent properties of that particular class. Additionally there is a primary key column in every table. To get an entity of a class that is a subclass of another one, joins are needed.

    +
  • +
+
+
+
+
+

Each of the three approaches has its advantages and drawbacks, which are discussed in detail here. In most cases, the first one should be used, because it is usually the fastest way to do the mapping, as no joins are needed when retrieving, searching or persisting entities. Moreover it is rather simple and easy to understand. +One major disadvantage is that the first approach could lead to a table with a lot of null values, which might have a negative impact on the database size.

+
+
+

The inheritance strategy has to be annotated to the top-most entity of the class hierarchy (where @MappedSuperclass classes are not considered) like in the following example:

+
+
+
+
@Entity
+@Inheritance(strategy=InheritanceType.SINGLE_TABLE)
+public abstract class MyParentEntity extends ApplicationPersistenceEntity implements MyParent {
+  ...
+}
+
+@Entity
+public class MyChildEntity extends MyParentEntity implements MyChild {
+  ...
+}
+
+@Entity
+public class MyOtherEntity extends MyParentEntity implements MyChild {
+  ...
+}
+
+
+
+

As a best practice we advise you to avoid entity hierarchies at all where possible and otherwise to keep the hierarchy as small as possible. In order to just ensure reuse or establish a common API you can consider a shared interface, a @MappedSuperclass or an @Embeddable instead of an entity hierarchy.

+
+
+
+

Repositories and DAOs

+
+

For each entity a code unit is created that groups all database operations for that entity. We recommend to use spring-data repositories for that as it is most efficient for developers. As an alternative there is still the classic approach using DAOs.

+
+
+
Concurrency Control
+
+

The concurrency control defines the way concurrent access to the same data of a database is handled. When several users (or threads of application servers) concurrently access a database, anomalies may happen, e.g. a transaction is able to see changes from another transaction although that one did not yet commit these changes. Most of these anomalies are automatically prevented by the database system, depending on the isolation level (property hibernate.connection.isolation in the jpa.xml, see here, or quarkus.datasource.jdbc.transaction-isolation-level in the application.properties).

+
+
+

Another anomaly is when two stakeholders concurrently access a record, do some changes and write them back to the database. The JPA addresses this with different locking strategies (see here).

+
+
+

As a best practice we are using optimistic locking for regular end-user services (OLTP) and pessimistic locking for batches.

+
+
+
+
Optimistic Locking
+
+

The class com.devonfw.module.jpa.persistence.api.AbstractPersistenceEntity already provides optimistic locking via a modificationCounter with the @Version annotation. Therefore JPA takes care of optimistic locking for you. When entities are transferred to clients, modified and sent back for update you need to ensure the modificationCounter is part of the game. If you follow our guides about transfer-objects and services this will also work out of the box. +You only have to care about two things:

+
+
+
    +
  • +

    How to deal with optimistic locking in relationships?
    +Assume an entity A contains a collection of B entities. Should there be a locking conflict if one user modifies an instance of A while another user in parallel modifies an instance of B that is contained in the other instance? To address this, take a look at FeatureForceIncrementModificationCounter.

    +
  • +
  • +

    What should happen in the UI if an OptimisticLockException occurred?
    +According to KISS our recommendation is that the user gets an error displayed that tells him to do his change again on the recent data. Try to design your system and the work processing in a way to keep such conflicts rare and you are fine.

    +
  • +
+
+
+
+
Pessimistic Locking
+
+

For back-end services and especially for batches optimistic locking is not suitable. A human user shall not cause a large batch process to fail because he was editing the same entity. Therefore such use-cases use pessimistic locking what gives them a kind of priority over the human users. +In your DAO implementation you can provide methods that do pessimistic locking via EntityManager operations that take a LockModeType. Here is a simple example:

+
+
+
+
  getEntityManager().lock(entity, LockModeType.READ);
+
+
+
+

When using the lock(Object, LockModeType) method with LockModeType.READ, Hibernate will issue a SELECT …​ FOR UPDATE. This means that no one else can update the entity (see here for more information on the statement). If LockModeType.WRITE is specified, Hibernate issues a SELECT …​ FOR UPDATE NOWAIT instead, which has the same meaning as the statement above, but if there is already a lock, the program will not wait for this lock to be released. Instead, an exception is raised.
+Use one of the types if you want to modify the entity later on, for read only access no lock is required.

+
+
+

As you might have noticed, the behavior of Hibernate deviates from what one would expect by looking at the LockModeType (especially LockModeType.READ should not cause a SELECT …​ FOR UPDATE to be issued). The framework actually deviates from what is specified in the JPA for unknown reasons.

+
+
+
+
+

Database Auditing

+ +
+
+

Testing Data-Access

+
+

For testing of Entities and Repositories or DAOs see testing guide.

+
+
+
+

Principles

+
+

We strongly recommend these principles:

+
+
+
    +
  • +

    Use the JPA where ever possible and use vendor (hibernate) specific features only for situations when JPA does not provide a solution. In the latter case consider first if you really need the feature.

    +
  • +
  • +

    Create your entities as simple POJOs and use JPA to annotate the getters in order to define the mapping.

    +
  • +
  • +

    Keep your entities simple and avoid putting advanced logic into entity methods.

    +
  • +
+
+
+
+

Database Configuration

+
+

For details on the configuration of the database connection and database logging of the individual framework, please refer to the respective configuration guide.

+
+
+

For spring see here.

+
+
+

For quarkus see here.

+
+
+
Database Migration
+ +
+
+
Pooling
+
+

You typically want to pool JDBC connections to boost performance by recycling previous connections. There are many libraries available to do connection pooling. We recommend to use HikariCP. For Oracle RDBMS see here.

+
+
+
+
+

Security

+
+
SQL-Injection
+
+

A common security threat is SQL-injection. Never build queries with string concatenation or your code might be vulnerable as in the following example:

+
+
+
+
  String query = "Select op from OrderPosition op where op.comment = " + userInput;
+  return getEntityManager().createQuery(query).getResultList();
+
+
+
+

Via the parameter userInput an attacker can inject SQL (JPQL) and execute arbitrary statements in the database causing extreme damage.

+
+
+

In order to prevent such injections you have to strictly follow our rules for queries:

+
+
+ +
+
+
+
Limited Permissions for Application
+
+

We suggest that you operate your application with a database user that has limited permissions so he can not modify the SQL schema (e.g. drop tables). For initializing the schema (DDL) or to do schema migrations use a separate user that is not used by the application itself.

+
+ +
+

==Queries +The Java Persistence API (JPA) defines its own query language, the java persistence query language (JPQL) (see also JPQL tutorial), which is similar to SQL but operates on entities and their attributes instead of tables and columns.

+
+
+

The simplest CRUD-Queries (e.g. find an entity by its ID) are already built into the devonfw CRUD functionality (via Repository or DAO). For other cases you need to write your own query. We distinguish between static and dynamic queries. Static queries have a fixed JPQL query string that may only use parameters to customize the query at runtime. In contrast, dynamic queries can change their clauses (WHERE, ORDER BY, JOIN, etc.) at runtime depending on the given search criteria.

+
+
+
+
Static Queries
+
+

E.g. to find all DishEntries (from MTS sample app) that have a price not exceeding a given maxPrice we write the following JPQL query:

+
+
+
+
SELECT dish FROM DishEntity dish WHERE dish.price <= :maxPrice
+
+
+
+

Here dish is used as alias (variable name) for our selected DishEntity (what refers to the simple name of the Java entity class). With dish.price we are referring to the Java property price (getPrice()/setPrice(…​)) in DishEntity. A named variable provided from outside (the search criteria at runtime) is specified with a colon (:) as prefix. Here with :maxPrice we reference to a variable that needs to be set via query.setParameter("maxPrice", maxPriceValue). JPQL also supports indexed parameters (?) but they are discouraged because they easily cause confusion and mistakes.

+
+
+
Using Queries to Avoid Bidirectional Relationships
+
+

With the usage of queries it is possible to avoid exposing relationships or modelling bidirectional relationships, which have some disadvantages (see relationships). This is especially desired for relationships between entities of different business components. +So for example to get all OrderLineEntities for a specific OrderEntity without using the orderLines relation from OrderEntity the following query could be used:

+
+
+
+
SELECT line FROM OrderLineEntity line WHERE line.order.id = :orderId
+
+
+
+
+
+
Dynamic Queries
+
+

For dynamic queries, we use the JPA module for Querydsl. Querydsl also supports other modules such as MongoDB, and Apache Lucene. It allows to implement queries in a powerful but readable and type-safe way (unlike Criteria API). If you already know JPQL, you will quickly be able to read and write Querydsl code. It feels like JPQL but implemented in Java instead of plain text.

+
+
+

To use Querydsl in your Maven project, add the following dependencies:

+
+
+
+
<dependencies>
+
+    <dependency>
+        <groupId>com.querydsl</groupId>
+        <artifactId>querydsl-apt</artifactId>
+        <version>${querydsl.version}</version>
+        <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+        <groupId>com.querydsl</groupId>
+        <artifactId>querydsl-jpa</artifactId>
+        <version>${querydsl.version}</version>
+    </dependency>
+
+</dependencies>
+
+
+
+

Next, configure the annotation processing tool (APT) plugin:

+
+
+
+
<project>
+  <build>
+    <plugins>
+      ...
+      <plugin>
+        <groupId>com.mysema.maven</groupId>
+        <artifactId>apt-maven-plugin</artifactId>
+        <version>1.1.3</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>process</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>target/generated-sources/java</outputDirectory>
+              <processor>com.querydsl.apt.jpa.JPAAnnotationProcessor</processor>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      ...
+    </plugins>
+  </build>
+</project>
+
+
+
+

Here is an example from our sample application:

+
+
+
+
  public List<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    QDishEntity dish = QDishEntity.dishEntity;
+    JPAQuery<DishEntity> query = new JPAQuery<DishEntity>(getEntityManager());
+    query.from(dish);
+
+    Range<BigDecimal> priceRange = criteria.getPriceRange();
+    if (priceRange != null) {
+      BigDecimal min = priceRange.getMin();
+      if (min != null) {
+        query.where(dish.price.goe(min));
+      }
+      BigDecimal max = priceRange.getMax();
+      if (max != null) {
+        query.where(dish.price.loe(max));
+      }
+    }
+    String name = criteria.getName();
+    if ((name != null) && (!name.isEmpty())) {
+      query.where(dish.name.eq(name));
+    }
+    query.orderBy(dish.price.asc(), dish.name.asc());
+    return query.fetch();
+  }
+
+
+
+

In this example, we use the so called Q-types (QDishEntity). These are classes generated at build time by the Querydsl annotation processor from entity classes. The Q-type classes can be used as static types representative of the original entity class.

+
+
+

The query.from(dish) method call defines the query source, in this case the dish table. The where method defines a filter. For example, the first call uses the goe operator to filter out any dishes whose price is not greater than or equal to the minimal price. Further operators can be found here.

+
+
+

The orderBy method is used to sort the query results according to certain criteria. Here, we sort the results first by their price and then by their name, both in ascending order. To sort in descending order, use .desc(). To partition query results into groups of rows, see the groupBy method.

+
+
+

For spring, devon4j provides another approach that you can use for your Spring applications to implement Querydsl logic without having to use these metaclasses. An example can be found here.

+
+
+
+
Native Queries
+
+

Spring Data supports the use of native queries. Native queries use simple native SQL syntax that is not parsed in JPQL. This allows you to use all the features that your database supports. +The downside to this is that database portability is lost due to the absence of an abstraction layer. Therefore, the queries may not work with another database because it may use a different syntax.

+
+
+

You can implement a native query using @Query annotation with the nativeQuery attribute set to true:

+
+
+
+
@Query(value="...", nativeQuery=true)
+
+
+
+ + + + + +
+ + +This will not work with Quarkus because Quarkus does not support native queries by using the @Query annotation (see here). +
+
+
+

You can also implement native queries directly using the EntityManager API and the createNativeQuery method. +This approach also works with Quarkus.

+
+
+
+
Query query = entityManager.createNativeQuery("SELECT * FROM Product", ProductEntity.class);
+List<ProductEntity> products = query.getResultList();
+
+
+
+ + + + + +
+ + +Be sure to use the name of the table when using native queries, while you must use the entity name when implementing queries with JPQL. +
+
+
+
+
Using Wildcards
+
+

For flexible queries it is often required to allow wildcards (especially in dynamic queries). While users intuitively expect glob syntax, the SQL and JPQL standards work differently. Therefore, a mapping is required. devonfw provides this on a lower level with LikePatternSyntax and on a higher level with QueryUtil (see QueryHelper.newStringClause(…​)).

+
+
+
+
Pagination
+
+

When dealing with large amounts of data, an efficient method of retrieving the data is required. Fetching the entire data set each time would be too time consuming. Instead, Paging is used to process only small subsets of the entire data set.

+
+
+

If you are using Spring Data repositories you will get pagination support out of the box by providing the interfaces Page and Pageable:

+
+
+
repository
+
+
Page<DishEntity> findAll(Pageable pageable);
+
+
+
+

Then you can create a Pageable object and pass it to the method call as follows:

+
+
+
+
int page = criteria.getPageNumber();
+int size = criteria.getPageSize();
+Pageable pageable = PageRequest.of(page, size);
+Page<DishEntity> dishes = dishRepository.findAll(pageable);
+
+
+
+
Paging with Querydsl
+
+

Pagination is also supported for dynamic queries with Querydsl:

+
+
+
+
  public Page<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    QDishEntity dish = QDishEntity.dishEntity;
+    JPAQuery<DishEntity> query = new JPAQuery<DishEntity>(getEntityManager());
+    query.from(dish);
+
+    // conditions
+
+    int page = criteria.getPageNumber();
+    int size = criteria.getPageSize();
+    Pageable pageable = PageRequest.of(page, size);
+    query.offset(pageable.getOffset());
+    query.limit(pageable.getPageSize());
+
+    List<DishEntity> dishes = query.fetch();
+    return new PageImpl<>(dishes, pageable, dishes.size());
+  }
+
+
+
+
+
Pagination example
+
+

For the table entity we can make a search request by accessing the REST endpoint with pagination support like in the following examples:

+
+
+
+
POST mythaistar/services/rest/tablemanagement/v1/table/search
+{
+  "pagination": {
+    "size":2,
+    "total":true
+  }
+}
+
+//Response
+{
+    "pagination": {
+        "size": 2,
+        "page": 1,
+        "total": 11
+    },
+    "result": [
+        {
+            "id": 101,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 1,
+            "state": "OCCUPIED"
+        },
+        {
+            "id": 102,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 2,
+            "state": "FREE"
+        }
+    ]
+}
+
+
+
+ + + + + +
+ + +As we are requesting with the total property set to true the server responds with the total count of rows for the query. +
+
+
+

For retrieving a concrete page, we provide the page attribute with the desired value. Here we also left out the total property so the server doesn’t incur the effort of calculating it:

+
+
+
+
POST mythaistar/services/rest/tablemanagement/v1/table/search
+{
+  "pagination": {
+    "size":2,
+    "page":2
+  }
+}
+
+//Response
+
+{
+    "pagination": {
+        "size": 2,
+        "page": 2,
+        "total": null
+    },
+    "result": [
+        {
+            "id": 103,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 3,
+            "state": "FREE"
+        },
+        {
+            "id": 104,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 4,
+            "state": "FREE"
+        }
+    ]
+}
+
+
+
+
+
Pagination in devon4j-spring
+
+

For spring applications, devon4j also offers its own solution for pagination. You can find an example of this here.

+
+
+
+
+
Query Meta-Parameters
+
+

Queries can have meta-parameters that are provided via SearchCriteriaTo. Besides paging (see above) we also get timeout support.

+
+
+
+
Advanced Queries
+
+

Writing queries can sometimes get rather complex. The current examples given above only showed very simple basics. Within this topic a lot of advanced features need to be considered like:

+
+
+ +
+
+

This list is just containing the most important aspects. As we can not cover all these topics here, they are linked to external documentation that can help and guide you.

+
+ +
+

==Spring Data +Spring Data JPA is supported by both Spring and Quarkus. However, in Quarkus this approach still has some limitations. For detailed information, see the official Quarkus Spring Data guide.

+
+
+
+
Motivation
+
+

The benefits of Spring Data are (for examples and explanations see next sections):

+
+
+
    +
  • +

    All you need is one single repository interface for each entity. No need for a separate implementation or other code artifacts like XML descriptors, NamedQueries class, etc.

    +
  • +
  • +

    You have all information that actually belongs together in one place (the repository interface), whereas in the classic approach you have the static queries in an XML file, constants for them in a NamedQueries class, and referencing usages in DAO implementation classes.

    +
  • +
  • +

    Static queries are most simple to realize as you do not need to write any method body. This means you can develop faster.

    +
  • +
  • +

    Support for paging is already built-in. Again, for static query methods there is nothing you have to do except using the paging objects in the signature.

    +
  • +
  • +

    Still you have the freedom to write custom implementations via default methods within the repository interface (e.g. for dynamic queries).

    +
  • +
+
+
+
+
Dependency
+
+

In case you want to switch to or add Spring Data support to your Spring or Quarkus application, all you need is to add the respective maven dependency:

+
+
+
spring
+
+
<dependency>
+  <groupId>org.springframework.boot</groupId>
+  <artifactId>spring-boot-starter-data-jpa</artifactId>
+</dependency>
+
+
+
+
quarkus
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-spring-data-jpa</artifactId>
+</dependency>
+
+
+
+
+
Repository
+
+

For each entity «Entity»Entity an interface is created with the name «Entity»Repository extending JpaRepository. +Such repository is the analogy to a Data-Access-Object (DAO) used in the classic approach or when Spring Data is not an option.

+
+
+
Repository
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long> {
+
+}
+
+
+
+

The Spring Data repository provides some basic implementations for accessing data, e.g. returning all instances of a type (findAll) or returning an instance by its ID (findById).

+
+
+
+
Custom method implementation
+
+

In addition, repositories can be enriched with additional functionality, e.g. to add QueryDSL functionality or to override the default implementations, by using so called repository fragments:

+
+
+
Example
+
+

The following example shows how to write such a repository:

+
+
+
Repository
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long>, ProductFragment {
+
+  @Query("SELECT product FROM ProductEntity product" //
+      + " WHERE product.title = :title")
+  List<ProductEntity> findByTitle(@Param("title") String title);
+
+  @Query("SELECT product FROM ProductEntity product" //
+      + " WHERE product.title = :title")
+  Page<ProductEntity> findByTitlePaginated(@Param("title") String title, Pageable pageable);
+}
+
+
+
+
Repository fragment
+
+
public interface ProductFragment {
+  Page<ProductEntity> findByCriteria(ProductSearchCriteriaTo criteria);
+}
+
+
+
+
Fragment implementation
+
+
public class ProductFragmentImpl implements ProductFragment {
+  @Inject
+  EntityManager entityManager;
+
+  public Page<ProductEntity> findByCriteria(ProductSearchCriteriaTo criteria) {
+    QProductEntity product = QProductEntity.productEntity;
+    JPAQuery<ProductEntity> query = new JPAQuery<ProductEntity>(this.entityManager);
+    query.from(product);
+
+    String title = criteria.getTitle();
+    if ((title != null) && !title.isEmpty()) {
+      query.where(product.title.eq(title));
+    }
+
+    List<ProductEntity> products = query.fetch();
+    return new PageImpl<>(products, PageRequest.of(criteria.getPageNumber(), criteria.getPageSize()), products.size());
+  }
+}
+
+
+
+

This ProductRepository has the following features:

+
+
+
    +
  • +

    CRUD support from Spring Data (see JavaDoc for details).

    +
  • +
  • +

    Support for QueryDSL integration, paging and more.

    +
  • +
  • +

    A static query method findByTitle to find all ProductEntity instances from DB that have the given title. Please note the @Param annotation that links the method parameter with the variable inside the query (:title).

    +
  • +
  • +

    The same with pagination support via findByTitlePaginated method.

    +
  • +
  • +

    A dynamic query method findByCriteria showing the QueryDSL and paging integration into Spring via a fragment implementation.

    +
  • +
+
+
+

You can find an implementation of this ProductRepository in our Quarkus reference application.

+
+
+ + + + + +
+ + +In Quarkus, native and named queries via the @Query annotation are currently not supported +
+
+
+
+
Integration of Spring Data in devon4j-spring
+
+

For Spring applications, devon4j offers a proprietary solution that integrates seamlessly with QueryDSL and uses default methods instead of the fragment approach. A separate guide for this can be found here.

+
+
+
+
Custom methods without fragment approach
+
+

The fragment approach is a bit laborious, as three types (repository interface, fragment interface and fragment implementation) are always needed to implement custom methods. +We cannot simply use default methods within the repository because we cannot inject the EntityManager directly into the repository interface.

+
+
+

As a workaround, you can create a GenericRepository interface, as is done in the devon4j jpa-spring-data module.

+
+
+
+
public interface GenericRepository<E> {
+
+  EntityManager getEntityManager();
+
+  ...
+}
+
+
+
+
+
public class GenericRepositoryImpl<E> implements GenericRepository<E> {
+
+  @Inject
+  EntityManager entityManager;
+
+  @Override
+  public EntityManager getEntityManager() {
+
+    return this.entityManager;
+  }
+
+  ...
+}
+
+
+
+

Then, all your repository interfaces can extend the GenericRepository and you can implement queries directly in the repository interface using default methods:

+
+
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long>, GenericRepository<ProductEntity> {
+
+  default Page<ProductEntity> findByTitle(Title title) {
+
+    EntityManager entityManager = getEntityManager();
+    Query query = entityManager.createNativeQuery("select * from Product where title = :title", ProductEntity.class);
+    query.setParameter("title", title);
+    List<ProductEntity> products = query.getResultList();
+    return new PageImpl<>(products);
+  }
+
+  ...
+}
+
+
+
+
+
+
Drawbacks
+
+

Spring Data also has some drawbacks:

+
+
+
    +
  • +

    There is some kind of magic behind the scenes that is not so easy to understand. So in case you want to extend all your repositories without providing the implementation via a default method in a parent repository interface, you need to deep-dive into Spring Data. We assume that you do not need that and hope that what Spring Data and devonfw already provide out of the box is sufficient.

    +
  • +
  • +

    The Spring Data magic also includes guessing the query from the method name. This is not easy to understand and especially to debug. Our suggestion is not to use this feature at all and either provide a @Query annotation or an implementation via default method.

    +
  • +
+
+
+
+
Limitations in Quarkus
+
+
    +
  • +

    Native and named queries are not supported using @Query annotation. You will receive something like: Build step io.quarkus.spring.data.deployment.SpringDataJPAProcessor#build threw an exception: java.lang.IllegalArgumentException: Attribute nativeQuery of @Query is currently not supported

    +
  • +
  • +

    Customizing the base repository for all repository interfaces in the code base, which is done in Spring Data by registering a class that extends SimpleJpaRepository

    +
  • +
+
+ +
+

==Data Access Object

+
+
+

The Data Access Objects (DAOs) are part of the persistence layer. +They are responsible for a specific entity and should be named «Entity»Dao and «Entity»DaoImpl. +The DAO offers the so called CRUD-functionalities (create, retrieve, update, delete) for the corresponding entity. +Additionally a DAO may offer advanced operations such as query or locking methods.

+
+
+
+
DAO Interface
+
+

For each DAO there is an interface named «Entity»Dao that defines the API. For CRUD support and common naming we derive it from the ApplicationDao interface that comes with the devon application template:

+
+
+
+
public interface MyEntityDao extends ApplicationDao<MyEntity> {
+  List<MyEntity> findByCriteria(MyEntitySearchCriteria criteria);
+}
+
+
+
+

All CRUD operations are inherited from ApplicationDao so you only have to declare the additional methods.

+
+
+
+
DAO Implementation
+
+

Implementing a DAO is quite simple. We create a class named «Entity»DaoImpl that extends ApplicationDaoImpl and implements your «Entity»Dao interface:

+
+
+
+
public class MyEntityDaoImpl extends ApplicationDaoImpl<MyEntity> implements MyEntityDao {
+
+  public List<MyEntity> findByCriteria(MyEntitySearchCriteria criteria) {
+    TypedQuery<MyEntity> query = createQuery(criteria, getEntityManager());
+    return query.getResultList();
+  }
+  ...
+}
+
+
+
+

Again you only need to implement the additional non-CRUD methods that you have declared in your «Entity»Dao interface. +In the DAO implementation you can use the method getEntityManager() to access the EntityManager from the JPA. You will need the EntityManager to create and execute queries.

+
+
+
Static queries for DAO Implementation
+
+

All static queries are declared in the file src\main\resources\META-INF\orm.xml:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<entity-mappings version="1.0" xmlns="http://java.sun.com/xml/ns/persistence/orm" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://java.sun.com/xml/ns/persistence/orm http://java.sun.com/xml/ns/persistence/orm_1_0.xsd">
+  <named-query name="find.dish.with.max.price">
+    <query><![CDATA[SELECT dish FROM DishEntity dish WHERE dish.price <= :maxPrice]]></query>
+  </named-query>
+  ...
+</entity-mappings>
+
+
+
+

When your application is started, all these static queries will be created as prepared statements. This allows better performance and also ensures that you get errors for invalid JPQL queries when you start your app rather than later when the query is used.

+
+
+

To avoid redundant occurrences of the query name (get.open.order.positions.for.order) we define a constant for each named query:

+
+
+
+
public class NamedQueries {
+  public static final String FIND_DISH_WITH_MAX_PRICE = "find.dish.with.max.price";
+}
+
+
+
+

Note that changing the name of the java constant (FIND_DISH_WITH_MAX_PRICE) can be done easily with refactoring. Further you can trace where the query is used by searching the references of the constant.

+
+
+

The following listing shows how to use this query:

+
+
+
+
public List<DishEntity> findDishByMaxPrice(BigDecimal maxPrice) {
+  Query query = getEntityManager().createNamedQuery(NamedQueries.FIND_DISH_WITH_MAX_PRICE);
+  query.setParameter("maxPrice", maxPrice);
+  return query.getResultList();
+}
+
+
+
+

Via EntityManager.createNamedQuery(String) we create an instance of Query for our predefined static query. +Next we use setParameter(String, Object) to provide a parameter (maxPrice) to the query. This has to be done for all parameters of the query.

+
+
+

Note that using the createQuery(String) method, which takes the entire query as string (that may already contain the parameter) is not allowed to avoid SQL injection vulnerabilities. +When the method getResultList() is invoked, the query is executed and the result is delivered as List. As an alternative, there is a method called getSingleResult(), which returns the entity if the query returned exactly one and throws an exception otherwise.

+
+ +
+

==JPA Performance +When using JPA the developer sometimes does not see or understand where and when statements to the database are triggered.

+
+
+
+
+

Establishing expectations Developers shouldn’t expect to sprinkle magic pixie dust on POJOs in hopes they will become persistent.

+
+
+
+— Dan Allen
+https://epdf.tips/seam-in-action.html +
+
+
+

So in case you do not understand what is going on under the hood of JPA, you will easily run into performance issues due to lazy loading and other effects.

+
+
+
+
+
N plus 1 Problem
+
+

The most prominent phenomenon is called the N+1 Problem. +We use entities from our MTS demo app as an example to explain the problem. +There is a DishEntity that has a @ManyToMany relation to +IngredientEntity. +Now we assume that we want to iterate all ingredients for a dish like this:

+
+
+
+
DishEntity dish = dao.findDishById(dishId);
+BigDecimal priceWithAllExtras = dish.getPrice();
+for (IngredientEntity ingredient : dish.getExtras()) {
+  priceWithAllExtras = priceWithAllExtras.add(ingredient.getPrice());
+}
+
+
+
+

Now dish.getExtras() is loaded lazy. Therefore the JPA vendor will provide a list with lazy initialized instances of IngredientEntity that only contain the ID of that entity. Now with every call of ingredient.getPrice() we technically trigger an SQL query statement to load the specific IngredientEntity by its ID from the database. +Now findDishById caused 1 initial query statement and for any number N of ingredients we are causing an additional query statement. This makes a total of N+1 statements. As causing statements to the database is an expensive operation with a lot of overhead (creating connection, etc.) this ends in bad performance and is therefore a problem (the N+1 Problem).

+
+
+
+
Solving N plus 1 Problem
+
+

To solve the N+1 Problem you need to change your code to only trigger a single statement instead. This can be achieved in various ways. The most universal solution is to use FETCH JOIN in order to pre-load the nested N child entities into the first level cache of the JPA vendor implementation. This will behave very similarly to the @ManyToMany relation to IngredientEntity having FetchType.EAGER, but only for the specific query and not in general. Changing @ManyToMany to FetchType.EAGER would cause bad performance for other usecases where only the dish but not its extra ingredients are needed. For this reason all relations, including @OneToOne, should always be FetchType.LAZY. Back to our example, we simply replace dao.findDishById(dishId) with dao.findDishWithExtrasById(dishId) that we implement by the following JPQL query:

+
+
+
+
SELECT dish FROM DishEntity dish
+  LEFT JOIN FETCH dish.extras
+  WHERE dish.id = :dishId
+
+
+
+

The rest of the code does not have to be changed but now dish.getExtras() will get the IngredientEntity from the first level cache where it was fetched by the initial query above.

+
+
+

Please note that if you only need the sum of the prices from the extras you can also create a query using an aggregator function:

+
+
+
+
SELECT sum(dish.extras.price) FROM DishEntity dish
+
+
+
+

As you can see you need to understand the concepts in order to get good performance.

+
+
+

There are many advanced topics such as creating database indexes or calculating statistics for the query optimizer to get the best performance. For such advanced topics we recommend to have a database expert in your team that cares about such things. However, understanding the N+1 Problem and its solutions is something that every Java developer in the team needs to understand.

+
+
+ +
+

==Auditing

+
+
+

For database auditing we use hibernate envers. If you want to use auditing ensure you have the following dependency in your pom.xml:

+
+
+
spring
+
+
<dependency>
+  <groupId>com.devonfw.java.modules</groupId>
+  <artifactId>devon4j-jpa-envers</artifactId>
+</dependency>
+
+
+
+
quarkus
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-hibernate-envers</artifactId>
+</dependency>
+
+
+
+ + + + + +
+ + +The following part applies only to spring applications. At this point, the Quarkus extension does not provide any additional configurations. For Quarkus applications, simply use the @Audited annotation to enable auditing for an entity class, as described a few lines below or seen here. +
+
+
+

Make sure that the entity manager also scans the package from the devon4j-jpa[-envers] module in order to work properly. Also make sure that the correct Repository Factory Bean Class is chosen.

+
+
+
+
@EntityScan(basePackages = { "«my.base.package»" }, basePackageClasses = { AdvancedRevisionEntity.class })
+...
+@EnableJpaRepositories(repositoryFactoryBeanClass = GenericRevisionedRepositoryFactoryBean.class)
+...
+public class SpringBootApp {
+  ...
+}
+
+
+
+

Now let your [Entity]Repository extend from DefaultRevisionedRepository instead of DefaultRepository.

+
+
+

The repository now has the methods getRevisionHistoryMetadata(id) and getRevisionHistoryMetadata(id, boolean lazy) available to get a list of revisions for a given entity, a method find(id, revision) to load a specific revision of an entity with the given ID, and getLastRevisionHistoryMetadata(id) to load the last revision. +To enable auditing for an entity simply place the @Audited annotation on your entity and all entity classes it extends from.

+
+
+
+
@Entity(name = "Drink")
+@Audited
+public class DrinkEntity extends ProductEntity implements Drink {
+...
+
+
+
+

When auditing is enabled for an entity an additional database table is used to store all changes to the entity table and a corresponding revision number. This table is called <ENTITY_NAME>_AUD per default. Another table called REVINFO is used to store all revisions. Make sure that these tables are available. They can be generated by hibernate with the following property (only for development environments).

+
+
+
+
  database.hibernate.hbm2ddl.auto=create
+
+
+
+

Another possibility is to put them in your database migration scripts like so.

+
+
+
+
CREATE CACHED TABLE PUBLIC.REVINFO(
+  id BIGINT NOT NULL generated by default as identity (start with 1),
+  timestamp BIGINT NOT NULL,
+  user VARCHAR(255)
+);
+...
+CREATE CACHED TABLE PUBLIC.<TABLE_NAME>_AUD(
+    <ALL_TABLE_ATTRIBUTES>,
+    revtype TINYINT,
+    rev BIGINT NOT NULL
+);
+
+
+
+ +
+

==Transaction Handling

+
+
+

For transaction handling we use AOP to add transaction control via annotations as an aspect. +This is done by annotating your code with the @Transactional annotation. +You can either annotate your container bean at class level to make all methods transactional, or you can annotate individual methods to make them transactional:

+
+
+
+
  @Transactional
+  public Output getData(Input input) {
+    ...
+  }
+
+
+
+
+
+

JTA Imports

+
+

Here are the import statements for transaction support:

+
+
+
+
import javax.transaction.Transactional;
+
+
+
+ + + + + +
+ + +Use the above import statement to follow JEE and avoid using org.springframework.transaction.annotation.Transactional. +
+
+
+
+

JTA Dependencies

+
+

Please note that with Jakarta EE the dependencies have changed. +When you want to start with Jakarta EE you should use these dependencies to get the annotations for dependency injection:

+
+
+
+
<!-- Java Transaction API (JTA) -->
+<dependency>
+  <groupId>jakarta.transaction</groupId>
+  <artifactId>jakarta.transaction-api</artifactId>
+</dependency>
+
+
+
+

Please note that with quarkus you will get them as transitive dependencies out of the box. +The above Jakarta EE dependencies replace these JEE dependencies:

+
+
+
+
<!-- Java Transaction API (JTA) -->
+<dependency>
+  <groupId>javax.transaction</groupId>
+  <artifactId>javax.transaction-api</artifactId>
+</dependency>
+
+
+
+
+

Handling constraint violations

+
+

Using @Transactional magically wraps transaction handling around your code. +As constraints are checked by the database at the end when the transaction gets committed, a constraint violation will be thrown by this aspect outside your code. +In case you have to handle constraint violations manually, you have to do that in code outside the logic that is annotated with @Transactional. +This may be done in a service operation by catching a ConstraintViolationException (org.hibernate.exception.ConstraintViolationException for hibernate). +As a generic approach you can solve this via REST execption handling.

+
+
+
+

Batches

+
+

Transaction control for batches is a lot more complicated and is described in the batch layer.

+
+
+ +
+

==SQL

+
+
+

For general guides on dealing or avoiding SQL, preventing SQL-injection, etc. you should study domain layer.

+
+
+
+

Naming Conventions

+
+

Here we define naming conventions that you should follow whenever you write SQL files:

+
+
+
    +
  • +

    All SQL-Keywords in UPPER CASE

    +
  • +
  • +

    Indentation should be 2 spaces as suggested by devonfw for every format.

    +
  • +
+
+
+
DDL
+
+

The naming conventions for database constructs (tables, columns, triggers, constraints, etc.) should be aligned with your database product and their operators. +However, when you have the freedom of choice and a modern case-sensitive database, you can simply use your code conventions also for database constructs to avoid explicitly mapping each and every property (e.g. RestaurantTable vs. RESTAURANT_TABLE).

+
+
+
    +
  • +

    Define columns and constraints inline in the statement to create the table

    +
  • +
  • +

    Indent column types so they all start in the same text column

    +
  • +
  • +

    Constraints should be named explicitly (to get a reasonable hint in error messages) with:

    +
    +
      +
    • +

      PK_«table» for primary key (name optional here as PK constraint are fundamental)

      +
    • +
    • +

      FK_«table»_«property» for foreign keys («table» and «property» are both on the source where the foreign key is defined)

      +
    • +
    • +

      UC_«table»_«property»[_«propertyN»]* for unique constraints

      +
    • +
    • +

      CK_«table»_«check» for check constraints («check» describes the check, if it is defined on a single property it should start with the property).

      +
    • +
    +
    +
  • +
  • +

    Old RDBMS had hard limitations for names (e.g. 30 characters). Please note that recent databases have overcome these very low length limitations. However, keep your names short but precise and try to define common abbreviations in your project for the corresponding (business) terms. Especially do not just truncate the names at the limit.

    +
  • +
  • +

    If possible add comments on table and columns to help DBAs understanding your schema. This is also honored by many tools (not only DBA-tools).

    +
  • +
+
+
+

Here is a brief example of a DDL:

+
+
+
+
CREATE SEQUENCE HIBERNATE_SEQUENCE START WITH 1000000;
+
+-- *** Table ***
+CREATE TABLE RESTAURANT_TABLE (
+  ID                   NUMBER(19) NOT NULL,
+  MODIFICATION_COUNTER INTEGER NOT NULL,
+  SEATS                INTEGER NOT NULL,
+  CONSTRAINT PK_TABLE PRIMARY KEY(ID)
+);
+COMMENT ON TABLE RESTAURANT_TABLE IS 'The physical tables inside the restaurant.';
+-- *** Order ***
+CREATE TABLE RESTAURANT_ORDER (
+  ID                   NUMBER(19) NOT NULL,
+  MODIFICATION_COUNTER INTEGER NOT NULL,
+  TABLE_ID             NUMBER(19) NOT NULL,
+  TOTAL                DECIMAL(5, 2) NOT NULL,
+  CREATION_DATE        TIMESTAMP NOT NULL,
+  PAYMENT_DATE         TIMESTAMP,
+  STATUS               VARCHAR2(10 CHAR) NOT NULL,
+  CONSTRAINT PK_ORDER PRIMARY KEY(ID),
+  CONSTRAINT FK_ORDER_TABLE_ID FOREIGN KEY(TABLE_ID) REFERENCES RESTAURANT_TABLE(ID)
+);
+COMMENT ON TABLE RESTAURANT_ORDER IS 'An order and bill at the restaurant.';
+...
+
+
+
+

ATTENTION: Please note that TABLE and ORDER are reserved keywords in SQL and you should avoid using such keywords to prevent problems.

+
+
+
+
Data
+
+

For insert, update, delete, etc. of data SQL scripts should additionally follow these guidelines:

+
+
+
    +
  • +

    Inserts always with the same order of columns in blocks for each table.

    +
  • +
  • +

    Insert column values always starting with ID, MODIFICATION_COUNTER, [DTYPE, ] …​

    +
  • +
  • +

    List columns with fixed length values (boolean, number, enums, etc.) before columns with free text to support alignment of multiple insert statements

    +
  • +
  • +

    Pro Tip: Get familiar with column mode of advanced editors such as notepad++ when editing large blocks of similar insert statements.

    +
  • +
+
+
+
+
INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (0, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (1, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (2, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (3, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (4, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (5, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (6, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (7, 1, 8);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (8, 1, 8);
+...
+
+
+
+

See also Database Migrations.

+
+
+ +
+

==Database Migration

+
+
+

When you have a schema-based database, +you need a solution for schema versioning and migration for your database. +A specific release of your app requires a corresponding version of the schema in the database to run. +As you want simple and continuous deployment you should automate the schema versioning and database migration.

+
+
+

The general idea is that your software product contains "scripts" to migrate the database from schema version X to version X+1. +When you begin your project you start with version 1 and with every increment of your app that needs a change to the database schema (e.g. a new table, a new column to an existing table, a new index, etc.) you add another "script" that migrates from the current to the next version. +For simplicity these versions are just sequential numbers or timestamps. +Now, the solution you choose will automatically manage the schema version in a separate metadata table in your database that stores the current schema version. +When your app is started, it will check the current version inside the database from that metadata table. +As long as there are "scripts" that migrate from there to a higher version, they will be automatically applied to the database and this process is recorded in the metadata table in your database, which also updates the current schema version there. +Using this approach, you can start with an empty database, which will result in all "scripts" being applied sequentially. +Also any version of your database schema can be present and you will always end up in a controlled migration to the latest schema version.

+
+
+
+
+

Options for database migration

+
+

For database migration you can choose between the following options:

+
+
+
    +
  • +

    flyway (KISS based approach with migrations as SQL)

    +
  • +
  • +

    liquibase (more complex approach with database abstraction)

    +
  • +
+
+
+ +
+

==Logging

+
+
+

We recommend using SLF4J as API for logging, that has become a de facto standard in Java as it has a much better design than java.util.logging offered by the JDK. +There are several implementations for SLF4J. For Spring applications our recommended implementation is Logback. Quarkus uses JBoss Logging which provides a JBoss Log Manager implementation for SLF4J. For more information on logging in Quarkus, see the Quarkus logging guide.

+
+
+
+

Logging Dependencies

+
+

To use Logback in your Spring application, you need to include the following dependencies:

+
+
+
+
<!-- SLF4J as logging API -->
+<dependency>
+  <groupId>org.slf4j</groupId>
+  <artifactId>slf4j-api</artifactId>
+</dependency>
+<!-- Logback as logging implementation  -->
+<dependency>
+  <groupId>ch.qos.logback</groupId>
+  <artifactId>logback-classic</artifactId>
+</dependency>
+<!-- JSON logging for cloud-native log monitoring -->
+<dependency>
+  <groupId>net.logstash.logback</groupId>
+  <artifactId>logstash-logback-encoder</artifactId>
+</dependency>
+
+
+
+

In devon4j these dependencies are provided by the devon4j-logging module.

+
+
+

In Quarkus, SLF4J and the slf4j-jboss-logmanager are directly included in the Quarkus core runtime and can be used out of the box.

+
+
+
+

Logger Access

+
+

The general pattern for accessing loggers from your code is a static logger instance per class using the following pattern:

+
+
+
+
import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class MyClass {
+  private static final Logger LOG = LoggerFactory.getLogger(MyClass.class);
+  ...
+}
+
+
+
+

For detailed documentation how to use the logger API check the SLF4j manual.

+
+
+ + + + + +
+ + +In case you are using devonfw-ide and Eclipse you can just type LOG and hit [ctrl][space] to insert the code pattern including the imports into your class. +
+
+
+
Lombok
+
+

In case you are using Lombok, you can simply use the @Slf4j annotation in your class. This causes Lombok to generate the logger instance for you.

+
+
+
+
+

Log-Levels

+
+

We use a common understanding of the log-levels as illustrated by the following table. +This helps for better maintenance and operation of the systems.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4. Log-levels
Log-levelDescriptionImpactActive Environments

FATAL

Only used for fatal errors that prevent the application from working at all (e.g. startup fails or shutdown/restart required)

Operator has to react immediately

all

ERROR

An abnormal error indicating that the processing failed due to technical problems.

Operator should check for known issue and otherwise inform development

all

WARNING

A situation where something worked not as expected. E.g. a business exception or user validation failure occurred.

No direct reaction required. Used for problem analysis.

all

INFO

Important information such as context, duration, success/failure of request or process

No direct reaction required. Used for analysis.

all

DEBUG

Development information that provides additional context for debugging problems.

No direct reaction required. Used for analysis.

development and testing

TRACE

Like DEBUG but exhaustive information and for code that is run very frequently. Will typically cause large log-files.

No direct reaction required. Used for problem analysis.

none (turned off by default)

+
+

Exceptions (with their stack trace) should only be logged on FATAL or ERROR level. For business exceptions typically a WARNING including the message of the exception is sufficient.

+
+
+
Configuration of Logback
+
+

The configuration of logback happens via the logback.xml file that you should place into src/main/resources of your app. +For details consult the logback configuration manual.

+
+
+ + + + + +
+ + +Logback also allows to overrule the configuration with a logback-test.xml file that you may put into src/test/resources or into a test-dependency. +
+
+
+
+
Configuration in Quarkus
+
+

There are several options you can set in the application.properties file to configure the behaviour of the logger in Quarkus. For a detailed overview, see the corresponding part of the Quarkus guide.

+
+
+
+
+

JSON-logging

+
+

For easy integration with log-monitoring, we recommend that your app logs to standard out in JSON following JSON Lines.

+
+
+

In Spring applications, this can be achieved via logstash-logback-encoder (see dependencies). In Quarkus, it can be easily achieved using the quarkus-logging-json extension (see here for more details).

+
+
+

This will produce log-lines with the following format (example formatted for readability):

+
+
+
+
{
+  "timestamp":"2000-12-31T23:59:59.999+00:00",
+  "@version":"1",
+  "message":"Processing 4 order(s) for shipment",
+  "logger_name":"com.myapp.order.logic.UcManageOrder",
+  "thread_name":"http-nio-8081-exec-6",
+  "level":"INFO",
+  "level_value":20000,
+  "appname":"myapp",
+}
+
+
+
+
Adding custom values to JSON log with Logstash
+
+

The JSON encoder even supports logging custom properties for your log-monitoring. +The trick is to use the class net.logstash.logback.argument.StructuredArguments for adding the arguments to your log message, e.g.

+
+
+
+
import static net.logstash.logback.argument.StructuredArguments.v;
+
+...
+    LOG.info("Request with {} and {} took {} ms.", v("url", url), v("status", statusCode), v("duration", millis));
+...
+
+
+
+

This will produce a JSON log-line with the following properties:

+
+
+
+
...
+  "message":"Request with url=https://api/service/v1/ordermanagement/order and status=200 took duration=251 ms",
+  "url":"https://api/service/v1/ordermanagement/order",
+  "status":"200",
+  "duration":"251",
+...
+
+
+
+

As you can quickly see, besides the human readable message you also have the structured properties url, status and duration that can be extremely valuable to configure dashboards in your log-monitoring that visualize success/failure ratio as well as performance of your requests.

+
+
+
+
+

Classic log-files

+
+ + + + + +
+ + +In devon4j, we strongly recommend using JSON logging instead of classic log files. The following section refers only to devon4j Spring applications that use Logback. +
+
+
+

Even though we do not recommend anymore to write classical log-files to the local disc, here you can still find our approach for it.

+
+
+
Maven-Integration
+
+

In the pom.xml of your application add this dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java</groupId>
+  <artifactId>devon4j-logging</artifactId>
+</dependency>
+
+
+
+

The above dependency already adds transitive dependencies to SLF4J and logback. +Also it comes with configuration snippets that can be included from your logback.xml file (see configuration).

+
+
+

The logback.xml to write regular log-files can look as following:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<configuration scan="true" scanPeriod="60 seconds">
+  <property resource="com/devonfw/logging/logback/application-logging.properties" />
+  <property name="appname" value="MyApp"/>
+  <property name="logPath" value="../logs"/>
+  <include resource="com/devonfw/logging/logback/appenders-file-all.xml" />
+  <include resource="com/devonfw/logging/logback/appender-console.xml" />
+
+  <root level="DEBUG">
+    <appender-ref ref="ERROR_APPENDER"/>
+    <appender-ref ref="INFO_APPENDER"/>
+    <appender-ref ref="DEBUG_APPENDER"/>
+    <appender-ref ref="CONSOLE_APPENDER"/>
+  </root>
+
+  <logger name="org.springframework" level="INFO"/>
+</configuration>
+
+
+
+

The provided logback.xml is configured to use variables defined on the config/application.properties file. +In our example, the log file path points to ../logs/ in order to log to tomcat log directory when starting tomcat on the bin folder. +Change it according to your custom needs.

+
+
+
config/application.properties
+
+
log.dir=../logs/
+
+
+
+
+
Log Files
+
+

The classical approach uses the following log files:

+
+
+
    +
  • +

    Error Log: Includes log entries to detect errors.

    +
  • +
  • +

    Info Log: Used to analyze system status and to detect bottlenecks.

    +
  • +
  • +

    Debug Log: Detailed information for error detection.

    +
  • +
+
+
+

The log file name pattern is as follows:

+
+
+
+
«LOGTYPE»_log_«HOST»_«APPLICATION»_«TIMESTAMP».log
+
+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 5. Segments of Logfilename
ElementValueDescription

«LOGTYPE»

info, error, debug

Type of log file

«HOST»

e.g. mywebserver01

Name of server, where logs are generated

«APPLICATION»

e.g. myapp

Name of application, which causes logs

«TIMESTAMP»

YYYY-MM-DD_HH00

date of log file

+
+

Example: +error_log_mywebserver01_myapp_2013-09-16_0900.log

+
+
+

Error log from mywebserver01 at application myapp at 16th September 2013 9am.

+
+
+
+
Output format
+
+

We use the following output format for all log entries to ensure that searching and filtering of log entries work consistent for all logfiles:

+
+
+
+
[D: «timestamp»] [P: «priority»] [C: «NDC»][T: «thread»][L: «logger»]-[M: «message»]
+
+
+
+
    +
  • +

    D: Date (Timestamp in ISO8601 format e.g. 2013-09-05 16:40:36,464)

    +
  • +
  • +

    P: Priority (the log level)

    +
  • +
  • +

    C: Correlation ID (ID to identify users across multiple systems, needed when application is distributed)

    +
  • +
  • +

    T: Thread (Name of thread)

    +
  • +
  • +

    L: Logger name (use class name)

    +
  • +
  • +

    M: Message (log message)

    +
  • +
+
+
+

Example:

+
+
+
+
[D: 2013-09-05 16:40:36,464] [P: DEBUG] [C: 12345] [T: main] [L: my.package.MyClass]-[M: My message...]
+
+
+
+ + + + + +
+ + +When using devon4j-logging, this format is used by default. To achieve this format in Quarkus, set quarkus.log.console.format=[D: %d] [P: %p] [C: %X] [T: %t] [L: %c] [M: %m]%n in your properties. +
+
+
+
+
Correlation ID
+
+

In order to correlate separate HTTP requests to services belonging to the same user / session, we provide a servlet filter called DiagnosticContextFilter. +This filter takes a provided correlation ID from the HTTP header X-Correlation-Id. +If none was found, it will generate a new correlation id as UUID. +This correlation ID is added as MDC to the logger. +Therefore, it will then be included to any log message of the current request (thread). +Further concepts such as service invocations will pass this correlation ID to subsequent calls in the application landscape. Hence you can find all log messages related to an initial request simply via the correlation ID even in highly distributed systems.

+
+
+
+
Security
+
+

In order to prevent log forging attacks you can simply use the suggested JSON logging format. +Otherwise you can use com.devonfw.module.logging.common.impl.SingleLinePatternLayout as demonstrated here in order to prevent such attacks.

+
+
+ +
+

==Security +Security is today's most important cross-cutting concern of an application and an enterprise IT-landscape. We seriously care about security and give you detailed guides to prevent pitfalls, vulnerabilities, and other disasters. While many mistakes can be avoided by following our guidelines you still have to consider security and think about it in your design and implementation. The security guide will not automatically prevent you from any harm, but will provide you hints and best practices already used in different software products.

+
+
+

An important aspect of security is proper authentication and authorization as described in access-control. In the following we discuss about potential vulnerabilities and protection to prevent them.

+
+
+
+
+

Vulnerabilities and Protection

+
+

Independent from classical authentication and authorization mechanisms there are many common pitfalls that can lead to vulnerabilities and security issues in your application such as XSS, CSRF, SQL-injection, log-forging, etc. A good source of information about this is the OWASP. +We address these common threats individually in security sections of our technological guides as a concrete solution to prevent an attack typically depends on the according technology. The following table illustrates common threats and contains links to the solutions and protection-mechanisms provided by the devonfw:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 6. Security threats and protection-mechanisms
ThreatProtectionLink to details

A1 Injection

validate input, escape output, use proper frameworks

SQL Injection

A2 Broken Authentication

encrypt all channels, use a central identity management with strong password-policy

Authentication

A3 Sensitive Data Exposure

Use secured exception facade, design your data model accordingly

REST exception handling

A4 XML External Entities

Prefer JSON over XML, ensure FSP when parsing (external) XML

XML guide

A5 Broken Access Control

Ensure proper authorization for all use-cases, use @DenyAll as default to enforce

Access-control guide especially method authorization

A6 Security Misconfiguration

Use devon4j application template and guides to avoid

tutorial-newapp and sensitive configuration

A7 Cross-Site Scripting

prevent injection (see A1) for HTML, JavaScript and CSS and understand same-origin-policy

client-layer

A8 Insecure Deserialization

Use simple and established serialization formats such as JSON, prevent generic deserialization (for polymorphic types)

JSON guide especially inheritance, XML guide

A9 Using Components with Known Vulnerabilities

subscribe to security newsletters, recheck products and their versions continuously, use devonfw dependency management

CVE newsletter and dependency check

A10 Insufficient_Logging & Monitoring

Ensure to log all security related events (login, logout, errors), establish effective monitoring

Logging guide and monitoring guide

Insecure Direct Object References

Using direct object references (IDs) only with appropriate authorization

logic-layer

Cross-Site Request Forgery (CSRF)

secure mutable service operations with an explicit CSRF security token sent in HTTP header and verified on the server

CSRF guide

Log-Forging

Escape newlines in log messages

logging security

Unvalidated Redirects and Forwards

Avoid using redirects and forwards, in case you need them do a security audit on the solution.

devonfw proposes to use rich-clients (SPA/RIA). We only use redirects for login in a safe way.

+
+
+

Advanced Security

+
+

While OWASP Top 10 covers the basic aspects of application security, there are advanced standards such as AVS. +In devonfw we address this in the +Application Security Quick Solution Guide.

+
+
+
+

Tools

+
+
Dependency Check
+
+

To address the threat Using Components with Known Vulnerabilities we recommend using OWASP dependency check that ships with a maven plugin and can analyze your dependencies for known CVEs. +In order to run this check, you can simply call this command on any maven project:

+
+
+
+
mvn org.owasp:dependency-check-maven:6.1.5:aggregate
+
+
+
+ + + + + +
+ + +The version is just for completeness. You should check yourself for using a recent version of the plugin. +
+
+
+

If you build a devon4j spring application from our app-template you can activate the dependency check even more easily with the security profile:

+
+
+
+
mvn clean install -P security
+
+
+
+

This does not run by default as it causes some overhead for the build performance. However, consider building this in your CI at least nightly. +After the dependency check is performed, you will find the results in target/dependency-check-report.html of each module. The report will also be generated when the site is built (mvn site) even without the profile.

+
+
+
+
Penetration Testing
+
+

For penetration testing (testing for vulnerabilities) of your web application, we recommend the following tools:

+
+
+ +
+
+ +
+

==Access-Control +Access-Control is a central and important aspect of Security. It consists of two major aspects:

+
+
+ +
+
+
+
+

Authentication

+
+

Definition:

+
+
+
+
+

Authentication is the verification that somebody interacting with the system is the actual subject for whom he claims to be.

+
+
+
+
+

The one authenticated is properly called subject or principal. There are two forms of principals you need to distinguish while designing your authentication: human users and autonomous systems. While e.g. a Kerberos/SPNEGO Single-Sign-On makes sense for human users, it is pointless for authenticating autonomous systems. For simplicity, we use the common term user to refer to any principal even though it may not be a human (e.g. in case of a service call from an external system).

+
+
+

To prove the authenticity, the user provides some secret called credentials. The most simple form of credentials is a password.

+
+
+
Implementations
+
+ + + + + +
+ + +Please never implement your own authentication mechanism or credential store. You have to be aware of implicit demands such as salting and hashing credentials, password life-cycle with recovery, expiry, and renewal including email notification confirmation tokens, central password policies, etc. This is the domain of access managers and identity management systems. In a business context you will typically already find a system for this purpose that you have to integrate (e.g. via LDAP). Otherwise you should consider establishing such a system e.g. using keycloak. +
+
+
+

We recommend using JWT when possible. For KISS, also try to avoid combining multiple authentication mechanisms (form based, basic-auth, SAMLv2, OAuth, etc.) within the same application (for different URLs).

+
+
+

For spring, check the Spring Security

+
+
+

For quarkus, check the Quarkus Authentication

+
+
+
+
+

Authorization

+
+

Definition:

+
+
+
+
+

Authorization is the verification that an authenticated user is allowed to perform the operation he intends to invoke.

+
+
+
+
+
Clarification of terms
+
+

For clarification we also want to give a common understanding of related terms that have no unique definition and consistent usage in the wild.

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 7. Security terms related to authorization
TermMeaning and comment

Permission

A permission is an object that allows a principal to perform an operation in the system. This permission can be granted (give) or revoked (taken away). Sometimes people also use the term right what is actually wrong as a right (such as the right to be free) can not be revoked.

Group

We use the term group in this context for an object that contains permissions. A group may also contain other groups. Then the group represents the set of all recursively contained permissions.

Role

We consider a role as a specific form of group that also contains permissions. A role identifies a specific function of a principal. A user can act in a role.

+

For simple scenarios a principal has a single role associated. In more complex situations a principal can have multiple roles but has only one active role at a time that he can choose out of his assigned roles. For KISS it is sometimes sufficient to avoid this by creating multiple accounts for the few users with multiple roles. Otherwise at least avoid switching roles at run-time in clients as this may cause problems with related states. Simply restart the client with the new role as parameter in case the user wants to switch his role.

Access Control

Any permission, group, role, etc., which declares a control for access management.

+
+
+
Suggestions on the access model
+
+

For the access model we give the following suggestions:

+
+
+
    +
  • +

    Each Access Control (permission, group, role, …​) is uniquely identified by a human readable string.

    +
  • +
  • +

    We create a unique permission for each use-case.

    +
  • +
  • +

    We define groups that combine permissions to typical and useful sets for the users.

    +
  • +
  • +

    We define roles as specific groups as required by our business demands.

    +
  • +
  • +

    We allow to associate users with a list of Access Controls.

    +
  • +
  • +

    For authorization of an implemented use case we determine the required permission. Furthermore, we determine the current user and verify that the required permission is contained in the tree spanned by all his associated Access Controls. If the user does not have the permission we throw a security exception and thus abort the operation and transaction.

    +
  • +
  • +

    We avoid negative permissions, that is a user has no permission by default and only those granted to him explicitly give him additional permission for specific things. Permissions granted can not be reduced by other permissions.

    +
  • +
  • +

    Technically we consider permissions as a secret of the application. Administrators shall not fiddle with individual permissions but grant them via groups. So the access management provides a list of strings identifying the Access Controls of a user. The individual application itself contains these Access Controls in a structured way, whereas each group forms a permission tree.

    +
  • +
+
+
+
+
Naming conventions
+
+

As stated above each Access Control is uniquely identified by a human readable string. This string should follow the naming convention:

+
+
+
+
«app-id».«local-name»
+
+
+
+

For Access Control Permissions the «local-name» again follows the convention:

+
+
+
+
«verb»«object»
+
+
+
+

The segments are defined by the following table:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 8. Segments of Access Control Permission ID
SegmentDescriptionExample

«app-id»

Is a unique technical but human readable string of the application (or microservice). It shall not contain special characters and especially no dot or whitespace. We recommend to use lower-train-case-ascii-syntax. The identity and access management should be organized on enterprise level rather than application level. Therefore permissions of different apps might easily clash (e.g. two apps might both define a group ReadMasterData but some user shall get this group for only one of these two apps). Using the «app-id». prefix is a simple but powerful namespacing concept that allows you to scale and grow. You may also reserve specific «app-id»s for cross-cutting concerns that do not actually reflect a single app e.g. to grant access to a geographic region.

shop

«verb»

The action that is to be performed on «object». We use Find for searching and reading data. Save shall be used both for create and update. Only if you really have demands to separate these two you may use Create in addition to Save. Finally, Delete is used for deletions. For non CRUD actions you are free to use additional verbs such as Approve or Reject.

Find

«object»

The affected object or entity. Shall be named according to your data-model

Product

+
+

So as an example shop.FindProduct will reflect the permission to search and retrieve a Product in the shop application. The group shop.ReadMasterData may combine all permissions to read master-data from the shop. However, also a group shop.Admin may exist for the Admin role of the shop application. Here the «local-name» is Admin that does not follow the «verb»«object» schema.

+
+
+
+
devon4j-security
+
+

The module devon4j-security provides ready-to-use code based on spring-security that makes your life a lot easier.

+
+
+
+access-control +
+
Figure 2. devon4j Security Model
+
+
+

The diagram shows the model of devon4j-security that separates two different aspects:

+
+
+
    +
  • +

    The Identity- and Access-Management is provided by according products and typically already available in the enterprise landscape (e.g. an active directory). It provides a hierarchy of primary access control objects (roles and groups) of a user. An administrator can grant and revoke permissions (indirectly) via this way.

    +
  • +
  • +

    The application security defines a hierarchy of secondary access control objects (groups and permissions). This is done by configuration owned by the application (see following section). The "API" is defined by the IDs of the primary access control objects that will be referenced from the Identity- and Access-Management.

    +
  • +
+
+
+
+
Access Control Config
+
+

In your application simply extend AccessControlConfig to configure your access control objects as code and reference it from your use-cases. An example config may look like this:

+
+
+
+
@Named
+public class ApplicationAccessControlConfig extends AccessControlConfig {
+
+  public static final String APP_ID = "MyApp";
+
+  private static final String PREFIX = APP_ID + ".";
+
+  public static final String PERMISSION_FIND_OFFER = PREFIX + "FindOffer";
+
+  public static final String PERMISSION_SAVE_OFFER = PREFIX + "SaveOffer";
+
+  public static final String PERMISSION_DELETE_OFFER = PREFIX + "DeleteOffer";
+
+  public static final String PERMISSION_FIND_PRODUCT = PREFIX + "FindProduct";
+
+  public static final String PERMISSION_SAVE_PRODUCT = PREFIX + "SaveProduct";
+
+  public static final String PERMISSION_DELETE_PRODUCT = PREFIX + "DeleteProduct";
+
+  public static final String GROUP_READ_MASTER_DATA = PREFIX + "ReadMasterData";
+
+  public static final String GROUP_MANAGER = PREFIX + "Manager";
+
+  public static final String GROUP_ADMIN = PREFIX + "Admin";
+
+  public ApplicationAccessControlConfig() {
+
+    super();
+    AccessControlGroup readMasterData = group(GROUP_READ_MASTER_DATA, PERMISSION_FIND_OFFER, PERMISSION_FIND_PRODUCT);
+    AccessControlGroup manager = group(GROUP_MANAGER, readMasterData, PERMISSION_SAVE_OFFER, PERMISSION_SAVE_PRODUCT);
+    AccessControlGroup admin = group(GROUP_ADMIN, manager, PERMISSION_DELETE_OFFER, PERMISSION_DELETE_PRODUCT);
+  }
+}
+
+
+
+
+
Configuration on Java Method level
+
+

In your use-case you can now reference a permission like this:

+
+
+
+
@Named
+public class UcSaveOfferImpl extends ApplicationUc implements UcSaveOffer {
+
+  @Override
+  @RolesAllowed(ApplicationAccessControlConfig.PERMISSION_SAVE_OFFER)
+  public OfferEto save(OfferEto offer) { ... }
+  ...
+}
+
+
+
+
+
JEE Standard
+
+

Role-based Access Control (RBAC) is commonly used for authorization. +JSR 250 defines a number of common annotations to secure your application.

+
+
+
    +
  • +

    javax.annotation.security.PermitAll specifies that no access control is required to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.DenyAll specifies that no caller is allowed to invoke the specified method(s), regardless of the access controls granted.

    +
  • +
  • +

    javax.annotation.security.RolesAllowed specifies that only a list of access controls are allowed to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.DeclareRoles defines roles for security checking.

    +
  • +
  • +

    javax.annotation.security.RunAs specifies the RunAs role for the given components.

    +
  • +
+
+
+

@PermitAll, @DenyAll, and @RolesAllowed annotations can be applied to both class and method. +A method-level annotation will override the behaviour of a class-level annotation. Using multiple annotations of those 3 is not valid.

+
+
+
+
// invalid
+@PermitAll
+@DenyAll
+public String foo()
+
+// invalid and compilation fails
+@RolesAllowed("admin")
+@RolesAllowed("user")
+public String bar()
+
+// OK
+@RolesAllowed({"admin", "user"})
+public String bar()
+
+
+
+

Please note that when specifying multiple arguments to @RolesAllowed those are combined with OR (and not with AND). +So if the user has any of the specified access controls, he will be able to access the method.

+
+
+

As a best practice avoid specifying string literals to @RolesAllowed. +Instead define a class with all access controls as constants and reference them from there. +This class is typically called ApplicationAccessControlConfig in devonfw.

+
+
+

In many complicated cases where @PermitAll @DenyAll @RolesAllowed are insufficient e.g. a method should be accessed by a user in role A and not in role B at the same time, you have to verify the user role directly in the method. You can use SecurityContext class to get further needed information.

+
+
+
Spring
+
+

Spring Security also supports authorization on method level. To use it, you need to add the spring-security-config dependency. If you use Spring Boot, the dependency spring-boot-starter-security already includes spring-security-config. Then you can configure as follows:

+
+
+
    +
  • +

    prePostEnabled property enables Spring Security pre/post annotations. @PreAuthorize and @PostAuthorize annotations provide expression-based access control. See more here

    +
  • +
  • +

    securedEnabled property determines if the @Secured annotation should be enabled. @Secured can be used similarly to @RolesAllowed.

    +
  • +
  • +

    jsr250Enabled property allows us to use the JSR-250 annotations such as @RolesAllowed.

    +
  • +
+
+
+
+
@Configuration
+@EnableGlobalMethodSecurity(
+  prePostEnabled = true,
+  securedEnabled = true,
+  jsr250Enabled = true)
+public class MethodSecurityConfig
+  extends GlobalMethodSecurityConfiguration {
+}
+
+
+
+

A further read about the whole concept of Spring Security Authorization can be found here.

+
+
+
+
Quarkus
+
+

Quarkus comes with built-in security to allow for RBAC based on the common security annotations @RolesAllowed, @DenyAll, @PermitAll on REST endpoints and CDI beans. Quarkus also provides the io.quarkus.security.Authenticated annotation that will permit any authenticated user to access the resource (equivalent to @RolesAllowed("**")).

+
+
+
+
+
Data-based Permissions
+ +
+
+
Access Control Schema (deprecated)
+
+

The access-control-schema.xml approach is deprecated. The documentation can still be found in access control schema.

+
+
+ +
+

==Data-permissions

+
+
+

In some projects there are demands for permissions and authorization that is dependent on the processed data. E.g. a user may only be allowed to read or write data for a specific region. This is adding some additional complexity to your authorization. If you can avoid this it is always best to keep things simple. However, in various cases this is a requirement. Therefore the following sections give you guidance and patterns how to solve this properly.

+
+
+
+
+

Structuring your data

+
+

For all your business objects (entities) that have to be secured regarding to data permissions we recommend that you create a separate interface that provides access to the relevant data required to decide about the permission. Here is a simple example:

+
+
+
+
public interface SecurityDataPermissionCountry {
+
+  /**
+   * @return the 2-letter ISO code of the country this object is associated with. Users need
+   *         a data-permission for this country in order to read and write this object.
+   */
+  String getCountry();
+}
+
+
+
+

Now related business objects (entities) can implement this interface. Often such data-permissions have to be applied to an entire object-hierarchy. For security reasons we recommend that also all child-objects implement this interface. For performance reasons we recommend that the child-objects redundantly store the data-permission properties (such as country in the example above) and this gets simply propagated from the parent, when a child object is created.

+
+
+
+

Permissions for processing data

+
+

When saving or processing objects with a data-permission, we recommend to provide dedicated methods to verify the permission in an abstract base-class such as AbstractUc and simply call this explicitly from your business code. This makes it easy to understand and debug the code. Here is a simple example:

+
+
+
+
protected void verifyPermission(SecurityDataPermissionCountry entity) throws AccessDeniedException;
+
+
+
+
Beware of AOP
+
+

For simple but cross-cutting data-permissions you may also use AOP. This leads to programming aspects that reflectively scan method arguments and magically decide what to do. Be aware that this quickly gets tricky:

+
+
+
    +
  • +

    What if multiple of your method arguments have data-permissions (e.g. implement SecurityDataPermission*)?

    +
  • +
  • +

    What if the object to authorize is only provided as reference (e.g. Long or IdRef) and only loaded and processed inside the implementation where the AOP aspect does not apply?

    +
  • +
  • +

    How to express advanced data-permissions in annotations?

    +
  • +
+
+
+

What we have learned is that annotations like @PreAuthorize from spring-security easily lead to the "programming in string literals" anti-pattern. We strongly discourage to use this anti-pattern. In such case writing your own verifyPermission methods that you manually call in the right places of your business-logic is much better to understand, debug and maintain.

+
+
+
+
+

Permissions for reading data

+
+

When it comes to restrictions on the data to read it becomes even more tricky. In the context of a user only entities shall be loaded from the database he is permitted to read. This is simple for loading a single entity (e.g. by its ID) as you can load it and then if not permitted throw an exception to secure your code. But what if the user is performing a search query to find many entities? For performance reasons we should only find data the user is permitted to read and filter all the rest already via the database query. But what if this is not a requirement for a single query but needs to be applied cross-cutting to tons of queries? Therefore we have the following pattern that solves your problem:

+
+
+

For each data-permission attribute (or set of such) we create an abstract base entity:

+
+
+
+
@MappedSuperclass
+@EntityListeners(PermissionCheckListener.class)
+@FilterDef(name = "country", parameters = {@ParamDef(name = "countries", type = "string")})
+@Filter(name = "country", condition = "country in (:countries)")
+public abstract class SecurityDataPermissionCountryEntity extends ApplicationPersistenceEntity
+    implements SecurityDataPermissionCountry {
+
+  private String country;
+
+  @Override
+  public String getCountry() {
+    return this.country;
+  }
+
+  public void setCountry(String country) {
+    this.country = country;
+  }
+}
+
+
+
+

There are some special hibernate annotations @EntityListeners, @FilterDef, and @Filter used here allowing to apply a filter on the country for any (non-native) query performed by hibernate. The entity listener may look like this:

+
+
+
+
public class PermissionCheckListener {
+
+  @PostLoad
+  public void read(SecurityDataPermissionCountryEntity entity) {
+    PermissionChecker.getInstance().requireReadPermission(entity);
+  }
+
+  @PrePersist
+  @PreUpdate
+  public void write(SecurityDataPermissionCountryEntity entity) {
+    PermissionChecker.getInstance().requireWritePermission(entity);
+  }
+}
+
+
+
+

This will ensure that hibernate implicitly will call these checks for every such entity when it is read from or written to the database. Further to avoid reading entities from the database the user is not permitted to (and ending up with exceptions), we create an AOP aspect that automatically activates the above declared hibernate filter:

+
+
+
+
@Named
+public class PermissionCheckerAdvice implements MethodBeforeAdvice {
+
+  @Inject
+  private PermissionChecker permissionChecker;
+
+  @PersistenceContext
+  private EntityManager entityManager;
+
+  @Override
+  public void before(Method method, Object[] args, Object target) {
+
+    Collection<String> permittedCountries = this.permissionChecker.getPermittedCountriesForReading();
+    if (permittedCountries != null) { // null is returned for admins that may access all countries
+      if (permittedCountries.isEmpty()) {
+        throw new AccessDeniedException("Not permitted for any country!");
+      }
+      Session session = this.entityManager.unwrap(Session.class);
+      session.enableFilter("country").setParameterList("countries", permittedCountries.toArray());
+    }
+  }
+}
+
+
+
+

Finally to apply this aspect to all Repositories (can easily be changed to DAOs) implement the following advisor:

+
+
+
+
@Named
+public class PermissionCheckerAdvisor implements PointcutAdvisor, Pointcut, ClassFilter, MethodMatcher {
+
+  @Inject
+  private PermissionCheckerAdvice advice;
+
+  @Override
+  public Advice getAdvice() {
+    return this.advice;
+  }
+
+  @Override
+  public boolean isPerInstance() {
+    return false;
+  }
+
+  @Override
+  public Pointcut getPointcut() {
+    return this;
+  }
+
+  @Override
+  public ClassFilter getClassFilter() {
+    return this;
+  }
+
+  @Override
+  public MethodMatcher getMethodMatcher() {
+    return this;
+  }
+
+  @Override
+  public boolean matches(Method method, Class<?> targetClass) {
+    return true; // apply to all methods
+  }
+
+  @Override
+  public boolean isRuntime() {
+    return false;
+  }
+
+  @Override
+  public boolean matches(Method method, Class<?> targetClass, Object... args) {
+    throw new IllegalStateException("isRuntime()==false");
+  }
+
+  @Override
+  public boolean matches(Class<?> clazz) {
+    // when using DAOs simply change to some class like ApplicationDao
+    return DefaultRepository.class.isAssignableFrom(clazz);
+  }
+}
+
+
+
+
+

Managing and granting the data-permissions

+
+

Following our authorization guide we can simply create a permission for each country. We might simply reserve a prefix (as virtual «app-id») for each data-permission to allow granting data-permissions to end-users across all applications of the IT landscape. In our example we could create access controls country.DE, country.US, country.ES, etc. and assign those to the users. The method permissionChecker.getPermittedCountriesForReading() would then scan for these access controls and only return the 2-letter country code from it.

+
+
+ + + + + +
+ + +Before you make your decisions how to design your access controls please clarify the following questions: +
+
+
+
    +
  • +

    Do you need to separate data-permissions independent of the functional permissions? E.g. may it be required to express that a user can read data from the countries ES and PL but is only permitted to modify data from PL? In such case a single assignment of "country-permissions" to users is insufficient.

    +
  • +
  • +

    Do you want to grant data-permissions individually for each application (higher flexibility and complexity) or for the entire application landscape (simplicity, better maintenance for administrators)? In case of the first approach you would rather have access controls like app1.country.GB and app2.country.GB.

    +
  • +
  • +

    Do your data-permissions depend on objects that can be created dynamically inside your application?

    +
  • +
  • +

    If you want to grant data-permissions on other business objects (entities), how do you want to reference them (primary keys, business keys, etc.)? What reference is most stable? Which is most readable?

    +
  • +
+
+
+ +
+

==Validation

+
+
+

Validation is about checking syntax and semantics of input data. Invalid data is rejected by the application. +Therefore validation is required in multiple places of an application. E.g. the GUI will do validation for usability reasons to assist the user, early feedback and to prevent unnecessary server requests. +On the server-side validation has to be done for consistency and security.

+
+
+

In general we distinguish these forms of validation:

+
+
+
    +
  • +

    stateless validation will produce the same result for given input at any time (for the same code/release).

    +
  • +
  • +

    stateful validation is dependent on other states and can consider the same input data as valid in one case and as invalid in another.

    +
  • +
+
+
+
+

Stateless Validation

+
+

For regular, stateless validation we use the JSR303 standard that is also called bean validation (BV). +Details can be found in the specification. +As implementation we recommend hibernate-validator.

+
+
+
Example
+
+

A description of how to enable BV for spring applications can be found in the relevant Spring documentation. A guide you can use to integrate validation in Quarkus applications can be found here. For a quick summary follow these steps:

+
+
+
    +
  • +

    Make sure that hibernate-validator is located in the classpath by adding a dependency to the pom.xml.

    +
  • +
+
+
+
spring
+
+
    <dependency>
+      <groupId>org.hibernate</groupId>
+      <artifactId>hibernate-validator</artifactId>
+    </dependency>
+
+
+
+
quarkus
+
+
    <dependency>
+      <groupId>io.quarkus</groupId>
+      <artifactId>quarkus-hibernate-validator</artifactId>
+    </dependency>
+
+
+
+
    +
  • +

    For methods to validate go to their declaration and add constraint annotations to the method parameters.

    +
    +

    In spring applications you can add the @Validated annotation to the implementation (spring bean) to be validated (this is an annotation of the spring framework, so it's not available in the Quarkus context). The standard use case is to annotate the logic layer implementation, i.e. the use case implementation or component facade in case of simple logic layer pattern. Thus, the validation will be executed for service requests as well as batch processing.

    +
    +
    +
      +
    • +

      @Valid annotation to the arguments to validate (if that class itself is annotated with constraints to check).

      +
    • +
    • +

      @NotNull for required arguments.

      +
    • +
    • +

      Other constraints (e.g. @Size) for generic arguments (e.g. of type String or Integer). However, consider to create custom datatypes and avoid adding too much validation logic (especially redundant in multiple places).

      +
    • +
    +
    +
  • +
+
+
+
BookingmanagementRestServiceImpl.java
+
+
@Validated
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+  ...
+  public BookingEto saveBooking(@Valid BookingCto booking) {
+  ...
+
+
+
+
    +
  • +

    Finally add appropriate validation constraint annotations to the fields of the ETO class.

    +
  • +
+
+
+
BookingCto.java
+
+
  @Valid
+  private BookingEto booking;
+
+
+
+
BookingEto.java
+
+
  @NotNull
+  @Future
+  private Timestamp bookingDate;
+
+
+
+

A list with all bean validation constraint annotations available for hibernate-validator can be found here. In addition it is possible to configure custom constraints. Therefore it is necessary to implement an annotation and a corresponding validator. A description can also be found in the Spring documentation or with more details in the hibernate documentation.

+
+
+ + + + + +
+ + +Bean Validation in Wildfly >v8: Wildfly v8 is the first version of Wildfly implementing the JEE7 specification. It comes with bean validation based on hibernate-validator out of the box. In case someone is running Spring in Wildfly for whatever reasons, the spring based annotation @Validated would duplicate bean validation at runtime and thus should be omitted. +
+
+
+
+
GUI-Integration
+
+

TODO

+
+
+
+
Cross-Field Validation
+
+

BV has poor support for this. Best practice is to create and use beans for ranges, etc. that solve this. A bean for a range could look like so:

+
+
+
+
public class Range<V extends Comparable<V>> {
+
+  private V min;
+  private V max;
+
+  public Range(V min, V max) {
+
+    super();
+    if ((min != null) && (max != null)) {
+      int delta = min.compareTo(max);
+      if (delta > 0) {
+        throw new ValueOutOfRangeException(null, min, min, max);
+      }
+    }
+    this.min = min;
+    this.max = max;
+  }
+
+  public V getMin() ...
+  public V getMax() ...
+
+
+
+
+
+

Stateful Validation

+
+

For complex and stateful business validations we do not use BV (possible with groups and context, etc.) but follow KISS and just implement this on the server in a straight forward manner. +An example is the deletion of a table in the example application. Here the state of the table must be checked first:

+
+
+

BookingmanagementImpl.java

+
+
+
+
  private void sendConfirmationEmails(BookingEntity booking) {
+
+    if (!booking.getInvitedGuests().isEmpty()) {
+      for (InvitedGuestEntity guest : booking.getInvitedGuests()) {
+        sendInviteEmailToGuest(guest, booking);
+      }
+    }
+
+    sendConfirmationEmailToHost(booking);
+  }
+
+
+
+

Implementing this small check with BV would be a lot more effort.

+
+
+ +
+

==Aspect Oriented Programming (AOP)

+
+
+

AOP is a powerful feature for cross-cutting concerns. However, if used extensively and for the wrong things an application can get unmaintainable. Therefore we give you the best practices where and how to use AOP properly.

+
+
+
+

AOP Key Principles

+
+

We follow these principles:

+
+
+
    +
  • +

    We use spring AOP based on dynamic proxies (and fallback to cglib).

    +
  • +
  • +

    We avoid AspectJ and other mighty and complex AOP frameworks whenever possible

    +
  • +
  • +

    We only use AOP where we consider it as necessary (see below).

    +
  • +
+
+
+
+

AOP Usage

+
+

We recommend to use AOP with care but we consider it established for the following cross cutting concerns:

+
+
+ +
+
+
+

AOP Debugging

+
+

When using AOP with dynamic proxies the debugging of your code can get nasty. As you can see by the red boxes in the call stack in the debugger there is a lot of magic happening while you often just want to step directly into the implementation skipping all the AOP clutter. When using Eclipse this can easily be achieved by enabling step filters. Therefore you have to enable the feature in the Eclipse tool bar (highlighted in red).

+
+
+
+AOP debugging +
+
+
+

In order to properly make this work you need to ensure that the step filters are properly configured:

+
+
+
+Step Filter Configuration +
+
+
+

Ensure you have at least the following step-filters configured and active:

+
+
+
+
ch.qos.logback.*
+com.devonfw.module.security.*
+java.lang.reflect.*
+java.security.*
+javax.persistence.*
+org.apache.commons.logging.*
+org.apache.cxf.jaxrs.client.*
+org.apache.tomcat.*
+org.h2.*
+org.springframework.*
+
+
+
+ +
+

==Exception Handling

+
+
+
+

Exception Principles

+
+

For exceptions we follow these principles:

+
+
+
    +
  • +

    We only use exceptions for exceptional situations and not for programming control flows, etc. Creating an exception in Java is expensive and hence should not be done for simply testing whether something is present, valid or permitted. In the latter case design your API to return this as a regular result.

    +
  • +
  • +

    We use unchecked exceptions (RuntimeException) [1]

    +
  • +
  • +

    We distinguish internal exceptions and user exceptions:

    +
    +
      +
    • +

      Internal exceptions have technical reasons. For unexpected and exotic situations, it is sufficient to throw existing exceptions such as IllegalStateException. For common scenarios a own exception class is reasonable.

      +
    • +
    • +

      User exceptions contain a message explaining the problem for end users. Therefore, we always define our own exception classes with a clear, brief, but detailed message.

      +
    • +
    +
    +
  • +
  • +

    Our own exceptions derive from an exception base class supporting

    + +
  • +
+
+
+

All this is offered by mmm-util-core, which we propose as a solution. +If you use the devon4j-rest module, this is already included. For Quarkus applications, you need to add the dependency manually.

+
+
+

If you want to avoid additional dependencies, you can implement your own solution for this by creating an abstract exception class ApplicationBusinessException extending from RuntimeException. For an example of this, see our Quarkus reference application.

+
+
+
+

Exception Example

+
+

Here is an exception class from our sample application:

+
+
+
+
public class IllegalEntityStateException extends ApplicationBusinessException {
+
+  private static final long serialVersionUID = 1L;
+
+  public IllegalEntityStateException(Object entity, Object state) {
+
+    this((Throwable) null, entity, state);
+  }
+
+
+  public IllegalEntityStateException(Object entity, Object currentState, Object newState) {
+
+    this(null, entity, currentState, newState);
+  }
+
+  public IllegalEntityStateException(Throwable cause, Object entity, Object state) {
+
+    super(cause, createBundle(NlsBundleApplicationRoot.class).errorIllegalEntityState(entity, state));
+  }
+
+  public IllegalEntityStateException(Throwable cause, Object entity, Object currentState, Object newState) {
+
+    super(cause, createBundle(NlsBundleApplicationRoot.class).errorIllegalEntityStateChange(entity, currentState,
+        newState));
+  }
+
+}
+
+
+
+

The message templates are defined in the interface NlsBundleApplicationRoot as follows:

+
+
+
+
public interface NlsBundleApplicationRoot extends NlsBundle {
+
+
+  @NlsBundleMessage("The entity {entity} is in state {state}!")
+  NlsMessage errorIllegalEntityState(@Named("entity") Object entity, @Named("state") Object state);
+
+
+  @NlsBundleMessage("The entity {entity} in state {currentState} can not be changed to state {newState}!")
+  NlsMessage errorIllegalEntityStateChange(@Named("entity") Object entity, @Named("currentState") Object currentState,
+      @Named("newState") Object newState);
+
+
+  @NlsBundleMessage("The property {property} of object {object} can not be changed!")
+  NlsMessage errorIllegalPropertyChange(@Named("object") Object object, @Named("property") Object property);
+
+  @NlsBundleMessage("There is currently no user logged in")
+  NlsMessage errorNoActiveUser();
+
+
+
+
+

Handling Exceptions

+
+

For catching and handling exceptions we follow these rules:

+
+
+
    +
  • +

    We do not catch exceptions just to wrap or to re-throw them.

    +
  • +
  • +

    If we catch an exception and throw a new one, we always have to provide the original exception as cause to the constructor of the new exception.

    +
  • +
  • +

    At the entry points of the application (e.g. a service operation) we have to catch and handle all throwables. This is done via the exception-facade-pattern via an explicit facade or aspect. The devon4j-rest module already provides ready-to-use implementations for this such as RestServiceExceptionFacade that you can use in your Spring application. For Quarkus, follow the Quarkus guide on exception handling.
    +The exception facade has to …​

    +
    +
      +
    • +

      log all errors (user errors on info and technical errors on error level)

      +
    • +
    • +

      ensure that the entire exception is passed to the logger (not only the message) so that the logger can capture the entire stacktrace and the root cause is not lost.

      +
    • +
    • +

      convert the error to a result appropriable for the client and secure for Sensitive Data Exposure. Especially for security exceptions only a generic security error code or message may be revealed but the details shall only be logged but not be exposed to the client. All internal exceptions are converted to a generic error with a message like:

      +
      +
      +
      +

      An unexpected technical error has occurred. We apologize any inconvenience. Please try again later.

      +
      +
      +
      +
    • +
    +
    +
  • +
+
+
+
+

Common Errors

+
+

The following errors may occur in any devon application:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 9. Common Exceptions
CodeMessageLink

TechnicalError

An unexpected error has occurred! We apologize any inconvenience. Please try again later.

TechnicalErrorUserException.java

ServiceInvoke

«original message of the cause»

ServiceInvocationFailedException.java

+
+ +
+

==Internationalization +Internationalization (I18N) is about writing code independent from locale-specific information. +For I18N of text messages we are suggesting +mmm native-language-support.

+
+
+

In devonfw we have developed a solution to manage text internationalization. devonfw solution comes into two aspects:

+
+
+
    +
  • +

    Bind locale information to the user.

    +
  • +
  • +

    Get the messages in the current user locale.

    +
  • +
+
+
+
+

Binding locale information to the user

+
+

We have defined two different points to bind locale information to the user, depending on whether the user is authenticated or not.

+
+
+
    +
  • +

    User not authenticated: devonfw intercepts the unsecured request and extracts the locale from it. At first, we try to extract a language parameter from the request and, if that is not possible, we extract the locale from the Accept-Language header.

    +
  • +
  • +

    User authenticated. During the login process, application developers are responsible for filling the language parameter in the UserProfile class. This language parameter could be obtained from DB, LDAP, request, etc. In the devonfw sample we get the locale information from the database.

    +
  • +
+
+
+

This image shows the entire process:

+
+
+
+Internationalization +
+
+
+
+

Getting internationalized messages

+
+

devonfw has a bean that manages i18n message resolution, the ApplicationLocaleResolver. This bean is responsible for getting the current user, extracting the locale information from it and reading the correct properties file to get the message.

+
+
+

The i18n properties file must be called ApplicationMessages_la_CO.properties where la=language and CO=country. This is an example of an i18n properties file for the English language to translate devonfw sample user roles:

+
+
+

ApplicationMessages_en_US.properties

+
+
+
+
admin=Admin
+
+
+
+

You should define an ApplicationMessages_la_CO.properties file for every language that your application needs.

+
+
+

ApplicationLocaleResolver bean is injected in AbstractComponentFacade class so you have available this bean in logic layer so you only need to put this code to get an internationalized message:

+
+
+
+
String msg = getApplicationLocaleResolver().getMessage("mymessage");
+
+
+
+ +
+

==XML

+
+
+

XML (Extensible Markup Language) is a W3C standard format for structured information. It has a large eco-system of additional standards and tools.

+
+
+

In Java there are many different APIs and frameworks for accessing, producing and processing XML. For the devonfw we recommend to use JAXB for mapping Java objects to XML and vice-versa. Further there is the popular DOM API for reading and writing smaller XML documents directly. When processing large XML documents StAX is the right choice.

+
+
+
+

JAXB

+
+

We use JAXB to serialize Java objects to XML or vice-versa.

+
+
+
JAXB and Inheritance
+
+

Use @XmlSeeAlso annotation to provide sub-classes. +See section "Collective Polymorphism" described here.

+
+
+
+
JAXB Custom Mapping
+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to define a custom mapping. If you create dedicated objects for the XML mapping you can easily avoid such situations. When this is not suitable use @XmlJavaTypeAdapter and provide an XmlAdapter implementation that handles the mapping. +For details see here.

+
+
+
+
+

Security

+
+

To prevent XML External Entity attacks, follow JAXP Security Guide and enable FSP.

+
+
+ +
+

==JSON

+
+
+

JSON (JavaScript Object Notation) is a popular format to represent and exchange data especially for modern web-clients. For mapping Java objects to JSON and vice-versa there is no official standard API. We use the established and powerful open-source solution Jackson. +Due to problems with the wiki of fasterxml you should try this alternative link: Jackson/AltLink.

+
+
+
+

Configure JSON Mapping

+
+

In order to avoid polluting business objects with proprietary Jackson annotations (e.g. @JsonTypeInfo, @JsonSubTypes, @JsonProperty) we propose to create a separate configuration class. Every devonfw application (sample or any app created from our app-template) therefore has a class called ApplicationObjectMapperFactory that extends ObjectMapperFactory from the devon4j-rest module. It looks like this:

+
+
+
+
@Named("ApplicationObjectMapperFactory")
+public class ApplicationObjectMapperFactory extends ObjectMapperFactory {
+
+  public ApplicationObjectMapperFactory() {
+    super();
+    // JSON configuration code goes here
+  }
+}
+
+
+
+
+

JSON and Inheritance

+
+

If you are using inheritance for your objects mapped to JSON then polymorphism can not be supported out-of-the box. So in general avoid polymorphic objects in JSON mapping. However, this is not always possible. +Have a look at the following example from our sample application:

+
+
+
+inheritance class diagram +
+
Figure 3. Transfer-Objects using Inheritance
+
+
+

Now assume you have a REST service operation as Java method that takes a ProductEto as argument. As this is an abstract class the server needs to know the actual sub-class to instantiate. +We typically do not want to specify the classname in the JSON as this should be an implementation detail and not part of the public JSON format (e.g. in case of a service interface). Therefore we use a symbolic name for each polymorphic subtype that is provided as virtual attribute @type within the JSON data of the object:

+
+
+
+
{ "@type": "Drink", ... }
+
+
+
+

Therefore you add configuration code to the constructor of ApplicationObjectMapperFactory. Here you can see an example from the sample application:

+
+
+
+
setBaseClasses(ProductEto.class);
+addSubtypes(new NamedType(MealEto.class, "Meal"), new NamedType(DrinkEto.class, "Drink"),
+  new NamedType(SideDishEto.class, "SideDish"));
+
+
+
+

We use setBaseClasses to register all top-level classes of polymorphic objects. Further we declare all concrete polymorphic sub-classes together with their symbolic name for the JSON format via addSubtypes.

+
+
+
+

Custom Mapping

+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to define a custom mapping. If you create objects dedicated for the JSON mapping you can easily avoid such situations. When this is not suitable follow these instructions to define the mapping:

+
+
+
    +
  1. +

    As an example, the use of JSR354 (javax.money) is appreciated in order to process monetary amounts properly. However, without custom mapping, the default mapping of Jackson will produce the following JSON for a MonetaryAmount:

    +
    +
    +
    "currency": {"defaultFractionDigits":2, "numericCode":978, "currencyCode":"EUR"},
    +"monetaryContext": {...},
    +"number":6.99,
    +"factory": {...}
    +
    +
    +
    +

    As clearly can be seen, the JSON contains too much information and reveals implementation secrets that do not belong here. Instead the JSON output expected and desired would be:

    +
    +
    +
    +
    "currency":"EUR","amount":"6.99"
    +
    +
    +
    +

    Even worse, when we send the JSON data to the server, Jackson will see that MonetaryAmount is an interface and does not know how to instantiate it so the request will fail. +Therefore we need a customized Serializer.

    +
    +
  2. +
  3. +

    We implement MonetaryAmountJsonSerializer to define how a MonetaryAmount is serialized to JSON:

    +
    +
    +
    public final class MonetaryAmountJsonSerializer extends JsonSerializer<MonetaryAmount> {
    +
    +  public static final String NUMBER = "amount";
    +  public static final String CURRENCY = "currency";
    +
    +  public void serialize(MonetaryAmount value, JsonGenerator jgen, SerializerProvider provider) throws ... {
    +    if (value != null) {
    +      jgen.writeStartObject();
    +      jgen.writeFieldName(MonetaryAmountJsonSerializer.CURRENCY);
    +      jgen.writeString(value.getCurrency().getCurrencyCode());
    +      jgen.writeFieldName(MonetaryAmountJsonSerializer.NUMBER);
    +      jgen.writeString(value.getNumber().toString());
    +      jgen.writeEndObject();
    +    }
    +  }
    +
    +
    +
    +

    For composite datatypes it is important to wrap the info as an object (writeStartObject() and writeEndObject()). MonetaryAmount provides the information we need via getCurrency() and getNumber(), so we can easily write them into the JSON data.

    +
    +
  4. +
  5. +

    Next, we implement MonetaryAmountJsonDeserializer to define how a MonetaryAmount is deserialized back as Java object from JSON:

    +
    +
    +
    public final class MonetaryAmountJsonDeserializer extends AbstractJsonDeserializer<MonetaryAmount> {
    +  protected MonetaryAmount deserializeNode(JsonNode node) {
    +    BigDecimal number = getRequiredValue(node, MonetaryAmountJsonSerializer.NUMBER, BigDecimal.class);
    +    String currencyCode = getRequiredValue(node, MonetaryAmountJsonSerializer.CURRENCY, String.class);
    +    MonetaryAmount monetaryAmount =
    +        MonetaryAmounts.getAmountFactory().setNumber(number).setCurrency(currencyCode).create();
    +    return monetaryAmount;
    +  }
    +}
    +
    +
    +
    +

    For composite datatypes we extend from AbstractJsonDeserializer as this makes our task easier. So we already get a JsonNode with the parsed payload of our datatype. Based on this API it is easy to retrieve individual fields from the payload without taking care of their order, etc. +AbstractJsonDeserializer also provides methods such as getRequiredValue to read required fields and get them converted to the desired basis datatype. So we can easily read the amount and currency and construct an instance of MonetaryAmount via the official factory API.

    +
    +
  6. +
  7. +

    Finally we need to register our custom (de)serializers with the following configuration code in the constructor of ApplicationObjectMapperFactory:+

    +
  8. +
+
+
+
+
  SimpleModule module = getExtensionModule();
+  module.addDeserializer(MonetaryAmount.class, new MonetaryAmountJsonDeserializer());
+  module.addSerializer(MonetaryAmount.class, new MonetaryAmountJsonSerializer());
+
+
+
+

Now we can read and write MonetaryAmount from and to JSON as expected.

+
+
+ +
+

==REST +REST (REpresentational State Transfer) is an inter-operable protocol for services that is more lightweight than SOAP. +However, it is no real standard and can cause confusion (see REST philosophy). +Therefore we define best practices here to guide you.

+
+
+
+

URLs

+
+

URLs are not case sensitive. Hence, we follow the best practice to use only lower-case-letters-with-hyphen-to-separate-words. +For operations in REST we distinguish the following types of URLs:

+
+
+
    +
  • +

    A collection URL is built from the rest service URL by appending the name of a collection. This is typically the name of an entity. Such URL identifies the entire collection of all elements of this type. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity

    +
  • +
  • +

    An element URL is built from a collection URL by appending an element ID. It identifies a single element (entity) within the collection. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/42

    +
  • +
+
+
+

To follow KISS avoid using plural forms (…​/productmanagement/v1/products vs. …​/productmanagement/v1/product/42). Always use singular forms and avoid confusions (except for the rare cases where no singular exists).

+
+
+

The REST URL scheme fits perfect for CRUD operations. +For business operations (processing, calculation, advanced search, etc.) we simply append a collection URL with the name of the business operation. +Then we can POST the input for the business operation and get the result back. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/search

+
+
+
+

HTTP Methods

+
+

The following table defines the HTTP methods (verbs) and their meaning:

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 10. Usage of HTTP methods
HTTP MethodMeaning

GET

Read data (stateless).

PUT

Create or update data.

POST

Process data.

DELETE

Delete an entity.

+
+

Please also note that for (large) bulk deletions you may be forced to use POST instead of DELETE as according to the HTTP standard DELETE must not have a payload and URLs are limited in length.

+
+
+

For general recommendations on HTTP methods for collection and element URLs see REST@wikipedia.

+
+
+
+

HTTP Status Codes

+
+

Further we define how to use the HTTP status codes for REST services properly. In general the 4xx codes correspond to an error on the client side and the 5xx codes to an error on the server side.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 11. Usage of HTTP status codes
HTTP CodeMeaningResponseComment

200

OK

requested result

Result of successful GET

204

No Content

none

Result of successful POST, DELETE, or PUT with empty result (void return)

400

Bad Request

error details

The HTTP request is invalid (parse error, validation failed)

401

Unauthorized

none

Authentication failed

403

Forbidden

none

Authorization failed

404

Not found

none

Either the service URL is wrong or the requested resource does not exist

500

Server Error

error code, UUID

Internal server error occurred, in case of an exception, see REST exception handling

+
+
+

JAX-RS

+
+

For implementing REST services we use the JAX-RS standard. +As payload encoding we recommend JSON bindings using Jackson. +To implement a REST service you simply add JAX-RS annotations. +Here is a simple example:

+
+
+
+
@ApplicationScoped
+@Path("/imagemanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public class ImagemanagementRestService {
+
+  @Inject
+  private Imagemanagement imagemanagement;
+
+  @GET
+  @Path("/image/{id}/")
+  public ImageDto getImage(@PathParam("id") long id) {
+
+    return this.imagemanagement.findImage(id);
+  }
+}
+
+
+
+

Here we can see a REST service for the business component imagemanagement. The method getImage can be accessed via HTTP GET (see @GET) under the URL path imagemanagement/image/{id} (see @Path annotations) where {id} is the ID of the requested image and will be extracted from the URL and provided as parameter id to the method getImage. It will return its result (ImageDto) as JSON (see @Produces annotation - you can also extend RestService marker interface that defines these annotations for JSON). As you can see it delegates to the logic component imagemanagement that contains the actual business logic while the service itself only exposes this logic via HTTP. The REST service implementation is a regular CDI bean that can use dependency injection.

+
+
+ + + + + +
+ + +With JAX-RS it is important to make sure that each service method is annotated with the proper HTTP method (@GET,@POST,etc.) to avoid unnecessary debugging. So you should take care not to forget to specify one of these annotations. +
+
+
+
Service-Interface
+
+

You may also separate API and implementation in case you want to reuse the API for service-client:

+
+
+
+
@Path("/imagemanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public interface ImagemanagementRestService {
+
+  @GET
+  @Path("/image/{id}/")
+  ImageEto getImage(@PathParam("id") long id);
+
+}
+
+@Named("ImagemanagementRestService")
+public class ImagemanagementRestServiceImpl implements ImagemanagementRestService {
+
+  @Override
+  public ImageEto getImage(long id) {
+
+    return this.imagemanagement.findImage(id);
+  }
+
+}
+
+
+
+
+
JAX-RS Configuration
+
+

Starting from CXF 3.0.0 it is possible to enable the auto-discovery of JAX-RS roots.

+
+
+

When the JAX-RS server is instantiated, all the scanned root and provider beans (beans annotated with javax.ws.rs.Path and javax.ws.rs.ext.Provider) are configured.

+
+
+
+
REST Exception Handling
+
+

For exceptions, a service needs to have an exception facade that catches all exceptions and handles them by writing proper log messages and mapping them to a HTTP response with a corresponding HTTP status code. +For this, devon4j provides a generic solution via RestServiceExceptionFacade that you can use within your Spring applications. You need to follow the exception guide in order for it to work out of the box because the facade needs to be able to distinguish between business and technical exceptions. +To implement a generic exception facade in Quarkus, follow the Quarkus exception guide.

+
+
+

Now your service may throw exceptions, but the facade will automatically handle them for you.

+
+
+

The general format for returning an error to the client is as follows:

+
+
+
+
{
+  "message": "A human-readable message describing the error",
+  "code": "A code identifying the concrete error",
+  "uuid": "An identifier (generally the correlation id) to help identify corresponding requests in logs"
+}
+
+
+
+
+
Pagination details
+
+

We recommend to use spring-data repositories for database access that already comes with pagination support. +Therefore, when performing a search, you can include a Pageable object. +Here is a JSON example for it:

+
+
+
+
{ "pageSize": 20, "pageNumber": 0, "sort": [] }
+
+
+
+

By increasing the pageNumber the client can browse and page through the hits.

+
+
+

As a result you will receive a Page. +It is a container for your search results just like a Collection but additionally contains pagination information for the client. +Here is a JSON example:

+
+
+
+
{ "totalElements": 1022,
+  pageable: { "pageSize": 20, "pageNumber": 0 },
+  content: [ ... ] }
+
+
+
+

The totalElements property contains the total number of hits. +This can be used by the client to compute the total number of pages and render the pagination links accordingly. +Via the pageable property the client gets back the Pageable properties from the search request. +The actual hits for the current page are returned as array in the content property.

+
+
+
+
+

REST Testing

+
+

For testing REST services in general consult the testing guide.

+
+
+

For manual testing REST services there are browser plugins:

+
+
+ +
+
+
+

Security

+
+

Your services are the major entry point to your application. Hence security considerations are important here.

+
+
+
CSRF
+
+

A common security threat is CSRF for REST services. Therefore all REST operations that are performing modifications (PUT, POST, DELETE, etc. - all except GET) have to be secured against CSRF attacks. See CSRF how to do this.

+
+
+
+
JSON top-level arrays
+
+

OWASP earlier suggested to never return JSON arrays at the top-level, to prevent attacks without rationale. +We digged deep and found anatomy-of-a-subtle-json-vulnerability. +To sum it up the attack is many years old and does not work in any recent or relevant browser. +Hence it is fine to use arrays as top-level result in a JSON REST service (means you can return List<Foo> in a Java JAX-RS service).

+
+
+ +
+

==SOAP +SOAP is a common protocol for services that is rather complex and heavy. It allows to build inter-operable and well specified services (see WSDL). SOAP is transport neutral, which is not only an advantage. We strongly recommend to use HTTPS transport and ignore additional complex standards like WS-Security and use established HTTP-Standards such as RFC2617 (and RFC5280).

+
+
+
+
+

JAX-WS

+
+

For building web-services with Java we use the JAX-WS standard. +There are two approaches:

+
+
+
    +
  • +

    code first

    +
  • +
  • +

    contract first

    +
  • +
+
+
+

Here is an example in case you define a code-first service.

+
+
+
Web-Service Interface
+
+

We define a regular interface to define the API of the service and annotate it with JAX-WS annotations:

+
+
+
+
@WebService
+public interface TablemanagmentWebService {
+
+  @WebMethod
+  @WebResult(name = "message")
+  TableEto getTable(@WebParam(name = "id") String id);
+
+}
+
+
+
+
+
Web-Service Implementation
+
+

And here is a simple implementation of the service:

+
+
+
+
@Named
+@WebService(endpointInterface = "com.devonfw.application.mtsj.tablemanagement.service.api.ws.TablemanagmentWebService")
+public class TablemanagementWebServiceImpl implements TablemanagmentWebService {
+
+  private Tablemanagement tableManagement;
+
+  @Override
+  public TableEto getTable(String id) {
+
+    return this.tableManagement.findTable(id);
+  }
+
+
+
+
+
+

SOAP Custom Mapping

+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to write adapters for JAXB (see XML).

+
+
+
+

SOAP Testing

+
+

For testing SOAP services in general consult the testing guide.

+
+
+

For testing SOAP services manually we strongly recommend SoapUI.

+
+
+ +
+

==Service Client

+
+
+

This guide is about consuming (calling) services from other applications (micro-services). For providing services, see the Service-Layer Guide. Services can be consumed by the client or the server. As the client is typically not written in Java, you should consult the according guide for your client technology. In case you want to call a service within your Java code, this guide is the right place to get help.

+
+
+
+

Motivation

+
+

Various solutions already exist for calling services, such as RestTemplate from spring or the JAX-RS client API. Furthermore, each and every service framework offers its own API as well. These solutions might be suitable for very small and simple projects (with one or two such invocations). However, with the trend of microservices, the invocation of a service becomes a very common use-case that occurs all over the place. You typically need a solution that is very easy to use but supports flexible configuration, adding headers for authentication, mapping of errors from the server, logging success/errors with duration for performance analysis, support for synchronous and asynchronous invocations, etc. This is exactly what this devon4j service-client solution brings to you.

+
+
+
+

Usage

+
+

Spring

+
+
+

For Spring, follow the Spring rest-client guide.

+
+
+

Quarkus

+
+
+

For Quarkus, we recommend to follow the official Quarkus rest-client guide

+
+
+ +
+

==Testing

+
+
+
+

General best practices

+
+

For testing please follow our general best practices:

+
+
+
    +
  • +

    Tests should have a clear goal that should also be documented.

    +
  • +
  • +

    Tests have to be classified into different integration levels.

    +
  • +
  • +

    Tests should follow a clear naming convention.

    +
  • +
  • +

    Automated tests need to properly assert the result of the tested operation(s) in a reliable way. E.g. avoid stuff like assertThat(service.getAllEntities()).hasSize(42) or even worse tests that have no assertion at all.

    +
  • +
  • +

    Tests need to be independent of each other. Never write test-cases or tests (in Java @Test methods) that depend on another test to be executed before.

    +
  • +
  • +

    Use AssertJ to write good readable and maintainable tests that also provide valuable feedback in case a test fails. Do not use legacy JUnit methods like assertEquals anymore!

    +
  • +
  • +

    For easy understanding divide your test in three commented sections:

    +
    +
      +
    • +

      //given

      +
    • +
    • +

      //when

      +
    • +
    • +

      //then

      +
    • +
    +
    +
  • +
  • +

    Plan your tests and test data management properly before implementing.

    +
  • +
  • +

    Instead of having a too strong focus on test coverage better ensure you have covered your critical core functionality properly and review the code including tests.

    +
  • +
  • +

    Test code shall NOT be seen as second class code. You shall consider design, architecture and code-style also for your test code but do not over-engineer it.

    +
  • +
  • +

    Test automation is good but should be considered in relation to cost per use. Creating full coverage via automated system tests can cause a massive amount of test-code that can turn out as a huge maintenance hell. Always consider all aspects including product life-cycle, criticality of use-cases to test, and variability of the aspect to test (e.g. UI, test-data).

    +
  • +
  • +

    Use continuous integration and establish that the entire team wants to have clean builds and running tests.

    +
  • +
  • +

    Prefer delegation over inheritance for cross-cutting testing functionality. Good places to put this kind of code can be realized and reused via the JUnit @Rule mechanism.

    +
  • +
+
+
+
+

Test Automation Technology Stack

+
+

For test automation we use JUnit. However, we are strictly doing all assertions with AssertJ. For mocking we use Mockito. +In order to mock remote connections we use WireMock.

+
+
+

For testing entire components or sub-systems we recommend to use for Spring stack spring-boot-starter-test as lightweight and fast testing infrastructure that is already shipped with devon4j-test. For Quarkus, you can add the necessary extensions manually such as quarkus-junit5, quarkus-junit5-mockito, assertj-core etc.

+
+
+

In case you have to use a full blown JEE application server, we recommend to use arquillian. To get started with arquillian, look here.

+
+
+
+

Test Doubles

+
+

We use test doubles as generic term for mocks, stubs, fakes, dummies, or spys to avoid confusion. Here is a short summary from stubs VS mocks:

+
+
+
    +
  • +

    Dummy objects specifying no logic at all. May declare data in a POJO style to be used as boiler plate code to parameter lists or even influence the control flow towards the test’s needs.

    +
  • +
  • +

    Fake objects actually have working implementations, but usually take some shortcut which makes them not suitable for production (an in memory database is a good example).

    +
  • +
  • +

    Stubs provide canned answers to calls made during the test, usually not responding at all to anything outside what’s programmed in for the test. Stubs may also record information about calls, such as an email gateway stub that remembers the messages it 'sent', or maybe only how many messages it 'sent'.

    +
  • +
  • +

    Mocks are objects pre-programmed with expectations, which form a specification of the calls they are expected to receive.

    +
  • +
+
+
+

We try to give some examples, which should make it somehow clearer:

+
+
+
Stubs
+
+

Best Practices for applications:

+
+
+
    +
  • +

    A good way to replace small to medium large boundary systems, whose impact (e.g. latency) should be ignored during load and performance tests of the application under development.

    +
  • +
  • +

    As stub implementation will rely on state-based verification, there is the threat that test developers will partially reimplement the state transitions based on the replaced code. This will immediately lead to a black maintenance hole, so better use mocks to ensure the expected behavior on the interface level.

    +
  • +
  • +

    Do NOT use stubs as basis of a large amount of test cases as due to state-based verification of stubs, test developers will enrich the stub implementation to become a large monster with its own hunger after maintenance efforts.

    +
  • +
+
+
+
+
Mocks
+
+

Best Practices for applications:

+
+
+
    +
  • +

    Replace not-needed dependencies of your system-under-test (SUT) to minimize the application context to start of your component framework.

    +
  • +
  • +

    Replace dependencies of your SUT to impact the control flow under test without establishing all the context parameters needed to match the control flow.

    +
  • +
  • +

    Remember: Not everything has to be mocked! Especially on lower levels of tests like isolated module tests you can be betrayed into a mocking delusion, where you end up in a hundred lines of code mocking the whole context and five lines executing the test and verifying the mocks behavior. Always keep in mind the benefit-cost ratio, when implementing tests using mocks.

    +
  • +
+
+
+
+
WireMock
+
+

If you need to mock remote connections such as HTTP-Servers, WireMock offers easy to use functionality. For a full description see the homepage or the github repository. Wiremock can be used either as a JUnit Rule, in Java outside of JUnit or as a standalone process. The mocked server can be configured to respond to specific requests in a given way via a fluent Java API, JSON files and JSON over HTTP. An example as an integration to JUnit can look as follows.

+
+
+
+
import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig;
+import com.github.tomakehurst.wiremock.junit.WireMockRule;
+
+public class WireMockOfferImport{
+
+  @Rule
+  public WireMockRule mockServer = new WireMockRule(wireMockConfig().dynamicPort());
+
+  @Test
+  public void requestDataTest() throws Exception {
+  int port = this.mockServer.port();
+  ...}
+
+
+
+

This creates a server on a randomly chosen free port on the running machine. You can also specify the port to be used if wanted. Other than that there are several options to further configure the server. This includes HTTPs, proxy settings, file locations, logging and extensions.

+
+
+
+
  @Test
+  public void requestDataTest() throws Exception {
+      this.mockServer.stubFor(get(urlEqualTo("/new/offers")).withHeader("Accept", equalTo("application/json"))
+      .withHeader("Authorization", containing("Basic")).willReturn(aResponse().withStatus(200).withFixedDelay(1000)
+      .withHeader("Content-Type", "application/json").withBodyFile("/wireMockTest/jsonBodyFile.json")));
+  }
+
+
+
+

This will stub the URL localhost:port/new/offers to respond with a status 200 message containing a header (Content-Type: application/json) and a body with content given in jsonBodyFile.json if the request matches several conditions. +It has to be a GET request to ../new/offers with the two given header properties.

+
+
+

Note that by default files are located in src/test/resources/__files/. When using only one WireMock server one can omit the this.mockServer before the stubFor call (static method). +You can also add a fixed delay to the response or processing delay with WireMock.addRequestProcessingDelay(time) in order to test for timeouts.

+
+
+

WireMock can also respond with different corrupted messages to simulate faulty behaviour.

+
+
+
+
@Test(expected = ResourceAccessException.class)
+public void faultTest() {
+
+    this.mockServer.stubFor(get(urlEqualTo("/fault")).willReturn(aResponse()
+    .withFault(Fault.MALFORMED_RESPONSE_CHUNK)));
+...}
+
+
+
+

A GET request to ../fault returns an OK status header, then garbage, and then closes the connection.

+
+
+
+
+

Integration Levels

+
+

There are many discussions about the right level of integration for test automation. Sometimes it is better to focus on small, isolated modules of the system - whatever a "module" may be. In other cases it makes more sense to test integrated groups of modules. Because there is no universal answer to this question, devonfw only defines a common terminology for what could be tested. Each project must make its own decision where to put the focus of test automation. There is no worldwide accepted terminology for the integration levels of testing. In general we consider ISTQB. However, with a technical focus on test automation we want to get more precise.

+
+
+

The following picture shows a simplified view of an application based on the devonfw reference architecture. We define four integration levels that are explained in detail below. +The boxes in the picture contain parenthesized numbers. These numbers depict the lowest integration level a box belongs to. Higher integration levels also contain all boxes of lower integration levels. When writing tests for a given integration level, related boxes with a lower integration level must be replaced by test doubles or drivers.

+
+
+
+Integration Levels +
+
+
+

The main difference between the integration levels is the amount of infrastructure needed to test them. The more infrastructure you need, the more bugs you will find, but the more instable and the slower your tests will be. So each project has to make a trade-off between pros and contras of including much infrastructure in tests and has to select the integration levels that fit best to the project.

+
+
+

Consider that more infrastructure does not automatically lead to a better bug-detection. There may be bugs in your software that are masked by bugs in the infrastructure. The best way to find those bugs is to test with very little infrastructure.

+
+
+

External systems do not belong to any of the integration levels defined here. devonfw does not recommend involving real external systems in test automation. This means, they have to be replaced by test doubles in automated tests. An exception may be external systems that are fully under control of the own development team.

+
+
+

The following chapters describe the four integration levels.

+
+
+
Level 1 Module Test
+
+

The goal of an isolated module test is to provide fast feedback to the developer. Consequently, isolated module tests must not have any interaction with the client, the database, the file system, the network, etc.

+
+
+

An isolated module test is testing a single class or at least a small set of classes in isolation. If such classes depend on other components or external resources, etc., these shall be replaced with a test double.

+
+
+
+
public class MyClassTest extends ModuleTest {
+
+  @Test
+  public void testMyClass() {
+
+    // given
+    MyClass myClass = new MyClass();
+    // when
+    String value = myClass.doSomething();
+    // then
+    assertThat(value).isEqualTo("expected value");
+  }
+
+}
+
+
+
+

For an advanced example see here.

+
+
+
+
Level 2 Component Test
+
+

A component test aims to test components or component parts as a unit. +These tests can access resources such as a database (e.g. for DAO tests). +Further, no remote communication is intended here. Access to external systems shall be replaced by a test double.

+
+
+
    +
  • +

    For Spring stack, they are typically run with a (light-weight) infrastructure such as spring-boot-starter-test. A component-test is illustrated in the following example:

    +
    +
    +
    @SpringBootTest(classes = { MySpringBootApp.class }, webEnvironment = WebEnvironment.NONE)
    +public class UcFindCountryTest extends ComponentTest {
    +  @Inject
    +  private UcFindCountry ucFindCountry;
    +
    +  @Test
    +  public void testFindCountry() {
    +
    +    // given
    +    String countryCode = "de";
    +
    +    // when
    +    TestUtil.login("user", MyAccessControlConfig.FIND_COUNTRY);
    +    CountryEto country = this.ucFindCountry.findCountry(countryCode);
    +
    +    // then
    +    assertThat(country).isNotNull();
    +    assertThat(country.getCountryCode()).isEqualTo(countryCode);
    +    assertThat(country.getName()).isEqualTo("Germany");
    +  }
    +}
    +
    +
    +
    +

    This test will start the entire spring-context of your app (MySpringBootApp). Within the test spring will inject according spring-beans into all your fields annotated with @Inject. In the test methods you can use these spring-beans and perform your actual tests. This pattern can be used for testing DAOs/Repositories, Use-Cases, or any other spring-bean with its entire configuration including database and transactions.

    +
    +
  • +
  • +

    For Quarkus, you can similarly inject the CDI beans and perform tests. An example is shown below:

    +
    +
    +
    @QuarkusTest
    +public class UcFindCountryTest {
    +  @Inject
    +  private UcFindCountry ucFindCountry;
    +  ...
    +
    +
    +
  • +
+
+
+

When you are testing use-cases your authorization will also be in place. Therefore, you have to simulate a logon in advance, which is done via the login method in the above Spring example. The test-infrastructure will automatically do a logout for you after each test method in doTearDown.

+
+
+
+
Level 3 Subsystem Test
+
+

A subsystem test runs against the external interfaces (e.g. HTTP service) of the integrated subsystem. Subsystem tests of the client subsystem are described in the devon4ng testing guide. In devon4j the server (JEE application) is the subsystem under test. The tests act as a client (e.g. service consumer) and the server has to be integrated and started in a container.

+
+
+
    +
  • +

    With devon4j and Spring you can write a subsystem-test as easy as illustrated in the following example:

    +
    +
    +
    @SpringBootTest(classes = { MySpringBootApp.class }, webEnvironment = WebEnvironment.RANDOM_PORT)
    +public class CountryRestServiceTest extends SubsystemTest {
    +
    +  @Inject
    +  private ServiceClientFactory serviceClientFactory;
    +
    +  @Test
    +  public void testFindCountry() {
    +
    +    // given
    +    String countryCode = "de";
    +
    +    // when
    +    CountryRestService service = this.serviceClientFactory.create(CountryRestService.class);
    +    CountryEto country = service.findCountry(countryCode);
    +
    +    // then
    +    assertThat(country).isNotNull();
    +    assertThat(country.getCountryCode()).isEqualTo(countryCode);
    +    assertThat(country.getName()).isEqualTo("Germany");
    +  }
    +}
    +
    +
    +
    +

    Even though not obvious on the first look this test will start your entire application as a server on a free random port (so that it works in CI with parallel builds for different branches) and tests the invocation of a (REST) service including (un)marshalling of data (e.g. as JSON) and transport via HTTP (all in the invocation of the findCountry method).

    +
    +
  • +
+
+
+

Do not confuse a subsystem test with a system integration test. A system integration test validates the interaction of several systems where we do not recommend test automation.

+
+
+
+
Level 4 System Test
+
+

A system test has the goal to test the system as a whole against its official interfaces such as its UI or batches. The system itself runs as a separate process in a way close to a regular deployment. Only external systems are simulated by test doubles.

+
+
+

The devonfw only gives advice for automated system tests (TODO see allure testing framework). In nearly every project there must be manual system tests, too. These manual system tests are out of scope here.

+
+
+
+
Classifying Integration-Levels
+
+

For Spring stack, devon4j defines Category-Interfaces that shall be used as JUnit Categories. +Also devon4j provides abstract base classes that you may extend in your test-cases if you like.

+
+
+

devon4j further pre-configures the maven build to only run integration levels 1-2 by default (e.g. for fast feedback in continuous integration). It offers the profiles subsystemtest (1-3) and systemtest (1-4). In your nightly build you can simply add -Psystemtest to run all tests.

+
+
+
+
+

Implementation

+
+

This section introduces how to implement tests on the different levels with the given devonfw infrastructure and the proposed frameworks. +For Spring, see Spring Test Implementation

+
+
+
+

Regression testing

+
+

When it comes to complex output (even binary) that you want to regression test by comparing with an expected result, you should consider Approval Tests using ApprovalTests.Java. +If applied for the right problems, it can be very helpful.

+
+
+
+

Deployment Pipeline

+
+

A deployment pipeline is a semi-automated process that gets software-changes from version control into production. It contains several validation steps, e.g. automated tests of all integration levels. +Because devon4j should fit to different project types - from agile to waterfall - it does not define a standard deployment pipeline. But we recommend to define such a deployment pipeline explicitly for each project and to find the right place in it for each type of test.

+
+
+

For that purpose, it is advisable to have a fast running test suite that gives as much confidence as possible without needing too much time and too much infrastructure. This test suite should run in an early stage of your deployment pipeline. Maybe the developer should run it even before he/she checks in the code. Usually lower integration levels are more suitable for this test suite than higher integration levels.

+
+
+

Note, that the deployment pipeline always should contain manual validation steps, at least manual acceptance testing. There also may be manual validation steps that have to be executed for special changes only, e.g. usability testing. Management and execution processes of those manual validation steps are currently not in the scope of devonfw.

+
+
+
+

Test Coverage

+
+

We are using tools (SonarQube/Jacoco) to measure the coverage of the tests. Please always keep in mind that the only reliable message of a code coverage of X% is that (100-X)% of the code is entirely untested. It does not say anything about the quality of the tests or the software though it often relates to it.

+
+
+
+

Test Configuration

+
+

This section covers test configuration in general without focusing on integration levels as in the first chapter.

+
+
+ +
+
+
Configure Test Specific Beans
+
+

Sometimes it can become handy to provide other or differently configured bean implementations via CDI than those available in production. For example, when creating beans using @Bean-annotated methods they are usually configured within those methods. WebSecurityBeansConfig shows an example of such methods.

+
+
+
+
@Configuration
+public class WebSecurityBeansConfig {
+  //...
+  @Bean
+  public AccessControlSchemaProvider accessControlSchemaProvider() {
+    // actually no additional configuration is shown here
+    return new AccessControlSchemaProviderImpl();
+  }
+  //...
+}
+
+
+
+

AccessControlSchemaProvider allows to programmatically access data defined in some XML file, e.g. access-control-schema.xml. Now, one can imagine that it would be helpful if AccessControlSchemaProvider would point to some other file than the default within a test class. That file could provide content that differs from the default. +The question is: how can I change resource path of AccessControlSchemaProviderImpl within a test?

+
+
+

One very helpful solution is to use static inner classes. +Static inner classes can contain @Bean -annotated methods, and by placing them in the classes parameter in @SpringBootTest(classes = { /* place class here*/ }) annotation the beans returned by these methods are placed in the application context during test execution. Combining this feature with inheritance allows to override methods defined in other configuration classes as shown in the following listing where TempWebSecurityConfig extends WebSecurityBeansConfig. This relationship allows to override public AccessControlSchemaProvider accessControlSchemaProvider(). Here we are able to configure the instance of type AccessControlSchemaProviderImpl before returning it (and, of course, we could also have used a completely different implementation of the AccessControlSchemaProvider interface). By overriding the method the implementation of the super class is ignored, hence, only the new implementation is called at runtime. Other methods defined in WebSecurityBeansConfig which are not overridden by the subclass are still dispatched to WebSecurityBeansConfig.

+
+
+
+
//... Other testing related annotations
+@SpringBootTest(classes = { TempWebSecurityConfig.class })
+public class SomeTestClass {
+
+  public static class TempWebSecurityConfig extends WebSecurityBeansConfig {
+
+    @Override
+    @Bean
+    public AccessControlSchemaProvider accessControlSchemaProvider() {
+
+      ClassPathResource resource = new ClassPathResource(locationPrefix + "access-control-schema3.xml");
+      AccessControlSchemaProviderImpl accessControlSchemaProvider = new AccessControlSchemaProviderImpl();
+      accessControlSchemaProvider.setAccessControlSchema(resource);
+      return accessControlSchemaProvider;
+    }
+  }
+}
+
+
+
+

The following chapter of the Spring framework documentation explains this issue, but uses a slightly different way to obtain the configuration.

+
+
+
+
Test Data
+
+

It is possible to obtain test data in two different ways depending on your test’s integration level.

+
+
+
+
+

Debugging Tests

+
+

The following two sections describe two debugging approaches for tests. Tests are either run from within the IDE or from the command line using Maven.

+
+
+
Debugging with the IDE
+
+

Debugging with the IDE is as easy as always. Even if you want to execute a SubsystemTest which needs a Spring context and a server infrastructure to run properly, you just set your breakpoints and click on Debug As → JUnit Test. The test infrastructure will take care of initializing the necessary infrastructure - if everything is configured properly.

+
+
+
+
Debugging with Maven
+
+

Please refer to the following two links to find a guide for debugging tests when running them from Maven.

+
+ +
+

In essence, you first have to execute a test using the command line. Maven will halt just before the test execution and wait for your IDE to connect to the process. When receiving a connection the test will start and then pause at any breakpoint set in advance. +The first link states that tests are started through the following command:

+
+
+
+
mvn -Dmaven.surefire.debug test
+
+
+
+

Although this is correct, it will run every test class in your project and - which is time consuming and mostly unnecessary - halt before each of these tests. +To counter this problem you can simply execute a single test class through the following command (here we execute the TablemanagementRestServiceTest from the restaurant sample application):

+
+
+
+
mvn test -Dmaven.surefire.debug test -Dtest=TablemanagementRestServiceTest
+
+
+
+

It is important to notice that you first have to execute the Maven command in the according submodule, e.g. to execute the TablemanagementRestServiceTest you have first to navigate to the core module’s directory.

+
+
+ +
+

==Transfer-Objects

+
+
+

The technical data model is defined in form of persistent entities. +However, passing persistent entities via call-by-reference across the entire application will soon cause problems:

+
+
+
    +
  • +

    Changes to a persistent entity are directly written back to the persistent store when the transaction is committed. When the entity is sent across the application, changes also tend to take place in multiple places, endangering data sovereignty and leading to inconsistency.

    +
  • +
  • +

    You want to send and receive data via services across the network and have to define what section of your data is actually transferred. If you have relations in your technical model you quickly end up loading and transferring way too much data.

    +
  • +
  • +

    Modifications to your technical data model shall not automatically have impact on your external services causing incompatibilities.

    +
  • +
+
+
+

To prevent such problems transfer-objects are used leading to a call-by-value model and decoupling changes to persistent entities.

+
+
+

In the following sections the different types of transfer-objects are explained. +You will find all according naming-conventions in the architecture-mapping

+
+
+

To structure your transfer objects, we recommend the following approaches:

+
+
+ +
+
+

Also considering the following transfer objects in specific cases:

+
+
+
+
SearchCriteriaTo
+
+

For searching we create or generate a «BusinessObject»SearchCriteriaTo representing a query to find instances of «BusinessObject».

+
+
TO
+
+

There are typically transfer-objects for data that is never persistent. +For very generic cases these just carry the suffix To.

+
+
STO
+
+

We can potentially create separate service transfer objects (STO) (if possible named «BusinessObject»Sto) to keep the service API stable and independent of the actual data-model. +However, we usually do not need this and want to keep our architecture simple. +Only create STOs if you need service versioning and support previous APIs or to provide legacy service technologies that require their own isolated data-model. +In such case you also need beanmapping between STOs and ETOs/DTOs, which means extra effort and complexity that should be avoided.

+
+
+
+
+ +
+

==Bean-Mapping

+
+
+

For decoupling, you sometimes need to create separate objects (beans) for a different view. E.g. for an external service, you will use a transfer-object instead of the persistence entity so internal changes to the entity do not implicitly change or break the service.

+
+
+

Therefore you need to map similar objects, which creates a copy. This also has the benefit that modifications to the copy have no side-effect on the original source object. However, implementing such mapping code by hand is very tedious and error-prone (if new properties are added to beans but not to mapping code):

+
+
+
+
public UserEto mapUser(UserEntity source) {
+  UserEto target = new UserEto();
+  target.setUsername(source.getUsername());
+  target.setEmail(source.getEmail());
+  ...
+  return target;
+}
+
+
+
+

Therefore we are using a BeanMapper for this purpose that makes our lives a lot easier. +There are several bean mapping frameworks with different approaches.

+
+
+

For a devon4j-spring application we recommend Orika, follow Spring Bean-Mapping for an introduction to Orika and Dozer in a devon4j-spring context application.

+
+
+ + + + + +
+ + +devon4j started with Dozer as framework for Spring applications and still supports it. However, we now recommend Orika (for new projects) as it is much faster (see Performance of Java Mapping Frameworks). +
+
+
+

For a Quarkus application we recommend Mapstruct, follow Quarkus Bean-Mapping for an introduction to Mapstruct in a quarkus context application.

+
+
+ +
+

==Datatypes

+
+
+
+
+

A datatype is an object representing a value of a specific type with the following aspects:

+
+
+
    +
  • +

    It has a technical or business specific semantic.

    +
  • +
  • +

    Its JavaDoc explains the meaning and semantic of the value.

    +
  • +
  • +

    It is immutable and therefore stateless (its value assigned at construction time and can not be modified).

    +
  • +
  • +

    It is serializable.

    +
  • +
  • +

    It properly implements #equals(Object) and #hashCode() (two different instances with the same value are equal and have the same hash).

    +
  • +
  • +

    It shall ensure syntactical validation so it is NOT possible to create an instance with an invalid value.

    +
  • +
  • +

    It is responsible for formatting its value to a string representation suitable for sinks such as UI, loggers, etc. Also consider cases like a Datatype representing a password where toString() should return something like "**" instead of the actual password to prevent security accidents.

    +
  • +
  • +

    It is responsible for parsing the value from other representations such as a string (as needed).

    +
  • +
  • +

    It shall provide required logical operations on the value to prevent redundancies. Due to the immutable attribute all manipulative operations have to return a new Datatype instance (see e.g. BigDecimal.add(java.math.BigDecimal)).

    +
  • +
  • +

    It should implement Comparable if a natural order is defined.

    +
  • +
+
+
+

Based on the Datatype a presentation layer can decide how to view and how to edit the value. Therefore a structured data model should make use of custom datatypes in order to be expressive. +Common generic datatypes are String, Boolean, Number and its subclasses, Currency, etc. +Please note that both Date and Calendar are mutable and have very confusing APIs. Therefore, use JSR-310 or jodatime instead. +Even if a datatype is technically nothing but a String or a Number but logically something special it is worth to define it as a dedicated datatype class already for the purpose of having a central javadoc to explain it. On the other side avoid to introduce technical datatypes like String32 for a String with a maximum length of 32 characters as this is not adding value in the sense of a real Datatype. +It is suitable and in most cases also recommended to use the class implementing the datatype as API omitting a dedicated interface.

+
+
+
+— mmm project
+datatype javadoc +
+
+ +
+
+
+

Datatype Packaging

+
+

For the devonfw we use a common packaging schema. +The specifics for datatypes are as following:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
SegmentValueExplanation

<component>

*

Here we use the (business) component defining the datatype or general for generic datatypes.

<layer>

common

Datatypes are used across all layers and are not assigned to a dedicated layer.

<scope>

api

Datatypes are always used directly as API even though they may contain (simple) implementation logic. Most datatypes are simple wrappers for generic Java types (e.g. String) but make these explicit and might add some validation.

+
+
+

Technical Concerns

+
+

Many technologies like Dozer and QueryDSL’s (alias API) are heavily based on reflection. For them to work properly with custom datatypes, the frameworks must be able to instantiate custom datatypes with no-argument constructors. It is therefore recommended to implement a no-argument constructor for each datatype of at least protected visibility.

+
+
+
+

Datatypes in Entities

+
+

The usage of custom datatypes in entities is explained in the persistence layer guide.

+
+
+
+

Datatypes in Transfer-Objects

+
+
XML
+
+

For mapping datatypes with JAXB see XML guide.

+
+
+
+
JSON
+
+

For mapping datatypes from and to JSON see JSON custom mapping.

+
+
+ +
+

==Accessibility

+
+
+

TODO

+
+ + + +
+ +
+

==CORS support

+
+
+

When you are developing a Javascript client and server application separately, you have to deal with cross domain issues. We have to request from an origin domain distinct from the target domain and the browser does not allow this.

+
+
+

So, we need to prepare the server side to accept requests from other domains. We need to cover the following points:

+
+
+
    +
  • +

    Accept request from other domains.

    +
  • +
  • +

    Accept devonfw used headers like X-CSRF-TOKEN or correlationId.

    +
  • +
  • +

    Be prepared to receive secured request (cookies).

    +
  • +
+
+
+

It is important to note that if you are using security in your request (sending cookies) you have to set withCredentials flag to true in your client side request and deal with special IE8 characteristics.

+
+
+

For more information about CORS see here. Information about the CORS headers can be found here.

+
+
+
+
+

Configuring CORS support

+
+

To enable CORS support for your application, see the advanced guides. For Spring applications see here. For Quarkus follow the official Quarkus guide.

+
+
+
+

Configuration with service mesh

+
+

If you are using a service mesh, you can also define your CORS policy directly there. Here is an example from Istio.

+
+
+ +
+

==BLOB support

+
+
+

BLOB stands for Binary Large Object. A BLOB may be an image, an office document, ZIP archive or any other multimedia object. +Often these BLOBs are large. If this is the case, you need to take care that you do not copy all the BLOB data into your application heap, e.g. when providing them via a REST service. +This could easily lead to performance problems or out of memory errors. +The solution for that problem is "streaming" those BLOBs directly from the database to the client. To demonstrate how this can be accomplished, devonfw provides an example.

+
+
+
+

Further Reading

+ +
+ +
+

==Java Development Kit

+
+
+

The Java Development Kit is an implementation of the Java platform. It provides the Java Virtual Machine (JVM) and the Java Runtime Environment (JRE).

+
+
+
+

Editions

+
+

The JDK exists in different editions:

+
+
+ +
+
+

As Java is evolving and also complex maintaining a JVM requires a lot of energy. +Therefore many alternative JDK editions are unable to cope with this and support latest Java versions and according compatibility. +Unfortunately OpenJDK only maintains a specific version of Java for a relative short period of time before moving to the next major version. +In the end, this technically means that OpenJDK is continuous beta and can not be used in production for reasonable software projects. +As OracleJDK changed its licensing model and can not be used for commercial usage even during development, things can get tricky. +You may want to use OpenJDK for development and OracleJDK only in production. +However, e.g. OpenJDK 11 never released a version that is stable enough for reasonable development (e.g. javadoc tool is broken and fixes are not available of OpenJDK 11 - fixed in 11.0.3 what is only available as OracleJDK 11 or you need to go to OpenJDK 12+, what has other bugs) so in the end there is no working release of OpenJDK 11. +This more or less forces you to use OracleJDK what requires you to buy a subscription so you can use it for commercial development. +However, there is AdoptOpenJDK that provides forked releases of OpenJDK with bug-fixes what might be an option. +Anyhow, as you want to have your development environment close to production, the productively used JDK (most likely OracleJDK) should be preferred also for development.

+
+
+
+

Upgrading

+
+

Until Java 8 compatibility was one of the key aspects for Java version updates (after the mess on the Swing updates with Java2 many years ago). +However, Java 9 introduced a lot of breaking changes. +This documentation wants to share the experience we collected in devonfw when upgrading from Java 8 to newer versions. +First of all we separate runtime changes that you need if you want to build your software with JDK 8 but such that it can also run on newer versions (e.g. JRE 11) +from changes required to also build your software with more recent JDKs (e.g. JDK 11 or 12).

+
+
+
Runtime Changes
+
+

This section describes required changes to your software in order to make it run also with versions newer than Java 8.

+
+
+
Classes removed from JDK
+
+

The first thing that most users hit when running their software with newer Java versions is a ClassNotFoundException like this:

+
+
+
+
Caused by: java.lang.ClassNotFoundException: javax.xml.bind.JAXBException
+
+
+
+

As Java 9 introduced a module system with Jigsaw, the JDK that has been a monolithic mess is now a well-defined set of structured modules. +Some of the classes that used to come with the JDK moved to modules that were not available by default in Java 9 and have even been removed entirely in later versions of Java. +Therefore you should simply treat such code just like any other 3rd party component that you can add as a (maven) dependency. +The following table gives you the required hints to make your software work even with such classes / modules removed from the JDK (please note that the specified version is just a suggestion that worked, feel free to pick a more recent or more appropriate version):

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 12. Dependencies for classes removed from Java 8 since 9+
ClassGroupIdArtifactIdVersion

javax.xml.bind.*

javax.xml.bind

jaxb-api

2.3.1

com.sun.xml.bind.*

org.glassfish.jaxb

jaxb-runtime

2.3.1

java.activation.*

javax.activation

javax.activation-api

1.2.0

java.transaction.*

javax.transaction

javax.transaction-api

1.2

java.xml.ws.*

javax.xml.ws

jaxws-api

2.3.1

javax.jws.*

javax.jws

javax.jws-api

1.1

javax.annotation.*

javax.annotation

javax.annotation-api

1.3.2

+
+
+
3rd Party Updates
+
+

Further, internal and unofficial APIs (e.g. sun.misc.Unsafe) have been removed. +These are typically not used by your software directly but by low-level 3rd party libraries like asm that need to be updated. +Also simple things like the Java version have changed (from 1.8.x to 9.x, 10.x, 11.x, 12.x, etc.). +Some 3rd party libraries were parsing the Java version in a very naive way, making them unable to be used with Java 9+:

+
+
+
+
Caused by: java.lang.NullPointerException
+   at org.apache.maven.surefire.shade.org.apache.commons.lang3.SystemUtils.isJavaVersionAtLeast (SystemUtils.java:1626)
+
+
+
+

Therefore the following table gives an overview of common 3rd party libraries that have been affected by such breaking changes and need to be updated to at least the specified version:

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 13. Minimum recommended versions of common 3rd party for Java 9+
GroupIdArtifactIdVersionIssue

org.apache.commons

commons-lang3

3.7

LANG-1365

cglib

cglib

3.2.9

102, 93, 133

org.ow2.asm

asm

7.1

2941

org.javassist

javassist

3.25.0-GA

194, 228, 246, 171

+
+
+
ResourceBundles
+
+

For internationalization (i18n) and localization (l10n) ResourceBundle is used for language and country specific texts and configurations as properties (e.g. MyResourceBundle_de.properties). With Java modules there are changes and impacts you need to know to get things working. The most important change is documented in the JavaDoc of ResourceBundle. However, instead of using ResourceBundleProvider and refactoring your entire code causing incompatibilities, you can simply put the resource bundles in a regular JAR on the classpath rather than a named module (or into the launching app). +If you want to implement (new) Java modules with i18n support, you can have a look at mmm-nls.

+
+
+
+
+
Buildtime Changes
+
+

If you also want to change your build to work with a recent JDK you also need to ensure that test frameworks and maven plugins properly support this.

+
+
+
Findbugs
+
+

Findbugs does not work with Java 9+ and is actually a dead project. +The new findbugs is SpotBugs. +For maven the new solution is spotbugs-maven-plugin:

+
+
+
+
<plugin>
+  <groupId>com.github.spotbugs</groupId>
+  <artifactId>spotbugs-maven-plugin</artifactId>
+  <version>3.1.11</version>
+</plugin>
+
+
+
+
+
Test Frameworks
+ + ++++++ + + + + + + + + + + + + + + + + +
Table 14. Minimum recommended versions of common 3rd party test frameworks for Java 9+
GroupIdArtifactIdVersionIssue

org.mockito

mockito-core

2.23.4

1419, 1696, 1607, 1594, 1577, 1482

+
+
+
Maven Plugins
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 15. Minimum recommended versions of common maven plugins for Java 9+
GroupIdArtifactId(min.) VersionIssue

org.apache.maven.plugins

maven-compiler-plugin

3.8.1

x

org.apache.maven.plugins

maven-surefire-plugin

2.22.2

SUREFIRE-1439

org.apache.maven.plugins

maven-surefire-report-plugin

2.22.2

SUREFIRE-1439

org.apache.maven.plugins

maven-archetype-plugin

3.1.0

x

org.apache.maven.plugins

maven-javadoc-plugin

3.1.0

x

org.jacoco

jacoco-maven-plugin

0.8.3

663

+
+
+
Maven Usage
+
+

With Java modules you can not run Javadoc standalone anymore or you will get this error when running mvn javadoc:javadoc:

+
+
+
+
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-javadoc-plugin:3.1.1:javadoc (default-cli) on project mmm-base: An error has occurred in Javadoc report generation:
+[ERROR] Exit code: 1 - error: module not found: io.github.mmm.base
+[ERROR]
+[ERROR] Command line was: /projects/mmm/software/java/bin/javadoc @options @packages @argfile
+
+
+
+

As a solution or workaround you need to include the compile goal into your build lifecycle so the module-path is properly configured:

+
+
+
+
mvn compile javadoc:javadoc
+
+
+
+
+
+
+ +
+

We want to give credits and say thanks to the following articles that have been there before and helped us on our way:

+
+ +
+
+
+
+

Tutorials

+
+ +
+

==Creating a new application

+
+
+

Running the archetype

+
+

In order to create a new application you must use the archetype provided by devon4j which uses the maven archetype functionality.

+
+
+

To create a new application, you should have installed devonfw IDE. Follow the devon ide documentation to install +the same. +You can choose between 2 alternatives: create it from the command line or, in a more visual manner, within Eclipse.

+
+
+
From command Line
+
+

To create a new devon4j application from command line, you can simply run the following command:

+
+
+
+
devon java create com.example.application.sampleapp
+
+
+
+

For low-level creation you can also manually call this command:

+
+
+
+
mvn -DarchetypeVersion=${devon4j.version} -DarchetypeGroupId=com.devonfw.java.templates -DarchetypeArtifactId=devon4j-template-server archetype:generate -DgroupId=com.example.application -DartifactId=sampleapp -Dversion=1.0.0-SNAPSHOT -Dpackage=com.devonfw.application.sampleapp
+
+
+
+

Attention: The archetypeVersion (first argument) should be set to the latest version of devon4j. You can easily determine the version from this badge: +latest devon4j version

+
+
+

Further providing additional properties (using -D parameter) you can customize the generated app:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 16. Options for app template
propertycommentexample

dbType

Choose the type of RDBMS to use (hana, oracle, mssql, postgresql, mariadb, mysql, etc.)

-DdbType=postgresql

batch

Option to add a batch module

-Dbatch=batch

+
+
+
From Eclipse
+
+
+
After that, you should follow this Eclipse steps to create your application:
+
+
+
+
    +
  • +

    Create a new Maven Project.

    +
  • +
  • +

    Choose the devon4j-template-server archetype, just like the image.

    +
  • +
+
+
+
+Select archetype +
+
+
+
    +
  • +

    Fill the Group Id, Artifact Id, Version and Package for your project.

    +
  • +
+
+
+
+Configure archetype +
+
+
+
    +
  • +

    Finish the Eclipse assistant and you are ready to start your project.

    +
  • +
+
+
+
+
+

What is generated

+
+

The application template (archetype) generates a Maven multi-module project. It has the following modules:

+
+
+
    +
  • +

    api: module with the API (REST service interfaces, transferobjects, datatypes, etc.) to be imported by other apps as a maven dependency in order to invoke and consume the offered (micro)services.

    +
  • +
  • +

    core: maven module containing the core of the application.

    +
  • +
  • +

    batch: optional module for batch(es)

    +
  • +
  • +

    server: module that bundles the entire app (core with optional batch) as a WAR file.

    +
  • +
+
+
+

The toplevel pom.xml of the generated project has the following features:

+
+
+
    +
  • +

    Properties definition: Spring-boot version, Java version, etc.

    +
  • +
  • +

    Modules definition for the modules (described above)

    +
  • +
  • +

    Dependency management: define versions for dependencies of the technology stack that are recommended and work together in a compatible way.

    +
  • +
  • +

    Maven plugins with desired versions and configuration

    +
  • +
  • +

    Profiles for test stages

    +
  • +
+
+
+
+

How to run your app

+
+
Run app from IDE
+
+

To run your application from your favourite IDE, simply launch SpringBootApp as java application.

+
+
+
+
Run app as bootified jar or war
+
+

More details are available here.

+
+
+
+
+
+
+
+
+1. Whether to use checked exceptions or not is a controversial topic. Arguments for both sides can be found under The Trouble with Checked Exceptions, Unchecked Exceptions — The Controversy, and Checked Exceptions are Evil. The arguments in favor of unchecked exceptions tend to prevail for applications built with devon4j. Therefore, unchecked exceptions should be used for a consistent style. +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/master-doc.html b/docs/devonfw.github.io/1.0/devon4j.wiki/master-doc.html new file mode 100644 index 00000000..64b670fc --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/master-doc.html @@ -0,0 +1,8325 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw provides a solution to building applications which combine best-in-class frameworks and libraries as well as industry proven practices and code conventions. +It massively speeds up development, reduces risks and helps you to deliver better results.

+
+
+

The following sections contain the complete compendium of devon4j, the Java stack of devonfw. +You can also read the latest version of this documentation online in the devon4j wiki +or at devon4j on devonfw.com.

+
+ +
+

==Architecture

+
+
+

There are many different views that are summarized by the term architecture. First, we will introduce the key principles and architecture principles of devonfw. Then, we will go into details of the architecture of an application.

+
+
+

Key Principles

+
+

For devonfw we follow these fundamental key principles for all decisions about architecture, design, or choosing standards, libraries, and frameworks:

+
+
+
    +
  • +

    KISS
    +Keep it small and simple

    +
  • +
  • +

    Open
    +Commitment to open standards and solutions (no required dependencies to commercial or vendor-specific standards or solutions)

    +
  • +
  • +

    Patterns
    +We concentrate on providing patterns, best-practices and examples rather than writing framework code.

    +
  • +
  • +

    Solid
    +We pick solutions that are established and have been proven to be solid and robust in real-life (business) projects.

    +
  • +
+
+
+
+

Architecture Principles

+
+

Additionally we define the following principles that our architecture is based on:

+
+
+
    +
  • +

    Component Oriented Design
    +We follow a strictly component oriented design to address the following sub-principles:

    +
    +
      +
    • +

      Separation of Concerns

      +
    • +
    • +

      Reusability and avoiding redundant code

      +
    • +
    • +

      Information Hiding via component API and its exchangeable implementation treated as secret.

      +
    • +
    • +

      Design by Contract for self-contained, descriptive, and stable component APIs.

      +
    • +
    • +

      Layering as well as separation of business logic from technical code for better maintenance.

      +
    • +
    • +

      Data Sovereignty (and high cohesion with low coupling) says that a component is responsible for its data and changes to this data shall only happen via the component. Otherwise, maintenance problems will arise to ensure that data remains consistent. Therefore, interfaces of a component that may be used by other components are designed call-by-value and not call-by-reference.

      +
    • +
    +
    +
  • +
  • +

    Homogeneity
    +Solve similar problems in similar ways and establish a uniform code-style.

    +
  • +
+
+
+

As an architect you should be prepared for the future by reading the TechnoVision.

+
+
+
+

Application Architecture

+
+

For the architecture of an application we distinguish the following views:

+
+
+
    +
  • +

    The Business Architecture describes an application from the business perspective. It divides the application into business components and with full abstraction of technical aspects.

    +
  • +
  • +

    The Technical Architecture describes an application from the technical implementation perspective. It divides the application into technical layers and defines which technical products and frameworks are used to support these layers.

    +
  • +
  • +

    The Infrastructure Architecture describes an application from the operational infrastructure perspective. It defines the nodes used to run the application including clustering, load-balancing and networking. This view is not explored further in this guide.

    +
  • +
+
+
+

Business Architecture

+
+

The business architecture divides the application into business components. A business component has a well-defined responsibility that it encapsulates. All aspects related to that responsibility have to be implemented within that business component. Further, the business architecture defines the dependencies between the business components. These dependencies need to be free of cycles. A business component exports its functionality via well-defined interfaces as a self-contained API. A business component may use another business component via its API and compliant with the dependencies defined by the business architecture.

+
+
+

As the business domain and logic of an application can be totally different, the devonfw can not define a standardized business architecture. Depending on the business domain it has to be defined from scratch or from a domain reference architecture template. For very small systems it may be suitable to define just a single business component containing all the code.

+
+
+
+

Technical Architecture

+
+

The technical architecture divides the application into technical layers based on the multilayered architecture. A layer is a unit of code with the same category such as a service or presentation logic. So, a layer is often supported by a technical framework. Each business component can therefore be split into component parts for each layer. However, a business component may not have component parts for every layer (e.g. only a presentation part that utilizes logic from other components).

+
+
+

An overview of the technical reference architecture of the devonfw is given by figure "Technical Reference Architecture". +It defines the following layers visualized as horizontal boxes:

+
+
+ +
+
+

Also, you can see the (business) components as vertical boxes (e.g. A and X) and how they are composed out of component parts each one assigned to one of the technical layers.

+
+
+

Further, there are technical components for cross-cutting aspects grouped by the gray box on the left. Here is a complete list:

+
+ +
+
+devonfw architecture blueprint +
+
Figure 1. Technical Reference Architecture
+
+
+

Please click on the architecture image to open it as SVG and click on the layers and cross-cutting topics to open the according documentation guide.

+
+
+

We reflect this architecture in our code as described in our coding conventions allowing a traceability of business components, use-cases, layers, etc. into the code and giving +developers a sound orientation within the project.

+
+
+

Further, the architecture diagram shows the allowed dependencies illustrated by the dark green connectors. +Within a business component a component part can call the next component part on the layer directly below via a dependency on its API (vertical connectors). +While this is natural and obvious, it is generally forbidden to have dependencies upwards the layers +or to skip a layer by a direct dependency on a component part two or more layers below. +The general dependencies allowed between business components are defined by the business architecture. +In our reference architecture diagram we assume that the business component A1 is allowed to depend +on component A2. Therefore, a use-case within the logic component part of A1 is allowed to call a +use-case from A2 via a dependency on the component API. The same applies for dialogs on the client layer. +This is illustrated by the horizontal connectors. Please note that persistence entities are part of the API of the data-access component part so only the logic component part of the same +business component may depend on them.

+
+
+

The technical architecture has to address non-functional requirements:

+
+
+
    +
  • +

    scalability
    +is established by keeping state in the client and making the server state-less (except for login session). Via load-balancers new server nodes can be added to improve performance (horizontal scaling).

    +
  • +
  • +

    availability and reliability
    +are addressed by clustering with redundant nodes avoiding any single-point-of failure. If one node fails the system is still available. Further, the software has to be robust so there are no dead-locks or other bad effects that can make the system unavailable or not reliable.

    +
  • +
  • +

    security
    +is achieved in the devonfw by the right templates and best-practices that avoid vulnerabilities. See security guidelines for further details.

    +
  • +
  • +

    performance
    +is obtained by choosing the right products and proper configurations. While the actual implementation of the application matters for performance a proper design is important as it is the key to allow performance-optimizations (see e.g. caching).

    +
  • +
+
+
+
Technology Stack
+
+

The technology stack of the devonfw is illustrated by the following table.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Technology Stack of devonfw
TopicDetailStandardSuggested implementation

runtime

language & VM

Java

Oracle JDK

runtime

servlet-container

JEE

tomcat

component management

dependency injection

JSR330 & JSR250

spring

configuration

framework

-

spring-boot

persistence

OR-mapper

JPA

hibernate

batch

framework

JSR352

spring-batch

service

SOAP services

JAX-WS

CXF

service

REST services

JAX-RS

CXF

logging

framework

slf4j

logback

validation

framework

beanvalidation/JSR303

hibernate-validator

security

Authentication & Authorization

JAAS

spring-security

monitoring

framework

JMX

spring

monitoring

HTTP Bridge

HTTP & JSON

jolokia

AOP

framework

dynamic proxies

spring AOP

+
+ +
+

==Components

+
+
+

Following separation-of-concerns we divide an application into components using our package-conventions and project structure. +As described by the architecture each component is divided into layers as described in the project structure. +Please note that a component will only have the required layers. +So a component may have any number from one to all layers.

+
+
+
+
+

General Component

+
+

Cross-cutting aspects belong to the implicit component general. It contains technical configurations and very general code that is not business specific. Such code shall not have any dependencies to other components and therefore business related code.

+
+
+
+

Business Component

+
+

The business-architecture defines the business components with their allowed dependencies. A small application (microservice) may just have one component and no dependencies making it simple while the same architecture can scale up to large and complex applications (from bigger microservice up to modulith). +Tailoring a business domain into applications and applications into components is a tricky task that needs the skills of an experienced architect. +Also, the tailoring should follow the business and not split by technical reasons or only by size. +Size is only an indicator but not a driver of tailoring. +Whatever hypes like microservices are telling you, never get misled in this regard: +If your system grows and reaches MAX+1 lines of code, it is not the right motivation to split it into two microservices of ~MAX/2 lines of code - such approaches will waste huge amounts of money and lead to chaos.

+
+
+
+

App Component

+
+

Only in case you need cross-cutting code that aggregates another component you may introduce the component app. +It is allowed to depend on all other components but no other component may depend on it. +With the modularity and flexibility of spring you typically do not need this. +However, when you need to have a class that registers all services or component-facades using direct code dependencies, you can introduce this component.

+
+
+
+

Component Example

+
+

The following class diagram illustrates an example of the business component Staffmanagement:

+
+
+
+logic layer component pattern +
+
+
+

In this scheme, you can see the structure and flow from the service-layer (REST service call) via the logic-layer to the dataaccess-layer (and back).

+
+
+
+
+

Coding

+
+ +
+

==Coding Conventions

+
+
+

The code should follow general conventions for Java (see Oracle Naming Conventions, Google Java Style, etc.).We consider this as common sense and provide configurations for SonarQube and related tools such as Checkstyle instead of repeating this here.

+
+
+

Naming

+
+

Besides general Java naming conventions, we follow the additional rules listed here explicitly:

+
+
+
    +
  • +

    Always use short but speaking names (for types, methods, fields, parameters, variables, constants, etc.).

    +
  • +
  • +

    Strictly avoid special characters in technical names (for files, types, fields, methods, properties, variables, database tables, columns, constraints, etc.). In other words only use Latin alphanumeric ASCII characters with the common allowed technical separators for the according context (e.g. underscore) for technical names (even excluding whitespaces).

    +
  • +
  • +

    For package segments and type names prefer singular forms (CustomerEntity instead of CustomersEntity). Only use plural forms when there is no singular or it is really semantically required (e.g. for a container that contains multiple of such objects).

    +
  • +
  • +

    Avoid having duplicate type names. The name of a class, interface, enum or annotation should be unique within your project unless this is intentionally desired in a special and reasonable situation.

    +
  • +
  • +

    Avoid artificial naming constructs such as prefixes (I*) or suffixes (*IF) for interfaces.

    +
  • +
  • +

    Use CamelCase even for abbreviations (XmlUtil instead of XMLUtil)

    +
  • +
  • +

    Avoid property/field names where the second character is upper-case at all (e.g. 'aBc'). See #1095 for details.

    +
  • +
  • +

    Names of Generics should be easy to understand. Where suitable follow the common rule E=Element, T=Type, K=Key, V=Value but feel free to use longer names for more specific cases such as ID, DTO or ENTITY. The capitalized naming helps to distinguish a generic type from a regular class.

    +
  • +
+
+
+
+

Packages

+
+

Java Packages are the most important element to structure your code. We use a strict packaging convention to map technical layers and business components (slices) to the code (See technical architecture for further details). By using the same names in documentation and code we create a strong link that gives orientation and makes it easy to find from business requirements, specifications or story tickets into the code and back.

+
+
+

For a devon4j based application we use the following Java-Package schema:

+
+
+
+
«root».«component».«layer»[.«detail»]
+
+
+
+

E.g. in our example application we find the Spring Data repositories for the ordermanagement component in the package com.devonfw.application.mtsj.ordermanagement.dataaccess.api.repo

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2. Segments of package schema
SegmentDescriptionExample

«root»

Is the basic Java Package name-space of your app. Typically we suggest to use «group».«artifact» where «group» is your maven/gradle groupId corresponding to your organization or IT project owning the code following common Java Package conventions. The segment «artifact» is your maven/gradle artifactId and is typically the technical name of your app.

com.devonfw.application.mtsj

«component»

The (business) component the code belongs to. It is defined by the business architecture and uses terms from the business domain. Use the implicit component general for code not belonging to a specific component (foundation code).

salesmanagement

«layer»

The name of the technical layer (See technical architecture). Details are described for the modern project structure and for the classic project structure.

logic

«detail»

Here you are free to further divide your code into sub-components and other concerns according to the size of your component part. If you want to strictly separate API from implementation you should start «detail» with «scope» that is explained below.

dao

«scope»

The scope which is one of api (official API to be used by other layers or components), base (basic code to be reused by other implementations) and impl (implementation that should never be imported from outside). This segment was initially mandatory but due to trends such as microservices, lean, and agile we decided to make it optional and do not force anybody to use it.

api

+
+

Please note that devon4j library modules for spring use com.devonfw.module as «root» and the name of the module as «component». E.g. the API of our beanmapping module can be found in the package com.devonfw.module.beanmapping.common.api.

+
+
+
+

Code Tasks

+
+

Code spots that need some rework can be marked with the following tasks tags. These are already properly pre-configured in your development environment for auto completion and to view tasks you are responsible for. It is important to keep the number of code tasks low. Therefore, every member of the team should be responsible for the overall code quality. So if you change a piece of code and hit a code task that you can resolve in a reliable way, please do this as part of your change and remove the according tag.

+
+
+
TODO
+
+

Used to mark a piece of code that is not yet complete (typically because it can not be completed due to a dependency on something that is not ready).

+
+
+
+
 // TODO «author» «description»
+
+
+
+

A TODO tag is added by the author of the code who is also responsible for completing this task.

+
+
+
+
FIXME
+
+
+
 // FIXME «author» «description»
+
+
+
+

A FIXME tag is added by the author of the code or someone who found a bug he can not fix right now. The «author» who added the FIXME is also responsible for completing this task. This is very similar to a TODO but with a higher priority. FIXME tags indicate problems that should be resolved before a release is completed while TODO tags might have to stay for a longer time.

+
+
+
+
REVIEW
+
+
+
 // REVIEW «responsible» («reviewer») «description»
+
+
+
+

A REVIEW tag is added by a reviewer during a code review. Here the original author of the code is responsible to resolve the REVIEW tag and the reviewer is assigning this task to him. This is important for feedback and learning and has to be aligned with a review "process" where people talk to each other and get into discussion. In smaller or local teams a peer-review is preferable but this does not scale for large or even distributed teams.

+
+
+
+
+

Code-Documentation

+
+

As a general goal, the code should be easy to read and understand. Besides clear naming, the documentation is important. We follow these rules:

+
+
+
    +
  • +

    APIs (especially component interfaces) are properly documented with JavaDoc.

    +
  • +
  • +

    JavaDoc shall provide actual value - we do not write JavaDoc to satisfy tools such as checkstyle but to express information not already available in the signature.

    +
  • +
  • +

    We make use of {@link} tags in JavaDoc to make it more expressive.

    +
  • +
  • +

    JavaDoc of APIs describes how to use the type or method and not how the implementation internally works.

    +
  • +
  • +

    To document implementation details, we use code comments (e.g. // we have to flush explicitly to ensure version is up-to-date). This is only needed for complex logic.

    +
  • +
  • +

    Avoid the pointless {@inheritDoc} as since Java 1.5 there is the @Override annotation for overridden methods and your JavaDoc is inherited automatically even without any JavaDoc comment at all.

    +
  • +
+
+
+
+

Code-Style

+
+

This section gives you best practices to write better code and avoid pitfalls and mistakes.

+
+
+
BLOBs
+
+

Avoid using byte[] for BLOBs as this will load them entirely into your memory. This will cause performance issues or out of memory errors. Instead, use streams when dealing with BLOBs. For further details see BLOB support.

+
+
+
+
Stateless Programming
+
+

When implementing logic as components or beans of your container using dependency injection, we strongly encourage stateless programming. +This is not about data objects like an entity or transfer-object that are stateful by design. +Instead this applies to all classes annotated with @Named, @ApplicationScoped, @Stateless, etc. and all their super-classes. +These classes especially include your repositories, use-cases, and REST services. +Such classes shall never be modified after initialization. +Methods called at runtime (after initialization via the container) do not assign fields (member variables of your class) or mutate the object stored in a field. +This allows your component or bean to be stateless and thread-safe. +Therefore it can be initialized as a singleton so only one instance is created and shared across all threads of the application. +Here is an example:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcApproveContractImpl implements UcApproveContract {
+
+  // bad
+  private String contractOwner;
+
+  private MyState state;
+
+  @Overide
+  public void approve(Contract contract) {
+    this.contractOwner = contract.getOwner();
+    this.contractOwner = this.contractOwner.toLowerCase(Locale.US);
+    this.state.setAdmin(this.contractOwner.endsWith("admin"));
+    if (this.state.isAdmin()) {
+      ...
+    } else {
+      ...
+    }
+  }
+
+  // fine
+  @Overide
+  public void approveContract(Contract contract) {
+    String contractOwner = contract.getOwner().toLowerCase(Locale.US);
+    if (contractOwner.endsWith("admin")) {
+      ...
+    } else {
+      ...
+    }
+  }
+}
+
+
+
+

As you can see in the bad code fields of the class are assigned when the method approve is called. +So multiple users and therefore threads calling this method concurrently can interfere and override this state causing side-effects on parallel threads. +This will lead to nasty bugs and errors that are hard to trace down. +They will not occur in simple tests but for sure in production with real users. +Therefore never do this and implement your functionality stateless. +That is keeping all state in local variables and strictly avoid modifying fields or their value as illustrated in the fine code. +If you find yourself passing many parameters between methods that all represent state, you can easily create a separate class that encapsulates this state. +However, then you need to create this state object in your method as local variable and pass it between methods as parameter:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcApproveContractImpl implements UcApproveContract {
+
+  // fine
+  @Overide
+  public void approveContract(Contract contract) {
+    String contractOwner = contract.getOwner().toLowerCase(Locale.US);
+    MyState state = new MyState();
+    state.setAdmin(this.contractOwner.endsWith("admin"));
+    doApproveContract(contract, state);
+  }
+}
+
+
+
+
+
Closing Resources
+
+

Resources such as streams (InputStream, OutputStream, Reader, Writer) or transactions need to be handled properly. Therefore, it is important to follow these rules:

+
+
+
    +
  • +

    Each resource has to be closed properly, otherwise you will get out of file handles, TX sessions, memory leaks or the like

    +
  • +
  • +

    Where possible avoid to deal with such resources manually. That is why we are recommending @Transactional for transactions in devonfw (see Transaction Handling).

    +
  • +
  • +

    In case you have to deal with resources manually (e.g. binary streams) ensure to close them properly. See the example below for details.

    +
  • +
+
+
+

Closing streams and other such resources is error prone. Have a look at the following example:

+
+
+
+
// bad
+try {
+  InputStream in = new FileInputStream(file);
+  readData(in);
+  in.close();
+} catch (IOException e) {
+  throw new IllegalStateException("Failed to read data.", e);
+}
+
+
+
+

The code above is wrong as in case of an IOException the InputStream is not properly closed. In a server application such mistakes can cause severe errors that typically will only occur in production. As such resources implement the AutoCloseable interface you can use the try-with-resource syntax to write correct code. The following code shows a correct version of the example:

+
+
+
+
// fine
+try (InputStream in = new FileInputStream(file)) {
+  readData(in);
+} catch (IOException e) {
+  throw new IllegalStateException("Failed to read data.", e);
+}
+
+
+
+
+
Catching and handling Exceptions
+
+

When catching exceptions always ensure the following:

+
+
+
    +
  • +

    Never call printStackTrace() method on an exception

    +
  • +
  • +

    Either log or wrap and re-throw the entire caught exception. Be aware that the cause(s) of an exception is very valuable information. If you lose such information by improper exception-handling you may be unable to properly analyse production problems, which can cause severe issues.

    +
    +
      +
    • +

      If you wrap and re-throw an exception ensure that the caught exception is passed as cause to the newly created and thrown exception.

      +
    • +
    • +

      If you log an exception ensure that the entire exception is passed as argument to the logger (and not only the result of getMessage() or toString() on the exception).

      +
    • +
    +
    +
  • +
  • +

    See exception handling

    +
  • +
+
+
+
+
Lambdas and Streams
+
+

With Java8 you have cool new features like lambdas and monads like (Stream, CompletableFuture, Optional, etc.). +However, these new features can also be misused or lead to code that is hard to read or debug. To avoid pain, we give you the following best practices:

+
+
+
    +
  1. +

    Learn how to use the new features properly before using. Developers are often keen on using cool new features. When you do your first experiments in your project code you will cause deep pain and might be ashamed afterwards. Please study the features properly. Even Java8 experts still write for loops to iterate over collections, so only use these features where it really makes sense.

    +
  2. +
  3. +

    Streams shall only be used in fluent API calls as a Stream can not be forked or reused.

    +
  4. +
  5. +

    Each stream has to have exactly one terminal operation.

    +
  6. +
  7. +

    Do not write multiple statements into lambda code:

    +
    +
    +
    // bad
    +collection.stream().map(x -> {
    +Foo foo = doSomething(x);
    +...
    +return foo;
    +}).collect(Collectors.toList());
    +
    +
    +
    +

    This style makes the code hard to read and debug. Never do that! Instead, extract the lambda body to a private method with a meaningful name:

    +
    +
    +
    +
    // fine
    +collection.stream().map(this::convertToFoo).collect(Collectors.toList());
    +
    +
    +
  8. +
  9. +

    Do not use parallelStream() in general code (that will run on server side) unless you know exactly what you are doing and what is going on under the hood. Some developers might think that using parallel streams is a good idea as it will make the code faster. However, if you want to do performance optimizations talk to your technical lead (architect). Many features such as security and transactions will rely on contextual information that is associated with the current thread. Hence, using parallel streams will most probably cause serious bugs. Only use them for standalone (CLI) applications or for code that is just processing large amounts of data.

    +
  10. +
  11. +

    Do not perform operations on a sub-stream inside a lambda:

    +
    +
    +
    set.stream().flatMap(x -> x.getChildren().stream().filter(this::isSpecial)).collect(Collectors.toList()); // bad
    +set.stream().flatMap(x -> x.getChildren().stream()).filter(this::isSpecial).collect(Collectors.toList()); // fine
    +
    +
    +
  12. +
  13. +

    Only use collect at the end of the stream:

    +
    +
    +
    set.stream().collect(Collectors.toList()).forEach(...) // bad
    +set.stream().peek(...).collect(Collectors.toList()) // fine
    +
    +
    +
  14. +
  15. +

    Lambda parameters with Types inference

    +
    +
    +
    (String a, Float b, Byte[] c) -> a.toString() + Float.toString(b) + Arrays.toString(c)  // bad
    +(a,b,c)  -> a.toString() + Float.toString(b) + Arrays.toString(c)  // fine
    +
    +Collections.sort(personList, (Person p1, Person p2) -> p1.getSurName().compareTo(p2.getSurName()));  // bad
    +Collections.sort(personList, (p1, p2) -> p1.getSurName().compareTo(p2.getSurName()));  // fine
    +
    +
    +
  16. +
  17. +

    Avoid Return Braces and Statement

    +
    +
    +
     a ->  { return a.toString(); } // bad
    + a ->  a.toString();   // fine
    +
    +
    +
  18. +
  19. +

    Avoid Parentheses with Single Parameter

    +
    +
    +
    (a) -> a.toString(); // bad
    + a -> a.toString();  // fine
    +
    +
    +
  20. +
  21. +

    Avoid if/else inside foreach method. Use Filter method & comprehension

    +
    +
    +
    // bad
    +static public Iterator<String> TwitterHandles(Iterator<Author> authors, string company) {
    +    final List result = new ArrayList<String> ();
    +    foreach (Author a : authors) {
    +      if (a.Company.equals(company)) {
    +        String handle = a.TwitterHandle;
    +        if (handle != null)
    +          result.Add(handle);
    +      }
    +    }
    +    return result;
    +  }
    +
    +
    +
    +
    +
    // fine
    +public List<String> twitterHandles(List<Author> authors, String company) {
    +    return authors.stream()
    +            .filter(a -> null != a && a.getCompany().equals(company))
    +            .map(a -> a.getTwitterHandle())
    +            .collect(toList());
    +  }
    +
    +
    +
  22. +
+
+
+
+
Optionals
+
+

With Optional you can wrap values to avoid a NullPointerException (NPE). However, it is not a good code-style to use Optional for every parameter or result to express that it may be null. For such cases use @Nullable, or even better, annotate with @NotNull where null is not acceptable.

+
+
+

However, Optional can be used to prevent NPEs in fluent calls (due to the lack of the elvis operator):

+
+
+
+
Long id;
+id = fooCto.getBar().getBar().getId(); // may cause NPE
+id = Optional.ofNullable(fooCto).map(FooCto::getBar).map(BarCto::getBar).map(BarEto::getId).orElse(null); // null-safe
+
+
+
+
+
Encoding
+
+

Encoding (esp. Unicode with combining characters and surrogates) is a complex topic. Please study this topic if you have to deal with encodings and processing of special characters. For the basics follow these recommendations:

+
+
+
    +
  • +

    Whenever possible prefer unicode (UTF-8 or better) as encoding. This especially impacts your databases and has to be defined upfront as it typically can not be changed (easily) afterwards.

    +
  • +
  • +

    Do not cast from byte to char (unicode characters can be composed of multiple bytes, such cast may only work for ASCII characters)

    +
  • +
  • +

    Never convert the case of a String using the default locale (esp. when writing generic code like in devonfw). E.g. if you do "HI".toLowerCase() and your system locale is Turkish, then the output will be "hı" instead of "hi", which can lead to wrong assumptions and serious problems. If you want to do a "universal" case conversion always explicitly use an according western locale (e.g. toLowerCase(Locale.US)). Consider using a helper class (see e.g. CaseHelper) or create your own little static utility for that in your project.

    +
  • +
  • +

    Write your code independent from the default encoding (system property file.encoding) - this will most likely differ in JUnit from production environment

    +
    +
      +
    • +

      Always provide an encoding when you create a String from byte[]: new String(bytes, encoding)

      +
    • +
    • +

      Always provide an encoding when you create a Reader or Writer : new InputStreamReader(inStream, encoding)

      +
    • +
    +
    +
  • +
+
+
+
+
Prefer general API
+
+

Avoid unnecessary strong bindings:

+
+
+
    +
  • +

    Do not bind your code to implementations such as Vector or ArrayList instead of List

    +
  • +
  • +

    In APIs for input (=parameters) always consider to make little assumptions:

    +
    +
      +
    • +

      prefer Collection over List or Set where the difference does not matter (e.g. only use Set when you require uniqueness or highly efficient contains)

      +
    • +
    • +

      consider preferring Collection<? extends Foo> over Collection<Foo> when Foo is an interface or super-class

      +
    • +
    +
    +
  • +
+
+
+
+
Prefer primitive boolean
+
+

Unless in rare cases where you need to allow a flag to be null, avoid using the object type Boolean.

+
+
+
+
// bad
+public Boolean isEmpty() {
+  return size() == 0;
+}
+
+
+
+

Instead always use the primitive boolean type:

+
+
+
+
// fine
+public boolean isEmpty() {
+  return size() == 0;
+}
+
+
+
+

The only known excuse is for flags in embeddable types due to limitations of hibernate.

+
+
+
+
+
+
+
+

Layers

+
+ +
+

==Client Layer

+
+
+

There are various technical approaches to building GUI clients. The devonfw proposes rich clients that connect to the server via data-oriented services (e.g. using REST with JSON). +In general, we have to distinguish among the following types of clients:

+
+
+
    +
  • +

    web clients

    +
  • +
  • +

    native desktop clients

    +
  • +
  • +

    (native) mobile clients

    +
  • +
+
+
+

Our main focus is on web-clients. In our sample application my-thai-star we offer a responsive web-client based on Angular following devon4ng that integrates seamlessly with the back ends of my-thai-star available for Java using devon4j as well as .NET/C# using devon4net. For building angular clients read the separate devon4ng guide.

+
+
+

JavaScript for Java Developers

+
+

In order to get started with client development as a Java developer we give you some hints to get started. Also if you are an experienced JavaScript developer and want to learn Java this can be helpful. First, you need to understand that the JavaScript ecosystem is as large as the Java ecosystem and developing a modern web client requires a lot of knowledge. The following table helps you as experienced developer to get an overview of the tools, configuration-files, and other related aspects from the new world to learn. Also it helps you to map concepts between the ecosystems. Please note that we list the tools recommended by devonfw here (and we know that there are alternatives not listed here such as gradle, grunt, bower, etc.).

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3. Aspects in JavaScript and Java ecosystem
TopicAspectJavaScriptJava

Programming

Language

TypeScript (extends JavaScript)

Java

Runtime

VM

nodejs (or web-browser)

jvm

Build- & Dependency-Management

Tool

npm or yarn

maven

Config

package.json

pom.xml

Repository

npm repo

maven central (repo search)

Build cmd

ng build or npm run build (goals are not standardized in npm)

mvn install (see lifecycle)

Test cmd

ng test

mvn test

Testing

Test-Tool

jasmine

junit

Test-Runner

karma

junit / surefire

E2E Testing

Protractor

Selenium

Code Analysis

Code Coverage

ng test --no-watch --code-coverage

JaCoCo

Development

IDE

MS VS Code or IntelliJ

Eclipse or IntelliJ

Framework

Angular (etc.)

Spring or Quarkus

+
+ +
+

==Service Layer

+
+
+

The service layer is responsible for exposing functionality made available by the logical layer to external consumers over a network via technical protocols.

+
+
+
+

Types of Services

+
+

Before you start creating your services you should consider some general design aspects:

+
+
+
    +
  • +

    Do you want to create a RPC service?

    +
  • +
  • +

    Or is your problem better addressed by messaging or eventing?

    +
  • +
  • +

    Who will consume your service?

    +
    +
      +
    • +

      Do you have one or multiple consumers?

      +
    • +
    • +

      Do web-browsers have to use your service?

      +
    • +
    • +

      Will apps from other vendors or parties have to consume your service that you can not influence if the service may have to change or be extended?

      +
    • +
    +
    +
  • +
+
+
+

For RPC a common choice is REST but there are also interesting alternatives like gRPC. We also have a guide for SOAP but this technology should rather be considered as legacy and is not recommended for new services.

+
+
+

When it comes to messaging in Java the typical answer will be JMS. However, a very promising alternative is Kafka.

+
+
+
+

Versioning

+
+

For RPC services consumed by other applications we use versioning to prevent incompatibilities between applications when deploying updates. This is done by the following conventions:

+
+
+
    +
  • +

    We define a version number and prefix it with v (e.g. v1).

    +
  • +
  • +

    If we support previous versions we use that version numbers as part of the Java package defining the service API (e.g. com.foo.application.component.service.api.v1)

    +
  • +
  • +

    We use the version number as part of the service name in the remote URL (e.g. https://application.foo.com/services/rest/component/v1/resource)

    +
  • +
  • +

    Whenever breaking changes are made to the API, create a separate version of the service and increment the version (e.g. v1 → v2). The implementations of the different versions of the service contain compatibility code and delegate to the same unversioned use-case of the logic layer whenever possible.

    +
  • +
  • +

    For maintenance and simplicity, avoid keeping more than one previous version.

    +
  • +
+
+
+
+

Interoperability

+
+

For services that are consumed by clients with different technology, interoperability is required. This is addressed by selecting the right protocol, following protocol-specific best practices and following our considerations especially simplicity.

+
+
+
+

Service Considerations

+
+

The term service is quite generic and therefore easily misunderstood. It is a unit exposing coherent functionality via a well-defined interface over a network. For the design of a service, we consider the following aspects:

+
+
+
    +
  • +

    self-contained
    +The entire API of the service shall be self-contained and have no dependencies on other parts of the application (other services, implementations, etc.).

    +
  • +
  • +

    idempotence
    +E.g. creation of the same master-data entity has no effect (no error)

    +
  • +
  • +

    loosely coupled
    +Service consumers have minimum knowledge and dependencies on the service provider.

    +
  • +
  • +

    normalized
    +Complete, no redundancy, minimal

    +
  • +
  • +

    coarse-grained
    +Service provides rather large operations (save entire entity or set of entities rather than individual attributes)

    +
  • +
  • +

    atomic
    +Process individual entities (for processing large sets of data, use a batch instead of a service)

    +
  • +
  • +

    simplicity
    +Avoid polymorphism, RPC methods with unique name per signature and no overloading, avoid attachments (consider separate download service), etc.

    +
  • +
+
+
+
+

Security

+
+

Your services are the major entry point to your application. Hence, security considerations are important here.

+
+
+

See REST Security.

+
+
+ +
+

==Logic Layer

+
+
+

The logic layer is the heart of the application and contains the main business logic. +According to our business architecture, we divide an application into components. +For each component, the logic layer defines different use-cases. Another approach is to define a component-facade, which we do not recommend for future applications. Especially for quarkus applications, we want to simplify things and highly suggest omitting component-facade completely and using use-cases only. +It is very important that you follow the links to understand the concept of use-case in order to properly implement your business logic.

+
+
+
+

Responsibility

+
+

The logic layer is responsible to implement the business logic according to the specified functional demands and requirements. +Therefore, it creates the actual value of the application. The logic layer is responsible for invoking business logic in external systems. +The following additional aspects are also included in its responsibility:

+
+
+ +
+
+
+

Security

+
+

The logic layer is the heart of the application. It is also responsible for authorization and hence security is important in this current case. Every method exposed in an interface needs to be annotated with an authorization check, stating what role(s) a caller must provide in order to be allowed to make the call. The authorization concept is described here.

+
+
+
Direct Object References
+
+

A security threat are Insecure Direct Object References. This simply gives you two options:

+
+
+
    +
  • +

    avoid direct object references

    +
  • +
  • +

    ensure that direct object references are secure

    +
  • +
+
+
+

Especially when using REST, direct object references via technical IDs are common sense. This implies that you have a proper authorization in place. This is especially tricky when your authorization does not only rely on the type of the data and according to static permissions but also on the data itself. Vulnerabilities for this threat can easily happen by design flaws and inadvertence. Here is an example from our sample application:

+
+
+

We have a generic use-case to manage BLOBs. In the first place, it makes sense to write a generic REST service to load and save these BLOBs. However, the permission to read or even update such BLOB depends on the business object hosting the BLOB. Therefore, such a generic REST service would open the door for this OWASP A4 vulnerability. To solve this in a secure way, you need individual services for each hosting business object to manage the linked BLOB and have to check permissions based on the parent business object. In this example the ID of the BLOB would be the direct object reference and the ID of the business object (and a BLOB property indicator) would be the indirect object reference.

+
+ +
+

==Component Facade

+
+
+ + + + + +
+ + +Our recommended approach for implementing the logic layer is use-cases +
+
+
+

For each component of the application, the logic layer defines a component facade. +This is an interface defining all business operations of the component. +It carries the name of the component («Component») and has an implementation named «Component»Impl (see implementation).

+
+
+
+
API
+
+

The component facade interface defines the logic API of the component and has to be business oriented. +This means that all parameters and return types of all methods from this API have to be business transfer-objects, datatypes (String, Integer, MyCustomerNumber, etc.), or collections of these. +The API may also only access objects of other business components listed in the (transitive) dependencies of the business-architecture.

+
+
+

Here is an example how such an API may look like:

+
+
+
+
public interface Bookingmanagement {
+
+  BookingEto findBooking(Long id);
+
+  BookingCto findBookingCto(Long id);
+
+  Page<BookingEto> findBookingEtos(BookingSearchCriteriaTo criteria);
+
+  void approveBooking(BookingEto booking);
+
+}
+
+
+
+
+
Implementation
+
+

The implementation of an interface from the logic layer (a component facade or a use-case) carries the name of that interface with the suffix Impl and is annotated with @Named. +An implementation typically needs access to the persistent data. +This is done by injecting the corresponding repository (or DAO). +According to data-sovereignty, only repositories of the same business component may be accessed directly. +For accessing data from other components the implementation has to use the corresponding API of the logic layer (the component facade). Further, it shall not expose persistent entities from the domain layer and has to map them to transfer objects using the bean-mapper.

+
+
+
+
@Named
+@Transactional
+public class BookingmanagementImpl extends AbstractComponentFacade implements Bookingmanagement {
+
+  @Inject
+  private BookingRepository bookingRepository;
+
+  @Override
+  public BookingEto findBooking(Long id) {
+
+    LOG.debug("Get Booking with id {} from database.", id);
+    BookingEntity entity = this.bookingRepository.findOne(id);
+    return getBeanMapper().map(entity, BookingEto.class);
+  }
+}
+
+
+
+

As you can see, entities (BookingEntity) are mapped to corresponding ETOs (BookingEto). +Further details about this can be found in bean-mapping.

+
+ +
+

==UseCase +A use-case is a small unit of the logic layer responsible for an operation on a particular entity (business object). +We leave it up to you to decide whether you want to define an interface (API) for each use-case or provide an implementation directly.

+
+
+

Following our architecture-mapping (for classic and modern project), use-cases are named Uc«Operation»«BusinessObject»[Impl]. The prefix Uc stands for use-case and allows to easily find and identify them in your IDE. The «Operation» stands for a verb that is operated on the entity identified by «BusinessObject». +For CRUD we use the standard operations Find and Manage that can be generated by CobiGen. This also separates read and write operations (e.g. if you want to do CQSR, or to configure read-only transactions for read operations).

+
+
+

In our example, we choose to define an interface for each use-case. We also use *To to refer to any type of transfer object. Please follow our guide to understand more about different types of transfer object e.g. Eto, Dto, Cto

+
+
+
+
Find
+
+

The UcFind«BusinessObject» defines all read operations to retrieve and search the «BusinessObject». +Here is an example:

+
+
+
+
public interface UcFindBooking {
+  //*To = Eto, Dto or Cto
+  Booking*To findBooking(Long id);
+}
+
+
+
+
+
Manage
+
+

The UcManage«BusinessObject» defines all CRUD write operations (create, update and delete) for the «BusinessObject». +Here is an example:

+
+
+
+
public interface UcManageBooking {
+
+  //*To = Eto, Dto or Cto
+  Booking*To saveBooking(Booking*To booking);
+
+  void deleteBooking(Long id);
+
+}
+
+
+
+
+
Custom
+
+

Any other non CRUD operation Uc«Operation»«BusinessObject» uses any other custom verb for «Operation». +Typically, such custom use-cases only define a single method. +Here is an example:

+
+
+
+
public interface UcApproveBooking {
+
+  //*To = Eto, Dto or Cto
+  void approveBooking(Booking*To booking);
+
+}
+
+
+
+
+
Implementation
+
+

The implementation should carry its own name and the suffix Impl and is annotated with @Named and @ApplicationScoped. It will need access to the persistent data which is done by injecting the corresponding repository (or DAO). Furthermore, it shall not expose persistent entities from the data access layer and has to map them to transfer objects using the bean-mapper. Please refer to our bean mapping, transfer object and dependency injection documentation for more information. +Here is an example:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcManageBookingImpl implements UcManageBooking {
+
+  @Inject
+  private BookingRepository bookingRepository;
+
+  @Override
+  public void deleteBooking(Long id) {
+
+    LOG.debug("Delete Booking with id {} from database.", id);
+    this.bookingRepository.deleteById(id);
+  }
+}
+
+
+
+

The use-cases can then be injected directly into the service.

+
+
+
+
@Named("BookingmanagementRestService")
+@Validated
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+
+  @Inject
+  private UcFindBooking ucFindBooking;
+
+  @Inject
+  private UcManageBooking ucManageBooking;
+
+  @Inject
+  private UcApproveBooking ucApproveBooking;
+}
+
+
+
+
+
Internal use case
+
+

Sometimes, a component with multiple related entities and many use-cases needs to reuse business logic internally. +Of course, this can be exposed as an official use-case API but this will imply using transfer-objects (ETOs) instead of entities. In some cases, this is undesired e.g. for better performance to prevent unnecessary mapping of entire collections of entities. +In the first place, you should try to use abstract base implementations providing reusable methods the actual use-case implementations can inherit from. +If your business logic is even more complex and you have multiple aspects of business logic to share and reuse but also run into multi-inheritance issues, you may also just create use-cases that have their interface located in the impl scope package right next to the implementation (or you may just skip the interface). In such a case, you may define methods that directly take or return entity objects. +To avoid confusion with regular use-cases, we recommend to add the Internal suffix to the type name leading to Uc«Operation»«BusinessObject»Internal[Impl].

+
+
+ +
+

==Data-Access Layer

+
+
+

The data-access layer is responsible for all outgoing connections to access and process data. This is mainly about accessing data from a persistent data-store. External system could also be accessed from the data-access layer if they match this definition, e.g. a mongo-db via rest services.

+
+
+

Note: In the modern project structure, this layer is replaced by the domain layer.

+
+
+
+
+

Database

+
+

You need to make your choice for a database. Options are documented here.

+
+
+

The classical approach is to use a Relational Database Management System (RDBMS). In such a case, we strongly recommend to follow our JPA Guide. Some NoSQL databases are supported by spring-data so you can consider the repository guide.

+
+
+ +
+

==Batch Layer

+
+
+

We understand batch processing as a bulk-oriented, non-interactive, typically long running execution of tasks. For simplicity, we use the term "batch" or "batch job" for such tasks in the following documentation.

+
+
+

devonfw uses Spring Batch as a batch framework.

+
+
+

This guide explains how Spring Batch is used in devonfw applications. It focuses on aspects which are special to devonfw. If you want to learn about spring-batch you should adhere to springs references documentation.

+
+
+

There is an example of a simple batch implementation in the my-thai-star batch module.

+
+
+

In this chapter, we will describe the overall architecture (especially concerning layering) and how to administer batches.

+
+
+
+

Layering

+
+

Batches are implemented in the batch layer. The batch layer is responsible for batch processes, whereas the business logic is implemented in the logic layer. Compared to the service layer, you may understand the batch layer just as a different way of accessing the business logic. +From a component point of view, each batch is implemented as a subcomponent in the corresponding business component. +The business component is defined by the business architecture.

+
+
+

Let’s make an example for that. The sample application implements a batch for exporting ingredients. This ingredientExportJob belongs to the dishmanagement business component. +So the ingredientExportJob is implemented in the following package:

+
+
+
+
<basepackage>.dishmanagement.batch.impl.*
+
+
+
+

Batches should invoke use cases in the logic layer for doing their work. +Only "batch specific" technical aspects should be implemented in the batch layer.

+
+
+
+
+

Example: +For a batch, which imports product data from a CSV file, this means that all code for actually reading and parsing the CSV input file is implemented in the batch layer. +The batch calls the use case "create product" in the logic layer for actually creating the products for each line read from the CSV input file.

+
+
+
+
+
Directly accessing data access layer
+
+

In practice, it is not always appropriate to create use cases for every bit of work a batch should do. Instead, the data access layer can be used directly. +An example for that is a typical batch for data retention which deletes out-of-time data. +Often, deleting out-dated data is done by invoking a single SQL statement. It is appropriate to implement that SQL in a Repository or DAO method and call this method directly from the batch. +But be careful: this pattern is a simplification which could lead to business logic cluttered in different layers, which reduces the maintainability of your application. +It is a typical design decision you have to make when designing your specific batches.

+
+
+
+
+

Project structure and packaging

+
+

Batches will be implemented in a separate Maven module to keep the application core free of batch dependencies. The batch module includes a dependency on the application core-module to allow the reuse of the use cases, DAOs etc. +Additionally the batch module has dependencies on the required spring batch jars:

+
+
+
+
  <dependencies>
+
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>mtsj-core</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter-batch</artifactId>
+    </dependency>
+
+  </dependencies>
+
+
+
+

To allow an easy start of the batches from the command line it is advised to create a bootified jar for the batch module by adding the following to the pom.xml of the batch module:

+
+
+
+
  <build>
+    <resources>
+      <resource>
+        <directory>src/main/resources</directory>
+        <filtering>true</filtering>
+      </resource>
+    </resources>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <configuration>
+          <excludes>
+            <exclude>config/application.properties</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+      <!-- Create bootified jar for batch execution via command line.
+           Your applications spring boot app is used as main-class.
+       -->
+      <plugin>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-maven-plugin</artifactId>
+        <configuration>
+          <mainClass>com.devonfw.application.mtsj.SpringBootApp</mainClass>
+          <classifier>bootified</classifier>
+        </configuration>
+        <executions>
+          <execution>
+            <goals>
+              <goal>repackage</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+
+
+
+

Implementation

+
+

Most of the details about the implementation of batches are described in the spring batch documentation. +There is nothing special about implementing batches in devonfw. You will find an easy example in my-thai-star.

+
+
+
+

Starting from command line

+
+

Devonfw advises to start batches via command line. This is most common to many ops teams and allows easy integration in existing schedulers. In general batches are started with the following command:

+
+
+
+
java -jar <app>-batch-<version>-bootified.jar --spring.main.web-application-type=none --spring.batch.job.enabled=true --spring.batch.job.names=<myJob> <params>
+
+
+ ++++ + + + + + + + + + + + + + + + + + + + + +
ParameterExplanation

--spring.main.web-application-type=none

This disables the web app (e.g. Tomcat)

--spring.batch.job.names=<myJob>

This specifies the name of the job to run. If you leave this out, ALL jobs will be executed, which probably does not make too much sense.

<params>

(Optional) additional parameters which are passed to your job

+
+

This will launch your normal spring boot app, disables the web application part and runs the designated job via Spring Boots org.springframework.boot.autoconfigure.batch.JobLauncherCommandLineRunner.

+
+
+
+

Scheduling

+
+

In the real world, scheduling of batches is not as simple as it might look at first.

+
+
+
    +
  • +

    Multiple batches have to be executed in order to achieve complex tasks. If one of those batches fails the further execution has to be stopped and operations should be notified for example.

    +
  • +
  • +

    Input files or those created by batches have to be copied from one node to another.

    +
  • +
  • +

    Scheduling batch executing could get complex easily (quarterly jobs, run job on first workday of a month, …​)

    +
  • +
+
+
+

For devonfw we propose the batches themselves should not mess around with details of scheduling. +Likewise your application should not do so. This complexity should be externalized to a dedicated batch administration service or scheduler. +This service could be a complex product or a simple tool like cron. We propose Rundeck as an open source job scheduler.

+
+
+

This gives full control to operations to choose the solution which fits best into existing administration procedures.

+
+
+
+

Handling restarts

+
+

If you start a job with the same parameters set after a failed run (BatchStatus.FAILED) a restart will occur. +In many cases your batch should then not reprocess all items it processed in the previous runs. +For that you need some logic to start at the desired offset. There are different ways to implement such logic:

+
+
+
    +
  • +

    Marking processed items in the database in a dedicated column

    +
  • +
  • +

    Write all IDs of items to process in a separate table as an initialization step of your batch. You can then delete IDs of already processed items from that table during the batch execution.

    +
  • +
  • +

    Storing restart information in springs ExecutionContext (see below)

    +
  • +
+
+
+
Using spring batch ExecutionContext for restarts
+
+

By implementing the ItemStream interface in your ItemReader or ItemWriter you may store information about the batch progress in the ExecutionContext. You will find an example for that in the CountJob in My Thai Star.

+
+
+

Additional hint: It is important that the bean definition methods for your ItemReader/ItemWriter have return types implementing ItemStream (and not just ItemReader or ItemWriter alone). For that, the ItemStreamReader and ItemStreamWriter interfaces are provided.

+
+
+
+
+

Exit codes

+
+

Your batches should create a meaningful exit code to allow reaction to batch errors e.g. in a scheduler. +For that spring batch automatically registers an org.springframework.boot.autoconfigure.batch.JobExecutionExitCodeGenerator. To make this mechanism work, your spring boot app main class has to propagate this exit code to the JVM:

+
+
+
+
@SpringBootApplication
+public class SpringBootApp {
+
+  public static void main(String[] args) {
+    if (Arrays.stream(args).anyMatch((String e) -> e.contains("--spring.batch.job.names"))) {
+      // if executing batch job, explicitly exit jvm to report error code from batch
+      System.exit(SpringApplication.exit(SpringApplication.run(SpringBootApp.class, args)));
+    } else {
+      // normal web application start
+      SpringApplication.run(SpringBootApp.class, args);
+    }
+  }
+}
+
+
+
+
+

Stop batches and manage batch status

+
+

Spring batch uses several database tables to store the status of batch executions. +Each execution may have different status. +You may use this mechanism to gracefully stop batches. +Additionally in some edge cases (batch process crashed) the execution status may be in an undesired state. +E.g. the state will be running, despite the process crashed sometime ago. +For that cases you have to change the status of the execution in the database.

+
+
+
CLI-Tool
+
+

Devonfw provides an easy-to-use CLI tool to manage the execution status of your jobs. +The tool is implemented in the devonfw module devon4j-batch-tool. It will provide a runnable jar, which may be used as follows:

+
+
+
+
List names of all previous executed jobs
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar jobs list

+
+
Stop job named 'countJob'
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar jobs stop countJob

+
+
Show help
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar

+
+
+
+
+

As you can see, each invocation includes the JDBC connection string to your database. +This means that you have to make sure that the corresponding DB driver is in the classpath (the prepared jar only contains H2).

+
+
+
+
+

Authentication

+
+

Most business applications incorporate authentication and authorization. +Your spring boot application will implement some kind of security, e.g. integrated login with username+password or in many cases authentication via an existing IAM. +For security reasons your batch should also implement an authentication mechanism and obey the authorization implemented in your application (e.g. via @RolesAllowed).

+
+
+

Since there are many different authentication mechanisms, we cannot provide an out-of-the-box solution in devonfw, but we describe a pattern how this can be implemented in devonfw batches.

+
+
+

We suggest to implement the authentication in a Spring Batch tasklet, which runs as the first step in your batch. This tasklet will do all of the work which is required to authenticate the batch. A simple example which authenticates the batch "locally" via username and password could be implemented like this:

+
+
+
+
@Named
+public class SimpleAuthenticationTasklet implements Tasklet {
+
+  @Override
+  public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
+
+    String username = chunkContext.getStepContext().getStepExecution().getJobParameters().getString("username");
+    String password = chunkContext.getStepContext().getStepExecution().getJobParameters().getString("password");
+    Authentication authentication = new UsernamePasswordAuthenticationToken(username, password);
+
+    SecurityContextHolder.getContext().setAuthentication(authentication);
+    return RepeatStatus.FINISHED;
+  }
+
+}
+
+
+
+

The username and password have to be supplied via two cli parameters -username and -password. This implementation creates an "authenticated" Authentication and sets it in the Spring Security context. This is just for demonstration; normally you should not provide passwords via the command line. The actual authentication will be done automatically via Spring Security as in your "normal" application. +If you have a more complex authentication mechanism in your application, e.g. via OpenID connect, just call this in the tasklet. Naturally you may read authentication parameters (e.g. secrets) from the command line or, more securely, from a configuration file.

+
+
+

In your Job Configuration set this tasklet as the first step:

+
+
+
+
@Configuration
+@EnableBatchProcessing
+public class BookingsExportBatchConfig {
+  @Inject
+  private JobBuilderFactory jobBuilderFactory;
+
+  @Inject
+  private StepBuilderFactory stepBuilderFactory;
+
+  @Bean
+  public Job myBatchJob() {
+    return this.jobBuilderFactory.get("myJob").start(myAuthenticationStep()).next(...).build();
+  }
+
+  @Bean
+  public Step myAuthenticationStep() {
+    return this.stepBuilderFactory.get("myAuthenticationStep").tasklet(myAuthenticatonTasklet()).build();
+  }
+
+  @Bean
+  public Tasklet myAuthenticatonTasklet() {
+    return new SimpleAuthenticationTasklet();
+  }
+...
+
+
+
+
+

Tips & tricks

+
+
Identifying job parameters
+
+

Spring uses a job's parameters to identify job executions. Parameters starting with "-" are not considered for identifying a job execution.

+
+
+
+
+
+
+

Guides

+
+ +
+

==Dependency Injection +Dependency injection is one of the most important design patterns and is a key principle to a modular and component based architecture. +The Java Standard for dependency injection is javax.inject (JSR330) that we use in combination with JSR250. +Additionally, for scoping you can use CDI (Context and Dependency Injection) from JSR365.

+
+
+

There are many frameworks which support this standard including all recent Java EE application servers. +Therefore in devonfw we rely on these open standards and can propagate patterns and code examples that work independent from the underlying frameworks.

+
+
+

Key Principles

+
+

Within dependency injection a bean is typically a reusable unit of your application providing an encapsulated functionality. +This bean can be injected into other beans and it should in general be replaceable. +As an example we can think of a use-case, a repository, etc. +As best practice we use the following principles:

+
+
+
    +
  • +

    Stateless implementation
    +By default such beans shall be implemented stateless. If you store state information in member variables you can easily run into concurrency problems and nasty bugs. This is easy to avoid by using local variables and separate state classes for complex state-information. Try to avoid stateful beans wherever possible. Only add state if you are fully aware of what you are doing and properly document this as a warning in your JavaDoc.

    +
  • +
  • +

    Usage of Java standards
    +We use common standards (see above) that makes our code portable. Therefore we use standardized annotations like @Inject (javax.inject.Inject) instead of proprietary annotations such as @Autowired. Generally we avoid proprietary annotations in business code (logic layer).

    +
  • +
  • +

    Simple injection-style
    +In general you can choose between constructor, setter or field injection. For simplicity we recommend to do private field injection as it is very compact and easy to maintain. We believe that constructor injection is bad for maintenance especially in case of inheritance (if you change the dependencies you need to refactor all sub-classes). Private field injection and public setter injection are very similar but setter injection is much more verbose (often you are even forced to have javadoc for all public methods). If you are writing re-usable library code setter injection will make sense as it is more flexible. In a business application you typically do not need that and can save a lot of boiler-plate code if you use private field injection instead. Nowadays you are using container infrastructure also for your tests (see testing) so there is no need to inject manually (what would require a public setter).

    +
  • +
  • +

    KISS
    +To follow the KISS (keep it small and simple) principle we avoid advanced features (e.g. custom AOP, non-singleton beans) and only use them where necessary.

    +
  • +
  • +

    Separation of API and implementation
    +For important components we should separate a self-contained API documented with JavaDoc from its implementation. Code from other components that wants to use the implementation shall only rely on the API. However, for things that will never be exchanged no API as interface is required you can skip such separation.

    +
  • +
+
+
+
+

Example Bean

+
+

Here you can see the implementation of an example bean using dependency injection:

+
+
+
+
@ApplicationScoped
+@Named("MyComponent")
+public class MyComponentImpl implements MyComponent {
+  @Inject
+  private MyOtherComponent myOtherComponent;
+
+  @PostConstruct
+  public void init() {
+    // initialization if required (otherwise omit this method)
+  }
+
+  @PreDestroy
+  public void dispose() {
+    // shutdown bean, free resources if required (otherwise omit this method)
+  }
+
+  ...
+}
+
+
+
+

Here MyComponentImpl depends on MyOtherComponent that is injected into the field myOtherComponent because of the @Inject annotation. +To make this work there must be exactly one bean in the container (e.g. spring or quarkus) that is an instance of MyOtherComponent. +In order to put a bean into the container, we can use @ApplicationScoped in case of CDI (required for quarkus) for a stateless bean. +In spring we can omit a CDI annotation and the @Named annotation is already sufficient as a bean is stateless by default in spring. +If we always use @ApplicationScoped we can make this more explicit and more portable across different frameworks. +So in our example we put MyComponentImpl into the container. +That bean will be called MyComponent as we specified in the @Named annotation but we can also omit the name to use the classname as fallback. +Now our bean can be injected into other beans using @Inject annotation either via MyComponent interface (recommended when interface is present) or even directly via MyComponentImpl. +In case you omit the interface, you should also omit the Impl suffix or instead use Bean as suffix.

+
+
+
+

Multiple bean implementations

+
+

In some cases you might have multiple implementations as beans for the same interface. +The following sub-sections handle the different scenarios to give you guidance.

+
+
+
Only one implementation in container
+
+

In some cases you still have only one implementation active as bean in the container at runtime. +A typical example is that you have different implementations for test and main usage. +This case is easy, as @Inject will always be unique. +The only thing you need to care about is how to configure your framework (spring, quarkus, etc.) to know which implementation to put in the container depending on specific configuration. +In spring this can be achieved via the proprietary @Profile annotation.

+
+
+
+
Injecting all of multiple implementations
+
+

In some situations you may have an interface that defines a kind of "plugin". +You can have multiple implementations in your container and want to have all of them injected. +Then you can request a list with all the bean implementations via the interface as in the following example:

+
+
+
+
  @Inject
+  private List<MyConverter> converters;
+
+
+
+

Your code may iterate over all plugins (converters) and apply them sequentially. +Please note that the injection will fail (at least in spring), when there is no bean available to inject. +So you do not get an empty list injected but will get an exception on startup.

+
+
+
+
Injecting one of multiple implementations
+
+

Another scenario is that you have multiple implementations in your container coexisting, but for injection you may want to choose a specific implementation. +Here you could use the @Named annotation to specify a unique identifier for each implementation what is called qualified injection:

+
+
+
+
@ApplicationScoped
+@Named("UserAuthenticator")
+public class UserAuthenticator implements Authenticator {
+  ...
+}
+@ApplicationScoped
+@Named("ServiceAuthenticator")
+public class ServiceAuthenticator implements Authenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  @Named("UserAuthenticator")
+  private Authenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  @Named("ServiceAuthenticator")
+  private Authenticator authenticator;
+  ...
+}
+
+
+
+

However, we discovered that this pattern is not so great: +The identifiers in the @Named annotation are just strings that could easily break. +You could use constants instead but still this is not the best solution.

+
+
+

In the end you can very much simplify this by just directly injecting the implementation instead:

+
+
+
+
@ApplicationScoped
+public class UserAuthenticator implements Authenticator {
+  ...
+}
+@ApplicationScoped
+public class ServiceAuthenticator implements Authenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  private UserAuthenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  private ServiceAuthenticator authenticator;
+  ...
+}
+
+
+
+

In case you want to strictly decouple from implementations, you can still create dedicated interfaces:

+
+
+
+
public interface UserAuthenticator extends Authenticator {}
+@ApplicationScoped
+public class UserAuthenticatorImpl implements UserAuthenticator {
+  ...
+}
+public interface ServiceAuthenticator extends Authenticator {}
+@ApplicationScoped
+public class ServiceAuthenticatorImpl implements ServiceAuthenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  private UserAuthenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  private ServiceAuthenticator authenticator;
+  ...
+}
+
+
+
+

However, as you can see this is again introducing additional boiler-plate code. +While the principle to separate API and implementation and strictly decouple from implementation is valuable in general, +you should always consider KISS, lean, and agile in contrast and balance pros and cons instead of blindly following dogmas.

+
+
+
+
+

Imports

+
+

Here are the import statements for the most important annotations for dependency injection

+
+
+
+
import javax.inject.Inject;
+import javax.inject.Named;
+import javax.enterprise.context.ApplicationScoped;
+// import javax.enterprise.context.RequestScoped;
+// import javax.enterprise.context.SessionScoped;
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+
+
+
+
+

Dependencies

+
+

Please note that with Jakarta EE the dependencies have changed. +When you want to start with Jakarta EE you should use these dependencies to get the annotations for dependency injection:

+
+
+
+
<!-- Basic injection annotations (JSR-330) -->
+<dependency>
+  <groupId>jakarta.inject</groupId>
+  <artifactId>jakarta.inject-api</artifactId>
+</dependency>
+<!-- Basic lifecycle and security annotations (JSR-250)-->
+<dependency>
+  <groupId>jakarta.annotation</groupId>
+  <artifactId>jakarta.annotation-api</artifactId>
+</dependency>
+<!-- Context and dependency injection API (JSR-365) -->
+<dependency>
+  <groupId>jakarta.enterprise</groupId>
+  <artifactId>jakarta.enterprise.cdi-api</artifactId>
+</dependency>
+
+
+
+

Please note that with quarkus you will get them as transitive dependencies out of the box. +The above Jakarta EE dependencies replace these JEE dependencies:

+
+
+
+
<!-- Basic injection annotations (JSR-330) -->
+<dependency>
+  <groupId>javax.inject</groupId>
+  <artifactId>javax.inject</artifactId>
+</dependency>
+<!-- Basic lifecycle and security annotations (JSR-250)-->
+<dependency>
+  <groupId>javax.annotation</groupId>
+  <artifactId>javax.annotation-api</artifactId>
+</dependency>
+<!-- Context and dependency injection API (JSR-365) -->
+<dependency>
+  <groupId>jakarta.enterprise</groupId>
+  <artifactId>jakarta.enterprise.cdi-api</artifactId>
+</dependency>
+
+
+
+ +
+

==Configuration

+
+
+

An application needs to be configurable in order to allow internal setup (like CDI) but also to allow externalized configuration of a deployed package (e.g. integration into runtime environment). We rely on a comprehensive configuration approach following a "convention over configuration" pattern. This guide adds on to this by detailed instructions and best-practices how to deal with configurations.

+
+
+

In general we distinguish the following kinds of configuration that are explained in the following sections:

+
+
+ +
+
+
+

Internal Application Configuration

+
+

The application configuration contains all internal settings and wirings of the application (bean wiring, database mappings, etc.) and is maintained by the application developers at development time.

+
+
+

For more detail of Spring stack, see here

+
+
+
+

Externalized Configuration

+
+

Externalized configuration is a configuration that is provided separately to a deployment package and can be maintained undisturbed by re-deployments.

+
+
+
Environment Configuration
+
+

The environment configuration contains configuration parameters (typically port numbers, host names, passwords, logins, timeouts, certificates, etc.) specific for the different environments. These are under the control of the operators responsible for the application.

+
+
+

The environment configuration is maintained in application.properties files, defining various properties. +These properties are explained in the corresponding configuration sections of the guides for each topic:

+
+
+ +
+
+

Make sure your properties are thoroughly documented by providing a comment to each property. This inline documentation is most valuable for your operating department.

+
+
+

More about structuring your application.properties files can be read here for Spring.

+
+
+

For Quarkus, please refer to Quarkus Config Reference for more details.

+
+
+
+
Business Configuration
+
+

Often applications do not need business configuration. In case they do it should typically be editable by administrators via the GUI. The business configuration values should therefore be stored in the database in key/value pairs.

+
+
+

Therefore we suggest to create a dedicated table with (at least) the following columns:

+
+
+
    +
  • +

    ID

    +
  • +
  • +

    Property name

    +
  • +
  • +

    Property type (Boolean, Integer, String)

    +
  • +
  • +

    Property value

    +
  • +
  • +

    Description

    +
  • +
+
+
+

According to the entries in this table, an administrative GUI may show a generic form to modify business configuration. Boolean values should be shown as checkboxes, integer and string values as text fields. The values should be validated according to their type so an error is raised if you try to save a string in an integer property for example.

+
+
+

We recommend the following base layout for the hierarchical business configuration:

+
+
+

component.[subcomponent].[subcomponent].propertyname

+
+
+
+
+

Security

+
+

Often you need to have passwords (for databases, third-party services, etc.) as part of your configuration. These are typically environment specific (see above). However, with DevOps and continuous-deployment you might be tempted to commit such configurations into your version-control (e.g. git). Doing that with plain text passwords is a severe problem especially for production systems. Never do that! Instead we offer some suggestions how to deal with sensitive configurations:

+
+
+
Password Encryption
+
+

A simple but reasonable approach is to configure the passwords encrypted with a master-password. The master-password should be a strong secret that is specific for each environment. It must never be committed to version-control.

+
+
+

For Spring, we use jasypt-spring-boot. For more details, see here

+
+
+

For Quarkus, see here

+
+
+
Is this Security by Obscurity?
+
+
    +
  • +

    Yes, from the point of view to protect the passwords on the target environment this is nothing but security by obscurity. If an attacker somehow got full access to the machine this will only cause him to spend some more time.

    +
  • +
  • +

    No, if someone only gets the configuration file. So all your developers might have access to the version-control where the config is stored. Others might have access to the software releases that include these configs. But without the master-password that should only be known to specific operators no one else can decrypt the password (except with brute-force which will take a very long time, see jasypt for details).

    +
  • +
+
+ +
+

==Mapping configuration to your code

+
+
+

If you are using spring-boot as suggested by devon4j your application can be configured by application.properties file as described in configuration. +To get a single configuration option into your code for flexibility, you can use

+
+
+
+
@Value("${my.property.name}")
+private String myConfigurableField;
+
+
+
+

Now, in your application.properties you can add the property:

+
+
+
+
my.property.name=my-property-value
+
+
+
+

You may even use @Value("${my.property.name:my-default-value}") to make the property optional.

+
+
+
+
+
+

Naming conventions for configuration properties

+
+

As a best practice your configuration properties should follow these naming conventions:

+
+
+
    +
  • +

    build the property-name as a path of segments separated by the dot character (.)

    +
  • +
  • +

    segments should get more specific from left to right

    +
  • +
  • +

    a property-name should either be a leaf value or a tree node (prefix of other property-names) but never both! So never have something like foo.bar=value and foo.bar.child=value2.

    +
  • +
  • +

    start with a segment namespace unique to your context or application

    +
  • +
  • +

    a good example would be «myapp».billing.service.email.sender for the sender address of billing service emails sent by «myapp».

    +
  • +
+
+
+
+

Mapping advanced configuration

+
+

However, in many scenarios you will have features that require more than just one property. +Injecting those via @Value is not leading to good code quality. +Instead we create a class with the suffix ConfigProperties containing all configuration properties for our aspect that is annotated with @ConfigurationProperties:

+
+
+
+
@ConfigurationProperties(prefix = "myapp.billing.service")
+public class BillingServiceConfigProperties {
+
+  private final Email email = new Email();
+  private final Smtp smtp = new Smtp();
+
+  public Email getEmail() { return this.email; }
+  public Email getSmtp() { return this.smtp; }
+
+  public static class Email {
+
+    private String sender;
+    private String subject;
+
+    public String getSender() { return this.sender; }
+    public void setSender(String sender) { this.sender = sender; }
+    public String getSubject() { return this.subject; }
+    public void setSubject(String subject) { this.subject = subject; }
+  }
+
+  public static class Smtp {
+
+    private String host;
+    private int port = 25;
+
+    public String getHost() { return this.host; }
+    public void setHost(String host) { this.host = host; }
+    public int getPort() { return this.port; }
+    public void setPort(int port) { this.port = port; }
+  }
+
+}
+
+
+
+

Of course this is just an example to demonstrate this feature of spring-boot. +In order to send emails you would typically use the existing spring-email feature. +But as you can see this allows us to define and access our configuration in a very structured and comfortable way. +The annotation @ConfigurationProperties(prefix = "myapp.billing.service") will automatically map spring configuration properties starting with myapp.billing.service via the according getters and setters into our BillingServiceConfigProperties. +We can easily define defaults (e.g. 25 as default value for myapp.billing.service.smtp.port). +Also Email or Smtp could be top-level classes to be reused in multiple configurations. +Of course you would also add helpful JavaDoc comments to the getters and classes to document your configuration options. +Further to access this configuration, we can use standard dependency-injection:

+
+
+
+
@Inject
+private BillingServiceConfigProperties config;
+
+
+
+

For very generic cases you may also use Map<String, String> to map any kind of property in an untyped way. +An example for generic configuration from devon4j can be found in +ServiceConfigProperties.

+
+
+

For further details about this feature also consult Guide to @ConfigurationProperties in Spring Boot.

+
+
+
+

Generate configuration metadata

+
+

You should further add this dependency to your module containing the *ConfigProperties:

+
+
+
+
    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-configuration-processor</artifactId>
+      <optional>true</optional>
+    </dependency>
+
+
+
+

This will generate configuration metadata so projects using your code can benefit from autocompletion and getting your JavaDoc as tooltip when editing application.properties, which makes this approach very powerful. +For further details about this please read A Guide to Spring Boot Configuration Metadata.

+
+
+ +
+

==Java Persistence API

+
+
+

For mapping java objects to a relational database we use the Java Persistence API (JPA). +As JPA implementation we recommend to use Hibernate. For general documentation about JPA and Hibernate follow the links above as we will not replicate the documentation. Here you will only find guidelines and examples how we recommend to use it properly. The following examples show how to map the data of a database to an entity. As we use JPA we abstract from SQL here. However, you will still need a DDL script for your schema and during maintenance also database migrations. Please follow our SQL guide for such artifacts.

+
+
+
+

Entity

+
+

Entities are part of the persistence layer and contain the actual data. They are POJOs (Plain Old Java Objects) on which the relational data of a database is mapped and vice versa. The mapping is configured via JPA annotations (javax.persistence). Usually an entity class corresponds to a table of a database and a property to a column of that table. A persistent entity instance then represents a row of the database table.

+
+
+
A Simple Entity
+
+

The following listing shows a simple example:

+
+
+
+
@Entity
+@Table(name="TEXTMESSAGE")
+public class MessageEntity extends ApplicationPersistenceEntity implements Message {
+
+  private String text;
+
+  public String getText() {
+    return this.text;
+  }
+
+  public void setText(String text) {
+    this.text = text;
+  }
+ }
+
+
+
+

The @Entity annotation defines that instances of this class will be entities which can be stored in the database. The @Table annotation is optional and can be used to define the name of the corresponding table in the database. If it is not specified, the simple name of the entity class is used instead.

+
+
+

In order to specify how to map the attributes to columns we annotate the corresponding getter methods (technically private field annotation is also possible but approaches can not be mixed). +The @Id annotation specifies that a property should be used as primary key. +With the help of the @Column annotation it is possible to define the name of the column that an attribute is mapped to as well as other aspects such as nullable or unique. If no column name is specified, the name of the property is used as default.

+
+
+

Note that every entity class needs a constructor with public or protected visibility that does not have any arguments. Moreover, neither the class nor its getters and setters may be final.

+
+
+

Entities should be simple POJOs and not contain business logic.

+
+
+
+
Entities and Datatypes
+
+

Standard datatypes like Integer, BigDecimal, String, etc. are mapped automatically by JPA. Custom datatypes are mapped as serialized BLOB by default, which is typically undesired. +In order to map atomic custom datatypes (implementations of SimpleDatatype) we implement an AttributeConverter. Here is a simple example:

+
+
+
+
@Converter(autoApply = true)
+public class MoneyAttributeConverter implements AttributeConverter<Money, BigDecimal> {
+
+  public BigDecimal convertToDatabaseColumn(Money attribute) {
+    return attribute.getValue();
+  }
+
+  public Money convertToEntityAttribute(BigDecimal dbData) {
+    return new Money(dbData);
+  }
+}
+
+
+
+

The annotation @Converter is detected by the JPA vendor if the annotated class is in the packages to scan. Further, autoApply = true implies that the converter is automatically used for all properties of the handled datatype. Therefore all entities with properties of that datatype will automatically be mapped properly (in our example Money is mapped as BigDecimal).

+
+
+

In case you have a composite datatype that you need to map to multiple columns the JPA does not offer a real solution. As a workaround you can use a bean instead of a real datatype and declare it as @Embeddable. If you are using Hibernate you can implement CompositeUserType. Via the @TypeDef annotation it can be registered to Hibernate. If you want to annotate the CompositeUserType implementation itself you also need another annotation (e.g. MappedSuperclass though not technically correct) so it is found by the scan.

+
+
+
Enumerations
+
+

By default JPA maps Enums via their ordinal. Therefore the database will only contain the ordinals (0, 1, 2, etc.). So, inside the database you can not easily understand their meaning. Using @Enumerated with EnumType.STRING allows to map the enum values to their name (Enum.name()). Both approaches are fragile when it comes to code changes and refactoring (if you change the order of the enum values or rename them) after the application is deployed to production. If you want to avoid this and get a robust mapping you can define a dedicated string in each enum value for database representation that you keep untouched. Then you treat the enum just like any other custom datatype.

+
+
+
+
BLOB
+
+

If binary or character large objects (BLOB/CLOB) should be used to store the value of an attribute, e.g. to store an icon, the @Lob annotation should be used as shown in the following listing:

+
+
+
+
@Lob
+public byte[] getIcon() {
+  return this.icon;
+}
+
+
+
+ + + + + +
+ + +Using a byte array will cause problems if BLOBs get large because the entire BLOB is loaded into the RAM of the server and has to be processed by the garbage collector. For larger BLOBs the type Blob and streaming should be used. +
+
+
+
+
public Blob getAttachment() {
+  return this.attachment;
+}
+
+
+
+
+
Date and Time
+
+

To store date and time related values, the temporal annotation can be used as shown in the listing below:

+
+
+
+
@Temporal(TemporalType.TIMESTAMP)
+public java.util.Date getStart() {
+  return start;
+}
+
+
+
+

Until Java8 the java data type java.util.Date (or Jodatime) has to be used. +TemporalType defines the granularity. In this case, a precision of nanoseconds is used. If this granularity is not wanted, TemporalType.DATE can be used instead, which only has a granularity of milliseconds. +Mixing these two granularities can cause problems when comparing one value to another. This is why we only use TemporalType.TIMESTAMP.

+
+
+
+
QueryDSL and Custom Types
+
+

Using the Aliases API of QueryDSL might result in an InvalidDataAccessApiUsageException when using custom datatypes in entity properties. This can be circumvented in two steps:

+
+
+
    +
  1. +

    Ensure you have the following maven dependencies in your project (core module) to support custom types via the Aliases API:

    +
    +
    +
    <dependency>
    +  <groupId>org.ow2.asm</groupId>
    +  <artifactId>asm</artifactId>
    +</dependency>
    +<dependency>
    +  <groupId>cglib</groupId>
    +  <artifactId>cglib</artifactId>
    +</dependency>
    +
    +
    +
  2. +
  3. +

    Make sure, that all your custom types used in entities provide a non-argument constructor with at least visibility level protected.

    +
  4. +
+
+
+
+
+
Primary Keys
+
+

We only use simple Long values as primary keys (IDs). +By default it is auto generated (@GeneratedValue(strategy=GenerationType.AUTO)). +This is already provided by the class com.devonfw.<projectName>.general.dataaccess.api.AbstractPersistenceEntity within the classic project structure respectively com.devonfw.<projectName>.general.domain.model.AbstractPersistenceEntity within the modern project structure, that you can extend.

+
+
+

The reason for this recommendation is simply because using a number (Long) is the most efficient representation for the database. +You may also consider to use other types like String or UUID or even composite custom datatypes and this is technically possible. +However, please consider that the primary key is used to lookup the row from the database table, also in foreign keys and thus in JOINs. +Please note that your project sooner or later may reach some complexity where performance really matters. +Working on big data and performing JOINs when using types such as String (VARCHAR[2]) as primary and foreign keys will kill your performance. +You are still free to make a different choice and devonfw only gives recommendations but does not want to dictate you what to do. +However, you have been warned about the consequences. +If you are well aware of what you are doing, you can still use different types of primary keys. +In such case, create your own entity not extending AbstractPersistenceEntity or create your own copy of AbstractPersistenceEntity with a different name and a different type of primary key.

+
+
+

In case you have business oriented keys (often as String), you can define an additional property for it and declare it as unique (@Column(unique=true)). +Be sure to include "AUTO_INCREMENT" in your sql table field ID to be able to persist data (or similar for other databases).

+
+
+
+
+

Relationships

+
+
n:1 and 1:1 Relationships
+
+

Entities often do not exist independently but are in some relation to each other. For example, for every period of time one of the StaffMember’s of the restaurant example has worked, which is represented by the class WorkingTime, there is a relationship to this StaffMember.

+
+
+

The following listing shows how this can be modeled using JPA:

+
+
+
+
...
+
+@Entity
+public class WorkingTimeEntity {
+   ...
+
+   private StaffMemberEntity staffMember;
+
+   @ManyToOne
+   @JoinColumn(name="STAFFMEMBER")
+   public StaffMemberEntity getStaffMember() {
+      return this.staffMember;
+   }
+
+   public void setStaffMember(StaffMemberEntity staffMember) {
+      this.staffMember = staffMember;
+   }
+}
+
+
+
+

To represent the relationship, an attribute of the type of the corresponding entity class that is referenced has been introduced. The relationship is a n:1 relationship, because every WorkingTime belongs to exactly one StaffMember, but a StaffMember usually worked more often than once.
+This is why the @ManyToOne annotation is used here. For 1:1 relationships the @OneToOne annotation can be used which works basically the same way. To be able to save information about the relation in the database, an additional column in the corresponding table of WorkingTime is needed which contains the primary key of the referenced StaffMember. With the name element of the @JoinColumn annotation it is possible to specify the name of this column.

+
+
+
+
1:n and n:m Relationships
+
+

The relationship of the example listed above is currently an unidirectional one, as there is a getter method for retrieving the StaffMember from the WorkingTime object, but not vice versa.

+
+
+

To make it a bidirectional one, the following code has to be added to StaffMember:

+
+
+
+
  private Set<WorkingTimeEntity> workingTimes;
+
+  @OneToMany(mappedBy="staffMember")
+  public Set<WorkingTimeEntity> getWorkingTimes() {
+    return this.workingTimes;
+  }
+
+  public void setWorkingTimes(Set<WorkingTimeEntity> workingTimes) {
+    this.workingTimes = workingTimes;
+  }
+
+
+
+

To make the relationship bidirectional, the tables in the database do not have to be changed. Instead the column that corresponds to the attribute staffMember in class WorkingTime is used, which is specified by the mappedBy element of the @OneToMany annotation. Hibernate will search for corresponding WorkingTime objects automatically when a StaffMember is loaded.

+
+
+

The problem with bidirectional relationships is that if a WorkingTime object is added to the set or list workingTimes in StaffMember, this does not have any effect in the database unless +the staffMember attribute of that WorkingTime object is set. That is why the devon4j advices not to use bidirectional relationships but to use queries instead. How to do this is shown here. If a bidirectional relationship should be used nevertheless, appropriate add and remove methods must be used.

+
+
+

For 1:n and n:m relations, the devon4j demands that (unordered) Sets and no other collection types are used, as shown in the listing above. The only exception is whenever an ordering is really needed, (sorted) lists can be used.
+For example, if WorkingTime objects should be sorted by their start time, this could be done like this:

+
+
+
+
  private List<WorkingTimeEntity> workingTimes;
+
+  @OneToMany(mappedBy = "staffMember")
+  @OrderBy("startTime asc")
+  public List<WorkingTimeEntity> getWorkingTimes() {
+    return this.workingTimes;
+  }
+
+  public void setWorkingTimes(List<WorkingTimeEntity> workingTimes) {
+    this.workingTimes = workingTimes;
+  }
+
+
+
+

The value of the @OrderBy annotation consists of an attribute name of the class followed by asc (ascending) or desc (descending).

+
+
+

To store information about a n:m relationship, a separate table has to be used, as one column cannot store several values (at least if the database schema is in first normal form).
+For example if one wanted to extend the example application so that all ingredients of one FoodDrink can be saved and to model the ingredients themselves as entities (e.g. to store additional information about them), this could be modeled as follows (extract of class FoodDrink):

+
+
+
+
  private Set<IngredientEntity> ingredients;
+
+  @ManyToMany()
+  @JoinTable
+  public Set<IngredientEntity> getIngredients() {
+    return this.ingredients;
+  }
+
+  public void setOrders(Set<IngredientEntity> ingredients) {
+    this.ingredients = ingredients;
+  }
+
+
+
+

Information about the relation is stored in a table called BILL_ORDER that has to have two columns, one for referencing the Bill, the other one for referencing the Order. Note that the @JoinTable annotation is not needed in this case because a separate table is the default solution here (same for n:m relations) unless there is a mappedBy element specified.

+
+
+

For 1:n relationships this solution has the disadvantage that more joins (in the database system) are needed to get a Bill with all the Orders it refers to. This might have a negative impact on performance so that the solution to store a reference to the Bill row/entity in the Order’s table is probably the better solution in most cases.

+
+
+

Note that bidirectional n:m relationships are not allowed for applications based on devon4j. Instead a third entity has to be introduced, which "represents" the relationship (it has two n:1 relationships).

+
+
+
+
Eager vs. Lazy Loading
+
+

Using JPA it is possible to use either lazy or eager loading. Eager loading means that for entities retrieved from the database, other entities that are referenced by these entities are also retrieved, whereas lazy loading means that this is only done when they are actually needed, i.e. when the corresponding getter method is invoked.

+
+
+

Application based on devon4j are strongly advised to always use lazy loading. The JPA defaults are:

+
+
+
    +
  • +

    @OneToMany: LAZY

    +
  • +
  • +

    @ManyToMany: LAZY

    +
  • +
  • +

    @ManyToOne: EAGER

    +
  • +
  • +

    @OneToOne: EAGER

    +
  • +
+
+
+

So at least for @ManyToOne and @OneToOne you always need to override the default by providing fetch = FetchType.LAZY.

+
+
+ + + + + +
+ + +Please read the performance guide. +
+
+
+
+
Cascading Relationships
+
+

For relations it is also possible to define whether operations are cascaded (like a recursion) to the related entity. +By default, nothing is done in these situations. This can be changed by using the cascade property of the annotation that specifies the relation type (@OneToOne, @ManyToOne, @OneToMany, @ManyToMany). This property accepts a CascadeType that offers the following options:

+
+
+
    +
  • +

    PERSIST (for EntityManager.persist, relevant to insert transient entities into the DB)

    +
  • +
  • +

    REMOVE (for EntityManager.remove to delete entity from DB)

    +
  • +
  • +

    MERGE (for EntityManager.merge)

    +
  • +
  • +

    REFRESH (for EntityManager.refresh)

    +
  • +
  • +

    DETACH (for EntityManager.detach)

    +
  • +
  • +

    ALL (cascade all of the above operations)

    +
  • +
+
+
+

See here for more information.

+
+
+
+
Typesafe Foreign Keys using IdRef
+
+

For simple usage you can use Long for all your foreign keys. +However, as an optional pattern for advanced and type-safe usage, we offer IdRef.

+
+
+
+
+

Embeddable

+
+

An embeddable Object is a way to group properties of an entity into a separate Java (child) object. Unlike with relationships, the embeddable is not a separate entity and its properties are stored (embedded) in the same table together with the entity. This is helpful to structure and reuse groups of properties.

+
+
+

The following example shows an Address implemented as an embeddable class:

+
+
+
+
@Embeddable
+public class AddressEmbeddable {
+
+  private String street;
+  private String number;
+  private Integer zipCode;
+  private String city;
+
+  @Column(name="STREETNUMBER")
+  public String getNumber() {
+    return number;
+  }
+
+  public void setNumber(String number) {
+    this.number = number;
+  }
+
+  ...  // other getter and setter methods, equals, hashCode
+}
+
+
+
+

As you can see an embeddable is similar to an entity class, but with an @Embeddable annotation instead of the @Entity annotation and without primary key or modification counter. +An Embeddable does not exist on its own but in the context of an entity. +As a simplification Embeddables do not require a separate interface and ETO as the bean-mapper will create a copy automatically when converting the owning entity to an ETO. +However, in this case the embeddable becomes part of your api module that therefore needs a dependency on the JPA.

+
+
+

In addition to that the methods equals(Object) and hashCode() need to be implemented as this is required by Hibernate (it is not required for entities because they can be unambiguously identified by their primary key). For some hints on how to implement the hashCode() method please have a look here.

+
+
+

Using this AddressEmbeddable inside an entity class can be done like this:

+
+
+
+
  private AddressEmbeddable address;
+
+  @Embedded
+  public AddressEmbeddable getAddress() {
+    return this.address;
+  }
+
+  public void setAddress(AddressEmbeddable address) {
+    this.address = address;
+  }
+}
+
+
+
+

The @Embedded annotation needs to be used for embedded attributes. Note that if in all columns of the embeddable (here Address) are null, then the embeddable object itself is also null inside the entity. This has to be considered to avoid NullPointerException’s. Further this causes some issues with primitive types in embeddable classes that can be avoided by only using object types instead.

+
+
+
+

Inheritance

+
+

Just like normal java classes, entity classes can inherit from others. The only difference is that you need to specify how to map a class hierarchy to database tables. Generic abstract super-classes for entities can simply be annotated with @MappedSuperclass.

+
+
+

For all other cases the JPA offers the annotation @Inheritance with the property strategy taking an InheritanceType that has the following options:

+
+
+
+
+
    +
  • +

    SINGLE_TABLE: This strategy uses a single table that contains all columns needed to store all entity-types of the entire inheritance hierarchy. If a column is not needed for an entity because of its type, there is a null value in this column. An additional column is introduced, which denotes the type of the entity (called dtype).

    +
  • +
  • +

    TABLE_PER_CLASS: For each concrete entity class there is a table in the database that can store such an entity with all its attributes. An entity is only saved in the table corresponding to its most concrete type. To get all entities of a super type, joins are needed.

    +
  • +
  • +

    JOINED: In this case there is a table for every entity class including abstract classes, which contains only the columns for the persistent properties of that particular class. Additionally there is a primary key column in every table. To get an entity of a class that is a subclass of another one, joins are needed.

    +
  • +
+
+
+
+
+

Each of the three approaches has its advantages and drawbacks, which are discussed in detail here. In most cases, the first one should be used, because it is usually the fastest way to do the mapping, as no joins are needed when retrieving, searching or persisting entities. Moreover it is rather simple and easy to understand. +One major disadvantage is that the first approach could lead to a table with a lot of null values, which might have a negative impact on the database size.

+
+
+

The inheritance strategy has to be annotated to the top-most entity of the class hierarchy (where @MappedSuperclass classes are not considered) like in the following example:

+
+
+
+
@Entity
+@Inheritance(strategy=InheritanceType.SINGLE_TABLE)
+public abstract class MyParentEntity extends ApplicationPersistenceEntity implements MyParent {
+  ...
+}
+
+@Entity
+public class MyChildEntity extends MyParentEntity implements MyChild {
+  ...
+}
+
+@Entity
+public class MyOtherEntity extends MyParentEntity implements MyChild {
+  ...
+}
+
+
+
+

As a best practice we advise you to avoid entity hierarchies at all where possible and otherwise to keep the hierarchy as small as possible. In order to just ensure reuse or establish a common API you can consider a shared interface, a @MappedSuperclass or an @Embeddable instead of an entity hierarchy.

+
+
+
+

Repositories and DAOs

+
+

For each entity a code unit is created that groups all database operations for that entity. We recommend to use spring-data repositories for that as it is most efficient for developers. As an alternative there is still the classic approach using DAOs.

+
+
+
Concurrency Control
+
+

The concurrency control defines the way concurrent access to the same data of a database is handled. When several users (or threads of application servers) concurrently access a database, anomalies may happen, e.g. a transaction is able to see changes from another transaction although that one did not yet commit these changes. Most of these anomalies are automatically prevented by the database system, depending on the isolation level (property hibernate.connection.isolation in the jpa.xml, see here, or quarkus.datasource.jdbc.transaction-isolation-level in the application.properties).

+
+
+

Another anomaly is when two stakeholders concurrently access a record, do some changes and write them back to the database. The JPA addresses this with different locking strategies (see here).

+
+
+

As a best practice we are using optimistic locking for regular end-user services (OLTP) and pessimistic locking for batches.

+
+
+
+
Optimistic Locking
+
+

The class com.devonfw.module.jpa.persistence.api.AbstractPersistenceEntity already provides optimistic locking via a modificationCounter with the @Version annotation. Therefore JPA takes care of optimistic locking for you. When entities are transferred to clients, modified and sent back for update you need to ensure the modificationCounter is part of the game. If you follow our guides about transfer-objects and services this will also work out of the box. +You only have to care about two things:

+
+
+
    +
  • +

    How to deal with optimistic locking in relationships?
    +Assume an entity A contains a collection of B entities. Should there be a locking conflict if one user modifies an instance of A while another user in parallel modifies an instance of B that is contained in the other instance? To address this, take a look at FeatureForceIncrementModificationCounter.

    +
  • +
  • +

    What should happen in the UI if an OptimisticLockException occurred?
    +According to KISS our recommendation is that the user gets an error displayed that tells him to do his change again on the recent data. Try to design your system and the work processing in a way to keep such conflicts rare and you are fine.

    +
  • +
+
+
+
+
Pessimistic Locking
+
+

For back-end services and especially for batches optimistic locking is not suitable. A human user shall not cause a large batch process to fail because he was editing the same entity. Therefore such use-cases use pessimistic locking what gives them a kind of priority over the human users. +In your DAO implementation you can provide methods that do pessimistic locking via EntityManager operations that take a LockModeType. Here is a simple example:

+
+
+
+
  getEntityManager().lock(entity, LockModeType.READ);
+
+
+
+

When using the lock(Object, LockModeType) method with LockModeType.READ, Hibernate will issue a SELECT …​ FOR UPDATE. This means that no one else can update the entity (see here for more information on the statement). If LockModeType.WRITE is specified, Hibernate issues a SELECT …​ FOR UPDATE NOWAIT instead, which has the same meaning as the statement above, but if there is already a lock, the program will not wait for this lock to be released. Instead, an exception is raised.
+Use one of the types if you want to modify the entity later on, for read only access no lock is required.

+
+
+

As you might have noticed, the behavior of Hibernate deviates from what one would expect by looking at the LockModeType (especially LockModeType.READ should not cause a SELECT …​ FOR UPDATE to be issued). The framework actually deviates from what is specified in the JPA for unknown reasons.

+
+
+
+
+

Database Auditing

+ +
+
+

Testing Data-Access

+
+

For testing of Entities and Repositories or DAOs see testing guide.

+
+
+
+

Principles

+
+

We strongly recommend these principles:

+
+
+
    +
  • +

    Use the JPA where ever possible and use vendor (hibernate) specific features only for situations when JPA does not provide a solution. In the latter case consider first if you really need the feature.

    +
  • +
  • +

    Create your entities as simple POJOs and use JPA to annotate the getters in order to define the mapping.

    +
  • +
  • +

    Keep your entities simple and avoid putting advanced logic into entity methods.

    +
  • +
+
+
+
+

Database Configuration

+
+

For details on the configuration of the database connection and database logging of the individual framework, please refer to the respective configuration guide.

+
+
+

For spring see here.

+
+
+

For quarkus see here.

+
+
+
Database Migration
+ +
+
+
Pooling
+
+

You typically want to pool JDBC connections to boost performance by recycling previous connections. There are many libraries available to do connection pooling. We recommend to use HikariCP. For Oracle RDBMS see here.

+
+
+
+
+

Security

+
+
SQL-Injection
+
+

A common security threat is SQL-injection. Never build queries with string concatenation or your code might be vulnerable as in the following example:

+
+
+
+
  String query = "Select op from OrderPosition op where op.comment = " + userInput;
+  return getEntityManager().createQuery(query).getResultList();
+
+
+
+

Via the parameter userInput an attacker can inject SQL (JPQL) and execute arbitrary statements in the database causing extreme damage.

+
+
+

In order to prevent such injections you have to strictly follow our rules for queries:

+
+
+ +
+
+
+
Limited Permissions for Application
+
+

We suggest that you operate your application with a database user that has limited permissions so he can not modify the SQL schema (e.g. drop tables). For initializing the schema (DDL) or to do schema migrations use a separate user that is not used by the application itself.

+
+ +
+

==Queries +The Java Persistence API (JPA) defines its own query language, the java persistence query language (JPQL) (see also JPQL tutorial), which is similar to SQL but operates on entities and their attributes instead of tables and columns.

+
+
+

The simplest CRUD-Queries (e.g. find an entity by its ID) are already built into the devonfw CRUD functionality (via Repository or DAO). For other cases you need to write your own query. We distinguish between static and dynamic queries. Static queries have a fixed JPQL query string that may only use parameters to customize the query at runtime. Instead, dynamic queries can change their clauses (WHERE, ORDER BY, JOIN, etc.) at runtime depending on the given search criteria.

+
+
+
+
Static Queries
+
+

E.g. to find all DishEntries (from MTS sample app) that have a price not exceeding a given maxPrice we write the following JPQL query:

+
+
+
+
SELECT dish FROM DishEntity dish WHERE dish.price <= :maxPrice
+
+
+
+

Here dish is used as alias (variable name) for our selected DishEntity (what refers to the simple name of the Java entity class). With dish.price we are referring to the Java property price (getPrice()/setPrice(…​)) in DishEntity. A named variable provided from outside (the search criteria at runtime) is specified with a colon (:) as prefix. Here with :maxPrice we reference to a variable that needs to be set via query.setParameter("maxPrice", maxPriceValue). JPQL also supports indexed parameters (?) but they are discouraged because they easily cause confusion and mistakes.

+
+
+
Using Queries to Avoid Bidirectional Relationships
+
+

With the usage of queries it is possible to avoid exposing relationships or modelling bidirectional relationships, which have some disadvantages (see relationships). This is especially desired for relationships between entities of different business components. +So for example to get all OrderLineEntities for a specific OrderEntity without using the orderLines relation from OrderEntity the following query could be used:

+
+
+
+
SELECT line FROM OrderLineEntity line WHERE line.order.id = :orderId
+
+
+
+
+
+
Dynamic Queries
+
+

For dynamic queries, we use the JPA module for Querydsl. Querydsl also supports other modules such as MongoDB, and Apache Lucene. It allows to implement queries in a powerful but readable and type-safe way (unlike Criteria API). If you already know JPQL, you will quickly be able to read and write Querydsl code. It feels like JPQL but implemented in Java instead of plain text.

+
+
+

To use Querydsl in your Maven project, add the following dependencies:

+
+
+
+
<dependencies>
+
+    <dependency>
+        <groupId>com.querydsl</groupId>
+        <artifactId>querydsl-apt</artifactId>
+        <version>${querydsl.version}</version>
+        <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+        <groupId>com.querydsl</groupId>
+        <artifactId>querydsl-jpa</artifactId>
+        <version>${querydsl.version}</version>
+    </dependency>
+
+</dependencies>
+
+
+
+

Next, configure the annotation processing tool (APT) plugin:

+
+
+
+
<project>
+  <build>
+    <plugins>
+      ...
+      <plugin>
+        <groupId>com.mysema.maven</groupId>
+        <artifactId>apt-maven-plugin</artifactId>
+        <version>1.1.3</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>process</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>target/generated-sources/java</outputDirectory>
+              <processor>com.querydsl.apt.jpa.JPAAnnotationProcessor</processor>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      ...
+    </plugins>
+  </build>
+</project>
+
+
+
+

Here is an example from our sample application:

+
+
+
+
  public List<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    QDishEntity dish = QDishEntity.dishEntity;
+    JPAQuery<DishEntity> query = new JPAQuery<DishEntity>(getEntityManager());
+    query.from(dish);
+
+    Range<BigDecimal> priceRange = criteria.getPriceRange();
+    if (priceRange != null) {
+      BigDecimal min = priceRange.getMin();
+      if (min != null) {
+        query.where(dish.price.goe(min));
+      }
+      BigDecimal max = priceRange.getMax();
+      if (max != null) {
+        query.where(dish.price.loe(max));
+      }
+    }
+    String name = criteria.getName();
+    if ((name != null) && (!name.isEmpty())) {
+      query.where(dish.name.eq(name));
+    }
+    query.orderBy(dish.price.asc(), dish.name.asc());
+    return query.fetch();
+  }
+
+
+
+

In this example, we use the so called Q-types (QDishEntity). These are classes generated at build time by the Querydsl annotation processor from entity classes. The Q-type classes can be used as static types representative of the original entity class.

+
+
+

The query.from(dish) method call defines the query source, in this case the dish table. The where method defines a filter. For example, the first call uses the goe operator to filter out any dishes that are not greater or equal to the minimal price. Further operators can be found here.

+
+
+

The orderBy method is used to sort the query results according to certain criteria. Here, we sort the results first by their price and then by their name, both in ascending order. To sort in descending order, use .desc(). To partition query results into groups of rows, see the groupBy method.

+
+
+

For spring, devon4j provides another approach that you can use for your Spring applications to implement Querydsl logic without having to use these metaclasses. An example can be found here.

+
+
+
+
Native Queries
+
+

Spring Data supports the use of native queries. Native queries use simple native SQL syntax that is not parsed in JPQL. This allows you to use all the features that your database supports. +The downside to this is that database portability is lost due to the absence of an abstraction layer. Therefore, the queries may not work with another database because it may use a different syntax.

+
+
+

You can implement a native query using @Query annotation with the nativeQuery attribute set to true:

+
+
+
+
@Query(value="...", nativeQuery=true)
+
+
+
+ + + + + +
+ + +This will not work with Quarkus because Quarkus does not support native queries by using the @Query annotation (see here). +
+
+
+

You can also implement native queries directly using the EntityManager API and the createNativeQuery method. +This approach also works with Quarkus.

+
+
+
+
Query query = entityManager.createNativeQuery("SELECT * FROM Product", ProductEntity.class);
+List<ProductEntity> products = query.getResultList();
+
+
+
+ + + + + +
+ + +Be sure to use the name of the table when using native queries, while you must use the entity name when implementing queries with JPQL. +
+
+
+
+
Using Wildcards
+
+

For flexible queries it is often required to allow wildcards (especially in dynamic queries). While users intuitively expect glob syntax, the SQL and JPQL standards work differently. Therefore, a mapping is required. devonfw provides this on a lower level with LikePatternSyntax and on a higher level with QueryUtil (see QueryHelper.newStringClause(…​)).

+
+
+
+
Pagination
+
+

When dealing with large amounts of data, an efficient method of retrieving the data is required. Fetching the entire data set each time would be too time consuming. Instead, Paging is used to process only small subsets of the entire data set.

+
+
+

If you are using Spring Data repositories you will get pagination support out of the box by providing the interfaces Page and Pageable:

+
+
+
repository
+
+
Page<DishEntity> findAll(Pageable pageable);
+
+
+
+

Then you can create a Pageable object and pass it to the method call as follows:

+
+
+
+
int page = criteria.getPageNumber();
+int size = criteria.getPageSize();
+Pageable pageable = PageRequest.of(page, size);
+Page<DishEntity> dishes = dishRepository.findAll(pageable);
+
+
+
+
Paging with Querydsl
+
+

Pagination is also supported for dynamic queries with Querydsl:

+
+
+
+
  public Page<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    QDishEntity dish = QDishEntity.dishEntity;
+    JPAQuery<DishEntity> query = new JPAQuery<DishEntity>(getEntityManager());
+    query.from(dish);
+
+    // conditions
+
+    int page = criteria.getPageNumber();
+    int size = criteria.getPageSize();
+    Pageable pageable = PageRequest.of(page, size);
+    query.offset(pageable.getOffset());
+    query.limit(pageable.getPageSize());
+
+    List<DishEntity> dishes = query.fetch();
+    return new PageImpl<>(dishes, pageable, dishes.size());
+  }
+
+
+
+
+
Pagination example
+
+

For the table entity we can make a search request by accessing the REST endpoint with pagination support like in the following examples:

+
+
+
+
POST mythaistar/services/rest/tablemanagement/v1/table/search
+{
+  "pagination": {
+    "size":2,
+    "total":true
+  }
+}
+
+//Response
+{
+    "pagination": {
+        "size": 2,
+        "page": 1,
+        "total": 11
+    },
+    "result": [
+        {
+            "id": 101,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 1,
+            "state": "OCCUPIED"
+        },
+        {
+            "id": 102,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 2,
+            "state": "FREE"
+        }
+    ]
+}
+
+
+
+ + + + + +
+ + +As we are requesting with the total property set to true the server responds with the total count of rows for the query. +
+
+
+

For retrieving a concrete page, we provide the page attribute with the desired value. Here we also left out the total property so the server doesn’t incur on the effort to calculate it:

+
+
+
+
POST mythaistar/services/rest/tablemanagement/v1/table/search
+{
+  "pagination": {
+    "size":2,
+    "page":2
+  }
+}
+
+//Response
+
+{
+    "pagination": {
+        "size": 2,
+        "page": 2,
+        "total": null
+    },
+    "result": [
+        {
+            "id": 103,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 3,
+            "state": "FREE"
+        },
+        {
+            "id": 104,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 4,
+            "state": "FREE"
+        }
+    ]
+}
+
+
+
+
+
Pagination in devon4j-spring
+
+

For spring applications, devon4j also offers its own solution for pagination. You can find an example of this here.

+
+
+
+
+
Query Meta-Parameters
+
+

Queries can have meta-parameters that are provided via SearchCriteriaTo. Besides paging (see above) we also get timeout support.

+
+
+
+
Advanced Queries
+
+

Writing queries can sometimes get rather complex. The current examples given above only showed very simple basics. Within this topic a lot of advanced features need to be considered like:

+
+
+ +
+
+

This list is just containing the most important aspects. As we can not cover all these topics here, they are linked to external documentation that can help and guide you.

+
+ +
+

==Spring Data +Spring Data JPA is supported by both Spring and Quarkus. However, in Quarkus this approach still has some limitations. For detailed information, see the official Quarkus Spring Data guide.

+
+
+
+
Motivation
+
+

The benefits of Spring Data are (for examples and explanations see next sections):

+
+
+
    +
  • +

    All you need is one single repository interface for each entity. No need for a separate implementation or other code artifacts like XML descriptors, NamedQueries class, etc.

    +
  • +
  • +

    You have all information together in one place (the repository interface) that actually belong together (whereas in the classic approach you have the static queries in an XML file, constants to them in NamedQueries class and referencing usages in DAO implementation classes).

    +
  • +
  • +

    Static queries are most simple to realize as you do not need to write any method body. This means you can develop faster.

    +
  • +
  • +

    Support for paging is already built-in. Again for static query methods there is nothing you have to do except using the paging objects in the signature.

    +
  • +
  • +

    Still you have the freedom to write custom implementations via default methods within the repository interface (e.g. for dynamic queries).

    +
  • +
+
+
+
+
Dependency
+
+

In case you want to switch to or add Spring Data support to your Spring or Quarkus application, all you need is to add the respective maven dependency:

+
+
+
spring
+
+
<dependency>
+  <groupId>org.springframework.boot</groupId>
+  <artifactId>spring-boot-starter-data-jpa</artifactId>
+</dependency>
+
+
+
+
quarkus
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-spring-data-jpa</artifactId>
+</dependency>
+
+
+
+
+
Repository
+
+

For each entity «Entity»Entity an interface is created with the name «Entity»Repository extending JpaRepository. +Such repository is the analogy to a Data-Access-Object (DAO) used in the classic approach or when Spring Data is not an option.

+
+
+
Repository
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long> {
+
+}
+
+
+
+

The Spring Data repository provides some basic implementations for accessing data, e.g. returning all instances of a type (findAll) or returning an instance by its ID (findById).

+
+
+
+
Custom method implementation
+
+

In addition, repositories can be enriched with additional functionality, e.g. to add QueryDSL functionality or to override the default implementations, by using so called repository fragments:

+
+
+
Example
+
+

The following example shows how to write such a repository:

+
+
+
Repository
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long>, ProductFragment {
+
+  @Query("SELECT product FROM ProductEntity product" //
+      + " WHERE product.title = :title")
+  List<ProductEntity> findByTitle(@Param("title") String title);
+
+  @Query("SELECT product FROM ProductEntity product" //
+      + " WHERE product.title = :title")
+  Page<ProductEntity> findByTitlePaginated(@Param("title") String title, Pageable pageable);
+}
+
+
+
+
Repository fragment
+
+
public interface ProductFragment {
+  Page<ProductEntity> findByCriteria(ProductSearchCriteriaTo criteria);
+}
+
+
+
+
Fragment implementation
+
+
public class ProductFragmentImpl implements ProductFragment {
+  @Inject
+  EntityManager entityManager;
+
+  public Page<ProductEntity> findByCriteria(ProductSearchCriteriaTo criteria) {
+    QProductEntity product = QProductEntity.productEntity;
+    JPAQuery<ProductEntity> query = new JPAQuery<ProductEntity>(this.entityManager);
+    query.from(product);
+
+    String title = criteria.getTitle();
+    if ((title != null) && !title.isEmpty()) {
+      query.where(product.title.eq(title));
+    }
+
+    List<ProductEntity> products = query.fetch();
+    return new PageImpl<>(products, PageRequest.of(criteria.getPageNumber(), criteria.getPageSize()), products.size());
+  }
+}
+
+
+
+

This ProductRepository has the following features:

+
+
+
    +
  • +

    CRUD support from Spring Data (see JavaDoc for details).

    +
  • +
  • +

    Support for QueryDSL integration, paging and more.

    +
  • +
  • +

    A static query method findByTitle to find all ProductEntity instances from DB that have the given title. Please note the @Param annotation that links the method parameter with the variable inside the query (:title).

    +
  • +
  • +

    The same with pagination support via findByTitlePaginated method.

    +
  • +
  • +

    A dynamic query method findByCriteria showing the QueryDSL and paging integration into Spring via a fragment implementation.

    +
  • +
+
+
+

You can find an implementation of this ProductRepository in our Quarkus reference application.

+
+
+ + + + + +
+ + +In Quarkus, native and named queries via the @Query annotation are currently not supported +
+
+
+
+
Integration of Spring Data in devon4j-spring
+
+

For Spring applications, devon4j offers a proprietary solution that integrates seamlessly with QueryDSL and uses default methods instead of the fragment approach. A separate guide for this can be found here.

+
+
+
+
Custom methods without fragment approach
+
+

The fragment approach is a bit laborious, as three types (repository interface, fragment interface and fragment implementation) are always needed to implement custom methods. +We cannot simply use default methods within the repository because we cannot inject the EntityManager directly into the repository interface.

+
+
+

As a workaround, you can create a GenericRepository interface, as is done in the devon4j jpa-spring-data module.

+
+
+
+
public interface GenericRepository<E> {
+
+  EntityManager getEntityManager();
+
+  ...
+}
+
+
+
+
+
public class GenericRepositoryImpl<E> implements GenericRepository<E> {
+
+  @Inject
+  EntityManager entityManager;
+
+  @Override
+  public EntityManager getEntityManager() {
+
+    return this.entityManager;
+  }
+
+  ...
+}
+
+
+
+

Then, all your repository interfaces can extend the GenericRepository and you can implement queries directly in the repository interface using default methods:

+
+
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long>, GenericRepository<ProductEntity> {
+
+  default Page<ProductEntity> findByTitle(Title title) {
+
+    EntityManager entityManager = getEntityManager();
+    Query query = entityManager.createNativeQuery("select * from Product where title = :title", ProductEntity.class);
+    query.setParameter("title", title);
+    List<ProductEntity> products = query.getResultList();
+    return new PageImpl<>(products);
+  }
+
+  ...
+}
+
+
+
+
+
+
Drawbacks
+
+

Spring Data also has some drawbacks:

+
+
+
    +
  • +

    Some kind of magic behind the scenes that is not so easy to understand. So in case you want to extend all your repositories without providing the implementation via a default method in a parent repository interface you need to deep-dive into Spring Data. We assume that you do not need that and hope what Spring Data and devon already provides out-of-the-box is already sufficient.

    +
  • +
  • +

    The Spring Data magic also includes guessing the query from the method name. This is not easy to understand and especially to debug. Our suggestion is not to use this feature at all and either provide a @Query annotation or an implementation via default method.

    +
  • +
+
+
+
+
Limitations in Quarkus
+
+
    +
  • +

    Native and named queries are not supported using @Query annotation. You will receive something like: Build step io.quarkus.spring.data.deployment.SpringDataJPAProcessor#build threw an exception: java.lang.IllegalArgumentException: Attribute nativeQuery of @Query is currently not supported

    +
  • +
  • +

    Customizing the base repository for all repository interfaces in the code base, which is done in Spring Data by registering a class that extends SimpleJpaRepository

    +
  • +
+
+ +
+

==Data Access Object

+
+
+

The Data Access Objects (DAOs) are part of the persistence layer. +They are responsible for a specific entity and should be named «Entity»Dao and «Entity»DaoImpl. +The DAO offers the so called CRUD-functionalities (create, retrieve, update, delete) for the corresponding entity. +Additionally a DAO may offer advanced operations such as query or locking methods.

+
+
+
+
DAO Interface
+
+

For each DAO there is an interface named «Entity»Dao that defines the API. For CRUD support and common naming we derive it from the ApplicationDao interface that comes with the devon application template:

+
+
+
+
public interface MyEntityDao extends ApplicationDao<MyEntity> {
+  List<MyEntity> findByCriteria(MyEntitySearchCriteria criteria);
+}
+
+
+
+

All CRUD operations are inherited from ApplicationDao so you only have to declare the additional methods.

+
+
+
+
DAO Implementation
+
+

Implementing a DAO is quite simple. We create a class named «Entity»DaoImpl that extends ApplicationDaoImpl and implements your «Entity»Dao interface:

+
+
+
+
public class MyEntityDaoImpl extends ApplicationDaoImpl<MyEntity> implements MyEntityDao {
+
+  public List<MyEntity> findByCriteria(MyEntitySearchCriteria criteria) {
+    TypedQuery<MyEntity> query = createQuery(criteria, getEntityManager());
+    return query.getResultList();
+  }
+  ...
+}
+
+
+
+

Again you only need to implement the additional non-CRUD methods that you have declared in your «Entity»Dao interface. +In the DAO implementation you can use the method getEntityManager() to access the EntityManager from the JPA. You will need the EntityManager to create and execute queries.

+
+
+
Static queries for DAO Implementation
+
+

All static queries are declared in the file src\main\resources\META-INF\orm.xml:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<entity-mappings version="1.0" xmlns="http://java.sun.com/xml/ns/persistence/orm" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://java.sun.com/xml/ns/persistence/orm http://java.sun.com/xml/ns/persistence/orm_1_0.xsd">
+  <named-query name="find.dish.with.max.price">
+    <query><![CDATA[SELECT dish FROM DishEntity dish WHERE dish.price <= :maxPrice]]></query>
+  </named-query>
+  ...
+</entity-mappings>
+
+
+
+

When your application is started, all these static queries will be created as prepared statements. This allows better performance and also ensures that you get errors for invalid JPQL queries when you start your app rather than later when the query is used.

+
+
+

To avoid redundant occurrences of the query name (get.open.order.positions.for.order) we define a constant for each named query:

+
+
+
+
public class NamedQueries {
+  public static final String FIND_DISH_WITH_MAX_PRICE = "find.dish.with.max.price";
+}
+
+
+
+

Note that changing the name of the java constant (FIND_DISH_WITH_MAX_PRICE) can be done easily with refactoring. Further you can trace where the query is used by searching the references of the constant.

+
+
+

The following listing shows how to use this query:

+
+
+
+
public List<DishEntity> findDishByMaxPrice(BigDecimal maxPrice) {
+  Query query = getEntityManager().createNamedQuery(NamedQueries.FIND_DISH_WITH_MAX_PRICE);
+  query.setParameter("maxPrice", maxPrice);
+  return query.getResultList();
+}
+
+
+
+

Via EntityManager.createNamedQuery(String) we create an instance of Query for our predefined static query. +Next we use setParameter(String, Object) to provide a parameter (maxPrice) to the query. This has to be done for all parameters of the query.

+
+
+

Note that using the createQuery(String) method, which takes the entire query as string (that may already contain the parameter) is not allowed to avoid SQL injection vulnerabilities. +When the method getResultList() is invoked, the query is executed and the result is delivered as List. As an alternative, there is a method called getSingleResult(), which returns the entity if the query returned exactly one and throws an exception otherwise.

+
+ +
+

==JPA Performance +When using JPA the developer sometimes does not see or understand where and when statements to the database are triggered.

+
+
+
+
+

Establishing expectations Developers shouldn’t expect to sprinkle magic pixie dust on POJOs in hopes they will become persistent.

+
+
+
+— Dan Allen
+https://epdf.tips/seam-in-action.html +
+
+
+

So in case you do not understand what is going on under the hood of JPA, you will easily run into performance issues due to lazy loading and other effects.

+
+
+
+
+
N plus 1 Problem
+
+

The most prominent phenomenon is called the N+1 Problem. +We use entities from our MTS demo app as an example to explain the problem. +There is a DishEntity that has a @ManyToMany relation to +IngredientEntity. +Now we assume that we want to iterate all ingredients for a dish like this:

+
+
+
+
DishEntity dish = dao.findDishById(dishId);
+BigDecimal priceWithAllExtras = dish.getPrice();
+for (IngredientEntity ingredient : dish.getExtras()) {
+  priceWithAllExtras = priceWithAllExtras.add(ingredient.getPrice());
+}
+
+
+
+

Now dish.getExtras() is loaded lazy. Therefore the JPA vendor will provide a list with lazy initialized instances of IngredientEntity that only contain the ID of that entity. Now with every call of ingredient.getPrice() we technically trigger an SQL query statement to load the specific IngredientEntity by its ID from the database. +Now findDishById caused 1 initial query statement and for any number N of ingredients we are causing an additional query statement. This makes a total of N+1 statements. As causing statements to the database is an expensive operation with a lot of overhead (creating connection, etc.) this ends in bad performance and is therefore a problem (the N+1 Problem).

+
+
+
+
Solving N plus 1 Problem
+
+

To solve the N+1 Problem you need to change your code to only trigger a single statement instead. This can be achieved in various ways. The most universal solution is to use FETCH JOIN in order to pre-load the nested N child entities into the first level cache of the JPA vendor implementation. This will behave very similar as if the @ManyToMany relation to IngredientEntity was having FetchType.EAGER but only for the specific query and not in general. Because changing @ManyToMany to FetchType.EAGER would cause bad performance for other usecases where only the dish but not its extra ingredients are needed. For this reason all relations, including @OneToOne should always be FetchType.LAZY. Back to our example we simply replace dao.findDishById(dishId) with dao.findDishWithExtrasById(dishId) that we implement by the following JPQL query:

+
+
+
+
SELECT dish FROM DishEntity dish
+  LEFT JOIN FETCH dish.extras
+  WHERE dish.id = :dishId
+
+
+
+

The rest of the code does not have to be changed but now dish.getExtras() will get the IngredientEntity from the first level cache where it was fetched by the initial query above.

+
+
+

Please note that if you only need the sum of the prices from the extras you can also create a query using an aggregator function:

+
+
+
+
SELECT SUM(ingredient.price) FROM DishEntity dish JOIN dish.extras ingredient WHERE dish.id = :dishId
+
+
+
+

As you can see you need to understand the concepts in order to get good performance.

+
+
+

There are many advanced topics such as creating database indexes or calculating statistics for the query optimizer to get the best performance. For such advanced topics we recommend to have a database expert in your team that cares about such things. However, understanding the N+1 Problem and its solutions is something that every Java developer in the team needs to understand.

+
+
+ +
+

==Auditing

+
+
+

For database auditing we use hibernate envers. If you want to use auditing ensure you have the following dependency in your pom.xml:

+
+
+
spring
+
+
<dependency>
+  <groupId>com.devonfw.java.modules</groupId>
+  <artifactId>devon4j-jpa-envers</artifactId>
+</dependency>
+
+
+
+
quarkus
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-hibernate-envers</artifactId>
+</dependency>
+
+
+
+ + + + + +
+ + +The following part applies only to spring applications. At this point, the Quarkus extension does not provide any additional configurations. For Quarkus applications, simply use the @Audited annotation to enable auditing for an entity class, as described a few lines below or seen here. +
+
+
+

Make sure that the entity manager also scans the package from the devon4j-jpa[-envers] module in order to work properly. And make sure that the correct Repository Factory Bean Class is chosen.

+
+
+
+
@EntityScan(basePackages = { "«my.base.package»" }, basePackageClasses = { AdvancedRevisionEntity.class })
+...
+@EnableJpaRepositories(repositoryFactoryBeanClass = GenericRevisionedRepositoryFactoryBean.class)
+...
+public class SpringBootApp {
+  ...
+}
+
+
+
+

Now let your [Entity]Repository extend from DefaultRevisionedRepository instead of DefaultRepository.

+
+
+

The repository now has the methods getRevisionHistoryMetadata(id) and getRevisionHistoryMetadata(id, boolean lazy) available to get a list of revisions for a given entity and a method find(id, revision) to load a specific revision of an entity with the given ID or getLastRevisionHistoryMetadata(id) to load the last revision. +To enable auditing for an entity simply place the @Audited annotation on your entity and all entity classes it extends from.

+
+
+
+
@Entity(name = "Drink")
+@Audited
+public class DrinkEntity extends ProductEntity implements Drink {
+...
+
+
+
+

When auditing is enabled for an entity an additional database table is used to store all changes to the entity table and a corresponding revision number. This table is called <ENTITY_NAME>_AUD per default. Another table called REVINFO is used to store all revisions. Make sure that these tables are available. They can be generated by hibernate with the following property (only for development environments).

+
+
+
+
  database.hibernate.hbm2ddl.auto=create
+
+
+
+

Another possibility is to put them in your database migration scripts like so.

+
+
+
+
CREATE CACHED TABLE PUBLIC.REVINFO(
+  id BIGINT NOT NULL generated by default as identity (start with 1),
+  timestamp BIGINT NOT NULL,
+  user VARCHAR(255)
+);
+...
+CREATE CACHED TABLE PUBLIC.<TABLE_NAME>_AUD(
+    <ALL_TABLE_ATTRIBUTES>,
+    revtype TINYINT,
+    rev BIGINT NOT NULL
+);
+
+
+
+ +
+

==Transaction Handling

+
+
+

For transaction handling we use AOP to add transaction control via annotations as aspect. +This is done by annotating your code with the @Transactional annotation. +You can either annotate your container bean at class level to make all methods transactional or you can annotate individual methods to make them transactional:

+
+
+
+
  @Transactional
+  public Output getData(Input input) {
+    ...
+  }
+
+
+
+
+
+

JTA Imports

+
+

Here are the import statements for transaction support:

+
+
+
+
import javax.transaction.Transactional;
+
+
+
+ + + + + +
+ + +Use the above import statement to follow JEE and avoid using org.springframework.transaction.annotation.Transactional. +
+
+
+
+

JTA Dependencies

+
+

Please note that with Jakarta EE the dependencies have changed. +When you want to start with Jakarta EE you should use these dependencies to get the annotations for dependency injection:

+
+
+
+
<!-- Java Transaction API (JTA) -->
+<dependency>
+  <groupId>jakarta.transaction</groupId>
+  <artifactId>jakarta.transaction-api</artifactId>
+</dependency>
+
+
+
+

Please note that with Quarkus you will get them as transitive dependencies out of the box. +The above Jakarta EE dependencies replace these JEE dependencies:

+
+
+
+
<!-- Java Transaction API (JTA) -->
+<dependency>
+  <groupId>javax.transaction</groupId>
+  <artifactId>javax.transaction-api</artifactId>
+</dependency>
+
+
+
+
+

Handling constraint violations

+
+

Using @Transactional magically wraps transaction handling around your code. +As constraints are checked by the database at the end when the transaction gets committed, a constraint violation will be thrown by this aspect outside your code. +In case you have to handle constraint violations manually, you have to do that in code outside the logic that is annotated with @Transactional. +This may be done in a service operation by catching a ConstraintViolationException (org.hibernate.exception.ConstraintViolationException for hibernate). +As a generic approach you can solve this via REST exception handling.

+
+
+
+

Batches

+
+

Transaction control for batches is a lot more complicated and is described in the batch layer.

+
+
+ +
+

==SQL

+
+
+

For general guides on dealing with or avoiding SQL, preventing SQL-injection, etc. you should study the domain layer.

+
+
+
+

Naming Conventions

+
+

Here we define naming conventions that you should follow whenever you write SQL files:

+
+
+
    +
  • +

    All SQL-Keywords in UPPER CASE

    +
  • +
  • +

    Indentation should be 2 spaces as suggested by devonfw for every format.

    +
  • +
+
+
+
DDL
+
+

The naming conventions for database constructs (tables, columns, triggers, constraints, etc.) should be aligned with your database product and their operators. +However, when you have the freedom of choice and a modern case-sensitive database, you can simply use your code conventions also for database constructs to avoid explicitly mapping each and every property (e.g. RestaurantTable vs. RESTAURANT_TABLE).

+
+
+
    +
  • +

    Define columns and constraints inline in the statement to create the table

    +
  • +
  • +

    Indent column types so they all start in the same text column

    +
  • +
  • +

    Constraints should be named explicitly (to get a reasonable hint in error messages) with:

    +
    +
      +
    • +

      PK_«table» for primary key (name optional here as PK constraint are fundamental)

      +
    • +
    • +

      FK_«table»_«property» for foreign keys («table» and «property» are both on the source where the foreign key is defined)

      +
    • +
    • +

      UC_«table»_«property»[_«propertyN»]* for unique constraints

      +
    • +
    • +

      CK_«table»_«check» for check constraints («check» describes the check, if it is defined on a single property it should start with the property).

      +
    • +
    +
    +
  • +
  • +

    Old RDBMS had hard limitations for names (e.g. 30 characters). Please note that recent databases have overcome these very low length limitations. However, keep your names short but precise and try to define common abbreviations in your project for the corresponding (business) terms. Especially do not just truncate the names at the limit.

    +
  • +
  • +

    If possible add comments on table and columns to help DBAs understanding your schema. This is also honored by many tools (not only DBA-tools).

    +
  • +
+
+
+

Here is a brief example of a DDL:

+
+
+
+
CREATE SEQUENCE HIBERNATE_SEQUENCE START WITH 1000000;
+
+-- *** Table ***
+CREATE TABLE RESTAURANT_TABLE (
+  ID                   NUMBER(19) NOT NULL,
+  MODIFICATION_COUNTER INTEGER NOT NULL,
+  SEATS                INTEGER NOT NULL,
+  CONSTRAINT PK_TABLE PRIMARY KEY(ID)
+);
+COMMENT ON TABLE RESTAURANT_TABLE IS 'The physical tables inside the restaurant.';
+-- *** Order ***
+CREATE TABLE RESTAURANT_ORDER (
+  ID                   NUMBER(19) NOT NULL,
+  MODIFICATION_COUNTER INTEGER NOT NULL,
+  TABLE_ID             NUMBER(19) NOT NULL,
+  TOTAL                DECIMAL(5, 2) NOT NULL,
+  CREATION_DATE        TIMESTAMP NOT NULL,
+  PAYMENT_DATE         TIMESTAMP,
+  STATUS               VARCHAR2(10 CHAR) NOT NULL,
+  CONSTRAINT PK_ORDER PRIMARY KEY(ID),
+  CONSTRAINT FK_ORDER_TABLE_ID FOREIGN KEY(TABLE_ID) REFERENCES RESTAURANT_TABLE(ID)
+);
+COMMENT ON TABLE RESTAURANT_ORDER IS 'An order and bill at the restaurant.';
+...
+
+
+
+

ATTENTION: Please note that TABLE and ORDER are reserved keywords in SQL and you should avoid using such keywords to prevent problems.

+
+
+
+
Data
+
+

For insert, update, delete, etc. of data SQL scripts should additionally follow these guidelines:

+
+
+
    +
  • +

    Inserts always with the same order of columns in blocks for each table.

    +
  • +
  • +

    Insert column values always starting with ID, MODIFICATION_COUNTER, [DTYPE, ] …​

    +
  • +
  • +

    List columns with fixed length values (boolean, number, enums, etc.) before columns with free text to support alignment of multiple insert statements

    +
  • +
  • +

    Pro Tip: Get familiar with column mode of advanced editors such as notepad++ when editing large blocks of similar insert statements.

    +
  • +
+
+
+
+
INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (0, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (1, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (2, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (3, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (4, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (5, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (6, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (7, 1, 8);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (8, 1, 8);
+...
+
+
+
+

See also Database Migrations.

+
+
+ +
+

==Database Migration

+
+
+

When you have a schema-based database, +you need a solution for schema versioning and migration for your database. +A specific release of your app requires a corresponding version of the schema in the database to run. +As you want simple and continuous deployment you should automate the schema versioning and database migration.

+
+
+

The general idea is that your software product contains "scripts" to migrate the database from schema version X to version X+1. +When you begin your project you start with version 1 and with every increment of your app that needs a change to the database schema (e.g. a new table, a new column to an existing table, a new index, etc.) you add another "script" that migrates from the current to the next version. +For simplicity these versions are just sequential numbers or timestamps. +Now, the solution you choose will automatically manage the schema version in a separate metadata table in your database that stores the current schema version. +When your app is started, it will check the current version inside the database from that metadata table. +As long as there are "scripts" that migrate from there to a higher version, they will be automatically applied to the database and this process is recorded in the metadata table in your database, which also updates the current schema version there. +Using this approach, you can start with an empty database, which will result in all "scripts" being applied sequentially. +Also any version of your database schema can be present and you will always end up in a controlled migration to the latest schema version.

+
+
+
+
+

Options for database migration

+
+

For database migration you can choose between the following options:

+
+
+
    +
  • +

    flyway (KISS based approach with migrations as SQL)

    +
  • +
  • +

    liquibase (more complex approach with database abstraction)

    +
  • +
+
+
+ +
+

==Logging

+
+
+

We recommend to use SLF4J as API for logging, that has become a de facto standard in Java as it has a much better design than java.util.logging offered by the JDK. +There are several implementations for SLF4J. For Spring applications our recommended implementation is Logback. Quarkus uses JBoss Logging which provides a JBoss Log Manager implementation for SLF4J. For more information on logging in Quarkus, see the Quarkus logging guide.

+
+
+
+

Logging Dependencies

+
+

To use Logback in your Spring application, you need to include the following dependencies:

+
+
+
+
<!-- SLF4J as logging API -->
+<dependency>
+  <groupId>org.slf4j</groupId>
+  <artifactId>slf4j-api</artifactId>
+</dependency>
+<!-- Logback as logging implementation  -->
+<dependency>
+  <groupId>ch.qos.logback</groupId>
+  <artifactId>logback-classic</artifactId>
+</dependency>
+<!-- JSON logging for cloud-native log monitoring -->
+<dependency>
+  <groupId>net.logstash.logback</groupId>
+  <artifactId>logstash-logback-encoder</artifactId>
+</dependency>
+
+
+
+

In devon4j these dependencies are provided by the devon4j-logging module.

+
+
+

In Quarkus, SLF4J and the slf4j-jboss-logmanager are directly included in the Quarkus core runtime and can be used out of the box.

+
+
+
+

Logger Access

+
+

The general pattern for accessing loggers from your code is a static logger instance per class using the following pattern:

+
+
+
+
import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class MyClass {
+  private static final Logger LOG = LoggerFactory.getLogger(MyClass.class);
+  ...
+}
+
+
+
+

For detailed documentation how to use the logger API check the SLF4j manual.

+
+
+ + + + + +
+ + +In case you are using devonfw-ide and Eclipse you can just type LOG and hit [ctrl][space] to insert the code pattern including the imports into your class. +
+
+
+
Lombok
+
+

In case you are using Lombok, you can simply use the @Slf4j annotation in your class. This causes Lombok to generate the logger instance for you.

+
+
+
+
+

Log-Levels

+
+

We use a common understanding of the log-levels as illustrated by the following table. +This helps for better maintenance and operation of the systems.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4. Log-levels
Log-levelDescriptionImpactActive Environments

FATAL

Only used for fatal errors that prevent the application to work at all (e.g. startup fails or shutdown/restart required)

Operator has to react immediately

all

ERROR

An abnormal error indicating that the processing failed due to technical problems.

Operator should check for known issue and otherwise inform development

all

WARNING

A situation where something worked not as expected. E.g. a business exception or user validation failure occurred.

No direct reaction required. Used for problem analysis.

all

INFO

Important information such as context, duration, success/failure of request or process

No direct reaction required. Used for analysis.

all

DEBUG

Development information that provides additional context for debugging problems.

No direct reaction required. Used for analysis.

development and testing

TRACE

Like DEBUG but exhaustive information and for code that is run very frequently. Will typically cause large log-files.

No direct reaction required. Used for problem analysis.

none (turned off by default)

+
+

Exceptions (with their stack trace) should only be logged on FATAL or ERROR level. For business exceptions typically a WARNING including the message of the exception is sufficient.

+
+
+
Configuration of Logback
+
+

The configuration of logback happens via the logback.xml file that you should place into src/main/resources of your app. +For details consult the logback configuration manual.

+
+
+ + + + + +
+ + +Logback also allows to overrule the configuration with a logback-test.xml file that you may put into src/test/resources or into a test-dependency. +
+
+
+
+
Configuration in Quarkus
+
+

There are several options you can set in the application.properties file to configure the behaviour of the logger in Quarkus. For a detailed overview, see the corresponding part of the Quarkus guide.

+
+
+
+
+

JSON-logging

+
+

For easy integration with log-monitoring, we recommend that your app logs to standard out in JSON following JSON Lines.

+
+
+

In Spring applications, this can be achieved via logstash-logback-encoder (see dependencies). In Quarkus, it can be easily achieved using the quarkus-logging-json extension (see here for more details).

+
+
+

This will produce log-lines with the following format (example formatted for readability):

+
+
+
+
{
+  "timestamp":"2000-12-31T23:59:59.999+00:00",
+  "@version":"1",
+  "message":"Processing 4 order(s) for shipment",
+  "logger_name":"com.myapp.order.logic.UcManageOrder",
+  "thread_name":"http-nio-8081-exec-6",
+  "level":"INFO",
+  "level_value":20000,
+  "appname":"myapp"
+}
+
+
+
+
Adding custom values to JSON log with Logstash
+
+

The JSON encoder even supports logging custom properties for your log-monitoring. +The trick is to use the class net.logstash.logback.argument.StructuredArguments for adding the arguments to your log message, e.g.

+
+
+
+
import static net.logstash.logback.argument.StructuredArguments.v;
+
+...
+    LOG.info("Request with {} and {} took {} ms.", v("url", url), v("status", statusCode), v("duration", millis));
+...
+
+
+
+

This will produce a JSON log-line with the following properties:

+
+
+
+
...
+  "message":"Request with url=https://api/service/v1/ordermanagement/order and status=200 took duration=251 ms",
+  "url":"https://api/service/v1/ordermanagement/order",
+  "status":"200",
+  "duration":"251",
+...
+
+
+
+

As you can quickly see besides the human readable message you also have the structured properties url, status and duration that can be extremely valuable to configure dashboards in your log-monitoring that visualize success/failure ratio as well as performance of your requests.

+
+
+
+
+

Classic log-files

+
+ + + + + +
+ + +In devon4j, we strongly recommend using JSON logging instead of classic log files. The following section refers only to devon4j Spring applications that use Logback. +
+
+
+

Even though we do not recommend anymore to write classical log-files to the local disc, here you can still find our approach for it.

+
+
+
Maven-Integration
+
+

In the pom.xml of your application add this dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java</groupId>
+  <artifactId>devon4j-logging</artifactId>
+</dependency>
+
+
+
+

The above dependency already adds transitive dependencies to SLF4J and logback. +Also it comes with configuration snippets that can be included from your logback.xml file (see configuration).

+
+
+

The logback.xml to write regular log-files can look as following:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<configuration scan="true" scanPeriod="60 seconds">
+  <property resource="com/devonfw/logging/logback/application-logging.properties" />
+  <property name="appname" value="MyApp"/>
+  <property name="logPath" value="../logs"/>
+  <include resource="com/devonfw/logging/logback/appenders-file-all.xml" />
+  <include resource="com/devonfw/logging/logback/appender-console.xml" />
+
+  <root level="DEBUG">
+    <appender-ref ref="ERROR_APPENDER"/>
+    <appender-ref ref="INFO_APPENDER"/>
+    <appender-ref ref="DEBUG_APPENDER"/>
+    <appender-ref ref="CONSOLE_APPENDER"/>
+  </root>
+
+  <logger name="org.springframework" level="INFO"/>
+</configuration>
+
+
+
+

The provided logback.xml is configured to use variables defined in the config/application.properties file. +In our example, the log file path points to ../logs/ in order to log to the tomcat log directory when starting tomcat from the bin folder. +Change it according to your custom needs.

+
+
+
config/application.properties
+
+
log.dir=../logs/
+
+
+
+
+
Log Files
+
+

The classical approach uses the following log files:

+
+
+
    +
  • +

    Error Log: Includes log entries to detect errors.

    +
  • +
  • +

    Info Log: Used to analyze system status and to detect bottlenecks.

    +
  • +
  • +

    Debug Log: Detailed information for error detection.

    +
  • +
+
+
+

The log file name pattern is as follows:

+
+
+
+
«LOGTYPE»_log_«HOST»_«APPLICATION»_«TIMESTAMP».log
+
+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 5. Segments of Logfilename
ElementValueDescription

«LOGTYPE»

info, error, debug

Type of log file

«HOST»

e.g. mywebserver01

Name of server, where logs are generated

«APPLICATION»

e.g. myapp

Name of application, which causes logs

«TIMESTAMP»

YYYY-MM-DD_HH00

date of log file

+
+

Example: +error_log_mywebserver01_myapp_2013-09-16_0900.log

+
+
+

Error log from mywebserver01 at application myapp at 16th September 2013 9am.

+
+
+
+
Output format
+
+

We use the following output format for all log entries to ensure that searching and filtering of log entries work consistent for all logfiles:

+
+
+
+
[D: «timestamp»] [P: «priority»] [C: «NDC»][T: «thread»][L: «logger»]-[M: «message»]
+
+
+
+
    +
  • +

    D: Date (Timestamp in ISO8601 format e.g. 2013-09-05 16:40:36,464)

    +
  • +
  • +

    P: Priority (the log level)

    +
  • +
  • +

    C: Correlation ID (ID to identify users across multiple systems, needed when application is distributed)

    +
  • +
  • +

    T: Thread (Name of thread)

    +
  • +
  • +

    L: Logger name (use class name)

    +
  • +
  • +

    M: Message (log message)

    +
  • +
+
+
+

Example:

+
+
+
+
[D: 2013-09-05 16:40:36,464] [P: DEBUG] [C: 12345] [T: main] [L: my.package.MyClass]-[M: My message...]
+
+
+
+ + + + + +
+ + +When using devon4j-logging, this format is used by default. To achieve this format in Quarkus, set quarkus.log.console.format=[D: %d] [P: %p] [C: %X] [T: %t] [L: %c] [M: %m]%n in your properties. +
+
+
+
+
Correlation ID
+
+

In order to correlate separate HTTP requests to services belonging to the same user / session, we provide a servlet filter called DiagnosticContextFilter. +This filter takes a provided correlation ID from the HTTP header X-Correlation-Id. +If none was found, it will generate a new correlation id as UUID. +This correlation ID is added as MDC to the logger. +Therefore, it will then be included to any log message of the current request (thread). +Further concepts such as service invocations will pass this correlation ID to subsequent calls in the application landscape. Hence you can find all log messages related to an initial request simply via the correlation ID even in highly distributed systems.

+
+
+
+
Security
+
+

In order to prevent log forging attacks you can simply use the suggested JSON logging format. +Otherwise you can use com.devonfw.module.logging.common.impl.SingleLinePatternLayout as demonstrated here in order to prevent such attacks.

+
+
+ +
+

==Security +Security is todays most important cross-cutting concern of an application and an enterprise IT-landscape. We seriously care about security and give you detailed guides to prevent pitfalls, vulnerabilities, and other disasters. While many mistakes can be avoided by following our guidelines you still have to consider security and think about it in your design and implementation. The security guide will not only automatically prevent you from any harm, but will provide you hints and best practices already used in different software products.

+
+
+

An important aspect of security is proper authentication and authorization as described in access-control. In the following we discuss about potential vulnerabilities and protection to prevent them.

+
+
+
+
+

Vulnerabilities and Protection

+
+

Independent from classical authentication and authorization mechanisms there are many common pitfalls that can lead to vulnerabilities and security issues in your application such as XSS, CSRF, SQL-injection, log-forging, etc. A good source of information about this is the OWASP. +We address these common threats individually in security sections of our technological guides as a concrete solution to prevent an attack typically depends on the according technology. The following table illustrates common threats and contains links to the solutions and protection-mechanisms provided by the devonfw:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 6. Security threats and protection-mechanisms
ThreatProtectionLink to details

A1 Injection

validate input, escape output, use proper frameworks

SQL Injection

A2 Broken Authentication

encrypt all channels, use a central identity management with strong password-policy

Authentication

A3 Sensitive Data Exposure

Use secured exception facade, design your data model accordingly

REST exception handling

A4 XML External Entities

Prefer JSON over XML, ensure FSP when parsing (external) XML

XML guide

A5 Broken Access Control

Ensure proper authorization for all use-cases, use @DenyAll as default to enforce

Access-control guide especially method authorization

A6 Security Misconfiguration

Use devon4j application template and guides to avoid

tutorial-newapp and sensitive configuration

A7 Cross-Site Scripting

prevent injection (see A1) for HTML, JavaScript and CSS and understand same-origin-policy

client-layer

A8 Insecure Deserialization

Use simple and established serialization formats such as JSON, prevent generic deserialization (for polymorphic types)

JSON guide especially inheritence, XML guide

A9 Using Components with Known Vulnerabilities

subscribe to security newsletters, recheck products and their versions continuously, use devonfw dependency management

CVE newsletter and dependency check

A10 Insufficient Logging & Monitoring

Ensure to log all security related events (login, logout, errors), establish effective monitoring

Logging guide and monitoring guide

Insecure Direct Object References

Using direct object references (IDs) only with appropriate authorization

logic-layer

Cross-Site Request Forgery (CSRF)

secure mutable service operations with an explicit CSRF security token sent in HTTP header and verified on the server

CSRF guide

Log-Forging

Escape newlines in log messages

logging security

Unvalidated Redirects and Forwards

Avoid using redirects and forwards, in case you need them do a security audit on the solution.

devonfw proposes to use rich-clients (SPA/RIA). We only use redirects for login in a safe way.

+
+
+

Advanced Security

+
+

While OWASP Top 10 covers the basic aspects of application security, there are advanced standards such as ASVS. +In devonfw we address this in the +Application Security Quick Solution Guide.

+
+
+
+

Tools

+
+
Dependency Check
+
+

To address the threat Using Components with Known Vulnerabilities we recommend to use OWASP dependency check that ships with a maven plugin and can analyze your dependencies for known CVEs. +In order to run this check, you can simply call this command on any maven project:

+
+
+
+
mvn org.owasp:dependency-check-maven:6.1.5:aggregate
+
+
+
+ + + + + +
+ + +The version is just for completeness. You should check yourself for using a recent version of the plugin. +
+
+
+

If you build a devon4j spring application from our app-template you can activate the dependency check even easier with the security profile:

+
+
+
+
mvn clean install -P security
+
+
+
+

This does not run by default as it causes some overhead for the build performance. However, consider to build this in your CI at least nightly. +After the dependency check is performed, you will find the results in target/dependency-check-report.html of each module. The report will also be generated when the site is built (mvn site) even without the profile.

+
+
+
+
Penetration Testing
+
+

For penetration testing (testing for vulnerabilities) of your web application, we recommend the following tools:

+
+
+ +
+
+ +
+

==Access-Control +Access-Control is a central and important aspect of Security. It consists of two major aspects:

+
+
+ +
+
+
+
+

Authentication

+
+

Definition:

+
+
+
+
+

Authentication is the verification that somebody interacting with the system is the actual subject for whom he claims to be.

+
+
+
+
+

The one authenticated is properly called subject or principal. There are two forms of principals you need to distinguish while designing your authentication: human users and autonomous systems. While e.g. a Kerberos/SPNEGO Single-Sign-On makes sense for human users, it is pointless for authenticating autonomous systems. For simplicity, we use the common term user to refer to any principal even though it may not be a human (e.g. in case of a service call from an external system).

+
+
+

To prove the authenticity, the user provides some secret called credentials. The most simple form of credentials is a password.

+
+
+
Implementations
+
+ + + + + +
+ + +Please never implement your own authentication mechanism or credential store. You have to be aware of implicit demands such as salting and hashing credentials, password life-cycle with recovery, expiry, and renewal including email notification confirmation tokens, central password policies, etc. This is the domain of access managers and identity management systems. In a business context you will typically already find a system for this purpose that you have to integrate (e.g. via LDAP). Otherwise you should consider establishing such a system e.g. using keycloak. +
+
+
+

We recommend using JWT when possible. For KISS, also try to avoid combining multiple authentication mechanisms (form based, basic-auth, SAMLv2, OAuth, etc.) within the same application (for different URLs).

+
+
+

For spring, check the Spring Security

+
+
+

For quarkus, check the Quarkus Authentication

+
+
+
+
+

Authorization

+
+

Definition:

+
+
+
+
+

Authorization is the verification that an authenticated user is allowed to perform the operation he intends to invoke.

+
+
+
+
+
Clarification of terms
+
+

For clarification we also want to give a common understanding of related terms that have no unique definition and consistent usage in the wild.

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 7. Security terms related to authorization
TermMeaning and comment

Permission

A permission is an object that allows a principal to perform an operation in the system. This permission can be granted (give) or revoked (taken away). Sometimes people also use the term right what is actually wrong as a right (such as the right to be free) can not be revoked.

Group

We use the term group in this context for an object that contains permissions. A group may also contain other groups. Then the group represents the set of all recursively contained permissions.

Role

We consider a role as a specific form of group that also contains permissions. A role identifies a specific function of a principal. A user can act in a role.

+

For simple scenarios a principal has a single role associated. In more complex situations a principal can have multiple roles but has only one active role at a time that he can choose out of his assigned roles. For KISS it is sometimes sufficient to avoid this by creating multiple accounts for the few users with multiple roles. Otherwise at least avoid switching roles at run-time in clients as this may cause problems with related states. Simply restart the client with the new role as parameter in case the user wants to switch his role.

Access Control

Any permission, group, role, etc., which declares a control for access management.

+
+
+
Suggestions on the access model
+
+

For the access model we give the following suggestions:

+
+
+
    +
  • +

    Each Access Control (permission, group, role, …​) is uniquely identified by a human readable string.

    +
  • +
  • +

    We create a unique permission for each use-case.

    +
  • +
  • +

    We define groups that combine permissions to typical and useful sets for the users.

    +
  • +
  • +

    We define roles as specific groups as required by our business demands.

    +
  • +
  • +

    We allow to associate users with a list of Access Controls.

    +
  • +
  • +

    For authorization of an implemented use case we determine the required permission. Furthermore, we determine the current user and verify that the required permission is contained in the tree spanned by all his associated Access Controls. If the user does not have the permission we throw a security exception and thus abort the operation and transaction.

    +
  • +
  • +

    We avoid negative permissions, that is a user has no permission by default and only those granted to him explicitly give him additional permission for specific things. Permissions granted can not be reduced by other permissions.

    +
  • +
  • +

    Technically we consider permissions as a secret of the application. Administrators shall not fiddle with individual permissions but grant them via groups. So the access management provides a list of strings identifying the Access Controls of a user. The individual application itself contains these Access Controls in a structured way, whereas each group forms a permission tree.

    +
  • +
+
+
+
+
Naming conventions
+
+

As stated above each Access Control is uniquely identified by a human readable string. This string should follow the naming convention:

+
+
+
+
«app-id».«local-name»
+
+
+
+

For Access Control Permissions the «local-name» again follows the convention:

+
+
+
+
«verb»«object»
+
+
+
+

The segments are defined by the following table:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 8. Segments of Access Control Permission ID
SegmentDescriptionExample

«app-id»

Is a unique technical but human readable string of the application (or microservice). It shall not contain special characters and especially no dot or whitespace. We recommend to use lower-train-case-ascii-syntax. The identity and access management should be organized on enterprise level rather than application level. Therefore permissions of different apps might easily clash (e.g. two apps might both define a group ReadMasterData but some user shall get this group for only one of these two apps). Using the «app-id». prefix is a simple but powerful namespacing concept that allows you to scale and grow. You may also reserve specific «app-id»s for cross-cutting concerns that do not actually reflect a single app e.g. to grant access to a geographic region.

shop

«verb»

The action that is to be performed on «object». We use Find for searching and reading data. Save shall be used both for create and update. Only if you really have demands to separate these two you may use Create in addition to Save. Finally, Delete is used for deletions. For non CRUD actions you are free to use additional verbs such as Approve or Reject.

Find

«object»

The affected object or entity. Shall be named according to your data-model

Product

+
+

So as an example shop.FindProduct will reflect the permission to search and retrieve a Product in the shop application. The group shop.ReadMasterData may combine all permissions to read master-data from the shop. However, also a group shop.Admin may exist for the Admin role of the shop application. Here the «local-name» is Admin that does not follow the «verb»«object» schema.

+
+
+
+
devon4j-security
+
+

The module devon4j-security provides ready-to-use code based on spring-security that makes your life a lot easier.

+
+
+
+access-control +
+
Figure 2. devon4j Security Model
+
+
+

The diagram shows the model of devon4j-security that separates two different aspects:

+
+
+
    +
  • +

    The Identity- and Access-Management is provided by according products and typically already available in the enterprise landscape (e.g. an active directory). It provides a hierarchy of primary access control objects (roles and groups) of a user. An administrator can grant and revoke permissions (indirectly) via this way.

    +
  • +
  • +

    The application security defines a hierarchy of secondary access control objects (groups and permissions). This is done by configuration owned by the application (see following section). The "API" is defined by the IDs of the primary access control objects that will be referenced from the Identity- and Access-Management.

    +
  • +
+
+
+
+
Access Control Config
+
+

In your application simply extend AccessControlConfig to configure your access control objects as code and reference it from your use-cases. An example config may look like this:

+
+
+
+
@Named
+public class ApplicationAccessControlConfig extends AccessControlConfig {
+
+  public static final String APP_ID = "MyApp";
+
+  private static final String PREFIX = APP_ID + ".";
+
+  public static final String PERMISSION_FIND_OFFER = PREFIX + "FindOffer";
+
+  public static final String PERMISSION_SAVE_OFFER = PREFIX + "SaveOffer";
+
+  public static final String PERMISSION_DELETE_OFFER = PREFIX + "DeleteOffer";
+
+  public static final String PERMISSION_FIND_PRODUCT = PREFIX + "FindProduct";
+
+  public static final String PERMISSION_SAVE_PRODUCT = PREFIX + "SaveProduct";
+
+  public static final String PERMISSION_DELETE_PRODUCT = PREFIX + "DeleteProduct";
+
+  public static final String GROUP_READ_MASTER_DATA = PREFIX + "ReadMasterData";
+
+  public static final String GROUP_MANAGER = PREFIX + "Manager";
+
+  public static final String GROUP_ADMIN = PREFIX + "Admin";
+
+  public ApplicationAccessControlConfig() {
+
+    super();
+    AccessControlGroup readMasterData = group(GROUP_READ_MASTER_DATA, PERMISSION_FIND_OFFER, PERMISSION_FIND_PRODUCT);
+    AccessControlGroup manager = group(GROUP_MANAGER, readMasterData, PERMISSION_SAVE_OFFER, PERMISSION_SAVE_PRODUCT);
+    AccessControlGroup admin = group(GROUP_ADMIN, manager, PERMISSION_DELETE_OFFER, PERMISSION_DELETE_PRODUCT);
+  }
+}
+
+
+
+
+
Configuration on Java Method level
+
+

In your use-case you can now reference a permission like this:

+
+
+
+
@Named
+public class UcSafeOfferImpl extends ApplicationUc implements UcSafeOffer {
+
+  @Override
+  @RolesAllowed(ApplicationAccessControlConfig.PERMISSION_SAVE_OFFER)
+  public OfferEto save(OfferEto offer) { ... }
+  ...
+}
+
+
+
+
+
JEE Standard
+
+

Role-based Access Control (RBAC) is commonly used for authorization. +JSR 250 defines a number of common annotations to secure your application.

+
+
+
    +
  • +

    javax.annotation.security.PermitAll specifies that no access control is required to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.DenyAll specifies that no access controls are allowed to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.RolesAllowed specifies that only a list of access controls are allowed to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.DeclareRoles defines roles for security checking.

    +
  • +
  • +

    javax.annotation.security.RunAs specifies the RunAs role for the given components.

    +
  • +
+
+
+

@PermitAll, @DenyAll, and @RolesAllowed annotations can be applied to both class and method. +A method-level annotation will override the behaviour of class-level annotation. Using multiple annotations of those 3 is not valid.

+
+
+
+
// invalid
+@PermitAll
+@DenyAll
+public String foo()
+
+// invalid and compilation fails
+@RolesAllowed("admin")
+@RolesAllowed("user")
+public String bar()
+
+// OK
+@RolesAllowed({"admin", "user"})
+public String bar()
+
+
+
+

Please note that when specifying multiple arguments to @RolesAllowed those are combined with OR (and not with AND). +So if the user has any of the specified access controls, he will be able to access the method.

+
+
+

As a best practice avoid specifying string literals to @RolesAllowed. +Instead define a class with all access controls as constants and reference them from there. +This class is typically called ApplicationAccessControlConfig in devonfw.

+
+
+

In many complicated cases where @PermitAll @DenyAll @RolesAllowed are insufficient e.g. a method should be accessed by a user in role A and not in role B at the same time, you have to verify the user role directly in the method. You can use SecurityContext class to get further needed information.

+
+
+
Spring
+
+

Spring Security also supports authorization on method level. To use it, you need to add the spring-security-config dependency. If you use Spring Boot, the dependency spring-boot-starter-security already includes spring-security-config. Then you can configure as follows:

+
+
+
    +
  • +

    prePostEnabled property enables Spring Security pre/post annotations. @PreAuthorize and @PostAuthorize annotations provide expression-based access control. See more here

    +
  • +
  • +

    securedEnabled property determines if the @Secured annotation should be enabled. @Secured can be used similarly to @RolesAllowed.

    +
  • +
  • +

    jsr250Enabled property allows us to use the JSR-250 annotations such as @RolesAllowed.

    +
  • +
+
+
+
+
@Configuration
+@EnableGlobalMethodSecurity(
+  prePostEnabled = true,
+  securedEnabled = true,
+  jsr250Enabled = true)
+public class MethodSecurityConfig
+  extends GlobalMethodSecurityConfiguration {
+}
+
+
+
+

A further read about the whole concept of Spring Security Authorization can be found here.

+
+
+
+
Quarkus
+
+

Quarkus comes with built-in security to allow for RBAC based on the common security annotations @RolesAllowed, @DenyAll, @PermitAll on REST endpoints and CDI beans. Quarkus also provides the io.quarkus.security.Authenticated annotation that will permit any authenticated user to access the resource (equivalent to @RolesAllowed("**")).

+
+
+
+
+
Data-based Permissions
+ +
+
+
Access Control Schema (deprecated)
+
+

The access-control-schema.xml approach is deprecated. The documentation can still be found in access control schema.

+
+
+ +
+

==Data-permissions

+
+
+

In some projects there are demands for permissions and authorization that is dependent on the processed data. E.g. a user may only be allowed to read or write data for a specific region. This is adding some additional complexity to your authorization. If you can avoid this it is always best to keep things simple. However, in various cases this is a requirement. Therefore the following sections give you guidance and patterns how to solve this properly.

+
+
+
+
+

Structuring your data

+
+

For all your business objects (entities) that have to be secured regarding to data permissions we recommend that you create a separate interface that provides access to the relevant data required to decide about the permission. Here is a simple example:

+
+
+
+
public interface SecurityDataPermissionCountry {
+
+  /**
+   * @return the 2-letter ISO code of the country this object is associated with. Users need
+   *         a data-permission for this country in order to read and write this object.
+   */
+  String getCountry();
+}
+
+
+
+

Now related business objects (entities) can implement this interface. Often such data-permissions have to be applied to an entire object-hierarchy. For security reasons we recommend that also all child-objects implement this interface. For performance reasons we recommend that the child-objects redundantly store the data-permission properties (such as country in the example above) and this gets simply propagated from the parent, when a child object is created.

+
+
+
+

Permissions for processing data

+
+

When saving or processing objects with a data-permission, we recommend to provide dedicated methods to verify the permission in an abstract base-class such as AbstractUc and simply call this explicitly from your business code. This makes it easy to understand and debug the code. Here is a simple example:

+
+
+
+
protected void verifyPermission(SecurityDataPermissionCountry entity) throws AccessDeniedException;
+
+
+
+
Beware of AOP
+
+

For simple but cross-cutting data-permissions you may also use AOP. This leads to programming aspects that reflectively scan method arguments and magically decide what to do. Be aware that this quickly gets tricky:

+
+
+
    +
  • +

    What if multiple of your method arguments have data-permissions (e.g. implement SecurityDataPermission*)?

    +
  • +
  • +

    What if the object to authorize is only provided as reference (e.g. Long or IdRef) and only loaded and processed inside the implementation where the AOP aspect does not apply?

    +
  • +
  • +

    How to express advanced data-permissions in annotations?

    +
  • +
+
+
+

What we have learned is that annotations like @PreAuthorize from spring-security easily lead to the "programming in string literals" anti-pattern. We strongly discourage to use this anti-pattern. In such case writing your own verifyPermission methods that you manually call in the right places of your business-logic is much better to understand, debug and maintain.

+
+
+
+
+

Permissions for reading data

+
+

When it comes to restrictions on the data to read it becomes even more tricky. In the context of a user only entities shall be loaded from the database he is permitted to read. This is simple for loading a single entity (e.g. by its ID) as you can load it and then if not permitted throw an exception to secure your code. But what if the user is performing a search query to find many entities? For performance reasons we should only find data the user is permitted to read and filter all the rest already via the database query. But what if this is not a requirement for a single query but needs to be applied cross-cutting to tons of queries? Therefore we have the following pattern that solves your problem:

+
+
+

For each data-permission attribute (or set of such) we create an abstract base entity:

+
+
+
+
@MappedSuperclass
+@EntityListeners(PermissionCheckListener.class)
+@FilterDef(name = "country", parameters = {@ParamDef(name = "countries", type = "string")})
+@Filter(name = "country", condition = "country in (:countries)")
+public abstract class SecurityDataPermissionCountryEntity extends ApplicationPersistenceEntity
+    implements SecurityDataPermissionCountry {
+
+  private String country;
+
+  @Override
+  public String getCountry() {
+    return this.country;
+  }
+
+  public void setCountry(String country) {
+    this.country = country;
+  }
+}
+
+
+
+

There are some special hibernate annotations @EntityListeners, @FilterDef, and @Filter used here allowing to apply a filter on the country for any (non-native) query performed by hibernate. The entity listener may look like this:

+
+
+
+
public class PermissionCheckListener {
+
+  @PostLoad
+  public void read(SecurityDataPermissionCountryEntity entity) {
+    PermissionChecker.getInstance().requireReadPermission(entity);
+  }
+
+  @PrePersist
+  @PreUpdate
+  public void write(SecurityDataPermissionCountryEntity entity) {
+    PermissionChecker.getInstance().requireWritePermission(entity);
+  }
+}
+
+
+
+

This will ensure that hibernate implicitly will call these checks for every such entity when it is read from or written to the database. Further to avoid reading entities from the database the user is not permitted to (and ending up with exceptions), we create an AOP aspect that automatically activates the above declared hibernate filter:

+
+
+
+
@Named
+public class PermissionCheckerAdvice implements MethodBeforeAdvice {
+
+  @Inject
+  private PermissionChecker permissionChecker;
+
+  @PersistenceContext
+  private EntityManager entityManager;
+
+  @Override
+  public void before(Method method, Object[] args, Object target) {
+
+    Collection<String> permittedCountries = this.permissionChecker.getPermittedCountriesForReading();
+    if (permittedCountries != null) { // null is returned for admins that may access all countries
+      if (permittedCountries.isEmpty()) {
+        throw new AccessDeniedException("Not permitted for any country!");
+      }
+      Session session = this.entityManager.unwrap(Session.class);
+      session.enableFilter("country").setParameterList("countries", permittedCountries.toArray());
+    }
+  }
+}
+
+
+
+

Finally to apply this aspect to all Repositories (can easily be changed to DAOs) implement the following advisor:

+
+
+
+
@Named
+public class PermissionCheckerAdvisor implements PointcutAdvisor, Pointcut, ClassFilter, MethodMatcher {
+
+  @Inject
+  private PermissionCheckerAdvice advice;
+
+  @Override
+  public Advice getAdvice() {
+    return this.advice;
+  }
+
+  @Override
+  public boolean isPerInstance() {
+    return false;
+  }
+
+  @Override
+  public Pointcut getPointcut() {
+    return this;
+  }
+
+  @Override
+  public ClassFilter getClassFilter() {
+    return this;
+  }
+
+  @Override
+  public MethodMatcher getMethodMatcher() {
+    return this;
+  }
+
+  @Override
+  public boolean matches(Method method, Class<?> targetClass) {
+    return true; // apply to all methods
+  }
+
+  @Override
+  public boolean isRuntime() {
+    return false;
+  }
+
+  @Override
+  public boolean matches(Method method, Class<?> targetClass, Object... args) {
+    throw new IllegalStateException("isRuntime()==false");
+  }
+
+  @Override
+  public boolean matches(Class<?> clazz) {
+    // when using DAOs simply change to some class like ApplicationDao
+    return DefaultRepository.class.isAssignableFrom(clazz);
+  }
+}
+
+
+
+
+

Managing and granting the data-permissions

+
+

Following our authorization guide we can simply create a permission for each country. We might simply reserve a prefix (as virtual «app-id») for each data-permission to allow granting data-permissions to end-users across all applications of the IT landscape. In our example we could create access controls country.DE, country.US, country.ES, etc. and assign those to the users. The method permissionChecker.getPermittedCountriesForReading() would then scan for these access controls and only return the 2-letter country code from it.

+
+
+ + + + + +
+ + +Before you make your decisions how to design your access controls please clarify the following questions: +
+
+
+
    +
  • +

    Do you need to separate data-permissions independent of the functional permissions? E.g. may it be required to express that a user can read data from the countries ES and PL but is only permitted to modify data from PL? In such case a single assignment of "country-permissions" to users is insufficient.

    +
  • +
  • +

    Do you want to grant data-permissions individually for each application (higher flexibility and complexity) or for the entire application landscape (simplicity, better maintenance for administrators)? In case of the first approach you would rather have access controls like app1.country.GB and app2.country.GB.

    +
  • +
  • +

    Do your data-permissions depend on objects that can be created dynamically inside your application?

    +
  • +
  • +

    If you want to grant data-permissions on other business objects (entities), how do you want to reference them (primary keys, business keys, etc.)? What reference is most stable? Which is most readable?

    +
  • +
+
+
+ +
+

==Validation

+
+
+

Validation is about checking syntax and semantics of input data. Invalid data is rejected by the application. +Therefore validation is required in multiple places of an application. E.g. the GUI will do validation for usability reasons to assist the user, early feedback and to prevent unnecessary server requests. +On the server-side validation has to be done for consistency and security.

+
+
+

In general we distinguish these forms of validation:

+
+
+
    +
  • +

    stateless validation will produce the same result for given input at any time (for the same code/release).

    +
  • +
  • +

    stateful validation is dependent on other states and can consider the same input data as valid in one case and as invalid in another.

    +
  • +
+
+
+
+

Stateless Validation

+
+

For regular, stateless validation we use the JSR303 standard that is also called bean validation (BV). +Details can be found in the specification. +As implementation we recommend hibernate-validator.

+
+
+
Example
+
+

A description of how to enable BV for spring applications can be found in the relevant Spring documentation. A guide you can use to integrate validation in Quarkus applications can be found here. For a quick summary follow these steps:

+
+
+
    +
  • +

    Make sure that hibernate-validator is located in the classpath by adding a dependency to the pom.xml.

    +
  • +
+
+
+
spring
+
+
    <dependency>
+      <groupId>org.hibernate</groupId>
+      <artifactId>hibernate-validator</artifactId>
+    </dependency>
+
+
+
+
quarkus
+
+
    <dependency>
+      <groupId>io.quarkus</groupId>
+      <artifactId>quarkus-hibernate-validator</artifactId>
+    </dependency>
+
+
+
+
    +
  • +

    For methods to validate go to their declaration and add constraint annotations to the method parameters.

    +
    +

    In spring applications you can add the @Validated annotation to the implementation (spring bean) to be validated (this is an annotation of the spring framework, so it's not available in the Quarkus context). The standard use case is to annotate the logic layer implementation, i.e. the use case implementation or component facade in case of simple logic layer pattern. Thus, the validation will be executed for service requests as well as batch processing.

    +
    +
    +
      +
    • +

      @Valid annotation to the arguments to validate (if that class itself is annotated with constraints to check).

      +
    • +
    • +

      @NotNull for required arguments.

      +
    • +
    • +

      Other constraints (e.g. @Size) for generic arguments (e.g. of type String or Integer). However, consider to create custom datatypes and avoid adding too much validation logic (especially redundant in multiple places).

      +
    • +
    +
    +
  • +
+
+
+
BookingmanagementRestServiceImpl.java
+
+
@Validated
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+  ...
+  public BookingEto saveBooking(@Valid BookingCto booking) {
+  ...
+
+
+
+
    +
  • +

    Finally add appropriate validation constraint annotations to the fields of the ETO class.

    +
  • +
+
+
+
BookingCto.java
+
+
  @Valid
+  private BookingEto booking;
+
+
+
+
BookingEto.java
+
+
  @NotNull
+  @Future
+  private Timestamp bookingDate;
+
+
+
+

A list with all bean validation constraint annotations available for hibernate-validator can be found here. In addition it is possible to configure custom constraints. Therefore it is necessary to implement an annotation and a corresponding validator. A description can also be found in the Spring documentation or with more details in the hibernate documentation.

+
+
+ + + + + +
+ + +Bean Validation in Wildfly >v8: Wildfly v8 is the first version of Wildfly implementing the JEE7 specification. It comes with bean validation based on hibernate-validator out of the box. In case someone is running Spring in Wildfly for whatever reasons, the spring based annotation @Validated would duplicate bean validation at runtime and thus should be omitted. +
+
+
+
+
GUI-Integration
+
+

TODO

+
+
+
+
Cross-Field Validation
+
+

BV has poor support for this. Best practice is to create and use beans for ranges, etc. that solve this. A bean for a range could look like so:

+
+
+
+
public class Range<V extends Comparable<V>> {
+
+  private V min;
+  private V max;
+
+  public Range(V min, V max) {
+
+    super();
+    if ((min != null) && (max != null)) {
+      int delta = min.compareTo(max);
+      if (delta > 0) {
+        throw new ValueOutOfRangeException(null, min, min, max);
+      }
+    }
+    this.min = min;
+    this.max = max;
+  }
+
+  public V getMin() ...
+  public V getMax() ...
+
+
+
+
+
+

Stateful Validation

+
+

For complex and stateful business validations we do not use BV (possible with groups and context, etc.) but follow KISS and just implement this on the server in a straight forward manner. +An example is the deletion of a table in the example application. Here the state of the table must be checked first:

+
+
+

BookingmanagementImpl.java

+
+
+
+
  private void sendConfirmationEmails(BookingEntity booking) {
+
+    if (!booking.getInvitedGuests().isEmpty()) {
+      for (InvitedGuestEntity guest : booking.getInvitedGuests()) {
+        sendInviteEmailToGuest(guest, booking);
+      }
+    }
+
+    sendConfirmationEmailToHost(booking);
+  }
+
+
+
+

Implementing this small check with BV would be a lot more effort.

+
+
+ +
+

==Aspect Oriented Programming (AOP)

+
+
+

AOP is a powerful feature for cross-cutting concerns. However, if used extensive and for the wrong things an application can get unmaintainable. Therefore we give you the best practices where and how to use AOP properly.

+
+
+
+

AOP Key Principles

+
+

We follow these principles:

+
+
+
    +
  • +

    We use spring AOP based on dynamic proxies (and fallback to cglib).

    +
  • +
  • +

    We avoid AspectJ and other mighty and complex AOP frameworks whenever possible

    +
  • +
  • +

    We only use AOP where we consider it as necessary (see below).

    +
  • +
+
+
+
+

AOP Usage

+
+

We recommend to use AOP with care but we consider it established for the following cross cutting concerns:

+
+
+ +
+
+
+

AOP Debugging

+
+

When using AOP with dynamic proxies the debugging of your code can get nasty. As you can see by the red boxes in the call stack in the debugger there is a lot of magic happening while you often just want to step directly into the implementation skipping all the AOP clutter. When using Eclipse this can easily be achieved by enabling step filters. Therefore you have to enable the feature in the Eclipse tool bar (highlighted in red).

+
+
+
+AOP debugging +
+
+
+

In order to properly make this work you need to ensure that the step filters are properly configured:

+
+
+
+Step Filter Configuration +
+
+
+

Ensure you have at least the following step-filters configured and active:

+
+
+
+
ch.qos.logback.*
+com.devonfw.module.security.*
+java.lang.reflect.*
+java.security.*
+javax.persistence.*
+org.apache.commons.logging.*
+org.apache.cxf.jaxrs.client.*
+org.apache.tomcat.*
+org.h2.*
+org.springframework.*
+
+
+
+ +
+

==Exception Handling

+
+
+
+

Exception Principles

+
+

For exceptions we follow these principles:

+
+
+
    +
  • +

    We only use exceptions for exceptional situations and not for programming control flows, etc. Creating an exception in Java is expensive and hence should not be done for simply testing whether something is present, valid or permitted. In the latter case design your API to return this as a regular result.

    +
  • +
  • +

    We use unchecked exceptions (RuntimeException) [1]

    +
  • +
  • +

    We distinguish internal exceptions and user exceptions:

    +
    +
      +
    • +

      Internal exceptions have technical reasons. For unexpected and exotic situations, it is sufficient to throw existing exceptions such as IllegalStateException. For common scenarios a own exception class is reasonable.

      +
    • +
    • +

      User exceptions contain a message explaining the problem for end users. Therefore, we always define our own exception classes with a clear, brief, but detailed message.

      +
    • +
    +
    +
  • +
  • +

    Our own exceptions derive from an exception base class supporting

    + +
  • +
+
+
+

All this is offered by mmm-util-core, which we propose as a solution. +If you use the devon4j-rest module, this is already included. For Quarkus applications, you need to add the dependency manually.

+
+
+

If you want to avoid additional dependencies, you can implement your own solution for this by creating an abstract exception class ApplicationBusinessException extending from RuntimeException. For an example of this, see our Quarkus reference application.

+
+
+
+

Exception Example

+
+

Here is an exception class from our sample application:

+
+
+
+
public class IllegalEntityStateException extends ApplicationBusinessException {
+
+  private static final long serialVersionUID = 1L;
+
+  public IllegalEntityStateException(Object entity, Object state) {
+
+    this((Throwable) null, entity, state);
+  }
+
+
+  public IllegalEntityStateException(Object entity, Object currentState, Object newState) {
+
+    this(null, entity, currentState, newState);
+  }
+
+  public IllegalEntityStateException(Throwable cause, Object entity, Object state) {
+
+    super(cause, createBundle(NlsBundleApplicationRoot.class).errorIllegalEntityState(entity, state));
+  }
+
+  public IllegalEntityStateException(Throwable cause, Object entity, Object currentState, Object newState) {
+
+    super(cause, createBundle(NlsBundleApplicationRoot.class).errorIllegalEntityStateChange(entity, currentState,
+        newState));
+  }
+
+}
+
+
+
+

The message templates are defined in the interface NlsBundleRestaurantRoot as following:

+
+
+
+
public interface NlsBundleApplicationRoot extends NlsBundle {
+
+
+  @NlsBundleMessage("The entity {entity} is in state {state}!")
+  NlsMessage errorIllegalEntityState(@Named("entity") Object entity, @Named("state") Object state);
+
+
+  @NlsBundleMessage("The entity {entity} in state {currentState} can not be changed to state {newState}!")
+  NlsMessage errorIllegalEntityStateChange(@Named("entity") Object entity, @Named("currentState") Object currentState,
+      @Named("newState") Object newState);
+
+
+  @NlsBundleMessage("The property {property} of object {object} can not be changed!")
+  NlsMessage errorIllegalPropertyChange(@Named("object") Object object, @Named("property") Object property);
+
+  @NlsBundleMessage("There is currently no user logged in")
+  NlsMessage errorNoActiveUser();
+
+
+
+
+

Handling Exceptions

+
+

For catching and handling exceptions we follow these rules:

+
+
+
    +
  • +

    We do not catch exceptions just to wrap or to re-throw them.

    +
  • +
  • +

    If we catch an exception and throw a new one, we always have to provide the original exception as cause to the constructor of the new exception.

    +
  • +
  • +

    At the entry points of the application (e.g. a service operation) we have to catch and handle all throwables. This is done via the exception-facade-pattern via an explicit facade or aspect. The devon4j-rest module already provides ready-to-use implementations for this such as RestServiceExceptionFacade that you can use in your Spring application. For Quarkus, follow the Quarkus guide on exception handling.
    +The exception facade has to …​

    +
    +
      +
    • +

      log all errors (user errors on info and technical errors on error level)

      +
    • +
    • +

      ensure that the entire exception is passed to the logger (not only the message) so that the logger can capture the entire stacktrace and the root cause is not lost.

      +
    • +
    • +

      convert the error to a result appropriable for the client and secure for Sensitive Data Exposure. Especially for security exceptions only a generic security error code or message may be revealed but the details shall only be logged but not be exposed to the client. All internal exceptions are converted to a generic error with a message like:

      +
      +
      +
      +

      An unexpected technical error has occurred. We apologize any inconvenience. Please try again later.

      +
      +
      +
      +
    • +
    +
    +
  • +
+
+
+
+

Common Errors

+
+

The following errors may occur in any devon application:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 9. Common Exceptions
CodeMessageLink

TechnicalError

An unexpected error has occurred! We apologize any inconvenience. Please try again later.

TechnicalErrorUserException.java

ServiceInvoke

«original message of the cause»

ServiceInvocationFailedException.java

+
+ +
+

==Internationalization +Internationalization (I18N) is about writing code independent from locale-specific information. +For I18N of text messages we are suggesting +mmm native-language-support.

+
+
+

In devonfw we have developed a solution to manage text internationalization. The devonfw solution comes in two aspects:

+
+
+
    +
  • +

    Bind locale information to the user.

    +
  • +
  • +

    Get the messages in the current user locale.

    +
  • +
+
+
+
+

Binding locale information to the user

+
+

We have defined two different points to bind locale information to the user, depending on whether the user is authenticated or not.

+
+
+
    +
  • +

    User not authenticated: devonfw intercepts the unsecured request and extracts the locale from it. At first, we try to extract a language parameter from the request and if that is not possible, we extract the locale from the Accept-Language header.

    +
  • +
  • +

    User authenticated: during the login process, application developers are responsible for filling the language parameter in the UserProfile class. This language parameter could be obtained from DB, LDAP, the request, etc. In the devonfw sample we get the locale information from the database.

    +
  • +
+
+
+

This image shows the entire process:

+
+
+
+Internationalization +
+
+
+
+

Getting internationalized messages

+
+

devonfw has a bean that manages i18n message resolution, the ApplicationLocaleResolver. This bean is responsible for getting the current user, extracting the locale information from it and reading the correct properties file to get the message.

+
+
+

The i18n properties file must be called ApplicationMessages_la_CO.properties where la=language and CO=country. This is an example of an i18n properties file for the English language to translate devonfw sample user roles:

+
+
+

ApplicationMessages_en_US.properties

+
+
+
+
admin=Admin
+
+
+
+

You should define an ApplicationMessages_la_CO.properties file for every language that your application needs.

+
+
+

The ApplicationLocaleResolver bean is injected into the AbstractComponentFacade class, so this bean is available in the logic layer and you only need the following code to get an internationalized message:

+
+
+
+
String msg = getApplicationLocaleResolver().getMessage("mymessage");
+
+
+
+ +
+

==XML

+
+
+

XML (Extensible Markup Language) is a W3C standard format for structured information. It has a large eco-system of additional standards and tools.

+
+
+

In Java there are many different APIs and frameworks for accessing, producing and processing XML. For the devonfw we recommend to use JAXB for mapping Java objects to XML and vice-versa. Further there is the popular DOM API for reading and writing smaller XML documents directly. When processing large XML documents StAX is the right choice.

+
+
+
+

JAXB

+
+

We use JAXB to serialize Java objects to XML or vice-versa.

+
+
+
JAXB and Inheritance
+
+

Use @XmlSeeAlso annotation to provide sub-classes. +See section "Collective Polymorphism" described here.

+
+
+
+
JAXB Custom Mapping
+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to define a custom mapping. If you create dedicated objects for the XML mapping you can easily avoid such situations. When this is not suitable use @XmlJavaTypeAdapter and provide an XmlAdapter implementation that handles the mapping. +For details see here.

+
+
+
+
+

Security

+
+

To prevent XML External Entity attacks, follow JAXP Security Guide and enable FSP.

+
+
+ +
+

==JSON

+
+
+

JSON (JavaScript Object Notation) is a popular format to represent and exchange data especially for modern web-clients. For mapping Java objects to JSON and vice-versa there is no official standard API. We use the established and powerful open-source solution Jackson. +Due to problems with the wiki of fasterxml you should try this alternative link: Jackson/AltLink.

+
+
+
+

Configure JSON Mapping

+
+

In order to avoid polluting business objects with proprietary Jackson annotations (e.g. @JsonTypeInfo, @JsonSubTypes, @JsonProperty) we propose to create a separate configuration class. Every devonfw application (sample or any app created from our app-template) therefore has a class called ApplicationObjectMapperFactory that extends ObjectMapperFactory from the devon4j-rest module. It looks like this:

+
+
+
+
@Named("ApplicationObjectMapperFactory")
+public class ApplicationObjectMapperFactory extends ObjectMapperFactory {
+
+  public RestaurantObjectMapperFactory() {
+    super();
+    // JSON configuration code goes here
+  }
+}
+
+
+
+
+

JSON and Inheritance

+
+

If you are using inheritance for your objects mapped to JSON then polymorphism can not be supported out-of-the box. So in general avoid polymorphic objects in JSON mapping. However, this is not always possible. +Have a look at the following example from our sample application:

+
+
+
+inheritance class diagram +
+
Figure 3. Transfer-Objects using Inheritance
+
+
+

Now assume you have a REST service operation as Java method that takes a ProductEto as argument. As this is an abstract class the server needs to know the actual sub-class to instantiate. +We typically do not want to specify the classname in the JSON as this should be an implementation detail and not part of the public JSON format (e.g. in case of a service interface). Therefore we use a symbolic name for each polymorphic subtype that is provided as virtual attribute @type within the JSON data of the object:

+
+
+
+
{ "@type": "Drink", ... }
+
+
+
+

Therefore you add configuration code to the constructor of ApplicationObjectMapperFactory. Here you can see an example from the sample application:

+
+
+
+
setBaseClasses(ProductEto.class);
+addSubtypes(new NamedType(MealEto.class, "Meal"), new NamedType(DrinkEto.class, "Drink"),
+  new NamedType(SideDishEto.class, "SideDish"));
+
+
+
+

We use setBaseClasses to register all top-level classes of polymorphic objects. Further we declare all concrete polymorphic sub-classes together with their symbolic name for the JSON format via addSubtypes.

+
+
+
+

Custom Mapping

+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to define a custom mapping. If you create objects dedicated for the JSON mapping you can easily avoid such situations. When this is not suitable follow these instructions to define the mapping:

+
+
+
    +
  1. +

    As an example, the use of JSR354 (javax.money) is appreciated in order to process monetary amounts properly. However, without custom mapping, the default mapping of Jackson will produce the following JSON for a MonetaryAmount:

    +
    +
    +
    "currency": {"defaultFractionDigits":2, "numericCode":978, "currencyCode":"EUR"},
    +"monetaryContext": {...},
    +"number":6.99,
    +"factory": {...}
    +
    +
    +
    +

    As clearly can be seen, the JSON contains too much information and reveals implementation secrets that do not belong here. Instead the JSON output expected and desired would be:

    +
    +
    +
    +
    "currency":"EUR","amount":"6.99"
    +
    +
    +
    +

    Even worse, when we send the JSON data to the server, Jackson will see that MonetaryAmount is an interface and does not know how to instantiate it so the request will fail. +Therefore we need a customized Serializer.

    +
    +
  2. +
  3. +

    We implement MonetaryAmountJsonSerializer to define how a MonetaryAmount is serialized to JSON:

    +
    +
    +
    public final class MonetaryAmountJsonSerializer extends JsonSerializer<MonetaryAmount> {
    +
    +  public static final String NUMBER = "amount";
    +  public static final String CURRENCY = "currency";
    +
    +  public void serialize(MonetaryAmount value, JsonGenerator jgen, SerializerProvider provider) throws ... {
    +    if (value != null) {
    +      jgen.writeStartObject();
    +      jgen.writeFieldName(MonetaryAmountJsonSerializer.CURRENCY);
    +      jgen.writeString(value.getCurrency().getCurrencyCode());
    +      jgen.writeFieldName(MonetaryAmountJsonSerializer.NUMBER);
    +      jgen.writeString(value.getNumber().toString());
    +      jgen.writeEndObject();
    +    }
    +  }
    +
    +
    +
    +

    For composite datatypes it is important to wrap the info as an object (writeStartObject() and writeEndObject()). MonetaryAmount provides the information we need by the getCurrency() and getNumber(). So that we can easily write them into the JSON data.

    +
    +
  4. +
  5. +

    Next, we implement MonetaryAmountJsonDeserializer to define how a MonetaryAmount is deserialized back as Java object from JSON:

    +
    +
    +
    public final class MonetaryAmountJsonDeserializer extends AbstractJsonDeserializer<MonetaryAmount> {
    +  protected MonetaryAmount deserializeNode(JsonNode node) {
    +    BigDecimal number = getRequiredValue(node, MonetaryAmountJsonSerializer.NUMBER, BigDecimal.class);
    +    String currencyCode = getRequiredValue(node, MonetaryAmountJsonSerializer.CURRENCY, String.class);
    +    MonetaryAmount monetaryAmount =
    +        MonetaryAmounts.getAmountFactory().setNumber(number).setCurrency(currencyCode).create();
    +    return monetaryAmount;
    +  }
    +}
    +
    +
    +
    +

    For composite datatypes we extend from AbstractJsonDeserializer as this makes our task easier. So we already get a JsonNode with the parsed payload of our datatype. Based on this API it is easy to retrieve individual fields from the payload without taking care of their order, etc. +AbstractJsonDeserializer also provides methods such as getRequiredValue to read required fields and get them converted to the desired basis datatype. So we can easily read the amount and currency and construct an instance of MonetaryAmount via the official factory API.

    +
    +
  6. +
  7. +

    Finally we need to register our custom (de)serializers with the following configuration code in the constructor of ApplicationObjectMapperFactory:+

    +
  8. +
+
+
+
+
  SimpleModule module = getExtensionModule();
+  module.addDeserializer(MonetaryAmount.class, new MonetaryAmountJsonDeserializer());
+  module.addSerializer(MonetaryAmount.class, new MonetaryAmountJsonSerializer());
+
+
+
+

Now we can read and write MonetaryAmount from and to JSON as expected.

+
+
+ +
+

==REST +REST (REpresentational State Transfer) is an inter-operable protocol for services that is more lightweight than SOAP. +However, it is no real standard and can cause confusion (see REST philosophy). +Therefore we define best practices here to guide you.

+
+
+
+

URLs

+
+

URLs are not case sensitive. Hence, we follow the best practice to use only lower-case-letters-with-hyphen-to-separate-words. +For operations in REST we distinguish the following types of URLs:

+
+
+
    +
  • +

    A collection URL is built from the rest service URL by appending the name of a collection. This is typically the name of an entity. Such URL identifies the entire collection of all elements of this type. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity

    +
  • +
  • +

    An element URL is built from a collection URL by appending an element ID. It identifies a single element (entity) within the collection. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/42

    +
  • +
+
+
+

To follow KISS avoid using plural forms (…​/productmanagement/v1/products vs. …​/productmanagement/v1/product/42). Always use singular forms and avoid confusions (except for the rare cases where no singular exists).

+
+
+

The REST URL scheme fits perfect for CRUD operations. +For business operations (processing, calculation, advanced search, etc.) we simply append a collection URL with the name of the business operation. +Then we can POST the input for the business operation and get the result back. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/search

+
+
+
+

HTTP Methods

+
+

The following table defines the HTTP methods (verbs) and their meaning:

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 10. Usage of HTTP methods
HTTP MethodMeaning

GET

Read data (stateless).

PUT

Create or update data.

POST

Process data.

DELETE

Delete an entity.

+
+

Please also note that for (large) bulk deletions you may be forced to use POST instead of DELETE as according to the HTTP standard DELETE must not have payload and URLs are limited in length.

+
+
+

For general recommendations on HTTP methods for collection and element URLs see REST@wikipedia.

+
+
+
+

HTTP Status Codes

+
+

Further we define how to use the HTTP status codes for REST services properly. In general the 4xx codes correspond to an error on the client side and the 5xx codes to an error on the server side.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 11. Usage of HTTP status codes
HTTP CodeMeaningResponseComment

200

OK

requested result

Result of successful GET

204

No Content

none

Result of successful POST, DELETE, or PUT with empty result (void return)

400

Bad Request

error details

The HTTP request is invalid (parse error, validation failed)

401

Unauthorized

none

Authentication failed

403

Forbidden

none

Authorization failed

404

Not found

none

Either the service URL is wrong or the requested resource does not exist

500

Server Error

error code, UUID

Internal server error occurred, in case of an exception, see REST exception handling

+
+
+

JAX-RS

+
+

For implementing REST services we use the JAX-RS standard. +As payload encoding we recommend JSON bindings using Jackson. +To implement a REST service you simply add JAX-RS annotations. +Here is a simple example:

+
+
+
+
@ApplicationScoped
+@Path("/imagemanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public class ImagemanagementRestService {
+
+  @Inject
+  private Imagemanagement imagemanagement;
+
+  @GET
+  @Path("/image/{id}/")
+  public ImageDto getImage(@PathParam("id") long id) {
+
+    return this.imagemanagement.findImage(id);
+  }
+}
+
+
+
+

Here we can see a REST service for the business component imagemanagement. The method getImage can be accessed via HTTP GET (see @GET) under the URL path imagemanagement/image/{id} (see @Path annotations) where {id} is the ID of the requested table and will be extracted from the URL and provided as parameter id to the method getImage. It will return its result (ImageDto) as JSON (see @Produces annotation - you can also extend RestService marker interface that defines these annotations for JSON). As you can see it delegates to the logic component imagemanagement that contains the actual business logic while the service itself only exposes this logic via HTTP. The REST service implementation is a regular CDI bean that can use dependency injection.

+
+
+ + + + + +
+ + +With JAX-RS it is important to make sure that each service method is annotated with the proper HTTP method (@GET,@POST,etc.) to avoid unnecessary debugging. So you should take care not to forget to specify one of these annotations. +
+
+
+
Service-Interface
+
+

You may also separate API and implementation in case you want to reuse the API for service-client:

+
+
+
+
@Path("/imagemanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public interface ImagemanagementRestService {
+
+  @GET
+  @Path("/image/{id}/")
+  ImageEto getImage(@PathParam("id") long id);
+
+}
+
+@Named("ImagemanagementRestService")
+public class ImagemanagementRestServiceImpl implements ImagemanagementRestService {
+
+  @Override
+  public ImageEto getImage(long id) {
+
+    return this.imagemanagement.findImage(id);
+  }
+
+}
+
+
+
+
+
JAX-RS Configuration
+
+

Starting from CXF 3.0.0 it is possible to enable the auto-discovery of JAX-RS roots.

+
+
+

When the JAX-RS server is instantiated, all the scanned root and provider beans (beans annotated with javax.ws.rs.Path and javax.ws.rs.ext.Provider) are configured.

+
+
+
+
REST Exception Handling
+
+

For exceptions, a service needs to have an exception facade that catches all exceptions and handles them by writing proper log messages and mapping them to a HTTP response with an corresponding HTTP status code. +For this, devon4j provides a generic solution via RestServiceExceptionFacade that you can use within your Spring applications. You need to follow the exception guide in order for it to work out of the box because the facade needs to be able to distinguish between business and technical exceptions. +To implement a generic exception facade in Quarkus, follow the Quarkus exception guide.

+
+
+

Now your service may throw exceptions, but the facade will automatically handle them for you.

+
+
+

The general format for returning an error to the client is as follows:

+
+
+
+
{
+  "message": "A human-readable message describing the error",
+  "code": "A code identifying the concrete error",
+  "uuid": "An identifier (generally the correlation id) to help identify corresponding requests in logs"
+}
+
+
+
+
+
Pagination details
+
+

We recommend to use spring-data repositories for database access that already comes with pagination support. +Therefore, when performing a search, you can include a Pageable object. +Here is a JSON example for it:

+
+
+
+
{ "pageSize": 20, "pageNumber": 0, "sort": [] }
+
+
+
+

By increasing the pageNumber the client can browse and page through the hits.

+
+
+

As a result you will receive a Page. +It is a container for your search results just like a Collection but additionally contains pagination information for the client. +Here is a JSON example:

+
+
+
+
{ "totalElements": 1022,
+  pageable: { "pageSize": 20, "pageNumber": 0 },
+  content: [ ... ] }
+
+
+
+

The totalElements property contains the total number of hits. +This can be used by the client to compute the total number of pages and render the pagination links accordingly. +Via the pageable property the client gets back the Pageable properties from the search request. +The actual hits for the current page are returned as array in the content property.

+
+
+
+
+

REST Testing

+
+

For testing REST services in general consult the testing guide.

+
+
+

For manual testing REST services there are browser plugins:

+
+
+ +
+
+
+

Security

+
+

Your services are the major entry point to your application. Hence security considerations are important here.

+
+
+
CSRF
+
+

A common security threat is CSRF for REST services. Therefore all REST operations that are performing modifications (PUT, POST, DELETE, etc. - all except GET) have to be secured against CSRF attacks. See CSRF how to do this.

+
+
+
+
JSON top-level arrays
+
+

OWASP earlier suggested to never return JSON arrays at the top-level, to prevent attacks without rationale. +We digged deep and found anatomy-of-a-subtle-json-vulnerability. +To sum it up the attack is many years old and does not work in any recent or relevant browser. +Hence it is fine to use arrays as top-level result in a JSON REST service (means you can return List<Foo> in a Java JAX-RS service).

+
+
+ +
+

==SOAP +SOAP is a common protocol for services that is rather complex and heavy. It allows to build inter-operable and well specified services (see WSDL). SOAP is transport neutral what is not only an advantage. We strongly recommend to use HTTPS transport and ignore additional complex standards like WS-Security and use established HTTP-Standards such as RFC2617 (and RFC5280).

+
+
+
+
+

JAX-WS

+
+

For building web-services with Java we use the JAX-WS standard. +There are two approaches:

+
+
+
    +
  • +

    code first

    +
  • +
  • +

    contract first

    +
  • +
+
+
+

Here is an example in case you define a code-first service.

+
+
+
Web-Service Interface
+
+

We define a regular interface to define the API of the service and annotate it with JAX-WS annotations:

+
+
+
+
@WebService
+public interface TablemanagmentWebService {
+
+  @WebMethod
+  @WebResult(name = "message")
+  TableEto getTable(@WebParam(name = "id") String id);
+
+}
+
+
+
+
+
Web-Service Implementation
+
+

And here is a simple implementation of the service:

+
+
+
+
@Named
+@WebService(endpointInterface = "com.devonfw.application.mtsj.tablemanagement.service.api.ws.TablemanagmentWebService")
+public class TablemanagementWebServiceImpl implements TablemanagmentWebService {
+
+  private Tablemanagement tableManagement;
+
+  @Override
+  public TableEto getTable(String id) {
+
+    return this.tableManagement.findTable(id);
+  }
+
+
+
+
+
+

SOAP Custom Mapping

+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to write adapters for JAXB (see XML).

+
+
+
+

SOAP Testing

+
+

For testing SOAP services in general consult the testing guide.

+
+
+

For testing SOAP services manually we strongly recommend SoapUI.

+
+
+ +
+

==Service Client

+
+
+

This guide is about consuming (calling) services from other applications (micro-services). For providing services, see the Service-Layer Guide. Services can be consumed by the client or the server. As the client is typically not written in Java, you should consult the according guide for your client technology. In case you want to call a service within your Java code, this guide is the right place to get help.

+
+
+
+

Motivation

+
+

Various solutions already exist for calling services, such as RestTemplate from spring or the JAX-RS client API. Furthermore, each and every service framework offers its own API as well. These solutions might be suitable for very small and simple projects (with one or two such invocations). However, with the trend of microservices, the invocation of a service becomes a very common use-case that occurs all over the place. You typically need a solution that is very easy to use but supports flexible configuration, adding headers for authentication, mapping of errors from the server, logging success/errors with duration for performance analysis, support for synchronous and asynchronous invocations, etc. This is exactly what this devon4j service-client solution brings to you.

+
+
+
+

Usage

+
+

Spring

+
+
+

For Spring, follow the Spring rest-client guide.

+
+
+

Quarkus

+
+
+

For Quarkus, we recommend to follow the official Quarkus rest-client guide

+
+
+ +
+

==Testing

+
+
+
+

General best practices

+
+

For testing please follow our general best practices:

+
+
+
    +
  • +

    Tests should have a clear goal that should also be documented.

    +
  • +
  • +

    Tests have to be classified into different integration levels.

    +
  • +
  • +

    Tests should follow a clear naming convention.

    +
  • +
  • +

    Automated tests need to properly assert the result of the tested operation(s) in a reliable way. E.g. avoid stuff like assertThat(service.getAllEntities()).hasSize(42) or even worse tests that have no assertion at all.

    +
  • +
  • +

    Tests need to be independent of each other. Never write test-cases or tests (in Java @Test methods) that depend on another test to be executed before.

    +
  • +
  • +

    Use AssertJ to write good readable and maintainable tests that also provide valuable feedback in case a test fails. Do not use legacy JUnit methods like assertEquals anymore!

    +
  • +
  • +

    For easy understanding divide your test in three commented sections:

    +
    +
      +
    • +

      //given

      +
    • +
    • +

      //when

      +
    • +
    • +

      //then

      +
    • +
    +
    +
  • +
  • +

    Plan your tests and test data management properly before implementing.

    +
  • +
  • +

    Instead of having a too strong focus on test coverage better ensure you have covered your critical core functionality properly and review the code including tests.

    +
  • +
  • +

    Test code shall NOT be seen as second class code. You shall consider design, architecture and code-style also for your test code but do not over-engineer it.

    +
  • +
  • +

    Test automation is good but should be considered in relation to cost per use. Creating full coverage via automated system tests can cause a massive amount of test-code that can turn out as a huge maintenance hell. Always consider all aspects including product life-cycle, criticality of use-cases to test, and variability of the aspect to test (e.g. UI, test-data).

    +
  • +
  • +

    Use continuous integration and establish that the entire team wants to have clean builds and running tests.

    +
  • +
  • +

    Prefer delegation over inheritance for cross-cutting testing functionality. Good places to put this kind of code can be realized and reused via the JUnit @Rule mechanism.

    +
  • +
+
+
+
+

Test Automation Technology Stack

+
+

For test automation we use JUnit. However, we are strictly doing all assertions with AssertJ. For mocking we use Mockito. +In order to mock remote connections we use WireMock.

+
+
+

For testing entire components or sub-systems we recommend to use for Spring stack spring-boot-starter-test as lightweight and fast testing infrastructure that is already shipped with devon4j-test. For Quarkus, you can add the necessary extensions manually such as quarkus-junit5, quarkus-junit5-mockito, assertj-core etc.

+
+
+

In case you have to use a full blown JEE application server, we recommend to use arquillian. To get started with arquillian, look here.

+
+
+
+

Test Doubles

+
+

We use test doubles as generic term for mocks, stubs, fakes, dummies, or spys to avoid confusion. Here is a short summary from stubs VS mocks:

+
+
+
    +
  • +

    Dummy objects specifying no logic at all. May declare data in a POJO style to be used as boiler plate code to parameter lists or even influence the control flow towards the test’s needs.

    +
  • +
  • +

    Fake objects actually have working implementations, but usually take some shortcut which makes them not suitable for production (an in memory database is a good example).

    +
  • +
  • +

    Stubs provide canned answers to calls made during the test, usually not responding at all to anything outside what’s programmed in for the test. Stubs may also record information about calls, such as an email gateway stub that remembers the messages it 'sent', or maybe only how many messages it 'sent'.

    +
  • +
  • +

    Mocks are objects pre-programmed with expectations, which form a specification of the calls they are expected to receive.

    +
  • +
+
+
+

We try to give some examples, which should make it somehow clearer:

+
+
+
Stubs
+
+

Best Practices for applications:

+
+
+
    +
  • +

    A good way to replace small to medium large boundary systems, whose impact (e.g. latency) should be ignored during load and performance tests of the application under development.

    +
  • +
  • +

    As the stub implementation will rely on state-based verification, there is the threat that test developers will partially reimplement the state transitions based on the replaced code. This will immediately lead to a black maintenance hole, so better use mocks to assure the certain behavior on the interface level.

    +
  • +
  • +

    Do NOT use stubs as basis of a large amount of test cases as due to state-based verification of stubs, test developers will enrich the stub implementation to become a large monster with its own hunger after maintenance efforts.

    +
  • +
+
+
+
+
Mocks
+
+

Best Practices for applications:

+
+
+
    +
  • +

    Replace not-needed dependencies of your system-under-test (SUT) to minimize the application context to start of your component framework.

    +
  • +
  • +

    Replace dependencies of your SUT to impact the control flow under test without establishing all the context parameters needed to match the control flow.

    +
  • +
  • +

    Remember: Not everything has to be mocked! Especially on lower levels of tests like isolated module tests you can be betrayed into a mocking delusion, where you end up in a hundred lines of code mocking the whole context and five lines executing the test and verifying the mocks behavior. Always keep in mind the benefit-cost ratio, when implementing tests using mocks.

    +
  • +
+
+
+
+
WireMock
+
+

If you need to mock remote connections such as HTTP-Servers, WireMock offers easy to use functionality. For a full description see the homepage or the github repository. WireMock can be used either as a JUnit Rule, in Java outside of JUnit or as a standalone process. The mocked server can be configured to respond to specific requests in a given way via a fluent Java API, JSON files and JSON over HTTP. An example of an integration with JUnit can look as follows.

+
+
+
+
import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig;
+import com.github.tomakehurst.wiremock.junit.WireMockRule;
+
+public class WireMockOfferImport{
+
+  @Rule
+  public WireMockRule mockServer = new WireMockRule(wireMockConfig().dynamicPort());
+
+  @Test
+  public void requestDataTest() throws Exception {
+  int port = this.mockServer.port();
+  ...}
+
+
+
+

This creates a server on a randomly chosen free port on the running machine. You can also specify the port to be used if wanted. Other than that there are several options to further configure the server. This includes HTTPs, proxy settings, file locations, logging and extensions.

+
+
+
+
  @Test
+  public void requestDataTest() throws Exception {
+      this.mockServer.stubFor(get(urlEqualTo("/new/offers")).withHeader("Accept", equalTo("application/json"))
+      .withHeader("Authorization", containing("Basic")).willReturn(aResponse().withStatus(200).withFixedDelay(1000)
+      .withHeader("Content-Type", "application/json").withBodyFile("/wireMockTest/jsonBodyFile.json")));
+  }
+
+
+
+

This will stub the URL localhost:port/new/offers to respond with a status 200 message containing a header (Content-Type: application/json) and a body with content given in jsonBodyFile.json if the request matches several conditions. +It has to be a GET request to ../new/offers with the two given header properties.

+
+
+

Note that by default files are located in src/test/resources/__files/. When using only one WireMock server, one can omit the this.mockServer before the stubFor call (static method). +You can also add a fixed delay to the response or a processing delay with WireMock.addRequestProcessingDelay(time) in order to test for timeouts.

+
+
+

WireMock can also respond with different corrupted messages to simulate faulty behaviour.

+
+
+
+
@Test(expected = ResourceAccessException.class)
+public void faultTest() {
+
+    this.mockServer.stubFor(get(urlEqualTo("/fault")).willReturn(aResponse()
+    .withFault(Fault.MALFORMED_RESPONSE_CHUNK)));
+...}
+
+
+
+

A GET request to ../fault returns an OK status header, then garbage, and then closes the connection.

+
+
+
+
+

Integration Levels

+
+

There are many discussions about the right level of integration for test automation. Sometimes it is better to focus on small, isolated modules of the system - whatever a "module" may be. In other cases it makes more sense to test integrated groups of modules. Because there is no universal answer to this question, devonfw only defines a common terminology for what could be tested. Each project must make its own decision where to put the focus of test automation. There is no worldwide accepted terminology for the integration levels of testing. In general we consider ISTQB. However, with a technical focus on test automation we want to get more precise.

+
+
+

The following picture shows a simplified view of an application based on the devonfw reference architecture. We define four integration levels that are explained in detail below. +The boxes in the picture contain parenthesized numbers. These numbers depict the lowest integration level a box belongs to. Higher integration levels also contain all boxes of lower integration levels. When writing tests for a given integration level, related boxes with a lower integration level must be replaced by test doubles or drivers.

+
+
+
+Integration Levels +
+
+
+

The main difference between the integration levels is the amount of infrastructure needed to test them. The more infrastructure you need, the more bugs you will find, but the more unstable and the slower your tests will be. So each project has to make a trade-off between the pros and cons of including much infrastructure in tests and has to select the integration levels that fit best to the project.

+
+
+

Consider that more infrastructure does not automatically lead to better bug-detection. There may be bugs in your software that are masked by bugs in the infrastructure. The best way to find those bugs is to test with very little infrastructure.

+
+
+

External systems do not belong to any of the integration levels defined here. devonfw does not recommend involving real external systems in test automation. This means, they have to be replaced by test doubles in automated tests. An exception may be external systems that are fully under control of the own development team.

+
+
+

The following chapters describe the four integration levels.

+
+
+
Level 1 Module Test
+
+

The goal of an isolated module test is to provide fast feedback to the developer. Consequently, isolated module tests must not have any interaction with the client, the database, the file system, the network, etc.

+
+
+

An isolated module test is testing a single class or at least a small set of classes in isolation. If such classes depend on other components, external resources, etc., these shall be replaced with a test double.

+
+
+
+
public class MyClassTest extends ModuleTest {
+
+  @Test
+  public void testMyClass() {
+
+    // given
+    MyClass myClass = new MyClass();
+    // when
+    String value = myClass.doSomething();
+    // then
+    assertThat(value).isEqualTo("expected value");
+  }
+
+}
+
+
+
+

For an advanced example see here.

+
+
+
+
Level 2 Component Test
+
+

A component test aims to test components or component parts as a unit. +These tests can access resources such as a database (e.g. for DAO tests). +Further, no remote communication is intended here. Access to external systems shall be replaced by a test double.

+
+
+
    +
  • +

    For Spring stack, they are typically run with a (light-weight) infrastructure such as spring-boot-starter-test. A component-test is illustrated in the following example:

    +
    +
    +
    @SpringBootTest(classes = { MySpringBootApp.class }, webEnvironment = WebEnvironment.NONE)
    +public class UcFindCountryTest extends ComponentTest {
    +  @Inject
    +  private UcFindCountry ucFindCountry;
    +
    +  @Test
    +  public void testFindCountry() {
    +
    +    // given
    +    String countryCode = "de";
    +
    +    // when
    +    TestUtil.login("user", MyAccessControlConfig.FIND_COUNTRY);
    +    CountryEto country = this.ucFindCountry.findCountry(countryCode);
    +
    +    // then
    +    assertThat(country).isNotNull();
    +    assertThat(country.getCountryCode()).isEqualTo(countryCode);
    +    assertThat(country.getName()).isEqualTo("Germany");
    +  }
    +}
    +
    +
    +
    +

    This test will start the entire spring-context of your app (MySpringBootApp). Within the test spring will inject according spring-beans into all your fields annotated with @Inject. In the test methods you can use these spring-beans and perform your actual tests. This pattern can be used for testing DAOs/Repositories, Use-Cases, or any other spring-bean with its entire configuration including database and transactions.

    +
    +
  • +
  • +

    For Quarkus, you can similarly inject the CDI beans and perform tests. An example is shown below:

    +
    +
    +
    @QuarkusTest
    +public class UcFindCountryTest {
    +  @Inject
    +  private UcFindCountry ucFindCountry;
    +  ...
    +
    +
    +
  • +
+
+
+

When you are testing use-cases, your authorization will also be in place. Therefore, you have to simulate a logon in advance, which is done via the login method in the above Spring example. The test-infrastructure will automatically do a logout for you after each test method in doTearDown.

+
+
+
+
Level 3 Subsystem Test
+
+

A subsystem test runs against the external interfaces (e.g. HTTP service) of the integrated subsystem. Subsystem tests of the client subsystem are described in the devon4ng testing guide. In devon4j the server (JEE application) is the subsystem under test. The tests act as a client (e.g. service consumer) and the server has to be integrated and started in a container.

+
+
+
    +
  • +

    With devon4j and Spring you can write a subsystem-test as easy as illustrated in the following example:

    +
    +
    +
    @SpringBootTest(classes = { MySpringBootApp.class }, webEnvironment = WebEnvironment.RANDOM_PORT)
    +public class CountryRestServiceTest extends SubsystemTest {
    +
    +  @Inject
    +  private ServiceClientFactory serviceClientFactory;
    +
    +  @Test
    +  public void testFindCountry() {
    +
    +    // given
    +    String countryCode = "de";
    +
    +    // when
    +    CountryRestService service = this.serviceClientFactory.create(CountryRestService.class);
    +    CountryEto country = service.findCountry(countryCode);
    +
    +    // then
    +    assertThat(country).isNotNull();
    +    assertThat(country.getCountryCode()).isEqualTo(countryCode);
    +    assertThat(country.getName()).isEqualTo("Germany");
    +  }
    +}
    +
    +
    +
    +

    Even though not obvious on the first look this test will start your entire application as a server on a free random port (so that it works in CI with parallel builds for different branches) and tests the invocation of a (REST) service including (un)marshalling of data (e.g. as JSON) and transport via HTTP (all in the invocation of the findCountry method).

    +
    +
  • +
+
+
+

Do not confuse a subsystem test with a system integration test. A system integration test validates the interaction of several systems where we do not recommend test automation.

+
+
+
+
Level 4 System Test
+
+

A system test has the goal to test the system as a whole against its official interfaces such as its UI or batches. The system itself runs as a separate process in a way close to a regular deployment. Only external systems are simulated by test doubles.

+
+
+

The devonfw only gives advice for automated system test (TODO see allure testing framework). In nearly every project there must be manual system tests, too. This manual system tests are out of scope here.

+
+
+
+
Classifying Integration-Levels
+
+

For Spring stack, devon4j defines Category-Interfaces that shall be used as JUnit Categories. +Also devon4j provides abstract base classes that you may extend in your test-cases if you like.

+
+
+

devon4j further pre-configures the maven build to only run integration levels 1-2 by default (e.g. for fast feedback in continuous integration). It offers the profiles subsystemtest (1-3) and systemtest (1-4). In your nightly build you can simply add -Psystemtest to run all tests.

+
+
+
+
+

Implementation

+
+

This section introduces how to implement tests on the different levels with the given devonfw infrastructure and the proposed frameworks. +For Spring, see Spring Test Implementation

+
+
+
+

Regression testing

+
+

When it comes to complex output (even binary) that you want to regression test by comparing with an expected result, you should consider Approval Tests using ApprovalTests.Java. +If applied to the right problems, it can be very helpful.

+
+
+
+

Deployment Pipeline

+
+

A deployment pipeline is a semi-automated process that gets software-changes from version control into production. It contains several validation steps, e.g. automated tests of all integration levels. +Because devon4j should fit to different project types - from agile to waterfall - it does not define a standard deployment pipeline. But we recommend to define such a deployment pipeline explicitly for each project and to find the right place in it for each type of test.

+
+
+

For that purpose, it is advisable to have fast running test suite that gives as much confidence as possible without needing too much time and too much infrastructure. This test suite should run in an early stage of your deployment pipeline. Maybe the developer should run it even before he/she checked in the code. Usually lower integration levels are more suitable for this test suite than higher integration levels.

+
+
+

Note, that the deployment pipeline always should contain manual validation steps, at least manual acceptance testing. There also may be manual validation steps that have to be executed for special changes only, e.g. usability testing. Management and execution processes of those manual validation steps are currently not in the scope of devonfw.

+
+
+
+

Test Coverage

+
+

We are using tools (SonarQube/Jacoco) to measure the coverage of the tests. Please always keep in mind that the only reliable message of a code coverage of X% is that (100-X)% of the code is entirely untested. It does not say anything about the quality of the tests or the software though it often relates to it.

+
+
+
+

Test Configuration

+
+

This section covers test configuration in general without focusing on integration levels as in the first chapter.

+
+
+ +
+
+
Configure Test Specific Beans
+
+

Sometimes it can become handy to provide other or differently configured bean implementations via CDI than those available in production. For example, when creating beans using @Bean-annotated methods they are usually configured within those methods. WebSecurityBeansConfig shows an example of such methods.

+
+
+
+
@Configuration
+public class WebSecurityBeansConfig {
+  //...
+  @Bean
+  public AccessControlSchemaProvider accessControlSchemaProvider() {
+    // actually no additional configuration is shown here
+    return new AccessControlSchemaProviderImpl();
+  }
+  //...
+}
+
+
+
+

AccessControlSchemaProvider allows you to programmatically access data defined in some XML file, e.g. access-control-schema.xml. Now, one can imagine that it would be helpful if AccessControlSchemaProvider would point to some other file than the default within a test class. That file could provide content that differs from the default. +The question is: how can I change the resource path of AccessControlSchemaProviderImpl within a test?

+
+
+

One very helpful solution is to use static inner classes. +Static inner classes can contain @Bean -annotated methods, and by placing them in the classes parameter in @SpringBootTest(classes = { /* place class here*/ }) annotation the beans returned by these methods are placed in the application context during test execution. Combining this feature with inheritance allows to override methods defined in other configuration classes as shown in the following listing where TempWebSecurityConfig extends WebSecurityBeansConfig. This relationship allows to override public AccessControlSchemaProvider accessControlSchemaProvider(). Here we are able to configure the instance of type AccessControlSchemaProviderImpl before returning it (and, of course, we could also have used a completely different implementation of the AccessControlSchemaProvider interface). By overriding the method the implementation of the super class is ignored, hence, only the new implementation is called at runtime. Other methods defined in WebSecurityBeansConfig which are not overridden by the subclass are still dispatched to WebSecurityBeansConfig.

+
+
+
+
//... Other testing related annotations
+@SpringBootTest(classes = { TempWebSecurityConfig.class })
+public class SomeTestClass {
+
+  public static class TempWebSecurityConfig extends WebSecurityBeansConfig {
+
+    @Override
+    @Bean
+    public AccessControlSchemaProvider accessControlSchemaProvider() {
+
+      ClassPathResource resource = new ClassPathResource(locationPrefix + "access-control-schema3.xml");
+      AccessControlSchemaProviderImpl accessControlSchemaProvider = new AccessControlSchemaProviderImpl();
+      accessControlSchemaProvider.setAccessControlSchema(resource);
+      return accessControlSchemaProvider;
+    }
+  }
+}
+
+
+
+

The following chapter of the Spring framework documentation explains the issue, but uses a slightly different way to obtain the configuration.

+
+
+
+
Test Data
+
+

It is possible to obtain test data in two different ways depending on your test’s integration level.

+
+
+
+
+

Debugging Tests

+
+

The following two sections describe two debugging approaches for tests. Tests are either run from within the IDE or from the command line using Maven.

+
+
+
Debugging with the IDE
+
+

Debugging with the IDE is as easy as always. Even if you want to execute a SubsystemTest which needs a Spring context and a server infrastructure to run properly, you just set your breakpoints and click on Debug As → JUnit Test. The test infrastructure will take care of initializing the necessary infrastructure - if everything is configured properly.

+
+
+
+
Debugging with Maven
+
+

Please refer to the following two links to find a guide for debugging tests when running them from Maven.

+
+ +
+

In essence, you first have to execute a test using the command line. Maven will halt just before the test execution and wait for your IDE to connect to the process. When receiving a connection, the test will start and then pause at any breakpoint set in advance. +The first link states that tests are started through the following command:

+
+
+
+
mvn -Dmaven.surefire.debug test
+
+
+
+

Although this is correct, it will run every test class in your project and - which is time consuming and mostly unnecessary - halt before each of these tests. +To counter this problem you can simply execute a single test class through the following command (here we execute the TablemanagementRestServiceTest from the restaurant sample application):

+
+
+
+
mvn test -Dmaven.surefire.debug test -Dtest=TablemanagementRestServiceTest
+
+
+
+

It is important to notice that you first have to execute the Maven command in the according submodule, e.g. to execute the TablemanagementRestServiceTest you have first to navigate to the core module’s directory.

+
+
+ +
+

==Transfer-Objects

+
+
+

The technical data model is defined in form of persistent entities. +However, passing persistent entities via call-by-reference across the entire application will soon cause problems:

+
+
+
    +
  • +

    Changes to a persistent entity are directly written back to the persistent store when the transaction is committed. When the entity is sent across the application, changes also tend to take place in multiple places, endangering data sovereignty and leading to inconsistency.

    +
  • +
  • +

    You want to send and receive data via services across the network and have to define what section of your data is actually transferred. If you have relations in your technical model you quickly end up loading and transferring way too much data.

    +
  • +
  • +

    Modifications to your technical data model shall not automatically have impact on your external services causing incompatibilities.

    +
  • +
+
+
+

To prevent such problems transfer-objects are used leading to a call-by-value model and decoupling changes to persistent entities.

+
+
+

In the following sections the different types of transfer-objects are explained. +You will find all according naming-conventions in the architecture-mapping

+
+
+

To structure your transfer objects, we recommend the following approaches:

+
+
+ +
+
+

Also considering the following transfer objects in specific cases:

+
+
+
+
SearchCriteriaTo
+
+

For searching we create or generate a «BusinessObject»SearchCriteriaTo representing a query to find instances of «BusinessObject».

+
+
TO
+
+

There are typically transfer-objects for data that is never persistent. +For very generic cases these just carry the suffix To.

+
+
STO
+
+

We can potentially create separate service transfer objects (STO) (if possible named «BusinessObject»Sto) to keep the service API stable and independent of the actual data-model. +However, we usually do not need this and want to keep our architecture simple. +Only create STOs if you need service versioning and support previous APIs or to provide legacy service technologies that require their own isolated data-model. +In such a case you also need bean-mapping between STOs and ETOs/DTOs, which means extra effort and complexity that should be avoided.

+
+
+
+
+ +
+

==Bean-Mapping

+
+
+

For decoupling, you sometimes need to create separate objects (beans) for a different view. E.g. for an external service, you will use a transfer-object instead of the persistence entity so internal changes to the entity do not implicitly change or break the service.

+
+
+

Therefore you need to map similar objects, which creates a copy. This also has the benefit that modifications to the copy have no side-effect on the original source object. However, to implement such mapping code by hand is very tedious and error-prone (if new properties are added to beans but not to mapping code):

+
+
+
+
public UserEto mapUser(UserEntity source) {
+  UserEto target = new UserEto();
+  target.setUsername(source.getUsername());
+  target.setEmail(source.getEmail());
+  ...
+  return target;
+}
+
+
+
+

Therefore we are using a BeanMapper for this purpose that makes our lives a lot easier. +There are several bean mapping frameworks with different approaches.

+
+
+

For a devon4j-spring application we recommend Orika, follow Spring Bean-Mapping for an introduction to Orika and Dozer in a devon4j-spring context application.

+
+
+ + + + + +
+ + +devon4j started with Dozer as framework for Spring applications and still supports it. However, we now recommend Orika (for new projects) as it is much faster (see Performance of Java Mapping Frameworks). +
+
+
+

For a Quarkus application we recommend Mapstruct, follow Quarkus Bean-Mapping for an introduction to Mapstruct in a quarkus context application.

+
+
+ +
+

==Datatypes

+
+
+
+
+

A datatype is an object representing a value of a specific type with the following aspects:

+
+
+
    +
  • +

    It has a technical or business specific semantic.

    +
  • +
  • +

    Its JavaDoc explains the meaning and semantic of the value.

    +
  • +
  • +

    It is immutable and therefore stateless (its value assigned at construction time and can not be modified).

    +
  • +
  • +

    It is serializable.

    +
  • +
  • +

    It properly implements #equals(Object) and #hashCode() (two different instances with the same value are equal and have the same hash).

    +
  • +
  • +

    It shall ensure syntactical validation so it is NOT possible to create an instance with an invalid value.

    +
  • +
  • +

    It is responsible for formatting its value to a string representation suitable for sinks such as UI, loggers, etc. Also consider cases like a Datatype representing a password where toString() should return something like "**" instead of the actual password to prevent security accidents.

    +
  • +
  • +

    It is responsible for parsing the value from other representations such as a string (as needed).

    +
  • +
  • +

    It shall provide required logical operations on the value to prevent redundancies. Due to the immutable attribute all manipulative operations have to return a new Datatype instance (see e.g. BigDecimal.add(java.math.BigDecimal)).

    +
  • +
  • +

    It should implement Comparable if a natural order is defined.

    +
  • +
+
+
+

Based on the Datatype a presentation layer can decide how to view and how to edit the value. Therefore a structured data model should make use of custom datatypes in order to be expressive. +Common generic datatypes are String, Boolean, Number and its subclasses, Currency, etc. +Please note that both Date and Calendar are mutable and have very confusing APIs. Therefore, use JSR-310 or jodatime instead. +Even if a datatype is technically nothing but a String or a Number but logically something special it is worth to define it as a dedicated datatype class already for the purpose of having a central javadoc to explain it. On the other side avoid to introduce technical datatypes like String32 for a String with a maximum length of 32 characters as this is not adding value in the sense of a real Datatype. +It is suitable and in most cases also recommended to use the class implementing the datatype as API omitting a dedicated interface.

+
+
+
+— mmm project
+datatype javadoc +
+
+ +
+
+
+

Datatype Packaging

+
+

For the devonfw we use a common packaging schema. +The specifics for datatypes are as following:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
SegmentValueExplanation

<component>

*

Here we use the (business) component defining the datatype or general for generic datatypes.

<layer>

common

Datatypes are used across all layers and are not assigned to a dedicated layer.

<scope>

api

Datatypes are always used directly as API even though they may contain (simple) implementation logic. Most datatypes are simple wrappers for generic Java types (e.g. String) but make these explicit and might add some validation.

+
+
+

Technical Concerns

+
+

Many technologies like Dozer and QueryDSL’s (alias API) are heavily based on reflection. For them to work properly with custom datatypes, the frameworks must be able to instantiate custom datatypes with no-argument constructors. It is therefore recommended to implement a no-argument constructor for each datatype of at least protected visibility.

+
+
+
+

Datatypes in Entities

+
+

The usage of custom datatypes in entities is explained in the persistence layer guide.

+
+
+
+

Datatypes in Transfer-Objects

+
+
XML
+
+

For mapping datatypes with JAXB see XML guide.

+
+
+
+
JSON
+
+

For mapping datatypes from and to JSON see JSON custom mapping.

+
+
+ +
+

==Accessibility

+
+
+

TODO

+
+ + + +
+ +
+

==CORS support

+
+
+

When you are developing the Javascript client and the server application separately, you have to deal with cross-domain issues. We have to send requests from an origin domain distinct from the target domain, and the browser does not allow this by default.

+
+
+

So, we need to prepare the server side to accept requests from other domains. We need to cover the following points:

+
+
+
    +
  • +

    Accept request from other domains.

    +
  • +
  • +

    Accept devonfw used headers like X-CSRF-TOKEN or correlationId.

    +
  • +
  • +

    Be prepared to receive secured request (cookies).

    +
  • +
+
+
+

It is important to note that if you are using security in your request (sending cookies) you have to set withCredentials flag to true in your client side request and deal with special IE8 characteristics.

+
+
+

For more information about CORS see here. Information about the CORS headers can be found here.

+
+
+
+
+

Configuring CORS support

+
+

To enable CORS support for your application, see the advanced guides. For Spring applications see here. For Quarkus follow the official Quarkus guide.

+
+
+
+

Configuration with service mesh

+
+

If you are using a service mesh, you can also define your CORS policy directly there. Here is an example from Istio.

+
+
+ +
+

==BLOB support

+
+
+

BLOB stands for Binary Large Object. A BLOB may be an image, an office document, a ZIP archive or any other multimedia object. +Often these BLOBs are large. If this is the case, you need to take care that you do not copy all the BLOB data into your application heap, e.g. when providing them via a REST service. +This could easily lead to performance problems or out-of-memory errors. +A solution for that problem is "streaming" those BLOBs directly from the database to the client. To demonstrate how this can be accomplished, devonfw provides an example.

+
+
+
+

Further Reading

+ +
+ +
+

==Java Development Kit

+
+
+

The Java Development Kit is an implementation of the Java platform. It provides the Java Virtual Machine (JVM) and the Java Runtime Environment (JRE).

+
+
+
+

Editions

+
+

The JDK exists in different editions:

+
+
+ +
+
+

As Java is evolving and also complex, maintaining a JVM requires a lot of energy. +Therefore many alternative JDK editions are unable to cope with this and to support the latest Java versions with according compatibility. +Unfortunately OpenJDK only maintains a specific version of Java for a relatively short period of time before moving to the next major version. +In the end, this technically means that OpenJDK is a continuous beta and can not be used in production for reasonable software projects. +As OracleJDK changed its licensing model and can not be used for commercial usage even during development, things can get tricky. +You may want to use OpenJDK for development and OracleJDK only in production. +However, e.g. OpenJDK 11 never released a version that is stable enough for reasonable development (e.g. the javadoc tool is broken and fixes are not available for OpenJDK 11 - fixed in 11.0.3, which is only available as OracleJDK 11, or you need to go to OpenJDK 12+, which has other bugs), so in the end there is no working release of OpenJDK 11. +This more or less forces you to use OracleJDK, which requires you to buy a subscription so you can use it for commercial development. +However, there is AdoptOpenJDK that provides forked releases of OpenJDK with bug-fixes, which might be an option. +Anyhow, as you want to have your development environment close to production, the productively used JDK (most likely OracleJDK) should be preferred also for development.

+
+
+
+

Upgrading

+
+

Until Java 8 compatibility was one of the key aspects for Java version updates (after the mess on the Swing updates with Java2 many years ago). +However, Java 9 introduced a lot of breaking changes. +This documentation wants to share the experience we collected in devonfw when upgrading from Java 8 to newer versions. +First of all we separate runtime changes that you need if you want to build your software with JDK 8 but such that it can also run on newer versions (e.g. JRE 11) +from changes required to also build your software with more recent JDKs (e.g. JDK 11 or 12).

+
+
+
Runtime Changes
+
+

This section describes required changes to your software in order to make it run also with versions newer than Java 8.

+
+
+
Classes removed from JDK
+
+

The first thing that most users hit when running their software with newer Java versions is a ClassNotFoundException like this:

+
+
+
+
Caused by: java.lang.ClassNotFoundException: javax.xml.bind.JAXBException
+
+
+
+

As Java 9 introduced a module system with Jigsaw, the JDK that has been a monolithic mess is now a well-defined set of structured modules. +Some of the classes that used to come with the JDK moved to modules that were not available by default in Java 9 and have even been removed entirely in later versions of Java. +Therefore you should simply treat such code just like any other 3rd party component that you can add as a (maven) dependency. +The following table gives you the required hints to make your software work even with such classes / modules removed from the JDK (please note that the specified version is just a suggestion that worked, feel free to pick a more recent or more appropriate version):

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 12. Dependencies for classes removed from Java 8 since 9+
ClassGroupIdArtifactIdVersion

javax.xml.bind.*

javax.xml.bind

jaxb-api

2.3.1

com.sun.xml.bind.*

org.glassfish.jaxb

jaxb-runtime

2.3.1

java.activation.*

javax.activation

javax.activation-api

1.2.0

java.transaction.*

javax.transaction

javax.transaction-api

1.2

java.xml.ws.*

javax.xml.ws

jaxws-api

2.3.1

javax.jws.*

javax.jws

javax.jws-api

1.1

javax.annotation.*

javax.annotation

javax.annotation-api

1.3.2

+
+
+
3rd Party Updates
+
+

Further, internal and unofficial APIs (e.g. sun.misc.Unsafe) have been removed. +These are typically not used by your software directly but by low-level 3rd party libraries like asm that need to be updated. +Also simple things like the Java version have changed (from 1.8.x to 9.x, 10.x, 11.x, 12.x, etc.). +Some 3rd party libraries were parsing the Java version in a very naive way making them unable to be used with Java 9+:

+
+
+
+
Caused by: java.lang.NullPointerException
+   at org.apache.maven.surefire.shade.org.apache.commons.lang3.SystemUtils.isJavaVersionAtLeast (SystemUtils.java:1626)
+
+
+
+

Therefore the following table gives an overview of common 3rd party libraries that have been affected by such breaking changes and need to be updated to at least the specified version:

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 13. Minimum recommended versions of common 3rd party for Java 9+
GroupIdArtifactIdVersionIssue

org.apache.commons

commons-lang3

3.7

LANG-1365

cglib

cglib

3.2.9

102, 93, 133

org.ow2.asm

asm

7.1

2941

org.javassist

javassist

3.25.0-GA

194, 228, 246, 171

+
+
+
ResourceBundles
+
+

For internationalization (i18n) and localization (l10n) ResourceBundle is used for language and country specific texts and configurations as properties (e.g. MyResourceBundle_de.properties). With Java modules there are changes and impacts you need to know to get things working. The most important change is documented in the JavaDoc of ResourceBundle. However, instead of using ResourceBundleProvider and refactoring your entire code causing incompatibilities, you can simply put the resource bundles in a regular JAR on the classpath rather than a named module (or into the launching app). +If you want to implement (new) Java modules with i18n support, you can have a look at mmm-nls.

+
+
+
+
+
Buildtime Changes
+
+

If you also want to change your build to work with a recent JDK you also need to ensure that test frameworks and maven plugins properly support this.

+
+
+
Findbugs
+
+

Findbugs does not work with Java 9+ and is actually a dead project. +The new findbugs is SpotBugs. +For maven the new solution is spotbugs-maven-plugin:

+
+
+
+
<plugin>
+  <groupId>com.github.spotbugs</groupId>
+  <artifactId>spotbugs-maven-plugin</artifactId>
+  <version>3.1.11</version>
+</plugin>
+
+
+
+
+
Test Frameworks
+ + ++++++ + + + + + + + + + + + + + + + + +
Table 14. Minimum recommended versions of common 3rd party test frameworks for Java 9+
GroupIdArtifactIdVersionIssue

org.mockito

mockito-core

2.23.4

1419, 1696, 1607, 1594, 1577, 1482

+
+
+
Maven Plugins
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 15. Minimum recommended versions of common maven plugins for Java 9+
GroupIdArtifactId(min.) VersionIssue

org.apache.maven.plugins

maven-compiler-plugin

3.8.1

x

org.apache.maven.plugins

maven-surefire-plugin

2.22.2

SUREFIRE-1439

org.apache.maven.plugins

maven-surefire-report-plugin

2.22.2

SUREFIRE-1439

org.apache.maven.plugins

maven-archetype-plugin

3.1.0

x

org.apache.maven.plugins

maven-javadoc-plugin

3.1.0

x

org.jacoco

jacoco-maven-plugin

0.8.3

663

+
+
+
Maven Usage
+
+

With Java modules you can not run Javadoc standalone anymore or you will get this error when running mvn javadoc:javadoc:

+
+
+
+
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-javadoc-plugin:3.1.1:javadoc (default-cli) on project mmm-base: An error has occurred in Javadoc report generation:
+[ERROR] Exit code: 1 - error: module not found: io.github.mmm.base
+[ERROR]
+[ERROR] Command line was: /projects/mmm/software/java/bin/javadoc @options @packages @argfile
+
+
+
+

As a solution or workaround you need to include the compile goal into your build lifecycle so the module-path is properly configured:

+
+
+
+
mvn compile javadoc:javadoc
+
+
+
+
+
+
+ +
+

We want to give credits and say thanks to the following articles that have been there before and helped us on our way:

+
+ +
+
+
+
+

Tutorials

+
+ +
+

==Creating a new application

+
+
+

Running the archetype

+
+

In order to create a new application you must use the archetype provided by devon4j which uses the maven archetype functionality.

+
+
+

To create a new application, you should have installed devonfw IDE. Follow the devon ide documentation to install +it. +You can choose between 2 alternatives: create it from the command line or, in a more visual manner, within Eclipse.

+
+
+
From command Line
+
+

To create a new devon4j application from command line, you can simply run the following command:

+
+
+
+
devon java create com.example.application.sampleapp
+
+
+
+

For low-level creation you can also manually call this command:

+
+
+
+
mvn -DarchetypeVersion=${devon4j.version} -DarchetypeGroupId=com.devonfw.java.templates -DarchetypeArtifactId=devon4j-template-server archetype:generate -DgroupId=com.example.application -DartifactId=sampleapp -Dversion=1.0.0-SNAPSHOT -Dpackage=com.devonfw.application.sampleapp
+
+
+
+

Attention: The archetypeVersion (first argument) should be set to the latest version of devon4j. You can easily determine the version from this badge: +latest devon4j version

+
+
+

Further providing additional properties (using -D parameter) you can customize the generated app:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 16. Options for app template
propertycommentexample

dbType

Choose the type of RDBMS to use (hana, oracle, mssql, postgresql, mariadb, mysql, etc.)

-DdbType=postgresql

batch

Option to add a batch module

-Dbatch=batch

+
+
+
From Eclipse
+
+
+
+After that, you should follow these Eclipse steps to create your application:
+
+
+
+
    +
  • +

    Create a new Maven Project.

    +
  • +
  • +

    Choose the devon4j-template-server archetype, just like the image.

    +
  • +
+
+
+
+Select archetype +
+
+
+
    +
  • +

    Fill the Group Id, Artifact Id, Version and Package for your project.

    +
  • +
+
+
+
+Configure archetype +
+
+
+
    +
  • +

    Finish the Eclipse assistant and you are ready to start your project.

    +
  • +
+
+
+
+
+

What is generated

+
+

The application template (archetype) generates a Maven multi-module project. It has the following modules:

+
+
+
    +
  • +

    api: module with the API (REST service interfaces, transferobjects, datatypes, etc.) to be imported by other apps as a maven dependency in order to invoke and consume the offered (micro)services.

    +
  • +
  • +

    core: maven module containing the core of the application.

    +
  • +
  • +

    batch: optional module for batch(es)

    +
  • +
  • +

    server: module that bundles the entire app (core with optional batch) as a WAR file.

    +
  • +
+
+
+

The toplevel pom.xml of the generated project has the following features:

+
+
+
    +
  • +

    Properties definition: Spring-boot version, Java version, etc.

    +
  • +
  • +

    Modules definition for the modules (described above)

    +
  • +
  • +

    Dependency management: define versions for dependencies of the technology stack that are recommended and work together in a compatible way.

    +
  • +
  • +

    Maven plugins with desired versions and configuration

    +
  • +
  • +

    Profiles for test stages

    +
  • +
+
+
+
+

How to run your app

+
+
Run app from IDE
+
+

To run your application from your favourite IDE, simply launch SpringBootApp as java application.

+
+
+
+
Run app as bootified jar or war
+
+

More details are available here.

+
+
+
+
+
+
+
+
+1. Whether to use checked exceptions or not is a controversial topic. Arguments for both sides can be found under The Trouble with Checked Exceptions, Unchecked Exceptions — The Controversy, and Checked Exceptions are Evil. The arguments in favor of unchecked exceptions tend to prevail for applications built with devon4j. Therefore, unchecked exceptions should be used for a consistent style. +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/performance-comparision-spring-quarkus.html b/docs/devonfw.github.io/1.0/devon4j.wiki/performance-comparision-spring-quarkus.html new file mode 100644 index 00000000..fedbbdd1 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/performance-comparision-spring-quarkus.html @@ -0,0 +1,407 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Performance comparison between Spring and Quarkus

+
+
+

Quarkus offers a big advantage in resource consumption compared to a Spring application. Especially in native mode, the memory footprint of a Quarkus application is extremely low, which can be a deciding factor in real-world environments. +The tables performance comparison application 1 and performance comparison application 2, which show the startup and memory consumption of two applications that are similar in their Quarkus and Spring implementations, illustrate this point. Application 1 is more complex in scope than Application 2 and uses more dependencies. +The listings above the tables show the functions/extensions of the applications and the lines of code (only java files).

+
+
+
Application 1:
+
    +
  • +

    LOC (without automatically generated classes)

    +
    +
      +
    • +

      Quarkus: ~4600

      +
    • +
    • +

      Spring: ~7700 (separated into api and core module, as described for the classic project structure; api: ~3800, core: 3900)

      +
    • +
    +
    +
  • +
  • +

    Features

    +
    +
      +
    • +

      3 entities

      +
    • +
    • +

      REST service

      +
    • +
    • +

      Connection to a Postgres database (using Spring Data JPA and QueryDSL for the repository implementation)

      +
    • +
    • +

      Flyway for database migration

      +
    • +
    • +

      Kafka for asynchronous messaging

      +
    • +
    • +

      Avro for data serialization combined with a schema registry

      +
    • +
    +
    +
  • +
+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + +
Table 1. performance comparison application 1

Spring

Quarkus JVM Mode

Quarkus Native Mode

startup time (time until first response)

~35 seconds (+/- 1s)

~4,7 - 5,2 seconds

~0,9 seconds

memory usage

~850 - 900 MB

~550 MB

~190 MB

+
+
+
Application 2:
+
    +
  • +

    LOC

    +
    +
      +
    • +

      Quarkus: ~300

      +
    • +
    • +

      Spring: ~ 280

      +
    • +
    +
    +
  • +
  • +

    Features

    +
    +
      +
    • +

      1 entity

      +
    • +
    • +

      REST service with Postgres database connection

      +
    • +
    +
    +
  • +
+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + +
Table 2. performance comparison application 2

Spring

Quarkus JVM Mode

Quarkus Native Mode

startup time (time until first response)

~9 - 10 seconds

~3,9 seconds

~0,9 seconds

memory usage

~810 MB

~460 MB

~90 MB

+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus.html b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus.html new file mode 100644 index 00000000..f27c0040 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus.html @@ -0,0 +1,361 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Quarkus

+
+
+

Quarkus is a Java framework for building cloud-native apps. +It is fully supported by devonfw as an option and alternative to spring. +Additional things like extensions will be available on the devon4quarkus GitHub repository.

+
+
+

Guide to the Reader

+
+
+

Depending on your intention of reading this document, you might be more interested in the following chapters:

+
+
+
    +
  • +

    If you are completely new to Quarkus, you may be interested in the pros and cons of Quarkus. Also, take a look at the official Quarkus website. You might also be interested in the features that GraalVM offers.

    +
  • +
  • +

    If you are new to devon4j, take a look at devon4j’s recommendations on general best practices. Check out the chapters on architecture design, project structuring, and coding conventions. Follow the referenced links to explore a topic in more depth.

    +
  • +
  • +

    If you are an experienced Spring developer and want to get in touch with Quarkus, read our Getting started with Quarkus for Spring developers guide.

    +
  • +
  • +

    If you’re looking to build your first Quarkus application, the Quarkus website offers some good getting started guides. Also, check out our Quarkus template guide, which gives you some recommendations on extensions and frameworks to use. It also provides some links to the Quarkus code generator with preselected configurations you can use to create your application.

    +
  • +
  • +

    If you want to have a Quarkus sample application using devon4j recommendations, check out our Quarkus reference application.

    +
  • +
  • +

    If you have a Spring application and want to migrate it to Quarkus, take a look at our migration guide.

    +
  • +
  • +

    If you already have some experience with devon4j and Quarkus and need more information on a specific topic, check out our Quarkus guides. If you don’t find what you are looking for there, check out the general section. devon4j uses general solutions for Java, so solutions for both Quarkus and Spring are documented there.

    +
  • +
  • +

    If you want to learn how to build native images, check out this guide.

    +
  • +
+
+
+
+
+

Pros

+
+
+

Quarkus offers the following benefits:

+
+
+
    +
  • +

    fast turn-around cycles for developers
    +Save changes in your Java code and immediately test the results without restarting or waiting

    +
  • +
  • +

    faster start-up and less memory footprint
    +When building your app as native-images via GraalVM, it gets highly optimized. As a result, it starts up lightning fast and consumes much less memory. This is a great advantage for cloud deployment as well as for sustainability. You can find a performance comparison between Spring and Quarkus here.

    +
  • +
  • +

    clean and lean +As quarkus was born as a cloud-native framework, it is very light-weight and does not carry much history and legacy.

    +
  • +
+
+
+
+
+

Cons

+
+
+

Quarkus has the following drawbacks:

+
+
+
    +
  • +

    less flexible
    +Quarkus is less flexible compared to spring, or in other words, it is more biased and coupled to specific implementations. However, the implementations work and you have fewer things to choose and worry about. Nevertheless, in case you want to integrate a specific or custom library, you may hit limitations or lose support for native-images, especially when that library is based on reflection. Therefore, check your requirements and technology stack early on when making your choice.

    +
  • +
  • +

    less established
    +Since quarkus was born in 2019, it is modern but also less established. It will be easier to get developers for spring, but we already consider quarkus mature and established enough for building production-ready apps.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/getting-started-for-spring-developers.html b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/getting-started-for-spring-developers.html new file mode 100644 index 00000000..3246f998 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/getting-started-for-spring-developers.html @@ -0,0 +1,467 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Getting started with Quarkus for Spring developers

+
+
+

As a Spring developer, you have heard more and more about Quarkus: its pros and cons, its fast growth etc. So, you decided to adopt/try Quarkus for your (next) project(s) and are wondering where to go next and what you need to pay attention to when moving from Spring to Quarkus.

+
+
+

This guide tries to address this exact concern. In the following, we will present you some main points you should be aware of when starting to develop with Quarkus, along with some useful sources.

+
+
+
    +
  1. +

    Quarkus is a fairly new Java toolkit. Nevertheless, it is very well documented. It also provides a set of well-written technical guides that are a good starting point to get in touch and make the first steps with Quarkus. See here. It is an Open Source project licensed under the Apache License version 2.0. The source code is hosted on GitHub. If you have any questions or concerns, don’t hesitate to reach out to the Quarkus community.

    +
  2. +
  3. +

    Same as Spring Initializr, you can go to code.quarkus.io to create a new application. Also, check out our Template Quarkus Guide to see our recommendations on certain topics.

    +
  4. +
  5. +

    In Spring stack, we recommend structuring your application into multiple modules, known as our classic structure. Moving to Quarkus and the world of cloud-native microservices, where we build smaller applications compared to monoliths, we recommend keeping everything top-level and simple. Therefore, we propose the modern structure as a better fit.

    +
  6. +
  7. +

    Quarkus focuses not only on delivering top features, but also on the developer experience. Quarkus’s Live Coding feature automatically detects changes made to Java files, application configuration, static resources, or even classpath dependency changes and recompiles and redeploys the changes. As such, it solves the problem of the traditional Java development workflow and hence improves productivity.

    +
    +
    +
        Write Code → Compile → Deploy → Test Changes/ Refresh Browser/ etc → Repeat (traditional)
    +    Write Code → Test Changes/ Refresh Browser/ etc → Repeat (Quarkus)
    +
    +
    +
    +

    You can use this feature out of the box without any extra setup by running:

    +
    +
    +
    +
        mvn compile quarkus:dev
    +
    +
    +
    +

    Another highlight feature to speed up development is Quarkus’s Dev Mode with Dev Services, which can automatically provision unconfigured services in development and test mode. This means that if you include an extension and don’t configure it, Quarkus will automatically start the relevant service and wire up your application to use it, therefore saving you a lot of time setting up those services manually. In production mode, where the real configuration is provided, Dev Services will be disabled automatically.

    +
    +
    +

    Additionally, you can access the Dev UI at /q/dev in Dev Mode to browse endpoints offered by various extensions, conceptually similar to what a Spring Boot actuator might provide.

    +
    +
  8. +
  9. +

    Quarkus is made of a small core on which hundreds of extensions rely. In fact, the power of Quarkus is its extension mechanism. Think of these extensions as your project dependencies. You can add it per dependency manager such as maven or gradle.

    +
    +
    +
    mvn quarkus:list-extensions
    +mvn quarkus:add-extension -Dextensions="groupId:artifactId"
    +(or add it manually to pom.xml)
    +##or
    +gradle list-extensions
    +(add dependency to build.gradle)
    +
    +
    +
    +

    Like Spring Boot, Quarkus also has a vast ecosystem of extensions with commonly-used technologies.

    +
    + + ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Table 1. Example of common Quarkus extensions and the Spring Boot Starters with similar functionality (book: Quarkus for Spring Developer)
    Quarkus extensionSpring Boot Starter

    quarkus-resteasy-jackson

    spring-boot-starter-web

    +

    spring-boot-starter-webflux

    quarkus-resteasy-reactive-jackson

    spring-boot-starter-web

    +

    spring-boot-starter-webflux

    quarkus-hibernate-orm-panache

    spring-boot-starter-data-jpa

    quarkus-hibernate-orm-rest-datapanache

    spring-boot-starter-data-rest

    quarkus-hibernate-reactive-panache

    spring-boot-starter-data-r2dbc

    quarkus-mongodb-panache

    spring-boot-starter-data-mongodb

    +

    spring-boot-starter-data-mongodb-reactive

    quarkus-hibernate-validator

    spring-boot-starter-validation

    quarkus-qpid-jms

    spring-boot-starter-activemq

    quarkus-artemis-jms

    spring-boot-starter-artemis

    quarkus-cache

    spring-boot-starter-cache

    quarkus-redis-client

    spring-boot-starter-data-redis

    +

    spring-boot-starter-data-redis-reactive

    quarkus-mailer

    spring-boot-starter-mail

    quarkus-quartz

    spring-boot-starter-quartz

    quarkus-oidc

    spring-boot-starter-oauth2-resource-server

    quarkus-oidc-client

    spring-boot-starter-oauth2-client

    quarkus-smallrye-jwt

    spring-boot-starter-security

    +
    +

    A full list of all Quarkus extensions can be found here. Furthermore, you can check out the community extensions hosted by Quarkiverse Hub. Quarkus has some extensions for Spring API as well, which is helpful when migrating from Spring to Quarkus.

    +
    + +
    +

    Besides extensions, which are officially maintained by Quarkus team, Quarkus allows adding external libraries too. While extensions can be integrated seamlessly into Quarkus, as they can be processed at build time and be built in native mode with GraalVM, external dependencies might not work out of the box with native compilation. If that is the case, you have to recompile them with the right GraalVM configuration to make them work.

    +
    +
  10. +
  11. +

    Quarkus' design accounted for native compilation by default. A Quarkus native executable starts much faster and utilizes far less memory than a traditional JVM (see our performance comparison between Spring and Quarkus). To get familiar with building a native executable, configuring and running it, please check out our Native Image Guide. Be sure to test your code in both JVM and native mode.

    +
  12. +
  13. +

    Both Quarkus and Spring include testing frameworks based on JUnit and Mockito. Thus, by design, Quarkus enables test-driven development by detecting affected tests as changes are made and automatically reruns them in the background. As such, it gives developers instant feedback and hence improves productivity. To use continuous testing, execute the following command:

    +
    +
    +
    mvn quarkus:dev
    +
    +
    +
  14. +
  15. +

    For the sake of performance optimization, Quarkus avoids reflection as much as possible, favoring static class binding instead. When building a native executable, it analyzes the call tree and removes all the classes/methods/fields that are not used directly. As a consequence, the elements used via reflection are not part of the call tree so they are dead code eliminated (if not called directly in other cases).

    +
    +

    A common example is a JSON library, which typically uses reflection to serialize objects to JSON. If you use one out of the box, you might encounter some errors in native mode. So, be sure to register the elements for reflection explicitly. A How-to is provided by Quarkus Registering For Reflection with practical program snippets.

    +
    +
  16. +
+
+
+

A very good read on the topic is the e-book Quarkus for Spring Developers by Red Hat. Another good source for direct hands-on coding tutorial is Katacoda Quarkus for Spring Boot Developers

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/getting-started-quarkus.html b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/getting-started-quarkus.html new file mode 100644 index 00000000..ac5f8251 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/getting-started-quarkus.html @@ -0,0 +1,681 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==Quarkus Quickstart

+
+
+

This guide serves as a quickstart on how to create a Quarkus app, briefly presenting the key functionalities that Quarkus provides, both for beginners and experienced developers.

+
+
+

Introduction to Quarkus

+
+
+

To get a first introduction to Quarkus, you can read the Quarkus introduction guide. To get a brief overview of where you can find the important Quarkus related guides, follow the chapter guide to the reader. +Also, see the comparison of the advantages and disadvantages of a Quarkus application compared to the alternative framework Spring. +This comparison will be supported by our performance comparison between Spring and Quarkus, which demonstrates the lower resource consumption and startup time of Quarkus applications.

+
+
+
+
+

Installation of Tools and Dependencies

+
+
+

First, we need to install some dependencies and tools before we can start programming. Our tool devonfw-ide comes with many development tools for you. +We need to install the following tools for this guide:

+
+
+
    +
  • +

    Maven

    +
  • +
  • +

    Java

    +
  • +
  • +

    any IDE (devonfw-ide supports Eclipse, Intellij and VScode)

    +
  • +
  • +

    Docker

    +
  • +
+
+
+

We recommend installing the devonfw-ide with the tools, but if you already have your system configured and the tools above installed, you can skip to Bootstrap a Quarkus Project, otherwise we will show you how to set up and update your devonfw-ide.

+
+
+
devonfw-ide
+
    +
  1. +

    Install devonfw-ide
    +Follow the Setup to install the devonfw-ide with Java, Maven, Eclipse and VScode.

    +
    +
      +
    1. +

      Command to install Docker
      +devon docker setup

      +
    2. +
    +
    +
  2. +
  3. +

    Update devonfw-ide
    +As we are still working on improving devonfw-ide, we recommend to update your already installed devonfw-ide and tools in order to include essential features for cloud development with Quarkus that you could be missing.

    +
  4. +
+
+
+

Use the commands devon ide update, devon ide update software, and devon ide scripts to update devonfw-ide and all installed software.

+
+
+

Go to the main folder under workspaces of the devonfw-ide installation. +We will create the project there.

+
+
+
+
+

Bootstrap a Quarkus Project

+
+
+

Quarkus provides multiple ways to bootstrap a project. +The option to bootstrap a project via the command-line is shown in the Quarkus getting started guide Bootstrap the project. +Quarkus also provides a project builder where you can select some extensions, the build tool for your project, and if you want, some starter code. +This will deliver a project skeleton with the configured project dependencies and also contributes the information to compile the application natively. To get some recommendations on starter templates, follow the guide on: template recommendations.

+
+
+ + + + + +
+ + +
+

By creating a Quarkus project from the command-line or with the project builder, you get a different project structure and have to adapt it to the devon4j conventions shown in the next Chapter.

+
+
+
+
+

Project Structure

+
+

We provide a recommendation and guideline for a modern project structure to help organize your project into logically related modules. +In order to comply with the requirements of modern cloud development and microservice architectures, follow the guide and apply the modern project structure to your project. You can also find similar modules in our example projects.

+
+
+
+
+
+

Introduction to Quarkus Functionality

+
+
+

Before we start programming, you should first have a look at the functionality of Quarkus.

+
+
+
Quarkus functionality guides
+
    +
  1. +

    Getting started guide from Quarkus
    +This guide presents a good overview of the functionality of Quarkus. The simple Greeting Service gives a brief introduction into concepts like CDI, testing, dev mode, packaging, and running the app.

    +
  2. +
  3. +

    From Spring to Quarkus
    +For experienced Spring developers that have already followed devon4j guidelines, you can read our guide to getting started with Quarkus for Spring developer, as it goes more into the differences that can give you a more detailed comparison to Spring.

    +
    +
      +
    1. +

      Migrate a Spring app to Quarkus
      +This guide shows how to migrate a Spring application to a Quarkus application with devon4j conventions.

      +
    2. +
    +
    +
  4. +
+
+
+
+
+

Create a REST service

+
+
+

Now let’s create our first REST CRUD service with Quarkus. +We give you the options of using a guide to start to code the service yourself or to just download a service that’s ready to use.

+
+
+
Options
+
    +
  1. +

    Create the service yourself
    There is a good Quarkus guide for a simple JSON REST service that will guide you through your first application and help you implement the definition of endpoints with JAX-RS and an Entity that will be managed by the service, and also how to configure the JSON support.

    +
  2. +
  3. +

    Use an existing Quarkus project
    +You don’t want to code a service and just want to test some Quarkus functionalities? Just load a Quarkus sample project provided for every existing quickstart guide and the supported framework. +Our Team also provides some Quarkus applications that are working and can be loaded and tested.

    +
    +
      +
    • +

      reference project is a service that manages products. It contains the devon4j modern project structure, pagination, queries, a Postgres database, SwaggerUI, and support for Kubernetes deploy. To add OpenTelemetry support, see the following guide. +This project will be steadily improved and is used to showcase the abilities of Quarkus with devon4j.

      +
    • +
    • +

      minimal Quarkus project is just the Quarkus project from a getting started with Quarkus guide with a Greeting Services modified with the correct modern structure mentioned in the chapter Project Structure

      +
    • +
    +
    +
  4. +
+
+
+
+
+

OpenAPI generation

+
+
+

We provide a guide with a short introduction to the OpenAPI specification with two plugins that are important in a Quarkus Context.

+
+ +
+

A more detailed usage guide to the Smallrye Plugin is provided by Quarkus OpenAPI and Swagger guide.

+
+
+
+
+

How to Integrate a Database

+
+
+

The next step for our REST service would be to integrate a database to store the objects of the entity.

+
+
+

With Quarkus, adding a database can be easy, because Quarkus can take over the build-up and connection process. +First, you should understand our guides on the concepts of working with data. Then, we will show how to integrate a database with Quarkus.

+
+
+
Data Principles Guides
+
    +
  1. +

    General devon4j JPA guide
    +To get an insight into the general JPA usage, read the JPA guide containing a general explanation of the Java Persistence API.

    +
  2. +
  3. +

    Difference to SpringData
    +If you have already worked with SpringData, this is also partially supported with Quarkus. This is explained in more detail in this SpringData Guide.

    +
  4. +
+
+
+
Database Integration
+
    +
  1. +

    Quarkus zero config dev mode
    +Starting with the database implementation in Quarkus, we recommend for beginners to use the DEV mode Zero Config Setup (Dev Services). This is especially great for testing the code without a database set up. +Quarkus does all the work for you and configures a database and creates the database and tables (schemas) for you.

    +
    +
      +
    1. +

      Configuration Properties
      +A list of all database configuration properties for the Dev services

      +
    2. +
    +
    +
  2. +
  3. +

    Integrate a simple Hibernate ORM database
    The zero config setup only works with the Dev mode. It’s comfortable in the first phases of the creation of your service, but if the goal is to also get a deployable version, you have to create your own database and integrate it. +This Quarkus guide shows how to integrate a Hibernate ORM database with an example service.

    +
    +
      +
    1. +

      Configuration list for JDBC
      +A list of all configuration that is possible with a JDBC configuration properties

      +
    2. +
    +
    +
  4. +
  5. +

    Reactive CRUD application with Panache
    +Quarkus unifies reactive and imperative programming. +Reactive is an architectural principle to build robust, efficient, and concurrent applications. +For an introduction into reactive and how Quarkus enables it, follow this Quarkus reactive architecture article and also the reactive quickstart. +To get started with reactive and implement reactive methods, you can follow the Quarkus reactive guide. +The reactive guide uses the Quarkus based implementation of a Hibernate ORM called Panache. +The implementation is not our first choice with devon4j and therefore not part of our recommendations, but to understand the reactive guide you can read the Hibernate ORM with Panache guide first to prevent possible problems following the guide.

    +
  6. +
+
+
+ + + + + +
+ + +
+

You need an installed Docker version for the zero config setup.

+
+
+
+
+
Database Migration
+

For schema-based databases, we recommend migrating databases with Flyway. +In that case, our general migration guide can give you an overview if you are not familiar with migration. +.. Flyway guide for Quarkus +This Quarkus guide will show how to work with the Flyway extension in a Quarkus application. +This should be used if you start your own database and do not leave the creation to quarkus.

+
+
+
+
+

Testing a Quarkus Application

+
+
+

After we have built the service, we have to verify it with some tests. +We will give you some guidelines to implement some test cases.

+
+
+
Testing Guides
+
    +
  1. +

    General testing guide
    +For users that aren’t familiar with the devon4j testing principles, we created a general best practices and recommendations guide for testing.

    +
    +
      +
    1. +

      Our guide for testing with Quarkus +In addition, we also provide a guide that specifically addresses the testing of a Quarkus application.

      +
    2. +
    +
    +
  2. +
+
+
+

Most of the Quarkus applications are already equipped with a basic test and our reference project provides some further test cases. If you want to improve and extend the tests, you can also follow the large Quarkus guide for testing.

+
+
+
+
+

Packaging of a Quarkus application and creation of a native executable

+
+
+

Quarkus applications can be packaged into different file types. The following link will show you how to build them and give you a short explanation of the characteristics of these files.

+
+
+
Package types
+
    +
  1. +

    fast-jar

    +
  2. +
  3. +

    mutable-jar

    +
  4. +
  5. +

    uber-jar

    +
  6. +
  7. +

    native executable

    +
  8. +
+
+
+

To package an application, use the command mvn package and Quarkus will generate the output in the /target folder. For the native executables, the command needs more parameters, which is explained in the link above.

+
+
+

Configure the Output with these configuration properties

+
+
+
+
+

Create and build a Docker Image

+
+
+

Quarkus supports Jib, S2I and Docker for building images. We focus on building a Quarkus App with Docker. +You get a generated Dockerfile from Quarkus in the src/main/docker folder of any project generated from Quarkus. There are multiple Dockerfiles.

+
+
+
Dockerfiles
+
    +
  1. +

    Dockerfile.jvm
    Dockerfile for the Quarkus application in JVM mode, running in a Red Hat Universal Base Image 8 Minimal container.

    +
  2. +
  3. +

    Dockerfile.legacy-jar
    +DockerFile for Quarkus application in JVM mode with the legacy jar running in Red Hat Universal Base Image 8 Minimal Container.

    +
  4. +
  5. +

    Dockerfile.native
    +Dockerfile using the native executable running in Red Hat Universal Base Image 8 Minimal container.

    +
  6. +
  7. +

    Dockerfile.native-distroless +The native file will run in a Distroless container. Distroless images are very small containers with just the application and runtime dependencies and without the other programs that come with a Linux distribution.

    +
  8. +
+
+
+
+
+

For more information to the different executables go back to the chapter Packaging of a Quarkus application and creation of a native executable

+
+
+
+
+

To simply build and run a Docker image, you can follow the instructions Quarkus provides for every Dockerfile in the comments block.

+
+
+

Docker commands example for the JVM Dockerfile from our reference project

+
+
+
+
####
+##This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode
+#
+##Before building the container image run:
+#
+##./mvnw package
+#
+##Then, build the image with:
+#
+##docker build -f src/main/docker/Dockerfile.jvm -t quarkus/quarkus-basics-jvm .
+#
+##Then run the container using:
+#
+##docker run -i --rm -p 8080:8080 quarkus/quarkus-basics-jvm
+#
+##If you want to include the debug port into your docker image
+##you will have to expose the debug port (default 5005) like this :  EXPOSE 8080 5005
+#
+##Then run the container using :
+#
+##docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/quarkus-basics-jvm
+#
+###
+
+
+
+

Quarkus is also able to build the image while packaging the application, so you don’t have to execute the command from above. +To perform Docker builds with the generated Dockerfiles from above, you need to add the following extension to your project with the command mvn quarkus:add-extension -Dextensions="container-image-docker".

+
+
+

You also have to set the quarkus.container-image.build=true. You can add this to your application.properties or just append it to the packaging command like this: ./mvnw package -Dquarkus.container-image.build=true.

+
+
+

If your needs exceed the instructions given by the file, we recommend to follow the Docker getting started guide to get familiar with Docker and customize the Dockerfiles according to your needs. +To specify your container build, you can use the general container image configurations properties and the Docker image configurations properties when building and runnig Docker images.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-authentication-quarkus.html b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-authentication-quarkus.html new file mode 100644 index 00000000..5b6775a0 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-authentication-quarkus.html @@ -0,0 +1,298 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Quarkus Authentication +Quarkus supports different authentication mechanisms through different extensions. For example:

+
+
+ +
+
+

For mix authentication, see here.

+
+
+

For further details see Quarkus - Security architecture and guides. Quarkus also provides a compatibility layer for Spring Security in the form of the spring-security extension.

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-beanmapping-quarkus.html b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-beanmapping-quarkus.html new file mode 100644 index 00000000..ecc30efc --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-beanmapping-quarkus.html @@ -0,0 +1,478 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Bean mapping with Quarkus

+
+
+

This guide will show bean-mapping, in particular for a Quarkus application. We recommend using MapStruct with a Quarkus application because the other bean-mapper frameworks use Java reflections. They are not supported in GraalVm right now and cause problems when building native applications. MapStruct is a code generator that greatly simplifies the implementation of mappings between Java bean types based on a convention over configuration approach. The mapping code will be generated at compile-time and uses plain method invocations and is thus fast, type-safe, and easy to understand. MapStruct has to be configured to not use Java reflections, which will be shown in this guide.

+
+
+

You can find the official +MapStruct reference guide and a general introduction to MapStruct from Baeldung.

+
+
+

MapStruct Dependency

+
+
+

To get access to MapStruct, we have to add the dependency to our POM.xml:

+
+
+
+
<dependency>
+  <groupId>org.mapstruct</groupId>
+  <artifactId>mapstruct</artifactId>
+  <version>1.4.2.Final</version>
+  <scope>provided</scope>
+</dependency>
+
+
+
+

MapStruct provides an annotation processor that also has to be added to the POM.xml

+
+
+
+
<plugin>
+	<groupId>org.apache.maven.plugins</groupId>
+	<artifactId>maven-compiler-plugin</artifactId>
+	<version>3.8.1</version>
+	<configuration>
+		<source>1.8</source>
+		<target>1.8</target>
+		<annotationProcessorPaths>
+			<path>
+				<groupId>org.mapstruct</groupId>
+				<artifactId>mapstruct-processor</artifactId>
+				<version>1.4.2.Final</version>
+			</path>
+		</annotationProcessorPaths>
+	</configuration>
+</plugin>
+
+
+
+

MapStruct takes advantage of generated getters, setters, and constructors from the Lombok library, follow this Lombok with Mapstruct guide to get Lombok with Mapstruct working.

+
+
+
+
+

MapStruct Configuration

+
+
+

We already discussed the benefits of dependency injection. MapStruct supports CDI with EJB, spring, and jsr330. The default retrieving method for a mapper is a factory that uses reflections, which should be avoided. The component model should be set to CDI, as this will allow us to easily inject the generated mapper implementation. The component model can be configured in multiple ways.

+
+
+

Simple Configuration

+
+

Add the attribute componentModel to the @Mapper annotation in the mapper interface.

+
+
+
+
@Mapper(componentModel = "cdi")
+public interface ProductMapper{
+  ...
+}
+
+
+
+
+

MapperConfig Configuration

+
+

Create a shared configuration that can be used for multiple mappers. Implement an interface and use the annotation @MapperConfig for the class. You can define all configurations in this interface and pass the generated MapperConfig.class with the config attribute to the mapper. The MapperConfig also defines the InjectionStrategy and MappingInheritaceStrategy, both of which will be explained later. +A list of all configurations can be found here.

+
+
+
+
@MapperConfig(
+  componentModel = "cdi",
+  mappingInheritanceStrategy = MappingInheritanceStrategy.AUTO_INHERIT_FROM_CONFIG,
+  injectionStrategy = InjectionStrategy.CONSTRUCTOR
+)
+public interface MapperConfig{
+}
+
+
+
+
+
@Mapper( config = MapperConfig.class )
+public interface ProductMapper{
+  ...
+}
+
+
+
+

Any attributes not given via @Mapper will be inherited from the shared configuration MapperConfig.class.

+
+
+
+

Configuration via annotation processor options

+
+

The MapStruct code generator can be configured using annotation processor options. +You can pass the options to the compiler while invoking javac directly, or add the parameters to the maven configuration in the POM.xml

+
+
+

We also use the constructor injection strategy to avoid field injections and potential reflections. This will also simplify our tests.

+
+
+

The option to pass the parameter to the annotation processor in the POM.xml is used and can be inspected in our quarkus reference project.

+
+
+

A list of all annotation processor options can be found here.

+
+
+
+
+
+

Basic Bean-Mapper Usage

+
+
+

To use the mapper, we have to implement the mapper interface and the function prototypes with a @Mapper annotation.

+
+
+
+
@Mapper
+public interface ProductMapper {
+
+  ProductDto map(ProductEntity model);
+
+  ProductEntity create(NewProductDto dto);
+}
+
+
+
+

The MapStruct annotation processor will generate the implementation for us under /target/generated-sources/, we just need to tell it that we would like to have a method that accepts a ProductEntity entity and returns a new ProductDto DTO.

+
+
+

The generated mapper implementation will be marked with the @ApplicationScoped annotation and can thus be injected into fields, constructor arguments, etc. using the @Inject annotation:

+
+
+
+
public class ProductRestService{
+
+  @Inject
+  ProductMapper mapper;
+}
+
+
+
+

That is the basic usage of a Mapstruct mapper. In the next chapter, we’ll go into a bit more detail and show some more configurations.

+
+
+
+
+

Advanced Bean-Mapper Usage

+
+
+

Let's assume that our Product entity and the ProductDto have some differently named properties that should be mapped. Add a mapping annotation to map the property type from Product to kind from ProductDto. We define the source name of the property and the target name.

+
+
+
+
@Mapper
+public interface ProductMapper {
+  @Mapping(target = "kind", source = "type")
+  ProductDto map(ProductEntity entity);
+
+  @InheritInverseConfiguration(name = "map" )
+  ProductEntity create(ProductDto dto);
+}
+
+
+
+

For bi-directional mappings, we can indicate that a method shall inherit the inverse configuration of the corresponding method with the @InheritInverseConfiguration. You can omit the name parameter if the result type of method A is the same as the +single-source type of method B and if the single-source type of A is the same as the result type of B. If multiple apply, the attribute name is needed. Specific mappings from the inverse method can (optionally) be overridden, ignored, or set to constants or expressions.

+
+
+

The mappingInheritanceStrategy can be defined as shown in MapStruct Configuration. The existing options can be found here.

+
+
+

A mapped attribute does not always have the same type in the source and target objects. For instance, an attribute may be of type int in the source bean but of type Long in the target bean.

+
+
+

Another example is references to other objects which should be mapped to the corresponding types in the target model. E.g. the class ShoppingCart might have a property content of the type Product which needs to be converted into a ProductDto object when mapping a ShoppingCart object to ShoppingCartDto. For these cases, it’s useful to understand how Mapstruct converts the data types and the object references.

+
+
+

Also, the Chapter for nested bean mappings will help to configure MapStruct to map arbitrarily deep object graphs.

+
+
+

You can study running MapStruct implementation examples given by MapStruct or in our Quarkus reference project

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-cors-support.html b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-cors-support.html new file mode 100644 index 00000000..6da6b3a0 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-cors-support.html @@ -0,0 +1,396 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==CORS support

+
+
+

When you are developing the JavaScript client and the server application separately, you have to deal with cross-domain issues: requests are made from an origin domain to a different target domain, and the browser does not allow this by default.

+
+
+

So, we need to prepare the server side to accept requests from other domains. We need to cover the following points:

+
+
+
    +
  • +

    Accept request from other domains.

    +
  • +
  • +

    Accept devonfw used headers like X-CSRF-TOKEN or correlationId.

    +
  • +
  • +

    Be prepared to receive secured request (cookies).

    +
  • +
+
+
+

It is important to note that if you are using security in your request (sending cookies) you have to set withCredentials flag to true in your client side request and deal with special IE8 characteristics.

+
+
+

Configuring CORS support

+
+
+

Quarkus comes with a CORS filter which implements the javax.servlet.Filter interface and intercepts all incoming HTTP requests. It can be enabled in the Quarkus configuration file, src/main/resources/application.properties:

+
+
+
+
quarkus.http.cors=true
+
+
+
+

Configuration with quarkus

+
+

Here’s an example of a full CORS filter configuration, including a regular expression defining an allowed origin:

+
+
+
+
##enable cors filter
+quarkus.http.cors=true
+##configurations cors
+quarkus.http.cors.origins=http://foo.com,http://www.bar.io,/https://([a-z0-9\\-_]+)\\.app\\.mydomain\\.com/
+quarkus.http.cors.methods=OPTIONS,HEAD,GET,PUT,POST,DELETE,PATCH
+quarkus.http.cors.headers=X-Custom
+quarkus.http.cors.exposed-headers=Content-Disposition
+quarkus.http.cors.access-control-max-age=24H
+quarkus.http.cors.access-control-allow-credentials=true
+
+
+ ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
AttributeDefaultDescriptionHTTP Header

quarkus.http.cors.access-control-allow-credentials

-

Boolean value to tell the browsers to expose the response to front-end JavaScript code when the request’s credentials mode Request.credentials is “include”

Access-Control-Allow-Credentials

quarkus.http.cors.origins

*

The comma-separated list of origins allowed for CORS. Values starting and ending with '/'' will be treated as regular expressions. The filter allows any origin if this is not set or set to '*'.

Access-Control-Allow-Origin

quarkus.http.cors.methods

*

The comma-separated list of HTTP methods allowed for CORS. The filter allows any method if this is not set or set to '*'.

Access-Control-Allow-Methods

quarkus.http.cors.headers

*

The comma-separated list of HTTP headers allowed for CORS. The filter allows any header if this is not set or set to '*'.

Access-Control-Allow-Headers

quarkus.http.cors.exposed-headers

*

The comma-separated list of HTTP headers exposed in CORS. The filter allows any headers to be exposed if this is not set or set to '*'.

Access-Control-Expose-Headers

quarkus.http.cors.access-control-max-age

-

The duration (see note below) indicating how long the results of a pre-flight request can be cached.

Access-Control-Max-Age

+
+
+

Configuration with service mesh

+
+

Alternatively, if you use service mesh, you can also define your CORS policy directly there. Here is an example from istio

+
+
+

More information about the CORS headers can be found here

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-exception-handling.html b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-exception-handling.html new file mode 100644 index 00000000..bbb1f52c --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-exception-handling.html @@ -0,0 +1,353 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Exception Handling in Quarkus

+
+
+

For handling exceptions within a Spring application, devon4j provides the devon4j-rest module, which provides a RestServiceExceptionFacade to handle all exceptions in a consistent way. Since the module is not suitable for Quarkus, we need to implement this ourselves.

+
+
+

This guide shows how to do just that. For an example, see our Quarkus reference application.

+
+
+

Exception mapping

+
+
+

We suggest to implement the exception handling the JAX-RS way using ExceptionMapper<T>.

+
+
+

RESTEasy provides several exception mappers out of the box. For example, RESTEasy’s NotFoundExceptionMapper provides a web page that shows all available endpoints in dev mode.

+
+
+

Even though this looks really nice, we want to have consistent exception handling throughout the application. +We create an abstract class AbstractExceptionMapper that acts as a base class for all of the more specific exception mappers and where the response is created in a consistent manner. +You can find an example of this class here.

+
+
+
AbstractExceptionMapper
+
+
public abstract class AbstractExceptionMapper {
+
+  ...
+
+  protected Response createResponse(int status, String errorCode, Exception exception) {
+
+    Map<String, Object> jsonMap = new HashMap<>();
+    jsonMap.put("code", errorCode);
+    if (this.exposeInternalErrorDetails) {
+      jsonMap.put("message", getExposedErrorDetails(exception));
+    } else {
+      jsonMap.put("message", exception.getMessage());
+    }
+    jsonMap.put("uri", this.uriInfo.getPath());
+    jsonMap.put("uuid", UUID.randomUUID());
+    jsonMap.put("timestamp", ZonedDateTime.now().toString());
+    return Response.status(status).type(MediaType.APPLICATION_JSON).entity(jsonMap).build();
+  }
+
+  ...
+}
+
+
+
+

For the exceptions that may occur during runtime, we create an ExceptionMapper that extends from our AbstractExceptionMapper class. To make the class discoverable by the JAX-RS runtime, we have to annotate the class with @Provider.

+
+
+
NotFoundExceptionMapper
+
+
@Provider
+public class NotFoundExceptionMapper extends AbstractExceptionMapper implements ExceptionMapper<NotFoundException> {
+
+  @Override
+  public Response toResponse(NotFoundException exception) {
+
+    ...
+
+    return createResponse(Status.NOT_FOUND.getStatusCode(), exception.getClass().getSimpleName(), exception);
+  }
+}
+
+
+
+ + + + + +
+ + +Unlike the RestServiceExceptionFacade of the devon4j-rest module, we cannot use ExceptionMapper<Throwable> in Quarkus, because in this case, the exception mapper of RESTEasy would be used, since they are more specific. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-logging.html b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-logging.html new file mode 100644 index 00000000..ce4793f4 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-logging.html @@ -0,0 +1,520 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Logging

+
+
+

Logging is the recording of messages during the execution of an application. The log messages provide information for developers or administrators that can be used to troubleshoot or maintain the application, such as errors and warnings, but also info messages such as runtime statistics that can be used for analysis.

+
+
+

One must distinguish between a logging API and logging implementations. A logging API provides a standardised interface, while the specific implementation is a framework that is developed against and uses the API.

+
+
+

Internally, Quarkus uses the JBoss Logging facade, an abstraction layer that provides support for multiple logging APIs and JBoss LogManager, which provides implementations for the specific APIs. The following logging APIs are supported:

+
+ +
+

Usage

+
+
+

Maven integration

+
+

We recommend using SLF4j as the logging API. Since Quarkus uses JBoss logging internally, you can use it out of the box and do not need to add any dependencies to your project to use it. JBoss LogManager will send it to the appropriate implementation.

+
+
+

Exceptional case: +If you use a dependency in your project that has dependencies on other logging libraries like SLF4j, then you need to exclude them from the dependency and use a JBoss Logging adapter. For more information, see here. +For example, if you have a dependency that uses SLF4j, you need to add the following dependency to your pom.xml file:

+
+
+
+
<dependency>
+    <groupId>org.jboss.slf4j</groupId>
+    <artifactId>slf4j-jboss-logmanager</artifactId>
+</dependency>
+
+
+
+ + + + + +
+ + +
+

This is not needed for libraries that are dependencies of a Quarkus extension as the extension will take care of this for you.

+
+
+
+
+
+

Logger access

+
+

The general pattern for accessing an instance of a logger class is to use static instances. So for SLF4j, the following lines are sufficient to create a log object:

+
+
+
+
...
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class MyClass {
+    private static final Logger log = LoggerFactory.getLogger(MyClass.class);
+
+}
+
+
+
+

If you use Lombok in your project, you can simply add the @Slf4j annotation to your class. Lombok will then automatically create a logger instance that you can use in your code.

+
+
+
+

Using the logger

+
+

After you have created the logger instance, you can simply use one of the log methods of the corresponding object. Different logging APIs provide different methods for creating log messages. When using SLF4j, there are several methods such as info, warn, error that are logged depending on the log level set (see [configuration]).

+
+
+
+
...
+public void myMethod(...) {
+    log.info("your log message");
+}
+...
+
+
+
+

For detailed documentation on the SLF4j API, see here.

+
+
+
+

Configuration

+
+

There are several options you can set in the application.properties file to configure the behaviour of the logger. For example, to set the log level or the format of the log messages.

+
+
+

Log levels

+
+

Quarkus supports eight different log levels (see here for an overview). Use quarkus.log.level to set the default log level of the application (default is INFO). To define more specific log levels, you can set different levels per category.

+
+
+
+
quarkus.log.level=INFO
+quarkus.log.category."org.hibernate".level=DEBUG
+
+
+
+

This would set the default log level in your application to INFO and the Hibernate log level to DEBUG.

+
+
+

To understand when to use which log level, you can take a look at the devon4j logging guide.

+
+
+
+

Format

+
+

To configure the output format of the log messages, set the property quarkus.log.console.format. Information on the supported options can be found here.

+
+
+
+
quarkus.log.console.format=[D: %d] [P: %p] [C: %X] [T: %t] [L: %c] [M: %m]%n
+
+
+
+

Result:

+
+
+
+
[D: 2021-07-20 11:54:33,127] [P: DEBUG] [C: «MDC values»] [T: executor-thread-0] [L: my.package.MyClass] [M: log message...]
+
+
+
+
+
+

Customizing log messages

+
+

You can use Mapped Diagnostic Context to add custom fields to your log messages. MDC is a simple map consisting of key-value pairs to store additional useful information such as session or request ids that can be helpful when filtering log messages or debugging applications.

+
+
+
+
import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.MDC;
+
+...
+public class MyClass {
+
+    private static final Logger log = LoggerFactory.getLogger(SLF4JLoggingResource.class);
+
+    public String myMethod() {
+    	MDC.put("yourKey", "yourValue");
+    	log.info("log message ...");
+    }
+
+    ...
+}
+
+
+
+

Result:

+
+
+
+
[D: 2021-07-20 11:54:33,127] [P: DEBUG] [C: {yourKey=yourValue}] [T: executor-thread-0] [L: my.package.MyClass] [M: log message...]
+
+
+
+
+
+
+

JSON Logging

+
+
+

For production environments, we suggest using JSON logs instead of plain text. The JSON output can be captured by external services for storage and analysis. To do this, add the quarkus-logging-json extension to your project's pom.xml file.

+
+
+
+
<dependency>
+    <groupId>io.quarkus</groupId>
+    <artifactId>quarkus-logging-json</artifactId>
+</dependency>
+
+
+
+

This will change the output format by default. Since it makes sense in development environments to have the output format in a human readable format, you can disable JSON logging for development (or test) environments by adding the following properties to your application.properties file.

+
+
+
+
%dev.quarkus.log.console.json=false
+%test.quarkus.log.console.json=false
+
+
+
+
+
+

Centralized Log Management

+
+
+

As mentioned in the section on JSON logging, in production environments it makes sense to have a service to store and analyse the logs. For this, you can use a central log management system like Graylog or Logstash in combination with Elasticsearch, which provides you with a powerful search engine.

+
+
+

For this, Quarkus provides the quarkus-logging-gelf extension to send the logs in the Graylog Extended Log Format (GELF) to your log management system.

+
+
+
+
<dependency>
+    <groupId>io.quarkus</groupId>
+    <artifactId>quarkus-logging-gelf</artifactId>
+</dependency>
+
+
+
+

You do not have to extend your code, just configure the GELF log handler to your management system.

+
+
+
+
quarkus.log.handler.gelf.enabled=true
+quarkus.log.handler.gelf.host=tcp:localhost
+quarkus.log.handler.gelf.port=12201
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-native-image.html b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-native-image.html new file mode 100644 index 00000000..d60526af --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-native-image.html @@ -0,0 +1,326 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Building a native image

+
+
+

Quarkus provides the ability to create a native executable of the application called native image. +Unlike other Java based deployments, a native image will only run on the architecture and operating system it is compiled for. +Also, no JVM is needed to run the native-image. +This improves the startup time, performance, and efficiency. +A distribution of GraalVM is needed. +You can find the differences between the available distributions here.

+
+
+

To build your quarkus app as a native-image, you have two options that are described in the following sections.

+
+
+

Build a native executable with GraalVM

+
+
+

To build a Quarkus application, you can install GraalVM locally on your machine, as described below. +Therefore, read the basic Quarkus application chapter, or clone the example project provided by devonfw. +Follow this chapter from the Quarkus Guide for building a native executable.

+
+
+

Installing GraalVM

+
+

A native image can be created locally or through a container environment. +To create a native image locally, an installed and configured version of GraalVM is needed. You can follow the installation guide from Quarkus or the guide provided by GraalVM for this.

+
+
+
+
+
+

Build a native executable with GraalVM through container environment

+
+
+

In order to make the build of native images more portable, you can also use your container environment and run the GraalVM inside a container (typically Docker). +You can simply install Docker with your devonfw-ide distribution, just follow this description Docker with devonfw-ide. +Follow this chapter to build a native Linux image through container runtime.

+
+
+
+
+

Configuring the native executable

+
+
+

A list of all configuration properties for a native image can be found here.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-quarkus-configuration.html b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-quarkus-configuration.html new file mode 100644 index 00000000..b450a77a --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-quarkus-configuration.html @@ -0,0 +1,459 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Configuration

+
+
+

Quarkus provides a comprehensive guide on configuration here.

+
+
+

External Application Configuration

+
+
+

Database Configuration

+
+

In Quarkus, Hibernate is provided by the quarkus-hibernate-orm extension. Ensure the extension is added to your pom.xml as follows:

+
+
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-hibernate-orm</artifactId>
+</dependency>
+
+
+
+

Additionally, you have to add the respective JDBC driver extension to your pom.xml. There are different drivers for different database types. See Quarkus Hibernate guide.

+
+
+
+

Database System and Access

+
+

You need to configure which database type you want to use, as well as the location and credentials to access it. The defaults are configured in application.properties. The file should therefore contain the properties as in the given example:

+
+
+
+
quarkus.datasource.jdbc.url=jdbc:postgresql://database.enterprise.com/app
+quarkus.datasource.username=appuser01
+quarkus.datasource.password=************
+quarkus.datasource.db-kind=postgresql
+
+##drop and create the database at startup (use only for local development)
+quarkus.hibernate-orm.database.generation=drop-and-create
+
+
+
+
+

Database Logging

+
+

Add the following properties to application.properties to enable logging of database queries for debugging purposes.

+
+
+
+
quarkus.hibernate-orm.log.sql=true
+quarkus.hibernate-orm.log.format-sql=true
+
+#Logs SQL bind parameters. Setting it to true is obviously not recommended in production.
+quarkus.hibernate-orm.log.bind-parameters=true
+
+
+
+
+
+
+

Secrets and environment specific configurations

+
+
+

Environment variables

+
+

There are also some libraries to make Jasypt work with Quarkus, such as Camel Quarkus Jasypt. Unfortunately, this feature only works in JVM mode and not in native mode.

+
+
+

Quarkus supports many credential providers with official extensions, such as HashiCorp Vault.

+
+
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-vault</artifactId>
+</dependency>
+
+
+
+

Quarkus reads configuration values from several locations, ordered by a certain priority. An overview of these can be found at the official Quarkus config guide.

+
+
+

Environment variables have a higher ordinal number and are therefore higher prioritized than e.g. the application.properties file. +So instead of storing secrets in plain text in the configuration files, it is better to use environment variables for critical values to configure the application.

+
+
+

Environment variables also have the advantage that they can be easily integrated into a containerized environment. +When using Kubernetes, the secrets can be stored as Kubernetes secret and then passed to the containers as an environment variable.

+
+
+
+

Custom config sources

+
+

Quarkus provides the possibility to add custom config sources, which can be used to retrieve configuration values from custom locations. +For a description of this feature, see the corresponding Quarkus guide.

+
+
+

Config interceptors

+
+

Quarkus also allows with the concept of interceptors to hook into the resolution of configuration values. This can be useful when configuration values are encrypted or need to be extracted. +To do this, you have to implement a ConfigSourceInterceptor.

+
+
+
+
public class CustomConfigInterceptor implements ConfigSourceInterceptor {
+
+  @Override
+  public ConfigValue getValue(ConfigSourceInterceptorContext context, String name) {
+
+    ConfigValue configValue = context.proceed(name);
+    if (name.equals("config-value-to-resolve")) {
+      configValue = new ConfigValue.ConfigValueBuilder()
+          .withName(name)
+          .withValue(resolveConfigurationValue(name))
+          .build();
+    }
+
+    return configValue;
+  }
+
+  private String resolveConfigurationValue(String name) {
+    ...
+  }
+}
+
+
+
+

To use the Interceptor, you must register it. To do this, create a file io.smallrye.config.ConfigSourceInterceptor in the folder src/main/resources/META-INF/services and register the interceptor by writing the fully qualified class name to this file.

+
+
+
+
+

Credential encryption

+
+

As for Spring, there are also some libraries that let Jasypt work with Quarkus such as Camel Quarkus Jasypt. Unfortunately, this feature only works in JVM mode and not in native mode, so it is not a suitable approach.

+
+
+

If you want to store usernames or passwords in encrypted form or retrieve them from a custom store, you can use a custom CredentialsProvider for this purpose. +Consider the use case where you want to store your database credentials in encrypted form rather than in plain text. Then you can implement a credentials provider as follows:

+
+
+
+
@ApplicationScoped
+@Unremovable
+public class DatabaseCredentialsProvider implements CredentialsProvider {
+
+  @Override
+  public Map<String, String> getCredentials(String credentialsProviderName) {
+
+    Map<String, String> properties = new HashMap<>();
+    properties.put(USER_PROPERTY_NAME, decryptUsername());
+    properties.put(PASSWORD_PROPERTY_NAME, decryptPassword());
+    return properties;
+  }
+}
+
+
+
+

In the application.properties file you need to set quarkus.datasource.credentials-provider=custom. +For more information about the credentials provider, see the official Quarkus guide.

+
+
+
+

HashiCorp Vault

+
+

For centralized management of secrets and other critical configuration values, you can use HashiCorp Vault as external management tool.

+
+
+

For detailed instructions on how to integrate Vault into your Quarkus application, see the official Quarkus guide.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-quarkus-testing.html b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-quarkus-testing.html new file mode 100644 index 00000000..70332799 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/guide-quarkus-testing.html @@ -0,0 +1,411 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Testing

+
+
+

Configuration

+
+
+

Quarkus relies on JUnit for testing.

+
+
+
+
<dependency>
+    <groupId>io.quarkus</groupId>
+    <artifactId>quarkus-junit5</artifactId>
+    <scope>test</scope>
+</dependency>
+
+
+
+

The extension quarkus-junit5 provides the @QuarkusTest annotation that controls the testing framework. The tests are run by default on port 8081. To change it, modify the application.properties correspondingly:

+
+
+
+
quarkus.http.test-port=8083
+quarkus.http.test-ssl-port=8446
+
+
+
+

Quarkus supports injecting scoped CDI beans into your tests via @Inject annotation for e.g. unit testing or beans testing.

+
+
+

Mocking CDI beans

+
+

The io.quarkus.test.junit.QuarkusMock class can be used to temporarily mock out any normal scoped bean (e.g. @ApplicationScoped, @RequestScoped etc., basically every scope except @Singleton and @Dependent). If you use this method in a @BeforeAll, the mock will take effect for all tests on the current class, while if you use this in a test method, the mock will only take effect there. An example is given below:

+
+
+
+
@QuarkusTest
+public class MockTestCase {
+    @Inject
+    MockableBean1 mockableBean1;
+
+    @Inject
+    MockableBean2 mockableBean2;
+
+    @BeforeAll
+    public static void setup() {
+        MockableBean1 mock = Mockito.mock(MockableBean1.class);
+        QuarkusMock.installMockForType(mock, MockableBean1.class);
+    }
+
+    @Test
+    public void testPerTestMock() {
+        QuarkusMock.installMockForInstance(new MockClass(), mockableBean2);
+    }
+}
+
+
+
+

Quarkus also allows users to effortlessly take advantage of Mockito.

+
+
+
+
<dependency>
+    <groupId>io.quarkus</groupId>
+    <artifactId>quarkus-junit5-mockito</artifactId>
+    <scope>test</scope>
+</dependency>
+
+
+
+

The example above can be simplified like so:

+
+
+
+
@QuarkusTest
+public class MockTestCase {
+    @InjectMock
+    MockableBean1 mockableBean1;
+
+    @InjectMock
+    MockableBean2 mockableBean2;
+}
+
+
+
+

A RestClient can also be easily mocked with @InjectMock and @RestClient annotations.

+
+
+
+
@Path("/")
+@ApplicationScoped
+@RegisterRestClient
+public interface DemoRestClient {
+}
+
+@QuarkusTest
+public class MockTestCase {
+    @InjectMock
+    @RestClient
+    DemoRestClient demoRestClient;
+}
+
+
+
+
+

Test profiles

+
+

Quarkus supports testing different configurations with test profiles. A test profile has to implement the QuarkusTestProfile . To write a profile, please follow this guide.

+
+
+
+

Continuous testing

+
+

By design, Quarkus enables test-driven development. It detects affected tests as changes are made and automatically reruns them in the background. As such, it gives developers instant feedback. To use continuous testing, execute the following command:

+
+
+
+
mvn quarkus:dev
+
+
+
+

For more details, see here.

+
+
+
+

Native testing

+
+

It’s possible to test native executables using @NativeImageTest (which might be replaced by @QuarkusIntegrationTest in the future). For more, see here.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/quarkus-template.html b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/quarkus-template.html new file mode 100644 index 00000000..3c46c4d3 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/quarkus/quarkus-template.html @@ -0,0 +1,410 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Quarkus template

+
+
+

Quarkus Code Generator provides many alternative technologies and libraries that can be integrated into a project. Detailed guides on multiple topics can be found here.

+
+
+

Due to the large selection, getting started can be difficult for developers. +In this guide we aim to provide a general suggestion on basic frameworks, libraries, and technologies to make it easy for developers to begin with.

+
+
+

With that said, please take this as a recommendation and not as a compulsion. Depending on your project requirements, you might have to use another stack compared to what is listed below.

+
+
+

If you are new to Quarkus, consider checking out their getting started guide to get an overview of how to create, run, test, as well as package a Quarkus application. Another recommended source to get started is the Katacoda tutorials.

+
+
+

Basic templates

+
+
    +
  1. +

    simple REST API (go to code.quarkus.io)

    +
  2. +
  3. +

    simple REST API with monitoring (go to code.quarkus.io)

    +
  4. +
+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Topic-based suggested implementation
TopicDetailSuggested implementationNote

runtime

servlet-container

Undertow

component management

dependency injection

ArC

ArC is based on JSR 365. It also provides interceptors that can be used to implement the same functionality as AOP provides

configuration

SmallRye Config

SmallRye Config is an implementation of Eclipse MicroProfile Config. It also supports YAML configuration files

persistence

OR-mapper

Hibernate ORM, Spring Data JPA

Hibernate ORM is the de facto standard JPA implementation and works perfectly in Quarkus. Quarkus also provides a compatibility layer for Spring Data JPA repositories in the form of the spring-data-jpa extension.

batch

Quarkus JBeret Extension is a non-official extension, which is hosted in the Quarkiverse Hub. It is an implementation of JSR 352.

service

REST services

RESTEasy

RESTEasy is a portable implementation of the new JCP specification JAX-RS JSR-311. It can be documented via Swagger OpenAPI.

async messaging

SmallRye Reactive Messaging, Vert.x EventBus

SmallRye Reactive Messaging is an implementation of the Eclipse MicroProfile Reactive Messaging specification 1.0. You can also utilize SmallRye Reactive Messaging in your Quarkus application to interact with Apache Kafka.

marshalling

RESTEasy Jackson, RESTEasy JSON-B, RESTEasy JAXB, RESTEasy Multipart

cloud

kubernetes

Kubernetes

deployment

Minikube, k3d

Minikube is quite popular when a Kubernetes cluster is needed for development purposes. Quarkus supports this with the quarkus-minikube extension.

logging

framework

JBoss Log Manager and the JBoss Logging facade

Internally, Quarkus uses JBoss Log Manager and the JBoss Logging facade. Logs from other supported Logging API (JBoss Logging, SLF4J, Apache Commons Logging) will be merged.

validation

framework

Hibernate Validator/Bean Validation (JSR 380)

security

authentication & authorization

JWT authentication

Quarkus supports various security mechanisms. Depending on your protocol, identity provider you can choose the necessary extensions such as quarkus-oidc quarkus-smallrye-jwt quarkus-elytron-security-oauth2.

monitoring

framework

Micrometer Metrics, SmallRye Metrics

SmallRye Metrics is an implementation of the MicroProfile Metrics specification. Quarkus also offers various extensions to customize the metrics.

health

SmallRye Health

SmallRye Health is an implementation of the MicroProfile Health specification.

fault tolerance

SmallRye Fault Tolerance

SmallRye Fault Tolerance is an implementation of the MicroProfile Fault Tolerance specification.

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/spring.html b/docs/devonfw.github.io/1.0/devon4j.wiki/spring.html new file mode 100644 index 00000000..0f67daf2 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/spring.html @@ -0,0 +1,371 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Spring

+
+
+

Spring is the most famous and established Java framework. +It is fully supported by devonfw as an option and alternative to quarkus.

+
+
+

Guide to the Reader

+
+
+

Dependent on the intention you are reading this document, you might be most interested in the following chapters:

+
+
+
    +
  • +

    If you are not yet familiar with Spring, you may be interested in pros and cons of Spring. Also take a look at the official Spring website.

    +
  • +
  • +

    If you already have experience developing with Spring but are new to devon4j, take a look at devon4j’s recommendations on general best practices. Check out the chapters on architecture design, project structuring and coding conventions. Follow the referenced links to go deeper into a topic.

    +
  • +
  • +

    If you have already developed with devon4j and Spring and need more information on a specific topic, check out the devon4j guides for Spring. If you don’t find what you are looking for there, check out the general section. devon4j uses general solutions for Java, so solutions for both Spring and Quarkus are documented there.

    +
  • +
  • +

    If you want to get started or create your first Spring application using devon4j, check out the guide about creating a new application or the Jump the Queue and My Thai Star reference applications.

    +
  • +
+
+
+
+
+

Pros

+
+
+

Spring offers the following benefits:

+
+
+
    +
  • +

    highly flexible
    +Spring is famous for its great flexibility. You can customize and integrate nearly everything.

    +
  • +
  • +

    well established
    +While JEE application servers including very expensive commercial products turned out to be a dead-end, spring has guided projects through the changing trends of IT throughout decades. It may be the framework with the longest history track and popularity. As a result you can easily find developers, experts, books, articles, etc. about spring.

    +
  • +
  • +

    non-invasive and not biased
    +Spring became famous for its non-invasive coding based on patterns instead of hard dependencies. It gives you a lot of freedom and avoids tight coupling of your (business) code.

    +
  • +
+
+
+

See Why Spring? for details.

+
+
+
+
+

Cons

+
+
+

Spring has the following drawbacks:

+
+
+
    +
  • +

    history and legacy
    +Due to the pro of its long established history, spring also carries a lot of legacy. As a result there are many ways to do the same thing while some options may be discouraged. Developers needs some guidance (e.g. via devon4j) as they may enter pitfalls and dead-ends when choosing the first solution they found on google or stackoverflow.

    +
  • +
  • +

    lost lead in cloud-native
    +While for the last decades spring was leading innovation in Java app development, it seems that with the latest trends and shift such as cloud-native, they have been overtaken by frameworks like quarkus. However, spring is trying to catch up with spring-native.

    +
  • +
+
+
+
+
+

Spring-Boot

+
+
+

Spring-boot is a project and initiative within the spring-ecosystem that brought a lot of innovation and simplification into app development on top of spring. +As of today we typically use the terms spring and spring-boot rather synonymously as we always use spring together with spring-boot.

+
+
+
+
+

Spring-Native

+
+
+

Spring-native adds cloud-native support to the spring ecosystem and allows to build a spring app as cloud-native image via GraalVM. +You may also consider Quarkus if you are interested in building cloud-native images. For a comparison of both Spring Native and Quarkus, you may refer to our Spring Native vs. Quarkus guide.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-authentication-spring.html b/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-authentication-spring.html new file mode 100644 index 00000000..7942b5c4 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-authentication-spring.html @@ -0,0 +1,348 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Spring Security

+
+
+

We use spring-security as a framework for authentication purposes.

+
+
+

Therefore, you need to provide an implementation of WebSecurityConfigurerAdapter:

+
+
+
+
@Configuration
+@EnableWebSecurity
+public class MyWebSecurityConfig extends WebSecurityConfigurerAdapter {
+
+  @Inject
+  private UserDetailsService userDetailsService;
+  ...
+  public void configure(HttpSecurity http) throws Exception {
+    http.userDetailsService(this.userDetailsService)
+        .authorizeRequests().antMatchers("/public/**").permitAll()
+        .anyRequest().authenticated().and()
+        ...
+  }
+}
+
+
+
+

As you can see, spring-security offers a fluent API for easy configuration. You can simply add invocations like formLogin().loginPage("/public/login") or httpBasic().realmName("MyApp"). Also CSRF protection can be configured by invoking csrf(). +For further details see spring Java-config for HTTP security.

+
+
+

Further, you need to provide an implementation of the UserDetailsService interface. +A good starting point comes with our application template.

+
+
+

For authentication via JSON Web Token (JWT), check the JWT Spring-Starter.

+
+
+

Mix authentication should be avoided where possible. However, when needed, you can find a solution +here.

+
+
+

Preserve original request anchors after form login redirect

+
+

Spring Security will automatically redirect any unauthorized access to the defined login-page. After successful login, the user will be redirected to the original requested URL. The only pitfall is that anchors in the request URL will not be transmitted to the server and thus cannot be restored after successful login. Therefore the devon4j-security module provides the RetainAnchorFilter, which is able to inject javascript code into the source page and the target page of any redirection. Using javascript, this filter is able to retrieve the requested anchors and store them into a cookie. When heading to the target URL, this cookie will be used to restore the original anchors again.

+
+
+

To enable this mechanism you have to integrate the RetainAnchorFilter as follows: +First, declare the filter with

+
+
+
    +
  • +

    storeUrlPattern: a regular expression matching the URL, where anchors should be stored

    +
  • +
  • +

    restoreUrlPattern: a regular expression matching the URL, where anchors should be restored

    +
  • +
  • +

    cookieName: the name of the cookie to save the anchors in the intermediate time

    +
  • +
+
+
+

You can easily configure this as code in your WebSecurityConfig as following:

+
+
+
+
RetainAnchorFilter filter = new RetainAnchorFilter();
+filter.setStoreUrlPattern("http://[^/]+/[^/]+/login.*");
+filter.setRestoreUrlPattern("http://[^/]+/[^/]+/.*");
+filter.setCookieName("TARGETANCHOR");
+http.addFilterBefore(filter, UsernamePasswordAuthenticationFilter.class);
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-beanmapping-spring.html b/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-beanmapping-spring.html new file mode 100644 index 00000000..14acc607 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-beanmapping-spring.html @@ -0,0 +1,363 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==Bean Mapping in devon4j-spring

+
+
+

We have developed a solution that uses a BeanMapper that allows to abstract from the underlying implementation. As mentioned in the general bean mapping guide, we started with Dozer a Java Bean to Java Bean mapper that recursively copies data from one object to another. Now we recommend using Orika. This guide will show an introduction to Orika and Dozer bean-mapper.

+
+
+

Bean-Mapper Dependency

+
+
+

To get access to the BeanMapper we have to use either of the below dependency in our POM:

+
+
+
Orika
+
+
<dependency>
+    <groupId>com.devonfw.java.modules</groupId>
+    <artifactId>devon4j-beanmapping-orika</artifactId>
+    <version>2020.12.002</version>
+</dependency>
+
+
+
+
Dozer
+
+
<dependency>
+    <groupId>com.devonfw.java.modules</groupId>
+    <artifactId>devon4j-beanmapping-dozer</artifactId>
+    <version>2020.12.002</version>
+</dependency>
+
+
+
+
+
+

Bean-Mapper Configuration

+
+
+

Bean-Mapper Configuration using Dozer

+
+

The BeanMapper implementation is based on an existing open-source bean-mapping framework. +In case of Dozer the mapping is configured in src/main/resources/config/app/common/dozer-mapping.xml.

+
+
+

See the my-thai-star dozer-mapping.xml as an example. +Important is that you configure all your custom datatypes as <copy-by-reference> tags and have the mapping from PersistenceEntity (ApplicationPersistenceEntity) to AbstractEto configured properly:

+
+
+
+
 <mapping type="one-way">
+    <class-a>com.devonfw.module.basic.common.api.entity.PersistenceEntity</class-a>
+    <class-b>com.devonfw.module.basic.common.api.to.AbstractEto</class-b>
+    <field custom-converter="com.devonfw.module.beanmapping.common.impl.dozer.IdentityConverter">
+      <a>this</a>
+      <b is-accessible="true">persistentEntity</b>
+    </field>
+</mapping>
+
+
+
+
+
+
+

Bean-Mapper Configuration using Orika

+
+
+

Orika with devonfw is configured by default and sets some custom mappings for GenericEntity.java to GenericEntityDto.java. To specify and customize the mappings you can create the class BeansOrikaConfig.java that extends the class BaseOrikaConfig.java from the devon4j.orika package. To register a basic mapping, register a ClassMap for the mapperFactory with your custom mapping. Watch the example below and follow the basic Orika mapping configuration guide and the Orika advanced mapping guide.

+
+
+

Register Mappings:

+
+
+
+
mapperFactory.classMap(UserEntity.class, UserEto.class)
+			.field("email", "email")
+			.field("username", "name")
+			.byDefault()
+			.register();
+
+
+
+
+
+

Bean-Mapper Usage

+
+
+

Then we can get the BeanMapper via dependency-injection what we typically already provide by an abstract base class (e.g. AbstractUc). Now we can solve our problem very easy:

+
+
+
+
...
+UserEntity resultEntity = ...;
+...
+return getBeanMapper().map(resultEntity, UserEto.class);
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-cors-spring.html b/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-cors-spring.html new file mode 100644 index 00000000..5fa10199 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-cors-spring.html @@ -0,0 +1,350 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==CORS configuration in Spring

+
+
+

Dependency

+
+
+

To enable the CORS support from the server side for your devon4j-Spring application, add the below dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-security-cors</artifactId>
+</dependency>
+
+
+
+
+
+

Configuration

+
+
+

Add the below properties in your application.properties file:

+
+
+
+
#CORS support
+security.cors.spring.allowCredentials=true
+security.cors.spring.allowedOriginPatterns=*
+security.cors.spring.allowedHeaders=*
+security.cors.spring.allowedMethods=OPTIONS,HEAD,GET,PUT,POST,DELETE,PATCH
+security.cors.pathPattern=/**
+
+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
AttributeDescriptionHTTP Header

allowCredentials

Decides whether the browser should include any cookies associated with the request (true if cookies should be included).

Access-Control-Allow-Credentials

allowedOrigins

List of allowed origins (use * to allow all orgins).

Access-Control-Allow-Origin

allowedMethods

List of allowed HTTP request methods (OPTIONS, HEAD, GET, PUT, POST, DELETE, PATCH, etc.).

-

allowedHeaders

List of allowed headers that can be used during the request (use * to allow all headers requested by the client)

Access-Control-Allow-Headers

pathPattern

Ant-style pattern for the URL paths where to apply CORS. Use "/**" to match all URL paths.

+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-devon4j-spring-repository.html b/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-devon4j-spring-repository.html new file mode 100644 index 00000000..f0db980b --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-devon4j-spring-repository.html @@ -0,0 +1,349 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Spring Data in devon4j-spring

+
+
+

In devon4j-spring, spring-data-jpa is provided via devon4j-starter-spring-data-jpa extension, which provides advanced integration (esp. for QueryDSL).

+
+
+

Spring Data uses a fragment approach to implement custom functionality for repositories. For Spring applications, devon4j provides a solution that works without this fragment approach.

+
+
+

The repository must extend DefaultRepository, which uses GenericRepositoryImpl as implementation. The QueryUtil helper class provides methods to support pagination and query creation.

+
+
+

Dependency

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-spring-data-jpa</artifactId>
+</dependency>
+
+
+
+
+
+

Example

+
+
+

The following example shows how to write such a repository. The example has the same functionality as the example in the Spring Data guide:

+
+
+
+
public interface ExampleRepository extends DefaultRepository<ExampleEntity> {
+
+  @Query("SELECT example FROM ExampleEntity example" //
+      + " WHERE example.name = :name")
+  List<ExampleEntity> findByName(@Param("name") String name);
+
+  @Query("SELECT example FROM ExampleEntity example" //
+      + " WHERE example.name = :name")
+  Page<ExampleEntity> findByNamePaginated(@Param("name") String name, Pageable pageable);
+
+  default Page<ExampleEntity> findByCriteria(ExampleSearchCriteriaTo criteria) {
+    ExampleEntity alias = newDslAlias();
+    JPAQuery<ExampleEntity> query = newDslQuery(alias);
+    String name = criteria.getName();
+    if ((name != null) && !name.isEmpty()) {
+      QueryUtil.get().whereString(query, $(alias.getName()), name, criteria.getNameOption());
+    }
+    return QueryUtil.get().findPaginated(criteria.getPageable(), query, false);
+  }
+
+}
+
+
+
+
+
+

Further examples

+
+
+

You can also read the JUnit test-case DefaultRepositoryTest that is testing an example +FooRepository.

+
+
+
+
+

Auditing

+
+
+

In case you need auditing, you only need to extend DefaultRevisionedRepository instead of DefaultRepository. The auditing methods can be found in GenericRevisionedRepository.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-jwt-spring.html b/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-jwt-spring.html new file mode 100644 index 00000000..fd7198a8 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-jwt-spring.html @@ -0,0 +1,430 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==JWT Spring-Starter

+
+
+

Keystore

+
+
+

A KeyStore is a repository of certificates and keys (public key, private key, or secret key). They can be used for TSL transportation, for encryption and decryption as well as for signing. +For demonstration you might create a keystore with openssl, with the following commands:

+
+
+
+
openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem -days 365
+openssl pkcs12 -export -in cert.pem -inkey key.pem -out example.p12
+
+
+
+

For Java tooling you may also try the following instead:

+
+
+
+
keytool -genkeypair -alias devonfw -keypass "password" -storetype PKCS12 -keyalg RSA -keysize 4096 -storepass "password" -keystore keystore.pkcs
+
+
+
+ + + + + +
+ + +Please use reasonable passwords instead of password what should be obvious. Also for the alias the value devonfw is just an example. +
+
+
+
+
+

JWT Dependency

+
+
+

To use JWT support from devon4j with spring-boot you have to add following required dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-security-jwt</artifactId>
+</dependency>
+
+
+
+
+
+

Configuration

+
+
+

The following properties need to be configured in your application.properties file:

+
+
+
+
##location of the keystore file, can be any spring resource (such as file or classpath URIs)
+security.keystore.location=classpath:config/keystore.pkcs
+##type of keystore e.g. "PKCS12" (recommended), "JKS", or "JCEKS"
+security.keystore.type=PKCS12
+##password the keystore is secured with. Consider using password encryption as described in devon4j configuration guide
+security.keystore.password=password
+##the algorithm for encryption/decryption and signing - see io.jsonwebtoken.SignatureAlgorithm
+security.authentication.jwt.algorithm=RS256
+##alias of public/private key in keystore (for validation only public key is used, for creation private key is required)
+security.authentication.jwt.alias=devonfw
+##the following properties are used if you are validating JWTs (e.g. via JwtAuthenticationFilter)
+security.authentication.jwt.validation.expiration-required=false
+security.authentication.jwt.validation.max-validity=42h
+security.authentication.jwt.validation.not-before-required=false
+##the following properties are only used if you are issuing JWTs (e.g. via JwtLoginFilter)
+security.authentication.jwt.creation.add-issued-at=true
+security.authentication.jwt.creation.validity=4h
+security.authentication.jwt.creation.not-before-delay=1m
+##the following properties enable backward compatiblity for devon4j <= 2021.04.002
+##after microprofile JWT is used by default since 2021.04.003
+#security.authentication.jwt.claims.access-controls-name=roles
+#security.authentication.jwt.claims.access-controls-array=false
+
+
+
+

See also JwtConfigProperties for details about configuration.

+
+
+
+
+

Authentication with JWT via OAuth

+
+
+

The authentication with JWT via OAuth (HTTP header), will happen via JwtAuthenticationFilter that is automatically added by devon4j-starter-security-jwt via JwtAutoConfiguration. +With the starter and auto-configuration we want to make it as easy as possible for you. +In case you would like to build a server app that e.g. wants to issue JWTs but does not allow authentication via JWT itself, you can use devon4j-security-jwt as dependency instead of the starter and do the spring config yourself (pick and choose from JwtAutoConfiguration).

+
+
+

To do this, you need to add the following changes in your BaseWebSecurityConfig:

+
+
+
+
  @Bean
+  public JwtAuthenticationFilter getJwtAuthenticationFilter() {
+    return new JwtAuthenticationFilter();
+  }
+
+  @Override
+  public void configure(HttpSecurity http) throws Exception {
+    // ...
+    // add this line to the end of this existing method
+    http.addFilterBefore(getJwtAuthenticationFilter(), UsernamePasswordAuthenticationFilter.class);
+  }
+
+
+
+
+
+

Login with Username and Password to get JWT

+
+
+

To allow a client to login with username and password to get a JWT for sub-sequent requests, you need to do the following changes in your BaseWebSecurityConfig:

+
+
+
+
  @Bean
+  public JwtLoginFilter getJwtLoginFilter() throws Exception {
+
+    JwtLoginFilter jwtLoginFilter = new JwtLoginFilter("/login");
+    jwtLoginFilter.setAuthenticationManager(authenticationManager());
+    jwtLoginFilter.setUserDetailsService(this.userDetailsService);
+    return jwtLoginFilter;
+  }
+
+  @Override
+  public void configure(HttpSecurity http) throws Exception {
+    // ...
+    // add this line to the end of this existing method
+    http.addFilterBefore(getJwtLoginFilter(), UsernamePasswordAuthenticationFilter.class);
+  }
+
+
+
+
+
+

Authentication with Kafka

+
+
+

Authentication with JWT and Kafka is explained in the Kafka guide.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-kafka-spring.html b/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-kafka-spring.html new file mode 100644 index 00000000..d765887d --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-kafka-spring.html @@ -0,0 +1,853 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Kafka

+
+
+

This guide explains how Spring Kafka is used in devonfw applications. It focuses on aspects which are special to devonfw. If you want to learn about spring-kafka in general, you should refer to Spring's reference documentation.

+
+
+

There is an example of simple Kafka implementation in the devon4j-kafka-employeeapp.

+
+
+

The devon4j-kafka library consists of:

+
+
+
    +
  • +

    Custom message processor with retry pattern

    +
  • +
  • +

    Monitoring support

    +
  • +
  • +

    Tracing support

    +
  • +
  • +

    Logging support

    +
  • +
  • +

    Configuration support for Kafka Producers, Consumers, brave tracer and message retry processing including defaults

    +
  • +
+
+
+

How to use?

+
+
+

To use devon4j-kafka you have to add required starter dependencies which is "starter-kafka-sender" or "starter-kafka-receiver" from devon4j. These 2 starters are responsible for taking care of the required spring configuration. If you only want to produce messages "starter-kafka-sender" is enough. For consuming messages you need "starter-kafka-receiver" which also includes "starter-kafka-sender".

+
+
+

To use devon4j-kafka message sender add the below dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-kafka-sender</artifactId>
+</dependency>
+
+
+
+

It includes the Tracer implementations from Spring cloud sleuth.

+
+
+

To use the devon4j-kafka message receiver configurations, loggers and message retry processor for processing message, add the below dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-kafka-receiver</artifactId>
+</dependency>
+
+
+
+
+
+

Property Parameters

+
+
+

As written before kafka-producer and listener-specific configuration is done via properties classes. These classes provide useful defaults, at a minimum the following parameters have to be configured:

+
+
+
+
messaging.kafka.common.bootstrap-servers=kafka-broker:9092
+messaging.kafka.consumer.group-id=<e.g. application name>
+messaging.kafka.listener.container.concurrency=<Number of listener threads for each listener container>
+
+
+
+

All the configuration beans for devon4j-kafka are annotated with @ConfigurationProperties and use common prefixes to read the property values from application.properties or application.yml.

+
+
+

Example:

+
+
+
+
 @Bean
+  @ConfigurationProperties(prefix = "messaging.kafka.producer")
+  public KafkaProducerProperties messageKafkaProducerProperties() {
+    return new KafkaProducerProperties();
+  }
+
+
+
+

For producer and consumer the prefixes are messaging.kafka.producer…​ and message.kafka.consumer…​ and for retry the prefix is messaging.retry…​

+
+ +
+

We use the same properties defined by Apache Kafka or Spring Kafka. They are simply "mapped" to the above prefixes to allow easy access from your application properties. The Javadoc provided in each of the devon4j-kafka property classes explains their use and what values have to be passed.

+
+ +
+
+
+

Naming convention for topics

+
+
+

For better management of several Kafka topics in your application portfolio we strongly advise introducing a naming scheme for your topics. The scheme may depend on the actual usage pattern of Kafka. For contexts where Kafka is used +in a 1-to-1-communication scheme (not publish/subscribe) the following schema has been proven useful in practice:

+
+
+
+
<application name>-<service name>-<version>-<service-operation>
+
+
+
+

To keep things easy and prevent problems we suggest to use only small letters, hyphens but no other special characters.

+
+
+
+
+

Send Messages

+
+
+

As mentioned above the 'starter-kafka-sender' is required to be added as a dependency to use MessageSender from Kafka.

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-kafka-sender</artifactId>
+</dependency>
+
+
+
+

The following example shows how to use MessageSender and its method to send messages to Kafka broker:

+
+
+

Example:

+
+
+
+
  @Inject
+  private MessageSender messageSender;
+  private ProducerRecord<K,V> producerRecord;
+
+  public void sendMessageToKafka(){
+  producerRecord=new ProducerRecord<>("topic-name","message");
+  messageSender.sendMessage(this.producerRecord);
+  //Alternative
+  messageSender.sendMessageAndWait(this.producerRecord,10);
+  }
+
+
+
+

There are multiple methods available from MessageSender of devon4j-kafka. The ProducerListener will log the message sent to the Kafka broker.

+
+
+
+
+

Receive Messages

+
+
+

To receive messages you have to define a listener. The listener is normally part of the service layer.

+
+
+
+Architecture for Kafka services +
+
Figure 1. Architecture for Kafka services
+
+
+

Import the following starter-kafka-receiver dependency to use the listener configurations and loggers from devon4j-kafka.

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-kafka-receiver</artifactId>
+</dependency>
+
+
+
+

The listener +is defined by implementing and annotating a method like in the following example:

+
+
+
+
  @KafkaListener(topics = "employeeapp-employee-v1-delete", groupId = "${messaging.kafka.consumer.groupId}", containerFactory = "kafkaListenerContainerFactory")
+  public void consumer(ConsumerRecord<Object, Object> consumerRecord, Acknowledgment acknowledgment) {
+  //user operation
+  //To acknowledge listener after processing
+  acknowledgement.acknowledge();
+  }
+
+
+
+

The group id can be mentioned in application.properties as listener properties.

+
+
+
+
messaging.kafka.consumer.groupId=default
+
+
+
+

If there are multiple topics and multiple listeners then we suggest to specify the topic names directly on each listener instead of reading them from the property file. +The container factory mentioned in the @KafkaListener is provided in the KafkaListenerContainerProperties.java to create a default container factory with acknowledgement.

+
+
+

The default ack-mode is manual_immediate. It can be overridden as shown in the example below:

+
+
+
+
messaging.kafka.listener.container.ackMode=<ack-mode>
+
+
+
+

The other ack-mode values can be referred from +here.

+
+
+
+
+

Retry

+
+
+

The retry pattern in devon4j-kafka is invoked when a particular exception (described by the user in the application.properties file) is thrown while processing the consumed message, and it is configured in the application.properties file. The general idea is to separate messages which could not be processed into dedicated retry-topics to allow fine control on how processing of the messages is retried and to not block newly arriving messages. +Let us see more about handling retry in the below topics.

+
+
+
+Retry pattern in devon4j-kafka +
+
+
+

Handling retry in devon4j-kafka

+
+

The retry pattern is included in the starter dependency of "starter-kafka-receiver".

+
+
+

The retryPattern method is used by calling the method processMessageWithRetry(ConsumerRecord<K, V> consumerRecord,MessageProcessor<K, V> processor). Please find the below Example:

+
+
+
+
@Inject
+private MessageRetryOperations<K, V> messageRetryOperations;
+@Inject
+private DeleteEmployeeMessageProcessor<K, V> deleteEmployeeMessageProcessor;
+@KafkaListener(topics = "employeeapp-employee-v1-delete", groupId = "${messaging.kafka.consumer.groupId}",containerFactory = "kafkaListenerContainerFactory")
+public void consumer(ConsumerRecord<K, V> consumerRecord, Acknowledgment acknowledgment) {
+this.messageRetryOperations.processMessageWithRetry(consumerRecord, this.deleteEmployeeMessageProcessor);
+// Acknowledge the listener.
+acknowledgment.acknowledge();
+}
+
+
+
+

The implementation for MessageProcessor from devon4j-kafka is required to provide the implementation to process the ConsumedRecord from Kafka broker. The implementation for MessageProcessor interface can look as below example:

+
+
+
+
import com.devonfw.module.kafka.common.messaging.retry.api.client.MessageProcessor;
+@Named
+public class DeleteEmployeeMessageProcessor<K, V> implements MessageProcessor<K, V> {
+ @Override
+  public void processMessage(ConsumerRecord<K, V> message) {
+  //process message
+  }
+}
+
+
+
+

It works as follows:

+
+
+
    +
  • +

    The application gets a message from the topic.

    +
  • +
  • +

    During the processing of the message an error occurs, the message will be written to the redelivery topic.

    +
  • +
  • +

    The message is acknowledged in the topic.

    +
  • +
  • +

    The message will be processed from the re-delivery topic after a delay.

    +
  • +
  • +

    Processing of the message fails again. It retries until the retry count is exhausted.

    +
  • +
  • +

    When the retry fails in all the retry then the message is logged and payload in the ProducerRecord is deleted for log +compaction which is explained below.

    +
  • +
+
+
+
+

Retry configuration and naming convention of redelivery topics.

+
+

The following properties should be added in the application.properties or application.yml file.

+
+
+

The retry pattern in devon4j-kafka is performed per topic of a message, so it is mandatory to specify the properties for each topic. The properties below are an example:

+
+
+
+
##Back off policy properties for employeeapp-employee-v1-delete
+messaging.retry.back-off-policy.retryReEnqueueDelay.employeeapp-employee-v1-delete=1000
+messaging.retry.back-off-policy.retryDelay.employeeapp-employee-v1-delete=600000
+messaging.retry.back-off-policy.retryDelayMultiplier.employeeapp-employee-v1-delete=1.0
+messaging.retry.back-off-policy.retryMaxDelay.employeeapp-employee-v1-delete=600000
+messaging.retry.back-off-policy.retryCount.employeeapp-employee-v1-delete=2
+
+##Retry policy properties for employeeapp-employee-v1-delete
+messaging.retry.retry-policy.retryPeriod.employeeapp-employee-v1-delete=1800
+messaging.retry.retry-policy.retryableExceptions.employeeapp-employee-v1-delete=<Class names of exceptions for which a retry should be performed>
+messaging.retry.retry-policy.retryableExceptionsTraverseCauses.employeeapp-employee-v1-delete=true
+
+##Back off policy properties for employeeapp-employee-v1-add
+messaging.retry.back-off-policy.retryReEnqueueDelay.employeeapp-employee-v1-add=1000
+messaging.retry.back-off-policy.retryDelay.employeeapp-employee-v1-add=600000
+messaging.retry.back-off-policy.retryDelayMultiplier.employeeapp-employee-v1-add=2.0
+messaging.retry.back-off-policy.retryMaxDelay.employeeapp-employee-v1-add=600000
+messaging.retry.back-off-policy.retryCount.employeeapp-employee-v1-add=4
+
+##Retry policy properties for employeeapp-employee-v1-add
+messaging.retry.retry-policy.retryPeriod.employeeapp-employee-v1-add=3000
+messaging.retry.retry-policy.retryableExceptions.employeeapp-employee-v1-add=<Class names of exceptions for which a retry should be performed>
+messaging.retry.retry-policy.retryableExceptionsTraverseCauses.employeeapp-employee-v1-add=true
+
+
+
+

As you can see in the above properties, the retry-policy and back-off-policy properties are repeated twice because there are 2 topics for which the retry should be performed with different values. The topic name has to be appended to the end of each attribute.

+
+
+

So, the retry will be performed for each topic according to their configuration values.

+
+
+

If you want to provide the same/default values for all topics, add default in place of the topic name in the above properties example.

+
+
+

For example,

+
+
+
+
##Default back off policy properties
+messaging.retry.back-off-policy.retryReEnqueueDelay.default=1000
+messaging.retry.back-off-policy.retryDelay.default=600000
+messaging.retry.back-off-policy.retryDelayMultiplier.default=1.0
+messaging.retry.back-off-policy.retryMaxDelay.default=600000
+messaging.retry.back-off-policy.retryCount.default=2
+
+##Default retry policy properties
+messaging.retry.retry-policy.retryPeriod.default=1800
+messaging.retry.retry-policy.retryableExceptions.default=<Class names of exceptions for which a retry should be performed>
+messaging.retry.retry-policy.retryableExceptionsTraverseCauses.default=true
+
+
+
+

By giving properties like the above, the same values will be used for all topics and the retry processing is the same for all of them.

+
+
+

All these above property values are mapped to the classes DefaultBackOffPolicyProperties.java and DefaultRetryPolicyProperties.java and configured by the class MessageDefaultRetryConfig.java.

+
+
+

The MessageRetryContext in devon kafka is used to perform the retry pattern with the properties from DefaultBackOffPolicyProperties and DefaultRetryPolicyProperties.

+
+
+

The 2 main properties of MessageRetryContext are nextRetry and retryUntil, which are Instant values calculated internally using the properties given in DefaultBackOffPolicyProperties and DefaultRetryPolicyProperties.

+
+
+

You may change the behavior of this date calculation by providing your own implementation classes for MessageBackOffPolicy.java and MessageRetryPolicy.java.

+
+
+

The naming convention for retry topic is the same topic name which you have given to publish the message and we add suffix -retry to it once it is consumed and given to process with retry.

+
+
+

If there is no topic found in the consumed record the default retry topic will be added which is default-message-retry.

+
+
+
+

Retry topics

+
+

Devon4j-kafka uses a separate retry topic for each topic where retries occur. By default this topic is named <topic name>-retry. You may change this behavior by providing your own implementation for DefaultKafkaRecordSupport which is a default implementation from devon4j-kafka for KafkaRecordSupport.

+
+
+

Devon4j-kafka enqueues a new message for each retry attempt. It is very important to configure your retry topics with log compaction enabled. More or less simplified, if log compaction is enabled Kafka keeps only one message per message key. Since each retry message has the same key, in fact only one message per retry attempt is stored. After the last retry attempt the message payload is removed from the message, so you do not keep unnecessary data in your topics.

+
+
+
+

Handling retry finally failed

+
+

By default, when the final retry attempt fails, the message is just logged and the payload of the ProducerRecord that was being processed by the retry pattern is deleted.

+
+
+

You can change this behavior by providing the implementation class for the interface MessageRetryHandler.java +which has two methods retryTimeout and retryFailedFinal.

+
+
+
+
+
+

Tracer

+
+
+

We leverage Spring Cloud Sleuth for tracing in devon4j-kafka. +This is used to trace the asynchronous process of Kafka producing and consuming. In an asynchronous process it is important to maintain an id which is the same for all related asynchronous steps. +devonfw uses its own correlation-id (UUID) to track the process, but devon4j-kafka additionally uses the Brave Tracer tracing protocol.

+
+
+

This is a part of both starter dependencies starter-kafka-receiver and starter-kafka-sender.

+
+
+

There are 2 important properties which will be automatically logged: trace-id and span-id. +The trace-id is the same for all related asynchronous processes and the span-id is unique for each asynchronous process.

+
+
+

How does devon4j-kafka handle tracing?

+
+

We inject the trace-id and span-id in to the ProducerRecord headers which comes to publish into the Kafka broker. +It’s injected in the headers with the key traceId for trace-id and spanId for span-id. +Along with these, the correlation-id(UUID) is also injected in the headers of record with the key correlationId.

+
+
+

So, when you consume record from Kafka broker, these values can be found in the consumed record’s headers with these keys.

+
+
+

So, it is very helpful to track the asynchronous process of consuming the messages.

+
+
+
+
+
+

Logging

+
+
+

devon4j-kafka provides multiple support classes to log the published message and the consumed message. +* The class ProducerLoggingListener which implements ProducerListener<K,V> from Spring Kafka uses to log the message as soon as it is published in the Kafka broker.

+
+
+
    +
  • +

    The aspect class MessageListenerLoggingAspect which is annotated with @Aspect and has a method logMessageprocessing which is annotated with @Around("@annotation(org.springframework.kafka.annotation.KafkaListener)&&args(kafkaRecord,..)") +is used to listen to the classes which is annotated with @KafkaListener and logs the message as soon as it is consumed.

    +
  • +
  • +

    The class MessageLoggingSupport has multiple methods to log different types of events like MessageReceived, MessageSent, MessageProcessed, MessageNotProcessed.

    +
  • +
  • +

    The class LoggingErrorHandler which implements ErrorHandler from spring-kafka which logs the message when an error occurred while consuming messages. You may change this behavior by creating your own implementation class for the ErrorHandler.

    +
  • +
+
+
+
+
+

Kafka health check using Spring Actuator

+
+
+

The spring config class MessageCommonConfig automatically provides a spring health indicator bean for kafka if +the property management.endpoint.health.enabled is set to true. The health indicator will check for all topics listed in messaging.kafka.health.topicsToCheck +if a leader is available. If this property is missing only the broker connection will be checked. The timeout for +the check (default 60s) may be changed via the property messaging.kafka.health.timeout. +If an application uses multiple broker(-clusters), a dedicated health indicator bean has to be +configured in the spring config for each broker(-cluster).

+
+
+

The properties for the devon kafka health check should be given like below example:

+
+
+
+
management.endpoint.health.enabled=<true or false>
+messaging.kafka.health.timeout=<the health check timeout seconds>
+messaging.kafka.health.topicsToCheck=employeeapp-employee-v1-delete,employeeapp-employee-v1-add
+
+
+
+

These properties are provided with default values except topicsToCheck, and the health check will only happen when the property management.endpoint.health.enabled is set to true.

+
+
+
+
+

Authentication

+
+
+

JSON Web Token (JWT)

+
+

devon4j-kafka supports authentication via JSON Web Tokens (JWT) out-of-the-box. +To use it add a dependency to the devon4j-starter-security-jwt:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-security-jwt</artifactId>
+</dependency>
+
+
+
+

The authentication via JWT needs some configuration, e.g. a keystore to verify the token signature. This is explained in the JWT documentation.

+
+
+

To secure a message listener with jwt add the @JwtAuthentication:

+
+
+
+
  @JwtAuthentication
+  @KafkaListener(topics = "employeeapp-employee-v1-delete", groupId = "${messaging.kafka.consumer.groupId}")
+  public void consumer(ConsumerRecord<K, V> consumerRecord, Acknowledgment acknowledgment) {
+...
+    }
+  }
+
+
+
+

With this annotation in-place each message will be checked for a valid JWT in a message header with the name Authorization. If a valid token is found the Spring Security context will be initialized with the user roles and "normal" authorization e.g. with @RolesAllowed may be used. This is also demonstrated in the kafka sample application.

+
+
+
+
+
+

Using Kafka for internal parallel processing

+
+
+

Apart from the use of Kafka as "communication channel", it is sometimes helpful to use Kafka internally to do parallel processing:

+
+
+
+Architecture for internal parallel processing with Kafka +
+
Figure 2. Architecture for internal parallel processing with Kafka
+
+
+

This examples shows a payment service which allows to submit a list of receipt IDs for payment. +We assume that the payment itself takes a long time and should be done asynchronously and in parallel. +The general idea is to put a message for each receipt to pay into a topic. This is done in the use case implementation in a first step, if a rest call arrives. +Also part of the use case is a listener which consumes the messages. For each message (e.g. payment to do) a processor is called, which actually does the payment via the use case. +Since Kafka supports concurrency for the listeners easily the payment will also be done in parallel. +All features of devon4j-kafka, like retry handling could also be used.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-querydsl-spring.html b/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-querydsl-spring.html new file mode 100644 index 00000000..a270c77e --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-querydsl-spring.html @@ -0,0 +1,325 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==QueryDSL in devon4j-spring

+
+
+

To implement dynamic queries, devon4j suggests the use of QueryDSL. QueryDSL uses metaclasses generated from entity classes at build time. devon4j-spring provides a way to use QueryDSL without the need for code generation. For this, devon4j provides the interface DefaultRepository that your repository needs to extend and the QueryUtil helper class to build your queries.

+
+
+

Example

+
+
+

Here is an example for using QueryDSL in devon4j-spring:

+
+
+
+
  public List<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    DishEntity dish = Alias.alias(DishEntity.class);
+    JPAQuery<DishEntity> query = newDslQuery(dish);
+    Range<BigDecimal> priceRange = criteria.getPriceRange();
+    if (priceRange != null) {
+      BigDecimal min = priceRange.getMin();
+      if (min != null) {
+        query.where(Alias.$(dish.getPrice()).ge(min));
+      }
+      BigDecimal max = priceRange.getMax();
+      if (max != null) {
+        query.where(Alias.$(dish.getPrice()).le(max));
+      }
+    }
+    String name = criteria.getName();
+    if ((name != null) && (!name.isEmpty())) {
+      // query.where(Alias.$(dish.getName()).eq(name));
+      QueryUtil.get().whereString(query, Alias.$(dish.getName()), name, criteria.getNameOption());
+    }
+    return query.fetch();
+  }
+
+
+
+
+
+

Pagination

+
+
+

Pagination for dynamic or generally handwritten queries is provided in devon4j-spring via QueryUtil.findPaginated(…​):

+
+
+
+
boolean determineTotalHitCount = ...;
+return QueryUtil.get().findPaginated(criteria.getPageable(), query, determineTotalHitCount);
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-service-client-spring.html b/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-service-client-spring.html new file mode 100644 index 00000000..54a3103f --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-service-client-spring.html @@ -0,0 +1,669 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Service Client in devon4j-spring

+
+
+

This guide is about consuming (calling) services from other applications (micro-services) in devon4j-spring.

+
+
+

Dependency

+
+
+

You need to add (at least one of) these dependencies to your application:

+
+
+
+
<!-- Starter for asynchronously consuming REST services via Java HTTP Client (Java11+) -->
+<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-http-client-rest-async</artifactId>
+</dependency>
+<!-- Starter for synchronously consuming REST services via Java HTTP Client (Java11+) -->
+<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-http-client-rest-sync</artifactId>
+</dependency>
+<!-- Starter for synchronous consuming REST services via Apache CXF (Java8+)
+  NOTE: This is an alternative to devon4j-starter-http-client-rest-sync
+  -->
+<!--
+<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-cxf-client-rest</artifactId>
+</dependency>
+-->
+<!-- Starter for synchronous consuming SOAP services via Apache CXF (Java8+) -->
+<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-cxf-client-ws</artifactId>
+</dependency>
+
+
+
+
+
+

Features

+
+
+

When invoking a service, you need to consider many cross-cutting aspects. You might not think about them in the very first place and you do not want to redundantly implement them multiple times. Therefore, you should consider using this approach. The following sub-sections list the covered features and aspects:

+
+
+

Simple usage

+
+

Assuming you already have a Java interface MyService of the service you want to invoke:

+
+
+
+
package com.company.department.foo.mycomponent.service.api.rest;
+...
+
+@Path("/myservice")
+public interface MyService extends RestService {
+
+  @POST
+  @Path("/getresult")
+  MyResult getResult(MyArgs myArgs);
+
+  @DELETE
+  @Path("/entity/{id}")
+  void deleteEntity(@PathParam("id") long id);
+}
+
+
+
+

Then, all you need to do is this:

+
+
+
+
@Named
+public class UcMyUseCaseImpl extends MyUseCaseBase implements UcMyUseCase {
+  @Inject
+  private ServiceClientFactory serviceClientFactory;
+
+  ...
+  private void callSynchronous(MyArgs myArgs) {
+    MyService myService = this.serviceClientFactory.create(MyService.class);
+    // call of service over the wire, synchronously blocking until result is received or error occurred
+    MyResult myResult = myService.myMethod(myArgs);
+    handleResult(myResult);
+  }
+
+  private void callAsynchronous(MyArgs myArgs) {
+    AsyncServiceClient<MyService> client = this.serviceClientFactory.createAsync(MyService.class);
+    // call of service over the wire, will return when request is sent and invokes handleResult asynchronously
+    client.call(client.get().myMethod(myArgs), this::handleResult);
+  }
+
+  private void handleResult(MyResult myResult) {
+    ...
+  }
+  ...
+}
+
+
+
+

As you can see, both synchronous and asynchronous invocation of a service is very simple and type-safe. However, it is also very flexible and powerful (see following features). The actual call of myMethod will technically call the remote service over the wire (e.g. via HTTP), including marshalling the arguments (e.g. converting myArgs to JSON) and unmarshalling the result (e.g. converting the received JSON to myResult).

+
+
+

Asynchronous Invocation of void Methods

+
+

If you want to call a service method with void as the return type, the type-safe call method cannot be used as void methods do not return a result. Therefore you can use the callVoid method as following:

+
+
+
+
  private void callAsynchronousVoid(long id) {
+    AsyncServiceClient<MyService> client = this.serviceClientFactory.createAsync(MyService.class);
+    // call of service over the wire, will return when request is sent and invokes resultHandler asynchronously
+    Consumer<Void> resultHandler = r -> { System.out.println("Response received"); };
+    client.callVoid(() -> { client.get().deleteEntity(id);}, resultHandler);
+  }
+
+
+
+

You may also provide null as resultHandler for "fire and forget". However, this will lead to the result being ignored, so even in the case of an error you will not be notified.

+
+
+
+
+

Configuration

+
+

This solution allows a very flexible configuration on the following levels:

+
+
+
    +
  1. +

    Global configuration (defaults)

    +
  2. +
  3. +

    Configuration per remote service application (microservice)

    +
  4. +
  5. +

    Configuration per invocation.

    +
  6. +
+
+
+

A configuration on a deeper level (e.g. 3) overrides the configuration from a higher level (e.g. 1).

+
+
+

The configuration on Level 1 and 2 are configured via application.properties +(see configuration guide). +For Level 1, the prefix service.client.default. is used for properties. +Further, for level 2, the prefix service.client.app.«application». is used where «application» is the +technical name of the application providing the service. This name will automatically be derived from +the java package of the service interface (e.g. foo in MyService interface before) following our +packaging conventions. +In case these conventions are not met, it will fall back to the fully qualified name of the service interface.

+
+
+

Configuration on Level 3 has to be provided as a Map argument to the method +ServiceClientFactory.create(Class<S> serviceInterface, Map<String, String> config). +The keys of this Map will not use prefixes (such as the ones above). For common configuration +parameters, a type-safe builder is offered to create such a map via ServiceClientConfigBuilder. +E.g. for testing, you may want to do:

+
+
+
+
this.serviceClientFactory.create(MyService.class,
+  new ServiceClientConfigBuilder().authBasic().userLogin(login).userPassword(password).buildMap());
+
+
+
+

Here is an example of a configuration block for your application.properties:

+
+
+
+
service.client.default.url=https://api.company.com/services/${type}
+service.client.default.timeout.connection=120
+service.client.default.timeout.response=3600
+
+service.client.app.bar.url=https://bar.company.com:8080/services/rest
+service.client.app.bar.auth=basic
+service.client.app.bar.user.login=user4711
+service.client.app.bar.user.password=ENC(jd5ZREpBqxuN9ok0IhnXabgw7V3EoG2p)
+
+service.client.app.foo.url=https://foo.company.com:8443/services/rest
+##authForward: simply forward Authorization header (e.g. with JWT) to remote service
+service.client.app.foo.auth=authForward
+
+
+
+
+

Service Discovery

+
+

You do not want to hardwire service URLs in your code, right? Therefore, different strategies might apply +to discover the URL of the invoked service. This is done internally by an implementation of the interface +ServiceDiscoverer. The default implementation simply reads the base URL from the configuration. +You can simply add this to your application.properties as in the above configuration example.

+
+
+

Assuming your service interface has the fully qualified name +com.company.department.foo.mycomponent.service.api.rest.MyService, then the URL would be resolved to +https://foo.company.com:8443/services/rest, as the «application» is foo.

+
+
+

Additionally, the URL might use the following variables that will automatically be resolved:

+
+
+
    +
  • +

    ${app} to «application» (useful for default URL)

    +
  • +
  • +

    ${type} to the type of the service. E.g. rest in case of a REST service and ws for a SOAP service.

    +
  • +
  • +

    ${local.server.port} for the port of your current Java servlet container running the JVM. Should only be used for testing with spring-boot random port mechanism (technically spring cannot resolve this variable, but we do it for you here).

    +
  • +
+
+
+

Therefore, the default URL may also be configured as:

+
+
+
+
service.client.default.url=https://api.company.com/${app}/services/${type}
+
+
+
+

As you can use any implementation of ServiceDiscoverer, you can also easily use eureka (or anything else) instead to discover your services. +However, we recommend to use istio instead, as described below.

+
+
+
+

Headers

+
+

A very common demand is to tweak (HTTP) headers in the request to invoke the service. May it be for security (authentication data) or for other cross-cutting concerns (such as the Correlation ID). This is done internally by implementations of the interface ServiceHeaderCustomizer. +We already provide several implementations such as:

+
+
+
    +
  • +

    ServiceHeaderCustomizerBasicAuth for basic authentication (auth=basic).

    +
  • +
  • +

    ServiceHeaderCustomizerOAuth for OAuth: passes a security token from security context such as a JWT via OAuth (auth=oauth).

    +
  • +
  • +

    ServiceHeaderCustomizerAuthForward forwards the Authorization HTTP header from the running request to the request to the remote service as is (auth=authForward). Be careful to avoid security pitfalls by misconfiguring this feature, as it may also contain sensitive credentials (e.g. basic auth) to the remote service. Never use as default.

    +
  • +
  • +

    ServiceHeaderCustomizerCorrelationId passes the Correlation ID to the service request.

    +
  • +
+
+
+

Additionally, you can add further custom implementations of ServiceHeaderCustomizer for your individual requirements and additional headers.

+
+
+
+

Timeouts

+
+

You can configure timeouts in a very flexible way. First of all, you can configure timeouts to establish the connection (timeout.connection) and to wait for the response (timeout.response) separately. These timeouts can be configured on all three levels as described in the configuration section above.

+
+
+
+

Error Handling

+
+

Whilst invoking a remote service, an error may occur. This solution will automatically handle such errors and map them to a higher level ServiceInvocationFailedException. In general, we separate two different types of errors:

+
+
+
    +
  • +

    Network error
    +In such a case (host not found, connection refused, time out, etc.), there is not even a response from the server. However, instead of a low-level exception you will get a wrapped ServiceInvocationFailedException (with code ServiceInvoke) with a readable message containing the service that could not be invoked.

    +
  • +
  • +

    Service error
    +In case the service failed on the server-side, the error result will be parsed and thrown as a ServiceInvocationFailedException with the received message and code.

    +
  • +
+
+
+

This allows to catch and handle errors when a service-invocation failed. You can even distinguish business errors from the server-side from technical errors and implement retry strategies or the like. +Further, the created exception contains detailed contextual information about the service that failed (service interface class, method, URL), which makes it much easier to trace down errors. Here is an example from our tests:

+
+
+
+
While invoking the service com.devonfw.test.app.myexample.service.api.rest.MyExampleRestService#businessError[http://localhost:50178/app/services/rest/my-example/v1/business-error] the following error occurred: Test of business error. Probably the service is temporary unavailable. Please try again later. If the problem persists contact your system administrator.
+2f43b03e-685b-45c0-9aae-23ff4b220c85:BusinessErrorCode
+
+
+
+

You may even provide your own implementation of ServiceClientErrorFactory instead to provide an own exception class for this purpose.

+
+
+

Handling Errors

+
+

In case of a synchronous service invocation, an error will be immediately thrown so you can surround the call with a regular try-catch block:

+
+
+
+
  private void callSynchronous(MyArgs myArgs) {
+    MyService myService = this.serviceClientFactory.create(MyService.class);
+    // call of service over the wire, synchronously blocking until result is received or error occurred
+    try {
+      MyResult myResult = myService.myMethod(myArgs);
+      handleResult(myResult);
+    } catch (ServiceInvocationFailedException e) {
+      if (e.isTechnical()) {
+        handleTechnicalError(e);
+      } else {
+        // error code you defined in the exception on the server side of the service
+        String errorCode = e.getCode();
+        handleBusinessError(e, errorCode);
+      }
+    } catch (Throwable e) { // you may not handle this explicitly here...
+      handleTechnicalError(e);
+    }
+  }
+
+
+
+

If you are using asynchronous service invocation, an error can occur in a separate thread. Therefore, you may and should define a custom error handler:

+
+
+
+
  private void callAsynchronous(MyArgs myArgs) {
+    AsyncServiceClient<MyService> client = this.serviceClientFactory.createAsync(MyService.class);
+    Consumer<Throwable> errorHandler = this::handleError;
+    client.setErrorHandler(errorHandler);
+    // call of service over the wire, will return when request is sent and invokes handleResult asynchronously
+    client.call(client.get().myMethod(myArgs), this::handleResult);
+  }
+
+  private void handleError(Throwable error) {
+    ...
+  }
+}
+
+
+
+

The error handler consumes Throwable, and not only RuntimeException, so you can get notified even in case of an unexpected OutOfMemoryError, NoClassDefFoundError, or other technical problems. Please note that the error handler may also be called from the thread calling the service (e.g. if already creating the request fails). The default error handler used if no custom handler is set will only log the error and do nothing else.

+
+
+
+
+

Logging

+
+

By default, this solution will log all invocations including the URL of the invoked service, success or error status flag and the duration in seconds (with decimal nano precision as available). Therefore, you can easily monitor the status and performance of the service invocations. Here is an example from our tests:

+
+
+
+
Invoking service com.devonfw.test.app.myexample.service.api.rest.MyExampleRestService#greet[http://localhost:50178/app/services/rest/my-example/v1/greet/John%20Doe%20%26%20%3F%23] took PT20.309756622S (20309756622ns) and succeded with status 200.
+
+
+
+
+

Resilience

+
+

Resilience adds a lot of complexity, which typically means that addressing this here would most probably result in not being up-to-date and not meeting all requirements. Therefore, we recommend something completely different: the sidecar approach (based on sidecar pattern). This means that you use a generic proxy app that runs as a separate process on the same host, VM, or container of your actual application. Then, in your app, you call the service via the sidecar proxy on localhost (service discovery URL is e.g. http://localhost:8081/${app}/services/${type}) that then acts as proxy to the actual remote service. Now aspects such as resilience with circuit breaking and the actual service discovery can be configured in the sidecar proxy app, independent of your actual application. Therefore, you can even share and reuse configuration and experience with such a sidecar proxy app even across different technologies (Java, .NET/C#, Node.JS, etc.). Further, you do not pollute the technology stack of your actual app with the infrastructure for resilience, throttling, etc. and can update the app and the sidecar independently when security-fixes are available.

+
+
+

Various implementations of such sidecar proxy apps are available as free open source software. +Our recommendation in devonfw is to use istio. This not only provides such a side-car, but also an entire management solution for service-mesh, making administration and maintenance much easier. Platforms like OpenShift support this out of the box.

+
+
+

However, if you are looking for details about side-car implementations for services, you can have a look at the following links:

+
+
+ +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-spring-configuration.html b/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-spring-configuration.html new file mode 100644 index 00000000..b6573656 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-spring-configuration.html @@ -0,0 +1,601 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Configuration

+
+
+

Internal Application Configuration

+
+
+

There usually is a main configuration registered with main Spring Boot App, but differing configurations to support automated test of the application can be defined using profiles (not detailed in this guide).

+
+
+

Spring Boot Application

+
+

For a complete documentation, see the Spring Boot Reference Guide.

+
+
+

With spring-boot you provide a simple main class (also called starter class) like this: +com.devonfw.mtsj.application

+
+
+
+
@SpringBootApplication(exclude = { EndpointAutoConfiguration.class })
+@EntityScan(basePackages = { "com.devonfw.mtsj.application" }, basePackageClasses = { AdvancedRevisionEntity.class })
+@EnableGlobalMethodSecurity(jsr250Enabled = true)
+@ComponentScan(basePackages = { "com.devonfw.mtsj.application.general", "com.devonfw.mtsj.application" })
+public class SpringBootApp {
+
+  /**
+   * Entry point for spring-boot based app
+   *
+   * @param args - arguments
+   */
+  public static void main(String[] args) {
+
+    SpringApplication.run(SpringBootApp.class, args);
+  }
+}
+
+
+
+

In a devonfw application this main class is always located in the <basepackage> of the application package namespace (see package-conventions). This is because a spring boot application will automatically do a classpath scan for components (spring-beans) and entities in the package where the application main class is located including all sub-packages. You can use the @ComponentScan and @EntityScan annotations to customize this behaviour.

+
+
+

If you want to map spring configuration properties into your custom code please see configuration mapping.

+
+
+
+

Standard beans configuration

+
+

For basic bean configuration we rely on spring boot using mainly configuration classes and only occasionally XML configuration files. Some key principles to understand Spring Boot auto-configuration features:

+
+
+
    +
  • +

    Spring Boot auto-configuration attempts to automatically configure your Spring application based on the jar dependencies and annotated components found in your source code.

    +
  • +
  • +

    Auto-configuration is non-invasive, at any point you can start to define your own configuration to replace specific parts of the auto-configuration by redefining your identically named bean (see also exclude attribute of @SpringBootApplication in example code above).

    +
  • +
+
+
+

Beans are configured via annotations in your java code (see dependency-injection).

+
+
+

For technical configuration you will typically write additional spring config classes annotated with @Configuration that provide bean implementations via methods annotated with @Bean. See spring @Bean documentation for further details. Like in XML you can also use @Import to make a @Configuration class include other configurations.

+
+
+

More specific configuration files (as required) reside in an adequately named subfolder of:

+
+
+

src/main/resources/app

+
+
+
+

BeanMapper Configuration

+
+

In case you are still using dozer, you will find further details in bean-mapper configuration.

+
+
+
+

Security configuration

+
+

The abstract base class BaseWebSecurityConfig should be extended to configure web application security thoroughly. +A basic and secure configuration is provided which can be overridden or extended by subclasses. +Subclasses must use the @Profile annotation to further discriminate between beans used in production and testing scenarios. See the following example:

+
+
+
How to extend BaseWebSecurityConfig for Production and Test
+
+
@Configuration
+@EnableWebSecurity
+@Profile(SpringProfileConstants.JUNIT)
+public class TestWebSecurityConfig extends BaseWebSecurityConfig {...}
+
+@Configuration
+@EnableWebSecurity
+@Profile(SpringProfileConstants.NOT_JUNIT)
+public class WebSecurityConfig extends BaseWebSecurityConfig {...}
+
+
+ +
+
+

WebSocket configuration

+
+

A websocket endpoint is configured within the business package as a Spring configuration class. The annotation @EnableWebSocketMessageBroker makes Spring Boot register this endpoint.

+
+
+
+
package your.path.to.the.websocket.config;
+...
+@Configuration
+@EnableWebSocketMessageBroker
+public class WebSocketConfig extends AbstractWebSocketMessageBrokerConfigurer {
+...
+
+
+
+
+
+
+

External Application Configuration

+
+
+

application.properties files

+
+

Here is a list of common properties provided by the Spring framework.

+
+
+

For a general understanding of how spring-boot is loading and bootstrapping your application.properties see spring-boot external configuration.

+
+
+

The following properties files are used in devonfw application:

+
+
+
    +
  • +

    src/main/resources/application.properties providing a default configuration - bundled and deployed with the application package. It further acts as a template to derive a tailored minimal environment-specific configuration.

    +
  • +
  • +

    src/main/resources/config/application.properties providing additional properties only used at development time (for all local deployment scenarios). This property file is excluded from all packaging.

    +
  • +
  • +

    src/test/resources/config/application.properties providing additional properties only used for testing (JUnits based on spring test).

    +
  • +
+
+
+

For other environments where the software gets deployed such as test, acceptance and production you need to provide a tailored copy of application.properties. The location depends on the deployment strategy:

+
+
+
    +
  • +

    standalone run-able Spring Boot App using embedded tomcat: config/application.properties under the installation directory of the spring boot application.

    +
  • +
  • +

    dedicated tomcat (one tomcat per app): $CATALINA_BASE/lib/config/application.properties

    +
  • +
  • +

    tomcat serving a number of apps (requires expanding the wars): $CATALINA_BASE/webapps/<app>/WEB-INF/classes/config

    +
  • +
+
+
+

In this application.properties you only define the minimum properties that are environment specific and inherit everything else from the bundled src/main/resources/application.properties. In any case, make very sure that the classloader will find the file.

+
+
+
+

Database Configuration

+
+

The configuration for spring and Hibernate is already provided by devonfw in our sample application and the application template. So you only need to worry about a few things to customize.

+
+
+

Database System and Access

+
+

Obviously you need to configure which type of database you want to use as well as the location and credentials to access it. The defaults are configured in application.properties that is bundled and deployed with the release of the software. The files should therefore contain the properties as in the given example:

+
+
+
+
  database.url=jdbc:postgresql://database.enterprise.com/app
+  database.user.login=appuser01
+  database.user.password=************
+  database.hibernate.dialect = org.hibernate.dialect.PostgreSQLDialect
+  database.hibernate.hbm2ddl.auto=validate
+
+
+
+

For further details about database.hibernate.hbm2ddl.auto please see here. For production and acceptance environments we use the value validate that should be set as default. In case you want to use Oracle RDBMS you can find additional hints here.

+
+
+

If your application supports multiple database types, set spring.profiles.active=XXX in src/main/resources/config/application.properties to choose the database of your choice. Also, one has to set all the active spring profiles in this application.properties and not in any of the other application.properties.

+
+
+
+

Database Logging

+
+

Add the following properties to application.properties to enable logging of database queries for debugging purposes.

+
+
+
+
spring.jpa.properties.hibernate.show_sql=true
+spring.jpa.properties.hibernate.use_sql_comments=true
+spring.jpa.properties.hibernate.format_sql=true
+
+
+
+
+
+
+
+

Security

+
+
+

Password Encryption

+
+

In order to support encrypted passwords in spring-boot application.properties all you need to do is to add jasypt-spring-boot as dependency in your pom.xml (please check for recent version here):

+
+
+
+
<dependency>
+  <groupId>com.github.ulisesbocchio</groupId>
+  <artifactId>jasypt-spring-boot-starter</artifactId>
+  <version>3.0.3</version>
+</dependency>
+
+
+
+

This will smoothly integrate jasypt into your spring-boot application. Read this HOWTO to learn how to encrypt and decrypt passwords using jasypt.

+
+
+

Next, we give a simple example of how to encrypt and configure a secret value. +We use the algorithm PBEWITHHMACSHA512ANDAES_256 that provides strong encryption and is the default of jasypt-spring-boot-starter. +However, different algorithms can be used if preferred (e.g. PBEWITHMD5ANDTRIPLEDES).

+
+
+
+
java -cp ${M2_REPO}/org/jasypt/jasypt/1.9.3/jasypt-1.9.3.jar org.jasypt.intf.cli.JasyptPBEStringEncryptionCLI password=masterpassword algorithm=PBEWITHHMACSHA512ANDAES_256 input=secret ivGeneratorClassName=org.jasypt.iv.RandomIvGenerator
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.5+10
+
+
+
+----ARGUMENTS-------------------
+
+input: secret
+password: masterpassword
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+PoUxkNjY2juQMCyPu6ic5KJy1XfK+bX9vu2/mPj3pmcO4iydG6mhgZRZSw50z/oC
+
+
+
+

Of course the master-password (masterpassword) and the actual password to encrypt (secret) are just examples. +Please replace them with reasonable strong passwords for your environment. +Further, if you are using devonfw-ide you can make your life much easier and just type:

+
+
+
+
devon jasypt encrypt
+
+
+
+

See jasypt commandlet for details.

+
+
+

Now the entire line after the OUTPUT block is your encrypted secret. +It even contains some random salt so that multiple encryption invocations with the same parameters (ARGUMENTS) will produce a different OUTPUT.

+
+
+

The master-password can be configured on your target environment via the property jasypt.encryptor.password. As system properties given on the command-line are visible in the process list, we recommend to use a config/application.yml file only for this purpose (as we recommended to use application.properties for regular configs):

+
+
+
+
jasypt:
+    encryptor:
+        password: masterpassword
+
+
+
+

Again masterpassword is just an example that you replace with your actual master password. +Now you are able to put encrypted passwords into your application.properties and specify the algorithm.

+
+
+
+
spring.datasource.password=ENC(PoUxkNjY2juQMCyPu6ic5KJy1XfK+bX9vu2/mPj3pmcO4iydG6mhgZRZSw50z/oC)
+jasypt.encryptor.algorithm=PBEWITHHMACSHA512ANDAES_256
+
+
+
+

This application.properties file can be version controlled (git-ops) and without knowing the masterpassword nobody is able to decrypt this to get the actual secret back.

+
+
+

To prevent jasypt from throwing an exception in dev or test scenarios you can simply put this in your local config (src/main/config/application.properties and same for test, see above for details):

+
+
+
+
jasypt.encryptor.password=none
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-spring-testing.html b/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-spring-testing.html new file mode 100644 index 00000000..d13df6ad --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/spring/guide-spring-testing.html @@ -0,0 +1,425 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Testing

+
+
+

Implementation

+
+
+

Module Test

+
+

In devon4j you can extend the abstract class ModuleTest to basically get access to assertions. In order to test classes embedded in dependencies and external services one needs to provide mocks for that. As the technology stack recommends we use the Mockito framework to offer this functionality. The following example shows how to implement Mockito into a JUnit test.

+
+
+
+
import static org.mockito.Mockito.when;
+import static org.mockito.Mockito.mock;
+...
+
+public class StaffmanagementImplTest extends ModuleTest {
+  @Rule
+  public MockitoRule rule = MockitoJUnit.rule();
+
+  @Test
+  public void testFindStaffMember() {
+  ...}
+}
+
+
+
+

Note that the test class does not use the @SpringApplicationConfiguration annotation. In a module test one does not use the whole application. +The JUnit rule is the best solution to use in order to get all needed functionality of Mockito. Static imports are a convenient option to enhance readability within Mockito tests. +You can define mocks with the @Mock annotation or the mock(*.class) call. To inject the mocked objects into your class under test you can use the @InjectMocks annotation. This automatically uses the setters of StaffmanagementImpl to inject the defined mocks into the class under test (CUT) when there is a setter available. In this case the beanMapper and the staffMemberDao are injected. Of course it is possible to do this manually if you need more control.

+
+
+
+
  @Mock
+  private BeanMapper beanMapper;
+  @Mock
+  private StaffMemberEntity staffMemberEntity;
+  @Mock
+  private StaffMemberEto staffMemberEto;
+  @Mock
+  private StaffMemberDao staffMemberDao;
+  @InjectMocks
+  StaffmanagementImpl staffmanagementImpl = new StaffmanagementImpl();
+
+
+
+

The mocked objects do not provide any functionality at the time being. To define what happens on a method call on a mocked dependency in the CUT one can use when(condition).thenReturn(result). In this case we want to test findStaffMember(Long id) in the StaffmanagementImpl.

+
+
+
+
public StaffMemberEto findStaffMember(Long id) {
+  return getBeanMapper().map(getStaffMemberDao().find(id), StaffMemberEto.class);
+}
+
+
+
+

In this simple example one has to stub two calls on the CUT as you can see below. For example the method call of the CUT staffMemberDao.find(id) is stubbed for returning a mock object staffMemberEntity that is also defined as mock.

+
+
+
+

Subsystem Test

+
+

devon4j provides a simple test infrastructure to aid with the implementation of subsystem tests. It becomes available by simply subclassing AbstractRestServiceTest.java.

+
+
+
+
//given
+long id = 1L;
+Class<StaffMemberEto> targetClass = StaffMemberEto.class;
+when(this.staffMemberDao.find(id)).thenReturn(this.staffMemberEntity);
+when(this.beanMapper.map(this.staffMemberEntity, targetClass)).thenReturn(this.staffMemberEto);
+
+//when
+StaffMemberEto resultEto = this.staffmanagementImpl.findStaffMember(id);
+
+//then
+assertThat(resultEto).isNotNull();
+assertThat(resultEto).isEqualTo(this.staffMemberEto);
+
+
+
+

After the test method call one can verify the expected results. Mockito can check whether a mocked method call was indeed called. This can be done using Mockito verify. Note that it does not generate any value if you check for method calls that are needed to reach the asserted result anyway. Call verification can be useful e.g. when you want to assure that statistics are written out without actually testing them.

+
+
+
+
+
+

Configuration

+
+
+

Configure Test Specific Beans

+
+

Sometimes it can become handy to provide other or differently configured bean implementations via CDI than those available in production. For example, when creating beans using @Bean-annotated methods they are usually configured within those methods. WebSecurityBeansConfig shows an example of such methods.

+
+
+
+
@Configuration
+public class WebSecurityBeansConfig {
+  //...
+  @Bean
+  public AccessControlSchemaProvider accessControlSchemaProvider() {
+    // actually no additional configuration is shown here
+    return new AccessControlSchemaProviderImpl();
+  }
+  //...
+}
+
+
+
+

AccessControlSchemaProvider allows to programmatically access data defined in some XML file, e.g. access-control-schema.xml. Now, one can imagine that it would be helpful if AccessControlSchemaProvider would point to some other file than the default within a test class. That file could provide content that differs from the default. +The question is: how can I change resource path of AccessControlSchemaProviderImpl within a test?

+
+
+

One very helpful solution is to use static inner classes. +Static inner classes can contain @Bean -annotated methods, and by placing them in the classes parameter in @SpringBootTest(classes = { /* place class here*/ }) annotation the beans returned by these methods are placed in the application context during test execution. Combining this feature with inheritance allows to override methods defined in other configuration classes as shown in the following listing where TempWebSecurityConfig extends WebSecurityBeansConfig. This relationship allows to override public AccessControlSchemaProvider accessControlSchemaProvider(). Here we are able to configure the instance of type AccessControlSchemaProviderImpl before returning it (and, of course, we could also have used a completely different implementation of the AccessControlSchemaProvider interface). By overriding the method the implementation of the super class is ignored, hence, only the new implementation is called at runtime. Other methods defined in WebSecurityBeansConfig which are not overridden by the subclass are still dispatched to WebSecurityBeansConfig.

+
+
+
+
//... Other testing related annotations
+@SpringBootTest(classes = { TempWebSecurityConfig.class })
+public class SomeTestClass {
+
+  public static class TempWebSecurityConfig extends WebSecurityBeansConfig {
+
+    @Override
+    @Bean
+    public AccessControlSchemaProvider accessControlSchemaProvider() {
+
+      ClassPathResource resource = new ClassPathResource(locationPrefix + "access-control-schema3.xml");
+      AccessControlSchemaProviderImpl accessControlSchemaProvider = new AccessControlSchemaProviderImpl();
+      accessControlSchemaProvider.setAccessControlSchema(resource);
+      return accessControlSchemaProvider;
+    }
+  }
+}
+
+
+
+

The following chapter of the Spring framework documentation explains this issue, but uses a slightly different way to obtain the configuration.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-components.html b/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-components.html new file mode 100644 index 00000000..710c9790 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-components.html @@ -0,0 +1,272 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Using Business components

+
+
+

Describe how to access other business components and how to deal with entities shared between components

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-crud.html b/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-crud.html new file mode 100644 index 00000000..c92c99a5 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-crud.html @@ -0,0 +1,1543 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

==Creating a CRUD functionality for an entity

+
+
+

In this tutorial we are going to create an entity for the application and provide services for Create, Read, Update and Delete instances of that entity.

+
+
+

It is important to mention devonfw packaging convention. devonfw uses a strict packaging convention to map technical layers and business components to the code. devonfw uses the following Java-Package schema:

+
+
+
+
<basepackage>.<component>.<layer>.<scope>[.<detail>]*
+
+
+
+

In our example application we find the different classes in this packages:

+
+
+
    +
  • +

    Entity and DAO: com.devonfw.application.mtsj.ordermanagement.dataaccess.api[.<detail>]

    +
  • +
  • +

    Logic: com.devonfw.application.mtsj.ordermanagement.logic[.<detail>]

    +
  • +
  • +

    Services: com.devonfw.application.mtsj.ordermanagement.service[.<detail>]

    +
  • +
+
+
+

For more information you can consult packaging devonfw documentation

+
+
+

Persistence provider configuration

+
+ +
+
+
+

Create the JPA entity

+
+
+

We are going to create a Order entity. First, we are going to create the Order entity interface. This will be reused between all the objects involved with order on the different layers.

+
+
+
+
import com.devonfw.application.mtsj.general.common.api.ApplicationEntity;
+
+public interface Order extends ApplicationEntity {
+
+  public Long getBookingId();
+
+  public void setBookingId(Long bookingId);
+
+  public Long getInvitedGuestId();
+
+  public void setInvitedGuestId(Long invitedGuestId);
+
+  public Long getHostId();
+
+  public void setHostId(Long hostId);
+
+}
+
+
+
+

As you can see, Order should extend the ApplicationEntity interface. This interface provides the necessary methods for a mutable entity (ID getter and setter, basically).

+
+
+

Finally, we should create the entity implementation:

+
+
+
+
import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+import javax.persistence.Transient;
+
+import com.devonfw.application.mtsj.bookingmanagement.dataaccess.api.BookingEntity;
+import com.devonfw.application.mtsj.bookingmanagement.dataaccess.api.InvitedGuestEntity;
+import com.devonfw.application.mtsj.general.dataaccess.api.ApplicationPersistenceEntity;
+//import io.oasp.application.mtsj.ordermanagement.common.api.Order;
+
+/**
+ * The {@link com.devonfw.application.mtsj.general.dataaccess.api.ApplicationPersistenceEntity persistent entity} for
+ * {@link Order}.
+ */
+@Entity
+@Table(name = "Orders")
+public class OrderEntity extends ApplicationPersistenceEntity implements Order {
+
+  private static final long serialVersionUID = 1L;
+
+  private BookingEntity booking;
+
+  private InvitedGuestEntity invitedGuest;
+
+  private BookingEntity host;
+
+  private List<OrderLineEntity> orderLines;
+
+  /**
+   * @return booking
+   */
+  @ManyToOne(fetch = FetchType.EAGER)
+  @JoinColumn(name = "idBooking")
+  public BookingEntity getBooking() {
+
+    return this.booking;
+  }
+
+  /**
+   * @param booking new value of {@link #getbooking}.
+   */
+  public void setBooking(BookingEntity booking) {
+
+    this.booking = booking;
+  }
+
+  /**
+   * @return invitedGuest
+   */
+  @OneToOne(fetch = FetchType.EAGER)
+  @JoinColumn(name = "idInvitedGuest")
+  public InvitedGuestEntity getInvitedGuest() {
+
+    return this.invitedGuest;
+  }
+
+  /**
+   * @param invitedGuest new value of {@link #getinvitedGuest}.
+   */
+  public void setInvitedGuest(InvitedGuestEntity invitedGuest) {
+
+    this.invitedGuest = invitedGuest;
+  }
+
+  /**
+   * @return orderLines
+   */
+  @OneToMany(mappedBy = "order", fetch = FetchType.EAGER)
+  public List<OrderLineEntity> getOrderLines() {
+
+    return this.orderLines;
+  }
+
+  /**
+   * @param orderLines new value of {@link #getorderLines}.
+   */
+  public void setOrderLines(List<OrderLineEntity> orderLines) {
+
+    this.orderLines = orderLines;
+  }
+
+  @Override
+  @Transient
+  public Long getBookingId() {
+
+    if (this.booking == null) {
+      return null;
+    }
+    return this.booking.getId();
+  }
+
+  @Override
+  public void setBookingId(Long bookingId) {
+
+    if (bookingId == null) {
+      this.booking = null;
+    } else {
+      BookingEntity bookingEntity = new BookingEntity();
+      bookingEntity.setId(bookingId);
+      this.booking = bookingEntity;
+    }
+  }
+
+  @Override
+  @Transient
+  public Long getInvitedGuestId() {
+
+    if (this.invitedGuest == null) {
+      return null;
+    }
+    return this.invitedGuest.getId();
+  }
+
+  @Override
+  public void setInvitedGuestId(Long invitedGuestId) {
+
+    if (invitedGuestId == null) {
+      this.invitedGuest = null;
+    } else {
+      InvitedGuestEntity invitedGuestEntity = new InvitedGuestEntity();
+      invitedGuestEntity.setId(invitedGuestId);
+      this.invitedGuest = invitedGuestEntity;
+    }
+  }
+
+  /**
+   * @return host
+   */
+  @OneToOne
+  @JoinColumn(name = "idHost")
+  public BookingEntity getHost() {
+
+    return this.host;
+  }
+
+  /**
+   * @param host new value of {@link #gethost}.
+   */
+  public void setHost(BookingEntity host) {
+
+    this.host = host;
+  }
+
+  @Override
+  @Transient
+  public Long getHostId() {
+
+    if (this.host == null) {
+      return null;
+    }
+    return this.host.getId();
+  }
+
+  @Override
+  public void setHostId(Long hostId) {
+
+    if (hostId == null) {
+      this.host = null;
+    } else {
+      BookingEntity bookingEntity = new BookingEntity();
+      bookingEntity.setId(hostId);
+      this.host = bookingEntity;
+    }
+  }
+
+}
+
+
+
+

Validation

+
+

You can read more about devonfw validation in devonfw validation

+
+
+

For example, we are going to add a validation in TableEntity to validate number property to allow only values greater than 0.

+
+
+
+
  @Min(value = 1, message = "Assistants must be greater than 0")
+  @Digits(integer = 2, fraction = 0)
+  private Integer assistants;
+
+
+
+
+
+
+

Creating persistence layer

+
+
+

Data Access Objects (DAOs) are part of the persistence layer. They are responsible for a specific entity and should be named <entity>Dao[Impl]. The DAO offers the so called CRUD-functionalities (create, retrieve, update, delete) for the corresponding entity. Additionally a DAO may offer advanced operations such as search or locking methods.

+
+
+

For each DAO there is an interface named <entity>Dao that defines the API. For CRUD support and common naming methods we derive it from the interface com.devonfw.application.mtsj.general.dataaccess.api.dao.

+
+
+
OrderDao.java
+
+
// import io.oasp.application.mtsj.general.dataaccess.api.dao.ApplicationDao;
+import com.devonfw.application.mtsj.ordermanagement.dataaccess.api.OrderEntity;
+import com.devonfw.application.mtsj.ordermanagement.logic.api.to.OrderSearchCriteriaTo;
+import com.devonfw.module.jpa.common.api.to.PaginatedListTo;
+
+/**
+ * Data access interface for Order entities
+ */
+public interface OrderDao extends ApplicationDao<OrderEntity> {
+
+  /**
+   * Finds the {@link OrderEntity orders} matching the given {@link OrderSearchCriteriaTo}.
+   *
+   * @param criteria is the {@link OrderSearchCriteriaTo}.
+   * @return the {@link PaginatedListTo} with the matching {@link OrderEntity} objects.
+   */
+  PaginatedListTo<OrderEntity> findOrders(OrderSearchCriteriaTo criteria);
+
+}
+
+
+
+

Implementing a DAO is quite simple. We should create a class named <entity>DaoImpl that extends ApplicationDaoImpl class and implements our DAO interface.

+
+
+

This is the DAO implementation for our table sample:

+
+
+
OrderDaoImpl.java
+
+
import java.util.List;
+
+import javax.inject.Named;
+
+import com.mysema.query.alias.Alias;
+import com.mysema.query.jpa.impl.JPAQuery;
+import com.mysema.query.types.path.EntityPathBase;
+
+import com.cap.jumpthequeue.general.dataaccess.base.dao.ApplicationDaoImpl;
+import com.devonfw.application.mtsj.ordermanagement.dataaccess.api.OrderEntity;
+// import io.oasp.application.mtsj.ordermanagement.dataaccess.api.dao.OrderDao;
+import com.devonfw.application.mtsj.ordermanagement.logic.api.to.OrderSearchCriteriaTo;
+// import io.oasp.module.jpa.common.api.to.OrderByTo;
+// import io.oasp.module.jpa.common.api.to.OrderDirection;
+import com.devonfw.module.jpa.common.api.to.PaginatedListTo;
+
+/**
+ * This is the implementation of {@link OrderDao}.
+ */
+@Named
+public class OrderDaoImpl extends ApplicationDaoImpl<OrderEntity> implements OrderDao {
+
+  /**
+   * The constructor.
+   */
+  public OrderDaoImpl() {
+
+    super();
+  }
+
+  @Override
+  public Class<OrderEntity> getEntityClass() {
+
+    return OrderEntity.class;
+  }
+
+  @Override
+  public PaginatedListTo<OrderEntity> findOrders(OrderSearchCriteriaTo criteria) {
+
+    OrderEntity order = Alias.alias(OrderEntity.class);
+    EntityPathBase<OrderEntity> alias = Alias.$(order);
+    JPAQuery query = new JPAQuery(getEntityManager()).from(alias);
+
+    Long booking = criteria.getBookingId();
+    if (booking != null && order.getBooking() != null) {
+      query.where(Alias.$(order.getBooking().getId()).eq(booking));
+    }
+    Long invitedGuest = criteria.getInvitedGuestId();
+    if (invitedGuest != null && order.getInvitedGuest() != null) {
+      query.where(Alias.$(order.getInvitedGuest().getId()).eq(invitedGuest));
+    }
+    String hostToken = criteria.getHostToken();
+    if (hostToken != null && order.getHost() != null) {
+      query.where(Alias.$(order.getBooking().getBookingToken()).toLowerCase().eq(hostToken.toLowerCase()));
+    }
+
+    String email = criteria.getEmail();
+    if (email != null) {
+      query.where(Alias.$(order.getBooking().getEmail()).toLowerCase().eq(email.toLowerCase()));
+    }
+
+    String bookingToken = criteria.getBookingToken();
+    if (bookingToken != null) {
+      query.where(Alias.$(order.getBooking().getBookingToken()).toLowerCase().eq(bookingToken.toLowerCase()));
+    }
+
+    addOrderBy(query, alias, order, criteria.getSort());
+    return findPaginated(criteria, query, alias);
+  }
+
+  private void addOrderBy(JPAQuery query, EntityPathBase<OrderEntity> alias, OrderEntity order, List<OrderByTo> sort) {
+
+    if (sort != null && !sort.isEmpty()) {
+      for (OrderByTo orderEntry : sort) {
+        if ("idBooking".equals(orderEntry.getName())) {
+          if (OrderDirection.ASC.equals(orderEntry.getDirection())) {
+            query.orderBy(Alias.$(order.getBookingId()).asc());
+          } else {
+            query.orderBy(Alias.$(order.getBookingId()).desc());
+          }
+        } else if ("idInvitedGuest".equals(orderEntry.getName())) {
+          if (OrderDirection.ASC.equals(orderEntry.getDirection())) {
+            query.orderBy(Alias.$(order.getInvitedGuestId()).asc());
+          } else {
+            query.orderBy(Alias.$(order.getInvitedGuestId()).desc());
+          }
+        } else if ("hostToken".equals(orderEntry.getName())) {
+          if (OrderDirection.ASC.equals(orderEntry.getDirection())) {
+            query.orderBy(Alias.$(order.getBooking().getBookingToken()).toLowerCase().asc());
+          } else {
+            query.orderBy(Alias.$(order.getBooking().getBookingToken()).toLowerCase().desc());
+          }
+        } else if ("bookingToken".equals(orderEntry.getName())) {
+          if (OrderDirection.ASC.equals(orderEntry.getDirection())) {
+            query.orderBy(Alias.$(order.getBooking().getBookingToken()).toLowerCase().asc());
+          } else {
+            query.orderBy(Alias.$(order.getBooking().getBookingToken()).toLowerCase().desc());
+          }
+        } else if ("email".equals(orderEntry.getName())) {
+          if (OrderDirection.ASC.equals(orderEntry.getDirection())) {
+            query.orderBy(Alias.$(order.getBooking().getEmail()).toLowerCase().asc());
+          } else {
+            query.orderBy(Alias.$(order.getBooking().getEmail()).toLowerCase().desc());
+          }
+        } else if ("bookingDate".equals(orderEntry.getName())) {
+          if (OrderDirection.ASC.equals(orderEntry.getDirection())) {
+            query.orderBy(Alias.$(order.getBooking().getBookingDate()).asc());
+          } else {
+            query.orderBy(Alias.$(order.getBooking().getBookingDate()).desc());
+          }
+        }
+      }
+    }
+  }
+
+}
+
+
+
+

As you can see ApplicationMasterDataDaoImpl already implements the CRUD operations so you only have to implement the additional methods that you have declared in your <entity>Dao interface.

+
+
+

Defining queries

+
+

devonfw advises to specify all queries in one mapping file called NamedQueries.xml. So we are going to create a query to get free tables that we have used in TableDaoImpl.

+
+
+
src/main/resources/config/app/dataaccess/NamedQueries.xml
+
+
<!--?xml version="1.0" encoding="UTF-8"?-->
+<entity-mappings version="1.0" xmlns="http://java.sun.com/xml/ns/persistence/orm" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemalocation="http://java.sun.com/xml/ns/persistence/orm http://java.sun.com/xml/ns/persistence/orm_1_0.xsd">
+
+  <named-query name="get.free.tables">
+    <query><!--[CDATA[SELECT t FROM Table t WHERE t.state = com.devonfw.gastronomy.restaurant.common.datatype.TableState.FREE]]--></query>
+  </named-query>
+
+</entity-mappings>
+
+
+
+

To avoid redundant occurrences of the query name we define the constants for each named query:

+
+
+
NamedQueries.java
+
+
/**
+ * Constants of the named queries defined in ``NamedQueries.xml``.
+ *
+ */
+public abstract class NamedQueries {
+
+  // put your query names from NamedQueries.xml as constants here
+  /** @see io.oasp.gastronomy.restaurant.tablemanagement.dataaccess.impl.dao.TableDaoImpl#getFreeTables() */
+  public static final String GET_FREE_TABLES = "get.free.tables";
+
+}
+
+
+
+

Note that changing the name of the java constant can be done easily with refactoring. Further you can trace where the query is used by searching the references of the constant.

+
+
+
+
+
+

Expose logic as services

+
+
+

The logic layer is for internal use of an application. In order to access the functionality of the logic layer from other applications it should be exposed with a bridge layer called the service layer.

+
+
+

This layer should be in charge of converting Java objects to their serialized form and back. It also provides the means to publish to an endpoint and to secure access for certain users. Last but not least, it is responsible for wrapping any error coming from the logic layer into a format that can be understood by the client of the service.

+
+
+

In devonfw, we propose to divide the CRUD logic into different files to separate responsibilities:

+
+
+
    +
  • +

    An interface and an implementing class for CRUD read only methods, UCFind[XXX]. E.g. UCFindTable.

    +
  • +
  • +

    An interface and an implementing class for CRUD write methods, UCManage[XXX]. E.g. UCManageTable.

    +
  • +
+
+
+
UCFindTable.java
+
+
import com.devonfw.application.mtsj.bookingmanagement.logic.api.to.TableEto;
+
+import java.util.List;
+
+/**
+ * Interface of UcFindTable to centralize documentation and signatures of methods.
+ *
+ */
+public interface UcFindTable {
+
+  /**
+   * Returns a restaurant table by its id 'id'.
+   *
+   * @param id The id 'id' of the restaurant table.
+   * @return The restaurant {@link TableEto} with id 'id'
+   */
+  TableEto findTable(Long id);
+
+  /**
+   * Returns a list of all existing restaurant tables.
+   *
+   * @return {@link List} of all existing restaurant {@link TableEto}s
+   */
+  List<tableeto> findAllTables();
+
+ /**
+   * Returns a list of all existing free restaurant tables.
+   *
+   * @return {@link List} of all existing free restaurant {@link TableEto}s
+   */
+  List<tableeto> findFreeTables();
+
+}
+
+
+
+
UCFindTableImpl.java
+
+
import com.devonfw.application.mtsj.general.common.api.constants.PermissionConstants;
+import com.devonfw.application.mtsj.general.logic.api.UseCase;
+import com.devonfw.application.mtsj.general.dataaccess.api.TableEntity;
+import com.devonfw.application.mtsj.general.logic.api.to.TableEto;
+import com.devonfw.application.mtsj.general.logic.api.usecase.UcFindTable;
+import com.devonfw.application.mtsj.general.logic.base.usecase.AbstractTableUc;
+
+import java.util.List;
+
+import javax.annotation.security.RolesAllowed;
+import javax.inject.Named;
+
+/**
+ * Implementation of {@link UcFindTable}.
+ *
+ */
+@Named
+@UseCase
+public class UcFindTableImpl extends AbstractTableUc implements UcFindTable {
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  @RolesAllowed(PermissionConstants.FIND_TABLE)
+  public TableEto findTable(Long id) {
+
+    return getBeanMapper().map(getTableDao().findOne(id), TableEto.class);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  @RolesAllowed(PermissionConstants.FIND_TABLE)
+  public List<tableeto> findAllTables() {
+
+    List<tableentity> tables = getTableDao().findAll();
+    return getBeanMapper().mapList(tables, TableEto.class);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  @RolesAllowed(PermissionConstants.FIND_TABLE)
+  public List<tableeto> findFreeTables() {
+
+    List<tableentity> tables = getTableDao().getFreeTables();
+    return getBeanMapper().mapList(tables, TableEto.class);
+  }
+
+}
+
+
+
+
UCManageTable.java
+
+
import com.devonfw.application.mtsj.general.logic.api.to.TableEto;
+
+import javax.validation.Valid;
+
+/**
+ * Interface of UcManageTable to centralize documentation and signatures of methods.
+ *
+ */
+public interface UcManageTable {
+
+  /**
+   * Deletes a restaurant table from the database by its id 'id'.
+   *
+   * @param tableId Id of the restaurant table to delete
+   */
+  void deleteTable(Long tableId);
+
+  /**
+   * Creates a new restaurant table and store it in the database.
+   *
+   * @param table the {@link TableEto} to create.
+   * @return the new {@link TableEto} that has been saved with ID and version.
+   */
+  TableEto saveTable(@Valid TableEto table);
+
+}
+
+
+
+
UCManageTableImpl.java
+
+
import com.devonfw.application.mtsj.general.common.api.constants.PermissionConstants;
+import com.devonfw.application.mtsj.general.common.api.exception.IllegalEntityStateException;
+import com.devonfw.application.mtsj.general.logic.api.UseCase;
+import com.devonfw.gastronomy.restaurant.common.datatype.TableState;
+import com.devonfw.application.mtsj.bookingmanagement.dataaccess.api.TableEntity;
+import com.devonfw.application.mtsj.bookingmanagement.logic.api.to.TableEto;
+import com.devonfw.application.mtsj.bookingmanagement.logic.api.usecase.UcManageTable;
+import com.devonfw.application.mtsj.bookingmanagement.logic.base.usecase.AbstractTableUc;
+
+import java.util.Objects;
+
+import javax.annotation.security.RolesAllowed;
+import javax.inject.Named;
+import javax.validation.Valid;
+
+import org.springframework.validation.annotation.Validated;
+
+/**
+ * Implementation of {@link UcManageTable}.
+ *
+ */
+@Named
+@UseCase
+@Validated
+public class UcManageTableImpl extends AbstractTableUc implements UcManageTable {
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  @RolesAllowed(PermissionConstants.DELETE_TABLE)
+  public void deleteTable(Long tableId) {
+
+    TableEntity table = getTableDao().find(tableId);
+
+    if (!table.getState().isFree()) {
+      throw new IllegalEntityStateException(table, table.getState());
+    }
+
+    getTableDao().delete(table);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  @RolesAllowed(PermissionConstants.SAVE_TABLE)
+  public TableEto saveTable(@Valid TableEto table) {
+
+    Objects.requireNonNull(table, "table");
+
+    TableEntity tableEntity = getBeanMapper().map(table, TableEntity.class);
+    // initialize
+    if (tableEntity.getState() == null) {
+      tableEntity.setState(TableState.FREE);
+    }
+
+    getTableDao().save(tableEntity);
+    return getBeanMapper().map(tableEntity, TableEto.class);
+  }
+
+}
+
+
+
+

As you can see, implementation classes extend the AbstractTableUc class. This class provides the DAO injection.

+
+
+
AbstractTableUC.java
+
+
import com.devonfw.application.mtsj.general.logic.base.AbstractUc;
+import com.devonfw.application.mtsj.bookingmanagement.logic.impl.BookingmanagementImpl;
+
+import javax.inject.Inject;
+
+/**
+ *
+ */
+public abstract class AbstractTableUc extends AbstractUc {
+
+  /** @see #getTableDao() */
+  private TableDao tableDao;
+
+  /**
+   * @return the {@link TableDao} instance.
+   */
+  public TableDao getTableDao() {
+
+    return this.tableDao;
+  }
+
+  /**
+   * @param tableDao the {@link TableDao} to {@link Inject}.
+   */
+  @Inject
+  public void setTableDao(TableDao tableDao) {
+
+    this.tableDao = tableDao;
+  }
+
+}
+
+
+
+

Finally, we are going to create an interface and the implementing class that joins both UC classes. The devonfw naming convention for these classes is: [XXX]management and [XXX]managementImpl.

+
+
+
Tablemanagement.java
+
+
import com.devonfw.application.mtsj.general.logic.api.usecase.UcFindTable;
+import com.devonfw.application.mtsj.bookingmanagement.logic.api.usecase.UcManageTable;
+
+/**
+ * Interface for TableManagement component.
+ *
+ */
+public interface Tablemanagement extends UcFindTable, UcManageTable {
+
+}
+
+
+
+
TablemanagementImpl.java
+
+
import com.devonfw.application.mtsj.general.common.base.AbstractBeanMapperSupport;
+import com.devonfw.application.mtsj.general.logic.api.UseCase;
+// import io.oasp.gastronomy.restaurant.tablemanagement.logic.api.Tablemanagement;
+import com.devonfw.application.mtsj.bookingmanagement.logic.api.to.TableEto;
+import com.devonfw.application.mtsj.general.logic.api.usecase.UcFindTable;
+import com.devonfw.application.mtsj.bookingmanagement.logic.api.usecase.UcManageTable;
+
+import java.util.List;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+
+/**
+ * Implementation of {@link Tablemanagement}.
+ *
+ */
+@Named
+public class TablemanagementImpl extends AbstractBeanMapperSupport implements Tablemanagement {
+
+  private UcFindTable ucFindTable;
+
+  private UcManageTable ucManageTable;
+
+  /**
+   * The constructor.
+   */
+  public TablemanagementImpl() {
+
+    super();
+  }
+
+  /**
+   * Sets the field 'ucFindTable'.
+   *
+   * @param ucFindTable New value for ucFindTable
+   */
+  @Inject
+  @UseCase
+  public void setUcFindTable(UcFindTable ucFindTable) {
+
+    this.ucFindTable = ucFindTable;
+  }
+
+  /**
+   * Sets the field 'ucManageTable'.
+   *
+   * @param ucManageTable New value for ucManageTable
+   */
+  @Inject
+  @UseCase
+  public void setUcManageTable(UcManageTable ucManageTable) {
+
+    this.ucManageTable = ucManageTable;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public TableEto findTable(Long id) {
+
+    return this.ucFindTable.findTable(id);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public List<tableeto> findAllTables() {
+
+    return this.ucFindTable.findAllTables();
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public List<tableeto> findFreeTables() {
+
+    return this.ucFindTable.findFreeTables();
+  }
+
+  /**
+   * {@inheritDoc}
+   *
+   */
+  @Override
+  public TableEto saveTable(TableEto table) {
+
+    return this.ucManageTable.saveTable(table);
+  }
+
+  /**
+   * {@inheritDoc}
+   *
+   */
+  @Override
+  public void deleteTable(Long id) {
+
+    this.ucManageTable.deleteTable(id);
+  }
+
+}
+
+
+
+

This code shows that it is merely a delegation to the injected UCs and can be automatically generated with Eclipse’s powerful refactoring capabilities.

+
+
+
+
+

Securing the application

+
+
+

devonfw focuses on role-based authorization to cope with authorization for executing use cases of an application. devonfw uses the JSR250 annotations, mainly @RolesAllowed, as you have seen, for authorizing method calls against the permissions defined in the annotation body. +So, finally, we have to create a class to declare the RolesAllowed annotation values as constants:

+
+
+
+
/**
+ * Contains constants for the keys of all
+ * {@link com.devonfw.module.security.common.api.accesscontrol.AccessControlPermission}s.
+ *
+ */
+public abstract class PermissionConstants {
+
+  /** {@link com.devonfw.module.security.common.api.accesscontrol.AccessControlPermission} to retrieve table. */
+  public static final String FIND_TABLE = "FindTable";
+
+  /** {@link com.devonfw.module.security.common.api.accesscontrol.AccessControlPermission} to save table. */
+  public static final String SAVE_TABLE = "SaveTable";
+
+  /** {@link com.devonfw.module.security.common.api.accesscontrol.AccessControlPermission} to remove table. */
+  public static final String DELETE_TABLE = "DeleteTable";
+}
+
+
+
+

Creating REST endpoints

+
+

Web applications need to get data from the server, so we have to expose the methods defined in the logic layer to these applications. We need a class that exposes methods as URLs to allow the applications to get the data. By convention, we call this class [XXX]managementRestServiceImpl where [XXX] will be the name of the entity.

+
+
+

This is an example of a REST API for our Table use case using JAX-RS. devonfw recommends to use CXF as the implementation for JAX-RS but other libraries following the standard will perform equally.

+
+
+

Also note that the implementation does not follow the canonical RESTful approach as devonfw proposes a more pragmatic way to use REST. Please refer to the Platform Guide service layer chapter for more information on the subject.

+
+
+
TablemanagementRestServiceImpl.java
+
+
import com.devonfw.application.mtsj.bookingmanagement.common.api.Table;
+// import io.oasp.gastronomy.restaurant.tablemanagement.logic.api.Tablemanagement;
+import com.devonfw.application.mtsj.bookingmanagement.logic.api.to.TableEto;
+import com.devonfw.application.mtsj.general.logic.api.usecase.UcFindTable;
+import com.devonfw.application.mtsj.bookingmanagement.logic.api.usecase.UcManageTable;
+
+import java.util.List;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+import javax.ws.rs.BadRequestException;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.NotFoundException;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.MediaType;
+
+import net.sf.mmm.util.exception.api.ObjectNotFoundUserException;
+
+import org.springframework.transaction.annotation.Transactional;
+
+/**
+ * The service class for REST calls in order to execute the methods in {@link Tablemanagement}.
+ */
+@Path("/tablemanagement/v1")
+@Named("TablemanagementRestService")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+@Transactional
+public class TablemanagementRestServiceImpl {
+
+  private Tablemanagement tableManagement;
+
+  /**
+   * This method sets the field <tt>tableManagement</tt>.
+   *
+   * @param tableManagement the new value of the field tableManagement
+   */
+  @Inject
+  public void setTableManagement(Tablemanagement tableManagement) {
+
+    this.tableManagement = tableManagement;
+  }
+
+  /**
+   * Delegates to {@link UcFindTable#findTable}.
+   *
+   * @param id the ID of the {@link TableEto}
+   * @return the {@link TableEto}
+   */
+  @GET
+  @Path("/table/{id}/")
+  public TableEto getTable(@PathParam("id") String id) {
+
+    Long idAsLong;
+    if (id == null) {
+      throw new BadRequestException("missing id");
+    }
+    try {
+      idAsLong = Long.parseLong(id);
+    } catch (NumberFormatException e) {
+      throw new BadRequestException("id is not a number");
+    } catch (NotFoundException e) {
+      throw new BadRequestException("table not found");
+    }
+    return this.tableManagement.findTable(idAsLong);
+  }
+
+  /**
+   * Delegates to {@link UcFindTable#findAllTables}.
+   *
+   * @return list of all existing restaurant {@link TableEto}s
+   */
+  @GET
+  @Path("/table/")
+  public List<tableeto> getAllTables() {
+
+    List<tableeto> allTables = this.tableManagement.findAllTables();
+    return allTables;
+  }
+
+  /**
+   * Delegates to {@link UcFindTable#findFreeTables}.
+   *
+   * @return list of all existing free {@link TableEto}s
+   */
+  @GET
+  @Path("/freetables/")
+  public List<tableeto> getFreeTables() {
+
+    return this.tableManagement.findFreeTables();
+  }
+
+  /**
+   * Delegates to {@link UcManageTable#saveTable}.
+   *
+   * @param table the {@link TableEto} to be created
+   * @return the recently created {@link TableEto}
+   */
+  @POST
+  @Path("/table/")
+  public TableEto saveTable(TableEto table) {
+
+    return this.tableManagement.saveTable(table);
+  }
+
+  /**
+   * Delegates to {@link UcManageTable#deleteTable}.
+   *
+   * @param id ID of the {@link TableEto} to be deleted
+   */
+  @DELETE
+  @Path("/table/{id}/")
+  public void deleteTable(@PathParam("id") Long id) {
+
+    this.tableManagement.deleteTable(id);
+  }
+}
+
+
+
+

It is important to mention:

+
+
+
    +
  • +

    We send and receive the information in JSON format.

    +
  • +
  • +

    We specify the version of the entire API or every method.

    +
  • +
+
+
+

Finally, we need to add this implementation into JAX-RS server bean definition:

+
+
+
+
  <jaxrs:server id="CxfRestServices" address="/rest">
+    <jaxrs:providers>
+      <bean class="com.fasterxml.jackson.jaxrs.json.JacksonJsonProvider">
+      <property name="mapper">
+        <ref bean="JacksonObjectMapper">
+      </ref></property>
+      </bean>
+      <ref bean="RestServiceExceptionFacade">
+    </ref></jaxrs:providers>
+    <jaxrs:servicebeans>
+      <ref bean="TablemanagementRestService">
+      <ref bean="SecurityRestService">
+    </ref></ref></jaxrs:servicebeans>
+  </jaxrs:server>
+
+
+
+

As you can see, we have defined the REST URLs for our Table use case. Now, for example, you can find all tables at this URL:

+
+
+
+
http://server:port/application-name/tablemanagement/v1/table/
+
+
+
+

DTO conversion

+
+

In the logic API, the methods of the classes should return Data Transfer Objects (DTOs) instead of entities. So, in devonfw we have a mechanism to convert the entities into DTOs.

+
+
+

This is an example of how to convert an entity into a DTO:

+
+
+
+
    // Conversion for lists
+    getBeanMapper().mapList(tableList, TableDto.class);
+
+    // Conversion for objects
+    getBeanMapper().map(table, TableDto.class);
+
+
+
+

In the example, we use the function getBeanMapper(). This function provides us with an API to convert entities into DTOs. In the logic layer, we only have to extend the class AbstractUc to access this functionality.

+
+
+
+

Exceptions

+
+
User exceptions
+ +
+
+
Non controlled exceptions
+ +
+
+
+
+
+
+

Internationalization

+
+
+

Pagination

+ +
+
+

Sorting

+
+
+
/**
+ * This enum identifies the entity, on which the sorting should be executed.
+ *
+ */
+public enum TableSortByHitEntry {
+
+  /**
+   * Sort by id.
+   */
+  ID("id"),
+  /**
+   * Sort by number.
+   */
+  NUMBER("number"),
+  /**
+   * Sort by state.
+   */
+  STATE("state"),
+  /**
+   * Sort by waiterId.
+   */
+  WAITERID("waiterId");
+
+  private final String sortByAttributeName;
+
+  private TableSortByHitEntry(String sortByAttributeName) {
+
+    this.sortByAttributeName = sortByAttributeName;
+  }
+
+  /**
+   * @return sortByAttributeName
+   */
+  public String getSortByAttributeName() {
+
+    return this.sortByAttributeName;
+  }
+
+  /**
+   * This method returns an {@link TableSortByHitEntry} for a given {@link #getSortByAttributeName() attribute name}.
+   *
+   * @param sortByAttributeName the name.
+   * @return an {@link TableSortByHitEntry}
+   */
+  public static TableSortByHitEntry getEntryForAttributeName(String sortByAttributeName) {
+
+    for (TableSortByHitEntry entry : TableSortByHitEntry.values()) {
+      if (entry.sortByAttributeName.equals(sortByAttributeName)) {
+        return entry;
+      }
+    }
+
+    return null;
+  }
+}
+
+
+
+
+
// import io.oasp.gastronomy.restaurant.general.common.api.datatype.OrderBy;
+// import io.oasp.gastronomy.restaurant.tablemanagement.common.api.datatype.TableSortByHitEntry;
+
+/**
+ * Table sortBy class
+ */
+public class TableSortBy {
+
+  private TableSortByHitEntry sortByEntry;
+
+  private OrderBy orderBy;
+
+  /**
+   * The constructor.
+   */
+  public TableSortBy() {
+
+    this.sortByEntry = TableSortByHitEntry.ID;
+    this.orderBy = OrderBy.ASC;
+  }
+
+  /**
+   * Returns the field 'sortByEntry'.
+   *
+   * @return Value of sortByEntry
+   */
+  public TableSortByHitEntry getSortByEntry() {
+
+    return this.sortByEntry;
+  }
+
+  /**
+   * Sets the field 'sortByEntry'.
+   *
+   * @param sortByEntry New value for sortByEntry
+   */
+  public void setSortByEntry(TableSortByHitEntry sortByEntry) {
+
+    this.sortByEntry = sortByEntry;
+  }
+
+  /**
+   * Returns the field 'orderBy'.
+   *
+   * @return Value of orderBy
+   */
+  public OrderBy getOrderBy() {
+
+    return this.orderBy;
+  }
+
+  /**
+   * Sets the field 'orderBy'.
+   *
+   * @param orderBy New value for orderBy
+   */
+  public void setOrderBy(OrderBy orderBy) {
+
+    this.orderBy = orderBy;
+  }
+
+}
+
+
+
+
+

Testing endpoints

+
+

SOAPUI, JUnit?

+
+
+
+

Creating Web Services

+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-eclipse.html b/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-eclipse.html new file mode 100644 index 00000000..7ec6a2db --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-eclipse.html @@ -0,0 +1,272 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Working with eclipse

+
+
+

TODO: highlight how to edit code from eclipse, run the server, etc…​

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-environment.html b/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-environment.html new file mode 100644 index 00000000..420f700e --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-environment.html @@ -0,0 +1,282 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==Preparing the environment

+
+
+

Pre-requisites

+
+
+

Internet connection, filesystem access, git, proxy, java and maven versions, etc…​

+
+
+
+
+

Prepare the workspace

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-introduction.html b/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-introduction.html new file mode 100644 index 00000000..541b4494 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-introduction.html @@ -0,0 +1,284 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Introduction

+
+
+

This is a step-by-step tutorial for starting a devonfw server application, from setting up the environment to packaging for production.

+
+
+

The tutorial starts by setting up the programmer environment with the aid of the devon-ide project and verifies everything is correct by running the my-thai-start restaurant sample application of the devonfw project.

+
+
+

Afterwards a new blank application is created by using the provided archetypes and all generated files are reviewed to explain what devonfw is providing.

+
+
+

A classical CRUD use case is developed for creating, retrieving, updating and deleting an entity. With this entity we introduce cross cutting concerns such as exception handling, validation and securing the access from the web.

+
+
+

Finally the sample will be ready for deployment to a web server so we will package it on a WAR (or EAR) file.

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-monitoring.html b/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-monitoring.html new file mode 100644 index 00000000..66d5e7e1 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-monitoring.html @@ -0,0 +1,272 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Monitoring

+
+
+

Explain how to use Logging (refer to the platform guide)

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-newapp.html b/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-newapp.html new file mode 100644 index 00000000..21d73d2b --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-newapp.html @@ -0,0 +1,457 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Creating a new application

+
+
+

Running the archetype

+
+
+

In order to create a new application you must use the archetype provided by devon4j which uses the maven archetype functionality.

+
+
+

To create a new application, you should have installed the devonfw IDE. Follow the devon ide documentation to install +the same. +You can choose between 2 alternatives: create it from the command line or, in a more visual manner, within Eclipse.

+
+
+

From command Line

+
+

To create a new devon4j application from command line, you can simply run the following command:

+
+
+
+
devon java create com.example.application.sampleapp
+
+
+
+

For low-level creation you can also manually call this command:

+
+
+
+
mvn -DarchetypeVersion=${devon4j.version} -DarchetypeGroupId=com.devonfw.java.templates -DarchetypeArtifactId=devon4j-template-server archetype:generate -DgroupId=com.example.application -DartifactId=sampleapp -Dversion=1.0.0-SNAPSHOT -Dpackage=com.devonfw.application.sampleapp
+
+
+
+

Attention: The archetypeVersion (first argument) should be set to the latest version of devon4j. You can easily determine the version from this badge: +latest devon4j version

+
+
+

Further providing additional properties (using -D parameter) you can customize the generated app:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 1. Options for app template
propertycommentexample

dbType

Choose the type of RDBMS to use (hana, oracle, mssql, postgresql, mariadb, mysql, etc.)

-DdbType=postgresql

batch

Option to add an batch module

-Dbatch=batch

+
+
+

From Eclipse

+
+
+
After that, you should follow this Eclipse steps to create your application:
+
+
+
+
    +
  • +

    Create a new Maven Project.

    +
  • +
  • +

    Choose the devon4j-template-server archetype, just like the image.

    +
  • +
+
+
+
+Select archetype +
+
+
+
    +
  • +

    Fill the Group Id, Artifact Id, Version and Package for your project.

    +
  • +
+
+
+
+Configure archetype +
+
+
+
    +
  • +

    Finish the Eclipse assistant and you are ready to start your project.

    +
  • +
+
+
+
+
+
+

What is generated

+
+
+

The application template (archetype) generates a Maven multi-module project. It has the following modules:

+
+
+
    +
  • +

    api: module with the API (REST service interfaces, transferobjects, datatypes, etc.) to be imported by other apps as a maven dependency in order to invoke and consume the offered (micro)services.

    +
  • +
  • +

    core: maven module containing the core of the application.

    +
  • +
  • +

    batch: optional module for batch(es)

    +
  • +
  • +

    server: module that bundles the entire app (core with optional batch) as a WAR file.

    +
  • +
+
+
+

The toplevel pom.xml of the generated project has the following features:

+
+
+
    +
  • +

    Properties definition: Spring-boot version, Java version, etc.

    +
  • +
  • +

    Modules definition for the modules (described above)

    +
  • +
  • +

    Dependency management: define versions for dependencies of the technology stack that are recommended and work together in a compatible way.

    +
  • +
  • +

    Maven plugins with desired versions and configuration

    +
  • +
  • +

    Profiles for test stages

    +
  • +
+
+
+
+
+

How to run your app

+
+
+

Run app from IDE

+
+

To run your application from your favourite IDE, simply launch SpringBootApp as java application.

+
+
+
+

Run app as bootified jar or war

+
+

More details are available here.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-packaging.html b/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-packaging.html new file mode 100644 index 00000000..0a907fc5 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-packaging.html @@ -0,0 +1,307 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==Packaging the application

+
+
+

The application packaging is based on maven package, so you must execute the command

+
+
+
+
mvn package
+
+
+
+

Based on the choice you made during archetype execution, the project will create war or ear packaging in the appropriate project.

+
+
+

Packaging as WAR file

+
+
+

The war packaging is the default packaging output and all devonfw projects create this packaging.

+
+
+

The war packaging will be created in the devonfw server project (called ${artifactId}-server, where ${artifactId} is your maven project artifactId) and will include all web files, including client ones.

+
+
+

This output allows you to deploy the application on all application servers and servlet containers, but if you use an application server, you may prefer ear packaging to allow further configuration and exploit all enterprise advantages.

+
+
+
+
+

Packaging as EAR file

+
+
+

This packaging is the preferred one when you will deploy your application in an application server and you would like to use all power of enterprise applications (optimizing shared libraries packaging, using JCA, JTA transactions supported by server container, EJBs,…​).

+
+
+

To find the ear package you will navigate to the ear project you created with the archetype through the earProjectName property.

+
+
+

This project will generate an ear containing the war project and you could create specific application.xml file in order to use specific server features (like Weblogic multi version deployment) or will allow you to add more application modules (like another Web modules, EJBs,…​).

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-sample.html b/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-sample.html new file mode 100644 index 00000000..a5a5dc67 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-sample.html @@ -0,0 +1,322 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

==Sample My-Thai-Star application

+
+
+

The My-Thai-Star application is an example implementation for a devon4j web application. It acts as a demo and also shows various integration aspects.

+
+
+

The application showcases the following aspects of the architecture:

+
+
+
    +
  • +

    Server configuration

    +
  • +
  • +

    Transaction management

    +
  • +
  • +

    Logging usage

    +
  • +
  • +

    Naming conventions

    +
  • +
  • +

    Code organization

    +
  • +
  • +

    Validation

    +
  • +
  • +

    Database access

    +
  • +
  • +

    Logic layer implementation patterns

    +
  • +
+
+
+

Logic Layer

+
+
+

The restaurant application showcases two approaches for the logic layer organization. For simple cases the common interface/implementation pattern is used but for more complex logic situations this pattern lacks clarity and can be improved by using the UseCase pattern.

+
+
+

This UseCase pattern divides the Business Facade into several fine grained sub-interfaces and implementations for better maintainability and testability. This pattern has been proven successful in several large scale projects with a big team of developers involved.

+
+
+

Bear in mind that on a real engagement it is better to opt for one single pattern to code the whole application

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-security.html b/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-security.html new file mode 100644 index 00000000..1833f333 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4j.wiki/tutorial-security.html @@ -0,0 +1,375 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

==Securing the application

+
+
+

CORS filter

+
+
+

Cross-origin resource sharing (CORS) is a mechanism that allows restricted resources on a web page to be requested from another domain outside the domain from which the resource originated.

+
+
+

AJAX (XMLHttpRequest) requests have been limited to accessing the same domain as the parent web page (as per the same-origin security policy), so "Cross-domain" AJAX requests are forbidden by default because of their ability to perform advanced requests that introduce many security issues.

+
+
+

So to manage and solve that in devonfw…​ TODO

+
+
+
+
+

CSRF filter

+
+
+

Cross-Site Request Forgery (CSRF) is an attack that forces an end user to execute unwanted actions on a web application in which they’re currently authenticated.

+
+
+

In OWASP (Open Web Application Security Project) they talk about this vulnerability and they have written a guide to prevent CSRF attacks (CSRF Prevention).

+
+
+

devonfw uses the synchronizer token pattern to avoid this problem. This solution is to ensure that each request requires, in addition to our session cookie, a randomly generated token as an HTTP parameter. When a request is submitted, the server must look up the expected value for the parameter and compare it against the actual value in the request. If the values do not match, the request should fail.

+
+
+

devonfw has extended the Csrf Spring filter and has applied it to REST requests (by devonfw convention, the requests to the path /services/rest/**). +This filter is active by default, but it can be disabled by changing the value of the system property CsrfDisabled.

+
+
+

devonfw also provides a REST service that allows retrieving the CSRF token at the URL: services/rest/security/v1/csrftoken/

+
+
+

At this point we have resolved the issue on the server side, but we still have to manage the token on the client side. This is the responsibility of the client-side developers, so we should retrieve the CSRF token after the login and then send the token in every request to the server.

+
+
+
+
+

Securing methods

+
+
+

devonfw focuses on role-based authorization to cope with authorization for executing use cases of an application. devonfw uses the JSR250 annotations, mainly @RolesAllowed, for authorizing method calls against the permissions defined in the annotation body. This has to be done for each use-case method in the logic layer +This is an example of how to annotate the methods with RolesAllowed:

+
+
+
+
public class UcFindTableImpl extends AbstractTableUc implements UcFindTable {
+
+  private static final Logger LOG = LoggerFactory.getLogger(UcFindTableImpl.class);
+
+  @Override
+  @RolesAllowed(PermissionConstants.FIND_TABLE)
+  public TableEto findTable(Long id) {
+    ...
+  }
+
+}
+
+public class UcManageTableImpl extends AbstractTableUc implements UcManageTable {
+
+  @Override
+  @RolesAllowed(PermissionConstants.DELETE_TABLE)
+  public void deleteTable(Long tableId) {
+    ...
+  }
+
+  @Override
+  @RolesAllowed(PermissionConstants.SAVE_TABLE)
+  public TableEto saveTable(@Valid TableEto table) {
+     ...
+  }
+}
+
+
+
+

We have defined the value of the annotation RolesAllowed as constants, so we need to create a constant class for this purpose. Continuing with the example, that is our constant class:

+
+
+
+
/**
+ * Contains constants for the keys of all {@link AccessControlPermission}s.
+ */
+public abstract class PermissionConstants {
+
+  /** {@link AccessControlPermission} to retrieve table. */
+  public static final String FIND_TABLE = "FindTable";
+
+  /** {@link AccessControlPermission} to save table. */
+  public static final String SAVE_TABLE = "SaveTable";
+
+  /** {@link AccessControlPermission} to remove table. */
+  public static final String DELETE_TABLE = "DeleteTable";
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4net.wiki/architecture_guide.html b/docs/devonfw.github.io/1.0/devon4net.wiki/architecture_guide.html new file mode 100644 index 00000000..3e8bc160 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4net.wiki/architecture_guide.html @@ -0,0 +1,767 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Introduction

+
+
+

The devonfw platform provides a solution to building applications which combine best-in-class frameworks and libraries as well as industry proven practices and code conventions. +It massively speeds up development, reduces risks and helps you to deliver better results.

+
+
+
+
+

Overview Onion Design

+
+
+

This guide shows the overall proposed architecture in terms of separated layers, making use of the Onion architecture pattern. Each layer represents a logical group of components and functionality. In this guide you will learn the basics of the proposed architecture based on layers in order to develop software making use of best practices.

+
+
+
+
+

Layer specification

+
+
+
+
+

It is important to understand the distinction between layers and tiers. Layers describe the logical groupings of the functionality and components in an application; whereas tiers describe the physical distribution of the functionality and components on separate servers, computers, networks, or remote locations. Although both layers and tiers use the same set of names (presentation, business, services, and data), remember that only tiers imply a physical separation. It is quite common to locate more than one layer on the same physical machine (the same tier). You can think of the term tier as referring to physical distribution patterns such as two-tier, three-tier, and n-tier.

+
+
+
+— Layered Application Guidelines
+MSDN Microsoft +
+
+
+

The proposed architecture makes use of cooperating components called layers. To develop specific functionality each layer contains a set of components which is capable to develop such functionalities.

+
+
+

The next figure represents the different layers:

+
+
+
+technical architecture +
+
Figure 1. High level architecture representation
+
+
+

The layers are separated in physical tiers making use of interfaces. This pattern makes it possible to be flexible in different kinds of projects, maximizing performance and deployment strategies (synchronous/asynchronous access, security, component deployment in different environments, microservices…​). Another important point is to provide automated unit testing or test-driven development (TDD) facilities.

+
+
+
+
+

== Application layer

+
+
+

The Application Layer encapsulates the different .Net projects and its resource dependencies and manages the user interaction depending on the project’s nature.

+
+
+
+technical architecture +
+
Figure 2. Net application stack
+
+
+

The provided application template implements a dotnet API application. It also integrates the Swagger client by default. This provides the possibility to share the contract with external applications (angular, mobile apps, external services…​).

+
+
+
+
+

== Business layer

+
+
+

The business layer implements the core functionality of the application and encapsulates the component’s logic. +This layer provides the interface between the data transformation and the application exposition. This allows the data to be optimized and ready for different data consumers.

+
+
+

This layer may implement for each main entity the API controller, the entity related service and other classes to support the application logic.

+
+
+

In order to implement the service logic, the services class must follow the next specification:

+
+
+
+
    public class Service<TContext> : IService where TContext: DbContext
+
+
+
+

PE: devon4Net API template shows how to implement the TODOs service as follows:

+
+
+
+
    public class TodoService: Service<TodoContext>, ITodoService
+
+
+
+

Where Service is the base service class to be inherited and have full access for the Unit of work, TodoContext is the TODOs database context and ITodoService is the interface of the service, which exposes the public extended methods to be implemented.

+
+
+
+
+

== Data layer

+
+
+

The data layer orchestrates the data obtained between the Domain Layer and the Business Layer. Also transforms the data to be used more efficiently between layers.

+
+
+

So, if a service needs the help of another service or repository, the implemented Dependency Injection is the solution to accomplish the task.

+
+
+

The main aim of this layer is to implement the repository for each entity. The repository’s interface is defined in the Domain layer.

+
+
+

In order to implement the repository logic, the repository class must follow the next specification:

+
+
+
+
    Repository<T> : IRepository<T> where T : class
+
+
+
+

PE: devon4Net API template shows how to implement the TODOs repository as follows:

+
+
+
+
    public class TodoRepository : Repository<Todos>, ITodoRepository
+
+
+
+

Where Repository is the base repository class to be inherited, providing full access to the basic CRUD operations, Todos is the entity defined in the database context. ITodoRepository is the interface of the repository, which exposes the public extended methods to be implemented.

+
+
+ + + + + +
+ + +Please remember that <T> is the mapped class which reference the entity from the database context. This abstraction allows to write services implementation with different database contexts +
+
+
+
+
+

== Domain layer

+
+
+

The domain layer provides access to data directly exposed from other systems. The main source is used to be a data base system. The provided template makes use of Entity Framework solution from Microsoft in order to achieve this functionality.

+
+
+

To make good use of this technology, the Repository pattern has been implemented with the help of the Unit of Work pattern. Also, the use of generic types makes this solution the most flexible.

+
+
+

Regarding to data base source, each entity is mapped as a class. Repository pattern allows to use this mapped classes to access the data base via Entity framework:

+
+
+
+
 public class UnitOfWork<TContext> : IUnitOfWork<TContext> where TContext : DbContext
+
+
+
+ + + + + +
+ + +Where <T> is the mapped class which reference the entity from the database. +
+
+
+

The repository and unit of work patterns create an abstraction layer between the data access layer and the business logic layer of an application.

+
+
+ + + + + +
+ + +Domain Layer has no dependencies with other layers. It contains the Entities, datasources and the Repository Interfaces. +
+
+
+
+
+

devon4Net architecture layer implementation

+
+
+

The next picture shows how the devon4Net API template implements the architecture described in previous points:

+
+
+
+devon4Net api template architecture implementation +
+
Figure 3. devon4Net architecture implementations
+
+
+
+
+

== Cross-Cutting concerns

+
+
+

Cross-cutting provides the implementation functionality that spans layers. Each functionality is implemented through components able to work stand alone. This approach provides better reusability and maintainability.

+
+
+

A common component set of cross cutting components include different types of functionality regarding to authentication, authorization, security, caching, configuration, logging, and communication.

+
+
+
+
+

Communication between Layers: Interfaces

+
+
+

The main target of the use of interfaces is to loose coupling between layers and minimize dependencies.

+
+
+

Public interfaces allow to hide implementation details of the components within the layers making use of dependency inversion.

+
+
+

In order to make this possible, we make use of Dependency Injection Pattern (implementation of dependency inversion) given by default in .Net Core.

+
+
+

The provided Data Layer contains the abstract classes to inherit from. All new repository and service classes must inherit from them; they must also implement their own interfaces.

+
+
+
+technical architecture +
+
Figure 4. Architecture representation in deep
+
+
+
+
+

Templates

+
+ +
+
+
+

State of the art

+
+
+

The provided bundle contains the devon4Net API template based on .net core. The template allows to create a microservice solution with minimal configuration.

+
+
+

Also, the devon4Net framework can be added to third party templates such as the Amazon API template to use lambdas in serverless environments.

+
+
+

Included features:

+
+
+
    +
  • +

    Logging:

    +
  • +
  • +

    Text File

    +
  • +
  • +

    Sqlite database support

    +
  • +
  • +

    Serilog Seq Server support

    +
  • +
  • +

    Graylog integration ready through TCP/UDP/HTTP protocols

    +
  • +
  • +

    API Call params interception (simple and compose objects)

    +
  • +
  • +

    API error exception management

    +
  • +
  • +

    Swagger:

    +
  • +
  • +

    Swagger autogenerating client from comments and annotations on controller classes

    +
  • +
  • +

    Full swagger client customization (Version, Title, Description, Terms, License, Json end point definition)

    +
  • +
  • +

    Easy configuration with just one configuration node in your settings file

    +
  • +
  • +

    JWT:

    +
  • +
  • +

    Issuer, audience, token expiration customization by external file configuration

    +
  • +
  • +

    Token generation via certificate

    +
  • +
  • +

    MVC inherited classes to access JWT user properties

    +
  • +
  • +

    API method security access based on JWT Claims

    +
  • +
  • +

    CORS:

    +
  • +
  • +

    Simple CORS definition ready

    +
  • +
  • +

    Multiple CORS domain origin definition with specific headers and verbs

    +
  • +
  • +

    Headers:

    +
  • +
  • +

    Automatic header injection with middleware.

    +
  • +
  • +

    Supported header definitions: AccessControlExposeHeader, StrictTransportSecurityHeader, XFrameOptionsHeader, XssProtectionHeader, XContentTypeOptionsHeader, ContentSecurityPolicyHeader, PermittedCrossDomainPoliciesHeader, ReferrerPolicyHeader

    +
  • +
  • +

    Reporting server:

    +
  • +
  • +

    Partial implementation of reporting server based on My-FyiReporting (now runs on linux container)

    +
  • +
  • +

    Testing:

    +
  • +
  • +

    Integration test template with sqlite support

    +
  • +
  • +

    Unit test template

    +
  • +
  • +

    Moq, xunit frameworks integrated

    +
  • +
  • +

    Circuit breaker:

    +
  • +
  • +

    Integrated with HttpClient factory

    +
  • +
  • +

    Client Certificate customization

    +
  • +
  • +

    Number of retries customizables

    +
  • +
  • +

    LiteDB:

    +
  • +
  • +

    Support for LiteDB

    +
  • +
  • +

    Provided basic repository for CRUD operations

    +
  • +
  • +

    RabbitMq:

    +
  • +
  • +

    Use of EasyQNet library to perform CQRS main functions between different microservices

    +
  • +
  • +

    Send commands / Subscribe queues with one C# sentence

    +
  • +
  • +

    Events management: Handled received commands to subscribed messages

    +
  • +
  • +

    Automatic messaging backup when sent and handled (Internal database via LiteDB and database backup via Entity Framework)

    +
  • +
  • +

    MediatR:

    +
  • +
  • +

    Use of MediatR library to perform CQRS main functions in memory

    +
  • +
  • +

    Send commands / Subscribe queues with one C# sentence

    +
  • +
  • +

    Events management: Handled received commands to subscribed messages

    +
  • +
  • +

    Automatic messaging backup when sent and handled (Internal database via LiteDB and database backup via Entity Framework)

    +
  • +
  • +

    SmaxHcm:

    +
  • +
  • +

    Component to manage Microfocus SMAX for cloud infrastructure services management

    +
  • +
  • +

    CyberArk:

    +
  • +
  • +

    Manage safe credentials with CyberArk

    +
  • +
  • +

    AnsibleTower:

    +
  • +
  • +

    Ansible automates the cloud infrastructure. devon4net integrates with Ansible Tower via API consumption endpoints

    +
  • +
  • +

    gRPC+Protobuf:

    +
  • +
  • +

    Added Client + Server basic templates sample gRPC with Google’s Protobuf protocol using devon4net

    +
  • +
  • +

    Kafka:

    +
  • +
  • +

    Added Apache Kafka support for deliver/consume messages and create/delete topics as well

    +
  • +
+
+
+
+
+

Software stack

+
+
+
Technology Stack of devon4Net
+

|== == == == == == == == == == == = +|Topic|Detail|Implementation +|runtime|language & VM|.Net Core Version 3.0 +|persistence|OR-mapper| Entity Framework Core +|service|REST services|https://www.asp.net/web-api[Web API] +|service - integration to external systems - optional|SOAP services|https://msdn.microsoft.com/en-us/library/dd456779(v=vs.110).aspx[WCF] +|logging|framework|https://github.com/serilog/serilog-extensions-logging[Serilog] +|validation|framework| NewtonSoft Json, DataAnnotations +|component management|dependency injection| Unity +|security|Authentication & Authorization| JWT .Net Security - Token based, local Authentication Provider +|unit tests|framework|https://github.com/xunit/xunit[xUnit] +|Circuit breaker|framework, allows retry pattern on http calls|https://github.com/App-vNext/Polly[Polly] +|CQRS|Memory events and queue events| MediatR - EasyNetQ - Kafka +|Kafka| Kafka support for enterprise applications| Confluent.Kafka +|Fluent Validation| Fluent validation for class instances|https://fluentvalidation.net/[Fluent validation] +|== == == == == == == == == == == =

+
+
+
+
+

Target platforms

+
+
+

Thanks to the new .Net Core platform from Microsoft, the developed software can be published on Windows, Linux, OS X and Android platforms.

+
+
+
+ +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4net.wiki/cobiGen.html b/docs/devonfw.github.io/1.0/devon4net.wiki/cobiGen.html new file mode 100644 index 00000000..9263ce26 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4net.wiki/cobiGen.html @@ -0,0 +1,534 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4net Cobigen Guide

+
+ +
+
+
+

Overview

+
+
+

In this guide we will explain how to generate a new WebApi project from an OpenAPI 3.0.0 specification. This means that we are going to use a “contract first” strategy. This is going to be possible due to these type of files that contain all the information about entities, operations, etc…

+
+
+

In order to make it work we are using CobiGen, a powerful tool for generating source code. CobiGen allows users to generate all the structure and code of the components, helping to save a lot of time otherwise wasted on repetitive tasks.

+
+
+
+
+

Getting things ready

+
+ +
+
+
+

devonfw Distribution

+
+
+

The devonfw distributions can be obtained from the TeamForge releases library and are packaged in zips files that include all the needed tools, software and configurations.

+
+
+

It is not necessary to install or configure anything. Just extracting the zip content is enough to have a fully functional devonfw. The only thing you have to do is run create-or-update-workspace.bat and then update-all-workspaces.bat to set up all the needed tools.

+
+
+
+
+

devon4net Templates

+
+
+

We are going to use the template of devon4net as a base to generate all the code, so what we have to do now is to download said template using the following steps.

+
+
+

First of all, you have to set up the environment for .NET; you can do this using the following tutorial. Next we are going to create a new folder where we want to have the WebAPI project, and lastly we are going to open the terminal there.

+
+
+

Type the following:

+
+
+
+
dotnet new -i Devon4Net.WebAPI.Template
+
+
+
+

and then:

+
+
+
+
dotnet new Devon4NetAPI
+
+
+
+
+
+

OpenAPI File

+
+
+

In order to let CobiGen generate all the files, we first have to make some modifications to our OpenAPI file.

+
+
+

It is obligatory to put the “x-rootpackage” tag to indicate where CobiGen will place the generated files, as well as the "x-component" tags for each component. Keep in mind that, due to CobiGen’s limitations, each component must have its own entity.

+
+
+

You can read more information about how to configure your OpenAPI file and a working example here.

+
+
+
+
+

Generating files

+
+
+

CobiGen allows us to generate the files in two different ways. One of them is using Eclipse, which can be done through its graphical interface. The other way to generate the code is using the CobiGen CLI tool.

+
+
+
+
+

Generating files through Eclipse

+
+
+

In order to generate the files using Eclipse we need to follow some simple steps.

+
+
+

First we are going to import our basic devon4net WebAPI Project into Eclipse. To do so, open Eclipse with the “eclipse-main.bat” file that can be found in the devon distribution root folder. Once we are inside Eclipse we go to File > Open projects from file system…​ and, under "Directory", search for your project.

+
+
+
+cobigen +
+
+
+

Next we copy our OpenAPI file into the root folder of the project.

+
+
+
+cobigen +
+
+
+

And then we right click on OpenAPI file and then select CobiGen > Generate…​ It will display a window like this:

+
+
+
+cobigen +
+
+
+

To select all .NET features choose CRUD devon4net Server otherwise you can select only those that interest you.

+
+
+
+cobigen +
+
+
+

Once you select all the files that you want to generate, click on the “Finish” button to generate all the source code.

+
+
+
+
+

Generating files through Cobigen CLI

+
+
+

In order to generate the files using the Cobigen CLI it is needed to do the following steps:

+
+
+
    +
  1. +

    Go to devonfw distribution folder

    +
  2. +
  3. +

    Run console.bat, this will open a console.

    +
  4. +
  5. +

    Go to the folder you downloaded the devon4net template and your yml file.

    +
  6. +
  7. +

    Run the command:

    +
    +
    +
    cobigen generate {yourOpenAPIFile}.yml
    +
    +
    +
  8. +
  9. +

    A list of increments will be printed so that you can start the generation. The CRUD devon4net Server increment has to be selected.

    +
  10. +
+
+
+
+
+

Configuration

+
+ +
+
+
+

Dependency Injection configuration

+
+
+

At this point it is necessary to make some modifications in the code in order to configure the server correctly. To do so, locate the services and the repositories files that were created in Devon4Net.WebAPI.Implementation

+
+
+

Services location:

+
+
+
+cobigen +
+
+
+

Repositories location:

+
+
+
+cobigen +
+
+
+

Now, we are going to open the following file Devon4Net.WebAPI.Implementation\Configure\DevonConfiguration.cs. +In there we have to add the Dependency Injection for the services and the repositories that Cobigen has generated. The following image is an example of what is needed to add.

+
+
+
+cobigen +
+
+
+

Moreover it is needed to remove the last line in order to be able to run the application:

+
+
+
+
`throw new NotImplementedException(...);`
+
+
+
+
+
+

Configure data base

+
+
+

CobiGen generates an empty context that has to be filled in manually in order to be able to work with the database. The context can be found in [Project_Name]/Devon4Net.WebAPI.Implementation/Domain/Database/CobigenContext.cs.

+
+
+
+cobigen +
+
+
+
+
+

Configure services

+
+
+

In order to finish the configuration of the services it is needed to go to each service file of the managements generated.

+
+
+

In there we will see some "NotImplementedExceptions", so it is necessary to read carefully each comment inside each exception in order to be able to use the service. Below is an example of the service with its NotImplementedExceptions comments:

+
+
+
+cobigen +
+
+
+
+
+

Run the application

+
+
+

After doing all the steps defined above, open a terminal in path: [Project_Name]/Devon4Net.Application.WebAPI and then type:

+
+
+
+
dotnet run
+
+
+
+

This will deploy our application in our localhost on port 8082, so when you click here (https://localhost:8082/swagger) you can see, in swagger, all the services and the data model.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4net.wiki/codeconvention.html b/docs/devonfw.github.io/1.0/devon4net.wiki/codeconvention.html new file mode 100644 index 00000000..ffd9e6c5 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4net.wiki/codeconvention.html @@ -0,0 +1,616 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Code conventions

+
+
+

Introduction

+
+
+

This document covers .NET Coding Standards and is recommended to be read by team leaders/sw architects and developing teams operating in the Microsoft .NET environment.

+
+
+

“All the code in the system looks as if it was written by a single – very competent – individual” (K. Beck)

+
+
+
+
+

Capitalization Conventions

+
+
+

Terminology

+
+

Camel Case (camelCase)

+
+

Each word or abbreviation in the middle of the phrase begins with a capital letter, with no intervening spaces or punctuation.

+
+
+

The camel case convention, used only for parameter names, capitalizes the first character of each word except the first word, as shown in the following examples. As the example also shows, two-letter acronyms that begin a camel-cased identifier are both lowercase.

+
+
+

use camelCasing for parameter names.

+
+
+
+

Pascal Case (PascalCase)

+
+

The first letter of each concatenated word is capitalized. No other characters are used to separate the words, like hyphens or underscores.

+
+
+

The PascalCasing convention, used for all identifiers except parameter names, capitalizes the first character of each word (including acronyms over two letters in length).

+
+
+

use PascalCasing for all public member, type, and namespace names consisting of multiple words.

+
+
+
+

Underscore Prefix (_underScore)

+
+

For an underscore prefix ( _ ), the word after the _ uses camelCase terminology.

+
+
+
+
+
+
+

General Naming Conventions

+
+
+

choose easily readable identifier names.

+
+
+

favor readability over brevity.

+
+
+
+
◦ e.g.: `GetLength` is a better name than GetInt.
+◦ Aim for the “ubiquitous language” (E. Evans): A language distilled from the domain language, which helps the team clarifying domain concepts and communicating with domain experts.
+
+
+
+

prefer adding a suffix rather than a prefix to indicate a new version of an existing API.

+
+
+

use a numeric suffix to indicate a new version of an existing API, particularly if the existing name of the API is the only name that makes sense (i.e., if it is an industry standard) and if adding any meaningful suffix (or changing the name) is not an appropriate option.

+
+
+

do not use underscores, hyphens, or any other non-alphanumeric characters.

+
+
+

do not use Hungarian notation.

+
+
+

avoid using identifiers that conflict with keywords of widely used programming languages.

+
+
+

do not use abbreviations or contractions as part of identifier names.

+
+
+

do not use any acronyms that are not widely accepted, and even if they are, only when necessary.

+
+
+

do not use the "Ex" (or a similar) suffix for an identifier to distinguish it from an earlier version of the same API.

+
+
+

do not use C# reserved words as names.

+
+
+

do not use Hungarian notation. Hungarian notation is the practice of including a prefix in identifiers to encode some metadata about the parameter, such as the data type of the identifier.

+
+
+
+
◦ `e.g.: iNumberOfClients, sClientName`
+
+
+
+
+
+

Names of Assemblies and DLLs

+
+
+

An assembly is the unit of deployment and identity for managed code programs. Although assemblies can span one or more files, typically an assembly maps one-to-one with a DLL. Therefore, this section describes only DLL naming conventions, which then can be mapped to assembly naming conventions.

+
+
+

choose names for your assembly DLLs that suggest large chunks of functionality, such as System.Data.

+
+
+

Assembly and DLL names don’t have to correspond to namespace names, but it is reasonable to follow the namespace name when naming assemblies. A good rule of thumb is to name the DLL based on the common prefix of the assemblies contained in the assembly. For example, an assembly with two namespaces, MyCompany.MyTechnology.FirstFeature and MyCompany.MyTechnology.SecondFeature, could be called MyCompany.MyTechnology.dll.

+
+
+

consider naming DLLs according to the following pattern:
+<Company>.<Component>.dll +where <Component> contains one or more dot-separated clauses.

+
+
+

For example: +Litware.Controls.dll.

+
+
+
+
+

General coding style

+
+
+
    +
  • +

    Source files: One Namespace per file and one class per file.

    +
  • +
  • +

    Braces: On new line. Always use braces when optional.

    +
  • +
  • +

    Indention: Use tabs with size of 4.

    +
  • +
  • +

    Comments: Use // for simple comment or /// for summaries. Do not /* … */ and do not flower box.

    +
  • +
  • +

    Use built-in C# native data types vs .NET CTS types (string instead of String)

    +
  • +
  • +

    Avoid changing default type in Enums.

    +
  • +
  • +

    Use base or this only in constructors or within an override.

    +
  • +
  • +

    Always check for null before invoking events.

    +
  • +
  • +

    Avoid using Finalize. Use C# Destructors and do not create Finalize() method.

    +
  • +
  • +

    Suggestion: Use blank lines, to make it much more readable by dividing it into small, easy-to-digest sections:

    +
    +
    +
    ◦ Use a single blank line to separate logical groups of code, such as control structures.
    +◦ Use two blank lines to separate method definitions
    +
    +
    +
  • +
+
+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
CaseConvention

Source File

Pascal case. Match class name and file name

Namespace

Pascal case

Class

Pascal case

Interface

Pascal case

Generics

Single capital letter (T or K)

Methods

Pascal case (use a Verb or Verb+Object)

Public field

Pascal case

Private field

Camel case with underscore (_) prefix

Static field

Pascal case

Property

Pascal case. Try to use the get and set convention {get;set;}

Constant

Pascal case

Enum

Pascal case

Variable (inline)

Camel case

Param

Camel case

+
+
+
+

Use of Region guideline

+
+
+

Regions can be used to collapse code inside Visual Studio .NET. Regions are ideal candidates to hide boiler plate style code that adds little value to the reader on your code. Regions can then be expanded to provide progressive disclosure of the underlying details of the class or method.

+
+
+
    +
  • +

    Do Not regionalise entire type definitions that are of an important nature. Types such as enums (which tend to be fairly static in their nature) can be regionalised – their permissible values show up in Intellisense anyway.

    +
  • +
  • +

    Do Not regionalise an entire file. When another developer opens the file, all they will see is a single line in the code editor pane.

    +
  • +
  • +

    Do regionalise boiler plate type code.

    +
  • +
+
+
+
+
+

Use of Comment guideline

+
+
+

Code is the only completely reliable documentation: write “good code” first!

+
+
+

Avoid Unnecessary comments

+
+
    +
  • +

    Choosing good names for fields, methods, parameters, etc. “let the code speak” (K. Beck) by itself reducing the need for comments and documentation

    +
  • +
  • +

    Avoid “repeating the code” and commenting the obvious

    +
  • +
  • +

    Avoid commenting “tricky code”: rewrite it! If there’s no time at present to refactor a tricky section, mark it with a TODO and schedule time to take care of it as soon as possible.

    +
  • +
+
+
+
+

Effective comments

+
+
    +
  • +

    Use comments to summarize a section of code

    +
  • +
  • +

    Use comments to clarify sensitive pieces of code

    +
  • +
  • +

    Use comments to clarify the intent of the code

    +
  • +
  • +

    Bad written or out-of-date comments are more damaging than helpful:

    +
  • +
  • +

    Write clear and effective comments

    +
  • +
  • +

    Pay attention to pre-existing comments when modifying code or copying&pasting code

    +
  • +
+
+
+
+
+ +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4net.wiki/environment.html b/docs/devonfw.github.io/1.0/devon4net.wiki/environment.html new file mode 100644 index 00000000..43984be0 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4net.wiki/environment.html @@ -0,0 +1,443 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Environment

+
+ +
+
+
+

Overview

+
+ +
+
+
+

Required software

+ +
+
+

Setting up the environment

+
+
+
    +
  1. +

    Download and install Visual Studio Code

    +
  2. +
  3. +

    Download and install .Net Core SDK

    +
  4. +
  5. +

    Install the OmniSharp extension in Visual Studio Code

    +
  6. +
+
+
+
+
+

Hello world

+
+
+
    +
  1. +

    Open a project:

    +
    +
      +
    • +

      Open Visual Studio Code.

      +
    • +
    • +

      Click on the Explorer icon on the left menu and then click Open Folder.

      +
    • +
    • +

      Select the folder you want your C# project to be in and click Select Folder. For our example, we’ll create a folder for our project named 'HelloWorld'.

      +
    • +
    +
    +
  2. +
  3. +

    Initialize a C# project:

    +
    +
      +
    • +

      Open the Integrated Terminal from Visual Studio Code by typing CTRL+(backtick). Alternatively, you can select View > Integrated Terminal from the main menu.

      +
    • +
    • +

      In the terminal window, type dotnet new console.

      +
    • +
    • +

      This creates a Program.cs file in your folder with a simple "Hello World" program already written, along with a C# project file named HelloWorld.csproj.

      +
    • +
    +
    +
  4. +
  5. +

    Resolve the build assets:

    +
    +
      +
    • +

      For .NET Core 2.0, this step is optional. The dotnet restore command executes automatically when a new project is created.

      +
    • +
    +
    +
  6. +
  7. +

    Run the "Hello World" program:

    +
    +
      +
    • +

      Type dotnet run.

      +
    • +
    +
    +
  8. +
+
+
+
+
+

Debug

+
+
+
    +
  1. +

    Open Program.cs by clicking on it. The first time you open a C# file in Visual Studio Code, OmniSharp will load in the editor.

    +
  2. +
  3. +

    Visual Studio Code will prompt you to add the missing assets to build and debug your app. Select Yes.

    +
  4. +
  5. +

    To open the Debug view, click on the Debugging icon on the left side menu.

    +
  6. +
  7. +

    Locate the green arrow at the top of the pane. Make sure the drop-down next to it has .NET Core Launch (console) selected.

    +
  8. +
  9. +

    Add a breakpoint to your project by clicking on the editor margin (the space on the left of the line numbers in the editor).

    +
  10. +
  11. +

    Select F5 or the green arrow to start debugging. The debugger stops execution of your program when it reaches the breakpoint you set in the previous step.

    +
    +
      +
    • +

      While debugging you can view your local variables in the top left pane or use the debug console.

      +
    • +
    +
    +
  12. +
  13. +

    Select the green arrow at the top to continue debugging, or select the red square at the top to stop.

    +
  14. +
+
+
+
+
+

==

+
+
+

For more information and troubleshooting tips on .NET Core debugging with OmniSharp in Visual Studio Code, see Instructions for setting up the .NET Core debugger. +== ==

+
+
+
+ +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4net.wiki/home.html b/docs/devonfw.github.io/1.0/devon4net.wiki/home.html new file mode 100644 index 00000000..8128d78b --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4net.wiki/home.html @@ -0,0 +1,293 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

devon4net

+
+
+

This guide describes an application architecture for API development with .net core.

+
+
+
+
+

Motivation

+
+
+

The main challenge we encounter in our projects is to bring junior and senior developers into .net core. +There are a lot of different frameworks and architectures in the market. +The idea is to define an architecture which is a compromise between, on the one hand, leveraging the best practices and latest trends. +On the other hand, providing a short onboarding time while still using an architecture that helps us scale and be productive at the same time. +Also, the architecture must be compatible with the market. +Guides, practices and naming found in the web should still be valid (e.g. a stackoverflow article for a given problem).

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4net.wiki/howto.html b/docs/devonfw.github.io/1.0/devon4net.wiki/howto.html new file mode 100644 index 00000000..27e569cc --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4net.wiki/howto.html @@ -0,0 +1,1249 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Introduction

+
+
+

The aim of this document is to show how to get devon4net things done in an easy way.

+
+
+
+
+

How to

+
+ +
+
+
+

Start a new devonfw project

+
+
+

The .Net Core 3.1 template allows you to start developing an n-layer server application to provide the latest features. The template can be used in Visual Studio Code and Visual Studio 2019.

+
+
+

The application result can be deployed as a console application, microservice or web page.

+
+
+

To start developing with the devon4Net template, please follow these instructions:

+
+
+
+
+

== Using devon4Net template

+
+ +
+
+
+

== Option 1

+
+
+
+
Open your favourite terminal (Win/Linux/iOS)
+Go to future project's path
+Type dotnet new --install Devon4Net.WebAPI.Template
+Type dotnet new Devon4NetAPI
+Go to project's path
+You are ready to start developing with devon4Net
+
+
+
+
+
+

== Option 2

+
+
+
+
Create a new dotnet API project from scratch
+Add the NuGet package reference to your project:
+Install-Package Devon4Net.Application.WebAPI.Configuration
+
+
+
+

Set up your project as follows in program.cs file:

+
+
+
+
        public static void Main(string[] args)
+        {
+            // Please use
+            // Devonfw.Configure<Startup>(args);
+            // Or :
+
+            WebHost.CreateDefaultBuilder(args)
+                .UseStartup<Startup>()
+                .InitializeDevonFw()
+                .Build()
+                .Run();
+        }
+
+
+
+

Set up your project as follows in startup.cs file:

+
+
+
+
    private IConfiguration Configuration { get; }
+
+
+   public Startup(IConfiguration configuration)
+    {
+        Configuration = configuration;
+    }
+
+    public void ConfigureServices(IServiceCollection services)
+    {
+
+        services.ConfigureDevonFw(Configuration);
+        SetupDatabase(services);
+
+        ...
+    }
+
+
+    private void SetupDatabase(IServiceCollection services)
+    {
+        // Default is the database connection name in appsettings.json file
+        services.SetupDatabase<TodoContext>(Configuration, "Default", DatabaseType.InMemory);
+    }
+
+    public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
+    {
+        app.ConfigureDevonFw();
+        ...
+    }
+
+
+
+

Add the devonfw configuration options in your appsettings.json file

+
+
+
+
+

devon4net configuration files

+
+
+

To start using devon4net in your .net core application add this configuration in your appsettings.json file:

+
+
+
+
 "devonfw": {
+    "UseDetailedErrorsKey": true,
+    "UseIIS": false,
+    "UseSwagger": true,
+    "Environment": "Development",
+    "KillSwitch": {
+      "killSwitchSettingsFile": "killswitch.appsettings.json"
+    },
+    "Kestrel": {
+      "UseHttps": true,
+      "HttpProtocol": "Http2", //Http1, Http2, Http1AndHttp2, none
+      "ApplicationPort": 8082,
+      "KeepAliveTimeout": 120, //in seconds
+      "MaxConcurrentConnections": 100,
+      "MaxConcurrentUpgradedConnections": 100,
+      "MaxRequestBodySize": 28.6, //In MB. The default maximum request body size is 30,000,000 bytes, which is approximately 28.6 MB
+      "Http2MaxStreamsPerConnection": 100,
+      "Http2InitialConnectionWindowSize": 131072, // From 65,535 and less than 2^31 (2,147,483,648)
+      "Http2InitialStreamWindowSize": 98304, // From 65,535 and less than 2^31 (2,147,483,648)
+      "AllowSynchronousIO": true,
+      "SslProtocol": "Tls12", //Tls, Tls11,Tls12, Tls13, Ssl2, Ssl3, none. For Https2 Tls12 is needed
+      "ServerCertificate": {
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost"
+      },
+      "ClientCertificate": {
+        "DisableClientCertificateCheck": true,
+        "RequireClientCertificate": false,
+        "CheckCertificateRevocation": true,
+        "ClientCertificates": {
+          "Whitelist": [
+            "3A87A49460E8FE0E2A198E63D408DC58435BC501"
+          ],
+          "DisableClientCertificateCheck": false
+        }
+      }
+    },
+    "IIS": {
+      "ForwardClientCertificate": true,
+      "AutomaticAuthentication": true,
+      "AuthenticationDisplayName" : ""
+    }
+  }
+
+
+
+

Also, to start using the devon4net components, you should add the following json options in your appsettings.json or appsettings.Development.json file:

+
+
+
+
{
+  "ExtraSettingsFiles": [
+    "Put a directory path (relative/absolute/linux-like) like /run/secrets/global where there are many settings/secret files to load",
+    "Put a specific file name (with/without path) like /app-configs/app/extra-settings.json"
+  ],
+  "ConnectionStrings": {
+    "Default": "Todos",
+    "Employee": "Employee",
+    "RabbitMqBackup": "Add your database connection string here for messaging backup",
+    "MediatRBackup": "Add your database connection string here for messaging backup"
+  },
+  "Logging": {
+    "LogLevel": {
+      "Default": "Debug",
+      "System": "Information",
+      "Microsoft": "Information"
+    }
+  },
+  "Swagger": {
+    "Version": "v1",
+    "Title": "devon4net API",
+    "Description": "devon4net API Contract",
+    "Terms": "https://www.devonfw.com/terms-of-use/",
+    "Contact": {
+      "Name": "devonfw",
+      "Email": "sample@mail.com",
+      "Url": "https://www.devonfw.com"
+    },
+    "License": {
+      "Name": "devonfw - Terms of Use",
+      "Url": "https://www.devonfw.com/terms-of-use/"
+    },
+    "Endpoint": {
+      "Name": "V1 Docs",
+      "Url": "/swagger/v1/swagger.json",
+      "UrlUi": "swagger",
+      "RouteTemplate": "swagger/v1/{documentName}/swagger.json"
+    }
+  },
+  "JWT": {
+    "Audience": "devon4Net",
+    "Issuer": "devon4Net",
+    "TokenExpirationTime": 60,
+    "ValidateIssuerSigningKey": true,
+    "ValidateLifetime": true,
+    "ClockSkew": 5,
+    "Security": {
+      "SecretKeyLengthAlgorithm": "",
+      "SecretKeyEncryptionAlgorithm": "",
+      "SecretKey": "",
+      "Certificate": "",
+      "CertificatePassword": "",
+      "CertificateEncryptionAlgorithm": ""
+    }
+  },
+  "Cors": []
+  //[
+  //  {
+  //    "CorsPolicy": "CorsPolicy1",
+  //    "Origins": "http://example.com,http://www.contoso.com",
+  //    "Headers": "accept,content-type,origin,x-custom-header",
+  //    "Methods": "GET,POST,HEAD",
+  //    "AllowCredentials": true
+  //  },
+  //  {
+  //    "CorsPolicy": "CorsPolicy2",
+  //    "Origins": "http://example.com,http://www.contoso.com",
+  //    "Headers": "accept,content-type,origin,x-custom-header",
+  //    "Methods": "GET,POST,HEAD",
+  //    "AllowCredentials": true
+  //  }
+  //]
+  ,
+  "CircuitBreaker": {
+    "CheckCertificate": false,
+    "Endpoints": [
+      {
+        "Name": "AnsibleTower",
+        "BaseAddress": "PUT THE IP ADDRESS HERE",
+        "Headers": {
+        },
+        "WaitAndRetrySeconds": [
+          0.0001,
+          0.0005,
+          0.001
+        ],
+        "DurationOfBreak": 0.0005,
+        "UseCertificate": false,
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost",
+        "SslProtocol": "3072" //TLS12
+      },
+      {
+        "Name": "CyberArk",
+        "BaseAddress": "PUT THE IP ADDRESS HERE",
+        "Headers": {
+        },
+        "WaitAndRetrySeconds": [
+          0.0001,
+          0.0005,
+          0.001
+        ],
+        "DurationOfBreak": 0.0005,
+        "UseCertificate": false,
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost",
+        "SslProtocol": "3072" //TLS12
+      },
+      {
+        "Name": "SmaxHcm",
+        "BaseAddress": "PUT THE IP ADDRESS HERE",
+        "Headers": {
+        },
+        "WaitAndRetrySeconds": [
+          0.0001,
+          0.0005,
+          0.001
+        ],
+        "DurationOfBreak": 0.0005,
+        "UseCertificate": false,
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost",
+        "SslProtocol": "3072" //TLS12
+      }
+    ]
+  },
+  "Headers": {
+    "AccessControlExposeHeader": "Authorization",
+    "StrictTransportSecurityHeader": "",
+    "XFrameOptionsHeader": "DENY",
+    "XssProtectionHeader": "1;mode=block",
+    "XContentTypeOptionsHeader": "nosniff",
+    "ContentSecurityPolicyHeader": "",
+    "PermittedCrossDomainPoliciesHeader": "",
+    "ReferrerPolicyHeader": ""
+  },
+  "Log": {
+    "UseAOPTrace": false,
+    "LogLevel": "Debug",
+    "SqliteDatabase": "logs/log.db",
+    "LogFile": "logs/{0}_devonfw.log",
+    "SeqLogServerHost": "http://127.0.0.1:5341",
+    "GrayLog": {
+      "GrayLogHost": "127.0.0.1",
+      "GrayLogPort": "12201",
+      "GrayLogProtocol": "UDP",
+      "UseSecureConnection": true,
+      "UseAsyncLogging": true,
+      "RetryCount": 5,
+      "RetryIntervalMs": 15,
+      "MaxUdpMessageSize": 8192
+    }
+  },
+  "RabbitMq": {
+    "EnableRabbitMq": false,
+    "Hosts": [
+      {
+        "Host": "127.0.0.1",
+        "Port": 5672,
+        "Ssl": false,
+        "SslServerName": "localhost",
+        "SslCertPath": "localhost.pfx",
+        "SslCertPassPhrase": "localhost",
+        "SslPolicyErrors": "RemoteCertificateNotAvailable" //None, RemoteCertificateNotAvailable, RemoteCertificateNameMismatch, RemoteCertificateChainErrors
+      }
+    ],
+
+    "VirtualHost": "/",
+    "UserName": "admin",
+    "Password": "password",
+    "Product": "devon4net",
+    "RequestedHeartbeat": 10, //Set to zero for no heartbeat
+    "PrefetchCount": 50,
+    "PublisherConfirms": false,
+    "PersistentMessages": true,
+    "Platform": "localhost",
+    "Timeout": 10,
+    "Backup": {
+      "UseLocalBackup": false,
+      "DatabaseName": "devon4netMessageBackup.db"
+    }
+  },
+  "MediatR": {
+    "EnableMediatR": false,
+    "Backup": {
+      "UseLocalBackup": false,
+      "DatabaseName": "devon4netMessageBackup.db"
+    }
+  },
+  "LiteDb": {
+    "DatabaseLocation": "devon4net.db"
+  },
+  "AnsibleTower": {
+    "EnableAnsible": false,
+    "Name": "AnsibleTower",
+    "CircuitBreakerName": "AnsibleTower",
+    "ApiUrlBase": "/api/v2/?format=json",
+    "Version": "1.0.5.29",
+    "Username": "",
+    "Password": ""
+  },
+  "CyberArk": {
+    "EnableCyberArk": false,
+    "Username": "",
+    "Password": "",
+    "CircuitBreakerName": "CyberArk"
+  },
+  "SmaxHcm": {
+    "EnableSmax": false,
+    "Username": "",
+    "Password": "",
+    "TenantId": "",
+    "CircuitBreakerName": "SmaxHcm",
+    "ProviderId": ""
+  },
+  "Kafka": {
+    "EnableKafka": true,
+    "Administration": [
+      {
+        "AdminId": "Admin1",
+        "Servers": "127.0.0.1:9092"
+      }
+    ],
+    "Producers": [
+      {
+        "ProducerId": "Producer1", // devon identifier
+        "Servers": "127.0.0.1:9092", // Initial list of brokers as a CSV list of broker host or host:port. The application may also use `rd_kafka_brokers_add()` to add brokers during runtime
+        "ClientId": "client1", //Client identifier
+        "Topic": "devonfw", // topics to deliver the message
+        "MessageMaxBytes": 1000000, //Maximum Kafka protocol request message size. Due to differing framing overhead between protocol versions the producer is unable to reliably enforce a strict max message limit at produce time and may exceed the maximum size by one message in protocol ProduceRequests, the broker will enforce the topic's `max.message.bytes` limit (see Apache Kafka documentation)
+        "CompressionLevel": -1, // [0-9] for gzip; [0-12] for lz4; only 0 for snappy; -1 = codec-dependent default compression level
+        "CompressionType": "None", // None, Gzip, Snappy, Lz4, Zstd
+        "ReceiveMessageMaxBytes": 100000000,
+        "EnableSslCertificateVerification": false,
+        "CancellationDelayMaxMs": 100, // The maximum length of time (in milliseconds) before a cancellation request is acted on. Low values may result in measurably higher CPU usage
+        "Ack": "None", //Zero=Broker does not send any response/ack to client, One=The leader will write the record to its local log but will respond without awaiting full acknowledgement from all followers. All=Broker will block until message is committed by all in sync replicas (ISRs). If there are less than min.insync.replicas (broker configuration) in the ISR set the produce request will fail
+        "Debug": "", //A comma-separated list of debug contexts to enable. Detailed Producer debugging: broker,topic,msg. Consumer: consumer,cgrp,topic,fetch
+        "BrokerAddressTtl": 1000, //How long to cache the broker address resolving results (milliseconds)
+        "BatchNumMessages": 1000000, // Maximum number of messages batched in one MessageSet. The total MessageSet size is also limited by batch.size and message.max.bytes
+        "EnableIdempotence": false, //When set to `true`, the producer will ensure that messages are successfully produced exactly once and in the original produce order. The following configuration properties are adjusted automatically (if not modified by the user) when idempotence is enabled: `max.in.flight.requests.per.connection=5` (must be less than or equal to 5), `retries=INT32_MAX` (must be greater than 0), `acks=all`, `queuing.strategy=fifo`. Producer instantiation will fail if user-supplied configuration is incompatible
+        "MaxInFlight": 5,
+        "MessageSendMaxRetries": 5,
+        "BatchSize": 100000000 // Maximum size (in bytes) of all messages batched in one MessageSet, including protocol framing overhead. This limit is applied after the first message has been added to the batch, regardless of the first message's size, this is to ensure that messages that exceed batch.size are produced. The total MessageSet size is also limited by batch.num.messages and message.max.bytes
+      }
+    ],
+    "Consumers": [
+      {
+        "ConsumerId": "Consumer1", // devon identifier
+        "Servers": "127.0.0.1:9092",
+        "GroupId": "group1",
+        "Topics": "devonfw", // Comma separated topics to subscribe
+        "AutoCommit": true, //Automatically and periodically commit offsets in the background. Note: setting this to false does not prevent the consumer from fetching previously committed start offsets. To circumvent this behaviour set specific start offsets per partition in the call to assign()
+        "StatisticsIntervalMs": 0, //librdkafka statistics emit interval. The application also needs to register a stats callback using `rd_kafka_conf_set_stats_cb()`. The granularity is 1000ms. A value of 0 disables statistics
+        "SessionTimeoutMs": 10000, //Client group session and failure detection timeout. The consumer sends periodic heartbeats (heartbeat.interval.ms) to indicate its liveness to the broker. If no heartbeats are received by the broker for a group member within the session timeout, the broker will remove the consumer from the group and trigger a rebalance. The allowed range is configured with the **broker** configuration properties `group.min.session.timeout.ms` and `group.max.session.timeout.ms`. Also see `max.poll.interval.ms`
+        "AutoOffsetReset": "Largest", //Action to take when there is no initial offset in offset store or the desired offset is out of range: 'smallest','earliest' - automatically reset the offset to the smallest offset, 'largest','latest' - automatically reset the offset to the largest offset, 'error' - trigger an error which is retrieved by consuming messages and checking 'message-&gt;err'
+        "EnablePartitionEof": true, //Emit a partition EOF event whenever the consumer reaches the end of a partition
+        "IsolationLevel": "ReadCommitted", //Controls how to read messages written transactionally: `ReadCommitted` - only return transactional messages which have been committed. `ReadUncommitted` - return all messages, even transactional messages which have been aborted.
+        "EnableSslCertificateVerification": false,
+        "Debug": "" //A comma-separated list of debug contexts to enable. Detailed Producer debugging: broker,topic,msg. Consumer: consumer,cgrp,topic,fetch
+      }
+    ]
+  }
+}
+
+
+
+
+
+

devon4net Cobigen Guide

+
+ +
+
+
+

Overview

+
+
+

In this guide we will explain how to generate a new WebApi project from an OpenAPI 3.0.0 specification. This means that we are going to use a “contract first” strategy. This is going to be possible due to these type of files that contain all the information about entities, operations, etc…

+
+
+

In order to make it work we are using CobiGen, a powerful tool for generating source code. CobiGen allows users to generate all the structure and code of the components, helping to save a lot of time otherwise wasted on repetitive tasks.

+
+
+
+
+

Getting things ready

+
+ +
+
+
+

== devonfw Distribution

+
+
+

The devonfw distributions can be obtained from here. You can find all releases in maven central.

+
+
+

It is not necessary to install nor configure anything. Just extracting the zip content is enough to have a fully functional devonfw. The only thing you have to do is run create-or-update-workspace.bat and then update-all-workspaces.bat to set up all the needed tools.

+
+
+
+
+

== devon4net Templates

+
+
+

We are going to use the template of devon4net as a base to generate all the code, so what we have to do now is to download said template using the following steps.

+
+
+

First of all you have to set up all the environment for .NET, you can do this using the following tutorial. Next we are going to create a new folder where we want to have the WebAPI project, lastly we are going to open the terminal there.

+
+
+

Type the following:

+
+
+
+
dotnet new -i Devon4Net.WebAPI.Template
+
+
+
+

and then:

+
+
+
+
dotnet new Devon4NetAPI
+
+
+
+
+
+

== OpenAPI File

+
+
+

In order to let CobiGen generate all the files, we first have to make some modifications to our OpenAPI file.

+
+
+

It is obligatory to put the “x-rootpackage” tag to indicate where CobiGen will place the generated files as well as the "x-component" tags for each component, keep in mind that due to CobiGen’s limitations each component must have its own entity.

+
+
+

You can read more information about how to configure your OpenAPI file and a working example here.

+
+
+
+
+

Generating files

+
+
+

Cobigen allows us to generate the files in two different ways. One of them is using Eclipse, which can be done through its graphical interface. The other way to generate the code is using the Cobigen CLI tool.

+
+
+
+
+

== Generating files through Eclipse

+
+
+

In order to generate the files using Eclipse we need to follow some simple steps.

+
+
+

First we are going to import our basic devon4net WebAPI Project into Eclipse. To do so, open Eclipse with the “eclipse-main.bat” file that can be found in the devon distribution root folder. Once we are inside of Eclipse we go to File > Open projects from file system…​ and, under "Directory", search for your project.

+
+
+
+cobigen +
+
+
+

Next we copy our OpenAPI file into the root folder of the project.

+
+
+
+cobigen +
+
+
+

And then we right click on OpenAPI file and then select CobiGen > Generate…​ It will display a window like this:

+
+
+
+cobigen +
+
+
+

To select all .NET features choose CRUD devon4net Server otherwise you can select only those that interest you.

+
+
+
+cobigen +
+
+
+

Once you select all the files that you want to generate, click on the “Finish” button to generate all the source code.

+
+
+
+
+

== Generating files through Cobigen CLI

+
+
+

In order to generate the files using the Cobigen CLI, follow these steps:

+
+
+
    +
  1. +

    Go to devonfw distribution folder

    +
  2. +
  3. +

    Run console.bat, this will open a console.

    +
  4. +
  5. +

    Go to the folder you downloaded the devon4net template and your yml file.

    +
  6. +
  7. +

    Run the command:

    +
    +
    +
    cobigen generate {yourOpenAPIFile}.yml
    +
    +
    +
  8. +
  9. +

    A list of increments will be printed so that you can start the generation. It has to be selected CRUD devon4net Server increment.

    +
  10. +
+
+
+
+
+

Configuration

+
+ +
+
+
+

== Dependency Injection configuration

+
+
+

At this point it is needed to make some modifications in the code in order to configure correctly the server. To do so it is needed to locate the services and the repositories files that were created in Devon4Net.WebAPI.Implementation

+
+
+

Services location:

+
+
+
+cobigen +
+
+
+

Repositories location:

+
+
+
+cobigen +
+
+
+

Now, we are going to open the following file Devon4Net.WebAPI.Implementation\Configure\DevonConfiguration.cs. +In there we have to add the Dependency Injection for the services and the repositories that Cobigen has generated. The following image is an example of what is needed to add.

+
+
+
+cobigen +
+
+
+

Moreover it is needed to remove the last line in order to be able to run the application:

+
+
+
+
`throw new NotImplementedException(...);`
+
+
+
+
+
+

== Configure data base

+
+
+

Cobigen generates an empty context that has to be filled in manually in order to be able to work with the database. The context can be found in [Project_Name]/Devon4Net.WebAPI.Implementation/Domain/Database/CobigenContext.cs.

+
+
+
+cobigen +
+
+
+
+
+

== Configure services

+
+
+

In order to finish the configuration of the services it is needed to go to each service file of the managements generated.

+
+
+

In there we will see some "NotImplementedException" occurrences, so it is necessary to read each comment inside each exception carefully in order to be able to use the service. Below is an example of the service with its NotImplementedException comments:

+
+
+
+cobigen +
+
+
+
+
+

== Run the application

+
+
+

After doing all the steps defined above, open a terminal in path: [Project_Name]/Devon4Net.Application.WebAPI and then type:

+
+
+
+
dotnet run
+
+
+
+

This will deploy our application in our localhost with the port 8082, so when you click here (https://localhost:8082/swagger) you can see, in swagger, all the services and the data model.

+
+
+
+
+

Use HTTP2 protocol

+
+
+

You can specify the HTTP protocol to be used on your devon4net application by modifying some node values at the devonfw node in your appsettings configuration file.

+
+
+
+
+

HttpProtocol

+
+
+

The supported protocols are:

+
+
+

|== == == == == == == == == == == = +|Protocol|Description +|Http1| Http1 protocol +|Http2| Http2 Protocol +|Http1AndHttp2| Both supported +|== == == == == == == == == == == =

+
+
+
+
+

== SSL

+
+
+

To activate the HTTP2, the SslProtocol node must be set to Tls12 value.

+
+
+

The SSL protocol supported version values are:

+
+
+
    +
  • +

    Tls

    +
  • +
  • +

    Tls11

    +
  • +
  • +

    Tls12

    +
  • +
  • +

    Tls13

    +
  • +
  • +

    Ssl2

    +
  • +
  • +

    Ssl3

    +
  • +
+
+
+
+
+

Create a certificate for development purposes

+
+
+

In order to create a valid certificate for development purposes the OpenSSL tools are needed.

+
+
+
+
+

Certificate authority (CA)

+
+
+

Run the next commands in a shell:

+
+
+
+
1. openssl req -x509 -nodes -new -sha256 -days 1024 -newkey rsa:2048 -keyout RootCA.key -out RootCA.pem -subj "/C=ES/ST=Valencia/L=Valencia/O=Certificates/CN=localhost.local"
+
+2. openssl x509 -outform pem -in RootCA.pem -out RootCA.crt
+
+
+
+

If you want to convert your certificate run the command:

+
+
+
+
openssl pkcs12 -export -out localhost.pfx -inkey RootCA.key -in RootCA.crt
+
+
+
+
+
+

Domain name certificate

+
+
+

Run the next commands in a shell:

+
+
+
+
1. openssl req -new -nodes -newkey rsa:2048 -keyout localhost.key -out localhost.csr -subj "/C=ES/ST=Valencia/L=Valencia/O=Certificates/CN=localhost.local"
+
+2. openssl x509 -req -sha256 -days 1024 -in localhost.csr -CA RootCA.pem -CAkey RootCA.key -CAcreateserial -extfile domains.ext -out localhost.crt
+
+
+
+

Where the domains.ext file should contain:

+
+
+
+
authorityKeyIdentifier=keyid,issuer
+basicConstraints=CA:FALSE
+keyUsage = digitalSignature, nonRepudiation, keyEncipherment, dataEncipherment
+subjectAltName = @alt_names
+[alt_names]
+DNS.1 = localhost
+DNS.2 = localhost.local
+DNS.3 = 127.0.0.1
+DNS.4 = fake1.local
+DNS.5 = fake2.local
+
+
+
+

If you want to convert your certificate run the command:

+
+
+
+
openssl pkcs12 -export -out localhost.pfx -inkey localhost.key -in localhost.crt
+
+
+
+
+
+

Setup the database driver

+
+
+

Add the database connection on the SetupDatabase method at Startup.cs

+
+
+
+
       private void SetupDatabase(IServiceCollection services)
+        {
+            services.SetupDatabase<TodoContext>(Configuration, "Default", WebAPI.Configuration.Enums.DatabaseType.InMemory);
+        }
+
+
+
+

Where:

+
+
+

|== == == == == == == == == == == = +|Param|Description +|TodoContext| Is the database context definition +|Default| Is the connection string defined at ConnectionString node at the appsettings configuration file +|WebAPI.Configuration.Enums.DatabaseType.InMemory| Is the database driver selection. In this case InMemory data base is chosen +|== == == == == == == == == == == =

+
+
+

The supported databases are:

+
+
+
    +
  • +

    SqlServer

    +
  • +
  • +

    Sqlite

    +
  • +
  • +

    InMemory

    +
  • +
  • +

    Cosmos

    +
  • +
  • +

    PostgreSQL

    +
  • +
  • +

    MySql

    +
  • +
  • +

    MariaDb

    +
  • +
  • +

    FireBird

    +
  • +
  • +

    Oracle

    +
  • +
  • +

    MSAccess

    +
  • +
+
+
+
+
+

Change the JWT encryption algorithm

+
+
+

In the appsettings.json configuration file, you can use the next values on the SecretKeyLengthAlgorithm and SecretKeyEncryptionAlgorithm nodes at JWT configuration:

+
+
+

|== == == == == == == == == == == = +|Algorithm|Description +|Aes128Encryption|"http://www.w3.org/2001/04/xmlenc#aes128-cbc" +|Aes192Encryption|"http://www.w3.org/2001/04/xmlenc#aes192-cbc" +|Aes256Encryption|"http://www.w3.org/2001/04/xmlenc#aes256-cbc" +|DesEncryption|"http://www.w3.org/2001/04/xmlenc#des-cbc" +|Aes128KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes128" +|Aes192KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes192" +|Aes256KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes256" +|RsaV15KeyWrap|"http://www.w3.org/2001/04/xmlenc#rsa-1_5" +|Ripemd160Digest|"http://www.w3.org/2001/04/xmlenc#ripemd160" +|RsaOaepKeyWrap|"http://www.w3.org/2001/04/xmlenc#rsa-oaep" +|Aes128KW|"A128KW" +|Aes256KW|"A256KW" +|RsaPKCS1|"RSA1_5" +|RsaOAEP|"RSA-OAEP" +|ExclusiveC14n|"http://www.w3.org/2001/10/xml-exc-c14n#" +|ExclusiveC14nWithComments|"http://www.w3.org/2001/10/xml-exc-c14n#WithComments" +|EnvelopedSignature|"http://www.w3.org/2000/09/xmldsig#enveloped-signature" +|Sha256Digest|"http://www.w3.org/2001/04/xmlenc#sha256" +|Sha384Digest|"http://www.w3.org/2001/04/xmldsig-more#sha384" +|Sha512Digest|"http://www.w3.org/2001/04/xmlenc#sha512" +|Sha256|"SHA256" +|Sha384|"SHA384" +|Sha512|"SHA512" +|EcdsaSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha256" +|EcdsaSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha384" +|EcdsaSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha512" +|HmacSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha256" +|HmacSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha384" +|HmacSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha512" +|RsaSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha256" +|RsaSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha384" +|RsaSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha512" +|RsaSsaPssSha256Signature|"http://www.w3.org/2007/05/xmldsig-more#sha256-rsa-MGF1" 
+|RsaSsaPssSha384Signature|"http://www.w3.org/2007/05/xmldsig-more#sha384-rsa-MGF1" +|RsaSsaPssSha512Signature|"http://www.w3.org/2007/05/xmldsig-more#sha512-rsa-MGF1" +|EcdsaSha256|"ES256" +|EcdsaSha384|"ES384" +|EcdsaSha512|"ES512" +|HmacSha256|"HS256" +|HmacSha384|"HS384" +|HmacSha512|"HS512" +|None|"none" +|RsaSha256|"RS256" +|RsaSha384|"RS384" +|RsaSha512|"RS512" +|RsaSsaPssSha256|"PS256" +|RsaSsaPssSha384|"PS384" +|RsaSsaPssSha512|"PS512" +|Aes128CbcHmacSha256|"A128CBC-HS256" +|Aes192CbcHmacSha384|"A192CBC-HS384" +|Aes256CbcHmacSha512|"A256CBC-HS512" +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4net.wiki/master-devon4net.html b/docs/devonfw.github.io/1.0/devon4net.wiki/master-devon4net.html new file mode 100644 index 00000000..6c439cf8 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4net.wiki/master-devon4net.html @@ -0,0 +1,5743 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

==.net

+
+
+

Architecture basics

+
+ +
+

Introduction

+
+

The devonfw platform provides a solution to building applications which combine best-in-class frameworks and libraries as well as industry proven practices and code conventions. +It massively speeds up development, reduces risks and helps you to deliver better results.

+
+
+
+

Overview Onion Design

+
+

This guide shows the overall proposed architecture in terms of separated layers, making use of the Onion architecture pattern. Each layer represents a logical group of components and functionality. In this guide you will learn the basics of the proposed layer-based architecture in order to develop software making use of the best practices.

+
+
+
+

Layer specification

+
+
+
+

It is important to understand the distinction between layers and tiers. Layers describe the logical groupings of the functionality and components in an application; whereas tiers describe the physical distribution of the functionality and components on separate servers, computers, networks, or remote locations. Although both layers and tiers use the same set of names (presentation, business, services, and data), remember that only tiers imply a physical separation. It is quite common to locate more than one layer on the same physical machine (the same tier). You can think of the term tier as referring to physical distribution patterns such as two-tier, three-tier, and n-tier.

+
+
+
+— Layered Application Guidelines
+MSDN Microsoft +
+
+
+

The proposed architecture makes use of cooperating components called layers. To develop specific functionality, each layer contains a set of components capable of providing that functionality.

+
+
+

The next figure represents the different layers:

+
+
+
+technical architecture +
+
Figure 1. High level architecture representation
+
+
+

The layers are separated in physical tiers making use of interfaces. This pattern makes it possible to be flexible in different kinds of projects, maximizing performance and deployment strategies (synchronous/asynchronous access, security, component deployment in different environments, microservices…​). Another important point is to provide automated unit testing or test-driven development (TDD) facilities.

+
+
+
+

== Application layer

+
+

The Application Layer encapsulates the different .Net projects and its resource dependencies and manages the user interaction depending on the project’s nature.

+
+
+
+technical architecture +
+
Figure 2. Net application stack
+
+
+

The provided application template implements a dotnet API application. It also integrates the Swagger client by default. This provides the possibility to share the contract with external applications (angular, mobile apps, external services…​).

+
+
+
+

== Business layer

+
+

The business layer implements the core functionality of the application and encapsulates the component’s logic. +This layer provides the interface between the data transformation and the application exposition. This allow the data to be optimized and ready for different data consumers.

+
+
+

This layer may implement for each main entity the API controller, the entity related service and other classes to support the application logic.

+
+
+

In order to implement the service logic, the services class must follow the next specification:

+
+
+
+
    public class Service<TContext> : IService where TContext: DbContext
+
+
+
+

PE: devon4Net API template shows how to implement the TODOs service as follows:

+
+
+
+
    public class TodoService: Service<TodoContext>, ITodoService
+
+
+
+

Where Service is the base service class to be inherited and have full access for the Unit of work, TodoContext is the TODOs database context and ITodoService is the interface of the service, which exposes the public extended methods to be implemented.

+
+
+
+

== Data layer

+
+

The data layer orchestrates the data obtained between the Domain Layer and the Business Layer. Also transforms the data to be used more efficiently between layers.

+
+
+

So, if a service needs the help of another service or repository, the implemented Dependency Injection is the solution to accomplish the task.

+
+
+

The main aim of this layer is to implement the repository for each entity. The repository’s interface is defined in the Domain layer.

+
+
+

In order to implement the repository logic, the repository class must follow the next specification:

+
+
+
+
    Repository<T> : IRepository<T> where T : class
+
+
+
+

PE: devon4Net API template shows how to implement the TODOs repository as follows:

+
+
+
+
    public class TodoRepository : Repository<Todos>, ITodoRepository
+
+
+
+

Where Repository is the base repository class to be inherited and have full access for the basic CRUD operations, Todos is the entity defined in the database context. ITodoRepository is the interface of the repository, which exposes the public extended methods to be implemented.

+
+
+ + + + + +
+ + +Please remember that <T> is the mapped class which references the entity from the database context. This abstraction allows writing service implementations with different database contexts +
+
+
+
+

== Domain layer

+
+

The domain layer provides access to data directly exposed from other systems. The main source is used to be a data base system. The provided template makes use of Entity Framework solution from Microsoft in order to achieve this functionality.

+
+
+

To make good use of this technology, the Repository pattern has been implemented with the help of the Unit of Work pattern. Also, the use of generic types makes this solution highly flexible.

+
+
+

Regarding the database source, each entity is mapped as a class. The repository pattern allows using these mapped classes to access the database via Entity Framework:

+
+
+
+
 public class UnitOfWork<TContext> : IUnitOfWork<TContext> where TContext : DbContext
+
+
+
+ + + + + +
+ + +Where <T> is the mapped class which references the entity from the database. +
+
+
+

The repository and unit of work patterns create an abstraction layer between the data access layer and the business logic layer of an application.

+
+
+ + + + + +
+ + +Domain Layer has no dependencies with other layers. It contains the Entities, datasources and the Repository Interfaces. +
+
+
+
+

devon4Net architecture layer implementation

+
+

The next picture shows how the devon4Net API template implements the architecture described in previous points:

+
+
+
+devon4Net api template architecture implementation +
+
Figure 3. devon4Net architecture implementations
+
+
+
+

== Cross-Cutting concerns

+
+

Cross-cutting provides the implementation functionality that spans layers. Each functionality is implemented through components able to work stand alone. This approach provides better reusability and maintainability.

+
+
+

A common component set of cross cutting components include different types of functionality regarding to authentication, authorization, security, caching, configuration, logging, and communication.

+
+
+
+

Communication between Layers: Interfaces

+
+

The main target of the use of interfaces is to achieve loose coupling between layers and minimize dependencies.

+
+
+

Public interfaces allow to hide implementation details of the components within the layers making use of dependency inversion.

+
+
+

In order to make this possible, we make use of Dependency Injection Pattern (implementation of dependency inversion) given by default in .Net Core.

+
+
+

The provided Data Layer contains the abstract classes to inherit from. All new repository and service classes must inherit from them, and they must also implement their own interfaces.

+
+
+
+technical architecture +
+
Figure 4. Architecture representation in deep
+
+
+
+

Templates

+ +
+
+

State of the art

+
+

The provided bundle contains the devon4Net API template based on .net core. The template allows to create a microservice solution with minimal configuration.

+
+
+

Also, the devon4Net framework can be added to third party templates such as the Amazon API template to use lambdas in serverless environments.

+
+
+

Included features:

+
+
+
    +
  • +

    Logging:

    +
  • +
  • +

    Text File

    +
  • +
  • +

    Sqlite database support

    +
  • +
  • +

    Serilog Seq Server support

    +
  • +
  • +

    Graylog integration ready through TCP/UDP/HTTP protocols

    +
  • +
  • +

    API Call params interception (simple and compose objects)

    +
  • +
  • +

    API error exception management

    +
  • +
  • +

    Swagger:

    +
  • +
  • +

    Swagger autogenerating client from comments and annotations on controller classes

    +
  • +
  • +

    Full swagger client customization (Version, Title, Description, Terms, License, Json end point definition)

    +
  • +
  • +

    Easy configuration with just one configuration node in your settings file

    +
  • +
  • +

    JWT:

    +
  • +
  • +

    Issuer, audience, token expiration customization by external file configuration

    +
  • +
  • +

    Token generation via certificate

    +
  • +
  • +

    MVC inherited classes to access JWT user properties

    +
  • +
  • +

    API method security access based on JWT Claims

    +
  • +
  • +

    CORS:

    +
  • +
  • +

    Simple CORS definition ready

    +
  • +
  • +

    Multiple CORS domain origin definition with specific headers and verbs

    +
  • +
  • +

    Headers:

    +
  • +
  • +

    Automatic header injection with middleware.

    +
  • +
  • +

    Supported header definitions: AccessControlExposeHeader, StrictTransportSecurityHeader, XFrameOptionsHeader, XssProtectionHeader, XContentTypeOptionsHeader, ContentSecurityPolicyHeader, PermittedCrossDomainPoliciesHeader, ReferrerPolicyHeader

    +
  • +
  • +

    Reporting server:

    +
  • +
  • +

    Partial implementation of reporting server based on My-FyiReporting (now runs on linux container)

    +
  • +
  • +

    Testing:

    +
  • +
  • +

    Integration test template with sqlite support

    +
  • +
  • +

    Unit test template

    +
  • +
  • +

    Moq, xunit frameworks integrated

    +
  • +
  • +

    Circuit breaker:

    +
  • +
  • +

    Integrated with HttpClient factory

    +
  • +
  • +

    Client Certificate customization

    +
  • +
  • +

    Number of retries customizables

    +
  • +
  • +

    LiteDB:

    +
  • +
  • +

    Support for LiteDB

    +
  • +
  • +

    Provided basic repository for CRUD operations

    +
  • +
  • +

    RabbitMq:

    +
  • +
  • +

    Use of EasyQNet library to perform CQRS main functions between different microservices

    +
  • +
  • +

    Send commands / Subscribe queues with one C# sentence

    +
  • +
  • +

    Events management: Handled received commands to subscribed messages

    +
  • +
  • +

    Automatic messaging backup when sent and handled (Internal database via LiteDB and database backup via Entity Framework)

    +
  • +
  • +

    MediatR:

    +
  • +
  • +

    Use of MediatR library to perform CQRS main functions in memory

    +
  • +
  • +

    Send commands / Subscribe queues with one C# sentence

    +
  • +
  • +

    Events management: Handled received commands to subscribed messages

    +
  • +
  • +

    Automatic messaging backup when sent and handled (Internal database via LiteDB and database backup via Entity Framework)

    +
  • +
  • +

    SmaxHcm:

    +
  • +
  • +

    Component to manage Microfocus SMAX for cloud infrastructure services management

    +
  • +
  • +

    CyberArk:

    +
  • +
  • +

    Manage safe credentials with CyberArk

    +
  • +
  • +

    AnsibleTower:

    +
  • +
  • +

    Ansible automates the cloud infrastructure. devon4net integrates with Ansible Tower via API consumption endpoints

    +
  • +
  • +

    gRPC+Protobuf:

    +
  • +
  • +

    Added Client + Server basic templates sample gRPC with Google’s Protobuf protocol using devon4net

    +
  • +
  • +

    Kafka:

    +
  • +
  • +

    Added Apache Kafka support for deliver/consume messages and create/delete topics as well

    +
  • +
+
+
+
+

Software stack

+
+
Technology Stack of devon4Net
+

|== == == == == == == == == == == = +|Topic|Detail|Implementation +|runtime|language & VM|.Net Core Version 3.0 +|persistence|OR-mapper| Entity Framework Core +|service|REST services|https://www.asp.net/web-api[Web API] +|service - integration to external systems - optional|SOAP services|https://msdn.microsoft.com/en-us/library/dd456779(v=vs.110).aspx[WCF] +|logging|framework|https://github.com/serilog/serilog-extensions-logging[Serilog] +|validation|framework| NewtonSoft Json, DataAnnotations +|component management|dependency injection| Unity +|security|Authentication & Authorization| JWT .Net Security - Token based, local Authentication Provider +|unit tests|framework|https://github.com/xunit/xunit[xUnit] +|Circuit breaker|framework, allows retry pattern on http calls|https://github.com/App-vNext/Polly[Polly] +|CQRS|Memory events and queue events| MediatR - EasyNetQ - Kafka +|Kafka| Kafka support for enterprise applications| Confluent.Kafka +|Fluent Validation| Fluent validation for class instances|https://fluentvalidation.net/[Fluent validation] +|== == == == == == == == == == == =

+
+
+
+

Target platforms

+
+

Thanks to the new .Net Core platform from Microsoft, the developed software can be published on Windows, Linux, OS X and Android platforms.

+
+
+ +
+
+
+

User guide

+
+ +
+
+technical architecture +
+
+
+

devon4net Guide

+ +
+
+

Introduction

+
+

Welcome to devon4net framework user guide. In this document you will find the information regarding how to start and deploy your project using the guidelines proposed in our solution.

+
+
+

All the guidelines shown and used in this document are a set of rules and conventions proposed and supported by Microsoft and the industry.

+
+
+
+

The package

+
+

Devon4Net package solution contains:

+
+
+

|== == == == == == == == == == == = +|File / Folder|Content +|Documentation| User documentation in HTML format +|Modules| Contains the source code of the different devon4net modules +|Samples| Different samples implemented in .NET and .NET Core. Also includes My Thai Star Devon flagship restaurant application +|Templates| Main .net Core template to start developing from scratch +|License| License agreement +|README.md| Github main page +|TERMS_OF_USE.adoc| The devon4net terms of use +|LICENSE| The devon license +|Other files| Such the code of conduct and contributing guide +|== == == == == == == == == == == =

+
+
+
+

Application templates

+
+

The application templates given in the bundle are ready to use.

+
+
+

At the moment .net Core template is supported. The template is ready to be used as a simple console Kestrel application or being deployed in a web server like IIS.

+
+
+
+

Samples

+ +
+
+

== My Thai Star

+
+

You can find My Thai Star .NET port application at Github.

+
+
+ + + + + +
+ + +As devon4net has been migrated to the latest version of .net core, the template is not finished yet. +
+
+
+
+

Cookbook

+ +
+
+

Data management

+
+

To use Entity Framework Core, install the package for the database provider(s) you want to target. This walk-through uses SQL Server.

+
+
+

For a list of available providers see Database Providers

+
+
+
    +
  • +

    Go to Tools > NuGet Package Manager > Package Manager Console

    +
  • +
  • +

    Run Install-Package Microsoft.EntityFrameworkCore.SqlServer

    +
  • +
+
+
+

We will be using some Entity Framework Tools to create a model from the database. So we will install the tools package as well:

+
+
+
    +
  • +

    Run Install-Package Microsoft.EntityFrameworkCore.Tools

    +
  • +
+
+
+

We will be using some ASP.NET Core Scaffolding tools to create controllers and views later on. So we will install this design package as well:

+
+
+
    +
  • +

    Run Install-Package Microsoft.VisualStudio.Web.CodeGeneration.Design

    +
  • +
+
+
+
+

== Entity Framework Code first

+
+

In order to design your database model from scratch, we encourage to follow the Microsoft guidelines described here.

+
+
+
+

== Entity Framework Database first

+
+
    +
  • +

    Go to Tools > NuGet Package Manager > Package Manager Console

    +
  • +
  • +

    Run the following command to create a model from the existing database:

    +
  • +
+
+
+
+
Scaffold-DbContext "Your connection string to existing database" Microsoft.EntityFrameworkCore.SqlServer -OutputDir Models
+
+
+
+

The command will create the database context and the mapped entities as well inside of Models folder.

+
+
+
+

== Register your context with dependency injection

+
+

Services are registered with dependency injection during application startup.

+
+
+

In order to register your database context (or multiple database context as well) you can add the following line at ConfigureDbService method at startup.cs:

+
+
+
+
       private void SetupDatabase(IServiceCollection services)
+        {
+            services.SetupDatabase<TodoContext>(Configuration, "Default", WebAPI.Configuration.Enums.DatabaseType.InMemory);
+        }
+
+
+
+

Where:

+
+
+

|== == == == == == == == == == == = +|Param|Description +|TodoContext| Is the database context definition +|Default| Is the connection string defined at ConnectionString node at the appsettings configuration file +|WebAPI.Configuration.Enums.DatabaseType.InMemory| Is the database driver selection. In this case InMemory data base is chosen +|== == == == == == == == == == == =

+
+
+

The supported databases are:

+
+
+
    +
  • +

    SqlServer

    +
  • +
  • +

    Sqlite

    +
  • +
  • +

    InMemory

    +
  • +
  • +

    Cosmos

    +
  • +
  • +

    PostgreSQL

    +
  • +
  • +

    MySql

    +
  • +
  • +

    MariaDb

    +
  • +
  • +

    FireBird

    +
  • +
  • +

    Oracle

    +
  • +
  • +

    MSAccess

    +
  • +
+
+
+
+

Repositories and Services

+
+

Services and Repositories are an important part of devon4net proposal. To make them work properly, they must first be declared and injected at Startup.cs at DI Region.

+
+
+

Services are declared in devon4net.Business.Common and injected in Controller classes when needed. Use services to build your application logic.

+
+
+
+technical architecture +
+
Figure 5. Screenshot of devon4net.Business.Common project in depth
+
+
+

For example, My Thai Star Booking controller constructor looks like this:

+
+
+
+
        public BookingController(IBookingService bookingService, IMapper mapper)
+        {
+            BookingService = bookingService;
+            Mapper = mapper;
+
+        }
+
+
+
+

Currently devon4net has a Unit of Work class in order to perform CRUD operations to database making use of your designed model context.

+
+
+

Repositories are declared at devon4net.Domain.UnitOfWork project and make use of Unit of Work class.

+
+
+

The common methods to perform CRUD operations (where <T> is an entity from your model) are:

+
+
+
    +
  • +

    Sync methods:

    +
  • +
+
+
+
+
IList<T> GetAll(Expression<Func<T, bool>> predicate = null);
+T Get(Expression<Func<T, bool>> predicate = null);
+IList<T> GetAllInclude(IList<string> include, Expression<Func<T, bool>> predicate = null);
+T Create(T entity);
+void Delete(T entity);
+void DeleteById(object id);
+void Delete(Expression<Func<T, bool>> where);
+void Edit(T entity);
+
+
+
+
    +
  • +

    Async methods:

    +
  • +
+
+
+
+
Task<IList<T>> GetAllAsync(Expression<Func<T, bool>> predicate = null);
+Task<T> GetAsync(Expression<Func<T, bool>> predicate = null);
+Task<IList<T>> GetAllIncludeAsync(IList<string> include, Expression<Func<T, bool>> predicate = null);
+
+
+
+

If you perform a Commit operation and an error happens, changes will be rolled back.

+
+
+
+

Swagger integration

+
+

The given templates allow you to specify the API contract through Swagger integration, and the controller classes are responsible for exposing methods making use of comments in the source code.

+
+
+

The next example shows how to comment the method with summaries in order to define the contract. Add (Triple Slash) XML Documentation To Swagger:

+
+
+
+
/// <summary>
+/// Method to get reservations
+/// </summary>
+/// <response code="201">Ok.</response>
+/// <response code="400">Bad request. Parser data error.</response>
+/// <response code="401">Unauthorized. Authentication fail.</response>
+/// <response code="403">Forbidden. Authorization error.</response>
+/// <response code="500">Internal Server Error. The search process ended with error.</response>
+[HttpPost]
+[Route("/mythaistar/services/rest/bookingmanagement/v1/booking/search")]
+//[Authorize(Policy = "MTSWaiterPolicy")]
+[AllowAnonymous]
+[EnableCors("CorsPolicy")]
+public async Task<IActionResult> BookingSearch([FromBody]BookingSearchDto bookingSearchDto)
+{
+
+
+
+

In order to be effective and make use of the comments to build the API contract, the project which contains the controller classes must generate the XML document file. To achieve this, the XML documentation file must be checked in project settings tab:

+
+
+
+technical architecture +
+
Figure 6. Project settings tab
+
+
+

We propose to generate the file under the XmlDocumentation folder. For example in devon4net.Domain.Entities project in My Thai Star .NET implementation the output folder is:

+
+
+
+
`XmlDocumentation\devon4net.Business.Common.xml`
+
+
+
+

The file devon4net.Business.Common.xml won’t appear until you build the project. Once the file is generated, please modify its properties as a resource and set it to be Copy always .

+
+
+
+technical architecture +
+
Figure 7. Swagger XML document file properties
+
+
+

Once you have this, the swagger user interface will show the method properties defined in your controller comments.

+
+
+

Making use of this technique, controllers are not encapsulated in the application project. Also, you can develop your controller classes in different projects to obtain code reusability.

+
+
+

Swagger comment:

+
+
+

|== == == == == == == == == == == = +|Comment|Functionality +|<summary>| Will map to the operation’s summary +|<remarks>| Will map to the operation’s description (shown as "Implementation Notes" in the UI) +|<response code="###">| Specifies the different response of the target method +|<param>| Will define the parameter(s) of the target method +| +|== == == == == == == == == == == =

+
+
+

Please check Microsoft’s site regarding to summary notations.

+
+
+
+

Logging module

+
+

An important part of the software lifecycle is the need for logs and traces. devon4net has a log module pre-configured to achieve this important point.

+
+
+

By default Microsoft provides a logging module on .NET Core applications. This module is open and it can be extended. devon4net uses the Serilog implementation. This implementation provides a huge quantity of information about events and traces.

+
+
+
+

== Log file

+
+

devon4net can write the log information to a simple text file. You can configure the file name and folder at appsettings.json file (LogFile attribute) at devon4net.Application.WebApi project.

+
+
+
+

== Database log

+
+

devon4net can write the log information to a SQLite database. You can configure the file name and folder at appsettings.json file (LogDatabase attribute) at devon4net.Application.WebApi project.

+
+
+

With this method you can launch queries in order to search the information you are looking for.

+
+
+
+

== Seq log

+
+

devon4net can write the log information to a Seq log server. You can configure the Seq server URL at appsettings.json file (SeqLogServerUrl attribute) at devon4net.Application.WebApi project.

+
+
+

With this method you can make queries via HTTP.

+
+
+
+serilog seq +
+
+
+

By default you can find the log information at Logs folder.

+
+
+
+

JWT module

+
+

JSON Web Tokens are an open, industry standard RFC 7519 method for representing claims securely between two parties allowing you to decode, verify and generate JWT.

+
+
+

You should use JWT for:

+
+
+
    +
  • +

    Authentication : allowing the user to access routes, services, and resources that are permitted with that token.

    +
  • +
  • +

    Information Exchange: JSON Web Tokens are a good way of securely transmitting information between parties. Additionally, as the signature is calculated using the header and the payload, you can also verify that the content has not been tampered with.

    +
  • +
+
+
+

The JWT module is configured at Startup.cs inside devon4net.Application.WebApi project from .NET Core template. In this class you can configure the different authentication policy and JWT properties.

+
+
+

Once the user has been authenticated, the client performs the call to the backend with the attribute Bearer plus the token generated at server side.

+
+
+
+jwt +
+
+
+

On My Thai Star sample there are two predefined users: user0 and Waiter. Once they log in the application, the client (Angular/Xamarin) will manage the server call with the json web token. With this method we can manage the server authentication and authorization.

+
+
+

You can find more information about JWT at jwt.io

+
+
+
+

AOP module

+
+

AOP (Aspect Oriented Programming) tracks all information when a method is called. AOP also tracks the input and output data when a method is called.

+
+
+

By default devon4net has AOP module pre-configured and activated for controllers at Startup.cs file at devon4net.Application.WebApi:

+
+
+
+
options.Filters.Add(new Infrastructure.AOP.AopControllerAttribute(Log.Logger));
+
+options.Filters.Add(new Infrastructure.AOP.AopExceptionFilter(Log.Logger));
+
+
+
+

This configuration allows all Controller classes to be tracked. If you don’t need to track the info, comment out the lines written before.

+
+
+
+

Docker support

+
+

devon4net Core projects are ready to be integrated with docker.

+
+
+

My Thai Star application sample is ready to be used with Linux docker containers. The Readme file explains how to launch and setup the sample application.

+
+
+
    +
  • +

    angular : Angular client to support backend. Just binaries.

    +
  • +
  • +

    database : Database scripts and .bak file

    +
  • +
  • +

    mailservice: Microservice implementation to send notifications.

    +
  • +
  • +

    netcore: Server side using .net core 2.0.x.

    +
  • +
  • +

    xamarin: Xamarin client based on Excalibur framework from The Netherlands using XForms.

    +
  • +
+
+
+

Docker configuration and docker-compose files are provided.

+
+
+
+

Testing with XUnit

+
+
+
+

xUnit.net is a free, open source, community-focused unit testing tool for the .NET Framework. Written by the original inventor of NUnit v2, xUnit.net is the latest technology for unit testing C#, F#, VB.NET and other .NET languages. xUnit.net works with ReSharper, CodeRush, TestDriven.NET and Xamarin. It is part of the .NET Foundation, and operates under their code of conduct. It is licensed under Apache 2 (an OSI approved license).

+
+
+
+— About xUnit.net
+https://xunit.github.io/#documentation +
+
+
+

Facts are tests which are always true. They test invariant conditions.

+
+
+

Theories are tests which are only true for a particular set of data.

+
+
+
+

The first test

+
+
+
using Xunit;
+
+namespace MyFirstUnitTests
+{
+    public class Class1
+    {
+        [Fact]
+        public void PassingTest()
+        {
+            Assert.Equal(4, Add(2, 2));
+        }
+
+        [Fact]
+        public void FailingTest()
+        {
+            Assert.Equal(5, Add(2, 2));
+        }
+
+        int Add(int x, int y)
+        {
+            return x + y;
+        }
+    }
+}
+
+
+
+
+

The first test with theory

+
+

Theory attribute is used to create tests with input params:

+
+
+
+
[Theory]
+[InlineData(3)]
+[InlineData(5)]
+[InlineData(6)]
+public void MyFirstTheory(int value)
+{
+    Assert.True(IsOdd(value));
+}
+
+bool IsOdd(int value)
+{
+    return value % 2 ==  1;
+}
+
+
+
+
+

Cheat Sheet

+
+

|== == == == == == == == == == == = +|Operation| Example +|Test|

+
+
+
+
public void Test()
+{
+}
+|Setup|public class TestFixture {
+public TestFixture()
+{
+
+...
+
+    }
+
+}
+|Teardown|public class TestFixture : IDisposable
+
+{
+
+public void Dispose() {
+
+ ...
+ }
+
+}
+
+
+
+

|== == == == == == == == == == == =

+
+
+
+

Console runner return codes

+
+

|== == == == == == == == == == == = +|Code| Meaning +|0|The tests ran successfully. +|1|One or more of the tests failed. +|2|The help page was shown, either because it was requested, or because the user did not provide any command line arguments. +|3| There was a problem with one of the command line options passed to the runner. +|4|There was a problem loading one or more of the test assemblies (for example, if a 64-bit only assembly is run with the 32-bit test runner). +|== == == == == == == == == == == =

+
+
+
+

Publishing

+ +
+
+

== Nginx

+
+

In order to deploy your application to a Nginx server on Linux platform you can follow the instructions from Microsoft here.

+
+
+
+

== IIS

+
+

This section shows the configuration options that the .Net Core application must implement.

+
+
+

Supported operating systems:

+
+
+
    +
  • +

    Windows 7 and newer

    +
  • +
  • +

    Windows Server 2008 R2 and newer*

    +
  • +
+
+
+

WebListener server will not work in a reverse-proxy configuration with IIS. You must use the Kestrel server.

+
+
+

IIS configuration

+
+
+

Enable the Web Server (IIS) role and establish role services.

+
+
+

Windows desktop operating systems

+
+
+

Navigate to Control Panel > Programs > Programs and Features > Turn Windows features on or off (left side of the screen). Open the group for Internet Information Services and Web Management Tools. Check the box for IIS Management Console. Check the box for World Wide Web Services. Accept the default features for World Wide Web Services or customize the IIS features to suit your needs.

+
+
+
+iis 1 +
+
+
+

*Conceptually, the IIS configuration described in this document also applies to hosting ASP.NET Core applications on Nano Server IIS, but refer to ASP.NET Core with IIS on Nano Server for specific instructions.

+
+
+

Windows Server operating systems +For server operating systems, use the Add Roles and Features wizard via the Manage menu or the link in Server Manager. On the Server Roles step, check the box for Web Server (IIS).

+
+
+
+iis 2 +
+
+
+

On the Role services step, select the IIS role services you desire or accept the default role services provided.

+
+
+
+iis 3 +
+
+
+

Proceed through the Confirmation step to install the web server role and services. A server/IIS restart is not required after installing the Web Server (IIS) role.

+
+
+

Install the .NET Core Windows Server Hosting bundle

+
+
+
    +
  1. +

    Install the .NET Core Windows Server Hosting bundle on the hosting system. The bundle will install the .NET Core Runtime, .NET Core Library, and the ASP.NET Core Module. The module creates the reverse-proxy between IIS and the Kestrel server. Note: If the system doesn’t have an Internet connection, obtain and install the Microsoft Visual C++ 2015 Re-distributable before installing the .NET Core Windows Server Hosting bundle.

    +
  2. +
  3. +

    Restart the system or execute net stop was /y followed by net start w3svc from a command prompt to pick up a change to the system PATH.

    +
  4. +
+
+
+ + + + + +
+ + +If you use an IIS Shared Configuration, see ASP.NET Core Module with IIS Shared Configuration. +
+
+
+

To configure IISIntegration service options, include a service configuration for IISOptions in ConfigureServices:

+
+
+
+
services.Configure<IISOptions>(options =>
+{
+    ...
+});
+
+
+
+

|== == == == == == == == == == == = +|Option|Default|Setting +|AutomaticAuthentication| true |If true, the authentication middleware sets the HttpContext.User and responds to generic challenges. If false, the authentication middleware only provides an identity (HttpContext.User) and responds to challenges when explicitly requested by the Authentication Scheme. Windows Authentication must be enabled in IIS for AutomaticAuthentication to function. +|AuthenticationDisplayName | null| Sets the display name shown to users on login pages. +|ForwardClientCertificate |true|If true and the MS-ASPNETCORE-CLIENTCERT request header is present, the HttpContext.Connection.ClientCertificate is populated. +|== == == == == == == == == == == =

+
+
+

web.config

+
+
+

The web.config file configures the ASP.NET Core Module and provides other IIS configuration. Creating, transforming, and publishing web.config is handled by Microsoft.NET.Sdk.Web, which is included when you set your project’s SDK at the top of your .csproj file, <Project Sdk="Microsoft.NET.Sdk.Web">. To prevent the MSBuild target from transforming your web.config file, add the <IsTransformWebConfigDisabled> property to your project file with a setting of true:

+
+
+
+
<PropertyGroup>
+  <IsTransformWebConfigDisabled>true</IsTransformWebConfigDisabled>
+</PropertyGroup>
+
+
+
+
+

== Azure

+
+

In order to deploy your application to Azure platform you can follow the instructions from Microsoft:

+
+
+

Set up the development environment

+
+
+ +
+
+

Create a web app

+
+
+

In the Visual Studio Start Page, select File > New > Project…​

+
+
+
+File menu +
+
+
+

Complete the New Project dialog:

+
+
+
    +
  • +

    In the left pane, select .NET Core.

    +
  • +
  • +

    In the center pane, select ASP.NET Core Web Application.

    +
  • +
  • +

    Select OK.

    +
  • +
+
+
+
+New Project dialog +
+
+
+

In the New ASP.NET Core Web Application dialog:

+
+
+
    +
  • +

    Select Web Application.

    +
  • +
  • +

    Select Change Authentication.

    +
  • +
+
+
+
+New Project dialog +
+
+
+

The Change Authentication dialog appears.

+
+
+
    +
  • +

    Select Individual User Accounts.

    +
  • +
  • +

    Select OK to return to the New ASP.NET Core Web Application, then select OK again.

    +
  • +
+
+
+
+New ASP.NET Core Web authentication dialog +
+
+
+

Visual Studio creates the solution.

+
+
+

Run the app locally

+
+
+
    +
  • +

    Choose Debug then Start Without Debugging to run the app locally.

    +
  • +
  • +

    Click the About and Contact links to verify the web application works.

    +
  • +
+
+
+
+Web application open in Microsoft Edge on localhost +
+
+
+
    +
  • +

    Select Register and register a new user. You can use a fictitious email address. When you submit, the page displays the following error:

    +
  • +
+
+
+

"Internal Server Error: A database operation failed while processing the request. SQL exception: Cannot open the database. Applying existing migrations for Application DB context may resolve this issue."

+
+
+
    +
  • +

    Select Apply Migrations and, once the page updates, refresh the page.

    +
  • +
+
+
+
+Internal Server Error: A database operation failed while processing the request. SQL exception: Cannot open the database. Applying existing migrations for Application DB context may resolve this issue. +
+
+
+

The app displays the email used to register the new user and a Log out link.

+
+
+
+Web application open in Microsoft Edge. The Register link is replaced by the text Hello email@domain.com! +
+
+
+

Deploy the app to Azure

+
+
+

Close the web page, return to Visual Studio, and select Stop Debugging from the Debug menu.

+
+
+

Right-click on the project in Solution Explorer and select Publish…​.

+
+
+
+Contextual menu open with Publish link highlighted +
+
+
+

In the Publish dialog, select Microsoft Azure App Service and click Publish.

+
+
+
+Publish dialog +
+
+
+
    +
  • +

    Name the app a unique name.

    +
  • +
  • +

    Select a subscription.

    +
  • +
  • +

    Select New…​ for the resource group and enter a name for the new resource group.

    +
  • +
  • +

    Select New…​ for the app service plan and select a location near you. You can keep the name that is generated by default.

    +
  • +
+
+
+
+App Service dialog +
+
+
+
    +
  • +

    Select the Services tab to create a new database.

    +
  • +
  • +

    Select the green + icon to create a new SQL Database

    +
  • +
+
+
+
+New SQL Database +
+
+
+
    +
  • +

    Select New…​ on the Configure SQL Database dialog to create a new database.

    +
  • +
+
+
+
+New SQL Database and server +
+
+
+

The Configure SQL Server dialog appears.

+
+
+
    +
  • +

    Enter an administrator user name and password, and then select OK. Don’t forget the user name and password you create in this step. You can keep the default Server Name.

    +
  • +
  • +

    Enter names for the database and connection string.

    +
  • +
+
+
+
+

== Note

+
+

"admin" is not allowed as the administrator user name.

+
+
+
+Configure SQL Server dialog +
+
+
+
    +
  • +

    Select OK.

    +
  • +
+
+
+

Visual Studio returns to the Create App Service dialog.

+
+
+
    +
  • +

    Select Create on the Create App Service dialog.

    +
  • +
+
+
+
+Configure SQL Database dialog +
+
+
+
    +
  • +

    Click the Settings link in the Publish dialog.

    +
  • +
+
+
+
+Publish dialog: Connection panel +
+
+
+

On the Settings page of the Publish dialog:

+
+
+
    +
  • +

    Expand Databases and check Use this connection string at runtime.

    +
  • +
  • +

    Expand Entity Framework Migrations and check Apply this migration on publish.

    +
  • +
  • +

    Select Save. Visual Studio returns to the Publish dialog.

    +
  • +
+
+
+
+Publish dialog: Settings panel +
+
+
+

Click Publish. Visual Studio will publish your app to Azure and launch the cloud app in your browser.

+
+
+

Test your app in Azure

+
+
+
    +
  • +

    Test the About and Contact links

    +
  • +
  • +

    Register a new user

    +
  • +
+
+
+
+Web application opened in Microsoft Edge on Azure App Service +
+
+
+

Update the app

+
+
+
    +
  • +

    Edit the Pages/About.cshtml Razor page and change its contents. For example, you can modify the paragraph to say "Hello ASP.NET Core!":

    +
    +
    +
    html<button class="action copy" data-bi-name="copy">Copy</button>
    +
    +
    +
  • +
+
+
+
+
@page
+@model AboutModel
+@{
+    ViewData["Title"] = "About";
+}
+<h2>@ViewData["Title"]</h2>
+<h3>@Model.Message</h3>
+
+    <p>Hello ASP.NET Core!</p>
+
+
+
+
    +
  • +

    Right-click on the project and select Publish…​ again.

    +
  • +
+
+
+
+Contextual menu open with Publish link highlighted +
+
+
+
    +
  • +

    After the app is published, verify the changes you made are available on Azure.

    +
  • +
+
+
+
+Verify task is complete +
+
+
+

Clean up

+
+
+

When you have finished testing the app, go to the Azure portal and delete the app.

+
+
+
    +
  • +

    Select Resource groups, then select the resource group you created.

    +
  • +
+
+
+
+Azure Portal: Resource Groups in sidebar menu +
+
+
+
    +
  • +

    In the Resource groups page, select Delete.

    +
  • +
+
+
+
+Azure Portal: Resource Groups page +
+
+
+
    +
  • +

    Enter the name of the resource group and select Delete. Your app and all other resources created in this tutorial are now deleted from Azure.

    +
  • +
+
+
+ +
+
+
+

How To section

+
+ +
+

Introduction

+
+

The aim of this document is to show how to get devon4net things done in an easy way.

+
+
+
+

How to

+ +
+
+

Start a new devonfw project

+
+

The .Net Core 3.1 template allows you to start developing an n-layer server application to provide the latest features. The template can be used in Visual Studio Code and Visual Studio 2019.

+
+
+

The application result can be deployed as a console application, microservice or web page.

+
+
+

To start developing with devon4Net template, please follow these instructions:

+
+
+
+

== Using devon4Net template

+ +
+
+

== Option 1

+
+
+
Open your favourite terminal (Win/Linux/iOS)
+Go to future project's path
+Type dotnet new --install Devon4Net.WebAPI.Template
+Type dotnet new Devon4NetAPI
+Go to project's path
+You are ready to start developing with devon4Net
+
+
+
+
+

== Option 2

+
+
+
Create a new dotnet` API` project from scratch
+Add the NuGet package reference to your project:
+Install-Package Devon4Net.Application.WebAPI.Configuration
+
+
+
+

Set up your project as follows in program.cs file:

+
+
+
+
        public static void Main(string[] args)
+        {
+            // Please use
+            // Devonfw.Configure<Startup>(args);
+            // Or :
+
+            WebHost.CreateDefaultBuilder(args)
+                .UseStartup<Startup>()
+                .InitializeDevonFw()
+                .Build()
+                .Run();
+        }
+
+
+
+

Set up your project as follows in startup.cs file:

+
+
+
+
    private IConfiguration Configuration { get; }
+
+
+   public Startup(IConfiguration configuration)
+    {
+        Configuration = configuration;
+    }
+
+    public void ConfigureServices(IServiceCollection services)
+    {
+
+        services.ConfigureDevonFw(Configuration);
+        SetupDatabase(services);
+
+        ...
+    }
+
+
+    private void SetupDatabase(IServiceCollection services)
+    {
+        // Default is the database connection name in appsettings.json file
+        services.SetupDatabase<TodoContext>(Configuration, "Default", DatabaseType.InMemory);
+    }
+
+    public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
+    {
+        app.ConfigureDevonFw();
+        ...
+    }
+
+
+
+

Add the devonfw configuration options in your appsettings.json file

+
+
+
+

devon4net configuration files

+
+

To start using devon4net in your .net core application add this configuration in your appsettings.json file:

+
+
+
+
 "devonfw": {
+    "UseDetailedErrorsKey": true,
+    "UseIIS": false,
+    "UseSwagger": true,
+    "Environment": "Development",
+    "KillSwitch": {
+      "killSwitchSettingsFile": "killswitch.appsettings.json"
+    },
+    "Kestrel": {
+      "UseHttps": true,
+      "HttpProtocol": "Http2", //Http1, Http2, Http1AndHttp2, none
+      "ApplicationPort": 8082,
+      "KeepAliveTimeout": 120, //in seconds
+      "MaxConcurrentConnections": 100,
+      "MaxConcurrentUpgradedConnections": 100,
+      "MaxRequestBodySize": 28.6, //In MB. The default maximum request body size is 30,000,000 bytes, which is approximately 28.6 MB
+      "Http2MaxStreamsPerConnection": 100,
+      "Http2InitialConnectionWindowSize": 131072, // From 65,535 and less than 2^31 (2,147,483,648)
+      "Http2InitialStreamWindowSize": 98304, // From 65,535 and less than 2^31 (2,147,483,648)
+      "AllowSynchronousIO": true,
+      "SslProtocol": "Tls12", //Tls, Tls11,Tls12, Tls13, Ssl2, Ssl3, none. For Https2 Tls12 is needed
+      "ServerCertificate": {
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost"
+      },
+      "ClientCertificate": {
+        "DisableClientCertificateCheck": true,
+        "RequireClientCertificate": false,
+        "CheckCertificateRevocation": true,
+        "ClientCertificates": {
+          "Whitelist": [
+            "3A87A49460E8FE0E2A198E63D408DC58435BC501"
+          ],
+          "DisableClientCertificateCheck": false
+        }
+      }
+    },
+    "IIS": {
+      "ForwardClientCertificate": true,
+      "AutomaticAuthentication": true,
+      "AuthenticationDisplayName" : ""
+    }
+  }
+
+
+
+

Also, to start using the devon4net components, you should add the next json options in your appsettings.json or appsettings.Development.json file:

+
+
+
+
{
+  "ExtraSettingsFiles": [
+    "Put a directory path (relative/absolute/linux-like) like /run/secrets/global where there are many settings/secret files to load",
+    "Put a specific file name (with/without path) like /app-configs/app/extra-settings.json"
+  ],
+  "ConnectionStrings": {
+    "Default": "Todos",
+    "Employee": "Employee",
+    "RabbitMqBackup": "Add your database connection string here for messaging backup",
+    "MediatRBackup": "Add your database connection string here for messaging backup"
+  },
+  "Logging": {
+    "LogLevel": {
+      "Default": "Debug",
+      "System": "Information",
+      "Microsoft": "Information"
+    }
+  },
+  "Swagger": {
+    "Version": "v1",
+    "Title": "devon4net API",
+    "Description": "devon4net API Contract",
+    "Terms": "https://www.devonfw.com/terms-of-use/",
+    "Contact": {
+      "Name": "devonfw",
+      "Email": "sample@mail.com",
+      "Url": "https://www.devonfw.com"
+    },
+    "License": {
+      "Name": "devonfw - Terms of Use",
+      "Url": "https://www.devonfw.com/terms-of-use/"
+    },
+    "Endpoint": {
+      "Name": "V1 Docs",
+      "Url": "/swagger/v1/swagger.json",
+      "UrlUi": "swagger",
+      "RouteTemplate": "swagger/v1/{documentName}/swagger.json"
+    }
+  },
+  "JWT": {
+    "Audience": "devon4Net",
+    "Issuer": "devon4Net",
+    "TokenExpirationTime": 60,
+    "ValidateIssuerSigningKey": true,
+    "ValidateLifetime": true,
+    "ClockSkew": 5,
+    "Security": {
+      "SecretKeyLengthAlgorithm": "",
+      "SecretKeyEncryptionAlgorithm": "",
+      "SecretKey": "",
+      "Certificate": "",
+      "CertificatePassword": "",
+      "CertificateEncryptionAlgorithm": ""
+    }
+  },
+  "Cors": []
+  //[
+  //  {
+  //    "CorsPolicy": "CorsPolicy1",
+  //    "Origins": "http://example.com,http://www.contoso.com",
+  //    "Headers": "accept,content-type,origin,x-custom-header",
+  //    "Methods": "GET,POST,HEAD",
+  //    "AllowCredentials": true
+  //  },
+  //  {
+  //    "CorsPolicy": "CorsPolicy2",
+  //    "Origins": "http://example.com,http://www.contoso.com",
+  //    "Headers": "accept,content-type,origin,x-custom-header",
+  //    "Methods": "GET,POST,HEAD",
+  //    "AllowCredentials": true
+  //  }
+  //]
+  ,
+  "CircuitBreaker": {
+    "CheckCertificate": false,
+    "Endpoints": [
+      {
+        "Name": "AnsibleTower",
+        "BaseAddress": "PUT THE IP ADDRESS HERE",
+        "Headers": {
+        },
+        "WaitAndRetrySeconds": [
+          0.0001,
+          0.0005,
+          0.001
+        ],
+        "DurationOfBreak": 0.0005,
+        "UseCertificate": false,
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost",
+        "SslProtocol": "3072" //TLS12
+      },
+      {
+        "Name": "CyberArk",
+        "BaseAddress": "PUT THE IP ADDRESS HERE",
+        "Headers": {
+        },
+        "WaitAndRetrySeconds": [
+          0.0001,
+          0.0005,
+          0.001
+        ],
+        "DurationOfBreak": 0.0005,
+        "UseCertificate": false,
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost",
+        "SslProtocol": "3072" //TLS12
+      },
+      {
+        "Name": "SmaxHcm",
+        "BaseAddress": "PUT THE IP ADDRESS HERE",
+        "Headers": {
+        },
+        "WaitAndRetrySeconds": [
+          0.0001,
+          0.0005,
+          0.001
+        ],
+        "DurationOfBreak": 0.0005,
+        "UseCertificate": false,
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost",
+        "SslProtocol": "3072" //TLS12
+      }
+    ]
+  },
+  "Headers": {
+    "AccessControlExposeHeader": "Authorization",
+    "StrictTransportSecurityHeader": "",
+    "XFrameOptionsHeader": "DENY",
+    "XssProtectionHeader": "1;mode=block",
+    "XContentTypeOptionsHeader": "nosniff",
+    "ContentSecurityPolicyHeader": "",
+    "PermittedCrossDomainPoliciesHeader": "",
+    "ReferrerPolicyHeader": ""
+  },
+  "Log": {
+    "UseAOPTrace": false,
+    "LogLevel": "Debug",
+    "SqliteDatabase": "logs/log.db",
+    "LogFile": "logs/{0}_devonfw.log",
+    "SeqLogServerHost": "http://127.0.0.1:5341",
+    "GrayLog": {
+      "GrayLogHost": "127.0.0.1",
+      "GrayLogPort": "12201",
+      "GrayLogProtocol": "UDP",
+      "UseSecureConnection": true,
+      "UseAsyncLogging": true,
+      "RetryCount": 5,
+      "RetryIntervalMs": 15,
+      "MaxUdpMessageSize": 8192
+    }
+  },
+  "RabbitMq": {
+    "EnableRabbitMq": false,
+    "Hosts": [
+      {
+        "Host": "127.0.0.1",
+        "Port": 5672,
+        "Ssl": false,
+        "SslServerName": "localhost",
+        "SslCertPath": "localhost.pfx",
+        "SslCertPassPhrase": "localhost",
+        "SslPolicyErrors": "RemoteCertificateNotAvailable" //None, RemoteCertificateNotAvailable, RemoteCertificateNameMismatch, RemoteCertificateChainErrors
+      }
+    ],
+
+    "VirtualHost": "/",
+    "UserName": "admin",
+    "Password": "password",
+    "Product": "devon4net",
+    "RequestedHeartbeat": 10, //Set to zero for no heartbeat
+    "PrefetchCount": 50,
+    "PublisherConfirms": false,
+    "PersistentMessages": true,
+    "Platform": "localhost",
+    "Timeout": 10,
+    "Backup": {
+      "UseLocalBackup": false,
+      "DatabaseName": "devon4netMessageBackup.db"
+    }
+  },
+  "MediatR": {
+    "EnableMediatR": false,
+    "Backup": {
+      "UseLocalBackup": false,
+      "DatabaseName": "devon4netMessageBackup.db"
+    }
+  },
+  "LiteDb": {
+    "DatabaseLocation": "devon4net.db"
+  },
+  "AnsibleTower": {
+    "EnableAnsible": false,
+    "Name": "AnsibleTower",
+    "CircuitBreakerName": "AnsibleTower",
+    "ApiUrlBase": "/api/v2/?format=json",
+    "Version": "1.0.5.29",
+    "Username": "",
+    "Password": ""
+  },
+  "CyberArk": {
+    "EnableCyberArk": false,
+    "Username": "",
+    "Password": "",
+    "CircuitBreakerName": "CyberArk"
+  },
+  "SmaxHcm": {
+    "EnableSmax": false,
+    "Username": "",
+    "Password": "",
+    "TenantId": "",
+    "CircuitBreakerName": "SmaxHcm",
+    "ProviderId": ""
+  },
+  "Kafka": {
+    "EnableKafka": true,
+    "Administration": [
+      {
+        "AdminId": "Admin1",
+        "Servers": "127.0.0.1:9092"
+      }
+    ],
+    "Producers": [
+      {
+        "ProducerId": "Producer1", // devon identifier
+        "Servers": "127.0.0.1:9092", // Initial list of brokers as a CSV list of broker host or host:port. The application may also use `rd_kafka_brokers_add()` to add brokers during runtime
+        "ClientId": "client1", //Client identifier
+        "Topic": "devonfw", // topics to deliver the message
+        "MessageMaxBytes": 1000000, //Maximum Kafka protocol request message size. Due to differing framing overhead between protocol versions the producer is unable to reliably enforce a strict max message limit at produce time and may exceed the maximum size by one message in protocol ProduceRequests, the broker will enforce the the topic's `max.message.bytes` limit (see Apache Kafka documentation)
+        "CompressionLevel": -1, // [0-9] for gzip; [0-12] for lz4; only 0 for snappy; -1 = codec-dependent default compression level
+        "CompressionType": "None", // None, Gzip, Snappy, Lz4, Zstd
+        "ReceiveMessageMaxBytes": 100000000,
+        "EnableSslCertificateVerification": false,
+        "CancellationDelayMaxMs": 100, // The maximum length of time (in milliseconds) before a cancellation request is acted on. Low values may result in measurably higher CPU usage
+        "Ack": "None", //Zero=Broker does not send any response/ack to client, One=The leader will write the record to its local log but will respond without awaiting full acknowledgement from all followers. All=Broker will block until message is committed by all in sync replicas (ISRs). If there are less than min.insync.replicas (broker configuration) in the ISR set the produce request will fail
+        "Debug": "", //A comma-separated list of debug contexts to enable. Detailed Producer debugging: broker,topic,msg. Consumer: consumer,cgrp,topic,fetch
+        "BrokerAddressTtl": 1000, //How long to cache the broker address resolving results (milliseconds)
+        "BatchNumMessages": 1000000, // Maximum number of messages batched in one MessageSet. The total MessageSet size is also limited by batch.size and message.max.bytes
+        "EnableIdempotence": false, //When set to `true`, the producer will ensure that messages are successfully produced exactly once and in the original produce order. The following configuration properties are adjusted automatically (if not modified by the user) when idempotence is enabled: `max.in.flight.requests.per.connection=5` (must be less than or equal to 5), `retries=INT32_MAX` (must be greater than 0), `acks=all`, `queuing.strategy=fifo`. Producer instantation will fail if user-supplied configuration is incompatible
+        "MaxInFlight": 5,
+        "MessageSendMaxRetries": 5,
+        "BatchSize": 100000000 // Maximum size (in bytes) of all messages batched in one MessageSet, including protocol framing overhead. This limit is applied after the first message has been added to the batch, regardless of the first message's size, this is to ensure that messages that exceed batch.size are produced. The total MessageSet size is also limited by batch.num.messages and message.max.bytes
+      }
+    ],
+    "Consumers": [
+      {
+        "ConsumerId": "Consumer1", // devon identifier
+        "Servers": "127.0.0.1:9092",
+        "GroupId": "group1",
+        "Topics": "devonfw", // Comma separated topics to subscribe
+        "AutoCommit": true, //Automatically and periodically commit offsets in the background. Note: setting this to false does not prevent the consumer from fetching previously committed start offsets. To circumvent this behaviour set specific start offsets per partition in the call to assign()
+        "StatisticsIntervalMs": 0, //librdkafka statistics emit interval. The application also needs to register a stats callback using `rd_kafka_conf_set_stats_cb()`. The granularity is 1000ms. A value of 0 disables statistics
+        "SessionTimeoutMs": 10000, //Client group session and failure detection timeout. The consumer sends periodic heartbeats (heartbeat.interval.ms) to indicate its liveness to the broker. If no hearts are received by the broker for a group member within the session timeout, the broker will remove the consumer from the group and trigger a rebalance. The allowed range is configured with the **broker** configuration properties `group.min.session.timeout.ms` and `group.max.session.timeout.ms`. Also see `max.poll.interval.ms`
+        "AutoOffsetReset": "Largest", //Action to take when there is no initial offset in offset store or the desired offset is out of range: 'smallest','earliest' - automatically reset the offset to the smallest offset, 'largest','latest' - automatically reset the offset to the largest offset, 'error' - trigger an error which is retrieved by consuming messages and checking 'message-&gt;err'
+        "EnablePartitionEof": true, //Emit a partition EOF event (RD_KAFKA_RESP_ERR__PARTITION_EOF) whenever the consumer reaches the end of a partition
+        "IsolationLevel": "ReadCommitted", //Controls how to read messages written transactionally: `ReadCommitted` - only return transactional messages which have been committed. `ReadUncommitted` - return all messages, even transactional messages which have been aborted.
+        "EnableSslCertificateVerification": false,
+        "Debug": "" //A comma-separated list of debug contexts to enable. Detailed Producer debugging: broker,topic,msg. Consumer: consumer,cgrp,topic,fetch
+      }
+    ]
+  }
+}
+
+
+
+
+

devon4net Cobigen Guide

+ +
+
+

Overview

+
+

In this guide we will explain how to generate a new WebApi project from an OpenAPI 3.0.0 specification. This means that we are going to use a “contract first” strategy. This is going to be possible due to these types of files that contain all the information about entities, operations, etc…

+
+
+

In order to make it work we are using CobiGen, a powerful tool for generating source code. CobiGen allows users to generate all the structure and code of the components, helping to save a lot of time otherwise wasted on repetitive tasks.

+
+
+
+

Getting things ready

+ +
+
+

== devonfw Distribution

+
+

The devonfw distributions can be obtained from here. You can find all releases in maven central.

+
+
+

It is not necessary to install nor configure anything. Just extracting the zip content is enough to have a fully functional devonfw. The only thing you have to do is run create-or-update-workspace.bat and then update-all-workspaces.bat to set up all the needed tools.

+
+
+
+

== devon4net Templates

+
+

We are going to use the template of devon4net as a base to generate all the code, so what we have to do now is to download said template using the following steps.

+
+
+

First of all you have to set up all the environment for .NET, you can do this using the following tutorial. Next we are going to create a new folder where we want to have the WebAPI project, lastly we are going to open the terminal there.

+
+
+

Type the following:

+
+
+
+
dotnet new -i Devon4Net.WebAPI.Template
+
+
+
+

and then:

+
+
+
+
dotnet new Devon4NetAPI
+
+
+
+
+

== OpenAPI File

+
+

In order to let CobiGen generate all the files, we first have to make some modifications to our OpenAPI file.

+
+
+

It is obligatory to put the “x-rootpackage” tag to indicate where CobiGen will place the generated files as well as the "x-component" tags for each component, keep in mind that due to CobiGen’s limitations each component must have its own entity.

+
+
+

You can read more information about how to configure your OpenAPI file and a working example here.

+
+
+
+

Generating files

+
+

Cobigen allows us to generate the files in two different ways. One of them is using Eclipse, which can be done by using its graphical interface. The other way to generate the code is using the Cobigen CLI tool.

+
+
+
+

== Generating files through Eclipse

+
+

In order to generate the files using Eclipse we need to follow some simple steps.

+
+
+

First we are going to import our basic devon4net WebAPI Project into Eclipse. To do so, open Eclipse with the “eclipse-main.bat” file that can be found in the devon distribution root folder. Once we are inside of Eclipse we go to File > Open projects from file system…​ and, under "Directory", search for your project.

+
+
+
+cobigen +
+
+
+

Next we copy our OpenAPI file into the root folder of the project.

+
+
+
+cobigen +
+
+
+

And then we right click on OpenAPI file and then select CobiGen > Generate…​ It will display a window like this:

+
+
+
+cobigen +
+
+
+

To select all .NET features choose CRUD devon4net Server otherwise you can select only those that interest you.

+
+
+
+cobigen +
+
+
+

Once you select all the files that you want to generate, click on the “Finish” button to generate all the source code.

+
+
+
+

== Generating files through Cobigen CLI

+
+

In order to generate the files using the Cobigen CLI, you need to follow these steps:

+
+
+
    +
  1. +

    Go to devonfw distribution folder

    +
  2. +
  3. +

    Run console.bat, this will open a console.

    +
  4. +
  5. +

    Go to the folder you downloaded the devon4net template and your yml file.

    +
  6. +
  7. +

    Run the command:

    +
    +
    +
    cobigen generate {yourOpenAPIFile}.yml
    +
    +
    +
  8. +
  9. +

    A list of increments will be printed so that you can start the generation. It has to be selected CRUD devon4net Server increment.

    +
  10. +
+
+
+
+

Configuration

+ +
+
+

== Dependency Injection configuration

+
+

At this point it is needed to make some modifications in the code in order to configure correctly the server. To do so it is needed to locate the services and the repositories files that were created in Devon4Net.WebAPI.Implementation

+
+
+

Services location:

+
+
+
+cobigen +
+
+
+

Repositories location:

+
+
+
+cobigen +
+
+
+

Now, we are going to open the following file Devon4Net.WebAPI.Implementation\Configure\DevonConfiguration.cs. +In there we have to add the Dependency Injection for the services and the repositories that Cobigen has generated. The following image is an example of what is needed to add.

+
+
+
+cobigen +
+
+
+

Moreover it is needed to remove the last line in order to be able to run the application:

+
+
+
+
`throw new NotImplementedException(...);`
+
+
+
+
+

== Configure data base

+
+

Cobigen generates an empty context that has to be filled in manually in order to be able to work with the database. The context can be found in [Project_Name]/Devon4Net.WebAPI.Implementation/Domain/Database/CobigenContext.cs.

+
+
+
+cobigen +
+
+
+
+

== Configure services

+
+

In order to finish the configuration of the services it is needed to go to each service file of the managements generated.

+
+
+

In there we will see some "NotImplementedExceptions", so it is needed to read carefully each comment inside of each exception in order to be able to use the service. It can be shown an example of the service with its NotImplementedExceptions comments:

+
+
+
+cobigen +
+
+
+
+

== Run the application

+
+

After doing all the steps defined above, open a terminal in path: [Project_Name]/Devon4Net.Application.WebAPI and then type:

+
+
+
+
dotnet run
+
+
+
+

This will deploy our application on localhost on port 8082, so when you click here (https://localhost:8082/swagger) you can see, in Swagger, all the services and the data model.

+
+
+
+

Use HTTP2 protocol

+
+

You can specify the HTTP protocol to be used by your devon4net application by modifying some node values at the devonfw node in your appsettings configuration file.

+
+
+
+

HttpProtocol

+
+

The supported protocols are:

+
+
+

|== == == == == == == == == == == = +|Protocol|Description +|Http1| Http1 protocol +|Http2| Http2 Protocol +|Http1AndHttp2| Both supported +|== == == == == == == == == == == =

+
+
+
+

== SSL

+
+

To activate the HTTP2, the SslProtocol node must be set to Tls12 value.

+
+
+

The supported SSL protocol version values are:

+
+
+
    +
  • +

    Tls

    +
  • +
  • +

    Tls11

    +
  • +
  • +

    Tls12

    +
  • +
  • +

    Tls13

    +
  • +
  • +

    Ssl2

    +
  • +
  • +

    Ssl3

    +
  • +
+
+
+
+

Create a certificate for development purposes

+
+

In order to create a valid certificate for development purposes the OpenSSL tools are needed.

+
+
+
+

Certificate authority (CA)

+
+

Run the next commands in a shell:

+
+
+
+
1. openssl req -x509 -nodes -new -sha256 -days 1024 -newkey rsa:2048 -keyout RootCA.key -out RootCA.pem -subj "/C=ES/ST=Valencia/L=Valencia/O=Certificates/CN=localhost.local"
+
+2. openssl x509 -outform pem -in RootCA.pem -out RootCA.crt
+
+
+
+

If you want to convert your certificate run the command:

+
+
+
+
openssl pkcs12 -export -out localhost.pfx -inkey RootCA.key -in RootCA.crt
+
+
+
+
+

Domain name certificate

+
+

Run the next commands in a shell:

+
+
+
+
1. openssl req -new -nodes -newkey rsa:2048 -keyout localhost.key -out localhost.csr -subj "/C=ES/ST=Valencia/L=Valencia/O=Certificates/CN=localhost.local"
+
+2. openssl x509 -req -sha256 -days 1024 -in localhost.csr -CA RootCA.pem -CAkey RootCA.key -CAcreateserial -extfile domains.ext -out localhost.crt
+
+
+
+

Where the domains.ext file should contain:

+
+
+
+
authorityKeyIdentifier=keyid,issuer
+basicConstraints=CA:FALSE
+keyUsage = digitalSignature, nonRepudiation, keyEncipherment, dataEncipherment
+subjectAltName = @alt_names
+[alt_names]
+DNS.1 = localhost
+DNS.2 = localhost.local
+DNS.3 = 127.0.0.1
+DNS.4 = fake1.local
+DNS.5 = fake2.local
+
+
+
+

If you want to convert your certificate run the command:

+
+
+
+
openssl pkcs12 -export -out localhost.pfx -inkey localhost.key -in localhost.crt
+
+
+
+
+

Setup the database driver

+
+

Add the database connection on the SetupDatabase method at Startup.cs

+
+
+
+
       private void SetupDatabase(IServiceCollection services)
+        {
+            services.SetupDatabase<TodoContext>(Configuration, "Default", WebAPI.Configuration.Enums.DatabaseType.InMemory);
+        }
+
+
+
+

Where:

+
+
+

|== == == == == == == == == == == = +|Param|Description +|TodoContext| Is the database context definition +|Default| Is the connection string defined at ConnectionString node at the appsettings configuration file +|WebAPI.Configuration.Enums.DatabaseType.InMemory| Is the database driver selection. In this case InMemory data base is chosen +|== == == == == == == == == == == =

+
+
+

The supported databases are:

+
+
+
    +
  • +

    SqlServer

    +
  • +
  • +

    Sqlite

    +
  • +
  • +

    InMemory

    +
  • +
  • +

    Cosmos

    +
  • +
  • +

    PostgreSQL

    +
  • +
  • +

    MySql

    +
  • +
  • +

    MariaDb

    +
  • +
  • +

    FireBird

    +
  • +
  • +

    Oracle

    +
  • +
  • +

    MSAccess

    +
  • +
+
+
+
+

Change the JWT encryption algorithm

+
+

In the appsettings.json configuration file, you can use the next values on the SecretKeyLengthAlgorithm and SecretKeyEncryptionAlgorithm nodes at JWT configuration:

+
+
+

|== == == == == == == == == == == = +|Algorithm|Description +|Aes128Encryption|"http://www.w3.org/2001/04/xmlenc#aes128-cbc" +|Aes192Encryption|"http://www.w3.org/2001/04/xmlenc#aes192-cbc" +|Aes256Encryption|"http://www.w3.org/2001/04/xmlenc#aes256-cbc" +|DesEncryption|"http://www.w3.org/2001/04/xmlenc#des-cbc" +|Aes128KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes128" +|Aes192KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes192" +|Aes256KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes256" +|RsaV15KeyWrap|"http://www.w3.org/2001/04/xmlenc#rsa-1_5" +|Ripemd160Digest|"http://www.w3.org/2001/04/xmlenc#ripemd160" +|RsaOaepKeyWrap|"http://www.w3.org/2001/04/xmlenc#rsa-oaep" +|Aes128KW|"A128KW" +|Aes256KW|"A256KW" +|RsaPKCS1|"RSA1_5" +|RsaOAEP|"RSA-OAEP" +|ExclusiveC14n|"http://www.w3.org/2001/10/xml-exc-c14n#" +|ExclusiveC14nWithComments|"http://www.w3.org/2001/10/xml-exc-c14n#WithComments" +|EnvelopedSignature|"http://www.w3.org/2000/09/xmldsig#enveloped-signature" +|Sha256Digest|"http://www.w3.org/2001/04/xmlenc#sha256" +|Sha384Digest|"http://www.w3.org/2001/04/xmldsig-more#sha384" +|Sha512Digest|"http://www.w3.org/2001/04/xmlenc#sha512" +|Sha256|"SHA256" +|Sha384|"SHA384" +|Sha512|"SHA512" +|EcdsaSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha256" +|EcdsaSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha384" +|EcdsaSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha512" +|HmacSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha256" +|HmacSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha384" +|HmacSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha512" +|RsaSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha256" +|RsaSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha384" +|RsaSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha512" +|RsaSsaPssSha256Signature|"http://www.w3.org/2007/05/xmldsig-more#sha256-rsa-MGF1" 
+|RsaSsaPssSha384Signature|"http://www.w3.org/2007/05/xmldsig-more#sha384-rsa-MGF1" +|RsaSsaPssSha512Signature|"http://www.w3.org/2007/05/xmldsig-more#sha512-rsa-MGF1" +|EcdsaSha256|"ES256" +|EcdsaSha384|"ES384" +|EcdsaSha512|"ES512" +|HmacSha256|"HS256" +|HmacSha384|"HS384" +|HmacSha512|"HS512" +|None|"none" +|RsaSha256|"RS256" +|RsaSha384|"RS384" +|RsaSha512|"RS512" +|RsaSsaPssSha256|"PS256" +|RsaSsaPssSha384|"PS384" +|RsaSsaPssSha512|"PS512" +|Aes128CbcHmacSha256|"A128CBC-HS256" +|Aes192CbcHmacSha384|"A192CBC-HS384" +|Aes256CbcHmacSha512|"A256CBC-HS512" +|== == == == == == == == == == == =

+
+
+
+
+
+

Cobigen guide

+
+ +
+

devon4net Cobigen Guide

+ +
+
+

Overview

+
+

In this guide we will explain how to generate a new WebApi project from an OpenAPI 3.0.0 specification. This means that we are going to use a “contract first” strategy. This is going to be possible due to these types of files that contain all the information about entities, operations, etc…

+
+
+

In order to make it work we are using CobiGen, a powerful tool for generating source code. CobiGen allows users to generate all the structure and code of the components, helping to save a lot of time otherwise wasted on repetitive tasks.

+
+
+
+

Getting things ready

+ +
+
+

devonfw Distribution

+
+

The devonfw distributions can be obtained from the TeamForge releases library and are packaged in zips files that include all the needed tools, software and configurations.

+
+
+

It is not necessary to install nor configure anything. Just extracting the zip content is enough to have a fully functional devonfw. The only thing you have to do is run create-or-update-workspace.bat and then update-all-workspaces.bat to set up all the needed tools.

+
+
+
+

devon4net Templates

+
+

We are going to use the template of devon4net as a base to generate all the code, so what we have to do now is to download said template using the following steps.

+
+
+

First of all you have to set up all the environment for .NET, you can do this using the following tutorial. Next we are going to create a new folder where we want to have the WebAPI project, lastly we are going to open the terminal there.

+
+
+

Type the following:

+
+
+
+
dotnet new -i Devon4Net.WebAPI.Template
+
+
+
+

and then:

+
+
+
+
dotnet new Devon4NetAPI
+
+
+
+
+

OpenAPI File

+
+

In order to let CobiGen generate all the files, we first have to make some modifications to our OpenAPI file.

+
+
+

It is obligatory to put the “x-rootpackage” tag to indicate where CobiGen will place the generated files as well as the "x-component" tags for each component, keep in mind that due to CobiGen’s limitations each component must have its own entity.

+
+
+

You can read more information about how to configure your OpenAPI file and a working example here.

+
+
+
+

Generating files

+
+

Cobigen allows us to generate the files in two different ways. One of them is using Eclipse, which can be done by using its graphical interface. The other way to generate the code is using the Cobigen CLI tool.

+
+
+
+

Generating files through Eclipse

+
+

In order to generate the files using Eclipse we need to follow some simple steps.

+
+
+

First we are going to import our basic devon4net WebAPI Project into Eclipse. To do so, open Eclipse with the “eclipse-main.bat” file that can be found in the devon distribution root folder. Once we are inside of Eclipse we go to File > Open projects from file system…​ and, under "Directory", search for your project.

+
+
+
+cobigen +
+
+
+

Next we copy our OpenAPI file into the root folder of the project.

+
+
+
+cobigen +
+
+
+

And then we right click on OpenAPI file and then select CobiGen > Generate…​ It will display a window like this:

+
+
+
+cobigen +
+
+
+

To select all .NET features choose CRUD devon4net Server otherwise you can select only those that interest you.

+
+
+
+cobigen +
+
+
+

Once you select all the files that you want to generate, click on the “Finish” button to generate all the source code.

+
+
+
+

Generating files through Cobigen CLI

+
+

In order to generate the files using the Cobigen CLI, you need to follow these steps:

+
+
+
    +
  1. +

    Go to devonfw distribution folder

    +
  2. +
  3. +

    Run console.bat, this will open a console.

    +
  4. +
  5. +

    Go to the folder you downloaded the devon4net template and your yml file.

    +
  6. +
  7. +

    Run the command:

    +
    +
    +
    cobigen generate {yourOpenAPIFile}.yml
    +
    +
    +
  8. +
  9. +

    A list of increments will be printed so that you can start the generation. It has to be selected CRUD devon4net Server increment.

    +
  10. +
+
+
+
+

Configuration

+ +
+
+

Dependency Injection configuration

+
+

At this point it is needed to make some modifications in the code in order to configure correctly the server. To do so it is needed to locate the services and the repositories files that were created in Devon4Net.WebAPI.Implementation

+
+
+

Services location:

+
+
+
+cobigen +
+
+
+

Repositories location:

+
+
+
+cobigen +
+
+
+

Now, we are going to open the following file Devon4Net.WebAPI.Implementation\Configure\DevonConfiguration.cs. +In there we have to add the Dependency Injection for the services and the repositories that Cobigen has generated. The following image is an example of what is needed to add.

+
+
+
+cobigen +
+
+
+

Moreover it is needed to remove the last line in order to be able to run the application:

+
+
+
+
`throw new NotImplementedException(...);`
+
+
+
+
+

Configure data base

+
+

Cobigen generates an empty context that has to be filled in manually in order to be able to work with the database. The context can be found in [Project_Name]/Devon4Net.WebAPI.Implementation/Domain/Database/CobigenContext.cs.

+
+
+
+cobigen +
+
+
+
+

Configure services

+
+

In order to finish the configuration of the services it is needed to go to each service file of the managements generated.

+
+
+

In there we will see some "NotImplementedExceptions", so it is needed to read carefully each comment inside of each exception in order to be able to use the service. It can be shown an example of the service with its NotImplementedExceptions comments:

+
+
+
+cobigen +
+
+
+
+

Run the application

+
+

After doing all the steps defined above, open a terminal in path: [Project_Name]/Devon4Net.Application.WebAPI and then type:

+
+
+
+
dotnet run
+
+
+
+

This will deploy our application on localhost on port 8082, so when you click here (https://localhost:8082/swagger) you can see, in Swagger, all the services and the data model.

+
+
+
+
+
+

Coding conventions

+
+ +
+

== Code conventions

+
+
+

Introduction

+
+

This document covers .NET Coding Standards and is recommended to be read by team leaders/sw architects and developing teams operating in the Microsoft .NET environment.

+
+
+

“All the code in the system looks as if it was written by a single – very competent – individual” (K. Beck)

+
+
+
+

Capitalization Conventions

+
+
Terminology
+
+
Camel Case (camelCase)
+
+

Each word or abbreviation in the middle of the phrase begins with a capital letter, with no intervening spaces or punctuation.

+
+
+

The camel case convention, used only for parameter names, capitalizes the first character of each word except the first word, as shown in the following examples. As the example also shows, two-letter acronyms that begin a camel-cased identifier are both lowercase.

+
+
+

use camelCasing for parameter names.

+
+
+
+
Pascal Case (PascalCase)
+
+

The first letter of each concatenated word is capitalized. No other characters are used to separate the words, like hyphens or underscores.

+
+
+

The PascalCasing convention, used for all identifiers except parameter names, capitalizes the first character of each word (including acronyms over two letters in length).

+
+
+

use PascalCasing for all public member, type, and namespace names consisting of multiple words.

+
+
+
+
Underscore Prefix (_underScore)
+
+

For underscore ( _ ), the word after _ uses camelCase terminology.

+
+
+
+
+
+

General Naming Conventions

+
+

choose easily readable identifier names.

+
+
+

favor readability over brevity.

+
+
+
+
◦ e.g.: `GetLength` is a better name than GetInt.
+◦ Aim for the “ubiquitous language” (E. Evans): A language distilled from the domain language, which helps the team clarifying domain concepts and communicating with domain experts.
+
+
+
+

prefer adding a suffix rather than a prefix to indicate a new version of an existing API.

+
+
+

use a numeric suffix to indicate a new version of an existing API, particularly if the existing name of the API is the only name that makes sense (i.e., if it is an industry standard) and if adding any meaningful suffix (or changing the name) is not an appropriate option.

+
+
+

do not use underscores, hyphens, or any other non-alphanumeric characters.

+
+
+

do not use Hungarian notation.

+
+
+

avoid using identifiers that conflict with keywords of widely used programming languages.

+
+
+

do not use abbreviations or contractions as part of identifier names.

+
+
+

do not use any acronyms that are not widely accepted, and even if they are, only when necessary.

+
+
+

do not use the "Ex" (or a similar) suffix for an identifier to distinguish it from an earlier version of the same API.

+
+
+

do not use C# reserved words as names.

+
+
+

do not use Hungarian notation. Hungarian notation is the practice of including a prefix in identifiers to encode some metadata about the parameter, such as the data type of the identifier.

+
+
+
+
◦ `e.g.: iNumberOfClients, sClientName`
+
+
+
+
+

Names of Assemblies and DLLs

+
+

An assembly is the unit of deployment and identity for managed code programs. Although assemblies can span one or more files, typically an assembly maps one-to-one with a` DLL`. Therefore, this section describes only` DLL` naming conventions, which then can be mapped to assembly naming conventions.

+
+
+

choose names for your assembly DLLs that suggest large chunks of functionality, such as System.Data.

+
+
+

Assembly and DLL names don’t have to correspond to namespace names, but it is reasonable to follow the namespace name when naming assemblies. A good rule of thumb is to name the DLL based on the common prefix of the assemblies contained in the assembly. For example, an assembly with two namespaces, MyCompany.MyTechnology.FirstFeature and MyCompany.MyTechnology.SecondFeature, could be called MyCompany.MyTechnology.dll.

+
+
+

consider naming DLLs according to the following pattern:
+<Company>.<Component>.dll +where <Component> contains one or more dot-separated clauses.

+
+
+

For example: +Litware.Controls.dll.

+
+
+
+

General coding style

+
+
    +
  • +

    Source files: One Namespace per file and one class per file.

    +
  • +
  • +

    Braces: On new line. Always use braces when optional.

    +
  • +
  • +

    Indention: Use tabs with size of 4.

    +
  • +
  • +

    Comments: Use // for simple comments or /// for summaries. Do not use /* … */ comments and do not use flower-box comment blocks.

    +
  • +
  • +

    Use built-in C# native data types instead of .NET CTS types (string instead of String)

    +
  • +
  • +

    Avoid changing default type in Enums.

    +
  • +
  • +

    Use base or this only in constructors or within an override.

    +
  • +
  • +

    Always check for null before invoking events.

    +
  • +
  • +

    Avoid using Finalize. Use C# Destructors and do not create Finalize() method.

    +
  • +
  • +

    Suggestion: Use blank lines, to make it much more readable by dividing it into small, easy-to-digest sections:

    +
    +
    +
    ◦ Use a single blank line to separate logical groups of code, such as control structures.
    +◦ Use two blank lines to separate method definitions
    +
    +
    +
  • +
+
+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
CaseConvention

Source File

Pascal case. Match class name and file name

Namespace

Pascal case

Class

Pascal case

Interface

Pascal case

Generics

Single capital letter (T or K)

Methods

Pascal case (use a Verb or Verb+Object)

Public field

Pascal case

Private field

Camel case with underscore (_) prefix

Static field

Pascal case

Property

Pascal case. Try to use the get and set convention {get;set;}

Constant

Pascal case

Enum

Pascal case

Variable (inline)

Camel case

Param

Camel case

+
+
+

Use of Region guideline

+
+

Regions can be used to collapse code inside Visual Studio .NET. Regions are ideal candidates to hide boiler plate style code that adds little value to the reader on your code. Regions can then be expanded to provide progressive disclosure of the underlying details of the class or method.

+
+
+
    +
  • +

    Do Not regionalise entire type definitions that are of an important nature. Types such as enums (which tend to be fairly static in their nature) can be regionalised – their permissible values show up in Intellisense anyway.

    +
  • +
  • +

    Do Not regionalise an entire file. When another developer opens the file, all they will see is a single line in the code editor pane.

    +
  • +
  • +

    Do regionalise boiler plate type code.

    +
  • +
+
+
+
+

Use of Comment guideline

+
+

Code is the only completely reliable documentation: write “good code” first!

+
+
+
Avoid Unnecessary comments
+
+
    +
  • +

    Choosing good names for fields, methods, parameters, etc. “let the code speak” (K. Beck) by itself reducing the need for comments and documentation

    +
  • +
  • +

    Avoid “repeating the code” and commenting the obvious

    +
  • +
  • +

    Avoid commenting “tricky code”: rewrite it! If there’s no time at present to refactor a tricky section, mark it with a TODO and schedule time to take care of it as soon as possible.

    +
  • +
+
+
+
+
Effective comments
+
+
    +
  • +

    Use comments to summarize a section of code

    +
  • +
  • +

    Use comments to clarify sensitive pieces of code

    +
  • +
  • +

    Use comments to clarify the intent of the code

    +
  • +
  • +

    Bad written or out-of-date comments are more damaging than helpful:

    +
  • +
  • +

    Write clear and effective comments

    +
  • +
  • +

    Pay attention to pre-existing comments when modifying code or copying&pasting code

    +
  • +
+
+
+
+ +
+
+
+

Environment

+
+ +
+

Environment

+ +
+
+

Overview

+ +
+
+

Required software

+ + + +
+
+

Setting up the environment

+
+
    +
  1. +

    Download and install Visual Studio Code

    +
  2. +
  3. +

    Download and install .Net Core SDK

    +
  4. +
  5. +

    Install the Omnisharp extension in Visual Studio Code

    +
  6. +
+
+
+
+

== Hello world

+
+
    +
  1. +

    Open a project:

    +
    +
      +
    • +

      Open Visual Studio Code.

      +
    • +
    • +

      Click on the Explorer icon on the left menu and then click Open Folder.

      +
    • +
    • +

      Select the folder you want your C# project to be in and click Select Folder. For our example, we’ll create a folder for our project named 'HelloWorld'.

      +
    • +
    +
    +
  2. +
  3. +

    Initialize a C# project:

    +
    +
      +
    • +

      Open the Integrated Terminal from Visual Studio Code by typing CTRL+(backtick). Alternatively, you can select View > Integrated Terminal from the main menu.

      +
    • +
    • +

      In the terminal window, type dotnet new console.

      +
    • +
    • +

      This creates a Program.cs file in your folder with a simple "Hello World" program already written, along with a C# project file named HelloWorld.csproj.

      +
    • +
    +
    +
  4. +
  5. +

    Resolve the build assets:

    +
    +
      +
    • +

      For .NET Core 2.0, this step is optional. The dotnet restore command executes automatically when a new project is created.

      +
    • +
    +
    +
  6. +
  7. +

    Run the "Hello World" program:

    +
    +
      +
    • +

      Type dotnet run.

      +
    • +
    +
    +
  8. +
+
+
+
+

Debug

+
+
    +
  1. +

    Open Program.cs by clicking on it. The first time you open a C# file in Visual Studio Code, OmniSharp will load in the editor.

    +
  2. +
  3. +

    Visual Studio Code will prompt you to add the missing assets to build and debug your app. Select Yes.

    +
  4. +
  5. +

    To open the Debug view, click on the Debugging icon on the left side menu.

    +
  6. +
  7. +

    Locate the green arrow at the top of the pane. Make sure the drop-down next to it has .NET Core Launch (console) selected.

    +
  8. +
  9. +

    Add a breakpoint to your project by clicking on the editor margin (the space on the left of the line numbers in the editor).

    +
  10. +
  11. +

    Select F5 or the green arrow to start debugging. The debugger stops execution of your program when it reaches the breakpoint you set in the previous step.

    +
    +
      +
    • +

      While debugging you can view your local variables in the top left pane or use the debug console.

      +
    • +
    +
    +
  12. +
  13. +

    Select the green arrow at the top to continue debugging, or select the red square at the top to stop.

    +
  14. +
+
+
+
+

==

+
+

For more information and troubleshooting tips on .NET Core debugging with OmniSharp in Visual Studio Code, see Instructions for setting up the .NET Core debugger. +== ==

+
+
+ +
+
+
+

Packages

+
+ +
+

Packages

+ +
+
+

Packages overview

+
+ + + + + +
+ + +devon4Net is composed by a number of packages that increases the functionality and boosts time development. Each package has it’s own configuration to make them work properly. In appsettings.json set up your environment. On appsettings.{environment}.json you can configure each component. +
+
+
+
+

The packages

+
+

You can get the devon4Net packages on nuget.org.

+
+
+
+

Devon4Net.Application.WebAPI.Configuration

+ +
+
+

== Description

+
+

The devon4Net web API configuration core.

+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package Devon4Net.Application.WebAPI.Configuration
    +
    +
    +
  • +
+
+
+
+

== Default configuration values

+
+
+
  "devonfw": {
+    "UseDetailedErrorsKey": true,
+    "UseIIS": false,
+    "UseSwagger": true,
+    "Environment": "Development",
+    "KillSwitch": {
+      "killSwitchSettingsFile": "killswitch.appsettings.json"
+    },
+    "Kestrel": {
+      "UseHttps": true,
+      "HttpProtocol": "Http2", //Http1, Http2, Http1AndHttp2, none
+      "ApplicationPort": 8082,
+      "KeepAliveTimeout": 120, //in seconds
+      "MaxConcurrentConnections": 100,
+      "MaxConcurrentUpgradedConnections": 100,
+      "MaxRequestBodySize": 28.6, //In MB. The default maximum request body size is 30,000,000 bytes, which is approximately 28.6 MB
+      "Http2MaxStreamsPerConnection": 100,
+      "Http2InitialConnectionWindowSize": 131072, // From 65,535 and less than 2^31 (2,147,483,648)
+      "Http2InitialStreamWindowSize": 98304, // From 65,535 and less than 2^31 (2,147,483,648)
+      "AllowSynchronousIO": true,
+      "SslProtocol": "Tls12", //Tls, Tls11,Tls12, Tls13, Ssl2, Ssl3, none. For Https2 Tls12 is needed
+      "ServerCertificate": {
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost"
+      },
+      "ClientCertificate": {
+        "DisableClientCertificateCheck": true,
+        "RequireClientCertificate": false,
+        "CheckCertificateRevocation": true,
+        "ClientCertificates": {
+          "Whitelist": [
+            "3A87A49460E8FE0E2A198E63D408DC58435BC501"
+          ],
+          "DisableClientCertificateCheck": false
+        }
+      }
+    },
+    "IIS": {
+      "ForwardClientCertificate": true,
+      "AutomaticAuthentication": true,
+      "AuthenticationDisplayName" : ""
+    }
+  }
+
+
+
+
+

Devon4Net.Infrastructure.CircuitBreaker

+ +
+
+

== Description

+
+

The Devon4Net.Infrastructure.CircuitBreaker component implements the retry pattern for HTTP/HTTPS calls.

+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package Devon4Net.Infrastructure.CircuitBreaker
    +
    +
    +
  • +
+
+
+
+

== Default configuration values

+
+
+
  "CircuitBreaker": {
+    "CheckCertificate": true,
+    "Endpoints": [
+      {
+        "Name": "SampleService",
+        "BaseAddress": "https://localhost:5001/",
+        "Headers": {
+        },
+        "WaitAndRetrySeconds": [
+          0.0001,
+          0.0005,
+          0.001
+        ],
+        "DurationOfBreak": 0.0005,
+        "UseCertificate": true,
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost",
+        "SslProtocol": "3072" //TLS12
+      }
+    ]
+  }
+
+
+
+

|== == == == == == == == == == == = +|Property|Description +|CheckCertificate| True if HTTPS is required. This is useful when an API Gateway under development needs secured HTTP; by disabling this during development we can use communications without a valid server certificate +|Endpoints| Array with predefined sites to connect with +|Name| The name key to identify the destination URL +|Headers| Not ready yet +|WaitAndRetrySeconds| Array which determines the number of retries and the lapse period between each retry. The value is in milliseconds. +|Certificate| Client certificate to use to perform the HTTP call +|SslProtocol| The secure protocol to use on the call +|== == == == == == == == == == == =

+
+
+
+

== Protocols

+
+

|== == == == == == == == == == == = +|Protocol|Key|Description +|SSl3|48| Specifies the Secure Socket Layer (SSL) 3.0 security protocol. SSL 3.0 has been superseded by the Transport Layer Security (TLS) protocol and is provided for backward compatibility only. +|TLS|192|Specifies the Transport Layer Security (TLS) 1.0 security protocol. The TLS 1.0 protocol is defined in IETF RFC 2246. +|TLS11|768| Specifies the Transport Layer Security (TLS) 1.1 security protocol. The TLS 1.1 protocol is defined in IETF RFC 4346. On Windows systems, this value is supported starting with Windows 7. +|TLS12|3072| Specifies the Transport Layer Security (TLS) 1.2 security protocol. The TLS 1.2 protocol is defined in IETF RFC 5246. On Windows systems, this value is supported starting with Windows 7. +|TLS13|12288| Specifies the TLS 1.3 security protocol. The TLS protocol is defined in IETF RFC 8446.

+
+
+

|== == == == == == == == == == == =

+
+
+
+

== Usage

+
+

Add via Dependency Injection the circuit breaker instance. PE:

+
+
+
+
    public class FooService : Service<TodosContext>, ILoginService
+    {
+ public FooService(IUnitOfWork<AUTContext> uoW,  ICircuitBreakerHttpClient circuitBreakerClient,
+            ILogger<LoginService> logger) : base(uoW)
+        {
+        ...
+        }
+    }
+
+
+
+

At this point you can use the circuit breaker functionality in your code.

+
+
+

To perform a POST call you should use your circuit breaker instance as follows:

+
+
+
+
await circuitBreakerClient.PostAsync<YourOutputClass>(NameOftheService, EndPoint, InputData, MediaType.ApplicationJson).ConfigureAwait(false);
+
+
+
+

Where:

+
+
+

|== == == == == == == == == == == = +|Property|Description +|YourOutputClass| The type of the class that you are expecting to retrieve from the POST call +|NameOftheService| The key name of the endpoint provided in the appsettings.json file at Endpoints[] node +|EndPoint|Part of the url to use with the base address. PE: /validate +|InputData| Your instance of the class with values that you want to use in the POST call +|MediaType.ApplicationJson| The media type flag for the POST call +|== == == == == == == == == == == =

+
+
+
+

devon4Net.Domain.UnitOfWork

+ +
+
+

== Description

+
+

Unit of work implementation for devon4net solution. This unit of work provides the different methods to access the data layer with an atomic context. Sync and Async repository operations are provided. Customized Eager Loading method also provided for custom entity properties.

+
+
+ + + + + +
+ + +This component will move on next releases to Infrastructure instead of being part of Domain components +
+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Domain.UnitOfWork
    +
    +
    +
  • +
  • +

    Adding the database connection information:

    +
  • +
+
+
+

Add the database connection on the SetupDatabase method at Startup.cs

+
+
+
+
       private void SetupDatabase(IServiceCollection services)
+        {
+            services.SetupDatabase<TodoContext>(Configuration, "Default", WebAPI.Configuration.Enums.DatabaseType.InMemory);
+        }
+
+
+
+

Where:

+
+
+

|== == == == == == == == == == == = +|Param|Description +|TodoContext| Is the database context definition +|Default| Is the connection string defined at ConnectionString node at the appsettings configuration file +|WebAPI.Configuration.Enums.DatabaseType.InMemory| Is the database driver selection. In this case InMemory data base is chosen +|== == == == == == == == == == == =

+
+
+

The supported databases are:

+
+
+
    +
  • +

    SqlServer

    +
  • +
  • +

    Sqlite

    +
  • +
  • +

    InMemory

    +
  • +
  • +

    Cosmos

    +
  • +
  • +

    PostgreSQL

    +
  • +
  • +

    MySql

    +
  • +
  • +

    MariaDb

    +
  • +
  • +

    FireBird

    +
  • +
  • +

    Oracle

    +
  • +
  • +

    MSAccess

    +
  • +
+
+
+
+

== Notes

+
+

Now you can use the unit of work via dependency injection on your classes:

+
+
+
+UOW `DI` Sample +
+
Figure 8. Use of Unit of work via dependency injection
+
+
+

As you can see in the image, you can use Unit Of Work class with your defined ModelContext classes.

+
+
+

Predicate expression builder

+
+
+
    +
  • +

    Use this expression builder to generate lambda expressions dynamically.

    +
    +
    +
    var predicate =  PredicateBuilder.True<T>();
    +
    +
    +
  • +
+
+
+

Where T is a class. At this moment, you can build your expression and apply it to obtain your results in an efficient way, without retrieving data each time you apply an expression.

+
+
+
    +
  • +

    Example from My Thai Star .Net Core implementation:

    +
  • +
+
+
+
+
public async Task<PaginationResult<Dish>> GetpagedDishListFromFilter(int currentpage, int pageSize, bool isFav, decimal maxPrice, int minLikes, string searchBy, IList<long> categoryIdList, long userId)
+{
+    var includeList = new List<string>{"DishCategory","DishCategory.IdCategoryNavigation", "DishIngredient","DishIngredient.IdIngredientNavigation","IdImageNavigation"};
+
+    //Here we create our predicate builder
+    var dishPredicate = PredicateBuilder.True<Dish>();
+
+
+    //Now we start applying the different criteria:
+    if (!string.IsNullOrEmpty(searchBy))
+    {
+        var criteria = searchBy.ToLower();
+        dishPredicate = dishPredicate.And(d => d.Name.ToLower().Contains(criteria) || d.Description.ToLower().Contains(criteria));
+    }
+
+    if (maxPrice > 0) dishPredicate = dishPredicate.And(d=>d.Price<=maxPrice);
+
+    if (categoryIdList.Any())
+    {
+        dishPredicate = dishPredicate.And(r => r.DishCategory.Any(a => categoryIdList.Contains(a.IdCategory)));
+    }
+
+    if (isFav && userId >= 0)
+    {
+        var favourites = await UoW.Repository<UserFavourite>().GetAllAsync(w=>w.IdUser ==  userId);
+        var dishes = favourites.Select(s => s.IdDish);
+        dishPredicate = dishPredicate.And(r=> dishes.Contains(r.Id));
+    }
+
+    // Now we can use the predicate to retrieve data from database with just one call
+    return await UoW.Repository<Dish>().GetAllIncludePagedAsync(currentpage, pageSize, includeList, dishPredicate);
+
+}
+
+
+
+
+

devon4Net.Infrastructure.Extensions

+ +
+
+

== Description

+
+

Miscellaneous extension library which contains : +- Predicate expression builder +- DateTime formatter +- HttpClient +- HttpContext (Middleware support)

+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.Extensions
    +
    +
    +
  • +
+
+
+

HttpContext

+
+
+
    +
  • +

    TryAddHeader method is used on devon4Net.Infrastructure.Middleware component to add automatically response header options such authorization.

    +
  • +
+
+
+
+

devon4Net.Infrastructure.JWT

+ +
+
+

== Description

+
+
+
+

JSON Web Token (JWT) is an open standard (RFC 7519) that defines a compact and self-contained way for securely transmitting information between parties as a JSON object. This information can be verified and trusted because it is digitally signed. JWTs can be signed using a secret (with the` HMAC` algorithm) or a public/private key pair using RSA or ECDSA.

+
+
+
+— What is JSON Web Token?
+https://jwt.io/introduction/ +
+
+
+
    +
  • +

    devon4Net component to manage JWT standard to provide security to .Net API applications.

    +
  • +
+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> devon4Net.Infrastructure.JWT
    +
    +
    +
  • +
+
+
+
+

== Default configuration values

+
+
+
"JWT": {
+    "Audience": "devon4Net",
+    "Issuer": "devon4Net",
+    "TokenExpirationTime": 60,
+    "ValidateIssuerSigningKey": true,
+    "ValidateLifetime": true,
+    "ClockSkew": 5,
+    "Security": {
+      "SecretKeyLengthAlgorithm": "",
+      "SecretKeyEncryptionAlgorithm": "",
+      "SecretKey": "",
+      "Certificate": "",
+      "CertificatePassword": "",
+      "CertificateEncryptionAlgorithm": ""
+    }
+  }
+
+
+
+
    +
  • +

    ClockSkew indicates the allowed clock skew, in minutes, when validating the token expiration time (the expiration itself is set via TokenExpirationTime)

    +
  • +
  • +

    Certificate you can specify the name of your certificate (if it is on the same path) or the full path of the certificate. If the certificate does not exist, an exception will be raised.

    +
  • +
  • +

    SecretKeyLengthAlgorithm, SecretKeyEncryptionAlgorithm and CertificateEncryptionAlgorithm supported algorithms are:

    +
  • +
+
+
+

|== == == == == == == == == == == = +|Algorithm|Description +|Aes128Encryption|"http://www.w3.org/2001/04/xmlenc#aes128-cbc" +|Aes192Encryption|"http://www.w3.org/2001/04/xmlenc#aes192-cbc" +|Aes256Encryption|"http://www.w3.org/2001/04/xmlenc#aes256-cbc" +|DesEncryption|"http://www.w3.org/2001/04/xmlenc#des-cbc" +|Aes128KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes128" +|Aes192KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes192" +|Aes256KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes256" +|RsaV15KeyWrap|"http://www.w3.org/2001/04/xmlenc#rsa-1_5" +|Ripemd160Digest|"http://www.w3.org/2001/04/xmlenc#ripemd160" +|RsaOaepKeyWrap|"http://www.w3.org/2001/04/xmlenc#rsa-oaep" +|Aes128KW|"A128KW" +|Aes256KW|"A256KW" +|RsaPKCS1|"RSA1_5" +|RsaOAEP|"RSA-OAEP" +|ExclusiveC14n|"http://www.w3.org/2001/10/xml-exc-c14n#" +|ExclusiveC14nWithComments|"http://www.w3.org/2001/10/xml-exc-c14n#WithComments" +|EnvelopedSignature|"http://www.w3.org/2000/09/xmldsig#enveloped-signature" +|Sha256Digest|"http://www.w3.org/2001/04/xmlenc#sha256" +|Sha384Digest|"http://www.w3.org/2001/04/xmldsig-more#sha384" +|Sha512Digest|"http://www.w3.org/2001/04/xmlenc#sha512" +|Sha256|"SHA256" +|Sha384|"SHA384" +|Sha512|"SHA512" +|EcdsaSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha256" +|EcdsaSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha384" +|EcdsaSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha512" +|HmacSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha256" +|HmacSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha384" +|HmacSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha512" +|RsaSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha256" +|RsaSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha384" +|RsaSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha512" +|RsaSsaPssSha256Signature|"http://www.w3.org/2007/05/xmldsig-more#sha256-rsa-MGF1" 
+|RsaSsaPssSha384Signature|"http://www.w3.org/2007/05/xmldsig-more#sha384-rsa-MGF1" +|RsaSsaPssSha512Signature|"http://www.w3.org/2007/05/xmldsig-more#sha512-rsa-MGF1" +|EcdsaSha256|"ES256" +|EcdsaSha384|"ES384" +|EcdsaSha512|"ES512" +|HmacSha256|"HS256" +|HmacSha384|"HS384" +|HmacSha512|"HS512" +|None|"none" +|RsaSha256|"RS256" +|RsaSha384|"RS384" +|RsaSha512|"RS512" +|RsaSsaPssSha256|"PS256" +|RsaSsaPssSha384|"PS384" +|RsaSsaPssSha512|"PS512" +|Aes128CbcHmacSha256|"A128CBC-HS256" +|Aes192CbcHmacSha384|"A192CBC-HS384" +|Aes256CbcHmacSha512|"A256CBC-HS512" +|== == == == == == == == == == == =

+
+
+ + + + + +
+ + +Please check Microsoft documentation to get the latest updates on supported encryption algorithms +
+
+
+
    +
  • +

    Add this line of code (only if you use this component stand alone):

    +
  • +
+
+
+
+
services.AddBusinessCommonJwtPolicy();
+
+
+
+

On

+
+
+
+
Startup.cs
+
+
+
+

or on:

+
+
+
+
devon4Net.Application.Configuration.Startup/JwtApplicationConfiguration/ConfigureJwtPolicy method.
+
+
+
+
    +
  • +

    Inside the AddBusinessCommonJwtPolicy method you can add your JWT Policy like in My Thai Star application sample:

    +
  • +
+
+
+
+
 services.ConfigureJwtAddPolicy("MTSWaiterPolicy", "role", "waiter");
+
+
+
+
+

== Notes

+
+
    +
  • +

    The certificate will be used to generate the key to encrypt the json web token.

    +
  • +
+
+
+
+

devon4Net.Infrastructure.Middleware

+ +
+
+

== Description

+
+
    +
  • +

    devon4Net support for middleware classes.

    +
  • +
  • +

    In ASP.NET Core, middleware classes can handle an HTTP request or response. Middleware can either:

    +
    +
      +
    • +

      Handle an incoming HTTP request by generating an HTTP response.

      +
    • +
    • +

      Process an incoming HTTP request, modify it, and pass it on to another piece of middleware.

      +
    • +
    • +

      Process an outgoing HTTP response, modify it, and pass it on to either another piece of middleware, or the ASP.NET Core web server.

      +
    • +
    +
    +
  • +
  • +

    devon4Net supports the following automatic response headers:

    +
    +
      +
    • +

      AccessControlExposeHeader

      +
    • +
    • +

      StrictTransportSecurityHeader

      +
    • +
    • +

      XFrameOptionsHeader

      +
    • +
    • +

      XssProtectionHeader

      +
    • +
    • +

      XContentTypeOptionsHeader

      +
    • +
    • +

      ContentSecurityPolicyHeader

      +
    • +
    • +

      PermittedCrossDomainPoliciesHeader

      +
    • +
    • +

      ReferrerPolicyHeader:toc: macro

      +
    • +
    +
    +
  • +
+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.Middleware
    +
    +
    +
  • +
  • +

    You can configure your Middleware configuration on appsettings.{environment}.json:

    +
  • +
+
+
+
+
"Middleware": {
+    "Headers": {
+      "AccessControlExposeHeader": "Authorization",
+      "StrictTransportSecurityHeader": "",
+      "XFrameOptionsHeader": "DENY",
+      "XssProtectionHeader": "1;mode=block",
+      "XContentTypeOptionsHeader": "nosniff",
+      "ContentSecurityPolicyHeader": "",
+      "PermittedCrossDomainPoliciesHeader": "",
+      "ReferrerPolicyHeader": ""
+    }
+}
+
+
+
+
    +
  • +

    On the above sample, the server application will add to response header the AccessControlExposeHeader, XFrameOptionsHeader, XssProtectionHeader and XContentTypeOptionsHeader headers.

    +
  • +
  • +

    If the header response type does not have a value, it will not be added to the response headers.

    +
  • +
+
+
+
+

devon4Net.Infrastructure.Swagger

+ +
+
+

== Description

+
+
    +
  • +

    devon4net Swagger abstraction to provide full externalized easy configuration.

    +
  • +
  • +

    Swagger offers the easiest to use tools to take full advantage of all the capabilities of the OpenAPI Specification (OAS).

    +
  • +
+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> devon4Net.Infrastructure.Swagger
    +
    +
    +
  • +
  • +

    You can configure your Swagger configuration on appsettings.{environment}.json:

    +
  • +
+
+
+
+
"Swagger": {
+    "Version": "v1",
+    "Title": "devon4net API",
+    "Description": "devon4net API Contract",
+    "Terms": "https://www.devonfw.com/terms-of-use/",
+    "Contact": {
+      "Name": "devonfw",
+      "Email": "sample@mail.com",
+      "Url": "https://www.devonfw.com"
+    },
+    "License": {
+      "Name": "devonfw - Terms of Use",
+      "Url": "https://www.devonfw.com/terms-of-use/"
+    },
+    "Endpoint": {
+      "Name": "V1 Docs",
+      "Url": "/swagger/v1/swagger.json",
+      "UrlUi": "swagger",
+      "RouteTemplate": "swagger/v1/{documentName}/swagger.json"
+    }
+  }
+
+
+
+
    +
  • +

    Add this line of code (only if you use this component stand alone):

    +
  • +
+
+
+
+
services.ConfigureSwaggerService();
+
+
+
+

On

+
+
+
+
Startup.cs
+
+
+
+
    +
  • +

    Also add this line of code (only if you use this component stand alone):

    +
  • +
+
+
+
+
app.ConfigureSwaggerApplication();
+
+
+
+

On

+
+
+
+
Startup.cs/Configure(IApplicationBuilder app, IHostingEnvironment env)
+
+
+
+
    +
  • +

    Ensure your API actions and non-route parameters are decorated with explicit "Http" and "From" bindings.

    +
  • +
+
+
+
+

== Notes

+
+
    +
  • +

    To access to swagger UI launch your API project and type in your html browser the url http://localhost:yourPort/swagger.

    +
  • +
  • +

    In order to generate the documentation annotate your actions with summary, remarks and response tags:

    +
  • +
+
+
+
+
/// <summary>
+/// Method to make a reservation with potential guests. The method returns the reservation token with the format: {(CB_|GB_)}{now.Year}{now.Month:00}{now.Day:00}{_}{MD5({Host/Guest-email}{now.Year}{now.Month:00}{now.Day:00}{now.Hour:00}{now.Minute:00}{now.Second:00})}
+/// </summary>
+/// <param name="bookingDto"></param>
+/// <response code="201">Ok.</response>
+/// <response code="400">Bad request. Parser data error.</response>
+/// <response code="401">Unauthorized. Authentication fail.</response>
+/// <response code="403">Forbidden. Authorization error.</response>
+/// <response code="500">Internal Server Error. The search process ended with error.</response>
+[HttpPost]
+[HttpOptions]
+[Route("/mythaistar/services/rest/bookingmanagement/v1/booking")]
+[AllowAnonymous]
+[EnableCors("CorsPolicy")]
+public async Task<IActionResult> Booking([FromBody]BookingDto bookingDto)
+{
+    try
+    {
+
+    ...
+
+
+
+
    +
  • +

    Ensure that your project has the generateXMLdocumentationfile check active on build menu:

    +
  • +
+
+
+
+Generate documentation XML check +
+
Figure 9. Swagger documentation
+
+
+
    +
  • +

    Ensure that your XML files has the attribute copy always to true:

    +
  • +
+
+
+
+Generate documentation XML check +
+
Figure 10. Swagger documentation
+
+
+
+

devon4Net.Infrastructure.Test

+ +
+
+

== Description

+
+

devon4Net Base classes to create unit tests and integration tests with Moq and xUnit.

+
+
+
+

== Configuration

+
+
    +
  • +

    Load the template: +> dotnet new -i devon4Net.Test.Template +> dotnet new devon4NetTest

    +
  • +
+
+
+
+

== Notes

+
+
    +
  • +

    At this point you can find these classes:

    +
    +
      +
    • +

      BaseManagementTest

      +
    • +
    • +

      DatabaseManagementTest<T> (Where T is a devon4NetBaseContext class)

      +
    • +
    +
    +
  • +
  • +

    For unit testing, inherit a class from BaseManagementTest.

    +
  • +
  • +

    For integration tests, inherit a class from DatabaseManagementTest.

    +
  • +
  • +

    The recommended databases for integration tests are the in-memory database or the SQLite database.

    +
  • +
  • +

    Please check My thai Star test project.

    +
  • +
+
+
+
+

Deprecated packages

+ +
+
+

devon4Net.Domain.Context

+ +
+
+

== Description

+
+

devon4Net.Domain.Context contains the extended class devon4NetBaseContext in order to make easier the process of having a model context configured against different database engines. This configuration allows an easier testing configuration against local and in memory databases.

+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Domain.Context
    +
    +
    +
  • +
  • +

    Add to appsettings.{environment}.json file your database connections:

    +
  • +
+
+
+
+
"ConnectionStrings":
+{
+"DefaultConnection":
+"Server=localhost;Database=MyThaiStar;User Id=sa;Password=sa;MultipleActiveResultSets=True;",
+
+"AuthConnection":
+"Server=(localdb)\\mssqllocaldb;Database=aspnet-DualAuthCore-5E206A0B-D4DA-4E71-92D3-87FD6B120C5E;Trusted_Connection=True;MultipleActiveResultSets=true",
+
+"SqliteConnection": "Data Source=c:\\tmp\\membership.db;"
+}
+
+
+
+
    +
  • +

    On Startup.cs :

    +
  • +
+
+
+
+
void ConfigureServices(IServiceCollection services)
+
+
+
+
    +
  • +

    Add your database connections defined on previous point:

    +
  • +
+
+
+
+
services.ConfigureDataBase(
+new Dictionary<string, string> {
+{ConfigurationConst.DefaultConnection, Configuration.GetConnectionString(ConfigurationConst.DefaultConnection) }});
+
+
+
+
    +
  • +

    On devon4Net.Application.Configuration.Startup/DataBaseConfiguration/ConfigureDataBase configure your connections.

    +
  • +
+
+
+
+

devon4Net.Infrastructure.ApplicationUser

+ +
+
+

== Description

+
+

devon4Net application user classes that implement Microsoft’s basic authentication so it can be used with authentication methodologies such as JSON Web Token (JWT).

+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> devon4Net.Infrastructure.ApplicationUser
    +
    +
    +
  • +
  • +

    Add the database connection string for user management on appsettings.{environment}.json:

    +
  • +
+
+
+
+
"ConnectionStrings":
+{
+"AuthConnection":
+"Server=(localdb)\\mssqllocaldb;Database=aspnet-DualAuthCore-5E206A0B-D4DA-4E71-92D3-87FD6B120C5E;Trusted_Connection=True;MultipleActiveResultSets=true"
+}
+
+
+
+
    +
  • +

    Add the following line of code

    +
  • +
+
+
+
+
services.AddApplicationUserDependencyInjection();
+
+
+
+

On

+
+
+
+
Startup.cs/ConfigureServices(IServiceCollection services)
+
+
+
+

or on:

+
+
+
+
devon4Net.Application.Configuration.Startup/DependencyInjectionConfiguration/ConfigureDependencyInjectionService method.
+
+
+
+
    +
  • +

    Add the data seeder on Configure method on start.cs class:

    +
  • +
+
+
+
+
public void Configure(IApplicationBuilder app, IHostingEnvironment env, DataSeeder seeder)
+{
+    ...
+
+    app.UseAuthentication();
+    seeder.SeedAsync().Wait();
+
+    ...
+}
+
+
+
+
+

== Notes

+
+
    +
  • +

    You can use the following methods to set up the database configuration:

    +
  • +
+
+
+
+
public static void AddApplicationUserDbContextInMemoryService(this IServiceCollection services)
+
+public static void AddApplicationUserDbContextSQliteService(this IServiceCollection services, string connectionString)
+
+public static void AddApplicationUserDbContextSQlServerService(this IServiceCollection services, string connectionString)
+
+
+
+
    +
  • +

    The method AddApplicationUserDbContextInMemoryService uses the AuthContext connection string name to set up the database.

    +
  • +
  • +

    This component is used with the components devon4Net.Infrastructure.JWT and devon4Net.Infrastructure.JWT.MVC.

    +
  • +
+
+
+
+

devon4Net.Infrastructure.Communication

+ +
+
+

== Description

+
+

Basic client classes to invoke GET/POST methods asynchronously. This component has the minimal classes to send basic data. For more complex operations please use ASP4Net.Infrastructure.Extensions.

+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> devon4Net.Infrastructure.Communication
    +
    +
    +
  • +
  • +

    Create an instance of RestManagementService class.

    +
  • +
  • +

    Use next methods to use GET/POST basic options:

    +
  • +
+
+
+
+
public Task<string> CallGetMethod(string url);
+public Task<Stream> CallGetMethodAsStream(string url);
+public Task<string> CallPostMethod<T>(string url, T dataToSend);
+public Task<string> CallPutMethod<T>(string url, T dataToSend);
+
+
+
+
+

== Notes

+
+
    +
  • +

    Example:

    +
  • +
+
+
+
+
private async Task RestManagementServiceSample(EmailDto dataToSend)
+{
+    var url = Configuration["EmailServiceUrl"];
+    var restManagementService = new RestManagementService();
+    await restManagementService.CallPostMethod(url, dataToSend);
+}
+
+
+
+
+

devon4Net.Infrastructure.JWT.MVC

+ +
+
+

== Description

+
+
    +
  • +

    devon4Net Extended controller to interact with JWT features

    +
  • +
+
+
+
+

== Configuration

+
+
    +
  • +

    Extend your Microsoft.AspNetCore.Mvc.Controller class with the devon4NetJWTController class:

    +
  • +
+
+
+
+
public class LoginController : devon4NetJWTController
+{
+    private readonly ILoginService _loginService;
+
+    public LoginController(ILoginService loginService,  SignInManager<ApplicationUser>  signInManager, UserManager<ApplicationUser> userManager, ILogger<LoginController> logger, IMapper mapper) : base(logger,mapper)
+    {
+        _loginService = loginService;
+    }
+
+    ....
+
+
+
+
+

== Notes

+
+
    +
  • +

    In order to generate a JWT, you should implement the JWT generation on user login. For example, in My Thai Star is created as follows:

    +
  • +
+
+
+
+
public async Task<IActionResult> Login([FromBody]LoginDto loginDto)
+{
+    try
+    {
+        if (loginDto ==  null) return Ok();
+        var logged = await _loginService.LoginAsync(loginDto.UserName, loginDto.Password);
+
+        if (logged)
+        {
+            var user = await _loginService.GetUserByUserNameAsync(loginDto.UserName);
+
+            var encodedJwt = new JwtClientToken().CreateClientToken(_loginService.GetUserClaimsAsync(user));
+
+            Response.Headers.Add("Access-Control-Expose-Headers", "Authorization");
+
+            Response.Headers.Add("Authorization", $"{JwtBearerDefaults.AuthenticationScheme} {encodedJwt}");
+
+            return Ok(encodedJwt);
+        }
+        else
+        {
+            Response.Headers.Clear();
+            return StatusCode((int)HttpStatusCode.Unauthorized, "Login Error");
+        }
+
+    }
+    catch (Exception ex)
+    {
+        return StatusCode((int)HttpStatusCode.InternalServerError, $"{ex.Message} : {ex.InnerException}");
+    }
+}
+
+
+
+
    +
  • +

    In My Thai Star the JWT will contain the user information such as id, roles…​

    +
  • +
  • +

    Once you extend your controller with devon4NetJWTController you will have available these methods to simplify user management:

    +
  • +
+
+
+
+
    public interface Idevon4NetJWTController
+    {
+        // Gets the current user
+        JwtSecurityToken GetCurrentUser();
+
+        // Gets an specific assigned claim of current user
+        Claim GetUserClaim(string claimName, JwtSecurityToken jwtUser = null);
+
+        // Gets all the assigned claims of current user
+        IEnumerable<Claim> GetUserClaims(JwtSecurityToken jwtUser = null);
+    }
+
+
+
+
+

devon4Net.Infrastructure.MVC

+ +
+
+

== Description

+
+

Common classes to extend controller functionality on API. Also provides support for paged results in devon4Net applications and automapper injected class.

+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> devon4Net.Infrastructure.MVC
    +
    +
    +
  • +
+
+
+
+

== Notes

+
+
    +
  • +

    The generic class ResultObjectDto<T> provides a typed result object with pagination.

    +
  • +
  • +

    The extended class provides the following methods:

    +
  • +
+
+
+
+
        ResultObjectDto<T> GenerateResultDto<T>(int? page, int? size, int? total);
+        ResultObjectDto<T> GenerateResultDto<T>(List<T> result, int? page = null, int? size = null);
+
+
+
+
    +
  • +

    GenerateResultDto provides a typed ResultObjectDto object or a list of typed ResultObjectDto objects. The aim of these methods is to provide clean management of result objects and to avoid repeating code throughout the different controller classes.

    +
  • +
  • +

    The following sample from My Thai Star shows how to use it:

    +
  • +
+
+
+
+
public async Task<IActionResult> Search([FromBody] FilterDtoSearchObject filterDto)
+{
+    if (filterDto ==  null) filterDto = new FilterDtoSearchObject();
+
+    try
+    {
+        var dishList = await _dishService.GetDishListFromFilter(false, filterDto.GetMaxPrice(), filterDto.GetMinLikes(), filterDto.GetSearchBy(),filterDto.GetCategories(), -1);
+
+
+        return new OkObjectResult(GenerateResultDto(dishList).ToJson());
+    }
+    catch (Exception ex)
+    {
+        return StatusCode((int)HttpStatusCode.InternalServerError, $"{ex.Message} : {ex.InnerException}");
+    }
+}
+
+
+
+
+

devon4Net.Infrastructure.AOP

+ +
+
+

== Description

+
+

Simple AOP Exception handler for .Net Controller classes integrated with Serilog.

+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Domain.AOP
    +
    +
    +
  • +
+
+
+

Add this line of code on ConfigureServices method on Startup.cs

+
+
+
+
services.AddAopAttributeService();
+
+
+
+
+

== Notes

+
+

Now the API methods exposed on your controller classes will automatically be tracked on the following methods:

+
+
+
    +
  • +

    OnActionExecuting

    +
  • +
  • +

    OnActionExecuted

    +
  • +
  • +

    OnResultExecuting

    +
  • +
  • +

    OnResultExecuted

    +
  • +
+
+
+

If an exception occurs, a message will be displayed on log with the stack trace.

+
+
+
+

devon4Net.Infrastructure.Cors

+ +
+
+

== Description

+
+

Enables CORS configuration for devon4Net applications. Multiple domains can be configured from configuration. Mandatory for web clients (e.g. Angular), which are otherwise prevented from making AJAX requests to another domain.

+
+
+

Cross-Origin Resource Sharing (CORS) is a mechanism that uses additional HTTP headers to tell a browser to let a web application running at one origin (domain) have permission to access selected resources from a server at a different origin. A web application makes a cross-origin HTTP request when it requests a resource that has a different origin (domain, protocol, and port) than its own origin.

+
+
+

Please refer to this link to get more information about CORS and .Net core.

+
+
+
+

== Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> devon4Net.Infrastructure.Cors
    +
    +
    +
  • +
  • +

    You can configure your Cors configuration on appsettings.{environment}.json:

    +
    +
    +
    `CorsPolicy`: indicates the name of the policy. You can use this name to add security headers on your API exposed methods.
    +
    +
    +
    +
    +
    Origins: The allowed domains
    +
    +
    +
    +
    +
    Headers: The allowed headers such accept,content-type,origin,x-custom-header
    +
    +
    +
  • +
  • +

    If you specify the cors configuration as empty array, a default cors-policy will be used with all origins enabled:

    +
  • +
+
+
+
+
  "Cors": []
+
+
+
+
    +
  • +

    On the other hand, you can specify different Cors policies in your solution as follows:

    +
  • +
+
+
+
+
"Cors": []
+[
+  {
+    "CorsPolicy": "CorsPolicy1",
+    "Origins": "http://example.com,http://www.contoso.com",
+    "Headers": "accept,content-type,origin,x-custom-header",
+    "Methods": "GET,POST,HEAD",
+    "AllowCredentials": true
+  },
+  {
+    "CorsPolicy": "CorsPolicy2",
+    "Origins": "http://example.com,http://www.contoso.com",
+    "Headers": "accept,content-type,origin,x-custom-header",
+    "Methods": "GET,POST,HEAD",
+    "AllowCredentials": true
+  }
+]
+
+
+
+
+

== Notes

+
+
    +
  • +

    To use CORS in your API methods, use the next notation:

    +
  • +
+
+
+
+
[EnableCors("YourCorsPolicy")]
+public IActionResult Index() {
+    return View();
+}
+
+
+
+
    +
  • +

    if you want to disable the CORS check use the following annotation:

    +
  • +
+
+
+
+
[DisableCors]
+public IActionResult Index() {
+    return View();
+}
+
+
+
+ +
+
+
+

Templates

+
+ +
+

== Templates

+
+
+

Overview

+
+

The given .Net Core and .Net Framework templates allow you to start coding an application with the following functionality ready to use:

+
+
+

Please refer to User guide in order to start developing.

+
+
+
+

Net Core 3.0

+
+

The .Net Core 3.0 template allows you to start developing an n-layer server application to provide the latest features. The template can be used in Visual Studio Code and Visual Studio 2019.

+
+
+

The application result can be deployed as a console application, microservice or web page.

+
+
+

To start developing with the devon4Net template, please follow these instructions:

+
+
+
Using devon4Net template
+ +
+
+
Option 1
+
+
    +
  1. +

    Open your favourite terminal (Win/Linux/iOS)

    +
  2. +
  3. +

    Go to future project’s path

    +
  4. +
  5. +

    Type dotnet new --install Devon4Net.WebAPI.Template

    +
  6. +
  7. +

    Type dotnet new Devon4NetAPI

    +
  8. +
  9. +

    Go to project’s path

    +
  10. +
  11. +

    You are ready to start developing with devon4Net

    +
  12. +
+
+
+
+
Option 2
+
+
    +
  1. +

    Create a new dotnet API project from scratch

    +
  2. +
  3. +

    Add the NuGet package reference to your project

    +
  4. +
  5. +

    Type dotnet new --install Devon4Net.WebAPI.Template

    +
  6. +
+
+
+
+
+

Net Core 2.1.x

+
+

The .Net Core 2.1.x template allows you to start developing an n-layer server application to provide the latest features. The template can be used in Visual Studio Code and Visual Studio 2017.

+
+
+

The application result can be deployed as a console application, microservice or web page.

+
+
+

To start developing with the devon4Net template, please follow these instructions:

+
+
+
Using devon4Net template
+
+
    +
  1. +

    Open your favourite terminal (Win/Linux/iOS)

    +
  2. +
  3. +

    Go to future project’s path

    +
  4. +
  5. +

    Type dotnet new --install Devon4Net.WebAPI.Template::1.0.8

    +
  6. +
  7. +

    Type dotnet new Devon4NetAPI

    +
  8. +
  9. +

    Go to project’s path

    +
  10. +
  11. +

    You are ready to start developing with devon4Net

    +
  12. +
+
+
+ + + + + +
+ + +For the latest updates on references packages, please get the sources from Github +
+
+
+
+
+ + +
+
+
+
+

Samples

+
+ +
+

Samples

+ +
+
+

My Thai Star Restaurant

+ +
+
+ +
+
+
+
+

Angular requirements

+
+ +
+
+
+

Angular client

+
+
    +
  1. +

    Install Node.js LTS version

    +
  2. +
  3. +

    Install Angular CLI from command line:

    +
    +
      +
    • +

      npm install -g @angular/cli

      +
    • +
    +
    +
  4. +
  5. +

    Install Yarn

    +
  6. +
  7. +

    Go to Angular client from command line

    +
  8. +
  9. +

    Execute : yarn install

    +
  10. +
  11. +

    Launch the app from command line: ng serve and check http://localhost:4200

    +
  12. +
  13. +

    You are ready

    +
  14. +
+
+
+
+

.Net Core server

+ +
+
+

== Basic architecture details

+
+

Following the devonfw conventions the .Net Core 2.0 My Thai Star backend is going to be developed dividing the application in Components and using a n-layer architecture.

+
+
+
+project modules +
+
+
+
+

== Components

+
+

The application is going to be divided in different components to encapsulate the different domains of the application functionalities.

+
+
+
+mtsn components +
+
+
+

As main components we will find:

+
+
+
    +
  • +

    _BookingService: Manages the bookings part of the application. With this component the users (anonymous/logged in) can create new bookings or cancel an existing booking. The users with waiter role can see all scheduled bookings.

    +
  • +
+
+
+

-OrderService: This component handles the process to order dishes (related to bookings). A user (as a host or as a guest) can create orders (that contain dishes) or cancel an existing one. The users with waiter role can see all ordered orders.

+
+
+
    +
  • +

    DishService: This component groups the logic related to the menu (dishes) view. Its main feature is to provide the client with the data of the available dishes but also can be used by other components (Ordermanagement) as a data provider in some processes.

    +
  • +
  • +

    UserService: Takes care of the User Profile management, allowing to create and update the data profiles.

    +
  • +
+
+
+

As common components (that don’t exactly represent an application’s area but provide functionalities that can be used by the main components):

+
+
+
    +
  • +

    Mailservice: with this service we will provide the functionality for sending email notifications. This is a shared service between different app components such as bookingmanagement or ordercomponent.

    +
  • +
+
+
+

Other components:

+
+
+
    +
  • +

    Security (will manage the access to the private part of the application using a jwt implementation).

    +
  • +
  • +

    Twitter integration: planned as a Microservice will provide the twitter integration needed for some specific functionalities of the application.

    +
  • +
+
+
+
+

Layers

+ +
+
+

== Introduction

+
+

The .Net Core backend for My Thai Star application is going to be based on:

+
+
+
    +
  • +

    devon4NET as the .Net Core framework

    +
  • +
  • +

    VSCode as the Development environment

    +
  • +
  • +

    TOBAGO as code generation tool

    +
  • +
+
+
+
+

== Application layer

+
+

This layer will expose the REST api to exchange information with the client applications.

+
+
+

The application will expose the services on port 8081 and it can be launched as a self host console application (microservice approach) and as a Web Api application hosted on IIS/IIS Express.

+
+
+
+

== Business layer

+
+

This layer will define the controllers which will be used on the application layer to expose the different services. Also, will define the swagger contract making use of summary comments and framework attributes.

+
+
+

This layer also includes the object response classes in order to interact with external clients.

+
+
+
+

== Service layer

+
+

The layer in charge of hosting the business logic of the application. Also orchestrates the object conversion between object response and entity objects defined in Data layer.

+
+
+
+

== Data layer

+
+

The layer to communicate with the data base.

+
+
+

Data layer makes use of Entity Framework. +The Database context is defined on DataAccessLayer assembly (ModelContext).

+
+
+

This layer makes use of the Repository pattern and Unit of work in order to encapsulate the complexity. Making use of this combined patterns we ensure an organized and easy work model.

+
+
+

As in the previous layers, the data access layer will have both interface and implementation tiers. However, in this case, the implementation will be slightly different due to the use of generics.

+
+
+
+

== Cross-Cutting concerns

+
+

The layer that makes use of transversal components such as JWT and mailing.

+
+
+
+

Jwt basics

+
+
    +
  • +

    A user will provide a username / password combination to our auth server.

    +
  • +
  • +

    The auth server will try to identify the user and, if the credentials match, will issue a token.

    +
  • +
  • +

    The user will send the token as the Authorization header to access resources on server protected by JWT Authentication.

    +
  • +
+
+
+
+jwt schema +
+
+
+
+

Jwt implementation details

+
+

The Json Web Token pattern will be implemented based on the jwt on .net core framework that is provided by default in the devon4Net projects.

+
+
+
+

Authentication

+
+

Based on Microsoft approach, we will implement a class to define the security entry point and filters. Also, as My Thai Star is a mainly public application, we will define here the resources that won’t be secured.

+
+
+

On devon4Net.Infrastructure.JWT assembly is defined a subset of Microsoft’s authorization schema Database. It is started up the first time the application launches.

+
+
+

You can read more about Authorization on:

+
+ + +
+
+

Dependency injection

+
+

As it is explained in the Microsoft documentation we are going to implement the dependency injection pattern basing our solution on .Net Core.

+
+
+
+dependency injection +
+
+
+
    +
  • +

    Separation of API and implementation: Inside each layer we will separate the elements in different tiers: interface and implementation. The interface tier will store the interface with the methods definition and inside the implementation we will store the class that implements the interface.

    +
  • +
+
+
+
+

Layer communication method

+
+

The connection between layers, to access to the functionalities of each one, will be solved using the dependency injection.

+
+
+
+layer impl +
+
+
+

Connection BookingService - Logic

+
+
+
+
 public class BookingService : EntityService<Booking>, IBookingService
+    {
+        private readonly IBookingRepository _bookingRepository;
+        private readonly IRepository<Order> _orderRepository;
+        private readonly IRepository<InvitedGuest> _invitedGuestRepository;
+        private readonly IOrderLineRepository _orderLineRepository;
+        private readonly IUnitOfWork _unitOfWork;
+
+        public BookingService(IUnitOfWork unitOfWork,
+            IBookingRepository repository,
+            IRepository<Order> orderRepository,
+            IRepository<InvitedGuest> invitedGuestRepository,
+            IOrderLineRepository orderLineRepository) : base(unitOfWork, repository)
+        {
+            _unitOfWork = unitOfWork;
+            _bookingRepository = repository;
+            _orderRepository = orderRepository;
+            _invitedGuestRepository = invitedGuestRepository;
+            _orderLineRepository = orderLineRepository;
+        }
+}
+
+
+
+

To give service to the defined User Stories we will need to implement the following services:

+
+
+
    +
  • +

    provide all available dishes.

    +
  • +
  • +

    save a booking.

    +
  • +
  • +

    save an order.

    +
  • +
  • +

    provide a list of bookings (only for waiters) and allow filtering.

    +
  • +
  • +

    provide a list of orders (only for waiters) and allow filtering.

    +
  • +
  • +

    login service (see the Security section).

    +
  • +
  • +

    provide the current user data (see the Security section)

    +
  • +
+
+
+

Following the [naming conventions] proposed for devon4Net applications we will define the following end points for the listed services.

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that does not belong to the Order entity).

    +
  • +
  • +

    (POST) /mythaistar/login.

    +
  • +
  • +

    (GET) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

You can find all the details for the services implementation in the Swagger definition included in the My Thai Star project on Github.

+
+
+
+

Api Exposed

+
+

The devon4Net.Business.Controller assembly in the business layer of a component will store the definition of the service via an interface. In this definition of the service we will set up the endpoints of the service, the type of data expected and returned, the HTTP method for each endpoint of the service and other configurations if needed.

+
+
+
+
        /// <summary>
+        /// Method to make a reservation with potential guests. The method returns the reservation token with the format: {(CB_|GB_)}{now.Year}{now.Month:00}{now.Day:00}{_}{MD5({Host/Guest-email}{now.Year}{now.Month:00}{now.Day:00}{now.Hour:00}{now.Minute:00}{now.Second:00})}
+        /// </summary>
+
+        /// <param name="bookingView"></param>
+        /// <response code="201">Ok.</response>
+        /// <response code="400">Bad request. Parser data error.</response>
+        /// <response code="401">Unauthorized. Authentication fail.</response>
+        /// <response code="403">Forbidden. Authorization error.</response>
+        /// <response code="500">Internal Server Error. The search process ended with error.</response>
+        [HttpPost]
+        [HttpOptions]
+        [Route("/mythaistar/services/rest/bookingmanagement/v1/booking")]
+        [AllowAnonymous]
+        [EnableCors("CorsPolicy")]
+        public IActionResult BookingBooking([FromBody]BookingView bookingView)
+        {
+...
+
+
+
+

Using the summary annotations and attributes tells Swagger the contract via the XML doc generated at compile time. This doc will be stored in the XmlDocumentation folder.

+
+
+

The Api methods will be exposed on the application layer.

+
+
+
+

Google Mail API Consumer

+ +
+

|== == == == == == == == == == == = +|Application| MyThaiStarEmailService.exe +|Config file| MyThaiStarEmailService.exe.Config +|Default port|8080 +|== == == == == == == == == == == =

+
+
+
+

Overview

+
+
    +
  1. +

    Execute MyThaiStarEmailService.exe.

    +
  2. +
  3. +

    The first time google will ask you for credentials +(just one time) in your default browser:

    +
    + +
    +
  4. +
  5. +

    Visit the url: http://localhost:8080/swagger

    +
  6. +
  7. +

    Your server is ready!

    +
  8. +
+
+
+
+GMail Service +
+
Figure 11. GMail Server Swagger contract page
+
+
+
+

JSON Example

+
+

This is the JSON example to test with swagger client. Please read the swagger documentation.

+
+
+
+
{
+   "EmailFrom":"mythaistarrestaurant@gmail.com",
+   "EmailAndTokenTo":{
+      "MD5Token1":" Email_Here!@gmail.com",
+      "MD5Token2":" Email_Here!@gmail.com"
+   },
+   "EmailType":0,
+   "DetailMenu":[
+      "Thai Spicy Basil Fried Rice x2",
+      "Thai green chicken curry x2"
+   ],
+   "BookingDate":"2017-05-31T12:53:39.7864723+02:00",
+   "Assistants":2,
+   "BookingToken":"MD5Booking",
+   "Price":20.0,
+   "ButtonActionList":{
+      "http://accept.url":"Accept",
+      "http://cancel.url":"Cancel"
+   },
+   "Host":{
+      " Email_Here!@gmail.com":"José Manuel"
+   }
+}
+
+
+
+
+

Configure the service port

+
+

If you want to change the default port, please edit the config file and +change the next entry in appSettings node:

+
+
+
+
<appSettings>
+   <add key="LocalListenPort" value="8080" />
+</appSettings>
+
+
+
+
+ + + +
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4net.wiki/packages.html b/docs/devonfw.github.io/1.0/devon4net.wiki/packages.html new file mode 100644 index 00000000..86003bd5 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4net.wiki/packages.html @@ -0,0 +1,2050 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Packages

+
+ +
+
+
+

Packages overview

+
+
+ + + + + +
+ + +devon4Net is composed by a number of packages that increases the functionality and boosts time development. Each package has it’s own configuration to make them work properly. In appsettings.json set up your environment. On appsettings.{environment}.json you can configure each component. +
+
+
+
+
+

The packages

+
+
+

You can get the devon4Net packages on nuget.org.

+
+
+
+
+

Devon4Net.Application.WebAPI.Configuration

+
+ +
+
+
+

== Description

+
+
+

The devon4Net web API configuration core.

+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package Devon4Net.Application.WebAPI.Configuration
    +
    +
    +
  • +
+
+
+
+
+

== Default configuration values

+
+
+
+
  "devonfw": {
+    "UseDetailedErrorsKey": true,
+    "UseIIS": false,
+    "UseSwagger": true,
+    "Environment": "Development",
+    "KillSwitch": {
+      "killSwitchSettingsFile": "killswitch.appsettings.json"
+    },
+    "Kestrel": {
+      "UseHttps": true,
+      "HttpProtocol": "Http2", //Http1, Http2, Http1AndHttp2, none
+      "ApplicationPort": 8082,
+      "KeepAliveTimeout": 120, //in seconds
+      "MaxConcurrentConnections": 100,
+      "MaxConcurrentUpgradedConnections": 100,
+      "MaxRequestBodySize": 28.6, //In MB. The default maximum request body size is 30,000,000 bytes, which is approximately 28.6 MB
+      "Http2MaxStreamsPerConnection": 100,
+      "Http2InitialConnectionWindowSize": 131072, // From 65,535 and less than 2^31 (2,147,483,648)
+      "Http2InitialStreamWindowSize": 98304, // From 65,535 and less than 2^31 (2,147,483,648)
+      "AllowSynchronousIO": true,
+      "SslProtocol": "Tls12", //Tls, Tls11,Tls12, Tls13, Ssl2, Ssl3, none. For Https2 Tls12 is needed
+      "ServerCertificate": {
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost"
+      },
+      "ClientCertificate": {
+        "DisableClientCertificateCheck": true,
+        "RequireClientCertificate": false,
+        "CheckCertificateRevocation": true,
+        "ClientCertificates": {
+          "Whitelist": [
+            "3A87A49460E8FE0E2A198E63D408DC58435BC501"
+          ],
+          "DisableClientCertificateCheck": false
+        }
+      }
+    },
+    "IIS": {
+      "ForwardClientCertificate": true,
+      "AutomaticAuthentication": true,
+      "AuthenticationDisplayName" : ""
+    }
+  }
+
+
+
+
+
+

Devon4Net.Infrastructure.CircuitBreaker

+
+ +
+
+
+

== Description

+
+
+

The Devon4Net.Infrastructure.CircuitBreaker component implements the retry pattern for HTTP/HTTPS calls.

+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package Devon4Net.Infrastructure.CircuitBreaker
    +
    +
    +
  • +
+
+
+
+
+

== Default configuration values

+
+
+
+
  "CircuitBreaker": {
+    "CheckCertificate": true,
+    "Endpoints": [
+      {
+        "Name": "SampleService",
+        "BaseAddress": "https://localhost:5001/",
+        "Headers": {
+        },
+        "WaitAndRetrySeconds": [
+          0.0001,
+          0.0005,
+          0.001
+        ],
+        "DurationOfBreak": 0.0005,
+        "UseCertificate": true,
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost",
+        "SslProtocol": "3072" //TLS12
+      }
+    ]
+  }
+
+
+
+

|== == == == == == == == == == == = +|Property|Description +|CheckCertificate| True if HTTPS is required. This is useful when developing an API Gateway that needs secured HTTP; disabling this in development lets us use communications without a valid server certificate +|Endpoints| Array with predefined sites to connect with +|Name| The name key to identify the destination URL +|Headers| Not ready yet +|WaitAndRetrySeconds| Array which determines the number of retries and the lapse period between each retry. The value is in milliseconds. +|Certificate| Client certificate to use to perform the HTTP call +|SslProtocol| The secure protocol to use on the call +|== == == == == == == == == == == =

+
+
+
+
+

== Protocols

+
+
+

|== == == == == == == == == == == = +|Protocol|Key|Description +|SSl3|48| Specifies the Secure Socket Layer (SSL) 3.0 security protocol. SSL 3.0 has been superseded by the Transport Layer Security (TLS) protocol and is provided for backward compatibility only. +|TLS|192|Specifies the Transport Layer Security (TLS) 1.0 security protocol. The TLS 1.0 protocol is defined in IETF RFC 2246. +|TLS11|768| Specifies the Transport Layer Security (TLS) 1.1 security protocol. The TLS 1.1 protocol is defined in IETF RFC 4346. On Windows systems, this value is supported starting with Windows 7. +|TLS12|3072| Specifies the Transport Layer Security (TLS) 1.2 security protocol. The TLS 1.2 protocol is defined in IETF RFC 5246. On Windows systems, this value is supported starting with Windows 7. +|TLS13|12288| Specifies the TLS 1.3 security protocol. The TLS protocol is defined in IETF RFC 8446.

+
+
+

|== == == == == == == == == == == =

+
+
+
+
+

== Usage

+
+
+

Add via Dependency Injection the circuit breaker instance. PE:

+
+
+
+
    public class FooService : Service<TodosContext>, ILoginService
+    {
+ public FooService(IUnitOfWork<AUTContext> uoW,  ICircuitBreakerHttpClient circuitBreakerClient,
+            ILogger<LoginService> logger) : base(uoW)
+        {
+        ...
+        }
+    }
+
+
+
+

At this point you can use the circuit breaker functionality in your code.

+
+
+

To perform a POST call you should use your circuit breaker instance as follows:

+
+
+
+
await circuitBreakerClient.PostAsync<YourOutputClass>(NameOftheService, EndPoint, InputData, MediaType.ApplicationJson).ConfigureAwait(false);
+
+
+
+

Where:

+
+
+

|== == == == == == == == == == == = +|Property|Description +|YourOutputClass| The type of the class that you are expecting to retrieve from the POST call +|NameOftheService| The key name of the endpoint provided in the appsettings.json file at Endpoints[] node +|EndPoint|Part of the url to use with the base address. PE: /validate +|InputData| Your instance of the class with values that you want to use in the POST call +|MediaType.ApplicationJson| The media type flag for the POST call +|== == == == == == == == == == == =

+
+
+
+
+

devon4Net.Domain.UnitOfWork

+
+ +
+
+
+

== Description

+
+
+

Unit of work implementation for devon4net solution. This unit of work provides the different methods to access the data layer with an atomic context. Sync and Async repository operations are provided. Customized Eager Loading method also provided for custom entity properties.

+
+
+ + + + + +
+ + +This component will move on next releases to Infrastructure instead of being part of Domain components +
+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Domain.UnitOfWork
    +
    +
    +
  • +
  • +

    Adding the database connection information:

    +
  • +
+
+
+

Add the database connection on the SetupDatabase method at Startup.cs

+
+
+
+
       private void SetupDatabase(IServiceCollection services)
+        {
+            services.SetupDatabase<TodoContext>(Configuration, "Default", WebAPI.Configuration.Enums.DatabaseType.InMemory);
+        }
+
+
+
+

Where:

+
+
+

|== == == == == == == == == == == = +|Param|Description +|TodoContext| Is the database context definition +|Default| Is the connection string defined at ConnectionString node at the appsettings configuration file +|WebAPI.Configuration.Enums.DatabaseType.InMemory| Is the database driver selection. In this case InMemory data base is chosen +|== == == == == == == == == == == =

+
+
+

The supported databases are:

+
+
+
    +
  • +

    SqlServer

    +
  • +
  • +

    Sqlite

    +
  • +
  • +

    InMemory

    +
  • +
  • +

    Cosmos

    +
  • +
  • +

    PostgreSQL

    +
  • +
  • +

    MySql

    +
  • +
  • +

    MariaDb

    +
  • +
  • +

    FireBird

    +
  • +
  • +

    Oracle

    +
  • +
  • +

    MSAccess

    +
  • +
+
+
+
+
+

== Notes

+
+
+

Now you can use the unit of work via dependency injection on your classes:

+
+
+
+UOW `DI` Sample +
+
Figure 1. Use of Unit of work via dependency injection
+
+
+

As you can see in the image, you can use Unit Of Work class with your defined ModelContext classes.

+
+
+

Predicate expression builder

+
+
+
    +
  • +

    Use this expression builder to generate lambda expressions dynamically.

    +
    +
    +
    var predicate =  PredicateBuilder.True<T>();
    +
    +
    +
  • +
+
+
+

Where T is a class. At this moment, you can build your expression and apply it to obtain your results in an efficient way, without retrieving data each time you apply an expression.

+
+
+
    +
  • +

    Example from My Thai Star .Net Core implementation:

    +
  • +
+
+
+
+
public async Task<PaginationResult<Dish>> GetpagedDishListFromFilter(int currentpage, int pageSize, bool isFav, decimal maxPrice, int minLikes, string searchBy, IList<long> categoryIdList, long userId)
+{
+    var includeList = new List<string>{"DishCategory","DishCategory.IdCategoryNavigation", "DishIngredient","DishIngredient.IdIngredientNavigation","IdImageNavigation"};
+
+    //Here we create our predicate builder
+    var dishPredicate = PredicateBuilder.True<Dish>();
+
+
+    //Now we start applying the different criteria:
+    if (!string.IsNullOrEmpty(searchBy))
+    {
+        var criteria = searchBy.ToLower();
+        dishPredicate = dishPredicate.And(d => d.Name.ToLower().Contains(criteria) || d.Description.ToLower().Contains(criteria));
+    }
+
+    if (maxPrice > 0) dishPredicate = dishPredicate.And(d=>d.Price<=maxPrice);
+
+    if (categoryIdList.Any())
+    {
+        dishPredicate = dishPredicate.And(r => r.DishCategory.Any(a => categoryIdList.Contains(a.IdCategory)));
+    }
+
+    if (isFav && userId >= 0)
+    {
+        var favourites = await UoW.Repository<UserFavourite>().GetAllAsync(w=>w.IdUser ==  userId);
+        var dishes = favourites.Select(s => s.IdDish);
+        dishPredicate = dishPredicate.And(r=> dishes.Contains(r.Id));
+    }
+
+    // Now we can use the predicate to retrieve data from database with just one call
+    return await UoW.Repository<Dish>().GetAllIncludePagedAsync(currentpage, pageSize, includeList, dishPredicate);
+
+}
+
+
+
+
+
+

devon4Net.Infrastructure.Extensions

+
+ +
+
+
+

== Description

+
+
+

Miscellaneous extension library which contains: +- Predicate expression builder +- DateTime formatter +- HttpClient +- HttpContext (Middleware support)

+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.Extensions
    +
    +
    +
  • +
+
+
+

HttpContext

+
+
+
    +
  • +

    TryAddHeader method is used on the devon4Net.Infrastructure.Middleware component to automatically add response header options such as authorization.

    +
  • +
+
+
+
+
+

devon4Net.Infrastructure.JWT

+
+ +
+
+
+

== Description

+
+
+
+
+

JSON Web Token (JWT) is an open standard (RFC 7519) that defines a compact and self-contained way for securely transmitting information between parties as a JSON object. This information can be verified and trusted because it is digitally signed. JWTs can be signed using a secret (with the HMAC algorithm) or a public/private key pair using RSA or ECDSA.

+
+
+
+— What is JSON Web Token?
+https://jwt.io/introduction/ +
+
+
+
    +
  • +

    devon4Net component to manage JWT standard to provide security to .Net API applications.

    +
  • +
+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> devon4Net.Infrastructure.JWT
    +
    +
    +
  • +
+
+
+
+
+

== Default configuration values

+
+
+
+
"JWT": {
+    "Audience": "devon4Net",
+    "Issuer": "devon4Net",
+    "TokenExpirationTime": 60,
+    "ValidateIssuerSigningKey": true,
+    "ValidateLifetime": true,
+    "ClockSkew": 5,
+    "Security": {
+      "SecretKeyLengthAlgorithm": "",
+      "SecretKeyEncryptionAlgorithm": "",
+      "SecretKey": "",
+      "Certificate": "",
+      "CertificatePassword": "",
+      "CertificateEncryptionAlgorithm": ""
+    }
+  }
+
+
+
+
    +
  • +

    ClockSkew indicates the token expiration time in minutes

    +
  • +
  • +

    Certificate you can specify the name of your certificate (if it is on the same path) or the full path of the certificate. If the certificate does not exist, an exception will be raised.

    +
  • +
  • +

    SecretKeyLengthAlgorithm, SecretKeyEncryptionAlgorithm and CertificateEncryptionAlgorithm supported algorithms are:

    +
  • +
+
+
+

|== == == == == == == == == == == = +|Algorithm|Description +|Aes128Encryption|"http://www.w3.org/2001/04/xmlenc#aes128-cbc" +|Aes192Encryption|"http://www.w3.org/2001/04/xmlenc#aes192-cbc" +|Aes256Encryption|"http://www.w3.org/2001/04/xmlenc#aes256-cbc" +|DesEncryption|"http://www.w3.org/2001/04/xmlenc#des-cbc" +|Aes128KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes128" +|Aes192KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes192" +|Aes256KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes256" +|RsaV15KeyWrap|"http://www.w3.org/2001/04/xmlenc#rsa-1_5" +|Ripemd160Digest|"http://www.w3.org/2001/04/xmlenc#ripemd160" +|RsaOaepKeyWrap|"http://www.w3.org/2001/04/xmlenc#rsa-oaep" +|Aes128KW|"A128KW" +|Aes256KW|"A256KW" +|RsaPKCS1|"RSA1_5" +|RsaOAEP|"RSA-OAEP" +|ExclusiveC14n|"http://www.w3.org/2001/10/xml-exc-c14n#" +|ExclusiveC14nWithComments|"http://www.w3.org/2001/10/xml-exc-c14n#WithComments" +|EnvelopedSignature|"http://www.w3.org/2000/09/xmldsig#enveloped-signature" +|Sha256Digest|"http://www.w3.org/2001/04/xmlenc#sha256" +|Sha384Digest|"http://www.w3.org/2001/04/xmldsig-more#sha384" +|Sha512Digest|"http://www.w3.org/2001/04/xmlenc#sha512" +|Sha256|"SHA256" +|Sha384|"SHA384" +|Sha512|"SHA512" +|EcdsaSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha256" +|EcdsaSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha384" +|EcdsaSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha512" +|HmacSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha256" +|HmacSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha384" +|HmacSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha512" +|RsaSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha256" +|RsaSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha384" +|RsaSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha512" +|RsaSsaPssSha256Signature|"http://www.w3.org/2007/05/xmldsig-more#sha256-rsa-MGF1" 
+|RsaSsaPssSha384Signature|"http://www.w3.org/2007/05/xmldsig-more#sha384-rsa-MGF1" +|RsaSsaPssSha512Signature|"http://www.w3.org/2007/05/xmldsig-more#sha512-rsa-MGF1" +|EcdsaSha256|"ES256" +|EcdsaSha384|"ES384" +|EcdsaSha512|"ES512" +|HmacSha256|"HS256" +|HmacSha384|"HS384" +|HmacSha512|"HS512" +|None|"none" +|RsaSha256|"RS256" +|RsaSha384|"RS384" +|RsaSha512|"RS512" +|RsaSsaPssSha256|"PS256" +|RsaSsaPssSha384|"PS384" +|RsaSsaPssSha512|"PS512" +|Aes128CbcHmacSha256|"A128CBC-HS256" +|Aes192CbcHmacSha384|"A192CBC-HS384" +|Aes256CbcHmacSha512|"A256CBC-HS512" +|== == == == == == == == == == == =

+
+
+ + + + + +
+ + +Please check Microsoft documentation to get the latest updates on supported encryption algorithms +
+
+
+
    +
  • +

    Add this line of code (only if you use this component stand alone):

    +
  • +
+
+
+
+
services.AddBusinessCommonJwtPolicy();
+
+
+
+

On

+
+
+
+
Startup.cs
+
+
+
+

or on:

+
+
+
+
devon4Net.Application.Configuration.Startup/JwtApplicationConfiguration/ConfigureJwtPolicy method.
+
+
+
+
    +
  • +

    Inside the AddBusinessCommonJwtPolicy method you can add your JWT Policy like in My Thai Star application sample:

    +
  • +
+
+
+
+
 services.ConfigureJwtAddPolicy("MTSWaiterPolicy", "role", "waiter");
+
+
+
+
+
+

== Notes

+
+
+
    +
  • +

    The certificate will be used to generate the key to encrypt the json web token.

    +
  • +
+
+
+
+
+

devon4Net.Infrastructure.Middleware

+
+ +
+
+
+

== Description

+
+
+
    +
  • +

    devon4Net support for middleware classes.

    +
  • +
  • +

    In ASP.NET Core, middleware classes can handle an HTTP request or response. Middleware can either:

    +
    +
      +
    • +

      Handle an incoming HTTP request by generating an HTTP response.

      +
    • +
    • +

      Process an incoming HTTP request, modify it, and pass it on to another piece of middleware.

      +
    • +
    • +

      Process an outgoing HTTP response, modify it, and pass it on to either another piece of middleware, or the ASP.NET Core web server.

      +
    • +
    +
    +
  • +
  • +

    devon4Net supports the following automatic response headers:

    +
    +
      +
    • +

      AccessControlExposeHeader

      +
    • +
    • +

      StrictTransportSecurityHeader

      +
    • +
    • +

      XFrameOptionsHeader

      +
    • +
    • +

      XssProtectionHeader

      +
    • +
    • +

      XContentTypeOptionsHeader

      +
    • +
    • +

      ContentSecurityPolicyHeader

      +
    • +
    • +

      PermittedCrossDomainPoliciesHeader

      +
    • +
    • +

      ReferrerPolicyHeader:toc: macro

      +
    • +
    +
    +
  • +
+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.Middleware
    +
    +
    +
  • +
  • +

    You can configure your Middleware configuration on appsettings.{environment}.json:

    +
  • +
+
+
+
+
"Middleware": {
+    "Headers": {
+      "AccessControlExposeHeader": "Authorization",
+      "StrictTransportSecurityHeader": "",
+      "XFrameOptionsHeader": "DENY",
+      "XssProtectionHeader": "1;mode=block",
+      "XContentTypeOptionsHeader": "nosniff",
+      "ContentSecurityPolicyHeader": "",
+      "PermittedCrossDomainPoliciesHeader": "",
+      "ReferrerPolicyHeader": ""
+    }
+}
+
+
+
+
    +
  • +

    On the above sample, the server application will add to response header the AccessControlExposeHeader, XFrameOptionsHeader, XssProtectionHeader and XContentTypeOptionsHeader headers.

    +
  • +
  • +

    If the header response type does not have a value, it will not be added to the response headers.

    +
  • +
+
+
+
+
+

devon4Net.Infrastructure.Swagger

+
+ +
+
+
+

== Description

+
+
+
    +
  • +

    devon4net Swagger abstraction to provide full externalized easy configuration.

    +
  • +
  • +

    Swagger offers the easiest to use tools to take full advantage of all the capabilities of the OpenAPI Specification (OAS).

    +
  • +
+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> devon4Net.Infrastructure.Swagger
    +
    +
    +
  • +
  • +

    You can configure your Swagger configuration on appsettings.{environment}.json:

    +
  • +
+
+
+
+
"Swagger": {
+    "Version": "v1",
+    "Title": "devon4net API",
+    "Description": "devon4net API Contract",
+    "Terms": "https://www.devonfw.com/terms-of-use/",
+    "Contact": {
+      "Name": "devonfw",
+      "Email": "sample@mail.com",
+      "Url": "https://www.devonfw.com"
+    },
+    "License": {
+      "Name": "devonfw - Terms of Use",
+      "Url": "https://www.devonfw.com/terms-of-use/"
+    },
+    "Endpoint": {
+      "Name": "V1 Docs",
+      "Url": "/swagger/v1/swagger.json",
+      "UrlUi": "swagger",
+      "RouteTemplate": "swagger/v1/{documentName}/swagger.json"
+    }
+  }
+
+
+
+
    +
  • +

    Add this line of code (only if you use this component stand alone):

    +
  • +
+
+
+
+
services.ConfigureSwaggerService();
+
+
+
+

On

+
+
+
+
Startup.cs
+
+
+
+
    +
  • +

    Also add this line of code (only if you use this component stand alone):

    +
  • +
+
+
+
+
app.ConfigureSwaggerApplication();
+
+
+
+

On

+
+
+
+
Startup.cs/Configure(IApplicationBuilder app, IHostingEnvironment env)
+
+
+
+
    +
  • +

    Ensure your API actions and non-route parameters are decorated with explicit "Http" and "From" bindings.

    +
  • +
+
+
+
+
+

== Notes

+
+
+
    +
  • +

    To access the Swagger UI, launch your API project and type the URL http://localhost:yourPort/swagger into your HTML browser.

    +
  • +
  • +

    In order to generate the documentation annotate your actions with summary, remarks and response tags:

    +
  • +
+
+
+
+
/// <summary>
+/// Method to make a reservation with potential guests. The method returns the reservation token with the format: {(CB_|GB_)}{now.Year}{now.Month:00}{now.Day:00}{_}{MD5({Host/Guest-email}{now.Year}{now.Month:00}{now.Day:00}{now.Hour:00}{now.Minute:00}{now.Second:00})}
+/// </summary>
+/// <param name="bookingDto"></param>
+/// <response code="201">Ok.</response>
+/// <response code="400">Bad request. Parser data error.</response>
+/// <response code="401">Unauthorized. Authentication fail.</response>
+/// <response code="403">Forbidden. Authorization error.</response>
+/// <response code="500">Internal Server Error. The search process ended with error.</response>
+[HttpPost]
+[HttpOptions]
+[Route("/mythaistar/services/rest/bookingmanagement/v1/booking")]
+[AllowAnonymous]
+[EnableCors("CorsPolicy")]
+public async Task<IActionResult> Booking([FromBody]BookingDto bookingDto)
+{
+    try
+    {
+
+    ...
+
+
+
+
    +
  • +

    Ensure that your project has the generate XML documentation file check active on the build menu:

    +
  • +
+
+
+
+Generate documentation XML check +
+
Figure 2. Swagger documentation
+
+
+
    +
  • +

    Ensure that your XML files have the attribute copy always set to true:

    +
  • +
+
+
+
+Generate documentation XML check +
+
Figure 3. Swagger documentation
+
+
+
+
+

devon4Net.Infrastructure.Test

+
+ +
+
+
+

== Description

+
+
+

devon4Net Base classes to create unit tests and integration tests with Moq and xUnit.

+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Load the template: +> dotnet new -i devon4Net.Test.Template +> dotnet new devon4NetTest

    +
  • +
+
+
+
+
+

== Notes

+
+
+
    +
  • +

    At this point you can find these classes:

    +
    +
      +
    • +

      BaseManagementTest

      +
    • +
    • +

      DatabaseManagementTest<T> (Where T is a devon4NetBaseContext class)

      +
    • +
    +
    +
  • +
  • +

    For unit testing, inherit a class from BaseManagementTest.

    +
  • +
  • +

    For integration tests, inherit a class from DatabaseManagementTest.

    +
  • +
  • +

    The recommended databases for integration tests are the in-memory database or the SQLite database.

    +
  • +
  • +

    Please check the My Thai Star test project.

    +
  • +
+
+
+
+
+

Deprecated packages

+
+ +
+
+
+

devon4Net.Domain.Context

+
+ +
+
+
+

== Description

+
+
+

devon4Net.Domain.Context contains the extended class devon4NetBaseContext in order to make easier the process of having a model context configured against different database engines. This configuration allows an easier testing configuration against local and in memory databases.

+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Domain.Context
    +
    +
    +
  • +
  • +

    Add to appsettings.{environment}.json file your database connections:

    +
  • +
+
+
+
+
"ConnectionStrings":
+{
+"DefaultConnection":
+"Server=localhost;Database=MyThaiStar;User Id=sa;Password=sa;MultipleActiveResultSets=True;",
+
+"AuthConnection":
+"Server=(localdb)\\mssqllocaldb;Database=aspnet-DualAuthCore-5E206A0B-D4DA-4E71-92D3-87FD6B120C5E;Trusted_Connection=True;MultipleActiveResultSets=true",
+
+"SqliteConnection": "Data Source=c:\\tmp\\membership.db;"
+}
+
+
+
+
    +
  • +

    On Startup.cs :

    +
  • +
+
+
+
+
void ConfigureServices(IServiceCollection services)
+
+
+
+
    +
  • +

    Add your database connections defined on previous point:

    +
  • +
+
+
+
+
services.ConfigureDataBase(
+new Dictionary<string, string> {
+{ConfigurationConst.DefaultConnection, Configuration.GetConnectionString(ConfigurationConst.DefaultConnection) }});
+
+
+
+
    +
  • +

    On devon4Net.Application.Configuration.Startup/DataBaseConfiguration/ConfigureDataBase configure your connections.

    +
  • +
+
+
+
+
+

devon4Net.Infrastructure.ApplicationUser

+
+ +
+
+
+

== Description

+
+
+

devon4Net application user classes implementing Microsoft’s basic authentication, in order to be used with authentication methodologies such as JSON Web Token (JWT).

+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> devon4Net.Infrastructure.ApplicationUser
    +
    +
    +
  • +
  • +

    Add the database connection string for user management on appsettings.{environment}.json:

    +
  • +
+
+
+
+
"ConnectionStrings":
+{
+"AuthConnection":
+"Server=(localdb)\\mssqllocaldb;Database=aspnet-DualAuthCore-5E206A0B-D4DA-4E71-92D3-87FD6B120C5E;Trusted_Connection=True;MultipleActiveResultSets=true"
+}
+
+
+
+
    +
  • +

    Add the following line of code

    +
  • +
+
+
+
+
services.AddApplicationUserDependencyInjection();
+
+
+
+

On

+
+
+
+
Startup.cs/ConfigureServices(IServiceCollection services)
+
+
+
+

or on:

+
+
+
+
devon4Net.Application.Configuration.Startup/DependencyInjectionConfiguration/ConfigureDependencyInjectionService method.
+
+
+
+
    +
  • +

    Add the data seeder on the Configure method of the Startup.cs class:

    +
  • +
+
+
+
+
public void Configure(IApplicationBuilder app, IHostingEnvironment env, DataSeeder seeder)
+{
+    ...
+
+    app.UseAuthentication();
+    seeder.SeedAsync().Wait();
+
+    ...
+}
+
+
+
+
+
+

== Notes

+
+
+
    +
  • +

    You can use the following methods to set up the database configuration:

    +
  • +
+
+
+
+
public static void AddApplicationUserDbContextInMemoryService(this IServiceCollection services)
+
+public static void AddApplicationUserDbContextSQliteService(this IServiceCollection services, string connectionString)
+
+public static void AddApplicationUserDbContextSQlServerService(this IServiceCollection services, string connectionString)
+
+
+
+
    +
  • +

    The method AddApplicationUserDbContextInMemoryService uses the AuthContext connection string name to set up the database.

    +
  • +
  • +

    This component is used with the components devon4Net.Infrastructure.JWT and devon4Net.Infrastructure.JWT.MVC.

    +
  • +
+
+
+
+
+

devon4Net.Infrastructure.Communication

+
+ +
+
+
+

== Description

+
+
+

Basic client classes to invoke GET/POST methods asynchronously. This component has the minimal classes to send basic data. For more complex operations please use devon4Net.Infrastructure.Extensions.

+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> devon4Net.Infrastructure.Communication
    +
    +
    +
  • +
  • +

    Create an instance of RestManagementService class.

    +
  • +
  • +

    Use next methods to use GET/POST basic options:

    +
  • +
+
+
+
+
public Task<string> CallGetMethod(string url);
+public Task<Stream> CallGetMethodAsStream(string url);
+public Task<string> CallPostMethod<T>(string url, T dataToSend);
+public Task<string> CallPutMethod<T>(string url, T dataToSend);
+
+
+
+
+
+

== Notes

+
+
+
    +
  • +

    Example:

    +
  • +
+
+
+
+
private async Task RestManagementServiceSample(EmailDto dataToSend)
+{
+    var url = Configuration["EmailServiceUrl"];
+    var restManagementService = new RestManagementService();
+    await restManagementService.CallPostMethod(url, dataToSend);
+}
+
+
+
+
+
+

devon4Net.Infrastructure.JWT.MVC

+
+ +
+
+
+

== Description

+
+
+
    +
  • +

    devon4Net Extended controller to interact with JWT features

    +
  • +
+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Extend your Microsoft.AspNetCore.Mvc.Controller class with the devon4NetJWTController class:

    +
  • +
+
+
+
+
public class LoginController : devon4NetJWTController
+{
+    private readonly ILoginService _loginService;
+
+    public LoginController(ILoginService loginService,  SignInManager<ApplicationUser>  signInManager, UserManager<ApplicationUser> userManager, ILogger<LoginController> logger, IMapper mapper) : base(logger,mapper)
+    {
+        _loginService = loginService;
+    }
+
+    ....
+
+
+
+
+
+

== Notes

+
+
+
    +
  • +

    In order to generate a JWT, you should implement the JWT generation on user login. For example, in My Thai Star is created as follows:

    +
  • +
+
+
+
+
public async Task<IActionResult> Login([FromBody]LoginDto loginDto)
+{
+    try
+    {
+        if (loginDto ==  null) return Ok();
+        var logged = await _loginService.LoginAsync(loginDto.UserName, loginDto.Password);
+
+        if (logged)
+        {
+            var user = await _loginService.GetUserByUserNameAsync(loginDto.UserName);
+
+            var encodedJwt = new JwtClientToken().CreateClientToken(_loginService.GetUserClaimsAsync(user));
+
+            Response.Headers.Add("Access-Control-Expose-Headers", "Authorization");
+
+            Response.Headers.Add("Authorization", $"{JwtBearerDefaults.AuthenticationScheme} {encodedJwt}");
+
+            return Ok(encodedJwt);
+        }
+        else
+        {
+            Response.Headers.Clear();
+            return StatusCode((int)HttpStatusCode.Unauthorized, "Login Error");
+        }
+
+    }
+    catch (Exception ex)
+    {
+        return StatusCode((int)HttpStatusCode.InternalServerError, $"{ex.Message} : {ex.InnerException}");
+    }
+}
+
+
+
+
    +
  • +

    In My Thai Star the JWT will contain the user information such as id, roles, etc.

    +
  • +
  • +

    Once you extend your controller with devon4NetJWTController you will have available these methods to simplify user management:

    +
  • +
+
+
+
+
    public interface Idevon4NetJWTController
+    {
+        // Gets the current user
+        JwtSecurityToken GetCurrentUser();
+
+        // Gets an specific assigned claim of current user
+        Claim GetUserClaim(string claimName, JwtSecurityToken jwtUser = null);
+
+        // Gets all the assigned claims of current user
+        IEnumerable<Claim> GetUserClaims(JwtSecurityToken jwtUser = null);
+    }
+
+
+
+
+
+

devon4Net.Infrastructure.MVC

+
+ +
+
+
+

== Description

+
+
+

Common classes to extend controller functionality on API. Also provides support for paged results in devon4Net applications and automapper injected class.

+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> devon4Net.Infrastructure.MVC
    +
    +
    +
  • +
+
+
+
+
+

== Notes

+
+
+
    +
  • +

    The generic class ResultObjectDto<T> provides a typed result object with pagination.

    +
  • +
  • +

    The extended class provides the following methods:

    +
  • +
+
+
+
+
        ResultObjectDto<T> GenerateResultDto<T>(int? page, int? size, int? total);
+        ResultObjectDto<T> GenerateResultDto<T>(List<T> result, int? page = null, int? size = null);
+
+
+
+
    +
  • +

    GenerateResultDto provides a typed ResultObjectDto object or a list of typed ResultObjectDto objects. The aim of these methods is to provide clean management of result objects and to avoid repeating code through the different controller classes.

    +
  • +
  • +

    The following sample from My Thai Star shows how to use it:

    +
  • +
+
+
+
+
public async Task<IActionResult> Search([FromBody] FilterDtoSearchObject filterDto)
+{
+    if (filterDto ==  null) filterDto = new FilterDtoSearchObject();
+
+    try
+    {
+        var dishList = await _dishService.GetDishListFromFilter(false, filterDto.GetMaxPrice(), filterDto.GetMinLikes(), filterDto.GetSearchBy(),filterDto.GetCategories(), -1);
+
+
+        return new OkObjectResult(GenerateResultDto(dishList).ToJson());
+    }
+    catch (Exception ex)
+    {
+        return StatusCode((int)HttpStatusCode.InternalServerError, $"{ex.Message} : {ex.InnerException}");
+    }
+}
+
+
+
+
+
+

devon4Net.Infrastructure.AOP

+
+ +
+
+
+

== Description

+
+
+

Simple AOP Exception handler for .Net Controller classes integrated with Serilog.

+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Domain.AOP
    +
    +
    +
  • +
+
+
+

Add this line of code on ConfigureServices method on Startup.cs

+
+
+
+
services.AddAopAttributeService();
+
+
+
+
+
+

== Notes

+
+
+

Now the API methods exposed on your controller classes will automatically be tracked on the following methods:

+
+
+
    +
  • +

    OnActionExecuting

    +
  • +
  • +

    OnActionExecuted

    +
  • +
  • +

    OnResultExecuting

    +
  • +
  • +

    OnResultExecuted

    +
  • +
+
+
+

If an exception occurs, a message will be displayed on log with the stack trace.

+
+
+
+
+

devon4Net.Infrastructure.Cors

+
+ +
+
+
+

== Description

+
+
+

Enables CORS configuration for devon4Net applications. Multiple domains can be configured from configuration. Mandatory for web clients (e.g. Angular), which are otherwise prevented from making AJAX requests to another domain.

+
+
+

Cross-Origin Resource Sharing (CORS) is a mechanism that uses additional HTTP headers to tell a browser to let a web application running at one origin (domain) have permission to access selected resources from a server at a different origin. A web application makes a cross-origin HTTP request when it requests a resource that has a different origin (domain, protocol, and port) than its own origin.

+
+
+

Please refer to this link to get more information about CORS and .Net core.

+
+
+
+
+

== Configuration

+
+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> devon4Net.Infrastructure.Cors
    +
    +
    +
  • +
  • +

    You can configure your Cors configuration on appsettings.{environment}.json:

    +
    +
    +
    `CorsPolicy`: indicates the name of the policy. You can use this name to add security headers on your API exposed methods.
    +
    +
    +
    +
    +
    Origins: The allowed domains
    +
    +
    +
    +
    +
    Headers: The allowed headers such accept,content-type,origin,x-custom-header
    +
    +
    +
  • +
  • +

    If you specify the cors configuration as empty array, a default cors-policy will be used with all origins enabled:

    +
  • +
+
+
+
+
  "Cors": []
+
+
+
+
    +
  • +

    On the other hand, you can specify different Cors policies in your solution as follows:

    +
  • +
+
+
+
+
"Cors": []
+[
+  {
+    "CorsPolicy": "CorsPolicy1",
+    "Origins": "http:example.com,http:www.contoso.com",
+    "Headers": "accept,content-type,origin,x-custom-header",
+    "Methods": "GET,POST,HEAD",
+    "AllowCredentials": true
+  },
+  {
+    "CorsPolicy": "CorsPolicy2",
+    "Origins": "http:example.com,http:www.contoso.com",
+    "Headers": "accept,content-type,origin,x-custom-header",
+    "Methods": "GET,POST,HEAD",
+    "AllowCredentials": true
+  }
+]
+
+
+
+
+
+

== Notes

+
+
+
    +
  • +

    To use CORS in your API methods, use the next notation:

    +
  • +
+
+
+
+
[EnableCors("YourCorsPolicy")]
+public IActionResult Index() {
+    return View();
+}
+
+
+
+
    +
  • +

    if you want to disable the CORS check use the following annotation:

    +
  • +
+
+
+
+
[DisableCors]
+public IActionResult Index() {
+    return View();
+}
+
+
+
+
+
+

Required software

+ +
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4net.wiki/samples.html b/docs/devonfw.github.io/1.0/devon4net.wiki/samples.html new file mode 100644 index 00000000..cabc0968 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4net.wiki/samples.html @@ -0,0 +1,866 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Samples

+
+ +
+
+
+

My Thai Star Restaurant

+
+ +
+
+ +
+
+
+
+
+

Angular requirements

+
+
+ +
+
+
+
+

Angular client

+
+
+
    +
  1. +

    Install Node.js LTS version

    +
  2. +
  3. +

    Install Angular CLI from command line:

    +
    +
      +
    • +

      npm install -g @angular/cli

      +
    • +
    +
    +
  4. +
  5. +

    Install Yarn

    +
  6. +
  7. +

    Go to Angular client from command line

    +
  8. +
  9. +

    Execute : yarn install

    +
  10. +
  11. +

    Launch the app from command line: ng serve and check http://localhost:4200

    +
  12. +
  13. +

    You are ready

    +
  14. +
+
+
+
+
+

.Net Core server

+
+ +
+
+
+

== Basic architecture details

+
+
+

Following the devonfw conventions, the .Net Core 2.0 My Thai Star backend is going to be developed dividing the application into Components and using an n-layer architecture.

+
+
+
+project modules +
+
+
+
+
+

== Components

+
+
+

The application is going to be divided in different components to encapsulate the different domains of the application functionalities.

+
+
+
+mtsn components +
+
+
+

As main components we will find:

+
+
+
    +
  • +

    BookingService: Manages the bookings part of the application. With this component the users (anonymous/logged in) can create new bookings or cancel an existing booking. The users with waiter role can see all scheduled bookings.

    +
  • +
+
+
+

OrderService: This component handles the process to order dishes (related to bookings). A user (as a host or as a guest) can create orders (that contain dishes) or cancel an existing one. The users with waiter role can see all ordered orders.

+
+
+
    +
  • +

    DishService: This component groups the logic related to the menu (dishes) view. Its main feature is to provide the client with the data of the available dishes but also can be used by other components (Ordermanagement) as a data provider in some processes.

    +
  • +
  • +

    UserService: Takes care of the User Profile management, allowing to create and update the data profiles.

    +
  • +
+
+
+

As common components (that don’t exactly represent an application’s area but provide functionalities that can be used by the main components):

+
+
+
    +
  • +

    Mailservice: with this service we will provide the functionality for sending email notifications. This is a shared service between different app components such as bookingmanagement or ordercomponent.

    +
  • +
+
+
+

Other components:

+
+
+
    +
  • +

    Security (will manage the access to the private part of the application using a jwt implementation).

    +
  • +
  • +

    Twitter integration: planned as a Microservice will provide the twitter integration needed for some specific functionalities of the application.

    +
  • +
+
+
+
+
+

Layers

+
+ +
+
+
+

== Introduction

+
+
+

The .Net Core backend for My Thai Star application is going to be based on:

+
+
+
    +
  • +

    devon4NET as the .Net Core framework

    +
  • +
  • +

    VSCode as the Development environment

    +
  • +
  • +

    TOBAGO as code generation tool

    +
  • +
+
+
+
+
+

== Application layer

+
+
+

This layer will expose the REST api to exchange information with the client applications.

+
+
+

The application will expose the services on port 8081 and it can be launched as a self host console application (microservice approach) and as a Web Api application hosted on IIS/IIS Express.

+
+
+
+
+

== Business layer

+
+
+

This layer will define the controllers which will be used on the application layer to expose the different services. Also, it will define the Swagger contract, making use of summary comments and framework attributes.

+
+
+

This layer also includes the object response classes in order to interact with external clients.

+
+
+
+
+

== Service layer

+
+
+

The layer in charge of hosting the business logic of the application. Also orchestrates the object conversion between object response and entity objects defined in Data layer.

+
+
+
+
+

== Data layer

+
+
+

The layer to communicate with the data base.

+
+
+

Data layer makes use of Entity Framework. +The Database context is defined on DataAccessLayer assembly (ModelContext).

+
+
+

This layer makes use of the Repository pattern and Unit of work in order to encapsulate the complexity. Making use of this combined patterns we ensure an organized and easy work model.

+
+
+

As in the previous layers, the data access layer will have both interface and implementation tiers. However, in this case, the implementation will be slightly different due to the use of generics.

+
+
+
+
+

== Cross-Cutting concerns

+
+
+

The layer that makes use of transversal components such as JWT and mailing.

+
+
+
+
+

Jwt basics

+
+
+
    +
  • +

    A user will provide a username / password combination to our auth server.

    +
  • +
  • +

    The auth server will try to identify the user and, if the credentials match, will issue a token.

    +
  • +
  • +

    The user will send the token as the Authorization header to access resources on server protected by JWT Authentication.

    +
  • +
+
+
+
+jwt schema +
+
+
+
+
+

Jwt implementation details

+
+
+

The Json Web Token pattern will be implemented based on the jwt on .net core framework that is provided by default in the devon4Net projects.

+
+
+
+
+

Authentication

+
+
+

Based on Microsoft approach, we will implement a class to define the security entry point and filters. Also, as My Thai Star is a mainly public application, we will define here the resources that won’t be secured.

+
+
+

A subset of Microsoft’s authorization schema database is defined in the devon4Net.Infrastructure.JWT assembly. It is started up the first time the application launches.

+
+
+

You can read more about Authorization on:

+
+ + +
+
+
+

Dependency injection

+
+
+

As it is explained in the Microsoft documentation we are going to implement the dependency injection pattern basing our solution on .Net Core.

+
+
+
+dependency injection +
+
+
+
    +
  • +

    Separation of API and implementation: Inside each layer we will separate the elements in different tiers: interface and implementation. The interface tier will store the interface with the methods definition and inside the implementation we will store the class that implements the interface.

    +
  • +
+
+
+
+
+

Layer communication method

+
+
+

The connection between layers, to access to the functionalities of each one, will be solved using the dependency injection.

+
+
+
+layer impl +
+
+
+

Connection BookingService - Logic

+
+
+
+
 public class BookingService : EntityService<Booking>, IBookingService
+    {
+        private readonly IBookingRepository _bookingRepository;
+        private readonly IRepository<Order> _orderRepository;
+        private readonly IRepository<InvitedGuest> _invitedGuestRepository;
+        private readonly IOrderLineRepository _orderLineRepository;
+        private readonly IUnitOfWork _unitOfWork;
+
+        public BookingService(IUnitOfWork unitOfWork,
+            IBookingRepository repository,
+            IRepository<Order> orderRepository,
+            IRepository<InvitedGuest> invitedGuestRepository,
+            IOrderLineRepository orderLineRepository) : base(unitOfWork, repository)
+        {
+            _unitOfWork = unitOfWork;
+            _bookingRepository = repository;
+            _orderRepository = orderRepository;
+            _invitedGuestRepository = invitedGuestRepository;
+            _orderLineRepository = orderLineRepository;
+        }
+}
+
+
+
+

To give service to the defined User Stories we will need to implement the following services:

+
+
+
    +
  • +

    provide all available dishes.

    +
  • +
  • +

    save a booking.

    +
  • +
  • +

    save an order.

    +
  • +
  • +

    provide a list of bookings (only for waiters) and allow filtering.

    +
  • +
  • +

    provide a list of orders (only for waiters) and allow filtering.

    +
  • +
  • +

    login service (see the Security section).

    +
  • +
  • +

    provide the current user data (see the Security section)

    +
  • +
+
+
+

Following the [naming conventions] proposed for devon4Net applications we will define the following end points for the listed services.

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that do not belong to the Order entity).

    +
  • +
  • +

    (POST) /mythaistar/login.

    +
  • +
  • +

    (GET) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

You can find all the details for the services implementation in the Swagger definition included in the My Thai Star project on Github.

+
+
+
+
+

Api Exposed

+
+
+

The devon4Net.Business.Controller assembly in the business layer of a component will store the definition of the service by an interface. In this definition of the service we will set up the endpoints of the service, the type of data expected and returned, the HTTP method for each endpoint of the service and other configurations if needed.

+
+
+
+
        /// <summary>
+        /// Method to make a reservation with potential guests. The method returns the reservation token with the format: {(CB_|GB_)}{now.Year}{now.Month:00}{now.Day:00}{_}{MD5({Host/Guest-email}{now.Year}{now.Month:00}{now.Day:00}{now.Hour:00}{now.Minute:00}{now.Second:00})}
+        /// </summary>
+
+        /// <param name="bookingView"></param>
+        /// <response code="201">Ok.</response>
+        /// <response code="400">Bad request. Parser data error.</response>
+        /// <response code="401">Unauthorized. Authentication fail.</response>
+        /// <response code="403">Forbidden. Authorization error.</response>
+        /// <response code="500">Internal Server Error. The search process ended with error.</response>
+        [HttpPost]
+        [HttpOptions]
+        [Route("/mythaistar/services/rest/bookingmanagement/v1/booking")]
+        [AllowAnonymous]
+        [EnableCors("CorsPolicy")]
+        public IActionResult BookingBooking([FromBody]BookingView bookingView)
+        {
+...
+
+
+
+

Using the summary annotations and attributes will tell Swagger the contract via the XML doc generated at compile time. This doc will be stored in the XmlDocumentation folder.

+
+
+

The Api methods will be exposed on the application layer.

+
+
+
+
+

Google Mail API Consumer

+
+ +
+

|== == == == == == == == == == == = +|Application| MyThaiStarEmailService.exe +|Config file| MyThaiStarEmailService.exe.Config +|Default port|8080 +|== == == == == == == == == == == =

+
+
+
+
+

Overview

+
+
+
    +
  1. +

    Execute MyThaiStarEmailService.exe.

    +
  2. +
  3. +

    The first time google will ask you for credentials +(just one time) in your default browser:

    +
    + +
    +
  4. +
  5. +

    Visit the url: http://localhost:8080/swagger

    +
  6. +
  7. +

    Your server is ready!

    +
  8. +
+
+
+
+GMail Service +
+
Figure 1. GMail Server Swagger contract page
+
+
+
+
+

JSON Example

+
+
+

This is the JSON example to test with swagger client. Please read the swagger documentation.

+
+
+
+
{
+   "EmailFrom":"mythaistarrestaurant@gmail.com",
+   "EmailAndTokenTo":{
+      "MD5Token1":" Email_Here!@gmail.com",
+      "MD5Token2":" Email_Here!@gmail.com"
+   },
+   "EmailType":0,
+   "DetailMenu":[
+      "Thai Spicy Basil Fried Rice x2",
+      "Thai green chicken curry x2"
+   ],
+   "BookingDate":"2017-05-31T12:53:39.7864723+02:00",
+   "Assistants":2,
+   "BookingToken":"MD5Booking",
+   "Price":20.0,
+   "ButtonActionList":{
+      "http://accept.url":"Accept",
+      "http://cancel.url":"Cancel"
+   },
+   "Host":{
+      " Email_Here!@gmail.com":"José Manuel"
+   }
+}
+
+
+
+
+
+

Configure the service port

+
+
+

If you want to change the default port, please edit the config file and +change the next entry in appSettings node:

+
+
+
+
<appSettings>
+   <add key="LocalListenPort" value="8080" />
+</appSettings>
+
+
+
+
+
+ + +
+ +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4net.wiki/templates.html b/docs/devonfw.github.io/1.0/devon4net.wiki/templates.html new file mode 100644 index 00000000..0f29ff89 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4net.wiki/templates.html @@ -0,0 +1,416 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

==Templates

+
+
+

Overview

+
+
+

The given .Net Core and .Net Framework templates allow you to start coding an application with the following functionality ready to use:

+
+
+

Please refer to User guide in order to start developing.

+
+
+
+
+

Net Core 3.0

+
+
+

The .Net Core 3.0 template allows you to start developing an n-layer server application to provide the latest features. The template can be used in Visual Studio Code and Visual Studio 2019.

+
+
+

The application result can be deployed as a console application, microservice or web page.

+
+
+

To start developing with the devon4Net template, please follow these instructions:

+
+
+

Using devon4Net template

+ +
+
+

Option 1

+
+
    +
  1. +

    Open your favourite terminal (Win/Linux/iOS)

    +
  2. +
  3. +

    Go to future project’s path

    +
  4. +
  5. +

    Type dotnet new --install Devon4Net.WebAPI.Template

    +
  6. +
  7. +

    Type dotnet new Devon4NetAPI

    +
  8. +
  9. +

    Go to project’s path

    +
  10. +
  11. +

    You are ready to start developing with devon4Net

    +
  12. +
+
+
+
+

Option 2

+
+
    +
  1. +

    Create a new dotnet API project from scratch

    +
  2. +
  3. +

    Add the NuGet package reference to your project

    +
  4. +
  5. +

    Type dotnet new --install Devon4Net.WebAPI.Template

    +
  6. +
+
+
+
+
+
+

Net Core 2.1.x

+
+
+

The .Net Core 2.1.x template allows you to start developing an n-layer server application to provide the latest features. The template can be used in Visual Studio Code and Visual Studio 2017.

+
+
+

The application result can be deployed as a console application, microservice or web page.

+
+
+

To start developing with the devon4Net template, please follow these instructions:

+
+
+

Using devon4Net template

+
+
    +
  1. +

    Open your favourite terminal (Win/Linux/iOS)

    +
  2. +
  3. +

    Go to future project’s path

    +
  4. +
  5. +

    Type dotnet new --install Devon4Net.WebAPI.Template::1.0.8

    +
  6. +
  7. +

    Type dotnet new Devon4NetAPI

    +
  8. +
  9. +

    Go to project’s path

    +
  10. +
  11. +

    You are ready to start developing with devon4Net

    +
  12. +
+
+
+ + + + + +
+ + +For the latest updates on references packages, please get the sources from Github +
+
+
+
+
+
+ + +
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4net.wiki/userguide.html b/docs/devonfw.github.io/1.0/devon4net.wiki/userguide.html new file mode 100644 index 00000000..52592d80 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4net.wiki/userguide.html @@ -0,0 +1,1529 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+
+technical architecture +
+
+
+

devon4net Guide

+
+ +
+
+
+

Introduction

+
+
+

Welcome to devon4net framework user guide. In this document you will find the information regarding how to start and deploy your project using the guidelines proposed in our solution.

+
+
+

All the guidelines shown and used in this document are a set of rules and conventions proposed and supported by Microsoft and the industry.

+
+
+
+
+

The package

+
+
+

Devon4Net package solution contains:

+
+
+

|== == == == == == == == == == == = +|File / Folder|Content +|Documentation| User documentation in HTML format +|Modules| Contains the source code of the different devon4net modules +|Samples| Different samples implemented in .NET and .NET Core. Also includes My Thai Star Devon flagship restaurant application +|Templates| Main .net Core template to start developing from scratch +|License| License agreement +|README.md| Github main page +|TERMS_OF_USE.adoc| The devon4net terms of use +|LICENSE| The devon license +|Other files| Such the code of conduct and contributing guide +|== == == == == == == == == == == =

+
+
+
+
+

Application templates

+
+
+

The application templates given in the bundle are ready to use.

+
+
+

At the moment .net Core template is supported. The template is ready to be used as a simple console Kestrel application or being deployed in a web server like IIS.

+
+
+
+
+

Samples

+
+ +
+
+
+

== My Thai Star

+
+
+

You can find My Thai Star .NET port application at Github.

+
+
+ + + + + +
+ + +As devon4net has been migrated to the latest version of .net core, the template is not finished yet. +
+
+
+
+
+

Cookbook

+
+ +
+
+
+

Data management

+
+
+

To use Entity Framework Core, install the package for the database provider(s) you want to target. This walk-through uses SQL Server.

+
+
+

For a list of available providers see Database Providers

+
+
+
    +
  • +

    Go to Tools > NuGet Package Manager > Package Manager Console

    +
  • +
  • +

    Run Install-Package Microsoft.EntityFrameworkCore.SqlServer

    +
  • +
+
+
+

We will be using some Entity Framework Tools to create a model from the database. So we will install the tools package as well:

+
+
+
    +
  • +

    Run Install-Package Microsoft.EntityFrameworkCore.Tools

    +
  • +
+
+
+

We will be using some ASP.NET Core Scaffolding tools to create controllers and views later on. So we will install this design package as well:

+
+
+
    +
  • +

    Run Install-Package Microsoft.VisualStudio.Web.CodeGeneration.Design

    +
  • +
+
+
+
+
+

== Entity Framework Code first

+
+
+

In order to design your database model from scratch, we encourage you to follow the Microsoft guidelines described here.

+
+
+
+
+

== Entity Framework Database first

+
+
+
    +
  • +

    Go to Tools > NuGet Package Manager > Package Manager Console

    +
  • +
  • +

    Run the following command to create a model from the existing database:

    +
  • +
+
+
+
+
Scaffold-DbContext "Your connection string to existing database" Microsoft.EntityFrameworkCore.SqlServer -OutputDir Models
+
+
+
+

The command will create the database context and the mapped entities as well inside of Models folder.

+
+
+
+
+

== Register your context with dependency injection

+
+
+

Services are registered with dependency injection during application startup.

+
+
+

In order to register your database context (or multiple database context as well) you can add the following line at ConfigureDbService method at startup.cs:

+
+
+
+
       private void SetupDatabase(IServiceCollection services)
+        {
+            services.SetupDatabase<TodoContext>(Configuration, "Default", WebAPI.Configuration.Enums.DatabaseType.InMemory);
+        }
+
+
+
+

Where:

+
+
+

|== == == == == == == == == == == = +|Param|Description +|TodoContext| Is the database context definition +|Default| Is the connection string defined at ConnectionString node at the appsettings configuration file +|WebAPI.Configuration.Enums.DatabaseType.InMemory| Is the database driver selection. In this case InMemory data base is chosen +|== == == == == == == == == == == =

+
+
+

The supported databases are:

+
+
+
    +
  • +

    SqlServer

    +
  • +
  • +

    Sqlite

    +
  • +
  • +

    InMemory

    +
  • +
  • +

    Cosmos

    +
  • +
  • +

    PostgreSQL

    +
  • +
  • +

    MySql

    +
  • +
  • +

    MariaDb

    +
  • +
  • +

    FireBird

    +
  • +
  • +

    Oracle

    +
  • +
  • +

    MSAccess

    +
  • +
+
+
+
+
+

Repositories and Services

+
+
+

Services and Repositories are an important part of the devon4net proposal. To make them work properly, they must first be declared and injected in Startup.cs at the DI region.

+
+
+

Services are declared in devon4net.Business.Common and injected in Controller classes when needed. Use services to build your application logic.

+
+
+
+technical architecture +
+
Figure 1. Screenshot of devon4net.Business.Common project in depth
+
+
+

For example, My Thai Star Booking controller constructor looks like this:

+
+
+
+
        public BookingController(IBookingService bookingService, IMapper mapper)
+        {
+            BookingService = bookingService;
+            Mapper = mapper;
+
+        }
+
+
+
+

Currently devon4net has a Unit of Work class in order to perform CRUD operations to database making use of your designed model context.

+
+
+

Repositories are declared at devon4net.Domain.UnitOfWork project and make use of Unit of Work class.

+
+
+

The common methods to perform CRUD operations (where <T> is an entity from your model) are:

+
+
+
    +
  • +

    Sync methods:

    +
  • +
+
+
+
+
IList<T> GetAll(Expression<Func<T, bool>> predicate = null);
+T Get(Expression<Func<T, bool>> predicate = null);
+IList<T> GetAllInclude(IList<string> include, Expression<Func<T, bool>> predicate = null);
+T Create(T entity);
+void Delete(T entity);
+void DeleteById(object id);
+void Delete(Expression<Func<T, bool>> where);
+void Edit(T entity);
+
+
+
+
    +
  • +

    Async methods:

    +
  • +
+
+
+
+
Task<IList<T>> GetAllAsync(Expression<Func<T, bool>> predicate = null);
+Task<T> GetAsync(Expression<Func<T, bool>> predicate = null);
+Task<IList<T>> GetAllIncludeAsync(IList<string> include, Expression<Func<T, bool>> predicate = null);
+
+
+
+

If you perform a Commit operation and an error happens, changes will be rolled back.

+
+
+
+
+

Swagger integration

+
+
+

The given templates allow you to specify the API contract through Swagger integration, and the controller classes are responsible for exposing methods making use of comments in the source code.

+
+
+

The next example shows how to comment the method with summaries in order to define the contract. Add (Triple Slash) XML Documentation To Swagger:

+
+
+
+
/// <summary>
+/// Method to get reservations
+/// </summary>
+/// <response code="201">Ok.</response>
+/// <response code="400">Bad request. Parser data error.</response>
+/// <response code="401">Unauthorized. Authentication fail.</response>
+/// <response code="403">Forbidden. Authorization error.</response>
+/// <response code="500">Internal Server Error. The search process ended with error.</response>
+[HttpPost]
+[Route("/mythaistar/services/rest/bookingmanagement/v1/booking/search")]
+//[Authorize(Policy = "MTSWaiterPolicy")]
+[AllowAnonymous]
+[EnableCors("CorsPolicy")]
+public async Task<IActionResult> BookingSearch([FromBody]BookingSearchDto bookingSearchDto)
+{
+
+
+
+

In order to be effective and make use of the comments to build the API contract, the project which contains the controller classes must generate the XML document file. To achieve this, the XML documentation file must be checked in project settings tab:

+
+
+
+technical architecture +
+
Figure 2. Project settings tab
+
+
+

We propose to generate the file under the XmlDocumentation folder. For example in devon4net.Domain.Entities project in My Thai Star .NET implementation the output folder is:

+
+
+
+
`XmlDocumentation\devon4net.Business.Common.xml`
+
+
+
+

The file devon4net.Business.Common.xml won’t appear until you build the project. Once the file is generated, please modify its properties as a resource and set it to be Copy always .

+
+
+
+technical architecture +
+
Figure 3. Swagger XML document file properties
+
+
+

Once you have this, the swagger user interface will show the method properties defined in your controller comments.

+
+
+

Making use of this technique, controllers are not coupled to the application project. Also, you can develop your controller classes in different projects to obtain code reusability.

+
+
+

Swagger comment:

+
+
+

|== == == == == == == == == == == = +|Comment|Functionality +|<summary>| Will map to the operation’s summary +|<remarks>| Will map to the operation’s description (shown as "Implementation Notes" in the UI) +|<response code="###">| Specifies the different response of the target method +|<param>| Will define the parameter(s) of the target method +| +|== == == == == == == == == == == =

+
+
+

Please check Microsoft’s site regarding to summary notations.

+
+
+
+
+

Logging module

+
+
+

An important part of the life of software is the need for logs and traces. devon4net has a log module pre-configured to achieve this important point.

+
+
+

By default Microsoft provides a logging module on .NET Core applications. This module is open and can be extended. devon4net uses the Serilog implementation. This implementation provides a huge quantity of information about events and traces.

+
+
+
+
+

== Log file

+
+
+

devon4net can write the log information to a simple text file. You can configure the file name and folder at appsettings.json file (LogFile attribute) at devon4net.Application.WebApi project.

+
+
+
+
+

== Database log

+
+
+

devon4net can write the log information to a SQLite database. You can configure the file name and folder at appsettings.json file (LogDatabase attribute) at devon4net.Application.WebApi project.

+
+
+

With this method you can launch queries in order to search the information you are looking for.

+
+
+
+
+

== Seq log

+
+
+

devon4net can write the log information to a Seq server. You can configure the Seq server URL at the appsettings.json file (SeqLogServerUrl attribute) in the devon4net.Application.WebApi project.

+
+
+

With this method you can make queries via HTTP.

+
+
+
+serilog seq +
+
+
+

By default you can find the log information at Logs folder.

+
+
+
+
+

JWT module

+
+
+

JSON Web Tokens are an open, industry standard RFC 7519 method for representing claims securely between two parties allowing you to decode, verify and generate JWT.

+
+
+

You should use JWT for:

+
+
+
    +
  • +

    Authentication : allowing the user to access routes, services, and resources that are permitted with that token.

    +
  • +
  • +

    Information Exchange: JSON Web Tokens are a good way of securely transmitting information between parties. Additionally, as the signature is calculated using the header and the payload, you can also verify that the content.

    +
  • +
+
+
+

The JWT module is configured at Startup.cs inside devon4net.Application.WebApi project from .NET Core template. In this class you can configure the different authentication policy and JWT properties.

+
+
+

Once the user has been authenticated, the client performs the call to the backend with the Bearer attribute plus the token generated at the server side.

+
+
+
+jwt +
+
+
+

On My Thai Star sample there are two predefined users: user0 and Waiter. Once they log in the application, the client (Angular/Xamarin) will manage the server call with the json web token. With this method we can manage the server authentication and authorization.

+
+
+

You can find more information about JWT at jwt.io

+
+
+
+
+

AOP module

+
+
+

AOP (Aspect Oriented Programming) tracks all information when a method is called. AOP also tracks the input and output data when a method is called.

+
+
+

By default devon4net has AOP module pre-configured and activated for controllers at Startup.cs file at devon4net.Application.WebApi:

+
+
+
+
options.Filters.Add(new Infrastructure.AOP.AopControllerAttribute(Log.Logger));
+
+options.Filters.Add(new Infrastructure.AOP.AopExceptionFilter(Log.Logger));
+
+
+
+

This configuration allows all Controller classes to be tracked. If you don’t need to track the info, comment out the lines written above.

+
+
+
+
+

Docker support

+
+
+

devon4net Core projects are ready to be integrated with docker.

+
+
+

My Thai Star application sample is ready to be used with Linux docker containers. The Readme file explains how to launch and set up the sample application.

+
+
+
    +
  • +

    angular : Angular client to support backend. Just binaries.

    +
  • +
  • +

    database : Database scripts and .bak file

    +
  • +
  • +

    mailservice: Microservice implementation to send notifications.

    +
  • +
  • +

    netcore: Server side using .net core 2.0.x.

    +
  • +
  • +

    xamarin: Xamarin client based on Excalibur framework from The Netherlands using XForms.

    +
  • +
+
+
+

Docker configuration and docker-compose files are provided.

+
+
+
+
+

Testing with XUnit

+
+
+
+
+

xUnit.net is a free, open source, community-focused unit testing tool for the .NET Framework. Written by the original inventor of NUnit v2, xUnit.net is the latest technology for unit testing C#, F#, VB.NET and other .NET languages. xUnit.net works with ReSharper, CodeRush, TestDriven.NET and Xamarin. It is part of the .NET Foundation, and operates under their code of conduct. It is licensed under Apache 2 (an OSI approved license).

+
+
+
+— About xUnit.net
+https://xunit.github.io/#documentation +
+
+
+

Facts are tests which are always true. They test invariant conditions.

+
+
+

Theories are tests which are only true for a particular set of data.

+
+
+
+
+

The first test

+
+
+
+
using Xunit;
+
+namespace MyFirstUnitTests
+{
+    public class Class1
+    {
+        [Fact]
+        public void PassingTest()
+        {
+            Assert.Equal(4, Add(2, 2));
+        }
+
+        [Fact]
+        public void FailingTest()
+        {
+            Assert.Equal(5, Add(2, 2));
+        }
+
+        int Add(int x, int y)
+        {
+            return x + y;
+        }
+    }
+}
+
+
+
+
+
+

The first test with theory

+
+
+

Theory attribute is used to create tests with input params:

+
+
+
+
[Theory]
+[InlineData(3)]
+[InlineData(5)]
+[InlineData(6)]
+public void MyFirstTheory(int value)
+{
+    Assert.True(IsOdd(value));
+}
+
+bool IsOdd(int value)
+{
+    return value % 2 ==  1;
+}
+
+
+
+
+
+

Cheat Sheet

+
+
+

|== == == == == == == == == == == = +|Operation| Example +|Test|

+
+
+
+
public void Test()
+{
+}
+|Setup|public class TestFixture {
+public TestFixture()
+{
+
+...
+
+    }
+
+}
+|Teardown|public class TestFixture : IDisposable
+
+{
+
+public void Dispose() {
+
+ ...
+ }
+
+}
+
+
+
+

|== == == == == == == == == == == =

+
+
+
+
+

Console runner return codes

+
+
+

|== == == == == == == == == == == = +|Code| Meaning +|0|The tests ran successfully. +|1|One or more of the tests failed. +|2|The help page was shown, either because it was requested, or because the user did not provide any command line arguments. +|3| There was a problem with one of the command line options passed to the runner. +|4|There was a problem loading one or more of the test assemblies (for example, if a 64-bit only assembly is run with the 32-bit test runner). +|== == == == == == == == == == == =

+
+
+
+
+

Publishing

+
+ +
+
+
+

== Nginx

+
+
+

In order to deploy your application to a Nginx server on Linux platform you can follow the instructions from Microsoft here.

+
+
+
+
+

== IIS

+
+
+

This section shows the configuration options that the .Net Core application must implement.

+
+
+

Supported operating systems:

+
+
+
    +
  • +

    Windows 7 and newer

    +
  • +
  • +

    Windows Server 2008 R2 and newer*

    +
  • +
+
+
+

WebListener server will not work in a reverse-proxy configuration with IIS. You must use the Kestrel server.

+
+
+

IIS configuration

+
+
+

Enable the Web Server (IIS) role and establish role services.

+
+
+

Windows desktop operating systems

+
+
+

Navigate to Control Panel > Programs > Programs and Features > Turn Windows features on or off (left side of the screen). Open the group for Internet Information Services and Web Management Tools. Check the box for IIS Management Console. Check the box for World Wide Web Services. Accept the default features for World Wide Web Services or customize the IIS features to suit your needs.

+
+
+
+iis 1 +
+
+
+

*Conceptually, the IIS configuration described in this document also applies to hosting ASP.NET Core applications on Nano Server IIS, but refer to ASP.NET Core with IIS on Nano Server for specific instructions.

+
+
+

Windows Server operating systems +For server operating systems, use the Add Roles and Features wizard via the Manage menu or the link in Server Manager. On the Server Roles step, check the box for Web Server (IIS).

+
+
+
+iis 2 +
+
+
+

On the Role services step, select the IIS role services you desire or accept the default role services provided.

+
+
+
+iis 3 +
+
+
+

Proceed through the Confirmation step to install the web server role and services. A server/IIS restart is not required after installing the Web Server (IIS) role.

+
+
+

Install the .NET Core Windows Server Hosting bundle

+
+
+
    +
  1. +

    Install the .NET Core Windows Server Hosting bundle on the hosting system. The bundle will install the .NET Core Runtime, .NET Core Library, and the ASP.NET Core Module. The module creates the reverse-proxy between IIS and the Kestrel server. Note: If the system doesn’t have an Internet connection, obtain and install the Microsoft Visual C++ 2015 Re-distributable before installing the .NET Core Windows Server Hosting bundle.

    +
  2. +
  3. +

    Restart the system or execute net stop was /y followed by net start w3svc from a command prompt to pick up a change to the system PATH.

    +
  4. +
+
+
+ + + + + +
+ + +If you use an IIS Shared Configuration, see ASP.NET Core Module with IIS Shared Configuration. +
+
+
+

To configure IISIntegration service options, include a service configuration for IISOptions in ConfigureServices:

+
+
+
+
services.Configure<IISOptions>(options =>
+{
+    ...
+});
+
+
+
+

|== == == == == == == == == == == = +|Option|Default|Setting +|AutomaticAuthentication| true |If true, the authentication middleware sets the HttpContext.User and responds to generic challenges. If false, the authentication middleware only provides an identity (HttpContext.User) and responds to challenges when explicitly requested by the Authentication Scheme. Windows Authentication must be enabled in IIS for AutomaticAuthentication to function. +|AuthenticationDisplayName | null| Sets the display name shown to users on login pages. +|ForwardClientCertificate |true|If true and the MS-ASPNETCORE-CLIENTCERT request header is present, the HttpContext.Connection.ClientCertificate is populated. +|== == == == == == == == == == == =

+
+
+

web.config

+
+
+

The web.config file configures the ASP.NET Core Module and provides other IIS configuration. Creating, transforming, and publishing web.config is handled by Microsoft.NET.Sdk.Web, which is included when you set your project’s SDK at the top of your .csproj file, <Project Sdk="Microsoft.NET.Sdk.Web">. To prevent the MSBuild target from transforming your web.config file, add the <IsTransformWebConfigDisabled> property to your project file with a setting of true:

+
+
+
+
<PropertyGroup>
+  <IsTransformWebConfigDisabled>true</IsTransformWebConfigDisabled>
+</PropertyGroup>
+
+
+
+
+
+

== Azure

+
+
+

In order to deploy your application to Azure platform you can follow the instructions from Microsoft:

+
+
+

Set up the development environment

+
+
+ +
+
+

Create a web app

+
+
+

In the Visual Studio Start Page, select File > New > Project…​

+
+
+
+File menu +
+
+
+

Complete the New Project dialog:

+
+
+
    +
  • +

    In the left pane, select .NET Core.

    +
  • +
  • +

    In the center pane, select ASP.NET Core Web Application.

    +
  • +
  • +

    Select OK.

    +
  • +
+
+
+
+New Project dialog +
+
+
+

In the New ASP.NET Core Web Application dialog:

+
+
+
    +
  • +

    Select Web Application.

    +
  • +
  • +

    Select Change Authentication.

    +
  • +
+
+
+
+New Project dialog +
+
+
+

The Change Authentication dialog appears.

+
+
+
    +
  • +

    Select Individual User Accounts.

    +
  • +
  • +

    Select OK to return to the New ASP.NET Core Web Application, then select OK again.

    +
  • +
+
+
+
+New ASP.NET Core Web authentication dialog +
+
+
+

Visual Studio creates the solution.

+
+
+

Run the app locally

+
+
+
    +
  • +

    Choose Debug then Start Without Debugging to run the app locally.

    +
  • +
  • +

    Click the About and Contact links to verify the web application works.

    +
  • +
+
+
+
+Web application open in Microsoft Edge on localhost +
+
+
+
    +
  • +

    Select Register and register a new user. You can use a fictitious email address. When you submit, the page displays the following error:

    +
  • +
+
+
+

"Internal Server Error: A database operation failed while processing the request. SQL exception: Cannot open the database. Applying existing migrations for Application DB context may resolve this issue."

+
+
+
    +
  • +

    Select Apply Migrations and, once the page updates, refresh the page.

    +
  • +
+
+
+
+Internal Server Error: A database operation failed while processing the request. SQL exception: Cannot open the database. Applying existing migrations for Application DB context may resolve this issue. +
+
+
+

The app displays the email used to register the new user and a Log out link.

+
+
+
+Web application open in Microsoft Edge. The Register link is replaced by the text Hello email@domain.com! +
+
+
+

Deploy the app to Azure

+
+
+

Close the web page, return to Visual Studio, and select Stop Debugging from the Debug menu.

+
+
+

Right-click on the project in Solution Explorer and select Publish…​.

+
+
+
+Contextual menu open with Publish link highlighted +
+
+
+

In the Publish dialog, select Microsoft Azure App Service and click Publish.

+
+
+
+Publish dialog +
+
+
+
    +
  • +

    Name the app a unique name.

    +
  • +
  • +

    Select a subscription.

    +
  • +
  • +

    Select New…​ for the resource group and enter a name for the new resource group.

    +
  • +
  • +

    Select New…​ for the app service plan and select a location near you. You can keep the name that is generated by default.

    +
  • +
+
+
+
+App Service dialog +
+
+
+
    +
  • +

    Select the Services tab to create a new database.

    +
  • +
  • +

    Select the green + icon to create a new SQL Database

    +
  • +
+
+
+
+New SQL Database +
+
+
+
    +
  • +

    Select New…​ on the Configure SQL Database dialog to create a new database.

    +
  • +
+
+
+
+New SQL Database and server +
+
+
+

The Configure SQL Server dialog appears.

+
+
+
    +
  • +

    Enter an administrator user name and password, and then select OK. Don’t forget the user name and password you create in this step. You can keep the default Server Name.

    +
  • +
  • +

    Enter names for the database and connection string.

    +
  • +
+
+
+
+
+

== Note

+
+
+

"admin" is not allowed as the administrator user name.

+
+
+
+Configure SQL Server dialog +
+
+
+
    +
  • +

    Select OK.

    +
  • +
+
+
+

Visual Studio returns to the Create App Service dialog.

+
+
+
    +
  • +

    Select Create on the Create App Service dialog.

    +
  • +
+
+
+
+Configure SQL Database dialog +
+
+
+
    +
  • +

    Click the Settings link in the Publish dialog.

    +
  • +
+
+
+
+Publish dialog: Connection panel +
+
+
+

On the Settings page of the Publish dialog:

+
+
+
    +
  • +

    Expand Databases and check Use this connection string at runtime.

    +
  • +
  • +

    Expand Entity Framework Migrations and check Apply this migration on publish.

    +
  • +
  • +

    Select Save. Visual Studio returns to the Publish dialog.

    +
  • +
+
+
+
+Publish dialog: Settings panel +
+
+
+

Click Publish. Visual Studio will publish your app to Azure and launch the cloud app in your browser.

+
+
+

Test your app in Azure

+
+
+
    +
  • +

    Test the About and Contact links

    +
  • +
  • +

    Register a new user

    +
  • +
+
+
+
+Web application opened in Microsoft Edge on Azure App Service +
+
+
+

Update the app

+
+
+
    +
  • +

    Edit the Pages/About.cshtml Razor page and change its contents. For example, you can modify the paragraph to say "Hello ASP.NET Core!":

    +
    +
    +
    html<button class="action copy" data-bi-name="copy">Copy</button>
    +
    +
    +
  • +
+
+
+
+
@page
+@model AboutModel
+@{
+    ViewData["Title"] = "About";
+}
+<h2>@ViewData["Title"]</h2>
+<h3>@Model.Message</h3>
+
+    <p>Hello ASP.NET Core!</p>
+
+
+
+
    +
  • +

    Right-click on the project and select Publish…​ again.

    +
  • +
+
+
+
+Contextual menu open with Publish link highlighted +
+
+
+
    +
  • +

    After the app is published, verify the changes you made are available on Azure.

    +
  • +
+
+
+
+Verify task is complete +
+
+
+

Clean up

+
+
+

When you have finished testing the app, go to the Azure portal and delete the app.

+
+
+
    +
  • +

    Select Resource groups, then select the resource group you created.

    +
  • +
+
+
+
+Azure Portal: Resource Groups in sidebar menu +
+
+
+
    +
  • +

    In the Resource groups page, select Delete.

    +
  • +
+
+
+
+Azure Portal: Resource Groups page +
+
+
+
    +
  • +

    Enter the name of the resource group and select Delete. Your app and all other resources created in this tutorial are now deleted from Azure.

    +
  • +
+
+
+
+ +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/architecture.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/architecture.html new file mode 100644 index 00000000..a7d90b5e --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/architecture.html @@ -0,0 +1,386 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Architecture

+
+
+

The following principles and guidelines are based on Angular Style Guide - especially Angular modules (see Angular Docs). +It extends those where additional guidance is needed to define an architecture which is:

+
+
+
    +
  • +

    maintainable across applications and teams

    +
  • +
  • +

    easy to understand, especially when coming from a classic Java/.Net perspective - so whenever possible the same principles apply both to the server and the client

    +
  • +
  • +

    pattern based to solve common problems

    +
  • +
  • +

    based on best of breed solutions coming from open source and Capgemini project experiences

    +
  • +
  • +

    gives as much guidance as necessary and as little as possible

    +
  • +
+
+
+
+
+

Overview

+
+
+

When using Angular the web client architecture is driven by the framework in a certain way Google and the Angular community think about web client architecture. +Angular gives an opinion on how to look at architecture. +It is a component based like devon4j but uses different terms which are common language in web application development. +The important term is module which is used instead of component. The primary reason is the naming collision with the Web Components standard (see Web Components).
+To clarify this:

+
+
+
    +
  • +

    A component describes an UI element containing HTML, CSS and JavaScript - structure, design and logic encapsulated inside a reusable container called component.

    +
  • +
  • +

    A module describes an applications feature area. The application flight-app may have a module called booking.

    +
  • +
+
+
+

An application developed using Angular consists of multiple modules. +There are feature modules and special modules described by the Angular Style Guide - core and shared. +Angular or Angular Style Guide give no guidance on how to structure a module internally. +This is where this architecture comes in.

+
+
+
+
+

Layers

+
+
+

The architecture describes two layers. The terminology is based on common language in web development.

+
+
+
+Architecture - Layers +
+
Figure 1. Layers
+
+
+
    +
  • +

    Components Layer encapsulates components which present the current application state. +Components are separated into Smart and Dumb Components. +The only logic present is view logic inside Smart Components.

    +
  • +
  • +

    Services Layer is more or less what we call 'business logic layer' on the server side. +The layer defines the applications state, the transitions between state and classic business logic. +Stores contain application state over time to which Smart Components subscribe to. +Adapters are used to perform XHR, WebSocket connections, etc. +The business model is described inside the module. +Use case services perform business logic needed for use cases. +A use case services interacts with the store and adapters. +Methods of use case services are the API for Smart Components. +Those methods are Actions in reactive terminology.

    +
  • +
+
+
+
+
+

Modules

+
+
+

Angular requires a module called app which is the main entrance to an application at runtime - this module gets bootstrapped. +Angular Style Guide defines feature modules and two special modules - core and shared.

+
+
+
+Architecture - Modules +
+
Figure 2. Modules
+
+
+

A feature module is basically a vertical cut through both layers. +The shared module consists of components shared across feature modules. +The core module holds services shared across modules. +So core module is a module only having a services layer +and shared module is a module only having a components layer.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/components-layer.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/components-layer.html new file mode 100644 index 00000000..b4f75c8c --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/components-layer.html @@ -0,0 +1,470 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Components Layer

+
+
+

The components layer encapsulates all components presenting the current application view state, which means data to be shown to the user. +The term component refers to a component described by the standard Web Components. +So this layer has all Angular components, directives and pipes defined for an application. +The main challenges are:

+
+
+
    +
  • +

    how to structure the components layer (see File Structure Guide)

    +
  • +
  • +

    decompose components into maintainable chunks (see Component Decomposition Guide)

    +
  • +
  • +

    handle component interaction

    +
  • +
  • +

    manage calls to the services layer

    +
  • +
  • +

    apply a maintainable data and event flow throughout the component tree

    +
  • +
+
+
+
+
+

Smart and Dumb Components

+
+
+

The architecture applies the concept of Smart and Dumb Components (syn. Containers and Presenters). +The concept means that components are divided into Smart and Dumb Components.

+
+
+

A Smart Component typically is a top-level dialog inside the component tree.

+
+
+
    +
  • +

    a component, that can be routed to

    +
  • +
  • +

    a modal dialog

    +
  • +
  • +

    a component, which is placed inside AppComponent

    +
  • +
+
+
+

A Dumb Component can be used by one to many Smart Components. +Inside the component tree a Dumb Component is a child of a Smart Component.

+
+
+
+Component Tree +
+
Figure 1. Component tree example
+
+
+

As shown the topmost component is always the AppComponent in Angular applications. +The component tree describes the hierarchy of components starting from AppComponent. +The figure shows Smart Components in blue and Dumb Components in green. +AppComponent is a Smart Component by definition. +Inside the template of AppComponent placed components are static components inside the component tree. +So they are always displayed. +In the example OverviewComponent and DetailsComponent are rendered by Angular compiler depending on current URL the application displays. +So OverviewComponents sub-tree is displayed if the URL is /overview and DetailsComponents sub-tree is displayed if the URL is /details. +To clarify this distinction further the following table shows the main differences.

+
+
+
Smart vs Dumb Components
+

|== = +|Smart Components |Dumb Components

+
+
+

|contain the current view state +|show data via binding (@Input) and contain no view state

+
+
+

|handle events emitted by Dumb Components +|pass events up the component tree to be handled by Smart Components (@Output)

+
+
+

|call the services layer +|never call the services layer

+
+
+

|use services +|do not use services

+
+
+

|consists of n Dumb Components +|is independent of Smart Components +|== =

+
+
+
+
+

Interaction of Smart and Dumb Components

+
+
+

With the usage of the Smart and Dumb Components pattern one of the most important part is component interaction. +Angular comes with built in support for component interaction with @Input() and @Output() Decorators. +The following figure illustrates an unidirectional data flow.

+
+
+
    +
  • +

    Data always goes down the component tree - from a Smart Component down its children.

    +
  • +
  • +

    Events bubble up, to be handled by a Smart Component.

    +
  • +
+
+
+
+Smart and Dumb Components Interaction +
+
Figure 2. Smart and Dumb Component Interaction
+
+
+

As shown a Dumb Components role is to define a signature by declaring Input and Output Bindings.

+
+
+
    +
  • +

    @Input() defines what data is necessary for that component to work

    +
  • +
  • +

    @Output() defines which events can be listened on by the parent component

    +
  • +
+
+
+
Listing 1. Dumb Components define a signature
+
+
export class ValuePickerComponent {
+
+  @Input() columns: string[];
+  @Input() items: {}[];
+  @Input() selected: {};
+  @Input() filter: string;
+  @Input() isChunked = false;
+  @Input() showInput = true;
+  @Input() showDropdownHeader = true;
+
+  @Output() elementSelected = new EventEmitter<{}>();
+  @Output() filterChanged = new EventEmitter<string>();
+  @Output() loadNextChunk = new EventEmitter();
+  @Output() escapeKeyPressed = new EventEmitter();
+
+}
+
+
+
+

The example shows the Dumb Component ValuePickerComponent. +It describes seven input bindings with isChunked, showInput and showDropdownHeader being non-mandatory as they have a default value. +Four output bindings are present. Typically, a Dumb Component has very little code to no code inside the TypeScript class.

+
+
+
Listing 2. Smart Components use the Dumb Components signature inside the template
+
+
<div>
+
+  <value-input
+    ...>
+  </value-input>
+
+  <value-picker
+    *ngIf="isValuePickerOpen"
+    [columns]="columns"
+    [items]="filteredItems"
+    [isChunked]="isChunked"
+    [filter]="filter"
+    [selected]="selectedItem"
+    [showDropdownHeader]="showDropdownHeader"
+    (loadNextChunk)="onLoadNextChunk()"
+    (elementSelected)="onElementSelected($event)"
+    (filterChanged)="onFilterChanged($event)"
+    (escapeKeyPressed)="onEscapePressedInsideChildTable()">
+  </value-picker>
+
+</div>
+
+
+
+

Inside the Smart Components template the events emitted by Dumb Components are handled. +It is a good practice to name the handlers with the prefix on* (e.g. onInputChanged()).

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/cookbook-abstract-class-store.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/cookbook-abstract-class-store.html new file mode 100644 index 00000000..de6b706a --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/cookbook-abstract-class-store.html @@ -0,0 +1,402 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Abstract Class Store

+
+
+

The following solution presents a base class for implementing stores which handle state and its transitions. +Working with the base class achieves:

+
+
+
    +
  • +

    common API across all stores

    +
  • +
  • +

    logging (when activated in the constructor)

    +
  • +
  • +

    state transitions are asynchronous by design - sequential order problems are avoided

    +
  • +
+
+
+
Listing 1. Usage Example
+
+
@Injectable()
+export class ModalStore extends Store<ModalState> {
+
+  constructor() {
+    super({ isOpen: false }, !environment.production);
+  }
+
+  closeDialog() {
+    this.dispatchAction('Close Dialog', (currentState) => ({...currentState, isOpen: false}));
+  }
+
+  openDialog() {
+    this.dispatchAction('Open Dialog', (currentState) => ({...currentState, isOpen: true}));
+  }
+
+}
+
+
+
+
Listing 2. Abstract Base Class Store
+
+
import { OnDestroy } from '@angular/core';
+import { BehaviorSubject } from 'rxjs/BehaviorSubject';
+import { Observable } from 'rxjs/Observable';
+import { intersection, difference } from 'lodash';
+import { map, distinctUntilChanged, observeOn } from 'rxjs/operators';
+import { Subject } from 'rxjs/Subject';
+import { queue } from 'rxjs/scheduler/queue';
+import { Subscription } from 'rxjs/Subscription';
+
+interface Action<T> {
+  name: string;
+  actionFn: (state: T) => T;
+}
+
+/** Base class for implementing stores. */
+export abstract class Store<T> implements OnDestroy {
+
+  private actionSubscription: Subscription;
+  private actionSource: Subject<Action<T>>;
+  private stateSource: BehaviorSubject<T>;
+  state$: Observable<T>;
+
+  /**
+   * Initializes a store with initial state and logging.
+   * @param initialState Initial state
+   * @param logChanges When true state transitions are logged to the console.
+   */
+  constructor(initialState: T, public logChanges = false) {
+    this.stateSource = new BehaviorSubject<T>(initialState);
+    this.state$ = this.stateSource.asObservable();
+    this.actionSource = new Subject<Action<T>>();
+
+    this.actionSubscription = this.actionSource.pipe(observeOn(queue)).subscribe(action => {
+      const currentState = this.stateSource.getValue();
+      const nextState = action.actionFn(currentState);
+
+      if (this.logChanges) {
+        this.log(action.name, currentState, nextState);
+      }
+
+      this.stateSource.next(nextState);
+    });
+  }
+
+  /**
+   * Selects a property from the stores state.
+   * Will do distinctUntilChanged() and map() with the given selector.
+   * @param selector Selector function which selects the needed property from the state.
+   * @returns Observable of return type from selector function.
+   */
+  select<TX>(selector: (state: T) => TX): Observable<TX> {
+    return this.state$.pipe(
+      map(selector),
+      distinctUntilChanged()
+    );
+  }
+
+  protected dispatchAction(name: string, action: (state: T) => T) {
+    this.actionSource.next({ name, actionFn: action });
+  }
+
+  private log(actionName: string, before: T, after: T) {
+    const result: { [key: string]: { from: any, to: any} } = {};
+    const sameProbs = intersection(Object.keys(after), Object.keys(before));
+    const newProbs = difference(Object.keys(after), Object.keys(before));
+    for (const prop of newProbs) {
+      result[prop] = { from: undefined, to: (<any>after)[prop] };
+    }
+
+    for (const prop of sameProbs) {
+      if ((<any>before)[prop] !==  (<any>after)[prop]) {
+        result[prop] = { from: (<any>before)[prop], to: (<any>after)[prop] };
+      }
+    }
+
+    console.log(this.constructor.name, actionName, result);
+  }
+
+  ngOnDestroy() {
+    this.actionSubscription.unsubscribe();
+  }
+
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-accessibility.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-accessibility.html new file mode 100644 index 00000000..5c7ac303 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-accessibility.html @@ -0,0 +1,660 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Accessibility

+
+
+

Multiple studies suggest that around 15-20% of the population are living with a disability of some kind. In comparison, that number is higher than any single browser demographic currently, other than Chrome. Not considering those users when developing an application means excluding a large number of people from being able to use it comfortably or at all.

+
+
+

Some people are unable to use a mouse, view a screen, see low-contrast text, or hear dialogue or music, and some people have difficulty understanding complex language. These users need support such as keyboard support, screen reader support, high-contrast text, captions and transcripts, and plain-language support. A disability may range from permanent to situational.

+
+
+
+
+

Key Concerns of Accessible Web Applications

+
+
+
    +
  • +

    Semantic Markup - Allows the application to be understood on a more general level rather than just details of whats being rendered

    +
  • +
  • +

    Keyboard Accessibility - Applications must still be usable when using only a keyboard

    +
  • +
  • +

    Visual Assistance - color contrast, focus of elements and text representations of audio and events

    +
  • +
+
+
+
+
+

Semantic Markup

+
+
+

If you’re creating custom element directives, Web Components or HTML in general, use native elements wherever possible to utilize built-in events and properties. Alternatively, use ARIA to communicate semantic meaning.

+
+
+

HTML tags have attributes that provide extra context on what’s being displayed in the browser. For example, the <img> tag’s alt attribute lets the reader know what is being shown using a short description. However, native tags don’t cover all cases. This is where ARIA fits in. ARIA attributes can provide context on what roles specific elements have in the application or on how elements within the document relate to each other.

+
+
+

A modal component can be given the role of dialog or alertdialog to let the browser know that that component is acting as a modal. The modal component template can use the ARIA attributes aria-labelledby and aria-describedby to describe to readers what the title and purpose of the modal is.

+
+
+
+
@Component({
+    selector: 'ngc2-app',
+    template: `
+      <ngc2-notification-button
+        message="Hello!"
+        label="Greeting"
+        role="button">
+      </ngc2-notification-button>
+      <ngc2-modal
+        [title]="modal.title"
+        [description]="modal.description"
+        [visible]="modal.visible"
+        (close)="modal.close()">
+      </ngc2-modal>
+    `
+})
+export class AppComponent {
+  constructor(private modal: ModalService) { }
+}
+
+
+
+

notification-button.component.ts

+
+
+
+
@Component({
+  selector: 'ngc2-modal',
+  template: `
+    <div
+      role="dialog"
+      aria-labelledby="modal-title"
+      aria-describedby="modal-description">
+      <div id="modal-title">{{title}}</div>
+      <p id="modal-description">{{description}}</p>
+      <button (click)="close.emit()">OK</button>
+    </div>
+  `
+})
+export class ModalComponent {
+  ...
+}
+
+
+
+
+
+

Keyboard Accessibility

+
+
+

Keyboard accessibility is the ability of your application to be interacted with using just a keyboard. The more streamlined the site can be used this way, the more keyboard accessible it is. Keyboard accessibility is one of the largest aspects of web accessibility since it targets:

+
+
+
    +
  • +

    those with motor disabilities who can’t use a mouse

    +
  • +
  • +

    users who rely on screen readers and other assistive technology, which require keyboard navigation

    +
  • +
  • +

    those who prefer not to use a mouse

    +
  • +
+
+
+
+
+

== Focus

+
+
+

Keyboard interaction is driven by something called focus. In web applications, only one element on a document has focus at a time, and keypress will activate whatever function is bound to that element. +Focus element border can be styled with CSS using the outline property, but it should not be removed. Elements can also be styled using the :focus psuedo-selector.

+
+
+
+
+

== Tabbing

+
+
+

The most common way of moving focus along the page is through the tab key. Elements will be traversed in the order they appear in the document outline - so that order must be carefully considered during development. +There is a way to change the default behavior or tab order. This can be done through the tabindex attribute. The tabindex can be given the values: +* less than zero - to let readers know that an element should be focusable but not keyboard accessible +* 0 - to let readers know that that element should be accessible by keyboard +* greater than zero - to let readers know the order in which the focusable element should be reached using the keyboard. Order is calculated from lowest to highest.

+
+
+
+
+

== Transitions

+
+
+

The majority of transitions that happen in an Angular application will not involve a page reload. This means that developers will need to carefully manage what happens to focus in these cases.

+
+
+

For example:

+
+
+
+
@Component({
+  selector: 'ngc2-modal',
+  template: `
+    <div
+      role="dialog"
+      aria-labelledby="modal-title"
+      aria-describedby="modal-description">
+      <div id="modal-title">{{title}}</div>
+      <p id="modal-description">{{description}}</p>
+      <button (click)="close.emit()">OK</button>
+    </div>
+  `,
+})
+export class ModalComponent {
+  constructor(private modal: ModalService, private element: ElementRef) { }
+
+  ngOnInit() {
+    this.modal.visible$.subscribe(visible => {
+      if(visible) {
+        setTimeout(() => {
+          this.element.nativeElement.querySelector('button').focus();
+        }, 0);
+      }
+    })
+  }
+}
+
+
+
+
+
+

Visual Assistance

+
+
+

One large category of disability is visual impairment. This includes not just the blind, but those who are color blind or partially sighted, and require some additional consideration.

+
+
+
+
+

Color Contrast

+
+
+

When choosing colors for text or elements on a website, the contrast between them needs to be considered. For WCAG 2.0 AA, this means that the contrast ratio for text or visual representations of text needs to be at least 4.5:1. There are tools online to measure the contrast ratio such as this color contrast checker from WebAIM or be checked with using automation tests.

+
+
+
+
+

Visual Information

+
+
+

Color can help a user’s understanding of information, but it should never be the only way to convey information to a user. For example, a user with red/green color-blindness may have trouble discerning at a glance if an alert is informing them of success or failure.

+
+
+
+
+

Audiovisual Media

+
+
+

Audiovisual elements in the application such as video, sound effects or audio (that is, podcasts) need related textual representations such as transcripts, captions or descriptions. They also should never auto-play and playback controls should be provided to the user.

+
+
+
+
+

Accessibility with Angular Material

+
+
+

The a11y package provides a number of tools to improve accessibility. Import

+
+
+
+
import { A11yModule } from '@angular/cdk/a11y';
+
+
+
+
+
+

ListKeyManager

+
+
+

ListKeyManager manages the active option in a list of items based on keyboard interaction. Intended to be used with components that correspond to a role="menu" or role="listbox" pattern . Any component that uses a ListKeyManager will generally do three things:

+
+
+
    +
  • +

    Create a @ViewChildren query for the options being managed.

    +
  • +
  • +

    Initialize the ListKeyManager, passing in the options.

    +
  • +
  • +

    Forward keyboard events from the managed component to the ListKeyManager.

    +
  • +
+
+
+

Each option should implement the ListKeyManagerOption interface:

+
+
+
+
interface ListKeyManagerOption {
+  disabled?: boolean;
+  getLabel?(): string;
+}
+
+
+
+
+
+

== Types of ListKeyManager

+
+
+

There are two varieties of ListKeyManager, FocusKeyManager and ActiveDescendantKeyManager.

+
+
+
+
+

FocusKeyManager

+
+
+

Used when options will directly receive browser focus. Each item managed must implement the FocusableOption interface:

+
+
+
+
interface FocusableOption extends ListKeyManagerOption {
+  focus(): void;
+}
+
+
+
+
+
+

ActiveDescendantKeyManager

+
+
+

Used when options will be marked as active via aria-activedescendant. Each item managed must implement the Highlightable interface:

+
+
+
+
interface Highlightable extends ListKeyManagerOption {
+  setActiveStyles(): void;
+  setInactiveStyles(): void;
+}
+
+
+
+

Each item must also have an ID bound to the listbox’s or menu’s aria-activedescendant.

+
+
+
+
+

FocusTrap

+
+
+

The cdkTrapFocus directive traps Tab key focus within an element. This is intended to be used to create an accessible experience for components like modal dialogs, where focus must be constrained. This directive is declared in A11yModule.

+
+
+

This directive will not prevent focus from moving out of the trapped region due to mouse interaction.

+
+
+

For example:

+
+
+
+
<div class="my-inner-dialog-content" cdkTrapFocus>
+  <!-- Tab and Shift + Tab will not leave this element. -->
+</div>
+
+
+
+
+
+

Regions

+
+
+

Regions can be declared explicitly with an initial focus element by using the cdkFocusRegionStart, cdkFocusRegionEnd and cdkFocusInitial DOM attributes. When using the tab key, focus will move through this region and wrap around on either end.

+
+
+

For example:

+
+
+
+
<a mat-list-item routerLink cdkFocusRegionStart>Focus region start</a>
+<a mat-list-item routerLink>Link</a>
+<a mat-list-item routerLink cdkFocusInitial>Initially focused</a>
+<a mat-list-item routerLink cdkFocusRegionEnd>Focus region end</a>
+
+
+
+
+
+

InteractivityChecker

+
+
+

InteractivityChecker is used to check the interactivity of an element, capturing disabled, visible, tabbable, and focusable states for accessibility purposes.

+
+
+
+
+

LiveAnnouncer

+
+
+

LiveAnnouncer is used to announce messages for screen-reader users using an aria-live region.

+
+
+

For example:

+
+
+
+
@Component({...})
+export class MyComponent {
+
+ constructor(liveAnnouncer: LiveAnnouncer) {
+   liveAnnouncer.announce("Hey Google");
+ }
+}
+
+
+
+
+
+

API reference for Angular CDK a11y

+ +
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-add-electron.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-add-electron.html new file mode 100644 index 00000000..a0736076 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-add-electron.html @@ -0,0 +1,848 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Add Electron to an Angular application using Angular CLI

+
+
+

This cookbook recipe explains how to integrate Electron in an Angular 10+ application. Electron is a framework for creating native applications with web technologies like JavaScript, HTML, and CSS. As an example, very well known applications such as Visual Studio Code, Atom, Slack or Skype (and many more) are using Electron too.

+
+
+ + + + + +
+ + +At the moment of this writing Angular 11.2.0, Electron 11.2.3 and Electron-builder 22.9.1 were the versions available. +
+
+
+

Here are the steps to achieve this goal. Follow them in order.

+
+
+
+
+

Add Electron and other relevant dependencies

+
+
+

There are two different approaches to add the dependencies in the package.json file:

+
+
+
    +
  • +

    Writing the dependencies directly in that file.

    +
  • +
  • +

    Installing using npm install or yarn add.

    +
  • +
+
+
+ + + + + +
+ + +Please remember if the project has a package-lock.json or yarn.lock file use npm or yarn respectively. +
+
+
+

In order to add the dependencies directly in the package.json file, include the following lines in the devDependencies section:

+
+
+
+
"devDependencies": {
+...
+    "electron": "^11.2.3",
+    "electron-builder": "^22.9.1",
+...
+},
+
+
+
+

As indicated above, instead of this npm install can be used:

+
+
+
+
$ npm install -D electron electron-builder
+
+
+
+

Or with yarn:

+
+
+
+
$ yarn add -D electron electron-builder
+
+
+
+
+
+

Create the necessary typescript configurations

+
+
+

In order to initiate electron in an angular app we need to modify the tsconfig.json file and create a tsconfig.serve.json and a tsconfig.base.json in the root folder.

+
+
+
+
+

== tsconfig.json

+
+
+

This file needs to be modified to create references to ./src/tsconfig.app.json and ./src/tsconfig.spec.json to support different configurations.

+
+
+
+
{
+  "files": [],
+  "references": [
+    {
+      "path": "./src/tsconfig.app.json"
+    },
+    {
+      "path": "./src/tsconfig.spec.json"
+    }
+  ]
+}
+
+
+
+
+
+

== tsconfig.app.json

+
+
+
+
{
+  "extends": "../tsconfig.base.json",
+  "compilerOptions": {
+    "outDir": "../app",
+    "module": "es2015",
+    "baseUrl": "",
+    "types": []
+  },
+  "include": [
+    "**/*.ts",
+  ],
+  "exclude": [
+    "**/*.spec.ts"
+  ],
+  "angularCompilerOptions": {
+    "fullTemplateTypeCheck": true,
+    "strictInjectionParameters": true,
+    "preserveWhitespaces": true
+  }
+}
+
+
+
+
+
+

== tsconfig.spec.json

+
+
+
+
{
+  "extends": "../tsconfig.base.json",
+  "compilerOptions": {
+    "outDir": "../spec",
+    "module": "commonjs",
+    "types": [
+      "jasmine",
+      "node"
+    ]
+  },
+  "files": [
+    "test.ts",
+  ],
+  "include": [
+    "**/*.spec.ts",
+    "**/*.d.ts"
+  ],
+  "exclude": [
+    "dist",
+    "release",
+    "node_modules"
+  ]
+}
+
+
+
+
+
+

== tsconfig.base.json

+
+
+

This is shared between tsconfig.app.json and tsconfig.spec.json and it will be extended on each config file.

+
+
+
+
{
+  "compileOnSave": false,
+  "compilerOptions": {
+    "outDir": "./dist",
+    "sourceMap": true,
+    "declaration": false,
+    "moduleResolution": "node",
+    "emitDecoratorMetadata": true,
+    "experimentalDecorators": true,
+    "target": "es5",
+    "typeRoots": [
+      "node_modules/@types"
+    ],
+    "lib": [
+      "es2017",
+      "es2016",
+      "es2015",
+      "dom"
+    ]
+  },
+  "files": [
+    "electron-main.ts",
+    "src/polyfills.ts"
+  ],
+  "include": [
+    "src/**/*.d.ts"
+  ],
+  "exclude": [
+    "node_modules"
+  ]
+}
+
+
+
+
+
+

== tsconfig.serve.json

+
+
+

In the root, tsconfig.serve.json needs to be created. This typescript config file is going to be used when we serve electron:

+
+
+
+
{
+  "compilerOptions": {
+    "outDir": ".",
+    "sourceMap": true,
+    "declaration": false,
+    "moduleResolution": "node",
+    "emitDecoratorMetadata": true,
+    "experimentalDecorators": true,
+    "target": "es5",
+    "typeRoots": [
+      "node_modules/@types"
+    ],
+    "lib": [
+      "es2017",
+      "dom"
+    ]
+  },
+  "include": [
+    "electron-main.ts"
+  ],
+  "exclude": [
+    "node_modules",
+    "**/*.spec.ts"
+  ]
+}
+
+
+
+
+
+

Add Electron build configuration

+
+
+

In order to configure electron builds properly we need to create a new json on our application, let’s call it electron-builder.json. For more information and fine tuning please refer to the Electron Builder official documentation.

+
+
+

The contents of the file will be something similar to the following:

+
+
+
+
{
+  "productName": "devon4ngElectron",
+  "directories":{
+    "output": "./builder-release"
+  },
+  "win": {
+    "icon": "dist/assets/icons",
+    "target": [
+      "portable"
+    ]
+  },
+  "mac": {
+    "icon": "dist/assets/icons",
+    "target": [
+      "dmg"
+    ]
+  },
+  "linux": {
+    "icon": "dist/assets/icons",
+    "target": [
+      "AppImage"
+    ]
+  }
+}
+
+
+
+

There are two important things in this file:

+
+
+
    +
  1. +

    "output": this is where electron builder is going to build our application

    +
  2. +
  3. +

    "icon": in every OS possible there is an icon parameter, the route to the icon folder that will be created after building with angular needs to be used here. This will make it so the electron builder can find the icons and build.

    +
  4. +
+
+
+
+
+

Modify angular.json

+
+
+

angular.json has to be modified so the project is built inside /dist without an intermediate folder.

+
+
+
+
{
+  "architect": {
+    "build": {
+      "outputPath": "dist"
+    }
+  }
+}
+
+
+
+
+
+

Create the electron window in electron-main.ts

+
+
+

In order to use electron, a file needs to be created at the root of the application (electron-main.ts). This file will create a window with different settings checking if we are using --serve as an argument:

+
+
+
+
import { app, BrowserWindow } from 'electron';
+import * as path from 'path';
+import * as url from 'url';
+
+let win: any;
+const args: any = process.argv.slice(1);
+const serve: any = args.some((val) => val == '--serve');
+
+const createWindow:any = ()=>{
+  // Create the browser window.
+  win = new BrowserWindow({
+    fullscreen: true,
+    webPreferences: {
+      nodeIntegration: true,
+    }
+  });
+
+  if (serve) {
+    require('electron-reload')(__dirname, {
+      electron: require(`${__dirname}/node_modules/electron`)
+    });
+    win.loadURL('http://localhost:4200');
+  } else {
+    win.loadURL(
+      url.format({
+        pathname: path.join(__dirname, 'dist/index.html'),
+        protocol: 'file:',
+        slashes: true
+      })
+    );
+  }
+
+  if (serve) {
+    win.webContents.openDevTools();
+  }
+
+  // Emitted when the window is closed.
+  win.on('closed', () => {
+    // Dereference the window object, usually you would store window
+    // in an array if your app supports multi windows, this is the time
+    // when you should delete the corresponding element.
+    // tslint:disable-next-line:no-null-keyword
+    win = null;
+  });
+}
+
+try {
+  // This method will be called when Electron has finished
+  // initialization and is ready to create browser windows.
+  // Some APIs can only be used after this event occurs.
+  app.on('ready', createWindow);
+
+   // Quit when all windows are closed.
+  app.on('window-all-closed', () => {
+    // On OS X it is common for applications and their menu bar
+    // to stay active until the user quits explicitly with Cmd + Q
+    if (process.platform !==  'darwin') {
+      app.quit();
+    }
+  });
+
+   app.on('activate', () => {
+    // On OS X it's common to re-create a window in the app when the
+    // dock icon is clicked and there are no other windows open.
+    if (win == null) {
+      createWindow();
+    }
+  });
+} catch (e) {
+  // Catch Error
+  // throw e;
+}
+
+
+
+
+
+

Add the electron window and improve the package.json scripts

+
+
+

Inside package.json the electron window that will be transformed to electron-main.js when building needs to be added.

+
+
+
+
{
+  ....
+  "main": "electron-main.js",
+  "scripts": {...}
+  ....
+}
+
+
+
+

The scripts section in the package.json can be improved to avoid running too verbose commands. As a very complete example we can take a look at the My Thai Star’s scripts section and copy the lines useful in your project. In any case, at least we recommend adding the following lines:

+
+
+
+
  "scripts": {
+    "ng": "ng",
+    "start": "ng serve",
+    "build": "ng build",
+    "test": "ng test",
+    "lint": "ng lint",
+    "e2e": "ng e2e",
+    "electron:tsc": "tsc -p tsconfig.serve.json",
+    "electron:run": "npm run electron:tsc && ng build --base-href ./ && npx electron .",
+    "electron:serve": "npm run electron:tsc && npx electron . --serve",
+    "electron:pack": "npm run electron:tsc && electron-builder --dir --config electron-builder.json",
+    "electron:build": "npm run electron:tsc && electron-builder --config electron-builder.json build"
+  },
+
+
+
+

The electron: scripts do the following:

+
+
+
    +
  • +

    electron:tsc: Compiles electron TS files.

    +
  • +
  • +

    electron:run: Serves Angular app and runs electron.

    +
  • +
  • +

    electron:serve: Serves electron with an already running angular app (i.e. a ng serve command running on another terminal).

    +
  • +
  • +

    electron:pack: Packs electron app.

    +
  • +
  • +

    electron:build: Builds electron app.

    +
  • +
+
+
+
+
+

Add Electron to an Angular application using Nx CLI

+
+
+

Creating an Electron app is very easy and straight-forward if you are using Nx CLI. As a pre-requisite, you should already have an application in your Nx workspace which you want to run as a front-end in your Electron app. (You can follow this guide if you want to get started with Nx).

+
+
+

Follow the steps below to develop an Electron app in your Nx workspace:

+
+
+
+
+

Install nx-electron

+
+
+

Install nx-electron using the command:

+
+
+
+
  npm install -D nx-electron
+
+
+
+

This will add the packages electron and nx-electron as dev dependencies to your Nx workspace. This will help us generate our Electron app in the next step.

+
+
+
+
+

Generate your Electron app

+
+
+

Once you have installed nx-electron, you can generate your electron app using the command:

+
+
+
+
  nx g nx-electron:app <electron-app-name> --frontendProject=<frontend-app-name>
+
+
+
+

And that is it! You have generated your Electron app already. All the configuration files (tsconfig.*) are generated for you under <electron-app-name> in your Nx workspace.

+
+
+
+
+

Serving your app

+
+
+

You can use this command to serve your Electron app:

+
+
+
+
  nx run-many --target=serve --projects=<frontend-app-name>,<electron-app-name> --parallel
+
+
+
+

If you see a blank application, it is because the Electron app was served before the front-end was served. To avoid this, you can serve the front-end and back-end separately, (that is, serve the back-end only after the front-end is served).

+
+
+
+
+

Building your app

+
+
+

The command for building your Electron app in Nx is similar to the serve command above, you only change the target from serve to build:

+
+
+
+
  nx run-many --target=build --projects=<frontend-app-name>,<electron-app-name> --parallel
+
+
+
+
+
+

Packaging your app

+
+
+

Make sure you have built your app before you try to package it using the following command:

+
+
+
+
  nx run <electron-app-name>:package [--options]
+
+
+
+

The options that can be passed can be found here.

+
+
+

You can find a working example of an Electron app in devon4ts-samples.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-angular-elements.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-angular-elements.html new file mode 100644 index 00000000..a92b1d77 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-angular-elements.html @@ -0,0 +1,949 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Elements

+
+ +
+
+
+

What are Angular Elements?

+
+
+

Angular elements are Angular components packaged as custom elements, a web standard for defining new HTML elements in a framework-agnostic way.

+
+
+

Custom elements are a Web Platform feature currently supported by Chrome, Firefox, Opera, and Safari, and available in other browsers through Polyfills. A custom element extends HTML by allowing you to define a tag whose content is created and controlled by JavaScript code. The browser maintains a CustomElementRegistry of defined custom elements (also called Web Components), which maps an instantiable JavaScript class to an HTML tag.

+
+
+
+
+

Why use Angular Elements?

+
+
+

Angular Elements allows Angular to work with different frameworks by using input and output elements. This allows Angular to work with many different frameworks if needed. This is an ideal situation if a slow transformation of an application to Angular is needed or some Angular needs to be added in other web applications (for example, ASP.NET, JSP, etc.)

+
+
+
+
+

Negative points about Elements

+
+
+

Angular Elements is really powerful but, since the transition between views is going to be handled by another framework or HTML/JavaScript, using Angular Router is not possible. The view transitions have to be handled manually. This fact also eliminates the possibility of just porting an application completely.

+
+
+
+
+

How to use Angular Elements?

+
+
+

In a generalized way, a simple Angular component could be transformed to an Angular Element with these steps:

+
+
+
+
+

Installing Angular Elements

+
+
+

The first step is going to be install the library using our preferred packet manager:

+
+
+
+
+

== NPM

+
+
+
+
npm install @angular/elements
+
+
+
+
+
+

== YARN

+
+
+
+
yarn add @angular/elements
+
+
+
+
+
+

Preparing the components in the modules

+
+
+

Inside the app.module.ts, in addition to the normal declaration of the components inside declarations, the modules inside imports and the services inside providers, the components need to be added in entryComponents. If there are components that have their own module, the same logic is going to be applied for them, only adding in the app.module.ts the components that do not have their own module. Here is an example of this:

+
+
+
+
....
+@NgModule({
+  declarations: [
+    DishFormComponent,
+    DishViewComponent
+  ],
+  imports: [
+    CoreModule,  // Module containing Angular Materials
+    FormsModule
+  ],
+  entryComponents: [
+    DishFormComponent,
+    DishViewComponent
+  ],
+  providers: [DishShareService]
+})
+....
+
+
+
+

After that is done, the constructor of the module is going to be modified to use injector and bootstrap the application defining the components. This is going to allow the Angular Element to get the injections and to define a component tag that will be used later:

+
+
+
+
....
+})
+export class AppModule {
+  constructor(private injector: Injector) {
+
+  }
+
+  ngDoBootstrap() {
+    const el = createCustomElement(DishFormComponent, {injector: this.injector});
+    customElements.define('dish-form', el);
+
+    const elView = createCustomElement(DishViewComponent, {injector: this.injector});
+    customElements.define('dish-view', elView);
+  }
+}
+....
+
+
+
+
+
+

A component example

+
+
+

In order to be able to use a component, @Input() and @Output() variables are used. These variables are going to be the ones that will allow the Angular Element to communicate with the framework/JavaScript:

+
+
+

Component html

+
+
+
+
<mat-card>
+    <mat-grid-list cols="1" rowHeight="100px" rowWidth="50%">
+				<mat-grid-tile colspan="1" rowspan="1">
+					<span>{{ platename }}</span>
+				</mat-grid-tile>
+				<form (ngSubmit)="onSubmit(dishForm)" #dishForm="ngForm">
+					<mat-grid-tile colspan="1" rowspan="1">
+						<mat-form-field>
+							<input matInput placeholder="Name" name="name" [(ngModel)]="dish.name">
+						</mat-form-field>
+					</mat-grid-tile>
+					<mat-grid-tile colspan="1" rowspan="1">
+						<mat-form-field>
+							<textarea matInput placeholder="Description" name="description" [(ngModel)]="dish.description"></textarea>
+						</mat-form-field>
+					</mat-grid-tile>
+					<mat-grid-tile colspan="1" rowspan="1">
+						<button mat-raised-button color="primary" type="submit">Submit</button>
+					</mat-grid-tile>
+				</form>
+		</mat-grid-list>
+</mat-card>
+
+
+
+

Component ts

+
+
+
+
@Component({
+  templateUrl: './dish-form.component.html',
+  styleUrls: ['./dish-form.component.scss']
+})
+export class DishFormComponent implements OnInit {
+
+  @Input() platename;
+
+  @Input() platedescription;
+
+  @Output()
+  submitDishEvent = new EventEmitter();
+
+  submitted = false;
+  dish = {name: '', description: ''};
+
+  constructor(public dishShareService: DishShareService) { }
+
+  ngOnInit() {
+    this.dish.name = this.platename;
+    this.dish.description = this.platedescription;
+  }
+
+  onSubmit(dishForm: NgForm): void {
+    this.dishShareService.createDish(dishForm.value.name, dishForm.value.description);
+    this.submitDishEvent.emit('dishSubmited');
+  }
+
+}
+
+
+
+

In this file there are definitions of multiple variables that will be used as input and output. Since the input variables are going to be used directly by html, only lowercase and underscore strategies can be used for them. On the onSubmit(dishForm: NgForm) a service is used to pass this variables to another component. Finally, as a last thing, the selector inside @Component has been removed since a tag that will be used dynamically was already defined in the last step.

+
+
+
+
+

Solving the error

+
+
+

In order to be able to use this Angular Element, a Polyfills/Browser support related error needs to be solved. This error can be solved in two ways:

+
+
+
+
+

== Changing the target

+
+
+

One solution is to change the target in tsconfig.json to es2015. This might not be doable for every application since maybe a specific target is required.

+
+
+
+
+

== Installing Polyfills

+
+
+

Another solution is to use a polyfill. In order to do so, the library is going to be installed with a package manager:

+
+
+

Yarn

+
+
+
+
yarn add @webcomponents/webcomponentsjs
+
+
+
+

Npm

+
+
+
+
npm install @webcomponents/webcomponentsjs
+
+
+
+

After the packet manager has finished, inside the src folder a new file polyfills.ts is found. To solve the error, importing the corresponding adapter (custom-elements-es5-adapter.js) is necessary:

+
+
+
+
....
+/***************************************************************************************************
+ * APPLICATION IMPORTS
+ */
+
+import '@webcomponents/webcomponentsjs/custom-elements-es5-adapter.js';
+....
+
+
+
+

If you want to learn more about polyfills in angular you can do it here

+
+
+
+
+

Building the Angular Element

+
+
+

First, before building the Angular Element, every element inside the app component except the module needs to be removed. After that, a bash script is created in the root folder. This script will allow putting every necessary file into a single JS file.

+
+
+
+
ng build "projectName" --configuration production --output-hashing=none && cat dist/"projectName"/runtime.js dist/"projectName"/polyfills.js dist/"projectName"/scripts.js dist/"projectName"/main.js > ./dist/"projectName"/"nameWantedAngularElement".js
+
+
+
+

After executing the bash script, it will generate inside the path dist/"projectName" (or dist/apps/projectname in a Nx workspace) a JS file named "nameWantedAngularElement".js and a css file.

+
+
+
+
+ +
+
+

The library ngx-build-plus allows to add different options when building. In addition, it solves some errors that will occur when trying to use multiple angular elements in an application. In order to use it, yarn or npm can be used:

+
+
+

Yarn

+
+
+
+
yarn add ngx-build-plus
+
+
+
+

Npm

+
+
+
+
npm install ngx-build-plus
+
+
+
+

If you want to add it to a specific sub project in your projects folder, use the --project:

+
+
+
+
.... ngx-build-plus --project "project-name"
+
+
+
+

Using this library and the following command, an isolated Angular Element which won’t have conflicts with others can be generated. This Angular Element will not have a polyfill, so the project where we use it will need to include a polyfill with the Angular Element requirements.

+
+
+
+
ng build "projectName" --output-hashing none --single-bundle true --configuration production --bundle-styles false
+
+
+
+

This command will generate three things:

+
+
+
    +
  1. +

    The main JS bundle

    +
  2. +
  3. +

    The script JS

    +
  4. +
  5. +

    The css

    +
  6. +
+
+
+

These files will be used later instead of the single JS generated in the last step.

+
+
+
+
+

== == Extra parameters

+
+
+

Here are some extra useful parameters that ngx-build-plus provides:

+
+
+
    +
  • +

    --keep-polyfills: This parameter is going to allow us to keep the polyfills. This needs to be used with caution, avoiding using multiple different polyfills that could cause an error is necessary.

    +
  • +
  • +

    --extraWebpackConfig webpack.extra.js: This parameter allows us to create a JavaScript file inside our Angular Elements project with the name of different libraries. Using webpack these libraries will not be included in the Angular Element. This is useful to lower the size of our Angular Element by removing libraries shared. Example:

    +
  • +
+
+
+
+
const webpack = require('webpack');
+
+module.exports = {
+    "externals": {
+        "rxjs": "rxjs",
+        "@angular/core": "ng.core",
+        "@angular/common": "ng.common",
+        "@angular/common/http": "ng.common.http",
+        "@angular/platform-browser": "ng.platformBrowser",
+        "@angular/platform-browser-dynamic": "ng.platformBrowserDynamic",
+        "@angular/compiler": "ng.compiler",
+        "@angular/elements": "ng.elements",
+        "@angular/router": "ng.router",
+        "@angular/forms": "ng.forms"
+    }
+}
+
+
+
+
+
+

==

+
+
+
+
  If some libraries are excluded from the `Angular Element` you will need to add the bundled UMD files of those libraries manually.
+== ==
+
+
+
+
+
+

Using the Angular Element

+
+
+

The Angular Element that got generated in the last step can be used in almost every framework. In this case, the Angular Element is going to be used in html:

+
+
+
Listing 1. Sample index.html version without ngx-build-plus
+
+
<html>
+    <head>
+        <link rel="stylesheet" href="styles.css">
+    </head>
+    <body>
+        <div id="container">
+
+        </div>
+        <!--Use of the element non dynamically-->
+        <!--<plate-form platename="test" platedescription="test"></plate-form>-->
+        <script src="./devon4ngAngularElements.js"> </script>
+        <script>
+                var elContainer = document.getElementById('container');
+                var el= document.createElement('dish-form');
+                el.setAttribute('platename','test');
+                el.setAttribute('platedescription','test');
+                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+                elContainer.appendChild(el);
+        </script>
+    </body>
+</html>
+
+
+
+
Listing 2. Sample index.html version with ngx-build-plus
+
+
<html>
+    <head>
+        <link rel="stylesheet" href="styles.css">
+    </head>
+    <body>
+        <div id="container">
+
+        </div>
+        <!--Use of the element non dynamically-->
+        <!--<plate-form platename="test" platedescription="test"></plate-form>-->
+         <script src="./polyfills.js"> </script> <!-- Created using --keep-polyfills options -->
+        <script src="./scripts.js"> </script>
+         <script src="./main.js"> </script>
+        <script>
+                var elContainer = document.getElementById('container');
+                var el= document.createElement('dish-form');
+                el.setAttribute('platename','test');
+                el.setAttribute('platedescription','test');
+                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+                elContainer.appendChild(el);
+        </script>
+    </body>
+</html>
+
+
+
+

In this html, the css generated in the last step is going to be imported inside the <head> and then, the JavaScript element is going to be imported at the end of the body. After that is done, there are two uses of Angular Elements in the html, one directly with use of the @input() variables as parameters commented in the html:

+
+
+
+
....
+        <!--Use of the element non dynamically-->
+        <!--<plate-form platename="test" platedescription="test"></plate-form>-->
+....
+
+
+
+

and one dynamically inside the script:

+
+
+
+
....
+        <script>
+                var elContainer = document.getElementById('container');
+                var el= document.createElement('dish-form');
+                el.setAttribute('platename','test');
+                el.setAttribute('platedescription','test');
+                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+                elContainer.appendChild(el);
+        </script>
+....
+
+
+
+

This JavaScript is an example of how to dynamically create an Angular Element, inserting attributes to fill our @Input() variables and listening to the @Output() that was defined earlier. This is done with:

+
+
+
+
                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+
+
+
+

This allows JavaScript to hook with the @Output() event emitter that was defined. When this event gets called, another component that was defined gets inserted dynamically.

+
+
+
+
+

Angular Element within another Angular project

+
+
+

In order to use an Angular Element within another Angular project the following steps need to be followed:

+
+
+
+
+

Copy bundled script and css to resources

+
+
+

First copy the generated .js and .css inside assets in the corresponding folder.

+
+
+
+
+

Add bundled script to angular.json

+
+
+

Inside angular.json both of the files that were copied in the last step are going to be included. This will be done both, in test and in build. Including it on the test, will allow to perform unitary tests.

+
+
+
+
{
+....
+  "architect": {
+    ....
+    "build": {
+      ....
+      "styles": [
+        ....
+          "src/assets/css/devon4ngAngularElements.css"
+        ....
+      ]
+      ....
+      "scripts": [
+        "src/assets/js/devon4ngAngularElements.js"
+      ]
+      ....
+    }
+    ....
+    "test": {
+      ....
+      "styles": [
+        ....
+          "src/assets/css/devon4ngAngularElements.css"
+        ....
+      ]
+      ....
+      "scripts": [
+        "src/assets/js/devon4ngAngularElements.js"
+      ]
+      ....
+    }
+  }
+}
+
+
+
+

By declaring the files in the angular.json angular will take care of including them in a proper way.

+
+
+
+
+

==

+
+
+
+
  If you are using Nx, the configuration file `angular.json` might be named as `workspace.json`, depending on how you had setup the workspace. The structure of the file remains similar though.
+== ==
+
+
+
+
+
+

Using Angular Element

+
+
+

There are two ways that Angular Element can be used:

+
+
+
+
+

== Create component dynamically

+
+
+

In order to add the component in a dynamic way, first adding a container is necessary:

+
+
+

app.component.html

+
+
+
+
....
+<div id="container">
+</div>
+....
+
+
+
+

With this container created, inside the app.component.ts a method is going to be created. This method is going to find the container, create the dynamic element and append it into the container.

+
+
+

app.component.ts

+
+
+
+
export class AppComponent implements OnInit {
+  ....
+  ngOnInit(): void {
+    this.createComponent();
+  }
+  ....
+  createComponent(): void {
+    const container = document.getElementById('container');
+    const component = document.createElement('dish-form');
+    container.appendChild(component);
+  }
+  ....
+
+
+
+
+
+

== Using it directly

+
+
+

In order to use it directly on the templates, in the app.module.ts the CUSTOM_ELEMENTS_SCHEMA needs to be added:

+
+
+
+
....
+import { NgModule, CUSTOM_ELEMENTS_SCHEMA } from '@angular/core';
+....
+@NgModule({
+  ....
+  schemas: [ CUSTOM_ELEMENTS_SCHEMA ],
+
+
+
+

This is going to allow the use of the Angular Element in the templates directly:

+
+
+

app.component.html

+
+
+
+
....
+<div id="container">
+  <dish-form></dish-form>
+</div>
+
+
+
+

You can find a working example of Angular Elements in our devon4ts-samples repo by referring the samples named angular-elements and angular-elements-test.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-angular-lazy-loading.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-angular-lazy-loading.html new file mode 100644 index 00000000..ac42d82e --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-angular-lazy-loading.html @@ -0,0 +1,684 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Lazy loading

+
+
+

When the development of an application starts, it just contains a small set of features, so the app usually loads fast. However, as new features are added, the overall application size grows and its loading speed decreases. It is in this context that lazy loading finds its place. Lazy loading is a design pattern that defers the initialization of objects until they are needed, so, for example, users that just access a website’s home page do not need to have other areas loaded. Angular handles lazy loading through the routing module, which redirects to requested pages. Those pages can be loaded at start or on demand.

+
+
+
+
+

An example with Angular

+
+
+

To explain how lazy loading is implemented using Angular, a basic sample app is going to be developed. This app will consist of a window named "level 1" that contains two buttons that redirect to other windows in a "second level". It is a simple example, but useful to understand the relation between Angular modules and lazy loading.

+
+
+
+Levels app structure +
+
Figure 1. Levels app structure.
+
+
+

This graphic shows that modules act as gates to access components "inside" them.

+
+
+

Because the objective of this guide is related mainly with logic, the html structure and SCSS styles are less relevant, but the complete code can be found as a sample here.

+
+
+
+
+

Implementation

+
+
+

First write in a console ng new level-app --routing, to generate a new project called level-app including an app-routing.module.ts file (--routing flag). If you are using Nx, the command would be nx generate @nrwl/angular:app level-app --routing in your Nx workspace.

+
+
+

In the file app.component.html delete all the content except the router-outlet tag.

+
+
+
Listing 1. File app.component.html
+
+
<router-outlet></router-outlet>
+
+
+
+

The next steps consist of creating feature modules.

+
+
+
    +
  • +

    run ng generate module first --routing to generate a module named first.

    +
  • +
  • +

    run ng generate module first/second-left --routing to generate a module named second-left under first.

    +
  • +
  • +

    run ng generate module first/second-right --routing to generate a module second-right under first.

    +
  • +
  • +

    run ng generate component first/first to generate a component named first inside the module first.

    +
  • +
  • +

    run ng generate component first/second-left/content to generate a component content inside the module second-left.

    +
  • +
  • +

    run ng generate component first/second-right/content to generate a component content inside the module second-right.

    +
  • +
+
+
+
+
+

==

+
+
+
+
  If you are using Nx, you have to specify the project name (level-app) along with the --project flag. For example, command for generating the first module will be `ng generate module first --project=level-app --routing`
+== ==
+
+
+
+

To move between components we have to configure the routes used:

+
+
+

In app-routing.module.ts add a path 'first' to FirstComponent and a redirection from '' to 'first'.

+
+
+
Listing 2. File app-routing.module.ts.
+
+
...
+import { FirstComponent } from './first/first/first.component';
+
+const routes: Routes = [
+  {
+    path: 'first',
+    component: FirstComponent
+  },
+  {
+    path: '',
+    redirectTo: 'first',
+    pathMatch: 'full',
+  },
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule],
+})
+export class AppRoutingModule {}
+
+
+
+

In app.module.ts import the module which includes FirstComponent.

+
+
+
Listing 3. File app.module.ts
+
+
....
+import { FirstModule } from './first/first.module';
+
+@NgModule({
+  ...
+  imports: [
+    ....
+    FirstModule
+  ],
+  ...
+})
+export class AppModule { }
+
+
+
+

In first-routing.module.ts add routes that direct to the content of SecondRightModule and SecondLeftModule. The content of both modules have the same name so, in order to avoid conflicts the name of the components are going to be changed using as ( original-name as new-name).

+
+
+
Listing 4. File first-routing.module.ts
+
+
...
+import { ContentComponent as ContentLeft} from './second-left/content/content.component';
+import { ContentComponent as ContentRight} from './second-right/content/content.component';
+import { FirstComponent } from './first/first.component';
+
+const routes: Routes = [
+  {
+    path: '',
+    component: FirstComponent
+  },
+  {
+    path: 'first/second-left',
+    component: ContentLeft
+  },
+  {
+    path: 'first/second-right',
+    component: ContentRight
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule]
+})
+export class FirstRoutingModule { }
+
+
+
+

In first.module.ts import SecondLeftModule and SecondRightModule.

+
+
+
Listing 5. File first.module.ts
+
+
...
+import { SecondLeftModule } from './second-left/second-left.module';
+import { SecondRightModule } from './second-right/second-right.module';
+
+@NgModule({
+  ...
+  imports: [
+    ...
+    SecondLeftModule,
+    SecondRightModule,
+  ]
+})
+export class FirstModule { }
+
+
+
+

Using the current configuration, we have a project that loads all the modules in an eager way. Run ng serve (with --project=level-app in an Nx workspace) to see what happens.

+
+
+

First, during the compilation we can see that just a main file is built.

+
+
+
+Compile eager +
+
Figure 2. Compile eager.
+
+
+

If we go to http://localhost:4200/first and open developer options (F12 on Chrome), it is found that a document named "first" is loaded.

+
+
+
+First level eager +
+
Figure 3. First level eager.
+
+
+

If we click on [Go to right module] a second level module opens, but there is no 'second-right' document.

+
+
+
+Second level right eager +
+
Figure 4. Second level right eager.
+
+
+

But, typing the URL directly will load 'second-right' but no 'first', even if we click on [Go back]

+
+
+
+Second level right eager +
+
Figure 5. Second level right eager direct URL.
+
+
+

Modifying an angular application to load its modules lazily is easy, you have to change the routing configuration of the desired module (for example FirstModule).

+
+
+
Listing 6. File app-routing.module.ts.
+
+
const routes: Routes = [
+  {
+    path: 'first',
+    loadChildren: () => import('./first/first.module').then(m => m.FirstModule),
+  },
+  {
+    path: '',
+    redirectTo: 'first',
+    pathMatch: 'full',
+  },
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule],
+})
+export class AppRoutingModule {}
+
+
+
+

Notice that instead of loading a component, you dynamically import it in a loadChildren attribute because modules act as gates to access components "inside" them. Updating the app to load lazily has four consequences:

+
+
+
    +
  1. +

    No component attribute.

    +
  2. +
  3. +

    No import of FirstComponent.

    +
  4. +
  5. +

    FirstModule import has to be removed from the imports array at app.module.ts.

    +
  6. +
  7. +

    Change of context.

    +
  8. +
+
+
+

If we check first-routing.module.ts again, we can see that the path for ContentLeft and ContentRight is set to 'first/second-left' and 'first/second-right' respectively, so writing http://localhost:4200/first/second-left will redirect us to ContentLeft. However, after loading a module with loadChildren setting the path to 'second-left' and 'second-right' is enough because it acquires the context set by AppRoutingModule.

+
+
+
Listing 7. File first-routing.module.ts
+
+
const routes: Routes = [
+  {
+    path: '',
+    component: FirstComponent
+  },
+  {
+    path: 'second-left',
+    component: ContentLeft
+  },
+  {
+    path: 'second-right',
+    component: ContentRight
+  }
+];
+
+
+
+

If we go to 'first' then FirstModule is situated in '/first' but also its children ContentLeft and ContentRight, so it is not necessary to write in their path 'first/second-left' and 'first/second-right', because that will situate the components on 'first/first/second-left' and 'first/first/second-right'.

+
+
+
+First level wrong path +
+
Figure 6. First level lazy wrong path.
+
+
+

When we compile an app with lazy loaded modules, files containing them will be generated

+
+
+
+First level lazy compilation +
+
Figure 7. First level lazy compilation.
+
+
+

And if we go to developer tools → network, we can find those modules loaded (if they are needed).

+
+
+
+First level lazy +
+
Figure 8. First level lazy.
+
+
+

To load the component ContentComponent of SecondLeftModule lazily, we have to load SecondLeftModule as a children of FirstModule:

+
+
+
    +
  • +

    Change component to loadChildren and reference SecondLeftModule.

    +
  • +
+
+
+
Listing 8. File first-routing.module.ts.
+
+
const routes: Routes = [
+  {
+    path: '',
+    component: FirstComponent
+  },
+  {
+    path: 'second-left',
+    loadChildren: () => import('./second-left/second-left.module').then(m => m.SecondLeftModule),
+  },
+  {
+    path: 'second-right',
+    component: ContentRight
+  }
+];
+
+
+
+
    +
  • +

    Remove SecondLeftModule at first.component.ts

    +
  • +
  • +

    Route the components inside SecondLeftModule. Without this step nothing would be displayed.

    +
  • +
+
+
+
Listing 9. File second-left-routing.module.ts.
+
+
...
+import { ContentComponent } from './content/content.component';
+
+const routes: Routes = [
+  {
+    path: '',
+    component: ContentComponent
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule]
+})
+export class SecondLeftRoutingModule { }
+
+
+
+
    +
  • +

    run ng serve to generate files containing the lazy modules.

    +
  • +
+
+
+
+Second level lazy +
+
Figure 9. Second level lazy loading compilation.
+
+
+

Clicking on [Go to left module] triggers the load of SecondLeftModule.

+
+
+
+Second level lazy network +
+
Figure 10. Second level lazy loading network.
+
+
+
+
+

Conclusion

+
+
+

Lazy loading is a pattern useful when new features are added, these features are usually identified as modules which can be loaded only if needed as shown in this document, reducing the time spent loading an application.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-angular-library.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-angular-library.html new file mode 100644 index 00000000..baa04c0f --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-angular-library.html @@ -0,0 +1,566 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Library

+
+
+

Angular CLI provides us with methods that allow the creation of a library. After that, using a package manager (either npm or yarn), the library can be built and packed, which will later allow us to install/publish it.

+
+
+
+
+

What's a library?

+
+
+

From wikipedia: a library is a collection of non-volatile resources used by computer programs, often for software development. These may include configuration data, documentation, help data, message templates, pre-written code and subroutines, classes, values or type specifications.

+
+
+
+
+

How to build a library

+
+
+

In this section, a library is going to be built step by step. Please note, we will be explaining the steps using both Angular CLI and Nx CLI. You are free to choose either one for your development.

+
+
+
+
+

1. Creating an empty application

+
+
+

First, using Angular CLI, we are going to generate an empty application which will later be filled with the generated library. In order to do so, Angular CLI allows us to add to ng new "application-name" an option (--create-application). This option is going to tell Angular CLI not to create the initial app project. This is convenient since a library is going to be generated in later steps. Using the command ng new "application-name" --create-application=false, an empty project with the desired name is created.

+
+
+
+
ng new "application-name" --create-application=false
+
+
+
+

This step is much easier and more straightforward when using Nx. Nx allows us to work in a monorepo workspace, where you can develop a project as an application, a library, or a tool. You can follow this guide to get started with Nx. The command for generating a library in Nx is nx generate @nrwl/angular:library library-name --publishable --importPath=library-name. This will create an empty Angular application which we can modify and publish as a library.

+
+
+
+
+

2. Generating a library

+
+
+

After generating an empty application, a library is going to be generated. Inside the folder of the project, the Angular CLI command ng generate library "library-name" is going to generate the library as a project (projects/"library-name"). As an addition, the option --prefix="library-prefix-wanted" allows us to switch the default prefix that Angular generated with (lib). Using the option to change the prefix the command will look like this ng generate library "library-name" --prefix="library-prefix-wanted".

+
+
+
+
ng generate library "library-name" --prefix="library-prefix-wanted"
+
+
+
+

If you are using Nx, this step is not needed as it is already covered in step 1. In this case, the library project will be generated in the libs folder of a Nx workspace.

+
+
+
+
+

3. Modifying our library

+
+
+

In the last step we generated a library. This automatically generates a module, service and a component inside projects/"library-name" that we can modify, adding new methods, components, etc. that we want to use in other projects. We can generate other elements using the usual Angular CLI generate commands, adding the option --project="library-name"; this will allow us to generate elements within our project. An example of this is: ng generate service "name" --project="library-name".

+
+
+
+
ng generate "element" "name" --project="library-name"
+
+
+
+

You can use the same command as above in a Nx workspace.

+
+
+
+
+

4. Exporting the generated things

+
+
+

Inside the library (projects/"library-name") there’s a public_api.ts which is the file that exports the elements inside the library. (The file is named index.ts in an Nx workspace). In case we generated other things, this file needs to be modified by adding the extra exports with the generated elements. In addition, changing the library version is possible in the file package.json.

+
+
+
+
+

5. Building our library

+
+
+

Once we added the necessary exports, in order to use the library in other applications, we need to build the library. The command ng build "library-name" is going to build the library, generating the necessary files in "project-name"/dist/"library-name".

+
+
+
+
ng build "library-name"
+
+
+
+

You can use the same command in Nx as well. Only the path for the generated files will be slightly different: "project-name"/dist/libs/"library-name"

+
+
+
+
+

6. Packing the library

+
+
+

In this step we are going to pack the build library. In order to do so, we need to go inside dist/"library-name" (or dist/libs/"library-name") and then run either npm pack or yarn pack to generate a "library-name-version.tgz" file.

+
+
+
Listing 1. Packing using npm
+
+
npm pack
+
+
+
+
Listing 2. Packing using yarn
+
+
yarn pack
+
+
+
+
+
+

7. Publishing to npm repository (optional)

+
+
+
    +
  • +

    Add a README.md and LICENSE file. The text inside README.md will be used in you npm package web page as documentation.

    +
  • +
  • +

    run npm adduser if you do not have a npm account to create it, otherwise run npm login and introduce your credentials.

    +
  • +
  • +

    run npm publish inside dist/"library-name" folder.

    +
  • +
  • +

    Check that the library is published: https://npmjs.com/package/library-name

    +
  • +
+
+
+
+
+

8. Installing our library in other projects

+
+
+

In this step we are going to install/add the library on other projects.

+
+
+
+
+

== npm

+
+
+

In order to add the library in other applications, there are two ways:

+
+
+
    +
  • +

    Option 1: From inside the application where the library is going to get used, using the command npm install "path-to-tgz"/"library-name-version.tgz" allows us to install the .tgz generated in Packing the library.

    +
  • +
  • +

    Option 2: run npm install "library-name" to install it from npm repository.

    +
  • +
+
+
+
+
+

== yarn

+
+
+

To add the package using yarn:

+
+
+
    +
  • +

    Option 1: From inside the application where the library is going to get used, using the command yarn add "path-to-tgz"/"library-name-version.tgz" allows us to install the .tgz generated in Packing the library.

    +
  • +
  • +

    Option 2: run yarn add "library-name" to install it from npm repository.

    +
  • +
+
+
+
+
+

9. Using the library

+
+
+

Finally, once the library was installed with either packet manager, you can start using the elements from inside like they would be used in a normal element inside the application. Example app.component.ts:

+
+
+
+
import { Component, OnInit } from '@angular/core';
+import { MyLibraryService } from 'my-library';
+
+@Component({
+  selector: 'app-root',
+  templateUrl: './app.component.html',
+  styleUrls: ['./app.component.scss']
+})
+export class AppComponent implements OnInit {
+
+  toUpper: string;
+
+  constructor(private myLibraryService: MyLibraryService) {}
+  title = 'devon4ng library test';
+  ngOnInit(): void {
+    this.toUpper = this.myLibraryService.firstLetterToUpper('test');
+  }
+}
+
+
+
+

Example app.component.html:

+
+
+
+
<!--The content below is only a placeholder and can be replaced.-->
+<div style="text-align:center">
+  <h1>
+    Welcome to {{ title }}!
+  </h1>
+  <img width="300" alt="Angular Logo" src="data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAyNTAgMjUwIj4KICAgIDxwYXRoIGZpbGw9IiNERDAwMzEiIGQ9Ik0xMjUgMzBMMzEuOSA2My4ybDE0LjIgMTIzLjFMMTI1IDIzMGw3OC45LTQzLjcgMTQuMi0xMjMuMXoiIC8+CiAgICA8cGF0aCBmaWxsPSIjQzMwMDJGIiBkPSJNMTI1IDMwdjIyLjItLjFWMjMwbDc4LjktNDMuNyAxNC4yLTEyMy4xTDEyNSAzMHoiIC8+CiAgICA8cGF0aCAgZmlsbD0iI0ZGRkZGRiIgZD0iTTEyNSA1Mi4xTDY2LjggMTgyLjZoMjEuN2wxMS43LTI5LjJoNDkuNGwxMS43IDI5LjJIMTgzTDEyNSA1Mi4xem0xNyA4My4zaC0zNGwxNy00MC45IDE3IDQwLjl6IiAvPgogIDwvc3ZnPg== ">
+</div>
+<h2>Here is my library service being used: {{toUpper}}</h2>
+<lib-my-library></lib-my-library>
+
+
+
+

Example app.module.ts:

+
+
+
+
import { BrowserModule } from '@angular/platform-browser';
+import { NgModule } from '@angular/core';
+
+import { AppRoutingModule } from './app-routing.module';
+import { AppComponent } from './app.component';
+
+import { MyLibraryModule } from 'my-library';
+@NgModule({
+  declarations: [
+    AppComponent
+  ],
+  imports: [
+    BrowserModule,
+    AppRoutingModule,
+    MyLibraryModule
+  ],
+  providers: [],
+  bootstrap: [AppComponent]
+})
+export class AppModule { }
+
+
+
+

The result from using the library:

+
+
+
+result +
+
+
+
+
+

devon4ng libraries

+
+
+

In devonfw/devon4ng-library you can find some useful libraries:

+
+
+
    +
  • +

    Authorization module: This devon4ng Angular module adds rights-based authorization to your Angular app.

    +
  • +
  • +

    Cache module: Use this devon4ng Angular module when you want to cache requests to server. You may configure it to store in cache only the requests you need and to set the duration you want.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-angular-mock-service.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-angular-mock-service.html new file mode 100644 index 00000000..a3e35eb9 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-angular-mock-service.html @@ -0,0 +1,409 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Mock Service

+
+
+

We’ve all been there: A new idea comes, let’s quickly prototype it. But wait, there’s no back-end. What can we do?

+
+
+

Below you will find a solution that will get you started quickly and easily. The idea is to write a simple mock service that helps us by feeding data into our components.

+
+
+
+
+

The app we start with

+
+
+

Let’s say you have a simple boilerplate code, with your favorite styling library hooked up and you’re ready to go. The angular-material-basic-layout sample is a good starting place.

+
+
+
+
+

The Components

+
+
+

Components - are the building blocks of our application. Their main role is to enable fragments of user interfaces. They will either display data (a list, a table, a chart, etc.), or 'collect' user interaction (e.g: a form, a menu, etc.)

+
+
+

Components stay at the forefront of the application. They should also be reusable (as much as possible). Reusability is key for what we are trying to achieve - a stable, maintainable front-end where multiple people can contribute and collaborate.

+
+
+

In our project, we are at the beginning. That means we may have more ideas than plans. We are exploring possibilities. In order to code efficiently:
+1) We will not store mock data in the components.
+2) We will not fetch or save data directly in the components.

+
+ +
+
+
+

The Service

+
+
+

So, how do we get data in our app? How do we propagate the data to the components, and how can we send user interaction from the components to our data "manager" logic?

+
+
+

The answer to all these questions is an Angular Service (that we will just call a service from now on).

+
+
+

A service is an injectable logic that can be consumed by all the components that need it. It can carry manipulation functions and ,in our case, fetch data from a provider.

+
+
+
+Service Architecture +
+
Figure 1. Angular Components & Services architecture.
+
+
+

Inside the Angular app, an Injector gives each component access to its required services. It’s good coding practice to use a distinct service for each data type you want to manipulate. The type is described in an interface.

+
+
+

Still, our ideas drive in different ways, so we have to stay flexible. We cannot use a database at the moment, but we want a way to represent data on screen, which can grow organically.

+
+ +
+
+
+

The Model

+
+
+
+Data Box +
+
Figure 2. Data box in relation to services and components.
+
+
+

Let’s consider a 'box of data' represented in JSON. Physically, this means a folder with some JSON/TS files in it. They are located in the app/mock folder. The example uses only one mock data file. The file is typed according to our data model.

+
+
+

Pro tip: separate your files based on purpose. In your source code, put the mock files in the mock folder, components in the components folder, services in the services folder and data models in the models folder.

+
+
+
+Project Structure +
+
Figure 3. Project structure.
+
+
+

Aligned with the Angular way of development, we are implementing a model-view-controller pattern.

+
+
+

The model is represented by the interfaces we make. These interfaces describe the data structures we will use in our application. In this example, there is one data model, corresponding with the 'type' of data that was mocked. In the models folder you will find the .ts script file that describes chemical elements. The corresponding mock file defines a set of chemical element objects, in accordance with our interface definition.

+
+
+
+
+

Use case

+
+
+

Enough with the theory, let’s see what we have here. The app presents 3 pages as follows:

+
+
+
    +
  • +

    A leader board with the top 3 elements

    +
  • +
  • +

    A data table with all the elements

    +
  • +
  • +

    A details page that reads a route parameter and displays the details of the element.

    +
  • +
+
+
+

There are a lot of business cases which have these requirements:

+
+
+
    +
  • +

    A leader board can be understood as "the most popular items in a set", "the latest updated items", "you favorite items" etc.

    +
  • +
  • +

    A data table with CRUD operations is very useful (in our case we only view details or delete an item, but they illustrate two important things: the details view shows how to navigate and consume a parametric route, the delete action shows how to invoke service operations over the loaded data - this means that the component is reusable and when the data comes with and API, only the service will need it’s implementation changed)

    +
  • +
+
+
+

Check out the angular-mock-service sample from the apps folder and easily get started with front-end development using dummy data.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-angular-pwa.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-angular-pwa.html new file mode 100644 index 00000000..a3edfb57 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-angular-pwa.html @@ -0,0 +1,816 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Progressive Web App

+
+
+

Progressive web applications (PWA) are web application that offer better user experience than the traditional ones. In general, they solve problems related with reliability and speed:

+
+
+
    +
  • +

    Reliability: PWA are stable. In this context stability means than even with slow connections or even with no network at all, the application still works. To achieve this, some basic resources like styles, fonts, requests, …​ are stored; due to this caching, it is not possible to assure that the content is always up-to-date.

    +
  • +
  • +

    Speed: When a user opens an application, he or she will expect it to load almost immediately (almost 53% of users abandon sites that take longer than 3 seconds, source: https://developers.google.com/web/progressive-web-apps/#fast).

    +
  • +
+
+
+

PWAs use a script called a service worker, which runs in the background and essentially acts as a proxy between the web app and the network, intercepting requests and acting depending on the network conditions.

+
+
+
+
+

Assumptions

+
+
+

This guide assumes that you already have installed:

+
+
+
    +
  • +

    NodeJS

    +
  • +
  • +

    npm package manager

    +
  • +
  • +

    Angular CLI / Nx CLI

    +
  • +
+
+
+
+
+

Sample Application

+
+
+
+My Thai Star recommendation +
+
Figure 1. Basic angular PWA.
+
+
+

To explain how to build PWA using angular, a basic application is going to be built. This app will be able to ask for resources and save in the cache in order to work even offline.

+
+
+
+
+

Step 1: Create a new project

+
+
+

This step can be completed with one simple command using the Angular CLI: ng new <name>, where <name> is the name for the app. In this case, the app is going to be named basic-ng-pwa. If you are using Nx CLI, you can use the command nx generate @nrwl/angular:app <name> in your Nx workspace. You can follow this guide if you want to get started with Nx workspace.

+
+
+
+
+

Step 2: Create a service

+
+
+

Web applications usually use external resources, making it necessary to add services which can retrieve those resources. This application gets a dish from My Thai Star’s back-end and shows it. To do so, a new service is going to be created.

+
+
+
    +
  • +

    go to project folder: cd basic-ng-pwa. If using Nx, go to the root folder of the workspace.

    +
  • +
  • +

    run ng generate service data. For Nx CLI, specify the project name with --project flag. So the command becomes ng generate service data --project=basic-ng-pwa

    +
  • +
  • +

    Modify data.service.ts, environment.ts, environment.prod.ts

    +
  • +
+
+
+

To retrieve data with this service, you have to import the module HttpClient and add it to the service’s constructor. Once added, use it to create a function getDishes() that sends an HTTP request to My Thai Star’s back-end. The URL of the back-end can be stored as an environment variable MY_THAI_STAR_DISH.

+
+
+

data.service.ts

+
+
+
+
  ...
+  import { HttpClient } from '@angular/common/http';
+  import { MY_THAI_STAR_DISH } from '../environments/environment';
+  ...
+
+  export class DataService {
+    constructor(private http: HttpClient) {}
+
+    /* Get data from Back-end */
+    getDishes() {
+      return this.http.get(MY_THAI_STAR_DISH);
+    }
+    ...
+  }
+
+
+
+

environments.ts

+
+
+
+
  ...
+  export const MY_THAI_STAR_DISH =
+  'https://mts-devonfw-core.cloud.okteto.net/api/services/rest/dishmanagement/v1/dish/1';
+  ...
+
+
+
+

environments.prod.ts

+
+
+
+
  ...
+  export const MY_THAI_STAR_DISH =
+  'https://mts-devonfw-core.cloud.okteto.net/api/services/rest/dishmanagement/v1/dish/1';
+  ...
+
+
+
+
+
+

Step 3: Use the service

+
+
+

The component AppComponent implements the interface OnInit and inside its method ngOnInit() the subscription to the services is done. When a dish arrives, it is saved and shown (app.component.html).

+
+
+
+
  ...
+  import { DataService } from './data.service';
+  export class AppComponent implements OnInit {
+  dish: { name: string; description: string } = { name: '', description: ''};
+
+  ...
+  ngOnInit() {
+    this.data
+      .getDishes()
+      .subscribe(
+        (dishToday: { dish: { name: string; description: string } }) => {
+          this.dish = {
+            name: dishToday.dish.name,
+            description: dishToday.dish.description,
+          };
+        },
+      );
+  }
+}
+
+
+
+
+
+

Step 4: Structures, styles and updates

+
+
+

This step shows interesting code inside the sample app. The complete content can be found in devon4ts-samples.

+
+
+

index.html

+
+
+

To use the Montserrat font add the following link inside the head tag of the app’s index.html file.

+
+
+
+
  <link href="https://fonts.googleapis.com/css?family=Montserrat" rel="stylesheet">
+
+
+
+

styles.scss

+
+
+
+
  body {
+    ...
+    font-family: 'Montserrat', sans-serif;
+  }
+
+
+
+

app.component.ts

+
+
+

This file is also used to reload the app if there are any changes.

+
+
+
    +
  • +

    SwUpdate: This object comes inside the @angular/pwa package and it is used to detect changes and reload the page if needed.

    +
  • +
+
+
+
+
  ...
+  import { SwUpdate } from '@angular/service-worker';
+
+  export class AppComponent implements OnInit {
+
+  ...
+    constructor(updates: SwUpdate, private data: DataService) {
+      updates.available.subscribe((event) => {
+        updates.activateUpdate().then(() => document.location.reload());
+      });
+    }
+    ...
+  }
+
+
+
+
+
+

Step 5: Make it Progressive.

+
+
+

Install Angular PWA package with ng add @angular/pwa --project=<name>. As before substitute name with basic-ng-pwa.

+
+
+

The above command completes the following actions:

+
+
+
    +
  1. +

    Adds the @angular/service-worker package to your project.

    +
  2. +
  3. +

    Enables service worker build support in the CLI.

    +
  4. +
  5. +

    Imports and registers the service worker in the app module.

    +
  6. +
  7. +

    Updates the index.html file:

    +
    +
      +
    • +

      Includes a link to add the manifest.json file.

      +
    • +
    • +

      Adds meta tags for theme-color.

      +
    • +
    • +

      Installs icon files to support the installed Progressive Web App (PWA).

      +
    • +
    • +

      Creates the service worker configuration file called ngsw-config.json, which specifies the caching behaviors and other settings.

      +
    • +
    +
    +
  8. +
+
+
+
+
+

== manifest.json

+
+
+

manifest.json is a file that allows to control how the app is displayed in places where native apps are displayed.

+
+
+

Fields

+
+
+

name: Name of the web application.

+
+
+

short_name: Short version of name.

+
+
+

theme_color: Default theme color for an application context.

+
+
+

background_color: Expected background color of the web application.

+
+
+

display: Preferred display mode.

+
+
+

scope: Navigation scope of this web application’s application context.

+
+
+

start_url: URL loaded when the user launches the web application.

+
+
+

icons: Array of icons that serve as representations of the web app.

+
+
+

Additional information can be found here.

+
+
+
+
+

== ngsw-config.json

+
+
+

ngsw-config.json specifies which files and data URLs have to be cached and updated by the Angular service worker.

+
+
+

Fields

+
+
+
    +
  • +

    index: File that serves as index page to satisfy navigation requests.

    +
  • +
  • +

    assetGroups: Resources that are part of the app version that update along with the app.

    +
    +
      +
    • +

      name: Identifies the group.

      +
    • +
    • +

      installMode: How the resources are cached (pre-fetch or lazy).

      +
    • +
    • +

      updateMode: Caching behavior when a new version of the app is found (pre-fetch or lazy).

      +
    • +
    • +

      resources: Resources to cache. There are three groups.

      +
      +
        +
      • +

        files: Lists patterns that match files in the distribution directory.

        +
      • +
      • +

        urls: URL patterns matched at runtime.

        +
      • +
      +
      +
    • +
    +
    +
  • +
  • +

    dataGroups: Useful for API requests.

    +
    +
      +
    • +

      name: Identifies the group.

      +
    • +
    • +

      urls: URL patterns matched at runtime.

      +
    • +
    • +

      version: Indicates that the resources being cached have been updated in a backwards-incompatible way.

      +
    • +
    • +

      cacheConfig: Policy by which matching requests will be cached

      +
      +
        +
      • +

        maxSize: The maximum number of entries, or responses, in the cache.

        +
      • +
      • +

        maxAge: How long responses are allowed to remain in the cache.

        +
        +
          +
        • +

          d: days. (5d = 5 days).

          +
        • +
        • +

          h: hours

          +
        • +
        • +

          m: minutes

          +
        • +
        • +

          s: seconds. (5m20s = 5 minutes and 20 seconds).

          +
        • +
        • +

          u: milliseconds

          +
        • +
        +
        +
      • +
      • +

        timeout: How long the Angular service worker will wait for the network to respond before using a cached response. Same dataformat as maxAge.

        +
      • +
      • +

        strategy: Caching strategies (performance or freshness).

        +
      • +
      +
      +
    • +
    +
    +
  • +
  • +

    navigationUrls: List of URLs that will be redirected to the index file.

    +
  • +
+
+
+

Additional information can be found here.

+
+
+
+
+

Step 6: Configure the app

+
+
+

manifest.json

+
+
+

Default configuration.

+
+
+

 

+
+
+

ngsw-config.json

+
+
+

At assetGroups → resources → urls: In this field the google fonts API is added in order to use Montserrat font even without network.

+
+
+
+
  "urls": [
+          "https://fonts.googleapis.com/**"
+        ]
+
+
+
+

At the root of the json: A data group to cache API calls.

+
+
+
+
  {
+    ...
+    "dataGroups": [{
+      "name": "mythaistar-dishes",
+      "urls": [
+        "https://mts-devonfw-core.cloud.okteto.net/api/services/rest/dishmanagement/v1/dish/1"
+      ],
+      "cacheConfig": {
+        "maxSize": 100,
+        "maxAge": "1h",
+        "timeout": "10s",
+        "strategy": "freshness"
+      }
+    }]
+  }
+
+
+
+
+
+

Step 7: Check that your app is a PWA

+
+
+

To check if an app is a PWA, let’s compare its normal behavior against itself when built for production. Run in the project’s root folder the commands below:

+
+
+

ng build --prod to build the app using production settings.(nx build <name> --prod in Nx CLI)

+
+
+

npm install http-server to install an npm module that can serve your built application. Documentation here.

+
+
+

Go to the dist/basic-ng-pwa/ folder running cd dist/basic-ng-pwa. In an Nx workspace, the path will be dist/apps/basic-ng-pwa

+
+
+

http-server -o to serve your built app.

+
+
+
+Http server running +
+
Figure 2. Http server running on localhost:8081.
+
+
+

 

+
+
+

In another console instance run ng serve (or nx serve basic-ng-pwa for Nx) to open the common app (not built).

+
+
+
+.Angular server running +
+
Figure 3. Angular server running on localhost:4200.
+
+
+

 

+
+
+

The first difference can be found on Developer tools → application, here it is seen that the PWA application (left) has a service worker and the common (right) one does not.

+
+
+
+Application comparison +
+
Figure 4. Application service worker comparison.
+
+
+

 

+
+
+

If the "offline" box is checked, it will force a disconnection from the network. In situations where users do not have connectivity or have a slow connection, the PWA can still be accessed and used.

+
+
+
+Online offline apps +
+
Figure 5. Offline application.
+
+
+

 

+
+
+

Finally, browser extensions like Lighthouse can be used to test whether an application is progressive or not.

+
+
+
+Lighthouse report +
+
Figure 6. Lighthouse report.
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-angular-theming.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-angular-theming.html new file mode 100644 index 00000000..3b78d365 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-angular-theming.html @@ -0,0 +1,774 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Material Theming

+
+
+

Angular Material library offers UI components for developers. Those components follow Google Material Design baselines, but characteristics like colors can be modified in order to adapt them to the needs of the client: corporate colors, corporate identity, dark themes, …​

+
+
+
+
+

Theming basics

+
+
+

In Angular Material, a theme is created mixing multiple colors. Colors and its light and dark variants conform a palette. In general, a theme consists of the following palettes:

+
+
+
    +
  • +

    primary: Most used across screens and components.

    +
  • +
  • +

    accent: Floating action button and interactive elements.

    +
  • +
  • +

    warn: Error state.

    +
  • +
  • +

    foreground: Text and icons.

    +
  • +
  • +

    background: Element backgrounds.

    +
  • +
+
+
+
+Theme palette +
+
Figure 1. Palettes and variants.
+
+
+

In angular material, a palette is represented as a SCSS map.

+
+
+
+SCSS map +
+
Figure 2. SCSS map and palettes.
+
+
+ + + + + +
+ + +Some components can be forced to use primary, accent or warn palettes using the attribute color, for example: <mat-toolbar color="primary">. +
+
+
+
+
+

Pre-built themes

+
+
+

Available pre-built themes:

+
+
+
    +
  • +

    deeppurple-amber.css

    +
  • +
+
+
+
+`deeppurple-amber theme` +
+
Figure 3. deeppurple-amber theme.
+
+
+
    +
  • +

    indigo-pink.css

    +
  • +
+
+
+
+indigo-pink theme +
+
Figure 4. indigo-pink theme.
+
+
+
    +
  • +

    pink-bluegrey.css

    +
  • +
+
+
+
+` pink-bluegrey theme` +
+
Figure 5. pink-bluegrey theme.
+
+
+
    +
  • +

    purple-green.css

    +
  • +
+
+
+
+purple-green theme +
+
Figure 6. purple-green theme.
+
+
+

The pre-built themes can be added using @import.

+
+
+
+
@import '@angular/material/prebuilt-themes/deeppurple-amber.css';
+
+
+
+
+
+

Custom themes

+
+
+

Sometimes pre-built themes do not meet the needs of a project, because color schemas are too specific or do not incorporate branding colors, in those situations custom themes can be built to offer a better solution to the client.

+
+
+

For this topic, we are going to use a basic layout project that can be found in devon4ts-samples repository.

+
+
+
+
+

Basics

+
+
+

Before starting writing custom themes, there are some necessary things that have to be mentioned:

+
+
+
    +
  • +

    Add a default theme: The project mentioned before has just one global SCSS style sheet styles.scss that includes indigo-pink.scss which will be the default theme.

    +
  • +
  • +

    Add @import '~@angular/material/theming'; at the beginning of every style sheet to be able to use Angular Material pre-built color palettes and functions.

    +
  • +
  • +

    Add @include mat-core(); once per project, so if you are writing multiple themes in multiple files you could import those files from a 'central' one (for example styles.scss). This includes all common styles that are used by multiple components.

    +
  • +
+
+
+
+Theme files structure +
+
Figure 7. Theme files structure.
+
+
+
+
+

Basic custom theme

+
+
+

To create a new custom theme, the .scss file containing it has to have imported the angular _theming.scss file (angular/material/theming) file and mat-core included. _theming.scss includes multiple color palettes and some functions that we are going to see below. The file for this basic theme is going to be named styles-custom-dark.scss.

+
+
+

First, declare new variables for primary, accent and warn palettes. Those variables are going to store the result of the function mat-palette.

+
+
+

mat-palette accepts four arguments: base color palette, main, lighter and darker variants (See Palettes and variants.) and returns a new palette including some additional map values: default, lighter and darker ([id_scss_map]). Only the first argument is mandatory.

+
+
+
Listing 1. File styles-custom-dark.scss.
+
+
$custom-dark-theme-primary: mat-palette($mat-pink);
+$custom-dark-theme-accent: mat-palette($mat-blue);
+$custom-dark-theme-warn: mat-palette($mat-red);
+
+
+
+
+

In this example we are using colors available in _theming.scss: mat-pink, mat-blue, mat-red. If you want to use a custom color you need to define a new map, for instance:

+
+
+
Listing 2. File styles-custom-dark.scss custom pink.
+
+
$my-pink: (
+    50 : #fcf3f3,
+    100 : #f9e0e0,
+    200 : #f5cccc,
+    300 : #f0b8b8,
+    500 : #ea9999,
+    900 : #db6b6b,
+    A100 : #ffffff,
+    A200 : #ffffff,
+    A400 : #ffeaea,
+    A700 : #ffd0d0,
+    contrast: (
+        50 : #000000,
+        100 : #000000,
+        200 : #000000,
+        300 : #000000,
+        900 : #000000,
+        A100 : #000000,
+        A200 : #000000,
+        A400 : #000000,
+        A700 : #000000,
+    )
+);
+
+$custom-dark-theme-primary: mat-palette($my-pink);
+...
+
+
+
+ + + + + +
+ + +Some pages allow you to create these palettes easily, for instance: http://mcg.mbitson.com +
+
+
+

Until now, we just have defined primary, accent and warn palettes but what about foreground and background? Angular material has two functions to change both:

+
+
+
    +
  • +

    mat-light-theme: Receives as arguments primary, accent and warn palettes and return a theme whose foreground is basically black (texts, icons, …​), the background is white and the other palettes are the received ones.

    +
  • +
+
+
+
+`deeppurple-amber theme` +
+
Figure 8. Custom light theme.
+
+
+
    +
  • +

    mat-dark-theme: Similar to mat-light-theme but returns a theme whose foreground is basically white and background black.

    +
  • +
+
+
+
+`deeppurple-amber theme` +
+
Figure 9. Custom dark theme.
+
+
+

For this example we are going to use mat-dark-theme and save its result in $custom-dark-theme.

+
+
+
Listing 3. File styles-custom-dark.scss updated with mat-dark-theme.
+
+
...
+
+$custom-dark-theme: mat-dark-theme(
+  $custom-dark-theme-primary,
+  $custom-dark-theme-accent,
+  $custom-dark-theme-warn
+);
+
+
+
+

To apply the saved theme, we have to go to styles.scss and import our styles-custom-dark.scss and include a function called angular-material-theme using the theme variable as argument.

+
+
+
Listing 4. File styles.scss.
+
+
...
+@import 'styles-custom-dark.scss';
+@include angular-material-theme($custom-dark-theme);
+
+
+
+

If we have multiple themes it is necessary to add the include statement inside a css class and use it in src/index.html → app-root component.

+
+
+
Listing 5. File styles.scss updated with custom-dark-theme class.
+
+
...
+@import 'styles-custom-dark.scss';
+
+.custom-dark-theme {
+  @include angular-material-theme($custom-dark-theme);
+}
+
+
+
+
Listing 6. File src/index.html.
+
+
...
+<app-root class="custom-dark-theme"></app-root>
+...
+
+
+
+

This will apply $custom-dark-theme theme for the entire application.

+
+
+
+
+

Full custom theme

+
+
+

Sometimes it is necessary to customize elements other than background and foreground; in those situations we have to create a new function similar to mat-light-theme and mat-dark-theme. Let’s focus on mat-light-theme:

+
+
+
Listing 7. Source code of mat-light-theme
+
+
@function mat-light-theme($primary, $accent, $warn: mat-palette($mat-red)) {
+  @return (
+    primary: $primary,
+    accent: $accent,
+    warn: $warn,
+    is-dark: false,
+    foreground: $mat-light-theme-foreground,
+    background: $mat-light-theme-background,
+  );
+}
+
+
+
+

As we can see, mat-light-theme takes three arguments and returns a map including them as primary, accent and warn color; but there are three more keys in that map: is-dark, foreground and background.

+
+
+
    +
  • +

    is-dark: Boolean true if it is a dark theme, false otherwise.

    +
  • +
  • +

    background: Map that stores the color for multiple background elements.

    +
  • +
  • +

    foreground: Map that stores the color for multiple foreground elements.

    +
  • +
+
+
+

To show which elements can be colored lets create a new theme in a file styles-custom-cap.scss:

+
+
+
Listing 8. File styles-custom-cap.scss: Background and foreground variables.
+
+
@import '~@angular/material/theming';
+
+// custom background and foreground palettes
+$my-cap-theme-background: (
+  status-bar: #0070ad,
+  app-bar: map_get($mat-blue, 900),
+  background: #12abdb,
+  hover: rgba(white, 0.04),
+  card: map_get($mat-red, 800),
+  dialog: map_get($mat-grey, 800),
+  disabled-button: $white-12-opacity,
+  raised-button: map-get($mat-grey, 800),
+  focused-button: $white-6-opacity,
+  selected-button: map_get($mat-grey, 900),
+  selected-disabled-button: map_get($mat-grey, 800),
+  disabled-button-toggle: black,
+  unselected-chip: map_get($mat-grey, 700),
+  disabled-list-option: black,
+);
+
+$my-cap-theme-foreground: (
+  base: yellow,
+  divider: $white-12-opacity,
+  dividers: $white-12-opacity,
+  disabled: rgba(white, 0.3),
+  disabled-button: rgba(white, 0.3),
+  disabled-text: rgba(white, 0.3),
+  hint-text: rgba(white, 0.3),
+  secondary-text: rgba(white, 0.7),
+  icon: white,
+  icons: white,
+  text: white,
+  slider-min: white,
+  slider-off: rgba(white, 0.3),
+  slider-off-active: rgba(white, 0.3),
+);
+
+
+
+

Function which uses the variables defined before to create a new theme:

+
+
+
Listing 9. File styles-custom-cap.scss: Creating a new theme function.
+
+
// instead of creating a theme with mat-light-theme or mat-dark-theme,
+// we will create our own theme-creating function that lets us apply our own foreground and background palettes.
+@function create-my-cap-theme($primary, $accent, $warn: mat-palette($mat-red)) {
+  @return (
+    primary: $primary,
+    accent: $accent,
+    warn: $warn,
+    is-dark: false,
+    foreground: $my-cap-theme-foreground,
+    background: $my-cap-theme-background
+  );
+}
+
+
+
+

Calling the new function and storing its value in $custom-cap-theme.

+
+
+
Listing 10. File styles-custom-cap.scss: Storing the new theme.
+
+
// We use create-my-cap-theme instead of mat-light-theme or mat-dark-theme
+$custom-cap-theme-primary: mat-palette($mat-green);
+$custom-cap-theme-accent: mat-palette($mat-blue);
+$custom-cap-theme-warn: mat-palette($mat-red);
+
+$custom-cap-theme: create-my-cap-theme(
+  $custom-cap-theme-primary,
+  $custom-cap-theme-accent,
+  $custom-cap-theme-warn
+);
+
+
+
+

After defining our new theme, we can import it from styles.scss.

+
+
+
Listing 11. File styles.scss updated with custom-cap-theme class.
+
+
...
+@import 'styles-custom-cap.scss';
+.custom-cap-theme {
+  @include angular-material-theme($custom-cap-theme);
+}
+
+
+
+
+
+

Multiple themes and overlay-based components

+
+
+

Certain components (e.g. menu, select, dialog, etc.) that are inside of a global overlay container require an additional step to be affected by the theme’s CSS class selector.

+
+
+
Listing 12. File app.module.ts
+
+
import {OverlayContainer} from '@angular/cdk/overlay';
+
+@NgModule({
+  // ...
+})
+export class AppModule {
+  constructor(overlayContainer: OverlayContainer) {
+    overlayContainer.getContainerElement().classList.add('custom-cap-theme');
+  }
+}
+
+
+
+
+ +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-app-initializer.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-app-initializer.html new file mode 100644 index 00000000..941213ac --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-app-initializer.html @@ -0,0 +1,790 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

APP_INITIALIZER

+
+ +
+
+
+

What is the APP_INITIALIZER pattern

+
+
+

The APP_INITIALIZER pattern allows an application to choose which configuration is going to be used at the start of the application. This is useful because it allows you to set up different configurations, for example, for Docker or a remote configuration. This provides benefits since it is done at runtime, so there is no need to recompile the whole application to switch configurations.

+
+
+
+
+

What is APP_INITIALIZER

+
+
+

APP_INITIALIZER allows to provide a service in the initialization of the application in a @NgModule. It also allows to use a factory, allowing to create a singleton in the same service. An example can be found in MyThaiStar /core/config/config.module.ts:

+
+
+
+
+

==

+
+
+

The provider expects a Promise to be returned; if you are using Observables, calling the method toPromise() will convert the Observable into a Promise +== ==

+
+
+
+
import { NgModule, APP_INITIALIZER } from '@angular/core';
+import { HttpClientModule } from '@angular/common/http';
+
+import { ConfigService } from './config.service';
+
+@NgModule({
+  imports: [HttpClientModule],
+  providers: [
+    ConfigService,
+    {
+      provide: APP_INITIALIZER,
+      useFactory: ConfigService.factory,
+      deps: [ConfigService],
+      multi: true,
+    },
+  ],
+})
+export class ConfigModule {}
+
+
+
+

This is going to allow the creation of a ConfigService where, using a singleton, the service is going to load an external config depending on a route. This dependence with a route, allows to setup different configuration for docker etc. This is seen in the ConfigService of MyThaiStar:

+
+
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Config, config } from './config';
+
+@Injectable()
+export class ConfigService {
+  constructor(private httpClient: HttpClient) {}
+
+  static factory(appLoadService: ConfigService) {
+    return () => appLoadService.loadExternalConfig();
+  }
+
+  // this method gets external configuration calling /config endpoint
+  //and merges into config object
+  loadExternalConfig(): Promise<any> {
+    if (!environment.loadExternalConfig) {
+      return Promise.resolve({});
+    }
+
+    const promise = this.httpClient
+      .get('/config')
+      .toPromise()
+      .then((settings) => {
+        Object.keys(settings || {}).forEach((k) => {
+          config[k] = settings[k];
+        });
+        return settings;
+      })
+      .catch((error) => {
+        return 'ok, no external configuration';
+      });
+
+    return promise;
+  }
+
+  getValues(): Config {
+    return config;
+  }
+}
+
+
+
+

As it is mentioned earlier, you can see the use of a factory to create a singleton at the start. After that, loadExternalConfig is going to look for a Boolean inside the corresponding environment file inside the path src/environments/, this Boolean loadExternalConfig is going to easily allow to switch to a external config. If it is true, it generates a promise that overwrites the parameters of the local config, allowing to load the external config. Finally, the last method getValues() is going to allow to return the file config with the values (overwritten or not). The local config file from MyThaiStar can be seen here:

+
+
+
+
export enum BackendType {
+  IN_MEMORY,
+  REST,
+  GRAPHQL,
+}
+
+interface Role {
+  name: string;
+  permission: number;
+}
+
+interface Lang {
+  label: string;
+  value: string;
+}
+
+export interface Config {
+  version: string;
+  backendType: BackendType;
+  restPathRoot: string;
+  restServiceRoot: string;
+  pageSizes: number[];
+  pageSizesDialog: number[];
+  roles: Role[];
+  langs: Lang[];
+}
+
+export const config: Config = {
+  version: 'dev',
+  backendType: BackendType.REST,
+  restPathRoot: 'http://localhost:8081/mythaistar/',
+  restServiceRoot: 'http://localhost:8081/mythaistar/services/rest/',
+  pageSizes: [8, 16, 24],
+  pageSizesDialog: [4, 8, 12],
+  roles: [
+    { name: 'CUSTOMER', permission: 0 },
+    { name: 'WAITER', permission: 1 },
+  ],
+  langs: [
+    { label: 'English', value: 'en' },
+    { label: 'Deutsch', value: 'de' },
+    { label: 'Español', value: 'es' },
+    { label: 'Català', value: 'ca' },
+    { label: 'Français', value: 'fr' },
+    { label: 'Nederlands', value: 'nl' },
+    { label: 'हिन्दी', value: 'hi' },
+    { label: 'Polski', value: 'pl' },
+    { label: 'Русский', value: 'ru' },
+    { label: 'български', value: 'bg' },
+  ],
+};
+
+
+
+

Finally, inside a environment file src/environments/environment.ts the use of the Boolean loadExternalConfig is seen:

+
+
+
+
// The file contents for the current environment will overwrite these during build.
+// The build system defaults to the dev environment which uses `environment.ts`, but if you do
+// `ng build --env=prod` then `environment.prod.ts` will be used instead.
+// The list of which env maps to which file can be found in `.angular-cli.json`.
+
+export const environment: {
+  production: boolean;
+  loadExternalConfig: boolean;
+} = { production: false, loadExternalConfig: false };
+
+
+
+
+
+

Creating a APP_INITIALIZER configuration

+
+
+

This section is going to be used to create a new APP_INITIALIZER basic example. For this, a basic app with angular is going to be generated using ng new "appname" substituting appname for the name of the app opted. +If you are using Nx, the command would be nx generate @nrwl/angular:app "appname" in your Nx workspace. Click here to get started with using Nx.

+
+
+
+
+

Setting up the config files

+
+ +
+
+
+

Docker external configuration (Optional)

+
+
+

This section is only done if there is a docker configuration in the app you are setting up this type of configuration.

+
+
+

1.- Create in the root folder /docker-external-config.json. This external config is going to be used when the application is loaded with docker (if the Boolean to load the external configuration is set to true). Here you need to add all the config parameter you want to load with docker:

+
+
+
+
{
+    "version": "docker-version"
+}
+
+
+
+

2.- In the root, in the file /Dockerfile angular is going to copy the docker-external-config.json that was created before into the Nginx html route:

+
+
+
+
....
+COPY docker-external-config.json /usr/share/nginx/html/docker-external-config.json
+....
+
+
+
+
+
+

External json configuration

+
+
+

1.- Create a json file in the route /src/external-config.json. This external config is going to be used when the application is loaded with the start script (if the Boolean to load the external configuration is set to true). Here you need to add all the config parameter you want to load:

+
+
+
+
{
+    "version": "external-config"
+}
+
+
+
+

2.- The file named /angular.json (/workspace.json if using Nx) located at the root is going to be modified to add the file external-config.json that was just created to both "assets" inside Build and Test:

+
+
+
+
	....
+	"build": {
+          ....
+            "assets": [
+              "src/assets",
+              "src/data",
+              "src/favicon.ico",
+              "src/manifest.json",
+              "src/external-config.json"
+            ]
+	        ....
+        "test": {
+	  ....
+	   "assets": [
+              "src/assets",
+              "src/data",
+              "src/favicon.ico",
+              "src/manifest.json",
+              "src/external-config.json"
+            ]
+	  ....
+
+
+
+
+
+

Setting up the proxies

+
+
+

This step is going to setup two proxies. This is going to allow to load the config desired by the context, in case that it is using docker to load the app or in case it loads the app with angular. Loading different files is made possible by the fact that the ConfigService method loadExternalConfig() looks for the path /config.

+
+
+
+
+

Docker (Optional)

+
+
+

1.- This step is going to be for docker. Add docker-external-config.json to Nginx configuration (/nginx.conf) that is in the root of the application:

+
+
+
+
....
+  location  ~ ^/config {
+        alias /usr/share/nginx/html/docker-external-config.json;
+  }
+....
+
+
+
+
+
+

External Configuration

+
+
+

1.- Now the file /proxy.conf.json needs to be created/modified. This file can be found in the root of the application. In this file you can add the route of the external configuration in target and the name of the file in ^/config:

+
+
+
+
....
+  "/config": {
+    "target": "http://localhost:4200",
+    "secure": false,
+    "pathRewrite": {
+      "^/config": "/external-config.json"
+    }
+  }
+....
+
+
+
+

2.- The file package.json found in the root of the application is going to use the start script to load the proxy configuration that was just created:

+
+
+
+
  "scripts": {
+....
+    "start": "ng serve --proxy-config proxy.conf.json -o",
+....
+
+
+
+

If using Nx, you need to run the command manually:

+
+
+

nx run angular-app-initializer:serve:development --proxyConfig=proxy.conf.json --o

+
+
+
+
+

Adding the loadExternalConfig Boolean to the environments

+
+
+

In order to load an external config we need to add the loadExternalConfig Boolean to the environments. To do so, inside the folder environments/ the files are going to get modified adding this Boolean to each environment that is going to be used. In this case, only two environments are going to be modified (environment.ts and environment.prod.ts). Down below there is an example of the modification being done in the environment.prod.ts:

+
+
+
+
export const environment: {
+  production: boolean;
+  loadExternalConfig: boolean;
+} = { production: false, loadExternalConfig: false };
+
+
+
+

In the file in first instance there is the declaration of the types of the variables. After that, there is the definition of those variables. This variable loadExternalConfig is going to be used by the service, allowing to setup a external config just by switching the loadExternalConfig to true.

+
+
+
+
+

Creating core configuration service

+
+
+

In order to create the whole configuration module, three files are going to be created:

+
+
+

1.- Create in the core app/core/config/ a config.ts

+
+
+
+
  export interface Config {
+    version: string;
+  }
+
+  export const config: Config = {
+    version: 'dev'
+  };
+
+
+
+

Taking a look to this file, it creates a interface (Config) that is going to be used by the variable that exports (export const config: Config). This variable config is going to be used by the service that is going to be created.

+
+
+

2.- Create in the core app/core/config/ a config.service.ts:

+
+
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Config, config } from './config';
+
+@Injectable()
+export class ConfigService {
+  constructor(private httpClient: HttpClient) {}
+
+  static factory(appLoadService: ConfigService) {
+    return () => appLoadService.loadExternalConfig();
+  }
+
+  // this method gets external configuration calling /config endpoint
+  // and merges into config object
+  loadExternalConfig(): Promise<any> {
+    if (!environment.loadExternalConfig) {
+      return Promise.resolve({});
+    }
+
+    const promise = this.httpClient
+      .get('/config')
+      .toPromise()
+      .then((settings) => {
+        Object.keys(settings || {}).forEach((k) => {
+          config[k] = settings[k];
+        });
+        return settings;
+      })
+      .catch((error) => {
+        return 'ok, no external configuration';
+      });
+
+    return promise;
+  }
+
+  getValues(): Config {
+    return config;
+  }
+}
+
+
+
+

As it was explained in previous steps, at first, there is a factory that uses the method loadExternalConfig(), this factory is going to be used in later steps in the module. After that, the loadExternalConfig() method checks if the Boolean in the environment is false. If it is false it will return the promise resolved with the normal config. Else, it is going to load the external config in the path (/config), and overwrite the values from the external config to the config that’s going to be used by the app, this is all returned in a promise.

+
+
+

3.- Create in the core a module for the config app/core/config/ a config.module.ts:

+
+
+
+
import { NgModule, APP_INITIALIZER } from '@angular/core';
+import { HttpClientModule } from '@angular/common/http';
+
+import { ConfigService } from './config.service';
+
+@NgModule({
+  imports: [HttpClientModule],
+  providers: [
+    ConfigService,
+    {
+      provide: APP_INITIALIZER,
+      useFactory: ConfigService.factory,
+      deps: [ConfigService],
+      multi: true,
+    },
+  ],
+})
+export class ConfigModule {}
+
+
+
+

As seen earlier, the ConfigService is added to the module. In this addition, the app is initialized(provide) and it uses the factory that was created in the ConfigService loading the config with or without the external values depending on the Boolean in the config.

+
+
+
+
+

Using the Config Service

+
+
+

As a first step, in the file /app/app.module.ts the ConfigModule created earlier in the other step is going to be imported:

+
+
+
+
  imports: [
+    ....
+    ConfigModule,
+    ....
+  ]
+
+
+
+

After that, the ConfigService is going to be injected into the app.component.ts

+
+
+
+
....
+import { ConfigService } from './core/config/config.service';
+....
+export class AppComponent {
+....
+  constructor(public configService: ConfigService) { }
+....
+
+
+
+

Finally, for this demonstration app, the component app/app.component.html is going to show the version of the config it is using at that moment.

+
+
+
+
<div style="text-align:center">
+  <h1>
+    Welcome to {{ title }}!
+  </h1>
+</div>
+<h2>Here is the configuration version that is using angular right now: {{configService.getValues().version}}</h2>
+
+
+
+
+
+

Final steps

+
+
+

The script start that was created earlier in the package.json (npm start) is going to be used to start the application. After that, modifying the Boolean loadExternalConfig inside the corresponding environment file inside /app/environments/ should show the different config versions.

+
+
+
+loadExternalConfigFalse +
+
+
+
+loadExternalConfigTrue +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-blob-streaming.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-blob-streaming.html new file mode 100644 index 00000000..a4e33f5c --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-blob-streaming.html @@ -0,0 +1,552 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Angular File Uploading

+
+
+

This sample demonstrates how to upload a file to a server. For this, we will need to use an Angular form. In this case we have chosen a simple template-driven form, as the goal of this sample is just to show the process to upload a file. You can learn more about Forms in Angular in the official documentation.

+
+
+ + + + + +
+ + +The back-end implementation for this sample is located here: +devon4j-blob-streaming +
+
+
+
+
+

FormData

+
+
+

FormData is an object where you can store key-value pairs that allows you to send through XMLHttpRequest. You can create a FormData object as simply as:

+
+
+
+
....
+const formData = new FormData();
+formData.append('key', value);
+....
+
+
+
+
+
+

Let’s begin

+
+
+

I assume you already have your angular application running; if not, you can have a look at our AngularBasicPWA sample +Unresolved include directive in modules/ROOT/pages/devon4ng.wiki/guide-blob-streaming.adoc - include::guide-angular-pwa[]

+
+
+

We are going to use Angular Material components, so it is necessary to install the dependency with the following command:

+
+
+

npm install --save @angular/material @angular/cdk @angular/animations

+
+
+
+
+

Importing necessary components

+
+
+

These are the components I am going to use for our sample; they are Material HTML components. To use the template-driven form you do not need to import any component. I am going to create a module called Core where I place the needed imports. After that, I will import the Core module in my main App module, and I will be able to use these components in any part of my application.

+
+
+
+
....
+@NgModule({
+  declarations: [],
+  imports: [CommonModule],
+  exports: [
+    MatButtonModule,
+    MatFormFieldModule,
+    MatInputModule,
+    FormsModule,
+    MatProgressBarModule,
+  ],
+})
+export class CoreModule {}
+....
+
+
+
+

FormsModule Will allow us data binding through html and component.

+
+
+

The next step will be to create a component to place the uploading component: +ng generate component uploader

+
+
+

So this will be our project structure so far:

+
+
+
+folder structure +
+
+
+

Then, in the app.component.html we need to add the selector for our new component, so it will be represented there. We are not going to create any route for this sample. We can also modify the values for the toolbar.

+
+
+
+
....
+<div class="toolbar" role="banner">
+  <img
+    width="40"
+    alt="Angular Logo"
+    src="data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAyNTAgMjUwIj4KICAgIDxwYXRoIGZpbGw9IiNERDAwMzEiIGQ9Ik0xMjUgMzBMMzEuOSA2My4ybDE0LjIgMTIzLjFMMTI1IDIzMGw3OC45LTQzLjcgMTQuMi0xMjMuMXoiIC8+CiAgICA8cGF0aCBmaWxsPSIjQzMwMDJGIiBkPSJNMTI1IDMwdjIyLjItLjFWMjMwbDc4LjktNDMuNyAxNC4yLTEyMy4xTDEyNSAzMHoiIC8+CiAgICA8cGF0aCAgZmlsbD0iI0ZGRkZGRiIgZD0iTTEyNSA1Mi4xTDY2LjggMTgyLjZoMjEuN2wxMS43LTI5LjJoNDkuNGwxMS43IDI5LjJIMTgzTDEyNSA1Mi4xem0xNyA4My4zaC0zNGwxNy00MC45IDE3IDQwLjl6IiAvPgogIDwvc3ZnPg== "
+  />
+  <span>File uploader</span>
+</div>
+
+<app-uploader></app-uploader>
+
+<router-outlet></router-outlet>
+....
+
+
+
+

Now, our new component uploader will be loaded in the root page. Let’s add some code to it.

+
+
+
+
+

Uploader component

+
+
+

I will begin editing the html file. The first thing we need is an input component, which will allow us to select the file to upload. Furthermore, I added a button which will be responsible for opening the file upload window. Apart from this, there are also two labels and a progress bar. The labels will give feedback about the file upload request, both with an if clause on the uploadSuccess and uploadFail global variables that will be in uploader.component.ts. The progress bar will show the progress of the file being uploaded.

+
+
+
+
....
+  <div class="upload">
+    <div>
+      <button mat-raised-button (click)="upload()">Upload file</button>
+    </div>
+        <label mat-label *ngIf="uploadSuccess"
+      >The file was uploaded successfully!</label
+    >
+    <label mat-label *ngIf="uploadFail"
+      >There was an error uploading the file</label
+    >
+    <input
+      type="file"
+      #fileUpload
+      name="fileUpload"
+      accept="*"
+      style="display: none"
+    />
+  </div>
+    <mat-progress-bar
+    *ngIf="fileInProgress"
+    [value]="fileProgress"
+  ></mat-progress-bar>
+</div>
+....
+
+
+
+

The button will call the upload() method in our uploader.component.ts, and as we can see, I assigned an identifier to the input, #fileUpload, so we can reference it from uploader.component.ts. It accepts any file, and the display none style is there because the input will be triggered when we click the button, so it is not necessary for it to be present in the view.

+
+
+

Our html view should look something similar to this:

+
+
+
+html view 1 +
+
+
+

Let’s start in our .ts file. In order to interact with the input #fileUpload, it is necessary to declare it like this:

+
+
+
+
....
+@ViewChild('fileUpload') fileUpload: ElementRef;
+constructor() {}
+....
+
+
+
+

And then, the upload() method that the button in html is calling:

+
+
+
+
....
+ upload(): void {
+    this.fileUpload.nativeElement.click();
+
+    this.fileUpload.nativeElement.onchange = () => {
+      const file = this.fileUpload.nativeElement.files[0];
+      this.uploadFile(file);
+    };
+  }
+....
+
+
+
+

The click method at first line will open the file explorer in order to select the desired file to upload, and on change method will be called when a new file is selected, so a change is detected. Then, uploadFile(…​) method will be called.

+
+
+

Before explaining this uploadFile(…​) method, there is still something missing: a service to communicate with the back-end through HTTP. +I am going to place the service in a service folder inside our uploader component folder. +Execute the following command ng generate service data and paste the following code

+
+
+
+
....
+export class DataService {
+  SERVER_URL = 'http://localhost:8081/services/rest/binary/v1/';
+
+  constructor(private httpClient: HttpClient) {}
+
+  uploadFile(formData: FormData): Observable<HttpEvent<BinaryObject>> {
+    const headers = new HttpHeaders({
+      'Content-Type': 'multipart/form-data',
+    });
+
+    return this.httpClient.post<BinaryObject>(
+      this.SERVER_URL + 'binaryobject',
+      formData,
+      {
+        headers,
+        reportProgress: true,
+        observe: 'events',
+      }
+    );
+  }
+}
+....
+
+
+
+

We have declared the URL as a global variable. It is also necessary to set the content-type as multipart/form-data in the headers section, as the FormData will be the body of the request. There are also two options to talk about:

+
+
+
    +
  • +

    reportProgress: to have a feedback about the file upload so we can show percentage on the view.

    +
  • +
  • +

    observe: 'events' in order to receive this type of event information.

    +
  • +
+
+
+

The uploadFile(…​) method is still missing in uploader.component.ts.

+
+
+
+
....
+  uploadFile(file: File): void {
+    const formDataBody = this.getFormData(file);
+    this.dataService.uploadFile(formDataBody).subscribe(
+      (event) => {
+        if (event.type == HttpEventType.UploadProgress) {
+          this.fileProgress = Math.round((100 * event.loaded) / event.total);
+        } else if (event instanceof HttpResponse) {
+          this.fileInProgress = false;
+          this.uploadSuccess = true;
+        }
+      },
+      (err) => {
+        console.log('Could not upload the file!');
+        this.uploadFail = true;
+      }
+    );
+  }
+....
+
+
+
+

Notice that whether we have a correct response, or an error response, we set the variable this.uploadSuccess or this.uploadFail to show the labels in the html giving feedback. +Once we call the service to do the HTTP request, we expect two types of response(three if we count the error), the first one is the progress of the upload, and will update the progress bar through this.fileProgress variable. The second one is a response when the request is finished. +That is why the type of the response is checked between HttpEventType or HttpResponse.

+
+
+

Now, if you have your back-end running, you should be able to upload a file, and check in DB that all the process worked fine.

+
+
+ + + + + +
+ + +Download method is not implemented yet. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-component-decomposition.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-component-decomposition.html new file mode 100644 index 00000000..b01157d3 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-component-decomposition.html @@ -0,0 +1,504 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Component Decomposition

+
+
+

When implementing a new requirement there are a few design decisions, which need to be considered. +A decomposition in Smart and Dumb Components should be done first. +This includes the definition of state and responsibilities. +Implementing a new dialog will most likely be done by defining a new Smart Component with multiple Dumb Component children.

+
+
+

In the component tree this would translate to the definition of a new sub-tree.

+
+
+
+Component Tree With Highlighted Sub Tree +
+
Figure 1. Component Tree with highlighted sub-tree
+
+
+
+
+

Defining Components

+
+
+

The following gives an example for component decomposition. +Shown is a screenshot from a style guide to be implemented. +It is a widget called Listpicker.

+
+
+

The basic function is an input field accepting direct input. +So typing otto puts otto inside the FormControl. +With arrow down key or by clicking the icon displayed in the inputs right edge a dropdown is opened. +Inside possible values can be selected and filtered beforehand. +After pressing arrow down key the focus should move into the filter input field. +Up and down arrow keys can be used to select an element from the list. +Typing into the filter input field filters the list from which the elements can be selected. +The current selected element is highlighted with green background color.

+
+
+
+Component Decomposition Example 1v2 +
+
Figure 2. Component decomposition example before
+
+
+

What should be done, is to define small reusable Dumb Components. +This way the complexity becomes manageable. +In the example every colored box describes a component with the purple box being a Smart Component.

+
+
+
+Component Decomposition Example 2v2 +
+
Figure 3. Component decomposition example after
+
+
+

This leads to the following component tree.

+
+
+
+Component Decomposition Example component tree +
+
Figure 4. Component decomposition example component tree
+
+
+

Note the uppermost component is a Dumb Component. +It is a wrapper for the label and the component to be displayed inside a form. +The Smart Component is Listpicker. +This way the widget can be reused without a form needed.

+
+
+

A widgets is a typical Smart Component to be shared across feature modules. +So the SharedModule is the place for it to be defined.

+
+
+
+
+

Defining state

+
+
+

Every UI has state. +There are different kinds of state, for example

+
+
+
    +
  • +

    View State: e.g. is a panel open, a css transition pending, etc.

    +
  • +
  • +

    Application State: e.g. is a payment pending, current URL, user info, etc.

    +
  • +
  • +

    Business Data: e.g. products loaded from back-end

    +
  • +
+
+
+

It is good practice to base the component decomposition on the state handled by a component and to define a simplified state model beforehand. +Starting with the parent - the Smart Component:

+
+
+
    +
  • +

    What overall state does the dialog have: e.g. loading, error, valid data loaded, valid input, invalid input, etc. +Every defined value should correspond to an overall appearance of the whole dialog.

    +
  • +
  • +

    What events can occur to the dialog: e.g. submitting a form, changing a filter, pressing buttons, pressing keys, etc.

    +
  • +
+
+
+

For every Dumb Component:

+
+
+
    +
  • +

    What data does a component display: e.g. a header text, user information to be displayed, a loading flag, etc.
    +This will be a slice of the overall state of the parent Smart Component. +In general a Dumb Component presents a slice of its parent Smart Components state to the user.

    +
  • +
  • +

    What events can occur: keyboard events, mouse events, etc.
    +These events are all handled by its parent Smart Component - every event is passed up the tree to be handled by a Smart Component.

    +
  • +
+
+
+

These information should be reflected inside the modeled state. +The implementation is a TypeScript type - an interface or a class describing the model.

+
+
+

So there should be a type describing all state relevant for a Smart Component. +An instance of that type is send down the component tree at runtime. +Not every Dumb Component will need the whole state. +For instance a single Dumb Component could only need a single string.

+
+
+

The state model for the previous Listpicker example is shown in the following listing.

+
+
+
Listing 1. Listpicker state model
+
+
export class ListpickerState {
+
+  items: {}[]|undefined;
+  columns = ['key', 'value'];
+  keyColumn = 'key';
+  displayValueColumn = 'value';
+  filteredItems: {}[]|undefined;
+  filter = '';
+  placeholder = '';
+  caseSensitive = true;
+  isDisabled = false;
+  isDropdownOpen = false;
+  selectedItem: {}|undefined;
+  displayValue = '';
+
+}
+
+
+
+

Listpicker holds an instance of ListpickerState which is passed down the component tree via @Input() bindings in the Dumb Components. +Events emitted by children - Dumb Components - create a new instance of ListpickerState based on the current instance and the event and its data. +So a state transition is just setting a new instance of ListpickerState. +Angular Bindings propagate the value down the tree after exchanging the state.

+
+
+
Listing 2. Listpicker State transition
+
+
export class ListpickerComponent {
+
+  // initial default values are set
+  state = new ListpickerState();
+
+  /** User changes filter */
+  onFilterChange(filter: string): void {
+    // apply filter ...
+    const filteredList = this.filterService.filter(...);
+
+    // important: A new instance is created, instead of altering the existing one.
+    //            This makes change detection easier and prevents hard to find bugs.
+    this.state = Object.assign({}, this.state, {
+      filteredItems: filteredList,
+      filter: filter
+    });
+  }
+
+}
+
+
+
+
Note:
+

It is not always necessary to define the model as independent type. +So there would be no state property and just properties for every state defined directly in the component class. +When complexity grows and state becomes larger this is usually a good idea. +If the state should be shared between Smart Components a store is to be used.

+
+
+
+
+

When are Dumb Components needed

+
+
+

Sometimes it is not necessary to perform a full decomposition. The architecture does not enforce it generally. What you should keep in mind is, that there is always a point when it becomes recommendable.

+
+
+

For example, a template with 800 lines of code is:

+
+
+
    +
  • +

    not understandable

    +
  • +
  • +

    not maintainable

    +
  • +
  • +

    not testable

    +
  • +
  • +

    not reusable

    +
  • +
+
+
+

So when implementing a template with more than 50 lines of code you should think about decomposition.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-consuming-rest-services.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-consuming-rest-services.html new file mode 100644 index 00000000..d449793f --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-consuming-rest-services.html @@ -0,0 +1,527 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Consuming REST services

+
+
+

A good introduction to working with Angular HttpClient can be found in Angular Docs

+
+
+

This guide will cover, how to embed Angular HttpClient in the application architecture. +For back-end request a special service with the suffix Adapter needs to be defined.

+
+
+
+
+

Defining Adapters

+
+
+

It is a good practice to have an Angular service whose single responsibility is to call the back-end and parse the received value to a transfer data model (e.g. Swagger generated TOs). +Those services need to have the suffix Adapter to make them easy to recognize.

+
+
+
+Adapters handle back-end communication +
+
Figure 1. Adapters handle back-end communication
+
+
+

As illustrated in the figure a Use Case service does not use Angular HttpClient directly but uses an adapter. +A basic adapter could look like this:

+
+
+
Listing 1. Example adapter
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Observable } from 'rxjs/Observable';
+
+import { FlightTo } from './flight-to';
+
+@Injectable({
+ providedIn: 'root',
+})
+export class FlightsAdapter {
+
+  constructor(
+    private httpClient: HttpClient
+  ) {}
+
+  getFlights(): Observable<FlightTo> {
+    return this.httpClient.get<FlightTo>('/relative/url/to/flights');
+  }
+
+}
+
+
+
+

The adapters should use a well-defined transfer data model. +This could be generated from server endpoints with CobiGen, Swagger, typescript-maven-plugin, etc. +If inside the application there is a business model defined, the adapter has to parse to the transfer model. +This is illustrated in the following listing.

+
+
+
Listing 2. Example adapter mapping from business model to transfer model
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Observable } from 'rxjs/Observable';
+import { map } from 'rxjs/operators';
+
+import { FlightTo } from './flight-to';
+import { Flight } from '../../../model/flight';
+
+@Injectable({
+ providedIn: 'root',
+})
+export class FlightsAdapter {
+
+  constructor(
+    private httpClient: HttpClient
+  ) {}
+
+  updateFlight(flight: Flight): Observable<Flight> {
+    const to = this.mapFlight(flight);
+
+    return this.httpClient.post<FlightTo>('/relative/url/to/flights', to).pipe(
+      map(to => this.mapFlightTo(to))
+    );
+  }
+
+  private mapFlight(flight: Flight): FlightTo {
+    // mapping logic
+  }
+
+  private mapFlightTo(flightTo: FlightTo): Flight {
+    // mapping logic
+  }
+
+}
+
+
+
+
+
+

Token management

+
+
+

In most cases the access to back-end API is secured using well known mechanisms as CSRF, JWT or both. In these cases the front-end application must manage the tokens that are generated when the user authenticates. More concretely it must store them to include them in every request automatically. Obviously, when user logs out these tokens must be removed from localStorage, memory, etc.

+
+
+
+
+

Store security token

+
+
+

In order to make this guide simple we are going to store the token in memory. Therefore, if we consider that we already have a login mechanism implemented, we would like to store the token using an auth.service.ts:

+
+
+
+
import { Injectable } from '@angular/core';
+import { Router } from '@angular/router';
+
+@Injectable({
+  providedIn: 'root',
+})
+export class AuthService {
+  private loggedIn = false;
+  private token: string;
+
+  constructor(public router: Router) {}
+
+  public isLogged(): boolean {
+    return this.loggedIn || false;
+  }
+
+  public setLogged(login: boolean): void {
+    this.loggedIn = login;
+  }
+
+  public getToken(): string {
+    return this.token;
+  }
+
+  public setToken(token: string): void {
+    this.token = token;
+  }
+}
+
+
+
+

Using the previous service we will be able to store the token obtained in the login request using the method setToken(token). Please consider that, if you want a more sophisticated approach using localStorage API, you will need to modify this service accordingly.

+
+
+
+
+

Include token in every request

+
+
+

Now that the token is available in the application it is necessary to include it in every request to a protected API endpoint. Instead of modifying all the HTTP requests in our application, Angular provides a class to intercept every request (and every response if we need to) called HttpInterceptor. Let’s create a service called http-interceptor.service.ts to implement the intercept method of this class:

+
+
+
+
import {
+  HttpEvent,
+  HttpHandler,
+  HttpInterceptor,
+  HttpRequest,
+} from '@angular/common/http';
+import { Injectable } from '@angular/core';
+import { Observable } from 'rxjs';
+import { environment } from '../../../environments/environment';
+import { AuthService } from './auth.service';
+
+@Injectable()
+export class HttpRequestInterceptorService implements HttpInterceptor {
+
+  constructor(private auth: AuthService) {}
+
+  intercept(
+    req: HttpRequest<any>,
+    next: HttpHandler,
+  ): Observable<HttpEvent<any>> {
+    // Get the auth header from the service.
+    const authHeader: string = this.auth.getToken();
+    if (authHeader) {
+      let authReq: HttpRequest<any>;
+
+      // CSRF
+      if (environment.security == 'csrf') {
+        authReq = req.clone({
+          withCredentials: true,
+          setHeaders: { 'x-csrf-token': authHeader },
+        });
+      }
+
+      // JWT
+      if (environment.security == 'jwt') {
+        authReq = req.clone({
+          setHeaders: { Authorization: authHeader },
+        });
+      }
+
+      return next.handle(authReq);
+    } else {
+      return next.handle(req);
+    }
+  }
+}
+
+
+
+

As you may notice, this service is making use of an environment field environment.security to determine if we are using JWT or CSRF in order to inject the token accordingly. In your application you can combine both if necessary.

+
+
+

Configure environment.ts file to use the CSRF/JWT.

+
+
+
+
security: 'csrf'
+
+
+
+

The authHeader used is obtained using the injected service AuthService already presented above.

+
+
+

In order to activate the interceptor we need to provide it in our app.module.ts or core.module.ts depending on the application structure. Let’s assume that we are using the latter and the interceptor file is inside a security folder:

+
+
+
+
...
+import { HttpRequestInterceptorService } from './security/http-request-interceptor.service';
+...
+
+@NgModule({
+  imports: [...],
+  exports: [...],
+  declarations: [],
+  providers: [
+    ...
+    {
+      provide: HTTP_INTERCEPTORS,
+      useClass: HttpRequestInterceptorService,
+      multi: true,
+    },
+  ],
+})
+export class CoreModule {}
+
+
+
+

Angular automatically will now modify every request and include in the header the token if it is convenient.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-creating-angular-app-with-nx-cli.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-creating-angular-app-with-nx-cli.html new file mode 100644 index 00000000..1f08f3ba --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-creating-angular-app-with-nx-cli.html @@ -0,0 +1,408 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Nx CLI

+
+
+

Nx CLI provides a wrapper around Angular CLI and makes it faster, in addition to other benefits. Its computational cache significantly speeds up building and serving applications successively.

+
+
+

With Nx CLI you always get the latest tools to develop your angular applications. By default it is integrated with tools like Jest, Cypress, ESLint and many more. Though you can always configure to use other tools as per your preference.

+
+
+

One difference you will find while working with Nx CLI is that an Nx workspace follows a certain folder structure. That is because Nx strongly supports monorepo architecture, wherein you place all the different components that make up your entire application (front-end, back-end, libraries, models) into one single repository. Nx also provides the tooling between these different components, so that you can share your code across your different applications in the same repo and avoid re-writing. We will go through the folder structure later in this guide. But we might not always want to follow a monorepo architecture and it is possible to create a single application with Nx CLI.

+
+
+

In this guide we are going to learn how to create an angular app with Nx CLI. But first, let us start by installing Nx

+
+
+
+
+

Installing Nx

+
+
+

You can install Nx globally in your system using the following command:

+
+
+
+
npm install -g nx
+
+
+
+

Now let us proceed to creating an angular application using Nx.

+
+
+
+
+

Creating Angular app with Nx

+
+
+

To create an angular app with Nx, we simply create an Nx workspace with angular preset using the following command:

+
+
+
+
npx create-nx-workspace --preset=angular
+
+
+
+

The CLI will ask a series of questions and setup your workspace with an empty angular application.

+
+
+
+Creating a Nx workspace +
+
Figure 1. Creating a Nx workspace.
+
+
+

Here we have given the workspace name as nx-guide and the app name as nx-ng-app. Let us have a look at the folder structure created by Nx CLI.

+
+
+
+
+

Nx workspace folder structure

+
+
+

Every Nx workspace has the following folder structure:

+
+
+
+
myorg/
+├── apps/
+├── libs/
+├── tools/
+├── workspace.json
+├── nx.json
+├── package.json
+└── tsconfig.base.json
+
+
+
+

Nx creates such a folder structure because it strongly supports the concept of monorepo, wherein all the inter-related applications and libraries are put together in the same repository for better maintenance, code-sharing and avoiding duplication of codes.

+
+
+

As per the structure, all runnable applications should belong in the apps/ directory. Supporting applications and libraries can be put in the libs/ folder, with each library defining its own external API to differentiate between them. tools/ folder can contain scripts which act on your code like database scripts, for example. The JSON files contain various information and configuration settings about the workspace and the projects within them.

+
+
+

You will find your angular app named nx-ng-app in the apps/ folder. The folder structure within your app is similar to any Angular app created with Angular CLI.

+
+
+
+Your Nx workspace in VSCode +
+
Figure 2. Your Nx workspace in VSCode.
+
+
+

You will also notice another app named nx-ng-app-e2e automatically generated in the apps folder. This is for performing end-to-end testing with Cypress on your app.

+
+
+

Now that we have created our angular app, let us serve it so we can view the application in our browser.

+
+
+
+
+

Running your angular application

+
+
+

You can still use the ng command to serve your application from your workspace root directory as such:

+
+
+
+
ng serve nx-ng-app
+
+
+
+

Using Nx, you can use either of the commands below for the same purpose:

+
+
+
+
nx run my-app:serve
+nx serve my-app
+
+
+
+

Once your code is compiled, you can view your application at http://localhost:4200 as usual.

+
+
+
+
+

Conclusion

+
+
+

In this guide you learned how to install Nx and create an Angular application with it. Nx comes with a host of features and documentation. You can read more about using Nx for your Angular projects over here.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-cypress.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-cypress.html new file mode 100644 index 00000000..a3ffbe30 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-cypress.html @@ -0,0 +1,1064 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Testing e2e with Cypress

+
+
+

This guide will cover the basics of e2e testing using Cypress.

+
+
+

Cypress is a framework “all in one” that provides the necessary libraries to write specific e2e tests, without the need of Selenium.

+
+
+

Why Cypress?

+
+
+
    +
  • +

    Uses JavaScript

    +
  • +
  • +

    It works directly with the browser so the compatibility with the front-end framework the project uses (in this case Angular) is not a problem.

    +
  • +
  • +

    Easy cross browser testing

    +
  • +
+
+
+
+
+

Setup

+
+
+

Install +First of all we need to install it, we can use npm install:

+
+
+
+
$ npm install -D cypress
+
+
+
+

Or we can install it with yarn:

+
+
+
+
$ yarn add -D cypress
+
+
+
+

We need to run Cypress in order to get the folder tree downloaded, then create a tsconfig.json file inside cypress folder to add the typescript configuration.

+
+
+
+
$ . /node_modules/.bin/cypress open
+
+
+
+
Listing 1. tsconfig.json
+
+
{
+  "compilerOptions": {
+    "strict": true,
+    "baseUrl": "../node_modules",
+    "target": "es5",
+    "lib": ["es5", "dom"],
+    "types": ["cypress"]
+  },
+  "include": [
+    "**/*.ts"
+  ]
+}
+
+
+
+

BaseUrl

+
+
+

Let’s setup the base URL so when we run the tests cypress will "navigate" to the right place, go to cypress.json on the root of the project.

+
+
+
Listing 2. cypress.json
+
+
{
+  "baseUrl": "http://localhost:4200"
+}
+
+
+
+
+
+

Files / Structure

+
+
+
+
/cypress
+  tsconfig.json
+  /fixtures
+    - example.json
+  /integration
+    - button.spec.ts
+    - test.spec.ts
+    /examples
+  /plugins
+    - index.js
+  /support
+    - commands.js
+    - index.js
+
+
+
+

tsconfig.json for typescript configuration.

+
+
+

fixtures to store our mock data or files (images, mp3…​) to use on our tests.

+
+
+

integration is where our tests go, by default it comes with an examples folder with tested samples.

+
+
+

plugins is where the configuration files of the plugins go.

+
+
+

support to add custom commands.

+
+
+
+
+

== =

+
+
+

If you are using Nx, it automatically generates a e2e cypress project for every project that you generate. So you already get the configuration files like tsconfig.json and cypress.json and also get the folder structure described above. This helps you focus more on writing your tests rather than setting up Cypress.

+
+
+
+
+

== =

+
+ +
+
+
+

Tests

+
+
+

The structure is the same as Mocha's.

+
+
+

First, we create a file, for example form.spec.ts, inside we define a context to group all our tests referred to the same subject.

+
+
+
Listing 3. form.spec.ts
+
+
context('Button page', () => {
+  beforeEach(() => {
+    cy.visit('/');
+  });
+  it('should have button',()=>{
+    cy.get('button').should('exist');
+  });
+  it('should contain PRESS',()=>{
+    cy.contains('button', 'PRESS');
+  });
+});
+
+
+
+
beforeEach
+

Visit '/' before every test.

+
+
+
it
+

Inside we write the test.

+
+
+

The result:

+
+
+
+contextImg +
+
+
+

For more info check Cypress documentation

+
+
+

On kitchensink +you can find an official cypress demo with all the commands being used.

+
+
+
+
+

Fixtures

+
+
+

We use fixtures to mock data, it can be a json, an image, video…​

+
+
+
+
{
+  "name": "Dummy name",
+  "phone": "999 99 99 99",
+  "body": "Mock data"
+}
+
+
+
+

You can store multiple mocks on the same fixture file.

+
+
+
+
{
+  "create":{"name": "e2etestBox"},
+  "boxFruit":{
+    "uuid":"3376339576e33dfb9145362426a33333",
+    "name":"e2etestBox",
+    "visibility":true,
+    "items":[
+      {"name":"apple","units":3},
+      {"name":"kiwi","units":2}
+    ]
+  }
+}
+
+
+
+

To access data we don’t need to import any file, we just call cy.fixture(filename) inside the *.spec.ts. We can name it as we want.

+
+
+
+
cy.fixture('box.json').as('fruitBox')
+
+
+
+

cy.fixture('box.json') we get access to box.json +.as('fruitBox') is used to create an alias (fruitBox) to the fixture.

+
+
+

For more info check Fixtures documentation

+
+
+
+
+

Request / Route

+
+
+

With cypress you can test your application with real data or with mocks.

+
+
+

Not using mocks guarantees that your tests are real e2e tests but makes them vulnerable to external issues. +When you mock data you don’t know exactly if the data and the structure received from the backend is correct because you are forcing a mock on the response, but you can avoid external issues, run tests faster and have better control on the structure and status.

+
+
+

To get more information go to Testing Strategies

+
+
+
+
+

Route

+
+
+

Cypress can intercept a XHR request and interact with it.

+
+
+
+
cy.server();
+cy.route(
+  'GET',
+  '/apiUrl/list',
+  [{"name":"apple", "units":3},{"name":"kiwi", "units":2}]
+)
+
+
+
+

cy.server(options) start a server to interact with the responses.

+
+
+
cy.route(options) intercepts a XMLHttpRequests
+
    +
  • +

    method GET

    +
  • +
  • +

    URL /apiUrl/list'

    +
  • +
  • +

    response [{"name":"apple", "units":3},{"name":"kiwi", "units":2}]

    +
  • +
+
+
+

Waits

+
+
+

Every Cypress action has a default wait time to avoid asynchronous issues, but this time can be too short for some particular actions like API calls; for those cases we can use cy.wait().

+
+
+
+
cy.server();
+cy.route('/apiUrl/list').as('list');
+cy.visit('/boxList');
+cy.wait('@list');
+
+
+
+

You can find more information about cy.wait() here

+
+
+

To mock data with fixtures:

+
+
+
+
cy.fixture('box')
+  .then(({boxFruit}) => {
+    cy.route(
+      'GET',
+      '/apiUrl/list',
+      boxFruit
+    ).as('boxFruit');
+    cy.get('#button').click();
+    cy.wait('@boxFruit');
+    cy.get('#list').contains('apple');
+  })
+
+
+
+

We get boxFruit data from the box fixture and then we mock the API call with it so now the response of the call is boxFruit object. +When the button is clicked, it waits to receive the response of the call and then checks if the list contains one of the elements of the fruitBox.

+
+
+
+
+

Request

+
+
+

Make an HTTP request.

+
+
+
+
cy.server();
+cy.request('http://localhost:4200/')
+  .its('body')
+  .should('include', '<h1>Welcome to Devon4ngAngularElementsTest!</h1>');
+
+
+
+

If we have 'http://localhost:4200' as baseUrl on cypress.json

+
+
+
+
cy.server();
+cy.request('/')
+  .its('body')
+  .should('include', '<h1>Welcome to Devon4ngAngularElementsTest!</h1>');
+// Goes to http://localhost:4200/
+
+
+
+

We can add other options, like we can send the body of a form.

+
+
+
+
cy.server();
+cy.request({
+  method: 'POST',
+  url: '/send',
+  form: true,
+  body: {
+    name: 'name task',
+    description: 'description of the task'
+  }
+});
+
+
+
+
+
+

Custom commands

+
+
+

If you see yourself writing the same test more than once (login is a common one), you can create a custom command to make things faster.

+
+
+

Cypress.Commands.add('name', () => {}) to create the test.

+
+
+
Listing 4. commands.ts
+
+
Cypress.Commands.add('checkPlaceholder', (name) => {
+  cy.get(`[name='${name}']`).click();
+  cy.get('mat-form-field.mat-focused').should('exist');
+});
+
+
+
+
index.ts
+

To use the commands we need to import the files on support/index.ts

+
+
+
Listing 5. index.ts
+
+
import './commands'
+import './file1'
+import './folder/file2'
+
+
+
+

index.ts is where all our custom commands files unite so Cypress knows where to find them.

+
+
+

And as we are using TypeScript, we need to declare a namespace and an interface, and define our function.

+
+
+
    +
  • +

    index.d.ts

    +
  • +
+
+
+
+
declare namespace Cypress {
+  interface Chainable<Subject> {
+    checkPlaceholder(name:string):Chainable<void>
+  }
+}
+
+
+ +
+
+
+

Cross browser testing

+
+
+

By default the browser used by Cypress is Chrome, it has compatibility with its family of browsers (including Microsoft Edge) and has beta support for Mozilla Firefox.

+
+
+

To change the browser on the panel we can do it by selecting the desired one on the browsers tab before running the spec file.

+
+
+

Except for Electron, Cypress will detect and display only the browsers that you have already installed on your machine.

+
+
+
+browserTab +
+
+
+

Once the browser is selected, you can run your tests.

+
+
+

To change the browser on the automatic test run, you can add a flag on the node command

+
+
+
+
cypress run --browser edge
+
+
+
+

Only if we use the cypress run command.

+
+
+

Or we can change the script file.

+
+
+
    +
  • +

    cypress/script.js

    +
  • +
+
+
+
+
const runTests= async ()=>{
+  ...
+  const {totalFailed} = await cypress.run({browser:'edge'});
+  ...
+};
+
+
+ +
+
+
+

Viewport

+
+
+

Cypress allows us to create tests depending on the viewport, so we can test responsiveness.

+
+
+

There are different ways to use it:

+
+
+

Inside a test case

+
+
+
+
it('should change title when viewport is less than 320px', ()=>{
+  cy.get('.title-l').should('be.visible');
+  cy.get('.title-s').should('not.be.visible');
+  cy.viewport(320, 480);
+  cy.get('.title-l').should('not.be.visible');
+  cy.get('.title-s').should('be.visible');
+})
+
+
+
+

Passing the configuration as an option

+
+
+
+
describe('page display on medium size screen', {
+  viewportHeight: 1000,
+  viewportWidth: 400
+}, () => {
+  ...
+})
+
+
+
+

Or we can set a default

+
+
+
    +
  • +

    cypress.json

    +
  • +
+
+
+
+
...
+{
+ "viewportHeight": 1000,
+ "viewportWidth": 400
+}
+...
+
+
+ +
+
+
+

Test retries

+
+
+

We can get false negatives intermittently due to external issues that can affect our tests. Because of that, we can add a retries entry in the configuration so Cypress can run a failed test again the selected number of times to verify that the error is real.

+
+
+

We can set retries for run or open mode.

+
+
+
    +
  • +

    cypress.json

    +
  • +
+
+
+
+
...
+"retries": {
+    "runMode": 3,
+    "openMode": 3
+  }
+...
+
+
+
+

The retries can be configured on the cypress.json or directly on a specific test.

+
+
+
+
it('should get button', {
+  retries: {
+    runMode: 2,
+    openMode: 2
+  }
+}, () => {
+  ...
+})
+
+
+
+

These retries are not shown on the test log.

+
+
+

Check more on retries documentation

+
+
+
+
+

Reporter

+
+
+

The tests results appear on the terminal, but to have a more friendly view we can add a reporter.

+
+
+
+reporter +
+
+
+
+
+

Mochawesome

+
+
+

In this case we are going to use Mochawesome; initially it's a Mocha reporter, but as Cypress uses Mocha it works the same.

+
+
+

Install

+
+
+

npm

+
+
+
+
npm install --save-dev mochawesome
+
+
+
+

yarn

+
+
+
+
yarn add -D mochawesome
+
+
+
+

To run the reporter:

+
+
+
+
cypress run --reporter mochawesome
+
+
+
+

Mochawesome saves by default the generated files on `./mochawesome-report/` but we can add options to change this behavior.

+
+
+

Options can be passed to the reporter in two ways

+
+
+

Using a flag

+
+
+
+
cypress run --reporter mochawesome --reporter-options reportDir=report
+
+
+
+

Or on cypress.json

+
+
+
+
{
+  "baseUrl": "http://localhost:4200",
+  "reporter": "mochawesome",
+  "reporterOptions": {
+    "overwrite": false,
+    "html": false,
+    "json": true,
+    "reportDir": "cypress/report"
+  }
+}
+
+
+
+

Overwrite:false to not overwrite every *.spec.ts test report; we want to keep them so we can create a merged version later.

+
+
+

reportDir to set a custom directory.

+
+
+

html:false because we don’t need it.

+
+
+

json:true to save them on json.

+
+
+

Mochawesome only creates the html file of the last .spec.ts file that the tests run; that’s why we don’t generate html reports directly. In order to stack them all on the same final html, we need to merge the reports.

+
+ +
+

mochawesome-merge

+
+
+

Mochawesome-merge is a library that helps us to merge the different json.

+
+
+

npm

+
+
+
+
npm install --save-dev mochawesome-merge
+npm install --save-dev mochawesome-report-generator
+
+
+
+

yarn

+
+
+
+
yarn add -D mochawesome-merge
+yarn add -D mochawesome-report-generator
+
+
+
+

To merge the files we execute this command:

+
+
+
+
mochawesome-merge cypress/report/*.json > cypress/reportFinal.json
+
+
+
+

reportFinal.json is the result of this merge; with that we have the data of all the spec files in one json.

+
+
+

We can also automate the test, merge and conversion to html using a script.

+
+
+
+
const cypress = require('cypress');
+const fse = require('fs-extra');
+const { merge } = require('mochawesome-merge');
+const generator = require('mochawesome-report-generator');
+const runTests= async ()=>{
+  await fse.remove('mochawesome-report');
+  await fse.remove('cypress/report');
+  const {totalFailed} = await cypress.run();
+  const reporterOptions = {
+    files: ["cypress/report/*.json"]
+  };
+  await generateReport(reporterOptions);
+  if(totalFailed !==  0){
+    process.exit(2);
+  };
+};
+const generateReport = (options)=> {
+  return merge(options).then((jsonReport)=>{
+    generator.create(jsonReport).then(()=>{
+      process.exit();
+    });
+  });
+};
+runTests();
+
+
+
+

fse.remove() to remove older reports data.

+
+
+

cypress.run() to run the tests.

+
+
+

merge(options) we merge the json output from running the tests.

+
+
+

generator.create(jsonReport) then we generate the html view of the report.

+
+ +
+

On kitchensink +you can find an official cypress demo with all the commands being used.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-error-handler.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-error-handler.html new file mode 100644 index 00000000..b9db1cea --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-error-handler.html @@ -0,0 +1,510 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Error Handler in angular

+
+
+

Angular allows us to set up a custom error handler that can be used to control the different errors and handle them in a correct way. Using a global error handler will avoid mistakes and provide a user-friendly interface allowing us to indicate to the user what problem is happening.

+
+
+
+
+

What is ErrorHandler

+
+
+

ErrorHandler is the class that Angular uses by default to control the errors. This means that, even if the application doesn’t have an ErrorHandler, it is going to use the one set up by default in Angular. This can be tested by trying to find a page not existing in any app; instantly Angular will print the error in the console.

+
+
+
+
+

Creating your custom ErrorHandler step by step

+
+
+

In order to create a custom ErrorHandler three steps are going to be needed:

+
+
+
+
+

Creating the custom ErrorHandler class

+
+
+

In this first step the custom ErrorHandler class is going to be created inside the folder /app/core/errors/errors-handler.ts:

+
+
+
+
import { ErrorHandler, Injectable, Injector } from '@angular/core';
+import { HttpErrorResponse } from '@angular/common/http';
+
+@Injectable()
+export class ErrorsHandler implements ErrorHandler {
+
+    constructor(private injector: Injector) {}
+
+    handleError(error: Error | HttpErrorResponse) {
+      //  To do: Use injector to get the necessary services to redirect or
+      // show a message to the user
+      const classname  = error.constructor.name;
+      switch ( classname )  {
+        case 'HttpErrorResponse':
+          console.error('HttpError:' + error.message);
+          if (!navigator.onLine) {
+            console.error('There\'s no internet connection');
+            // To do: control here in internet what you wanna do if user has no internet
+          } else {
+            console.error('Server Error:' + error.message);
+            // To do: control here if the server gave an error
+          }
+          break;
+        default:
+          console.error('Error:' + error.message);
+          // To do: control here if the client/other things gave an error
+      }
+    }
+}
+
+
+
+

This class can be used to control the different type of errors. If wanted, the classname variable could be used to add more switch cases. This would allow control of more specific situations.

+
+
+
+
+

Creating a ErrorInterceptor

+
+
+

Inside the same folder created in the last step we are going to create the ErrorInterceptor (errors-handler-interceptor.ts). This ErrorInterceptor is going to retry any failed calls to the server, to make sure the failure is not transient, before showing the error:

+
+
+
+
import { HttpInterceptor, HttpRequest, HttpHandler, HttpEvent } from '@angular/common/http';
+import { Injectable } from '@angular/core';
+import { Observable, of } from 'rxjs';
+import { retryWhen, delay, take, concatMap } from 'rxjs/operators';
+
+@Injectable()
+export class ErrorsHandlerInterceptor implements HttpInterceptor {
+
+    constructor() {}
+    intercept(req: HttpRequest<any>, next: HttpHandler): Observable<HttpEvent<any>> {
+        return next.handle(req).pipe(
+            retryWhen((errors: Observable<any>) => errors.pipe(
+                delay(500),
+                take(5),
+                concatMap((error: any, retryIndex: number) => {
+                    if (++retryIndex == 5) {
+                        throw error;
+                    }
+                    return of(error);
+                })
+            ))
+        );
+    }
+}
+
+
+
+

This custom-made interceptor implements the HttpInterceptor and, inside the intercept method, uses the pipe, retryWhen, delay, take and concatMap methods from RxJS; it is going to do the following things if there are errors:

+
+
+
    +
  1. +

    With delay(500) do a delay to allow some time in between requests

    +
  2. +
  3. +

    With take(5) retry five times.

    +
  4. +
  5. +

    With concatMap if the index that take() gives is not 5 it returns the error, else, it throws the error.

    +
  6. +
+
+
+
+
+

Creating a Error Module

+
+
+

Finally, creating a module(errors-handler.module.ts) is necessary to include the interceptor and the custom error handler. In this case, the module is going to be created in the same folder as the last two:

+
+
+
+
import { NgModule, ErrorHandler } from '@angular/core';
+import { CommonModule } from '@angular/common';
+import { ErrorsHandler } from './errors-handler';
+import { HTTP_INTERCEPTORS } from '@angular/common/http';
+import { ErrorsHandlerInterceptor } from './errors-handler-interceptor';
+
+@NgModule({
+  declarations: [], // Declare here component if you want to use routing to error component
+  imports: [
+    CommonModule
+  ],
+  providers: [
+    {
+      provide: ErrorHandler,
+      useClass: ErrorsHandler,
+    },
+    {
+      provide: HTTP_INTERCEPTORS,
+      useClass: ErrorsHandlerInterceptor,
+      multi: true,
+    }
+  ]
+})
+export class ErrorsHandlerModule { }
+
+
+
+

This module simply is providing the services that are implemented by our custom classes and then telling angular to use our custom made classes instead of the default ones. After doing this, the module has to be included in the app module app.module.ts in order to be used.

+
+
+
+
....
+  imports: [
+    ErrorsHandlerModule,
+    ....
+
+
+
+
+
+

Handling Errors

+
+
+

As a final step, handling these errors is necessary. There are different ways that can be used to control the errors, here are a few:

+
+
+
    +
  • +

    Creating a custom page and using with Router to redirect to a page showing an error.

    +
  • +
  • +

    Creating a service in the server side or Backend to create a log with the error and calling it with HttpClient.

    +
  • +
  • +

    Showing a custom made SnackBar with the error message.

    +
  • +
+
+
+
+
+

== Using SnackBarService and NgZone

+
+
+

If the SnackBar is used directly, some errors can occur; this is because the SnackBar is outside of the Angular zone. In order to use this service properly, NgZone is necessary. The method run() from NgZone will allow the service to be inside the Angular Zone. An example on how to use it:

+
+
+
+
import { ErrorHandler, Injectable, Injector, NgZone } from '@angular/core';
+import { HttpErrorResponse } from '@angular/common/http';
+import { MatSnackBar } from '@angular/material';
+
+@Injectable()
+export class ErrorsHandler implements ErrorHandler {
+
+    constructor(private injector: Injector, private zone: NgZone) {}
+
+    handleError(error: Error | HttpErrorResponse) {
+      // Use injector to get the necessary services to redirect or
+      const snackBar: MatSnackBar = this.injector.get(MatSnackBar);
+      const classname  = error.constructor.name;
+      let message: string;
+      switch ( classname )  {
+        case 'HttpErrorResponse':
+          message = !(navigator.onLine) ? 'There is no internet connection' : error.message;
+          break;
+        default:
+          message = error.message;
+      }
+      this.zone.run(
+        () => snackBar.open(message, 'danger', { duration : 4000})
+      );
+    }
+}
+
+
+
+

Using Injector the MatSnackBar is obtained, then the correct message is obtained inside the switch. Finally, using NgZone and run(), we open the SnackBar passing the message, and the parameters wanted.

+
+
+

You can find a working example of this guide in devon4ts-samples.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-eslint.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-eslint.html new file mode 100644 index 00000000..22c9421a --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-eslint.html @@ -0,0 +1,385 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular ESLint support

+
+
+ + + + + +
+ + +ESLint is supported from Angular 10.1.0 onward. +
+
+
+
+
+

What about TSLint?

+
+
+

TSLint is a fantastic tool. It is a linter that was written specifically to work based on the TypeScript AST format. This has advantages and disadvantages, as with most decisions we are faced with in software engineering!

+
+
+

One advantage is there is no tooling required to reconcile differences between ESLint and TypeScript AST formats, but the major disadvantage is that the tool is therefore unable to reuse any of the previous work which has been done in the JavaScript ecosystem around linting, and it has to re-implement everything from scratch. Everything from rules to auto-fixing capabilities and more.

+
+
+

However, the backers behind TSLint announced in 2019 that they would be deprecating TSLint in favor of supporting typescript-eslint in order to benefit the community. You can read more about that here

+
+
+

The TypeScript Team themselves also announced their plans to move the TypeScript codebase from TSLint to typescript-eslint, and they have been big supporters of this project. More details at https://github.com/microsoft/TypeScript/issues/30553

+
+
+

Angular ESLint support comes from the angular-eslint tooling package. Angular documentation also links to this repository as you can check in the ng lint section of the Angular CLI documentation.

+
+
+
+
+

Quick start with Angular and ESLint

+
+
+

In order to create a brand new Angular CLI workspace which uses ESLint instead of TSLint and Codelyzer, simply run the following commands:

+
+
+
+
##Install the Angular CLI and @angular-eslint/schematics globally however you want (e.g. npm, yarn, volta etc)
+
+$ npm i -g @angular/cli @angular-devkit/core @angular-devkit/schematics @angular-eslint/schematics
+
+##Create a new Angular CLI workspace using the @angular-eslint/schematics collection (instead of the default)
+
+$ ng new --collection=@angular-eslint/schematics
+
+
+
+
+
+

Migrating an Angular CLI project from Codelyzer and TSLint

+
+ +
+
+
+

1 - Add relevant dependencies

+
+
+

The first step is to run the schematic to add @angular-eslint to your project:

+
+
+
+
$ ng add @angular-eslint/schematics
+
+
+
+

This will handle installing the latest version of all the relevant packages for you and adding them to the devDependencies of your package.json.

+
+
+
+
+

2 - Run the convert-tslint-to-eslint schematic on a project

+
+
+

The next thing to do is consider which "project" you want to migrate to use ESLint. If you have a single application in your workspace you will likely have just a single entry in the projects configuration object within your angular.json file. If you have a projects/ directory in your workspace, you will have multiple entries in your projects configuration and you will need to choose which one you want to migrate using the convert-tslint-to-eslint schematic.

+
+
+

You can run it like so:

+
+
+
+
$ ng g @angular-eslint/schematics:convert-tslint-to-eslint {{YOUR_PROJECT_NAME_GOES_HERE}}
+
+
+
+

From now on, ng lint will use ESLint!

+
+
+
+
+

3 - Remove root TSLint configuration and use only ESLint

+
+
+

Once you are happy with your ESLint setup, you simply need to remove the root-level tslint.json and potentially uninstall TSLint and any TSLint-related plugins/dependencies if your Angular CLI workspace is now no longer using TSLint at all.

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-file-structure.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-file-structure.html new file mode 100644 index 00000000..a7b66757 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-file-structure.html @@ -0,0 +1,421 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

File Structure

+
+ +
+
+
+

Top-level

+
+
+

The top-level file structure is defined by Angular CLI. You might put this "top-level file structure" into a sub-directory to facilitate your build, but this is not relevant for this guide. So the applications file structure relevant to this guide is the folder /src/app inside the part managed by Angular CLI.

+
+
+
Listing 1. Top-level file structure shows feature modules
+
+
    /src
+    └── /app
+        ├── /account-management
+        ├── /billing
+        ├── /booking
+        ├── /core
+        ├── /shared
+        ├── /status
+        |
+        ├── app.module.ts
+        ├── app.component.spec.ts
+        ├── app.component.ts
+        └── app.routing-module.ts
+
+
+
+

Besides the definition of app module the app folder has feature modules on top-level. +The special modules shared and core are present as well.

+
+
+
+
+

Feature Modules

+
+
+

A feature module contains the modules definition and two folders representing both layers.

+
+
+
Listing 2. Feature module file structure has both layers
+
+
    /src
+    └── /app
+        └── /account-management
+            ├── /components
+            ├── /services
+            |
+            ├── account-management.module.ts
+            ├── account-management.component.spec.ts
+            ├── account-management.component.ts
+            └── account-management.routing-module.ts
+
+
+
+

Additionally an entry component is possible. This would be the case in lazy loading scenarios. +So account-management.component.ts would be only present if account-management is lazy loaded. +Otherwise, the module’s routes would be defined Component-less +(see vsavkin blog post).

+
+
+
+
+

Components Layer

+
+
+

The component layer reflects the distinction between Smart Components and Dumb Components.

+
+
+
Listing 3. Components layer file structure shows Smart Components on top-level
+
+
    /src
+    └── /app
+        └── /account-management
+            └── /components
+                ├── /account-overview
+                ├── /confirm-modal
+                ├── /create-account
+                ├── /forgot-password
+                └── /shared
+
+
+
+

Every folder inside the /components folder represents a smart component. The only exception is /shared. +/shared contains Dumb Components shared across Smart Components inside the components layer.

+
+
+
Listing 4. Smart components contain Dumb components
+
+
    /src
+    └── /app
+        └── /account-management
+            └── /components
+                └── /account-overview
+                    ├── /user-info-panel
+                    |   ├── /address-tab
+                    |   ├── /last-activities-tab
+                    |   |
+                    |   ├── user-info-panel.component.html
+                    |   ├── user-info-panel.component.scss
+                    |   ├── user-info-panel.component.spec.ts
+                    |   └── user-info-panel.component.ts
+                    |
+                    ├── /user-header
+                    ├── /user-toolbar
+                    |
+                    ├── account-overview.component.html
+                    ├── account-overview.component.scss
+                    ├── account-overview.component.spec.ts
+                    └── account-overview.component.ts
+
+
+
+

Inside the folder of a Smart Component the component is defined. +Besides that are folders containing the Dumb Components the Smart Component consists of. +This can be recursive - a Dumb Component can consist of other Dumb Components. +This is reflected by the file structure as well. This way the structure of a view becomes very readable. +As mentioned before, if a Dumb Component is used by multiple Smart Components inside the components layer +it is put inside the /shared folder inside the components layer.

+
+
+

With this way of thinking the shared module makes a lot of sense. If a Dumb Component is used by multiple Smart Components +from different feature modules, the Dumb Component is placed into the shared module.

+
+
+
Listing 5. The shared module contains Dumb Components shared across Smart Components from different feature modules
+
+
    /src
+    └── /app
+        └── /shared
+            └── /user-panel
+                |
+                ├── user-panel.component.html
+                ├── user-panel.component.scss
+                ├── user-panel.component.spec.ts
+                └── user-panel.component.ts
+
+
+
+

The layer folder /components is not necessary inside the shared module. +The shared module only contains components!

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-internationalization.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-internationalization.html new file mode 100644 index 00000000..1081bb61 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-internationalization.html @@ -0,0 +1,575 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Internationalization

+
+
+

Nowadays, a common scenario in front-end applications is to have the ability to translate labels and localize numbers, dates, currency and so on when the user clicks over a language selector or similar. devon4ng and specifically Angular has a default mechanism in order to fill the gap of such features, and besides there are some widely used libraries that make it even easier to translate applications.

+
+ +
+
+
+

devon4ng i18n approach

+
+
+

The official approach could be a bit complicated, therefore the recommended one is to use the Transloco library from https://github.com/ngneat/transloco/.

+
+
+
+
+

Install and configure Transloco

+
+
+

In order to include this library in your devon4ng Angular >= 7.2 project you will need to execute in a terminal:

+
+
+
+
$ ng add @ngneat/transloco
+
+
+
+

As part of the installation process you’ll be presented with questions; once you answer them, everything you need will automatically be created for you.

+
+
+
    +
  • +

    First, Transloco creates boilerplate files for the requested translations.

    +
  • +
  • +

    Next, it will create a new file, transloco-root.module.ts which exposes an Angular’s module with a default configuration, and inject it into the AppModule.

    +
  • +
+
+
+
+
import { HttpClient } from '@angular/common/http';
+import {
+  TRANSLOCO_LOADER,
+  Translation,
+  TranslocoLoader,
+  TRANSLOCO_CONFIG,
+  translocoConfig,
+  TranslocoModule
+} from '@ngneat/transloco';
+import { Injectable, NgModule } from '@angular/core';
+import { environment } from '../environments/environment';
+
+@Injectable({ providedIn: 'root' })
+export class TranslocoHttpLoader implements TranslocoLoader {
+  constructor(private http: HttpClient) {}
+
+  getTranslation(lang: string) {
+    return this.http.get<Translation>(`/assets/i18n/${lang}.json`);
+  }
+}
+
+@NgModule({
+  exports: [ TranslocoModule ],
+  providers: [
+    {
+      provide: TRANSLOCO_CONFIG,
+      useValue: translocoConfig({
+        availableLangs: ['en', 'es'],
+        defaultLang: 'en',
+        // Remove this option if your application doesn't support changing language in runtime.
+        reRenderOnLangChange: true,
+        prodMode: environment.production,
+      })
+    },
+    { provide: TRANSLOCO_LOADER, useClass: TranslocoHttpLoader }
+  ]
+})
+export class TranslocoRootModule {}
+
+
+
+ + + + + +
+ + +As you might have noticed it also set an HttpLoader into the module’s providers. The HttpLoader is a class that implements the TranslocoLoader interface. It’s responsible for instructing Transloco how to load the translation files. It uses Angular HTTP client to fetch the files, based on the given path. +
+
+
+
+
+

Usage

+
+
+

In order to translate any label in any HTML template you will need to use the transloco pipe available:

+
+
+
+
{{ 'HELLO' | transloco }}
+
+
+
+

An optional parameter from the component TypeScript class could be included as follows:

+
+
+
+
{{ 'HELLO' | transloco: { value: dynamic } }}
+
+
+
+

It is possible to use with inputs:

+
+
+
+
<span [attr.alt]="'hello' | transloco">Attribute</span>
+<span [title]="'hello' | transloco">Property</span>
+
+
+
+

In order to change the language used you will need to create a button or selector that calls the this.translocoService.setActiveLang(lang: string) method from TranslocoService. For example:

+
+
+
+
export class AppComponent {
+  constructor(private translocoService: TranslocoService) {}
+
+  changeLanguage(lang) {
+      this.translocoService.setActiveLang(lang);
+  }
+}
+
+
+
+

The translations will be included in the en.json, es.json, de.json, etc. files inside the /assets/i18n folder. For example en.json would be (using the previous parameter):

+
+
+
+
{
+    "HELLO": "hello"
+}
+
+
+
+

Or with an optional parameter:

+
+
+
+
{
+    "HELLO": "hello {{value}}"
+}
+
+
+
+

Transloco understands nested JSON objects. This means that you can have a translation that looks like this:

+
+
+
+
{
+    "HOME": {
+        "HELLO": "hello {{value}}"
+    }
+}
+
+
+
+

In order to access the value, use the dot notation, in this case HOME.HELLO.

+
+
+
+
+

Using the service, pipe or directive

+
+ +
+
+
+

== Structural Directive

+
+
+

Using a structural directive is the recommended approach. It’s DRY and efficient, as it creates one subscription per template:

+
+
+
+
<ng-container *transloco="let t">
+  <p>{{ t('title') }}</p>
+
+  <comp [title]="t('title')"></comp>
+</ng-container>
+
+
+
+

Note that the t function is memoized. It means that given the same key it will return the result directly from the cache.

+
+
+

We can pass a params object as the second parameter:

+
+
+
+
<ng-container *transloco="let t">
+  <p>{{ t('name', { name: 'Transloco' }) }}</p>
+</ng-container>
+
+
+
+

We can instruct the directive to use a different language in our template:

+
+
+
+
<ng-container *transloco="let t; lang: 'es'">
+  <p>{{ t('title') }}</p>
+</ng-container>
+
+
+
+
+
+

== Pipe

+
+
+

The use of pipes can be possible too:

+
+
+

template:

+
+
+
+
<div>{{ 'HELLO' | transloco:param }}</div>
+
+
+
+

component:

+
+
+
+
param = {value: 'world'};
+
+
+
+
+
+

== Attribute Directive

+
+
+

The last option available with transloco is the attribute directive:

+
+
+
+
<div transloco="HELLO" [translocoParams]="{ value: 'world' }"></div>
+
+
+
+
+
+

== Service

+
+
+

If you need to access translations in any component or service you can do it injecting the TranslocoService into them:

+
+
+
+
// Sync translation
+translocoService.translate('HELLO', {value: 'world'});
+
+// Async translation
+translocoService.selectTranslate('HELLO', { value: 'world' }).subscribe(res => {
+    console.log(res);
+    //=> 'hello world'
+});
+
+
+
+ + + + + +
+ + +You can find a complete example at https://github.com/devonfw/devon4ng-application-template. +
+
+
+

Please, visit https://github.com/ngneat/transloco/ for more info.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-ionic-from-code-to-android.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-ionic-from-code-to-android.html new file mode 100644 index 00000000..1b10a470 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-ionic-from-code-to-android.html @@ -0,0 +1,606 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Ionic to android

+
+
+

This page is written to help developers to go from the source code of an ionic application to an android one, with this in mind, topics such as: environment, commands, modifications,…​ are covered.

+
+
+
+
+

Assumptions

+
+
+

This document assumes that the reader has already:

+
+
+
    +
  • +

    Source code of an Ionic application and wants to build it on an android device,

    +
  • +
  • +

    A working installation of NodeJS

    +
  • +
  • +

    An Ionic CLI installed and up-to-date.

    +
  • +
  • +

    Android Studio and Android SDK.

    +
  • +
+
+
+
+
+

From Ionic to Android project

+
+
+

When a native application is being designed, sometimes, functionalities that uses camera, geolocation, push notification, …​ are requested. To resolve these requests, Capacitor can be used.

+
+
+

In general terms, Capacitor wraps apps made with Ionic (HTML, SCSS, Typescript) into WebViews that can be displayed in native applications (Android, iOS) and allows the developer to access native functionalities like the ones said before.

+
+
+

Installing capacitor is as easy as installing any node module, just a few commands have to be run in a console:

+
+
+
    +
  • +

    cd name-of-ionic-4-app

    +
  • +
  • +

    npm install --save @capacitor/core @capacitor/cli

    +
  • +
+
+
+

Then, it is necessary to initialize capacitor with some information: app id, name of the app and the directory where your app is stored. To fill this information, run:

+
+
+
    +
  • +

    npx cap init

    +
  • +
+
+
+
+
+

Modifications

+
+
+

Throughout the development process, usually back-end and front-end are on a local computer, so it’s a common practice to have different configuration files for each environment (commonly production and development). Ionic uses an angular.json file to store those configurations and some rules to be applied.

+
+
+

If a back-end is hosted on http://localhost:8081, and that direction is used in every environment, the application built for android will not work because computer and device do not have the same localhost. Fortunately, different configurations can be defined.

+
+
+

The Android emulator uses 10.0.2.2 as an alias for 127.0.0.1 (the computer’s localhost), so adding http://10.0.2.2:8081 in a new environment file and modifying angular.json accordingly will make it possible to connect front-end and back-end.

+
+
+
+Android environment and angular.json +
+
+
+
+
    "build": {
+    ...
+        "configurations": {
+            ...
+            "android": {
+                "fileReplacements": [
+                    {
+                        "replace": "src/environments/environment.ts",
+                        "with": "src/environments/environment.android.ts"
+                    }
+                ]
+            },
+        }
+    }
+
+
+
+
+
+

Build

+
+
+

Once configured, it is necessary to build the Ionic app using this new configuration:

+
+
+
    +
  • +

    ionic build --configuration=android

    +
  • +
+
+
+

The next commands copy the build application on a folder named android and open android studio.

+
+
+
    +
  • +

    npx cap add android

    +
  • +
  • +

    npx cap copy

    +
  • +
  • +

    npx cap open android

    +
  • +
+
+
+
+
+

From Android project to emulated device

+
+
+

Once Android Studio is opened, follow these steps:

+
+
+
    +
  1. +

    Click on "Build" → Make project.

    +
  2. +
  3. +

    Click on "Build" → Make Module 'app' (default name).

    +
  4. +
+
+
+

Click on make project +click on make app

+
+
+
    +
  1. +

    Click on" Build" → Build Bundle(s) / APK(s) → Build APK(s).

    +
  2. +
  3. +

    Click on run and choose a device.

    +
  4. +
+
+
+

click on build APK +click on running device

+
+
+

If there are no devices available, a new one can be created:

+
+
+
    +
  1. +

    Click on "Create new device"

    +
  2. +
  3. +

    Select hardware and click "Next". For example: Phone → Nexus 5X.

    +
  4. +
+
+
+

Create new device +Select hardware

+
+
+
    +
  1. +

    Download a system image.

    +
    +
      +
    1. +

      Click on download.

      +
    2. +
    3. +

      Wait until the installation finished and then click "Finish".

      +
    4. +
    5. +

      Click "Next".

      +
    6. +
    +
    +
  2. +
  3. +

    Verify configuration (default configuration should be enough) and click "Next".

    +
  4. +
+
+
+

Download system image +Check configuration

+
+
+
    +
  1. +

    Check that the new device is created correctly.

    +
  2. +
+
+
+
+New created device +
+
+
+
+
+

From Android project to real device

+
+
+

To test on a real android device, an easy approach to communicate a smartphone (front-end) and computer (back-end) is to configure a WiFi hotspot and connect the computer to it. A guide about this process can be found here.

+
+
+

Once connected, run ipconfig on a console if you are using windows or ifconfig on a Linux machine to get the IP address of your machine’s Wireless LAN adapter WiFi.

+
+
+
+Result of `ipconfig` command on Windows 10 +
+
+
+

This obtained IP must be used instead of "localhost" or "10.0.2.2" at environment.android.ts.

+
+
+
+Android environment file server URL +
+
+
+

After this configuration, follow the build steps in "From Ionic to Android project" and the first three steps in "From Android project to emulated device".

+
+
+
+
+

Send APK to Android through USB

+
+
+

To send the built application to a device, you can connect computer and mobile through USB, but first, it is necessary to unlock developer options.

+
+
+
    +
  1. +

    Open "Settings" and go to "System".

    +
  2. +
  3. +

    Click on "About".

    +
  4. +
  5. +

    Click "Build number" seven times to unlock developer options.

    +
  6. +
+
+
+
+Steps to enable developer options: 1, 2, 3 +
+
+
+
    +
  1. +

    Go to "System" again and then to "Developer options"

    +
  2. +
  3. +

    Check that the options are "On".

    +
  4. +
  5. +

    Check that "USB debugging" is activated.

    +
  6. +
+
+
+
+Steps to enable developer options: 4, 5, 6 +
+
+
+

After this, do the step four in "From Android project to emulated device" and choose the connected smartphone.

+
+
+
+
+

Send APK to Android through email

+
+
+

When you build an APK, a dialog gives two options: locate or analyze. If the first one is chosen, Windows file explorer will be opened showing an APK that can be sent using email. Download the APK on your phone and click it to install.

+
+
+
+Steps to enable developer options: 4, 5, 6 +
+
+
+
+
+

Result

+
+
+

If everything goes correctly, the Ionic application will be ready to be tested.

+
+
+
+Application running on a real device +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-ionic-getting-started.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-ionic-getting-started.html new file mode 100644 index 00000000..7d61ce13 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-ionic-getting-started.html @@ -0,0 +1,383 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Ionic 5 Getting started

+
+
+

Ionic is a front-end focused framework which offers different tools for developing hybrid mobile applications. The web technologies used for this purpose are CSS, Sass, HTML5 and Typescript.

+
+
+
+
+

Why Ionic?

+
+
+

Ionic is used for developing hybrid applications, which means not having to rely on a specific IDE such as Android Studio or Xcode. Furthermore, development of native apps require learning different languages (Java/Kotlin for Android and Objective-C/Swift for Apple), with Ionic, a developer does not have to code the same functionality for multiple platforms, just use the adequate libraries and components.

+
+
+
+
+

Basic environment set up

+
+ +
+
+
+

Install Ionic CLI

+
+
+

Although the devonfw distribution comes with an already installed Ionic CLI, here are the steps to install it. The installation of Ionic is easy, just one command has to be written:

+
+
+

$ npm install -g @ionic/cli

+
+
+
+
+

Update Ionic CLI

+
+
+

If there was a previous installation of the Ionic CLI, it will need to be uninstalled due to a change in package name.

+
+
+
+
$ npm uninstall -g ionic
+$ npm install -g @ionic/cli
+
+
+
+

Basic project set up +The set up of an Ionic application is pretty immediate and can be done in one line:

+
+
+

ionic start <name> <template> --type=angular

+
+
+
    +
  • +

    ionic start: Command to create an app.

    +
  • +
  • +

    <name>: Name of the application.

    +
  • +
  • +

    <template>: Model of the application.

    +
  • +
  • +

    --type=angular: With this flag, the app produced will be based on angular.

    +
  • +
+
+
+

To create an empty project, the following command can be used:

+
+
+

ionic start MyApp blank --type=angular

+
+
+
+Ionic blank project +
+
+
+

The image above shows the directory structure generated.

+
+
+

There are more templates available that can be seen with the command +ionic start --list

+
+
+
+List of ionic templates +
+
+
+

The templates surrounded by red line are based on angular and comes with Ionic v5, while the others belong to earlier versions (before v4).

+
+
+ + + + + +
+ + +More info at https://ionicframework.com/docs. Remember to select Angular documentation, since Ionic supports React, Vue and Vanilla JS. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-ionic-pwa.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-ionic-pwa.html new file mode 100644 index 00000000..4ebfba83 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-ionic-pwa.html @@ -0,0 +1,545 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Ionic Progressive Web App

+
+
+

This guide is a continuation of the guide Angular PWAs, therefore, valid concepts explained there are still valid in this page but focused on Ionic.

+
+
+
+
+

Assumptions

+
+
+

This guide assumes that you already have installed:

+
+
+
    +
  • +

    NodeJS

    +
  • +
  • +

    npm package manager

    +
  • +
  • +

    Angular CLI / Nx CLI

    +
  • +
  • +

    Ionic 5 CLI

    +
  • +
  • +

    Capacitor

    +
  • +
+
+
+

Also, it is a good idea to read the document about PWA using Angular.

+
+
+
+
+

Sample Application

+
+
+
+Ionic 5 PWA Base +
+
Figure 1. Basic ionic PWA.
+
+
+

To explain how to build progressive web apps (PWA) using Ionic, a basic application is going to be built. This app will be able to take photos even without network using PWA elements.

+
+
+
+
+

Step 1: Create a new project

+
+
+

This step can be completed with one simple command: ionic start <name> <template>, where <name> is the name and <template> a model for the app. In this case, the app is going to be named basic-ion-pwa.

+
+
+

If you are using Nx, there is a pre-requisite to this step. And that is, you have to add the @nxtend/ionic-angular plugin to your Nx workspace. The command for that is npm install --save-dev @nxtend/ionic-angular. Once you have the plugin installed, you can generate an Ionic app in your Nx workspace with the command nx generate @nxtend/ionic-angular:app basic-ion-pwa. (You can refer to this guide if you want to get started with Nx).

+
+
+
+
+

Step 2: Structures and styles

+
+
+

The styles (scss) and structures (html) do not have anything specially relevant, just colors and ionic web components. The code can be found in devon4ts-samples.

+
+
+
+
+

Step 3: Add functionality

+
+
+

After this step, the app will allow users to take photos and display them in the main screen. +First we have to import three important elements:

+
+
+
    +
  • +

    DomSanitizer: Sanitizes values to be safe to use.

    +
  • +
  • +

    SafeResourceUrl: Interface for values that are safe to use as URL.

    +
  • +
  • +

    Plugins: Capacitor constant value used to access to the device’s camera and toast dialogs.

    +
  • +
+
+
+
+
  import { DomSanitizer, SafeResourceUrl } from '@angular/platform-browser';
+  import { Plugins, CameraResultType } from '@capacitor/core';
+
+
+
+

The process of taking a picture is enclosed in a takePicture() method. takePicture() calls the Camera’s getPhoto() function which returns an URL or an exception. If a photo is taken then the image displayed in the main page will be changed for the new picture, else, if the app is closed without changing it, a toast message will be displayed.

+
+
+
+
  export class HomePage {
+    image: SafeResourceUrl;
+    ...
+
+    async takePicture() {
+      try {
+        const image = await Plugins.Camera.getPhoto({
+          quality: 90,
+          allowEditing: true,
+          resultType: CameraResultType.Uri,
+        });
+
+        // Change last picture shown
+        this.image = this.sanitizer.bypassSecurityTrustResourceUrl(image.webPath);
+      } catch (e) {
+        this.show('Closing camera');
+      }
+    }
+
+    async show(message: string) {
+      await Plugins.Toast.show({
+        text: message,
+      });
+    }
+  }
+
+
+
+
+
+

Step 4: PWA Elements

+
+
+

When Ionic apps are not running natively, some resources like Camera do not work by default but can be enabled using PWA Elements. To use Capacitor’s PWA elements run npm install @ionic/pwa-elements and modify src/main.ts as shown below.

+
+
+
+
...
+
+// Import for PWA elements
+import { defineCustomElements } from '@ionic/pwa-elements/loader';
+
+if (environment.production) {
+  enableProdMode();
+}
+
+platformBrowserDynamic().bootstrapModule(AppModule)
+  .catch(err => console.log(err));
+
+// Call the element loader after the platform has been bootstrapped
+defineCustomElements(window);
+
+
+
+
+
+

Step 5: Make it Progressive.

+
+
+

Turning an Ionic 5 app into a PWA is pretty easy. The same module used to turn Angular apps into PWAs has to be added. To do so, run: ng add @angular/pwa. This command also creates an icons folder inside src/assets and contains angular icons for multiple resolutions. (Note: In an Nx workspace, you have to add it like a normal package using npm install @angular/pwa, and you have to manually add the icons). If you want to use other images, be sure that they have the same resolution, the names can be different but the file manifest.json has to be changed accordingly.

+
+
+
+
+

Step 6: Configure the app

+
+
+

manifest.json

+
+
+

Default configuration.

+
+
+

ngsw-config.json

+
+
+

At assetGroups → resources, add a urls field and a pattern to match PWA Elements scripts and other resources (images, styles, …):

+
+
+
+
  "urls": ["https://unpkg.com/@ionic/pwa-elements@1.0.2/dist/**"]
+
+
+
+
+
+

Step 7: Check that your app is a PWA

+
+
+

To check if an app is a PWA lets compare its normal behavior against itself but built for production. Run in the project’s root folder the commands below:

+
+
+

ionic build --configuration production to build the app using production settings. (nx build basic-ion-pwa --configuration production in your Nx workspace root).

+
+
+

npm install http-server to install an npm module that can serve your built application. Documentation here. A good alternative is also npm install serve. It can be checked here.

+
+
+

Go to the www folder running cd www.

+
+
+

http-server -o or serve to serve your built app.

+
+
+ + + + + +
+ + +In order not to install anything not necessary npx can be used directly to serve the app. i.e run npx serve [folder] will automatically download and run this HTTP server without installing it in the project dependencies. +
+
+
+
+Http server running +
+
Figure 2. Http server running on localhost:8081.
+
+
+

 
+In another console instance run ionic serve (nx serve basic-ion-pwa if using Nx CLI) to open the common app (not built).

+
+
+
+Ionic serve on Visual Studio Code console +
+
Figure 3. Ionic server running on localhost:8100.
+
+
+

 
+The first difference can be found on Developer tools → application, here it is seen that the PWA application (left) has a service worker and the common one does not.

+
+
+
+Application comparison +
+
Figure 4. Application service worker comparison.
+
+
+

 
+If the "offline" box is checked, it will force a disconnection from the network. In situations where users do not have connectivity or have a slow one, the PWA can still be accessed and used.

+
+
+
+Online offline apps +
+
Figure 5. Offline application.
+
+
+

 
+Finally, plugins like Lighthouse can be used to test whether an application is progressive or not.

+
+
+
+Lighthouse report +
+
Figure 6. Lighthouse report.
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-layout-with-angular-material.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-layout-with-angular-material.html new file mode 100644 index 00000000..9514feed --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-layout-with-angular-material.html @@ -0,0 +1,750 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Material Layout

+
+
+

The purpose of this guide is to get a basic understanding of creating layouts using Angular Material in a devon4ng application. We will create an application with a header containing some menu links and a sidenav with some navigation links.

+
+
+
+Finished application +
+
Figure 1. This is what the finished application will look like
+
+
+
+
+

Create a new angular application

+
+
+

We start with opening the devonfw IDE (right-click anywhere in your workspace and click "Open devonfw CMD shell here") and running the following command to start a project named devon4ng-mat-layout

+
+
+
    +
  • +

    ng new devon4ng-mat-layout --routing --style=scss. If you are using Nx, the command would be nx generate @nrwl/angular:app devon4ng-mat-layout --routing --style=scss in your Nx workspace. Click here to get started with using Nx.

    +
  • +
+
+
+

We are providing the routing flag so that a routing module is generated, and we are also setting the style sheet format to SCSS with --style=scss.

+
+
+

Once the creation process is complete, open your newly created application in Visual Studio Code. Try running the empty application by running the following command in the integrated terminal:

+
+
+
    +
  • +

    ng serve. (If you are using Nx, you have to specify the project name along with the --project flag, so the command becomes ng serve --project=devon4ng-mat-layout)

    +
  • +
+
+
+

Angular will spin up a server and you can check your application by visiting http://localhost:4200/ in your browser.

+
+
+
+Blank application +
+
Figure 2. Blank application
+
+
+
+
+

Adding Angular Material library to the project

+
+
+

Next we will add Angular Material to our application. In the integrated terminal, press Ctrl + C to terminate the running application and run the following command:

+
+
+
    +
  • +

    npm install --save @angular/material @angular/cdk @angular/animations

    +
  • +
+
+
+

You can also use Yarn to install the dependencies if you prefer that:

+
+
+
    +
  • +

    yarn add @angular/material @angular/cdk @angular/animations

    +
  • +
+
+
+

Once the dependencies are installed, we need to import the BrowserAnimationsModule in our AppModule for animations support.

+
+
+
Listing 1. Importing BrowserAnimationsModule in AppModule
+
+
import {BrowserAnimationsModule} from '@angular/platform-browser/animations';
+
+@NgModule({
+  ...
+  imports: [BrowserAnimationsModule],
+  ...
+})
+export class AppModule { }
+
+
+
+

Angular Material provides a host of components for designing our application. All the components are well structured into individual NgModules. For each component from the Angular Material library that we want to use, we have to import the respective NgModule.

+
+
+
Listing 2. We will be using the following components in our application:
+
+
import { MatIconModule, MatButtonModule, MatMenuModule, MatListModule, MatToolbarModule, MatSidenavModule } from '@angular/material';
+
+@NgModule({
+  ...
+  imports: [
+	...
+    MatIconModule,
+    MatButtonModule,
+    MatMenuModule,
+    MatListModule,
+    MatToolbarModule,
+    MatSidenavModule,
+	...
+	],
+  ...
+})
+export class AppModule { }
+
+
+
+

A better approach is to import and then export all the required components in a shared module. But for the sake of simplicity, we are importing all the required components in the AppModule itself.

+
+
+
+
+

==

+
+
+
+
  You can find a working copy of this application https://github.com/devonfw-sample/devon4ts-samples/tree/master/apps/angular-material-basic-layout[here]. The sample application is part of an Nx workspace, which means it is one of the many apps in a monorepo and capable of importing reusable code from a shared library. This guide describes the implementation by assuming a stand-alone single-repo application, but the pages and layout described in this sample app are similar to the ones used in another sample app in the monorepo (https://github.com/devonfw-sample/devon4ts-samples/tree/master/apps/angular-material-theming[angular-material-theming]), which is why we have exported the required components from a shared library and reused them in both the apps. As a result, the code in our monorepo will be slightly different. It would still help you in following this guide.
+== ==
+
+
+
+

Next, we include a theme in our application. Angular Material comes with four pre-defined themes: indigo-pink, deeppurple-amber, pink-bluegrey and purple-green. It is also possible to create our own custom theme, but that is beyond the scope of this guide. Including a theme is required to apply all of the core and theme styles to your application. +We will include the indigo-pink theme in our application by importing the indigo-pink.css file in our src/styles.scss:

+
+
+
Listing 3. In src/styles.scss:
+
+
@import "~@angular/material/prebuilt-themes/indigo-pink.css";
+
+
+
+

To use Material Design Icons along with the mat-icon component, we will load the Material Icons library in our src/index.html file

+
+
+
Listing 4. In src/index.html:
+
+
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
+
+
+
+
+
+

Development

+
+
+

Now that we have all the Angular Material related dependencies set up in our project, we can start coding. Let’s begin by adding a suitable margin and font to the body element of our single page application. We will add it in the src/styles.scss file to apply it globally:

+
+
+
Listing 5. In src/styles.scss:
+
+
body {
+  margin: 0;
+  font-family: "Segoe UI", Roboto, sans-serif;
+}
+
+
+
+

At this point, if we run our application, this is how it will look like:

+
+
+
+Angular Material added to the application +
+
Figure 3. Application with Angular Material set up
+
+
+

We will clear the app.component.html file and setup a header with a menu button and some navigational links. We will use mat-toolbar, mat-button, mat-menu, mat-icon and mat-icon-button for this:

+
+
+
Listing 6. app.component.html:
+
+
<mat-toolbar color="primary">
+  <button mat-icon-button aria-label="menu">
+    <mat-icon>menu</mat-icon>
+  </button>
+  <button mat-button [matMenuTriggerFor]="submenu">Menu 1</button>
+  <button mat-button>Menu 2</button>
+  <button mat-button>Menu 3</button>
+
+  <mat-menu #submenu="matMenu">
+    <button mat-menu-item>Sub-menu 1</button>
+    <button mat-menu-item [matMenuTriggerFor]="submenu2">Sub-menu 2</button>
+  </mat-menu>
+
+  <mat-menu #submenu2="matMenu">
+    <button mat-menu-item>Menu Item 1</button>
+    <button mat-menu-item>Menu Item 2</button>
+    <button mat-menu-item>Menu Item 3</button>
+  </mat-menu>
+
+</mat-toolbar>
+
+
+
+

The color attribute on the mat-toolbar element will give it the primary (indigo) color as defined by our theme. The color attribute works with most Angular Material components; the possible values are 'primary', 'accent' and 'warn'. +The mat-toolbar is a suitable component to represent a header. It serves as a placeholder for elements we want in our header. +Inside the mat-toolbar, we start with a button having mat-icon-button attribute, which itself contains a mat-icon element having the value menu. This will serve as a menu button which we can use to toggle the sidenav. +We follow it with some sample buttons having the mat-button attribute. Notice the first button has a property matMenuTriggerFor bound to a local reference submenu. As the property name suggests, the click of this button will display the mat-menu element with the specified local reference as a drop-down menu. The rest of the code is self explanatory.

+
+
+
+Header added to the application +
+
Figure 4. This is how our application looks with the first menu button (Menu 1) clicked.
+
+
+

We want to keep the sidenav toggling menu button on the left and move the rest to the right to make it look better. To do this we add a class to the menu icon button:

+
+
+
Listing 7. app.component.html:
+
+
...
+  <button mat-icon-button aria-label="menu" class="menu">
+    <mat-icon>menu</mat-icon>
+  </button>
+...
+
+
+
+

And in the app.component.scss file, we add the following style:

+
+
+
Listing 8. app.component.scss:
+
+
.menu {
+    margin-right: auto;
+}
+
+
+
+

The mat-toolbar element already has it’s display property set to flex. Setting the menu icon button’s margin-right property to auto keeps itself on the left and pushes the other elements to the right.

+
+
+
+Final look of the header +
+
Figure 5. Final look of the header.
+
+
+

Next, we will create a sidenav. But before that lets create a couple of components to navigate between, the links of which we will add to the sidenav. +We will use the ng generate component (or ng g c command for short) to create Home and Data components. (Append --project=devon4ng-mat-layout to the command in a Nx workspace). We nest them in the pages sub-directory since they represent our pages.

+
+
+
    +
  • +

    ng g c pages/home

    +
  • +
  • +

    ng g c pages/data;

    +
  • +
+
+
+

Let us set up the routing such that when we visit http://localhost:4200/ root url we see the HomeComponent and when we visit http://localhost:4200/data url we see the DataComponent. +We had opted for routing while creating the application, so we have the routing module app-routing.module.ts setup for us. In this file, we have the empty routes array where we set up our routes.

+
+
+
Listing 9. app-routing.module.ts:
+
+
import { HomeComponent } from './pages/home/home.component';
+import { DataComponent } from './pages/data/data.component';
+
+	const routes: Routes = [
+	  { path: '', component: HomeComponent },
+	  { path: 'data', component: DataComponent }
+	];
+
+
+
+

We need to provide a hook where the components will be loaded when their respective URLs are loaded. We do that by using the router-outlet directive in the app.component.html.

+
+
+
Listing 10. app.component.html:
+
+
...
+	</mat-toolbar>
+	<router-outlet></router-outlet>
+
+
+
+

Now when we visit the defined URLs we see the appropriate components rendered on screen.

+
+
+

Let’s change the contents of the components to have something better.

+
+
+
Listing 11. home.component.html:
+
+
<h2>Home Page</h2>
+
+
+
+
Listing 12. home.component.scss:
+
+
h2 {
+    text-align: center;
+    margin-top: 50px;
+}
+
+
+
+
Listing 13. data.component.html:
+
+
<h2>Data Page</h2>
+
+
+
+
Listing 14. data.component.scss:
+
+
h2 {
+    text-align: center;
+    margin-top: 50px;
+}
+
+
+
+

The pages look somewhat better now:

+
+
+
+Home page +
+
Figure 6. Home page
+
+
+
+Data page +
+
Figure 7. Data page
+
+
+

Let us finally create the sidenav. To implement the sidenav we need to use 3 Angular Material components: mat-sidenav-container, mat-sidenav and mat-sidenav-content. +The mat-sidenav-container, as the name suggests, acts as a container for the sidenav and the associated content. So it is the parent element, and mat-sidenav and mat-sidenav-content are the children sibling elements. mat-sidenav represents the sidenav. We can put any content we want, though it is usually used to contain a list of navigational links. The mat-sidenav-content element is for containing the contents of our current page. Since we need the sidenav application-wide, we will put it in the app.component.html.

+
+
+
Listing 15. app.component.html:
+
+
...
+</mat-toolbar>
+
+<mat-sidenav-container>
+  <mat-sidenav mode="over" [disableClose]="false" #sidenav>
+    Sidenav
+  </mat-sidenav>
+  <mat-sidenav-content>
+    <router-outlet></router-outlet>
+  </mat-sidenav-content>
+</mat-sidenav-container>
+
+
+
+

The mat-sidenav has a mode property, which accepts one of the 3 values: over, push and side. It decides the behavior of the sidenav. mat-sidenav also has a disableClose property which accepts a boolean value. It toggles the behavior where we click on the backdrop or press the Esc key to close the sidenav. There are other properties which we can use to customize the appearance, behavior and position of the sidenav. You can find the properties documented online at https://material.angular.io/components/sidenav/api +We moved the router-outlet directive inside the mat-sidenav-content where it will render the routed component. +But if you check the running application in the browser, we don’t see the sidenav yet. That is because it is closed. We want to have the sidenav opened/closed at the click of the menu icon button on the left side of the header we implemented earlier. Notice we have set a local reference #sidenav on the mat-sidenav element. We can access this element and call its toggle() function to toggle open or close the sidenav.

+
+
+
Listing 16. app.component.html:
+
+
...
+  <button mat-icon-button aria-label="menu" class="menu" (click)="sidenav.toggle()">
+    <mat-icon>menu</mat-icon>
+  </button>
+...
+
+
+
+
+Sidenav works +
+
Figure 8. Sidenav is implemented
+
+
+

We can now open the sidenav by clicking the menu icon button. But it does not look right. The sidenav is only as wide as its content. Also the page does not stretch the entire viewport due to lack of content. +Let’s add the following styles to make the page fill the viewport:

+
+
+
Listing 17. app.component.scss:
+
+
...
+mat-sidenav-container {
+    position: absolute;
+    top: 64px;
+    left: 0;
+    right: 0;
+    bottom: 0;
+}
+
+
+
+

The sidenav width will be corrected when we add the navigational links to it. That is the only thing remaining to be done. Let’s implement it now:

+
+
+
Listing 18. app.component.html:
+
+
...
+  <mat-sidenav [disableClose]="false" mode="over" #sidenav>
+	<mat-nav-list>
+      <a
+        id="home"
+        mat-list-item
+        [routerLink]="['./']"
+        (click)="sidenav.close()"
+        routerLinkActive="active"
+        [routerLinkActiveOptions]="{exact: true}"
+      >
+        <mat-icon matListAvatar>home</mat-icon>
+        <h3 matLine>Home</h3>
+        <p matLine>sample home page</p>
+      </a>
+      <a
+        id="sampleData"
+        mat-list-item
+        [routerLink]="['./data']"
+        (click)="sidenav.close()"
+        routerLinkActive="active"
+      >
+        <mat-icon matListAvatar>grid_on</mat-icon>
+        <h3 matLine>Data</h3>
+        <p matLine>sample data page</p>
+      </a>
+    </mat-nav-list>
+  </mat-sidenav>
+...
+
+
+
+

We use the mat-nav-list element to set a list of navigational links. We use the a tags with the mat-list-item directive. We implement a click listener on each link to close the sidenav when it is clicked. The routerLink directive is used to provide the URLs to navigate to. The routerLinkActive directive is used to provide the class name which will be added to the link when its URL is visited. Here we name the class `active`. To style it, let’s modify the app.component.scss file:

+
+
+
Listing 19. app.component.scss:
+
+
...
+mat-sidenav-container {
+...
+	a.active {
+        background: #8e8d8d;
+        color: #fff;
+
+        p {
+            color: #4a4a4a;
+        }
+    }
+}
+
+
+
+

Now we have a working application with a basic layout: a header with some menu and a sidenav with some navigational links.

+
+
+
+Finished application +
+
Figure 9. Finished application
+
+
+
+
+

Conclusion

+
+
+

The purpose of this guide was to provide a basic understanding of creating layouts with Angular Material. The Angular Material library has a huge collection of ready to use components which can be found at https://material.angular.io/components/categories +It has provided documentation and example usage for each of its components. Going through the documentation will give a better understanding of using Angular Material components in our devon4ng applications.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-layout-with-clarity-angular.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-layout-with-clarity-angular.html new file mode 100644 index 00000000..1892b158 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-layout-with-clarity-angular.html @@ -0,0 +1,675 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Clarity Layout

+
+
+

The purpose of this guide is to get a basic understanding of creating layouts with Angular Clarity in a devon4ng application. Angular Clarity is an HTML/CSS framework.

+
+
+
+1 +
+
Figure 1. This is what the finished application will look like
+
+
+
+
+

Let’s begin

+
+
+

We start by opening the console (in the Devon distribution folder) and running the following command to start a project named AngularClarityLayout.

+
+
+

devon ng new AngularClarityLayout

+
+
+

Select y when it asks whether you would like to add Angular routing and select SCSS when it asks for the style sheet format. You can also use the devonfw IDE CLI to create a new devon4ng application.

+
+
+

Once the creation process is complete, open your newly created application in Visual Studio Code. Try running the empty application by running the following command in the integrated terminal:

+
+
+

devon ng serve

+
+
+

Angular will spin up a server and you can check your application by visiting http://localhost:4200/ in your browser.

+
+
+
+2 +
+
Figure 2. Blank Application
+
+
+
+
+

Adding Angular Clarity framework to the project

+
+
+

Next we will add Angular Clarity to our application. In the integrated terminal, press CTRL + C to terminate the running application and run the following command:

+
+
+

Generate a new Angular application (if you haven’t already): +ng new my-app +Navigate to the directory: +cd my-app +Run the ng add command for Clarity: +ng add @clr/angular

+
+
+

After that we can see that the module is imported on app.module.ts

+
+
+
+
import { ClarityModule } from '@clr/angular';
+@NgModule({
+  declarations: [
+    AppComponent
+  ],
+imports: [
+    ClarityModule,
+ ],
+  providers: [],
+  bootstrap: [AppComponent]
+})
+export class AppModule { }
+
+
+
+
+3 +
+
Figure 3. ClarityModule
+
+
+
+
+

Development

+
+
+

After installing the library, we can start developing the code.

+
+
+

Adding styles in styles.css

+
+
+
+
body {
+  margin: 0;
+  font-family: "Segoe UI", Roboto, sans-serif;
+}
+
+
+
+

The first thing that we need to do is the menu.

+
+
+
+
<header class="header-6">
+  <div class="branding">
+    <a href="..." class="nav-link">
+      <clr-icon shape="vm-bug"></clr-icon>
+      <span class="title">Project Clarity</span>
+    </a>
+  </div>
+
+  <div class="header-nav">
+    <a href="..." class="active nav-link"><span class="nav-text">Dashboard</span></a>
+    <a href="..." class="nav-link"><span class="nav-text">Interactive Analytics</span></a>
+  </div>
+  <div class="header-actions">
+      <form class="search">
+        <label for="search_input">
+          <input id="search_input" type="text" placeholder="Search for keywords...">
+        </label>
+      </form>
+        <clr-dropdown>
+          <button class="nav-text" clrDropdownTrigger aria-label="open user profile">
+            devonfw@clarityangular
+            <clr-icon shape="caret down"></clr-icon>
+          </button>
+          <clr-dropdown-menu *clrIfOpen clrPosition="bottom-right">
+            <a href="..." clrDropdownItem>Settings</a>
+            <a href="..." clrDropdownItem>Log out</a>
+          </clr-dropdown-menu>
+        </clr-dropdown>
+<clr-dropdown>
+  <button class="nav-icon" clrDropdownTrigger aria-label="toggle settings menu">
+    <clr-icon shape="cog"></clr-icon>
+    <clr-icon shape="caret down"></clr-icon>
+  </button>
+  <clr-dropdown-menu *clrIfOpen clrPosition="bottom-right">
+    <a href="..." clrDropdownItem>About</a>
+    <a href="..." clrDropdownItem>Preferences</a>
+  </clr-dropdown-menu>
+</clr-dropdown>
+  </div>
+</header>
+
+
+
+
+4 +
+
Figure 4. Clarity Menu
+
+
+

The framework has its own css classes. +For example, the first class that we can see is header-6; it is a css style that changes the color of the menu. +We can also see that the framework provides some icons to choose from, using the tag +<clr-icon shape="vm-bug"></clr-icon> +The next div in the menu will contain the navigation header. +As with everything in this framework, it has its own css class: <div class="header-nav"> +We can see 2 <a> tags with a different css class each. +The first one has the active class. The difference between both of them is shown below.

+
+
+
+5 +
+
Figure 5. Difference
+
+
+

After seeing this piece of code, we can see that the other part of the menu has another css class: +<div class="header-actions"> +After this, all divs inside this last one are going to be aligned to the right.

+
+
+
+6 +
+
Figure 6. Search
+
+
+

To create this search bar, we just need to create a form with the class search: +<form class="search"> +To show the icon we use the tag <label for="search_input"></label> +And a normal input with the id="search_input" to match the previous label.

+
+
+
+7 +
+
Figure 7. Dropdown
+
+
+

To create the dropdown menu, we use the framework’s own tag called +<clr-dropdown>

+
+
+
+
 <clr-dropdown>
+          <button class="nav-text" clrDropdownTrigger aria-label="open user profile">
+            devonfw@clarityangular
+            <clr-icon shape="caret down"></clr-icon>
+          </button>
+          <clr-dropdown-menu *clrIfOpen clrPosition="bottom-right">
+            <a href="..." clrDropdownItem>Settings</a>
+            <a href="..." clrDropdownItem>Log out</a>
+          </clr-dropdown-menu>
+  </clr-dropdown>
+
+
+
+
+8 +
+
Figure 8. Dropdown
+
+
+

The attribute clrDropdownTrigger is needed because, without it, the <clr-dropdown-menu> tag will not work: that tag is only activated if clrDropdownTrigger is activated too. +Also, with the attribute clrPosition we can decide where the dropdown will be positioned.

+
+
+

For the other part of the menu, check the next figure.

+
+
+
+9 +
+
Figure 9. Button
+
+
+

It’s pretty much the same code, but we just change the attribute aria-label and the icons.

+
+
+
+10 +
+
Figure 10. Button Logic
+
+
+

After we have the whole menu finished, time to see the card.

+
+
+
+11 +
+
Figure 11. Card
+
+
+

In the first figure, all the elements are aligned to the center. +To do this, we just need to use the classes provided by the framework. +<div class="clr-main-container"> +The first css class gives the style to the main container.

+
+
+

<div class="clr-row clr-justify-content-center">

+
+
+

This one says that it is going to be sorted by rows and all the content inside this div will be centered. + <div class="clr-col-lg-4"> +The last one sets the size of the div. This framework has a maximum of 12 columns, like Bootstrap. +More examples at: +https://clarity.design/documentation/grid

+
+
+

To create the card with its border and all its properties we just use the class +<div class="card"> +To create the tooltip, check the next figure.

+
+
+
+12 +
+
Figure 12. Tooltip
+
+
+

We just need to create an <a> tag with these attributes: + <a href="…​" role="tooltip" aria-haspopup="true" class="tooltip tooltip-bottom-right"> +The class gives us the tooltip and its position. +After that we have: +<clr-icon shape="info-circle" size="24"></clr-icon> +which gives us the icon and its size. +The content of the tooltip comes from: +<span class="tooltip-content"> +Once the tooltip is done, we just need to add an image and the text. +To do it we just need to write the following code

+
+
+
+
<div class="card-img">
+            <img src="../assets/images/clarity.png">
+          </div>
+          <div class="card-block">
+            <p class="card-text">
+              Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard
+              dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen
+              book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially
+              unchanged. It was popularised in the 1930s with the release of Letraset sheets containing Lorem Ipsum passages, and more
+              recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum.
+            </p>
+          </div>
+
+
+
+

For the next card, check next figure

+
+
+
+13 +
+
Figure 13. Card
+
+
+

We are using the same class from the card that we used before. +But to do the numbers on the top we used : +<span class="badge"> +And to give some colors we used: +<span class="badge badge-purple"> for example

+
+
+

The next step is to create the progress bars; to do it we just need to create a div with the class "progress-block"

+
+
+
+
        <div class="progress-block">
+              <label>Label</label>
+                <div class="progress-static">
+                  <div class="progress-meter" data-value="25"></div>
+                </div>
+              </div>
+
+
+
+

To give the bar that width and height we use the class "progress-static". +Finally, the color and the value are changed with the classes "progress-meter" and "progress success". +Depending on which class we are using, we will have different attributes to set the value. +If we use progress-static we will use +<div class="progress-meter" data-value="43"></div> +If we use progress success we need to use: +<progress value="75" max="100" data-displayval="…​%"></progress>

+
+
+

As you can see, the card has a footer. Check next picture

+
+
+
+14 +
+
Figure 14. Card
+
+
+

We just need to add a div with this class inside the card div: +<div class="card-footer"> +And we will add a link to the card:

+
+
+

<a class="card-link" (click)="send()">Click to see the modal</a> +And the method send() is just a method that toggles the variable basic: true when it is false, and false when it is true:

+
+
+
+
  basic = false;
+  send(): void {
+    this.basic = !this.basic;
+  }
+
+
+
+

So in the html file we need to write a div with an ngIf, to check if the variable is true, and create a modal with the tag <clr-modal> and the attribute clrModalOpen bound to the same variable.

+
+
+
+
<div *ngIf="basic">
+        <clr-modal [(clrModalOpen)]="basic">
+
+
+
+

After this we need to create the body of the modal, to do it we will use a div with the classes from the framework

+
+
+
+
<div class="modal-body">
+            <p>But not much to say...</p>
+          </div>
+          <div class="modal-footer">
+            <button type="button" class="btn btn-primary" (click)="basic = true">OK</button>
+            <button type="button" class="btn btn-outline" (click)="basic = false">Cancel</button>          </div>
+        </clr-modal>
+
+
+
+

To create the body of the modal, we just create a div with the class +<div class="modal-body"> +And to create the footer +<div class="modal-footer"> +We can see that the footer has 2 buttons, with different styles coming from the framework and with 2 click handlers setting different values: +(click)="basic = true" on the OK button — this button won’t change the variable value, so it will not do anything. +(click)="basic = false" on the Cancel button — this button will change the value of the variable and will close the modal.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-layout-with-ng-zorro-layout.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-layout-with-ng-zorro-layout.html new file mode 100644 index 00000000..29801e82 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-layout-with-ng-zorro-layout.html @@ -0,0 +1,897 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

NG ZORRO Layout

+
+
+

The purpose of this guide is to get a basic understanding of creating layouts using NG ZORRO in a devon4ng application.

+
+
+
+figure1 +
+
Figure 1. This is what the finished application will look like
+
+
+
+
+

Let’s begin

+
+
+

Start by opening the console (in the Devon distribution folder) and running the following command to start a project named AngularZorroLayout. +devon ng new AngularZorroLayout

+
+
+
    +
  • +

    devon ng new AngularZorroLayout

    +
  • +
+
+
+

Select y when it asks whether it would like to add Angular routing and select scss when it asks for the style sheet format.

+
+
+

Once the creation process is complete, open your newly created application in Visual Studio Code. Try running the empty application by running the following command in the integrated terminal:

+
+
+
    +
  • +

    devon ng serve

    +
  • +
+
+
+

Angular will spin up a server and you can check your application by visiting http://localhost:4200/ in your browser.

+
+
+
+
+

Adding Angular ZORRO library to the project

+
+
+
Blank application
+

Next we will add NG ZORRO to our application. In the integrated terminal, press CTRL + C to terminate the running application and run the following command:

+
+
+
    +
  • +

    ng add ng-zorro-antd

    +
  • +
+
+
+
+figure3 +
+
Figure 2. CLI Angular ZORRO Layout
+
+
+

Or if we would like to customize our workflow we can install it with:

+
+
+
    +
  • +

    npm install ng-zorro-antd

    +
  • +
+
+
+

After running that command, we need to import the pre-built styles in angular.json

+
+
+
Listing 1. Styles on angular.json
+
+
"styles": [
+    "src/styles.scss",
+    "node_modules/ng-zorro-antd/src/ng-zorro-antd.min.css",
+    "node_modules/ng-zorro-antd/resizable/style/index.min.css"
+],
+
+
+
+

Once the dependencies are installed, we need to import the BrowserAnimationsModule in our AppModule for animations support.

+
+
+
Listing 2. Importing BrowserAnimationsModule in AppModule
+
+
import {BrowserAnimationsModule} from '@angular/platform-browser/animations';
+
+@NgModule({
+  ...
+  imports: [BrowserAnimationsModule],
+  ...
+})
+export class AppModule { }
+
+
+
+
+
+

Internationalization

+
+
+

The default language of ng-zorro-antd is Chinese. If you want to use other languages, you can follow the instructions below. You can also set the language with ng add ng-zorro-antd when creating the project.

+
+
+

ng-zorro-antd provides several configuration tokens for global configuration of international copy and date, NZ_I18N for international copy.

+
+
+
Listing 3. Importing Configuration in App.module
+
+
import { NZ_I18N, en_US } from 'ng-zorro-antd/i18n';
+
+@NgModule({
+  ...
+  providers: [
+    { provide: NZ_I18N, useValue: en_US },
+  ...
+})
+export class AppModule { }
+
+
+
+

To finish the configuration, we need to import the icons from the Library.

+
+
+
Listing 4. Importing Icons in App.module
+
+
import * as AllIcons from '@ant-design/icons-angular/icons';
+
+const antDesignIcons = AllIcons as {
+  [key: string]: IconDefinition;
+};
+const icons: IconDefinition[] = Object.keys(antDesignIcons).map(key => antDesignIcons[key]);
+
+
+
+
+
+

Development

+
+
+

We have all the NG ZORRO related dependencies set up in our project, we can start coding.

+
+
+
Listing 5. Adding styles in styles.css
+
+
body {
+  margin: 0;
+  font-family: "Segoe UI", Roboto, sans-serif;
+}
+
+
+
+

The next step is to create a component for the header. We will create it with the following command. +We will create a components folder to follow good practices.

+
+
+

ng generate component components/header

+
+
+

In this component, we are going to create the menu.

+
+
+

First, we need to import the menu module on app.module.

+
+
+
Listing 6. Adding module in app.module
+
+
import { NzMenuModule } from 'ng-zorro-antd/menu';
+
+
+
+

And we will create the header with this code:

+
+
+
+
<ul nz-menu nzMode="horizontal" class="container">
+  <li nz-menu-item nzSelected>
+    <i nz-icon nzType="mail"></i>
+    Navigation One
+  </li>
+  <li nz-menu-item nzDisabled>
+    <i nz-icon nzType="appstore"></i>
+    Navigation Two
+  </li>
+  <li nz-submenu nzTitle="Navigation Three - Submenu" nzIcon="setting">
+    <ul>
+      <li nz-menu-group nzTitle="Modals">
+        <ul>
+             <li nz-menu-item nz-button (click)="info()"> Info</li>
+               <li nz-menu-item nz-button (click)="success()">Success</li>
+             <li nz-menu-item nz-button (click)="error()">Error</li>
+             <li nz-menu-item nz-button (click)="warning()">Warning</li>
+        </ul>
+      </li>
+      <li nz-menu-group nzTitle="Item 2">
+        <ul>
+          <li nz-menu-item>Option 3</li>
+          <li nz-submenu nzTitle="Sub Menu">
+            <ul>
+              <li nz-menu-item nzDisabled>Option 4</li>
+              <li nz-menu-item>Option 5</li>
+            </ul>
+          </li>
+          <li nz-submenu nzDisabled nzTitle="Disabled Sub Menu">
+            <ul>
+              <li nz-menu-item>Option 6</li>
+              <li nz-menu-item>Option 7</li>
+            </ul>
+          </li>
+        </ul>
+      </li>
+    </ul>
+  </li>
+  <li nz-menu-item>
+    <a href="https://ng.ant.design" target="_blank" rel="noopener noreferrer">Navigation Four - Link</a>
+  </li>
+</ul>
+
+
+
+
+figure4 +
+
Figure 3. Header component
+
+
+

Note +The menu has some properties like nzTitle, nzButton, nzDisabled or nzSelected.

+
+
+

And modify the styles on header.component.scss

+
+
+
Listing 7. Adding styles on header.scss
+
+
.container{
+  margin: auto;
+  text-align: center;
+}
+
+
+
+

The library already provides enough styles and we don’t need to change too much. +It will look like this:

+
+
+
+figure5 +
+
Figure 4. Header Component
+
+
+

In the menu, we added an example of a modal

+
+
+

To use it we need to import that module on app.module.ts

+
+
+
+
import { NzModalModule } from 'ng-zorro-antd/modal';
+
+
+
+

In the HTML file we just need to create a method on (click) to call the modal.

+
+
+
+
  <li nz-submenu nzTitle="Navigation Three - Submenu" nzIcon="setting">
+    <ul>
+      <li nz-menu-group nzTitle="Modals">
+        <ul>
+             <li nz-menu-item nz-button (click)="info()"> Info</li>
+               <li nz-menu-item nz-button (click)="success()">Success</li>
+             <li nz-menu-item nz-button (click)="error()">Error</li>
+             <li nz-menu-item nz-button (click)="warning()">Warning</li>
+        </ul>
+      </li>
+
+
+
+
+figure6 +
+
Figure 5. Modal
+
+
+

And now, we just need to create those methods in the file header.component.ts +Also, need to import the modal service and we use it in the constructor of the class.

+
+
+

import {NzModalService} from 'ng-zorro-antd/modal'; +constructor(private modal: NzModalService){}

+
+
+
+figure7 +
+
Figure 6. Import ModalService from ZORRO
+
+
+
+
  info(): void {
+    this.modal.info({
+      nzTitle: 'This is a notification message',
+      nzContent: '<p>some messages...some messages...</p><p>some messages...some messages...</p>',
+      nzOnOk: () => console.log('Info OK')
+    });
+  }
+
+  success(): void {
+    this.modal.success({
+      nzTitle: 'This is a success message',
+      nzContent: 'some messages...some messages...'
+    });
+  }
+
+  error(): void {
+    this.modal.error({
+      nzTitle: 'This is an error message',
+      nzContent: 'some messages...some messages...'
+    });
+  }
+
+  warning(): void {
+    this.modal.warning({
+      nzTitle: 'This is an warning message',
+      nzContent: 'some messages...some messages...'
+    });
+  }
+
+
+
+
+figure8 +
+
Figure 7. Logic on ts file looks like
+
+
+

Once the header is done, time to create the main component. In this case will be those elements.

+
+
+
+figure9 +
+
Figure 8. Main Component
+
+
+

The first element that we can see is a carousel. +To implement it in the code, we just need to do the same as we did before: import the module and import the component. +So we import the next module in app.module:

+
+
+
Listing 8. Import carousel Module
+
+
import { NzCarouselModule } from 'ng-zorro-antd/carousel';
+
+
+
+

And use the label “nz-carousel” to create the Carousel, it has some attributes coming from the library.

+
+
+
+figure10 +
+
Figure 9. Import ModalService from ZORRO
+
+
+

**NOTE +The loop that we are doing determines how many images we will have. +And finally, we will add some styles.

+
+
+
+
.container{
+  margin: auto;
+  text-align: center;
+  margin-top: 20px;
+}
+[nz-carousel-content] {
+        text-align: center;
+        height: 160px;
+        line-height: 160px;
+        background: #364d79;
+        color: #fff;
+        overflow: hidden;
+      }
+
+      h3 {
+        color: #fff;
+        margin-bottom: 0;
+      }
+
+nz-content{
+  padding: 0 30px 0 30px;
+}
+
+
+
+
+figure11 +
+
Figure 10. Styling
+
+
+

Next element, the cards

+
+
+
+figure12 +
+
Figure 11. Cards1
+
+
+
+figure13 +
+
Figure 12. Cards Unlocked
+
+
+

We will have a button to activate or deactivate the cards. +To do it, we will write the following code in our html file.

+
+
+
+
        <div nz-row>
+          <div nz-col [nzXs]="{ span: 5, offset: 1 }" [nzLg]="{ span: 6, offset: 2 }">
+            <nz-card nzXs="8">
+              <nz-skeleton [nzActive]="true" [nzLoading]="loading" [nzAvatar]="{ size: 'large' }">
+                <nz-card-meta [nzAvatar]="avatarTemplate" nzTitle="Card title" nzDescription="This is the description">
+                </nz-card-meta>
+              </nz-skeleton>
+            </nz-card>
+          </div>
+          <div nz-col [nzXs]="{ span: 11, offset: 1 }" [nzLg]="{ span: 6, offset: 2 }">
+            <nz-card nzXs="8">
+              <nz-skeleton [nzActive]="true" [nzLoading]="!loading" [nzAvatar]="{ size: 'small' }">
+                <nz-card-meta [nzAvatar]="avatarTemplate" nzTitle="Card title" nzDescription="This is the description">
+                </nz-card-meta>
+              </nz-skeleton>
+            </nz-card>
+          </div>
+          <div nz-col [nzXs]="{ span: 5, offset: 1 }" [nzLg]="{ span: 6, offset: 2 }">
+            <nz-card nzXs="8">
+              <nz-skeleton [nzActive]="true" [nzLoading]="loading" [nzAvatar]="{ size: 'large' }">
+                <nz-card-meta [nzAvatar]="avatarTemplate" nzTitle="Card title" nzDescription="This is the description">
+                </nz-card-meta>
+              </nz-skeleton>
+            </nz-card>
+          </div>
+        </div>
+
+
+
+
+figure14 +
+
Figure 13. Cards HTML
+
+
+

The first thing that we can see is a button to switch between seeing the cards or not. +So, first, we need to import that switch.

+
+
+

import { NzSwitchModule } from 'ng-zorro-antd/switch';

+
+
+

The next step we need to do is write the HTML code. It’s simple:

+
+
+

<nz-switch [(ngModel)]="loading"></nz-switch>

+
+
+

So now, in the ts file we just need to create a boolean variable. +With the ngModel and the switch, each time we click on the button the variable will swap between true and false. +After creating the button, we are going to create the card.

+
+
+

Need to import the following module on app.module +import { NzCardModule } from 'ng-zorro-antd/card'; +And after that we need to write the HTML code

+
+
+
+figure15 +
+
Figure 14. Cards Logic
+
+
+

We will find a lot of attributes. +We can find their explanation in the API: +NG ZORRO

+
+
+

Last Element, the table

+
+
+
+figure16 +
+
Figure 15. Table
+
+
+

We need to import the module +import { NzTableModule } from 'ng-zorro-antd/table';

+
+
+

After that we can see a button, this is just to create a new row in the table. +The button only has a method to add a new value to our array

+
+
+

Table Interface

+
+
+
+
interface ItemData {
+  id: string;
+  name: string;
+  age: string;
+  address: string;
+}
+
+
+
+
+figure17 +
+
Figure 16. Table Interface
+
+
+

Add Row Method

+
+
+
+
  addRow(): void {
+    this.listOfData = [
+      ...this.listOfData,
+      {
+        id: `${this.i}`,
+        name: `Edward King ${this.i}`,
+        age: '32',
+        address: `London, Park Lane no. ${this.i}`
+      }
+    ];
+    this.i++;
+  }
+
+
+
+
+figure18 +
+
Figure 17. Add Method
+
+
+

After that we need to create the table

+
+
+
+
<nz-table #editRowTable nzBordered [nzData]="listOfData">
+          <thead>
+            <tr>
+              <th nzWidth="30%">Name</th>
+              <th>Age</th>
+              <th>Address</th>
+              <th>Action</th>
+            </tr>
+          </thead>
+          <tbody>
+            <tr *ngFor="let data of editRowTable.data" class="editable-row">
+              <td>
+                <div class="editable-cell" [hidden]="editId == data.id" (click)="startEdit(data.id)">
+                  {{ data.name }}
+                </div>
+                <input [hidden]="editId !==  data.id" type="text" nz-input [(ngModel)]="data.name" (blur)="stopEdit()" />
+              </td>
+              <td>{{ data.age }}</td>
+              <td>{{ data.address }}</td>
+              <td>
+                <a nz-popconfirm nzPopconfirmTitle="Sure to delete?" (nzOnConfirm)="deleteRow(data.id)">Delete</a>
+              </td>
+            </tr>
+          </tbody>
+        </nz-table>
+
+
+
+
+figure19 +
+
Figure 18. Table HTML Logic
+
+
+

To create the table we need to use the tag <nz-table>, and after that it is like an HTML table, with the <thead> and <tbody>

+
+
+

As shown with the *ngFor, we are displaying the data from the array created before. +In the first cell we can see that we have a method to edit the value.

+
+
+
+figure20 +
+
Figure 19. Table methods
+
+
+
+figure21 +
+
Figure 20. Table
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-layout-with-primeng-angular.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-layout-with-primeng-angular.html new file mode 100644 index 00000000..5cc62587 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-layout-with-primeng-angular.html @@ -0,0 +1,1721 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

PrimeNG Layout

+
+
+

The purpose of this guide is to get a basic understanding of creating layouts of PrimeNG in a devon4ng application. PrimeNG is a HTML/CSS framework.

+
+
+
+Screenshot 0 +
+
Figure 1. This is what the finished application will look like
+
+
+
+
+

Let’s begin

+
+
+

We start with opening the console (in the Devon distribution folder) and running the following command to start a project named AngularPrimeNgLayout.

+
+
+

devon ng new AngularPrimeNgLayout

+
+
+

Select y when it asks whether we would like to add Angular routing and select SCSS when it asks for the style sheet format. You can also use the devonfw IDE CLI to create a new devon4ng application.

+
+
+

Once the creation process is complete, open your newly created application in Visual Studio Code. Try running the empty application by running the following command in the integrated terminal:

+
+
+

devon ng serve

+
+
+

Angular will spin up a server and you can check your application by visiting http://localhost:4200/ in your browser.

+
+
+
+Screenshot 1 +
+
Figure 2. Blank Application
+
+
+
+
+

Adding PrimeNG to the project

+
+
+

Next we will add PrimeNG to our application. In the integrated terminal, press CTRL + C to terminate the running application and run the following command:

+
+
+

Run the ng add command for PrimeNG:

+
+
+
+
npm install primeng
+npm install primeicons --save
+
+
+
+

After that we can see that the module is imported on app.module.ts

+
+
+

The css dependencies are as follows, Prime Icons, theme of your choice and structural css of components.

+
+
+
+
 "src/styles.scss",
+ "node_modules/primeicons/primeicons.css",
+ "node_modules/primeng/resources/themes/saga-blue/theme.css",
+ "node_modules/primeng/resources/primeng.min.css"
+
+
+
+
+Screenshot 2 +
+
Figure 3. Styles on angular.json
+
+
+
+
+

Development

+
+
+

Now we need to create a component for the header. We will create it with the following command. +We will create a components folder to follow good practices.

+
+
+
+
ng generate component components/header
+
+
+
+

In this component, we are going to create the menu.

+
+
+
+Screenshot 5 +
+
Figure 4. Menu
+
+
+
+Screenshot 6 +
+
Figure 5. Menu Dropdown
+
+
+

And will create the code like:

+
+
+
+Screenshot 3 +
+
Figure 6. Header
+
+
+
+
<p-menubar [model]="items">
+  <ng-template pTemplate="start">
+    <img src="assets/images/primeng.svg" height="40" class="p-mr-2">
+  </ng-template>
+</p-menubar>
+
+
+
+

As we can see, the menu has some properties from the library.

+
+
+

<p-menubar> is the first one, with this label we can create the menu and with the <ng-template pTemplate> we decided where the menu will be aligned.

+
+
+

The [model]=items means that the menu is looking for the "items" to print.

+
+
+

The items is an array, but its type comes from PrimeNG. So we just need to import the MenuItem.

+
+
+
+
import { MenuItem } from 'primeng/api';
+
+
+
+

And give some values.

+
+
+
+
this.items = [
+      {
+        label: 'File',
+        icon: 'pi pi-fw pi-file',
+        items: [
+          {
+            label: 'New',
+            icon: 'pi pi-fw pi-plus',
+            items: [
+              {
+                label: 'Bookmark',
+                icon: 'pi pi-fw pi-bookmark'
+              },
+              {
+                label: 'Video',
+                icon: 'pi pi-fw pi-video'
+              },
+
+            ]
+          },
+          {
+            label: 'Delete',
+            icon: 'pi pi-fw pi-trash'
+          },
+          {
+            separator: true
+          },
+          {
+            label: 'Export',
+            icon: 'pi pi-fw pi-external-link'
+          }
+        ]
+      },
+      {
+        label: 'Edit',
+        icon: 'pi pi-fw pi-pencil',
+        items: [
+          {
+            label: 'Left',
+            icon: 'pi pi-fw pi-align-left'
+          },
+          {
+            label: 'Right',
+            icon: 'pi pi-fw pi-align-right'
+          },
+          {
+            label: 'Center',
+            icon: 'pi pi-fw pi-align-center'
+          },
+          {
+            label: 'Justify',
+            icon: 'pi pi-fw pi-align-justify'
+          },
+
+        ]
+      },
+      {
+        label: 'Users',
+        icon: 'pi pi-fw pi-user',
+        items: [
+          {
+            label: 'New',
+            icon: 'pi pi-fw pi-user-plus',
+
+          },
+          {
+            label: 'Delete',
+            icon: 'pi pi-fw pi-user-minus',
+
+          },
+          {
+            label: 'Search',
+            icon: 'pi pi-fw pi-users',
+            items: [
+              {
+                label: 'Filter',
+                icon: 'pi pi-fw pi-filter',
+                items: [
+                  {
+                    label: 'Print',
+                    icon: 'pi pi-fw pi-print'
+                  }
+                ]
+              },
+              {
+                icon: 'pi pi-fw pi-bars',
+                label: 'List'
+              }
+            ]
+          }
+        ]
+      },
+      {
+        label: 'Events',
+        icon: 'pi pi-fw pi-calendar',
+        items: [
+          {
+            label: 'Edit',
+            icon: 'pi pi-fw pi-pencil',
+            items: [
+              {
+                label: 'Save',
+                icon: 'pi pi-fw pi-calendar-plus'
+              },
+              {
+                label: 'Delete',
+                icon: 'pi pi-fw pi-calendar-minus'
+              },
+
+            ]
+          },
+          {
+            label: 'Archieve',
+            icon: 'pi pi-fw pi-calendar-times',
+            items: [
+              {
+                label: 'Remove',
+                icon: 'pi pi-fw pi-calendar-minus'
+              }
+            ]
+          }
+        ]
+      },
+      {
+        label: 'Quit',
+        icon: 'pi pi-fw pi-power-off'
+      }
+    ];
+  }
+
+
+
+
+Screenshot 4 +
+
Figure 7. Menu Values
+
+
+

After the menu is done, the next step is to create the main container; in this case it will be the table.

+
+
+
+Screenshot 7 +
+
Figure 8. Table
+
+
+

As it is a very complex table, we are going to explain it component by component

+
+
+
+Screenshot 8 +
+
Figure 9. Buttons
+
+
+

To create those buttons we just need to write this piece of code

+
+
+
+
<p-toolbar styleClass="p-mb-4">
+    <ng-template pTemplate="left">
+      <button pButton pRipple label="New" icon="pi pi-plus" class="p-button-success p-mr-2"
+        (click)="openNew()"></button>
+      <button pButton pRipple label="Delete" icon="pi pi-trash" class="p-button-danger"
+        (click)="deleteSelectedProducts()" [disabled]="!selectedProducts || !selectedProducts.length"></button>
+    </ng-template>
+
+    <ng-template pTemplate="right">
+      <p-fileUpload mode="basic" accept="image/*" [maxFileSize]="1000000" label="Import" chooseLabel="Import"
+        class="p-mr-2 p-d-inline-block"></p-fileUpload>
+      <button pButton pRipple label="Export" icon="pi pi-upload" class="p-button-help"></button>
+    </ng-template>
+  </p-toolbar>
+
+
+
+
+Screenshot 9 +
+
Figure 10. Buttons Code
+
+
+

We can see some labels and attributes, for example <p-toolbar>, pButton, <p-fileUpload>.

+
+
+

To use them, we need to import on app.module with the following code

+
+
+
+
import { TableModule } from 'primeng/table';
+import { ButtonModule } from 'primeng/button';
+import {ToolbarModule} from 'primeng/toolbar';
+import {FileUploadModule} from 'primeng/fileupload';
+
+
+
+

We see that the first method is openNew(); when we call this method a variable is set to true

+
+
+
+
  openNew(): any {
+    this.product = {};
+    this.submitted = false;
+    this.productDialog = true;
+  }
+
+
+
+

And when the productDialog is true, we will open a Modal with the following code and it will look like:

+
+
+
+Screenshot 11 +
+
Figure 11. Modal
+
+
+
+
<p-dialog [(visible)]="productDialog" [style]="{width: '450px'}" header="Product Details" [modal]="true"
+  styleClass="p-fluid">
+  <ng-template pTemplate="content">
+    <div class="p-field">
+      <label for="name">Name</label>
+      <input type="text" pInputText id="name" [(ngModel)]="product.name" required autofocus />
+      <small class="p-invalid" *ngIf="submitted && !product.name">Name is required.</small>
+    </div>
+    <div class="p-field">
+      <label for="description">Description</label>
+      <textarea id="description" pInputTextarea [(ngModel)]="product.description" required rows="3"
+        cols="20"></textarea>
+    </div>
+
+    <div class="p-field">
+      <label class="p-mb-3">Category</label>
+      <div class="p-formgrid p-grid">
+        <div class="p-field-radiobutton p-col-6">
+          <p-radioButton id="category1" name="category" value="Accessories" [(ngModel)]="product.category">
+          </p-radioButton>
+          <label for="category1">Accessories</label>
+        </div>
+        <div class="p-field-radiobutton p-col-6">
+          <p-radioButton id="category2" name="category" value="Clothing" [(ngModel)]="product.category"></p-radioButton>
+          <label for="category2">Clothing</label>
+        </div>
+        <div class="p-field-radiobutton p-col-6">
+          <p-radioButton id="category3" name="category" value="Electronics" [(ngModel)]="product.category">
+          </p-radioButton>
+          <label for="category3">Electronics</label>
+        </div>
+        <div class="p-field-radiobutton p-col-6">
+          <p-radioButton id="category4" name="category" value="Fitness" [(ngModel)]="product.category"></p-radioButton>
+          <label for="category4">Fitness</label>
+        </div>
+      </div>
+    </div>
+
+    <div class="p-formgrid p-grid">
+      <div class="p-field p-col">
+        <label for="price">Price</label>
+        <p-inputNumber id="price" [(ngModel)]="product.price" mode="currency" currency="USD" locale="en-US">
+        </p-inputNumber>
+      </div>
+      <div class="p-field p-col">
+        <label for="quantity">Quantity</label>
+        <p-inputNumber id="quantity" [(ngModel)]="product.quantity"></p-inputNumber>
+      </div>
+    </div>
+  </ng-template>
+
+  <ng-template pTemplate="footer">
+    <button pButton pRipple label="Cancel" icon="pi pi-times" class="p-button-text" (click)="hideDialog()"></button>
+    <button pButton pRipple label="Save" icon="pi pi-check" class="p-button-text" (click)="saveProduct()"></button>
+  </ng-template>
+</p-dialog>
+
+
+
+
+Screenshot 10 +
+
Figure 12. Modal Code
+
+
+

To start developing this, we need to import DialogModule, ConfirmDialogModule, InputTextModule, RadioButtonModule and FormsModule; to do it we just need to write on app.module

+
+
+
+
import { DialogModule } from 'primeng/dialog';
+import { ConfirmDialogModule } from 'primeng/confirmdialog';
+import {FormsModule} from '@angular/forms';
+import { RadioButtonModule } from 'primeng/radiobutton';
+import { InputTextModule } from 'primeng/inputtext';
+
+
+
+
+Screenshot 11 +
+
Figure 13. Modal Code
+
+
+

After that we can see a Modal with the form and when we click on the "Save Button", We will create a new product.

+
+
+
+
  saveProduct(): any {
+    this.submitted = true;
+
+    if (this.product.name.trim()) {
+      if (this.product.id) {
+        this.products[this.findIndexById(this.product.id)] = this.product;
+        this.messageService.add({ severity: 'success', summary: 'Successful', detail: 'Product Updated', life: 3000 });
+      }
+      else {
+        this.product.id = this.createId();
+        this.product.image = 'product-placeholder.svg';
+        this.products.push(this.product);
+        this.messageService.add({ severity: 'success', summary: 'Successful', detail: 'Product Created', life: 3000 });
+      }
+
+      this.products = [...this.products];
+      this.productDialog = false;
+      this.product = {};
+    }
+  }
+
+
+
+

After done the first buttons, just need to do the rest of the table

+
+
+
+
<p-table #dt [value]="products" [rows]="10" [paginator]="true"
+    [globalFilterFields]="['name','country.name','representative.name','status']" [(selection)]="selectedProducts"
+    [rowHover]="true" dataKey="id" currentPageReportTemplate="Showing {first} to {last} of {totalRecords} entries"
+    [showCurrentPageReport]="true">
+    <ng-template pTemplate="caption">
+      <div class="p-d-flex p-ai-center p-jc-between">
+        <h5 class="p-m-0">Manage Products</h5>
+        <span class="p-input-icon-left">
+          <i class="pi pi-search"></i>
+          <input pInputText type="text" (input)="dt.filterGlobal($event.target.value, 'contains')"
+            placeholder="Search..." />
+        </span>
+      </div>
+    </ng-template>
+    <ng-template pTemplate="header">
+      <tr>
+        <th style="width: 3rem">
+          <p-tableHeaderCheckbox></p-tableHeaderCheckbox>
+        </th>
+        <th pSortableColumn="name">Name <p-sortIcon field="name"></p-sortIcon>
+        </th>
+        <th pSortableColumn="price">Price <p-sortIcon field="price"></p-sortIcon>
+        </th>
+        <th pSortableColumn="category">Category <p-sortIcon field="category"></p-sortIcon>
+        </th>
+        <th pSortableColumn="rating">Reviews <p-sortIcon field="rating"></p-sortIcon>
+        </th>
+        <th pSortableColumn="inventoryStatus">Status <p-sortIcon field="inventoryStatus"></p-sortIcon>
+        </th>
+        <th></th>
+      </tr>
+    </ng-template>
+    <ng-template pTemplate="body" let-product>
+      <tr>
+        <td>
+          <p-tableCheckbox [value]="product"></p-tableCheckbox>
+        </td>
+        <td>{{product.name}}</td>
+        <td>{{product.price | currency:'USD'}}</td>
+        <td>{{product.category}}</td>
+        <td>
+          <p-rating [ngModel]="product.rating" [readonly]="true" [cancel]="false"></p-rating>
+        </td>
+        <td><span
+            [class]="'product-badge status-' + product.inventoryStatus.toLowerCase()">{{product.inventoryStatus}}</span>
+        </td>
+        <td>
+          <button pButton pRipple icon="pi pi-pencil" class="p-button-rounded p-button-success p-mr-2"
+            (click)="editProduct(product)"></button>
+          <button pButton pRipple icon="pi pi-trash" class="p-button-rounded p-button-warning"
+            (click)="deleteProduct(product)"></button>
+        </td>
+      </tr>
+    </ng-template>
+    <ng-template pTemplate="summary">
+      <div class="p-d-flex p-ai-center p-jc-between">
+        In total there are {{products ? products.length : 0 }} products.
+      </div>
+    </ng-template>
+  </p-table>
+
+
+
+
+Screenshot 12 +
+
Figure 14. Table Code
+
+
+

After that, we need to add some styles to the code.

+
+
+
+
:host ::ng-deep {
+    .p-paginator {
+        .p-paginator-current {
+            margin-left: auto;
+        }
+    }
+
+    .p-progressbar {
+        height: .5rem;
+        background-color: #D8DADC;
+
+        .p-progressbar-value {
+            background-color: #607D8B;
+        }
+    }
+
+    .table-header {
+        display: flex;
+        justify-content: space-between;
+    }
+
+    .p-calendar .p-datepicker {
+        min-width: 25rem;
+
+        td {
+            font-weight: 400;
+        }
+    }
+
+    .p-datatable.p-datatable-customers {
+        .p-datatable-header {
+            padding: 1rem;
+            text-align: left;
+            font-size: 1.5rem;
+        }
+
+        .p-paginator {
+            padding: 1rem;
+        }
+
+        .p-datatable-thead > tr > th {
+            text-align: left;
+        }
+
+        .p-datatable-tbody > tr > td {
+            cursor: auto;
+        }
+
+        .p-dropdown-label:not(.p-placeholder) {
+            text-transform: uppercase;
+        }
+    }
+
+    /* Responsive */
+    .p-datatable-customers .p-datatable-tbody > tr > td .p-column-title {
+        display: none;
+    }
+}
+
+@media screen and (max-width: 960px) {
+    :host ::ng-deep {
+        .p-datatable {
+            &.p-datatable-customers {
+                .p-datatable-thead > tr > th,
+                .p-datatable-tfoot > tr > td {
+                    display: none !important;
+                }
+
+                .p-datatable-tbody > tr {
+                    border-bottom: 1px solid var(--layer-2);
+
+                    > td {
+                        text-align: left;
+                        display: block;
+                        border: 0 none !important;
+                        width: 100% !important;
+                        float: left;
+                        clear: left;
+                        border: 0 none;
+
+                        .p-column-title {
+                            padding: .4rem;
+                            min-width: 30%;
+                            display: inline-block;
+                            margin: -.4rem 1rem -.4rem -.4rem;
+                            font-weight: bold;
+                        }
+
+                        .p-progressbar {
+                            margin-top: .5rem;
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+}
+
+
+
+
+tablestyle +
+
Figure 15. Table CSS
+
+
+

As we can see, we have some values already loaded, like products, and some attributes that we need to import to use the table correctly.

+
+
+

All the modules need to be in app.module

+
+
+
+
import { BrowserModule } from '@angular/platform-browser';
+import { NgModule } from '@angular/core';
+
+import { AppRoutingModule } from './app-routing.module';
+import { AppComponent } from './app.component';
+import { HeaderComponent } from './components/header/header.component';
+
+import { MenubarModule } from 'primeng/menubar';
+import { HttpClientModule } from '@angular/common/http';
+import { TableModule } from 'primeng/table';
+import { CalendarModule } from 'primeng/calendar';
+import { SliderModule } from 'primeng/slider';
+import { DialogModule } from 'primeng/dialog';
+import { MultiSelectModule } from 'primeng/multiselect';
+import { ContextMenuModule } from 'primeng/contextmenu';
+import { ButtonModule } from 'primeng/button';
+import { ToastModule } from 'primeng/toast';
+import { InputTextModule } from 'primeng/inputtext';
+import { ProgressBarModule } from 'primeng/progressbar';
+import { DropdownModule } from 'primeng/dropdown';
+import {ToolbarModule} from 'primeng/toolbar';
+import {FileUploadModule} from 'primeng/fileupload';
+import {RatingModule} from 'primeng/rating';
+import { RadioButtonModule } from 'primeng/radiobutton';
+import { InputNumberModule } from 'primeng/inputnumber';
+import { ConfirmDialogModule } from 'primeng/confirmdialog';
+import { ConfirmationService, MessageService } from 'primeng/api';
+import { ProductService } from './services/product.service';
+import { InputTextareaModule } from 'primeng/inputtextarea';
+import {FormsModule} from '@angular/forms';
+
+import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
+import { NoopAnimationsModule } from '@angular/platform-browser/animations';
+
+
+@NgModule({
+  declarations: [AppComponent, HeaderComponent],
+  imports: [
+    BrowserModule,
+    BrowserAnimationsModule,
+    NoopAnimationsModule,
+    AppRoutingModule,
+    MenubarModule,
+    TableModule,
+    CalendarModule,
+    SliderModule,
+    DialogModule,
+    MultiSelectModule,
+    ContextMenuModule,
+    ButtonModule,
+    ToastModule,
+    InputTextModule,
+    ProgressBarModule,
+    DropdownModule,
+    ToolbarModule,
+    FileUploadModule,
+    RatingModule,
+    RadioButtonModule,
+    InputNumberModule,
+    ConfirmDialogModule,
+    InputTextareaModule,
+    FormsModule,
+    HttpClientModule,
+  ],
+
+
+
+
+Screenshot 13 +
+
Figure 16. All modules imported
+
+
+

As we can see, the first thing that the table does is load all the products that we have.

+
+
+

To do it, we will create a service to get all the data.

+
+
+

To create a service we need to use the next command

+
+
+

ng generate service services/product

+
+
+

In the service we are simulating an endpoint to get data.

+
+
+

We will have our products "hardcoded" and the methods to get or to set some values.

+
+
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Product } from '../models/product';
+
+@Injectable({
+  providedIn: 'root'
+})
+export class ProductService {
+  status: string[] = ['OUTOFSTOCK', 'INSTOCK', 'LOWSTOCK'];
+
+  productNames: string[] = [
+    'Bamboo Watch',
+    'Black Watch',
+    'Blue Band',
+    'Blue T-Shirt',
+    'Bracelet',
+    'Brown Purse',
+    'Chakra Bracelet',
+    'Galaxy Earrings',
+    'Game Controller',
+    'Gaming Set',
+    'Gold Phone Case',
+    'Green Earbuds',
+    'Green T-Shirt',
+    'Grey T-Shirt',
+    'Headphones',
+    'Light Green T-Shirt',
+    'Lime Band',
+    'Mini Speakers',
+    'Painted Phone Case',
+    'Pink Band',
+    'Pink Purse',
+    'Purple Band',
+    'Purple Gemstone Necklace',
+    'Purple T-Shirt',
+    'Shoes',
+    'Sneakers',
+    'Teal T-Shirt',
+    'Yellow Earbuds',
+    'Yoga Mat',
+    'Yoga Set',
+  ];
+
+  constructor(private http: HttpClient) { }
+
+  getProductsSmall(): any {
+    return this.http.get<any>('assets/products-small.json')
+      .toPromise()
+      .then(res => res.data as Product[])
+      .then(data => data);
+  }
+
+  getProducts(): any {
+    return this.http.get<any>('assets/products.json')
+      .toPromise()
+      .then(res => res.data as Product[])
+      .then(data => data);
+  }
+
+  getProductsWithOrdersSmall(): any {
+    return this.http.get<any>('assets/products-orders-small.json')
+      .toPromise()
+      .then(res => res.data as Product[])
+      .then(data => data);
+  }
+
+  generatePrduct(): Product {
+    const product: Product = {
+      id: this.generateId(),
+      name: this.generateName(),
+      description: 'Product Description',
+      price: this.generatePrice(),
+      quantity: this.generateQuantity(),
+      category: 'Product Category',
+      inventoryStatus: this.generateStatus(),
+      rating: this.generateRating()
+    };
+
+    product.image = product.name.toLocaleLowerCase().split(/[ ,]+/).join('-') + '.jpg';
+    return product;
+  }
+
+  generateId(): string {
+    let text = '';
+    const possible = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
+
+    for (let  i = 0; i < 5; i++) {
+      text += possible.charAt(Math.floor(Math.random() * possible.length));
+    }
+
+    return text;
+  }
+
+
+  generateName(): any {
+    return this.productNames[Math.floor(Math.random() * Math.floor(30))];
+  }
+
+  generatePrice(): any {
+    return Math.floor(Math.random() * Math.floor(299) + 1);
+  }
+
+  generateQuantity(): any {
+    return Math.floor(Math.random() * Math.floor(75) + 1);
+  }
+
+  generateStatus(): any {
+    return this.status[Math.floor(Math.random() * Math.floor(3))];
+  }
+
+  generateRating(): any {
+    return Math.floor(Math.random() * Math.floor(5) + 1);
+  }
+}
+
+
+
+
+Screenshot 14 +
+
Figure 17. Product Service
+
+
+

Also we create an interface for the Product, so all the products will have the same structure:

+
+
+
+
export interface Product {
+  id?: string;
+  code?: string;
+  name?: string;
+  description?: string;
+  price?: number;
+  quantity?: number;
+  inventoryStatus?: string;
+  category?: string;
+  image?: string;
+  rating?: number;
+}
+
+
+
+
+Screenshot 15 +
+
Figure 18. Product Interface
+
+
+

As we can see in the methods, we are getting the data from a hardcoded file products.json.

+
+
+
+
{
+	"data": [
+		{
+			"id": "1000",
+			"code": "f230fh0g3",
+			"name": "Bamboo Watch",
+			"description": "Product Description",
+			"image": "bamboo-watch.jpg",
+			"price": 65,
+			"category": "Accessories",
+			"quantity": 24,
+			"inventoryStatus": "INSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1001",
+			"code": "nvklal433",
+			"name": "Black Watch",
+			"description": "Product Description",
+			"image": "black-watch.jpg",
+			"price": 72,
+			"category": "Accessories",
+			"quantity": 61,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1002",
+			"code": "zz21cz3c1",
+			"name": "Blue Band",
+			"description": "Product Description",
+			"image": "blue-band.jpg",
+			"price": 79,
+			"category": "Fitness",
+			"quantity": 2,
+			"inventoryStatus": "LOWSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1003",
+			"code": "244wgerg2",
+			"name": "Blue T-Shirt",
+			"description": "Product Description",
+			"image": "blue-t-shirt.jpg",
+			"price": 29,
+			"category": "Clothing",
+			"quantity": 25,
+			"inventoryStatus": "INSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1004",
+			"code": "h456wer53",
+			"name": "Bracelet",
+			"description": "Product Description",
+			"image": "bracelet.jpg",
+			"price": 15,
+			"category": "Accessories",
+			"quantity": 73,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1005",
+			"code": "av2231fwg",
+			"name": "Brown Purse",
+			"description": "Product Description",
+			"image": "brown-purse.jpg",
+			"price": 120,
+			"category": "Accessories",
+			"quantity": 0,
+			"inventoryStatus": "OUTOFSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1006",
+			"code": "bib36pfvm",
+			"name": "Chakra Bracelet",
+			"description": "Product Description",
+			"image": "chakra-bracelet.jpg",
+			"price": 32,
+			"category": "Accessories",
+			"quantity": 5,
+			"inventoryStatus": "LOWSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1007",
+			"code": "mbvjkgip5",
+			"name": "Galaxy Earrings",
+			"description": "Product Description",
+			"image": "galaxy-earrings.jpg",
+			"price": 34,
+			"category": "Accessories",
+			"quantity": 23,
+			"inventoryStatus": "INSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1008",
+			"code": "vbb124btr",
+			"name": "Game Controller",
+			"description": "Product Description",
+			"image": "game-controller.jpg",
+			"price": 99,
+			"category": "Electronics",
+			"quantity": 2,
+			"inventoryStatus": "LOWSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1009",
+			"code": "cm230f032",
+			"name": "Gaming Set",
+			"description": "Product Description",
+			"image": "gaming-set.jpg",
+			"price": 299,
+			"category": "Electronics",
+			"quantity": 63,
+			"inventoryStatus": "INSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1010",
+			"code": "plb34234v",
+			"name": "Gold Phone Case",
+			"description": "Product Description",
+			"image": "gold-phone-case.jpg",
+			"price": 24,
+			"category": "Accessories",
+			"quantity": 0,
+			"inventoryStatus": "OUTOFSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1011",
+			"code": "4920nnc2d",
+			"name": "Green Earbuds",
+			"description": "Product Description",
+			"image": "green-earbuds.jpg",
+			"price": 89,
+			"category": "Electronics",
+			"quantity": 23,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1012",
+			"code": "250vm23cc",
+			"name": "Green T-Shirt",
+			"description": "Product Description",
+			"image": "green-t-shirt.jpg",
+			"price": 49,
+			"category": "Clothing",
+			"quantity": 74,
+			"inventoryStatus": "INSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1013",
+			"code": "fldsmn31b",
+			"name": "Grey T-Shirt",
+			"description": "Product Description",
+			"image": "grey-t-shirt.jpg",
+			"price": 48,
+			"category": "Clothing",
+			"quantity": 0,
+			"inventoryStatus": "OUTOFSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1014",
+			"code": "waas1x2as",
+			"name": "Headphones",
+			"description": "Product Description",
+			"image": "headphones.jpg",
+			"price": 175,
+			"category": "Electronics",
+			"quantity": 8,
+			"inventoryStatus": "LOWSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1015",
+			"code": "vb34btbg5",
+			"name": "Light Green T-Shirt",
+			"description": "Product Description",
+			"image": "light-green-t-shirt.jpg",
+			"price": 49,
+			"category": "Clothing",
+			"quantity": 34,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1016",
+			"code": "k8l6j58jl",
+			"name": "Lime Band",
+			"description": "Product Description",
+			"image": "lime-band.jpg",
+			"price": 79,
+			"category": "Fitness",
+			"quantity": 12,
+			"inventoryStatus": "INSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1017",
+			"code": "v435nn85n",
+			"name": "Mini Speakers",
+			"description": "Product Description",
+			"image": "mini-speakers.jpg",
+			"price": 85,
+			"category": "Clothing",
+			"quantity": 42,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1018",
+			"code": "09zx9c0zc",
+			"name": "Painted Phone Case",
+			"description": "Product Description",
+			"image": "painted-phone-case.jpg",
+			"price": 56,
+			"category": "Accessories",
+			"quantity": 41,
+			"inventoryStatus": "INSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1019",
+			"code": "mnb5mb2m5",
+			"name": "Pink Band",
+			"description": "Product Description",
+			"image": "pink-band.jpg",
+			"price": 79,
+			"category": "Fitness",
+			"quantity": 63,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1020",
+			"code": "r23fwf2w3",
+			"name": "Pink Purse",
+			"description": "Product Description",
+			"image": "pink-purse.jpg",
+			"price": 110,
+			"category": "Accessories",
+			"quantity": 0,
+			"inventoryStatus": "OUTOFSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1021",
+			"code": "pxpzczo23",
+			"name": "Purple Band",
+			"description": "Product Description",
+			"image": "purple-band.jpg",
+			"price": 79,
+			"category": "Fitness",
+			"quantity": 6,
+			"inventoryStatus": "LOWSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1022",
+			"code": "2c42cb5cb",
+			"name": "Purple Gemstone Necklace",
+			"description": "Product Description",
+			"image": "purple-gemstone-necklace.jpg",
+			"price": 45,
+			"category": "Accessories",
+			"quantity": 62,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1023",
+			"code": "5k43kkk23",
+			"name": "Purple T-Shirt",
+			"description": "Product Description",
+			"image": "purple-t-shirt.jpg",
+			"price": 49,
+			"category": "Clothing",
+			"quantity": 2,
+			"inventoryStatus": "LOWSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1024",
+			"code": "lm2tny2k4",
+			"name": "Shoes",
+			"description": "Product Description",
+			"image": "shoes.jpg",
+			"price": 64,
+			"category": "Clothing",
+			"quantity": 0,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1025",
+			"code": "nbm5mv45n",
+			"name": "Sneakers",
+			"description": "Product Description",
+			"image": "sneakers.jpg",
+			"price": 78,
+			"category": "Clothing",
+			"quantity": 52,
+			"inventoryStatus": "INSTOCK",
+			"rating": 4
+		},
+		{
+			"id": "1026",
+			"code": "zx23zc42c",
+			"name": "Teal T-Shirt",
+			"description": "Product Description",
+			"image": "teal-t-shirt.jpg",
+			"price": 49,
+			"category": "Clothing",
+			"quantity": 3,
+			"inventoryStatus": "LOWSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1027",
+			"code": "acvx872gc",
+			"name": "Yellow Earbuds",
+			"description": "Product Description",
+			"image": "yellow-earbuds.jpg",
+			"price": 89,
+			"category": "Electronics",
+			"quantity": 35,
+			"inventoryStatus": "INSTOCK",
+			"rating": 3
+		},
+		{
+			"id": "1028",
+			"code": "tx125ck42",
+			"name": "Yoga Mat",
+			"description": "Product Description",
+			"image": "yoga-mat.jpg",
+			"price": 20,
+			"category": "Fitness",
+			"quantity": 15,
+			"inventoryStatus": "INSTOCK",
+			"rating": 5
+		},
+		{
+			"id": "1029",
+			"code": "gwuby345v",
+			"name": "Yoga Set",
+			"description": "Product Description",
+			"image": "yoga-set.jpg",
+			"price": 20,
+			"category": "Fitness",
+			"quantity": 25,
+			"inventoryStatus": "INSTOCK",
+			"rating": 8
+		}
+	]
+}
+
+
+
+
+Screenshot 16 +
+
Figure 19. Product Json Data
+
+
+

But in our component.ts we can see in ngOnInit that we fetch the data when the component is initialized. So when the component is rendered, the data will be in the table.

+
+
+

In the first lines of our table we can see some attributes and events like value, rows, paginator, globalFilterFields, selection, rowHover, dataKey, currentPageReportTemplate, showCurrentPageReport.

+
+
+

We can see more details from those attributes and events here: https://primefaces.org/primeng/showcase/#/table

+
+
+

In the first section, we can see the <ng-template>; this is where we can search for a value in the table.

+
+
+
+
<p-table #dt [value]="products" [rows]="10" [paginator]="true"
+    [globalFilterFields]="['name','country.name','representative.name','status']" [(selection)]="selectedProducts"
+    [rowHover]="true" dataKey="id" currentPageReportTemplate="Showing {first} to {last} of {totalRecords} entries"
+    [showCurrentPageReport]="true">
+    <ng-template pTemplate="caption">
+      <div class="p-d-flex p-ai-center p-jc-between">
+        <h5 class="p-m-0">Manage Products</h5>
+        <span class="p-input-icon-left">
+          <i class="pi pi-search"></i>
+          <input pInputText type="text" (input)="dt.filterGlobal($event.target.value, 'contains')"
+            placeholder="Search..." />
+        </span>
+      </div>
+    </ng-template>
+
+
+
+
+Screenshot 17 +
+
Figure 20. Search on Table
+
+
+

The next <ng-template> is the header of the table, where we can see the name of each column.

+
+
+
+
    <ng-template pTemplate="header">
+      <tr>
+        <th style="width: 3rem">
+          <p-tableHeaderCheckbox></p-tableHeaderCheckbox>
+        </th>
+        <th pSortableColumn="name">Name <p-sortIcon field="name"></p-sortIcon>
+        </th>
+        <th pSortableColumn="price">Price <p-sortIcon field="price"></p-sortIcon>
+        </th>
+        <th pSortableColumn="category">Category <p-sortIcon field="category"></p-sortIcon>
+        </th>
+        <th pSortableColumn="rating">Reviews <p-sortIcon field="rating"></p-sortIcon>
+        </th>
+        <th pSortableColumn="inventoryStatus">Status <p-sortIcon field="inventoryStatus"></p-sortIcon>
+        </th>
+        <th></th>
+      </tr>
+    </ng-template>
+
+
+
+
+Screenshot 18 +
+
Figure 21. Table Headers
+
+
+

After completing the header, we need to build the table body. This is where we print each row's values.

+
+
+
+
    <ng-template pTemplate="body" let-product>
+      <tr>
+        <td>
+          <p-tableCheckbox [value]="product"></p-tableCheckbox>
+        </td>
+        <td>{{product.name}}</td>
+        <td>{{product.price | currency:'USD'}}</td>
+        <td>{{product.category}}</td>
+        <td>
+          <p-rating [ngModel]="product.rating" [readonly]="true" [cancel]="false"></p-rating>
+        </td>
+        <td><span
+            [class]="'product-badge status-' + product.inventoryStatus.toLowerCase()">{{product.inventoryStatus}}</span>
+        </td>
+        <td>
+          <button pButton pRipple icon="pi pi-pencil" class="p-button-rounded p-button-success p-mr-2"
+            (click)="editProduct(product)"></button>
+          <button pButton pRipple icon="pi pi-trash" class="p-button-rounded p-button-warning"
+            (click)="deleteProduct(product)"></button>
+        </td>
+      </tr>
+    </ng-template>
+
+
+
+
+Screenshot 19 +
+
Figure 22. Table Body
+
+
+

As we can see, we have some buttons with methods

+
+
+

The first method edits a specific product, (click)="editProduct(product)", and the second one deletes it, (click)="deleteProduct(product)".

+
+
+
+
  editProduct(product: Product): any {
+    this.product = { ...product };
+    this.productDialog = true;
+  }
+
+  deleteProduct(product: Product): any {
+    this.confirmationService.confirm({
+      message: 'Are you sure you want to delete ' + product.name + '?',
+      header: 'Confirm',
+      icon: 'pi pi-exclamation-triangle',
+      accept: () => {
+        this.products = this.products.filter(val => val.id !==  product.id);
+        this.product = {};
+        this.messageService.add({ severity: 'success', summary: 'Successful', detail: 'Product Deleted', life: 3000 });
+      }
+    });
+  }
+
+
+
+
+Screenshot 20 +
+
Figure 23. Delete and Edit methods
+
+
+

In the last part of the table, we will have a section showing how many products we have.

+
+
+
+Screenshot 21 +
+
Figure 24. Table footer
+
+
+

To do it, we just need to add another template with the following code:

+
+
+
+
    <ng-template pTemplate="summary">
+      <div class="p-d-flex p-ai-center p-jc-between">
+        In total there are {{products ? products.length : 0 }} products.
+      </div>
+    </ng-template>
+
+
+
+
+Screenshot 22 +
+
Figure 25. Table footer code
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-ngrx-effects.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-ngrx-effects.html new file mode 100644 index 00000000..670eb2c0 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-ngrx-effects.html @@ -0,0 +1,448 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Side effects with NgRx/Effects

+
+
+

Reducers are pure functions, meaning they are side-effect free and deterministic. Many actions however have side effects like sending messages or displaying a toast notification. NgRx encapsulates these actions in effects.

+
+
+

Let’s build a recommended movies list so the user can add movies to their watchlist.

+
+
+
+
+

Obtaining the recommendation list from the server

+
+
+

Create a module for recommendations and add stores and states as in the previous chapter. Add EffectsModule.forRoot([]) to the imports in AppModule below StoreModule.forRoot(). Add effects to the feature module:

+
+
+
+
ng generate effect recommendation/Recommendation -m recommendation/recommendation.module.ts
+
+
+
+

We need actions for loading the movie list, success and failure cases:

+
+
+

recommendation/actions/index.ts

+
+
+
+
import { createAction, props, union } from '@ngrx/store';
+import { Movie } from 'src/app/watchlist/models/movies';
+
+export const loadRecommendedMovies = createAction('[Recommendation List] Load movies');
+export const loadRecommendedMoviesSuccess = createAction('[Recommendation API] Load movies success', props<{movies: Movie[]}>());
+export const loadRecommendedMoviesFailure = createAction('[Recommendation API] Load movies failure', props<{error: any}>());
+
+const actions = union({
+    loadRecommendedMovies,
+    loadRecommendedMoviesSuccess,
+    loadRecommendedMoviesFailure
+});
+
+export type ActionsUnion = typeof actions;
+
+
+
+

In the reducer, we use a loading flag so the UI can show a loading spinner. The store is updated with arriving data.

+
+
+

recommendation/actions/index.ts

+
+
+
+
export interface State {
+  items: Movie[];
+  loading: boolean;
+}
+
+export const initialState: State = {
+  items: [],
+  loading: false
+};
+
+export function reducer(state = initialState, action: recommendationActions.ActionsUnion): State {
+  switch (action.type) {
+    case '[Recommendation List] Load movies':
+      return {
+        ...state,
+        items: [],
+        loading: true
+      };
+
+    case '[Recommendation API] Load movies failure':
+      return {
+        ...state,
+          loading: false
+      };
+
+    case '[Recommendation API] Load movies success':
+      return {
+        ...state,
+        items: action.movies,
+        loading: false
+      };
+
+    default:
+      return state;
+  }
+}
+
+export const getAll = (state: State) => state.items;
+export const isLoading = (state: State) => state.loading;
+
+
+
+

We need an API service to talk to the server. For demonstration purposes, we simulate an answer delayed by one second:

+
+
+

recommendation/services/recommendation-api.service.ts

+
+
+
+
@Injectable({
+  providedIn: 'root'
+})
+export class RecommendationApiService {
+
+  private readonly recommendedMovies: Movie[] = [
+    {
+      id: 2,
+      title: 'The Hunger Games',
+      genre: 'sci-fi',
+      releaseYear: 2012,
+      runtimeMinutes: 144
+    },
+    {
+      id: 4,
+      title: 'Avengers: Endgame',
+      genre: 'fantasy',
+      releaseYear: 2019,
+      runtimeMinutes: 181
+    }
+  ];
+
+  loadRecommendedMovies(): Observable<Movie[]> {
+    return of(this.recommendedMovies).pipe(delay(1000));
+  }
+}
+
+
+
+

Here are the effects:

+
+
+

recommendation/services/recommendation-api.service.ts

+
+
+
+
@Injectable()
+export class RecommendationEffects {
+
+  constructor(
+    private actions$: Actions,
+    private recommendationApi: RecommendationApiService,
+  ) { }
+
+  @Effect()
+  loadBooks$ = this.actions$.pipe(
+    ofType(recommendationActions.loadRecommendedMovies.type),
+    switchMap(() => this.recommendationApi.loadRecommendedMovies().pipe(
+      map(movies => recommendationActions.loadRecommendedMoviesSuccess({ movies })),
+      catchError(error => of(recommendationActions.loadRecommendedMoviesFailure({ error })))
+    ))
+  );
+}
+
+
+
+

Effects are always observables and return actions. In this example, we consume the actions observable provided by NgRx and listen only for the loadRecommendedMovies actions by using the ofType operator. Using switchMap, we map to a new observable, one that loads movies and maps the successful result to a new loadRecommendedMoviesSuccess action or a failure to loadRecommendedMoviesFailure. In a real application we would show a notification in the error case.

+
+
+
+
+

==

+
+
+

If an effect should not dispatch another action, return an empty observable. +== ==

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-ngrx-entity.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-ngrx-entity.html new file mode 100644 index 00000000..ca081c91 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-ngrx-entity.html @@ -0,0 +1,376 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Simplifying CRUD with NgRx/Entity

+
+
+

Most of the time when manipulating entries in the store, we like to create, read, update, or delete entries (CRUD). NgRx/Entity provides convenience functions if each item of a collection has an id property. Luckily all our entities already have this property.

+
+
+

Let’s add functionality to add a movie to the watchlist. First, create the required action:

+
+
+

recommendation/actions/index.ts

+
+
+
+
export const addToWatchlist = createAction('[Recommendation List] Add to watchlist',
+    props<{ watchlistItemId: number, movie: Movie, addedAt: Date }>());
+
+
+
+
+
+

==

+
+
+

You may wonder why the Date object is not created inside the reducer instead, since it should always be the current time. However, remember that reducers should be deterministic state machines — State A + Action B should always result in the same State C. This makes reducers easily testable. +== ==

+
+
+

Then, rewrite the watchlistData reducer to make use of NgRx/Entity:

+
+
+

recommendation/actions/index.ts

+
+
+
+
export interface State extends EntityState<WatchlistItem> { (1)
+}
+
+export const entityAdapter = createEntityAdapter<WatchlistItem>(); (2)
+
+export const initialState: State = entityAdapter.getInitialState(); (3)
+
+const entitySelectors = entityAdapter.getSelectors();
+
+export function reducer(state = initialState, action: playbackActions.ActionsUnion | recommendationActions.ActionsUnion): State {
+  switch (action.type) {
+    case playbackActions.playbackFinished.type:
+      const itemToUpdate = entitySelectors
+      .selectAll(state) (4)
+      .find(item => item.movie.id == action.movieId);
+      if (itemToUpdate) {
+        return entityAdapter.updateOne({ (5)
+          id: itemToUpdate.id,
+          changes: { playbackMinutes: action.stoppedAtMinute } (6)
+        }, state);
+      } else {
+        return state;
+      }
+
+    case recommendationActions.addToWatchlist.type:
+      return entityAdapter.addOne({id: action.watchlistItemId, movie: action.movie, added: action.addedAt, playbackMinutes: 0}, state);
+
+    default:
+      return state;
+  }
+}
+
+
+export const getAllItems = entitySelectors.selectAll;
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + +
1NgRx/Entity requires state to extend EntityState. It provides a list of ids and a dictionary of id ⇒ entity entries
2The entity adapter provides data manipulation operations and selectors
3The state can be initialized with getInitialState(), which accepts an optional object to define any additional state beyond EntityState
4selectAll returns an array of all entities
5All adapter operations consume the state object as the last argument and produce a new state
6Update methods accept a partial change definition; you don’t have to clone the object
+
+
+

This concludes the tutorial on NgRx. If you want to learn about advanced topics such as selectors with arguments, testing, or router state, head over to the official NgRx documentation.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-ngrx-getting-started.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-ngrx-getting-started.html new file mode 100644 index 00000000..a67154ec --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-ngrx-getting-started.html @@ -0,0 +1,408 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Introduction to NgRx

+
+
+

NgRx is a state management framework for Angular based on the Redux pattern.

+
+
+
+
+

The need for client side state management

+
+
+

You may wonder why you should bother with state management. Usually data resides in a back-end storage system, e.g. a database, and is retrieved by the client on a per-need basis. To add, update, or delete entities from this store, clients have to invoke API endpoints at the back-end. Mimicking database-like transactions on the client side may seem redundant. However, there are many use cases for which a global client-side state is appropriate:

+
+
+
    +
  • +

    the client has some kind of global state which should survive the destruction of a component, but does not warrant server side persistence, for example: volume level of media, expansion status of menus

    +
  • +
  • +

    server side data should not be retrieved every time it is needed, either because multiple components consume it, or because it should be cached, e.g. the personal watchlist in an online streaming app

    +
  • +
  • +

    the app provides a rich experience with offline functionality, e.g. a native app built with Ionic

    +
  • +
+
+
+

Saving global states inside the services they originate from results in a data flow that is hard to follow and state becoming inconsistent due to unordered state mutations. Following the single source of truth principle, there should be a central location holding all your application’s state, just like a server side database does. State management libraries for Angular provide tools for storing, retrieving, and updating client-side state.

+
+
+
+
+

Why NgRx?

+
+
+

As stated in the introduction, devon4ng does not stipulate a particular state library, or require using one at all. However, NgRx has proven to be a robust, mature solution for this task, with good tooling and 3rd-party library support. Albeit introducing a level of indirection that requires additional effort even for simple features, the Redux concept enforces a clear separation of concerns leading to a cleaner architecture.

+
+
+

Nonetheless, you should always compare different approaches to state management and pick the best one suiting your use case. Here’s a (non-exhaustive) list of competing state management libraries:

+
+
+
    +
  • +

    Plain RxJS using the simple store described in Abstract Class Store

    +
  • +
  • +

    NgXS reduces some boilerplate of NgRx by leveraging the power of decorators and moving side effects to the store

    +
  • +
  • +

    MobX follows a more imperative approach in contrast to the functional Redux pattern

    +
  • +
  • +

    Akita also uses an imperative approach with direct setters in the store, but keeps the concept of immutable state transitions

    +
  • +
+
+
+
+
+

Setup

+
+
+

To get a quick start, use the provided template for devon4ng + NgRx.

+
+
+

To manually install the core store package together with a set of useful extensions:

+
+
+

NPM:

+
+
+
+
`npm install @ngrx/store @ngrx/effects @ngrx/entity @ngrx/store-devtools --save`
+
+
+
+

Yarn:

+
+
+
+
`yarn add @ngrx/store @ngrx/effects @ngrx/entity @ngrx/store-devtools`
+
+
+
+

We recommend to add the NgRx schematics to your project so you can create code artifacts from the command line:

+
+
+

NPM:

+
+
+
+
`npm install @ngrx/schematics --save-dev`
+
+
+
+

Yarn:

+
+
+
+
`yarn add @ngrx/schematics --dev`
+
+
+
+

Afterwards, make NgRx your default schematics provider, so you don’t have to type the qualified package name every time:

+
+
+
+
`ng config cli.defaultCollection @ngrx/schematics`
+
+
+
+

If you have custom settings for Angular schematics, you have to configure them as described here.

+
+
+
+
+

Concept

+
+
+
+NgRx Architecture +
+
Figure 1. NgRx architecture overview
+
+
+

Figure 1 gives an overview of the NgRx data flow. The single source of truth is managed as an immutable state object by the store. Components dispatch actions to trigger state changes. Actions are handed over to reducers, which take the current state and action data to compute the next state. Actions are also consumed by effects, which perform side effects such as retrieving data from the back-end, and may dispatch new actions as a result. Components subscribe to state changes using selectors.

+
+
+

Continue with Creating a Simple Store.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-ngrx-simple-store.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-ngrx-simple-store.html new file mode 100644 index 00000000..646b5e67 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-ngrx-simple-store.html @@ -0,0 +1,771 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

State, Selection and Reducers

+
+ +
+
+
+

Creating a Simple Store

+
+
+

In the following pages we use the example of an online streaming service. We will model a particular feature, a watchlist that can be populated by the user with movies she or he wants to see in the future.

+
+
+
+
+

Initializing NgRx

+
+
+

If you’re starting fresh, you first have to initialize NgRx and create a root state. The fastest way to do this is using the schematic:

+
+
+
+
`ng generate @ngrx/schematics:store State --root --module app.module.ts`
+
+
+
+

This will automatically generate a root store and register it in the app module. Next we generate a feature module for the watchlist:

+
+
+

` ng generate module watchlist`

+
+
+

and create a corresponding feature store:

+
+
+

` ng generate store watchlist/Watchlist -m watchlist.module.ts`

+
+
+

This generates a file watchlist/reducers/index.ts with the reducer function, and registers the store in the watchlist module declaration.

+
+
+
+
+

== =

+
+
+

If you’re getting an error Schematic "store" not found in collection "@schematics/angular", this means you forgot to register the NgRx schematics as default. +== == =

+
+
+

Next, add the WatchlistModule to the AppModule imports so the feature store is registered when the application starts. We also added the store devtools which we will use later, resulting in the following file:

+
+
+

app.module.ts

+
+
+
+
import { BrowserModule } from '@angular/platform-browser';
+import { NgModule } from '@angular/core';
+
+import { AppComponent } from './app.component';
+import { EffectsModule } from '@ngrx/effects';
+import { AppEffects } from './app.effects';
+import { StoreModule } from '@ngrx/store';
+import { reducers, metaReducers } from './reducers';
+import { StoreDevtoolsModule } from '@ngrx/store-devtools';
+import { environment } from '../environments/environment';
+import { WatchlistModule } from './watchlist/watchlist.module';
+
+@NgModule({
+  declarations: [
+    AppComponent
+  ],
+  imports: [
+    BrowserModule,
+    WatchlistModule,
+    StoreModule.forRoot(reducers, { metaReducers }),
+    // Instrumentation must be imported after importing StoreModule (config is optional)
+    StoreDevtoolsModule.instrument({
+      maxAge: 25, // Retains last 25 states
+      logOnly: environment.production, // Restrict extension to log-only mode
+    }),
+    !environment.production ? StoreDevtoolsModule.instrument() : []
+  ],
+  providers: [],
+  bootstrap: [AppComponent]
+})
+export class AppModule { }
+
+
+
+
+
+

Create an entity model and initial state

+
+
+

We need a simple model for our list of movies. Create a file watchlist/models/movies.ts and insert the following code:

+
+
+
+
export interface Movie {
+    id: number;
+    title: string;
+    releaseYear: number;
+    runtimeMinutes: number;
+    genre: Genre;
+}
+
+export type Genre = 'action' | 'fantasy' | 'sci-fi' | 'romantic' | 'comedy' | 'mystery';
+
+export interface WatchlistItem {
+    id: number;
+    movie: Movie;
+    added: Date;
+    playbackMinutes: number;
+}
+
+
+
+
+
+

== =

+
+
+

We discourage putting several types into the same file and do this only for the sake of keeping this tutorial brief. +== == =

+
+
+

Later we will learn how to retrieve data from the back-end using effects. For now we will create an initial state for the user with a default movie.

+
+
+

State is defined and transformed by a reducer function. Let’s create a watchlist reducer:

+
+
+
+
```
+cd watchlist/reducers
+ng g reducer WatchlistData --reducers index.ts
+```
+
+
+
+

Open the generated file watchlist-data.reducer.ts. You see three exports: The State interface defines the shape of the state. There is only one instance of a feature state in the store at all times. The initialState constant is the state at application creation time. The reducer function will later be called by the store to produce the next state instance based on the current state and an action object.

+
+
+

Let’s put a movie into the user’s watchlist:

+
+
+

watchlist-data.reducer.ts

+
+
+
+
export interface State {
+  items: WatchlistItem[];
+}
+
+export const initialState: State = {
+  items: [
+    {
+      id: 42,
+      movie: {
+        id: 1,
+        title: 'Die Hard',
+        genre: 'action',
+        releaseYear: 1988,
+        runtimeMinutes: 132
+      },
+      playbackMinutes: 0,
+      added: new Date(),
+    }
+  ]
+};
+
+
+
+
+
+

Select the current watchlist

+
+
+

State slices can be retrieved from the store using selectors.

+
+
+

Create a watchlist component:

+
+
+
+
`ng g c watchlist/Watchlist`
+
+
+
+

and add it to the exports of WatchlistModule. Also, replace app.component.html with

+
+
+
+
<app-watchlist></app-watchlist>
+
+
+
+

State observables are obtained using selectors. They are memoized by default, meaning that you don’t have to worry about performance if you use complicated calculations when deriving state — these are only performed once per state emission.

+
+
+

Add a selector to watchlist-data.reducer.ts:

+
+
+
+
`export const getAllItems = (state: State) => state.items;`
+
+
+
+

Next, we have to re-export the selector for this sub-state in the feature reducer. Modify the watchlist/reducers/index.ts like this:

+
+
+

watchlist/reducers/index.ts

+
+
+
+
import {
+  ActionReducer,
+  ActionReducerMap,
+  createFeatureSelector,
+  createSelector,
+  MetaReducer
+} from '@ngrx/store';
+import { environment } from 'src/environments/environment';
+import * as fromWatchlistData from './watchlist-data.reducer';
+import * as fromRoot from 'src/app/reducers/index';
+
+export interface WatchlistState { (1)
+  watchlistData: fromWatchlistData.State;
+}
+
+export interface State extends fromRoot.State { (2)
+  watchlist: WatchlistState;
+}
+
+export const reducers: ActionReducerMap<WatchlistState> = { (3)
+  watchlistData: fromWatchlistData.reducer,
+};
+
+export const metaReducers: MetaReducer<WatchlistState>[] = !environment.production ? [] : [];
+
+export const getFeature = createFeatureSelector<State, WatchlistState>('watchlist'); (4)
+
+export const getWatchlistData = createSelector( (5)
+  getFeature,
+  state => state.watchlistData
+);
+
+export const getAllItems = createSelector( (6)
+  getWatchlistData,
+  fromWatchlistData.getAllItems
+);
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + +
1The feature state, each member is managed by a different reducer
2Feature states are registered by the forFeature method. This interface provides a typesafe path from root to feature state.
3Tie sub-states of a feature state to the corresponding reducers
4Create a selector to access the 'watchlist' feature state
5select the watchlistData sub state
6re-export the selector
+
+
+

Note how createSelector allows to chain selectors. This is a powerful tool that also allows for selecting from multiple states.

+
+
+

You can use selectors as pipeable operators:

+
+
+

watchlist.component.ts

+
+
+
+
export class WatchlistComponent {
+  watchlistItems$: Observable<WatchlistItem[]>;
+
+  constructor(
+    private store: Store<fromWatchlist.State>
+  ) {
+    this.watchlistItems$ = this.store.pipe(select(fromWatchlist.getAllItems));
+  }
+}
+
+
+
+

watchlist.component.html

+
+
+
+
<h1>Watchlist</h1>
+<ul>
+    <li *ngFor="let item of watchlistItems$ | async">{{item.movie.title}} ({{item.movie.releaseYear}}): {{item.playbackMinutes}}/{{item.movie.runtimeMinutes}} min watched</li>
+</ul>
+
+
+
+
+
+

Dispatching an action to update watched minutes

+
+
+

We track the user’s current progress at watching a movie as the playbackMinutes property. After closing a video, the watched minutes have to be updated. In NgRx, state is updated by dispatching actions. An action is an object with a (globally unique) type discriminator and an optional payload.

+
+
+
+
+

== Creating the action

+
+
+

Create a file playback/actions/index.ts. In this example, we do not further separate the actions per sub state. Actions can be defined by using action creators:

+
+
+

playback/actions/index.ts

+
+
+
+
import { createAction, props, union } from '@ngrx/store';
+
+export const playbackFinished = createAction('[Playback] Playback finished', props<{ movieId: number, stoppedAtMinute: number }>());
+
+const actions = union({
+    playbackFinished
+});
+
+export type ActionsUnion = typeof actions;
+
+
+
+

First we specify the type, followed by a call to the payload definition function. Next, we create a union of all possible actions for this file using union, which allows us to access action payloads in the reducer in a typesafe way.

+
+
+
+
+

== =

+
+
+

Action types should follow the naming convention [Source] Event, e.g. [Recommended List] Hide Recommendation or [Auth API] Login Success. Think of actions as events rather than commands. You should never use the same action at two different places (you can still handle multiple actions the same way). This facilitates tracing the source of an action. For details see Good Action Hygiene with NgRx by Mike Ryan (video). +== == =

+
+
+
+
+

== Dispatch

+
+
+

We skip the implementation of an actual video playback page and simulate watching a movie in 10 minute segments by adding a link in the template:

+
+
+

watchlist-component.html

+
+
+
+
<li *ngFor="let item of watchlistItems$ | async">... <button (click)="stoppedPlayback(item.movie.id, item.playbackMinutes + 10)">Add 10 Minutes</button></li>
+
+
+
+

watchlist-component.ts

+
+
+
+
import * as playbackActions from 'src/app/playback/actions';
+...
+  stoppedPlayback(movieId: number, stoppedAtMinute: number) {
+    this.store.dispatch(playbackActions.playbackFinished({ movieId, stoppedAtMinute }));
+  }
+
+
+
+
+
+

== State reduction

+
+
+

Next, we handle the action inside the watchlistData reducer. Note that actions can be handled by multiple reducers and effects at the same time to update different states, for example if we’d like to show a rating modal after playback has finished.

+
+
+

watchlist-data.reducer.ts

+
+
+
+
export function reducer(state = initialState, action: playbackActions.ActionsUnion): State {
+  switch (action.type) {
+    case playbackActions.playbackFinished.type:
+      return {
+        ...state,
+        items: state.items.map(updatePlaybackMinutesMapper(action.movieId, action.stoppedAtMinute))
+      };
+
+    default:
+      return state;
+  }
+}
+
+export function updatePlaybackMinutesMapper(movieId: number, stoppedAtMinute: number) {
+  return (item: WatchlistItem) => {
+    if (item.movie.id == movieId) {
+      return {
+        ...item,
+        playbackMinutes: stoppedAtMinute
+      };
+    } else {
+      return item;
+    }
+  };
+}
+
+
+
+

Note how we changed the reducer’s function signature to reference the actions union. The switch-case handles all incoming actions to produce the next state. The default case handles all actions a reducer is not interested in by returning the state unchanged. Then we find the watchlist item corresponding to the movie with the given id and update the playback minutes. Since state is immutable, we have to clone all objects down to the one we would like to change using the object spread operator (…​).

+
+
+
+
+

== =

+
+
+

Selectors rely on object identity to decide whether the value has to be recalculated. Do not clone objects that are not on the path to the change you want to make. This is why updatePlaybackMinutesMapper returns the same item if the movie id does not match. +== == =

+
+
+
+
+

== Alternative state mapping with Immer

+
+
+

It can be hard to think in immutable changes, especially if your team has a strong background in imperative programming. In this case, you may find the Immer library convenient, which allows to produce immutable objects by manipulating a proxied draft. The same reducer can then be written as:

+
+
+

watchlist-data.reducer.ts with Immer

+
+
+
+
import { produce } from 'immer';
+...
+case playbackActions.playbackFinished.type:
+      return produce(state, draft => {
+        const itemToUpdate = draft.items.find(item => item.movie.id == action.movieId);
+        if (itemToUpdate) {
+          itemToUpdate.playbackMinutes = action.stoppedAtMinute;
+        }
+      });
+
+
+
+

Immer works out of the box with plain objects and arrays.

+
+
+
+
+

== Redux devtools

+
+
+

If the StoreDevToolsModule is instrumented as described above, you can use the browser extension Redux devtools to see all dispatched actions and the resulting state diff, as well as the current state, and even travel back in time by undoing actions.

+
+
+
+Redux Devtools +
+
Figure 1. Redux devtools
+
+
+

Continue with learning about effects

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-npm-yarn-workflow.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-npm-yarn-workflow.html new file mode 100644 index 00000000..b0b9c003 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-npm-yarn-workflow.html @@ -0,0 +1,975 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Package Managers Workflow

+
+ +
+
+
+

Introduction

+
+
+

This document aims to provide you the necessary documentation and sources in order to help you understand the importance of dependencies between packages.

+
+
+

Projects in NodeJS make use of modules, chunks of reusable code made by other people or teams. These small chunks of reusable code are called packages [1]. Packages are used to solve specific problems or tasks. These relations between your project and the external packages are called dependencies.

+
+
+

For example, imagine we are doing a small program that takes your birthday as an input and tells you how many days are left until your birthday. We search in the repository if someone has published a package to retrieve the actual date and manage date types, and maybe we could search for another package to show a calendar, because we want to optimize our time, and we wish the user to click a calendar button and choose the day in the calendar instead of typing it.

+
+
+

As you can see, packages are convenient. In some cases, they may be even needed, as they can manage aspects of your program you may not be proficient in, or provide an easier use of them.

+
+
+

For more comprehensive information visit npm definition

+
+
+
+
+

Package.json

+
+
+

Dependencies in your project are stored in a file called package.json. Every package.json must contain, at least, the name and version of your project.

+
+
+

Package.json is located in the root of your project.

+
+
+ + + + + +
+ + +If package.json is not on your root directory refer to Problems you may encounter section +
+
+
+

If you wish to learn more information about package.json, click on the following links:

+
+ +
+
+
+

== Content of package.json

+
+
+

As you noticed, package.json is a really important file in your project. It contains essential information about our project, therefore you need to understand what’s inside.

+
+
+

The structure of package.json is divided in blocks, inside the first one you can find essential information of your project such as the name, version, license and optionally some [Scripts].

+
+
+
+
{
+  "name": "exampleproject",
+  "version": "0.0.0",
+  "license": "MIT",
+  "scripts": {
+    "ng": "ng",
+    "start": "ng serve",
+    "build": "ng build",
+    "test": "ng test",
+    "lint": "ng lint",
+    "e2e": "ng e2e"
+  }
+
+
+
+

The next block is called dependencies and contains the packages that project needs in order to be developed, compiled and executed.

+
+
+
+
"private": true,
+  "dependencies": {
+    "@angular/animations": "^4.2.4",
+    "@angular/common": "^4.2.4",
+    "@angular/forms": "^4.2.4",
+    ...
+    "zone.js": "^0.8.14"
+  }
+
+
+
+

After dependencies we find devDependencies, another kind of dependencies present in the development of the application but unnecessary for its execution. One example is typescript. Code is written in typescript, and then, transpiled to JavaScript. This means the application is not using typescript in execution and consequently not included in the deployment of our application.

+
+
+
+
"devDependencies": {
+    "@angular/cli": "1.4.9",
+    "@angular/compiler-cli": "^4.2.4",
+    ...
+    "@types/node": "~6.0.60",
+    "typescript": "~2.3.3"
+  }
+
+
+
+

Having a peer dependency means that your package needs a dependency that is the same exact dependency as the person installing your package

+
+
+
+
"peerDependencies": {
+    "package-123": "^2.7.18"
+  }
+
+
+
+

Optional dependencies are just that: optional. If they fail to install, Yarn will still say the install process was successful.

+
+
+
+
"optionalDependencies": {
+    "package-321": "^2.7.18"
+  }
+
+
+
+

Finally you can have bundled dependencies which are packages bundled together when publishing your package in a repository.

+
+
+
+
{
+  "bundledDependencies": [
+    "package-4"
+  ]
+}
+
+
+
+

Here is the link to an in-depth explanation of dependency types​.

+
+
+
+
+

== Scripts

+
+
+

Scripts are a great way of automating tasks related to your package, such as simple build processes or development tools.

+
+
+

For example:

+
+
+
+
{
+  "name": "exampleproject",
+  "version": "0.0.0",
+  "license": "MIT",
+  "scripts": {
+    "build-project": "node hello-world.js",
+  }
+
+
+
+

You can run that script by running the command yarn (run) script or npm run script, check the example below:

+
+
+
+
$ yarn (run) build-project    # run is optional
+$ npm run build-project
+
+
+
+

There are special reserved words for scripts, like pre-install, which will execute the script automatically before the packages you install are installed.

+
+
+

Check different uses for scripts in the following links:

+
+ +
+

Or you can go back to +[Content of package.json]​.

+
+
+
+
+

Managing dependencies

+
+
+

In order to manage dependencies we recommend using package managers in your projects.

+
+
+

A big reason is their usability. Adding or removing a package is really easy, and by doing so, the package manager updates package.json and copies (or removes) the package in the needed location, with a single command.

+
+
+

Another reason, closely related to the first one, is reducing human error by automating the package management process.

+
+
+

Two of the package managers you can use in NodeJS projects are "yarn" and "npm". While you can use both, we encourage you to use only one of them while working on projects. Using both may lead to different dependencies between members of the team.

+
+
+
+
+

== npm

+
+
+

We’ll start by installing npm following this small guide here.

+
+
+

As stated on the web, npm comes inside of NodeJS, and must be updated after installing NodeJS; the instructions to update npm are written in the same guide you used earlier.

+
+
+

How npm works

+
+
+

In order to explain how npm works, let’s take a command as an example:

+
+
+
+
$ npm install @angular/material @angular/cdk
+
+
+
+

This command tells npm to look for the packages @angular/material and @angular/cdk in the npm registry, download and decompress them in the folder node_modules along with their own dependencies. Additionally, npm will update package.json and create a new file called package-lock.json.

+
+
+

After initialization and installing the first package there will be a new folder called node_modules in your project. This folder is where your packages are unzipped and stored, following a tree scheme.

+
+
+

Take into consideration that both npm and yarn need a package.json in the root of your project in order to work properly. If after creating your project you don’t have it, download the package.json again from the repository or you’ll have to start again.

+
+
+

Brief overview of commands

+
+
+

If we need to create a package.json from scratch, we can use the command init. This command asks the user for basic information about the project and creates a brand new package.json.

+
+
+
+
$ npm init
+
+
+
+

Install (or i) installs all modules listed as dependencies in package.json locally. You can also specify a package, and install that package. Install can also be used with the parameter -g, which tells npm to install the [Global package].

+
+
+
+
$ npm install
+$ npm i
+$ npm install Package
+
+
+
+ + + + + +
+ + +Earlier versions of npm did not add dependencies to package.json unless it was used with the flag --save, so npm install package would be npm install --save package, you have one example below. +
+
+
+
+
$ npm install --save Package
+
+
+
+

Npm needs flags in order to know what kind of dependency you want in your project, in npm you need to put the flag -D or --save-dev to install devDependencies, for more information consult the links at the end of this section.

+
+
+
+
$ npm install -D package
+$ npm install --save-dev package
+
+
+
+

+
+
+

The next command uninstalls the module you specified in the command.

+
+
+
+
$ npm uninstall Package
+
+
+
+

The ls command shows us the dependencies as a nested tree, useful if you have few packages, not so useful when you need a lot of packages.

+
+
+
+
$ npm ls
+
+
+
+
+
npm@@VERSION@ /path/to/npm
+└─┬ init-package-json@0.0.4
+  └── promzard@0.1.5
+
+
+
+
example tree
+

We recommend you to learn more about npm commands in the following link, navigating to the section CLI commands.

+
+
+

About Package-lock.json

+
+
+

Package-lock.json describes the dependency tree resulting from using package.json and npm. Whenever you update, add or remove a package, package-lock.json is deleted and redone with the new dependencies.

+
+
+
+
 "@angular/animations": {
+      "version": "4.4.6",
+      "resolved": "https://registry.npmjs.org/@angular/animations/-/animations-4.4.6.tgz",
+      "integrity": "sha1-+mYYmaik44y3xYPHpcl85l1ZKjU=",
+      "requires": {
+        "tslib": "1.8.0"
+      }
+
+
+
+

This lock file is checked every time the command npm i (or npm install) is used without specifying a package. In case it exists and is valid, npm will install the exact tree that was generated, such that subsequent installs are able to generate identical dependency trees.

+
+
+ + + + + +
+ + +It is not recommended to modify this file yourself. It’s better to leave its management to npm. +
+
+
+

More information is provided by the npm team at package-lock.json

+
+
+
+
+

== Yarn

+
+
+

Yarn is an alternative to npm. If you wish to install yarn, follow the guide getting started with yarn and download the correct version for your operating system. NodeJS is also needed; you can find it here.

+
+
+

Working with yarn

+
+
+

Yarn is used like npm, with small differences in syntax, for example npm install module is changed to yarn add module.

+
+
+
+
$ yarn add @covalent
+
+
+
+

This command is going to download the required packages, modify package.json, put the package in the folder node_modules and makes a new yarn.lock with the new dependency.

+
+
+

However, unlike npm, yarn maintains a cache with packages you download inside. You don’t need to download every file every time you do a general installation. This makes installations faster than with npm.

+
+
+

Similarly to npm, yarn creates and maintains his own lock file, called yarn.lock. Yarn.lock gives enough information about the project for dependency tree to be reproduced.

+
+
+

yarn commands

+
+
+

Here we have a brief description of yarn’s most used commands:

+
+
+
+
$ yarn add Package
+$ yarn add --dev Package
+
+
+
+

Adds a package locally to use in your package. Adding the flags --dev or -D will add them to devDependencies instead of the default dependencies, if you need more information check the links at the end of the section.

+
+
+
+
$ yarn init
+
+
+
+

Initializes the development of a package.

+
+
+
+
$ yarn install
+
+
+
+

Installs all the dependencies defined in a package.json file, you can also write "yarn" to achieve the same effect.

+
+
+
+
$ yarn remove Package
+
+
+
+

You use it when you wish to remove a package from your project.

+
+
+
+
$ yarn global add Package
+
+
+
+

Installs the [Global package].

+
+
+

Please, refer to the documentation to learn more about yarn commands and their attributes: yarn commands

+
+
+

yarn.lock

+
+
+

This file has the same purpose as Package-lock.json: to guide the package manager, in this case yarn, to install the dependency tree specified in yarn.lock.

+
+
+

Yarn.lock and package.json are essential files when collaborating on a project with more co-workers and may be a source of errors if programmers do not use the same manager.

+
+
+

Yarn.lock follows the same structure as package-lock.json, you can find an example of dependency below:

+
+
+
+
"@angular/animations@^4.2.4":
+  version "4.4.6"
+  resolved "https://registry.yarnpkg.com/@angular/animations/-/animations-4.4.6.tgz#fa661899a8a4e38cb7c583c7a5c97ce65d592a35"
+  dependencies:
+    tslib "^1.7.1"
+
+
+
+ + + + + +
+ + +As with package-lock.json, it’s strongly not advised to modify this file. Leave its management to yarn +
+
+
+

You can learn more about yarn.lock here: yarn.lock

+
+
+
+
+

== Global package

+
+
+

Global packages are packages installed in your operating system instead of your local project. Global packages are useful for developer tooling that is not part of any individual project but instead is used for local commands.

+
+
+

A good example of global package is @angular/cli, a command line interface for angular used in our projects. You can install +a global package in npm with "npm install -g package" and "yarn global add package" with yarn, you have a npm example below:

+
+
+
Listing 1. npm global package
+
+
npm install -g @angular/cli
+
+
+ +
+
+
+

== Package version

+
+
+

Dependencies are critical to the success of a package. You must be extra careful about +which version packages are using, one package in a different version may break your code.

+
+
+

Versioning in npm and yarn follows a semantic versioning scheme called semver, following the logic MAJOR.MINOR.PATCH, like for example, @angular/animations: 4.4.6.

+
+
+

Different versions

+
+
+

Sometimes, packages are installed with a different version from the one initially installed. +This happens because package.json also contains the range of versions we allow yarn or npm to +install or update to, example:

+
+
+
+
"@angular/animations": "^4.2.4"
+
+
+
+

And here the installed one:

+
+
+
+
 "@angular/animations": {
+      "version": "4.4.6",
+      "resolved": "https://registry.npmjs.org/@angular/animations/-/animations-4.4.6.tgz",
+      "integrity": "sha1-+mYYmaik44y3xYPHpcl85l1ZKjU=",
+      "requires": {
+        "tslib": "1.8.0"
+      }
+
+
+
+

As you can see, the version we initially added is 4.2.4, and the version finally installed after +a global installation of all packages, 4.4.6.

+
+
+

Installing packages without package-lock.json or yarn.lock using their respective packet managers, will always +end with npm or yarn installing the latest version allowed by package.json.

+
+
+

"@angular/animations": "^4.2.4" contains not only the version we added, but also the range we allow npm and yarn +to update. Here are some examples:

+
+
+
+
"@angular/animations": "<4.2.4"
+
+
+
+

The version installed must be lower than 4.2.4 .

+
+
+
+
"@angular/animations": ">=4.2.4"
+
+
+
+

The version installed must be greater than or equal to 4.2.4 .

+
+
+
+
"@angular/animations": "=4.2.4"
+
+
+
+

The version installed must be equal to 4.2.4 .

+
+
+
+
"@angular/animations": "^4.2.4"
+
+
+
+

The version installed cannot modify the first non-zero digit, for example in this case +it cannot surpass 5.0.0 or be lower than 4.2.4 .

+
+
+

You can learn more about this in Versions

+
+
+
+
+

Problems you may encounter

+
+
+

If you can’t find package.json, you may have deleted the one you had previously, +which means you have to download the package.json from the repository. +In the case you are creating a new project you can create a new package.json. More information +in the links below. Click on Package.json if you come from that section.

+
+ +
+ + + + + +
+ + +Using npm install or yarn without package.json in your projects will +result in compilation errors. As we mentioned earlier, +Package.json contains essential information about your project. +
+
+
+

If you have package.json, but you don’t have package-lock.json or yarn.lock the use of +command "npm install" or "yarn" may result in a different dependency tree.

+
+
+

If you are trying to import a module and Visual Studio Code is not able to find it, this is usually caused by an error adding the package to the project; try to add the module again with yarn or npm, and restart Visual Studio Code.

+
+
+

Be careful with the semantic versioning inside your package.json of the packages, +or you may find a new update on one of your dependencies breaking your code.

+
+
+ + + + + +
+ + +In the following link +there is a solution to a problematic update to one package. +
+
+
+

A list of common errors of npm can be found in: npm errors

+
+
+
+
+

== Recommendations

+
+
+

Use yarn or npm in your project, reach an agreement with your team in order to choose one, this will avoid +undesired situations like forgetting to upload an updated yarn.lock or package-lock.json. +Be sure to have the latest version of your project when possible.

+
+
+ + + + + +
+ + +Pull your project every time it’s updated. Erase your node_modules folder and reinstall all +dependencies. This assures you to be working with the same dependencies your team has. +
+
+
+

AD Center recommends the use of yarn.

+
+
+
+
+
+
+1. A package is a file or directory that is described by a package.json. . +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-package-managers.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-package-managers.html new file mode 100644 index 00000000..41ef6181 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-package-managers.html @@ -0,0 +1,502 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Package Managers

+
+
+

There are two major package managers currently used for JavaScript / TypeScript projects which leverage NodeJS as a build platform.

+
+
+
    +
  1. +

    npm

    +
  2. +
  3. +

    yarn

    +
  4. +
+
+
+

Our recommendation is to use yarn but both package managers are fine.

+
+
+ + + + + +
+ + +When using npm it is important to use a version greater 5.0 as npm 3 has major drawbacks compared to yarn. +The following guide assumes that you are using npm >= 5 or yarn. +
+
+
+

Before you start reading further, please take a look at the docs:

+
+ +
+

The following guide will describe best practices for working with yarn / npm.

+
+
+
+
+

Semantic Versioning

+
+
+

When working with package managers it is very important to understand the concept of semantic versioning.

+
+
+
Version example 1.2.3
+

|== == == = +|Version |1. |2. |3 +|Version name when incrementing |Major (2.0.0) |Minor (1.3.0) |Patch (1.2.4) +|Has breaking changes |yes |no |no +|Has features |yes |yes |no +|Has bug fixes |yes |yes |yes +|== == == =

+
+
+

The table gives an overview of the most important parts of semantic versioning. +In the header version 1.2.3 is displayed. +The first row shows the name and the resulting version when incrementing a part of the version. +The next rows show specifics of the resulting version - e.g. a major version can have breaking changes, features and bug fixes.

+
+
+

Packages from npm and yarn leverage semantic versioning and instead of selecting a fixed version one can specify a selector. +The most common selectors are:

+
+
+
    +
  • +

    ^1.2.3 +At least 1.2.3 - 1.2.4 or 1.3.0 can be used, 2.0.0 can not be used

    +
  • +
  • +

    ~1.2.3 +At least 1.2.3 - 1.2.4 can be used, 2.0.0 and 1.3.0 can not be used

    +
  • +
  • +

    >=1.2.3 +At least 1.2.3 - every version greater can also be used

    +
  • +
+
+
+

This achieves a lower number of duplicates. +To give an example:

+
+
+

If package A needs version 1.3.0 of package C and package B needs version 1.4.0 of package C one would end up with 4 packages.

+
+
+

If package A needs version ^1.3.0 of package C and package B needs version 1.4.0 of package C one would end up with 3 packages. +A would use the same version of C as B - 1.4.0.

+
+
+
+
+

Do not modify package.json and lock files by hand

+
+
+

Dependencies are always added using a yarn or npm command. Altering the package.json, package-lock.json or yarn.lock file by hand is not recommended.

+
+
+

Always use a yarn or npm command to add a new dependency.

+
+
+

Adding the package express with yarn to dependencies.

+
+
+
+
yarn add express
+
+
+
+

Adding the package express with npm to dependencies.

+
+
+
+
npm install express
+
+
+
+
+
+

What does the lock file do

+
+
+

The purpose of the files yarn.lock and package-lock.json is to freeze versions for a short time.

+
+
+

The following problem is solved:

+
+
+
    +
  • +

    Developer A upgrades the dependency express to fixed version 4.16.3.

    +
  • +
  • +

    express has sub-dependency accepts with version selector ~1.3.5

    +
  • +
  • +

    His local node_modules folder receives accepts in version 1.3.5

    +
  • +
  • +

    On his machine everything is working fine

    +
  • +
  • +

    Afterward version 1.3.6 of accepts is published - it contains a major bug

    +
  • +
  • +

    Developer B now clones the repo and loads the dependencies.

    +
  • +
  • +

    He receives version 1.3.6 of accepts and blames developer A for upgrading to a broken version.

    +
  • +
+
+
+

Both yarn.lock and package-lock.json freeze all the dependencies. For example, in yarn.lock you will find:

+
+
+
Listing 1. yarn.lock example (excerpt)
+
+
accepts@~1.3.5:
+  version "1.3.5"
+  resolved "[...URL to registry]"
+  dependencies:
+    mime-types "~2.1.18"
+    negotiator "0.6.1"
+
+mime-db@~1.33.0:
+  version "1.33.0"
+  resolved "[...URL to registry]"
+
+mime-types@~2.1.18:
+  version "2.1.18"
+  resolved "[...URL to registry]"
+  dependencies:
+    mime-db "~1.33.0"
+
+negotiator@0.6.1:
+  version "0.6.1"
+  resolved "[...URL to registry]"
+
+
+
+

The described problem is solved by the example yarn.lock file.

+
+
+
    +
  • +

    accepts is frozen at version ~1.3.5

    +
  • +
  • +

    All of its sub-dependencies are also frozen. +It needs mime-types at version ~2.1.18 which is frozen at 2.1.18. +mime-types needs mime-db at ~1.33.0 which is frozen at 1.33.0

    +
  • +
+
+
+

Every developer will receive the same versions of every dependency.

+
+
+ + + + + +
+ + +You have to make sure all your developers are using the same npm/yarn version - this includes the CI build. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-routing.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-routing.html new file mode 100644 index 00000000..125cbdd7 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-routing.html @@ -0,0 +1,666 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Routing

+
+
+

A basic introduction to the Angular Router can be found in Angular Docs.

+
+
+

This guide will show common tasks and best practices.

+
+
+
+
+

Defining Routes

+
+
+

For each feature module and the app module all routes should be defined in a separate module with the suffix RoutingModule. +This way the routing modules are the only place where routes are defined. +This pattern achieves a clear separation of concerns. +The following figure illustrates this.

+
+
+
+Routing module declaration +
+
Figure 1. Routing module declaration
+
+
+

It is important to define routes inside app routing module with .forRoot() and in feature routing modules with .forChild().

+
+
+
+
+

Example 1 - No Lazy Loading

+
+
+

In this example two modules need to be configured with routes - AppModule and FlightModule.

+
+
+

The following routes will be configured

+
+
+
    +
  • +

    / will redirect to /search

    +
  • +
  • +

    /search displays FlightSearchComponent (FlightModule)

    +
  • +
  • +

    /search/print/:flightId/:date displays FlightPrintComponent (FlightModule)

    +
  • +
  • +

    /search/details/:flightId/:date displays FlightDetailsComponent (FlightModule)

    +
  • +
  • +

    All other routes will display ErrorPage404 (AppModule)

    +
  • +
+
+
+
Listing 1. app-routing.module.ts
+
+
const routes: Routes = [
+  { path: '', redirectTo: 'search', pathMatch: 'full' },
+  { path: '**', component: ErrorPage404 }
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule]
+})
+export class AppRoutingModule { }
+
+
+
+
Listing 2. flight-search-routing.module.ts
+
+
const routes: Routes = [
+  {
+    path: 'search', children: [
+      { path: '', component: FlightSearchComponent },
+      { path: 'print/:flightId/:date', component: FlightPrintComponent },
+      { path: 'details/:flightId/:date', component: FlightDetailsComponent }
+    ]
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule],
+})
+export class FlightSearchRoutingModule { }
+
+
+
+ + + + + +
+ + +The import order inside AppModule is important. +AppRoutingModule needs to be imported after FlightModule. +
+
+
+
+
+

Example 2 - Lazy Loading

+
+
+

Lazy Loading is a good practice when the application has multiple feature areas and a user might not visit every dialog. +Or at least he might not need every dialog up front.

+
+
+

The following example will configure the same routes as example 1 but will lazy load FlightModule.

+
+
+
Listing 3. app-routing.module.ts
+
+
const routes: Routes = [
+  { path: '/search', loadChildren: 'app/flight-search/flight-search.module#FlightSearchModule' },
+  { path: '**', component: ErrorPage404 }
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule]
+})
+export class AppRoutingModule { }
+
+
+
+
Listing 4. flight-search-routing.module.ts
+
+
const routes: Routes = [
+  {
+    path: '', children: [
+      { path: '', component: FlightSearchComponent },
+      { path: 'print/:flightId/:date', component: FlightPrintComponent },
+      { path: 'details/:flightId/:date', component: FlightDetailsComponent }
+    ]
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule],
+})
+export class FlightSearchRoutingModule { }
+
+
+
+
+
+

Triggering Route Changes

+
+
+

With Angular you have two ways of triggering route changes.

+
+
+
    +
  1. +

    Declarative with bindings in component HTML templates

    +
  2. +
  3. +

    Programmatic with Angular Router service inside component classes

    +
  4. +
+
+
+

On the one hand, architecture-wise it is a much cleaner solution to trigger route changes in Smart Components. This way you have every UI event that should trigger a navigation handled in one place - in a Smart Component. It becomes very easy to look inside the code for every navigation that can occur. Refactoring is also much easier, as there are no navigation events "hidden" in the HTML templates.

+
+
+

On the other hand, in terms of accessibility and SEO it is a better solution to rely on bindings in the view - e.g. by using Angular router-link directive. +This way screen readers and the Google crawler can move through the page easily.

+
+
+ + + + + +
+ + +If you do not have to support accessibility (screen readers, etc.) and to care about SEO (Google rank, etc.), +then you should aim for triggering navigation only in Smart Components. +
+
+
+
+Triggering navigation +
+
Figure 2. Triggering navigation
+
+
+
+
+

Guards

+
+
+

Guards are Angular services implemented on routes which determines whether a user can navigate to/from the route. There are examples below which will explain things better. We have the following types of Guards:

+
+
+
    +
  • +

    CanActivate: It is used to determine whether a user can visit a route. The most common scenario for this guard is to check if the user is authenticated. For example, if we want only logged in users to be able to go to a particular route, we will implement the CanActivate guard on this route.

    +
  • +
  • +

    CanActivateChild: Same as above, only implemented on child routes.

    +
  • +
  • +

    CanDeactivate: It is used to determine if a user can navigate away from a route. Most common example is when a user tries to go to a different page after filling up a form and does not save/submit the changes, we can use this guard to confirm whether the user really wants to leave the page without saving/submitting.

    +
  • +
  • +

    Resolve: For resolving dynamic data.

    +
  • +
  • +

    CanLoad: It is used to determine whether an Angular module can be loaded lazily. Example below will be helpful to understand it.

    +
  • +
+
+
+

Let’s have a look at some examples.

+
+
+
+
+

Example 1 - CanActivate and CanActivateChild guards

+
+ +
+
+
+

== CanActivate guard

+
+
+

As mentioned earlier, a guard is an Angular service and services are simply TypeScript classes. So we begin by creating a class. This class has to implement the CanActivate interface (imported from angular/router), and therefore, must have a canActivate function. The logic of this function determines whether the requested route can be navigated to or not. It returns either a Boolean value or an Observable or a Promise which resolves to a Boolean value. If it is true, the route is loaded, else not.

+
+
+
Listing 5. CanActivate example
+
+
...
+import {CanActivate} from "@angular/router";
+
+@Injectable()
+class ExampleAuthGuard implements CanActivate {
+  constructor(private authService: AuthService) {}
+
+  canActivate(route: ActivatedRouterSnapshot, state: RouterStateSnapshot) {
+	if (this.authService.isLoggedIn()) {
+      return true;
+    } else {
+	  window.alert('Please log in first');
+      return false;
+    }
+  }
+}
+
+
+
+

In the above example, let’s assume we have an AuthService which has an isLoggedIn() method which returns a Boolean value depending on whether the user is logged in. We use it to return true or false from the canActivate function. +The canActivate function accepts two parameters (provided by Angular). The first parameter of type ActivatedRouterSnapshot is the snapshot of the route the user is trying to navigate to (where the guard is implemented); we can extract the route parameters from this instance. The second parameter of type RouterStateSnapshot is a snapshot of the router state the user is trying to navigate to; we can fetch the URL from its url property.

+
+
+ + + + + +
+ + +We can also redirect the user to another page (maybe a login page) if the authService returns false. To do that, inject Router and use its navigate function to redirect to the appropriate page. +
+
+
+

Since it is a service, it needs to be provided in our module:

+
+
+
Listing 6. provide the guard in a module
+
+
@NgModule({
+  ...
+  providers: [
+    ...
+    ExampleAuthGuard
+  ]
+})
+
+
+
+

Now this guard is ready to use on our routes. We implement it where we define our array of routes in the application:

+
+
+
Listing 7. Implementing the guard
+
+
...
+const routes: Routes = [
+  { path: '', redirectTo: 'home', pathMatch: 'full' },
+  { path: 'home', component: HomeComponent },
+  { path: 'page1', component: Page1Component, canActivate: [ExampleAuthGuard] }
+];
+
+
+
+

As you can see, the canActivate property accepts an array of guards. So we can implement more than one guard on a route.

+
+
+
+
+

== CanActivateChild guard

+
+
+

To use the guard on nested (children) routes, we add it to the canActivateChild property like so:

+
+
+
Listing 8. Implementing the guard on child routes
+
+
...
+const routes: Routes = [
+  { path: '', redirectTo: 'home', pathMatch: 'full' },
+  { path: 'home', component: HomeComponent },
+  { path: 'page1', component: Page1Component, canActivateChild: [ExampleAuthGuard], children: [
+	{path: 'sub-page1', component: SubPageComponent},
+    {path: 'sub-page2', component: SubPageComponent}
+  ] }
+];
+
+
+
+
+
+

Example 2 - CanLoad guard

+
+
+

Similar to CanActivate, to use this guard we implement the CanLoad interface and override its canLoad function. Again, this function returns either a Boolean value or an Observable or a Promise which resolves to a Boolean value. The fundamental difference between CanActivate and CanLoad is that CanLoad is used to determine whether an entire module can be lazily loaded or not. If the guard returns false for a module protected by CanLoad, the entire module is not loaded.

+
+
+
Listing 9. CanLoad example
+
+
...
+import {CanLoad, Route} from "@angular/router";
+
+@Injectable()
+class ExampleCanLoadGuard implements CanLoad {
+  constructor(private authService: AuthService) {}
+
+  canLoad(route: Route) {
+	if (this.authService.isLoggedIn()) {
+      return true;
+    } else {
+	  window.alert('Please log in first');
+      return false;
+    }
+  }
+}
+
+
+
+

Again, let’s assume we have an AuthService which has an isLoggedIn() method which returns a Boolean value depending on whether the user is logged in. The canLoad function accepts a parameter of type Route which we can use to fetch the path a user is trying to navigate to (using the path property of Route).

+
+
+

This guard needs to be provided in our module like any other service.

+
+
+

To implement the guard, we use the canLoad property:

+
+
+
Listing 10. Implementing the guard
+
+
...
+const routes: Routes = [
+  { path: 'home', component: HomeComponent },
+  { path: 'admin', loadChildren: 'app/admin/admin.module#AdminModule', canLoad: [ExampleCanLoadGuard] }
+];
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-testing.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-testing.html new file mode 100644 index 00000000..ebed7473 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-testing.html @@ -0,0 +1,719 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Testing

+
+
+

This guide will cover the basics of testing logic inside your code with unit test cases. +The guide assumes that you are familiar with Angular CLI (see the guide)

+
+
+

For testing your Angular application with unit test cases there are two main strategies:

+
+
+
    +
  1. +

    Isolated unit test cases
    +Isolated unit tests examine an instance of a class all by itself without any dependence on Angular or any injected values. +The amount of code and effort needed to create such tests is minimal.

    +
  2. +
  3. +

    Angular Testing Utilities
    +Let you test components including their interaction with Angular. +The amount of code and effort needed to create such tests is a little higher.

    +
  4. +
+
+
+
+
+

Testing Concept

+
+
+

The following figure shows you an overview of the application architecture divided in testing areas.

+
+
+
+Testing Areas +
+
Figure 1. Testing Areas
+
+
+

There are three areas, which need to be covered by different testing strategies.

+
+
+
    +
  1. +

    Components:
    +Smart Components need to be tested because they contain view logic. +Also the interaction with 3rd party components needs to be tested. +When a 3rd party component changes with an upgrade a test will be failing and warn you, that there is something wrong with the new version. +Most of the time Dumb Components do not need to be tested because they mainly display data and do not contain any logic. +Smart Components are always tested with Angular Testing Utilities. +For example selectors, which select data from the store and transform it further, need to be tested.

    +
  2. +
  3. +

    Stores:
    +A store contains methods representing state transitions. +If these methods contain logic, they need to be tested. +Stores are always tested using Isolated unit tests.

    +
  4. +
  5. +

    Services:
    +Services contain Business Logic, which needs to be tested. +UseCase Services represent a whole business use case. +For instance this could be initializing a store with all the data that is needed for a dialog - loading, transforming, storing. +Often Angular Testing Utilities are the optimal solution for testing UseCase Services, because they allow for an easy stubbing of the back-end. +All other services should be tested with Isolated unit tests as they are much easier to write and maintain.

    +
  6. +
+
+
+
+
+

Testing Smart Components

+
+
+

Testing Smart Components should assure the following.

+
+
+
    +
  1. +

    Bindings are correct.

    +
  2. +
  3. +

    Selectors which load data from the store are correct.

    +
  4. +
  5. +

    Asynchronous behavior is correct (loading state, error state, "normal" state).

    +
  6. +
  7. +

    Oftentimes through testing one realizes, that important edge cases are forgotten.

    +
  8. +
  9. +

    Do these test become very complex, it is often an indicator for poor code quality in the component. +Then the implementation is to be adjusted / refactored.

    +
  10. +
  11. +

    When testing values received from the native DOM, you will test also that 3rd party libraries did not change with a version upgrade. +A failing test will show you what part of a 3rd party library has changed. +This is much better than the users doing this for you. +For example a binding might fail because the property name was changed with a newer version of a 3rd party library.

    +
  12. +
+
+
+

In the function beforeEach() the TestBed imported from Angular Testing Utilities needs to be initialized. +The goal should be to define a minimal test-module with TestBed. +The following code gives you an example.

+
+
+
Listing 1. Example test setup for Smart Components
+
+
describe('PrintFlightComponent', () => {
+
+  let fixture: ComponentFixture<PrintFlightComponent>;
+  let store: FlightStore;
+  let printServiceSpy: jasmine.SpyObj<FlightPrintService>;
+
+  beforeEach(() => {
+    const urlParam = '1337';
+    const activatedRouteStub = { params: of({ id: urlParam }) };
+    printServiceSpy = jasmine.createSpyObj('FlightPrintService', ['initializePrintDialog']);
+    TestBed.configureTestingModule({
+      imports: [
+        TranslateModule.forRoot(),
+        RouterTestingModule
+      ],
+      declarations: [
+        PrintFlightComponent,
+        PrintContentComponent,
+        GeneralInformationPrintPanelComponent,
+        PassengersPrintPanelComponent
+      ],
+      providers: [
+        FlightStore,
+        {provide: FlightPrintService, useValue: printServiceSpy},
+        {provide: ActivatedRoute, useValue: activatedRouteStub}
+      ]
+    });
+    fixture = TestBed.createComponent(PrintFlightComponent);
+    store = fixture.debugElement.injector.get(FlightStore);
+    fixture.detectChanges();
+  });
+
+  // ... test cases
+})
+
+
+
+

It is important:

+
+
+
    +
  • +

    Use RouterTestingModule instead of RouterModule

    +
  • +
  • +

    Use TranslateModule.forRoot() without translations +This way you can test language-neutral without translation marks.

    +
  • +
  • +

    Do not add a whole module from your application - in declarations add the tested Smart Component with all its Dumb Components

    +
  • +
  • +

    The store should never be stubbed. +If you need a complex test setup, just use the regular methods defined on the store.

    +
  • +
  • +

    Stub all services used by the Smart Component. +These are mostly UseCase services. +They should not be tested by these tests. +Only the correct call to their functions should be assured. +The logic inside the UseCase services is tested with separate tests.

    +
  • +
  • +

    detectChanges() performs an Angular Change Detection cycle (Angular refreshes all the bindings present in the view)

    +
  • +
  • +

    tick() performs a virtual macro task, tick(1000) is equal to the virtual passing of 1s.

    +
  • +
+
+
+

The following test cases show the testing strategy in action.

+
+
+
Listing 2. Example
+
+
it('calls initializePrintDialog for url parameter 1337', fakeAsync(() => {
+  expect(printServiceSpy.initializePrintDialog).toHaveBeenCalledWith(1337);
+}));
+
+it('creates correct loading subtitle', fakeAsync(() => {
+  store.setPrintStateLoading(123);
+  tick();
+  fixture.detectChanges();
+
+  const subtitle = fixture.debugElement.query(By.css('app-header-element .print-header-container span:last-child'));
+  expect(subtitle.nativeElement.textContent).toBe('PRINT_HEADER.FLIGHT STATE.IS_LOADING');
+}));
+
+it('creates correct subtitle for loaded flight', fakeAsync(() => {
+  store.setPrintStateLoadedSuccess({
+    id: 123,
+    description: 'Description',
+    iata: 'FRA',
+    name: 'Frankfurt',
+    // ...
+  });
+  tick();
+  fixture.detectChanges();
+
+  const subtitle = fixture.debugElement.query(By.css('app-header-element .print-header-container span:last-child'));
+  expect(subtitle.nativeElement.textContent).toBe('PRINT_HEADER.FLIGHT "FRA (Frankfurt)" (ID: 123)');
+}));
+
+
+
+

The examples show the basic testing method

+
+
+
    +
  • +

    Set the store to a well-defined state

    +
  • +
  • +

    check if the component displays the correct values

    +
  • +
  • +

    …​ via checking values inside the native DOM.

    +
  • +
+
+
+
+
+

Testing state transitions performed by stores

+
+
+

Stores are always tested with Isolated unit tests.

+
+
+

Actions triggered by dispatchAction() calls are asynchronously performed to alter the state. +A good solution to test such a state transition is to use the done callback from Jasmine.

+
+
+
Listing 3. Example for testing a store
+
+
let sut: FlightStore;
+
+beforeEach(() => {
+  sut = new FlightStore();
+});
+
+it('setPrintStateLoading sets print state to loading', (done: Function) => {
+  sut.setPrintStateLoading(4711);
+
+  sut.state$.pipe(first()).subscribe(result => {
+    expect(result.print.isLoading).toBe(true);
+    expect(result.print.loadingId).toBe(4711);
+    done();
+  });
+});
+
+it('toggleRowChecked adds flight with given id to selectedValues Property', (done: Function) => {
+  const flight: FlightTO = {
+    id: 12
+    // dummy data
+  };
+  sut.setRegisterabgleichListe([flight]);
+  sut.toggleRowChecked(12);
+
+  sut.state$.pipe(first()).subscribe(result => {
+    expect(result.selectedValues).toContain(flight);
+    done();
+  });
+});
+
+
+
+
+
+

Testing services

+
+
+

When testing services both strategies - Isolated unit tests and Angular Testing Utilities - are valid options.

+
+
+

The goal of such tests are

+
+
+
    +
  • +

    assuring the behavior for valid data.

    +
  • +
  • +

    assuring the behavior for invalid data.

    +
  • +
  • +

    documenting functionality

    +
  • +
  • +

    safely performing refactoring

    +
  • +
  • +

    thinking about edge case behavior while testing

    +
  • +
+
+
+

For simple services Isolated unit tests can be written. +Writing these tests takes less effort and they can be written very fast.

+
+
+

The following listing gives an example of such tests.

+
+
+
Listing 4. Testing a simple service with Isolated unit tests
+
+
let sut: IsyDatePipe;
+
+beforeEach(() => {
+  sut = new IsyDatePipe();
+});
+
+it('transform should return empty string if input value is empty', () => {
+  expect(sut.transform('')).toBe('');
+});
+
+it('transform should return empty string if input value is null', () => {
+  expect(sut.transform(undefined)).toBe('');
+});
+
+// ...more tests
+
+
+
+

For testing Use Case services the Angular Testing Utilities should be used. +The following listing gives an example.

+
+
+
Listing 5. Test setup for testing use case services with Angular Testing Utilities
+
+
let sut: FlightPrintService;
+let store: FlightStore;
+let httpController: HttpTestingController;
+let flightCalculationServiceStub: jasmine.SpyObj<FlightCalculationService>;
+const flight: FlightTo = {
+  // ... valid dummy data
+};
+
+beforeEach(() => {
+  flightCalculationServiceStub = jasmine.createSpyObj('FlightCalculationService', ['getFlightType']);
+  flightCalculationServiceStub.getFlightType.and.callFake((catalog: string, type: string, key: string) => of(`${key}_long`));
+  TestBed.configureTestingModule({
+    imports: [
+      HttpClientTestingModule,
+      RouterTestingModule,
+    ],
+    providers: [
+      FlightPrintService,
+      FlightStore,
+      FlightAdapter,
+      {provide: FlightCalculationService, useValue: flightCalculationServiceStub}
+    ]
+  });
+
+  sut = TestBed.get(FlightPrintService);
+  store = TestBed.get(FlightStore);
+  httpController = TestBed.get(HttpTestingController);
+});
+
+
+
+

When using TestBed, it is important

+
+
+
    +
  • +

    to import HttpClientTestingModule for stubbing the back-end

    +
  • +
  • +

    to import RouterTestingModule for stubbing the Angular router

    +
  • +
  • +

    not to stub stores, adapters and business services

    +
  • +
  • +

    to stub services from libraries like FlightCalculationService - the correct implementation of libraries should not be tested by these tests.

    +
  • +
+
+
+

Testing back-end communication looks like this:

+
+
+
Listing 6. Testing back-end communication with Angular HttpTestingController
+
+
it('loads flight if not present in store', fakeAsync(() => {
+  sut.initializePrintDialog(1337);
+  const processRequest = httpController.expectOne('/path/to/flight');
+  processRequest.flush(flight);
+
+  httpController.verify();
+}));
+
+it('does not load flight if present in store', fakeAsync(() => {
+  const storedFlight = {...flight, id: 4711};
+  store.setRegisterabgleich(storedFlight);
+
+  sut.initializePrintDialog(4711);
+  httpController.expectNone('/path/to/flight');
+
+  httpController.verify();
+}));
+
+
+
+

The first test assures a correct XHR request is performed if initializePrintDialog() is called and no data is in the store. +The second test assures no XHR request is performed if the needed data is already in the store.

+
+
+

The next steps are checks for the correct implementation of logic.

+
+
+
Listing 7. Example testing a Use Case service
+
+
it('creates flight destination for valid key in svz', fakeAsync(() => {
+  const flightTo: FlightTo = {
+    ...flight,
+    id: 4712,
+    profile: '77'
+  };
+  store.setFlight(flightTo);
+  let result: FlightPrintContent|undefined;
+
+  sut.initializePrintDialog(4712);
+  store.select(s => s.print.content).subscribe(content => result = content);
+  tick();
+
+  expect(result!.destination).toBe('77_long (ID: 77)');
+}));
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-update-angular-cli.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-update-angular-cli.html new file mode 100644 index 00000000..beb15d9b --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-update-angular-cli.html @@ -0,0 +1,346 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Update Angular CLI

+
+ +
+
+
+

Angular CLI common issues

+
+
+

There are constant updates for the official Angular framework dependencies. These dependencies are directly related with the Angular CLI package. Since this package comes installed by default inside the devonfw distribution folder for Windows OS and the distribution is updated every few months it needs to be updated in order to avoid known issues.

+
+
+
+
+

Angular CLI update guide

+
+
+

For Linux users it is as easy as updating the global package:

+
+
+
+
$ npm uninstall -g @angular/cli
+$ npm install -g @angular/cli
+
+
+
+

For Windows users the process is only a bit harder. Open the devonfw bundled console and do as follows:

+
+
+
+
$ cd [devonfw_dist_folder]
+$ cd software/nodejs
+$ npm uninstall @angular/cli --no-save
+$ npm install @angular/cli --no-save
+
+
+
+

After following these steps you should have the latest Angular CLI version installed in your system. In order to check it run in the distribution console:

+
+
+ + + + + +
+ + +At the time of this writing, the Angular CLI is at 1.7.4 version. +
+
+
+
+
λ ng version
+
+     _                      _                 ____ _     ___
+    / \   _ __   __ _ _   _| | __ _ _ __     / ___| |   |_ _|
+   / △ \ | '_ \ / _` | | | | |/ _` | '__|   | |   | |    | |
+  / ___ \| | | | (_| | |_| | | (_| | |      | |___| |___ | |
+ /_/   \_\_| |_|\__, |\__,_|_|\__,_|_|       \____|_____|___|
+                |___/
+
+
+Angular CLI: 7.2.3
+Node: 10.13.0
+OS: win32 x64
+Angular:
+...
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-upgrade-devon4ng.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-upgrade-devon4ng.html new file mode 100644 index 00000000..35d3e682 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-upgrade-devon4ng.html @@ -0,0 +1,441 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Upgrade devon4ng Angular and Ionic/Angular applications

+
+
+

Angular CLI provides a powerful tool to upgrade Angular based applications to the current stable release of the core framework.

+
+
+

This tool is ng update. It will not only upgrade dependencies and their related ones but also will perform some fixes in your code if available thanks to the provided schematics. It will check even if the update is not possible as there is another library or libraries that are not compatible with the versions of the upgraded dependencies. In this case it will keep your application untouched.

+
+
+ + + + + +
+ + +The repository must be in a clean state before executing a ng update. So, remember to commit your changes first. +
+
+
+
+
+

Basic usage

+
+
+

In order to perform a basic upgrade we will execute:

+
+
+
+
$ ng update @angular/cli @angular/core
+
+
+
+
+
+

Upgrade to new Angular version

+
+
+

The process will be the same, but first we need to make sure that our devon4ng application is in the latest version of Angular 8, so the ng update command can perform the upgrade not only in the dependencies but also making code changes to reflect the new features and fixes.

+
+
+
    +
  • +

    First, upgrade to latest Angular 9 version:

    +
  • +
+
+
+
+
$ ng update @angular/cli@9 @angular/core@9
+
+
+
+

Optionally the flag -C can be added to previous command to make a commit automatically. This is also valid for the next steps.

+
+
+
    +
  • +

    Then, upgrade Angular:

    +
  • +
+
+
+
+
$ ng update @angular/cli @angular/core
+
+
+
+
    +
  • +

    In case you use Angular Material:

    +
  • +
+
+
+
+
$ ng update @angular/material
+
+
+
+
    +
  • +

    If the application depends on third party libraries, the new tool ngcc can be run to make them compatible with the new Ivy compiler. In this case it is recommended to include a postinstall script in the package.json:

    +
  • +
+
+
+
+
{
+  "scripts": {
+    "postinstall": "ngcc --properties es2015 browser module main --first-only --create-ivy-entry-points"
+  }
+}
+
+
+ +
+

Important use cases:

+
+
+
    +
  • +

    To update to the next beta or pre-release version, use the --next=true option.

    +
  • +
  • +

    To update from one major version to another, use the format ng update @angular/cli@^<major_version> @angular/core@^<major_version>.

    +
  • +
  • +

    In case your Angular application uses @angular/material include it in the first command:

    +
    +
    +
    $ ng update @angular/cli @angular/core @angular/material
    +
    +
    +
  • +
+
+
+
+
+

Ionic/Angular applications

+
+
+

Just following the same procedure we can upgrade Angular applications, but we must take care of important specific Ionic dependencies:

+
+
+
+
$ ng update @angular/cli @angular/core @ionic/angular @ionic/angular-toolkit [@ionic/...]
+
+
+
+
+
+

Other dependencies

+
+
+

Every application will make use of different dependencies. The Angular CLI ng update command will also take care of these. For example, if you need to upgrade @capacitor you will perform:

+
+
+
+
$ ng update @capacitor/cli @capacitor/core [@capacitor/...]
+
+
+
+

Another example could be that you need to upgrade @ngx-translate packages. As always in this case you will execute:

+
+
+
+
$ ng update @ngx-translate/core @ngx-translate/http-loader
+
+
+
+
+
+

Angular Update Guide online tool

+
+
+

It is recommended to use the Angular Update Guide tool at https://update.angular.io/ that will provide the necessary steps to upgrade any Angular application depending on multiple criteria.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-working-with-angular-cli.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-working-with-angular-cli.html new file mode 100644 index 00000000..e84fa566 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-working-with-angular-cli.html @@ -0,0 +1,585 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Working with Angular CLI

+
+
+

Angular CLI provides a facade for building, testing, linting, debugging and generating code. +Under the hood Angular CLI uses specific tools to achieve these tasks. +The user does not need to maintain them and can rely on Angular to keep them up to date and maybe switch to other tools which come up in the future.

+
+
+

The Angular CLI provides a wiki with common tasks you encounter when working on applications with the Angular CLI. +The Angular CLI Wiki can be found here.

+
+
+

In this guide we will go through the most important tasks. +To go into more details, please visit the Angular CLI wiki.

+
+
+
+
+

Installing Angular CLI

+
+
+

Angular CLI should be added as global and local dependency. +The following commands add Angular CLI as global Dependency.

+
+
+

yarn command

+
+
+
+
yarn global add @angular/cli
+
+
+
+

npm command

+
+
+
+
npm install -g @angular/cli
+
+
+
+

You can check a successful installation with ng --version. +This should print out the version installed.

+
+
+
+Printing Angular CLI Version +
+
Figure 1. Printing Angular CLI Version
+
+
+
+
+

Running a live development server

+
+
+

The Angular CLI can be used to start a live development server. +First your application will be compiled and then the server will be started. +If you change the code of a file, the server will reload the displayed page. +Run your application with the following command:

+
+
+
+
ng serve -o
+
+
+
+
+
+

Running Unit Tests

+
+
+

All unit tests can be executed with the command:

+
+
+
+
ng test
+
+
+
+

To make a single run and create a code coverage file use the following command:

+
+
+
+
ng test -sr -cc
+
+
+
+ + + + + +
+ + +You can configure the output format for code coverage files to match your requirements in the file karma.conf.js which can be found on toplevel of your project folder. +For instance, this can be useful for exporting the results to a SonarQube. +
+
+
+
+
+

Linting the code quality

+
+
+

You can lint your files with the command

+
+
+
+
ng lint --type-check
+
+
+
+ + + + + +
+ + +You can adjust the linting rules in the file tslint.json which can be found on toplevel of your project folder. +
+
+
+
+
+

Generating Code

+
+ +
+
+
+

Creating a new Angular CLI project

+
+
+

For creating a new Angular CLI project the command ng new is used.

+
+
+

The following command creates a new application named my-app.

+
+
+
+
ng new my-app
+
+
+
+
+
+

Creating a new feature module

+
+
+

A new feature module can be created via the ng generate module command.

+
+
+

The following command generates a new feature module named todo.

+
+
+
+
ng generate module todo
+
+
+
+
+Generate a module with Angular CLI +
+
Figure 2. Generate a module with Angular CLI
+
+
+ + + + + +
+ + +The created feature module needs to be added to the AppModule by hand. +Other option would be to define a lazy route in AppRoutingModule to make this a lazy loaded module. +
+
+
+
+
+

Creating a new component

+
+
+

To create components the command ng generate component can be used.

+
+
+

The following command will generate the component todo-details inside the components layer of the todo module. +It will generate a class, an HTML file, a CSS file and a test file. +Also, it will register this component as a declaration inside the nearest module - this is TodoModule.

+
+
+
+
ng generate component todo/components/todo-details
+
+
+
+
+Generate a component with Angular CLI +
+
Figure 3. Generate a component with Angular CLI
+
+
+ + + + + +
+ + +If you want to export the component, you have to add the component to exports array of the module. +This would be the case if you generate a component inside shared module. +
+
+
+
+
+

Configuring an Angular CLI project

+
+
+

Inside an Angular CLI project the file .angular-cli.json can be used to configure the Angular CLI.

+
+
+

The following options are very important to understand.

+
+
+
    +
  • +

    The property defaults can be used to change the default style extension. +The following settings will make the Angular CLI generate .less files, when a new component is generated.

    +
  • +
+
+
+
+
"defaults": {
+  "styleExt": "less",
+  "component": {}
+}
+
+
+
+
    +
  • +

    The property apps contains all applications maintained with Angular CLI. +Most of the time you will have only one.

    +
    +
      +
    • +

      assets configures all the static files, that the application needs - this can be images, fonts, json files, etc. +When you add them to assets the Angular CLI will put these files to the build target and serve them while debugging. +The following will put all files in /i18n to the output folder /i18n

      +
    • +
    +
    +
  • +
+
+
+
+
"assets": [
+  { "glob": "**/*.json", "input": "./i18n", "output": "./i18n" }
+]
+
+
+
+
    +
  • +

    styles property contains all style files that will be globally available. +The Angular CLI will create a styles bundle that goes directly into index.html with it. +The following will make all styles in styles.less globally available.

    +
  • +
+
+
+
+
"styles": [
+  "styles.less"
+]
+
+
+
+
    +
  • +

    environmentSource and environments are used to manage environment-specific configuration with the Angular CLI. +Inside the code, the file specified in environmentSource is always referenced. +You can define different environments - e.g. production, staging, etc. - which you list in environments. +At compile time the Angular CLI will override all values in environmentSource with the values from the matching environment target. +The following code will build the application for the environment staging.

    +
  • +
+
+
+
+
ng build --environment=staging
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-yarn-2-support.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-yarn-2-support.html new file mode 100644 index 00000000..1669d1ef --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/guide-yarn-2-support.html @@ -0,0 +1,427 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Yarn 2

+
+
+

Yarn v2 is a very different software from the v1. The following list contains the main new features:

+
+ +
+

Please, read them carefully to decide if your current project is suitable to use Yarn 2 as package manager.

+
+
+ + + + + +
+ + +Some features are still experimental, so please do not use them in production environments. +
+
+
+

More info at https://yarnpkg.com/

+
+
+
+
+

Global Install

+
+
+

Installing Yarn 2.x globally is discouraged as the Yarn team is moving to a per-project install strategy. We advise you to keep Yarn 1.x (Classic) as your global binary by installing it via the instructions you can find here.

+
+
+

Once you’ve followed the instructions (running yarn --version from your home directory should yield something like 1.22.0), go to the next section to see how to enable Yarn 2 on your project.

+
+
+
+
+

Per-project install

+
+
+

Follow these instructions to update your current devon4ng project to Yarn 2:

+
+
+
    +
  1. +

    Follow the global install instructions.

    +
  2. +
  3. +

    Move into your project folder:

    +
    +
    +
    cd ~/path/to/project
    +
    +
    +
  4. +
  5. +

    Run the following command:

    +
    +
    +
    yarn policies set-version berry # below v1.22
    +yarn set version berry          # on v1.22+
    +
    +
    +
  6. +
  7. +

    Since the Angular CLI is still not fully supported with the new Yarn architecture, as it is not compatible with PnP, it is necessary to include the node-modules plugin by adding the following line in the .yarnrc.yml file:

    +
    +
    +
    nodeLinker: node-modules
    +
    +
    +
  8. +
  9. +

    Commit the .yarn and .yarnrc.yml changes

    +
  10. +
  11. +

    Run again yarn install.

    +
  12. +
+
+
+ + + + + +
+ + +For more advanced migration topics please refer to https://yarnpkg.com/advanced/migration +
+
+
+
+
+

Which files should be added to gitignore file?

+
+
+

If you’re using Zero-Installs:

+
+
+
+
.yarn/*
+!.yarn/cache
+!.yarn/releases
+!.yarn/plugins
+
+
+
+

If you’re not using Zero-Installs:

+
+
+
+
.yarn/*
+!.yarn/releases
+!.yarn/plugins
+.pnp.*
+
+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/home.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/home.html new file mode 100644 index 00000000..b8b90fe4 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/home.html @@ -0,0 +1,550 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4ng

+
+
+

This guide describes an application architecture for web client development with Angular.

+
+
+
+
+

Motivation

+
+
+

Front-end development is a very difficult task since there are a lot of different frameworks, patterns and practices nowadays. For that reason, in devonfw we decided to make use of Angular since it is a full front-end framework that includes almost all the different patterns and features that any SPA may need and provides a well defined architecture to development, build and deploy.

+
+
+

The idea with devon4ng is to define an architecture which is a compromise between, on the one hand, leveraging the best practices and latest trends like reactive style development, on the other hand, providing a short on-boarding time while still using an architecture that helps us scale and be productive at the same time.

+
+
+

At the same time devon4ng aims to help developers to solve common problems that appear in many projects and provide samples and blueprints to show how to apply these solutions in real situations.

+
+
+
+
+

Contents

+
+ +
+

This section introduces in an easy way the main principles and guidelines based on Angular Style Guide.

+
+ +
+

The goal of this topic is to support the non-functional requirements for the client, i.e. mostly maintainability, scalability, efficiency and portability. As such it provides a component-oriented architecture following the same principles listed already in the devonfw architecture overview.

+
+
+
+
+

Layers

+
+
+

This section provides a condensed explanation about the different layers a good Angular application must provide.

+
+ +
+
+
+

Guides

+
+
+

This section introduces concepts to help developers with the tooling and package managers.

+
+ +
+
+
+

Angular

+
+
+

This is the main section of the documentation, where the developer will find guidelines for accessibility, how to use the Angular toolchain, how to refactor components, create libraries and, in general, maintain Angular applications. Last but not least, developers will also find solutions to common problems many Angular projects may have.

+
+
+ + + + + +
+ + +All the different topics are demonstrated in the samples folder with a small application. +
+
+ +
+
+
+

Ionic

+
+
+

As part of the devon4ng stack, we include a small section to explain how to develop hybrid mobile Ionic/Angular applications and create PWAs with this UI library. As in the previous section, the contents are demonstrated in the samples folder.

+
+ +
+
+
+

Layouts

+
+
+

Any SPA application must have a layout. So, the purpose of this section is to explain the Angular Material approach.

+
+ +
+
+
+

NgRx

+
+
+

State Management is a big topic in big front-end applications. This section explains the fundamentals of the industry-standard library NgRx, showing its main components.

+
+ +
+
+
+

Cookbook

+
+
+

The Cookbook section aims to provide solutions to cross-topic challenges that at this moment do not fit in the previous sections. As in the Angular section, some of the topics are demonstrated with a sample located in the samples folder.

+
+ +
+
+
+

devon4ng templates

+
+
+

In order to support the CobiGen generation tool for Angular applications and the realization of devon4ng demos, and to provide more opinionated samples, the following templates are also included in the devon4ng contents:

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/master-devon4ng.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/master-devon4ng.html new file mode 100644 index 00000000..43bbc01c --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/master-devon4ng.html @@ -0,0 +1,11432 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Angular

+
+ +
+
+
+

Introduction

+
+ +
+

devon4ng

+
+

This guide describes an application architecture for web client development with Angular.

+
+
+
+

Motivation

+
+

Front-end development is a very difficult task since there are a lot of different frameworks, patterns and practices nowadays. For that reason, in devonfw we decided to make use of Angular since it is a full front-end framework that includes almost all the different patterns and features that any SPA may need and provides a well defined architecture to development, build and deploy.

+
+
+

The idea with devon4ng is to define an architecture which is a compromise between, on the one hand, leveraging the best practices and latest trends like reactive style development, on the other hand, providing a short on-boarding time while still using an architecture that helps us scale and be productive at the same time.

+
+
+

At the same time devon4ng aims to help developers to solve common problems that appear in many projects and provide samples and blueprints to show how to apply these solutions in real situations.

+
+
+
+

Contents

+ +
+

This section introduces in an easy way the main principles and guidelines based on Angular Style Guide.

+
+ +
+

The goal of this topic is to support the non-functional requirements for the client, i.e. mostly maintainability, scalability, efficiency and portability. As such it provides a component-oriented architecture following the same principles listed already in the devonfw architecture overview.

+
+
+
+

Layers

+
+

This section provides a condensed explanation about the different layers a good Angular application must provide.

+
+ +
+
+

Guides

+
+

This section introduces concepts to help developers with the tooling and package managers.

+
+ +
+
+

Angular

+
+

This is the main section of the documentation, where the developer will find guidelines for accessibility, how to use the Angular toolchain, how to refactor components, create libraries and, in general, maintain Angular applications. Last but not least, developers will also find solutions to common problems many Angular projects may have.

+
+
+ + + + + +
+ + +All the different topics are demonstrated in the samples folder with a small application. +
+
+ +
+
+

Ionic

+
+

As part of the devon4ng stack, we include a small section to explain how to develop hybrid mobile Ionic/Angular applications and create PWAs with this UI library. As in the previous section, the contents are demonstrated in the samples folder.

+
+ +
+
+

Layouts

+
+

Any SPA application must have a layout. So, the purpose of this section is to explain the Angular Material approach.

+
+ +
+
+

NgRx

+
+

State Management is a big topic in big front-end applications. This section explains the fundamentals of the industry-standard library NgRx, showing its main components.

+
+ +
+
+

Cookbook

+
+

The Cookbook section aims to provide solutions to cross-topic challenges that at this moment do not fit in the previous sections. As in the Angular section, some of the topics are demonstrated with a sample located in the samples folder.

+
+ +
+
+

devon4ng templates

+
+

In order to support the CobiGen generation tool for Angular applications and the realization of devon4ng demos, and to provide more opinionated samples, the following templates are also included in the devon4ng contents:

+
+
+ +
+
+
+
+
+

Architecture

+
+ +
+

Architecture

+
+

The following principles and guidelines are based on Angular Style Guide - especially Angular modules (see Angular Docs). +It extends those where additional guidance is needed to define an architecture which is:

+
+
+
    +
  • +

    maintainable across applications and teams

    +
  • +
  • +

    easy to understand, especially when coming from a classic Java/.Net perspective - so whenever possible the same principles apply both to the server and the client

    +
  • +
  • +

    pattern based to solve common problems

    +
  • +
  • +

    based on best of breed solutions coming from open source and Capgemini project experiences

    +
  • +
  • +

    gives as much guidance as necessary and as little as possible

    +
  • +
+
+
+
+

Overview

+
+

When using Angular the web client architecture is driven by the framework in a certain way Google and the Angular community think about web client architecture. +Angular gives an opinion on how to look at architecture. +It is component-based like devon4j but uses different terms which are common language in web application development. +The important term is module which is used instead of component. The primary reason is the naming collision with the Web Components standard (see Web Components).
+To clarify this:

+
+
+
    +
  • +

    A component describes an UI element containing HTML, CSS and JavaScript - structure, design and logic encapsulated inside a reusable container called component.

    +
  • +
  • +

    A module describes an applications feature area. The application flight-app may have a module called booking.

    +
  • +
+
+
+

An application developed using Angular consists of multiple modules. +There are feature modules and special modules described by the Angular Style Guide - core and shared. +Angular or Angular Style Guide give no guidance on how to structure a module internally. +This is where this architecture comes in.

+
+
+
+

Layers

+
+

The architecture describes two layers. The terminology is based on common language in web development.

+
+
+
+Architecture - Layers +
+
Figure 1. Layers
+
+
+
    +
  • +

    Components Layer encapsulates components which present the current application state. +Components are separated into Smart and Dumb Components. +The only logic present is view logic inside Smart Components.

    +
  • +
  • +

    Services Layer is more or less what we call 'business logic layer' on the server side. +The layer defines the applications state, the transitions between state and classic business logic. +Stores contain application state over time to which Smart Components subscribe to. +Adapters are used to perform XHR, WebSocket connections, etc. +The business model is described inside the module. +Use case services perform business logic needed for use cases. +A use case services interacts with the store and adapters. +Methods of use case services are the API for Smart Components. +Those methods are Actions in reactive terminology.

    +
  • +
+
+
+
+

Modules

+
+

Angular requires a module called app which is the main entrance to an application at runtime - this module gets bootstrapped. +Angular Style Guide defines feature modules and two special modules - core and shared.

+
+
+
+Architecture - Modules +
+
Figure 2. Modules
+
+
+

A feature module is basically a vertical cut through both layers. +The shared module consists of components shared across feature modules. +The core module holds services shared across modules. +So core module is a module only having a services layer +and shared module is a module only having a components layer.

+
+ +
+
+

Meta Architecture

+ +
+
+

Introduction

+ +
+
+

Purpose of this document

+
+

In our business applications, the client easily gets underestimated. Sometimes the client is more complex to develop and design than the server. While the server architecture is nowadays easily to agree as common sense, for clients this is not as obvious and stable especially as it typically depends on the client framework used. Finding a concrete architecture applicable for all clients may therefore be difficult to accomplish.

+
+
+

This document tries to define on a high abstract level, a reference architecture which is supposed to be a mental image and frame for orientation regarding the evaluation and appliance of different client frameworks. As such it defines terms and concepts required to be provided for in any framework and thus gives a common ground of understanding for those acquainted with the reference architecture. This allows better comparison between the various frameworks out there, each having their own terms for essentially the same concepts. It also means that for each framework we need to explicitly map how it implements the concepts defined in this document.

+
+
+

The architecture proposed herein is neither new nor was it developed from scratch. Instead it is the gathered and consolidated knowledge and best practices of various projects (s. References).

+
+
+
+

Goal of the Client Architecture

+
+

The goal of the client architecture is to support the non-functional requirements for the client, i.e. mostly maintainability, scalability, efficiency and portability. As such it provides a component-oriented architecture following the same principles listed already in the devonfw architecture overview. Furthermore it ensures a homogeneity regarding how different concrete UI technologies are being applied in the projects, solving the common requirements in the same way.

+
+
+
+

Architecture Views

+
+

As for the server we distinguish between the business and the technical architecture. Where the business architecture is different from project to project and relates to the concrete design of dialog components given concrete requirements, the technical architecture can be applied to multiple projects.

+
+
+

The focus of this document is to provide a technical reference architecture on the client on a very abstract level defining required layers and components. How the architecture is implemented has to be defined for each UI technology.

+
+
+

The technical infrastructure architecture is out of scope for this document and although it needs to be considered, the concepts of the reference architecture should work across multiple TI architecture, i.e. native or web clients.

+
+
+
+

devonfw Reference Client Architecture

+
+

The following gives a complete overview of the proposed reference architecture. It will be built up incrementally in the following sections.

+
+
+
+Complete Client Architecture Overview +
+
+
+

Figure 1 Overview

+
+
+
+

Client Architecture

+
+

On the highest level of abstraction we see the need to differentiate between dialog components and the container they are managed in, as well as the access to the application server being the back-end for the client (e.g. a devon4j instance). This section gives a summary of these components and how they relate to each other. Detailed architectures for each component will be supplied in subsequent sections.

+
+
+
+Client Architecture Overview +
+
+
+

Figure 2 Overview of Client Architecture

+
+
+
+

== Dialog Component

+
+

A dialog component is a logical, self-contained part of the user interface. It accepts user input and actions and controls communication with the user. Dialog components use the services provided by the dialog container in order to execute the business logic. They are self-contained, i.e. they possess their own user interface together with the associated logic, data and states.

+
+
+
    +
  • +

    Dialog components can be composed of other dialog components forming a hierarchy

    +
  • +
  • +

    Dialog components can interact with each other. This includes communication of a parent to its children, but also between components independent of each other regarding the hierarchy.

    +
  • +
+
+
+
+

== Dialog Container

+
+

Dialog components need to be managed in their life-cycle and how they can be coupled to each other. The dialog container is responsible for this along with the following:

+
+
+
    +
  • +

    Bootstrapping the client application and environment

    +
    +
      +
    • +

      Configuration of the client

      +
    • +
    • +

      Initialization of the application server access component

      +
    • +
    +
    +
  • +
  • +

    Dialog Component Management

    +
    +
      +
    • +

      Controlling the life-cycle

      +
    • +
    • +

      Controlling the dialog flow

      +
    • +
    • +

      Providing means of interaction between the dialogs

      +
    • +
    • +

      Providing application server access

      +
    • +
    • +

      Providing services to the dialog components
      +(e.g. printing, caching, data storage)

      +
    • +
    +
    +
  • +
  • +

    Shutdown of the application

    +
  • +
+
+
+
+

== Application Server Access

+
+

Dialogs will require a back-end application server in order to execute their business logic. Typically in a devonfw application the service layer will provide interfaces for the functionality exposed to the client. These business oriented interfaces should also be present on the client backed by a proxy handling the concrete call of the server over the network. This component provides the set of interfaces as well as the proxy.

+
+
+
+

Dialog Container Architecture

+
+

The dialog container can be further structured into the following components with their respective tasks described in own sections:

+
+
+
+Dialog Container Architecture Overview +
+
+
+

Figure 3 Dialog Container Architecture

+
+
+
+

== Application

+
+

The application component represents the overall client in our architecture. It is responsible for bootstrapping all other components and connecting them with each other. As such it initializes the components below and provides an environment for them to work in.

+
+
+
+

== Configuration Management

+
+

The configuration management manages the configuration of the client, so the client can be deployed in different environments. This includes configuration of the concrete application server to be called or any other environment-specific property.

+
+
+
+

== Dialog Management

+
+

The Dialog Management component provides the means to define, create and destroy dialog components. It therefore offers basic life-cycle capabilities for a component. In addition it also allows composition of dialog components in a hierarchy. The life-cycle is then managed along the hierarchy, meaning when creating/destroying a parent dialog, this affects all child components, which are created/destroyed as well.

+
+
+
+

== Service Registry

+
+

Apart from dialog components, a client application also consists of services offered to these. A service can thereby encompass among others:

+
+
+
    +
  • +

    Access to the application server

    +
  • +
  • +

    Access to the dialog container functions for managing dialogs or accessing the configuration

    +
  • +
  • +

    Dialog independent client functionality such as Printing, Caching, Logging, Encapsulated business logic such as tax calculation

    +
  • +
  • +

    Dialog component interaction

    +
  • +
+
+
+

The service registry offers the possibility to define, register and lookup these services. Note that these services could be dependent on the dialog hierarchy, meaning different child instances could obtain different instances / implementations of a service via the service registry, depending on which service implementations are registered by the parents.

+
+
+

Services should be defined as interfaces allowing for different implementations and thus loose coupling.

+
+
+
+

Dialog Component Architecture

+
+

A dialog component has to support all or a subset of the following tasks:
+(T1) Displaying the user interface incl. internationalization
+(T2) Displaying business data incl. changes made to the data due to user interactions and localization of the data
+(T3) Accepting user input including possible conversion from e.g. entered Text to an Integer
+(T4) Displaying the dialog state
+(T5) Validation of user input
+(T6) Managing the business data incl. business logic altering it due to user interactions
+(T7) Execution of user interactions
+(T8) Managing the state of the dialog (e.g. Edit vs. View)
+(T9) Calling the application server in the course of user interactions

+
+
+

Following the principle of separation of concerns, we further structure a dialog component in an own architecture allowing us the distribute responsibility for these tasks along the defined components:

+
+
+
+Dialog Component Architecture +
+
+
+

Figure 4 Overview of dialog component architecture

+
+
+
+

== Presentation Layer

+
+

The presentation layer generates and displays the user interface, accepts user input and user actions and binds these to the dialog core layer (T1-5). The tasks of the presentation layer fall into two categories:

+
+
+
    +
  • +

    Provision of the visual representation (View component)
    +The presentation layer generates and displays the user interface and accepts user input and user actions. The logical processing of the data, actions and states is performed in the dialog core layer. The data and user interface are displayed in localized and internationalized form.

    +
  • +
  • +

    Binding of the visual representation to the dialog core layer
    +The presentation layer itself does not contain any dialog logic. The data or actions entered by the user are then processed in the dialog core layer. There are three aspects to the binding to the dialog core layer. We refer to “data binding”, “state binding” and “action binding”. Syntactical and (to a certain extent) semantic validations are performed during data binding (e.g. cross-field plausibility checks). Furthermore, the formatted, localized data in the presentation layer is converted into the presentation-independent, neutral data in the dialog core layer (parsing) and vice versa (formatting).

    +
  • +
+
+
+
+

== Dialog Core Layer

+
+

The dialog core layer contains the business logic, the control logic, and the logical state of the dialog. It therefore covers tasks T5-9:

+
+
+
    +
  • +

    Maintenance of the logical dialog state and the logical data
    +The dialog core layer maintains the logical dialog state and the logical data in a form which is independent of the presentation. The states of the presentation (e.g. individual widgets) must not be maintained in the dialog core layer, e.g. the view state could lead to multiple presentation states disabling all editable widgets on the view.

    +
  • +
  • +

    Implementation of the dialog and dialog control logic
    +The component parts in the dialog core layer implement the client specific business logic and the dialog control logic. This includes, for example, the manipulation of dialog data and dialog states as well as the opening and closing of dialogs.

    +
  • +
  • +

    Communication with the application server
    +The dialog core layer calls the interfaces of the application server via the application server access component services.

    +
  • +
+
+
+

The dialog core layer should not depend on the presentation layer enforcing a strict layering and thus minimizing dependencies.

+
+
+
+

== Interactions between dialog components

+
+

Dialog components can interact in the following ways:

+
+
+
+Dialog Interactions +
+
+
+
    +
  • +

    Embedding of dialog components
    +As already said, dialog components can be hierarchically composed. This composition works by embedding one dialog component within the other. Apart from the life-cycle managed by the dialog container, the embedding needs to cater for the visual embedding of the presentation and core layer.

    +
    +
      +
    • +

      Embedding dialog presentation
      +The parent dialog needs to either integrate the embedded dialog in its layout or open it in its own modal window.

      +
    • +
    • +

      Embedding dialog core
      +The parent dialog needs to be able to access the embedded instance of its children. This allows initializing and changing their data and states. On the other hand the children might require context information offered by the parent dialog by registering services in the hierarchical service registry.

      +
    • +
    +
    +
  • +
  • +

    Dialog flow
    +Apart from the embedding of dialog components representing a tight coupling, dialogs can interact with each other by passing the control of the UI, i.e. switching from one dialog to another.

    +
  • +
+
+
+

When interacting, dialog components should interact only between the same or lower layers, i.e. the dialog core should not access the presentation layer of another dialog component.

+
+
+
+

Appendix

+ +
+
+

Notes about Quasar Client

+
+

The Quasar client architecture as the consolidated knowledge of our CSD projects is the major source for the above drafted architecture. However, the above is a much simplified and more agile version thereof:

+
+
+
    +
  • +

    Quasar Client tried to abstract from the concrete UI library being used, so it could decouple the business from the technical logic of a dialog. The presentation layer should be the only one knowing the concrete UI framework used. This level of abstraction was dropped in this reference architecture, although it might of course still make sense in some projects. For fast-moving agile projects in the web however introducing such a level of abstraction takes effort with little gained benefits. With frameworks like Angular 2 we would even introduce one additional seemingly artificial and redundant layer, since it already separates the dialog core from its presentation.

    +
  • +
  • +

    In the past and in the days of Struts, JSF, etc. the concept of session handling was important for the client since part of the client was sitting on a server with a session relating it to its remote counterpart on the users PC. Quasar Client catered for this need, by very prominently differentiating between session and application in the root of the dialog component hierarchy. However, in the current days of SPA applications and the lowered importance of servers-side web clients, this prominent differentiation was dropped. When still needed the referenced documents will provide in more detail how to tailor the respective architecture to this end.

    +
  • +
+
+
+ +
+
+
+

Layers

+
+ +
+

Components Layer

+
+

The components layer encapsulates all components presenting the current application view state, which means data to be shown to the user. +The term component refers to a component described by the standard Web Components. +So this layer has all Angular components, directives and pipes defined for an application. +The main challenges are:

+
+
+
    +
  • +

    how to structure the components layer (see File Structure Guide)

    +
  • +
  • +

    decompose components into maintainable chunks (see Component Decomposition Guide)

    +
  • +
  • +

    handle component interaction

    +
  • +
  • +

    manage calls to the services layer

    +
  • +
  • +

    apply a maintainable data and event flow throughout the component tree

    +
  • +
+
+
+
+

Smart and Dumb Components

+
+

The architecture applies the concept of Smart and Dumb Components (syn. Containers and Presenters). +The concept means that components are divided into Smart and Dumb Components.

+
+
+

A Smart Component typically is a top-level dialog inside the component tree.

+
+
+
    +
  • +

    a component, that can be routed to

    +
  • +
  • +

    a modal dialog

    +
  • +
  • +

    a component, which is placed inside AppComponent

    +
  • +
+
+
+

A Dumb Component can be used by one to many Smart Components. +Inside the component tree a Dumb Component is a child of a Smart Component.

+
+
+
+Component Tree +
+
Figure 3. Component tree example
+
+
+

As shown the topmost component is always the AppComponent in Angular applications. +The component tree describes the hierarchy of components starting from AppComponent. +The figure shows Smart Components in blue and Dumb Components in green. +AppComponent is a Smart Component by definition. +Components placed inside the template of AppComponent are static components inside the component tree. +So they are always displayed. +In the example OverviewComponent and DetailsComponent are rendered by the Angular compiler depending on the current URL the application displays. +So OverviewComponent's sub-tree is displayed if the URL is /overview and DetailsComponent's sub-tree is displayed if the URL is /details. +To clarify this distinction further the following table shows the main differences.

+
+
+
Smart vs Dumb Components
+

|== = +|Smart Components |Dumb Components

+
+
+

|contain the current view state +|show data via binding (@Input) and contain no view state

+
+
+

|handle events emitted by Dumb Components +|pass events up the component tree to be handled by Smart Components (@Output)

+
+
+

|call the services layer +|never call the services layer

+
+
+

|use services +|do not use services

+
+
+

|consists of n Dumb Components +|is independent of Smart Components +|== =

+
+
+
+

Interaction of Smart and Dumb Components

+
+

With the usage of the Smart and Dumb Components pattern one of the most important parts is component interaction. +Angular comes with built-in support for component interaction with @Input() and @Output() Decorators. +The following figure illustrates a unidirectional data flow.

+
+
+
    +
  • +

    Data always goes down the component tree - from a Smart Component down its children.

    +
  • +
  • +

    Events bubble up, to be handled by a Smart Component.

    +
  • +
+
+
+
+Smart and Dumb Components Interaction +
+
Figure 4. Smart and Dumb Component Interaction
+
+
+

As shown, a Dumb Component's role is to define a signature by declaring Input and Output Bindings.

+
+
+
    +
  • +

    @Input() defines what data is necessary for that component to work

    +
  • +
  • +

    @Output() defines which events can be listened on by the parent component

    +
  • +
+
+
+
Listing 1. Dumb Components define a signature
+
+
export class ValuePickerComponent {
+
+  @Input() columns: string[];
+  @Input() items: {}[];
+  @Input() selected: {};
+  @Input() filter: string;
+  @Input() isChunked = false;
+  @Input() showInput = true;
+  @Input() showDropdownHeader = true;
+
+  @Output() elementSelected = new EventEmitter<{}>();
+  @Output() filterChanged = new EventEmitter<string>();
+  @Output() loadNextChunk = new EventEmitter();
+  @Output() escapeKeyPressed = new EventEmitter();
+
+}
+
+
+
+

The example shows the Dumb Component ValuePickerComponent. +It describes seven input bindings with isChunked, showInput and showDropdownHeader being non-mandatory as they have a default value. +Four output bindings are present. Typically, a Dumb Component has very little to no code inside the TypeScript class.

+
+
+
Listing 2. Smart Components use the Dumb Components signature inside the template
+
+
<div>
+
+  <value-input
+    ...>
+  </value-input>
+
+  <value-picker
+    *ngIf="isValuePickerOpen"
+    [columns]="columns"
+    [items]="filteredItems"
+    [isChunked]="isChunked"
+    [filter]="filter"
+    [selected]="selectedItem"
+    [showDropdownHeader]="showDropdownHeader"
+    (loadNextChunk)="onLoadNextChunk()"
+    (elementSelected)="onElementSelected($event)"
+    (filterChanged)="onFilterChanged($event)"
+    (escapeKeyPressed)="onEscapePressedInsideChildTable()">
+  </value-picker>
+
+</div>
+
+
+
+

Inside the Smart Components template the events emitted by Dumb Components are handled. +It is a good practice to name the handlers with the prefix on* (e.g. onInputChanged()).

+
+ +
+
+

Services Layer

+
+

The services layer is more or less what we call 'business logic layer' on the server side. +It is the layer where the business logic is placed. +The main challenges are:

+
+
+
    +
  • +

    Define application state and an API for the components layer to use it

    +
  • +
  • +

    Handle application state transitions

    +
  • +
  • +

    Perform back-end interaction (XHR, WebSocket, etc.)

    +
  • +
  • +

    Handle business logic in a maintainable way

    +
  • +
  • +

    Configuration management

    +
  • +
+
+
+

All parts of the services layer are described in this chapter. +An example which puts the concepts together can be found at the end Interaction of Smart Components through the services layer.

+
+
+
+

Boundaries

+
+

There are two APIs for the components layer to interact with the services layer:

+
+
+
    +
  • +

    A store can be subscribed to for receiving state updates over time

    +
  • +
  • +

    A use case service can be called to trigger an action

    +
  • +
+
+
+

To illustrate the fact the following figure shows an abstract overview.

+
+
+
+Smart and Dumb Components Interaction +
+
Figure 5. Boundaries to components layer
+
+
+
+

Store

+
+

A store is a class which defines and handles application state with its transitions over time. +Interaction with a store is always synchronous. +A basic implementation using RxJS can look like this.

+
+
+ + + + + +
+ + +A more profound implementation taken from a real-life project can be found here (Abstract Class Store). +
+
+
+
Listing 3. Store defined using RxJS
+
+
@Injectable()
+export class ProductSearchStore {
+
+  private stateSource = new BehaviorSubject<ProductSearchState>(defaultProductSearchState);
+  state$ = this.stateSource.asObservable();
+
+  setLoading(isLoading: boolean) {
+    const currentState = this.stateSource.getValue();
+    this.stateSource.next({
+      isLoading: isLoading,
+      products: currentState.products,
+      searchCriteria: currentState.searchCriteria
+    });
+  }
+
+}
+
+
+
+

In the example ProductSearchStore handles state of type ProductSearchState. +The public API is the property state$ which is an observable of type ProductSearchState. +The state can be changed with method calls. +So every desired change to the state needs to be modeled with a method. +In reactive terminology this would be an Action. +The store does not use any services. +Subscribing to the state$ observable leads to the subscribers receiving every new state.

+
+
+

This is basically the Observer Pattern:
+The store consumer registers itself to the observable via state$.subscribe() method call. +The first parameter of subscribe() is a callback function to be called when the subject changes. +This way the consumer - the observer - is registered. +When next() is called with a new state inside the store, all callback functions are called with the new value. +So every observer is notified of the state change. +This equals the Observer Pattern push type.

+
+
+

A store is the API for Smart Components to receive state from the service layer. +State transitions are handled automatically with Smart Components registering to the state$ observable.

+
+
+
+

Use Case Service

+
+

A use case service is a service which has methods to perform asynchronous state transitions. +In reactive terminology this would be an Action of Actions - a thunk (redux) or an effect (@ngrx).

+
+
+
+Use Case Service +
+
Figure 6. Use case services are the main API to trigger state transitions
+
+
+

A use case service's method - an action - interacts with adapters, business services and stores. +So use case services orchestrate whole use cases. +For an example see use case service example.

+
+
+
+

Adapter

+
+

An adapter is used to communicate with the back-end. +This could be a simple XHR request, a WebSocket connection, etc. +An adapter is simple in the way that it does not add anything other than the pure network call. +So there is no caching or logging performed here. +The following listing shows an example.

+
+
+

For further information on back-end interaction see Consuming REST Services

+
+
+
Listing 4. Calling the back-end via an adapter
+
+
@Injectable()
+export class ProductsAdapter {
+
+  private baseUrl = environment.baseUrl;
+
+  constructor(private http: HttpClient) { }
+
+  getAll(): Observable<Product[]> {
+    return this.http.get<Product[]>(this.baseUrl + '/products');
+  }
+
+}
+
+
+
+
+

Interaction of Smart Components through the services layer

+
+

The interaction of smart components is a classic problem which has to be solved in every UI technology. +It is basically how one dialog tells the other something has changed.

+
+
+

An example is adding an item to the shopping basket. +With this action there need to be multiple state updates.

+
+
+
    +
  • +

    The small logo showing how many items are currently inside the basket needs to be updated from 0 to 1

    +
  • +
  • +

    The price needs to be recalculated

    +
  • +
  • +

    Shipping costs need to be checked

    +
  • +
  • +

    Discounts need to be updated

    +
  • +
  • +

    Ads need to be updated with related products

    +
  • +
  • +

    etc.

    +
  • +
+
+
+
+

Pattern

+
+

To handle this interaction in a scalable way we apply the following pattern.

+
+
+
+Interaction of Smart Components via services layer +
+
Figure 7. Smart Component interaction
+
+
+

The state of interest is encapsulated inside a store. All Smart Components interested in the state have to subscribe to the store’s API served by the public observable. Thus, with every update to the store the subscribed components receive the new value. The components basically react to state changes. Altering a store can be done directly if the desired change is synchronous. Most actions are of asynchronous nature so the UseCaseService comes into play. Its actions are void methods, which implement a use case, i.e., adding a new item to the basket. It calls asynchronous actions and can perform multiple store updates over time.

+
+
+

To put this pattern into perspective the UseCaseService is a programmatic alternative to redux-thunk or @ngrx/effects. The main motivation here is to use the full power of TypeScript --strictNullChecks and to let the learning curve not to become as steep as it would be when learning a new state management framework. This way actions are just void method calls.

+
+
+
+

Example

+
+
+Smart component interaction example +
+
Figure 8. Smart Components interaction example
+
+
+

The example shows two Smart Components sharing the FlightSearchState by using the FlightSearchStore. +The use case shown is started by an event in the Smart Component FlightSearchComponent. The action loadFlight() is called. This could be submitting a search form. +The UseCaseService is FlightSearchService, which handles the use case Load Flights.

+
+
+
UseCaseService example
+

+
+
+
+
export class FlightSearchService {
+
+  constructor(
+    private flightSearchAdapter: FlightSearchAdapter,
+    private store: FlightSearchStore
+  ) { }
+
+  loadFlights(criteria: FlightSearchCriteria): void {
+    this.store.setLoadingFlights(true);
+    this.store.clearFlights();
+
+    this.flightSearchAdapter.getFlights(criteria.departureDate,
+        {
+          from: criteria.departureAirport,
+          to: criteria.destinationAirport
+        })
+      .finally(() => this.store.setLoadingFlights(false))
+      .subscribe((result: FlightTo[]) => this.store.setFlights(result, criteria));
+  }
+
+}
+
+
+
+

First the loading flag is set to true and the current flights are cleared. This leads the Smart Component showing a spinner indicating the loading action. Then the asynchronous XHR is triggered by calling the adapter. After completion the loading flag is set to false causing the loading indication no longer to be shown. If the XHR was successful, the data would be put into the store. If the XHR was not successful, this would be the place to handle a custom error. All general network issues should be handled in a dedicated class, i.e., an interceptor. So for example the basic handling of 404 errors is not done here.

+
+
+
+
+
+

Guides

+
+ +
+

Package Managers

+
+

There are two major package managers currently used for JavaScript / TypeScript projects which leverage NodeJS as a build platform.

+
+
+
    +
  1. +

    npm

    +
  2. +
  3. +

    yarn

    +
  4. +
+
+
+

Our recommendation is to use yarn but both package managers are fine.

+
+
+ + + + + +
+ + +When using npm it is important to use a version greater than 5.0 as npm 3 has major drawbacks compared to yarn. +The following guide assumes that you are using npm >= 5 or yarn. +
+
+
+

Before you start reading further, please take a look at the docs:

+
+ +
+

The following guide will describe best practices for working with yarn / npm.

+
+
+
+

Semantic Versioning

+
+

When working with package managers it is very important to understand the concept of semantic versioning.

+
+
+
Version example 1.2.3
+

|== == == = +|Version |1. |2. |3 +|Version name when incrementing |Major (2.0.0) |Minor (1.3.0) |Patch (1.2.4) +|Has breaking changes |yes |no |no +|Has features |yes |yes |no +|Has bug fixes |yes |yes |yes +|== == == =

+
+
+

The table gives an overview of the most important parts of semantic versioning. +In the header version 1.2.3 is displayed. +The first row shows the name and the resulting version when incrementing a part of the version. +The next rows show specifics of the resulting version - e.g. a major version can have breaking changes, features and bug fixes.

+
+
+

Packages from npm and yarn leverage semantic versioning and instead of selecting a fixed version one can specify a selector. +The most common selectors are:

+
+
+
    +
  • +

    ^1.2.3 +At least 1.2.3 - 1.2.4 or 1.3.0 can be used, 2.0.0 can not be used

    +
  • +
  • +

    ~1.2.3 +At least 1.2.3 - 1.2.4 can be used, 2.0.0 and 1.3.0 can not be used

    +
  • +
  • +

    >=1.2.3 +At least 1.2.3 - every version greater can also be used

    +
  • +
+
+
+

This achieves a lower number of duplicates. +To give an example:

+
+
+

If package A needs version 1.3.0 of package C and package B needs version 1.4.0 of package C one would end up with 4 packages.

+
+
+

If package A needs version ^1.3.0 of package C and package B needs version 1.4.0 of package C one would end up with 3 packages. +A would use the same version of C as B - 1.4.0.

+
+
+
+

Do not modify package.json and lock files by hand

+
+

Dependencies are always added using a yarn or npm command. +Altering the package.json, package-json.lock or yarn.lock file by hand is not recommended.

+
+
+

Always use a yarn or npm command to add a new dependency.

+
+
+

Adding the package express with yarn to dependencies.

+
+
+
+
yarn add express
+
+
+
+

Adding the package express with npm to dependencies.

+
+
+
+
npm install express
+
+
+
+
+

What does the lock file do

+
+

The purpose of files yarn.lock and package-json.lock is to freeze versions for a short time.

+
+
+

The following problem is solved:

+
+
+
    +
  • +

    Developer A upgrades the dependency express to fixed version 4.16.3.

    +
  • +
  • +

    express has sub-dependency accepts with version selector ~1.3.5

    +
  • +
  • +

    His local node_modules folder receives accepts in version 1.3.5

    +
  • +
  • +

    On his machine everything is working fine

    +
  • +
  • +

    Afterward version 1.3.6 of accepts is published - it contains a major bug

    +
  • +
  • +

    Developer B now clones the repo and loads the dependencies.

    +
  • +
  • +

    He receives version 1.3.6 of accepts and blames developer A for upgrading to a broken version.

    +
  • +
+
+
+

Both yarn.lock and package-json.lock freeze all the dependencies. +For example in yarn lock you will find.

+
+
+
Listing 5. yarn.lock example (excerpt)
+
+
accepts@~1.3.5:
+  version "1.3.5"
+  resolved "[...URL to registry]"
+  dependencies:
+    mime-types "~2.1.18"
+    negotiator "0.6.1"
+
+mime-db@~1.33.0:
+  version "1.33.0"
+  resolved "[...URL to registry]"
+
+mime-types@~2.1.18:
+  version "2.1.18"
+  resolved "[...URL to registry]"
+  dependencies:
+    mime-db "~1.33.0"
+
+negotiator@0.6.1:
+  version "0.6.1"
+  resolved "[...URL to registry]"
+
+
+
+

The described problem is solved by the example yarn.lock file.

+
+
+
    +
  • +

    accepts is frozen at version ~1.3.5

    +
  • +
  • +

    All of its sub-dependencies are also frozen. +It needs mime-types at version ~2.1.18 which is frozen at 2.1.18. +mime-types needs mime-db at ~1.33.0 which is frozen at 1.33.0

    +
  • +
+
+
+

Every developer will receive the same versions of every dependency.

+
+
+ + + + + +
+ + +You have to make sure all your developers are using the same npm/yarn version - this includes the CI build. +
+
+ +
+
+

Package Managers Workflow

+ +
+
+

Introduction

+
+

This document aims to provide you the necessary documentation and sources in order to help you understand the importance of dependencies between packages.

+
+
+

Projects in NodeJS make use of modules, chunks of reusable code made by other people or teams. These small chunks of reusable code are called packages [1]. Packages are used to solve specific problems or tasks. These relations between your project and the external packages are called dependencies.

+
+
+

For example, imagine we are doing a small program that takes your birthday as an input and tells you how many days are left until your birthday. We search in the repository if someone has published a package to retrieve the actual date and manage date types, and maybe we could search for another package to show a calendar, because we want to optimize our time, and we wish the user to click a calendar button and choose the day in the calendar instead of typing it.

+
+
+

As you can see, packages are convenient. In some cases, they may be even needed, as they can manage aspects of your program you may not be proficient in, or provide an easier use of them.

+
+
+

For more comprehensive information visit npm definition

+
+
+
+

Package.json

+
+

Dependencies in your project are stored in a file called package.json. Every package.json must contain, at least, the name and version of your project.

+
+
+

Package.json is located in the root of your project.

+
+
+ + + + + +
+ + +If package.json is not on your root directory refer to Problems you may encounter section +
+
+
+

If you wish to learn more information about package.json, click on the following links:

+
+ +
+
+

== Content of package.json

+
+

As you noticed, package.json is a really important file in your project. It contains essential information about our project, therefore you need to understand what’s inside.

+
+
+

The structure of package.json is divided in blocks, inside the first one you can find essential information of your project such as the name, version, license and optionally some [Scripts].

+
+
+
+
{
+  "name": "exampleproject",
+  "version": "0.0.0",
+  "license": "MIT",
+  "scripts": {
+    "ng": "ng",
+    "start": "ng serve",
+    "build": "ng build",
+    "test": "ng test",
+    "lint": "ng lint",
+    "e2e": "ng e2e"
+  }
+
+
+
+

The next block is called dependencies and contains the packages that project needs in order to be developed, compiled and executed.

+
+
+
+
"private": true,
+  "dependencies": {
+    "@angular/animations": "^4.2.4",
+    "@angular/common": "^4.2.4",
+    "@angular/forms": "^4.2.4",
+    ...
+    "zone.js": "^0.8.14"
+  }
+
+
+
+

After dependencies we find devDependencies, another kind of dependencies present in the development of the application but unnecessary for its execution. One example is typescript. Code is written in typescript, and then, transpiled to JavaScript. This means the application is not using typescript in execution and consequently not included in the deployment of our application.

+
+
+
+
"devDependencies": {
+    "@angular/cli": "1.4.9",
+    "@angular/compiler-cli": "^4.2.4",
+    ...
+    "@types/node": "~6.0.60",
+    "typescript": "~2.3.3"
+  }
+
+
+
+

Having a peer dependency means that your package needs a dependency that is the same exact dependency as the person installing your package

+
+
+
+
"peerDependencies": {
+    "package-123": "^2.7.18"
+  }
+
+
+
+

Optional dependencies are just that: optional. If they fail to install, Yarn will still say the install process was successful.

+
+
+
+
"optionalDependencies": {
+    "package-321": "^2.7.18"
+  }
+
+
+
+

Finally you can have bundled dependencies which are packages bundled together when publishing your package in a repository.

+
+
+
+
{
+  "bundledDependencies": [
+    "package-4"
+  ]
+}
+
+
+
+

Here is the link to an in-depth explanation of dependency types​.

+
+
+
+

== Scripts

+
+

Scripts are a great way of automating tasks related to your package, such as simple build processes or development tools.

+
+
+

For example:

+
+
+
+
{
+  "name": "exampleproject",
+  "version": "0.0.0",
+  "license": "MIT",
+  "scripts": {
+    "build-project": "node hello-world.js",
+  }
+
+
+
+

You can run that script by running the command yarn (run) script or npm run script, check the example below:

+
+
+
+
$ yarn (run) build-project    # run is optional
+$ npm run build-project
+
+
+
+

There are special reserved words for scripts, like preinstall, which will execute the script automatically +before the package you install is installed.

+
+
+

Check different uses for scripts in the following links:

+
+ +
+

Or you can go back to +[Content of package.json]​.

+
+
+
+

Managing dependencies

+
+

In order to manage dependencies we recommend using package managers in your projects.

+
+
+

A big reason is their usability. Adding or removing a package is really easy, and by doing so, the package manager updates the package.json and copies (or removes) the package in the needed location, with a single command.

+
+
+

Another reason, closely related to the first one, is reducing human error by automating the package management process.

+
+
+

Two of the package managers you can use in NodeJS projects are "yarn" and "npm". While you can use both, we encourage you to use only one of them while working on projects. Using both may lead to different dependencies between members of the team.

+
+
+
+

== npm

+
+

We’ll start by installing npm following this small guide here.

+
+
+

As stated on the web, npm comes bundled with NodeJS and must be updated after installing NodeJS; the instructions to update npm are written in the same guide you used earlier.

+
+
+

How npm works

+
+
+

In order to explain how npm works, let’s take a command as an example:

+
+
+
+
$ npm install @angular/material @angular/cdk
+
+
+
+

This command tells npm to look for the packages @angular/material and @angular/cdk in the npm registry, download and decompress them in the folder node_modules along with their own dependencies. Additionally, npm will update package.json and create a new file called package-lock.json.

+
+
+

After initialization and installing the first package there will be a new folder called node_modules in your project. This folder is where your packages are unzipped and stored, following a tree scheme.

+
+
+

Take into consideration that both npm and yarn need a package.json in the root of your project in order to work properly. If after creating your project you don’t have it, download the package.json again from the repository or you’ll have to start again.

+
+
+

Brief overview of commands

+
+
+

If we need to create a package.json from scratch, we can use the command init. This command asks the user for basic information about the project and creates a brand new package.json.

+
+
+
+
$ npm init
+
+
+
+

Install (or i) installs all modules listed as dependencies in package.json locally. You can also specify a package, and install that package. Install can also be used with the parameter -g, which tells npm to install the [Global package].

+
+
+
+
$ npm install
+$ npm i
+$ npm install Package
+
+
+
+ + + + + +
+ + +Earlier versions of npm did not add dependencies to package.json unless it was used with the flag --save, so npm install package would be npm install --save package, you have one example below. +
+
+
+
+
$ npm install --save Package
+
+
+
+

Npm needs flags in order to know what kind of dependency you want in your project, in npm you need to put the flag -D or --save-dev to install devDependencies, for more information consult the links at the end of this section.

+
+
+
+
$ npm install -D package
+$ npm install --save-dev package
+
+
+
+

+
+
+

The next command uninstalls the module you specified in the command.

+
+
+
+
$ npm uninstall Package
+
+
+
+

ls command shows us the dependencies like a nested tree, useful if you have few packages, not so useful when you need a lot of packages.

+
+
+
+
$ npm ls
+
+
+
+
+
npm@@VERSION@ /path/to/npm
+└─┬ init-package-json@0.0.4
+  └── promzard@0.1.5
+
+
+
+
example tree
+

We recommend you to learn more about npm commands in the following link, navigating to the section CLI commands.

+
+
+

About Package-lock.json

+
+
+

Package-lock.json describes the dependency tree resulting of using package.json and npm. +Whenever you update, add or remove a package, package-lock.json is deleted and redone with +the new dependencies.

+
+
+
+
 "@angular/animations": {
+      "version": "4.4.6",
+      "resolved": "https://registry.npmjs.org/@angular/animations/-/animations-4.4.6.tgz",
+      "integrity": "sha1-+mYYmaik44y3xYPHpcl85l1ZKjU=",
+      "requires": {
+        "tslib": "1.8.0"
+      }
+
+
+
+

This lock file is checked every time the command npm i (or npm install) is used without specifying a package, +in the case it exists and it’s valid, npm will install the exact tree that was generated, such that subsequent +installs are able to generate identical dependency trees.

+
+
+ + + + + +
+ + +It is not recommended to modify this file yourself. It’s better to leave its management to npm. +
+
+
+

More information is provided by the npm team at package-lock.json

+
+
+
+

== Yarn

+
+

Yarn is an alternative to npm, if you wish to install yarn follow the guide getting started with yarn and download the correct version for your operative system. NodeJS is also needed you can find it here.

+
+
+

Working with yarn

+
+
+

Yarn is used like npm, with small differences in syntax, for example npm install module is changed to yarn add module.

+
+
+
+
$ yarn add @covalent
+
+
+
+

This command is going to download the required packages, modify package.json, put the package in the folder node_modules and makes a new yarn.lock with the new dependency.

+
+
+

However, unlike npm, yarn maintains a cache with packages you download inside. You don’t need to download every file every time you do a general installation. This means installations faster than npm.

+
+
+

Similarly to npm, yarn creates and maintains its own lock file, called yarn.lock. Yarn.lock gives enough information about the project for the dependency tree to be reproduced.

+
+
+

yarn commands

+
+
+

Here we have a brief description of yarn’s most used commands:

+
+
+
+
$ yarn add Package
+$ yarn add --dev Package
+
+
+
+

Adds a package locally to use in your package. Adding the flags --dev or -D will add them to devDependencies instead of the default dependencies, if you need more information check the links at the end of the section.

+
+
+
+
$ yarn init
+
+
+
+

Initializes the development of a package.

+
+
+
+
$ yarn install
+
+
+
+

Installs all the dependencies defined in a package.json file, you can also write "yarn" to achieve the same effect.

+
+
+
+
$ yarn remove Package
+
+
+
+

You use it when you wish to remove a package from your project.

+
+
+
+
$ yarn global add Package
+
+
+
+

Installs the [Global package].

+
+
+

Please, refer to the documentation to learn more about yarn commands and their attributes: yarn commands

+
+
+

yarn.lock

+
+
+

This file has the same purpose as Package-lock.json, to guide the package manager, in this case yarn, +to install the dependency tree specified in yarn.lock.

+
+
+

Yarn.lock and package.json are +essential files when collaborating in a project with more co-workers and may be a +source of errors if programmers do not use the same manager.

+
+
+

Yarn.lock follows the same structure as package-lock.json, you can find an example of dependency below:

+
+
+
+
"@angular/animations@^4.2.4":
+  version "4.4.6"
+  resolved "https://registry.yarnpkg.com/@angular/animations/-/animations-4.4.6.tgz#fa661899a8a4e38cb7c583c7a5c97ce65d592a35"
+  dependencies:
+    tslib "^1.7.1"
+
+
+
+ + + + + +
+ + +As with package-lock.json, it’s strongly not advised to modify this file. Leave its management to yarn +
+
+
+

You can learn more about yarn.lock here: yarn.lock

+
+
+
+

== Global package

+
+

Global packages are packages installed in your operating system instead of your local project; +global packages are useful for developer tooling that is not part of any individual project but instead is used for local commands.

+
+
+

A good example of global package is @angular/cli, a command line interface for angular used in our projects. You can install +a global package in npm with "npm install -g package" and "yarn global add package" with yarn, you have a npm example below:

+
+
+
Listing 6. npm global package
+
+
npm install -g @angular/cli
+
+
+ +
+
+

== Package version

+
+

Dependencies are critical to the success of a package. You must be extra careful about +which version packages are using, one package in a different version may break your code.

+
+
+

Versioning in npm and yarn, follows a semantic called semver, following the logic +MAJOR.MINOR.PATCH, like for example, @angular/animations: 4.4.6.

+
+
+

Different versions

+
+
+

Sometimes, packages are installed with a different version from the one initially installed. +This happens because package.json also contains the range of versions we allow yarn or npm to +install or update to, example:

+
+
+
+
"@angular/animations": "^4.2.4"
+
+
+
+

And here the installed one:

+
+
+
+
 "@angular/animations": {
+      "version": "4.4.6",
+      "resolved": "https://registry.npmjs.org/@angular/animations/-/animations-4.4.6.tgz",
+      "integrity": "sha1-+mYYmaik44y3xYPHpcl85l1ZKjU=",
+      "requires": {
+        "tslib": "1.8.0"
+      }
+
+
+
+

As you can see, the version we initially added is 4.2.4, and the version finally installed after +a global installation of all packages, 4.4.6.

+
+
+

Installing packages without package-lock.json or yarn.lock using their respective packet managers, will always +end with npm or yarn installing the latest version allowed by package.json.

+
+
+

"@angular/animations": "^4.2.4" contains not only the version we added, but also the range we allow npm and yarn +to update. Here are some examples:

+
+
+
+
"@angular/animations": "<4.2.4"
+
+
+
+

The version installed must be lower than 4.2.4 .

+
+
+
+
"@angular/animations": ">=4.2.4"
+
+
+
+

The version installed must be greater than or equal to 4.2.4 .

+
+
+
+
"@angular/animations": "=4.2.4"
+
+
+
+

the version installed must be equal to 4.2.4 .

+
+
+
+
"@angular/animations": "^4.2.4"
+
+
+
+

The version installed cannot modify the first non zero digit, for example in this case +it cannot surpass 5.0.0 or be lower than 4.2.4 .

+
+
+

You can learn more about this in Versions

+
+
+
+

Problems you may encounter

+
+

If you can’t find package.json, you may have deleted the one you had previously, +which means you have to download the package.json from the repository. +In the case you are creating a new project you can create a new package.json. More information +in the links below. Click on Package.json if you come from that section.

+
+ +
+ + + + + +
+ + +Using npm install or yarn without package.json in your projects will +result in compilation errors. As we mentioned earlier, +Package.json contains essential information about your project. +
+
+
+

If you have package.json, but you don’t have package-lock.json or yarn.lock the use of +command "npm install" or "yarn" may result in a different dependency tree.

+
+
+

If you are trying to import a module and Visual Studio Code is not able to find it, +this is usually caused by an error adding the package to the project; try to add the module again with yarn or npm, +and restart Visual Studio Code.

+
+
+

Be careful with the semantic versioning inside your package.json of the packages, +or you may find a new update on one of your dependencies breaking your code.

+
+
+ + + + + +
+ + +In the following link +there is a solution to a problematic update to one package. +
+
+
+

A list of common errors of npm can be found in: npm errors

+
+
+
+

== Recommendations

+
+

Use yarn or npm in your project, reach an agreement with your team in order to choose one, this will avoid +undesired situations like forgetting to upload an updated yarn.lock or package-lock.json. +Be sure to have the latest version of your project when possible.

+
+
+ + + + + +
+ + +Pull your project every time it’s updated. Erase your node_modules folder and reinstall all +dependencies. This assures you to be working with the same dependencies your team has. +
+
+
+

AD Center recommends the use of yarn.

+
+ +
+
+

Yarn 2

+
+

Yarn v2 is a very different software from the v1. The following list contains the main new features:

+
+ +
+

Please, read them carefully to decide if your current project is suitable to use Yarn 2 as package manager.

+
+
+ + + + + +
+ + +Some features are still experimental, so please do not use them in production environments. +
+
+
+

More info at https://yarnpkg.com/

+
+
+
+

Global Install

+
+

Installing Yarn 2.x globally is discouraged as Yarn team is moving to a per-project install strategy. We advise you to keep Yarn 1.x (Classic) as your global binary by installing it via the instructions you can find here.

+
+
+

Once you’ve followed the instructions (running yarn --version from your home directory should yield something like 1.22.0), go to the next section to see how to enable Yarn 2 on your project.

+
+
+
+

Per-project install

+
+

Follow these instructions to update your current devon4ng project to Yarn 2:

+
+
+
    +
  1. +

    Follow the global install instructions.

    +
  2. +
  3. +

    Move into your project folder:

    +
    +
    +
    cd ~/path/to/project
    +
    +
    +
  4. +
  5. +

    Run the following command:

    +
    +
    +
    yarn policies set-version berry # below v1.22
    +yarn set version berry          # on v1.22+
    +
    +
    +
  6. +
  7. +

    Since Angular CLI still is not fully supported with the new Yarn architecture as it is not compatible with PnP it is necessary to include the node-modules plugin adding the following line in the .yarnrc.yml file:

    +
    +
    +
    nodeLinker: node-modules
    +
    +
    +
  8. +
  9. +

    Commit the .yarn and .yarnrc.yml changes

    +
  10. +
  11. +

    Run again yarn install.

    +
  12. +
+
+
+ + + + + +
+ + +For more advanced migration topics please refer to https://yarnpkg.com/advanced/migration +
+
+
+
+

Which files should be added to gitignore file?

+
+

If you’re using Zero-Installs:

+
+
+
+
.yarn/*
+!.yarn/cache
+!.yarn/releases
+!.yarn/plugins
+
+
+
+

If you’re not using Zero-Installs:

+
+
+
+
.yarn/*
+!.yarn/releases
+!.yarn/plugins
+.pnp.*
+
+
+ +
+
+
+
+

Angular

+
+ +
+

Accessibility

+
+

Multiple studies suggest that around 15-20% of the population are living with a disability of some kind. In comparison, that number is higher than any single browser demographic currently, other than Chrome. Not considering those users when developing an application means excluding a large number of people from being able to use it comfortably or at all.

+
+
+

Some people are unable to use a mouse, view a screen, see low-contrast text, or hear dialogue or music, and some people have difficulty understanding complex language. These users need support such as keyboard support, screen reader support, high-contrast text, captions and transcripts, and plain-language support. A disability may range from permanent to situational.

+
+
+
+

Key Concerns of Accessible Web Applications

+
+
    +
  • +

    Semantic Markup - Allows the application to be understood on a more general level rather than just details of whats being rendered

    +
  • +
  • +

    Keyboard Accessibility - Applications must still be usable when using only a keyboard

    +
  • +
  • +

    Visual Assistance - color contrast, focus of elements and text representations of audio and events

    +
  • +
+
+
+
+

Semantic Markup

+
+

If you’re creating custom element directives, Web Components or HTML in general, use native elements wherever possible to utilize built-in events and properties. Alternatively, use ARIA to communicate semantic meaning.

+
+
+

HTML tags have attributes that provide extra context on what’s being displayed on the browser. For example, the <img> tag’s alt attribute lets the reader know what is being shown using a short description. However, native tags don’t cover all cases. This is where ARIA fits in. ARIA attributes can provide context on what roles specific elements have in the application or on how elements within the document relate to each other.

+
+
+

A modal component can be given the role of dialog or alertdialog to let the browser know that the component is acting as a modal. The modal component template can use the ARIA attributes aria-labelledby and aria-describedby to describe to readers what the title and purpose of the modal is.

+
+
+
+
@Component({
+    selector: 'ngc2-app',
+    template: `
+      <ngc2-notification-button
+        message="Hello!"
+        label="Greeting"
+        role="button">
+      </ngc2-notification-button>
+      <ngc2-modal
+        [title]="modal.title"
+        [description]="modal.description"
+        [visible]="modal.visible"
+        (close)="modal.close()">
+      </ngc2-modal>
+    `
+})
+export class AppComponent {
+  constructor(private modal: ModalService) { }
+}
+
+
+
+

notification-button.component.ts

+
+
+
+
@Component({
+  selector: 'ngc2-modal',
+  template: `
+    <div
+      role="dialog"
+      aria-labelledby="modal-title"
+      aria-describedby="modal-description">
+      <div id="modal-title">{{title}}</div>
+      <p id="modal-description">{{description}}</p>
+      <button (click)="close.emit()">OK</button>
+    </div>
+  `
+})
+export class ModalComponent {
+  ...
+}
+
+
+
+
+

Keyboard Accessibility

+
+

Keyboard accessibility is the ability of your application to be interacted with using just a keyboard. The more streamlined the site can be used this way, the more keyboard accessible it is. Keyboard accessibility is one of the largest aspects of web accessibility since it targets:

+
+
+
    +
  • +

    those with motor disabilities who can’t use a mouse

    +
  • +
  • +

    users who rely on screen readers and other assistive technology, which require keyboard navigation

    +
  • +
  • +

    those who prefer not to use a mouse

    +
  • +
+
+
+
+

== Focus

+
+

Keyboard interaction is driven by something called focus. In web applications, only one element on a document has focus at a time, and a keypress will activate whatever function is bound to that element. +The focused element's border can be styled with CSS using the outline property, but it should not be removed. Elements can also be styled using the :focus pseudo-selector.

+
+
+
+

== Tabbing

+
+

The most common way of moving focus along the page is through the tab key. Elements will be traversed in the order they appear in the document outline - so that order must be carefully considered during development. +There is a way to change the default behavior or tab order. This can be done through the tabindex attribute. The tabindex can be given the values: +* less than zero - to let readers know that an element should be focusable but not keyboard accessible +* 0 - to let readers know that the element should be accessible by keyboard +* greater than zero - to let readers know the order in which the focusable element should be reached using the keyboard. Order is calculated from lowest to highest.

+
+
+
+

== Transitions

+
+

The majority of transitions that happen in an Angular application will not involve a page reload. This means that developers will need to carefully manage what happens to focus in these cases.

+
+
+

For example:

+
+
+
+
@Component({
+  selector: 'ngc2-modal',
+  template: `
+    <div
+      role="dialog"
+      aria-labelledby="modal-title"
+      aria-describedby="modal-description">
+      <div id="modal-title">{{title}}</div>
+      <p id="modal-description">{{description}}</p>
+      <button (click)="close.emit()">OK</button>
+    </div>
+  `,
+})
+export class ModalComponent {
+  constructor(private modal: ModalService, private element: ElementRef) { }
+
+  ngOnInit() {
+    this.modal.visible$.subscribe(visible => {
+      if(visible) {
+        setTimeout(() => {
+          this.element.nativeElement.querySelector('button').focus();
+        }, 0);
+      }
+    })
+  }
+}
+
+
+
+
+

Visual Assistance

+
+

One large category of disability is visual impairment. This includes not just the blind, but those who are color blind or partially sighted, and require some additional consideration.

+
+
+
+

Color Contrast

+
+

When choosing colors for text or elements on a website, the contrast between them needs to be considered. For WCAG 2.0 AA, this means that the contrast ratio for text or visual representations of text needs to be at least 4.5:1. There are tools online to measure the contrast ratio, such as this color contrast checker from WebAIM, or it can be checked using automated tests.

+
+
+
+

Visual Information

+
+

Color can help a user’s understanding of information, but it should never be the only way to convey information to a user. For example, a user with red/green color-blindness may have trouble discerning at a glance if an alert is informing them of success or failure.

+
+
+
+

Audiovisual Media

+
+

Audiovisual elements in the application such as video, sound effects or audio (that is, podcasts) need related textual representations such as transcripts, captions or descriptions. They also should never auto-play and playback controls should be provided to the user.

+
+
+
+

Accessibility with Angular Material

+
+

The a11y package provides a number of tools to improve accessibility. Import

+
+
+
+
import { A11yModule } from '@angular/cdk/a11y';
+
+
+
+
+

ListKeyManager

+
+

ListKeyManager manages the active option in a list of items based on keyboard interaction. It is intended to be used with components that correspond to a role="menu" or role="listbox" pattern. Any component that uses a ListKeyManager will generally do three things:

+
+
+
    +
  • +

    Create a @ViewChildren query for the options being managed.

    +
  • +
  • +

    Initialize the ListKeyManager, passing in the options.

    +
  • +
  • +

    Forward keyboard events from the managed component to the ListKeyManager.

    +
  • +
+
+
+

Each option should implement the ListKeyManagerOption interface:

+
+
+
+
interface ListKeyManagerOption {
+  disabled?: boolean;
+  getLabel?(): string;
+}
+
+
+
+
+

== Types of ListKeyManager

+
+

There are two varieties of ListKeyManager, FocusKeyManager and ActiveDescendantKeyManager.

+
+
+
+

FocusKeyManager

+
+

Used when options will directly receive browser focus. Each item managed must implement the FocusableOption interface:

+
+
+
+
interface FocusableOption extends ListKeyManagerOption {
+  focus(): void;
+}
+
+
+
+
+

ActiveDescendantKeyManager

+
+

Used when options will be marked as active via aria-activedescendant. Each item managed must implement the Highlightable interface:

+
+
+
+
interface Highlightable extends ListKeyManagerOption {
+  setActiveStyles(): void;
+  setInactiveStyles(): void;
+}
+
+
+
+

Each item must also have an ID bound to the listbox’s or menu’s aria-activedescendant.

+
+
+
+

FocusTrap

+
+

The cdkTrapFocus directive traps Tab key focus within an element. This is intended to be used to create an accessible experience for components like modal dialogs, where focus must be constrained. This directive is declared in A11yModule.

+
+
+

This directive will not prevent focus from moving out of the trapped region due to mouse interaction.

+
+
+

For example:

+
+
+
+
<div class="my-inner-dialog-content" cdkTrapFocus>
+  <!-- Tab and Shift + Tab will not leave this element. -->
+</div>
+
+
+
+
+

Regions

+
+

Regions can be declared explicitly with an initial focus element by using the cdkFocusRegionStart, cdkFocusRegionEnd and cdkFocusInitial DOM attributes. When using the tab key, focus will move through this region and wrap around on either end.

+
+
+

For example:

+
+
+
+
<a mat-list-item routerLink cdkFocusRegionStart>Focus region start</a>
+<a mat-list-item routerLink>Link</a>
+<a mat-list-item routerLink cdkFocusInitial>Initially focused</a>
+<a mat-list-item routerLink cdkFocusRegionEnd>Focus region end</a>
+
+
+
+
+

InteractivityChecker

+
+

InteractivityChecker is used to check the interactivity of an element, capturing disabled, visible, tabbable, and focusable states for accessibility purposes.

+
+
+
+

LiveAnnouncer

+
+

LiveAnnouncer is used to announce messages for screen-reader users using an aria-live region.

+
+
+

For example:

+
+
+
+
@Component({...})
+export class MyComponent {
+
+ constructor(liveAnnouncer: LiveAnnouncer) {
+   liveAnnouncer.announce("Hey Google");
+ }
+}
+
+
+
+
+

API reference for Angular CDK a11y

+ + +
+
+

Angular Elements

+ +
+
+

What are Angular Elements?

+
+

Angular elements are Angular components packaged as custom elements, a web standard for defining new HTML elements in a framework-agnostic way.

+
+
+

Custom elements are a Web Platform feature currently supported by Chrome, Firefox, Opera, and Safari, and available in other browsers through Polyfills. A custom element extends HTML by allowing you to define a tag whose content is created and controlled by JavaScript code. The browser maintains a CustomElementRegistry of defined custom elements (also called Web Components), which maps an instantiable JavaScript class to an HTML tag.

+
+
+
+

Why use Angular Elements?

+
+

Angular Elements allows Angular to work with different frameworks by using input and output elements. This allows Angular to work with many different frameworks if needed. This is an ideal situation if a slow transformation of an application to Angular is needed or some Angular needs to be added to other web applications (for example ASP.NET, JSP, etc.).

+
+
+
+

Negative points about Elements

+
+

Angular Elements is really powerful but, since the transition between views is going to be handled by another framework or HTML/JavaScript, using the Angular Router is not possible; the view transitions have to be handled manually. This fact also eliminates the possibility of just porting an application completely.

+
+
+
+

How to use Angular Elements?

+
+

In a generalized way, a simple Angular component can be transformed into an Angular Element with these steps:

+
+
+
+

Installing Angular Elements

+
+

The first step is to install the library using our preferred package manager:

+
+
+
+

== NPM

+
+
+
npm install @angular/elements
+
+
+
+
+

== YARN

+
+
+
yarn add @angular/elements
+
+
+
+
+

Preparing the components in the modules

+
+

Inside the app.module.ts, in addition to the normal declaration of the components inside declarations, the modules inside imports and the services inside providers, the components need to be added in entryComponents. If there are components that have their own module, the same logic is going to be applied for them, only adding in the app.module.ts the components that do not have their own module. Here is an example of this:

+
+
+
+
....
+@NgModule({
+  declarations: [
+    DishFormComponent,
+    DishViewComponent
+  ],
+  imports: [
+    CoreModule,  // Module containing Angular Materials
+    FormsModule
+  ],
+  entryComponents: [
+    DishFormComponent,
+    DishViewComponent
+  ],
+  providers: [DishShareService]
+})
+....
+
+
+
+

After that is done, the constructor of the module is going to be modified to use injector and bootstrap the application defining the components. This is going to allow the Angular Element to get the injections and to define a component tag that will be used later:

+
+
+
+
....
+})
+export class AppModule {
+  constructor(private injector: Injector) {
+
+  }
+
+  ngDoBootstrap() {
+    const el = createCustomElement(DishFormComponent, {injector: this.injector});
+    customElements.define('dish-form', el);
+
+    const elView = createCustomElement(DishViewComponent, {injector: this.injector});
+    customElements.define('dish-view', elView);
+  }
+}
+....
+
+
+
+
+

A component example

+
+

In order to be able to use a component, @Input() and @Output() variables are used. These variables are going to be the ones that will allow the Angular Element to communicate with the framework/JavaScript:

+
+
+

Component html

+
+
+
+
<mat-card>
+    <mat-grid-list cols="1" rowHeight="100px" rowWidth="50%">
+				<mat-grid-tile colspan="1" rowspan="1">
+					<span>{{ platename }}</span>
+				</mat-grid-tile>
+				<form (ngSubmit)="onSubmit(dishForm)" #dishForm="ngForm">
+					<mat-grid-tile colspan="1" rowspan="1">
+						<mat-form-field>
+							<input matInput placeholder="Name" name="name" [(ngModel)]="dish.name">
+						</mat-form-field>
+					</mat-grid-tile>
+					<mat-grid-tile colspan="1" rowspan="1">
+						<mat-form-field>
+							<textarea matInput placeholder="Description" name="description" [(ngModel)]="dish.description"></textarea>
+						</mat-form-field>
+					</mat-grid-tile>
+					<mat-grid-tile colspan="1" rowspan="1">
+						<button mat-raised-button color="primary" type="submit">Submit</button>
+					</mat-grid-tile>
+				</form>
+		</mat-grid-list>
+</mat-card>
+
+
+
+

Component ts

+
+
+
+
@Component({
+  templateUrl: './dish-form.component.html',
+  styleUrls: ['./dish-form.component.scss']
+})
+export class DishFormComponent implements OnInit {
+
+  @Input() platename;
+
+  @Input() platedescription;
+
+  @Output()
+  submitDishEvent = new EventEmitter();
+
+  submitted = false;
+  dish = {name: '', description: ''};
+
+  constructor(public dishShareService: DishShareService) { }
+
+  ngOnInit() {
+    this.dish.name = this.platename;
+    this.dish.description = this.platedescription;
+  }
+
+  onSubmit(dishForm: NgForm): void {
+    this.dishShareService.createDish(dishForm.value.name, dishForm.value.description);
+    this.submitDishEvent.emit('dishSubmited');
+  }
+
+}
+
+
+
+

In this file there are definitions of multiple variables that will be used as input and output. Since the input variables are going to be used directly by html, only lowercase and underscore naming strategies can be used for them. In onSubmit(dishForm: NgForm) a service is used to pass these variables to another component. Finally, as a last thing, the selector inside @Component has been removed since a tag that will be used dynamically was already defined in the last step.

+
+
+
+

Solving the error

+
+

In order to be able to use this Angular Element, a Polyfills/Browser support related error needs to be solved. This error can be solved in two ways:

+
+
+
+

== Changing the target

+
+

One solution is to change the target in tsconfig.json to es2015. This might not be doable for every application since maybe a specific target is required.

+
+
+
+

== Installing Polyfills

+
+

Another solution is to use a polyfill. In order to do so, the library is going to be installed with a package manager:

+
+
+

Yarn

+
+
+
+
yarn add @webcomponents/webcomponentsjs
+
+
+
+

Npm

+
+
+
+
npm install @webcomponents/webcomponentsjs
+
+
+
+

After the packet manager has finished, inside the src folder a new file polyfills.ts is found. To solve the error, importing the corresponding adapter (custom-elements-es5-adapter.js) is necessary:

+
+
+
+
....
+/***************************************************************************************************
+ * APPLICATION IMPORTS
+ */
+
+import '@webcomponents/webcomponentsjs/custom-elements-es5-adapter.js';
+....
+
+
+
+

If you want to learn more about polyfills in angular you can do it here

+
+
+
+

Building the Angular Element

+
+

First, before building the Angular Element, every element inside the app component except the module needs to be removed. After that, a bash script is created in the root folder. This script will allow every necessary file to be put into a single JS file.

+
+
+
+
ng build "projectName" --configuration production --output-hashing=none && cat dist/"projectName"/runtime.js dist/"projectName"/polyfills.js dist/"projectName"/scripts.js dist/"projectName"/main.js > ./dist/"projectName"/"nameWantedAngularElement".js
+
+
+
+

After executing the bash script, it will generate inside the path dist/"projectName" (or dist/apps/projectname in a Nx workspace) a JS file named "nameWantedAngularElement".js and a css file.

+
+
+
+ +
+

The library ngx-build-plus allows to add different options when building. In addition, it solves some errors that will occur when trying to use multiple angular elements in an application. In order to use it, yarn or npm can be used:

+
+
+

Yarn

+
+
+
+
yarn add ngx-build-plus
+
+
+
+

Npm

+
+
+
+
npm install ngx-build-plus
+
+
+
+

If you want to add it to a specific sub project in your projects folder, use the --project:

+
+
+
+
.... ngx-build-plus --project "project-name"
+
+
+
+

Using this library and the following command, an isolated Angular Element which won’t have conflicts with others can be generated. This Angular Element will not have a polyfill, so the project where we use it will need to include a polyfill with the Angular Element requirements.

+
+
+
+
ng build "projectName" --output-hashing none --single-bundle true --configuration production --bundle-styles false
+
+
+
+

This command will generate three things:

+
+
+
    +
  1. +

    The main JS bundle

    +
  2. +
  3. +

    The script JS

    +
  4. +
  5. +

    The css

    +
  6. +
+
+
+

These files will be used later instead of the single JS generated in the last step.

+
+
+
+

== == Extra parameters

+
+

Here are some extra useful parameters that ngx-build-plus provides:

+
+
+
    +
  • +

    --keep-polyfills: This parameter is going to allow us to keep the polyfills. This needs to be used with caution, avoiding using multiple different polyfills that could cause an error is necessary.

    +
  • +
  • +

    --extraWebpackConfig webpack.extra.js: This parameter allows us to create a JavaScript file inside our Angular Elements project with the name of different libraries. Using webpack these libraries will not be included in the Angular Element. This is useful to lower the size of our Angular Element by removing libraries shared. Example:

    +
  • +
+
+
+
+
const webpack = require('webpack');
+
+module.exports = {
+    "externals": {
+        "rxjs": "rxjs",
+        "@angular/core": "ng.core",
+        "@angular/common": "ng.common",
+        "@angular/common/http": "ng.common.http",
+        "@angular/platform-browser": "ng.platformBrowser",
+        "@angular/platform-browser-dynamic": "ng.platformBrowserDynamic",
+        "@angular/compiler": "ng.compiler",
+        "@angular/elements": "ng.elements",
+        "@angular/router": "ng.router",
+        "@angular/forms": "ng.forms"
+    }
+}
+
+
+
+
+

==

+
+
+
  If some libraries are excluded from the `Angular Element` you will need to add the bundled UMD files of those libraries manually.
+== ==
+
+
+
+
+

Using the Angular Element

+
+

The Angular Element that got generated in the last step can be used in almost every framework. In this case, the Angular Element is going to be used in html:

+
+
+
Listing 7. Sample index.html version without ngx-build-plus
+
+
<html>
+    <head>
+        <link rel="stylesheet" href="styles.css">
+    </head>
+    <body>
+        <div id="container">
+
+        </div>
+        <!--Use of the element non dynamically-->
+        <!--<plate-form platename="test" platedescription="test"></plate-form>-->
+        <script src="./devon4ngAngularElements.js"> </script>
+        <script>
+                var elContainer = document.getElementById('container');
+                var el= document.createElement('dish-form');
+                el.setAttribute('platename','test');
+                el.setAttribute('platedescription','test');
+                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+                elContainer.appendChild(el);
+        </script>
+    </body>
+</html>
+
+
+
+
Listing 8. Sample index.html version with ngx-build-plus
+
+
<html>
+    <head>
+        <link rel="stylesheet" href="styles.css">
+    </head>
+    <body>
+        <div id="container">
+
+        </div>
+        <!--Use of the element non dynamically-->
+        <!--<plate-form platename="test" platedescription="test"></plate-form>-->
+         <script src="./polyfills.js"> </script> <!-- Created using --keep-polyfills options -->
+        <script src="./scripts.js"> </script>
+         <script src="./main.js"> </script>
+        <script>
+                var elContainer = document.getElementById('container');
+                var el= document.createElement('dish-form');
+                el.setAttribute('platename','test');
+                el.setAttribute('platedescription','test');
+                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+                elContainer.appendChild(el);
+        </script>
+    </body>
+</html>
+
+
+
+

In this html, the css generated in the last step is going to be imported inside the <head> and then the JavaScript element is going to be imported at the end of the body. After that is done, there are two uses of Angular Elements in the html, one directly with use of the @Input() variables as parameters commented in the html:

+
+
+
+
....
+        <!--Use of the element non dynamically-->
+        <!--<plate-form platename="test" platedescription="test"></plate-form>-->
+....
+
+
+
+

and one dynamically inside the script:

+
+
+
+
....
+        <script>
+                var elContainer = document.getElementById('container');
+                var el= document.createElement('dish-form');
+                el.setAttribute('platename','test');
+                el.setAttribute('platedescription','test');
+                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+                elContainer.appendChild(el);
+        </script>
+....
+
+
+
+

This JavaScript is an example of how to dynamically create an Angular Element, inserting attributes to fill our @Input() variables and listening to the @Output() that was defined earlier. This is done with:

+
+
+
+
                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+
+
+
+

This allows JavaScript to hook with the @Output() event emitter that was defined. When this event gets called, another component that was defined gets inserted dynamically.

+
+
+
+

Angular Element within another Angular project

+
+

In order to use an Angular Element within another Angular project the following steps need to be followed:

+
+
+
+

Copy bundled script and css to resources

+
+

First copy the generated .js and .css inside assets in the corresponding folder.

+
+
+
+

Add bundled script to angular.json

+
+

Inside angular.json both of the files that were copied in the last step are going to be included. This will be done both in test and in build. Including them in test will allow unit tests to be performed.

+
+
+
+
{
+....
+  "architect": {
+    ....
+    "build": {
+      ....
+      "styles": [
+        ....
+          "src/assets/css/devon4ngAngularElements.css"
+        ....
+      ]
+      ....
+      "scripts": [
+        "src/assets/js/devon4ngAngularElements.js"
+      ]
+      ....
+    }
+    ....
+    "test": {
+      ....
+      "styles": [
+        ....
+          "src/assets/css/devon4ngAngularElements.css"
+        ....
+      ]
+      ....
+      "scripts": [
+        "src/assets/js/devon4ngAngularElements.js"
+      ]
+      ....
+    }
+  }
+}
+
+
+
+

By declaring the files in the angular.json angular will take care of including them in a proper way.

+
+
+
+

==

+
+
+
  If you are using Nx, the configuration file `angular.json` might be named as `workspace.json`, depending on how you had setup the workspace. The structure of the file remains similar though.
+== ==
+
+
+
+
+

Using Angular Element

+
+

There are two ways that Angular Element can be used:

+
+
+
+

== Create component dynamically

+
+

In order to add the component in a dynamic way, first adding a container is necessary:

+
+
+

app.component.html

+
+
+
+
....
+<div id="container">
+</div>
+....
+
+
+
+

With this container created, inside the app.component.ts a method is going to be created. This method is going to find the container, create the dynamic element and append it into the container.

+
+
+

app.component.ts

+
+
+
+
export class AppComponent implements OnInit {
+  ....
+  ngOnInit(): void {
+    this.createComponent();
+  }
+  ....
+  createComponent(): void {
+    const container = document.getElementById('container');
+    const component = document.createElement('dish-form');
+    container.appendChild(component);
+  }
+  ....
+
+
+
+
+

== Using it directly

+
+

In order to use it directly on the templates, in the app.module.ts the CUSTOM_ELEMENTS_SCHEMA needs to be added:

+
+
+
+
....
+import { NgModule, CUSTOM_ELEMENTS_SCHEMA } from '@angular/core';
+....
+@NgModule({
+  ....
+  schemas: [ CUSTOM_ELEMENTS_SCHEMA ],
+
+
+
+

This is going to allow the use of the Angular Element in the templates directly:

+
+
+

app.component.html

+
+
+
+
....
+<div id="container">
+  <dish-form></dish-form>
+</div>
+
+
+
+

You can find a working example of Angular Elements in our devon4ts-samples repo by referring the samples named angular-elements and angular-elements-test.

+
+ +
+
+

Angular Lazy loading

+
+

When the development of an application starts, it just contains a small set of features so the app usually loads fast. However, as new features are added, the overall application size grows up and its loading speed decreases. It is in this context where Lazy loading finds its place. +Lazy loading is a design pattern that defers initialization of objects until it is needed, so, for example, users that just access to a website’s home page do not need to have other areas loaded. +Angular handles lazy loading through the routing module which redirects to requested pages. Those pages can be loaded at start or on demand.

+
+
+
+

An example with Angular

+
+

To explain how lazy loading is implemented using angular, a basic sample app is going to be developed. This app will consist in a window named "level 1" that contains two buttons that redirects to other windows in a "second level". It is a simple example, but useful to understand the relation between angular modules and lazy loading.

+
+
+
+Levels app structure +
+
Figure 9. Levels app structure.
+
+
+

This graphic shows that modules act as gates to access the components "inside" them.

+
+
+

Because the objective of this guide is related mainly with logic, the html structure and SCSS styles are less relevant, but the complete code can be found as a sample here.

+
+
+
+

Implementation

+
+

First write in a console ng new level-app --routing, to generate a new project called level-app including an app-routing.module.ts file (--routing flag). If you are using Nx, the command would be nx generate @nrwl/angular:app level-app --routing in your Nx workspace.

+
+
+

In the file app.component.html delete all the content except the router-outlet tag.

+
+
+
Listing 9. File app.component.html
+
+
<router-outlet></router-outlet>
+
+
+
+

The next steps consist of creating feature modules.

+
+
+
    +
  • +

    run ng generate module first --routing to generate a module named first.

    +
  • +
  • +

    run ng generate module first/second-left --routing to generate a module named second-left under first.

    +
  • +
  • +

    run ng generate module first/second-right --routing to generate a module second-right under first.

    +
  • +
  • +

    run ng generate component first/first to generate a component named first inside the module first.

    +
  • +
  • +

    run ng generate component first/second-left/content to generate a component content inside the module second-left.

    +
  • +
  • +

    run ng generate component first/second-right/content to generate a component content inside the module second-right.

    +
  • +
+
+
+
+

==

+
+
+
  If you are using Nx, you have to specify the project name (level-app) along with the --project flag. For example, command for generating the first module will be `ng generate module first --project=level-app --routing`
+== ==
+
+
+
+

To move between components we have to configure the routes used:

+
+
+

In app-routing.module.ts add a path 'first' to FirstComponent and a redirection from '' to 'first'.

+
+
+
Listing 10. File app-routing.module.ts.
+
+
...
+import { FirstComponent } from './first/first/first.component';
+
+const routes: Routes = [
+  {
+    path: 'first',
+    component: FirstComponent
+  },
+  {
+    path: '',
+    redirectTo: 'first',
+    pathMatch: 'full',
+  },
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule],
+})
+export class AppRoutingModule {}
+
+
+
+

In app.module.ts import the module which includes FirstComponent.

+
+
+
Listing 11. File app.module.ts
+
+
....
+import { FirstModule } from './first/first.module';
+
+@NgModule({
+  ...
+  imports: [
+    ....
+    FirstModule
+  ],
+  ...
+})
+export class AppModule { }
+
+
+
+

In first-routing.module.ts add routes that direct to the content of SecondRightModule and SecondLeftModule. The content of both modules have the same name so, in order to avoid conflicts the name of the components are going to be changed using as ( original-name as new-name).

+
+
+
Listing 12. File first-routing.module.ts
+
+
...
+import { ContentComponent as ContentLeft} from './second-left/content/content.component';
+import { ContentComponent as ContentRight} from './second-right/content/content.component';
+import { FirstComponent } from './first/first.component';
+
+const routes: Routes = [
+  {
+    path: '',
+    component: FirstComponent
+  },
+  {
+    path: 'first/second-left',
+    component: ContentLeft
+  },
+  {
+    path: 'first/second-right',
+    component: ContentRight
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule]
+})
+export class FirstRoutingModule { }
+
+
+
+

In first.module.ts import SecondLeftModule and SecondRightModule.

+
+
+
Listing 13. File first.module.ts
+
+
...
+import { SecondLeftModule } from './second-left/second-left.module';
+import { SecondRightModule } from './second-right/second-right.module';
+
+@NgModule({
+  ...
+  imports: [
+    ...
+    SecondLeftModule,
+    SecondRightModule,
+  ]
+})
+export class FirstModule { }
+
+
+
+

Using the current configuration, we have a project that loads all the modules in an eager way. Run ng serve (with --project=level-app in an Nx workspace) to see what happens.

+
+
+

First, during the compilation we can see that just a main file is built.

+
+
+
+Compile eager +
+
Figure 10. Compile eager.
+
+
+

If we go to http://localhost:4200/first and open developer options (F12 on Chrome), it is found that a document named "first" is loaded.

+
+
+
+First level eager +
+
Figure 11. First level eager.
+
+
+

If we click on [Go to right module] a second level module opens, but there is no 'second-right' document.

+
+
+
+Second level right eager +
+
Figure 12. Second level right eager.
+
+
+

But, typing the URL directly will load 'second-right' but no 'first', even if we click on [Go back]

+
+
+
+Second level right eager +
+
Figure 13. Second level right eager direct URL.
+
+
+

Modifying an angular application to load its modules lazily is easy, you have to change the routing configuration of the desired module (for example FirstModule).

+
+
+
Listing 14. File app-routing.module.ts.
+
+
const routes: Routes = [
+  {
+    path: 'first',
+    loadChildren: () => import('./first/first.module').then(m => m.FirstModule),
+  },
+  {
+    path: '',
+    redirectTo: 'first',
+    pathMatch: 'full',
+  },
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule],
+})
+export class AppRoutingModule {}
+
+
+
+

Notice that instead of loading a component, you dynamically import it in a loadChildren attribute because modules act as gates to access components "inside" them. Updating the app to load lazily has four consequences:

+
+
+
    +
  1. +

    No component attribute.

    +
  2. +
  3. +

    No import of FirstComponent.

    +
  4. +
  5. +

    FirstModule import has to be removed from the imports array at app.module.ts.

    +
  6. +
  7. +

    Change of context.

    +
  8. +
+
+
+

If we check first-routing.module.ts again, we can see that the path for ContentLeft and ContentRight is set to 'first/second-left' and 'first/second-right' respectively, so writing http://localhost:4200/first/second-left will redirect us to ContentLeft. However, after loading a module with loadChildren setting the path to 'second-left' and 'second-right' is enough because it acquires the context set by AppRoutingModule.

+
+
+
Listing 15. File first-routing.module.ts
+
+
const routes: Routes = [
+  {
+    path: '',
+    component: FirstComponent
+  },
+  {
+    path: 'second-left',
+    component: ContentLeft
+  },
+  {
+    path: 'second-right',
+    component: ContentRight
+  }
+];
+
+
+
+

If we go to 'first' then FirstModule is situated in '/first' but also its children ContentLeft and ContentRight, so it is not necessary to write in their path 'first/second-left' and 'first/second-right', because that will situate the components on 'first/first/second-left' and 'first/first/second-right'.

+
+
+
+First level wrong path +
+
Figure 14. First level lazy wrong path.
+
+
+

When we compile an app with lazy loaded modules, files containing them will be generated

+
+
+
+First level lazy compilation +
+
Figure 15. First level lazy compilation.
+
+
+

And if we go to developer tools → network, we can find those modules loaded (if they are needed).

+
+
+
+First level lazy +
+
Figure 16. First level lazy.
+
+
+

To load the component ContentComponent of SecondLeftModule lazily, we have to load SecondLeftModule as a children of FirstModule:

+
+
+
    +
  • +

    Change component to loadChildren and reference SecondLeftModule.

    +
  • +
+
+
+
Listing 16. File first-routing.module.ts.
+
+
const routes: Routes = [
+  {
+    path: '',
+    component: FirstComponent
+  },
+  {
+    path: 'second-left',
+    loadChildren: () => import('./second-left/second-left.module').then(m => m.SecondLeftModule),
+  },
+  {
+    path: 'second-right',
+    component: ContentRight
+  }
+];
+
+
+
+
    +
  • +

    Remove SecondLeftModule at first.component.ts

    +
  • +
  • +

    Route the components inside SecondLeftModule. Without this step nothing would be displayed.

    +
  • +
+
+
+
Listing 17. File second-left-routing.module.ts.
+
+
...
+import { ContentComponent } from './content/content.component';
+
+const routes: Routes = [
+  {
+    path: '',
+    component: ContentComponent
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule]
+})
+export class SecondLeftRoutingModule { }
+
+
+
+
    +
  • +

    run ng serve to generate files containing the lazy modules.

    +
  • +
+
+
+
+Second level lazy +
+
Figure 17. Second level lazy loading compilation.
+
+
+

Clicking on [Go to left module] triggers the load of SecondLeftModule.

+
+
+
+Second level lazy network +
+
Figure 18. Second level lazy loading network.
+
+
+
+

Conclusion

+
+

Lazy loading is a pattern useful when new features are added, these features are usually identified as modules which can be loaded only if needed as shown in this document, reducing the time spent loading an application.

+
+ +
+
+

Angular Library

+
+

Angular CLI provides us with methods that allow the creation of a library. After that, using a package manager (either npm or yarn) the library can be built and packed, which will later allow us to install/publish it.

+
+
+
+

What’s a library?

+
+

From Wikipedia: a library is a collection of non-volatile resources used by computer programs, often for software development. These may include configuration data, documentation, help data, message templates, pre-written code and subroutines, classes, values or type specifications.

+
+
+
+

How to build a library

+
+

In this section, a library is going to be built step by step. Please note, we will be explaining the steps using both Angular CLI and Nx CLI. You are free to choose either one for your development.

+
+
+
+

1. Creating an empty application

+
+

First, using Angular CLI we are going to generate an empty application which will later be filled with the generated library. In order to do so, Angular CLI allows us to add to ng new "application-name" an option (--create-application). This option is going to tell Angular CLI not to create the initial app project. This is convenient since a library is going to be generated in later steps. Using this command ng new "application-name" --create-application=false an empty project with the name wanted is created.

+
+
+
+
ng new "application-name" --create-application=false
+
+
+
+

This step is much easier and more straightforward when using Nx. Nx allows us to work in a monorepo workspace, where you can develop a project as an application, or a library, or a tool. You can follow this guide to get started with Nx. +The command for generating a library in Nx is nx generate @nrwl/angular:library library-name --publishable --importPath=library-name. This will create an empty angular application which we can modify and publish as a library.

+
+
+
+

2. Generating a library

+
+

After generating an empty application, a library is going to be generated. Inside the folder of the project, the Angular CLI command ng generate library "library-name" is going to generate the library as a project (projects/"library-name"). As an addition, the option --prefix="library-prefix-wanted" allows us to switch the default prefix that Angular generated with (lib). Using the option to change the prefix the command will look like this ng generate library "library-name" --prefix="library-prefix-wanted".

+
+
+
+
ng generate library "library-name" --prefix="library-prefix-wanted"
+
+
+
+

If you are using Nx, this step is not needed as it is already covered in step 1. In this case, the library project will be generated in the libs folder of a Nx workspace.

+
+
+
+

3. Modifying our library

+
+

In the last step we generated a library. This automatically generates a module, a service and a component inside projects/"library-name" that we can modify, adding new methods, components, etc. that we want to use in other projects. We can generate other elements using the usual Angular CLI generate commands, adding the option --project="library-name", which will allow us to generate elements within our project. An example of this is: ng generate service "name" --project="library-name".

+
+
+
+
ng generate "element" "name" --project="library-name"
+
+
+
+

You can use the same command as above in a Nx workspace.

+
+
+
+

4. Exporting the generated things

+
+

Inside the library (projects/"library-name") there’s a public_api.ts which is the file that exports the elements inside the library. (The file is named index.ts in an Nx workspace). In case we generated other things, this file needs to be modified by adding the extra exports with the generated elements. In addition, changing the library version is possible in the file package.json.

+
+
+
+

5. Building our library

+
+

Once we added the necessary exports, in order to use the library in other applications, we need to build the library. The command ng build "library-name" is going to build the library, generating the necessary files in "project-name"/dist/"library-name".

+
+
+
+
ng build "library-name"
+
+
+
+

You can use the same command in Nx as well. Only the path for the generated files will be slightly different: "project-name"/dist/libs/"library-name"

+
+
+
+

6. Packing the library

+
+

In this step we are going to pack the built library. In order to do so, we need to go inside dist/"library-name" (or dist/libs/"library-name") and then run either npm pack or yarn pack to generate a "library-name-version.tgz" file.

+
+
+
Listing 18. Packing using npm
+
+
npm pack
+
+
+
+
Listing 19. Packing using yarn
+
+
yarn pack
+
+
+
+
+

7. Publishing to npm repository (optional)

+
+
    +
  • +

    Add a README.md and LICENSE file. The text inside README.md will be used in your npm package web page as documentation.

    +
  • +
  • +

    run npm adduser if you do not have a npm account to create it, otherwise run npm login and introduce your credentials.

    +
  • +
  • +

    run npm publish inside dist/"library-name" folder.

    +
  • +
  • +

    Check that the library is published: https://npmjs.com/package/library-name

    +
  • +
+
+
+
+

8. Installing our library in other projects

+
+

In this step we are going to install/add the library on other projects.

+
+
+
+

== npm

+
+

In order to add the library in other applications, there are two ways:

+
+
+
    +
  • +

    Option 1: From inside the application where the library is going to get used, using the command npm install "path-to-tgz"/"library-name-version.tgz" allows us to install the .tgz generated in Packing the library.

    +
  • +
  • +

    Option 2: run npm install "library-name" to install it from npm repository.

    +
  • +
+
+
+
+

== yarn

+
+

To add the package using yarn:

+
+
+
    +
  • +

    Option 1: From inside the application where the library is going to get used, using the command yarn add "path-to-tgz"/"library-name-version.tgz" allows us to install the .tgz generated in Packing the library.

    +
  • +
  • +

    Option 2: run yarn add "library-name" to install it from npm repository.

    +
  • +
+
+
+
+

9. Using the library

+
+

Finally, once the library is installed with either package manager, you can start using its elements just like any other elements inside the application. Example app.component.ts:

+
+
+
+
import { Component, OnInit } from '@angular/core';
+import { MyLibraryService } from 'my-library';
+
+@Component({
+  selector: 'app-root',
+  templateUrl: './app.component.html',
+  styleUrls: ['./app.component.scss']
+})
+export class AppComponent implements OnInit {
+
+  toUpper: string;
+
+  constructor(private myLibraryService: MyLibraryService) {}
+  title = 'devon4ng library test';
+  ngOnInit(): void {
+    this.toUpper = this.myLibraryService.firstLetterToUpper('test');
+  }
+}
+
+
+
+

Example app.component.html:

+
+
+
+
<!--The content below is only a placeholder and can be replaced.-->
+<div style="text-align:center">
+  <h1>
+    Welcome to {{ title }}!
+  </h1>
+  <img width="300" alt="Angular Logo" src="data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAyNTAgMjUwIj4KICAgIDxwYXRoIGZpbGw9IiNERDAwMzEiIGQ9Ik0xMjUgMzBMMzEuOSA2My4ybDE0LjIgMTIzLjFMMTI1IDIzMGw3OC45LTQzLjcgMTQuMi0xMjMuMXoiIC8+CiAgICA8cGF0aCBmaWxsPSIjQzMwMDJGIiBkPSJNMTI1IDMwdjIyLjItLjFWMjMwbDc4LjktNDMuNyAxNC4yLTEyMy4xTDEyNSAzMHoiIC8+CiAgICA8cGF0aCAgZmlsbD0iI0ZGRkZGRiIgZD0iTTEyNSA1Mi4xTDY2LjggMTgyLjZoMjEuN2wxMS43LTI5LjJoNDkuNGwxMS43IDI5LjJIMTgzTDEyNSA1Mi4xem0xNyA4My4zaC0zNGwxNy00MC45IDE3IDQwLjl6IiAvPgogIDwvc3ZnPg== ">
+</div>
+<h2>Here is my library service being used: {{toUpper}}</h2>
+<lib-my-library></lib-my-library>
+
+
+
+

Example app.module.ts:

+
+
+
+
import { BrowserModule } from '@angular/platform-browser';
+import { NgModule } from '@angular/core';
+
+import { AppRoutingModule } from './app-routing.module';
+import { AppComponent } from './app.component';
+
+import { MyLibraryModule } from 'my-library';
+@NgModule({
+  declarations: [
+    AppComponent
+  ],
+  imports: [
+    BrowserModule,
+    AppRoutingModule,
+    MyLibraryModule
+  ],
+  providers: [],
+  bootstrap: [AppComponent]
+})
+export class AppModule { }
+
+
+
+

The result from using the library:

+
+
+
+result +
+
+
+
+

devon4ng libraries

+
+

In devonfw/devon4ng-library you can find some useful libraries:

+
+
+
    +
  • +

    Authorization module: This devon4ng Angular module adds rights-based authorization to your Angular app.

    +
  • +
  • +

    Cache module: Use this devon4ng Angular module when you want to cache requests to server. You may configure it to store in cache only the requests you need and to set the duration you want.

    +
  • +
+
+ +
+
+

Angular Material Theming

+
+

Angular Material library offers UI components for developers, those components follows Google Material design baselines but characteristics like colors can be modified in order to adapt them to the needs of the client: corporative colors, corporative identity, dark themes, …​

+
+
+
+

Theming basics

+
+

In Angular Material, a theme is created by mixing multiple colors. A color and its light and dark variants form a palette. In general, a theme consists of the following palettes:

+
+
+
    +
  • +

    primary: Most used across screens and components.

    +
  • +
  • +

    accent: Floating action button and interactive elements.

    +
  • +
  • +

    warn: Error state.

    +
  • +
  • +

    foreground: Text and icons.

    +
  • +
  • +

    background: Element backgrounds.

    +
  • +
+
+
+
+Theme palette +
+
Figure 19. Palettes and variants.
+
+
+

In angular material, a palette is represented as a SCSS map.

+
+
+
+SCSS map +
+
Figure 20. SCSS map and palettes.
+
+
+ + + + + +
+ + +Some components can be forced to use primary, accent or warn palettes using the attribute color, for example: <mat-toolbar color="primary">. +
+
+
+
+

Pre-built themes

+
+

Available pre-built themes:

+
+
+
    +
  • +

    deeppurple-amber.css

    +
  • +
+
+
+
+`deeppurple-amber theme` +
+
Figure 21. deeppurple-amber theme.
+
+
+
    +
  • +

    indigo-pink.css

    +
  • +
+
+
+
+indigo-pink theme +
+
Figure 22. indigo-pink theme.
+
+
+
    +
  • +

    pink-bluegrey.css

    +
  • +
+
+
+
+` pink-bluegrey theme` +
+
Figure 23. pink-bluegrey theme.
+
+
+
    +
  • +

    purple-green.css

    +
  • +
+
+
+
+purple-green theme +
+
Figure 24. purple-green theme.
+
+
+

The pre-built themes can be added using @import.

+
+
+
+
@import '@angular/material/prebuilt-themes/deeppurple-amber.css';
+
+
+
+
+

Custom themes

+
+

Sometimes pre-built themes do not meet the needs of a project, because color schemas are too specific or do not incorporate branding colors, in those situations custom themes can be built to offer a better solution to the client.

+
+
+

For this topic, we are going to use a basic layout project that can be found in devon4ts-samples repository.

+
+
+
+

Basics

+
+

Before starting writing custom themes, there are some necessary things that have to be mentioned:

+
+
+
    +
  • +

    Add a default theme: The project mentioned before has just one global SCSS style sheet styles.scss that includes indigo-pink.scss which will be the default theme.

    +
  • +
  • +

    Add @import '~@angular/material/theming'; at the beginning of every style sheet to be able to use angular material pre-built color palettes and functions.

    +
  • +
  • +

    Add @include mat-core(); once per project, so if you are writing multiple themes in multiple files you could import those files from a 'central' one (for example styles.scss). This includes all common styles that are used by multiple components.

    +
  • +
+
+
+
+Theme files structure +
+
Figure 25. Theme files structure.
+
+
+
+

Basic custom theme

+
+

To create a new custom theme, the .scss file containing it has to import the angular _theming.scss file (angular/material/theming) and have mat-core included. _theming.scss includes multiple color palettes and some functions that we are going to see below. The file for this basic theme is going to be named styles-custom-dark.scss.

+
+
+

First, declare new variables for primary, accent and warn palettes. Those variables are going to store the result of the function mat-palette.

+
+
+

mat-palette accepts four arguments: base color palette, main, lighter and darker variants (See Palettes and variants.) and returns a new palette including some additional map values: default, lighter and darker ([id_scss_map]). Only the first argument is mandatory.

+
+
+
Listing 20. File styles-custom-dark.scss.
+
+
$custom-dark-theme-primary: mat-palette($mat-pink);
+$custom-dark-theme-accent: mat-palette($mat-blue);
+$custom-dark-theme-warn: mat-palette($mat-red);
+
+
+
+

In this example we are using colors available in _theming.scss: mat-pink, mat-blue, mat-red. If you want to use a custom color you need to define a new map, for instance:

+
+
+
Listing 21. File styles-custom-dark.scss custom pink.
+
+
$my-pink: (
+    50 : #fcf3f3,
+    100 : #f9e0e0,
+    200 : #f5cccc,
+    300 : #f0b8b8,
+    500 : #ea9999,
+    900 : #db6b6b,
+    A100 : #ffffff,
+    A200 : #ffffff,
+    A400 : #ffeaea,
+    A700 : #ffd0d0,
+    contrast: (
+        50 : #000000,
+        100 : #000000,
+        200 : #000000,
+        300 : #000000,
+        900 : #000000,
+        A100 : #000000,
+        A200 : #000000,
+        A400 : #000000,
+        A700 : #000000,
+    )
+);
+
+$custom-dark-theme-primary: mat-palette($my-pink);
+...
+
+
+
+ + + + + +
+ + +Some pages allow creating these palettes easily, for instance: http://mcg.mbitson.com +
+
+
+

Until now, we just have defined primary, accent and warn palettes but what about foreground and background? Angular material has two functions to change both:

+
+
+
    +
  • +

    mat-light-theme: Receives as arguments primary, accent and warn palettes and return a theme whose foreground is basically black (texts, icons, …​), the background is white and the other palettes are the received ones.

    +
  • +
+
+
+
+`deeppurple-amber theme` +
+
Figure 26. Custom light theme.
+
+
+
    +
  • +

    mat-dark-theme: Similar to mat-light-theme but returns a theme whose foreground is basically white and background black.

    +
  • +
+
+
+
+`deeppurple-amber theme` +
+
Figure 27. Custom dark theme.
+
+
+

For this example we are going to use mat-dark-theme and save its result in $custom-dark-theme.

+
+
+
Listing 22. File styles-custom-dark.scss updated with mat-dark-theme.
+
+
...
+
+$custom-dark-theme: mat-dark-theme(
+  $custom-dark-theme-primary,
+  $custom-dark-theme-accent,
+  $custom-dark-theme-warn
+);
+
+
+
+

To apply the saved theme, we have to go to styles.scss and import our styles-custom-dark.scss and include a function called angular-material-theme using the theme variable as argument.

+
+
+
Listing 23. File styles.scss.
+
+
...
+@import 'styles-custom-dark.scss';
+@include angular-material-theme($custom-dark-theme);
+
+
+
+

If we have multiple themes it is necessary to add the include statement inside a css class and use it in src/index.html → app-root component.

+
+
+
Listing 24. File styles.scss updated with custom-dark-theme class.
+
+
...
+@import 'styles-custom-dark.scss';
+
+.custom-dark-theme {
+  @include angular-material-theme($custom-dark-theme);
+}
+
+
+
+
Listing 25. File src/index.html.
+
+
...
+<app-root class="custom-dark-theme"></app-root>
+...
+
+
+
+

This will apply $custom-dark-theme theme for the entire application.

+
+
+
+

Full custom theme

+
+

Sometimes it is necessary to customize different background and foreground elements; in those situations we have to create a new function similar to mat-light-theme and mat-dark-theme. Let’s focus on mat-light-theme:

+
+
+
Listing 26. Source code of mat-light-theme
+
+
@function mat-light-theme($primary, $accent, $warn: mat-palette($mat-red)) {
+  @return (
+    primary: $primary,
+    accent: $accent,
+    warn: $warn,
+    is-dark: false,
+    foreground: $mat-light-theme-foreground,
+    background: $mat-light-theme-background,
+  );
+}
+
+
+
+

As we can see, mat-light-theme takes three arguments and returns a map including them as primary, accent and warn color; but there are three more keys in that map: is-dark, foreground and background.

+
+
+
    +
  • +

    is-dark: Boolean true if it is a dark theme, false otherwise.

    +
  • +
  • +

    background: Map that stores the color for multiple background elements.

    +
  • +
  • +

    foreground: Map that stores the color for multiple foreground elements.

    +
  • +
+
+
+

To show which elements can be colored lets create a new theme in a file styles-custom-cap.scss:

+
+
+
Listing 27. File styles-custom-cap.scss: Background and foreground variables.
+
+
@import '~@angular/material/theming';
+
+// custom background and foreground palettes
+$my-cap-theme-background: (
+  status-bar: #0070ad,
+  app-bar: map_get($mat-blue, 900),
+  background: #12abdb,
+  hover: rgba(white, 0.04),
+  card: map_get($mat-red, 800),
+  dialog: map_get($mat-grey, 800),
+  disabled-button: $white-12-opacity,
+  raised-button: map-get($mat-grey, 800),
+  focused-button: $white-6-opacity,
+  selected-button: map_get($mat-grey, 900),
+  selected-disabled-button: map_get($mat-grey, 800),
+  disabled-button-toggle: black,
+  unselected-chip: map_get($mat-grey, 700),
+  disabled-list-option: black,
+);
+
+$my-cap-theme-foreground: (
+  base: yellow,
+  divider: $white-12-opacity,
+  dividers: $white-12-opacity,
+  disabled: rgba(white, 0.3),
+  disabled-button: rgba(white, 0.3),
+  disabled-text: rgba(white, 0.3),
+  hint-text: rgba(white, 0.3),
+  secondary-text: rgba(white, 0.7),
+  icon: white,
+  icons: white,
+  text: white,
+  slider-min: white,
+  slider-off: rgba(white, 0.3),
+  slider-off-active: rgba(white, 0.3),
+);
+
+
+
+

Function which uses the variables defined before to create a new theme:

+
+
+
Listing 28. File styles-custom-cap.scss: Creating a new theme function.
+
+
// instead of creating a theme with mat-light-theme or mat-dark-theme,
+// we will create our own theme-creating function that lets us apply our own foreground and background palettes.
+@function create-my-cap-theme($primary, $accent, $warn: mat-palette($mat-red)) {
+  @return (
+    primary: $primary,
+    accent: $accent,
+    warn: $warn,
+    is-dark: false,
+    foreground: $my-cap-theme-foreground,
+    background: $my-cap-theme-background
+  );
+}
+
+
+
+

Calling the new function and storing its value in $custom-cap-theme.

+
+
+
Listing 29. File styles-custom-cap.scss: Storing the new theme.
+
+
// We use create-my-cap-theme instead of mat-light-theme or mat-dark-theme
+$custom-cap-theme-primary: mat-palette($mat-green);
+$custom-cap-theme-accent: mat-palette($mat-blue);
+$custom-cap-theme-warn: mat-palette($mat-red);
+
+$custom-cap-theme: create-my-cap-theme(
+  $custom-cap-theme-primary,
+  $custom-cap-theme-accent,
+  $custom-cap-theme-warn
+);
+
+
+
+

After defining our new theme, we can import it from styles.scss.

+
+
+
Listing 30. File styles.scss updated with custom-cap-theme class.
+
+
...
+@import 'styles-custom-cap.scss';
+.custom-cap-theme {
+  @include angular-material-theme($custom-cap-theme);
+}
+
+
+
+
+

Multiple themes and overlay-based components

+
+

Certain components (e.g. menu, select, dialog, etc.) that are inside a global overlay container require an additional step to be affected by the theme’s CSS class selector.

+
+
+
Listing 31. File app.module.ts
+
+
import {OverlayContainer} from '@angular/cdk/overlay';
+
+@NgModule({
+  // ...
+})
+export class AppModule {
+  constructor(overlayContainer: OverlayContainer) {
+    overlayContainer.getContainerElement().classList.add('custom-cap-theme');
+  }
+}
+
+
+
+ +
+

Angular Progressive Web App

+
+

Progressive web applications (PWA) are web applications that offer a better user experience than traditional ones. In general, they solve problems related to reliability and speed:

+
+
+
    +
  • +

    Reliability: PWA are stable. In this context stability means that even with slow connections or even with no network at all, the application still works. To achieve this, some basic resources like styles, fonts, requests, … are stored; due to this caching, it is not possible to assure that the content is always up-to-date.

    +
  • +
  • +

    Speed: When a user opens an application, he or she will expect it to load almost immediately (almost 53% of users abandon sites that take longer than 3 seconds, source: https://developers.google.com/web/progressive-web-apps/#fast).

    +
  • +
+
+
+

PWA use a script called a service worker, which runs in the background and essentially acts as a proxy between the web app and the network, intercepting requests and acting depending on the network conditions.

+
+
+
+

Assumptions

+
+

This guide assumes that you already have installed:

+
+
+
    +
  • +

    NodeJS

    +
  • +
  • +

    npm package manager

    +
  • +
  • +

    Angular CLI / Nx CLI

    +
  • +
+
+
+
+

Sample Application

+
+
+My Thai Star recommendation +
+
Figure 28. Basic angular PWA.
+
+
+

To explain how to build PWA using angular, a basic application is going to be built. This app will be able to ask for resources and save in the cache in order to work even offline.

+
+
+
+

Step 1: Create a new project

+
+

This step can be completed with one simple command using the Angular CLI: ng new <name>, where <name> is the name for the app. In this case, the app is going to be named basic-ng-pwa. If you are using Nx CLI, you can use the command nx generate @nrwl/angular:app <name> in your Nx workspace. You can follow this guide if you want to get started with Nx workspace.

+
+
+
+

Step 2: Create a service

+
+

Web applications usually use external resources, making it necessary to add services which can get those resources. This application gets a dish from My Thai Star’s back-end and shows it. To do so, a new service is going to be created.

+
+
+
    +
  • +

    go to project folder: cd basic-ng-pwa. If using Nx, go to the root folder of the workspace.

    +
  • +
  • +

    run ng generate service data. For Nx CLI, specify the project name with --project flag. So the command becomes ng generate service data --project=basic-ng-pwa

    +
  • +
  • +

    Modify data.service.ts, environment.ts, environment.prod.ts

    +
  • +
+
+
+

To retrieve data with this service, you have to import the module HttpClient and add it to the service’s constructor. Once added, use it to create a function getDishes() that sends an HTTP request to My Thai Star’s back-end. The URL of the back-end can be stored as an environment variable MY_THAI_STAR_DISH.

+
+
+

data.service.ts

+
+
+
+
  ...
+  import { HttpClient } from '@angular/common/http';
+  import { MY_THAI_STAR_DISH } from '../environments/environment';
+  ...
+
+  export class DataService {
+    constructor(private http: HttpClient) {}
+
+    /* Get data from Back-end */
+    getDishes() {
+      return this.http.get(MY_THAI_STAR_DISH);
+    }
+    ...
+  }
+
+
+
+

environments.ts

+
+
+
+
  ...
+  export const MY_THAI_STAR_DISH =
+  'https://mts-devonfw-core.cloud.okteto.net/api/services/rest/dishmanagement/v1/dish/1';
+  ...
+
+
+
+

environments.prod.ts

+
+
+
+
  ...
+  export const MY_THAI_STAR_DISH =
+  'https://mts-devonfw-core.cloud.okteto.net/api/services/rest/dishmanagement/v1/dish/1';
+  ...
+
+
+
+
+

Step 3: Use the service

+
+

The component AppComponent implements the interface OnInit and inside its method ngOnInit() the subscription to the services is done. When a dish arrives, it is saved and shown (app.component.html).

+
+
+
+
  ...
+  import { DataService } from './data.service';
+  export class AppComponent implements OnInit {
+  dish: { name: string; description: string } = { name: '', description: ''};
+
+  ...
+  ngOnInit() {
+    this.data
+      .getDishes()
+      .subscribe(
+        (dishToday: { dish: { name: string; description: string } }) => {
+          this.dish = {
+            name: dishToday.dish.name,
+            description: dishToday.dish.description,
+          };
+        },
+      );
+  }
+}
+
+
+
+
+

Step 4: Structures, styles and updates

+
+

This step shows interesting code inside the sample app. The complete content can be found in devon4ts-samples.

+
+
+

index.html

+
+
+

To use the Montserrat font add the following link inside the head tag of the app’s index.html file.

+
+
+
+
  <link href="https://fonts.googleapis.com/css?family=Montserrat" rel="stylesheet">
+
+
+
+

styles.scss

+
+
+
+
  body {
+    ...
+    font-family: 'Montserrat', sans-serif;
+  }
+
+
+
+

app.component.ts

+
+
+

This file is also used to reload the app if there are any changes.

+
+
+
    +
  • +

    SwUpdate: This object comes inside the @angular/pwa package and it is used to detect changes and reload the page if needed.

    +
  • +
+
+
+
+
  ...
+  import { SwUpdate } from '@angular/service-worker';
+
+  export class AppComponent implements OnInit {
+
+  ...
+    constructor(updates: SwUpdate, private data: DataService) {
+      updates.available.subscribe((event) => {
+        updates.activateUpdate().then(() => document.location.reload());
+      });
+    }
+    ...
+  }
+
+
+
+
+

Step 5: Make it Progressive.

+
+

Install Angular PWA package with ng add @angular/pwa --project=<name>. As before substitute name with basic-ng-pwa.

+
+
+

The above command completes the following actions:

+
+
+
    +
  1. +

    Adds the @angular/service-worker package to your project.

    +
  2. +
  3. +

    Enables service worker build support in the CLI.

    +
  4. +
  5. +

    Imports and registers the service worker in the app module.

    +
  6. +
  7. +

    Updates the index.html file:

    +
    +
      +
    • +

      Includes a link to add the manifest.json file.

      +
    • +
    • +

      Adds meta tags for theme-color.

      +
    • +
    • +

      Installs icon files to support the installed Progressive Web App (PWA).

      +
    • +
    • +

      Creates the service worker configuration file called ngsw-config.json, which specifies the caching behaviors and other settings.

      +
    • +
    +
    +
  8. +
+
+
+
+

== manifest.json

+
+

manifest.json is a file that allows controlling how the app is displayed in places where native apps are displayed.

+
+
+

Fields

+
+
+

name: Name of the web application.

+
+
+

short_name: Short version of name.

+
+
+

theme_color: Default theme color for an application context.

+
+
+

background_color: Expected background color of the web application.

+
+
+

display: Preferred display mode.

+
+
+

scope: Navigation scope of this web application’s application context.

+
+
+

start_url: URL loaded when the user launches the web application.

+
+
+

icons: Array of icons that serve as representations of the web app.

+
+
+

Additional information can be found here.

+
+
+
+

== ngsw-config.json

+
+

ngsw-config.json specifies which files and data URLs have to be cached and updated by the Angular service worker.

+
+
+

Fields

+
+
+
    +
  • +

    index: File that serves as index page to satisfy navigation requests.

    +
  • +
  • +

    assetGroups: Resources that are part of the app version that update along with the app.

    +
    +
      +
    • +

      name: Identifies the group.

      +
    • +
    • +

      installMode: How the resources are cached (pre-fetch or lazy).

      +
    • +
    • +

      updateMode: Caching behavior when a new version of the app is found (pre-fetch or lazy).

      +
    • +
    • +

      resources: Resources to cache. There are three groups.

      +
      +
        +
      • +

        files: Lists patterns that match files in the distribution directory.

        +
      • +
      • +

        urls: URL patterns matched at runtime.

        +
      • +
      +
      +
    • +
    +
    +
  • +
  • +

    dataGroups: Useful for API requests.

    +
    +
      +
    • +

      name: Identifies the group.

      +
    • +
    • +

      urls: URL patterns matched at runtime.

      +
    • +
    • +

      version: Indicates that the resources being cached have been updated in a backwards-incompatible way.

      +
    • +
    • +

      cacheConfig: Policy by which matching requests will be cached

      +
      +
        +
      • +

        maxSize: The maximum number of entries, or responses, in the cache.

        +
      • +
      • +

        maxAge: How long responses are allowed to remain in the cache.

        +
        +
          +
        • +

          d: days. (5d = 5 days).

          +
        • +
        • +

          h: hours

          +
        • +
        • +

          m: minutes

          +
        • +
        • +

          s: seconds. (5m20s = 5 minutes and 20 seconds).

          +
        • +
        • +

          u: milliseconds

          +
        • +
        +
        +
      • +
      • +

        timeout: How long the Angular service worker will wait for the network to respond before using a cached response. Same data format as maxAge.

        +
      • +
      • +

        strategy: Caching strategies (performance or freshness).

        +
      • +
      +
      +
    • +
    +
    +
  • +
  • +

    navigationUrls: List of URLs that will be redirected to the index file.

    +
  • +
+
+
+

Additional information can be found here.

+
+
+
+

Step 6: Configure the app

+
+

manifest.json

+
+
+

Default configuration.

+
+
+

 

+
+
+

ngsw-config.json

+
+
+

At assetGroups → resources → urls: In this field the google fonts API is added in order to use Montserrat font even without network.

+
+
+
+
  "urls": [
+          "https://fonts.googleapis.com/**"
+        ]
+
+
+
+

At the root of the json: A data group to cache API calls.

+
+
+
+
  {
+    ...
+    "dataGroups": [{
+      "name": "mythaistar-dishes",
+      "urls": [
+        "https://mts-devonfw-core.cloud.okteto.net/api/services/rest/dishmanagement/v1/dish/1"
+      ],
+      "cacheConfig": {
+        "maxSize": 100,
+        "maxAge": "1h",
+        "timeout": "10s",
+        "strategy": "freshness"
+      }
+    }]
+  }
+
+
+
+
+

Step 7: Check that your app is a PWA

+
+

To check if an app is a PWA let's compare its normal behavior against itself but built for production. Run in the project’s root folder the commands below:

+
+
+

ng build --prod to build the app using production settings.(nx build <name> --prod in Nx CLI)

+
+
+

npm install http-server to install an npm module that can serve your built application. Documentation here.

+
+
+

Go to the dist/basic-ng-pwa/ folder running cd dist/basic-ng-pwa. In an Nx workspace, the path will be dist/apps/basic-ng-pwa

+
+
+

http-server -o to serve your built app.

+
+
+
+Http server running +
+
Figure 29. Http server running on localhost:8081.
+
+
+

 

+
+
+

In another console instance run ng serve (or nx serve basic-ng-pwa for Nx) to open the common app (not built).

+
+
+
+.Angular server running +
+
Figure 30. Angular server running on localhost:4200.
+
+
+

 

+
+
+

The first difference can be found on Developer tools → application, here it is seen that the PWA application (left) has a service worker and the common (right) one does not.

+
+
+
+Application comparison +
+
Figure 31. Application service worker comparison.
+
+
+

 

+
+
+

If the "offline" box is checked, it will force a disconnection from network. In situations where users do not have connectivity or have a slow one, the PWA can still be accessed and used.

+
+
+
+Online offline apps +
+
Figure 32. Offline application.
+
+
+

 

+
+
+

Finally, browser extensions like Lighthouse can be used to test whether an application is progressive or not.

+
+
+
+Lighthouse report +
+
Figure 33. Lighthouse report.
+
+ +
+
+

APP_INITIALIZER

+ +
+
+

What is the APP_INITIALIZER pattern

+
+

The APP_INITIALIZER pattern allows an application to choose which configuration is going to be used in the start of the application, this is useful because it allows to setup different configurations, for example, for docker or a remote configuration. This provides benefits since this is done on runtime, so there’s no need to recompile the whole application to switch configurations.

+
+
+
+

What is APP_INITIALIZER

+
+

APP_INITIALIZER allows to provide a service in the initialization of the application in a @NgModule. It also allows to use a factory, allowing to create a singleton in the same service. An example can be found in MyThaiStar /core/config/config.module.ts:

+
+
+
+

==

+
+

The provider expects the return of a Promise. If it is using Observables, the method toPromise() will allow a switch from Observable to Promise +== ==

+
+
+
+
import { NgModule, APP_INITIALIZER } from '@angular/core';
+import { HttpClientModule } from '@angular/common/http';
+
+import { ConfigService } from './config.service';
+
+@NgModule({
+  imports: [HttpClientModule],
+  providers: [
+    ConfigService,
+    {
+      provide: APP_INITIALIZER,
+      useFactory: ConfigService.factory,
+      deps: [ConfigService],
+      multi: true,
+    },
+  ],
+})
+export class ConfigModule {}
+
+
+
+

This is going to allow the creation of a ConfigService where, using a singleton, the service is going to load an external config depending on a route. This dependence with a route, allows to setup different configuration for docker etc. This is seen in the ConfigService of MyThaiStar:

+
+
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { environment } from '../../../environments/environment';
+import { Config, config } from './config';
+
+@Injectable()
+export class ConfigService {
+  constructor(private httpClient: HttpClient) {}
+
+  static factory(appLoadService: ConfigService) {
+    return () => appLoadService.loadExternalConfig();
+  }
+
+  // this method gets external configuration calling /config endpoint
+  //and merges into config object
+  loadExternalConfig(): Promise<any> {
+    if (!environment.loadExternalConfig) {
+      return Promise.resolve({});
+    }
+
+    const promise = this.httpClient
+      .get('/config')
+      .toPromise()
+      .then((settings) => {
+        Object.keys(settings || {}).forEach((k) => {
+          config[k] = settings[k];
+        });
+        return settings;
+      })
+      .catch((error) => {
+        return 'ok, no external configuration';
+      });
+
+    return promise;
+  }
+
+  getValues(): Config {
+    return config;
+  }
+}
+
+
+
+

As it is mentioned earlier, you can see the use of a factory to create a singleton at the start. After that, loadExternalConfig is going to look for a Boolean inside the corresponding environment file inside the path src/environments/, this Boolean loadExternalConfig is going to easily allow to switch to a external config. If it is true, it generates a promise that overwrites the parameters of the local config, allowing to load the external config. Finally, the last method getValues() is going to allow to return the file config with the values (overwritten or not). The local config file from MyThaiStar can be seen here:

+
+
+
+
export enum BackendType {
+  IN_MEMORY,
+  REST,
+  GRAPHQL,
+}
+
+interface Role {
+  name: string;
+  permission: number;
+}
+
+interface Lang {
+  label: string;
+  value: string;
+}
+
+export interface Config {
+  version: string;
+  backendType: BackendType;
+  restPathRoot: string;
+  restServiceRoot: string;
+  pageSizes: number[];
+  pageSizesDialog: number[];
+  roles: Role[];
+  langs: Lang[];
+}
+
+export const config: Config = {
+  version: 'dev',
+  backendType: BackendType.REST,
+  restPathRoot: 'http://localhost:8081/mythaistar/',
+  restServiceRoot: 'http://localhost:8081/mythaistar/services/rest/',
+  pageSizes: [8, 16, 24],
+  pageSizesDialog: [4, 8, 12],
+  roles: [
+    { name: 'CUSTOMER', permission: 0 },
+    { name: 'WAITER', permission: 1 },
+  ],
+  langs: [
+    { label: 'English', value: 'en' },
+    { label: 'Deutsch', value: 'de' },
+    { label: 'Español', value: 'es' },
+    { label: 'Català', value: 'ca' },
+    { label: 'Français', value: 'fr' },
+    { label: 'Nederlands', value: 'nl' },
+    { label: 'हिन्दी', value: 'hi' },
+    { label: 'Polski', value: 'pl' },
+    { label: 'Русский', value: 'ru' },
+    { label: 'български', value: 'bg' },
+  ],
+};
+
+
+
+

Finally, inside a environment file src/environments/environment.ts the use of the Boolean loadExternalConfig is seen:

+
+
+
+
// The file contents for the current environment will overwrite these during build.
+// The build system defaults to the dev environment which uses `environment.ts`, but if you do
+// `ng build --env=prod` then `environment.prod.ts` will be used instead.
+// The list of which env maps to which file can be found in `.angular-cli.json`.
+
+export const environment: {
+  production: boolean;
+  loadExternalConfig: boolean;
+} = { production: false, loadExternalConfig: false };
+
+
+
+
+

Creating a APP_INITIALIZER configuration

+
+

This section is going to be used to create a new APP_INITIALIZER basic example. For this, a basic app with angular is going to be generated using ng new "appname" substituting appname for the name of the app opted. +If you are using Nx, the command would be nx generate @nrwl/angular:app "appname" in your Nx workspace. Click here to get started with using Nx.

+
+
+
+

Setting up the config files

+ +
+
+

Docker external configuration (Optional)

+
+

This section is only needed if there is a docker configuration in the app in which you are setting up this type of configuration.

+
+
+

1.- Create in the root folder /docker-external-config.json. This external config is going to be used when the application is loaded with docker (if the Boolean to load the external configuration is set to true). Here you need to add all the config parameter you want to load with docker:

+
+
+
+
{
+    "version": "docker-version"
+}
+
+
+
+

2.- In the root, in the file /Dockerfile angular is going to copy the docker-external-config.json that was created before into the Nginx html route:

+
+
+
+
....
+COPY docker-external-config.json /usr/share/nginx/html/docker-external-config.json
+....
+
+
+
+
+

External json configuration

+
+

1.- Create a json file in the route /src/external-config.json. This external config is going to be used when the application is loaded with the start script (if the Boolean to load the external configuration is set to true). Here you need to add all the config parameter you want to load:

+
+
+
+
{
+    "version": "external-config"
+}
+
+
+
+

2.- The file named /angular.json (/workspace.json if using Nx) located at the root is going to be modified to add the file external-config.json that was just created to both "assets" inside Build and Test:

+
+
+
+
	....
+	"build": {
+          ....
+            "assets": [
+              "src/assets",
+              "src/data",
+              "src/favicon.ico",
+              "src/manifest.json",
+              "src/external-config.json"
+            ]
+	        ....
+        "test": {
+	  ....
+	   "assets": [
+              "src/assets",
+              "src/data",
+              "src/favicon.ico",
+              "src/manifest.json",
+              "src/external-config.json"
+            ]
+	  ....
+
+
+
+
+

Setting up the proxies

+
+

This step is going to setup two proxies. This is going to allow to load the config desired by the context, in case that it is using docker to load the app or in case it loads the app with angular. Loading different files is made possible by the fact that the ConfigService method loadExternalConfig() looks for the path /config.

+
+
+
+

Docker (Optional)

+
+

1.- This step is going to be for docker. Add docker-external-config.json to Nginx configuration (/nginx.conf) that is in the root of the application:

+
+
+
+
....
+  location  ~ ^/config {
+        alias /usr/share/nginx/html/docker-external-config.json;
+  }
+....
+
+
+
+
+

External Configuration

+
+

1.- Now the file /proxy.conf.json needs to be created/modified. This file can be found in the root of the application. In this file you can add the route of the external configuration in target and the name of the file in ^/config:

+
+
+
+
....
+  "/config": {
+    "target": "http://localhost:4200",
+    "secure": false,
+    "pathRewrite": {
+      "^/config": "/external-config.json"
+    }
+  }
+....
+
+
+
+

2.- The file package.json found in the root of the application is going to use the start script to load the proxy config that was just created:

+
+
+
+
  "scripts": {
+....
+    "start": "ng serve --proxy-config proxy.conf.json -o",
+....
+
+
+
+

If using Nx, you need to run the command manually:

+
+
+

nx run angular-app-initializer:serve:development --proxyConfig=proxy.conf.json --o

+
+
+
+

Adding the loadExternalConfig Boolean to the environments

+
+

In order to load an external config we need to add the loadExternalConfig Boolean to the environments. To do so, inside the folder environments/ the files are going to get modified adding this Boolean to each environment that is going to be used. In this case, only two environments are going to be modified (environment.ts and environment.prod.ts). Down below there is an example of the modification being done in the environment.prod.ts:

+
+
+
+
export const environment: {
+  production: boolean;
+  loadExternalConfig: boolean;
+} = { production: true, loadExternalConfig: false };
+
+
+
+

In the file in first instance there is the declaration of the types of the variables. After that, there is the definition of those variables. This variable loadExternalConfig is going to be used by the service, allowing to setup a external config just by switching the loadExternalConfig to true.

+
+
+
+

Creating core configuration service

+
+

In order to create the whole configuration module three files are going to be created:

+
+
+

1.- Create in the core app/core/config/ a config.ts

+
+
+
+
  export interface Config {
+    version: string;
+  }
+
+  export const config: Config = {
+    version: 'dev'
+  };
+
+
+
+

Taking a look to this file, it creates a interface (Config) that is going to be used by the variable that exports (export const config: Config). This variable config is going to be used by the service that is going to be created.

+
+
+

2.- Create in the core app/core/config/ a config.service.ts:

+
+
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { environment } from '../../../environments/environment';
+import { Config, config } from './config';
+
+@Injectable()
+export class ConfigService {
+  constructor(private httpClient: HttpClient) {}
+
+  static factory(appLoadService: ConfigService) {
+    return () => appLoadService.loadExternalConfig();
+  }
+
+  // this method gets external configuration calling /config endpoint
+  // and merges into config object
+  loadExternalConfig(): Promise<any> {
+    if (!environment.loadExternalConfig) {
+      return Promise.resolve({});
+    }
+
+    const promise = this.httpClient
+      .get('/config')
+      .toPromise()
+      .then((settings) => {
+        Object.keys(settings || {}).forEach((k) => {
+          config[k] = settings[k];
+        });
+        return settings;
+      })
+      .catch((error) => {
+        return 'ok, no external configuration';
+      });
+
+    return promise;
+  }
+
+  getValues(): Config {
+    return config;
+  }
+}
+
+
+
+

As it was explained in previous steps, at first, there is a factory that uses the method loadExternalConfig(), this factory is going to be used in later steps in the module. After that, the loadExternalConfig() method checks if the Boolean in the environment is false. If it is false it will return the promise resolved with the normal config. Else, it is going to load the external config in the path (/config), and overwrite the values from the external config to the config that’s going to be used by the app, this is all returned in a promise.

+
+
+

3.- Create in the core a module for the config app/core/config/ a config.module.ts:

+
+
+
+
import { NgModule, APP_INITIALIZER } from '@angular/core';
+import { HttpClientModule } from '@angular/common/http';
+
+import { ConfigService } from './config.service';
+
+@NgModule({
+  imports: [HttpClientModule],
+  providers: [
+    ConfigService,
+    {
+      provide: APP_INITIALIZER,
+      useFactory: ConfigService.factory,
+      deps: [ConfigService],
+      multi: true,
+    },
+  ],
+})
+export class ConfigModule {}
+
+
+
+

As seen earlier, the ConfigService is added to the module. In this addition, the app is initialized(provide) and it uses the factory that was created in the ConfigService loading the config with or without the external values depending on the Boolean in the config.

+
+
+
+

Using the Config Service

+
+

As a first step, in the file /app/app.module.ts the ConfigModule created earlier in the other step is going to be imported:

+
+
+
+
  imports: [
+    ....
+    ConfigModule,
+    ....
+  ]
+
+
+
+

After that, the ConfigService is going to be injected into the app.component.ts

+
+
+
+
....
+import { ConfigService } from './core/config/config.service';
+....
+export class AppComponent {
+....
+  constructor(public configService: ConfigService) { }
+....
+
+
+
+

Finally, for this demonstration app, the component app/app.component.html is going to show the version of the config it is using at that moment.

+
+
+
+
<div style="text-align:center">
+  <h1>
+    Welcome to {{ title }}!
+  </h1>
+</div>
+<h2>Here is the configuration version that is using angular right now: {{configService.getValues().version}}</h2>
+
+
+
+
+

Final steps

+
+

The script start that was created earlier in the package.json (npm start) is going to be used to start the application. After that, modifying the Boolean loadExternalConfig inside the corresponding environment file inside /app/environments/ should show the different config versions.

+
+
+
+loadExternalConfigFalse +
+
+
+
+loadExternalConfigTrue +
+
+ +
+
+

Component Decomposition

+
+

When implementing a new requirement there are a few design decisions, which need to be considered. +A decomposition in Smart and Dumb Components should be done first. +This includes the definition of state and responsibilities. +Implementing a new dialog will most likely be done by defining a new Smart Component with multiple Dumb Component children.

+
+
+

In the component tree this would translate to the definition of a new sub-tree.

+
+
+
+Component Tree With Highlighted Sub Tree +
+
Figure 34. Component Tree with highlighted sub-tree
+
+
+
+

Defining Components

+
+

The following gives an example for component decomposition. +Shown is a screenshot from a style guide to be implemented. +It is a widget called Listpicker.

+
+
+

The basic function is an input field accepting direct input. +So typing otto puts otto inside the FormControl. +With arrow down key or by clicking the icon displayed in the inputs right edge a dropdown is opened. +Inside possible values can be selected and filtered beforehand. +After pressing arrow down key the focus should move into the filter input field. +Up and down arrow keys can be used to select an element from the list. +Typing into the filter input field filters the list from which the elements can be selected. +The current selected element is highlighted with green background color.

+
+
+
+Component Decomposition Example 1v2 +
+
Figure 35. Component decomposition example before
+
+
+

What should be done, is to define small reusable Dumb Components. +This way the complexity becomes manageable. +In the example every colored box describes a component with the purple box being a Smart Component.

+
+
+
+Component Decomposition Example 2v2 +
+
Figure 36. Component decomposition example after
+
+
+

This leads to the following component tree.

+
+
+
+Component Decomposition Example component tree +
+
Figure 37. Component decomposition example component tree
+
+
+

Note the uppermost component is a Dumb Component. +It is a wrapper for the label and the component to be displayed inside a form. +The Smart Component is Listpicker. +This way the widget can be reused without a form needed.

+
+
+

A widget is a typical Smart Component to be shared across feature modules. +So the SharedModule is the place for it to be defined.

+
+
+
+

Defining state

+
+

Every UI has state. +There are different kinds of state, for example

+
+
+
    +
  • +

    View State: e.g. is a panel open, a css transition pending, etc.

    +
  • +
  • +

    Application State: e.g. is a payment pending, current URL, user info, etc.

    +
  • +
  • +

    Business Data: e.g. products loaded from back-end

    +
  • +
+
+
+

It is good practice to base the component decomposition on the state handled by a component and to define a simplified state model beforehand. +Starting with the parent - the Smart Component:

+
+
+
    +
  • +

    What overall state does the dialog have: e.g. loading, error, valid data loaded, valid input, invalid input, etc. +Every defined value should correspond to an overall appearance of the whole dialog.

    +
  • +
  • +

    What events can occur to the dialog: e.g. submitting a form, changing a filter, pressing buttons, pressing keys, etc.

    +
  • +
+
+
+

For every Dumb Component:

+
+
+
    +
  • +

    What data does a component display: e.g. a header text, user information to be displayed, a loading flag, etc.
    +This will be a slice of the overall state of the parent Smart Component. +In general a Dumb Component presents a slice of its parent Smart Components state to the user.

    +
  • +
  • +

    What events can occur: keyboard events, mouse events, etc.
    +These events are all handled by its parent Smart Component - every event is passed up the tree to be handled by a Smart Component.

    +
  • +
+
+
+

These information should be reflected inside the modeled state. +The implementation is a TypeScript type - an interface or a class describing the model.

+
+
+

So there should be a type describing all state relevant for a Smart Component. +An instance of that type is sent down the component tree at runtime. +Not every Dumb Component will need the whole state. +For instance a single Dumb Component could only need a single string.

+
+
+

The state model for the previous Listpicker example is shown in the following listing.

+
+
+
Listing 32. Listpicker state model
+
+
export class ListpickerState {
+
+  items: {}[]|undefined;
+  columns = ['key', 'value'];
+  keyColumn = 'key';
+  displayValueColumn = 'value';
+  filteredItems: {}[]|undefined;
+  filter = '';
+  placeholder = '';
+  caseSensitive = true;
+  isDisabled = false;
+  isDropdownOpen = false;
+  selectedItem: {}|undefined;
+  displayValue = '';
+
+}
+
+
+
+

Listpicker holds an instance of ListpickerState which is passed down the component tree via @Input() bindings in the Dumb Components. +Events emitted by children - Dumb Components - create a new instance of ListpickerState based on the current instance and the event and its data. +So a state transition is just setting a new instance of ListpickerState. +Angular Bindings propagate the value down the tree after exchanging the state.

+
+
+
Listing 33. Listpicker State transition
+
+
export class ListpickerComponent {
+
+  // initial default values are set
+  state = new ListpickerState();
+
+  /** User changes filter */
+  onFilterChange(filter: string): void {
+    // apply filter ...
+    const filteredList = this.filterService.filter(...);
+
+    // important: A new instance is created, instead of altering the existing one.
+    //            This makes change detection easier and prevents hard to find bugs.
+    this.state = Object.assign({}, this.state, {
+      filteredItems: filteredList,
+      filter: filter
+    });
+  }
+
+}
+
+
+
+
Note:
+

It is not always necessary to define the model as independent type. +So there would be no state property and just properties for every state defined directly in the component class. +When complexity grows and state becomes larger this is usually a good idea. +If the state should be shared between Smart Components a store is to be used.

+
+
+
+

When are Dumb Components needed

+
+

Sometimes it is not necessary to perform a full decomposition. The architecture does not enforce it generally. What you should keep in mind is, that there is always a point when it becomes recommendable.

+
+
+

For example a template with 800 lines of code is:

+
+
+
    +
  • +

    not understandable

    +
  • +
  • +

    not maintainable

    +
  • +
  • +

    not testable

    +
  • +
  • +

    not reusable

    +
  • +
+
+
+

So when implementing a template with more than 50 lines of code you should think about decomposition.

+
+ +
+
+

Consuming REST services

+
+

A good introduction to working with Angular HttpClient can be found in Angular Docs

+
+
+

This guide will cover, how to embed Angular HttpClient in the application architecture. +For back-end request a special service with the suffix Adapter needs to be defined.

+
+
+
+

Defining Adapters

+
+

It is a good practice to have an Angular service whose single responsibility is to call the back-end and parse the received value to a transfer data model (e.g. Swagger generated TOs). +Those services need to have the suffix Adapter to make them easy to recognize.

+
+
+
+Adapters handle back-end communication +
+
Figure 38. Adapters handle back-end communication
+
+
+

As illustrated in the figure a Use Case service does not use Angular HttpClient directly but uses an adapter. +A basic adapter could look like this:

+
+
+
Listing 34. Example adapter
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Observable } from 'rxjs/Observable';
+
+import { FlightTo } from './flight-to';
+
+@Injectable({
+ providedIn: 'root',
+})
+export class FlightsAdapter {
+
+  constructor(
+    private httpClient: HttpClient
+  ) {}
+
+  getFlights(): Observable<FlightTo> {
+    return this.httpClient.get<FlightTo>('/relative/url/to/flights');
+  }
+
+}
+
+
+
+

The adapters should use a well-defined transfer data model. +This could be generated from server endpoints with CobiGen, Swagger, typescript-maven-plugin, etc. +If inside the application there is a business model defined, the adapter has to parse to the transfer model. +This is illustrated in the following listing.

+
+
+
Listing 35. Example adapter mapping from business model to transfer model
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Observable } from 'rxjs/Observable';
+import { map } from 'rxjs/operators';
+
+import { FlightTo } from './flight-to';
+import { Flight } from '../../../model/flight';
+
+@Injectable({
+ providedIn: 'root',
+})
+export class FlightsAdapter {
+
+  constructor(
+    private httpClient: HttpClient
+  ) {}
+
+  updateFlight(flight: Flight): Observable<Flight> {
+    const to = this.mapFlight(flight);
+
+    return this.httpClient.post<FlightTo>('/relative/url/to/flights', to).pipe(
+      map(to => this.mapFlightTo(to))
+    );
+  }
+
+  private mapFlight(flight: Flight): FlightTo {
+    // mapping logic
+  }
+
+  private mapFlightTo(flightTo: FlightTo): Flight {
+    // mapping logic
+  }
+
+}
+
+
+
+
+

Token management

+
+

In most cases the access to back-end API is secured using well known mechanisms as CSRF, JWT or both. In these cases the front-end application must manage the tokens that are generated when the user authenticates. More concretely it must store them to include them in every request automatically. Obviously, when user logs out these tokens must be removed from localStorage, memory, etc.

+
+
+
+

Store security token

+
+

In order to make this guide simple we are going to store the token in memory. Therefore, if we consider that we already have a login mechanism implemented we would like to store the token using an auth.service.ts:

+
+
+
+
import { Injectable } from '@angular/core';
+import { Router } from '@angular/router';
+
+@Injectable({
+  providedIn: 'root',
+})
+export class AuthService {
+  private loggedIn = false;
+  private token: string;
+
+  constructor(public router: Router) {}
+
+  public isLogged(): boolean {
+    return this.loggedIn || false;
+  }
+
+  public setLogged(login: boolean): void {
+    this.loggedIn = login;
+  }
+
+  public getToken(): string {
+    return this.token;
+  }
+
+  public setToken(token: string): void {
+    this.token = token;
+  }
+}
+
+
+
+

Using the previous service we will be able to store the token obtained in the login request using the method setToken(token). Please consider that, if you want a more sophisticated approach using localStorage API, you will need to modify this service accordingly.

+
+
+
+

Include token in every request

+
+

Now that the token is available in the application it is necessary to include it in every request to a protected API endpoint. Instead of modifying all the HTTP requests in our application, Angular provides a class to intercept every request (and every response if we need to) called HttpInterceptor. Let’s create a service called http-interceptor.service.ts to implement the intercept method of this class:

+
+
+
+
import {
+  HttpEvent,
+  HttpHandler,
+  HttpInterceptor,
+  HttpRequest,
+} from '@angular/common/http';
+import { Injectable } from '@angular/core';
+import { Observable } from 'rxjs';
+import { environment } from '../../../environments/environment';
+import { AuthService } from './auth.service';
+
+@Injectable()
+export class HttpRequestInterceptorService implements HttpInterceptor {
+
+  constructor(private auth: AuthService) {}
+
+  intercept(
+    req: HttpRequest<any>,
+    next: HttpHandler,
+  ): Observable<HttpEvent<any>> {
+    // Get the auth header from the service.
+    const authHeader: string = this.auth.getToken();
+    if (authHeader) {
+      let authReq: HttpRequest<any>;
+
+      // CSRF
+      if (environment.security == 'csrf') {
+        authReq = req.clone({
+          withCredentials: true,
+          setHeaders: { 'x-csrf-token': authHeader },
+        });
+      }
+
+      // JWT
+      if (environment.security == 'jwt') {
+        authReq = req.clone({
+          setHeaders: { Authorization: authHeader },
+        });
+      }
+
+      return next.handle(authReq);
+    } else {
+      return next.handle(req);
+    }
+  }
+}
+
+
+
+

As you may notice, this service is making use of an environment field environment.security to determine if we are using JWT or CSRF in order to inject the token accordingly. In your application you can combine both if necessary.

+
+
+

Configure environment.ts file to use the CSRF/JWT.

+
+
+
+
security: 'csrf'
+
+
+
+

The authHeader used is obtained using the injected service AuthService already presented above.

+
+
+

In order to activate the interceptor we need to provide it in our app.module.ts or core.module.ts depending on the application structure. Let’s assume that we are using the latter and the interceptor file is inside a security folder:

+
+
+
+
...
+import { HttpRequestInterceptorService } from './security/http-request-interceptor.service';
+...
+
+@NgModule({
+  imports: [...],
+  exports: [...],
+  declarations: [],
+  providers: [
+    ...
+    {
+      provide: HTTP_INTERCEPTORS,
+      useClass: HttpRequestInterceptorService,
+      multi: true,
+    },
+  ],
+})
+export class CoreModule {}
+
+
+
+

Angular automatically will now modify every request and include in the header the token if it is convenient.

+
+ +
+
+

Error Handler in angular

+
+

Angular allows us to set up a custom error handler that can be used to control the different errors and handle them in a correct way. Using a global error handler will avoid mistakes and provide a user-friendly interface allowing us to indicate to the user what problem is happening.

+
+
+
+

What is ErrorHandler

+
+

ErrorHandler is the class that Angular uses by default to control the errors. This means that, even if the application doesn’t have an ErrorHandler it is going to use the one setup by default in Angular. This can be tested by trying to find a page not existing in any app, instantly Angular will print the error in the console.

+
+
+
+

Creating your custom ErrorHandler step by step

+
+

In order to create a custom ErrorHandler three steps are going to be needed:

+
+
+
+

Creating the custom ErrorHandler class

+
+

In this first step the custom ErrorHandler class is going to be created inside the folder /app/core/errors/errors-handler.ts:

+
+
+
+
import { ErrorHandler, Injectable, Injector } from '@angular/core';
+import { HttpErrorResponse } from '@angular/common/http';
+
+@Injectable()
+export class ErrorsHandler implements ErrorHandler {
+
+    constructor(private injector: Injector) {}
+
+    handleError(error: Error | HttpErrorResponse) {
+      //  To do: Use injector to get the necessary services to redirect or
+      // show a message to the user
+      const classname  = error.constructor.name;
+      switch ( classname )  {
+        case 'HttpErrorResponse':
+          console.error('HttpError:' + error.message);
+          if (!navigator.onLine) {
+            console.error('There is no internet connection');
+            // To do: control here in internet what you wanna do if user has no internet
+          } else {
+            console.error('Server Error:' + error.message);
+            // To do: control here if the server gave an error
+          }
+          break;
+        default:
+          console.error('Error:' + error.message);
+          // To do: control here if the client/other things gave an error
+      }
+    }
+}
+
+
+
+

This class can be used to control the different type of errors. If wanted, the classname variable could be used to add more switch cases. This would allow control of more specific situations.

+
+
+
+

Creating an ErrorInterceptor

+
+

Inside the same folder created in the last step we are going to create the ErrorInterceptor (errors-handler-interceptor.ts). This ErrorInterceptor is going to retry any failed calls to the server, to make sure the failure is not transient, before showing the error:

+
+
+
+
import { HttpInterceptor, HttpRequest, HttpHandler, HttpEvent } from '@angular/common/http';
+import { Injectable } from '@angular/core';
+import { Observable, of } from 'rxjs';
+import { retryWhen, delay, take, concatMap } from 'rxjs/operators';
+
+@Injectable()
+export class ErrorsHandlerInterceptor implements HttpInterceptor {
+
+    constructor() {}
+    intercept(req: HttpRequest<any>, next: HttpHandler): Observable<HttpEvent<any>> {
+        return next.handle(req).pipe(
+            retryWhen((errors: Observable<any>) => errors.pipe(
+                delay(500),
+                take(5),
+                concatMap((error: any, retryIndex: number) => {
+                    if (++retryIndex == 5) {
+                        throw error;
+                    }
+                    return of(error);
+                })
+            ))
+        );
+    }
+}
+
+
+
+

This custom made interceptor is implementing the HttpInterceptor and, inside the intercept method, using the pipe, retryWhen, delay, take and concatMap operators from RxJS, it is going to do the following things if there are errors:

+
+
+
    +
  1. +

    With delay(500) do a delay to allow some time in between requests

    +
  2. +
  3. +

    With take(5) retry five times.

    +
  4. +
  5. +

    With concatMap if the index that take() gives is not 5 it returns the error, else, it throws the error.

    +
  6. +
+
+
+
+

Creating an Error Module

+
+

Finally, creating a module(errors-handler.module.ts) is necessary to include the interceptor and the custom error handler. In this case, the module is going to be created in the same folder as the last two:

+
+
+
+
import { NgModule, ErrorHandler } from '@angular/core';
+import { CommonModule } from '@angular/common';
+import { ErrorsHandler } from './errors-handler';
+import { HTTP_INTERCEPTORS } from '@angular/common/http';
+import { ErrorsHandlerInterceptor } from './errors-handler-interceptor';
+
+@NgModule({
+  declarations: [], // Declare here component if you want to use routing to error component
+  imports: [
+    CommonModule
+  ],
+  providers: [
+    {
+      provide: ErrorHandler,
+      useClass: ErrorsHandler,
+    },
+    {
+      provide: HTTP_INTERCEPTORS,
+      useClass: ErrorsHandlerInterceptor,
+      multi: true,
+    }
+  ]
+})
+export class ErrorsHandlerModule { }
+
+
+
+

This module simply is providing the services that are implemented by our custom classes and then telling Angular to use our custom made classes instead of the default ones. After doing this, the module has to be included in the app module app.module.ts in order to be used.

+
+
+
+
....
+  imports: [
+    ErrorsHandlerModule,
+    ....
+
+
+
+
+

Handling Errors

+
+

As a final step, handling these errors is necessary. There are different ways that can be used to control the errors, here are a few:

+
+
+
    +
  • +

    Creating a custom page and using with Router to redirect to a page showing an error.

    +
  • +
  • +

    Creating a service in the server side or Backend to create a log with the error and calling it with HttpClient.

    +
  • +
  • +

    Showing a custom made SnackBar with the error message.

    +
  • +
+
+
+
+

== Using SnackBarService and NgZone

+
+

If the SnackBar is used directly, some errors can occur, this is due to SnackBar being out of the Angular zone. In order to use this service properly, NgZone is necessary. The method run() from NgZone will allow the service to be inside the Angular Zone. An example on how to use it:

+
+
+
+
import { ErrorHandler, Injectable, Injector, NgZone } from '@angular/core';
+import { HttpErrorResponse } from '@angular/common/http';
+import { MatSnackBar } from '@angular/material';
+
+@Injectable()
+export class ErrorsHandler implements ErrorHandler {
+
+    constructor(private injector: Injector, private zone: NgZone) {}
+
+    handleError(error: Error | HttpErrorResponse) {
+      // Use injector to get the necessary services to redirect or
+      const snackBar: MatSnackBar = this.injector.get(MatSnackBar);
+      const classname  = error.constructor.name;
+      let message: string;
+      switch ( classname )  {
+        case 'HttpErrorResponse':
+          message = !(navigator.onLine) ? 'There is no internet connection' : error.message;
+          break;
+        default:
+          message = error.message;
+      }
+      this.zone.run(
+        () => snackBar.open(message, 'danger', { duration : 4000})
+      );
+    }
+}
+
+
+
+

Using Injector the MatSnackBar is obtained, then the correct message is obtained inside the switch. Finally, using NgZone and run(), we open the SnackBar passing the message, and the parameters wanted.

+
+
+

You can find a working example of this guide in devon4ts-samples.

+
+ +
+
+

File Structure

+ +
+
+

Top-level

+
+

The top-level file structure is defined by Angular CLI. You might put this "top-level file structure" into a sub-directory to facilitate your build, but this is not relevant for this guide. So the applications file structure relevant to this guide is the folder /src/app inside the part managed by Angular CLI.

+
+
+
Listing 36. Top-level file structure shows feature modules
+
+
    /src
+    └── /app
+        ├── /account-management
+        ├── /billing
+        ├── /booking
+        ├── /core
+        ├── /shared
+        ├── /status
+        |
+        ├── app.module.ts
+        ├── app.component.spec.ts
+        ├── app.component.ts
+        └── app.routing-module.ts
+
+
+
+

Besides the definition of app module the app folder has feature modules on top-level. +The special modules shared and core are present as well.

+
+
+
+

Feature Modules

+
+

A feature module contains the modules definition and two folders representing both layers.

+
+
+
Listing 37. Feature module file structure has both layers
+
+
    /src
+    └── /app
+        └── /account-management
+            ├── /components
+            ├── /services
+            |
+            ├── account-management.module.ts
+            ├── account-management.component.spec.ts
+            ├── account-management.component.ts
+            └── account-management.routing-module.ts
+
+
+
+

Additionally an entry component is possible. This would be the case in lazy loading scenarios. +So account-management.component.ts would be only present if account-management is lazy loaded. +Otherwise, the module’s routes would be defined Component-less +(see vsavkin blog post).

+
+
+
+

Components Layer

+
+

The component layer reflects the distinction between Smart Components and Dumb Components.

+
+
+
Listing 38. Components layer file structure shows Smart Components on top-level
+
+
    /src
+    └── /app
+        └── /account-management
+            └── /components
+                ├── /account-overview
+                ├── /confirm-modal
+                ├── /create-account
+                ├── /forgot-password
+                └── /shared
+
+
+
+

Every folder inside the /components folder represents a smart component. The only exception is /shared. +/shared contains Dumb Components shared across Smart Components inside the components layer.

+
+
+
Listing 39. Smart components contain Dumb components
+
+
    /src
+    └── /app
+        └── /account-management
+            └── /components
+                └── /account-overview
+                    ├── /user-info-panel
+                    |   ├── /address-tab
+                    |   ├── /last-activities-tab
+                    |   |
+                    |   ├── user-info-panel.component.html
+                    |   ├── user-info-panel.component.scss
+                    |   ├── user-info-panel.component.spec.ts
+                    |   └── user-info-panel.component.ts
+                    |
+                    ├── /user-header
+                    ├── /user-toolbar
+                    |
+                    ├── account-overview.component.html
+                    ├── account-overview.component.scss
+                    ├── account-overview.component.spec.ts
+                    └── account-overview.component.ts
+
+
+
+

Inside the folder of a Smart Component the component is defined. +Besides that are folders containing the Dumb Components the Smart Component consists of. +This can be recursive - a Dumb Component can consist of other Dumb Components. +This is reflected by the file structure as well. This way the structure of a view becomes very readable. +As mentioned before, if a Dumb Component is used by multiple Smart Components inside the components layer +it is put inside the /shared folder inside the components layer.

+
+
+

With this way of thinking the shared module makes a lot of sense. If a Dumb Component is used by multiple Smart Components +from different feature modules, the Dumb Component is placed into the shared module.

+
+
+
Listing 40. The shared module contains Dumb Components shared across Smart Components from different feature modules
+
+
    /src
+    └── /app
+        └── /shared
+            └── /user-panel
+                |
+                ├── user-panel.component.html
+                ├── user-panel.component.scss
+                ├── user-panel.component.spec.ts
+                └── user-panel.component.ts
+
+
+
+

The layer folder /components is not necessary inside the shared module. +The shared module only contains components!

+
+ +
+
+

Internationalization

+
+

Nowadays, a common scenario in front-end applications is to have the ability to translate labels and localize numbers, dates, currency and so on when the user clicks over a language selector or similar. devon4ng and specifically Angular has a default mechanism in order to fill the gap of such features, and besides there are some widely used libraries that make it even easier to translate applications.

+
+ +
+
+

devon4ng i18n approach

+
+

The official approach could be a bit complicated, therefore the recommended one is to use the Transloco library from https://github.com/ngneat/transloco/.

+
+
+
+

Install and configure Transloco

+
+

In order to include this library in your devon4ng Angular >= 7.2 project you will need to execute in a terminal:

+
+
+
+
$ ng add @ngneat/transloco
+
+
+
+

As part of the installation process you’ll be presented with questions; Once you answer them, everything you need will automatically be created for you.

+
+
+
    +
  • +

    First, Transloco creates boilerplate files for the requested translations.

    +
  • +
  • +

    Next, it will create a new file, transloco-root.module.ts which exposes an Angular’s module with a default configuration, and inject it into the AppModule.

    +
  • +
+
+
+
+
import { HttpClient } from '@angular/common/http';
+import {
+  TRANSLOCO_LOADER,
+  Translation,
+  TranslocoLoader,
+  TRANSLOCO_CONFIG,
+  translocoConfig,
+  TranslocoModule
+} from '@ngneat/transloco';
+import { Injectable, NgModule } from '@angular/core';
+import { environment } from '../environments/environment';
+
+@Injectable({ providedIn: 'root' })
+export class TranslocoHttpLoader implements TranslocoLoader {
+  constructor(private http: HttpClient) {}
+
+  getTranslation(lang: string) {
+    return this.http.get<Translation>(`/assets/i18n/${lang}.json`);
+  }
+}
+
+@NgModule({
+  exports: [ TranslocoModule ],
+  providers: [
+    {
+      provide: TRANSLOCO_CONFIG,
+      useValue: translocoConfig({
+        availableLangs: ['en', 'es'],
+        defaultLang: 'en',
+        // Remove this option if your application doesn't support changing language in runtime.
+        reRenderOnLangChange: true,
+        prodMode: environment.production,
+      })
+    },
+    { provide: TRANSLOCO_LOADER, useClass: TranslocoHttpLoader }
+  ]
+})
+export class TranslocoRootModule {}
+
+
+
+ + + + + +
+ + +As you might have noticed it also set an HttpLoader into the module’s providers. The HttpLoader is a class that implements the TranslocoLoader interface. It’s responsible for instructing Transloco how to load the translation files. It uses Angular HTTP client to fetch the files, based on the given path. +
+
+
+
+

Usage

+
+

In order to translate any label in any HTML template you will need to use the transloco pipe available:

+
+
+
+
{{ 'HELLO' | transloco }}
+
+
+
+

An optional parameter from the component TypeScript class could be included as follows:

+
+
+
+
{{ 'HELLO' | transloco: { value: dynamic } }}
+
+
+
+

It is possible to use with inputs:

+
+
+
+
<span [attr.alt]="'hello' | transloco">Attribute</span>
+<span [title]="'hello' | transloco">Property</span>
+
+
+
+

In order to change the language used you will need to create a button or selector that calls the translocoService.setActiveLang(lang: string) method from TranslocoService. For example:

+
+
+
+
export class AppComponent {
+  constructor(private translocoService: TranslocoService) {}
+
+  changeLanguage(lang) {
+      this.translocoService.setActiveLang(lang);
+  }
+}
+
+
+
+

The translations will be included in the en.json, es.json, de.json, etc. files inside the /assets/i18n folder. For example en.json would be (using the previous parameter):

+
+
+
+
{
+    "HELLO": "hello"
+}
+
+
+
+

Or with an optional parameter:

+
+
+
+
{
+    "HELLO": "hello {{value}}"
+}
+
+
+
+

Transloco understands nested JSON objects. This means that you can have a translation that looks like this:

+
+
+
+
{
+    "HOME": {
+        "HELLO": "hello {{value}}"
+    }
+}
+
+
+
+

In order to access the value, use the dot notation, in this case HOME.HELLO.

+
+
+
+

Using the service, pipe or directive

+ +
+
+

== Structural Directive

+
+

Using a structural directive is the recommended approach. It’s DRY and efficient, as it creates one subscription per template:

+
+
+
+
<ng-container *transloco="let t">
+  <p>{{ t('title') }}</p>
+
+  <comp [title]="t('title')"></comp>
+</ng-container>
+
+
+
+

Note that the t function is memoized. It means that given the same key it will return the result directly from the cache.

+
+
+

We can pass a params object as the second parameter:

+
+
+
+
<ng-container *transloco="let t">
+  <p>{{ t('name', { name: 'Transloco' }) }}</p>
+</ng-container>
+
+
+
+

We can instruct the directive to use a different language in our template:

+
+
+
+
<ng-container *transloco="let t; lang: 'es'">
+  <p>{{ t('title') }}</p>
+</ng-container>
+
+
+
+
+

== Pipe

+
+

The use of pipes can be possible too:

+
+
+

template:

+
+
+
+
<div>{{ 'HELLO' | transloco:param }}</div>
+
+
+
+

component:

+
+
+
+
param = {value: 'world'};
+
+
+
+
+

== Attribute Directive

+
+

The last option available with transloco is the attribute directive:

+
+
+
+
<div transloco="HELLO" [translocoParams]="{ value: 'world' }"></div>
+
+
+
+
+

== Service

+
+

If you need to access translations in any component or service you can do it injecting the TranslocoService into them:

+
+
+
+
// Sync translation
+translocoService.translate('HELLO', {value: 'world'});
+
+// Async translation
+translocoService.selectTranslate('HELLO', { value: 'world' }).subscribe(res => {
+    console.log(res);
+    //=> 'hello world'
+});
+
+
+
+ + + + + +
+ + +You can find a complete example at https://github.com/devonfw/devon4ng-application-template. +
+
+
+

Please, visit https://github.com/ngneat/transloco/ for more info.

+
+ +
+
+

Routing

+
+

A basic introduction to the Angular Router can be found in Angular Docs.

+
+
+

This guide will show common tasks and best practices.

+
+
+
+

Defining Routes

+
+

For each feature module and the app module all routes should be defined in a separate module with the suffix RoutingModule. +This way the routing modules are the only place where routes are defined. +This pattern achieves a clear separation of concerns. +The following figure illustrates this.

+
+
+
+Routing module declaration +
+
Figure 39. Routing module declaration
+
+
+

It is important to define routes inside app routing module with .forRoot() and in feature routing modules with .forChild().

+
+
+
+

Example 1 - No Lazy Loading

+
+

In this example two modules need to be configured with routes - AppModule and FlightModule.

+
+
+

The following routes will be configured

+
+
+
    +
  • +

    / will redirect to /search

    +
  • +
  • +

    /search displays FlightSearchComponent (FlightModule)

    +
  • +
  • +

    /search/print/:flightId/:date displays FlightPrintComponent (FlightModule)

    +
  • +
  • +

    /search/details/:flightId/:date displays FlightDetailsComponent (FlightModule)

    +
  • +
  • +

    All other routes will display ErrorPage404 (AppModule)

    +
  • +
+
+
+
Listing 41. app-routing.module.ts
+
+
const routes: Routes = [
+  { path: '', redirectTo: 'search', pathMatch: 'full' },
+  { path: '**', component: ErrorPage404 }
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule]
+})
+export class AppRoutingModule { }
+
+
+
+
Listing 42. flight-search-routing.module.ts
+
+
const routes: Routes = [
+  {
+    path: 'search', children: [
+      { path: '', component: FlightSearchComponent },
+      { path: 'print/:flightId/:date', component: FlightPrintComponent },
+      { path: 'details/:flightId/:date', component: FlightDetailsComponent }
+    ]
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule],
+})
+export class FlightSearchRoutingModule { }
+
+
+
+ + + + + +
+ + +The import order inside AppModule is important. +AppRoutingModule needs to be imported after FlightModule. +
+
+
+
+

Example 2 - Lazy Loading

+
+

Lazy Loading is a good practice when the application has multiple feature areas and a user might not visit every dialog. +Or at least he might not need every dialog up front.

+
+
+

The following example will configure the same routes as example 1 but will lazy load FlightModule.

+
+
+
Listing 43. app-routing.module.ts
+
+
const routes: Routes = [
+  { path: 'search', loadChildren: 'app/flight-search/flight-search.module#FlightSearchModule' },
+  { path: '**', component: ErrorPage404 }
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule]
+})
+export class AppRoutingModule { }
+
+
+
+
Listing 44. flight-search-routing.module.ts
+
+
const routes: Routes = [
+  {
+    path: '', children: [
+      { path: '', component: FlightSearchComponent },
+      { path: 'print/:flightId/:date', component: FlightPrintComponent },
+      { path: 'details/:flightId/:date', component: FlightDetailsComponent }
+    ]
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule],
+})
+export class FlightSearchRoutingModule { }
+
+
+
+
+

Triggering Route Changes

+
+

With Angular you have two ways of triggering route changes.

+
+
+
    +
  1. +

    Declarative with bindings in component HTML templates

    +
  2. +
  3. +

    Programmatic with Angular Router service inside component classes

    +
  4. +
+
+
+

On the one hand, architecture-wise it is a much cleaner solution to trigger route changes in Smart Components. +This way you have every UI event that should trigger a navigation handled in one place - in a Smart Component. +It becomes very easy to look inside the code for every navigation, that can occur. +Refactoring is also much easier, as there are no navigation events "hidden" in the HTML templates

+
+
+

On the other hand, in terms of accessibility and SEO it is a better solution to rely on bindings in the view - e.g. by using Angular router-link directive. +This way screen readers and the Google crawler can move through the page easily.

+
+
+ + + + + +
+ + +If you do not have to support accessibility (screen readers, etc.) and to care about SEO (Google rank, etc.), +then you should aim for triggering navigation only in Smart Components. +
+
+
+
+Triggering navigation +
+
Figure 40. Triggering navigation
+
+
+
+

Guards

+
+

Guards are Angular services implemented on routes which determines whether a user can navigate to/from the route. There are examples below which will explain things better. We have the following types of Guards:

+
+
+
    +
  • +

    CanActivate: It is used to determine whether a user can visit a route. The most common scenario for this guard is to check if the user is authenticated. For example, if we want only logged in users to be able to go to a particular route, we will implement the CanActivate guard on this route.

    +
  • +
  • +

    CanActivateChild: Same as above, only implemented on child routes.

    +
  • +
  • +

    CanDeactivate: It is used to determine if a user can navigate away from a route. Most common example is when a user tries to go to a different page after filling up a form and does not save/submit the changes, we can use this guard to confirm whether the user really wants to leave the page without saving/submitting.

    +
  • +
  • +

    Resolve: For resolving dynamic data.

    +
  • +
  • +

    CanLoad: It is used to determine whether an Angular module can be loaded lazily. Example below will be helpful to understand it.

    +
  • +
+
+
+

Let’s have a look at some examples.

+
+
+
+

Example 1 - CanActivate and CanActivateChild guards

+ +
+
+

== CanActivate guard

+
+

As mentioned earlier, a guard is an Angular service and services are simply TypeScript classes. So we begin by creating a class. This class has to implement the CanActivate interface (imported from angular/router), and therefore, must have a canActivate function. The logic of this function determines whether the requested route can be navigated to or not. It returns either a Boolean value or an Observable or a Promise which resolves to a Boolean value. If it is true, the route is loaded, else not.

+
+
+
Listing 45. CanActivate example
+
+
...
+import {CanActivate} from "@angular/router";
+
+@Injectable()
+class ExampleAuthGuard implements CanActivate {
+  constructor(private authService: AuthService) {}
+
+  canActivate(route: ActivatedRouteSnapshot, state: RouterStateSnapshot) {
+	if (this.authService.isLoggedIn()) {
+      return true;
+    } else {
+	  window.alert('Please log in first');
+      return false;
+    }
+  }
+}
+
+
+
+

In the above example, let’s assume we have an AuthService which has an isLoggedIn() method which returns a Boolean value depending on whether the user is logged in. We use it to return true or false from the canActivate function. +The canActivate function accepts two parameters (provided by Angular). The first parameter of type ActivatedRouteSnapshot is the snapshot of the route the user is trying to navigate to (where the guard is implemented); we can extract the route parameters from this instance. The second parameter of type RouterStateSnapshot is a snapshot of the router state the user is trying to navigate to; we can fetch the URL from its url property.

+
+
+ + + + + +
+ + +We can also redirect the user to another page (maybe a login page) if the authService returns false. To do that, inject Router and use it’s navigate function to redirect to the appropriate page. +
+
+
+

Since it is a service, it needs to be provided in our module:

+
+
+
Listing 46. provide the guard in a module
+
+
@NgModule({
+  ...
+  providers: [
+    ...
+    ExampleAuthGuard
+  ]
+})
+
+
+
+

Now this guard is ready to use on our routes. We implement it where we define our array of routes in the application:

+
+
+
Listing 47. Implementing the guard
+
+
...
+const routes: Routes = [
+  { path: '', redirectTo: 'home', pathMatch: 'full' },
+  { path: 'home', component: HomeComponent },
+  { path: 'page1', component: Page1Component, canActivate: [ExampleAuthGuard] }
+];
+
+
+
+

As you can see, the canActivate property accepts an array of guards. So we can implement more than one guard on a route.

+
+
+
+

== CanActivateChild guard

+
+

To use the guard on nested (children) routes, we add it to the canActivateChild property like so:

+
+
+
Listing 48. Implementing the guard on child routes
+
+
...
+const routes: Routes = [
+  { path: '', redirectTo: 'home', pathMatch: 'full' },
+  { path: 'home', component: HomeComponent },
+  { path: 'page1', component: Page1Component, canActivateChild: [ExampleAuthGuard], children: [
+	{path: 'sub-page1', component: SubPageComponent},
+    {path: 'sub-page2', component: SubPageComponent}
+  ] }
+];
+
+
+
+
+

Example 2 - CanLoad guard

+
+

Similar to CanActivate, to use this guard we implement the CanLoad interface and override its canLoad function. Again, this function returns either a Boolean value or an Observable or a Promise which resolves to a Boolean value. The fundamental difference between CanActivate and CanLoad is that CanLoad is used to determine whether an entire module can be lazily loaded or not. If the guard returns false for a module protected by CanLoad, the entire module is not loaded.

+
+
+
Listing 49. CanLoad example
+
+
...
+import {CanLoad, Route} from "@angular/router";
+
+@Injectable()
+class ExampleCanLoadGuard implements CanLoad {
+  constructor(private authService: AuthService) {}
+
+  canLoad(route: Route) {
+	if (this.authService.isLoggedIn()) {
+      return true;
+    } else {
+	  window.alert('Please log in first');
+      return false;
+    }
+  }
+}
+
+
+
+

Again, let’s assume we have an AuthService which has an isLoggedIn() method which returns a Boolean value depending on whether the user is logged in. The canLoad function accepts a parameter of type Route which we can use to fetch the path a user is trying to navigate to (using the path property of Route).

+
+
+

This guard needs to be provided in our module like any other service.

+
+
+

To implement the guard, we use the canLoad property:

+
+
+
Listing 50. Implementing the guard
+
+
...
+const routes: Routes = [
+  { path: 'home', component: HomeComponent },
+  { path: 'admin', loadChildren: 'app/admin/admin.module#AdminModule', canLoad: [ExampleCanLoadGuard] }
+];
+
+
+ +
+
+

Testing

+
+

This guide will cover the basics of testing logic inside your code with unit test cases. +The guide assumes that you are familiar with Angular CLI (see the guide)

+
+
+

For testing your Angular application with unit test cases there are two main strategies:

+
+
+
    +
  1. +

    Isolated unit test cases
    +Isolated unit tests examine an instance of a class all by itself without any dependence on Angular or any injected values. +The amount of code and effort needed to create such tests is minimal.

    +
  2. +
  3. +

    Angular Testing Utilities
    +Let you test components including their interaction with Angular. +The amount of code and effort needed to create such tests is a little higher.

    +
  4. +
+
+
+
+

Testing Concept

+
+

The following figure shows you an overview of the application architecture divided in testing areas.

+
+
+
+Testing Areas +
+
Figure 41. Testing Areas
+
+
+

There are three areas, which need to be covered by different testing strategies.

+
+
+
    +
  1. +

    Components:
    +Smart Components need to be tested because they contain view logic. +Also the interaction with 3rd party components needs to be tested. +When a 3rd party component changes with an upgrade a test will be failing and warn you, that there is something wrong with the new version. +Most of the time Dumb Components do not need to be tested because they mainly display data and do not contain any logic. +Smart Components are always tested with Angular Testing Utilities. +For example selectors, which select data from the store and transform it further, need to be tested.

    +
  2. +
  3. +

    Stores:
    +A store contains methods representing state transitions. +If these methods contain logic, they need to be tested. +Stores are always tested using Isolated unit tests.

    +
  4. +
  5. +

    Services:
    +Services contain Business Logic, which needs to be tested. +UseCase Services represent a whole business use case. +For instance this could be initializing a store with all the data that is needed for a dialog - loading, transforming, storing. +Often Angular Testing Utilities are the optimal solution for testing UseCase Services, because they allow for an easy stubbing of the back-end. +All other services should be tested with Isolated unit tests as they are much easier to write and maintain.

    +
  6. +
+
+
+
+

Testing Smart Components

+
+

Testing Smart Components should assure the following.

+
+
+
    +
  1. +

    Bindings are correct.

    +
  2. +
  3. +

    Selectors which load data from the store are correct.

    +
  4. +
  5. +

    Asynchronous behavior is correct (loading state, error state, "normal" state).

    +
  6. +
  7. +

    Oftentimes through testing one realizes, that important edge cases are forgotten.

    +
  8. +
  9. +

    Do these test become very complex, it is often an indicator for poor code quality in the component. +Then the implementation is to be adjusted / refactored.

    +
  10. +
  11. +

    When testing values received from the native DOM, you will test also that 3rd party libraries did not change with a version upgrade. +A failing test will show you what part of a 3rd party library has changed. +This is much better than the users doing this for you. +For example a binding might fail because the property name was changed with a newer version of a 3rd party library.

    +
  12. +
+
+
+

In the function beforeEach() the TestBed imported from Angular Testing Utilities needs to be initialized. +The goal should be to define a minimal test-module with TestBed. +The following code gives you an example.

+
+
+
Listing 51. Example test setup for Smart Components
+
+
describe('PrintFlightComponent', () => {
+
+  let fixture: ComponentFixture<PrintFlightComponent>;
+  let store: FlightStore;
+  let printServiceSpy: jasmine.SpyObj<FlightPrintService>;
+
+  beforeEach(() => {
+    const urlParam = '1337';
+    const activatedRouteStub = { params: of({ id: urlParam }) };
+    printServiceSpy = jasmine.createSpyObj('FlightPrintService', ['initializePrintDialog']);
+    TestBed.configureTestingModule({
+      imports: [
+        TranslateModule.forRoot(),
+        RouterTestingModule
+      ],
+      declarations: [
+        PrintFlightComponent,
+        PrintContentComponent,
+        GeneralInformationPrintPanelComponent,
+        PassengersPrintPanelComponent
+      ],
+      providers: [
+        FlightStore,
+        {provide: FlightPrintService, useValue: printServiceSpy},
+        {provide: ActivatedRoute, useValue: activatedRouteStub}
+      ]
+    });
+    fixture = TestBed.createComponent(PrintFlightComponent);
+    store = fixture.debugElement.injector.get(FlightStore);
+    fixture.detectChanges();
+  });
+
+  // ... test cases
+})
+
+
+
+

It is important:

+
+
+
    +
  • +

    Use RouterTestingModule instead of RouterModule

    +
  • +
  • +

    Use TranslateModule.forRoot() without translations +This way you can test language-neutral without translation marks.

    +
  • +
  • +

    Do not add a whole module from your application - in declarations add the tested Smart Component with all its Dumb Components

    +
  • +
  • +

    The store should never be stubbed. +If you need a complex test setup, just use the regular methods defined on the store.

    +
  • +
  • +

    Stub all services used by the Smart Component. +These are mostly UseCase services. +They should not be tested by these tests. +Only the correct call to their functions should be assured. +The logic inside the UseCase services is tested with separate tests.

    +
  • +
  • +

    detectChanges() performs an Angular Change Detection cycle (Angular refreshes all the bindings present in the view)

    +
  • +
  • +

    tick() performs a virtual macro task, tick(1000) is equal to the virtual passing of 1s.

    +
  • +
+
+
+

The following test cases show the testing strategy in action.

+
+
+
Listing 52. Example
+
+
it('calls initializePrintDialog for url parameter 1337', fakeAsync(() => {
+  expect(printServiceSpy.initializePrintDialog).toHaveBeenCalledWith(1337);
+}));
+
+it('creates correct loading subtitle', fakeAsync(() => {
+  store.setPrintStateLoading(123);
+  tick();
+  fixture.detectChanges();
+
+  const subtitle = fixture.debugElement.query(By.css('app-header-element .print-header-container span:last-child'));
+  expect(subtitle.nativeElement.textContent).toBe('PRINT_HEADER.FLIGHT STATE.IS_LOADING');
+}));
+
+it('creates correct subtitle for loaded flight', fakeAsync(() => {
+  store.setPrintStateLoadedSuccess({
+    id: 123,
+    description: 'Description',
+    iata: 'FRA',
+    name: 'Frankfurt',
+    // ...
+  });
+  tick();
+  fixture.detectChanges();
+
+  const subtitle = fixture.debugElement.query(By.css('app-header-element .print-header-container span:last-child'));
+  expect(subtitle.nativeElement.textContent).toBe('PRINT_HEADER.FLIGHT "FRA (Frankfurt)" (ID: 123)');
+}));
+
+
+
+

The examples show the basic testing method

+
+
+
    +
  • +

    Set the store to a well-defined state

    +
  • +
  • +

    check if the component displays the correct values

    +
  • +
  • +

    …​ via checking values inside the native DOM.

    +
  • +
+
+
+
+

Testing state transitions performed by stores

+
+

Stores are always tested with Isolated unit tests.

+
+
+

Actions triggered by dispatchAction() calls are asynchronously performed to alter the state. +A good solution to test such a state transition is to use the done callback from Jasmine.

+
+
+
Listing 53. Example for testing a store
+
+
let sut: FlightStore;
+
+beforeEach(() => {
+  sut = new FlightStore();
+});
+
+it('setPrintStateLoading sets print state to loading', (done: Function) => {
+  sut.setPrintStateLoading(4711);
+
+  sut.state$.pipe(first()).subscribe(result => {
+    expect(result.print.isLoading).toBe(true);
+    expect(result.print.loadingId).toBe(4711);
+    done();
+  });
+});
+
+it('toggleRowChecked adds flight with given id to selectedValues Property', (done: Function) => {
+  const flight: FlightTO = {
+    id: 12
+    // dummy data
+  };
+  sut.setRegisterabgleichListe([flight]);
+  sut.toggleRowChecked(12);
+
+  sut.state$.pipe(first()).subscribe(result => {
+    expect(result.selectedValues).toContain(flight);
+    done();
+  });
+});
+
+
+
+
+

Testing services

+
+

When testing services both strategies - Isolated unit tests and Angular Testing Utilities - are valid options.

+
+
+

The goal of such tests are

+
+
+
    +
  • +

    assuring the behavior for valid data.

    +
  • +
  • +

    assuring the behavior for invalid data.

    +
  • +
  • +

    documenting functionality

    +
  • +
  • +

    safely performing refactoring

    +
  • +
  • +

    thinking about edge case behavior while testing

    +
  • +
+
+
+

For simple services Isolated unit tests can be written. +Writing these tests takes less effort and they can be written very fast.

+
+
+

The following listing gives an example of such tests.

+
+
+
Listing 54. Testing a simple services with Isolated unit tests
+
+
let sut: IsyDatePipe;
+
+beforeEach(() => {
+  sut = new IsyDatePipe();
+});
+
+it('transform should return empty string if input value is empty', () => {
+  expect(sut.transform('')).toBe('');
+});
+
+it('transform should return empty string if input value is null', () => {
+  expect(sut.transform(undefined)).toBe('');
+});
+
+// ...more tests
+
+
+
+

For testing Use Case services the Angular Testing Utilities should be used. +The following listing gives an example.

+
+
+
Listing 55. Test setup for testing use case services with Angular Testing Utilities
+
+
let sut: FlightPrintService;
+let store: FlightStore;
+let httpController: HttpTestingController;
+let flightCalculationServiceStub: jasmine.SpyObj<FlightCalculationService>;
+const flight: FlightTo = {
+  // ... valid dummy data
+};
+
+beforeEach(() => {
+  flightCalculationServiceStub = jasmine.createSpyObj('FlightCalculationService', ['getFlightType']);
+  flightCalculationServiceStub.getFlightType.and.callFake((catalog: string, type: string, key: string) => of(`${key}_long`));
+  TestBed.configureTestingModule({
+    imports: [
+      HttpClientTestingModule,
+      RouterTestingModule,
+    ],
+    providers: [
+      FlightPrintService,
+      FlightStore,
+      FlightAdapter,
+      {provide: FlightCalculationService, useValue: flightCalculationServiceStub}
+    ]
+  });
+
+  sut = TestBed.get(FlightPrintService);
+  store = TestBed.get(FlightStore);
+  httpController = TestBed.get(HttpTestingController);
+});
+
+
+
+

When using TestBed, it is important

+
+
+
    +
  • +

    to import HttpClientTestingModule for stubbing the back-end

    +
  • +
  • +

    to import RouterTestingModule for stubbing the Angular router

    +
  • +
  • +

    not to stub stores, adapters and business services

    +
  • +
  • +

    to stub services from libraries like FlightCalculationService - the correct implementation of libraries should not be tested by these tests.

    +
  • +
+
+
+

Testing back-end communication looks like this:

+
+
+
Listing 56. Testing back-end communication with Angular HttpTestingController
+
+
it('loads flight if not present in store', fakeAsync(() => {
+  sut.initializePrintDialog(1337);
+  const processRequest = httpController.expectOne('/path/to/flight');
+  processRequest.flush(flight);
+
+  httpController.verify();
+}));
+
+it('does not load flight if present in store', fakeAsync(() => {
+  const flight = {...flight, id: 4711};
+  store.setRegisterabgleich(flight);
+
+  sut.initializePrintDialog(4711);
+  httpController.expectNone('/path/to/flight');
+
+  httpController.verify();
+}));
+
+
+
+

The first test assures a correct XHR request is performed if initializePrintDialog() is called and no data is in the store. +The second test assures no XHR request is performed if the needed data is already in the store.

+
+
+

The next steps are checks for the correct implementation of logic.

+
+
+
Listing 57. Example testing a Use Case service
+
+
it('creates flight destination for valid key in svz', fakeAsync(() => {
+  const flightTo: FlightTo = {
+    ...flight,
+    id: 4712,
+    profile: '77'
+  };
+  store.setFlight(flightTo);
+  let result: FlightPrintContent|undefined;
+
+  sut.initializePrintDialog(4712);
+  store.select(s => s.print.content).subscribe(content => result = content);
+  tick();
+
+  expect(result!.destination).toBe('77_long (ID: 77)');
+}));
+
+
+ +
+
+

Update Angular CLI

+ +
+
+

Angular CLI common issues

+
+

There are constant updates for the official Angular framework dependencies. These dependencies are directly related with the Angular CLI package. Since this package comes installed by default inside the devonfw distribution folder for Windows OS and the distribution is updated every few months it needs to be updated in order to avoid known issues.

+
+
+
+

Angular CLI update guide

+
+

For Linux users is as easy as updating the global package:

+
+
+
+
$ npm uninstall -g @angular/cli
+$ npm install -g @angular/cli
+
+
+
+

For Windows users the process is only a bit harder. Open the devonfw bundled console and do as follows:

+
+
+
+
$ cd [devonfw_dist_folder]
+$ cd software/nodejs
+$ npm uninstall @angular/cli --no-save
+$ npm install @angular/cli --no-save
+
+
+
+

After following these steps you should have the latest Angular CLI version installed in your system. In order to check it run in the distribution console:

+
+
+ + + + + +
+ + +At the time of this writing, the Angular CLI is at 1.7.4 version. +
+
+
+
+
λ ng version
+
+     _                      _                 ____ _     ___
+    / \   _ __   __ _ _   _| | __ _ _ __     / ___| |   |_ _|
+   / △ \ | '_ \ / _` | | | | |/ _` | '__|   | |   | |    | |
+  / ___ \| | | | (_| | |_| | | (_| | |      | |___| |___ | |
+ /_/   \_\_| |_|\__, |\__,_|_|\__,_|_|       \____|_____|___|
+                |___/
+
+
+Angular CLI: 7.2.3
+Node: 10.13.0
+OS: win32 x64
+Angular:
+...
+
+
+ +
+
+

Upgrade devon4ng Angular and Ionic/Angular applications

+
+

Angular CLI provides a powerful tool to upgrade Angular based applications to the current stable release of the core framework.

+
+
+

This tool is ng update. It will not only upgrade dependencies and their related ones but also will perform some fixes in your code if available thanks to the provided schematics. It will check even if the update is not possible as there is another library or libraries that are not compatible with the versions of the upgraded dependencies. In this case it will keep your application untouched.

+
+
+ + + + + +
+ + +The repository must be in a clean state before executing an ng update. So, remember to commit your changes first. +
+
+
+
+

Basic usage

+
+

In order to perform a basic upgrade we will execute:

+
+
+
+
$ ng update @angular/cli @angular/core
+
+
+
+
+

Upgrade to new Angular version

+
+

The process will be the same, but first we need to make sure that our devon4ng application is in the latest version of Angular 8, so the ng update command can perform the upgrade not only in the dependencies but also making code changes to reflect the new features and fixes.

+
+
+
    +
  • +

    First, upgrade to latest Angular 9 version:

    +
  • +
+
+
+
+
$ ng update @angular/cli@9 @angular/core@9
+
+
+
+

Optionally the flag -C can be added to previous command to make a commit automatically. This is also valid for the next steps.

+
+
+
    +
  • +

    Then, upgrade Angular:

    +
  • +
+
+
+
+
$ ng update @angular/cli @angular/core
+
+
+
+
    +
  • +

    In case you use Angular Material:

    +
  • +
+
+
+
+
$ ng update @angular/material
+
+
+
+
    +
  • +

    If the application depends on third party libraries, the new tool ngcc can be run to make them compatible with the new Ivy compiler. In this case it is recommended to include a postinstall script in the package.json:

    +
  • +
+
+
+
+
{
+  "scripts": {
+    "postinstall": "ngcc --properties es2015 browser module main --first-only --create-ivy-entry-points"
+  }
+}
+
+
+ +
+

Important use cases:

+
+
+
    +
  • +

    To update to the next beta or pre-release version, use the --next=true option.

    +
  • +
  • +

    To update from one major version to another, use the format ng update @angular/cli@^<major_version> @angular/core@^<major_version>.

    +
  • +
  • +

    In case your Angular application uses @angular/material include it in the first command:

    +
    +
    +
    $ ng update @angular/cli @angular/core @angular/material
    +
    +
    +
  • +
+
+
+
+

Ionic/Angular applications

+
+

Just following the same procedure we can upgrade Angular applications, but we must take care of important specific Ionic dependencies:

+
+
+
+
$ ng update @angular/cli @angular/core @ionic/angular @ionic/angular-toolkit [@ionic/...]
+
+
+
+
+

Other dependencies

+
+

Every application will make use of different dependencies. Angular CLI ng upgrade will also take care of these ones. For example, if you need to upgrade @capacitor you will perform:

+
+
+
+
$ ng update @capacitor/cli @capacitor/core [@capacitor/...]
+
+
+
+

Another example could be that you need to upgrade @ngx-translate packages. As always in this case you will execute:

+
+
+
+
$ ng update @ngx-translate/core @ngx-translate/http-loader
+
+
+
+
+

Angular Update Guide online tool

+
+

It is recommended to use the Angular Update Guide tool at https://update.angular.io/ that will provide the necessary steps to upgrade any Angular application depending on multiple criteria.

+
+ +
+
+

Working with Angular CLI

+
+

Angular CLI provides a facade for building, testing, linting, debugging and generating code. +Under the hood Angular CLI uses specific tools to achieve these tasks. +The user does not need to maintain them and can rely on Angular to keep them up to date and maybe switch to other tools which come up in the future.

+
+
+

The Angular CLI provides a wiki with common tasks you encounter when working on applications with the Angular CLI. +The Angular CLI Wiki can be found here.

+
+
+

In this guide we will go through the most important tasks. +To go into more details, please visit the Angular CLI wiki.

+
+
+
+

Installing Angular CLI

+
+

Angular CLI should be added as global and local dependency. +The following commands add Angular CLI as global Dependency.

+
+
+

yarn command

+
+
+
+
yarn global add @angular/cli
+
+
+
+

npm command

+
+
+
+
npm install -g @angular/cli
+
+
+
+

You can check a successful installation with ng --version. +This should print out the version installed.

+
+
+
+Printing Angular CLI Version +
+
Figure 42. Printing Angular CLI Version
+
+
+
+

Running a live development server

+
+

The Angular CLI can be used to start a live development server. +First your application will be compiled and then the server will be started. +If you change the code of a file, the server will reload the displayed page. +Run your application with the following command:

+
+
+
+
ng serve -o
+
+
+
+
+

Running Unit Tests

+
+

All unit tests can be executed with the command:

+
+
+
+
ng test
+
+
+
+

To make a single run and create a code coverage file use the following command:

+
+
+
+
ng test -sr -cc
+
+
+
+ + + + + +
+ + +You can configure the output format for code coverage files to match your requirements in the file karma.conf.js which can be found on toplevel of your project folder. +For instance, this can be useful for exporting the results to a SonarQube. +
+
+
+
+

Linting the code quality

+
+

You can lint your files with the command

+
+
+
+
ng lint --type-check
+
+
+
+ + + + + +
+ + +You can adjust the linting rules in the file tslint.json which can be found on toplevel of your project folder. +
+
+
+
+

Generating Code

+ +
+
+

Creating a new Angular CLI project

+
+

For creating a new Angular CLI project the command ng new is used.

+
+
+

The following command creates a new application named my-app.

+
+
+
+
ng new my-app
+
+
+
+
+

Creating a new feature module

+
+

A new feature module can be created via the ng generate module command.

+
+
+

The following command generates a new feature module named todo.

+
+
+
+
ng generate module todo
+
+
+
+
+Generate a module with Angular CLI +
+
Figure 43. Generate a module with Angular CLI
+
+
+ + + + + +
+ + +The created feature module needs to be added to the AppModule by hand. +Other option would be to define a lazy route in AppRoutingModule to make this a lazy loaded module. +
+
+
+
+

Creating a new component

+
+

To create components the command ng generate component can be used.

+
+
+

The following command will generate the component todo-details inside the components layer of todo module. +It will generate a class, a html file, a css file and a test file. +Also, it will register this component as declaration inside the nearest module - this is TodoModule.

+
+
+
+
ng generate component todo/components/todo-details
+
+
+
+
+Generate a component with Angular CLI +
+
Figure 44. Generate a component with Angular CLI
+
+
+ + + + + +
+ + +If you want to export the component, you have to add the component to exports array of the module. +This would be the case if you generate a component inside shared module. +
+
+
+
+

Configuring an Angular CLI project

+
+

Inside an Angular CLI project the file .angular-cli.json can be used to configure the Angular CLI.

+
+
+

The following options are very important to understand.

+
+
+
    +
  • +

    The property defaults can be used to change the default style extension. +The following settings will make the Angular CLI generate .less files, when a new component is generated.

    +
  • +
+
+
+
+
"defaults": {
+  "styleExt": "less",
+  "component": {}
+}
+
+
+
+
    +
  • +

    The property apps contains all applications maintained with Angular CLI. +Most of the time you will have only one.

    +
    +
      +
    • +

      assets configures all the static files, that the application needs - this can be images, fonts, json files, etc. +When you add them to assets the Angular CLI will put these files to the build target and serve them while debugging. +The following will put all files in /i18n to the output folder /i18n

      +
    • +
    +
    +
  • +
+
+
+
+
"assets": [
+  { "glob": "**/*.json", "input": "./i18n", "output": "./i18n" }
+]
+
+
+
+
    +
  • +

    styles property contains all style files that will be globally available. +The Angular CLI will create a styles bundle that goes directly into index.html with it. +The following will make all styles in styles.less globally available.

    +
  • +
+
+
+
+
"styles": [
+  "styles.less"
+]
+
+
+
+
    +
  • +

    environmentSource and environments are used to configure configuration with the Angular CLI. +Inside the code always the file specified in environmentSource will be referenced. +You can define different environments - eg. production, staging, etc. - which you list in environments. +At compile time the Angular CLI will override all values in environmentSource with the values from the matching environment target. +The following code will build the application for the environment staging.

    +
  • +
+
+
+
+
ng build --environment=staging
+
+
+
+
+
+
+

Ionic

+
+ +
+

Ionic 5 Getting started

+
+

Ionic is a front-end focused framework which offers different tools for developing hybrid mobile applications. The web technologies used for this purpose are CSS, Sass, HTML5 and Typescript.

+
+
+
+

Why Ionic?

+
+

Ionic is used for developing hybrid applications, which means not having to rely on a specific IDE such as Android Studio or Xcode. Furthermore, development of native apps require learning different languages (Java/Kotlin for Android and Objective-C/Swift for Apple), with Ionic, a developer does not have to code the same functionality for multiple platforms, just use the adequate libraries and components.

+
+
+
+

Basic environment set up

+ +
+
+

Install Ionic CLI

+
+

Although the devonfw distribution comes with an already installed Ionic CLI, here are the steps to install it. The installation of Ionic is easy, just one command has to be written:

+
+
+

$ npm install -g @ionic/cli

+
+
+
+

Update Ionic CLI

+
+

If there was a previous installation of the Ionic CLI, it will need to be uninstalled due to a change in package name.

+
+
+
+
$ npm uninstall -g ionic
+$ npm install -g @ionic/cli
+
+
+
+

Basic project set up +The set up of an Ionic application is pretty immediate and can be done in one line:

+
+
+

ionic start <name> <template> --type=angular

+
+
+
    +
  • +

    ionic start: Command to create an app.

    +
  • +
  • +

    <name>: Name of the application.

    +
  • +
  • +

    <template>: Model of the application.

    +
  • +
  • +

    --type=angular: With this flag, the app produced will be based on angular.

    +
  • +
+
+
+

To create an empty project, the following command can be used:

+
+
+

ionic start MyApp blank --type=angular

+
+
+
+Ionic blank project +
+
+
+

The image above shows the directory structure generated.

+
+
+

There are more templates available that can be seen with the command +ionic start --list

+
+
+
+List of ionic templates +
+
+
+

The templates surrounded by red line are based on angular and comes with Ionic v5, while the others belong to earlier versions (before v4).

+
+
+ + + + + +
+ + +More info at https://ionicframework.com/docs. Remember to select Angular documentation, since Ionic supports React, Vue and Vanilla JS. +
+
+ +
+
+

Ionic to android

+
+

This page is written to help developers to go from the source code of an ionic application to an android one, with this in mind, topics such as: environment, commands, modifications,…​ are covered.

+
+
+
+

Assumptions

+
+

This document assumes that the reader has already:

+
+
+
    +
  • +

    Source code of an Ionic application and wants to build it on an android device,

    +
  • +
  • +

    A working installation of NodeJS

    +
  • +
  • +

    An Ionic CLI installed and up-to-date.

    +
  • +
  • +

    Android Studio and Android SDK.

    +
  • +
+
+
+
+

From Ionic to Android project

+
+

When a native application is being designed, sometimes, functionalities that uses camera, geolocation, push notification, …​ are requested. To resolve these requests, Capacitor can be used.

+
+
+

In general terms, Capacitor wraps apps made with Ionic (HTML, SCSS, Typescript) into WebViews that can be displayed in native applications (Android, IOS) and allows the developer to access native functionalities like the ones said before.

+
+
+

Installing capacitor is as easy as installing any node module, just a few commands have to be run in a console:

+
+
+
    +
  • +

    cd name-of-ionic-4-app

    +
  • +
  • +

    npm install --save @capacitor/core @capacitor/cli

    +
  • +
+
+
+

Then, it is necessary to initialize capacitor with some information: app id, name of the app and the directory where your app is stored. To fill this information, run:

+
+
+
    +
  • +

    npx cap init

    +
  • +
+
+
+
+

Modifications

+
+

Throughout the development process, usually back-end and front-end are on a local computer, so it’s a common practice to have different configuration files for each environment (commonly production and development). Ionic uses an angular.json file to store those configurations and some rules to be applied.

+
+
+

If a back-end is hosted on http://localhost:8081, and that direction is used in every environment, the application built for android will not work because computer and device do not have the same localhost. Fortunately, different configurations can be defined.

+
+
+

Android Studio uses 10.0.2.2 as alias for 127.0.0.1 (computer’s localhost), so adding http://10.0.2.2:8081 in a new environment file and modifying angular.json accordingly will make it possible to connect front-end and back-end.

+
+
+
+Android environment and angular.json +
+
+
+
+
    "build": {
+    ...
+        "configurations": {
+            ...
+            "android": {
+                "fileReplacements": [
+                    {
+                        "replace": "src/environments/environment.ts",
+                        "with": "src/environments/environment.android.ts"
+                    }
+                ]
+            },
+        }
+    }
+
+
+
+
+

Build

+
+

Once configured, it is necessary to build the Ionic app using this new configuration:

+
+
+
    +
  • +

    ionic build --configuration=android

    +
  • +
+
+
+

The next commands copy the build application on a folder named android and open android studio.

+
+
+
    +
  • +

    npx cap add android

    +
  • +
  • +

    npx cap copy

    +
  • +
  • +

    npx cap open android

    +
  • +
+
+
+
+

From Android project to emulated device

+
+

Once Android Studio is opened, follow these steps:

+
+
+
    +
  1. +

    Click on "Build" → Make project.

    +
  2. +
  3. +

    Click on "Build" → Make Module 'app' (default name).

    +
  4. +
+
+
+

Click on make project +click on make app

+
+
+
    +
  1. +

    Click on" Build" → Build Bundle(s) / APK(s) → Build APK(s).

    +
  2. +
  3. +

    Click on run and choose a device.

    +
  4. +
+
+
+

click on build APK +click on running device

+
+
+

If there are no devices available, a new one can be created:

+
+
+
    +
  1. +

    Click on "Create new device"

    +
  2. +
  3. +

    Select hardware and click "Next". For example: Phone → Nexus 5X.

    +
  4. +
+
+
+

Create new device +Select hardware

+
+
+
    +
  1. +

    Download a system image.

    +
    +
      +
    1. +

      Click on download.

      +
    2. +
    3. +

      Wait until the installation finished and then click "Finish".

      +
    4. +
    5. +

      Click "Next".

      +
    6. +
    +
    +
  2. +
  3. +

    Verify configuration (default configuration should be enough) and click "Next".

    +
  4. +
+
+
+

Download system image +Check configuration

+
+
+
    +
  1. +

    Check that the new device is created correctly.

    +
  2. +
+
+
+
+New created device +
+
+
+
+

From Android project to real device

+
+

To test on a real android device, an easy approach to communicate a smartphone (front-end) and computer (back-end) is to configure a WiFi hotspot and connect the computer to it. A guide about this process can be found here.

+
+
+

Once connected, run ipconfig on a console if you are using windows or ifconfig on a Linux machine to get the IP address of your machine’s Wireless LAN adapter WiFi.

+
+
+
+Result of `ipconfig` command on Windows 10 +
+
+
+

This obtained IP must be used instead of "localhost" or "10.0.2.2" at environment.android.ts.

+
+
+
+Android environment file server URL +
+
+
+

After this configuration, follow the build steps in "From Ionic to Android project" and the first three steps in "From Android project to emulated device".

+
+
+
+

Send APK to Android through USB

+
+

To send the built application to a device, you can connect computer and mobile through USB, but first, it is necessary to unlock developer options.

+
+
+
    +
  1. +

    Open "Settings" and go to "System".

    +
  2. +
  3. +

    Click on "About".

    +
  4. +
  5. +

    Click "Build number" seven times to unlock developer options.

    +
  6. +
+
+
+
+Steps to enable developer options: 1, 2, 3 +
+
+
+
    +
  1. +

    Go to "System" again and then to "Developer options"

    +
  2. +
  3. +

    Check that the options are "On".

    +
  4. +
  5. +

    Check that "USB debugging" is activated.

    +
  6. +
+
+
+
+Steps to enable developer options: 4, 5, 6 +
+
+
+

After this, do the step four in "From Android project to emulated device" and choose the connected smartphone.

+
+
+
+

Send APK to Android through email

+
+

When you build an APK, a dialog gives two options: locate or analyze. If the first one is chosen, Windows file explorer will be opened showing an APK that can be sent using email. Download the APK on your phone and click it to install.

+
+
+
+Steps to enable developer options: 4, 5, 6 +
+
+
+
+

Result

+
+

If everything goes correctly, the Ionic application will be ready to be tested.

+
+
+
+Application running on a real device +
+
+ +
+
+

Ionic Progressive Web App

+
+

This guide is a continuation of the guide Angular PWAs, therefore, valid concepts explained there are still valid in this page but focused on Ionic.

+
+
+
+

Assumptions

+
+

This guide assumes that you already have installed:

+
+
+
    +
  • +

    NodeJS

    +
  • +
  • +

    npm package manager

    +
  • +
  • +

    Angular CLI / Nx CLI

    +
  • +
  • +

    Ionic 5 CLI

    +
  • +
  • +

    Capacitor

    +
  • +
+
+
+

Also, it is a good idea to read the document about PWA using Angular.

+
+
+
+

Sample Application

+
+
+Ionic 5 PWA Base +
+
Figure 45. Basic ionic PWA.
+
+
+

To explain how to build progressive web apps (PWA) using Ionic, a basic application is going to be built. This app will be able to take photos even without network using PWA elements.

+
+
+
+

Step 1: Create a new project

+
+

This step can be completed with one simple command: ionic start <name> <template>, where <name> is the name and <template> a model for the app. In this case, the app is going to be named basic-ion-pwa.

+
+
+

If you are using Nx, there is a pre-requisite to this step. And that is, you have to add the @nxtend/ionic-angular plugin to your Nx workspace. The command for that is npm install --save-dev @nxtend/ionic-angular. Once you have the plugin installed, you can generate an ionic app in your Nx workspace with the command nx generate @nxtend/ionic-angular:app basic-ion-pwa. (You can refer this guide if you want to get started with Nx).

+
+
+
+

Step 2: Structures and styles

+
+

The styles (scss) and structures (html) do not have anything specially relevant, just colors and ionic web components. The code can be found in devon4ts-samples.

+
+
+
+

Step 3: Add functionality

+
+

After this step, the app will allow users to take photos and display them in the main screen. +First we have to import three important elements:

+
+
+
    +
  • +

    DomSanitizer: Sanitizes values to be safe to use.

    +
  • +
  • +

    SafeResourceUrl: Interface for values that are safe to use as URL.

    +
  • +
  • +

    Plugins: Capacitor constant value used to access to the device’s camera and toast dialogs.

    +
  • +
+
+
+
+
  import { DomSanitizer, SafeResourceUrl } from '@angular/platform-browser';
+  import { Plugins, CameraResultType } from '@capacitor/core';
+
+
+
+

The process of taking a picture is enclosed in a takePicture() method. takePicture() calls the Camera’s getPhoto() function which returns an URL or an exception. If a photo is taken then the image displayed in the main page will be changed for the new picture, else, if the app is closed without changing it, a toast message will be displayed.

+
+
+
+
  export class HomePage {
+    image: SafeResourceUrl;
+    ...
+
+    async takePicture() {
+      try {
+        const image = await Plugins.Camera.getPhoto({
+          quality: 90,
+          allowEditing: true,
+          resultType: CameraResultType.Uri,
+        });
+
+        // Change last picture shown
+        this.image = this.sanitizer.bypassSecurityTrustResourceUrl(image.webPath);
+      } catch (e) {
+        this.show('Closing camera');
+      }
+    }
+
+    async show(message: string) {
+      await Plugins.Toast.show({
+        text: message,
+      });
+    }
+  }
+
+
+
+
+

Step 4: PWA Elements

+
+

When Ionic apps are not running natively, some resources like Camera do not work by default but can be enabled using PWA Elements. To use Capacitor’s PWA elements run npm install @ionic/pwa-elements and modify src/main.ts as shown below.

+
+
+
+
...
+
+// Import for PWA elements
+import { defineCustomElements } from '@ionic/pwa-elements/loader';
+
+if (environment.production) {
+  enableProdMode();
+}
+
+platformBrowserDynamic().bootstrapModule(AppModule)
+  .catch(err => console.log(err));
+
+// Call the element loader after the platform has been bootstrapped
+defineCustomElements(window);
+
+
+
+
+

Step 5: Make it Progressive.

+
+

Turning an Ionic 5 app into a PWA is pretty easy. The same module used to turn Angular apps into PWAs has to be added. To do so, run: ng add @angular/pwa. This command also creates an icons folder inside src/assets and contains angular icons for multiple resolutions. (Note: In an Nx workspace, you have to add it like a normal package using npm install @angular/pwa, and you have to manually add the icons). If you want to use other images, be sure that they have the same resolution, the names can be different but the file manifest.json has to be changed accordingly.

+
+
+
+

Step 6: Configure the app

+
+

manifest.json

+
+
+

Default configuration.

+
+
+

ngsw-config.json

+
+
+

In assetGroups → resources, add a urls field and a pattern to match PWA Elements scripts and other resources (images, styles, …):

+
+
+
+
  "urls": ["https://unpkg.com/@ionic/pwa-elements@1.0.2/dist/**"]
+
+
+
+
+

Step 7: Check that your app is a PWA

+
+

To check if an app is a PWA lets compare its normal behavior against itself but built for production. Run in the project’s root folder the commands below:

+
+
+

ionic build --configuration production to build the app using production settings. (nx build basic-ion-pwa --configuration production in your Nx workspace root).

+
+
+

npm install http-server to install an npm module that can serve your built application. Documentation here. A good alternative is also npm install serve. It can be checked here.

+
+
+

Go to the www folder running cd www.

+
+
+

http-server -o or serve to serve your built app.

+
+
+ + + + + +
+ + +In order not to install anything not necessary npx can be used directly to serve the app. i.e run npx serve [folder] will automatically download and run this HTTP server without installing it in the project dependencies. +
+
+
+
+Http server running +
+
Figure 46. Http server running on localhost:8081.
+
+
+

 
+In another console instance run ionic serve (nx serve basic-ion-pwa if using Nx CLI) to open the common app (not built).

+
+
+
+Ionic serve on Visual Studio Code console +
+
Figure 47. Ionic server running on localhost:8100.
+
+
+

 
+The first difference can be found on Developer tools → application, here it is seen that the PWA application (left) has a service worker and the common one does not.

+
+
+
+Application comparison +
+
Figure 48. Application service worker comparison.
+
+
+

 
+If the "offline" box is checked, it will force a disconnection from network. In situations where users do not have connectivity or have a slow one, the PWA can still be accessed and used.

+
+
+
+Online offline apps +
+
Figure 49. Offline application.
+
+
+

 
+Finally, plugins like Lighthouse can be used to test whether an application is progressive or not.

+
+
+
+Lighthouse report +
+
Figure 50. Lighthouse report.
+
+
+
+
+
+

Layouts

+
+ +
+

Angular Material Layout

+
+

The purpose of this guide is to get a basic understanding of creating layouts using Angular Material in a devon4ng application. We will create an application with a header containing some menu links and a sidenav with some navigation links.

+
+
+
+Finished application +
+
Figure 51. This is what the finished application will look like
+
+
+
+

Create a new angular application

+
+

We start with opening the devonfw IDE(right-click anywhere in your workspace and click "Open devonfw CMD shell here") and running the following command to start a project named devon4ng-mat-layout

+
+
+
    +
  • +

    ng new devon4ng-mat-layout --routing --style=scss. If you are using Nx, the command would be nx generate @nrwl/angular:app devon4ng-mat-layout --routing --style=scss in your Nx workspace. Click here to get started with using Nx.

    +
  • +
+
+
+

We are providing the routing flag so that a routing module is generated, and we are also setting the style sheet format to SCSS with --style=scss.

+
+
+

Once the creation process is complete, open your newly created application in Visual Studio Code. Try running the empty application by running the following command in the integrated terminal:

+
+
+
    +
  • +

    ng serve. (If you are using Nx, you have to specify the project name along with the --project flag, so the command becomes ng serve --project=devon4ng-mat-layout)

    +
  • +
+
+
+

Angular will spin up a server and you can check your application by visiting http://localhost:4200/ in your browser.

+
+
+
+Blank application +
+
Figure 52. Blank application
+
+
+
+

Adding Angular Material library to the project

+
+

Next we will add Angular Material to our application. In the integrated terminal, press Ctrl + C to terminate the running application and run the following command:

+
+
+
    +
  • +

    npm install --save @angular/material @angular/cdk @angular/animations

    +
  • +
+
+
+

You can also use Yarn to install the dependencies if you prefer that:

+
+
+
    +
  • +

    yarn add @angular/material @angular/cdk @angular/animations

    +
  • +
+
+
+

Once the dependencies are installed, we need to import the BrowserAnimationsModule in our AppModule for animations support.

+
+
+
Listing 58. Importing BrowserAnimationsModule in AppModule
+
+
import {BrowserAnimationsModule} from '@angular/platform-browser/animations';
+
+@NgModule({
+  ...
+  imports: [BrowserAnimationsModule],
+  ...
+})
+export class AppModule { }
+
+
+
+

Angular Material provides a host of components for designing our application. All the components are well structured into individual NgModules. For each component from the Angular Material library that we want to use, we have to import the respective NgModule.

+
+
+
Listing 59. We will be using the following components in our application:
+
+
import { MatIconModule, MatButtonModule, MatMenuModule, MatListModule, MatToolbarModule, MatSidenavModule } from '@angular/material';
+
+@NgModule({
+  ...
+  imports: [
+	...
+    MatIconModule,
+    MatButtonModule,
+    MatMenuModule,
+    MatListModule,
+    MatToolbarModule,
+    MatSidenavModule,
+	...
+	],
+  ...
+})
+export class AppModule { }
+
+
+
+

A better approach is to import and then export all the required components in a shared module. But for the sake of simplicity, we are importing all the required components in the AppModule itself.

+
+
+
+

==

+
+
+
  You can find a working copy of this application https://github.com/devonfw-sample/devon4ts-samples/tree/master/apps/angular-material-basic-layout[here]. The sample application is part of a Nx workspace, which means it is one of the many apps in a monorepo and capable of importing reusable code from a shared library. This guide describes the implementation by assuming a stand-alone single-repo application, but the pages and layout described in this sample app are similar to the ones used in another sample app in the monorepo (https://github.com/devonfw-sample/devon4ts-samples/tree/master/apps/angular-material-theming[angular-material-theming]), which is why we have exported the required components from a shared library and reused them in both the apps. As a result, the code in our monorepo will be slightly different. It would still help you in following this guide.
+== ==
+
+
+
+

Next, we include a theme in our application. Angular Material comes with four pre-defined themes: indigo-pink, deeppurple-amber, pink-bluegrey and purple-green. It is also possible to create our own custom theme, but that is beyond the scope of this guide. Including a theme is required to apply all of the core and theme styles to your application. +We will include the indigo-pink theme in our application by importing the indigo-pink.css file in our src/styles.scss:

+
+
+
Listing 60. In src/styles.scss:
+
+
@import "~@angular/material/prebuilt-themes/indigo-pink.css";
+
+
+
+

To use Material Design Icons along with the mat-icon component, we will load the Material Icons library in our src/index.html file

+
+
+
Listing 61. In src/index.html:
+
+
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
+
+
+
+
+

Development

+
+

Now that we have all the Angular Material related dependencies set up in our project, we can start coding. Let’s begin by adding a suitable margin and font to the body element of our single page application. We will add it in the src/styles.scss file to apply it globally:

+
+
+
Listing 62. In src/styles.scss:
+
+
body {
+  margin: 0;
+  font-family: "Segoe UI", Roboto, sans-serif;
+}
+
+
+
+

At this point, if we run our application, this is how it will look like:

+
+
+
+Angular Material added to the application +
+
Figure 53. Application with Angular Material set up
+
+
+

We will clear the app.component.html file and setup a header with a menu button and some navigational links. We will use mat-toolbar, mat-button, mat-menu, mat-icon and mat-icon-button for this:

+
+
+
Listing 63. app.component.html:
+
+
<mat-toolbar color="primary">
+  <button mat-icon-button aria-label="menu">
+    <mat-icon>menu</mat-icon>
+  </button>
+  <button mat-button [matMenuTriggerFor]="submenu">Menu 1</button>
+  <button mat-button>Menu 2</button>
+  <button mat-button>Menu 3</button>
+
+  <mat-menu #submenu="matMenu">
+    <button mat-menu-item>Sub-menu 1</button>
+    <button mat-menu-item [matMenuTriggerFor]="submenu2">Sub-menu 2</button>
+  </mat-menu>
+
+  <mat-menu #submenu2="matMenu">
+    <button mat-menu-item>Menu Item 1</button>
+    <button mat-menu-item>Menu Item 2</button>
+    <button mat-menu-item>Menu Item 3</button>
+  </mat-menu>
+
+</mat-toolbar>
+
+
+
+

The color attribute on the mat-toolbar element will give it the primary (indigo) color as defined by our theme. The color attribute works with most Angular Material components; the possible values are 'primary', 'accent' and 'warn'. +The mat-toolbar is a suitable component to represent a header. It serves as a placeholder for elements we want in our header. +Inside the mat-toolbar, we start with a button having mat-icon-button attribute, which itself contains a mat-icon element having the value menu. This will serve as a menu button which we can use to toggle the sidenav. +We follow it with some sample buttons having the mat-button attribute. Notice the first button has a property matMenuTriggerFor bound to a local reference submenu. As the property name suggests, the click of this button will display the mat-menu element with the specified local reference as a drop-down menu. The rest of the code is self-explanatory.

+
+
+
+Header added to the application +
+
Figure 54. This is how our application looks with the first menu button (Menu 1) clicked.
+
+
+

We want to keep the sidenav toggling menu button on the left and move the rest to the right to make it look better. To do this we add a class to the menu icon button:

+
+
+
Listing 64. app.component.html:
+
+
...
+  <button mat-icon-button aria-label="menu" class="menu">
+    <mat-icon>menu</mat-icon>
+  </button>
+...
+
+
+
+

And in the app.component.scss file, we add the following style:

+
+
+
Listing 65. app.component.scss:
+
+
.menu {
+    margin-right: auto;
+}
+
+
+
+

The mat-toolbar element already has its display property set to flex. Setting the menu icon button’s margin-right property to auto keeps it on the left and pushes the other elements to the right.

+
+
+
+Final look of the header +
+
Figure 55. Final look of the header.
+
+
+

Next, we will create a sidenav. But before that let's create a couple of components to navigate between, the links of which we will add to the sidenav. +We will use the ng generate component (or ng g c command for short) to create Home and Data components. (Append --project=devon4ng-mat-layout to the command in a Nx workspace). We nest them in the pages sub-directory since they represent our pages.

+
+
+
    +
  • +

    ng g c pages/home

    +
  • +
  • +

    ng g c pages/data

    +
  • +
+
+
+

Let us set up the routing such that when we visit http://localhost:4200/ root url we see the HomeComponent and when we visit http://localhost:4200/data url we see the DataComponent. +We had opted for routing while creating the application, so we have the routing module app-routing.module.ts setup for us. In this file, we have the empty routes array where we set up our routes.

+
+
+
Listing 66. app-routing.module.ts:
+
+
import { HomeComponent } from './pages/home/home.component';
+import { DataComponent } from './pages/data/data.component';
+
+	const routes: Routes = [
+	  { path: '', component: HomeComponent },
+	  { path: 'data', component: DataComponent }
+	];
+
+
+
+

We need to provide a hook where the components will be loaded when their respective URLs are loaded. We do that by using the router-outlet directive in the app.component.html.

+
+
+
Listing 67. app.component.html:
+
+
...
+	</mat-toolbar>
+	<router-outlet></router-outlet>
+
+
+
+

Now when we visit the defined URLs we see the appropriate components rendered on screen.

+
+
+

Let's change the contents of the components to have something better.

+
+
+
Listing 68. home.component.html:
+
+
<h2>Home Page</h2>
+
+
+
+
Listing 69. home.component.scss:
+
+
h2 {
+    text-align: center;
+    margin-top: 50px;
+}
+
+
+
+
Listing 70. data.component.html:
+
+
<h2>Data Page</h2>
+
+
+
+
Listing 71. data.component.scss:
+
+
h2 {
+    text-align: center;
+    margin-top: 50px;
+}
+
+
+
+

The pages look somewhat better now:

+
+
+
+Home page +
+
Figure 56. Home page
+
+
+
+Data page +
+
Figure 57. Data page
+
+
+

Let us finally create the sidenav. To implement the sidenav we need to use 3 Angular Material components: mat-sidenav-container, mat-sidenav and mat-sidenav-content. +The mat-sidenav-container, as the name suggests, acts as a container for the sidenav and the associated content. So it is the parent element, and mat-sidenav and mat-sidenav-content are the children sibling elements. mat-sidenav represents the sidenav. We can put any content we want, though it is usually used to contain a list of navigational links. The mat-sidenav-content element is for containing the contents of our current page. Since we need the sidenav application-wide, we will put it in the app.component.html.

+
+
+
Listing 72. app.component.html:
+
+
...
+</mat-toolbar>
+
+<mat-sidenav-container>
+  <mat-sidenav mode="over" [disableClose]="false" #sidenav>
+    Sidenav
+  </mat-sidenav>
+  <mat-sidenav-content>
+    <router-outlet></router-outlet>
+  </mat-sidenav-content>
+</mat-sidenav-container>
+
+
+
+

The mat-sidenav has a mode property, which accepts one of the 3 values: over, push and side. It decides the behavior of the sidenav. mat-sidenav also has a disableClose property which accepts a boolean value. It toggles the behavior where we click on the backdrop or press the Esc key to close the sidenav. There are other properties which we can use to customize the appearance, behavior and position of the sidenav. You can find the properties documented online at https://material.angular.io/components/sidenav/api +We moved the router-outlet directive inside the mat-sidenav-content where it will render the routed component. +But if you check the running application in the browser, we don’t see the sidenav yet. That is because it is closed. We want to have the sidenav opened/closed at the click of the menu icon button on the left side of the header we implemented earlier. Notice we have set a local reference #sidenav on the mat-sidenav element. We can access this element and call its toggle() function to toggle open or close the sidenav.

+
+
+
Listing 73. app.component.html:
+
+
...
+  <button mat-icon-button aria-label="menu" class="menu" (click)="sidenav.toggle()">
+    <mat-icon>menu</mat-icon>
+  </button>
+...
+
+
+
+
+Sidenav works +
+
Figure 58. Sidenav is implemented
+
+
+

We can now open the sidenav by clicking the menu icon button. But it does not look right. The sidenav is only as wide as its content. Also the page does not stretch the entire viewport due to lack of content. +Let’s add the following styles to make the page fill the viewport:

+
+
+
Listing 74. app.component.scss:
+
+
...
+mat-sidenav-container {
+    position: absolute;
+    top: 64px;
+    left: 0;
+    right: 0;
+    bottom: 0;
+}
+
+
+
+

The sidenav width will be corrected when we add the navigational links to it. That is the only thing remaining to be done. Let's implement it now:

+
+
+
Listing 75. app.component.html:
+
+
...
+  <mat-sidenav [disableClose]="false" mode="over" #sidenav>
+	<mat-nav-list>
+      <a
+        id="home"
+        mat-list-item
+        [routerLink]="['./']"
+        (click)="sidenav.close()"
+        routerLinkActive="active"
+        [routerLinkActiveOptions]="{exact: true}"
+      >
+        <mat-icon matListAvatar>home</mat-icon>
+        <h3 matLine>Home</h3>
+        <p matLine>sample home page</p>
+      </a>
+      <a
+        id="sampleData"
+        mat-list-item
+        [routerLink]="['./data']"
+        (click)="sidenav.close()"
+        routerLinkActive="active"
+      >
+        <mat-icon matListAvatar>grid_on</mat-icon>
+        <h3 matLine>Data</h3>
+        <p matLine>sample data page</p>
+      </a>
+    </mat-nav-list>
+  </mat-sidenav>
+...
+
+
+
+

We use the mat-nav-list element to set a list of navigational links. We use the a tags with mat-list-item directive. We implement a click listener on each link to close the sidenav when it is clicked. The routerLink directive is used to provide the URLs to navigate to. The routerLinkActive directive is used to provide the class name which will be added to the link when its URL is visited. Here we name the class `active`. To style it, let's modify the app.component.scss file:

+
+
+
Listing 76. app.component.scss:
+
+
...
+mat-sidenav-container {
+...
+	a.active {
+        background: #8e8d8d;
+        color: #fff;
+
+        p {
+            color: #4a4a4a;
+        }
+    }
+}
+
+
+
+

Now we have a working application with a basic layout: a header with some menu and a sidenav with some navigational links.

+
+
+
+Finished application +
+
Figure 59. Finished application
+
+
+
+

Conclusion

+
+

The purpose of this guide was to provide a basic understanding of creating layouts with Angular Material. The Angular Material library has a huge collection of ready to use components which can be found at https://material.angular.io/components/categories +It has provided documentation and example usage for each of its components. Going through the documentation will give a better understanding of using Angular Material components in our devon4ng applications.

+
+
+
+
+
+

NgRx

+
+ +
+

Introduction to NgRx

+
+

NgRx is a state management framework for Angular based on the Redux pattern.

+
+
+
+

The need for client side state management

+
+

You may wonder why you should bother with state management. Usually data resides in a back-end storage system, e.g. a database, and is retrieved by the client on a per-need basis. To add, update, or delete entities from this store, clients have to invoke API endpoints at the back-end. Mimicking database-like transactions on the client side may seem redundant. However, there are many use cases for which a global client-side state is appropriate:

+
+
+
    +
  • +

    the client has some kind of global state which should survive the destruction of a component, but does not warrant server side persistence, for example: volume level of media, expansion status of menus

    +
  • +
  • +

    server-side data should not be retrieved every time it is needed, either because multiple components consume it, or because it should be cached, e.g. the personal watchlist in an online streaming app

    +
  • +
  • +

    the app provides a rich experience with offline functionality, e.g. a native app built with Ionic

    +
  • +
+
+
+

Saving global states inside the services they originate from results in a data flow that is hard to follow and state becoming inconsistent due to unordered state mutations. Following the single source of truth principle, there should be a central location holding all your application’s state, just like a server-side database does. State management libraries for Angular provide tools for storing, retrieving, and updating client-side state.

+
+
+
+

Why NgRx?

+
+

As stated in the introduction, devon4ng does not stipulate a particular state library, or require using one at all. However, NgRx has proven to be a robust, mature solution for this task, with good tooling and 3rd-party library support. Albeit introducing a level of indirection that requires additional effort even for simple features, the Redux concept enforces a clear separation of concerns leading to a cleaner architecture.

+
+
+

Nonetheless, you should always compare different approaches to state management and pick the best one suiting your use case. Here’s a (non-exhaustive) list of competing state management libraries:

+
+
+
    +
  • +

    Plain RxJS using the simple store described in Abstract Class Store

    +
  • +
  • +

    NgXS reduces some boilerplate of NgRx by leveraging the power of decorators and moving side effects to the store

    +
  • +
  • +

    MobX follows a more imperative approach in contrast to the functional Redux pattern

    +
  • +
  • +

    Akita also uses an imperative approach with direct setters in the store, but keeps the concept of immutable state transitions

    +
  • +
+
+
+
+

Setup

+
+

To get a quick start, use the provided template for devon4ng + NgRx.

+
+
+

To manually install the core store package together with a set of useful extensions:

+
+
+

NPM:

+
+
+
+
`npm install @ngrx/store @ngrx/effects @ngrx/entity @ngrx/store-devtools --save`
+
+
+
+

Yarn:

+
+
+
+
`yarn add @ngrx/store @ngrx/effects @ngrx/entity @ngrx/store-devtools`
+
+
+
+

We recommend to add the NgRx schematics to your project so you can create code artifacts from the command line:

+
+
+

NPM:

+
+
+
+
`npm install @ngrx/schematics --save-dev`
+
+
+
+

Yarn:

+
+
+
+
`yarn add @ngrx/schematics --dev`
+
+
+
+

Afterwards, make NgRx your default schematics provider, so you don’t have to type the qualified package name every time:

+
+
+
+
`ng config cli.defaultCollection @ngrx/schematics`
+
+
+
+

If you have custom settings for Angular schematics, you have to configure them as described here.

+
+
+
+

Concept

+
+
+NgRx Architecture +
+
Figure 60. NgRx architecture overview
+
+
+

Figure 1 gives an overview of the NgRx data flow. The single source of truth is managed as an immutable state object by the store. Components dispatch actions to trigger state changes. Actions are handed over to reducers, which take the current state and action data to compute the next state. Actions are also consumed by effects, which perform side-effects such as retrieving data from the back-end, and may dispatch new actions as a result. Components subscribe to state changes using selectors.

+
+
+

Continue with Creating a Simple Store.

+
+ +
+
+

State, Selection and Reducers

+ +
+
+

Creating a Simple Store

+
+

In the following pages we use the example of an online streaming service. We will model a particular feature, a watchlist that can be populated by the user with movies she or he wants to see in the future.

+
+
+
+

Initializing NgRx

+
+

If you’re starting fresh, you first have to initialize NgRx and create a root state. The fastest way to do this is using the schematic:

+
+
+
+
`ng generate @ngrx/schematics:store State --root --module app.module.ts`
+
+
+
+

This will automatically generate a root store and register it in the app module. Next we generate a feature module for the watchlist:

+
+
+

` ng generate module watchlist`

+
+
+

and create a corresponding feature store:

+
+
+

` ng generate store watchlist/Watchlist -m watchlist.module.ts`

+
+
+

This generates a file watchlist/reducers/index.ts with the reducer function, and registers the store in the watchlist module declaration.

+
+
+
+

== =

+
+

If you’re getting an error Schematic "store" not found in collection "@schematics/angular", this means you forgot to register the NgRx schematics as default. +== == =

+
+
+

Next, add the WatchlistModule to the AppModule imports so the feature store is registered when the application starts. We also added the store devtools which we will use later, resulting in the following file:

+
+
+

app.module.ts

+
+
+
+
import { BrowserModule } from '@angular/platform-browser';
+import { NgModule } from '@angular/core';
+
+import { AppComponent } from './app.component';
+import { EffectsModule } from '@ngrx/effects';
+import { AppEffects } from './app.effects';
+import { StoreModule } from '@ngrx/store';
+import { reducers, metaReducers } from './reducers';
+import { StoreDevtoolsModule } from '@ngrx/store-devtools';
+import { environment } from '../environments/environment';
+import { WatchlistModule } from './watchlist/watchlist.module';
+
+@NgModule({
+  declarations: [
+    AppComponent
+  ],
+  imports: [
+    BrowserModule,
+    WatchlistModule,
+    StoreModule.forRoot(reducers, { metaReducers }),
+    // Instrumentation must be imported after importing StoreModule (config is optional)
+    StoreDevtoolsModule.instrument({
+      maxAge: 25, // Retains last 25 states
+      logOnly: environment.production, // Restrict extension to log-only mode
+    }),
+    !environment.production ? StoreDevtoolsModule.instrument() : []
+  ],
+  providers: [],
+  bootstrap: [AppComponent]
+})
+export class AppModule { }
+
+
+
+
+

Create an entity model and initial state

+
+

We need a simple model for our list of movies. Create a file watchlist/models/movies.ts and insert the following code:

+
+
+
+
export interface Movie {
+    id: number;
+    title: string;
+    releaseYear: number;
+    runtimeMinutes: number;
+    genre: Genre;
+}
+
+export type Genre = 'action' | 'fantasy' | 'sci-fi' | 'romantic' | 'comedy' | 'mystery';
+
+export interface WatchlistItem {
+    id: number;
+    movie: Movie;
+    added: Date;
+    playbackMinutes: number;
+}
+
+
+
+
+

== =

+
+

We discourage putting several types into the same file and do this only for the sake of keeping this tutorial brief. +== == =

+
+
+

Later we will learn how to retrieve data from the back-end using effects. For now we will create an initial state for the user with a default movie.

+
+
+

State is defined and transformed by a reducer function. Let’s create a watchlist reducer:

+
+
+
+
```
+cd watchlist/reducers
+ng g reducer WatchlistData --reducers index.ts
+```
+
+
+
+

Open the generated file watchlist-data.reducer.ts. You see three exports: The State interface defines the shape of the state. There is only one instance of a feature state in the store at all times. The initialState constant is the state at application creation time. The reducer function will later be called by the store to produce the next state instance based on the current state and an action object.

+
+
+

Let’s put a movie into the user’s watchlist:

+
+
+

watchlist-data.reducer.ts

+
+
+
+
export interface State {
+  items: WatchlistItem[];
+}
+
+export const initialState: State = {
+  items: [
+    {
+      id: 42,
+      movie: {
+        id: 1,
+        title: 'Die Hard',
+        genre: 'action',
+        releaseYear: 1988,
+        runtimeMinutes: 132
+      },
+      playbackMinutes: 0,
+      added: new Date(),
+    }
+  ]
+};
+
+
+
+
+

Select the current watchlist

+
+

State slices can be retrieved from the store using selectors.

+
+
+

Create a watchlist component:

+
+
+
+
`ng g c watchlist/Watchlist`
+
+
+
+

and add it to the exports of WatchlistModule. Also, replace app.component.html with

+
+
+
+
<app-watchlist></app-watchlist>
+
+
+
+

State observables are obtained using selectors. They are memoized by default, meaning that you don’t have to worry about performance if you use complicated calculations when deriving state — these are only performed once per state emission.

+
+
+

Add a selector to watchlist-data.reducer.ts:

+
+
+
+
`export const getAllItems = (state: State) => state.items;`
+
+
+
+

Next, we have to re-export the selector for this sub-state in the feature reducer. Modify the watchlist/reducers/index.ts like this:

+
+
+

watchlist/reducers/index.ts

+
+
+
+
import {
+  ActionReducer,
+  ActionReducerMap,
+  createFeatureSelector,
+  createSelector,
+  MetaReducer
+} from '@ngrx/store';
+import { environment } from 'src/environments/environment';
+import * as fromWatchlistData from './watchlist-data.reducer';
+import * as fromRoot from 'src/app/reducers/index';
+
+export interface WatchlistState { (1)
+  watchlistData: fromWatchlistData.State;
+}
+
+export interface State extends fromRoot.State { (2)
+  watchlist: WatchlistState;
+}
+
+export const reducers: ActionReducerMap<WatchlistState> = { (3)
+  watchlistData: fromWatchlistData.reducer,
+};
+
+export const metaReducers: MetaReducer<WatchlistState>[] = !environment.production ? [] : [];
+
+export const getFeature = createFeatureSelector<State, WatchlistState>('watchlist'); (4)
+
+export const getWatchlistData = createSelector( (5)
+  getFeature,
+  state => state.watchlistData
+);
+
+export const getAllItems = createSelector( (6)
+  getWatchlistData,
+  fromWatchlistData.getAllItems
+);
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + +
1The feature state, each member is managed by a different reducer
2Feature states are registered by the forFeature method. This interface provides a typesafe path from root to feature state.
3Tie sub-states of a feature state to the corresponding reducers
4Create a selector to access the 'watchlist' feature state
5select the watchlistData sub state
6re-export the selector
+
+
+

Note how createSelector allows to chain selectors. This is a powerful tool that also allows for selecting from multiple states.

+
+
+

You can use selectors as pipeable operators:

+
+
+

watchlist.component.ts

+
+
+
+
export class WatchlistComponent {
+  watchlistItems$: Observable<WatchlistItem[]>;
+
+  constructor(
+    private store: Store<fromWatchlist.State>
+  ) {
+    this.watchlistItems$ = this.store.pipe(select(fromWatchlist.getAllItems));
+  }
+}
+
+
+
+

watchlist.component.html

+
+
+
+
<h1>Watchlist</h1>
+<ul>
+    <li *ngFor="let item of watchlistItems$ | async">{{item.movie.title}} ({{item.movie.releaseYear}}): {{item.playbackMinutes}}/{{item.movie.runtimeMinutes}} min watched</li>
+</ul>
+
+
+
+
+

Dispatching an action to update watched minutes

+
+

We track the user’s current progress at watching a movie as the playbackMinutes property. After closing a video, the watched minutes have to be updated. In NgRx, state is being updated by dispatching actions. An action is an object with a (globally unique) type discriminator and an optional payload.

+
+
+
+

== Creating the action

+
+

Create a file playback/actions/index.ts. In this example, we do not further separate the actions per sub state. Actions can be defined by using action creators:

+
+
+

playback/actions/index.ts

+
+
+
+
import { createAction, props, union } from '@ngrx/store';
+
+export const playbackFinished = createAction('[Playback] Playback finished', props<{ movieId: number, stoppedAtMinute: number }>());
+
+const actions = union({
+    playbackFinished
+});
+
+export type ActionsUnion = typeof actions;
+
+
+
+

First we specify the type, followed by a call to the payload definition function. Next, we create a union of all possible actions for this file using union, which allows us to access action payloads in the reducer in a typesafe way.

+
+
+
+

== =

+
+

Action types should follow the naming convention [Source] Event, e.g. [Recommended List] Hide Recommendation or [Auth API] Login Success. Think of actions rather as events than commands. You should never use the same action at two different places (you can still handle multiple actions the same way). This facilitates tracing the source of an action. For details see Good Action Hygiene with NgRx by Mike Ryan (video). +== == =

+
+
+
+

== Dispatch

+
+

We skip the implementation of an actual video playback page and simulate watching a movie in 10 minute segments by adding a link in the template:

+
+
+

watchlist-component.html

+
+
+
+
<li *ngFor="let item of watchlistItems$ | async">... <button (click)="stoppedPlayback(item.movie.id, item.playbackMinutes + 10)">Add 10 Minutes</button></li>
+
+
+
+

watchlist-component.ts

+
+
+
+
import * as playbackActions from 'src/app/playback/actions';
+...
+  stoppedPlayback(movieId: number, stoppedAtMinute: number) {
+    this.store.dispatch(playbackActions.playbackFinished({ movieId, stoppedAtMinute }));
+  }
+
+
+
+
+

== State reduction

+
+

Next, we handle the action inside the watchlistData reducer. Note that actions can be handled by multiple reducers and effects at the same time to update different states, for example if we’d like to show a rating modal after playback has finished.

+
+
+

watchlist-data.reducer.ts

+
+
+
+
export function reducer(state = initialState, action: playbackActions.ActionsUnion): State {
+  switch (action.type) {
+    case playbackActions.playbackFinished.type:
+      return {
+        ...state,
+        items: state.items.map(updatePlaybackMinutesMapper(action.movieId, action.stoppedAtMinute))
+      };
+
+    default:
+      return state;
+  }
+}
+
+export function updatePlaybackMinutesMapper(movieId: number, stoppedAtMinute: number) {
+  return (item: WatchlistItem) => {
+    if (item.movie.id == movieId) {
+      return {
+        ...item,
+        playbackMinutes: stoppedAtMinute
+      };
+    } else {
+      return item;
+    }
+  };
+}
+
+
+
+

Note how we changed the reducer’s function signature to reference the actions union. The switch-case handles all incoming actions to produce the next state. The default case handles all actions a reducer is not interested in by returning the state unchanged. Then we find the watchlist item corresponding to the movie with the given id and update the playback minutes. Since state is immutable, we have to clone all objects down to the one we would like to change using the object spread operator (…​).

+
+
+
+

== =

+
+

Selectors rely on object identity to decide whether the value has to be recalculated. Do not clone objects that are not on the path to the change you want to make. This is why updatePlaybackMinutesMapper returns the same item if the movie id does not match. +== == =

+
+
+
+

== Alternative state mapping with Immer

+
+

It can be hard to think in immutable changes, especially if your team has a strong background in imperative programming. In this case, you may find the Immer library convenient, which allows to produce immutable objects by manipulating a proxied draft. The same reducer can then be written as:

+
+
+

watchlist-data.reducer.ts with Immer

+
+
+
+
import { produce } from 'immer';
+...
+case playbackActions.playbackFinished.type:
+      return produce(state, draft => {
+        const itemToUpdate = draft.items.find(item => item.movie.id == action.movieId);
+        if (itemToUpdate) {
+          itemToUpdate.playbackMinutes = action.stoppedAtMinute;
+        }
+      });
+
+
+
+

Immer works out of the box with plain objects and arrays.

+
+
+
+

== Redux devtools

+
+

If the StoreDevToolsModule is instrumented as described above, you can use the browser extension Redux devtools to see all dispatched actions and the resulting state diff, as well as the current state, and even travel back in time by undoing actions.

+
+
+
+Redux Devtools +
+
Figure 61. Redux devtools
+
+
+

Continue with learning about effects

+
+ +
+
+

Side effects with NgRx/Effects

+
+

Reducers are pure functions, meaning they are side-effect free and deterministic. Many actions however have side effects like sending messages or displaying a toast notification. NgRx encapsulates these actions in effects.

+
+
+

Let’s build a recommended movies list so the user can add movies to their watchlist.

+
+
+
+

Obtaining the recommendation list from the server

+
+

Create a module for recommendations and add stores and states as in the previous chapter. Add EffectsModule.forRoot([]) to the imports in AppModule below StoreModule.forRoot(). Add effects to the feature module:

+
+
+
+
ng generate effect recommendation/Recommendation -m recommendation/recommendation.module.ts
+
+
+
+

We need actions for loading the movie list, success and failure cases:

+
+
+

recommendation/actions/index.ts

+
+
+
+
import { createAction, props, union } from '@ngrx/store';
+import { Movie } from 'src/app/watchlist/models/movies';
+
+export const loadRecommendedMovies = createAction('[Recommendation List] Load movies');
+export const loadRecommendedMoviesSuccess = createAction('[Recommendation API] Load movies success', props<{movies: Movie[]}>());
+export const loadRecommendedMoviesFailure = createAction('[Recommendation API] Load movies failure', props<{error: any}>());
+
+const actions = union({
+    loadRecommendedMovies,
+    loadRecommendedMoviesSuccess,
+    loadRecommendedMoviesFailure
+});
+
+export type ActionsUnion = typeof actions;
+
+
+
+

In the reducer, we use a loading flag so the UI can show a loading spinner. The store is updated with arriving data.

+
+
+

recommendation/reducers/recommendation-data.reducer.ts

+
+
+
+
export interface State {
+  items: Movie[];
+  loading: boolean;
+}
+
+export const initialState: State = {
+  items: [],
+  loading: false
+};
+
+export function reducer(state = initialState, action: recommendationActions.ActionsUnion): State {
+  switch (action.type) {
+    case '[Recommendation List] Load movies':
+      return {
+        ...state,
+        items: [],
+        loading: true
+      };
+
+    case '[Recommendation API] Load movies failure':
+      return {
+        ...state,
+          loading: false
+      };
+
+    case '[Recommendation API] Load movies success':
+      return {
+        ...state,
+        items: action.movies,
+        loading: false
+      };
+
+    default:
+      return state;
+  }
+}
+
+export const getAll = (state: State) => state.items;
+export const isLoading = (state: State) => state.loading;
+
+
+
+

We need an API service to talk to the server. For demonstration purposes, we simulate an answer delayed by one second:

+
+
+

recommendation/services/recommendation-api.service.ts

+
+
+
+
@Injectable({
+  providedIn: 'root'
+})
+export class RecommendationApiService {
+
+  private readonly recommendedMovies: Movie[] = [
+    {
+      id: 2,
+      title: 'The Hunger Games',
+      genre: 'sci-fi',
+      releaseYear: 2012,
+      runtimeMinutes: 144
+    },
+    {
+      id: 4,
+      title: 'Avengers: Endgame',
+      genre: 'fantasy',
+      releaseYear: 2019,
+      runtimeMinutes: 181
+    }
+  ];
+
+  loadRecommendedMovies(): Observable<Movie[]> {
+    return of(this.recommendedMovies).pipe(delay(1000));
+  }
+}
+
+
+
+

Here are the effects:

+
+
+

recommendation/services/recommendation-api.service.ts

+
+
+
+
@Injectable()
+export class RecommendationEffects {
+
+  constructor(
+    private actions$: Actions,
+    private recommendationApi: RecommendationApiService,
+  ) { }
+
+  @Effect()
+  loadBooks$ = this.actions$.pipe(
+    ofType(recommendationActions.loadRecommendedMovies.type),
+    switchMap(() => this.recommendationApi.loadRecommendedMovies().pipe(
+      map(movies => recommendationActions.loadRecommendedMoviesSuccess({ movies })),
+      catchError(error => of(recommendationActions.loadRecommendedMoviesFailure({ error })))
+    ))
+  );
+}
+
+
+
+

Effects are always observables and return actions. In this example, we consume the actions observable provided by NgRx and listen only for the loadRecommendedMovies actions by using the ofType operator. Using switchMap, we map to a new observable, one that loads movies and maps the successful result to a new loadRecommendedMoviesSuccess action or a failure to loadRecommendedMoviesFailure. In a real application we would show a notification in the error case.

+
+
+
+

==

+
+

If an effect should not dispatch another action, return an empty observable.

+
+ + +
+
+

Simplifying CRUD with NgRx/Entity

+
+

Most of the time when manipulating entries in the store, we like to create, add, update, or delete entries (CRUD). NgRx/Entity provides convenience functions if each item of a collection has an id property. Luckily all our entities already have this property.

+
+
+

Let’s add functionality to add a movie to the watchlist. First, create the required action:

+
+
+

recommendation/actions/index.ts

+
+
+
+
export const addToWatchlist = createAction('[Recommendation List] Add to watchlist',
+    props<{ watchlistItemId: number, movie: Movie, addedAt: Date }>());
+
+
+
+
+

==

+
+

You may wonder why the Date object is not created inside the reducer instead, since it should always be the current time. However, remember that reducers should be deterministic state machines — State A + Action B should always result in the same State C. This makes reducers easily testable.

+
+
+

Then, rewrite the watchlistData reducer to make use of NgRx/Entity:

+
+
+

recommendation/actions/index.ts

+
+
+
+
export interface State extends EntityState<WatchlistItem> { (1)
+}
+
+export const entityAdapter = createEntityAdapter<WatchlistItem>(); (2)
+
+export const initialState: State = entityAdapter.getInitialState(); (3)
+
+const entitySelectors = entityAdapter.getSelectors();
+
+export function reducer(state = initialState, action: playbackActions.ActionsUnion | recommendationActions.ActionsUnion): State {
+  switch (action.type) {
+    case playbackActions.playbackFinished.type:
+      const itemToUpdate = entitySelectors
+      .selectAll(state) (4)
+      .find(item => item.movie.id == action.movieId);
+      if (itemToUpdate) {
+        return entityAdapter.updateOne({ (5)
+          id: itemToUpdate.id,
+          changes: { playbackMinutes: action.stoppedAtMinute } (6)
+        }, state);
+      } else {
+        return state;
+      }
+
+    case recommendationActions.addToWatchlist.type:
+      return entityAdapter.addOne({id: action.watchlistItemId, movie: action.movie, added: action.addedAt, playbackMinutes: 0}, state);
+
+    default:
+      return state;
+  }
+}
+
+
+export const getAllItems = entitySelectors.selectAll;
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + +
1NgRx/Entity requires state to extend EntityState. It provides a list of ids and a dictionary of id ⇒ entity entries
2The entity adapter provides data manipulation operations and selectors
3The state can be initialized with getInitialState(), which accepts an optional object to define any additional state beyond EntityState
4selectAll returns an array of all entities
5All adapter operations consume the state object as the last argument and produce a new state
6Update methods accept a partial change definition; you don’t have to clone the object
+
+
+

This concludes the tutorial on NgRx. If you want to learn about advanced topics such as selectors with arguments, testing, or router state, head over to the official NgRx documentation.

+
+
+
+
+
+

Cookbook

+
+ +
+

Abstract Class Store

+
+

The following solution presents a base class for implementing stores which handle state and its transitions. +Working with the base class achieves:

+
+
+
    +
  • +

    common API across all stores

    +
  • +
  • +

    logging (when activated in the constructor)

    +
  • +
  • +

    state transitions are asynchronous by design - sequential order problems are avoided

    +
  • +
+
+
+
Listing 77. Usage Example
+
+
@Injectable()
+export class ModalStore extends Store<ModalState> {
+
+  constructor() {
+    super({ isOpen: false }, !environment.production);
+  }
+
+  closeDialog() {
+    this.dispatchAction('Close Dialog', (currentState) => ({...currentState, isOpen: false}));
+  }
+
+  openDialog() {
+    this.dispatchAction('Open Dialog', (currentState) => ({...currentState, isOpen: true}));
+  }
+
+}
+
+
+
+
Listing 78. Abstract Base Class Store
+
+
import { OnDestroy } from '@angular/core';
+import { BehaviorSubject } from 'rxjs/BehaviorSubject';
+import { Observable } from 'rxjs/Observable';
+import { intersection, difference } from 'lodash';
+import { map, distinctUntilChanged, observeOn } from 'rxjs/operators';
+import { Subject } from 'rxjs/Subject';
+import { queue } from 'rxjs/scheduler/queue';
+import { Subscription } from 'rxjs/Subscription';
+
+interface Action<T> {
+  name: string;
+  actionFn: (state: T) => T;
+}
+
+/** Base class for implementing stores. */
+export abstract class Store<T> implements OnDestroy {
+
+  private actionSubscription: Subscription;
+  private actionSource: Subject<Action<T>>;
+  private stateSource: BehaviorSubject<T>;
+  state$: Observable<T>;
+
+  /**
+   * Initializes a store with initial state and logging.
+   * @param initialState Initial state
+   * @param logChanges When true state transitions are logged to the console.
+   */
+  constructor(initialState: T, public logChanges = false) {
+    this.stateSource = new BehaviorSubject<T>(initialState);
+    this.state$ = this.stateSource.asObservable();
+    this.actionSource = new Subject<Action<T>>();
+
+    this.actionSubscription = this.actionSource.pipe(observeOn(queue)).subscribe(action => {
+      const currentState = this.stateSource.getValue();
+      const nextState = action.actionFn(currentState);
+
+      if (this.logChanges) {
+        this.log(action.name, currentState, nextState);
+      }
+
+      this.stateSource.next(nextState);
+    });
+  }
+
+  /**
+   * Selects a property from the stores state.
+   * Will do distinctUntilChanged() and map() with the given selector.
+   * @param selector Selector function which selects the needed property from the state.
+   * @returns Observable of return type from selector function.
+   */
+  select<TX>(selector: (state: T) => TX): Observable<TX> {
+    return this.state$.pipe(
+      map(selector),
+      distinctUntilChanged()
+    );
+  }
+
+  protected dispatchAction(name: string, action: (state: T) => T) {
+    this.actionSource.next({ name, actionFn: action });
+  }
+
+  private log(actionName: string, before: T, after: T) {
+    const result: { [key: string]: { from: any, to: any} } = {};
+    const sameProbs = intersection(Object.keys(after), Object.keys(before));
+    const newProbs = difference(Object.keys(after), Object.keys(before));
+    for (const prop of newProbs) {
+      result[prop] = { from: undefined, to: (<any>after)[prop] };
+    }
+
+    for (const prop of sameProbs) {
+      if ((<any>before)[prop] !==  (<any>after)[prop]) {
+        result[prop] = { from: (<any>before)[prop], to: (<any>after)[prop] };
+      }
+    }
+
+    console.log(this.constructor.name, actionName, result);
+  }
+
+  ngOnDestroy() {
+    this.actionSubscription.unsubscribe();
+  }
+
+}
+
+
+ +
+
+

Add Electron to an Angular application using Angular CLI

+
+

This cookbook recipe explains how to integrate Electron in an Angular 10+ application. Electron is a framework for creating native applications with web technologies like JavaScript, HTML, and CSS. As an example, very well-known applications such as Visual Studio Code, Atom, Slack or Skype (and many more) are using Electron too.

+
+
+ + + + + +
+ + +At the moment of this writing Angular 11.2.0, Electron 11.2.3 and Electron-builder 22.9.1 were the versions available. +
+
+
+

Here are the steps to achieve this goal. Follow them in order.

+
+
+
+

Add Electron and other relevant dependencies

+
+

There are two different approaches to add the dependencies in the package.json file:

+
+
+
    +
  • +

    Writing the dependencies directly in that file.

    +
  • +
  • +

    Installing using npm install or yarn add.

    +
  • +
+
+
+ + + + + +
+ + +Please remember if the project has a package-lock.json or yarn.lock file use npm or yarn respectively. +
+
+
+

In order to add the dependencies directly in the package.json file, include the following lines in the devDependencies section:

+
+
+
+
"devDependencies": {
+...
+    "electron": "^11.2.3",
+    "electron-builder": "^22.9.1",
+...
+},
+
+
+
+

As indicated above, instead of this npm install can be used:

+
+
+
+
$ npm install -D electron electron-builder
+
+
+
+

Or with yarn:

+
+
+
+
$ yarn add -D electron electron-builder
+
+
+
+
+

Create the necessary typescript configurations

+
+

In order to initiate electron in an angular app we need to modify the tsconfig.json file and create a tsconfig.serve.json and a tsconfig.base.json in the root folder.

+
+
+
+

== tsconfig.json

+
+

This file needs to be modified to create references to ./src/tsconfig.app.json and ./src/tsconfig.spec.json to support different configurations.

+
+
+
+
{
+  "files": [],
+  "references": [
+    {
+      "path": "./src/tsconfig.app.json"
+    },
+    {
+      "path": "./src/tsconfig.spec.json"
+    }
+  ]
+}
+
+
+
+
+

== tsconfig.app.json

+
+
+
{
+  "extends": "../tsconfig.base.json",
+  "compilerOptions": {
+    "outDir": "../app",
+    "module": "es2015",
+    "baseUrl": "",
+    "types": []
+  },
+  "include": [
+    "**/*.ts",
+  ],
+  "exclude": [
+    "**/*.spec.ts"
+  ],
+  "angularCompilerOptions": {
+    "fullTemplateTypeCheck": true,
+    "strictInjectionParameters": true,
+    "preserveWhitespaces": true
+  }
+}
+
+
+
+
+

== tsconfig.spec.json

+
+
+
{
+  "extends": "../tsconfig.base.json",
+  "compilerOptions": {
+    "outDir": "../spec",
+    "module": "commonjs",
+    "types": [
+      "jasmine",
+      "node"
+    ]
+  },
+  "files": [
+    "test.ts",
+  ],
+  "include": [
+    "**/*.spec.ts",
+    "**/*.d.ts"
+  ],
+  "exclude": [
+    "dist",
+    "release",
+    "node_modules"
+  ]
+}
+
+
+
+
+

== tsconfig.base.json

+
+

This is shared between tsconfig.app.json and tsconfig.spec.json and it will be extended on each config file.

+
+
+
+
{
+  "compileOnSave": false,
+  "compilerOptions": {
+    "outDir": "./dist",
+    "sourceMap": true,
+    "declaration": false,
+    "moduleResolution": "node",
+    "emitDecoratorMetadata": true,
+    "experimentalDecorators": true,
+    "target": "es5",
+    "typeRoots": [
+      "node_modules/@types"
+    ],
+    "lib": [
+      "es2017",
+      "es2016",
+      "es2015",
+      "dom"
+    ]
+  },
+  "files": [
+    "electron-main.ts",
+    "src/polyfills.ts"
+  ],
+  "include": [
+    "src/**/*.d.ts"
+  ],
+  "exclude": [
+    "node_modules"
+  ]
+}
+
+
+
+
+

== tsconfig.serve.json

+
+

In the root, tsconfig.serve.json needs to be created. This typescript config file is going to be used when we serve electron:

+
+
+
+
{
+  "compilerOptions": {
+    "outDir": ".",
+    "sourceMap": true,
+    "declaration": false,
+    "moduleResolution": "node",
+    "emitDecoratorMetadata": true,
+    "experimentalDecorators": true,
+    "target": "es5",
+    "typeRoots": [
+      "node_modules/@types"
+    ],
+    "lib": [
+      "es2017",
+      "dom"
+    ]
+  },
+  "include": [
+    "electron-main.ts"
+  ],
+  "exclude": [
+    "node_modules",
+    "**/*.spec.ts"
+  ]
+}
+
+
+
+
+

Add Electron build configuration

+
+

In order to configure electron builds properly we need to create a new json on our application, let’s call it electron-builder.json. For more information and fine tuning please refer to the Electron Builder official documentation.

+
+
+

The contents of the file will be something similar to the following:

+
+
+
+
{
+  "productName": "devon4ngElectron",
+  "directories":{
+    "output": "./builder-release"
+  },
+  "win": {
+    "icon": "dist/assets/icons",
+    "target": [
+      "portable"
+    ]
+  },
+  "mac": {
+    "icon": "dist/assets/icons",
+    "target": [
+      "dmg"
+    ]
+  },
+  "linux": {
+    "icon": "dist/assets/icons",
+    "target": [
+      "AppImage"
+    ]
+  }
+}
+
+
+
+

There are two important things in this file:

+
+
+
    +
  1. +

    "output": this is where electron builder is going to build our application

    +
  2. +
  3. +

    "icon": in every OS possible there is an icon parameter, the route to the icon folder that will be created after building with angular needs to be used here. This will make it so the electron builder can find the icons and build.

    +
  4. +
+
+
+
+

Modify angular.json

+
+

angular.json has to be modified so the project is built inside /dist without an intermediate folder.

+
+
+
+
{
+  "architect": {
+    "build": {
+      "outputPath": "dist"
+    }
+  }
+}
+
+
+
+
+

Create the electron window in electron-main.ts

+
+

In order to use electron, a file needs to be created at the root of the application (electron-main.ts). This file will create a window with different settings, checking if we are using --serve as an argument:

+
+
+
+
import { app, BrowserWindow } from 'electron';
+import * as path from 'path';
+import * as url from 'url';
+
+let win: any;
+const args: any = process.argv.slice(1);
+const serve: any = args.some((val) => val == '--serve');
+
+const createWindow:any = ()=>{
+  // Create the browser window.
+  win = new BrowserWindow({
+    fullscreen: true,
+    webPreferences: {
+      nodeIntegration: true,
+    }
+  });
+
+  if (serve) {
+    require('electron-reload')(__dirname, {
+      electron: require(`${__dirname}/node_modules/electron`)
+    });
+    win.loadURL('http://localhost:4200');
+  } else {
+    win.loadURL(
+      url.format({
+        pathname: path.join(__dirname, 'dist/index.html'),
+        protocol: 'file:',
+        slashes: true
+      })
+    );
+  }
+
+  if (serve) {
+    win.webContents.openDevTools();
+  }
+
+  // Emitted when the window is closed.
+  win.on('closed', () => {
+    // Dereference the window object, usually you would store window
+    // in an array if your app supports multi windows, this is the time
+    // when you should delete the corresponding element.
+    // tslint:disable-next-line:no-null-keyword
+    win = null;
+  });
+}
+
+try {
+  // This method will be called when Electron has finished
+  // initialization and is ready to create browser windows.
+  // Some APIs can only be used after this event occurs.
+  app.on('ready', createWindow);
+
+   // Quit when all windows are closed.
+  app.on('window-all-closed', () => {
+    // On OS X it is common for applications and their menu bar
+    // to stay active until the user quits explicitly with Cmd + Q
+    if (process.platform !==  'darwin') {
+      app.quit();
+    }
+  });
+
+   app.on('activate', () => {
+    // On OS X it's common to re-create a window in the app when the
+    // dock icon is clicked and there are no other windows open.
+    if (win == null) {
+      createWindow();
+    }
+  });
+} catch (e) {
+  // Catch Error
+  // throw e;
+}
+
+
+
+
+

Add the electron window and improve the package.json scripts

+
+

Inside package.json the electron window that will be transformed to electron-main.js when building needs to be added.

+
+
+
+
{
+  ....
+  "main": "electron-main.js",
+  "scripts": {...}
+  ....
+}
+
+
+
+

The scripts section in the package.json can be improved to avoid running too verbose commands. As a very complete example we can take a look at the My Thai Star scripts section and copy the lines useful in your project. In any case, we recommend adding at least the following lines:

+
+
+
+
  "scripts": {
+    "ng": "ng",
+    "start": "ng serve",
+    "build": "ng build",
+    "test": "ng test",
+    "lint": "ng lint",
+    "e2e": "ng e2e",
+    "electron:tsc": "tsc -p tsconfig.serve.json",
+    "electron:run": "npm run electron:tsc && ng build --base-href ./ && npx electron .",
+    "electron:serve": "npm run electron:tsc && npx electron . --serve",
+    "electron:pack": "npm run electron:tsc && electron-builder --dir --config electron-builder.json",
+    "electron:build": "npm run electron:tsc && electron-builder --config electron-builder.json build"
+  },
+
+
+
+

The electron: scripts do the following:

+
+
+
    +
  • +

    electron:tsc: Compiles electron TS files.

    +
  • +
  • +

    electron:run: Serves Angular app and runs electron.

    +
  • +
  • +

    electron:serve: Serves electron with an already running angular app (i.e. a ng serve command running on another terminal).

    +
  • +
  • +

    electron:pack: Packs electron app.

    +
  • +
  • +

    electron:build: Builds electron app.

    +
  • +
+
+
+
+

Add Electron to an Angular application using Nx CLI

+
+

Creating an Electron app is very easy and straight-forward if you are using Nx CLI. As a pre-requisite, you should already have an application in your Nx workspace which you want to run as a front-end in your Electron app. (You can follow this guide if you want to get started with Nx).

+
+
+

Follow the steps below to develop an Electron app in your Nx workspace:

+
+
+
+

Install nx-electron

+
+

Install nx-electron using the command:

+
+
+
+
  npm install -D nx-electron
+
+
+
+

This will add the packages electron and nx-electron as dev dependencies to your Nx workspace. This will help us generate our Electron app in the next step.

+
+
+
+

Generate your Electron app

+
+

Once you have installed nx-electron, you can generate your electron app using the command:

+
+
+
+
  nx g nx-electron:app <electron-app-name> --frontendProject=<frontend-app-name>
+
+
+
+

And that is it! You have generated your Electron app already. All the configuration files (tsconfig.*) are generated for you under <electron-app-name> in your Nx workspace.

+
+
+
+

Serving your app

+
+

You can use this command to serve your Electron app:

+
+
+
+
  nx run-many --target=serve --projects=<frontend-app-name>,<electron-app-name> --parallel
+
+
+
+

If you see a blank application, it is because the Electron app was served before the front-end was served. To avoid this, you can serve the front-end and back-end separately, (that is, serve the back-end only after the front-end is served).

+
+
+
+

Building your app

+
+

The command for building your Electron app in Nx is similar to the serve command above, you only change the target from serve to build:

+
+
+
+
  nx run-many --target=build --projects=<frontend-app-name>,<electron-app-name> --parallel
+
+
+
+
+

Packaging your app

+
+

Make sure you have built your app before you try to package it using the following command:

+
+
+
+
  nx run <electron-app-name>:package [--options]
+
+
+
+

The options that can be passed can be found here.

+
+
+

You can find a working example of an Electron app in devon4ts-samples.

+
+
+

Unresolved include directive in modules/ROOT/pages/devon4ng.wiki/master-devon4ng.adoc - include::guide-angular-mock-service.adoc.adoc[]

+
+ +
+
+

Testing e2e with Cypress

+
+

This guide will cover the basics of e2e testing using Cypress.

+
+
+

Cypress is a framework “all in one” that provides the necessary libraries to write specific e2e tests, without the need of Selenium.

+
+
+

Why Cypress?

+
+
+
    +
  • +

    Uses JavaScript

    +
  • +
  • +

    It works directly with the browser so the compatibility with the front-end framework the project uses (in this case Angular) is not a problem.

    +
  • +
  • +

    Easy cross browser testing

    +
  • +
+
+
+
+

Setup

+
+

Install +First of all, we need to install it. We can use npm install:

+
+
+
+
$ npm install -D cypress
+
+
+
+

Or we can install it with yarn:

+
+
+
+
$ yarn add -D cypress
+
+
+
+

We need to run Cypress in order to get the folder tree downloaded, then create a tsconfig.json file inside cypress folder to add the typescript configuration.

+
+
+
+
$ ./node_modules/.bin/cypress open
+
+
+
+
Listing 79. tsconfig.json
+
+
{
+  "compilerOptions": {
+    "strict": true,
+    "baseUrl": "../node_modules",
+    "target": "es5",
+    "lib": ["es5", "dom"],
+    "types": ["cypress"]
+  },
+  "include": [
+    "**/*.ts"
+  ]
+}
+
+
+
+

BaseUrl

+
+
+

Let’s setup the base URL so when we run the tests cypress will "navigate" to the right place, go to cypress.json on the root of the project.

+
+
+
Listing 80. cypress.json
+
+
{
+  "baseUrl": "http://localhost:4200"
+}
+
+
+
+
+

Files / Structure

+
+
+
/cypress
+  tsconfig.json
+  /fixtures
+    - example.json
+  /integration
+    - button.spec.ts
+    - test.spec.ts
+    /examples
+  /plugins
+    - index.js
+  /support
+    - commands.js
+    - index.js
+
+
+
+

tsconfig.json for typescript configuration.

+
+
+

fixtures to store our mock data or files (images, mp3…​) to use on our tests.

+
+
+

integration is where our tests go, by default it comes with an examples folder with tested samples.

+
+
+

plugins is where the configuration files of the plugins go.

+
+
+

support to add custom commands.

+
+
+
+

== =

+
+

If you are using Nx, it automatically generates a e2e cypress project for every project that you generate. So you already get the configuration files like tsconfig.json and cypress.json and also get the folder structure described above. This helps you focus more on writing your tests rather than setting up Cypress.

+
+
+
+

== =

+ +
+
+

Tests

+
+

The structure is the same as Mocha's.

+
+
+

First, we create a file, for example form.spec.ts, inside we define a context to group all our tests referred to the same subject.

+
+
+
Listing 81. form.spec.ts
+
+
context('Button page', () => {
+  beforeEach(() => {
+    cy.visit('/');
+  });
+  it('should have button',()=>{
+    cy.get('button').should('exist');
+  });
+  it('should contain PRESS',()=>{
+    cy.contains('button', 'PRESS');
+  });
+});
+
+
+
+
beforeEach
+

Visit '/' before every test.

+
+
+
it
+

Inside we write the test.

+
+
+

The result:

+
+
+
+contextImg +
+
+
+

For more info check Cypress documentation

+
+
+

On kitchensink +you can find an official cypress demo with all the commands being used.

+
+
+
+

Fixtures

+
+

We use fixtures to mock data, it can be a json, an image, video…​

+
+
+
+
{
+  "name": "Dummy name",
+  "phone": "999 99 99 99",
+  "body": "Mock data"
+}
+
+
+
+

You can store multiple mocks on the same fixture file.

+
+
+
+
{
+  "create":{"name": "e2etestBox"},
+  "boxFruit":{
+    "uuid":"3376339576e33dfb9145362426a33333",
+    "name":"e2etestBox",
+    "visibility":true,
+    "items":[
+      {"name":"apple","units":3},
+      {"name":"kiwi","units":2}
+    ]
+  }
+}
+
+
+
+

To access data we don’t need to import any file, we just call cy.fixture(filename) inside the **.spec.ts. We can name it as we want.

+
+
+
+
cy.fixture('box.json').as('fruitBox')
+
+
+
+

cy.fixture('box.json') we get access to box.json +.as(fruitBox) is used to create an alias (fruitBox) to the fixture.

+
+
+

For more info check Fixtures documentation

+
+
+
+

Request / Route

+
+

With cypress you can test your application with real data or with mocks.

+
+
+

Not using mocks guarantees that your tests are real e2e tests but makes them vulnerable to external issues. +When you mock data you don’t know exactly if the data and the structure received from the backend are correct because you are forcing a mock on the response, but you can avoid external issues, run tests faster and have better control over the structure and status.

+
+
+

To get more information go to Testing Strategies

+
+
+
+

Route

+
+

Cypress can intercept a XHR request and interact with it.

+
+
+
+
cy.server();
+cy.route(
+  'GET',
+  '/apiUrl/list',
+  [{"name":"apple", "units":3},{"name":"kiwi", "units":2}]
+)
+
+
+
+

cy.server(options) starts a server to interact with the responses.

+
+
+
cy.route(options) intercepts a XMLHttpRequests
+
    +
  • +

    method GET

    +
  • +
  • +

    URL /apiUrl/list'

    +
  • +
  • +

    response [{"name":"apple", "units":3},{"name":"kiwi", "units":2}]

    +
  • +
+
+
+

Waits

+
+
+

Every cypress action has a default wait time to avoid asynchronous issues, but this time can be too short for some particular actions like API calls; for those cases we can use cy.wait().

+
+
+
+
cy.server();
+cy.route('/apiUrl/list').as('list');
+cy.visit('/boxList');
+cy.wait('@list');
+
+
+
+

You can find more information about cy.wait() here

+
+
+

To mock data with fixtures:

+
+
+
+
cy.fixture('box')
+  .then(({boxFruit}) => {
+    cy.route(
+      'GET',
+      '/apiUrl/list',
+      boxFruit
+    ).as('boxFruit');
+    cy.get('#button').click();
+    cy.wait('@boxFruit');
+    cy.get('#list').contains('apple');
+  })
+
+
+
+

We get boxFruit data from the box fixture and then we mock the API call with it so now the response of the call is boxFruit object. +When the button is clicked, it waits to receive the response of the call and then checks if the list contains one of the elements of the fruitBox.

+
+
+
+

Request

+
+

Make a HTTP request.

+
+
+
+
cy.server();
+cy.request('http://localhost:4200/')
+  .its('body')
+  .should('include', '<h1>Welcome to Devon4ngAngularElementsTest!</h1>');
+
+
+
+

If we have 'http://localhost:4200' as baseUrl on cypress.json

+
+
+
+
cy.server();
+cy.request('/')
+  .its('body')
+  .should('include', '<h1>Welcome to Devon4ngAngularElementsTest!</h1>');
+// Goes to http://localhost:4200/
+
+
+
+

We can add other options, like we can send the body of a form.

+
+
+
+
cy.server();
+cy.request({
+  method: 'POST',
+  url: '/send',
+  form: true,
+  body: {
+    name: 'name task',
+    description: 'description of the task'
+  }
+});
+
+
+
+
+

Custom commands

+
+

If you see yourself writing the same test more than once (login is a common one), you can create a custom command to make things faster.

+
+
+

Cypress.Commands.add('name', () => {}) to create the test.

+
+
+
Listing 82. commands.ts
+
+
Cypress.Commands.add('checkPlaceholder', (name) => {
+  cy.get(`[name='${name}']`).click();
+  cy.get('mat-form-field.mat-focused').should('exist');
+});
+
+
+
+
index.ts
+

To use the commands we need to import the files on support/index.ts

+
+
+
Listing 83. index.ts
+
+
import './commands'
+import './file1'
+import './folder/file2'
+
+
+
+

index.ts is where all our custom commands files unite so Cypress knows where to find them.

+
+
+

And as we are using typescript we need to define a namespace, interface and define our function.

+
+
+
    +
  • +

    index.d.ts

    +
  • +
+
+
+
+
declare namespace Cypress {
+  interface Chainable<Subject> {
+    checkPlaceholder(name:string):Chainable<void>
+  }
+}
+
+
+ +
+
+

Cross browser testing

+
+

By default the browser used by Cypress is Chrome; it is compatible with its family of browsers (including Microsoft Edge) and has beta support for Mozilla Firefox.

+
+
+

To change the browser on the panel we can do it by selecting the desired one on the browsers tab before running the spec file.

+
+
+

Cypress will detect and display only the browsers that you have already installed on your machine (in addition to Electron, which is always available).

+
+
+
+browserTab +
+
+
+

Once the browser is selected, you can run your tests.

+
+
+

To change the browser on the automatic test run, you can add a flag on the node command

+
+
+
+
cypress run --browser edge
+
+
+
+

Only if we use the cypress run command.

+
+
+

Or we can change the script file.

+
+
+
    +
  • +

    cypress/script.js

    +
  • +
+
+
+
+
const runTests= async ()=>{
+  ...
+  const {totalFailed} = await cypress.run({browser:'edge'});
+  ...
+};
+
+
+ +
+
+

Viewport

+
+

Cypress allows us to create tests depending on the viewport, so we can test responsiveness.

+
+
+

There are different ways to use it:

+
+
+

Inside a test case

+
+
+
+
it('should change title when viewport is less than 320px', ()=>{
+  cy.get('.title-l').should('be.visible');
+  cy.get('.title-s').should('not.be.visible');
+  cy.viewport(320, 480);
+  cy.get('.title-l').should('not.be.visible');
+  cy.get('.title-s').should('be.visible');
+})
+
+
+
+

Passing the configuration as an option

+
+
+
+
describe('page display on medium size screen', {
+  viewportHeight: 1000,
+  viewportWidth: 400
+}, () => {
+  ...
+})
+
+
+
+

Or we can set a default

+
+
+
    +
  • +

    cypress.json

    +
  • +
+
+
+
+
...
+{
+ "viewportHeight": 1000,
+ "viewportWidth": 400
+}
+...
+
+
+ +
+
+

Test retries

+
+

We can get false negatives intermittently due to external issues that can affect our tests. Because of that we can add, in the configuration, a retries entry so Cypress can run a failed test again the selected number of times to verify that the error is real.

+
+
+

We can set retries for run or open mode.

+
+
+
    +
  • +

    cypress.json

    +
  • +
+
+
+
+
...
+"retries": {
+    "runMode": 3,
+    "openMode": 3
+  }
+...
+
+
+
+

The retries can be configured on the cypress.json or directly on a specific test.

+
+
+
+
it('should get button', {
+  retries: {
+    runMode: 2,
+    openMode: 2
+  }
+}, () => {
+  ...
+})
+
+
+
+

These retries are not shown on the test log.

+
+
+

Check more on retries documentation

+
+
+
+

Reporter

+
+

The test results appear on the terminal, but to have a more friendly view we can add a reporter.

+
+
+
+reporter +
+
+
+
+

Mochawesome

+
+

In this case we are going to use Mochawesome; initially it's a Mocha reporter, but as Cypress uses Mocha it works the same.

+
+
+

Install

+
+
+

npm

+
+
+
+
npm install --save-dev mochawesome
+
+
+
+

yarn

+
+
+
+
yarn add -D mochawesome
+
+
+
+

To run the reporter:

+
+
+
+
cypress run --reporter mochawesome
+
+
+
+

Mochawesome saves by default the generated files on `./mochawesome-report/` but we can add options to change this behavior.

+
+
+

Options can be passed to the reporter in two ways

+
+
+

Using a flag

+
+
+
+
cypress run --reporter mochawesome --reporter-options reportDir=report
+
+
+
+

Or on cypress.json

+
+
+
+
{
+  "baseUrl": "http://localhost:4200",
+  "reporter": "mochawesome",
+  "reporterOptions": {
+    "overwrite": false,
+    "html": false,
+    "json": true,
+    "reportDir": "cypress/report"
+  }
+}
+
+
+
+

Overwrite:false to not overwrite every *.spec.ts test report; we want to merge them into a single report later.

+
+
+

reportDir to set a custom directory.

+
+
+

html:false because we don’t need it.

+
+
+

json:true to save them on json.

+
+
+

Mochawesome only creates the html file of the last .spec.ts file that the tests run, that’s why we don’t generate html reports directly, in order to stack them all on the same final html we need to merge the reports.

+
+ +
+

mochawesome-merge

+
+
+

Mochawesome-merge is a library that helps us to merge the different json.

+
+
+

npm

+
+
+
+
npm install --save-dev mochawesome-merge
+npm install --save-dev mochawesome-report-generator
+
+
+
+

yarn

+
+
+
+
yarn add -D mochawesome-merge
+yarn add -D mochawesome-report-generator
+
+
+
+

To merge the files we execute this command:

+
+
+
+
mochawesome-merge cypress/report/*.json > cypress/reportFinal.json
+
+
+
+

reportFinal.json is the result of this merge; with that we have the data of all the spec files in one json.

+
+
+

We can also automate the test, merge and conversion to html using a script.

+
+
+
+
const cypress = require('cypress');
+const fse = require('fs-extra');
+const { merge } = require('mochawesome-merge');
+const generator = require('mochawesome-report-generator');
+const runTests= async ()=>{
+  await fse.remove('mochawesome-report');
+  await fse.remove('cypress/report');
+  const {totalFailed} = await cypress.run();
+  const reporterOptions = {
+    files: ["cypress/report/*.json"]
+  };
+  await generateReport(reporterOptions);
+  if(totalFailed !==  0){
+    process.exit(2);
+  };
+};
+const generateReport = (options)=> {
+  return merge(options).then((jsonReport)=>{
+    generator.create(jsonReport).then(()=>{
+      process.exit();
+    });
+  });
+};
+runTests();
+
+
+
+

fse.remove() to remove older reports data.

+
+
+

cypress.run() to run the tests.

+
+
+

merge(options) we merge the json output from running the tests.

+
+
+

generator.create(jsonReport) then we generate the html view of the report.

+
+ +
+

On kitchensink +you can find an official cypress demo with all the commands being used.

+
+ +
+
+

Angular ESLint support

+
+ + + + + +
+ + +ESLint is supported in Angular 10.1.0 onward. +
+
+
+
+

What about TSLint?

+
+

TSLint is a fantastic tool. It is a linter that was written specifically to work based on the TypeScript AST format. This has advantages and disadvantages, as with most decisions we are faced with in software engineering!

+
+
+

One advantage is there is no tooling required to reconcile differences between ESLint and TypeScript AST formats, but the major disadvantage is that the tool is therefore unable to reuse any of the previous work which has been done in the JavaScript ecosystem around linting, and it has to re-implement everything from scratch. Everything from rules to auto-fixing capabilities and more.

+
+
+

However, the backers behind TSLint announced in 2019 that they would be deprecating TSLint in favor of supporting typescript-eslint in order to benefit the community. You can read more about that here

+
+
+

The TypeScript Team themselves also announced their plans to move the TypeScript codebase from TSLint to typescript-eslint, and they have been big supporters of this project. More details at https://github.com/microsoft/TypeScript/issues/30553

+
+
+

Angular ESLint support comes from the angular-eslint tooling package. Angular documentation also links to this repository as you can check in the ng lint section of the Angular CLI documentation.

+
+
+
+

Quick start with Angular and ESLint

+
+

In order to create a brand new Angular CLI workspace which uses ESLint instead of TSLint and Codelyzer, simply run the following commands:

+
+
+
+
##Install the Angular CLI and @angular-eslint/schematics globally however you want (e.g. npm, yarn, volta etc)
+
+$ npm i -g @angular/cli @angular-devkit/core @angular-devkit/schematics @angular-eslint/schematics
+
+##Create a new Angular CLI workspace using the @angular-eslint/schematics collection (instead of the default)
+
+$ ng new --collection=@angular-eslint/schematics
+
+
+
+
+

Migrating an Angular CLI project from Codelyzer and TSLint

+ +
+
+

1 - Add relevant dependencies

+
+

The first step is to run the schematic to add @angular-eslint to your project:

+
+
+
+
$ ng add @angular-eslint/schematics
+
+
+
+

This will handle installing the latest version of all the relevant packages for you and adding them to the devDependencies of your package.json.

+
+
+
+

2 - Run the convert-tslint-to-eslint schematic on a project

+
+

The next thing to do is consider which "project" you want to migrate to use ESLint. If you have a single application in your workspace you will likely have just a single entry in the projects configuration object within your angular.json file. If you have a `projects/` directory in your workspace, you will have multiple entries in your projects configuration and you will need to choose which one you want to migrate using the convert-tslint-to-eslint schematic.

+
+
+

You can run it like so:

+
+
+
+
$ ng g @angular-eslint/schematics:convert-tslint-to-eslint {{YOUR_PROJECT_NAME_GOES_HERE}}
+
+
+
+

From now on, ng lint will use ESLint!

+
+
+
+

3 - Remove root TSLint configuration and use only ESLint

+
+

Once you are happy with your ESLint setup, you simply need to remove the root-level tslint.json and potentially uninstall TSLint and any TSLint-related plugins/dependencies if your Angular CLI workspace is now no longer using TSLint at all.

+
+ +
+
+
+
+
+
+1. A package is a file or directory that is described by a package.json. . +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/meta-architecture.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/meta-architecture.html new file mode 100644 index 00000000..cf22047b --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/meta-architecture.html @@ -0,0 +1,675 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Meta Architecture

+
+ +
+
+
+

Introduction

+
+ +
+
+
+

Purpose of this document

+
+
+

In our business applications, the client easily gets underestimated. Sometimes the client is more complex to develop and design than the server. While the server architecture is nowadays easy to agree on as common sense, for clients this is not as obvious and stable, especially as it typically depends on the client framework used. Finding a concrete architecture applicable for all clients may therefore be difficult to accomplish.

+
+
+

This document tries to define on a high abstract level, a reference architecture which is supposed to be a mental image and frame for orientation regarding the evaluation and appliance of different client frameworks. As such it defines terms and concepts required to be provided for in any framework and thus gives a common ground of understanding for those acquainted with the reference architecture. This allows better comparison between the various frameworks out there, each having their own terms for essentially the same concepts. It also means that for each framework we need to explicitly map how it implements the concepts defined in this document.

+
+
+

The architecture proposed herein is neither new nor was it developed from scratch. Instead it is the gathered and consolidated knowledge and best practices of various projects (s. References).

+
+
+
+
+

Goal of the Client Architecture

+
+
+

The goal of the client architecture is to support the non-functional requirements for the client, i.e. mostly maintainability, scalability, efficiency and portability. As such it provides a component-oriented architecture following the same principles listed already in the devonfw architecture overview. Furthermore it ensures a homogeneity regarding how different concrete UI technologies are being applied in the projects, solving the common requirements in the same way.

+
+
+
+
+

Architecture Views

+
+
+

As for the server we distinguish between the business and the technical architecture. Where the business architecture is different from project to project and relates to the concrete design of dialog components given concrete requirements, the technical architecture can be applied to multiple projects.

+
+
+

The focus of this document is to provide a technical reference architecture on the client on a very abstract level defining required layers and components. How the architecture is implemented has to be defined for each UI technology.

+
+
+

The technical infrastructure architecture is out of scope for this document and although it needs to be considered, the concepts of the reference architecture should work across multiple TI architecture, i.e. native or web clients.

+
+
+
+
+

devonfw Reference Client Architecture

+
+
+

The following gives a complete overview of the proposed reference architecture. It will be built up incrementally in the following sections.

+
+
+
+Complete Client Architecture Overview +
+
+
+

Figure 1 Overview

+
+
+
+
+

Client Architecture

+
+
+

On the highest level of abstraction we see the need to differentiate between dialog components and their container they are managed in, as well as the access to the application server being the back-end for the client (e.g. a devon4j instance). This section gives a summary of these components and how they relate to each other. Detailed architectures for each component will be supplied in subsequent sections.

+
+
+
+Client Architecture Overview +
+
+
+

Figure 2 Overview of Client Architecture

+
+
+
+
+

== Dialog Component

+
+
+

A dialog component is a logical, self-contained part of the user interface. It accepts user input and actions and controls communication with the user. Dialog components use the services provided by the dialog container in order to execute the business logic. They are self-contained, i.e. they possess their own user interface together with the associated logic, data and states.

+
+
+
    +
  • +

    Dialog components can be composed of other dialog components forming a hierarchy

    +
  • +
  • +

    Dialog components can interact with each other. This includes communication of a parent to its children, but also between components independent of each other regarding the hierarchy.

    +
  • +
+
+
+
+
+

== Dialog Container

+
+
+

Dialog components need to be managed in their life-cycle and how they can be coupled to each other. The dialog container is responsible for this along with the following:

+
+
+
    +
  • +

    Bootstrapping the client application and environment

    +
    +
      +
    • +

      Configuration of the client

      +
    • +
    • +

      Initialization of the application server access component

      +
    • +
    +
    +
  • +
  • +

    Dialog Component Management

    +
    +
      +
    • +

      Controlling the life-cycle

      +
    • +
    • +

      Controlling the dialog flow

      +
    • +
    • +

      Providing means of interaction between the dialogs

      +
    • +
    • +

      Providing application server access

      +
    • +
    • +

      Providing services to the dialog components
      +(e.g. printing, caching, data storage)

      +
    • +
    +
    +
  • +
  • +

    Shutdown of the application

    +
  • +
+
+
+
+
+

== Application Server Access

+
+
+

Dialogs will require a back-end application server in order to execute their business logic. Typically in a devonfw application the service layer will provide interfaces for the functionality exposed to the client. These business oriented interfaces should also be present on the client backed by a proxy handling the concrete call of the server over the network. This component provides the set of interfaces as well as the proxy.

+
+
+
+
+

Dialog Container Architecture

+
+
+

The dialog container can be further structured into the following components with their respective tasks described in own sections:

+
+
+
+Dialog Container Architecture Overview +
+
+
+

Figure 3 Dialog Container Architecture

+
+
+
+
+

== Application

+
+
+

The application component represents the overall client in our architecture. It is responsible for bootstrapping all other components and connecting them with each other. As such it initializes the components below and provides an environment for them to work in.

+
+
+
+
+

== Configuration Management

+
+
+

The configuration management manages the configuration of the client, so the client can be deployed in different environments. This includes configuration of the concrete application server to be called or any other environment-specific property.

+
+
+
+
+

== Dialog Management

+
+
+

The Dialog Management component provides the means to define, create and destroy dialog components. It therefore offers basic life-cycle capabilities for a component. In addition it also allows composition of dialog components in a hierarchy. The life-cycle is then managed along the hierarchy, meaning when creating/destroying a parent dialog, this affects all child components, which are created/destroyed as well.

+
+
+
+
+

== Service Registry

+
+
+

Apart from dialog components, a client application also consists of services offered to these. A service can thereby encompass among others:

+
+
+
    +
  • +

    Access to the application server

    +
  • +
  • +

    Access to the dialog container functions for managing dialogs or accessing the configuration

    +
  • +
  • +

    Dialog independent client functionality such as Printing, Caching, Logging, Encapsulated business logic such as tax calculation

    +
  • +
  • +

    Dialog component interaction

    +
  • +
+
+
+

The service registry offers the possibility to define, register and lookup these services. Note that these services could be dependent on the dialog hierarchy, meaning different child instances could obtain different instances / implementations of a service via the service registry, depending on which service implementations are registered by the parents.

+
+
+

Services should be defined as interfaces allowing for different implementations and thus loose coupling.

+
+
+
+
+

Dialog Component Architecture

+
+
+

A dialog component has to support all or a subset of the following tasks:
+(T1) Displaying the user interface incl. internationalization
+(T2) Displaying business data incl. changes made to the data due to user interactions and localization of the data
+(T3) Accepting user input including possible conversion from e.g. entered Text to an Integer
+(T4) Displaying the dialog state
+(T5) Validation of user input
+(T6) Managing the business data incl. business logic altering it due to user interactions
+(T7) Execution of user interactions
+(T8) Managing the state of the dialog (e.g. Edit vs. View)
+(T9) Calling the application server in the course of user interactions

+
+
+

Following the principle of separation of concerns, we further structure a dialog component into its own architecture, allowing us to distribute responsibility for these tasks along the defined components:

+
+
+
+Dialog Component Architecture +
+
+
+

Figure 4 Overview of dialog component architecture

+
+
+
+
+

== Presentation Layer

+
+
+

The presentation layer generates and displays the user interface, accepts user input and user actions and binds these to the dialog core layer (T1-5). The tasks of the presentation layer fall into two categories:

+
+
+
    +
  • +

    Provision of the visual representation (View component)
    +The presentation layer generates and displays the user interface and accepts user input and user actions. The logical processing of the data, actions and states is performed in the dialog core layer. The data and user interface are displayed in localized and internationalized form.

    +
  • +
  • +

    Binding of the visual representation to the dialog core layer
    +The presentation layer itself does not contain any dialog logic. The data or actions entered by the user are then processed in the dialog core layer. There are three aspects to the binding to the dialog core layer. We refer to “data binding”, “state binding” and “action binding”. Syntactical and (to a certain extent) semantic validations are performed during data binding (e.g. cross-field plausibility checks). Furthermore, the formatted, localized data in the presentation layer is converted into the presentation-independent, neutral data in the dialog core layer (parsing) and vice versa (formatting).

    +
  • +
+
+
+
+
+

== Dialog Core Layer

+
+
+

The dialog core layer contains the business logic, the control logic, and the logical state of the dialog. It therefore covers tasks T5-9:

+
+
+
    +
  • +

    Maintenance of the logical dialog state and the logical data
    +The dialog core layer maintains the logical dialog state and the logical data in a form which is independent of the presentation. The states of the presentation (e.g. individual widgets) must not be maintained in the dialog core layer, e.g. the view state could lead to multiple presentation states disabling all editable widgets on the view.

    +
  • +
  • +

    Implementation of the dialog and dialog control logic
    +The component parts in the dialog core layer implement the client specific business logic and the dialog control logic. This includes, for example, the manipulation of dialog data and dialog states as well as the opening and closing of dialogs.

    +
  • +
  • +

    Communication with the application server
    +The dialog core layer calls the interfaces of the application server via the application server access component services.

    +
  • +
+
+
+

The dialog core layer should not depend on the presentation layer enforcing a strict layering and thus minimizing dependencies.

+
+
+
+
+

== Interactions between dialog components

+
+
+

Dialog components can interact in the following ways:

+
+
+
+Dialog Interactions +
+
+
+
    +
  • +

    Embedding of dialog components
    +As already said, dialog components can be hierarchically composed. This composition works by embedding one dialog component within the other. Apart from the life-cycle managed by the dialog container, the embedding needs to cater for the visual embedding of the presentation and core layer.

    +
    +
      +
    • +

      Embedding dialog presentation
      +The parent dialog needs to either integrate the embedded dialog in its layout or open it in its own modal window.

      +
    • +
    • +

      Embedding dialog core
      +The parent dialog needs to be able to access the embedded instance of its children. This allows initializing and changing their data and states. On the other hand the children might require context information offered by the parent dialog by registering services in the hierarchical service registry.

      +
    • +
    +
    +
  • +
  • +

    Dialog flow
    +Apart from the embedding of dialog components representing a tight coupling, dialogs can interact with each other by passing the control of the UI, i.e. switching from one dialog to another.

    +
  • +
+
+
+

When interacting, dialog components should interact only between the same or lower layers, i.e. the dialog core should not access the presentation layer of another dialog component.

+
+
+
+
+

Appendix

+
+ +
+
+
+

Notes about Quasar Client

+
+
+

The Quasar client architecture as the consolidated knowledge of our CSD projects is the major source for the above drafted architecture. However, the above is a much simplified and more agile version thereof:

+
+
+
    +
  • +

    Quasar Client tried to abstract from the concrete UI library being used, so it could decouple the business from the technical logic of a dialog. The presentation layer should be the only one knowing the concrete UI framework used. This level of abstraction was dropped in this reference architecture, although it might of course still make sense in some projects. For fast-moving agile projects in the web however introducing such a level of abstraction takes effort with little gained benefits. With frameworks like Angular 2 we would even introduce one additional seemingly artificial and redundant layer, since it already separates the dialog core from its presentation.

    +
  • +
  • +

    In the past and in the days of Struts, JSF, etc. the concept of session handling was important for the client since part of the client was sitting on a server with a session relating it to its remote counterpart on the user's PC. Quasar Client catered for this need, by very prominently differentiating between session and application in the root of the dialog component hierarchy. However, in the current days of SPA applications and the lowered importance of server-side web clients, this prominent differentiation was dropped. When still needed the referenced documents will provide in more detail how to tailor the respective architecture to this end.

    +
  • +
+
+
+
+ +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4ng.wiki/services-layer.html b/docs/devonfw.github.io/1.0/devon4ng.wiki/services-layer.html new file mode 100644 index 00000000..24f709b4 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4ng.wiki/services-layer.html @@ -0,0 +1,558 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Services Layer

+
+
+

The services layer is more or less what we call 'business logic layer' on the server side. +It is the layer where the business logic is placed. +The main challenges are:

+
+
+
    +
  • +

    Define application state and an API for the components layer to use it

    +
  • +
  • +

    Handle application state transitions

    +
  • +
  • +

    Perform back-end interaction (XHR, WebSocket, etc.)

    +
  • +
  • +

    Handle business logic in a maintainable way

    +
  • +
  • +

    Configuration management

    +
  • +
+
+
+

All parts of the services layer are described in this chapter. +An example which puts the concepts together can be found at the end Interaction of Smart Components through the services layer.

+
+
+
+
+

Boundaries

+
+
+

There are two APIs for the components layer to interact with the services layer:

+
+
+
    +
  • +

    A store can be subscribed to for receiving state updates over time

    +
  • +
  • +

    A use case service can be called to trigger an action

    +
  • +
+
+
+

To illustrate the fact the following figure shows an abstract overview.

+
+
+
+Smart and Dumb Components Interaction +
+
Figure 1. Boundaries to components layer
+
+
+
+
+

Store

+
+
+

A store is a class which defines and handles application state with its transitions over time. +Interaction with a store is always synchronous. +A basic implementation using RxJS can look like this.

+
+
+ + + + + +
+ + +A more profound implementation taken from a real-life project can be found here (Abstract Class Store). +
+
+
+
Listing 1. Store defined using RxJS
+
+
@Injectable()
+export class ProductSearchStore {
+
+  private stateSource = new BehaviorSubject<ProductSearchState>(defaultProductSearchState);
+  state$ = this.stateSource.asObservable();
+
+  setLoading(isLoading: boolean) {
+    const currentState = this.stateSource.getValue();
+    this.stateSource.next({
+      isLoading: isLoading,
+      products: currentState.products,
+      searchCriteria: currentState.searchCriteria
+    });
+  }
+
+}
+
+
+
+

In the example ProductSearchStore handles state of type ProductSearchState. +The public API is the property state$ which is an observable of type ProductSearchState. +The state can be changed with method calls. +So every desired change to the state needs to be modeled with a method. +In reactive terminology this would be an Action. +The store does not use any services. +Subscribing to the state$ observable leads to the subscribers receiving every new state.

+
+
+

This is basically the Observer Pattern:
+The store consumer registers itself to the observable via state$.subscribe() method call. +The first parameter of subscribe() is a callback function to be called when the subject changes. +This way the consumer - the observer - is registered. +When next() is called with a new state inside the store, all callback functions are called with the new value. +So every observer is notified of the state change. +This equals the Observer Pattern push type.

+
+
+

A store is the API for Smart Components to receive state from the service layer. +State transitions are handled automatically with Smart Components registering to the state$ observable.

+
+
+
+
+

Use Case Service

+
+
+

A use case service is a service which has methods to perform asynchronous state transitions. +In reactive terminology this would be an Action of Actions - a thunk (redux) or an effect (@ngrx).

+
+
+
+Use Case Service +
+
Figure 2. Use case services are the main API to trigger state transitions
+
+
+

A use case service's method - an action - interacts with adapters, business services and stores. +So use case services orchestrate whole use cases. +For an example see use case service example.

+
+
+
+
+

Adapter

+
+
+

An adapter is used to communicate with the back-end. +This could be a simple XHR request, a WebSocket connection, etc. +An adapter is simple in the way that it does not add anything other than the pure network call. +So there is no caching or logging performed here. +The following listing shows an example.

+
+
+

For further information on back-end interaction see Consuming REST Services

+
+
+
Listing 2. Calling the back-end via an adapter
+
+
@Injectable()
+export class ProducsAdapter {
+
+  private baseUrl = environment.baseUrl;
+
+  constructor(private http: HttpClient) { }
+
+  getAll(): Observable<Product[]> {
+    return this.http.get<Product[]>(this.baseUrl + '/products');
+  }
+
+}
+
+
+
+
+
+

Interaction of Smart Components through the services layer

+
+
+

The interaction of smart components is a classic problem which has to be solved in every UI technology. +It is basically how one dialog tells the other something has changed.

+
+
+

An example is adding an item to the shopping basket. +With this action there need to be multiple state updates.

+
+
+
    +
  • +

    The small logo showing how many items are currently inside the basket needs to be updated from 0 to 1

    +
  • +
  • +

    The price needs to be recalculated

    +
  • +
  • +

    Shipping costs need to be checked

    +
  • +
  • +

    Discounts need to be updated

    +
  • +
  • +

    Ads need to be updated with related products

    +
  • +
  • +

    etc.

    +
  • +
+
+
+
+
+

Pattern

+
+
+

To handle this interaction in a scalable way we apply the following pattern.

+
+
+
+Interaction of Smart Components via services layer +
+
Figure 3. Smart Component interaction
+
+
+

The state of interest is encapsulated inside a store. All Smart Components interested in the state have to subscribe to the store’s API served by the public observable. Thus, with every update to the store the subscribed components receive the new value. The components basically react to state changes. Altering a store can be done directly if the desired change is synchronous. Most actions are of asynchronous nature so the UseCaseService comes into play. Its actions are void methods, which implement a use case, i.e., adding a new item to the basket. It calls asynchronous actions and can perform multiple store updates over time.

+
+
+

To put this pattern into perspective the UseCaseService is a programmatic alternative to redux-thunk or @ngrx/effects. The main motivation here is to use the full power of TypeScript --strictNullChecks and to let the learning curve not to become as steep as it would be when learning a new state management framework. This way actions are just void method calls.

+
+
+
+
+

Example

+
+
+
+Smart component interaction example +
+
Figure 4. Smart Components interaction example
+
+
+

The example shows two Smart Components sharing the FlightSearchState by using the FlightSearchStore. +The use case shown is started by an event in the Smart Component FlightSearchComponent. The action loadFlight() is called. This could be submitting a search form. +The UseCaseService is FlightSearchService, which handles the use case Load Flights.

+
+
+
UseCaseService example
+

+
+
+
+
export class FlightSearchService {
+
+  constructor(
+    private flightSearchAdapter: FlightSearchAdapter,
+    private store: FlightSearchStore
+  ) { }
+
+  loadFlights(criteria: FlightSearchCriteria): void {
+    this.store.setLoadingFlights(true);
+    this.store.clearFlights();
+
+    this.flightSearchAdapter.getFlights(criteria.departureDate,
+        {
+          from: criteria.departureAirport,
+          to: criteria.destinationAirport
+        })
+      .finally(() => this.store.setLoadingFlights(false))
+      .subscribe((result: FlightTo[]) => this.store.setFlights(result, criteria));
+  }
+
+}
+
+
+
+

First the loading flag is set to true and the current flights are cleared. This leads the Smart Component showing a spinner indicating the loading action. Then the asynchronous XHR is triggered by calling the adapter. After completion the loading flag is set to false causing the loading indication no longer to be shown. If the XHR was successful, the data would be put into the store. If the XHR was not successful, this would be the place to handle a custom error. All general network issues should be handled in a dedicated class, i.e., an interceptor. So for example the basic handling of 404 errors is not done here.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/Home.html b/docs/devonfw.github.io/1.0/devon4node.wiki/Home.html new file mode 100644 index 00000000..472f8b65 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/Home.html @@ -0,0 +1,373 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4node Wiki

+
+ +
+
+
+

Layers

+
+
+ +
+
+
+ +
+

devon4node applications

+ +
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/devon4node-architecture.html b/docs/devonfw.github.io/1.0/devon4node.wiki/devon4node-architecture.html new file mode 100644 index 00000000..3c472bc7 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/devon4node-architecture.html @@ -0,0 +1,515 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4node Architecture

+
+
+

As we have mentioned in the introduction, devon4node is based on NestJS. Nest (NestJS) is a framework for building efficient, scalable Node.js server-side applications.

+
+
+
+
+

HTTP layer

+
+
+

By using NestJS, devon4node is a platform-agnostic framework. NestJS focuses only on the logical layer, and delegates the transport layer to another framework, such as ExpressJS. You can see it in the following diagram:

+
+
+
+devon4node architecture +
+
+
+

As you can see, NestJS does not listen directly for incoming requests. It has an adapter to communicate with ExpressJS, and ExpressJS is responsible for that. ExpressJS is only one of the frameworks that NestJS can work with. We have also another adapter available out-of-the-box: the Fastify adapter. With that, you can replace ExpressJS with Fastify but you can still use all your NestJS components. You can also create your own adapter to make NestJS work with other HTTP frameworks.

+
+
+

At this point, you may think: why is NestJS (and devon4node) using ExpressJS by default instead of Fastify? Because, as you can see in the previous diagram, there is a component that is dependent on the HTTP framework: the middleware. As ExpressJS is the most widely used framework, there exists a lot of middleware for it, so, in order to reuse them in our NestJS applications, NestJS uses ExpressJS by default. Anyway, you should consider which HTTP framework best fits your requirements.

+
+
+
+
+

devon4node layers

+
+
+

As other devonfw technologies, devon4node separates the application into layers.

+
+
+

Those layers are:

+
+ +
+
+layers +
+
+
+
+
+

devon4node application structure

+
+
+

Although there are many frameworks to create backend applications in NodeJS, none of them effectively solves the main problem: architecture. This is the main reason we have chosen NestJS for the devon4node applications. Besides, NestJS is highly inspired by Angular, therefore a developer who knows Angular can use his already acquired knowledge to write devon4node applications.

+
+
+

NestJS adopts various Angular concepts, such as dependency injection, piping, interceptors and modularity, among others. By using modularity we can reuse some of our modules between applications. One example that devon4node provide is the mailer module.

+
+
+
+
+

Modules

+
+
+

Creating an application module is simple; you only need to create an empty class with the decorator Module:

+
+
+
+
@Module({})
+export class AppModule {}
+
+
+
+

In the module you can define:

+
+
+
    +
  • +

    Imports: the list of imported modules that export the providers which are required in this module

    +
  • +
  • +

    Controllers: the set of controllers defined in this module which have to be instantiated

    +
  • +
  • +

    Providers: the providers that will be instantiated by the Nest injector and that may be shared at least across this module

    +
  • +
  • +

    Exports: the subset of providers that are provided by this module and should be available in other modules which import this module

    +
  • +
+
+
+

The main difference between Angular and NestJS is that NestJS modules encapsulate providers by default. This means that it’s impossible to inject providers that are neither directly part of the current module nor exported from the imported modules. Thus, you may consider the exported providers from a module as the module’s public interface, or API. Example of modules graph:

+
+
+
+modules +
+
+
+

In devon4node we have three different kinds of modules:

+
+
+
    +
  • +

    AppModule: this is the root module. Everything that our application needs must be imported here.

    +
  • +
  • +

    Global Modules: this is a special kind of module. When you make a module global, it’s accessible to every module in your application. You can see it in the next diagram. It’s the same as the previous one, but now the CoreModule is global:

    +
    +
    +module2 +
    +
    +
    +

    One example of a global module is the CoreModule. In the CoreModule you must import every module which has providers that need to be accessible in all modules of your application

    +
    +
  • +
  • +

    Feature (or application) modules: modules which contain the logic of our application. We must import them in the AppModule.

    +
  • +
+
+
+

For more information about modules, see the NestJS documentation page

+
+
+
+
+

Folder structure

+
+
+

devon4node defines a folder structure that every devon4node application must follow. The folder structure is:

+
+
+
+
├───src
+│   ├───app
+│   │   ├───core
+│   │   │   ├───auth
+│   │   │   ├───configuration
+│   │   │   ├───user
+│   │   │   └───core.module.ts
+│   │   ├───shared
+│   │   └───feature
+│   │       ├───sub-module
+│   │       │   ├───controllers
+│   │       │   ├───...
+│   │       │   ├───services
+│   │       │   └───sub-module.module.ts
+│   │       ├───controllers
+│   │       ├───interceptors
+│   │       ├───pipes
+│   │       ├───guards
+│   │       ├───filters
+│   │       ├───middlewares
+│   │       ├───model
+│   │       │   ├───dto
+│   │       │   └───entities
+│   │       ├───services
+│   │       └───feature.module.ts
+│   ├───config
+│   └───migration
+├───test
+└───package.json
+
+
+
+

devon4node schematics ensure this folder structure so, please, do not create files on your own, use the devon4node schematics.

+
+
+
+
+

NestJS components

+
+
+

NestJS provides several components that you can use in your application:

+
+
+ +
+
+

In the NestJS documentation you can find all information about each component. But, something that is missing in the documentation is the execution order. Every component can be defined in different levels: globally, in the controller or in the handler. As middleware is part of the HTTP server we can define it in a different way: globally or in the module.

+
+
+
+components +
+
+
+

It is not necessary to have components defined at every level. For example, you can have an interceptor defined globally but no others at the controller or handler level. If nothing is defined at some level, the request will continue to the next component.

+
+
+

As you can see in the previous image, the first component which receives the request is the globally defined middleware. Then, it sends the request to the module middleware. Each of them can return a response to the client, without passing the request to the next level.

+
+
+

Then, the request continues to the guards: first the global guard, then the controller guard and finally the handler guard. At this point, we can throw an exception in any component and the exception filter will catch it and send a proper error message to the client. We do not draw the filters in the graphic in order to simplify it.

+
+
+

After the guards, it is time for the interceptors: global interceptors, controller interceptors and handler interceptors. And last, before arriving at the handler inside the controller, the request passes through the pipes.

+
+
+

When the handler has the response ready to send to the client, it does not go directly to the client. It comes back through the interceptors, so we can also intercept the response. The order this time is reversed: handler interceptors, controller interceptors and global interceptors. After that, we can finally send the response to the client.

+
+
+

Now, with this in mind, you are able to create the components in a better way.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/devon4node-introduction.html b/docs/devonfw.github.io/1.0/devon4node.wiki/devon4node-introduction.html new file mode 100644 index 00000000..e9c93b5e --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/devon4node-introduction.html @@ -0,0 +1,288 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

devon4node

+
+
+

devonfw is a platform which provides solutions to building business applications which combine best-in-class frameworks and libraries as well as industry proven practices and code conventions. devonfw is 100% Open Source (Apache License version 2.0) since the beginning of 2018.

+
+
+

devon4node is the NodeJS stack of devonfw. It allows you to build business applications (backends) using NodeJS technology in standardized way based on established best-practices.

+
+
+

devon4node is based on NestJS. Nest (NestJS) is a framework for building efficient, scalable Node.js server-side applications. It uses progressive TypeScript and combines elements of OOP (Object Oriented Programming), FP (Functional Programming), and FRP (Functional Reactive Programming).

+
+
+

In this wiki you can find all documentation related to devon4node. Choose a wiki page from the side-bar.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/guides-auth-jwt.html b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-auth-jwt.html new file mode 100644 index 00000000..b2ab910d --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-auth-jwt.html @@ -0,0 +1,363 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Auth JWT module

+
+
+

devon4node provides a way to generate a default authentication module using JWT (JSON Web Token). It uses the @nestjs/passport library described here.

+
+
+

To generate the devon4node auth-jwt module you only need to execute the command: nest generate -c @devon4node/schematics auth-jwt. We generate this module inside the applications instead of distributing an npm package because this module is prone to be modified depending on the requirements. It also generates a basic user module.

+
+
+

In this page we will explain the default implementation provided by devon4node. For more information about authentication, JWT, passport and others you can see:

+
+
+ +
+
+
+
+

Auth JWT endpoints

+
+
+

In order to execute authentication operations, the auth-jwt module exposes the following endpoints:

+
+
+
    +
  • +

    POST /auth/login: receives a username and a password and returns the token in the header if the combination of username and password is correct.

    +
  • +
  • +

    POST /auth/register: registers a new user.

    +
  • +
  • +

    GET /auth/currentuser: returns the user data if the user is authenticated.

    +
  • +
+
+
+
+
+

Protect endpoints with auth-jwt

+
+
+

In order to protect your endpoints with auth-jwt module you only need to add the AuthGuard() in the UseGuards decorator. Example:

+
+
+
+
@Get('currentuser')
+@UseGuards(AuthGuard())
+currentUser(@Request() req: UserRequest) {
+  return req.user;
+}
+
+
+
+

Now, all requests to currentuser are protected by the AuthGuard.

+
+
+
+
+

Role based Access Control

+
+
+

The auth-jwt module also provides a way to control access to some endpoints by using roles. For example, if you want to grant access to an endpoint only to admins, you only need to add the Roles decorator to those endpoints with the roles allowed. Example:

+
+
+
+
@Get('currentuser')
+@UseGuards(AuthGuard())
+@Roles(roles.ADMIN)
+currentUser(@Request() req: UserRequest) {
+  return req.user;
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/guides-cli.html b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-cli.html new file mode 100644 index 00000000..6c06609c --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-cli.html @@ -0,0 +1,1230 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4node CLI

+
+
+

devon4node CLI is a tool designed to manage devon4node applications in an easy way. Highly inspired by the Nest CLI.

+
+
+

In this page we will explain all commands available and their arguments.

+
+
+
+
+

Prerequisites

+
+
+
    +
  • +

    NodeJS lts

    +
  • +
  • +

    yarn

    +
  • +
+
+
+
+
+

devon4node

+
+
+

After installing the devon4node CLI package npm i -g @devon4node/cli, the command devon4node (or d4n) must be available in your system. It has new, generate and db subcommands and also accepts the following arguments:

+
+
+

|== == +| Arguments | Description +|--help, -h | Shows help +|-v, --version | Shows version number +|== ==

+
+
+

Examples:

+
+
+
+
devon4node -h
+devon4node new -h
+
+
+
+
+
+

new

+
+
+

devon4node new allows you to create new devon4node applications. It’s an interactive command and it will ask you for everything that it needs in order to create a new application.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Show help +| --no-interactive, -n | Execute the command without ask anything to the user +| --dry-run | Allow to test changes before execute command. +| --skip-git, -g | Allow to skip git repository initialization. +| --skip-install, -s | Allow to skip package installation. +| --typeorm, -t | Allow to select the type of database. +| --config-module, -c | Allow to add config module or not. +| --swagger, -a | Allow to add swagger module or not. +| --security, -y | Allow to add security (cors + HTTP security headers) or not. +| --mailer, -m | Allow to add mailer module or not. +| --auth-jwt, -j | Allow to add Auth JWT module or not. +| --version, -v | Show version number +|== == == ==

+
+
+

Examples:

+
+
+
+
devon4node new my-app -sg
+devon4node new my-app -n
+devon4node new my-app -n -typeorm sqlite -config-module -auth-jwt
+
+
+
+
+
+

generate

+
+
+

This command allows you to generate code into your application. It receives the name of the schematic that will generate the code.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Show help +| --interactive, -i | Generate code using the interactive mode (same as new command). +| --skip-install, -s | Allow to skip package installation. +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | Path to project. +|== == == ==

+
+
+

Examples:

+
+
+
+
devon4node generate -i
+devon4node generate service --name my-service
+
+
+
+
+
+

== application

+
+
+

Create a devon4node application. It is used by the new command.

+
+
+

|== == == == +| Arguments | Description +| --dry-run, -d | Allow to test changes before execute command. +| --path, | Path to project. +| --name, -n | The name of the application. +|== == == ==

+
+
+
+
+

== angular-app

+
+
+

Create a new Angular application. Inherit from Nest CLI

+
+
+

|== == == == +| Arguments | Description +| --dry-run, -d | Allow to test changes before execute command. +| --path, | Path to project. +| --initApp | Flag to skip the angular application generation. +| --name, -n | The name of the application. +|== == == ==

+
+
+
+
+

== class

+
+
+

Create a new class. Inherit from Nest CLI

+
+
+

|== == == == +| Arguments | Description +|--dry-run, -d | Allow to test changes before execute command. +|--path, -p | The path to create the class. +|--name, -n | The name of the class. +|--flat | Flag to indicate if a directory is created. +|--spec | Specifies if a spec file is generated. +|--language | Nest class language (ts/js). +|--sourceRoot | Nest controller source root directory. +|== == == ==

+
+
+
+
+

== controller

+
+
+

Create a Nest controller.

+
+
+

|== == == == +| Arguments | Description +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the controller. +| --name, -n | The name of the controller. To create a controller with name Banana in the module fruits you need to introduce fruits/banana +| --spec | Specifies if a spec file is generated. +|== == == ==

+
+
+
+
+

== decorator

+
+
+

Create a Nest decorator. Inherit from Nest CLI

+
+
+

|== == == == +| Arguments | Description +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the decorator. +| --name, -n | The name of the decorator. +| --language | Nest decorator language (ts/js). +| --sourceRoot | Nest decorator source root directory. +| --flat | Flag to indicate if a directory is created. +|== == == ==

+
+
+
+
+

== filter

+
+
+

Create a Nest filter.

+
+
+

|== == == == +| Arguments | Description +|--dry-run, -d | Allow to test changes before execute command. +|--path, -p | The path to create the filter. +|--name, -n | The name of the filter. To create a filter with name Banana in the module fruits you need to introduce fruits/banana +|--language | Nest filter language (ts/js). +|--sourceRoot | Nest filter source root directory. +|--flat | Flag to indicate if a directory is created. +|--spec | Specifies if a spec file is generated. +|== == == ==

+
+
+
+
+

== gateway

+
+
+

Create a Nest gateway. Inherit from Nest CLI

+
+
+

|== == == == +| Arguments | Description +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the gateway. +| --name, -n | The name of the gateway. +| --language | Nest gateway language (ts/js). +| --sourceRoot | Nest gateway source root directory. +| --flat | Flag to indicate if a directory is created. +| --spec | Specifies if a spec file is generated. +|== == == ==

+
+
+
+
+

== guard

+
+
+

Create a Nest guard.

+
+
+

|== == == == +| Arguments | Description +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the guard. +| --name, -n | The name of the guard. To create a guard with name Banana in the module fruits you need to introduce fruits/banana +| --language | Nest guard language (ts/js). +| --sourceRoot | Nest guard source root directory. +| --flat | Flag to indicate if a directory is created. +| --spec | Specifies if a spec file is generated. +|== == == ==

+
+
+
+
+

== interceptor

+
+
+

Create a Nest interceptor.

+
+
+

|== == == == +| Arguments | Description +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the interceptor. +| --name, -n | The name of the interceptor. To create an interceptor with name Banana in the module fruits you need to introduce fruits/banana +| --language | Nest interceptor language (ts/js). +| --sourceRoot | Nest interceptor source root directory. +| --flat | Flag to indicate if a directory is created. +| --spec | Specifies if a spec file is generated. +|== == == ==

+
+
+
+
+

== interface

+
+
+

Create a Nest interface. Inherit from Nest CLI

+
+
+

|== == == == +| Arguments | Description +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the interface. +| --name, -n | The name of the interface. +| --sourceRoot | Nest interface source root directory +| --flat | Flag to indicate if a directory is created. +|== == == ==

+
+
+
+
+

== middleware

+
+
+

Create a Nest middleware.

+
+
+

|== == == == +| Arguments | Description +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the middleware. +| --name, -n | The name of the middleware. To create a middleware with name Banana in the module fruits you need to introduce fruits/banana +| --language | Nest middleware language (ts/js). +| --sourceRoot | Nest middleware source root directory. +| --flat | Flag to indicate if a directory is created. +| --spec | Specifies if a spec file is generated. +|== == == ==

+
+
+
+
+

== module

+
+
+

Create a Nest module.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the module. +| --name, -n | The name of the module. To create a module named module-b as a submodule of module-a, you need to introduce module-a/module-b +| --module | The path to import the module. +| --language | Nest module language (ts/js). +| --sourceRoot | Nest module source root directory. +| --skipImport | Flag to skip the module import. +|== == == ==

+
+
+
+
+

== pipe

+
+
+

Create a Nest pipe.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the pipe. +| --name, -n | The name of the pipe. To create a pipe with name Banana in the module fruits you need to introduce fruits/banana +| --language | Nest pipe language (ts/js). +| --sourceRoot | Nest pipe source root directory. +| --flat | Flag to indicate if a directory is created. +| --spec | Specifies if a spec file is generated. +|== == == ==

+
+
+
+
+

== provider

+
+
+

Create a Nest provider. Inherit from Nest CLI

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the provider. +| --name, -n | The name of the provider. +| --language | Nest provider language (ts/js). +| --sourceRoot | Nest provider source root directory. +| --flat | Flag to indicate if a directory is created. +| --spec | Specifies if a spec file is generated. +|== == == ==

+
+
+
+
+

== service

+
+
+

Create a Nest service.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the service. +| --name, -n | The name of the service. +| --spec | Specifies if a spec file is generated. To create a service with name Banana in the module fruits you need to introduce fruits/banana +|== == == ==

+
+
+
+
+

== resolver

+
+
+

Create a Nest resolver. Inherit from Nest CLI

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the resolver. +| --name, -n | The name of the resolver. +| --language | Nest resolver language (ts/js). +| --sourceRoot | Nest resolver source root directory. +| --flat | Flag to indicate if a directory is created. +| --spec | Specifies if a spec file is generated. +|== == == ==

+
+
+
+
+

== configuration

+
+
+

Create a Nest CLI configuration. Inherit from Nest CLI

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, | Path to project. +|== == == ==

+
+
+
+
+

== library

+
+
+

Create a Nest library (mono-repo). Inherit from Nest CLI

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the library. +| --name, -n | The name of the library. +| --prefix | The prefix of the library. +| --language | Nest library language. +| --rootDir | The libraries root directory. +|== == == ==

+
+
+
+
+

== sub-app

+
+
+

Create a Nest application (mono-repo). Inherit from Nest CLI

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | The path to create the application. +| --name, -n | The name of the application. +| --language | Nest application language. +| --rootDir | Applications root directory. +|== == == ==

+
+
+
+
+

== typeorm

+
+
+

Initialize typeorm into your current project in a correct way.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | Path to project. +| --db | Database type. +|== == == ==

+
+
+
+
+

== entity

+
+
+

Add a TypeOrm entity to your project. Requires TypeORM installed in the project.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | Change the application folder where you will create the entity +| --name, -n | The entity name. To create a entity with name Banana in the module fruits you need to introduce fruits/banana +|== == == ==

+
+
+
+
+

== config-module

+
+
+

Add the config module to the project.

+
+
+

It will add the @devon4node/common module as a project dependency. Then, it will generate the configuration module into your project and add it in the core module. Also, it generates the config files for the most common environments.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | Path to project. +|== == == ==

+
+
+
+
+

== crud

+
+
+

Generate CRUD methods for an entity. Requires TypeORM installed in the project.

+
+
+

It will add the @nestjsx/crud module as a project dependency. Then, it generates an entity, a CRUD controller and a CRUD service. It also registers the entity, controller and service in the module.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | Change the application folder where you will create the crud +| --name, -n | The crud name. To create crud with name Banana in the module fruits you need to introduce fruits/banana +|== == == ==

+
+
+
+
+

== mailer

+
+
+

Add @devon4node/mailer module to project.

+
+
+

It will add the @devon4node/mailer module as a project dependency. Also, it will add it to the core module and it will generate some email template examples.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | Path to project. +|== == == ==

+
+
+
+
+

== swagger

+
+
+

Add swagger module to project.

+
+
+

It will add the @nestjs/swagger module as a project dependency. Also, it will update the main.ts file in order to expose the endpoint for swagger. The default endpoint is: /v1/api

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | Path to project. +|== == == ==

+
+
+
+
+

== auth-jwt

+
+
+

Add the auth JWT module to the project.

+
+
+

It will add to your project the auth-jwt and user module. Also, it will import those modules into the core module.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | Path to project. +|== == == ==

+
+
+
+
+

== all-in-one

+
+
+

Execute multiple schematics at the same time.

+
+
+

This schematic is used by the interactive mode.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | Path to config file +|== == == ==

+
+
+
+
+

== security

+
+
+

Add cors and helmet to your project.

+
+
+

It will add helmet package as project dependency and update the main.ts file in order to enable the cors and helmet in your application.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --dry-run, -d | Allow to test changes before execute command. +| --path, -p | Path to project. +|== == == ==

+
+
+
+
+

db

+
+
+

Execute a database command. This command is an alias of the typeorm command, so if you execute the command devon4node db migration:create, under the hood it will execute typeorm migration:create. For more information see the typeorm CLI documentation.

+
+
+
+
+

== schema:sync

+
+
+

Synchronizes your entities with database schema. It runs schema update queries on all connections you have. To run update queries on a concrete connection use -c option.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which schema synchronization needs to run. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows version number +|== == == ==

+
+
+

Examples:

+
+
+
+
devon4node db schema:sync
+
+
+
+
+
+

== schema:log

+
+
+

Shows sql to be executed by schema:sync command. It shows sql log only for your default connection. To run update queries on a concrete connection use -c option.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which schema synchronization needs to run. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows version number +|== == == ==

+
+
+
+
+

== schema:drop

+
+
+

Drops all tables in the database on your default connection. To drop table of a concrete connection’s database use -c option.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which schema synchronization needs to run. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows version number +|== == == ==

+
+
+
+
+

== query

+
+
+

Executes given SQL query on a default connection. Specify connection name to run query on a specific connection.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which schema synchronization needs to run. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows version number +|== == == ==

+
+
+
+
+

== entity:create

+
+
+

Generates a new entity.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which to run a query +| --name, -n | Name of the entity class. +| --dir | Directory where entity should be created. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows number version +|== == == ==

+
+
+
+
+

== subscriber:create

+
+
+

Generates a new subscriber.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which to run a query +| --name, -n | Name of the entity class. +| --dir | Directory where entity should be created. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows number version +|== == == ==

+
+
+
+
+

== migration:create

+
+
+

Creates a new migration file.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which to run a query +| --name, -n | Name of the entity class. +| --dir | Directory where entity should be created. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows number version +|== == == ==

+
+
+

Examples:

+
+
+
+
devon4node db migration:create -n InsertData
+
+
+
+
+
+

== migration:generate

+
+
+

Generates a new migration file with sql needs to be executed to update schema.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which to run a query +| --name, -n | Name of the entity class. +| --dir | Directory where entity should be created. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows number version +|== == == ==

+
+
+

Examples:

+
+
+
+
devon4node db migration:generate -n CreateTables
+
+
+
+
+
+

== migration:run

+
+
+

Runs all pending migrations.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which run a query. +| --transaction, -t | Indicates if transaction should be used or not for migration run. Enabled by default. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows number version +|== == == ==

+
+
+
+
+

== migration:show

+
+
+

Show all migrations and whether they have been run or not

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which run a query. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows number version +|== == == ==

+
+
+
+
+

== migration:revert

+
+
+

Reverts last executed migration.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which run a query. +| --transaction, -t | Indicates if transaction should be used or not for migration revert. Enabled by default. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows number version +|== == == ==

+
+
+
+
+

== version

+
+
+

Prints TypeORM version this project uses.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --version, -v | Shows number version +|== == == ==

+
+
+

Examples:

+
+
+
+
devon4node db version
+
+
+
+
+
+

== cache:clear

+
+
+

Clears all data stored in query runner cache.

+
+
+

|== == == == +| Arguments | Description +| --help, -h | Shows help +| --connection, -c | Name of the connection on which run a query. +| --config, -f | Name of the file with connection configuration. +| --version, -v | Shows number version +|== == == ==

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/guides-code-generation.html b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-code-generation.html new file mode 100644 index 00000000..2526f7a3 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-code-generation.html @@ -0,0 +1,515 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Code Generation

+
+
+

As we mentioned in the key principles page, one of our key principles is Productivity. In order to provide that productivity, we have some tools to generate code. These tools will help you generate the common parts of the application so that you can focus only on the specific functionality.

+
+
+

Those tools are:

+
+ +
+
+
+

Nest CLI and Devon4node schematics

+
+
+

We are going to use the Nest CLI to generate code for our application; you can learn more about the Nest CLI in the official documentation.

+
+
+
+
+

Install devon4node schematics

+
+
+

First of all, you need to install Nest CLI

+
+
+

Execute the command yarn global add @nestjs/cli. +You can also use npm: npm install -g @nestjs/cli

+
+
+

And then Devon4node schematics globally with the following command:

+
+
+

yarn global add @devon4node/schematics or npm install -g @devon4node/schematics

+
+
+
+
+

==

+
+
+

If you get an error trying to execute any devon4node schematic related to a collection not found, try to reinstall @devon4node/schematics in the project folder or make sure that the schematics folder is inside @devon4node in node_modules. +yarn add @devon4node/schematics +== ==

+
+
+
+
+

Generate new devon4node application

+
+
+

To start creating a devon4node application, execute the command:

+
+
+

nest g -c @devon4node/schematics application [application-name]

+
+
+

If you do not put a name, the command line will ask you for one.

+
+
+
+
+

Generate code for TypeORM

+
+
+

Initialize TypeORM into your current project in a correct way.

+
+
+

nest g -c @devon4node/schematics typeorm

+
+
+

Then, you will be asked about which DB you want to use.

+
+
+

typeorm schematic

+
+
+
+
+

Generate CRUD

+
+
+

Generate CRUD methods for an entity. Requires TypeORM installed in the project.

+
+
+

It will add the @nestjsx/crud module as a project dependency. Then, it generates an entity, a CRUD controller and a CRUD service. It also registers the entity, controller and service in the module.

+
+
+

Execute nest g -c @devon4node/schematics crud and then you will need to write a name for the crud.

+
+
+
+crud schematic +
+
+
+
+
+

Generate TypeORM entity

+
+
+

Add a TypeORM entity to your project. Requires TypeORM installed in the project.

+
+
+

Execute nest g -c @devon4node/schematics entity and you will be asked for an entity name.

+
+
+
+
+

Add config-module

+
+
+

Add the config module to the project.

+
+
+

It will add the @devon4node/common module as a project dependency. Then, it will generate the configuration module into your project and add it in the core module. Also, it generates the config files for the most common environments.

+
+
+

The command to execute will be nest g -c @devon4node/schematics config-module

+
+
+
+
+

Add mailer module

+
+
+

Add @devon4node/mailer module to project.

+
+
+

It will add the @devon4node/mailer module as a project dependency. Also, it will add it to the core module and it will generate some email template examples.

+
+
+

Write the command nest g -c @devon4node/schematics mailer

+
+
+
+
+

Add swagger module

+
+
+

Add swagger module to project.

+
+
+

It will add the @nestjs/swagger module as a project dependency. Also, it will update the main.ts file in order to expose the endpoint for swagger. The default endpoint is: /v1/api

+
+
+

Execute the command nest g -c @devon4node/schematics swagger

+
+
+
+
+

Add auth-jwt module

+
+
+

Add the auth JWT module to the project.

+
+
+

It will add to your project the auth-jwt and user module. Also, it will import those modules into the core module.

+
+
+

Execute nest g -c @devon4node/schematics auth-jwt

+
+
+
+
+

Add security

+
+
+

Add cors and helmet to your project.

+
+
+

It will add helmet package as project dependency and update the main.ts file in order to enable the cors and helmet in your application.

+
+
+

Execute nest g -c @devon4node/schematics security

+
+
+
+
+

Generate database migrations

+
+
+
    +
  1. +

    Generate database migrations

    +
    +
      +
    1. +

      In order to create migration scripts with TypeORM, you need to install ts-node: yarn global add ts-node or npm i -g ts-node

      +
    2. +
    3. +

      Generate the tables creation migration: yarn run typeorm migration:generate -n CreateTables

      +
      +
      +insert data +
      +
      +
      +

      It will connect to the database, read all entities and then it will generate a migration file with all sql queries needed to transform the current status of the database to the status defined by the entities. If the database is empty, it will generate all sql queries needed to create all tables defined in the entities. You can find an example in the todo example

      +
      +
    4. +
    +
    +
  2. +
+
+
+

As TypeORM is the tool used for the DB, you can check the official documentation for more information. +See TypeORM CLI documentation.

+
+
+
+
+

CobiGen

+
+
+

Currently, we do not have templates to generate devon4node code (we have planned to do that in the future). Instead, we have templates that read the code of a devon4node application and generate a devon4ng application. Visit the CobiGen page for more information.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/guides-coding-conventions.html b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-coding-conventions.html new file mode 100644 index 00000000..19001906 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-coding-conventions.html @@ -0,0 +1,537 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Coding Conventions

+
+
+

devon4node defines some coding conventions in order to improve the readability, reduce the merge conflicts and be able to develop applications in an industrialized way.

+
+
+

In order to ensure that you are following the devon4node coding conventions, you can use the following tools:

+
+
+
    +
  • +

    ESLint: ESLint is a tool for identifying and reporting on patterns found in ECMAScript/JavaScript code, with the goal of making code more consistent and avoiding bugs. We recommend to use the ESLint VSCode extension (included in the devonfw Platform Extension Pack) in order to be able to see the linting errors while you are developing.

    +
  • +
  • +

    Prettier: Prettier is a code formatter. We recommend to use the Prettier VSCode extension (included in the devonfw Platform Extension Pack) and enable the editor.formatOnSave option.

    +
  • +
  • +

    devon4node application schematic: this tool will generate code following the devon4node coding conventions. Also, when you generate a new project using the devon4node application schematic, it generates the configuration files for TSLint and Prettier that satisfy the devon4node coding conventions.

    +
  • +
+
+
+

When you combine all tools, you can be sure that you follow the devon4node coding conventions.

+
+
+
+
+

Detailed devon4node Coding Conventions

+
+
+

Here we will detail some of the most important devon4node coding conventions. To be sure that you follow all devon4node coding conventions, use the tools described before.

+
+
+
+
+

Indentation

+
+
+

All devon4node code files must be indented using spaces. The indentation width must be 2 spaces.

+
+
+
+
+

White space

+
+
+

In order to improve the readability of your code, you must introduce whitespaces. Example:

+
+
+
+
if(condition){
+
+
+
+

must be

+
+
+
+
if (condition) {
+
+
+
+
+
+

Naming conventions

+
+ +
+
+
+

== File naming

+
+
+

The file name must follow the pattern: (name in kebab case).(kind of component).(extension) +The test file name must follow the pattern: (name in kebab case).(kind of component).spec.(extension)

+
+
+

Example:

+
+
+
+
auth-jwt.service.ts
+auth-jwt.service.spec.ts
+
+
+
+
+
+

== Interface naming

+
+
+

The interface names must be in pascal case, and must start with I. There is some controversy in starting the interface names with an I, but we decided to do it because in most cases you will have an interface and a class with the same name, so, to differentiate them, we decided to start the interfaces with I. Other devonfw stacks solve it by adding the suffix Impl in the class implementations.

+
+
+

Example:

+
+
+
+
interface ICoffee {}
+
+
+
+
+
+

== Class naming

+
+
+

The class names must be in pascal case.

+
+
+

Example:

+
+
+
+
class Coffee {}
+
+
+
+
+
+

== Variable naming

+
+
+

All variable names must be in camel case.

+
+
+
+
const coffeeList: Coffe[];
+
+
+
+
+
+

Declarations

+
+
+

For all variable declarations we must use const or let. var is forbidden. We prefer to use const when possible.

+
+
+
+
+

Programming practices

+
+ +
+
+
+

== Trailing comma

+
+
+

All multiline object and array literals must end with a trailing comma. Example:

+
+
+
+
{
+  one: 'one',
+  two: 'two'  // bad
+}
+{
+  one: 'one',
+  two: 'two', // good
+}
+
+
+
+
+
+

== Arrow functions

+
+
+

All anonymous functions must be defined with the arrow function notation. In most cases it’s not a problem, but sometimes, when you do not want to bind this when you define the function, you can use the other function definition. In these special cases you must disable the linter for those statements.

+
+
+
+
+

== Comments

+
+
+

Comments must start with a whitespace. Example:

+
+
+
+
//This is a bad comment
+// This is OK
+
+
+
+
+
+

== Quotemarks

+
+
+

For string definitions, we must use single quotes.

+
+
+
+
+

== if statements

+
+
+

In all if statements you always must use brackets. Example:

+
+
+
+
// Bad if statement
+if (condition)
+  return true;
+
+// Good if statement
+if (condition) {
+  return true;
+}
+
+
+
+
+
+

Pre-commit hooks

+
+
+

In order to ensure that your new code follows the coding conventions, devon4node uses by default husky. Husky is a tool that allows you to configure git hooks easily in your project. When you make a git commit in your devon4node project, it will execute two actions:

+
+
+
    +
  • +

    Prettify the staged files

    +
  • +
  • +

    Execute the linter in the staged files

    +
  • +
+
+
+

If any action fails, you won’t be able to commit your new changes.

+
+
+ + + + + +
+ + +If you want to skip the git hooks, you can do a commit passing the --no-verify flag. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/guides-configuration-module.html b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-configuration-module.html new file mode 100644 index 00000000..efe8a349 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-configuration-module.html @@ -0,0 +1,415 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Configuration Module

+
+
+

devon4node provides a way to generate a configuration module inside your application. To generate it you only need to execute the command nest g -c @devon4node/schematics config-module. This command will generate inside your application:

+
+
+
    +
  • +

    Configuration module inside the core module.

    +
  • +
  • +

    config folder where all environment configuration are stored.

    +
    +
      +
    • +

      default configuration: configuration for your local development environment.

      +
    • +
    • +

      develop environment configuration for the develop environment.

      +
    • +
    • +

      uat environment configuration for the uat environment.

      +
    • +
    • +

      production environment configuration for the production environment.

      +
    • +
    • +

      production environment configuration for the production environment.

      +
    • +
    • +

      test environment configuration used by test.

      +
    • +
    +
    +
  • +
+
+
+ + + + + +
+ + +some code generators will add some properties to this module, so, be sure that the config module is the first module that you generate in your application. +
+
+
+
+
+

Use the configuration service

+
+
+

To use the configuration service, you only need to inject it as dependency. As configuration module is defined in the core module, it will be available everywhere in your application. Example:

+
+
+
+
export class MyProvider {
+  constructor(public readonly configService: ConfigurationService) {}
+
+  myMethod() {
+    return this.configService.isDev;
+  }
+}
+
+
+
+
+
+

Choose an environment file

+
+
+

By default, when you use the configuration service it will take the properties defined in the default.ts file. If you want to change the configuration file, you only need to set the NODE_ENV environment property with the name of the desired environment. Examples: in windows execute set NODE_ENV=develop before executing the application, in linux execute NODE_ENV=develop before executing the application or NODE_ENV=develop yarn start.

+
+
+
+
+

Override configuration properties

+
+
+

Sometimes, you want to keep some configuration property secure, and you do not want to publish it to the repository, or you want to reuse some configuration file but you need to change some properties. For those scenarios, you can override configuration properties by defining an environment variable with the same name. For example, if you want to override the property host, you can do: set host="newhost". It also works with objects. For example, if you want to change the value of secret in the property jwtConfig for this example, you can set an environment variable like this: set jwtConfig="{"secret": "newsecret"}". As you can see, this environment variable has a JSON value. It will take the object and merge the jwtConfig property with the properties defined inside the environment variable. The other properties maintain their value. The behaviour is the same for the nested objects.

+
+
+
+
+

Add a configuration property

+
+
+

In order to add a new property to the configuration module, you need to follow some steps:

+
+
+
    +
  • +

    Add the property to the IConfig interface in the src/app/core/configuration/types.ts file. With this, we can ensure that the ConfigurationService and the environment files have that property at compile time.

    +
  • +
  • +

    Add the new property getter to ConfigurationService. You must use the get method of ConfigurationService to ensure that the property will be loaded from the desired config file. You can also add extra logic if needed.

    +
  • +
  • +

    Add the property to all config files inside the src/config folder.

    +
  • +
+
+
+

Example:

+
+
+

We want to add the property devonfwUrl to our ConfigurationService, so:

+
+
+

We add the following code in IConfig interface:

+
+
+
+
devonfwUrl: string;
+
+
+
+

Then, we add the getter in the ConfigurationService:

+
+
+
+
get devonfwUrl(): string {
+  return this.get('devonfwUrl')!;
+}
+
+
+
+

Finally, we add the definition in all config files:

+
+
+
+
devonfwUrl: 'https://devonfw.com',
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/guides-dependency-injection.html b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-dependency-injection.html new file mode 100644 index 00000000..c1aaeb25 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-dependency-injection.html @@ -0,0 +1,389 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Dependency Injection

+
+
+

The dependency injection is a well-known common design pattern applied by frameworks in all languages, like Spring in Java, Angular and others. The intention of this page is not to explain how dependency injection works, but instead how it is addressed by NestJS.

+
+
+

NestJS resolves the dependency injection in its modules. When you define a provider in a module, it can be injected in all components of the module. By default, those providers are only available in the module where they are defined. The only way to export a module provider to other modules which import it is adding that provider to the exports array. You can also reexport modules.

+
+
+
+
+

Inject dependencies in NestJS

+
+
+

In order to inject a dependency in a NestJS component, you need to declare it in the component constructor. Example:

+
+
+
+
export class CoffeeController {
+  constructor(public readonly conffeeService: CoffeeService) {}
+}
+
+
+
+

NestJS can resolve all dependencies that are defined in the module as provider, and also the dependencies exported by the modules imported. Example:

+
+
+
+
@Module({
+  controllers: [CoffeeController],
+  providers: [CoffeeService],
+})
+export class CoffeeModule {}
+
+
+
+

Injecting dependencies in the constructor is the preferred choice, but sometimes it is not possible. For example, when you are extending another class and want to keep the constructor definition. In these specific cases we can inject dependencies in the class properties. Example:

+
+
+
+
export class CoffeeController {
+  @Inject(CoffeeService)
+  private readonly conffeeService: CoffeeService;
+}
+
+
+
+
+
+

Dependency Graph

+
+
+
+dependency injection1 +
+
+
+

In the previous image, the Module A can inject dependencies exported by Module B, Module E and Module F. If Module B reexports Module C and Module D, they are also accessible by Module A.

+
+
+

If there is a conflict with the injection token, it resolves the provider with the least distance to the module. For example: if the modules C and F export a UserService provider, the Module A will resolve the UserService exported by the Module F, because the distance from Module A to Module F is 1, and the distance from Module A to Module C is 2.

+
+
+

When you define a module as global, the dependency injection system is the same. The only difference is that now all modules have a link to the global module. For example, if we make Module C global, the dependency graph will be:

+
+
+
+dependency injection2 +
+
+
+
+
+

Custom providers

+
+
+

When you want to change the provider name, you can use a NestJS feature called custom providers. For example, if you want to define a provider called MockUserService with the provider token UserService you can define it like:

+
+
+
+
@Module({
+  providers: [{
+    provide: UserService,
+    useValue: MockUserService,
+  }],
+})
+
+
+
+

With this, when you want to inject UserService as a dependency, the MockUserService will be injected.

+
+
+

Custom provider token can be also a string:

+
+
+
+
@Module({
+  providers: [{
+    provide: 'USER_SERVICE',
+    useValue: MockUserService,
+  }],
+})
+
+
+
+

but now, when you want to inject it as dependency you need to use the @Inject decorator.

+
+
+
+
constructor(@Inject('USER_SERVICE') userService: any) {}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/guides-entities.html b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-entities.html new file mode 100644 index 00000000..6a49b21a --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-entities.html @@ -0,0 +1,277 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Entities

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/guides-eslint-sonarqube-config.html b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-eslint-sonarqube-config.html new file mode 100644 index 00000000..5ff10985 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-eslint-sonarqube-config.html @@ -0,0 +1,307 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Importing your ESLint reports into SonarQube

+
+
+

This guide covers the import of ESLint reports into SonarQube instances in CI environments, as this is the recommended way of using ESLint and SonarQube for devon4node projects. The prerequisites for this process are a CI environment, preferably a Production Line instance, and the ESLint CLI, which is already included when generating a new devon4node project.

+
+
+
+
+

Configuring the ESLint analysis

+
+
+

You can configure the ESLint analysis parameters in the .eslintrc.js file inside the top-level directory of your project. If you created your node project using the devon4node application schematic, this file will already exist. If you want to make further adjustments to it, have a look at the ESLint documentation.

+
+
+

The ESLint analysis script lint is already configured in the scripts part of your package.json. Simply add -f json > report.json, so that the output of the analysis is saved in a .json file. Additional information to customization options for the ESLint CLI can be found here.

+
+
+

To run the analysis, execute the script with npm run lint inside the base directory of your project.

+
+
+
+
+

Configuring SonarQube

+
+
+

If you haven’t already generated your CICD-related files, follow the tutorial on the devon4node schematic of our CICDGEN project, as you will need a Jenkinsfile configured in your project to proceed.

+
+
+

Inside the script for the SonarQube code analysis in your Jenkinsfile, add the parameter -Dsonar.eslint.reportPaths=report.json. Now, whenever a SonarQube analysis is triggered by your CI environment, the generated report will be loaded into your SonarQube instance. +To avoid duplicated issues, you can associate an empty TypeScript quality profile with your project in its server configurations.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/guides-grapql.html b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-grapql.html new file mode 100644 index 00000000..2eadff35 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-grapql.html @@ -0,0 +1,603 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

GraphQL on Devon4Node

+
+
+

GraphQL is a query language that gets exactly the data that we ask for instead of static predefined responses.

+
+
+

For example, on a regular API a get by id method would return something like:

+
+
+
+
{
+  "location": {
+    "lon": 00.14,
+    "lat": 54.11
+  },
+  "station": "dsrEE3Sg",
+  "visibility": 5000,
+  "wind":{
+    "speed": 6.2,
+    "deg": 78
+  },
+  "logs": [...]
+  ...
+}
+
+
+
+

But if we want to get only the wind data we have to create another endpoint that returns the specified data.

+
+
+

But instead with graphQL we can get different information without creating new endpoints, in this case we only want the wind data so it would return:

+
+
+
+
{
+  "wind":{
+    "speed": 6.2,
+    "deg": 78
+  }
+}
+
+
+
+

To install it:

+
+
+
+
yarn add @nestjs/graphql graphql-tools graphql apollo-server-express
+
+
+
+
+
+

Schema first

+
+ +
+
+
+

==

+
+
+

This tutorial uses the schema first method.

+
+
+

We assume you have already a functioning TODO module / app.

+
+
+

If not you can use Devon4node GraphQL sample +== ==

+
+
+

First we need to import GraphQLModule to our app.module.ts.

+
+
+
+
...
+import { GraphQLModule } from '@nestjs/graphql';
+import { join } from 'path';
+
+@Module({
+  imports: [
+    // Your module import
+    GraphQLModule.forRoot({
+      typePaths: ['./**/*.graphql'],
+      definitions: {
+        path: join(process.cwd(), 'src/graphql.ts'),
+        outputAs: 'class',
+      },
+    }),
+  ],
+})
+export class AppModule {}
+
+
+
+

The typePaths indicates the location of the schema definition files.

+
+
+

The definitions indicates the file where the typescript definitions will automatically save, adding the outputAs: 'class' saves those definitions as classes.

+
+
+
+
+

Schema

+
+
+

Graphql is a typed language with object types, scalars, and enums.

+
+
+

We use query to define the methods we are going to use for fetching data, and mutations are used for modifying this data, similar to how GET and POST work.

+
+
+

Let’s define the elements, queries and mutations that our module is going to have.

+
+
+

For that we have to create a graphql file in our module, in this case we are going to name it "schema.graphql".

+
+
+
+
type Todo {
+  id: ID
+  task: String
+}
+
+type Query {
+  todos: [Todo]
+  todoById: Todo
+}
+
+type Mutation {
+  createTodo(task: String): Todo
+  deleteTodo(id: String): Todo
+}
+
+
+
+

For more information about Types go to the official graphQL documentation

+
+
+
+
+

Resolver

+
+
+

Resolver has the instructions to turn GraphQL orders into the data requested.

+
+
+

To create a resolver we go to our module and then create a new todo.resolver.ts file, import the decorators needed and set the resolver.

+
+
+
+
import { Resolver, Args, Mutation, Query } from '@nestjs/graphql';
+import { TodoService } from '../services/todo.service';
+import { Todo } from '../schemas/todo.schema';
+
+@Resolver()
+export class TodoResolver {
+  constructor(private readonly todoService: TodoService) {}
+
+  @Query('todos')
+  findAll(): Promise<Todo[]> {
+    return this.todoService.findAll();
+  }
+
+  @Query('todoById')
+  findOneById(@Args('id') id: string): Promise<Todo | null> {
+    return this.todoService.findOneById(id);
+  }
+
+  @Mutation()
+  createTodo(@Args('task') task: string): Promise<Todo> {
+    return this.todoService.create(task);
+  }
+
+  @Mutation()
+  deleteTodo(@Args('id') id: string): Promise<Todo | null> {
+    return this.todoService.delete(id);
+  }
+}
+
+
+
+

@Resolver() indicates that the next class is a resolver.

+
+
+

@Query is used to get data.

+
+
+

@Mutation is used to create or modify data.

+
+
+

Here we have also an argument decorator @Args which is an object with the arguments passed into the field in the query.

+
+
+

By default we can access the query or mutation using the method’s name, for example:

+
+
+

For the deleteTodo mutation.

+
+
+
+
mutation {
+  deleteTodo( id: "6f7ed2q8" ){
+    id,
+    task
+  }
+}
+
+
+
+

But if we write something different on the decorator, we change the name, for example:

+
+
+

For the findAll query, we named it todos.

+
+
+
+
{
+  todos{
+    id,
+    task
+  }
+}
+
+
+
+

Also if we go back to the schema.graphql, we will see how we define the query with todos.

+
+
+

Learn more about Resolvers, mutations and their argument decorators on the NestJS documentation.

+
+
+
+
+

Playground

+
+
+

To test our backend we can use tools as Postman, but graphql already gives us a playground to test our Resolvers, we can access by default on http://localhost:3000/graphql.

+
+
+

We can call a query, or several queries this way:

+
+
+
+
{
+  findAll{
+    id,
+    task
+  }
+}
+
+
+
+

And the output will look something like:

+
+
+
+
{
+  "data": {
+    "findAll": [
+      {
+        "id": "5fb54b30e686cb49500b6728",
+        "task": "clean dishes"
+      },
+      {
+        "id": "5fb54b3be686cb49500b672a",
+        "task": "burn house"
+      }
+    ]
+  }
+}
+
+
+
+

As we can see, we get a json "data" with an array of results.

+
+
+

And for our mutations it’s very similar, in this case we create a todo with task "rebuild house" and we are going to ask on the response just for the task data, we don’t want the id.

+
+
+
+
mutation{
+  createTodo (
+    task: "rebuild house"
+  ){
+    task
+  }
+}
+
+
+
+

And the output

+
+
+
+
{
+  "data": {
+    "createTodo": {
+      "task": "rebuild house"
+    }
+  }
+}
+
+
+
+

In this case we return just one item so there is no array, we also got just the task data but if we want the id too, we just have to add it on the request.

+
+
+

To make the playground unavailable we can add an option to the app.module import:

+
+
+
+
...
+GraphQLModule.forRoot({
+  ...
+  playground: false,
+}),
+...
+
+
+
+

For further information go to the official NestJS documentation

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/guides-key-principles.html b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-key-principles.html new file mode 100644 index 00000000..77f4f4ae --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-key-principles.html @@ -0,0 +1,388 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Key Principles

+
+
+

devon4node is built following some basic principles like:

+
+
+ +
+
+

But key principles that best define devon4node (and are inherited from NestJS) are:

+
+
+
    +
  • +

    Simplicity (aka KISS)

    +
  • +
  • +

    Reusability

    +
  • +
  • +

    Productivity

    +
  • +
+
+
+
+
+

Simplicity

+
+
+

In devon4node we tried to do everything as simple as possible. Following this principle we will be able to build easy-to-maintain applications.

+
+
+

For example, in order to expose all CRUD operations for an entity, you only need to create a controller like:

+
+
+
+
@Crud({
+  model: {
+    type: Employee,
+  },
+})
+@CrudType(Employee)
+@Controller('employee/employees')
+export class EmployeeCrudController {
+  constructor(public service: EmployeeCrudService) {}
+}
+
+
+
+

You can find this code in the employee example. Only with this code you’re exposing the full CRUD operations for the employee entity. As you can see, it’s an empty class with some decorators and the EmployeeCrudService injected as dependency. Simple, isn’t it? The EmployeeCrudService is also simple:

+
+
+
+
@Injectable()
+export class EmployeeCrudService extends TypeOrmCrudService<Employee> {
+  constructor(@InjectRepository(Employee) repo: Repository<Employee>) {
+    super(repo);
+  }
+}
+
+
+
+

Another empty class which extends from TypeOrmCrudService<Employee> and injects the Employee Repository as dependency. Nothing else.

+
+
+

With these examples you can get an idea of how simple it can be to code a devon4node application .

+
+
+
+
+

Reusability

+
+
+

NestJS (and devon4node) applications are designed in a modular way. This allows you to isolate some functionality in a module, and then reuse it in every application that you need. This is the same behaviour that Angular has. You can see it in the NestJS modules like TypeORM, Swagger and others. Also, in devon4node we have the Mailer module.

+
+
+

In your applications, you only need to import those modules and then you will be able to use the functionality that they implement. Example

+
+
+
+
@Module({
+  imports: [ AuthModule, ConfigurationModule ],
+})
+export class SomeModule {}
+
+
+
+
+
+

Productivity

+
+
+

devon4node is designed to create secure enterprise applications. But also, it allows you to do it in a fast way. To increase productivity, devon4node provides schematics in order to generate some boilerplate code.

+
+
+

For example, to create a module you need to create a new file for a module (or copy it) and write the code, then you need to import it in the AppModule. This is an easy example, but you can introduce some errors: forget to import it in the AppModule, introduce errors with the copy/paste and so on. By using the command nest g module --name <module-name> it will do everything for you. Just a simple command. In this specific case probably you do not see any advantage, but there are other complex cases where you can generate more complex code with nest and devon4node schematics command.

+
+
+

See code generation in order to know how to increase your productivity creating devon4node applications.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/guides-logger.html b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-logger.html new file mode 100644 index 00000000..248ebe5e --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-logger.html @@ -0,0 +1,310 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Logger

+
+
+

When you create a new devon4node application, it already has a logger: src/app/shared/logger/winston.logger.ts. This logger provides the methods log, error and warn. All of those methods will write a log message, but with a different log level.

+
+
+

The winston logger has two transports: one to log everything inside the file logs/general.log and the other to log only the error logs inside the file logs/error.log. In addition, it uses the default NestJS logger in order to show the logs in the console.

+
+
+

As you can see it is a simple example about how to use a logger in a devon4node application. It will be updated to a complex one in the next versions.

+
+
+
+
+

How to use logger

+
+
+

In order to use the logger you only need to inject the logger as a dependency:

+
+
+
+
constructor(logger: WinstonLogger){}
+
+
+
+

and then use it

+
+
+
+
async getAll() {
+  this.service.getAll();
+  this.logger.log('Returning all data');
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/guides-mailer.html b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-mailer.html new file mode 100644 index 00000000..d17eb4d5 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-mailer.html @@ -0,0 +1,624 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Mailer Module

+
+
+

This module enables you to send emails in devon4node. It also provides a template engine using Handlebars.

+
+
+

It is a NestJS module that injects into your application a MailerService, which is responsible for sending the emails using the nodemailer library.

+
+
+
+
+

Installing

+
+
+

Execute the following command in a devon4node project:

+
+
+
+
yarn add @devon4node/mailer
+
+
+
+
+
+

Configuring

+
+
+

To configure the mailer module, you only need to import it in your application into another module. Example:

+
+
+
+
@Module({
+  ...
+  imports: [
+    MailerModule.forRoot(),
+  ],
+  ...
+})
+
+
+
+

You must pass the configuration using the forRoot or forRootAsync methods.

+
+
+
+
+

forRoot()

+
+
+

The forRoot method receives an MailerModuleOptions object as parameter. It configures the MailerModule using the input MailerModuleOptions object.

+
+
+

The structure of MailerModuleOptions is:

+
+
+
+
{
+  hbsOptions?: {
+    templatesDir: string;
+    extension?: string;
+    partialsDir?: string;
+    helpers?: IHelperFunction[];
+    compilerOptions?: ICompileOptions;
+  },
+  mailOptions?: nodemailerSmtpTransportOptions;
+  emailFrom: string;
+}
+
+
+
+

Here, you need to specify the Handlebars compile options, the nodemailer transport options and the email address which will send the emails. +Then, you need to call the forRoot function in the module imports. Example:

+
+
+
+
@Module({
+  ...
+  imports: [
+    MailerModule.forRoot({
+      mailOptions: {
+        host: 'localhost',
+        port: 1025,
+        secure: false,
+        tls: {
+          rejectUnauthorized: false,
+        },
+      },
+      emailFrom: 'noreply@capgemini.com',
+      hbsOptions: {
+        templatesDir: join(__dirname, '../..', 'templates/views'),
+        partialsDir: join(__dirname, '../..', 'templates/partials'),
+        helpers: [{
+          name: 'fullname',
+          func: person => `${person.name} ${person.surname}`,
+        }],
+      },
+    }),
+  ...
+})
+
+
+
+
+
+

forRootAsync()

+
+
+

The method forRootAsync enables you to get the mailer configuration in an asynchronous way. It is useful when you need to get the configuration using, for example, a service (e.g. ConfigurationService).

+
+
+

Example:

+
+
+
+
@Module({
+  ...
+  imports: [
+    MailerModule.forRootAsync({
+      imports: [ConfigurationModule],
+      useFactory: (config: ConfigurationService) => {
+        return config.mailerConfig;
+      },
+      inject: [ConfigurationService],
+    }),
+  ...
+})
+
+
+
+

In this example, we use the ConfigurationService in order to get the MailerModuleOptions (the same as forRoot)

+
+
+
+
+

Usage

+
+
+

In order to use it, you only need to inject the MailerService using dependency injection.

+
+
+

Example:

+
+
+
+
@Injectable()
+export class CatsService {
+  constructor(private readonly mailer: MailerService) {}
+}
+
+
+
+

Then, you only need to use the methods provided by the MailerService in your service. Take into account that you can inject it in every place that support NestJS dependency injection.

+
+
+
+
+

MailerService methods

+
+ +
+
+
+

sendPlainMail

+
+
+

The method sendPlainMail receives the email content and sends an email.

+
+
+

The method signatures are:

+
+
+
+
sendPlainMail(emailOptions: SendMailOptions): Promise<SentMessageInfo>;
+sendPlainMail(to: string, subject: string, mail: string): Promise<SentMessageInfo>;
+
+
+
+

Examples:

+
+
+
+
this.mailer.sendPlainMail({
+  to: 'example@example.com',
+  subject: 'This is a subject',
+  html: '<h1>Hello world</h1>'
+});
+this.mailer.sendPlainMail('example@example.com', 'This is a subject', '<h1>Hello world</h1>');
+
+
+
+
+
+

sendTemplateMail

+
+
+

The method sendTemplateMail sends an email based on a Handlebars template. The templates are registered using the templatesDir option or using the addTemplate method. +The template name is the name of the template (without extension) or the first parameter of the method addTemplate.

+
+
+

The method signatures are:

+
+
+
+
sendTemplateMail(emailOptions: SendMailOptions, templateName: string, emailData: any, hbsOptions?: RuntimeOptions): Promise<SentMessageInfo>;
+sendTemplateMail(to: string, subject: string, templateName: string, emailData: any, hbsOptions?: RuntimeOptions): Promise<SentMessageInfo>;
+
+
+
+

Examples:

+
+
+
+
this.mailer.sendTemplateMail({
+  to: 'example@example.com',
+  subject: 'This is a subject',
+  html: '<h1>Hello world</h1>'
+}, 'template1', { person: {name: 'Dario', surname: 'Rodriguez'}});
+this.mailer.sendTemplateMail('example@example.com', 'This is a subject', 'template1', { person: {name: 'Dario', surname: 'Rodriguez'}});
+
+
+
+
+
+

addTemplate

+
+
+

Adds a new template to the MailerService.

+
+
+

Method signature:

+
+
+
+
addTemplate(name: string, template: string, options?: CompileOptions): void;
+
+
+
+

Example:

+
+
+
+
this.mailer.addTemplate('newTemplate', '<html><head></head><body>{{>partial1}}</body></html>')
+
+
+
+
+
+

registerPartial

+
+
+

Register a new partial in Handlebars.

+
+
+

Method signature:

+
+
+
+
registerPartial(name: string, partial: Handlebars.Template<any>): void;
+
+
+
+

Example:

+
+
+
+
this.mailer.registerPartial('partial', '<h1>Hello World</h1>')
+
+
+
+
+
+

registerHelper

+
+
+

Register a new helper in Handlebars.

+
+
+

Method signature:

+
+
+
+
registerHelper(name: string, helper: Handlebars.HelperDelegate): void;
+
+
+
+

Example:

+
+
+
+
this.mailer.registerHelper('fullname', person => `${person.name} ${person.surname}`)
+
+
+
+
+
+

Handlebars templates

+
+
+

As mentioned above, this module allows you to use Handlebars as template engine, but it is optional. If you do not need Handlebars, you just need to keep the hbsOptions undefined.

+
+
+

In order to get the templates from the file system, you can specify the template folder, the partials folder and the helpers. +At the moment of module initialization, it will read the content of the template folder, and will register every file with the name (without extension) and the content as Handlebars template. It will do the same for the partials.

+
+
+

You can specify the extension of template files using the extension parameter. The default value is .handlebars

+
+
+
+
+

Local development

+
+
+

If you want to work with this module but you don’t have a SMTP server, you can use the streamTransport. Example:

+
+
+
+
{
+  mailOptions: {
+    streamTransport: true,
+    newline: 'windows',
+  },
+  emailFrom: ...
+  hbsOptions: ...
+}
+
+
+
+

Then, you need to get the sendPlainMail or sendTemplateMail result, and print the email to the standard output (STDOUT). Example:

+
+
+
+
const mail = await this.mailer.sendTemplateMail(...);
+
+mail.message.pipe(process.stdout);
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/guides-serializer.html b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-serializer.html new file mode 100644 index 00000000..bf34cd2d --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-serializer.html @@ -0,0 +1,338 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Serializer

+
+
+

Serialization is the process of translating data structures or object state into a format that can be transmitted across network and reconstructed later.

+
+
+

NestJS by default serializes all data to JSON (JSON.stringify). Sometimes this is not enough. In some situations you need to exclude some property (e.g. password). Instead of doing it manually, devon4node provides an interceptor (ClassSerializerInterceptor) that will do it for you. You only need to return a class instance as always and the interceptor will transform those classes to the expected data.

+
+
+

The ClassSerializerInterceptor takes the class-transformer decorators in order to know how to transform the class and then send the result to the client.

+
+
+

Some of class-transformer decorators are:

+
+
+
    +
  • +

    Expose

    +
  • +
  • +

    Exclude

    +
  • +
  • +

    Type

    +
  • +
  • +

    Transform

    +
  • +
+
+
+

And methods to transform data:

+
+
+
    +
  • +

    plainToClass

    +
  • +
  • +

    plainToClassFromExist

    +
  • +
  • +

    classToPlain

    +
  • +
  • +

    classToClass

    +
  • +
  • +

    serialize

    +
  • +
  • +

    deserialize

    +
  • +
  • +

    deserializeArray

    +
  • +
+
+
+

See the class-transformer page for more information.

+
+
+

See NestJS serialization page for more information about ClassSerializerInterceptor.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/guides-swagger.html b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-swagger.html new file mode 100644 index 00000000..e4bdb905 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-swagger.html @@ -0,0 +1,303 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Swagger

+
+
+

We can use swagger (OpenAPI) in order to describe the endpoints that our application exposes.

+
+
+

NestJS provides a module which will read the code of our application and will expose one endpoint where we can see the swagger.

+
+
+

Adding swagger to a devon4node application is simple: you only need to execute the command nest g -c @devon4node/schematics swagger and it will do everything for you. The next time that you start your application, you will be able to see the swagger at /v1/api endpoint.

+
+
+

The swagger module can read your code in order to create the swagger definition, but sometimes you need to help it by decorating your handlers.

+
+
+

For more information about decorators and other behaviour about swagger module, you can see the NestJS swagger documentation page

+
+
+ + + + + +
+ + +the OpenAPI specification that this module supports is v2.0. The OpenAPI v3.0 is not available yet by using this module. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/guides-typeorm.html b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-typeorm.html new file mode 100644 index 00000000..278c256d --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-typeorm.html @@ -0,0 +1,407 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

TypeORM

+
+
+

TypeORM is the default ORM provided by devon4node. It supports MySQL, MariaDB, Postgres, CockroachDB, SQLite, Microsoft SQL Server, Oracle, sql.js relational database and also supports MongoDB NoSQL database.

+
+
+

Adding TypeORM support to a devon4node application is very easy: you only need to execute the command nest g -c @devon4node/schematics typeorm and it will add all required dependencies to the project and also imports the @nestjs/typeorm module.

+
+
+

For more information about TypeORM and the integration with NestJS you can visit TypeORM webpage, TypeORM GitHub repository and NestJS TypeORM documentation page

+
+
+
+
+

Configuration

+
+
+

When you have the configuration module, the TypeORM generator will add one property in order to be able to configure the database depending on the environment. Example:

+
+
+
+
database: {
+  type: 'sqlite',
+  database: ':memory:',
+  synchronize: false,
+  migrationsRun: true,
+  logging: true,
+  entities: ['dist/**/*.entity.js'],
+  migrations: ['dist/migration/**/*.js'],
+  subscribers: ['dist/subscriber/**/*.js'],
+  cli: {
+    entitiesDir: 'src/entity',
+    migrationsDir: 'src/migration',
+    subscribersDir: 'src/subscriber',
+  },
+},
+
+
+
+

This object is a TypeORM ConnectionOptions. For more information about it visit the TypeORM Connection Options page.

+
+
+

There is also a special case: the default configuration. As the devon4node CLI needs the database configuration when you use the devon4node db command, we also provide the ormconfig.json file. In this file you must put the configuration for your local environment. In order to avoid duplicating the configuration for the local environment, in the default config file the database property is set up like:

+
+
+
+
database: require('../../ormconfig.json'),
+
+
+
+

So, you only need to maintain the ormconfig.json file for the local environment.

+
+
+
+
+

Entity

+
+
+

Entity is a class that maps to a database table. The devon4node schematics has a generator to create new entities. You only need to execute the command nest g -c @devon4node/schematics entity <entity-name> and it generates the entity.

+
+
+

In the entity, you must define all columns, relations, primary keys of your database table. By default, devon4node provides a class named BaseEntity. All entities created with the devon4node schematics will extend the BaseEntity. This entity provides you some common columns:

+
+
+
    +
  • +

    id: the primary key of your table

    +
  • +
  • +

    version: the version of the entry (used for auditing purposes)

    +
  • +
  • +

    createdAt: creation date of the entry (used for auditing purposes)

    +
  • +
  • +

    updatedAt: last update date of the entry (used for auditing purposes)

    +
  • +
+
+
+

For more information about Entities, please visit the TypeORM entities page

+
+
+
+
+

Repository

+
+
+

With repositories, you can manage (insert, update, delete, load, etc.) a concrete entity. Using this pattern, we have separated the data (Entities) from the methods to manage it (Repositories).

+
+
+

To use a repository you only need to:

+
+
+
    +
  • +

    Import it in the module as follows:

    +
    +
    +
    @Module({
    +  imports: [TypeOrmModule.forFeature([Employee])],
    +})
    +
    +
    +
    + + + + + +
    + + +if you generate the entities with the devon4node schematic, this step is not necessary, devon4node schematic will do it for you. +
    +
    +
  • +
  • +

    Inject the repository as dependency in your service:

    +
    +
    +
    constructor(@InjectRepository(Employee) employeeRepository: Repository<Employee>) {}
    +
    +
    +
  • +
+
+
+

You can see more details in the NestJS database and NestJS TypeORM documentation pages.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/guides-validation.html b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-validation.html new file mode 100644 index 00000000..af6e118d --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/guides-validation.html @@ -0,0 +1,340 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Validation

+
+
+

To be sure that your application works well, you must validate any input data. devon4node by default provides a ValidationPipe. This ValidationPipe is responsible for validating the request input and, if the input does not pass the validation process, it returns a 400 Bad Request error.

+
+
+
+
+

Defining Validators

+
+
+

The ValidationPipe needs to know how to validate the input. For that purpose we use the class-validator package. This package allows you to define the validation of a class by using decorators.

+
+
+

For example:

+
+
+
+
export class Coffee {
+  @IsDefined()
+  @IsString()
+  @MaxLength(255)
+  name: string;
+
+  @IsDefined()
+  @IsString()
+  @MaxLength(25)
+  type: string;
+
+  @IsDefined()
+  @IsNumber()
+  quantity: number;
+}
+
+
+
+

As you can see in the previous example, we used some decorators in order to define the validators for every property of the Coffee class. You can find all decorators in the class-validator github repository.

+
+
+

Now, when you want to receive a Coffee as input in some endpoint, it will execute the validations before executing the handler function.

+
+
+ + + + + +
+ + +In order to be able to use the class-validator package, you must use classes instead of interfaces. As you know interfaces disappear at compiling time, and class-validator need to know the metadata of the properties in order to be able to validate. +
+
+
+ + + + + +
+ + +The ValidationPipe only works if you put a specific type in the handler definition. For example, if you define a handler like getCoffee(@Body() coffee: any): Coffee {} the ValidationPipe will not do anything. You must specify the type of the input: getCoffee(@Body() coffee: Coffee): Coffee {} +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/layer-controller.html b/docs/devonfw.github.io/1.0/devon4node.wiki/layer-controller.html new file mode 100644 index 00000000..237ac6ee --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/layer-controller.html @@ -0,0 +1,354 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Controller Layer

+
+
+

The controller layer is responsible for handling the requests/responses to the client. This layer knows everything about the endpoints exposed, the expected input (and also validate it), the response schema, the HTTP codes for the response and the HTTP errors that every endpoint can send.

+
+
+
+
+

How to implement the controller layer

+
+
+

This layer is implemented by the NestJS controllers. Let’s see how it works with an example:

+
+
+
+
@Controller('coffee/coffees')
+export class CoffeeController {
+  constructor(private readonly coffeeService: CoffeeService) {}
+
+  @Post('search')
+  @HttpCode(200)
+  async searchCoffees(@Body() search: CoffeeSearch): Promise<Array<Coffee>> {
+    try {
+      return await this.coffeeService.searchCoffees(search);
+    } catch (error) {
+      throw new BadRequestException(error.message, error);
+    }
+  }
+}
+
+
+
+

As you can see in the example, to create a controller you only need to decorate a class with the Controller decorator. This example is handling all request to coffee/coffees.

+
+
+

Also, you have defined one handler. This handler is listening to POST request for the route coffee/coffees/search. In addition, this handler is waiting for a CoffeeSearch object and returns an array of Coffee. In order to keep it simple, that’s all that you need in order to define one route.

+
+
+

One important thing that can be observed in this example is that there is no business logic. It delegates to the service layer and return the response to the client. At this point, transformations from the value that you receive from the service layer to the desired return type are also allowed.

+
+
+

By default, every POST handler returns an HTTP 201 response with the returned value as body, but you can change it in an easy way by using decorators. As you can see in the example, the handler will return an HTTP 200 response (@HttpCode(200)).

+
+
+

Finally, if the service layer throws an error, this handler will catch it and return an HTTP 400 Bad Request response. The controller layer is the only one that knows about the answers to the client, therefore it is the only one that knows which error codes should be sent.

+
+
+
+
+

Validation

+
+
+

In order to not propagate errors in the incoming payload, we need to validate all data in the controller layer. See the validation guide for more information.

+
+
+
+
+

Error handling

+
+
+

In the previous example, we catch all errors using the try/catch statement. This is not the usual implementation. In order to catch properly the errors you must use the exception filters. Example:

+
+
+
+
@Controller('coffee/coffees')
+export class CoffeeController {
+  constructor(private readonly coffeeService: CoffeeService) {}
+
+  @Post('search')
+  @HttpCode(200)
+  @UseFilters(CaffeExceptionFilter)
+  async searchCoffees(@Body() search: CoffeeSearch): Promise<Array<Coffee>> {
+    return await this.coffeeService.searchCoffees(search);
+  }
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/layer-dataaccess.html b/docs/devonfw.github.io/1.0/devon4node.wiki/layer-dataaccess.html new file mode 100644 index 00000000..96519059 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/layer-dataaccess.html @@ -0,0 +1,303 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Data Access Layer

+
+
+

The data access layer is responsible for all outgoing connections to access and process data. This is mainly about accessing data from a persistent data-store but also about invoking external services.

+
+
+

This layer is implemented using providers. Those providers could be: services, repositories and others. Although services can be used for this layer, they should not be confused with the service layer. Services in this layer are responsible for data access, while services in the service layer are responsible for business logic.

+
+
+
+
+

Database

+
+
+

We strongly recommend TypeORM for database management in devon4node applications. TypeORM supports the most commonly used relational databases, like Oracle, MySQL, MariaDB, PostgreSQL, SQLite, MSSQL and others. Also, it supports non-relational databases like MongoDB.

+
+
+

TypeORM supports Active Record and Repository patterns. We recommend to use the Repository pattern. This pattern allows you to separate the data objects from the methods to manipulate the database.

+
+
+
+
+

External APIs

+
+
+

In order to manage the data in an external API, you need to create a service for that purpose. In order to manage the connections with the external API, we strongly recommend the NestJS HTTP module

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/layer-service.html b/docs/devonfw.github.io/1.0/devon4node.wiki/layer-service.html new file mode 100644 index 00000000..0dfd8759 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/layer-service.html @@ -0,0 +1,308 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Service Layer

+
+
+

The logic layer is the heart of the application and contains the main business logic. It knows everything about the business logic, but it does not know about the response to the client and the HTTP errors. That’s why this layer is separated from the controller layer.

+
+
+
+
+

How to implement the service layer

+
+
+

This layer is implemented by services, a specific kind of providers. Let’s see one example:

+
+
+
+
@Injectable()
+export class CoffeeService {
+  constructor(@InjectRepository(Coffee) private readonly coffeeRepository: Repository<Coffee>) {}
+
+  async searchCoffees(search: CoffeeSearch): Promise<Array<Coffee>> {
+    const coffees = await this.coffeeRepository.find();
+
+    return doSomeBusinessLogic(coffees);
+  }
+}
+
+
+
+

This is the CoffeeService that we inject in the example of the controller layer. As you can see, a service is a regular class with the Injectable decorator. Also, it injects the data access layer as a dependency (in this specific case, the Repository<Coffee>).

+
+
+

The services expose methods in order to transform the input from the controllers by applying some business logic. They can also request data from the data access layer. And that’s all.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/master-devon4node.html b/docs/devonfw.github.io/1.0/devon4node.wiki/master-devon4node.html new file mode 100644 index 00000000..9cb6bcfb --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/master-devon4node.html @@ -0,0 +1,2768 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

NodeJS

+
+
+

devonfw is a platform which provides solutions to building business applications which combine best-in-class frameworks and libraries as well as industry proven practices and code conventions. devonfw is 100% Open Source (Apache License version 2.0) since the beginning of 2018.

+
+
+

devon4node is the NodeJS stack of devonfw. It allows you to build business applications (backends) using NodeJS technology in standardized way based on established best-practices.

+
+
+

devon4node is based on NestJS. Nest (NestJS) is a framework for building efficient, scalable Node.js server-side applications. It uses progressive TypeScript and combines elements of OOP (Object Oriented Programming), FP (Functional Programming), and FRP (Functional Reactive Programming).

+
+ +
+

devon4node Architecture

+
+

As we have mentioned in the introduction, devon4node is based on NestJS. Nest (NestJS) is a framework for building efficient, scalable Node.js server-side applications.

+
+
+
+

HTTP layer

+
+

By using NestJS, devon4node is a platform-agnostic framework. NestJS focuses only on the logical layer, and delegates the transport layer to another framework, such as ExpressJS. You can see it in the following diagram:

+
+
+
+devon4node architecture +
+
+
+

As you can see, NestJS does not listen directly for incoming requests. It has an adapter to communicate with ExpressJS, and ExpressJS is responsible for that. ExpressJS is only one of the frameworks that NestJS can work with. There is also another adapter available out-of-the-box: the Fastify adapter. With that, you can replace ExpressJS with Fastify but still use all your NestJS components. You can also create your own adapter to make NestJS work with another HTTP framework.

+
+
+

At this point, you may think: why is NestJS (and devon4node) using ExpressJS by default instead of Fastify? Because, as you can see in the previous diagram, there is a component that is dependent on the HTTP framework: the middleware. As ExpressJS is the most widely used framework, there exists a lot of middleware for it, so, in order to reuse them in our NestJS applications, NestJS uses ExpressJS by default. Anyway, you should consider which HTTP framework best fits your requirements.

+
+
+
+

devon4node layers

+
+

As other devonfw technologies, devon4node separates the application into layers.

+
+
+

Those layers are:

+
+ +
+
+layers +
+
+
+
+

devon4node application structure

+
+

Although there are many frameworks to create backend applications in NodeJS, none of them effectively solve the main problem of - Architecture. This is the main reason we have chosen NestJS for the devon4node applications. Besides, NestJS is highly inspired by Angular, therefore a developer who knows Angular can use his already acquired knowledge to write devon4node applications.

+
+
+

NestJS adopts various Angular concepts, such as dependency injection, piping, interceptors and modularity, among others. By using modularity we can reuse some of our modules between applications. One example that devon4node provide is the mailer module.

+
+
+
+

Modules

+
+

Creating an application module is simple: you only need to create an empty class with the decorator Module:

+
+
+
+
@Module({})
+export class AppModule {}
+
+
+
+

In the module you can define:

+
+
+
    +
  • +

    Imports: the list of imported modules that export the providers which are required in this module

    +
  • +
  • +

    Controllers: the set of controllers defined in this module which have to be instantiated

    +
  • +
  • +

    Providers: the providers that will be instantiated by the Nest injector and that may be shared at least across this module

    +
  • +
  • +

    Exports: the subset of providers that are provided by this module and should be available in other modules which import this module

    +
  • +
+
+
+

The main difference between Angular and NestJS is that NestJS modules encapsulate providers by default. This means that it’s impossible to inject providers that are neither directly part of the current module nor exported from the imported modules. Thus, you may consider the exported providers from a module as the module’s public interface, or API. Example of modules graph:

+
+
+
+modules +
+
+
+

In devon4node we have three different kinds of modules:

+
+
+
    +
  • +

    AppModule: this is the root module. Everything that our application needs must be imported here.

    +
  • +
  • +

    Global Modules: this is a special kind of module. When you make a module global, it’s accessible for every module in your application. You can see it in the next diagram. It’s the same as the previous one, but now the CoreModule is global:

    +
    +
    +module2 +
    +
    +
    +

    One example of a global module is the CoreModule. In the CoreModule you must import every module which has providers that need to be accessible in all modules of your application.

    +
    +
  • +
  • +

    Feature (or application) modules: modules which contain the logic of our application. We must import them in the AppModule.

    +
  • +
+
+
+

For more information about modules, see NestJS documentation page

+
+
+
+

Folder structure

+
+

devon4node defines a folder structure that every devon4node application must follow. The folder structure is:

+
+
+
+
├───src
+│   ├───app
+│   │   ├───core
+│   │   │   ├───auth
+│   │   │   ├───configuration
+│   │   │   ├───user
+│   │   │   └───core.module.ts
+│   │   ├───shared
+│   │   └───feature
+│   │       ├───sub-module
+│   │       │   ├───controllers
+│   │       │   ├───...
+│   │       │   ├───services
+│   │       │   └───sub-module.module.ts
+│   │       ├───controllers
+│   │       ├───interceptors
+│   │       ├───pipes
+│   │       ├───guards
+│   │       ├───filters
+│   │       ├───middlewares
+│   │       ├───model
+│   │       │   ├───dto
+│   │       │   └───entities
+│   │       ├───services
+│   │       └───feature.module.ts
+│   ├───config
+│   └───migration
+├───test
+└───package.json
+
+
+
+

devon4node schematics ensure this folder structure so, please, do not create files on your own; use the devon4node schematics.

+
+
+
+

NestJS components

+
+

NestJS provides several components that you can use in your application:

+
+
+ +
+
+

In the NestJS documentation you can find all information about each component. But, something that is missing in the documentation is the execution order. Every component can be defined in different levels: globally, in the controller or in the handler. As middleware is part of the HTTP server we can define it in a different way: globally or in the module.

+
+
+
+components +
+
+
+

It is not necessary to have components defined at every level. For example, you can have an interceptor defined globally but not have any other at the controller or handler level. If nothing is defined at some level, the request will continue to the next component.

+
+
+

As you can see in the previous image, the first component which receives the request is the globally defined middleware. Then, it sends the request to the module middleware. Each of them can return a response to the client, without passing the request to the next level.

+
+
+

Then, the request continues to the guards: first the global guard, next the controller guard and finally the handler guard. At this point, we can throw an exception in all components and the exception filter will catch it and send a proper error message to the client. We do not paint the filters in the graphic in order to simplify it.

+
+
+

After the guards, it is time for the interceptors: global interceptors, controller interceptors and handler interceptors. And last, before arriving at the handler inside the controller, the request passes through the pipes.

+
+
+

When the handler has the response ready to send to the client, it does not go directly to the client. It comes back through the interceptors, so we can also intercept the response. The order this time is reversed: handler interceptors, controller interceptors and global interceptors. After that, we can finally send the response to the client.

+
+
+

Now, with this in mind, you are able to create the components in a better way.

+
+
+
+
+
+

Layers

+
+ +
+

Controller Layer

+
+

The controller layer is responsible for handling the requests/responses to the client. This layer knows everything about the endpoints exposed, the expected input (and also validate it), the response schema, the HTTP codes for the response and the HTTP errors that every endpoint can send.

+
+
+
+

How to implement the controller layer

+
+

This layer is implemented by the NestJS controllers. Let’s see how it works with an example:

+
+
+
+
@Controller('coffee/coffees')
+export class CoffeeController {
+  constructor(private readonly coffeeService: CoffeeService) {}
+
+  @Post('search')
+  @HttpCode(200)
+  async searchCoffees(@Body() search: CoffeeSearch): Promise<Array<Coffee>> {
+    try {
+      return await this.coffeeService.searchCoffees(search);
+    } catch (error) {
+      throw new BadRequestException(error.message, error);
+    }
+  }
+}
+
+
+
+

As you can see in the example, to create a controller you only need to decorate a class with the Controller decorator. This example is handling all requests to coffee/coffees.

+
+
+

Also, you have defined one handler. This handler is listening to POST requests for the route coffee/coffees/search. In addition, this handler is waiting for a CoffeeSearch object and returns an array of Coffee. In order to keep it simple, that’s all that you need in order to define one route.

+
+
+

One important thing that can be observed in this example is that there is no business logic. It delegates to the service layer and return the response to the client. At this point, transformations from the value that you receive from the service layer to the desired return type are also allowed.

+
+
+

By default, every POST handler returns an HTTP 201 response with the returned value as body, but you can change it in an easy way by using decorators. As you can see in the example, the handler will return an HTTP 200 response (@HttpCode(200)).

+
+
+

Finally, if the service layer throws an error, this handler will catch it and return an HTTP 400 Bad Request response. The controller layer is the only one that knows about the answers to the client, therefore it is the only one that knows which error codes should be sent.

+
+
+
+

Validation

+
+

In order not to propagate errors in the incoming payload, we need to validate all data in the controller layer. See the validation guide for more information.

+
+
+
+

Error handling

+
+

In the previous example, we catch all errors using the try/catch statement. This is not the usual implementation. In order to catch the errors properly you must use the exception filters. Example:

+
+
+
+
@Controller('coffee/coffees')
+export class CoffeeController {
+  constructor(private readonly coffeeService: CoffeeService) {}
+
+  @Post('search')
+  @HttpCode(200)
+  @UseFilters(CaffeExceptionFilter)
+  async searchCoffees(@Body() search: CoffeeSearch): Promise<Array<Coffee>> {
+    return await this.coffeeService.searchCoffees(search);
+  }
+}
+
+
+ +
+
+

Service Layer

+
+

The logic layer is the heart of the application and contains the main business logic. It knows everything about the business logic, but it does not know about the response to the client and the HTTP errors. That’s why this layer is separated from the controller layer.

+
+
+
+

How to implement the service layer

+
+

This layer is implemented by services, a specific kind of providers. Let’s see one example:

+
+
+
+
@Injectable()
+export class CoffeeService {
+  constructor(private readonly coffeeService: CoffeeService) {}
+
+  async searchCoffees(@InjectRepository(Coffee) coffeeRepository: Repository<Coffee>): Promise<Array<Coffee>> {
+    const coffees = this.coffeeRepository.find();
+
+    return doSomeBusinessLogic(coffees);
+  }
+}
+
+
+
+

This is the CoffeeService that we inject in the example of the controller layer. As you can see, a service is a regular class with the Injectable decorator. Also, it injects the data access layer as a dependency (in this specific case, the Repository&lt;Coffee&gt;).

+
+
+

The services expose methods in order to transform the input from the controllers by applying some business logic. They can also request data from the data access layer. And that’s all.

+
+ +
+
+

Data Access Layer

+
+

The data access layer is responsible for all outgoing connections to access and process data. This is mainly about accessing data from a persistent data-store but also about invoking external services.

+
+
+

This layer is implemented using providers. Those providers could be: services, repositories and others. Although services can be used for this layer, they should not be confused with the service layer. Services in this layer are responsible for data access, while services in the service layer are responsible for business logic.

+
+
+
+

Database

+
+

We strongly recommend TypeORM for database management in devon4node applications. TypeORM supports the most commonly used relational databases, like Oracle, MySQL, MariaDB, PostgreSQL, SQLite, MSSQL and others. Also, it supports non-relational databases like MongoDB.

+
+
+

TypeORM supports Active Record and Repository patterns. We recommend to use the Repository pattern. This pattern allows you to separate the data objects from the methods to manipulate the database.

+
+
+
+

External APIs

+
+

In order to manage the data in an external API, you need to create a service for that purpose. In order to manage the connections with the external API, we strongly recommend the NestJS HTTP module.

+
+
+
+
+
+

Guides

+
+ +
+

Key Principles

+
+

devon4node is built following some basic principles like:

+
+
+ +
+
+

But key principles that best define devon4node (and are inherited from NestJS) are:

+
+
+
    +
  • +

    Simplicity (aka KISS)

    +
  • +
  • +

    Reusability

    +
  • +
  • +

    Productivity

    +
  • +
+
+
+
+

Simplicity

+
+

In devon4node we tried to do everything as simple as possible. Following this principle we will be able to do easy to maintain applications.

+
+
+

For example, in order to expose all CRUD operations for an entity, you only need to create a controller like:

+
+
+
+
@Crud({
+  model: {
+    type: Employee,
+  },
+})
+@CrudType(Employee)
+@Controller('employee/employees')
+export class EmployeeCrudController {
+  constructor(public service: EmployeeCrudService) {}
+}
+
+
+
+

You can find this code in the employee example. With only this code you’re exposing the full CRUD operations for the employee entity. As you can see, it’s an empty class with some decorators and the EmployeeCrudService injected as a dependency. Simple, isn’t it? The EmployeeCrudService is also simple:

+
+
+
+
@Injectable()
+export class EmployeeCrudService extends TypeOrmCrudService<Employee> {
+  constructor(@InjectRepository(Employee) repo: Repository<Employee>) {
+    super(repo);
+  }
+}
+
+
+
+

Another empty class which extends TypeOrmCrudService&lt;Employee&gt; and injects the Employee Repository as a dependency. Nothing else.

+
+
+

With these examples you can get an idea of how simple it can be to code a devon4node application.

+
+
+
+

Reusability

+
+

NestJS (and devon4node) applications are designed in a modular way. This allows you to isolate some functionality in a module, and then reuse it in every application that you need. This is the same behaviour that Angular has. You can see it in the NestJS modules like TypeORM, Swagger and others. Also, in devon4node we have the Mailer module.

+
+
+

In your applications, you only need to import those modules and then you will be able to use the functionality that they implement. Example:

+
+
+
+
@Module({
+  imports: [ AuthModule, ConfigurationModule ],
+})
+export class SomeModule {}
+
+
+
+
+

Productivity

+
+

devon4node is designed to create secure enterprise applications. But also, it allows you to do it in a fast way. To increase productivity, devon4node provides schematics in order to generate some boilerplate code.

+
+
+

For example, to create a module you need to create a new file for a module (or copy it) and write the code, then you need to import it in the AppModule. This is an easy example, but you can introduce some errors: forget to import it in the AppModule, introduce errors with the copy/paste and so on. By using the command nest g module --name &lt;module-name&gt; it will do everything for you. Just a simple command. In this specific case you probably do not see any advantage, but there are other complex cases where you can generate more complex code with the nest and devon4node schematics commands.

+
+
+

See code generation in order to know how to increase your productivity creating devon4node applications.

+
+ +
+
+

Code Generation

+
+

As we mentioned in the key principles page, one of our key principles is Productivity. In order to provide that productivity, we have some tools to generate code. These tools will help you generate the common parts of the application so that you can focus only on the specific functionality.

+
+
+

Those tools are:

+
+ +
+
+

Nest CLI and Devon4node schematics

+
+

We are going to use the Nest CLI to generate code for our application; you can learn more about the Nest CLI in the official documentation.

+
+
+
+

Install devon4node schematics

+
+

First of all, you need to install Nest CLI

+
+
+

Execute the command yarn global add @nestjs/cli. +You can also use npm: npm install -g @nestjs/cli

+
+
+

And then Devon4node schematics globally with the following command:

+
+
+

yarn global add @devon4node/schematics or npm install -g @devon4node/schematics

+
+
+
+

==

+
+

If you get an error trying to execute any devon4node schematic related to collection not found, try to reinstall @devon4node/schematics in the project folder or make sure that the schematics folder is inside @devon4node in node_modules. +yarn add @devon4node/schematics +== ==

+
+
+
+

Generate new devon4node application

+
+

To start creating a devon4node application, execute the command:

+
+
+

nest g -c @devon4node/schematics application [application-name]

+
+
+

If you do not put a name, the command line will ask you for one.

+
+
+
+

Generate code for TypeORM

+
+

Initialize TypeORM into your current project in a correct way.

+
+
+

nest g -c @devon4node/schematics typeorm

+
+
+

Then, you will be asked about which DB you want to use.

+
+
+

typeorm schematic

+
+
+
+

Generate CRUD

+
+

Generate CRUD methods for an entity. Requires TypeORM installed in the project.

+
+
+

It will add the @nestjsx/crud module as a project dependency. Then, it generates an entity, a CRUD controller and a CRUD service. It also registers the entity, controller and service in the module.

+
+
+

Execute nest g -c @devon4node/schematics crud and then you will need to write a name for the crud.

+
+
+
+crud schematic +
+
+
+
+

Generate TypeORM entity

+
+

Add a TypeORM entity to your project. Requires TypeORM installed in the project.

+
+
+

Execute nest g -c @devon4node/schematics entity and you will be asked for an entity name.

+
+
+
+

Add config-module

+
+

Add the config module to the project.

+
+
+

It will add the @devon4node/common module as a project dependency. Then, it will generate the configuration module into your project and add it in the core module. Also, it generates the config files for the most common environments.

+
+
+

The command to execute will be nest g -c @devon4node/schematics config-module

+
+
+
+

Add mailer module

+
+

Add @devon4node/mailer module to project.

+
+
+

It will add the @devon4node/mailer module as a project dependency. Also, it will add it to the core module and it will generate some email template examples.

+
+
+

Write the command nest g -c @devon4node/schematics mailer

+
+
+
+

Add swagger module

+
+

Add swagger module to project.

+
+
+

It will add the @nestjs/swagger module as a project dependency. Also, it will update the main.ts file in order to expose the endpoint for swagger. The default endpoint is: /v1/api

+
+
+

Execute the command nest g -c @devon4node/schematics swagger

+
+
+
+

Add auth-jwt module

+
+

Add the auth JWT module to the project.

+
+
+

It will add to your project the auth-jwt and user module. Also, it will import those modules into the core module.

+
+
+

Execute nest g -c @devon4node/schematics auth-jwt

+
+
+
+

Add security

+
+

Add cors and helmet to your project.

+
+
+

It will add helmet package as project dependency and update the main.ts file in order to enable the cors and helmet in your application.

+
+
+

Execute nest g -c @devon4node/schematics security

+
+
+
+

Generate database migrations

+
+
    +
  1. +

    Generate database migrations

    +
    +
      +
    1. +

      In order to create migration scripts with TypeORM, you need to install ts-node: yarn global add ts-node or npm i -g ts-node

      +
    2. +
    3. +

      Generate the tables creation migration: yarn run typeorm migration:generate -n CreateTables

      +
      +
      +insert data +
      +
      +
      +

      It will connect to the database, read all entities and then it will generate a migration file with all SQL queries needed to transform the current status of the database to the status defined by the entities. If the database is empty, it will generate all SQL queries needed to create all tables defined in the entities. You can find an example in the todo example.

      +
      +
    4. +
    +
    +
  2. +
+
+
+

As TypeORM is the tool used for the DB, you can check the official documentation for more information. +See the TypeORM CLI documentation.

+
+
+
+

CobiGen

+
+

Currently, we do not have templates to generate devon4node code (we have planned to do that in the future). Instead, we have templates that read the code of a devon4node application and generate a devon4ng application. Visit the CobiGen page for more information.

+
+ +
+
+

Coding Conventions

+
+

devon4node defines some coding conventions in order to improve the readability, reduce the merge conflicts and be able to develop applications in an industrialized way.

+
+
+

In order to ensure that you are following the devon4node coding conventions, you can use the following tools:

+
+
+
    +
  • +

    ESLint: ESLint is a tool for identifying and reporting on patterns found in ECMAScript/JavaScript code, with the goal of making code more consistent and avoiding bugs. We recommend using the ESLint VSCode extension (included in the devonfw Platform Extension Pack) in order to be able to see the linting errors while you are developing.

    +
  • +
  • +

    Prettier: Prettier is a code formatter. We recommend to use the Prettier VSCode extension (included in the devonfw Platform Extension Pack) and enable the editor.formatOnSave option.

    +
  • +
  • +

    devon4node application schematic: this tool will generate code following the devon4node coding conventions. Also, when you generate a new project using the devon4node application schematic, it generates the configuration files for TSLint and Prettier that satisfy the devon4node coding conventions.

    +
  • +
+
+
+

When you combine all tools, you can be sure that you follow the devon4node coding conventions.

+
+
+
+

Detailed devon4node Coding Conventions

+
+

Here we will detail some of the most important devon4node coding conventions. To be sure that you follow all devon4node coding conventions, use the tools described before.

+
+
+
+

Indentation

+
+

All devon4node code files must be indented using spaces. The indentation width must be 2 spaces.

+
+
+
+

White space

+
+

In order to improve the readability of your code, you must introduce whitespaces. Example:

+
+
+
+
if(condition){
+
+
+
+

must be

+
+
+
+
if (condition) {
+
+
+
+
+

Naming conventions

+ +
+
+

== File naming

+
+

The file name must follow the pattern: (name in kebab case).(kind of component).(extension) +The test file name must follow the pattern: (name in kebab case).(kind of component).spec.(extension)

+
+
+

Example:

+
+
+
+
auth-jwt.service.ts
+auth-jwt.service.spec.ts
+
+
+
+
+

== Interface naming

+
+

The interface names must be in pascal case, and must start with I. There is some controversy in starting the interface names with an I, but we decided to do it because in most cases you will have an interface and a class with the same name, so, to differentiate them, we decided to start the interfaces with I. Other devonfw stacks solve it by adding the suffix Impl to the class implementations.

+
+
+

Example:

+
+
+
+
interface ICoffee {}
+
+
+
+
+

== Class naming

+
+

The class names must be in pascal case.

+
+
+

Example:

+
+
+
+
class Coffee {}
+
+
+
+
+

== Variable naming

+
+

All variable names must be in camel case.

+
+
+
+
const coffeeList: Coffee[];
+
+
+
+
+

Declarations

+
+

For all variable declarations we must use const or let. var is forbidden. We prefer to use const when possible.

+
+
+
+

Programming practices

+ +
+
+

== Trailing comma

+
+

All multi-line array and object literals must end with a trailing comma. Example:

+
+
+
+
{
+  one: 'one',
+  two: 'two'  // bad
+}
+{
+  one: 'one',
+  two: 'two', // good
+}
+
+
+
+
+

== Arrow functions

+
+

All anonymous functions must be defined with the arrow function notation. In most cases it’s not a problem, but sometimes, when you do not want to bind this when you define the function, you can use the regular function definition. In these special cases you must disable the linter for those statements.

+
+
+
+

== Comments

+
+

Comments must start with a whitespace. Example:

+
+
+
+
//This is a bad comment
+// This is OK
+
+
+
+
+

== Quotemarks

+
+

For string definitions, we must use single quotes.

+
+
+
+

== if statements

+
+

In all if statements you always must use brackets. Example:

+
+
+
+
// Bad if statement
+if (condition)
+  return true;
+
+// Good if statement
+if (condition) {
+  return true;
+}
+
+
+
+
+

Pre-commit hooks

+
+

In order to ensure that your new code follows the coding conventions, devon4node uses by default husky. Husky is a tool that allows you to configure git hooks easily in your project. When you make a git commit in your devon4node project, it will execute two actions:

+
+
+
    +
  • +

    Prettify the staged files

    +
  • +
  • +

    Execute the linter in the staged files

    +
  • +
+
+
+

If any action fails, you won’t be able to commit your new changes.

+
+
+ + + + + +
+ + +If you want to skip the git hooks, you can do a commit passing the --no-verify flag. +
+
+ +
+
+

Dependency Injection

+
+

The dependency injection is a well-known common design pattern applied by frameworks in all languages, like Spring in Java, Angular and others. The intention of this page is not to explain how dependency injection works, but instead how it is addressed by NestJS.

+
+
+

NestJS resolves the dependency injection in its modules. When you define a provider in a module, it can be injected in all components of the module. By default, those providers are only available in the module where they are defined. The only way to export a module provider to other modules which import it is adding those providers to the exports array. You can also reexport modules.

+
+
+
+

Inject dependencies in NestJS

+
+

In order to inject a dependency in a NestJS component, you need to declare it in the component constructor. Example:

+
+
+
+
export class CoffeeController {
+  constructor(public readonly coffeeService: CoffeeService) {}
+}
+
+
+
+

NestJS can resolve all dependencies that are defined in the module as provider, and also the dependencies exported by the modules imported. Example:

+
+
+
+
@Module({
+  controllers: [CoffeeController],
+  providers: [CoffeeService],
+})
+export class CoffeeModule {}
+
+
+
+

Injecting dependencies in the constructor is the preferred choice, but, sometimes it is not possible. For example, when you are extending another class and want to keep the constructor definition. In these specific cases we can inject dependencies in the class properties. Example:

+
+
+
+
export class CoffeeController {
+  @Inject(CoffeeService)
+  private readonly coffeeService: CoffeeService;
+}
+
+
+
+
+

Dependency Graph

+
+
+dependency injection1 +
+
+
+

In the previous image, the Module A can inject dependencies exported by Module B, Module E and Module F. If module B reexport Module C and Module D, they are also accessible by Module A.

+
+
+

If there is a conflict with the injection token, it resolves the provider with the least distance to the module. For example: if the modules C and F export a UserService provider, Module A will resolve the UserService exported by Module F, because the distance from Module A to Module F is 1, and the distance from Module A to Module C is 2.

+
+
+

When you define a module as global, the dependency injection system is the same. The only difference is that now all modules have a link to the global module. For example, if we make Module C global the dependency graph will be:

+
+
+
+dependency injection2 +
+
+
+
+

Custom providers

+
+

When you want to change the provider name, you can use a NestJS feature called custom providers. For example, if you want to define a provider called MockUserService with the provider token UserService you can define it like:

+
+
+
+
@Module({
+  providers: [{
+    provide: UserService,
+    useValue: MockUserService,
+  }],
+})
+
+
+
+

With this, when you want to inject UserService as a dependency, the MockUserService will be injected.

+
+
+

Custom provider token can be also a string:

+
+
+
+
@Module({
+  providers: [{
+    provide: 'USER_SERVICE',
+    useValue: MockUserService,
+  }],
+})
+
+
+
+

but now, when you want to inject it as dependency you need to use the @Inject decorator.

+
+
+
+
constructor(@Inject('USER_SERVICE') userService: any) {}
+
+
+ +
+
+

Configuration Module

+
+

devon4node provides a way to generate a configuration module inside your application. To generate it you only need to execute the command nest g -c @devon4node/schematics config-module. This command will generate inside your application:

+
+
+
    +
  • +

    Configuration module inside the core module.

    +
  • +
  • +

    config folder where all environment configurations are stored.

    +
    +
      +
    • +

      default configuration: configuration for your local development environment.

      +
    • +
    • +

      develop environment configuration for the develop environment.

      +
    • +
    • +

      uat environment configuration for the uat environment.

      +
    • +
    • +

      production environment configuration for the production environment.

      +
    • +
    • +

      production environment configuration for the production environment.

      +
    • +
    • +

      test environment configuration used by test.

      +
    • +
    +
    +
  • +
+
+
+ + + + + +
+ + +some code generators will add some properties to this module, so, be sure that the config module is the first module that you generate in your application. +
+
+
+
+

Use the configuration service

+
+

To use the configuration service, you only need to inject it as dependency. As configuration module is defined in the core module, it will be available everywhere in your application. Example:

+
+
+
+
export class MyProvider {
+  constructor(public readonly configService: ConfigurationService) {}
+
+  myMethod() {
+    return this.configService.isDev;
+  }
+}
+
+
+
+
+

Choose an environment file

+
+

By default, when you use the configuration service it will take the properties defined in the default.ts file. If you want to change the configuration file, you only need to set the NODE_ENV environment property with the name of the desired environment. Examples: in windows execute set NODE_ENV=develop before executing the application, in linux execute NODE_ENV=develop before executing the application or NODE_ENV=develop yarn start.

+
+
+
+

Override configuration properties

+
+

Sometimes, you want to keep some configuration property secure and do not want to publish it to the repository, or you want to reuse some configuration file but need to change some properties. For those scenarios, you can override configuration properties by defining an environment variable with the same name. For example, if you want to override the property host, you can do: set host="newhost". It also works with objects. For example, if you want to change the value of secret in the property jwtConfig for this example, you can set an environment variable like this: set jwtConfig="{"secret": "newsecret"}". As you can see, this environment variable has a JSON value. It will take the object and merge the jwtConfig property with the properties defined inside the environment variable. The other properties maintain their value. The behaviour is the same for the nested objects.

+
+
+
+

Add a configuration property

+
+

In order to add a new property to the configuration module, you need to follow some steps:

+
+
+
    +
  • +

    Add the property to the IConfig interface in the src/app/core/configuration/types.ts file. With this, we can ensure that the ConfigurationService and the environment files have that property at compile time.

    +
  • +
  • +

    Add the new property getter to ConfigurationService. You must use the get method of ConfigurationService to ensure that the property will be loaded from the desired config file. You can also add extra logic if needed.

    +
  • +
  • +

    Add the property to all config files inside the src/config folder.

    +
  • +
+
+
+

Example:

+
+
+

We want to add the property devonfwUrl to our ConfigurationService, so:

+
+
+

We add the following code in IConfig interface:

+
+
+
+
devonfwUrl: string;
+
+
+
+

Then, we add the getter in the ConfigurationService:

+
+
+
+
get devonfwUrl(): string {
+  return this.get('devonfwUrl')!;
+}
+
+
+
+

Finally, we add the definition in all config files:

+
+
+
+
devonfwUrl: 'https://devonfw.com',
+
+
+ +
+
+

Auth JWT module

+
+

devon4node provides a way to generate a default authentication module using JWT (JSON Web Token). It uses the @nestjs/passport library described here.

+
+
+

To generate the devon4node auth-jwt module you only need to execute the command: nest generate -c @devon4node/schematics auth-jwt. We generate this module inside the applications instead of distributing an npm package because this module is prone to be modified depending on the requirements. It also generates a basic user module.

+
+
+

In this page we will explain the default implementation provided by devon4node. For more information about authentication, JWT, passport and other you can see:

+
+
+ +
+
+
+

Auth JWT endpoints

+
+

In order to execute authentication operations, the auth-jwt module exposes the following endpoints:

+
+
+
    +
  • +

    POST /auth/login: receives a username and a password and returns the token in the header if the combination of username and password is correct.

    +
  • +
  • +

    POST /auth/register: register a new user.

    +
  • +
  • +

    GET /auth/currentuser: returns the user data if the user is authenticated.

    +
  • +
+
+
+
+

Protect endpoints with auth-jwt

+
+

In order to protect your endpoints with auth-jwt module you only need to add the AuthGuard() in the UseGuards decorator. Example:

+
+
+
+
@Get('currentuser')
+@UseGuards(AuthGuard())
+currentUser(@Request() req: UserRequest) {
+  return req.user;
+}
+
+
+
+

Now, all requests to currentuser are protected by the AuthGuard.

+
+
+
+

Role based Access Control

+
+

The auth-jwt module also provides a way to control the access to some endpoints by using roles. For example, if you want to grant access to an endpoint only to admins, you only need to add the Roles decorator to those endpoints with the roles allowed. Example:

+
+
+
+
@Get('currentuser')
+@UseGuards(AuthGuard())
+@Roles(roles.ADMIN)
+currentUser(@Request() req: UserRequest) {
+  return req.user;
+}
+
+
+ +
+
+

Swagger

+
+

We can use swagger (OpenAPI) in order to describe the endpoints that our application exposes.

+
+
+

NestJS provides a module which will read the code of our application and will expose one endpoint where we can see the swagger.

+
+
+

Adding swagger to a devon4node application is simple: you only need to execute the command nest g -c @devon4node/schematics swagger and it will do everything for you. The next time that you start your application, you will be able to see the swagger at the /v1/api endpoint.

+
+
+

The swagger module can read your code in order to create the swagger definition, but sometimes you need to help it by decorating your handlers.

+
+
+

For more information about decorators and other behaviour about swagger module, you can see the NestJS swagger documentation page

+
+
+ + + + + +
+ + +the OpenAPI specification that this module supports is v2.0. The OpenAPI v3.0 is not available yet by using this module. +
+
+ +
+
+

TypeORM

+
+

TypeORM is the default ORM provided by devon4node. It supports MySQL, MariaDB, Postgres, CockroachDB, SQLite, Microsoft SQL Server, Oracle, sql.js relational database and also supports MongoDB NoSQL database.

+
+
+

Adding TypeORM support to a devon4node application is very easy: you only need to execute the command nest g -c @devon4node/schematics typeorm and it will add all required dependencies to the project and also imports the @nestjs/typeorm module.

+
+
+

For more information about TypeORM and the integration with NestJS you can visit TypeORM webpage, TypeORM GitHub repository and NestJS TypeORM documentation page

+
+
+
+

Configuration

+
+

When you have the configuration module, the TypeORM generator will add one property in order to be able to configure the database depending on the environment. Example:

+
+
+
+
database: {
+  type: 'sqlite',
+  database: ':memory:',
+  synchronize: false,
+  migrationsRun: true,
+  logging: true,
+  entities: ['dist/**/*.entity.js'],
+  migrations: ['dist/migration/**/*.js'],
+  subscribers: ['dist/subscriber/**/*.js'],
+  cli: {
+    entitiesDir: 'src/entity',
+    migrationsDir: 'src/migration',
+    subscribersDir: 'src/subscriber',
+  },
+},
+
+
+
+

This object is a TypeORM ConnectionOptions. For more information about it visit the TypeORM Connection Options page.

+
+
+

There is also a special case: the default configuration. As the devon4node CLI needs the database configuration when you use the devon4node db command, we also provide the ormconfig.json file. In this file you must put the configuration for your local environment. In order to avoid duplicating the configuration for the local environment, in the default config file the database property is set up like:

+
+
+
+
database: require('../../ormconfig.json'),
+
+
+
+

So, you only need to maintain the ormconfig.json file for the local environment.

+
+
+
+

Entity

+
+

Entity is a class that maps to a database table. The devon4node schematics has a generator to create new entities. You only need to execute the command nest g -c @devon4node/schematics entity <entity-name> and it generates the entity.

+
+
+

In the entity, you must define all columns, relations, primary keys of your database table. By default, devon4node provides a class named BaseEntity. All entities created with the devon4node schematics will extends the BaseEntity. This entity provides you some common columns:

+
+
+
    +
  • +

    id: the primary key of your table

    +
  • +
  • +

    version: the version of the entry (used for auditing purposes)

    +
  • +
  • +

    createdAt: creation date of the entry (used for auditing purposes)

    +
  • +
  • +

    updatedAt: last update date of the entry (used for auditing purposes)

    +
  • +
+
+
+

For more information about Entities, please visit the TypeORM entities page

+
+
+
+

Repository

+
+

With repositories, you can manage (insert, update, delete, load, etc.) a concrete entity. Using this pattern, we have separated the data (Entities) from the methods to manage it (Repositories).

+
+
+

To use a repository you only need to:

+
+
+
    +
  • +

    Import it in the module as follows:

    +
    +
    +
    @Module({
    +  imports: [TypeOrmModule.forFeature([Employee])],
    +})
    +
    +
    +
    + + + + + +
    + + +if you generate the entities with the devon4node schematic, this step is not necessary, devon4node schematic will do it for you. +
    +
    +
  • +
  • +

    Inject the repository as dependency in your service:

    +
    +
    +
    constructor(@InjectRepository(Employee) employeeRepository: Repository<Employee>) {}
    +
    +
    +
  • +
+
+
+

You can see more details in the NestJS database and NestJS TypeORM documentation pages.

+
+ +
+
+

Serializer

+
+

Serialization is the process of translating data structures or object state into a format that can be transmitted across network and reconstructed later.

+
+
+

NestJS by default serializes all data to JSON (JSON.stringify). Sometimes this is not enough. In some situations you need to exclude some property (e.g. password). Instead of doing it manually, devon4node provides an interceptor (ClassSerializerInterceptor) that will do it for you. You only need to return a class instance as always and the interceptor will transform those classes to the expected data.

+
+
+

The ClassSerializerInterceptor takes the class-transformer decorators in order to know how to transform the class and then send the result to the client.

+
+
+

Some of class-transformer decorators are:

+
+
+
    +
  • +

    Expose

    +
  • +
  • +

    Exclude

    +
  • +
  • +

    Type

    +
  • +
  • +

    Transform

    +
  • +
+
+
+

And methods to transform data:

+
+
+
    +
  • +

    plainToClass

    +
  • +
  • +

    plainToClassFromExist

    +
  • +
  • +

    classToPlain

    +
  • +
  • +

    classToClass

    +
  • +
  • +

    serialize

    +
  • +
  • +

    deserialize

    +
  • +
  • +

    deserializeArray

    +
  • +
+
+
+

See the class-transformer page for more information.

+
+
+

See NestJS serialization page for more information about ClassSerializerInterceptor.

+
+ +
+
+

Validation

+
+

To be sure that your application will work well, you must validate any input data. devon4node by default provides a ValidationPipe. This ValidationPipe is responsible for validating the request input and, if the input does not pass the validation process, it returns a 400 Bad Request error.

+
+
+
+

Defining Validators

+
+

The ValidationPipe needs to know how to validate the input. For that purpose we use the class-validator package. This package allows you to define the validation of a class by using decorators.

+
+
+

For example:

+
+
+
+
export class Coffee {
+  @IsDefined()
+  @IsString()
+  @MaxLength(255)
+  name: string;
+
+  @IsDefined()
+  @IsString()
+  @MaxLength(25)
+  type: string;
+
+  @IsDefined()
+  @IsNumber()
+  quantity: number;
+}
+
+
+
+

As you can see in the previous example, we used some decorators in order to define the validators for every property of the Coffee class. You can find all decorators in the class-validator github repository.

+
+
+

Now, when you want to receive a Coffee as input in some endpoint, it will execute the validations before executing the handler function.

+
+
+ + + + + +
+ + +In order to be able to use the class-validator package, you must use classes instead of interfaces. As you know interfaces disappear at compiling time, and class-validator need to know the metadata of the properties in order to be able to validate. +
+
+
+ + + + + +
+ + +The ValidationPipe only works if you put a specific type in the handler definition. For example, if you define a handler like getCoffee(@Body() coffee: any): Coffee {} the ValidationPipe will not do anything. You must specify the type of the input: getCoffee(@Body() coffee: Coffee): Coffee {} +
+
+ +
+
+

Logger

+
+

When you create a new devon4node application, it already has a logger: src/app/shared/logger/winston.logger.ts. This logger provides the methods log, error and warn. All of those methods will write a log message, but with a different log level.

+
+
+

The winston logger has two transports: one to log everything inside the file logs/general.log and the other to log only the error logs inside the file logs/error.log. In addition, it uses the default NestJS logger in order to show the logs in the console.

+
+
+

As you can see it is a simple example about how to use a logger in a devon4node application. It will be updated to a more complex one in the next versions.

+
+
+
+

How to use logger

+
+

In order to use the logger you only need to inject the logger as a dependency:

+
+
+
+
constructor(logger: WinstonLogger){}
+
+
+
+

and then use it

+
+
+
+
async getAll() {
+  this.service.getAll();
+  this.logger.log('Returning all data');
+}
+
+
+ +
+
+

Mailer Module

+
+

This module enables you to send emails in devon4node. It also provides a template engine using Handlebars.

+
+
+

It is a NestJS module that injects into your application a MailerService, which is responsible for sending the emails using the nodemailer library.

+
+
+
+

Installing

+
+

Execute the following command in a devon4node project:

+
+
+
+
yarn add @devon4node/mailer
+
+
+
+
+

Configuring

+
+

To configure the mailer module, you only need to import it into another module of your application. Example:

+
+
+
+
@Module({
+  ...
+  imports: [
+    MailerModule.forRoot(),
+  ],
+  ...
+})
+
+
+
+

You must pass the configuration using the forRoot or forRootAsync methods.

+
+
+
+

forRoot()

+
+

The forRoot method receives a MailerModuleOptions object as a parameter. It configures the MailerModule using the input MailerModuleOptions object.

+
+
+

The structure of MailerModuleOptions is:

+
+
+
+
{
+  hbsOptions?: {
+    templatesDir: string;
+    extension?: string;
+    partialsDir?: string;
+    helpers?: IHelperFunction[];
+    compilerOptions?: ICompileOptions;
+  },
+  mailOptions?: nodemailerSmtpTransportOptions;
+  emailFrom: string;
+}
+
+
+
+

Here, you need to specify the Handlebars compile options, the nodemailer transport options and the email address which will send the emails. Then, you need to call the forRoot function in the module imports. Example:

+
+
+
+
@Module({
+  ...
+  imports: [
+    MailerModule.forRoot({
+      mailOptions: {
+        host: 'localhost',
+        port: 1025,
+        secure: false,
+        tls: {
+          rejectUnauthorized: false,
+        },
+      },
+      emailFrom: 'noreply@capgemini.com',
+      hbsOptions: {
+        templatesDir: join(__dirname, '../..', 'templates/views'),
+        partialsDir: join(__dirname, '../..', 'templates/partials'),
+        helpers: [{
+          name: 'fullname',
+          func: person => `${person.name} ${person.surname}`,
+        }],
+      },
+    }),
+  ...
+})
+
+
+
+
+

forRootAsync()

+
+

The method forRootAsync enables you to get the mailer configuration in an asynchronous way. It is useful when you need to get the configuration using, for example, a service (e.g. ConfigurationService).

+
+
+

Example:

+
+
+
+
@Module({
+  ...
+  imports: [
+    MailerModule.forRootAsync({
+      imports: [ConfigurationModule],
+      useFactory: (config: ConfigurationService) => {
+        return config.mailerConfig;
+      },
+      inject: [ConfigurationService],
+    }),
+  ...
+})
+
+
+
+

In this example, we use the ConfigurationService in order to get the MailerModuleOptions (the same as forRoot)

+
+
+
+

Usage

+
+

In order to use it, you only need to inject the MailerService using dependency injection.

+
+
+

Example:

+
+
+
+
@Injectable()
+export class CatsService {
+  constructor(private readonly mailer: MailerService) {}
+}
+
+
+
+

Then, you only need to use the methods provided by the MailerService in your service. Take into account that you can inject it in every place that support NestJS dependency injection.

+
+
+
+

MailerService methods

+ +
+
+

== sendPlainMail

+
+

The method sendPlainMail receives a string and sends an email.

+
+
+

The method signatures are:

+
+
+
+
sendPlainMail(emailOptions: SendMailOptions): Promise<SentMessageInfo>;
+sendPlainMail(to: string, subject: string, mail: string): Promise<SentMessageInfo>;
+
+
+
+

Examples:

+
+
+
+
this.mailer.sendPlainMail({
+  to: 'example@example.com',
+  subject: 'This is a subject',
+  html: '<h1>Hello world</h1>'
+});
+this.mailer.sendPlainMail('example@example.com', 'This is a subject', '<h1>Hello world</h1>');
+
+
+
+
+

== sendTemplateMail

+
+

The method sendTemplateMail sends an email based on a Handlebars template. The templates are registered using the templatesDir option or using the addTemplate method. The template name is the name of the template (without extension) or the first parameter of the method addTemplate.

+
+
+

The method signatures are:

+
+
+
+
sendTemplateMail(emailOptions: SendMailOptions, templateName: string, emailData: any, hbsOptions?: RuntimeOptions): Promise<SentMessageInfo>;
+sendTemplateMail(to: string, subject: string, templateName: string, emailData: any, hbsOptions?: RuntimeOptions): Promise<SentMessageInfo>;
+
+
+
+

Examples:

+
+
+
+
this.mailer.sendTemplateMail({
+  to: 'example@example.com',
+  subject: 'This is a subject',
+  html: '<h1>Hello world</h1>'
+}, 'template1', { person: {name: 'Dario', surname: 'Rodriguez'}});
+this.mailer.sendTemplateMail('example@example.com', 'This is a subject', 'template1', { person: {name: 'Dario', surname: 'Rodriguez'}});
+
+
+
+
+

== addTemplate

+
+

Adds a new template to the MailerService.

+
+
+

Method signature:

+
+
+
+
addTemplate(name: string, template: string, options?: CompileOptions): void;
+
+
+
+

Example:

+
+
+
+
this.mailer.addTemplate('newTemplate', '<html><head></head><body>{{>partial1}}</body></html>')
+
+
+
+
+

== registerPartial

+
+

Register a new partial in Handlebars.

+
+
+

Method signature:

+
+
+
+
registerPartial(name: string, partial: Handlebars.Template<any>): void;
+
+
+
+

Example:

+
+
+
+
this.mailer.registerPartial('partial', '<h1>Hello World</h1>')
+
+
+
+
+

== registerHelper

+
+

Register a new helper in Handlebars.

+
+
+

Method signature:

+
+
+
+
registerHelper(name: string, helper: Handlebars.HelperDelegate): void;
+
+
+
+

Example:

+
+
+
+
this.mailer.registerHelper('fullname', person => `${person.name} ${person.surname}`)
+
+
+
+
+

Handlebars templates

+
+

As mentioned above, this module allows you to use Handlebars as a template engine, but it is optional. If you do not need Handlebars, you just need to keep the hbsOptions undefined.

+
+
+

In order to get the templates from the file system, you can specify the template folder, the partials folder and the helpers. At the moment of module initialization, it will read the content of the template folder, and will register every file with the name (without extension) and the content as a Handlebars template. It will do the same for the partials.

+
+
+

You can specify the extension of template files using the extension parameter. The default value is .handlebars

+
+
+
+

Local development

+
+

If you want to work with this module but you don’t have an SMTP server, you can use the streamTransport. Example:

+
+
+
+
{
+  mailOptions: {
+    streamTransport: true,
+    newline: 'windows',
+  },
+  emailFrom: ...
+  hbsOptions: ...
+}
+
+
+
+

Then, you need to get the sendPlainMail or sendTemplateMail result, and print the email to the standard output (STDOUT). Example:

+
+
+
+
const mail = await this.mailer.sendTemplateMail(...);
+
+mail.message.pipe(process.stdout);
+
+
+ +
+
+

Importing your ESLint reports into SonarQube

+
+

This guide covers the import of ESLint reports into SonarQube instances in CI environments, as this is the recommended way of using ESLint and SonarQube for devon4node projects. The prerequisites for this process are a CI environment, preferably a Production Line instance, and the ESLint CLI, which is already included when generating a new devon4node project.

+
+
+
+

Configuring the ESLint analysis

+
+

You can configure the ESLint analysis parameters in the .eslintrc.js file inside the top-level directory of your project. If you created your node project using the devon4node application schematic, this file will already exist. If you want to make further adjustments to it, have a look at the ESLint documentation.

+
+
+

The ESLint analysis script lint is already configured in the scripts part of your package.json. Simply add -f json > report.json, so that the output of the analysis is saved in a .json file. Additional information to customization options for the ESLint CLI can be found here.

+
+
+

To run the analysis, execute the script with npm run lint inside the base directory of your project.

+
+
+
+

Configuring SonarQube

+
+

If you haven’t already generated your CICD-related files, follow the tutorial on the devon4node schematic of our CICDGEN project, as you will need a Jenkinsfile configured in your project to proceed.

+
+
+

Inside the script for the SonarQube code analysis in your Jenkinsfile, add the parameter -Dsonar.eslint.reportPaths=report.json. Now, whenever a SonarQube analysis is triggered by your CI environment, the generated report will be loaded into your SonarQube instance. +To avoid duplicated issues, you can associate an empty TypeScript quality profile with your project in its server configurations.

+
+
+
+
+
+

devon4node applications

+
+ +
+

devon4node Samples

+
+

In the folder /samples, you can find some devon4node examples that could be useful for you in order to better understand the framework.

+
+
+

The samples are:

+
+
+ +
+
+

Also, we have another realistic example in the My Thai Star repository. This example is the implementation of My Thai Star backend, which is compatible with the frontend made with Angular. To do that, this node implementation exposes the same API as Java backend. Take care with this example, as we need to follow the Java API, some components do not follow the devon4node patterns and code conventions.

+
+
+
+

Todo example

+
+

This example is the backend part of a TO-DO application. It exposes an API where you can create, read, update and delete a TO-DO list.

+
+
+

In order to start the application, run the following commands in the todo folder:

+
+
+
+
$ yarn
+$ yarn build
+$ yarn start
+
+
+
+

Now, you can access the application using the url http://localhost:3000/v1/todo/todos. If you want to know all the endpoints exposed, you can see the swagger at: http://localhost:3000/v1/api.

+
+
+

Also, in this example we show you how to control the access to your application by implementing an authentication mechanism using JWT and a role-based strategy. In order to access the list of todos (http://localhost:3000/v1/todo/todos), first you need to call POST http://localhost:3000/v1/auth/login and in the body you need to send the user information:

+
+
+
+
{
+  "username": "user",
+  "password": "password"
+}
+
+
+
+

It will return a JWT token for the user user. The role of this user is USER, so you can only access the methods GET, POST and DELETE of the endpoint http://localhost:3000/v1/todo/todos. If you log in with the user admin/admin, you will be able to access the methods UPDATE and PATCH.

+
+
+
+

Employee example

+
+

This is an example of employee management application. With the application you can create, read, update and delete employees.

+
+
+

In order to start the application, run the following commands in the employee folder:

+
+
+
+
$ yarn
+$ yarn build
+$ yarn start
+
+
+
+

Now, you can access the application using the url http://localhost:8081/v1/employee/employees. If you want to know all the endpoints exposed, you can see the swagger at: http://localhost:8081/v1/api.

+
+
+

This is a simple example without authentication. With this example you can learn how to work with database migrations. You can find them in the folder /src/migrations. TypeORM is configured in ormconfig.json to execute the migrations every time you start this application, using the following flag:

+
+
+
+
"migrationsRun": true
+
+
+
+

You can also execute the migration manually by typing the command devon4node db migration:run, or revert executing devon4node db migration:revert. Take into account that the database that this application is using is an in-memory sqlite, so every time that you stop the application all data is lost.

+
+
+
+

Components example

+
+

This example allows you to better understand the execution order of the components of a devon4node application (guards, pipes, interceptors, filters, middleware).

+
+
+

In order to start the application, run the following commands in the components folder:

+
+
+
+
$ yarn
+$ yarn build
+$ yarn start
+
+
+
+

In order to see the execution order, you can call to http://localhost:3000/v1. It will show you the execution order of all components except the filters. If you want to know the execution order while a filter is applied, call to the endpoint with the following queries: ?hello=error, ?hello=controller, ?hello=global.

+
+ +
+
+

Create the employee sample step by step

+ +
+
+

Application requisites

+
+

The employee application needs:

+
+
+
    +
  • +

    A configuration module

    +
  • +
  • +

    A SQLite in memory database

    +
  • +
  • +

    Security: CORS

    +
  • +
  • +

    Swagger support

    +
  • +
  • +

    Authentication using JWT

    +
  • +
  • +

    CRUD for manage employees. The employees will have the following properties:

    +
    +
      +
    • +

      name

      +
    • +
    • +

      surname

      +
    • +
    • +

      email

      +
    • +
    +
    +
  • +
+
+
+
+

Create the application

+
+
    +
  1. +

    Install Nest CLI

    +
    +

    Execute the command yarn global add @nestjs/cli

    +
    +
  2. +
  3. +

    Install devon4node schematics

    +
  4. +
  5. +

    Execute the command yarn global add @devon4node/schematics

    +
  6. +
  7. +

    Create the new application

    +
    +

    Execute the command nest g -c @devon4node/schematics application employee

    +
    +
  8. +
  9. +

    Then, we need to add some components, go inside the project folder and execute the following commands:

    +
    +

    Go inside project folder: cd employee.

    +
    +
    +

    Config module: nest g -c @devon4node/schematics config-module.

    +
    +
    +

    TypeORM database, choose sqlite DB when asked nest g -c @devon4node/schematics typeorm.

    +
    +
    +

    Add security: nest g -c @devon4node/schematics security.

    +
    +
    +

    Swagger module: nest g -c @devon4node/schematics swagger.

    +
    +
    +

    Auth-jwt authentication: nest g -c @devon4node/schematics auth-jwt.

    +
    +
    +

    Add an application module: nest g -c @devon4node/schematics module employee.

    +
    +
    +

    Add CRUD component: nest g -c @devon4node/schematics crud employee/employee.

    +
    +
    +

    With this, you will generate the following files:

    +
    +
    +
    +
    /employee/.prettierrc
    +/employee/nest-cli.json
    +/employee/package.json
    +/employee/README.md
    +/employee/tsconfig.build.json
    +/employee/tsconfig.json
    +/employee/tslint.json
    +/employee/src/main.ts
    +/employee/test/app.e2e-spec.ts
    +/employee/test/jest-e2e.json
    +/employee/src/app/app.controller.spec.ts
    +/employee/src/app/app.controller.ts
    +/employee/src/app/app.module.ts
    +/employee/src/app/app.service.ts
    +/employee/src/app/core/core.module.ts
    +/employee/src/app/shared/logger/winston.logger.ts
    +/employee/src/app/core/configuration/configuration.module.ts
    +/employee/src/app/core/configuration/model/index.ts
    +/employee/src/app/core/configuration/model/types.ts
    +/employee/src/app/core/configuration/services/configuration.service.spec.ts
    +/employee/src/app/core/configuration/services/configuration.service.ts
    +/employee/src/app/core/configuration/services/index.ts
    +/employee/src/config/default.ts
    +/employee/src/config/develop.ts
    +/employee/src/config/production.ts
    +/employee/src/config/test.ts
    +/employee/src/config/uat.ts
    +/employee/docker-compose.yml
    +/employee/ormconfig.json
    +/employee/src/app/shared/model/entities/base-entity.entity.ts
    +/employee/src/app/core/auth/auth.module.ts
    +/employee/src/app/core/auth/controllers/auth.controller.spec.ts
    +/employee/src/app/core/auth/controllers/auth.controller.ts
    +/employee/src/app/core/auth/controllers/index.ts
    +/employee/src/app/core/auth/decorators/index.ts
    +/employee/src/app/core/auth/decorators/roles.decorator.spec.ts
    +/employee/src/app/core/auth/decorators/roles.decorator.ts
    +/employee/src/app/core/auth/guards/index.ts
    +/employee/src/app/core/auth/guards/roles.guard.spec.ts
    +/employee/src/app/core/auth/guards/roles.guard.ts
    +/employee/src/app/core/auth/model/index.ts
    +/employee/src/app/core/auth/model/roles.enum.ts
    +/employee/src/app/core/auth/model/user-request.interface.ts
    +/employee/src/app/core/auth/services/auth.service.spec.ts
    +/employee/src/app/core/auth/services/auth.service.ts
    +/employee/src/app/core/auth/services/index.ts
    +/employee/src/app/core/auth/strategies/index.ts
    +/employee/src/app/core/auth/strategies/jwt.strategy.spec.ts
    +/employee/src/app/core/auth/strategies/jwt.strategy.ts
    +/employee/src/app/core/user/user.module.ts
    +/employee/src/app/core/user/model/index.ts
    +/employee/src/app/core/user/model/dto/user-payload.dto.ts
    +/employee/src/app/core/user/model/entities/user.entity.ts
    +/employee/src/app/core/user/services/index.ts
    +/employee/src/app/core/user/services/user.service.spec.ts
    +/employee/src/app/core/user/services/user.service.ts
    +/employee/test/auth/auth.service.mock.ts
    +/employee/test/user/user.repository.mock.ts
    +/employee/src/app/employee/employee.module.ts
    +/employee/src/app/employee/model/entities/employee.entity.ts
    +/employee/src/app/employee/model/index.ts
    +/employee/src/app/employee/controllers/employee.crud.controller.ts
    +/employee/src/app/employee/services/employee.crud.service.ts
    +/employee/src/app/employee/services/index.ts
    +/employee/src/app/employee/controllers/index.ts
    +
    +
    +
  10. +
  11. +

    Open the VSCode

    +
    +

    Execute the commands:

    +
    +
    +
    +
    yarn install
    +code .
    +
    +
    +
  12. +
  13. +

    Fill in the entity: src/app/employee/model/entities/employee.entity.ts

    +
    +
      +
    1. +

      Add the columns

      +
      +
      +
      @Entity()
      +export class Employee extends BaseEntity {
      +  @Column('varchar', { length: 255, nullable: true })
      +  name?: string;
      +
      +  @Column('varchar', { length: 255, nullable: true })
      +  surname?: string;
      +
      +  @Column('varchar', { length: 255, nullable: true })
      +  email?: string;
      +}
      +
      +
      +
    2. +
    3. +

      Add the validations

      +
      +
      +
      @Entity()
      +export class Employee extends BaseEntity {
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  name?: string;
      +
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  surname?: string;
      +
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @IsEmail()
      +  @Column('varchar', { length: 255, nullable: true })
      +  email?: string;
      +}
      +
      +
      +
    4. +
    5. +

      Add the transformations

      +
      +

      In this specific case, we will not transform any property, but you can see an example in the src/app/shared/model/entities/base-entity.entity.ts file.

      +
      +
      +
      +
      export abstract class BaseEntity {
      +  @PrimaryGeneratedColumn('increment')
      +  id!: number;
      +
      +  @VersionColumn({ default: 1 })
      +  @Exclude({ toPlainOnly: true })
      +  version!: number;
      +
      +  @CreateDateColumn()
      +  @Exclude({ toPlainOnly: true })
      +  createdAt!: string;
      +
      +  @UpdateDateColumn()
      +  @Exclude({ toPlainOnly: true })
      +  updatedAt!: string;
      +}
      +
      +
      +
    6. +
    7. +

      Add swagger metadata

      +
      +
      +
      @Entity()
      +export class Employee extends BaseEntity {
      +  @ApiPropertyOptional()
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  name?: string;
      +
      +  @ApiPropertyOptional()
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  surname?: string;
      +
      +  @ApiPropertyOptional()
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @IsEmail()
      +  @Column('varchar', { length: 255, nullable: true })
      +  email?: string;
      +}
      +
      +
      +
    8. +
    +
    +
  14. +
  15. +

    Add swagger metadata to src/app/employee/controllers/employee.crud.controller.ts

    +
    +
    +
    @ApiTags('employee')
    +
    +
    +
  16. +
  17. +

    Generate database migrations

    +
    +
      +
    1. +

      Build the application: yarn build

      +
    2. +
    3. +

      In order to create migration scripts with TypeORM, you need to install ts-node: yarn global add ts-node

      +
    4. +
    5. +

      Generate the tables creation migration: yarn run typeorm migration:generate -n CreateTables

      +
      +
      +generate migrations +
      +
      +
      +

      The output will be something similar to:

      +
      +
      +
      +
      export class CreateTables1572480273012 implements MigrationInterface {
      +  name = 'CreateTables1572480273012';
      +
      +  public async up(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(
      +      `CREATE TABLE "user" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "version" integer NOT NULL DEFAULT (1), "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "updatedAt" datetime NOT NULL DEFAULT (datetime('now')), "username" varchar(255) NOT NULL, "password" varchar(255) NOT NULL, "role" integer NOT NULL DEFAULT (0))`,
      +      undefined,
      +    );
      +    await queryRunner.query(
      +      `CREATE TABLE "employee" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "version" integer NOT NULL DEFAULT (1), "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "updatedAt" datetime NOT NULL DEFAULT (datetime('now')), "name" varchar(255), "surname" varchar(255), "email" varchar(255))`,
      +      undefined,
      +    );
      +  }
      +
      +  public async down(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(`DROP TABLE "employee"`, undefined);
      +    await queryRunner.query(`DROP TABLE "user"`, undefined);
      +  }
      +}
      +
      +
      +
      +

      The number in the name is a timestamp, so it may change in your application.

      +
      +
    6. +
    7. +

      Create a migration to insert data: `yarn run typeorm migration:generate -n InsertData`

      +
      +
      +insert data +
      +
      +
      +

      and fill in with the following code:

      +
      +
      +
      +
      export class InsertData1572480830290 implements MigrationInterface {
      +  public async up(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(1, 'Santiago', 'Fowler', 'Santiago.Fowler@example.com');`,
      +    );
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(2, 'Clinton', 'Thornton', 'Clinton.Thornton@example.com');`,
      +    );
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(3, 'Lisa', 'Rodriquez', 'Lisa.Rodriquez@example.com');`,
      +    );
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(4, 'Calvin', 'Becker', 'Calvin.Becker@example.com');`,
      +    );
      +    await queryRunner.query(`INSERT INTO USER(id, username, password, role) VALUES(?, ?, ?, ?);`, [
      +      1,
      +      'user',
      +      await hash('password', await genSalt(12)),
      +      roles.USER,
      +    ]);
      +    await queryRunner.query(`INSERT INTO USER(id, username, password, role) VALUES(?, ?, ?, ?);`, [
      +      2,
      +      'admin',
      +      await hash('admin', await genSalt(12)),
      +      roles.ADMIN,
      +    ]);
      +  }
      +
      +  public async down(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(`DELETE FROM EMPLOYEE`);
      +    await queryRunner.query(`DELETE FROM USER`);
      +  }
      +}
      +
      +
      +
    8. +
    +
    +
  18. +
  19. +

    Start the application: yarn start:dev

    +
    +
    +start app +
    +
    +
  20. +
  21. +

    Check the swagger endpoint: http://localhost:3000/v1/api

    +
    +
    +swagger +
    +
    +
  22. +
  23. +

    Make petitions to the employee CRUD: http://localhost:3000/v1/employee/employees

    +
    +
    +employees +
    +
    +
  24. +
  25. +

    Write the tests

    +
    +

    As we do not create any methods and only add some properties to the entity, the whole application is covered by the autogenerated tests. As we added some modules, you need to uncomment some lines in the src/app/core/configuration/services/configuration.service.spec.ts:

    +
    +
    +
    +
    describe('ConfigurationService', () => {
    +  const configService: ConfigurationService = new ConfigurationService();
    +
    +  it('should return the values of test config file', () => {
    +    expect(configService.isDev).toStrictEqual(def.isDev);
    +    expect(configService.host).toStrictEqual(def.host);
    +    expect(configService.port).toStrictEqual(def.port);
    +    expect(configService.clientUrl).toStrictEqual(def.clientUrl);
    +    expect(configService.globalPrefix).toStrictEqual(def.globalPrefix);
    +    // Remove comments if you add those modules
    +    expect(configService.database).toStrictEqual(def.database);
    +    expect(configService.swaggerConfig).toStrictEqual(def.swaggerConfig);
    +    expect(configService.jwtConfig).toStrictEqual(def.jwtConfig);
    +    // expect(configService.mailerConfig).toStrictEqual(def.mailerConfig);
    +  });
    +  it('should take the value of environment varible if defined', () => {
    +    process.env.isDev = 'true';
    +    process.env.host = 'notlocalhost';
    +    process.env.port = '123456';
    +    process.env.clientUrl = 'http://theclienturl.net';
    +    process.env.globalPrefix = 'v2';
    +    process.env.swaggerConfig = JSON.stringify({
    +      swaggerTitle: 'Test Application',
    +    });
    +    process.env.database = JSON.stringify({
    +      type: 'oracle',
    +      cli: { entitiesDir: 'src/notentitiesdir' },
    +    });
    +    process.env.jwtConfig = JSON.stringify({ secret: 'NOTSECRET' });
    +    // process.env.mailerConfig = JSON.stringify({ mailOptions: { host: 'notlocalhost' }});
    +
    +    expect(configService.isDev).toBe(true);
    +    expect(configService.host).toBe('notlocalhost');
    +    expect(configService.port).toBe(123456);
    +    expect(configService.clientUrl).toBe('http://theclienturl.net');
    +    expect(configService.globalPrefix).toBe('v2');
    +    const database: any = { ...def.database, type: 'oracle' };
    +    database.cli.entitiesDir = 'src/notentitiesdir';
    +    expect(configService.database).toStrictEqual(database);
    +    expect(configService.swaggerConfig).toStrictEqual({
    +      ...def.swaggerConfig,
    +      swaggerTitle: 'Test Application',
    +    });
    +    expect(configService.jwtConfig).toStrictEqual({
    +      ...def.jwtConfig,
    +      secret: 'NOTSECRET',
    +    });
    +    // const mail: any = { ...def.mailerConfig };
    +    // mail.mailOptions.host = 'notlocalhost';
    +    // expect(configService.mailerConfig).toStrictEqual(mail);
    +
    +    process.env.isDev = undefined;
    +    process.env.host = undefined;
    +    process.env.port = undefined;
    +    process.env.clientUrl = undefined;
    +    process.env.globalPrefix = undefined;
    +    process.env.database = undefined;
    +    process.env.swaggerConfig = undefined;
    +    process.env.jwtConfig = undefined;
    +    // process.env.mailerConfig = undefined;
    +  });
    +});
    +
    +
    +
    +

    And the output should be:

    +
    +
    +
    +test +
    +
    +
  26. +
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/samples-step-by-step.html b/docs/devonfw.github.io/1.0/devon4node.wiki/samples-step-by-step.html new file mode 100644 index 00000000..e028d572 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/samples-step-by-step.html @@ -0,0 +1,780 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Create the employee sample step by step

+
+ +
+
+
+

Application requisites

+
+
+

The employee application needs:

+
+
+
    +
  • +

    A configuration module

    +
  • +
  • +

    A SQLite in memory database

    +
  • +
  • +

    Security: CORS

    +
  • +
  • +

    Swagger support

    +
  • +
  • +

    Authentication using JWT

    +
  • +
  • +

    CRUD for manage employees. The employees will have the following properties:

    +
    +
      +
    • +

      name

      +
    • +
    • +

      surname

      +
    • +
    • +

      email

      +
    • +
    +
    +
  • +
+
+
+
+
+

Create the application

+
+
+
    +
  1. +

    Install Nest CLI

    +
    +

    Execute the command yarn global add @nestjs/cli

    +
    +
  2. +
  3. +

    Install devon4node schematics

    +
  4. +
  5. +

    Execute the command yarn global add @devon4node/schematics

    +
  6. +
  7. +

    Create the new application

    +
    +

    Execute the command nest g -c @devon4node/schematics application employee

    +
    +
  8. +
  9. +

    Then, we need to add some components, go inside the project folder and execute the following commands:

    +
    +

    Go inside project folder: cd employee.

    +
    +
    +

    Config module: nest g -c @devon4node/schematics config-module.

    +
    +
    +

    TypeORM database, choose sqlite DB when asked nest g -c @devon4node/schematics typeorm.

    +
    +
    +

    Add security: nest g -c @devon4node/schematics security.

    +
    +
    +

    Swagger module: nest g -c @devon4node/schematics swagger.

    +
    +
    +

    Auth-jwt authentication: nest g -c @devon4node/schematics auth-jwt.

    +
    +
    +

    Add an application module: nest g -c @devon4node/schematics module employee.

    +
    +
    +

    Add CRUD component: nest g -c @devon4node/schematics crud employee/employee.

    +
    +
    +

    With this, you will generate the following files:

    +
    +
    +
    +
    /employee/.prettierrc
    +/employee/nest-cli.json
    +/employee/package.json
    +/employee/README.md
    +/employee/tsconfig.build.json
    +/employee/tsconfig.json
    +/employee/tslint.json
    +/employee/src/main.ts
    +/employee/test/app.e2e-spec.ts
    +/employee/test/jest-e2e.json
    +/employee/src/app/app.controller.spec.ts
    +/employee/src/app/app.controller.ts
    +/employee/src/app/app.module.ts
    +/employee/src/app/app.service.ts
    +/employee/src/app/core/core.module.ts
    +/employee/src/app/shared/logger/winston.logger.ts
    +/employee/src/app/core/configuration/configuration.module.ts
    +/employee/src/app/core/configuration/model/index.ts
    +/employee/src/app/core/configuration/model/types.ts
    +/employee/src/app/core/configuration/services/configuration.service.spec.ts
    +/employee/src/app/core/configuration/services/configuration.service.ts
    +/employee/src/app/core/configuration/services/index.ts
    +/employee/src/config/default.ts
    +/employee/src/config/develop.ts
    +/employee/src/config/production.ts
    +/employee/src/config/test.ts
    +/employee/src/config/uat.ts
    +/employee/docker-compose.yml
    +/employee/ormconfig.json
    +/employee/src/app/shared/model/entities/base-entity.entity.ts
    +/employee/src/app/core/auth/auth.module.ts
    +/employee/src/app/core/auth/controllers/auth.controller.spec.ts
    +/employee/src/app/core/auth/controllers/auth.controller.ts
    +/employee/src/app/core/auth/controllers/index.ts
    +/employee/src/app/core/auth/decorators/index.ts
    +/employee/src/app/core/auth/decorators/roles.decorator.spec.ts
    +/employee/src/app/core/auth/decorators/roles.decorator.ts
    +/employee/src/app/core/auth/guards/index.ts
    +/employee/src/app/core/auth/guards/roles.guard.spec.ts
    +/employee/src/app/core/auth/guards/roles.guard.ts
    +/employee/src/app/core/auth/model/index.ts
    +/employee/src/app/core/auth/model/roles.enum.ts
    +/employee/src/app/core/auth/model/user-request.interface.ts
    +/employee/src/app/core/auth/services/auth.service.spec.ts
    +/employee/src/app/core/auth/services/auth.service.ts
    +/employee/src/app/core/auth/services/index.ts
    +/employee/src/app/core/auth/strategies/index.ts
    +/employee/src/app/core/auth/strategies/jwt.strategy.spec.ts
    +/employee/src/app/core/auth/strategies/jwt.strategy.ts
    +/employee/src/app/core/user/user.module.ts
    +/employee/src/app/core/user/model/index.ts
    +/employee/src/app/core/user/model/dto/user-payload.dto.ts
    +/employee/src/app/core/user/model/entities/user.entity.ts
    +/employee/src/app/core/user/services/index.ts
    +/employee/src/app/core/user/services/user.service.spec.ts
    +/employee/src/app/core/user/services/user.service.ts
    +/employee/test/auth/auth.service.mock.ts
    +/employee/test/user/user.repository.mock.ts
    +/employee/src/app/employee/employee.module.ts
    +/employee/src/app/employee/model/entities/employee.entity.ts
    +/employee/src/app/employee/model/index.ts
    +/employee/src/app/employee/controllers/employee.crud.controller.ts
    +/employee/src/app/employee/services/employee.crud.service.ts
    +/employee/src/app/employee/services/index.ts
    +/employee/src/app/employee/controllers/index.ts
    +
    +
    +
  10. +
  11. +

    Open the VSCode

    +
    +

    Execute the commands:

    +
    +
    +
    +
    yarn install
    +code .
    +
    +
    +
  12. +
  13. +

    Fill in the entity: src/app/employee/model/entities/employee.entity.ts

    +
    +
      +
    1. +

      Add the columns

      +
      +
      +
      @Entity()
      +export class Employee extends BaseEntity {
      +  @Column('varchar', { length: 255, nullable: true })
      +  name?: string;
      +
      +  @Column('varchar', { length: 255, nullable: true })
      +  surname?: string;
      +
      +  @Column('varchar', { length: 255, nullable: true })
      +  email?: string;
      +}
      +
      +
      +
    2. +
    3. +

      Add the validations

      +
      +
      +
      @Entity()
      +export class Employee extends BaseEntity {
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  name?: string;
      +
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  surname?: string;
      +
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @IsEmail()
      +  @Column('varchar', { length: 255, nullable: true })
      +  email?: string;
      +}
      +
      +
      +
    4. +
    5. +

      Add the transformations

      +
      +

      In this specific case, we will not transform any property, but you can see an example in the src/app/shared/model/entities/base-entity.entity.ts file.

      +
      +
      +
      +
      export abstract class BaseEntity {
      +  @PrimaryGeneratedColumn('increment')
      +  id!: number;
      +
      +  @VersionColumn({ default: 1 })
      +  @Exclude({ toPlainOnly: true })
      +  version!: number;
      +
      +  @CreateDateColumn()
      +  @Exclude({ toPlainOnly: true })
      +  createdAt!: string;
      +
      +  @UpdateDateColumn()
      +  @Exclude({ toPlainOnly: true })
      +  updatedAt!: string;
      +}
      +
      +
      +
    6. +
    7. +

      Add swagger metadata

      +
      +
      +
      @Entity()
      +export class Employee extends BaseEntity {
      +  @ApiPropertyOptional()
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  name?: string;
      +
      +  @ApiPropertyOptional()
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  surname?: string;
      +
      +  @ApiPropertyOptional()
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @IsEmail()
      +  @Column('varchar', { length: 255, nullable: true })
      +  email?: string;
      +}
      +
      +
      +
    8. +
    +
    +
  14. +
  15. +

    Add swagger metadata to src/app/employee/controllers/employee.crud.controller.ts

    +
    +
    +
    @ApiTags('employee')
    +
    +
    +
  16. +
  17. +

    Generate database migrations

    +
    +
      +
    1. +

      Build the application: yarn build

      +
    2. +
    3. +

      In order to create migration scripts with TypeORM, you need to install ts-node: yarn global add ts-node

      +
    4. +
    5. +

      Generate the tables creation migration: yarn run typeorm migration:generate -n CreateTables

      +
      +
      +generate migrations +
      +
      +
      +

      The output will be something similar to:

      +
      +
      +
      +
      export class CreateTables1572480273012 implements MigrationInterface {
      +  name = 'CreateTables1572480273012';
      +
      +  public async up(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(
      +      `CREATE TABLE "user" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "version" integer NOT NULL DEFAULT (1), "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "updatedAt" datetime NOT NULL DEFAULT (datetime('now')), "username" varchar(255) NOT NULL, "password" varchar(255) NOT NULL, "role" integer NOT NULL DEFAULT (0))`,
      +      undefined,
      +    );
      +    await queryRunner.query(
      +      `CREATE TABLE "employee" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "version" integer NOT NULL DEFAULT (1), "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "updatedAt" datetime NOT NULL DEFAULT (datetime('now')), "name" varchar(255), "surname" varchar(255), "email" varchar(255))`,
      +      undefined,
      +    );
      +  }
      +
      +  public async down(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(`DROP TABLE "employee"`, undefined);
      +    await queryRunner.query(`DROP TABLE "user"`, undefined);
      +  }
      +}
      +
      +
      +
      +

      The number in the name is a timestamp, so it may change in your application.

      +
      +
    6. +
    7. +

      Create a migration to insert data: `yarn run typeorm migration:generate -n InsertData`

      +
      +
      +insert data +
      +
      +
      +

      and fill in with the following code:

      +
      +
      +
      +
      export class InsertData1572480830290 implements MigrationInterface {
      +  public async up(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(1, 'Santiago', 'Fowler', 'Santiago.Fowler@example.com');`,
      +    );
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(2, 'Clinton', 'Thornton', 'Clinton.Thornton@example.com');`,
      +    );
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(3, 'Lisa', 'Rodriquez', 'Lisa.Rodriquez@example.com');`,
      +    );
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(4, 'Calvin', 'Becker', 'Calvin.Becker@example.com');`,
      +    );
      +    await queryRunner.query(`INSERT INTO USER(id, username, password, role) VALUES(?, ?, ?, ?);`, [
      +      1,
      +      'user',
      +      await hash('password', await genSalt(12)),
      +      roles.USER,
      +    ]);
      +    await queryRunner.query(`INSERT INTO USER(id, username, password, role) VALUES(?, ?, ?, ?);`, [
      +      2,
      +      'admin',
      +      await hash('admin', await genSalt(12)),
      +      roles.ADMIN,
      +    ]);
      +  }
      +
      +  public async down(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(`DELETE FROM EMPLOYEE`);
      +    await queryRunner.query(`DELETE FROM USER`);
      +  }
      +}
      +
      +
      +
    8. +
    +
    +
  18. +
  19. +

    Start the application: yarn start:dev

    +
    +
    +start app +
    +
    +
  20. +
  21. +

    Check the swagger endpoint: http://localhost:3000/v1/api

    +
    +
    +swagger +
    +
    +
  22. +
  23. +

    Make petitions to the employee CRUD: http://localhost:3000/v1/employee/employees

    +
    +
    +employees +
    +
    +
  24. +
  25. +

    Write the tests

    +
    +

    As we do not create any methods and only add some properties to the entity, the whole application is covered by the autogenerated tests. As we added some modules, you need to uncomment some lines in the src/app/core/configuration/services/configuration.service.spec.ts:

    +
    +
    +
    +
    describe('ConfigurationService', () => {
    +  const configService: ConfigurationService = new ConfigurationService();
    +
    +  it('should return the values of test config file', () => {
    +    expect(configService.isDev).toStrictEqual(def.isDev);
    +    expect(configService.host).toStrictEqual(def.host);
    +    expect(configService.port).toStrictEqual(def.port);
    +    expect(configService.clientUrl).toStrictEqual(def.clientUrl);
    +    expect(configService.globalPrefix).toStrictEqual(def.globalPrefix);
    +    // Remove comments if you add those modules
    +    expect(configService.database).toStrictEqual(def.database);
    +    expect(configService.swaggerConfig).toStrictEqual(def.swaggerConfig);
    +    expect(configService.jwtConfig).toStrictEqual(def.jwtConfig);
    +    // expect(configService.mailerConfig).toStrictEqual(def.mailerConfig);
    +  });
    +  it('should take the value of environment varible if defined', () => {
    +    process.env.isDev = 'true';
    +    process.env.host = 'notlocalhost';
    +    process.env.port = '123456';
    +    process.env.clientUrl = 'http://theclienturl.net';
    +    process.env.globalPrefix = 'v2';
    +    process.env.swaggerConfig = JSON.stringify({
    +      swaggerTitle: 'Test Application',
    +    });
    +    process.env.database = JSON.stringify({
    +      type: 'oracle',
    +      cli: { entitiesDir: 'src/notentitiesdir' },
    +    });
    +    process.env.jwtConfig = JSON.stringify({ secret: 'NOTSECRET' });
    +    // process.env.mailerConfig = JSON.stringify({ mailOptions: { host: 'notlocalhost' }});
    +
    +    expect(configService.isDev).toBe(true);
    +    expect(configService.host).toBe('notlocalhost');
    +    expect(configService.port).toBe(123456);
    +    expect(configService.clientUrl).toBe('http://theclienturl.net');
    +    expect(configService.globalPrefix).toBe('v2');
    +    const database: any = { ...def.database, type: 'oracle' };
    +    database.cli.entitiesDir = 'src/notentitiesdir';
    +    expect(configService.database).toStrictEqual(database);
    +    expect(configService.swaggerConfig).toStrictEqual({
    +      ...def.swaggerConfig,
    +      swaggerTitle: 'Test Application',
    +    });
    +    expect(configService.jwtConfig).toStrictEqual({
    +      ...def.jwtConfig,
    +      secret: 'NOTSECRET',
    +    });
    +    // const mail: any = { ...def.mailerConfig };
    +    // mail.mailOptions.host = 'notlocalhost';
    +    // expect(configService.mailerConfig).toStrictEqual(mail);
    +
    +    process.env.isDev = undefined;
    +    process.env.host = undefined;
    +    process.env.port = undefined;
    +    process.env.clientUrl = undefined;
    +    process.env.globalPrefix = undefined;
    +    process.env.database = undefined;
    +    process.env.swaggerConfig = undefined;
    +    process.env.jwtConfig = undefined;
    +    // process.env.mailerConfig = undefined;
    +  });
    +});
    +
    +
    +
    +

    And the output should be:

    +
    +
    +
    +test +
    +
    +
  26. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/devon4node.wiki/samples.html b/docs/devonfw.github.io/1.0/devon4node.wiki/samples.html new file mode 100644 index 00000000..fabde443 --- /dev/null +++ b/docs/devonfw.github.io/1.0/devon4node.wiki/samples.html @@ -0,0 +1,386 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4node Samples

+
+
+

In the folder /samples, you can find some devon4node examples that could be useful for you in order to understand better the framework.

+
+
+

The samples are:

+
+
+ +
+
+

Also, we have another realistic example in the My Thai Star repository. This example is the implementation of the My Thai Star backend, which is compatible with the frontend made with Angular. To do that, this Node.js implementation exposes the same API as the Java backend. Take care with this example: since we need to follow the Java API, some components do not follow the devon4node patterns and code conventions.

+
+
+
+
+

Todo example

+
+
+

This example is the backend part of a TO-DO application. It exposes an API where you can create, read, update and delete a TO-DO list.

+
+
+

In order to start the application, run the following commands in the todo folder:

+
+
+
+
$ yarn
+$ yarn build
+$ yarn start
+
+
+
+

Now, you can access the application using the URL http://localhost:3000/v1/todo/todos. If you want to know all the exposed endpoints, you can see the Swagger UI at: http://localhost:3000/v1/api.

+
+
+

Also, in this example we show you how to control access to your application by implementing an authentication mechanism using JWT and a role-based strategy. In order to access the list of todos (http://localhost:3000/v1/todo/todos), you first need to call POST http://localhost:3000/v1/auth/login and send the user information in the body:

+
+
+
+
{
+  "username": "user",
+  "password": "password"
+}
+
+
+
+

It will return a JWT token for the user user. The role of this user is USER, so you can only access the methods GET, POST and DELETE of the endpoint http://localhost:3000/v1/todo/todos. If you log in with the user admin/admin, you will be able to access the methods UPDATE and PATCH.

+
+
+
+
+

Employee example

+
+
+

This is an example of employee management application. With the application you can create, read, update and delete employees.

+
+
+

In order to start the application, run the following commands in the todo folder:

+
+
+
+
$ yarn
+$ yarn build
+$ yarn start
+
+
+
+

Now, you can access the application using the URL http://localhost:8081/v1/employee/employees. If you want to know all the exposed endpoints, you can see the Swagger UI at: http://localhost:8081/v1/api.

+
+
+

This is a simple example without authentication. With this example you can learn how to work with database migrations. You can find them in the folder /src/migrations. The TypeORM is configured in order to execute the migrations every time that you start this application at ormconfig.json with the following flag:

+
+
+
+
"migrationsRun": true
+
+
+
+

You can also execute the migration manually by typing the command devon4node db migration:run, or revert it by executing devon4node db migration:revert. Take into account that the database this application uses is an in-memory SQLite, so every time you stop the application all data is lost.

+
+
+
+
+

Components example

+
+
+

This example allows you to better understand the execution order of the components of a devon4node application (guards, pipes, interceptors, filters, middleware).

+
+
+

In order to start the application, run the following commands in the todo folder:

+
+
+
+
$ yarn
+$ yarn build
+$ yarn start
+
+
+
+

In order to see the execution order, you can call http://localhost:3000/v1. It will show you the execution order of all components except the filters. If you want to know the execution order while a filter is applied, call the endpoint with the following query parameters: ?hello=error, ?hello=controller, ?hello=global.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/db/guide-blazegraph.html b/docs/devonfw.github.io/1.0/general/db/guide-blazegraph.html new file mode 100644 index 00000000..cc6685a2 --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/db/guide-blazegraph.html @@ -0,0 +1,314 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Blazegraph

+
+
+

This section is the place to share experience for those who use Blazegraph as NoSQL database.

+
+
+
+
+

Java

+
+ +
+
+
+

Attention

+
+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+
+

Driver

+
+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see here.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/db/guide-cassandra.html b/docs/devonfw.github.io/1.0/general/db/guide-cassandra.html new file mode 100644 index 00000000..a4fea173 --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/db/guide-cassandra.html @@ -0,0 +1,335 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Cassandra

+
+
+

This section is the place to share experience for those who use Cassandra as NoSQL database.

+
+
+
+
+

Java

+
+ +
+
+
+

Attention

+
+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+
+

Driver

+
+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see here.

+
+
+
+
+

Spring-Data

+
+
+

There is spring-data support available for cassandra via spring-data-cassandra.

+
+
+ + + + + +
+ + +Please note that some time ago we had feedback from projects that had issues with spring-data-cassandra and switched back to using the driver natively. We assume the issues are meanwhile resolved. TODO: collect more feedback and update this guide. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/db/guide-couchdb.html b/docs/devonfw.github.io/1.0/general/db/guide-couchdb.html new file mode 100644 index 00000000..e69dbe6a --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/db/guide-couchdb.html @@ -0,0 +1,314 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

CouchDB

+
+
+

This section is the place to share experience for those who use CouchDB as NoSQL database.

+
+
+
+
+

Java

+
+ +
+
+
+

Attention

+
+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+
+

Driver

+
+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see here.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/db/guide-database.html b/docs/devonfw.github.io/1.0/general/db/guide-database.html new file mode 100644 index 00000000..3f0b7683 --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/db/guide-database.html @@ -0,0 +1,478 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Database

+
+
+

For your business application with devonfw you need to choose the right database. +In devonfw we are not biased for a particular product so you have the freedom of choice.

+
+
+
+
+

RDBMS

+
+
+

The classical and well-established form of a database is a relational database management system (RDBMS). +In devonfw we recommend to use an RDBMS unless you have a specific need. +However, in case you have the need for big data, graph-data, BLOB focus, or schema-less dynamic data you can have a look at NoSQL options but be aware that these may be experimental and are not fully supported by devonfw.

+
+
+
+
+

Options

+
+
+

In devonfw we are not biased for a particular RDBMS so you have the freedom of choice. +Here are the most common options:

+
+
+
    +
  • +

    SAP Hana (high performance in-memory, many advanced features)

    +
  • +
  • +

    Oracle (most established, well featured for enterprise)

    +
  • +
  • +

    PostgreSQL (great open-source RDBMS)

    +
  • +
  • +

    MariaDB (true OSS successor of MySQL)

    +
  • +
  • +

    MS SQL Server (best choice for Microsoft and Windows dominated IT landscapes)

    +
  • +
+
+
+

Please click on any of the above choices and go to the corresponding guide to find specific details such as client/driver.

+
+
+
+
+

NoSQL

+
+
+

While not (yet) officially supported and recommended there are also interesting NoSQL (Not Only SQL) databases that could be an interesting alternative. Please be aware that you will typically not be able to use JPA (and hibernate). Further, before choosing a NoSQL database you should check the following aspects:

+
+
+
    +
  • +

    Is the database of choice reliable and mature enough for your project?

    +
  • +
  • +

    Can the operators of your product support the database of choice properly (provisioning, administration, backup, scaling & clustering, monitoring, etc.)?

    +
  • +
  • +

    Does the database of choice meet the requirements of your project (ACID vs. eventual consistency, CAP theorem)?

    +
  • +
+
+
+

There are good reasons to choose a particular NoSQL database in specific cases (e.g. extreme demand for big-data, throughput or scaling). +But as indicated by the questions above you need to be fully aware of what you are doing. +NoSQL databases can be schemaless (untyped, dynamic & flexible) and/or schemaful (typed, structured & strict). +Further, there are different types of NoSQL databases that are discussed in the following sub-sections:

+
+
+
+
+

Java

+
+ +
+
+
+

== Column DB

+
+
+

Column NoSQL databases are more related to a regular RDBMS with their tables and columns. +However, they typically do not offer relational support with joins to the same level as you expect from an RDBMS. +Therefore, you have to carefully design your data-model upfront with all the knowledge of how you later want to query your data.

+
+
+

The most prominent options are:

+
+
+
    +
  • +

    Cassandra (high-performance, schema-based DB)

    +
  • +
  • +

    HBase (distributed, big-data Hadoop database)

    +
  • +
+
+
+
+
+

== Key-Value DB

+
+
+

As indicated by the name, a key-value database stores objects as key/value pairs similar to Properties or Map in Java.

+
+
+

The most prominent options are:

+
+
+
    +
  • +

    Redis (in-memory key/value store, especially used as cache or message broker)

    +
  • +
  • +

    aerospike

    +
  • +
+
+
+
+
+

== Document DB

+
+
+

A document database is similar to a key-value database, but it stores objects in standard structured formats such as XML, JSON, or BSON. +Therefore not only flat key/value pairs but even trees of hierarchical data can be stored, retrieved and queried.

+
+
+

The most prominent options are:

+
+
+ +
+
+
+
+

== Graph DB

+
+
+

If the connections (links/relations) between your data is key and an RDBMS is just not flexible or fast enough for your plans, then a graph database can help you. +They are very strong on storing and querying complex connections between entities. +For queries there are even specific standards and languages like Gremlin.

+
+
+

The most prominent options are:

+
+
+ +
+
+
+
+

== Hybrid DB

+
+
+

In addition to the above types there are some NoSQL databases that are hybrid and combine the features and aspects of these types. +While as an architect and developer you might love the idea to get all in one, you have to be careful with your choice. +If you do not exactly know your problem, you are not ready to make the right choice for your database. +Further, you might still be best-off with a good old RDBMS if you need to address multiple aspects together. +Anyhow, for experiments, PoCs, or small microservices with little risk it might be a great idea to choose a hybrid NoSQL database. +If you have collected very positive, profound and productive experience with such a product you can grow on it.

+
+
+

The most prominent options are:

+
+
+
    +
  • +

    OrientDB (object-oriented, hyper-flexible, column- and graph-based)

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/db/guide-gigaspaces.html b/docs/devonfw.github.io/1.0/general/db/guide-gigaspaces.html new file mode 100644 index 00000000..723769e3 --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/db/guide-gigaspaces.html @@ -0,0 +1,365 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

GigaSpaces XAP (Smart Cache)

+
+
+

This section is the place to share experience for those who use GigaSpaces XAP as NoSQL database.

+
+
+
+
+

Java

+
+ +
+
+
+

Attention

+
+
+ + + + + +
+ + +A sample for GigaSpaces integration has been contributed from a graduate work, which will be described here. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+
+

Possible Approach

+
+
+

GigaSpaces is currently not in the central maven repository, therefore an additional repository needs to be added along with the dependency:

+
+
+
+
<repositories>
+    <repository>
+	<id>org.openspaces</id>
+	<url>http://maven-repository.openspaces.org</url>
+    </repository>
+</repositories>
+
+<dependency>
+    <groupId>org.gigaspaces</groupId>
+    <artifactId>xap-openspaces</artifactId>
+    <version>${gsVersion}</version>
+</dependency>
+
+
+
+

Of course the version (${gsVersion}) needs to be adopted to your needs.

+
+
+
+
@Configuration
+public class ContextConfiguration {
+  @Bean
+  public GigaSpace space() {
+    UrlSpaceConfigurer urlSpaceConfigurer = new UrlSpaceConfigurer("jini://*/*/my-space");
+    return new GigaSpaceConfigurer(urlSpaceConfigurer).gigaSpace();
+  }
+}
+
+
+
+

To establish a connection with a running instance of GigaSpaces, a Configuration Class is required. Here a Bean will be created that retrieves via URL the name of a Space e.g. my-space (a Space is equivalent to a Database Schema). Of course a Space needs to be created first in order to use it (see also the Example). This bean can be used for all database typical operations e.g. create, read, update and delete data (a complete list can be found here). Another possibility to execute those operations is via spring-data (see section below). The spring-data-gigaspaces automatically detects if a GigaSpaces Bean exists.

+
+
+
+
+

Spring-Data

+
+
+

There is spring-data support available for GigaSpaces XAP (Smart Cache) via spring-data-gigaspaces.

+
+
+
+
+

Example

+
+
+

There is an implementation of the sample application, My Thai Star, using GigaSpaces XAP (Smart Cache) as data storage. More details can be found on mts-gigaspaces.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/db/guide-hana.html b/docs/devonfw.github.io/1.0/general/db/guide-hana.html new file mode 100644 index 00000000..044f434c --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/db/guide-hana.html @@ -0,0 +1,379 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

SAP HANA

+
+
+

This section contains hints for those who use SAP HANA, a very powerful and fast RDBMS. Besides general hints about the driver there are tips for more tight integration with other SAP features or products.

+
+
+
+
+

Java

+
+ +
+
+
+

Driver

+
+
+

SAP Hana is a commercial and professional product. +However, the hana JDBC driver is available in Maven Central, which makes it easy to integrate. +All you need is the following maven dependency:

+
+
+
+
<dependency>
+  <groupId>com.sap.cloud.db.jdbc</groupId>
+  <artifactId>ngdbc</artifactId>
+  <version>${hana.driver.version}</version>
+</dependency>
+
+
+
+

Of course the version (${hana.driver.version}) needs to be adapted to your needs (Hana installation in production, e.g. 2.4.64). +For an overview of available driver versions see here.

+
+
+
+
+

Developer Usage

+
+
+

For your local development environment you will love the free SAP HANA, Express Edition.

+
+
+

You can run HANA in several ways:

+
+
+ +
+
+

To get started with SAP HANA, Express Edition you can check out the tutorials at the SAP Developer Center.

+
+
+
+ + +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/db/guide-hbase.html b/docs/devonfw.github.io/1.0/general/db/guide-hbase.html new file mode 100644 index 00000000..69d6b116 --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/db/guide-hbase.html @@ -0,0 +1,315 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

HBase

+
+
+

This section is the place to share experience for those who use HBase as NoSQL database.

+
+
+
+
+

Java

+
+ +
+
+
+

Attention

+
+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+
+

Driver

+
+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see here and +hbase-java-api tutorial.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/db/guide-mariadb.html b/docs/devonfw.github.io/1.0/general/db/guide-mariadb.html new file mode 100644 index 00000000..e1fa2991 --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/db/guide-mariadb.html @@ -0,0 +1,309 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

MariaDB

+
+
+

This section gives guidance and hints for those who use MariaDB as RDBMS.

+
+
+
+
+

Java

+
+ +
+
+
+

Driver

+
+
+

MariaDB is fully open-source. The driver is therefore available in maven central. +Your dependency for the driver should look as follows:

+
+
+
+
<dependency>
+    <groupId>org.mariadb.jdbc</groupId>
+    <artifactId>mariadb-java-client</artifactId>
+    <version>${mariadb.driver.version}</version>
+</dependency>
+
+
+
+

Of course the version (${mariadb.driver.version}) needs to be adapted to your needs (MariaDB installation in production and JDK version, e.g. 2.5.1). +For an overview of available driver versions see here.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/db/guide-mongodb.html b/docs/devonfw.github.io/1.0/general/db/guide-mongodb.html new file mode 100644 index 00000000..f53df58a --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/db/guide-mongodb.html @@ -0,0 +1,314 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

MongoDB

+
+
+

This section is the place to share experience for those who use MongoDB as NoSQL database.

+
+
+
+
+

Java

+
+ +
+
+
+

Attention

+
+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+
+

Driver

+
+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see here.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/db/guide-mssqlserver.html b/docs/devonfw.github.io/1.0/general/db/guide-mssqlserver.html new file mode 100644 index 00000000..ed722121 --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/db/guide-mssqlserver.html @@ -0,0 +1,310 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

MS-SQL-Server

+
+
+

This section gives guidance and hints for those who use Microsoft SQL Server as RDBMS.

+
+
+
+
+

Java

+
+ +
+
+
+

Driver

+
+
+

Microsoft SQL Server is a commercial and professional product. +However, the JDBC driver is MIT licensed and available in Maven Central, which makes it easy to integrate. +Your dependency for the driver should look as follows:

+
+
+
+
<dependency>
+    <groupId>com.microsoft.sqlserver</groupId>
+    <artifactId>mssql-jdbc</artifactId>
+    <version>${mssqlserver.driver.version}</version>
+</dependency>
+
+
+
+

Of course the version (${mssqlserver.driver.version}) needs to be adapted to your needs (SQL Server installation in production and JDK version, e.g. 7.4.1.jre8). +For an overview of available driver versions see here.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/db/guide-neo4j.html b/docs/devonfw.github.io/1.0/general/db/guide-neo4j.html new file mode 100644 index 00000000..49c85978 --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/db/guide-neo4j.html @@ -0,0 +1,323 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

neo4j

+
+
+

This section is the place to share experience for those who use neo4j as NoSQL database.

+
+
+
+
+

Java

+
+ +
+
+
+

Attention

+
+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+
+

Driver

+
+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see here.

+
+
+
+
+

Spring-Data

+
+
+

There is spring-data integration available via spring-data-neo4j.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/db/guide-oracle.html b/docs/devonfw.github.io/1.0/general/db/guide-oracle.html new file mode 100644 index 00000000..08896a0d --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/db/guide-oracle.html @@ -0,0 +1,495 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Oracle RDBMS

+
+
+

This section contains hints for those who use Oracle RDBMS. Besides general hints about the driver there are tips for more tight integration with other Oracle features or products. However, if you work for a project where Oracle RDBMS is settled and not going to be replaced (you are in a vendor lock-in anyway), you might want to use even more from Oracle technology to take advantage from a closer integration.

+
+
+
+
+

Java

+
+ +
+
+
+

XE

+
+
+

For local development you should setup Oracle XE (eXpress Edition). +You need an oracle account, then you can download it from here.

+
+
+

The most comfortable way to run it as needed is using docker. You can build your own docker image from the downloaded RPM using the instructions and dockerfile from oracle. The following commands will build and start Oracle XE 18.4.0 on your machine:

+
+
+
+
git clone https://github.com/oracle/docker-images.git
+cd docker-images/OracleDatabase/SingleInstance/dockerfiles
+./buildDockerImage.sh -x -v 18.4.0
+docker run -d -p 1521:1521 --name=oracle-xe --restart=always -e ORACLE_PWD=«my-sys-pwd» oracle/database:18.4.0-xe
+
+
+
+

Please note that the buildDockerImage.sh will take a long time. Further, after docker run has passed you need to give time for your new container to startup and setup the Oracle XE DB. So be patient and give it some time. +(In case the build of the docker-image fails reproducibly and you want to give up with the Dockerfiles from Oracle you can also try this unofficial docker-oracle-xe solution. However, this is not recommended and may lead to other problems.).

+
+
+

Starting with XE 18c you need to be aware that oracle introduced a multi-tenant architecture. Hence xe refers to the root CDB while you typically want to connect to the PDB (pluggable database) and XE ships with exactly one of these called xepdb1. +To connect to your local XE database you need to use xepdb1 as the Service Name (typically in SQL Developer). The hostname should be localhost and the port is by default 1521 if you did not remap it with docker to something else. +In order to create schema users, use sys as Username and change Role to SYSDBA.

+
+
+

Hint: If you happen to end up connected to xe instead of xepdb1 in some case (e.g. in sqlplus), you may switch using this statement:

+
+
+
+
ALTER SESSION SET CONTAINER = XEPDB1;
+
+
+
+

The JDBC URL for your Oracle XE Database is:

+
+
+
+
jdbc:oracle:thin:@//localhost:1521/xepdb1
+
+
+
+

To locally connect as sysdba without password use the following command (connect / as sysdba is not working anymore):

+
+
+
+
sqlplus sys/Oracle18@localhost/XE as sysdba
+
+
+
+
+
+

Driver

+
+
+

The oracle JDBC driver is available in maven central. +Oracle JDBC drivers usually are backward and forward compatible so you should be able to use an older driver with a newer Oracle DB, etc. +Your dependency for the oracle driver should look as follows:

+
+
+
+
<dependency>
+  <groupId>com.oracle.database.jdbc</groupId>
+  <artifactId>ojdbc10</artifactId>
+  <version>${oracle.driver.version}</version>
+</dependency>
+
+
+
+

For the most recent Oracle DB 19 the property oracle.driver.version should be 19.8.0.0. The number in the artifactId correlates to the minimum Java Version so for Java8 artifactId should be ojdbc8 instead. It is fine to use ojdbc10 with Java11 or higher.

+
+
+
+
+

Pooling

+
+
+

In order to boost performance JDBC connections should be pooled and reused. If you are using Oracle RDBMS and do not plan to change that you can use the Oracle specific connection pool "Universal Connection Pool (UCP)" that is perfectly integrated with the Oracle driver. According to the documentation, UCP can even be used to manage third party data sources. +Like the JDBC driver also the UCP is available in maven central. The dependency should look like this:

+
+
+
+
<dependency>
+  <groupId>com.oracle.database.jdbc</groupId>
+  <artifactId>ucp</artifactId>
+  <version>${oracle.ucp.version}</version>
+</dependency>
+
+
+
+

with property oracle.ucp.version analogue to oracle.driver.version.

+
+
+

Configuration is done via application.properties like this (example):

+
+
+
+
#Oracle UCP
+##Datasource for accessing the database
+spring.datasource.url=jdbc:oracle:thin:@192.168.58.2:1521:xe
+spring.jpa.database-platform=org.hibernate.dialect.Oracle12cDialect
+spring.datasource.user=MyUser
+spring.datasource.password=ThisIsMyPassword
+spring.datasource.driver-class-name=oracle.jdbc.OracleDriver
+spring.datasource.schema=MySchema
+
+spring.datasource.type=oracle.ucp.jdbc.PoolDataSourceImpl
+spring.datasource.factory=oracle.ucp.jdbc.PoolDataSourceFactory
+spring.datasource.factory-method=getPoolDataSource
+spring.datasource.connectionFactoryClassName=oracle.jdbc.pool.OracleDataSource
+spring.datasource.validateConnectionOnBorrow=true
+spring.datasource.connectionPoolName=MyPool
+spring.datasource.jmx-enabled=true
+
+##Optional: Set the log level to INTERNAL_ERROR, SEVERE, WARNING, INFO, CONFIG, FINE, TRACE_10, FINER, TRACE_20, TRACE_30, or FINEST
+##logging.level.oracle.ucp=INTERNAL_ERROR
+##Optional: activate tracing
+##logging.level.oracle.ucp.jdbc.oracle.OracleUniversalPooledConnection=TRACE
+
+#Optional: Configures pool size manually
+#spring.datasource.minPoolSize=10
+#spring.datasource.maxPoolSize=40
+#spring.datasource.initialPoolSize=20
+
+
+
+

Resources: FAQ, developer’s guide, Java API Reference. For an in-depth discussion on how to use JDBC and UCP, see the Oracle documentation Connection Management Strategies for Java Applications using JDBC and UCP.

+
+
+

Note: there is a bug in UCP 12.1.0.2 that results in the creation of thousands of java.lang.Timer threads over hours or days of system uptime (see article on stackoverflow). Also, Oracle has a strange bug fixing / patching policy: instead of producing a fixed version 12.1.0.3 or 12.1.0.2.x, Oracle publishes collections of *.class files that must be manually patched into the ucp.jar! Therefore, use the newest versions only.

+
+
+
+
+

Messaging

+
+
+

In case you want to do messaging based on JMS you might consider the Oracle JMS also called Oracle Streams Advanced Queuing, or Oracle Advanced Queuing, or OAQ or AQ for short. OAQ is a JMS provider based on the Oracle RDBMS and included in the DB product for no extra fee. OAQ has some features that exceed the JMS standard like a retention time (i.e. a built-in backup mechanism that allows to make messages "unread" within a configurable period of time so that these messages do not have to be resent by the sending application). Also, OAQ messages are stored in relational tables so they can easily be observed by a test driver in a system test scenario. +Capgemini has used the Spring Data JDBC Extension in order to process OAQ messages within the same technical transaction as the resulting Oracle RDBMS data changes without using 2PC and an XA-compliant transaction manager - which is not available out of the box in Tomcat. This is possible only due to the fact that OAQ queues and RDBMS tables actually reside in the same database. However, this is higher magic and should only be tried if high transaction rates must be achieved by avoiding 2PC.

+
+
+
+
+

General Notes on the use of Oracle products

+
+
+

Oracle sells commercial products and receives licence fees for them. This includes access to a support organization. Therefore, at an early stage of your project, prepare for contacting oracle support in case of technical problems. You will need the Oracle support ID of your customer [i.e. the legal entity who pays the licence fee and runs the RDBMS] and your customer must grant you permission to use it in a service request - it is not legal to use your own support ID in a customer-related project. Your customer pays for that service anyway, so use it in case of a problem!

+
+
+

Software components like the JDBC driver or the UCP may be available without a registration or fee but they are protected by the Oracle Technology Network (OTN) License Agreement. The most important aspect of this licence agreement is the fact that an IT service provider is not allowed to simply download the Oracle software component, bundle it in a software artefact and deliver it to the customer. Instead, the Oracle software component must be (from a legal point of view) provided by the owner of the Oracle DB licence (i.e. your customer). This can be achieved in two ways: Advise your customer to install the Oracle software component in the application server as a library that can be used by your custom built system. Or, in cases where this is not feasible, e.g. in a OpenShift environment where the IT service provider delivers complete Docker images, you must advise your customer to (legally, i.e. documented in a written form) provide the Oracle software component to you, i.e. you don’t download the software component from the Oracle site but receive it from your customer.

+
+
+
+
+

Fix for TNS-Listener issues

+
+
+

When switching networks (e.g. due to VPN) you might find that your local Oracle XE stops working with this error:

+
+
+
+
Listener refused the connection with the following error:
+ORA-12505, TNS:listener does not currently know of SID given in connect descriptor
+
+
+
+

While a reboot resolves this problem, it is a huge pain to reboot every time this error occurs as this wastes a lot of time. +Therefore we suggest the following fix:

+
+
+
    +
  • +

    Go to your oracle installation and open the folder product/«version»/dbhomeXE/network/admin.

    +
  • +
  • +

    Edit the file listener.ora and change the value of the property HOST from your qualified hostname to localhost (HOST = localhost).

    +
  • +
  • +

    Edit the file tnsnames.ora and change the value of the HOST properties (two occurrences) from your qualified hostname to localhost (HOST = localhost).

    +
  • +
  • +

    Reboot your machine or (on windows) restart the service OracleServiceXE via services.msc.

    +
  • +
  • +

    Now this problem should be gone forever and you can continue your work.

    +
  • +
+
+
+

On older XE versions until 11g you could run the following SQL (sqlplus / as sysdba @reset_tns_listener.sql):

+
+
+
+
WHENEVER SQLERROR EXIT;
+ALTER SYSTEM SET local_listener = '(ADDRESS = (PROTOCOL = TCP)(HOST = 127.0.0.1)(PORT = 1521))';
+ALTER SYSTEM REGISTER;
+EXIT;
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/db/guide-orientdb.html b/docs/devonfw.github.io/1.0/general/db/guide-orientdb.html new file mode 100644 index 00000000..2cb8210e --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/db/guide-orientdb.html @@ -0,0 +1,322 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

OrientDB

+
+
+

This section is the place to share experience for those who use OrientDB (see also Open-Source community edition) as NoSQL database.

+
+
+
+
+

Java

+
+ +
+
+
+

Attention

+
+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+
+

Driver

+
+
+

For driver options see here.

+
+
+
+
+

Administration

+
+
+

OrientDB comes with a powerful, impressive admin interface for your web-browser called Studio.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/db/guide-postgresql.html b/docs/devonfw.github.io/1.0/general/db/guide-postgresql.html new file mode 100644 index 00000000..3568876b --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/db/guide-postgresql.html @@ -0,0 +1,309 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

PostgreSQL

+
+
+

This section gives guidance and hints for those who use PostgreSQL as RDBMS.

+
+
+
+
+

Java

+
+ +
+
+
+

Driver

+
+
+

PostgreSQL is fully open-source. The driver is therefore available in maven central. +Your dependency for the driver should look as follows:

+
+
+
+
<dependency>
+    <groupId>postgresql</groupId>
+    <artifactId>postgresql</artifactId>
+    <version>${postgresql.driver.version}</version>
+</dependency>
+
+
+
+

Of course the version (${postgresql.driver.version}) needs to be adapted to your needs (PostgreSQL installation in production and JDBC level suitable for your JDK, e.g. 9.1-901-1.jdbc4). +For an overview of available driver versions see here.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/db/guide-ravendb.html b/docs/devonfw.github.io/1.0/general/db/guide-ravendb.html new file mode 100644 index 00000000..c5310e34 --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/db/guide-ravendb.html @@ -0,0 +1,314 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

RavenDB

+
+
+

This section is the place to share experience for those who use RavenDB as NoSQL database.

+
+
+
+
+

Java

+
+ +
+
+
+

Attention

+
+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+
+

Driver

+
+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see ravendb-jvm-client and Java Client Features.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/db/guide-redis.html b/docs/devonfw.github.io/1.0/general/db/guide-redis.html new file mode 100644 index 00000000..79c4cd6f --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/db/guide-redis.html @@ -0,0 +1,314 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Redis

+
+
+

This section is the place to share experience for those who use Redis as NoSQL database.

+
+
+
+
+

Java

+
+ +
+
+
+

Attention

+
+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+
+

Driver

+
+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see here.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/db/master-database.html b/docs/devonfw.github.io/1.0/general/db/master-database.html new file mode 100644 index 00000000..7721f422 --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/db/master-database.html @@ -0,0 +1,1421 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Choosing your Database

+
+ +
+

Database

+
+

For your business application with devonfw you need to choose the right database. +In devonfw we are not biased for a particular product so you have the freedom of choice.

+
+
+
+

RDBMS

+
+

The classical and well-established form of a database is a relational database management system (RDBMS). +In devonfw we recommend using an RDBMS unless you have a specific need. +However, in case you have the need for big data, graph-data, BLOB focus, or schema-less dynamic data you can have a look at NoSQL options but be aware that these may be experimental and are not fully supported by devonfw.

+
+
+
+

Options

+
+

In devonfw we are not biased for a particular RDBMS so you have the freedom of choice. +Here are the most common options:

+
+
+
    +
  • +

    SAP Hana (high performance in-memory, many advanced features)

    +
  • +
  • +

    Oracle (most established, well featured for enterprise)

    +
  • +
  • +

    PostgreSQL (great open-source RDBMS)

    +
  • +
  • +

    MariaDB (true OSS successor of MySQL)

    +
  • +
  • +

    MS SQL Server (best choice for Microsoft and Windows dominated IT landscapes)

    +
  • +
+
+
+

Please click on any of the above choices and go to the corresponding guide to find specific details such as client/driver.

+
+
+
+

NoSQL

+
+

While not (yet) officially supported and recommended, there are also interesting NoSQL (Not Only SQL) databases that could be an attractive alternative. Please be aware that you will typically not be able to use JPA (and Hibernate). Further, before choosing a NoSQL database you should check the following aspects:

+
+
+
    +
  • +

    Is the database of choice reliable and mature enough for your project?

    +
  • +
  • +

    Can the operators of your product support the database of choice properly (provisioning, administration, backup, scaling & clustering, monitoring, etc.)?

    +
  • +
  • +

    Does the database of choice meet the requirements of your project (ACID vs. eventual consistency, CAP theorem)?

    +
  • +
+
+
+

There are good reasons to choose a particular NoSQL database in specific cases (e.g. extreme demand for big-data, throughput or scaling). +But as indicated by the questions above you need to be fully aware of what you are doing. +NoSQL databases can be schemaless (untyped, dynamic & flexible) and/or schemaful (typed, structured & strict). +Further, there are different types of NoSQL databases that are discussed in the following sub-sections:

+
+
+
+

Java

+ +
+
+

== Column DB

+
+

Column NoSQL databases are more related to a regular RDBMS with their tables and columns. +However, they typically do not offer relational support with joins to the same level as you expect from an RDBMS. +Therefore, you have to carefully design your data-model upfront with all the knowledge of how you later want to query your data.

+
+
+

The most prominent options are:

+
+
+
    +
  • +

    Cassandra (high-performance, schema-based DB)

    +
  • +
  • +

    HBase (distributed, big-data Hadoop database)

    +
  • +
+
+
+
+

== Key-Value DB

+
+

As indicated by the name, a key-value database stores objects as key/value pairs similar to Properties or Map in Java.

+
+
+

The most prominent options are:

+
+
+
    +
  • +

    Redis (in-memory key/value store, especially used as cache or message broker)

    +
  • +
  • +

    aerospike

    +
  • +
+
+
+
+

== Document DB

+
+

A document database is similar to a key-value database, but it stores objects in standard structured formats such as XML, JSON, or BSON. +Therefore not only flat key/value pairs but even trees of hierarchical data can be stored, retrieved and queried.

+
+
+

The most prominent options are:

+
+
+ +
+
+
+

== Graph DB

+
+

If the connections (links/relations) between your data is key and an RDBMS is just not flexible or fast enough for your plans, then a graph database can help you. +They are very strong on storing and querying complex connections between entities. +For queries there are even specific standards and languages like Gremlin.

+
+
+

The most prominent options are:

+
+
+ +
+
+
+

== Hybrid DB

+
+

In addition to the above types there are some NoSQL databases that are hybrid and combine the features and aspects of these types. +While as an architect and developer you might love the idea to get all in one, you have to be careful with your choice. +If you do not exactly know your problem, you are not ready to make the right choice for your database. +Further, you might still be best-off with a good old RDBMS if you need to address multiple aspects together. +Anyhow, for experiments, PoCs, or small microservices with little risk it might be a great idea to choose a hybrid NoSQL database. +If you have collected very positive, profound and productive experience with such a product you can grow on it.

+
+
+

The most prominent options are:

+
+
+
    +
  • +

    OrientDB (object-oriented, hyper-flexible, column- and graph-based)

    +
  • +
+
+ +
+
+

SAP HANA

+
+

This section contains hints for those who use SAP HANA, a very powerful and fast RDBMS. Besides general hints about the driver there are tips for more tight integration with other SAP features or products.

+
+
+
+

Java

+ +
+
+

Driver

+
+

SAP Hana is a commercial and professional product. +However, the hana JDBC driver is available in Maven Central, which makes it easy to integrate. +All you need is the following maven dependency:

+
+
+
+
<dependency>
+  <groupId>com.sap.cloud.db.jdbc</groupId>
+  <artifactId>ngdbc</artifactId>
+  <version>${hana.driver.version}</version>
+</dependency>
+
+
+
+

Of course the version (${hana.driver.version}) needs to be adapted to your needs (Hana installation in production, e.g. 2.4.64). +For an overview of available driver versions see here.

+
+
+
+

Developer Usage

+
+

For your local development environment you will love the free SAP HANA, Express Edition.

+
+
+

You can run HANA in several ways:

+
+
+ +
+
+

To get started with SAP HANA, Express Edition you can check out the tutorials at the SAP Developer Center.

+
+
+ + +
+

Oracle RDBMS

+
+

This section contains hints for those who use Oracle RDBMS. Besides general hints about the driver there are tips for more tight integration with other Oracle features or products. However, if you work for a project where Oracle RDBMS is settled and not going to be replaced (you are in a vendor lock-in anyway), you might want to use even more from Oracle technology to take advantage from a closer integration.

+
+
+
+

Java

+ +
+
+

XE

+
+

For local development you should setup Oracle XE (eXpress Edition). +You need an oracle account, then you can download it from here.

+
+
+

The most comfortable way to run it as needed is using docker. You can build your own docker image from the downloaded RPM using the instructions and dockerfile from oracle. The following commands will build and start Oracle XE 18.4.0 on your machine:

+
+
+
+
git clone https://github.com/oracle/docker-images.git
+cd docker-images/OracleDatabase/SingleInstance/dockerfiles
+./buildDockerImage.sh -x -v 18.4.0
+docker run -d -p 1521:1521 --name=oracle-xe --restart=always -e ORACLE_PWD=«my-sys-pwd» oracle/database:18.4.0-xe
+
+
+
+

Please note that the buildDockerImage.sh will take a long time. Further, after docker run has passed you need to give time for your new container to start up and set up the Oracle XE DB. So be patient and give it some time. +(In case the build of the docker-image fails reproducibly and you want to give up with the Dockerfiles from Oracle you can also try this unofficial docker-oracle-xe solution. However, this is not recommended and may lead to other problems.).

+
+
+

Starting with XE 18c you need to be aware that oracle introduced a multi-tenant architecture. Hence xe refers to the root CDB while you typically want to connect to the PDB (pluggable database) and XE ships with exactly one of these, called xepdb1. +To connect to your local XE database you need to use xepdb1 as the Service Name (typically in SQL Developer). The hostname should be localhost and the port is by default 1521 if you did not remap it with docker to something else. +In order to create schema users, use sys as Username and change Role to SYSDBA.

+
+
+

Hint: If you happen to end up connected to xe instead of xepdb1 in some case (e.g. in sqlplus), you may switch using this statement:

+
+
+
+
ALTER SESSION SET CONTAINER = XEPDB1;
+
+
+
+

The JDBC URL for your Oracle XE Database is:

+
+
+
+
jdbc:oracle:thin:@//localhost:1521/xepdb1
+
+
+
+

To locally connect as sysdba without password use the following command (connect / as sysdba is not working anymore):

+
+
+
+
sqlplus sys/Oracle18@localhost/XE as sysdba
+
+
+
+
+

Driver

+
+

The oracle JDBC driver is available in maven central. +Oracle JDBC drivers usually are backward and forward compatible so you should be able to use an older driver with a newer Oracle DB, etc. +Your dependency for the oracle driver should look as follows:

+
+
+
+
<dependency>
+  <groupId>com.oracle.database.jdbc</groupId>
+  <artifactId>ojdbc10</artifactId>
+  <version>${oracle.driver.version}</version>
+</dependency>
+
+
+
+

For the most recent Oracle DB 19 the property oracle.driver.version should be 19.8.0.0. The number in the artifactId correlates to the minimum Java Version so for Java8 artifactId should be ojdbc8 instead. It is fine to use ojdbc10 with Java11 or higher.

+
+
+
+

Pooling

+
+

In order to boost performance JDBC connections should be pooled and reused. If you are using Oracle RDBMS and do not plan to change that you can use the Oracle specific connection pool "Universal Connection Pool (UCP)" that is perfectly integrated with the Oracle driver. According to the documentation, UCP can even be used to manage third party data sources. +Like the JDBC driver also the UCP is available in maven central. The dependency should look like this:

+
+
+
+
<dependency>
+  <groupId>com.oracle.database.jdbc</groupId>
+  <artifactId>ucp</artifactId>
+  <version>${oracle.ucp.version}</version>
+</dependency>
+
+
+
+

with property oracle.ucp.version analogue to oracle.driver.version.

+
+
+

Configuration is done via application.properties like this (example):

+
+
+
+
#Oracle UCP
+##Datasource for accessing the database
+spring.datasource.url=jdbc:oracle:thin:@192.168.58.2:1521:xe
+spring.jpa.database-platform=org.hibernate.dialect.Oracle12cDialect
+spring.datasource.user=MyUser
+spring.datasource.password=ThisIsMyPassword
+spring.datasource.driver-class-name=oracle.jdbc.OracleDriver
+spring.datasource.schema=MySchema
+
+spring.datasource.type=oracle.ucp.jdbc.PoolDataSourceImpl
+spring.datasource.factory=oracle.ucp.jdbc.PoolDataSourceFactory
+spring.datasource.factory-method=getPoolDataSource
+spring.datasource.connectionFactoryClassName=oracle.jdbc.pool.OracleDataSource
+spring.datasource.validateConnectionOnBorrow=true
+spring.datasource.connectionPoolName=MyPool
+spring.datasource.jmx-enabled=true
+
+##Optional: Set the log level to INTERNAL_ERROR, SEVERE, WARNING, INFO, CONFIG, FINE, TRACE_10, FINER, TRACE_20, TRACE_30, or FINEST
+##logging.level.oracle.ucp=INTERNAL_ERROR
+##Optional: activate tracing
+##logging.level.oracle.ucp.jdbc.oracle.OracleUniversalPooledConnection=TRACE
+
+#Optional: Configures pool size manually
+#spring.datasource.minPoolSize=10
+#spring.datasource.maxPoolSize=40
+#spring.datasource.initialPoolSize=20
+
+
+
+

Resources: FAQ, developer’s guide, Java API Reference. For an in-depth discussion on how to use JDBC and UCP, see the Oracle documentation Connection Management Strategies for Java Applications using JDBC and UCP.

+
+
+

Note: there is a bug in UCP 12.1.0.2 that results in the creation of thousands of java.lang.Timer threads over hours or days of system uptime (see article on stackoverflow). Also, Oracle has a strange bug fixing / patching policy: instead of producing a fixed version 12.1.0.3 or 12.1.0.2.x, Oracle publishes collections of *.class files that must be manually patched into the ucp.jar! Therefore, use the newest versions only.

+
+
+
+

Messaging

+
+

In case you want to do messaging based on JMS you might consider the Oracle JMS also called Oracle Streams Advanced Queuing, or Oracle Advanced Queuing, or OAQ or AQ for short. OAQ is a JMS provider based on the Oracle RDBMS and included in the DB product for no extra fee. OAQ has some features that exceed the JMS standard like a retention time (i.e. a built-in backup mechanism that allows to make messages "unread" within a configurable period of time so that these messages do not have to be resent by the sending application). Also, OAQ messages are stored in relational tables so they can easily be observed by a test driver in a system test scenario. +Capgemini has used the Spring Data JDBC Extension in order to process OAQ messages within the same technical transaction as the resulting Oracle RDBMS data changes without using 2PC and an XA-compliant transaction manager - which is not available out of the box in Tomcat. This is possible only due to the fact that OAQ queues and RDBMS tables actually reside in the same database. However, this is higher magic and should only be tried if high transaction rates must be achieved by avoiding 2PC.

+
+
+
+

General Notes on the use of Oracle products

+
+

Oracle sells commercial products and receives licence fees for them. This includes access to a support organization. Therefore, at an early stage of your project, prepare for contacting oracle support in case of technical problems. You will need the Oracle support ID of your customer [i.e. the legal entity who pays the licence fee and runs the RDBMS] and your customer must grant you permission to use it in a service request - it is not legal to use a your own support ID in a customer-related project. Your customer pays for that service anyway, so use it in case of a problem!

+
+
+

Software components like the JDBC driver or the UCP may be available without a registration or fee but they are protected by the Oracle Technology Network (OTN) License Agreement. The most important aspect of this licence agreement is the fact that an IT service provider is not allowed to simply download the Oracle software component, bundle it in a software artefact and deliver it to the customer. Instead, the Oracle software component must be (from a legal point of view) provided by the owner of the Oracle DB licence (i.e. your customer). This can be achieved in two ways: Advise your customer to install the Oracle software component in the application server as a library that can be used by your custom built system. Or, in cases where this is not feasible, e.g. in a OpenShift environment where the IT service provider delivers complete Docker images, you must advise your customer to (legally, i.e. documented in a written form) provide the Oracle software component to you, i.e. you don’t download the software component from the Oracle site but receive it from your customer.

+
+
+
+

Fix for TNS-Listener issues

+
+

When switching networks (e.g. due to VPN) you might find that your local Oracle XE stops working with this error:

+
+
+
+
Listener refused the connection with the following error:
+ORA-12505, TNS:listener does not currently know of SID given in connect descriptor
+
+
+
+

While a reboot resolves this problem, it is a huge pain to reboot every time this error occurs as this wastes a lot of time. +Therefore we suggest the following fix:

+
+
+
    +
  • +

    Go to your oracle installation and open the folder product/«version»/dbhomeXE/network/admin.

    +
  • +
  • +

    Edit the file listener.ora and change the value of the property HOST from your qualified hostname to localhost (HOST = localhost).

    +
  • +
  • +

    Edit the file tnsnames.ora and change the value of the HOST properties (two occurrences) from your qualified hostname to localhost (HOST = localhost).

    +
  • +
  • +

    Reboot your machine or (on windows) restart the service OracleServiceXE via services.msc.

    +
  • +
  • +

    Now this problem should be gone forever and you can continue your work.

    +
  • +
+
+
+

On older XE versions until 11g you could run the following SQL (sqlplus / as sysdba @reset_tns_listener.sql):

+
+
+
+
WHENEVER SQLERROR EXIT;
+ALTER SYSTEM SET local_listener = '(ADDRESS = (PROTOCOL = TCP)(HOST = 127.0.0.1)(PORT = 1521))';
+ALTER SYSTEM REGISTER;
+EXIT;
+
+
+ +
+
+

MS-SQL-Server

+
+

This section gives guidance and hints for those who use Microsoft SQL Server as RDBMS.

+
+
+
+

Java

+ +
+
+

Driver

+
+

Microsoft SQL Server is a commercial and professional product. +However, the JDBC driver is MIT licensed and available in Maven Central, which makes it easy to integrate. +Your dependency for the driver should look as follows:

+
+
+
+
<dependency>
+    <groupId>com.microsoft.sqlserver</groupId>
+    <artifactId>mssql-jdbc</artifactId>
+    <version>${mssqlserver.driver.version}</version>
+</dependency>
+
+
+
+

Of course the version (${mssqlserver.driver.version}) needs to be adapted to your needs (SQL Server installation in production and JDK version, e.g. 7.4.1.jre8). +For an overview of available driver versions see here.

+
+ +
+
+

PostgreSQL

+
+

This section gives guidance and hints for those who use PostgreSQL as RDBMS.

+
+
+
+

Java

+ +
+
+

Driver

+
+

PostgreSQL is fully open-source. The driver is therefore available in maven central. +Your dependency for the driver should look as follows:

+
+
+
+
<dependency>
+    <groupId>postgresql</groupId>
+    <artifactId>postgresql</artifactId>
+    <version>${postgresql.driver.version}</version>
+</dependency>
+
+
+
+

Of course the version (${postgresql.driver.version}) needs to be adapted to your needs (PostgreSQL installation in production and JDBC level suitable for your JDK, e.g. 9.1-901-1.jdbc4). +For an overview of available driver versions see here.

+
+ +
+
+

MariaDB

+
+

This section gives guidance and hints for those who use MariaDB as RDBMS.

+
+
+
+

Java

+ +
+
+

Driver

+
+

MariaDB is fully open-source. The driver is therefore available in maven central. +Your dependency for the driver should look as follows:

+
+
+
+
<dependency>
+    <groupId>org.mariadb.jdbc</groupId>
+    <artifactId>mariadb-java-client</artifactId>
+    <version>${mariadb.driver.version}</version>
+</dependency>
+
+
+
+

Of course the version (${mariadb.driver.version}) needs to be adapted to your needs (MariaDB installation in production and JDK version, e.g. 2.5.1). +For an overview of available driver versions see here.

+
+ +
+
+

Database

+
+

For your business application with devonfw you need to choose the right database. +In devonfw we are not biased for a particular product so you have the freedom of choice.

+
+
+
+

RDBMS

+
+

The classical and well-established form of a database is a relational database management system (RDBMS). +In devonfw we recommend using an RDBMS unless you have a specific need. +However, in case you have the need for big data, graph-data, BLOB focus, or schema-less dynamic data you can have a look at NoSQL options but be aware that these may be experimental and are not fully supported by devonfw.

+
+
+
+

Options

+
+

In devonfw we are not biased for a particular RDBMS so you have the freedom of choice. +Here are the most common options:

+
+
+
    +
  • +

    SAP Hana (high performance in-memory, many advanced features)

    +
  • +
  • +

    Oracle (most established, well featured for enterprise)

    +
  • +
  • +

    PostgreSQL (great open-source RDBMS)

    +
  • +
  • +

    MariaDB (true OSS successor of MySQL)

    +
  • +
  • +

    MS SQL Server (best choice for Microsoft and Windows dominated IT landscapes)

    +
  • +
+
+
+

Please click on any of the above choices and go to the corresponding guide to find specific details such as client/driver.

+
+
+
+

NoSQL

+
+

While not (yet) officially supported and recommended, there are also interesting NoSQL (Not Only SQL) databases that could be an attractive alternative. Please be aware that you will typically not be able to use JPA (and Hibernate). Further, before choosing a NoSQL database you should check the following aspects:

+
+
+
    +
  • +

    Is the database of choice reliable and mature enough for your project?

    +
  • +
  • +

    Can the operators of your product support the database of choice properly (provisioning, administration, backup, scaling & clustering, monitoring, etc.)?

    +
  • +
  • +

    Does the database of choice meet the requirements of your project (ACID vs. eventual consistency, CAP theorem)?

    +
  • +
+
+
+

There are good reasons to choose a particular NoSQL database in specific cases (e.g. extreme demand for big-data, throughput or scaling). +But as indicated by the questions above you need to be fully aware of what you are doing. +NoSQL databases can be schemaless (untyped, dynamic & flexible) and/or schemaful (typed, structured & strict). +Further, there are different types of NoSQL databases that are discussed in the following sub-sections:

+
+
+
+

Java

+ +
+
+

== Column DB

+
+

Column NoSQL databases are more related to a regular RDBMS with their tables and columns. +However, they typically do not offer relational support with joins to the same level as you expect from an RDBMS. +Therefore, you have to carefully design your data-model upfront with all the knowledge of how you later want to query your data.

+
+
+

The most prominent options are:

+
+
+
    +
  • +

    Cassandra (high-performance, schema-based DB)

    +
  • +
  • +

    HBase (distributed, big-data Hadoop database)

    +
  • +
+
+
+
+

== Key-Value DB

+
+

As indicated by the name, a key-value database stores objects as key/value pairs similar to Properties or Map in Java.

+
+
+

The most prominent options are:

+
+
+
    +
  • +

    Redis (in-memory key/value store, especially used as cache or message broker)

    +
  • +
  • +

    aerospike

    +
  • +
+
+
+
+

== Document DB

+
+

A document database is similar to a key-value database, but it stores objects in standard structured formats such as XML, JSON, or BSON. +Therefore not only flat key/value pairs but even trees of hierarchical data can be stored, retrieved and queried.

+
+
+

The most prominent options are:

+
+
+ +
+
+
+

== Graph DB

+
+

If the connections (links/relations) between your data is key and an RDBMS is just not flexible or fast enough for your plans, then a graph database can help you. +They are very strong on storing and querying complex connections between entities. +For queries there are even specific standards and languages like Gremlin.

+
+
+

The most prominent options are:

+
+
+ +
+
+
+

== Hybrid DB

+
+

In addition to the above types there are some NoSQL databases that are hybrid and combine the features and aspects of these types. +While as an architect and developer you might love the idea to get all in one, you have to be careful with your choice. +If you do not exactly know your problem, you are not ready to make the right choice for your database. +Further, you might still be best-off with a good old RDBMS if you need to address multiple aspects together. +Anyhow, for experiments, PoCs, or small microservices with little risk it might be a great idea to choose a hybrid NoSQL database. +If you have collected very positive, profound and productive experience with such a product you can grow on it.

+
+
+

The most prominent options are:

+
+
+
    +
  • +

    OrientDB (object-oriented, hyper-flexible, column- and graph-based)

    +
  • +
+
+ +
+
+

Cassandra

+
+

This section is the place to share experience for those who use Cassandra as NoSQL database.

+
+
+
+

Java

+ +
+
+

Attention

+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+

Driver

+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see here.

+
+
+
+

Spring-Data

+
+

There is spring-data support available for cassandra via spring-data-cassandra.

+
+
+ + + + + +
+ + +Please note that some time ago we had feedback from projects that had issues with spring-data-cassandra and switched back to using the driver natively. We assume the issues are meanwhile resolved. TODO: collect more feedback and update this guide. +
+
+ +
+
+

neo4j

+
+

This section is the place to share experience for those who use neo4j as NoSQL database.

+
+
+
+

Java

+ +
+
+

Attention

+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+

Driver

+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see here.

+
+
+
+

Spring-Data

+
+

There is spring-data integration available via spring-data-neo4j.

+
+ +
+
+

MongoDB

+
+

This section is the place to share experience for those who use MongoDB as NoSQL database.

+
+
+
+

Java

+ +
+
+

Attention

+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+

Driver

+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see here.

+
+ +
+
+

CouchDB

+
+

This section is the place to share experience for those who use CouchDB as NoSQL database.

+
+
+
+

Java

+ +
+
+

Attention

+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+

Driver

+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see here.

+
+ +
+
+

Redis

+
+

This section is the place to share experience for those who use Redis as NoSQL database.

+
+
+
+

Java

+ +
+
+

Attention

+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+

Driver

+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see here.

+
+ +
+
+

OrientDB

+
+

This section is the place to share experience for those who use OrientDB (see also Open-Source community edition) as NoSQL database.

+
+
+
+

Java

+ +
+
+

Attention

+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+

Driver

+
+

For driver options see here.

+
+
+
+

Administration

+
+

OrientDB comes with a powerful, impressive admin interface for your web-browser called Studio.

+
+ +
+
+

Blazegraph

+
+

This section is the place to share experience for those who use Blazegraph as NoSQL database.

+
+
+
+

Java

+ +
+
+

Attention

+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+

Driver

+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see here.

+
+ +
+
+

HBase

+
+

This section is the place to share experience for those who use HBase as NoSQL database.

+
+
+
+

Java

+ +
+
+

Attention

+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+

Driver

+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see here and +hbase-java-api tutorial.

+
+ +
+
+

RavenDB

+
+

This section is the place to share experience for those who use RavenDB as NoSQL database.

+
+
+
+

Java

+ +
+
+

Attention

+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+

Driver

+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see ravendb-jvm-client and Java Client Features.

+
+ +
+
+

GigaSpaces XAP (Smart Cache)

+
+

This section is the place to share experience for those who use GigaSpaces XAP as NoSQL database.

+
+
+
+

Java

+ +
+
+

Attention

+
+ + + + + +
+ + +A sample for GigaSpaces integration has been contributed from a graduate work, which will be described here. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+

Possible Approach

+
+

GigaSpaces is currently not in the central maven repository, therefore an additional repository needs to be added along with the dependency:

+
+
+
+
<repositories>
+    <repository>
+	<id>org.openspaces</id>
+	<url>http://maven-repository.openspaces.org</url>
+    </repository>
+</repositories>
+
+<dependency>
+    <groupId>org.gigaspaces</groupId>
+    <artifactId>xap-openspaces</artifactId>
+    <version>${gsVersion}</version>
+</dependency>
+
+
+
+

Of course the version (${gsVersion}) needs to be adopted to your needs.

+
+
+
+
@Configuration
+public class ContextConfiguration {
+  @Bean
+  public GigaSpace space() {
+    UrlSpaceConfigurer urlSpaceConfigurer = new UrlSpaceConfigurer("jini://*/*/my-space");
+    return new GigaSpaceConfigurer(urlSpaceConfigurer).gigaSpace();
+  }
+}
+
+
+
+

To establish a connection with a running instance of GigaSpaces, a Configuration Class is required. Here a Bean will be created that retrieves via URL the name of a Space e.g. my-space (a Space is equivalent to a Database Schema). Of course a Space needs to be firstly created in order to use it (see also the Example). This bean can be used for all database typical operations e.g. create, read, update and delete data (a complete list can be found here). Another possibility to execute those operations is via spring-data (see section below). The spring-data-gigaspaces automatically detects if a GigaSpaces Bean exists.

+
+
+
+

Spring-Data

+
+

There is spring-data support available for GigaSpaces XAP (Smart Cache) via spring-data-gigaspaces.

+
+
+
+

Example

+
+

There is an implementation of the sample application, My Thai Star, using GigaSpaces XAP (Smart Cache) as data storage. More details can be found on mts-gigaspaces.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/master-contributing.html b/docs/devonfw.github.io/1.0/general/master-contributing.html new file mode 100644 index 00000000..e416d3f6 --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/master-contributing.html @@ -0,0 +1,632 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Contributing

+
+
+

Unresolved include directive in modules/ROOT/pages/general/master-contributing.adoc - include::../devonfw-github/CONTRIBUTING.adoc[]

+
+
+

Unresolved include directive in modules/ROOT/pages/general/master-contributing.adoc - include::../devonfw-github/CODE_OF_CONDUCT.adoc[]

+
+ +
+

OSS Compliance

+
+

This chapter helps you to gain transparency on OSS usage and reach OSS compliance in your project.

+
+
+
+

Preface

+
+

devonfw, as most Java software, makes strong use of Open Source Software (OSS). It is using about 150 OSS products on the server only and on the client even more. Using a platform like devonfw to develop your own custom solution requires handling contained OSS correctly, i.e. acting OSS-compliant.

+
+
+

Please read the Open Source policy of your company first, e.g. the Capgemini OSS Policy which contains a short, comprehensive and well written explanation on relevant OSS-knowledge. Make sure you:

+
+
+
    +
  • +

    understand the copyleft effect and its effect in commercial projects

    +
  • +
  • +

    understand the 3 license categories: "permissive", "weak copyleft" and "strong copyleft"

    +
  • +
  • +

    know prominent license types as e.g. "Apache-2.0" or "GPL-3.0" and what copyleft-category they are in

    +
  • +
  • +

    are aware that some OSS offer dual/multi-licenses

    +
  • +
  • +

    Understand that OSS libraries often come with sub-dependencies of other OSS carrying licenses themselves

    +
  • +
+
+
+

To define sufficient OSS compliance measures, contact your IP officer or legal team as early as possible, especially if you develop software for clients.

+
+
+
+

Obligations when using OSS

+
+

If you create a custom solution containing OSS, this in legal sense is a "derived" work. If you distribute your derived work to your business client or any other legal entity in binary packaged form, the license obligations of contained OSS get into effect. Ignoring these leads to a license infringement which can create high damage.

+
+
+

To carefully handle these obligations you must:

+
+
+
    +
  • +

    maintain an OSS inventory (to gain transparency on OSS usage and used licenses)

    +
  • +
  • +

    check license conformity depending on usage/distribution in a commercial scenario

    +
  • +
  • +

    check license compatibility between used OSS-licenses

    +
  • +
  • +

    fulfill obligations defined by the OSS-licenses

    +
  • +
+
+
+

Obligations need to be checked per license. Frequent obligations are:

+
+
+
    +
  • +

    deliver the license terms of all used versions of the OSS licenses

    +
  • +
  • +

    not to change any copyright statements or warranty exclusions contained in the used OSS components

    +
  • +
  • +

    deliver the source code of the OSS components (e.g. on a data carrier)

    +
  • +
  • +

    when modifying OSS, track any source code modification (including date and name of the employee/company)

    +
  • +
  • +

    display OSS license notice in a user frontend (if any)

    +
  • +
  • +

    other obligations depending on individual license

    +
  • +
+
+
+
+

Automate OSS handling

+
+

Carefully judging the OSS usage in your project is a MANUAL activity! However, collecting OSS information and fulfilling license obligations should be automated as much as possible. A prominent professional tool to automate OSS compliance is the commercial software "Black Duck". Unfortunately it is rather expensive - either purchased or used as SaaS.

+
+
+

The most recommended lightweight tooling is a combination of Maven plugins. We will mainly use the Mojo Maven License Plugin.

+
+
+
+

Configure the Mojo Maven License Plugin

+
+

You can use it from command line but this will limit the ability to sustainably configure it (shown later). +Therefore we add it permanently as a build-plugin to the project parent-pom like this:

+
+
+
+
<plugin>
+  <groupId>org.codehaus.mojo</groupId>
+  <artifactId>license-maven-plugin</artifactId>
+  <version>1.14</version>
+
+  <configuration>
+    <outputDirectory>${project.build.directory}/generated-resources</outputDirectory>
+    <sortArtifactByName>true</sortArtifactByName>
+    <includeTransitiveDependencies>true</includeTransitiveDependencies>
+    <!-- the "missing file" declares licenses for dependencies that could not be detected automatically -->
+    <useMissingFile>true</useMissingFile>
+    <!-- find the "missing files" in all child-projects at the following location -->
+    <missingFile>src/license/THIRD-PARTY.properties</missingFile>
+    <!-- if the "missing files" are not yet existing in child-projects they will be created automatically -->
+    <failOnMissing>false</failOnMissing>
+    <overrideFile>src/license/override-THIRD-PARTY.properties</overrideFile>
+    <!-- harmonize different ways of writing license names -->
+    <licenseMerges>
+      <licenseMerge>Apache-2.0|Apache 2.0</licenseMerge>
+      <licenseMerge>Apache-2.0|Apache License, Version 2.0</licenseMerge>
+      <licenseMerge>Apache-2.0|Apache Software License, Version 2.0</licenseMerge>
+      <licenseMerge>Apache-2.0|The Apache Software License, Version 2.0</licenseMerge>
+    </licenseMerges>
+    <encoding>utf-8</encoding>
+  </configuration>
+</plugin>
+
+
+
+

In the config above there are several settings that help to permanently improve the result of an automated OSS scan. We explain these now.

+
+
+
+

Declare additional licenses

+
+

Sometimes the licenses of used OSS cannot be resolved automatically. That is not the mistake of the maven-license-tool, but the mistake of the OSS author who didn’t make the respective license-information properly available.

+
+
+

Declare additional licenses in a "missing file" within each maven-subproject: /src/license/THIRD-PARTY.properties.

+
+
+
+
##Generated by org.codehaus.mojo.license.AddThirdPartyMojo
+#-------------------------------------------------------------------------------
+##Already used licenses in project :
+##- ASF 2.0
+##- Apache 2
+...
+#-------------------------------------------------------------------------------
+##Please fill the missing licenses for dependencies :
+...
+dom4j--dom4j--1.6.1=BSD 3-Clause
+javax.servlet--jstl--1.2=CDDL
+...
+
+
+
+

In case the use of "missing files" is activated, but the THIRD-PARTY.properties-file is not yet existing, the first run of an "aggregate-add-third-party" goal (see below) will fail. Luckily the license-plugin just helped us and created the properties-files automatically (in each maven-subproject) and prefilled it with:

+
+
+
    +
  • +

    a list of all detected licenses within the maven project

    +
  • +
  • +

    all OSS libraries where a license could not be detected automatically.

    +
  • +
+
+
+

You now need to fill in missing license information and rerun the plugin.

+
+
+
+

Redefine wrongly detected licenses

+
+

In case automatically detected licenses prove to be wrong by closer investigation, this wrong detection can be overwritten. Add a configuration to declare alternative licenses within each maven-subproject: /src/license/override-THIRD-PARTY.properties

+
+
+
+
com.sun.mail--javax.mail--1.5.6=Common Development and Distribution License 1.1
+
+
+
+

This can also be useful for OSS that provides a multi-license to make a decision which license to actually choose.

+
+
+
+

Merge licenses

+
+

You will see that many prominent licenses come in all sorts of notations, e.g. Apache-2.0 as: "Apache 2" or "ASL-2.0" or "The Apache License, Version 2.0". The Mojo Maven License Plugin allows to harmonize different forms of a license-naming like this:

+
+
+
+
    <!-- harmonize different ways of writing license names -->
+    <licenseMerges>
+      <licenseMerge>Apache-2.0|Apache 2.0</licenseMerge>
+      <licenseMerge>Apache-2.0|Apache License, Version 2.0</licenseMerge>
+      <licenseMerge>Apache-2.0|Apache Software License, Version 2.0</licenseMerge>
+      <licenseMerge>Apache-2.0|The Apache Software License, Version 2.0</licenseMerge>
+    </licenseMerges>
+
+
+
+

License-names will be harmonized in the OSS report to one common term. We propose to harmonize to short-license-IDs defined by the SPDX standard.

+
+
+
+

Retrieve licenses list

+
+

For a quick initial judgement of OSS license situation run the following maven command from command line:

+
+
+
+
$ mvn license:license-list
+
+
+
+

You receive the summary list of all used OSS licenses on the cmd-out.

+
+
+
+

Create an OSS inventory

+
+

To create an OSS inventory means to report on the overall bill of material of used OSS and corresponding licenses. +Within the parent project, run the following maven goal from command line.

+
+
+
+
$ mvn license:aggregate-download-licenses -Dlicense.excludedScopes=test,provided
+
+
+
+

Running the aggregate-download-licenses goal creates two results.

+
+
+
    +
  1. +

    a license.xml that contains all used OSS dependencies (even sub-dependencies) with respective license information

    +
  2. +
  3. +

    puts all used OSS-license-texts as html files into folder target/generated-resources

    +
  4. +
+
+
+

Carefully validate and judge the outcome of the license list. It is recommended to copy the license.xml to the project documentation and hand it over to your client. You may also import it into a spreadsheet to get a better overview.

+
+
+
+

Create a THIRD PARTY file

+
+

Within Java software it is a common practice to add a "THIRD-PARTY" text file to the distribution. Contained is a summary-list of all used OSS and respective licenses. This can also be achieved with the Mojo Maven License Plugin.

+
+
+

Within the parent project, run the following maven goal from command line.

+
+
+
+
$ mvn license:aggregate-add-third-party -Dlicense.excludedScopes=test,provided
+
+
+
+

Find the THIRD-PARTY.txt in the folder: target\generated-resources. The goal aggregate-add-third-party also profits from configuration as outlined above.

+
+
+
+

Download and package OSS SourceCode

+
+

Some OSS licenses require handing over the OSS source code which is packaged with your custom software to the client the solution is distributed to. It is a good practice to hand over the source code of all used OSS to your client. Collecting all source code can be accomplished by another Maven plugin: Apache Maven Dependency Plugin.

+
+
+

It downloads all OSS Source Jars into the folder: \target\sources across the parent and all child maven projects.

+
+
+

You configure the plugin like this:

+
+
+
+
<plugin>
+  <groupId>org.apache.maven.plugins</groupId>
+  <artifactId>maven-dependency-plugin</artifactId>
+  <version>3.0.2</version>
+
+  <configuration>
+    <classifier>sources</classifier>
+    <failOnMissingClassifierArtifact>false</failOnMissingClassifierArtifact>
+    <outputDirectory>${project.build.directory}/sources</outputDirectory>
+  </configuration>
+  <executions>
+    <execution>
+      <id>src-dependencies</id>
+      <phase>package</phase>
+      <goals>
+        <!-- use unpack-dependencies instead if you want to explode the sources -->
+        <goal>copy-dependencies</goal>
+      </goals>
+    </execution>
+  </executions>
+</plugin>
+
+
+
+

You run the plugin from command line like this:

+
+
+
+
$ mvn dependency:copy-dependencies -Dclassifier=sources
+
+
+
+

The plugin provides another goal that also unzips the jars, which is not recommended, since contents get mixed up.

+
+
+

Deliver the OSS source jars to your client with the release of your custom solution. This has been done physically - e.g. on DVD.

+
+
+
+

Handle OSS within CI-process

+
+

To automate OSS handling in the regular build-process (which is not recommended to start with) you may declare the following executions and goals in your maven-configuration:

+
+
+
+
<plugin>
+  ...
+
+  <executions>
+    <execution>
+      <id>aggregate-add-third-party</id>
+      <phase>generate-resources</phase>
+      <goals>
+        <goal>aggregate-add-third-party</goal>
+      </goals>
+    </execution>
+
+    <execution>
+      <id>aggregate-download-licenses</id>
+      <phase>generate-resources</phase>
+      <goals>
+        <goal>aggregate-download-licenses</goal>
+      </goals>
+    </execution>
+  </executions>
+</plugin>
+
+
+
+

Note that the build may fail in case the OSS information was not complete. Check the build-output to understand and resolve the issue - like e.g. add missing license information in the "missing file".

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/master-release-notes.html b/docs/devonfw.github.io/1.0/general/master-release-notes.html new file mode 100644 index 00000000..d9500d05 --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/master-release-notes.html @@ -0,0 +1,6113 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Release Notes

+
+ +
+

devonfw Release notes 2021.04

+ +
+
+

Introduction

+
+

We are proud to announce the release of devonfw version 2021.04.

+
+
+

This release includes lots of addition of new features, updates and bug fixes but it is very important to highlight the following improvements:

+
+
+
+

devonfw IDE

+
+

The consolidated list of features for this devonfw IDE release is as it follows.

+
+
+
+

2021.04.001

+
+

Update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #537: Update eclipse to 2021-03

    +
  • +
  • +

    #287: Command autocompletion

    +
  • +
  • +

    #536: Improve handling of aborted downloads

    +
  • +
  • +

    #542: Support placeholders in settings.xml template

    +
  • +
  • +

    #557: minimize setup by reducing DEVON_IDE_TOOLS

    +
  • +
  • +

    #550: update maven to 3.8.1

    +
  • +
  • +

    #545: update devon4j to 2021.04.002 and add migration

    +
  • +
  • +

    #575: jasypt support for password encryption and decryption

    +
  • +
  • +

    #546: Problems with tm-terminal Eclipse plugin

    +
  • +
  • +

    #553: VSCode user-data-dir shall be part of workspace config

    +
  • +
  • +

    #513: Configurable generation of IDE start scripts

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2021.04.001.

+
+
+
+

devon4j

+
+

The consolidated list of features for this devon4j release is as it follows.

+
+
+
+

2021.04.002

+
+

Bugfix release with the following stories: +* #389: archetype build broken with ci-friendly-maven +* #391: jasypt documentation improvements +* #387: rebuild and updated diagram with drawio

+
+
+

Documentation is available at devon4j guide 2021.04.002. +The full list of changes for this release can be found in milestone devon4j 2021.04.002.

+
+
+
+

2021.04.001

+
+

New release of devon4j with fixes, updates and improvements:

+
+
+
    +
  • +

    #370: Minor updates (spring-boot 2.4.4, jackson 2.12.2, CXF 3.4.3, etc.)

    +
  • +
  • +

    #366: BaseTest.isInitialSetup() broken

    +
  • +
  • +

    #85: ci-friendly-maven also for archetype

    +
  • +
  • +

    #373: CORS starter not part of devon4j release

    +
  • +
  • +

    #164: Flattened pom for core project invalid

    +
  • +
  • +

    #323: Add spring integration test to archetype

    +
  • +
  • +

    #351: improved error handling of service client

    +
  • +
  • +

    #71: improve documentation for strong password encryption

    +
  • +
  • +

    #354: JMS senders should not be part of data access layer, but logical layer

    +
  • +
  • +

    #377: updated T-Architecture

    +
  • +
  • +

    #294: integrate sonarcloud analysis into devon4j CI pipeline

    +
  • +
+
+
+

Documentation is available at devon4j guide 2021.04.001. +The full list of changes for this release can be found in milestone devon4j 2021.04.001.

+
+
+
+

devon4ng

+
+

Updated template and samples to Angular 11. +Updated guide of devon4ng.

+
+
+
+

MrChecker

+
+

MrChecker Test Framework is an end to end test automation framework written in Java. It is an automated testing framework for functional testing of web applications, API web services, Service Virtualization, Security, native mobile apps and, in the near future, databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions. Two new modules are added to MrChecker:

+
+
+
    +
  • +

    DB Module - we have created a module intended to make testing efforts on DBs easier. It is founded on JPA in conjunction with Hibernate and therefore supports both high level, object based access to DB entities via the IDao interface and low level, native SQL commands via the EntityManager class .

    +
  • +
  • +

    CLI Module - we have created a module intended to make testing command line applications like compilers or batches easier and faster. Huge success here is that, team using this solution was able to prepare a test suite, without app provided, basing only on documentation and using mocking technique.

    +
  • +
+
+
+
+

Trainings/tutorials

+
+ +
+ +
+
+

devonfw Release notes 2020.12

+ +
+
+

Introduction

+
+

We are proud to announce the release of devonfw version 2020.12.

+
+
+

This release includes lots of addition of new features, updates and bug fixes but it is very important to highlight the following improvements:

+
+
+
+

devonfw IDE

+
+

The consolidated list of features for this devonfw IDE release is as it follows.

+
+
+
+

2020.12.001

+
+

Update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #495: Documentation corrections

    +
  • +
  • +

    #491: Consider lombok support

    +
  • +
  • +

    #489: Update node to v12.19.0 and VS Code to 1.50.1

    +
  • +
  • +

    #470: reverse merge of workspace settings not sorting properties anymore

    +
  • +
  • +

    #483: Error during installation when npm is already installed

    +
  • +
  • +

    #415: documentation to customize settings

    +
  • +
  • +

    #479: Error for vscode plugin installation

    +
  • +
  • +

    #471: Preconfigure Project Explorer with Hierarchical Project Presentation

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.12.001.

+
+
+
+

2020.08.001

+
+

Update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #454: update to eclipse 2020.06

    +
  • +
  • +

    #442: update nodejs and vscode

    +
  • +
  • +

    #432: vsCode settings are not updated

    +
  • +
  • +

    #446: intellij: doConfigureEclipse: command not found

    +
  • +
  • +

    #440: Software update may lead to inconsistent state due to windows file locks

    +
  • +
  • +

    #427: release: keep leading zeros

    +
  • +
  • +

    #450: update settings

    +
  • +
  • +

    #431: devon build command not working correct for yarn or npm

    +
  • +
  • +

    #449: update to devon4j 2020.08.001

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.08.001.

+
+
+
+

2020.04.004

+
+

Minor update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #433: Windows: devon command line sets wrong environment variables (with tilde symbol)

    +
  • +
  • +

    #435: fix variable resolution on bash

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.04.004.

+
+
+
+

2020.04.003

+
+

Minor update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #395: variable from devon.properites unset if value is in double quotes

    +
  • +
  • +

    #429: Added script to create a meta file in the users directory after setup

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.04.003.

+
+
+
+

2020.04.002

+
+

Minor update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #418: Make projects optional

    +
  • +
  • +

    #421: update devon4j to 2020.04.002

    +
  • +
  • +

    #413: Update Eclipse to 2020-03

    +
  • +
  • +

    #424: Strange errors on windows if devon.properties contains mixed line endings

    +
  • +
  • +

    #399: launching of Intellij fails with No such file or directory error.

    +
  • +
  • +

    #410: fix jsonmerge for boolean and null values

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.04.002.

+
+
+
+

devon4j

+
+

The consolidated list of features for this devon4j release is as it follows.

+
+
+
+

2020.12.001

+
+

New release of devon4j with pluggable web security (CSRF starter) and CompletableFuture support for async REST service client as well as other improvements:

+
+
+
    +
  • +

    #283: Support for CompletableFuture in async service client

    +
  • +
  • +

    #307: Fix CSRF protection support

    +
  • +
  • +

    #287: spring-boot update to 2.3.3

    +
  • +
  • +

    #288: Update jackson to 2.11.2

    +
  • +
  • +

    #293: Update owasp-dependency-check plugin version to 5.3.2

    +
  • +
  • +

    #302: added guide for project/app structure

    +
  • +
  • +

    #315: devon4j documentation correction

    +
  • +
  • +

    #306: improve documentation to launch app

    +
  • +
+
+
+

Documentation is available at devon4j guide 2020.12.001. +The full list of changes for this release can be found in milestone devon4j 2020.12.001.

+
+
+
+

2020.08.001

+
+

New release of devon4j with async REST service client support and other improvements:

+
+
+
    +
  • +

    #279: support for async service clients

    +
  • +
  • +

    #277: Update Security-Guide to recent OWASP Top (2017)

    +
  • +
  • +

    #281: cleanup documentation

    +
  • +
+
+
+

Documentation is available at devon4j guide 2020.08.001. +The full list of changes for this release can be found in milestone devon4j 2020.08.001.

+
+
+
+

2020.04.002

+
+

Minor update of devon4j with the following bugfixes and small improvements:

+
+
+
    +
  • +

    #261: JUnit4 backward compatibility

    +
  • +
  • +

    #267: Fix JWT permission expansion

    +
  • +
  • +

    #254: JWT Authentication support for devon4j-kafka

    +
  • +
  • +

    #258: archetype is still lacking a .gitignore

    +
  • +
  • +

    #273: Update libs

    +
  • +
  • +

    #271: Do not enable resource filtering by default

    +
  • +
  • +

    #255: Kafka: Support different retry configuration for different topics

    +
  • +
+
+
+

Documentation is available at devon4j guide 2020.04.002. +The full list of changes for this release can be found in milestone devon4j 2020.04.002.

+
+
+
+

devon4node

+
+

New devon4node version is published, the changes are:

+
+
+

In this release we have deprecated the devon4node CLI, now we use the nest CLI, and we have added a GraphQL sample.

+
+
+
    +
  • +

    #375: GraphQL Sample.

    +
  • +
  • +

    #257: D4N cli remove

    +
  • +
+
+
+
+

CobiGen

+
+

Various bugfixes were made as well as consolidating behavior of eclipse vs maven vs cli by properly sharing more code across the different clients. +Also properly takes into account a file's line delimiters instead of defaulting to those of the host system.

+
+ +
+
+

Templates

+
+
    +
  • +

    Removed environment.ts from the crud_angular_client_app/CRUD devon4ng Angular App increment since Cobigen did not make any changes in it

    +
  • +
  • +

    Removed cross referencing between template increments since there is currently no useful use case for it and it leads to a few problems

    +
  • +
  • +

    v2020.12.001

    +
  • +
+
+
+
+

Java Plug-in

+
+
    +
  • +

    Now properly merges using the input files line delimiters instead of defaulting to those of the host system.

    +
  • +
  • +

    v7.1.0

    +
  • +
+
+
+
+

TypeScript Plug-in

+
+
    +
  • +

    Fixed NPE Added the option to read a path from an object input

    +
  • +
  • +

    v7.1.0

    +
  • +
+
+
+
+

Property Plug-in

+
+
    +
  • +

    Now properly merges using the input files line delimiters instead of defaulting to those of the host system.

    +
  • +
  • +

    v7.1.0

    +
  • +
+
+
+
+

OpenAPI Plug-in

+
+
    +
  • +

    Fixed an issue where nullable enums lead to errors

    +
  • +
  • +

    7.1.0

    +
  • +
+
+
+
+

Textmerger

+
+
    +
  • +

    Now properly merges using the input files line delimiters instead of defaulting to those of the host system.

    +
  • +
  • +

    v7.1.0

    +
  • +
  • +

    v7.1.1

    +
  • +
+
+
+
+

Sonar devon4j plugin

+
+

With this release, we made the package structure configurable and did some other improvements and fixes:

+
+
+
    +
  • +

    #117: Rule from checkstyle plugin could not be instantiated in our quality profile

    +
  • +
  • +

    #118: NPE during project analysis

    +
  • +
  • +

    #97: Custom configuration for architecture

    +
  • +
  • +

    #92: Display warnings on the 'devonfw' config page in the 'Administration' section of SonarQube

    +
  • +
  • +

    #95: Add 3rd Party rule to avoid Immutable annotation from wrong package

    +
  • +
  • +

    #94: Add 3rd Party rule to avoid legacy date types

    +
  • +
  • +

    #93: Improve devonfw Java quality profile

    +
  • +
  • +

    #114: Deleted unused architecture config from SonarQube settings to avoid confusion

    +
  • +
+
+
+

Changes for this release can be found in milestone 2020.12.001 and + milestone 2020.12.002

+
+
+
+

devon4net

+
+

The consolidated list of features for devon4net is as follows:

+
+
+
    +
  • +

    LiteDb: - Support for LiteDB - Provided basic repository for CRUD operations.

    +
  • +
  • +

    RabbitMq: - Use of EasyQNet library to perform CQRS main functions between different microservices - Send commands / Subscribe queues with one C# sentence - Events management: Handled received commands to subscribed messages - Automatic messaging backup when sent and handled (Internal database via LiteDB and database backup via Entity Framework)

    +
  • +
  • +

    MediatR: - Use of MediatR library to perform CQRS main functions in memory - Send commands / Subscribe queues with one C# sentence - Events management: Handled received commands to subscribed messages - Automatic messaging backup when sent and handled (Internal database via LiteDB and database backup via Entity Framework)

    +
  • +
  • +

    SmaxHcm: - Component to manage Microfocus SMAX for cloud infrastructure services management

    +
  • +
  • +

    CyberArk: - Manage safe credentials with CyberArk

    +
  • +
  • +

    AnsibleTower: - Ansible automates the cloud infrastructure. devon4net integrates with Ansible Tower via API consumption endpoints

    +
  • +
  • +

    gRPC+Protobuf: - Added Client + Server basic templates sample gRPC with Google’s Protobuf protocol using devon4net

    +
  • +
  • +

    Kafka: - Added Apache Kafka support for deliver/consume messages and create/delete topics as well

    +
  • +
  • +

    AWS support

    +
    +
      +
    • +

      AWS Template to create serverless applications with auto generation of an APIGateway using AWS base template

      +
    • +
    • +

      AWS template to create pure Lambda functions and manage SQS Events, SNS Events, Generic Events, CloudWatch, S3 Management, AWS Secrets management as a configuration provider in .NET life cycle

      +
    • +
    • +

      AWS CDK integration component to create/manage AWS infrastructures (Infra As Code): Database, Database cluster, VPC, Secrets, S3 buckets, Roles…

      +
    • +
    +
    +
  • +
  • +

    Minor performance and stability improvements such Entity framework migration integration

    +
  • +
  • +

    Updated to the latest .net Core 3.1 TLS

    +
  • +
+
+
+
+

dashboard (beta version)

+
+

We are adding the dashboard beta version as part of this release. Dashboard is a tool that allows you to create and manage devonfw projects. It makes it easy to onboard a new person with devonfw.

+
+
+
    +
  • +

    Dashboard lists all IDEs available on the user's system or, if no IDE is available, it provides an option to download the latest version of the IDE.

    +
  • +
  • +

    Project creation and management: The project page lists all projects created by the user using the dashboard. Users are able to create devon4j, devon4ng and devon4node projects using the dashboard.

    +
  • +
  • +

    Support for Eclipse and VSCode IDE

    +
  • +
  • +

    Integrated devonfw-ide usage guide from the website

    +
  • +
+
+
+
+

Solicitor

+
+

Solicitor is a tool which helps managing Open Source Software used within projects. Below is consolidated feature list of solicitor:

+
+
+
    +
  • +

    Standalone Command Line Java Tool

    +
  • +
  • +

    Importers for component/license information from

    +
  • +
  • +

    Maven

    +
  • +
  • +

    Gradle

    +
  • +
  • +

    NPM

    +
  • +
  • +

    CSV (e.g. for manual entry of data)

    +
  • +
  • +

    Rules processing (using Drools Rule Engine) controls the different phases:

    +
  • +
  • +

    Normalizing / Enhancing of license information

    +
  • +
  • +

    Handling of multilicensing (including selection of applicable licenses) and re-licensing

    +
  • +
  • +

    Legal evaluation

    +
  • +
  • +

    Rules to be defined as Decision Tables

    +
  • +
  • +

    Sample Decision Tables included

    +
  • +
  • +

    Automatic download and file based caching of license texts

    +
  • +
  • +

    Allows manual editing / reformatting of license text

    +
  • +
  • +

    Output processing

    +
  • +
  • +

    Template based text (Velocity) and XLS generation

    +
  • +
  • +

    SQL based pre-processor (e.g. for filtering, aggregation)

    +
  • +
  • +

    Audit log which documents all applied rules for every item might be included in report

    +
  • +
  • +

    "Diff Mode" allows to mark data which has changed as compared to a previous run of Solicitor (in Velocity and XLS reporting)

    +
  • +
  • +

    Customization

    +
  • +
  • +

    Project specific configuration (containing e.g. reporting templates, decision tables) allows to override/amend builtin configuration

    +
  • +
  • +

    Builtin configuration might be overridden/extended by configuration data contained in a single extension file (ZIP format)

    +
  • +
  • +

    This allows to safely provide organization specific rules and reporting templates to all projects of an organization (e.g. to reflect the specific OSS usage policy of the organization)

    +
  • +
+
+
+
+

MrChecker

+
+

MrChecker Test Framework is an end to end test automation framework written in Java. It is an automated testing framework for functional testing of web applications, API web services, Service Virtualization, Security, native mobile apps and, in the near future, databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions. Below is the consolidated list of updates in MrChecker:

+
+
+ +
+
+
+

Trainings/tutorials

+
+ +
+ +
+
+

devonfw Release notes 2020.08

+ +
+
+

Introduction

+
+

We are proud to announce the release of devonfw version 2020.08.

+
+
+

This release includes lots of addition of new features, updates and bug fixes but it is very important to highlight the following improvements:

+
+
+
+

devonfw IDE

+
+

The consolidated list of features for this devonfw IDE release is as it follows.

+
+
+
+

2020.08.001

+
+

Update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #454: update to eclipse 2020.06

    +
  • +
  • +

    #442: update nodejs and vscode

    +
  • +
  • +

    #432: vsCode settings are not updated

    +
  • +
  • +

    #446: intellij: doConfigureEclipse: command not found

    +
  • +
  • +

    #440: Software update may lead to inconsistent state due to windows file locks

    +
  • +
  • +

    #427: release: keep leading zeros

    +
  • +
  • +

    #450: update settings

    +
  • +
  • +

    #431: devon build command not working correct for yarn or npm

    +
  • +
  • +

    #449: update to devon4j 2020.08.001

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.08.001.

+
+
+
+

2020.04.004

+
+

Minor update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #433: Windows: devon command line sets wrong environment variables (with tilde symbol)

    +
  • +
  • +

    #435: fix variable resolution on bash

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.04.004.

+
+
+
+

2020.04.003

+
+

Minor update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #395: variable from devon.properties unset if value is in double quotes

    +
  • +
  • +

    #429: Added script to create a meta file in the users directory after setup

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.04.003.

+
+
+
+

2020.04.002

+
+

Minor update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #418: Make projects optional

    +
  • +
  • +

    #421: update devon4j to 2020.04.002

    +
  • +
  • +

    #413: Update Eclipse to 2020-03

    +
  • +
  • +

    #424: Strange errors on windows if devon.properties contains mixed line endings

    +
  • +
  • +

    #399: launching of Intellij fails with No such file or directory error.

    +
  • +
  • +

    #410: fix jsonmerge for boolean and null values

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.04.002.

+
+
+
+

devon4j

+
+

The consolidated list of features for this devon4j release is as it follows.

+
+
+
+

2020.08.001

+
+

New release of devon4j with async REST service client support and other improvements:

+
+
+
    +
  • +

    #279: support for async service clients

    +
  • +
  • +

    #277: Update Security-Guide to recent OWASP Top (2017)

    +
  • +
  • +

    #281: cleanup documentation

    +
  • +
+
+
+

Documentation is available at devon4j guide 2020.08.001. +The full list of changes for this release can be found in milestone devon4j 2020.08.001.

+
+
+
+

2020.04.002

+
+

Minor update of devon4j with the following bugfixes and small improvements:

+
+
+
    +
  • +

    #261: JUnit4 backward compatibility

    +
  • +
  • +

    #267: Fix JWT permission expansion

    +
  • +
  • +

    #254: JWT Authentication support for devon4j-kafka

    +
  • +
  • +

    #258: archetype is still lacking a .gitignore

    +
  • +
  • +

    #273: Update libs

    +
  • +
  • +

    #271: Do not enable resource filtering by default

    +
  • +
  • +

    #255: Kafka: Support different retry configuration for different topics

    +
  • +
+
+
+

Documentation is available at devon4j guide 2020.04.002. +The full list of changes for this release can be found in milestone devon4j 2020.04.002.

+
+
+
+

devon4ng

+
+

This release is focused mainly on the Angular 10 upgrade:

+
+
+
    +
  • +

    #176: Template submodules updated to Angular 10 and NgRx 10.

    +
  • +
  • +

    #167, #168, #174 and #175: Updated electron (sample and documentation).

    +
  • +
  • +

    #166: Update error handler.

    +
  • +
  • +

    #165: Cypress sample.

    +
  • +
  • +

    #164: Update to Angular 10 (samples and documentation).

    +
  • +
+
+
+
+

devon4node

+
+

New devon4node version is published, the changes are:

+
+
+
    +
  • +

    Updated dependencies.

    +
  • +
  • +

    Solved bug when you introduce a name with dashes in new command.

    +
  • +
  • +

    Add more options to the non-interactive new command.

    +
  • +
+
+
+
+

CobiGen

+
+

CobiGen version numbers have been consolidated to now represent plug-in compatibility in the major release number (7.x.x).

+
+
+
+

CLI

+
+
    +
  • +

    CLI increments can be referenced by name and description.

    +
  • +
  • +

    Ability to configure logging.

    +
  • +
  • +

    Fixed error on code formatting.

    +
  • +
  • +

    Improved Performance by lazy plug-in loading.

    +
  • +
  • +

    Possibility to prefer custom plug-ins over CobiGen ones.

    +
  • +
  • +

    Fixed bug, which broke whole CobiGen execution in case a custom CobiGen Plug-in was throwing an arbitrary exception.

    +
  • +
+
+
+
+

Eclipse

+
+
    +
  • +

    Improved Performance by lazy plug-in loading.

    +
  • +
  • +

    Possibility to prefer custom plug-ins over CobiGen ones.

    +
  • +
  • +

    Fixed bug, which broke whole CobiGen execution in case a custom CobiGen Plug-in was throwing an arbitrary exception.

    +
  • +
+
+
+
+

Maven

+
+
    +
  • +

    Fixed bug to properly load template util classes.

    +
  • +
  • +

    Improved Performance by lazy plug-in loading.

    +
  • +
  • +

    Possibility to prefer custom plug-ins over CobiGen ones.

    +
  • +
  • +

    Fixed bug, which broke whole CobiGen execution in case a custom CobiGen Plug-in was throwing an arbitrary exception.

    +
  • +
+
+
+
+

XML Plug-in

+
+
    +
  • +

    Added ability to provide custom merge schemas as part of the template folder.

    +
  • +
  • +

    Added further merge strategies for merging including XML validation.

    +
  • +
+
+
+
+

Java Plug-in

+
+
    +
  • +

    Fixed NPE for annotated constructors.

    +
  • +
  • +

    Fixed line separator handling to now prefer the file’s one instead of the system ones.

    +
  • +
  • +

    Fixed unwanted new lines in constructors after merging.

    +
  • +
  • +

    Fixed annotation formatting after merge.

    +
  • +
+
+
+
+

TypeScript Plug-in

+
+
    +
  • +

    Fixed issue on automatic update of the ts-merger bundle.

    +
  • +
+
+
+
+

Sonar devon4j plugin

+
+

The consolidated list of features for this Sonar devon4j plugin release is as it follows.

+
+
+

With this release, we added our own quality profile:

+
+
+
    +
  • +

    #16: Install devon4j quality profile

    +
  • +
+
+
+

Changes for this release can be found in milestone 2020.08.001

+
+
+
+

My Thai Star with Microservices and ISTIO Service Mesh Implementation

+
+

As always, our reference application, My Thai Star now has been implemented with Microservices and ISTIO Service Mesh features:

+
+
+
    +
  • +

    devon4j - Java

    +
    +
      +
    • +

      My Thai Star now has a sample version on Microservices architecture.

      +
    • +
    • +

      The github repository for the microservices version of My Thai Star is hosted at My Thai Star with Microservices

      +
    • +
    • +

      My Thai Star Microservices now has a multi stage docker build which generates the respective docker images for all the My Thai Star services.

      +
    • +
    • +

      My Thai Star microservices has the Kubernetes artifacts available to be able to deploy into Kubernetes pods.

      +
    • +
    • +

      My Thai Star microservices has ISTIO the service mesh implementation.

      +
    • +
    • +

      Check out the guides to implement or configure ISTIO features such as Traffic Routing, Network Resiliency features(RequestRouting, RequestTimeouts, Fault Injection, Circuit Breaker), Canary Deployments.

      +
    • +
    +
    +
  • +
+
+ +
+
+

devonfw Release notes 2020.04

+ +
+
+

Introduction

+
+

We are proud to announce the immediate release of devonfw version 2020.04. This version is the first one with the new versioning that will make easier to the community to identify when it was released since we use the year and month as many other software distributions.

+
+
+

This release includes lots of bug fixes and many version updates, but it is very important to highlight the following improvements:

+
+
+
    +
  • +

    New devonfw IDE auto-configure project feature.

    +
  • +
  • +

    Improved devonfw IDE plugin configuration.

    +
  • +
  • +

    New devon4j kafka module.

    +
  • +
  • +

    New devon4j JWT module.

    +
  • +
  • +

    New devon4j authorization of batches feature.

    +
  • +
  • +

    Dozer replaced with Orika in devon4j.

    +
  • +
  • +

    Support for composite keys in devon4j and CobiGen.

    +
  • +
  • +

    Multiple enhancements for project specific plugin development and usage of project specific template sets in CobiGen.

    +
  • +
  • +

    Ability to adapt your own templates by making use of CobiGen CLI.

    +
  • +
  • +

    Better responsiveness in eclipse and bugfixes in all assets in CobiGen.

    +
  • +
  • +

    devon4ng updated to Angular 9, NgRx 9 and Ionic 5, including documentation, samples and templates.

    +
  • +
  • +

    Yarn 2 support in devon4ng.

    +
  • +
  • +

    devon4node updated to NestJS 7 (packages, samples and documentation)

    +
  • +
  • +

    devon4node TSLint replaced with ESLint.

    +
  • +
  • +

    @devon4node/config package added.

    +
  • +
  • +

    devon4net updated to latest .NET Core 3.1.3 LTS version.

    +
  • +
  • +

    Update of the Production Line templates for devonfw projects in devonfw shop floor.

    +
  • +
  • +

    New merge feature included in the devonfw shop floor cicdgen tool.

    +
  • +
  • +

    Updated sonar-devon4j-plugin:

    +
    +
      +
    • +

      Improved coloring and other visual cues to our rule descriptions to highlight good and bad code examples.

      +
    • +
    • +

      Improved the locations of issues thrown on method- and class-level.

      +
    • +
    +
    +
  • +
+
+
+

Please check the detailed list below.

+
+
+

This would have not been possible without the commitment and hard work of the devonfw core team, German, Indian and ADCenter Valencia colleagues and collaborators as, among many others, the Production Line team.

+
+
+
+

devonfw IDE

+
+

The consolidated list of features for this devonfw IDE release is as it follows.

+
+
+
+

2020.04.001

+
+

Starting with this release we have changed the versioning schema in devonfw to yyyy.mm.NNN where yyyy.mm is the date of the planned milestone release and NNN is a running number increased with every bug- or security-fix update.

+
+
+
    +
  • +

    #394 variable from devon.properties not set if not terminated with newline

    +
  • +
  • +

    #399 launching of Intellij fails with No such file or directory error.

    +
  • +
  • +

    #371 Eclipse plugin installation broke

    +
  • +
  • +

    #390 maven get/set-version buggy

    +
  • +
  • +

    #397 migration support for devon4j 2020.04.001

    +
  • +
  • +

    #400 allow custom args for release

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.04.001.

+
+
+
+

3.3.1

+
+

New release with bugfixes and new ide plugin feature:

+
+
+
    +
  • +

    #343: Setup can’t find Bash nor Git

    +
  • +
  • +

    #369: Fix flattening of POMs

    +
  • +
  • +

    #386: Feature/clone recursive

    +
  • +
  • +

    #379: Use own extensions folder in devonfw-ide

    +
  • +
  • +

    #381: Add ability to configure VS Code plugins via settings

    +
  • +
  • +

    #376: Improve Eclipse plugin configuration

    +
  • +
  • +

    #373: Fix project import on windows

    +
  • +
  • +

    #374: Rework build on import

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 3.3.1.

+
+
+
+

3.3.0

+
+

New release with bugfixes and new project import feature:

+
+
+
    +
  • +

    #343: Detect non-admin GitForWindows and Cygwin

    +
  • +
  • +

    #175: Ability to clone projects and import into Eclipse automatically

    +
  • +
  • +

    #346: devon eclipse add-plugin parameters swapped

    +
  • +
  • +

    #363: devon ide update does not pull latest project settings

    +
  • +
  • +

    #366: update java versions to latest fix releases

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 3.3.0.

+
+
+
+

devon4j

+
+

The consolidated list of features for this devon4j release is as it follows.

+
+
+
+

2020.04.001

+
+

Starting with this release we have changed the versioning schema in devonfw to yyyy.mm.NNN where yyyy.mm is the date of the planned milestone release and NNN is a running number increased with every bug- or security-fix update.

+
+
+

The following changes have been incorporated in devon4j:

+
+
+
    +
  • +

    #233: Various version updates

    +
  • +
  • +

    #241: Add module to support JWT and parts of OAuth

    +
  • +
  • +

    #147: Switch from dozer to orika

    +
  • +
  • +

    #180: Cleanup archetype

    +
  • +
  • +

    #240: Add unreferenced guides

    +
  • +
  • +

    #202: Architecture documentation needs update for components

    +
  • +
  • +

    #145: Add a microservices article in the documentation

    +
  • +
  • +

    #198: Deploy SNAPSHOTs to OSSRH in travis CI

    +
  • +
  • +

    #90: Authorization of batches

    +
  • +
  • +

    #221: Wrote monitoring guide

    +
  • +
  • +

    #213: Document logging of custom field in json

    +
  • +
  • +

    #138: Remove deprecated RevisionMetadata[Type]

    +
  • +
  • +

    #211: Archetype: security config broken

    +
  • +
  • +

    #109: LoginController not following devon4j to use JAX-RS but uses spring-webmvc instead

    +
  • +
  • +

    #52: Improve configuration

    +
  • +
  • +

    #39: Ability to log custom fields via SLF4J

    +
  • +
  • +

    #204: Slf4j version

    +
  • +
  • +

    #190: Rework of spring-batch integration

    +
  • +
  • +

    #210: Rework documentation for blob support

    +
  • +
  • +

    #191: Rework of devon4j-batch module

    +
  • +
  • +

    #209: Include performance info in separate fields

    +
  • +
  • +

    #207: Use more specific exception for not found entity

    +
  • +
  • +

    #208: Remove unnecessary clone

    +
  • +
  • +

    #116: Bug in JSON Mapping for ZonedDateTime

    +
  • +
  • +

    #184: Fixed BOMs so devon4j and archetype can be used again

    +
  • +
  • +

    #183: Error in executing the project created with devon4j

    +
  • +
  • +

    #177: Switch to new maven-parent

    +
  • +
  • +

    #169: Provide a reason why unchecked exceptions are used in devon4j

    +
  • +
+
+
+

Documentation is available at devon4j guide 2020.04.001. +The full list of changes for this release can be found in milestone devon4j 2020.04.001.

+
+
+
+

devon4ng

+
+

The consolidated list of features for this devon4ng release is as it follows.

+
+
+
+

2020.04.001

+
+

Starting with this release we have changed the versioning schema in devonfw to yyyy.mm.NNN where yyyy.mm is the date of the planned milestone release and NNN is a running number increased with every bug- or security-fix update.

+
+
+
    +
  • +

    #111: Yarn 2 support included

    +
  • +
  • +

    #96: devon4ng upgrade to Angular 9

    +
    +
      +
    • +

      Templates and samples updated to Angular 9, NgRx 9 and Ionic 5.

      +
    • +
    • +

      New internationalization module.

      +
    • +
    • +

      Documentation updates and improvements.

      +
    • +
    +
    +
  • +
  • +

    #95: Added token management info in documentation

    +
  • +
+
+
+
+

devon4net

+
+

The consolidated list of features for this devon4net release is as it follows:

+
+
+
    +
  • +

    Updated to latest .NET Core 3.1.3 LTS version

    +
  • +
  • +

    Dependency Injection Autoregistration for services and repositories

    +
  • +
  • +

    Added multiple role managing claims in JWT

    +
  • +
  • +

    Added custom headers to circuit breaker

    +
  • +
  • +

    Reviewed default log configuration

    +
  • +
  • +

    Added support to order query results from database via lambda expression

    +
  • +
  • +

    Updated template and nuget packages

    +
  • +
+
+
+
+

devon4node

+
+

The consolidated list of features for this devon4node release is as it follows:

+
+
+
    +
  • +

    Upgrade to NestJS 7 (packages, samples and documentation)

    +
  • +
  • +

    TSLint replaced with ESLint

    +
  • +
  • +

    Add lerna to project to manage all the packages

    +
  • +
  • +

    Add @devon4node/config package

    +
  • +
  • +

    Add new schematics: Repository

    +
  • +
  • +

    Improve WinstonLogger

    +
  • +
  • +

    Improve documentation

    +
  • +
  • +

    Update dependencies to latest versions

    +
  • +
+
+
+
+

CobiGen

+
+

New release with updates and bugfixes:

+
+
+
    +
  • +

    devonfw templates:

    +
    +
      +
    • +

      #1063: Upgrade devon4ng Ionic template to latest version

      +
    • +
    • +

      #1065: devon4ng templates for devon4node

      +
    • +
    • +

      #1128: update java templates for composite keys

      +
    • +
    • +

      #1130: Update template for devon4ng application template

      +
    • +
    • +

      #1131: Update template for devon4ng NgRx template

      +
    • +
    • +

      #1149: .NET templates

      +
    • +
    • +

      #1146: Dev ionic template update bug fix

      +
    • +
    +
    +
  • +
  • +

    TypeScript plugin:

    +
    +
      +
    • +

      #1126: OpenApi parse/merge issues (ionic List templates)

      +
    • +
    +
    +
  • +
  • +

    Eclipse plugin:

    +
    +
      +
    • +

      #412: Write UI Test for HealthCheck use

      +
    • +
    • +

      #867: Cobigen processbar

      +
    • +
    • +

      #1069: #953 dot path

      +
    • +
    • +

      #1099: NPE on HealthCheck

      +
    • +
    • +

      #1100: 1099 NPE on health check

      +
    • +
    • +

      #1101: #867 fix import of core and api

      +
    • +
    • +

      #1102: eclipse_plugin doesn’t accept folders as input

      +
    • +
    • +

      #1134: (Eclipse-Plugin) Resolve Template utility classes from core

      +
    • +
    • +

      #1142: #1102 accept all kinds of input

      +
    • +
    +
    +
  • +
  • +

    CobiGen core:

    +
    +
      +
    • +

      #429: Reference external template files

      +
    • +
    • +

      #1143: Abort generation if external trigger does not match

      +
    • +
    • +

      #1125: Generation of templates from external increments does not work

      +
    • +
    • +

      #747: Variable assignment for external increments throws exception

      +
    • +
    • +

      #1133: Bugfix/1125 generation of templates from external increments does not work

      +
    • +
    • +

      #1127: #1119 added new TemplatesUtilsClassesUtil class to core

      +
    • +
    • +

      #953: NPE bug if foldername contains a dot

      +
    • +
    • +

      #1067: Feature/158 lat variables syntax

      +
    • +
    +
    +
  • +
  • +

    CobiGen CLI:

    +
    +
      +
    • +

      #1111: Infinity loop in mmm-code (MavenDependencyCollector.collectWithReactor)

      +
    • +
    • +

      #1113: cobigen-cli does not seem to properly resolve classes from dependencies

      +
    • +
    • +

      #1120: Feature #1108 custom templates folder

      +
    • +
    • +

      #1115: Fixing CLI bugs related to dependencies and custom templates jar

      +
    • +
    • +

      #1108: CobiGen CLI: Allow easy use of user’s templates

      +
    • +
    • +

      #1110: FileSystemNotFoundException blocking cobigen-cli

      +
    • +
    • +

      #1138: #1108 dev cli feature custom templates folder

      +
    • +
    • +

      #1136: (Cobigen-CLI) Resolve Template utility classes from core

      +
    • +
    +
    +
  • +
+
+
+
+

devonfw-shop-floor

+
+
    +
  • +

    Add documentation for deploy jenkins slaves

    +
  • +
  • +

    Improve documentation

    +
  • +
  • +

    Add devon4net Openshift template

    +
  • +
  • +

    Add nginx docker image for devon4ng

    +
  • +
  • +

    Add Openshift provisioning

    +
  • +
  • +

    Production Line:

    +
    +
      +
    • +

      Updated MTS template: add step for dependency check and change the deployment method

      +
    • +
    • +

      Add template utils: initialize instance, openshift configuration, docker configuration and install sonar plugin

      +
    • +
    • +

      Add devon4net template

      +
    • +
    • +

      Add from existing template

      +
    • +
    • +

      Improve documentation

      +
    • +
    • +

      Refactor the documentation in order to follow the devonfw wiki workflow

      +
    • +
    • +

      Update devon4j, devon4ng, devon4net and devon4node in order to be able to choose the deployment method: none, docker or openshift.

      +
    • +
    • +

      Update the tools version in order to use the latest.

      +
    • +
    +
    +
  • +
  • +

    Production Line Shared Lib

    +
    +
      +
    • +

      Add more functionality to the existing classes.

      +
    • +
    • +

      Add classes: DependencyCheckConfiguration, DockerConfiguration and OpenshiftConfiguration

      +
    • +
    +
    +
  • +
  • +

    CICDGEN

    +
    +
      +
    • +

      Add devon4net support

      +
    • +
    • +

      Update tools versions in Jenkinsfiles to align with Production Line templates

      +
    • +
    • +

      Add merge strategies: error, keep, override, combine

      +
    • +
    • +

      Add lerna to the project

      +
    • +
    • +

      Minor improvements in the code

      +
    • +
    • +

      Add GitHub actions workflow to validate the new changes

      +
    • +
    • +

      Improve documentation

      +
    • +
    • +

      Breaking changes:

      +
      +
        +
      • +

        Remove the following parameters: plurl, ocurl

        +
      • +
      • +

        Add the following parameters: dockerurl, dockercertid, registryurl, ocname and merge

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+

Sonar devon4j plugin

+
+

The consolidated list of features for this Sonar devon4j plugin release is as it follows.

+
+
+
+

2020.04.001

+
+

This is the first version using our new versioning scheme. Here, the following issues were resolved:

+
+
+
    +
  • +

    #60: Fixed a bug in the naming check for Use-Case implementation classes

    +
  • +
  • +

    #67: Fixed a bug where the whole body of a method or a class was marked as the issue location. Now only the method / class headers will be highlighted.

    +
  • +
  • +

    #68: Made our rule descriptions more accessible by using better readable colors as well as alternative visual cues

    +
  • +
  • +

    #71: Fixed a bug where a NPE could be thrown

    +
  • +
  • +

    #74: Fixed a bug where a method always returned null

    +
  • +
+
+
+

Unrelated to any specific issues, there was some refactoring and cleaning up done with the following two PRs:

+
+
+
    +
  • +

    PR #66: Refactored the prefixes of our rule names from 'Devon' to 'devonfw'

    +
  • +
  • +

    PR #65: Sorted security-related test files into their own package

    +
  • +
+
+
+

Changes for this release can be found in milestone 2020.04.001.

+
+
+
+

My Thai Star

+
+

As always, our reference application, My Thai Star, contains some interesting improvements that come from the new features and bug fixes from the other assets. The list is as it follows:

+
+
+
    +
  • +

    devon4j - Java

    +
    +
      +
    • +

      Implement example batches with modified devon-batch

      +
    • +
    • +

      Upgrade spring boot version to 2.2.6 and devon4j 2020.04.001

      +
    • +
    • +

      Migrate from dozer to orika

      +
    • +
    +
    +
  • +
  • +

    devon4ng - Angular

    +
    +
      +
    • +

      Move configuration to NgRx store

      +
    • +
    +
    +
  • +
  • +

    devonfw shop floor - Jenkins

    +
    +
      +
    • +

      Update tools versions in order to align with Production Line templates

      +
    • +
    • +

      Add dependency check step (using dependency checker and yarn audit)

      +
    • +
    • +

      Send dependency checker reports to SonarQube

      +
    • +
    • +

      Changed deployment pipelines. Now pipelines are able to deploy docker containers using docker directly. No more ssh connections to execute commands in a remote machine are required.

      +
    • +
    • +

      Update documentation in order to reflect all changes

      +
    • +
    +
    +
  • +
  • +

    devon4node - Node.js

    +
    +
      +
    • +

      Upgrade to NestJS 7

      +
    • +
    • +

      Add custom repositories

      +
    • +
    • +

      Add exceptions and exception filters

      +
    • +
    • +

      Add tests (missing in the previous version)

      +
    • +
    • +

      Split logic into use cases in order to make the test process easier

      +
    • +
    • +

      Minor patches and improvements

      +
    • +
    • +

      Documentation updated in order to reflect the new implementation

      +
    • +
    +
    +
  • +
+
+ +
+
+

devonfw Release notes 3.2 “Homer”

+ +
+
+

Introduction

+
+

We are proud to announce the immediate release of devonfw version 3.2 (code named “Homer” during development). This version is the first one that contains the new devonfw IDE by default, so there is no need to download a huge ZIP with the whole distribution regardless of the use to which it will be put. The new devonfw IDE CLI will allow any user to set up a customized development environment completely configured with access to all the devonfw features, frameworks and tools. As we now access the official IDEs, this is also the first version that is macOS compatible.

+
+
+

This release consolidates the documentation workflow, adding the contents dynamically to the new devonfw website at the same time the PDF is generated. This has been achieved using a new GitHub action that takes the contents and builds the HTML files for the documentation section of the website. The documentation workflow proposed in the following picture is now complete:

+
+
+
+documentation workflow +
+
+
+

This release also includes the first version of devon4node. We consider that node.js should be a first-class citizen inside the devonfw platform and for that reason we have included the latest development technologies for this ecosystem. The devon4node CLI, schematics and other tools will allow our users to create powerful node.js applications with the same philosophy you may find in the other languages and frameworks included. More information at its section below.

+
+
+

The new devon4net 3.2.0 version is also included in this release. Based on the .NET Core 3.0 and containing lots of new features gathered from important and recent projects, it represents a great improvement and an intermediate step to provide support for the incoming .NET Core 3.1 LTS. More information at its section below.

+
+
+

This release includes the final version of the new CobiGen CLI and completely integrated with the new devonfw IDE. Now using commands, you will be able to generate code the same way as you do with Eclipse. This means that you can use CobiGen on other IDEs like Visual Studio Code or IntelliJ. Besides the Update command has been implemented. Now you will be able to update easily all your CobiGen plug-ins and templates inside the CLI.

+
+
+

On the other hand, the refactoring process has been completely developed, improving the mergers and including input readers for any other languages and frameworks, allowing the creation of models to generate code from them. Last, but not least, this new version includes the new templates for devon4net, devon4ng and devon4j generation.

+
+
+

And as always, My Thai Star has been updated to the latest versions of devon4j, devon4node and devon4net including completely State Management with NgRx in its devon4ng implementation upgrade.

+
+
+

This is the last release with the current semantic versioning number and without a fixed release calendar. From now on the new devonfw releases will happen in April, August and December and will be named YYYY.MM.NN, being the first release of the next year the 2020.04.00.

+
+
+
+

Changes and new features

+ +
+
+

devonfw-ide

+
+

We have entirely rewritten our automated solution for your local IDE (integrated development environment). The former oasp4j-ide and devonfw distributions with their extra-large gigabyte zip files are now entirely replaced with devonfw-ide. This new solution is provided as a small *.tar.gz file that is publicly available. It works on all platforms and has been tested on Windows, MacOS, and Linux. After extraction you only need to run a setup script. Here you provide a settings git URL for your customer project or simply hit return for testing or small projects. After reading and confirming the terms of use it will download all required tools in the proper versions for your operating system and configure them. Instead of various confusing scripts there is now only one CLI command devon for all use cases, which gives a much better user experience.

+
+
+

To get started go to the home page. There is even a migration-guide if you are currently used to the old approach and want to quickly jump into the new solution.

+
+
+
+

My Thai Star Sample Application

+
+

The new release of My Thai Star has focused on the following improvements:

+
+
+
    +
  • +

    Release 3.2.0.

    +
  • +
  • +

    devon4j:

    +
    +
      +
    • +

      devon4j 3.2.0 integrated.

      +
    • +
    • +

      Spring Boot 2.1.9 integrated.

      +
    • +
    • +

      SAP 4/HANA prediction use case.

      +
    • +
    • +

      Bug fixes.

      +
    • +
    +
    +
  • +
  • +

    devon4ng:

    +
    +
      +
    • +

      SAP 4/HANA prediction use case.

      +
    • +
    • +

      2FA toggleable (two factor authentication).

      +
    • +
    • +

      NgRx full integrated (PR #285).

      +
    • +
    +
    +
  • +
  • +

    devon4net

    +
    +
      +
    • +

      devon4net for dotnet core 3.0 updated

      +
    • +
    • +

      Updated the API contract compatible with the other stacks

      +
    • +
    • +

      JWT implementation reviewed to increase security

      +
    • +
    • +

      ASP.NET user database dependencies removed

      +
    • +
    • +

      HTTP2 support

      +
    • +
    • +

      Clearer CRUD pattern implementation

      +
    • +
    +
    +
  • +
  • +

    devon4node

    +
    +
      +
    • +

      TypeScript 3.6.3.

      +
    • +
    • +

      Based on Nest framework.

      +
    • +
    • +

      Configuration Module

      +
    • +
    • +

      Added cors and security headers

      +
    • +
    • +

      Added mailer module and email templates.

      +
    • +
    • +

      Built in winston logger

      +
    • +
    • +

      Custom ClassSerializerInterceptor

      +
    • +
    +
    +
  • +
  • +

    MrChecker

    +
    +
      +
    • +

      Example cases for end-to-end test.

      +
    • +
    • +

      Production line configuration.

      +
    • +
    +
    +
  • +
  • +

    CICD

    +
    +
      +
    • +

      Improved integration with Production Line

      +
    • +
    • +

      New Traefik load balancer and reverse proxy

      +
    • +
    • +

      New deployment from artifact

      +
    • +
    • +

      New CICD pipelines

      +
    • +
    • +

      New deployment pipelines

      +
    • +
    • +

      Automated creation of pipelines in Jenkins

      +
    • +
    +
    +
  • +
+
+
+
+

Documentation updates

+
+

This release addresses the new documentation workflow, being now possible to keep the documentation synced with any change. The new documentation includes the following contents:

+
+
+
    +
  • +

    Getting started

    +
  • +
  • +

    devonfw ide

    +
  • +
  • +

    devon4j documentation

    +
  • +
  • +

    devon4ng documentation

    +
  • +
  • +

    devon4net documentation

    +
  • +
  • +

    devon4node documentation

    +
  • +
  • +

    CobiGen documentation

    +
  • +
  • +

    devonfw-shop-floor documentation

    +
  • +
  • +

    cicdgen documentation

    +
  • +
  • +

    devonfw testing with MrChecker

    +
  • +
  • +

    My Thai Star documentation

    +
  • +
  • +

    Contribution guide

    +
  • +
  • +

    Release notes

    +
  • +
+
+
+
+

devon4j

+
+

The following changes have been incorporated in devon4j:

+
+
+
    +
  • +

    Completed full support from Java8 to Java11

    +
  • +
  • +

    Several security fixes

    +
  • +
  • +

    Upgrade to Spring Boot 2.1.9

    +
  • +
  • +

    Upgrade to Spring 5.1.8

    +
  • +
  • +

    Upgrade to JUnit 5 (requires migration via devonfw-ide)

    +
  • +
  • +

    Improved JPA support for IdRef

    +
  • +
  • +

    Improved auditing metadata support

    +
  • +
  • +

    Many improvements to documentation (added JDK guide, architecture-mapping, JMS, etc.)

    +
  • +
  • +

    For all details see milestone.

    +
  • +
+
+
+
+

devon4ng

+
+

The following changes have been incorporated in devon4ng:

+
+
+
    +
  • +

    Angular CLI 8.3.1,

    +
  • +
  • +

    Angular 8.2.11,

    +
  • +
  • +

    Angular Material 8.2.3,

    +
  • +
  • +

    Ionic 4.11.1,

    +
  • +
  • +

    Capacitor 1.2.1 as Cordova replacement,

    +
  • +
  • +

    NgRx 8.3 support for State Management,

    +
  • +
  • +

    devon4ng Angular application template updated to Angular 8.2.11 with visual improvements and bugfixes https://github.com/devonfw/devon4ng-application-template

    +
  • +
  • +

    devon4ng Ionic application template updated to 4.11.1 and improved https://github.com/devonfw/devon4ng-ionic-application-template

    +
  • +
  • +

    Improved devon4ng Angular application template with state management using Angular 8 and NgRx 8 https://github.com/devonfw/devon4ng-ngrx-template

    +
  • +
  • +

    Documentation and samples updated to latest versions:

    +
    +
      +
    • +

      Web Components with Angular Elements

      +
    • +
    • +

      Initial configuration with App Initializer pattern

      +
    • +
    • +

      Error Handling

      +
    • +
    • +

      PWA with Angular and Ionic

      +
    • +
    • +

      Lazy Loading

      +
    • +
    • +

      Library construction

      +
    • +
    • +

      Layout with Angular Material

      +
    • +
    • +

      Theming with Angular Material

      +
    • +
    +
    +
  • +
+
+
+
+

devon4net

+
+

The following changes have been incorporated in devon4net:

+
+
+
    +
  • +

    Updated to latest .net core 3.0 version

    +
  • +
  • +

    Template

    +
    +
      +
    • +

      Global configuration automated. devon4net can be instantiated on any .net core application template with no effort

      +
    • +
    • +

      Added support for HTTP2

      +
    • +
    • +

      Number of libraries minimized

      +
    • +
    • +

      Architecture layer review. More clear and scalable

      +
    • +
    • +

      Added red button functionality (aka kill switch) to stop serving API requests with a custom error

      +
    • +
    • +

      Improved API error management

      +
    • +
    • +

      Added support to only accept request from clients with a specific client certificate on Kestrel server. Special thanks to Bart Roozendaal (Capgemini NL)

      +
    • +
    • +

      All components use IOptions pattern to be set up properly

      +
    • +
    • +

      Swagger generation compatible with OpenAPI v3

      +
    • +
    +
    +
  • +
  • +

    Modules

    +
    +
      +
    • +

      The devon4net netstandard libraries have been updated to netstandard 2.1

      +
    • +
    • +

      JWT:

      +
      +
        +
      • +

        Added token encryption (token cannot be decrypted anymore by external parties). Now you can choose the encryption algorithm depending on your needs

        +
      • +
      • +

        Added support for secret key or certificate encryption

        +
      • +
      • +

        Added authorization for swagger portal

        +
      • +
      +
      +
    • +
    • +

      Circuit breaker

      +
      +
        +
      • +

        Added support to bypass certificate validation

        +
      • +
      • +

        Added support to use a certificate for https communications using Microsoft’s httpclient factory

        +
      • +
      +
      +
    • +
    • +

      Unit of Work

      +
      +
        +
      • +

        Repository classes unified and reviewed for increasing performance and reduce the consumed memory

        +
      • +
      • +

        Added support for different database servers: In memory, Cosmos, MySQL + MariaDB, Firebird, PostgreSQL, Oracle, SQLite, Access, MS Local.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+

devon4node

+
+

The following changes have been incorporated in devon4node:

+
+
+
    +
  • +

    TypeScript 3.6.3.

    +
  • +
  • +

    Based on Nest framework.

    +
  • +
  • +

    Complete backend implementation.

    +
  • +
  • +

    New devon4node CLI. It will provide you some commands

    +
    +
      +
    • +

      new: create a new devon4node interactively

      +
    • +
    • +

      generate: generate code based on schematics

      +
    • +
    • +

      db: manage the database

      +
    • +
    +
    +
  • +
  • +

    New devon4node schematics

    +
    +
      +
    • +

      application: create a new devon4node application

      +
    • +
    • +

      config-module: add a configuration module to the project

      +
    • +
    • +

      mailer: install and configure the devon4node mailer module

      +
    • +
    • +

      typeorm: install TypeORM in the project

      +
    • +
    • +

      auth-jwt: add users and auth-jwt modules to the project

      +
    • +
    • +

      swagger: expose an endpoint with the auto-generated swagger

      +
    • +
    • +

      security: add cors and other security headers to the project.

      +
    • +
    • +

      crud: create all CRUD for an entity

      +
    • +
    • +

      entity: create an entity

      +
    • +
    +
    +
  • +
  • +

    New mailer module

    +
  • +
  • +

    New common library

    +
  • +
  • +

    Build in winston logger

    +
  • +
  • +

    Custom ClassSerializerInterceptor

    +
  • +
  • +

    Extendable base entity

    +
  • +
  • +

    New application samples

    +
  • +
+
+
+
+

CobiGen

+
+
    +
  • +

    CobiGen core new features:

    +
    +
      +
    • +

      CobiGen CLI: Update command implemented. Now you will be able to update easily all your CobiGen plug-ins and templates inside the CLI. Please take a look into the documentation for more info.

      +
      +
        +
      • +

        CobiGen CLI is now JDK11 compatible.

        +
      • +
      • +

        CobiGen CLI commandlet for devonfw-ide has been added. You can use it to easily set up your CLI and to run CobiGen-related commands.

        +
      • +
      • +

        Added a version provider so that you will be able to know all the CobiGen plug-ins versions.

        +
      • +
      • +

        Added a progress bar shown when the CLI is downloading the CobiGen plug-ins.

        +
      • +
      +
      +
    • +
    • +

      CobiGen refactoring finished: With this refactoring we have been able to decouple CobiGen completely from the target and input language. This facilitates the creation of parsers and mergers for any language. For more information please take a look here.

      +
      +
        +
      • +

        New TypeScript input reader: We are now able to parse any TypeScript class and generate code using the parsed information. We currently use TypeORM entities as a base for generation.

        +
      • +
      +
      +
    • +
    • +

      Improving CobiGen templates:

      +
      +
        +
      • +

        Updated devon4ng-NgRx templates to NgRx 8.

        +
      • +
      • +

        Generation of an Angular client using as input a TypeORM entity. This is possible thanks to the new TypeScript input reader.

        +
      • +
      • +

        .Net templates have been upgraded to .Net Core 3.0

        +
      • +
      +
      +
    • +
    • +

      CobiGen for Eclipse is now JDK11 compatible.

      +
    • +
    • +

      Fixed bugs when adapting templates and other bugs on the CobiGen core.

      +
    • +
    +
    +
  • +
+
+
+
+

devonfw shop floor

+
+ +
+
+
+

== cicdgen

+
+
    +
  • +

    Patched minor bugs

    +
  • +
+
+
+
+

sonar-devon4j-plugin

+
+

sonar-devon4j-plugin is a SonarQube plugin for architecture governance of devon4j applications. It verifies the architecture and conventions of devon4j, the Java stack of devonfw. The following changes have been incorporated: * Plugin was renamed from sonar-devon-plugin to sonar-devon4j-plugin * Rules/checks have been added to verify naming conventions * New rule for proper JPA datatype mapping * Proper tagging of rules as architecture-violation and not as bug, etc. * Several improvements have been made to prepare the plugin to enter the SonarQube marketplace, which will happen with the very next release. * Details can be found here: https://github.com/devonfw/sonar-devon4j-plugin/milestone/2?closed=1

+
+ +
+
+

devonfw Release notes 3.1 “Goku”

+ +
+
+

Introduction

+
+

We are proud to announce the immediate release of devonfw version 3.1 (code named “Goku” during development). This version is the first one that implements our new documentation workflow, that will allow users to get the updated documentation at any moment and not to wait for the next devonfw release.

+
+
+

This is now possible as we have established a new workflow and rules during development of our assets. The idea behind this is that all the repositories contain a documentation folder and, in any pull request, the developer must include the related documentation change. A new Travis CI configuration added to all these repositories will automatically take the changes and publish them in the wiki section of every repository and in the new devonfw-guide repository that consolidates all the changes from all the repositories. Another pipeline will take changes from this consolidated repository and generate dynamically the devonfw guide in PDF and in the next weeks in HTML for the new planned devonfw website. The following schema explains this process:

+
+
+
+documentation workflow +
+
+
+

This release includes the very first version of the new CobiGen CLI. Now using commands, you will be able to generate code the same way as you do with Eclipse. This means that you can use CobiGen on other IDEs like Visual Studio Code or IntelliJ. Please take a look at https://github.com/devonfw/cobigen/wiki/howto_Cobigen-CLI-generation for more info.

+
+
+

The devonfw-shop-floor project has got a lot of updates in order to make even easier the creation of devonfw projects with CICD pipelines that run on the Production Line, deploy on Red Hat OpenShift Clusters and in general Docker environments. See the details below.

+
+
+

This release includes the very first version of our devonfw-ide tool that will allow users to automate devonfw setup and update the development environment. This tool will become the default devonfw setup tool in future releases. For more information please visit the repository https://github.com/devonfw/devon-ide.

+
+
+

Following the same collaboration model we used in order to improve the integration of devonfw with Red Hat OpenShift and which allowed us to get the Red Hat Open Shift Primed certification, we have been working alongside with SAP HANA developers in order to support this database in the devon4j. This model was based on the contribution and review of pull requests in our reference application My Thai Star. In this case, SAP developers collaborated with us in the following two new use cases:

+
+
+
    +
  • +

    Prediction of future demand

    +
  • +
  • +

    Geospatial analysis and clustering of customers

    +
  • +
+
+ +
+

Last but not least the devonfw extension pack for VS Code has been improved with the latest extensions and helpers for this IDE. Among many others you can now use:

+
+ +
+

Also it is worth the try of the updated support for Java and Spring Boot development in VS Code. Check it out for yourself!

+
+
+

More information at https://marketplace.visualstudio.com/items?itemName=devonfw.devonfw-extension-pack. Also, you can contribute to this extension in this GitHub repository https://github.com/devonfw/devonfw-extension-pack-vscode.

+
+
+
+

Changes and new features

+ +
+
+

Devonfw dist

+
+
    +
  • +

    Eclipse 2018.12 integrated

    +
    +
      +
    • +

      CheckStyle Plugin updated.

      +
    • +
    • +

      SonarLint Plugin updated.

      +
    • +
    • +

      Git Plugin updated.

      +
    • +
    • +

      FindBugs Plugin updated.

      +
    • +
    • +

      CobiGen plugin updated.

      +
    • +
    +
    +
  • +
  • +

    Other Software

    +
    +
      +
    • +

      Visual Studio Code latest version included and pre-configured with the devonfw Platform Extension Pack.

      +
    • +
    • +

      Ant updated to latest.

      +
    • +
    • +

      Maven updated to latest.

      +
    • +
    • +

      Java updated to latest.

      +
    • +
    • +

      Nodejs LTS updated to latest.

      +
    • +
    • +

      @angular/cli included.

      +
    • +
    • +

      @devonfw/cicdgen included.

      +
    • +
    • +

      Yarn package manager updated.

      +
    • +
    • +

      Python3 updated.

      +
    • +
    • +

      Spyder3 IDE integrated in python3 installation updated.

      +
    • +
    • +

      devon4ng-application-template for Angular 8 at workspaces/examples

      +
    • +
    • +

      devon4ng-ionic-application-template for Ionic 4 at workspace/samples

      +
    • +
    +
    +
  • +
+
+
+
+

My Thai Star Sample Application

+
+

The new release of My Thai Star has focused on the following improvements:

+
+
+
    +
  • +

    Release 3.1.0.

    +
  • +
  • +

    devon4j:

    +
    +
      +
    • +

      devon4j 3.1.0 integrated.

      +
    • +
    • +

      Spring Boot 2.1.6 integrated.

      +
    • +
    • +

      SAP 4/HANA prediction use case.

      +
    • +
    • +

      Bug fixes.

      +
    • +
    +
    +
  • +
  • +

    devon4ng:

    +
    +
      +
    • +

      SAP 4/HANA prediction use case.

      +
    • +
    • +

      2FA toggleable (two factor authentication).

      +
    • +
    • +

      NgRx integration in process (PR #234).

      +
    • +
    +
    +
  • +
  • +

    devon4node

    +
    +
      +
    • +

      TypeScript 3.1.3.

      +
    • +
    • +

      Based on Nest framework.

      +
    • +
    • +

      Aligned with devon4j.

      +
    • +
    • +

      Complete backend implementation.

      +
    • +
    • +

      TypeORM integrated with SQLite database configuration.

      +
    • +
    • +

      Webpack bundler.

      +
    • +
    • +

      Nodemon runner.

      +
    • +
    • +

      Jest unit tests.

      +
    • +
    +
    +
  • +
  • +

    Mr.Checker

    +
    +
      +
    • +

      Example cases for end-to-end test.

      +
    • +
    • +

      Production line configuration.

      +
    • +
    • +

      CICD

      +
    • +
    • +

      Improved integration with Production Line

      +
    • +
    • +

      New Traefik load balancer and reverse proxy

      +
    • +
    • +

      New deployment from artifact

      +
    • +
    • +

      New CICD pipelines

      +
    • +
    • +

      New deployment pipelines

      +
    • +
    • +

      Automated creation of pipelines in Jenkins

      +
    • +
    +
    +
  • +
+
+
+
+

Documentation updates

+
+

This release addresses the new documentation workflow, being now possible to keep the documentation synced with any change. The new documentation includes the following contents:

+
+
+
    +
  • +

    Getting started

    +
  • +
  • +

    Contribution guide

    +
  • +
  • +

    Devcon

    +
  • +
  • +

    Release notes

    +
  • +
  • +

    devon4j documentation

    +
  • +
  • +

    devon4ng documentation

    +
  • +
  • +

    devon4net documentation

    +
  • +
  • +

    devonfw-shop-floor documentation

    +
  • +
  • +

    cicdgen documentation

    +
  • +
  • +

    devonfw testing with MrChecker

    +
  • +
  • +

    My Thai Star documentation

    +
  • +
+
+
+
+

devon4j

+
+

The following changes have been incorporated in devon4j:

+
+
+
    +
  • +

    Added Support for Java8 up to Java11

    +
  • +
  • +

    Upgrade to Spring Boot 2.1.6.

    +
  • +
  • +

    Upgrade to Spring 5.1.8

    +
  • +
  • +

    Upgrade to JPA 2.2

    +
  • +
  • +

    Upgrade to Hibernate 5.3

    +
  • +
  • +

    Upgrade to Dozer 6.4.1 (ATTENTION: Requires Migration, use devon-ide for automatic upgrade)

    +
  • +
  • +

    Many improvements to documentation (added JDK guide, architecture-mapping, JMS, etc.)

    +
  • +
  • +

    Completed support (JSON, Beanmapping) for pagination, IdRef, and java.time

    +
  • +
  • +

    Added MasterCto

    +
  • +
  • +

    For all details see milestone.

    +
  • +
+
+
+
+

devon4ng

+
+

The following changes have been incorporated in devon4ng:

+
+
+
    +
  • +

    Angular CLI 8,

    +
  • +
  • +

    Angular 8,

    +
  • +
  • +

    Angular Material 8,

    +
  • +
  • +

    Ionic 4,

    +
  • +
  • +

    Capacitor 1.0 as Cordova replacement,

    +
  • +
  • +

    NgRx 8 support for State Management,

    +
  • +
  • +

    devon4ng Angular application template updated to Angular 8 with visual improvements and bugfixes https://github.com/devonfw/devon4ng-application-template

    +
  • +
  • +

    devon4ng Ionic application template updated and improved https://github.com/devonfw/devon4ng-ionic-application-template

    +
  • +
  • +

    New devon4ng Angular application template with state management using Angular 8 and NgRx 8 https://github.com/devonfw/devon4ng-ngrx-template

    +
  • +
  • +

    New devon4ng library https://github.com/devonfw/devon4ng-library that includes the following libraries:

    +
    +
      +
    • +

      Cache Module for Angular 7+ projects.

      +
    • +
    • +

      Authorization Module for Angular 7+ projects.

      +
    • +
    +
    +
  • +
  • +

    New use cases with documentation and samples:

    +
    +
      +
    • +

      Web Components with Angular Elements

      +
    • +
    • +

      Initial configuration with App Initializer pattern

      +
    • +
    • +

      Error Handling

      +
    • +
    • +

      PWA with Angular and Ionic

      +
    • +
    • +

      Lazy Loading

      +
    • +
    • +

      Library construction

      +
    • +
    • +

      Layout with Angular Material

      +
    • +
    • +

      Theming with Angular Material

      +
    • +
    +
    +
  • +
+
+
+
+

devon4net

+
+

The following changes have been incorporated in devon4net:

+
+
+
    +
  • +

    New circuit breaker component to communicate microservices via HTTP

    +
  • +
  • +

    Resolved the update packages issue

    +
  • +
+
+
+
+

AppSec Quick Solution Guide

+
+

This release incorporates a new Solution Guide for Application Security based on the state of the art in OWASP-based application security. The purpose of this guide is to offer quick solutions for common application security issues for all applications based on devonfw. It’s often the case that we need our systems to comply with certain sets of security requirements and standards. Each of these requirements needs to be understood, addressed and converted to code or project activity. We want this guide to prevent the wheel from being reinvented over and over again and to give clear hints and solutions to common security problems.

+
+
+ +
+
+
+

CobiGen

+
+
    +
  • +

    CobiGen core new features:

    +
    +
      +
    • +

      CobiGen CLI: New command line interface for CobiGen. Using commands, you will be able to generate code the same way as you do with Eclipse. This means that you can use CobiGen on other IDEs like Visual Studio Code or IntelliJ. Please take a look into the documentation for more info.

      +
      +
        +
      • +

        Performance improves greatly in the CLI thanks to the lack of GUI.

        +
      • +
      • +

        You will be able to use path globs for selecting multiple input files.

        +
      • +
      • +

        We have implemented a search functionality so that you can easily search for increments or templates.

        +
      • +
      +
      +
    • +
    • +

      First steps taken on CobiGen refactoring: With the new refactoring we will be able to decouple CobiGen completely from the target and input language. This will facilitate the creation of parsers and mergers for any language.

      +
      +
        +
      • +

        NashornJS has been deprecated: It was used for executing JavaScript code inside JVM. With the refactoring, performance has improved on the TypeScript merger.

        +
      • +
      +
      +
    • +
    • +

      Improving CobiGen templates:

      +
      +
        +
      • +

        Removed Covalent from Angular templates as it is not compatible with Angular 8.

        +
      • +
      • +

        Added devon4ng-NgRx templates that implement reactive state management. Note: The TypeScript merger is currently being improved in order to accept NgRx. The current templates are set as overridable by default.

        +
      • +
      • +

        Test data builder templates now make use of Lambdas and Consumers.

        +
      • +
      • +

        CTOs and ETOs increments have been correctly separated.

        +
      • +
      +
      +
    • +
    • +

      TypeScript merger has been improved: Now it is possible to merge comments (like tsdoc) and enums.

      +
    • +
    • +

      OpenAPI parsing extended to read enums. Also fixed some bugs when no properties were set or when URLs were too short.

      +
    • +
    • +

      Java static and object initializers now get merged.

      +
    • +
    • +

      Fixed bugs when downloading and adapting templates.

      +
    • +
    +
    +
  • +
+
+
+
+

Devcon

+
+

A new version of Devcon has been released. Fixes and new features include:

+
+
+
    +
  • +

    Updated to match current devon4j

    +
  • +
  • +

    Update to download Linux distribution.

    +
  • +
  • +

    Custom modules creation improvements.

    +
  • +
  • +

    Code Migration feature added.

    +
  • +
  • +

    Bugfixes.

    +
  • +
+
+
+
+

Devonfw OSS Modules

+
+

Modules upgraded to be used in new devon4j projects:

+
+
+ +
+
+
+

devonfw shop floor

+
+
    +
  • +

    Industrialization oriented to configure the provisioning environment provided by Production Line and deploy applications on an OpenShift cluster.

    +
  • +
  • +

    Added Jenkinsfiles to configure automatically OpenShift environments to deploy devonfw applications.

    +
  • +
  • +

    Industrialization to start new projects and configure them with CICD.

    +
  • +
  • +

    Upgrade the documentation with getting started guide to configure CICD in any devonfw project and deploy it.

    +
  • +
  • +

    Added new tool cicdgen to generate CICD code/files.

    +
  • +
+
+
+
+

== cicdgen

+
+

cicdgen is a devonfw tool to generate all code/files related to CICD in your project. It’s based on Angular schematics and it has its own CLI. More information here.

+
+
+
    +
  • +

    CICD configuration for devon4j, devon4ng and devon4node projects

    +
  • +
  • +

    Option to deploy devonfw projects with Docker

    +
  • +
  • +

    Option to deploy devonfw projects with OpenShift

    +
  • +
+
+
+
+

Devonfw Testing

+ +
+
+

== Mr.Checker

+
+

The Mr.Checker Test Framework is an automated testing framework for functional testing of web applications, API web services, Service Virtualization, Security and in coming future native mobile apps, and databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions. Mr.Checker updates and improvements:

+
+
+
    +
  • +

    Examples available under embedded project “MrChecker-App-Under-Test” and in project wiki: https://github.com/devonfw/devonfw-testing/wiki

    +
  • +
  • +

    How to install:

    + +
  • +
  • +

    Release Note:

    +
    +
      +
    • +

      module selenium - 3.8.2.1:

      +
      +
        +
      • +

        possibility to define version of driver in properties.file

        +
      • +
      • +

        automatic driver download if the version is not specified

        +
      • +
      • +

        possibility to run with different browser options

        +
      • +
      • +

        module webAPI – 1.2.1:

        +
      • +
      • +

        possibility to connect to the remote WireMock server

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+ +
+
+

devonfw Release notes 3.0 “Fry”

+ +
+
+

Introduction

+
+

We are proud to announce the immediate release of devonfw version 3.0 (code named “Fry” during development). This version is the consolidation of Open Source, focused on the major namespace change ever in the platform, removing the OASP references and adopting the new devonfw names for each technical stack or framework.

+
+
+

The new stack names are the following:

+
+
+
    +
  • +

    devon4j, former oasp4j, is the new name for Java.

    +
  • +
  • +

    devon4ng, former oasp4js, is the new one for Angular.

    +
  • +
  • +

    devon4net, is the new .NET stack.

    +
  • +
  • +

    devon4X, is the new stack for Xamarin development.

    +
  • +
  • +

    devon4node, is the new devonfw incubator for node.js.

    +
  • +
+
+
+

The new devon4j version was created directly from the latest devon4j version (3.0.0). Hence it brings all the features and values that devon4j offered. However, the namespace migration was used to do some housekeeping and remove deprecated code as well as reduce dependencies. Therefore your data-access layer will no longer have to depend on any third party except for devon4j as well as of course the JPA. We also have improved the application template that now comes with a modern JSON logging ready for docker and logstash based environments.

+
+
+

To help you upgrading we introduced a migration feature in devcon. This can automatically migrate your code from devon4j (even older versions starting from 2.4.0) to the latest version of devon4j. There might be some small manual changes left to do but 90% of the migration will be done automatically for you.

+
+
+

Besides, the first version of the devonfw plugin for SonarQube has been released. It extends SonarQube with the ability to validate your code according to the devon4j architecture. More details at https://github.com/devonfw/sonar-devon-plugin.

+
+
+

This is the first release that integrates the new devonfw .NET framework, called devon4net, and Xamarin for mobile native development, devon4X. devon4net and devon4X are the Capgemini standard frameworks for .NET and Xamarin software development. With the two new family members devonfw provides guidance and acceleration for the major software development platforms in our industry. Their interoperability provides you the assurance your multichannel solution will be consistent across web and mobile channels.

+
+
+

“Fry” release contains lots of improvements in our Mr.Checker E2E Testing Framework, including a complete E2E sample inside our reference application My Thai Star. Besides Mr.Checker, we include as an incubator Testar, a test tool (and framework) to test applications at the GUI level whose objective is to solve part of the maintenance problem affecting tests by automatically generating test cases based on a structure that is automatically derived from the GUI. Testar is not included to replace Mr.Checker but rather to provide development teams with a series of interesting options which go beyond what Mr.Checker already provides.

+
+
+

Apart from Mr.Checker, engagements can now use Testar as an extra option for testing. This is a tool that enables the automated system testing of desktop, web and mobile applications at the GUI level. Testar has been added as an incubator to the platform awaiting further development during 2019.

+
+
+

The new incubator for node.js, called devon4node, has been included and implemented in several internal projects. This incubator is based on the Nest framework https://www.nestjs.com/. Nest is a framework for building efficient, scalable Node.js server-side applications. It uses progressive JavaScript, is built with TypeScript (preserves compatibility with pure JavaScript) and combines elements of OOP (Object Oriented Programming), FP (Functional Programming), and FRP (Functional Reactive Programming). Under the hood, Nest makes use of Express, but also provides compatibility with a wide range of other libraries (e.g. Fastify). This allows for easy use of the myriad third-party plugins which are available.

+
+
+

In order to facilitate the utilization of Microsoft Visual Studio Code in devonfw, we have developed and included the new devonfw Platform Extension Pack with lots of features to develop and test applications with this IDE in languages and frameworks such as TypeScript, JavaScript, .NET, Java, Rust, C++ and many more. More information at https://marketplace.visualstudio.com/items?itemName=devonfw.devonfw-extension-pack. Also, you can contribute to this extension in this GitHub repository https://github.com/devonfw/devonfw-extension-pack-vscode.

+
+
+

There is a whole range of new features and improvements which can be seen in that light. The My Thai Star sample app has now been upgraded to devon4j and devon4ng, a new devon4node backend implementation has been included that is seamless interchangeable, an E2E MrChecker sample project, CICD and deployment scripts and lots of bugs have been fixed.

+
+
+

Last but not least, the project wikis and the devonfw Guide have once again been updated accordingly before the big refactor that will be addressed in the following release in 2019.

+
+
+
+

Changes and new features

+ +
+
+

Devonfw dist

+
+
    +
  • +

    Eclipse 2018.9 integrated

    +
    +
      +
    • +

      CheckStyle Plugin updated.

      +
    • +
    • +

      SonarLint Plugin updated.

      +
    • +
    • +

      Git Plugin updated.

      +
    • +
    • +

      FindBugs Plugin updated.

      +
    • +
    • +

      CobiGen plugin updated.

      +
    • +
    +
    +
  • +
  • +

    Other Software

    +
    +
      +
    • +

      Visual Studio Code latest version included and pre-configured with the devonfw Platform Extension Pack.

      +
    • +
    • +

      Ant updated to latest.

      +
    • +
    • +

      Maven updated to latest.

      +
    • +
    • +

      Java updated to latest.

      +
    • +
    • +

      Nodejs LTS updated to latest.

      +
    • +
    • +

      @angular/cli included.

      +
    • +
    • +

      Yarn package manager updated.

      +
    • +
    • +

      Python3 updated.

      +
    • +
    • +

      Spyder3 IDE integrated in python3 installation updated.

      +
    • +
    • +

      devon4ng-application-template for Angular 7 at workspaces/examples

      +
    • +
    • +

      devon4ng-ionic-application-template for Ionic 3.20 at workspace/samples

      +
    • +
    +
    +
  • +
+
+
+
+

My Thai Star Sample Application

+
+

The new release of My Thai Star has focused on the following improvements:

+
+
+
    +
  • +

    Release 1.12.2.

    +
  • +
  • +

    devon4j:

    +
    +
      +
    • +

      devon4j 3.0.0 integrated.

      +
    • +
    • +

      Spring Boot 2.0.4 integrated.

      +
    • +
    • +

      Spring Data integration.

      +
    • +
    • +

      New pagination and search system.

      +
    • +
    • +

      Bug fixes.

      +
    • +
    +
    +
  • +
  • +

    devon4ng:

    +
    +
      +
    • +

      Client devon4ng updated to Angular 7.

      +
    • +
    • +

      Angular Material and Covalent UI frameworks updated.

      +
    • +
    • +

      Electron framework integrated.

      +
    • +
    +
    +
  • +
  • +

    devon4node

    +
    +
      +
    • +

      TypeScript 3.1.3.

      +
    • +
    • +

      Based on Nest framework.

      +
    • +
    • +

      Aligned with devon4j.

      +
    • +
    • +

      Complete backend implementation.

      +
    • +
    • +

      TypeORM integrated with SQLite database configuration.

      +
    • +
    • +

      Webpack bundler.

      +
    • +
    • +

      Nodemon runner.

      +
    • +
    • +

      Jest unit tests.

      +
    • +
    +
    +
  • +
  • +

    Mr.Checker

    +
    +
      +
    • +

      Example cases for end-to-end test.

      +
    • +
    • +

      Production line configuration.

      +
    • +
    • +

      CICD

      +
    • +
    • +

      Improved integration with Production Line

      +
    • +
    • +

      New deployment from artifact

      +
    • +
    • +

      New CICD pipelines

      +
    • +
    • +

      New deployment pipelines

      +
    • +
    • +

      Automated creation of pipelines in Jenkins

      +
    • +
    +
    +
  • +
+
+
+
+

Documentation updates

+
+

The following contents in the devonfw guide have been updated:

+
+
+
    +
  • +

    Upgrade of all the new devonfw named assets.

    +
    +
      +
    • +

      devon4j

      +
    • +
    • +

      devon4ng

      +
    • +
    • +

      Mr.Checker

      +
    • +
    +
    +
  • +
  • +

    Electron integration cookbook.

    +
  • +
  • +

    Updated cookbook about Swagger.

    +
  • +
  • +

    Removed deprecated entries.

    +
  • +
+
+
+

Apart from this the documentation has been reviewed and some typos and errors have been fixed.

+
+
+

The current development of the guide has been moved to https://github.com/devonfw-forge/devon-guide/wiki in order to be available as the rest of OSS assets.

+
+
+
+

devon4j

+
+

The following changes have been incorporated in devon4j:

+
+
+
    +
  • +

    Spring Boot 2.0.4 Integrated.

    +
  • +
  • +

    Spring Data layer Integrated.

    +
  • +
  • +

    Decouple mmm.util.*

    +
  • +
  • +

    Removed deprecated restaurant sample.

    +
  • +
  • +

    Updated Pagination support for Spring Data

    +
  • +
  • +

    Add support for hana as dbType.

    +
  • +
  • +

    Bugfixes.

    +
  • +
+
+
+
+

devon4ng

+
+

The following changes have been incorporated in devon4ng:

+
+
+ +
+
+
+

devon4net

+
+

Some of the highlights of devon4net 1.0 are:

+
+
+
    +
  • +

    External configuration file for each environment.

    +
  • +
  • +

    .NET Core 2.1.X working solution (Latest 2.1.402).

    +
  • +
  • +

    Packages and solution templates published on nuget.org.

    +
  • +
  • +

    Full components customization by config file.

    +
  • +
  • +

    Docker ready (My Thai Star sample fully working on docker).

    +
  • +
  • +

    Port specification by configuration.

    +
  • +
  • +

    Dependency injection by Microsoft .NET Core.

    +
  • +
  • +

    Automapper support.

    +
  • +
  • +

    Entity framework ORM (Unit of work, async methods).

    +
  • +
  • +

    .NET Standard library 2.0 ready.

    +
  • +
  • +

    Multi-platform support: Windows, Linux, Mac.

    +
  • +
  • +

    Samples: My Thai Star back-end, Google API integration, Azure login, AOP with Castle.

    +
  • +
  • +

    Documentation site.

    +
  • +
  • +

    SPA page support.

    +
  • +
+
+
+

And included the following features:

+
+
+
    +
  • +

    Logging:

    +
    +
      +
    • +

      Text File.

      +
    • +
    • +

      Sqlite database support.

      +
    • +
    • +

      Serilog Seq Server support.

      +
    • +
    • +

      Graylog integration ready through TCP/UDP/HTTP protocols.

      +
    • +
    • +

      API Call params interception (simple and compose objects).

      +
    • +
    • +

      API error exception management.

      +
    • +
    +
    +
  • +
  • +

    Swagger:

    +
    +
      +
    • +

      Swagger auto generating client from comments and annotations on controller classes.

      +
    • +
    • +

      Full swagger client customization (Version, Title, Description, Terms, License, Json endpoint definition).

      +
    • +
    +
    +
  • +
  • +

    JWT:

    +
    +
      +
    • +

      Issuer, audience, token expiration customization by external file configuration.

      +
    • +
    • +

      Token generation via certificate.

      +
    • +
    • +

      MVC inherited classes to access JWT user properties.

      +
    • +
    • +

      API method security access based on JWT Claims.

      +
    • +
    +
    +
  • +
  • +

    CORS:

    +
    +
      +
    • +

      Simple CORS definition ready.

      +
    • +
    • +

      Multiple CORS domain origin definition with specific headers and verbs.

      +
    • +
    +
    +
  • +
  • +

    Headers:

    +
    +
      +
    • +

      Automatic header injection with middleware.

      +
    • +
    • +

      Supported header definitions: AccessControlExposeHeader, StrictTransportSecurityHeader, XFrameOptionsHeader, XssProtectionHeader, XContentTypeOptionsHeader, ContentSecurityPolicyHeader, PermittedCrossDomainPoliciesHeader, ReferrerPolicyHeader.

      +
    • +
    +
    +
  • +
  • +

    Reporting server:

    +
    +
      +
    • +

      Partial implementation of reporting server based on My-FyiReporting (now runs on linux container).

      +
    • +
    +
    +
  • +
  • +

    Testing:

    +
    +
      +
    • +

      Integration test template with sqlite support.

      +
    • +
    • +

      Unit test template.

      +
    • +
    • +

      Moq, xunit frameworks integrated.

      +
    • +
    +
    +
  • +
+
+
+
+

devon4X

+
+

Some of the highlights of the new devonfw Xamarin framework are:

+
+
+
    +
  • +

    Based on Excalibur framework by Hans Harts (https://github.com/Xciles/Excalibur).

    +
  • +
  • +

    Updated to latest MVVMCross 6 version.

    +
  • +
  • +

    My Thai Star Excalibur forms sample.

    +
  • +
  • +

    Xamarin Forms template available on nuget.org.

    +
  • +
+
+
+
+

AppSec Quick Solution Guide

+
+

This release incorporates a new Solution Guide for Application Security based on the state of the art in OWASP based application security. The purpose of this guide is to offer quick solutions for common application security issues for all applications based on devonfw. It’s often the case that we need our systems to comply with certain sets of security requirements and standards. Each of these requirements needs to be understood, addressed and converted to code or project activity. We want this guide to prevent the wheel from being reinvented over and over again and to give clear hints and solutions to common security problems.

+
+
+ +
+
+
+

CobiGen

+
+
    +
  • +

    CobiGen core new features:

    +
    +
      +
    • +

      CobiGen_Templates will not need to be imported into the workspace anymore. However, If you want to adapt them, you can still click on a button that automatically imports them for you.

      +
    • +
    • +

      CobiGen_Templates can be updated by one-click whenever the user wants to have the latest version.

      +
    • +
    • +

      Added the possibility to reference external increments on configuration level. This is used for reducing the number of duplicated templates.

      +
    • +
    +
    +
  • +
  • +

    CobiGen_Templates project and docs updated:

    +
    +
      +
    • +

      Spring standard has been followed better than ever.

      +
    • +
    • +

      Interface templates get automatically relocated to the api project. Needed for following the new devon4j standard.

      +
    • +
    +
    +
  • +
  • +

    CobiGen Angular:

    +
    +
      +
    • +

      Angular 7 generation improved based on the updated application template.

      +
    • +
    • +

      Pagination changed to fit Spring standard.

      +
    • +
    +
    +
  • +
  • +

    CobiGen Ionic: Pagination changed to fit Spring standard.

    +
  • +
  • +

    CobiGen OpenAPI plugin released with multiple bug-fixes and other functionalities like:

    +
    +
      +
    • +

      Response and parameter types are parsed properly when they are a reference to an entity.

      +
    • +
    • +

      Parameters defined on the body of a request are being read correctly.

      +
    • +
    +
    +
  • +
+
+
+
+

Devcon

+
+

A new version of Devcon has been released. Fixes and new features include:

+
+
+
    +
  • +

    Updated to match current devon4j

    +
  • +
  • +

    Update to download Linux distribution.

    +
  • +
  • +

    Custom modules creation improvements.

    +
  • +
  • +

    Code Migration feature added

    +
  • +
  • +

    Bugfixes.

    +
  • +
+
+
+
+

Devonfw OSS Modules

+
+

Modules upgraded to be used in new devon4j projects:

+
+
+
    +
  • +

    Reporting module

    +
  • +
  • +

    WinAuth AD Module

    +
  • +
  • +

    WinAuth SSO Module

    +
  • +
  • +

    I18n Module

    +
  • +
  • +

    Async Module

    +
  • +
  • +

    Integration Module

    +
  • +
  • +

    Microservice Module

    +
  • +
  • +

    Compose for Redis Module

    +
  • +
+
+ +
+
+

Devonfw Testing

+ +
+
+

== Mr.Checker

+
+

The Mr.Checker Test Framework is an automated testing framework for functional testing of web applications, API web services, Service Virtualization, Security and in coming future native mobile apps, and databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions. Mr.Checker updates and improvements:

+
+
+ +
+
+
+

== Testar

+
+

We have added Test*, Testar, as an incubator to the available test tools within devonfw. This ground-breaking tool is being developed by the Technical University of Valencia (UPV). In 2019 Capgemini will co-develop Testar with the UPV.

+
+
+

Testar is a tool that enables the automated system testing of desktop, web and mobile applications at the GUI level.

+
+
+

With Testar, you can start testing immediately. It automatically generates and executes test sequences based on a structure that is automatically derived from the UI through the accessibility API. Testar can detect the violation of general-purpose system requirements and you can use plugins to customize your tests.

+
+
+

You do not need test scripts or their maintenance. The tests are random and are generated and executed automatically.

+
+
+

If you need to do directed tests you can create scripts to test specific requirements of your application.

+
+
+

Testar is included in the devonfw distro or can be downloaded from https://testar.org/download/.

+
+
+

The GitHub repository can be found at: https://github.com/TESTARtool/TESTAR.

+
+ +
+
+

devonfw Release notes 2.4 “EVE”

+ +
+
+

Introduction

+
+

We are proud to announce the immediate release of devonfw version 2.4 (code named “EVE” during development). This version is the first one that fully embraces Open Source, including components like the documentation assets and CobiGen. Most of the IP (Intellectual Property or proprietary) part of devonfw are now published under the Apache License version 2.0 (with the documentation under the Creative Commons License (Attribution-NoDerivatives)). This includes the GitHub repositories where all the code and documentation is located. All of these repositories are now open for public viewing as well.

+
+
+

“EVE” contains a slew of new features but in essence it is already driven by what we expect to be the core focus of 2018: strengthening the platform and improving quality.

+
+
+

This release is also fully focused on deepening the platform rather than expanding it. That is to say: we have worked on improving existing features rather than adding new ones and strengthen the qualitative aspects of the software development life cycle, i.e. security, testing, infrastructure (CI, provisioning) etc.

+
+
+

“EVE” already is very much an example of this. This release contains the Allure Test Framework (included as an incubator in version 2.3) update called MrChecker Test Framework. MrChecker is an automated testing framework for functional testing of web applications, API web services, Service Virtualization, Security and in coming future native mobile apps, and databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions.

+
+
+

Another incubator being updated is the devonfw Shop Floor, which is intended to be a compilation of DevOps experiences from the devonfw perspective. A new part of the release is the new Solution Guide for Application Security based on the state of the art in OWASP based application security.

+
+
+

There is a whole range of new features and improvements which can be seen in that light. devon4j 2.6 changes and improves the package structure of the core Java framework. The My Thai Star sample app has now been upgraded to Angular 6, lots of bugs have been fixed and the devonfw Guide has once again been improved.

+
+
+

Last but not least, this release contains the formal publication of the devonfw Methodology or The Accelerated Solution Design - an Industry Standards based solution design and specification (documentation) methodology for Agile (and less-than-agile) projects.

+
+
+
+

Changes and new features

+ +
+
+

devonfw 2.4 is Open Source

+
+

This version is the first release of devonfw that fully embraces Open Source, including components like the documentation assets and CobiGen. This is done in response to intensive market pressure and demands from the MUs (Public Sector France, Netherlands).

+
+
+

Most of the IP (Intellectual Property or proprietary) part of devonfw are now published under the Apache License version 2.0 (with the documentation under the Creative Commons License (Attribution-NoDerivatives)).

+
+
+

So you can now use the devonfw distribution (the "zip" file), CobiGen, the devonfw modules and all other components without any worry to expose the client unwittingly to Capgemini IP.

+
+
+

Note: there are still some components which are IP and are not published under an OSS license. The class room trainings, the Sencha components and some CobiGen templates. But these are not included in the distribution nor the documentation and are now completely maintained separately.

+
+
+
+

devonfw dist

+
+
    +
  • +

    Eclipse Oxygen integrated

    +
    +
      +
    • +

      CheckStyle Plugin updated.

      +
    • +
    • +

      SonarLint Plugin updated.

      +
    • +
    • +

      Git Plugin updated.

      +
    • +
    • +

      FindBugs Plugin updated.

      +
    • +
    • +

      CobiGen plugin updated.

      +
    • +
    +
    +
  • +
  • +

    Other Software

    +
    +
      +
    • +

      Visual Studio Code latest version included and pre-configured with https://devonfw.com/website/pages/docs/cli.adoc.html#vscode.adoc

      +
    • +
    • +

      Ant updated to latest.

      +
    • +
    • +

      Maven updated to latest.

      +
    • +
    • +

      Java updated to latest.

      +
    • +
    • +

      Nodejs LTS updated to latest.

      +
    • +
    • +

      @angular/cli included.

      +
    • +
    • +

      Yarn package manager updated.

      +
    • +
    • +

      Python3 updated.

      +
    • +
    • +

      Spyder3 IDE integrated in python3 installation updated.

      +
    • +
    • +

      devon4ng-application-template for Angular 6 at workspaces/examples

      +
    • +
    +
    +
  • +
+
+
+
+

My Thai Star Sample Application

+
+

The new release of My Thai Star has focused on the following improvements:

+
+
+
    +
  • +

    Release 1.6.0.

    +
  • +
  • +

    Travis CI integration with Docker. Now we get a valuable feedback of the current status and when collaborators make pull requests.

    +
  • +
  • +

    Docker compose deployment.

    +
  • +
  • +

    devon4j:

    +
    +
      +
    • +

      Flyway upgrade from 3.2.1 to 4.2.0

      +
    • +
    • +

      Bug fixes.

      +
    • +
    +
    +
  • +
  • +

    devon4ng:

    +
    +
      +
    • +

      Client devon4ng updated to Angular 6.

      +
    • +
    • +

      Frontend translated into 9 languages.

      +
    • +
    • +

      Improved mobile and tablet views.

      +
    • +
    • +

      Routing fade animations.

      +
    • +
    • +

      Compodoc included to generate dynamically frontend documentation.

      +
    • +
    +
    +
  • +
+
+
+
+

Documentation updates

+
+

The following contents in the devonfw guide have been updated:

+
+
+
    +
  • +

    devonfw OSS modules documentation.

    +
  • +
  • +

    Creating a new devon4j application.

    +
  • +
  • +

    How to update Angular CLI in devonfw.

    +
  • +
  • +

    Include Angular i18n.

    +
  • +
+
+
+

Apart from this the documentation has been reviewed and some typos and errors have been fixed.

+
+
+

The current development of the guide has been moved to https://github.com/devonfw/devonfw-guide/wiki in order to be available as the rest of OSS assets.

+
+
+
+

devon4j

+
+

The following changes have been incorporated in devon4j:

+
+
+
    +
  • +

    Integrate batch with archetype.

    +
  • +
  • +

    Application module structure and dependencies improved.

    +
  • +
  • +

    Issues with Application Template fixed.

    +
  • +
  • +

    Solved issue where Eclipse maven template devon4j-template-server version 2.4.0 produced pom with missing dependency spring-boot-starter-jdbc.

    +
  • +
  • +

    Solved datasource issue with project archetype 2.4.0.

    +
  • +
  • +

    Decouple archetype from sample (restaurant).

    +
  • +
  • +

    Upgrade to Flyway 4.

    +
  • +
  • +

    Fix for issue with Java 1.8 and QueryDSL #599.

    +
  • +
+
+
+
+

devon4ng

+
+

The following changes have been incorporated in devon4ng:

+
+
+ +
+
+
+

AppSec Quick Solution Guide

+
+

This release incorporates a new Solution Guide for Application Security based on the state of the art in OWASP based application security. The purpose of this guide is to offer quick solutions for common application security issues for all applications based on devonfw. It’s often the case that we need our systems to comply with certain sets of security requirements and standards. Each of these requirements needs to be understood, addressed and converted to code or project activity. We want this guide to prevent the wheel from being reinvented over and over again and to give clear hints and solutions to common security problems.

+
+
+ +
+
+
+

CobiGen

+
+
    +
  • +

    CobiGen_Templates project and docs updated.

    +
  • +
  • +

    CobiGen Angular 6 generation improved based on the updated application template

    +
  • +
  • +

    CobiGen Ionic CRUD App generation based on Ionic application template. Although a first version was already implemented, it has been deeply improved:

    +
    +
      +
    • +

      Changed the code structure to comply with Ionic standards.

      +
    • +
    • +

      Added pagination.

      +
    • +
    • +

      Pull-to-refresh, swipe and attributes header implemented.

      +
    • +
    • +

      Code documented and JSDoc enabled (similar to Javadoc)

      +
    • +
    +
    +
  • +
  • +

    CobiGen TSPlugin Interface Merge support.

    +
  • +
  • +

    CobiGen XML plugin comes out with new cool features:

    +
    +
      +
    • +

      Enabled the use of XPath within variable assignment. You can now retrieve almost any data from an XML file and store it on a variable for further processing on the templates. Documented here.

      +
    • +
    • +

      Able to generate multiple output files per XML input file.

      +
    • +
    • +

      Generating code from UML diagrams. XMI files (standard XML for UML) can be now read and processed. This means that you can develop templates and generate code from an XMI like class diagrams.

      +
    • +
    +
    +
  • +
  • +

    CobiGen OpenAPI plugin released with multiple bug-fixes and other functionalities like:

    +
    +
      +
    • +

      Assigning global and local variables is now possible. Therefore you can set any string for further processing on the templates. For instance, changing the root package name of the generated files. Documented here.

      +
    • +
    • +

      Enabled having a class with more than one relationship to another class (more than one property of the same type).

      +
    • +
    +
    +
  • +
  • +

    CobiGen Text merger plugin has been extended and now it is able to merge text blocks. This means, for example, that the generation and merging of adoc documentation is possible. Documented here.

    +
  • +
+
+
+
+

Devcon

+
+

A new version of Devcon has been released. Fixes and new features include:

+
+
+
    +
  • +

    Now Devcon is OSS, with public repository at https://github.com/devonfw/devcon

    +
  • +
  • +

    Updated to match current devon4j

    +
  • +
  • +

    Update to download Linux distribution.

    +
  • +
  • +

    Custom modules creation improvements.

    +
  • +
  • +

    Bugfixes.

    +
  • +
+
+
+
+

devonfw OSS Modules

+
+
    +
  • +

    Existing devonfw IP modules have been moved to OSS.

    +
    +
      +
    • +

      They can now be accessed in any devon4j project as optional dependencies from Maven Central.

      +
    • +
    • +

      The repository now has public access https://github.com/devonfw/devon

      +
    • +
    +
    +
  • +
  • +

    Starters available for modules:

    +
    +
      +
    • +

      Reporting module

      +
    • +
    • +

      WinAuth AD Module

      +
    • +
    • +

      WinAuth SSO Module

      +
    • +
    • +

      I18n Module

      +
    • +
    • +

      Async Module

      +
    • +
    • +

      Integration Module

      +
    • +
    • +

      Microservice Module

      +
    • +
    • +

      Compose for Redis Module

      +
    • +
    +
    +
  • +
+
+ +
+
+

devonfw Shop Floor

+
+
    +
  • +

    devonfw Shop Floor 4 Docker

    +
    +
      +
    • +

      Docker-based CICD environment

      +
      +
        +
      • +

        docker-compose.yml (installation file)

        +
      • +
      • +

        dsf4docker.sh (installation script)

        +
      • +
      • +

        Service Integration (documentation in Wiki)

        +
      • +
      +
      +
    • +
    • +

      devonfw projects build and deployment with Docker

      +
      +
        +
      • +

        Dockerfiles (multi-stage building)

        +
        +
          +
        • +

          Build artifact (NodeJS for Angular and Maven for Java)

          +
        • +
        • +

          Deploy built artifact (NGINX for Angular and Tomcat for Java)

          +
        • +
        • +

          NGINX Reverse-Proxy to redirect traffic between both Angular client and Java server containers.

          +
        • +
        +
        +
      • +
      +
      +
    • +
    +
    +
  • +
  • +

    devonfw Shop Floor 4 OpenShift

    +
    +
      +
    • +

      devonfw projects deployment in OpenShift cluster

      +
      +
        +
      • +

        s2i images

        +
      • +
      • +

        OpenShift templates

        +
      • +
      • +

        Video showcase (OpenShift Origin 3.6)

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+

This incubator is intended to be a compilation of DevOps experiences from the devonfw perspective. “How we use our devonfw projects in DevOps environments”. Integration with the Production Line, creation and service integration of a Docker-based CI environment and deploying devonfw applications in an OpenShift Origin cluster using devonfw templates. +See: https://github.com/devonfw/devonfw-shop-floor

+
+
+
+

devonfw Testing

+
+

The MrChecker Test Framework is an automated testing framework for functional testing of web applications, API web services, Service Virtualization, Security and in coming future native mobile apps, and databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions.

+
+
+
    +
  • +

    Examples available under embedded project “MrChecker-App-Under-Test” and in project wiki: https://github.com/devonfw/devonfw-testing/wiki

    +
  • +
  • +

    How to install:

    + +
  • +
  • +

    Release Note:

    +
    +
      +
    • +

      module core - 4.12.0.8:

      +
      +
        +
      • +

        fixes on getting Environment values

        +
      • +
      • +

        top-notch example of how to keep vulnerable data, like passwords, in the repo

        +
      • +
      +
      +
    • +
    • +

      module selenium - 3.8.1.8:

      +
      +
        +
      • +

        browser driver auto downloader

        +
      • +
      • +

        list of out-of-the-box examples to use in any web page

        +
      • +
      +
      +
    • +
    • +

      module webAPI - ver. 1.0.2 :

      +
      +
        +
      • +

        api service virtualization with REST and SOAP examples

        +
      • +
      • +

        api service virtualization with dynamic arguments

        +
      • +
      • +

        REST working test examples with page object model

        +
      • +
      +
      +
    • +
    • +

      module security - 1.0.1 (security tests against My Thai Star)

      +
    • +
    • +

      module DevOps :

      +
      +
        +
      • +

        dockerfile for Test environment execution

        +
      • +
      • +

        CI + CD as Jenkinsfile code

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+

devonfw methodology: Accelerated Solution Design

+
+

One of the prime challenges in Distributed Agile Delivery is the maintenance of a common understanding and unity of intent among all participants in the process of creating a product. That is: how can you guarantee that different parties in the client, different providers, all in different locations and time zones during a particular period of time actually understand the requirements of the client, the proposed solution space and the state of implementation.

+
+
+

We offer the Accelerated Solution Design as a possible answer to these challenges. The ASD is carefully designed to be a practical guideline that fosters and ensures the collaboration and communication among all team members.

+
+
+

The Accelerated Solution Design is:

+
+
+
    +
  • +

    A practical guideline rather than a “methodology”

    +
  • +
  • +

    Based on industry standards rather than proprietary methods

    +
  • +
  • +

    Consisting of an evolving, “living”, document set rather than a static, fixed document

    +
  • +
  • +

    Encapsulating the business requirements, functional definitions as well as Architecture design

    +
  • +
  • +

    Based on the intersection of Lean, Agile, DDD and User Story Mapping

    +
  • +
+
+
+

And further it is based on the essential belief or paradigm that ASD should be:

+
+
+
    +
  • +

    Focused on the design (definition) of the “externally observable behavior of a system”

    +
  • +
  • +

    Promoting communication and collaboration between team members

    +
  • +
  • +

    Guided by prototypes

    +
  • +
+
+ + +
+
+

devonfw Release notes 2.3 "Dash"

+ +
+
+

Release: improving and strengthening the Platform

+
+

We are proud to announce the immediate release of devonfw version 2.3 (code named “Dash” during development). This release comes with a bit of a delay as we decided to wait for the publication of devon4j 2.5. “Dash” contains a slew of new features but in essence it is already driven by what we expect to be the core focus of 2018: strengthening the platform and improving quality.

+
+
+

After one year and a half of rapid expansion, we expect the next release(s) of the devonfw 2.x series to be fully focused on deepening the platform rather than expanding it. That is to say: we should work on improving existing features rather than adding new ones and strengthen the qualitative aspects of the software development life cycle, i.e. testing, infrastructure (CI, provisioning) etc.

+
+
+

“Dash” already is very much an example of this. This release contains the Allure Test Framework as an incubator. This is an automated testing framework for functional testing of web applications. Another incubator is the devonfw Shop Floor, which is intended to be a compilation of DevOps experiences from the devonfw perspective. And based on this devonfw has been OpenShift Primed (“certified”) by Red Hat.

+
+
+

There is a whole range of new features and improvements which can be seen in that light. devon4j 2.5 changes and improves the package structure of the core Java framework. The My Thai Star sample app has now been fully integrated in the different frameworks and the devonfw Guide has once again been significantly expanded and improved.

+
+
+
+

An industrialized platform for the ADcenter

+
+

Although less visible to the overall devonfw community, an important driving force was (meaning that lots of work has been done in the context of) the creation of the ADcenter concept towards the end of 2017. Based on a radical transformation of on/near/offshore software delivery, the focus of the ADcenters is to deliver agile & accelerated “Rightshore” services with an emphasis on:

+
+
+
    +
  • +

    Delivering Business Value and optimized User Experience

    +
  • +
  • +

    Innovative software development with state of the art technology

    +
  • +
  • +

    Highly automated devops; resulting in lower costs & shorter time-to-market

    +
  • +
+
+
+

The first two ADcenters, in Valencia (Spain) and Bangalore (India), are already servicing clients all over Europe - Germany, France, Switzerland and the Netherlands - while ADcenter aligned production teams are currently working for Capgemini UK as well (through Spain). Through the ADcenter, Capgemini establishes industrialized innovation; designed for & with the user. The availability of platforms for industrialized software delivery like devonfw and the Production Line has allowed us to train and make available over 150 people in a very short time.

+
+
+

The creation of the ADcenter in such a short time is visible proof that we’re getting closer to a situation where devonfw and Production Line are turning into the default development platform for APPS2, thereby standardizing all aspects of the software development life cycle: from training and design, architecture, devops and development, all the way up to QA and deployment.

+
+
+
+

Changes and new features

+ +
+
+

devonfw dist

+
+

The devonfw dist, or distribution, i.e. the central zip file which contains the main working environment for the devonfw developer, has been significantly enhanced. New features include:

+
+
+
    +
  • +

    Eclipse Oxygen integrated

    +
    +
      +
    • +

      CheckStyle Plugin installed and configured

      +
    • +
    • +

      SonarLint Plugin installed and configured

      +
    • +
    • +

      Git Plugin installed

      +
    • +
    • +

      FindBugs replaced by SpotBugs and configured

      +
    • +
    • +

      Tomcat8 specific Oxygen configuration

      +
    • +
    • +

      CobiGen Plugin installed

      +
    • +
    +
    +
  • +
  • +

    Other Software

    +
    +
      +
    • +

      Cmder integrated (when console.bat launched)

      +
    • +
    • +

      Visual Studio Code latest version included and pre-configured with https://github.com/devonfw/extension-pack-vscode

      +
    • +
    • +

      Ant updated to latest.

      +
    • +
    • +

      Maven updated to latest.

      +
    • +
    • +

      Java updated to latest.

      +
    • +
    • +

      Nodejs LTS updated to latest.

      +
    • +
    • +

      @angular/cli included.

      +
    • +
    • +

      Yarn package manager included.

      +
    • +
    • +

      Python3 integrated

      +
    • +
    • +

      Spyder3 IDE integrated in python3 installation

      +
    • +
    • +

      devon4ng-application-template for Angular5 at workspaces/examples

      +
    • +
    • +

      Devon4sencha starter templates updated

      +
    • +
    +
    +
  • +
+
+
+
+

devon4j 2.5

+ +
+
+

== Support for JAX-RS & JAX-WS clients

+
+

With the aim to enhance the ease in consuming RESTful and SOAP web services, JAX-RS and JAX-WS clients have been introduced. They enable developers to concisely and efficiently implement portable client-side solutions that leverage existing and well-established client-side HTTP connector implementations. Furthermore, the getting started time for consuming web services has been considerably reduced with the default configuration out-of-the-box which can be tweaked as per individual project requirements.

+
+ +
+
+

== Separate security logs for devon4j log component

+
+

Based on OWASP(Open Web Application Security Project), devon4j aims to give developers more control and flexibility with the logging of security events and tracking of forensic information. Furthermore, it helps classifying the information in log messages and applying masking when necessary. It provides powerful security features while based on set of logging APIs developers are already familiar with over a decade of their experience with Log4J and its successors.

+
+
+
+

== Support for Microservices

+
+

Integration of a devon4j application into a Microservices environment can now be leveraged with this release of devon4j. Introduction of service clients for RESTful and SOAP web services based on Java EE give developers agility and ease to access microservices in the Devon framework. It significantly cuts down the efforts on part of developers around boilerplate code and stresses more focus on the business code improving overall efficiency and quality of deliverables.

+
+
+
+

Cobigen

+
+

A new version of Cobigen has been included. New features include:

+
+
+ +
+
+
+

My Thai Star Sample Application

+
+

From this release on, the My Thai Star application has been fully integrated in the different frameworks in the platform. Furthermore, a more modularized approach has been followed in the current release of the My Thai Star application to decouple the client from implementation details, which provides better encapsulation of code and dependency management for API and implementation classes. This has been achieved with the creation of a new “API” module that contains interfaces for REST services and corresponding Request/Response objects, with the existing “Core” module being dependent on the “API” module. To read further you can follow the link https://github.com/devonfw/my-thai-star/wiki/java-design#basic-architecture-details

+
+
+

Furthermore: an email and Twitter micro service were integrated in my-thai-star. This is just for demonstration purposes. A full micro service framework is already part of devon4j 2.5.0

+
+
+
+

Documentation refactoring

+
+

The complete devonfw guide is restructured and refactored. Getting started guides are added for an easy start with devonfw. The new Tutorial has been integrated with the existing devonfw Guide, whereby existing chapters of the previous tutorial were converted to Cookbook chapters. Asciidoctor is used for devonfw guide PDF generation. See: https://github.com/devonfw/devonfw-guide/wiki

+
+
+
+

devon4ng

+
+

The following changes have been incorporated in devon4ng:

+
+
+
    +
  • +

    Angular CLI 1.6.0,

    +
  • +
  • +

    Angular 5.1,

    +
  • +
  • +

    Angular Material 5 and Covalent 1.0.0 RC1,

    +
  • +
  • +

    PWA enabled,

    +
  • +
  • +

    Core and Shared Modules included to follow the recommended Angular projects structure,

    +
  • +
  • +

    Yarn and NPM compliant since both lock files are included in order to get a stable installation.

    +
  • +
+
+
+
+

Admin interface for devon4j apps

+
+

The new version includes an integration of an admin interface for devon4j apps (Spring Boot). This module is based on CodeCentric’s Spring Boot Admin (https://github.com/codecentric/spring-boot-admin).

+
+
+
+

Devcon

+
+

A new version of Devcon has been released. Fixes and new features include:

+
+
+
    +
  • +

    Renaming of system Commands.

    +
  • +
  • +

    New menu has been added - “other modules”, if menus are more than 10, other modules will display some menus.

    +
  • +
  • +

    A progress bar has been added for installing the distribution

    +
  • +
+
+
+
+

devonfw Modules

+
+

Existing devonfw modules can now be accessed with the help of starters following the namespace devonfw-<module_name>-starter. Starters available for modules:

+
+
+
    +
  • +

    Reporting module

    +
  • +
  • +

    WinAuth AD Module

    +
  • +
  • +

    WinAuth SSO Module

    +
  • +
  • +

    I18n Module

    +
  • +
  • +

    Async Module

    +
  • +
  • +

    Integration Module

    +
  • +
  • +

    Microservice Module

    +
  • +
  • +

    Compose for Redis Module

    +
  • +
+
+ +
+
+

devonfw Shop Floor

+
+

This incubator is intended to be a compilation of DevOps experiences from the devonfw perspective. “How we use our devonfw projects in DevOps environments”. Integration with the Production Line, creation and service integration of a Docker-based CI environment and deploying devonfw applications in an OpenShift Origin cluster using devonfw templates.

+
+ +
+
+

devonfw-testing

+
+

The Allure Test Framework is an automated testing framework for functional testing of web applications and, in the near future, native mobile apps, web services and databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions.

+
+
+
    +
  • +

    Examples available under embedded project “Allure-App-Under-Test” and in project wiki: https://github.com/devonfw/devonfw-testing/wiki

    +
  • +
  • +

    How to install: https://github.com/devonfw/devonfw-testing/wiki/How-to-install

    +
  • +
  • +

    Release Notes:

    +
    +
      +
    • +

      Core Module – ver.4.12.0.3:

      +
      +
        +
      • +

        Test report with logs and/or screenshots

        +
      • +
      • +

        Test groups/tags

        +
      • +
      • +

        Data Driven (inside test case, external file)

        +
      • +
      • +

        Test case parallel execution

        +
      • +
      • +

        Run on independent Operating System (Java)

        +
      • +
      • +

        Externalize test environment (DEV, QA, PROD)

        +
      • +
      +
      +
    • +
    • +

      UI Selenium module – ver. 3.4.0.3:

      +
      +
        +
      • +

        Malleable resolution ( Remote Web Design, Mobile browsers)

        +
      • +
      • +

        Support for many browsers( Internet Explorer, Edge, Chrome, Firefox, Safari)

        +
      • +
      • +

        User friendly actions ( elementCheckBox, elementDropdown, etc. )

        +
      • +
      • +

        Ubiquitous test execution (locally, against Selenium Grid through Jenkins)

        +
      • +
      • +

        Page Object Model architecture

        +
      • +
      • +

        Selenium WebDriver library ver. 3.4.0

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+ +
+
+

DOT.NET Framework incubators

+
+

The .NET Core and Xamarin frameworks are still under development by a workgroup from The Netherlands, Spain, Poland, Italy, Norway and Germany. The 1.0 release is expected to be coming soon but the current incubator frameworks are already being used in several engagements. Some features to highlight are:

+
+
+
    +
  • +

    Full .NET implementation with multi-platform support

    +
  • +
  • +

    Detailed documentation for developers

    +
  • +
  • +

    Docker ready

    +
  • +
  • +

    Web API server side template :

    +
    +
      +
    • +

      Swagger auto-generation

      +
    • +
    • +

      JWT security

      +
    • +
    • +

      Entity Framework Support

      +
    • +
    • +

      Advanced log features

      +
    • +
    +
    +
  • +
  • +

    Xamarin Templates based on Excalibur framework

    +
  • +
  • +

    My Thai Star implementation:

    +
    +
      +
    • +

      Backend (.NET Core)

      +
    • +
    • +

      FrontEnd (Xamarin)

      +
    • +
    +
    +
  • +
+
+
+
+

devonfw has been Primed by Red Hat for OpenShift

+
+

OpenShift is a supported distribution of Kubernetes from Red Hat for container-based software deployment and management. It is using Docker containers and DevOps tools for accelerated application development. Using OpenShift allows Capgemini to avoid Cloud Vendor lock-in. OpenShift provides devonfw with a state of the art CI/CD environment (devonfw Shop Floor), providing devonfw with a platform for the whole development life cycle: from development to staging / deploy.

+
+ +
+
+

Harvested components and modules

+
+

The devonfw Harvesting process continues to add valuable components and modules to the devonfw platform. The last months the following elements were contributed:

+
+
+
+

== Service Client support (for Micro service Projects).

+
+

This client is for consuming microservices from other applications. This solution is already very flexible and customizable. As of now, this is suitable for small and simple projects where two or three microservices are invoked. Donated by Jörg Hohwiller. See: https://github.com/devonfw-forge/devonfw-microservices

+
+
+
+

== JHipster devonfw code generation

+
+

This component was donated by the ADcenter in Valencia. It was made in order to comply with strong requirements (especially from the French BU) to use jHipster for code generation.

+
+
+

JHipster is a code generator based on Yeoman generators. Its default generator generator-jhipster generates a specific JHipster structure. The purpose of generator-jhipster-DevonModule is to generate the structure and files of a typical devon4j project. It is therefore equivalent to the standard devon4j application template based CobiGen code generation.

+
+
+
+

== Simple Jenkins task status dashboard

+
+

This component has been donated by Capgemini Valencia, harvested from a system in use there. This dashboard, apart from an optional gamification element, allows the display of multiple Jenkins instances.

+
+
+
+

And lots more, among others:

+
+ +
+ +
+
+

devonfw Release notes 2.2 "Courage"

+ +
+
+

Production Line Integration

+
+

devonfw is now fully supported on the Production Line v1.3 and the coming v2.0. Besides that, we now "eat our own dogfood" as the whole devonfw project, all "buildable assets", now run on the Production Line.

+
+
+
+

devon4ng 2.0

+
+

The main focus of the Courage release is the renewed introduction of "devonfw for JavaScript", or devon4ng. This new version is a completely new implementation based on Angular (version 4). This new "stack" comes with:

+
+
+
    +
  • +

    New application templates for Angular 4 application (as well as Ionic 3)

    +
  • +
  • +

    A new reference application

    +
  • +
  • +

    A new tutorial (and Architecture Guide following soon)

    +
  • +
  • +

    Component Gallery

    +
  • +
  • +

    New CobiGen templates for generation of both Angular 4 and Ionic 3 UI components ("screens")

    +
  • +
  • +

    Integration of Covalent and Bootstrap offering a large number of components

    +
  • +
  • +

    my-thai-star, a showcase and reference implementation in Angular of a real, responsive usable app using recommended architecture and patterns

    +
  • +
  • +

    A new Tutorial using my-thai-star as a starting point

    +
  • +
+
+ +
+
+

New Cobigen

+
+

Major changes in this release:

+
+
+
    +
  • +

    Support for multi-module projects

    +
  • +
  • +

    Client UI Generation:

    +
    +
      +
    • +

      New Angular 4 templates based on the latest - angular project seed

      +
    • +
    • +

      Basic Typescript Merger

      +
    • +
    • +

      Basic Angular Template Merger

      +
    • +
    • +

      JSON Merger

      +
    • +
    +
    +
  • +
  • +

    Refactored devon4j templates to make use of Java template logic feature

    +
  • +
  • +

    Bugfixes:

    +
    +
      +
    • +

      Fixed merging of nested Java annotations including array values

      +
    • +
    • +

      more minor issues

      +
    • +
    +
    +
  • +
  • +

    Under the hood:

    +
    +
      +
    • +

      Large refactoring steps towards language agnostic templates formatting sensitive placeholder descriptions automatically formatting camelCase to TrainCase to snake-case, etc.

      +
    • +
    +
    +
  • +
  • +

    Easy setup of CobiGen IDE to enable fluent contribution

    +
  • +
  • +

    CI integration improved to integrate with GitHub for more valuable feedback

    +
  • +
+
+ +
+
+

MyThaiStar: New Restaurant Example, reference implementation & Methodology showcase

+
+

A major part of the new devonfw release is the incorporation of a new application, "my-thai-star" which among others:

+
+
+
    +
  • +

    serve as an example of how to make a "real" devonfw application (i.e. the application could be used for real)

    +
  • +
  • +

    Serves as an attractive showcase

    +
  • +
  • +

    Serves as a reference application of devonfw patterns and practices as well as the standard example in the new devonfw tutorial

    +
  • +
  • +

    highlights modern security option like JWT Integration

    +
  • +
+
+
+

The application is accompanied by a substantial new documentation asset, the devonfw methodology, which describes in detail the whole lifecycle of the development of a devonfw application, from requirements gathering to technical design. Officially my-thai-star is still considered to be an incubator as especially this last part is still not as mature as it could be. But the example application and tutorial are 100% complete and functional and form a marked improvement over the "old" restaurant example app. My-Thai-star will become the standard example app from devonfw 3.0 onwards.

+
+ +
+
+

The new devonfw Tutorial

+
+

The devonfw Tutorial is a new part of the devonfw documentation which changes the focus of how people can get started with the platform

+
+
+

There are tutorials for devon4j, devon4ng (Angular) and more to come. My-Thai-Star is used throughout the tutorial series to demonstrate the basic principles, architecture, and good practices of the different devonfw "stacks". There is an elaborated exercise where the readers get to write their own application "JumpTheQueue".

+
+
+

We hope that the new tutorial offers a better, more efficient way for people to get started with devonfw. Answering especially the question: how to make a devonfw application.

+
+ +
+
+

devon4j 2.4.0

+
+

"devonfw for Java" or devon4j now includes updated versions of the latest stable versions of Spring Boot and the Spring Framework and all related dependencies. This allows guaranteed, stable, execution of any devonfw 2.X application on the latest versions of the Industry Standard Spring stack. Another important new feature is a new testing architecture/infrastructure. All database options are updated to the latest versions as well as guaranteed to function on all Application Servers which should cause less friction and configuration time when starting a new devon4j project.

+
+
+

Details:

+
+
+
    +
  • +

    Spring Boot Upgrade to 1.5.3

    +
  • +
  • +

    Updated all underlying dependencies

    +
  • +
  • +

    Spring version is 4.3.8

    +
  • +
  • +

    Exclude Third Party Libraries that are not needed from sample restaurant application

    +
  • +
  • +

    Bugfix:Fixed the 'WhiteLabel' error received when tried to login to the sample restaurant application that is deployed onto external Tomcat

    +
  • +
  • +

    Bugfix:Removed the API api.org.apache.catalina.filters.SetCharacterEncodingFilter and used spring framework’s API org.springframework.web.filter.CharacterEncodingFilter instead

    +
  • +
  • +

    Bugfix:Fixed the error "class file for javax.interceptor.InterceptorBinding not found" received when executing the command 'mvn site' when trying to generate javadoc using Maven javadoc plugin

    +
  • +
  • +

    Documentation of the usage of UserDetailsService of Spring Security

    +
  • +
+
+ + +
+
+

Microservices Netflix

+
+

devonfw now includes a microservices implementation based on Spring Cloud Netflix. It provides a Netflix OSS integrations for Spring Boot apps through auto-configuration and binding to the Spring Environment. It offers microservices archetypes and a complete user guide with all the details to start creating microservices with devonfw.

+
+ +
+
+

devonfw distribution based on Eclipse OOMPH

+
+

The new Eclipse devonfw distribution is now based on Eclipse OOMPH, which allows us, at any engagement, to create and manage the distribution more effectively by formalizing the setup instructions so they can be performed automatically (due to a blocking issue postponed to devonfw 2.2.1 which will be released a few weeks after 2.2.0)

+
+
+
+

Visual Studio Code or Atom

+
+

The devonfw distro now contains Visual Studio Code alongside Eclipse in order to provide a default, state of the art, environment for web based development.

+
+ +
+
+

More I18N options

+
+

The platform now contains more documentation and a conversion utility which makes it easier to share i18n resource files between the different frameworks.

+
+ +
+
+

Spring Integration as devonfw Module

+
+

This release includes a new module based on the Java Message Service (JMS) and Spring Integration which provides a communication system (sender/subscriber) out-of-the-box with simple channels (only to send and read messages), request and reply channels (to send messages and responses) and request & reply asynchronously channels.

+
+ +
+
+

devonfw Harvest contributions

+
+

devonfw contains a whole series of new components obtained through the Harvesting process. Examples are :

+
+
+
    +
  • +

    New backend IP module Compose for Redis: management component for cloud environments. Redis is an open-source, blazingly fast, key/value low maintenance store. Compose’s platform gives you a configuration pre-tuned for high availability and locked down with additional security features. The component will manage the service connection and the main methods to manage the key/values on the storage. The library used is "lettuce".

    +
  • +
  • +

    Sencha component for extending GMapPanel with the following functionality :

    +
    +
      +
    • +

      Markers management

      +
    • +
    • +

      Google Maps options management

      +
    • +
    • +

      Geoposition management

      +
    • +
    • +

      Search address and coordinates management

      +
    • +
    • +

      Map events management

      +
    • +
    • +

      Map life cycle and behavior management

      +
    • +
    +
    +
  • +
  • +

    Sencha responsive Footer that moves from horizontal to vertical layout depending on the screen resolution or the device type. It is a simple functionality but we consider it very useful and reusable.

    +
  • +
+
+ +
+
+

More Deployment options to JEE Application Servers and Docker/CloudFoundry

+
+

The platform now fully supports deployment on the latest version of Weblogic, WebSphere, Wildfly (JBoss) as well as Docker and Cloud Foundry.

+
+ +
+
+

Devcon on Linux

+
+

Devcon is now fully supported on Linux which, together with the devonfw distro running on Linux, makes devonfw fully multi-platform and Cloud compatible (as Linux is the default OS in the Cloud!)

+
+ +
+
+

New devonfw Incubators

+
+

Different Business Units (countries) have contributed "incubator" frameworks:

+
+
+
    +
  • +

    devon4NET (Stack based on .NET Core / .NET "Classic" (4.6))

    +
  • +
  • +

    devon4X (Stack based on Xamarin)

    +
  • +
  • +

    devon4node (Stack based on Node-js/Serverless): https://github.com/devonfw/devon4node

    +
  • +
+
+
+

An "incubator" status means that the frameworks are production ready, all are actually already used in production, but are still not fully compliant with the devonfw definition of a "Minimally Viable Product".

+
+
+

During this summer devon4NET will be properly installed. In the meantime, if you want to have access to the source code, please contact the devonfw Core Team.

+
+ +
+
+

Release notes devonfw 2.1.1 "Balu"

+ +
+
+

Version 2.1.2: devon4j updates and some new features

+
+

We’ve released the latest update release of devonfw in the Balu series: version 2.1.2. The next major release, code named Courage, will be released approximately the end of June. This current release contains the following items:

+
+
+
+

devon4j 2.3.0 Release

+
+

Friday the 12th of May 2017 devon4j version 2.3.0 was released. Major features added are :

+
+
+
    +
  • +

    Database Integration with PostGres, MSSQL Server, MariaDB

    +
  • +
  • +

    Added docs folder for gh pages and added oomph setups

    +
  • +
  • +

    Refactored Code

    +
  • +
  • +

    Refactored Test Infrastructure

    +
  • +
  • +

    Added Documentation on debugging tests

    +
  • +
  • +

    Added Two Batch Job tests in the restaurant sample

    +
  • +
  • +

    Bugfix: Fixed the error received when the Spring Boot Application from sample application that is created from maven archetype is launched

    +
  • +
  • +

    Bugfix: Fix for 404 error received when clicked on the link '1. Table' in index.html of the sample application created from maven archetype

    +
  • +
+
+
+

The devon4j wiki and other documents are updated for release 2.3.0.

+
+
+
+

CobiGen Enhancements

+
+

Previous versions of CobiGen are able to generate code for REST services only. Now it is possible to generate the code for SOAP services as well. There are two use cases available in CobiGen:

+
+
+
    +
  • +

    SOAP without nested data

    +
  • +
  • +

    SOAP nested data

    +
  • +
+
+
+

The "nested data" use case is when there are 3 or more entities which are interrelated with each other. CobiGen will generate code which will return the nested data. Currently CobiGen services return ETO classes, CobiGen has been enhanced as to return CTO classes (ETO + relationship).

+
+
+

Apart from the SOAP code generation, the capability to express nested relationships have been added to the existing ReST code generator as well.

+
+
+
+

Micro services module (Spring Cloud/Netflix OSS)

+
+

To make it easier for devonfw users to design and develop applications based on microservices, this release provides a series of archetypes and resources based on Spring Cloud Netflix to automate the creation and configuration of microservices.

+
+
+

New documentation in the devonfw Guide contains all the details to start creating microservices with devonfw.

+
+
+
+

Spring Integration Module

+
+

Based on the Java Message Service (JMS) and Spring Integration, the devonfw Integration module provides a communication system (sender/subscriber) out-of-the-box with simple channels (only to send and read messages), request and reply channels (to send messages and responses) and request & reply asynchronously channels.

+
+
+
+

Version 2.1.1 Updates, fixes and some new features

+ +
+
+

CobiGen code-generator fixes

+
+

The CobiGen incremental code generator released in the previous version contained a regression which has now been fixed. Generating services in Batch mode whereby a package can be given as an input, using all Entities contained in that package, works again as expected.

+
+
+

For more information see: The CobiGen documentation

+
+
+
+

Devcon enhancements

+
+

In this new release we have added devcon to the devonfw distribution itself so one can directly use devcon from the console.bat or ps-console.bat windows. It is therefore no longer necessary to independently install devcon. However, as devcon is useful outside of the devonfw distribution, this remains a viable option.

+
+
+
+

Devon4Sencha

+
+

In Devon4Sencha there are changes in the sample application. It now complies fully with the architecture which is known as "universal app", so now it has screens custom tailored for desktop and mobile devices. All the basic logic remains the same for both versions. (The StarterTemplate is still only for creating a desktop app. This will be tackled in the next release.)

+
+
+
+

New Winauth modules

+
+

The original winauth module that, in previous Devon versions, implemented the Active Directory authentication and the Single Sign-on authentication now has been divided in two independent modules. The Active Directory authentication now is included in the new Winauth-ad module whereas the Single Sign-on implementation is included in a separate module called Winauth-sso. Also some improvements have been added to the Winauth-sso module to ease the way in which the module can be injected.

+
+
+
+

General updates

+
+

There are a series of updates to the devonfw documentation, principally the devonfw Guide. Furthermore, from this release on, you can find the devonfw guide in the doc folder of the distribution.

+
+
+

Furthermore, the devon4j and devonfw source-code in the "examples" workspace, have been updated to the latest version.

+
+
+
+

Version 2.1 New features, improvements and updates

+ +
+
+

Introduction

+
+

We are proud to present the new release of devonfw, version "2.1" which we’ve baptized "Balu". A major focus for this release is developer productivity. So that explains the name, as Balu is not just big, friendly and cuddly but also was very happy to let Mowgli do the work for him.

+
+
+
+

Cobigen code-generator UI code generation and more

+
+

The Cobigen incremental code generator which is part of devonfw has been significantly improved. Based on a single data schema it can generate the JPA/Hibernate code for the whole service layer (from data-access code to web services) for all CRUD operations. When generating code, Cobigen is able to detect and leave untouched any code which developers have added manually.

+
+
+

In the new release it supports Spring Data for data access and it is now capable of generating the whole User Interface as well: data-grids and individual rows/records with support for filters, pagination etc. That is to say: Cobigen can now generate automatically all the code from the server-side database access layer all the way up to the UI "screens" in the web browser.

+
+
+

Currently we support Sencha Ext JS with support for Angular 2 coming soon. The code generated by Cobigen can be opened and used by Sencha Architect, the visual design tool, which enables the programmer to extend and enhance the generated UI non-programmatically. When Cobigen regenerates the code, even those additions are left intact. All these features combined allow for an iterative, incremental way of development which can be up to an order of magnitude more productive than manual programming.

+
+
+

Cobigen can now also be used for code-generation within the context of an engagement. It is easily extensible and the process of how to extend it for your own project is well documented. This becomes already worthwhile ("delivers ROI") when having 5+ identical elements within the project.

+
+
+

For more information see: The Cobigen documentation

+
+
+
+

Angular 2

+
+

With the official release of Angular 2 and TypeScript 2, we’re slowly but steadily moving to embrace these important new players in the web development scene. We keep supporting the Angular 1 based devon4ng framework and are planning a migration of this framework to Angular 2 in the near future. For "Balu" we’ve decided to integrate "vanilla" Angular 2.

+
+
+

We have migrated the Restaurant Sample application to serve as a, documented and supported, blueprint for Angular 2 applications. Furthermore, we support three "kickstarter" projects which help engagement getting started with Angular2 - either using Bootstrap or Google´s Material Design - or, alternatively, Ionic 2 (the mobile framework on top of Angular 2).

+
+
+
+

devon4j 2.2.0 Release

+
+

A new release of devon4j, version 2.2.0, is included in this release of devonfw. This release mainly focuses on server side of devonfw. i.e devon4j.

+
+
+

Major features added are :

+
+
+
    +
  • +

    Upgrade to Spring Boot 1.3.8.RELEASE

    +
  • +
  • +

    Upgrade to Apache CXF 3.1.8

    +
  • +
  • +

    Database Integration with Oracle 11g

    +
  • +
  • +

    Added Servlet for HTTP-Debugging

    +
  • +
  • +

    Refactored code and improved JavaDoc

    +
  • +
  • +

    Bugfix: mvn spring-boot:run executes successfully for devon4j application created using devon4j template

    +
  • +
  • +

    Added subsystem tests of SalesmanagementRestService and several other tests

    +
  • +
  • +

    Added Tests to test java packages conformance to devonfw conventions

    +
  • +
+
+
+

More details on features added can be found at https://github.com/devonfw/devon4j/milestone/19?closed=1(here). The devon4j wiki and other documents are updated for release 2.2.0.

+
+
+
+

Devon4Sencha

+
+

Devon4Sencha is an alternative view layer for web applications developed with devonfw. It is based on Sencha Ext JS. As it requires a license for commercial applications it is not provided as Open Source and is considered to be part of the IP of Capgemini.

+
+
+

These libraries provide support for creating SPA (Single Page Applications) with a very rich set of components for both desktop and mobile. In the new version we extend this functionality to support for "Universal Apps", the Sencha specific term for true multi-device applications which make it possible to develop a single application for desktop, tablet as well as mobile devices. In the latest version Devon4Sencha has been upgraded to support Ext JS 6.2 and we now support the usage of Cobigen as well as Sencha Architect as extra option to improve developer productivity.

+
+
+
+

Devcon enhancements

+
+

The Devon Console, Devcon, is a cross-platform command line tool running on the JVM that provides many automated tasks around the full life-cycle of Devon applications, from installing the basic working environment and generating a new project, to running a test server and deploying an application to production. It can be used by the engagements to integrate with their proprietary tool chain.

+
+
+

In this new release we have added an optional graphical user interface (with integrated help) which makes using Devcon even easier to use. Another new feature is that it is now possible to easily extend it with commands just by adding your own or project specific Javascript files. This makes it an attractive option for project task automation.

+
+
+
+

Ready for the Cloud

+
+

devonfw is in active use in the Cloud, with projects running on IBM Bluemix and on Amazon AWS. The focus is very much to keep Cloud-specific functionality decoupled from the devonfw core. The engagement can choose between - and easily configure the use of - either CloudFoundry or Spring Cloud (alternatively, you can run devonfw in Docker containers in the Cloud as well. See elsewhere in the release notes).

+
+
+
+

Spring Data

+
+

The java server stack within devonfw, devon4j, is built on a very solid DDD architecture which uses JPA for its data access layer. We now offer integration of Spring Data as an alternative or to be used in conjunction with JPA. Spring Data offers significant advantages over JPA through its query mechanism which allows the developer to specify complex queries in an easy way. Overall working with Spring Data should be considerably more productive compared with JPA for the average or junior developer. An extra advantage is that Spring Data also allows - and comes with support for - the usage of NoSQL databases like MongoDB, Cassandra, DynamoDB etc. This becomes especially critical in the Cloud where NoSQL databases typically offer better scalability than relational databases.

+
+
+
+

Videos content in the devonfw Guide

+
+

The devonfw Guide is the single, authoritative tutorial and reference ("cookbook") for all things devonfw, targeted at the general developer working with the platform (there is another document for Architects). It is clear and concise but because of the large scope and wide reach of devonfw, it comes with a hefty 370+ pages. For the impatient - and sometimes images do indeed say more than words - we’ve added videos to the Guide which significantly speed up getting started with the diverse aspects of devonfw.

+
+
+

For more information on videos check out our devonfw Youtube channel

+
+
+
+

Containerisation with Docker and the Production Line

+
+

Docker (see: https://www.docker.com/) containers wrap a piece of software in a complete filesystem that contains everything needed to run: code, runtime, system tools, system libraries – anything that can be installed on a server. Docker containers resemble virtual machines but are far more resource efficient. Because of this, Docker and related technologies like Kubernetes are taking the Enterprise and Cloud by storm. We have certified and documented the usage of devonfw on Docker so we can now firmly state that devonfw is "Docker ready". All the more so as the iCSD Production Line is now supporting devonfw as well. The Production Line is a Docker based set of methods and tools that make it possible to develop custom software for our customers on time and with the expected quality. By having first-class support for devonfw on the Production Line, iCSD has got a unified, integral solution which covers all the phases involved in the application development cycle from requirements to testing and hand-off to the client.

+
+
+
+

Eclipse Neon

+
+

devonfw comes with its own pre-configured and enhanced Eclipse-based IDE: the Open Source "devonfw IDE" and "devonfw Distr" which falls under Capgemini IP. We’ve updated both versions to the latest stable version of Eclipse, Neon. From Balu onwards we support the IDE on Linux as well and we offer downloadable versions for both Windows and Linux.

+
+
+

See: The Devon IDE

+
+
+
+

Default Java 8 with Java 7 compatibility

+
+

From version 2.1. "Balu" onwards, devonfw is using by default Java 8 for both the tool-chain as well as the integrated development environments. However, both the framework as well as the IDE and tool-set remain fully backward compatible with Java 7. We have added documentation to help configuring aspects of the framework to use Java 7 or to upgrade existing projects to Java 8. See: Compatibility guide for Java7, Java8 and Tomcat7, Tomcat8

+
+
+
+

Full Linux support

+
+

In order to fully support the move towards the Cloud, from version 2.1. "Balu" onwards, devonfw is fully supported on Linux. Linux is the de-facto standard for most Cloud providers. We currently only offer first-class support for Ubuntu 16.04 LTS onward but most aspects of devonfw should run without problems on other and older distributions as well.

+
+
+
+

Initial ATOM support

+
+

Atom is a text editor that’s modern, approachable, yet hackable to the core - a tool you can customize to do anything but also use productively without ever touching a config file. It is turning into a standard for modern web development. In devonfw 2.1 "Balu" we provide a script which installs automatically the most recent version of Atom in the devonfw distribution with a pre-configured set of essential plugins.

+
+
+
+

Database support

+
+

Through JPA (and now Spring Data as well) devonfw supports many databases. In Balu we’ve extended this support to prepared configuration, extensive documentation and supporting examples for all major "Enterprise" DB servers. So it becomes even easier for engagements to start using these standard database options. Currently we provide this extended support for Oracle, Microsoft SQL Server, MySQL and PostgreSQL. For more information see: devonfw Database Migration Guide

+
+
+
+

Internationalisation (I18N) improvements

+
+

Likewise, existing basic Internationalisation (I18N) support has been significantly enhanced through a new devonfw module and extended to support Ext JS and Angular 2 apps as well. This means that both server as well as client side applications can be made easily to support multiple languages ("locales"), using industry standard tools and without touching programming code (essential when working with teams of translators).

+
+
+
+

Asynchronous HTTP support

+
+

Asynchronous HTTP is an important feature allowing so-called "long polling" HTTP Requests (for streaming applications, for example) or with requests sending large amounts of data. By making HTTP Requests asynchronous, devonfw server instances can better support these types of use-cases while offering far better performance.

+
+
+
+

Security and License guarantees

+
+

In devonfw security comes first. The components of the framework are designed and implemented according to the recommendations and guidelines as specified by OWASP in order to confront the top 10 security vulnerabilities.

+
+
+

From version 2.1 "Balu" onward we certify that devonfw has been scanned by software from "Black Duck". This verifies that devonfw is based on 100% Open Source Software (non Copyleft) and demonstrates that at moment of release there are no known, critical security flaws. Less critical issues are clearly documented.

+
+
+
+

Documentation improvements

+
+

Apart from the previously mentioned additions and improvements to diverse aspects of the devonfw documentation, principally the devonfw Guide, there are a number of other important changes. We’ve incorporated the Devon Modules Developer´s Guide which describes how to extend devonfw with its Spring-based module system. Furthermore we’ve significantly improved the Guide to the usage of web services. We’ve included a Compatibility Guide which details a series of considerations related with different version of the framework as well as Java 7 vs 8. And finally, we’ve extended the F.A.Q. to provide the users with direct answers to common, Frequently Asked Questions.

+
+
+
+

Contributors

+
+

Many thanks to adrianbielewicz, aferre777, amarinso, arenstedt, azzigeorge, cbeldacap, cmammado, crisjdiaz, csiwiak, Dalgar, drhoet, Drophoff, dumbNickname, EastWindShak, fawinter, fbougeno, fkreis, GawandeKunal, henning-cg, hennk, hohwille, ivanderk, jarek-jpa, jart, jensbartelheimer, jhcore, jkokoszk, julianmetzler, kalmuczakm, kiran-vadla, kowalj, lgoerlach, ManjiriBirajdar, MarcoRose, maybeec, mmatczak, nelooo, oelsabba, pablo-parra, patrhel, pawelkorzeniowski, PriyankaBelorkar, RobertoGM, sekaiser, sesslinger, SimonHuber, sjimenez77, sobkowiak, sroeger, ssarmokadam, subashbasnet, szendo, tbialecki, thoptr, tsowada, znazir and anyone who we may have forgotten to add!

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/oss-compliance.html b/docs/devonfw.github.io/1.0/general/oss-compliance.html new file mode 100644 index 00000000..917a6fb2 --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/oss-compliance.html @@ -0,0 +1,664 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

OSS Compliance

+
+
+

This chapter helps you to gain transparency on OSS usage and reach OSS compliance in your project.

+
+
+
+
+

Preface

+
+
+

devonfw, as most Java software, makes strong use of Open Source Software (OSS). It is using about 150 OSS products on the server only and on the client even more. Using a platform like devonfw to develop your own custom solution requires handling contained OSS correctly, i.e. acting OSS-compliant.

+
+
+

Please read the Open Source policy of your company first, e.g. the Capgemini OSS Policy which contains a short, comprehensive and well written explanation on relevant OSS-knowledge. Make sure you:

+
+
+
    +
  • +

    understand the copyleft effect and its effect in commercial projects

    +
  • +
  • +

    understand the 3 license categories: "permissive", "weak copyleft" and "strong copyleft"

    +
  • +
  • +

    know prominent license types as e.g. "Apache-2.0" or "GPL-3.0" and what copyleft-category they are in

    +
  • +
  • +

    are aware that some OSS offer dual/multi-licenses

    +
  • +
  • +

    Understand that OSS libraries often come with sub-dependencies of other OSS carrying licenses themselves

    +
  • +
+
+
+

To define sufficient OSS compliance measures, contact your IP officer or legal team as early as possible, especially if you develop software for clients.

+
+
+
+
+

Obligations when using OSS

+
+
+

If you create a custom solution containing OSS, this in legal sense is a "derived" work. If you distribute your derived work to your business client or any other legal entity in binary packaged form, the license obligations of contained OSS get into effect. Ignoring these leads to a license infringement which can create high damage.

+
+
+

To carefully handle these obligations you must:

+
+
+
    +
  • +

    maintain an OSS inventory (to gain transparency on OSS usage and used licenses)

    +
  • +
  • +

    check license conformity depending on usage/distribution in a commercial scenario

    +
  • +
  • +

    check license compatibility between used OSS-licenses

    +
  • +
  • +

    fulfill obligations defined by the OSS-licenses

    +
  • +
+
+
+

Obligations need to be checked per license. Frequent obligations are:

+
+
+
    +
  • +

    deliver the license terms of all used versions of the OSS licenses

    +
  • +
  • +

    not to change any copyright statements or warranty exclusions contained in the used OSS components

    +
  • +
  • +

    deliver the source code of the OSS components (e.g. on a data carrier)

    +
  • +
  • +

    when modifying OSS, track any source code modification (including date and name of the employee/company)

    +
  • +
  • +

    display OSS license notice in a user frontend (if any)

    +
  • +
  • +

    other obligations depending on individual license

    +
  • +
+
+
+
+
+

Automate OSS handling

+
+
+

Carefully judging the OSS usage in your project is a MANUAL activity! However, collecting OSS information and fulfilling license obligations should be automated as much as possible. A prominent professional tool to automate OSS compliance is the commercial software "Black Duck". Unfortunately it is rather expensive - either purchased or used as SaaS.

+
+
+

The most recommended lightweight tooling is a combination of Maven plugins. We will mainly use the Mojo Maven License Plugin.

+
+
+
+
+

Configure the Mojo Maven License Plugin

+
+
+

You can use it from command line but this will limit the ability to sustainably configure it (shown later). Therefore we add it permanently as a build-plugin to the project parent-pom like this:

+
+
+
+
<plugin>
+  <groupId>org.codehaus.mojo</groupId>
+  <artifactId>license-maven-plugin</artifactId>
+  <version>1.14</version>
+
+  <configuration>
+    <outputDirectory>${project.build.directory}/generated-resources</outputDirectory>
+    <sortArtifactByName>true</sortArtifactByName>
+    <includeTransitiveDependencies>true</includeTransitiveDependencies>
+    <!-- the "missing file" declares licenses for dependencies that could not be detected automatically -->
+    <useMissingFile>true</useMissingFile>
+    <!-- find the "missing files" in all child-projects at the following location -->
+    <missingFile>src/license/THIRD-PARTY.properties</missingFile>
+    <!-- if the "missing files" are not yet existing in child-projects they will be created automatically -->
+    <failOnMissing>false</failOnMissing>
+    <overrideFile>src/license/override-THIRD-PARTY.properties</overrideFile>
+    <!-- harmonize different ways of writing license names -->
+    <licenseMerges>
+      <licenseMerge>Apache-2.0|Apache 2.0</licenseMerge>
+      <licenseMerge>Apache-2.0|Apache License, Version 2.0</licenseMerge>
+      <licenseMerge>Apache-2.0|Apache Software License, Version 2.0</licenseMerge>
+      <licenseMerge>Apache-2.0|The Apache Software License, Version 2.0</licenseMerge>
+    </licenseMerges>
+    <encoding>utf-8</encoding>
+  </configuration>
+</plugin>
+
+
+
+

In the config above there are several settings that help to permanently improve the result of an automated OSS scan. We explain these now.

+
+
+
+
+

Declare additional licenses

+
+
+

Sometimes the licenses of used OSS cannot be resolved automatically. That is not the mistake of the maven-license-tool, but the mistake of the OSS author who didn’t make the respective license-information properly available.

+
+
+

Declare additional licenses in a "missing file" within each maven-subproject: /src/license/THIRD-PARTY.properties.

+
+
+
+
##Generated by org.codehaus.mojo.license.AddThirdPartyMojo
+#-------------------------------------------------------------------------------
+##Already used licenses in project :
+##- ASF 2.0
+##- Apache 2
+...
+#-------------------------------------------------------------------------------
+##Please fill the missing licenses for dependencies :
+...
+dom4j--dom4j--1.6.1=BSD 3-Clause
+javax.servlet--jstl--1.2=CDDL
+...
+
+
+
+

In case the use of "missing files" is activated, but the THIRD-PARTY.properties-file is not yet existing, the first run of an "aggregate-add-third-party" goal (see below) will fail. Luckily the license-plugin just helped us and created the properties-files automatically (in each maven-subproject) and prefilled it with:

+
+
+
    +
  • +

    a list of all detected licenses within the maven project

    +
  • +
  • +

    all OSS libraries where a license could not be detected automatically.

    +
  • +
+
+
+

You now need to fill in missing license information and rerun the plugin.

+
+
+
+
+

Redefine wrongly detected licenses

+
+
+

In case automatically detected licenses prove to be wrong by closer investigation, this wrong detection can be overwritten. Add a configuration to declare alternative licenses within each maven-subproject: /src/license/override-THIRD-PARTY.properties

+
+
+
+
com.sun.mail--javax.mail--1.5.6=Common Development and Distribution License 1.1
+
+
+
+

This can also be useful for OSS that provides a multi-license, to make a decision which license to actually choose.

+
+
+
+
+

Merge licenses

+
+
+

You will see that many prominent licenses come in all sorts of notations, e.g. Apache-2.0 as: "Apache 2" or "ASL-2.0" or "The Apache License, Version 2.0". The Mojo Maven License Plugin allows to harmonize different forms of a license-naming like this:

+
+
+
+
    <!-- harmonize different ways of writing license names -->
+    <licenseMerges>
+      <licenseMerge>Apache-2.0|Apache 2.0</licenseMerge>
+      <licenseMerge>Apache-2.0|Apache License, Version 2.0</licenseMerge>
+      <licenseMerge>Apache-2.0|Apache Software License, Version 2.0</licenseMerge>
+      <licenseMerge>Apache-2.0|The Apache Software License, Version 2.0</licenseMerge>
+    </licenseMerges>
+
+
+
+

License-names will be harmonized in the OSS report to one common term. We propose to harmonize to short-license-IDs defined by the SPDX standard.

+
+
+
+
+

Retrieve licenses list

+
+
+

For a quick initial judgement of OSS license situation run the following maven command from command line:

+
+
+
+
$ mvn license:license-list
+
+
+
+

You receive the summary list of all used OSS licenses on the cmd-out.

+
+
+
+
+

Create an OSS inventory

+
+
+

To create an OSS inventory means to report on the overall bill of material of used OSS and corresponding licenses. Within the parent project, run the following maven goal from command line.

+
+
+
+
$ mvn license:aggregate-download-licenses -Dlicense.excludedScopes=test,provided
+
+
+
+

Running the aggregate-download-licenses goal creates two results.

+
+
+
    +
  1. +

    a license.xml that contains all used OSS dependencies (even sub-dependencies) with respective license information

    +
  2. +
  3. +

    puts all used OSS-license-texts as html files into the folder target/generated-resources

    +
  4. +
+
+
+

Carefully validate and judge the outcome of the license list. It is recommended to copy the license.xml to the project documentation and hand it over to your client. You may also import it into a spreadsheet to get a better overview.

+
+
+
+
+

Create a THIRD PARTY file

+
+
+

Within Java software it is a common practice to add a "THIRD-PARTY" text file to the distribution. Contained is a summary-list of all used OSS and respective licenses. This can also be achieved with the Mojo Maven License Plugin.

+
+
+

Within the parent project, run the following maven goal from command line.

+
+
+
+
$ mvn license:aggregate-add-third-party -Dlicense.excludedScopes=test,provided
+
+
+
+

Find the THIRD-PARTY.txt in the folder: target\generated-resources. The goal aggregate-add-third-party also profits from configuration as outlined above.

+
+
+
+
+

Download and package OSS SourceCode

+
+
+

Some OSS licenses require handing over the OSS source code which is packaged with your custom software to the client the solution is distributed to. It is a good practice to hand over the source code of all used OSS to your client. Collecting all source code can be accomplished by another Maven plugin: Apache Maven Dependency Plugin.

+
+
+

It downloads all OSS Source Jars into the folder: \target\sources across the parent and all child maven projects.

+
+
+

You configure the plugin like this:

+
+
+
+
<plugin>
+  <groupId>org.apache.maven.plugins</groupId>
+  <artifactId>maven-dependency-plugin</artifactId>
+  <version>3.0.2</version>
+
+  <configuration>
+    <classifier>sources</classifier>
+    <failOnMissingClassifierArtifact>false</failOnMissingClassifierArtifact>
+    <outputDirectory>${project.build.directory}/sources</outputDirectory>
+  </configuration>
+  <executions>
+    <execution>
+      <id>src-dependencies</id>
+      <phase>package</phase>
+      <goals>
+        <!-- use unpack-dependencies instead if you want to explode the sources -->
+        <goal>copy-dependencies</goal>
+      </goals>
+    </execution>
+  </executions>
+</plugin>
+
+
+
+

You run the plugin from command line like this:

+
+
+
+
$ mvn dependency:copy-dependencies -Dclassifier=sources
+
+
+
+

The plugin provides another goal that also unzips the jars, which is not recommended, since contents get mixed up.

+
+
+

Deliver the OSS source jars to your client with the release of your custom solution. This can be done physically - e.g. on DVD.

+
+
+
+
+

Handle OSS within CI-process

+
+
+

To automate OSS handling in the regular build-process (which is not recommended to start with) you may declare the following executions and goals in your maven-configuration:

+
+
+
+
<plugin>
+  ...
+
+  <executions>
+    <execution>
+      <id>aggregate-add-third-party</id>
+      <phase>generate-resources</phase>
+      <goals>
+        <goal>aggregate-add-third-party</goal>
+      </goals>
+    </execution>
+
+    <execution>
+      <id>aggregate-download-licenses</id>
+      <phase>generate-resources</phase>
+      <goals>
+        <goal>aggregate-download-licenses</goal>
+      </goals>
+    </execution>
+  </executions>
+</plugin>
+
+
+
+

Note that the build may fail in case the OSS information was not complete. Check the build-output to understand and resolve the issue - like e.g. add missing license information in the "missing file".

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/release-notes-version-2.1.html b/docs/devonfw.github.io/1.0/general/release-notes-version-2.1.html new file mode 100644 index 00000000..2d59edec --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/release-notes-version-2.1.html @@ -0,0 +1,683 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Release notes devonfw 2.1.1 "Balu"

+
+ +
+
+
+

Version 2.1.2: devon4j updates and some new features

+
+
+

We’ve released the latest update release of devonfw in the Balu series: version 2.1.2. The next major release, code named Courage, will be released approximately the end of June. This current release contains the following items:

+
+
+
+
+

devon4j 2.3.0 Release

+
+
+

Friday the 12th of May 2017 devon4j version 2.3.0 was released. Major features added are:

+
+
+
    +
  • +

    Database Integration with PostGres, MSSQL Server, MariaDB

    +
  • +
  • +

    Added docs folder for gh pages and added oomph setups

    +
  • +
  • +

    Refactored Code

    +
  • +
  • +

    Refactored Test Infrastructure

    +
  • +
  • +

    Added Documentation on debugging tests

    +
  • +
  • +

    Added Two Batch Job tests in the restaurant sample

    +
  • +
  • +

    Bugfix: Fixed the error received when the Spring Boot Application from sample application that is created from maven archetype is launched

    +
  • +
  • +

    Bugfix: Fix for 404 error received when clicked on the link '1. Table' in index.html of the sample application created from maven archetype

    +
  • +
+
+
+

The devon4j wiki and other documents are updated for release 2.3.0.

+
+
+
+
+

CobiGen Enhancements

+
+
+

Previous versions of CobiGen are able to generate code for REST services only. Now it is possible to generate the code for SOAP services as well. There are two use cases available in CobiGen:

+
+
+
    +
  • +

    SOAP without nested data

    +
  • +
  • +

    SOAP nested data

    +
  • +
+
+
+

The "nested data" use case is when there are 3 or more entities which are interrelated with each other. CobiGen will generate code which will return the nested data. Currently CobiGen services return ETO classes; CobiGen has been enhanced to return CTO classes (ETO + relationship).

+
+
+

Apart from the SOAP code generation, the capability to express nested relationships has been added to the existing ReST code generator as well.

+
+
+
+
+

Micro services module (Spring Cloud/Netflix OSS)

+
+
+

To make it easier for devonfw users to design and develop applications based on microservices, this release provides a series of archetypes and resources based on Spring Cloud Netflix to automate the creation and configuration of microservices.

+
+
+

New documentation in the devonfw Guide contains all the details to start creating microservices with devonfw

+
+
+
+
+

Spring Integration Module

+
+
+

Based on the Java Message Service (JMS) and Spring Integration, the devonfw Integration module provides a communication system (sender/subscriber) out-of-the-box with simple channels (only to send and read messages), request and reply channels (to send messages and responses) and request & reply asynchronously channels.

+
+
+
+
+

Version 2.1.1 Updates, fixes and some new features

+
+ +
+
+
+

CobiGen code-generator fixes

+
+
+

The CobiGen incremental code generator released in the previous version contained a regression which has now been fixed. Generating services in Batch mode whereby a package can be given as an input, using all Entities contained in that package, works again as expected.

+
+
+

For more information see: The CobiGen documentation

+
+
+
+
+

Devcon enhancements

+
+
+

In this new release we have added devcon to the devonfw distribution itself so one can directly use devcon from the console.bat or ps-console.bat windows. It is therefore no longer necessary to independently install devcon. However, as devcon is useful outside of the devonfw distribution, this remains a viable option.

+
+
+
+
+

Devon4Sencha

+
+
+

In Devon4Sencha there are changes in the sample application. It now complies fully with the architecture which is known as "universal app", so now it has screens custom tailored for desktop and mobile devices. All the basic logic remains the same for both versions. (The StarterTemplate is still only for creating a desktop app. This will be tackled in the next release.)

+
+
+
+
+

New Winauth modules

+
+
+

The original winauth module that, in previous Devon versions, implemented the Active Directory authentication and the Single Sign-on authentication now has been divided in two independent modules. The Active Directory authentication now is included in the new Winauth-ad module whereas the Single Sign-on implementation is included in a separate module called Winauth-sso. Also some improvements have been added to the Winauth-sso module to ease the way in which the module can be injected.

+
+
+
+
+

General updates

+
+
+

There are a series of updates to the devonfw documentation, principally the devonfw Guide. Furthermore, from this release on, you can find the devonfw Guide in the doc folder of the distribution.

+
+
+

Furthermore, the devon4j and devonfw source-code in the "examples" workspace, have been updated to the latest version.

+
+
+
+
+

Version 2.1 New features, improvements and updates

+
+ +
+
+
+

Introduction

+
+
+

We are proud to present the new release of devonfw, version "2.1" which we’ve baptized "Balu". A major focus for this release is developer productivity. So that explains the name, as Balu is not just big, friendly and cuddly but also was very happy to let Mowgli do the work for him.

+
+
+
+
+

Cobigen code-generator UI code generation and more

+
+
+

The Cobigen incremental code generator which is part of devonfw has been significantly improved. Based on a single data schema it can generate the JPA/Hibernate code for the whole service layer (from data-access code to web services) for all CRUD operations. When generating code, Cobigen is able to detect and leave untouched any code which developers have added manually.

+
+
+

In the new release it supports Spring Data for data access and it is now capable of generating the whole User Interface as well: data-grids and individual rows/records with support for filters, pagination etc. That is to say: Cobigen can now generate automatically all the code from the server-side database access layer all the way up to the UI "screens" in the web browser.

+
+
+

Currently we support Sencha Ext JS with support for Angular 2 coming soon. The code generated by Cobigen can be opened and used by Sencha Architect, the visual design tool, which enables the programmer to extend and enhance the generated UI non-programmatically. When Cobigen regenerates the code, even those additions are left intact. All these features combined allow for an iterative, incremental way of development which can be up to an order of magnitude more productive than "manual programming".

+
+
+

Cobigen can now also be used for code-generation within the context of an engagement. It is easily extensible and the process of how to extend it for your own project is well documented. This becomes already worthwhile ("delivers ROI") when having 5+ identical elements within the project.

+
+
+

For more information see: The Cobigen documentation

+
+
+
+
+

Angular 2

+
+
+

With the official release of Angular 2 and TypeScript 2, we’re slowly but steadily moving to embrace these important new players in the web development scene. We keep supporting the Angular 1 based devon4ng framework and are planning a migration of this framework to Angular 2 in the near future. For "Balu" we’ve decided to integrate "vanilla" Angular 2.

+
+
+

We have migrated the Restaurant Sample application to serve as a, documented and supported, blueprint for Angular 2 applications. Furthermore, we support three "kickstarter" projects which help engagements get started with Angular 2 - either using Bootstrap or Google’s Material Design - or, alternatively, Ionic 2 (the mobile framework on top of Angular 2).

+
+
+
+
+

devon4j 2.2.0 Release

+
+
+

A new release of devon4j, version 2.2.0, is included in this release of devonfw. This release mainly focuses on server side of devonfw. i.e devon4j.

+
+
+

Major features added are:

+
+
+
    +
  • +

    Upgrade to Spring Boot 1.3.8.RELEASE

    +
  • +
  • +

    Upgrade to Apache CXF 3.1.8

    +
  • +
  • +

    Database Integration with Oracle 11g

    +
  • +
  • +

    Added Servlet for HTTP-Debugging

    +
  • +
  • +

    Refactored code and improved JavaDoc

    +
  • +
  • +

    Bugfix: mvn spring-boot:run executes successfully for devon4j application created using devon4j template

    +
  • +
  • +

    Added subsystem tests of SalesmanagementRestService and several other tests

    +
  • +
  • +

    Added Tests to test java packages conformance to devonfw conventions

    +
  • +
+
+
+

More details on the features added can be found at https://github.com/devonfw/devon4j/milestone/19?closed=1. The devon4j wiki and other documents are updated for release 2.2.0.

+
+
+
+
+

Devon4Sencha

+
+
+

Devon4Sencha is an alternative view layer for web applications developed with devonfw. It is based on Sencha Ext JS. As it requires a license for commercial applications it is not provided as Open Source and is considered to be part of the IP of Capgemini.

+
+
+

These libraries provide support for creating SPA (Single Page Applications) with a very rich set of components for both desktop and mobile. In the new version we extend this functionality to support for "Universal Apps", the Sencha specific term for true multi-device applications which make it possible to develop a single application for desktop, tablet as well as mobile devices. In the latest version Devon4Sencha has been upgraded to support Ext JS 6.2 and we now support the usage of Cobigen as well as Sencha Architect as extra option to improve developer productivity.

+
+
+
+
+

Devcon enhancements

+
+
+

The Devon Console, Devcon, is a cross-platform command line tool running on the JVM that provides many automated tasks around the full life-cycle of Devon applications, from installing the basic working environment and generating a new project, to running a test server and deploying an application to production. It can be used by the engagements to integrate with their proprietary tool chain.

+
+
+

In this new release we have added an optional graphical user interface (with integrated help) which makes Devcon even easier to use. Another new feature is that it is now possible to easily extend it with commands just by adding your own or project specific Javascript files. This makes it an attractive option for project task automation.

+
+
+
+
+

Ready for the Cloud

+
+
+

devonfw is in active use in the Cloud, with projects running on IBM Bluemix and on Amazon AWS. The focus is very much to keep Cloud-specific functionality decoupled from the devonfw core. The engagement can choose between - and easily configure the use of - either CloudFoundry or Spring Cloud (alternatively, you can run devonfw in Docker containers in the Cloud as well. See elsewhere in the release notes).

+
+
+
+
+

Spring Data

+
+
+

The java server stack within devonfw, devon4j, is built on a very solid DDD architecture which uses JPA for its data access layer. We now offer integration of Spring Data as an alternative or to be used in conjunction with JPA. Spring Data offers significant advantages over JPA through its query mechanism which allows the developer to specify complex queries in an easy way. Overall, working with Spring Data should be considerably more productive compared with JPA for the average or junior developer. An extra advantage is that Spring Data also allows - and comes with support for - the usage of NoSQL databases like MongoDB, Cassandra, DynamoDB etc. This becomes especially critical in the Cloud where NoSQL databases typically offer better scalability than relational databases.

+
+
+
+
+

Videos content in the devonfw Guide

+
+
+

The devonfw Guide is the single, authoritative tutorial and reference ("cookbook") for all things devonfw, targeted at the general developer working with the platform (there is another document for Architects). It is clear and concise but because of the large scope and wide reach of devonfw, it comes with a hefty 370+ pages. For the impatient - and sometimes images do indeed say more than words - we’ve added videos to the Guide which significantly speed up getting started with the diverse aspects of devonfw.

+
+
+

For more information on videos check out our devonfw Youtube channel

+
+
+
+
+

Containerisation with Docker and the Production Line

+
+
+

Docker (see: https://www.docker.com/) containers wrap a piece of software in a complete filesystem that contains everything needed to run: code, runtime, system tools, system libraries – anything that can be installed on a server. Docker containers resemble virtual machines but are far more resource efficient. Because of this, Docker and related technologies like Kubernetes are taking the Enterprise and Cloud by storm. We have certified and documented the usage of devonfw on Docker so we can now firmly state that "devonfw is Docker" ready. All the more so as the iCSD Production Line is now supporting devonfw as well. The Production Line is a Docker based set of methods and tools that make possible to develop custom software to our customers on time and with the expected quality. By having first-class support for devonfw on the Production Line, iCSD has got an unified, integral solution which covers all the phases involved on the application development cycle from requirements to testing and hand-off to the client.

+
+
+
+
+

Eclipse Neon

+
+
+

devonfw comes with its own pre configured and enhanced Eclipse based IDE: the Open Source "devonfw IDE" and "devonfw Distr" which falls under Capgemini IP. We’ve updated both versions to the latest stable version of Eclipse, Neon. From Balu onwards we support the IDE on Linux as well and we offer downloadable versions for both Windows and Linux.

+
+
+

See: The Devon IDE

+
+
+
+
+

Default Java 8 with Java 7 compatibility

+
+
+

From version 2.1. "Balu" onwards, devonfw is using by default Java 8 for both the tool-chain as well as the integrated development environments. However, both the framework as well as the IDE and tool-set remain fully backward compatible with Java 7. We have added documentation to help configuring aspects of the framework to use Java 7 or to upgrade existing projects to Java 8. See: Compatibility guide for Java7, Java8 and Tomcat7, Tomcat8

+
+
+
+
+

Full Linux support

+
+
+

In order to fully support the move towards the Cloud, from version 2.1. "Balu" onwards, devonfw is fully supported on Linux. Linux is the de-facto standard for most Cloud providers. We currently only offer first-class support for Ubuntu 16.04 LTS onward but most aspects of devonfw should run without problems on other and older distributions as well.

+
+
+
+
+

Initial ATOM support

+
+
+

Atom is a text editor that’s modern, approachable, yet hackable to the core - a tool you can customize to do anything but also use productively without ever touching a config file. It is turning into a standard for modern web development. In devonfw 2.1 "Balu" we provide a script which installs automatically the most recent version of Atom in the devonfw distribution with a pre-configured set of essential plugins.

+
+
+
+
+

Database support

+
+
+

Through JPA (and now Spring Data as well) devonfw supports many databases. In Balu we’ve extended this support to prepared configuration, extensive documentation and supporting examples for all major "Enterprise" DB servers. So it becomes even easier for engagements to start using these standard database options. Currently we provide this extended support for Oracle, Microsoft SQL Server, MySQL and PostgreSQL. For more information see: devonfw Database Migration Guide

+
+
+
+
+

Internationalisation (I18N) improvements

+
+
+

Likewise, existing basic Internationalisation (I18N) support has been significantly enhanced through a new devonfw module and extended to support Ext JS and Angular 2 apps as well. This means that both server as well as client side applications can easily be made to support multiple languages ("locales"), using industry standard tools and without touching programming code (essential when working with teams of translators).

+
+
+
+
+

Asynchronous HTTP support

+
+
+

Asynchronous HTTP is an important feature allowing so-called "long polling" HTTP Requests (for streaming applications, for example) or with requests sending large amounts of data. By making HTTP Requests asynchronous, devonfw server instances can better support these types of use-cases while offering far better performance.

+
+
+
+
+

Security and License guarantees

+
+
+

In devonfw security comes first. The components of the framework are designed and implemented according to the recommendations and guidelines as specified by OWASP in order to confront the top 10 security vulnerabilities.

+
+
+

From version 2.1 "Balu" onward we certify that devonfw has been scanned by software from "Black Duck". This verifies that devonfw is based on 100% Open Source Software (non Copyleft) and demonstrates that at moment of release there are no known, critical security flaws. Less critical issues are clearly documented.

+
+
+
+
+

Documentation improvements

+
+
+

Apart from the previously mentioned additions and improvements to diverse aspects of the devonfw documentation, principally the devonfw Guide, there are a number of other important changes. We’ve incorporated the Devon Modules Developer´s Guide which describes how to extend devonfw with its Spring-based module system. Furthermore we’ve significantly improved the Guide to the usage of web services. We’ve included a Compatibility Guide which details a series of considerations related with different version of the framework as well as Java 7 vs 8. And finally, we’ve extended the F.A.Q. to provide the users with direct answers to common, Frequently Asked Questions.

+
+
+
+
+

Contributors

+
+
+

Many thanks to adrianbielewicz, aferre777, amarinso, arenstedt, azzigeorge, cbeldacap, cmammado, crisjdiaz, csiwiak, Dalgar, drhoet, Drophoff, dumbNickname, EastWindShak, fawinter, fbougeno, fkreis, GawandeKunal, henning-cg, hennk, hohwille, ivanderk, jarek-jpa, jart, jensbartelheimer, jhcore, jkokoszk, julianmetzler, kalmuczakm, kiran-vadla, kowalj, lgoerlach, ManjiriBirajdar, MarcoRose, maybeec, mmatczak, nelooo, oelsabba, pablo-parra, patrhel, pawelkorzeniowski, PriyankaBelorkar, RobertoGM, sekaiser, sesslinger, SimonHuber, sjimenez77, sobkowiak, sroeger, ssarmokadam, subashbasnet, szendo, tbialecki, thoptr, tsowada, znazir and anyone who we may have forgotten to add!

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/release-notes-version-2.2.html b/docs/devonfw.github.io/1.0/general/release-notes-version-2.2.html new file mode 100644 index 00000000..0f873e95 --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/release-notes-version-2.2.html @@ -0,0 +1,654 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devonfw Release notes 2.2 "Courage"

+
+ +
+
+
+

Production Line Integration

+
+
+

devonfw is now fully supported on the Production Line v1.3 and the coming v2.0. Besides that, we now "eat our own dogfood" as the whole devonfw project, all "buildable assets", now run on the Production Line.

+
+
+
+
+

devon4ng 2.0

+
+
+

The main focus of the Courage release is the renewed introduction of "devonfw for JavaScript", or devon4ng. This new version is a completely new implementation based on Angular (version 4). This new "stack" comes with:

+
+
+
    +
  • +

    New application templates for Angular 4 application (as well as Ionic 3)

    +
  • +
  • +

    A new reference application

    +
  • +
  • +

    A new tutorial (and Architecture Guide following soon)

    +
  • +
  • +

    Component Gallery

    +
  • +
  • +

    New CobiGen templates for generation of both Angular 4 and Ionic 3 UI components ("screens")

    +
  • +
  • +

    Integration of Covalent and Bootstrap offering a large number of components

    +
  • +
  • +

    my-thai-star, a showcase and reference implementation in Angular of a real, responsive usable app using recommended architecture and patterns

    +
  • +
  • +

    A new Tutorial using my-thai-star as a starting point

    +
  • +
+
+ +
+
+
+

New Cobigen

+
+
+

Major changes in this release:

+
+
+
    +
  • +

    Support for multi-module projects

    +
  • +
  • +

    Client UI Generation:

    +
    +
      +
    • +

      New Angular 4 templates based on the latest - angular project seed

      +
    • +
    • +

      Basic Typescript Merger

      +
    • +
    • +

      Basic Angular Template Merger

      +
    • +
    • +

      JSON Merger

      +
    • +
    +
    +
  • +
  • +

    Refactored devon4j templates to make use of Java template logic feature

    +
  • +
  • +

    Bugfixes:

    +
    +
      +
    • +

      Fixed merging of nested Java annotations including array values

      +
    • +
    • +

      more minor issues

      +
    • +
    +
    +
  • +
  • +

    Under the hood:

    +
    +
      +
    • +

      Large refactoring steps towards language-agnostic templates: formatting-sensitive placeholder descriptions, automatically formatting camelCase to TrainCase to snake-case, etc.

      +
    • +
    +
    +
  • +
  • +

    Easy setup of CobiGen IDE to enable fluent contribution

    +
  • +
  • +

    CI integration improved to integrate with GitHub for more valuable feedback

    +
  • +
+
+ +
+
+
+

MyThaiStar: New Restaurant Example, reference implementation & Methodology showcase

+
+
+

A major part of the new devonfw release is the incorporation of a new application, "my-thai-star" which among others:

+
+
+
    +
  • +

    serve as an example of how to make a "real" devonfw application (i.e. the application could be used for real)

    +
  • +
  • +

    Serves as an attractive showcase

    +
  • +
  • +

    Serves as a reference application of devonfw patterns and practices as well as the standard example in the new devonfw tutorial

    +
  • +
  • +

    highlights modern security option like JWT Integration

    +
  • +
+
+
+

The application is accompanied by a substantial new documentation asset, the devonfw methodology, which describes in detail the whole lifecycle of the development of a devonfw application, from requirements gathering to technical design. Officially my-thai-star is still considered to be an incubator as especially this last part is still not as mature as it could be. But the example application and tutorial are 100% complete and functional and form a marked improvement over the "old" restaurant example app. My-Thai-star will become the standard example app from devonfw 3.0 onwards.

+
+ +
+
+
+

The new devonfw Tutorial

+
+
+

The devonfw Tutorial is a new part of the devonfw documentation which changes the focus of how people can get started with the platform

+
+
+

There are tutorials for devon4j, devon4ng (Angular) and more to come. My-Thai-Star is used throughout the tutorial series to demonstrate the basic principles, architecture, and good practices of the different devonfw "stacks". There is an elaborated exercise where the readers get to write their own application "JumpTheQueue".

+
+
+

We hope that the new tutorial offers a better, more efficient way for people to get started with devonfw. Answering especially the question: how to make a devonfw application.

+
+ +
+
+
+

devon4j 2.4.0

+
+
+

"devonfw for Java" or devon4j now includes updated versions of the latest stable versions of Spring Boot and the Spring Framework and all related dependencies. This allows guaranteed, stable execution of any devonfw 2.X application on the latest versions of the Industry Standard Spring stack. Another important new feature is a new testing architecture/infrastructure. All database options are updated to the latest versions as well as guaranteed to function on all Application Servers which should cause less friction and configuration time when starting a new devon4j project.

+
+
+

Details:

+
+
+
    +
  • +

    Spring Boot Upgrade to 1.5.3

    +
  • +
  • +

    Updated all underlying dependencies

    +
  • +
  • +

    Spring version is 4.3.8

    +
  • +
  • +

    Exclude Third Party Libraries that are not needed from sample restaurant application

    +
  • +
  • +

    Bugfix:Fixed the 'WhiteLabel' error received when tried to login to the sample restaurant application that is deployed onto external Tomcat

    +
  • +
  • +

    Bugfix:Removed the API api.org.apache.catalina.filters.SetCharacterEncodingFilter and used spring framework’s API org.springframework.web.filter.CharacterEncodingFilter instead

    +
  • +
  • +

    Bugfix:Fixed the error "class file for javax.interceptor.InterceptorBinding not found" received when executing the command 'mvn site' when trying to generate javadoc using Maven javadoc plugin

    +
  • +
  • +

    Documentation of the usage of UserDetailsService of Spring Security

    +
  • +
+
+ + +
+
+
+

Microservices Netflix

+
+
+

devonfw now includes a microservices implementation based on Spring Cloud Netflix. It provides a Netflix OSS integrations for Spring Boot apps through auto-configuration and binding to the Spring Environment. It offers microservices archetypes and a complete user guide with all the details to start creating microservices with devonfw.

+
+ +
+
+
+

devonfw distribution based on Eclipse OOMPH

+
+
+

The new Eclipse devonfw distribution is now based on Eclipse OOMPH, which allows us, and any engagement, to create and manage the distribution more effectively by formalizing the setup instructions so they can be performed automatically (due to a blocking issue postponed to devonfw 2.2.1 which will be released a few weeks after 2.2.0)

+
+
+
+
+

Visual Studio Code or Atom

+
+
+

The devonfw distro now contains Visual Studio Code alongside Eclipse in order to provide a default, state of the art, environment for web based development.

+
+ +
+
+
+

More I18N options

+
+
+

The platform now contains more documentation and a conversion utility which makes it easier to share i18n resource files between the different frameworks.

+
+ +
+
+
+

Spring Integration as devonfw Module

+
+
+

This release includes a new module based on the Java Message Service (JMS) and Spring Integration which provides a communication system (sender/subscriber) out-of-the-box with simple channels (only to send and read messages), request and reply channels (to send messages and responses) and request & reply asynchronously channels.

+
+ +
+
+
+

devonfw Harvest contributions

+
+
+

devonfw contains a whole series of new components obtained through the Harvesting process. Examples are :

+
+
+
    +
  • +

    New backend IP module Compose for Redis: management component for cloud environments. Redis is an open-source, blazingly fast, key/value low maintenance store. Compose’s platform gives you a configuration pre-tuned for high availability and locked down with additional security features. The component will manage the service connection and the main methods to manage the key/values on the storage. The library used is "lettuce".

    +
  • +
  • +

    Sencha component for extending GMapPanel with the following functionality :

    +
    +
      +
    • +

      Markers management

      +
    • +
    • +

      Google Maps options management

      +
    • +
    • +

      Geoposition management

      +
    • +
    • +

      Search address and coordinates management

      +
    • +
    • +

      Map events management

      +
    • +
    • +

      Map life cycle and behavior management

      +
    • +
    +
    +
  • +
  • +

    Sencha responsive Footer that moves from horizontal to vertical layout depending on the screen resolution or the device type. It is a simple functionality but we consider it very useful and reusable.

    +
  • +
+
+ +
+
+
+

More Deployment options to JEE Application Servers and Docker/CloudFoundry

+
+
+

The platform now fully supports deployment on the latest version of Weblogic, WebSphere, Wildfly (JBoss) as well as Docker and Cloud Foundry.

+
+ +
+
+
+

Devcon on Linux

+
+
+

Devcon is now fully supported on Linux which, together with the devonfw distro running on Linux, makes devonfw fully multi-platform and Cloud compatible (as Linux is the default OS in the Cloud!)

+
+ +
+
+
+

New devonfw Incubators

+
+
+

Different Business Units (countries) have contributed "incubator" frameworks:

+
+
+
    +
  • +

    devon4NET (Stack based on .NET Core / .NET "Classic" (4.6))

    +
  • +
  • +

    devon4X (Stack based on Xamarin)

    +
  • +
  • +

    devon4node (Stack based on Node-js/Serverless): https://github.com/devonfw/devon4node

    +
  • +
+
+
+

An "incubator" status means that the frameworks are production ready, all are actually already used in production, but are still not fully compliant with the devonfw definition of a "Minimally Viable Product".

+
+
+

During this summer devon4NET will be properly installed. In the mean time, if you want to have access to the source code, please contact the devonfw Core Team.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/release-notes-version-2.3.html b/docs/devonfw.github.io/1.0/general/release-notes-version-2.3.html new file mode 100644 index 00000000..0306d8ee --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/release-notes-version-2.3.html @@ -0,0 +1,818 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devonfw Release notes 2.3 "Dash"

+
+ +
+
+
+

Release: improving and strengthening the Platform

+
+
+

We are proud to announce the immediate release of devonfw version 2.3 (code named “Dash” during development). This release comes with a bit of a delay as we decided to wait for the publication of devon4j 2.5. “Dash” contains a slew of new features but in essence it is already driven by what we expect to be the core focus of 2018: strengthening the platform and improving quality.

+
+
+

After one year and a half of rapid expansion, we expect the next release(s) of the devonfw 2.x series to be fully focused on deepening the platform rather than expanding it. That is to say: we should work on improving existing features rather than adding new ones and strengthen the qualitative aspects of the software development life cycle, i.e. testing, infrastructure (CI, provisioning) etc.

+
+
+

“Dash” already is very much an example of this. This release contains the Allure Test Framework as an incubator. This is an automated testing framework for functional testing of web applications. Another incubator is the devonfw Shop Floor which is intended to be a compilation of DevOps experiences from the devonfw perspective. Based on this, devonfw has been OpenShift Primed (“certified”) by Red Hat.

+
+
+

There is a whole range of new features and improvements which can be seen in that light. devon4j 2.5 changes and improves the package structure of the core Java framework. The My Thai Star sample app has now been fully integrated in the different frameworks and the devonfw Guide has once again been significantly expanded and improved.

+
+
+
+
+

An industrialized platform for the ADcenter

+
+
+

Although less visible to the overall devonfw community, an important driving force was (meaning that lots of work has been done in the context of) the creation of the ADcenter concept towards the end of 2017. Based on a radical transformation of on/near/offshore software delivery, the focus of the ADcenters is to deliver agile & accelerated “Rightshore” services with an emphasis on:

+
+
+
    +
  • +

    Delivering Business Value and optimized User Experience

    +
  • +
  • +

    Innovative software development with state of the art technology

    +
  • +
  • +

    Highly automated devops; resulting in lower costs & shorter time-to-market

    +
  • +
+
+
+

The first two ADcenters, in Valencia (Spain) and Bangalore (India), are already servicing clients all over Europe - Germany, France, Switzerland and the Netherlands - while ADcenter aligned production teams are currently working for Capgemini UK as well (through Spain). Through the ADcenter, Capgemini establishes industrialized innovation; designed for & with the user. The availability of platforms for industrialized software delivery like devonfw and the Production Line has allowed us to train and make available over 150 people in a very short time.

+
+
+

The creation of the ADcenter in such a short time is visible proof that we´re getting closer to a situation where devonfw and Production Line are turning into the default development platform for APPS2, thereby standardizing all aspects of the software development life cycle: from training and design, architecture, devops and development, all the way up to QA and deployment.

+
+
+
+
+

Changes and new features

+
+ +
+
+
+

devonfw dist

+
+
+

The devonfw dist, or distribution, i.e. the central zip file which contains the main working environment for the devonfw developer, has been significantly enhanced. New features include:

+
+
+
    +
  • +

    Eclipse Oxygen integrated

    +
    +
      +
    • +

      CheckStyle Plugin installed and configured

      +
    • +
    • +

      SonarLint Plugin installed and configured

      +
    • +
    • +

      Git Plugin installed

      +
    • +
    • +

      FindBugs replaced by SpotBugs and configured

      +
    • +
    • +

      Tomcat8 specific Oxygen configuration

      +
    • +
    • +

      CobiGen Plugin installed

      +
    • +
    +
    +
  • +
  • +

    Other Software

    +
    +
      +
    • +

      Cmder integrated (when console.bat launched)

      +
    • +
    • +

      Visual Studio Code latest version included and pre-configured with https://github.com/devonfw/extension-pack-vscode

      +
    • +
    • +

      Ant updated to latest.

      +
    • +
    • +

      Maven updated to latest.

      +
    • +
    • +

      Java updated to latest.

      +
    • +
    • +

      Nodejs LTS updated to latest.

      +
    • +
    • +

      @angular/cli included.

      +
    • +
    • +

      Yarn package manager included.

      +
    • +
    • +

      Python3 integrated

      +
    • +
    • +

      Spyder3 IDE integrated in python3 installation

      +
    • +
    • +

      devon4ng-application-template for Angular5 at workspaces/examples

      +
    • +
    • +

      Devon4sencha starter templates updated

      +
    • +
    +
    +
  • +
+
+
+
+
+

devon4j 2.5

+
+ +
+
+
+

== Support for JAX-RS & JAX-WS clients

+
+
+

With the aim to enhance the ease in consuming RESTful and SOAP web services, JAX-RS and JAX-WS clients have been introduced. They enable developers to concisely and efficiently implement portable client-side solutions that leverage existing and well-established client-side HTTP connector implementations. Furthermore, the getting started time for consuming web services has been considerably reduced with the default configuration out-of-the-box which can be tweaked as per individual project requirements.

+
+ +
+
+
+

== Separate security logs for devon4j log component

+
+
+

Based on OWASP(Open Web Application Security Project), devon4j aims to give developers more control and flexibility with the logging of security events and tracking of forensic information. Furthermore, it helps classifying the information in log messages and applying masking when necessary. It provides powerful security features while based on set of logging APIs developers are already familiar with over a decade of their experience with Log4J and its successors.

+
+
+
+
+

== Support for Microservices

+
+
+

Integration of a devon4j application into a Microservices environment can now be leveraged with this release of devon4j. Introduction of service clients for RESTful and SOAP web services based on Java EE give developers agility and ease to access microservices in the Devon framework. It significantly cuts down the efforts on part of developers around boilerplate code and stresses more focus on the business code improving overall efficiency and quality of deliverables.

+
+
+
+
+

Cobigen

+
+
+

A new version of Cobigen has been included. New features include:

+
+
+ +
+
+
+
+

My Thai Star Sample Application

+
+
+

From this release on the My Thai Star application has been fully integrated in the different frameworks in the platform. Furthermore, a more modularized approach has been followed in the current release of the My Thai Star application to decouple the client from implementation details, which provides better encapsulation of code and dependency management for API and implementation classes. This has been achieved with the creation of a new “API” module that contains interfaces for REST services and corresponding Request/Response objects, with the existing “Core” module being dependent on the “API” module. To read further you can follow the link https://github.com/devonfw/my-thai-star/wiki/java-design#basic-architecture-details

+
+
+

Furthermore: an email and Twitter micro service were integrated in my-thai-star. This is just for demonstration purposes. A full micro service framework is already part of devon4j 2.5.0

+
+
+
+
+

Documentation refactoring

+
+
+

The complete devonfw guide is restructured and refactored. Getting started guides are added for an easy start with devonfw. The new Tutorial has been integrated with the existing devonfw Guide, whereby existing chapters of the previous tutorial were converted to Cookbook chapters. Asciidoctor is used for devonfw guide PDF generation. See: https://github.com/devonfw/devonfw-guide/wiki

+
+
+
+
+

devon4ng

+
+
+

The following changes have been incorporated in devon4ng:

+
+
+
    +
  • +

    Angular CLI 1.6.0,

    +
  • +
  • +

    Angular 5.1,

    +
  • +
  • +

    Angular Material 5 and Covalent 1.0.0 RC1,

    +
  • +
  • +

    PWA enabled,

    +
  • +
  • +

    Core and Shared Modules included to follow the recommended Angular projects structure,

    +
  • +
  • +

    Yarn and NPM compliant since both lock files are included in order to get a stable installation.

    +
  • +
+
+
+
+
+

Admin interface for devon4j apps

+
+
+

The new version includes an Integration of an admin interface for devon4j apps (Spring Boot). This module is based on CodeCentric´s Spring Boot Admin (https://github.com/codecentric/spring-boot-admin).

+
+
+
+
+

Devcon

+
+
+

A new version of Devcon has been released. Fixes and new features include:

+
+
+
    +
  • +

    Renaming of system Commands.

    +
  • +
  • +

    New menu has been added - “other modules”, if menus are more than 10, other modules will display some menus.

    +
  • +
  • +

    A progress bar has been added for installing the distribution

    +
  • +
+
+
+
+
+

devonfw Modules

+
+
+

Existing devonfw modules can now be accessed with the help of starters following namespace devonfw-<module_name>-starter. Starters available for modules:

+
+
+
    +
  • +

    Reporting module

    +
  • +
  • +

    WinAuth AD Module

    +
  • +
  • +

    WinAuth SSO Module

    +
  • +
  • +

    I18n Module

    +
  • +
  • +

    Async Module

    +
  • +
  • +

    Integration Module

    +
  • +
  • +

    Microservice Module

    +
  • +
  • +

    Compose for Redis Module

    +
  • +
+
+ +
+
+
+

devonfw Shop Floor

+
+
+

This incubator is intended to be a compilation of DevOps experiences from the devonfw perspective. “How we use our devonfw projects in DevOps environments”. Integration with the Production Line, creation and service integration of a Docker-based CI environment and deploying devonfw applications in an OpenShift Origin cluster using devonfw templates.

+
+ +
+
+
+

devonfw-testing

+
+
+

The Allure Test Framework is an automated testing framework for functional testing of web applications and in coming future native mobile apps, web services and databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions.

+
+
+
    +
  • +

    Examples available under embedded project “Allure-App-Under-Test” and in project wiki: https://github.com/devonfw/devonfw-testing/wiki

    +
  • +
  • +

    How to install: https://github.com/devonfw/devonfw-testing/wiki/How-to-install

    +
  • +
  • +

    Release Notes:

    +
    +
      +
    • +

      Core Module – ver.4.12.0.3:

      +
      +
        +
      • +

        Test report with logs and/or screenshots

        +
      • +
      • +

        Test groups/tags

        +
      • +
      • +

        Data Driven (inside test case, external file)

        +
      • +
      • +

        Test case parallel execution

        +
      • +
      • +

        Run on independent Operating System (Java)

        +
      • +
      • +

        Externalize test environment (DEV, QA, PROD)

        +
      • +
      +
      +
    • +
    • +

      UI Selenium module – ver. 3.4.0.3:

      +
      +
        +
      • +

        Malleable resolution ( Remote Web Design, Mobile browsers)

        +
      • +
      • +

        Support for many browsers( Internet Explorer, Edge, Chrome, Firefox, Safari)

        +
      • +
      • +

        User friendly actions ( elementCheckBox, elementDropdown, etc. )

        +
      • +
      • +

        Ubiquese test execution (locally, against Selenium Grid through Jenkins)

        +
      • +
      • +

        Page Object Model architecture

        +
      • +
      • +

        Selenium WebDriver library ver. 3.4.0

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+ +
+
+
+

DOT.NET Framework incubators

+
+
+

The .NET Core and Xamarin frameworks are still under development by a workgroup from The Netherlands, Spain, Poland, Italy, Norway and Germany. The 1.0 release is expected to be coming soon but the current incubator frameworks are already being used in several engagements. Some features to highlight are:

+
+
+
    +
  • +

    Full .NET implementation with multi-platform support

    +
  • +
  • +

    Detailed documentation for developers

    +
  • +
  • +

    Docker ready

    +
  • +
  • +

    Web API server side template :

    +
    +
      +
    • +

      Swagger auto-generation

      +
    • +
    • +

      JWT security

      +
    • +
    • +

      Entity Framework Support

      +
    • +
    • +

      Advanced log features

      +
    • +
    +
    +
  • +
  • +

    Xamarin Templates based on Excalibur framework

    +
  • +
  • +

    My Thai Star implementation:

    +
    +
      +
    • +

      Backend (.NET Core)

      +
    • +
    • +

      FrontEnd (Xamarin)

      +
    • +
    +
    +
  • +
+
+
+
+
+

devonfw has been Primed by Red Hat for OpenShift

+
+
+

OpenShift is a supported distribution of Kubernetes from Red Hat for container-based software deployment and management. It is using Docker containers and DevOps tools for accelerated application development. Using OpenShift allows Capgemini to avoid Cloud Vendor lock-in. OpenShift provides devonfw with a state of the art CI/CD environment (devonfw Shop Floor), providing devonfw with a platform for the whole development life cycle: from development to staging / deploy.

+
+ +
+
+
+

Harvested components and modules

+
+
+

The devonfw Harvesting process continues to add valuable components and modules to the devonfw platform. The last months the following elements were contributed:

+
+
+
+
+

== Service Client support (for Micro service Projects).

+
+
+

This client is for consuming microservices from other applications. This solution is already very flexible and customizable. As of now, it is suitable for small and simple projects where two or three microservices are invoked. Donated by Jörg Hohwiller. See: https://github.com/devonfw-forge/devonfw-microservices

+
+
+
+
+

== JHipster devonfw code generation

+
+
+

This component was donated by the ADcenter in Valencia. It was made in order to comply with strong requirements (especially from the French BU) to use jHipster for code generation.

+
+
+

JHipster is a code generator based on Yeoman generators. Its default generator generator-jhipster generates a specific JHipster structure. The purpose of generator-jhipster-DevonModule is to generate the structure and files of a typical devon4j project. It is therefore equivalent to the standard devon4j application template based CobiGen code generation.

+
+
+
+
+

== Simple Jenkins task status dashboard

+
+
+

This component has been donated by, and harvested from a system in use by, Capgemini Valencia. This dashboard, apart from an optional gamification element, allows the display of multiple Jenkins instances.

+
+
+
+
+

And lots more, among others:

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/release-notes-version-2.4.html b/docs/devonfw.github.io/1.0/general/release-notes-version-2.4.html new file mode 100644 index 00000000..c8221192 --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/release-notes-version-2.4.html @@ -0,0 +1,965 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devonfw Release notes 2.4 “EVE”

+
+ +
+
+
+

Introduction

+
+
+

We are proud to announce the immediate release of devonfw version 2.4 (code named “EVE” during development). This version is the first one that fully embraces Open Source, including components like the documentation assets and CobiGen. Most of the IP (Intellectual Property or proprietary) part of devonfw are now published under the Apache License version 2.0 (with the documentation under the Creative Commons License (Attribution-NoDerivatives)). This includes the GitHub repositories where all the code and documentation is located. All of these repositories are now open for public viewing as well.

+
+
+

“EVE” contains a slew of new features but in essence it is already driven by what we expect to be the core focus of 2018: strengthening the platform and improving quality.

+
+
+

This release is also fully focused on deepening the platform rather than expanding it. That is to say: we have worked on improving existing features rather than adding new ones and strengthen the qualitative aspects of the software development life cycle, i.e. security, testing, infrastructure (CI, provisioning) etc.

+
+
+

“EVE” already is very much an example of this. This release contains the Allure Test Framework (included as an incubator in version 2.3) update called MrChecker Test Framework. MrChecker is an automated testing framework for functional testing of web applications, API web services, Service Virtualization, Security and, in the coming future, native mobile apps and databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions.

+
+
+

Another incubator being updated is the devonfw Shop Floor, which is intended to be a compilation of DevOps experiences from the devonfw perspective. A new part of the release is the new Solution Guide for Application Security based on the state of the art in OWASP-based application security.

+
+
+

There is a whole range of new features and improvements which can be seen in that light. devon4j 2.6 changes and improves the package structure of the core Java framework. The My Thai Star sample app has now been upgraded to Angular 6, lots of bugs have been fixed and the devonfw Guide has once again been improved.

+
+
+

Last but not least, this release contains the formal publication of the devonfw Methodology or The Accelerated Solution Design - an Industry Standards based solution design and specification (documentation) methodology for Agile (and less-than-agile) projects.

+
+
+
+
+

Changes and new features

+
+ +
+
+
+

devonfw 2.4 is Open Source

+
+
+

This version is the first release of devonfw that fully embraces Open Source, including components like the documentation assets and CobiGen. This is done in response to intensive market pressure and demands from the MUs (Public Sector France, Netherlands).

+
+
+

Most of the IP (Intellectual Property or proprietary) part of devonfw are now published under the Apache License version 2.0 (with the documentation under the Creative Commons License (Attribution-NoDerivatives)).

+
+
+

So you can now use the devonfw distribution (the "zip" file), CobiGen, the devonfw modules and all other components without any worry to expose the client unwittingly to Capgemini IP.

+
+
+

Note: there are still some components which are IP and are not published under an OSS license: the class room trainings, the Sencha components and some CobiGen templates. But these are not included in the distribution nor the documentation and are now completely maintained separately.

+
+
+
+
+

devonfw dist

+
+
+
    +
  • +

    Eclipse Oxygen integrated

    +
    +
      +
    • +

      CheckStyle Plugin updated.

      +
    • +
    • +

      SonarLint Plugin updated.

      +
    • +
    • +

      Git Plugin updated.

      +
    • +
    • +

      FindBugs Plugin updated.

      +
    • +
    • +

      CobiGen plugin updated.

      +
    • +
    +
    +
  • +
  • +

    Other Software

    +
    +
      +
    • +

      Visual Studio Code latest version included and pre-configured with https://devonfw.com/website/pages/docs/cli.adoc.html#vscode.adoc

      +
    • +
    • +

      Ant updated to latest.

      +
    • +
    • +

      Maven updated to latest.

      +
    • +
    • +

      Java updated to latest.

      +
    • +
    • +

      Nodejs LTS updated to latest.

      +
    • +
    • +

      @angular/cli included.

      +
    • +
    • +

      Yarn package manager updated.

      +
    • +
    • +

      Python3 updated.

      +
    • +
    • +

      Spyder3 IDE integrated in python3 installation updated.

      +
    • +
    • +

      devon4ng-application-template for Angular 6 at workspaces/examples

      +
    • +
    +
    +
  • +
+
+
+
+
+

My Thai Star Sample Application

+
+
+

The new release of My Thai Star has focused on the following improvements:

+
+
+
    +
  • +

    Release 1.6.0.

    +
  • +
  • +

    Travis CI integration with Docker. Now we get a valuable feedback of the current status and when collaborators make pull requests.

    +
  • +
  • +

    Docker compose deployment.

    +
  • +
  • +

    devon4j:

    +
    +
      +
    • +

      Flyway upgrade from 3.2.1 to 4.2.0

      +
    • +
    • +

      Bug fixes.

      +
    • +
    +
    +
  • +
  • +

    devon4ng:

    +
    +
      +
    • +

      Client devon4ng updated to Angular 6.

      +
    • +
    • +

      Frontend translated into 9 languages.

      +
    • +
    • +

      Improved mobile and tablet views.

      +
    • +
    • +

      Routing fade animations.

      +
    • +
    • +

      Compodoc included to generate dynamically frontend documentation.

      +
    • +
    +
    +
  • +
+
+
+
+
+

Documentation updates

+
+
+

The following contents in the devonfw guide have been updated:

+
+
+
    +
  • +

    devonfw OSS modules documentation.

    +
  • +
  • +

    Creating a new devon4j application.

    +
  • +
  • +

    How to update Angular CLI in devonfw.

    +
  • +
  • +

    Include Angular i18n.

    +
  • +
+
+
+

Apart from this the documentation has been reviewed and some typos and errors have been fixed.

+
+
+

The current development of the guide has been moved to https://github.com/devonfw/devonfw-guide/wiki in order to be available as the rest of OSS assets.

+
+
+
+
+

devon4j

+
+
+

The following changes have been incorporated in devon4j:

+
+
+
    +
  • +

    Integrate batch with archetype.

    +
  • +
  • +

    Application module structure and dependencies improved.

    +
  • +
  • +

    Issues with Application Template fixed.

    +
  • +
  • +

    Solved issue where Eclipse maven template devon4j-template-server version 2.4.0 produced pom with missing dependency spring-boot-starter-jdbc.

    +
  • +
  • +

    Solved datasource issue with project archetype 2.4.0.

    +
  • +
  • +

    Decouple archetype from sample (restaurant).

    +
  • +
  • +

    Upgrade to Flyway 4.

    +
  • +
  • +

    Fix for issue with Java 1.8 and QueryDSL #599.

    +
  • +
+
+
+
+
+

devon4ng

+
+
+

The following changes have been incorporated in devon4ng:

+
+
+ +
+
+
+
+

AppSec Quick Solution Guide

+
+
+

This release incorporates a new Solution Guide for Application Security based on the state of the art in OWASP based application security. The purpose of this guide is to offer quick solutions for common application security issues for all applications based on devonfw. It’s often the case that we need our systems to comply to certain sets of security requirements and standards. Each of these requirements needs to be understood, addressed and converted to code or project activity. We want this guide to prevent the wheel from being reinvented over and over again and to give clear hints and solutions to common security problems.

+
+
+ +
+
+
+
+

CobiGen

+
+
+
    +
  • +

    CobiGen_Templates project and docs updated.

    +
  • +
  • +

    CobiGen Angular 6 generation improved based on the updated application template

    +
  • +
  • +

    CobiGen Ionic CRUD App generation based on Ionic application template. Although a first version was already implemented, it has been deeply improved:

    +
    +
      +
    • +

      Changed the code structure to comply with Ionic standards.

      +
    • +
    • +

      Added pagination.

      +
    • +
    • +

      Pull-to-refresh, swipe and attributes header implemented.

      +
    • +
    • +

      Code documented and JSDoc enabled (similar to Javadoc)

      +
    • +
    +
    +
  • +
  • +

    CobiGen TSPlugin Interface Merge support.

    +
  • +
  • +

    CobiGen XML plugin comes out with new cool features:

    +
    +
      +
    • +

      Enabled the use of XPath within variable assignment. You can now retrieve almost any data from an XML file and store it on a variable for further processing on the templates. Documented here.

      +
    • +
    • +

      Able to generate multiple output files per XML input file.

      +
    • +
    • +

      Generating code from UML diagrams. XMI files (standard XML for UML) can be now read and processed. This means that you can develop templates and generate code from an XMI like class diagrams.

      +
    • +
    +
    +
  • +
  • +

    CobiGen OpenAPI plugin released with multiple bug-fixes and other functionalities like:

    +
    +
      +
    • +

      Assigning global and local variables is now possible. Therefore you can set any string for further processing on the templates. For instance, changing the root package name of the generated files. Documented here.

      +
    • +
    • +

      Enabled having a class with more than one relationship to another class (more than one property of the same type).

      +
    • +
    +
    +
  • +
  • +

    CobiGen Text merger plugin has been extended and now it is able to merge text blocks. This means, for example, that the generation and merging of adoc documentation is possible. Documented here.

    +
  • +
+
+
+
+
+

Devcon

+
+
+

A new version of Devcon has been released. Fixes and new features include:

+
+
+
    +
  • +

    Now Devcon is OSS, with public repository at https://github.com/devonfw/devcon

    +
  • +
  • +

    Updated to match current devon4j

    +
  • +
  • +

    Update to download Linux distribution.

    +
  • +
  • +

    Custom modules creation improvements.

    +
  • +
  • +

    Bugfixes.

    +
  • +
+
+
+
+
+

devonfw OSS Modules

+
+
+
    +
  • +

    Existing devonfw IP modules have been moved to OSS.

    +
    +
      +
    • +

      They can now be accessed in any devon4j project as optional dependencies from Maven Central.

      +
    • +
    • +

      The repository now has public access https://github.com/devonfw/devon

      +
    • +
    +
    +
  • +
  • +

    Starters available for modules:

    +
    +
      +
    • +

      Reporting module

      +
    • +
    • +

      WinAuth AD Module

      +
    • +
    • +

      WinAuth SSO Module

      +
    • +
    • +

      I18n Module

      +
    • +
    • +

      Async Module

      +
    • +
    • +

      Integration Module

      +
    • +
    • +

      Microservice Module

      +
    • +
    • +

      Compose for Redis Module

      +
    • +
    +
    +
  • +
+
+ +
+
+
+

devonfw Shop Floor

+
+
+
    +
  • +

    devonfw Shop Floor 4 Docker

    +
    +
      +
    • +

      Docker-based CICD environment

      +
      +
        +
      • +

        docker-compose.yml (installation file)

        +
      • +
      • +

        dsf4docker.sh (installation script)

        +
      • +
      • +

        Service Integration (documentation in Wiki)

        +
      • +
      +
      +
    • +
    • +

      devonfw projects build and deployment with Docker

      +
      +
        +
      • +

        Dockerfiles (multi-stage building)

        +
        +
          +
        • +

          Build artifact (NodeJS for Angular and Maven for Java)

          +
        • +
        • +

          Deploy built artifact (NGINX for Angular and Tomcat for Java)

          +
        • +
        • +

          NGINX Reverse-Proxy to redirect traffic between both Angular client and Java server containers.

          +
        • +
        +
        +
      • +
      +
      +
    • +
    +
    +
  • +
  • +

    devonfw Shop Floor 4 OpenShift

    +
    +
      +
    • +

      devonfw projects deployment in OpenShift cluster

      +
      +
        +
      • +

        s2i images

        +
      • +
      • +

        OpenShift templates

        +
      • +
      • +

        Video showcase (OpenShift Origin 3.6)

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+

This incubator is intended to be a compilation of DevOps experiences from the devonfw perspective. “How we use our devonfw projects in DevOps environments”. Integration with the Production Line, creation and service integration of a Docker-based CI environment and deploying devonfw applications in an OpenShift Origin cluster using devonfw templates. +See: https://github.com/devonfw/devonfw-shop-floor

+
+
+
+
+

devonfw Testing

+
+
+

The MrChecker Test Framework is an automated testing framework for functional testing of web applications, API web services, Service Virtualization, Security and, in the coming future, native mobile apps and databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions.

+
+
+
    +
  • +

    Examples available under embedded project “MrChecker-App-Under-Test” and in project wiki: https://github.com/devonfw/devonfw-testing/wiki

    +
  • +
  • +

    How to install:

    + +
  • +
  • +

    Release Note:

    +
    +
      +
    • +

      module core - 4.12.0.8:

      +
      +
        +
      • +

        fixes on getting Environment values

        +
      • +
      • +

        top notch example how to keep vulnerable data in repo , like passwords

        +
      • +
      +
      +
    • +
    • +

      module selenium - 3.8.1.8:

      +
      +
        +
      • +

        browser driver auto downloader

        +
      • +
      • +

        list of out-of-the-box examples to use in any web page

        +
      • +
      +
      +
    • +
    • +

      module webAPI - ver. 1.0.2 :

      +
      +
        +
      • +

        api service virtualization with REST and SOAP examples

        +
      • +
      • +

        api service virtualization with dynamic arguments

        +
      • +
      • +

        REST working test examples with page object model

        +
      • +
      +
      +
    • +
    • +

      module security - 1.0.1 (security tests against My Thai Star)

      +
    • +
    • +

      module DevOps :

      +
      +
        +
      • +

        dockerfile for Test environment execution

        +
      • +
      • +

        CI + CD as Jenkinsfile code

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+

devonfw methodology: Accelerated Solution Design

+
+
+

One of the prime challenges in Distributed Agile Delivery is the maintenance of a common understanding and unity of intent among all participants in the process of creating a product. That is: how can you guarantee that different parties in the client, different providers, all in different locations and time zones during a particular period of time actually understand the requirements of the client, the proposed solution space and the state of implementation.

+
+
+

We offer the Accelerated Solution Design as a possible answer to these challenges. The ASD is carefully designed to be a practical guideline that fosters and ensures the collaboration and communication among all team members.

+
+
+

The Accelerated Solution Design is:

+
+
+
    +
  • +

    A practical guideline rather than a “methodology”

    +
  • +
  • +

    Based on industry standards rather than proprietary methods

    +
  • +
  • +

    Consisting of an evolving, “living”, document set rather than a static, fixed document

    +
  • +
  • +

    Encapsulating the business requirements, functional definitions as well as Architecture design

    +
  • +
  • +

    Based on the intersection of Lean, Agile, DDD and User Story Mapping

    +
  • +
+
+
+

And further it is based on the essential belief or paradigm that ASD should be:

+
+
+
    +
  • +

    Focused on the design (definition) of the “externally observable behavior of a system”

    +
  • +
  • +

    Promoting communication and collaboration between team members

    +
  • +
  • +

    Guided by prototypes

    +
  • +
+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/release-notes-version-2020.04.html b/docs/devonfw.github.io/1.0/general/release-notes-version-2020.04.html new file mode 100644 index 00000000..c45ea36b --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/release-notes-version-2020.04.html @@ -0,0 +1,1107 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devonfw Release notes 2020.04

+
+ +
+
+
+

Introduction

+
+
+

We are proud to announce the immediate release of devonfw version 2020.04. This version is the first one with the new versioning scheme that will make it easier for the community to identify when it was released, since we use the year and month as many other software distributions do.

+
+
+

This release includes lots of bug fixes and many version updates, but it is very important to highlight the following improvements:

+
+
+
    +
  • +

    New devonfw IDE auto-configure project feature.

    +
  • +
  • +

    Improved devonfw IDE plugin configuration.

    +
  • +
  • +

    New devon4j kafka module.

    +
  • +
  • +

    New devon4j JWT module.

    +
  • +
  • +

    New devon4j authorization of batches feature.

    +
  • +
  • +

    Dozer replaced with Orika in devon4j.

    +
  • +
  • +

    Support for composite keys in devon4j and CobiGen.

    +
  • +
  • +

    Multiple enhancements for project specific plugin development and usage of project specific template sets in CobiGen.

    +
  • +
  • +

    Ability to adapt your own templates by making use of CobiGen CLI.

    +
  • +
  • +

    Better responsiveness in eclipse and bugfixes in all assets in CobiGen.

    +
  • +
  • +

    devon4ng updated to Angular 9, NgRx 9 and Ionic 5, including documentation, samples and templates.

    +
  • +
  • +

    Yarn 2 support in devon4ng.

    +
  • +
  • +

    devon4node updated to NestJS 7 (packages, samples and documentation)

    +
  • +
  • +

    devon4node TSLint replaced with ESLint.

    +
  • +
  • +

    @devon4node/config package added.

    +
  • +
  • +

    devon4net updated to latest .NET Core 3.1.3 LTS version.

    +
  • +
  • +

    Update of the Production Line templates for devonfw projects in devonfw shop floor.

    +
  • +
  • +

    New merge feature included in the devonfw shop floor cicdgen tool.

    +
  • +
  • +

    Updated sonar-devon4j-plugin:

    +
    +
      +
    • +

      Improved coloring and other visual cues to our rule descriptions to highlight good and bad code examples.

      +
    • +
    • +

      Improved the locations of issues thrown on method- and class-level.

      +
    • +
    +
    +
  • +
+
+
+

Please check the detailed list below.

+
+
+

This would have not been possible without the commitment and hard work of the devonfw core team, German, Indian and ADCenter Valencia colleagues and collaborators as, among many others, the Production Line team.

+
+
+
+
+

devonfw IDE

+
+
+

The consolidated list of features for this devonfw IDE release is as follows.

+
+
+
+
+

2020.04.001

+
+
+

Starting with this release we have changed the versioning schema in devonfw to yyyy.mm.NNN where yyyy.mm is the date of the planned milestone release and NNN is a running number increased with every bug- or security-fix update.

+
+
+
    +
  • +

    #394 variable from devon.properties not set if not terminated with newline

    +
  • +
  • +

    #399 launching of Intellij fails with No such file or directory error.

    +
  • +
  • +

    #371 Eclipse plugin installation broke

    +
  • +
  • +

    #390 maven get/set-version buggy

    +
  • +
  • +

    #397 migration support for devon4j 2020.04.001

    +
  • +
  • +

    #400 allow custom args for release

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.04.001.

+
+
+
+
+

3.3.1

+
+
+

New release with bugfixes and new ide plugin feature:

+
+
+
    +
  • +

    #343: Setup can’t find Bash nor Git

    +
  • +
  • +

    #369: Fix flattening of POMs

    +
  • +
  • +

    #386: Feature/clone recursive

    +
  • +
  • +

    #379: Use own extensions folder in devonfw-ide

    +
  • +
  • +

    #381: Add ability to configure VS Code plugins via settings

    +
  • +
  • +

    #376: Improve Eclipse plugin configuration

    +
  • +
  • +

    #373: Fix project import on windows

    +
  • +
  • +

    #374: Rework build on import

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 3.3.1.

+
+
+
+
+

3.3.0

+
+
+

New release with bugfixes and new project import feature:

+
+
+
    +
  • +

    #343: Detect non-admin GitForWindows and Cygwin

    +
  • +
  • +

    #175: Ability to clone projects and import into Eclipse automatically

    +
  • +
  • +

    #346: devon eclipse add-plugin parameters swapped

    +
  • +
  • +

    #363: devon ide update does not pull latest project settings

    +
  • +
  • +

    #366: update java versions to latest fix releases

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 3.3.0.

+
+
+
+
+

devon4j

+
+
+

The consolidated list of features for this devon4j release is as follows.

+
+
+
+
+

2020.04.001

+
+
+

Starting with this release we have changed the versioning schema in devonfw to yyyy.mm.NNN where yyyy.mm is the date of the planned milestone release and NNN is a running number increased with every bug- or security-fix update.

+
+
+

The following changes have been incorporated in devon4j:

+
+
+
    +
  • +

    #233: Various version updates

    +
  • +
  • +

    #241: Add module to support JWT and parts of OAuth

    +
  • +
  • +

    #147: Switch from dozer to orika

    +
  • +
  • +

    #180: Cleanup archtype

    +
  • +
  • +

    #240: Add unreferenced guides

    +
  • +
  • +

    #202: Architecture documentation needs update for components

    +
  • +
  • +

    #145: Add a microservices article in the documentation

    +
  • +
  • +

    #198: Deploy SNAPSHOTs to OSSRH in travis CI

    +
  • +
  • +

    #90: Authorization of batches

    +
  • +
  • +

    #221: Wrote monitoring guide

    +
  • +
  • +

    #213: Document logging of custom field in json

    +
  • +
  • +

    #138: Remove deprecated RevisionMetadata[Type]

    +
  • +
  • +

    #211: Archetype: security config broken

    +
  • +
  • +

    #109: LoginController not following devon4j to use JAX-RS but uses spring-webmvc instead

    +
  • +
  • +

    #52: Improve configuration

    +
  • +
  • +

    #39: Ability to log custom fields via SLF4J

    +
  • +
  • +

    #204: Slf4j version

    +
  • +
  • +

    #190: Rework of spring-batch integration

    +
  • +
  • +

    #210: Rework documentation for blob support

    +
  • +
  • +

    #191: Rework of devon4j-batch module

    +
  • +
  • +

    #209: Include performance info in separate fields

    +
  • +
  • +

    #207: Use more specific exception for not found entity

    +
  • +
  • +

    #208: Remove unnecesary clone

    +
  • +
  • +

    #116: Bug in JSON Mapping for ZonedDateTime

    +
  • +
  • +

    #184: Fixed BOMs so devon4j and archetype can be used again

    +
  • +
  • +

    #183: Error in executing the project created with devon4j

    +
  • +
  • +

    #177: Switch to new maven-parent

    +
  • +
  • +

    169: Provide a reason, why unchecked exceptions are used in devon4j

    +
  • +
+
+
+

Documentation is available at devon4j guide 2020.04.001. +The full list of changes for this release can be found in milestone devon4j 2020.04.001.

+
+
+
+
+

devon4ng

+
+
+

The consolidated list of features for this devon4ng release is as follows.

+
+
+
+
+

2020.04.001

+
+
+

Starting with this release we have changed the versioning schema in devonfw to yyyy.mm.NNN where yyyy.mm is the date of the planned milestone release and NNN is a running number increased with every bug- or security-fix update.

+
+
+
    +
  • +

    #111: Yarn 2 support included

    +
  • +
  • +

    #96: devon4ng upgrade to Angular 9

    +
    +
      +
    • +

      Templates and samples updated to Angular 9, NgRx 9 and Ionic 5.

      +
    • +
    • +

      New internationalization module.

      +
    • +
    • +

      Documentation updates and improvements.

      +
    • +
    +
    +
  • +
  • +

    #95: Added token management info in documentation

    +
  • +
+
+
+
+
+

devon4net

+
+
+

The consolidated list of features for this devon4net release is as follows:

+
+
+
    +
  • +

    Updated to latest .NET Core 3.1.3 LTS version

    +
  • +
  • +

    Dependency Injection Autoregistration for services and repositories

    +
  • +
  • +

    Added multiple role managing claims in JWT

    +
  • +
  • +

    Added custom headers to circuit breaker

    +
  • +
  • +

    Reviewed default log configuration

    +
  • +
  • +

    Added support to order query results from database via lambda expression

    +
  • +
  • +

    Updated template and nuget packages

    +
  • +
+
+
+
+
+

devon4node

+
+
+

The consolidated list of features for this devon4node release is as follows:

+
+
+
    +
  • +

    Upgrade to NestJS 7 (packages, samples and documentation)

    +
  • +
  • +

    TSLint replaced with ESLint

    +
  • +
  • +

    Add lerna to project to manage all the packages

    +
  • +
  • +

    Add @devon4node/config package

    +
  • +
  • +

    Add new schematics: Repository

    +
  • +
  • +

    Improve WinstonLogger

    +
  • +
  • +

    Improve documentation

    +
  • +
  • +

    Update dependencies to latest versions

    +
  • +
+
+
+
+
+

CobiGen

+
+
+

New release with updates and bugfixes:

+
+
+
    +
  • +

    devonfw templates:

    +
    +
      +
    • +

      #1063: Upgrade devon4ng Ionic template to latest version

      +
    • +
    • +

      #1065: devon4ng templates for devon4node

      +
    • +
    • +

      #1128: update java templates for composite keys

      +
    • +
    • +

      #1130: Update template for devon4ng application template

      +
    • +
    • +

      #1131: Update template for devon4ng NgRx template

      +
    • +
    • +

      #1149: .NET templates

      +
    • +
    • +

      #1146: Dev ionic template update bug fix

      +
    • +
    +
    +
  • +
  • +

    TypeScript plugin:

    +
    +
      +
    • +

      #1126: OpenApi parse/merge issues (ionic List templates)

      +
    • +
    +
    +
  • +
  • +

    Eclipse plugin:

    +
    +
      +
    • +

      #412: Write UI Test for HealthCheck use

      +
    • +
    • +

      #867: Cobigen processbar

      +
    • +
    • +

      #1069: #953 dot path

      +
    • +
    • +

      #1099: NPE on HealthCheck

      +
    • +
    • +

      #1100: 1099 NPE on health check

      +
    • +
    • +

      #1101: #867 fix import of core and api

      +
    • +
    • +

      #1102: eclipse_plugin doesn’t accept folders as input

      +
    • +
    • +

      #1134: (Eclipse-Plugin) Resolve Template utility classes from core

      +
    • +
    • +

      #1142: #1102 accept all kinds of input

      +
    • +
    +
    +
  • +
  • +

    CobiGen core:

    +
    +
      +
    • +

      #429: Reference external template files

      +
    • +
    • +

      #1143: Abort generation if external trigger does not match

      +
    • +
    • +

      #1125: Generation of templates from external increments does not work

      +
    • +
    • +

      #747: Variable assignment for external increments throws exception

      +
    • +
    • +

      #1133: Bugfix/1125 generation of templates from external increments does not work

      +
    • +
    • +

      #1127: #1119 added new TemplatesUtilsClassesUtil class to core

      +
    • +
    • +

      #953: NPE bug if foldername contains a dot

      +
    • +
    • +

      #1067: Feature/158 lat variables syntax

      +
    • +
    +
    +
  • +
  • +

    CobiGen CLI:

    +
    +
      +
    • +

      #1111: Infinity loop in mmm-code (MavenDependencyCollector.collectWithReactor)

      +
    • +
    • +

      #1113: cobigen-cli does not seem to properly resolve classes from dependencies

      +
    • +
    • +

      #1120: Feature #1108 custom templates folder

      +
    • +
    • +

      #1115: Fixing CLI bugs related to dependencies and custom templates jar

      +
    • +
    • +

      #1108: CobiGen CLI: Allow easy use of user’s templates

      +
    • +
    • +

      #1110: FileSystemNotFoundException blocking cobigen-cli

      +
    • +
    • +

      #1138: #1108 dev cli feature custom templates folder

      +
    • +
    • +

      #1136: (Cobigen-CLI) Resolve Template utility classes from core

      +
    • +
    +
    +
  • +
+
+
+
+
+

devonfw-shop-floor

+
+
+
    +
  • +

    Add documentation for deploy jenkins slaves

    +
  • +
  • +

    Improve documentation

    +
  • +
  • +

    Add devon4net Openshift template

    +
  • +
  • +

    Add nginx docker image for devon4ng

    +
  • +
  • +

    Add Openshift provisioning

    +
  • +
  • +

    Production Line:

    +
    +
      +
    • +

      Updated MTS template: add step for dependency check and change the deployment method

      +
    • +
    • +

      Add template utils: initialize instance, openshift configuration, docker configuration and install sonar plugin

      +
    • +
    • +

      Add devon4net template

      +
    • +
    • +

      Add from existing template

      +
    • +
    • +

      Improve documentation

      +
    • +
    • +

      Refactor the documentation in order to follow the devonfw wiki workflow

      +
    • +
    • +

      Update devon4j, devon4ng, devon4net and devon4node in order to be able to choose the deployment method: none, docker or openshift.

      +
    • +
    • +

      Update the tools version in order to use the latest.

      +
    • +
    +
    +
  • +
  • +

    Production Line Shared Lib

    +
    +
      +
    • +

      Add more functionality to the existing classes.

      +
    • +
    • +

      Add classes: DependencyCheckConfiguration, DockerConfiguration and OpenshiftConfiguration

      +
    • +
    +
    +
  • +
  • +

    CICDGEN

    +
    +
      +
    • +

      Add devon4net support

      +
    • +
    • +

      Update tools versions in Jenkinsfiles to align with Production Line templates

      +
    • +
    • +

      Add merge strategies: error, keep, override, combine

      +
    • +
    • +

      Add lerna to the project

      +
    • +
    • +

      Minor improvements in the code

      +
    • +
    • +

      Add GitHub actions workflow to validate the new changes

      +
    • +
    • +

      Improve documentation

      +
    • +
    • +

      Breaking changes:

      +
      +
        +
      • +

        Remove the following parameters: plurl, ocurl

        +
      • +
      • +

        Add the following parameters: dockerurl, dockercertid, registryurl, ocname and merge

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+

Sonar devon4j plugin

+
+
+

The consolidated list of features for this Sonar devon4j plugin release is as follows.

+
+
+
+
+

2020.04.001

+
+
+

This is the first version using our new versioning scheme. Here, the following issues were resolved:

+
+
+
    +
  • +

    #60: Fixed a bug in the naming check for Use-Case implementation classes

    +
  • +
  • +

    #67: Fixed a bug where the whole body of a method or a class was marked as the issue location. Now only the method / class headers will be highlighted.

    +
  • +
  • +

    #68: Made our rule descriptions more accessible by using better readable colors as well as alternative visual cues

    +
  • +
  • +

    #71: Fixed a bug where a NPE could be thrown

    +
  • +
  • +

    #74: Fixed a bug where a method always returned null

    +
  • +
+
+
+

Unrelated to any specific issues, there was some refactoring and cleaning up done with the following two PRs:

+
+
+
    +
  • +

    PR #66: Refactored the prefixes of our rule names from 'Devon' to 'devonfw'

    +
  • +
  • +

    PR #65: Sorted security-related test files into their own package

    +
  • +
+
+
+

Changes for this release can be found in milestone 2020.04.001.

+
+
+
+
+

My Thai Star

+
+
+

As always, our reference application, My Thai Star, contains some interesting improvements that come from the new features and bug fixes from the other assets. The list is as follows:

+
+
+
    +
  • +

    devon4j - Java

    +
    +
      +
    • +

      Implement example batches with modified devon-batch

      +
    • +
    • +

      Upgrade spring boot version to 2.2.6 and devon4j 2020.004.001

      +
    • +
    • +

      Migrate from dozer to orika

      +
    • +
    +
    +
  • +
  • +

    devon4ng - Angular

    +
    +
      +
    • +

      Move configuration to NgRx store

      +
    • +
    +
    +
  • +
  • +

    devonfw shop floor - Jenkins

    +
    +
      +
    • +

      Update tools versions in order to align with Production Line templates

      +
    • +
    • +

      Add dependency check step (using dependency checker and yarn audit)

      +
    • +
    • +

      Send dependency checker reports to SonarQube

      +
    • +
    • +

      Changed deployment pipelines. Now pipelines are able to deploy docker containers using docker directly. No more ssh connections to execute commands in a remote machine are required.

      +
    • +
    • +

      Update documentation in order to reflect all changes

      +
    • +
    +
    +
  • +
  • +

    devon4node - Node.js

    +
    +
      +
    • +

      Upgrade to NestJS 7

      +
    • +
    • +

      Add custom repositories

      +
    • +
    • +

      Add exceptions and exception filters

      +
    • +
    • +

      Add tests (missing in the previous version)

      +
    • +
    • +

      Split logic into use cases in order to make the test process easier

      +
    • +
    • +

      Minor patches and improvements

      +
    • +
    • +

      Documentation updated in order to reflect the new implementation

      +
    • +
    +
    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/release-notes-version-2020.08.html b/docs/devonfw.github.io/1.0/general/release-notes-version-2020.08.html new file mode 100644 index 00000000..fe2af69f --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/release-notes-version-2020.08.html @@ -0,0 +1,731 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devonfw Release notes 2020.08

+
+ +
+
+
+

Introduction

+
+
+

We are proud to announce the release of devonfw version 2020.08.

+
+
+

This release includes lots of addition of new features, updates and bug fixes but it is very important to highlight the following improvements:

+
+
+
+
+

devonfw IDE

+
+
+

The consolidated list of features for this devonfw IDE release is as follows.

+
+
+
+
+

2020.08.001

+
+
+

Update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #454: update to eclipse 2020.06

    +
  • +
  • +

    #442: update nodejs and vscode

    +
  • +
  • +

    #432: vsCode settings are not updated

    +
  • +
  • +

    #446: intellij: doConfigureEclipse: command not found

    +
  • +
  • +

    #440: Software update may lead to inconsistent state due to windows file locks

    +
  • +
  • +

    #427: release: keep leading zeros

    +
  • +
  • +

    #450: update settings

    +
  • +
  • +

    #431: devon build command not working correct for yarn or npm

    +
  • +
  • +

    #449: update to devon4j 2020.08.001

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.08.001.

+
+
+
+
+

2020.04.004

+
+
+

Minor update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #433: Windows: devon command line sets wrong environment variables (with tilde symbol)

    +
  • +
  • +

    #435: fix variable resolution on bash

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.04.004.

+
+
+
+
+

2020.04.003

+
+
+

Minor update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #395: variable from devon.properites unset if value is in double quotes

    +
  • +
  • +

    #429: Added script to create a meta file in the users directory after setup

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.04.003.

+
+
+
+
+

2020.04.002

+
+
+

Minor update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #418: Make projects optional

    +
  • +
  • +

    #421: update devon4j to 2020.04.002

    +
  • +
  • +

    #413: Update Eclipse to 2020-03

    +
  • +
  • +

    #424: Strange errors on windows if devon.properties contains mixed line endings

    +
  • +
  • +

    #399: launching of Intellij fails with No such file or directory error.

    +
  • +
  • +

    #410: fix jsonmerge for boolean and null values

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.04.002.

+
+
+
+
+

devon4j

+
+
+

The consolidated list of features for this devon4j release is as follows.

+
+
+
+
+

2020.08.001

+
+
+

New release of devon4j with async REST service client support and other improvements:

+
+
+
    +
  • +

    #279: support for async service clients

    +
  • +
  • +

    #277: Update Security-Guide to recent OWASP Top (2017)

    +
  • +
  • +

    #281: cleanup documentation

    +
  • +
+
+
+

Documentation is available at devon4j guide 2020.08.001. +The full list of changes for this release can be found in milestone devon4j 2020.08.001.

+
+
+
+
+

2020.04.002

+
+
+

Minor update of devon4j with the following bugfixes and small improvements:

+
+
+
    +
  • +

    #261: JUnit4 backward compatibility

    +
  • +
  • +

    #267: Fix JWT permission expansion

    +
  • +
  • +

    #254: JWT Authentication support for devon4j-kafka

    +
  • +
  • +

    #258: archetype is still lacking a .gitignore

    +
  • +
  • +

    #273: Update libs

    +
  • +
  • +

    #271: Do not enable resource filtering by default

    +
  • +
  • +

    #255: Kafka: Support different retry configuration for different topics

    +
  • +
+
+
+

Documentation is available at devon4j guide 2020.04.002. +The full list of changes for this release can be found in milestone devon4j 2020.04.002.

+
+
+
+
+

devon4ng

+
+
+

This release is focused mainly on the Angular 10 upgrade:

+
+
+
    +
  • +

    #176: Template submodules updated to Angular 10 and NgRx 10.

    +
  • +
  • +

    #167, #168, #174 and #175: Updated electron (sample and documentation).

    +
  • +
  • +

    #166: Update error handler.

    +
  • +
  • +

    #165: Cypress sample.

    +
  • +
  • +

    #164: Update to Angular 10 (samples and documentation).

    +
  • +
+
+
+
+
+

devon4node

+
+
+

New devon4node version is published, the changes are:

+
+
+
    +
  • +

    Updated dependencies.

    +
  • +
  • +

    Solved bug when you introduce a name with dashes in new command.

    +
  • +
  • +

    Add more options to the non-interactive new command.

    +
  • +
+
+
+
+
+

CobiGen

+
+
+

CobiGen version numbers have been consolidated to now represent plug-in compatibility in the major release number (7.x.x).

+
+
+
+
+

CLI

+
+
+
    +
  • +

    CLI increments can be referenced by name and description.

    +
  • +
  • +

    Ability to configure logging.

    +
  • +
  • +

    Fixed error on code formatting.

    +
  • +
  • +

    Improved Performance by lazy plug-in loading.

    +
  • +
  • +

    Possibility to prefer custom plug-ins over CobiGen ones.

    +
  • +
  • +

    Fixed bug, which broke whole CobiGen execution in case a custom CobiGen Plug-in was throwing an arbitrary exception.

    +
  • +
+
+
+
+
+

Eclipse

+
+
+
    +
  • +

    Improved Performance by lazy plug-in loading.

    +
  • +
  • +

    Possibility to prefer custom plug-ins over CobiGen ones.

    +
  • +
  • +

    Fixed bug, which broke whole CobiGen execution in case a custom CobiGen Plug-in was throwing an arbitrary exception.

    +
  • +
+
+
+
+
+

Maven

+
+
+
    +
  • +

    Fixed bug to properly load template util classes.

    +
  • +
  • +

    Improved Performance by lazy plug-in loading.

    +
  • +
  • +

    Possibility to prefer custom plug-ins over CobiGen ones.

    +
  • +
  • +

    Fixed bug, which broke whole CobiGen execution in case a custom CobiGen Plug-in was throwing an arbitrary exception.

    +
  • +
+
+
+
+
+

XML Plug-in

+
+
+
    +
  • +

    Added ability to provide custom merge schemas as part of the template folder.

    +
  • +
  • +

    Added further merge strategies for merging including XML validation.

    +
  • +
+
+
+
+
+

Java Plug-in

+
+
+
    +
  • +

    Fixed NPE for annotated constructors.

    +
  • +
  • +

    Fixed line separator handling to now prefer the file’s one instead of the system ones.

    +
  • +
  • +

    Fixed unwanted new lines in constructors after merging.

    +
  • +
  • +

    Fixed annotation formatting after merge.

    +
  • +
+
+
+
+
+

TypeScript Plug-in

+
+
+
    +
  • +

    Fixed issue on automatic update of the ts-merger bundle.

    +
  • +
+
+
+
+
+

Sonar devon4j plugin

+
+
+

The consolidated list of features for this Sonar devon4j plugin release is as follows.

+
+
+

With this release, we added our own quality profile:

+
+
+
    +
  • +

    #16: Install devon4j quality profile

    +
  • +
+
+
+

Changes for this release can be found in milestone 2020.08.001

+
+
+
+
+

My Thai Star with Microservices and ISTIO Service Mesh Implementation

+
+
+

As always, our reference application, My Thai Star now has been implemented with Microservices and ISTIO Service Mesh features:

+
+
+
    +
  • +

    devon4j - Java

    +
    +
      +
    • +

      My Thai Star now has a sample version on Microservices architecture.

      +
    • +
    • +

      The github repository for the microservices version of My Thai Star is hosted at My Thai Star with Microservices

      +
    • +
    • +

      My Thai Star Microservices now has a multi stage docker build which generates the respective docker images for all the My Thai Star services.

      +
    • +
    • +

      My Thai Star microservices has the Kubernetes artifacts available to be able to deploy into Kubernetes pods.

      +
    • +
    • +

      My Thai Star microservices has ISTIO the service mesh implementation.

      +
    • +
    • +

      Check out the guides to implement or configure ISTIO features such as Traffic Routing, Network Resiliency features(RequestRouting, RequestTimeouts, Fault Injection, Circuit Breaker), Canary Deployments.

      +
    • +
    +
    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/release-notes-version-2020.12.html b/docs/devonfw.github.io/1.0/general/release-notes-version-2020.12.html new file mode 100644 index 00000000..abd40a2e --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/release-notes-version-2020.12.html @@ -0,0 +1,966 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devonfw Release notes 2020.12

+
+ +
+
+
+

Introduction

+
+
+

We are proud to announce the release of devonfw version 2020.12.

+
+
+

This release includes lots of addition of new features, updates and bug fixes but it is very important to highlight the following improvements:

+
+
+
+
+

devonfw IDE

+
+
+

The consolidated list of features for this devonfw IDE release is as follows.

+
+
+
+
+

2020.12.001

+
+
+

Update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #495: Documentation corrections

    +
  • +
  • +

    #491: Consider lombok support

    +
  • +
  • +

    #489: Update node to v12.19.0 and VS Code to 1.50.1

    +
  • +
  • +

    #470: reverse merge of workspace settings not sorting properties anymore

    +
  • +
  • +

    #483: Error during installation when npm is already installed

    +
  • +
  • +

    #415: documentation to customize settings

    +
  • +
  • +

    #479: Error for vscode plugin installation

    +
  • +
  • +

    #471: Preconfigure Project Explorer with Hierarchical Project Presentation

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.12.001.

+
+
+
+
+

2020.08.001

+
+
+

Update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #454: update to eclipse 2020.06

    +
  • +
  • +

    #442: update nodejs and vscode

    +
  • +
  • +

    #432: vsCode settings are not updated

    +
  • +
  • +

    #446: intellij: doConfigureEclipse: command not found

    +
  • +
  • +

    #440: Software update may lead to inconsistent state due to windows file locks

    +
  • +
  • +

    #427: release: keep leading zeros

    +
  • +
  • +

    #450: update settings

    +
  • +
  • +

    #431: devon build command not working correct for yarn or npm

    +
  • +
  • +

    #449: update to devon4j 2020.08.001

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.08.001.

+
+
+
+
+

2020.04.004

+
+
+

Minor update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #433: Windows: devon command line sets wrong environment variables (with tilde symbol)

    +
  • +
  • +

    #435: fix variable resolution on bash

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.04.004.

+
+
+
+
+

2020.04.003

+
+
+

Minor update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #395: variable from devon.properites unset if value is in double quotes

    +
  • +
  • +

    #429: Added script to create a meta file in the users directory after setup

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.04.003.

+
+
+
+
+

2020.04.002

+
+
+

Minor update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #418: Make projects optional

    +
  • +
  • +

    #421: update devon4j to 2020.04.002

    +
  • +
  • +

    #413: Update Eclipse to 2020-03

    +
  • +
  • +

    #424: Strange errors on windows if devon.properties contains mixed line endings

    +
  • +
  • +

    #399: launching of Intellij fails with No such file or directory error.

    +
  • +
  • +

    #410: fix jsonmerge for boolean and null values

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.04.002.

+
+
+
+
+

devon4j

+
+
+

The consolidated list of features for this devon4j release is as follows.

+
+
+
+
+

2020.12.001

+
+
+

New release of devon4j with pluggable web security (CSRF starter) and CompletableFuture support for async REST service client as well as other improvements:

+
+
+
    +
  • +

    #283: Support for CompletableFuture in async service client

    +
  • +
  • +

    #307: Fix CSRF protection support

    +
  • +
  • +

    #287: spring-boot update to 2.3.3

    +
  • +
  • +

    #288: Update jackson to 2.11.2

    +
  • +
  • +

    #293: Update owasp-dependency-check plugin version to 5.3.2

    +
  • +
  • +

    #302: added guide for project/app structure

    +
  • +
  • +

    #315: devon4j documentation correction

    +
  • +
  • +

    #306: improve documentation to launch app

    +
  • +
+
+
+

Documentation is available at devon4j guide 2020.12.001. +The full list of changes for this release can be found in milestone devon4j 2020.12.001.

+
+
+
+
+

2020.08.001

+
+
+

New release of devon4j with async REST service client support and other improvements:

+
+
+
    +
  • +

    #279: support for async service clients

    +
  • +
  • +

    #277: Update Security-Guide to recent OWASP Top (2017)

    +
  • +
  • +

    #281: cleanup documentation

    +
  • +
+
+
+

Documentation is available at devon4j guide 2020.08.001. +The full list of changes for this release can be found in milestone devon4j 2020.08.001.

+
+
+
+
+

2020.04.002

+
+
+

Minor update of devon4j with the following bugfixes and small improvements:

+
+
+
    +
  • +

    #261: JUnit4 backward compatibility

    +
  • +
  • +

    #267: Fix JWT permission expansion

    +
  • +
  • +

    #254: JWT Authentication support for devon4j-kafka

    +
  • +
  • +

    #258: archetype is still lacking a .gitignore

    +
  • +
  • +

    #273: Update libs

    +
  • +
  • +

    #271: Do not enable resource filtering by default

    +
  • +
  • +

    #255: Kafka: Support different retry configuration for different topics

    +
  • +
+
+
+

Documentation is available at devon4j guide 2020.04.002. +The full list of changes for this release can be found in milestone devon4j 2020.04.002.

+
+
+
+
+

devon4node

+
+
+

New devon4node version is published, the changes are:

+
+
+

In this release we have deprecated the devon4node CLI; now we use the nest CLI, and we have added a GraphQL sample.

+
+
+
    +
  • +

    #375: GraphQL Sample.

    +
  • +
  • +

    #257: D4N cli remove

    +
  • +
+
+
+
+
+

CobiGen

+
+
+

Various bugfixes were made as well as consolidating behavior of eclipse vs maven vs cli by properly sharing more code across the different clients. +Also properly takes into account a files line delimiter instead of defaulting to those of the host system.

+
+ +
+
+
+

Templates

+
+
+
    +
  • +

    Removed environment.ts from the crud_angular_client_app/CRUD devon4ng Angular App increment since Cobigen did not make any changes in it

    +
  • +
  • +

    Removed cross referencing between template increments since there is currently no useful use case for it and it leads to a few problems

    +
  • +
  • +

    v2020.12.001

    +
  • +
+
+
+
+
+

Java Plug-in

+
+
+
    +
  • +

    Now properly merges using the input files line delimiters instead of defaulting to those of the host system.

    +
  • +
  • +

    v7.1.0

    +
  • +
+
+
+
+
+

TypeScript Plug-in

+
+
+
    +
  • +

    Fixed NPE Added the option to read a path from an object input

    +
  • +
  • +

    v7.1.0

    +
  • +
+
+
+
+
+

Property Plug-in

+
+
+
    +
  • +

    Now properly merges using the input files line delimiters instead of defaulting to those of the host system.

    +
  • +
  • +

    v7.1.0

    +
  • +
+
+
+
+
+

OpenAPI Plug-in

+
+
+
    +
  • +

    Fixed an issue where nullable enums lead to errors

    +
  • +
  • +

    7.1.0

    +
  • +
+
+
+
+
+

Textmerger

+
+
+
    +
  • +

    Now properly merges using the input files line delimiters instead of defaulting to those of the host system.

    +
  • +
  • +

    v7.1.0

    +
  • +
  • +

    v7.1.1

    +
  • +
+
+
+
+
+

Sonar devon4j plugin

+
+
+

With this release, we made the package structure configurable and did some other improvements and fixes:

+
+
+
    +
  • +

    #117: Rule from checkstyle plugin could not be instantiated in our quality profile

    +
  • +
  • +

    #118: NPE during project analysis

    +
  • +
  • +

    #97: Custom configuration for architecture

    +
  • +
  • +

    #92: Display warnings on the 'devonfw' config page in the 'Administration' section of SonarQube

    +
  • +
  • +

    #95: Add 3rd Party rule to avoid Immutable annotation from wrong package

    +
  • +
  • +

    #94: Add 3rd Party rule to avoid legacy date types

    +
  • +
  • +

    #93: Improve devonfw Java quality profile

    +
  • +
  • +

    #114: Deleted unused architecture config from SonarQube settings to avoid confusion

    +
  • +
+
+
+

Changes for this release can be found in milestone 2020.12.001 and + milestone 2020.12.002

+
+
+
+
+

devon4net

+
+
+

The consolidated list of features for devon4net is as follows:

+
+
+
    +
  • +

    LiteDb: - Support for LiteDB - Provided basic repository for CRUD operations.

    +
  • +
  • +

    RabbitMq: - Use of EasyQNet library to perform CQRS main functions between different microservices - Send commands / Subscribe queues with one C# sentence - Events management: Handled received commands to subscribed messages - Automatic messaging backup when sent and handled (Internal database via LiteDB and database backup via Entity Framework)

    +
  • +
  • +

    MediatR: - Use of MediatR library to perform CQRS main functions in memory - Send commands / Subscribe queues with one C# sentence - Events management: Handled received commands to subscribed messages - Automatic messaging backup when sent and handled (Internal database via LiteDB and database backup via Entity Framework)

    +
  • +
  • +

    SmaxHcm: - Component to manage Microfocus SMAX for cloud infrastructure services management

    +
  • +
  • +

    CyberArk: - Manage safe credentials with CyberArk

    +
  • +
  • +

    AnsibleTower: - Ansible automates the cloud infrastructure. devon4net integrates with Ansible Tower via API consumption endpoints

    +
  • +
  • +

    gRPC+Protobuf: - Added Client + Server basic templates sample gRPC with Google’s Protobuf protocol using devon4net

    +
  • +
  • +

    Kafka: - Added Apache Kafka support for deliver/consume messages and create/delete topics as well

    +
  • +
  • +

    AWS support

    +
    +
      +
    • +

      AWS Template to create serverless applications with auto generation of an APIGateway using AWS base template

      +
    • +
    • +

      AWS template to create pure Lambda functions and manage SQS Events, SNS Events, Generic Events, CloudWatch, S3 Management, AWS Secrets management as a configuration provider in .NET life cycle

      +
    • +
    • +

      AWS CDK integration component to create/manage AWS infrastructures (Infra As Code): Database, Database cluster, VPC, Secrets, S3 buckets, Roles…

      +
    • +
    +
    +
  • +
  • +

    Minor performance and stability improvements such Entity framework migration integration

    +
  • +
  • +

    Updated to the latest .NET Core 3.1 LTS

    +
  • +
+
+
+
+
+

dashboard (beta version)

+
+
+

We are adding the dashboard beta version as part of this release. Dashboard is a tool that allows you to create and manage devonfw projects. It makes it easy to onboard a new person with devonfw.

+
+
+
    +
  • +

    Dashboard lists all IDEs available on the user's system; if no IDE is available, it provides an option to download the latest version of the IDE.

    +
  • +
  • +

    Project creation and management: The project page lists all projects created by the user using the dashboard. Users will be able to create devon4j, devon4ng and devon4node projects using the dashboard.

    +
  • +
  • +

    Support for Eclipse and VSCode IDE

    +
  • +
  • +

    Integrated devonfw-ide usage guide from the website

    +
  • +
+
+
+
+
+

Solicitor

+
+
+

Solicitor is a tool which helps managing Open Source Software used within projects. Below is consolidated feature list of solicitor:

+
+
+
    +
  • +

    Standalone Command Line Java Tool

    +
  • +
  • +

    Importers for component/license information from

    +
  • +
  • +

    Maven

    +
  • +
  • +

    Gradle

    +
  • +
  • +

    NPM

    +
  • +
  • +

    CSV (e.g. for manual entry of data)

    +
  • +
  • +

    Rules processing (using Drools Rule Engine) controls the different phases:

    +
  • +
  • +

    Normalizing / Enhancing of license information

    +
  • +
  • +

    Handling of multilicensing (including selection of applicable licenses) and re-licensing

    +
  • +
  • +

    Legal evaluation

    +
  • +
  • +

    Rules to be defined as Decision Tables

    +
  • +
  • +

    Sample Decision Tables included

    +
  • +
  • +

    Automatic download and file based caching of license texts

    +
  • +
  • +

    Allows manual editing / reformatting of license text

    +
  • +
  • +

    Output processing

    +
  • +
  • +

    Template based text (Velocity) and XLS generation

    +
  • +
  • +

    SQL based pre-processor (e.g. for filtering, aggregation)

    +
  • +
  • +

    Audit log which documents all applied rules for every item might be included in report

    +
  • +
  • +

    "Diff Mode" allows to mark data which has changed as compared to a previous run of Solicitor (in Velocity and XLS reporting)

    +
  • +
  • +

    Customization

    +
  • +
  • +

    Project specific configuration (containing e.g. reporting templates, decision tables) allows to override/amend builtin configuration

    +
  • +
  • +

    Builtin configuration might be overridden/extended by configuration data contained in a single extension file (ZIP format)

    +
  • +
  • +

    This allows to safely provide organization specific rules and reporting templates to all projects of an organization (e.g. to reflect the specific OSS usage policy of the organization)

    +
  • +
+
+
+
+
+

MrChecker

+
+
+

MrChecker Test Framework is an end to end test automation framework written in Java. It is an automated testing framework for functional testing of web applications, API web services, Service Virtualization, Security, native mobile apps and, in the near future, databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions. Below is consolidated list of updates in MrChecker:

+
+
+ +
+
+
+
+

Trainings/tutorials

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/release-notes-version-2021.04.html b/docs/devonfw.github.io/1.0/general/release-notes-version-2021.04.html new file mode 100644 index 00000000..59709e0e --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/release-notes-version-2021.04.html @@ -0,0 +1,467 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devonfw Release notes 2021.04

+
+ +
+
+
+

Introduction

+
+
+

We are proud to announce the release of devonfw version 2021.04.

+
+
+

This release includes lots of addition of new features, updates and bug fixes but it is very important to highlight the following improvements:

+
+
+
+
+

devonfw IDE

+
+
+

The consolidated list of features for this devonfw IDE release is as it follows.

+
+
+
+
+

2021.04.001

+
+
+

Update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #537: Update eclipse to 2021-03

    +
  • +
  • +

    #287: Command autocompletion

    +
  • +
  • +

    #536: Improve handling of aborted downloads

    +
  • +
  • +

    #542: Support placeholders in settings.xml template

    +
  • +
  • +

    #557: minimize setup by reducing DEVON_IDE_TOOLS

    +
  • +
  • +

    #550: update maven to 3.8.1

    +
  • +
  • +

    #545: update devon4j to 2021.04.002 and add migration

    +
  • +
  • +

    #575: jasypt support for password encryption and decryption

    +
  • +
  • +

    #546: Problems with tm-terminal Eclipse plugin

    +
  • +
  • +

    #553: VSCode user-data-dir shall be part of workspace config

    +
  • +
  • +

    #513: Configurable generation of IDE start scripts

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2021.04.001.

+
+
+
+
+

devon4j

+
+
+

The consolidated list of features for this devon4j release is as it follows.

+
+
+
+
+

2021.04.002

+
+
+

Bugfix release with the following stories: +* #389: archetype build broken with ci-friendly-maven +* #391: jasypt documentation improvements +* #387: rebuild and updated diagram with drawio

+
+
+

Documentation is available at devon4j guide 2021.04.002. +The full list of changes for this release can be found in milestone devon4j 2021.04.002.

+
+
+
+
+

2021.04.001

+
+
+

New release of devon4j with fixes, updates and improvements:

+
+
+
    +
  • +

    #370: Minor updates (spring-boot 2.4.4, jackson 2.12.2, CXF 3.4.3, etc.)

    +
  • +
  • +

    #366: BaseTest.isInitialSetup() broken

    +
  • +
  • +

    #85: ci-friendly-maven also for archetype

    +
  • +
  • +

    #373: CORS starter not part of devon4j release

    +
  • +
  • +

    #164: Flattened pom for core project invalid

    +
  • +
  • +

    #323: Add spring integration test to archetype

    +
  • +
  • +

    #351: improved error handling of service client

    +
  • +
  • +

    #71: improve documentation for strong password encryption

    +
  • +
  • +

    #354: JMS senders should not be part of data access layer, but logical layer

    +
  • +
  • +

    #377: updated T-Architecture

    +
  • +
  • +

    #294: integrate sonarcloud analysis into devon4j CI pipeline

    +
  • +
+
+
+

Documentation is available at devon4j guide 2021.04.001. +The full list of changes for this release can be found in milestone devon4j 2021.04.001.

+
+
+
+
+

devon4ng

+
+
+

Updated template and samples to Angular 11. +Updated guide of devon4ng.

+
+
+
+
+

MrChecker

+
+
+

MrChecker Test Framework is an end to end test automation framework written in Java. It is an automated testing framework for functional testing of web applications, API web services, Service Virtualization, Security, native mobile apps and, in the near future, databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions. Two new modules are added to MrChecker:

+
+
+
    +
  • +

    DB Module - we have created a module intended to make testing efforts on DBs easier. It is founded on JPA in conjunction with Hibernate and therefore supports both high level, object based access to DB entities via the IDao interface and low level, native SQL commands via the EntityManager class .

    +
  • +
  • +

    CLI Module - we have created a module intended to make testing command line applications like compilers or batches easier and faster. Huge success here is that, team using this solution was able to prepare a test suite, without app provided, basing only on documentation and using mocking technique.

    +
  • +
+
+
+
+
+

Trainings/tutorials

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/release-notes-version-2021.12.html b/docs/devonfw.github.io/1.0/general/release-notes-version-2021.12.html new file mode 100644 index 00000000..d38e785b --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/release-notes-version-2021.12.html @@ -0,0 +1,567 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devonfw Release notes 2021.12

+
+ +
+
+
+

Introduction

+
+
+

We are proud to announce the release of devonfw version 2021.12.

+
+
+

This release includes lots of addition of new features, updates and bug fixes but it is very important to highlight the following improvements:

+
+
+
+
+

devonfw IDE

+
+
+

The consolidated list of features for this devonfw IDE release is as it follows.

+
+
+
+
+

2021.12.001

+
+
+

Update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #616: Default eclipse version not downloadable

    +
  • +
  • +

    #607: fix url from eclipse plugins

    +
  • +
  • +

    #453: Automatic plugin installation for IntelliJ

    +
  • +
  • +

    #601: Update of node.js to 14.17.6 to fix CVE-2021-22930

    +
  • +
  • +

    #625: There is no doIsQuiet yet for CobiGen

    +
  • +
+
+
+
+
+

devon4j

+
+
+

The consolidated list of features for this devon4j release is as it follows.

+
+
+
+
+

2021.04.003

+
+
+

Bugfix release with the following stories:

+
+
+
    +
  • +

    #336: archetype contains batch artefacts even when no batch was generated

    +
  • +
  • +

    #385: Access-control should honor roles by default

    +
  • +
  • +

    #397: security-jwt should support the claim "groups" from the microprofile jwt

    +
  • +
  • +

    #284: Support for synchronous rest client using HTTP client

    +
  • +
  • +

    #393: devon4j JpaInitializer documentation not matching releases

    +
  • +
  • +

    #216: ability to generate OpenApi files automatically from JAX-RS services

    +
  • +
+
+
+

Documentation is available at devon4j guide 2021.04.003. +The full list of changes for this release can be found in milestone devon4j 2021.04.003.

+
+
+
+
+

2021.04.002

+
+
+

Bugfix release with the following stories:

+
+
+
    +
  • +

    #389: archetype build broken with ci-friendly-maven

    +
  • +
  • +

    #391: jasypt documentation improvements

    +
  • +
  • +

    #387: rebuild and updated diagram with drawio

    +
  • +
+
+
+

Documentation is available at devon4j guide 2021.04.002. +The full list of changes for this release can be found in milestone devon4j 2021.04.002.

+
+
+
+
+

Quarkus

+
+ +
+
+
+

Documentation update

+
+
+
    +
  • +

    Separated the Java section of the homepage into 'General', 'Spring' and 'Quarkus'

    +
  • +
  • +

    Guiding the reader

    +
    +
      +
    • +

      Pros and cons of Quarkus

      +
    • +
    • +

      Guide to the Reader

      +
    • +
    • +

      Guiding for a decision between Spring and Quarkus

      +
    • +
    +
    +
  • +
  • +

    Added getting started guide with Quarkus

    +
  • +
  • +

    Project structure: Defined 'new' modern project structure for Quarkus applications

    +
  • +
  • +

    Added and updated different technical guides for Quarkus

    +
  • +
+
+
+
+
+

Reference applications

+
+
+
    +
  • +

    devon4quarkus-product +https://github.com/devonfw-sample/devon4quarkus-reference is application including a REST service for product management including different concepts documented in the devon4j documentation. Additionally it demonstrates Kubernetes deployment and Helm charts.

    +
  • +
  • +

    devon4quarkus-order +https://github.com/devonfw-sample/devon4quarkus-order +Application including a REST service for order management. Connected with the devon4quarkus-product application via REST client approach (gets the products from there)

    +
    +
      +
    • +

      Similar feature as the devon4quarkus-product application

      +
    • +
    • +

      Includes REST client using tkit-mp-restclient-plugin with API first approach

      +
    • +
    +
    +
  • +
+
+
+
+
+

devon4ng

+
+
+

In this release we have introduced some new samples to serve as a reference for integrating some popular 3rd-party component libraries like PrimeNG, NG-ZORRO and more with devon4ng. Also, all samples and templates are updated to the latest Angular 13:

+
+
+ +
+
+
+
+

devon4net

+
+
+
    +
  • +

    Real modular clean+onion architecture

    +
  • +
  • +

    .NET 6 Updated

    +
  • +
  • +

    API,Kafka, Protobuf, AWS templates reviewed

    +
  • +
  • +

    Component isolation (Any component can be used outside devon templates)

    +
  • +
  • +

    Cloud native. AWS templates reviewed

    +
    +
      +
    • +

      Modularity

      +
    • +
    • +

      Secrets and params as configuration options

      +
    • +
    • +

      DynamoDB integration

      +
    • +
    +
    +
  • +
  • +

    Google protobuf integration (updated libraries, retry pattern)

    +
  • +
  • +

    Code review. Start your project with 0 Errors / 0 Warnings

    +
  • +
  • +

    Component options cleaned and reviewed

    +
  • +
+
+
+
+
+

CobiGen

+
+
+

This is bug fix release of CobiGen more details can be found in here.

+
+
+
+
+

Trainings/tutorials

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/release-notes-version-3.0.html b/docs/devonfw.github.io/1.0/general/release-notes-version-3.0.html new file mode 100644 index 00000000..ed1b7b25 --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/release-notes-version-3.0.html @@ -0,0 +1,1095 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devonfw Release notes 3.0 “Fry”

+
+ +
+
+
+

Introduction

+
+
+

We are proud to announce the immediate release of devonfw version 3.0 (code named “Fry” during development). This version is the consolidation of Open Source, focused on the major namespace change ever in the platform, removing the OASP references and adopting the new devonfw names for each technical stack or framework.

+
+
+

The new stack names are the following:

+
+
+
    +
  • +

    devon4j, former oasp4j, is the new name for Java.

    +
  • +
  • +

    devon4ng, former oasp4js, is the new one for Angular.

    +
  • +
  • +

    devon4net, is the new .NET stack.

    +
  • +
  • +

    devon4X, is the new stack for Xamarin development.

    +
  • +
  • +

    devon4node, is the new devonfw incubator for node.js.

    +
  • +
+
+
+

The new devon4j version was created directly from the latest devon4j version (3.0.0). Hence it brings all the features and values that devon4j offered. However, the namespace migration was used to do some housekeeping and remove deprecated code as well as reduce dependencies. Therefore your data-access layer will no longer have to depend on any third party except for devon4j as well as of course the JPA. We also have improved the application template that now comes with a modern JSON logging ready for docker and logstash based environments.

+
+
+

To help you upgrading we introduced a migration feature in devcon. This can automatically migrate your code from devon4j (even older versions starting from 2.4.0) to the latest version of devon4j. There might be some small manual changes left to do but 90% of the migration will be done automatically for you.

+
+
+

Besides, the first version of the devonfw plugin for SonarQube has been released. It extends SonarQube with the ability to validate your code according to the devon4j architecture. More details at https://github.com/devonfw/sonar-devon-plugin.

+
+
+

This is the first release that integrates the new devonfw .NET framework, called devon4net, and Xamarin for mobile native development, devon4X. devon4NET and devon4X are the Capgemini standard frameworks for .NET and Xamarin software development. With the two new family members devonfw provides guidance and acceleration for the major software development platforms in our industry. Their interoperability provides you the assurance your multichannel solution will be consistent across web and mobile channels.

+
+
+

“Fry” release contains lots of improvements in our Mr.Checker E2E Testing Framework, including a complete E2E sample inside our reference application My Thai Star. Besides Mr.Checker, we include as an incubator Testar, a test tool (and framework) to test applications at the GUI level whose objective is to solve part of the maintenance problem affecting tests by automatically generating test cases based on a structure that is automatically derived from the GUI. Testar is not included to replace Mr.Checker but rather to provide development teams with a series of interesting options which go beyond what Mr.Checker already provides.

+
+
+

Apart from Mr.Checker, engagements can now use Testar as an extra option for testing. This is a tool that enables the automated system testing of desktop, web and mobile applications at the GUI level. Testar has been added as an incubator to the platform awaiting further development during 2019.

+
+
+

The new incubator for node.js, called devon4node, has been included and implemented in several internal projects. This incubator is based on the Nest framework https://www.nestjs.com/. Nest is a framework for building efficient, scalable Node.js server-side applications. It uses progressive JavaScript, is built with TypeScript (preserves compatibility with pure JavaScript) and combines elements of OOP (Object Oriented Programming), FP (Functional Programming), and FRP (Functional Reactive Programming). Under the hood, Nest makes use of Express, but also provides compatibility with a wide range of other libraries (e.g. Fastify). This allows for easy use of the myriad third-party plugins which are available.

+
+
+

In order to facilitate the utilization of Microsoft Visual Studio Code in devonfw, we have developed and included the new devonfw Platform Extension Pack with lots of features to develop and test applications with this IDE in languages and frameworks such as TypeScript, JavaScript, .NET, Java, Rust, C++ and many more. More information at https://marketplace.visualstudio.com/items?itemName=devonfw.devonfw-extension-pack. Also, you can contribute to this extension in this GitHub repository https://github.com/devonfw/devonfw-extension-pack-vscode.

+
+
+

There is a whole range of new features and improvements which can be seen in that light. The My Thai Star sample app has now been upgraded to devon4j and devon4ng, a new devon4node backend implementation has been included that is seamless interchangeable, an E2E MrChecker sample project, CICD and deployment scripts and lots of bugs have been fixed.

+
+
+

Last but not least, the projects wikis and the devonfw Guide has once again been updated accordingly before the big refactor that will be addressed in the following release in 2019.

+
+
+
+
+

Changes and new features

+
+ +
+
+
+

Devonfw dist

+
+
+
    +
  • +

    Eclipse 2018.9 integrated

    +
    +
      +
    • +

      CheckStyle Plugin updated.

      +
    • +
    • +

      SonarLint Plugin updated.

      +
    • +
    • +

      Git Plugin updated.

      +
    • +
    • +

      FindBugs Plugin updated.

      +
    • +
    • +

      CobiGen plugin updated.

      +
    • +
    +
    +
  • +
  • +

    Other Software

    +
    +
      +
    • +

      Visual Studio Code latest version included and pre-configured with the devonfw Platform Extension Pack.

      +
    • +
    • +

      Ant updated to latest.

      +
    • +
    • +

      Maven updated to latest.

      +
    • +
    • +

      Java updated to latest.

      +
    • +
    • +

      Nodejs LTS updated to latest.

      +
    • +
    • +

      @angular/cli included.

      +
    • +
    • +

      Yarn package manager updated.

      +
    • +
    • +

      Python3 updated.

      +
    • +
    • +

      Spyder3 IDE integrated in python3 installation updated.

      +
    • +
    • +

      devon4ng-application-template for Angular 7 at workspaces/examples

      +
    • +
    • +

      devon4ng-ionic-application-template for Ionic 3.20 at workspace/samples

      +
    • +
    +
    +
  • +
+
+
+
+
+

My Thai Star Sample Application

+
+
+

The new release of My Thai Star has focused on the following improvements:

+
+
+
    +
  • +

    Release 1.12.2.

    +
  • +
  • +

    devon4j:

    +
    +
      +
    • +

      devon4j 3.0.0 integrated.

      +
    • +
    • +

      Spring Boot 2.0.4 integrated.

      +
    • +
    • +

      Spring Data integration.

      +
    • +
    • +

      New pagination and search system.

      +
    • +
    • +

      Bug fixes.

      +
    • +
    +
    +
  • +
  • +

    devon4ng:

    +
    +
      +
    • +

      Client devon4ng updated to Angular 7.

      +
    • +
    • +

      Angular Material and Covalent UI frameworks updated.

      +
    • +
    • +

      Electron framework integrated.

      +
    • +
    +
    +
  • +
  • +

    devon4node

    +
    +
      +
    • +

      TypeScript 3.1.3.

      +
    • +
    • +

      Based on Nest framework.

      +
    • +
    • +

      Aligned with devon4j.

      +
    • +
    • +

      Complete backend implementation.

      +
    • +
    • +

      TypeORM integrated with SQLite database configuration.

      +
    • +
    • +

      Webpack bundler.

      +
    • +
    • +

      Nodemon runner.

      +
    • +
    • +

      Jest unit tests.

      +
    • +
    +
    +
  • +
  • +

    Mr.Checker

    +
    +
      +
    • +

      Example cases for end-to-end test.

      +
    • +
    • +

      Production line configuration.

      +
    • +
    • +

      CICD

      +
    • +
    • +

      Improved integration with Production Line

      +
    • +
    • +

      New deployment from artifact

      +
    • +
    • +

      New CICD pipelines

      +
    • +
    • +

      New deployment pipelines

      +
    • +
    • +

      Automated creation of pipelines in Jenkins

      +
    • +
    +
    +
  • +
+
+
+
+
+

Documentation updates

+
+
+

The following contents in the devonfw guide have been updated:

+
+
+
    +
  • +

    Upgrade of all the new devonfw named assets.

    +
    +
      +
    • +

      devon4j

      +
    • +
    • +

      devon4ng

      +
    • +
    • +

      Mr.Checker

      +
    • +
    +
    +
  • +
  • +

    Electron integration cookbook.

    +
  • +
  • +

    Updated cookbook about Swagger.

    +
  • +
  • +

    Removed deprecated entries.

    +
  • +
+
+
+

Apart from this the documentation has been reviewed and some typos and errors have been fixed.

+
+
+

The current development of the guide has been moved to https://github.com/devonfw-forge/devon-guide/wiki in order to be available as the rest of OSS assets.

+
+
+
+
+

devon4j

+
+
+

The following changes have been incorporated in devon4j:

+
+
+
    +
  • +

    Spring Boot 2.0.4 Integrated.

    +
  • +
  • +

    Spring Data layer Integrated.

    +
  • +
  • +

    Decouple mmm.util.*

    +
  • +
  • +

    Removed deprecated restaurant sample.

    +
  • +
  • +

    Updated Pagination support for Spring Data

    +
  • +
  • +

    Add support for hana as dbType.

    +
  • +
  • +

    Bugfixes.

    +
  • +
+
+
+
+
+

devon4ng

+
+
+

The following changes have been incorporated in devon4ng:

+
+
+ +
+
+
+
+

devon4net

+
+
+

Some of the highlights of devon4net 1.0 are:

+
+
+
    +
  • +

    External configuration file for each environment.

    +
  • +
  • +

    .NET Core 2.1.X working solution (Latest 2.1.402).

    +
  • +
  • +

    Packages and solution templates published on nuget.org.

    +
  • +
  • +

    Full components customization by config file.

    +
  • +
  • +

    Docker ready (My Thai Star sample fully working on docker).

    +
  • +
  • +

    Port specification by configuration.

    +
  • +
  • +

    Dependency injection by Microsoft .NET Core.

    +
  • +
  • +

    Automapper support.

    +
  • +
  • +

    Entity framework ORM (Unit of work, async methods).

    +
  • +
  • +

    .NET Standard library 2.0 ready.

    +
  • +
  • +

    Multi-platform support: Windows, Linux, Mac.

    +
  • +
  • +

    Samples: My Thai Star back-end, Google API integration, Azure login, AOP with Castle.

    +
  • +
  • +

    Documentation site.

    +
  • +
  • +

    SPA page support.

    +
  • +
+
+
+

And included the following features:

+
+
+
    +
  • +

    Logging:

    +
    +
      +
    • +

      Text File.

      +
    • +
    • +

      Sqlite database support.

      +
    • +
    • +

      Serilog Seq Server support.

      +
    • +
    • +

      Graylog integration ready through TCP/UDP/HTTP protocols.

      +
    • +
    • +

      API Call params interception (simple and compose objects).

      +
    • +
    • +

      API error exception management.

      +
    • +
    +
    +
  • +
  • +

    Swagger:

    +
    +
      +
    • +

      Swagger auto generating client from comments and annotations on controller classes.

      +
    • +
    • +

      Full swagger client customization (Version, Title, Description, Terms, License, Json endpoint definition).

      +
    • +
    +
    +
  • +
  • +

    JWT:

    +
    +
      +
    • +

      Issuer, audience, token expiration customization by external file configuration.

      +
    • +
    • +

      Token generation via certificate.

      +
    • +
    • +

      MVC inherited classes to access JWT user properties.

      +
    • +
    • +

      API method security access based on JWT Claims.

      +
    • +
    +
    +
  • +
  • +

    CORS:

    +
    +
      +
    • +

      Simple CORS definition ready.

      +
    • +
    • +

      Multiple CORS domain origin definition with specific headers and verbs.

      +
    • +
    +
    +
  • +
  • +

    Headers:

    +
    +
      +
    • +

      Automatic header injection with middleware.

      +
    • +
    • +

      Supported header definitions: AccessControlExposeHeader, StrictTransportSecurityHeader, XFrameOptionsHeader, XssProtectionHeader, XContentTypeOptionsHeader, ContentSecurityPolicyHeader, PermittedCrossDomainPoliciesHeader, ReferrerPolicyHeader.

      +
    • +
    +
    +
  • +
  • +

    Reporting server:

    +
    +
      +
    • +

      Partial implementation of reporting server based on My-FyiReporting (now runs on linux container).

      +
    • +
    +
    +
  • +
  • +

    Testing:

    +
    +
      +
    • +

      Integration test template with sqlite support.

      +
    • +
    • +

      Unit test template.

      +
    • +
    • +

      Moq, xunit frameworks integrated.

      +
    • +
    +
    +
  • +
+
+
+
+
+

devon4X

+
+
+

Some of the highlights of the new devonfw Xamarin framework are:

+
+
+
    +
  • +

    Based on Excalibur framework by Hans Harts (https://github.com/Xciles/Excalibur).

    +
  • +
  • +

    Updated to latest MVVMCross 6 version.

    +
  • +
  • +

    My Thai Star Excalibur forms sample.

    +
  • +
  • +

    Xamarin Forms template available on nuget.org.

    +
  • +
+
+
+
+
+

AppSec Quick Solution Guide

+
+
+

This release incorporates a new Solution Guide for Application Security based on the state of the art in OWASP based application security. The purpose of this guide is to offer quick solutions for common application security issues for all applications based on devonfw. It’s often the case that we need our systems to comply to certain sets of security requirements and standards. Each of these requirements needs to be understood, addressed and converted to code or project activity. We want this guide to prevent the wheel from being reinvented over and over again and to give clear hints and solutions to common security problems.

+
+
+ +
+
+
+
+

CobiGen

+
+
+
    +
  • +

    CobiGen core new features:

    +
    +
      +
    • +

      CobiGen_Templates will not need to be imported into the workspace anymore. However, If you want to adapt them, you can still click on a button that automatically imports them for you.

      +
    • +
    • +

      CobiGen_Templates can be updated by one-click whenever the user wants to have the latest version.

      +
    • +
    • +

      Added the possibility to reference external increments on configuration level. This is used for reducing the number of duplicated templates.

      +
    • +
    +
    +
  • +
  • +

    CobiGen_Templates project and docs updated:

    +
    +
      +
    • +

      Spring standard has been followed better than ever.

      +
    • +
    • +

      Interface templates get automatically relocated to the api project. Needed for following the new devon4j standard.

      +
    • +
    +
    +
  • +
  • +

    CobiGen Angular:

    +
    +
      +
    • +

      Angular 7 generation improved based on the updated application template.

      +
    • +
    • +

      Pagination changed to fit Spring standard.

      +
    • +
    +
    +
  • +
  • +

    CobiGen Ionic: Pagination changed to fit Spring standard.

    +
  • +
  • +

    CobiGen OpenAPI plugin released with multiple bug-fixes and other functionalities like:

    +
    +
      +
    • +

      Response and parameter types are parsed properly when they are a reference to an entity.

      +
    • +
    • +

      Parameters defined on the body of a request are being read correctly.

      +
    • +
    +
    +
  • +
+
+
+
+
+

Devcon

+
+
+

A new version of Devcon has been released. Fixes and new features include:

+
+
+
    +
  • +

    Updated to match current devon4j

    +
  • +
  • +

    Update to download Linux distribution.

    +
  • +
  • +

    Custom modules creation improvements.

    +
  • +
  • +

    Code Migration feature added

    +
  • +
  • +

    Bugfixes.

    +
  • +
+
+
+
+
+

Devonfw OSS Modules

+
+
+

Modules upgraded to be used in new devon4j projects:

+
+
+
    +
  • +

    Reporting module

    +
  • +
  • +

    WinAuth AD Module

    +
  • +
  • +

    WinAuth SSO Module

    +
  • +
  • +

    I18n Module

    +
  • +
  • +

    Async Module

    +
  • +
  • +

    Integration Module

    +
  • +
  • +

    Microservice Module

    +
  • +
  • +

    Compose for Redis Module

    +
  • +
+
+ +
+
+
+

Devonfw Testing

+
+ +
+
+
+

== Mr.Checker

+
+
+

The Mr.Checker Test Framework is an automated testing framework for functional testing of web applications, API web services, Service Virtualization, Security and in coming future native mobile apps, and databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions. Mr.Checker updates and improvements:

+
+
+ +
+
+
+
+

== Testar

+
+
+

We have added Test*, Testar, as an incubator to the available test tools within devonfw. This ground-breaking tool is being developed by the Technical University of Valencia (UPV). In 2019 Capgemini will co-develop Testar with the UPV.

+
+
+

Testar is a tool that enables the automated system testing of desktop, web and mobile applications at the GUI level.

+
+
+

With Testar, you can start testing immediately. It automatically generates and executes test sequences based on a structure that is automatically derived from the UI through the accessibility API. Testar can detect the violation of general-purpose system requirements and you can use plugins to customize your tests.

+
+
+

You do not need test scripts and maintenance of it. The tests are random and are generated and executed automatically.

+
+
+

If you need to do directed tests you can create scripts to test specific requirements of your application.

+
+
+

Testar is included in the devonfw distro or can be downloaded from https://testar.org/download/.

+
+
+

The GitHub repository can be found at: https://github.com/TESTARtool/TESTAR.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/release-notes-version-3.1.html b/docs/devonfw.github.io/1.0/general/release-notes-version-3.1.html new file mode 100644 index 00000000..b7bf5bc3 --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/release-notes-version-3.1.html @@ -0,0 +1,1003 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devonfw Release notes 3.1 “Goku”

+
+ +
+
+
+

Introduction

+
+
+

We are proud to announce the immediate release of devonfw version 3.1 (code named “Goku” during development). This version is the first one that implements our new documentation workflow, that will allow users to get the updated documentation at any moment and not to wait for the next devonfw release.

+
+
+

This is now possible as we have established a new workflow and rules during development of our assets. The idea behind this is that all the repositories contain a documentation folder and, in any pull request, the developer must include the related documentation change. A new Travis CI configuration added to all these repositories will automatically take the changes and publish them in the wiki section of every repository and in the new devonfw-guide repository that consolidates all the changes from all the repositories. Another pipeline will take changes from this consolidated repository and generate dynamically the devonfw guide in PDF and in the next weeks in HTML for the new planned devonfw website. The following schema explains this process:

+
+
+
+documentation workflow +
+
+
+

This release includes the very first version of the new CobiGen CLI. Now using commands, you will be able to generate code the same way as you do with Eclipse. This means that you can use CobiGen on other IDEs like Visual Studio Code or IntelliJ. Please take a look at https://github.com/devonfw/cobigen/wiki/howto_Cobigen-CLI-generation for more info.

+
+
+

The devonfw-shop-floor project has got a lot of updates in order to make even easier the creation of devonfw projects with CICD pipelines that run on the Production Line, deploy on Red Hat OpenShift Clusters and in general Docker environments. See the details below.

+
+
+

This release includes the very first version of our devonfw-ide tool that will allow users to automate devonfw setup and update the development environment. This tool will become the default devonfw setup tool in future releases. For more information please visit the repository https://github.com/devonfw/devon-ide.

+
+
+

Following the same collaboration model we used in order to improve the integration of devonfw with Red Hat OpenShift and which allowed us to get the Red Hat Open Shift Primed certification, we have been working alongside with SAP HANA developers in order to support this database in the devon4j. This model was based on the contribution and review of pull requests in our reference application My Thai Star. In this case, SAP developers collaborated with us in the following two new use cases:

+
+
+
    +
  • +

    Prediction of future demand

    +
  • +
  • +

    Geospatial analysis and clustering of customers

    +
  • +
+
+ +
+

Last but not least the devonfw extension pack for VS Code has been improved with the latest extensions and helpers for this IDE. Among many others you can now use:

+
+ +
+

Also it is worth the try of the updated support for Java and Spring Boot development in VS Code. Check it out for yourself!

+
+
+

More information at https://marketplace.visualstudio.com/items?itemName=devonfw.devonfw-extension-pack. Also, you can contribute to this extension in this GitHub repository https://github.com/devonfw/devonfw-extension-pack-vscode.

+
+
+
+
+

Changes and new features

+
+ +
+
+
+

Devonfw dist

+
+
+
    +
  • +

    Eclipse 2018.12 integrated

    +
    +
      +
    • +

      CheckStyle Plugin updated.

      +
    • +
    • +

      SonarLint Plugin updated.

      +
    • +
    • +

      Git Plugin updated.

      +
    • +
    • +

      FindBugs Plugin updated.

      +
    • +
    • +

      CobiGen plugin updated.

      +
    • +
    +
    +
  • +
  • +

    Other Software

    +
    +
      +
    • +

      Visual Studio Code latest version included and pre-configured with the devonfw Platform Extension Pack.

      +
    • +
    • +

      Ant updated to latest.

      +
    • +
    • +

      Maven updated to latest.

      +
    • +
    • +

      Java updated to latest.

      +
    • +
    • +

      Nodejs LTS updated to latest.

      +
    • +
    • +

      @angular/cli included.

      +
    • +
    • +

      @devonfw/cicdgen included.

      +
    • +
    • +

      Yarn package manager updated.

      +
    • +
    • +

      Python3 updated.

      +
    • +
    • +

      Spyder3 IDE integrated in python3 installation updated.

      +
    • +
    • +

      devon4ng-application-template for Angular 8 at workspaces/examples

      +
    • +
    • +

      devon4ng-ionic-application-template for Ionic 4 at workspace/samples

      +
    • +
    +
    +
  • +
+
+
+
+
+

My Thai Star Sample Application

+
+
+

The new release of My Thai Star has focused on the following improvements:

+
+
+
    +
  • +

    Release 3.1.0.

    +
  • +
  • +

    devon4j:

    +
    +
      +
    • +

      devon4j 3.1.0 integrated.

      +
    • +
    • +

      Spring Boot 2.1.6 integrated.

      +
    • +
    • +

      SAP 4/HANA prediction use case.

      +
    • +
    • +

      Bug fixes.

      +
    • +
    +
    +
  • +
  • +

    devon4ng:

    +
    +
      +
    • +

      SAP 4/HANA prediction use case.

      +
    • +
    • +

      2FA toggleable (two factor authentication).

      +
    • +
    • +

      NgRx integration in process (PR #234).

      +
    • +
    +
    +
  • +
  • +

    devon4node

    +
    +
      +
    • +

      TypeScript 3.1.3.

      +
    • +
    • +

      Based on Nest framework.

      +
    • +
    • +

      Aligned with devon4j.

      +
    • +
    • +

      Complete backend implementation.

      +
    • +
    • +

      TypeORM integrated with SQLite database configuration.

      +
    • +
    • +

      Webpack bundler.

      +
    • +
    • +

      Nodemon runner.

      +
    • +
    • +

      Jest unit tests.

      +
    • +
    +
    +
  • +
  • +

    Mr.Checker

    +
    +
      +
    • +

      Example cases for end-to-end test.

      +
    • +
    • +

      Production line configuration.

      +
    • +
    • +

      CICD

      +
    • +
    • +

      Improved integration with Production Line

      +
    • +
    • +

      New Traefik load balancer and reverse proxy

      +
    • +
    • +

      New deployment from artifact

      +
    • +
    • +

      New CICD pipelines

      +
    • +
    • +

      New deployment pipelines

      +
    • +
    • +

      Automated creation of pipelines in Jenkins

      +
    • +
    +
    +
  • +
+
+
+
+
+

Documentation updates

+
+
+

This release addresses the new documentation workflow, making it now possible to keep the documentation synced with any change. The new documentation includes the following contents:

+
+
+
    +
  • +

    Getting started

    +
  • +
  • +

    Contribution guide

    +
  • +
  • +

    Devcon

    +
  • +
  • +

    Release notes

    +
  • +
  • +

    devon4j documentation

    +
  • +
  • +

    devon4ng documentation

    +
  • +
  • +

    devon4net documentation

    +
  • +
  • +

    devonfw-shop-floor documentation

    +
  • +
  • +

    cicdgen documentation

    +
  • +
  • +

    devonfw testing with MrChecker

    +
  • +
  • +

    My Thai Star documentation

    +
  • +
+
+
+
+
+

devon4j

+
+
+

The following changes have been incorporated in devon4j:

+
+
+
    +
  • +

    Added Support for Java8 up to Java11

    +
  • +
  • +

    Upgrade to Spring Boot 2.1.6.

    +
  • +
  • +

    Upgrade to Spring 5.1.8

    +
  • +
  • +

    Upgrade to JPA 2.2

    +
  • +
  • +

    Upgrade to Hibernate 5.3

    +
  • +
  • +

    Upgrade to Dozer 6.4.1 (ATTENTION: Requires Migration, use devon-ide for automatic upgrade)

    +
  • +
  • +

    Many improvements to documentation (added JDK guide, architecture-mapping, JMS, etc.)

    +
  • +
  • +

    Completed support (JSON, Beanmapping) for pagination, IdRef, and java.time

    +
  • +
  • +

    Added MasterCto

    +
  • +
  • +

    For all details see milestone.

    +
  • +
+
+
+
+
+

devon4ng

+
+
+

The following changes have been incorporated in devon4ng:

+
+
+
    +
  • +

    Angular CLI 8,

    +
  • +
  • +

    Angular 8,

    +
  • +
  • +

    Angular Material 8,

    +
  • +
  • +

    Ionic 4,

    +
  • +
  • +

    Capacitor 1.0 as Cordova replacement,

    +
  • +
  • +

    NgRx 8 support for State Management,

    +
  • +
  • +

    devon4ng Angular application template updated to Angular 8 with visual improvements and bugfixes https://github.com/devonfw/devon4ng-application-template

    +
  • +
  • +

    devon4ng Ionic application template updated and improved https://github.com/devonfw/devon4ng-ionic-application-template

    +
  • +
  • +

    New devon4ng Angular application template with state management using Angular 8 and NgRx 8 https://github.com/devonfw/devon4ng-ngrx-template

    +
  • +
  • +

    New devon4ng library https://github.com/devonfw/devon4ng-library that includes the following libraries:

    +
    +
      +
    • +

      Cache Module for Angular 7+ projects.

      +
    • +
    • +

      Authorization Module for Angular 7+ projects.

      +
    • +
    +
    +
  • +
  • +

    New use cases with documentation and samples:

    +
    +
      +
    • +

      Web Components with Angular Elements

      +
    • +
    • +

      Initial configuration with App Initializer pattern

      +
    • +
    • +

      Error Handling

      +
    • +
    • +

      PWA with Angular and Ionic

      +
    • +
    • +

      Lazy Loading

      +
    • +
    • +

      Library construction

      +
    • +
    • +

      Layout with Angular Material

      +
    • +
    • +

      Theming with Angular Material

      +
    • +
    +
    +
  • +
+
+
+
+
+

devon4net

+
+
+

The following changes have been incorporated in devon4net:

+
+
+
    +
  • +

    New circuit breaker component to communicate microservices via HTTP

    +
  • +
  • +

    Resolved the update packages issue

    +
  • +
+
+
+
+
+

AppSec Quick Solution Guide

+
+
+

This release incorporates a new Solution Guide for Application Security based on the state of the art in OWASP based application security. The purpose of this guide is to offer quick solutions for common application security issues for all applications based on devonfw. It’s often the case that we need our systems to comply to certain sets of security requirements and standards. Each of these requirements needs to be understood, addressed and converted to code or project activity. We want this guide to prevent the wheel from being reinvented over and over again and to give clear hints and solutions to common security problems.

+
+
+ +
+
+
+
+

CobiGen

+
+
+
    +
  • +

    CobiGen core new features:

    +
    +
      +
    • +

      CobiGen CLI: New command line interface for CobiGen. Using commands, you will be able to generate code the same way as you do with Eclipse. This means that you can use CobiGen on other IDEs like Visual Studio Code or IntelliJ. Please take a look into the documentation for more info.

      +
      +
        +
      • +

        Performance improves greatly in the CLI thanks to the lack of GUI.

        +
      • +
      • +

        You will be able to use path globs for selecting multiple input files.

        +
      • +
      • +

        We have implemented a search functionality so that you can easily search for increments or templates.

        +
      • +
      +
      +
    • +
    • +

      First steps taken on CobiGen refactoring: With the new refactoring we will be able to decouple CobiGen completely from the target and input language. This will facilitate the creation of parsers and mergers for any language.

      +
      +
        +
      • +

        NashornJS has been deprecated: It was used for executing JavaScript code inside JVM. With the refactoring, performance has improved on the TypeScript merger.

        +
      • +
      +
      +
    • +
    • +

      Improving CobiGen templates:

      +
      +
        +
      • +

        Removed Covalent from Angular templates as it is not compatible with Angular 8.

        +
      • +
      • +

        Added devon4ng-NgRx templates that implement reactive state management. Note: The TypeScript merger is currently being improved in order to accept NgRx. The current templates are set as overridable by default.

        +
      • +
      • +

        Test data builder templates now make use of Lambdas and Consumers.

        +
      • +
      • +

        CTOs and ETOs increments have been correctly separated.

        +
      • +
      +
      +
    • +
    • +

      TypeScript merger has been improved: Now it is possible to merge comments (like tsdoc) and enums.

      +
    • +
    • +

      OpenAPI parsing extended to read enums. Also fixed some bugs when no properties were set or when URLs were too short.

      +
    • +
    • +

      Java static and object initializers now get merged.

      +
    • +
    • +

      Fixed bugs when downloading and adapting templates.

      +
    • +
    +
    +
  • +
+
+
+
+
+

Devcon

+
+
+

A new version of Devcon has been released. Fixes and new features include:

+
+
+
    +
  • +

    Updated to match current devon4j

    +
  • +
  • +

    Update to download Linux distribution.

    +
  • +
  • +

    Custom modules creation improvements.

    +
  • +
  • +

    Code Migration feature added.

    +
  • +
  • +

    Bugfixes.

    +
  • +
+
+
+
+
+

Devonfw OSS Modules

+
+
+

Modules upgraded to be used in new devon4j projects:

+
+
+ +
+
+
+
+

devonfw shop floor

+
+
+
    +
  • +

    Industrialization oriented to configure the provisioning environment provided by Production Line and deploy applications on an OpenShift cluster.

    +
  • +
  • +

    Added Jenkinsfiles to configure automatically OpenShift environments to deploy devonfw applications.

    +
  • +
  • +

    Industrialization to start new projects and configure them with CICD.

    +
  • +
  • +

    Upgrade the documentation with getting started guide to configure CICD in any devonfw project and deploy it.

    +
  • +
  • +

    Added new tool cicdgen to generate CICD code/files.

    +
  • +
+
+
+
+
+

== cicdgen

+
+
+

cicdgen is a devonfw tool to generate all code/files related to CICD in your project. It’s based on angular schematics and it has its own CLI. +More information here.

+
+
+
    +
  • +

    CICD configuration for devon4j, devon4ng and devon4node projects

    +
  • +
  • +

    Option to deploy devonfw projects with Docker

    +
  • +
  • +

    Option to deploy devonfw projects with OpenShift

    +
  • +
+
+
+
+
+

Devonfw Testing

+
+ +
+
+
+

== Mr.Checker

+
+
+

The Mr.Checker Test Framework is an automated testing framework for functional testing of web applications, API web services, Service Virtualization, Security and, in the coming future, native mobile apps and databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions. Mr.Checker updates and improvements:

+
+
+
    +
  • +

    Examples available under embedded project “MrChecker-App-Under-Test” and in project wiki: https://github.com/devonfw/devonfw-testing/wiki

    +
  • +
  • +

    How to install:

    + +
  • +
  • +

    Release Note:

    +
    +
      +
    • +

      module selenium - 3.8.2.1:

      +
      +
        +
      • +

        possibility to define version of driver in properties.file

        +
      • +
      • +

        automatic driver download if the version is not specified

        +
      • +
      • +

        possibility to run with different browser options

        +
      • +
      • +

        module webAPI – 1.2.1:

        +
      • +
      • +

        possibility to connect to the remote WireMock server

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/general/release-notes-version-3.2.html b/docs/devonfw.github.io/1.0/general/release-notes-version-3.2.html new file mode 100644 index 00000000..5c18207d --- /dev/null +++ b/docs/devonfw.github.io/1.0/general/release-notes-version-3.2.html @@ -0,0 +1,949 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devonfw Release notes 3.2 “Homer”

+
+ +
+
+
+

Introduction

+
+
+

We are proud to announce the immediate release of devonfw version 3.2 (code named “Homer” during development). This version is the first one that contains the new devonfw IDE by default, so there is no need to download a huge ZIP with the whole distribution regardless of the use to which it will be put. The new devonfw IDE CLI will allow any user to set up a customized development environment completely configured with access to all the devonfw features, frameworks and tools. As we now rely on the official IDEs, this is also the first version that is macOS compatible.

+
+
+

This release consolidates the documentation workflow, adding the contents dynamically to the new devonfw website at the same time the PDF is generated. This has been achieved using a new GitHub Action that takes the contents and builds the HTML files for the documentation section of the website. The documentation workflow proposed in the following picture is now complete:

+
+
+
+documentation workflow +
+
+
+

This release also includes the first version of devon4node. We consider that node.js should be a first-class citizen inside the devonfw platform and for that reason we have included the latest development technologies for this ecosystem. The devon4node CLI, schematics and other tools will allow our users to create powerful node.js applications with the same philosophy you may find in the other languages and frameworks included. More information at its section below.

+
+
+

The new devon4net 3.2.0 version is also included in this release. Based on the .NET Core 3.0 and containing lots of new features gathered from important and recent projects, it represents a great improvement and an intermediate step to provide support for the incoming .NET Core 3.1 LTS. More information at its section below.

+
+
+

This release includes the final version of the new CobiGen CLI, completely integrated with the new devonfw IDE. Now using commands, you will be able to generate code the same way as you do with Eclipse. This means that you can use CobiGen on other IDEs like Visual Studio Code or IntelliJ. Besides, the Update command has been implemented. Now you will be able to update easily all your CobiGen plug-ins and templates inside the CLI.

+
+
+

On the other hand, the refactoring process has been completely developed, improving the mergers and including input readers for any other languages and frameworks, allowing the creation of models to generate code from them. Last, but not least, this new version includes the new templates for devon4net, devon4ng and devon4j generation.

+
+
+

And as always, My Thai Star has been updated to the latest versions of devon4j, devon4node and devon4net including completely State Management with NgRx in its devon4ng implementation upgrade.

+
+
+

This is the last release with the current semantic versioning number and without a fixed release calendar. From now on the new devonfw releases will happen in April, August and December and will be named YYYY.MM.NN, with the first release of next year being 2020.04.00.

+
+
+
+
+

Changes and new features

+
+ +
+
+
+

devonfw-ide

+
+
+

We have entirely rewritten our automated solution for your local IDE (integrated desktop environment). The former oasp4j-ide and devonfw distributions with their extra-large gigabyte zip files are now entirely replaced by devonfw-ide. This new solution is provided as a small *.tar.gz file that is publicly available. It works on all platforms and has been tested on Windows, MacOS, and Linux. After extraction you only need to run a setup script. Here you provide a settings git URL for your customer project or simply hit return for testing or small projects. After reading and confirming the terms of use it will download all required tools in the proper versions for your operating system and configure them. Instead of various confusing scripts there is now only one CLI command devon for all use cases, which gives a much better user experience.

+
+
+

To get started go to the home page. There is even a migration-guide if you are currently used to the old approach and want to quickly jump into the new solution.

+
+
+
+
+

My Thai Star Sample Application

+
+
+

The new release of My Thai Star has focused on the following improvements:

+
+
+
    +
  • +

    Release 3.2.0.

    +
  • +
  • +

    devon4j:

    +
    +
      +
    • +

      devon4j 3.2.0 integrated.

      +
    • +
    • +

      Spring Boot 2.1.9 integrated.

      +
    • +
    • +

      SAP 4/HANA prediction use case.

      +
    • +
    • +

      Bug fixes.

      +
    • +
    +
    +
  • +
  • +

    devon4ng:

    +
    +
      +
    • +

      SAP 4/HANA prediction use case.

      +
    • +
    • +

      2FA toggleable (two factor authentication).

      +
    • +
    • +

      NgRx full integrated (PR #285).

      +
    • +
    +
    +
  • +
  • +

    devon4net

    +
    +
      +
    • +

      devon4net for dotnet core 3.0 updated

      +
    • +
    • +

      Updated the API contract compatible with the other stacks

      +
    • +
    • +

      JWT implementation reviewed to increase security

      +
    • +
    • +

      ASP.NET user database dependencies removed

      +
    • +
    • +

      HTTP2 support

      +
    • +
    • +

      Clearer CRUD pattern implementation

      +
    • +
    +
    +
  • +
  • +

    devon4node

    +
    +
      +
    • +

      TypeScript 3.6.3.

      +
    • +
    • +

      Based on Nest framework.

      +
    • +
    • +

      Configuration Module

      +
    • +
    • +

      Added cors and security headers

      +
    • +
    • +

      Added mailer module and email templates.

      +
    • +
    • +

      Built in winston logger

      +
    • +
    • +

      Custom ClassSerializerInterceptor

      +
    • +
    +
    +
  • +
  • +

    MrChecker

    +
    +
      +
    • +

      Example cases for end-to-end test.

      +
    • +
    • +

      Production line configuration.

      +
    • +
    +
    +
  • +
  • +

    CICD

    +
    +
      +
    • +

      Improved integration with Production Line

      +
    • +
    • +

      New Traefik load balancer and reverse proxy

      +
    • +
    • +

      New deployment from artifact

      +
    • +
    • +

      New CICD pipelines

      +
    • +
    • +

      New deployment pipelines

      +
    • +
    • +

      Automated creation of pipelines in Jenkins

      +
    • +
    +
    +
  • +
+
+
+
+
+

Documentation updates

+
+
+

This release addresses the new documentation workflow, making it now possible to keep the documentation synced with any change. The new documentation includes the following contents:

+
+
+
    +
  • +

    Getting started

    +
  • +
  • +

    devonfw ide

    +
  • +
  • +

    devon4j documentation

    +
  • +
  • +

    devon4ng documentation

    +
  • +
  • +

    devon4net documentation

    +
  • +
  • +

    devon4node documentation

    +
  • +
  • +

    CobiGen documentation

    +
  • +
  • +

    devonfw-shop-floor documentation

    +
  • +
  • +

    cicdgen documentation

    +
  • +
  • +

    devonfw testing with MrChecker

    +
  • +
  • +

    My Thai Star documentation

    +
  • +
  • +

    Contribution guide

    +
  • +
  • +

    Release notes

    +
  • +
+
+
+
+
+

devon4j

+
+
+

The following changes have been incorporated in devon4j:

+
+
+
    +
  • +

    Completed full support from Java8 to Java11

    +
  • +
  • +

    Several security fixes

    +
  • +
  • +

    Upgrade to Spring Boot 2.1.9

    +
  • +
  • +

    Upgrade to Spring 5.1.8

    +
  • +
  • +

    Upgrade to JUnit 5 (requires migration via devonfw-ide)

    +
  • +
  • +

    Improved JPA support for IdRef

    +
  • +
  • +

    Improved auditing metadata support

    +
  • +
  • +

    Many improvements to documentation (added JDK guide, architecture-mapping, JMS, etc.)

    +
  • +
  • +

    For all details see milestone.

    +
  • +
+
+
+
+
+

devon4ng

+
+
+

The following changes have been incorporated in devon4ng:

+
+
+
    +
  • +

    Angular CLI 8.3.1,

    +
  • +
  • +

    Angular 8.2.11,

    +
  • +
  • +

    Angular Material 8.2.3,

    +
  • +
  • +

    Ionic 4.11.1,

    +
  • +
  • +

    Capacitor 1.2.1 as Cordova replacement,

    +
  • +
  • +

    NgRx 8.3 support for State Management,

    +
  • +
  • +

    devon4ng Angular application template updated to Angular 8.2.11 with visual improvements and bugfixes https://github.com/devonfw/devon4ng-application-template

    +
  • +
  • +

    devon4ng Ionic application template updated to 4.11.1 and improved https://github.com/devonfw/devon4ng-ionic-application-template

    +
  • +
  • +

    Improved devon4ng Angular application template with state management using Angular 8 and NgRx 8 https://github.com/devonfw/devon4ng-ngrx-template

    +
  • +
  • +

    Documentation and samples updated to latest versions:

    +
    +
      +
    • +

      Web Components with Angular Elements

      +
    • +
    • +

      Initial configuration with App Initializer pattern

      +
    • +
    • +

      Error Handling

      +
    • +
    • +

      PWA with Angular and Ionic

      +
    • +
    • +

      Lazy Loading

      +
    • +
    • +

      Library construction

      +
    • +
    • +

      Layout with Angular Material

      +
    • +
    • +

      Theming with Angular Material

      +
    • +
    +
    +
  • +
+
+
+
+
+

devon4net

+
+
+

The following changes have been incorporated in devon4net:

+
+
+
    +
  • +

    Updated to latest .net core 3.0 version

    +
  • +
  • +

    Template

    +
    +
      +
    • +

      Global configuration automated. devon4net can be instantiated on any .net core application template with no effort

      +
    • +
    • +

      Added support for HTTP2

      +
    • +
    • +

      Number of libraries minimized

      +
    • +
    • +

      Architecture layer review. More clear and scalable

      +
    • +
    • +

      Added red button functionality (aka killswitch) to stop attending API request with custom error

      +
    • +
    • +

      Improved API error management

      +
    • +
    • +

      Added support to only accept request from clients with a specific client certificate on Kestrel server. Special thanks to Bart Roozendaal (Capgemini NL)

      +
    • +
    • +

      All components use IOptions pattern to be set up properly

      +
    • +
    • +

      Swagger generation compatible with OpenAPI v3

      +
    • +
    +
    +
  • +
  • +

    Modules

    +
    +
      +
    • +

      The devon4net netstandard libraries have been updated to netstandard 2.1

      +
    • +
    • +

      JWT:

      +
      +
        +
      • +

        Added token encryption (token cannot be decrypted anymore by external parties). Now You can choose the encryption algorithm depending on your needs

        +
      • +
      • +

        Added support for secret key or certificate encryption

        +
      • +
      • +

        Added authorization for swagger portal

        +
      • +
      +
      +
    • +
    • +

      Circuit breaker

      +
      +
        +
      • +

        Added support to bypass certificate validation

        +
      • +
      • +

        Added support to use a certificate for https communications using Microsoft’s httpclient factory

        +
      • +
      +
      +
    • +
    • +

      Unit of Work

      +
      +
        +
      • +

        Repository classes unified and reviewed for increasing performance and reduce the consumed memory

        +
      • +
      • +

        Added support for different database servers: In memory, Cosmos, MySQL + MariaDB, Firebird, PostgreSQL, Oracle, SQLite, Access, MS Local.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+

devon4node

+
+
+

The following changes have been incorporated in devon4node:

+
+
+
    +
  • +

    TypeScript 3.6.3.

    +
  • +
  • +

    Based on Nest framework.

    +
  • +
  • +

    Complete backend implementation.

    +
  • +
  • +

    New devon4node CLI. It will provide you some commands

    +
    +
      +
    • +

      new: create a new devon4node interactively

      +
    • +
    • +

      generate: generate code based on schematics

      +
    • +
    • +

      db: manage the database

      +
    • +
    +
    +
  • +
  • +

    New devon4node schematics

    +
    +
      +
    • +

      application: create a new devon4node application

      +
    • +
    • +

      config-module: add a configuration module to the project

      +
    • +
    • +

      mailer: install and configure the devon4node mailer module

      +
    • +
    • +

      typeorm: install TypeORM in the project

      +
    • +
    • +

      auth-jwt: add users and auth-jwt modules to the project

      +
    • +
    • +

      swagger: expose an endpoint with the auto-generated swagger

      +
    • +
    • +

      security: add cors and other security headers to the project.

      +
    • +
    • +

      crud: create all CRUD for an entity

      +
    • +
    • +

      entity: create an entity

      +
    • +
    +
    +
  • +
  • +

    New mailer module

    +
  • +
  • +

    New common library

    +
  • +
  • +

    Build in winston logger

    +
  • +
  • +

    Custom ClassSerializerInterceptor

    +
  • +
  • +

    Extendable base entity

    +
  • +
  • +

    New application samples

    +
  • +
+
+
+
+
+

CobiGen

+
+
+
    +
  • +

    CobiGen core new features:

    +
    +
      +
    • +

      CobiGen CLI: Update command implemented. Now you will be able to update easily all your CobiGen plug-ins and templates inside the CLI. Please take a look into the documentation for more info.

      +
      +
        +
      • +

        CobiGen CLI is now JDK11 compatible.

        +
      • +
      • +

        CobiGen CLI commandlet for devonfw-ide has been added. You can use it to setup easily your CLI and to run CobiGen related commands.

        +
      • +
      • +

        Added a version provider so that you will be able to know all the CobiGen plug-ins versions.

        +
      • +
      • +

        Added a process bar when the CLI is downloading the CobiGen plug-ins.

        +
      • +
      +
      +
    • +
    • +

      CobiGen refactoring finished: With this refactoring we have been able to decouple CobiGen completely from the target and input language. This facilitates the creation of parsers and mergers for any language. For more information please take a look here.

      +
      +
        +
      • +

        New TypeScript input reader: We are now able to parse any TypeScript class and generate code using the parsed information. We currently use TypeORM entities as a base for generation.

        +
      • +
      +
      +
    • +
    • +

      Improving CobiGen templates:

      +
      +
        +
      • +

        Updated devon4ng-NgRx templates to NgRx 8.

        +
      • +
      • +

        Generation of an Angular client using as input a TypeORM entity. This is possible thanks to the new TypeScript input reader.

        +
      • +
      • +

        .Net templates have been upgraded to .Net Core 3.0

        +
      • +
      +
      +
    • +
    • +

      CobiGen for Eclipse is now JDK11 compatible.

      +
    • +
    • +

      Fixed bugs when adapting templates and other bugs on the CobiGen core.

      +
    • +
    +
    +
  • +
+
+
+
+
+

devonfw shop floor

+
+
+ +
+
+
+
+

== cicdgen

+
+
+
    +
  • +

    Patched minor bugs

    +
  • +
+
+
+
+
+

sonar-devon4j-plugin

+
+
+

sonar-devon4j-plugin is a SonarQube plugin for architecture governance of devon4j applications. It verifies the architecture and conventions of devon4j, the Java stack of devonfw. The following changes have been incorporated: +* Plugin was renamed from sonar-devon-plugin to sonar-devon4j-plugin +* Rules/checks have been added to verify naming conventions +* New rule for proper JPA datatype mapping +* Proper tagging of rules as architecture-violation and not as bug, etc. +* Several improvements have been made to prepare the plugin to enter the SonarQube marketplace, what will happen with the very next release. +* Details can be found here: https://github.com/devonfw/sonar-devon4j-plugin/milestone/2?closed=1

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/getting-started.wiki/Home.html b/docs/devonfw.github.io/1.0/getting-started.wiki/Home.html new file mode 100644 index 00000000..2d8b627a --- /dev/null +++ b/docs/devonfw.github.io/1.0/getting-started.wiki/Home.html @@ -0,0 +1,280 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Home

+
+
+

Welcome to the wiki pages of the devonfw getting-started guide!
+Please select a topic from the sidebar. ⇒

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/getting-started.wiki/further-info-community-links.html b/docs/devonfw.github.io/1.0/getting-started.wiki/further-info-community-links.html new file mode 100644 index 00000000..d3122e1d --- /dev/null +++ b/docs/devonfw.github.io/1.0/getting-started.wiki/further-info-community-links.html @@ -0,0 +1,318 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+ +
+
+

We strive to foster an active, diverse and dynamic community around devonfw and are relying on modern collaboration tools to do so. Please note that some resources listed here might only be accessible to members or partners of Capgemini.

+
+
+
+
+

Microsoft Teams

+
+
+

The devonfw public channel is accessible to everyone who has a Microsoft Teams account. You can find the latest discussions on ongoing development topics here, as well as new commits and pull requests to our repos.

+
+
+

Join us to stay in the loop, and feel free to post your questions regarding devonfw here.

+
+ +
+
+
+

Yammer

+
+
+

Our corporate Yammer channel is accessible to Capgemini employees and members. If you are looking for information or feedback on current and planned projects regarding devonfw, we recommend you ask around here first.

+
+ +
+
+
+

E-Mail

+
+
+

You can reach our dedicated iCSD Support Team via e-mail at:

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/getting-started.wiki/further-info-repo-overview.html b/docs/devonfw.github.io/1.0/getting-started.wiki/further-info-repo-overview.html new file mode 100644 index 00000000..722317c9 --- /dev/null +++ b/docs/devonfw.github.io/1.0/getting-started.wiki/further-info-repo-overview.html @@ -0,0 +1,351 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Repository Overview

+
+
+

The GitHub repositories within the devonfw organization contain the source code and documentation for official devonfw projects.

+
+
+
+devonfw Repository Overview +
+
An overview of the devonfw organization repositories.
+
+
+

The most relevant repositories here are the individual devonfw technology stacks:

+
+
+ +
+
+

Our framework also delivers a number of tools and plug-ins that aim to accelerate and streamline the development process, for example:

+
+
+ +
+
+

We also provide educational material and reference implementations to aid new users and drive the adoption of our framework, for example:

+
+
+ +
+
+

Projects in early development and prototypes are located in the devonfw forge repository. They usually remain there until they are ready for broader release or use in production.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/getting-started.wiki/getting-started.html b/docs/devonfw.github.io/1.0/getting-started.wiki/getting-started.html new file mode 100644 index 00000000..b8e175d7 --- /dev/null +++ b/docs/devonfw.github.io/1.0/getting-started.wiki/getting-started.html @@ -0,0 +1,901 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Getting Started

+
+ +
+
+
+

Introduction

+
+ +
+

What is devonfw?

+
+
+devonfw small +
+
+
+

Welcome to the devonfw platform. This is a product of the CSD (Custom Solution Development) industrialization effort to establish a standardized platform for custom software development within Capgemini APPS2. This platform is aimed at engagements, in which clients don’t specify the use of a predefined technology stack. In these cases we can offer a proven alternative as a result of our experience as a group.

+
+
+

devonfw is a development platform aiming for the standardization of processes and the boosting of productivity. It provides an architecture blueprint for server and client applications, alongside a set of tools to deliver a fully functional, out-of-the-box development environment.

+
+
+ + + + + +
+ + +The devonfw name is a registered trademark of Capgemini Logo Small, but the software and documentation included in devonfw are fully open source. Please refer to our OSS Compliance section for more information. +
+
+
+
+

Building Blocks of the Platform

+
+
+devonfwcatalog +
+
+
+

devonfw uses a state-of-the-art, open source, core reference architecture for the server (these days considered a commodity in the IT-industry) and on top of that an ever increasing number of high-value assets, which are developed by Capgemini.

+
+
+
+

The devonfw Technology Stack

+
+

devonfw is fully open source and consists of the following technology stacks:

+
+
+
+

Back-End Solutions

+
+

For server applications, devonfw includes the following solutions:

+
+
+ +
+
+
+

Front-End solutions

+
+

For client applications, devonfw includes two solutions based on TypeScript, JavaScript, C# and .NET:

+
+
+ +
+
+
+

Custom Tools

+ +
+
+

devonfw-ide

+
+

The devonfw-ide is not one monolithic program that is installed with a traditional executable; rather it’s a collection of scripts which are invoked via command line to automate several repetitive development tasks. These scripts then interact with other tools, frameworks, and third-party IDEs to streamline the development workflow.

+
+
+
+devonfw ide +
+
+
+

The advantage of this approach is, that you can have as many instances of the devonfw-ide on your machine as you need — for different projects with different tools, tool versions and configurations. No need for a physical installation and no tweaking of your operating system required!

+
+
+

Instances of the devonfw-ide do not interfere with each other, nor with other installed software. The package size of the devonfw-ide is initially very small, the setup is simple, and the included software is portable.

+
+
+
+

== IDEs

+
+

It supports the following IDEs:

+
+ +
+
+

== Platforms

+
+

It supports the following platforms:

+
+
+ +
+
+
+

== Build-Systems

+
+

It supports the following build-systems:

+
+
+ +
+
+ + + + + +
+ + +Other IDEs, platforms, or tools can easily be integrated as commandlets. +
+
+
+
+

CobiGen

+
+

CobiGen is a code generator included in the devonfw-ide, that allows users to generate the project structure and large parts of the application component code. This saves a lot of time, which is usually wasted on repetitive engineering tasks and/or writing boilerplate code.

+
+
+
+cobigen +
+
+
+

Following the same philosophy as the devonfw-ide, CobiGen bundles a new command line interface (CLI), that enables the generation of code using only a few commands. This approach also allows us to decouple CobiGen from Eclipse and use it alongside VS Code or IntelliJ IDEA.

+
+ +
+
+

Why should I use devonfw?

+
+

devonfw aims to provide a framework for the development of web applications based on the Java EE programming model. It uses the Spring framework as its Java EE default implementation.

+
+
+
+

Objectives

+ +
+
+

Standardization

+
+

We don’t want to keep reinventing the wheel for thousands of projects, for hundreds of customers, across dozens of countries. For this reason, we aim to rationalize, harmonize and standardize the development assets for software projects and industrialize the software development process.

+
+
+
+

Industrialization of Innovative Technologies & “Digital”

+
+

devonfw’s goal is to standardize & industrialize. But this applies not only to large volume, “traditional” custom software development projects. devonfw also aims to offer a standardized platform which contains a range of state-of-the-art methodologies and technology stacks. devonfw supports agile development by small teams utilizing the latest technologies for projects related to Mobile, IoT and the Cloud.

+
+
+
+

Deliver & Improve Business Value

+
+
+devon quality agility +
+
+
+
+

Efficiency

+
+
    +
  • +

    Up to 20% reduction in time to market, with faster delivery due to automation and reuse.

    +
  • +
  • +

    Up to 25% less implementation efforts due to code generation and reuse.

    +
  • +
  • +

    Flat pyramid and rightshore, ready for junior developers.

    +
  • +
+
+
+
+

Quality

+
+
    +
  • +

    State-of-the-art architecture and design.

    +
  • +
  • +

    Lower cost on maintenance and warranty.

    +
  • +
  • +

    Technical debt reduction by reuse.

    +
  • +
  • +

    Risk reduction due to continuous improvement of individual assets.

    +
  • +
  • +

    Standardized, automated quality checks.

    +
  • +
+
+
+
+

Agility

+
+
    +
  • +

    Focus on business functionality, not on technicalities.

    +
  • +
  • +

    Shorter release cycles.

    +
  • +
  • +

    DevOps by design — Infrastructure as Code.

    +
  • +
  • +

    Continuous Delivery pipeline.

    +
  • +
  • +

    On- and off-premise flexibility.

    +
  • +
  • +

    PoCs and prototypes in days not months.

    +
  • +
+
+
+
+

Features

+ +
+
+

Everything in a Single ZIP

+
+

The devonfw distribution is packaged in a ZIP file that includes all the custom tools, software and configurations.

+
+
+

Having all the dependencies self-contained in the distribution’s ZIP file, users don’t need to install or configure anything. Just extracting the ZIP content is enough to have a fully functional devonfw.

+
+
+
+

devonfw — The Package

+
+

The devonfw platform provides:

+
+
+
    +
  • +

    Implementation blueprints for a modern cloud-ready server and a choice on JS-Client technologies (either open source Angular or a very rich and impressive solution based on commercial Sencha UI).

    +
  • +
  • +

    Quality documentation and step-by-step quick start guides.

    +
  • +
  • +

    Highly integrated and packaged development environment based around Eclipse and Jenkins. You will be ready to start implementing your first customer-specific use case in 2h time.

    +
  • +
  • +

    Iterative eclipse-based code-generator that understands "Java" and works on higher architectural concepts than Java-classes.

    +
  • +
  • +

    An example application as a reference implementation.

    +
  • +
  • +

    Support through a large community + industrialization services (Standard Platform as a Service) available in the iProd service catalog.

    +
  • +
+
+
+
+
+
+

devonfw-ide Download and Setup

+
+
+

Please refer to our devonfw-ide Setup section.

+
+
+
+
+

Guides

+
+
+

Our goal is to provide a smooth starting experience to all users of devonfw, no matter how experienced they are or what their stakeholder role is. To achieve this, we provide a list of recommended guides here:

+
+
+

For Students and Junior Engineers:

+
+ +
+

For Senior Engineers and Architects:

+
+ +
+

For Team Leaders and Product Ambassadors:

+
+ + +
+

Build Your First devonfw Application

+
+

JumpTheQueue is a small application based on the devonfw framework, which you can create yourself by following our simple step-by-step tutorial. By doing so, you will learn about the app development workflow and gain insight into the design of a professional business information system. Please visit the JumpTheQueue wiki and start working through the tutorial HERE.

+
+
+ + + + + +
+ + +The tutorial assumes you have successfully set up the devonfw-ide previously. +
+
+
+

You can also clone the project and explore the finished source code via:

+
+
+
+
git clone https://github.com/devonfw/jump-the-queue.git
+
+
+
+
+JumpTheQueue Screenshots +
+
+
+

Another way to check out the JumpTheQueue-Application is to try our interactive katacoda scenario where you set up the application step by step.

+
+ + +
+
+

Explore Our devonfw Sample Application

+
+

MyThaiStar is a complex sample app, that demonstrates the full capabilities of our framework. On this page we will describe how to download and launch the app on your system, so you can test the various functionalities it offers and explore its code.

+
+
+

You can also check out the interactive katacoda scenario for setting up and trying out the MyThaiStar-Application.

+
+ +
+ + + + + +
+ + +We assume you have successfully set up the devonfw-ide previously. +
+
+
+
    +
  1. +

    In the root directory of a devonfw-ide directory, right click and select "Open Devon CMD shell here" from the Windows Explorer context menu. Then navigate to the main workspace and checkout the MyThaiStar Git repository like this:

    +
    +
    +
    cd workspaces/main
    +git clone https://github.com/devonfw/my-thai-star.git
    +
    +
    +
  2. +
  3. +

    Perform: cd my-thai-star

    +
  4. +
  5. +

    Execute: devon eclipse ws-up

    +
  6. +
  7. +

    Execute: devon eclipse create-script

    +
  8. +
  9. +

    Go to the root folder of the distribution and run eclipse-main.bat

    +
  10. +
  11. +

    In Eclipse navigate to File > Import > Maven > Existing Maven Projects, then import the cloned project from your workspace by clicking the "Browse" button and selecting /workspaces/my-thai-star/java/mtsj/.

    +
  12. +
  13. +

    Run the backend by right-clicking SpringBootApp.java and selecting Run as > Java Application in the context menu. The backend will start up and create log entries in the Eclipse Console tab.

    +
    +

    Running the MyThaiStar Backend

    +
    +
  14. +
  15. +

    Return to your command shell and perform: cd angular

    +
  16. +
  17. +

    Execute: npm install

    +
  18. +
  19. +

    Execute: ng serve

    +
  20. +
  21. +

    Once started, the frontend will be available at localhost:4200/restaurant. Login with the username and password waiter and take a look at the various functionalities provided by MyThaiStar.

    +
  22. +
+
+
+

You should now take a look at both the front- and backend code and familiarize yourself with its structure and concepts, since most devonfw projects follow this exemplary implementation. Please visit the architecture overview pages of devon4ng and devon4j to learn more about the internal workings of front- and backend.

+
+
+
+
+
+

Further Information

+
+ +
+

Repository Overview

+
+

The GitHub repositories within the devonfw organization contain the source code and documentation for official devonfw projects.

+
+
+
+devonfw Repository Overview +
+
An overview of the devonfw organization repositories.
+
+
+

The most relevant repositories here are the individual devonfw technology stacks:

+
+
+ +
+
+

Our framework also delivers a number of tools and plug-ins that aim to accelerate and streamline the development process, for example:

+
+
+ +
+
+

We also provide educational material and reference implementations to aid new users and drive the adoption of our framework, for example:

+
+
+ +
+
+

Projects in early development and prototypes are located in the devonfw forge repository. They usually remain there until they are ready for broader release or use in production.

+
+ +
+
+ +
+

We strive to foster an active, diverse and dynamic community around devonfw and are relying on modern collaboration tools to do so. Please note that some resources listed here might only be accessible to members or partners of Capgemini.

+
+
+
+

Microsoft Teams

+
+

The devonfw public channel is accessible to everyone who has a Microsoft Teams account. You can find the latest discussions on ongoing development topics here, as well as new commits and pull requests to our repos.

+
+
+

Join us to stay in the loop, and feel free to post your questions regarding devonfw here.

+
+ +
+
+

Yammer

+
+

Our corporate Yammer channel is accessible to Capgemini employees and members. If you are looking for information or feedback on current and planned projects regarding devonfw, we recommend you ask around here first.

+
+ +
+
+

E-Mail

+
+

You can reach our dedicated iCSD Support Team via e-mail at:

+
+ +
+
+
+
+

Contributing

+
+
+

Please refer to our Contributing section.

+
+
+
+
+

Code of Conduct

+
+
+

Please refer to our Code of Conduct section.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/getting-started.wiki/guide-enterprise-dev-basics.html b/docs/devonfw.github.io/1.0/getting-started.wiki/guide-enterprise-dev-basics.html new file mode 100644 index 00000000..82e6c832 --- /dev/null +++ b/docs/devonfw.github.io/1.0/getting-started.wiki/guide-enterprise-dev-basics.html @@ -0,0 +1,311 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Basics of enterprise software development with devonfw

+
+
+

Many students or young engineers starting their first job in software development are not acquainted with enterprise software development technologies or higher concepts of programming.

+
+
+

We aim to introduce some of these topics here…

+
+
+
+
+

Contents

+
+
+
    +
  • +

    Dependency Injection (DI)

    +
  • +
  • +

    Object Relational Mapping (ORM)

    +
  • +
  • +

    Java Enterprise Edition (Java EE) Concepts

    +
    +
      +
    • +

      Java Enterprise Beans (EJB)

      +
    • +
    • +

      Java Persistence API (JPA)

      +
    • +
    +
    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/getting-started.wiki/guide-first-application.html b/docs/devonfw.github.io/1.0/getting-started.wiki/guide-first-application.html new file mode 100644 index 00000000..f5d52dd4 --- /dev/null +++ b/docs/devonfw.github.io/1.0/getting-started.wiki/guide-first-application.html @@ -0,0 +1,310 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Build Your First devonfw Application

+
+
+

JumpTheQueue is a small application based on the devonfw framework, which you can create yourself by following our simple step-by-step tutorial. By doing so, you will learn about the app development workflow and gain insight into the design of a professional business information system. Please visit the JumpTheQueue wiki and start working through the tutorial HERE.

+
+
+ + + + + +
+ + +The tutorial assumes you have successfully set up the devonfw-ide previously. +
+
+
+

You can also clone the project and explore the finished source code via:

+
+
+
+
git clone https://github.com/devonfw/jump-the-queue.git
+
+
+
+
+JumpTheQueue Screenshots +
+
+
+

Another way to check out the JumpTheQueue-Application is to try our interactive katacoda scenario where you set up the application step by step.

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/getting-started.wiki/guide-sample-application.html b/docs/devonfw.github.io/1.0/getting-started.wiki/guide-sample-application.html new file mode 100644 index 00000000..d692e336 --- /dev/null +++ b/docs/devonfw.github.io/1.0/getting-started.wiki/guide-sample-application.html @@ -0,0 +1,346 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Explore Our devonfw Sample Application

+
+
+

MyThaiStar is a complex sample app, that demonstrates the full capabilities of our framework. On this page we will describe how to download and launch the app on your system, so you can test the various functionalities it offers and explore its code.

+
+
+

You can also check out the interactive katacoda scenario for setting up and trying out the MyThaiStar-Application.

+
+ +
+ + + + + +
+ + +We assume you have successfully set up the devonfw-ide previously. +
+
+
+
    +
  1. +

    In the root directory of a devonfw-ide directory, right click and select "Open Devon CMD shell here" from the Windows Explorer context menu. Then navigate to the main workspace and checkout the MyThaiStar Git repository like this:

    +
    +
    +
    cd workspaces/main
    +git clone https://github.com/devonfw/my-thai-star.git
    +
    +
    +
  2. +
  3. +

    Perform: cd my-thai-star

    +
  4. +
  5. +

    Execute: devon eclipse ws-up

    +
  6. +
  7. +

    Execute: devon eclipse create-script

    +
  8. +
  9. +

    Go to the root folder of the distribution and run eclipse-main.bat

    +
  10. +
  11. +

    In Eclipse navigate to File > Import > Maven > Existing Maven Projects, then import the cloned project from your workspace by clicking the "Browse" button and selecting /workspaces/my-thai-star/java/mtsj/.

    +
  12. +
  13. +

    Run the backend by right-clicking SpringBootApp.java and selecting Run as > Java Application in the context menu. The backend will start up and create log entries in the Eclipse Console tab.

    +
    +

    Running the MyThaiStar Backend

    +
    +
  14. +
  15. +

    Return to your command shell and perform: cd angular

    +
  16. +
  17. +

    Execute: npm install

    +
  18. +
  19. +

    Execute: ng serve

    +
  20. +
  21. +

    Once started, the frontend will be available at localhost:4200/restaurant. Login with the username and password waiter and take a look at the various functionalities provided by MyThaiStar.

    +
  22. +
+
+
+

You should now take a look at both the front- and backend code and familiarize yourself with its structure and concepts, since most devonfw projects follow this exemplary implementation. Please visit the architecture overview pages of devon4ng and devon4j to learn more about the internal workings of front- and backend.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/getting-started.wiki/guide-team-start.html b/docs/devonfw.github.io/1.0/getting-started.wiki/guide-team-start.html new file mode 100644 index 00000000..9d6a90d5 --- /dev/null +++ b/docs/devonfw.github.io/1.0/getting-started.wiki/guide-team-start.html @@ -0,0 +1,298 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Get a team set up and running with devonfw

+
+
+

One of the goals of devonfw is to accelerate development and get new projects off the ground quickly. Here we provide resources for team leaders who want to use devonfw to achieve this task.

+
+
+
+
+

Contents

+
+
+
    +
  • +

    Creating a unified project environment and workspaces with devonfw

    +
  • +
  • +

    Distributing updates under devonfw

    +
  • +
  • +

    Licensing checks for devonfw projects

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/getting-started.wiki/introduction-download-and-setup.html b/docs/devonfw.github.io/1.0/getting-started.wiki/introduction-download-and-setup.html new file mode 100644 index 00000000..244346c5 --- /dev/null +++ b/docs/devonfw.github.io/1.0/getting-started.wiki/introduction-download-and-setup.html @@ -0,0 +1,426 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Download and Setup

+
+
+

In this section, you will learn how to set up the devonfw environment and start working on your first project based on devonfw.

+
+
+

The devonfw environment contains all software and tools necessary to develop applications with devonfw.

+
+
+
+
+

Prerequisites

+
+
+

In order to set up the environment, the following are the prerequisites:

+
+
+
    +
  • +

    internet connection (including details of your proxy configuration, if necessary)

    +
  • +
  • +

    more than 1GB of free disk space to install customized environment

    +
  • +
  • +

    command line tool devonfw IDE is already installed

    +
  • +
+
+
+
+
+

Download

+
+
+ + + + + +
+ + +Please refer to the devonfw-ide documentation for the current installation process. +
+
+
+

Older devonfw distributions can be obtained from the FTP releases library and are packaged in a ZIP file that includes all the needed tools, software and configurations. Browse to the corresponding version folder in order to get the latest version.

+
+
+
+
+

Setup the Workspace for Older Versions

+
+ +
+
+
+

Windows

+
+
+
    +
  1. +

    Unzip the devonfw distribution into a directory of your choice. The path to the devonfw distribution directory should contain no spaces, to prevent problems with some of the tools.

    +
  2. +
  3. +

    Run the batch file "create-or-update-workspace.bat".

    +
  4. +
+
+
+
+setup 1 +
+
+
+

This will configure the included tools like Eclipse with the default settings of the devonfw distribution.

+
+
+

The result should be as seen below

+
+
+
+setup 2 +
+
+
+

The working devonfw environment is ready!

+
+
+

Note: If you use a proxy to connect to the Internet, you have to manually configure it in Maven, Sencha Cmd and Eclipse. The next section explains this.

+
+
+
+
+

Linux

+
+
+
    +
  • +

    Unzip the devonfw distribution into a directory of your choice. The path to the devonfw distribution directory should contain no spaces, to prevent problems with some of the tools.

    +
  • +
  • +

    Run the script: . env.sh

    +
  • +
+
+
+
+run env sh +
+
+
+
    +
  • +

    Run the script: . create-or-update-workspace

    +
  • +
+
+
+
+create update ws +
+
+
+

Both . env.sh and . create-or-update-workspace will set the PATH for all the software included with the devon distribution, like: eclipse, maven, java etc. This will also generate some files, like eclipse_main, used to invoke eclipse.

+
+
+
    +
  • +

    For vscode setup we have to execute create-or-update-workspace-vs

    +
  • +
  • +

There are also the scripts initialize.sh and uninstallUI.sh.

    +
    +
      +
    • +

      initialize.sh: installs angular, node, python, ant, and subversion

      +
    • +
    • +

      uninstallUI.sh: is used to uninstall the above software

      +
    • +
    +
    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/getting-started.wiki/introduction-the-devon-ide.html b/docs/devonfw.github.io/1.0/getting-started.wiki/introduction-the-devon-ide.html new file mode 100644 index 00000000..389d255e --- /dev/null +++ b/docs/devonfw.github.io/1.0/getting-started.wiki/introduction-the-devon-ide.html @@ -0,0 +1,631 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Devon IDE

+
+
+

"Devon IDE" is the general name for two distinct variants of a customized Eclipse IDE. The Open Source variant — called devonfw-ide — is publicly available for everyone. A more extended variant is included in the "Devon Dist", which is only available to Capgemini employees.

+
+
+
+
+

Features and Advantages

+
+
+

devonfw comes with a fully featured IDE in order to simplify the installation, configuration and maintenance of this instrumental part of the development environment. As it is being included in the distribution, the IDE is ready to be used and some specific configuration of certain plugins only takes a few minutes.

+
+
+
+Integrated IDE +
+
+
+

As with the remainder of the distribution, the advantage of this approach is that you can have as many instances of the devonfw-ide "installed" on your machine for different projects with different tools, tool versions and configurations. No physical installation and no tweaking of your operating system required. "Installations" of the Devon distribution do not interfere with each other nor with other installed software.

+
+
+
+
+

Multiple Workspaces

+
+
+

There is inbuilt support for working with different workspaces on different branches. Create and update new workspaces with a few clicks. You can see the workspace name in the title-bar of your IDE so you do not get confused and work on the right branch.

+
+
+
+
+

CobiGen

+
+
+

In the Devon distribution we have a code generator to create CRUD code, called CobiGen. This is a generic incremental generator for end to end code generation tasks, mostly used in Java projects. Due to a template-based approach, CobiGen generates any set of text-based documents and document fragments.

+
+
+
+cobigen +
+
+
+

CobiGen is distributed in the Devon distribution as an Eclipse plugin, and is available to all Devon developers for Capgemini engagements. Due to the importance of this component and the scope of its functionality, it is fully described here.

+
+
+
+
+

IDE Plugins

+
+
+

Since an application’s code can greatly vary, and every program can be written in lots of ways without being semantically different, IDE comes with pre-installed and pre-configured plugins that use some kind of a probabilistic approach, usually based on pattern matching, to determine which pieces of code should be reviewed. These hints are a real time-saver, helping you to review incoming changes and prevent bugs from propagating into the released artifacts. Apart from CobiGen mentioned in the previous paragraph, the IDE provides CheckStyle, SonarQube, FindBugs and SOAP-UI. Details of each can be found in subsequent sections.

+
+
+
+
+

CheckStyle

+
+ +
+
+
+

== What is CheckStyle?

+
+
+

CheckStyle is an Open Source development tool to help you ensure that your Java code adheres to a set of coding standards. CheckStyle does this by inspecting your Java source code and pointing out items that deviate from a defined set of coding rules.

+
+
+

With the CheckStyle IDE Plugin, your code is constantly inspected for coding standard deviations. Within the Eclipse workbench, you are immediately notified with the problems via the Eclipse Problems View and source code annotations similar to compiler errors or warnings. +This ensures an extremely short feedback loop right at the developers fingertips.

+
+
+
+
+

== Why use CheckStyle?

+
+
+

If your development team consists of more than one person, then obviously a common ground for coding standards (formatting rules, line lengths etc.) must be agreed upon - even if it is just for practical reasons to avoid superficial, format related merge conflicts. +CheckStyle Plugin helps you define and easily apply those common rules.

+
+
+

The plugin uses a project builder to check your project files with CheckStyle. Assuming the IDE Auto-Build feature is enabled, each modification of a project file will immediately get checked by CheckStyle on file save - giving you immediate feedback about the changes you made. To use a simple analogy, the CheckStyle Plug-in works very much like a compiler but instead of producing .class files, it produces warnings where the code violates CheckStyle rules. The discovered deviations are accessible in the Eclipse Problems View, as code editor annotations and via additional CheckStyle violations views.

+
+
+
+
+

== Installation of CheckStyle

+
+
+

After IDE installation, IDE provides default CheckStyle configuration file which has certain check rules specified . +The set of rules used to check the code is highly configurable. A CheckStyle configuration specifies which check rules are validated against the code and with which severity violations will be reported. Once defined a CheckStyle configuration can be used across multiple projects. The IDE comes with several pre-defined CheckStyle configurations. +You can create custom configurations using the plugin’s CheckStyle configuration editor or even use an existing CheckStyle configuration file from an external location.

+
+
+

You can see violations in your workspace as shown in below figure.

+
+
+
+checkstyle +
+
+
+
+
+

== Usage

+
+
+

So, once projects are created, follow steps mentioned below, to activate CheckStyle:

+
+
+
    +
  1. +

    Open the properties of the project you want to get checked.

    +
  2. +
+
+
+
+checkstyle2 +
+
+
+
    +
  1. +

    Select the CheckStyle section within the properties dialog.

    +
  2. +
+
+
+
+checkstyle3 +
+
+
+
    +
  1. +

    Activate CheckStyle for your project by selecting the CheckStyle active for this project check box and press OK

    +
  2. +
+
+
+
+checkstyle4 +
+
+
+

Now CheckStyle should begin checking your code. This may take a while depending on how many source files your project contains. +The CheckStyle Plug-in uses background jobs to do its work - so while CheckStyle audits your source files you should be able to continue your work. +After CheckStyle has finished checking your code please look into your Eclipse Problems View. +There should be some warnings from CheckStyle. This warnings point to the code locations where your code violates the pre-configured Checks configuration.

+
+
+
+checkstyle5 +
+
+
+

You can navigate to the problems in your code by double-clicking the problem in you problems view. +On the left hand side of the editor an icon is shown for each line that contains a CheckStyle violation. Hovering with your mouse above this icon will show you the problem message. +Also note the editor annotations - they are there to make it even easier to see where the problems are.

+
+
+
+
+

FindBugs

+
+ +
+
+
+

== What is FindBugs?

+
+
+

FindBugs is an open source project for a static analysis of the Java bytecode to identify potential software bugs. FindBugs provides early feedback about potential errors in the code.

+
+
+
+
+

== Why use FindBugs?

+
+
+

It scans your code for bugs, breaking down the list of bugs in your code into a ranked list on a 20-point scale. The lower the number, the more hardcore the bug. This helps the developer to address these problems early in the development phase.

+
+
+
+
+

== Installation and Usage of FindBugs

+
+
+

IDE comes preinstalled with FindBugs plugin.

+
+
+

You can configure that FindBugs should run automatically for a selected project. For this, right-click on a project and select Properties from the popup menu to access the project properties. Select FindBugs → Run automatically as shown below.

+
+
+
+configure FindBugs +
+
+
+

To run the error analysis of FindBugs on a project, right-click on it and select the Find Bugs…​ → Find Bugs menu entry.

+
+
+
+error analysis +
+
+
+

Plugin provides specialized views to see the reported error messages. Select Window → Show View → Other…​ to access the views. +The FindBugs error messages are also displayed in the Problems view or as decorators in the Package Explorer view.

+
+
+
+ShowView bug Explorer +
+
+
+
+bug Explorer +
+
+
+
+
+

SonarLint

+
+ +
+
+
+

== What is SonarLint?

+
+
+

SonarLint is an open platform to manage code quality. +It provides on-the-fly feedback to developers on new bugs and quality issues injected into their code.

+
+
+
+
+

== Why use SonarLint?

+
+
+

It covers seven aspects of code quality like junits, coding rules, comments, complexity, duplications, architecture and design, and potential bugs. +SonarLint has a very efficient way of navigating, a balance between high-level view, dashboard and defect hunting tools. This enables you to quickly uncover projects and / or components that are in analysis to establish action plans.

+
+
+
+
+

== Installation and Usage of SonarLint

+
+
+

IDE comes preinstalled with SonarLint. +To configure it, please follow the steps below:

+
+
+

First of all, you need to start sonar service. For that, go to software folder which is extracted from Devon-dist zip, choose sonarqube→bin→<choose appropriate folder according to your OS>-→and execute startSonar bat file.

+
+
+

If your project is not already under analysis, you’ll need to declare it through the SonarQube web interface as described here. +Once your project exists in SonarQube, you’re ready to get started with SonarQube in Eclipse.

+
+
+

SonarLint in Eclipse is pre-configured to access a local SonarQube server listening on http://localhost:9000/. +You can edit this server, delete it or add new ones. By default, the user and password is "admin". If the sonar service is started properly, the test connection will give you a successful result.

+
+
+
+Sonar_add_server +
+
+
+

For getting a project analysed on sonar, refer to this http://docs.sonarqube.org/display/SONAR/Analyzing+Source+Code [link].

+
+
+

Linking a project to one analysed on sonar server.

+
+
+
+associate-sonarqube +
+
+
+

In the SonarQube project text field, start typing the name of the project and select it in the list box:

+
+
+
+link-with-project +
+
+
+

Click on Finish. Your project is now associated to one analyzed on your SonarQube server.

+
+
+

Changing Binding

+
+
+

At any time, it is possible to change the project association.

+
+
+

To do so, right-click on the project in the Project Explorer, and then SonarQube > Change Project Association.

+
+
+
+change-link-with-project +
+
+
+

Unbinding a Project

+
+
+

To do so, right-click on the project in the Project Explorer, and then SonarQube > Remove SonarQube Nature.

+
+
+
+unlink-with-project +
+
+
+

Advanced Configuration

+
+
+

Additional settings (such as markers for new issues) are available through Window > Preferences > SonarLint

+
+
+
+eclipse-settings +
+
+
+

To look for SonarQube analysed issues, go to Window→Show View→Others→SonarLint→SonarLint Issues. +Now you can see issues in the SonarQube issues tab as shown

+
+
+
+sonarQube-issues-view +
+
+
+

Or you can go to the link http://localhost:9000, log in with admin as id and admin as password, and go to the Dashboard. You can see all the statistics of the analysis of the configured projects on the sonar server.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/getting-started.wiki/introduction-what-is-devonfw.html b/docs/devonfw.github.io/1.0/getting-started.wiki/introduction-what-is-devonfw.html new file mode 100644 index 00000000..5be9d9a8 --- /dev/null +++ b/docs/devonfw.github.io/1.0/getting-started.wiki/introduction-what-is-devonfw.html @@ -0,0 +1,488 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

What is devonfw?

+
+
+
+devonfw small +
+
+
+

Welcome to the devonfw platform. This is a product of the CSD (Custom Solution Development) industrialization effort to establish a standardized platform for custom software development within Capgemini APPS2. This platform is aimed at engagements, in which clients don’t specify the use of a predefined technology stack. In these cases we can offer a proven alternative as a result of our experience as a group.

+
+
+

devonfw is a development platform aiming for the standardization of processes and the boosting of productivity. It provides an architecture blueprint for server and client applications, alongside a set of tools to deliver a fully functional, out-of-the-box development environment.

+
+
+ + + + + +
+ + +The devonfw name is a registered trademark of Capgemini Logo Small, but the software and documentation included in devonfw are fully open source. Please refer to our OSS Compliance section for more information. +
+
+
+
+
+

Building Blocks of the Platform

+
+
+
+devonfwcatalog +
+
+
+

devonfw uses a state-of-the-art, open source, core reference architecture for the server (these days considered a commodity in the IT-industry) and on top of that an ever increasing number of high-value assets, which are developed by Capgemini.

+
+
+
+
+

The devonfw Technology Stack

+
+
+

devonfw is fully open source and consists of the following technology stacks:

+
+
+
+
+

Back-End Solutions

+
+
+

For server applications, devonfw includes the following solutions:

+
+
+ +
+
+
+
+

Front-End solutions

+
+
+

For client applications, devonfw includes two solutions based on TypeScript, JavaScript, C# and .NET:

+
+
+ +
+
+
+
+

Custom Tools

+
+ +
+
+
+

devonfw-ide

+
+
+

The devonfw-ide is not one monolithic program that is installed with a traditional executable; rather it’s a collection of scripts which are invoked via command line to automate several, repetitive development tasks. These scripts then interact with other tools, frameworks, and third-party IDEs to streamline the development workflow.

+
+
+
+devonfw ide +
+
+
+

The advantage of this approach is, that you can have as many instances of the devonfw-ide on your machine as you need — for different projects with different tools, tool versions and configurations. No need for a physical installation and no tweaking of your operating system required!

+
+
+

Instances of the devonfw-ide do not interfere with each other, nor with other installed software. The package size of the devonfw-ide is initially very small, the setup is simple, and the included software is portable.

+
+
+
+
+

== IDEs

+
+
+

It supports the following IDEs:

+
+ +
+
+
+

== Platforms

+
+
+

It supports the following platforms:

+
+
+ +
+
+
+
+

== Build-Systems

+
+
+

It supports the following build-systems:

+
+
+ +
+
+ + + + + +
+ + +Other IDEs, platforms, or tools can easily be integrated as commandlets. +
+
+
+
+
+

CobiGen

+
+
+

CobiGen is a code generator included in the devonfw-ide, that allows users to generate the project structure and large parts of the application component code. This saves a lot of time, which is usually wasted on repetitive engineering tasks and/or writing boilerplate code.

+
+
+
+cobigen +
+
+
+

Following the same philosophy as the devonfw-ide, CobiGen bundles a new command line interface (CLI), that enables the generation of code using only a few commands. This approach also allows us to decouple CobiGen from Eclipse and use it alongside VS Code or IntelliJ IDEA.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/getting-started.wiki/introduction-why-should-i-use-devonfw.html b/docs/devonfw.github.io/1.0/getting-started.wiki/introduction-why-should-i-use-devonfw.html new file mode 100644 index 00000000..bd7cdeda --- /dev/null +++ b/docs/devonfw.github.io/1.0/getting-started.wiki/introduction-why-should-i-use-devonfw.html @@ -0,0 +1,437 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Why should I use devonfw?

+
+
+

devonfw aims to provide a framework for the development of web applications based on the Java EE programming model. It uses the Spring framework as its Java EE default implementation.

+
+
+
+
+

Objectives

+
+ +
+
+
+

Standardization

+
+
+

We don’t want to keep reinventing the wheel for thousands of projects, for hundreds of customers, across dozens of countries. For this reason, we aim to rationalize, harmonize and standardize the development assets for software projects and industrialize the software development process.

+
+
+
+
+

Industrialization of Innovative Technologies & “Digital”

+
+
+

devonfw’s goal is to standardize & industrialize. But this applies not only to large volume, “traditional” custom software development projects. devonfw also aims to offer a standardized platform which contains a range of state-of-the-art methodologies and technology stacks. devonfw supports agile development by small teams utilizing the latest technologies for projects related to Mobile, IoT and the Cloud.

+
+
+
+
+

Deliver & Improve Business Value

+
+
+
+devon quality agility +
+
+
+
+
+

Efficiency

+
+
+
    +
  • +

    Up to 20% reduction in time to market, with faster delivery due to automation and reuse.

    +
  • +
  • +

    Up to 25% less implementation efforts due to code generation and reuse.

    +
  • +
  • +

    Flat pyramid and rightshore, ready for junior developers.

    +
  • +
+
+
+
+
+

Quality

+
+
+
    +
  • +

    State-of-the-art architecture and design.

    +
  • +
  • +

    Lower cost on maintenance and warranty.

    +
  • +
  • +

    Technical debt reduction by reuse.

    +
  • +
  • +

    Risk reduction due to continuous improvement of individual assets.

    +
  • +
  • +

    Standardized, automated quality checks.

    +
  • +
+
+
+
+
+

Agility

+
+
+
    +
  • +

    Focus on business functionality, not on technicalities.

    +
  • +
  • +

    Shorter release cycles.

    +
  • +
  • +

    DevOps by design — Infrastructure as Code.

    +
  • +
  • +

    Continuous Delivery pipeline.

    +
  • +
  • +

    On- and off-premise flexibility.

    +
  • +
  • +

    PoCs and prototypes in days not months.

    +
  • +
+
+
+
+
+

Features

+
+ +
+
+
+

Everything in a Single ZIP

+
+
+

The devonfw distribution is packaged in a ZIP file that includes all the custom tools, software and configurations.

+
+
+

Having all the dependencies self-contained in the distribution’s ZIP file, users don’t need to install or configure anything. Just extracting the ZIP content is enough to have a fully functional devonfw.

+
+
+
+
+

devonfw — The Package

+
+
+

The devonfw platform provides:

+
+
+
    +
  • +

    Implementation blueprints for a modern cloud-ready server and a choice on JS-Client technologies (either open source Angular or a very rich and impressive solution based on commercial Sencha UI).

    +
  • +
  • +

    Quality documentation and step-by-step quick start guides.

    +
  • +
  • +

    Highly integrated and packaged development environment based around Eclipse and Jenkins. You will be ready to start implementing your first customer-specific use case in 2h time.

    +
  • +
  • +

    Iterative eclipse-based code-generator that understands "Java" and works on higher architectural concepts than Java-classes.

    +
  • +
  • +

    An example application as a reference implementation.

    +
  • +
  • +

    Support through a large community + industrialization services (Standard Platform as a Service) available in the iProd service catalog.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/hangar.wiki/aws/setup-aws-account-iam-for-eks.html b/docs/devonfw.github.io/1.0/hangar.wiki/aws/setup-aws-account-iam-for-eks.html new file mode 100644 index 00000000..dc2b29c3 --- /dev/null +++ b/docs/devonfw.github.io/1.0/hangar.wiki/aws/setup-aws-account-iam-for-eks.html @@ -0,0 +1,555 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Setup AWS account IAM for deployment in EKS

+
+
+

The scope of this section is to prepare an AWS account to be ready for deploying in AWS EKS. By the end of this guide, a new IAM user belonging to a group with the required permissions will be created.

+
+
+
+
+

Preparing environment

+
+
+

First of all, install AWS CLI and Python for your OS.

+
+
+
+
+

Prerequisites

+
+
+
    +
  • +

    An AWS account with IAM full access permission.

    +
  • +
+
+
+
+
+

Alternative

+
+
+

In case you do not have an account or permission to create new IAM users, request it to your AWS administrator asking for the following policies being attached. Then go to Check IAM user permissions.

+
+
+
+
+

== Required managed policies

+
+
+
+
AmazonEC2FullAccess
+IAMReadOnlyAccess
+AmazonEKSServicePolicy
+AmazonS3FullAccess
+AmazonEC2ContainerRegistryFullAccess
+
+
+
+
+
+

== Required custom policies

+
+
+

Find them on /scripts/accounts/aws/eks-custom-policies.json.

+
+
+
+
+

Creating IAM user using provided script

+
+
+

The script located at /scripts/accounts/aws/create-user.sh will automatically create a user, also enrolling it in a newly created group with the required policies attached.

+
+
+

In case you do not have an AWS access key (needed to authenticate through API), follow this guide to create it.

+
+
+
+
+

Usage

+
+
+
+
create-user.sh \
+  -u <username> \
+  -g <group> \
+  [-p <policies...>] \
+  [-f <policies file path>] \
+  [-c <custom policies file path>] \
+  [-a <AWS access key>] \
+  [-s <AWS secret key>] \
+  [-r <region>]
+
+
+
+
+
+

Flags

+
+
+
+
-u      [Required] Username for the new user
+-g      [Required] Group name for the group to be created or used
+-p      [Optional] Policies to be attached to the group, splitted by comma
+-f      [Optional] Path to a file containing the policies to be attached to the group
+-c      [Optional] Path to a json file containing the custom policies to be attached to the group.
+-a      [Optional] AWS administrator access key
+-s      [Optional] AWS administrator secret key
+-r      [Optional] AWS region
+
+
+
+
+
+

Example

+
+
+
+
./create-user.sh -u Bob -g DevOps -f ./eks-managed-policies.txt -c ./eks-custom-policies.json -a "myAccessKey" -s "mySecretKey" -r eu-west-1
+
+
+
+ + + + + +
+ + +If the "DevOps" group does not exist, it will be created. +
+
+
+ + + + + +
+ + +Required policies for using EKS are located at /scripts/accounts/aws/eks-managed-policies.txt and /scripts/accounts/aws/eks-custom-policies.json +
+
+
+
+
+

After execution

+
+
+

On success, the newly created user access data will be shown as output:

+
+
+
+
Access key ID: <accessKeyID>
+Secret access key: <secretAccessKey>
+
+
+
+ + + + + +
+ + +It is mandatory to store the access key ID and the secret access key securely at this point, as they will not be retrievable again. +
+
+
+
+
+

Check IAM user permissions

+
+
+

The script located at /scripts/accounts/aws/verify-account-policies.sh will check that the necessary policies were attached to the IAM user.

+
+
+
+
+

Usage

+
+
+
+
verify-account-policies.sh \
+  -u <username> \
+  [-p <policies...>] \
+  [-f <policies file path>] \
+  [-c <custom policies file path>] \
+  [-a <AWS access key>] \
+  [-s <AWS secret key>] \
+  [-r <region>]
+
+
+
+
+
+

Flags

+
+
+
+
-u      [Required] Username whose policies will be checked
+-p      [Optional] Policies to be checked, splitted by comma
+-f      [Optional] Path to a file containing the policies to be checked
+-c      [Optional] Path to a file containing the custom policies to be checked
+-a      [Optional] AWS administrator access key
+-s      [Optional] AWS administrator secret key
+-r      [Optional] AWS region
+
+
+
+ + + + + +
+ + +At least one policies flag (-p, -f or -c) is required. +
+
+
+
+
+

Example

+
+
+
+
./verify-account-policies.sh -u Bob -f ./eks-managed-policies.txt -c ./eks-custom-policies.json -a "myAccessKey" -s "mySecretKey" -r eu-west-1
+
+
+
+

After execution, provided policies will be shown preceded by an OK or FAILED depending on the attachment status.

+
+
+ + + + + +
+ + +Required policies for using EKS are located at /scripts/accounts/aws/eks-managed-policies.txt and /scripts/accounts/aws/eks-custom-policies.json +
+
+
+
+
+

Configure AWS CLI

+
+
+

Once you have been provided with an IAM user with the required policies attached, setup the AWS CLI using the following command:

+
+
+
+
aws configure
+
+
+
+

Fill the prompted fields with your data:

+
+
+
+
AWS Access Key ID [None]: <accessKeyID>
+AWS Secret Access Key [None]: <secretAccessKey>
+Default region name [None]: eu-west-1
+Default output format [None]: json
+
+
+
+

Now you have AWS CLI ready to use.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/hangar.wiki/aws/setup-sonarqube-instance.html b/docs/devonfw.github.io/1.0/hangar.wiki/aws/setup-sonarqube-instance.html new file mode 100644 index 00000000..8b375a76 --- /dev/null +++ b/docs/devonfw.github.io/1.0/hangar.wiki/aws/setup-sonarqube-instance.html @@ -0,0 +1,438 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Setting up a SonarQube instance in AWS

+
+
+

The scope of this section is to deploy an AWS EC2 instance running SonarQube for further usage from a CI pipeline. A set of scripts and a Terraform recipe have been created in order to assist you in the launch of a SonarQube instance with an embedded database.

+
+
+
+
+

Getting Started

+
+ +
+
+
+

Prerequisites

+
+
+
    +
  • +

    Install Terraform.

    +
  • +
  • +

    Install AWS CLI.

    +
  • +
  • +

    Have a SSH keypair for the SonarQube instance. You can use an existing one or create a new one with the following command:

    +
  • +
+
+
+
+
aws ec2 create-key-pair --key-name sonarqube --query 'KeyMaterial' --output text > sonarqube.pem
+
+
+
+ + + + + +
+ + +This will create a public key, directly stored in AWS (current region only), and a private key stored in the sonarqube.pem file, that will be necessary if you ever need to access the instance, so be sure you store it securely. +
+
+
+
+
+

Relevant files

+
+
+
    +
  • +

    main.tf contains declarative definition written in HCL of AWS infrastructure.

    +
  • +
  • +

    setup_sonarqube.sh script to be run on EC2 instance that installs and deploys a container running SonarQube.

    +
  • +
  • +

    variables.tf contains variable definition for main.tf.

    +
  • +
  • +

    terraform.tfvars contains values (user-changeable) for the variables defined in variables.tf.

    +
  • +
  • +

    terraform.tfstate contains current state of the created infrastructure. Should be stored securely.

    +
  • +
  • +

    set-config.sh assists user in setting the values of terraform.tfvars.

    +
  • +
+
+
+
+
+

Usage

+
+
+

First, you need to initialize the working directory containing Terraform configuration files (located at /scripts/sonarqube) and install any required plugins:

+
+
+
+
terraform init
+
+
+
+

Then, you may need to customize some input variables about the environment. To do so, you can either edit terraform.tfvars file or take advantage of the set-config.sh script, which allows you to create or update values for the required variables, passing them as flags. As a full example:

+
+
+
+
./set-config.sh --aws_region eu-west-1 --vpc_cidr_block 10.0.0.0/16 --subnet_cidr_block 10.0.1.0/24 --nic_private_ip 10.0.1.50 --instance_type t3a.small --keypair_name sonarqube
+
+
+
+ + + + + +
+ + +Unless changed, the keypair name expected by default is sonarqube. +
+
+
+

Finally, deploy SonarQube instance:

+
+
+
+
terraform apply --auto-approve
+
+
+
+ + + + + +
+ + +terraform apply command performs a plan and actually carries out the planned changes to each resource using the relevant infrastructure provider’s API. You can use it to perform changes on the created resources later on. Remember to securely store the terraform.tfstate file, otherwise you will not be able to perform any changes, including destroying them, from Terraform. More insights here. +
+
+
+

In particular, this will create an Ubuntu-based EC2 instance in AWS and deploy a Docker container running SonarQube.

+
+
+

You will get the public IP address of the EC2 instance as output. Take note of it, you will need it later on.

+
+
+

After a few minutes, you will be able to access SonarQube web interface on http://sonarqube_public_ip:9000 (replace with actual IP) with the following credentials:

+
+
+
    +
  • +

    Username: admin

    +
  • +
  • +

    Password: admin

    +
  • +
+
+
+ + + + + +
+ + +Change the default password promptly. +
+
+
+
+
+

Appendix: Destroy SonarQube instance

+
+
+

As long as you keep the terraform.tfstate file generated when creating the SonarQube instance, you can easily destroy it and all associated resources by executing:

+
+
+
+
terraform destroy
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/hangar.wiki/azure-devops/setup-build-pipeline.html b/docs/devonfw.github.io/1.0/hangar.wiki/azure-devops/setup-build-pipeline.html new file mode 100644 index 00000000..77cd4555 --- /dev/null +++ b/docs/devonfw.github.io/1.0/hangar.wiki/azure-devops/setup-build-pipeline.html @@ -0,0 +1,367 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Setting up a Build Pipeline on Azure DevOps

+
+
+

In this section we will create a build pipeline for compiling project code. This pipeline will be configured to be triggered every time there is a commit to the Azure DevOps repository, regardless of which branch it is made on.

+
+
+

The creation of the pipeline will follow the project workflow, so a new branch named feature/build-pipeline will be created and the YAML file for the pipeline will be pushed to it.

+
+
+

Then, a Pull Request (PR) will be created in order to merge the new branch into the appropriate branch (provided in -b flag). The PR will be automatically merged if the repository policies are met. If the merge is not possible, either the PR URL will be shown as output, or it will be opened in your web browser if using -w flag.

+
+
+

The script located at /scripts/pipelines/azure-devops/pipeline_generator.sh will automatically create this new branch, create a build pipeline based on a YAML template appropriate for the project programming language or framework, create the Pull Request and, if it is possible, merge this new branch into the specified branch.

+
+
+
+
+

Prerequisites

+
+
+

This script will commit and push the corresponding YAML template into your repository, so please be sure your local repository is up-to-date (i.e. you have pulled the latest changes with git pull).

+
+
+
+
+

Creating the pipeline using provided script

+
+ +
+
+
+

Usage

+
+
+
+
pipeline_generator.sh \
+  -c <config file path> \
+  -n <pipeline name> \
+  -l <language or framework> \
+  -d <project local path> \
+  [-b <branch>] \
+  [-w]
+
+
+
+ + + + + +
+ + +The config file for the build pipeline is located at /scripts/pipelines/azure-devops/templates/build/build-config.cfg. +
+
+
+
+
+

Flags

+
+
+
+
-c    [Required] Configuration file containing pipeline definition.
+-n    [Required] Name that will be set to the pipeline.
+-l    [Required] Language or framework of the project.
+-d    [Required] Local directory of your project (the path should always be using '/' and not '\').
+-b               Name of the branch to which the Pull Request will target. PR is not created if the flag is not provided.
+-w               Open the Pull Request on the web browser if it cannot be automatically merged. Requires -b flag.
+
+
+
+
+
+

Examples

+
+ +
+
+
+

== Quarkus project

+
+
+
+
./pipeline_generator.sh -c ./templates/build/build-config.cfg -n quarkus-project-build -l quarkus -d C:/Users/$USERNAME/Desktop/quarkus-project -b develop -w
+
+
+
+ + + + + +
+ + +Remember to write the path to the local repository with '/' and not '\' on Windows. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/hangar.wiki/azure-devops/setup-project.html b/docs/devonfw.github.io/1.0/hangar.wiki/azure-devops/setup-project.html new file mode 100644 index 00000000..3b7dc0c2 --- /dev/null +++ b/docs/devonfw.github.io/1.0/hangar.wiki/azure-devops/setup-project.html @@ -0,0 +1,368 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Setup Azure DevOps project

+
+
+

By the end of this guide a new project in Azure DevOps will be created.

+
+
+
+
+

Prerequisites

+
+
+
    +
  1. +

    Sign up into Azure DevOps (just follow the section Sign up with a personal Microsoft account).

    +
  2. +
  3. +

    Install the Azure CLI.

    +
  4. +
  5. +

    Create an Azure DevOps Personal Access Token (PAT).

    +
  6. +
+
+
+
+
+

Creating the Azure DevOps project

+
+
+

There are two ways of creating an Azure DevOps project:

+
+
+
    +
  1. +

    Create it manually using the web interface following Microsoft official guide.

    +
  2. +
  3. +

    Create it in an automated way using the provided script, as shown below.

    +
  4. +
+
+
+ + + + + +
+ + +Both when done manually and when using the script with -p flag, you will need to choose a process workflow. Learn more about the different options in the official documentation. By default, "Basic" workflow is chosen. +
+
+
+
+
+

Creating Azure DevOps project using provided script

+
+
+

The script located at scripts/accounts/azure-devops/create-project.sh enables you to create a new Azure DevOps project or configure an existing one.

+
+
+
+
+

Usage

+
+
+
+
create-project.sh \
+  -n <name> \
+  -d <description> \
+  -o <organization> \
+  -v <visibility> \
+  -t <PAT> \
+  [-w <process workflow>]
+
+
+
+
+
+

Flags

+
+
+
+
-n    [Required] Name of the new project.
+-d    [Required] Description for the new project.
+-o    [Required] Name of the organization for which the project will be configured.
+-v    [Required] Visibility. Accepted values: private, public.
+-t    [Required] PAT token to login Azure DevOps.
+-w               Process workflow that will be used. Accepted values: basic, agile, scrum, cmmi. Default: basic.
+
+
+
+
+
+

Example

+
+
+

./create-project.sh -n "Hello World" -d "This is a sample application" -o devon-hangar -v public -t myToken -w agile

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/hangar.wiki/azure-devops/setup-quality-pipeline.html b/docs/devonfw.github.io/1.0/hangar.wiki/azure-devops/setup-quality-pipeline.html new file mode 100644 index 00000000..452e0391 --- /dev/null +++ b/docs/devonfw.github.io/1.0/hangar.wiki/azure-devops/setup-quality-pipeline.html @@ -0,0 +1,380 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Setting up a Quality Pipeline on Azure DevOps

+
+
+

In this section we will create a quality pipeline for analyzing project code with SonarQube. This pipeline will be configured in order to be triggered every time a commit to the Azure DevOps repository is done, regardless of which branch it is made on.

+
+
+

The creation of this pipeline will follow the project workflow, so a new branch named feature/quality-pipeline will be created and the YAML file for the pipeline will be pushed to it.

+
+
+

Then, a Pull Request (PR) will be created in order to merge the new branch into the appropriate branch (provided in -b flag). The PR will be automatically merged if the repository policies are met. If the merge is not possible, either the PR URL will be shown as output, or it will be opened in your web browser if using -w flag.

+
+
+

The script located at /scripts/pipelines/azure-devops/pipeline_generator.sh will automatically create this new branch, create a quality pipeline based on a YAML template appropriate for the project programming language or framework, create the Pull Request, and if it is possible, merge this new branch into the specified branch.

+
+
+
+
+

Prerequisites

+
+
+
    +
  • +

    This script will commit and push the corresponding YAML template into your repository, so please be sure your local repository is up-to-date (i.e you have pulled the latest changes with git pull).

    +
  • +
  • +

    Generate a SonarQube token (just follow the section 'Generating a token').

    +
  • +
+
+
+
+
+

Creating the pipeline using provided script

+
+ +
+
+
+

Usage

+
+
+
+
pipeline_generator.sh \
+  -c <config file path> \
+  -n <pipeline name> \
+  -l <language or framework> \
+  -p <build pipeline name> \
+  -u <sonarqube url> \
+  -t <sonarqube token> \
+  -d <project local path> \
+  [-b <branch>] \
+  [-w]
+
+
+
+ + + + + +
+ + +The config file for the quality pipeline is located at /scripts/pipelines/azure-devops/templates/quality/quality-config.cfg. +
+
+
+
+
+

Flags

+
+
+
+
-c    [Required] Configuration file containing pipeline definition.
+-n    [Required] Name that will be set to the pipeline.
+-l    [Required] Language or framework of the project.
+-p    [Required] Build pipeline name.
+-u    [Required] SonarQube URL.
+-t    [Required] SonarQube token.
+-d    [Required] Local directory of your project (the path should always be using '/' and not '\').
+-b               Name of the branch to which the Pull Request will target. PR is not created if the flag is not provided.
+-w               Open the Pull Request on the web browser if it cannot be automatically merged. Requires -b flag.
+
+
+
+
+
+

Examples

+
+ +
+
+
+

== Quarkus project

+
+
+
+
./pipeline_generator.sh -c ./templates/quality/quality-config.cfg -n quarkus-project-quality -l quarkus -p quarkus-project-build -u http://52.17.210.4:9000 -t 6ce6663b63fc02881c6ea4c7cBa6563b8247a04e -d C:/Users/$USERNAME/Desktop/quarkus-project -b develop -w
+
+
+
+ + + + + +
+ + +Remember to write the path to the local repository with '/' and not '\' on Windows. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/hangar.wiki/azure-devops/setup-test-pipeline.html b/docs/devonfw.github.io/1.0/hangar.wiki/azure-devops/setup-test-pipeline.html new file mode 100644 index 00000000..e17328f1 --- /dev/null +++ b/docs/devonfw.github.io/1.0/hangar.wiki/azure-devops/setup-test-pipeline.html @@ -0,0 +1,388 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Setting up a Test Pipeline on Azure DevOps

+
+
+

In this section we will create a Test pipeline on Azure DevOps for running project test cases. This pipeline will be configured in order to be triggered every time a commit to the Azure DevOps repository is done, regardless of which branch it is made on.

+
+
+

The creation of this pipeline will follow the project workflow, so a new branch named feature/test-pipeline will be created and the YAML file for the pipeline will be pushed to it.

+
+
+

Then, a Pull Request (PR) will be created in order to merge the new branch into the appropriate branch (provided in -b flag). The PR will be automatically merged if the repository policies are met. If the merge is not possible, either the PR URL will be shown as output, or it will be opened in your web browser if using -w flag.

+
+
+

The script located at /scripts/pipelines/azure-devops/pipeline_generator.sh will automatically create a new branch, create a test pipeline based on a YAML template appropriate for the project programming language or framework, create the Pull Request, and, if possible, merge this new branch into the specified branch.

+
+
+
+
+

Prerequisites

+
+
+
    +
  • +

    This script will commit and push the corresponding YAML template into your repository, so please be sure your local repository is up-to-date (i.e. you have pulled the latest changes with git pull).

    +
  • +
  • +

    [Optional] Having some knowledge about the application, in particular knowing if, when tested, it produces a log file or some other blob (e.g. performance profiling data) that would be interesting to keep as an artifact.

    +
  • +
+
+
+
+
+

Creating the pipeline using provided script

+
+ +
+
+
+

Usage

+
+
+
+
pipeline_generator.sh \
+  -c <config file path> \
+  -n <pipeline name> \
+  -l <language or framework> \
+  -d <project local path> \
+  [-a <artifact source path>] \
+  [-b <branch>] \
+  [-w]
+
+
+
+ + + + + +
+ + +The config file for the test pipeline is located at /scripts/pipelines/azure-devops/templates/test/test-config.cfg. +
+
+
+ + + + + +
+ + +If the test pipeline failed, check the logs for the failed test case(s) summary. In More actions (three dots button) you can download complete logs which include additional diagnostic information. Also, you can retrieve the "additional pipeline output" artifact containing the application logs stored in the path specified in -a flag (if applicable). +
+
+
+
+
+

Flags

+
+
+
+
-c    [Required] Configuration file containing pipeline definition.
+-n    [Required] Name that will be set to the pipeline.
+-l    [Required] Language or framework of the project.
+-d    [Required] Local directory of your project (the path should always be using '/' and not '\').
+-a               Path to be persisted as an artifact after pipeline execution, e.g. where the application stores logs or any other blob on runtime.
+-b               Name of the branch to which the Pull Request will target. PR is not created if the flag is not provided.
+-w               Open the Pull Request on the web browser if it cannot be automatically merged. Requires -b flag.
+
+
+
+
+
+

Examples

+
+ +
+
+
+

== Quarkus project

+
+
+
+
./pipeline_generator.sh -c ./templates/test/test-config.cfg -n quarkus-project-test -l quarkus -d C:/Users/$USERNAME/Desktop/quarkus-project -b develop -w
+
+
+
+ + + + + +
+ + +Remember to write the path to the local repository with '/' and not '\' on Windows. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/hangar.wiki/hangar.html b/docs/devonfw.github.io/1.0/hangar.wiki/hangar.html new file mode 100644 index 00000000..34679ece --- /dev/null +++ b/docs/devonfw.github.io/1.0/hangar.wiki/hangar.html @@ -0,0 +1,271 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Hangar

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/Home.html b/docs/devonfw.github.io/1.0/ide.wiki/Home.html new file mode 100644 index 00000000..72ab99e4 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/Home.html @@ -0,0 +1,299 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

devonfw-ide

+
+
+

Welcome to the devonfw-ide!!!

+
+
+

The devonfw-ide is a fantastic tool to automatically download, install, setup and update the IDE (integrated development environment) of your software development projects.

+
+
+

For further details visit the following links:

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/LICENSE.html b/docs/devonfw.github.io/1.0/ide.wiki/LICENSE.html new file mode 100644 index 00000000..9ff7e4b1 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/LICENSE.html @@ -0,0 +1,2957 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

License

+
+
+

The product devonfw-ide is licensed under the following terms.

+
+
+

Binaries of this product have been made available to you by devonfw under the Apache Public License 2.0.

+
+
+

The documentation of this product is licensed under the terms of the Creative Commons License (Attribution-No Derivatives 4.0 International).

+
+
+

All of the source code to this product is available under licenses which are both free and open source.

+
+
+

More specifically, most of the source code is available under the Apache Public License 2.0. The remainder of the software which is not under the Apache license is available under one of a variety of other free and open source licenses. Those that require reproduction of the license text in the distribution are given below. (Note: your copy of this product may not contain code covered by one or more of the licenses listed here, depending on the exact product and version you choose.)

+
+
+

The following table shows the components that may be used. The column inclusion indicates the way the component is included:

+
+
+
    +
  • +

    directly included means the component is directly contained in the download package of devonfw-ide we provide

    +
  • +
  • +

    default setup means the component is not initially included but will be downloaded during the setup by default

    +
  • +
  • +

    optional means the component is neither initially included nor downloaded by default, but only gets downloaded and installed if explicitly triggered by you when invoking additional commands or if explicitly configured by your project.

    +
  • +
+
+
+
Third party components
+

|== == == == == == == == == == == = +|Component|Inclusion|License +|https://github.com/devonfw/ide[devonfw-ide] | Directly included |https://github.com/devonfw/ide/blob/master/LICENSE[ASL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] API | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] Implementation | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://openjdk.java.net/[OpenJDK] / AdoptOpenJDK (Java) |Default Setup| GPLv2 +|https://maven.apache.org/[Maven] | Default Setup|https://www.apache.org/licenses/LICENSE-2.0[ASL 2.0] +|https://code.visualstudio.com/[VS Code] |Optional| MIT (Terms) +|https://github.com/devonfw/extension-pack-vscode[extension-pack-vscode] |Optional|https://github.com/devonfw/extension-pack-vscode/blob/master/LICENSE[ASL 2.0] +|https://www.eclipse.org/[Eclipse] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] +|https://github.com/devonfw/cobigen[CobiGen] |Optional|https://github.com/devonfw/cobigen/blob/master/LICENSE.txt[ASL 2.0] +|https://marketplace.eclipse.org/content/tm-terminal[TM Terminal] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] (see here) +|https://github.com/iloveeclipse/anyedittools/[AnyEdit] |Optional|https://github.com/iloveeclipse/anyedittools/blob/master/LICENSE.md[EPL 1.0] +|https://checkstyle.org/eclipse-cs/[EclipseCS] |Optional|https://github.com/checkstyle/eclipse-cs/blob/master/LICENSE[LGPL 2.1] +|https://marketplace.eclipse.org/content/spotbugs-eclipse-plugin[SpotBugs Eclipse plugin] |Optional|https://github.com/spotbugs/spotbugs/blob/master/LICENSE[LGPL 2.1] +|https://www.eclemma.org/[EclEmma] |Optional|https://www.eclemma.org/license.html[EPL 1.0] +|https://basti1302.github.io/startexplorer/[StartExplorer] |Optional|http://www.wtfpl.net/txt/copying/[WTFPL 2] +|http://myregexp.com/eclipsePlugin.html[regex tester] 
|Optional|http://www.gnu.org/licenses/gpl-2.0.html[GPL 2.0] (see here) +|https://github.com/m-m-m/eclipse-templatevariables/[eclipse-templatevariables] |Optional|https://github.com/m-m-m/eclipse-templatevariables/blob/master/LICENSE.txt[ASL 2.0] +|https://nodejs.org/[Node.js] |Default Setup|https://raw.githubusercontent.com/nodejs/node/master/LICENSE[License] +|https://www.npmjs.com/[NPM] |Default Setup|https://github.com/npm/cli/blob/latest/LICENSE[Artistic License 2.0] (Terms) +|https://cli.angular.io/[Angular CLI] (ng) |Optional|https://cli.angular.io/license.html[MIT] +|http://groovy-lang.org/[Groovy]|Optional|https://github.com/apache/groovy/blob/master/LICENSE[ASL 2.0] +|https://ant.apache.org/[Apache Ant]|Optional|https://github.com/apache/ant/blob/master/LICENSE[ASL 2.0] +|https://gradle.org/[Gradle] |Optional|https://github.com/gradle/gradle/blob/master/LICENSE[ASL 2.0] +|https://jenkins.io/[Jenkins] |Optional|https://github.com/jenkinsci/jenkins/blob/master/LICENSE.txt[MIT] +|https://www.sonarsource.com/plans-and-pricing/community/[SonarQube (Community Edition)] |Optional|https://github.com/SonarSource/sonarqube/blob/master/LICENSE.txt[LGPL 3.0] +|https://www.sonarlint.org/eclipse/[SonarLint] |Optional|https://github.com/SonarSource/sonarlint-eclipse/blob/master/LICENSE.txt[LGPL 3+] +|https://github.com/devonfw/cicdgen[cicdgen] |Optional|https://github.com/devonfw/cicdgen/blob/develop/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4j[devon4j] |Optional|https://github.com/devonfw/devon4j/blob/develop/LICENSE[ASL 2.0] +|https://github.com/devonfw/devon4ng[devon4ng] |Optional|https://github.com/devonfw/devon4ng/blob/master/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4node[devon4node] |Optional|https://github.com/devonfw/devon4node/blob/develop/LICENSE.txt[ASL 2.0] +|https://www.jetbrains.com/idea/[IntelliJ IDEA] |Optional|https://www.jetbrains.com/opensource/idea/[ASL 2.0] +|http://www.jasypt.org/[jasypt] 
|Optional|http://www.jasypt.org/license.html[ASL 2.0] +|https://www.docker.com/[docker]|Optional|https://docs.docker.com/engine/#licensing[ASL 2.0] and EULA +|https://kubernetes.io/[kubernetes]|Optional|https://github.com/kubernetes/kubernetes/blob/master/LICENSE[ASL 2.0] +|== == == == == == == == == == == =

+
+
+
+
+

Apache Software License - Version 2.0

+
+
+
+
                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+
+
+
+

Eclipse Public License - Version 1.0

+
+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+
+1. DEFINITIONS
+
+"Contribution" means:
+
+a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and
+
+b) in the case of each subsequent Contributor:
+
+i) changes to the Program, and
+
+ii) additions to the Program;
+
+where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program.
+
+"Contributor" means any person or entity that distributes the Program.
+
+"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+"Program" means the Contributions distributed in accordance with this Agreement.
+
+"Recipient" means anyone who receives the Program under this Agreement, including all Contributors.
+
+2. GRANT OF RIGHTS
+
+a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form.
+
+b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+
+c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+
+d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+
+3. REQUIREMENTS
+
+A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that:
+
+a) it complies with the terms and conditions of this Agreement; and
+
+b) its license agreement:
+
+i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+
+ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+
+iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and
+
+iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange.
+
+When the Program is made available in source code form:
+
+a) it must be made available under this Agreement; and
+
+b) a copy of this Agreement must be included with each copy of the Program.
+
+Contributors may not remove or alter any copyright notices contained within the Program.
+
+Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution.
+
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved.
+
+This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation.
+
+
+
+
+
+

Eclipse Public License - Version 2.0

+
+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE (“AGREEMENT”). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+1. DEFINITIONS
+
+“Contribution” means:
+
+    a) in the case of the initial Contributor, the initial content Distributed under this Agreement, and
+    b) in the case of each subsequent Contributor:
+        i) changes to the Program, and
+        ii) additions to the Program;
+    where such changes and/or additions to the Program originate from and are Distributed by that particular Contributor. A Contribution “originates” from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include changes or additions to the Program that are not Modified Works.
+
+“Contributor” means any person or entity that Distributes the Program.
+
+“Licensed Patents” mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+“Program” means the Contributions Distributed in accordance with this Agreement.
+
+“Recipient” means anyone who receives the Program under this Agreement or any Secondary License (as applicable), including Contributors.
+
+“Derivative Works” shall mean any work, whether in Source Code or other form, that is based on (or derived from) the Program and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship.
+
+“Modified Works” shall mean any work in Source Code or other form that results from an addition to, deletion from, or modification of the contents of the Program, including, for purposes of clarity any new file in Source Code form that contains any contents of the Program. Modified Works shall not include works that contain only declarations, interfaces, types, classes, structures, or files of the Program solely in each case in order to link to, bind by name, or subclass the Program or Modified Works thereof.
+
+“Distribute” means the acts of a) distributing or b) making available in any manner that enables the transfer of a copy.
+
+“Source Code” means the form of a Program preferred for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+“Secondary License” means either the GNU General Public License, Version 2.0, or any later versions of that license, including any exceptions or additional permissions as identified by the initial Contributor.
+2. GRANT OF RIGHTS
+
+    a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, Distribute and sublicense the Contribution of such Contributor, if any, and such Derivative Works.
+    b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in Source Code or other form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+    c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to Distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+    d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+    e) Notwithstanding the terms of any Secondary License, no Contributor makes additional grants to any Recipient (other than those set forth in this Agreement) as a result of such Recipient's receipt of the Program under the terms of a Secondary License (if permitted under the terms of Section 3).
+
+3. REQUIREMENTS
+
+3.1 If a Contributor Distributes the Program in any form, then:
+
+    a) the Program must also be made available as Source Code, in accordance with section 3.2, and the Contributor must accompany the Program with a statement that the Source Code for the Program is available under this Agreement, and informs Recipients how to obtain it in a reasonable manner on or through a medium customarily used for software exchange; and
+    b) the Contributor may Distribute the Program under a license different than this Agreement, provided that such license:
+        i) effectively disclaims on behalf of all other Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+        ii) effectively excludes on behalf of all other Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+        iii) does not attempt to limit or alter the recipients' rights in the Source Code under section 3.2; and
+        iv) requires any subsequent distribution of the Program by any party to be under a license that satisfies the requirements of this section 3.
+
+3.2 When the Program is Distributed as Source Code:
+
+    a) it must be made available under this Agreement, or if the Program (i) is combined with other material in a separate file or files made available under a Secondary License, and (ii) the initial Contributor attached to the Source Code the notice described in Exhibit A of this Agreement, then the Program may be made available under the terms of such Secondary Licenses, and
+    b) a copy of this Agreement must be included with each copy of the Program.
+
+3.3 Contributors may not remove or alter any copyright, patent, trademark, attribution notices, disclaimers of warranty, or limitations of liability (‘notices’) contained within the Program from any copy of the Program which they Distribute, provided that Contributors may add their own appropriate notices.
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor (“Commercial Contributor”) hereby agrees to defend and indemnify every other Contributor (“Indemnified Contributor”) against any losses, damages and costs (collectively “Losses”) arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement, including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be Distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to Distribute the Program (including its Contributions) under the new version.
+
+Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. Nothing in this Agreement is intended to be enforceable by any entity that is not a Contributor or Recipient. No third-party beneficiary rights are created under this Agreement.
+Exhibit A – Form of Secondary Licenses Notice
+
+“This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License, v. 2.0 are satisfied: {name license(s), version(s), and exceptions or additional permissions here}.”
+
+    Simply including a copy of this Agreement, including this Exhibit A is not sufficient to license the Source Code under Secondary Licenses.
+
+    If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice.
+
+    You may add additional accurate notices of copyright ownership.
+
+
+
+
+
+

MIT License

+
+
+
+
Copyright <YEAR> <COPYRIGHT HOLDER>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+
+
+
+

Artistic License - Version 2.0

+
+
+
+
Copyright (c) 2000-2006, The Perl Foundation.
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+Preamble
+
+This license establishes the terms under which a given free software Package may be copied, modified, distributed, and/or redistributed. The intent is that the Copyright Holder maintains some artistic control over the development of that Package while still keeping the Package available as open source and free software.
+
+You are always permitted to make arrangements wholly outside of this license directly with the Copyright Holder of a given Package. If the terms of this license do not permit the full use that you propose to make of the Package, you should contact the Copyright Holder and seek a different licensing arrangement.
+Definitions
+
+"Copyright Holder" means the individual(s) or organization(s) named in the copyright notice for the entire Package.
+
+"Contributor" means any party that has contributed code or other material to the Package, in accordance with the Copyright Holder's procedures.
+
+"You" and "your" means any person who would like to copy, distribute, or modify the Package.
+
+"Package" means the collection of files distributed by the Copyright Holder, and derivatives of that collection and/or of those files. A given Package may consist of either the Standard Version, or a Modified Version.
+
+"Distribute" means providing a copy of the Package or making it accessible to anyone else, or in the case of a company or organization, to others outside of your company or organization.
+
+"Distributor Fee" means any fee that you charge for Distributing this Package or providing support for this Package to another party. It does not mean licensing fees.
+
+"Standard Version" refers to the Package if it has not been modified, or has been modified only in ways explicitly requested by the Copyright Holder.
+
+"Modified Version" means the Package, if it has been changed, and such changes were not explicitly requested by the Copyright Holder.
+
+"Original License" means this Artistic License as Distributed with the Standard Version of the Package, in its current version or as it may be modified by The Perl Foundation in the future.
+
+"Source" form means the source code, documentation source, and configuration files for the Package.
+
+"Compiled" form means the compiled bytecode, object code, binary, or any other form resulting from mechanical transformation or translation of the Source form.
+Permission for Use and Modification Without Distribution
+
+(1) You are permitted to use the Standard Version and create and use Modified Versions for any purpose without restriction, provided that you do not Distribute the Modified Version.
+Permissions for Redistribution of the Standard Version
+
+(2) You may Distribute verbatim copies of the Source form of the Standard Version of this Package in any medium without restriction, either gratis or for a Distributor Fee, provided that you duplicate all of the original copyright notices and associated disclaimers. At your discretion, such verbatim copies may or may not include a Compiled form of the Package.
+
+(3) You may apply any bug fixes, portability changes, and other modifications made available from the Copyright Holder. The resulting Package will still be considered the Standard Version, and as such will be subject to the Original License.
+Distribution of Modified Versions of the Package as Source
+
+(4) You may Distribute your Modified Version as Source (either gratis or for a Distributor Fee, and with or without a Compiled form of the Modified Version) provided that you clearly document how it differs from the Standard Version, including, but not limited to, documenting any non-standard features, executables, or modules, and provided that you do at least ONE of the following:
+
+(a) make the Modified Version available to the Copyright Holder of the Standard Version, under the Original License, so that the Copyright Holder may include your modifications in the Standard Version.
+(b) ensure that installation of your Modified Version does not prevent the user installing or running the Standard Version. In addition, the Modified Version must bear a name that is different from the name of the Standard Version.
+(c) allow anyone who receives a copy of the Modified Version to make the Source form of the Modified Version available to others under
+(i) the Original License or
+(ii) a license that permits the licensee to freely copy, modify and redistribute the Modified Version using the same licensing terms that apply to the copy that the licensee received, and requires that the Source form of the Modified Version, and of any works derived from it, be made freely available in that license fees are prohibited but Distributor Fees are allowed.
+Distribution of Compiled Forms of the Standard Version or Modified Versions without the Source
+
+(5) You may Distribute Compiled forms of the Standard Version without the Source, provided that you include complete instructions on how to get the Source of the Standard Version. Such instructions must be valid at the time of your distribution. If these instructions, at any time while you are carrying out such distribution, become invalid, you must provide new instructions on demand or cease further distribution. If you provide valid instructions or cease distribution within thirty days after you become aware that the instructions are invalid, then you do not forfeit any of your rights under this license.
+
+(6) You may Distribute a Modified Version in Compiled form without the Source, provided that you comply with Section 4 with respect to the Source of the Modified Version.
+Aggregating or Linking the Package
+
+(7) You may aggregate the Package (either the Standard Version or Modified Version) with other packages and Distribute the resulting aggregation provided that you do not charge a licensing fee for the Package. Distributor Fees are permitted, and licensing fees for other components in the aggregation are permitted. The terms of this license apply to the use and Distribution of the Standard or Modified Versions as included in the aggregation.
+
+(8) You are permitted to link Modified and Standard Versions with other works, to embed the Package in a larger work of your own, or to build stand-alone binary or bytecode versions of applications that include the Package, and Distribute the result without restriction, provided the result does not expose a direct interface to the Package.
+Items That are Not Considered Part of a Modified Version
+
+(9) Works (including, but not limited to, modules and scripts) that merely extend or make use of the Package, do not, by themselves, cause the Package to be a Modified Version. In addition, such works are not considered parts of the Package itself, and are not subject to the terms of this license.
+General Provisions
+
+(10) Any use, modification, and distribution of the Standard or Modified Versions is governed by this Artistic License. By using, modifying or distributing the Package, you accept this license. Do not use, modify, or distribute the Package, if you do not accept this license.
+
+(11) If your Modified Version has been derived from a Modified Version made by someone other than you, you are nevertheless required to ensure that your Modified Version complies with the requirements of this license.
+
+(12) This license does not grant you the right to use any trademark, service mark, tradename, or logo of the Copyright Holder.
+
+(13) This license includes the non-exclusive, worldwide, free-of-charge patent license to make, have made, use, offer to sell, sell, import and otherwise transfer the Package with respect to any patent claims licensable by the Copyright Holder that are necessarily infringed by the Package. If you institute patent litigation (including a cross-claim or counterclaim) against any party alleging that the Package constitutes direct or contributory patent infringement, then this Artistic License to you shall terminate on the date that such litigation is filed.
+
+(14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+
+
+

Creative Commons License - Attribution-NoDerivatives 4.0 International

+
+
+
+
By exercising the Licensed Rights (defined below), You accept and agree to be bound by the terms and conditions of this Creative Commons Attribution-NoDerivatives 4.0 International Public License ("Public License"). To the extent this Public License may be interpreted as a contract, You are granted the Licensed Rights in consideration of Your acceptance of these terms and conditions, and the Licensor grants You such rights in consideration of benefits the Licensor receives from making the Licensed Material available under these terms and conditions.
+
+Section 1 – Definitions.
+
+    Adapted Material means material subject to Copyright and Similar Rights that is derived from or based upon the Licensed Material and in which the Licensed Material is translated, altered, arranged, transformed, or otherwise modified in a manner requiring permission under the Copyright and Similar Rights held by the Licensor. For purposes of this Public License, where the Licensed Material is a musical work, performance, or sound recording, Adapted Material is always produced where the Licensed Material is synched in timed relation with a moving image.
+    Copyright and Similar Rights means copyright and/or similar rights closely related to copyright including, without limitation, performance, broadcast, sound recording, and Sui Generis Database Rights, without regard to how the rights are labeled or categorized. For purposes of this Public License, the rights specified in Section 2(b)(1)-(2) are not Copyright and Similar Rights.
+    Effective Technological Measures means those measures that, in the absence of proper authority, may not be circumvented under laws fulfilling obligations under Article 11 of the WIPO Copyright Treaty adopted on December 20, 1996, and/or similar international agreements.
+    Exceptions and Limitations means fair use, fair dealing, and/or any other exception or limitation to Copyright and Similar Rights that applies to Your use of the Licensed Material.
+    Licensed Material means the artistic or literary work, database, or other material to which the Licensor applied this Public License.
+    Licensed Rights means the rights granted to You subject to the terms and conditions of this Public License, which are limited to all Copyright and Similar Rights that apply to Your use of the Licensed Material and that the Licensor has authority to license.
+    Licensor means the individual(s) or entity(ies) granting rights under this Public License.
+    Share means to provide material to the public by any means or process that requires permission under the Licensed Rights, such as reproduction, public display, public performance, distribution, dissemination, communication, or importation, and to make material available to the public including in ways that members of the public may access the material from a place and at a time individually chosen by them.
+    Sui Generis Database Rights means rights other than copyright resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other essentially equivalent rights anywhere in the world.
+    You means the individual or entity exercising the Licensed Rights under this Public License. Your has a corresponding meaning.
+
+Section 2 – Scope.
+
+    License grant.
+        Subject to the terms and conditions of this Public License, the Licensor hereby grants You a worldwide, royalty-free, non-sublicensable, non-exclusive, irrevocable license to exercise the Licensed Rights in the Licensed Material to:
+            reproduce and Share the Licensed Material, in whole or in part; and
+            produce and reproduce, but not Share, Adapted Material.
+        Exceptions and Limitations. For the avoidance of doubt, where Exceptions and Limitations apply to Your use, this Public License does not apply, and You do not need to comply with its terms and conditions.
+        Term. The term of this Public License is specified in Section 6(a).
+        Media and formats; technical modifications allowed. The Licensor authorizes You to exercise the Licensed Rights in all media and formats whether now known or hereafter created, and to make technical modifications necessary to do so. The Licensor waives and/or agrees not to assert any right or authority to forbid You from making technical modifications necessary to exercise the Licensed Rights, including technical modifications necessary to circumvent Effective Technological Measures. For purposes of this Public License, simply making modifications authorized by this Section 2(a)(4) never produces Adapted Material.
+        Downstream recipients.
+            Offer from the Licensor – Licensed Material. Every recipient of the Licensed Material automatically receives an offer from the Licensor to exercise the Licensed Rights under the terms and conditions of this Public License.
+            No downstream restrictions. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, the Licensed Material if doing so restricts exercise of the Licensed Rights by any recipient of the Licensed Material.
+        No endorsement. Nothing in this Public License constitutes or may be construed as permission to assert or imply that You are, or that Your use of the Licensed Material is, connected with, or sponsored, endorsed, or granted official status by, the Licensor or others designated to receive attribution as provided in Section 3(a)(1)(A)(i).
+
+    Other rights.
+        Moral rights, such as the right of integrity, are not licensed under this Public License, nor are publicity, privacy, and/or other similar personality rights; however, to the extent possible, the Licensor waives and/or agrees not to assert any such rights held by the Licensor to the limited extent necessary to allow You to exercise the Licensed Rights, but not otherwise.
+        Patent and trademark rights are not licensed under this Public License.
+        To the extent possible, the Licensor waives any right to collect royalties from You for the exercise of the Licensed Rights, whether directly or through a collecting society under any voluntary or waivable statutory or compulsory licensing scheme. In all other cases the Licensor expressly reserves any right to collect such royalties.
+
+Section 3 – License Conditions.
+
+Your exercise of the Licensed Rights is expressly made subject to the following conditions.
+
+    Attribution.
+
+        If You Share the Licensed Material, You must:
+            retain the following if it is supplied by the Licensor with the Licensed Material:
+                identification of the creator(s) of the Licensed Material and any others designated to receive attribution, in any reasonable manner requested by the Licensor (including by pseudonym if designated);
+                a copyright notice;
+                a notice that refers to this Public License;
+                a notice that refers to the disclaimer of warranties;
+                a URI or hyperlink to the Licensed Material to the extent reasonably practicable;
+            indicate if You modified the Licensed Material and retain an indication of any previous modifications; and
+            indicate the Licensed Material is licensed under this Public License, and include the text of, or the URI or hyperlink to, this Public License.
+        For the avoidance of doubt, You do not have permission under this Public License to Share Adapted Material.
+        You may satisfy the conditions in Section 3(a)(1) in any reasonable manner based on the medium, means, and context in which You Share the Licensed Material. For example, it may be reasonable to satisfy the conditions by providing a URI or hyperlink to a resource that includes the required information.
+        If requested by the Licensor, You must remove any of the information required by Section 3(a)(1)(A) to the extent reasonably practicable.
+
+Section 4 – Sui Generis Database Rights.
+
+Where the Licensed Rights include Sui Generis Database Rights that apply to Your use of the Licensed Material:
+
+    for the avoidance of doubt, Section 2(a)(1) grants You the right to extract, reuse, reproduce, and Share all or a substantial portion of the contents of the database, provided You do not Share Adapted Material;
+    if You include all or a substantial portion of the database contents in a database in which You have Sui Generis Database Rights, then the database in which You have Sui Generis Database Rights (but not its individual contents) is Adapted Material; and
+    You must comply with the conditions in Section 3(a) if You Share all or a substantial portion of the contents of the database.
+
+For the avoidance of doubt, this Section 4 supplements and does not replace Your obligations under this Public License where the Licensed Rights include other Copyright and Similar Rights.
+
+Section 5 – Disclaimer of Warranties and Limitation of Liability.
+
+    Unless otherwise separately undertaken by the Licensor, to the extent possible, the Licensor offers the Licensed Material as-is and as-available, and makes no representations or warranties of any kind concerning the Licensed Material, whether express, implied, statutory, or other. This includes, without limitation, warranties of title, merchantability, fitness for a particular purpose, non-infringement, absence of latent or other defects, accuracy, or the presence or absence of errors, whether or not known or discoverable. Where disclaimers of warranties are not allowed in full or in part, this disclaimer may not apply to You.
+    To the extent possible, in no event will the Licensor be liable to You on any legal theory (including, without limitation, negligence) or otherwise for any direct, special, indirect, incidental, consequential, punitive, exemplary, or other losses, costs, expenses, or damages arising out of this Public License or use of the Licensed Material, even if the Licensor has been advised of the possibility of such losses, costs, expenses, or damages. Where a limitation of liability is not allowed in full or in part, this limitation may not apply to You.
+
+    The disclaimer of warranties and limitation of liability provided above shall be interpreted in a manner that, to the extent possible, most closely approximates an absolute disclaimer and waiver of all liability.
+
+Section 6 – Term and Termination.
+
+    This Public License applies for the term of the Copyright and Similar Rights licensed here. However, if You fail to comply with this Public License, then Your rights under this Public License terminate automatically.
+
+    Where Your right to use the Licensed Material has terminated under Section 6(a), it reinstates:
+        automatically as of the date the violation is cured, provided it is cured within 30 days of Your discovery of the violation; or
+        upon express reinstatement by the Licensor.
+    For the avoidance of doubt, this Section 6(b) does not affect any right the Licensor may have to seek remedies for Your violations of this Public License.
+    For the avoidance of doubt, the Licensor may also offer the Licensed Material under separate terms or conditions or stop distributing the Licensed Material at any time; however, doing so will not terminate this Public License.
+    Sections 1, 5, 6, 7, and 8 survive termination of this Public License.
+
+Section 7 – Other Terms and Conditions.
+
+    The Licensor shall not be bound by any additional or different terms or conditions communicated by You unless expressly agreed.
+    Any arrangements, understandings, or agreements regarding the Licensed Material not stated herein are separate from and independent of the terms and conditions of this Public License.
+
+Section 8 – Interpretation.
+
+    For the avoidance of doubt, this Public License does not, and shall not be interpreted to, reduce, limit, restrict, or impose conditions on any use of the Licensed Material that could lawfully be made without permission under this Public License.
+    To the extent possible, if any provision of this Public License is deemed unenforceable, it shall be automatically reformed to the minimum extent necessary to make it enforceable. If the provision cannot be reformed, it shall be severed from this Public License without affecting the enforceability of the remaining terms and conditions.
+    No term or condition of this Public License will be waived and no failure to comply consented to unless expressly agreed to by the Licensor.
+    Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority.
+
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 2.1

+
+
+
+
 Version 2.1, February 1999
+
+Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL.  It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users.
+
+This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below.
+
+When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things.
+
+To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it.
+
+For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights.
+
+We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library.
+
+To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others.
+
+Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license.
+
+Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs.
+
+When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library.
+
+We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances.
+
+For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License.
+
+In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system.
+
+Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library.
+
+The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you".
+
+A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables.
+
+The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".)
+
+"Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library.
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does.
+
+1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) The modified work must itself be a software library.
+    b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change.
+    c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License.
+    d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful.
+
+    (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices.
+
+Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy.
+
+This option is useful when you wish to copy part of the code of the Library into a program that is not a library.
+
+4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange.
+
+If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code.
+
+5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License.
+
+However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables.
+
+When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law.
+
+If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.)
+
+Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself.
+
+6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications.
+
+You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things:
+
+    a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.)
+    b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with.
+    c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution.
+    d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place.
+    e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy.
+
+For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute.
+
+7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above.
+    b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it.
+
+10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License.
+
+11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation.
+
+14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Libraries
+
+If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License).
+
+To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the library's name and an idea of what it does.
+Copyright (C) year  name of author
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2.1 of the License, or (at your option) any later version.
+
+This library is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with this library; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the library, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright interest in
+the library `Frob' (a library for tweaking knobs) written
+by James Random Hacker.
+
+signature of Ty Coon, 1 April 1990
+Ty Coon, President of Vice
+
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 3

+
+
+
+
Version 3, 29 June 2007
+
+Copyright © 2007 Free Software Foundation, Inc. <https://fsf.org/>
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+
+This version of the GNU Lesser General Public License incorporates the terms and conditions of version 3 of the GNU General Public License, supplemented by the additional permissions listed below.
+0. Additional Definitions.
+
+As used herein, “this License” refers to version 3 of the GNU Lesser General Public License, and the “GNU GPL” refers to version 3 of the GNU General Public License.
+
+“The Library” refers to a covered work governed by this License, other than an Application or a Combined Work as defined below.
+
+An “Application” is any work that makes use of an interface provided by the Library, but which is not otherwise based on the Library. Defining a subclass of a class defined by the Library is deemed a mode of using an interface provided by the Library.
+
+A “Combined Work” is a work produced by combining or linking an Application with the Library. The particular version of the Library with which the Combined Work was made is also called the “Linked Version”.
+
+The “Minimal Corresponding Source” for a Combined Work means the Corresponding Source for the Combined Work, excluding any source code for portions of the Combined Work that, considered in isolation, are based on the Application, and not on the Linked Version.
+
+The “Corresponding Application Code” for a Combined Work means the object code and/or source code for the Application, including any data and utility programs needed for reproducing the Combined Work from the Application, but excluding the System Libraries of the Combined Work.
+1. Exception to Section 3 of the GNU GPL.
+
+You may convey a covered work under sections 3 and 4 of this License without being bound by section 3 of the GNU GPL.
+2. Conveying Modified Versions.
+
+If you modify a copy of the Library, and, in your modifications, a facility refers to a function or data to be supplied by an Application that uses the facility (other than as an argument passed when the facility is invoked), then you may convey a copy of the modified version:
+
+    a) under this License, provided that you make a good faith effort to ensure that, in the event an Application does not supply the function or data, the facility still operates, and performs whatever part of its purpose remains meaningful, or
+    b) under the GNU GPL, with none of the additional permissions of this License applicable to that copy.
+
+3. Object Code Incorporating Material from Library Header Files.
+
+The object code form of an Application may incorporate material from a header file that is part of the Library. You may convey such object code under terms of your choice, provided that, if the incorporated material is not limited to numerical parameters, data structure layouts and accessors, or small macros, inline functions and templates (ten or fewer lines in length), you do both of the following:
+
+    a) Give prominent notice with each copy of the object code that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the object code with a copy of the GNU GPL and this license document.
+
+4. Combined Works.
+
+You may convey a Combined Work under terms of your choice that, taken together, effectively do not restrict modification of the portions of the Library contained in the Combined Work and reverse engineering for debugging such modifications, if you also do each of the following:
+
+    a) Give prominent notice with each copy of the Combined Work that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the Combined Work with a copy of the GNU GPL and this license document.
+    c) For a Combined Work that displays copyright notices during execution, include the copyright notice for the Library among these notices, as well as a reference directing the user to the copies of the GNU GPL and this license document.
+    d) Do one of the following:
+        0) Convey the Minimal Corresponding Source under the terms of this License, and the Corresponding Application Code in a form suitable for, and under terms that permit, the user to recombine or relink the Application with a modified version of the Linked Version to produce a modified Combined Work, in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.
+        1) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (a) uses at run time a copy of the Library already present on the user's computer system, and (b) will operate properly with a modified version of the Library that is interface-compatible with the Linked Version.
+    e) Provide Installation Information, but only if you would otherwise be required to provide such information under section 6 of the GNU GPL, and only to the extent that such information is necessary to install and execute a modified version of the Combined Work produced by recombining or relinking the Application with a modified version of the Linked Version. (If you use option 4d0, the Installation Information must accompany the Minimal Corresponding Source and Corresponding Application Code. If you use option 4d1, you must provide the Installation Information in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.)
+
+5. Combined Libraries.
+
+You may place library facilities that are a work based on the Library side by side in a single library together with other library facilities that are not Applications and are not covered by this License, and convey such a combined library under terms of your choice, if you do both of the following:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities, conveyed under the terms of this License.
+    b) Give prominent notice with the combined library that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+6. Revised Versions of the GNU Lesser General Public License.
+
+The Free Software Foundation may publish revised and/or new versions of the GNU Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library as you received it specifies that a certain numbered version of the GNU Lesser General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that published version or of any later version published by the Free Software Foundation. If the Library as you received it does not specify a version number of the GNU Lesser General Public License, you may choose any version of the GNU Lesser General Public License ever published by the Free Software Foundation.
+
+If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library.
+
+
+
+
+
+

GNU GENERAL PUBLIC LICENSE - Version 2

+
+
+
+
 Version 2, June 1991
+
+Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA
+
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too.
+
+When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things.
+
+To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it.
+
+For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights.
+
+We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software.
+
+Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations.
+
+Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all.
+
+The precise terms and conditions for copying, distribution and modification follow.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does.
+
+1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change.
+    b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License.
+    c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code.
+
+4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it.
+
+6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License.
+
+7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation.
+
+10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Programs
+
+If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms.
+
+To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the program's name and an idea of what it does.
+Copyright (C) yyyy  name of author
+
+This program is free software; you can redistribute it and/or
+modify it under the terms of the GNU General Public License
+as published by the Free Software Foundation; either version 2
+of the License, or (at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this when it starts in an interactive mode:
+
+Gnomovision version 69, Copyright (C) year name of author
+Gnomovision comes with ABSOLUTELY NO WARRANTY; for details
+type `show w'.  This is free software, and you are welcome
+to redistribute it under certain conditions; type `show c'
+for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright
+interest in the program `Gnomovision'
+(which makes passes at compilers) written
+by James Hacker.
+
+signature of Ty Coon, 1 April 1989
+Ty Coon, President of Vice
+
+
+
+
+
+

DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2

+
+
+
+
            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+                    Version 2, December 2004
+
+ Copyright (C) 2004 Sam Hocevar
+  14 rue de Plaisance, 75014 Paris, France
+ Everyone is permitted to copy and distribute verbatim or modified
+ copies of this license document, and changing it is allowed as long
+ as the name is changed.
+
+            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. You just DO WHAT THE FUCK YOU WANT TO.
+
+
+
+
+
+

License of Node.js

+
+
+
+
Node.js is licensed for use as follows:
+
+"""
+Copyright Node.js contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+This license applies to parts of Node.js originating from the
+https://github.com/joyent/node repository:
+
+"""
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+The Node.js license applies to all parts of Node.js that are not externally
+maintained libraries.
+
+The externally maintained libraries used by Node.js are:
+
+- Acorn, located at deps/acorn, is licensed as follows:
+  """
+    Copyright (C) 2012-2018 by various contributors (see AUTHORS)
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- Acorn plugins, located at deps/acorn-plugins, is licensed as follows:
+  """
+    Copyright (C) 2017-2018 by Adrian Heine
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- c-ares, located at deps/cares, is licensed as follows:
+  """
+    Copyright (c) 2007 - 2018, Daniel Stenberg with many contributors, see AUTHORS
+    file.
+
+    Copyright 1998 by the Massachusetts Institute of Technology.
+
+    Permission to use, copy, modify, and distribute this software and its
+    documentation for any purpose and without fee is hereby granted, provided that
+    the above copyright notice appear in all copies and that both that copyright
+    notice and this permission notice appear in supporting documentation, and that
+    the name of M.I.T. not be used in advertising or publicity pertaining to
+    distribution of the software without specific, written prior permission.
+    M.I.T. makes no representations about the suitability of this software for any
+    purpose.  It is provided "as is" without express or implied warranty.
+  """
+
+- ICU, located at deps/icu-small, is licensed as follows:
+  """
+    COPYRIGHT AND PERMISSION NOTICE (ICU 58 and later)
+
+    Copyright © 1991-2019 Unicode, Inc. All rights reserved.
+    Distributed under the Terms of Use in https://www.unicode.org/copyright.html.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of the Unicode data files and any associated documentation
+    (the "Data Files") or Unicode software and any associated documentation
+    (the "Software") to deal in the Data Files or Software
+    without restriction, including without limitation the rights to use,
+    copy, modify, merge, publish, distribute, and/or sell copies of
+    the Data Files or Software, and to permit persons to whom the Data Files
+    or Software are furnished to do so, provided that either
+    (a) this copyright and permission notice appear with all copies
+    of the Data Files or Software, or
+    (b) this copyright and permission notice appear in associated
+    Documentation.
+
+    THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
+    ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT OF THIRD PARTY RIGHTS.
+    IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
+    NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
+    DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+    DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+    TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+    PERFORMANCE OF THE DATA FILES OR SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale,
+    use or other dealings in these Data Files or Software without prior
+    written authorization of the copyright holder.
+
+    ---------------------
+
+    Third-Party Software Licenses
+
+    This section contains third-party software notices and/or additional
+    terms for licensed third-party software components included within ICU
+    libraries.
+
+    1. ICU License - ICU 1.8.1 to ICU 57.1
+
+    COPYRIGHT AND PERMISSION NOTICE
+
+    Copyright (c) 1995-2016 International Business Machines Corporation and others
+    All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, and/or sell copies of the Software, and to permit persons
+    to whom the Software is furnished to do so, provided that the above
+    copyright notice(s) and this permission notice appear in all copies of
+    the Software and that both the above copyright notice(s) and this
+    permission notice appear in supporting documentation.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
+    OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
+    HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY
+    SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER
+    RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
+    CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+    CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale, use
+    or other dealings in this Software without prior written authorization
+    of the copyright holder.
+
+    All trademarks and registered trademarks mentioned herein are the
+    property of their respective owners.
+
+    2. Chinese/Japanese Word Break Dictionary Data (cjdict.txt)
+
+     #     The Google Chrome software developed by Google is licensed under
+     # the BSD license. Other software included in this distribution is
+     # provided under other licenses, as set forth below.
+     #
+     #  The BSD License
+     #  http://opensource.org/licenses/bsd-license.php
+     #  Copyright (C) 2006-2008, Google Inc.
+     #
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     # modification, are permitted provided that the following conditions are met:
+     #
+     #  Redistributions of source code must retain the above copyright notice,
+     # this list of conditions and the following disclaimer.
+     #  Redistributions in binary form must reproduce the above
+     # copyright notice, this list of conditions and the following
+     # disclaimer in the documentation and/or other materials provided with
+     # the distribution.
+     #  Neither the name of  Google Inc. nor the names of its
+     # contributors may be used to endorse or promote products derived from
+     # this software without specific prior written permission.
+     #
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+     # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+     # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+     # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+     # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+     # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+     #
+     #
+     #  The word list in cjdict.txt are generated by combining three word lists
+     # listed below with further processing for compound word breaking. The
+     # frequency is generated with an iterative training against Google web
+     # corpora.
+     #
+     #  * Libtabe (Chinese)
+     #    - https://sourceforge.net/project/?group_id=1519
+     #    - Its license terms and conditions are shown below.
+     #
+     #  * IPADIC (Japanese)
+     #    - http://chasen.aist-nara.ac.jp/chasen/distribution.html
+     #    - Its license terms and conditions are shown below.
+     #
+     #  ---------COPYING.libtabe ---- BEGIN--------------------
+     #
+     #  /*
+     #   * Copyright (c) 1999 TaBE Project.
+     #   * Copyright (c) 1999 Pai-Hsiang Hsiao.
+     #   * All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the TaBE Project nor the names of its
+     #   *   contributors may be used to endorse or promote products derived
+     #   *   from this software without specific prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  /*
+     #   * Copyright (c) 1999 Computer Systems and Communication Lab,
+     #   *                    Institute of Information Science, Academia
+     #   *                    Sinica. All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the Computer Systems and Communication Lab
+     #   *   nor the names of its contributors may be used to endorse or
+     #   *   promote products derived from this software without specific
+     #   *   prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  Copyright 1996 Chih-Hao Tsai @ Beckman Institute,
+     #      University of Illinois
+     #  c-tsai4@uiuc.edu  http://casper.beckman.uiuc.edu/~c-tsai4
+     #
+     #  ---------------COPYING.libtabe-----END--------------------------------
+     #
+     #
+     #  ---------------COPYING.ipadic-----BEGIN-------------------------------
+     #
+     #  Copyright 2000, 2001, 2002, 2003 Nara Institute of Science
+     #  and Technology.  All Rights Reserved.
+     #
+     #  Use, reproduction, and distribution of this software is permitted.
+     #  Any copy of this software, whether in its original form or modified,
+     #  must include both the above copyright notice and the following
+     #  paragraphs.
+     #
+     #  Nara Institute of Science and Technology (NAIST),
+     #  the copyright holders, disclaims all warranties with regard to this
+     #  software, including all implied warranties of merchantability and
+     #  fitness, in no event shall NAIST be liable for
+     #  any special, indirect or consequential damages or any damages
+     #  whatsoever resulting from loss of use, data or profits, whether in an
+     #  action of contract, negligence or other tortuous action, arising out
+     #  of or in connection with the use or performance of this software.
+     #
+     #  A large portion of the dictionary entries
+     #  originate from ICOT Free Software.  The following conditions for ICOT
+     #  Free Software applies to the current dictionary as well.
+     #
+     #  Each User may also freely distribute the Program, whether in its
+     #  original form or modified, to any third party or parties, PROVIDED
+     #  that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear
+     #  on, or be attached to, the Program, which is distributed substantially
+     #  in the same form as set out herein and that such intended
+     #  distribution, if actually made, will neither violate or otherwise
+     #  contravene any of the laws and regulations of the countries having
+     #  jurisdiction over the User or the intended distribution itself.
+     #
+     #  NO WARRANTY
+     #
+     #  The program was produced on an experimental basis in the course of the
+     #  research and development conducted during the project and is provided
+     #  to users as so produced on an experimental basis.  Accordingly, the
+     #  program is provided without any warranty whatsoever, whether express,
+     #  implied, statutory or otherwise.  The term "warranty" used herein
+     #  includes, but is not limited to, any warranty of the quality,
+     #  performance, merchantability and fitness for a particular purpose of
+     #  the program and the nonexistence of any infringement or violation of
+     #  any right of any third party.
+     #
+     #  Each user of the program will agree and understand, and be deemed to
+     #  have agreed and understood, that there is no warranty whatsoever for
+     #  the program and, accordingly, the entire risk arising from or
+     #  otherwise connected with the program is assumed by the user.
+     #
+     #  Therefore, neither ICOT, the copyright holder, or any other
+     #  organization that participated in or was otherwise related to the
+     #  development of the program and their respective officials, directors,
+     #  officers and other employees shall be held liable for any and all
+     #  damages, including, without limitation, general, special, incidental
+     #  and consequential damages, arising out of or otherwise in connection
+     #  with the use or inability to use the program or any product, material
+     #  or result produced or otherwise obtained by using the program,
+     #  regardless of whether they have been advised of, or otherwise had
+     #  knowledge of, the possibility of such damages at any time during the
+     #  project or thereafter.  Each user will be deemed to have agreed to the
+     #  foregoing by his or her commencement of use of the program.  The term
+     #  "use" as used herein includes, but is not limited to, the use,
+     #  modification, copying and distribution of the program and the
+     #  production of secondary products from the program.
+     #
+     #  In the case where the program, whether in its original form or
+     #  modified, was distributed or delivered to or received by a user from
+     #  any person, organization or entity other than ICOT, unless it makes or
+     #  grants independently of ICOT any specific warranty to the user in
+     #  writing, such person, organization or entity, will also be exempted
+     #  from and not be held liable to the user for any such damages as noted
+     #  above as far as the program is concerned.
+     #
+     #  ---------------COPYING.ipadic-----END----------------------------------
+
+    3. Lao Word Break Dictionary Data (laodict.txt)
+
+     #  Copyright (c) 2013 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     # Project: http://code.google.com/p/lao-dictionary/
+     # Dictionary: http://lao-dictionary.googlecode.com/git/Lao-Dictionary.txt
+     # License: http://lao-dictionary.googlecode.com/git/Lao-Dictionary-LICENSE.txt
+     #              (copied below)
+     #
+     #  This file is derived from the above dictionary, with slight
+     #  modifications.
+     #  ----------------------------------------------------------------------
+     #  Copyright (C) 2013 Brian Eugene Wilson, Robert Martin Campbell.
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification,
+     #  are permitted provided that the following conditions are met:
+     #
+     #
+     # Redistributions of source code must retain the above copyright notice, this
+     #  list of conditions and the following disclaimer. Redistributions in
+     #  binary form must reproduce the above copyright notice, this list of
+     #  conditions and the following disclaimer in the documentation and/or
+     #  other materials provided with the distribution.
+     #
+     #
+     # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
+     # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     # OF THE POSSIBILITY OF SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    4. Burmese Word Break Dictionary Data (burmesedict.txt)
+
+     #  Copyright (c) 2014 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     #  This list is part of a project hosted at:
+     #    github.com/kanyawtech/myanmar-karen-word-lists
+     #
+     #  --------------------------------------------------------------------------
+     #  Copyright (c) 2013, LeRoy Benjamin Sharon
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification, are permitted provided that the following conditions
+     #  are met: Redistributions of source code must retain the above
+     #  copyright notice, this list of conditions and the following
+     #  disclaimer.  Redistributions in binary form must reproduce the
+     #  above copyright notice, this list of conditions and the following
+     #  disclaimer in the documentation and/or other materials provided
+     #  with the distribution.
+     #
+     #    Neither the name Myanmar Karen Word Lists, nor the names of its
+     #    contributors may be used to endorse or promote products derived
+     #    from this software without specific prior written permission.
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     #  CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     #  INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     #  MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     #  DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
+     #  BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+     #  EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+     #  TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+     #  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+     #  ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
+     #  TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
+     #  THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+     #  SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    5. Time Zone Database
+
+      ICU uses the public domain data and code derived from Time Zone
+    Database for its time zone support. The ownership of the TZ database
+    is explained in BCP 175: Procedure for Maintaining the Time Zone
+    Database section 7.
+
+     # 7.  Database Ownership
+     #
+     #    The TZ database itself is not an IETF Contribution or an IETF
+     #    document.  Rather it is a pre-existing and regularly updated work
+     #    that is in the public domain, and is intended to remain in the
+     #    public domain.  Therefore, BCPs 78 [RFC5378] and 79 [RFC3979] do
+     #    not apply to the TZ Database or contributions that individuals make
+     #    to it.  Should any claims be made and substantiated against the TZ
+     #    Database, the organization that is providing the IANA
+     #    Considerations defined in this RFC, under the memorandum of
+     #    understanding with the IETF, currently ICANN, may act in accordance
+     #    with all competent court orders.  No ownership claims will be made
+     #    by ICANN or the IETF Trust on the database or the code.  Any person
+     #    making a contribution to the database or code waives all rights to
+     #    future claims in that contribution or in the TZ Database.
+
+    6. Google double-conversion
+
+    Copyright 2006-2011, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- libuv, located at deps/uv, is licensed as follows:
+  """
+    libuv is licensed for use as follows:
+
+    ====
+    Copyright (c) 2015-present libuv project contributors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+    ====
+
+    This license applies to parts of libuv originating from the
+    https://github.com/joyent/libuv repository:
+
+    ====
+
+    Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+
+    ====
+
+    This license applies to all parts of libuv that are not externally
+    maintained libraries.
+
+    The externally maintained libraries used by libuv are:
+
+      - tree.h (from FreeBSD), copyright Niels Provos. Two clause BSD license.
+
+      - inet_pton and inet_ntop implementations, contained in src/inet.c, are
+        copyright the Internet Systems Consortium, Inc., and licensed under the ISC
+        license.
+
+      - stdint-msvc2008.h (from msinttypes), copyright Alexander Chemeris. Three
+        clause BSD license.
+
+      - pthread-fixes.c, copyright Google Inc. and Sony Mobile Communications AB.
+        Three clause BSD license.
+
+      - android-ifaddrs.h, android-ifaddrs.c, copyright Berkeley Software Design
+        Inc, Kenneth MacKay and Emergya (Cloud4all, FP7/2007-2013, grant agreement
+        n° 289016). Three clause BSD license.
+  """
+
+- llhttp, located at deps/llhttp, is licensed as follows:
+  """
+    This software is licensed under the MIT License.
+
+    Copyright Fedor Indutny, 2018.
+
+    Permission is hereby granted, free of charge, to any person obtaining a
+    copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to permit
+    persons to whom the Software is furnished to do so, subject to the
+    following conditions:
+
+    The above copyright notice and this permission notice shall be included
+    in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+    OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+    NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+    DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+    OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+    USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- OpenSSL, located at deps/openssl, is licensed as follows:
+  """
+    Copyright (c) 1998-2019 The OpenSSL Project.  All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    1. Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+    2. Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+
+    3. All advertising materials mentioning features or use of this
+    software must display the following acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
+
+    4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
+    endorse or promote products derived from this software without
+    prior written permission. For written permission, please contact
+    openssl-core@openssl.org.
+
+    5. Products derived from this software may not be called "OpenSSL"
+    nor may "OpenSSL" appear in their names without prior written
+    permission of the OpenSSL Project.
+
+    6. Redistributions of any form whatsoever must retain the following
+    acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
+
+    THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
+    EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+    PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
+    ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+    NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+    LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+    HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+    STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+    ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+    OF THE POSSIBILITY OF SUCH DAMAGE.
+    ====================================================================
+
+    This product includes cryptographic software written by Eric Young
+    (eay@cryptsoft.com).  This product includes software written by Tim
+    Hudson (tjh@cryptsoft.com).
+  """
+
+- Punycode.js, located at lib/punycode.js, is licensed as follows:
+  """
+    Copyright Mathias Bynens <https://mathiasbynens.be/>
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- V8, located at deps/v8, is licensed as follows:
+  """
+    This license applies to all parts of V8 that are not externally
+    maintained libraries.  The externally maintained libraries used by V8
+    are:
+
+      - PCRE test suite, located in
+        test/mjsunit/third_party/regexp-pcre/regexp-pcre.js.  This is based on the
+        test suite from PCRE-7.3, which is copyrighted by the University
+        of Cambridge and Google, Inc.  The copyright notice and license
+        are embedded in regexp-pcre.js.
+
+      - Layout tests, located in test/mjsunit/third_party/object-keys.  These are
+        based on layout tests from webkit.org which are copyrighted by
+        Apple Computer, Inc. and released under a 3-clause BSD license.
+
+      - Strongtalk assembler, the basis of the files assembler-arm-inl.h,
+        assembler-arm.cc, assembler-arm.h, assembler-ia32-inl.h,
+        assembler-ia32.cc, assembler-ia32.h, assembler-x64-inl.h,
+        assembler-x64.cc, assembler-x64.h, assembler-mips-inl.h,
+        assembler-mips.cc, assembler-mips.h, assembler.cc and assembler.h.
+        This code is copyrighted by Sun Microsystems Inc. and released
+        under a 3-clause BSD license.
+
+      - Valgrind client API header, located at src/third_party/valgrind/valgrind.h
+        This is released under the BSD license.
+
+      - The Wasm C/C++ API headers, located at third_party/wasm-api/wasm.{h,hh}
+        This is released under the Apache license. The API's upstream prototype
+        implementation also formed the basis of V8's implementation in
+        src/wasm/c-api.cc.
+
+    These libraries have their own licenses; we recommend you read them,
+    as their terms may differ from the terms below.
+
+    Further license information can be found in LICENSE files located in
+    sub-directories.
+
+    Copyright 2014, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- SipHash, located at deps/v8/src/third_party/siphash, is licensed as follows:
+  """
+    SipHash reference C implementation
+
+    Copyright (c) 2016 Jean-Philippe Aumasson <jeanphilippe.aumasson@gmail.com>
+
+    To the extent possible under law, the author(s) have dedicated all
+    copyright and related and neighboring rights to this software to the public
+    domain worldwide. This software is distributed without any warranty.
+  """
+
+- zlib, located at deps/zlib, is licensed as follows:
+  """
+    zlib.h -- interface of the 'zlib' general purpose compression library
+    version 1.2.11, January 15th, 2017
+
+    Copyright (C) 1995-2017 Jean-loup Gailly and Mark Adler
+
+    This software is provided 'as-is', without any express or implied
+    warranty.  In no event will the authors be held liable for any damages
+    arising from the use of this software.
+
+    Permission is granted to anyone to use this software for any purpose,
+    including commercial applications, and to alter it and redistribute it
+    freely, subject to the following restrictions:
+
+    1. The origin of this software must not be misrepresented; you must not
+    claim that you wrote the original software. If you use this software
+    in a product, an acknowledgment in the product documentation would be
+    appreciated but is not required.
+    2. Altered source versions must be plainly marked as such, and must not be
+    misrepresented as being the original software.
+    3. This notice may not be removed or altered from any source distribution.
+
+    Jean-loup Gailly        Mark Adler
+    jloup@gzip.org          madler@alumni.caltech.edu
+  """
+
+- npm, located at deps/npm, is licensed as follows:
+  """
+    The npm application
+    Copyright (c) npm, Inc. and Contributors
+    Licensed on the terms of The Artistic License 2.0
+
+    Node package dependencies of the npm application
+    Copyright (c) their respective copyright owners
+    Licensed on their respective license terms
+
+    The npm public registry at https://registry.npmjs.org
+    and the npm website at https://www.npmjs.com
+    Operated by npm, Inc.
+    Use governed by terms published on https://www.npmjs.com
+
+    "Node.js"
+    Trademark Joyent, Inc., https://joyent.com
+    Neither npm nor npm, Inc. are affiliated with Joyent, Inc.
+
+    The Node.js application
+    Project of Node Foundation, https://nodejs.org
+
+    The npm Logo
+    Copyright (c) Mathias Pettersson and Brian Hammond
+
+    "Gubblebum Blocky" typeface
+    Copyright (c) Tjarda Koster, https://jelloween.deviantart.com
+    Used with permission
+
+    --------
+
+    The Artistic License 2.0
+
+    Copyright (c) 2000-2006, The Perl Foundation.
+
+    Everyone is permitted to copy and distribute verbatim copies
+    of this license document, but changing it is not allowed.
+
+    Preamble
+
+    This license establishes the terms under which a given free software
+    Package may be copied, modified, distributed, and/or redistributed.
+    The intent is that the Copyright Holder maintains some artistic
+    control over the development of that Package while still keeping the
+    Package available as open source and free software.
+
+    You are always permitted to make arrangements wholly outside of this
+    license directly with the Copyright Holder of a given Package.  If the
+    terms of this license do not permit the full use that you propose to
+    make of the Package, you should contact the Copyright Holder and seek
+    a different licensing arrangement.
+
+    Definitions
+
+        "Copyright Holder" means the individual(s) or organization(s)
+        named in the copyright notice for the entire Package.
+
+        "Contributor" means any party that has contributed code or other
+        material to the Package, in accordance with the Copyright Holder's
+        procedures.
+
+        "You" and "your" means any person who would like to copy,
+        distribute, or modify the Package.
+
+        "Package" means the collection of files distributed by the
+        Copyright Holder, and derivatives of that collection and/or of
+        those files. A given Package may consist of either the Standard
+        Version, or a Modified Version.
+
+        "Distribute" means providing a copy of the Package or making it
+        accessible to anyone else, or in the case of a company or
+        organization, to others outside of your company or organization.
+
+        "Distributor Fee" means any fee that you charge for Distributing
+        this Package or providing support for this Package to another
+        party.  It does not mean licensing fees.
+
+        "Standard Version" refers to the Package if it has not been
+        modified, or has been modified only in ways explicitly requested
+        by the Copyright Holder.
+
+        "Modified Version" means the Package, if it has been changed, and
+        such changes were not explicitly requested by the Copyright
+        Holder.
+
+        "Original License" means this Artistic License as Distributed with
+        the Standard Version of the Package, in its current version or as
+        it may be modified by The Perl Foundation in the future.
+
+        "Source" form means the source code, documentation source, and
+        configuration files for the Package.
+
+        "Compiled" form means the compiled bytecode, object code, binary,
+        or any other form resulting from mechanical transformation or
+        translation of the Source form.
+
+    Permission for Use and Modification Without Distribution
+
+    (1)  You are permitted to use the Standard Version and create and use
+    Modified Versions for any purpose without restriction, provided that
+    you do not Distribute the Modified Version.
+
+    Permissions for Redistribution of the Standard Version
+
+    (2)  You may Distribute verbatim copies of the Source form of the
+    Standard Version of this Package in any medium without restriction,
+    either gratis or for a Distributor Fee, provided that you duplicate
+    all of the original copyright notices and associated disclaimers.  At
+    your discretion, such verbatim copies may or may not include a
+    Compiled form of the Package.
+
+    (3)  You may apply any bug fixes, portability changes, and other
+    modifications made available from the Copyright Holder.  The resulting
+    Package will still be considered the Standard Version, and as such
+    will be subject to the Original License.
+
+    Distribution of Modified Versions of the Package as Source
+
+    (4)  You may Distribute your Modified Version as Source (either gratis
+    or for a Distributor Fee, and with or without a Compiled form of the
+    Modified Version) provided that you clearly document how it differs
+    from the Standard Version, including, but not limited to, documenting
+    any non-standard features, executables, or modules, and provided that
+    you do at least ONE of the following:
+
+        (a)  make the Modified Version available to the Copyright Holder
+        of the Standard Version, under the Original License, so that the
+        Copyright Holder may include your modifications in the Standard
+        Version.
+
+        (b)  ensure that installation of your Modified Version does not
+        prevent the user installing or running the Standard Version. In
+        addition, the Modified Version must bear a name that is different
+        from the name of the Standard Version.
+
+        (c)  allow anyone who receives a copy of the Modified Version to
+        make the Source form of the Modified Version available to others
+        under
+
+            (i)  the Original License or
+
+            (ii)  a license that permits the licensee to freely copy,
+            modify and redistribute the Modified Version using the same
+            licensing terms that apply to the copy that the licensee
+            received, and requires that the Source form of the Modified
+            Version, and of any works derived from it, be made freely
+            available in that license fees are prohibited but Distributor
+            Fees are allowed.
+
+    Distribution of Compiled Forms of the Standard Version
+    or Modified Versions without the Source
+
+    (5)  You may Distribute Compiled forms of the Standard Version without
+    the Source, provided that you include complete instructions on how to
+    get the Source of the Standard Version.  Such instructions must be
+    valid at the time of your distribution.  If these instructions, at any
+    time while you are carrying out such distribution, become invalid, you
+    must provide new instructions on demand or cease further distribution.
+    If you provide valid instructions or cease distribution within thirty
+    days after you become aware that the instructions are invalid, then
+    you do not forfeit any of your rights under this license.
+
+    (6)  You may Distribute a Modified Version in Compiled form without
+    the Source, provided that you comply with Section 4 with respect to
+    the Source of the Modified Version.
+
+    Aggregating or Linking the Package
+
+    (7)  You may aggregate the Package (either the Standard Version or
+    Modified Version) with other packages and Distribute the resulting
+    aggregation provided that you do not charge a licensing fee for the
+    Package.  Distributor Fees are permitted, and licensing fees for other
+    components in the aggregation are permitted. The terms of this license
+    apply to the use and Distribution of the Standard or Modified Versions
+    as included in the aggregation.
+
+    (8) You are permitted to link Modified and Standard Versions with
+    other works, to embed the Package in a larger work of your own, or to
+    build stand-alone binary or bytecode versions of applications that
+    include the Package, and Distribute the result without restriction,
+    provided the result does not expose a direct interface to the Package.
+
+    Items That are Not Considered Part of a Modified Version
+
+    (9) Works (including, but not limited to, modules and scripts) that
+    merely extend or make use of the Package, do not, by themselves, cause
+    the Package to be a Modified Version.  In addition, such works are not
+    considered parts of the Package itself, and are not subject to the
+    terms of this license.
+
+    General Provisions
+
+    (10)  Any use, modification, and distribution of the Standard or
+    Modified Versions is governed by this Artistic License. By using,
+    modifying or distributing the Package, you accept this license. Do not
+    use, modify, or distribute the Package, if you do not accept this
+    license.
+
+    (11)  If your Modified Version has been derived from a Modified
+    Version made by someone other than you, you are nevertheless required
+    to ensure that your Modified Version complies with the requirements of
+    this license.
+
+    (12)  This license does not grant you the right to use any trademark,
+    service mark, tradename, or logo of the Copyright Holder.
+
+    (13)  This license includes the non-exclusive, worldwide,
+    free-of-charge patent license to make, have made, use, offer to sell,
+    sell, import and otherwise transfer the Package with respect to any
+    patent claims licensable by the Copyright Holder that are necessarily
+    infringed by the Package. If you institute patent litigation
+    (including a cross-claim or counterclaim) against any party alleging
+    that the Package constitutes direct or contributory patent
+    infringement, then this Artistic License to you shall terminate on the
+    date that such litigation is filed.
+
+    (14)  Disclaimer of Warranty:
+    THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS
+    IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR
+    NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL
+    LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL
+    BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+    DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF
+    ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+    --------
+  """
+
+- GYP, located at tools/gyp, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- inspector_protocol, located at tools/inspector_protocol, is licensed as follows:
+  """
+    // Copyright 2016 The Chromium Authors. All rights reserved.
+    //
+    // Redistribution and use in source and binary forms, with or without
+    // modification, are permitted provided that the following conditions are
+    // met:
+    //
+    //    * Redistributions of source code must retain the above copyright
+    // notice, this list of conditions and the following disclaimer.
+    //    * Redistributions in binary form must reproduce the above
+    // copyright notice, this list of conditions and the following disclaimer
+    // in the documentation and/or other materials provided with the
+    // distribution.
+    //    * Neither the name of Google Inc. nor the names of its
+    // contributors may be used to endorse or promote products derived from
+    // this software without specific prior written permission.
+    //
+    // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- jinja2, located at tools/inspector_protocol/jinja2, is licensed as follows:
+  """
+    Copyright (c) 2009 by the Jinja Team, see AUTHORS for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+
+        * The names of the contributors may not be used to endorse or
+          promote products derived from this software without specific
+          prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- markupsafe, located at tools/inspector_protocol/markupsafe, is licensed as follows:
+  """
+    Copyright (c) 2010 by Armin Ronacher and contributors.  See AUTHORS
+    for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms of the software as well
+    as documentation, with or without modification, are permitted provided
+    that the following conditions are met:
+
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+
+    * The names of the contributors may not be used to endorse or
+      promote products derived from this software without specific
+      prior written permission.
+
+    THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+    CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
+    NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+    OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+    EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+    PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+    PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+    NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+    SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+    DAMAGE.
+  """
+
+- cpplint.py, located at tools/cpplint.py, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- ESLint, located at tools/node_modules/eslint, is licensed as follows:
+  """
+    Copyright JS Foundation and other contributors, https://js.foundation
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- babel-eslint, located at tools/node_modules/babel-eslint, is licensed as follows:
+  """
+    Copyright (c) 2014-2016 Sebastian McKenzie <sebmck@gmail.com>
+
+    MIT License
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- gtest, located at test/cctest/gtest, is licensed as follows:
+  """
+    Copyright 2008, Google Inc.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+        * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- nghttp2, located at deps/nghttp2, is licensed as follows:
+  """
+    The MIT License
+
+    Copyright (c) 2012, 2014, 2015, 2016 Tatsuhiro Tsujikawa
+    Copyright (c) 2012, 2014, 2015, 2016 nghttp2 contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- node-inspect, located at deps/node-inspect, is licensed as follows:
+  """
+    Copyright Node.js contributors. All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+  """
+
+- large_pages, located at src/large_pages, is licensed as follows:
+  """
+     Copyright (C) 2018 Intel Corporation
+
+     Permission is hereby granted, free of charge, to any person obtaining a copy
+     of this software and associated documentation files (the "Software"),
+     to deal in the Software without restriction, including without limitation
+     the rights to use, copy, modify, merge, publish, distribute, sublicense,
+     and/or sell copies of the Software, and to permit persons to whom
+     the Software is furnished to do so, subject to the following conditions:
+
+     The above copyright notice and this permission notice shall be included
+     in all copies or substantial portions of the Software.
+
+     THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+     OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+     FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
+     THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES
+     OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+     ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
+     OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- caja, located at lib/internal/freeze_intrinsics.js, is licensed as follows:
+  """
+     Adapted from SES/Caja - Copyright (C) 2011 Google Inc.
+     Copyright (C) 2018 Agoric
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+  """
+
+- brotli, located at deps/brotli, is licensed as follows:
+  """
+    Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- HdrHistogram, located at deps/histogram, is licensed as follows:
+  """
+    The code in this repository code was Written by Gil Tene, Michael Barker,
+    and Matt Warren, and released to the public domain, as explained at
+    http://creativecommons.org/publicdomain/zero/1.0/
+
+    For users of this code who wish to consume it under the "BSD" license
+    rather than under the public domain or CC0 contribution text mentioned
+    above, the code found under this directory is *also* provided under the
+    following license (commonly referred to as the BSD 2-Clause License). This
+    license does not detract from the above stated release of the code into
+    the public domain, and simply represents an additional license granted by
+    the Author.
+
+    -----------------------------------------------------------------------------
+    ** Beginning of "BSD 2-Clause License" text. **
+
+     Copyright (c) 2012, 2013, 2014 Gil Tene
+     Copyright (c) 2014 Michael Barker
+     Copyright (c) 2014 Matt Warren
+     All rights reserved.
+
+     Redistribution and use in source and binary forms, with or without
+     modification, are permitted provided that the following conditions are met:
+
+     1. Redistributions of source code must retain the above copyright notice,
+        this list of conditions and the following disclaimer.
+
+     2. Redistributions in binary form must reproduce the above copyright notice,
+        this list of conditions and the following disclaimer in the documentation
+        and/or other materials provided with the distribution.
+
+     THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+     AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+     IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+     ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+     LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+     INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+     CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+     THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- node-heapdump, located at src/heap_utils.cc, is licensed as follows:
+  """
+    ISC License
+
+    Copyright (c) 2012, Ben Noordhuis <info@bnoordhuis.nl>
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    === src/compat.h src/compat-inl.h ===
+
+    ISC License
+
+    Copyright (c) 2014, StrongLoop Inc.
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- rimraf, located at lib/internal/fs/rimraf.js, is licensed as follows:
+  """
+    The ISC License
+
+    Copyright (c) Isaac Z. Schlueter and Contributors
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+    IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- uvwasi, located at deps/uvwasi, is licensed as follows:
+  """
+    MIT License
+
+    Copyright (c) 2019 Colin Ihrig and Contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in all
+    copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+    SOFTWARE.
+  """
+
+
+
+
+
+

MICROSOFT SOFTWARE LICENSE TERMS

+
+
+
+
MICROSOFT VISUAL STUDIO CODE
+
+These license terms are an agreement between you and Microsoft Corporation (or based on where you live, one of its affiliates). They apply to the software named above. The terms also apply to any Microsoft services or updates for the software, except to the extent those have different terms.
+
+IF YOU COMPLY WITH THESE LICENSE TERMS, YOU HAVE THE RIGHTS BELOW.
+
+    1. INSTALLATION AND USE RIGHTS.
+        a. General. You may use any number of copies of the software to develop and test your applications, including deployment within your internal corporate network.
+        b. Demo use. The uses permitted above include use of the software in demonstrating your applications.
+        c. Third Party Components. The software may include third party components with separate legal notices or governed by other agreements, as may be described in the ThirdPartyNotices file accompanying the software.
+        d. Extensions. The software gives you the option to download other Microsoft and third party software packages from our extension marketplace or package managers. Those packages are under their own licenses, and not this agreement. Microsoft does not distribute, license or provide any warranties for any of the third party packages. By accessing or using our extension marketplace, you agree to the extension marketplace terms located at https://aka.ms/vsmarketplace-ToU.
+    2. DATA.
+        a. Data Collection. The software may collect information about you and your use of the software, and send that to Microsoft. Microsoft may use this information to provide services and improve our products and services. You may opt-out of many of these scenarios, but not all, as described in the product documentation located at https://code.visualstudio.com/docs/supporting/faq#_how-to-disable-telemetry-reporting. There may also be some features in the software that may enable you and Microsoft to collect data from users of your applications. If you use these features, you must comply with applicable law, including providing appropriate notices to users of your applications together with Microsoft’s privacy statement. Our privacy statement is located at https://go.microsoft.com/fwlink/?LinkID=824704. You can learn more about data collection and use in the help documentation and our privacy statement. Your use of the software operates as your consent to these practices.
+        c. Processing of Personal Data. To the extent Microsoft is a processor or subprocessor of personal data in connection with the software, Microsoft makes the commitments in the European Union General Data Protection Regulation Terms of the Online Services Terms to all customers effective May 25, 2018, at https://go.microsoft.com/?linkid=9840733.
+    3. UPDATES. The software may periodically check for updates and download and install them for you. You may obtain updates only from Microsoft or authorized sources. Microsoft may need to update your system to provide you with updates. You agree to receive these automatic updates without any additional notice. Updates may not include or support all existing software features, services, or peripheral devices. If you do not want automatic updates, you may turn them off by following the instructions in the documentation at https://go.microsoft.com/fwlink/?LinkID=616397.
+    4. FEEDBACK. If you give feedback about the software to Microsoft, you give to Microsoft, without charge, the right to use, share and commercialize your feedback in any way and for any purpose. You will not give feedback that is subject to a license that requires Microsoft to license its software or documentation to third parties because we include your feedback in them. These rights survive this agreement.
+    5. SCOPE OF LICENSE. This license applies to the Visual Studio Code product. Source code for Visual Studio Code is available at https://github.com/Microsoft/vscode under the MIT license agreement. The software is licensed, not sold. This agreement only gives you some rights to use the software. Microsoft reserves all other rights. Unless applicable law gives you more rights despite this limitation, you may use the software only as expressly permitted in this agreement. In doing so, you must comply with any technical limitations in the software that only allow you to use it in certain ways. You may not
+        reverse engineer, decompile or disassemble the software, or otherwise attempt to derive the source code for the software except and solely to the extent required by third party licensing terms governing use of certain open source components that may be included in the software;
+        remove, minimize, block or modify any notices of Microsoft or its suppliers in the software;
+        use the software in any way that is against the law;
+        share, publish, rent or lease the software, or provide the software as a stand-alone offering for others to use.
+    6. SUPPORT SERVICES. Because this software is “as is,” we may not provide support services for it.
+    7. ENTIRE AGREEMENT. This agreement, and the terms for supplements, updates, Internet-based services and support services that you use, are the entire agreement for the software and support services.
+    8. EXPORT RESTRICTIONS. You must comply with all domestic and international export laws and regulations that apply to the software, which include restrictions on destinations, end-users, and end use. For further information on export restrictions, see https://www.microsoft.com/exporting.
+    9. APPLICABLE LAW. If you acquired the software in the United States, Washington law applies to interpretation of and claims for breach of this agreement, and the laws of the state where you live apply to all other claims. If you acquired the software in any other country, its laws apply.
+    10. CONSUMER RIGHTS; REGIONAL VARIATIONS. This agreement describes certain legal rights. You may have other rights, including consumer rights, under the laws of your state or country. Separate and apart from your relationship with Microsoft, you may also have rights with respect to the party from which you acquired the software. This agreement does not change those other rights if the laws of your state or country do not permit it to do so. For example, if you acquired the software in one of the below regions, or mandatory country law applies, then the following provisions apply to you:
+        a. Australia. You have statutory guarantees under the Australian Consumer Law and nothing in this agreement is intended to affect those rights.
+        b. Canada. If you acquired this software in Canada, you may stop receiving updates by turning off the automatic update feature, disconnecting your device from the Internet (if and when you re-connect to the Internet, however, the software will resume checking for and installing updates), or uninstalling the software. The product documentation, if any, may also specify how to turn off updates for your specific device or software.
+        c. Germany and Austria.
+            Warranty. The properly licensed software will perform substantially as described in any Microsoft materials that accompany the software. However, Microsoft gives no contractual guarantee in relation to the licensed software.
+            Limitation of Liability. In case of intentional conduct, gross negligence, claims based on the Product Liability Act, as well as, in case of death or personal or physical injury, Microsoft is liable according to the statutory law.
+
+        Subject to the foregoing clause (ii), Microsoft will only be liable for slight negligence if Microsoft is in breach of such material contractual obligations, the fulfillment of which facilitate the due performance of this agreement, the breach of which would endanger the purpose of this agreement and the compliance with which a party may constantly trust in (so-called "cardinal obligations"). In other cases of slight negligence, Microsoft will not be liable for slight negligence.
+    11. DISCLAIMER OF WARRANTY. The software is licensed “as-is.” You bear the risk of using it. Microsoft gives no express warranties, guarantees or conditions. To the extent permitted under your local laws, Microsoft excludes the implied warranties of merchantability, fitness for a particular purpose and non-infringement.
+
+    12. LIMITATION ON AND EXCLUSION OF DAMAGES. You can recover from Microsoft and its suppliers only direct damages up to U.S. $5.00. You cannot recover any other damages, including consequential, lost profits, special, indirect or incidental damages.
+
+    This limitation applies to (a) anything related to the software, services, content (including code) on third party Internet sites, or third party applications; and (b) claims for breach of contract, breach of warranty, guarantee or condition, strict liability, negligence, or other tort to the extent permitted by applicable law.
+
+    It also applies even if Microsoft knew or should have known about the possibility of the damages. The above limitation or exclusion may not apply to you because your state or country may not allow the exclusion or limitation of incidental, consequential or other damages.
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/advanced-tooling-generic.html b/docs/devonfw.github.io/1.0/ide.wiki/advanced-tooling-generic.html new file mode 100644 index 00000000..6dae91b2 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/advanced-tooling-generic.html @@ -0,0 +1,318 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Cross-Platform Tooling

+
+ +
+
+
+

Git Client

+
+
+

If you are looking for a git client that works cross-platform we recommend to use Fork.

+
+
+
+
+

Draw Diagrams

+
+
+

To draw diagrams for your project or for blueprints in devonfw, we recommend the following cross-platform tools:

+
+
+
    +
  • +

    draw.io is a powerful generic vector painting program (similar to visio). You can get a free open-source edition for your desktop from here.

    +
  • +
  • +

    ObjectAid is a nice and easy to use eclipse plugin that you can use to quickly create UML diagrams from existing code. While class-diagrams are supported for free, you need to buy a license if you want to use the other diagram types.

    +
  • +
  • +

    PlantUML is a great tool that can render UML diagrams from simple markup that can be easily managed in git or other version-control systems together with your code. Its simplicity allows branching and merging unlike other greedy binary UML data-formats.

    +
  • +
+
+
+
+
+

Browser Plugins

+
+
+

There are tons of helpful browser plugins out there and it might be a matter of personal taste what you like to have installed. However, as we are heavily using github we want to promote octotree. +In case you also work with ZenHub you might want to install the Zenhub Browser Extension.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/advanced-tooling-linux.html b/docs/devonfw.github.io/1.0/ide.wiki/advanced-tooling-linux.html new file mode 100644 index 00000000..3889d18d --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/advanced-tooling-linux.html @@ -0,0 +1,279 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Linux Tooling

+
+
+

There is nothing in this section so far. If you are a Linux user, please share your experience and provide your valuable hints.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/advanced-tooling-mac.html b/docs/devonfw.github.io/1.0/ide.wiki/advanced-tooling-mac.html new file mode 100644 index 00000000..bc62eaaa --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/advanced-tooling-mac.html @@ -0,0 +1,468 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

MacOS Tooling

+
+ +
+
+
+

Finder

+
+
+

If you want to open a terminal from a folder in Finder and automatically get your environment set properly for devonfw-ide you will find the perfect solution here.

+
+
+
+devonfw-ide integration in MacOS Finder +
+
+
+

So after installing (see below) the integration(s) provided here, you can easily open a terminal ready for your devonfw-ide:

+
+
+
    +
  • +

    right click ([control] + click) on file or folder in Finder

    +
  • +
  • +

    Expand the Quick-Actions sub-menu

    +
  • +
  • +

    Click on the desired action (e.g. Open devonfw-Terminal here)

    +
  • +
  • +

    Verify that you environment is properly initialized by invoking:

    +
    +
    +
    mvn -v
    +
    +
    +
  • +
+
+
+

To get this feature for macOS Terminal.app open Finder and run the workflow system/mac/terminal/Open_devonfw-Terminal_here.workflow (in ${DEVON_IDE_HOME}). For iTerm2.app (that can be installed from App Store) do the same with system/mac/iterm/Open_devonfw-iTerm_here.workflow.

+
+
+
+
+

Keyboard

+
+
+

Keyboard support is not an integration however, some users coming from other platforms may struggle with the way macOS deals with (external non-apple) keyboards. +So to make it short: if you are happy with your keyboard and shortcuts, you can skip all the following. +Otherwise, if you think that pressing keys like Home, End, etc. should just work as expected or pressing Alt Gr should allow you to type the special characters as printed on your German keyboard then here you will find a solution to your problems! +To get all automated you can just run the script system/mac/keyboard/install-mac-keyboard-support.sh (in ${DEVON_IDE_HOME}). +If you would like to understand what is going on, you want to customize the keyboard settings to your needs, or you want a keyboard layout other than German ISO, please read on.

+
+
+
+
+

Keyboard Layouts

+
+
+

Keyboard layouts allow a fine-grained mapping of each key on your keyboard to its resulting input character or behaviour. +They are macOS native features and do not need to have software running as a background service to make the keyboard mapping work (see Karabiner section below as an alternative). +They are provided as a so-called bundle (white lego brick icon). Like a macOS app this is a folder containing a Contents folder with a specific sub-folder structure. +In the Resources subfolder *.keylayout files are placed and define the exact mapping for the keyboard. +As an example we provide a Keyboard Layouts folder containing a bundle for a German keyboard mapping.

+
+
+

To install keyboard layouts simply double-click the bundle or copy it to ~/Library/Keyboard Layouts. +To actually use them go to System Preferences and select Keyboard. +Then, select the tab Input Sources. +With the + button you can add a keyboard layout for your daily usage with your Mac. +Please note that the keyboard layout shipped with devonfw-ide is called German-ISO and can be found in the Others section at the end of the list. +It can be used as an example or template, if you want to create your own layout.

+
+
+
+Keyboard Preferences / Input Sources +
+
+
+

When you have multiple mappings in place, on the top menu bar you will find a little icon next to the current time that allows you to switch between the keyboard layouts, which is very handy when you switch from your native MacBook keyboard to an external USB keyboard or vice versa. +Even for a pure MacOS geek this can be helpful in case a friend coming from Windows/Linux is supposed to type something on the Mac in a pair-programming session.

+
+
+

In our German keyboard mapping example you can use the keys like Alt Gr, etc. to type special characters as you would expect and as printed on your keyboard. +To make Pos1, End, etc. work properly across all apps please read on to the next section(s).

+
+
+

In case you would like to create your own keyboard layout you can of course edit the *.keylayout files in a text editor. +However, to make this much more comfortable, you can use the graphical editor tool Ukelele. +Besides, the app itself, the Ukelele dmg file, also contains a Documentation and a Resources folder. +The latter contains many keyboard layouts that you can use as a starting point.

+
+
+
+
+

Key Bindings

+
+
+

Still, various keyboard shortcuts might not work as expected for you. +Therefore, we provide you with an advanced configuration in the folder system/mac/keyboard/KeyBindings that you can copy to your ~/Library folder:

+
+
+
+
cd system/mac/keyboard/
+cp -r KeyBindings ~/Library
+
+
+
+

To make the changes work you need to log out and log in again or you can reboot. +After that, your Home (Pos1) and End buttons should work as expected including with selection via Shift and/or Command. +Also, you can use Command together with the left or right arrow key to move between words and combine it with Shift for selection. +As an example, for further customization you can press Command + < to type the unicode character «.

+
+
+

However, still some apps listen to keyboard events on a lower level and come with their own keyboard mappings. +In these apps you might still experience unexpected behaviour. +Solutions can be found in the following sub-sections.

+
+
+
+
+

Switch Control and Command

+
+
+

If you are used to windows or linux and get easily confused by the apple keyboard behaviour you might want to switch the Control and the Option key. +Open System Preferences and select Keyboard. +Then, in the first tab, click on the button Modifier Keys…​. +For every keyboard you can customize the behaviour of your modifier keys and therefore switch Control and Option as illustrated in the screenshot:

+
+
+
+Keyboard Preferences / Modifier Keys +
+
+
+

Programmers should now also disable the Control + Space shortcut for opening Spotlight Search, as otherwise this shortcut cannot be redefined in other apps like common IDEs.

+
+
+
+Keyboard Preferences / Shortcuts +
+
+
+
+
+

== Eclipse

+
+
+

In Eclipse, move and select by word as described above does not work. +Even worse, the most important shortcut does not work: Control + Space for code completion (content assist). +You can manually redefine the key bindings in Preferences under General > Keys. +However, with multiple IDE installations and workspaces this will quickly get tedious. +Therefore, you can Export and Import specific Preferences such as Keys Preferences to/from a *.epf (Eclipse PreFerences) file. +We have done all this for you so you can just import the file located in system/mac/keyboard/Eclipse/eclipse-mac-keybindings.epf into your Eclipse. +Happy coding.

+
+
+
+
+

Karabiner

+
+
+

If you want more dynamics and do not worry about an app that has to run in the background to make your keyboard work as you like (no relevant performance overhead), you can try Karabiner Elements. +This is a powerful tool to remap your keyboard shortcuts. +In the UI you can only directly create and edit Simple Modifications that are too limited for most use-cases. +However, using Complex Modifications you can do a lot of magic to customize the keyboard behaviour to your personal needs. +A key with any combination of modifiers can be mapped to any key with arbitrary modifiers. +This can also be bound to conditions based on the frontmost application or the keyboard model. +These complex modifications are configured as *.json files. +We have included a set with useful rules for external keyboards, programmer shortcuts, etc. +If you have Karabiner installed, you only need to copy the contents of the karabiner folder located in this directory to your ~/.config folder:

+
+
+
+
cd system/mac/keyboard/
+cp karabiner/assets/complex_modifications/*.json ~/.config/karabiner/assets/complex_modifications/
+
+
+
+

Now, if you open the Complex Modifications in the Karabiner app, you can click on the + Add rule button and will see these mappings in the pop up. +Select the rules you want to add (e.g. add all) and you are done. +Unlike other solutions, you can quickly tweak your keyboard without the need to log out and restart apps, which gives faster trial and error turnarounds. +Further, if you want to tweak your own configs, Karabiner comes with a secondary app called Karabiner-EventViewer that shows you the names of the keys, modifiers, and apps for the events you are triggering. +This is very helpful to get the config right.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/advanced-tooling-windows.html b/docs/devonfw.github.io/1.0/ide.wiki/advanced-tooling-windows.html new file mode 100644 index 00000000..a4e83d55 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/advanced-tooling-windows.html @@ -0,0 +1,558 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Windows Tooling

+
+ +
+
+
+

Installing software

+
+
+

The devon IDE already contains a lot of software. But if you need more, here are some ways to get it easily:

+
+
+
+
+

Chocolatey

+
+
+

Chocolatey is a repository for free and open source software similar to the repositories known from Linux, like apt, apk, pacman, …​

+
+
+
+
+

Winget

+
+
+

Microsoft is also working on a repository for Windows called winget. It is currently in alpha state, but is expected to be integrated in the upcoming Windows 11.

+
+
+
+
+

Integration into Windows-Explorer

+
+
+

After you have set up your devonfw-ide on a windows machine, +you already have windows-explorer integration out-of-the-box. +Just right-click on the folder you would like to open in a terminal and choose from the context menu:

+
+
+
    +
  • +

    Git Bash

    +
  • +
  • +

    Open devonfw cmd shell here

    +
  • +
  • +

    Open devonfw PowerShell here

    +
  • +
  • +

    Open devonfw Cygwin Bash here (only if cygwin was installed during setup)

    +
  • +
+
+
+
+
+

Tabs everywhere

+
+
+

Many people got used to tabs that have been introduced by all major browsers:

+
+
+
+tabs in firefox +
+
Figure 1. Tabs in Firefox
+
+
+

This nice feature can be added to many other tools.

+
+
+
+
+

Tabs for Windows Explorer

+
+
+

If you want to have tabs for windows explorer simply install Clover

+
+
+
+tabs in windows explorer +
+
Figure 2. Tabs in Windows Explorer
+
+
+
+
+

Tabs for SSH

+
+
+

If you want to have tabs for your SSH client Putty (or even better Kitty that comes with WinSCP integration) you simply install SuperPutty +BTW: Windows 10 has already an SSH client included.

+
+
+
+tabs for SSH sessions +
+
Figure 3. Tabs for SSH
+
+
+
+
+

Tabs for CMD

+
+
+

If you want to have tabs for your windows command-line you simply install ConEmu. Here you can also add other shells like Putty. +Also you should have a look at the new Windows Terminal which also supports tabs.

+
+
+
+tabs for windows shells +
+
Figure 4. Tabs for CMD
+
+
+

See integration to make ConEmu work flawless with devonfw-ide.

+
+
+
+
+

Windows Helpers

+
+ +
+
+
+

Handle passwords

+
+
+

Do you want complex passwords that differ for each account for security? Do you only want to remember a single password for simplicity? Do you want to have both? Then, you need to install KeePass right now.

+
+
+
+
+

Real text editor

+
+
+

A real developer needs a real text editor and not windows built in notepad. +The most common choice is Notepad++.

+
+
+
+
+

Real compression tool

+
+
+

Do you need to deal with ZIP files, TGZ, dpkg, etc.? Just install 7zip and forget about windows built-in ZIP support (that is buggy with long file paths, etc.).

+
+
+
+
+

Smarter clipboard

+
+
+

Do you want to paste something from the clipboard but meanwhile you had to copy something else? Just, one of the many things you can easily do with ditto.

+
+
+
+
+

PowerToys

+
+
+

Microsoft provides some extensions to improve the workflow in windows called PowerToys. They include tools like a file renamer, a way to order your windows on the screen, a color picker and more.

+
+
+
+
+

Sysinternals Tools

+
+
+

A real developer will quickly notice that windows built-in tools to analyze processes, network connections, autostarts, etc. are quite poor. So, what you really would like is the Sysinternals-Suite. You can make process-explorer your default task manager. Use autoruns to prevent nasty background things to be started automatically. Use tcpview to figure out which process is blocking port 8080, etc.

+
+
+
+
+

Cope with file locks

+
+
+

Did you ever fail to delete a file or directory that was locked by some process and you did not even know which one it was? +Then you might love IoBit Unlocker. +See also this article.

+
+
+
+
+ +
+
+

Are you used to symbolic and hard links in Linux? Do you have to work with Windows? Would you also like to have such links in Windows? Why not? Windows supports real links (not shortcuts like in other cases). +If you even want to have it integrated in windows explorer you might want to install linkshellextension. However, you might want to disable SmartMove in the configuration if you face strange performance issues when moving folders.

+
+
+
+
+

Linux

+
+
+

Install Cygwin and get your bash in windows with ssh-agent, awk, sed, tar, and all the tools you love (or hate). Windows 10 has already a Linux as an installable feature included: WSL and from Version 2004 on WSL2, which is a native Linux Kernel running on Windows (in a light weight VM).

+
+
+
+
+

X11

+
+
+

Do you want to connect via SSH and need to open an X11 app from the server? Do you want to see the GUI on your windows desktop? +No problem: Install VcXsrv.

+
+
+
+
+

Keyboard Freak

+
+
+

Are you a keyboard shortcut person? Do you want to have shortcuts for things like « and » ? +Then you should try AutoHotKey. +For the example (« and ») you can simply use this script to get started:

+
+
+
+
^<::Send {U+00AB}
+^+<::Send {U+00BB}
+
+
+
+

First, just press [ctrl][<] and [ctrl][>] ([ctrl][shift][<]). Next, create shortcuts to launch your IDE, to open your favorite tool, etc. +If you like a GUI to easily configure the scripts, that comes with a lot of extensions preinstalled, you should have a look at Ac’tive Aid.

+
+
+
+
+

Paint anywhere on your desktop

+
+
+

Do you collaborate sharing your screen, and want to mark a spot on top of what you see? Use Epic Pen to do just that.

+
+
+
+
+

Analyze graphs

+
+
+

Do you need to visualize complex graph structures? Convert them to Trivial Graph Format (.tgf), and run yEd to get an interactive visualization of your graph.

+
+
+
+
+

Up your screen capture game

+
+
+

Capture any part of your screen with a single click, directly upload to dropbox, or run a svn commit all in one go with Greenshot. Another screen capture tool where you can easily manage and edit your screenshots and also do screen recordings is Screenpresso.

+
+
+
+
+

Fast Search in Windows

+
+
+

Everything is a desktop search utility for Windows that can rapidly find files and folders by name.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/advanced-tooling.html b/docs/devonfw.github.io/1.0/ide.wiki/advanced-tooling.html new file mode 100644 index 00000000..adba128a --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/advanced-tooling.html @@ -0,0 +1,295 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ + +
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/build.html b/docs/devonfw.github.io/1.0/ide.wiki/build.html new file mode 100644 index 00000000..90aad881 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/build.html @@ -0,0 +1,290 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

build

+
+
+

The build commandlet is an abstraction of build systems like maven, gradle, yarn, npm, etc. +It will auto-detect your build-system (via existence of files like pom.xml, package.json, etc.). According to this detection, it will simply delegate to the according commandlet of the specific build system. If that build-system is not yet available it will be downloaded and installed automatically.

+
+
+

So devon build allows users to build any project without bothering about the build-system. Further specific build options can be configured per project. This makes devon build a universal part of every definition of done. Before pushing your changes, please always run the following command to verify the build:

+
+
+

devon build

+
+
+

You may also supply additional arguments as devon build «args». This will simply delegate these arguments to the detected build command (e.g. call mvn «args»).

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/cicdgen.html b/docs/devonfw.github.io/1.0/ide.wiki/cicdgen.html new file mode 100644 index 00000000..be1e434e --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/cicdgen.html @@ -0,0 +1,293 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

cicdgen

+
+
+

The cicdgen commandlet allows to install and setup cicdgen. +The arguments (devon cicdgen «args») are explained by the following table:

+
+
+
Usage of devon cicdgen
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup cicdgen (install and verify) +|update |update cicdgen (reinstall with @latest version and verify) +|java «args» |generate cicd files for the current devon4java project +|ng «args» |generate cicd files for the current devon4ng project +|node «args» |generate cicd files for the current devon4node project +|«args» |call cicdgen with the specified arguments +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/cli.html b/docs/devonfw.github.io/1.0/ide.wiki/cli.html new file mode 100644 index 00000000..be153a1f --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/cli.html @@ -0,0 +1,417 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

Devon CLI

+
+
+

The devonfw-ide is shipped with a central command devon. The setup will automatically register this command so it is available in any shell on your system. This page describes the Command Line Interface (CLI) of this command.

+
+
+
+
+

Devon

+
+
+

Without any argument the devon command will determine your DEVON_IDE_HOME and setup your environment variables automatically. In case you are not inside of a devonfw-ide folder the command will echo a message and do nothing.

+
+
+
+
[/]$ devon
+You are not inside a devon IDE installation: /
+[/]$ cd /projects/my-project/workspaces/test/my-git-repo
+[my-git-repo]$ devon
+devonfw-ide has environment variables have been set for /projects/my-project in workspace main
+[my-git-repo]$ echo $DEVON_IDE_HOME
+/projects/devon
+[my-git-repo]$ echo $JAVA_HOME
+/projects/my-project/software/java
+
+
+
+
+
+

Commandlets

+
+
+

The devon command supports a pluggable set of commandlets. Such commandlet is provided as first argument to the devon command and may take additional arguments:

+
+
+

devon «commandlet» [«arg»]*

+
+
+

Technically, a commandlet is a bash script located in $DEVON_IDE_HOME/scripts/command. So if you want to integrate another tool with devonfw-ide we are awaiting your pull-request. +Every commandlet takes the following generic arguments:

+
+
+
Generic arguments of every commandlet
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|-b or --batch |run in non-interactive mode (do not ask any questions). +|-q or --quiet |be quiet and avoid output. +|== == == == == == == == == == == =

+
+
+
+
+

Command-wrapper

+
+
+

For many commandlets the devon command acts as a wrapper. +Similar to mvnw or gradlew you can use it as a proxy command. +Therefore devon mvn clean install will be the same as mvn clean install. +The benefit when using devon as wrapper is that it will even work when the command (mvn, node, npm, etc.) is not on your PATH variable or even not yet installed. +We see the main benefit in this for writing portable scripts that you may commit to your git repository and that will then run everywhere and will lazily install the required tools on the fly. +In your daily usage you can and surely should avoid to always type devon as prefix to every command. +However, when you automate and want to avoid "command not found" errors, you can simply prefix the command with devon.

+
+
+
+
+

Commandlet overview

+
+
+

The following commandlets are currently available:

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/cobigen.html b/docs/devonfw.github.io/1.0/ide.wiki/cobigen.html new file mode 100644 index 00000000..6419f9e5 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/cobigen.html @@ -0,0 +1,294 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

cobigen

+
+
+

The cobigen commandlet allows to install, configure, and launch CobiGen via CLI. Calling devon cobigen «args» is more or less the same as calling cobigen «args» (or cg «args») but with the benefit that the version of CobiGen preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon cobigen «args») are explained by the following table:

+
+
+
Usage of devon cobigen
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup CobiGen (install and verify), configurable via COBIGEN_VERSION +|«args» |run CobiGen with the given arguments («args») +|== == == == == == == == == == == =

+
+
+

Please read the actual documentation of CobiGen CLI for further details.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/conf.html b/docs/devonfw.github.io/1.0/ide.wiki/conf.html new file mode 100644 index 00000000..9d3bbb1f --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/conf.html @@ -0,0 +1,313 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

conf

+
+
+

This folder contains configurations for your IDE:

+
+
+
File structure of the conf folder
+
+
/ conf
+├──/ .m2
+│  ├──/ repository
+│  │  ├──/ ant
+│  │  ├──/ ...
+│  │  └──/ zw
+│  ├── settings-security.xml
+│  └── settings.xml
+├──/ .sonar
+├──/ ...
+└── variables
+
+
+
+

The .m2 folder is used for configurations of maven. It contains the local repository folder used as cache for artifacts downloaded and installed by maven (see also maven repositories). +Further, there are two configuration files for maven:

+
+
+
    +
  • +

    settings.xml initialized from a template from your devonfw-ide [settings]. You may customize this to your needs (configuring HTTP proxies, credentials, or other user-specific settings). Secrets can be specified as $[«variable.name»] and will be prompted, encrypted and replaced automatically during the setup (unless in batch mode). Please note that this process is skipped in batch mode and also if you use the default settings URL (for simplicity of testing). To make use of this feature simply fork or copy the settings to your own git repo. In case your credentials have changed or you made a typo, you can simply redo this step by first moving your ${DEVON_IDE_HOME}/conf/.m2/settings.xml file to a temporary folder and then calling devon mvn setup.

    +
  • +
  • +

    settings-security.xml is auto-generated for you by devonfw-ide with a random password. This should make it easier for devonfw-ide users to use password encryption and never add passwords in plain text for better security.

    +
  • +
+
+
+

Finally, there is a file variables for the user-specific configuration of devonfw-ide.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/configuration.html b/docs/devonfw.github.io/1.0/ide.wiki/configuration.html new file mode 100644 index 00000000..3a7d24d5 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/configuration.html @@ -0,0 +1,369 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Configuration

+
+
+

The devonfw-ide aims to be highly configurable and flexible. The configuration of the devon command and environment variables takes place via devon.properties files. The following list shows these configuration files in the order they are loaded so files can override variables from files above in the list:

+
+
+
    +
  1. +

    build in defaults (for JAVA_VERSION, ECLIPSE_PLUGINS, etc.)

    +
  2. +
  3. +

    ~/devon.properties - user specific global defaults (on windows in %USERPROFILE%/devon.properties)

    +
  4. +
  5. +

    scripts/devon.properties - defaults provided by devonfw-ide. Never directly modify this file!

    +
  6. +
  7. +

    devon.properties - vendor variables for custom distributions of devonfw-ide-scripts, may e.g. tweak SETTINGS_PATH or predefine SETTINGS_URL.

    +
  8. +
  9. +

    settings/devon.properties (${SETTINGS_PATH}/devon.properties) - project specific configurations from settings.

    +
  10. +
  11. +

    workspaces/${WORKSPACE}/devon.properties - optional workspace specific configurations (especially helpful in projects using docker).

    +
  12. +
  13. +

    conf/devon.properties - user specific configurations (e.g. M2_REPO=~/.m2/repository). During setup this file is created by copying a template from ${SETTINGS_PATH}/devon/conf/devon.properties.

    +
  14. +
  15. +

    settings/projects/*.properties- properties to configure project checkout and import

    +
  16. +
+
+
+
+
+

devon.properties

+
+
+

The devon.properties files allow to define environment variables in a simple and OS independent way:

+
+
+
    +
  • +

    # comments begin with a hash sign (#) and are ignored

    +
  • +
  • +

    variable_name=variable_value with space etc.

    +
  • +
  • +

    variable_name=${predefined_variable}/folder_name

    +
    +

    variable values can refer to other variables that are already defined, which will be resolved to their value. You have to use ${…​} syntax to make it work on all platforms (never use %…​%, $…​, or $(…​) syntax in devon.properties files).

    +
    +
  • +
  • +

    export exported_variable=this value will be exported in bash, in windows CMD the export prefix is ignored

    +
  • +
  • +

    variable_name=

    +
    +

    this will unset the specified variable

    +
    +
  • +
  • +

    variable_name=~/some/path/and.file

    +
    +

    tilde is resolved to your personal home directory on any OS including windows.

    +
    +
  • +
  • +

    array_variable=(value1 value2 value3)

    +
    +

    This will only work properly in bash worlds but as no arrays are used in CMD world of devonfw-ide it does not hurt on windows.

    +
    +
  • +
  • +

    Please never surround values with quotes (var="value")

    +
  • +
  • +

    This format is similar to Java *.properties but does not support advanced features as unicode literals, multi-lined values, etc.

    +
  • +
+
+
+

In order to know what to configure, have a look at the available variables.

+
+
+

Please only tweak configurations that you need to change and take according responsibility. There is a price to pay for flexibility, which means you have to be careful what you do.

+
+
+

Further, you can configure maven via conf/settings.xml. To configure your IDE such as eclipse or vscode you can tweak the settings.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/configurator.html b/docs/devonfw.github.io/1.0/ide.wiki/configurator.html new file mode 100644 index 00000000..65a5fe4f --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/configurator.html @@ -0,0 +1,390 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

Configurator

+
+
+

The devonfw-ide maintains and includes a tool called devonfw-ide-configurator. This allows to synchronize and manage complex configurations. Initially it was written for Eclipse that stores its information in a .metadata folder of your workspace. Unfortunately it contains different file-formats (including XML as String value inside properties files), temporary data as well as important configurations with sometimes mixtures of project specific, developer specific, and UI specific settings. To make it short it is a mess. Instead of bashing on Eclipse we want to make this IDE more usable and created a way to manage important parts of such configuration structures.

+
+
+
+
+

How to use

+
+
+

The easiest way is that you do not care. When you launch the IDE of your choice (e.g. via devon eclipse, devon vscode or by running eclipse-main script), this will happen automatically. +If you want to explicitly update your workspace without launching the IDE, you can append ws-update (e.g. devon eclipse ws-update). Instead, if you want to launch your IDE without touching its configuration you can append run or start (e.g. devon eclipse run) which will omit this configurator.

+
+
+
+
+

How it works

+
+
+

For every tool managed with our configurator we distinguish the following file structures:

+
+
+
    +
  1. +

    The actual configuration location of the tool itself. We configure the tool to relocate this to a specific workspace (so by default workspaces/main/).

    +
  2. +
  3. +

    A configuration location with the configuration only used during the setup: $SETTINGS_PATH/«tool»/workspace/setup. Contains settings to setup a workspace. After that the user remains control over these settings.

    +
  4. +
  5. +

    A configuration location with the configuration used to update and override settings: $SETTINGS_PATH/«tool»/workspace/update. Contains settings that are overridden with every update and enforced for every team member.

    +
  6. +
+
+
+

The configurator will recursively traverse the directory structure of 2. and 3. together. For each located file «relative-path»/«file» it will create or update 1. according to the following rules:

+
+
+
    +
  • +

    If «relative-path»/«file» is present in 1. it will be loaded and used as basis.

    +
  • +
  • +

    Otherwise if «relative-path»/«file» is present in 2. it will be loaded and used as basis.

    +
  • +
  • +

    If «relative-path»/«file» is present in 3. it will be loaded and merged with the current basis.

    +
  • +
  • +

    Variables in the from ${«variable-name»} get resolved if «variable-name» is defined.

    +
  • +
  • +

    If this caused any change the result is written to «relative-path»/«file» in 1.

    +
  • +
+
+
+

In other words this means:

+
+
+
    +
  • +

    When your workspace configuration is initially created, 1. is empty. Hence, settings from 2. are used and merged with 3.

    +
  • +
  • +

    Settings in 2. are therefore used as initial defaults and suggestions but can be changed by the end-user (developer). Hence, use 2. for things such as themes, UI tweaks, etc. Once the workspace is configured 2. typically is not relevant anymore.

    +
  • +
  • +

    Settings in 3. are applied on every update. By default this happens every time you start your IDE, these settings are managed by the settings and in control configurator. If the user modifies such settings and reopens his IDE his changes are reverted. Hence, use 3. for things such as code-formatters, compiler options, paths to tools shipped with devonfw-ide, etc. that should be consistent and homogeneous for every team-member.

    +
  • +
+
+
+
+
+

How to customize

+
+
+

Many fundamental settings for Eclipse can be found in the sub-folder .metadata/.plugins/org.eclipse.core.runtime/.settings. Of course you could manually edit these settings with a text editor. However, this requires a lot of knowledge. As we want to provide a great user-experience with devonfw-ide you can also do the following:

+
+
+
    +
  • +

    Launch the IDE to configure (e.g. devon eclipse).

    +
  • +
  • +

    In case of a non-trivial tweak you may first create a backup copy of your workspace folder (for eclipse this would be workspaces/main/.metadata) to some temporary location.

    +
  • +
  • +

    Do the desired modification of the configuration via the GUI of your IDE (e.g. in Eclipse preferences).

    +
  • +
  • +

    Exit your IDE and wait till it is shutdown

    +
  • +
  • +

    Call ws-reverse command for your IDE (e.g. devon eclipse ws-reverse) - ensure you do this in the same workspace where you launched and tweaked the config (without intermediate cd commands).

    +
  • +
  • +

    Review the changes to your settings with a git and diff tool of your choice (e.g. call git diff).

    +
  • +
  • +

    If all looks as expected commit these changes and push them - consider using a feature branch and ask a colleague to test these changes before you apply this to the main branch.

    +
  • +
  • +

    In case you could not find the expected changes, you may have tweaked a property that is not yet managed. Therefore, you can try again with ws-reverse-add instead of ws-reverse (e.g. devon eclipse ws-reverse-add) but be aware to revert undesired changes. Be sure not to add undesired settings that should not be managed.

    +
  • +
  • +

    In case your changes are in an entirely new configuration file that is currently not managed, you can simply diff the current workspace folder with the previously created backup copy using a recursive diff tool (such as winmerge or maybe just diff -R). Once you figured out the relevant change from that diff, you can manually apply it to the according «ide»/workspace/update folder in your ide-settings git repository.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/devonfw-ide-advanced.html b/docs/devonfw.github.io/1.0/ide.wiki/devonfw-ide-advanced.html new file mode 100644 index 00000000..258e4301 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/devonfw-ide-advanced.html @@ -0,0 +1,749 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Advanced Features

+
+ +
+

Cross-Platform Tooling

+ +
+
+

Git Client

+
+

If you are looking for a git client that works cross-platform we recommend to use Fork.

+
+
+
+

Draw Diagrams

+
+

To draw diagrams for your project or for blueprints in devonfw, we recommend the following cross-platform tools:

+
+
+
    +
  • +

    draw.io is a powerful generic vector painting program (similar to visio). You can get a free open-source edition for your desktop from here.

    +
  • +
  • +

    ObjectAid is a nice and easy to use eclipse plugin that you can use to quickly create UML diagrams from existing code. While class-diagrams are supported for free, you need to buy a license if you want to use the other diagram types.

    +
  • +
  • +

    PlantUML is a great tool that can render UML diagrams from simple markup that can be easily managed in git or other version-control systems together with your code. Its simplicity allows branching and merging unlike other greedy binary UML data-formats.

    +
  • +
+
+
+
+

Browser Plugins

+
+

There are tons of helpful browser plugins out there and it might be a matter of personal taste what you like to have installed. However, as we are heavily using github we want to promote octotree. +In case you also work with ZenHub you might want to install the Zenhub Browser Extension.

+
+ +
+
+

Windows Tooling

+ +
+
+

Installing software

+
+

The devon IDE already contains a lot of software. But if you need more, here are some ways to get it easily:

+
+
+
+

Chocolatey

+
+

Chocolatey is a repository for free and open source software similar to the repositories known from Linux like apt, apk, pacman, …​

+
+
+
+

Winget

+
+

Microsoft is also working on a repository for Windows called winget. It is currently in alpha state, but is expected to be integrated in the upcoming Windows 11.

+
+
+
+

Integration into Windows-Explorer

+
+

After you have set up your devonfw-ide on a windows machine, +you already have windows-explorer integration out-of-the-box. +Just right-click on the folder you would like to open in a terminal and choose from the context menu:

+
+
+
    +
  • +

    Git Bash

    +
  • +
  • +

    Open devonfw cmd shell here

    +
  • +
  • +

    Open devonfw PowerShell here

    +
  • +
  • +

    Open devonfw Cygwin Bash here (only if cygwin was installed during setup)

    +
  • +
+
+
+
+

Tabs everywhere

+
+

Many people got used to tabs that have been introduced by all major browsers:

+
+
+
+tabs in firefox +
+
Figure 1. Tabs in Firefox
+
+
+

This nice feature can be added to many other tools.

+
+
+
+

Tabs for Windows Explorer

+
+

If you want to have tabs for windows explorer simply install Clover

+
+
+
+tabs in windows explorer +
+
Figure 2. Tabs in Windows Explorer
+
+
+
+

Tabs for SSH

+
+

If you want to have tabs for your SSH client Putty (or even better Kitty that comes with WinSCP integration) you simply install SuperPutty +BTW: Windows 10 has already an SSH client included.

+
+
+
+tabs for SSH sessions +
+
Figure 3. Tabs for SSH
+
+
+
+

Tabs for CMD

+
+

If you want to have tabs for your windows command-line you simply install ConEmu. Here you can also add other shells like Putty. +Also you should have a look at the new Windows Terminal which also supports tabs.

+
+
+
+tabs for windows shells +
+
Figure 4. Tabs for CMD
+
+
+

See integration to make ConEmu work flawless with devonfw-ide.

+
+
+
+

Windows Helpers

+ +
+
+

Handle passwords

+
+

Do you want complex passwords that differ for each account for security? Do you only want to remember a single password for simplicity? Do you want to have both? Then, you need to install KeePass right now.

+
+
+
+

Real text editor

+
+

A real developer needs a real text editor and not windows built in notepad. +The most common choice is Notepad++.

+
+
+
+

Real compression tool

+
+

Do you need to deal with ZIP files, TGZ, dpkg, etc.? Just install 7zip and forget about windows built-in ZIP support (that is buggy with long file paths, etc.).

+
+
+
+

Smarter clipboard

+
+

Do you want to paste something from the clipboard but meanwhile you had to copy something else? Just, one of the many things you can easily do with ditto.

+
+
+
+

PowerToys

+
+

Microsoft provides some extensions to improve the workflow in windows called PowerToys. They include tools like a file renamer, a way to order your windows on the screen, a color picker and more.

+
+
+
+

Sysinternals Tools

+
+

A real developer will quickly notice that windows built-in tools to analyze processes, network connections, autostarts, etc. are quite poor. So, what you really would like is the Sysinternals-Suite. You can make process-explorer your default task manager. Use autoruns to prevent nasty background things to be started automatically. Use tcpview to figure out which process is blocking port 8080, etc.

+
+
+
+

Cope with file locks

+
+

Did you ever fail to delete a file or directory that was locked by some process and you did not even know which one it was? +Then you might love IoBit Unlocker. +See also this article.

+
+
+
+ +
+

Are you used to symbolic and hard links in Linux? Do you have to work with Windows? Would you also like to have such links in Windows? Why not? Windows supports real links (not shortcuts like in other cases). +If you even want to have it integrated in windows explorer you might want to install linkshellextension. However, you might want to disable SmartMove in the configuration if you face strange performance issues when moving folders.

+
+
+
+

Linux

+
+

Install Cygwin and get your bash in windows with ssh-agent, awk, sed, tar, and all the tools you love (or hate). Windows 10 has already a Linux as an installable feature included: WSL and from Version 2004 on WSL2, which is a native Linux Kernel running on Windows (in a light weight VM).

+
+
+
+

X11

+
+

Do you want to connect via SSH and need to open an X11 app from the server? Do you want to see the GUI on your windows desktop? +No problem: Install VcXsrv.

+
+
+
+

Keyboard Freak

+
+

Are you a keyboard shortcut person? Do you want to have shortcuts for things like « and » ? +Then you should try AutoHotKey. +For the example (« and ») you can simply use this script to get started:

+
+
+
+
^<::Send {U+00AB}
+^+<::Send {U+00BB}
+
+
+
+

First, just press [ctrl][<] and [ctrl][>] ([ctrl][shift][<]). Next, create shortcuts to launch your IDE, to open your favorite tool, etc. +If you like a GUI to easily configure the scripts, that comes with a lot of extensions preinstalled, you should have a look at Ac’tive Aid.

+
+
+
+

Paint anywhere on your desktop

+
+

Do you collaborate sharing your screen, and want to mark a spot on top of what you see? Use Epic Pen to do just that.

+
+
+
+

Analyze graphs

+
+

Do you need to visualize complex graph structures? Convert them to Trivial Graph Format (.tgf), and run yEd to get an interactive visualization of your graph.

+
+
+
+

Up your screen capture game

+
+

Capture any part of your screen with a single click, directly upload to dropbox, or run a svn commit all in one go with Greenshot. Another screen capture tool that lets you easily manage and edit your screenshots and also record your screen is Screenpresso.

+
+
+
+

Fast Search in Windows

+
+

Everything is a desktop search utility for Windows that can rapidly find files and folders by name.

+
+ +
+
+

MacOS Tooling

+ +
+
+

Finder

+
+

If you want to open a terminal from a folder in Finder and automatically get your environment set properly for devonfw-ide you will find the perfect solution here.

+
+
+
+devonfw-ide integration in MacOS Finder +
+
+
+

So after installing (see below) the integration(s) provided here, you can easily open a terminal ready for your devonfw-ide:

+
+
+
    +
  • +

    right click ([control] + click) on file or folder in Finder

    +
  • +
  • +

    Expand the Quick-Actions sub-menu

    +
  • +
  • +

    Click on the desired action (e.g. Open devonfw-Terminal here)

    +
  • +
  • +

    Verify that your environment is properly initialized by invoking:

    +
    +
    +
    mvn -v
    +
    +
    +
  • +
+
+
+

To get this feature for macOS Terminal.app open Finder and run the workflow system/mac/terminal/Open_devonfw-Terminal_here.workflow (in ${DEVON_IDE_HOME}). For iTerm2.app (that can be installed from App Store) do the same with system/mac/iterm/Open_devonfw-iTerm_here.workflow.

+
+
+
+

Keyboard

+
+

Keyboard support is not an integration however, some users coming from other platforms may struggle with the way macOS deals with (external non-apple) keyboards. +So to make it short: if you are happy with your keyboard and shortcuts, you can skip all the following. +Otherwise, if you think that pressing keys like Home, End, etc. should just work as expected or pressing Alt Gr should allow you to type the special characters as printed on your German keyboard then here you will find a solution to your problems! +To get all automated you can just run the script system/mac/keyboard/install-mac-keyboard-support.sh (in ${DEVON_IDE_HOME}). +If you would like to understand what is going on, you want to customize the keyboard settings to your needs, or you want a keyboard layout other than German ISO, please read on.

+
+
+
+

Keyboard Layouts

+
+

Keyboard layouts allow a fine-grained mapping of each key on your keyboard to its resulting input character or behaviour. +They are macOS native features and do not need to have software running as a background service to make the keyboard mapping work (see Karabiner section below as an alternative). +They are provided as a so-called bundle (white lego brick icon). Like a macOS app this is a folder containing a Contents folder with a specific sub-folder structure. +In the Resources subfolder *.keylayout files are placed and define the exact mapping for the keyboard. +As an example we provide a Keyboard Layouts folder containing a bundle for a German keyboard mapping.

+
+
+

To install keyboard layouts simply double-click the bundle or copy it to ~/Library/Keyboard Layouts. +To actually use them go to System Preferences and select Keyboard. +Then, select the tab Input Sources. +With the + button you can add a keyboard layout for your daily usage with your Mac. +Please note that the keyboard layout shipped with devonfw-ide is called German-ISO and can be found in the Others section at the end of the list. +It can be used as an example or template, if you want to create your own layout.

+
+
+
+Keyboard Preferences / Input Sources +
+
+
+

When you have multiple mappings in place, on the top menu bar you will find a little icon next to the current time that allows you to switch between the keyboard layouts, which is very handy when you switch from your native MacBook keyboard to an external USB keyboard or vice versa. +Even for a pure MacOS geek this can be helpful in case a friend coming from Windows/Linux is supposed to type something on the Mac in a pair-programming session.

+
+
+

In our German keyboard mapping example you can use the keys like Alt Gr, etc. to type special characters as you would expect and as printed on your keyboard. +To make Pos1, End, etc. work properly across all apps please read on to the next section(s).

+
+
+

In case you would like to create your own keyboard layout you can of course edit the *.keylayout files in a text editor. +However, to make this much more comfortable, you can use the graphical editor tool Ukelele. +Besides the app itself, the Ukelele dmg file also contains a Documentation and a Resources folder. +The latter contains many keyboard layouts that you can use as a starting point.

+
+
+
+

Key Bindings

+
+

Still, various keyboard shortcuts might not work as expected for you. +Therefore, we provide you with an advanced configuration in the folder system/mac/keyboard/KeyBindings that you can copy to your ~/Library folder:

+
+
+
+
cd system/mac/keyboard/
+cp -r KeyBindings ~/Library
+
+
+
+

To make the changes work you need to log out and log in again or you can reboot. +After that, your Home (Pos1) and End buttons should work as expected including with selection via Shift and/or Command. +Also, you can use Command together with the left or right arrow key to move between words and combine it with Shift for selection. +As an example, for further customization you can press Command + < to type the unicode character «.

+
+
+

However, still some apps listen to keyboard events on a lower level and come with their own keyboard mappings. +In these apps you might still experience unexpected behaviour. +Solutions can be found in the following sub-sections.

+
+
+
+

Switch Control and Command

+
+

If you are used to windows or linux and get easily confused by the apple keyboard behaviour you might want to switch the Control and the Option key. +Open System Preferences and select Keyboard. +Then, in the first tab, click on the button Modifier Keys…​. +For every keyboard you can customize the behaviour of your modifier keys and therefore switch Control and Option as illustrated in the screenshot:

+
+
+
+Keyboard Preferences / Modifier Keys +
+
+
+

Programmers now should also disable that Control + Space is opening Spotlight Search as otherwise this shortcut can not be redefined in other apps like common IDEs.

+
+
+
+Keyboard Preferences / Shortcuts +
+
+
+
+

== Eclipse

+
+

In Eclipse, move and select by word as described above does not work. +Even worse, the most important shortcut does not work: Control + Space for code completion (content assist). +You can manually redefine the key bindings in Preferences under General > Keys. +However, with multiple IDE installations and workspaces this will quickly get tedious. +Therefore, you can Export and Import specific Preferences such as Keys Preferences to/from a *.epf (Eclipse PreFerences) file. +We have done all this for you so you can just import the file located in system/mac/keyboard/Eclipse/eclipse-mac-keybindings.epf into your Eclipse. +Happy coding.

+
+
+
+

Karabiner

+
+

If you want more dynamics and do not worry about an app that has to run in the background to make your keyboard work as you like (no relevant performance overhead), you can try Karabiner Elements. +This is a powerful tool to remap your keyboard shortcuts. +In the UI you can only directly create and edit Simple Modifications that are too limited for most use-cases. +However, using Complex Modifications you can do a lot of magic to customize the keyboard behaviour to your personal needs. +A key with any combination of modifiers can be mapped to any key with arbitrary modifiers. +This can also be bound to conditions based on the frontmost application or the keyboard model. +These complex modifications are configured as *.json files. +We have included a set with useful rules for external keyboards, programmer shortcuts, etc. +If you have Karabiner installed, you only need to copy the contents of the karabiner folder located in this directory to your ~/.config folder:

+
+
+
+
cd system/mac/keyboard/
+cp karabiner/assets/complex_modifications/*.json ~/.config/karabiner/assets/complex_modifications/
+
+
+
+

Now, if you open the Complex Modifications in the Karabiner app, you can click on the + Add rule button and will see these mappings in the pop up. +Select the rules you want to add (e.g. add all) and you are done. +Unlike other solutions, you can quickly tweak your keyboard without the need to log out and restart apps, which gives faster trial and error turnarounds. +Further, if you want to tweak your own configs, Karabiner comes with a secondary app called Karabiner-EventViewer that shows you the names of the keys, modifiers, and apps for the events you are triggering. +This is very helpful to get the config right.

+
+ +
+
+

Linux Tooling

+
+

There is nothing in this section so far. If you are a Linux user, please share your experience and provide your valuable hints.

+
+ +
+
+

Lombok

+
+

Even though not officially recommended by devon4j some projects want to use lombok in their project. +As this requires some tweaks for IDEs we do support you with this guide in case you want to use it.

+
+
+
+

Lombok in Eclipse

+
+

For eclipse there is a plugin to activate lombok support in eclipse. +We have this already configured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon eclipse add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for lombok based projects you only need to activate this plugin in your project specific settings in lombok.properties for eclipse (replace false with true for plugin_active).

+
+
+
+

Lombok for VS-Code

+
+

For VisualStudio Code there is an extension to activate lombok support in VS-Code. +We have this already preconfigured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon vscode add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for lombok based projects you only need to activate this plugin in your project specific settings in lombok.properties for vscode (replace false with true for plugin_active).

+
+
+
+

Lombok for IntelliJ

+
+

For IntelliJ there is a plugin to activate lombok support in IntelliJ. +Currently we have not yet configured or automated this in devonfw-ide. +Please contribute to change this. See issues #453 and #491.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/devonfw-ide-doc.html b/docs/devonfw.github.io/1.0/ide.wiki/devonfw-ide-doc.html new file mode 100644 index 00000000..cd323a66 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/devonfw-ide-doc.html @@ -0,0 +1,5744 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw-ide

+
+
+

The devonfw community +${project.version}, ${buildtime} +:description: comprehensive documentation of the devonfw-ide tool to manage your development tools. +:doctype: book +:toc: +:toc-title: Table of Contents +:idprefix: +:idseparator: - +:sectnums: +:reproducible: +:source-highlighter: rouge +:listing-caption: Listing +:chapter-label: +:partnums: +:imagesdir: ./

+
+
+

Introduction

+
+

devonfw provides a solution to building applications which combine best-in-class frameworks and libraries +as well as industry proven practices and code conventions. +It massively speeds up development, reduces risks and helps deliver better results.

+
+
+

This document contains the instructions for the tool devonfw-ide to set up and maintain your development tools including your favorite IDE (integrated development environment).

+
+ +
+

Features

+
+

Every developer needs great tools to work efficiently. Setting up these tools manually can be tedious and error-prone. Furthermore, some projects may require different versions and configurations of such tools. Especially configurations like code-formatters should be consistent within a project to avoid diff-wars.

+
+
+

The devonfw-ide will solve these issues. Here are the features you will find through devonfw-ide:

+
+
+
    +
  • +

    Efficient
    +Set up your IDE within minutes tailored for the requirements of your project.

    +
  • +
  • +

    Automated
    +Automate the setup and update, avoid manual steps and mistakes.

    +
  • +
  • +

    Simple
    +KISS (Keep It Small and Simple), no native installers that globally mess your OS or tool-integration that break with every release. Instead, use templates and simple shell scripts.

    +
  • +
  • +

    Configurable
    +You can change the configuration depending on your needs. Furthermore, the settings contain configuration templates for the different tools (see configurator).

    +
  • +
  • +

    Maintainable
    +For your project you should copy these settings to an own git repository that can be maintained and updated to manage the tool configurations during the project lifecycle. If you use GitHub or GitLab every developer can easily suggest changes and improvements to these settings via pull/merge requests, which is easier to manage with big teams.

    +
  • +
  • +

    Customizable
    +Do you need an additional tool you had never heard of before? Put it in the software folder of the structure. The devon CLI will then automatically add it to your PATH variable.
    +Further you can create your own commandlet for your additional tool. For closed-source tools you can create your own archive and distribute it to your team members as long as you care about the terms and licenses of these tools.

    +
  • +
  • +

    Multi-platform
    +It works on all major platforms: Windows, Mac and Linux.

    +
  • +
  • +

    Multi-tenancy
    +You can have several instances of the devonfw-ide "installed" on your machine for different projects with different tools, tool versions and configurations. You won’t need to set up any physical installation nor changing your operating system. "Installations" of devonfw-ide do not interfere with each other nor with other installed software.

    +
  • +
  • +

    Multiple Workspaces
    +It supports working with different workspaces on different branches. You can create and update new workspaces with a few clicks. You can see the workspace name in the title-bar of your IDE so you do not get confused and work on the right branch.

    +
  • +
  • +

    Free
    +The devonfw-ide is free just like everything from devonfw. See LICENSE for details.

    +
  • +
+
+
+
+

IDEs

+
+

We support the following IDEs:

+
+
+ +
+
+
+

Platforms

+
+

We support the following platforms:

+
+
+ +
+
+
+

Build-Systems

+
+

We support the following build-systems:

+
+
+ +
+
+

However, also other IDEs, platforms, or tools can be easily integrated as commandlet.

+
+
+
+

Motivation

+
+

TL;DR? Let's talk to developers in their own language. Here are some examples with devonfw-ide:

+
+
+
+
[/]$ devon
+You are not inside a devonfw-ide installation: /
+[/]$ cd /projects/devonfw
+[devonfw]$ mvn
+zsh: command not found: mvn
+[devonfw]$ devon
+devonfw-ide environment variables have been set for /projects/devonfw in workspace main
+[devonfw]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/devonfw/software/maven
+Java version: 1.8.0_191, vendor: Oracle Corporation, runtime: /projects/devonfw/software/java
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[devonfw]$ cd /projects/ide-test/workspaces/test/my-project
+[my-project]$ devon
+devonfw-ide environment variables have been set for /projects/ide-test in workspace test
+[my-project]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/ide-test/software/maven
+Java version: 11.0.2, vendor: Oracle Corporation, runtime: /projects/ide-test/software/jdk/Contents/Home
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[ide-test]$ devon eclipse
+launching Eclipse for workspace test...
+[my-project]$ devon build
+[INFO] Scanning for projects...
+...
+[INFO] BUILD SUCCESS
+
+
+
+

This was just a very simple demo of what devonfw-ide can do. For further details have a look at our CLI documentation.

+
+
+

Now you might ask:

+
+
+
    +
  • +

    But I use Windows/Linux/MacOS/… - it works on all platforms!

    +
  • +
  • +

    But how about Windows CMD or Power-Shell? - it works!

    +
  • +
  • +

    But what if I use cygwin or git-bash on windows? - it works!

    +
  • +
  • +

    But I love to use ConEmu or Commander - it works with full integration!

    +
  • +
  • +

    How about macOS Terminal or iTerm2? - it works with full integration!

    +
  • +
  • +

    But I use Zsh - it works!

    +
  • +
  • +

    …​? - it works!

    +
  • +
+
+
+

Wow! So let’s get started with download & setup.

+
+ +
+
+

Setup

+ +
+
+

Prerequisites

+
+

We try to make it as simple as possible for you. However, there are some minimal prerequisites:

+
+
+
    +
  • +

    You need to have a tool to extract *.tar.gz files (tar and gzip). On Windows before Version 10 (1803) use 7-zip. On all other platforms this comes out of the box.

    +
  • +
  • +

    You need to have git and curl installed.

    +
    +
      +
    • +

      On Windows you only need to download and install git for windows. This also ships with bash and curl.

      +
    • +
    • +

      On Linux you might need to install the above tools in case they are not present (e.g. sudo apt-get install git curl or sudo yum install git-core curl)

      +
    • +
    • +

      On MacOS you only need to download and install git for mac.

      +
    • +
    +
    +
  • +
+
+
+
+

Download

+
+

The latest release of devonfw-ide can be downloaded from here (You can find all releases in maven central).

+
+
+
+

Install

+
+

Create a central folder like C:\projects or /projects. Inside this folder, create a sub-folder for your new project such as my-project and extract the contents of the downloaded archive (devonfw-ide-scripts-*.tar.gz) to this new folder. Run the command setup in this folder (on windows double clicking on setup.bat). +That’s all. To get started read the usage.

+
+
+
+

Uninstall

+
+

To "uninstall" your devonfw-ide you only need to call the following command:

+
+
+
+
devon ide uninstall
+
+
+
+

Then you can delete the devonfw-ide top-level folder(s) (${DEVON_IDE_HOME}).

+
+
+

The devonfw-ide is designed to be non-invasive to your operating system and computer. Therefore it is not "installed" on your system in a classical way. Instead you just create a folder and extract the downloaded archive to it. You only have to install some specific prerequisites like git regularly in advance. All the other software remains locally in your devonfw-ide folder. However, there are the following exceptions (which are reverted by devon ide uninstall):

+
+
+
    +
  • +

    The devon command is copied to your home directory (~/.devon/devon)

    +
  • +
  • +

    The devon alias is added to your shell config (~/.bashrc and ~/.zshrc, search for alias devon="source ~/.devon/devon").

    +
  • +
  • +

    On Windows the devon.bat command is copied to your home directory (%USERPROFILE%\scripts\devon.bat)

    +
  • +
  • +

    On Windows this %USERPROFILE%\scripts directory is added to the PATH of your user.

    +
  • +
  • +

    The devonfw-ide will download a third party software to your ~/Downloads/devonfw-ide folder to reduce redundant storage. You have to delete this folder manually as we do not want to be responsible for data-loss in case users manually put files here.

    +
  • +
+
+
+
+

Testing SNAPSHOT releases

+
+

Whenever a story in devonfw-ide is completed by merging a PR, +our github actions will build a new SNAPSHOT release and on success deploy it to nexus on OSSRH. +You can therefore find the latest devonfw SNAPSHOT releases here. +Simply choose the latest SNAPSHOT version folder and then inside the *.tar.gz file for the latest version. +Once downloaded, you can proceed as with official releases (see install).

+
+
+

If you test the latest SNAPSHOT please also give feedback to bug or feature tickets to let us know if things are working or not. +Thanks for your testing, support and help to make devonfw better!

+
+
+
+
+
+

Usage

+ +
+

This section explains the usage of devonfw-ide according to your role:

+
+
+
    +
  • +

    Everybody should read and follow the usage for a developer.

    +
  • +
  • +

    In case you want to administrate devonfw-ide settings for your project, you should also read the usage for the ide-admin.

    +
  • +
+
+
+

Developer

+
+

As a developer you are supported to setup your IDE automated and fast while you can have a nice cup of coffee (after you provided settings-URL and accepted the license). +You only need the settings URL from your ide-admin. +Experienced developers can directly call setup «settings-URL». +Otherwise if you just call setup (e.g. by double-clicking it), you can enter it when you are prompted for Settings URL (using copy&paste to avoid typos).

+
+
+

Note: devonfw-ide supports autocompletion (since 2021.04.001). Currently this only works in bash (on windows use git bash). Simply type devon and hit [Tab] to get completion.

+
+
+
+

Update

+
+

To update your IDE (if instructed by your ide-admin), you only need to run the following command:

+
+
+
+
devon ide update
+
+
+
+

Please note that Windows uses file-locking, which can have ugly side-effects. +To be safe, you should have your IDE tools shut down before invoking the above update command. +E.g. if a tool needs to be updated, the old installation folder will be moved to a backup and the new version is installed on top. +If there are Windows file locks in place this can fail and mess up things. +You can still delete the according installation from your software folder and rerun devon ide update if you ran into this error.

+
+
+
+

Working with multiple workspaces

+
+

If you are working on different branches in parallel you typically want to use multiple workspaces.

+
+
+
    +
  1. +

    Go to the workspaces folder in your ${DEVON_IDE_HOME} and create a new folder with the name of your choice (e.g. release2.1).

    +
  2. +
  3. +

    Check out (git clone …​) the according projects and branch into that workspace folder.

    +
  4. +
  5. +

    Open a shell in that new workspace folder (cd to it) and according to your IDE run e.g. eclipse, vscode, or intellij to create your workspace and launch the IDE. You can also add the parameter create-script to the IDE commandlet in order to create a launch-script for your IDE.

    +
  6. +
+
+
+

You can have multiple instances of eclipse running for each workspace in parallel. To distinguish these instances you will find the workspace name in the title of eclipse.

+
+
+
+

Admin

+
+

You can easily customize and configure devonfw-ide for the requirements of your project. +In order to do so, you need to create your own project-specific settings git repository and provide the URL to all developers for the setup. +With tools such as gitlab, bitbucket or github every developer can easily propose changes and improvements. +However, we suggest that one team member is responsible to ensure that everything stays consistent and works. +We will call this person the ide-admin of your project.

+
+
+

The following are the suggested step-by-step instructions how an ide-admin should prepare devonfw-ide for his new project:

+
+
+
    +
  1. +

    Fork ide-settings to a git repository specific for your project (e.g. a new project in the gitlab of your production-line instance). In case you are using github, all you need to do is use the Fork button. In other cases simply create a new and empty git repository and clone this to your machine. Then add the default ide-settings as origin, fetch and pull from it:

    +
    +
    +
    git remote add upstream https://github.com/devonfw/ide-settings.git
    +git fetch upstream
    +git pull upstream master
    +git push
    +
    +
    +
    +

    Now you should have a full fork as a copy of the settings git repo with all its history that is ready for upstream merges.

    +
    +
  2. +
  3. +

    Study the structure of this git repository to understand where to find which configuration.

    +
  4. +
  5. +

    Study the configuration and understand that general settings can be tweaked in the toplevel devon.properties file of your settings git repository.

    +
  6. +
  7. +

    Configure the tools and their versions for your project. Here is an example:

    +
    +
    +
    DEVON_IDE_TOOLS=(java mvn eclipse)
    +ECLIPSE_VERSION=2020-06
    +##use e.g. 8u242b08 for Java 8
    +#JAVA_VERSION=8u242b08
    +JAVA_VERSION=11.0.5_10
    +MAVEN_VERSION=3.6.2
    +
    +
    +
    +

    This way you will take over control of the tools and their versions for every developer in your project team and ensure that things get reproducible.

    +
    +
  8. +
  9. +

    In case you need a proprietary or unsupported tool, you can study how to include custom tools.

    +
  10. +
  11. +

    In case you have very restrictive policies about downloading tools from the internet, you can create and configure a software repository for your project or company.

    +
  12. +
  13. +

    Some of the tools (especially the actual IDEs) allow extensions via plugins. You can customize them to your needs for eclipse, VS code, or intelliJ.

    +
  14. +
  15. +

    In your settings git repository you will find a projects folder. Here you will find configurations files for every git project relevant for your actual project. Feel free to create new projects for your needs and delete the devonfw specific default projects. The projects documentation will explain you how to do this.

    +
  16. +
  17. +

    For every IDE you will also find an according folder in your settings git repository. Here are the individual configuration settings for that IDE. You can change them by directly editing the according configuration files directly with a text-editor in your settings git repository. However, this is a really complex way and will take you a lot of time to find the right file and property to tweak for your actual need. Instead we suggest to study +how to customize IDE specific settings.

    +
  18. +
  19. +

    You may also create new sub-folders in your settings git repository and put individual things according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth to share in your team. However, to share and maintain knowledge we recommend to use a wiki instead.

    +
  20. +
  21. +

    You may want to customize the Eclipse spellchecker dictionary for your project and your language.

    +
  22. +
+
+
+

All described in the above steps (except the first one) can be used to manage and update the configuration during the project lifecycle. +However, when you have done changes especially in a larger project, please consider the following best-practices to avoid that a large team gets blocked by a non-functional IDE:

+
+
+
    +
  • +

    Commit your changes to a feature-branch.

    +
  • +
  • +

    First test the changes yourself.

    +
  • +
  • +

    If all works as expected, pick a pilot user of the team to test the changes from the feature branch (go to settings folder, git fetch, git checkout -t origin/feature/«name», devon ide update).

    +
  • +
  • +

    Only after that works well for a couple of days, inform the entire team to update.

    +
  • +
+
+
+
+

Announce changes to your team

+
+

In order to roll out the perfectly configured devonfw-ide to your project initially or when new members join, you only have to provide the Settings URL to the developers of your team. +You can also provide a specific branch with Settings URL#branch to use variations of common settings or to test new settings before making them public to the team.

+
+
+

After you changed and tested your settings git repository (main branch), you only need to announce this to your developers (e.g. via email or some communication tool) so that they can run devon ide update and automatically get up-to-date with the latest changes (see update).

+
+
+

In case you want to go to a new version of devonfw-ide itself, developers have to call devon ide update scripts.

+
+
+ +
+
+

Configuration

+
+

The devonfw-ide aims to be highly configurable and flexible. The configuration of the devon command and environment variables takes place via devon.properties files. The following list shows these configuration files in the order they are loaded so files can override variables from files above in the list:

+
+
+
    +
  1. +

    build in defaults (for JAVA_VERSION, ECLIPSE_PLUGINS, etc.)

    +
  2. +
  3. +

    ~/devon.properties - user specific global defaults (on windows in %USERPROFILE%/devon.properties)

    +
  4. +
  5. +

    scripts/devon.properties - defaults provided by devonfw-ide. Never directly modify this file!

    +
  6. +
  7. +

    devon.properties - vendor variables for custom distributions of devonfw-ide-scripts, may e.g. tweak SETTINGS_PATH or predefine SETTINGS_URL.

    +
  8. +
  9. +

    settings/devon.properties (${SETTINGS_PATH}/devon.properties) - project specific configurations from settings.

    +
  10. +
  11. +

    workspaces/${WORKSPACE}/devon.properties - optional workspace specific configurations (especially helpful in projects using docker).

    +
  12. +
  13. +

    conf/devon.properties - user specific configurations (e.g. M2_REPO=~/.m2/repository). During setup this file is created by copying a template from ${SETTINGS_PATH}/devon/conf/devon.properties.

    +
  14. +
  15. +

    settings/projects/*.properties- properties to configure project checkout and import

    +
  16. +
+
+
+
+

devon.properties

+
+

The devon.properties files allow to define environment variables in a simple and OS independent way:

+
+
+
    +
  • +

    # comments begin with a hash sign (#) and are ignored

    +
  • +
  • +

    variable_name=variable_value with space etc.

    +
  • +
  • +

    variable_name=${predefined_variable}/folder_name

    +
    +

    variable values can refer to other variables that are already defined, which will be resolved to their value. You have to use ${…​} syntax to make it work on all platforms (never use %…​%, $…​, or $(…​) syntax in devon.properties files).

    +
    +
  • +
  • +

    export exported_variable=this value will be exported in bash, in windows CMD the export prefix is ignored

    +
  • +
  • +

    variable_name=

    +
    +

    this will unset the specified variable

    +
    +
  • +
  • +

    variable_name=~/some/path/and.file

    +
    +

    tilde is resolved to your personal home directory on any OS including windows.

    +
    +
  • +
  • +

    array_variable=(value1 value2 value3)

    +
    +

    This will only work properly in bash worlds but as no arrays are used in CMD world of devonfw-ide it does not hurt on windows.

    +
    +
  • +
  • +

    Please never surround values with quotes (var="value")

    +
  • +
  • +

    This format is similar to Java *.properties but does not support advanced features such as unicode literals, multi-line values, etc.

    +
  • +
+
+
+

In order to know what to configure, have a look at the available variables.

+
+
+

Please only tweak configurations that you need to change and take according responsibility. There is a price to pay for flexibility, which means you have to be careful what you do.

+
+
+

Further, you can configure maven via conf/settings.xml. To configure your IDE such as eclipse or vscode you can tweak the settings.

+
+
+ +
+
+

Variables

+
+

The devonfw-ide defines a set of standard variables to your environment for configuration via variables[.bat] files. +These environment variables are described by the following table. +Those variables printed bold are also exported in your shell (except for windows CMD that does not have such concept). Variables with the value - are not set by default but may be set via configuration to override defaults. +Please note that we are trying to minimize any potential side-effect from devonfw-ide to the outside world by reducing the number of variables and only exporting those that are required.

+
+
+
Variables of devonfw-ide
+

|== == == == == == == == == == == = +|Variable|Value|Meaning +|DEVON_IDE_HOME|e.g. /projects/my-project|The top level directory of your devonfw-ide structure. +|PATH|$PATH:$DEVON_IDE_HOME/software/java:…​|You system path is adjusted by devon command. +|DEVON_HOME_DIR|~|The platform independent home directory of the current user. In some edge-cases (e.g. in cygwin) this differs from ~ to ensure a central home directory for the user on a single machine in any context or environment. +|DEVON_IDE_TOOLS|(java mvn node npm)|List of tools that should be installed and upgraded by default for your current IDE. +|DEVON_IDE_CUSTOM_TOOLS|-|List of custom tools that should be installed additionally. See software for further details. +|DEVON_CREATE_START_SCRIPTS|(eclipse vscode)|List of IDEs that shall be used by developers in the project and therefore start-scripts are created on setup. +|DEVON_OLD_PATH|…​|A "backup" of PATH before it was extended by devon to allow recovering it. Internal variable that should never be set or tweaked. +|WORKSPACE|main|The workspace you are currently in. Defaults to main if you are not inside a workspace. Never touch this variable in any variables file. +|WORKSPACE_PATH|$DEVON_IDE_HOME/workspaces/$WORKSPACE|Absolute path to current workspace. Never touch this variable in any variables file. +|JAVA_HOME|$DEVON_IDE_HOME/software/java|Path to JDK +|SETTINGS_PATH|$DEVON_IDE_HOME/settings|Path to your settings. To keep oasp4j-ide legacy behaviour set this to $DEVON_IDE_HOME/workspaces/main/development/settings. +|M2_REPO|$DEVON_IDE_HOME/conf/.m2/repository|Path to your local maven repository. For projects without high security demands, you may change this to the maven default ~/.m2/repository and share your repository among multiple projects. +|MAVEN_HOME|$DEVON_IDE_HOME/software/maven|Path to Maven +|MAVEN_OPTS|-Xmx512m -Duser.home=$DEVON_IDE_HOME/conf|Maven options +|DEVON_SOFTWARE_REPOSITORY|-|Project specific or custom software-repository. 
+|DEVON_SOFTWARE_PATH|-|Globally shared user-specific local software installation location. +|ECLIPSE_VMARGS|-Xms128M -Xmx768M -XX:MaxPermSize=256M|JVM options for Eclipse +|deprecated: ECLIPSE_PLUGINS|-|Array with "feature groups" and "update site URLs" to customize required eclipse plugins. Deprecated - see Eclipse plugins. +|«TOOL»_VERSION|-|The version of the tool «TOOL» to install and use (e.g. ECLIPSE_VERSION or MAVEN_VERSION). +|EXTRA_JAVA_VERSION|-|An additional (newer) version of java that will be used to run java-based IDEs (e.g. eclipse or intellij). +|«TOOL»_BUILD_OPTS|e.g.clean install|The arguments provided to the build-tool «TOOL» in order to run a build. +|«TOOL»_RELEASE_OPTS|e.g.clean deploy -Dchangelist= -Pdeploy|The arguments provided to the build-tool «TOOL» in order to perform a release build. +|DEVON_IDE_TRACE||If value is not an empty string, the devonfw-ide scripts will trace each script line executed. For bash two lines output: before and again after expansion. ATTENTION: This is not a regular variable working via devon.properties. Instead manually do export DEVON_IDE_TRACE=true in bash or DEVON_IDE_TRACE=true in windows CMD before running a devon command to get a trace log that you can provide to experts in order to trace down a bug and see what went wrong. +|== == == == == == == == == == == =

+
+
+ +
+
+

Devon CLI

+
+

The devonfw-ide is shipped with a central command devon. The setup will automatically register this command so it is available in any shell on your system. This page describes the Command Line Interface (CLI) of this command.

+
+
+
+

Devon

+
+

Without any argument the devon command will determine your DEVON_IDE_HOME and setup your environment variables automatically. In case you are not inside of a devonfw-ide folder the command will echo a message and do nothing.

+
+
+
+
[/]$ devon
+You are not inside a devon IDE installation: /
+[/]$ cd /projects/my-project/workspaces/test/my-git-repo
+[my-git-repo]$ devon
+devonfw-ide environment variables have been set for /projects/my-project in workspace main
+[my-git-repo]$ echo $DEVON_IDE_HOME
+/projects/my-project
+[my-git-repo]$ echo $JAVA_HOME
+/projects/my-project/software/java
+
+
+
+
+

Commandlets

+
+

The devon command supports a pluggable set of commandlets. Such commandlet is provided as first argument to the devon command and may take additional arguments:

+
+
+

devon «commandlet» [«arg»]*

+
+
+

Technically, a commandlet is a bash script located in $DEVON_IDE_HOME/scripts/command. So if you want to integrate another tool with devonfw-ide we are awaiting your pull-request. +Every commandlet takes the following generic arguments:

+
+
+
Generic arguments of every commandlet
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|-b or --batch |run in non-interactive mode (do not ask any questions). +|-q or --quiet |be quiet and avoid output. +|== == == == == == == == == == == =

+
+
+
+

Command-wrapper

+
+

For many commandlets the devon command acts as a wrapper. +Similar to mvnw or gradlew you can use it as a proxy command. +Therefore devon mvn clean install will be the same as mvn clean install. +The benefit when using devon as wrapper is that it will even work when the command (mvn, node, npm, etc.) is not on your PATH variable or even not yet installed. +We see the main benefit in this for writing portable scripts that you may commit to your git repository and that will then run everywhere and will lazily install the required tools on the fly. +In your daily usage you can and surely should avoid to always type devon as prefix to every command. +However, when you automate and want to avoid "command not found" errors, you can simply prefix the command with devon.

+
+
+
+

Commandlet overview

+
+

The following commandlets are currently available:

+
+
+ +
+ +
+
build
+
+

The build commandlet is an abstraction of build systems like maven, gradle, yarn, npm, etc. +It will auto-detect your build-system (via existence of files like pom.xml, package.json, etc.). According to this detection, it will simply delegate to the according commandlet of the specific build system. If that build-system is not yet available it will be downloaded and installed automatically.

+
+
+

So devon build allows users to build any project without bothering about the build-system. Further specific build options can be configured per project. This makes devon build a universal part of every definition of done. Before pushing your changes, please always run the following command to verify the build:

+
+
+

devon build

+
+
+

You may also supply additional arguments as devon build «args». This will simply delegate these arguments to the detected build command (e.g. call mvn «args»).

+
+ +
+
+
Docker
+
+

The Docker commandlet allows to install and use Docker. +On Windows WSL 2 (Windows Subsystem for Linux) has to be installed properly as a prerequisite.

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon docker «args») are explained by the following table:

+
+
+
Usage of devon docker
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Docker (install and verify) as per above flow. +|«args» |call docker with the specified arguments. Call docker help for details or use docker directly as preferred." («args») +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

The Docker commandlet will install Docker automatically. +Please note that besides the sandbox concept of devonfw-ide this is a global installation on your system. +When uninstalling devonfw-ide, you may have to manually uninstall Docker and Kubernetes if you do not need it anymore.

+
+
+
+
requirements
+
+

Running Docker and especially Kubernetes on your machine in the background will require quite some resources. +This will allocate at least 2GB of additional RAM.

+
+
+

You will need at least 8GB of total RAM while we recommend to use 16GB+.

+
+
+

You may also tune and scale it to your needs. +When using Docker Desktop (Windows or MacOS) simply go to the resources tab in the settings. +It will depend on your usage frequency if you want to have it running in the background all the time. +This is a balance between resource utilization and convenience. +If you use Docker and Kubernetes on your local machine on a daily basis this makes sense.

+
+
+

In case you only use Docker rarely, you can save resources by stopping it when not needed after it has been installed.

+
+
+
+
Windows and macOS
+
+

To enable or disable autostart, you can launch Docker Desktop on Windows or MacOS and go to the Preferences (gear icon in the title bar). Then in the General tab you can check or uncheck the option Start Docker Desktop when you login (see also here). When autostart is disabled and you launch Docker Desktop it will notice and ask you to start the service or do this automatically for you. +On Windows you can also manually tweak this:

+
+
+
    +
  • +

    Hit [windows][r]

    +
  • +
  • +

    Enter services.msc

    +
  • +
  • +

    Confirm with OK

    +
  • +
  • +

    In the services app search for the Docker Desktop Service in the list and select it.

    +
  • +
  • +

    Now you can start or stop the service by clicking on the according link text.

    +
  • +
  • +

    Also when right clicking on Docker Desktop Service and selecting Options from the context-menu, you can change the start type to automatic or manual.

    +
  • +
+
+
+
+
== Mac M1
+
+

In case you have a new Mac with M1 CPU, we automatically download and install the according ARM version of Docker Desktop for macOS. +However, if you use Docker and search for images you may end up with errors like:

+
+
+
+
docker: no matching manifest for linux/arm64/v8 in the manifest list entries.
+
+
+
+

So with M1 CPU you may need to add --platform linux/x86_64 as option to your Docker command to find the expected container image.

+
+
+
+
Linux
+
+

There is no Docker Desktop for Linux. +As Docker initially comes from the Linux world, it is easy to set it up on a Linux machine and use it from the commandline. +Therefore we do not install a GUI for you in case you are a Linux user. +In case you need a GUI for Docker and Kubernetes on Linux you can choose from the following options:

+
+
+ +
+
+
+
usage
+
+

Once installed via setup, you can run Docker directly from any shell of your OS directly. +Run docker help to get started and use the online documentations and resources on the web to get familiar with Docker. +It is not our intention to repeat this here.

+
+
+

Please note that the docker commandlet is a command wrapper.

+
+ +
+
+
eclipse
+
+

The eclipse commandlet allows to install, configure, and launch the Eclipse IDE. +To launch eclipse for your current workspace and devonfw-ide installation simply run: +devon eclipse

+
+
+

You may also supply additional arguments as devon eclipse «args». These are explained by the following table:

+
+
+
Usage of devon eclipse
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then the command will be invoked for each workspace +|setup |setup Eclipse (install or update) +|add-plugin «id» [«url»]|install an additional plugin +|run |launch Eclipse (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add |reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+

There are variables that can be used for Eclipse. These are explained by the following table:

+
+
+
Variables of devonfw-ide for Eclipse
+

|== == == == == == == == == == == = +|Variable|Meaning +|ECLIPSE_VERSION|The version of the tool Eclipse to install and use. +|ECLIPSE_EDITION_TYPE|The edition of the tool Eclipse to install and use. You can choose between Java for standard edition or JEE for enterprise edition. +|*EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with Eclipse you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder eclipse/plugins (click on this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example tmterminal.properties:

+
+
+
+
plugin_url=http://download.eclipse.org/tm/terminal/marketplace
+plugin_id=org.eclipse.tm.terminal.feature.feature.group,org.eclipse.tm.terminal.view.feature.feature.group,org.eclipse.tm.terminal.control.feature.feature.group,org.eclipse.tm.terminal.connector.ssh.feature.feature.group,org.eclipse.tm.terminal.connector.telnet.feature.feature.group
+plugin_active=true
+
+
+
+

The variables are defined as follows:

+
+
+
    +
  • +

    plugin_url defines the URL of the Eclipse update site of the plugin

    +
  • +
  • +

    plugin_id defines the feature group ID(s) to install. To install multiple features/plugins provide a comma-separated list of IDs. If you want to customize devonfw-ide with new plugins you can first install them manually and then go to About Eclipse > Installation Details then you can filter for your newly installed plugin and find the values in the Id column. Copy & paste them from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise, developers can still install the plugin manually via devon eclipse add-plugin «plugin-name» from the config file settings/eclipse/plugins/«plugin-name».properties. See the settings/eclipse/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. This e.g. applies for devstyle that allows a real dark mode for eclipse and tunes the theming and layout of Eclipse in general. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually.

+
+
+

As the maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
legacy plugin config
+
+

For downward compatibility we still support the deprecated legacy configuration if the folder settings/eclipse/plugins does not exist: +The project configuration typically defines the plugins that will be installed via ECLIPSE_PLUGINS variable. Otherwise defaults from this eclipse commandlet will apply. +Be aware that this comes at your own risk and sometimes plugins can conflict and break your IDE.

+
+
+

Here is an example how a project can configure the plugins in its devon.properties inside the settings:

+
+
+
+
ECLIPSE_PLUGINS=("AnyEditTools.feature.group" "https://raw.githubusercontent.com/iloveeclipse/plugins/latest/" "com.ess.regexutil.feature.group" "http://regex-util.sourceforge.net/update/")
+
+
+
+

For the above listed plugins you can also use the short form:

+
+
+
+
ECLIPSE_PLUGINS=("anyedit" "" "regexutil" "")
+
+
+
+

Of course you may also mix plugin IDs with fully qualified plugins.

+
+
+
+
dictionary
+
+

Eclipse already comes with a built-in spellchecker. This is very helpful when writing comments. The default settings of devonfw-ide ship with a project specific dictionary file and according configurations to enable spellchecking and configuring this dictionary. +When typing JavaDoc, inline comments or other texts the spellchecker will underline unknown words in red. +If your cursor is located at such a word you can hit [Ctrl][1] to get a context menu with additional options. +There you can either choose similar correct words to correct a typo or you may even add the word (maybe a new business term) to your local dictionary.

+
+
+
+"Eclipse spellchecker” +
+
+
+

In the latter case, you should commit the changes to your settings so that it will be available to your entire team. +For further details about committing changes to the settings please consult the admin usage.

+
+
+
+
non-english dictionary
+
+

In case your project has to write documentation or text in languages other than English, you might want to prefill your project dictionary for that language. +Here we collect a list of such dictionaries that you can download and merge into your project dictionary:

+
+
+ +
+ +
+
+
gradle
+
+

The gradle commandlet allows to install, configure, and launch gradle. It is similar to gradle-wrapper. So calling devon gradle «args» is more or less the same as calling gradle «args» but with the benefit that the version of gradle preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon gradle «args») are explained by the following table:

+
+
+
Usage of devon gradle
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup gradle (install and verify), configurable via GRADLE_VERSION +|«args» |run gradle with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
help
+
+

The help commandlet provides help for the CLI.

+
+
+
Usage of devon help
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |Print general help +|«command» |Print help for the commandlet «command». +|== == == == == == == == == == == =

+
+
+

Please note that devon help «command» will do the same as devon «command» help.

+
+ +
+
+
ide
+
+

The ide commandlet manages your devonfw-ide. +You need to supply additional arguments as devon ide «args». These are explained by the following table:

+
+
+
Usage of devon ide
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup [«SETTINGS_URL»] |setup devonfw-ide (cloning the settings from the given URL, optionally from specific branch URL#branch) +|update [«package»] |update devonfw-ide +|update scripts [to «version»] |update devonfw-ide +|uninstall |uninstall devonfw-ide (if you want to remove it entirely from your system) +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

Run devon ide setup to initially setup your devonfw-ide. It is recommended to run the setup script in the top-level directory ($DEVON_IDE_HOME). However, in case you want to skip some system specific integration, you may also run this command directly instead. The setup only needs to be called once after a new devonfw-ide instance has been created. It will follow this process:

+
+
+
    +
  • +

    install the devon command on your system (if not already installed).

    +
  • +
  • +

    clone the settings (you may provide a git URL directly as argument or you will be prompted for it).

    +
  • +
  • +

    install all required software from DEVON_IDE_TOOLS variable (if not already installed).

    +
  • +
  • +

    configure all these tools

    +
  • +
  • +

    create IDE launch scripts

    +
  • +
  • +

    perform OS specific system integration such as Windows Explorer integration (only done from setup script and not from devon ide setup)

    +
  • +
+
+
+
+
update
+
+

Run devon ide update to update your devonfw-ide. This will check for updates and install them automatically. +The optional extra argument («package») behaves as follows:

+
+
+
    +
  • +

    scripts: check if a new version of devonfw-ide-scripts is available. If so it will be downloaded and installed. As Windows is using file-locks, it is tricky to update a script while it is executed. Therefore, we update the scripts folder as an async background task and have to abort further processing at this point on windows as a workaround.

    +
  • +
  • +

    settings: update the settings (git pull).

    +
  • +
  • +

    software: update the software (e.g. if versions have changed via scripts or settings update).

    +
  • +
  • +

    projects: update the projects (checkout and import repositories into workspace/IDEs).

    +
  • +
  • +

    all: do all the above sequentially.

    +
  • +
  • +

    none: settings and software are updated by default if no extra argument is given. This is the regular usage for project developers. Only perform an update of scripts when you are requested to do so by your technical lead. Bigger projects especially need to test updates before rolling them out to the entire team. If developers always updated the latest release of the scripts which is released globally, some project functionality would break causing problems and extra efforts in the teams.

    +
  • +
+
+
+

In order to update to a specific version of scripts an explicit version can be specified after the additional to argument:

+
+
+
+
devon ide update scripts to 3.1.99
+
+
+
+

The above example will update to the exact version 3.1.99 no matter if this is an upgrade or a downgrade of your current installed version. +If you just use devon ide update scripts then the latest available version will be installed. In larger teams it is recommended to communicate exact version updates to avoid that a new release can interfere and break anything. Therefore, some pilot user will test a new version for the entire team and, only after a successful test, they will communicate to the team to update to that exact version by providing the complete command as in the above example.

+
+
+
+
uninstall
+
+

We hope you love devonfw-ide. However, if you don’t and want to get rid of it entirely and completely remove all integration, you can use this command:

+
+
+
+
devon ide uninstall
+
+
+
+

This will remove devonfw-ide from all central places of your OS (user home directory such as scripts, .devon, .bashrc, as well as windows registry, etc.). +However, it will not remove your current installations (or shared software folder). So after running this uninstall, simply remove your DEVON_IDE_HOME directory of all devonfw-ide installations and potential shared software folder. You may also want to clean up your ~/Downloads directory from files downloaded by devonfw-ide. We do not automate this as deleting a directory is a very simple manual step and we do not want to take responsibility for severe data loss if your workspaces contained valuable work.

+
+ +
+
+
intellij
+
+

The intellij commandlet allows to install, configure, and launch IntelliJ. +To launch IntelliJ for your current workspace and devonfw-ide installation, simply run: +devon intellij

+
+
+

You may also supply additional arguments as devon intellij «args». These are explained by the following table:

+
+
+
Usage of devon intellij
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then the command will be invoked for each workspace +|setup |setup IntelliJ (install or update) +|add-plugin «id»|install an additional plugin +|run |launch IntelliJ (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+

There are variables that can be used for IntelliJ. These are explained by the following table:

+
+
+
Variables of devonfw-ide for intelliJ
+

|== == == == == == == == == == == = +|Variable|Meaning +|INTELLIJ_VERSION|The version of the tool IntelliJ to install and use. +|INTELLIJ_EDITION_TYPE|The edition of the tool IntelliJ to install and use. The value C means Community edition and the value U means Ultimate edition. The Ultimate edition requires a license. The user has to buy the license separately and it is not part of devonfw-ide. The devonfw-ide only supports download and installation. +|*EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with IntelliJ you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder intellij/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example scala.properties:

+
+
+
+
plugin_id=org.intellij.scala
+plugin_active=false
+
+
+
+

The variables are defined as follows:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins use the search on https://plugins.jetbrains.com/idea_ce to find the plugin of your choice. Select the tab Versions and click on a version in the list. The plugin ID is displayed in the upper right corner. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon intellij add-plugin «plugin_id».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of IntelliJ. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+ +
+
+
ionic
+
+

The ionic commandlet allows to install, configure, and launch ionic (ionic-cli). Calling devon ionic «args» is more or less the same as calling ionic «args» but with some advanced features and ensuring that ionic is properly set up for your project.

+
+
+

The arguments (devon ionic «args») are explained by the following table:

+
+
+
Usage of devon ionic
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup yarn (install and verify), configurable via YARN_VERSION +|create |Create a new devon4ng ionic project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ionic with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
jasypt
+
+

The jasypt commandlet allows to install jasypt and encrypt or decrypt secrets using strong encryption given a secure masterpassword. See also devon4j password encryption guide for further details.

+
+
+

The arguments (devon jasypt «args») are explained by the following table:

+
+
+
Usage of devon jasypt
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup jasypt (install and verify), configurable via JASYPT_VERSION +|encrypt |Encrypt a secret with a masterpassword +|decrypt |Decrypt an encrypted secret with a masterpassword +|== == == == == == == == == == == =

+
+
+
+
example
+
+
+
devon jasypt encrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: secret
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: secret
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+devon jasypt decrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+secret
+
+
+ +
+
+
java
+
+

The java commandlet allows to install and setup Java. Also it supports devon4j. +The arguments (devon java «args») are explained by the following table:

+
+
+
Usage of devon java
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup OpenJDK (install or update and verify), configurable via JAVA_VERSION (e.g. 8u242b08 or 11.0.6_10) +|create «args» |create a new Java project based on devon4j application template. If a single argument is provided, this is the package name and is automatically split into groupId and artifactId. Use -DdbType=«db» to choose the database (hana, oracle, mssql, postgresql, mariadb, mysql, h2, hsqldb). Any option starting with dash is passed as is." +|migrate [from «version»] [single] |migrate a devon4j project to the latest version. If for some reasons the current devonfw version can not be auto-detected you may provide it manually after the 'from' argument. Also the 'single' option allows to migrate only to the next available version." +|cicd «args» |generate cicd files for the current devon4java project +|== == == == == == == == == == == =

+
+
+

Since 2021.12.003 an extra version of Java can be configured via EXTRA_JAVA_VERSION variable. This can be used to launch your IDE with a different (newer) version of Java but keeping the build of your project stable.

+
+
+
+
create
+
+

Examples for creating a new devon4j application:

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create -Dversion=0.0.1-alpha1 com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 0.0.1-alpha1, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group demo-app
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId demo-app, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp -DartifactId=demo-app -DdbType=hana
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId demo-app, version 1.0.0-SNAPSHOT, and SAP hana database.

+
+
+
+
devon java create com.example.domain.myapp -DdbType=oracle -Dversion=0.0.1 com.example.group -Dbatch=batch
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 0.0.1, oracle database, and with a batch module.

+
+
+
+
migrate
+
+

Example for migrating a devon4j application:

+
+
+
+
devon java migrate
+
+
+
+

Will migrate current devon4j application to the latest version available.

+
+ +
+
+
jenkins
+
+

The jenkins commandlet allows to install, configure, and launch Jenkins.

+
+
+
Usage of devon jenkins
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup Jenkins (install and verify) +|start |Start your local Jenkins server +|stop |Stop your local Jenkins server +|add |Add current project as CI job to your local Jenkins +|== == == == == == == == == == == =

+
+ +
+
+
Kubernetes
+
+

The kubectl commandlet allows to install and use kubernetes. +On Windows WSL 2(Windows Subsystem for Linux) has to be installed properly as a prerequisite. +The setup on windows will then install kubernetes with K3D. K3D will create a cluster with a single node with a default name as "devonfw-cluster"

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon kubectl «args») are explained by the following table:

+
+
+
Usage of devon kubectl
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Kubernetes (install and verify) as per above flow. +|«args» |call kubectl with the specified arguments. Call kubectl help for details or use kubectl directly as preferred. +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

Please note that on Windows and macOS, Kubernetes support comes together with Docker Desktop that is installed via docker commandlet. +When you have installed and launched Docker Desktop, you can once enable Kubernetes in the Preferences.

+
+
+

On Linux however, Kubernetes is installed separately by this commandlet.

+
+
+
+
usage
+
+

Once installed via setup, you can run kubectl directly from any shell of your OS directly. +Run kubectl help to get started and use the online documentations and resources on the web to get familiar with Kubernetes. +It is not our intention to repeat this here.

+
+
+

Please note that the kubectl commandlet is a command wrapper.

+
+ +
+
+
mvn
+
+

The mvn commandlet allows to install, configure, and launch maven. It is similar to maven-wrapper and mdub. So calling devon mvn «args» is more or less the same as calling mvn «args» but with the benefit that the version of maven preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon mvn «args») are explained by the following table:

+
+
+
Usage of devon mvn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via MVN_BUILD_OPTS +|setup |setup Maven (install and verify), configurable via MAVEN_VERSION +|get-version |Print the version of your current project. Will consolidate the version for multi-module projects ignoring dev[-SNAPSHOT] versions and fail on mixed versions. +|set-version «nv» [«cv»] |Set the version of your current project to «nv» (assuming your current version is «cv»). +|check-no-snapshots |Check if no «version»-SNAPSHOT dependencies are used. +|check-top-level-project |Check if you are running on a top-level project or fail if in a module or no maven project at all. +|release |Start a clean deploy release build, configurable via MVN_RELEASE_OPTS +|«args» |run maven with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
ng
+
+

The ng commandlet allows to install, configure, and launch ng (angular-cli). Calling devon ng «args» is more or less the same as calling ng «args» but with some advanced features and ensuring that ng is properly set up for your project.

+
+
+

The arguments (devon ng «args») are explained by the following table:

+
+
+
Usage of devon ng
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup yarn (install and verify), configurable via NG_VERSION +|create |Create a new devon4ng project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ng with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
node
+
+

The node commandlet allows to install and setup node.js. +The arguments (devon node «args») are explained by the following table:

+
+
+
Usage of devon node
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup node.js (install and verify), configurable via NODE_VERSION +|create «name» [«args»] | create a new devon4node application (same as devon4node new) +|generate «s» [«args»] | generate devon4node components using the schematic «s» (same as devon4node generate) +|db «c» [«args»] | execute a TypeORM command «c» (same as devon4node db) +|cicd «args» |generate cicd files for the current devon4node project +|«args» | call NodeJS with the specified arguments +|== == == == == == == == == == == =

+
+ +
+
+
npm
+
+

The npm commandlet allows to install, configure, and launch npm. Calling devon npm «args» is more or less the same as calling npm «args» but with the benefit that the version of npm preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon npm «args») are explained by the following table:

+
+
+
Usage of devon npm
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via NPM_BUILD_OPTS +|setup |setup NPM (install and verify), configurable via NPM_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |Start a clean deploy release build, configurable via NPM_RELEASE_OPTS +|«args» |run NPM with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
release
+
+

Create a release in a standardized way including the following steps:

+
+
+
    +
  • +

    verify the current project (no local changes, etc.)

    +
  • +
  • +

    warn if «version»-SNAPSHOT dependencies are used

    +
  • +
  • +

    determine «version» (if currently «version»-SNAPSHOT) and print out release information.

    +
  • +
  • +

    ask user for confirmation

    +
  • +
  • +

    bump release to «version» in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    create annotated tag for your release as release/«version»

    +
  • +
  • +

    invoke deployment on build-system

    +
  • +
  • +

    set next version as («version»+1)-SNAPSHOT in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    push your changes

    +
  • +
+
+
+
Usage of devon release
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|…​ |any optional argument will directly be passed to the actual command to build the deployment +|== == == == == == == == == == == =

+
+
+
+
Build-Tools
+
+

This release commandlet utilizes the build commandlet to support multiple build-tools such as maven, gradle, or npm. Each of those commandlets should respect the variable «TOOL»_RELEASE_OPTS to customize the parameters for the release build.

+
+
+

So e.g. if a pom.xml is detected, maven will be used. In this example the variable MVN_RELEASE_OPTS is used that defaults to clean deploy -Dchangelist= -Pdeploy. +If you provide a specific argument this will be passed additionally. +So if you invoke the command devon release -P myProfile, the above step invoke deployment on build-system would technically call this:

+
+
+
+
mvn clean deploy -Dchangelist= -Pdeploy -P myProfile
+
+
+
+

Please also note that it is very tricky to determine and modify the version of a project in a fully generic way. +Even though we try our best to support different scenarios, we can not ensure this is working for edge-cases. +Therefore, we strongly encourage to follow best practices such as ci-friendly maven. +Further, sticking to the defaults and follow the devonfw standard to name the profile for custom goals in deployment simply deploy is recommended.

+
+ +
+
+
sonar
+
+

The sonar commandlet allows to install, configure, and launch SonarQube.

+
+
+
Usage of devon sonar
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup SonarQube (install and verify) +|start |Start your local SonarQube server +|stop |Stop your local SonarQube server +|analyze |Analyze current project with SonarQube +|== == == == == == == == == == == =

+
+ +
+
+
vscode
+
+

The vscode commandlet allows to install, configure, and launch Visual Studio Code. +To launch VSCode for your current workspace and devonfw-ide installation, simply run: +devon vscode

+
+
+

You may also supply additional arguments as devon vscode «args». These are explained by the following table:

+
+
+
Usage of devon vscode
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then to command will be invoked for each workspace +|setup |setup VSCode (install or update) +|add-plugin «id»|install an additional plugin (extension) +|run |launch VSCode (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with VS Code you need plugins (called extensions in VS Code). Of course devonfw-ide can automate this for you: +In your settings git repository create a folder vscode/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example devonfw-extension-pack.properties:

+
+
+
+
plugin_id=devonfw.devonfw-extension-pack
+plugin_active=true
+
+
+
+

The variables are defined as following:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins click on Extensions at the bottom of the left navigation icon bar in VS code. Then use the search to find the plugin of your choice. If you click on it the plugin ID is displayed in grey beside the official title at the top of the plugin details page. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon vscode add-plugin «plugin-name» from the config file settings/vscode/plugins/«plugin-name».properties. See the settings/vscode/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of VS code. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
cleaning plugins on update
+
+

If you want to strictly manage the plugins for VS code in your project, you can create or edit the file settings/vscode/plugins in your settings and add this variable:

+
+
+
+
clean_plugins_on_update=true
+
+
+
+

This will wipe all plugins when an update of VS code is performed (e.g. via devon ide update) and reinstall all configured plugins. While this gives you more control over the governance of the plugins and allows to remove a plugin later during project lifecycle. However, this will delete all manually installed plugins automatically without asking.

+
+ +
+
+
yarn
+
+

The yarn commandlet allows to install, configure, and launch yarn. Calling devon yarn «args» is more or less the same as calling yarn «args» but with the benefit that the version of yarn preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon yarn «args») are explained by the following table:

+
+
+
Usage of devon yarn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via YARN_BUILD_OPTS +|setup |setup yarn (install and verify), configurable via YARN_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |start a clean deploy release build, configurable via YARN_RELEASE_OPTS +|«args» |run yarn with the given arguments («args») +|== == == == == == == == == == == =

+
+
+ +
+
+
+

Structure

+
+

The directory layout of your devonfw-ide will look like this:

+
+
+
File structure of your devonfw-ide
+
+
/ projects (or C:\Projects, etc.)
+└──/ my-project ($DEVON_IDE_HOME)
+    ├──/ conf
+    ├──/ log
+    ├──/ scripts
+    ├──/ settings
+    ├──/ software
+    ├──/ system
+    ├──/ updates
+    ├──/ workspaces
+    ├── setup
+    ├── setup.bat
+    └── devon-ide-doc.pdf
+
+
+
+

The elements of the above structure are described in the individual sections. As they are hyperlinks you can simply click on them to get more details.

+
+ +
+
conf
+
+

This folder contains configurations for your IDE:

+
+
+
File structure of the conf folder
+
+
/ conf
+├──/ .m2
+│  ├──/ repository
+│  │  ├──/ ant
+│  │  ├──/ ...
+│  │  └──/ zw
+│  ├── settings-security.xml
+│  └── settings.xml
+├──/ .sonar
+├──/ ...
+└── variables
+
+
+
+

The .m2 folder is used for configurations of maven. It contains the local repository folder used as cache for artifacts downloaded and installed by maven (see also maven repositories). +Further, there are two configuration files for maven:

+
+
+
    +
  • +

    settings.xml initialized from a template from your devonfw-ide [settings]. You may customize this to your needs (configuring HTTP proxies, credentials, or other user-specific settings). Secrets can be specified as $[«variable.name»] and will be prompted, encrypted and replaced automatically during the setup (unless in batch mode). Please note that this process is skipped in batch mode and also if you use the default settings URL (for simplicity of testing). To make use of this feature simply fork or copy the settings to your own git repo. In case your credentials have changed or you made a typo, you can simply redo this step by first moving your ${DEVON_IDE_HOME}/conf/.m2/settings.xml file to a temporary folder and then calling devon mvn setup.

    +
  • +
  • +

    settings-security.xml is auto-generated for you by devonfw-ide with a random password. This should make it easier for devonfw-ide users to use password encryption and never add passwords in plain text for better security.

    +
  • +
+
+
+

Finally, there is a file variables for the user-specific configuration of devonfw-ide.

+
+ +
+
+
log
+
+

The log directory is used to store log files e.g. for the IDE configurator. You may look here for debug information if something goes wrong.

+
+ +
+
+
scripts
+
+

This directory is the heart of the devonfw-ide and contains the required scripts.

+
+
+
File structure of the conf folder
+
+
/scripts
+├──/ command
+│  ├── build
+│  ├── docker
+│  ├── eclipse
+│  ├── gradle
+│  ├── help
+│  ├── ide
+│  ├── intellij
+│  ├── ionic
+│  ├── jasypt
+│  ├── java
+│  ├── jenkins
+│  ├── kubectl
+│  ├── mvn
+│  ├── ng
+│  ├── node
+│  ├── npm
+│  ├── project
+│  ├── release
+│  ├── sonar
+│  ├── vscode
+│  └── yarn
+├── devon
+├── devon.bat
+├── environment-project
+├── environment-project.bat
+├── functions
+└── devon.properties
+
+
+
+

The command folder contains the commandlets. +The devon script is the key command line interface for devonfw-ide. +There is also devon.bat that can be used in cmd or PowerShell. +As the devon CLI can be used as a global command on your computer from any directory and gets installed centrally, it aims to be stable, minimal, and lightweight. +The key logic to set up the environment variables is therefore in a separate script environment-project and its Windows variant environment-project.bat inside this scripts folder. +The file functions contains a collection of reusable bash functions. +These are sourced and used by the commandlets. +Finally the devon.properties file contains defaults for the general configuration of devonfw-ide.

+
+ +
+
+
settings
+
+

The devonfw-ide requires settings with configuration templates for the arbitrary tools.

+
+
+

To get an initial set of these settings we provide the default ide-settings as an initial package. These are also released so you can download the latest stable or any history version at maven central.

+
+
+

To test devonfw-ide or for very small projects you can also use the latest default settings (just hit return when setup is asking for the Settings URL). +However, for collaborative projects we strongly encourage you to distribute and maintain the settings via a dedicated and project specific git repository. +This gives you the freedom to control and manage the tools with their versions and configurations during the project lifecycle. +Therefore simply follow the admin usage guide.

+
+
+
+
Structure
+
+

The settings folder (see SETTINGS_PATH) has to follow this file structure:

+
+
+
File structure of settings
+
+
/settings
+├──/ devon
+│  ├──/ conf
+│  │  ├──/ .m2
+│  │  │  └── settings.xml
+│  │  ├──/ npm
+│  │  │  └── .npmrc
+│  │  └── devon.properties
+├──/ eclipse
+│  ├──/ workspace
+│  │  ├──/ setup
+│  │  └──/ update
+│  ├── lifecycle-mapping-metadata.xml
+│  └── project.dictionary
+├──/ ...
+├──/ sonarqube
+│  └──/ profiles
+│     ├── Devon-C#.xml
+│     ├── ...
+│     └── Devon-XML.xml
+├──/ vscode
+│  └──/ workspace
+│     ├──/ setup
+│     └──/ update
+└── devon.properties
+
+
+
+

As you can see, the settings folder contains sub-folders for tools of the IDE. +So the devon folder contains devon.properties files for the configuration of your environment. +Further, for the IDEs such as eclipse or vscode, the according folders contain the templates to manage the workspace via our configurator.

+
+
+
+
Configuration Philosophy
+
+

Different tools and configuration files require a different handling:

+
+
+
    +
  • +

    Where suitable, we directly use these configurations from your settings (e.g. for eclipse/lifecycle-mapping-metadata.xml, or eclipse/project.dictionary).

    +
  • +
  • +

    The devon folder in settings contains templates for configuration files. These are copied to the devonfw-ide installation during setup (if no such file already exists). In this way the settings repository can provide reasonable defaults but allows the user to take over control and customize to his personal needs (e.g. .m2/settings.xml).

    +
  • +
  • +

    Other configurations need to be imported manually. To avoid manual steps and simplify use we try to automate as much as possible. This currently applies to sonarqube profiles but will be automated with sonar-devon4j-plugin in the future.

    +
  • +
  • +

    For tools with complex configuration structures like eclipse, intellij, or vscode we provide a smart mechanism via our configurator.

    +
  • +
+
+
+
+
Customize Settings
+
+

You can easily customize these settings for the requirements of your project. We suggest that one team member is responsible to ensure that everything stays consistent and works.

+
+
+

You may also create new sub-folders in settings and put individual items according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth to share in your team. However, to share and maintain knowledge we recommend to use a wiki.

+
+ +
+
+
software
+
+

The software folder contains the third party tools for your IDE such as maven, npm, java, etc. +With respect to the licensing terms you may create a custom archive containing a devonfw-ide together with the required software. +However, to be platform independent and allow lightweight updates, the devonfw-ide is capable to download and install the software automatically for you.

+
+
+
+
Repository
+
+

By default, software is downloaded via the internet from public download URLs of the according tools. However, some projects may need specific tools or tool versions that are not publicly available. +In such case, they can create their own software repository (e.g. in a VPN) and configure the base URL of it via DEVON_SOFTWARE_REPOSITORY variable. +Then, devonfw-ide will download all software from this repository only instead of the default public download URLs. +This repository (URL) should be accessible within your network via HTTPS (or HTTP) and without any authentication. +The repository needs to have the following structure:

+
+
+
+
${DEVON_SOFTWARE_REPOSITORY}/«tool»/«version»/«tool»-«version»[-«os»].tgz
+
+
+
+

So for every tool «tool» (java, maven, vscode, eclipse, etc.) you need to provide a folder in your repository. +Within this folder for every supported version «version» you need a subfolder. +This subfolder needs to contain the tool in that version for every operating system «os» (windows, linux, or mac - omitted if platform independent, e.g. for maven).

+
+
+
+
Shared
+
+

By default, each installation of devonfw-ide has its own physical installations of the required tools in the desired versions stored in its local software folder. +While this is great for isolation of devonfw-ide installations and to prevent side-effects, it can cause a huge waste of disc resources in case you are having many installations of devonfw-ide. +If you are a power-user of devonfw-ide with more then ten or even up to hundreds of installations on your machine, you might love to share installations of a software tool in a particular version between multiple devonfw-ide installations.

+
+
+ + + + + +
+ + +If you use this power-feature you are taking responsibility for side-effects and should not expect support. Also if you are using Windows please read Symlinks in Windows and make your mind if you really want to do so. You might also use this hint and maintain it manually without enabling the following feature. +
+
+
+

In order to do so, you only need to configure the variable DEVON_SOFTWARE_PATH in your ~/devon.properties pointing to an existing directory on your disc (e.g. /projects/software or C:\projects\software). +Then devonfw-ide will install required software into ${DEVON_SOFTWARE_PATH}/${software_name}/${software_version} as needed and create a symbolic link to it in ${DEVON_IDE_HOME}/software/${software_name}.

+
+
+

As a benefit, another devonfw-ide installation using the same software with the same version can re-use the existing installation and only needs to create the symbolic link. No more waste of having many identical JDK installations on your disc.

+
+
+

As a drawback, you need to be aware that specific tools may be "manipulated" after installation. +The most common case is that a tool allows to install plugins or extensions such as all IDEs do. Such "manipulations" will cause side-effects between the different devonfw-ide installations sharing the same version of that tool. +While this can also be a benefit it may also cause trouble. +If you have a sensitive project that should not be affected by such side-effects, you may again override the DEVON_SOFTWARE_PATH variable to the empty value in your ${DEVON_IDE_HOME}/conf/devon.properties of that sensitive installation:

+
+
+
+
DEVON_SOFTWARE_PATH=
+
+
+
+

This will disable this feature particularly for that specific sensitive devonfw-ide installation but let you use it for all other ones.

+
+
+
+
Custom
+
+

In some cases, a project might need a (proprietary) tool(s) that (are) not supported by devonfw-ide. A very simple solution is to get a release of devonfw-ide and add the tool(s) to the software folder and then distribute this modified release to your team. However, this has several drawbacks as you then have a fork of devonfw-ide and will lose your tool(s) when updating to a new release.

+
+
+

As a solution for this need, devonfw-ide lets you configure custom tools via the DEVON_IDE_CUSTOM_TOOLS variable. It can be defined in devon.properties of your settings git repository as an array of the custom tools you need to add. +Each entry applies:

+
+
+
    +
  • +

    It needs to have the form «tool»:«version»[:all][:«repository-url»]

    +
  • +
  • +

    The first entry must have the «repository-url» included which is used as default

    +
  • +
  • +

    Further entries will inherit this default if omitted

    +
  • +
  • +

    This URL is used in the same way as described above for a software repository.

    +
  • +
  • +

    The DEVON_SOFTWARE_REPOSITORY variable is ignored by this feature.

    +
  • +
  • +

    The optional infix :all is used to indicate that the tool is platform independent. Otherwise, an OS specific infix is appended to the URL file to download for your platform (windows, linux, or mac).

    +
  • +
+
+
+

As an example, we define it in ${DEVON_IDE_HOME}/settings/devon.properties:

+
+
+
+
DEVON_IDE_CUSTOM_TOOLS=(jboss-eap:7.1.4.GA:all:https://host.tld/projects/my-project firefox:70.0.1)
+
+
+
+

This will download and extract the following content to your software folder:

+
+ +
+

Please note that if you are not using windows, the -windows suffix will be -mac or -linux.

+
+ +
+
+
system
+
+

The system folder contains documentation and solutions for operation system specific integration. Please have a look to get the maximum out of devonfw-ide and become a very efficient power user.

+
+ +
+
+
updates
+
+

The updates folder is used for temporary data. This includes:

+
+
+
    +
  • +

    extracted archives for installation and updates

    +
  • +
  • +

    backups of old content on updates to prevent data loss

    +
  • +
+
+
+

If all works fine you may clean this folder to save some kilo- or mega-bytes. Otherwise, you can ignore it unless you are looking for a backup after a failed or unplanned upgrade.

+
+ +
+
+
workspaces
+
+

The workspaces folder contains folders for your active work. There is a workspace folder main dedicated for your primary work. You may do all your work inside the main workspace. Also, you are free to create any number of additional workspace folders named as you like (e.g. test, release, testing, my-sub-project, etc.). Using multiple workspaces is especially relevant for Eclipse as each workspace has its own Eclipse runtime instance and configuration.

+
+
+

Within the workspace folder (e.g. workspaces/main) you are again free to create sub-folders for (sub-)projects according to your needs. We assume that in most cases you clone git repositories here. The following structure shows an example layout for devonfw:

+
+
+
File structure of workspaces
+
+
/ workspaces
+├──/ main
+│  ├──/ .metadata
+│  ├──/ ide
+│  ├──/ devon4j
+│  └──/ my-thai-star
+└──/ stable
+   ├──/ .metadata
+   ├──/ ide
+   └──/ devon4j
+
+
+
+

In the main workspace you may find the cloned forks for regular work (in the example e.g. devon4j) as a base to create pull-requests while in the stable workspace there is a clone of devon4j from the official devon4j. +However, this is just an example. Some people like to create separate workspaces for development and maintenance branches with git. Other people just switch between those via git checkout.

+
+ +
+
+
Project import
+
+

The devonfw-ide supports to automatically check out and import required projects into your IDE during setup. To configure this you put a .properties file for each desired project into the projects sub-folder in your settings. Each .properties file describes one "project" which you would like to check out and (potentially) import:

+
+
+
+
path=myproject
+workingsets=Set1,Set2
+workspace=example
+git.url=http://github.com/someorg/someproject
+git.branch=develop
+build.path=.
+build.cmd=mvn -DskipTests=true -Darchetype.test.skip=true clean install
+eclipse=import
+active=true
+
+
+
+
+
.Variables of project import
+
+
+
+

|== = +|Variable|Value|Meaning +|path|e.g. myproject, will clone into ${WORKSPACE_PATH}/myproject|(required) Path into which the projects is cloned. This path is relative to the workspace. +|working sets|e.g. ws1,ws2|(optional) This will create working sets (in eclipse). Each module (eclipse project) of this project will be part of all these working sets. Working sets will be automatically created if necessary. +|workspace|main|Workspace to use for checkout and import. Default is main. +|git.url|e.g. http://github.com/someorg/someproject|(required) Git URL to use for cloning the project. +|git.branch|e.g. develop|(optional) Git branch to checkout. Git default branch is default. +|build.path|e.g. . (default)|(optional) The directory inside path where to trigger an initial build after clone or pull (if build.cmd is set). For a regular project use . to build top-level project. +|build.cmd +|e.g. mvn -D skip Tests=true -Darchetype.test.skip=true clean install +|(optional) The devonfw command to invoke to build the project after clone or pull. If omitted no build is triggered. +|eclipse|e.g. import|(optional) Desired action for eclipse IDE. If you put import here all modules (eclipse projects) in the current project will be imported into eclipse. If you leave this out or put any other value for this parameter, no change in eclipse is done. +|active|true|(optional) If set to false the project is skipped during the setup. +|== =

+
+
+

Please note that the .properties file is parsed via shell and not via java. So be careful with "advanced" features .properties files normally support.

+
+
+
+
+
+
+

Advanced Features

+ +
+

Cross-Platform Tooling

+ +
+
+

Git Client

+
+

If you are looking for a git client that works cross-platform we recommend to use Fork.

+
+
+
+

Draw Diagrams

+
+

To draw diagrams for your project or for blueprints in devonfw, we recommend the following cross-platform tools:

+
+
+
    +
  • +

    draw.io is a powerful generic vector painting program (similar to visio). You can get a free open-source edition for your desktop from here.

    +
  • +
  • +

    ObjectAid is a nice and easy to use eclipse plugin that you can use to quickly create UML diagrams from existing code. While class-diagrams are supported for free, you need to buy a license if you want to use the other diagram types.

    +
  • +
  • +

    PlantUML is a great tool that can render UML diagrams from simple markup that can be easily managed in git or other version-control systems together with your code. Its simplicity allows branching and merging unlike other greedy binary UML data-formats.

    +
  • +
+
+
+
+

Browser Plugins

+
+

There are tons of helpful browser plugins out there and it might be a matter of personal taste what you like to have installed. However, as we are heavily using github we want to promote octotree. +In case you also work with ZenHub you might want to install the Zenhub Browser Extension.

+
+ +
+
+

Windows Tooling

+ +
+
+

Installing software

+
+

The devon IDE already contains a lot of software. But if you need more, here are some ways to get it easily:

+
+
+
+

Chocolatey

+
+

Chocolatey is a repository for free and open source software similar to the repositories known from Linux, like apt, apk, pacman, …​

+
+
+
+

Winget

+
+

Microsoft is also working on a repository for Windows called winget. It is currently in alpha state, but is expected to be integrated in the upcoming Windows 11.

+
+
+
+

Integration into Windows-Explorer

+
+

After you have set up your devonfw-ide on a windows machine, +you already have windows-explorer integration out-of-the-box. +Just right-click on the folder you would like to open in a terminal and choose from the context menu:

+
+
+
    +
  • +

    Git Bash

    +
  • +
  • +

    Open devonfw cmd shell here

    +
  • +
  • +

    Open devonfw PowerShell here

    +
  • +
  • +

    Open devonfw Cygwin Bash here (only if cygwin was installed during setup)

    +
  • +
+
+
+
+

Tabs everywhere

+
+

Many people got used to tabs that have been introduced by all major browsers:

+
+
+
+tabs in firefox +
+
Figure 1. Tabs in Firefox
+
+
+

This nice feature can be added to many other tools.

+
+
+
+

Tabs for Windows Explorer

+
+

If you want to have tabs for windows explorer simply install Clover.

+
+
+
+tabs in windows explorer +
+
Figure 2. Tabs in Windows Explorer
+
+
+
+

Tabs for SSH

+
+

If you want to have tabs for your SSH client Putty (or even better Kitty that comes with WinSCP integration) you simply install SuperPutty. +BTW: Windows 10 already has an SSH client included.

+
+
+
+tabs for SSH sessions +
+
Figure 3. Tabs for SSH
+
+
+
+

Tabs for CMD

+
+

If you want to have tabs for your windows command-line you simply install ConEmu. Here you can also add other shells like Putty. +Also you should have a look at the new Windows Terminal which also supports tabs.

+
+
+
+tabs for windows shells +
+
Figure 4. Tabs for CMD
+
+
+

See integration to make ConEmu work flawless with devonfw-ide.

+
+
+
+

Windows Helpers

+ +
+
+

Handle passwords

+
+

Do you want complex passwords that differ for each account for security? Do you only want to remember a single password for simplicity? Do you want to have both? Then, you need to install KeePass right now.

+
+
+
+

Real text editor

+
+

A real developer needs a real text editor and not windows built in notepad. +The most common choice is Notepad++.

+
+
+
+

Real compression tool

+
+

Do you need to deal with ZIP files, TGZ, dpkg, etc.? Just install 7zip and forget about Windows' built-in ZIP support (that is buggy with long file paths, etc.).

+
+
+
+

Smarter clipboard

+
+

Do you want to paste something from the clipboard but meanwhile you had to copy something else? Just, one of the many things you can easily do with ditto.

+
+
+
+

PowerToys

+
+

Microsoft provides some extensions to improve the workflow in windows called PowerToys. They include tools like a file renamer, a way to order your windows on the screen, a color picker and more.

+
+
+
+

Sysinternals Tools

+
+

A real developer will quickly notice that Windows' built-in tools to analyze processes, network connections, autostarts, etc. are quite poor. So, what you really would like is the Sysinternals-Suite. You can make process-explorer your default task manager. Use autoruns to prevent nasty background things to be started automatically. Use tcpview to figure out which process is blocking port 8080, etc.

+
+
+
+

Cope with file locks

+
+

Did you ever fail to delete a file or directory that was locked by some process and you did not even know which one it was? +Then you might love IoBit Unlocker. +See also this article.

+
+
+
+ +
+

Are you used to symbolic and hard links in Linux? Do you have to work with Windows? Would you also like to have such links in Windows? Why not? Windows supports real links (not shortcuts like in other cases). +If you even want to have it integrated in windows explorer you might want to install linkshellextension. However, you might want to disable SmartMove in the configuration if you face strange performance issues when moving folders.

+
+
+
+

Linux

+
+

Install Cygwin and get your bash in windows with ssh-agent, awk, sed, tar, and all the tools you love (or hate). Windows 10 has already a Linux as an installable feature included: WSL and from Version 2004 on WSL2, which is a native Linux Kernel running on Windows (in a light weight VM).

+
+
+
+

X11

+
+

Do you want to connect via SSH and need to open an X11 app from the server? Do you want to see the GUI on your windows desktop? +No problem: Install VcXsrv.

+
+
+
+

Keyboard Freak

+
+

Are you a keyboard shortcut person? Do you want to have shortcuts for things like « and » ? +Then you should try AutoHotKey. +For the example (« and ») you can simply use this script to get started:

+
+
+
+
^<::Send {U+00AB}
+^+<::Send {U+00BB}
+
+
+
+

First, just press [ctrl][<] and [ctrl][>] ([ctrl][shift][<]). Next, create shortcuts to launch your IDE, to open your favorite tool, etc. +If you like a GUI to easily configure the scripts, that comes with a lot of extensions preinstalled, you should have a look at Ac’tive Aid.

+
+
+
+

Paint anywhere on your desktop

+
+

Do you collaborate sharing your screen, and want to mark a spot on top of what you see? Use Epic Pen to do just that.

+
+
+
+

Analyze graphs

+
+

Do you need to visualize complex graph structures? Convert them to Trivial Graph Format (.tgf), and run yEd to get an interactive visualization of your graph.

+
+
+
+

Up your screen capture game

+
+

Capture any part of your screen with a single click, directly upload to dropbox, or run a svn commit all in one go with Greenshot. Another screen capture tool where you can easily manage and edit your screenshots and also do screen recordings with is Screenpresso.

+
+
+
+

Fast Search in Windows

+
+

Everything is a desktop search utility for Windows that can rapidly find files and folders by name.

+
+ +
+
+

MacOS Tooling

+ +
+
+

Finder

+
+

If you want to open a terminal from a folder in Finder and automatically get your environment set properly for devonfw-ide you will find the perfect solution here.

+
+
+
+devonfw-ide integration in MacOS Finder +
+
+
+

So after installing (see below) the integration(s) provided here, you can easily open a terminal ready for your devonfw-ide:

+
+
+
    +
  • +

    right click ([control] + click) on file or folder in Finder

    +
  • +
  • +

    Expand the Quick-Actions sub-menu

    +
  • +
  • +

    Click on the desired action (e.g. Open devonfw-Terminal here)

    +
  • +
  • +

    Verify that your environment is properly initialized by invoking:

    +
    +
    +
    mvn -v
    +
    +
    +
  • +
+
+
+

To get this feature for macOS Terminal.app open Finder and run the workflow system/mac/terminal/Open_devonfw-Terminal_here.workflow (in ${DEVON_IDE_HOME}). For iTerm2.app (that can be installed from App Store) do the same with system/mac/iterm/Open_devonfw-iTerm_here.workflow.

+
+
+
+

Keyboard

+
+

Keyboard support is not an integration however, some users coming from other platforms may struggle with the way macOS deals with (external non-apple) keyboards. +So to make it short: if you are happy with your keyboard and shortcuts, you can skip all the following. +Otherwise, if you think that pressing keys like Home, End, etc. should just work as expected or pressing Alt Gr should allow you to type the special characters as printed on your German keyboard then here you will find a solution to your problems! +To get all automated you can just run the script system/mac/keyboard/install-mac-keyboard-support.sh (in ${DEVON_IDE_HOME}). +If you would like to understand what is going on, you want to customize the keyboard settings to your needs, or you want a keyboard layout other than German ISO, please read on.

+
+
+
+

Keyboard Layouts

+
+

Keyboard layouts allow a fine-grained mapping of each key on your keyboard to its resulting input character or behaviour. +They are macOS native features and do not need to have software running as a background service to make the keyboard mapping work (see Karabiner section below as an alternative). +They are provided as so called bundle (white lego brick icon). Like a macOS app this is a folder containing a Contents folder with a specific sub-folder structure. +In the Resources subfolder *.keylayout files are placed and define the exact mapping for the keyboard. +As an example we provide a Keyboard Layouts folder containing a bundle for a German keyboard mapping.

+
+
+

To install keyboard layouts simply double-click the bundle or copy it to ~/Library/Keyboard Layouts. +To actually use them go to System Preferences and select Keyboard. +Then, select the tab Input Sources. +With the + button you can add a keyboard layout for your daily usage with your Mac. +Please note that the keyboard layout shipped with devonfw-ide is called German-ISO and can be found in the Others section at the end of the list. +It can be used as an example or template, if you want to create your own layout.

+
+
+
+Keyboard Preferences / Input Sources +
+
+
+

When you have multiple mappings in place, on the top menu bar you will find a little icon next to the current time that allows you to switch between the keyboard layouts, which is very handy when you switch from your native MacBook keyboard to an external USB keyboard or vice versa. +Even for a pure MacOS geek this can be helpful in case a friend coming from Windows/Linux is supposed to type something on the Mac in a pair-programming session.

+
+
+

In our German keyboard mapping example you can use the keys like Alt Gr, etc. to type special characters as you would expect and as printed on your keyboard. +To make Pos1, End, etc. work properly across all apps please read on to the next section(s).

+
+
+

In case you would like to create your own keyboard layout you can of course edit the *.keylayout files in a text editor. +However, to make this much more comfortable, you can use the graphical editor tool Ukelele. +Besides, the app itself, the Ukelele dmg file, also contains a Documentation and a Resources folder. +The latter contains many keyboard layouts that you can use as a starting point.

+
+
+
+

Key Bindings

+
+

Still, various keyboard shortcuts might not work as expected for you. +Therefore, we provide you with an advanced configuration in the folder system/mac/keyboard/KeyBindings that you can copy to your ~/Library folder:

+
+
+
+
cd system/mac/keyboard/
+cp -r KeyBindings ~/Library
+
+
+
+

To make the changes work you need to log out and log in again or you can reboot. +After that, your Home (Pos1) and End buttons should work as expected including with selection via Shift and/or Command. +Also, you can use Command together with the left or right arrow key to move between words and combine it with Shift for selection. +As an example, for further customization you can press Command + < to type the unicode character «.

+
+
+

However, still some apps listen to keyboard events on a lower level and come with their own keyboard mappings. +In these apps you might still experience unexpected behaviour. +Solutions can be found in the following sub-sections.

+
+
+
+

Switch Control and Command

+
+

If you are used to windows or linux and get easily confused by the apple keyboard behaviour you might want to switch the Control and the Option key. +Open System Preferences and select Keyboard. +Then, in the first tab, click on the button Modifier Keys…​. +For every keyboard you can customize the behaviour of your modifier keys and therefore switch Control and Option as illustrated in the screenshot:

+
+
+
+Keyboard Preferences / Modifier Keys +
+
+
+

Programmers now should also disable that Control + Space is opening Spotlight Search as otherwise this shortcut can not be redefined in other apps like common IDEs.

+
+
+
+Keyboard Preferences / Shortcuts +
+
+
+
+

== Eclipse

+
+

In Eclipse, move and select by word as described above does not work. +Even worse, the most important shortcut does not work: Control + Space for code completion (content assist). +You can manually redefine the key bindings in Preferences under General > Keys. +However, with multiple IDE installations and workspaces this will quickly get tedious. +Therefore, you can Export and Import specific Preferences such as Keys Preferences to/from a *.epf (Eclipse PreFerences) file. +We have done all this for you so you can just import the file located in system/mac/keyboard/Eclipse/eclipse-mac-keybindings.epf into your Eclipse. +Happy coding.

+
+
+
+

Karabiner

+
+

If you want more dynamics and do not worry about an app that has to run in the background to make your keyboard work as you like (no relevant performance overhead), you can try Karabiner Elements. +This is a powerful tool to remap your keyboard shortcuts. +In the UI you can only directly create and edit Simple Modifications that are too limited for most use-cases. +However, using Complex Modifications you can do a lot of magic to customize the keyboard behaviour to your personal needs. +A key with any combination of modifiers can be mapped to any key with arbitrary modifiers. +This can also be bound to conditions based on the frontmost application or the keyboard model. +These complex modifications are configured as *.json files. +We have included a set with useful rules for external keyboards, programmer shortcuts, etc. +If you have Karabiner installed, you only need to copy the contents of the karabiner folder located in this directory to your ~/.config folder:

+
+
+
+
cd system/mac/keyboard/
+cp karabiner/assets/complex_modifications/*.json ~/.config/karabiner/assets/complex_modifications/
+
+
+
+

Now, if you open the Complex Modifications in the Karabiner app, you can click on the + Add rule button and will see these mappings in the pop up. +Select the rules you want to add (e.g. add all) and you are done. +Unlike other solutions, you can quickly tweak your keyboard without the need to log out and restart apps, which gives faster trial and error turnarounds. +Further, if you want to tweak your own configs, Karabiner comes with a secondary app called Karabiner-EventViewer that shows you the names of the keys, modifiers, and apps for the events you are triggering. +This is very helpful to get the config right.

+
+ +
+
+

Linux Tooling

+
+

There is nothing in this section so far. If you are a Linux user, please share your experience and provide your valuable hints.

+
+ +
+
+

Lombok

+
+

Even though not officially recommended by devon4j some projects want to use lombok in their project. +As this requires some tweaks for IDEs we do support you with this guide in case you want to use it.

+
+
+
+

Lombok in Eclipse

+
+

For eclipse there is a plugin to activate lombok support in eclipse. +We have this already configured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon eclipse add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for lombok based projects you only need to activate this plugin in your project specific settings in lombok.properties for eclipse (replace false with true for plugin_active).

+
+
+
+

Lombok for VS-Code

+
+

For VisualStudio Code there is an extension to activate lombok support in VS-Code. +We have this already preconfigured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon vscode add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for lombok based projects you only need to activate this plugin in your project specific settings in lombok.properties for vscode (replace false with true for plugin_active).

+
+
+
+

Lombok for IntelliJ

+
+

For IntelliJ there is a plugin to activate lombok support in IntelliJ. +Currently we have not yet configured or automated this in devonfw-ide. +Please contribute to change this. See issues #453 and #491.

+
+
+
+
+
+

Support

+ +
+

Migration from oasp4j-ide

+
+

The devonfw-ide is a completely new and innovative solution for managing the local development environment that has been created from scratch. +Releases of OASP as well as releases of devonfw until version 3.1.x are based on the old oasp4j-ide that is now considered deprecated. As devonfw-ide is a complete redesign this will have some impact for the users. This section should help and assist so you do not get lost.

+
+
+
+

Get familiar with devonfw-ide

+
+

First of all you should roughly get familiar with the new devonfw-ide. The key features and changes are:

+
+
+
    +
  • +

    platform-agnostic (supports Windows, Mac, and Linux in a single distribution)

    +
  • +
  • +

    small core (reduced the download package from ~2 gigabyte to ~2 megabyte)

    +
  • +
  • +

    fast and easy updates (built in update support)

    +
  • +
  • +

    minimum number of scripts (removed tons of end-user scripts making things much simpler)

    +
  • +
  • +

    fully automated setup (run setup script and you are ready - even for advanced features that had to be configured manually before)

    +
  • +
  • +

    single command for everything (entire CLI available via new devon command)

    +
  • +
+
+
+

For all the details you should study the documentation starting from the beginning.

+
+
+
+

Migration of existing oasp4j-ide installation

+
+
    +
  • +

    extract new devonfw-ide-scripts on top of your existing installation

    +
  • +
  • +

    run setup

    +
  • +
  • +

    done

    +
  • +
+
+
+

If you get errors:

+
+
+
    +
  • +

    ask your technical lead to fix the settings git repo for devonfw-ide or offer him to do it for you.

    +
  • +
  • +

    you need to merge the devon folder into your settings

    +
  • +
  • +

    you need to merge the devon.properties into your settings

    +
  • +
  • +

    you should check your variables[-customized][.bat] and merge required customizations into the proper configuration

    +
  • +
+
+
+
+

Hints for users after migration

+
+

Getting used to all the new commands might be tedious when starting after a migration.

+
+
+
Comparison of commands
+

|== == == == == == == == == == == = +|oasp4j-ide command|devonfw-ide command|Comment +|create-or-update-workspace|devon eclipse ws-update +.4+|actually not needed anymore as workspace is updated automatically when IDE is launched. To launch your IDE simply run devon eclipse, devon intellij, or devon vscode. If you like to get launch scripts for your IDE e.g. Eclipse just call devon eclipse --all create-script. +|create-or-update-workspace «workspace»|cd «workspace» && devon eclipse ws-update +|update-all-workspaces|devon eclipse --all ws-update +|create-or-update-workspace-vs|devon vscode ws-update

+
+
+

|devcon workspace create «workspace»|Simply create the «workspace» directory (e.g. cd workspaces && mkdir examples)|

+
+
+

|scripts/update-eclipse-workspace-settings|devon eclipse ws-reverse|To add new properties (old option --new) use devon eclipse ws-reverse-add

+
+
+

|devcon project build
+devcon devon4j build
+devcon devon4ng build +|devon build|

+
+
+

|devcon devon4j create|devon java create|

+
+
+

|devcon devon4ng create|devon ng create|

+
+
+

|devcon system *
+devcon dist * +|setup or devon ide setup|

+
+
+

|console.bat|-|Simply open terminal in selected folder. On Windows right-click folder in windows-explorer and select open devonfw CMD here.

+
+
+

|devcon help|devon help|

+
+
+

|devcon doc|Read the documentation from devonfw.com| +|== == == == == == == == == == == =

+
+
+ +
+
+

License

+
+

The product devonfw-ide is licensed under the following terms.

+
+
+

Binaries of this product have been made available to you by devonfw under the Apache Public License 2.0.

+
+
+

The documentation of this product is licensed under the terms of the Creative Commons License (Attribution-No Derivatives 4.0 International).

+
+
+

All of the source code to this product is available under licenses which are both free and open source.

+
+
+

More specifically, most of the source code is available under the Apache Public License 2.0. The remainder of the software which is not under the Apache license is available under one of a variety of other free and open source licenses. Those that require reproduction of the license text in the distribution are given below. (Note: your copy of this product may not contain code covered by one or more of the licenses listed here, depending on the exact product and version you choose.)

+
+
+

The following table shows the components that may be used. The column inclusion indicates the way the component is included:

+
+
+
    +
  • +

    directly included means the component is directly contained in the download package of devonfw-ide we provide

    +
  • +
  • +

    default setup means the component is not initially included but will be downloaded during the setup by default

    +
  • +
  • +

    optional means the component is neither initially included nor downloaded by default, but only gets downloaded and installed if explicitly triggered by you when invoking additional commands or if explicitly configured by your project.

    +
  • +
+
+
+
Third party components
+

|== == == == == == == == == == == = +|Component|Inclusion|License +|https://github.com/devonfw/ide[devonfw-ide] | Directly included |https://github.com/devonfw/ide/blob/master/LICENSE[ASL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] API | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] Implementation | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://openjdk.java.net/[OpenJDK] / AdoptOpenJDK (Java) |Default Setup| GPLv2 +|https://maven.apache.org/[Maven] | Default Setup|https://www.apache.org/licenses/LICENSE-2.0[ASL 2.0] +|https://code.visualstudio.com/[VS Code] |Optional| MIT (Terms) +|https://github.com/devonfw/extension-pack-vscode[extension-pack-vscode] |Optional|https://github.com/devonfw/extension-pack-vscode/blob/master/LICENSE[ASL 2.0] +|https://www.eclipse.org/[Eclipse] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] +|https://github.com/devonfw/cobigen[CobiGen] |Optional|https://github.com/devonfw/cobigen/blob/master/LICENSE.txt[ASL 2.0] +|https://marketplace.eclipse.org/content/tm-terminal[TM Terminal] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] (see here) +|https://github.com/iloveeclipse/anyedittools/[AnyEdit] |Optional|https://github.com/iloveeclipse/anyedittools/blob/master/LICENSE.md[EPL 1.0] +|https://checkstyle.org/eclipse-cs/[EclipseCS] |Optional|https://github.com/checkstyle/eclipse-cs/blob/master/LICENSE[LGPL 2.1] +|https://marketplace.eclipse.org/content/spotbugs-eclipse-plugin[SpotBugs Eclipse plugin] |Optional|https://github.com/spotbugs/spotbugs/blob/master/LICENSE[LGPL 2.1] +|https://www.eclemma.org/[EclEmma] |Optional|https://www.eclemma.org/license.html[EPL 1.0] +|https://basti1302.github.io/startexplorer/[StartExplorer] |Optional|http://www.wtfpl.net/txt/copying/[WTFPL 2] +|http://myregexp.com/eclipsePlugin.html[regex tester] 
|Optional|http://www.gnu.org/licenses/gpl-2.0.html[GPL 2.0] (see here) +|https://github.com/m-m-m/eclipse-templatevariables/[eclipse-templatevariables] |Optional|https://github.com/m-m-m/eclipse-templatevariables/blob/master/LICENSE.txt[ASL 2.0] +|https://nodejs.org/[Node.js] |Default Setup|https://raw.githubusercontent.com/nodejs/node/master/LICENSE[License] +|https://www.npmjs.com/[NPM] |Default Setup|https://github.com/npm/cli/blob/latest/LICENSE[Artistic License 2.0] (Terms) +|https://cli.angular.io/[Angular CLI] (ng) |Optional|https://cli.angular.io/license.html[MIT] +|http://groovy-lang.org/[Groovy]|Optional|https://github.com/apache/groovy/blob/master/LICENSE[ASL 2.0] +|https://ant.apache.org/[Apache Ant]|Optional|https://github.com/apache/ant/blob/master/LICENSE[ASL 2.0] +|https://gradle.org/[Gradle] |Optional|https://github.com/gradle/gradle/blob/master/LICENSE[ASL 2.0] +|https://jenkins.io/[Jenkins] |Optional|https://github.com/jenkinsci/jenkins/blob/master/LICENSE.txt[MIT] +|https://www.sonarsource.com/plans-and-pricing/community/[SonarQube (Community Edition)] |Optional|https://github.com/SonarSource/sonarqube/blob/master/LICENSE.txt[LGPL 3.0] +|https://www.sonarlint.org/eclipse/[SonarLint] |Optional|https://github.com/SonarSource/sonarlint-eclipse/blob/master/LICENSE.txt[LGPL 3+] +|https://github.com/devonfw/cicdgen[cicdgen] |Optional|https://github.com/devonfw/cicdgen/blob/develop/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4j[devon4j] |Optional|https://github.com/devonfw/devon4j/blob/develop/LICENSE[ASL 2.0] +|https://github.com/devonfw/devon4ng[devon4ng] |Optional|https://github.com/devonfw/devon4ng/blob/master/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4node[devon4node] |Optional|https://github.com/devonfw/devon4node/blob/develop/LICENSE.txt[ASL 2.0] +|https://www.jetbrains.com/idea/[IntelliJ IDEA] |Optional|https://www.jetbrains.com/opensource/idea/[ASL 2.0] +|http://www.jasypt.org/[jasypt] 
|Optional|http://www.jasypt.org/license.html[ASL 2.0] +|https://www.docker.com/[docker]|Optional|https://docs.docker.com/engine/#licensing[ASL 2.0] and EULA +|https://kubernetes.io/[kubernetes]|Optional|https://github.com/kubernetes/kubernetes/blob/master/LICENSE[ASL 2.0] +|== == == == == == == == == == == =

+
+
+
+

Apache Software License - Version 2.0

+
+
+
                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+
+
+

Eclipse Public License - Version 1.0

+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+
+1. DEFINITIONS
+
+"Contribution" means:
+
+a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and
+
+b) in the case of each subsequent Contributor:
+
+i) changes to the Program, and
+
+ii) additions to the Program;
+
+where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program.
+
+"Contributor" means any person or entity that distributes the Program.
+
+"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+"Program" means the Contributions distributed in accordance with this Agreement.
+
+"Recipient" means anyone who receives the Program under this Agreement, including all Contributors.
+
+2. GRANT OF RIGHTS
+
+a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form.
+
+b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+
+c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+
+d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+
+3. REQUIREMENTS
+
+A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that:
+
+a) it complies with the terms and conditions of this Agreement; and
+
+b) its license agreement:
+
+i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+
+ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+
+iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and
+
+iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange.
+
+When the Program is made available in source code form:
+
+a) it must be made available under this Agreement; and
+
+b) a copy of this Agreement must be included with each copy of the Program.
+
+Contributors may not remove or alter any copyright notices contained within the Program.
+
+Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution.
+
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved.
+
+This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation.
+
+
+
+
+

Eclipse Public License - Version 2.0

+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE (“AGREEMENT”). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+1. DEFINITIONS
+
+“Contribution” means:
+
+    a) in the case of the initial Contributor, the initial content Distributed under this Agreement, and
+    b) in the case of each subsequent Contributor:
+        i) changes to the Program, and
+        ii) additions to the Program;
+    where such changes and/or additions to the Program originate from and are Distributed by that particular Contributor. A Contribution “originates” from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include changes or additions to the Program that are not Modified Works.
+
+“Contributor” means any person or entity that Distributes the Program.
+
+“Licensed Patents” mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+“Program” means the Contributions Distributed in accordance with this Agreement.
+
+“Recipient” means anyone who receives the Program under this Agreement or any Secondary License (as applicable), including Contributors.
+
+“Derivative Works” shall mean any work, whether in Source Code or other form, that is based on (or derived from) the Program and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship.
+
+“Modified Works” shall mean any work in Source Code or other form that results from an addition to, deletion from, or modification of the contents of the Program, including, for purposes of clarity any new file in Source Code form that contains any contents of the Program. Modified Works shall not include works that contain only declarations, interfaces, types, classes, structures, or files of the Program solely in each case in order to link to, bind by name, or subclass the Program or Modified Works thereof.
+
+“Distribute” means the acts of a) distributing or b) making available in any manner that enables the transfer of a copy.
+
+“Source Code” means the form of a Program preferred for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+“Secondary License” means either the GNU General Public License, Version 2.0, or any later versions of that license, including any exceptions or additional permissions as identified by the initial Contributor.
+2. GRANT OF RIGHTS
+
+    a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, Distribute and sublicense the Contribution of such Contributor, if any, and such Derivative Works.
+    b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in Source Code or other form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+    c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to Distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+    d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+    e) Notwithstanding the terms of any Secondary License, no Contributor makes additional grants to any Recipient (other than those set forth in this Agreement) as a result of such Recipient's receipt of the Program under the terms of a Secondary License (if permitted under the terms of Section 3).
+
+3. REQUIREMENTS
+
+3.1 If a Contributor Distributes the Program in any form, then:
+
+    a) the Program must also be made available as Source Code, in accordance with section 3.2, and the Contributor must accompany the Program with a statement that the Source Code for the Program is available under this Agreement, and informs Recipients how to obtain it in a reasonable manner on or through a medium customarily used for software exchange; and
+    b) the Contributor may Distribute the Program under a license different than this Agreement, provided that such license:
+        i) effectively disclaims on behalf of all other Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+        ii) effectively excludes on behalf of all other Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+        iii) does not attempt to limit or alter the recipients' rights in the Source Code under section 3.2; and
+        iv) requires any subsequent distribution of the Program by any party to be under a license that satisfies the requirements of this section 3.
+
+3.2 When the Program is Distributed as Source Code:
+
+    a) it must be made available under this Agreement, or if the Program (i) is combined with other material in a separate file or files made available under a Secondary License, and (ii) the initial Contributor attached to the Source Code the notice described in Exhibit A of this Agreement, then the Program may be made available under the terms of such Secondary Licenses, and
+    b) a copy of this Agreement must be included with each copy of the Program.
+
+3.3 Contributors may not remove or alter any copyright, patent, trademark, attribution notices, disclaimers of warranty, or limitations of liability (‘notices’) contained within the Program from any copy of the Program which they Distribute, provided that Contributors may add their own appropriate notices.
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor (“Commercial Contributor”) hereby agrees to defend and indemnify every other Contributor (“Indemnified Contributor”) against any losses, damages and costs (collectively “Losses”) arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement, including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be Distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to Distribute the Program (including its Contributions) under the new version.
+
+Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. Nothing in this Agreement is intended to be enforceable by any entity that is not a Contributor or Recipient. No third-party beneficiary rights are created under this Agreement.
+Exhibit A – Form of Secondary Licenses Notice
+
+“This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License, v. 2.0 are satisfied: {name license(s), version(s), and exceptions or additional permissions here}.”
+
+    Simply including a copy of this Agreement, including this Exhibit A is not sufficient to license the Source Code under Secondary Licenses.
+
+    If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice.
+
+    You may add additional accurate notices of copyright ownership.
+
+
+
+
+

MIT License

+
+
+
Copyright <YEAR> <COPYRIGHT HOLDER>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+
+
+

Artistic License - Version 2.0

+
+
+
Copyright (c) 2000-2006, The Perl Foundation.
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+Preamble
+
+This license establishes the terms under which a given free software Package may be copied, modified, distributed, and/or redistributed. The intent is that the Copyright Holder maintains some artistic control over the development of that Package while still keeping the Package available as open source and free software.
+
+You are always permitted to make arrangements wholly outside of this license directly with the Copyright Holder of a given Package. If the terms of this license do not permit the full use that you propose to make of the Package, you should contact the Copyright Holder and seek a different licensing arrangement.
+Definitions
+
+"Copyright Holder" means the individual(s) or organization(s) named in the copyright notice for the entire Package.
+
+"Contributor" means any party that has contributed code or other material to the Package, in accordance with the Copyright Holder's procedures.
+
+"You" and "your" means any person who would like to copy, distribute, or modify the Package.
+
+"Package" means the collection of files distributed by the Copyright Holder, and derivatives of that collection and/or of those files. A given Package may consist of either the Standard Version, or a Modified Version.
+
+"Distribute" means providing a copy of the Package or making it accessible to anyone else, or in the case of a company or organization, to others outside of your company or organization.
+
+"Distributor Fee" means any fee that you charge for Distributing this Package or providing support for this Package to another party. It does not mean licensing fees.
+
+"Standard Version" refers to the Package if it has not been modified, or has been modified only in ways explicitly requested by the Copyright Holder.
+
+"Modified Version" means the Package, if it has been changed, and such changes were not explicitly requested by the Copyright Holder.
+
+"Original License" means this Artistic License as Distributed with the Standard Version of the Package, in its current version or as it may be modified by The Perl Foundation in the future.
+
+"Source" form means the source code, documentation source, and configuration files for the Package.
+
+"Compiled" form means the compiled bytecode, object code, binary, or any other form resulting from mechanical transformation or translation of the Source form.
+Permission for Use and Modification Without Distribution
+
+(1) You are permitted to use the Standard Version and create and use Modified Versions for any purpose without restriction, provided that you do not Distribute the Modified Version.
+Permissions for Redistribution of the Standard Version
+
+(2) You may Distribute verbatim copies of the Source form of the Standard Version of this Package in any medium without restriction, either gratis or for a Distributor Fee, provided that you duplicate all of the original copyright notices and associated disclaimers. At your discretion, such verbatim copies may or may not include a Compiled form of the Package.
+
+(3) You may apply any bug fixes, portability changes, and other modifications made available from the Copyright Holder. The resulting Package will still be considered the Standard Version, and as such will be subject to the Original License.
+Distribution of Modified Versions of the Package as Source
+
+(4) You may Distribute your Modified Version as Source (either gratis or for a Distributor Fee, and with or without a Compiled form of the Modified Version) provided that you clearly document how it differs from the Standard Version, including, but not limited to, documenting any non-standard features, executables, or modules, and provided that you do at least ONE of the following:
+
+(a) make the Modified Version available to the Copyright Holder of the Standard Version, under the Original License, so that the Copyright Holder may include your modifications in the Standard Version.
+(b) ensure that installation of your Modified Version does not prevent the user installing or running the Standard Version. In addition, the Modified Version must bear a name that is different from the name of the Standard Version.
+(c) allow anyone who receives a copy of the Modified Version to make the Source form of the Modified Version available to others under
+(i) the Original License or
+(ii) a license that permits the licensee to freely copy, modify and redistribute the Modified Version using the same licensing terms that apply to the copy that the licensee received, and requires that the Source form of the Modified Version, and of any works derived from it, be made freely available in that license fees are prohibited but Distributor Fees are allowed.
+Distribution of Compiled Forms of the Standard Version or Modified Versions without the Source
+
+(5) You may Distribute Compiled forms of the Standard Version without the Source, provided that you include complete instructions on how to get the Source of the Standard Version. Such instructions must be valid at the time of your distribution. If these instructions, at any time while you are carrying out such distribution, become invalid, you must provide new instructions on demand or cease further distribution. If you provide valid instructions or cease distribution within thirty days after you become aware that the instructions are invalid, then you do not forfeit any of your rights under this license.
+
+(6) You may Distribute a Modified Version in Compiled form without the Source, provided that you comply with Section 4 with respect to the Source of the Modified Version.
+Aggregating or Linking the Package
+
+(7) You may aggregate the Package (either the Standard Version or Modified Version) with other packages and Distribute the resulting aggregation provided that you do not charge a licensing fee for the Package. Distributor Fees are permitted, and licensing fees for other components in the aggregation are permitted. The terms of this license apply to the use and Distribution of the Standard or Modified Versions as included in the aggregation.
+
+(8) You are permitted to link Modified and Standard Versions with other works, to embed the Package in a larger work of your own, or to build stand-alone binary or bytecode versions of applications that include the Package, and Distribute the result without restriction, provided the result does not expose a direct interface to the Package.
+Items That are Not Considered Part of a Modified Version
+
+(9) Works (including, but not limited to, modules and scripts) that merely extend or make use of the Package, do not, by themselves, cause the Package to be a Modified Version. In addition, such works are not considered parts of the Package itself, and are not subject to the terms of this license.
+General Provisions
+
+(10) Any use, modification, and distribution of the Standard or Modified Versions is governed by this Artistic License. By using, modifying or distributing the Package, you accept this license. Do not use, modify, or distribute the Package, if you do not accept this license.
+
+(11) If your Modified Version has been derived from a Modified Version made by someone other than you, you are nevertheless required to ensure that your Modified Version complies with the requirements of this license.
+
+(12) This license does not grant you the right to use any trademark, service mark, tradename, or logo of the Copyright Holder.
+
+(13) This license includes the non-exclusive, worldwide, free-of-charge patent license to make, have made, use, offer to sell, sell, import and otherwise transfer the Package with respect to any patent claims licensable by the Copyright Holder that are necessarily infringed by the Package. If you institute patent litigation (including a cross-claim or counterclaim) against any party alleging that the Package constitutes direct or contributory patent infringement, then this Artistic License to you shall terminate on the date that such litigation is filed.
+
+(14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+
+

Creative Commons License - Attribution-NoDerivatives 4.0 International

+
+
+
By exercising the Licensed Rights (defined below), You accept and agree to be bound by the terms and conditions of this Creative Commons Attribution-NoDerivatives 4.0 International Public License ("Public License"). To the extent this Public License may be interpreted as a contract, You are granted the Licensed Rights in consideration of Your acceptance of these terms and conditions, and the Licensor grants You such rights in consideration of benefits the Licensor receives from making the Licensed Material available under these terms and conditions.
+
+Section 1 – Definitions.
+
+    Adapted Material means material subject to Copyright and Similar Rights that is derived from or based upon the Licensed Material and in which the Licensed Material is translated, altered, arranged, transformed, or otherwise modified in a manner requiring permission under the Copyright and Similar Rights held by the Licensor. For purposes of this Public License, where the Licensed Material is a musical work, performance, or sound recording, Adapted Material is always produced where the Licensed Material is synched in timed relation with a moving image.
+    Copyright and Similar Rights means copyright and/or similar rights closely related to copyright including, without limitation, performance, broadcast, sound recording, and Sui Generis Database Rights, without regard to how the rights are labeled or categorized. For purposes of this Public License, the rights specified in Section 2(b)(1)-(2) are not Copyright and Similar Rights.
+    Effective Technological Measures means those measures that, in the absence of proper authority, may not be circumvented under laws fulfilling obligations under Article 11 of the WIPO Copyright Treaty adopted on December 20, 1996, and/or similar international agreements.
+    Exceptions and Limitations means fair use, fair dealing, and/or any other exception or limitation to Copyright and Similar Rights that applies to Your use of the Licensed Material.
+    Licensed Material means the artistic or literary work, database, or other material to which the Licensor applied this Public License.
+    Licensed Rights means the rights granted to You subject to the terms and conditions of this Public License, which are limited to all Copyright and Similar Rights that apply to Your use of the Licensed Material and that the Licensor has authority to license.
+    Licensor means the individual(s) or entity(ies) granting rights under this Public License.
+    Share means to provide material to the public by any means or process that requires permission under the Licensed Rights, such as reproduction, public display, public performance, distribution, dissemination, communication, or importation, and to make material available to the public including in ways that members of the public may access the material from a place and at a time individually chosen by them.
+    Sui Generis Database Rights means rights other than copyright resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other essentially equivalent rights anywhere in the world.
+    You means the individual or entity exercising the Licensed Rights under this Public License. Your has a corresponding meaning.
+
+Section 2 – Scope.
+
+    License grant.
+        Subject to the terms and conditions of this Public License, the Licensor hereby grants You a worldwide, royalty-free, non-sublicensable, non-exclusive, irrevocable license to exercise the Licensed Rights in the Licensed Material to:
+            reproduce and Share the Licensed Material, in whole or in part; and
+            produce and reproduce, but not Share, Adapted Material.
+        Exceptions and Limitations. For the avoidance of doubt, where Exceptions and Limitations apply to Your use, this Public License does not apply, and You do not need to comply with its terms and conditions.
+        Term. The term of this Public License is specified in Section 6(a).
+        Media and formats; technical modifications allowed. The Licensor authorizes You to exercise the Licensed Rights in all media and formats whether now known or hereafter created, and to make technical modifications necessary to do so. The Licensor waives and/or agrees not to assert any right or authority to forbid You from making technical modifications necessary to exercise the Licensed Rights, including technical modifications necessary to circumvent Effective Technological Measures. For purposes of this Public License, simply making modifications authorized by this Section 2(a)(4) never produces Adapted Material.
+        Downstream recipients.
+            Offer from the Licensor – Licensed Material. Every recipient of the Licensed Material automatically receives an offer from the Licensor to exercise the Licensed Rights under the terms and conditions of this Public License.
+            No downstream restrictions. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, the Licensed Material if doing so restricts exercise of the Licensed Rights by any recipient of the Licensed Material.
+        No endorsement. Nothing in this Public License constitutes or may be construed as permission to assert or imply that You are, or that Your use of the Licensed Material is, connected with, or sponsored, endorsed, or granted official status by, the Licensor or others designated to receive attribution as provided in Section 3(a)(1)(A)(i).
+
+    Other rights.
+        Moral rights, such as the right of integrity, are not licensed under this Public License, nor are publicity, privacy, and/or other similar personality rights; however, to the extent possible, the Licensor waives and/or agrees not to assert any such rights held by the Licensor to the limited extent necessary to allow You to exercise the Licensed Rights, but not otherwise.
+        Patent and trademark rights are not licensed under this Public License.
+        To the extent possible, the Licensor waives any right to collect royalties from You for the exercise of the Licensed Rights, whether directly or through a collecting society under any voluntary or waivable statutory or compulsory licensing scheme. In all other cases the Licensor expressly reserves any right to collect such royalties.
+
+Section 3 – License Conditions.
+
+Your exercise of the Licensed Rights is expressly made subject to the following conditions.
+
+    Attribution.
+
+        If You Share the Licensed Material, You must:
+            retain the following if it is supplied by the Licensor with the Licensed Material:
+                identification of the creator(s) of the Licensed Material and any others designated to receive attribution, in any reasonable manner requested by the Licensor (including by pseudonym if designated);
+                a copyright notice;
+                a notice that refers to this Public License;
+                a notice that refers to the disclaimer of warranties;
+                a URI or hyperlink to the Licensed Material to the extent reasonably practicable;
+            indicate if You modified the Licensed Material and retain an indication of any previous modifications; and
+            indicate the Licensed Material is licensed under this Public License, and include the text of, or the URI or hyperlink to, this Public License.
+        For the avoidance of doubt, You do not have permission under this Public License to Share Adapted Material.
+        You may satisfy the conditions in Section 3(a)(1) in any reasonable manner based on the medium, means, and context in which You Share the Licensed Material. For example, it may be reasonable to satisfy the conditions by providing a URI or hyperlink to a resource that includes the required information.
+        If requested by the Licensor, You must remove any of the information required by Section 3(a)(1)(A) to the extent reasonably practicable.
+
+Section 4 – Sui Generis Database Rights.
+
+Where the Licensed Rights include Sui Generis Database Rights that apply to Your use of the Licensed Material:
+
+    for the avoidance of doubt, Section 2(a)(1) grants You the right to extract, reuse, reproduce, and Share all or a substantial portion of the contents of the database, provided You do not Share Adapted Material;
+    if You include all or a substantial portion of the database contents in a database in which You have Sui Generis Database Rights, then the database in which You have Sui Generis Database Rights (but not its individual contents) is Adapted Material; and
+    You must comply with the conditions in Section 3(a) if You Share all or a substantial portion of the contents of the database.
+
+For the avoidance of doubt, this Section 4 supplements and does not replace Your obligations under this Public License where the Licensed Rights include other Copyright and Similar Rights.
+
+Section 5 – Disclaimer of Warranties and Limitation of Liability.
+
+    Unless otherwise separately undertaken by the Licensor, to the extent possible, the Licensor offers the Licensed Material as-is and as-available, and makes no representations or warranties of any kind concerning the Licensed Material, whether express, implied, statutory, or other. This includes, without limitation, warranties of title, merchantability, fitness for a particular purpose, non-infringement, absence of latent or other defects, accuracy, or the presence or absence of errors, whether or not known or discoverable. Where disclaimers of warranties are not allowed in full or in part, this disclaimer may not apply to You.
+    To the extent possible, in no event will the Licensor be liable to You on any legal theory (including, without limitation, negligence) or otherwise for any direct, special, indirect, incidental, consequential, punitive, exemplary, or other losses, costs, expenses, or damages arising out of this Public License or use of the Licensed Material, even if the Licensor has been advised of the possibility of such losses, costs, expenses, or damages. Where a limitation of liability is not allowed in full or in part, this limitation may not apply to You.
+
+    The disclaimer of warranties and limitation of liability provided above shall be interpreted in a manner that, to the extent possible, most closely approximates an absolute disclaimer and waiver of all liability.
+
+Section 6 – Term and Termination.
+
+    This Public License applies for the term of the Copyright and Similar Rights licensed here. However, if You fail to comply with this Public License, then Your rights under this Public License terminate automatically.
+
+    Where Your right to use the Licensed Material has terminated under Section 6(a), it reinstates:
+        automatically as of the date the violation is cured, provided it is cured within 30 days of Your discovery of the violation; or
+        upon express reinstatement by the Licensor.
+    For the avoidance of doubt, this Section 6(b) does not affect any right the Licensor may have to seek remedies for Your violations of this Public License.
+    For the avoidance of doubt, the Licensor may also offer the Licensed Material under separate terms or conditions or stop distributing the Licensed Material at any time; however, doing so will not terminate this Public License.
+    Sections 1, 5, 6, 7, and 8 survive termination of this Public License.
+
+Section 7 – Other Terms and Conditions.
+
+    The Licensor shall not be bound by any additional or different terms or conditions communicated by You unless expressly agreed.
+    Any arrangements, understandings, or agreements regarding the Licensed Material not stated herein are separate from and independent of the terms and conditions of this Public License.
+
+Section 8 – Interpretation.
+
+    For the avoidance of doubt, this Public License does not, and shall not be interpreted to, reduce, limit, restrict, or impose conditions on any use of the Licensed Material that could lawfully be made without permission under this Public License.
+    To the extent possible, if any provision of this Public License is deemed unenforceable, it shall be automatically reformed to the minimum extent necessary to make it enforceable. If the provision cannot be reformed, it shall be severed from this Public License without affecting the enforceability of the remaining terms and conditions.
+    No term or condition of this Public License will be waived and no failure to comply consented to unless expressly agreed to by the Licensor.
+    Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority.
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 2.1

+
+
+
 Version 2.1, February 1999
+
+Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL.  It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users.
+
+This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below.
+
+When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things.
+
+To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it.
+
+For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights.
+
+We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library.
+
+To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others.
+
+Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license.
+
+Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs.
+
+When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library.
+
+We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances.
+
+For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License.
+
+In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system.
+
+Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library.
+
+The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you".
+
+A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables.
+
+The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".)
+
+"Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library.
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does.
+
+1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) The modified work must itself be a software library.
+    b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change.
+    c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License.
+    d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful.
+
+    (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices.
+
+Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy.
+
+This option is useful when you wish to copy part of the code of the Library into a program that is not a library.
+
+4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange.
+
+If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code.
+
+5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License.
+
+However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables.
+
+When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law.
+
+If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.)
+
+Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself.
+
+6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications.
+
+You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things:
+
+    a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.)
+    b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with.
+    c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution.
+    d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place.
+    e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy.
+
+For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute.
+
+7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above.
+    b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it.
+
+10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License.
+
+11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation.
+
+14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Libraries
+
+If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License).
+
+To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the library's name and an idea of what it does.
+Copyright (C) year  name of author
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2.1 of the License, or (at your option) any later version.
+
+This library is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with this library; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the library, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright interest in
+the library `Frob' (a library for tweaking knobs) written
+by James Random Hacker.
+
+signature of Ty Coon, 1 April 1990
+Ty Coon, President of Vice
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 3

+
+
+
Version 3, 29 June 2007
+
+Copyright © 2007 Free Software Foundation, Inc. <https://fsf.org/>
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+
+This version of the GNU Lesser General Public License incorporates the terms and conditions of version 3 of the GNU General Public License, supplemented by the additional permissions listed below.
+0. Additional Definitions.
+
+As used herein, “this License” refers to version 3 of the GNU Lesser General Public License, and the “GNU GPL” refers to version 3 of the GNU General Public License.
+
+“The Library” refers to a covered work governed by this License, other than an Application or a Combined Work as defined below.
+
+An “Application” is any work that makes use of an interface provided by the Library, but which is not otherwise based on the Library. Defining a subclass of a class defined by the Library is deemed a mode of using an interface provided by the Library.
+
+A “Combined Work” is a work produced by combining or linking an Application with the Library. The particular version of the Library with which the Combined Work was made is also called the “Linked Version”.
+
+The “Minimal Corresponding Source” for a Combined Work means the Corresponding Source for the Combined Work, excluding any source code for portions of the Combined Work that, considered in isolation, are based on the Application, and not on the Linked Version.
+
+The “Corresponding Application Code” for a Combined Work means the object code and/or source code for the Application, including any data and utility programs needed for reproducing the Combined Work from the Application, but excluding the System Libraries of the Combined Work.
+1. Exception to Section 3 of the GNU GPL.
+
+You may convey a covered work under sections 3 and 4 of this License without being bound by section 3 of the GNU GPL.
+2. Conveying Modified Versions.
+
+If you modify a copy of the Library, and, in your modifications, a facility refers to a function or data to be supplied by an Application that uses the facility (other than as an argument passed when the facility is invoked), then you may convey a copy of the modified version:
+
+    a) under this License, provided that you make a good faith effort to ensure that, in the event an Application does not supply the function or data, the facility still operates, and performs whatever part of its purpose remains meaningful, or
+    b) under the GNU GPL, with none of the additional permissions of this License applicable to that copy.
+
+3. Object Code Incorporating Material from Library Header Files.
+
+The object code form of an Application may incorporate material from a header file that is part of the Library. You may convey such object code under terms of your choice, provided that, if the incorporated material is not limited to numerical parameters, data structure layouts and accessors, or small macros, inline functions and templates (ten or fewer lines in length), you do both of the following:
+
+    a) Give prominent notice with each copy of the object code that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the object code with a copy of the GNU GPL and this license document.
+
+4. Combined Works.
+
+You may convey a Combined Work under terms of your choice that, taken together, effectively do not restrict modification of the portions of the Library contained in the Combined Work and reverse engineering for debugging such modifications, if you also do each of the following:
+
+    a) Give prominent notice with each copy of the Combined Work that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the Combined Work with a copy of the GNU GPL and this license document.
+    c) For a Combined Work that displays copyright notices during execution, include the copyright notice for the Library among these notices, as well as a reference directing the user to the copies of the GNU GPL and this license document.
+    d) Do one of the following:
+        0) Convey the Minimal Corresponding Source under the terms of this License, and the Corresponding Application Code in a form suitable for, and under terms that permit, the user to recombine or relink the Application with a modified version of the Linked Version to produce a modified Combined Work, in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.
+        1) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (a) uses at run time a copy of the Library already present on the user's computer system, and (b) will operate properly with a modified version of the Library that is interface-compatible with the Linked Version.
+    e) Provide Installation Information, but only if you would otherwise be required to provide such information under section 6 of the GNU GPL, and only to the extent that such information is necessary to install and execute a modified version of the Combined Work produced by recombining or relinking the Application with a modified version of the Linked Version. (If you use option 4d0, the Installation Information must accompany the Minimal Corresponding Source and Corresponding Application Code. If you use option 4d1, you must provide the Installation Information in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.)
+
+5. Combined Libraries.
+
+You may place library facilities that are a work based on the Library side by side in a single library together with other library facilities that are not Applications and are not covered by this License, and convey such a combined library under terms of your choice, if you do both of the following:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities, conveyed under the terms of this License.
+    b) Give prominent notice with the combined library that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+6. Revised Versions of the GNU Lesser General Public License.
+
+The Free Software Foundation may publish revised and/or new versions of the GNU Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library as you received it specifies that a certain numbered version of the GNU Lesser General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that published version or of any later version published by the Free Software Foundation. If the Library as you received it does not specify a version number of the GNU Lesser General Public License, you may choose any version of the GNU Lesser General Public License ever published by the Free Software Foundation.
+
+If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library.
+
+
+
+
+

GNU GENERAL PUBLIC LICENSE - Version 2

+
+
+
 Version 2, June 1991
+
+Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA
+
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too.
+
+When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things.
+
+To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it.
+
+For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights.
+
+We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software.
+
+Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations.
+
+Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all.
+
+The precise terms and conditions for copying, distribution and modification follow.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does.
+
+1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change.
+    b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License.
+    c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code.
+
+4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it.
+
+6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License.
+
+7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation.
+
+10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Programs
+
+If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms.
+
+To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the program's name and an idea of what it does.
+Copyright (C) yyyy  name of author
+
+This program is free software; you can redistribute it and/or
+modify it under the terms of the GNU General Public License
+as published by the Free Software Foundation; either version 2
+of the License, or (at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this when it starts in an interactive mode:
+
+Gnomovision version 69, Copyright (C) year name of author
+Gnomovision comes with ABSOLUTELY NO WARRANTY; for details
+type `show w'.  This is free software, and you are welcome
+to redistribute it under certain conditions; type `show c'
+for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright
+interest in the program `Gnomovision'
+(which makes passes at compilers) written
+by James Hacker.
+
+signature of Ty Coon, 1 April 1989
+Ty Coon, President of Vice
+
+
+
+
+

DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2

+
+
+
            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+                    Version 2, December 2004
+
+ Copyright (C) 2004 Sam Hocevar
+  14 rue de Plaisance, 75014 Paris, France
+ Everyone is permitted to copy and distribute verbatim or modified
+ copies of this license document, and changing it is allowed as long
+ as the name is changed.
+
+            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. You just DO WHAT THE FUCK YOU WANT TO.
+
+
+
+
+

License of Node.js

+
+
+
Node.js is licensed for use as follows:
+
+"""
+Copyright Node.js contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+This license applies to parts of Node.js originating from the
+https://github.com/joyent/node repository:
+
+"""
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+The Node.js license applies to all parts of Node.js that are not externally
+maintained libraries.
+
+The externally maintained libraries used by Node.js are:
+
+- Acorn, located at deps/acorn, is licensed as follows:
+  """
+    Copyright (C) 2012-2018 by various contributors (see AUTHORS)
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- Acorn plugins, located at deps/acorn-plugins, is licensed as follows:
+  """
+    Copyright (C) 2017-2018 by Adrian Heine
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- c-ares, located at deps/cares, is licensed as follows:
+  """
+    Copyright (c) 2007 - 2018, Daniel Stenberg with many contributors, see AUTHORS
+    file.
+
+    Copyright 1998 by the Massachusetts Institute of Technology.
+
+    Permission to use, copy, modify, and distribute this software and its
+    documentation for any purpose and without fee is hereby granted, provided that
+    the above copyright notice appear in all copies and that both that copyright
+    notice and this permission notice appear in supporting documentation, and that
+    the name of M.I.T. not be used in advertising or publicity pertaining to
+    distribution of the software without specific, written prior permission.
+    M.I.T. makes no representations about the suitability of this software for any
+    purpose.  It is provided "as is" without express or implied warranty.
+  """
+
+- ICU, located at deps/icu-small, is licensed as follows:
+  """
+    COPYRIGHT AND PERMISSION NOTICE (ICU 58 and later)
+
+    Copyright © 1991-2019 Unicode, Inc. All rights reserved.
+    Distributed under the Terms of Use in https://www.unicode.org/copyright.html.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of the Unicode data files and any associated documentation
+    (the "Data Files") or Unicode software and any associated documentation
+    (the "Software") to deal in the Data Files or Software
+    without restriction, including without limitation the rights to use,
+    copy, modify, merge, publish, distribute, and/or sell copies of
+    the Data Files or Software, and to permit persons to whom the Data Files
+    or Software are furnished to do so, provided that either
+    (a) this copyright and permission notice appear with all copies
+    of the Data Files or Software, or
+    (b) this copyright and permission notice appear in associated
+    Documentation.
+
+    THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
+    ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT OF THIRD PARTY RIGHTS.
+    IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
+    NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
+    DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+    DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+    TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+    PERFORMANCE OF THE DATA FILES OR SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale,
+    use or other dealings in these Data Files or Software without prior
+    written authorization of the copyright holder.
+
+    ---------------------
+
+    Third-Party Software Licenses
+
+    This section contains third-party software notices and/or additional
+    terms for licensed third-party software components included within ICU
+    libraries.
+
+    1. ICU License - ICU 1.8.1 to ICU 57.1
+
+    COPYRIGHT AND PERMISSION NOTICE
+
+    Copyright (c) 1995-2016 International Business Machines Corporation and others
+    All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, and/or sell copies of the Software, and to permit persons
+    to whom the Software is furnished to do so, provided that the above
+    copyright notice(s) and this permission notice appear in all copies of
+    the Software and that both the above copyright notice(s) and this
+    permission notice appear in supporting documentation.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
+    OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
+    HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY
+    SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER
+    RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
+    CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+    CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale, use
+    or other dealings in this Software without prior written authorization
+    of the copyright holder.
+
+    All trademarks and registered trademarks mentioned herein are the
+    property of their respective owners.
+
+    2. Chinese/Japanese Word Break Dictionary Data (cjdict.txt)
+
+     #     The Google Chrome software developed by Google is licensed under
+     # the BSD license. Other software included in this distribution is
+     # provided under other licenses, as set forth below.
+     #
+     #  The BSD License
+     #  http://opensource.org/licenses/bsd-license.php
+     #  Copyright (C) 2006-2008, Google Inc.
+     #
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     # modification, are permitted provided that the following conditions are met:
+     #
+     #  Redistributions of source code must retain the above copyright notice,
+     # this list of conditions and the following disclaimer.
+     #  Redistributions in binary form must reproduce the above
+     # copyright notice, this list of conditions and the following
+     # disclaimer in the documentation and/or other materials provided with
+     # the distribution.
+     #  Neither the name of  Google Inc. nor the names of its
+     # contributors may be used to endorse or promote products derived from
+     # this software without specific prior written permission.
+     #
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+     # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+     # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+     # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+     # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+     # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+     #
+     #
+     #  The word list in cjdict.txt are generated by combining three word lists
+     # listed below with further processing for compound word breaking. The
+     # frequency is generated with an iterative training against Google web
+     # corpora.
+     #
+     #  * Libtabe (Chinese)
+     #    - https://sourceforge.net/project/?group_id=1519
+     #    - Its license terms and conditions are shown below.
+     #
+     #  * IPADIC (Japanese)
+     #    - http://chasen.aist-nara.ac.jp/chasen/distribution.html
+     #    - Its license terms and conditions are shown below.
+     #
+     #  ---------COPYING.libtabe ---- BEGIN--------------------
+     #
+     #  /*
+     #   * Copyright (c) 1999 TaBE Project.
+     #   * Copyright (c) 1999 Pai-Hsiang Hsiao.
+     #   * All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the TaBE Project nor the names of its
+     #   *   contributors may be used to endorse or promote products derived
+     #   *   from this software without specific prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  /*
+     #   * Copyright (c) 1999 Computer Systems and Communication Lab,
+     #   *                    Institute of Information Science, Academia
+     #       *                    Sinica. All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the Computer Systems and Communication Lab
+     #   *   nor the names of its contributors may be used to endorse or
+     #   *   promote products derived from this software without specific
+     #   *   prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  Copyright 1996 Chih-Hao Tsai @ Beckman Institute,
+     #      University of Illinois
+     #  c-tsai4@uiuc.edu  http://casper.beckman.uiuc.edu/~c-tsai4
+     #
+     #  ---------------COPYING.libtabe-----END--------------------------------
+     #
+     #
+     #  ---------------COPYING.ipadic-----BEGIN-------------------------------
+     #
+     #  Copyright 2000, 2001, 2002, 2003 Nara Institute of Science
+     #  and Technology.  All Rights Reserved.
+     #
+     #  Use, reproduction, and distribution of this software is permitted.
+     #  Any copy of this software, whether in its original form or modified,
+     #  must include both the above copyright notice and the following
+     #  paragraphs.
+     #
+     #  Nara Institute of Science and Technology (NAIST),
+     #  the copyright holders, disclaims all warranties with regard to this
+     #  software, including all implied warranties of merchantability and
+     #  fitness, in no event shall NAIST be liable for
+     #  any special, indirect or consequential damages or any damages
+     #  whatsoever resulting from loss of use, data or profits, whether in an
+     #  action of contract, negligence or other tortuous action, arising out
+     #  of or in connection with the use or performance of this software.
+     #
+     #  A large portion of the dictionary entries
+     #  originate from ICOT Free Software.  The following conditions for ICOT
+     #  Free Software applies to the current dictionary as well.
+     #
+     #  Each User may also freely distribute the Program, whether in its
+     #  original form or modified, to any third party or parties, PROVIDED
+     #  that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear
+     #  on, or be attached to, the Program, which is distributed substantially
+     #  in the same form as set out herein and that such intended
+     #  distribution, if actually made, will neither violate or otherwise
+     #  contravene any of the laws and regulations of the countries having
+     #  jurisdiction over the User or the intended distribution itself.
+     #
+     #  NO WARRANTY
+     #
+     #  The program was produced on an experimental basis in the course of the
+     #  research and development conducted during the project and is provided
+     #  to users as so produced on an experimental basis.  Accordingly, the
+     #  program is provided without any warranty whatsoever, whether express,
+     #  implied, statutory or otherwise.  The term "warranty" used herein
+     #  includes, but is not limited to, any warranty of the quality,
+     #  performance, merchantability and fitness for a particular purpose of
+     #  the program and the nonexistence of any infringement or violation of
+     #  any right of any third party.
+     #
+     #  Each user of the program will agree and understand, and be deemed to
+     #  have agreed and understood, that there is no warranty whatsoever for
+     #  the program and, accordingly, the entire risk arising from or
+     #  otherwise connected with the program is assumed by the user.
+     #
+     #  Therefore, neither ICOT, the copyright holder, or any other
+     #  organization that participated in or was otherwise related to the
+     #  development of the program and their respective officials, directors,
+     #  officers and other employees shall be held liable for any and all
+     #  damages, including, without limitation, general, special, incidental
+     #  and consequential damages, arising out of or otherwise in connection
+     #  with the use or inability to use the program or any product, material
+     #  or result produced or otherwise obtained by using the program,
+     #  regardless of whether they have been advised of, or otherwise had
+     #  knowledge of, the possibility of such damages at any time during the
+     #  project or thereafter.  Each user will be deemed to have agreed to the
+     #  foregoing by his or her commencement of use of the program.  The term
+     #  "use" as used herein includes, but is not limited to, the use,
+     #  modification, copying and distribution of the program and the
+     #  production of secondary products from the program.
+     #
+     #  In the case where the program, whether in its original form or
+     #  modified, was distributed or delivered to or received by a user from
+     #  any person, organization or entity other than ICOT, unless it makes or
+     #  grants independently of ICOT any specific warranty to the user in
+     #  writing, such person, organization or entity, will also be exempted
+     #  from and not be held liable to the user for any such damages as noted
+     #  above as far as the program is concerned.
+     #
+     #  ---------------COPYING.ipadic-----END----------------------------------
+
+    3. Lao Word Break Dictionary Data (laodict.txt)
+
+     #  Copyright (c) 2013 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     # Project: http://code.google.com/p/lao-dictionary/
+     # Dictionary: http://lao-dictionary.googlecode.com/git/Lao-Dictionary.txt
+     # License: http://lao-dictionary.googlecode.com/git/Lao-Dictionary-LICENSE.txt
+     #              (copied below)
+     #
+     #  This file is derived from the above dictionary, with slight
+     #  modifications.
+     #  ----------------------------------------------------------------------
+     #  Copyright (C) 2013 Brian Eugene Wilson, Robert Martin Campbell.
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification,
+     #  are permitted provided that the following conditions are met:
+     #
+     #
+     # Redistributions of source code must retain the above copyright notice, this
+     #  list of conditions and the following disclaimer. Redistributions in
+     #  binary form must reproduce the above copyright notice, this list of
+     #  conditions and the following disclaimer in the documentation and/or
+     #  other materials provided with the distribution.
+     #
+     #
+     # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
+     # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     # OF THE POSSIBILITY OF SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    4. Burmese Word Break Dictionary Data (burmesedict.txt)
+
+     #  Copyright (c) 2014 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     #  This list is part of a project hosted at:
+     #    github.com/kanyawtech/myanmar-karen-word-lists
+     #
+     #  --------------------------------------------------------------------------
+     #  Copyright (c) 2013, LeRoy Benjamin Sharon
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification, are permitted provided that the following conditions
+     #  are met: Redistributions of source code must retain the above
+     #  copyright notice, this list of conditions and the following
+     #  disclaimer.  Redistributions in binary form must reproduce the
+     #  above copyright notice, this list of conditions and the following
+     #  disclaimer in the documentation and/or other materials provided
+     #  with the distribution.
+     #
+     #    Neither the name Myanmar Karen Word Lists, nor the names of its
+     #    contributors may be used to endorse or promote products derived
+     #    from this software without specific prior written permission.
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     #  CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     #  INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     #  MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     #  DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
+     #  BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+     #  EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+     #  TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+     #  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+     #  ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
+     #  TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
+     #  THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+     #  SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    5. Time Zone Database
+
+      ICU uses the public domain data and code derived from Time Zone
+    Database for its time zone support. The ownership of the TZ database
+    is explained in BCP 175: Procedure for Maintaining the Time Zone
+    Database section 7.
+
+     # 7.  Database Ownership
+     #
+     #    The TZ database itself is not an IETF Contribution or an IETF
+     #    document.  Rather it is a pre-existing and regularly updated work
+     #    that is in the public domain, and is intended to remain in the
+     #    public domain.  Therefore, BCPs 78 [RFC5378] and 79 [RFC3979] do
+     #    not apply to the TZ Database or contributions that individuals make
+     #    to it.  Should any claims be made and substantiated against the TZ
+     #    Database, the organization that is providing the IANA
+     #    Considerations defined in this RFC, under the memorandum of
+     #    understanding with the IETF, currently ICANN, may act in accordance
+     #    with all competent court orders.  No ownership claims will be made
+     #    by ICANN or the IETF Trust on the database or the code.  Any person
+     #    making a contribution to the database or code waives all rights to
+     #    future claims in that contribution or in the TZ Database.
+
+    6. Google double-conversion
+
+    Copyright 2006-2011, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- libuv, located at deps/uv, is licensed as follows:
+  """
+    libuv is licensed for use as follows:
+
+    == ==
+    Copyright (c) 2015-present libuv project contributors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+    == ==
+
+    This license applies to parts of libuv originating from the
+    https://github.com/joyent/libuv repository:
+
+    == ==
+
+    Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+
+    == ==
+
+    This license applies to all parts of libuv that are not externally
+    maintained libraries.
+
+    The externally maintained libraries used by libuv are:
+
+      - tree.h (from FreeBSD), copyright Niels Provos. Two clause BSD license.
+
+      - inet_pton and inet_ntop implementations, contained in src/inet.c, are
+        copyright the Internet Systems Consortium, Inc., and licensed under the ISC
+        license.
+
+      - stdint-msvc2008.h (from msinttypes), copyright Alexander Chemeris. Three
+        clause BSD license.
+
+      - pthread-fixes.c, copyright Google Inc. and Sony Mobile Communications AB.
+        Three clause BSD license.
+
+      - android-ifaddrs.h, android-ifaddrs.c, copyright Berkeley Software Design
+        Inc, Kenneth MacKay and Emergya (Cloud4all, FP7/2007-2013, grant agreement
+        n° 289016). Three clause BSD license.
+  """
+
+- llhttp, located at deps/llhttp, is licensed as follows:
+  """
+    This software is licensed under the MIT License.
+
+    Copyright Fedor Indutny, 2018.
+
+    Permission is hereby granted, free of charge, to any person obtaining a
+    copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to permit
+    persons to whom the Software is furnished to do so, subject to the
+    following conditions:
+
+    The above copyright notice and this permission notice shall be included
+    in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+    OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+    NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+    DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+    OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+    USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- OpenSSL, located at deps/openssl, is licensed as follows:
+  """
+    Copyright (c) 1998-2019 The OpenSSL Project.  All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    1. Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+    2. Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+
+    3. All advertising materials mentioning features or use of this
+    software must display the following acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
+
+    4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
+    endorse or promote products derived from this software without
+    prior written permission. For written permission, please contact
+    openssl-core@openssl.org.
+
+    5. Products derived from this software may not be called "OpenSSL"
+    nor may "OpenSSL" appear in their names without prior written
+    permission of the OpenSSL Project.
+
+    6. Redistributions of any form whatsoever must retain the following
+    acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
+
+    THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
+    EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+    PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
+    ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+    NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+    LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+    HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+    STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+    ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+    OF THE POSSIBILITY OF SUCH DAMAGE.
+    == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == ==
+
+    This product includes cryptographic software written by Eric Young
+    (eay@cryptsoft.com).  This product includes software written by Tim
+    Hudson (tjh@cryptsoft.com).
+  """
+
+- Punycode.js, located at lib/punycode.js, is licensed as follows:
+  """
+    Copyright Mathias Bynens <https://mathiasbynens.be/>
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- V8, located at deps/v8, is licensed as follows:
+  """
+    This license applies to all parts of V8 that are not externally
+    maintained libraries.  The externally maintained libraries used by V8
+    are:
+
+      - PCRE test suite, located in
+        test/mjsunit/third_party/regexp-pcre/regexp-pcre.js.  This is based on the
+        test suite from PCRE-7.3, which is copyrighted by the University
+        of Cambridge and Google, Inc.  The copyright notice and license
+        are embedded in regexp-pcre.js.
+
+      - Layout tests, located in test/mjsunit/third_party/object-keys.  These are
+        based on layout tests from webkit.org which are copyrighted by
+        Apple Computer, Inc. and released under a 3-clause BSD license.
+
+      - Strongtalk assembler, the basis of the files assembler-arm-inl.h,
+        assembler-arm.cc, assembler-arm.h, assembler-ia32-inl.h,
+        assembler-ia32.cc, assembler-ia32.h, assembler-x64-inl.h,
+        assembler-x64.cc, assembler-x64.h, assembler-mips-inl.h,
+        assembler-mips.cc, assembler-mips.h, assembler.cc and assembler.h.
+        This code is copyrighted by Sun Microsystems Inc. and released
+        under a 3-clause BSD license.
+
+      - Valgrind client API header, located at src/third_party/valgrind/valgrind.h
+        This is released under the BSD license.
+
+      - The Wasm C/C++ API headers, located at third_party/wasm-api/wasm.{h,hh}
+        This is released under the Apache license. The API's upstream prototype
+        implementation also formed the basis of V8's implementation in
+        src/wasm/c-api.cc.
+
+    These libraries have their own licenses; we recommend you read them,
+    as their terms may differ from the terms below.
+
+    Further license information can be found in LICENSE files located in
+    sub-directories.
+
+    Copyright 2014, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- SipHash, located at deps/v8/src/third_party/siphash, is licensed as follows:
+  """
+    SipHash reference C implementation
+
+    Copyright (c) 2016 Jean-Philippe Aumasson <jeanphilippe.aumasson@gmail.com>
+
+    To the extent possible under law, the author(s) have dedicated all
+    copyright and related and neighboring rights to this software to the public
+    domain worldwide. This software is distributed without any warranty.
+  """
+
+- zlib, located at deps/zlib, is licensed as follows:
+  """
+    zlib.h -- interface of the 'zlib' general purpose compression library
+    version 1.2.11, January 15th, 2017
+
+    Copyright (C) 1995-2017 Jean-loup Gailly and Mark Adler
+
+    This software is provided 'as-is', without any express or implied
+    warranty.  In no event will the authors be held liable for any damages
+    arising from the use of this software.
+
+    Permission is granted to anyone to use this software for any purpose,
+    including commercial applications, and to alter it and redistribute it
+    freely, subject to the following restrictions:
+
+    1. The origin of this software must not be misrepresented; you must not
+    claim that you wrote the original software. If you use this software
+    in a product, an acknowledgment in the product documentation would be
+    appreciated but is not required.
+    2. Altered source versions must be plainly marked as such, and must not be
+    misrepresented as being the original software.
+    3. This notice may not be removed or altered from any source distribution.
+
+    Jean-loup Gailly        Mark Adler
+    jloup@gzip.org          madler@alumni.caltech.edu
+  """
+
+- npm, located at deps/npm, is licensed as follows:
+  """
+    The npm application
+    Copyright (c) npm, Inc. and Contributors
+    Licensed on the terms of The Artistic License 2.0
+
+    Node package dependencies of the npm application
+    Copyright (c) their respective copyright owners
+    Licensed on their respective license terms
+
+    The npm public registry at https://registry.npmjs.org
+    and the npm website at https://www.npmjs.com
+    Operated by npm, Inc.
+    Use governed by terms published on https://www.npmjs.com
+
+    "Node.js"
+    Trademark Joyent, Inc., https://joyent.com
+    Neither npm nor npm, Inc. are affiliated with Joyent, Inc.
+
+    The Node.js application
+    Project of Node Foundation, https://nodejs.org
+
+    The npm Logo
+    Copyright (c) Mathias Pettersson and Brian Hammond
+
+    "Gubblebum Blocky" typeface
+    Copyright (c) Tjarda Koster, https://jelloween.deviantart.com
+    Used with permission
+
+    --------
+
+    The Artistic License 2.0
+
+    Copyright (c) 2000-2006, The Perl Foundation.
+
+    Everyone is permitted to copy and distribute verbatim copies
+    of this license document, but changing it is not allowed.
+
+    Preamble
+
+    This license establishes the terms under which a given free software
+    Package may be copied, modified, distributed, and/or redistributed.
+    The intent is that the Copyright Holder maintains some artistic
+    control over the development of that Package while still keeping the
+    Package available as open source and free software.
+
+    You are always permitted to make arrangements wholly outside of this
+    license directly with the Copyright Holder of a given Package.  If the
+    terms of this license do not permit the full use that you propose to
+    make of the Package, you should contact the Copyright Holder and seek
+    a different licensing arrangement.
+
+    Definitions
+
+        "Copyright Holder" means the individual(s) or organization(s)
+        named in the copyright notice for the entire Package.
+
+        "Contributor" means any party that has contributed code or other
+        material to the Package, in accordance with the Copyright Holder's
+        procedures.
+
+        "You" and "your" means any person who would like to copy,
+        distribute, or modify the Package.
+
+        "Package" means the collection of files distributed by the
+        Copyright Holder, and derivatives of that collection and/or of
+        those files. A given Package may consist of either the Standard
+        Version, or a Modified Version.
+
+        "Distribute" means providing a copy of the Package or making it
+        accessible to anyone else, or in the case of a company or
+        organization, to others outside of your company or organization.
+
+        "Distributor Fee" means any fee that you charge for Distributing
+        this Package or providing support for this Package to another
+        party.  It does not mean licensing fees.
+
+        "Standard Version" refers to the Package if it has not been
+        modified, or has been modified only in ways explicitly requested
+        by the Copyright Holder.
+
+        "Modified Version" means the Package, if it has been changed, and
+        such changes were not explicitly requested by the Copyright
+        Holder.
+
+        "Original License" means this Artistic License as Distributed with
+        the Standard Version of the Package, in its current version or as
+        it may be modified by The Perl Foundation in the future.
+
+        "Source" form means the source code, documentation source, and
+        configuration files for the Package.
+
+        "Compiled" form means the compiled bytecode, object code, binary,
+        or any other form resulting from mechanical transformation or
+        translation of the Source form.
+
+    Permission for Use and Modification Without Distribution
+
+    (1)  You are permitted to use the Standard Version and create and use
+    Modified Versions for any purpose without restriction, provided that
+    you do not Distribute the Modified Version.
+
+    Permissions for Redistribution of the Standard Version
+
+    (2)  You may Distribute verbatim copies of the Source form of the
+    Standard Version of this Package in any medium without restriction,
+    either gratis or for a Distributor Fee, provided that you duplicate
+    all of the original copyright notices and associated disclaimers.  At
+    your discretion, such verbatim copies may or may not include a
+    Compiled form of the Package.
+
+    (3)  You may apply any bug fixes, portability changes, and other
+    modifications made available from the Copyright Holder.  The resulting
+    Package will still be considered the Standard Version, and as such
+    will be subject to the Original License.
+
+    Distribution of Modified Versions of the Package as Source
+
+    (4)  You may Distribute your Modified Version as Source (either gratis
+    or for a Distributor Fee, and with or without a Compiled form of the
+    Modified Version) provided that you clearly document how it differs
+    from the Standard Version, including, but not limited to, documenting
+    any non-standard features, executables, or modules, and provided that
+    you do at least ONE of the following:
+
+        (a)  make the Modified Version available to the Copyright Holder
+        of the Standard Version, under the Original License, so that the
+        Copyright Holder may include your modifications in the Standard
+        Version.
+
+        (b)  ensure that installation of your Modified Version does not
+        prevent the user installing or running the Standard Version. In
+        addition, the Modified Version must bear a name that is different
+        from the name of the Standard Version.
+
+        (c)  allow anyone who receives a copy of the Modified Version to
+        make the Source form of the Modified Version available to others
+        under
+
+            (i)  the Original License or
+
+            (ii)  a license that permits the licensee to freely copy,
+            modify and redistribute the Modified Version using the same
+            licensing terms that apply to the copy that the licensee
+            received, and requires that the Source form of the Modified
+            Version, and of any works derived from it, be made freely
+            available in that license fees are prohibited but Distributor
+            Fees are allowed.
+
+    Distribution of Compiled Forms of the Standard Version
+    or Modified Versions without the Source
+
+    (5)  You may Distribute Compiled forms of the Standard Version without
+    the Source, provided that you include complete instructions on how to
+    get the Source of the Standard Version.  Such instructions must be
+    valid at the time of your distribution.  If these instructions, at any
+    time while you are carrying out such distribution, become invalid, you
+    must provide new instructions on demand or cease further distribution.
+    If you provide valid instructions or cease distribution within thirty
+    days after you become aware that the instructions are invalid, then
+    you do not forfeit any of your rights under this license.
+
+    (6)  You may Distribute a Modified Version in Compiled form without
+    the Source, provided that you comply with Section 4 with respect to
+    the Source of the Modified Version.
+
+    Aggregating or Linking the Package
+
+    (7)  You may aggregate the Package (either the Standard Version or
+    Modified Version) with other packages and Distribute the resulting
+    aggregation provided that you do not charge a licensing fee for the
+    Package.  Distributor Fees are permitted, and licensing fees for other
+    components in the aggregation are permitted. The terms of this license
+    apply to the use and Distribution of the Standard or Modified Versions
+    as included in the aggregation.
+
+    (8) You are permitted to link Modified and Standard Versions with
+    other works, to embed the Package in a larger work of your own, or to
+    build stand-alone binary or bytecode versions of applications that
+    include the Package, and Distribute the result without restriction,
+    provided the result does not expose a direct interface to the Package.
+
+    Items That are Not Considered Part of a Modified Version
+
+    (9) Works (including, but not limited to, modules and scripts) that
+    merely extend or make use of the Package, do not, by themselves, cause
+    the Package to be a Modified Version.  In addition, such works are not
+    considered parts of the Package itself, and are not subject to the
+    terms of this license.
+
+    General Provisions
+
+    (10)  Any use, modification, and distribution of the Standard or
+    Modified Versions is governed by this Artistic License. By using,
+    modifying or distributing the Package, you accept this license. Do not
+    use, modify, or distribute the Package, if you do not accept this
+    license.
+
+    (11)  If your Modified Version has been derived from a Modified
+    Version made by someone other than you, you are nevertheless required
+    to ensure that your Modified Version complies with the requirements of
+    this license.
+
+    (12)  This license does not grant you the right to use any trademark,
+    service mark, tradename, or logo of the Copyright Holder.
+
+    (13)  This license includes the non-exclusive, worldwide,
+    free-of-charge patent license to make, have made, use, offer to sell,
+    sell, import and otherwise transfer the Package with respect to any
+    patent claims licensable by the Copyright Holder that are necessarily
+    infringed by the Package. If you institute patent litigation
+    (including a cross-claim or counterclaim) against any party alleging
+    that the Package constitutes direct or contributory patent
+    infringement, then this Artistic License to you shall terminate on the
+    date that such litigation is filed.
+
+    (14)  Disclaimer of Warranty:
+    THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS
+    IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR
+    NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL
+    LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL
+    BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+    DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF
+    ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+    --------
+  """
+
+- GYP, located at tools/gyp, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- inspector_protocol, located at tools/inspector_protocol, is licensed as follows:
+  """
+    // Copyright 2016 The Chromium Authors. All rights reserved.
+    //
+    // Redistribution and use in source and binary forms, with or without
+    // modification, are permitted provided that the following conditions are
+    // met:
+    //
+    //    * Redistributions of source code must retain the above copyright
+    // notice, this list of conditions and the following disclaimer.
+    //    * Redistributions in binary form must reproduce the above
+    // copyright notice, this list of conditions and the following disclaimer
+    // in the documentation and/or other materials provided with the
+    // distribution.
+    //    * Neither the name of Google Inc. nor the names of its
+    // contributors may be used to endorse or promote products derived from
+    // this software without specific prior written permission.
+    //
+    // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- jinja2, located at tools/inspector_protocol/jinja2, is licensed as follows:
+  """
+    Copyright (c) 2009 by the Jinja Team, see AUTHORS for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+
+        * The names of the contributors may not be used to endorse or
+          promote products derived from this software without specific
+          prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- markupsafe, located at tools/inspector_protocol/markupsafe, is licensed as follows:
+  """
+    Copyright (c) 2010 by Armin Ronacher and contributors.  See AUTHORS
+    for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms of the software as well
+    as documentation, with or without modification, are permitted provided
+    that the following conditions are met:
+
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+
+    * The names of the contributors may not be used to endorse or
+      promote products derived from this software without specific
+      prior written permission.
+
+    THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+    CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
+    NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+    OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+    EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+    PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+    PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+    NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+    SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+    DAMAGE.
+  """
+
+- cpplint.py, located at tools/cpplint.py, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- ESLint, located at tools/node_modules/eslint, is licensed as follows:
+  """
+    Copyright JS Foundation and other contributors, https://js.foundation
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- babel-eslint, located at tools/node_modules/babel-eslint, is licensed as follows:
+  """
+    Copyright (c) 2014-2016 Sebastian McKenzie <sebmck@gmail.com>
+
+    MIT License
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- gtest, located at test/cctest/gtest, is licensed as follows:
+  """
+    Copyright 2008, Google Inc.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+        * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- nghttp2, located at deps/nghttp2, is licensed as follows:
+  """
+    The MIT License
+
+    Copyright (c) 2012, 2014, 2015, 2016 Tatsuhiro Tsujikawa
+    Copyright (c) 2012, 2014, 2015, 2016 nghttp2 contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- node-inspect, located at deps/node-inspect, is licensed as follows:
+  """
+    Copyright Node.js contributors. All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+  """
+
+- large_pages, located at src/large_pages, is licensed as follows:
+  """
+     Copyright (C) 2018 Intel Corporation
+
+     Permission is hereby granted, free of charge, to any person obtaining a copy
+     of this software and associated documentation files (the "Software"),
+     to deal in the Software without restriction, including without limitation
+     the rights to use, copy, modify, merge, publish, distribute, sublicense,
+     and/or sell copies of the Software, and to permit persons to whom
+     the Software is furnished to do so, subject to the following conditions:
+
+     The above copyright notice and this permission notice shall be included
+     in all copies or substantial portions of the Software.
+
+     THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+     OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+     FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
+     THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES
+     OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+     ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
+     OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- caja, located at lib/internal/freeze_intrinsics.js, is licensed as follows:
+  """
+     Adapted from SES/Caja - Copyright (C) 2011 Google Inc.
+     Copyright (C) 2018 Agoric
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+  """
+
+- brotli, located at deps/brotli, is licensed as follows:
+  """
+    Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- HdrHistogram, located at deps/histogram, is licensed as follows:
+  """
+    The code in this repository code was Written by Gil Tene, Michael Barker,
+    and Matt Warren, and released to the public domain, as explained at
+    http://creativecommons.org/publicdomain/zero/1.0/
+
+    For users of this code who wish to consume it under the "BSD" license
+    rather than under the public domain or CC0 contribution text mentioned
+    above, the code found under this directory is *also* provided under the
+    following license (commonly referred to as the BSD 2-Clause License). This
+    license does not detract from the above stated release of the code into
+    the public domain, and simply represents an additional license granted by
+    the Author.
+
+    -----------------------------------------------------------------------------
+    ** Beginning of "BSD 2-Clause License" text. **
+
+     Copyright (c) 2012, 2013, 2014 Gil Tene
+     Copyright (c) 2014 Michael Barker
+     Copyright (c) 2014 Matt Warren
+     All rights reserved.
+
+     Redistribution and use in source and binary forms, with or without
+     modification, are permitted provided that the following conditions are met:
+
+     1. Redistributions of source code must retain the above copyright notice,
+        this list of conditions and the following disclaimer.
+
+     2. Redistributions in binary form must reproduce the above copyright notice,
+        this list of conditions and the following disclaimer in the documentation
+        and/or other materials provided with the distribution.
+
+     THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+     AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+     IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+     ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+     LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+     INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+     CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+     THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- node-heapdump, located at src/heap_utils.cc, is licensed as follows:
+  """
+    ISC License
+
+    Copyright (c) 2012, Ben Noordhuis <info@bnoordhuis.nl>
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    === src/compat.h src/compat-inl.h ===
+
+    ISC License
+
+    Copyright (c) 2014, StrongLoop Inc.
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- rimraf, located at lib/internal/fs/rimraf.js, is licensed as follows:
+  """
+    The ISC License
+
+    Copyright (c) Isaac Z. Schlueter and Contributors
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+    IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- uvwasi, located at deps/uvwasi, is licensed as follows:
+  """
+    MIT License
+
+    Copyright (c) 2019 Colin Ihrig and Contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in all
+    copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+    SOFTWARE.
+  """
+
+
+
+
+

MICROSOFT SOFTWARE LICENSE TERMS

+
+
+
MICROSOFT VISUAL STUDIO CODE
+
+These license terms are an agreement between you and Microsoft Corporation (or based on where you live, one of its affiliates). They apply to the software named above. The terms also apply to any Microsoft services or updates for the software, except to the extent those have different terms.
+
+IF YOU COMPLY WITH THESE LICENSE TERMS, YOU HAVE THE RIGHTS BELOW.
+
+    1. INSTALLATION AND USE RIGHTS.
+        a. General. You may use any number of copies of the software to develop and test your applications, including deployment within your internal corporate network.
+        b. Demo use. The uses permitted above include use of the software in demonstrating your applications.
+        c. Third Party Components. The software may include third party components with separate legal notices or governed by other agreements, as may be described in the ThirdPartyNotices file accompanying the software.
+        d. Extensions. The software gives you the option to download other Microsoft and third party software packages from our extension marketplace or package managers. Those packages are under their own licenses, and not this agreement. Microsoft does not distribute, license or provide any warranties for any of the third party packages. By accessing or using our extension marketplace, you agree to the extension marketplace terms located at https://aka.ms/vsmarketplace-ToU.
+    2. DATA.
+        a. Data Collection. The software may collect information about you and your use of the software, and send that to Microsoft. Microsoft may use this information to provide services and improve our products and services. You may opt-out of many of these scenarios, but not all, as described in the product documentation located at https://code.visualstudio.com/docs/supporting/faq#_how-to-disable-telemetry-reporting. There may also be some features in the software that may enable you and Microsoft to collect data from users of your applications. If you use these features, you must comply with applicable law, including providing appropriate notices to users of your applications together with Microsoft’s privacy statement. Our privacy statement is located at https://go.microsoft.com/fwlink/?LinkID=824704. You can learn more about data collection and use in the help documentation and our privacy statement. Your use of the software operates as your consent to these practices.
+        b. Processing of Personal Data. To the extent Microsoft is a processor or subprocessor of personal data in connection with the software, Microsoft makes the commitments in the European Union General Data Protection Regulation Terms of the Online Services Terms to all customers effective May 25, 2018, at https://go.microsoft.com/?linkid=9840733.
+    3. UPDATES. The software may periodically check for updates and download and install them for you. You may obtain updates only from Microsoft or authorized sources. Microsoft may need to update your system to provide you with updates. You agree to receive these automatic updates without any additional notice. Updates may not include or support all existing software features, services, or peripheral devices. If you do not want automatic updates, you may turn them off by following the instructions in the documentation at https://go.microsoft.com/fwlink/?LinkID=616397.
+    4. FEEDBACK. If you give feedback about the software to Microsoft, you give to Microsoft, without charge, the right to use, share and commercialize your feedback in any way and for any purpose. You will not give feedback that is subject to a license that requires Microsoft to license its software or documentation to third parties because we include your feedback in them. These rights survive this agreement.
+    5. SCOPE OF LICENSE. This license applies to the Visual Studio Code product. Source code for Visual Studio Code is available at https://github.com/Microsoft/vscode under the MIT license agreement. The software is licensed, not sold. This agreement only gives you some rights to use the software. Microsoft reserves all other rights. Unless applicable law gives you more rights despite this limitation, you may use the software only as expressly permitted in this agreement. In doing so, you must comply with any technical limitations in the software that only allow you to use it in certain ways. You may not
+        reverse engineer, decompile or disassemble the software, or otherwise attempt to derive the source code for the software except and solely to the extent required by third party licensing terms governing use of certain open source components that may be included in the software;
+        remove, minimize, block or modify any notices of Microsoft or its suppliers in the software;
+        use the software in any way that is against the law;
+        share, publish, rent or lease the software, or provide the software as a stand-alone offering for others to use.
+    6. SUPPORT SERVICES. Because this software is “as is,” we may not provide support services for it.
+    7. ENTIRE AGREEMENT. This agreement, and the terms for supplements, updates, Internet-based services and support services that you use, are the entire agreement for the software and support services.
+    8. EXPORT RESTRICTIONS. You must comply with all domestic and international export laws and regulations that apply to the software, which include restrictions on destinations, end-users, and end use. For further information on export restrictions, see https://www.microsoft.com/exporting.
+    9. APPLICABLE LAW. If you acquired the software in the United States, Washington law applies to interpretation of and claims for breach of this agreement, and the laws of the state where you live apply to all other claims. If you acquired the software in any other country, its laws apply.
+    10. CONSUMER RIGHTS; REGIONAL VARIATIONS. This agreement describes certain legal rights. You may have other rights, including consumer rights, under the laws of your state or country. Separate and apart from your relationship with Microsoft, you may also have rights with respect to the party from which you acquired the software. This agreement does not change those other rights if the laws of your state or country do not permit it to do so. For example, if you acquired the software in one of the below regions, or mandatory country law applies, then the following provisions apply to you:
+        a. Australia. You have statutory guarantees under the Australian Consumer Law and nothing in this agreement is intended to affect those rights.
+        b. Canada. If you acquired this software in Canada, you may stop receiving updates by turning off the automatic update feature, disconnecting your device from the Internet (if and when you re-connect to the Internet, however, the software will resume checking for and installing updates), or uninstalling the software. The product documentation, if any, may also specify how to turn off updates for your specific device or software.
+        c. Germany and Austria.
+            Warranty. The properly licensed software will perform substantially as described in any Microsoft materials that accompany the software. However, Microsoft gives no contractual guarantee in relation to the licensed software.
+            Limitation of Liability. In case of intentional conduct, gross negligence, claims based on the Product Liability Act, as well as, in case of death or personal or physical injury, Microsoft is liable according to the statutory law.
+
+        Subject to the foregoing clause (ii), Microsoft will only be liable for slight negligence if Microsoft is in breach of such material contractual obligations, the fulfillment of which facilitate the due performance of this agreement, the breach of which would endanger the purpose of this agreement and the compliance with which a party may constantly trust in (so-called "cardinal obligations"). In other cases of slight negligence, Microsoft will not be liable for slight negligence.
+    11. DISCLAIMER OF WARRANTY. The software is licensed “as-is.” You bear the risk of using it. Microsoft gives no express warranties, guarantees or conditions. To the extent permitted under your local laws, Microsoft excludes the implied warranties of merchantability, fitness for a particular purpose and non-infringement.
+
+    12. LIMITATION ON AND EXCLUSION OF DAMAGES. You can recover from Microsoft and its suppliers only direct damages up to U.S. $5.00. You cannot recover any other damages, including consequential, lost profits, special, indirect or incidental damages.
+
+    This limitation applies to (a) anything related to the software, services, content (including code) on third party Internet sites, or third party applications; and (b) claims for breach of contract, breach of warranty, guarantee or condition, strict liability, negligence, or other tort to the extent permitted by applicable law.
+
+    It also applies even if Microsoft knew or should have known about the possibility of the damages. The above limitation or exclusion may not apply to you because your state or country may not allow the exclusion or limitation of incidental, consequential or other damages.
+
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/devonfw-ide-introduction.html b/docs/devonfw.github.io/1.0/ide.wiki/devonfw-ide-introduction.html new file mode 100644 index 00000000..f7cb2f20 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/devonfw-ide-introduction.html @@ -0,0 +1,562 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Introduction

+
+
+

devonfw provides a solution to building applications which combine best-in-class frameworks and libraries +as well as industry proven practices and code conventions. +It massively speeds up development, reduces risks and helps deliver better results.

+
+
+

This document contains the instructions for the tool devonfw-ide to set up and maintain your development tools including your favorite IDE (integrated development environment).

+
+ +
+

Features

+
+

Every developer needs great tools to work efficiently. Setting up these tools manually can be tedious and error-prone. Furthermore, some projects may require different versions and configurations of such tools. Especially configurations like code-formatters should be consistent within a project to avoid diff-wars.

+
+
+

The devonfw-ide will solve these issues. Here are the features you will find through devonfw-ide:

+
+
+
    +
  • +

    Efficient
    +Set up your IDE within minutes tailored for the requirements of your project.

    +
  • +
  • +

    Automated
    +Automate the setup and update, avoid manual steps and mistakes.

    +
  • +
  • +

    Simple
    +KISS (Keep It Small and Simple), no native installers that globally mess your OS or tool-integration that break with every release. Instead, use templates and simple shell scripts.

    +
  • +
  • +

    Configurable
    +You can change the configuration depending on your needs. Furthermore, the settings contain configuration templates for the different tools (see configurator).

    +
  • +
  • +

    Maintainable
    +For your project you should copy these settings to an own git repository that can be maintained and updated to manage the tool configurations during the project lifecycle. If you use GitHub or GitLab every developer can easily suggest changes and improvements to these settings via pull/merge requests, which is easier to manage with big teams.

    +
  • +
  • +

    Customizable
    +Do you need an additional tool you had never heard of before? Put it in the software folder of the structure. The devon CLI will then automatically add it to your PATH variable.
    +Further you can create your own commandlet for your additional tool. For closed-source tools you can create your own archive and distribute it to your team members as long as you care about the terms and licenses of these tools.

    +
  • +
  • +

    Multi-platform
    +It works on all major platforms: Windows, Mac and Linux.

    +
  • +
  • +

    Multi-tenancy
    +You can have several instances of the devonfw-ide "installed" on your machine for different projects with different tools, tool versions and configurations. You won’t need to set up any physical installation nor changing your operating system. "Installations" of devonfw-ide do not interfere with each other nor with other installed software.

    +
  • +
  • +

    Multiple Workspaces
    +It supports working with different workspaces on different branches. You can create and update new workspaces with a few clicks. You can see the workspace name in the title-bar of your IDE so you do not get confused and work on the right branch.

    +
  • +
  • +

    Free
    +The devonfw-ide is free just like everything from devonfw. See LICENSE for details.

    +
  • +
+
+
+
+

IDEs

+
+

We support the following IDEs:

+
+
+ +
+
+
+

Platforms

+
+

We support the following platforms:

+
+
+ +
+
+
+

Build-Systems

+
+

We support the following build-systems:

+
+
+ +
+
+

However, other IDEs, platforms, or tools can also be easily integrated as a commandlet.

+
+
+
+

Motivation

+
+

TL;DR? Let's talk to developers in their own language. Here are some examples with devonfw-ide:

+
+
+
+
[/]$ devon
+You are not inside a devonfw-ide installation: /
+[/]$ cd /projects/devonfw
+[devonfw]$ mvn
+zsh: command not found: mvn
+[devonfw]$ devon
+devonfw-ide environment variables have been set for /projects/devonfw in workspace main
+[devonfw]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/devonfw/software/maven
+Java version: 1.8.0_191, vendor: Oracle Corporation, runtime: /projects/devonfw/software/java
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[devonfw]$ cd /projects/ide-test/workspaces/test/my-project
+[my-project]$ devon
+devonfw-ide environment variables have been set for /projects/ide-test in workspace test
+[my-project]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/ide-test/software/maven
+Java version: 11.0.2, vendor: Oracle Corporation, runtime: /projects/ide-test/software/jdk/Contents/Home
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[ide-test]$ devon eclipse
+launching Eclipse for workspace test...
+[my-project]$ devon build
+[INFO] Scanning for projects...
+...
+[INFO] BUILD SUCCESS
+
+
+
+

This was just a very simple demo of what devonfw-ide can do. For further details have a look at our CLI documentation.

+
+
+

Now you might ask:

+
+
+
    +
  • +

    But I use Windows/Linux/MacOS/… - it works on all platforms!

    +
  • +
  • +

    But how about Windows CMD or Power-Shell? - it works!

    +
  • +
  • +

    But what if I use cygwin or git-bash on windows? - it works!

    +
  • +
  • +

    But I love to use ConEmu or Commander - it works with full integration!

    +
  • +
  • +

    How about macOS Terminal or iTerm2? - it works with full integration!

    +
  • +
  • +

    But I use Zsh - it works!

    +
  • +
  • +

    …​? - it works!

    +
  • +
+
+
+

Wow! So let’s get started with download & setup.

+
+ +
+
+

Setup

+ +
+
+

Prerequisites

+
+

We try to make it as simple as possible for you. However, there are some minimal prerequisites:

+
+
+
    +
  • +

    You need to have a tool to extract *.tar.gz files (tar and gzip). On Windows before Version 10 (1803) use 7-zip. On all other platforms this comes out of the box.

    +
  • +
  • +

    You need to have git and curl installed.

    +
    +
      +
    • +

      On Windows you only need to download and install git for windows. This also ships with bash and curl.

      +
    • +
    • +

      On Linux you might need to install the above tools in case they are not present (e.g. sudo apt-get install git curl or sudo yum install git-core curl)

      +
    • +
    • +

      On MacOS you only need to download and install git for mac.

      +
    • +
    +
    +
  • +
+
+
+
+

Download

+
+

The latest release of devonfw-ide can be downloaded from here (You can find all releases in maven central).

+
+
+
+

Install

+
+

Create a central folder like C:\projects or /projects. Inside this folder, create a sub-folder for your new project such as my-project and extract the contents of the downloaded archive (devonfw-ide-scripts-*.tar.gz) to this new folder. Run the command setup in this folder (on windows double clicking on setup.bat). +That’s all. To get started read the usage.

+
+
+
+

Uninstall

+
+

To "uninstall" your devonfw-ide you only need to call the following command:

+
+
+
+
devon ide uninstall
+
+
+
+

Then you can delete the devonfw-ide top-level folder(s) (${DEVON_IDE_HOME}).

+
+
+

The devonfw-ide is designed to be non-invasive to your operating system and computer. Therefore it is not "installed" on your system in a classical way. Instead you just create a folder and extract the downloaded archive to it. You only have to install some specific prerequisites like git in advance. All the other software remains locally in your devonfw-ide folder. However, there are the following exceptions (which are reverted by devon ide uninstall):

+
+
+
    +
  • +

    The devon command is copied to your home directory (~/.devon/devon)

    +
  • +
  • +

    The devon alias is added to your shell config (~/.bashrc and ~/.zshrc, search for alias devon="source ~/.devon/devon").

    +
  • +
  • +

    On Windows the devon.bat command is copied to your home directory (%USERPROFILE%\scripts\devon.bat)

    +
  • +
  • +

    On Windows this %USERPROFILE%\scripts directory is added to the PATH of your user.

    +
  • +
  • +

    The devonfw-ide will download third party software to your ~/Downloads/devonfw-ide folder to reduce redundant storage. You have to delete this folder manually as we do not want to be responsible for data-loss in case users manually put files here.

    +
  • +
+
+
+
+

Testing SNAPSHOT releases

+
+

Whenever a story in devonfw-ide is completed by merging a PR, +our github actions will build a new SNAPSHOT release and on success deploy it to nexus on OSSRH. +You can therefore find the latest devonfw SNAPSHOT releases here. +Simply choose the latest SNAPSHOT version folder and then inside the *.tar.gz file for the latest version. +Once downloaded, you can proceed as with official releases (see install).

+
+
+

If you test the latest SNAPSHOT please also give feedback to bug or feature tickets to let us know if things are working or not. +Thanks for your testing, support and help to make devonfw better!

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/devonfw-ide-support.html b/docs/devonfw.github.io/1.0/ide.wiki/devonfw-ide-support.html new file mode 100644 index 00000000..ebed1c58 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/devonfw-ide-support.html @@ -0,0 +1,3042 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Support

+
+ +
+

Migration from oasp4j-ide

+
+

The devonfw-ide is a completely new and innovative solution for managing the local development environment that has been created from scratch. +Releases of OASP as well as releases of devonfw until version 3.1.x are based on the old oasp4j-ide that is now considered deprecated. As devonfw-ide is a complete redesign this will have some impact for the users. This section should help and assist so you do not get lost.

+
+
+
+

Get familiar with devonfw-ide

+
+

First of all you should roughly get familiar with the new devonfw-ide. The key features and changes are:

+
+
+
    +
  • +

    platform-agnostic (supports Windows, Mac, and Linux in a single distribution)

    +
  • +
  • +

    small core (reduced the download package from ~2 gigabyte to ~2 megabyte)

    +
  • +
  • +

    fast and easy updates (built in update support)

    +
  • +
  • +

    minimum number of scripts (removed tons of end-user scripts making things much simpler)

    +
  • +
  • +

    fully automated setup (run setup script and you are ready - even for advanced features that had to be configured manually before)

    +
  • +
  • +

    single command for everything (entire CLI available via new devon command)

    +
  • +
+
+
+

For all the details you should study the documentation starting from the beginning.

+
+
+
+

Migration of existing oasp4j-ide installation

+
+
    +
  • +

    extract new devonfw-ide-scripts on top of your existing installation

    +
  • +
  • +

    run setup

    +
  • +
  • +

    done

    +
  • +
+
+
+

If you get errors:

+
+
+
    +
  • +

    ask your technical lead to fix the settings git repo for devonfw-ide or offer him to do it for you.

    +
  • +
  • +

    you need to merge the devon folder into your settings

    +
  • +
  • +

    you need to merge the devon.properties into your settings

    +
  • +
  • +

    you should check your variables[-customized][.bat] and merge required customizations into the proper configuration

    +
  • +
+
+
+
+

Hints for users after migration

+
+

Getting used to all the new commands might be tedious when starting after a migration.

+
+
+
Comparison of commands
+

|== == == == == == == == == == == = +|oasp4j-ide command|devonfw-ide command|Comment +|create-or-update-workspace|devon eclipse ws-update +.4+|actually not needed anymore as workspace is updated automatically when IDE is launched. To launch your IDE simply run devon eclipse, devon intellij, or devon vscode. If you like to get launch scripts for your IDE e.g. Eclipse just call devon eclipse --all create-script. +|create-or-update-workspace «workspace»|cd «workspace» && devon eclipse ws-update +|update-all-workspaces|devon eclipse --all ws-update +|create-or-update-workspace-vs|devon vscode ws-update

+
+
+

|devcon workspace create «workspace»|Simply create the «workspace» directory (e.g. cd workspaces && mkdir examples)|

+
+
+

|scripts/update-eclipse-workspace-settings|devon eclipse ws-reverse|To add new properties (old option --new) use devon eclipse ws-reverse-add

+
+
+

|devcon project build
+devcon devon4j build
+devcon devon4ng build +|devon build|

+
+
+

|devcon devon4j create|devon java create|

+
+
+

|devcon devon4ng create|devon ng create|

+
+
+

|devcon system *
+devcon dist * +|setup or devon ide setup|

+
+
+

|console.bat|-|Simply open terminal in selected folder. On Windows right-click folder in windows-explorer and select open devonfw CMD here.

+
+
+

|devcon help|devon help|

+
+
+

|devcon doc|Read the documentation from devonfw.com| +|== == == == == == == == == == == =

+
+
+ +
+
+

License

+
+

The product devonfw-ide is licensed under the following terms.

+
+
+

Binaries of this product have been made available to you by devonfw under the Apache Public License 2.0.

+
+
+

The documentation of this product is licensed under the terms of the Creative Commons License (Attribution-No Derivatives 4.0 International).

+
+
+

All of the source code to this product is available under licenses which are both free and open source.

+
+
+

More specifically, most of the source code is available under the Apache Public License 2.0. The remainder of the software which is not under the Apache license is available under one of a variety of other free and open source licenses. Those that require reproduction of the license text in the distribution are given below. (Note: your copy of this product may not contain code covered by one or more of the licenses listed here, depending on the exact product and version you choose.)

+
+
+

The following table shows the components that may be used. The column inclusion indicates the way the component is included:

+
+
+
    +
  • +

    directly included means the component is directly contained in the download package of devonfw-ide we provide

    +
  • +
  • +

    default setup means the component is not initially included but will be downloaded during the setup by default

    +
  • +
  • +

    optional means the component is neither initially included nor downloaded by default, but only gets downloaded and installed if explicitly triggered by you when invoking additional commands or if explicitly configured by your project.

    +
  • +
+
+
+
Third party components
+

|== == == == == == == == == == == = +|Component|Inclusion|License +|https://github.com/devonfw/ide[devonfw-ide] | Directly included |https://github.com/devonfw/ide/blob/master/LICENSE[ASL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] API | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] Implementation | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://openjdk.java.net/[OpenJDK] / AdoptOpenJDK (Java) |Default Setup| GPLv2 +|https://maven.apache.org/[Maven] | Default Setup|https://www.apache.org/licenses/LICENSE-2.0[ASL 2.0] +|https://code.visualstudio.com/[VS Code] |Optional| MIT (Terms) +|https://github.com/devonfw/extension-pack-vscode[extension-pack-vscode] |Optional|https://github.com/devonfw/extension-pack-vscode/blob/master/LICENSE[ASL 2.0] +|https://www.eclipse.org/[Eclipse] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] +|https://github.com/devonfw/cobigen[CobiGen] |Optional|https://github.com/devonfw/cobigen/blob/master/LICENSE.txt[ASL 2.0] +|https://marketplace.eclipse.org/content/tm-terminal[TM Terminal] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] (see here) +|https://github.com/iloveeclipse/anyedittools/[AnyEdit] |Optional|https://github.com/iloveeclipse/anyedittools/blob/master/LICENSE.md[EPL 1.0] +|https://checkstyle.org/eclipse-cs/[EclipseCS] |Optional|https://github.com/checkstyle/eclipse-cs/blob/master/LICENSE[LGPL 2.1] +|https://marketplace.eclipse.org/content/spotbugs-eclipse-plugin[SpotBugs Eclipse plugin] |Optional|https://github.com/spotbugs/spotbugs/blob/master/LICENSE[LGPL 2.1] +|https://www.eclemma.org/[EclEmma] |Optional|https://www.eclemma.org/license.html[EPL 1.0] +|https://basti1302.github.io/startexplorer/[StartExplorer] |Optional|http://www.wtfpl.net/txt/copying/[WTFPL 2] +|http://myregexp.com/eclipsePlugin.html[regex tester] 
|Optional|http://www.gnu.org/licenses/gpl-2.0.html[GPL 2.0] (see here) +|https://github.com/m-m-m/eclipse-templatevariables/[eclipse-templatevariables] |Optional|https://github.com/m-m-m/eclipse-templatevariables/blob/master/LICENSE.txt[ASL 2.0] +|https://nodejs.org/[Node.js] |Default Setup|https://raw.githubusercontent.com/nodejs/node/master/LICENSE[License] +|https://www.npmjs.com/[NPM] |Default Setup|https://github.com/npm/cli/blob/latest/LICENSE[Artistic License 2.0] (Terms) +|https://cli.angular.io/[Angular CLI] (ng) |Optional|https://cli.angular.io/license.html[MIT] +|http://groovy-lang.org/[Groovy]|Optional|https://github.com/apache/groovy/blob/master/LICENSE[ASL 2.0] +|https://ant.apache.org/[Apache Ant]|Optional|https://github.com/apache/ant/blob/master/LICENSE[ASL 2.0] +|https://gradle.org/[Gradle] |Optional|https://github.com/gradle/gradle/blob/master/LICENSE[ASL 2.0] +|https://jenkins.io/[Jenkins] |Optional|https://github.com/jenkinsci/jenkins/blob/master/LICENSE.txt[MIT] +|https://www.sonarsource.com/plans-and-pricing/community/[SonarQube (Community Edition)] |Optional|https://github.com/SonarSource/sonarqube/blob/master/LICENSE.txt[LGPL 3.0] +|https://www.sonarlint.org/eclipse/[SonarLint] |Optional|https://github.com/SonarSource/sonarlint-eclipse/blob/master/LICENSE.txt[LGPL 3+] +|https://github.com/devonfw/cicdgen[cicdgen] |Optional|https://github.com/devonfw/cicdgen/blob/develop/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4j[devon4j] |Optional|https://github.com/devonfw/devon4j/blob/develop/LICENSE[ASL 2.0] +|https://github.com/devonfw/devon4ng[devon4ng] |Optional|https://github.com/devonfw/devon4ng/blob/master/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4node[devon4node] |Optional|https://github.com/devonfw/devon4node/blob/develop/LICENSE.txt[ASL 2.0] +|https://www.jetbrains.com/idea/[IntelliJ IDEA] |Optional|https://www.jetbrains.com/opensource/idea/[ASL 2.0] +|http://www.jasypt.org/[jasypt] 
|Optional|http://www.jasypt.org/license.html[ASL 2.0] +|https://www.docker.com/[docker]|Optional|https://docs.docker.com/engine/#licensing[ASL 2.0] and EULA +|https://kubernetes.io/[kubernetes]|Optional|https://github.com/kubernetes/kubernetes/blob/master/LICENSE[ASL 2.0] +|== == == == == == == == == == == =

+
+
+
+

Apache Software License - Version 2.0

+
+
+
                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+
+
+

Eclipse Public License - Version 1.0

+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+
+1. DEFINITIONS
+
+"Contribution" means:
+
+a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and
+
+b) in the case of each subsequent Contributor:
+
+i) changes to the Program, and
+
+ii) additions to the Program;
+
+where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program.
+
+"Contributor" means any person or entity that distributes the Program.
+
+"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+"Program" means the Contributions distributed in accordance with this Agreement.
+
+"Recipient" means anyone who receives the Program under this Agreement, including all Contributors.
+
+2. GRANT OF RIGHTS
+
+a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form.
+
+b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+
+c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+
+d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+
+3. REQUIREMENTS
+
+A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that:
+
+a) it complies with the terms and conditions of this Agreement; and
+
+b) its license agreement:
+
+i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+
+ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+
+iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and
+
+iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange.
+
+When the Program is made available in source code form:
+
+a) it must be made available under this Agreement; and
+
+b) a copy of this Agreement must be included with each copy of the Program.
+
+Contributors may not remove or alter any copyright notices contained within the Program.
+
+Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution.
+
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved.
+
+This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation.
+
+
+
+
+

Eclipse Public License - Version 2.0

+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE (“AGREEMENT”). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+1. DEFINITIONS
+
+“Contribution” means:
+
+    a) in the case of the initial Contributor, the initial content Distributed under this Agreement, and
+    b) in the case of each subsequent Contributor:
+        i) changes to the Program, and
+        ii) additions to the Program;
+    where such changes and/or additions to the Program originate from and are Distributed by that particular Contributor. A Contribution “originates” from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include changes or additions to the Program that are not Modified Works.
+
+“Contributor” means any person or entity that Distributes the Program.
+
+“Licensed Patents” mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+“Program” means the Contributions Distributed in accordance with this Agreement.
+
+“Recipient” means anyone who receives the Program under this Agreement or any Secondary License (as applicable), including Contributors.
+
+“Derivative Works” shall mean any work, whether in Source Code or other form, that is based on (or derived from) the Program and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship.
+
+“Modified Works” shall mean any work in Source Code or other form that results from an addition to, deletion from, or modification of the contents of the Program, including, for purposes of clarity any new file in Source Code form that contains any contents of the Program. Modified Works shall not include works that contain only declarations, interfaces, types, classes, structures, or files of the Program solely in each case in order to link to, bind by name, or subclass the Program or Modified Works thereof.
+
+“Distribute” means the acts of a) distributing or b) making available in any manner that enables the transfer of a copy.
+
+“Source Code” means the form of a Program preferred for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+“Secondary License” means either the GNU General Public License, Version 2.0, or any later versions of that license, including any exceptions or additional permissions as identified by the initial Contributor.
+2. GRANT OF RIGHTS
+
+    a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, Distribute and sublicense the Contribution of such Contributor, if any, and such Derivative Works.
+    b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in Source Code or other form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+    c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to Distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+    d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+    e) Notwithstanding the terms of any Secondary License, no Contributor makes additional grants to any Recipient (other than those set forth in this Agreement) as a result of such Recipient's receipt of the Program under the terms of a Secondary License (if permitted under the terms of Section 3).
+
+3. REQUIREMENTS
+
+3.1 If a Contributor Distributes the Program in any form, then:
+
+    a) the Program must also be made available as Source Code, in accordance with section 3.2, and the Contributor must accompany the Program with a statement that the Source Code for the Program is available under this Agreement, and informs Recipients how to obtain it in a reasonable manner on or through a medium customarily used for software exchange; and
+    b) the Contributor may Distribute the Program under a license different than this Agreement, provided that such license:
+        i) effectively disclaims on behalf of all other Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+        ii) effectively excludes on behalf of all other Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+        iii) does not attempt to limit or alter the recipients' rights in the Source Code under section 3.2; and
+        iv) requires any subsequent distribution of the Program by any party to be under a license that satisfies the requirements of this section 3.
+
+3.2 When the Program is Distributed as Source Code:
+
+    a) it must be made available under this Agreement, or if the Program (i) is combined with other material in a separate file or files made available under a Secondary License, and (ii) the initial Contributor attached to the Source Code the notice described in Exhibit A of this Agreement, then the Program may be made available under the terms of such Secondary Licenses, and
+    b) a copy of this Agreement must be included with each copy of the Program.
+
+3.3 Contributors may not remove or alter any copyright, patent, trademark, attribution notices, disclaimers of warranty, or limitations of liability (‘notices’) contained within the Program from any copy of the Program which they Distribute, provided that Contributors may add their own appropriate notices.
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor (“Commercial Contributor”) hereby agrees to defend and indemnify every other Contributor (“Indemnified Contributor”) against any losses, damages and costs (collectively “Losses”) arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement, including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be Distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to Distribute the Program (including its Contributions) under the new version.
+
+Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. Nothing in this Agreement is intended to be enforceable by any entity that is not a Contributor or Recipient. No third-party beneficiary rights are created under this Agreement.
+Exhibit A – Form of Secondary Licenses Notice
+
+“This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License, v. 2.0 are satisfied: {name license(s), version(s), and exceptions or additional permissions here}.”
+
+    Simply including a copy of this Agreement, including this Exhibit A is not sufficient to license the Source Code under Secondary Licenses.
+
+    If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice.
+
+    You may add additional accurate notices of copyright ownership.
+
+
+
+
+

MIT License

+
+
+
Copyright <YEAR> <COPYRIGHT HOLDER>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+
+
+

Artistic License - Version 2.0

+
+
+
Copyright (c) 2000-2006, The Perl Foundation.
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+Preamble
+
+This license establishes the terms under which a given free software Package may be copied, modified, distributed, and/or redistributed. The intent is that the Copyright Holder maintains some artistic control over the development of that Package while still keeping the Package available as open source and free software.
+
+You are always permitted to make arrangements wholly outside of this license directly with the Copyright Holder of a given Package. If the terms of this license do not permit the full use that you propose to make of the Package, you should contact the Copyright Holder and seek a different licensing arrangement.
+Definitions
+
+"Copyright Holder" means the individual(s) or organization(s) named in the copyright notice for the entire Package.
+
+"Contributor" means any party that has contributed code or other material to the Package, in accordance with the Copyright Holder's procedures.
+
+"You" and "your" means any person who would like to copy, distribute, or modify the Package.
+
+"Package" means the collection of files distributed by the Copyright Holder, and derivatives of that collection and/or of those files. A given Package may consist of either the Standard Version, or a Modified Version.
+
+"Distribute" means providing a copy of the Package or making it accessible to anyone else, or in the case of a company or organization, to others outside of your company or organization.
+
+"Distributor Fee" means any fee that you charge for Distributing this Package or providing support for this Package to another party. It does not mean licensing fees.
+
+"Standard Version" refers to the Package if it has not been modified, or has been modified only in ways explicitly requested by the Copyright Holder.
+
+"Modified Version" means the Package, if it has been changed, and such changes were not explicitly requested by the Copyright Holder.
+
+"Original License" means this Artistic License as Distributed with the Standard Version of the Package, in its current version or as it may be modified by The Perl Foundation in the future.
+
+"Source" form means the source code, documentation source, and configuration files for the Package.
+
+"Compiled" form means the compiled bytecode, object code, binary, or any other form resulting from mechanical transformation or translation of the Source form.
+Permission for Use and Modification Without Distribution
+
+(1) You are permitted to use the Standard Version and create and use Modified Versions for any purpose without restriction, provided that you do not Distribute the Modified Version.
+Permissions for Redistribution of the Standard Version
+
+(2) You may Distribute verbatim copies of the Source form of the Standard Version of this Package in any medium without restriction, either gratis or for a Distributor Fee, provided that you duplicate all of the original copyright notices and associated disclaimers. At your discretion, such verbatim copies may or may not include a Compiled form of the Package.
+
+(3) You may apply any bug fixes, portability changes, and other modifications made available from the Copyright Holder. The resulting Package will still be considered the Standard Version, and as such will be subject to the Original License.
+Distribution of Modified Versions of the Package as Source
+
+(4) You may Distribute your Modified Version as Source (either gratis or for a Distributor Fee, and with or without a Compiled form of the Modified Version) provided that you clearly document how it differs from the Standard Version, including, but not limited to, documenting any non-standard features, executables, or modules, and provided that you do at least ONE of the following:
+
+(a) make the Modified Version available to the Copyright Holder of the Standard Version, under the Original License, so that the Copyright Holder may include your modifications in the Standard Version.
+(b) ensure that installation of your Modified Version does not prevent the user installing or running the Standard Version. In addition, the Modified Version must bear a name that is different from the name of the Standard Version.
+(c) allow anyone who receives a copy of the Modified Version to make the Source form of the Modified Version available to others under
+(i) the Original License or
+(ii) a license that permits the licensee to freely copy, modify and redistribute the Modified Version using the same licensing terms that apply to the copy that the licensee received, and requires that the Source form of the Modified Version, and of any works derived from it, be made freely available in that license fees are prohibited but Distributor Fees are allowed.
+Distribution of Compiled Forms of the Standard Version or Modified Versions without the Source
+
+(5) You may Distribute Compiled forms of the Standard Version without the Source, provided that you include complete instructions on how to get the Source of the Standard Version. Such instructions must be valid at the time of your distribution. If these instructions, at any time while you are carrying out such distribution, become invalid, you must provide new instructions on demand or cease further distribution. If you provide valid instructions or cease distribution within thirty days after you become aware that the instructions are invalid, then you do not forfeit any of your rights under this license.
+
+(6) You may Distribute a Modified Version in Compiled form without the Source, provided that you comply with Section 4 with respect to the Source of the Modified Version.
+Aggregating or Linking the Package
+
+(7) You may aggregate the Package (either the Standard Version or Modified Version) with other packages and Distribute the resulting aggregation provided that you do not charge a licensing fee for the Package. Distributor Fees are permitted, and licensing fees for other components in the aggregation are permitted. The terms of this license apply to the use and Distribution of the Standard or Modified Versions as included in the aggregation.
+
+(8) You are permitted to link Modified and Standard Versions with other works, to embed the Package in a larger work of your own, or to build stand-alone binary or bytecode versions of applications that include the Package, and Distribute the result without restriction, provided the result does not expose a direct interface to the Package.
+Items That are Not Considered Part of a Modified Version
+
+(9) Works (including, but not limited to, modules and scripts) that merely extend or make use of the Package, do not, by themselves, cause the Package to be a Modified Version. In addition, such works are not considered parts of the Package itself, and are not subject to the terms of this license.
+General Provisions
+
+(10) Any use, modification, and distribution of the Standard or Modified Versions is governed by this Artistic License. By using, modifying or distributing the Package, you accept this license. Do not use, modify, or distribute the Package, if you do not accept this license.
+
+(11) If your Modified Version has been derived from a Modified Version made by someone other than you, you are nevertheless required to ensure that your Modified Version complies with the requirements of this license.
+
+(12) This license does not grant you the right to use any trademark, service mark, tradename, or logo of the Copyright Holder.
+
+(13) This license includes the non-exclusive, worldwide, free-of-charge patent license to make, have made, use, offer to sell, sell, import and otherwise transfer the Package with respect to any patent claims licensable by the Copyright Holder that are necessarily infringed by the Package. If you institute patent litigation (including a cross-claim or counterclaim) against any party alleging that the Package constitutes direct or contributory patent infringement, then this Artistic License to you shall terminate on the date that such litigation is filed.
+
+(14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS IS" AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+
+

Creative Commons License - Attribution-NoDerivatives 4.0 International

+
+
+
By exercising the Licensed Rights (defined below), You accept and agree to be bound by the terms and conditions of this Creative Commons Attribution-NoDerivatives 4.0 International Public License ("Public License"). To the extent this Public License may be interpreted as a contract, You are granted the Licensed Rights in consideration of Your acceptance of these terms and conditions, and the Licensor grants You such rights in consideration of benefits the Licensor receives from making the Licensed Material available under these terms and conditions.
+
+Section 1 – Definitions.
+
+    Adapted Material means material subject to Copyright and Similar Rights that is derived from or based upon the Licensed Material and in which the Licensed Material is translated, altered, arranged, transformed, or otherwise modified in a manner requiring permission under the Copyright and Similar Rights held by the Licensor. For purposes of this Public License, where the Licensed Material is a musical work, performance, or sound recording, Adapted Material is always produced where the Licensed Material is synched in timed relation with a moving image.
+    Copyright and Similar Rights means copyright and/or similar rights closely related to copyright including, without limitation, performance, broadcast, sound recording, and Sui Generis Database Rights, without regard to how the rights are labeled or categorized. For purposes of this Public License, the rights specified in Section 2(b)(1)-(2) are not Copyright and Similar Rights.
+    Effective Technological Measures means those measures that, in the absence of proper authority, may not be circumvented under laws fulfilling obligations under Article 11 of the WIPO Copyright Treaty adopted on December 20, 1996, and/or similar international agreements.
+    Exceptions and Limitations means fair use, fair dealing, and/or any other exception or limitation to Copyright and Similar Rights that applies to Your use of the Licensed Material.
+    Licensed Material means the artistic or literary work, database, or other material to which the Licensor applied this Public License.
+    Licensed Rights means the rights granted to You subject to the terms and conditions of this Public License, which are limited to all Copyright and Similar Rights that apply to Your use of the Licensed Material and that the Licensor has authority to license.
+    Licensor means the individual(s) or entity(ies) granting rights under this Public License.
+    Share means to provide material to the public by any means or process that requires permission under the Licensed Rights, such as reproduction, public display, public performance, distribution, dissemination, communication, or importation, and to make material available to the public including in ways that members of the public may access the material from a place and at a time individually chosen by them.
+    Sui Generis Database Rights means rights other than copyright resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other essentially equivalent rights anywhere in the world.
+    You means the individual or entity exercising the Licensed Rights under this Public License. Your has a corresponding meaning.
+
+Section 2 – Scope.
+
+    License grant.
+        Subject to the terms and conditions of this Public License, the Licensor hereby grants You a worldwide, royalty-free, non-sublicensable, non-exclusive, irrevocable license to exercise the Licensed Rights in the Licensed Material to:
+            reproduce and Share the Licensed Material, in whole or in part; and
+            produce and reproduce, but not Share, Adapted Material.
+        Exceptions and Limitations. For the avoidance of doubt, where Exceptions and Limitations apply to Your use, this Public License does not apply, and You do not need to comply with its terms and conditions.
+        Term. The term of this Public License is specified in Section 6(a).
+        Media and formats; technical modifications allowed. The Licensor authorizes You to exercise the Licensed Rights in all media and formats whether now known or hereafter created, and to make technical modifications necessary to do so. The Licensor waives and/or agrees not to assert any right or authority to forbid You from making technical modifications necessary to exercise the Licensed Rights, including technical modifications necessary to circumvent Effective Technological Measures. For purposes of this Public License, simply making modifications authorized by this Section 2(a)(4) never produces Adapted Material.
+        Downstream recipients.
+            Offer from the Licensor – Licensed Material. Every recipient of the Licensed Material automatically receives an offer from the Licensor to exercise the Licensed Rights under the terms and conditions of this Public License.
+            No downstream restrictions. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, the Licensed Material if doing so restricts exercise of the Licensed Rights by any recipient of the Licensed Material.
+        No endorsement. Nothing in this Public License constitutes or may be construed as permission to assert or imply that You are, or that Your use of the Licensed Material is, connected with, or sponsored, endorsed, or granted official status by, the Licensor or others designated to receive attribution as provided in Section 3(a)(1)(A)(i).
+
+    Other rights.
+        Moral rights, such as the right of integrity, are not licensed under this Public License, nor are publicity, privacy, and/or other similar personality rights; however, to the extent possible, the Licensor waives and/or agrees not to assert any such rights held by the Licensor to the limited extent necessary to allow You to exercise the Licensed Rights, but not otherwise.
+        Patent and trademark rights are not licensed under this Public License.
+        To the extent possible, the Licensor waives any right to collect royalties from You for the exercise of the Licensed Rights, whether directly or through a collecting society under any voluntary or waivable statutory or compulsory licensing scheme. In all other cases the Licensor expressly reserves any right to collect such royalties.
+
+Section 3 – License Conditions.
+
+Your exercise of the Licensed Rights is expressly made subject to the following conditions.
+
+    Attribution.
+
+        If You Share the Licensed Material, You must:
+            retain the following if it is supplied by the Licensor with the Licensed Material:
+                identification of the creator(s) of the Licensed Material and any others designated to receive attribution, in any reasonable manner requested by the Licensor (including by pseudonym if designated);
+                a copyright notice;
+                a notice that refers to this Public License;
+                a notice that refers to the disclaimer of warranties;
+                a URI or hyperlink to the Licensed Material to the extent reasonably practicable;
+            indicate if You modified the Licensed Material and retain an indication of any previous modifications; and
+            indicate the Licensed Material is licensed under this Public License, and include the text of, or the URI or hyperlink to, this Public License.
+        For the avoidance of doubt, You do not have permission under this Public License to Share Adapted Material.
+        You may satisfy the conditions in Section 3(a)(1) in any reasonable manner based on the medium, means, and context in which You Share the Licensed Material. For example, it may be reasonable to satisfy the conditions by providing a URI or hyperlink to a resource that includes the required information.
+        If requested by the Licensor, You must remove any of the information required by Section 3(a)(1)(A) to the extent reasonably practicable.
+
+Section 4 – Sui Generis Database Rights.
+
+Where the Licensed Rights include Sui Generis Database Rights that apply to Your use of the Licensed Material:
+
+    for the avoidance of doubt, Section 2(a)(1) grants You the right to extract, reuse, reproduce, and Share all or a substantial portion of the contents of the database, provided You do not Share Adapted Material;
+    if You include all or a substantial portion of the database contents in a database in which You have Sui Generis Database Rights, then the database in which You have Sui Generis Database Rights (but not its individual contents) is Adapted Material; and
+    You must comply with the conditions in Section 3(a) if You Share all or a substantial portion of the contents of the database.
+
+For the avoidance of doubt, this Section 4 supplements and does not replace Your obligations under this Public License where the Licensed Rights include other Copyright and Similar Rights.
+
+Section 5 – Disclaimer of Warranties and Limitation of Liability.
+
+    Unless otherwise separately undertaken by the Licensor, to the extent possible, the Licensor offers the Licensed Material as-is and as-available, and makes no representations or warranties of any kind concerning the Licensed Material, whether express, implied, statutory, or other. This includes, without limitation, warranties of title, merchantability, fitness for a particular purpose, non-infringement, absence of latent or other defects, accuracy, or the presence or absence of errors, whether or not known or discoverable. Where disclaimers of warranties are not allowed in full or in part, this disclaimer may not apply to You.
+    To the extent possible, in no event will the Licensor be liable to You on any legal theory (including, without limitation, negligence) or otherwise for any direct, special, indirect, incidental, consequential, punitive, exemplary, or other losses, costs, expenses, or damages arising out of this Public License or use of the Licensed Material, even if the Licensor has been advised of the possibility of such losses, costs, expenses, or damages. Where a limitation of liability is not allowed in full or in part, this limitation may not apply to You.
+
+    The disclaimer of warranties and limitation of liability provided above shall be interpreted in a manner that, to the extent possible, most closely approximates an absolute disclaimer and waiver of all liability.
+
+Section 6 – Term and Termination.
+
+    This Public License applies for the term of the Copyright and Similar Rights licensed here. However, if You fail to comply with this Public License, then Your rights under this Public License terminate automatically.
+
+    Where Your right to use the Licensed Material has terminated under Section 6(a), it reinstates:
+        automatically as of the date the violation is cured, provided it is cured within 30 days of Your discovery of the violation; or
+        upon express reinstatement by the Licensor.
+    For the avoidance of doubt, this Section 6(b) does not affect any right the Licensor may have to seek remedies for Your violations of this Public License.
+    For the avoidance of doubt, the Licensor may also offer the Licensed Material under separate terms or conditions or stop distributing the Licensed Material at any time; however, doing so will not terminate this Public License.
+    Sections 1, 5, 6, 7, and 8 survive termination of this Public License.
+
+Section 7 – Other Terms and Conditions.
+
+    The Licensor shall not be bound by any additional or different terms or conditions communicated by You unless expressly agreed.
+    Any arrangements, understandings, or agreements regarding the Licensed Material not stated herein are separate from and independent of the terms and conditions of this Public License.
+
+Section 8 – Interpretation.
+
+    For the avoidance of doubt, this Public License does not, and shall not be interpreted to, reduce, limit, restrict, or impose conditions on any use of the Licensed Material that could lawfully be made without permission under this Public License.
+    To the extent possible, if any provision of this Public License is deemed unenforceable, it shall be automatically reformed to the minimum extent necessary to make it enforceable. If the provision cannot be reformed, it shall be severed from this Public License without affecting the enforceability of the remaining terms and conditions.
+    No term or condition of this Public License will be waived and no failure to comply consented to unless expressly agreed to by the Licensor.
+    Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority.
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 2.1

+
+
+
 Version 2.1, February 1999
+
+Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL.  It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users.
+
+This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below.
+
+When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things.
+
+To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it.
+
+For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights.
+
+We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library.
+
+To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others.
+
+Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license.
+
+Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs.
+
+When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library.
+
+We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances.
+
+For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License.
+
+In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system.
+
+Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library.
+
+The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you".
+
+A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables.
+
+The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".)
+
+"Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library.
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does.
+
+1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) The modified work must itself be a software library.
+    b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change.
+    c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License.
+    d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful.
+
+    (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices.
+
+Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy.
+
+This option is useful when you wish to copy part of the code of the Library into a program that is not a library.
+
+4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange.
+
+If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code.
+
+5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License.
+
+However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables.
+
+When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law.
+
+If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.)
+
+Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself.
+
+6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications.
+
+You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things:
+
+    a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.)
+    b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with.
+    c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution.
+    d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place.
+    e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy.
+
+For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute.
+
+7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above.
+    b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it.
+
+10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License.
+
+11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation.
+
+14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Libraries
+
+If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License).
+
+To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the library's name and an idea of what it does.
+Copyright (C) year  name of author
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2.1 of the License, or (at your option) any later version.
+
+This library is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with this library; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the library, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright interest in
+the library `Frob' (a library for tweaking knobs) written
+by James Random Hacker.
+
+signature of Ty Coon, 1 April 1990
+Ty Coon, President of Vice
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 3

+
+
+
Version 3, 29 June 2007
+
+Copyright © 2007 Free Software Foundation, Inc. <https://fsf.org/>
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+
+This version of the GNU Lesser General Public License incorporates the terms and conditions of version 3 of the GNU General Public License, supplemented by the additional permissions listed below.
+0. Additional Definitions.
+
+As used herein, “this License” refers to version 3 of the GNU Lesser General Public License, and the “GNU GPL” refers to version 3 of the GNU General Public License.
+
+“The Library” refers to a covered work governed by this License, other than an Application or a Combined Work as defined below.
+
+An “Application” is any work that makes use of an interface provided by the Library, but which is not otherwise based on the Library. Defining a subclass of a class defined by the Library is deemed a mode of using an interface provided by the Library.
+
+A “Combined Work” is a work produced by combining or linking an Application with the Library. The particular version of the Library with which the Combined Work was made is also called the “Linked Version”.
+
+The “Minimal Corresponding Source” for a Combined Work means the Corresponding Source for the Combined Work, excluding any source code for portions of the Combined Work that, considered in isolation, are based on the Application, and not on the Linked Version.
+
+The “Corresponding Application Code” for a Combined Work means the object code and/or source code for the Application, including any data and utility programs needed for reproducing the Combined Work from the Application, but excluding the System Libraries of the Combined Work.
+1. Exception to Section 3 of the GNU GPL.
+
+You may convey a covered work under sections 3 and 4 of this License without being bound by section 3 of the GNU GPL.
+2. Conveying Modified Versions.
+
+If you modify a copy of the Library, and, in your modifications, a facility refers to a function or data to be supplied by an Application that uses the facility (other than as an argument passed when the facility is invoked), then you may convey a copy of the modified version:
+
+    a) under this License, provided that you make a good faith effort to ensure that, in the event an Application does not supply the function or data, the facility still operates, and performs whatever part of its purpose remains meaningful, or
+    b) under the GNU GPL, with none of the additional permissions of this License applicable to that copy.
+
+3. Object Code Incorporating Material from Library Header Files.
+
+The object code form of an Application may incorporate material from a header file that is part of the Library. You may convey such object code under terms of your choice, provided that, if the incorporated material is not limited to numerical parameters, data structure layouts and accessors, or small macros, inline functions and templates (ten or fewer lines in length), you do both of the following:
+
+    a) Give prominent notice with each copy of the object code that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the object code with a copy of the GNU GPL and this license document.
+
+4. Combined Works.
+
+You may convey a Combined Work under terms of your choice that, taken together, effectively do not restrict modification of the portions of the Library contained in the Combined Work and reverse engineering for debugging such modifications, if you also do each of the following:
+
+    a) Give prominent notice with each copy of the Combined Work that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the Combined Work with a copy of the GNU GPL and this license document.
+    c) For a Combined Work that displays copyright notices during execution, include the copyright notice for the Library among these notices, as well as a reference directing the user to the copies of the GNU GPL and this license document.
+    d) Do one of the following:
+        0) Convey the Minimal Corresponding Source under the terms of this License, and the Corresponding Application Code in a form suitable for, and under terms that permit, the user to recombine or relink the Application with a modified version of the Linked Version to produce a modified Combined Work, in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.
+        1) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (a) uses at run time a copy of the Library already present on the user's computer system, and (b) will operate properly with a modified version of the Library that is interface-compatible with the Linked Version.
+    e) Provide Installation Information, but only if you would otherwise be required to provide such information under section 6 of the GNU GPL, and only to the extent that such information is necessary to install and execute a modified version of the Combined Work produced by recombining or relinking the Application with a modified version of the Linked Version. (If you use option 4d0, the Installation Information must accompany the Minimal Corresponding Source and Corresponding Application Code. If you use option 4d1, you must provide the Installation Information in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.)
+
+5. Combined Libraries.
+
+You may place library facilities that are a work based on the Library side by side in a single library together with other library facilities that are not Applications and are not covered by this License, and convey such a combined library under terms of your choice, if you do both of the following:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities, conveyed under the terms of this License.
+    b) Give prominent notice with the combined library that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+6. Revised Versions of the GNU Lesser General Public License.
+
+The Free Software Foundation may publish revised and/or new versions of the GNU Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library as you received it specifies that a certain numbered version of the GNU Lesser General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that published version or of any later version published by the Free Software Foundation. If the Library as you received it does not specify a version number of the GNU Lesser General Public License, you may choose any version of the GNU Lesser General Public License ever published by the Free Software Foundation.
+
+If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library.
+
+
+
+
+

GNU GENERAL PUBLIC LICENSE - Version 2

+
+
+
 Version 2, June 1991
+
+Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA
+
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too.
+
+When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things.
+
+To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it.
+
+For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights.
+
+We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software.
+
+Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations.
+
+Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all.
+
+The precise terms and conditions for copying, distribution and modification follow.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does.
+
+1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change.
+    b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License.
+    c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code.
+
+4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it.
+
+6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License.
+
+7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation.
+
+10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Programs
+
+If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms.
+
+To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the program's name and an idea of what it does.
+Copyright (C) yyyy  name of author
+
+This program is free software; you can redistribute it and/or
+modify it under the terms of the GNU General Public License
+as published by the Free Software Foundation; either version 2
+of the License, or (at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this when it starts in an interactive mode:
+
+Gnomovision version 69, Copyright (C) year name of author
+Gnomovision comes with ABSOLUTELY NO WARRANTY; for details
+type `show w'.  This is free software, and you are welcome
+to redistribute it under certain conditions; type `show c'
+for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright
+interest in the program `Gnomovision'
+(which makes passes at compilers) written
+by James Hacker.
+
+signature of Ty Coon, 1 April 1989
+Ty Coon, President of Vice
+
+
+
+
+

DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2

+
+
+
            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+                    Version 2, December 2004
+
+ Copyright (C) 2004 Sam Hocevar
+  14 rue de Plaisance, 75014 Paris, France
+ Everyone is permitted to copy and distribute verbatim or modified
+ copies of this license document, and changing it is allowed as long
+ as the name is changed.
+
+            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. You just DO WHAT THE FUCK YOU WANT TO.
+
+
+
+
+

License of Node.js

+
+
+
Node.js is licensed for use as follows:
+
+"""
+Copyright Node.js contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+This license applies to parts of Node.js originating from the
+https://github.com/joyent/node repository:
+
+"""
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+The Node.js license applies to all parts of Node.js that are not externally
+maintained libraries.
+
+The externally maintained libraries used by Node.js are:
+
+- Acorn, located at deps/acorn, is licensed as follows:
+  """
+    Copyright (C) 2012-2018 by various contributors (see AUTHORS)
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- Acorn plugins, located at deps/acorn-plugins, is licensed as follows:
+  """
+    Copyright (C) 2017-2018 by Adrian Heine
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- c-ares, located at deps/cares, is licensed as follows:
+  """
+    Copyright (c) 2007 - 2018, Daniel Stenberg with many contributors, see AUTHORS
+    file.
+
+    Copyright 1998 by the Massachusetts Institute of Technology.
+
+    Permission to use, copy, modify, and distribute this software and its
+    documentation for any purpose and without fee is hereby granted, provided that
+    the above copyright notice appear in all copies and that both that copyright
+    notice and this permission notice appear in supporting documentation, and that
+    the name of M.I.T. not be used in advertising or publicity pertaining to
+    distribution of the software without specific, written prior permission.
+    M.I.T. makes no representations about the suitability of this software for any
+    purpose.  It is provided "as is" without express or implied warranty.
+  """
+
+- ICU, located at deps/icu-small, is licensed as follows:
+  """
+    COPYRIGHT AND PERMISSION NOTICE (ICU 58 and later)
+
+    Copyright © 1991-2019 Unicode, Inc. All rights reserved.
+    Distributed under the Terms of Use in https://www.unicode.org/copyright.html.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of the Unicode data files and any associated documentation
+    (the "Data Files") or Unicode software and any associated documentation
+    (the "Software") to deal in the Data Files or Software
+    without restriction, including without limitation the rights to use,
+    copy, modify, merge, publish, distribute, and/or sell copies of
+    the Data Files or Software, and to permit persons to whom the Data Files
+    or Software are furnished to do so, provided that either
+    (a) this copyright and permission notice appear with all copies
+    of the Data Files or Software, or
+    (b) this copyright and permission notice appear in associated
+    Documentation.
+
+    THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
+    ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT OF THIRD PARTY RIGHTS.
+    IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
+    NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
+    DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+    DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+    TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+    PERFORMANCE OF THE DATA FILES OR SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale,
+    use or other dealings in these Data Files or Software without prior
+    written authorization of the copyright holder.
+
+    ---------------------
+
+    Third-Party Software Licenses
+
+    This section contains third-party software notices and/or additional
+    terms for licensed third-party software components included within ICU
+    libraries.
+
+    1. ICU License - ICU 1.8.1 to ICU 57.1
+
+    COPYRIGHT AND PERMISSION NOTICE
+
+    Copyright (c) 1995-2016 International Business Machines Corporation and others
+    All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, and/or sell copies of the Software, and to permit persons
+    to whom the Software is furnished to do so, provided that the above
+    copyright notice(s) and this permission notice appear in all copies of
+    the Software and that both the above copyright notice(s) and this
+    permission notice appear in supporting documentation.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
+    OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
+    HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY
+    SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER
+    RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
+    CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+    CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale, use
+    or other dealings in this Software without prior written authorization
+    of the copyright holder.
+
+    All trademarks and registered trademarks mentioned herein are the
+    property of their respective owners.
+
+    2. Chinese/Japanese Word Break Dictionary Data (cjdict.txt)
+
+     #     The Google Chrome software developed by Google is licensed under
+     # the BSD license. Other software included in this distribution is
+     # provided under other licenses, as set forth below.
+     #
+     #  The BSD License
+     #  http://opensource.org/licenses/bsd-license.php
+     #  Copyright (C) 2006-2008, Google Inc.
+     #
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     # modification, are permitted provided that the following conditions are met:
+     #
+     #  Redistributions of source code must retain the above copyright notice,
+     # this list of conditions and the following disclaimer.
+     #  Redistributions in binary form must reproduce the above
+     # copyright notice, this list of conditions and the following
+     # disclaimer in the documentation and/or other materials provided with
+     # the distribution.
+     #  Neither the name of  Google Inc. nor the names of its
+     # contributors may be used to endorse or promote products derived from
+     # this software without specific prior written permission.
+     #
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+     # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+     # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+     # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+     # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+     # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+     #
+     #
+     #  The word list in cjdict.txt are generated by combining three word lists
+     # listed below with further processing for compound word breaking. The
+     # frequency is generated with an iterative training against Google web
+     # corpora.
+     #
+     #  * Libtabe (Chinese)
+     #    - https://sourceforge.net/project/?group_id=1519
+     #    - Its license terms and conditions are shown below.
+     #
+     #  * IPADIC (Japanese)
+     #    - http://chasen.aist-nara.ac.jp/chasen/distribution.html
+     #    - Its license terms and conditions are shown below.
+     #
+     #  ---------COPYING.libtabe ---- BEGIN--------------------
+     #
+     #  /*
+     #   * Copyright (c) 1999 TaBE Project.
+     #   * Copyright (c) 1999 Pai-Hsiang Hsiao.
+     #   * All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the TaBE Project nor the names of its
+     #   *   contributors may be used to endorse or promote products derived
+     #   *   from this software without specific prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  /*
+     #   * Copyright (c) 1999 Computer Systems and Communication Lab,
+     #   *                    Institute of Information Science, Academia
+     #       *                    Sinica. All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the Computer Systems and Communication Lab
+     #   *   nor the names of its contributors may be used to endorse or
+     #   *   promote products derived from this software without specific
+     #   *   prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  Copyright 1996 Chih-Hao Tsai @ Beckman Institute,
+     #      University of Illinois
+     #  c-tsai4@uiuc.edu  http://casper.beckman.uiuc.edu/~c-tsai4
+     #
+     #  ---------------COPYING.libtabe-----END--------------------------------
+     #
+     #
+     #  ---------------COPYING.ipadic-----BEGIN-------------------------------
+     #
+     #  Copyright 2000, 2001, 2002, 2003 Nara Institute of Science
+     #  and Technology.  All Rights Reserved.
+     #
+     #  Use, reproduction, and distribution of this software is permitted.
+     #  Any copy of this software, whether in its original form or modified,
+     #  must include both the above copyright notice and the following
+     #  paragraphs.
+     #
+     #  Nara Institute of Science and Technology (NAIST),
+     #  the copyright holders, disclaims all warranties with regard to this
+     #  software, including all implied warranties of merchantability and
+     #  fitness, in no event shall NAIST be liable for
+     #  any special, indirect or consequential damages or any damages
+     #  whatsoever resulting from loss of use, data or profits, whether in an
+     #  action of contract, negligence or other tortuous action, arising out
+     #  of or in connection with the use or performance of this software.
+     #
+     #  A large portion of the dictionary entries
+     #  originate from ICOT Free Software.  The following conditions for ICOT
+     #  Free Software applies to the current dictionary as well.
+     #
+     #  Each User may also freely distribute the Program, whether in its
+     #  original form or modified, to any third party or parties, PROVIDED
+     #  that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear
+     #  on, or be attached to, the Program, which is distributed substantially
+     #  in the same form as set out herein and that such intended
+     #  distribution, if actually made, will neither violate or otherwise
+     #  contravene any of the laws and regulations of the countries having
+     #  jurisdiction over the User or the intended distribution itself.
+     #
+     #  NO WARRANTY
+     #
+     #  The program was produced on an experimental basis in the course of the
+     #  research and development conducted during the project and is provided
+     #  to users as so produced on an experimental basis.  Accordingly, the
+     #  program is provided without any warranty whatsoever, whether express,
+     #  implied, statutory or otherwise.  The term "warranty" used herein
+     #  includes, but is not limited to, any warranty of the quality,
+     #  performance, merchantability and fitness for a particular purpose of
+     #  the program and the nonexistence of any infringement or violation of
+     #  any right of any third party.
+     #
+     #  Each user of the program will agree and understand, and be deemed to
+     #  have agreed and understood, that there is no warranty whatsoever for
+     #  the program and, accordingly, the entire risk arising from or
+     #  otherwise connected with the program is assumed by the user.
+     #
+     #  Therefore, neither ICOT, the copyright holder, or any other
+     #  organization that participated in or was otherwise related to the
+     #  development of the program and their respective officials, directors,
+     #  officers and other employees shall be held liable for any and all
+     #  damages, including, without limitation, general, special, incidental
+     #  and consequential damages, arising out of or otherwise in connection
+     #  with the use or inability to use the program or any product, material
+     #  or result produced or otherwise obtained by using the program,
+     #  regardless of whether they have been advised of, or otherwise had
+     #  knowledge of, the possibility of such damages at any time during the
+     #  project or thereafter.  Each user will be deemed to have agreed to the
+     #  foregoing by his or her commencement of use of the program.  The term
+     #  "use" as used herein includes, but is not limited to, the use,
+     #  modification, copying and distribution of the program and the
+     #  production of secondary products from the program.
+     #
+     #  In the case where the program, whether in its original form or
+     #  modified, was distributed or delivered to or received by a user from
+     #  any person, organization or entity other than ICOT, unless it makes or
+     #  grants independently of ICOT any specific warranty to the user in
+     #  writing, such person, organization or entity, will also be exempted
+     #  from and not be held liable to the user for any such damages as noted
+     #  above as far as the program is concerned.
+     #
+     #  ---------------COPYING.ipadic-----END----------------------------------
+
+    3. Lao Word Break Dictionary Data (laodict.txt)
+
+     #  Copyright (c) 2013 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     # Project: http://code.google.com/p/lao-dictionary/
+     # Dictionary: http://lao-dictionary.googlecode.com/git/Lao-Dictionary.txt
+     # License: http://lao-dictionary.googlecode.com/git/Lao-Dictionary-LICENSE.txt
+     #              (copied below)
+     #
+     #  This file is derived from the above dictionary, with slight
+     #  modifications.
+     #  ----------------------------------------------------------------------
+     #  Copyright (C) 2013 Brian Eugene Wilson, Robert Martin Campbell.
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification,
+     #  are permitted provided that the following conditions are met:
+     #
+     #
+     # Redistributions of source code must retain the above copyright notice, this
+     #  list of conditions and the following disclaimer. Redistributions in
+     #  binary form must reproduce the above copyright notice, this list of
+     #  conditions and the following disclaimer in the documentation and/or
+     #  other materials provided with the distribution.
+     #
+     #
+     # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
+     # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     # OF THE POSSIBILITY OF SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    4. Burmese Word Break Dictionary Data (burmesedict.txt)
+
+     #  Copyright (c) 2014 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     #  This list is part of a project hosted at:
+     #    github.com/kanyawtech/myanmar-karen-word-lists
+     #
+     #  --------------------------------------------------------------------------
+     #  Copyright (c) 2013, LeRoy Benjamin Sharon
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification, are permitted provided that the following conditions
+     #  are met: Redistributions of source code must retain the above
+     #  copyright notice, this list of conditions and the following
+     #  disclaimer.  Redistributions in binary form must reproduce the
+     #  above copyright notice, this list of conditions and the following
+     #  disclaimer in the documentation and/or other materials provided
+     #  with the distribution.
+     #
+     #    Neither the name Myanmar Karen Word Lists, nor the names of its
+     #    contributors may be used to endorse or promote products derived
+     #    from this software without specific prior written permission.
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     #  CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     #  INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     #  MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     #  DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
+     #  BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+     #  EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+     #  TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+     #  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+     #  ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
+     #  TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
+     #  THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+     #  SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    5. Time Zone Database
+
+      ICU uses the public domain data and code derived from Time Zone
+    Database for its time zone support. The ownership of the TZ database
+    is explained in BCP 175: Procedure for Maintaining the Time Zone
+    Database section 7.
+
+     # 7.  Database Ownership
+     #
+     #    The TZ database itself is not an IETF Contribution or an IETF
+     #    document.  Rather it is a pre-existing and regularly updated work
+     #    that is in the public domain, and is intended to remain in the
+     #    public domain.  Therefore, BCPs 78 [RFC5378] and 79 [RFC3979] do
+     #    not apply to the TZ Database or contributions that individuals make
+     #    to it.  Should any claims be made and substantiated against the TZ
+     #    Database, the organization that is providing the IANA
+     #    Considerations defined in this RFC, under the memorandum of
+     #    understanding with the IETF, currently ICANN, may act in accordance
+     #    with all competent court orders.  No ownership claims will be made
+     #    by ICANN or the IETF Trust on the database or the code.  Any person
+     #    making a contribution to the database or code waives all rights to
+     #    future claims in that contribution or in the TZ Database.
+
+    6. Google double-conversion
+
+    Copyright 2006-2011, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- libuv, located at deps/uv, is licensed as follows:
+  """
+    libuv is licensed for use as follows:
+
+    ====
+    Copyright (c) 2015-present libuv project contributors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+    ====
+
+    This license applies to parts of libuv originating from the
+    https://github.com/joyent/libuv repository:
+
+    ====
+
+    Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+
+    ====
+
+    This license applies to all parts of libuv that are not externally
+    maintained libraries.
+
+    The externally maintained libraries used by libuv are:
+
+      - tree.h (from FreeBSD), copyright Niels Provos. Two clause BSD license.
+
+      - inet_pton and inet_ntop implementations, contained in src/inet.c, are
+        copyright the Internet Systems Consortium, Inc., and licensed under the ISC
+        license.
+
+      - stdint-msvc2008.h (from msinttypes), copyright Alexander Chemeris. Three
+        clause BSD license.
+
+      - pthread-fixes.c, copyright Google Inc. and Sony Mobile Communications AB.
+        Three clause BSD license.
+
+      - android-ifaddrs.h, android-ifaddrs.c, copyright Berkeley Software Design
+        Inc, Kenneth MacKay and Emergya (Cloud4all, FP7/2007-2013, grant agreement
+        n° 289016). Three clause BSD license.
+  """
+
+- llhttp, located at deps/llhttp, is licensed as follows:
+  """
+    This software is licensed under the MIT License.
+
+    Copyright Fedor Indutny, 2018.
+
+    Permission is hereby granted, free of charge, to any person obtaining a
+    copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to permit
+    persons to whom the Software is furnished to do so, subject to the
+    following conditions:
+
+    The above copyright notice and this permission notice shall be included
+    in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+    OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+    NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+    DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+    OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+    USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- OpenSSL, located at deps/openssl, is licensed as follows:
+  """
+    Copyright (c) 1998-2019 The OpenSSL Project.  All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    1. Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+    2. Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+
+    3. All advertising materials mentioning features or use of this
+    software must display the following acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
+
+    4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
+    endorse or promote products derived from this software without
+    prior written permission. For written permission, please contact
+    openssl-core@openssl.org.
+
+    5. Products derived from this software may not be called "OpenSSL"
+    nor may "OpenSSL" appear in their names without prior written
+    permission of the OpenSSL Project.
+
+    6. Redistributions of any form whatsoever must retain the following
+    acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
+
+    THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
+    EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+    PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
+    ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+    NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+    LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+    HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+    STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+    ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+    OF THE POSSIBILITY OF SUCH DAMAGE.
+    ====================================================================
+
+    This product includes cryptographic software written by Eric Young
+    (eay@cryptsoft.com).  This product includes software written by Tim
+    Hudson (tjh@cryptsoft.com).
+  """
+
+- Punycode.js, located at lib/punycode.js, is licensed as follows:
+  """
+    Copyright Mathias Bynens <https://mathiasbynens.be/>
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- V8, located at deps/v8, is licensed as follows:
+  """
+    This license applies to all parts of V8 that are not externally
+    maintained libraries.  The externally maintained libraries used by V8
+    are:
+
+      - PCRE test suite, located in
+        test/mjsunit/third_party/regexp-pcre/regexp-pcre.js.  This is based on the
+        test suite from PCRE-7.3, which is copyrighted by the University
+        of Cambridge and Google, Inc.  The copyright notice and license
+        are embedded in regexp-pcre.js.
+
+      - Layout tests, located in test/mjsunit/third_party/object-keys.  These are
+        based on layout tests from webkit.org which are copyrighted by
+        Apple Computer, Inc. and released under a 3-clause BSD license.
+
+      - Strongtalk assembler, the basis of the files assembler-arm-inl.h,
+        assembler-arm.cc, assembler-arm.h, assembler-ia32-inl.h,
+        assembler-ia32.cc, assembler-ia32.h, assembler-x64-inl.h,
+        assembler-x64.cc, assembler-x64.h, assembler-mips-inl.h,
+        assembler-mips.cc, assembler-mips.h, assembler.cc and assembler.h.
+        This code is copyrighted by Sun Microsystems Inc. and released
+        under a 3-clause BSD license.
+
+      - Valgrind client API header, located at src/third_party/valgrind/valgrind.h
+        This is released under the BSD license.
+
+      - The Wasm C/C++ API headers, located at third_party/wasm-api/wasm.{h,hh}
+        This is released under the Apache license. The API's upstream prototype
+        implementation also formed the basis of V8's implementation in
+        src/wasm/c-api.cc.
+
+    These libraries have their own licenses; we recommend you read them,
+    as their terms may differ from the terms below.
+
+    Further license information can be found in LICENSE files located in
+    sub-directories.
+
+    Copyright 2014, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- SipHash, located at deps/v8/src/third_party/siphash, is licensed as follows:
+  """
+    SipHash reference C implementation
+
+    Copyright (c) 2016 Jean-Philippe Aumasson <jeanphilippe.aumasson@gmail.com>
+
+    To the extent possible under law, the author(s) have dedicated all
+    copyright and related and neighboring rights to this software to the public
+    domain worldwide. This software is distributed without any warranty.
+  """
+
+- zlib, located at deps/zlib, is licensed as follows:
+  """
+    zlib.h -- interface of the 'zlib' general purpose compression library
+    version 1.2.11, January 15th, 2017
+
+    Copyright (C) 1995-2017 Jean-loup Gailly and Mark Adler
+
+    This software is provided 'as-is', without any express or implied
+    warranty.  In no event will the authors be held liable for any damages
+    arising from the use of this software.
+
+    Permission is granted to anyone to use this software for any purpose,
+    including commercial applications, and to alter it and redistribute it
+    freely, subject to the following restrictions:
+
+    1. The origin of this software must not be misrepresented; you must not
+    claim that you wrote the original software. If you use this software
+    in a product, an acknowledgment in the product documentation would be
+    appreciated but is not required.
+    2. Altered source versions must be plainly marked as such, and must not be
+    misrepresented as being the original software.
+    3. This notice may not be removed or altered from any source distribution.
+
+    Jean-loup Gailly        Mark Adler
+    jloup@gzip.org          madler@alumni.caltech.edu
+  """
+
+- npm, located at deps/npm, is licensed as follows:
+  """
+    The npm application
+    Copyright (c) npm, Inc. and Contributors
+    Licensed on the terms of The Artistic License 2.0
+
+    Node package dependencies of the npm application
+    Copyright (c) their respective copyright owners
+    Licensed on their respective license terms
+
+    The npm public registry at https://registry.npmjs.org
+    and the npm website at https://www.npmjs.com
+    Operated by npm, Inc.
+    Use governed by terms published on https://www.npmjs.com
+
+    "Node.js"
+    Trademark Joyent, Inc., https://joyent.com
+    Neither npm nor npm, Inc. are affiliated with Joyent, Inc.
+
+    The Node.js application
+    Project of Node Foundation, https://nodejs.org
+
+    The npm Logo
+    Copyright (c) Mathias Pettersson and Brian Hammond
+
+    "Gubblebum Blocky" typeface
+    Copyright (c) Tjarda Koster, https://jelloween.deviantart.com
+    Used with permission
+
+    --------
+
+    The Artistic License 2.0
+
+    Copyright (c) 2000-2006, The Perl Foundation.
+
+    Everyone is permitted to copy and distribute verbatim copies
+    of this license document, but changing it is not allowed.
+
+    Preamble
+
+    This license establishes the terms under which a given free software
+    Package may be copied, modified, distributed, and/or redistributed.
+    The intent is that the Copyright Holder maintains some artistic
+    control over the development of that Package while still keeping the
+    Package available as open source and free software.
+
+    You are always permitted to make arrangements wholly outside of this
+    license directly with the Copyright Holder of a given Package.  If the
+    terms of this license do not permit the full use that you propose to
+    make of the Package, you should contact the Copyright Holder and seek
+    a different licensing arrangement.
+
+    Definitions
+
+        "Copyright Holder" means the individual(s) or organization(s)
+        named in the copyright notice for the entire Package.
+
+        "Contributor" means any party that has contributed code or other
+        material to the Package, in accordance with the Copyright Holder's
+        procedures.
+
+        "You" and "your" means any person who would like to copy,
+        distribute, or modify the Package.
+
+        "Package" means the collection of files distributed by the
+        Copyright Holder, and derivatives of that collection and/or of
+        those files. A given Package may consist of either the Standard
+        Version, or a Modified Version.
+
+        "Distribute" means providing a copy of the Package or making it
+        accessible to anyone else, or in the case of a company or
+        organization, to others outside of your company or organization.
+
+        "Distributor Fee" means any fee that you charge for Distributing
+        this Package or providing support for this Package to another
+        party.  It does not mean licensing fees.
+
+        "Standard Version" refers to the Package if it has not been
+        modified, or has been modified only in ways explicitly requested
+        by the Copyright Holder.
+
+        "Modified Version" means the Package, if it has been changed, and
+        such changes were not explicitly requested by the Copyright
+        Holder.
+
+        "Original License" means this Artistic License as Distributed with
+        the Standard Version of the Package, in its current version or as
+        it may be modified by The Perl Foundation in the future.
+
+        "Source" form means the source code, documentation source, and
+        configuration files for the Package.
+
+        "Compiled" form means the compiled bytecode, object code, binary,
+        or any other form resulting from mechanical transformation or
+        translation of the Source form.
+
+    Permission for Use and Modification Without Distribution
+
+    (1)  You are permitted to use the Standard Version and create and use
+    Modified Versions for any purpose without restriction, provided that
+    you do not Distribute the Modified Version.
+
+    Permissions for Redistribution of the Standard Version
+
+    (2)  You may Distribute verbatim copies of the Source form of the
+    Standard Version of this Package in any medium without restriction,
+    either gratis or for a Distributor Fee, provided that you duplicate
+    all of the original copyright notices and associated disclaimers.  At
+    your discretion, such verbatim copies may or may not include a
+    Compiled form of the Package.
+
+    (3)  You may apply any bug fixes, portability changes, and other
+    modifications made available from the Copyright Holder.  The resulting
+    Package will still be considered the Standard Version, and as such
+    will be subject to the Original License.
+
+    Distribution of Modified Versions of the Package as Source
+
+    (4)  You may Distribute your Modified Version as Source (either gratis
+    or for a Distributor Fee, and with or without a Compiled form of the
+    Modified Version) provided that you clearly document how it differs
+    from the Standard Version, including, but not limited to, documenting
+    any non-standard features, executables, or modules, and provided that
+    you do at least ONE of the following:
+
+        (a)  make the Modified Version available to the Copyright Holder
+        of the Standard Version, under the Original License, so that the
+        Copyright Holder may include your modifications in the Standard
+        Version.
+
+        (b)  ensure that installation of your Modified Version does not
+        prevent the user installing or running the Standard Version. In
+        addition, the Modified Version must bear a name that is different
+        from the name of the Standard Version.
+
+        (c)  allow anyone who receives a copy of the Modified Version to
+        make the Source form of the Modified Version available to others
+        under
+
+            (i)  the Original License or
+
+            (ii)  a license that permits the licensee to freely copy,
+            modify and redistribute the Modified Version using the same
+            licensing terms that apply to the copy that the licensee
+            received, and requires that the Source form of the Modified
+            Version, and of any works derived from it, be made freely
+            available in that license fees are prohibited but Distributor
+            Fees are allowed.
+
+    Distribution of Compiled Forms of the Standard Version
+    or Modified Versions without the Source
+
+    (5)  You may Distribute Compiled forms of the Standard Version without
+    the Source, provided that you include complete instructions on how to
+    get the Source of the Standard Version.  Such instructions must be
+    valid at the time of your distribution.  If these instructions, at any
+    time while you are carrying out such distribution, become invalid, you
+    must provide new instructions on demand or cease further distribution.
+    If you provide valid instructions or cease distribution within thirty
+    days after you become aware that the instructions are invalid, then
+    you do not forfeit any of your rights under this license.
+
+    (6)  You may Distribute a Modified Version in Compiled form without
+    the Source, provided that you comply with Section 4 with respect to
+    the Source of the Modified Version.
+
+    Aggregating or Linking the Package
+
+    (7)  You may aggregate the Package (either the Standard Version or
+    Modified Version) with other packages and Distribute the resulting
+    aggregation provided that you do not charge a licensing fee for the
+    Package.  Distributor Fees are permitted, and licensing fees for other
+    components in the aggregation are permitted. The terms of this license
+    apply to the use and Distribution of the Standard or Modified Versions
+    as included in the aggregation.
+
+    (8) You are permitted to link Modified and Standard Versions with
+    other works, to embed the Package in a larger work of your own, or to
+    build stand-alone binary or bytecode versions of applications that
+    include the Package, and Distribute the result without restriction,
+    provided the result does not expose a direct interface to the Package.
+
+    Items That are Not Considered Part of a Modified Version
+
+    (9) Works (including, but not limited to, modules and scripts) that
+    merely extend or make use of the Package, do not, by themselves, cause
+    the Package to be a Modified Version.  In addition, such works are not
+    considered parts of the Package itself, and are not subject to the
+    terms of this license.
+
+    General Provisions
+
+    (10)  Any use, modification, and distribution of the Standard or
+    Modified Versions is governed by this Artistic License. By using,
+    modifying or distributing the Package, you accept this license. Do not
+    use, modify, or distribute the Package, if you do not accept this
+    license.
+
+    (11)  If your Modified Version has been derived from a Modified
+    Version made by someone other than you, you are nevertheless required
+    to ensure that your Modified Version complies with the requirements of
+    this license.
+
+    (12)  This license does not grant you the right to use any trademark,
+    service mark, tradename, or logo of the Copyright Holder.
+
+    (13)  This license includes the non-exclusive, worldwide,
+    free-of-charge patent license to make, have made, use, offer to sell,
+    sell, import and otherwise transfer the Package with respect to any
+    patent claims licensable by the Copyright Holder that are necessarily
+    infringed by the Package. If you institute patent litigation
+    (including a cross-claim or counterclaim) against any party alleging
+    that the Package constitutes direct or contributory patent
+    infringement, then this Artistic License to you shall terminate on the
+    date that such litigation is filed.
+
+    (14)  Disclaimer of Warranty:
+    THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS
+    IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR
+    NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL
+    LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL
+    BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+    DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF
+    ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+    --------
+  """
+
+- GYP, located at tools/gyp, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- inspector_protocol, located at tools/inspector_protocol, is licensed as follows:
+  """
+    // Copyright 2016 The Chromium Authors. All rights reserved.
+    //
+    // Redistribution and use in source and binary forms, with or without
+    // modification, are permitted provided that the following conditions are
+    // met:
+    //
+    //    * Redistributions of source code must retain the above copyright
+    // notice, this list of conditions and the following disclaimer.
+    //    * Redistributions in binary form must reproduce the above
+    // copyright notice, this list of conditions and the following disclaimer
+    // in the documentation and/or other materials provided with the
+    // distribution.
+    //    * Neither the name of Google Inc. nor the names of its
+    // contributors may be used to endorse or promote products derived from
+    // this software without specific prior written permission.
+    //
+    // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- jinja2, located at tools/inspector_protocol/jinja2, is licensed as follows:
+  """
+    Copyright (c) 2009 by the Jinja Team, see AUTHORS for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+
+        * The names of the contributors may not be used to endorse or
+          promote products derived from this software without specific
+          prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- markupsafe, located at tools/inspector_protocol/markupsafe, is licensed as follows:
+  """
+    Copyright (c) 2010 by Armin Ronacher and contributors.  See AUTHORS
+    for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms of the software as well
+    as documentation, with or without modification, are permitted provided
+    that the following conditions are met:
+
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+
+    * The names of the contributors may not be used to endorse or
+      promote products derived from this software without specific
+      prior written permission.
+
+    THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+    CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
+    NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+    OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+    EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+    PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+    PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+    NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+    SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+    DAMAGE.
+  """
+
+- cpplint.py, located at tools/cpplint.py, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- ESLint, located at tools/node_modules/eslint, is licensed as follows:
+  """
+    Copyright JS Foundation and other contributors, https://js.foundation
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- babel-eslint, located at tools/node_modules/babel-eslint, is licensed as follows:
+  """
+    Copyright (c) 2014-2016 Sebastian McKenzie <sebmck@gmail.com>
+
+    MIT License
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- gtest, located at test/cctest/gtest, is licensed as follows:
+  """
+    Copyright 2008, Google Inc.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+        * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- nghttp2, located at deps/nghttp2, is licensed as follows:
+  """
+    The MIT License
+
+    Copyright (c) 2012, 2014, 2015, 2016 Tatsuhiro Tsujikawa
+    Copyright (c) 2012, 2014, 2015, 2016 nghttp2 contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- node-inspect, located at deps/node-inspect, is licensed as follows:
+  """
+    Copyright Node.js contributors. All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+  """
+
+- large_pages, located at src/large_pages, is licensed as follows:
+  """
+     Copyright (C) 2018 Intel Corporation
+
+     Permission is hereby granted, free of charge, to any person obtaining a copy
+     of this software and associated documentation files (the "Software"),
+     to deal in the Software without restriction, including without limitation
+     the rights to use, copy, modify, merge, publish, distribute, sublicense,
+     and/or sell copies of the Software, and to permit persons to whom
+     the Software is furnished to do so, subject to the following conditions:
+
+     The above copyright notice and this permission notice shall be included
+     in all copies or substantial portions of the Software.
+
+     THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+     OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+     FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
+     THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES
+     OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+     ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
+     OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- caja, located at lib/internal/freeze_intrinsics.js, is licensed as follows:
+  """
+     Adapted from SES/Caja - Copyright (C) 2011 Google Inc.
+     Copyright (C) 2018 Agoric
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+  """
+
+- brotli, located at deps/brotli, is licensed as follows:
+  """
+    Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- HdrHistogram, located at deps/histogram, is licensed as follows:
+  """
+    The code in this repository code was Written by Gil Tene, Michael Barker,
+    and Matt Warren, and released to the public domain, as explained at
+    http://creativecommons.org/publicdomain/zero/1.0/
+
+    For users of this code who wish to consume it under the "BSD" license
+    rather than under the public domain or CC0 contribution text mentioned
+    above, the code found under this directory is *also* provided under the
+    following license (commonly referred to as the BSD 2-Clause License). This
+    license does not detract from the above stated release of the code into
+    the public domain, and simply represents an additional license granted by
+    the Author.
+
+    -----------------------------------------------------------------------------
+    ** Beginning of "BSD 2-Clause License" text. **
+
+     Copyright (c) 2012, 2013, 2014 Gil Tene
+     Copyright (c) 2014 Michael Barker
+     Copyright (c) 2014 Matt Warren
+     All rights reserved.
+
+     Redistribution and use in source and binary forms, with or without
+     modification, are permitted provided that the following conditions are met:
+
+     1. Redistributions of source code must retain the above copyright notice,
+        this list of conditions and the following disclaimer.
+
+     2. Redistributions in binary form must reproduce the above copyright notice,
+        this list of conditions and the following disclaimer in the documentation
+        and/or other materials provided with the distribution.
+
+     THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+     AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+     IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+     ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+     LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+     INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+     CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+     THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- node-heapdump, located at src/heap_utils.cc, is licensed as follows:
+  """
+    ISC License
+
+    Copyright (c) 2012, Ben Noordhuis <info@bnoordhuis.nl>
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    === src/compat.h src/compat-inl.h ===
+
+    ISC License
+
+    Copyright (c) 2014, StrongLoop Inc.
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- rimraf, located at lib/internal/fs/rimraf.js, is licensed as follows:
+  """
+    The ISC License
+
+    Copyright (c) Isaac Z. Schlueter and Contributors
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+    IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- uvwasi, located at deps/uvwasi, is licensed as follows:
+  """
+    MIT License
+
+    Copyright (c) 2019 Colin Ihrig and Contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in all
+    copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+    SOFTWARE.
+  """
+
+
+
+
+

MICROSOFT SOFTWARE LICENSE TERMS

+
+
+
MICROSOFT VISUAL STUDIO CODE
+
+These license terms are an agreement between you and Microsoft Corporation (or based on where you live, one of its affiliates). They apply to the software named above. The terms also apply to any Microsoft services or updates for the software, except to the extent those have different terms.
+
+IF YOU COMPLY WITH THESE LICENSE TERMS, YOU HAVE THE RIGHTS BELOW.
+
+    1. INSTALLATION AND USE RIGHTS.
+        a. General. You may use any number of copies of the software to develop and test your applications, including deployment within your internal corporate network.
+        b. Demo use. The uses permitted above include use of the software in demonstrating your applications.
+        c. Third Party Components. The software may include third party components with separate legal notices or governed by other agreements, as may be described in the ThirdPartyNotices file accompanying the software.
+        d. Extensions. The software gives you the option to download other Microsoft and third party software packages from our extension marketplace or package managers. Those packages are under their own licenses, and not this agreement. Microsoft does not distribute, license or provide any warranties for any of the third party packages. By accessing or using our extension marketplace, you agree to the extension marketplace terms located at https://aka.ms/vsmarketplace-ToU.
+    2. DATA.
+        a. Data Collection. The software may collect information about you and your use of the software, and send that to Microsoft. Microsoft may use this information to provide services and improve our products and services. You may opt-out of many of these scenarios, but not all, as described in the product documentation located at https://code.visualstudio.com/docs/supporting/faq#_how-to-disable-telemetry-reporting. There may also be some features in the software that may enable you and Microsoft to collect data from users of your applications. If you use these features, you must comply with applicable law, including providing appropriate notices to users of your applications together with Microsoft’s privacy statement. Our privacy statement is located at https://go.microsoft.com/fwlink/?LinkID=824704. You can learn more about data collection and use in the help documentation and our privacy statement. Your use of the software operates as your consent to these practices.
+        b. Processing of Personal Data. To the extent Microsoft is a processor or subprocessor of personal data in connection with the software, Microsoft makes the commitments in the European Union General Data Protection Regulation Terms of the Online Services Terms to all customers effective May 25, 2018, at https://go.microsoft.com/?linkid=9840733.
+    3. UPDATES. The software may periodically check for updates and download and install them for you. You may obtain updates only from Microsoft or authorized sources. Microsoft may need to update your system to provide you with updates. You agree to receive these automatic updates without any additional notice. Updates may not include or support all existing software features, services, or peripheral devices. If you do not want automatic updates, you may turn them off by following the instructions in the documentation at https://go.microsoft.com/fwlink/?LinkID=616397.
+    4. FEEDBACK. If you give feedback about the software to Microsoft, you give to Microsoft, without charge, the right to use, share and commercialize your feedback in any way and for any purpose. You will not give feedback that is subject to a license that requires Microsoft to license its software or documentation to third parties because we include your feedback in them. These rights survive this agreement.
+    5. SCOPE OF LICENSE. This license applies to the Visual Studio Code product. Source code for Visual Studio Code is available at https://github.com/Microsoft/vscode under the MIT license agreement. The software is licensed, not sold. This agreement only gives you some rights to use the software. Microsoft reserves all other rights. Unless applicable law gives you more rights despite this limitation, you may use the software only as expressly permitted in this agreement. In doing so, you must comply with any technical limitations in the software that only allow you to use it in certain ways. You may not
+        reverse engineer, decompile or disassemble the software, or otherwise attempt to derive the source code for the software except and solely to the extent required by third party licensing terms governing use of certain open source components that may be included in the software;
+        remove, minimize, block or modify any notices of Microsoft or its suppliers in the software;
+        use the software in any way that is against the law;
+        share, publish, rent or lease the software, or provide the software as a stand-alone offering for others to use.
+    6. SUPPORT SERVICES. Because this software is “as is,” we may not provide support services for it.
+    7. ENTIRE AGREEMENT. This agreement, and the terms for supplements, updates, Internet-based services and support services that you use, are the entire agreement for the software and support services.
+    8. EXPORT RESTRICTIONS. You must comply with all domestic and international export laws and regulations that apply to the software, which include restrictions on destinations, end-users, and end use. For further information on export restrictions, see https://www.microsoft.com/exporting.
+    9. APPLICABLE LAW. If you acquired the software in the United States, Washington law applies to interpretation of and claims for breach of this agreement, and the laws of the state where you live apply to all other claims. If you acquired the software in any other country, its laws apply.
+    10. CONSUMER RIGHTS; REGIONAL VARIATIONS. This agreement describes certain legal rights. You may have other rights, including consumer rights, under the laws of your state or country. Separate and apart from your relationship with Microsoft, you may also have rights with respect to the party from which you acquired the software. This agreement does not change those other rights if the laws of your state or country do not permit it to do so. For example, if you acquired the software in one of the below regions, or mandatory country law applies, then the following provisions apply to you:
+        a. Australia. You have statutory guarantees under the Australian Consumer Law and nothing in this agreement is intended to affect those rights.
+        b. Canada. If you acquired this software in Canada, you may stop receiving updates by turning off the automatic update feature, disconnecting your device from the Internet (if and when you re-connect to the Internet, however, the software will resume checking for and installing updates), or uninstalling the software. The product documentation, if any, may also specify how to turn off updates for your specific device or software.
+        c. Germany and Austria.
+            Warranty. The properly licensed software will perform substantially as described in any Microsoft materials that accompany the software. However, Microsoft gives no contractual guarantee in relation to the licensed software.
+            Limitation of Liability. In case of intentional conduct, gross negligence, claims based on the Product Liability Act, as well as, in case of death or personal or physical injury, Microsoft is liable according to the statutory law.
+
+        Subject to the foregoing clause (ii), Microsoft will only be liable for slight negligence if Microsoft is in breach of such material contractual obligations, the fulfillment of which facilitate the due performance of this agreement, the breach of which would endanger the purpose of this agreement and the compliance with which a party may constantly trust in (so-called "cardinal obligations"). In other cases of slight negligence, Microsoft will not be liable for slight negligence.
+    11. DISCLAIMER OF WARRANTY. The software is licensed “as-is.” You bear the risk of using it. Microsoft gives no express warranties, guarantees or conditions. To the extent permitted under your local laws, Microsoft excludes the implied warranties of merchantability, fitness for a particular purpose and non-infringement.
+
+    12. LIMITATION ON AND EXCLUSION OF DAMAGES. You can recover from Microsoft and its suppliers only direct damages up to U.S. $5.00. You cannot recover any other damages, including consequential, lost profits, special, indirect or incidental damages.
+
+    This limitation applies to (a) anything related to the software, services, content (including code) on third party Internet sites, or third party applications; and (b) claims for breach of contract, breach of warranty, guarantee or condition, strict liability, negligence, or other tort to the extent permitted by applicable law.
+
+    It also applies even if Microsoft knew or should have known about the possibility of the damages. The above limitation or exclusion may not apply to you because your state or country may not allow the exclusion or limitation of incidental, consequential or other damages.
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/devonfw-ide-usage.html b/docs/devonfw.github.io/1.0/ide.wiki/devonfw-ide-usage.html new file mode 100644 index 00000000..7acc0ebe --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/devonfw-ide-usage.html @@ -0,0 +1,2169 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Usage

+
+ +
+

This section explains the usage of devonfw-ide according to your role:

+
+
+
    +
  • +

    Everybody should read and follow the usage for a developer.

    +
  • +
  • +

    In case you want to administrate devonfw-ide settings for your project, you should also read the usage for the ide-admin.

    +
  • +
+
+
+

Developer

+
+

As a developer you are supported to setup your IDE automated and fast while you can have a nice cup of coffee (after you provided settings-URL and accepted the license). +You only need the settings URL from your ide-admin. +Experienced developers can directly call setup «settings-URL». +Otherwise if you just call setup (e.g. by double-clicking it), you can enter it when you are prompted for Settings URL (using copy&paste to avoid typos).

+
+
+

Note: devonfw-ide supports autocompletion (since 2021.04.001). Currently this only works in bash (on windows use git bash). Simply type devon and hit [Tab] to get completion.

+
+
+
+

Update

+
+

To update your IDE (if instructed by your ide-admin), you only need to run the following command:

+
+
+
+
devon ide update
+
+
+
+

Please note that windows is using file-locking which can have ugly side-effects. +To be safe, you should have your IDE tools shut down before invoking the above update command. +E.g. if a tool needs to be updated, the old installation folder will be moved to a backup and the new version is installed on top. +If there are windows file locks in place this can fail and mess up things. +You can still delete the corresponding installation from your software folder and rerun devon ide update if you ran into this error.

+
+
+
+

Working with multiple workspaces

+
+

If you are working on different branches in parallel you typically want to use multiple workspaces.

+
+
+
    +
  1. +

    Go to the workspaces folder in your ${DEVON_IDE_HOME} and create a new folder with the name of your choice (e.g. release2.1).

    +
  2. +
  3. +

    Check out (git clone …​) the according projects and branch into that workspace folder.

    +
  4. +
  5. +

    Open a shell in that new workspace folder (cd to it) and according to your IDE run e.g. eclipse, vscode, or intellij to create your workspace and launch the IDE. You can also add the parameter create-script to the IDE commandlet in order to create a launch-script for your IDE.

    +
  6. +
+
+
+

You can have multiple instances of eclipse running for each workspace in parallel. To distinguish these instances you will find the workspace name in the title of eclipse.

+
+
+
+

Admin

+
+

You can easily customize and configure devonfw-ide for the requirements of your project. +In order to do so, you need to create your own project-specific settings git repository and provide the URL to all developers for the setup. +With tools such as gitlab, bitbucket or github every developer can easily propose changes and improvements. +However, we suggest that one team member is responsible for ensuring that everything stays consistent and works. +We will call this person the ide-admin of your project.

+
+
+

The following are the suggested step-by-step instructions how an ide-admin should prepare devonfw-ide for his new project:

+
+
+
    +
  1. +

    Fork ide-settings to a git repository specific for your project (e.g. a new project in the gitlab of your production-line instance). In case you are using github, all you need to do is use the Fork button. In other cases simply create a new and empty git repository and clone this to your machine. Then add the default ide-settings as origin, fetch and pull from it:

    +
    +
    +
    git remote add upstream https://github.com/devonfw/ide-settings.git
    +git fetch upstream
    +git pull upstream master
    +git push
    +
    +
    +
    +

    Now you should have a full fork as a copy of the settings git repo with all its history that is ready for upstream merges.

    +
    +
  2. +
  3. +

    Study the structure of this git repository to understand where to find which configuration.

    +
  4. +
  5. +

    Study the configuration and understand that general settings can be tweaked in the toplevel devon.properties file of your settings git repository.

    +
  6. +
  7. +

    Configure the tools and their versions for your project. Here is an example:

    +
    +
    +
    DEVON_IDE_TOOLS=(java mvn eclipse)
    +ECLIPSE_VERSION=2020-06
    +##use e.g. 8u242b08 for Java 8
    +#JAVA_VERSION=8u242b08
    +JAVA_VERSION=11.0.5_10
    +MAVEN_VERSION=3.6.2
    +
    +
    +
    +

    This way you will take over control of the tools and their versions for every developer in your project team and ensure that things get reproducible.

    +
    +
  8. +
  9. +

    In case you need a proprietary or unsupported tool, you can study how to include custom tools.

    +
  10. +
  11. +

    In case you have very restrictive policies about downloading tools from the internet, you can create and configure a software repository for your project or company.

    +
  12. +
  13. +

    Some of the tools (especially the actual IDEs) allow extensions via plugins. You can customize them to your needs for eclipse, VS code, or intelliJ.

    +
  14. +
  15. +

    In your settings git repository you will find a projects folder. Here you will find configurations files for every git project relevant for your actual project. Feel free to create new projects for your needs and delete the devonfw specific default projects. The projects documentation will explain you how to do this.

    +
  16. +
  17. +

    For every IDE you will also find an according folder in your settings git repository. Here are the individual configuration settings for that IDE. You can change them by directly editing the according configuration files directly with a text-editor in your settings git repository. However, this is a really complex way and will take you a lot of time to find the right file and property to tweak for your actual need. Instead we suggest to study +how to customize IDE specific settings.

    +
  18. +
  19. +

    You may also create new sub-folders in your settings git repository and put individual things according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth to share in your team. However, to share and maintain knowledge we recommend to use a wiki instead.

    +
  20. +
  21. +

    You may want to customize the Eclipse spellchecker dictionary for your project and your language.

    +
  22. +
+
+
+

All described in the above steps (except the first one) can be used to manage and update the configuration during the project lifecycle. +However, when you have done changes especially in a larger project, please consider the following best-practices to avoid that a large team gets blocked by a non-functional IDE:

+
+
+
    +
  • +

    Commit your changes to a feature-branch.

    +
  • +
  • +

    First test the changes yourself.

    +
  • +
  • +

    If all works as expected, pick a pilot user of the team to test the changes from the feature branch (go to settings folder, git fetch, git checkout -t origin/feature/«name», devon ide update).

    +
  • +
  • +

    Only after that works well for a couple of days, inform the entire team to update.

    +
  • +
+
+
+
+

Announce changes to your team

+
+

In order to roll out the perfectly configured devonfw-ide to your project initially or when new members join, you only have to provide the Settings URL to the developers of your team. +You can also provide a specific branch with Settings URL#branch to use variations of common settings or to test new settings before making them public to the team.

+
+
+

After you changed and tested your settings git repository (main branch), you only need to announce this to your developers (e.g. via email or some communication tool) so that they can run devon ide update and automatically get up-to-date with the latest changes (see update).

+
+
+

In case you want to go to a new version of devonfw-ide itself, developers have to call devon ide update scripts.

+
+
+ +
+
+

Configuration

+
+

The devonfw-ide aims to be highly configurable and flexible. The configuration of the devon command and environment variables takes place via devon.properties files. The following list shows these configuration files in the order they are loaded so files can override variables from files above in the list:

+
+
+
    +
  1. +

    build in defaults (for JAVA_VERSION, ECLIPSE_PLUGINS, etc.)

    +
  2. +
  3. +

    ~/devon.properties - user specific global defaults (on windows in %USERPROFILE%/devon.properties)

    +
  4. +
  5. +

    scripts/devon.properties - defaults provided by devonfw-ide. Never directly modify this file!

    +
  6. +
  7. +

    devon.properties - vendor variables for custom distributions of devonfw-ide-scripts, may e.g. tweak SETTINGS_PATH or predefine SETTINGS_URL.

    +
  8. +
  9. +

    settings/devon.properties (${SETTINGS_PATH}/devon.properties) - project specific configurations from settings.

    +
  10. +
  11. +

    workspaces/${WORKSPACE}/devon.properties - optional workspace specific configurations (especially helpful in projects using docker).

    +
  12. +
  13. +

    conf/devon.properties - user specific configurations (e.g. M2_REPO=~/.m2/repository). During setup this file is created by copying a template from ${SETTINGS_PATH}/devon/conf/devon.properties.

    +
  14. +
  15. +

    settings/projects/*.properties - properties to configure project checkout and import

    +
  16. +
+
+
+
+

devon.properties

+
+

The devon.properties files allow to define environment variables in a simple and OS independent way:

+
+
+
    +
  • +

    # comments begin with a hash sign (#) and are ignored

    +
  • +
  • +

    variable_name=variable_value with space etc.

    +
  • +
  • +

    variable_name=${predefined_variable}/folder_name

    +
    +

    variable values can refer to other variables that are already defined, which will be resolved to their value. You have to use ${…​} syntax to make it work on all platforms (never use %…​%, $…​, or $(…​) syntax in devon.properties files).

    +
    +
  • +
  • +

    export exported_variable=this value will be exported in bash, in windows CMD the export prefix is ignored

    +
  • +
  • +

    variable_name=

    +
    +

    this will unset the specified variable

    +
    +
  • +
  • +

    variable_name=~/some/path/and.file

    +
    +

    tilde is resolved to your personal home directory on any OS including windows.

    +
    +
  • +
  • +

    array_variable=(value1 value2 value3)

    +
    +

    This will only work properly in bash worlds but as no arrays are used in CMD world of devonfw-ide it does not hurt on windows.

    +
    +
  • +
  • +

    Please never surround values with quotes (var="value")

    +
  • +
  • +

    This format is similar to Java *.properties but does not support advanced features such as unicode literals, multi-lined values, etc.

    +
  • +
+
+
+

In order to know what to configure, have a look at the available variables.

+
+
+

Please only tweak configurations that you need to change and take according responsibility. There is a price to pay for flexibility, which means you have to be careful what you do.

+
+
+

Further, you can configure maven via conf/settings.xml. To configure your IDE such as eclipse or vscode you can tweak the settings.

+
+
+ +
+
+

Variables

+
+

The devonfw-ide defines a set of standard variables to your environment for configuration via variables[.bat] files. +These environment variables are described by the following table. +Those variables printed bold are also exported in your shell (except for windows CMD that does not have such concept). Variables with the value - are not set by default but may be set via configuration to override defaults. +Please note that we are trying to minimize any potential side-effect from devonfw-ide to the outside world by reducing the number of variables and only exporting those that are required.

+
+
+
Variables of devonfw-ide
+

|== == == == == == == == == == == = +|Variable|Value|Meaning +|DEVON_IDE_HOME|e.g. /projects/my-project|The top level directory of your devonfw-ide structure. +|PATH|$PATH:$DEVON_IDE_HOME/software/java:…​|Your system path is adjusted by devon command. +|DEVON_HOME_DIR|~|The platform independent home directory of the current user. In some edge-cases (e.g. in cygwin) this differs from ~ to ensure a central home directory for the user on a single machine in any context or environment. +|DEVON_IDE_TOOLS|(java mvn node npm)|List of tools that should be installed and upgraded by default for your current IDE. +|DEVON_IDE_CUSTOM_TOOLS|-|List of custom tools that should be installed additionally. See software for further details. +|DEVON_CREATE_START_SCRIPTS|(eclipse vscode)|List of IDEs that shall be used by developers in the project and therefore start-scripts are created on setup. +|DEVON_OLD_PATH|…​|A "backup" of PATH before it was extended by devon to allow recovering it. Internal variable that should never be set or tweaked. +|WORKSPACE|main|The workspace you are currently in. Defaults to main if you are not inside a workspace. Never touch this variable in any variables file. +|WORKSPACE_PATH|$DEVON_IDE_HOME/workspaces/$WORKSPACE|Absolute path to current workspace. Never touch this variable in any variables file. +|JAVA_HOME|$DEVON_IDE_HOME/software/java|Path to JDK +|SETTINGS_PATH|$DEVON_IDE_HOME/settings|Path to your settings. To keep oasp4j-ide legacy behaviour set this to $DEVON_IDE_HOME/workspaces/main/development/settings. +|M2_REPO|$DEVON_IDE_HOME/conf/.m2/repository|Path to your local maven repository. For projects without high security demands, you may change this to the maven default ~/.m2/repository and share your repository among multiple projects. +|MAVEN_HOME|$DEVON_IDE_HOME/software/maven|Path to Maven +|MAVEN_OPTS|-Xmx512m -Duser.home=$DEVON_IDE_HOME/conf|Maven options +|DEVON_SOFTWARE_REPOSITORY|-|Project specific or custom software-repository. 
+|DEVON_SOFTWARE_PATH|-|Globally shared user-specific local software installation location. +|ECLIPSE_VMARGS|-Xms128M -Xmx768M -XX:MaxPermSize=256M|JVM options for Eclipse +|deprecated: ECLIPSE_PLUGINS|-|Array with "feature groups" and "update site URLs" to customize required eclipse plugins. Deprecated - see Eclipse plugins. +|«TOOL»_VERSION|-|The version of the tool «TOOL» to install and use (e.g. ECLIPSE_VERSION or MAVEN_VERSION). +|EXTRA_JAVA_VERSION|-|An additional (newer) version of java that will be used to run java-based IDEs (e.g. eclipse or intellij). +|«TOOL»_BUILD_OPTS|e.g.clean install|The arguments provided to the build-tool «TOOL» in order to run a build. +|«TOOL»_RELEASE_OPTS|e.g.clean deploy -Dchangelist= -Pdeploy|The arguments provided to the build-tool «TOOL» in order to perform a release build. +|DEVON_IDE_TRACE||If value is not an empty string, the devonfw-ide scripts will trace each script line executed. For bash two lines output: before and again after expansion. ATTENTION: This is not a regular variable working via devon.properties. Instead manually do export DEVON_IDE_TRACE=true in bash or DEVON_IDE_TRACE=true in windows CMD before running a devon command to get a trace log that you can provide to experts in order to trace down a bug and see what went wrong. +|== == == == == == == == == == == =

+
+
+ +
+
+

Devon CLI

+
+

The devonfw-ide is shipped with a central command devon. The setup will automatically register this command so it is available in any shell on your system. This page describes the Command Line Interface (CLI) of this command.

+
+
+
+

Devon

+
+

Without any argument the devon command will determine your DEVON_IDE_HOME and setup your environment variables automatically. In case you are not inside of a devonfw-ide folder the command will echo a message and do nothing.

+
+
+
+
[/]$ devon
+You are not inside a devon IDE installation: /
+[/]$ cd /projects/my-project/workspaces/test/my-git-repo
+[my-git-repo]$ devon
+devonfw-ide environment variables have been set for /projects/my-project in workspace main
+[my-git-repo]$ echo $DEVON_IDE_HOME
+/projects/devon
+[my-git-repo]$ echo $JAVA_HOME
+/projects/my-project/software/java
+
+
+
+
+

Commandlets

+
+

The devon command supports a pluggable set of commandlets. Such commandlet is provided as first argument to the devon command and may take additional arguments:

+
+
+

devon «commandlet» [«arg»]*

+
+
+

Technically, a commandlet is a bash script located in $DEVON_IDE_HOME/scripts/command. So if you want to integrate another tool with devonfw-ide we are awaiting your pull-request. +Every commandlet takes the following generic arguments:

+
+
+
Generic arguments of every commandlet
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|-b or --batch |run in non-interactive mode (do not ask any questions). +|-q or --quiet |be quiet and avoid output. +|== == == == == == == == == == == =

+
+
+
+

Command-wrapper

+
+

For many commandlets the devon command acts as a wrapper. +Similar to mvnw or gradlew you can use it as a proxy command. +Therefore devon mvn clean install will be the same as mvn clean install. +The benefit when using devon as wrapper is that it will even work when the command (mvn, node, npm, etc.) is not on your PATH variable or even not yet installed. +We see the main benefit in this for writing portable scripts that you may commit to your git repository and that will then run everywhere and will lazily install the required tools on the fly. +In your daily usage you can and surely should avoid to always type devon as prefix to every command. +However, when you automate and want to avoid "command not found" errors, you can simply prefix the command with devon.

+
+
+
+

Commandlet overview

+
+

The following commandlets are currently available:

+
+
+ +
+ +
+
build
+
+

The build commandlet is an abstraction of build systems like maven, gradle, yarn, npm, etc. +It will auto-detect your build-system (via existence of files like pom.xml, package.json, etc.). According to this detection, it will simply delegate to the according commandlet of the specific build system. If that build-system is not yet available it will be downloaded and installed automatically.

+
+
+

So devon build allows users to build any project without bothering about the build-system. Further specific build options can be configured per project. This makes devon build a universal part of every definition of done. Before pushing your changes, please always run the following command to verify the build:

+
+
+

devon build

+
+
+

You may also supply additional arguments as devon build «args». This will simply delegate these arguments to the detected build command (e.g. call mvn «args»).

+
+ +
+
+
Docker
+
+

The Docker commandlet allows to install and use Docker. +On Windows WSL 2 (Windows Subsystem for Linux) has to be installed properly as a prerequisite.

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon docker «args») are explained by the following table:

+
+
+
Usage of devon docker
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Docker (install and verify) as per above flow. +|«args» |call docker with the specified arguments. Call docker help for details or use docker directly as preferred («args»). +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

The Docker commandlet will install Docker automatically. +Please note that besides the sandbox concept of devonfw-ide this is a global installation on your system. +When uninstalling devonfw-ide, you may have to manually uninstall Docker and Kubernetes if you do not need it anymore.

+
+
+
+
requirements
+
+

Running Docker and especially Kubernetes on your machine in the background will require quite some resources. +This will allocate at least 2GB of additional RAM.

+
+
+

You will need at least 8GB of total RAM while we recommend to use 16GB+.

+
+
+

You may also tune and scale it to your needs. +When using Docker Desktop (Windows or MacOS) simply go to the resources tab in the settings. +It will depend on your usage frequency if you want to have it running in the background all the time. +This is a balance between resource utilization and convenience. +If you use Docker and Kubernetes on your local machine on a daily basis this makes sense.

+
+
+

In case you only use Docker rarely, you can save resources by stopping it when not needed after it has been installed.

+
+
+
+
Windows and macOS
+
+

To enable or disable autostart, you can launch Docker Desktop on Windows or MacOS and go to the Preferences (gear icon in the title bar). Then in the General tab you can check or uncheck the option Start Docker Desktop when you login (see also here). When autostart is disabled and you launch Docker Desktop it will notice and ask you to start the service or do this automatically for you. +On Windows you can also manually tweak this:

+
+
+
    +
  • +

    Hit [windows][r]

    +
  • +
  • +

    Enter services.msc

    +
  • +
  • +

    Confirm with OK

    +
  • +
  • +

    In the services app search for the Docker Desktop Service in the list and select it.

    +
  • +
  • +

    Now you can start or stop the service by clicking on the according link text.

    +
  • +
  • +

    Also when right clicking on Docker Desktop Service and selecting Options from the context-menu, you can change the start type to automatic or manual.

    +
  • +
+
+
+
+
== Mac M1
+
+

In case you have a new Mac with M1 CPU, we automatically download and install the according ARM version of Docker Desktop for macOS. +However, if you use Docker and search for images you may end up with errors like:

+
+
+
+
docker: no matching manifest for linux/arm64/v8 in the manifest list entries.
+
+
+
+

So with M1 CPU you may need to add --platform linux/x86_64 as option to your Docker command to find the expected container image.

+
+
+
+
Linux
+
+

There is no Docker Desktop for Linux. +As Docker initially comes from the Linux world, it is easy to set it up on a Linux machine and use it from the commandline. +Therefore we do not install a GUI for you in case you are a Linux user. +In case you need a GUI for Docker and Kubernetes on Linux you can choose from the following options:

+
+
+ +
+
+
+
usage
+
+

Once installed via setup, you can run Docker directly from any shell of your OS. +Run docker help to get started and use the online documentation and resources on the web to get familiar with Docker. +It is not our intention to repeat this here.

+
+
+

Please note that the docker commandlet is a command wrapper.

+
+ +
+
+
eclipse
+
+

The eclipse commandlet allows to install, configure, and launch the Eclipse IDE. +To launch eclipse for your current workspace and devonfw-ide installation simply run: +devon eclipse

+
+
+

You may also supply additional arguments as devon eclipse «args». These are explained by the following table:

+
+
+
Usage of devon eclipse
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then the command will be invoked for each workspace +|setup |setup Eclipse (install or update) +|add-plugin «id» [«url»]|install an additional plugin +|run |launch Eclipse (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add |reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+

There are variables that can be used for Eclipse. These are explained by the following table:

+
+
+
Variables of devonfw-ide for Eclipse
+

|== == == == == == == == == == == = +|Variable|Meaning +|ECLIPSE_VERSION|The version of the tool Eclipse to install and use. +|ECLIPSE_EDITION_TYPE|The edition of the tool Eclipse to install and use. You can choose between Java for standard edition or JEE for enterprise edition. +|*EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with Eclipse you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder eclipse/plugins (click on this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example tmterminal.properties:

+
+
+
+
plugin_url=http://download.eclipse.org/tm/terminal/marketplace
+plugin_id=org.eclipse.tm.terminal.feature.feature.group,org.eclipse.tm.terminal.view.feature.feature.group,org.eclipse.tm.terminal.control.feature.feature.group,org.eclipse.tm.terminal.connector.ssh.feature.feature.group,org.eclipse.tm.terminal.connector.telnet.feature.feature.group
+plugin_active=true
+
+
+
+

The variables are defined as follows:

+
+
+
    +
  • +

    plugin_url defines the URL of the Eclipse update site of the plugin

    +
  • +
  • +

    plugin_id defines the feature group ID(s) to install. To install multiple features/plugins provide a comma-separated list of IDs. If you want to customize devonfw-ide with new plugins you can first install them manually and then go to About Eclipse > Installation Details then you can filter for your newly installed plugin and find the values in the Id column. Copy & paste them from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise, developers can still install the plugin manually via devon eclipse add-plugin «plugin-name» from the config file settings/eclipse/plugins/«plugin-name».properties. See the settings/eclipse/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. This e.g. applies for devstyle that allows a real dark mode for eclipse and tunes the theming and layout of Eclipse in general. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually.

+
+
+

As the maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
legacy plugin config
+
+

For downward compatibility we still support the deprecated legacy configuration if the folder settings/eclipse/plugins does not exist: +The project configuration typically defines the plugins that will be installed via ECLIPSE_PLUGINS variable. Otherwise defaults from this eclipse commandlet will apply. +Be aware that this comes at your own risk and sometimes plugins can conflict and break your IDE.

+
+
+

Here is an example how a project can configure the plugins in its devon.properties inside the settings:

+
+
+
+
ECLIPSE_PLUGINS=("AnyEditTools.feature.group" "https://raw.githubusercontent.com/iloveeclipse/plugins/latest/" "com.ess.regexutil.feature.group" "http://regex-util.sourceforge.net/update/")
+
+
+
+

For the above listed plugins you can also use the short form:

+
+
+
+
ECLIPSE_PLUGINS=("anyedit" "" "regexutil" "")
+
+
+
+

Of course you may also mix plugin IDs with fully qualified plugins.

+
+
+
+
dictionary
+
+

Eclipse already comes with a build-in spellchecker. This is very helpful when writing comments. The default settings of devonfw-ide ship with a project specific dictionary file and according configurations to enable spellchecking and configuring this dictionary. +When typing JavaDoc, inline comments or other texts the spellchecker will underline unknown words in red. +If your cursor is located at such a word you can hit [Ctrl][1] to get a context menu with additional options. +There you can either choose similar correct words to correct a typo or you may even add the word (maybe a new business term) to your local dictionary.

+
+
+
+"Eclipse spellchecker” +
+
+
+

In the latter case, you should commit the changes to your settings so that it will be available to your entire team. +For further details about committing changes to the settings please consult the admin usage.

+
+
+
+
non-english dictionary
+
+

In case your project has to write documentation or text in languages other than English, you might want to prefill your project dictionary for that language. +Here we collect a list of such dictionaries that you can download and merge into your project dictionary:

+
+
+ +
+ +
+
+
gradle
+
+

The gradle commandlet allows to install, configure, and launch gradle. It is similar to gradle-wrapper. So calling devon gradle «args» is more or less the same as calling gradle «args» but with the benefit that the version of gradle preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon gradle «args») are explained by the following table:

+
+
+
Usage of devon gradle
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup gradle (install and verify), configurable via GRADLE_VERSION +|«args» |run gradle with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
help
+
+

The help commandlet provides help for the CLI.

+
+
+
Usage of devon help
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |Print general help +|«command» |Print help for the commandlet «command». +|== == == == == == == == == == == =

+
+
+

Please note that devon help «command» will do the same as devon «command» help.

+
+ +
+
+
ide
+
+

The ide commandlet manages your devonfw-ide. +You need to supply additional arguments as devon ide «args». These are explained by the following table:

+
+
+
Usage of devon ide
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup [«SETTINGS_URL»] |setup devonfw-ide (cloning the settings from the given URL, optionally from specific branch URL#branch) +|update [«package»] |update devonfw-ide +|update scripts [to «version»] |update devonfw-ide +|uninstall |uninstall devonfw-ide (if you want to remove it entirely from your system) +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

Run devon ide setup to initially setup your devonfw-ide. It is recommended to run the setup script in the top-level directory ($DEVON_IDE_HOME). However, in case you want to skip some system specific integration, you may also run this command directly instead. The setup only needs to be called once after a new devonfw-ide instance has been created. It will follow this process:

+
+
+
    +
  • +

    install the devon command on your system (if not already installed).

    +
  • +
  • +

    clone the settings (you may provide a git URL directly as argument or you will be prompted for it).

    +
  • +
  • +

    install all required software from DEVON_IDE_TOOLS variable (if not already installed).

    +
  • +
  • +

    configure all these tools

    +
  • +
  • +

    create IDE launch scripts

    +
  • +
  • +

    perform OS specific system integration such as Windows Explorer integration (only done from setup script and not from devon ide setup)

    +
  • +
+
+
+
+
update
+
+

Run devon ide update to update your devonfw-ide. This will check for updates and install them automatically. +The optional extra argument («package») behaves as follows:

+
+
+
    +
  • +

    scripts: check if a new version of devonfw-ide-scripts is available. If so it will be downloaded and installed. As Windows is using file-locks, it is tricky to update a script while it is executed. Therefore, we update the scripts folder as an async background task and have to abort further processing at this point on windows as a workaround.

    +
  • +
  • +

    settings: update the settings (git pull).

    +
  • +
  • +

    software: update the software (e.g. if versions have changed via scripts or settings update).

    +
  • +
  • +

    projects: update the projects (checkout and import repositories into workspace/IDEs).

    +
  • +
  • +

    all: do all the above sequentially.

    +
  • +
  • +

    none: settings and software are updated by default if no extra argument is given. This is the regular usage for project developers. Only perform an update of scripts when you are requested to do so by your technical lead. Bigger projects especially need to test updates before rolling them out to the entire team. If developers always updated the latest release of the scripts which is released globally, some project functionality would break causing problems and extra efforts in the teams.

    +
  • +
+
+
+

In order to update to a specific version of scripts an explicit version can be specified after the additional to argument:

+
+
+
+
devon ide update scripts to 3.1.99
+
+
+
+

The above example will update to the exact version 3.1.99 no matter if this is an upgrade or a downgrade of your current installed version. +If you just use devon ide update scripts then the latest available version will be installed. In larger teams it is recommended to communicate exact version updates to avoid that a new release can interfere and break anything. Therefore, some pilot user will test a new version for the entire team and, only after a successful test, they will communicate to the team to update to that exact version by providing the complete command as in the above example.

+
+
+
+
uninstall
+
+

We hope you love devonfw-ide. However, if you don’t and want to get rid of it entirely and completely remove all integration, you can use this command:

+
+
+
+
devon ide uninstall
+
+
+
+

This will remove devonfw-ide from all central places of your OS (user home directory such as scripts, .devon, .bashrc, as well as windows registry, etc.). +However, it will not remove your current installations (or shared software folder). So after running this uninstall, simply remove your DEVON_IDE_HOME directory of all devonfw-ide installations and potential shared software folder. You may also want to clean up your ~/Downloads directory from files downloaded by devonfw-ide. We do not automate this as deleting a directory is a very simple manual step and we do not want to take responsibility for severe data loss if your workspaces contained valuable work.

+
+ +
+
+
intellij
+
+

The intellij commandlet allows to install, configure, and launch IntelliJ. +To launch IntelliJ for your current workspace and devonfw-ide installation, simply run: +devon intellij

+
+
+

You may also supply additional arguments as devon intellij «args». These are explained by the following table:

+
+
+
Usage of devon intellij
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then to command will be invoked for each workspace +|setup |setup IntelliJ (install or update) +|add-plugin «id»|install an additional plugin +|run |launch IntelliJ (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+

There are variables that can be used for IntelliJ. These are explained by the following table:

+
+
+
Variables of devonfw-ide for intelliJ
+

|== == == == == == == == == == == = +|Variable|Meaning +|INTELLIJ_VERSION|The version of the tool IntelliJ to install and use. +|INTELLIJ_EDITION_TYPE|The edition of the tool IntelliJ to install and use. The value C means Community edition and the value U means Ultimate edition. The Ultimate edition requires a license. The user has to buy the license separately and it is not part of devonfw-ide. The devonfw-ide only supports download and installation. +|*EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with IntelliJ you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder intellij/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example scala.properties:

+
+
+
+
plugin_id=org.intellij.scala
+plugin_active=false
+
+
+
+

The variables are defined as following:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins use the search on https://plugins.jetbrains.com/idea_ce to find the plugin of your choice. Select the tab Versions and click on a version in the list. The plugin ID is displayed in the upper right corner. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon intellij add-plugin «plugin_id».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of IntelliJ. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+ +
+
+
ionic
+
+

The ionic commandlet allows to install, configure, and launch ionic (ionic-cli). Calling devon ionic «args» is more or less the same as calling ionic «args» but with some advanced features and ensuring that ionic is properly set up for your project.

+
+
+

The arguments (devon ionic «args») are explained by the following table:

+
+
+
Usage of devon ionic
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup yarn (install and verify), configurable via YARN_VERSION +|create |Create a new devon4ng ionic project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ionic with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
jasypt
+
+

The jasypt commandlet allows to install jasypt and encrypt or decrypt secrets using strong encryption given a secure masterpassword. See also devon4j password encryption guide for further details.

+
+
+

The arguments (devon jasypt «args») are explained by the following table:

+
+
+
Usage of devon jasypt
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup jasypt (install and verify), configurable via JASYPT_VERSION +|encrypt |Encrypt a secret with a masterpassword +|decrypt |Decrypt an encrypted secret with a masterpassword +|== == == == == == == == == == == =

+
+
+
+
example
+
+
+
devon jasypt encrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: secret
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: secret
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+devon jasypt decrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+secret
+
+
+ +
+
+
java
+
+

The java commandlet allows to install and setup Java. Also it supports devon4j. +The arguments (devon java «args») are explained by the following table:

+
+
+
Usage of devon java
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup OpenJDK (install or update and verify), configurable via JAVA_VERSION (e.g. 8u242b08 or 11.0.6_10) +|create «args» |create a new Java project based on devon4j application template. If a single argument is provided, this is the package name and is automatically split into groupId and artifactId. Use -DdbType=«db» to choose the database (hana, oracle, mssql, postgresql, mariadb, mysql, h2, hsqldb). Any option starting with dash is passed as is. +|migrate [from «version»] [single] |migrate a devon4j project to the latest version. If for some reasons the current devonfw version can not be auto-detected you may provide it manually after the 'from' argument. Also the 'single' option allows to migrate only to the next available version. +|cicd «args» |generate cicd files for the current devon4java project +|== == == == == == == == == == == =

+
+
+

Since 2021.12.003 an extra version of Java can be configured via EXTRA_JAVA_VERSION variable. This can be used to launch your IDE with a different (newer) version of Java but keeping the build of your project stable.

+
+
+
+
create
+
+

Examples for create a new devon4j application:

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create -Dversion=0.0.1-alpha1 com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 0.0.1-alpha1, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group demo-app
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId demo-app, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp -DartifactId=demo-app -DdbType=hana
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId demo-app, version 1.0.0-SNAPSHOT, and SAP hana database.

+
+
+
+
devon java create com.example.domain.myapp -DdbType=oracle -Dversion=0.0.1 com.example.group -Dbatch=batch
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 0.0.1, oracle database, and with a batch module.

+
+
+
+
migrate
+
+

Example for migrating a devon4j application:

+
+
+
+
devon java migrate
+
+
+
+

Will migrate current devon4j application to the latest version available.

+
+ +
+
+
jenkins
+
+

The jenkins commandlet allows to install, configure, and launch Jenkins.

+
+
+
Usage of devon jenkins
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup Jenkins (install and verify) +|start |Start your local Jenkins server +|stop |Stop your local Jenkins server +|add |Add current project as CI job to your local Jenkins +|== == == == == == == == == == == =

+
+ +
+
+
Kubernetes
+
+

The kubectl commandlet allows to install and use kubernetes. +On Windows, WSL 2 (Windows Subsystem for Linux) has to be installed properly as a prerequisite. +The setup on Windows will then install kubernetes with K3D. K3D will create a cluster with a single node with a default name as "devonfw-cluster"

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon kubectl «args») are explained by the following table:

+
+
+
Usage of devon kubectl
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Kubernetes (install and verify) as per above flow. +|«args» |call kubectl with the specified arguments. Call kubectl help for details or use kubectl directly as preferred. +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

Please note that on Windows and macOS, Kubernetes support comes together with Docker Desktop that is installed via docker commandlet. +When you have installed and launched Docker Desktop, you can once enable Kubernetes in the Preferences.

+
+
+

On Linux however, Kubernetes is installed separately by this commandlet.

+
+
+
+
usage
+
+

Once installed via setup, you can run kubectl directly from any shell of your OS directly. +Run kubectl help to get started and use the online documentations and resources on the web to get familiar with Kubernetes. +It is not our intention to repeat this here.

+
+
+

Please note that the kubectl commandlet is a command wrapper.

+
+ +
+
+
mvn
+
+

The mvn commandlet allows to install, configure, and launch maven. It is similar to maven-wrapper and mdub. So calling devon mvn «args» is more or less the same as calling mvn «args» but with the benefit that the version of maven preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon mvn «args») are explained by the following table:

+
+
+
Usage of devon mvn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via MVN_BUILD_OPTS +|setup |setup Maven (install and verify), configurable via MAVEN_VERSION +|get-version |Print the version of your current project. Will consolidate the version for multi-module projects ignoring dev[-SNAPSHOT] versions and fail on mixed versions. +|set-version «nv» [«cv»] |Set the version of your current project to «nv» (assuming your current version is «cv»). +|check-no-snapshots |Check if no «version»-SNAPSHOT dependencies are used. +|check-top-level-project |Check if you are running on a top-level project or fail if in a module or no maven project at all. +|release |Start a clean deploy release build, configurable via MVN_RELEASE_OPTS +|«args» |run maven with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
ng
+
+

The ng commandlet allows to install, configure, and launch ng (angular-cli). Calling devon ng «args» is more or less the same as calling ng «args» but with some advanced features and ensuring that ng is properly set up for your project.

+
+
+

The arguments (devon ng «args») are explained by the following table:

+
+
+
Usage of devon ng
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup yarn (install and verify), configurable via NG_VERSION +|create |Create a new devon4ng project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ng with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
node
+
+

The node commandlet allows to install and setup node.js. +The arguments (devon node «args») are explained by the following table:

+
+
+
Usage of devon node
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup node.js (install and verify), configurable via NODE_VERSION +|create «name» [«args»] | create a new devon4node application (same as devon4node new) +|generate «s» [«args»] | generate devon4node components using the schematic «s» (same as devon4node generate) +|db «c» [«args»] | execute a TypeORM command «c» (same as devon4node db) +|cicd «args» |generate cicd files for the current devon4node project +|«args» | call NodeJS with the specified arguments +|== == == == == == == == == == == =

+
+ +
+
+
npm
+
+

The npm commandlet allows to install, configure, and launch npm. Calling devon npm «args» is more or less the same as calling npm «args» but with the benefit that the version of npm preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon npm «args») are explained by the following table:

+
+
+
Usage of devon npm
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via NPM_BUILD_OPTS +|setup |setup NPM (install and verify), configurable via NPM_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |Start a clean deploy release build, configurable via NPM_RELEASE_OPTS +|«args» |run NPM with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
release
+
+

Create a release in a standardized way including the following steps:

+
+
+
    +
  • +

    verify the current project (no local changes, etc.)

    +
  • +
  • +

    warn if «version»-SNAPSHOT dependencies are used

    +
  • +
  • +

    determine «version» (if currently «version»-SNAPSHOT) and print out release information.

    +
  • +
  • +

    ask user for confirmation

    +
  • +
  • +

    bump release to «version» in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    create annotated tag for your release as release/«version»

    +
  • +
  • +

    invoke deployment on build-system

    +
  • +
  • +

    set next version as («version»+1)-SNAPSHOT in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    push your changes

    +
  • +
+
+
+
Usage of devon release
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|…​ |any optional argument will directly be passed to the actual command to build the deployment +|== == == == == == == == == == == =

+
+
+
+
Build-Tools
+
+

This release commandlet utilizes the build commandlet to support multiple build-tools such as maven, gradle, or npm. Each of those commandlets should respect the variable «TOOL»_RELEASE_OPTS to customize the parameters for the release build.

+
+
+

So e.g. if a pom.xml is detected, maven will be used. In this example the variable MVN_RELEASE_OPTS is used that defaults to clean deploy -Dchangelist= -Pdeploy. +If you provide a specific argument this will be passed additionally. +So if you invoke the command devon release -P myProfile, the above step invoke deployment on build-system would technically call this:

+
+
+
+
mvn clean deploy -Dchangelist= -Pdeploy -P myProfile
+
+
+
+

Please also note that it is very tricky to determine and modify the version of a project in a fully generic way. +Even though we try our best to support different scenarios, we can not ensure this is working for edge-cases. +Therefore, we strongly encourage to follow best practices such as ci-friendly maven. +Further, sticking to the defaults and follow the devonfw standard to name the profile for custom goals in deployment simply deploy is recommended.

+
+ +
+
+
sonar
+
+

The sonar commandlet allows to install, configure, and launch SonarQube.

+
+
+
Usage of devon sonar
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup SonarQube (install and verify) +|start |Start your local SonarQube server +|stop |Stop your local SonarQube server +|analyze |Analyze current project with SonarQube +|== == == == == == == == == == == =

+
+ +
+
+
vscode
+
+

The vscode commandlet allows to install, configure, and launch Visual Studio Code. +To launch VSCode for your current workspace and devonfw-ide installation, simply run: +devon vscode

+
+
+

You may also supply additional arguments as devon vscode «args». These are explained by the following table:

+
+
+
Usage of devon vscode
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then to command will be invoked for each workspace +|setup |setup VSCode (install or update) +|add-plugin «id»|install an additional plugin (extension) +|run |launch VSCode (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with VS Code you need plugins (called extensions in VS Code). Of course devonfw-ide can automate this for you: +In your settings git repository create a folder vscode/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example devonfw-extension-pack.properties:

+
+
+
+
plugin_id=devonfw.devonfw-extension-pack
+plugin_active=true
+
+
+
+

The variables are defined as following:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins click on Extensions at the bottom of the left navigation icon bar in VS code. Then use the search to find the plugin of your choice. If you click on it the plugin ID is displayed in grey beside the official title at the top of the plugin details page. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon vscode add-plugin «plugin-name» from the config file settings/vscode/plugins/«plugin-name».properties. See the settings/vscode/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of VS code. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
cleaning plugins on update
+
+

If you want to strictly manage the plugins for VS code in your project, you can create or edit the file settings/vscode/plugins in your settings and add this variable:

+
+
+
+
clean_plugins_on_update=true
+
+
+
+

This will wipe all plugins when an update of VS code is performed (e.g. via devon ide update) and reinstall all configured plugins. This gives you more control over the governance of the plugins and allows to remove a plugin later during the project lifecycle. However, this will delete all manually installed plugins automatically without asking.

+
+ +
+
+
yarn
+
+

The yarn commandlet allows to install, configure, and launch yarn. Calling devon yarn «args» is more or less the same as calling yarn «args» but with the benefit that the version of yarn preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon yarn «args») are explained by the following table:

+
+
+
Usage of devon yarn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via YARN_BUILD_OPTS +|setup |setup yarn (install and verify), configurable via YARN_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |start a clean deploy release build, configurable via YARN_RELEASE_OPTS +|«args» |run yarn with the given arguments («args») +|== == == == == == == == == == == =

+
+
+ +
+
+
+

Structure

+
+

The directory layout of your devonfw-ide will look like this:

+
+
+
File structure of your devonfw-ide
+
+
/ projects (or C:\Projects, etc.)
+└──/ my-project ($DEVON_IDE_HOME)
+    ├──/ conf
+    ├──/ log
+    ├──/ scripts
+    ├──/ settings
+    ├──/ software
+    ├──/ system
+    ├──/ updates
+    ├──/ workspaces
+    ├── setup
+    ├── setup.bat
+    └── devon-ide-doc.pdf
+
+
+
+

The elements of the above structure are described in the individual sections. As they are hyperlinks you can simply click on them to get more details.

+
+ +
+
conf
+
+

This folder contains configurations for your IDE:

+
+
+
File structure of the conf folder
+
+
/ conf
+├──/ .m2
+│  ├──/ repository
+│  │  ├──/ ant
+│  │  ├──/ ...
+│  │  └──/ zw
+│  ├── settings-security.xml
+│  └── settings.xml
+├──/ .sonar
+├──/ ...
+└── variables
+
+
+
+

The .m2 folder is used for configurations of maven. It contains the local repository folder used as cache for artifacts downloaded and installed by maven (see also maven repositories). +Further, there are two configuration files for maven:

+
+
+
    +
  • +

    settings.xml initialized from a template from your devonfw-ide [settings]. You may customize this to your needs (configuring HTTP proxies, credentials, or other user-specific settings). Secrets can be specified as $[«variable.name»] and will be prompted, encrypted and replaced automatically during the setup (unless in batch mode). Please note that this process is skipped in batch mode and also if you use the default settings URL (for simplicity of testing). To make use of this feature simply fork or copy the settings to your own git repo. In case your credentials have changed or you made a typo, you can simply redo this step by first moving your ${DEVON_IDE_HOME}/conf/.m2/settings.xml file to a temporary folder and then calling devon mvn setup.

    +
  • +
  • +

    settings-security.xml is auto-generated for you by devonfw-ide with a random password. This should make it easier for devonfw-ide users to use password encryption and never add passwords in plain text for better security.

    +
  • +
+
+
+

Finally, there is a file variables for the user-specific configuration of devonfw-ide.

+
+ +
+
+
log
+
+

The log directory is used to store log files e.g. for the IDE configurator. You may look here for debug information if something goes wrong.

+
+ +
+
+
scripts
+
+

This directory is the heart of the devonfw-ide and contains the required scripts.

+
+
+
File structure of the scripts folder
+
+
/scripts
+├──/ command
+│  ├── build
+│  ├── docker
+│  ├── eclipse
+│  ├── gradle
+│  ├── help
+│  ├── ide
+│  ├── intellij
+│  ├── ionic
+│  ├── jasypt
+│  ├── java
+│  ├── jenkins
+│  ├── kubectl
+│  ├── mvn
+│  ├── ng
+│  ├── node
+│  ├── npm
+│  ├── project
+│  ├── release
+│  ├── sonar
+│  ├── vscode
+│  └── yarn
+├── devon
+├── devon.bat
+├── environment-project
+├── environment-project.bat
+├── functions
+└── devon.properties
+
+
+
+

The command folder contains the commandlets. +The devon script is the key command line interface for devonfw-ide. +There is also devon.bat that can be used in cmd or PowerShell. +As the devon CLI can be used as a global command on your computer from any directory and gets installed centrally, it aims to be stable, minimal, and lightweight. +The key logic to set up the environment variables is therefore in a separate script environment-project and its Windows variant environment-project.bat inside this scripts folder. +The file functions contains a collection of reusable bash functions. +These are sourced and used by the commandlets. +Finally the devon.properties file contains defaults for the general configuration of devonfw-ide.

+
+ +
+
+
settings
+
+

The devonfw-ide requires settings with configuration templates for the arbitrary tools.

+
+
+

To get an initial set of these settings we provide the default ide-settings as an initial package. These are also released so you can download the latest stable or any history version at maven central.

+
+
+

To test devonfw-ide or for very small projects you can also use these the latest default settings (just hit return when setup is asking for the Settings URL). +However, for collaborative projects we strongly encourage you to distribute and maintain the settings via a dedicated and project specific git repository. +This gives you the freedom to control and manage the tools with their versions and configurations during the project lifecycle. +Therefore simply follow the admin usage guide.

+
+
+
+
Structure
+
+

The settings folder (see SETTINGS_PATH) has to follow this file structure:

+
+
+
File structure of settings
+
+
/settings
+├──/ devon
+│  ├──/ conf
+│  │  ├──/ .m2
+│  │  │  └── settings.xml
+│  │  ├──/ npm
+│  │  │  └── .npmrc
+│  │  └── devon.properties
+├──/ eclipse
+│  ├──/ workspace
+│  │  ├──/ setup
+│  │  └──/ update
+│  ├── lifecycle-mapping-metadata.xml
+│  └── project.dictionary
+├──/ ...
+├──/ sonarqube
+│  └──/ profiles
+│     ├── Devon-C#.xml
+│     ├── ...
+│     └── Devon-XML.xml
+├──/ vscode
+│  └──/ workspace
+│     ├──/ setup
+│     └──/ update
+└── devon.properties
+
+
+
+

As you can see, the settings folder contains sub-folders for tools of the IDE. +So the devon folder contains devon.properties files for the configuration of your environment. +Further, for the IDEs such as eclipse or vscode, the according folders contain the templates to manage the workspace via our configurator.

+
+
+
+
Configuration Philosophy
+
+

Different tools and configuration files require a different handling:

+
+
+
    +
  • +

    Where suitable, we directly use these configurations from your settings (e.g. for eclipse/lifecycle-mapping-metadata.xml, or eclipse/project.dictionary).

    +
  • +
  • +

    The devon folder in settings contains templates for configuration files. These are copied to the devonfw-ide installation during setup (if no such file already exists). In this way the settings repository can provide reasonable defaults but allows the user to take over control and customize to his personal needs (e.g. .m2/settings.xml).

    +
  • +
  • +

    Other configurations need to be imported manually. To avoid manual steps and simplify use we try to automate as much as possible. This currently applies to sonarqube profiles but will be automated with sonar-devon4j-plugin in the future.

    +
  • +
  • +

    For tools with complex configuration structures like eclipse, intellij, or vscode we provide a smart mechanism via our configurator.

    +
  • +
+
+
+
+
Customize Settings
+
+

You can easily customize these settings for the requirements of your project. We suggest that one team member is responsible to ensure that everything stays consistent and works.

+
+
+

You may also create new sub-folders in settings and put individual items according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth to share in your team. However, to share and maintain knowledge we recommend to use a wiki.

+
+ +
+
+
software
+
+

The software folder contains the third party tools for your IDE such as maven, npm, java, etc. +With respect to the licensing terms you may create a custom archive containing a devonfw-ide together with the required software. +However, to be platform independent and allow lightweight updates, the devonfw-ide is capable to download and install the software automatically for you.

+
+
+
+
Repository
+
+

By default, software is downloaded via the internet from public download URLs of the according tools. However, some projects may need specific tools or tool versions that are not publicly available. +In such case, they can create their own software repository (e.g. in a VPN) and configure the base URL of it via DEVON_SOFTWARE_REPOSITORY variable. +Then, devonfw-ide will download all software from this repository only instead of the default public download URLs. +This repository (URL) should be accessible within your network via HTTPS (or HTTP) and without any authentication. +The repository needs to have the following structure:

+
+
+
+
${DEVON_SOFTWARE_REPOSITORY}/«tool»/«version»/«tool»-«version»[-«os»].tgz
+
+
+
+

So for every tool «tool» (java, maven, vscode, eclipse, etc.) you need to provide a folder in your repository. +Within this folder for every supported version «version» you need a subfolder. +This subfolder needs to contain the tool in that version for every operating system «os» (windows, linux, or mac - omitted if platform independent, e.g. for maven).

+
+
+
+
Shared
+
+

By default, each installation of devonfw-ide has its own physical installations of the required tools in the desired versions stored in its local software folder. +While this is great for isolation of devonfw-ide installations and to prevent side-effects, it can cause a huge waste of disc resources in case you are having many installations of devonfw-ide. +If you are a power-user of devonfw-ide with more than ten or even up to hundreds of installations on your machine, you might love to share installations of a software tool in a particular version between multiple devonfw-ide installations.

+
+
+ + + + + +
+ + +If you use this power-feature you are taking responsibility for side-effects and should not expect support. Also if you are using Windows please read Symlinks in Windows and make your mind if you really want to do so. You might also use this hint and maintain it manually without enabling the following feature. +
+
+
+

In order to do so, you only need to configure the variable DEVON_SOFTWARE_PATH in your ~/devon.properties pointing to an existing directory on your disc (e.g. /projects/software or C:\projects\software). +Then devonfw-ide will install required software into ${DEVON_SOFTWARE_PATH}/${software_name}/${software_version} as needed and create a symbolic link to it in ${DEVON_IDE_HOME}/software/${software_name}.

+
+
+

As a benefit, another devonfw-ide installation using the same software with the same version can re-use the existing installation and only needs to create the symbolic link. No more waste of having many identical JDK installations on your disc.

+
+
+

As a drawback, you need to be aware that specific tools may be "manipulated" after installation. +The most common case is that a tool allows to install plugins or extensions such as all IDEs do. Such "manipulations" will cause side-effects between the different devonfw-ide installations sharing the same version of that tool. +While this can also be a benefit it may also cause trouble. +If you have a sensitive project that should not be affected by such side-effects, you may again override the DEVON_SOFTWARE_PATH variable to the empty value in your ${DEVON_IDE_HOME}/conf/devon.properties of that sensitive installation:

+
+
+
+
DEVON_SOFTWARE_PATH=
+
+
+
+

This will disable this feature particularly for that specific sensitive devonfw-ide installation but let you use it for all other ones.

+
+
+
+
Custom
+
+

In some cases, a project might need a (proprietary) tool(s) that (are) not supported by devonfw-ide. A very simple solution is to get a release of devonfw-ide and add the tool(s) to the software folder and then distribute this modified release to your team. However, this has several drawbacks as you then have a fork of devonfw-ide and will lose your tool(s) when updating to a new release.

+
+
+

As a solution for this need, devonfw-ide lets you configure custom tools via the DEVON_IDE_CUSTOM_TOOLS variable. It can be defined in devon.properties of your settings git repository as an array of the custom tools you need to add. +For each entry the following applies:

+
+
+
    +
  • +

    It needs to have the form «tool»:«version»[:all][:«repository-url»]

    +
  • +
  • +

    The first entry must have the «repository-url» included which is used as default

    +
  • +
  • +

    Further entries will inherit this default if omitted

    +
  • +
  • +

    This URL is used in the same way as described above for a software repository.

    +
  • +
  • +

    The DEVON_SOFTWARE_REPOSITORY variable is ignored by this feature.

    +
  • +
  • +

    The optional infix :all is used to indicate that the tool is platform independent. Otherwise, an OS specific infix is appended to the URL file to download for your platform (windows, linux, or mac).

    +
  • +
+
+
+

As an example, we define it in ${DEVON_IDE_HOME}/settings/devon.properties:

+
+
+
+
DEVON_IDE_CUSTOM_TOOLS=(jboss-eap:7.1.4.GA:all:https://host.tld/projects/my-project firefox:70.0.1)
+
+
+
+

This will download and extract the following content to your software folder:

+
+ +
+

Please note that if you are not using windows, the -windows suffix will be -mac or -linux.

+
+ +
+
+
system
+
+

The system folder contains documentation and solutions for operation system specific integration. Please have a look to get the maximum out of devonfw-ide and become a very efficient power user.

+
+ +
+
+
updates
+
+

The updates folder is used for temporary data. This includes:

+
+
+
    +
  • +

    extracted archives for installation and updates

    +
  • +
  • +

    backups of old content on updates to prevent data loss

    +
  • +
+
+
+

If all works fine you may clean this folder to save some kilo- or mega-bytes. Otherwise, you can ignore it unless you are looking for a backup after a failed or unplanned upgrade.

+
+ +
+
+
workspaces
+
+

The workspaces folder contains folders for your active work. There is a workspace folder main dedicated for your primary work. You may do all your work inside the main workspace. Also, you are free to create any number of additional workspace folders named as you like (e.g. test, release, testing, my-sub-project, etc.). Using multiple workspaces is especially relevant for Eclipse as each workspace has its own Eclipse runtime instance and configuration.

+
+
+

Within the workspace folder (e.g. workspaces/main) you are again free to create sub-folders for (sub-)projects according to your needs. We assume that in most cases you clone git repositories here. The following structure shows an example layout for devonfw:

+
+
+
File structure of workspaces
+
+
/ workspaces
+├──/ main
+│  ├──/ .metadata
+│  ├──/ ide
+│  ├──/ devon4j
+│  └──/ my-thai-star
+└──/ stable
+   ├──/ .metadata
+   ├──/ ide
+   └──/ devon4j
+
+
+
+

In the main workspace you may find the cloned forks for regular work (in the example e.g. devon4j) as a base to create pull-requests while in the stable workspace there is a clone of devon4j from the official devon4j. +However, this is just an example. Some people like to create separate workspaces for development and maintenance branches with git. Other people just switch between those via git checkout.

+
+ +
+
+
Project import
+
+

The devonfw-ide supports to automatically check out and import required projects into your IDE during setup. To configure this you put a .properties file for each desired project into the projects sub-folder in your settings. Each .properties file describes one "project" which you would like to check out and (potentially) import:

+
+
+
+
path=myproject
+workingsets=Set1,Set2
+workspace=example
+git.url=http://github.com/someorg/someproject
+git.branch=develop
+build.path=.
+build.cmd=mvn -DskipTests=true -Darchetype.test.skip=true clean install
+eclipse=import
+active=true
+
+
+
+
+
.Variables of project import
+
+
+
+

|== = +|Variable|Value|Meaning +|path|e.g. myproject, will clone into ${WORKSPACE_PATH}/myproject|(required) Path into which the project is cloned. This path is relative to the workspace. +|workingsets|e.g. ws1,ws2|(optional) This will create working sets (in eclipse). Each module (eclipse project) of this project will be part of all these working sets. Working sets will be automatically created if necessary. +|workspace|main|Workspace to use for checkout and import. Default is main. +|git.url|e.g. http://github.com/someorg/someproject|(required) Git URL to use for cloning the project. +|git.branch|e.g. develop|(optional) Git branch to checkout. Git default branch is default. +|build.path|e.g. . (default)|(optional) The directory inside path where to trigger an initial build after clone or pull (if build.cmd is set). For a regular project use . to build top-level project. +|build.cmd +|e.g. mvn -DskipTests=true -Darchetype.test.skip=true clean install +|(optional) The devonfw command to invoke to build the project after clone or pull. If omitted no build is triggered. +|eclipse|e.g. import|(optional) Desired action for eclipse IDE. If you put import here all modules (eclipse projects) in the current project will be imported into eclipse. If you leave this out or put any other value for this parameter, no change in eclipse is done. +|active|true|(optional) If set to false the project is skipped during the setup. +|== =

+
+
+

Please note that the .properties file is parsed via shell and not via java. So be careful with "advanced" features .properties files normally support.

+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/devonfw-ide.html b/docs/devonfw.github.io/1.0/ide.wiki/devonfw-ide.html new file mode 100644 index 00000000..5f4df087 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/devonfw-ide.html @@ -0,0 +1,5727 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw-ide

+
+
+

Introduction

+
+

devonfw provides a solution to building applications which combine best-in-class frameworks and libraries +as well as industry proven practices and code conventions. +It massively speeds up development, reduces risks and helps deliver better results.

+
+
+

This document contains the instructions for the tool devonfw-ide to set up and maintain your development tools including your favorite IDE (integrated development environment).

+
+ +
+

Features

+
+

Every developer needs great tools to work efficiently. Setting up these tools manually can be tedious and error-prone. Furthermore, some projects may require different versions and configurations of such tools. Especially configurations like code-formatters should be consistent within a project to avoid diff-wars.

+
+
+

The devonfw-ide will solve these issues. Here are the features you will find through devonfw-ide:

+
+
+
    +
  • +

    Efficient
    +Set up your IDE within minutes tailored for the requirements of your project.

    +
  • +
  • +

    Automated
    +Automate the setup and update, avoid manual steps and mistakes.

    +
  • +
  • +

    Simple
    +KISS (Keep It Small and Simple), no native installers that globally mess your OS or tool-integration that break with every release. Instead, use templates and simple shell scripts.

    +
  • +
  • +

    Configurable
    +You can change the configuration depending on your needs. Furthermore, the settings contain configuration templates for the different tools (see configurator).

    +
  • +
  • +

    Maintainable
    +For your project you should copy these settings to an own git repository that can be maintained and updated to manage the tool configurations during the project lifecycle. If you use GitHub or GitLab every developer can easily suggest changes and improvements to these settings via pull/merge requests, which is easier to manage with big teams.

    +
  • +
  • +

    Customizable
    +Do you need an additional tool you had never heard of before? Put it in the software folder of the structure. The devon CLI will then automatically add it to your PATH variable.
    +Further you can create your own commandlet for your additional tool. For closed-source tools you can create your own archive and distribute it to your team members as long as you care about the terms and licenses of these tools.

    +
  • +
  • +

    Multi-platform
    +It works on all major platforms: Windows, Mac and Linux.

    +
  • +
  • +

    Multi-tenancy
    +You can have several instances of the devonfw-ide "installed" on your machine for different projects with different tools, tool versions and configurations. You won’t need to set up any physical installation nor changing your operating system. "Installations" of devonfw-ide do not interfere with each other nor with other installed software.

    +
  • +
  • +

    Multiple Workspaces
    +It supports working with different workspaces on different branches. You can create and update new workspaces with a few clicks. You can see the workspace name in the title-bar of your IDE so you do not get confused and work on the right branch.

    +
  • +
  • +

    Free
    +The devonfw-ide is free just like everything from devonfw. See LICENSE for details.

    +
  • +
+
+
+
+

IDEs

+
+

We support the following IDEs:

+
+
+ +
+
+
+

Platforms

+
+

We support the following platforms:

+
+
+ +
+
+
+

Build-Systems

+
+

We support the following build-systems:

+
+
+ +
+
+

However, also other IDEs, platforms, or tools can be easily integrated as commandlet.

+
+
+
+

Motivation

+
+

TL;DR? Lets talk to developers a correct language. Here are some examples with devonfw-ide:

+
+
+
+
[/]$ devon
+You are not inside a devonfw-ide installation: /
+[/]$ cd /projects/devonfw
+[devonfw]$ mvn
+zsh: command not found: mvn
+[devonfw]$ devon
+devonfw-ide environment variables have been set for /projects/devonfw in workspace main
+[devonfw]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/devonfw/software/maven
+Java version: 1.8.0_191, vendor: Oracle Corporation, runtime: /projects/devonfw/software/java
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[devonfw]$ cd /projects/ide-test/workspaces/test/my-project
+[my-project]$ devon
+devonfw-ide environment variables have been set for /projects/ide-test in workspace test
+[my-project]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/ide-test/software/maven
+Java version: 11.0.2, vendor: Oracle Corporation, runtime: /projects/ide-test/software/jdk/Contents/Home
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[ide-test]$ devon eclipse
+launching Eclipse for workspace test...
+[my-project]$ devon build
+[INFO] Scanning for projects...
+...
+[INFO] BUILD SUCCESS
+
+
+
+

This was just a very simple demo of what devonfw-ide can do. For further details have a look at our CLI documentation.

+
+
+

Now you might ask:

+
+
+
    +
  • +

    But I use Windows/Linux/MacOS/… - it works on all platforms!

    +
  • +
  • +

    But how about Windows CMD or Power-Shell? - it works!

    +
  • +
  • +

    But what if I use cygwin or git-bash on windows? - it works!

    +
  • +
  • +

    But I love to use ConEmu or Commander - it works with full integration!

    +
  • +
  • +

    How about macOS Terminal or iTerm2? - it works with full integration!

    +
  • +
  • +

    But I use Zsh - it works!

    +
  • +
  • +

    …​? - it works!

    +
  • +
+
+
+

Wow! So let’s get started with download & setup.

+
+ +
+
+

Setup

+ +
+
+

Prerequisites

+
+

We try to make it as simple as possible for you. However, there are some minimal prerequisites:

+
+
+
    +
  • +

    You need to have a tool to extract *.tar.gz files (tar and gzip). On Windows before Version 10 (1803) use 7-zip. On all other platforms this comes out of the box.

    +
  • +
  • +

    You need to have git and curl installed.

    +
    +
      +
    • +

      On Windows you only need to download and install git for windows. This also ships with bash and curl.

      +
    • +
    • +

      On Linux you might need to install the above tools in case they are not present (e.g. sudo apt-get install git curl or sudo yum install git-core curl)

      +
    • +
    • +

      On MacOS you only need to download and install git for mac.

      +
    • +
    +
    +
  • +
+
+
+
+

Download

+
+

The latest release of devonfw-ide can be downloaded from here (You can find all releases in maven central).

+
+
+
+

Install

+
+

Create a central folder like C:\projects or /projects. Inside this folder, create a sub-folder for your new project such as my-project and extract the contents of the downloaded archive (devonfw-ide-scripts-*.tar.gz) to this new folder. Run the command setup in this folder (on windows double clicking on setup.bat). +That’s all. To get started read the usage.

+
+
+
+

Uninstall

+
+

To "uninstall" your devonfw-ide you only need to call the following command:

+
+
+
+
devon ide uninstall
+
+
+
+

Then you can delete the devonfw-ide top-level folder(s) (${DEVON_IDE_HOME}).

+
+
+

The devonfw-ide is designed to be non-invasive to your operating system and computer. Therefore it is not "installed" on your system in a classical way. Instead you just create a folder and extract the downloaded archive to it. You only have to install regularly in advance some specific prerequisites like git. All the other software remains locally in your devonfw-ide folder. However, there are the following exceptions (which are reverted by devon ide uninstall):

+
+
+
    +
  • +

    The devon command is copied to your home directory (~/.devon/devon)

    +
  • +
  • +

    The devon alias is added to your shell config (~/.bashrc and ~/.zshrc, search for alias devon="source ~/.devon/devon").

    +
  • +
  • +

    On Windows the devon.bat command is copied to your home directory (%USERPROFILE%\scripts\devon.bat)

    +
  • +
  • +

    On Windows this %USERPROFILE%\scripts directory is added to the PATH of your user.

    +
  • +
  • +

    The devonfw-ide will download third party software to your ~/Downloads/devonfw-ide folder to reduce redundant storage. You have to delete this folder manually as we do not want to be responsible for data-loss in case users manually put files here.

    +
  • +
+
+
+
+

Testing SNAPSHOT releases

+
+

Whenever a story in devonfw-ide is completed by merging a PR, +our github actions will build a new SNAPSHOT release and on success deploy it to nexus on OSSRH. +You can therefore find the latest devonfw SNAPSHOT releases here. +Simply choose the latest SNAPSHOT version folder and then inside the *.tar.gz file for the latest version. +Once downloaded, you can proceed as with official releases (see install).

+
+
+

If you test the latest SNAPSHOT please also give feedback to bug or feature tickets to let us know if things are working or not. +Thanks for your testing, support and help to make devonfw better!

+
+
+
+
+
+

Usage

+ +
+

This section explains the usage of devonfw-ide according to your role:

+
+
+
    +
  • +

    Everybody should read and follow the usage for a developer.

    +
  • +
  • +

    In case you want to administrate devonfw-ide settings for your project, you should also read the usage for the ide-admin.

    +
  • +
+
+
+

Developer

+
+

As a developer you are supported to setup your IDE automated and fast while you can have a nice cup of coffee (after you provided settings-URL and accepted the license). +You only need the settings URL from your ide-admin. +Experienced developers can directly call setup «settings-URL». +Otherwise if you just call setup (e.g. by double-clicking it), you can enter it when you are prompted for Settings URL (using copy&paste to avoid typos).

+
+
+

Note: devonfw-ide supports autocompletion (since 2021.04.001). Currently this only works in bash (on windows use git bash). Simply type devon and hit [Tab] to get completion.

+
+
+
+

Update

+
+

To update your IDE (if instructed by your ide-admin), you only need to run the following command:

+
+
+
+
devon ide update
+
+
+
+

Please note that windows is using file-locking which can have ugly side-effects. +To be safe, you should have your IDE tools shut down before invoking the above update command. +E.g. if a tool needs to be updated, the old installation folder will be moved to a backup and the new version is installed on top. +If there are windows file locks in place this can fail and mess up things. +You can still delete the according installation from your software folder and rerun devon ide update if you ran into this error.

+
+
+
+

Working with multiple workspaces

+
+

If you are working on different branches in parallel you typically want to use multiple workspaces.

+
+
+
    +
  1. +

    Go to the workspaces folder in your ${DEVON_IDE_HOME} and create a new folder with the name of your choice (e.g. release2.1).

    +
  2. +
  3. +

    Check out (git clone …​) the according projects and branch into that workspace folder.

    +
  4. +
  5. +

    Open a shell in that new workspace folder (cd to it) and according to your IDE run e.g. eclipse, vscode, or intellij to create your workspace and launch the IDE. You can also add the parameter create-script to the IDE commandlet in order to create a launch-script for your IDE.

    +
  6. +
+
+
+

You can have multiple instances of eclipse running for each workspace in parallel. To distinguish these instances you will find the workspace name in the title of eclipse.

+
+
+
+

Admin

+
+

You can easily customize and configure devonfw-ide for the requirements of your project. +In order to do so, you need to create your own project-specific settings git repository and provide the URL to all developers for the setup. +With tools such as gitlab, bitbucket or github every developer can easily propose changes and improvements. +However, we suggest that one team member is responsible to ensure that everything stays consistent and works. +We will call this person the ide-admin of your project.

+
+
+

The following are the suggested step-by-step instructions how an ide-admin should prepare devonfw-ide for his new project:

+
+
+
    +
  1. +

    Fork ide-settings to a git repository specific for your project (e.g. a new project in the gitlab of your production-line instance). In case you are using github, all you need to do is use the Fork button. In other cases simply create a new and empty git repository and clone this to your machine. Then add the default ide-settings as origin, fetch and pull from it:

    +
    +
    +
    git remote add upstream https://github.com/devonfw/ide-settings.git
    +git fetch upstream
    +git pull upstream master
    +git push
    +
    +
    +
    +

    Now you should have a full fork as a copy of the settings git repo with all its history that is ready for upstream merges.

    +
    +
  2. +
  3. +

    Study the structure of this git repository to understand where to find which configuration.

    +
  4. +
  5. +

    Study the configuration and understand that general settings can be tweaked in the toplevel devon.properties file of your settings git repository.

    +
  6. +
  7. +

    Configure the tools and their versions for your project. Here is an example:

    +
    +
    +
    DEVON_IDE_TOOLS=(java mvn eclipse)
    +ECLIPSE_VERSION=2020-06
    +##use e.g. 8u242b08 for Java 8
    +#JAVA_VERSION=8u242b08
    +JAVA_VERSION=11.0.5_10
    +MAVEN_VERSION=3.6.2
    +
    +
    +
    +

    This way you will take over control of the tools and their versions for every developer in your project team and ensure that things get reproducible.

    +
    +
  8. +
  9. +

    In case you need a proprietary or unsupported tool, you can study how to include custom tools.

    +
  10. +
  11. +

    In case you have very restrictive policies about downloading tools from the internet, you can create and configure a software repository for your project or company.

    +
  12. +
  13. +

    Some of the tools (especially the actual IDEs) allow extensions via plugins. You can customize them to your needs for eclipse, VS code, or intelliJ.

    +
  14. +
  15. +

    In your settings git repository you will find a projects folder. Here you will find configurations files for every git project relevant for your actual project. Feel free to create new projects for your needs and delete the devonfw specific default projects. The projects documentation will explain you how to do this.

    +
  16. +
  17. +

    For every IDE you will also find an according folder in your settings git repository. Here are the individual configuration settings for that IDE. You can change them by directly editing the according configuration files directly with a text-editor in your settings git repository. However, this is a really complex way and will take you a lot of time to find the right file and property to tweak for your actual need. Instead we suggest to study +how to customize IDE specific settings.

    +
  18. +
  19. +

    You may also create new sub-folders in your settings git repository and put individual things according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth to share in your team. However, to share and maintain knowledge we recommend to use a wiki instead.

    +
  20. +
  21. +

    You may want to customize the Eclipse spellchecker dictionary for your project and your language.

    +
  22. +
+
+
+

All described in the above steps (except the first one) can be used to manage and update the configuration during the project lifecycle. +However, when you have done changes especially in a larger project, please consider the following best-practices to avoid that a large team gets blocked by a non-functional IDE:

+
+
+
    +
  • +

    Commit your changes to a feature-branch.

    +
  • +
  • +

    First test the changes yourself.

    +
  • +
  • +

    If all works as expected, pick a pilot user of the team to test the changes from the feature branch (go to settings folder, git fetch, git checkout -t origin/feature/«name», devon ide update).

    +
  • +
  • +

    Only after that works well for a couple of days, inform the entire team to update.

    +
  • +
+
+
+
+

Announce changes to your team

+
+

In order to roll out the perfectly configured devonfw-ide to your project initially or when new members join, you only have to provide the Settings URL to the developers of your team. +You can also provide a specific branch with Settings URL#branch to use variations of common settings or to test new settings before making them public to the team.

+
+
+

After you changed and tested your settings git repository (main branch), you only need to announce this to your developers (e.g. via email or some communication tool) so that they can run devon ide update and automatically get up-to-date with the latest changes (see update).

+
+
+

In case you want to go to a new version of devonfw-ide itself, developers have to call devon ide update scripts.

+
+
+ +
+
+

Configuration

+
+

The devonfw-ide aims to be highly configurable and flexible. The configuration of the devon command and environment variables takes place via devon.properties files. The following list shows these configuration files in the order they are loaded so files can override variables from files above in the list:

+
+
+
    +
  1. +

    build in defaults (for JAVA_VERSION, ECLIPSE_PLUGINS, etc.)

    +
  2. +
  3. +

    ~/devon.properties - user specific global defaults (on windows in %USERPROFILE%/devon.properties)

    +
  4. +
  5. +

    scripts/devon.properties - defaults provided by devonfw-ide. Never directly modify this file!

    +
  6. +
  7. +

    devon.properties - vendor variables for custom distributions of devonfw-ide-scripts, may e.g. tweak SETTINGS_PATH or predefine SETTINGS_URL.

    +
  8. +
  9. +

    settings/devon.properties (${SETTINGS_PATH}/devon.properties) - project specific configurations from settings.

    +
  10. +
  11. +

    workspaces/${WORKSPACE}/devon.properties - optional workspace specific configurations (especially helpful in projects using docker).

    +
  12. +
  13. +

    conf/devon.properties - user specific configurations (e.g. M2_REPO=~/.m2/repository). During setup this file is created by copying a template from ${SETTINGS_PATH}/devon/conf/devon.properties.

    +
  14. +
  15. +

    settings/projects/*.properties- properties to configure project checkout and import

    +
  16. +
+
+
+
+

devon.properties

+
+

The devon.properties files allow to define environment variables in a simple and OS independent way:

+
+
+
    +
  • +

    # comments begin with a hash sign (#) and are ignored

    +
  • +
  • +

    variable_name=variable_value with space etc.

    +
  • +
  • +

    variable_name=${predefined_variable}/folder_name

    +
    +

    variable values can refer to other variables that are already defined, which will be resolved to their value. You have to use ${…​} syntax to make it work on all platforms (never use %…​%, $…​, or $(…​) syntax in devon.properties files).

    +
    +
  • +
  • +

    export exported_variable=this value will be exported in bash, in windows CMD the export prefix is ignored

    +
  • +
  • +

    variable_name=

    +
    +

    this will unset the specified variable

    +
    +
  • +
  • +

    variable_name=~/some/path/and.file

    +
    +

    tilde is resolved to your personal home directory on any OS including windows.

    +
    +
  • +
  • +

    array_variable=(value1 value2 value3)

    +
    +

    This will only work properly in bash worlds but as no arrays are used in CMD world of devonfw-ide it does not hurt on windows.

    +
    +
  • +
  • +

    Please never surround values with quotes (var="value")

    +
  • +
  • +

    This format is similar to Java *.properties but does not support advanced features as unicode literals, multi-lined values, etc.

    +
  • +
+
+
+

In order to know what to configure, have a look at the available variables.

+
+
+

Please only tweak configurations that you need to change and take according responsibility. There is a price to pay for flexibility, which means you have to be careful what you do.

+
+
+

Further, you can configure maven via conf/settings.xml. To configure your IDE such as eclipse or vscode you can tweak the settings.

+
+
+ +
+
+

Variables

+
+

The devonfw-ide defines a set of standard variables to your environment for configuration via variables[.bat] files. +These environment variables are described by the following table. +Those variables printed bold are also exported in your shell (except for windows CMD that does not have such concept). Variables with the value - are not set by default but may be set via configuration to override defaults. +Please note that we are trying to minimize any potential side-effect from devonfw-ide to the outside world by reducing the number of variables and only exporting those that are required.

+
+
+
Variables of devonfw-ide
+

|== == == == == == == == == == == = +|Variable|Value|Meaning +|DEVON_IDE_HOME|e.g. /projects/my-project|The top level directory of your devonfw-ide structure. +|PATH|$PATH:$DEVON_IDE_HOME/software/java:…​|You system path is adjusted by devon command. +|DEVON_HOME_DIR|~|The platform independent home directory of the current user. In some edge-cases (e.g. in cygwin) this differs from ~ to ensure a central home directory for the user on a single machine in any context or environment. +|DEVON_IDE_TOOLS|(java mvn node npm)|List of tools that should be installed and upgraded by default for your current IDE. +|DEVON_IDE_CUSTOM_TOOLS|-|List of custom tools that should be installed additionally. See software for further details. +|DEVON_CREATE_START_SCRIPTS|(eclipse vscode)|List of IDEs that shall be used by developers in the project and therefore start-scripts are created on setup. +|DEVON_OLD_PATH|…​|A "backup" of PATH before it was extended by devon to allow recovering it. Internal variable that should never be set or tweaked. +|WORKSPACE|main|The workspace you are currently in. Defaults to main if you are not inside a workspace. Never touch this variable in any variables file. +|WORKSPACE_PATH|$DEVON_IDE_HOME/workspaces/$WORKSPACE|Absolute path to current workspace. Never touch this variable in any variables file. +|JAVA_HOME|$DEVON_IDE_HOME/software/java|Path to JDK +|SETTINGS_PATH|$DEVON_IDE_HOME/settings|Path to your settings. To keep oasp4j-ide legacy behaviour set this to $DEVON_IDE_HOME/workspaces/main/development/settings. +|M2_REPO|$DEVON_IDE_HOME/conf/.m2/repository|Path to your local maven repository. For projects without high security demands, you may change this to the maven default ~/.m2/repository and share your repository among multiple projects. +|MAVEN_HOME|$DEVON_IDE_HOME/software/maven|Path to Maven +|MAVEN_OPTS|-Xmx512m -Duser.home=$DEVON_IDE_HOME/conf|Maven options +|DEVON_SOFTWARE_REPOSITORY|-|Project specific or custom software-repository. 
+|DEVON_SOFTWARE_PATH|-|Globally shared user-specific local software installation location. +|ECLIPSE_VMARGS|-Xms128M -Xmx768M -XX:MaxPermSize=256M|JVM options for Eclipse +|deprecated: ECLIPSE_PLUGINS|-|Array with "feature groups" and "update site URLs" to customize required eclipse plugins. Deprecated - see Eclipse plugins. +|«TOOL»_VERSION|-|The version of the tool «TOOL» to install and use (e.g. ECLIPSE_VERSION or MAVEN_VERSION). +|EXTRA_JAVA_VERSION|-|An additional (newer) version of java that will be used to run java-based IDEs (e.g. eclipse or intellij). +|«TOOL»_BUILD_OPTS|e.g.clean install|The arguments provided to the build-tool «TOOL» in order to run a build. +|«TOOL»_RELEASE_OPTS|e.g.clean deploy -Dchangelist= -Pdeploy|The arguments provided to the build-tool «TOOL» in order to perform a release build. +|DEVON_IDE_TRACE||If value is not an empty string, the devonfw-ide scripts will trace each script line executed. For bash two lines output: before and again after expansion. ATTENTION: This is not a regular variable working via devon.properties. Instead manually do export DEVON_IDE_TRACE=true in bash or DEVON_IDE_TRACE=true in windows CMD before running a devon command to get a trace log that you can provide to experts in order to trace down a bug and see what went wrong. +|== == == == == == == == == == == =

+
+
+ +
+
+

Devon CLI

+
+

The devonfw-ide is shipped with a central command devon. The setup will automatically register this command so it is available in any shell on your system. This page describes the Command Line Interface (CLI) of this command.

+
+
+
+

Devon

+
+

Without any argument the devon command will determine your DEVON_IDE_HOME and setup your environment variables automatically. In case you are not inside of a devonfw-ide folder the command will echo a message and do nothing.

+
+
+
+
[/]$ devon
+You are not inside a devon IDE installation: /
+[/]$ cd /projects/my-project/workspaces/test/my-git-repo
+[my-git-repo]$ devon
+devonfw-ide environment variables have been set for /projects/my-project in workspace main
+[my-git-repo]$ echo $DEVON_IDE_HOME
+/projects/my-project
+[my-git-repo]$ echo $JAVA_HOME
+/projects/my-project/software/java
+
+
+
+
+

Commandlets

+
+

The devon command supports a pluggable set of commandlets. Such a commandlet is provided as the first argument to the devon command and may take additional arguments:

+
+
+

devon «commandlet» [«arg»]*

+
+
+

Technically, a commandlet is a bash script located in $DEVON_IDE_HOME/scripts/command. So if you want to integrate another tool with devonfw-ide we are awaiting your pull-request. +Every commandlet takes the following generic arguments:

+
+
+
Generic arguments of every commandlet
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|-b or --batch |run in non-interactive mode (do not ask any questions). +|-q or --quiet |be quiet and avoid output. +|== == == == == == == == == == == =

+
+
+
+

Command-wrapper

+
+

For many commandlets the devon command acts as a wrapper. +Similar to mvnw or gradlew you can use it as a proxy command. +Therefore devon mvn clean install will be the same as mvn clean install. +The benefit when using devon as wrapper is that it will even work when the command (mvn, node, npm, etc.) is not on your PATH variable or even not yet installed. +We see the main benefit in this for writing portable scripts that you may commit to your git repository and that will then run everywhere and will lazily install the required tools on the fly. +In your daily usage you can and surely should avoid to always type devon as prefix to every command. +However, when you automate and want to avoid "command not found" errors, you can simply prefix the command with devon.

+
+
+
+

Commandlet overview

+
+

The following commandlets are currently available:

+
+
+ +
+ +
+
build
+
+

The build commandlet is an abstraction of build systems like maven, gradle, yarn, npm, etc. +It will auto-detect your build-system (via existence of files like pom.xml, package.json, etc.). According to this detection, it will simply delegate to the according commandlet of the specific build system. If that build-system is not yet available it will be downloaded and installed automatically.

+
+
+

So devon build allows users to build any project without bothering about the build-system. Further specific build options can be configured per project. This makes devon build a universal part of every definition of done. Before pushing your changes, please always run the following command to verify the build:

+
+
+

devon build

+
+
+

You may also supply additional arguments as devon build «args». This will simply delegate these arguments to the detected build command (e.g. call mvn «args»).

+
+ +
+
+
Docker
+
+

The Docker commandlet allows to install and use Docker. +On Windows WSL 2 (Windows Subsystem for Linux) has to be installed properly as a prerequisite.

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon docker «args») are explained by the following table:

+
+
+
Usage of devon docker
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Docker (install and verify) as per above flow. +|«args» |call docker with the specified arguments. Call docker help for details or use docker directly as preferred. («args») +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

The Docker commandlet will install Docker automatically. +Please note that besides the sandbox concept of devonfw-ide this is a global installation on your system. +When uninstalling devonfw-ide, you may have to manually uninstall Docker and Kubernetes if you do not need it anymore.

+
+
+
+
requirements
+
+

Running Docker and especially Kubernetes on your machine in the background will require quite some resources. +This will allocate at least 2GB of additional RAM.

+
+
+

You will need at least 8GB of total RAM while we recommend to use 16GB+.

+
+
+

You may also tune and scale it to your needs. +When using Docker Desktop (Windows or MacOS) simply go to the resources tab in the settings. +It will depend on your usage frequency if you want to have it running in the background all the time. +This is a balance between resource utilization and convenience. +If you use Docker and Kubernetes on your local machine on a daily basis this makes sense.

+
+
+

In case you only use Docker rarely, you can save resources by stopping it when not needed after it has been installed.

+
+
+
+
Windows and macOS
+
+

To enable or disable autostart, you can launch Docker Desktop on Windows or MacOS and go to the Preferences (gear icon in the title bar). Then in the General tab you can check or uncheck the option Start Docker Desktop when you login (see also here). When autostart is disabled and you launch Docker Desktop it will notice and ask you to start the service or do this automatically for you. +On Windows you can also manually tweak this:

+
+
+
    +
  • +

    Hit [windows][r]

    +
  • +
  • +

    Enter services.msc

    +
  • +
  • +

    Confirm with OK

    +
  • +
  • +

    In the services app search for the Docker Desktop Service in the list and select it.

    +
  • +
  • +

    Now you can start or stop the service by clicking on the according link text.

    +
  • +
  • +

    Also when right clicking on Docker Desktop Service and selecting Options from the context-menu, you can change the start type to automatic or manual.

    +
  • +
+
+
+
+
== Mac M1
+
+

In case you have a new Mac with M1 CPU, we automatically download and install the according ARM version of Docker Desktop for macOS. +However, if you use Docker and search for images you may end up with errors like:

+
+
+
+
docker: no matching manifest for linux/arm64/v8 in the manifest list entries.
+
+
+
+

So with an M1 CPU you may need to add --platform linux/x86_64 as option to your Docker command to find the expected container image.

+
+
+
+
Linux
+
+

There is no Docker Desktop for Linux. +As Docker initially comes from the Linux world, it is easy to set it up on a Linux machine and use it from the commandline. +Therefore we do not install a GUI for you in case you are a Linux user. +In case you need a GUI for Docker and Kubernetes on Linux you can choose from the following options:

+
+
+ +
+
+
+
usage
+
+

Once installed via setup, you can run Docker directly from any shell of your OS directly. +Run docker help to get started and use the online documentations and resources on the web to get familiar with Docker. +It is not our intention to repeat this here.

+
+
+

Please note that the docker commandlet is a command wrapper.

+
+ +
+
+
eclipse
+
+

The eclipse commandlet allows to install, configure, and launch the Eclipse IDE. +To launch eclipse for your current workspace and devonfw-ide installation simply run: +devon eclipse

+
+
+

You may also supply additional arguments as devon eclipse «args». These are explained by the following table:

+
+
+
Usage of devon eclipse
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then the command will be invoked for each workspace +|setup |setup Eclipse (install or update) +|add-plugin «id» [«url»]|install an additional plugin +|run |launch Eclipse (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add |reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+

There are variables that can be used for Eclipse. These are explained by the following table:

+
+
+
Variables of devonfw-ide for Eclipse
+

|== == == == == == == == == == == = +|Variable|Meaning +|ECLIPSE_VERSION|The version of the tool Eclipse to install and use. +|ECLIPSE_EDITION_TYPE|The edition of the tool Eclipse to install and use. You can choose between Java for standard edition or JEE for enterprise edition. +|*EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with Eclipse you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder eclipse/plugins (click on this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example tmterminal.properties:

+
+
+
+
plugin_url=http://download.eclipse.org/tm/terminal/marketplace
+plugin_id=org.eclipse.tm.terminal.feature.feature.group,org.eclipse.tm.terminal.view.feature.feature.group,org.eclipse.tm.terminal.control.feature.feature.group,org.eclipse.tm.terminal.connector.ssh.feature.feature.group,org.eclipse.tm.terminal.connector.telnet.feature.feature.group
+plugin_active=true
+
+
+
+

The variables are defined as follows:

+
+
+
    +
  • +

    plugin_url defines the URL of the Eclipse update site of the plugin

    +
  • +
  • +

    plugin_id defines the feature group ID(s) to install. To install multiple features/plugins provide a comma-separated list of IDs. If you want to customize devonfw-ide with new plugins you can first install them manually and then go to About Eclipse > Installation Details then you can filter for your newly installed plugin and find the values in the Id column. Copy & paste them from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise, developers can still install the plugin manually via devon eclipse add-plugin «plugin-name» from the config file settings/eclipse/plugins/«plugin-name».properties. See the settings/eclipse/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. This e.g. applies for devstyle that allows a real dark mode for eclipse and tunes the theming and layout of Eclipse in general. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually.

+
+
+

As the maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
legacy plugin config
+
+

For downward compatibility we still support the deprecated legacy configuration if the folder settings/eclipse/plugins does not exist: +The project configuration typically defines the plugins that will be installed via ECLIPSE_PLUGINS variable. Otherwise defaults from this eclipse commandlet will apply. +Be aware that this comes at your own risk and sometimes plugins can conflict and break your IDE.

+
+
+

Here is an example how a project can configure the plugins in its devon.properties inside the settings:

+
+
+
+
ECLIPSE_PLUGINS=("AnyEditTools.feature.group" "https://raw.githubusercontent.com/iloveeclipse/plugins/latest/" "com.ess.regexutil.feature.group" "http://regex-util.sourceforge.net/update/")
+
+
+
+

For the above listed plugins you can also use the short form:

+
+
+
+
ECLIPSE_PLUGINS=("anyedit" "" "regexutil" "")
+
+
+
+

Of course you may also mix plugin IDs with fully qualified plugins.

+
+
+
+
dictionary
+
+

Eclipse already comes with a built-in spellchecker. This is very helpful when writing comments. The default settings of devonfw-ide ship with a project specific dictionary file and according configurations to enable spellchecking and configuring this dictionary. +When typing JavaDoc, inline comments or other texts the spellchecker will underline unknown words in red. +If your cursor is located at such a word you can hit [Ctrl][1] to get a context menu with additional options. +There you can either choose similar correct words to correct a typo or you may even add the word (maybe a new business term) to your local dictionary.

+
+
+
+"Eclipse spellchecker” +
+
+
+

In the latter case, you should commit the changes to your settings so that it will be available to your entire team. +For further details about committing changes to the settings please consult the admin usage.

+
+
+
+
non-english dictionary
+
+

In case your project has to write documentation or text in languages other than English, you might want to prefill your project dictionary for that language. +Here we collect a list of such dictionaries that you can download and merge into your project dictionary:

+
+
+ +
+ +
+
+
gradle
+
+

The gradle commandlet allows to install, configure, and launch gradle. It is similar to gradle-wrapper. So calling devon gradle «args» is more or less the same as calling gradle «args» but with the benefit that the version of gradle preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon gradle «args») are explained by the following table:

+
+
+
Usage of devon gradle
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup gradle (install and verify), configurable via GRADLE_VERSION +|«args» |run gradle with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
help
+
+

The help commandlet provides help for the CLI.

+
+
+
Usage of devon help
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |Print general help +|«command» |Print help for the commandlet «command». +|== == == == == == == == == == == =

+
+
+

Please note that devon help «command» will do the same as devon «command» help.

+
+ +
+
+
ide
+
+

The ide commandlet manages your devonfw-ide. +You need to supply additional arguments as devon ide «args». These are explained by the following table:

+
+
+
Usage of devon ide
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup [«SETTINGS_URL»] |setup devonfw-ide (cloning the settings from the given URL, optionally from specific branch URL#branch) +|update [«package»] |update devonfw-ide +|update scripts [to «version»] |update devonfw-ide +|uninstall |uninstall devonfw-ide (if you want to remove it entirely from your system) +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

Run devon ide setup to initially setup your devonfw-ide. It is recommended to run the setup script in the top-level directory ($DEVON_IDE_HOME). However, in case you want to skip some system specific integration, you may also run this command directly instead. The setup only needs to be called once after a new devonfw-ide instance has been created. It will follow this process:

+
+
+
    +
  • +

    install the devon command on your system (if not already installed).

    +
  • +
  • +

    clone the settings (you may provide a git URL directly as argument or you will be prompted for it).

    +
  • +
  • +

    install all required software from DEVON_IDE_TOOLS variable (if not already installed).

    +
  • +
  • +

    configure all these tools

    +
  • +
  • +

    create IDE launch scripts

    +
  • +
  • +

    perform OS specific system integration such as Windows Explorer integration (only done from setup script and not from devon ide setup)

    +
  • +
+
+
+
+
update
+
+

Run devon ide update to update your devonfw-ide. This will check for updates and install them automatically. +The optional extra argument («package») behaves as follows:

+
+
+
    +
  • +

    scripts: check if a new version of devonfw-ide-scripts is available. If so it will be downloaded and installed. As Windows is using file-locks, it is tricky to update a script while it is executed. Therefore, we update the scripts folder as an async background task and have to abort further processing at this point on windows as a workaround.

    +
  • +
  • +

    settings: update the settings (git pull).

    +
  • +
  • +

    software: update the software (e.g. if versions have changed via scripts or settings update).

    +
  • +
  • +

    projects: update the projects (checkout and import repositories into workspace/IDEs).

    +
  • +
  • +

    all: do all the above sequentially.

    +
  • +
  • +

    none: settings and software are updated by default if no extra argument is given. This is the regular usage for project developers. Only perform an update of scripts when you are requested to do so by your technical lead. Bigger projects especially need to test updates before rolling them out to the entire team. If developers always updated the latest release of the scripts which is released globally, some project functionality would break causing problems and extra efforts in the teams.

    +
  • +
+
+
+

In order to update to a specific version of scripts an explicit version can be specified after the additional to argument:

+
+
+
+
devon ide update scripts to 3.1.99
+
+
+
+

The above example will update to the exact version 3.1.99 no matter if this is an upgrade or a downgrade of your current installed version. +If you just use devon ide update scripts then the latest available version will be installed. In larger teams it is recommended to communicate exact version updates to avoid that a new release can interfere and break anything. Therefore, some pilot user will test a new version for the entire team and, only after a successful test, they will communicate to the team to update to that exact version by providing the complete command as in the above example.

+
+
+
+
uninstall
+
+

We hope you love devonfw-ide. However, if you don’t and want to get rid of it entirely and completely remove all integration, you can use this command:

+
+
+
+
devon ide uninstall
+
+
+
+

This will remove devonfw-ide from all central places of your OS (user home directory such as scripts, .devon, .bashrc, as well as windows registry, etc.). +However, it will not remove your current installations (or shared software folder). So after running this uninstall, simply remove your DEVON_IDE_HOME directory of all devonfw-ide installations and potential shared software folder. You may also want to clean up your ~/Downloads directory from files downloaded by devonfw-ide. We do not automate this as deleting a directory is a very simple manual step and we do not want to take responsibility for severe data loss if your workspaces contained valuable work.

+
+ +
+
+
intellij
+
+

The intellij commandlet allows to install, configure, and launch IntelliJ. +To launch IntelliJ for your current workspace and devonfw-ide installation, simply run: +devon intellij

+
+
+

You may also supply additional arguments as devon intellij «args». These are explained by the following table:

+
+
+
Usage of devon intellij
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then the command will be invoked for each workspace +|setup |setup IntelliJ (install or update) +|add-plugin «id»|install an additional plugin +|run |launch IntelliJ (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+

There are variables that can be used for IntelliJ. These are explained by the following table:

+
+
+
Variables of devonfw-ide for intelliJ
+

|== == == == == == == == == == == = +|Variable|Meaning +|INTELLIJ_VERSION|The version of the tool IntelliJ to install and use. +|INTELLIJ_EDITION_TYPE|The edition of the tool IntelliJ to install and use. The value C means the Community edition and the value U means the Ultimate edition. The Ultimate edition requires a license. The user has to buy the license separately and it is not part of devonfw-ide. The devonfw-ide only supports download and installation. +|*EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with IntelliJ you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder intellij/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example scala.properties:

+
+
+
+
plugin_id=org.intellij.scala
+plugin_active=false
+
+
+
+

The variables are defined as follows:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins use the search on https://plugins.jetbrains.com/idea_ce to find the plugin of your choice. Select the tab Versions and click on a version in the list. The plugin ID is displayed in the upper right corner. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon intellij add-plugin «plugin_id».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of IntelliJ. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+ +
+
+
ionic
+
+

The ionic commandlet allows to install, configure, and launch ionic (ionic-cli). Calling devon ionic «args» is more or less the same as calling ionic «args» but with some advanced features and ensuring that ionic is properly set up for your project.

+
+
+

The arguments (devon ionic «args») are explained by the following table:

+
+
+
Usage of devon ionic
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup yarn (install and verify), configurable via YARN_VERSION +|create |Create a new devon4ng ionic project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ionic with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
jasypt
+
+

The jasypt commandlet allows to install jasypt and encrypt or decrypt secrets using strong encryption given a secure masterpassword. See also devon4j password encryption guide for further details.

+
+
+

The arguments (devon jasypt «args») are explained by the following table:

+
+
+
Usage of devon jasypt
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup jasypt (install and verify), configurable via JASYPT_VERSION +|encrypt |Encrypt a secret with a masterpassword +|decrypt |Decrypt an encrypted secret with a masterpassword +|== == == == == == == == == == == =

+
+
+
+
example
+
+
+
devon jasypt encrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: secret
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: secret
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+devon jasypt decrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+secret
+
+
+ +
+
+
java
+
+

The java commandlet allows to install and setup Java. Also it supports devon4j. +The arguments (devon java «args») are explained by the following table:

+
+
+
Usage of devon java
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup OpenJDK (install or update and verify), configurable via JAVA_VERSION (e.g. 8u242b08 or 11.0.6_10) +|create «args» |create a new Java project based on devon4j application template. If a single argument is provided, this is the package name and is automatically split into groupId and artifactId. Use -DdbType=«db» to choose the database (hana, oracle, mssql, postgresql, mariadb, mysql, h2, hsqldb). Any option starting with dash is passed as is. +|migrate [from «version»] [single] |migrate a devon4j project to the latest version. If for some reasons the current devonfw version can not be auto-detected you may provide it manually after the 'from' argument. Also the 'single' option allows to migrate only to the next available version. +|cicd «args» |generate cicd files for the current devon4java project +|== == == == == == == == == == == =

+
+
+

Since 2021.12.003 an extra version of Java can be configured via EXTRA_JAVA_VERSION variable. This can be used to launch your IDE with a different (newer) version of Java but keeping the build of your project stable.

+
+
+
+
create
+
+

Examples for create a new devon4j application:

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create -Dversion=0.0.1-alpha1 com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 0.0.1-alpha1, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group demo-app
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId demo-app, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp -DartifactId=demo-app -DdbType=hana
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId demo-app, version 1.0.0-SNAPSHOT, and SAP hana database.

+
+
+
+
devon java create com.example.domain.myapp -DdbType=oracle -Dversion=0.0.1 com.example.group -Dbatch=batch
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 0.0.1, oracle database, and with a batch module.

+
+
+
+
migrate
+
+

Example for migrating a devon4j application:

+
+
+
+
devon java migrate
+
+
+
+

Will migrate current devon4j application to the latest version available.

+
+ +
+
+
jenkins
+
+

The jenkins commandlet allows to install, configure, and launch Jenkins.

+
+
+
Usage of devon jenkins
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup Jenkins (install and verify) +|start |Start your local Jenkins server +|stop |Stop your local Jenkins server +|add |Add current project as CI job to your local Jenkins +|== == == == == == == == == == == =

+
+ +
+
+
Kubernetes
+
+

The kubectl commandlet allows to install and use kubernetes. +On Windows WSL 2 (Windows Subsystem for Linux) has to be installed properly as a prerequisite. +The setup on windows will then install kubernetes with K3D. K3D will create a cluster with a single node with the default name "devonfw-cluster".

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon kubectl «args») are explained by the following table:

+
+
+
Usage of devon kubectl
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Kubernetes (install and verify) as per above flow. +|«args» |call kubectl with the specified arguments. Call kubectl help for details or use kubectl directly as preferred. +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

Please note that on Windows and macOS, Kubernetes support comes together with Docker Desktop that is installed via docker commandlet. +When you have installed and launched Docker Desktop, you can once enable Kubernetes in the Preferences.

+
+
+

On Linux however, Kubernetes is installed separately by this commandlet.

+
+
+
+
usage
+
+

Once installed via setup, you can run kubectl directly from any shell of your OS directly. +Run kubectl help to get started and use the online documentations and resources on the web to get familiar with Kubernetes. +It is not our intention to repeat this here.

+
+
+

Please note that the kubectl commandlet is a command wrapper.

+
+ +
+
+
mvn
+
+

The mvn commandlet allows to install, configure, and launch maven. It is similar to maven-wrapper and mdub. So calling devon mvn «args» is more or less the same as calling mvn «args» but with the benefit that the version of maven preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon mvn «args») are explained by the following table:

+
+
+
Usage of devon mvn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via MVN_BUILD_OPTS +|setup |setup Maven (install and verify), configurable via MAVEN_VERSION +|get-version |Print the version of your current project. Will consolidate the version for multi-module projects ignoring dev[-SNAPSHOT] versions and fail on mixed versions. +|set-version «nv» [«cv»] |Set the version of your current project to «nv» (assuming your current version is «cv»). +|check-no-snapshots |Check if no «version»-SNAPSHOT dependencies are used. +|check-top-level-project |Check if you are running on a top-level project or fail if in a module or no maven project at all. +|release |Start a clean deploy release build, configurable via MVN_RELEASE_OPTS +|«args» |run maven with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
ng
+
+

The ng commandlet allows to install, configure, and launch ng (angular-cli). Calling devon ng «args» is more or less the same as calling ng «args» but with some advanced features and ensuring that ng is properly set up for your project.

+
+
+

The arguments (devon ng «args») are explained by the following table:

+
+
+
Usage of devon ng
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup ng (install and verify), configurable via NG_VERSION +|create |Create a new devon4ng project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ng with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
node
+
+

The node commandlet allows to install and setup node.js. +The arguments (devon node «args») are explained by the following table:

+
+
+
Usage of devon node
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup node.js (install and verify), configurable via NODE_VERSION +|create «name» [«args»] | create a new devon4node application (same as devon4node new) +|generate «s» [«args»] | generate devon4node components using the schematic «s» (same as devon4node generate) +|db «c» [«args»] | execute a TypeORM command «c» (same as devon4node db) +|cicd «args» |generate cicd files for the current devon4node project +|«args» | call NodeJS with the specified arguments +|== == == == == == == == == == == =

+
+ +
+
+
npm
+
+

The npm commandlet allows to install, configure, and launch npm. Calling devon npm «args» is more or less the same as calling npm «args» but with the benefit that the version of npm preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon npm «args») are explained by the following table:

+
+
+
Usage of devon npm
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via NPM_BUILD_OPTS +|setup |setup NPM (install and verify), configurable via NPM_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |Start a clean deploy release build, configurable via NPM_RELEASE_OPTS +|«args» |run NPM with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
release
+
+

Create a release in a standardized way including the following steps:

+
+
+
    +
  • +

    verify the current project (no local changes, etc.)

    +
  • +
  • +

    warn if «version»-SNAPSHOT dependencies are used

    +
  • +
  • +

    determine «version» (if currently «version»-SNAPSHOT) and print out release information.

    +
  • +
  • +

    ask user for confirmation

    +
  • +
  • +

    bump release to «version» in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    create annotated tag for your release as release/«version»

    +
  • +
  • +

    invoke deployment on build-system

    +
  • +
  • +

    set next version as («version»+1)-SNAPSHOT in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    push your changes

    +
  • +
+
+
+
Usage of devon release
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|…​ |any optional argument will directly be passed to the actual command to build the deployment +|== == == == == == == == == == == =

+
+
+
+
Build-Tools
+
+

This release commandlet utilizes the build commandlet to support multiple build-tools such as maven, gradle, or npm. Each of those commandlets should respect the variable «TOOL»_RELEASE_OPTS to customize the parameters for the release build.

+
+
+

So e.g. if a pom.xml is detected, maven will be used. In this example the variable MVN_RELEASE_OPTS is used that defaults to clean deploy -Dchangelist= -Pdeploy. +If you provide a specific argument this will be passed additionally. +So if you invoke the command devon release -P myProfile, the above step invoke deployment on build-system would technically call this:

+
+
+
+
mvn clean deploy -Dchangelist= -Pdeploy -P myProfile
+
+
+
+

Please also note that it is very tricky to determine and modify the version of a project in a fully generic way. +Even though we try our best to support different scenarios, we can not ensure this is working for edge-cases. +Therefore, we strongly encourage to follow best practices such as ci-friendly maven. +Further, sticking to the defaults and follow the devonfw standard to name the profile for custom goals in deployment simply deploy is recommended.

+
+ +
+
+
sonar
+
+

The sonar commandlet allows to install, configure, and launch SonarQube.

+
+
+
Usage of devon sonar
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup SonarQube (install and verify) +|start |Start your local SonarQube server +|stop |Stop your local SonarQube server +|analyze |Analyze current project with SonarQube +|== == == == == == == == == == == =

+
+ +
+
+
vscode
+
+

The vscode commandlet allows to install, configure, and launch Visual Studio Code. +To launch VSCode for your current workspace and devonfw-ide installation, simply run: +devon vscode

+
+
+

You may also supply additional arguments as devon vscode «args». These are explained by the following table:

+
+
+
Usage of devon vscode
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then to command will be invoked for each workspace +|setup |setup VSCode (install or update) +|add-plugin «id»|install an additional plugin (extension) +|run |launch VSCode (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with VS Code you need plugins (called extensions in VS Code). Of course devonfw-ide can automate this for you: +In your settings git repository create a folder vscode/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example devonfw-extension-pack.properties:

+
+
+
+
plugin_id=devonfw.devonfw-extension-pack
+plugin_active=true
+
+
+
+

The variables are defined as following:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins click on Extensions at the bottom of the left navigation icon bar in VS code. Then use the search to find the plugin of your choice. If you click on it the plugin ID is displayed in grey beside the official title at the top of the plugin details page. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon vscode add-plugin «plugin-name» from the config file settings/vscode/plugins/«plugin-name».properties. See the settings/vscode/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of VS code. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
cleaning plugins on update
+
+

If you want to strictly manage the plugins for VS code in your project, you can create or edit the file settings/vscode/plugins in your settings and add this variable:

+
+
+
+
clean_plugins_on_update=true
+
+
+
+

This will wipe all plugins when an update of VS code is performed (e.g. via devon ide update) and reinstall all configured plugins. This gives you more control over the governance of the plugins and allows you to remove a plugin later during the project lifecycle. However, it will delete all manually installed plugins automatically without asking.

+
+ +
+
+
yarn
+
+

The yarn commandlet allows to install, configure, and launch yarn. Calling devon yarn «args» is more or less the same as calling yarn «args» but with the benefit that the version of yarn preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon yarn «args») are explained by the following table:

+
+
+
Usage of devon yarn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via YARN_BUILD_OPTS +|setup |setup yarn (install and verify), configurable via YARN_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |start a clean deploy release build, configurable via YARN_RELEASE_OPTS +|«args» |run yarn with the given arguments («args») +|== == == == == == == == == == == =

+
+
+ +
+
+
+

Structure

+
+

The directory layout of your devonfw-ide will look like this:

+
+
+
File structure of your devonfw-ide
+
+
/ projects (or C:\Projects, etc.)
+└──/ my-project ($DEVON_IDE_HOME)
+    ├──/ conf
+    ├──/ log
+    ├──/ scripts
+    ├──/ settings
+    ├──/ software
+    ├──/ system
+    ├──/ updates
+    ├──/ workspaces
+    ├── setup
+    ├── setup.bat
+    └── devon-ide-doc.pdf
+
+
+
+

The elements of the above structure are described in the individual sections. As they are hyperlinks you can simply click on them to get more details.

+
+ +
+
conf
+
+

This folder contains configurations for your IDE:

+
+
+
File structure of the conf folder
+
+
/ conf
+├──/ .m2
+│  ├──/ repository
+│  │  ├──/ ant
+│  │  ├──/ ...
+│  │  └──/ zw
+│  ├── settings-security.xml
+│  └── settings.xml
+├──/ .sonar
+├──/ ...
+└── variables
+
+
+
+

The .m2 folder is used for configurations of maven. It contains the local repository folder used as cache for artifacts downloaded and installed by maven (see also maven repositories). +Further, there are two configuration files for maven:

+
+
+
    +
  • +

    settings.xml initialized from a template from your devonfw-ide [settings]. You may customize this to your needs (configuring HTTP proxies, credentials, or other user-specific settings). Secrets can be specified as $[«variable.name»] and will be prompted, encrypted and replaced automatically during the setup (unless in batch mode). Please note that this process is skipped in batch mode and also if you use the default settings URL (for simplicity of testing). To make use of this feature simply fork or copy the settings to your own git repo. In case your credentials have changed or you made a typo, you can simply redo this step by first moving your ${DEVON_IDE_HOME}/conf/.m2/settings.xml file to a temporary folder and then calling devon mvn setup.

    +
  • +
  • +

    settings-security.xml is auto-generated for you by devonfw-ide with a random password. This should make it easier for devonfw-ide users to use password encryption and never add passwords in plain text for better security.

    +
  • +
+
+
+

Finally, there is a file variables for the user-specific configuration of devonfw-ide.

+
+ +
+
+
log
+
+

The log directory is used to store log files e.g. for the IDE configurator. You may look here for debug information if something goes wrong.

+
+ +
+
+
scripts
+
+

This directory is the heart of the devonfw-ide and contains the required scripts.

+
+
+
File structure of the conf folder
+
+
/scripts
+├──/ command
+│  ├── build
+│  ├── docker
+│  ├── eclipse
+│  ├── gradle
+│  ├── help
+│  ├── ide
+│  ├── intellij
+│  ├── ionic
+│  ├── jasypt
+│  ├── java
+│  ├── jenkins
+│  ├── kubectl
+│  ├── mvn
+│  ├── ng
+│  ├── node
+│  ├── npm
+│  ├── project
+│  ├── release
+│  ├── sonar
+│  ├── vscode
+│  └── yarn
+├── devon
+├── devon.bat
+├── environment-project
+├── environment-project.bat
+├── functions
+└── devon.properties
+
+
+
+

The command folder contains the commandlets. +The devon script is the key command line interface for devonfw-ide. +There is also devon.bat that can be used in cmd or PowerShell. +As the devon CLI can be used as a global command on your computer from any directory and gets installed centrally, it aims to be stable, minimal, and lightweight. +The key logic to set up the environment variables is therefore in a separate script environment-project and its Windows variant environment-project.bat inside this scripts folder. +The file functions contains a collection of reusable bash functions. +These are sourced and used by the commandlets. +Finally the devon.properties file contains defaults for the general configuration of devonfw-ide.

+
+ +
+
+
settings
+
+

The devonfw-ide requires settings with configuration templates for the arbitrary tools.

+
+
+

To get an initial set of these settings we provide the default ide-settings as an initial package. These are also released so you can download the latest stable or any history version at maven central.

+
+
+

To test devonfw-ide or for very small projects you can also use the latest default settings (just hit return when setup is asking for the Settings URL). +However, for collaborative projects we strongly encourage you to distribute and maintain the settings via a dedicated and project specific git repository. +This gives you the freedom to control and manage the tools with their versions and configurations during the project lifecycle. +Therefore simply follow the admin usage guide.

+
+
+
+
Structure
+
+

The settings folder (see SETTINGS_PATH) has to follow this file structure:

+
+
+
File structure of settings
+
+
/settings
+├──/ devon
+│  ├──/ conf
+│  │  ├──/ .m2
+│  │  │  └── settings.xml
+│  │  ├──/ npm
+│  │  │  └── .npmrc
+│  │  └── devon.properties
+├──/ eclipse
+│  ├──/ workspace
+│  │  ├──/ setup
+│  │  └──/ update
+│  ├── lifecycle-mapping-metadata.xml
+│  └── project.dictionary
+├──/ ...
+├──/ sonarqube
+│  └──/ profiles
+│     ├── Devon-C#.xml
+│     ├── ...
+│     └── Devon-XML.xml
+├──/ vscode
+│  └──/ workspace
+│     ├──/ setup
+│     └──/ update
+└── devon.properties
+
+
+
+

As you can see, the settings folder contains sub-folders for tools of the IDE. +So the devon folder contains devon.properties files for the configuration of your environment. +Further, for the IDEs such as eclipse or vscode, the according folders contain the templates to manage the workspace via our configurator.

+
+
+
+
Configuration Philosophy
+
+

Different tools and configuration files require a different handling:

+
+
+
    +
  • +

    Where suitable, we directly use these configurations from your settings (e.g. for eclipse/lifecycle-mapping-metadata.xml, or eclipse/project.dictionary).

    +
  • +
  • +

    The devon folder in settings contains templates for configuration files. These are copied to the devonfw-ide installation during setup (if no such file already exists). In this way the settings repository can provide reasonable defaults but allows the user to take over control and customize to his personal needs (e.g. .m2/settings.xml).

    +
  • +
  • +

    Other configurations need to be imported manually. To avoid manual steps and simplify use we try to automate as much as possible. This currently applies to sonarqube profiles but will be automated with sonar-devon4j-plugin in the future.

    +
  • +
  • +

    For tools with complex configuration structures like eclipse, intellij, or vscode we provide a smart mechanism via our configurator.

    +
  • +
+
+
+
+
Customize Settings
+
+

You can easily customize these settings for the requirements of your project. We suggest that one team member is responsible to ensure that everything stays consistent and works.

+
+
+

You may also create new sub-folders in settings and put individual items according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth to share in your team. However, to share and maintain knowledge we recommend to use a wiki.

+
+ +
+
+
software
+
+

The software folder contains the third party tools for your IDE such as maven, npm, java, etc. +With respect to the licensing terms you may create a custom archive containing a devonfw-ide together with the required software. +However, to be platform independent and allow lightweight updates, the devonfw-ide is capable to download and install the software automatically for you.

+
+
+
+
Repository
+
+

By default, software is downloaded via the internet from public download URLs of the according tools. However, some projects may need specific tools or tool versions that are not publicly available. +In such case, they can create their own software repository (e.g. in a VPN) and configure the base URL of it via DEVON_SOFTWARE_REPOSITORY variable. +Then, devonfw-ide will download all software from this repository only instead of the default public download URLs. +This repository (URL) should be accessible within your network via HTTPS (or HTTP) and without any authentication. +The repository needs to have the following structure:

+
+
+
+
${DEVON_SOFTWARE_REPOSITORY}/«tool»/«version»/«tool»-«version»[-«os»].tgz
+
+
+
+

So for every tool «tool» (java, maven, vscode, eclipse, etc.) you need to provide a folder in your repository. +Within this folder for every supported version «version» you need a subfolder. +This subfolder needs to contain the tool in that version for every operating system «os» (windows, linux, or mac - omitted if platform independent, e.g. for maven).

+
+
+
+
Shared
+
+

By default, each installation of devonfw-ide has its own physical installations of the required tools in the desired versions stored in its local software folder. +While this is great for isolation of devonfw-ide installations and to prevent side-effects, it can cause a huge waste of disc resources in case you are having many installations of devonfw-ide. +If you are a power-user of devonfw-ide with more than ten or even up to hundreds of installations on your machine, you might love to share installations of a software tool in a particular version between multiple devonfw-ide installations.

+
+
+ + + + + +
+ + +If you use this power-feature you are taking responsibility for side-effects and should not expect support. Also if you are using Windows please read Symlinks in Windows and make your mind if you really want to do so. You might also use this hint and maintain it manually without enabling the following feature. +
+
+
+

In order to do so, you only need to configure the variable DEVON_SOFTWARE_PATH in your ~/devon.properties pointing to an existing directory on your disc (e.g. /projects/software or C:\projects\software). +Then devonfw-ide will install required software into ${DEVON_SOFTWARE_PATH}/${software_name}/${software_version} as needed and create a symbolic link to it in ${DEVON_IDE_HOME}/software/${software_name}.

+
+
+

As a benefit, another devonfw-ide installation using the same software with the same version can re-use the existing installation and only needs to create the symbolic link. No more waste of having many identical JDK installations on your disc.

+
+
+

As a drawback, you need to be aware that specific tools may be "manipulated" after installation. +The most common case is that a tool allows to install plugins or extensions such as all IDEs do. Such "manipulations" will cause side-effects between the different devonfw-ide installations sharing the same version of that tool. +While this can also be a benefit it may also cause trouble. +If you have a sensitive project that should not be affected by such side-effects, you may again override the DEVON_SOFTWARE_PATH variable to the empty value in your ${DEVON_IDE_HOME}/conf/devon.properties of that sensitive installation:

+
+
+
+
DEVON_SOFTWARE_PATH=
+
+
+
+

This will disable this feature particularly for that specific sensitive devonfw-ide installation but let you use it for all other ones.

+
+
+
+
Custom
+
+

In some cases, a project might need (proprietary) tools that are not supported by devonfw-ide. A very simple solution is to get a release of devonfw-ide and add the tool(s) to the software folder and then distribute this modified release to your team. However, this has several drawbacks as you then have a fork of devonfw-ide and will lose your tool(s) when updating to a new release.

+
+
+

As a solution for this need, devonfw-ide lets you configure custom tools via the DEVON_IDE_CUSTOM_TOOLS variable. It can be defined in devon.properties of your settings git repository as an array of the custom tools you need to add. +For each entry the following applies:

+
+
+
    +
  • +

    It needs to have the form «tool»:«version»[:all][:«repository-url»]

    +
  • +
  • +

    The first entry must have the «repository-url» included which is used as default

    +
  • +
  • +

    Further entries will inherit this default if omitted

    +
  • +
  • +

    This URL is used in the same way as described above for a software repository.

    +
  • +
  • +

    The DEVON_SOFTWARE_REPOSITORY variable is ignored by this feature.

    +
  • +
  • +

    The optional infix :all is used to indicate that the tool is platform independent. Otherwise, an OS specific infix is appended to the URL file to download for your platform (windows, linux, or mac).

    +
  • +
+
+
+

As an example, we define it in ${DEVON_IDE_HOME}/settings/devon.properties:

+
+
+
+
DEVON_IDE_CUSTOM_TOOLS=(jboss-eap:7.1.4.GA:all:https://host.tld/projects/my-project firefox:70.0.1)
+
+
+
+

This will download and extract the following content to your software folder:

+
+ +
+

Please note that if you are not using windows, the -windows suffix will be -mac or -linux.

+
+ +
+
+
system
+
+

The system folder contains documentation and solutions for operation system specific integration. Please have a look to get the maximum out of devonfw-ide and become a very efficient power user.

+
+ +
+
+
updates
+
+

The updates folder is used for temporary data. This includes:

+
+
+
    +
  • +

    extracted archives for installation and updates

    +
  • +
  • +

    backups of old content on updates to prevent data loss

    +
  • +
+
+
+

If all works fine you may clean this folder to save some kilo- or mega-bytes. Otherwise, you can ignore it unless you are looking for a backup after a failed or unplanned upgrade.

+
+ +
+
+
workspaces
+
+

The workspaces folder contains folders for your active work. There is a workspace folder main dedicated for your primary work. You may do all your work inside the main workspace. Also, you are free to create any number of additional workspace folders named as you like (e.g. test, release, testing, my-sub-project, etc.). Using multiple workspaces is especially relevant for Eclipse as each workspace has its own Eclipse runtime instance and configuration.

+
+
+

Within the workspace folder (e.g. workspaces/main) you are again free to create sub-folders for (sub-)projects according to your needs. We assume that in most cases you clone git repositories here. The following structure shows an example layout for devonfw:

+
+
+
File structure of workspaces
+
+
/ workspaces
+├──/ main
+│  ├──/ .metadata
+│  ├──/ ide
+│  ├──/ devon4j
+│  └──/ my-thai-star
+└──/ stable
+   ├──/ .metadata
+   ├──/ ide
+   └──/ devon4j
+
+
+
+

In the main workspace you may find the cloned forks for regular work (in the example e.g. devon4j) as a base to create pull-requests while in the stable workspace there is a clone of devon4j from the official devon4j. +However, this is just an example. Some people like to create separate workspaces for development and maintenance branches with git. Other people just switch between those via git checkout.

+
+ +
+
+
Project import
+
+

The devonfw-ide supports to automatically check out and import required projects into your IDE during setup. To configure this you put a .properties file for each desired project into the projects sub-folder in your settings. Each .properties file describes one "project" which you would like to check out and (potentially) import:

+
+
+
+
path=myproject
+workingsets=Set1,Set2
+workspace=example
+git.url=http://github.com/someorg/someproject
+git.branch=develop
+build.path=.
+build.cmd=mvn -DskipTests=true -Darchetype.test.skip=true clean install
+eclipse=import
+active=true
+
+
+
+
+
.Variables of project import
+
+
+
+

|== = +|Variable|Value|Meaning +|path|e.g. myproject, will clone into ${WORKSPACE_PATH}/myproject|(required) Path into which the project is cloned. This path is relative to the workspace. +|workingsets|e.g. ws1,ws2|(optional) This will create working sets (in eclipse). Each module (eclipse project) of this project will be part of all these working sets. Working sets will be automatically created if necessary. +|workspace|main|Workspace to use for checkout and import. Default is main. +|git.url|e.g. http://github.com/someorg/someproject|(required) Git URL to use for cloning the project. +|git.branch|e.g. develop|(optional) Git branch to checkout. Git default branch is default. +|build.path|e.g. . (default)|(optional) The directory inside path where to trigger an initial build after clone or pull (if build.cmd is set). For a regular project use . to build top-level project. +|build.cmd +|e.g. mvn -DskipTests=true -Darchetype.test.skip=true clean install +|(optional) The devonfw command to invoke to build the project after clone or pull. If omitted no build is triggered. +|eclipse|e.g. import|(optional) Desired action for eclipse IDE. If you put import here all modules (eclipse projects) in the current project will be imported into eclipse. If you leave this out or put any other value for this parameter, no change in eclipse is done. +|active|true|(optional) If set to false the project is skipped during the setup. +|== =

+
+
+

Please note that the .properties file is parsed via shell and not via java. So be careful with "advanced" features .properties files normally support.

+
+
+
+
+
+
+

Advanced Features

+ +
+

Cross-Platform Tooling

+ +
+
+

Git Client

+
+

If you are looking for a git client that works cross-platform we recommend to use Fork.

+
+
+
+

Draw Diagrams

+
+

To draw diagrams for your project or for blueprints in devonfw, we recommend the following cross-platform tools:

+
+
+
    +
  • +

    draw.io is a powerful generic vector painting program (similar to visio). You can get a free open-source edition for your desktop from here.

    +
  • +
  • +

    ObjectAid is a nice and easy to use eclipse plugin that you can use to quickly create UML diagrams from existing code. While class-diagrams are supported for free, you need to buy a license if you want to use the other diagram types.

    +
  • +
  • +

    PlantUML is a great tool that can render UML diagrams from simple markup that can be easily managed in git or other version-control systems together with your code. Its simplicity allows branching and merging unlike other greedy binary UML data-formats.

    +
  • +
+
+
+
+

Browser Plugins

+
+

There are tons of helpful browser plugins out there and it might be a matter of personal taste what you like to have installed. However, as we are heavily using github we want to promote octotree. +In case you also work with ZenHub you might want to install the Zenhub Browser Extension.

+
+ +
+
+

Windows Tooling

+ +
+
+

Installing software

+
+

The devon IDE already contains a lot of software. But if you need more, here are some ways to get it easily:

+
+
+
+

Chocolatey

+
+

Chocolatey is a repository for free and open source software similar to the repositories known from Linux like apt, apk, pacman, …​

+
+
+
+

Winget

+
+

Microsoft is also working on a repository for Windows called winget. It is currently in alpha state, but is expected to be integrated in the upcoming Windows 11.

+
+
+
+

Integration into Windows-Explorer

+
+

After you have set up your devonfw-ide on a windows machine, +you already have windows-explorer integration out-of-the-box. +Just right-click on the folder you would like to open in a terminal and choose from the context menu:

+
+
+
    +
  • +

    Git Bash

    +
  • +
  • +

    Open devonfw cmd shell here

    +
  • +
  • +

    Open devonfw PowerShell here

    +
  • +
  • +

    Open devonfw Cygwin Bash here (only if cygwin was installed during setup)

    +
  • +
+
+
+
+

Tabs everywhere

+
+

Many people got used to tabs that have been introduced by all major browsers:

+
+
+
+tabs in firefox +
+
Figure 1. Tabs in Firefox
+
+
+

This nice feature can be added to many other tools.

+
+
+
+

Tabs for Windows Explorer

+
+

If you want to have tabs for windows explorer simply install Clover

+
+
+
+tabs in windows explorer +
+
Figure 2. Tabs in Windows Explorer
+
+
+
+

Tabs for SSH

+
+

If you want to have tabs for your SSH client Putty (or even better Kitty that comes with WinSCP integration) you simply install SuperPutty +BTW: Windows 10 has already an SSH client included.

+
+
+
+tabs for SSH sessions +
+
Figure 3. Tabs for SSH
+
+
+
+

Tabs for CMD

+
+

If you want to have tabs for your windows command-line you simply install ConEmu. Here you can also add other shells like Putty. +Also you should have a look at the new Windows Terminal which also supports tabs.

+
+
+
+tabs for windows shells +
+
Figure 4. Tabs for CMD
+
+
+

See integration to make ConEmu work flawless with devonfw-ide.

+
+
+
+

Windows Helpers

+ +
+
+

Handle passwords

+
+

Do you want complex passwords that differ for each account for security? Do you only want to remember a single password for simplicity? Do you want to have both? Then, you need to install KeePass right now.

+
+
+
+

Real text editor

+
+

A real developer needs a real text editor and not Windows' built-in Notepad. +The most common choice is Notepad++.

+
+
+
+

Real compression tool

+
+

Do you need to deal with ZIP files, TGZ, dpkg, etc.? Just install 7zip and forget about Windows' built-in ZIP support (that is buggy with long file paths, etc.).

+
+
+
+

Smarter clipboard

+
+

Do you want to paste something from the clipboard but meanwhile you had to copy something else? Just, one of the many things you can easily do with ditto.

+
+
+
+

PowerToys

+
+

Microsoft provides some extensions to improve the workflow in windows called PowerToys. They include tools like a file renamer, a way to order your windows on the screen, a color picker and more.

+
+
+
+

Sysinternals Tools

+
+

A real developer will quickly notice that Windows' built-in tools to analyze processes, network connections, autostarts, etc. are quite poor. So, what you really would like is the Sysinternals-Suite. You can make process-explorer your default task manager. Use autoruns to prevent nasty background things to be started automatically. Use tcpview to figure out which process is blocking port 8080, etc.

+
+
+
+

Cope with file locks

+
+

Did you ever fail to delete a file or directory that was locked by some process and you did not even know which one it was? +Then you might love IoBit Unlocker. +See also this article.

+
+
+
+ +
+

Are you are used to symbolic and hard links in Linux? Do you have to work with Windows? Would you also like to have such links in Windows? Why not? Windows supports real links (not shortcuts like in other cases). +If you even want to have it integrated in windows explorer you might want to install linkshellextension. However, you might want to disable SmartMove in the configuration if you face strange performance issues when moving folders.

+
+
+
+

Linux

+
+

Install Cygwin and get your bash in windows with ssh-agent, awk, sed, tar, and all the tools you love (or hate). Windows 10 already includes Linux as an installable feature: WSL and from Version 2004 on WSL2, which is a native Linux Kernel running on Windows (in a lightweight VM).

+
+
+
+

X11

+
+

Do you want to connect via SSH and need to open an X11 app from the server? Do you want to see the GUI on your windows desktop? +No problem: Install VcXsrv.

+
+
+
+

Keyboard Freak

+
+

Are you a keyboard shortcut person? Do you want to have shortcuts for things like « and » ? +Then you should try AutoHotKey. +For the example (« and ») you can simply use this script to get started:

+
+
+
+
^<::Send {U+00AB}
+^+<::Send {U+00BB}
+
+
+
+

First, just press [ctrl][<] and [ctrl][>] ([ctrl][shift][<]). Next, create shortcuts to launch your IDE, to open your favorite tool, etc. +If you like a GUI to easily configure the scripts, that comes with a lot of extensions preinstalled, you should have a look at Ac’tive Aid.

+
+
+
+

Paint anywhere on your desktop

+
+

Do you collaborate sharing your screen, and want to mark a spot on top of what you see? Use Epic Pen to do just that.

+
+
+
+

Analyze graphs

+
+

Do you need to visualize complex graph structures? Convert them to Trivial Graph Format (.tgf), and run yEd to get an interactive visualization of your graph.

+
+
+
+

Up your screen capture game

+
+

Capture any part of your screen with a single click, directly upload to dropbox, or run a svn commit all in one go with Greenshot. Another screen capture tool where you can easily manage and edit your screenshots and also do screen recordings with is Screenpresso.

+
+
+
+

Fast Search in Windows

+
+

Everything is a desktop search utility for Windows that can rapidly find files and folders by name.

+
+ +
+
+

MacOS Tooling

+ +
+
+

Finder

+
+

If you want to open a terminal from a folder in Finder and automatically get your environment set properly for devonfw-ide you will find the perfect solution here.

+
+
+
+devonfw-ide integration in MacOS Finder +
+
+
+

So after installing (see below) the integration(s) provided here, you can easily open a terminal ready for your devonfw-ide:

+
+
+
    +
  • +

    right click ([control] + click) on file or folder in Finder

    +
  • +
  • +

    Expand the Quick-Actions sub-menu

    +
  • +
  • +

    Click on the desired action (e.g. Open devonfw-Terminal here)

    +
  • +
  • +

    Verify that your environment is properly initialized by invoking:

    +
    +
    +
    mvn -v
    +
    +
    +
  • +
+
+
+

To get this feature for macOS Terminal.app open Finder and run the workflow system/mac/terminal/Open_devonfw-Terminal_here.workflow (in ${DEVON_IDE_HOME}). For iTerm2.app (that can be installed from App Store) do the same with system/mac/iterm/Open_devonfw-iTerm_here.workflow.

+
+
+
+

Keyboard

+
+

Keyboard support is not an integration however, some users coming from other platforms may struggle with the way macOS deals with (external non-apple) keyboards. +So to make it short: if you are happy with your keyboard and shortcuts, you can skip all the following. +Otherwise, if you think that pressing keys like Home, End, etc. should just work as expected or pressing Alt Gr should allow you to type the special characters as printed on your German keyboard then here you will find a solution to your problems! +To get all automated you can just run the script system/mac/keyboard/install-mac-keyboard-support.sh (in ${DEVON_IDE_HOME}). +If you would like to understand what is going on, you want to customize the keyboard settings to your needs, or you want a keyboard layout other than German ISO, please read on.

+
+
+
+

Keyboard Layouts

+
+

Keyboard layouts allow a fine-grained mapping of each key on your keyboard to its resulting input character or behaviour. +They are macOS native features and do not need to have software running as a background service to make the keyboard mapping work (see Karabiner section below as an alternative). +They are provided as a so-called bundle (white lego brick icon). Like a macOS app this is a folder containing a Contents folder with a specific sub-folder structure. +In the Resources subfolder *.keylayout files are placed and define the exact mapping for the keyboard. +As an example we provide a Keyboard Layouts folder containing a bundle for a German keyboard mapping.

+
+
+

To install keyboard layouts simply double-click the bundle or copy it to ~/Library/Keyboard Layouts. +To actually use them go to System Preferences and select Keyboard. +Then, select the tab Input Sources. +With the + button you can add a keyboard layout for your daily usage with your Mac. +Please note that the keyboard layout shipped with devonfw-ide is called German-ISO and can be found in the Others section at the end of the list. +It can be used as an example or template, if you want to create your own layout.

+
+
+
+Keyboard Preferences / Input Sources +
+
+
+

When you have multiple mappings in place, on the top menu bar you will find a little icon next to the current time that allows you to switch between the keyboard layouts, which is very handy when you switch from your native MacBook keyboard to an external USB keyboard or vice versa. +Even for a pure MacOS geek this can be helpful in case a friend coming from Windows/Linux is supposed to type something on the Mac in a pair-programming session.

+
+
+

In our German keyboard mapping example you can use the keys like Alt Gr, etc. to type special characters as you would expect and as printed on your keyboard. +To make Pos1, End, etc. work properly across all apps please read on to the next section(s).

+
+
+

In case you would like to create your own keyboard layout you can of course edit the *.keylayout files in a text editor. +However, to make this much more comfortable, you can use the graphical editor tool Ukelele. +Besides the app itself, the Ukelele dmg file also contains a Documentation and a Resources folder. +The latter contains many keyboard layouts that you can use as a starting point.

+
+
+
+

Key Bindings

+
+

Still, various keyboard shortcuts might not work as expected for you. +Therefore, we provide you with an advanced configuration in the folder system/mac/keyboard/KeyBindings that you can copy to your ~/Library folder:

+
+
+
+
cd system/mac/keyboard/
+cp -r KeyBindings ~/Library
+
+
+
+

To make the changes work you need to log out and log in again or you can reboot. +After that, your Home (Pos1) and End buttons should work as expected including with selection via Shift and/or Command. +Also, you can use Command together with the left or right arrow key to move between words and combine it with Shift for selection. +As an example, for further customization you can press Command + < to type the unicode character «.

+
+
+

However, still some apps listen to keyboard events on a lower level and come with their own keyboard mappings. +In these apps you might still experience unexpected behaviour. +Solutions can be found in the following sub-sections.

+
+
+
+

Switch Control and Command

+
+

If you are used to windows or linux and get easily confused by the apple keyboard behaviour you might want to switch the Control and the Option key. +Open System Preferences and select Keyboard. +Then, in the first tab, click on the button Modifier Keys…​. +For every keyboard you can customize the behaviour of your modifier keys and therefore switch Control and Option as illustrated in the screenshot:

+
+
+
+Keyboard Preferences / Modifier Keys +
+
+
+

Programmers now should also disable that Control + Space is opening Spotlight Search as otherwise this shortcut can not be redefined in other apps like common IDEs.

+
+
+
+Keyboard Preferences / Shortcuts +
+
+
+
+

Eclipse

+
+

In Eclipse, move and select by word as described above does not work. +Even worse, the most important shortcut does not work: Control + Space for code completion (content assist). +You can manually redefine the key bindings in Preferences under General > Keys. +However, with multiple IDE installations and workspaces this will quickly get tedious. +Therefore, you can Export and Import specific Preferences such as Keys Preferences to/from a *.epf (Eclipse PreFerences) file. +We have done all this for you so you can just import the file located in system/mac/keyboard/Eclipse/eclipse-mac-keybindings.epf into your Eclipse. +Happy coding.

+
+
+
+

Karabiner

+
+

If you want more dynamics and do not worry about an app that has to run in the background to make your keyboard work as you like (no relevant performance overhead), you can try Karabiner Elements. +This is a powerful tool to remap your keyboard shortcuts. +In the UI you can only directly create and edit Simple Modifications that are too limited for most use-cases. +However, using Complex Modifications you can do a lot of magic to customize the keyboard behaviour to your personal needs. +A key with any combination of modifiers can be mapped to any key with arbitrary modifiers. +This can also be bound to conditions based on the frontmost application or the keyboard model. +These complex modifications are configured as *.json files. +We have included a set with useful rules for external keyboards, programmer shortcuts, etc. +If you have Karabiner installed, you only need to copy the contents of the karabiner folder located in this directory to your ~/.config folder:

+
+
+
+
cd system/mac/keyboard/
+cp karabiner/assets/complex_modifications/*.json ~/.config/karabiner/assets/complex_modifications/
+
+
+
+

Now, if you open the Complex Modifications in the Karabiner app, you can click on the + Add rule button and will see these mappings in the pop up. +Select the rules you want to add (e.g. add all) and you are done. +Unlike other solutions, you can quickly tweak your keyboard without the need to log out and restart apps, which gives faster trial and error turnarounds. +Further, if you want to tweak your own configs, Karabiner comes with a secondary app called Karabiner-EventViewer that shows you the names of the keys, modifiers, and apps for the events you are triggering. +This is very helpful to get the config right.

+
+ +
+
+

Linux Tooling

+
+

There is nothing in this section so far. If you are a Linux user, please share your experience and provide your valuable hints.

+
+ +
+
+

Lombok

+
+

Even though not officially recommended by devon4j some projects want to use lombok in their project. +As this requires some tweaks for IDEs we do support you with this guide in case you want to use it.

+
+
+
+

Lombok in Eclipse

+
+

For eclipse there is a plugin to activate lombok support in eclipse. +We have this already configured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon eclipse add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for lombok based projects you only need to activate this plugin in your project specific settings in lombok.properties for eclipse (replace false with true for plugin_active).

+
+
+
+

Lombok for VS-Code

+
+

For VisualStudio Code there is an extension to activate lombok support in VS-Code. +We have this already preconfigured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon vscode add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for lombok based projects you only need to activate this plugin in your project specific settings in lombok.properties for vscode (replace false with true for plugin_active).

+
+
+
+

Lombok for IntelliJ

+
+

For IntelliJ there is a plugin to activate lombok support in IntelliJ. +Currently we have not yet configured or automated this in devonfw-ide. +Please contribute to change this. See issues #453 and #491.

+
+
+
+
+
+

Support

+ +
+

Migration from oasp4j-ide

+
+

The devonfw-ide is a completely new and innovative solution for managing the local development environment that has been created from scratch. +Releases of OASP as well as releases of devonfw until version 3.1.x are based on the old oasp4j-ide that is now considered deprecated. As devonfw-ide is a complete redesign this will have some impact for the users. This section should help and assist so you do not get lost.

+
+
+
+

Get familiar with devonfw-ide

+
+

First of all you should roughly get familiar with the new devonfw-ide. The key features and changes are:

+
+
+
    +
  • +

    platform-agnostic (supports Windows, Mac, and Linux in a single distribution)

    +
  • +
  • +

    small core (reduced the download package from ~2 gigabyte to ~2 megabyte)

    +
  • +
  • +

    fast and easy updates (built in update support)

    +
  • +
  • +

    minimum number of scripts (removed tons of end-user scripts making things much simpler)

    +
  • +
  • +

    fully automated setup (run setup script and you are ready - even for advanced features that had to be configured manually before)

    +
  • +
  • +

    single command for everything (entire CLI available via new devon command)

    +
  • +
+
+
+

For all the details you should study the documentation starting from the beginning.

+
+
+
+

Migration of existing oasp4j-ide installation

+
+
    +
  • +

    extract new devonfw-ide-scripts on top of your existing installation

    +
  • +
  • +

    run setup

    +
  • +
  • +

    done

    +
  • +
+
+
+

If you get errors:

+
+
+
    +
  • +

    ask your technical lead to fix the settings git repo for devonfw-ide or offer him to do it for you.

    +
  • +
  • +

    you need to merge the devon folder into your settings

    +
  • +
  • +

    you need to merge the devon.properties into your settings

    +
  • +
  • +

    you should check your variables[-customized][.bat] and merge required customizations into the proper configuration

    +
  • +
+
+
+
+

Hints for users after migration

+
+

Getting used to all the new commands might be tedious when starting after a migration.

+
+
+
Comparison of commands
+

|== == == == == == == == == == == = +|oasp4j-ide command|devonfw-ide command|Comment +|create-or-update-workspace|devon eclipse ws-update +.4+|actually not needed anymore as workspace is updated automatically when IDE is launched. To launch your IDE simply run devon eclipse, devon intellij, or devon vscode. If you like to get launch scripts for your IDE e.g. Eclipse just call devon eclipse --all create-script. +|create-or-update-workspace «workspace»|cd «workspace» && devon eclipse ws-update +|update-all-workspaces|devon eclipse --all ws-update +|create-or-update-workspace-vs|devon vscode ws-update

+
+
+

|devcon workspace create «workspace»|Simply create the «workspace» directory (e.g. cd workspaces && mkdir examples)|

+
+
+

|scripts/update-eclipse-workspace-settings|devon eclipse ws-reverse|To add new properties (old option --new) use devon eclipse ws-reverse-add

+
+
+

|devcon project build
+devcon devon4j build
+devcon devon4ng build +|devon build|

+
+
+

|devcon devon4j create|devon java create|

+
+
+

|devcon devon4ng create|devon ng create|

+
+
+

|devcon system *
+devcon dist * +|setup or devon ide setup|

+
+
+

|console.bat|-|Simply open terminal in selected folder. On Windows right-click folder in windows-explorer and select open devonfw CMD here.

+
+
+

|devcon help|devon help|

+
+
+

|devcon doc|Read the documentation from devonfw.com| +|== == == == == == == == == == == =

+
+
+ +
+
+

License

+
+

The product devonfw-ide is licensed under the following terms.

+
+
+

Binaries of this product have been made available to you by devonfw under the Apache Public License 2.0.

+
+
+

The documentation of this product is licensed under the terms of the Creative Commons License (Attribution-No Derivatives 4.0 International).

+
+
+

All of the source code to this product is available under licenses which are both free and open source.

+
+
+

More specifically, most of the source code is available under the Apache Public License 2.0. The remainder of the software which is not under the Apache license is available under one of a variety of other free and open source licenses. Those that require reproduction of the license text in the distribution are given below. (Note: your copy of this product may not contain code covered by one or more of the licenses listed here, depending on the exact product and version you choose.)

+
+
+

The following table shows the components that may be used. The column inclusion indicates the way the component is included:

+
+
+
    +
  • +

    directly included means the component is directly contained in the download package of devonfw-ide we provide

    +
  • +
  • +

    default setup means the component is not initially included but will be downloaded during the setup by default

    +
  • +
  • +

    optional means the component is neither initially included nor downloaded by default, but only gets downloaded and installed if explicitly triggered by you when invoking additional commands or if explicitly configured by your project.

    +
  • +
+
+
+
Third party components
+

|== == == == == == == == == == == = +|Component|Inclusion|License +|https://github.com/devonfw/ide[devonfw-ide] | Directly included |https://github.com/devonfw/ide/blob/master/LICENSE[ASL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] API | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] Implementation | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://openjdk.java.net/[OpenJDK] / AdoptOpenJDK (Java) |Default Setup| GPLv2 +|https://maven.apache.org/[Maven] | Default Setup|https://www.apache.org/licenses/LICENSE-2.0[ASL 2.0] +|https://code.visualstudio.com/[VS Code] |Optional| MIT (Terms) +|https://github.com/devonfw/extension-pack-vscode[extension-pack-vscode] |Optional|https://github.com/devonfw/extension-pack-vscode/blob/master/LICENSE[ASL 2.0] +|https://www.eclipse.org/[Eclipse] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] +|https://github.com/devonfw/cobigen[CobiGen] |Optional|https://github.com/devonfw/cobigen/blob/master/LICENSE.txt[ASL 2.0] +|https://marketplace.eclipse.org/content/tm-terminal[TM Terminal] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] (see here) +|https://github.com/iloveeclipse/anyedittools/[AnyEdit] |Optional|https://github.com/iloveeclipse/anyedittools/blob/master/LICENSE.md[EPL 1.0] +|https://checkstyle.org/eclipse-cs/[EclipseCS] |Optional|https://github.com/checkstyle/eclipse-cs/blob/master/LICENSE[LGPL 2.1] +|https://marketplace.eclipse.org/content/spotbugs-eclipse-plugin[SpotBugs Eclipse plugin] |Optional|https://github.com/spotbugs/spotbugs/blob/master/LICENSE[LGPL 2.1] +|https://www.eclemma.org/[EclEmma] |Optional|https://www.eclemma.org/license.html[EPL 1.0] +|https://basti1302.github.io/startexplorer/[StartExplorer] |Optional|http://www.wtfpl.net/txt/copying/[WTFPL 2] +|http://myregexp.com/eclipsePlugin.html[regex tester] 
|Optional|http://www.gnu.org/licenses/gpl-2.0.html[GPL 2.0] (see here) +|https://github.com/m-m-m/eclipse-templatevariables/[eclipse-templatevariables] |Optional|https://github.com/m-m-m/eclipse-templatevariables/blob/master/LICENSE.txt[ASL 2.0] +|https://nodejs.org/[Node.js] |Default Setup|https://raw.githubusercontent.com/nodejs/node/master/LICENSE[License] +|https://www.npmjs.com/[NPM] |Default Setup|https://github.com/npm/cli/blob/latest/LICENSE[Artistic License 2.0] (Terms) +|https://cli.angular.io/[Angular CLI] (ng) |Optional|https://cli.angular.io/license.html[MIT] +|http://groovy-lang.org/[Groovy]|Optional|https://github.com/apache/groovy/blob/master/LICENSE[ASL 2.0] +|https://ant.apache.org/[Apache Ant]|Optional|https://github.com/apache/ant/blob/master/LICENSE[ASL 2.0] +|https://gradle.org/[Gradle] |Optional|https://github.com/gradle/gradle/blob/master/LICENSE[ASL 2.0] +|https://jenkins.io/[Jenkins] |Optional|https://github.com/jenkinsci/jenkins/blob/master/LICENSE.txt[MIT] +|https://www.sonarsource.com/plans-and-pricing/community/[SonarQube (Community Edition)] |Optional|https://github.com/SonarSource/sonarqube/blob/master/LICENSE.txt[LGPL 3.0] +|https://www.sonarlint.org/eclipse/[SonarLint] |Optional|https://github.com/SonarSource/sonarlint-eclipse/blob/master/LICENSE.txt[LGPL 3+] +|https://github.com/devonfw/cicdgen[cicdgen] |Optional|https://github.com/devonfw/cicdgen/blob/develop/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4j[devon4j] |Optional|https://github.com/devonfw/devon4j/blob/develop/LICENSE[ASL 2.0] +|https://github.com/devonfw/devon4ng[devon4ng] |Optional|https://github.com/devonfw/devon4ng/blob/master/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4node[devon4node] |Optional|https://github.com/devonfw/devon4node/blob/develop/LICENSE.txt[ASL 2.0] +|https://www.jetbrains.com/idea/[IntelliJ IDEA] |Optional|https://www.jetbrains.com/opensource/idea/[ASL 2.0] +|http://www.jasypt.org/[jasypt] 
|Optional|http://www.jasypt.org/license.html[ASL 2.0] +|https://www.docker.com/[docker]|Optional|https://docs.docker.com/engine/#licensing[ASL 2.0] and EULA +|https://kubernetes.io/[kubernetes]|Optional|https://github.com/kubernetes/kubernetes/blob/master/LICENSE[ASL 2.0] +|== == == == == == == == == == == =

+
+
+
+

Apache Software License - Version 2.0

+
+
+
                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+
+
+

Eclipse Public License - Version 1.0

+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+
+1. DEFINITIONS
+
+"Contribution" means:
+
+a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and
+
+b) in the case of each subsequent Contributor:
+
+i) changes to the Program, and
+
+ii) additions to the Program;
+
+where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program.
+
+"Contributor" means any person or entity that distributes the Program.
+
+"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+"Program" means the Contributions distributed in accordance with this Agreement.
+
+"Recipient" means anyone who receives the Program under this Agreement, including all Contributors.
+
+2. GRANT OF RIGHTS
+
+a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form.
+
+b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+
+c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+
+d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+
+3. REQUIREMENTS
+
+A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that:
+
+a) it complies with the terms and conditions of this Agreement; and
+
+b) its license agreement:
+
+i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+
+ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+
+iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and
+
+iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange.
+
+When the Program is made available in source code form:
+
+a) it must be made available under this Agreement; and
+
+b) a copy of this Agreement must be included with each copy of the Program.
+
+Contributors may not remove or alter any copyright notices contained within the Program.
+
+Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution.
+
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved.
+
+This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation.
+
+
+
+
+

Eclipse Public License - Version 2.0

+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE (“AGREEMENT”). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+1. DEFINITIONS
+
+“Contribution” means:
+
+    a) in the case of the initial Contributor, the initial content Distributed under this Agreement, and
+    b) in the case of each subsequent Contributor:
+        i) changes to the Program, and
+        ii) additions to the Program;
+    where such changes and/or additions to the Program originate from and are Distributed by that particular Contributor. A Contribution “originates” from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include changes or additions to the Program that are not Modified Works.
+
+“Contributor” means any person or entity that Distributes the Program.
+
+“Licensed Patents” mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+“Program” means the Contributions Distributed in accordance with this Agreement.
+
+“Recipient” means anyone who receives the Program under this Agreement or any Secondary License (as applicable), including Contributors.
+
+“Derivative Works” shall mean any work, whether in Source Code or other form, that is based on (or derived from) the Program and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship.
+
+“Modified Works” shall mean any work in Source Code or other form that results from an addition to, deletion from, or modification of the contents of the Program, including, for purposes of clarity any new file in Source Code form that contains any contents of the Program. Modified Works shall not include works that contain only declarations, interfaces, types, classes, structures, or files of the Program solely in each case in order to link to, bind by name, or subclass the Program or Modified Works thereof.
+
+“Distribute” means the acts of a) distributing or b) making available in any manner that enables the transfer of a copy.
+
+“Source Code” means the form of a Program preferred for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+“Secondary License” means either the GNU General Public License, Version 2.0, or any later versions of that license, including any exceptions or additional permissions as identified by the initial Contributor.
+2. GRANT OF RIGHTS
+
+    a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, Distribute and sublicense the Contribution of such Contributor, if any, and such Derivative Works.
+    b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in Source Code or other form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+    c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to Distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+    d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+    e) Notwithstanding the terms of any Secondary License, no Contributor makes additional grants to any Recipient (other than those set forth in this Agreement) as a result of such Recipient's receipt of the Program under the terms of a Secondary License (if permitted under the terms of Section 3).
+
+3. REQUIREMENTS
+
+3.1 If a Contributor Distributes the Program in any form, then:
+
+    a) the Program must also be made available as Source Code, in accordance with section 3.2, and the Contributor must accompany the Program with a statement that the Source Code for the Program is available under this Agreement, and informs Recipients how to obtain it in a reasonable manner on or through a medium customarily used for software exchange; and
+    b) the Contributor may Distribute the Program under a license different than this Agreement, provided that such license:
+        i) effectively disclaims on behalf of all other Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+        ii) effectively excludes on behalf of all other Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+        iii) does not attempt to limit or alter the recipients' rights in the Source Code under section 3.2; and
+        iv) requires any subsequent distribution of the Program by any party to be under a license that satisfies the requirements of this section 3.
+
+3.2 When the Program is Distributed as Source Code:
+
+    a) it must be made available under this Agreement, or if the Program (i) is combined with other material in a separate file or files made available under a Secondary License, and (ii) the initial Contributor attached to the Source Code the notice described in Exhibit A of this Agreement, then the Program may be made available under the terms of such Secondary Licenses, and
+    b) a copy of this Agreement must be included with each copy of the Program.
+
+3.3 Contributors may not remove or alter any copyright, patent, trademark, attribution notices, disclaimers of warranty, or limitations of liability (‘notices’) contained within the Program from any copy of the Program which they Distribute, provided that Contributors may add their own appropriate notices.
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor (“Commercial Contributor”) hereby agrees to defend and indemnify every other Contributor (“Indemnified Contributor”) against any losses, damages and costs (collectively “Losses”) arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement, including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be Distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to Distribute the Program (including its Contributions) under the new version.
+
+Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. Nothing in this Agreement is intended to be enforceable by any entity that is not a Contributor or Recipient. No third-party beneficiary rights are created under this Agreement.
+Exhibit A – Form of Secondary Licenses Notice
+
+“This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License, v. 2.0 are satisfied: {name license(s), version(s), and exceptions or additional permissions here}.”
+
+    Simply including a copy of this Agreement, including this Exhibit A is not sufficient to license the Source Code under Secondary Licenses.
+
+    If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice.
+
+    You may add additional accurate notices of copyright ownership.
+
+
+
+
+

MIT License

+
+
+
Copyright <YEAR> <COPYRIGHT HOLDER>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+
+
+

Artistic License - Version 2.0

+
+
+
Copyright (c) 2000-2006, The Perl Foundation.
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+Preamble
+
+This license establishes the terms under which a given free software Package may be copied, modified, distributed, and/or redistributed. The intent is that the Copyright Holder maintains some artistic control over the development of that Package while still keeping the Package available as open source and free software.
+
+You are always permitted to make arrangements wholly outside of this license directly with the Copyright Holder of a given Package. If the terms of this license do not permit the full use that you propose to make of the Package, you should contact the Copyright Holder and seek a different licensing arrangement.
+Definitions
+
+"Copyright Holder" means the individual(s) or organization(s) named in the copyright notice for the entire Package.
+
+"Contributor" means any party that has contributed code or other material to the Package, in accordance with the Copyright Holder's procedures.
+
+"You" and "your" means any person who would like to copy, distribute, or modify the Package.
+
+"Package" means the collection of files distributed by the Copyright Holder, and derivatives of that collection and/or of those files. A given Package may consist of either the Standard Version, or a Modified Version.
+
+"Distribute" means providing a copy of the Package or making it accessible to anyone else, or in the case of a company or organization, to others outside of your company or organization.
+
+"Distributor Fee" means any fee that you charge for Distributing this Package or providing support for this Package to another party. It does not mean licensing fees.
+
+"Standard Version" refers to the Package if it has not been modified, or has been modified only in ways explicitly requested by the Copyright Holder.
+
+"Modified Version" means the Package, if it has been changed, and such changes were not explicitly requested by the Copyright Holder.
+
+"Original License" means this Artistic License as Distributed with the Standard Version of the Package, in its current version or as it may be modified by The Perl Foundation in the future.
+
+"Source" form means the source code, documentation source, and configuration files for the Package.
+
+"Compiled" form means the compiled bytecode, object code, binary, or any other form resulting from mechanical transformation or translation of the Source form.
+Permission for Use and Modification Without Distribution
+
+(1) You are permitted to use the Standard Version and create and use Modified Versions for any purpose without restriction, provided that you do not Distribute the Modified Version.
+Permissions for Redistribution of the Standard Version
+
+(2) You may Distribute verbatim copies of the Source form of the Standard Version of this Package in any medium without restriction, either gratis or for a Distributor Fee, provided that you duplicate all of the original copyright notices and associated disclaimers. At your discretion, such verbatim copies may or may not include a Compiled form of the Package.
+
+(3) You may apply any bug fixes, portability changes, and other modifications made available from the Copyright Holder. The resulting Package will still be considered the Standard Version, and as such will be subject to the Original License.
+Distribution of Modified Versions of the Package as Source
+
+(4) You may Distribute your Modified Version as Source (either gratis or for a Distributor Fee, and with or without a Compiled form of the Modified Version) provided that you clearly document how it differs from the Standard Version, including, but not limited to, documenting any non-standard features, executables, or modules, and provided that you do at least ONE of the following:
+
+(a) make the Modified Version available to the Copyright Holder of the Standard Version, under the Original License, so that the Copyright Holder may include your modifications in the Standard Version.
+(b) ensure that installation of your Modified Version does not prevent the user installing or running the Standard Version. In addition, the Modified Version must bear a name that is different from the name of the Standard Version.
+(c) allow anyone who receives a copy of the Modified Version to make the Source form of the Modified Version available to others under
+(i) the Original License or
+(ii) a license that permits the licensee to freely copy, modify and redistribute the Modified Version using the same licensing terms that apply to the copy that the licensee received, and requires that the Source form of the Modified Version, and of any works derived from it, be made freely available in that license fees are prohibited but Distributor Fees are allowed.
+Distribution of Compiled Forms of the Standard Version or Modified Versions without the Source
+
+(5) You may Distribute Compiled forms of the Standard Version without the Source, provided that you include complete instructions on how to get the Source of the Standard Version. Such instructions must be valid at the time of your distribution. If these instructions, at any time while you are carrying out such distribution, become invalid, you must provide new instructions on demand or cease further distribution. If you provide valid instructions or cease distribution within thirty days after you become aware that the instructions are invalid, then you do not forfeit any of your rights under this license.
+
+(6) You may Distribute a Modified Version in Compiled form without the Source, provided that you comply with Section 4 with respect to the Source of the Modified Version.
+Aggregating or Linking the Package
+
+(7) You may aggregate the Package (either the Standard Version or Modified Version) with other packages and Distribute the resulting aggregation provided that you do not charge a licensing fee for the Package. Distributor Fees are permitted, and licensing fees for other components in the aggregation are permitted. The terms of this license apply to the use and Distribution of the Standard or Modified Versions as included in the aggregation.
+
+(8) You are permitted to link Modified and Standard Versions with other works, to embed the Package in a larger work of your own, or to build stand-alone binary or bytecode versions of applications that include the Package, and Distribute the result without restriction, provided the result does not expose a direct interface to the Package.
+Items That are Not Considered Part of a Modified Version
+
+(9) Works (including, but not limited to, modules and scripts) that merely extend or make use of the Package, do not, by themselves, cause the Package to be a Modified Version. In addition, such works are not considered parts of the Package itself, and are not subject to the terms of this license.
+General Provisions
+
+(10) Any use, modification, and distribution of the Standard or Modified Versions is governed by this Artistic License. By using, modifying or distributing the Package, you accept this license. Do not use, modify, or distribute the Package, if you do not accept this license.
+
+(11) If your Modified Version has been derived from a Modified Version made by someone other than you, you are nevertheless required to ensure that your Modified Version complies with the requirements of this license.
+
+(12) This license does not grant you the right to use any trademark, service mark, tradename, or logo of the Copyright Holder.
+
+(13) This license includes the non-exclusive, worldwide, free-of-charge patent license to make, have made, use, offer to sell, sell, import and otherwise transfer the Package with respect to any patent claims licensable by the Copyright Holder that are necessarily infringed by the Package. If you institute patent litigation (including a cross-claim or counterclaim) against any party alleging that the Package constitutes direct or contributory patent infringement, then this Artistic License to you shall terminate on the date that such litigation is filed.
+
+(14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+
+

Creative Commons License - Attribution-NoDerivatives 4.0 International

+
+
+
By exercising the Licensed Rights (defined below), You accept and agree to be bound by the terms and conditions of this Creative Commons Attribution-NoDerivatives 4.0 International Public License ("Public License"). To the extent this Public License may be interpreted as a contract, You are granted the Licensed Rights in consideration of Your acceptance of these terms and conditions, and the Licensor grants You such rights in consideration of benefits the Licensor receives from making the Licensed Material available under these terms and conditions.
+
+Section 1 – Definitions.
+
+    Adapted Material means material subject to Copyright and Similar Rights that is derived from or based upon the Licensed Material and in which the Licensed Material is translated, altered, arranged, transformed, or otherwise modified in a manner requiring permission under the Copyright and Similar Rights held by the Licensor. For purposes of this Public License, where the Licensed Material is a musical work, performance, or sound recording, Adapted Material is always produced where the Licensed Material is synched in timed relation with a moving image.
+    Copyright and Similar Rights means copyright and/or similar rights closely related to copyright including, without limitation, performance, broadcast, sound recording, and Sui Generis Database Rights, without regard to how the rights are labeled or categorized. For purposes of this Public License, the rights specified in Section 2(b)(1)-(2) are not Copyright and Similar Rights.
+    Effective Technological Measures means those measures that, in the absence of proper authority, may not be circumvented under laws fulfilling obligations under Article 11 of the WIPO Copyright Treaty adopted on December 20, 1996, and/or similar international agreements.
+    Exceptions and Limitations means fair use, fair dealing, and/or any other exception or limitation to Copyright and Similar Rights that applies to Your use of the Licensed Material.
+    Licensed Material means the artistic or literary work, database, or other material to which the Licensor applied this Public License.
+    Licensed Rights means the rights granted to You subject to the terms and conditions of this Public License, which are limited to all Copyright and Similar Rights that apply to Your use of the Licensed Material and that the Licensor has authority to license.
+    Licensor means the individual(s) or entity(ies) granting rights under this Public License.
+    Share means to provide material to the public by any means or process that requires permission under the Licensed Rights, such as reproduction, public display, public performance, distribution, dissemination, communication, or importation, and to make material available to the public including in ways that members of the public may access the material from a place and at a time individually chosen by them.
+    Sui Generis Database Rights means rights other than copyright resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other essentially equivalent rights anywhere in the world.
+    You means the individual or entity exercising the Licensed Rights under this Public License. Your has a corresponding meaning.
+
+Section 2 – Scope.
+
+    License grant.
+        Subject to the terms and conditions of this Public License, the Licensor hereby grants You a worldwide, royalty-free, non-sublicensable, non-exclusive, irrevocable license to exercise the Licensed Rights in the Licensed Material to:
+            reproduce and Share the Licensed Material, in whole or in part; and
+            produce and reproduce, but not Share, Adapted Material.
+        Exceptions and Limitations. For the avoidance of doubt, where Exceptions and Limitations apply to Your use, this Public License does not apply, and You do not need to comply with its terms and conditions.
+        Term. The term of this Public License is specified in Section 6(a).
+        Media and formats; technical modifications allowed. The Licensor authorizes You to exercise the Licensed Rights in all media and formats whether now known or hereafter created, and to make technical modifications necessary to do so. The Licensor waives and/or agrees not to assert any right or authority to forbid You from making technical modifications necessary to exercise the Licensed Rights, including technical modifications necessary to circumvent Effective Technological Measures. For purposes of this Public License, simply making modifications authorized by this Section 2(a)(4) never produces Adapted Material.
+        Downstream recipients.
+            Offer from the Licensor – Licensed Material. Every recipient of the Licensed Material automatically receives an offer from the Licensor to exercise the Licensed Rights under the terms and conditions of this Public License.
+            No downstream restrictions. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, the Licensed Material if doing so restricts exercise of the Licensed Rights by any recipient of the Licensed Material.
+        No endorsement. Nothing in this Public License constitutes or may be construed as permission to assert or imply that You are, or that Your use of the Licensed Material is, connected with, or sponsored, endorsed, or granted official status by, the Licensor or others designated to receive attribution as provided in Section 3(a)(1)(A)(i).
+
+    Other rights.
+        Moral rights, such as the right of integrity, are not licensed under this Public License, nor are publicity, privacy, and/or other similar personality rights; however, to the extent possible, the Licensor waives and/or agrees not to assert any such rights held by the Licensor to the limited extent necessary to allow You to exercise the Licensed Rights, but not otherwise.
+        Patent and trademark rights are not licensed under this Public License.
+        To the extent possible, the Licensor waives any right to collect royalties from You for the exercise of the Licensed Rights, whether directly or through a collecting society under any voluntary or waivable statutory or compulsory licensing scheme. In all other cases the Licensor expressly reserves any right to collect such royalties.
+
+Section 3 – License Conditions.
+
+Your exercise of the Licensed Rights is expressly made subject to the following conditions.
+
+    Attribution.
+
+        If You Share the Licensed Material, You must:
+            retain the following if it is supplied by the Licensor with the Licensed Material:
+                identification of the creator(s) of the Licensed Material and any others designated to receive attribution, in any reasonable manner requested by the Licensor (including by pseudonym if designated);
+                a copyright notice;
+                a notice that refers to this Public License;
+                a notice that refers to the disclaimer of warranties;
+                a URI or hyperlink to the Licensed Material to the extent reasonably practicable;
+            indicate if You modified the Licensed Material and retain an indication of any previous modifications; and
+            indicate the Licensed Material is licensed under this Public License, and include the text of, or the URI or hyperlink to, this Public License.
+        For the avoidance of doubt, You do not have permission under this Public License to Share Adapted Material.
+        You may satisfy the conditions in Section 3(a)(1) in any reasonable manner based on the medium, means, and context in which You Share the Licensed Material. For example, it may be reasonable to satisfy the conditions by providing a URI or hyperlink to a resource that includes the required information.
+        If requested by the Licensor, You must remove any of the information required by Section 3(a)(1)(A) to the extent reasonably practicable.
+
+Section 4 – Sui Generis Database Rights.
+
+Where the Licensed Rights include Sui Generis Database Rights that apply to Your use of the Licensed Material:
+
+    for the avoidance of doubt, Section 2(a)(1) grants You the right to extract, reuse, reproduce, and Share all or a substantial portion of the contents of the database, provided You do not Share Adapted Material;
+    if You include all or a substantial portion of the database contents in a database in which You have Sui Generis Database Rights, then the database in which You have Sui Generis Database Rights (but not its individual contents) is Adapted Material; and
+    You must comply with the conditions in Section 3(a) if You Share all or a substantial portion of the contents of the database.
+
+For the avoidance of doubt, this Section 4 supplements and does not replace Your obligations under this Public License where the Licensed Rights include other Copyright and Similar Rights.
+
+Section 5 – Disclaimer of Warranties and Limitation of Liability.
+
+    Unless otherwise separately undertaken by the Licensor, to the extent possible, the Licensor offers the Licensed Material as-is and as-available, and makes no representations or warranties of any kind concerning the Licensed Material, whether express, implied, statutory, or other. This includes, without limitation, warranties of title, merchantability, fitness for a particular purpose, non-infringement, absence of latent or other defects, accuracy, or the presence or absence of errors, whether or not known or discoverable. Where disclaimers of warranties are not allowed in full or in part, this disclaimer may not apply to You.
+    To the extent possible, in no event will the Licensor be liable to You on any legal theory (including, without limitation, negligence) or otherwise for any direct, special, indirect, incidental, consequential, punitive, exemplary, or other losses, costs, expenses, or damages arising out of this Public License or use of the Licensed Material, even if the Licensor has been advised of the possibility of such losses, costs, expenses, or damages. Where a limitation of liability is not allowed in full or in part, this limitation may not apply to You.
+
+    The disclaimer of warranties and limitation of liability provided above shall be interpreted in a manner that, to the extent possible, most closely approximates an absolute disclaimer and waiver of all liability.
+
+Section 6 – Term and Termination.
+
+    This Public License applies for the term of the Copyright and Similar Rights licensed here. However, if You fail to comply with this Public License, then Your rights under this Public License terminate automatically.
+
+    Where Your right to use the Licensed Material has terminated under Section 6(a), it reinstates:
+        automatically as of the date the violation is cured, provided it is cured within 30 days of Your discovery of the violation; or
+        upon express reinstatement by the Licensor.
+    For the avoidance of doubt, this Section 6(b) does not affect any right the Licensor may have to seek remedies for Your violations of this Public License.
+    For the avoidance of doubt, the Licensor may also offer the Licensed Material under separate terms or conditions or stop distributing the Licensed Material at any time; however, doing so will not terminate this Public License.
+    Sections 1, 5, 6, 7, and 8 survive termination of this Public License.
+
+Section 7 – Other Terms and Conditions.
+
+    The Licensor shall not be bound by any additional or different terms or conditions communicated by You unless expressly agreed.
+    Any arrangements, understandings, or agreements regarding the Licensed Material not stated herein are separate from and independent of the terms and conditions of this Public License.
+
+Section 8 – Interpretation.
+
+    For the avoidance of doubt, this Public License does not, and shall not be interpreted to, reduce, limit, restrict, or impose conditions on any use of the Licensed Material that could lawfully be made without permission under this Public License.
+    To the extent possible, if any provision of this Public License is deemed unenforceable, it shall be automatically reformed to the minimum extent necessary to make it enforceable. If the provision cannot be reformed, it shall be severed from this Public License without affecting the enforceability of the remaining terms and conditions.
+    No term or condition of this Public License will be waived and no failure to comply consented to unless expressly agreed to by the Licensor.
+    Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority.
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 2.1

+
+
+
 Version 2.1, February 1999
+
+Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL.  It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users.
+
+This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below.
+
+When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things.
+
+To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it.
+
+For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights.
+
+We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library.
+
+To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others.
+
+Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license.
+
+Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs.
+
+When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library.
+
+We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances.
+
+For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License.
+
+In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system.
+
+Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library.
+
+The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you".
+
+A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables.
+
+The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".)
+
+"Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library.
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does.
+
+1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) The modified work must itself be a software library.
+    b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change.
+    c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License.
+    d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful.
+
+    (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices.
+
+Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy.
+
+This option is useful when you wish to copy part of the code of the Library into a program that is not a library.
+
+4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange.
+
+If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code.
+
+5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License.
+
+However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables.
+
+When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law.
+
+If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.)
+
+Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself.
+
+6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications.
+
+You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things:
+
+    a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.)
+    b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with.
+    c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution.
+    d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place.
+    e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy.
+
+For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute.
+
+7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above.
+    b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it.
+
+10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License.
+
+11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation.
+
+14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Libraries
+
+If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License).
+
+To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the library's name and an idea of what it does.
+Copyright (C) year  name of author
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2.1 of the License, or (at your option) any later version.
+
+This library is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with this library; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the library, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright interest in
+the library `Frob' (a library for tweaking knobs) written
+by James Random Hacker.
+
+signature of Ty Coon, 1 April 1990
+Ty Coon, President of Vice
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 3

+
+
+
Version 3, 29 June 2007
+
+Copyright © 2007 Free Software Foundation, Inc. <https://fsf.org/>
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+
+This version of the GNU Lesser General Public License incorporates the terms and conditions of version 3 of the GNU General Public License, supplemented by the additional permissions listed below.
+0. Additional Definitions.
+
+As used herein, “this License” refers to version 3 of the GNU Lesser General Public License, and the “GNU GPL” refers to version 3 of the GNU General Public License.
+
+“The Library” refers to a covered work governed by this License, other than an Application or a Combined Work as defined below.
+
+An “Application” is any work that makes use of an interface provided by the Library, but which is not otherwise based on the Library. Defining a subclass of a class defined by the Library is deemed a mode of using an interface provided by the Library.
+
+A “Combined Work” is a work produced by combining or linking an Application with the Library. The particular version of the Library with which the Combined Work was made is also called the “Linked Version”.
+
+The “Minimal Corresponding Source” for a Combined Work means the Corresponding Source for the Combined Work, excluding any source code for portions of the Combined Work that, considered in isolation, are based on the Application, and not on the Linked Version.
+
+The “Corresponding Application Code” for a Combined Work means the object code and/or source code for the Application, including any data and utility programs needed for reproducing the Combined Work from the Application, but excluding the System Libraries of the Combined Work.
+1. Exception to Section 3 of the GNU GPL.
+
+You may convey a covered work under sections 3 and 4 of this License without being bound by section 3 of the GNU GPL.
+2. Conveying Modified Versions.
+
+If you modify a copy of the Library, and, in your modifications, a facility refers to a function or data to be supplied by an Application that uses the facility (other than as an argument passed when the facility is invoked), then you may convey a copy of the modified version:
+
+    a) under this License, provided that you make a good faith effort to ensure that, in the event an Application does not supply the function or data, the facility still operates, and performs whatever part of its purpose remains meaningful, or
+    b) under the GNU GPL, with none of the additional permissions of this License applicable to that copy.
+
+3. Object Code Incorporating Material from Library Header Files.
+
+The object code form of an Application may incorporate material from a header file that is part of the Library. You may convey such object code under terms of your choice, provided that, if the incorporated material is not limited to numerical parameters, data structure layouts and accessors, or small macros, inline functions and templates (ten or fewer lines in length), you do both of the following:
+
+    a) Give prominent notice with each copy of the object code that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the object code with a copy of the GNU GPL and this license document.
+
+4. Combined Works.
+
+You may convey a Combined Work under terms of your choice that, taken together, effectively do not restrict modification of the portions of the Library contained in the Combined Work and reverse engineering for debugging such modifications, if you also do each of the following:
+
+    a) Give prominent notice with each copy of the Combined Work that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the Combined Work with a copy of the GNU GPL and this license document.
+    c) For a Combined Work that displays copyright notices during execution, include the copyright notice for the Library among these notices, as well as a reference directing the user to the copies of the GNU GPL and this license document.
+    d) Do one of the following:
+        0) Convey the Minimal Corresponding Source under the terms of this License, and the Corresponding Application Code in a form suitable for, and under terms that permit, the user to recombine or relink the Application with a modified version of the Linked Version to produce a modified Combined Work, in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.
+        1) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (a) uses at run time a copy of the Library already present on the user's computer system, and (b) will operate properly with a modified version of the Library that is interface-compatible with the Linked Version.
+    e) Provide Installation Information, but only if you would otherwise be required to provide such information under section 6 of the GNU GPL, and only to the extent that such information is necessary to install and execute a modified version of the Combined Work produced by recombining or relinking the Application with a modified version of the Linked Version. (If you use option 4d0, the Installation Information must accompany the Minimal Corresponding Source and Corresponding Application Code. If you use option 4d1, you must provide the Installation Information in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.)
+
+5. Combined Libraries.
+
+You may place library facilities that are a work based on the Library side by side in a single library together with other library facilities that are not Applications and are not covered by this License, and convey such a combined library under terms of your choice, if you do both of the following:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities, conveyed under the terms of this License.
+    b) Give prominent notice with the combined library that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+6. Revised Versions of the GNU Lesser General Public License.
+
+The Free Software Foundation may publish revised and/or new versions of the GNU Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library as you received it specifies that a certain numbered version of the GNU Lesser General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that published version or of any later version published by the Free Software Foundation. If the Library as you received it does not specify a version number of the GNU Lesser General Public License, you may choose any version of the GNU Lesser General Public License ever published by the Free Software Foundation.
+
+If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library.
+
+
+
+
+

GNU GENERAL PUBLIC LICENSE - Version 2

+
+
+
 Version 2, June 1991
+
+Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA
+
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too.
+
+When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things.
+
+To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it.
+
+For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights.
+
+We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software.
+
+Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations.
+
+Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all.
+
+The precise terms and conditions for copying, distribution and modification follow.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does.
+
+1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change.
+    b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License.
+    c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code.
+
+4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it.
+
+6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License.
+
+7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation.
+
+10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Programs
+
+If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms.
+
+To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the program's name and an idea of what it does.
+Copyright (C) yyyy  name of author
+
+This program is free software; you can redistribute it and/or
+modify it under the terms of the GNU General Public License
+as published by the Free Software Foundation; either version 2
+of the License, or (at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this when it starts in an interactive mode:
+
+Gnomovision version 69, Copyright (C) year name of author
+Gnomovision comes with ABSOLUTELY NO WARRANTY; for details
+type `show w'.  This is free software, and you are welcome
+to redistribute it under certain conditions; type `show c'
+for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright
+interest in the program `Gnomovision'
+(which makes passes at compilers) written
+by James Hacker.
+
+signature of Ty Coon, 1 April 1989
+Ty Coon, President of Vice
+
+
+
+
+

DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2

+
+
+
            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+                    Version 2, December 2004
+
+ Copyright (C) 2004 Sam Hocevar
+  14 rue de Plaisance, 75014 Paris, France
+ Everyone is permitted to copy and distribute verbatim or modified
+ copies of this license document, and changing it is allowed as long
+ as the name is changed.
+
+            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. You just DO WHAT THE FUCK YOU WANT TO.
+
+
+
+
+

License of Node.js

+
+
+
Node.js is licensed for use as follows:
+
+"""
+Copyright Node.js contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+This license applies to parts of Node.js originating from the
+https://github.com/joyent/node repository:
+
+"""
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+The Node.js license applies to all parts of Node.js that are not externally
+maintained libraries.
+
+The externally maintained libraries used by Node.js are:
+
+- Acorn, located at deps/acorn, is licensed as follows:
+  """
+    Copyright (C) 2012-2018 by various contributors (see AUTHORS)
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- Acorn plugins, located at deps/acorn-plugins, is licensed as follows:
+  """
+    Copyright (C) 2017-2018 by Adrian Heine
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- c-ares, located at deps/cares, is licensed as follows:
+  """
+    Copyright (c) 2007 - 2018, Daniel Stenberg with many contributors, see AUTHORS
+    file.
+
+    Copyright 1998 by the Massachusetts Institute of Technology.
+
+    Permission to use, copy, modify, and distribute this software and its
+    documentation for any purpose and without fee is hereby granted, provided that
+    the above copyright notice appear in all copies and that both that copyright
+    notice and this permission notice appear in supporting documentation, and that
+    the name of M.I.T. not be used in advertising or publicity pertaining to
+    distribution of the software without specific, written prior permission.
+    M.I.T. makes no representations about the suitability of this software for any
+    purpose.  It is provided "as is" without express or implied warranty.
+  """
+
+- ICU, located at deps/icu-small, is licensed as follows:
+  """
+    COPYRIGHT AND PERMISSION NOTICE (ICU 58 and later)
+
+    Copyright © 1991-2019 Unicode, Inc. All rights reserved.
+    Distributed under the Terms of Use in https://www.unicode.org/copyright.html.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of the Unicode data files and any associated documentation
+    (the "Data Files") or Unicode software and any associated documentation
+    (the "Software") to deal in the Data Files or Software
+    without restriction, including without limitation the rights to use,
+    copy, modify, merge, publish, distribute, and/or sell copies of
+    the Data Files or Software, and to permit persons to whom the Data Files
+    or Software are furnished to do so, provided that either
+    (a) this copyright and permission notice appear with all copies
+    of the Data Files or Software, or
+    (b) this copyright and permission notice appear in associated
+    Documentation.
+
+    THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
+    ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT OF THIRD PARTY RIGHTS.
+    IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
+    NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
+    DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+    DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+    TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+    PERFORMANCE OF THE DATA FILES OR SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale,
+    use or other dealings in these Data Files or Software without prior
+    written authorization of the copyright holder.
+
+    ---------------------
+
+    Third-Party Software Licenses
+
+    This section contains third-party software notices and/or additional
+    terms for licensed third-party software components included within ICU
+    libraries.
+
+    1. ICU License - ICU 1.8.1 to ICU 57.1
+
+    COPYRIGHT AND PERMISSION NOTICE
+
+    Copyright (c) 1995-2016 International Business Machines Corporation and others
+    All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, and/or sell copies of the Software, and to permit persons
+    to whom the Software is furnished to do so, provided that the above
+    copyright notice(s) and this permission notice appear in all copies of
+    the Software and that both the above copyright notice(s) and this
+    permission notice appear in supporting documentation.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
+    OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
+    HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY
+    SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER
+    RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
+    CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+    CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale, use
+    or other dealings in this Software without prior written authorization
+    of the copyright holder.
+
+    All trademarks and registered trademarks mentioned herein are the
+    property of their respective owners.
+
+    2. Chinese/Japanese Word Break Dictionary Data (cjdict.txt)
+
+     #     The Google Chrome software developed by Google is licensed under
+     # the BSD license. Other software included in this distribution is
+     # provided under other licenses, as set forth below.
+     #
+     #  The BSD License
+     #  http://opensource.org/licenses/bsd-license.php
+     #  Copyright (C) 2006-2008, Google Inc.
+     #
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     # modification, are permitted provided that the following conditions are met:
+     #
+     #  Redistributions of source code must retain the above copyright notice,
+     # this list of conditions and the following disclaimer.
+     #  Redistributions in binary form must reproduce the above
+     # copyright notice, this list of conditions and the following
+     # disclaimer in the documentation and/or other materials provided with
+     # the distribution.
+     #  Neither the name of  Google Inc. nor the names of its
+     # contributors may be used to endorse or promote products derived from
+     # this software without specific prior written permission.
+     #
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+     # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+     # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+     # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+     # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+     # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+     #
+     #
+     #  The word list in cjdict.txt are generated by combining three word lists
+     # listed below with further processing for compound word breaking. The
+     # frequency is generated with an iterative training against Google web
+     # corpora.
+     #
+     #  * Libtabe (Chinese)
+     #    - https://sourceforge.net/project/?group_id=1519
+     #    - Its license terms and conditions are shown below.
+     #
+     #  * IPADIC (Japanese)
+     #    - http://chasen.aist-nara.ac.jp/chasen/distribution.html
+     #    - Its license terms and conditions are shown below.
+     #
+     #  ---------COPYING.libtabe ---- BEGIN--------------------
+     #
+     #  /*
+     #   * Copyright (c) 1999 TaBE Project.
+     #   * Copyright (c) 1999 Pai-Hsiang Hsiao.
+     #   * All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the TaBE Project nor the names of its
+     #   *   contributors may be used to endorse or promote products derived
+     #   *   from this software without specific prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  /*
+     #   * Copyright (c) 1999 Computer Systems and Communication Lab,
+     #   *                    Institute of Information Science, Academia
+     #       *                    Sinica. All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the Computer Systems and Communication Lab
+     #   *   nor the names of its contributors may be used to endorse or
+     #   *   promote products derived from this software without specific
+     #   *   prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  Copyright 1996 Chih-Hao Tsai @ Beckman Institute,
+     #      University of Illinois
+     #  c-tsai4@uiuc.edu  http://casper.beckman.uiuc.edu/~c-tsai4
+     #
+     #  ---------------COPYING.libtabe-----END--------------------------------
+     #
+     #
+     #  ---------------COPYING.ipadic-----BEGIN-------------------------------
+     #
+     #  Copyright 2000, 2001, 2002, 2003 Nara Institute of Science
+     #  and Technology.  All Rights Reserved.
+     #
+     #  Use, reproduction, and distribution of this software is permitted.
+     #  Any copy of this software, whether in its original form or modified,
+     #  must include both the above copyright notice and the following
+     #  paragraphs.
+     #
+     #  Nara Institute of Science and Technology (NAIST),
+     #  the copyright holders, disclaims all warranties with regard to this
+     #  software, including all implied warranties of merchantability and
+     #  fitness, in no event shall NAIST be liable for
+     #  any special, indirect or consequential damages or any damages
+     #  whatsoever resulting from loss of use, data or profits, whether in an
+     #  action of contract, negligence or other tortuous action, arising out
+     #  of or in connection with the use or performance of this software.
+     #
+     #  A large portion of the dictionary entries
+     #  originate from ICOT Free Software.  The following conditions for ICOT
+     #  Free Software applies to the current dictionary as well.
+     #
+     #  Each User may also freely distribute the Program, whether in its
+     #  original form or modified, to any third party or parties, PROVIDED
+     #  that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear
+     #  on, or be attached to, the Program, which is distributed substantially
+     #  in the same form as set out herein and that such intended
+     #  distribution, if actually made, will neither violate or otherwise
+     #  contravene any of the laws and regulations of the countries having
+     #  jurisdiction over the User or the intended distribution itself.
+     #
+     #  NO WARRANTY
+     #
+     #  The program was produced on an experimental basis in the course of the
+     #  research and development conducted during the project and is provided
+     #  to users as so produced on an experimental basis.  Accordingly, the
+     #  program is provided without any warranty whatsoever, whether express,
+     #  implied, statutory or otherwise.  The term "warranty" used herein
+     #  includes, but is not limited to, any warranty of the quality,
+     #  performance, merchantability and fitness for a particular purpose of
+     #  the program and the nonexistence of any infringement or violation of
+     #  any right of any third party.
+     #
+     #  Each user of the program will agree and understand, and be deemed to
+     #  have agreed and understood, that there is no warranty whatsoever for
+     #  the program and, accordingly, the entire risk arising from or
+     #  otherwise connected with the program is assumed by the user.
+     #
+     #  Therefore, neither ICOT, the copyright holder, or any other
+     #  organization that participated in or was otherwise related to the
+     #  development of the program and their respective officials, directors,
+     #  officers and other employees shall be held liable for any and all
+     #  damages, including, without limitation, general, special, incidental
+     #  and consequential damages, arising out of or otherwise in connection
+     #  with the use or inability to use the program or any product, material
+     #  or result produced or otherwise obtained by using the program,
+     #  regardless of whether they have been advised of, or otherwise had
+     #  knowledge of, the possibility of such damages at any time during the
+     #  project or thereafter.  Each user will be deemed to have agreed to the
+     #  foregoing by his or her commencement of use of the program.  The term
+     #  "use" as used herein includes, but is not limited to, the use,
+     #  modification, copying and distribution of the program and the
+     #  production of secondary products from the program.
+     #
+     #  In the case where the program, whether in its original form or
+     #  modified, was distributed or delivered to or received by a user from
+     #  any person, organization or entity other than ICOT, unless it makes or
+     #  grants independently of ICOT any specific warranty to the user in
+     #  writing, such person, organization or entity, will also be exempted
+     #  from and not be held liable to the user for any such damages as noted
+     #  above as far as the program is concerned.
+     #
+     #  ---------------COPYING.ipadic-----END----------------------------------
+
+    3. Lao Word Break Dictionary Data (laodict.txt)
+
+     #  Copyright (c) 2013 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     # Project: http://code.google.com/p/lao-dictionary/
+     # Dictionary: http://lao-dictionary.googlecode.com/git/Lao-Dictionary.txt
+     # License: http://lao-dictionary.googlecode.com/git/Lao-Dictionary-LICENSE.txt
+     #              (copied below)
+     #
+     #  This file is derived from the above dictionary, with slight
+     #  modifications.
+     #  ----------------------------------------------------------------------
+     #  Copyright (C) 2013 Brian Eugene Wilson, Robert Martin Campbell.
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification,
+     #  are permitted provided that the following conditions are met:
+     #
+     #
+     # Redistributions of source code must retain the above copyright notice, this
+     #  list of conditions and the following disclaimer. Redistributions in
+     #  binary form must reproduce the above copyright notice, this list of
+     #  conditions and the following disclaimer in the documentation and/or
+     #  other materials provided with the distribution.
+     #
+     #
+     # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
+     # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     # OF THE POSSIBILITY OF SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    4. Burmese Word Break Dictionary Data (burmesedict.txt)
+
+     #  Copyright (c) 2014 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     #  This list is part of a project hosted at:
+     #    github.com/kanyawtech/myanmar-karen-word-lists
+     #
+     #  --------------------------------------------------------------------------
+     #  Copyright (c) 2013, LeRoy Benjamin Sharon
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification, are permitted provided that the following conditions
+     #  are met: Redistributions of source code must retain the above
+     #  copyright notice, this list of conditions and the following
+     #  disclaimer.  Redistributions in binary form must reproduce the
+     #  above copyright notice, this list of conditions and the following
+     #  disclaimer in the documentation and/or other materials provided
+     #  with the distribution.
+     #
+     #    Neither the name Myanmar Karen Word Lists, nor the names of its
+     #    contributors may be used to endorse or promote products derived
+     #    from this software without specific prior written permission.
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     #  CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     #  INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     #  MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     #  DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
+     #  BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+     #  EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+     #  TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+     #  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+     #  ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
+     #  TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
+     #  THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+     #  SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    5. Time Zone Database
+
+      ICU uses the public domain data and code derived from Time Zone
+    Database for its time zone support. The ownership of the TZ database
+    is explained in BCP 175: Procedure for Maintaining the Time Zone
+    Database section 7.
+
+     # 7.  Database Ownership
+     #
+     #    The TZ database itself is not an IETF Contribution or an IETF
+     #    document.  Rather it is a pre-existing and regularly updated work
+     #    that is in the public domain, and is intended to remain in the
+     #    public domain.  Therefore, BCPs 78 [RFC5378] and 79 [RFC3979] do
+     #    not apply to the TZ Database or contributions that individuals make
+     #    to it.  Should any claims be made and substantiated against the TZ
+     #    Database, the organization that is providing the IANA
+     #    Considerations defined in this RFC, under the memorandum of
+     #    understanding with the IETF, currently ICANN, may act in accordance
+     #    with all competent court orders.  No ownership claims will be made
+     #    by ICANN or the IETF Trust on the database or the code.  Any person
+     #    making a contribution to the database or code waives all rights to
+     #    future claims in that contribution or in the TZ Database.
+
+    6. Google double-conversion
+
+    Copyright 2006-2011, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- libuv, located at deps/uv, is licensed as follows:
+  """
+    libuv is licensed for use as follows:
+
+    ====
+    Copyright (c) 2015-present libuv project contributors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+    ====
+
+    This license applies to parts of libuv originating from the
+    https://github.com/joyent/libuv repository:
+
+    ====
+
+    Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+
+    ====
+
+    This license applies to all parts of libuv that are not externally
+    maintained libraries.
+
+    The externally maintained libraries used by libuv are:
+
+      - tree.h (from FreeBSD), copyright Niels Provos. Two clause BSD license.
+
+      - inet_pton and inet_ntop implementations, contained in src/inet.c, are
+        copyright the Internet Systems Consortium, Inc., and licensed under the ISC
+        license.
+
+      - stdint-msvc2008.h (from msinttypes), copyright Alexander Chemeris. Three
+        clause BSD license.
+
+      - pthread-fixes.c, copyright Google Inc. and Sony Mobile Communications AB.
+        Three clause BSD license.
+
+      - android-ifaddrs.h, android-ifaddrs.c, copyright Berkeley Software Design
+        Inc, Kenneth MacKay and Emergya (Cloud4all, FP7/2007-2013, grant agreement
+        n° 289016). Three clause BSD license.
+  """
+
+- llhttp, located at deps/llhttp, is licensed as follows:
+  """
+    This software is licensed under the MIT License.
+
+    Copyright Fedor Indutny, 2018.
+
+    Permission is hereby granted, free of charge, to any person obtaining a
+    copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to permit
+    persons to whom the Software is furnished to do so, subject to the
+    following conditions:
+
+    The above copyright notice and this permission notice shall be included
+    in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+    OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+    NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+    DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+    OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+    USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- OpenSSL, located at deps/openssl, is licensed as follows:
+  """
+    Copyright (c) 1998-2019 The OpenSSL Project.  All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    1. Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+    2. Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+
+    3. All advertising materials mentioning features or use of this
+    software must display the following acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
+
+    4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
+    endorse or promote products derived from this software without
+    prior written permission. For written permission, please contact
+    openssl-core@openssl.org.
+
+    5. Products derived from this software may not be called "OpenSSL"
+    nor may "OpenSSL" appear in their names without prior written
+    permission of the OpenSSL Project.
+
+    6. Redistributions of any form whatsoever must retain the following
+    acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
+
+    THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
+    EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+    PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
+    ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+    NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+    LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+    HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+    STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+    ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+    OF THE POSSIBILITY OF SUCH DAMAGE.
+    ====================================================================
+
+    This product includes cryptographic software written by Eric Young
+    (eay@cryptsoft.com).  This product includes software written by Tim
+    Hudson (tjh@cryptsoft.com).
+  """
+
+- Punycode.js, located at lib/punycode.js, is licensed as follows:
+  """
+    Copyright Mathias Bynens <https://mathiasbynens.be/>
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- V8, located at deps/v8, is licensed as follows:
+  """
+    This license applies to all parts of V8 that are not externally
+    maintained libraries.  The externally maintained libraries used by V8
+    are:
+
+      - PCRE test suite, located in
+        test/mjsunit/third_party/regexp-pcre/regexp-pcre.js.  This is based on the
+        test suite from PCRE-7.3, which is copyrighted by the University
+        of Cambridge and Google, Inc.  The copyright notice and license
+        are embedded in regexp-pcre.js.
+
+      - Layout tests, located in test/mjsunit/third_party/object-keys.  These are
+        based on layout tests from webkit.org which are copyrighted by
+        Apple Computer, Inc. and released under a 3-clause BSD license.
+
+      - Strongtalk assembler, the basis of the files assembler-arm-inl.h,
+        assembler-arm.cc, assembler-arm.h, assembler-ia32-inl.h,
+        assembler-ia32.cc, assembler-ia32.h, assembler-x64-inl.h,
+        assembler-x64.cc, assembler-x64.h, assembler-mips-inl.h,
+        assembler-mips.cc, assembler-mips.h, assembler.cc and assembler.h.
+        This code is copyrighted by Sun Microsystems Inc. and released
+        under a 3-clause BSD license.
+
+      - Valgrind client API header, located at src/third_party/valgrind/valgrind.h
+        This is released under the BSD license.
+
+      - The Wasm C/C++ API headers, located at third_party/wasm-api/wasm.{h,hh}
+        This is released under the Apache license. The API's upstream prototype
+        implementation also formed the basis of V8's implementation in
+        src/wasm/c-api.cc.
+
+    These libraries have their own licenses; we recommend you read them,
+    as their terms may differ from the terms below.
+
+    Further license information can be found in LICENSE files located in
+    sub-directories.
+
+    Copyright 2014, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- SipHash, located at deps/v8/src/third_party/siphash, is licensed as follows:
+  """
+    SipHash reference C implementation
+
+    Copyright (c) 2016 Jean-Philippe Aumasson <jeanphilippe.aumasson@gmail.com>
+
+    To the extent possible under law, the author(s) have dedicated all
+    copyright and related and neighboring rights to this software to the public
+    domain worldwide. This software is distributed without any warranty.
+  """
+
+- zlib, located at deps/zlib, is licensed as follows:
+  """
+    zlib.h -- interface of the 'zlib' general purpose compression library
+    version 1.2.11, January 15th, 2017
+
+    Copyright (C) 1995-2017 Jean-loup Gailly and Mark Adler
+
+    This software is provided 'as-is', without any express or implied
+    warranty.  In no event will the authors be held liable for any damages
+    arising from the use of this software.
+
+    Permission is granted to anyone to use this software for any purpose,
+    including commercial applications, and to alter it and redistribute it
+    freely, subject to the following restrictions:
+
+    1. The origin of this software must not be misrepresented; you must not
+    claim that you wrote the original software. If you use this software
+    in a product, an acknowledgment in the product documentation would be
+    appreciated but is not required.
+    2. Altered source versions must be plainly marked as such, and must not be
+    misrepresented as being the original software.
+    3. This notice may not be removed or altered from any source distribution.
+
+    Jean-loup Gailly        Mark Adler
+    jloup@gzip.org          madler@alumni.caltech.edu
+  """
+
+- npm, located at deps/npm, is licensed as follows:
+  """
+    The npm application
+    Copyright (c) npm, Inc. and Contributors
+    Licensed on the terms of The Artistic License 2.0
+
+    Node package dependencies of the npm application
+    Copyright (c) their respective copyright owners
+    Licensed on their respective license terms
+
+    The npm public registry at https://registry.npmjs.org
+    and the npm website at https://www.npmjs.com
+    Operated by npm, Inc.
+    Use governed by terms published on https://www.npmjs.com
+
+    "Node.js"
+    Trademark Joyent, Inc., https://joyent.com
+    Neither npm nor npm, Inc. are affiliated with Joyent, Inc.
+
+    The Node.js application
+    Project of Node Foundation, https://nodejs.org
+
+    The npm Logo
+    Copyright (c) Mathias Pettersson and Brian Hammond
+
+    "Gubblebum Blocky" typeface
+    Copyright (c) Tjarda Koster, https://jelloween.deviantart.com
+    Used with permission
+
+    --------
+
+    The Artistic License 2.0
+
+    Copyright (c) 2000-2006, The Perl Foundation.
+
+    Everyone is permitted to copy and distribute verbatim copies
+    of this license document, but changing it is not allowed.
+
+    Preamble
+
+    This license establishes the terms under which a given free software
+    Package may be copied, modified, distributed, and/or redistributed.
+    The intent is that the Copyright Holder maintains some artistic
+    control over the development of that Package while still keeping the
+    Package available as open source and free software.
+
+    You are always permitted to make arrangements wholly outside of this
+    license directly with the Copyright Holder of a given Package.  If the
+    terms of this license do not permit the full use that you propose to
+    make of the Package, you should contact the Copyright Holder and seek
+    a different licensing arrangement.
+
+    Definitions
+
+        "Copyright Holder" means the individual(s) or organization(s)
+        named in the copyright notice for the entire Package.
+
+        "Contributor" means any party that has contributed code or other
+        material to the Package, in accordance with the Copyright Holder's
+        procedures.
+
+        "You" and "your" means any person who would like to copy,
+        distribute, or modify the Package.
+
+        "Package" means the collection of files distributed by the
+        Copyright Holder, and derivatives of that collection and/or of
+        those files. A given Package may consist of either the Standard
+        Version, or a Modified Version.
+
+        "Distribute" means providing a copy of the Package or making it
+        accessible to anyone else, or in the case of a company or
+        organization, to others outside of your company or organization.
+
+        "Distributor Fee" means any fee that you charge for Distributing
+        this Package or providing support for this Package to another
+        party.  It does not mean licensing fees.
+
+        "Standard Version" refers to the Package if it has not been
+        modified, or has been modified only in ways explicitly requested
+        by the Copyright Holder.
+
+        "Modified Version" means the Package, if it has been changed, and
+        such changes were not explicitly requested by the Copyright
+        Holder.
+
+        "Original License" means this Artistic License as Distributed with
+        the Standard Version of the Package, in its current version or as
+        it may be modified by The Perl Foundation in the future.
+
+        "Source" form means the source code, documentation source, and
+        configuration files for the Package.
+
+        "Compiled" form means the compiled bytecode, object code, binary,
+        or any other form resulting from mechanical transformation or
+        translation of the Source form.
+
+    Permission for Use and Modification Without Distribution
+
+    (1)  You are permitted to use the Standard Version and create and use
+    Modified Versions for any purpose without restriction, provided that
+    you do not Distribute the Modified Version.
+
+    Permissions for Redistribution of the Standard Version
+
+    (2)  You may Distribute verbatim copies of the Source form of the
+    Standard Version of this Package in any medium without restriction,
+    either gratis or for a Distributor Fee, provided that you duplicate
+    all of the original copyright notices and associated disclaimers.  At
+    your discretion, such verbatim copies may or may not include a
+    Compiled form of the Package.
+
+    (3)  You may apply any bug fixes, portability changes, and other
+    modifications made available from the Copyright Holder.  The resulting
+    Package will still be considered the Standard Version, and as such
+    will be subject to the Original License.
+
+    Distribution of Modified Versions of the Package as Source
+
+    (4)  You may Distribute your Modified Version as Source (either gratis
+    or for a Distributor Fee, and with or without a Compiled form of the
+    Modified Version) provided that you clearly document how it differs
+    from the Standard Version, including, but not limited to, documenting
+    any non-standard features, executables, or modules, and provided that
+    you do at least ONE of the following:
+
+        (a)  make the Modified Version available to the Copyright Holder
+        of the Standard Version, under the Original License, so that the
+        Copyright Holder may include your modifications in the Standard
+        Version.
+
+        (b)  ensure that installation of your Modified Version does not
+        prevent the user installing or running the Standard Version. In
+        addition, the Modified Version must bear a name that is different
+        from the name of the Standard Version.
+
+        (c)  allow anyone who receives a copy of the Modified Version to
+        make the Source form of the Modified Version available to others
+        under
+
+            (i)  the Original License or
+
+            (ii)  a license that permits the licensee to freely copy,
+            modify and redistribute the Modified Version using the same
+            licensing terms that apply to the copy that the licensee
+            received, and requires that the Source form of the Modified
+            Version, and of any works derived from it, be made freely
+            available in that license fees are prohibited but Distributor
+            Fees are allowed.
+
+    Distribution of Compiled Forms of the Standard Version
+    or Modified Versions without the Source
+
+    (5)  You may Distribute Compiled forms of the Standard Version without
+    the Source, provided that you include complete instructions on how to
+    get the Source of the Standard Version.  Such instructions must be
+    valid at the time of your distribution.  If these instructions, at any
+    time while you are carrying out such distribution, become invalid, you
+    must provide new instructions on demand or cease further distribution.
+    If you provide valid instructions or cease distribution within thirty
+    days after you become aware that the instructions are invalid, then
+    you do not forfeit any of your rights under this license.
+
+    (6)  You may Distribute a Modified Version in Compiled form without
+    the Source, provided that you comply with Section 4 with respect to
+    the Source of the Modified Version.
+
+    Aggregating or Linking the Package
+
+    (7)  You may aggregate the Package (either the Standard Version or
+    Modified Version) with other packages and Distribute the resulting
+    aggregation provided that you do not charge a licensing fee for the
+    Package.  Distributor Fees are permitted, and licensing fees for other
+    components in the aggregation are permitted. The terms of this license
+    apply to the use and Distribution of the Standard or Modified Versions
+    as included in the aggregation.
+
+    (8) You are permitted to link Modified and Standard Versions with
+    other works, to embed the Package in a larger work of your own, or to
+    build stand-alone binary or bytecode versions of applications that
+    include the Package, and Distribute the result without restriction,
+    provided the result does not expose a direct interface to the Package.
+
+    Items That are Not Considered Part of a Modified Version
+
+    (9) Works (including, but not limited to, modules and scripts) that
+    merely extend or make use of the Package, do not, by themselves, cause
+    the Package to be a Modified Version.  In addition, such works are not
+    considered parts of the Package itself, and are not subject to the
+    terms of this license.
+
+    General Provisions
+
+    (10)  Any use, modification, and distribution of the Standard or
+    Modified Versions is governed by this Artistic License. By using,
+    modifying or distributing the Package, you accept this license. Do not
+    use, modify, or distribute the Package, if you do not accept this
+    license.
+
+    (11)  If your Modified Version has been derived from a Modified
+    Version made by someone other than you, you are nevertheless required
+    to ensure that your Modified Version complies with the requirements of
+    this license.
+
+    (12)  This license does not grant you the right to use any trademark,
+    service mark, tradename, or logo of the Copyright Holder.
+
+    (13)  This license includes the non-exclusive, worldwide,
+    free-of-charge patent license to make, have made, use, offer to sell,
+    sell, import and otherwise transfer the Package with respect to any
+    patent claims licensable by the Copyright Holder that are necessarily
+    infringed by the Package. If you institute patent litigation
+    (including a cross-claim or counterclaim) against any party alleging
+    that the Package constitutes direct or contributory patent
+    infringement, then this Artistic License to you shall terminate on the
+    date that such litigation is filed.
+
+    (14)  Disclaimer of Warranty:
+    THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS
+    IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR
+    NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL
+    LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL
+    BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+    DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF
+    ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+    --------
+  """
+
+- GYP, located at tools/gyp, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- inspector_protocol, located at tools/inspector_protocol, is licensed as follows:
+  """
+    // Copyright 2016 The Chromium Authors. All rights reserved.
+    //
+    // Redistribution and use in source and binary forms, with or without
+    // modification, are permitted provided that the following conditions are
+    // met:
+    //
+    //    * Redistributions of source code must retain the above copyright
+    // notice, this list of conditions and the following disclaimer.
+    //    * Redistributions in binary form must reproduce the above
+    // copyright notice, this list of conditions and the following disclaimer
+    // in the documentation and/or other materials provided with the
+    // distribution.
+    //    * Neither the name of Google Inc. nor the names of its
+    // contributors may be used to endorse or promote products derived from
+    // this software without specific prior written permission.
+    //
+    // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- jinja2, located at tools/inspector_protocol/jinja2, is licensed as follows:
+  """
+    Copyright (c) 2009 by the Jinja Team, see AUTHORS for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+
+        * The names of the contributors may not be used to endorse or
+          promote products derived from this software without specific
+          prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- markupsafe, located at tools/inspector_protocol/markupsafe, is licensed as follows:
+  """
+    Copyright (c) 2010 by Armin Ronacher and contributors.  See AUTHORS
+    for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms of the software as well
+    as documentation, with or without modification, are permitted provided
+    that the following conditions are met:
+
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+
+    * The names of the contributors may not be used to endorse or
+      promote products derived from this software without specific
+      prior written permission.
+
+    THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+    CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
+    NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+    OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+    EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+    PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+    PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+    NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+    SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+    DAMAGE.
+  """
+
+- cpplint.py, located at tools/cpplint.py, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- ESLint, located at tools/node_modules/eslint, is licensed as follows:
+  """
+    Copyright JS Foundation and other contributors, https://js.foundation
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- babel-eslint, located at tools/node_modules/babel-eslint, is licensed as follows:
+  """
+    Copyright (c) 2014-2016 Sebastian McKenzie <sebmck@gmail.com>
+
+    MIT License
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- gtest, located at test/cctest/gtest, is licensed as follows:
+  """
+    Copyright 2008, Google Inc.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+        * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- nghttp2, located at deps/nghttp2, is licensed as follows:
+  """
+    The MIT License
+
+    Copyright (c) 2012, 2014, 2015, 2016 Tatsuhiro Tsujikawa
+    Copyright (c) 2012, 2014, 2015, 2016 nghttp2 contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- node-inspect, located at deps/node-inspect, is licensed as follows:
+  """
+    Copyright Node.js contributors. All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+  """
+
+- large_pages, located at src/large_pages, is licensed as follows:
+  """
+     Copyright (C) 2018 Intel Corporation
+
+     Permission is hereby granted, free of charge, to any person obtaining a copy
+     of this software and associated documentation files (the "Software"),
+     to deal in the Software without restriction, including without limitation
+     the rights to use, copy, modify, merge, publish, distribute, sublicense,
+     and/or sell copies of the Software, and to permit persons to whom
+     the Software is furnished to do so, subject to the following conditions:
+
+     The above copyright notice and this permission notice shall be included
+     in all copies or substantial portions of the Software.
+
+     THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+     OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+     FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
+     THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES
+     OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+     ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
+     OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- caja, located at lib/internal/freeze_intrinsics.js, is licensed as follows:
+  """
+     Adapted from SES/Caja - Copyright (C) 2011 Google Inc.
+     Copyright (C) 2018 Agoric
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+  """
+
+- brotli, located at deps/brotli, is licensed as follows:
+  """
+    Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- HdrHistogram, located at deps/histogram, is licensed as follows:
+  """
+    The code in this repository code was Written by Gil Tene, Michael Barker,
+    and Matt Warren, and released to the public domain, as explained at
+    http://creativecommons.org/publicdomain/zero/1.0/
+
+    For users of this code who wish to consume it under the "BSD" license
+    rather than under the public domain or CC0 contribution text mentioned
+    above, the code found under this directory is *also* provided under the
+    following license (commonly referred to as the BSD 2-Clause License). This
+    license does not detract from the above stated release of the code into
+    the public domain, and simply represents an additional license granted by
+    the Author.
+
+    -----------------------------------------------------------------------------
+    ** Beginning of "BSD 2-Clause License" text. **
+
+     Copyright (c) 2012, 2013, 2014 Gil Tene
+     Copyright (c) 2014 Michael Barker
+     Copyright (c) 2014 Matt Warren
+     All rights reserved.
+
+     Redistribution and use in source and binary forms, with or without
+     modification, are permitted provided that the following conditions are met:
+
+     1. Redistributions of source code must retain the above copyright notice,
+        this list of conditions and the following disclaimer.
+
+     2. Redistributions in binary form must reproduce the above copyright notice,
+        this list of conditions and the following disclaimer in the documentation
+        and/or other materials provided with the distribution.
+
+     THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+     AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+     IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+     ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+     LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+     INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+     CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+     THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- node-heapdump, located at src/heap_utils.cc, is licensed as follows:
+  """
+    ISC License
+
+    Copyright (c) 2012, Ben Noordhuis <info@bnoordhuis.nl>
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    === src/compat.h src/compat-inl.h ===
+
+    ISC License
+
+    Copyright (c) 2014, StrongLoop Inc.
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- rimraf, located at lib/internal/fs/rimraf.js, is licensed as follows:
+  """
+    The ISC License
+
+    Copyright (c) Isaac Z. Schlueter and Contributors
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+    IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- uvwasi, located at deps/uvwasi, is licensed as follows:
+  """
+    MIT License
+
+    Copyright (c) 2019 Colin Ihrig and Contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in all
+    copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+    SOFTWARE.
+  """
+
+
+
+
+

MICROSOFT SOFTWARE LICENSE TERMS

+
+
+
MICROSOFT VISUAL STUDIO CODE
+
+These license terms are an agreement between you and Microsoft Corporation (or based on where you live, one of its affiliates). They apply to the software named above. The terms also apply to any Microsoft services or updates for the software, except to the extent those have different terms.
+
+IF YOU COMPLY WITH THESE LICENSE TERMS, YOU HAVE THE RIGHTS BELOW.
+
+    1. INSTALLATION AND USE RIGHTS.
+        a. General. You may use any number of copies of the software to develop and test your applications, including deployment within your internal corporate network.
+        b. Demo use. The uses permitted above include use of the software in demonstrating your applications.
+        c. Third Party Components. The software may include third party components with separate legal notices or governed by other agreements, as may be described in the ThirdPartyNotices file accompanying the software.
+        d. Extensions. The software gives you the option to download other Microsoft and third party software packages from our extension marketplace or package managers. Those packages are under their own licenses, and not this agreement. Microsoft does not distribute, license or provide any warranties for any of the third party packages. By accessing or using our extension marketplace, you agree to the extension marketplace terms located at https://aka.ms/vsmarketplace-ToU.
+    2. DATA.
+        a. Data Collection. The software may collect information about you and your use of the software, and send that to Microsoft. Microsoft may use this information to provide services and improve our products and services. You may opt-out of many of these scenarios, but not all, as described in the product documentation located at https://code.visualstudio.com/docs/supporting/faq#_how-to-disable-telemetry-reporting. There may also be some features in the software that may enable you and Microsoft to collect data from users of your applications. If you use these features, you must comply with applicable law, including providing appropriate notices to users of your applications together with Microsoft’s privacy statement. Our privacy statement is located at https://go.microsoft.com/fwlink/?LinkID=824704. You can learn more about data collection and use in the help documentation and our privacy statement. Your use of the software operates as your consent to these practices.
+        b. Processing of Personal Data. To the extent Microsoft is a processor or subprocessor of personal data in connection with the software, Microsoft makes the commitments in the European Union General Data Protection Regulation Terms of the Online Services Terms to all customers effective May 25, 2018, at https://go.microsoft.com/?linkid=9840733.
+    3. UPDATES. The software may periodically check for updates and download and install them for you. You may obtain updates only from Microsoft or authorized sources. Microsoft may need to update your system to provide you with updates. You agree to receive these automatic updates without any additional notice. Updates may not include or support all existing software features, services, or peripheral devices. If you do not want automatic updates, you may turn them off by following the instructions in the documentation at https://go.microsoft.com/fwlink/?LinkID=616397.
+    4. FEEDBACK. If you give feedback about the software to Microsoft, you give to Microsoft, without charge, the right to use, share and commercialize your feedback in any way and for any purpose. You will not give feedback that is subject to a license that requires Microsoft to license its software or documentation to third parties because we include your feedback in them. These rights survive this agreement.
+    5. SCOPE OF LICENSE. This license applies to the Visual Studio Code product. Source code for Visual Studio Code is available at https://github.com/Microsoft/vscode under the MIT license agreement. The software is licensed, not sold. This agreement only gives you some rights to use the software. Microsoft reserves all other rights. Unless applicable law gives you more rights despite this limitation, you may use the software only as expressly permitted in this agreement. In doing so, you must comply with any technical limitations in the software that only allow you to use it in certain ways. You may not
+        reverse engineer, decompile or disassemble the software, or otherwise attempt to derive the source code for the software except and solely to the extent required by third party licensing terms governing use of certain open source components that may be included in the software;
+        remove, minimize, block or modify any notices of Microsoft or its suppliers in the software;
+        use the software in any way that is against the law;
+        share, publish, rent or lease the software, or provide the software as a stand-alone offering for others to use.
+    6. SUPPORT SERVICES. Because this software is “as is,” we may not provide support services for it.
+    7. ENTIRE AGREEMENT. This agreement, and the terms for supplements, updates, Internet-based services and support services that you use, are the entire agreement for the software and support services.
+    8. EXPORT RESTRICTIONS. You must comply with all domestic and international export laws and regulations that apply to the software, which include restrictions on destinations, end-users, and end use. For further information on export restrictions, see https://www.microsoft.com/exporting.
+    9. APPLICABLE LAW. If you acquired the software in the United States, Washington law applies to interpretation of and claims for breach of this agreement, and the laws of the state where you live apply to all other claims. If you acquired the software in any other country, its laws apply.
+    10. CONSUMER RIGHTS; REGIONAL VARIATIONS. This agreement describes certain legal rights. You may have other rights, including consumer rights, under the laws of your state or country. Separate and apart from your relationship with Microsoft, you may also have rights with respect to the party from which you acquired the software. This agreement does not change those other rights if the laws of your state or country do not permit it to do so. For example, if you acquired the software in one of the below regions, or mandatory country law applies, then the following provisions apply to you:
+        a. Australia. You have statutory guarantees under the Australian Consumer Law and nothing in this agreement is intended to affect those rights.
+        b. Canada. If you acquired this software in Canada, you may stop receiving updates by turning off the automatic update feature, disconnecting your device from the Internet (if and when you re-connect to the Internet, however, the software will resume checking for and installing updates), or uninstalling the software. The product documentation, if any, may also specify how to turn off updates for your specific device or software.
+        c. Germany and Austria.
+            Warranty. The properly licensed software will perform substantially as described in any Microsoft materials that accompany the software. However, Microsoft gives no contractual guarantee in relation to the licensed software.
+            Limitation of Liability. In case of intentional conduct, gross negligence, claims based on the Product Liability Act, as well as, in case of death or personal or physical injury, Microsoft is liable according to the statutory law.
+
+        Subject to the foregoing clause (ii), Microsoft will only be liable for slight negligence if Microsoft is in breach of such material contractual obligations, the fulfillment of which facilitate the due performance of this agreement, the breach of which would endanger the purpose of this agreement and the compliance with which a party may constantly trust in (so-called "cardinal obligations"). In other cases of slight negligence, Microsoft will not be liable for slight negligence.
+    11. DISCLAIMER OF WARRANTY. The software is licensed “as-is.” You bear the risk of using it. Microsoft gives no express warranties, guarantees or conditions. To the extent permitted under your local laws, Microsoft excludes the implied warranties of merchantability, fitness for a particular purpose and non-infringement.
+
+    12. LIMITATION ON AND EXCLUSION OF DAMAGES. You can recover from Microsoft and its suppliers only direct damages up to U.S. $5.00. You cannot recover any other damages, including consequential, lost profits, special, indirect or incidental damages.
+
+    This limitation applies to (a) anything related to the software, services, content (including code) on third party Internet sites, or third party applications; and (b) claims for breach of contract, breach of warranty, guarantee or condition, strict liability, negligence, or other tort to the extent permitted by applicable law.
+
+    It also applies even if Microsoft knew or should have known about the possibility of the damages. The above limitation or exclusion may not apply to you because your state or country may not allow the exclusion or limitation of incidental, consequential or other damages.
+
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/docker.html b/docs/devonfw.github.io/1.0/ide.wiki/docker.html new file mode 100644 index 00000000..372d84da --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/docker.html @@ -0,0 +1,426 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

Docker

+
+
+

The Docker commandlet allows you to install and use Docker. +On Windows, WSL 2 (Windows Subsystem for Linux) has to be installed properly as a prerequisite.

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon docker «args») are explained by the following table:

+
+
+
Usage of devon docker
+

|=== +|Argument(s) |Meaning +|setup |setup Docker (install and verify) as per above flow. +|«args» |call docker with the specified arguments. Call docker help for details or use docker directly as preferred. («args») +|===

+
+
+
+
+

setup

+
+
+

The Docker commandlet will install Docker automatically. +Please note that besides the sandbox concept of devonfw-ide this is a global installation on your system. +When uninstalling devonfw-ide, you may have to manually uninstall Docker and Kubernetes if you do not need it anymore.

+
+
+
+
+

requirements

+
+
+

Running Docker and especially Kubernetes on your machine in the background will require quite some resources. +This will allocate at least 2GB of additional RAM.

+
+
+

You will need at least 8GB of total RAM while we recommend to use 16GB+.

+
+
+

You may also tune and scale it to your needs. +When using Docker Desktop (Windows or MacOS) simply go to the resources tab in the settings. +It will depend on your usage frequency if you want to have it running in the background all the time. +This is a balance between resource utilization and convenience. +If you use Docker and Kubernetes on your local machine on a daily basis this makes sense.

+
+
+

In case you only use Docker rarely, you can save resources by stopping it when not needed after it has been installed.

+
+
+
+
+

Windows and macOS

+
+
+

To enable or disable autostart, you can launch Docker Desktop on Windows or MacOS and go to the Preferences (gear icon in the title bar). Then in the General tab you can check or uncheck the option Start Docker Desktop when you login (see also here). When autostart is disabled and you launch Docker Desktop it will notice and ask you to start the service or do this automatically for you. +On Windows you can also manually tweak this:

+
+
+
    +
  • +

    Hit [windows][r]

    +
  • +
  • +

    Enter services.msc

    +
  • +
  • +

    Confirm with OK

    +
  • +
  • +

    In the services app search for the Docker Desktop Service in the list and select it.

    +
  • +
  • +

    Now you can start or stop the service by clicking on the according link text.

    +
  • +
  • +

    Also when right clicking on Docker Desktop Service and selecting Options from the context-menu, you can change the start type to automatic or manual.

    +
  • +
+
+
+
+
+

== Mac M1

+
+
+

In case you have a new Mac with M1 CPU, we automatically download and install the corresponding ARM version of Docker Desktop for macOS. +However, if you use Docker and search for images you may end up with errors like:

+
+
+
+
docker: no matching manifest for linux/arm64/v8 in the manifest list entries.
+
+
+
+

So with M1 CPU you may need to add --platform linux/x86_64 as an option to your Docker command to find the expected container image.

+
+
+
+
+

Linux

+
+
+

There is no Docker Desktop for Linux. +As Docker initially comes from the Linux world, it is easy to set it up on a Linux machine and use it from the commandline. +Therefore we do not install a GUI for you in case you are a Linux user. +In case you need a GUI for Docker and Kubernetes on Linux you can choose from the following options:

+
+
+ +
+
+
+
+

usage

+
+
+

Once installed via setup, you can run Docker directly from any shell of your OS directly. +Run docker help to get started and use the online documentations and resources on the web to get familiar with Docker. +It is not our intention to repeat this here.

+
+
+

Please note that the docker commandlet is a command wrapper.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/eclipse.html b/docs/devonfw.github.io/1.0/ide.wiki/eclipse.html new file mode 100644 index 00000000..d8dd6c95 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/eclipse.html @@ -0,0 +1,420 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

eclipse

+
+
+

The eclipse commandlet allows to install, configure, and launch the Eclipse IDE. +To launch eclipse for your current workspace and devonfw-ide installation simply run: +devon eclipse

+
+
+

You may also supply additional arguments as devon eclipse «args». These are explained by the following table:

+
+
+
Usage of devon eclipse
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then to command will be invoked for each workspace +|setup |setup Eclipse (install or update) +|add-plugin «id» [«url»]|install an additional plugin +|run |launch Eclipse (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add |reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+

There are variables that can be used for Eclipse. These are explained by the following table:

+
+
+
Variables of devonfw-ide for Eclipse
+

|== == == == == == == == == == == = +|Variable|Meaning +|ECLIPSE_VERSION|The version of the tool Eclipse to install and use. +|ECLIPSE_EDITION_TYPE|The edition of the tool Eclipse to install and use. You can choose between Java for standard edition or JEE for enterprise edition. +|*EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|== == == == == == == == == == == =

+
+
+
+
+

plugins

+
+
+

To be productive with Eclipse you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder eclipse/plugins (click on this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example tmterminal.properties:

+
+
+
+
plugin_url=http://download.eclipse.org/tm/terminal/marketplace
+plugin_id=org.eclipse.tm.terminal.feature.feature.group,org.eclipse.tm.terminal.view.feature.feature.group,org.eclipse.tm.terminal.control.feature.feature.group,org.eclipse.tm.terminal.connector.ssh.feature.feature.group,org.eclipse.tm.terminal.connector.telnet.feature.feature.group
+plugin_active=true
+
+
+
+

The variables are defined as follows:

+
+
+
    +
  • +

    plugin_url defines the URL of the Eclipse update site of the plugin

    +
  • +
  • +

    plugin_id defines the feature group ID(s) to install. To install multiple features/plugins provide a comma-separated list of IDs. If you want to customize devonfw-ide with new plugins you can first install them manually and then go to About Eclipse > Installation Details then you can filter for your newly installed plugin and find the values in the Id column. Copy & paste them from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise, developers can still install the plugin manually via devon eclipse add-plugin «plugin-name» from the config file settings/eclipse/plugins/«plugin-name».properties. See the settings/eclipse/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. This e.g. applies for devstyle that allows a real dark mode for eclipse and tunes the theming and layout of Eclipse in general. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually.

+
+
+

As the maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
+

legacy plugin config

+
+
+

For downward compatibility we still support the deprecated legacy configuration if the folder settings/eclipse/plugins does not exist: +The project configuration typically defines the plugins that will be installed via ECLIPSE_PLUGINS variable. Otherwise defaults from this eclipse commandlet will apply. +Be aware that this comes at your own risk and sometimes plugins can conflict and break your IDE.

+
+
+

Here is an example how a project can configure the plugins in its devon.properties inside the settings:

+
+
+
+
ECLIPSE_PLUGINS=("AnyEditTools.feature.group" "https://raw.githubusercontent.com/iloveeclipse/plugins/latest/" "com.ess.regexutil.feature.group" "http://regex-util.sourceforge.net/update/")
+
+
+
+

For the above listed plugins you can also use the short form:

+
+
+
+
ECLIPSE_PLUGINS=("anyedit" "" "regexutil" "")
+
+
+
+

Of course you may also mix plugin IDs with fully qualified plugins.

+
+
+
+
+

dictionary

+
+
+

Eclipse already comes with a built-in spellchecker. This is very helpful when writing comments. The default settings of devonfw-ide ship with a project specific dictionary file and corresponding configurations to enable spellchecking and configuring this dictionary. +When typing JavaDoc, inline comments or other texts the spellchecker will underline unknown words in red. +If your cursor is located at such a word you can hit [Ctrl][1] to get a context menu with additional options. +There you can either choose similar correct words to correct a typo or you may even add the word (maybe a new business term) to your local dictionary.

+
+
+
+"Eclipse spellchecker” +
+
+
+

In the latter case, you should commit the changes to your settings so that it will be available to your entire team. +For further details about committing changes to the settings please consult the admin usage.

+
+
+
+
+

non-english dictionary

+
+
+

In case your project has to write documentation or text in languages other than English, you might want to prefill your project dictionary for that language. +Here we collect a list of such dictionaries that you can download and merge into your project dictionary:

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/features.html b/docs/devonfw.github.io/1.0/ide.wiki/features.html new file mode 100644 index 00000000..cb23a800 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/features.html @@ -0,0 +1,472 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Features

+
+
+

Every developer needs great tools to work efficiently. Setting up these tools manually can be tedious and error-prone. Furthermore, some projects may require different versions and configurations of such tools. Especially configurations like code-formatters should be consistent within a project to avoid diff-wars.

+
+
+

The devonfw-ide will solve these issues. Here are the features you will find through devonfw-ide:

+
+
+
    +
  • +

    Efficient
    +Set up your IDE within minutes tailored for the requirements of your project.

    +
  • +
  • +

    Automated
    +Automate the setup and update, avoid manual steps and mistakes.

    +
  • +
  • +

    Simple
    +KISS (Keep It Small and Simple), no native installers that globally mess your OS or tool-integration that break with every release. Instead, use templates and simple shell scripts.

    +
  • +
  • +

    Configurable
    +You can change the configuration depending on your needs. Furthermore, the settings contain configuration templates for the different tools (see configurator).

    +
  • +
  • +

    Maintainable
    +For your project you should copy these settings to an own git repository that can be maintained and updated to manage the tool configurations during the project lifecycle. If you use GitHub or GitLab every developer can easily suggest changes and improvements to these settings via pull/merge requests, which is easier to manage with big teams.

    +
  • +
  • +

    Customizable
    +Do you need an additional tool you had never heard of before? Put it in the software folder of the structure. The devon CLI will then automatically add it to your PATH variable.
    +Further you can create your own commandlet for your additional tool. For closed-source tools you can create your own archive and distribute it to your team members as long as you care about the terms and licenses of these tools.

    +
  • +
  • +

    Multi-platform
    +It works on all major platforms: Windows, Mac and Linux.

    +
  • +
  • +

    Multi-tenancy
    +You can have several instances of the devonfw-ide "installed" on your machine for different projects with different tools, tool versions and configurations. You won’t need to set up any physical installation nor changing your operating system. "Installations" of devonfw-ide do not interfere with each other nor with other installed software.

    +
  • +
  • +

    Multiple Workspaces
    +It supports working with different workspaces on different branches. You can create and update new workspaces with a few clicks. You can see the workspace name in the title-bar of your IDE so you do not get confused and work on the right branch.

    +
  • +
  • +

    Free
    +The devonfw-ide is free just like everything from devonfw. See LICENSE for details.

    +
  • +
+
+
+
+
+

IDEs

+
+
+

We support the following IDEs:

+
+
+ +
+
+
+
+

Platforms

+
+
+

We support the following platforms:

+
+
+ +
+
+
+
+

Build-Systems

+
+
+

We support the following build-systems:

+
+
+ +
+
+

However, also other IDEs, platforms, or tools can be easily integrated as commandlet.

+
+
+
+
+

Motivation

+
+
+

TL;DR? Let's talk to developers in a language they understand. Here are some examples with devonfw-ide:

+
+
+
+
[/]$ devon
+You are not inside a devonfw-ide installation: /
+[/]$ cd /projects/devonfw
+[devonfw]$ mvn
+zsh: command not found: mvn
+[devonfw]$ devon
+devonfw-ide environment variables have been set for /projects/devonfw in workspace main
+[devonfw]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/devonfw/software/maven
+Java version: 1.8.0_191, vendor: Oracle Corporation, runtime: /projects/devonfw/software/java
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[devonfw]$ cd /projects/ide-test/workspaces/test/my-project
+[my-project]$ devon
+devonfw-ide environment variables have been set for /projects/ide-test in workspace test
+[my-project]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/ide-test/software/maven
+Java version: 11.0.2, vendor: Oracle Corporation, runtime: /projects/ide-test/software/jdk/Contents/Home
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[ide-test]$ devon eclipse
+launching Eclipse for workspace test...
+[my-project]$ devon build
+[INFO] Scanning for projects...
+...
+[INFO] BUILD SUCCESS
+
+
+
+

This was just a very simple demo of what devonfw-ide can do. For further details have a look at our CLI documentation.

+
+
+

Now you might ask:

+
+
+
    +
  • +

    But I use Windows/Linux/MacOS/… - it works on all platforms!

    +
  • +
  • +

    But how about Windows CMD or Power-Shell? - it works!

    +
  • +
  • +

    But what if I use cygwin or git-bash on windows? - it works!

    +
  • +
  • +

    But I love to use ConEmu or Commander - it works with full integration!

    +
  • +
  • +

    How about macOS Terminal or iTerm2? - it works with full integration!

    +
  • +
  • +

    But I use Zsh - it works!

    +
  • +
  • +

    …​? - it works!

    +
  • +
+
+
+

Wow! So let’s get started with download & setup.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/gradle.html b/docs/devonfw.github.io/1.0/ide.wiki/gradle.html new file mode 100644 index 00000000..ed9416a6 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/gradle.html @@ -0,0 +1,291 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

gradle

+
+
+

The gradle commandlet allows to install, configure, and launch gradle. It is similar to gradle-wrapper. So calling devon gradle «args» is more or less the same as calling gradle «args» but with the benefit that the version of gradle preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon gradle «args») are explained by the following table:

+
+
+
Usage of devon gradle
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup gradle (install and verify), configurable via GRADLE_VERSION +|«args» |run gradle with the given arguments («args») +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/help.html b/docs/devonfw.github.io/1.0/ide.wiki/help.html new file mode 100644 index 00000000..4be7bb04 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/help.html @@ -0,0 +1,291 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

help

+
+
+

The help commandlet provides help for the CLI.

+
+
+
Usage of devon help
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |Print general help +|«command» |Print help for the commandlet «command». +|== == == == == == == == == == == =

+
+
+

Please note that devon help «command» will do the same as devon «command» help.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/ide.html b/docs/devonfw.github.io/1.0/ide.wiki/ide.html new file mode 100644 index 00000000..377c8abc --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/ide.html @@ -0,0 +1,384 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

ide

+
+
+

The ide commandlet manages your devonfw-ide. +You need to supply additional arguments as devon ide «args». These are explained by the following table:

+
+
+
Usage of devon ide
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup [«SETTINGS_URL»] |setup devonfw-ide (cloning the settings from the given URL, optionally from specific branch URL#branch) +|update [«package»] |update devonfw-ide +|update scripts [to «version»] |update devonfw-ide +|uninstall |uninstall devonfw-ide (if you want to remove it entirely from your system) +|== == == == == == == == == == == =

+
+
+
+
+

setup

+
+
+

Run devon ide setup to initially set up your devonfw-ide. It is recommended to run the setup script in the top-level directory ($DEVON_IDE_HOME). However, in case you want to skip some system specific integration, you may also run this command directly instead. The setup only needs to be called once after a new devonfw-ide instance has been created. It will follow this process:

+
+
+
    +
  • +

    install the devon command on your system (if not already installed).

    +
  • +
  • +

    clone the settings (you may provide a git URL directly as argument or you will be prompted for it).

    +
  • +
  • +

    install all required software from DEVON_IDE_TOOLS variable (if not already installed).

    +
  • +
  • +

    configure all these tools

    +
  • +
  • +

    create IDE launch scripts

    +
  • +
  • +

    perform OS specific system integration such as Windows Explorer integration (only done from setup script and not from devon ide setup)

    +
  • +
+
+
+
+
+

update

+
+
+

Run devon ide update to update your devonfw-ide. This will check for updates and install them automatically. +The optional extra argument («package») behaves as follows:

+
+
+
    +
  • +

    scripts: check if a new version of devonfw-ide-scripts is available. If so it will be downloaded and installed. As Windows is using file-locks, it is tricky to update a script while it is executed. Therefore, we update the scripts folder as an async background task and have to abort further processing at this point on windows as a workaround.

    +
  • +
  • +

    settings: update the settings (git pull).

    +
  • +
  • +

    software: update the software (e.g. if versions have changed via scripts or settings update).

    +
  • +
  • +

    projects: update the projects (checkout and import repositories into workspace/IDEs).

    +
  • +
  • +

    all: do all the above sequentially.

    +
  • +
  • +

    none: settings and software are updated by default if no extra argument is given. This is the regular usage for project developers. Only perform an update of scripts when you are requested to do so by your technical lead. Bigger projects especially need to test updates before rolling them out to the entire team. If developers always updated the latest release of the scripts which is released globally, some project functionality would break causing problems and extra efforts in the teams.

    +
  • +
+
+
+

In order to update to a specific version of scripts an explicit version can be specified after the additional to argument:

+
+
+
+
devon ide update scripts to 3.1.99
+
+
+
+

The above example will update to the exact version 3.1.99 no matter if this is an upgrade or a downgrade of your current installed version. +If you just use devon ide update scripts then the latest available version will be installed. In larger teams it is recommended to communicate exact version updates to avoid that a new release can interfere and break anything. Therefore, some pilot user will test a new version for the entire team and, only after a successful test, they will communicate to the team to update to that exact version by providing the complete command as in the above example.

+
+
+
+
+

uninstall

+
+
+

We hope you love devonfw-ide. However, if you don’t and want to get rid of it entirely and completely remove all integration, you can use this command:

+
+
+
+
devon ide uninstall
+
+
+
+

This will remove devonfw-ide from all central places of your OS (user home directory such as scripts, .devon, .bashrc, as well as windows registry, etc.). +However, it will not remove your current installations (or shared software folder). So after running this uninstall, simply remove your DEVON_IDE_HOME directory of all devonfw-ide installations and potential shared software folder. You may also want to clean up your ~/Downloads directory from files downloaded by devonfw-ide. We do not automate this as deleting a directory is a very simple manual step and we do not want to take responsibility for severe data loss if your workspaces contained valuable work.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/integration.html b/docs/devonfw.github.io/1.0/ide.wiki/integration.html new file mode 100644 index 00000000..edc39f49 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/integration.html @@ -0,0 +1,424 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Integration

+
+
+

The devonfw-ide already brings a lot of integration out of the box. This page is for users that want to get even more out of it. For instance this IDE ships with a console script to open a shell with the environment variables properly set for your devonfw-ide installation, so you get the correct version of your tools (Java, Maven, Yarn, etc.). However, you might want to open a shell from your IDE or your file manager. For some of these use-cases you need additional tweaks that are described on this page.

+
+
+
+
+

Windows

+
+
+

devonfw-ide automatically integrates with Windows-Explorer during setup.

+
+
+
+
+

CMD

+
+
+

If you want to open a CMD (MS Dos Shell) directly from Windows-Explorer simply right-click on the folder in your devonfw-ide you want to open. From the context menu click on Open Devon CMD shell here. This will open CMD and automatically initialize your environment according to the devonfw-ide project containing the folder (if any, see above).

+
+
+
+
+

Git-Bash

+
+
+

Just like for CMD you can also click Git Bash Here from Windows-Explorer context-menu to open a git bash. If you have selected a folder in your devonfw-ide installation, it will automatically initialize your environment.

+
+
+
+
+

Cygwin

+
+
+

In case you have cygwin installed on your machine, the devonfw-ide will autodetect this during setup and also install a Windows-Explorer integration. Just choose Open Devon Cygwin Bash Here to open cygwin bash and initialize your devonfw-ide environment.

+
+
+
+
+

ConEMU

+
+
+

ConEmu is a great extension that brings additional features such as tabs to your windows shells. If you like it, you will also want to have it integrated with devonfw-ide. All you need to do is follow these simple steps:

+
+
+
    +
  • +

    Copy the file CmdInit.cmd from your ConEmu installation (C:\Program Files\ConEmu\ConEmu\CmdInit.cmd) to a personal folder (e.g. C:\Users\MYLOGIN\scripts).

    +
  • +
  • +

    Modify this copy of CmdInit.cmd by adding the line devon (e.g. at line 6) and saving.

    +
  • +
  • +

    Go to ConEmu and open the settings (via context menu or [Windows][Alt][p]).

    +
  • +
  • +

    Select Startup > Tasks from the left tree.

    +
  • +
  • +

    Select the first option form Predefined tasks (command groups) ({Shells::cmd})

    +
  • +
  • +

    In the text area at the right bottom modify the location of CmdInit.cmd to your customized copy (%HOME%\scripts\CmdInit.cmd).

    +
  • +
  • +

    Select Integration from the left tree.

    +
  • +
  • +

    Click on the upper Register button (for ConEmu Here).

    +
  • +
  • +

    Click on Save settings

    +
  • +
+
+
+

Now you have the option ConEmu here if you right click on a folder in Windows Explorer that will open a new tab in ConEmu and automatically setup your environment according to the devonfw-ide project containing the folder (if any, see above).

+
+
+
+
+

ConEMU and StartExplorer

+
+
+

You can even integrate the Eclipse StartExplorer plug-in and ConEMU to open up console right from the file tree of eclipse into ConEMU. You can do this by adding a custom command to StartExplorer:

+
+
+
    +
  1. +

    Open up eclipse

    +
  2. +
  3. +

    Open Window > Preferences

    +
  4. +
  5. +

    Select StartExplorer > Custom Commands on the left

    +
  6. +
  7. +

    Add on the right and setup the following command: "C:\Program Files\ConEmu\ConEmu64.exe" -Dir ${resource_path} -runlist cmd.exe /k ""%ConEmuBaseDir%\CmdInit.cmd" & "IDEenv"" -cur_console:n +Be aware that you potentially have to adapt the 'ConEmu*.exe' path to match your installation.

    +
    +

    You can even add a shell login if you installed git bash on your machine. Please be aware to potentially adapt the sh.exe url to match your installation: "C:\Program Files\ConEmu\ConEmu64.exe" -Dir ${resource_path} -runlist cmd.exe /k ""%ConEmuBaseDir%\CmdInit.cmd" & "IDEenv" & "%SYSTEMDRIVE%\Program Files\Git\bin\sh.exe" --login" -cur_console:n

    +
    +
  8. +
  9. +

    State two times the "Name for *" to your choice like "ConEMU"

    +
  10. +
  11. +

    OK → OK

    +
  12. +
  13. +

    Right click on any folder/file in your eclipse file explorer and select StartExplorer > Custom Commands > ConEMU.

    +
  14. +
  15. +

    You will get an initialized console at the file/folder location! Have fun!

    +
  16. +
+
+
+
+
+

Eclipse

+
+
+

You might want to open a terminal directly as view inside your Eclipse IDE. Therefore we provide eclipse with the TM terminal plugin. +Further the settings already configure that plugin so it automatically sets the environment properties correctly. In other words the integration comes out of the box.

+
+
+

To use it all you need to do is to follow these steps:

+
+
+
    +
  • +

    Open the Terminal view (Window > Show View > Other > Terminal > Terminal > OK).

    +
  • +
  • +

    Click on the monitor icon from the left of the icon bar of the Terminal view.

    +
  • +
  • +

    Choose terminal (e.g. Local Terminal) and confirm with OK

    +
  • +
  • +

    Execute mvn -v to verify your environment.

    +
  • +
+
+
+
+
+

IntelliJ or WebStorm

+
+
+

You might want to open a terminal directly as view inside your IDEA IDE, that already ships with a feature for this out of the box. +If you start your IDE via the intellij-main script generated by devonfw-ide or via devon intellij start from a shell then everything is configured and your environment is set automatically.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/intellij.html b/docs/devonfw.github.io/1.0/ide.wiki/intellij.html new file mode 100644 index 00000000..f1591902 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/intellij.html @@ -0,0 +1,348 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

intellij

+
+
+

The intellij commandlet allows to install, configure, and launch IntelliJ. +To launch IntelliJ for your current workspace and devonfw-ide installation, simply run: +devon intellij

+
+
+

You may also supply additional arguments as devon intellij «args». These are explained by the following table:

+
+
+
Usage of devon intellij
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then to command will be invoked for each workspace +|setup |setup IntelliJ (install or update) +|add-plugin «id»|install an additional plugin +|run |launch IntelliJ (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+

There are variables that can be used for IntelliJ. These are explained by the following table:

+
+
+
Variables of devonfw-ide for intelliJ
+

|== == == == == == == == == == == = +|Variable|Meaning +|INTELLIJ_VERSION|The version of the tool IntelliJ to install and use. +|INTELLIJ_EDITION_TYPE|The edition of the tool IntelliJ to install and use. The value C means Community edition and the value U means Ultimate edition. The Ultimate edition requires a license. The user has to buy the license separately and it is not part of devonfw-ide. The devonfw-ide only supports download and installation. +|*EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|== == == == == == == == == == == =

+
+
+
+
+

plugins

+
+
+

To be productive with IntelliJ you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder intellij/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example scala.properties:

+
+
+
+
plugin_id=org.intellij.scala
+plugin_active=false
+
+
+
+

The variables are defined as follows:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins use the search on https://plugins.jetbrains.com/idea_ce to find the plugin of your choice. Select the tab Versions and click on a version in the list. The plugin ID is displayed in the upper right corner. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon intellij add-plugin «plugin_id».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of IntelliJ. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/ionic.html b/docs/devonfw.github.io/1.0/ide.wiki/ionic.html new file mode 100644 index 00000000..cad3eb22 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/ionic.html @@ -0,0 +1,293 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

ionic

+
+
+

The ionic commandlet allows to install, configure, and launch ionic (ionic-cli). Calling devon ionic «args» is more or less the same as calling ionic «args» but with some advanced features and ensuring that ionic is properly set up for your project.

+
+
+

The arguments (devon ionic «args») are explained by the following table:

+
+
+
Usage of devon ionic
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup yarn (install and verify), configurable via YARN_VERSION +|create |Create a new devon4ng ionic project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ionic with the given arguments («args») +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/jasypt.html b/docs/devonfw.github.io/1.0/ide.wiki/jasypt.html new file mode 100644 index 00000000..f60fb52d --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/jasypt.html @@ -0,0 +1,347 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

jasypt

+
+
+

The jasypt commandlet allows to install jasypt and encrypt or decrypt secrets using strong encryption given a secure masterpassword. See also devon4j password encryption guide for further details.

+
+
+

The arguments (devon jasypt «args») are explained by the following table:

+
+
+
Usage of devon jasypt
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup jasypt (install and verify), configurable via JASYPT_VERSION +|encrypt |Encrypt a secret with a masterpassword +|decrypt |Decrypt an encrypted secret with a masterpassword +|== == == == == == == == == == == =

+
+
+
+
+

example

+
+
+
+
devon jasypt encrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: secret
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: secret
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+devon jasypt decrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+secret
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/java.html b/docs/devonfw.github.io/1.0/ide.wiki/java.html new file mode 100644 index 00000000..cc56bc6d --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/java.html @@ -0,0 +1,368 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

java

+
+
+

The java commandlet allows to install and setup Java. Also it supports devon4j. +The arguments (devon java «args») are explained by the following table:

+
+
+
Usage of devon java
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup OpenJDK (install or update and verify), configurable via JAVA_VERSION (e.g. 8u242b08 or 11.0.6_10) +|create «args» |create a new Java project based on devon4j application template. If a single argument is provided, this is the package name and is automatically split into groupId and artifactId. Use -DdbType=«db» to choose the database (hana, oracle, mssql, postgresql, mariadb, mysql, h2, hsqldb). Any option starting with dash is passed as is. +|migrate [from «version»] [single] |migrate a devon4j project to the latest version. If for some reason the current devonfw version can not be auto-detected you may provide it manually after the 'from' argument. Also the 'single' option allows to migrate only to the next available version. +|cicd «args» |generate cicd files for the current devon4java project +|== == == == == == == == == == == =

+
+
+

Since 2021.12.003 an extra version of Java can be configured via EXTRA_JAVA_VERSION variable. This can be used to launch your IDE with a different (newer) version of Java but keeping the build of your project stable.

+
+
+
+
+

create

+
+
+

Examples for create a new devon4j application:

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create -Dversion=0.0.1-alpha1 com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 0.0.1-alpha1, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group demo-app
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId demo-app, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp -DartifactId=demo-app -DdbType=hana
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId demo-app, version 1.0.0-SNAPSHOT, and SAP hana database.

+
+
+
+
devon java create com.example.domain.myapp -DdbType=oracle -Dversion=0.0.1 com.example.group -Dbatch=batch
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 0.0.1, oracle database, and with a batch module.

+
+
+
+
+

migrate

+
+
+

Example for migrating a devon4j application:

+
+
+
+
devon java migrate
+
+
+
+

Will migrate current devon4j application to the latest version available.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/jenkins.html b/docs/devonfw.github.io/1.0/ide.wiki/jenkins.html new file mode 100644 index 00000000..a1a8911c --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/jenkins.html @@ -0,0 +1,290 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

jenkins

+
+
+

The jenkins commandlet allows to install, configure, and launch Jenkins.

+
+
+
Usage of devon jenkins
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup Jenkins (install and verify) +|start |Start your local Jenkins server +|stop |Stop your local Jenkins server +|add |Add current project as CI job to your local Jenkins +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/kubectl.html b/docs/devonfw.github.io/1.0/ide.wiki/kubectl.html new file mode 100644 index 00000000..10ccfd20 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/kubectl.html @@ -0,0 +1,328 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Kubernetes

+
+
+

The kubectl commandlet allows to install and use Kubernetes. +On Windows, WSL 2 (Windows Subsystem for Linux) has to be installed properly as a prerequisite. +The setup on Windows will then install Kubernetes with K3D. K3D will create a cluster with a single node with a default name as "devonfw-cluster"

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon kubectl «args») are explained by the following table:

+
+
+
Usage of devon kubectl
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Kubernetes (install and verify) as per above flow. +|«args» |call kubectl with the specified arguments. Call kubectl help for details or use kubectl directly as preferred. +|== == == == == == == == == == == =

+
+
+
+
+

setup

+
+
+

Please note that on Windows and macOS, Kubernetes support comes together with Docker Desktop that is installed via docker commandlet. +When you have installed and launched Docker Desktop, you can once enable Kubernetes in the Preferences.

+
+
+

On Linux however, Kubernetes is installed separately by this commandlet.

+
+
+
+
+

usage

+
+
+

Once installed via setup, you can run kubectl directly from any shell of your OS directly. +Run kubectl help to get started and use the online documentations and resources on the web to get familiar with Kubernetes. +It is not our intention to repeat this here.

+
+
+

Please note that the kubectl commandlet is a command wrapper.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/kubernetes.html b/docs/devonfw.github.io/1.0/ide.wiki/kubernetes.html new file mode 100644 index 00000000..acff9bbe --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/kubernetes.html @@ -0,0 +1,314 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

kubernetes

+
+
+

The kubernetes commandlet allows to install kubernetes. This command is implemented to currently work on Windows. Other OS are not supported yet.

+
+
+
Usage of devon kubernetes
+

On Windows

+
+
+
    +
  • +

    Checks whether Windows Subsystem for Linux (WSL) has been enabled and any Linux distribution has been installed.

    +
  • +
  • +

    If yes, checks whether Kubernetes has already been installed either on Windows or on WSL.

    +
  • +
  • +

    If yes, program quits since Kubernetes is already available.

    +
  • +
  • +

    If not, this will install Kubernetes on WSL along with K3D

    +
  • +
  • +

    As part of the setup, K3D will create a cluster with a single node with a default name as "devonfw-cluster"

    +
  • +
+
+
+

The arguments (devon kubernetes «args») are explained by the following table:

+
+
+
Usage of devon kubernetes
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Kubernetes (install and verify) as per above flow. +|«args» |call kubernetes(kubectl) with the specified arguments. Call kubectl help for details or use kubectl directly as preferred. («args») +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/log.html b/docs/devonfw.github.io/1.0/ide.wiki/log.html new file mode 100644 index 00000000..d0ac9d50 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/log.html @@ -0,0 +1,280 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

log

+
+
+

The log directory is used to store log files e.g. for the IDE configurator. You may look here for debug information if something goes wrong.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/lombok.html b/docs/devonfw.github.io/1.0/ide.wiki/lombok.html new file mode 100644 index 00000000..2d428cfe --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/lombok.html @@ -0,0 +1,328 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

Lombok

+
+
+

Even though not officially recommended by devon4j some projects want to use lombok in their project. +As this requires some tweaks for IDEs we do support you with this guide in case you want to use it.

+
+
+
+
+

Lombok in Eclipse

+
+
+

For eclipse there is a plugin to activate lombok support in eclipse. +We have this already configured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon eclipse add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for lombok based projects you only need to activate this plugin in your project specific settings in lombok.properties for eclipse (replace false with true for plugin_active).

+
+
+
+
+

Lombok for VS-Code

+
+
+

For VisualStudio Code there is an extension to activate lombok support in VS-Code. +We have this already preconfigured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon vscode add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for lombok based projects you only need to activate this plugin in your project specific settings in lombok.properties for vscode (replace false with true for plugin_active).

+
+
+
+
+

Lombok for IntelliJ

+
+
+

For IntelliJ there is a plugin to activate lombok support in IntelliJ. +Currently we have not yet configured or automated this in devonfw-ide. +Please contribute to change this. See issues #453 and #491.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/master-ide.html b/docs/devonfw.github.io/1.0/ide.wiki/master-ide.html new file mode 100644 index 00000000..6fca6fb9 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/master-ide.html @@ -0,0 +1,5727 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw-ide

+
+
+

Introduction

+
+

devonfw provides a solution to building applications which combine best-in-class frameworks and libraries +as well as industry proven practices and code conventions. +It massively speeds up development, reduces risks and helps deliver better results.

+
+
+

This document contains the instructions for the tool devonfw-ide to set up and maintain your development tools including your favorite IDE (integrated development environment).

+
+ +
+

Features

+
+

Every developer needs great tools to work efficiently. Setting up these tools manually can be tedious and error-prone. Furthermore, some projects may require different versions and configurations of such tools. Especially configurations like code-formatters should be consistent within a project to avoid diff-wars.

+
+
+

The devonfw-ide will solve these issues. Here are the features you will find through devonfw-ide:

+
+
+
    +
  • +

    Efficient
    +Set up your IDE within minutes tailored for the requirements of your project.

    +
  • +
  • +

    Automated
    +Automate the setup and update, avoid manual steps and mistakes.

    +
  • +
  • +

    Simple
    +KISS (Keep It Small and Simple), no native installers that globally mess your OS or tool-integration that break with every release. Instead, use templates and simple shell scripts.

    +
  • +
  • +

    Configurable
    +You can change the configuration depending on your needs. Furthermore, the settings contain configuration templates for the different tools (see configurator).

    +
  • +
  • +

    Maintainable
    +For your project you should copy these settings to an own git repository that can be maintained and updated to manage the tool configurations during the project lifecycle. If you use GitHub or GitLab every developer can easily suggest changes and improvements to these settings via pull/merge requests, which is easier to manage with big teams.

    +
  • +
  • +

    Customizable
    +Do you need an additional tool you had never heard of before? Put it in the software folder of the structure. The devon CLI will then automatically add it to your PATH variable.
    +Further you can create your own commandlet for your additional tool. For closed-source tools you can create your own archive and distribute it to your team members as long as you care about the terms and licenses of these tools.

    +
  • +
  • +

    Multi-platform
    +It works on all major platforms: Windows, Mac and Linux.

    +
  • +
  • +

    Multi-tenancy
    +You can have several instances of the devonfw-ide "installed" on your machine for different projects with different tools, tool versions and configurations. You won’t need to set up any physical installation nor changing your operating system. "Installations" of devonfw-ide do not interfere with each other nor with other installed software.

    +
  • +
  • +

    Multiple Workspaces
    +It supports working with different workspaces on different branches. You can create and update new workspaces with a few clicks. You can see the workspace name in the title-bar of your IDE so you do not get confused and work on the right branch.

    +
  • +
  • +

    Free
    +The devonfw-ide is free just like everything from devonfw. See LICENSE for details.

    +
  • +
+
+
+
+

IDEs

+
+

We support the following IDEs:

+
+
+ +
+
+
+

Platforms

+
+

We support the following platforms:

+
+
+ +
+
+
+

Build-Systems

+
+

We support the following build-systems:

+
+
+ +
+
+

However, also other IDEs, platforms, or tools can be easily integrated as commandlet.

+
+
+
+

Motivation

+
+

TL;DR? Lets talk to developers a correct language. Here are some examples with devonfw-ide:

+
+
+
+
[/]$ devon
+You are not inside a devonfw-ide installation: /
+[/]$ cd /projects/devonfw
+[devonfw]$ mvn
+zsh: command not found: mvn
+[devonfw]$ devon
+devonfw-ide environment variables have been set for /projects/devonfw in workspace main
+[devonfw]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/devonfw/software/maven
+Java version: 1.8.0_191, vendor: Oracle Corporation, runtime: /projects/devonfw/software/java
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[devonfw]$ cd /projects/ide-test/workspaces/test/my-project
+[my-project]$ devon
+devonfw-ide environment variables have been set for /projects/ide-test in workspace test
+[my-project]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/ide-test/software/maven
+Java version: 11.0.2, vendor: Oracle Corporation, runtime: /projects/ide-test/software/jdk/Contents/Home
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[ide-test]$ devon eclipse
+launching Eclipse for workspace test...
+[my-project]$ devon build
+[INFO] Scanning for projects...
+...
+[INFO] BUILD SUCCESS
+
+
+
+

This was just a very simple demo of what devonfw-ide can do. For further details have a look at our CLI documentation.

+
+
+

Now you might ask:

+
+
+
    +
  • +

    But I use Windows/Linux/MacOS/… - it works on all platforms!

    +
  • +
  • +

    But how about Windows CMD or Power-Shell? - it works!

    +
  • +
  • +

    But what if I use cygwin or git-bash on windows? - it works!

    +
  • +
  • +

    But I love to use ConEmu or Commander - it works with full integration!

    +
  • +
  • +

    How about macOS Terminal or iTerm2? - it works with full integration!

    +
  • +
  • +

    But I use Zsh - it works!

    +
  • +
  • +

    …​? - it works!

    +
  • +
+
+
+

Wow! So let’s get started with download & setup.

+
+ +
+
+

Setup

+ +
+
+

Prerequisites

+
+

We try to make it as simple as possible for you. However, there are some minimal prerequisites:

+
+
+
    +
  • +

    You need to have a tool to extract *.tar.gz files (tar and gzip). On Windows before Version 10 (1803) use 7-zip. On all other platforms this comes out of the box.

    +
  • +
  • +

    You need to have git and curl installed.

    +
    +
      +
    • +

      On Windows you only need to download and install git for windows. This also ships with bash and curl.

      +
    • +
    • +

      On Linux you might need to install the above tools in case they are not present (e.g. sudo apt-get install git curl or sudo yum install git-core curl)

      +
    • +
    • +

      On MacOS you only need to download and install git for mac.

      +
    • +
    +
    +
  • +
+
+
+
+

Download

+
+

The latest release of devonfw-ide can be downloaded from here (You can find all releases in maven central).

+
+
+
+

Install

+
+

Create a central folder like C:\projects or /projects. Inside this folder, create a sub-folder for your new project such as my-project and extract the contents of the downloaded archive (devonfw-ide-scripts-*.tar.gz) to this new folder. Run the command setup in this folder (on windows double clicking on setup.bat). +That’s all. To get started read the usage.

+
+
+
+

Uninstall

+
+

To "uninstall" your devonfw-ide you only need to call the following command:

+
+
+
+
devon ide uninstall
+
+
+
+

Then you can delete the devonfw-ide top-level folder(s) (${DEVON_IDE_HOME}).

+
+
+

The devonfw-ide is designed to be non-invasive to your operating system and computer. Therefore it is not "installed" on your system in a classical way. Instead you just create a folder and extract the downloaded archive to it. You only have to install regularly in advance some specific prerequisites like git. All the other softwares remain locally in your devonfw-ide folder. However, there are the following exceptions (which are reverted by devon ide uninstall):

+
+
+
    +
  • +

    The devon command is copied to your home directory (~/.devon/devon)

    +
  • +
  • +

    The devon alias is added to your shell config (~/.bashrc and ~/.zshrc, search for alias devon="source ~/.devon/devon").

    +
  • +
  • +

    On Windows the devon.bat command is copied to your home directory (%USERPROFILE%\scripts\devon.bat)

    +
  • +
  • +

    On Windows this %USERPROFILE%\scripts directory is added to the PATH of your user.

    +
  • +
  • +

    The devonfw-ide will download a third party software to your ~/Downloads/devonfw-ide folder to reduce redundant storage. You have to delete this folder manually as we do not want to be responsible for data-loss in case users manually put files here.

    +
  • +
+
+
+
+

Testing SNAPSHOT releases

+
+

Whenever a story in devonfw-ide is completed by merging a PR, +our github actions will build a new SNAPSHOT release and on success deploy it to nexus on OSSRH. +You can therefore find the latest devonfw SNAPSHOT releases here. +Simply choose the latest SNAPSHOT version folder and then inside the *.tar.gz file for the latest version. +Once downloaded, you can proceed as with official releases (see install).

+
+
+

If you test the latest SNAPSHOT please also give feedback to bug or feature tickets to let us know if things are working or not. +Thanks for your testing, support and help to make devonfw better!

+
+
+
+
+
+

Usage

+ +
+

This section explains the usage of devonfw-ide according to your role:

+
+
+
    +
  • +

    Everybody should read and follow the usage for a developer.

    +
  • +
  • +

    In case you want to administrate devonfw-ide settings for your project, you should also read the usage for the ide-admin.

    +
  • +
+
+
+

Developer

+
+

As a developer you are supported to setup your IDE automated and fast while you can have a nice cup of coffee (after you provided settings-URL and accepted the license). +You only need the settings URL from your ide-admin. +Experienced developers can directly call setup «settings-URL». +Otherwise if you just call setup (e.g. by double-clicking it), you can enter it when you are prompted for Settings URL (using copy&paste to avoid typos).

+
+
+

Note: devonfw-ide supports autocompletion (since 2021.04.001). Currently this only works in bash (on windows use git bash). Simply type devon and hit [Tab] to get completion.

+
+
+
+

Update

+
+

To update your IDE (if instructed by your ide-admin), you only need to run the following command:

+
+
+
+
devon ide update
+
+
+
+

Please note that Windows is using file-locking which can have ugly side-effects. +To be safe, you should have your IDE tools shut down before invoking the above update command. +E.g. if a tool needs to be updated, the old installation folder will be moved to a backup and the new version is installed on top. +If there are windows file locks in place this can fail and mess up things. +You can still delete the according installation from your software folder and rerun devon ide update if you ran into this error.

+
+
+
+

Working with multiple workspaces

+
+

If you are working on different branches in parallel you typically want to use multiple workspaces.

+
+
+
    +
  1. +

    Go to the workspaces folder in your ${DEVON_IDE_HOME} and create a new folder with the name of your choice (e.g. release2.1).

    +
  2. +
  3. +

    Check out (git clone …​) the according projects and branch into that workspace folder.

    +
  4. +
  5. +

    Open a shell in that new workspace folder (cd to it) and according to your IDE run e.g. eclipse, vscode, or intellij to create your workspace and launch the IDE. You can also add the parameter create-script to the IDE commandlet in order to create a launch-script for your IDE.

    +
  6. +
+
+
+

You can have multiple instances of eclipse running for each workspace in parallel. To distinguish these instances you will find the workspace name in the title of eclipse.

+
+
+
+

Admin

+
+

You can easily customize and configure devonfw-ide for the requirements of your project. +In order to do so, you need to create your own project-specific settings git repository and provide the URL to all developers for the setup. +With tools such as gitlab, bitbucket or github every developer can easily propose changes and improvements. +However, we suggest that one team member is responsible to ensure that everything stays consistent and works. +We will call this person the ide-admin of your project.

+
+
+

The following are the suggested step-by-step instructions how an ide-admin should prepare devonfw-ide for his new project:

+
+
+
    +
  1. +

    Fork ide-settings to a git repository specific for your project (e.g. a new project in the gitlab of your production-line instance). In case you are using github, all you need to do is use the Fork button. In other cases simply create a new and empty git repository and clone this to your machine. Then add the default ide-settings as origin, fetch and pull from it:

    +
    +
    +
    git remote add upstream https://github.com/devonfw/ide-settings.git
    +git fetch upstream
    +git pull upstream master
    +git push
    +
    +
    +
    +

    Now you should have a full fork as a copy of the settings git repo with all its history that is ready for upstream merges.

    +
    +
  2. +
  3. +

    Study the structure of this git repository to understand where to find which configuration.

    +
  4. +
  5. +

    Study the configuration and understand that general settings can be tweaked in the toplevel devon.properties file of your settings git repository.

    +
  6. +
  7. +

    Configure the tools and their versions for your project. Here is an example:

    +
    +
    +
    DEVON_IDE_TOOLS=(java mvn eclipse)
    +ECLIPSE_VERSION=2020-06
    +##use e.g. 8u242b08 for Java 8
    +#JAVA_VERSION=8u242b08
    +JAVA_VERSION=11.0.5_10
    +MAVEN_VERSION=3.6.2
    +
    +
    +
    +

    This way you will take over control of the tools and their versions for every developer in your project team and ensure that things get reproducible.

    +
    +
  8. +
  9. +

    In case you need a proprietary or unsupported tool, you can study how to include custom tools.

    +
  10. +
  11. +

    In case you have very restrictive policies about downloading tools from the internet, you can create and configure a software repository for your project or company.

    +
  12. +
  13. +

    Some of the tools (especially the actual IDEs) allow extensions via plugins. You can customize them to your needs for eclipse, VS code, or intelliJ.

    +
  14. +
  15. +

    In your settings git repository you will find a projects folder. Here you will find configurations files for every git project relevant for your actual project. Feel free to create new projects for your needs and delete the devonfw specific default projects. The projects documentation will explain you how to do this.

    +
  16. +
  17. +

    For every IDE you will also find an according folder in your settings git repository. Here are the individual configuration settings for that IDE. You can change them by directly editing the according configuration files directly with a text-editor in your settings git repository. However, this is a really complex way and will take you a lot of time to find the right file and property to tweak for your actual need. Instead we suggest to study +how to customize IDE specific settings.

    +
  18. +
  19. +

    You may also create new sub-folders in your settings git repository and put individual things according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth to share in your team. However, to share and maintain knowledge we recommend to use a wiki instead.

    +
  20. +
  21. +

    You may want to customize the Eclipse spellchecker dictionary for your project and your language.

    +
  22. +
+
+
+

All described in the above steps (except the first one) can be used to manage and update the configuration during the project lifecycle. +However, when you have done changes especially in a larger project, please consider the following best-practices to avoid that a large team gets blocked by a non-functional IDE:

+
+
+
    +
  • +

    Commit your changes to a feature-branch.

    +
  • +
  • +

    First test the changes yourself.

    +
  • +
  • +

    If all works as expected, pick a pilot user of the team to test the changes from the feature branch (go to settings folder, git fetch, git checkout -t origin/feature/«name», devon ide update).

    +
  • +
  • +

    Only after that works well for a couple of days, inform the entire team to update.

    +
  • +
+
+
+
+

Announce changes to your team

+
+

In order to roll out the perfectly configured devonfw-ide to your project initially or when new members join, you only have to provide the Settings URL to the developers of your team. +You can also provide a specific branch with Settings URL#branch to use variations of common settings or to test new settings before making them public to the team.

+
+
+

After you changed and tested your settings git repository (main branch), you only need to announce this to your developers (e.g. via email or some communication tool) so that they can run devon ide update and automatically get up-to-date with the latest changes (see update).

+
+
+

In case you want to go to a new version of devonfw-ide itself, developers have to call devon ide update scripts.

+
+
+ +
+
+

Configuration

+
+

The devonfw-ide aims to be highly configurable and flexible. The configuration of the devon command and environment variables takes place via devon.properties files. The following list shows these configuration files in the order they are loaded so files can override variables from files above in the list:

+
+
+
    +
  1. +

    build in defaults (for JAVA_VERSION, ECLIPSE_PLUGINS, etc.)

    +
  2. +
  3. +

    ~/devon.properties - user specific global defaults (on windows in %USERPROFILE%/devon.properties)

    +
  4. +
  5. +

    scripts/devon.properties - defaults provided by devonfw-ide. Never directly modify this file!

    +
  6. +
  7. +

    devon.properties - vendor variables for custom distributions of devonfw-ide-scripts, may e.g. tweak SETTINGS_PATH or predefine SETTINGS_URL.

    +
  8. +
  9. +

    settings/devon.properties (${SETTINGS_PATH}/devon.properties) - project specific configurations from settings.

    +
  10. +
  11. +

    workspaces/${WORKSPACE}/devon.properties - optional workspace specific configurations (especially helpful in projects using docker).

    +
  12. +
  13. +

    conf/devon.properties - user specific configurations (e.g. M2_REPO=~/.m2/repository). During setup this file is created by copying a template from ${SETTINGS_PATH}/devon/conf/devon.properties.

    +
  14. +
  15. +

    settings/projects/*.properties - properties to configure project checkout and import

    +
  16. +
+
+
+
+

devon.properties

+
+

The devon.properties files allow to define environment variables in a simple and OS independent way:

+
+
+
    +
  • +

    # comments begin with a hash sign (#) and are ignored

    +
  • +
  • +

    variable_name=variable_value with space etc.

    +
  • +
  • +

    variable_name=${predefined_variable}/folder_name

    +
    +

    variable values can refer to other variables that are already defined, which will be resolved to their value. You have to use ${…​} syntax to make it work on all platforms (never use %…​%, $…​, or $(…​) syntax in devon.properties files).

    +
    +
  • +
  • +

    export exported_variable=this value will be exported in bash, in windows CMD the export prefix is ignored

    +
  • +
  • +

    variable_name=

    +
    +

    this will unset the specified variable

    +
    +
  • +
  • +

    variable_name=~/some/path/and.file

    +
    +

    tilde is resolved to your personal home directory on any OS including windows.

    +
    +
  • +
  • +

    array_variable=(value1 value2 value3)

    +
    +

    This will only work properly in bash worlds but as no arrays are used in CMD world of devonfw-ide it does not hurt on windows.

    +
    +
  • +
  • +

    Please never surround values with quotes (var="value")

    +
  • +
  • +

    This format is similar to Java *.properties but does not support advanced features as unicode literals, multi-lined values, etc.

    +
  • +
+
+
+

In order to know what to configure, have a look at the available variables.

+
+
+

Please only tweak configurations that you need to change and take according responsibility. There is a price to pay for flexibility, which means you have to be careful what you do.

+
+
+

Further, you can configure maven via conf/settings.xml. To configure your IDE such as eclipse or vscode you can tweak the settings.

+
+
+ +
+
+

Variables

+
+

The devonfw-ide defines a set of standard variables to your environment for configuration via variables[.bat] files. +These environment variables are described by the following table. +Those variables printed bold are also exported in your shell (except for windows CMD that does not have such concept). Variables with the value - are not set by default but may be set via configuration to override defaults. +Please note that we are trying to minimize any potential side-effect from devonfw-ide to the outside world by reducing the number of variables and only exporting those that are required.

+
+
+
Variables of devonfw-ide
+

|== == == == == == == == == == == = +|Variable|Value|Meaning +|DEVON_IDE_HOME|e.g. /projects/my-project|The top level directory of your devonfw-ide structure. +|PATH|$PATH:$DEVON_IDE_HOME/software/java:…​|You system path is adjusted by devon command. +|DEVON_HOME_DIR|~|The platform independent home directory of the current user. In some edge-cases (e.g. in cygwin) this differs from ~ to ensure a central home directory for the user on a single machine in any context or environment. +|DEVON_IDE_TOOLS|(java mvn node npm)|List of tools that should be installed and upgraded by default for your current IDE. +|DEVON_IDE_CUSTOM_TOOLS|-|List of custom tools that should be installed additionally. See software for further details. +|DEVON_CREATE_START_SCRIPTS|(eclipse vscode)|List of IDEs that shall be used by developers in the project and therefore start-scripts are created on setup. +|DEVON_OLD_PATH|…​|A "backup" of PATH before it was extended by devon to allow recovering it. Internal variable that should never be set or tweaked. +|WORKSPACE|main|The workspace you are currently in. Defaults to main if you are not inside a workspace. Never touch this variable in any variables file. +|WORKSPACE_PATH|$DEVON_IDE_HOME/workspaces/$WORKSPACE|Absolute path to current workspace. Never touch this variable in any variables file. +|JAVA_HOME|$DEVON_IDE_HOME/software/java|Path to JDK +|SETTINGS_PATH|$DEVON_IDE_HOME/settings|Path to your settings. To keep oasp4j-ide legacy behaviour set this to $DEVON_IDE_HOME/workspaces/main/development/settings. +|M2_REPO|$DEVON_IDE_HOME/conf/.m2/repository|Path to your local maven repository. For projects without high security demands, you may change this to the maven default ~/.m2/repository and share your repository among multiple projects. +|MAVEN_HOME|$DEVON_IDE_HOME/software/maven|Path to Maven +|MAVEN_OPTS|-Xmx512m -Duser.home=$DEVON_IDE_HOME/conf|Maven options +|DEVON_SOFTWARE_REPOSITORY|-|Project specific or custom software-repository. 
+|DEVON_SOFTWARE_PATH|-|Globally shared user-specific local software installation location. +|ECLIPSE_VMARGS|-Xms128M -Xmx768M -XX:MaxPermSize=256M|JVM options for Eclipse +|deprecated: ECLIPSE_PLUGINS|-|Array with "feature groups" and "update site URLs" to customize required eclipse plugins. Deprecated - see Eclipse plugins. +|«TOOL»_VERSION|-|The version of the tool «TOOL» to install and use (e.g. ECLIPSE_VERSION or MAVEN_VERSION). +|EXTRA_JAVA_VERSION|-|An additional (newer) version of java that will be used to run java-based IDEs (e.g. eclipse or intellij). +|«TOOL»_BUILD_OPTS|e.g.clean install|The arguments provided to the build-tool «TOOL» in order to run a build. +|«TOOL»_RELEASE_OPTS|e.g.clean deploy -Dchangelist= -Pdeploy|The arguments provided to the build-tool «TOOL» in order to perform a release build. +|DEVON_IDE_TRACE||If value is not an empty string, the devonfw-ide scripts will trace each script line executed. For bash two lines output: before and again after expansion. ATTENTION: This is not a regular variable working via devon.properties. Instead manually do export DEVON_IDE_TRACE=true in bash or DEVON_IDE_TRACE=true in windows CMD before running a devon command to get a trace log that you can provide to experts in order to trace down a bug and see what went wrong. +|== == == == == == == == == == == =

+
+
+ +
+
+

Devon CLI

+
+

The devonfw-ide is shipped with a central command devon. The setup will automatically register this command so it is available in any shell on your system. This page describes the Command Line Interface (CLI) of this command.

+
+
+
+

Devon

+
+

Without any argument the devon command will determine your DEVON_IDE_HOME and setup your environment variables automatically. In case you are not inside of a devonfw-ide folder the command will echo a message and do nothing.

+
+
+
+
[/]$ devon
+You are not inside a devon IDE installation: /
+[/]$ cd /projects/my-project/workspaces/test/my-git-repo
+[my-git-repo]$ devon
+devonfw-ide environment variables have been set for /projects/my-project in workspace main
+[my-git-repo]$ echo $DEVON_IDE_HOME
+/projects/my-project
+[my-git-repo]$ echo $JAVA_HOME
+/projects/my-project/software/java
+
+
+
+
+

Commandlets

+
+

The devon command supports a pluggable set of commandlets. Such commandlet is provided as first argument to the devon command and may take additional arguments:

+
+
+

devon «commandlet» [«arg»]*

+
+
+

Technically, a commandlet is a bash script located in $DEVON_IDE_HOME/scripts/command. So if you want to integrate another tool with devonfw-ide we are awaiting your pull-request. +Every commandlet takes the following generic arguments:

+
+
+
Generic arguments of every commandlet
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|-b or --batch |run in non-interactive mode (do not ask any questions). +|-q or --quiet |be quiet and avoid output. +|== == == == == == == == == == == =

+
+
+
+

Command-wrapper

+
+

For many commandlets the devon command acts as a wrapper. +Similar to mvnw or gradlew you can use it as a proxy command. +Therefore devon mvn clean install will be the same as mvn clean install. +The benefit when using devon as wrapper is that it will even work when the command (mvn, node, npm, etc.) is not on your PATH variable or even not yet installed. +We see the main benefit in this for writing portable scripts that you may commit to your git repository and that will then run everywhere and will lazily install the required tools on the fly. +In your daily usage you can and surely should avoid to always type devon as prefix to every command. +However, when you automate and want to avoid "command not found" errors, you can simply prefix the command with devon.

+
+
+
+

Commandlet overview

+
+

The following commandlets are currently available:

+
+
+ +
+ +
+
build
+
+

The build commandlet is an abstraction of build systems like maven, gradle, yarn, npm, etc. +It will auto-detect your build-system (via existence of files like pom.xml, package.json, etc.). According to this detection, it will simply delegate to the according commandlet of the specific build system. If that build-system is not yet available it will be downloaded and installed automatically.

+
+
+

So devon build allows users to build any project without bothering about the build-system. Further specific build options can be configured per project. This makes devon build a universal part of every definition of done. Before pushing your changes, please always run the following command to verify the build:

+
+
+

devon build

+
+
+

You may also supply additional arguments as devon build «args». This will simply delegate these arguments to the detected build command (e.g. call mvn «args»).

+
+ +
+
+
Docker
+
+

The Docker commandlet allows to install and use Docker. +On Windows, WSL 2 (Windows Subsystem for Linux) has to be installed properly as a prerequisite.

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon docker «args») are explained by the following table:

+
+
+
Usage of devon docker
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Docker (install and verify) as per above flow. +|«args» |call docker with the specified arguments. Call docker help for details or use docker directly as preferred." («args») +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

The Docker commandlet will install Docker automatically. +Please note that besides the sandbox concept of devonfw-ide this is a global installation on your system. +When uninstalling devonfw-ide, you may have to manually uninstall Docker and Kubernetes if you do not need it anymore.

+
+
+
+
requirements
+
+

Running Docker and especially Kubernetes on your machine in the background will require quite some resources. +This will allocate at least 2GB of additional RAM.

+
+
+

You will need at least 8GB of total RAM while we recommend to use 16GB+.

+
+
+

You may also tune and scale it to your needs. +When using Docker Desktop (Windows or MacOS) simply go to the resources tab in the settings. +It will depend on your usage frequency if you want to have it running in the background all the time. +This is a balance between resource utilization and convenience. +If you use Docker and Kubernetes on your local machine on a daily basis this makes sense.

+
+
+

In case you only use Docker rarely, you can save resources by stopping it when not needed after it has been installed.

+
+
+
+
Windows and macOS
+
+

To enable or disable autostart, you can launch Docker Desktop on Windows or MacOS and go to the Preferences (gear icon in the title bar). Then in the General tab you can check or uncheck the option Start Docker Desktop when you login (see also here). When autostart is disabled and you launch Docker Desktop it will notice and ask you to start the service or do this automatically for you. +On Windows you can also manually tweak this:

+
+
+
    +
  • +

    Hit [windows][r]

    +
  • +
  • +

    Enter services.msc

    +
  • +
  • +

    Confirm with OK

    +
  • +
  • +

    In the services app search for the Docker Desktop Service in the list and select it.

    +
  • +
  • +

    Now you can start or stop the service by clicking on the according link text.

    +
  • +
  • +

    Also when right clicking on Docker Desktop Service and selecting Options from the context-menu, you can change the start type to automatic or manual.

    +
  • +
+
+
+
+
== Mac M1
+
+

In case you have a new Mac with M1 CPU, we automatically download and install the according ARM version of Docker Desktop for macOS. +However, if you use Docker and search for images you may end up with errors like:

+
+
+
+
docker: no matching manifest for linux/arm64/v8 in the manifest list entries.
+
+
+
+

So with M1 CPU you may need to add --platform linux/x86_64 as option to your Docker command to find the expected container image.

+
+
+
+
Linux
+
+

There is no Docker Desktop for Linux. +As Docker initially comes from the Linux world, it is easy to set it up on a Linux machine and use it from the commandline. +Therefore we do not install a GUI for you in case you are a Linux user. +In case you need a GUI for Docker and Kubernetes on Linux you can choose from the following options:

+
+
+ +
+
+
+
usage
+
+

Once installed via setup, you can run Docker directly from any shell of your OS. +Run docker help to get started and use the online documentations and resources on the web to get familiar with Docker. +It is not our intention to repeat this here.

+
+
+

Please note that the docker commandlet is a command wrapper.

+
+ +
+
+
eclipse
+
+

The eclipse commandlet allows to install, configure, and launch the Eclipse IDE. +To launch eclipse for your current workspace and devonfw-ide installation simply run: +devon eclipse

+
+
+

You may also supply additional arguments as devon eclipse «args». These are explained by the following table:

+
+
+
Usage of devon eclipse
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then the command will be invoked for each workspace +|setup |setup Eclipse (install or update) +|add-plugin «id» [«url»]|install an additional plugin +|run |launch Eclipse (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add |reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+

There are variables that can be used for Eclipse. These are explained by the following table:

+
+
+
Variables of devonfw-ide for Eclipse
+

|== == == == == == == == == == == = +|Variable|Meaning +|ECLIPSE_VERSION|The version of the tool Eclipse to install and use. +|ECLIPSE_EDITION_TYPE|The edition of the tool Eclipse to install and use. You can choose between Java for standard edition or JEE for enterprise edition. +|*EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with Eclipse you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder eclipse/plugins (click on this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example tmterminal.properties:

+
+
+
+
plugin_url=http://download.eclipse.org/tm/terminal/marketplace
+plugin_id=org.eclipse.tm.terminal.feature.feature.group,org.eclipse.tm.terminal.view.feature.feature.group,org.eclipse.tm.terminal.control.feature.feature.group,org.eclipse.tm.terminal.connector.ssh.feature.feature.group,org.eclipse.tm.terminal.connector.telnet.feature.feature.group
+plugin_active=true
+
+
+
+

The variables are defined as follows:

+
+
+
    +
  • +

    plugin_url defines the URL of the Eclipse update site of the plugin

    +
  • +
  • +

    plugin_id defines the feature group ID(s) to install. To install multiple features/plugins provide a comma-separated list of IDs. If you want to customize devonfw-ide with new plugins you can first install them manually and then go to About Eclipse > Installation Details then you can filter for your newly installed plugin and find the values in the Id column. Copy & paste them from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise, developers can still install the plugin manually via devon eclipse add-plugin «plugin-name» from the config file settings/eclipse/plugins/«plugin-name».properties. See the settings/eclipse/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. This e.g. applies for devstyle that allows a real dark mode for eclipse and tunes the theming and layout of Eclipse in general. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually.

+
+
+

As the maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
legacy plugin config
+
+

For downward compatibility we still support the deprecated legacy configuration if the folder settings/eclipse/plugins does not exist: +The project configuration typically defines the plugins that will be installed via ECLIPSE_PLUGINS variable. Otherwise defaults from this eclipse commandlet will apply. +Be aware that this comes at your own risk and sometimes plugins can conflict and break your IDE.

+
+
+

Here is an example how a project can configure the plugins in its devon.properties inside the settings:

+
+
+
+
ECLIPSE_PLUGINS=("AnyEditTools.feature.group" "https://raw.githubusercontent.com/iloveeclipse/plugins/latest/" "com.ess.regexutil.feature.group" "http://regex-util.sourceforge.net/update/")
+
+
+
+

For the above listed plugins you can also use the short form:

+
+
+
+
ECLIPSE_PLUGINS=("anyedit" "" "regexutil" "")
+
+
+
+

Of course you may also mix plugin IDs with fully qualified plugins.

+
+
+
+
dictionary
+
+

Eclipse already comes with a built-in spellchecker. This is very helpful when writing comments. The default settings of devonfw-ide ship with a project specific dictionary file and according configurations to enable spellchecking and configuring this dictionary. +When typing JavaDoc, inline comments or other texts the spellchecker will underline unknown words in red. +If your cursor is located at such a word you can hit [Ctrl][1] to get a context menu with additional options. +There you can either choose similar correct words to correct a typo or you may even add the word (maybe a new business term) to your local dictionary.

+
+
+
+"Eclipse spellchecker" +
+
+
+

In the latter case, you should commit the changes to your settings so that it will be available to your entire team. +For further details about committing changes to the settings please consult the admin usage.

+
+
+
+
non-english dictionary
+
+

In case your project has to write documentation or text in languages other than English, you might want to prefill your project dictionary for that language. +Here we collect a list of such dictionaries that you can download and merge into your project dictionary:

+
+
+ +
+ +
+
+
gradle
+
+

The gradle commandlet allows to install, configure, and launch gradle. It is similar to gradle-wrapper. So calling devon gradle «args» is more or less the same as calling gradle «args» but with the benefit that the version of gradle preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon gradle «args») are explained by the following table:

+
+
+
Usage of devon gradle
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup gradle (install and verify), configurable via GRADLE_VERSION +|«args» |run gradle with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
help
+
+

The help commandlet provides help for the CLI.

+
+
+
Usage of devon help
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |Print general help +|«command» |Print help for the commandlet «command». +|== == == == == == == == == == == =

+
+
+

Please note that devon help «command» will do the same as devon «command» help.

+
+ +
+
+
ide
+
+

The ide commandlet manages your devonfw-ide. +You need to supply additional arguments as devon ide «args». These are explained by the following table:

+
+
+
Usage of devon ide
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup [«SETTINGS_URL»] |setup devonfw-ide (cloning the settings from the given URL, optionally from specific branch URL#branch) +|update [«package»] |update devonfw-ide +|update scripts [to «version»] |update devonfw-ide +|uninstall |uninstall devonfw-ide (if you want to remove it entirely from your system) +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

Run devon ide setup to initially setup your devonfw-ide. It is recommended to run the setup script in the top-level directory ($DEVON_IDE_HOME). However, in case you want to skip some system specific integration, you may also run this command directly instead. The setup only needs to be called once after a new devonfw-ide instance has been created. It will follow this process:

+
+
+
    +
  • +

    install the devon command on your system (if not already installed).

    +
  • +
  • +

    clone the settings (you may provide a git URL directly as argument or you will be prompted for it).

    +
  • +
  • +

    install all required software from DEVON_IDE_TOOLS variable (if not already installed).

    +
  • +
  • +

    configure all these tools

    +
  • +
  • +

    create IDE launch scripts

    +
  • +
  • +

    perform OS specific system integration such as Windows Explorer integration (only done from setup script and not from devon ide setup)

    +
  • +
+
+
+
+
update
+
+

Run devon ide update to update your devonfw-ide. This will check for updates and install them automatically. +The optional extra argument («package») behaves as follows:

+
+
+
    +
  • +

    scripts: check if a new version of devonfw-ide-scripts is available. If so it will be downloaded and installed. As Windows is using file-locks, it is tricky to update a script while it is executed. Therefore, we update the scripts folder as an async background task and have to abort further processing at this point on windows as a workaround.

    +
  • +
  • +

    settings: update the settings (git pull).

    +
  • +
  • +

    software: update the software (e.g. if versions have changed via scripts or settings update).

    +
  • +
  • +

    projects: update the projects (checkout and import repositories into workspace/IDEs).

    +
  • +
  • +

    all: do all the above sequentially.

    +
  • +
  • +

    none: settings and software are updated by default if no extra argument is given. This is the regular usage for project developers. Only perform an update of scripts when you are requested to do so by your technical lead. Bigger projects especially need to test updates before rolling them out to the entire team. If developers always updated the latest release of the scripts which is released globally, some project functionality would break causing problems and extra efforts in the teams.

    +
  • +
+
+
+

In order to update to a specific version of scripts an explicit version can be specified after the additional to argument:

+
+
+
+
devon ide update scripts to 3.1.99
+
+
+
+

The above example will update to the exact version 3.1.99 no matter if this is an upgrade or a downgrade of your current installed version. +If you just use devon ide update scripts then the latest available version will be installed. In larger teams it is recommended to communicate exact version updates to avoid that a new release can interfere and break anything. Therefore, some pilot user will test a new version for the entire team and, only after a successful test, they will communicate to the team to update to that exact version by providing the complete command as in the above example.

+
+
+
+
uninstall
+
+

We hope you love devonfw-ide. However, if you don’t and want to get rid of it entirely and completely remove all integration, you can use this command:

+
+
+
+
devon ide uninstall
+
+
+
+

This will remove devonfw-ide from all central places of your OS (user home directory such as scripts, .devon, .bashrc, as well as windows registry, etc.). +However, it will not remove your current installations (or shared software folder). So after running this uninstall, simply remove your DEVON_IDE_HOME directory of all devonfw-ide installations and potential shared software folder. You may also want to clean up your ~/Downloads directory from files downloaded by devonfw-ide. We do not automate this as deleting a directory is a very simple manual step and we do not want to take responsibility for severe data loss if your workspaces contained valuable work.

+
+ +
+
+
intellij
+
+

The intellij commandlet allows to install, configure, and launch IntelliJ. +To launch IntelliJ for your current workspace and devonfw-ide installation, simply run: +devon intellij

+
+
+

You may also supply additional arguments as devon intellij «args». These are explained by the following table:

+
+
+
Usage of devon intellij
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then the command will be invoked for each workspace +|setup |setup IntelliJ (install or update) +|add-plugin «id»|install an additional plugin +|run |launch IntelliJ (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+

There are variables that can be used for IntelliJ. These are explained by the following table:

+
+
+
Variables of devonfw-ide for intelliJ
+

|== == == == == == == == == == == = +|Variable|Meaning +|INTELLIJ_VERSION|The version of the tool IntelliJ to install and use. +|INTELLIJ_EDITION_TYPE|The edition of the tool IntelliJ to install and use. The value C means Community edition and the value U means Ultimate edition. The Ultimate edition requires a license. The user has to buy the license separately and it is not part of devonfw-ide. The devonfw-ide only supports download and installation. +|*EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with IntelliJ you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder intellij/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example scala.properties:

+
+
+
+
plugin_id=org.intellij.scala
+plugin_active=false
+
+
+
+

The variables are defined as follows:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins use the search on https://plugins.jetbrains.com/idea_ce to find the plugin of your choice. Select the tab Versions and click on a version in the list. The plugin ID is displayed in the upper right corner. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon intellij add-plugin «plugin_id».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of IntelliJ. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+ +
+
+
ionic
+
+

The ionic commandlet allows to install, configure, and launch ionic (ionic-cli). Calling devon ionic «args» is more or less the same as calling ionic «args» but with some advanced features and ensuring that ionic is properly set up for your project.

+
+
+

The arguments (devon ionic «args») are explained by the following table:

+
+
+
Usage of devon ionic
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup ionic (install and verify), configurable via IONIC_VERSION +|create |Create a new devon4ng ionic project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ionic with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
jasypt
+
+

The jasypt commandlet allows to install jasypt and encrypt or decrypt secrets using strong encryption given a secure masterpassword. See also devon4j password encryption guide for further details.

+
+
+

The arguments (devon jasypt «args») are explained by the following table:

+
+
+
Usage of devon jasypt
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup jasypt (install and verify), configurable via JASYPT_VERSION +|encrypt |Encrypt a secret with a masterpassword +|decrypt |Decrypt an encrypted secret with a masterpassword +|== == == == == == == == == == == =

+
+
+
+
example
+
+
+
devon jasypt encrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: secret
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: secret
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+devon jasypt decrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+secret
+
+
+ +
+
+
java
+
+

The java commandlet allows to install and setup Java. Also it supports devon4j. +The arguments (devon java «args») are explained by the following table:

+
+
+
Usage of devon java
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup OpenJDK (install or update and verify), configurable via JAVA_VERSION (e.g. 8u242b08 or 11.0.6_10) +|create «args» |create a new Java project based on devon4j application template. If a single argument is provided, this is the package name and is automatically split into groupId and artifactId. Use -DdbType=«db» to choose the database (hana, oracle, mssql, postgresql, mariadb, mysql, h2, hsqldb). Any option starting with dash is passed as is. +|migrate [from «version»] [single] |migrate a devon4j project to the latest version. If for some reason the current devonfw version can not be auto-detected you may provide it manually after the 'from' argument. Also the 'single' option allows to migrate only to the next available version. +|cicd «args» |generate cicd files for the current devon4j project +|== == == == == == == == == == == =

+
+
+

Since 2021.12.003 an extra version of Java can be configured via EXTRA_JAVA_VERSION variable. This can be used to launch your IDE with a different (newer) version of Java but keeping the build of your project stable.

+
+
+
+
create
+
+

Examples for create a new devon4j application:

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create -Dversion=0.0.1-alpha1 com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 0.0.1-alpha1, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group demo-app
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId demo-app, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp -DartifactId=demo-app -DdbType=hana
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId demo-app, version 1.0.0-SNAPSHOT, and SAP hana database.

+
+
+
+
devon java create com.example.domain.myapp -DdbType=oracle -Dversion=0.0.1 com.example.group -Dbatch=batch
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 0.0.1, oracle database, and with a batch module.

+
+
+
+
migrate
+
+

Example for migrating a devon4j application:

+
+
+
+
devon java migrate
+
+
+
+

Will migrate current devon4j application to the latest version available.

+
+ +
+
+
jenkins
+
+

The jenkins commandlet allows to install, configure, and launch Jenkins.

+
+
+
Usage of devon jenkins
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup Jenkins (install and verify) +|start |Start your local Jenkins server +|stop |Stop your local Jenkins server +|add |Add current project as CI job to your local Jenkins +|== == == == == == == == == == == =

+
+ +
+
+
Kubernetes
+
+

The kubectl commandlet allows to install and use kubernetes. +On Windows, WSL 2 (Windows Subsystem for Linux) has to be installed properly as a prerequisite. +The setup on Windows will then install kubernetes with K3D. K3D will create a cluster with a single node with the default name "devonfw-cluster"

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon kubectl «args») are explained by the following table:

+
+
+
Usage of devon kubectl
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Kubernetes (install and verify) as per above flow. +|«args» |call kubectl with the specified arguments. Call kubectl help for details or use kubectl directly as preferred. +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

Please note that on Windows and macOS, Kubernetes support comes together with Docker Desktop that is installed via docker commandlet. +When you have installed and launched Docker Desktop, you can once enable Kubernetes in the Preferences.

+
+
+

On Linux however, Kubernetes is installed separately by this commandlet.

+
+
+
+
usage
+
+

Once installed via setup, you can run kubectl directly from any shell of your OS. +Run kubectl help to get started and use the online documentation and resources on the web to get familiar with Kubernetes. +It is not our intention to repeat this here.

+
+
+

Please note that the kubectl commandlet is a command wrapper.

+
+ +
+
+
mvn
+
+

The mvn commandlet allows to install, configure, and launch maven. It is similar to maven-wrapper and mdub. So calling devon mvn «args» is more or less the same as calling mvn «args» but with the benefit that the version of maven preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon mvn «args») are explained by the following table:

+
+
+
Usage of devon mvn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via MVN_BUILD_OPTS +|setup |setup Maven (install and verify), configurable via MAVEN_VERSION +|get-version |Print the version of your current project. Will consolidate the version for multi-module projects ignoring dev[-SNAPSHOT] versions and fail on mixed versions. +|set-version «nv» [«cv»] |Set the version of your current project to «nv» (assuming your current version is «cv»). +|check-no-snapshots |Check if no «version»-SNAPSHOT dependencies are used. +|check-top-level-project |Check if you are running on a top-level project or fail if in a module or no maven project at all. +|release |Start a clean deploy release build, configurable via MVN_RELEASE_OPTS +|«args» |run maven with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
ng
+
+

The ng commandlet allows to install, configure, and launch ng (angular-cli). Calling devon ng «args» is more or less the same as calling ng «args» but with some advanced features and ensuring that ng is properly set up for your project.

+
+
+

The arguments (devon ng «args») are explained by the following table:

+
+
+
Usage of devon ng
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup yarn (install and verify), configurable via NG_VERSION +|create |Create a new devon4ng project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ng with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
node
+
+

The node commandlet allows to install and setup node.js. +The arguments (devon node «args») are explained by the following table:

+
+
+
Usage of devon node
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup node.js (install and verify), configurable via NODE_VERSION +|create «name» [«args»] | create a new devon4node application (same as devon4node new) +|generate «s» [«args»] | generate devon4node components using the schematic «s» (same as devon4node generate) +|db «c» [«args»] | execute a TypeORM command «c» (same as devon4node db) +|cicd «args» |generate cicd files for the current devon4node project +|«args» | call NodeJS with the specified arguments +|== == == == == == == == == == == =

+
+ +
+
+
npm
+
+

The npm commandlet allows to install, configure, and launch npm. Calling devon npm «args» is more or less the same as calling npm «args» but with the benefit that the version of npm preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon npm «args») are explained by the following table:

+
+
+
Usage of devon npm
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via NPM_BUILD_OPTS +|setup |setup NPM (install and verify), configurable via NPM_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |Start a clean deploy release build, configurable via NPM_RELEASE_OPTS +|«args» |run NPM with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
release
+
+

Create a release in a standardized way including the following steps:

+
+
+
    +
  • +

    verify the current project (no local changes, etc.)

    +
  • +
  • +

    warn if «version»-SNAPSHOT dependencies are used

    +
  • +
  • +

    determine «version» (if currently «version»-SNAPSHOT) and print out release information.

    +
  • +
  • +

    ask user for confirmation

    +
  • +
  • +

    bump release to «version» in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    create annotated tag for your release as release/«version»

    +
  • +
  • +

    invoke deployment on build-system

    +
  • +
  • +

    set next version as («version»+1)-SNAPSHOT in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    push your changes

    +
  • +
+
+
+
Usage of devon release
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|…​ |any optional argument will directly be passed to the actual command to build the deployment +|== == == == == == == == == == == =

+
+
+
+
Build-Tools
+
+

This release commandlet utilizes the build commandlet to support multiple build-tools such as maven, gradle, or npm. Each of those commandlets should respect the variable «TOOL»_RELEASE_OPTS to customize the parameters for the release build.

+
+
+

So e.g. if a pom.xml is detected, maven will be used. In this example the variable MVN_RELEASE_OPTS is used that defaults to clean deploy -Dchangelist= -Pdeploy. +If you provide a specific argument this will be passed additionally. +So if you invoke the command devon release -P myProfile, the above step invoke deployment on build-system would technically call this:

+
+
+
+
mvn clean deploy -Dchangelist= -Pdeploy -P myProfile
+
+
+
+

Please also note that it is very tricky to determine and modify the version of a project in a fully generic way. +Even though we try our best to support different scenarios, we can not ensure this is working for edge-cases. +Therefore, we strongly encourage to follow best practices such as ci-friendly maven. +Further, sticking to the defaults and follow the devonfw standard to name the profile for custom goals in deployment simply deploy is recommended.

+
+ +
+
+
sonar
+
+

The sonar commandlet allows to install, configure, and launch SonarQube.

+
+
+
Usage of devon sonar
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup SonarQube (install and verify) +|start |Start your local SonarQube server +|stop |Stop your local SonarQube server +|analyze |Analyze current project with SonarQube +|== == == == == == == == == == == =

+
+ +
+
+
vscode
+
+

The vscode commandlet allows to install, configure, and launch Visual Studio Code. +To launch VSCode for your current workspace and devonfw-ide installation, simply run: +devon vscode

+
+
+

You may also supply additional arguments as devon vscode «args». These are explained by the following table:

+
+
+
Usage of devon vscode
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then the command will be invoked for each workspace +|setup |setup VSCode (install or update) +|add-plugin «id»|install an additional plugin (extension) +|run |launch VSCode (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with VS Code you need plugins (called extensions in VS Code). Of course devonfw-ide can automate this for you: +In your settings git repository create a folder vscode/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example devonfw-extension-pack.properties:

+
+
+
+
plugin_id=devonfw.devonfw-extension-pack
+plugin_active=true
+
+
+
+

The variables are defined as following:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins click on Extensions at the bottom of the left navigation icon bar in VS code. Then use the search to find the plugin of your choice. If you click on it the plugin ID is displayed in grey beside the official title at the top of the plugin details page. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon vscode add-plugin «plugin-name» from the config file settings/vscode/plugins/«plugin-name».properties. See the settings/vscode/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of VS code. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
cleaning plugins on update
+
+

If you want to strictly manage the plugins for VS code in your project, you can create or edit the file settings/vscode/plugins in your settings and add this variable:

+
+
+
+
clean_plugins_on_update=true
+
+
+
+

This will wipe all plugins when an update of VS code is performed (e.g. via devon ide update) and reinstall all configured plugins. This gives you more control over the governance of the plugins and allows to remove a plugin later during the project lifecycle. However, this will delete all manually installed plugins automatically without asking.

+
+ +
+
+
yarn
+
+

The yarn commandlet allows to install, configure, and launch yarn. Calling devon yarn «args» is more or less the same as calling yarn «args» but with the benefit that the version of yarn preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon yarn «args») are explained by the following table:

+
+
+
Usage of devon yarn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via YARN_BUILD_OPTS +|setup |setup yarn (install and verify), configurable via YARN_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |start a clean deploy release build, configurable via YARN_RELEASE_OPTS +|«args» |run yarn with the given arguments («args») +|== == == == == == == == == == == =

+
+
+ +
+
+
+

Structure

+
+

The directory layout of your devonfw-ide will look like this:

+
+
+
File structure of your devonfw-ide
+
+
/ projects (or C:\Projects, etc.)
+└──/ my-project ($DEVON_IDE_HOME)
+    ├──/ conf
+    ├──/ log
+    ├──/ scripts
+    ├──/ settings
+    ├──/ software
+    ├──/ system
+    ├──/ updates
+    ├──/ workspaces
+    ├── setup
+    ├── setup.bat
+    └── devon-ide-doc.pdf
+
+
+
+

The elements of the above structure are described in the individual sections. As they are hyperlinks you can simply click on them to get more details.

+
+ +
+
conf
+
+

This folder contains configurations for your IDE:

+
+
+
File structure of the conf folder
+
+
/ conf
+├──/ .m2
+│  ├──/ repository
+│  │  ├──/ ant
+│  │  ├──/ ...
+│  │  └──/ zw
+│  ├── settings-security.xml
+│  └── settings.xml
+├──/ .sonar
+├──/ ...
+└── variables
+
+
+
+

The .m2 folder is used for configurations of maven. It contains the local repository folder used as cache for artifacts downloaded and installed by maven (see also maven repositories). +Further, there are two configuration files for maven:

+
+
+
    +
  • +

    settings.xml initialized from a template from your devonfw-ide [settings]. You may customize this to your needs (configuring HTTP proxies, credentials, or other user-specific settings). Secrets can be specified as $[«variable.name»] and will be prompted, encrypted and replaced automatically during the setup (unless in batch mode). Please note that this process is skipped in batch mode and also if you use the default settings URL (for simplicity of testing). To make use of this feature simply fork or copy the settings to your own git repo. In case your credentials have changed or you made a typo, you can simply redo this step by first moving your ${DEVON_IDE_HOME}/conf/.m2/settings.xml file to a temporary folder and then calling devon mvn setup.

    +
  • +
  • +

    settings-security.xml is auto-generated for you by devonfw-ide with a random password. This should make it easier for devonfw-ide users to use password encryption and never add passwords in plain text for better security.

    +
  • +
+
+
+

Finally, there is a file variables for the user-specific configuration of devonfw-ide.

+
+ +
+
+
log
+
+

The log directory is used to store log files e.g. for the IDE configurator. You may look here for debug information if something goes wrong.

+
+ +
+
+
scripts
+
+

This directory is the heart of the devonfw-ide and contains the required scripts.

+
+
+
File structure of the scripts folder
+
+
/scripts
+├──/ command
+│  ├── build
+│  ├── docker
+│  ├── eclipse
+│  ├── gradle
+│  ├── help
+│  ├── ide
+│  ├── intellij
+│  ├── ionic
+│  ├── jasypt
+│  ├── java
+│  ├── jenkins
+│  ├── kubectl
+│  ├── mvn
+│  ├── ng
+│  ├── node
+│  ├── npm
+│  ├── project
+│  ├── release
+│  ├── sonar
+│  ├── vscode
+│  └── yarn
+├── devon
+├── devon.bat
+├── environment-project
+├── environment-project.bat
+├── functions
+└── devon.properties
+
+
+
+

The command folder contains the commandlets. +The devon script is the key command line interface for devonfw-ide. +There is also devon.bat that can be used in cmd or PowerShell. +As the devon CLI can be used as a global command on your computer from any directory and gets installed centrally, it aims to be stable, minimal, and lightweight. +The key logic to set up the environment variables is therefore in a separate script environment-project and its Windows variant environment-project.bat inside this scripts folder. +The file functions contains a collection of reusable bash functions. +These are sourced and used by the commandlets. +Finally the devon.properties file contains defaults for the general configuration of devonfw-ide.

+
+ +
+
+
settings
+
+

The devonfw-ide requires settings with configuration templates for the arbitrary tools.

+
+
+

To get an initial set of these settings we provide the default ide-settings as an initial package. These are also released so you can download the latest stable or any history version at maven central.

+
+
+

To test devonfw-ide or for very small projects you can also use the latest default settings (just hit return when setup is asking for the Settings URL). +However, for collaborative projects we strongly encourage you to distribute and maintain the settings via a dedicated and project specific git repository. +This gives you the freedom to control and manage the tools with their versions and configurations during the project lifecycle. +Therefore simply follow the admin usage guide.

+
+
+
+
Structure
+
+

The settings folder (see SETTINGS_PATH) has to follow this file structure:

+
+
+
File structure of settings
+
+
/settings
+├──/ devon
+│  ├──/ conf
+│  │  ├──/ .m2
+│  │  │  └── settings.xml
+│  │  ├──/ npm
+│  │  │  └── .npmrc
+│  │  └── devon.properties
+├──/ eclipse
+│  ├──/ workspace
+│  │  ├──/ setup
+│  │  └──/ update
+│  ├── lifecycle-mapping-metadata.xml
+│  └── project.dictionary
+├──/ ...
+├──/ sonarqube
+│  └──/ profiles
+│     ├── Devon-C#.xml
+│     ├── ...
+│     └── Devon-XML.xml
+├──/ vscode
+│  └──/ workspace
+│     ├──/ setup
+│     └──/ update
+└── devon.properties
+
+
+
+

As you can see, the settings folder contains sub-folders for tools of the IDE. +So the devon folder contains devon.properties files for the configuration of your environment. +Further, for the IDEs such as eclipse or vscode, the according folders contain the templates to manage the workspace via our configurator.

+
+
+
+
Configuration Philosophy
+
+

Different tools and configuration files require a different handling:

+
+
+
    +
  • +

    Where suitable, we directly use these configurations from your settings (e.g. for eclipse/lifecycle-mapping-metadata.xml, or eclipse/project.dictionary).

    +
  • +
  • +

The devon folder in settings contains templates for configuration files. These are copied to the devonfw-ide installation during setup (if no such file already exists). In this way the settings repository can provide reasonable defaults but allows the user to take over control and customize to his personal needs (e.g. .m2/settings.xml).

    +
  • +
  • +

    Other configurations need to be imported manually. To avoid manual steps and simplify use we try to automate as much as possible. This currently applies to sonarqube profiles but will be automated with sonar-devon4j-plugin in the future.

    +
  • +
  • +

    For tools with complex configuration structures like eclipse, intellij, or vscode we provide a smart mechanism via our configurator.

    +
  • +
+
+
+
+
Customize Settings
+
+

You can easily customize these settings for the requirements of your project. We suggest that one team member is responsible to ensure that everything stays consistent and works.

+
+
+

You may also create new sub-folders in settings and put individual items according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth to share in your team. However, to share and maintain knowledge we recommend to use a wiki.

+
+ +
+
+
software
+
+

The software folder contains the third party tools for your IDE such as maven, npm, java, etc. +With respect to the licensing terms you may create a custom archive containing a devonfw-ide together with the required software. +However, to be platform independent and allow lightweight updates, the devonfw-ide is capable to download and install the software automatically for you.

+
+
+
+
Repository
+
+

By default, software is downloaded via the internet from public download URLs of the according tools. However, some projects may need specific tools or tool versions that are not publicly available. +In such case, they can create their own software repository (e.g. in a VPN) and configure the base URL of it via DEVON_SOFTWARE_REPOSITORY variable. +Then, devonfw-ide will download all software from this repository only instead of the default public download URLs. +This repository (URL) should be accessible within your network via HTTPS (or HTTP) and without any authentication. +The repository needs to have the following structure:

+
+
+
+
${DEVON_SOFTWARE_REPOSITORY}/«tool»/«version»/«tool»-«version»[-«os»].tgz
+
+
+
+

So for every tool «tool» (java, maven, vscode, eclipse, etc.) you need to provide a folder in your repository. +Within this folder for every supported version «version» you need a subfolder. +This subfolder needs to contain the tool in that version for every operating system «os» (windows, linux, or mac - omitted if platform independent, e.g. for maven).

+
+
+
+
Shared
+
+

By default, each installation of devonfw-ide has its own physical installations of the required tools in the desired versions stored in its local software folder. +While this is great for isolation of devonfw-ide installations and to prevent side-effects, it can cause a huge waste of disc resources in case you are having many installations of devonfw-ide. +If you are a power-user of devonfw-ide with more than ten or even up to hundreds of installations on your machine, you might love to share installations of a software tool in a particular version between multiple devonfw-ide installations.

+
+
+ + + + + +
+ + +If you use this power-feature you are taking responsibility for side-effects and should not expect support. Also if you are using Windows please read Symlinks in Windows and make your mind if you really want to do so. You might also use this hint and maintain it manually without enabling the following feature. +
+
+
+

In order to do so, you only need to configure the variable DEVON_SOFTWARE_PATH in your ~/devon.properties pointing to an existing directory on your disc (e.g. /projects/software or C:\projects\software). +Then devonfw-ide will install required software into ${DEVON_SOFTWARE_PATH}/${software_name}/${software_version} as needed and create a symbolic link to it in ${DEVON_IDE_HOME}/software/${software_name}.

+
+
+

As a benefit, another devonfw-ide installation using the same software with the same version can re-use the existing installation and only needs to create the symbolic link. No more waste of having many identical JDK installations on your disc.

+
+
+

As a drawback, you need to be aware that specific tools may be "manipulated" after installation. +The most common case is that a tool allows to install plugins or extensions such as all IDEs do. Such "manipulations" will cause side-effects between the different devonfw-ide installations sharing the same version of that tool. +While this can also be a benefit it may also cause trouble. +If you have a sensitive project that should not be affected by such side-effects, you may again override the DEVON_SOFTWARE_PATH variable to the empty value in your ${DEVON_IDE_HOME}/conf/devon.properties of that sensitive installation:

+
+
+
+
DEVON_SOFTWARE_PATH=
+
+
+
+

This will disable this feature particularly for that specific sensitive devonfw-ide installation but let you use it for all other ones.

+
+
+
+
Custom
+
+

In some cases, a project might need one or multiple (proprietary) tools that are not supported by devonfw-ide. A very simple solution is to get a release of devonfw-ide and add the tool(s) to the software folder and then distribute this modified release to your team. However, this has several drawbacks as you then have a fork of devonfw-ide and will lose your tool(s) when updating to a new release.

+
+
+

As a solution for this need, devonfw-ide lets you configure custom tools via the DEVON_IDE_CUSTOM_TOOLS variable. It can be defined in devon.properties of your settings git repository as an array of the custom tools you need to add. +For each entry the following applies:

+
+
+
    +
  • +

    It needs to have the form «tool»:«version»[:all][:«repository-url»]

    +
  • +
  • +

    The first entry must have the «repository-url» included which is used as default

    +
  • +
  • +

    Further entries will inherit this default if omitted

    +
  • +
  • +

    This URL is used in the same way as described above for a software repository.

    +
  • +
  • +

    The DEVON_SOFTWARE_REPOSITORY variable is ignored by this feature.

    +
  • +
  • +

    The optional infix :all is used to indicate that the tool is platform independent. Otherwise, an OS specific infix is appended to the URL file to download for your platform (windows, linux, or mac).

    +
  • +
+
+
+

As an example, we define it in ${DEVON_IDE_HOME}/settings/devon.properties:

+
+
+
+
DEVON_IDE_CUSTOM_TOOLS=(jboss-eap:7.1.4.GA:all:https://host.tld/projects/my-project firefox:70.0.1)
+
+
+
+

This will download and extract the following content to your software folder:

+
+ +
+

Please note that if you are not using windows, the -windows suffix will be -mac or -linux.

+
+ +
+
+
system
+
+

The system folder contains documentation and solutions for operation system specific integration. Please have a look to get the maximum out of devonfw-ide and become a very efficient power user.

+
+ +
+
+
updates
+
+

The updates folder is used for temporary data. This includes:

+
+
+
    +
  • +

    extracted archives for installation and updates

    +
  • +
  • +

    backups of old content on updates to prevent data loss

    +
  • +
+
+
+

If all works fine you may clean this folder to save some kilo- or mega-bytes. Otherwise, you can ignore it unless you are looking for a backup after a failed or unplanned upgrade.

+
+ +
+
+
workspaces
+
+

The workspaces folder contains folders for your active work. There is a workspace folder main dedicated for your primary work. You may do all your work inside the main workspace. Also, you are free to create any number of additional workspace folders named as you like (e.g. test, release, testing, my-sub-project, etc.). Using multiple workspaces is especially relevant for Eclipse as each workspace has its own Eclipse runtime instance and configuration.

+
+
+

Within the workspace folder (e.g. workspaces/main) you are again free to create sub-folders for (sub-)projects according to your needs. We assume that in most cases you clone git repositories here. The following structure shows an example layout for devonfw:

+
+
+
File structure of workspaces
+
+
/ workspaces
+├──/ main
+│  ├──/ .metadata
+│  ├──/ ide
+│  ├──/ devon4j
+│  └──/ my-thai-star
+└──/ stable
+   ├──/ .metadata
+   ├──/ ide
+   └──/ devon4j
+
+
+
+

In the main workspace you may find the cloned forks for regular work (in the example e.g. devon4j) as a base to create pull-requests while in the stable workspace there is a clone of devon4j from the official devon4j. +However, this is just an example. Some people like to create separate workspaces for development and maintenance branches with git. Other people just switch between those via git checkout.

+
+ +
+
+
Project import
+
+

The devonfw-ide supports automatically checking out and importing required projects into your IDE during setup. To configure this you put a .properties file for each desired project into the projects sub-folder in your settings. Each .properties file describes one "project" which you would like to check out and (potentially) import:

+
+
+
+
path=myproject
+workingsets=Set1,Set2
+workspace=example
+git.url=http://github.com/someorg/someproject
+git.branch=develop
+build.path=.
+build.cmd=mvn -DskipTests=true -Darchetype.test.skip=true clean install
+eclipse=import
+active=true
+
+
+
+
+
.Variables of project import
+
+
+
+

|== = +|Variable|Value|Meaning +|path|e.g. myproject, will clone into ${WORKSPACE_PATH}/myproject|(required) Path into which the project is cloned. This path is relative to the workspace. +|workingsets|e.g. ws1,ws2|(optional) This will create working sets (in eclipse). Each module (eclipse project) of this project will be part of all these working sets. Working sets will be automatically created if necessary. +|workspace|main|Workspace to use for checkout and import. Default is main. +|git.url|e.g. http://github.com/someorg/someproject|(required) Git URL to use for cloning the project. +|git.branch|e.g. develop|(optional) Git branch to checkout. Git default branch is default. +|build.path|e.g. . (default)|(optional) The directory inside path where to trigger an initial build after clone or pull (if build.cmd is set). For a regular project use . to build top-level project. +|build.cmd +|e.g. mvn -DskipTests=true -Darchetype.test.skip=true clean install +|(optional) The devonfw command to invoke to build the project after clone or pull. If omitted no build is triggered. +|eclipse|e.g. import|(optional) Desired action for eclipse IDE. If you put import here all modules (eclipse projects) in the current project will be imported into eclipse. If you leave this out or put any other value for this parameter, no change in eclipse is done. +|active|true|(optional) If set to false the project is skipped during the setup. +|== =

+
+
+

Please note that the .properties file is parsed via shell and not via java. So be careful with "advanced" features .properties files normally support.

+
+
+
+
+
+
+

Advanced Features

+ +
+

Cross-Platform Tooling

+ +
+
+

Git Client

+
+

If you are looking for a git client that works cross-platform we recommend to use Fork.

+
+
+
+

Draw Diagrams

+
+

To draw diagrams for your project or for blueprints in devonfw, we recommend the following cross-platform tools:

+
+
+
    +
  • +

    draw.io is a powerful generic vector painting program (similar to visio). You can get a free open-source edition for your desktop from here.

    +
  • +
  • +

    ObjectAid is a nice and easy to use eclipse plugin that you can use to quickly create UML diagrams from existing code. While class-diagrams are supported for free, you need to buy a license if you want to use the other diagram types.

    +
  • +
  • +

    PlantUML is a great tool that can render UML diagrams from simple markup that can be easily managed in git or other version-control systems together with your code. Its simplicity allows branching and merging unlike other greedy binary UML data-formats.

    +
  • +
+
+
+
+

Browser Plugins

+
+

There are tons of helpful browser plugins out there and it might be a matter of personal taste what you like to have installed. However, as we are heavily using github we want to promote octotree. +In case you also work with ZenHub you might want to install the Zenhub Browser Extension.

+
+ +
+
+

Windows Tooling

+ +
+
+

Installing software

+
+

The devon IDE already contains a lot of software. But if you need more, here are some ways to get it easily:

+
+
+
+

Chocolatey

+
+

Chocolatey is a repository for free and open source software similar to the repositories known from Linux, like apt, apk, pacman, …​

+
+
+
+

Winget

+
+

Microsoft is also working on a repository for Windows called winget. It is currently in alpha state, but is expected to be integrated in the upcoming Windows 11.

+
+
+
+

Integration into Windows-Explorer

+
+

After you have set up your devonfw-ide on a windows machine, +you already have windows-explorer integration out-of-the-box. +Just right-click on the folder you would like to open in a terminal and choose from the context menu:

+
+
+
    +
  • +

    Git Bash

    +
  • +
  • +

    Open devonfw cmd shell here

    +
  • +
  • +

    Open devonfw PowerShell here

    +
  • +
  • +

    Open devonfw Cygwin Bash here (only if cygwin was installed during setup)

    +
  • +
+
+
+
+

Tabs everywhere

+
+

Many people got used to tabs that have been introduced by all major browsers:

+
+
+
+tabs in firefox +
+
Figure 1. Tabs in Firefox
+
+
+

This nice feature can be added to many other tools.

+
+
+
+

Tabs for Windows Explorer

+
+

If you want to have tabs for windows explorer simply install Clover

+
+
+
+tabs in windows explorer +
+
Figure 2. Tabs in Windows Explorer
+
+
+
+

Tabs for SSH

+
+

If you want to have tabs for your SSH client Putty (or even better Kitty that comes with WinSCP integration) you simply install SuperPutty. +BTW: Windows 10 already has an SSH client included.

+
+
+
+tabs for SSH sessions +
+
Figure 3. Tabs for SSH
+
+
+
+

Tabs for CMD

+
+

If you want to have tabs for your windows command-line you simply install ConEmu. Here you can also add other shells like Putty. +Also you should have a look at the new Windows Terminal which also supports tabs.

+
+
+
+tabs for windows shells +
+
Figure 4. Tabs for CMD
+
+
+

See integration to make ConEmu work flawless with devonfw-ide.

+
+
+
+

Windows Helpers

+ +
+
+

Handle passwords

+
+

Do you want complex passwords that differ for each account for security? Do you only want to remember a single password for simplicity? Do you want to have both? Then, you need to install KeePass right now.

+
+
+
+

Real text editor

+
+

A real developer needs a real text editor and not windows built in notepad. +The most common choice is Notepad++.

+
+
+
+

Real compression tool

+
+

Do you need to deal with ZIP files, TGZ, dpkg, etc.? Just install 7zip and forget about windows built-in ZIP support (that is buggy with long file paths, etc.).

+
+
+
+

Smarter clipboard

+
+

Do you want to paste something from the clipboard but meanwhile you had to copy something else? Just one of the many things you can easily do with ditto.

+
+
+
+

PowerToys

+
+

Microsoft provides some extensions to improve the workflow in windows called PowerToys. They include tools like a file renamer, a way to order your windows on the screen, a color picker and more.

+
+
+
+

Sysinternals Tools

+
+

A real developer will quickly notice that Windows built-in tools to analyze processes, network connections, autostarts, etc. are quite poor. So, what you really would like is the Sysinternals-Suite. You can make process-explorer your default task manager. Use autoruns to prevent nasty background things from being started automatically. Use tcpview to figure out which process is blocking port 8080, etc.

+
+
+
+

Cope with file locks

+
+

Did you ever fail to delete a file or directory that was locked by some process and you did not even know which one it was? +Then you might love IoBit Unlocker. +See also this article.

+
+
+
+ +
+

Are you used to symbolic and hard links in Linux? Do you have to work with Windows? Would you also like to have such links in Windows? Why not? Windows supports real links (not shortcuts like in other cases). +If you even want to have it integrated in windows explorer you might want to install linkshellextension. However, you might want to disable SmartMove in the configuration if you face strange performance issues when moving folders.

+
+
+
+

Linux

+
+

Install Cygwin and get your bash in windows with ssh-agent, awk, sed, tar, and all the tools you love (or hate). Windows 10 already includes Linux as an installable feature: WSL and, from version 2004 on, WSL2, which is a native Linux kernel running on Windows (in a lightweight VM).

+
+
+
+

X11

+
+

Do you want to connect via SSH and need to open an X11 app from the server? Do you want to see the GUI on your windows desktop? +No problem: Install VcXsrv.

+
+
+
+

Keyboard Freak

+
+

Are you a keyboard shortcut person? Do you want to have shortcuts for things like « and » ? +Then you should try AutoHotKey. +For the example (« and ») you can simply use this script to get started:

+
+
+
+
^<::Send {U+00AB}
+^+<::Send {U+00BB}
+
+
+
+

First, just press [ctrl][<] and [ctrl][>] ([ctrl][shift][<]). Next, create shortcuts to launch your IDE, to open your favorite tool, etc. +If you like a GUI to easily configure the scripts, that comes with a lot of extensions preinstalled, you should have a look at Ac’tive Aid.

+
+
+
+

Paint anywhere on your desktop

+
+

Do you collaborate sharing your screen, and want to mark a spot on top of what you see? Use Epic Pen to do just that.

+
+
+
+

Analyze graphs

+
+

Do you need to visualize complex graph structures? Convert them to Trivial Graph Format (.tgf), and run yEd to get an interactive visualization of your graph.

+
+
+
+

Up your screen capture game

+
+

Capture any part of your screen with a single click, directly upload to dropbox, or run a svn commit all in one go with Greenshot. Another screen capture tool where you can easily manage and edit your screenshots and also do screen recordings with is Screenpresso.

+
+
+
+

Fast Search in Windows

+
+

Everything is a desktop search utility for Windows that can rapidly find files and folders by name.

+
+ +
+
+

MacOS Tooling

+ +
+
+

Finder

+
+

If you want to open a terminal from a folder in Finder and automatically get your environment set properly for devonfw-ide you will find the perfect solution here.

+
+
+
+devonfw-ide integration in MacOS Finder +
+
+
+

So after installing (see below) the integration(s) provided here, you can easily open a terminal ready for your devonfw-ide:

+
+
+
    +
  • +

    right click ([control] + click) on file or folder in Finder

    +
  • +
  • +

    Expand the Quick-Actions sub-menu

    +
  • +
  • +

    Click on the desired action (e.g. Open devonfw-Terminal here)

    +
  • +
  • +

    Verify that your environment is properly initialized by invoking:

    +
    +
    +
    mvn -v
    +
    +
    +
  • +
+
+
+

To get this feature for macOS Terminal.app open Finder and run the workflow system/mac/terminal/Open_devonfw-Terminal_here.workflow (in ${DEVON_IDE_HOME}). For iTerm2.app (that can be installed from App Store) do the same with system/mac/iterm/Open_devonfw-iTerm_here.workflow.

+
+
+
+

Keyboard

+
+

Keyboard support is not an integration however, some users coming from other platforms may struggle with the way macOS deals with (external non-apple) keyboards. +So to make it short: if you are happy with your keyboard and shortcuts, you can skip all the following. +Otherwise, if you think that pressing keys like Home, End, etc. should just work as expected or pressing Alt Gr should allow you to type the special characters as printed on your German keyboard then here you will find a solution to your problems! +To get all automated you can just run the script system/mac/keyboard/install-mac-keyboard-support.sh (in ${DEVON_IDE_HOME}). +If you would like to understand what is going on, you want to customize the keyboard settings to your needs, or you want a keyboard layout other than German ISO, please read on.

+
+
+
+

Keyboard Layouts

+
+

Keyboard layouts allow a fine-grained mapping of each key on your keyboard to its resulting input character or behaviour. +They are macOS native features and do not need to have software running as a background service to make the keyboard mapping work (see Karabiner section below as an alternative). +They are provided as a so-called bundle (white lego brick icon). Like a macOS app this is a folder containing a Contents folder with a specific sub-folder structure. +In the Resources subfolder *.keylayout files are placed and define the exact mapping for the keyboard. +As an example we provide a Keyboard Layouts folder containing a bundle for a German keyboard mapping.

+
+
+

To install keyboard layouts simply double-click the bundle or copy it to ~/Library/Keyboard Layouts. +To actually use them go to System Preferences and select Keyboard. +Then, select the tab Input Sources. +With the + button you can add a keyboard layout for your daily usage with your Mac. +Please note that the keyboard layout shipped with devonfw-ide is called German-ISO and can be found in the Others section at the end of the list. +It can be used as an example or template, if you want to create your own layout.

+
+
+
+Keyboard Preferences / Input Sources +
+
+
+

When you have multiple mappings in place, on the top menu bar you will find a little icon next to the current time that allows you to switch between the keyboard layouts, which is very handy when you switch from your native MacBook keyboard to an external USB keyboard or vice versa. +Even for a pure MacOS geek this can be helpful in case a friend coming from Windows/Linux is supposed to type something on the Mac in a pair-programming session.

+
+
+

In our German keyboard mapping example you can use the keys like Alt Gr, etc. to type special characters as you would expect and as printed on your keyboard. +To make Pos1, End, etc. work properly across all apps please read on to the next section(s).

+
+
+

In case you would like to create your own keyboard layout you can of course edit the *.keylayout files in a text editor. +However, to make this much more comfortable, you can use the graphical editor tool Ukelele. +Besides the app itself, the Ukelele dmg file also contains a Documentation and a Resources folder. +The latter contains many keyboard layouts that you can use as a starting point.

+
+
+
+

Key Bindings

+
+

Still, various keyboard shortcuts might not work as expected for you. +Therefore, we provide you with an advanced configuration in the folder system/mac/keyboard/KeyBindings that you can copy to your ~/Library folder:

+
+
+
+
cd system/mac/keyboard/
+cp -r KeyBindings ~/Library
+
+
+
+

To make the changes work you need to log out and log in again or you can reboot. +After that, your Home (Pos1) and End buttons should work as expected including with selection via Shift and/or Command. +Also, you can use Command together with the left or right arrow key to move between words and combine it with Shift for selection. +As an example, for further customization you can press Command + < to type the unicode character «.

+
+
+

However, still some apps listen to keyboard events on a lower level and come with their own keyboard mappings. +In these apps you might still experience unexpected behaviour. +Solutions can be found in the following sub-sections.

+
+
+
+

Switch Control and Command

+
+

If you are used to windows or linux and get easily confused by the apple keyboard behaviour you might want to switch the Control and the Option key. +Open System Preferences and select Keyboard. +Then, in the first tab, click on the button Modifier Keys…​. +For every keyboard you can customize the behaviour of your modifier keys and therefore switch Control and Option as illustrated in the screenshot:

+
+
+
+Keyboard Preferences / Modifier Keys +
+
+
+

Programmers should also disable the Control + Space shortcut for opening Spotlight Search, as otherwise this shortcut cannot be redefined in other apps like common IDEs.

+
+
+
+Keyboard Preferences / Shortcuts +
+
+
+
+

== Eclipse

+
+

In Eclipse, move and select by word as described above does not work. +Even worse, the most important shortcut does not work: Control + Space for code completion (content assist). +You can manually redefine the key bindings in Preferences under General > Keys. +However, with multiple IDE installations and workspaces this will quickly get tedious. +Therefore, you can Export and Import specific Preferences such as Keys Preferences to/from a *.epf (Eclipse PreFerences) file. +We have done all this for you so you can just import the file located in system/mac/keyboard/Eclipse/eclipse-mac-keybindings.epf into your Eclipse. +Happy coding.

+
+
+
+

Karabiner

+
+

If you want more dynamics and do not worry about an app that has to run in the background to make your keyboard work as you like (no relevant performance overhead), you can try Karabiner Elements. +This is a powerful tool to remap your keyboard shortcuts. +In the UI you can only directly create and edit Simple Modifications that are too limited for most use-cases. +However, using Complex Modifications you can do a lot of magic to customize the keyboard behaviour to your personal needs. +A key with any combination of modifiers can be mapped to any key with arbitrary modifiers. +This can also be bound to conditions based on the frontmost application or the keyboard model. +These complex modifications are configured as *.json files. +We have included a set with useful rules for external keyboards, programmer shortcuts, etc. +If you have Karabiner installed, you only need to copy the contents of the karabiner folder located in this directory to your ~/.config folder:

+
+
+
+
cd system/mac/keyboard/
+cp karabiner/assets/complex_modifications/*.json ~/.config/karabiner/assets/complex_modifications/
+
+
+
+

Now, if you open the Complex Modifications in the Karabiner app, you can click on the + Add rule button and will see these mappings in the pop up. +Select the rules you want to add (e.g. add all) and you are done. +Unlike other solutions, you can quickly tweak your keyboard without the need to log out and restart apps, which gives faster trial and error turnarounds. +Further, if you want to tweak your own configs, Karabiner comes with a secondary app called Karabiner-EventViewer that shows you the names of the keys, modifiers, and apps for the events you are triggering. +This is very helpful to get the config right.

+
+ +
+
+

Linux Tooling

+
+

There is nothing in this section so far. If you are a Linux user, please share your experience and provide your valuable hints.

+
+ +
+
+

Lombok

+
+

Even though not officially recommended by devon4j some projects want to use lombok in their project. +As this requires some tweaks for IDEs we do support you with this guide in case you want to use it.

+
+
+
+

Lombok in Eclipse

+
+

For eclipse there is a plugin to activate lombok support in eclipse. +We have this already configured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon eclipse add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for lombok based projects you only need to activate this plugin in your project specific settings in lombok.properties for eclipse (replace false with true for plugin_active).

+
+
+
+

Lombok for VS-Code

+
+

For VisualStudio Code there is an extension to activate lombok support in VS-Code. +We have this already preconfigured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon vscode add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for lombok based projects you only need to activate this plugin in your project specific settings in lombok.properties for vscode (replace false with true for plugin_active).

+
+
+
+

Lombok for IntelliJ

+
+

For IntelliJ there is a plugin to activate lombok support in IntelliJ. +Currently we have not yet configured or automated this in devonfw-ide. +Please contribute to change this. See issues #453 and #491.

+
+
+
+
+
+

Support

+ +
+

Migration from oasp4j-ide

+
+

The devonfw-ide is a completely new and innovative solution for managing the local development environment that has been created from scratch. +Releases of OASP as well as releases of devonfw until version 3.1.x are based on the old oasp4j-ide that is now considered deprecated. As devonfw-ide is a complete redesign this will have some impact for the users. This section should help and assist so you do not get lost.

+
+
+
+

Get familiar with devonfw-ide

+
+

First of all you should roughly get familiar with the new devonfw-ide. The key features and changes are:

+
+
+
    +
  • +

    platform-agnostic (supports Windows, Mac, and Linux in a single distribution)

    +
  • +
  • +

    small core (reduced the download package from ~2 gigabyte to ~2 megabyte)

    +
  • +
  • +

    fast and easy updates (built in update support)

    +
  • +
  • +

    minimum number of scripts (removed tons of end-user scripts making things much simpler)

    +
  • +
  • +

    fully automated setup (run setup script and you are ready - even for advanced features that had to be configured manually before)

    +
  • +
  • +

    single command for everything (entire CLI available via new devon command)

    +
  • +
+
+
+

For all the details you should study the documentation starting from the beginning.

+
+
+
+

Migration of existing oasp4j-ide installation

+
+
    +
  • +

    extract new devonfw-ide-scripts on top of your existing installation

    +
  • +
  • +

    run setup

    +
  • +
  • +

    done

    +
  • +
+
+
+

If you get errors:

+
+
+
    +
  • +

    ask your technical lead to fix the settings git repo for devonfw-ide or offer to do it for them.

    +
  • +
  • +

    you need to merge the devon folder into your settings

    +
  • +
  • +

    you need to merge the devon.properties into your settings

    +
  • +
  • +

    you should check your variables[-customized][.bat] and merge required customizations into the proper configuration

    +
  • +
+
+
+
+

Hints for users after migration

+
+

Getting used to all the new commands might be tedious when starting after a migration.

+
+
+
Comparison of commands
+

|== == == == == == == == == == == = +|oasp4j-ide command|devonfw-ide command|Comment +|create-or-update-workspace|devon eclipse ws-update +.4+|actually not needed anymore as workspace is updated automatically when IDE is launched. To launch your IDE simply run devon eclipse, devon intellij, or devon vscode. If you like to get launch scripts for your IDE e.g. Eclipse just call devon eclipse --all create-script. +|create-or-update-workspace «workspace»|cd «workspace» && devon eclipse ws-update +|update-all-workspaces|devon eclipse --all ws-update +|create-or-update-workspace-vs|devon vscode ws-update

+
+
+

|devcon workspace create «workspace»|Simply create the «workspace» directory (e.g. cd workspaces && mkdir examples)|

+
+
+

|scripts/update-eclipse-workspace-settings|devon eclipse ws-reverse|To add new properties (old option --new) use devon eclipse ws-reverse-add

+
+
+

|devcon project build
+devcon devon4j build
+devcon devon4ng build +|devon build|

+
+
+

|devcon devon4j create|devon java create|

+
+
+

|devcon devon4ng create|devon ng create|

+
+
+

|devcon system *
+devcon dist * +|setup or devon ide setup|

+
+
+

|console.bat|-|Simply open terminal in selected folder. On Windows right-click folder in windows-explorer and select open devonfw CMD here.

+
+
+

|devcon help|devon help|

+
+
+

|devcon doc|Read the documentation from devonfw.com| +|== == == == == == == == == == == =

+
+
+ +
+
+

License

+
+

The product devonfw-ide is licensed under the following terms.

+
+
+

Binaries of this product have been made available to you by devonfw under the Apache Public License 2.0.

+
+
+

The documentation of this product is licensed under the terms of the Creative Commons License (Attribution-No Derivatives 4.0 International).

+
+
+

All of the source code to this product is available under licenses which are both free and open source.

+
+
+

More specifically, most of the source code is available under the Apache Public License 2.0. The remainder of the software which is not under the Apache license is available under one of a variety of other free and open source licenses. Those that require reproduction of the license text in the distribution are given below. (Note: your copy of this product may not contain code covered by one or more of the licenses listed here, depending on the exact product and version you choose.)

+
+
+

The following table shows the components that may be used. The column inclusion indicates the way the component is included:

+
+
+
    +
  • +

    directly included means the component is directly contained in the download package of devonfw-ide we provide

    +
  • +
  • +

    default setup means the component is not initially included but will be downloaded during the setup by default

    +
  • +
  • +

    optional means the component is neither initially included nor downloaded by default, but only gets downloaded and installed if explicitly triggered by you when invoking additional commands or if explicitly configured by your project.

    +
  • +
+
+
+
Third party components
+

|== == == == == == == == == == == = +|Component|Inclusion|License +|https://github.com/devonfw/ide[devonfw-ide] | Directly included |https://github.com/devonfw/ide/blob/master/LICENSE[ASL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] API | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] Implementation | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://openjdk.java.net/[OpenJDK] / AdoptOpenJDK (Java) |Default Setup| GPLv2 +|https://maven.apache.org/[Maven] | Default Setup|https://www.apache.org/licenses/LICENSE-2.0[ASL 2.0] +|https://code.visualstudio.com/[VS Code] |Optional| MIT (Terms) +|https://github.com/devonfw/extension-pack-vscode[extension-pack-vscode] |Optional|https://github.com/devonfw/extension-pack-vscode/blob/master/LICENSE[ASL 2.0] +|https://www.eclipse.org/[Eclipse] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] +|https://github.com/devonfw/cobigen[CobiGen] |Optional|https://github.com/devonfw/cobigen/blob/master/LICENSE.txt[ASL 2.0] +|https://marketplace.eclipse.org/content/tm-terminal[TM Terminal] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] (see here) +|https://github.com/iloveeclipse/anyedittools/[AnyEdit] |Optional|https://github.com/iloveeclipse/anyedittools/blob/master/LICENSE.md[EPL 1.0] +|https://checkstyle.org/eclipse-cs/[EclipseCS] |Optional|https://github.com/checkstyle/eclipse-cs/blob/master/LICENSE[LGPL 2.1] +|https://marketplace.eclipse.org/content/spotbugs-eclipse-plugin[SpotBugs Eclipse plugin] |Optional|https://github.com/spotbugs/spotbugs/blob/master/LICENSE[LGPL 2.1] +|https://www.eclemma.org/[EclEmma] |Optional|https://www.eclemma.org/license.html[EPL 1.0] +|https://basti1302.github.io/startexplorer/[StartExplorer] |Optional|http://www.wtfpl.net/txt/copying/[WTFPL 2] +|http://myregexp.com/eclipsePlugin.html[regex tester] 
|Optional|http://www.gnu.org/licenses/gpl-2.0.html[GPL 2.0] (see here) +|https://github.com/m-m-m/eclipse-templatevariables/[eclipse-templatevariables] |Optional|https://github.com/m-m-m/eclipse-templatevariables/blob/master/LICENSE.txt[ASL 2.0] +|https://nodejs.org/[Node.js] |Default Setup|https://raw.githubusercontent.com/nodejs/node/master/LICENSE[License] +|https://www.npmjs.com/[NPM] |Default Setup|https://github.com/npm/cli/blob/latest/LICENSE[Artistic License 2.0] (Terms) +|https://cli.angular.io/[Angular CLI] (ng) |Optional|https://cli.angular.io/license.html[MIT] +|http://groovy-lang.org/[Groovy]|Optional|https://github.com/apache/groovy/blob/master/LICENSE[ASL 2.0] +|https://ant.apache.org/[Apache Ant]|Optional|https://github.com/apache/ant/blob/master/LICENSE[ASL 2.0] +|https://gradle.org/[Gradle] |Optional|https://github.com/gradle/gradle/blob/master/LICENSE[ASL 2.0] +|https://jenkins.io/[Jenkins] |Optional|https://github.com/jenkinsci/jenkins/blob/master/LICENSE.txt[MIT] +|https://www.sonarsource.com/plans-and-pricing/community/[SonarQube (Community Edition)] |Optional|https://github.com/SonarSource/sonarqube/blob/master/LICENSE.txt[LGPL 3.0] +|https://www.sonarlint.org/eclipse/[SonarLint] |Optional|https://github.com/SonarSource/sonarlint-eclipse/blob/master/LICENSE.txt[LGPL 3+] +|https://github.com/devonfw/cicdgen[cicdgen] |Optional|https://github.com/devonfw/cicdgen/blob/develop/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4j[devon4j] |Optional|https://github.com/devonfw/devon4j/blob/develop/LICENSE[ASL 2.0] +|https://github.com/devonfw/devon4ng[devon4ng] |Optional|https://github.com/devonfw/devon4ng/blob/master/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4node[devon4node] |Optional|https://github.com/devonfw/devon4node/blob/develop/LICENSE.txt[ASL 2.0] +|https://www.jetbrains.com/idea/[IntelliJ IDEA] |Optional|https://www.jetbrains.com/opensource/idea/[ASL 2.0] +|http://www.jasypt.org/[jasypt] 
|Optional|http://www.jasypt.org/license.html[ASL 2.0] +|https://www.docker.com/[docker]|Optional|https://docs.docker.com/engine/#licensing[ASL 2.0] and EULA +|https://kubernetes.io/[kubernetes]|Optional|https://github.com/kubernetes/kubernetes/blob/master/LICENSE[ASL 2.0] +|== == == == == == == == == == == =

+
+
+
+

Apache Software License - Version 2.0

+
+
+
                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+
+
+

Eclipse Public License - Version 1.0

+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+
+1. DEFINITIONS
+
+"Contribution" means:
+
+a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and
+
+b) in the case of each subsequent Contributor:
+
+i) changes to the Program, and
+
+ii) additions to the Program;
+
+where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program.
+
+"Contributor" means any person or entity that distributes the Program.
+
+"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+"Program" means the Contributions distributed in accordance with this Agreement.
+
+"Recipient" means anyone who receives the Program under this Agreement, including all Contributors.
+
+2. GRANT OF RIGHTS
+
+a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form.
+
+b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+
+c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+
+d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+
+3. REQUIREMENTS
+
+A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that:
+
+a) it complies with the terms and conditions of this Agreement; and
+
+b) its license agreement:
+
+i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+
+ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+
+iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and
+
+iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange.
+
+When the Program is made available in source code form:
+
+a) it must be made available under this Agreement; and
+
+b) a copy of this Agreement must be included with each copy of the Program.
+
+Contributors may not remove or alter any copyright notices contained within the Program.
+
+Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution.
+
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved.
+
+This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation.
+
+
+
+
+

Eclipse Public License - Version 2.0

+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE (“AGREEMENT”). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+1. DEFINITIONS
+
+“Contribution” means:
+
+    a) in the case of the initial Contributor, the initial content Distributed under this Agreement, and
+    b) in the case of each subsequent Contributor:
+        i) changes to the Program, and
+        ii) additions to the Program;
+    where such changes and/or additions to the Program originate from and are Distributed by that particular Contributor. A Contribution “originates” from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include changes or additions to the Program that are not Modified Works.
+
+“Contributor” means any person or entity that Distributes the Program.
+
+“Licensed Patents” mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+“Program” means the Contributions Distributed in accordance with this Agreement.
+
+“Recipient” means anyone who receives the Program under this Agreement or any Secondary License (as applicable), including Contributors.
+
+“Derivative Works” shall mean any work, whether in Source Code or other form, that is based on (or derived from) the Program and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship.
+
+“Modified Works” shall mean any work in Source Code or other form that results from an addition to, deletion from, or modification of the contents of the Program, including, for purposes of clarity any new file in Source Code form that contains any contents of the Program. Modified Works shall not include works that contain only declarations, interfaces, types, classes, structures, or files of the Program solely in each case in order to link to, bind by name, or subclass the Program or Modified Works thereof.
+
+“Distribute” means the acts of a) distributing or b) making available in any manner that enables the transfer of a copy.
+
+“Source Code” means the form of a Program preferred for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+“Secondary License” means either the GNU General Public License, Version 2.0, or any later versions of that license, including any exceptions or additional permissions as identified by the initial Contributor.
+2. GRANT OF RIGHTS
+
+    a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, Distribute and sublicense the Contribution of such Contributor, if any, and such Derivative Works.
+    b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in Source Code or other form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+    c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to Distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+    d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+    e) Notwithstanding the terms of any Secondary License, no Contributor makes additional grants to any Recipient (other than those set forth in this Agreement) as a result of such Recipient's receipt of the Program under the terms of a Secondary License (if permitted under the terms of Section 3).
+
+3. REQUIREMENTS
+
+3.1 If a Contributor Distributes the Program in any form, then:
+
+    a) the Program must also be made available as Source Code, in accordance with section 3.2, and the Contributor must accompany the Program with a statement that the Source Code for the Program is available under this Agreement, and informs Recipients how to obtain it in a reasonable manner on or through a medium customarily used for software exchange; and
+    b) the Contributor may Distribute the Program under a license different than this Agreement, provided that such license:
+        i) effectively disclaims on behalf of all other Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+        ii) effectively excludes on behalf of all other Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+        iii) does not attempt to limit or alter the recipients' rights in the Source Code under section 3.2; and
+        iv) requires any subsequent distribution of the Program by any party to be under a license that satisfies the requirements of this section 3.
+
+3.2 When the Program is Distributed as Source Code:
+
+    a) it must be made available under this Agreement, or if the Program (i) is combined with other material in a separate file or files made available under a Secondary License, and (ii) the initial Contributor attached to the Source Code the notice described in Exhibit A of this Agreement, then the Program may be made available under the terms of such Secondary Licenses, and
+    b) a copy of this Agreement must be included with each copy of the Program.
+
+3.3 Contributors may not remove or alter any copyright, patent, trademark, attribution notices, disclaimers of warranty, or limitations of liability (‘notices’) contained within the Program from any copy of the Program which they Distribute, provided that Contributors may add their own appropriate notices.
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor (“Commercial Contributor”) hereby agrees to defend and indemnify every other Contributor (“Indemnified Contributor”) against any losses, damages and costs (collectively “Losses”) arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement, including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be Distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to Distribute the Program (including its Contributions) under the new version.
+
+Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. Nothing in this Agreement is intended to be enforceable by any entity that is not a Contributor or Recipient. No third-party beneficiary rights are created under this Agreement.
+Exhibit A – Form of Secondary Licenses Notice
+
+“This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License, v. 2.0 are satisfied: {name license(s), version(s), and exceptions or additional permissions here}.”
+
+    Simply including a copy of this Agreement, including this Exhibit A is not sufficient to license the Source Code under Secondary Licenses.
+
+    If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice.
+
+    You may add additional accurate notices of copyright ownership.
+
+
+
+
+

MIT License

+
+
+
Copyright <YEAR> <COPYRIGHT HOLDER>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+
+
+

Artistic License - Version 2.0

+
+
+
Copyright (c) 2000-2006, The Perl Foundation.
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+Preamble
+
+This license establishes the terms under which a given free software Package may be copied, modified, distributed, and/or redistributed. The intent is that the Copyright Holder maintains some artistic control over the development of that Package while still keeping the Package available as open source and free software.
+
+You are always permitted to make arrangements wholly outside of this license directly with the Copyright Holder of a given Package. If the terms of this license do not permit the full use that you propose to make of the Package, you should contact the Copyright Holder and seek a different licensing arrangement.
+Definitions
+
+"Copyright Holder" means the individual(s) or organization(s) named in the copyright notice for the entire Package.
+
+"Contributor" means any party that has contributed code or other material to the Package, in accordance with the Copyright Holder's procedures.
+
+"You" and "your" means any person who would like to copy, distribute, or modify the Package.
+
+"Package" means the collection of files distributed by the Copyright Holder, and derivatives of that collection and/or of those files. A given Package may consist of either the Standard Version, or a Modified Version.
+
+"Distribute" means providing a copy of the Package or making it accessible to anyone else, or in the case of a company or organization, to others outside of your company or organization.
+
+"Distributor Fee" means any fee that you charge for Distributing this Package or providing support for this Package to another party. It does not mean licensing fees.
+
+"Standard Version" refers to the Package if it has not been modified, or has been modified only in ways explicitly requested by the Copyright Holder.
+
+"Modified Version" means the Package, if it has been changed, and such changes were not explicitly requested by the Copyright Holder.
+
+"Original License" means this Artistic License as Distributed with the Standard Version of the Package, in its current version or as it may be modified by The Perl Foundation in the future.
+
+"Source" form means the source code, documentation source, and configuration files for the Package.
+
+"Compiled" form means the compiled bytecode, object code, binary, or any other form resulting from mechanical transformation or translation of the Source form.
+Permission for Use and Modification Without Distribution
+
+(1) You are permitted to use the Standard Version and create and use Modified Versions for any purpose without restriction, provided that you do not Distribute the Modified Version.
+Permissions for Redistribution of the Standard Version
+
+(2) You may Distribute verbatim copies of the Source form of the Standard Version of this Package in any medium without restriction, either gratis or for a Distributor Fee, provided that you duplicate all of the original copyright notices and associated disclaimers. At your discretion, such verbatim copies may or may not include a Compiled form of the Package.
+
+(3) You may apply any bug fixes, portability changes, and other modifications made available from the Copyright Holder. The resulting Package will still be considered the Standard Version, and as such will be subject to the Original License.
+Distribution of Modified Versions of the Package as Source
+
+(4) You may Distribute your Modified Version as Source (either gratis or for a Distributor Fee, and with or without a Compiled form of the Modified Version) provided that you clearly document how it differs from the Standard Version, including, but not limited to, documenting any non-standard features, executables, or modules, and provided that you do at least ONE of the following:
+
+(a) make the Modified Version available to the Copyright Holder of the Standard Version, under the Original License, so that the Copyright Holder may include your modifications in the Standard Version.
+(b) ensure that installation of your Modified Version does not prevent the user installing or running the Standard Version. In addition, the Modified Version must bear a name that is different from the name of the Standard Version.
+(c) allow anyone who receives a copy of the Modified Version to make the Source form of the Modified Version available to others under
+(i) the Original License or
+(ii) a license that permits the licensee to freely copy, modify and redistribute the Modified Version using the same licensing terms that apply to the copy that the licensee received, and requires that the Source form of the Modified Version, and of any works derived from it, be made freely available in that license fees are prohibited but Distributor Fees are allowed.
+Distribution of Compiled Forms of the Standard Version or Modified Versions without the Source
+
+(5) You may Distribute Compiled forms of the Standard Version without the Source, provided that you include complete instructions on how to get the Source of the Standard Version. Such instructions must be valid at the time of your distribution. If these instructions, at any time while you are carrying out such distribution, become invalid, you must provide new instructions on demand or cease further distribution. If you provide valid instructions or cease distribution within thirty days after you become aware that the instructions are invalid, then you do not forfeit any of your rights under this license.
+
+(6) You may Distribute a Modified Version in Compiled form without the Source, provided that you comply with Section 4 with respect to the Source of the Modified Version.
+Aggregating or Linking the Package
+
+(7) You may aggregate the Package (either the Standard Version or Modified Version) with other packages and Distribute the resulting aggregation provided that you do not charge a licensing fee for the Package. Distributor Fees are permitted, and licensing fees for other components in the aggregation are permitted. The terms of this license apply to the use and Distribution of the Standard or Modified Versions as included in the aggregation.
+
+(8) You are permitted to link Modified and Standard Versions with other works, to embed the Package in a larger work of your own, or to build stand-alone binary or bytecode versions of applications that include the Package, and Distribute the result without restriction, provided the result does not expose a direct interface to the Package.
+Items That are Not Considered Part of a Modified Version
+
+(9) Works (including, but not limited to, modules and scripts) that merely extend or make use of the Package, do not, by themselves, cause the Package to be a Modified Version. In addition, such works are not considered parts of the Package itself, and are not subject to the terms of this license.
+General Provisions
+
+(10) Any use, modification, and distribution of the Standard or Modified Versions is governed by this Artistic License. By using, modifying or distributing the Package, you accept this license. Do not use, modify, or distribute the Package, if you do not accept this license.
+
+(11) If your Modified Version has been derived from a Modified Version made by someone other than you, you are nevertheless required to ensure that your Modified Version complies with the requirements of this license.
+
+(12) This license does not grant you the right to use any trademark, service mark, tradename, or logo of the Copyright Holder.
+
+(13) This license includes the non-exclusive, worldwide, free-of-charge patent license to make, have made, use, offer to sell, sell, import and otherwise transfer the Package with respect to any patent claims licensable by the Copyright Holder that are necessarily infringed by the Package. If you institute patent litigation (including a cross-claim or counterclaim) against any party alleging that the Package constitutes direct or contributory patent infringement, then this Artistic License to you shall terminate on the date that such litigation is filed.
+
+(14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+
+

Creative Commons License - Attribution-NoDerivatives 4.0 International

+
+
+
By exercising the Licensed Rights (defined below), You accept and agree to be bound by the terms and conditions of this Creative Commons Attribution-NoDerivatives 4.0 International Public License ("Public License"). To the extent this Public License may be interpreted as a contract, You are granted the Licensed Rights in consideration of Your acceptance of these terms and conditions, and the Licensor grants You such rights in consideration of benefits the Licensor receives from making the Licensed Material available under these terms and conditions.
+
+Section 1 – Definitions.
+
+    Adapted Material means material subject to Copyright and Similar Rights that is derived from or based upon the Licensed Material and in which the Licensed Material is translated, altered, arranged, transformed, or otherwise modified in a manner requiring permission under the Copyright and Similar Rights held by the Licensor. For purposes of this Public License, where the Licensed Material is a musical work, performance, or sound recording, Adapted Material is always produced where the Licensed Material is synched in timed relation with a moving image.
+    Copyright and Similar Rights means copyright and/or similar rights closely related to copyright including, without limitation, performance, broadcast, sound recording, and Sui Generis Database Rights, without regard to how the rights are labeled or categorized. For purposes of this Public License, the rights specified in Section 2(b)(1)-(2) are not Copyright and Similar Rights.
+    Effective Technological Measures means those measures that, in the absence of proper authority, may not be circumvented under laws fulfilling obligations under Article 11 of the WIPO Copyright Treaty adopted on December 20, 1996, and/or similar international agreements.
+    Exceptions and Limitations means fair use, fair dealing, and/or any other exception or limitation to Copyright and Similar Rights that applies to Your use of the Licensed Material.
+    Licensed Material means the artistic or literary work, database, or other material to which the Licensor applied this Public License.
+    Licensed Rights means the rights granted to You subject to the terms and conditions of this Public License, which are limited to all Copyright and Similar Rights that apply to Your use of the Licensed Material and that the Licensor has authority to license.
+    Licensor means the individual(s) or entity(ies) granting rights under this Public License.
+    Share means to provide material to the public by any means or process that requires permission under the Licensed Rights, such as reproduction, public display, public performance, distribution, dissemination, communication, or importation, and to make material available to the public including in ways that members of the public may access the material from a place and at a time individually chosen by them.
+    Sui Generis Database Rights means rights other than copyright resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other essentially equivalent rights anywhere in the world.
+    You means the individual or entity exercising the Licensed Rights under this Public License. Your has a corresponding meaning.
+
+Section 2 – Scope.
+
+    License grant.
+        Subject to the terms and conditions of this Public License, the Licensor hereby grants You a worldwide, royalty-free, non-sublicensable, non-exclusive, irrevocable license to exercise the Licensed Rights in the Licensed Material to:
+            reproduce and Share the Licensed Material, in whole or in part; and
+            produce and reproduce, but not Share, Adapted Material.
+        Exceptions and Limitations. For the avoidance of doubt, where Exceptions and Limitations apply to Your use, this Public License does not apply, and You do not need to comply with its terms and conditions.
+        Term. The term of this Public License is specified in Section 6(a).
+        Media and formats; technical modifications allowed. The Licensor authorizes You to exercise the Licensed Rights in all media and formats whether now known or hereafter created, and to make technical modifications necessary to do so. The Licensor waives and/or agrees not to assert any right or authority to forbid You from making technical modifications necessary to exercise the Licensed Rights, including technical modifications necessary to circumvent Effective Technological Measures. For purposes of this Public License, simply making modifications authorized by this Section 2(a)(4) never produces Adapted Material.
+        Downstream recipients.
+            Offer from the Licensor – Licensed Material. Every recipient of the Licensed Material automatically receives an offer from the Licensor to exercise the Licensed Rights under the terms and conditions of this Public License.
+            No downstream restrictions. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, the Licensed Material if doing so restricts exercise of the Licensed Rights by any recipient of the Licensed Material.
+        No endorsement. Nothing in this Public License constitutes or may be construed as permission to assert or imply that You are, or that Your use of the Licensed Material is, connected with, or sponsored, endorsed, or granted official status by, the Licensor or others designated to receive attribution as provided in Section 3(a)(1)(A)(i).
+
+    Other rights.
+        Moral rights, such as the right of integrity, are not licensed under this Public License, nor are publicity, privacy, and/or other similar personality rights; however, to the extent possible, the Licensor waives and/or agrees not to assert any such rights held by the Licensor to the limited extent necessary to allow You to exercise the Licensed Rights, but not otherwise.
+        Patent and trademark rights are not licensed under this Public License.
+        To the extent possible, the Licensor waives any right to collect royalties from You for the exercise of the Licensed Rights, whether directly or through a collecting society under any voluntary or waivable statutory or compulsory licensing scheme. In all other cases the Licensor expressly reserves any right to collect such royalties.
+
+Section 3 – License Conditions.
+
+Your exercise of the Licensed Rights is expressly made subject to the following conditions.
+
+    Attribution.
+
+        If You Share the Licensed Material, You must:
+            retain the following if it is supplied by the Licensor with the Licensed Material:
+                identification of the creator(s) of the Licensed Material and any others designated to receive attribution, in any reasonable manner requested by the Licensor (including by pseudonym if designated);
+                a copyright notice;
+                a notice that refers to this Public License;
+                a notice that refers to the disclaimer of warranties;
+                a URI or hyperlink to the Licensed Material to the extent reasonably practicable;
+            indicate if You modified the Licensed Material and retain an indication of any previous modifications; and
+            indicate the Licensed Material is licensed under this Public License, and include the text of, or the URI or hyperlink to, this Public License.
+        For the avoidance of doubt, You do not have permission under this Public License to Share Adapted Material.
+        You may satisfy the conditions in Section 3(a)(1) in any reasonable manner based on the medium, means, and context in which You Share the Licensed Material. For example, it may be reasonable to satisfy the conditions by providing a URI or hyperlink to a resource that includes the required information.
+        If requested by the Licensor, You must remove any of the information required by Section 3(a)(1)(A) to the extent reasonably practicable.
+
+Section 4 – Sui Generis Database Rights.
+
+Where the Licensed Rights include Sui Generis Database Rights that apply to Your use of the Licensed Material:
+
+    for the avoidance of doubt, Section 2(a)(1) grants You the right to extract, reuse, reproduce, and Share all or a substantial portion of the contents of the database, provided You do not Share Adapted Material;
+    if You include all or a substantial portion of the database contents in a database in which You have Sui Generis Database Rights, then the database in which You have Sui Generis Database Rights (but not its individual contents) is Adapted Material; and
+    You must comply with the conditions in Section 3(a) if You Share all or a substantial portion of the contents of the database.
+
+For the avoidance of doubt, this Section 4 supplements and does not replace Your obligations under this Public License where the Licensed Rights include other Copyright and Similar Rights.
+
+Section 5 – Disclaimer of Warranties and Limitation of Liability.
+
+    Unless otherwise separately undertaken by the Licensor, to the extent possible, the Licensor offers the Licensed Material as-is and as-available, and makes no representations or warranties of any kind concerning the Licensed Material, whether express, implied, statutory, or other. This includes, without limitation, warranties of title, merchantability, fitness for a particular purpose, non-infringement, absence of latent or other defects, accuracy, or the presence or absence of errors, whether or not known or discoverable. Where disclaimers of warranties are not allowed in full or in part, this disclaimer may not apply to You.
+    To the extent possible, in no event will the Licensor be liable to You on any legal theory (including, without limitation, negligence) or otherwise for any direct, special, indirect, incidental, consequential, punitive, exemplary, or other losses, costs, expenses, or damages arising out of this Public License or use of the Licensed Material, even if the Licensor has been advised of the possibility of such losses, costs, expenses, or damages. Where a limitation of liability is not allowed in full or in part, this limitation may not apply to You.
+
+    The disclaimer of warranties and limitation of liability provided above shall be interpreted in a manner that, to the extent possible, most closely approximates an absolute disclaimer and waiver of all liability.
+
+Section 6 – Term and Termination.
+
+    This Public License applies for the term of the Copyright and Similar Rights licensed here. However, if You fail to comply with this Public License, then Your rights under this Public License terminate automatically.
+
+    Where Your right to use the Licensed Material has terminated under Section 6(a), it reinstates:
+        automatically as of the date the violation is cured, provided it is cured within 30 days of Your discovery of the violation; or
+        upon express reinstatement by the Licensor.
+    For the avoidance of doubt, this Section 6(b) does not affect any right the Licensor may have to seek remedies for Your violations of this Public License.
+    For the avoidance of doubt, the Licensor may also offer the Licensed Material under separate terms or conditions or stop distributing the Licensed Material at any time; however, doing so will not terminate this Public License.
+    Sections 1, 5, 6, 7, and 8 survive termination of this Public License.
+
+Section 7 – Other Terms and Conditions.
+
+    The Licensor shall not be bound by any additional or different terms or conditions communicated by You unless expressly agreed.
+    Any arrangements, understandings, or agreements regarding the Licensed Material not stated herein are separate from and independent of the terms and conditions of this Public License.
+
+Section 8 – Interpretation.
+
+    For the avoidance of doubt, this Public License does not, and shall not be interpreted to, reduce, limit, restrict, or impose conditions on any use of the Licensed Material that could lawfully be made without permission under this Public License.
+    To the extent possible, if any provision of this Public License is deemed unenforceable, it shall be automatically reformed to the minimum extent necessary to make it enforceable. If the provision cannot be reformed, it shall be severed from this Public License without affecting the enforceability of the remaining terms and conditions.
+    No term or condition of this Public License will be waived and no failure to comply consented to unless expressly agreed to by the Licensor.
+    Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority.
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 2.1

+
+
+
 Version 2.1, February 1999
+
+Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL.  It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users.
+
+This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below.
+
+When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things.
+
+To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it.
+
+For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights.
+
+We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library.
+
+To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others.
+
+Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license.
+
+Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs.
+
+When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library.
+
+We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances.
+
+For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License.
+
+In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system.
+
+Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library.
+
+The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you".
+
+A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables.
+
+The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".)
+
+"Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library.
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does.
+
+1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) The modified work must itself be a software library.
+    b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change.
+    c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License.
+    d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful.
+
+    (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices.
+
+Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy.
+
+This option is useful when you wish to copy part of the code of the Library into a program that is not a library.
+
+4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange.
+
+If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code.
+
+5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License.
+
+However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables.
+
+When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law.
+
+If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.)
+
+Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself.
+
+6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications.
+
+You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things:
+
+    a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.)
+    b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with.
+    c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution.
+    d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place.
+    e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy.
+
+For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute.
+
+7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above.
+    b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it.
+
+10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License.
+
+11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation.
+
+14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Libraries
+
+If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License).
+
+To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the library's name and an idea of what it does.
+Copyright (C) year  name of author
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2.1 of the License, or (at your option) any later version.
+
+This library is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with this library; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the library, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright interest in
+the library `Frob' (a library for tweaking knobs) written
+by James Random Hacker.
+
+signature of Ty Coon, 1 April 1990
+Ty Coon, President of Vice
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 3

+
+
+
Version 3, 29 June 2007
+
+Copyright © 2007 Free Software Foundation, Inc. <https://fsf.org/>
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+
+This version of the GNU Lesser General Public License incorporates the terms and conditions of version 3 of the GNU General Public License, supplemented by the additional permissions listed below.
+0. Additional Definitions.
+
+As used herein, “this License” refers to version 3 of the GNU Lesser General Public License, and the “GNU GPL” refers to version 3 of the GNU General Public License.
+
+“The Library” refers to a covered work governed by this License, other than an Application or a Combined Work as defined below.
+
+An “Application” is any work that makes use of an interface provided by the Library, but which is not otherwise based on the Library. Defining a subclass of a class defined by the Library is deemed a mode of using an interface provided by the Library.
+
+A “Combined Work” is a work produced by combining or linking an Application with the Library. The particular version of the Library with which the Combined Work was made is also called the “Linked Version”.
+
+The “Minimal Corresponding Source” for a Combined Work means the Corresponding Source for the Combined Work, excluding any source code for portions of the Combined Work that, considered in isolation, are based on the Application, and not on the Linked Version.
+
+The “Corresponding Application Code” for a Combined Work means the object code and/or source code for the Application, including any data and utility programs needed for reproducing the Combined Work from the Application, but excluding the System Libraries of the Combined Work.
+1. Exception to Section 3 of the GNU GPL.
+
+You may convey a covered work under sections 3 and 4 of this License without being bound by section 3 of the GNU GPL.
+2. Conveying Modified Versions.
+
+If you modify a copy of the Library, and, in your modifications, a facility refers to a function or data to be supplied by an Application that uses the facility (other than as an argument passed when the facility is invoked), then you may convey a copy of the modified version:
+
+    a) under this License, provided that you make a good faith effort to ensure that, in the event an Application does not supply the function or data, the facility still operates, and performs whatever part of its purpose remains meaningful, or
+    b) under the GNU GPL, with none of the additional permissions of this License applicable to that copy.
+
+3. Object Code Incorporating Material from Library Header Files.
+
+The object code form of an Application may incorporate material from a header file that is part of the Library. You may convey such object code under terms of your choice, provided that, if the incorporated material is not limited to numerical parameters, data structure layouts and accessors, or small macros, inline functions and templates (ten or fewer lines in length), you do both of the following:
+
+    a) Give prominent notice with each copy of the object code that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the object code with a copy of the GNU GPL and this license document.
+
+4. Combined Works.
+
+You may convey a Combined Work under terms of your choice that, taken together, effectively do not restrict modification of the portions of the Library contained in the Combined Work and reverse engineering for debugging such modifications, if you also do each of the following:
+
+    a) Give prominent notice with each copy of the Combined Work that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the Combined Work with a copy of the GNU GPL and this license document.
+    c) For a Combined Work that displays copyright notices during execution, include the copyright notice for the Library among these notices, as well as a reference directing the user to the copies of the GNU GPL and this license document.
+    d) Do one of the following:
+        0) Convey the Minimal Corresponding Source under the terms of this License, and the Corresponding Application Code in a form suitable for, and under terms that permit, the user to recombine or relink the Application with a modified version of the Linked Version to produce a modified Combined Work, in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.
+        1) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (a) uses at run time a copy of the Library already present on the user's computer system, and (b) will operate properly with a modified version of the Library that is interface-compatible with the Linked Version.
+    e) Provide Installation Information, but only if you would otherwise be required to provide such information under section 6 of the GNU GPL, and only to the extent that such information is necessary to install and execute a modified version of the Combined Work produced by recombining or relinking the Application with a modified version of the Linked Version. (If you use option 4d0, the Installation Information must accompany the Minimal Corresponding Source and Corresponding Application Code. If you use option 4d1, you must provide the Installation Information in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.)
+
+5. Combined Libraries.
+
+You may place library facilities that are a work based on the Library side by side in a single library together with other library facilities that are not Applications and are not covered by this License, and convey such a combined library under terms of your choice, if you do both of the following:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities, conveyed under the terms of this License.
+    b) Give prominent notice with the combined library that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+6. Revised Versions of the GNU Lesser General Public License.
+
+The Free Software Foundation may publish revised and/or new versions of the GNU Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library as you received it specifies that a certain numbered version of the GNU Lesser General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that published version or of any later version published by the Free Software Foundation. If the Library as you received it does not specify a version number of the GNU Lesser General Public License, you may choose any version of the GNU Lesser General Public License ever published by the Free Software Foundation.
+
+If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library.
+
+
+
+
+

GNU GENERAL PUBLIC LICENSE - Version 2

+
+
+
 Version 2, June 1991
+
+Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA
+
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too.
+
+When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things.
+
+To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it.
+
+For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights.
+
+We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software.
+
+Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations.
+
+Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all.
+
+The precise terms and conditions for copying, distribution and modification follow.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does.
+
+1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change.
+    b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License.
+    c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code.
+
+4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it.
+
+6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License.
+
+7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation.
+
+10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Programs
+
+If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms.
+
+To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the program's name and an idea of what it does.
+Copyright (C) yyyy  name of author
+
+This program is free software; you can redistribute it and/or
+modify it under the terms of the GNU General Public License
+as published by the Free Software Foundation; either version 2
+of the License, or (at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this when it starts in an interactive mode:
+
+Gnomovision version 69, Copyright (C) year name of author
+Gnomovision comes with ABSOLUTELY NO WARRANTY; for details
+type `show w'.  This is free software, and you are welcome
+to redistribute it under certain conditions; type `show c'
+for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright
+interest in the program `Gnomovision'
+(which makes passes at compilers) written
+by James Hacker.
+
+signature of Ty Coon, 1 April 1989
+Ty Coon, President of Vice
+
+
+
+
+

DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2

+
+
+
            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+                    Version 2, December 2004
+
+ Copyright (C) 2004 Sam Hocevar
+  14 rue de Plaisance, 75014 Paris, France
+ Everyone is permitted to copy and distribute verbatim or modified
+ copies of this license document, and changing it is allowed as long
+ as the name is changed.
+
+            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. You just DO WHAT THE FUCK YOU WANT TO.
+
+
+
+
+

License of Node.js

+
+
+
Node.js is licensed for use as follows:
+
+"""
+Copyright Node.js contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+This license applies to parts of Node.js originating from the
+https://github.com/joyent/node repository:
+
+"""
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+The Node.js license applies to all parts of Node.js that are not externally
+maintained libraries.
+
+The externally maintained libraries used by Node.js are:
+
+- Acorn, located at deps/acorn, is licensed as follows:
+  """
+    Copyright (C) 2012-2018 by various contributors (see AUTHORS)
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- Acorn plugins, located at deps/acorn-plugins, is licensed as follows:
+  """
+    Copyright (C) 2017-2018 by Adrian Heine
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- c-ares, located at deps/cares, is licensed as follows:
+  """
+    Copyright (c) 2007 - 2018, Daniel Stenberg with many contributors, see AUTHORS
+    file.
+
+    Copyright 1998 by the Massachusetts Institute of Technology.
+
+    Permission to use, copy, modify, and distribute this software and its
+    documentation for any purpose and without fee is hereby granted, provided that
+    the above copyright notice appear in all copies and that both that copyright
+    notice and this permission notice appear in supporting documentation, and that
+    the name of M.I.T. not be used in advertising or publicity pertaining to
+    distribution of the software without specific, written prior permission.
+    M.I.T. makes no representations about the suitability of this software for any
+    purpose.  It is provided "as is" without express or implied warranty.
+  """
+
+- ICU, located at deps/icu-small, is licensed as follows:
+  """
+    COPYRIGHT AND PERMISSION NOTICE (ICU 58 and later)
+
+    Copyright © 1991-2019 Unicode, Inc. All rights reserved.
+    Distributed under the Terms of Use in https://www.unicode.org/copyright.html.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of the Unicode data files and any associated documentation
+    (the "Data Files") or Unicode software and any associated documentation
+    (the "Software") to deal in the Data Files or Software
+    without restriction, including without limitation the rights to use,
+    copy, modify, merge, publish, distribute, and/or sell copies of
+    the Data Files or Software, and to permit persons to whom the Data Files
+    or Software are furnished to do so, provided that either
+    (a) this copyright and permission notice appear with all copies
+    of the Data Files or Software, or
+    (b) this copyright and permission notice appear in associated
+    Documentation.
+
+    THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
+    ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT OF THIRD PARTY RIGHTS.
+    IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
+    NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
+    DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+    DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+    TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+    PERFORMANCE OF THE DATA FILES OR SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale,
+    use or other dealings in these Data Files or Software without prior
+    written authorization of the copyright holder.
+
+    ---------------------
+
+    Third-Party Software Licenses
+
+    This section contains third-party software notices and/or additional
+    terms for licensed third-party software components included within ICU
+    libraries.
+
+    1. ICU License - ICU 1.8.1 to ICU 57.1
+
+    COPYRIGHT AND PERMISSION NOTICE
+
+    Copyright (c) 1995-2016 International Business Machines Corporation and others
+    All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, and/or sell copies of the Software, and to permit persons
+    to whom the Software is furnished to do so, provided that the above
+    copyright notice(s) and this permission notice appear in all copies of
+    the Software and that both the above copyright notice(s) and this
+    permission notice appear in supporting documentation.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
+    OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
+    HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY
+    SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER
+    RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
+    CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+    CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale, use
+    or other dealings in this Software without prior written authorization
+    of the copyright holder.
+
+    All trademarks and registered trademarks mentioned herein are the
+    property of their respective owners.
+
+    2. Chinese/Japanese Word Break Dictionary Data (cjdict.txt)
+
+     #     The Google Chrome software developed by Google is licensed under
+     # the BSD license. Other software included in this distribution is
+     # provided under other licenses, as set forth below.
+     #
+     #  The BSD License
+     #  http://opensource.org/licenses/bsd-license.php
+     #  Copyright (C) 2006-2008, Google Inc.
+     #
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     # modification, are permitted provided that the following conditions are met:
+     #
+     #  Redistributions of source code must retain the above copyright notice,
+     # this list of conditions and the following disclaimer.
+     #  Redistributions in binary form must reproduce the above
+     # copyright notice, this list of conditions and the following
+     # disclaimer in the documentation and/or other materials provided with
+     # the distribution.
+     #  Neither the name of  Google Inc. nor the names of its
+     # contributors may be used to endorse or promote products derived from
+     # this software without specific prior written permission.
+     #
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+     # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+     # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+     # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+     # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+     # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+     #
+     #
+     #  The word list in cjdict.txt are generated by combining three word lists
+     # listed below with further processing for compound word breaking. The
+     # frequency is generated with an iterative training against Google web
+     # corpora.
+     #
+     #  * Libtabe (Chinese)
+     #    - https://sourceforge.net/project/?group_id=1519
+     #    - Its license terms and conditions are shown below.
+     #
+     #  * IPADIC (Japanese)
+     #    - http://chasen.aist-nara.ac.jp/chasen/distribution.html
+     #    - Its license terms and conditions are shown below.
+     #
+     #  ---------COPYING.libtabe ---- BEGIN--------------------
+     #
+     #  /*
+     #   * Copyright (c) 1999 TaBE Project.
+     #   * Copyright (c) 1999 Pai-Hsiang Hsiao.
+     #   * All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the TaBE Project nor the names of its
+     #   *   contributors may be used to endorse or promote products derived
+     #   *   from this software without specific prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  /*
+     #   * Copyright (c) 1999 Computer Systems and Communication Lab,
+     #   *                    Institute of Information Science, Academia
+     #       *                    Sinica. All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the Computer Systems and Communication Lab
+     #   *   nor the names of its contributors may be used to endorse or
+     #   *   promote products derived from this software without specific
+     #   *   prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  Copyright 1996 Chih-Hao Tsai @ Beckman Institute,
+     #      University of Illinois
+     #  c-tsai4@uiuc.edu  http://casper.beckman.uiuc.edu/~c-tsai4
+     #
+     #  ---------------COPYING.libtabe-----END--------------------------------
+     #
+     #
+     #  ---------------COPYING.ipadic-----BEGIN-------------------------------
+     #
+     #  Copyright 2000, 2001, 2002, 2003 Nara Institute of Science
+     #  and Technology.  All Rights Reserved.
+     #
+     #  Use, reproduction, and distribution of this software is permitted.
+     #  Any copy of this software, whether in its original form or modified,
+     #  must include both the above copyright notice and the following
+     #  paragraphs.
+     #
+     #  Nara Institute of Science and Technology (NAIST),
+     #  the copyright holders, disclaims all warranties with regard to this
+     #  software, including all implied warranties of merchantability and
+     #  fitness, in no event shall NAIST be liable for
+     #  any special, indirect or consequential damages or any damages
+     #  whatsoever resulting from loss of use, data or profits, whether in an
+     #  action of contract, negligence or other tortuous action, arising out
+     #  of or in connection with the use or performance of this software.
+     #
+     #  A large portion of the dictionary entries
+     #  originate from ICOT Free Software.  The following conditions for ICOT
+     #  Free Software applies to the current dictionary as well.
+     #
+     #  Each User may also freely distribute the Program, whether in its
+     #  original form or modified, to any third party or parties, PROVIDED
+     #  that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear
+     #  on, or be attached to, the Program, which is distributed substantially
+     #  in the same form as set out herein and that such intended
+     #  distribution, if actually made, will neither violate or otherwise
+     #  contravene any of the laws and regulations of the countries having
+     #  jurisdiction over the User or the intended distribution itself.
+     #
+     #  NO WARRANTY
+     #
+     #  The program was produced on an experimental basis in the course of the
+     #  research and development conducted during the project and is provided
+     #  to users as so produced on an experimental basis.  Accordingly, the
+     #  program is provided without any warranty whatsoever, whether express,
+     #  implied, statutory or otherwise.  The term "warranty" used herein
+     #  includes, but is not limited to, any warranty of the quality,
+     #  performance, merchantability and fitness for a particular purpose of
+     #  the program and the nonexistence of any infringement or violation of
+     #  any right of any third party.
+     #
+     #  Each user of the program will agree and understand, and be deemed to
+     #  have agreed and understood, that there is no warranty whatsoever for
+     #  the program and, accordingly, the entire risk arising from or
+     #  otherwise connected with the program is assumed by the user.
+     #
+     #  Therefore, neither ICOT, the copyright holder, or any other
+     #  organization that participated in or was otherwise related to the
+     #  development of the program and their respective officials, directors,
+     #  officers and other employees shall be held liable for any and all
+     #  damages, including, without limitation, general, special, incidental
+     #  and consequential damages, arising out of or otherwise in connection
+     #  with the use or inability to use the program or any product, material
+     #  or result produced or otherwise obtained by using the program,
+     #  regardless of whether they have been advised of, or otherwise had
+     #  knowledge of, the possibility of such damages at any time during the
+     #  project or thereafter.  Each user will be deemed to have agreed to the
+     #  foregoing by his or her commencement of use of the program.  The term
+     #  "use" as used herein includes, but is not limited to, the use,
+     #  modification, copying and distribution of the program and the
+     #  production of secondary products from the program.
+     #
+     #  In the case where the program, whether in its original form or
+     #  modified, was distributed or delivered to or received by a user from
+     #  any person, organization or entity other than ICOT, unless it makes or
+     #  grants independently of ICOT any specific warranty to the user in
+     #  writing, such person, organization or entity, will also be exempted
+     #  from and not be held liable to the user for any such damages as noted
+     #  above as far as the program is concerned.
+     #
+     #  ---------------COPYING.ipadic-----END----------------------------------
+
+    3. Lao Word Break Dictionary Data (laodict.txt)
+
+     #  Copyright (c) 2013 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     # Project: http://code.google.com/p/lao-dictionary/
+     # Dictionary: http://lao-dictionary.googlecode.com/git/Lao-Dictionary.txt
+     # License: http://lao-dictionary.googlecode.com/git/Lao-Dictionary-LICENSE.txt
+     #              (copied below)
+     #
+     #  This file is derived from the above dictionary, with slight
+     #  modifications.
+     #  ----------------------------------------------------------------------
+     #  Copyright (C) 2013 Brian Eugene Wilson, Robert Martin Campbell.
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification,
+     #  are permitted provided that the following conditions are met:
+     #
+     #
+     # Redistributions of source code must retain the above copyright notice, this
+     #  list of conditions and the following disclaimer. Redistributions in
+     #  binary form must reproduce the above copyright notice, this list of
+     #  conditions and the following disclaimer in the documentation and/or
+     #  other materials provided with the distribution.
+     #
+     #
+     # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
+     # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     # OF THE POSSIBILITY OF SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    4. Burmese Word Break Dictionary Data (burmesedict.txt)
+
+     #  Copyright (c) 2014 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     #  This list is part of a project hosted at:
+     #    github.com/kanyawtech/myanmar-karen-word-lists
+     #
+     #  --------------------------------------------------------------------------
+     #  Copyright (c) 2013, LeRoy Benjamin Sharon
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification, are permitted provided that the following conditions
+     #  are met: Redistributions of source code must retain the above
+     #  copyright notice, this list of conditions and the following
+     #  disclaimer.  Redistributions in binary form must reproduce the
+     #  above copyright notice, this list of conditions and the following
+     #  disclaimer in the documentation and/or other materials provided
+     #  with the distribution.
+     #
+     #    Neither the name Myanmar Karen Word Lists, nor the names of its
+     #    contributors may be used to endorse or promote products derived
+     #    from this software without specific prior written permission.
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     #  CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     #  INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     #  MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     #  DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
+     #  BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+     #  EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+     #  TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+     #  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+     #  ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
+     #  TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
+     #  THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+     #  SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    5. Time Zone Database
+
+      ICU uses the public domain data and code derived from Time Zone
+    Database for its time zone support. The ownership of the TZ database
+    is explained in BCP 175: Procedure for Maintaining the Time Zone
+    Database section 7.
+
+     # 7.  Database Ownership
+     #
+     #    The TZ database itself is not an IETF Contribution or an IETF
+     #    document.  Rather it is a pre-existing and regularly updated work
+     #    that is in the public domain, and is intended to remain in the
+     #    public domain.  Therefore, BCPs 78 [RFC5378] and 79 [RFC3979] do
+     #    not apply to the TZ Database or contributions that individuals make
+     #    to it.  Should any claims be made and substantiated against the TZ
+     #    Database, the organization that is providing the IANA
+     #    Considerations defined in this RFC, under the memorandum of
+     #    understanding with the IETF, currently ICANN, may act in accordance
+     #    with all competent court orders.  No ownership claims will be made
+     #    by ICANN or the IETF Trust on the database or the code.  Any person
+     #    making a contribution to the database or code waives all rights to
+     #    future claims in that contribution or in the TZ Database.
+
+    6. Google double-conversion
+
+    Copyright 2006-2011, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- libuv, located at deps/uv, is licensed as follows:
+  """
+    libuv is licensed for use as follows:
+
+    == ==
+    Copyright (c) 2015-present libuv project contributors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+    == ==
+
+    This license applies to parts of libuv originating from the
+    https://github.com/joyent/libuv repository:
+
+    == ==
+
+    Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+
+    == ==
+
+    This license applies to all parts of libuv that are not externally
+    maintained libraries.
+
+    The externally maintained libraries used by libuv are:
+
+      - tree.h (from FreeBSD), copyright Niels Provos. Two clause BSD license.
+
+      - inet_pton and inet_ntop implementations, contained in src/inet.c, are
+        copyright the Internet Systems Consortium, Inc., and licensed under the ISC
+        license.
+
+      - stdint-msvc2008.h (from msinttypes), copyright Alexander Chemeris. Three
+        clause BSD license.
+
+      - pthread-fixes.c, copyright Google Inc. and Sony Mobile Communications AB.
+        Three clause BSD license.
+
+      - android-ifaddrs.h, android-ifaddrs.c, copyright Berkeley Software Design
+        Inc, Kenneth MacKay and Emergya (Cloud4all, FP7/2007-2013, grant agreement
+        n° 289016). Three clause BSD license.
+  """
+
+- llhttp, located at deps/llhttp, is licensed as follows:
+  """
+    This software is licensed under the MIT License.
+
+    Copyright Fedor Indutny, 2018.
+
+    Permission is hereby granted, free of charge, to any person obtaining a
+    copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to permit
+    persons to whom the Software is furnished to do so, subject to the
+    following conditions:
+
+    The above copyright notice and this permission notice shall be included
+    in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+    OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+    NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+    DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+    OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+    USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- OpenSSL, located at deps/openssl, is licensed as follows:
+  """
+    Copyright (c) 1998-2019 The OpenSSL Project.  All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    1. Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+    2. Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+
+    3. All advertising materials mentioning features or use of this
+    software must display the following acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
+
+    4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
+    endorse or promote products derived from this software without
+    prior written permission. For written permission, please contact
+    openssl-core@openssl.org.
+
+    5. Products derived from this software may not be called "OpenSSL"
+    nor may "OpenSSL" appear in their names without prior written
+    permission of the OpenSSL Project.
+
+    6. Redistributions of any form whatsoever must retain the following
+    acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
+
+    THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
+    EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+    PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
+    ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+    NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+    LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+    HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+    STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+    ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+    OF THE POSSIBILITY OF SUCH DAMAGE.
+    == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == ==
+
+    This product includes cryptographic software written by Eric Young
+    (eay@cryptsoft.com).  This product includes software written by Tim
+    Hudson (tjh@cryptsoft.com).
+  """
+
+- Punycode.js, located at lib/punycode.js, is licensed as follows:
+  """
+    Copyright Mathias Bynens <https://mathiasbynens.be/>
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- V8, located at deps/v8, is licensed as follows:
+  """
+    This license applies to all parts of V8 that are not externally
+    maintained libraries.  The externally maintained libraries used by V8
+    are:
+
+      - PCRE test suite, located in
+        test/mjsunit/third_party/regexp-pcre/regexp-pcre.js.  This is based on the
+        test suite from PCRE-7.3, which is copyrighted by the University
+        of Cambridge and Google, Inc.  The copyright notice and license
+        are embedded in regexp-pcre.js.
+
+      - Layout tests, located in test/mjsunit/third_party/object-keys.  These are
+        based on layout tests from webkit.org which are copyrighted by
+        Apple Computer, Inc. and released under a 3-clause BSD license.
+
+      - Strongtalk assembler, the basis of the files assembler-arm-inl.h,
+        assembler-arm.cc, assembler-arm.h, assembler-ia32-inl.h,
+        assembler-ia32.cc, assembler-ia32.h, assembler-x64-inl.h,
+        assembler-x64.cc, assembler-x64.h, assembler-mips-inl.h,
+        assembler-mips.cc, assembler-mips.h, assembler.cc and assembler.h.
+        This code is copyrighted by Sun Microsystems Inc. and released
+        under a 3-clause BSD license.
+
+      - Valgrind client API header, located at src/third_party/valgrind/valgrind.h
+        This is released under the BSD license.
+
+      - The Wasm C/C++ API headers, located at third_party/wasm-api/wasm.{h,hh}
+        This is released under the Apache license. The API's upstream prototype
+        implementation also formed the basis of V8's implementation in
+        src/wasm/c-api.cc.
+
+    These libraries have their own licenses; we recommend you read them,
+    as their terms may differ from the terms below.
+
+    Further license information can be found in LICENSE files located in
+    sub-directories.
+
+    Copyright 2014, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- SipHash, located at deps/v8/src/third_party/siphash, is licensed as follows:
+  """
+    SipHash reference C implementation
+
+    Copyright (c) 2016 Jean-Philippe Aumasson <jeanphilippe.aumasson@gmail.com>
+
+    To the extent possible under law, the author(s) have dedicated all
+    copyright and related and neighboring rights to this software to the public
+    domain worldwide. This software is distributed without any warranty.
+  """
+
+- zlib, located at deps/zlib, is licensed as follows:
+  """
+    zlib.h -- interface of the 'zlib' general purpose compression library
+    version 1.2.11, January 15th, 2017
+
+    Copyright (C) 1995-2017 Jean-loup Gailly and Mark Adler
+
+    This software is provided 'as-is', without any express or implied
+    warranty.  In no event will the authors be held liable for any damages
+    arising from the use of this software.
+
+    Permission is granted to anyone to use this software for any purpose,
+    including commercial applications, and to alter it and redistribute it
+    freely, subject to the following restrictions:
+
+    1. The origin of this software must not be misrepresented; you must not
+    claim that you wrote the original software. If you use this software
+    in a product, an acknowledgment in the product documentation would be
+    appreciated but is not required.
+    2. Altered source versions must be plainly marked as such, and must not be
+    misrepresented as being the original software.
+    3. This notice may not be removed or altered from any source distribution.
+
+    Jean-loup Gailly        Mark Adler
+    jloup@gzip.org          madler@alumni.caltech.edu
+  """
+
+- npm, located at deps/npm, is licensed as follows:
+  """
+    The npm application
+    Copyright (c) npm, Inc. and Contributors
+    Licensed on the terms of The Artistic License 2.0
+
+    Node package dependencies of the npm application
+    Copyright (c) their respective copyright owners
+    Licensed on their respective license terms
+
+    The npm public registry at https://registry.npmjs.org
+    and the npm website at https://www.npmjs.com
+    Operated by npm, Inc.
+    Use governed by terms published on https://www.npmjs.com
+
+    "Node.js"
+    Trademark Joyent, Inc., https://joyent.com
+    Neither npm nor npm, Inc. are affiliated with Joyent, Inc.
+
+    The Node.js application
+    Project of Node Foundation, https://nodejs.org
+
+    The npm Logo
+    Copyright (c) Mathias Pettersson and Brian Hammond
+
+    "Gubblebum Blocky" typeface
+    Copyright (c) Tjarda Koster, https://jelloween.deviantart.com
+    Used with permission
+
+    --------
+
+    The Artistic License 2.0
+
+    Copyright (c) 2000-2006, The Perl Foundation.
+
+    Everyone is permitted to copy and distribute verbatim copies
+    of this license document, but changing it is not allowed.
+
+    Preamble
+
+    This license establishes the terms under which a given free software
+    Package may be copied, modified, distributed, and/or redistributed.
+    The intent is that the Copyright Holder maintains some artistic
+    control over the development of that Package while still keeping the
+    Package available as open source and free software.
+
+    You are always permitted to make arrangements wholly outside of this
+    license directly with the Copyright Holder of a given Package.  If the
+    terms of this license do not permit the full use that you propose to
+    make of the Package, you should contact the Copyright Holder and seek
+    a different licensing arrangement.
+
+    Definitions
+
+        "Copyright Holder" means the individual(s) or organization(s)
+        named in the copyright notice for the entire Package.
+
+        "Contributor" means any party that has contributed code or other
+        material to the Package, in accordance with the Copyright Holder's
+        procedures.
+
+        "You" and "your" means any person who would like to copy,
+        distribute, or modify the Package.
+
+        "Package" means the collection of files distributed by the
+        Copyright Holder, and derivatives of that collection and/or of
+        those files. A given Package may consist of either the Standard
+        Version, or a Modified Version.
+
+        "Distribute" means providing a copy of the Package or making it
+        accessible to anyone else, or in the case of a company or
+        organization, to others outside of your company or organization.
+
+        "Distributor Fee" means any fee that you charge for Distributing
+        this Package or providing support for this Package to another
+        party.  It does not mean licensing fees.
+
+        "Standard Version" refers to the Package if it has not been
+        modified, or has been modified only in ways explicitly requested
+        by the Copyright Holder.
+
+        "Modified Version" means the Package, if it has been changed, and
+        such changes were not explicitly requested by the Copyright
+        Holder.
+
+        "Original License" means this Artistic License as Distributed with
+        the Standard Version of the Package, in its current version or as
+        it may be modified by The Perl Foundation in the future.
+
+        "Source" form means the source code, documentation source, and
+        configuration files for the Package.
+
+        "Compiled" form means the compiled bytecode, object code, binary,
+        or any other form resulting from mechanical transformation or
+        translation of the Source form.
+
+    Permission for Use and Modification Without Distribution
+
+    (1)  You are permitted to use the Standard Version and create and use
+    Modified Versions for any purpose without restriction, provided that
+    you do not Distribute the Modified Version.
+
+    Permissions for Redistribution of the Standard Version
+
+    (2)  You may Distribute verbatim copies of the Source form of the
+    Standard Version of this Package in any medium without restriction,
+    either gratis or for a Distributor Fee, provided that you duplicate
+    all of the original copyright notices and associated disclaimers.  At
+    your discretion, such verbatim copies may or may not include a
+    Compiled form of the Package.
+
+    (3)  You may apply any bug fixes, portability changes, and other
+    modifications made available from the Copyright Holder.  The resulting
+    Package will still be considered the Standard Version, and as such
+    will be subject to the Original License.
+
+    Distribution of Modified Versions of the Package as Source
+
+    (4)  You may Distribute your Modified Version as Source (either gratis
+    or for a Distributor Fee, and with or without a Compiled form of the
+    Modified Version) provided that you clearly document how it differs
+    from the Standard Version, including, but not limited to, documenting
+    any non-standard features, executables, or modules, and provided that
+    you do at least ONE of the following:
+
+        (a)  make the Modified Version available to the Copyright Holder
+        of the Standard Version, under the Original License, so that the
+        Copyright Holder may include your modifications in the Standard
+        Version.
+
+        (b)  ensure that installation of your Modified Version does not
+        prevent the user installing or running the Standard Version. In
+        addition, the Modified Version must bear a name that is different
+        from the name of the Standard Version.
+
+        (c)  allow anyone who receives a copy of the Modified Version to
+        make the Source form of the Modified Version available to others
+        under
+
+            (i)  the Original License or
+
+            (ii)  a license that permits the licensee to freely copy,
+            modify and redistribute the Modified Version using the same
+            licensing terms that apply to the copy that the licensee
+            received, and requires that the Source form of the Modified
+            Version, and of any works derived from it, be made freely
+            available in that license fees are prohibited but Distributor
+            Fees are allowed.
+
+    Distribution of Compiled Forms of the Standard Version
+    or Modified Versions without the Source
+
+    (5)  You may Distribute Compiled forms of the Standard Version without
+    the Source, provided that you include complete instructions on how to
+    get the Source of the Standard Version.  Such instructions must be
+    valid at the time of your distribution.  If these instructions, at any
+    time while you are carrying out such distribution, become invalid, you
+    must provide new instructions on demand or cease further distribution.
+    If you provide valid instructions or cease distribution within thirty
+    days after you become aware that the instructions are invalid, then
+    you do not forfeit any of your rights under this license.
+
+    (6)  You may Distribute a Modified Version in Compiled form without
+    the Source, provided that you comply with Section 4 with respect to
+    the Source of the Modified Version.
+
+    Aggregating or Linking the Package
+
+    (7)  You may aggregate the Package (either the Standard Version or
+    Modified Version) with other packages and Distribute the resulting
+    aggregation provided that you do not charge a licensing fee for the
+    Package.  Distributor Fees are permitted, and licensing fees for other
+    components in the aggregation are permitted. The terms of this license
+    apply to the use and Distribution of the Standard or Modified Versions
+    as included in the aggregation.
+
+    (8) You are permitted to link Modified and Standard Versions with
+    other works, to embed the Package in a larger work of your own, or to
+    build stand-alone binary or bytecode versions of applications that
+    include the Package, and Distribute the result without restriction,
+    provided the result does not expose a direct interface to the Package.
+
+    Items That are Not Considered Part of a Modified Version
+
+    (9) Works (including, but not limited to, modules and scripts) that
+    merely extend or make use of the Package, do not, by themselves, cause
+    the Package to be a Modified Version.  In addition, such works are not
+    considered parts of the Package itself, and are not subject to the
+    terms of this license.
+
+    General Provisions
+
+    (10)  Any use, modification, and distribution of the Standard or
+    Modified Versions is governed by this Artistic License. By using,
+    modifying or distributing the Package, you accept this license. Do not
+    use, modify, or distribute the Package, if you do not accept this
+    license.
+
+    (11)  If your Modified Version has been derived from a Modified
+    Version made by someone other than you, you are nevertheless required
+    to ensure that your Modified Version complies with the requirements of
+    this license.
+
+    (12)  This license does not grant you the right to use any trademark,
+    service mark, tradename, or logo of the Copyright Holder.
+
+    (13)  This license includes the non-exclusive, worldwide,
+    free-of-charge patent license to make, have made, use, offer to sell,
+    sell, import and otherwise transfer the Package with respect to any
+    patent claims licensable by the Copyright Holder that are necessarily
+    infringed by the Package. If you institute patent litigation
+    (including a cross-claim or counterclaim) against any party alleging
+    that the Package constitutes direct or contributory patent
+    infringement, then this Artistic License to you shall terminate on the
+    date that such litigation is filed.
+
+    (14)  Disclaimer of Warranty:
+    THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS
+    IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR
+    NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL
+    LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL
+    BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+    DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF
+    ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+    --------
+  """
+
+- GYP, located at tools/gyp, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- inspector_protocol, located at tools/inspector_protocol, is licensed as follows:
+  """
+    // Copyright 2016 The Chromium Authors. All rights reserved.
+    //
+    // Redistribution and use in source and binary forms, with or without
+    // modification, are permitted provided that the following conditions are
+    // met:
+    //
+    //    * Redistributions of source code must retain the above copyright
+    // notice, this list of conditions and the following disclaimer.
+    //    * Redistributions in binary form must reproduce the above
+    // copyright notice, this list of conditions and the following disclaimer
+    // in the documentation and/or other materials provided with the
+    // distribution.
+    //    * Neither the name of Google Inc. nor the names of its
+    // contributors may be used to endorse or promote products derived from
+    // this software without specific prior written permission.
+    //
+    // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- jinja2, located at tools/inspector_protocol/jinja2, is licensed as follows:
+  """
+    Copyright (c) 2009 by the Jinja Team, see AUTHORS for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+
+        * The names of the contributors may not be used to endorse or
+          promote products derived from this software without specific
+          prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- markupsafe, located at tools/inspector_protocol/markupsafe, is licensed as follows:
+  """
+    Copyright (c) 2010 by Armin Ronacher and contributors.  See AUTHORS
+    for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms of the software as well
+    as documentation, with or without modification, are permitted provided
+    that the following conditions are met:
+
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+
+    * The names of the contributors may not be used to endorse or
+      promote products derived from this software without specific
+      prior written permission.
+
+    THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+    CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
+    NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+    OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+    EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+    PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+    PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+    NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+    SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+    DAMAGE.
+  """
+
+- cpplint.py, located at tools/cpplint.py, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- ESLint, located at tools/node_modules/eslint, is licensed as follows:
+  """
+    Copyright JS Foundation and other contributors, https://js.foundation
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- babel-eslint, located at tools/node_modules/babel-eslint, is licensed as follows:
+  """
+    Copyright (c) 2014-2016 Sebastian McKenzie <sebmck@gmail.com>
+
+    MIT License
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- gtest, located at test/cctest/gtest, is licensed as follows:
+  """
+    Copyright 2008, Google Inc.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+        * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- nghttp2, located at deps/nghttp2, is licensed as follows:
+  """
+    The MIT License
+
+    Copyright (c) 2012, 2014, 2015, 2016 Tatsuhiro Tsujikawa
+    Copyright (c) 2012, 2014, 2015, 2016 nghttp2 contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- node-inspect, located at deps/node-inspect, is licensed as follows:
+  """
+    Copyright Node.js contributors. All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+  """
+
+- large_pages, located at src/large_pages, is licensed as follows:
+  """
+     Copyright (C) 2018 Intel Corporation
+
+     Permission is hereby granted, free of charge, to any person obtaining a copy
+     of this software and associated documentation files (the "Software"),
+     to deal in the Software without restriction, including without limitation
+     the rights to use, copy, modify, merge, publish, distribute, sublicense,
+     and/or sell copies of the Software, and to permit persons to whom
+     the Software is furnished to do so, subject to the following conditions:
+
+     The above copyright notice and this permission notice shall be included
+     in all copies or substantial portions of the Software.
+
+     THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+     OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+     FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
+     THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES
+     OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+     ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
+     OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- caja, located at lib/internal/freeze_intrinsics.js, is licensed as follows:
+  """
+     Adapted from SES/Caja - Copyright (C) 2011 Google Inc.
+     Copyright (C) 2018 Agoric
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+  """
+
+- brotli, located at deps/brotli, is licensed as follows:
+  """
+    Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- HdrHistogram, located at deps/histogram, is licensed as follows:
+  """
+    The code in this repository code was Written by Gil Tene, Michael Barker,
+    and Matt Warren, and released to the public domain, as explained at
+    http://creativecommons.org/publicdomain/zero/1.0/
+
+    For users of this code who wish to consume it under the "BSD" license
+    rather than under the public domain or CC0 contribution text mentioned
+    above, the code found under this directory is *also* provided under the
+    following license (commonly referred to as the BSD 2-Clause License). This
+    license does not detract from the above stated release of the code into
+    the public domain, and simply represents an additional license granted by
+    the Author.
+
+    -----------------------------------------------------------------------------
+    ** Beginning of "BSD 2-Clause License" text. **
+
+     Copyright (c) 2012, 2013, 2014 Gil Tene
+     Copyright (c) 2014 Michael Barker
+     Copyright (c) 2014 Matt Warren
+     All rights reserved.
+
+     Redistribution and use in source and binary forms, with or without
+     modification, are permitted provided that the following conditions are met:
+
+     1. Redistributions of source code must retain the above copyright notice,
+        this list of conditions and the following disclaimer.
+
+     2. Redistributions in binary form must reproduce the above copyright notice,
+        this list of conditions and the following disclaimer in the documentation
+        and/or other materials provided with the distribution.
+
+     THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+     AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+     IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+     ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+     LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+     INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+     CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+     THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- node-heapdump, located at src/heap_utils.cc, is licensed as follows:
+  """
+    ISC License
+
+    Copyright (c) 2012, Ben Noordhuis <info@bnoordhuis.nl>
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    === src/compat.h src/compat-inl.h ===
+
+    ISC License
+
+    Copyright (c) 2014, StrongLoop Inc.
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- rimraf, located at lib/internal/fs/rimraf.js, is licensed as follows:
+  """
+    The ISC License
+
+    Copyright (c) Isaac Z. Schlueter and Contributors
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+    IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- uvwasi, located at deps/uvwasi, is licensed as follows:
+  """
+    MIT License
+
+    Copyright (c) 2019 Colin Ihrig and Contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in all
+    copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+    SOFTWARE.
+  """
+
+
+
+
+

MICROSOFT SOFTWARE LICENSE TERMS

+
+
+
MICROSOFT VISUAL STUDIO CODE
+
+These license terms are an agreement between you and Microsoft Corporation (or based on where you live, one of its affiliates). They apply to the software named above. The terms also apply to any Microsoft services or updates for the software, except to the extent those have different terms.
+
+IF YOU COMPLY WITH THESE LICENSE TERMS, YOU HAVE THE RIGHTS BELOW.
+
+    1. INSTALLATION AND USE RIGHTS.
+        a. General. You may use any number of copies of the software to develop and test your applications, including deployment within your internal corporate network.
+        b. Demo use. The uses permitted above include use of the software in demonstrating your applications.
+        c. Third Party Components. The software may include third party components with separate legal notices or governed by other agreements, as may be described in the ThirdPartyNotices file accompanying the software.
+        d. Extensions. The software gives you the option to download other Microsoft and third party software packages from our extension marketplace or package managers. Those packages are under their own licenses, and not this agreement. Microsoft does not distribute, license or provide any warranties for any of the third party packages. By accessing or using our extension marketplace, you agree to the extension marketplace terms located at https://aka.ms/vsmarketplace-ToU.
+    2. DATA.
+        a. Data Collection. The software may collect information about you and your use of the software, and send that to Microsoft. Microsoft may use this information to provide services and improve our products and services. You may opt-out of many of these scenarios, but not all, as described in the product documentation located at https://code.visualstudio.com/docs/supporting/faq#_how-to-disable-telemetry-reporting. There may also be some features in the software that may enable you and Microsoft to collect data from users of your applications. If you use these features, you must comply with applicable law, including providing appropriate notices to users of your applications together with Microsoft’s privacy statement. Our privacy statement is located at https://go.microsoft.com/fwlink/?LinkID=824704. You can learn more about data collection and use in the help documentation and our privacy statement. Your use of the software operates as your consent to these practices.
+        b. Processing of Personal Data. To the extent Microsoft is a processor or subprocessor of personal data in connection with the software, Microsoft makes the commitments in the European Union General Data Protection Regulation Terms of the Online Services Terms to all customers effective May 25, 2018, at https://go.microsoft.com/?linkid=9840733.
+    3. UPDATES. The software may periodically check for updates and download and install them for you. You may obtain updates only from Microsoft or authorized sources. Microsoft may need to update your system to provide you with updates. You agree to receive these automatic updates without any additional notice. Updates may not include or support all existing software features, services, or peripheral devices. If you do not want automatic updates, you may turn them off by following the instructions in the documentation at https://go.microsoft.com/fwlink/?LinkID=616397.
+    4. FEEDBACK. If you give feedback about the software to Microsoft, you give to Microsoft, without charge, the right to use, share and commercialize your feedback in any way and for any purpose. You will not give feedback that is subject to a license that requires Microsoft to license its software or documentation to third parties because we include your feedback in them. These rights survive this agreement.
+    5. SCOPE OF LICENSE. This license applies to the Visual Studio Code product. Source code for Visual Studio Code is available at https://github.com/Microsoft/vscode under the MIT license agreement. The software is licensed, not sold. This agreement only gives you some rights to use the software. Microsoft reserves all other rights. Unless applicable law gives you more rights despite this limitation, you may use the software only as expressly permitted in this agreement. In doing so, you must comply with any technical limitations in the software that only allow you to use it in certain ways. You may not
+        reverse engineer, decompile or disassemble the software, or otherwise attempt to derive the source code for the software except and solely to the extent required by third party licensing terms governing use of certain open source components that may be included in the software;
+        remove, minimize, block or modify any notices of Microsoft or its suppliers in the software;
+        use the software in any way that is against the law;
+        share, publish, rent or lease the software, or provide the software as a stand-alone offering for others to use.
+    6. SUPPORT SERVICES. Because this software is “as is,” we may not provide support services for it.
+    7. ENTIRE AGREEMENT. This agreement, and the terms for supplements, updates, Internet-based services and support services that you use, are the entire agreement for the software and support services.
+    8. EXPORT RESTRICTIONS. You must comply with all domestic and international export laws and regulations that apply to the software, which include restrictions on destinations, end-users, and end use. For further information on export restrictions, see https://www.microsoft.com/exporting.
+    9. APPLICABLE LAW. If you acquired the software in the United States, Washington law applies to interpretation of and claims for breach of this agreement, and the laws of the state where you live apply to all other claims. If you acquired the software in any other country, its laws apply.
+    10. CONSUMER RIGHTS; REGIONAL VARIATIONS. This agreement describes certain legal rights. You may have other rights, including consumer rights, under the laws of your state or country. Separate and apart from your relationship with Microsoft, you may also have rights with respect to the party from which you acquired the software. This agreement does not change those other rights if the laws of your state or country do not permit it to do so. For example, if you acquired the software in one of the below regions, or mandatory country law applies, then the following provisions apply to you:
+        a. Australia. You have statutory guarantees under the Australian Consumer Law and nothing in this agreement is intended to affect those rights.
+        b. Canada. If you acquired this software in Canada, you may stop receiving updates by turning off the automatic update feature, disconnecting your device from the Internet (if and when you re-connect to the Internet, however, the software will resume checking for and installing updates), or uninstalling the software. The product documentation, if any, may also specify how to turn off updates for your specific device or software.
+        c. Germany and Austria.
+            Warranty. The properly licensed software will perform substantially as described in any Microsoft materials that accompany the software. However, Microsoft gives no contractual guarantee in relation to the licensed software.
+            Limitation of Liability. In case of intentional conduct, gross negligence, claims based on the Product Liability Act, as well as, in case of death or personal or physical injury, Microsoft is liable according to the statutory law.
+
+        Subject to the foregoing clause (ii), Microsoft will only be liable for slight negligence if Microsoft is in breach of such material contractual obligations, the fulfillment of which facilitate the due performance of this agreement, the breach of which would endanger the purpose of this agreement and the compliance with which a party may constantly trust in (so-called "cardinal obligations"). In other cases of slight negligence, Microsoft will not be liable for slight negligence.
+    11. DISCLAIMER OF WARRANTY. The software is licensed “as-is.” You bear the risk of using it. Microsoft gives no express warranties, guarantees or conditions. To the extent permitted under your local laws, Microsoft excludes the implied warranties of merchantability, fitness for a particular purpose and non-infringement.
+
+    12. LIMITATION ON AND EXCLUSION OF DAMAGES. You can recover from Microsoft and its suppliers only direct damages up to U.S. $5.00. You cannot recover any other damages, including consequential, lost profits, special, indirect or incidental damages.
+
+    This limitation applies to (a) anything related to the software, services, content (including code) on third party Internet sites, or third party applications; and (b) claims for breach of contract, breach of warranty, guarantee or condition, strict liability, negligence, or other tort to the extent permitted by applicable law.
+
+    It also applies even if Microsoft knew or should have known about the possibility of the damages. The above limitation or exclusion may not apply to you because your state or country may not allow the exclusion or limitation of incidental, consequential or other damages.
+
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/migration-from-devonfw-3.0.0-or-lower.html b/docs/devonfw.github.io/1.0/ide.wiki/migration-from-devonfw-3.0.0-or-lower.html new file mode 100644 index 00000000..c9a62995 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/migration-from-devonfw-3.0.0-or-lower.html @@ -0,0 +1,405 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

Migration from oasp4j-ide

+
+
+

The devonfw-ide is a completely new and innovative solution for managing the local development environment that has been created from scratch. +Releases of OASP as well as releases of devonfw until version 3.1.x are based on the old oasp4j-ide that is now considered deprecated. As devonfw-ide is a complete redesign this will have some impact for the users. This section should help and assist so you do not get lost.

+
+
+
+
+

Get familiar with devonfw-ide

+
+
+

First of all you should roughly get familiar with the new devonfw-ide. The key features and changes are:

+
+
+
    +
  • +

    platform-agnostic (supports Windows, Mac, and Linux in a single distribution)

    +
  • +
  • +

    small core (reduced the download package from ~2 gigabyte to ~2 megabyte)

    +
  • +
  • +

    fast and easy updates (built in update support)

    +
  • +
  • +

    minimum number of scripts (removed tons of end-user scripts making things much simpler)

    +
  • +
  • +

    fully automated setup (run setup script and you are ready - even for advanced features that had to be configured manually before)

    +
  • +
  • +

    single command for everything (entire CLI available via new devon command)

    +
  • +
+
+
+

For all the details you should study the documentation starting from the beginning.

+
+
+
+
+

Migration of existing oasp4j-ide installation

+
+
+
    +
  • +

    extract new devonfw-ide-scripts on top of your existing installation

    +
  • +
  • +

    run setup

    +
  • +
  • +

    done

    +
  • +
+
+
+

If you get errors:

+
+
+
    +
  • +

    ask your technical lead to fix the settings git repo for devonfw-ide or offer him to do it for you.

    +
  • +
  • +

    you need to merge the devon folder into your settings

    +
  • +
  • +

    you need to merge the devon.properties into your settings

    +
  • +
  • +

    you should check your variables[-customized][.bat] and merge required customizations into the proper configuration

    +
  • +
+
+
+
+
+

Hints for users after migration

+
+
+

Getting used to all the new commands might be tedious when starting after a migration.

+
+
+
Comparison of commands
+

|== == == == == == == == == == == = +|oasp4j-ide command|devonfw-ide command|Comment +|create-or-update-workspace|devon eclipse ws-update +.4+|actually not needed anymore as workspace is updated automatically when IDE is launched. To launch your IDE simply run devon eclipse, devon intellij, or devon vscode. If you like to get launch scripts for your IDE e.g. Eclipse just call devon eclipse --all create-script. +|create-or-update-workspace «workspace»|cd «workspace» && devon eclipse ws-update +|update-all-workspaces|devon eclipse --all ws-update +|create-or-update-workspace-vs|devon vscode ws-update

+
+
+

|devcon workspace create «workspace»|Simply create the «workspace» directory (e.g. cd workspaces && mkdir examples)|

+
+
+

|scripts/update-eclipse-workspace-settings|devon eclipse ws-reverse|To add new properties (old option --new) use devon eclipse ws-reverse-add

+
+
+

|devcon project build
+devcon devon4j build
+devcon devon4ng build +|devon build|

+
+
+

|devcon devon4j create|devon java create|

+
+
+

|devcon devon4ng create|devon ng create|

+
+
+

|devcon system *
+devcon dist * +|setup or devon ide setup|

+
+
+

|console.bat|-|Simply open terminal in selected folder. On Windows right-click folder in windows-explorer and select open devonfw CMD here.

+
+
+

|devcon help|devon help|

+
+
+

|devcon doc|Read the documentation from devonfw.com| +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/mvn.html b/docs/devonfw.github.io/1.0/ide.wiki/mvn.html new file mode 100644 index 00000000..534cb9ae --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/mvn.html @@ -0,0 +1,297 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

mvn

+
+
+

The mvn commandlet allows you to install, configure, and launch maven. It is similar to maven-wrapper and mdub. So calling devon mvn «args» is more or less the same as calling mvn «args» but with the benefit that the version of maven preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon mvn «args») are explained by the following table:

+
+
+
Usage of devon mvn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via MVN_BUILD_OPTS +|setup |setup Maven (install and verify), configurable via MAVEN_VERSION +|get-version |Print the version of your current project. Will consolidate the version for multi-module projects ignoring dev[-SNAPSHOT] versions and fail on mixed versions. +|set-version «nv» [«cv»] |Set the version of your current project to «nv» (assuming your current version is «cv»). +|check-no-snapshots |Check if no «version»-SNAPSHOT dependencies are used. +|check-top-level-project |Check if you are running on a top-level project or fail if in a module or no maven project at all. +|release |Start a clean deploy release build, configurable via MVN_RELEASE_OPTS +|«args» |run maven with the given arguments («args») +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/ng.html b/docs/devonfw.github.io/1.0/ide.wiki/ng.html new file mode 100644 index 00000000..67a11af7 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/ng.html @@ -0,0 +1,293 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

ng

+
+
+

The ng commandlet allows you to install, configure, and launch ng (angular-cli). Calling devon ng «args» is more or less the same as calling ng «args» but with some advanced features and ensuring that ng is properly set up for your project.

+
+
+

The arguments (devon ng «args») are explained by the following table:

+
+
+
Usage of devon ng
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup ng (install and verify), configurable via NG_VERSION +|create |Create a new devon4ng project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ng with the given arguments («args») +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/node.html b/docs/devonfw.github.io/1.0/ide.wiki/node.html new file mode 100644 index 00000000..a4b21553 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/node.html @@ -0,0 +1,293 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

node

+
+
+

The node commandlet allows you to install and set up node.js. +The arguments (devon node «args») are explained by the following table:

+
+
+
Usage of devon node
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup node.js (install and verify), configurable via NODE_VERSION +|create «name» [«args»] | create a new devon4node application (same as devon4node new) +|generate «s» [«args»] | generate devon4node components using the schematic «s» (same as devon4node generate) +|db «c» [«args»] | execute a TypeORM command «c» (same as devon4node db) +|cicd «args» |generate cicd files for the current devon4node project +|«args» | call NodeJS with the specified arguments +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/npm.html b/docs/devonfw.github.io/1.0/ide.wiki/npm.html new file mode 100644 index 00000000..0737fbc0 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/npm.html @@ -0,0 +1,296 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

npm

+
+
+

The npm commandlet allows you to install, configure, and launch npm. Calling devon npm «args» is more or less the same as calling npm «args» but with the benefit that the version of npm preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon npm «args») are explained by the following table:

+
+
+
Usage of devon npm
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via NPM_BUILD_OPTS +|setup |setup NPM (install and verify), configurable via NPM_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |Start a clean deploy release build, configurable via NPM_RELEASE_OPTS +|«args» |run NPM with the given arguments («args») +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/project.html b/docs/devonfw.github.io/1.0/ide.wiki/project.html new file mode 100644 index 00000000..e2a3f5dd --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/project.html @@ -0,0 +1,318 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

project

+
+
+

The project commandlet manages projects of your devonfw-ide. +You need to supply additional arguments as devon project «args». These are explained by the following table:

+
+
+
Usage of devon project
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup [«project»] |setup or update all or the specified project(s) +|== == == == == == == == == == == =

+
+
+
+
+

setup

+
+
+

Run devon project setup «project» to setup the pre-configured project «project». +During the initial setup this will happen for all active projects. +Call this command explicitly to setup a project that is not active by default. +Further, if the project has already been setup it will be updated (git pull). +In case you omit the project name all (active) projects will be setup/updated. +Use force option (-f) to setup all projects even if not active.

+
+
+

The setup of a project will include:

+
+
+
    +
  • +

    clone or pull the project from git into the configured workspace and location

    +
  • +
  • +

    trigger a build on the project (optional as configured)

    +
  • +
  • +

    import the project into eclipse (optional as configured)

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/projects.html b/docs/devonfw.github.io/1.0/ide.wiki/projects.html new file mode 100644 index 00000000..599ee3d4 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/projects.html @@ -0,0 +1,317 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Project import

+
+
+

The devonfw-ide supports automatically checking out and importing required projects into your IDE during setup. To configure this you put a .properties file for each desired project into the projects sub-folder in your settings. Each .properties file describes one "project" which you would like to check out and (potentially) import:

+
+
+
+
path=myproject
+workingsets=Set1,Set2
+workspace=example
+git.url=http://github.com/someorg/someproject
+git.branch=develop
+build.path=.
+build.cmd=mvn -DskipTests=true -Darchetype.test.skip=true clean install
+eclipse=import
+active=true
+
+
+
+
+
.Variables of project import
+
+
+
+

|== = +|Variable|Value|Meaning +|path|e.g. myproject, will clone into ${WORKSPACE_PATH}/myproject|(required) Path into which the project is cloned. This path is relative to the workspace. +|workingsets|e.g. ws1,ws2|(optional) This will create working sets (in eclipse). Each module (eclipse project) of this project will be part of all these working sets. Working sets will be automatically created if necessary. +|workspace|main|Workspace to use for checkout and import. Default is main. +|git.url|e.g. http://github.com/someorg/someproject|(required) Git URL to use for cloning the project. +|git.branch|e.g. develop|(optional) Git branch to checkout. Git default branch is default. +|build.path|e.g. . (default)|(optional) The directory inside path where to trigger an initial build after clone or pull (if build.cmd is set). For a regular project use . to build top-level project. +|build.cmd +|e.g. mvn -DskipTests=true -Darchetype.test.skip=true clean install +|(optional) The devonfw command to invoke to build the project after clone or pull. If omitted no build is triggered. +|eclipse|e.g. import|(optional) Desired action for eclipse IDE. If you put import here all modules (eclipse projects) in the current project will be imported into eclipse. If you leave this out or put any other value for this parameter, no change in eclipse is done. +|active|true|(optional) If set to false the project is skipped during the setup. +|== =

+
+
+

Please note that the .properties file is parsed via shell and not via java. So be careful with "advanced" features .properties files normally support.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/release.html b/docs/devonfw.github.io/1.0/ide.wiki/release.html new file mode 100644 index 00000000..e5dfc849 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/release.html @@ -0,0 +1,349 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

release

+
+
+

Create a release in a standardized way including the following steps:

+
+
+
    +
  • +

    verify the current project (no local changes, etc.)

    +
  • +
  • +

    warn if «version»-SNAPSHOT dependencies are used

    +
  • +
  • +

    determine «version» (if currently «version»-SNAPSHOT) and print out release information.

    +
  • +
  • +

    ask user for confirmation

    +
  • +
  • +

    bump release to «version» in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    create annotated tag for your release as release/«version»

    +
  • +
  • +

    invoke deployment on build-system

    +
  • +
  • +

    set next version as («version»+1)-SNAPSHOT in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    push your changes

    +
  • +
+
+
+
Usage of devon java
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|…​ |any optional argument will directly be passed to the actual command to build the deployment +|== == == == == == == == == == == =

+
+
+
+
+

Build-Tools

+
+
+

This release commandlet utilizes the build commandlet to support multiple build-tools such as maven, gradle, or npm. Each of those commandlets should respect the variable «TOOL»_RELEASE_OPTS to customize the parameters for the release build.

+
+
+

So e.g. if a pom.xml is detected, maven will be used. In this example the variable MVN_RELEASE_OPTS is used that defaults to clean deploy -Dchangelist= -Pdeploy. +If you provide a specific argument this will be passed additionally. +So if you invoke the command devon release -P myProfile, the above step invoke deployment on build-system would technically call this:

+
+
+
+
mvn clean deploy -Dchangelist= -Pdeploy -P myProfile
+
+
+
+

Please also note that it is very tricky to determine and modify the version of a project in a fully generic way. +Even though we try our best to support different scenarios, we can not ensure this is working for edge-cases. +Therefore, we strongly encourage to follow best practices such as ci-friendly maven. +Further, sticking to the defaults and follow the devonfw standard to name the profile for custom goals in deployment simply deploy is recommended.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/scripts.html b/docs/devonfw.github.io/1.0/ide.wiki/scripts.html new file mode 100644 index 00000000..a6b6bc14 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/scripts.html @@ -0,0 +1,324 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

scripts

+
+
+

This directory is the heart of the devonfw-ide and contains the required scripts.

+
+
+
File structure of the conf folder
+
+
/scripts
+├──/ command
+│  ├── build
+│  ├── docker
+│  ├── eclipse
+│  ├── gradle
+│  ├── help
+│  ├── ide
+│  ├── intellij
+│  ├── ionic
+│  ├── jasypt
+│  ├── java
+│  ├── jenkins
+│  ├── kubectl
+│  ├── mvn
+│  ├── ng
+│  ├── node
+│  ├── npm
+│  ├── project
+│  ├── release
+│  ├── sonar
+│  ├── vscode
+│  └── yarn
+├── devon
+├── devon.bat
+├── environment-project
+├── environment-project.bat
+├── functions
+└── devon.properties
+
+
+
+

The command folder contains the commandlets. +The devon script is the key command line interface for devonfw-ide. +There is also devon.bat that can be used in cmd or PowerShell. +As the devon CLI can be used as a global command on your computer from any directory and gets installed centrally, it aims to be stable, minimal, and lightweight. +The key logic to set up the environment variables is therefore in a separate script environment-project and its Windows variant environment-project.bat inside this scripts folder. +The file functions contains a collection of reusable bash functions. +These are sourced and used by the commandlets. +Finally the devon.properties file contains defaults for the general configuration of devonfw-ide.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/settings.html b/docs/devonfw.github.io/1.0/ide.wiki/settings.html new file mode 100644 index 00000000..7f071038 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/settings.html @@ -0,0 +1,370 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

settings

+
+
+

The devonfw-ide requires settings with configuration templates for the arbitrary tools.

+
+
+

To get an initial set of these settings we provide the default ide-settings as an initial package. These are also released so you can download the latest stable or any history version at maven central.

+
+
+

To test devonfw-ide or for very small projects you can also use the latest default settings (just hit return when setup is asking for the Settings URL). +However, for collaborative projects we strongly encourage you to distribute and maintain the settings via a dedicated and project specific git repository. +This gives you the freedom to control and manage the tools with their versions and configurations during the project lifecycle. +Therefore simply follow the admin usage guide.

+
+
+
+
+

Structure

+
+
+

The settings folder (see SETTINGS_PATH) has to follow this file structure:

+
+
+
File structure of settings
+
+
/settings
+├──/ devon
+│  ├──/ conf
+│  │  ├──/ .m2
+│  │  │  └── settings.xml
+│  │  ├──/ npm
+│  │  │  └── .npmrc
+│  │  └── devon.properties
+├──/ eclipse
+│  ├──/ workspace
+│  │  ├──/ setup
+│  │  └──/ update
+│  ├── lifecycle-mapping-metadata.xml
+│  └── project.dictionary
+├──/ ...
+├──/ sonarqube
+│  └──/ profiles
+│     ├── Devon-C#.xml
+│     ├── ...
+│     └── Devon-XML.xml
+├──/ vscode
+│  └──/ workspace
+│     ├──/ setup
+│     └──/ update
+└── devon.properties
+
+
+
+

As you can see, the settings folder contains sub-folders for tools of the IDE. +So the devon folder contains devon.properties files for the configuration of your environment. +Further, for the IDEs such as eclipse or vscode, the according folders contain the templates to manage the workspace via our configurator.

+
+
+
+
+

Configuration Philosophy

+
+
+

Different tools and configuration files require a different handling:

+
+
+
    +
  • +

    Where suitable, we directly use these configurations from your settings (e.g. for eclipse/lifecycle-mapping-metadata.xml, or eclipse/project.dictionary).

    +
  • +
  • +

    The devon folder in settings contains templates for configuration files. These are copied to the devonfw-ide installation during setup (if no such file already exists). In this way the settings repository can provide reasonable defaults but allows the user to take over control and customize to their personal needs (e.g. .m2/settings.xml).

    +
  • +
  • +

    Other configurations need to be imported manually. To avoid manual steps and simplify use we try to automate as much as possible. This currently applies to sonarqube profiles but will be automated with sonar-devon4j-plugin in the future.

    +
  • +
  • +

    For tools with complex configuration structures like eclipse, intellij, or vscode we provide a smart mechanism via our configurator.

    +
  • +
+
+
+
+
+

Customize Settings

+
+
+

You can easily customize these settings for the requirements of your project. We suggest that one team member is responsible to ensure that everything stays consistent and works.

+
+
+

You may also create new sub-folders in settings and put individual items according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth sharing within your team. However, to share and maintain knowledge we recommend using a wiki.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/setup.html b/docs/devonfw.github.io/1.0/ide.wiki/setup.html new file mode 100644 index 00000000..3a4be00f --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/setup.html @@ -0,0 +1,385 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

Setup

+
+ +
+
+
+

Prerequisites

+
+
+

We try to make it as simple as possible for you. However, there are some minimal prerequisites:

+
+
+
    +
  • +

    You need to have a tool to extract *.tar.gz files (tar and gzip). On Windows before Version 10 (1803) use 7-zip. On all other platforms this comes out of the box.

    +
  • +
  • +

    You need to have git and curl installed.

    +
    +
      +
    • +

      On Windows you only need to download and install git for windows. This also ships with bash and curl.

      +
    • +
    • +

      On Linux you might need to install the above tools in case they are not present (e.g. sudo apt-get install git curl or sudo yum install git-core curl)

      +
    • +
    • +

      On MacOS you only need to download and install git for mac.

      +
    • +
    +
    +
  • +
+
+
+
+
+

Download

+
+
+

The latest release of devonfw-ide can be downloaded from here (You can find all releases in maven central).

+
+
+
+
+

Install

+
+
+

Create a central folder like C:\projects or /projects. Inside this folder, create a sub-folder for your new project such as my-project and extract the contents of the downloaded archive (devonfw-ide-scripts-*.tar.gz) to this new folder. Run the command setup in this folder (on windows double clicking on setup.bat). +That’s all. To get started read the usage.

+
+
+
+
+

Uninstall

+
+
+

To "uninstall" your devonfw-ide you only need to call the following command:

+
+
+
+
devon ide uninstall
+
+
+
+

Then you can delete the devonfw-ide top-level folder(s) (${DEVON_IDE_HOME}).

+
+
+

The devonfw-ide is designed to be non-invasive to your operating system and computer. Therefore it is not "installed" on your system in a classical way. Instead you just create a folder and extract the downloaded archive to it. You only have to install some specific prerequisites like git in advance. All the other software remains locally in your devonfw-ide folder. However, there are the following exceptions (which are reverted by devon ide uninstall):

+
+
+
    +
  • +

    The devon command is copied to your home directory (~/.devon/devon)

    +
  • +
  • +

    The devon alias is added to your shell config (~/.bashrc and ~/.zshrc, search for alias devon="source ~/.devon/devon").

    +
  • +
  • +

    On Windows the devon.bat command is copied to your home directory (%USERPROFILE%\scripts\devon.bat)

    +
  • +
  • +

    On Windows this %USERPROFILE%\scripts directory is added to the PATH of your user.

    +
  • +
  • +

    The devonfw-ide will download third-party software to your ~/Downloads/devonfw-ide folder to reduce redundant storage. You have to delete this folder manually as we do not want to be responsible for data loss in case users manually put files here.

    +
  • +
+
+
+
+
+

Testing SNAPSHOT releases

+
+
+

Whenever a story in devonfw-ide is completed by merging a PR, +our github actions will build a new SNAPSHOT release and on success deploy it to nexus on OSSRH. +You can therefore find the latest devonfw SNAPSHOT releases here. +Simply choose the latest SNAPSHOT version folder and then inside the *.tar.gz file for the latest version. +Once downloaded, you can proceed as with official releases (see install).

+
+
+

If you test the latest SNAPSHOT please also give feedback to bug or feature tickets to let us know if things are working or not. +Thanks for your testing, support and help to make devonfw better!

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/software-package.html b/docs/devonfw.github.io/1.0/ide.wiki/software-package.html new file mode 100644 index 00000000..bc072778 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/software-package.html @@ -0,0 +1,475 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

Software Package

+
+
+

The devon-ide requires a software package. This section explains how to build one yourself.

+
+
+
+
+

Layout

+
+
+

The software package is a simple ZIP file with the following structure:

+
+
+
    +
  • +

    software

    +
    +
      +
    • +

      eclipse

      +
    • +
    • +

      java

      +
    • +
    • +

      maven

      +
    • +
    • +

      tomcat

      +
    • +
    • +

      …​

      +
    • +
    +
    +
  • +
+
+
+

So for each tool you have a simple folder inside the software folder that is named after the tool by convention in lowercase. If a tool folder contains a bin folder the devon-ide will automatically add it to the beginning of your PATH variable (in the shell and not in your operating system). The entire concept implies that the tools are portable and do not require a real installation into the operating system (e.g. via Windows Registry settings). This way you can have multiple instances of the devon-ide "installed" on the same machine for different projects with different versions of the same tools that do not interfere with each other.

+
+
+

By intention the tool folders do not contain the version of the tool as configurations may refer to tools via their path and the idea is that the software package can be updated easy and smooth.

+
+
+
+
+

Customization of environment

+
+
+

If a tool needs extra initialization, you can create a batch file named ide-config.bat inside the tool directory. This file will be called during environment initialization (for example when a user opens console.bat), and allows you to do additional init work, without changing the main devon-ide script files.

+
+
+
+
+

Tools

+
+
+

For most of the tools you just download the official release and add the content to the folder in the layout above (resulting in e.g. software/maven/bin/mvn and NOT software/maven/apache-maven-3.2.0/bin/mvn). +However, there are some specials to care about.

+
+
+
+
+

Eclipse

+
+
+

Eclipse is typically the most complicated animal out of the tools. Here is a suggestion how to build your eclipse distribution:

+
+
+
    +
  1. +

    Download the latest stable version of Eclipse IDE for JEE developers from https://www.eclipse.org/downloads/

    +
  2. +
  3. +

    Unzip it to your software folder and launch it

    +
  4. +
  5. +

    Install CobiGen (incremental code-generator)

    +
  6. +
  7. +

    Install any edit tools (for easy compare with clipboard, etc.)

    +
  8. +
  9. +

    Install eclipse-cs (for checkstyle support)

    +
  10. +
  11. +

    Install spotbugs (successor of findbugs)

    +
  12. +
  13. +

    Install EclEmma (for JUnit code coverage)

    +
  14. +
  15. +

    Install subclipse (in case SVN support is needed. Exclude Subclipse Integration for Mylyn, Subversion Revision Graph and JNA Library)

    +
  16. +
  17. +

    Install STS (in case Spring Tools are needed. Install Spring IDE [AOP | Batch | Security] Extension, Spring IDE Maven Support, Eclipse Quicksearch and Spring IDE Core)

    +
  18. +
  19. +

    Install StartExplorer (for support to open current item in file manager of your OS)

    +
  20. +
  21. +

    Install TM Terminal (open terminal/shell inside Eclipse as view)

    +
  22. +
  23. +

    Install Enhanced Class Decompiler (to debug in classes where no sources are available)

    +
  24. +
  25. +

    Install github mylyn integration (for devonfw projects that want to access github issues in Eclipse)

    +
  26. +
  27. +

    Install Data Tools Platform (for DB viewer/access inside Eclipse)

    +
  28. +
  29. +

    Install SoapUI (for service testing)

    +
  30. +
  31. +

    Install regex util (to test regular expressions)

    +
  32. +
  33. +

    Install TemplateVariables (for advanced JDT templates)

    +
  34. +
  35. +

    If not already available install m2e (for maven support)

    +
  36. +
  37. +

    Install m2e-wtp (for maven integration into WTP, only required for non-spring-boot legacy JEE projects)

    +
  38. +
  39. +

    Ensure you have eclipse-m2e checked out and import this into your eclipse as existing Maven project. This will trigger the download and installation of some m2e extensions.

    +
  40. +
  41. +

    If not already available install egit (for git support)

    +
  42. +
  43. +

    In mylyn (Task List view) click Add Repository and then Install More Connectors…​

    +
    +
      +
    1. +

      Install hudson/jenkins connector

      +
    2. +
    +
    +
  44. +
  45. +

    Download lombok.jar into the eclipse folder (for implicit get/setter, equals and hashCode support)

    +
    +
      +
    1. +

      Start the lombok.jar to launch the installer UI.

      +
    2. +
    3. +

      Click Specify location…​ and choose your eclipse folder.

      +
    4. +
    5. +

      Click Install/Update.

      +
    6. +
    7. +

      Quit the installer

      +
    8. +
    +
    +
  46. +
  47. +

    Install sonar-ide (for SonarQube support - temporary omitted due to issue #13)

    +
  48. +
  49. +

    Optionally install DevStyle for a eye-friendly dark mode that really works (Eclipse default dark theme is completely broken and unusable)

    +
  50. +
  51. +

    Install additional plugins as needed

    +
  52. +
  53. +

    Remove any internal (non-public) update sites after installation

    +
  54. +
  55. +

    Test your Eclipse distribution.

    +
  56. +
+
+
+

Please read the license agreements of each plugin and only install what is suitable for you.

+
+
+
+
+

Creating the Software Package

+
+
+

Finally you create a ZIP from your software folder. Open it and ensure that it has the specified layout (it contains the software folder as root with the tool folders inside).

+
+
+

You may want to use our ant script for building the ZIP.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/software.html b/docs/devonfw.github.io/1.0/ide.wiki/software.html new file mode 100644 index 00000000..59d26b71 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/software.html @@ -0,0 +1,408 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

software

+
+
+

The software folder contains the third party tools for your IDE such as maven, npm, java, etc. +With respect to the licensing terms you may create a custom archive containing a devonfw-ide together with the required software. +However, to be platform independent and allow lightweight updates, the devonfw-ide is capable to download and install the software automatically for you.

+
+
+
+
+

Repository

+
+
+

By default, software is downloaded via the internet from public download URLs of the according tools. However, some projects may need specific tools or tool versions that are not publicly available. +In such case, they can create their own software repository (e.g. in a VPN) and configure the base URL of it via DEVON_SOFTWARE_REPOSITORY variable. +Then, devonfw-ide will download all software from this repository only instead of the default public download URLs. +This repository (URL) should be accessible within your network via HTTPS (or HTTP) and without any authentication. +The repository needs to have the following structure:

+
+
+
+
${DEVON_SOFTWARE_REPOSITORY}/«tool»/«version»/«tool»-«version»[-«os»].tgz
+
+
+
+

So for every tool «tool» (java, maven, vscode, eclipse, etc.) you need to provide a folder in your repository. +Within this folder for every supported version «version» you need a subfolder. +This subfolder needs to contain the tool in that version for every operating system «os» (windows, linux, or mac - omitted if platform independent, e.g. for maven).

+
+
+
+
+

Shared

+
+
+

By default, each installation of devonfw-ide has its own physical installations of the required tools in the desired versions stored in its local software folder. +While this is great for isolation of devonfw-ide installations and to prevent side-effects, it can cause a huge waste of disc resources in case you are having many installations of devonfw-ide. +If you are a power-user of devonfw-ide with more than ten or even up to hundreds of installations on your machine, you might love to share installations of a software tool in a particular version between multiple devonfw-ide installations.

+
+
+ + + + + +
+ + +If you use this power-feature you are taking responsibility for side-effects and should not expect support. Also if you are using Windows please read Symlinks in Windows and make your mind if you really want to do so. You might also use this hint and maintain it manually without enabling the following feature. +
+
+
+

In order to do so, you only need to configure the variable DEVON_SOFTWARE_PATH in your ~/devon.properties pointing to an existing directory on your disc (e.g. /projects/software or C:\projects\software). +Then devonfw-ide will install required software into ${DEVON_SOFTWARE_PATH}/${software_name}/${software_version} as needed and create a symbolic link to it in ${DEVON_IDE_HOME}/software/${software_name}.

+
+
+

As a benefit, another devonfw-ide installation using the same software with the same version can re-use the existing installation and only needs to create the symbolic link. No more waste of having many identical JDK installations on your disc.

+
+
+

As a drawback, you need to be aware that specific tools may be "manipulated" after installation. +The most common case is that a tool allows to install plugins or extensions such as all IDEs do. Such "manipulations" will cause side-effects between the different devonfw-ide installations sharing the same version of that tool. +While this can also be a benefit it may also cause trouble. +If you have a sensitive project that should not be affected by such side-effects, you may again override the DEVON_SOFTWARE_PATH variable to the empty value in your ${DEVON_IDE_HOME}/conf/devon.properties of that sensitive installation:

+
+
+
+
DEVON_SOFTWARE_PATH=
+
+
+
+

This will disable this feature particularly for that specific sensitive devonfw-ide installation but let you use it for all other ones.

+
+
+
+
+

Custom

+
+
+

In some cases, a project might need (proprietary) tools that are not supported by devonfw-ide. A very simple solution is to get a release of devonfw-ide and add the tool(s) to the software folder and then distribute this modified release to your team. However, this has several drawbacks as you then have a fork of devonfw-ide and will lose your tool(s) when updating to a new release.

+
+
+

As a solution for this need, devonfw-ide lets you configure custom tools via the DEVON_IDE_CUSTOM_TOOLS variable. It can be defined in devon.properties of your settings git repository as an array of the custom tools you need to add. +The following applies to each entry:

+
+
+
    +
  • +

    It needs to have the form «tool»:«version»[:all][:«repository-url»]

    +
  • +
  • +

    The first entry must have the «repository-url» included which is used as default

    +
  • +
  • +

    Further entries will inherit this default if omitted

    +
  • +
  • +

    This URL is used in the same way as described above for a software repository.

    +
  • +
  • +

    The DEVON_SOFTWARE_REPOSITORY variable is ignored by this feature.

    +
  • +
  • +

    The optional infix :all is used to indicate that the tool is platform independent. Otherwise, an OS specific infix is appended to the URL file to download for your platform (windows, linux, or mac).

    +
  • +
+
+
+

As an example, we define it in ${DEVON_IDE_HOME}/settings/devon.properties:

+
+
+
+
DEVON_IDE_CUSTOM_TOOLS=(jboss-eap:7.1.4.GA:all:https://host.tld/projects/my-project firefox:70.0.1)
+
+
+
+

This will download and extract the following content to your software folder:

+
+ +
+

Please note that if you are not using windows, the -windows suffix will be -mac or -linux.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/sonar.html b/docs/devonfw.github.io/1.0/ide.wiki/sonar.html new file mode 100644 index 00000000..02ce4936 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/sonar.html @@ -0,0 +1,290 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

sonar

+
+
+

The sonar commandlet allows you to install, configure, and launch SonarQube.

+
+
+
Usage of devon sonar
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup SonarQube (install and verify) +|start |Start your local SonarQube server +|stop |Stop your local SonarQube server +|analyze |Analyze current project with SonarQube +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/structure.html b/docs/devonfw.github.io/1.0/ide.wiki/structure.html new file mode 100644 index 00000000..72a8fe36 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/structure.html @@ -0,0 +1,301 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Structure

+
+
+

The directory layout of your devonfw-ide will look like this:

+
+
+
File structure of your devonfw-ide
+
+
/ projects (or C:\Projects, etc.)
+└──/ my-project ($DEVON_IDE_HOME)
+    ├──/ conf
+    ├──/ log
+    ├──/ scripts
+    ├──/ settings
+    ├──/ software
+    ├──/ system
+    ├──/ updates
+    ├──/ workspaces
+    ├── setup
+    ├── setup.bat
+    └── devon-ide-doc.pdf
+
+
+
+

The elements of the above structure are described in the individual sections. As they are hyperlinks you can simply click on them to get more details.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/system.html b/docs/devonfw.github.io/1.0/ide.wiki/system.html new file mode 100644 index 00000000..95d4befe --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/system.html @@ -0,0 +1,280 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

system

+
+
+

The system folder contains documentation and solutions for operation system specific integration. Please have a look to get the maximum out of devonfw-ide and become a very efficient power user.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/updates.html b/docs/devonfw.github.io/1.0/ide.wiki/updates.html new file mode 100644 index 00000000..741598d1 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/updates.html @@ -0,0 +1,293 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

updates

+
+
+

The updates folder is used for temporary data. This includes:

+
+
+
    +
  • +

    extracted archives for installation and updates

    +
  • +
  • +

    backups of old content on updates to prevent data loss

    +
  • +
+
+
+

If all works fine you may clean this folder to save some kilo- or mega-bytes. Otherwise, you can ignore it unless you are looking for a backup after a failed or unplanned upgrade.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/usage.html b/docs/devonfw.github.io/1.0/ide.wiki/usage.html new file mode 100644 index 00000000..a0a7b5b4 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/usage.html @@ -0,0 +1,459 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

This section explains the usage of devonfw-ide according to your role:

+
+
+
    +
  • +

    Everybody should read and follow the usage for a developer.

    +
  • +
  • +

    In case you want to administrate devonfw-ide settings for your project, you should also read the usage for the ide-admin.

    +
  • +
+
+
+

Developer

+
+
+

As a developer you are supported to setup your IDE automated and fast while you can have a nice cup of coffee (after you provided settings-URL and accepted the license). +You only need the settings URL from your ide-admin. +Experienced developers can directly call setup «settings-URL». +Otherwise if you just call setup (e.g. by double-clicking it), you can enter it when you are prompted for Settings URL (using copy&paste to avoid typos).

+
+
+

Note: devonfw-ide supports autocompletion (since 2021.04.001). Currently this only works in bash (on windows use git bash). Simply type devon and hit [Tab] to get completion.

+
+
+
+
+

Update

+
+
+

To update your IDE (if instructed by your ide-admin), you only need to run the following command:

+
+
+
+
devon ide update
+
+
+
+

Please note that windows is using file-locking what can have ugly side-effects. +To be safe, you should have your IDE tools shut down before invoking the above update command. +E.g. if a tool needs to be updated, the old installation folder will be moved to a backup and the new version is installed on top. +If there are windows file locks in place this can fail and mess up things. +You can still delete the according installation from your software folder and rerun devon ide update if you ran into this error.

+
+
+
+
+

Working with multiple workspaces

+
+
+

If you are working on different branches in parallel you typically want to use multiple workspaces.

+
+
+
    +
  1. +

    Go to the workspaces folder in your ${DEVON_IDE_HOME} and create a new folder with the name of your choice (e.g. release2.1).

    +
  2. +
  3. +

    Check out (git clone …​) the according projects and branch into that workspace folder.

    +
  4. +
  5. +

    Open a shell in that new workspace folder (cd to it) and according to your IDE run e.g. eclipse, vscode, or intellij to create your workspace and launch the IDE. You can also add the parameter create-script to the IDE commandlet in order to create a launch-script for your IDE.

    +
  6. +
+
+
+

You can have multiple instances of eclipse running for each workspace in parallel. To distinguish these instances you will find the workspace name in the title of eclipse.

+
+
+
+
+

Admin

+
+
+

You can easily customize and configure devonfw-ide for the requirements of your project. +In order to do so, you need to create your own project-specific settings git repository and provide the URL to all developers for the setup. +With tools such as gitlab, bitbucket or github every developer can easily propose changes and improvements. +However, we suggest that one team member is responsible to ensure that everything stays consistent and works. +We will call this person the ide-admin of your project.

+
+
+

The following are the suggested step-by-step instructions how an ide-admin should prepare devonfw-ide for his new project:

+
+
+
    +
  1. +

    Fork ide-settings to a git repository specific for your project (e.g. a new project in the gitlab of your production-line instance). In case you are using github, all you need to do is use the Fork button. In other cases simply create a new and empty git repository and clone this to your machine. Then add the default ide-settings as origin, fetch and pull from it:

    +
    +
    +
    git remote add upstream https://github.com/devonfw/ide-settings.git
    +git fetch upstream
    +git pull upstream master
    +git push
    +
    +
    +
    +

    Now you should have a full fork as a copy of the settings git repo with all its history that is ready for upstream merges.

    +
    +
  2. +
  3. +

    Study the structure of this git repository to understand where to find which configuration.

    +
  4. +
  5. +

    Study the configuration and understand that general settings can be tweaked in the toplevel devon.properties file of your settings git repository.

    +
  6. +
  7. +

    Configure the tools and their versions for your project. Here is an example:

    +
    +
    +
    DEVON_IDE_TOOLS=(java mvn eclipse)
    +ECLIPSE_VERSION=2020-06
    +##use e.g. 8u242b08 for Java 8
    +#JAVA_VERSION=8u242b08
    +JAVA_VERSION=11.0.5_10
    +MAVEN_VERSION=3.6.2
    +
    +
    +
    +

    This way you will take over control of the tools and their versions for every developer in your project team and ensure that things get reproducible.

    +
    +
  8. +
  9. +

    In case you need a proprietary or unsupported tool, you can study how to include custom tools.

    +
  10. +
  11. +

    In case you have very restrictive policies about downloading tools from the internet, you can create and configure a software repository for your project or company.

    +
  12. +
  13. +

    Some of the tools (especially the actual IDEs) allow extensions via plugins. You can customize them to your needs for eclipse, VS code, or intelliJ.

    +
  14. +
  15. +

    In your settings git repository you will find a projects folder. Here you will find configurations files for every git project relevant for your actual project. Feel free to create new projects for your needs and delete the devonfw specific default projects. The projects documentation will explain you how to do this.

    +
  16. +
  17. +

    For every IDE you will also find an according folder in your settings git repository. Here are the individual configuration settings for that IDE. You can change them by directly editing the according configuration files directly with a text-editor in your settings git repository. However, this is a really complex way and will take you a lot of time to find the right file and property to tweak for your actual need. Instead we suggest to study +how to customize IDE specific settings.

    +
  18. +
  19. +

    You may also create new sub-folders in your settings git repository and put individual things according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth to share in your team. However, to share and maintain knowledge we recommend to use a wiki instead.

    +
  20. +
  21. +

    You may want to customize the Eclipse spellchecker dictionary for your project and your language.

    +
  22. +
+
+
+

All described in the above steps (except the first one) can be used to manage and update the configuration during the project lifecycle. +However, when you have done changes especially in a larger project, please consider the following best-practices to avoid that a large team gets blocked by a non-functional IDE:

+
+
+
    +
  • +

    Commit your changes to a feature-branch.

    +
  • +
  • +

    First test the changes yourself.

    +
  • +
  • +

    If all works as expected, pick a pilot user of the team to test the changes from the feature branch (go to settings folder, git fetch, git checkout -t origin/feature/«name», devon ide update).

    +
  • +
  • +

    Only after that works well for a couple of days, inform the entire team to update.

    +
  • +
+
+
+
+
+

Announce changes to your team

+
+
+

In order to roll out the perfectly configured devonfw-ide to your project initially or when new members join, you only have to provide the Settings URL to the developers of your team. +You can also provide a specific branch with Settings URL#branch to use variations of common settings or to test new settings before making them public to the team.

+
+
+

After you changed and tested your settings git repository (main branch), you only need to announce this to your developers (e.g. via email or some communication tool) so that they can run devon ide update and automatically get up-to-date with the latest changes (see update).

+
+
+

In case you want to go to a new version of devonfw-ide itself, developers have to call devon ide update scripts.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/variables.html b/docs/devonfw.github.io/1.0/ide.wiki/variables.html new file mode 100644 index 00000000..c121f511 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/variables.html @@ -0,0 +1,312 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Variables

+
+
+

The devonfw-ide defines a set of standard variables to your environment for configuration via variables[.bat] files. +These environment variables are described by the following table. +Those variables printed bold are also exported in your shell (except for windows CMD that does not have such concept). Variables with the value - are not set by default but may be set via configuration to override defaults. +Please note that we are trying to minimize any potential side-effect from devonfw-ide to the outside world by reducing the number of variables and only exporting those that are required.

+
+
+
Variables of devonfw-ide
+

|== == == == == == == == == == == = +|Variable|Value|Meaning +|DEVON_IDE_HOME|e.g. /projects/my-project|The top level directory of your devonfw-ide structure. +|PATH|$PATH:$DEVON_IDE_HOME/software/java:…​|You system path is adjusted by devon command. +|DEVON_HOME_DIR|~|The platform independent home directory of the current user. In some edge-cases (e.g. in cygwin) this differs from ~ to ensure a central home directory for the user on a single machine in any context or environment. +|DEVON_IDE_TOOLS|(java mvn node npm)|List of tools that should be installed and upgraded by default for your current IDE. +|DEVON_IDE_CUSTOM_TOOLS|-|List of custom tools that should be installed additionally. See software for further details. +|DEVON_CREATE_START_SCRIPTS|(eclipse vscode)|List of IDEs that shall be used by developers in the project and therefore start-scripts are created on setup. +|DEVON_OLD_PATH|…​|A "backup" of PATH before it was extended by devon to allow recovering it. Internal variable that should never be set or tweaked. +|WORKSPACE|main|The workspace you are currently in. Defaults to main if you are not inside a workspace. Never touch this variable in any variables file. +|WORKSPACE_PATH|$DEVON_IDE_HOME/workspaces/$WORKSPACE|Absolute path to current workspace. Never touch this variable in any variables file. +|JAVA_HOME|$DEVON_IDE_HOME/software/java|Path to JDK +|SETTINGS_PATH|$DEVON_IDE_HOME/settings|Path to your settings. To keep oasp4j-ide legacy behaviour set this to $DEVON_IDE_HOME/workspaces/main/development/settings. +|M2_REPO|$DEVON_IDE_HOME/conf/.m2/repository|Path to your local maven repository. For projects without high security demands, you may change this to the maven default ~/.m2/repository and share your repository among multiple projects. +|MAVEN_HOME|$DEVON_IDE_HOME/software/maven|Path to Maven +|MAVEN_OPTS|-Xmx512m -Duser.home=$DEVON_IDE_HOME/conf|Maven options +|DEVON_SOFTWARE_REPOSITORY|-|Project specific or custom software-repository. 
+|DEVON_SOFTWARE_PATH|-|Globally shared user-specific local software installation location. +|ECLIPSE_VMARGS|-Xms128M -Xmx768M -XX:MaxPermSize=256M|JVM options for Eclipse +|deprecated: ECLIPSE_PLUGINS|-|Array with "feature groups" and "update site URLs" to customize required eclipse plugins. Deprecated - see Eclipse plugins. +|«TOOL»_VERSION|-|The version of the tool «TOOL» to install and use (e.g. ECLIPSE_VERSION or MAVEN_VERSION). +|EXTRA_JAVA_VERSION|-|An additional (newer) version of java that will be used to run java-based IDEs (e.g. eclipse or intellij). +|«TOOL»_BUILD_OPTS|e.g.clean install|The arguments provided to the build-tool «TOOL» in order to run a build. +|«TOOL»_RELEASE_OPTS|e.g.clean deploy -Dchangelist= -Pdeploy|The arguments provided to the build-tool «TOOL» in order to perform a release build. +|DEVON_IDE_TRACE||If value is not an empty string, the devonfw-ide scripts will trace each script line executed. For bash two lines output: before and again after expansion. ATTENTION: This is not a regular variable working via devon.properties. Instead manually do export DEVON_IDE_TRACE=true in bash or DEVON_IDE_TRACE=true in windows CMD before running a devon command to get a trace log that you can provide to experts in order to trace down a bug and see what went wrong. +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/vscode.html b/docs/devonfw.github.io/1.0/ide.wiki/vscode.html new file mode 100644 index 00000000..3c940c19 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/vscode.html @@ -0,0 +1,353 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

vscode

+
+
+

The vscode commandlet allows you to install, configure, and launch Visual Studio Code. +To launch VSCode for your current workspace and devonfw-ide installation, simply run: +devon vscode

+
+
+

You may also supply additional arguments as devon vscode «args». These are explained by the following table:

+
+
+
Usage of devon vscode
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then to command will be invoked for each workspace +|setup |setup VSCode (install or update) +|add-plugin «id»|install an additional plugin (extension) +|run |launch VSCode (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+
+
+

plugins

+
+
+

To be productive with VS Code you need plugins (called extensions in VS Code). Of course devonfw-ide can automate this for you: +In your settings git repository create a folder vscode/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example devonfw-extension-pack.properties:

+
+
+
+
plugin_id=devonfw.devonfw-extension-pack
+plugin_active=true
+
+
+
+

The variables are defined as following:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins click on Extensions at the bottom of the left navigation icon bar in VS code. Then use the search to find the plugin of your choice. If you click on it the plugin ID is displayed in grey beside the official title at the top of the plugin details page. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon vscode add-plugin «plugin-name» from the config file settings/vscode/plugins/«plugin-name».properties. See the settings/vscode/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of VS code. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
+

cleaning plugins on update

+
+
+

If you want to strictly manage the plugins for VS code in your project, you can create or edit the file settings/vscode/plugins in your settings and add this variable:

+
+
+
+
clean_plugins_on_update=true
+
+
+
+

This will wipe all plugins when an update of VS code is performed (e.g. via devon ide update) and reinstall all configured plugins. This gives you more control over the governance of the plugins and allows you to remove a plugin later during the project lifecycle. However, this will delete all manually installed plugins automatically without asking.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/workspaces.html b/docs/devonfw.github.io/1.0/ide.wiki/workspaces.html new file mode 100644 index 00000000..61138258 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/workspaces.html @@ -0,0 +1,302 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

workspaces

+
+
+

The workspaces folder contains folders for your active work. There is a workspace folder main dedicated for your primary work. You may do all your work inside the main workspace. Also, you are free to create any number of additional workspace folders named as you like (e.g. test, release, testing, my-sub-project, etc.). Using multiple workspaces is especially relevant for Eclipse as each workspace has its own Eclipse runtime instance and configuration.

+
+
+

Within the workspace folder (e.g. workspaces/main) you are again free to create sub-folders for (sub-)projects according to your needs. We assume that in most cases you clone git repositories here. The following structure shows an example layout for devonfw:

+
+
+
File structure of workspaces
+
+
/ workspaces
+├──/ main
+│  ├──/ .metadata
+│  ├──/ ide
+│  ├──/ devon4j
+│  └──/ my-thai-star
+└──/ stable
+   ├──/ .metadata
+   ├──/ ide
+   └──/ devon4j
+
+
+
+

In the main workspace you may find the cloned forks for regular work (in the example e.g. devon4j) as a base to create pull-requests while in the stable workspace there is a clone of devon4j from the official devon4j. +However, this is just an example. Some people like to create separate workspaces for development and maintenance branches with git. Other people just switch between those via git checkout.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/ide.wiki/yarn.html b/docs/devonfw.github.io/1.0/ide.wiki/yarn.html new file mode 100644 index 00000000..316ca857 --- /dev/null +++ b/docs/devonfw.github.io/1.0/ide.wiki/yarn.html @@ -0,0 +1,296 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

yarn

+
+
+

The yarn commandlet allows to install, configure, and launch yarn. Calling devon yarn «args» is more or less the same as calling yarn «args» but with the benefit that the version of yarn preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon yarn «args») are explained by the following table:

+
+
+
Usage of devon yarn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via YARN_BUILD_OPTS +|setup |setup yarn (install and verify), configurable via YARN_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |start a clean deploy release build, configurable via YARN_RELEASE_OPTS +|«args» |run yarn with the given arguments («args») +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/index.html b/docs/devonfw.github.io/1.0/index.html new file mode 100644 index 00000000..36cf0fb9 --- /dev/null +++ b/docs/devonfw.github.io/1.0/index.html @@ -0,0 +1,73670 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw guide

+
+
+

Updated at 2022-03-16 13:27:49 UTC +:toc: +:idprefix: +:idseparator: - +:reproducible: +:source-highlighter: rouge +:listing-caption: Listing +:sectnums: +:chapter-label: +:partnums:

+
+
+
+
+

Getting Started

+
+ +
+
+
+

Introduction

+
+ +
+

What is devonfw?

+
+
+devonfw small +
+
+
+

Welcome to the devonfw platform. This is a product of the CSD (Custom Solution Development) industrialization effort to establish a standardized platform for custom software development within Capgemini APPS2. This platform is aimed at engagements, in which clients don’t specify the use of a predefined technology stack. In these cases we can offer a proven alternative as a result of our experience as a group.

+
+
+

devonfw is a development platform aiming for the standardization of processes and the boosting of productivity. It provides an architecture blueprint for server and client applications, alongside a set of tools to deliver a fully functional, out-of-the-box development environment.

+
+
+ + + + + +
+ + +The devonfw name is a registered trademark of Capgemini Logo Small, but the software and documentation included in devonfw are fully open source. Please refer to our OSS Compliance section for more information. +
+
+
+
+

Building Blocks of the Platform

+
+
+devonfwcatalog +
+
+
+

devonfw uses a state-of-the-art, open source, core reference architecture for the server (these days considered a commodity in the IT-industry) and on top of that an ever increasing number of high-value assets, which are developed by Capgemini.

+
+
+
+

The devonfw Technology Stack

+
+

devonfw is fully open source and consists of the following technology stacks:

+
+
+
+

Back-End Solutions

+
+

For server applications, devonfw includes the following solutions:

+
+
+ +
+
+
+

Front-End solutions

+
+

For client applications, devonfw includes two solutions based on TypeScript, JavaScript, C# and .NET:

+
+
+ +
+
+
+

Custom Tools

+ +
+
+

devonfw-ide

+
+

The devonfw-ide is not one monolithic program that is installed with a traditional executable; rather it’s a collection of scripts which are invoked via command line to automate several repetitive development tasks. These scripts then interact with other tools, frameworks, and third-party IDEs to streamline the development workflow.

+
+
+
+devonfw ide +
+
+
+

The advantage of this approach is, that you can have as many instances of the devonfw-ide on your machine as you need — for different projects with different tools, tool versions and configurations. No need for a physical installation and no tweaking of your operating system required!

+
+
+

Instances of the devonfw-ide do not interfere with each other, nor with other installed software. The package size of the devonfw-ide is initially very small, the setup is simple, and the included software is portable.

+
+
+
+

== IDEs

+
+

It supports the following IDEs:

+
+ +
+
+

== Platforms

+
+

It supports the following platforms:

+
+
+ +
+
+
+

== Build-Systems

+
+

It supports the following build-systems:

+
+
+ +
+
+ + + + + +
+ + +Other IDEs, platforms, or tools can easily be integrated as commandlets. +
+
+
+
+

CobiGen

+
+

CobiGen is a code generator included in the devonfw-ide, that allows users to generate the project structure and large parts of the application component code. This saves a lot of time, which is usually wasted on repetitive engineering tasks and/or writing boilerplate code.

+
+
+
+cobigen +
+
+
+

Following the same philosophy as the devonfw-ide, CobiGen bundles a new command line interface (CLI), that enables the generation of code using only a few commands. This approach also allows us to decouple CobiGen from Eclipse and use it alongside VS Code or IntelliJ IDEA.

+
+ +
+
+

Why should I use devonfw?

+
+

devonfw aims to provide a framework for the development of web applications based on the Java EE programming model. It uses the Spring framework as its Java EE default implementation.

+
+
+
+

Objectives

+ +
+
+

Standardization

+
+

We don’t want to keep reinventing the wheel for thousands of projects, for hundreds of customers, across dozens of countries. For this reason, we aim to rationalize, harmonize and standardize the development assets for software projects and industrialize the software development process.

+
+
+
+

Industrialization of Innovative Technologies & “Digital”

+
+

devonfw’s goal is to standardize & industrialize. But this applies not only to large volume, “traditional” custom software development projects. devonfw also aims to offer a standardized platform which contains a range of state-of-the-art methodologies and technology stacks. devonfw supports agile development by small teams utilizing the latest technologies for projects related to Mobile, IoT and the Cloud.

+
+
+
+

Deliver & Improve Business Value

+
+
+devon quality agility +
+
+
+
+

Efficiency

+
+
    +
  • +

    Up to 20% reduction in time to market, with faster delivery due to automation and reuse.

    +
  • +
  • +

    Up to 25% less implementation efforts due to code generation and reuse.

    +
  • +
  • +

    Flat pyramid and rightshore, ready for junior developers.

    +
  • +
+
+
+
+

Quality

+
+
    +
  • +

    State-of-the-art architecture and design.

    +
  • +
  • +

    Lower cost on maintenance and warranty.

    +
  • +
  • +

    Technical debt reduction by reuse.

    +
  • +
  • +

    Risk reduction due to continuous improvement of individual assets.

    +
  • +
  • +

    Standardized, automated quality checks.

    +
  • +
+
+
+
+

Agility

+
+
    +
  • +

    Focus on business functionality, not on technicalities.

    +
  • +
  • +

    Shorter release cycles.

    +
  • +
  • +

    DevOps by design — Infrastructure as Code.

    +
  • +
  • +

    Continuous Delivery pipeline.

    +
  • +
  • +

    On- and off-premise flexibility.

    +
  • +
  • +

    PoCs and prototypes in days not months.

    +
  • +
+
+
+
+

Features

+ +
+
+

Everything in a Single ZIP

+
+

The devonfw distribution is packaged in a ZIP file that includes all the custom tools, software and configurations.

+
+
+

Having all the dependencies self-contained in the distribution’s ZIP file, users don’t need to install or configure anything. Just extracting the ZIP content is enough to have a fully functional devonfw.

+
+
+
+

devonfw — The Package

+
+

The devonfw platform provides:

+
+
+
    +
  • +

    Implementation blueprints for a modern cloud-ready server and a choice on JS-Client technologies (either open source Angular or a very rich and impressive solution based on commercial Sencha UI).

    +
  • +
  • +

    Quality documentation and step-by-step quick start guides.

    +
  • +
  • +

    Highly integrated and packaged development environment based around Eclipse and Jenkins. You will be ready to start implementing your first customer-specific use case in 2h time.

    +
  • +
  • +

    Iterative eclipse-based code-generator that understands "Java" and works on higher architectural concepts than Java-classes.

    +
  • +
  • +

    An example application as a reference implementation.

    +
  • +
  • +

    Support through a large community + industrialization services (Standard Platform as a Service) available in the iProd service catalog.

    +
  • +
+
+
+
+
+
+

devonfw-ide Download and Setup

+
+
+

Please refer to our devonfw-ide Setup section.

+
+
+
+
+

Guides

+
+
+

Our goal is to provide a smooth starting experience to all users of devonfw, no matter how experienced they are or what their stakeholder role is. To achieve this, we provide a list of recommended guides here:

+
+
+

For Students and Junior Engineers:

+
+ +
+

For Senior Engineers and Architects:

+
+ +
+

For Team Leaders and Product Ambassadors:

+
+ + +
+

Build Your First devonfw Application

+
+

JumpTheQueue is a small application based on the devonfw framework, which you can create yourself by following our simple step-by-step tutorial. By doing so, you will learn about the app development workflow and gain insight into the design of a professional business information system. Please visit the JumpTheQueue wiki and start working through the tutorial HERE.

+
+
+ + + + + +
+ + +The tutorial assumes you have successfully set up the devonfw-ide previously. +
+
+
+

You can also clone the project and explore the finished source code via:

+
+
+
+
git clone https://github.com/devonfw/jump-the-queue.git
+
+
+
+
+JumpTheQueue Screenshots +
+
+
+

Another way to check out the JumpTheQueue-Application is to try our interactive katacoda scenario where you set up the application step by step.

+
+ + +
+
+

Explore Our devonfw Sample Application

+
+

MyThaiStar is a complex sample app, that demonstrates the full capabilities of our framework. On this page we will describe how to download and launch the app on your system, so you can test the various functionalities it offers and explore its code.

+
+
+

You can also check out the interactive katacoda scenario for setting up and trying out the MyThaiStar-Application.

+
+ +
+ + + + + +
+ + +We assume you have successfully set up the devonfw-ide previously. +
+
+
+
    +
  1. +

    In the root directory of a devonfw-ide directory, right click and select "Open Devon CMD shell here" from the Windows Explorer context menu. Then navigate to the main workspace and checkout the MyThaiStar Git repository like this:

    +
    +
    +
    cd workspaces/main
    +git clone https://github.com/devonfw/my-thai-star.git
    +
    +
    +
  2. +
  3. +

    Perform: cd my-thai-star

    +
  4. +
  5. +

    Execute: devon eclipse ws-up

    +
  6. +
  7. +

    Execute: devon eclipse create-script

    +
  8. +
  9. +

    Go to the root folder of the distribution and run eclipse-main.bat

    +
  10. +
  11. +

    In Eclipse navigate to File > Import > Maven > Existing Maven Projects, then import the cloned project from your workspace by clicking the "Browse" button and selecting /workspaces/my-thai-star/java/mtsj/.

    +
  12. +
  13. +

    Run the backend by right-clicking SpringBootApp.java and selecting Run as > Java Application in the context menu. The backend will start up and create log entries in the Eclipse Console tab.

    +
    +

    Running the MyThaiStar Backend

    +
    +
  14. +
  15. +

    Return to your command shell and perform: cd angular

    +
  16. +
  17. +

    Execute: npm install

    +
  18. +
  19. +

    Execute: ng serve

    +
  20. +
  21. +

    Once started, the frontend will be available at localhost:4200/restaurant. Login with the username and password waiter and take a look at the various functionalities provided by MyThaiStar.

    +
  22. +
+
+
+

You should now take a look at both the front- and backend code and familiarize yourself with its structure and concepts, since most devonfw projects follow this exemplary implementation. Please visit the architecture overview pages of devon4ng and devon4j to learn more about the internal workings of front- and backend.

+
+
+
+
+
+

Further Information

+
+ +
+

Repository Overview

+
+

The GitHub repositories within the devonfw organization contain the source code and documentation for official devonfw projects.

+
+
+
+devonfw Repository Overview +
+
An overview of the devonfw organization repositories.
+
+
+

The most relevant repositories here are the individual devonfw technology stacks:

+
+
+ +
+
+

Our framework also delivers a number of tools and plug-ins that aim to accelerate and streamline the development process, for example:

+
+
+ +
+
+

We also provide educational material and reference implementations to aid new users and drive the adoption of our framework, for example:

+
+
+ +
+
+

Projects in early development and prototypes are located in the devonfw forge repository. They usually remain there until they are ready for broader release or use in production.

+
+ +
+
+ +
+

We strive to foster an active, diverse and dynamic community around devonfw and are relying on modern collaboration tools to do so. Please note that some resources listed here might only be accessible to members or partners of Capgemini.

+
+
+
+

Microsoft Teams

+
+

The devonfw public channel is accessible to everyone who has a Microsoft Teams account. You can find the latest discussions on ongoing development topics here, as well as new commits and pull requests to our repos.

+
+
+

Join us to stay in the loop, and feel free to post your questions regarding devonfw here.

+
+ +
+
+

Yammer

+
+

Our corporate Yammer channel is accessible to Capgemini employees and members. If you are looking for information or feedback on current and planned projects regarding devonfw, we recommend you ask around here first.

+
+ +
+
+

E-Mail

+
+

You can reach our dedicated iCSD Support Team via e-mail at:

+
+ +
+
+
+
+

Contributing

+
+
+

Please refer to our Contributing section.

+
+
+
+
+

Code of Conduct

+
+
+

Please refer to our Code of Conduct section.

+
+
+
+
+

devonfw-ide

+
+
+

Introduction

+
+

devonfw provides a solution to building applications which combine best-in-class frameworks and libraries +as well as industry proven practices and code conventions. +It massively speeds up development, reduces risks and helps deliver better results.

+
+
+

This document contains the instructions for the tool devonfw-ide to set up and maintain your development tools including your favorite IDE (integrated development environment).

+
+ +
+

Features

+
+

Every developer needs great tools to work efficiently. Setting up these tools manually can be tedious and error-prone. Furthermore, some projects may require different versions and configurations of such tools. Especially configurations like code-formatters should be consistent within a project to avoid diff-wars.

+
+
+

The devonfw-ide will solve these issues. Here are the features you will find through devonfw-ide:

+
+
+
    +
  • +

    Efficient
    +Set up your IDE within minutes tailored for the requirements of your project.

    +
  • +
  • +

    Automated
    +Automate the setup and update, avoid manual steps and mistakes.

    +
  • +
  • +

    Simple
    +KISS (Keep It Small and Simple), no native installers that globally mess your OS or tool-integration that break with every release. Instead, use templates and simple shell scripts.

    +
  • +
  • +

    Configurable
    +You can change the configuration depending on your needs. Furthermore, the settings contain configuration templates for the different tools (see configurator).

    +
  • +
  • +

    Maintainable
    +For your project you should copy these settings to an own git repository that can be maintained and updated to manage the tool configurations during the project lifecycle. If you use GitHub or GitLab every developer can easily suggest changes and improvements to these settings via pull/merge requests, which is easier to manage with big teams.

    +
  • +
  • +

    Customizable
    +Do you need an additional tool you had never heard of before? Put it in the software folder of the structure. The devon CLI will then automatically add it to your PATH variable.
    +Further you can create your own commandlet for your additional tool. For closed-source tools you can create your own archive and distribute it to your team members as long as you care about the terms and licenses of these tools.

    +
  • +
  • +

    Multi-platform
    +It works on all major platforms: Windows, Mac and Linux.

    +
  • +
  • +

    Multi-tenancy
    +You can have several instances of the devonfw-ide "installed" on your machine for different projects with different tools, tool versions and configurations. You won’t need to set up any physical installation nor change your operating system. "Installations" of devonfw-ide do not interfere with each other nor with other installed software.

    +
  • +
  • +

    Multiple Workspaces
    +It supports working with different workspaces on different branches. You can create and update new workspaces with a few clicks. You can see the workspace name in the title-bar of your IDE so you do not get confused and work on the right branch.

    +
  • +
  • +

    Free
    +The devonfw-ide is free just like everything from devonfw. See LICENSE for details.

    +
  • +
+
+
+
+

IDEs

+
+

We support the following IDEs:

+
+
+ +
+
+
+

Platforms

+
+

We support the following platforms:

+
+
+ +
+
+
+

Build-Systems

+
+

We support the following build-systems:

+
+
+ +
+
+

However, other IDEs, platforms, or tools can also be easily integrated as commandlets.

+
+
+
+

Motivation

+
+

TL;DR? Let’s talk to developers in a language they understand. Here are some examples with devonfw-ide:

+
+
+
+
[/]$ devon
+You are not inside a devonfw-ide installation: /
+[/]$ cd /projects/devonfw
+[devonfw]$ mvn
+zsh: command not found: mvn
+[devonfw]$ devon
+devonfw-ide environment variables have been set for /projects/devonfw in workspace main
+[devonfw]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/devonfw/software/maven
+Java version: 1.8.0_191, vendor: Oracle Corporation, runtime: /projects/devonfw/software/java
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[devonfw]$ cd /projects/ide-test/workspaces/test/my-project
+[my-project]$ devon
+devonfw-ide environment variables have been set for /projects/ide-test in workspace test
+[my-project]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/ide-test/software/maven
+Java version: 11.0.2, vendor: Oracle Corporation, runtime: /projects/ide-test/software/jdk/Contents/Home
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[ide-test]$ devon eclipse
+launching Eclipse for workspace test...
+[my-project]$ devon build
+[INFO] Scanning for projects...
+...
+[INFO] BUILD SUCCESS
+
+
+
+

This was just a very simple demo of what devonfw-ide can do. For further details have a look at our CLI documentation.

+
+
+

Now you might ask:

+
+
+
    +
  • +

    But I use Windows/Linux/MacOS/… - it works on all platforms!

    +
  • +
  • +

    But how about Windows CMD or Power-Shell? - it works!

    +
  • +
  • +

    But what if I use cygwin or git-bash on windows? - it works!

    +
  • +
  • +

    But I love to use ConEmu or Commander - it works with full integration!

    +
  • +
  • +

    How about macOS Terminal or iTerm2? - it works with full integration!

    +
  • +
  • +

    But I use Zsh - it works!

    +
  • +
  • +

    …​? - it works!

    +
  • +
+
+
+

Wow! So let’s get started with download & setup.

+
+ +
+
+

Setup

+ +
+
+

Prerequisites

+
+

We try to make it as simple as possible for you. However, there are some minimal prerequisites:

+
+
+
    +
  • +

    You need to have a tool to extract *.tar.gz files (tar and gzip). On Windows before Version 10 (1803) use 7-zip. On all other platforms this comes out of the box.

    +
  • +
  • +

    You need to have git and curl installed.

    +
    +
      +
    • +

      On Windows you only need to download and install git for windows. This also ships with bash and curl.

      +
    • +
    • +

      On Linux you might need to install the above tools in case they are not present (e.g. sudo apt-get install git curl or sudo yum install git-core curl)

      +
    • +
    • +

      On MacOS you only need to download and install git for mac.

      +
    • +
    +
    +
  • +
+
+
+
+

Download

+
+

The latest release of devonfw-ide can be downloaded from here (You can find all releases in maven central).

+
+
+
+

Install

+
+

Create a central folder like C:\projects or /projects. Inside this folder, create a sub-folder for your new project such as my-project and extract the contents of the downloaded archive (devonfw-ide-scripts-*.tar.gz) to this new folder. Run the command setup in this folder (on windows double clicking on setup.bat). +That’s all. To get started read the usage.

+
+
+
+

Uninstall

+
+

To "uninstall" your devonfw-ide you only need to call the following command:

+
+
+
+
devon ide uninstall
+
+
+
+

Then you can delete the devonfw-ide top-level folder(s) (${DEVON_IDE_HOME}).

+
+
+

The devonfw-ide is designed to be non-invasive to your operating system and computer. Therefore it is not "installed" on your system in a classical way. Instead you just create a folder and extract the downloaded archive to it. You only have to install some specific prerequisites like git in advance. All the other software remains locally in your devonfw-ide folder. However, there are the following exceptions (which are reverted by devon ide uninstall):

+
+
+
    +
  • +

    The devon command is copied to your home directory (~/.devon/devon)

    +
  • +
  • +

    The devon alias is added to your shell config (~/.bashrc and ~/.zshrc, search for alias devon="source ~/.devon/devon").

    +
  • +
  • +

    On Windows the devon.bat command is copied to your home directory (%USERPROFILE%\scripts\devon.bat)

    +
  • +
  • +

    On Windows this %USERPROFILE%\scripts directory is added to the PATH of your user.

    +
  • +
  • +

    The devonfw-ide will download a third party software to your ~/Downloads/devonfw-ide folder to reduce redundant storage. You have to delete this folder manually as we do not want to be responsible for data-loss in case users manually put files here.

    +
  • +
+
+
+
+

Testing SNAPSHOT releases

+
+

Whenever a story in devonfw-ide is completed by merging a PR, +our github actions will build a new SNAPSHOT release and on success deploy it to nexus on OSSRH. +You can therefore find the latest devonfw SNAPSHOT releases here. +Simply choose the latest SNAPSHOT version folder and then inside the *.tar.gz file for the latest version. +Once downloaded, you can proceed as with official releases (see install).

+
+
+

If you test the latest SNAPSHOT please also give feedback to bug or feature tickets to let us know if things are working or not. +Thanks for your testing, support and help to make devonfw better!

+
+
+
+
+
+

Usage

+ +
+

This section explains the usage of devonfw-ide according to your role:

+
+
+
    +
  • +

    Everybody should read and follow the usage for a developer.

    +
  • +
  • +

    In case you want to administrate devonfw-ide settings for your project, you should also read the usage for the ide-admin.

    +
  • +
+
+
+

Developer

+
+

As a developer you can set up your IDE in a fast and automated way while you have a nice cup of coffee (after you have provided the settings-URL and accepted the license). +You only need the settings URL from your ide-admin. +Experienced developers can directly call setup «settings-URL». +Otherwise if you just call setup (e.g. by double-clicking it), you can enter it when you are prompted for Settings URL (using copy&paste to avoid typos).

+
+
+

Note: devonfw-ide supports autocompletion (since 2021.04.001). Currently this only works in bash (on windows use git bash). Simply type devon and hit [Tab] to get completion.

+
+
+
+

Update

+
+

To update your IDE (if instructed by your ide-admin), you only need to run the following command:

+
+
+
+
devon ide update
+
+
+
+

Please note that Windows uses file-locking, which can have ugly side-effects. +To be safe, you should have your IDE tools shut down before invoking the above update command. +E.g. if a tool needs to be updated, the old installation folder will be moved to a backup and the new version is installed on top. +If there are Windows file locks in place this can fail and mess things up. +You can still delete the according installation from your software folder and rerun devon ide update if you ran into this error.

+
+
+
+

Working with multiple workspaces

+
+

If you are working on different branches in parallel you typically want to use multiple workspaces.

+
+
+
    +
  1. +

    Go to the workspaces folder in your ${DEVON_IDE_HOME} and create a new folder with the name of your choice (e.g. release2.1).

    +
  2. +
  3. +

    Check out (git clone …​) the according projects and branch into that workspace folder.

    +
  4. +
  5. +

    Open a shell in that new workspace folder (cd to it) and according to your IDE run e.g. eclipse, vscode, or intellij to create your workspace and launch the IDE. You can also add the parameter create-script to the IDE commandlet in order to create a launch-script for your IDE.

    +
  6. +
+
+
+

You can have multiple instances of eclipse running for each workspace in parallel. To distinguish these instances you will find the workspace name in the title of eclipse.

+
+
+
+

Admin

+
+

You can easily customize and configure devonfw-ide for the requirements of your project. +In order to do so, you need to create your own project-specific settings git repository and provide the URL to all developers for the setup. +With tools such as gitlab, bitbucket or github every developer can easily propose changes and improvements. +However, we suggest that one team member is responsible to ensure that everything stays consistent and works. +We will call this person the ide-admin of your project.

+
+
+

The following are the suggested step-by-step instructions how an ide-admin should prepare devonfw-ide for his new project:

+
+
+
    +
  1. +

    Fork ide-settings to a git repository specific for your project (e.g. a new project in the gitlab of your production-line instance). In case you are using github, all you need to do is use the Fork button. In other cases simply create a new and empty git repository and clone this to your machine. Then add the default ide-settings as origin, fetch and pull from it:

    +
    +
    +
    git remote add upstream https://github.com/devonfw/ide-settings.git
    +git fetch upstream
    +git pull upstream master
    +git push
    +
    +
    +
    +

    Now you should have a full fork as a copy of the settings git repo with all its history that is ready for upstream merges.

    +
    +
  2. +
  3. +

    Study the structure of this git repository to understand where to find which configuration.

    +
  4. +
  5. +

    Study the configuration and understand that general settings can be tweaked in the toplevel devon.properties file of your settings git repository.

    +
  6. +
  7. +

    Configure the tools and their versions for your project. Here is an example:

    +
    +
    +
    DEVON_IDE_TOOLS=(java mvn eclipse)
    +ECLIPSE_VERSION=2020-06
    +##use e.g. 8u242b08 for Java 8
    +#JAVA_VERSION=8u242b08
    +JAVA_VERSION=11.0.5_10
    +MAVEN_VERSION=3.6.2
    +
    +
    +
    +

    This way you will take over control of the tools and their versions for every developer in your project team and ensure that things get reproducible.

    +
    +
  8. +
  9. +

    In case you need a proprietary or unsupported tool, you can study how to include custom tools.

    +
  10. +
  11. +

    In case you have very restrictive policies about downloading tools from the internet, you can create and configure a software repository for your project or company.

    +
  12. +
  13. +

    Some of the tools (especially the actual IDEs) allow extensions via plugins. You can customize them to your needs for eclipse, VS code, or intelliJ.

    +
  14. +
  15. +

    In your settings git repository you will find a projects folder. Here you will find configurations files for every git project relevant for your actual project. Feel free to create new projects for your needs and delete the devonfw specific default projects. The projects documentation will explain you how to do this.

    +
  16. +
  17. +

    For every IDE you will also find an according folder in your settings git repository. Here are the individual configuration settings for that IDE. You can change them by directly editing the according configuration files with a text-editor in your settings git repository. However, this is a really complex way and will take you a lot of time to find the right file and property to tweak for your actual need. Instead we suggest to study +how to customize IDE specific settings.

    +
  18. +
  19. +

    You may also create new sub-folders in your settings git repository and put individual things according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth to share in your team. However, to share and maintain knowledge we recommend to use a wiki instead.

    +
  20. +
  21. +

    You may want to customize the Eclipse spellchecker dictionary for your project and your language.

    +
  22. +
+
+
+

Everything described in the above steps (except the first one) can be used to manage and update the configuration during the project lifecycle. +However, when you have made changes, especially in a larger project, please consider the following best-practices to avoid that a large team gets blocked by a non-functional IDE:

+
+
+
    +
  • +

    Commit your changes to a feature-branch.

    +
  • +
  • +

    First test the changes yourself.

    +
  • +
  • +

    If all works as expected, pick a pilot user of the team to test the changes from the feature branch (go to settings folder, git fetch, git checkout -t origin/feature/«name», devon ide update).

    +
  • +
  • +

    Only after that works well for a couple of days, inform the entire team to update.

    +
  • +
+
+
+
+

Announce changes to your team

+
+

In order to roll out the perfectly configured devonfw-ide to your project initially or when new members join, you only have to provide the Settings URL to the developers of your team. +You can also provide a specific branch with Settings URL#branch to use variations of common settings or to test new settings before making them public to the team.

+
+
+

After you changed and tested your settings git repository (main branch), you only need to announce this to your developers (e.g. via email or some communication tool) so that they can run devon ide update and automatically get up-to-date with the latest changes (see update).

+
+
+

In case you want to go to a new version of devonfw-ide itself, developers have to call devon ide update scripts.

+
+
+ +
+
+

Configuration

+
+

The devonfw-ide aims to be highly configurable and flexible. The configuration of the devon command and environment variables takes place via devon.properties files. The following list shows these configuration files in the order they are loaded so files can override variables from files above in the list:

+
+
+
    +
  1. +

    built-in defaults (for JAVA_VERSION, ECLIPSE_PLUGINS, etc.)

    +
  2. +
  3. +

    ~/devon.properties - user specific global defaults (on windows in %USERPROFILE%/devon.properties)

    +
  4. +
  5. +

    scripts/devon.properties - defaults provided by devonfw-ide. Never directly modify this file!

    +
  6. +
  7. +

    devon.properties - vendor variables for custom distributions of devonfw-ide-scripts, may e.g. tweak SETTINGS_PATH or predefine SETTINGS_URL.

    +
  8. +
  9. +

    settings/devon.properties (${SETTINGS_PATH}/devon.properties) - project specific configurations from settings.

    +
  10. +
  11. +

    workspaces/${WORKSPACE}/devon.properties - optional workspace specific configurations (especially helpful in projects using docker).

    +
  12. +
  13. +

    conf/devon.properties - user specific configurations (e.g. M2_REPO=~/.m2/repository). During setup this file is created by copying a template from ${SETTINGS_PATH}/devon/conf/devon.properties.

    +
  14. +
  15. +

    settings/projects/*.properties - properties to configure project checkout and import

    +
  16. +
+
+
+
+

devon.properties

+
+

The devon.properties files allow to define environment variables in a simple and OS independent way:

+
+
+
    +
  • +

    # comments begin with a hash sign (#) and are ignored

    +
  • +
  • +

    variable_name=variable_value with space etc.

    +
  • +
  • +

    variable_name=${predefined_variable}/folder_name

    +
    +

    variable values can refer to other variables that are already defined, which will be resolved to their value. You have to use ${…​} syntax to make it work on all platforms (never use %…​%, $…​, or $(…​) syntax in devon.properties files).

    +
    +
  • +
  • +

    export exported_variable=this value will be exported in bash, in windows CMD the export prefix is ignored

    +
  • +
  • +

    variable_name=

    +
    +

    this will unset the specified variable

    +
    +
  • +
  • +

    variable_name=~/some/path/and.file

    +
    +

    tilde is resolved to your personal home directory on any OS including windows.

    +
    +
  • +
  • +

    array_variable=(value1 value2 value3)

    +
    +

    This will only work properly in bash worlds but as no arrays are used in CMD world of devonfw-ide it does not hurt on windows.

    +
    +
  • +
  • +

    Please never surround values with quotes (var="value")

    +
  • +
  • +

    This format is similar to Java *.properties but does not support advanced features as unicode literals, multi-lined values, etc.

    +
  • +
+
+
+

In order to know what to configure, have a look at the available variables.

+
+
+

Please only tweak configurations that you need to change and take according responsibility. There is a price to pay for flexibility, which means you have to be careful what you do.

+
+
+

Further, you can configure maven via conf/settings.xml. To configure your IDE such as eclipse or vscode you can tweak the settings.

+
+
+ +
+
+

Variables

+
+

The devonfw-ide defines a set of standard variables to your environment for configuration via variables[.bat] files. +These environment variables are described by the following table. +Those variables printed bold are also exported in your shell (except for windows CMD that does not have such concept). Variables with the value - are not set by default but may be set via configuration to override defaults. +Please note that we are trying to minimize any potential side-effect from devonfw-ide to the outside world by reducing the number of variables and only exporting those that are required.

+
+
+
Variables of devonfw-ide
+

|== == == == == == == == == == == = +|Variable|Value|Meaning +|DEVON_IDE_HOME|e.g. /projects/my-project|The top level directory of your devonfw-ide structure. +|PATH|$PATH:$DEVON_IDE_HOME/software/java:…​|You system path is adjusted by devon command. +|DEVON_HOME_DIR|~|The platform independent home directory of the current user. In some edge-cases (e.g. in cygwin) this differs from ~ to ensure a central home directory for the user on a single machine in any context or environment. +|DEVON_IDE_TOOLS|(java mvn node npm)|List of tools that should be installed and upgraded by default for your current IDE. +|DEVON_IDE_CUSTOM_TOOLS|-|List of custom tools that should be installed additionally. See software for further details. +|DEVON_CREATE_START_SCRIPTS|(eclipse vscode)|List of IDEs that shall be used by developers in the project and therefore start-scripts are created on setup. +|DEVON_OLD_PATH|…​|A "backup" of PATH before it was extended by devon to allow recovering it. Internal variable that should never be set or tweaked. +|WORKSPACE|main|The workspace you are currently in. Defaults to main if you are not inside a workspace. Never touch this variable in any variables file. +|WORKSPACE_PATH|$DEVON_IDE_HOME/workspaces/$WORKSPACE|Absolute path to current workspace. Never touch this variable in any variables file. +|JAVA_HOME|$DEVON_IDE_HOME/software/java|Path to JDK +|SETTINGS_PATH|$DEVON_IDE_HOME/settings|Path to your settings. To keep oasp4j-ide legacy behaviour set this to $DEVON_IDE_HOME/workspaces/main/development/settings. +|M2_REPO|$DEVON_IDE_HOME/conf/.m2/repository|Path to your local maven repository. For projects without high security demands, you may change this to the maven default ~/.m2/repository and share your repository among multiple projects. +|MAVEN_HOME|$DEVON_IDE_HOME/software/maven|Path to Maven +|MAVEN_OPTS|-Xmx512m -Duser.home=$DEVON_IDE_HOME/conf|Maven options +|DEVON_SOFTWARE_REPOSITORY|-|Project specific or custom software-repository. 
+|DEVON_SOFTWARE_PATH|-|Globally shared user-specific local software installation location. +|ECLIPSE_VMARGS|-Xms128M -Xmx768M -XX:MaxPermSize=256M|JVM options for Eclipse +|deprecated: ECLIPSE_PLUGINS|-|Array with "feature groups" and "update site URLs" to customize required eclipse plugins. Deprecated - see Eclipse plugins. +|«TOOL»_VERSION|-|The version of the tool «TOOL» to install and use (e.g. ECLIPSE_VERSION or MAVEN_VERSION). +|EXTRA_JAVA_VERSION|-|An additional (newer) version of java that will be used to run java-based IDEs (e.g. eclipse or intellij). +|«TOOL»_BUILD_OPTS|e.g.clean install|The arguments provided to the build-tool «TOOL» in order to run a build. +|«TOOL»_RELEASE_OPTS|e.g.clean deploy -Dchangelist= -Pdeploy|The arguments provided to the build-tool «TOOL» in order to perform a release build. +|DEVON_IDE_TRACE||If value is not an empty string, the devonfw-ide scripts will trace each script line executed. For bash two lines output: before and again after expansion. ATTENTION: This is not a regular variable working via devon.properties. Instead manually do export DEVON_IDE_TRACE=true in bash or DEVON_IDE_TRACE=true in windows CMD before running a devon command to get a trace log that you can provide to experts in order to trace down a bug and see what went wrong. +|== == == == == == == == == == == =

+
+
+ +
+
+

Devon CLI

+
+

The devonfw-ide is shipped with a central command devon. The setup will automatically register this command so it is available in any shell on your system. This page describes the Command Line Interface (CLI) of this command.

+
+
+
+

Devon

+
+

Without any argument the devon command will determine your DEVON_IDE_HOME and setup your environment variables automatically. In case you are not inside of a devonfw-ide folder the command will echo a message and do nothing.

+
+
+
+
[/]$ devon
+You are not inside a devon IDE installation: /
+[/]$ cd /projects/my-project/workspaces/test/my-git-repo
+[my-git-repo]$ devon
+devonfw-ide environment variables have been set for /projects/my-project in workspace main
+[my-git-repo]$ echo $DEVON_IDE_HOME
+/projects/my-project
+[my-git-repo]$ echo $JAVA_HOME
+/projects/my-project/software/java
+
+
+
+
+

Commandlets

+
+

The devon command supports a pluggable set of commandlets. Such commandlet is provided as first argument to the devon command and may take additional arguments:

+
+
+

devon «commandlet» [«arg»]*

+
+
+

Technically, a commandlet is a bash script located in $DEVON_IDE_HOME/scripts/command. So if you want to integrate another tool with devonfw-ide we are awaiting your pull-request. +Every commandlet takes the following generic arguments:

+
+
+
Generic arguments of every commandlet
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|-b or --batch |run in non-interactive mode (do not ask any questions). +|-q or --quiet |be quiet and avoid output. +|== == == == == == == == == == == =

+
+
+
+

Command-wrapper

+
+

For many commandlets the devon command acts as a wrapper. +Similar to mvnw or gradlew you can use it as a proxy command. +Therefore devon mvn clean install will be the same as mvn clean install. +The benefit when using devon as wrapper is that it will even work when the command (mvn, node, npm, etc.) is not on your PATH variable or even not yet installed. +We see the main benefit in this for writing portable scripts that you may commit to your git repository and that will then run everywhere and will lazily install the required tools on the fly. +In your daily usage you can and surely should avoid to always type devon as prefix to every command. +However, when you automate and want to avoid "command not found" errors, you can simply prefix the command with devon.

+
+
+
+

Commandlet overview

+
+

The following commandlets are currently available:

+
+
+ +
+ +
+
build
+
+

The build commandlet is an abstraction of build systems like maven, gradle, yarn, npm, etc. +It will auto-detect your build-system (via existence of files like pom.xml, package.json, etc.). According to this detection, it will simply delegate to the according commandlet of the specific build system. If that build-system is not yet available it will be downloaded and installed automatically.

+
+
+

So devon build allows users to build any project without bothering about the build-system. Further specific build options can be configured per project. This makes devon build a universal part of every definition of done. Before pushing your changes, please always run the following command to verify the build:

+
+
+

devon build

+
+
+

You may also supply additional arguments as devon build «args». This will simply delegate these arguments to the detected build command (e.g. call mvn «args»).

+
+ +
+
+
Docker
+
+

The Docker commandlet allows to install and use Docker. +On Windows, WSL 2 (Windows Subsystem for Linux) has to be installed properly as a prerequisite.

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon docker «args») are explained by the following table:

+
+
+
Usage of devon docker
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Docker (install and verify) as per above flow. +|«args» |call docker with the specified arguments («args»). Call docker help for details or use docker directly as preferred. +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

The Docker commandlet will install Docker automatically. +Please note that besides the sandbox concept of devonfw-ide this is a global installation on your system. +When uninstalling devonfw-ide, you may have to manually uninstall Docker and Kubernetes if you do not need it anymore.

+
+
+
+
requirements
+
+

Running Docker and especially Kubernetes on your machine in the background will require quite some resources. +This will allocate at least 2GB of additional RAM.

+
+
+

You will need at least 8GB of total RAM while we recommend to use 16GB+.

+
+
+

You may also tune and scale it to your needs. +When using Docker Desktop (Windows or MacOS) simply go to the resources tab in the settings. +It will depend on your usage frequency if you want to have it running in the background all the time. +This is a balance between resource utilization and convenience. +If you use Docker and Kubernetes on your local machine on a daily basis this makes sense.

+
+
+

In case you only use Docker rarely, you can save resources by stopping it when not needed after it has been installed.

+
+
+
+
Windows and macOS
+
+

To enable or disable autostart, you can launch Docker Desktop on Windows or MacOS and go to the Preferences (gear icon in the title bar). Then in the General tab you can check or uncheck the option Start Docker Desktop when you login (see also here). When autostart is disabled and you launch Docker Desktop it will notice and ask you to start the service or do this automatically for you. +On Windows you can also manually tweak this:

+
+
+
    +
  • +

    Hit [windows][r]

    +
  • +
  • +

    Enter services.msc

    +
  • +
  • +

    Confirm with OK

    +
  • +
  • +

    In the services app search for the Docker Desktop Service in the list and select it.

    +
  • +
  • +

    Now you can start or stop the service by clicking on the according link text.

    +
  • +
  • +

    Also when right clicking on Docker Desktop Service and selecting Options from the context-menu, you can change the start type to automatic or manual.

    +
  • +
+
+
+
+
== Mac M1
+
+

In case you have a new Mac with M1 CPU, we automatically download and install the according ARM version of Docker Desktop for macOS. +However, if you use Docker and search for images you may end up with errors like:

+
+
+
+
docker: no matching manifest for linux/arm64/v8 in the manifest list entries.
+
+
+
+

So with an M1 CPU you may need to add --platform linux/x86_64 as an option to your Docker command to find the expected container image.

+
+
+
+
Linux
+
+

There is no Docker Desktop for Linux. +As Docker initially comes from the Linux world, it is easy to set it up on a Linux machine and use it from the commandline. +Therefore we do not install a GUI for you in case you are a Linux user. +In case you need a GUI for Docker and Kubernetes on Linux you can choose from the following options:

+
+
+ +
+
+
+
usage
+
+

Once installed via setup, you can run Docker directly from any shell of your OS. +Run docker help to get started and use the online documentation and resources on the web to get familiar with Docker. +It is not our intention to repeat this here.

+
+
+

Please note that the docker commandlet is a command wrapper.

+
+ +
+
+
eclipse
+
+

The eclipse commandlet allows to install, configure, and launch the Eclipse IDE. +To launch eclipse for your current workspace and devonfw-ide installation simply run: +devon eclipse

+
+
+

You may also supply additional arguments as devon eclipse «args». These are explained by the following table:

+
+
+
Usage of devon eclipse
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then the command will be invoked for each workspace +|setup |setup Eclipse (install or update) +|add-plugin «id» [«url»]|install an additional plugin +|run |launch Eclipse (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add |reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+

There are variables that can be used for Eclipse. These are explained by the following table:

+
+
+
Variables of devonfw-ide for Eclipse
+

|== == == == == == == == == == == = +|Variable|Meaning +|ECLIPSE_VERSION|The version of the tool Eclipse to install and use. +|ECLIPSE_EDITION_TYPE|The edition of the tool Eclipse to install and use. You can choose between Java for standard edition or JEE for enterprise edition. +|*EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with Eclipse you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder eclipse/plugins (click on this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example tmterminal.properties:

+
+
+
+
plugin_url=http://download.eclipse.org/tm/terminal/marketplace
+plugin_id=org.eclipse.tm.terminal.feature.feature.group,org.eclipse.tm.terminal.view.feature.feature.group,org.eclipse.tm.terminal.control.feature.feature.group,org.eclipse.tm.terminal.connector.ssh.feature.feature.group,org.eclipse.tm.terminal.connector.telnet.feature.feature.group
+plugin_active=true
+
+
+
+

The variables are defined as follows:

+
+
+
    +
  • +

    plugin_url defines the URL of the Eclipse update site of the plugin

    +
  • +
  • +

    plugin_id defines the feature group ID(s) to install. To install multiple features/plugins provide a comma-separated list of IDs. If you want to customize devonfw-ide with new plugins you can first install them manually and then go to About Eclipse > Installation Details. There you can filter for your newly installed plugin and find the values in the Id column. Copy & paste them from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise, developers can still install the plugin manually via devon eclipse add-plugin «plugin-name» from the config file settings/eclipse/plugins/«plugin-name».properties. See the settings/eclipse/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. This e.g. applies for devstyle that allows a real dark mode for eclipse and tunes the theming and layout of Eclipse in general. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually.

+
+
+

As the maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
legacy plugin config
+
+

For downward compatibility we still support the deprecated legacy configuration if the folder settings/eclipse/plugins does not exist: +The project configuration typically defines the plugins that will be installed via ECLIPSE_PLUGINS variable. Otherwise defaults from this eclipse commandlet will apply. +Be aware that this comes at your own risk and sometimes plugins can conflict and break your IDE.

+
+
+

Here is an example how a project can configure the plugins in its devon.properties inside the settings:

+
+
+
+
ECLIPSE_PLUGINS=("AnyEditTools.feature.group" "https://raw.githubusercontent.com/iloveeclipse/plugins/latest/" "com.ess.regexutil.feature.group" "http://regex-util.sourceforge.net/update/")
+
+
+
+

For the above listed plugins you can also use the short form:

+
+
+
+
ECLIPSE_PLUGINS=("anyedit" "" "regexutil" "")
+
+
+
+

Of course you may also mix plugin IDs with fully qualified plugins.

+
+
+
+
dictionary
+
+

Eclipse already comes with a built-in spellchecker. This is very helpful when writing comments. The default settings of devonfw-ide ship with a project specific dictionary file and according configurations to enable spellchecking and configuring this dictionary. +When typing JavaDoc, inline comments or other texts the spellchecker will underline unknown words in red. +If your cursor is located at such a word you can hit [Ctrl][1] to get a context menu with additional options. +There you can either choose similar correct words to correct a typo or you may even add the word (maybe a new business term) to your local dictionary.

+
+
+
+"Eclipse spellchecker” +
+
+
+

In the latter case, you should commit the changes to your settings so that it will be available to your entire team. +For further details about committing changes to the settings please consult the admin usage.

+
+
+
+
non-english dictionary
+
+

In case your project has to write documentation or text in languages other than English, you might want to prefill your project dictionary for that language. +Here we collect a list of such dictionaries that you can download and merge into your project dictionary:

+
+
+ +
+ +
+
+
gradle
+
+

The gradle commandlet allows to install, configure, and launch gradle. It is similar to gradle-wrapper. So calling devon gradle «args» is more or less the same as calling gradle «args» but with the benefit that the version of gradle preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon gradle «args») are explained by the following table:

+
+
+
Usage of devon gradle
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup gradle (install and verify), configurable via GRADLE_VERSION +|«args» |run gradle with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
help
+
+

The help commandlet provides help for the CLI.

+
+
+
Usage of devon help
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |Print general help +|«command» |Print help for the commandlet «command». +|== == == == == == == == == == == =

+
+
+

Please note that devon help «command» will do the same as devon «command» help.

+
+ +
+
+
ide
+
+

The ide commandlet manages your devonfw-ide. +You need to supply additional arguments as devon ide «args». These are explained by the following table:

+
+
+
Usage of devon ide
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup [«SETTINGS_URL»] |setup devonfw-ide (cloning the settings from the given URL, optionally from specific branch URL#branch) +|update [«package»] |update devonfw-ide +|update scripts [to «version»] |update devonfw-ide +|uninstall |uninstall devonfw-ide (if you want to remove it entirely from your system) +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

Run devon ide setup to initially setup your devonfw-ide. It is recommended to run the setup script in the top-level directory ($DEVON_IDE_HOME). However, in case you want to skip some system specific integration, you may also run this command directly instead. The setup only needs to be called once after a new devonfw-ide instance has been created. It will follow this process:

+
+
+
    +
  • +

    install the devon command on your system (if not already installed).

    +
  • +
  • +

    clone the settings (you may provide a git URL directly as argument or you will be prompted for it).

    +
  • +
  • +

    install all required software from DEVON_IDE_TOOLS variable (if not already installed).

    +
  • +
  • +

    configure all these tools

    +
  • +
  • +

    create IDE launch scripts

    +
  • +
  • +

    perform OS specific system integration such as Windows Explorer integration (only done from setup script and not from devon ide setup)

    +
  • +
+
+
+
+
update
+
+

Run devon ide update to update your devonfw-ide. This will check for updates and install them automatically. +The optional extra argument («package») behaves as follows:

+
+
+
    +
  • +

    scripts: check if a new version of devonfw-ide-scripts is available. If so it will be downloaded and installed. As Windows is using file-locks, it is tricky to update a script while it is executed. Therefore, we update the scripts folder as an async background task and have to abort further processing at this point on windows as a workaround.

    +
  • +
  • +

    settings: update the settings (git pull).

    +
  • +
  • +

    software: update the software (e.g. if versions have changed via scripts or settings update).

    +
  • +
  • +

    projects: update the projects (checkout and import repositories into workspace/IDEs).

    +
  • +
  • +

    all: do all the above sequentially.

    +
  • +
  • +

    none: settings and software are updated by default if no extra argument is given. This is the regular usage for project developers. Only perform an update of scripts when you are requested to do so by your technical lead. Bigger projects especially need to test updates before rolling them out to the entire team. If developers always updated the latest release of the scripts which is released globally, some project functionality would break causing problems and extra efforts in the teams.

    +
  • +
+
+
+

In order to update to a specific version of scripts an explicit version can be specified after the additional to argument:

+
+
+
+
devon ide update scripts to 3.1.99
+
+
+
+

The above example will update to the exact version 3.1.99 no matter if this is an upgrade or a downgrade of your current installed version. +If you just use devon ide update scripts then the latest available version will be installed. In larger teams it is recommended to communicate exact version updates to avoid that a new release can interfere and break anything. Therefore, some pilot user will test a new version for the entire team and, only after a successful test, they will communicate to the team to update to that exact version by providing the complete command as in the above example.

+
+
+
+
uninstall
+
+

We hope you love devonfw-ide. However, if you don’t and want to get rid of it entirely and completely remove all integration, you can use this command:

+
+
+
+
devon ide uninstall
+
+
+
+

This will remove devonfw-ide from all central places of your OS (user home directory such as scripts, .devon, .bashrc, as well as windows registry, etc.). +However, it will not remove your current installations (or shared software folder). So after running this uninstall, simply remove your DEVON_IDE_HOME directory of all devonfw-ide installations and potential shared software folder. You may also want to clean up your ~/Downloads directory from files downloaded by devonfw-ide. We do not automate this as deleting a directory is a very simple manual step and we do not want to take responsibility for severe data loss if your workspaces contained valuable work.

+
+ +
+
+
intellij
+
+

The intellij commandlet allows to install, configure, and launch IntelliJ. +To launch IntelliJ for your current workspace and devonfw-ide installation, simply run: +devon intellij

+
+
+

You may also supply additional arguments as devon intellij «args». These are explained by the following table:

+
+
+
Usage of devon intellij
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then the command will be invoked for each workspace +|setup |setup IntelliJ (install or update) +|add-plugin «id»|install an additional plugin +|run |launch IntelliJ (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+

There are variables that can be used for IntelliJ. These are explained by the following table:

+
+
+
Variables of devonfw-ide for intelliJ
+

|== == == == == == == == == == == = +|Variable|Meaning +|INTELLIJ_VERSION|The version of the tool IntelliJ to install and use. +|INTELLIJ_EDITION_TYPE|The edition of the tool IntelliJ to install and use. The value C means Community edition and the value U means Ultimate edition. The Ultimate edition requires a license. The user has to buy the license separately and it is not part of devonfw-ide. The devonfw-ide only supports download and installation. +|*EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with IntelliJ you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder intellij/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example scala.properties:

+
+
+
+
plugin_id=org.intellij.scala
+plugin_active=false
+
+
+
+

The variables are defined as following:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins use the search on https://plugins.jetbrains.com/idea_ce to find the plugin of your choice. Select the tab Versions and click on a version in the list. The plugin ID is displayed in the upper right corner. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon intellij add-plugin «plugin_id».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of IntelliJ. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+ +
+
+
ionic
+
+

The ionic commandlet allows to install, configure, and launch ionic (ionic-cli). Calling devon ionic «args» is more or less the same as calling ionic «args» but with some advanced features and ensuring that ionic is properly set up for your project.

+
+
+

The arguments (devon ionic «args») are explained by the following table:

+
+
+
Usage of devon ionic
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup yarn (install and verify), configurable via YARN_VERSION +|create |Create a new devon4ng ionic project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ionic with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
jasypt
+
+

The jasypt commandlet allows to install jasypt and encrypt or decrypt secrets using strong encryption given a secure masterpassword. See also devon4j password encryption guide for further details.

+
+
+

The arguments (devon jasypt «args») are explained by the following table:

+
+
+
Usage of devon jasypt
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup jasypt (install and verify), configurable via JASYPT_VERSION +|encrypt |Encrypt a secret with a masterpassword +|decrypt |Decrypt an encrypted secret with a masterpassword +|== == == == == == == == == == == =

+
+
+
+
example
+
+
+
devon jasypt encrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: secret
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: secret
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+devon jasypt decrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+secret
+
+
+ +
+
+
java
+
+

The java commandlet allows to install and setup Java. Also it supports devon4j. +The arguments (devon java «args») are explained by the following table:

+
+
+
Usage of devon java
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup OpenJDK (install or update and verify), configurable via JAVA_VERSION (e.g. 8u242b08 or 11.0.6_10) +|create «args» |create a new Java project based on devon4j application template. If a single argument is provided, this is the package name and is automatically split into groupId and artifactId. Use -DdbType=«db» to choose the database (hana, oracle, mssql, postgresql, mariadb, mysql, h2, hsqldb). Any option starting with dash is passed as is." +|migrate [from «version»] [single] |migrate a devon4j project to the latest version. If for some reasons the current devonfw version can not be auto-detected you may provide it manually after the 'from' argument. Also the 'single' option allows to migrate only to the next available version." +|cicd «args» |generate cicd files for the current devon4java project +|== == == == == == == == == == == =

+
+
+

Since 2021.12.003 an extra version of Java can be configured via EXTRA_JAVA_VERSION variable. This can be used to launch your IDE with a different (newer) version of Java but keeping the build of your project stable.

+
+
+
+
create
+
+

Examples for create a new devon4j application:

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create -Dversion=0.0.1-alpha1 com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 0.0.1-alpha1, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group demo-app
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId demo-app, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp -DartifactId=demo-app -DdbType=hana
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId demo-app, version 1.0.0-SNAPSHOT, and SAP hana database.

+
+
+
+
devon java create com.example.domain.myapp -DdbType=oracle -Dversion=0.0.1 com.example.group -Dbatch=batch
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 0.0.1, oracle database, and with a batch module.

+
+
+
+
migrate
+
+

Example for migrating a devon4j application:

+
+
+
+
devon java migrate
+
+
+
+

Will migrate current devon4j application to the latest version available.

+
+ +
+
+
jenkins
+
+

The jenkins commandlet allows to install, configure, and launch Jenkins.

+
+
+
Usage of devon jenkins
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup Jenkins (install and verify) +|start |Start your local Jenkins server +|stop |Stop your local Jenkins server +|add |Add current project as CI job to your local Jenkins +|== == == == == == == == == == == =

+
+ +
+
+
Kubernetes
+
+

The kubectl commandlet allows to install and use kubernetes. +On Windows WSL 2(Windows Subsystem for Linux) has to be installed properly as a prerequisite. +The setup on windows will then install kubernetes with K3D. K3D will create a cluster with a single node with a default name as "devonfw-cluster"

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon kubectl «args») are explained by the following table:

+
+
+
Usage of devon kubectl
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Kubernetes (install and verify) as per above flow. +|«args» |call kubectl with the specified arguments. Call kubectl help for details or use kubectl directly as preferred. +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

Please note that on Windows and macOS, Kubernetes support comes together with Docker Desktop that is installed via docker commandlet. +When you have installed and launched Docker Desktop, you can enable Kubernetes once in the Preferences.

+
+
+

On Linux however, Kubernetes is installed separately by this commandlet.

+
+
+
+
usage
+
+

Once installed via setup, you can run kubectl directly from any shell of your OS. +Run kubectl help to get started and use the online documentations and resources on the web to get familiar with Kubernetes. +It is not our intention to repeat this here.

+
+
+

Please note that the kubectl commandlet is a command wrapper.

+
+ +
+
+
mvn
+
+

The mvn commandlet allows to install, configure, and launch maven. It is similar to maven-wrapper and mdub. So calling devon mvn «args» is more or less the same as calling mvn «args» but with the benefit that the version of maven preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon mvn «args») are explained by the following table:

+
+
+
Usage of devon mvn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via MVN_BUILD_OPTS +|setup |setup Maven (install and verify), configurable via MAVEN_VERSION +|get-version |Print the version of your current project. Will consolidate the version for multi-module projects ignoring dev[-SNAPSHOT] versions and fail on mixed versions. +|set-version «nv» [«cv»] |Set the version of your current project to «nv» (assuming your current version is «cv»). +|check-no-snapshots |Check if no «version»-SNAPSHOT dependencies are used. +|check-top-level-project |Check if you are running on a top-level project or fail if in a module or no maven project at all. +|release |Start a clean deploy release build, configurable via MVN_RELEASE_OPTS +|«args» |run maven with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
ng
+
+

The ng commandlet allows to install, configure, and launch ng (angular-cli). Calling devon ng «args» is more or less the same as calling ng «args» but with some advanced features and ensuring that ng is properly set up for your project.

+
+
+

The arguments (devon ng «args») are explained by the following table:

+
+
+
Usage of devon ng
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup ng (angular-cli) (install and verify), configurable via NG_VERSION +|create |Create a new devon4ng project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ng with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
node
+
+

The node commandlet allows to install and setup node.js. +The arguments (devon node «args») are explained by the following table:

+
+
+
Usage of devon node
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup node.js (install and verify), configurable via NODE_VERSION +|create «name» [«args»] | create a new devon4node application (same as devon4node new) +|generate «s» [«args»] | generate devon4node components using the schematic «s» (same as devon4node generate) +|db «c» [«args»] | execute a TypeORM command «c» (same as devon4node db) +|cicd «args» |generate cicd files for the current devon4node project +|«args» | call NodeJS with the specified arguments +|== == == == == == == == == == == =

+
+ +
+
+
npm
+
+

The npm commandlet allows to install, configure, and launch npm. Calling devon npm «args» is more or less the same as calling npm «args» but with the benefit that the version of npm preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon npm «args») are explained by the following table:

+
+
+
Usage of devon npm
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via NPM_BUILD_OPTS +|setup |setup NPM (install and verify), configurable via NPM_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |Start a clean deploy release build, configurable via NPM_RELEASE_OPTS +|«args» |run NPM with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
release
+
+

Create a release in a standardized way including the following steps:

+
+
+
    +
  • +

    verify the current project (no local changes, etc.)

    +
  • +
  • +

    warn if «version»-SNAPSHOT dependencies are used

    +
  • +
  • +

    determine «version» (if currently «version»-SNAPSHOT) and print out release information.

    +
  • +
  • +

    ask user for confirmation

    +
  • +
  • +

    bump release to «version» in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    create annotated tag for your release as release/«version»

    +
  • +
  • +

    invoke deployment on build-system

    +
  • +
  • +

    set next version as («version»+1)-SNAPSHOT in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    push your changes

    +
  • +
+
+
+
Usage of devon release
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|…​ |any optional argument will directly be passed to the actual command to build the deployment +|== == == == == == == == == == == =

+
+
+
+
Build-Tools
+
+

This release commandlet utilizes the build commandlet to support multiple build-tools such as maven, gradle, or npm. Each of those commandlets should respect the variable «TOOL»_RELEASE_OPTS to customize the parameters for the release build.

+
+
+

So e.g. if a pom.xml is detected, maven will be used. In this example the variable MVN_RELEASE_OPTS is used that defaults to clean deploy -Dchangelist= -Pdeploy. +If you provide a specific argument this will be passed additionally. +So if you invoke the command devon release -P myProfile, the above step invoke deployment on build-system would technically call this:

+
+
+
+
mvn clean deploy -Dchangelist= -Pdeploy -P myProfile
+
+
+
+

Please also note that it is very tricky to determine and modify the version of a project in a fully generic way. +Even though we try our best to support different scenarios, we can not ensure this is working for edge-cases. +Therefore, we strongly encourage to follow best practices such as ci-friendly maven. +Further, sticking to the defaults and follow the devonfw standard to name the profile for custom goals in deployment simply deploy is recommended.

+
+ +
+
+
sonar
+
+

The sonar commandlet allows to install, configure, and launch SonarQube.

+
+
+
Usage of devon sonar
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup SonarQube (install and verify) +|start |Start your local SonarQube server +|stop |Stop your local SonarQube server +|analyze |Analyze current project with SonarQube +|== == == == == == == == == == == =

+
+ +
+
+
vscode
+
+

The vscode commandlet allows to install, configure, and launch Visual Studio Code. +To launch VSCode for your current workspace and devonfw-ide installation, simply run: +devon vscode

+
+
+

You may also supply additional arguments as devon vscode «args». These are explained by the following table:

+
+
+
Usage of devon vscode
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then the command will be invoked for each workspace +|setup |setup VSCode (install or update) +|add-plugin «id»|install an additional plugin (extension) +|run |launch VSCode (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with VS Code you need plugins (called extensions in VS Code). Of course devonfw-ide can automate this for you: +In your settings git repository create a folder vscode/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example devonfw-extension-pack.properties:

+
+
+
+
plugin_id=devonfw.devonfw-extension-pack
+plugin_active=true
+
+
+
+

The variables are defined as following:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins click on Extensions at the bottom of the left navigation icon bar in VS code. Then use the search to find the plugin of your choice. If you click on it the plugin ID is displayed in grey beside the official title at the top of the plugin details page. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon vscode add-plugin «plugin-name» from the config file settings/vscode/plugins/«plugin-name».properties. See the settings/vscode/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of VS code. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
cleaning plugins on update
+
+

If you want to strictly manage the plugins for VS code in your project, you can create or edit the file settings/vscode/plugins in your settings and add this variable:

+
+
+
+
clean_plugins_on_update=true
+
+
+
+

This will wipe all plugins when an update of VS code is performed (e.g. via devon ide update) and reinstall all configured plugins. This gives you more control over the governance of the plugins and allows you to remove a plugin later during the project lifecycle. However, this will delete all manually installed plugins automatically without asking.

+
+ +
+
+
yarn
+
+

The yarn commandlet allows to install, configure, and launch yarn. Calling devon yarn «args» is more or less the same as calling yarn «args» but with the benefit that the version of yarn preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon yarn «args») are explained by the following table:

+
+
+
Usage of devon yarn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via YARN_BUILD_OPTS +|setup |setup yarn (install and verify), configurable via YARN_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |start a clean deploy release build, configurable via YARN_RELEASE_OPTS +|«args» |run yarn with the given arguments («args») +|== == == == == == == == == == == =

+
+
+ +
+
+
+

Structure

+
+

The directory layout of your devonfw-ide will look like this:

+
+
+
Listing 1. File structure of your devonfw-ide
+
+
/ projects (or C:\Projects, etc.)
+└──/ my-project ($DEVON_IDE_HOME)
+    ├──/ conf
+    ├──/ log
+    ├──/ scripts
+    ├──/ settings
+    ├──/ software
+    ├──/ system
+    ├──/ updates
+    ├──/ workspaces
+    ├── setup
+    ├── setup.bat
+    └── devon-ide-doc.pdf
+
+
+
+

The elements of the above structure are described in the individual sections. As they are hyperlinks you can simply click on them to get more details.

+
+ +
+
conf
+
+

This folder contains configurations for your IDE:

+
+
+
Listing 2. File structure of the conf folder
+
+
/ conf
+├──/ .m2
+│  ├──/ repository
+│  │  ├──/ ant
+│  │  ├──/ ...
+│  │  └──/ zw
+│  ├── settings-security.xml
+│  └── settings.xml
+├──/ .sonar
+├──/ ...
+└── variables
+
+
+
+

The .m2 folder is used for configurations of maven. It contains the local repository folder used as cache for artifacts downloaded and installed by maven (see also maven repositories). +Further, there are two configuration files for maven:

+
+
+
    +
  • +

    settings.xml initialized from a template from your devonfw-ide settings. You may customize this to your needs (configuring HTTP proxies, credentials, or other user-specific settings). Secrets can be specified as $[«variable.name»] and will be prompted, encrypted and replaced automatically during the setup (unless in batch mode). Please note that this process is skipped in batch mode and also if you use the default settings URL (for simplicity of testing). To make use of this feature simply fork or copy the settings to your own git repo. In case your credentials have changed or you made a typo, you can simply redo this step by first moving your ${DEVON_IDE_HOME}/conf/.m2/settings.xml file to a temporary folder and then calling devon mvn setup.

    +
  • +
  • +

    settings-security.xml is auto-generated for you by devonfw-ide with a random password. This should make it easier for devonfw-ide users to use password encryption and never add passwords in plain text for better security.

    +
  • +
+
+
+

Finally, there is a file variables for the user-specific configuration of devonfw-ide.

+
+ +
+
+
log
+
+

The log directory is used to store log files e.g. for the IDE configurator. You may look here for debug information if something goes wrong.

+
+ +
+
+
scripts
+
+

This directory is the heart of the devonfw-ide and contains the required scripts.

+
+
+
Listing 3. File structure of the conf folder
+
+
/scripts
+├──/ command
+│  ├── build
+│  ├── docker
+│  ├── eclipse
+│  ├── gradle
+│  ├── help
+│  ├── ide
+│  ├── intellij
+│  ├── ionic
+│  ├── jasypt
+│  ├── java
+│  ├── jenkins
+│  ├── kubectl
+│  ├── mvn
+│  ├── ng
+│  ├── node
+│  ├── npm
+│  ├── project
+│  ├── release
+│  ├── sonar
+│  ├── vscode
+│  └── yarn
+├── devon
+├── devon.bat
+├── environment-project
+├── environment-project.bat
+├── functions
+└── devon.properties
+
+
+
+

The command folder contains the commandlets. +The devon script is the key command line interface for devonfw-ide. +There is also devon.bat that can be used in cmd or PowerShell. +As the devon CLI can be used as a global command on your computer from any directory and gets installed centrally, it aims to be stable, minimal, and lightweight. +The key logic to set up the environment variables is therefore in a separate script environment-project and its Windows variant environment-project.bat inside this scripts folder. +The file functions contains a collection of reusable bash functions. +These are sourced and used by the commandlets. +Finally the devon.properties file contains defaults for the general configuration of devonfw-ide.

+
+ +
+
+
settings
+
+

The devonfw-ide requires settings with configuration templates for the arbitrary tools.

+
+
+

To get an initial set of these settings we provide the default ide-settings as an initial package. These are also released so you can download the latest stable or any history version at maven central.

+
+
+

To test devonfw-ide or for very small projects you can also use the latest default settings (just hit return when setup is asking for the Settings URL). +However, for collaborative projects we strongly encourage you to distribute and maintain the settings via a dedicated and project specific git repository. +This gives you the freedom to control and manage the tools with their versions and configurations during the project lifecycle. +Therefore simply follow the admin usage guide.

+
+
+
+
Structure
+
+

The settings folder (see SETTINGS_PATH) has to follow this file structure:

+
+
+
Listing 4. File structure of settings
+
+
/settings
+├──/ devon
+│  ├──/ conf
+│  │  ├──/ .m2
+│  │  │  └── settings.xml
+│  │  ├──/ npm
+│  │  │  └── .npmrc
+│  │  └── devon.properties
+├──/ eclipse
+│  ├──/ workspace
+│  │  ├──/ setup
+│  │  └──/ update
+│  ├── lifecycle-mapping-metadata.xml
+│  └── project.dictionary
+├──/ ...
+├──/ sonarqube
+│  └──/ profiles
+│     ├── Devon-C#.xml
+│     ├── ...
+│     └── Devon-XML.xml
+├──/ vscode
+│  └──/ workspace
+│     ├──/ setup
+│     └──/ update
+└── devon.properties
+
+
+
+

As you can see, the settings folder contains sub-folders for tools of the IDE. +So the devon folder contains devon.properties files for the configuration of your environment. +Further, for the IDEs such as eclipse or vscode, the according folders contain the templates to manage the workspace via our configurator.

+
+
+
+
Configuration Philosophy
+
+

Different tools and configuration files require a different handling:

+
+
+
    +
  • +

    Where suitable, we directly use these configurations from your settings (e.g. for eclipse/lifecycle-mapping-metadata.xml, or eclipse/project.dictionary).

    +
  • +
  • +

    The devon folder in settings contains templates for configuration files. These are copied to the devonfw-ide installation during setup (if no such file already exists). In this way the settings repository can provide reasonable defaults but allows the user to take over control and customize to his personal needs (e.g. .m2/settings.xml).

    +
  • +
  • +

    Other configurations need to be imported manually. To avoid manual steps and simplify use we try to automate as much as possible. This currently applies to sonarqube profiles but will be automated with sonar-devon4j-plugin in the future.

    +
  • +
  • +

    For tools with complex configuration structures like eclipse, intellij, or vscode we provide a smart mechanism via our configurator.

    +
  • +
+
+
+
+
Customize Settings
+
+

You can easily customize these settings for the requirements of your project. We suggest that one team member is responsible to ensure that everything stays consistent and works.

+
+
+

You may also create new sub-folders in settings and put individual items according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth to share in your team. However, to share and maintain knowledge we recommend to use a wiki.

+
+ +
+
+
software
+
+

The software folder contains the third party tools for your IDE such as maven, npm, java, etc. +With respect to the licensing terms you may create a custom archive containing a devonfw-ide together with the required software. +However, to be platform independent and allow lightweight updates, the devonfw-ide is capable to download and install the software automatically for you.

+
+
+
+
Repository
+
+

By default, software is downloaded via the internet from public download URLs of the according tools. However, some projects may need specific tools or tool versions that are not publicly available. +In such case, they can create their own software repository (e.g. in a VPN) and configure the base URL of it via DEVON_SOFTWARE_REPOSITORY variable. +Then, devonfw-ide will download all software from this repository only instead of the default public download URLs. +This repository (URL) should be accessible within your network via HTTPS (or HTTP) and without any authentication. +The repository needs to have the following structure:

+
+
+
+
${DEVON_SOFTWARE_REPOSITORY}/«tool»/«version»/«tool»-«version»[-«os»].tgz
+
+
+
+

So for every tool «tool» (java, maven, vscode, eclipse, etc.) you need to provide a folder in your repository. +Within this folder for every supported version «version» you need a subfolder. +This subfolder needs to contain the tool in that version for every operating system «os» (windows, linux, or mac - omitted if platform independent, e.g. for maven).

+
+
+
+
Shared
+
+

By default, each installation of devonfw-ide has its own physical installations of the required tools in the desired versions stored in its local software folder. +While this is great for isolation of devonfw-ide installations and to prevent side-effects, it can cause a huge waste of disc resources in case you are having many installations of devonfw-ide. +If you are a power-user of devonfw-ide with more than ten or even up to hundreds of installations on your machine, you might love to share installations of a software tool in a particular version between multiple devonfw-ide installations.

+
+
+ + + + + +
+ + +If you use this power-feature you are taking responsibility for side-effects and should not expect support. Also if you are using Windows please read Symlinks in Windows and make your mind if you really want to do so. You might also use this hint and maintain it manually without enabling the following feature. +
+
+
+

In order to do so, you only need to configure the variable DEVON_SOFTWARE_PATH in your ~/devon.properties pointing to an existing directory on your disc (e.g. /projects/software or C:\projects\software). +Then devonfw-ide will install required software into ${DEVON_SOFTWARE_PATH}/${software_name}/${software_version} as needed and create a symbolic link to it in ${DEVON_IDE_HOME}/software/${software_name}.

+
+
+

As a benefit, another devonfw-ide installation using the same software with the same version can re-use the existing installation and only needs to create the symbolic link. No more waste of having many identical JDK installations on your disc.

+
+
+

As a drawback, you need to be aware that specific tools may be "manipulated" after installation. +The most common case is that a tool allows to install plugins or extensions such as all IDEs do. Such "manipulations" will cause side-effects between the different devonfw-ide installations sharing the same version of that tool. +While this can also be a benefit it may also cause trouble. +If you have a sensitive project that should not be affected by such side-effects, you may again override the DEVON_SOFTWARE_PATH variable to the empty value in your ${DEVON_IDE_HOME}/conf/devon.properties of that sensitive installation:

+
+
+
+
DEVON_SOFTWARE_PATH=
+
+
+
+

This will disable this feature particularly for that specific sensitive devonfw-ide installation but let you use it for all other ones.

+
+
+
+
Custom
+
+

In some cases, a project might need a (proprietary) tool(s) that (are) not supported by devonfw-ide. A very simple solution is to get a release of devonfw-ide and add the tool(s) to the software folder and then distribute this modified release to your team. However, this has several drawbacks as you then have a fork of devonfw-ide and will lose your tool(s) when updating to a new release.

+
+
+

As a solution for this need, devonfw-ide lets you configure custom tools via the DEVON_IDE_CUSTOM_TOOLS variable. It can be defined in devon.properties of your settings git repository as an array of the custom tools you need to add. +Each entry applies:

+
+
+
    +
  • +

    It needs to have the form «tool»:«version»[:all][:«repository-url»]

    +
  • +
  • +

    The first entry must have the «repository-url» included which is used as default

    +
  • +
  • +

    Further entries will inherit this default if omitted

    +
  • +
  • +

    This URL is used in the same way as described above for a software repository.

    +
  • +
  • +

    The DEVON_SOFTWARE_REPOSITORY variable is ignored by this feature.

    +
  • +
  • +

    The optional infix :all is used to indicate that the tool is platform independent. Otherwise, an OS specific infix is appended to the URL file to download for your platform (windows, linux, or mac).

    +
  • +
+
+
+

As an example, we define it in ${DEVON_IDE_HOME}/settings/devon.properties:

+
+
+
+
DEVON_IDE_CUSTOM_TOOLS=(jboss-eap:7.1.4.GA:all:https://host.tld/projects/my-project firefox:70.0.1)
+
+
+
+

This will download and extract the following content to your software folder:

+
+ +
+

Please note that if you are not using windows, the -windows suffix will be -mac or -linux.

+
+ +
+
+
system
+
+

The system folder contains documentation and solutions for operation system specific integration. Please have a look to get the maximum out of devonfw-ide and become a very efficient power user.

+
+ +
+
+
updates
+
+

The updates folder is used for temporary data. This includes:

+
+
+
    +
  • +

    extracted archives for installation and updates

    +
  • +
  • +

    backups of old content on updates to prevent data loss

    +
  • +
+
+
+

If all works fine you may clean this folder to save some kilo- or mega-bytes. Otherwise, you can ignore it unless you are looking for a backup after a failed or unplanned upgrade.

+
+ +
+
+
workspaces
+
+

The workspaces folder contains folders for your active work. There is a workspace folder main dedicated for your primary work. You may do all your work inside the main workspace. Also, you are free to create any number of additional workspace folders named as you like (e.g. test, release, testing, my-sub-project, etc.). Using multiple workspaces is especially relevant for Eclipse as each workspace has its own Eclipse runtime instance and configuration.

+
+
+

Within the workspace folder (e.g. workspaces/main) you are again free to create sub-folders for (sub-)projects according to your needs. We assume that in most cases you clone git repositories here. The following structure shows an example layout for devonfw:

+
+
+
Listing 5. File structure of workspaces
+
+
/ workspaces
+├──/ main
+│  ├──/ .metadata
+│  ├──/ ide
+│  ├──/ devon4j
+│  └──/ my-thai-star
+└──/ stable
+   ├──/ .metadata
+   ├──/ ide
+   └──/ devon4j
+
+
+
+

In the main workspace you may find the cloned forks for regular work (in the example e.g. devon4j) as a base to create pull-requests while in the stable workspace there is a clone of devon4j from the official devon4j. +However, this is just an example. Some people like to create separate workspaces for development and maintenance branches with git. Other people just switch between those via git checkout.

+
+ +
+
+
Project import
+
+

The devonfw-ide supports to automatically check out and import required projects into your IDE during setup. To configure this you put a .properties file for each desired project into the projects sub-folder in your settings. Each .properties file describes one "project" which you would like to check out and (potentially) import:

+
+
+
+
path=myproject
+workingsets=Set1,Set2
+workspace=example
+git.url=http://github.com/someorg/someproject
+git.branch=develop
+build.path=.
+build.cmd=mvn -DskipTests=true -Darchetype.test.skip=true clean install
+eclipse=import
+active=true
+
+
+
+
+
.Variables of project import
+
+
+
+

|== = +|Variable|Value|Meaning +|path|e.g. myproject, will clone into ${WORKSPACE_PATH}/myproject|(required) Path into which the project is cloned. This path is relative to the workspace. +|workingsets|e.g. ws1,ws2|(optional) This will create working sets (in eclipse). Each module (eclipse project) of this project will be part of all these working sets. Working sets will be automatically created if necessary. +|workspace|main|Workspace to use for checkout and import. Default is main. +|git.url|e.g. http://github.com/someorg/someproject|(required) Git URL to use for cloning the project. +|git.branch|e.g. develop|(optional) Git branch to checkout. Git default branch is default. +|build.path|e.g. . (default)|(optional) The directory inside path where to trigger an initial build after clone or pull (if build.cmd is set). For a regular project use . to build top-level project. +|build.cmd +|e.g. mvn -DskipTests=true -Darchetype.test.skip=true clean install +|(optional) The devonfw command to invoke to build the project after clone or pull. If omitted no build is triggered. +|eclipse|e.g. import|(optional) Desired action for eclipse IDE. If you put import here all modules (eclipse projects) in the current project will be imported into eclipse. If you leave this out or put any other value for this parameter, no change in eclipse is done. +|active|true|(optional) If set to false the project is skipped during the setup. +|== =

+
+
+

Please note that the .properties file is parsed via shell and not via java. So be careful with "advanced" features .properties files normally support.

+
+
+
+
+
+
+

Advanced Features

+ +
+

Cross-Platform Tooling

+ +
+
+

Git Client

+
+

If you are looking for a git client that works cross-platform we recommend to use Fork.

+
+
+
+

Draw Diagrams

+
+

To draw diagrams for your project or for blueprints in devonfw, we recommend the following cross-platform tools:

+
+
+
    +
  • +

    draw.io is a powerful generic vector painting program (similar to visio). You can get a free open-source edition for your desktop from here.

    +
  • +
  • +

    ObjectAid is a nice and easy to use eclipse plugin that you can use to quickly create UML diagrams from existing code. While class-diagrams are supported for free, you need to buy a license if you want to use the other diagram types.

    +
  • +
  • +

    PlantUML is a great tool that can render UML diagrams from simple markup that can be easily managed in git or other version-control systems together with your code. Its simplicity allows branching and merging unlike other greedy binary UML data-formats.

    +
  • +
+
+
+
+

Browser Plugins

+
+

There are tons of helpful browser plugins out there and it might be a matter of personal taste what you like to have installed. However, as we are heavily using github we want to promote octotree. +In case you also work with ZenHub you might want to install the Zenhub Browser Extension.

+
+ +
+
+

Windows Tooling

+ +
+
+

Installing software

+
+

The devon IDE already contains a lot of software. But if you need more, here are some ways to get it easily:

+
+
+
+

Chocolatey

+
+

Chocolatey is a repository for free and open source software similar to the repositories known from Linux such as apt, apk, pacman, …​

+
+
+
+

Winget

+
+

Microsoft is also working on a repository for Windows called winget. It is currently in alpha state, but is expected to be integrated in the upcoming Windows 11.

+
+
+
+

Integration into Windows-Explorer

+
+

After you have set up your devonfw-ide on a windows machine, +you already have windows-explorer integration out-of-the-box. +Just right-click on the folder you would like to open in a terminal and choose from the context menu:

+
+
+
    +
  • +

    Git Bash

    +
  • +
  • +

    Open devonfw cmd shell here

    +
  • +
  • +

    Open devonfw PowerShell here

    +
  • +
  • +

    Open devonfw Cygwin Bash here (only if cygwin was installed during setup)

    +
  • +
+
+
+
+

Tabs everywhere

+
+

Many people got used to tabs that have been introduced by all major browsers:

+
+
+
+tabs in firefox +
+
Figure 1. Tabs in Firefox
+
+
+

This nice feature can be added to many other tools.

+
+
+
+

Tabs for Windows Explorer

+
+

If you want to have tabs for windows explorer simply install Clover

+
+
+
+tabs in windows explorer +
+
Figure 2. Tabs in Windows Explorer
+
+
+
+

Tabs for SSH

+
+

If you want to have tabs for your SSH client Putty (or even better Kitty that comes with WinSCP integration) you simply install SuperPutty +BTW: Windows 10 has already an SSH client included.

+
+
+
+tabs for SSH sessions +
+
Figure 3. Tabs for SSH
+
+
+
+

Tabs for CMD

+
+

If you want to have tabs for your windows command-line you simply install ConEmu. Here you can also add other shells like Putty. +Also you should have a look at the new Windows Terminal which also supports tabs.

+
+
+
+tabs for windows shells +
+
Figure 4. Tabs for CMD
+
+
+

See integration to make ConEmu work flawless with devonfw-ide.

+
+
+
+

Windows Helpers

+ +
+
+

Handle passwords

+
+

Do you want complex passwords that differ for each account for security? Do you only want to remember a single password for simplicity? Do you want to have both? Then, you need to install KeePass right now.

+
+
+
+

Real text editor

+
+

A real developer needs a real text editor and not windows built in notepad. +The most common choice is Notepad++.

+
+
+
+

Real compression tool

+
+

Do you need to deal with ZIP files, TGZ, dpkg, etc.? Just install 7zip and forget about Windows built-in ZIP support (that is buggy with long file paths, etc.).

+
+
+
+

Smarter clipboard

+
+

Do you want to paste something from the clipboard but meanwhile you had to copy something else? Just, one of the many things you can easily do with ditto.

+
+
+
+

PowerToys

+
+

Microsoft provides some extensions to improve the workflow in windows called PowerToys. They include tools like a file renamer, a way to order your windows on the screen, a color picker and more.

+
+
+
+

Sysinternals Tools

+
+

A real developer will quickly notice that Windows built-in tools to analyze processes, network connections, autostarts, etc. are quite poor. So, what you really would like is the Sysinternals-Suite. You can make process-explorer your default task manager. Use autoruns to prevent nasty background things to be started automatically. Use tcpview to figure out which process is blocking port 8080, etc.

+
+
+
+

Cope with file locks

+
+

Did you ever fail to delete a file or directory that was locked by some process and you did not even know which one it was? +Then you might love IoBit Unlocker. +See also this article.

+
+
+
+ +
+

Are you used to symbolic and hard links in Linux? Do you have to work with Windows? Would you also like to have such links in Windows? Why not? Windows supports real links (not shortcuts like in other cases). +If you even want to have it integrated in windows explorer you might want to install linkshellextension. However, you might want to disable SmartMove in the configuration if you face strange performance issues when moving folders.

+
+
+
+

Linux

+
+

Install Cygwin and get your bash in windows with ssh-agent, awk, sed, tar, and all the tools you love (or hate). Windows 10 has already a Linux as an installable feature included: WSL and from Version 2004 on WSL2, which is a native Linux Kernel running on Windows (in a light weight VM).

+
+
+
+

X11

+
+

Do you want to connect via SSH and need to open an X11 app from the server? Do you want to see the GUI on your windows desktop? +No problem: Install VcXsrv.

+
+
+
+

Keyboard Freak

+
+

Are you a keyboard shortcut person? Do you want to have shortcuts for things like « and » ? +Then you should try AutoHotKey. +For the example (« and ») you can simply use this script to get started:

+
+
+
+
^<::Send {U+00AB}
+^+<::Send {U+00BB}
+
+
+
+

First, just press [ctrl][<] and [ctrl][>] ([ctrl][shift][<]). Next, create shortcuts to launch your IDE, to open your favorite tool, etc. +If you like a GUI to easily configure the scripts, that comes with a lot of extensions preinstalled, you should have a look at Ac’tive Aid.

+
+
+
+

Paint anywhere on your desktop

+
+

Do you collaborate sharing your screen, and want to mark a spot on top of what you see? Use Epic Pen to do just that.

+
+
+
+

Analyze graphs

+
+

Do you need to visualize complex graph structures? Convert them to Trivial Graph Format (.tgf), and run yEd to get an interactive visualization of your graph.

+
+
+
+

Up your screen capture game

+
+

Capture any part of your screen with a single click, directly upload to dropbox, or run a svn commit all in one go with Greenshot. Another screen capture tool where you can easily manage and edit your screenshots and also do screen recordings with is Screenpresso.

+
+
+
+

Fast Search in Windows

+
+

Everything is a desktop search utility for Windows that can rapidly find files and folders by name.

+
+ +
+
+

MacOS Tooling

+ +
+
+

Finder

+
+

If you want to open a terminal from a folder in Finder and automatically get your environment set properly for devonfw-ide you will find the perfect solution here.

+
+
+
+devonfw-ide integration in MacOS Finder +
+
+
+

So after installing (see below) the integration(s) provided here, you can easily open a terminal ready for your devonfw-ide:

+
+
+
    +
  • +

    right click ([control] + click) on file or folder in Finder

    +
  • +
  • +

    Expand the Quick-Actions sub-menu

    +
  • +
  • +

    Click on the desired action (e.g. Open devonfw-Terminal here)

    +
  • +
  • +

    Verify that your environment is properly initialized by invoking:

    +
    +
    +
    mvn -v
    +
    +
    +
  • +
+
+
+

To get this feature for macOS Terminal.app open Finder and run the workflow system/mac/terminal/Open_devonfw-Terminal_here.workflow (in ${DEVON_IDE_HOME}). For iTerm2.app (that can be installed from App Store) do the same with system/mac/iterm/Open_devonfw-iTerm_here.workflow.

+
+
+
+

Keyboard

+
+

Keyboard support is not an integration however, some users coming from other platforms may struggle with the way macOS deals with (external non-apple) keyboards. +So to make it short: if you are happy with your keyboard and shortcuts, you can skip all the following. +Otherwise, if you think that pressing keys like Home, End, etc. should just work as expected or pressing Alt Gr should allow you to type the special characters as printed on your German keyboard then here you will find a solution to your problems! +To get all automated you can just run the script system/mac/keyboard/install-mac-keyboard-support.sh (in ${DEVON_IDE_HOME}). +If you would like to understand what is going on, you want to customize the keyboard settings to your needs, or you want a keyboard layout other than German ISO, please read on.

+
+
+
+

Keyboard Layouts

+
+

Keyboard layouts allow a fine-grained mapping of each key on your keyboard to its resulting input character or behaviour. +They are macOS native features and do not need to have software running as a background service to make the keyboard mapping work (see Karabiner section below as an alternative). +They are provided as so called bundle (white lego brick icon). Like a macOS app this is a folder containing a Contents folder with a specific sub-folder structure. +In the Resources subfolder *.keylayout files are placed and define the exact mapping for the keyboard. +As an example we provide a Keyboard Layouts folder containing a bundle for a German keyboard mapping.

+
+
+

To install keyboard layouts simply double-click the bundle or copy it to ~/Library/Keyboard Layouts. +To actually use them go to System Preferences and select Keyboard. +Then, select the tab Input Sources. +With the + button you can add a keyboard layout for your daily usage with your Mac. +Please note that the keyboard layout shipped with devonfw-ide is called German-ISO and can be found in the Others section at the end of the list. +It can be used as an example or template, if you want to create your own layout.

+
+
+
+Keyboard Preferences / Input Sources +
+
+
+

When you have multiple mappings in place, on the top menu bar you will find a little icon next to the current time that allows you to switch between the keyboard layouts, which is very handy when you switch from your native MacBook keyboard to an external USB keyboard or vice versa. +Even for a pure MacOS geek this can be helpful in case a friend coming from Windows/Linux is supposed to type something on the Mac in a pair-programming session.

+
+
+

In our German keyboard mapping example you can use the keys like Alt Gr, etc. to type special characters as you would expect and as printed on your keyboard. +To make Pos1, End, etc. work properly across all apps please read on to the next section(s).

+
+
+

In case you would like to create your own keyboard layout you can of course edit the *.keylayout files in a text editor. +However, to make this much more comfortable, you can use the graphical editor tool Ukelele. +Besides the app itself, the Ukelele dmg file also contains a Documentation and a Resources folder. +The latter contains many keyboard layouts that you can use as a starting point.

+
+
+
+

Key Bindings

+
+

Still, various keyboard shortcuts might not work as expected for you. +Therefore, we provide you with an advanced configuration in the folder system/mac/keyboard/KeyBindings that you can copy to your ~/Library folder:

+
+
+
+
cd system/mac/keyboard/
+cp -r KeyBindings ~/Library
+
+
+
+

To make the changes work you need to log out and log in again or you can reboot. +After that, your Home (Pos1) and End buttons should work as expected including with selection via Shift and/or Command. +Also, you can use Command together with the left or right arrow key to move between words and combine it with Shift for selection. +As an example, for further customization you can press Command + < to type the unicode character «.

+
+
+

However, still some apps listen to keyboard events on a lower level and come with their own keyboard mappings. +In these apps you might still experience unexpected behaviour. +Solutions can be found in the following sub-sections.

+
+
+
+

Switch Control and Command

+
+

If you are used to windows or linux and get easily confused by the apple keyboard behaviour you might want to switch the Control and the Option key. +Open System Preferences and select Keyboard. +Then, in the first tab, click on the button Modifier Keys…​. +For every keyboard you can customize the behaviour of your modifier keys and therefore switch Control and Option as illustrated in the screenshot:

+
+
+
+Keyboard Preferences / Modifier Keys +
+
+
+

Programmers now should also disable that Control + Space is opening Spotlight Search as otherwise this shortcut can not be redefined in other apps like common IDEs.

+
+
+
+Keyboard Preferences / Shortcuts +
+
+
+
+

== Eclipse

+
+

In Eclipse, move and select by word as described above does not work. +Even worse, the most important shortcut does not work: Control + Space for code completion (content assist). +You can manually redefine the key bindings in Preferences under General > Keys. +However, with multiple IDE installations and workspaces this will quickly get tedious. +Therefore, you can Export and Import specific Preferences such as Keys Preferences to/from a *.epf (Eclipse PreFerences) file. +We have done all this for you so you can just import the file located in system/mac/keyboard/Eclipse/eclipse-mac-keybindings.epf into your Eclipse. +Happy coding.

+
+
+
+

Karabiner

+
+

If you want more dynamics and do not worry about an app that has to run in the background to make your keyboard work as you like (no relevant performance overhead), you can try Karabiner Elements. +This is a powerful tool to remap your keyboard shortcuts. +In the UI you can only directly create and edit Simple Modifications that are too limited for most use-cases. +However, using Complex Modifications you can do a lot of magic to customize the keyboard behaviour to your personal needs. +A key with any combination of modifiers can be mapped to any key with arbitrary modifiers. +This can also be bound to conditions based on the frontmost application or the keyboard model. +These complex modifications are configured as *.json files. +We have included a set with useful rules for external keyboards, programmer shortcuts, etc. +If you have Karabiner installed, you only need to copy the contents of the karabiner folder located in this directory to your ~/.config folder:

+
+
+
+
cd system/mac/keyboard/
+cp karabiner/assets/complex_modifications/*.json ~/.config/karabiner/assets/complex_modifications/
+
+
+
+

Now, if you open the Complex Modifications in the Karabiner app, you can click on the + Add rule button and will see these mappings in the pop up. +Select the rules you want to add (e.g. add all) and you are done. +Unlike other solutions, you can quickly tweak your keyboard without the need to log out and restart apps, which gives faster trial and error turnarounds. +Further, if you want to tweak your own configs, Karabiner comes with a secondary app called Karabiner-EventViewer that shows you the names of the keys, modifiers, and apps for the events you are triggering. +This is very helpful to get the config right.

+
+ +
+
+

Linux Tooling

+
+

There is nothing in this section so far. If you are a Linux user, please share your experience and provide your valuable hints.

+
+ +
+
+

Lombok

+
+

Even though not officially recommended by devon4j some projects want to use lombok in their project. +As this requires some tweaks for IDEs we do support you with this guide in case you want to use it.

+
+
+
+

Lombok in Eclipse

+
+

For eclipse there is a plugin to activate lombok support in eclipse. +We have this already configured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon eclipse add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for lombok based projects you only need to activate this plugin in your project specific settings in lombok.properties for eclipse (replace false with true for plugin_active).

+
+
+
+

Lombok for VS-Code

+
+

For VisualStudio Code there is an extension to activate lombok support in VS-Code. +We have this already preconfigured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon vscode add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for lombok based projects you only need to activate this plugin in your project specific settings in lombok.properties for vscode (replace false with true for plugin_active).

+
+
+
+

Lombok for IntelliJ

+
+

For IntelliJ there is a plugin to activate lombok support in IntelliJ. +Currently we have not yet configured or automated this in devonfw-ide. +Please contribute to change this. See issues #453 and #491.

+
+
+
+
+
+

Support

+ +
+

Migration from oasp4j-ide

+
+

The devonfw-ide is a completely new and innovative solution for managing the local development environment that has been created from scratch. +Releases of OASP as well as releases of devonfw until version 3.1.x are based on the old oasp4j-ide that is now considered deprecated. As devonfw-ide is a complete redesign this will have some impact for the users. This section should help and assist so you do not get lost.

+
+
+
+

Get familiar with devonfw-ide

+
+

First of all you should roughly get familiar with the new devonfw-ide. The key features and changes are:

+
+
+
    +
  • +

    platform-agnostic (supports Windows, Mac, and Linux in a single distribution)

    +
  • +
  • +

    small core (reduced the download package from ~2 gigabyte to ~2 megabyte)

    +
  • +
  • +

    fast and easy updates (built in update support)

    +
  • +
  • +

    minimum number of scripts (removed tons of end-user scripts making things much simpler)

    +
  • +
  • +

    fully automated setup (run setup script and you are ready - even for advanced features that had to be configured manually before)

    +
  • +
  • +

    single command for everything (entire CLI available via new devon command)

    +
  • +
+
+
+

For all the details you should study the documentation starting from the beginning.

+
+
+
+

Migration of existing oasp4j-ide installation

+
+
    +
  • +

    extract new devonfw-ide-scripts on top of your existing installation

    +
  • +
  • +

    run setup

    +
  • +
  • +

    done

    +
  • +
+
+
+

If you get errors:

+
+
+
    +
  • +

    ask your technical lead to fix the settings git repo for devonfw-ide or offer him to do it for you.

    +
  • +
  • +

    you need to merge the devon folder into your settings

    +
  • +
  • +

    you need to merge the devon.properties into your settings

    +
  • +
  • +

    you should check your variables[-customized][.bat] and merge required customizations into the proper configuration

    +
  • +
+
+
+
+

Hints for users after migration

+
+

Getting used to all the new commands might be tedious when starting after a migration.

+
+
+
Comparison of commands
+

|== == == == == == == == == == == = +|oasp4j-ide command|devonfw-ide command|Comment +|create-or-update-workspace|devon eclipse ws-update +.4+|actually not needed anymore as workspace is updated automatically when IDE is launched. To launch your IDE simply run devon eclipse, devon intellij, or devon vscode. If you like to get launch scripts for your IDE e.g. Eclipse just call devon eclipse --all create-script. +|create-or-update-workspace «workspace»|cd «workspace» && devon eclipse ws-update +|update-all-workspaces|devon eclipse --all ws-update +|create-or-update-workspace-vs|devon vscode ws-update

+
+
+

|devcon workspace create «workspace»|Simply create the «workspace» directory (e.g. cd workspaces && mkdir examples)|

+
+
+

|scripts/update-eclipse-workspace-settings|devon eclipse ws-reverse|To add new properties (old option --new) use devon eclipse ws-reverse-add

+
+
+

|devcon project build
+devcon devon4j build
+devcon devon4ng build +|devon build|

+
+
+

|devcon devon4j create|devon java create|

+
+
+

|devcon devon4ng create|devon ng create|

+
+
+

|devcon system *
+devcon dist * +|setup or devon ide setup|

+
+
+

|console.bat|-|Simply open terminal in selected folder. On Windows right-click folder in windows-explorer and select open devonfw CMD here.

+
+
+

|devcon help|devon help|

+
+
+

|devcon doc|Read the documentation from devonfw.com| +|== == == == == == == == == == == =

+
+
+ +
+
+

License

+
+

The product devonfw-ide is licensed under the following terms.

+
+
+

Binaries of this product have been made available to you by devonfw under the Apache Public License 2.0.

+
+
+

The documentation of this product is licensed under the terms of the Creative Commons License (Attribution-No Derivatives 4.0 International).

+
+
+

All of the source code to this product is available under licenses which are both free and open source.

+
+
+

More specifically, most of the source code is available under the Apache Public License 2.0. The remainder of the software which is not under the Apache license is available under one of a variety of other free and open source licenses. Those that require reproduction of the license text in the distribution are given below. (Note: your copy of this product may not contain code covered by one or more of the licenses listed here, depending on the exact product and version you choose.)

+
+
+

The following table shows the components that may be used. The column inclusion indicates the way the component is included:

+
+
+
    +
  • +

    directly included means the component is directly contained in the download package of devonfw-ide we provide

    +
  • +
  • +

    default setup means the component is not initially included but will be downloaded during the setup by default

    +
  • +
  • +

    optional means the component is neither initially included nor downloaded by default, but only gets downloaded and installed if explicitly triggered by you when invoking additional commands or if explicitly configured by your project.

    +
  • +
+
+
+
Third party components
+

|== == == == == == == == == == == = +|Component|Inclusion|License +|https://github.com/devonfw/ide[devonfw-ide] | Directly included |https://github.com/devonfw/ide/blob/master/LICENSE[ASL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] API | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] Implementation | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://openjdk.java.net/[OpenJDK] / AdoptOpenJDK (Java) |Default Setup| GPLv2 +|https://maven.apache.org/[Maven] | Default Setup|https://www.apache.org/licenses/LICENSE-2.0[ASL 2.0] +|https://code.visualstudio.com/[VS Code] |Optional| MIT (Terms) +|https://github.com/devonfw/extension-pack-vscode[extension-pack-vscode] |Optional|https://github.com/devonfw/extension-pack-vscode/blob/master/LICENSE[ASL 2.0] +|https://www.eclipse.org/[Eclipse] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] +|https://github.com/devonfw/cobigen[CobiGen] |Optional|https://github.com/devonfw/cobigen/blob/master/LICENSE.txt[ASL 2.0] +|https://marketplace.eclipse.org/content/tm-terminal[TM Terminal] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] (see here) +|https://github.com/iloveeclipse/anyedittools/[AnyEdit] |Optional|https://github.com/iloveeclipse/anyedittools/blob/master/LICENSE.md[EPL 1.0] +|https://checkstyle.org/eclipse-cs/[EclipseCS] |Optional|https://github.com/checkstyle/eclipse-cs/blob/master/LICENSE[LGPL 2.1] +|https://marketplace.eclipse.org/content/spotbugs-eclipse-plugin[SpotBugs Eclipse plugin] |Optional|https://github.com/spotbugs/spotbugs/blob/master/LICENSE[LGPL 2.1] +|https://www.eclemma.org/[EclEmma] |Optional|https://www.eclemma.org/license.html[EPL 1.0] +|https://basti1302.github.io/startexplorer/[StartExplorer] |Optional|http://www.wtfpl.net/txt/copying/[WTFPL 2] +|http://myregexp.com/eclipsePlugin.html[regex tester] 
|Optional|http://www.gnu.org/licenses/gpl-2.0.html[GPL 2.0] (see here) +|https://github.com/m-m-m/eclipse-templatevariables/[eclipse-templatevariables] |Optional|https://github.com/m-m-m/eclipse-templatevariables/blob/master/LICENSE.txt[ASL 2.0] +|https://nodejs.org/[Node.js] |Default Setup|https://raw.githubusercontent.com/nodejs/node/master/LICENSE[License] +|https://www.npmjs.com/[NPM] |Default Setup|https://github.com/npm/cli/blob/latest/LICENSE[Artistic License 2.0] (Terms) +|https://cli.angular.io/[Angular CLI] (ng) |Optional|https://cli.angular.io/license.html[MIT] +|http://groovy-lang.org/[Groovy]|Optional|https://github.com/apache/groovy/blob/master/LICENSE[ASL 2.0] +|https://ant.apache.org/[Apache Ant]|Optional|https://github.com/apache/ant/blob/master/LICENSE[ASL 2.0] +|https://gradle.org/[Gradle] |Optional|https://github.com/gradle/gradle/blob/master/LICENSE[ASL 2.0] +|https://jenkins.io/[Jenkins] |Optional|https://github.com/jenkinsci/jenkins/blob/master/LICENSE.txt[MIT] +|https://www.sonarsource.com/plans-and-pricing/community/[SonarQube (Community Edition)] |Optional|https://github.com/SonarSource/sonarqube/blob/master/LICENSE.txt[LGPL 3.0] +|https://www.sonarlint.org/eclipse/[SonarLint] |Optional|https://github.com/SonarSource/sonarlint-eclipse/blob/master/LICENSE.txt[LGPL 3+] +|https://github.com/devonfw/cicdgen[cicdgen] |Optional|https://github.com/devonfw/cicdgen/blob/develop/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4j[devon4j] |Optional|https://github.com/devonfw/devon4j/blob/develop/LICENSE[ASL 2.0] +|https://github.com/devonfw/devon4ng[devon4ng] |Optional|https://github.com/devonfw/devon4ng/blob/master/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4node[devon4node] |Optional|https://github.com/devonfw/devon4node/blob/develop/LICENSE.txt[ASL 2.0] +|https://www.jetbrains.com/idea/[IntelliJ IDEA] |Optional|https://www.jetbrains.com/opensource/idea/[ASL 2.0] +|http://www.jasypt.org/[jasypt] 
|Optional|http://www.jasypt.org/license.html[ASL 2.0] +|https://www.docker.com/[docker]|Optional|https://docs.docker.com/engine/#licensing[ASL 2.0] and EULA +|https://kubernetes.io/[kubernetes]|Optional|https://github.com/kubernetes/kubernetes/blob/master/LICENSE[ASL 2.0] +|===

+
+
+
+

Apache Software License - Version 2.0

+
+
+
                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+
+
+

Eclipse Public License - Version 1.0

+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+
+1. DEFINITIONS
+
+"Contribution" means:
+
+a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and
+
+b) in the case of each subsequent Contributor:
+
+i) changes to the Program, and
+
+ii) additions to the Program;
+
+where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program.
+
+"Contributor" means any person or entity that distributes the Program.
+
+"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+"Program" means the Contributions distributed in accordance with this Agreement.
+
+"Recipient" means anyone who receives the Program under this Agreement, including all Contributors.
+
+2. GRANT OF RIGHTS
+
+a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form.
+
+b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+
+c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+
+d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+
+3. REQUIREMENTS
+
+A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that:
+
+a) it complies with the terms and conditions of this Agreement; and
+
+b) its license agreement:
+
+i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+
+ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+
+iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and
+
+iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange.
+
+When the Program is made available in source code form:
+
+a) it must be made available under this Agreement; and
+
+b) a copy of this Agreement must be included with each copy of the Program.
+
+Contributors may not remove or alter any copyright notices contained within the Program.
+
+Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution.
+
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved.
+
+This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation.
+
+
+
+
+

Eclipse Public License - Version 2.0

+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE (“AGREEMENT”). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+1. DEFINITIONS
+
+“Contribution” means:
+
+    a) in the case of the initial Contributor, the initial content Distributed under this Agreement, and
+    b) in the case of each subsequent Contributor:
+        i) changes to the Program, and
+        ii) additions to the Program;
+    where such changes and/or additions to the Program originate from and are Distributed by that particular Contributor. A Contribution “originates” from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include changes or additions to the Program that are not Modified Works.
+
+“Contributor” means any person or entity that Distributes the Program.
+
+“Licensed Patents” mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+“Program” means the Contributions Distributed in accordance with this Agreement.
+
+“Recipient” means anyone who receives the Program under this Agreement or any Secondary License (as applicable), including Contributors.
+
+“Derivative Works” shall mean any work, whether in Source Code or other form, that is based on (or derived from) the Program and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship.
+
+“Modified Works” shall mean any work in Source Code or other form that results from an addition to, deletion from, or modification of the contents of the Program, including, for purposes of clarity any new file in Source Code form that contains any contents of the Program. Modified Works shall not include works that contain only declarations, interfaces, types, classes, structures, or files of the Program solely in each case in order to link to, bind by name, or subclass the Program or Modified Works thereof.
+
+“Distribute” means the acts of a) distributing or b) making available in any manner that enables the transfer of a copy.
+
+“Source Code” means the form of a Program preferred for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+“Secondary License” means either the GNU General Public License, Version 2.0, or any later versions of that license, including any exceptions or additional permissions as identified by the initial Contributor.
+2. GRANT OF RIGHTS
+
+    a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, Distribute and sublicense the Contribution of such Contributor, if any, and such Derivative Works.
+    b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in Source Code or other form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+    c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to Distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+    d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+    e) Notwithstanding the terms of any Secondary License, no Contributor makes additional grants to any Recipient (other than those set forth in this Agreement) as a result of such Recipient's receipt of the Program under the terms of a Secondary License (if permitted under the terms of Section 3).
+
+3. REQUIREMENTS
+
+3.1 If a Contributor Distributes the Program in any form, then:
+
+    a) the Program must also be made available as Source Code, in accordance with section 3.2, and the Contributor must accompany the Program with a statement that the Source Code for the Program is available under this Agreement, and informs Recipients how to obtain it in a reasonable manner on or through a medium customarily used for software exchange; and
+    b) the Contributor may Distribute the Program under a license different than this Agreement, provided that such license:
+        i) effectively disclaims on behalf of all other Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+        ii) effectively excludes on behalf of all other Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+        iii) does not attempt to limit or alter the recipients' rights in the Source Code under section 3.2; and
+        iv) requires any subsequent distribution of the Program by any party to be under a license that satisfies the requirements of this section 3.
+
+3.2 When the Program is Distributed as Source Code:
+
+    a) it must be made available under this Agreement, or if the Program (i) is combined with other material in a separate file or files made available under a Secondary License, and (ii) the initial Contributor attached to the Source Code the notice described in Exhibit A of this Agreement, then the Program may be made available under the terms of such Secondary Licenses, and
+    b) a copy of this Agreement must be included with each copy of the Program.
+
+3.3 Contributors may not remove or alter any copyright, patent, trademark, attribution notices, disclaimers of warranty, or limitations of liability (‘notices’) contained within the Program from any copy of the Program which they Distribute, provided that Contributors may add their own appropriate notices.
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor (“Commercial Contributor”) hereby agrees to defend and indemnify every other Contributor (“Indemnified Contributor”) against any losses, damages and costs (collectively “Losses”) arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement, including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be Distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to Distribute the Program (including its Contributions) under the new version.
+
+Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. Nothing in this Agreement is intended to be enforceable by any entity that is not a Contributor or Recipient. No third-party beneficiary rights are created under this Agreement.
+Exhibit A – Form of Secondary Licenses Notice
+
+“This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License, v. 2.0 are satisfied: {name license(s), version(s), and exceptions or additional permissions here}.”
+
+    Simply including a copy of this Agreement, including this Exhibit A is not sufficient to license the Source Code under Secondary Licenses.
+
+    If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice.
+
+    You may add additional accurate notices of copyright ownership.
+
+
+
+
+

MIT License

+
+
+
Copyright <YEAR> <COPYRIGHT HOLDER>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+
+
+

Artistic License - Version 2.0

+
+
+
Copyright (c) 2000-2006, The Perl Foundation.
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+Preamble
+
+This license establishes the terms under which a given free software Package may be copied, modified, distributed, and/or redistributed. The intent is that the Copyright Holder maintains some artistic control over the development of that Package while still keeping the Package available as open source and free software.
+
+You are always permitted to make arrangements wholly outside of this license directly with the Copyright Holder of a given Package. If the terms of this license do not permit the full use that you propose to make of the Package, you should contact the Copyright Holder and seek a different licensing arrangement.
+Definitions
+
+"Copyright Holder" means the individual(s) or organization(s) named in the copyright notice for the entire Package.
+
+"Contributor" means any party that has contributed code or other material to the Package, in accordance with the Copyright Holder's procedures.
+
+"You" and "your" means any person who would like to copy, distribute, or modify the Package.
+
+"Package" means the collection of files distributed by the Copyright Holder, and derivatives of that collection and/or of those files. A given Package may consist of either the Standard Version, or a Modified Version.
+
+"Distribute" means providing a copy of the Package or making it accessible to anyone else, or in the case of a company or organization, to others outside of your company or organization.
+
+"Distributor Fee" means any fee that you charge for Distributing this Package or providing support for this Package to another party. It does not mean licensing fees.
+
+"Standard Version" refers to the Package if it has not been modified, or has been modified only in ways explicitly requested by the Copyright Holder.
+
+"Modified Version" means the Package, if it has been changed, and such changes were not explicitly requested by the Copyright Holder.
+
+"Original License" means this Artistic License as Distributed with the Standard Version of the Package, in its current version or as it may be modified by The Perl Foundation in the future.
+
+"Source" form means the source code, documentation source, and configuration files for the Package.
+
+"Compiled" form means the compiled bytecode, object code, binary, or any other form resulting from mechanical transformation or translation of the Source form.
+Permission for Use and Modification Without Distribution
+
+(1) You are permitted to use the Standard Version and create and use Modified Versions for any purpose without restriction, provided that you do not Distribute the Modified Version.
+Permissions for Redistribution of the Standard Version
+
+(2) You may Distribute verbatim copies of the Source form of the Standard Version of this Package in any medium without restriction, either gratis or for a Distributor Fee, provided that you duplicate all of the original copyright notices and associated disclaimers. At your discretion, such verbatim copies may or may not include a Compiled form of the Package.
+
+(3) You may apply any bug fixes, portability changes, and other modifications made available from the Copyright Holder. The resulting Package will still be considered the Standard Version, and as such will be subject to the Original License.
+Distribution of Modified Versions of the Package as Source
+
+(4) You may Distribute your Modified Version as Source (either gratis or for a Distributor Fee, and with or without a Compiled form of the Modified Version) provided that you clearly document how it differs from the Standard Version, including, but not limited to, documenting any non-standard features, executables, or modules, and provided that you do at least ONE of the following:
+
+(a) make the Modified Version available to the Copyright Holder of the Standard Version, under the Original License, so that the Copyright Holder may include your modifications in the Standard Version.
+(b) ensure that installation of your Modified Version does not prevent the user installing or running the Standard Version. In addition, the Modified Version must bear a name that is different from the name of the Standard Version.
+(c) allow anyone who receives a copy of the Modified Version to make the Source form of the Modified Version available to others under
+(i) the Original License or
+(ii) a license that permits the licensee to freely copy, modify and redistribute the Modified Version using the same licensing terms that apply to the copy that the licensee received, and requires that the Source form of the Modified Version, and of any works derived from it, be made freely available in that license fees are prohibited but Distributor Fees are allowed.
+Distribution of Compiled Forms of the Standard Version or Modified Versions without the Source
+
+(5) You may Distribute Compiled forms of the Standard Version without the Source, provided that you include complete instructions on how to get the Source of the Standard Version. Such instructions must be valid at the time of your distribution. If these instructions, at any time while you are carrying out such distribution, become invalid, you must provide new instructions on demand or cease further distribution. If you provide valid instructions or cease distribution within thirty days after you become aware that the instructions are invalid, then you do not forfeit any of your rights under this license.
+
+(6) You may Distribute a Modified Version in Compiled form without the Source, provided that you comply with Section 4 with respect to the Source of the Modified Version.
+Aggregating or Linking the Package
+
+(7) You may aggregate the Package (either the Standard Version or Modified Version) with other packages and Distribute the resulting aggregation provided that you do not charge a licensing fee for the Package. Distributor Fees are permitted, and licensing fees for other components in the aggregation are permitted. The terms of this license apply to the use and Distribution of the Standard or Modified Versions as included in the aggregation.
+
+(8) You are permitted to link Modified and Standard Versions with other works, to embed the Package in a larger work of your own, or to build stand-alone binary or bytecode versions of applications that include the Package, and Distribute the result without restriction, provided the result does not expose a direct interface to the Package.
+Items That are Not Considered Part of a Modified Version
+
+(9) Works (including, but not limited to, modules and scripts) that merely extend or make use of the Package, do not, by themselves, cause the Package to be a Modified Version. In addition, such works are not considered parts of the Package itself, and are not subject to the terms of this license.
+General Provisions
+
+(10) Any use, modification, and distribution of the Standard or Modified Versions is governed by this Artistic License. By using, modifying or distributing the Package, you accept this license. Do not use, modify, or distribute the Package, if you do not accept this license.
+
+(11) If your Modified Version has been derived from a Modified Version made by someone other than you, you are nevertheless required to ensure that your Modified Version complies with the requirements of this license.
+
+(12) This license does not grant you the right to use any trademark, service mark, tradename, or logo of the Copyright Holder.
+
+(13) This license includes the non-exclusive, worldwide, free-of-charge patent license to make, have made, use, offer to sell, sell, import and otherwise transfer the Package with respect to any patent claims licensable by the Copyright Holder that are necessarily infringed by the Package. If you institute patent litigation (including a cross-claim or counterclaim) against any party alleging that the Package constitutes direct or contributory patent infringement, then this Artistic License to you shall terminate on the date that such litigation is filed.
+
+(14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+
+

Creative Commons License - Attribution-NoDerivatives 4.0 International

+
+
+
By exercising the Licensed Rights (defined below), You accept and agree to be bound by the terms and conditions of this Creative Commons Attribution-NoDerivatives 4.0 International Public License ("Public License"). To the extent this Public License may be interpreted as a contract, You are granted the Licensed Rights in consideration of Your acceptance of these terms and conditions, and the Licensor grants You such rights in consideration of benefits the Licensor receives from making the Licensed Material available under these terms and conditions.
+
+Section 1 – Definitions.
+
+    Adapted Material means material subject to Copyright and Similar Rights that is derived from or based upon the Licensed Material and in which the Licensed Material is translated, altered, arranged, transformed, or otherwise modified in a manner requiring permission under the Copyright and Similar Rights held by the Licensor. For purposes of this Public License, where the Licensed Material is a musical work, performance, or sound recording, Adapted Material is always produced where the Licensed Material is synched in timed relation with a moving image.
+    Copyright and Similar Rights means copyright and/or similar rights closely related to copyright including, without limitation, performance, broadcast, sound recording, and Sui Generis Database Rights, without regard to how the rights are labeled or categorized. For purposes of this Public License, the rights specified in Section 2(b)(1)-(2) are not Copyright and Similar Rights.
+    Effective Technological Measures means those measures that, in the absence of proper authority, may not be circumvented under laws fulfilling obligations under Article 11 of the WIPO Copyright Treaty adopted on December 20, 1996, and/or similar international agreements.
+    Exceptions and Limitations means fair use, fair dealing, and/or any other exception or limitation to Copyright and Similar Rights that applies to Your use of the Licensed Material.
+    Licensed Material means the artistic or literary work, database, or other material to which the Licensor applied this Public License.
+    Licensed Rights means the rights granted to You subject to the terms and conditions of this Public License, which are limited to all Copyright and Similar Rights that apply to Your use of the Licensed Material and that the Licensor has authority to license.
+    Licensor means the individual(s) or entity(ies) granting rights under this Public License.
+    Share means to provide material to the public by any means or process that requires permission under the Licensed Rights, such as reproduction, public display, public performance, distribution, dissemination, communication, or importation, and to make material available to the public including in ways that members of the public may access the material from a place and at a time individually chosen by them.
+    Sui Generis Database Rights means rights other than copyright resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other essentially equivalent rights anywhere in the world.
+    You means the individual or entity exercising the Licensed Rights under this Public License. Your has a corresponding meaning.
+
+Section 2 – Scope.
+
+    License grant.
+        Subject to the terms and conditions of this Public License, the Licensor hereby grants You a worldwide, royalty-free, non-sublicensable, non-exclusive, irrevocable license to exercise the Licensed Rights in the Licensed Material to:
+            reproduce and Share the Licensed Material, in whole or in part; and
+            produce and reproduce, but not Share, Adapted Material.
+        Exceptions and Limitations. For the avoidance of doubt, where Exceptions and Limitations apply to Your use, this Public License does not apply, and You do not need to comply with its terms and conditions.
+        Term. The term of this Public License is specified in Section 6(a).
+        Media and formats; technical modifications allowed. The Licensor authorizes You to exercise the Licensed Rights in all media and formats whether now known or hereafter created, and to make technical modifications necessary to do so. The Licensor waives and/or agrees not to assert any right or authority to forbid You from making technical modifications necessary to exercise the Licensed Rights, including technical modifications necessary to circumvent Effective Technological Measures. For purposes of this Public License, simply making modifications authorized by this Section 2(a)(4) never produces Adapted Material.
+        Downstream recipients.
+            Offer from the Licensor – Licensed Material. Every recipient of the Licensed Material automatically receives an offer from the Licensor to exercise the Licensed Rights under the terms and conditions of this Public License.
+            No downstream restrictions. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, the Licensed Material if doing so restricts exercise of the Licensed Rights by any recipient of the Licensed Material.
+        No endorsement. Nothing in this Public License constitutes or may be construed as permission to assert or imply that You are, or that Your use of the Licensed Material is, connected with, or sponsored, endorsed, or granted official status by, the Licensor or others designated to receive attribution as provided in Section 3(a)(1)(A)(i).
+
+    Other rights.
+        Moral rights, such as the right of integrity, are not licensed under this Public License, nor are publicity, privacy, and/or other similar personality rights; however, to the extent possible, the Licensor waives and/or agrees not to assert any such rights held by the Licensor to the limited extent necessary to allow You to exercise the Licensed Rights, but not otherwise.
+        Patent and trademark rights are not licensed under this Public License.
+        To the extent possible, the Licensor waives any right to collect royalties from You for the exercise of the Licensed Rights, whether directly or through a collecting society under any voluntary or waivable statutory or compulsory licensing scheme. In all other cases the Licensor expressly reserves any right to collect such royalties.
+
+Section 3 – License Conditions.
+
+Your exercise of the Licensed Rights is expressly made subject to the following conditions.
+
+    Attribution.
+
+        If You Share the Licensed Material, You must:
+            retain the following if it is supplied by the Licensor with the Licensed Material:
+                identification of the creator(s) of the Licensed Material and any others designated to receive attribution, in any reasonable manner requested by the Licensor (including by pseudonym if designated);
+                a copyright notice;
+                a notice that refers to this Public License;
+                a notice that refers to the disclaimer of warranties;
+                a URI or hyperlink to the Licensed Material to the extent reasonably practicable;
+            indicate if You modified the Licensed Material and retain an indication of any previous modifications; and
+            indicate the Licensed Material is licensed under this Public License, and include the text of, or the URI or hyperlink to, this Public License.
+        For the avoidance of doubt, You do not have permission under this Public License to Share Adapted Material.
+        You may satisfy the conditions in Section 3(a)(1) in any reasonable manner based on the medium, means, and context in which You Share the Licensed Material. For example, it may be reasonable to satisfy the conditions by providing a URI or hyperlink to a resource that includes the required information.
+        If requested by the Licensor, You must remove any of the information required by Section 3(a)(1)(A) to the extent reasonably practicable.
+
+Section 4 – Sui Generis Database Rights.
+
+Where the Licensed Rights include Sui Generis Database Rights that apply to Your use of the Licensed Material:
+
+    for the avoidance of doubt, Section 2(a)(1) grants You the right to extract, reuse, reproduce, and Share all or a substantial portion of the contents of the database, provided You do not Share Adapted Material;
+    if You include all or a substantial portion of the database contents in a database in which You have Sui Generis Database Rights, then the database in which You have Sui Generis Database Rights (but not its individual contents) is Adapted Material; and
+    You must comply with the conditions in Section 3(a) if You Share all or a substantial portion of the contents of the database.
+
+For the avoidance of doubt, this Section 4 supplements and does not replace Your obligations under this Public License where the Licensed Rights include other Copyright and Similar Rights.
+
+Section 5 – Disclaimer of Warranties and Limitation of Liability.
+
+    Unless otherwise separately undertaken by the Licensor, to the extent possible, the Licensor offers the Licensed Material as-is and as-available, and makes no representations or warranties of any kind concerning the Licensed Material, whether express, implied, statutory, or other. This includes, without limitation, warranties of title, merchantability, fitness for a particular purpose, non-infringement, absence of latent or other defects, accuracy, or the presence or absence of errors, whether or not known or discoverable. Where disclaimers of warranties are not allowed in full or in part, this disclaimer may not apply to You.
+    To the extent possible, in no event will the Licensor be liable to You on any legal theory (including, without limitation, negligence) or otherwise for any direct, special, indirect, incidental, consequential, punitive, exemplary, or other losses, costs, expenses, or damages arising out of this Public License or use of the Licensed Material, even if the Licensor has been advised of the possibility of such losses, costs, expenses, or damages. Where a limitation of liability is not allowed in full or in part, this limitation may not apply to You.
+
+    The disclaimer of warranties and limitation of liability provided above shall be interpreted in a manner that, to the extent possible, most closely approximates an absolute disclaimer and waiver of all liability.
+
+Section 6 – Term and Termination.
+
+    This Public License applies for the term of the Copyright and Similar Rights licensed here. However, if You fail to comply with this Public License, then Your rights under this Public License terminate automatically.
+
+    Where Your right to use the Licensed Material has terminated under Section 6(a), it reinstates:
+        automatically as of the date the violation is cured, provided it is cured within 30 days of Your discovery of the violation; or
+        upon express reinstatement by the Licensor.
+    For the avoidance of doubt, this Section 6(b) does not affect any right the Licensor may have to seek remedies for Your violations of this Public License.
+    For the avoidance of doubt, the Licensor may also offer the Licensed Material under separate terms or conditions or stop distributing the Licensed Material at any time; however, doing so will not terminate this Public License.
+    Sections 1, 5, 6, 7, and 8 survive termination of this Public License.
+
+Section 7 – Other Terms and Conditions.
+
+    The Licensor shall not be bound by any additional or different terms or conditions communicated by You unless expressly agreed.
+    Any arrangements, understandings, or agreements regarding the Licensed Material not stated herein are separate from and independent of the terms and conditions of this Public License.
+
+Section 8 – Interpretation.
+
+    For the avoidance of doubt, this Public License does not, and shall not be interpreted to, reduce, limit, restrict, or impose conditions on any use of the Licensed Material that could lawfully be made without permission under this Public License.
+    To the extent possible, if any provision of this Public License is deemed unenforceable, it shall be automatically reformed to the minimum extent necessary to make it enforceable. If the provision cannot be reformed, it shall be severed from this Public License without affecting the enforceability of the remaining terms and conditions.
+    No term or condition of this Public License will be waived and no failure to comply consented to unless expressly agreed to by the Licensor.
+    Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority.
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 2.1

+
+
+
 Version 2.1, February 1999
+
+Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL.  It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users.
+
+This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below.
+
+When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things.
+
+To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it.
+
+For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights.
+
+We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library.
+
+To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others.
+
+Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license.
+
+Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs.
+
+When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library.
+
+We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances.
+
+For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License.
+
+In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system.
+
+Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library.
+
+The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you".
+
+A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables.
+
+The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".)
+
+"Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library.
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does.
+
+1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) The modified work must itself be a software library.
+    b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change.
+    c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License.
+    d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful.
+
+    (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices.
+
+Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy.
+
+This option is useful when you wish to copy part of the code of the Library into a program that is not a library.
+
+4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange.
+
+If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code.
+
+5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License.
+
+However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables.
+
+When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law.
+
+If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.)
+
+Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself.
+
+6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications.
+
+You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things:
+
+    a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.)
+    b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with.
+    c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution.
+    d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place.
+    e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy.
+
+For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute.
+
+7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above.
+    b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it.
+
+10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License.
+
+11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation.
+
+14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Libraries
+
+If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License).
+
+To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the library's name and an idea of what it does.
+Copyright (C) year  name of author
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2.1 of the License, or (at your option) any later version.
+
+This library is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with this library; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the library, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright interest in
+the library `Frob' (a library for tweaking knobs) written
+by James Random Hacker.
+
+signature of Ty Coon, 1 April 1990
+Ty Coon, President of Vice
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 3

+
+
+
Version 3, 29 June 2007
+
+Copyright © 2007 Free Software Foundation, Inc. <https://fsf.org/>
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+
+This version of the GNU Lesser General Public License incorporates the terms and conditions of version 3 of the GNU General Public License, supplemented by the additional permissions listed below.
+0. Additional Definitions.
+
+As used herein, “this License” refers to version 3 of the GNU Lesser General Public License, and the “GNU GPL” refers to version 3 of the GNU General Public License.
+
+“The Library” refers to a covered work governed by this License, other than an Application or a Combined Work as defined below.
+
+An “Application” is any work that makes use of an interface provided by the Library, but which is not otherwise based on the Library. Defining a subclass of a class defined by the Library is deemed a mode of using an interface provided by the Library.
+
+A “Combined Work” is a work produced by combining or linking an Application with the Library. The particular version of the Library with which the Combined Work was made is also called the “Linked Version”.
+
+The “Minimal Corresponding Source” for a Combined Work means the Corresponding Source for the Combined Work, excluding any source code for portions of the Combined Work that, considered in isolation, are based on the Application, and not on the Linked Version.
+
+The “Corresponding Application Code” for a Combined Work means the object code and/or source code for the Application, including any data and utility programs needed for reproducing the Combined Work from the Application, but excluding the System Libraries of the Combined Work.
+1. Exception to Section 3 of the GNU GPL.
+
+You may convey a covered work under sections 3 and 4 of this License without being bound by section 3 of the GNU GPL.
+2. Conveying Modified Versions.
+
+If you modify a copy of the Library, and, in your modifications, a facility refers to a function or data to be supplied by an Application that uses the facility (other than as an argument passed when the facility is invoked), then you may convey a copy of the modified version:
+
+    a) under this License, provided that you make a good faith effort to ensure that, in the event an Application does not supply the function or data, the facility still operates, and performs whatever part of its purpose remains meaningful, or
+    b) under the GNU GPL, with none of the additional permissions of this License applicable to that copy.
+
+3. Object Code Incorporating Material from Library Header Files.
+
+The object code form of an Application may incorporate material from a header file that is part of the Library. You may convey such object code under terms of your choice, provided that, if the incorporated material is not limited to numerical parameters, data structure layouts and accessors, or small macros, inline functions and templates (ten or fewer lines in length), you do both of the following:
+
+    a) Give prominent notice with each copy of the object code that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the object code with a copy of the GNU GPL and this license document.
+
+4. Combined Works.
+
+You may convey a Combined Work under terms of your choice that, taken together, effectively do not restrict modification of the portions of the Library contained in the Combined Work and reverse engineering for debugging such modifications, if you also do each of the following:
+
+    a) Give prominent notice with each copy of the Combined Work that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the Combined Work with a copy of the GNU GPL and this license document.
+    c) For a Combined Work that displays copyright notices during execution, include the copyright notice for the Library among these notices, as well as a reference directing the user to the copies of the GNU GPL and this license document.
+    d) Do one of the following:
+        0) Convey the Minimal Corresponding Source under the terms of this License, and the Corresponding Application Code in a form suitable for, and under terms that permit, the user to recombine or relink the Application with a modified version of the Linked Version to produce a modified Combined Work, in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.
+        1) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (a) uses at run time a copy of the Library already present on the user's computer system, and (b) will operate properly with a modified version of the Library that is interface-compatible with the Linked Version.
+    e) Provide Installation Information, but only if you would otherwise be required to provide such information under section 6 of the GNU GPL, and only to the extent that such information is necessary to install and execute a modified version of the Combined Work produced by recombining or relinking the Application with a modified version of the Linked Version. (If you use option 4d0, the Installation Information must accompany the Minimal Corresponding Source and Corresponding Application Code. If you use option 4d1, you must provide the Installation Information in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.)
+
+5. Combined Libraries.
+
+You may place library facilities that are a work based on the Library side by side in a single library together with other library facilities that are not Applications and are not covered by this License, and convey such a combined library under terms of your choice, if you do both of the following:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities, conveyed under the terms of this License.
+    b) Give prominent notice with the combined library that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+6. Revised Versions of the GNU Lesser General Public License.
+
+The Free Software Foundation may publish revised and/or new versions of the GNU Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library as you received it specifies that a certain numbered version of the GNU Lesser General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that published version or of any later version published by the Free Software Foundation. If the Library as you received it does not specify a version number of the GNU Lesser General Public License, you may choose any version of the GNU Lesser General Public License ever published by the Free Software Foundation.
+
+If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library.
+
+
+
+
+

GNU GENERAL PUBLIC LICENSE - Version 2

+
+
+
 Version 2, June 1991
+
+Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA
+
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too.
+
+When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things.
+
+To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it.
+
+For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights.
+
+We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software.
+
+Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations.
+
+Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all.
+
+The precise terms and conditions for copying, distribution and modification follow.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does.
+
+1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change.
+    b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License.
+    c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code.
+
+4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it.
+
+6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License.
+
+7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation.
+
+10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Programs
+
+If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms.
+
+To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the program's name and an idea of what it does.
+Copyright (C) yyyy  name of author
+
+This program is free software; you can redistribute it and/or
+modify it under the terms of the GNU General Public License
+as published by the Free Software Foundation; either version 2
+of the License, or (at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this when it starts in an interactive mode:
+
+Gnomovision version 69, Copyright (C) year name of author
+Gnomovision comes with ABSOLUTELY NO WARRANTY; for details
+type `show w'.  This is free software, and you are welcome
+to redistribute it under certain conditions; type `show c'
+for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright
+interest in the program `Gnomovision'
+(which makes passes at compilers) written
+by James Hacker.
+
+signature of Ty Coon, 1 April 1989
+Ty Coon, President of Vice
+
+
+
+
+

DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2

+
+
+
            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+                    Version 2, December 2004
+
+ Copyright (C) 2004 Sam Hocevar
+  14 rue de Plaisance, 75014 Paris, France
+ Everyone is permitted to copy and distribute verbatim or modified
+ copies of this license document, and changing it is allowed as long
+ as the name is changed.
+
+            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. You just DO WHAT THE FUCK YOU WANT TO.
+
+
+
+
+

License of Node.js

+
+
+
Node.js is licensed for use as follows:
+
+"""
+Copyright Node.js contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+This license applies to parts of Node.js originating from the
+https://github.com/joyent/node repository:
+
+"""
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+The Node.js license applies to all parts of Node.js that are not externally
+maintained libraries.
+
+The externally maintained libraries used by Node.js are:
+
+- Acorn, located at deps/acorn, is licensed as follows:
+  """
+    Copyright (C) 2012-2018 by various contributors (see AUTHORS)
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- Acorn plugins, located at deps/acorn-plugins, is licensed as follows:
+  """
+    Copyright (C) 2017-2018 by Adrian Heine
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- c-ares, located at deps/cares, is licensed as follows:
+  """
+    Copyright (c) 2007 - 2018, Daniel Stenberg with many contributors, see AUTHORS
+    file.
+
+    Copyright 1998 by the Massachusetts Institute of Technology.
+
+    Permission to use, copy, modify, and distribute this software and its
+    documentation for any purpose and without fee is hereby granted, provided that
+    the above copyright notice appear in all copies and that both that copyright
+    notice and this permission notice appear in supporting documentation, and that
+    the name of M.I.T. not be used in advertising or publicity pertaining to
+    distribution of the software without specific, written prior permission.
+    M.I.T. makes no representations about the suitability of this software for any
+    purpose.  It is provided "as is" without express or implied warranty.
+  """
+
+- ICU, located at deps/icu-small, is licensed as follows:
+  """
+    COPYRIGHT AND PERMISSION NOTICE (ICU 58 and later)
+
+    Copyright © 1991-2019 Unicode, Inc. All rights reserved.
+    Distributed under the Terms of Use in https://www.unicode.org/copyright.html.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of the Unicode data files and any associated documentation
+    (the "Data Files") or Unicode software and any associated documentation
+    (the "Software") to deal in the Data Files or Software
+    without restriction, including without limitation the rights to use,
+    copy, modify, merge, publish, distribute, and/or sell copies of
+    the Data Files or Software, and to permit persons to whom the Data Files
+    or Software are furnished to do so, provided that either
+    (a) this copyright and permission notice appear with all copies
+    of the Data Files or Software, or
+    (b) this copyright and permission notice appear in associated
+    Documentation.
+
+    THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
+    ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT OF THIRD PARTY RIGHTS.
+    IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
+    NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
+    DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+    DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+    TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+    PERFORMANCE OF THE DATA FILES OR SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale,
+    use or other dealings in these Data Files or Software without prior
+    written authorization of the copyright holder.
+
+    ---------------------
+
+    Third-Party Software Licenses
+
+    This section contains third-party software notices and/or additional
+    terms for licensed third-party software components included within ICU
+    libraries.
+
+    1. ICU License - ICU 1.8.1 to ICU 57.1
+
+    COPYRIGHT AND PERMISSION NOTICE
+
+    Copyright (c) 1995-2016 International Business Machines Corporation and others
+    All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, and/or sell copies of the Software, and to permit persons
+    to whom the Software is furnished to do so, provided that the above
+    copyright notice(s) and this permission notice appear in all copies of
+    the Software and that both the above copyright notice(s) and this
+    permission notice appear in supporting documentation.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
+    OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
+    HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY
+    SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER
+    RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
+    CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+    CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale, use
+    or other dealings in this Software without prior written authorization
+    of the copyright holder.
+
+    All trademarks and registered trademarks mentioned herein are the
+    property of their respective owners.
+
+    2. Chinese/Japanese Word Break Dictionary Data (cjdict.txt)
+
+     #     The Google Chrome software developed by Google is licensed under
+     # the BSD license. Other software included in this distribution is
+     # provided under other licenses, as set forth below.
+     #
+     #  The BSD License
+     #  http://opensource.org/licenses/bsd-license.php
+     #  Copyright (C) 2006-2008, Google Inc.
+     #
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     # modification, are permitted provided that the following conditions are met:
+     #
+     #  Redistributions of source code must retain the above copyright notice,
+     # this list of conditions and the following disclaimer.
+     #  Redistributions in binary form must reproduce the above
+     # copyright notice, this list of conditions and the following
+     # disclaimer in the documentation and/or other materials provided with
+     # the distribution.
+     #  Neither the name of  Google Inc. nor the names of its
+     # contributors may be used to endorse or promote products derived from
+     # this software without specific prior written permission.
+     #
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+     # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+     # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+     # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+     # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+     # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+     #
+     #
+     #  The word list in cjdict.txt are generated by combining three word lists
+     # listed below with further processing for compound word breaking. The
+     # frequency is generated with an iterative training against Google web
+     # corpora.
+     #
+     #  * Libtabe (Chinese)
+     #    - https://sourceforge.net/project/?group_id=1519
+     #    - Its license terms and conditions are shown below.
+     #
+     #  * IPADIC (Japanese)
+     #    - http://chasen.aist-nara.ac.jp/chasen/distribution.html
+     #    - Its license terms and conditions are shown below.
+     #
+     #  ---------COPYING.libtabe ---- BEGIN--------------------
+     #
+     #  /*
+     #   * Copyright (c) 1999 TaBE Project.
+     #   * Copyright (c) 1999 Pai-Hsiang Hsiao.
+     #   * All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the TaBE Project nor the names of its
+     #   *   contributors may be used to endorse or promote products derived
+     #   *   from this software without specific prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  /*
+     #   * Copyright (c) 1999 Computer Systems and Communication Lab,
+     #   *                    Institute of Information Science, Academia
+     #       *                    Sinica. All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the Computer Systems and Communication Lab
+     #   *   nor the names of its contributors may be used to endorse or
+     #   *   promote products derived from this software without specific
+     #   *   prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  Copyright 1996 Chih-Hao Tsai @ Beckman Institute,
+     #      University of Illinois
+     #  c-tsai4@uiuc.edu  http://casper.beckman.uiuc.edu/~c-tsai4
+     #
+     #  ---------------COPYING.libtabe-----END--------------------------------
+     #
+     #
+     #  ---------------COPYING.ipadic-----BEGIN-------------------------------
+     #
+     #  Copyright 2000, 2001, 2002, 2003 Nara Institute of Science
+     #  and Technology.  All Rights Reserved.
+     #
+     #  Use, reproduction, and distribution of this software is permitted.
+     #  Any copy of this software, whether in its original form or modified,
+     #  must include both the above copyright notice and the following
+     #  paragraphs.
+     #
+     #  Nara Institute of Science and Technology (NAIST),
+     #  the copyright holders, disclaims all warranties with regard to this
+     #  software, including all implied warranties of merchantability and
+     #  fitness, in no event shall NAIST be liable for
+     #  any special, indirect or consequential damages or any damages
+     #  whatsoever resulting from loss of use, data or profits, whether in an
+     #  action of contract, negligence or other tortuous action, arising out
+     #  of or in connection with the use or performance of this software.
+     #
+     #  A large portion of the dictionary entries
+     #  originate from ICOT Free Software.  The following conditions for ICOT
+     #  Free Software applies to the current dictionary as well.
+     #
+     #  Each User may also freely distribute the Program, whether in its
+     #  original form or modified, to any third party or parties, PROVIDED
+     #  that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear
+     #  on, or be attached to, the Program, which is distributed substantially
+     #  in the same form as set out herein and that such intended
+     #  distribution, if actually made, will neither violate or otherwise
+     #  contravene any of the laws and regulations of the countries having
+     #  jurisdiction over the User or the intended distribution itself.
+     #
+     #  NO WARRANTY
+     #
+     #  The program was produced on an experimental basis in the course of the
+     #  research and development conducted during the project and is provided
+     #  to users as so produced on an experimental basis.  Accordingly, the
+     #  program is provided without any warranty whatsoever, whether express,
+     #  implied, statutory or otherwise.  The term "warranty" used herein
+     #  includes, but is not limited to, any warranty of the quality,
+     #  performance, merchantability and fitness for a particular purpose of
+     #  the program and the nonexistence of any infringement or violation of
+     #  any right of any third party.
+     #
+     #  Each user of the program will agree and understand, and be deemed to
+     #  have agreed and understood, that there is no warranty whatsoever for
+     #  the program and, accordingly, the entire risk arising from or
+     #  otherwise connected with the program is assumed by the user.
+     #
+     #  Therefore, neither ICOT, the copyright holder, or any other
+     #  organization that participated in or was otherwise related to the
+     #  development of the program and their respective officials, directors,
+     #  officers and other employees shall be held liable for any and all
+     #  damages, including, without limitation, general, special, incidental
+     #  and consequential damages, arising out of or otherwise in connection
+     #  with the use or inability to use the program or any product, material
+     #  or result produced or otherwise obtained by using the program,
+     #  regardless of whether they have been advised of, or otherwise had
+     #  knowledge of, the possibility of such damages at any time during the
+     #  project or thereafter.  Each user will be deemed to have agreed to the
+     #  foregoing by his or her commencement of use of the program.  The term
+     #  "use" as used herein includes, but is not limited to, the use,
+     #  modification, copying and distribution of the program and the
+     #  production of secondary products from the program.
+     #
+     #  In the case where the program, whether in its original form or
+     #  modified, was distributed or delivered to or received by a user from
+     #  any person, organization or entity other than ICOT, unless it makes or
+     #  grants independently of ICOT any specific warranty to the user in
+     #  writing, such person, organization or entity, will also be exempted
+     #  from and not be held liable to the user for any such damages as noted
+     #  above as far as the program is concerned.
+     #
+     #  ---------------COPYING.ipadic-----END----------------------------------
+
+    3. Lao Word Break Dictionary Data (laodict.txt)
+
+     #  Copyright (c) 2013 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     # Project: http://code.google.com/p/lao-dictionary/
+     # Dictionary: http://lao-dictionary.googlecode.com/git/Lao-Dictionary.txt
+     # License: http://lao-dictionary.googlecode.com/git/Lao-Dictionary-LICENSE.txt
+     #              (copied below)
+     #
+     #  This file is derived from the above dictionary, with slight
+     #  modifications.
+     #  ----------------------------------------------------------------------
+     #  Copyright (C) 2013 Brian Eugene Wilson, Robert Martin Campbell.
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification,
+     #  are permitted provided that the following conditions are met:
+     #
+     #
+     # Redistributions of source code must retain the above copyright notice, this
+     #  list of conditions and the following disclaimer. Redistributions in
+     #  binary form must reproduce the above copyright notice, this list of
+     #  conditions and the following disclaimer in the documentation and/or
+     #  other materials provided with the distribution.
+     #
+     #
+     # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
+     # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     # OF THE POSSIBILITY OF SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    4. Burmese Word Break Dictionary Data (burmesedict.txt)
+
+     #  Copyright (c) 2014 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     #  This list is part of a project hosted at:
+     #    github.com/kanyawtech/myanmar-karen-word-lists
+     #
+     #  --------------------------------------------------------------------------
+     #  Copyright (c) 2013, LeRoy Benjamin Sharon
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification, are permitted provided that the following conditions
+     #  are met: Redistributions of source code must retain the above
+     #  copyright notice, this list of conditions and the following
+     #  disclaimer.  Redistributions in binary form must reproduce the
+     #  above copyright notice, this list of conditions and the following
+     #  disclaimer in the documentation and/or other materials provided
+     #  with the distribution.
+     #
+     #    Neither the name Myanmar Karen Word Lists, nor the names of its
+     #    contributors may be used to endorse or promote products derived
+     #    from this software without specific prior written permission.
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     #  CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     #  INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     #  MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     #  DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
+     #  BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+     #  EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+     #  TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+     #  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+     #  ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
+     #  TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
+     #  THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+     #  SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    5. Time Zone Database
+
+      ICU uses the public domain data and code derived from Time Zone
+    Database for its time zone support. The ownership of the TZ database
+    is explained in BCP 175: Procedure for Maintaining the Time Zone
+    Database section 7.
+
+     # 7.  Database Ownership
+     #
+     #    The TZ database itself is not an IETF Contribution or an IETF
+     #    document.  Rather it is a pre-existing and regularly updated work
+     #    that is in the public domain, and is intended to remain in the
+     #    public domain.  Therefore, BCPs 78 [RFC5378] and 79 [RFC3979] do
+     #    not apply to the TZ Database or contributions that individuals make
+     #    to it.  Should any claims be made and substantiated against the TZ
+     #    Database, the organization that is providing the IANA
+     #    Considerations defined in this RFC, under the memorandum of
+     #    understanding with the IETF, currently ICANN, may act in accordance
+     #    with all competent court orders.  No ownership claims will be made
+     #    by ICANN or the IETF Trust on the database or the code.  Any person
+     #    making a contribution to the database or code waives all rights to
+     #    future claims in that contribution or in the TZ Database.
+
+    6. Google double-conversion
+
+    Copyright 2006-2011, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- libuv, located at deps/uv, is licensed as follows:
+  """
+    libuv is licensed for use as follows:
+
+    == ==
+    Copyright (c) 2015-present libuv project contributors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+    == ==
+
+    This license applies to parts of libuv originating from the
+    https://github.com/joyent/libuv repository:
+
+    == ==
+
+    Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+
+    == ==
+
+    This license applies to all parts of libuv that are not externally
+    maintained libraries.
+
+    The externally maintained libraries used by libuv are:
+
+      - tree.h (from FreeBSD), copyright Niels Provos. Two clause BSD license.
+
+      - inet_pton and inet_ntop implementations, contained in src/inet.c, are
+        copyright the Internet Systems Consortium, Inc., and licensed under the ISC
+        license.
+
+      - stdint-msvc2008.h (from msinttypes), copyright Alexander Chemeris. Three
+        clause BSD license.
+
+      - pthread-fixes.c, copyright Google Inc. and Sony Mobile Communications AB.
+        Three clause BSD license.
+
+      - android-ifaddrs.h, android-ifaddrs.c, copyright Berkeley Software Design
+        Inc, Kenneth MacKay and Emergya (Cloud4all, FP7/2007-2013, grant agreement
+        n° 289016). Three clause BSD license.
+  """
+
+- llhttp, located at deps/llhttp, is licensed as follows:
+  """
+    This software is licensed under the MIT License.
+
+    Copyright Fedor Indutny, 2018.
+
+    Permission is hereby granted, free of charge, to any person obtaining a
+    copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to permit
+    persons to whom the Software is furnished to do so, subject to the
+    following conditions:
+
+    The above copyright notice and this permission notice shall be included
+    in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+    OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+    NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+    DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+    OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+    USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- OpenSSL, located at deps/openssl, is licensed as follows:
+  """
+    Copyright (c) 1998-2019 The OpenSSL Project.  All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    1. Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+    2. Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+
+    3. All advertising materials mentioning features or use of this
+    software must display the following acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
+
+    4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
+    endorse or promote products derived from this software without
+    prior written permission. For written permission, please contact
+    openssl-core@openssl.org.
+
+    5. Products derived from this software may not be called "OpenSSL"
+    nor may "OpenSSL" appear in their names without prior written
+    permission of the OpenSSL Project.
+
+    6. Redistributions of any form whatsoever must retain the following
+    acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
+
+    THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
+    EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+    PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
+    ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+    NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+    LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+    HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+    STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+    ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+    OF THE POSSIBILITY OF SUCH DAMAGE.
+    == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == ==
+
+    This product includes cryptographic software written by Eric Young
+    (eay@cryptsoft.com).  This product includes software written by Tim
+    Hudson (tjh@cryptsoft.com).
+  """
+
+- Punycode.js, located at lib/punycode.js, is licensed as follows:
+  """
+    Copyright Mathias Bynens <https://mathiasbynens.be/>
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- V8, located at deps/v8, is licensed as follows:
+  """
+    This license applies to all parts of V8 that are not externally
+    maintained libraries.  The externally maintained libraries used by V8
+    are:
+
+      - PCRE test suite, located in
+        test/mjsunit/third_party/regexp-pcre/regexp-pcre.js.  This is based on the
+        test suite from PCRE-7.3, which is copyrighted by the University
+        of Cambridge and Google, Inc.  The copyright notice and license
+        are embedded in regexp-pcre.js.
+
+      - Layout tests, located in test/mjsunit/third_party/object-keys.  These are
+        based on layout tests from webkit.org which are copyrighted by
+        Apple Computer, Inc. and released under a 3-clause BSD license.
+
+      - Strongtalk assembler, the basis of the files assembler-arm-inl.h,
+        assembler-arm.cc, assembler-arm.h, assembler-ia32-inl.h,
+        assembler-ia32.cc, assembler-ia32.h, assembler-x64-inl.h,
+        assembler-x64.cc, assembler-x64.h, assembler-mips-inl.h,
+        assembler-mips.cc, assembler-mips.h, assembler.cc and assembler.h.
+        This code is copyrighted by Sun Microsystems Inc. and released
+        under a 3-clause BSD license.
+
+      - Valgrind client API header, located at src/third_party/valgrind/valgrind.h
+        This is released under the BSD license.
+
+      - The Wasm C/C++ API headers, located at third_party/wasm-api/wasm.{h,hh}
+        This is released under the Apache license. The API's upstream prototype
+        implementation also formed the basis of V8's implementation in
+        src/wasm/c-api.cc.
+
+    These libraries have their own licenses; we recommend you read them,
+    as their terms may differ from the terms below.
+
+    Further license information can be found in LICENSE files located in
+    sub-directories.
+
+    Copyright 2014, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- SipHash, located at deps/v8/src/third_party/siphash, is licensed as follows:
+  """
+    SipHash reference C implementation
+
+    Copyright (c) 2016 Jean-Philippe Aumasson <jeanphilippe.aumasson@gmail.com>
+
+    To the extent possible under law, the author(s) have dedicated all
+    copyright and related and neighboring rights to this software to the public
+    domain worldwide. This software is distributed without any warranty.
+  """
+
+- zlib, located at deps/zlib, is licensed as follows:
+  """
+    zlib.h -- interface of the 'zlib' general purpose compression library
+    version 1.2.11, January 15th, 2017
+
+    Copyright (C) 1995-2017 Jean-loup Gailly and Mark Adler
+
+    This software is provided 'as-is', without any express or implied
+    warranty.  In no event will the authors be held liable for any damages
+    arising from the use of this software.
+
+    Permission is granted to anyone to use this software for any purpose,
+    including commercial applications, and to alter it and redistribute it
+    freely, subject to the following restrictions:
+
+    1. The origin of this software must not be misrepresented; you must not
+    claim that you wrote the original software. If you use this software
+    in a product, an acknowledgment in the product documentation would be
+    appreciated but is not required.
+    2. Altered source versions must be plainly marked as such, and must not be
+    misrepresented as being the original software.
+    3. This notice may not be removed or altered from any source distribution.
+
+    Jean-loup Gailly        Mark Adler
+    jloup@gzip.org          madler@alumni.caltech.edu
+  """
+
+- npm, located at deps/npm, is licensed as follows:
+  """
+    The npm application
+    Copyright (c) npm, Inc. and Contributors
+    Licensed on the terms of The Artistic License 2.0
+
+    Node package dependencies of the npm application
+    Copyright (c) their respective copyright owners
+    Licensed on their respective license terms
+
+    The npm public registry at https://registry.npmjs.org
+    and the npm website at https://www.npmjs.com
+    Operated by npm, Inc.
+    Use governed by terms published on https://www.npmjs.com
+
+    "Node.js"
+    Trademark Joyent, Inc., https://joyent.com
+    Neither npm nor npm, Inc. are affiliated with Joyent, Inc.
+
+    The Node.js application
+    Project of Node Foundation, https://nodejs.org
+
+    The npm Logo
+    Copyright (c) Mathias Pettersson and Brian Hammond
+
+    "Gubblebum Blocky" typeface
+    Copyright (c) Tjarda Koster, https://jelloween.deviantart.com
+    Used with permission
+
+    --------
+
+    The Artistic License 2.0
+
+    Copyright (c) 2000-2006, The Perl Foundation.
+
+    Everyone is permitted to copy and distribute verbatim copies
+    of this license document, but changing it is not allowed.
+
+    Preamble
+
+    This license establishes the terms under which a given free software
+    Package may be copied, modified, distributed, and/or redistributed.
+    The intent is that the Copyright Holder maintains some artistic
+    control over the development of that Package while still keeping the
+    Package available as open source and free software.
+
+    You are always permitted to make arrangements wholly outside of this
+    license directly with the Copyright Holder of a given Package.  If the
+    terms of this license do not permit the full use that you propose to
+    make of the Package, you should contact the Copyright Holder and seek
+    a different licensing arrangement.
+
+    Definitions
+
+        "Copyright Holder" means the individual(s) or organization(s)
+        named in the copyright notice for the entire Package.
+
+        "Contributor" means any party that has contributed code or other
+        material to the Package, in accordance with the Copyright Holder's
+        procedures.
+
+        "You" and "your" means any person who would like to copy,
+        distribute, or modify the Package.
+
+        "Package" means the collection of files distributed by the
+        Copyright Holder, and derivatives of that collection and/or of
+        those files. A given Package may consist of either the Standard
+        Version, or a Modified Version.
+
+        "Distribute" means providing a copy of the Package or making it
+        accessible to anyone else, or in the case of a company or
+        organization, to others outside of your company or organization.
+
+        "Distributor Fee" means any fee that you charge for Distributing
+        this Package or providing support for this Package to another
+        party.  It does not mean licensing fees.
+
+        "Standard Version" refers to the Package if it has not been
+        modified, or has been modified only in ways explicitly requested
+        by the Copyright Holder.
+
+        "Modified Version" means the Package, if it has been changed, and
+        such changes were not explicitly requested by the Copyright
+        Holder.
+
+        "Original License" means this Artistic License as Distributed with
+        the Standard Version of the Package, in its current version or as
+        it may be modified by The Perl Foundation in the future.
+
+        "Source" form means the source code, documentation source, and
+        configuration files for the Package.
+
+        "Compiled" form means the compiled bytecode, object code, binary,
+        or any other form resulting from mechanical transformation or
+        translation of the Source form.
+
+    Permission for Use and Modification Without Distribution
+
+    (1)  You are permitted to use the Standard Version and create and use
+    Modified Versions for any purpose without restriction, provided that
+    you do not Distribute the Modified Version.
+
+    Permissions for Redistribution of the Standard Version
+
+    (2)  You may Distribute verbatim copies of the Source form of the
+    Standard Version of this Package in any medium without restriction,
+    either gratis or for a Distributor Fee, provided that you duplicate
+    all of the original copyright notices and associated disclaimers.  At
+    your discretion, such verbatim copies may or may not include a
+    Compiled form of the Package.
+
+    (3)  You may apply any bug fixes, portability changes, and other
+    modifications made available from the Copyright Holder.  The resulting
+    Package will still be considered the Standard Version, and as such
+    will be subject to the Original License.
+
+    Distribution of Modified Versions of the Package as Source
+
+    (4)  You may Distribute your Modified Version as Source (either gratis
+    or for a Distributor Fee, and with or without a Compiled form of the
+    Modified Version) provided that you clearly document how it differs
+    from the Standard Version, including, but not limited to, documenting
+    any non-standard features, executables, or modules, and provided that
+    you do at least ONE of the following:
+
+        (a)  make the Modified Version available to the Copyright Holder
+        of the Standard Version, under the Original License, so that the
+        Copyright Holder may include your modifications in the Standard
+        Version.
+
+        (b)  ensure that installation of your Modified Version does not
+        prevent the user installing or running the Standard Version. In
+        addition, the Modified Version must bear a name that is different
+        from the name of the Standard Version.
+
+        (c)  allow anyone who receives a copy of the Modified Version to
+        make the Source form of the Modified Version available to others
+        under
+
+            (i)  the Original License or
+
+            (ii)  a license that permits the licensee to freely copy,
+            modify and redistribute the Modified Version using the same
+            licensing terms that apply to the copy that the licensee
+            received, and requires that the Source form of the Modified
+            Version, and of any works derived from it, be made freely
+            available in that license fees are prohibited but Distributor
+            Fees are allowed.
+
+    Distribution of Compiled Forms of the Standard Version
+    or Modified Versions without the Source
+
+    (5)  You may Distribute Compiled forms of the Standard Version without
+    the Source, provided that you include complete instructions on how to
+    get the Source of the Standard Version.  Such instructions must be
+    valid at the time of your distribution.  If these instructions, at any
+    time while you are carrying out such distribution, become invalid, you
+    must provide new instructions on demand or cease further distribution.
+    If you provide valid instructions or cease distribution within thirty
+    days after you become aware that the instructions are invalid, then
+    you do not forfeit any of your rights under this license.
+
+    (6)  You may Distribute a Modified Version in Compiled form without
+    the Source, provided that you comply with Section 4 with respect to
+    the Source of the Modified Version.
+
+    Aggregating or Linking the Package
+
+    (7)  You may aggregate the Package (either the Standard Version or
+    Modified Version) with other packages and Distribute the resulting
+    aggregation provided that you do not charge a licensing fee for the
+    Package.  Distributor Fees are permitted, and licensing fees for other
+    components in the aggregation are permitted. The terms of this license
+    apply to the use and Distribution of the Standard or Modified Versions
+    as included in the aggregation.
+
+    (8) You are permitted to link Modified and Standard Versions with
+    other works, to embed the Package in a larger work of your own, or to
+    build stand-alone binary or bytecode versions of applications that
+    include the Package, and Distribute the result without restriction,
+    provided the result does not expose a direct interface to the Package.
+
+    Items That are Not Considered Part of a Modified Version
+
+    (9) Works (including, but not limited to, modules and scripts) that
+    merely extend or make use of the Package, do not, by themselves, cause
+    the Package to be a Modified Version.  In addition, such works are not
+    considered parts of the Package itself, and are not subject to the
+    terms of this license.
+
+    General Provisions
+
+    (10)  Any use, modification, and distribution of the Standard or
+    Modified Versions is governed by this Artistic License. By using,
+    modifying or distributing the Package, you accept this license. Do not
+    use, modify, or distribute the Package, if you do not accept this
+    license.
+
+    (11)  If your Modified Version has been derived from a Modified
+    Version made by someone other than you, you are nevertheless required
+    to ensure that your Modified Version complies with the requirements of
+    this license.
+
+    (12)  This license does not grant you the right to use any trademark,
+    service mark, tradename, or logo of the Copyright Holder.
+
+    (13)  This license includes the non-exclusive, worldwide,
+    free-of-charge patent license to make, have made, use, offer to sell,
+    sell, import and otherwise transfer the Package with respect to any
+    patent claims licensable by the Copyright Holder that are necessarily
+    infringed by the Package. If you institute patent litigation
+    (including a cross-claim or counterclaim) against any party alleging
+    that the Package constitutes direct or contributory patent
+    infringement, then this Artistic License to you shall terminate on the
+    date that such litigation is filed.
+
+    (14)  Disclaimer of Warranty:
+    THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS
+    IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR
+    NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL
+    LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL
+    BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+    DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF
+    ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+    --------
+  """
+
+- GYP, located at tools/gyp, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- inspector_protocol, located at tools/inspector_protocol, is licensed as follows:
+  """
+    // Copyright 2016 The Chromium Authors. All rights reserved.
+    //
+    // Redistribution and use in source and binary forms, with or without
+    // modification, are permitted provided that the following conditions are
+    // met:
+    //
+    //    * Redistributions of source code must retain the above copyright
+    // notice, this list of conditions and the following disclaimer.
+    //    * Redistributions in binary form must reproduce the above
+    // copyright notice, this list of conditions and the following disclaimer
+    // in the documentation and/or other materials provided with the
+    // distribution.
+    //    * Neither the name of Google Inc. nor the names of its
+    // contributors may be used to endorse or promote products derived from
+    // this software without specific prior written permission.
+    //
+    // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- jinja2, located at tools/inspector_protocol/jinja2, is licensed as follows:
+  """
+    Copyright (c) 2009 by the Jinja Team, see AUTHORS for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+
+        * The names of the contributors may not be used to endorse or
+          promote products derived from this software without specific
+          prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- markupsafe, located at tools/inspector_protocol/markupsafe, is licensed as follows:
+  """
+    Copyright (c) 2010 by Armin Ronacher and contributors.  See AUTHORS
+    for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms of the software as well
+    as documentation, with or without modification, are permitted provided
+    that the following conditions are met:
+
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+
+    * The names of the contributors may not be used to endorse or
+      promote products derived from this software without specific
+      prior written permission.
+
+    THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+    CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
+    NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+    OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+    EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+    PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+    PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+    NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+    SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+    DAMAGE.
+  """
+
+- cpplint.py, located at tools/cpplint.py, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- ESLint, located at tools/node_modules/eslint, is licensed as follows:
+  """
+    Copyright JS Foundation and other contributors, https://js.foundation
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- babel-eslint, located at tools/node_modules/babel-eslint, is licensed as follows:
+  """
+    Copyright (c) 2014-2016 Sebastian McKenzie <sebmck@gmail.com>
+
+    MIT License
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- gtest, located at test/cctest/gtest, is licensed as follows:
+  """
+    Copyright 2008, Google Inc.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+        * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- nghttp2, located at deps/nghttp2, is licensed as follows:
+  """
+    The MIT License
+
+    Copyright (c) 2012, 2014, 2015, 2016 Tatsuhiro Tsujikawa
+    Copyright (c) 2012, 2014, 2015, 2016 nghttp2 contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- node-inspect, located at deps/node-inspect, is licensed as follows:
+  """
+    Copyright Node.js contributors. All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+  """
+
+- large_pages, located at src/large_pages, is licensed as follows:
+  """
+     Copyright (C) 2018 Intel Corporation
+
+     Permission is hereby granted, free of charge, to any person obtaining a copy
+     of this software and associated documentation files (the "Software"),
+     to deal in the Software without restriction, including without limitation
+     the rights to use, copy, modify, merge, publish, distribute, sublicense,
+     and/or sell copies of the Software, and to permit persons to whom
+     the Software is furnished to do so, subject to the following conditions:
+
+     The above copyright notice and this permission notice shall be included
+     in all copies or substantial portions of the Software.
+
+     THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+     OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+     FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
+     THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES
+     OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+     ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
+     OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- caja, located at lib/internal/freeze_intrinsics.js, is licensed as follows:
+  """
+     Adapted from SES/Caja - Copyright (C) 2011 Google Inc.
+     Copyright (C) 2018 Agoric
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+  """
+
+- brotli, located at deps/brotli, is licensed as follows:
+  """
+    Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- HdrHistogram, located at deps/histogram, is licensed as follows:
+  """
+    The code in this repository code was Written by Gil Tene, Michael Barker,
+    and Matt Warren, and released to the public domain, as explained at
+    http://creativecommons.org/publicdomain/zero/1.0/
+
+    For users of this code who wish to consume it under the "BSD" license
+    rather than under the public domain or CC0 contribution text mentioned
+    above, the code found under this directory is *also* provided under the
+    following license (commonly referred to as the BSD 2-Clause License). This
+    license does not detract from the above stated release of the code into
+    the public domain, and simply represents an additional license granted by
+    the Author.
+
+    -----------------------------------------------------------------------------
+    ** Beginning of "BSD 2-Clause License" text. **
+
+     Copyright (c) 2012, 2013, 2014 Gil Tene
+     Copyright (c) 2014 Michael Barker
+     Copyright (c) 2014 Matt Warren
+     All rights reserved.
+
+     Redistribution and use in source and binary forms, with or without
+     modification, are permitted provided that the following conditions are met:
+
+     1. Redistributions of source code must retain the above copyright notice,
+        this list of conditions and the following disclaimer.
+
+     2. Redistributions in binary form must reproduce the above copyright notice,
+        this list of conditions and the following disclaimer in the documentation
+        and/or other materials provided with the distribution.
+
+     THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+     AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+     IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+     ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+     LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+     INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+     CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+     THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- node-heapdump, located at src/heap_utils.cc, is licensed as follows:
+  """
+    ISC License
+
+    Copyright (c) 2012, Ben Noordhuis <info@bnoordhuis.nl>
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    === src/compat.h src/compat-inl.h ===
+
+    ISC License
+
+    Copyright (c) 2014, StrongLoop Inc.
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- rimraf, located at lib/internal/fs/rimraf.js, is licensed as follows:
+  """
+    The ISC License
+
+    Copyright (c) Isaac Z. Schlueter and Contributors
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+    IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- uvwasi, located at deps/uvwasi, is licensed as follows:
+  """
+    MIT License
+
+    Copyright (c) 2019 Colin Ihrig and Contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in all
+    copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+    SOFTWARE.
+  """
+
+
+
+
+

MICROSOFT SOFTWARE LICENSE TERMS

+
+
+
MICROSOFT VISUAL STUDIO CODE
+
+These license terms are an agreement between you and Microsoft Corporation (or based on where you live, one of its affiliates). They apply to the software named above. The terms also apply to any Microsoft services or updates for the software, except to the extent those have different terms.
+
+IF YOU COMPLY WITH THESE LICENSE TERMS, YOU HAVE THE RIGHTS BELOW.
+
+    1. INSTALLATION AND USE RIGHTS.
+        a. General. You may use any number of copies of the software to develop and test your applications, including deployment within your internal corporate network.
+        b. Demo use. The uses permitted above include use of the software in demonstrating your applications.
+        c. Third Party Components. The software may include third party components with separate legal notices or governed by other agreements, as may be described in the ThirdPartyNotices file accompanying the software.
+        d. Extensions. The software gives you the option to download other Microsoft and third party software packages from our extension marketplace or package managers. Those packages are under their own licenses, and not this agreement. Microsoft does not distribute, license or provide any warranties for any of the third party packages. By accessing or using our extension marketplace, you agree to the extension marketplace terms located at https://aka.ms/vsmarketplace-ToU.
+    2. DATA.
+        a. Data Collection. The software may collect information about you and your use of the software, and send that to Microsoft. Microsoft may use this information to provide services and improve our products and services. You may opt-out of many of these scenarios, but not all, as described in the product documentation located at https://code.visualstudio.com/docs/supporting/faq#_how-to-disable-telemetry-reporting. There may also be some features in the software that may enable you and Microsoft to collect data from users of your applications. If you use these features, you must comply with applicable law, including providing appropriate notices to users of your applications together with Microsoft’s privacy statement. Our privacy statement is located at https://go.microsoft.com/fwlink/?LinkID=824704. You can learn more about data collection and use in the help documentation and our privacy statement. Your use of the software operates as your consent to these practices.
+        b. Processing of Personal Data. To the extent Microsoft is a processor or subprocessor of personal data in connection with the software, Microsoft makes the commitments in the European Union General Data Protection Regulation Terms of the Online Services Terms to all customers effective May 25, 2018, at https://go.microsoft.com/?linkid=9840733.
+    3. UPDATES. The software may periodically check for updates and download and install them for you. You may obtain updates only from Microsoft or authorized sources. Microsoft may need to update your system to provide you with updates. You agree to receive these automatic updates without any additional notice. Updates may not include or support all existing software features, services, or peripheral devices. If you do not want automatic updates, you may turn them off by following the instructions in the documentation at https://go.microsoft.com/fwlink/?LinkID=616397.
+    4. FEEDBACK. If you give feedback about the software to Microsoft, you give to Microsoft, without charge, the right to use, share and commercialize your feedback in any way and for any purpose. You will not give feedback that is subject to a license that requires Microsoft to license its software or documentation to third parties because we include your feedback in them. These rights survive this agreement.
+    5. SCOPE OF LICENSE. This license applies to the Visual Studio Code product. Source code for Visual Studio Code is available at https://github.com/Microsoft/vscode under the MIT license agreement. The software is licensed, not sold. This agreement only gives you some rights to use the software. Microsoft reserves all other rights. Unless applicable law gives you more rights despite this limitation, you may use the software only as expressly permitted in this agreement. In doing so, you must comply with any technical limitations in the software that only allow you to use it in certain ways. You may not
+        reverse engineer, decompile or disassemble the software, or otherwise attempt to derive the source code for the software except and solely to the extent required by third party licensing terms governing use of certain open source components that may be included in the software;
+        remove, minimize, block or modify any notices of Microsoft or its suppliers in the software;
+        use the software in any way that is against the law;
+        share, publish, rent or lease the software, or provide the software as a stand-alone offering for others to use.
+    6. SUPPORT SERVICES. Because this software is “as is,” we may not provide support services for it.
+    7. ENTIRE AGREEMENT. This agreement, and the terms for supplements, updates, Internet-based services and support services that you use, are the entire agreement for the software and support services.
+    8. EXPORT RESTRICTIONS. You must comply with all domestic and international export laws and regulations that apply to the software, which include restrictions on destinations, end-users, and end use. For further information on export restrictions, see https://www.microsoft.com/exporting.
+    9. APPLICABLE LAW. If you acquired the software in the United States, Washington law applies to interpretation of and claims for breach of this agreement, and the laws of the state where you live apply to all other claims. If you acquired the software in any other country, its laws apply.
+    10. CONSUMER RIGHTS; REGIONAL VARIATIONS. This agreement describes certain legal rights. You may have other rights, including consumer rights, under the laws of your state or country. Separate and apart from your relationship with Microsoft, you may also have rights with respect to the party from which you acquired the software. This agreement does not change those other rights if the laws of your state or country do not permit it to do so. For example, if you acquired the software in one of the below regions, or mandatory country law applies, then the following provisions apply to you:
+        a. Australia. You have statutory guarantees under the Australian Consumer Law and nothing in this agreement is intended to affect those rights.
+        b. Canada. If you acquired this software in Canada, you may stop receiving updates by turning off the automatic update feature, disconnecting your device from the Internet (if and when you re-connect to the Internet, however, the software will resume checking for and installing updates), or uninstalling the software. The product documentation, if any, may also specify how to turn off updates for your specific device or software.
+        c. Germany and Austria.
+            Warranty. The properly licensed software will perform substantially as described in any Microsoft materials that accompany the software. However, Microsoft gives no contractual guarantee in relation to the licensed software.
+            Limitation of Liability. In case of intentional conduct, gross negligence, claims based on the Product Liability Act, as well as, in case of death or personal or physical injury, Microsoft is liable according to the statutory law.
+
+        Subject to the foregoing clause (ii), Microsoft will only be liable for slight negligence if Microsoft is in breach of such material contractual obligations, the fulfillment of which facilitate the due performance of this agreement, the breach of which would endanger the purpose of this agreement and the compliance with which a party may constantly trust in (so-called "cardinal obligations"). In other cases of slight negligence, Microsoft will not be liable for slight negligence.
+    11. DISCLAIMER OF WARRANTY. The software is licensed “as-is.” You bear the risk of using it. Microsoft gives no express warranties, guarantees or conditions. To the extent permitted under your local laws, Microsoft excludes the implied warranties of merchantability, fitness for a particular purpose and non-infringement.
+
+    12. LIMITATION ON AND EXCLUSION OF DAMAGES. You can recover from Microsoft and its suppliers only direct damages up to U.S. $5.00. You cannot recover any other damages, including consequential, lost profits, special, indirect or incidental damages.
+
+    This limitation applies to (a) anything related to the software, services, content (including code) on third party Internet sites, or third party applications; and (b) claims for breach of contract, breach of warranty, guarantee or condition, strict liability, negligence, or other tort to the extent permitted by applicable law.
+
+    It also applies even if Microsoft knew or should have known about the possibility of the damages. The above limitation or exclusion may not apply to you because your state or country may not allow the exclusion or limitation of incidental, consequential or other damages.
+
+
+
+

==Java

+
+
+

The devonfw community +${project.version}, ${buildtime}

+
+
+

devonfw provides a solution to building applications which combine best-in-class frameworks and libraries as well as industry proven practices and code conventions. +It massively speeds up development, reduces risks and helps you to deliver better results.

+
+
+

The following sections contain the complete compendium of devon4j, the Java stack of devonfw. +With devon4j we support both spring and quarkus as major frameworks. +However, the general coding patterns are based on common Java standards mainly from Jakarta EE and therefore do not differ between those frameworks. +Therefore, the general section contains all the documentation that is universal to Java and does not differ between the two frameworks. +Only the sections spring and quarkus contain documentation that is specific to the respective approach.

+
+
+

If you’re trying to decide which of the two frameworks to use, have a look at this guide.

+
+
+

You can also read the latest version of this documentation online at the following sources:

+
+ +
+
+
+
+
+

1. General

+
+
+

Here you will find documentation and code-patterns for developing with Java in general, independent of the framework you choose.

+
+ +
+

==Architecture

+
+
+

There are many different views that are summarized by the term architecture. First, we will introduce the key principles and architecture principles of devonfw. Then, we will go into details of the architecture of an application.

+
+
+

1.1. Key Principles

+
+

For devonfw we follow these fundamental key principles for all decisions about architecture, design, or choosing standards, libraries, and frameworks:

+
+
+
    +
  • +

    KISS
    +Keep it small and simple

    +
  • +
  • +

    Open
    +Commitment to open standards and solutions (no required dependencies to commercial or vendor-specific standards or solutions)

    +
  • +
  • +

    Patterns
    +We concentrate on providing patterns, best-practices and examples rather than writing framework code.

    +
  • +
  • +

    Solid
    +We pick solutions that are established and have been proven to be solid and robust in real-life (business) projects.

    +
  • +
+
+
+
+

1.2. Architecture Principles

+
+

Additionally we define the following principles that our architecture is based on:

+
+
+
    +
  • +

    Component Oriented Design
    +We follow a strictly component oriented design to address the following sub-principles:

    +
    +
      +
    • +

      Separation of Concerns

      +
    • +
    • +

      Reusability and avoiding redundant code

      +
    • +
    • +

      Information Hiding via component API and its exchangeable implementation treated as secret.

      +
    • +
    • +

      Design by Contract for self-contained, descriptive, and stable component APIs.

      +
    • +
    • +

      Layering as well as separation of business logic from technical code for better maintenance.

      +
    • +
    • +

      Data Sovereignty (and high cohesion with low coupling) says that a component is responsible for its data and changes to this data shall only happen via the component. Otherwise, maintenance problems will arise to ensure that data remains consistent. Therefore, interfaces of a component that may be used by other components are designed call-by-value and not call-by-reference.

      +
    • +
    +
    +
  • +
  • +

    Homogeneity
    +Solve similar problems in similar ways and establish a uniform code-style.

    +
  • +
+
+
+

As an architect you should be prepared for the future by reading the TechnoVision.

+
+
+
+

1.3. Application Architecture

+
+

For the architecture of an application we distinguish the following views:

+
+
+
    +
  • +

    The Business Architecture describes an application from the business perspective. It divides the application into business components and with full abstraction of technical aspects.

    +
  • +
  • +

    The Technical Architecture describes an application from the technical implementation perspective. It divides the application into technical layers and defines which technical products and frameworks are used to support these layers.

    +
  • +
  • +

    The Infrastructure Architecture describes an application from the operational infrastructure perspective. It defines the nodes used to run the application including clustering, load-balancing and networking. This view is not explored further in this guide.

    +
  • +
+
+
+
Business Architecture
+
+

The business architecture divides the application into business components. A business component has a well-defined responsibility that it encapsulates. All aspects related to that responsibility have to be implemented within that business component. Further, the business architecture defines the dependencies between the business components. These dependencies need to be free of cycles. A business component exports its functionality via well-defined interfaces as a self-contained API. A business component may use another business component via its API and compliant with the dependencies defined by the business architecture.

+
+
+

As the business domain and logic of an application can be totally different, the devonfw can not define a standardized business architecture. Depending on the business domain it has to be defined from scratch or from a domain reference architecture template. For very small systems it may be suitable to define just a single business component containing all the code.

+
+
+
+
Technical Architecture
+
+

The technical architecture divides the application into technical layers based on the multilayered architecture. A layer is a unit of code with the same category such as a service or presentation logic. So, a layer is often supported by a technical framework. Each business component can therefore be split into component parts for each layer. However, a business component may not have component parts for every layer (e.g. only a presentation part that utilizes logic from other components).

+
+
+

An overview of the technical reference architecture of the devonfw is given by figure "Technical Reference Architecture". +It defines the following layers visualized as horizontal boxes:

+
+
+ +
+
+

Also, you can see the (business) components as vertical boxes (e.g. A and X) and how they are composed out of component parts each one assigned to one of the technical layers.

+
+
+

Further, there are technical components for cross-cutting aspects grouped by the gray box on the left. Here is a complete list:

+
+ +
+
+devonfw architecture blueprint +
+
Figure 5. Technical Reference Architecture
+
+
+

Please click on the architecture image to open it as SVG and click on the layers and cross-cutting topics to open the according documentation guide.

+
+
+

We reflect this architecture in our code as described in our coding conventions allowing a traceability of business components, use-cases, layers, etc. into the code and giving +developers a sound orientation within the project.

+
+
+

Further, the architecture diagram shows the allowed dependencies illustrated by the dark green connectors. +Within a business component a component part can call the next component part on the layer directly below via a dependency on its API (vertical connectors). +While this is natural and obvious, it is generally forbidden to have dependencies upwards the layers +or to skip a layer by a direct dependency on a component part two or more layers below. +The general dependencies allowed between business components are defined by the business architecture. +In our reference architecture diagram we assume that the business component A1 is allowed to depend +on component A2. Therefore, a use-case within the logic component part of A1 is allowed to call a +use-case from A2 via a dependency on the component API. The same applies for dialogs on the client layer. +This is illustrated by the horizontal connectors. Please note that persistence entities are part of the API of the data-access component part so only the logic component part of the same +business component may depend on them.

+
+
+

The technical architecture has to address non-functional requirements:

+
+
+
    +
  • +

    scalability
    +is established by keeping state in the client and making the server state-less (except for login session). Via load-balancers new server nodes can be added to improve performance (horizontal scaling).

    +
  • +
  • +

    availability and reliability
    +are addressed by clustering with redundant nodes avoiding any single-point-of failure. If one node fails the system is still available. Further, the software has to be robust so there are no dead-locks or other bad effects that can make the system unavailable or not reliable.

    +
  • +
  • +

    security
    +is achieved in the devonfw by the right templates and best-practices that avoid vulnerabilities. See security guidelines for further details.

    +
  • +
  • +

    performance
    +is obtained by choosing the right products and proper configurations. While the actual implementation of the application matters for performance, a proper design is important as it is the key to allow performance-optimizations (see e.g. caching).

    +
  • +
+
+
+
Technology Stack
+
+

The technology stack of the devonfw is illustrated by the following table.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1. Technology Stack of devonfw
TopicDetailStandardSuggested implementation

runtime

language & VM

Java

Oracle JDK

runtime

servlet-container

JEE

tomcat

component management

dependency injection

JSR330 & JSR250

spring

configuration

framework

-

spring-boot

persistence

OR-mapper

JPA

hibernate

batch

framework

JSR352

spring-batch

service

SOAP services

JAX-WS

CXF

service

REST services

JAX-RS

CXF

logging

framework

slf4j

logback

validation

framework

beanvalidation/JSR303

hibernate-validator

security

Authentication & Authorization

JAAS

spring-security

monitoring

framework

JMX

spring

monitoring

HTTP Bridge

HTTP & JSON

jolokia

AOP

framework

dynamic proxies

spring AOP

+
+ +
+

==Configuration

+
+
+

An application needs to be configurable in order to allow internal setup (like CDI) but also to allow externalized configuration of a deployed package (e.g. integration into runtime environment). We rely on a comprehensive configuration approach following a "convention over configuration" pattern. This guide adds on to this by detailed instructions and best-practices how to deal with configurations.

+
+
+

In general we distinguish the following kinds of configuration that are explained in the following sections:

+
+
+ +
+
+
+
+
+

1.4. Internal Application Configuration

+
+

The application configuration contains all internal settings and wirings of the application (bean wiring, database mappings, etc.) and is maintained by the application developers at development time.

+
+
+

For more detail of Spring stack, see here

+
+
+
+

1.5. Externalized Configuration

+
+

Externalized configuration is a configuration that is provided separately to a deployment package and can be maintained undisturbed by re-deployments.

+
+
+
Environment Configuration
+
+

The environment configuration contains configuration parameters (typically port numbers, host names, passwords, logins, timeouts, certificates, etc.) specific for the different environments. These are under the control of the operators responsible for the application.

+
+
+

The environment configuration is maintained in application.properties files, defining various properties. +These properties are explained in the corresponding configuration sections of the guides for each topic:

+
+
+ +
+
+

Make sure your properties are thoroughly documented by providing a comment to each property. This inline documentation is most valuable for your operating department.

+
+
+

More about structuring your application.properties files can be read here for Spring.

+
+
+

For Quarkus, please refer to Quarkus Config Reference for more details.

+
+
+
+
Business Configuration
+
+

Often applications do not need business configuration. In case they do it should typically be editable by administrators via the GUI. The business configuration values should therefore be stored in the database in key/value pairs.

+
+
+

Therefore we suggest to create a dedicated table with (at least) the following columns:

+
+
+
    +
  • +

    ID

    +
  • +
  • +

    Property name

    +
  • +
  • +

    Property type (Boolean, Integer, String)

    +
  • +
  • +

    Property value

    +
  • +
  • +

    Description

    +
  • +
+
+
+

According to the entries in this table, an administrative GUI may show a generic form to modify business configuration. Boolean values should be shown as checkboxes, integer and string values as text fields. The values should be validated according to their type so an error is raised if you try to save a string in an integer property for example.

+
+
+

We recommend the following base layout for the hierarchical business configuration:

+
+
+

component.[subcomponent].[subcomponent].propertyname

+
+
+
+
+

1.6. Security

+
+

Often you need to have passwords (for databases, third-party services, etc.) as part of your configuration. These are typically environment specific (see above). However, with DevOps and continuous-deployment you might be tempted to commit such configurations into your version-control (e.g. git). Doing that with plain text passwords is a severe problem especially for production systems. Never do that! Instead we offer some suggestions how to deal with sensible configurations:

+
+
+
Password Encryption
+
+

A simple but reasonable approach is to configure the passwords encrypted with a master-password. The master-password should be a strong secret that is specific for each environment. It must never be committed to version-control.

+
+
+

For Spring, we use jasypt-spring-boot. For more details, see here

+
+
+

For Quarkus, see here

+
+
+
Is this Security by Obscurity?
+
+
    +
  • +

    Yes, from the point of view to protect the passwords on the target environment this is nothing but security by obscurity. If an attacker somehow got full access to the machine this will only cause him to spend some more time.

    +
  • +
  • +

    No, if someone only gets the configuration file. So all your developers might have access to the version-control where the config is stored. Others might have access to the software releases that include this configs. But without the master-password that should only be known to specific operators none else can decrypt the password (except with brute-force what will take a very long time, see jasypt for details).

    +
  • +
+
+
+ +
+

==Coding Conventions

+
+
+

The code should follow general conventions for Java (see Oracle Naming Conventions, Google Java Style, etc.). We consider this as common sense and provide configurations for SonarQube and related tools such as Checkstyle instead of repeating this here.

+
+
+
+
+
+

1.7. Naming

+
+

Besides general Java naming conventions, we follow the additional rules listed here explicitly:

+
+
+
    +
  • +

    Always use short but speaking names (for types, methods, fields, parameters, variables, constants, etc.).

    +
  • +
  • +

    Strictly avoid special characters in technical names (for files, types, fields, methods, properties, variables, database tables, columns, constraints, etc.). In other words only use Latin alphanumeric ASCII characters with the common allowed technical separators for the according context (e.g. underscore) for technical names (even excluding whitespaces).

    +
  • +
  • +

    For package segments and type names prefer singular forms (CustomerEntity instead of CustomersEntity). Only use plural forms when there is no singular or it is really semantically required (e.g. for a container that contains multiple of such objects).

    +
  • +
  • +

    Avoid having duplicate type names. The name of a class, interface, enum or annotation should be unique within your project unless this is intentionally desired in a special and reasonable situation.

    +
  • +
  • +

    Avoid artificial naming constructs such as prefixes (I*) or suffixes (*IF) for interfaces.

    +
  • +
  • +

    Use CamelCase even for abbreviations (XmlUtil instead of XMLUtil)

    +
  • +
  • +

    Avoid property/field names where the second character is upper-case at all (e.g. 'aBc'). See #1095 for details.

    +
  • +
  • +

    Names of Generics should be easy to understand. Where suitable follow the common rule E=Element, T=Type, K=Key, V=Value but feel free to use longer names for more specific cases such as ID, DTO or ENTITY. The capitalized naming helps to distinguish a generic type from a regular class.

    +
  • +
+
+
+
+

1.8. Packages

+
+

Java Packages are the most important element to structure your code. We use a strict packaging convention to map technical layers and business components (slices) to the code (See technical architecture for further details). By using the same names in documentation and code we create a strong link that gives orientation and makes it easy to find from business requirements, specifications or story tickets into the code and back.

+
+
+

For a devon4j based application we use the following Java-Package schema:

+
+
+
+
«root».«component».«layer»[.«detail»]
+
+
+
+

E.g. in our example application we find the Spring Data repositories for the ordermanagement component in the package com.devonfw.application.mtsj.ordermanagement.dataaccess.api.repo

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2. Segments of package schema
SegmentDescriptionExample

«root»

Is the basic Java Package name-space of your app. Typically we suggest to use «group».«artifact» where «group» is your maven/gradle groupId corresponding to your organization or IT project owning the code following common Java Package conventions. The segment «artifact» is your maven/gradle artifactId and is typically the technical name of your app.

com.devonfw.application.mtsj

«component»

The (business) component the code belongs to. It is defined by the business architecture and uses terms from the business domain. Use the implicit component general for code not belonging to a specific component (foundation code).

salesmanagement

«layer»

The name of the technical layer (See technical architecture). Details are described for the modern project structure and for the classic project structure.

logic

«detail»

Here you are free to further divide your code into sub-components and other concerns according to the size of your component part. If you want to strictly separate API from implementation you should start «detail» with «scope» that is explained below.

dao

«scope»

The scope which is one of api (official API to be used by other layers or components), base (basic code to be reused by other implementations) and impl (implementation that should never be imported from outside). This segment was initially mandatory but due to trends such as microservices, lean, and agile we decided to make it optional and do not force anybody to use it.

api

+
+

Please note that devon4j library modules for spring use com.devonfw.module as «root» and the name of the module as «component». E.g. the API of our beanmapping module can be found in the package com.devonfw.module.beanmapping.common.api.

+
+
+
+

1.9. Code Tasks

+
+

Code spots that need some rework can be marked with the following task tags. These are already properly pre-configured in your development environment for auto completion and to view tasks you are responsible for. It is important to keep the number of code tasks low. Therefore, every member of the team should be responsible for the overall code quality. So if you change a piece of code and hit a code task that you can resolve in a reliable way, please do this as part of your change and remove the according tag.

+
+
+
TODO
+
+

Used to mark a piece of code that is not yet complete (typically because it can not be completed due to a dependency on something that is not ready).

+
+
+
+
 // TODO «author» «description»
+
+
+
+

A TODO tag is added by the author of the code who is also responsible for completing this task.

+
+
+
+
FIXME
+
+
+
 // FIXME «author» «description»
+
+
+
+

A FIXME tag is added by the author of the code or someone who found a bug he can not fix right now. The «author» who added the FIXME is also responsible for completing this task. This is very similar to a TODO but with a higher priority. FIXME tags indicate problems that should be resolved before a release is completed while TODO tags might have to stay for a longer time.

+
+
+
+
REVIEW
+
+
+
 // REVIEW «responsible» («reviewer») «description»
+
+
+
+

A REVIEW tag is added by a reviewer during a code review. Here the original author of the code is responsible to resolve the REVIEW tag and the reviewer is assigning this task to him. This is important for feedback and learning and has to be aligned with a review "process" where people talk to each other and get into discussion. In smaller or local teams a peer-review is preferable but this does not scale for large or even distributed teams.

+
+
+
+
+

1.10. Code-Documentation

+
+

As a general goal, the code should be easy to read and understand. Besides clear naming, the documentation is important. We follow these rules:

+
+
+
    +
  • +

    APIs (especially component interfaces) are properly documented with JavaDoc.

    +
  • +
  • +

    JavaDoc shall provide actual value - we do not write JavaDoc to satisfy tools such as checkstyle but to express information not already available in the signature.

    +
  • +
  • +

    We make use of {@link} tags in JavaDoc to make it more expressive.

    +
  • +
  • +

    JavaDoc of APIs describes how to use the type or method and not how the implementation internally works.

    +
  • +
  • +

    To document implementation details, we use code comments (e.g. // we have to flush explicitly to ensure version is up-to-date). This is only needed for complex logic.

    +
  • +
  • +

    Avoid the pointless {@inheritDoc} as since Java 1.5 there is the @Override annotation for overridden methods and your JavaDoc is inherited automatically even without any JavaDoc comment at all.

    +
  • +
+
+
+
+

1.11. Code-Style

+
+

This section gives you best practices to write better code and avoid pitfalls and mistakes.

+
+
+
BLOBs
+
+

Avoid using byte[] for BLOBs as this will load them entirely into your memory. This will cause performance issues or out of memory errors. Instead, use streams when dealing with BLOBs. For further details see BLOB support.

+
+
+
+
Stateless Programming
+
+

When implementing logic as components or beans of your container using dependency injection, we strongly encourage stateless programming. +This is not about data objects like an entity or transfer-object that are stateful by design. +Instead this applies to all classes annotated with @Named, @ApplicationScoped, @Stateless, etc. and all their super-classes. +These classes especially include your repositories, use-cases, and REST services. +Such classes shall never be modified after initialization. +Methods called at runtime (after initialization via the container) do not assign fields (member variables of your class) or mutate the object stored in a field. +This allows your component or bean to be stateless and thread-safe. +Therefore it can be initialized as a singleton so only one instance is created and shared across all threads of the application. +Here is an example:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcApproveContractImpl implements UcApproveContract {
+
+  // bad
+  private String contractOwner;
+
+  private MyState state;
+
+  @Override
+  public void approve(Contract contract) {
+    this.contractOwner = contract.getOwner();
+    this.contractOwner = this.contractOwner.toLowerCase(Locale.US);
+    this.state.setAdmin(this.contractOwner.endsWith("admin"));
+    if (this.state.isAdmin()) {
+      ...
+    } else {
+      ...
+    }
+  }
+
+  // fine
+  @Override
+  public void approveContract(Contract contract) {
+    String contractOwner = contract.getOwner().toLowerCase(Locale.US);
+    if (contractOwner.endsWith("admin")) {
+      ...
+    } else {
+      ...
+    }
+  }
+}
+
+
+
+

As you can see in the bad code fields of the class are assigned when the method approve is called. +So multiple users and therefore threads calling this method concurrently can interfere and override this state causing side-effects on parallel threads. +This will lead to nasty bugs and errors that are hard to trace down. +They will not occur in simple tests but for sure in production with real users. +Therefore never do this and implement your functionality stateless. +That is keeping all state in local variables and strictly avoid modifying fields or their value as illustrated in the fine code. +If you find yourself passing many parameters between methods that all represent state, you can easily create a separate class that encapsulates this state. +However, then you need to create this state object in your method as local variable and pass it between methods as parameter:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcApproveContractImpl implements UcApproveContract {
+
+  // fine
+  @Override
+  public void approveContract(Contract contract) {
+    String contractOwner = contract.getOwner().toLowerCase(Locale.US);
+    MyState state = new MyState();
+    state.setAdmin(contractOwner.endsWith("admin"));
+    doApproveContract(contract, state);
+  }
+}
+
+
+
+
+
Closing Resources
+
+

Resources such as streams (InputStream, OutputStream, Reader, Writer) or transactions need to be handled properly. Therefore, it is important to follow these rules:

+
+
+
    +
  • +

    Each resource has to be closed properly, otherwise you will get out of file handles, TX sessions, memory leaks or the like

    +
  • +
  • +

    Where possible avoid to deal with such resources manually. That is why we are recommending @Transactional for transactions in devonfw (see Transaction Handling).

    +
  • +
  • +

    In case you have to deal with resources manually (e.g. binary streams) ensure to close them properly. See the example below for details.

    +
  • +
+
+
+

Closing streams and other such resources is error prone. Have a look at the following example:

+
+
+
+
// bad
+try {
+  InputStream in = new FileInputStream(file);
+  readData(in);
+  in.close();
+} catch (IOException e) {
+  throw new IllegalStateException("Failed to read data.", e);
+}
+
+
+
+

The code above is wrong as in case of an IOException the InputStream is not properly closed. In a server application such mistakes can cause severe errors that typically will only occur in production. As such resources implement the AutoCloseable interface you can use the try-with-resource syntax to write correct code. The following code shows a correct version of the example:

+
+
+
+
// fine
+try (InputStream in = new FileInputStream(file)) {
+  readData(in);
+} catch (IOException e) {
+  throw new IllegalStateException("Failed to read data.", e);
+}
+
+
+
+
+
Catching and handling Exceptions
+
+

When catching exceptions always ensure the following:

+
+
+
    +
  • +

    Never call printStackTrace() method on an exception

    +
  • +
  • +

    Either log or wrap and re-throw the entire caught exception. Be aware that the cause(s) of an exception is very valuable information. If you lose such information by improper exception-handling you may be unable to properly analyse production problems which can cause severe issues.

    +
    +
      +
    • +

      If you wrap and re-throw an exception ensure that the caught exception is passed as cause to the newly created and thrown exception.

      +
    • +
    • +

      If you log an exception ensure that the entire exception is passed as argument to the logger (and not only the result of getMessage() or toString() on the exception).

      +
    • +
    +
    +
  • +
  • +

    See exception handling

    +
  • +
+
+
+
+
Lambdas and Streams
+
+

With Java8 you have cool new features like lambdas and monads (like Stream, CompletableFuture, Optional, etc.). +However, these new features can also be misused or lead to code that is hard to read or debug. To avoid pain, we give you the following best practices:

+
+
+
    +
  1. +

    Learn how to use the new features properly before using. Developers are often keen on using cool new features. When you do your first experiments in your project code you will cause deep pain and might be ashamed afterwards. Please study the features properly. Even Java8 experts still write for loops to iterate over collections, so only use these features where it really makes sense.

    +
  2. +
  3. +

    Streams shall only be used in fluent API calls as a Stream can not be forked or reused.

    +
  4. +
  5. +

    Each stream has to have exactly one terminal operation.

    +
  6. +
  7. +

    Do not write multiple statements into lambda code:

    +
    +
    +
    // bad
    +collection.stream().map(x -> {
    +Foo foo = doSomething(x);
    +...
    +return foo;
    +}).collect(Collectors.toList());
    +
    +
    +
    +

    This style makes the code hard to read and debug. Never do that! Instead, extract the lambda body to a private method with a meaningful name:

    +
    +
    +
    +
    // fine
    +collection.stream().map(this::convertToFoo).collect(Collectors.toList());
    +
    +
    +
  8. +
  9. +

    Do not use parallelStream() in general code (that will run on server side) unless you know exactly what you are doing and what is going on under the hood. Some developers might think that using parallel streams is a good idea as it will make the code faster. However, if you want to do performance optimizations talk to your technical lead (architect). Many features such as security and transactions will rely on contextual information that is associated with the current thread. Hence, using parallel streams will most probably cause serious bugs. Only use them for standalone (CLI) applications or for code that is just processing large amounts of data.

    +
  10. +
  11. +

    Do not perform operations on a sub-stream inside a lambda:

    +
    +
    +
    set.stream().flatMap(x -> x.getChildren().stream().filter(this::isSpecial)).collect(Collectors.toList()); // bad
    +set.stream().flatMap(x -> x.getChildren().stream()).filter(this::isSpecial).collect(Collectors.toList()); // fine
    +
    +
    +
  12. +
  13. +

    Only use collect at the end of the stream:

    +
    +
    +
    set.stream().collect(Collectors.toList()).forEach(...) // bad
    +set.stream().peek(...).collect(Collectors.toList()) // fine
    +
    +
    +
  14. +
  15. +

    Lambda parameters with Types inference

    +
    +
    +
    (String a, Float b, Byte[] c) -> a.toString() + Float.toString(b) + Arrays.toString(c)  // bad
    +(a,b,c)  -> a.toString() + Float.toString(b) + Arrays.toString(c)  // fine
    +
    +Collections.sort(personList, (Person p1, Person p2) -> p1.getSurName().compareTo(p2.getSurName()));  // bad
    +Collections.sort(personList, (p1, p2) -> p1.getSurName().compareTo(p2.getSurName()));  // fine
    +
    +
    +
  16. +
  17. +

    Avoid Return Braces and Statement

    +
    +
    +
     a ->  { return a.toString(); } // bad
    + a ->  a.toString();   // fine
    +
    +
    +
  18. +
  19. +

    Avoid Parentheses with Single Parameter

    +
    +
    +
    (a) -> a.toString(); // bad
    + a -> a.toString();  // fine
    +
    +
    +
  20. +
  21. +

    Avoid if/else inside foreach method. Use Filter method & comprehension

    +
    +
    +
    // bad
    +static public Iterator<String> TwitterHandles(Iterator<Author> authors, string company) {
    +    final List result = new ArrayList<String> ();
    +    foreach (Author a : authors) {
    +      if (a.Company.equals(company)) {
    +        String handle = a.TwitterHandle;
    +        if (handle != null)
    +          result.Add(handle);
    +      }
    +    }
    +    return result;
    +  }
    +
    +
    +
    +
    +
    // fine
    +public List<String> twitterHandles(List<Author> authors, String company) {
    +    return authors.stream()
    +            .filter(a -> null != a && a.getCompany().equals(company))
    +            .map(a -> a.getTwitterHandle())
    +            .collect(toList());
    +  }
    +
    +
    +
  22. +
+
+
+
+
Optionals
+
+

With Optional you can wrap values to avoid a NullPointerException (NPE). However, it is not a good code-style to use Optional for every parameter or result to express that it may be null. For such case use @Nullable or even better instead annotate @NotNull where null is not acceptable.

+
+
+

However, Optional can be used to prevent NPEs in fluent calls (due to the lack of the elvis operator):

+
+
+
+
Long id;
+id = fooCto.getBar().getBar().getId(); // may cause NPE
+id = Optional.ofNullable(fooCto).map(FooCto::getBar).map(BarCto::getBar).map(BarEto::getId).orElse(null); // null-safe
+
+
+
+
+
Encoding
+
+

Encoding (esp. Unicode with combining characters and surrogates) is a complex topic. Please study this topic if you have to deal with encodings and processing of special characters. For the basics follow these recommendations:

+
+
+
    +
  • +

    Whenever possible prefer unicode (UTF-8 or better) as encoding. This especially impacts your databases and has to be defined upfront as it typically can not be changed (easily) afterwards.

    +
  • +
  • +

    Do not cast from byte to char (unicode characters can be composed of multiple bytes, such cast may only work for ASCII characters)

    +
  • +
  • +

    Never convert the case of a String using the default locale (esp. when writing generic code like in devonfw). E.g. if you do "HI".toLowerCase() and your system locale is Turkish, then the output will be "hı" instead of "hi", which can lead to wrong assumptions and serious problems. If you want to do a "universal" case conversion always explicitly use an according western locale (e.g. toLowerCase(Locale.US)). Consider using a helper class (see e.g. CaseHelper) or create your own little static utility for that in your project.

    +
  • +
  • +

    Write your code independent from the default encoding (system property file.encoding) - this will most likely differ in JUnit from production environment

    +
    +
      +
    • +

      Always provide an encoding when you create a String from byte[]: new String(bytes, encoding)

      +
    • +
    • +

      Always provide an encoding when you create a Reader or Writer : new InputStreamReader(inStream, encoding)

      +
    • +
    +
    +
  • +
+
+
+
+
Prefer general API
+
+

Avoid unnecessary strong bindings:

+
+
+
    +
  • +

    Do not bind your code to implementations such as Vector or ArrayList instead of List

    +
  • +
  • +

    In APIs for input (=parameters) always consider to make little assumptions:

    +
    +
      +
    • +

      prefer Collection over List or Set where the difference does not matter (e.g. only use Set when you require uniqueness or highly efficient contains)

      +
    • +
    • +

      consider preferring Collection<? extends Foo> over Collection<Foo> when Foo is an interface or super-class

      +
    • +
    +
    +
  • +
+
+
+
+
Prefer primitive boolean
+
+

Unless in rare cases where you need to allow a flag being null avoid using the object type Boolean.

+
+
+
+
// bad
+public Boolean isEmpty() {
+  return size() == 0;
+}
+
+
+
+

Instead always use the primitive boolean type:

+
+
+
+
// fine
+public boolean isEmpty() {
+  return size() == 0;
+}
+
+
+
+

The only known excuse is for flags in embeddable types due to limitations of hibernate.

+
+ +
+

== Project structure

+
+
+

In devonfw we want to give clear structure and guidance for building applications. +This also allows tools such as CobiGen or sonar-devon4j-plugin to "understand" the code. +Also this helps developers going from one devonfw project to the next one to quickly understand the code-base. +If every developer knows where to find what, the project gets more efficient. +A long time ago maven standardized the project structure with src/main/java, etc. and turned chaos into structure. +With devonfw we experienced the same for the codebase (what is inside src/main/java).

+
+
+

We initially started devon4j based on spring and spring-boot and proposed a classic project structure. +With modern cloud-native trends we added a modern project structure, that is more lean and up-to-date with the latest market trends.

+
+ +
+

== Dependency Injection +Dependency injection is one of the most important design patterns and is a key principle to a modular and component based architecture. +The Java Standard for dependency injection is javax.inject (JSR330) that we use in combination with JSR250. +Additionally, for scoping you can use CDI (Context and Dependency Injection) from JSR365.

+
+
+

There are many frameworks which support this standard including all recent Java EE application servers. +Therefore in devonfw we rely on these open standards and can propagate patterns and code examples that work independent from the underlying frameworks.

+
+
+
+
+

1.12. Key Principles

+
+

Within dependency injection a bean is typically a reusable unit of your application providing an encapsulated functionality. +This bean can be injected into other beans and it should in general be replaceable. +As an example we can think of a use-case, a repository, etc. +As best practice we use the following principles:

+
+
+
    +
  • +

    Stateless implementation
    +By default such beans shall be implemented stateless. If you store state information in member variables you can easily run into concurrency problems and nasty bugs. This is easy to avoid by using local variables and separate state classes for complex state-information. Try to avoid stateful beans wherever possible. Only add state if you are fully aware of what you are doing and properly document this as a warning in your JavaDoc.

    +
  • +
  • +

    Usage of Java standards
    +We use common standards (see above) that makes our code portable. Therefore we use standardized annotations like @Inject (javax.inject.Inject) instead of proprietary annotations such as @Autowired. Generally we avoid proprietary annotations in business code (logic layer).

    +
  • +
  • +

    Simple injection-style
    +In general you can choose between constructor, setter or field injection. For simplicity we recommend to do private field injection as it is very compact and easy to maintain. We believe that constructor injection is bad for maintenance especially in case of inheritance (if you change the dependencies you need to refactor all sub-classes). Private field injection and public setter injection are very similar but setter injection is much more verbose (often you are even forced to have javadoc for all public methods). If you are writing re-usable library code setter injection will make sense as it is more flexible. In a business application you typically do not need that and can save a lot of boiler-plate code if you use private field injection instead. Nowadays you are using container infrastructure also for your tests (see testing) so there is no need to inject manually (what would require a public setter).

    +
  • +
  • +

    KISS
    +To follow the KISS (keep it small and simple) principle we avoid advanced features (e.g. custom AOP, non-singleton beans) and only use them where necessary.

    +
  • +
  • +

    Separation of API and implementation
    +For important components we should separate a self-contained API documented with JavaDoc from its implementation. Code from other components that wants to use the implementation shall only rely on the API. However, for things that will never be exchanged no API as interface is required you can skip such separation.

    +
  • +
+
+
+
+

1.13. Example Bean

+
+

Here you can see the implementation of an example bean using dependency injection:

+
+
+
+
@ApplicationScoped
+@Named("MyComponent")
+public class MyComponentImpl implements MyComponent {
+  @Inject
+  private MyOtherComponent myOtherComponent;
+
+  @PostConstruct
+  public void init() {
+    // initialization if required (otherwise omit this method)
+  }
+
+  @PreDestroy
+  public void dispose() {
+    // shutdown bean, free resources if required (otherwise omit this method)
+  }
+
+  ...
+}
+
+
+
+

Here MyComponentImpl depends on MyOtherComponent that is injected into the field myOtherComponent because of the @Inject annotation. +To make this work there must be exactly one bean in the container (e.g. spring or quarkus) that is an instance of MyOtherComponent. +In order to put a bean into the container, we can use @ApplicationScoped in case of CDI (required for quarkus) for a stateless bean. +In spring we can omit a CDI annotation and the @Named annotation is already sufficient as a bean is stateless by default in spring. +If we always use @ApplicationScoped we can make this more explicit and more portable across different frameworks. +So in our example we put MyComponentImpl into the container. +That bean will be called MyComponent as we specified in the @Named annotation but we can also omit the name to use the classname as fallback. +Now our bean can be injected into other beans using @Inject annotation either via MyComponent interface (recommended when interface is present) or even directly via MyComponentImpl. +In case you omit the interface, you should also omit the Impl suffix or instead use Bean as suffix.

+
+
+
+

1.14. Multiple bean implementations

+
+

In some cases you might have multiple implementations as beans for the same interface. +The following sub-sections handle the different scenarios to give you guidance.

+
+
+
Only one implementation in container
+
+

In some cases you still have only one implementation active as bean in the container at runtime. +A typical example is that you have different implementations for test and main usage. +This case is easy, as @Inject will always be unique. +The only thing you need to care about is how to configure your framework (spring, quarkus, etc.) to know which implementation to put in the container depending on specific configuration. +In spring this can be achieved via the proprietary @Profile annotation.

+
+
+
+
Injecting all of multiple implementations
+
+

In some situations you may have an interface that defines a kind of "plugin". +You can have multiple implementations in your container and want to have all of them injected. +Then you can request a list with all the bean implementations via the interface as in the following example:

+
+
+
+
  @Inject
+  private List<MyConverter> converters;
+
+
+
+

Your code may iterate over all plugins (converters) and apply them sequentially. +Please note that the injection will fail (at least in spring), when there is no bean available to inject. +So you do not get an empty list injected but will get an exception on startup.

+
+
+
+
Injecting one of multiple implementations
+
+

Another scenario is that you have multiple implementations in your container coexisting, but for injection you may want to choose a specific implementation. +Here you could use the @Named annotation to specify a unique identifier for each implementation what is called qualified injection:

+
+
+
+
@ApplicationScoped
+@Named("UserAuthenticator")
+public class UserAuthenticator implements Authenticator {
+  ...
+}
+@ApplicationScoped
+@Named("ServiceAuthenticator")
+public class ServiceAuthenticator implements Authenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  @Named("UserAuthenticator")
+  private Authenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  @Named("ServiceAuthenticator")
+  private Authenticator authenticator;
+  ...
+}
+
+
+
+

However, we discovered that this pattern is not so great: +The identifiers in the @Named annotation are just strings that could easily break. +You could use constants instead but still this is not the best solution.

+
+
+

In the end you can very much simplify this by just directly injecting the implementation instead:

+
+
+
+
@ApplicationScoped
+public class UserAuthenticator implements Authenticator {
+  ...
+}
+@ApplicationScoped
+public class ServiceAuthenticator implements Authenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  private UserAuthenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  private ServiceAuthenticator authenticator;
+  ...
+}
+
+
+
+

In case you want to strictly decouple from implementations, you can still create dedicated interfaces:

+
+
+
+
public interface UserAuthenticator extends Authenticator {}
+@ApplicationScoped
+public class UserAuthenticatorImpl implements UserAuthenticator {
+  ...
+}
+public interface ServiceAuthenticator extends Authenticator {}
+@ApplicationScoped
+public class ServiceAuthenticatorImpl implements ServiceAuthenticator {
+  ...
+}
+public class MyUserComponent {
+  @Inject
+  private UserAuthenticator authenticator;
+  ...
+}
+public class MyServiceComponent {
+  @Inject
+  private ServiceAuthenticator authenticator;
+  ...
+}
+
+
+
+

However, as you can see this is again introducing additional boiler-plate code. +While the principle to separate API and implementation and strictly decouple from implementation is valuable in general, +you should always consider KISS, lean, and agile in contrast and balance pros and cons instead of blindly following dogmas.

+
+
+
+
+

1.15. Imports

+
+

Here are the import statements for the most important annotations for dependency injection

+
+
+
+
import javax.inject.Inject;
+import javax.inject.Named;
+import javax.enterprise.context.ApplicationScoped;
+// import javax.enterprise.context.RequestScoped;
+// import javax.enterprise.context.SessionScoped;
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+
+
+
+
+

1.16. Dependencies

+
+

Please note that with Jakarta EE the dependencies have changed. +When you want to start with Jakarta EE you should use these dependencies to get the annotations for dependency injection:

+
+
+
+
<!-- Basic injection annotations (JSR-330) -->
+<dependency>
+  <groupId>jakarta.inject</groupId>
+  <artifactId>jakarta.inject-api</artifactId>
+</dependency>
+<!-- Basic lifecycle and security annotations (JSR-250)-->
+<dependency>
+  <groupId>jakarta.annotation</groupId>
+  <artifactId>jakarta.annotation-api</artifactId>
+</dependency>
+<!-- Context and dependency injection API (JSR-365) -->
+<dependency>
+  <groupId>jakarta.enterprise</groupId>
+  <artifactId>jakarta.enterprise.cdi-api</artifactId>
+</dependency>
+
+
+
+

Please note that with quarkus you will get them as transitive dependencies out of the box. +The above Jakarta EE dependencies replace these JEE dependencies:

+
+
+
+
<!-- Basic injection annotations (JSR-330) -->
+<dependency>
+  <groupId>javax.inject</groupId>
+  <artifactId>javax.inject</artifactId>
+</dependency>
+<!-- Basic lifecycle and security annotations (JSR-250)-->
+<dependency>
+  <groupId>javax.annotation</groupId>
+  <artifactId>javax.annotation-api</artifactId>
+</dependency>
+<!-- Context and dependency injection API (JSR-365) -->
+<dependency>
+  <groupId>jakarta.enterprise</groupId>
+  <artifactId>jakarta.enterprise.cdi-api</artifactId>
+</dependency>
+
+
+ +
+

== BLOB support

+
+
+

BLOB stands for Binary Large Object. A BLOB may be an image, an office document, ZIP archive or any other multimedia object. +Often these BLOBs are large. If this is the case, you need to take care that you do not copy all the BLOB data into your application heap, e.g. when providing them via a REST service. +This could easily lead to performance problems or out of memory errors. +A solution for that problem is "streaming" those BLOBs directly from the database to the client. To demonstrate how this can be accomplished, devonfw provides an example.

+
+
+
+

1.17. Further Reading

+ +
+ +
+

== Common

+
+
+

In our coding-conventions we define a clear packaging and layering. +However, there is always cross-cutting code that does not belong to a specific layer such as generic helpers, general code for configuration or integration, etc. +Therefore, we define a package segment common that can be used as «layer» for such cross-cutting code. +Code from any other layer is allowed to access such common code (at least within the same component).

+
+
+ +
+

== Java Persistence API

+
+
+

For mapping java objects to a relational database we use the Java Persistence API (JPA). +As JPA implementation we recommend to use Hibernate. For general documentation about JPA and Hibernate follow the links above as we will not replicate the documentation. Here you will only find guidelines and examples how we recommend to use it properly. The following examples show how to map the data of a database to an entity. As we use JPA we abstract from SQL here. However, you will still need a DDL script for your schema and during maintenance also database migrations. Please follow our SQL guide for such artifacts.

+
+
+
+

1.18. Entity

+
+

Entities are part of the persistence layer and contain the actual data. They are POJOs (Plain Old Java Objects) on which the relational data of a database is mapped and vice versa. The mapping is configured via JPA annotations (javax.persistence). Usually an entity class corresponds to a table of a database and a property to a column of that table. A persistent entity instance then represents a row of the database table.

+
+
+
A Simple Entity
+
+

The following listing shows a simple example:

+
+
+
+
@Entity
+@Table(name="TEXTMESSAGE")
+public class MessageEntity extends ApplicationPersistenceEntity implements Message {
+
+  private String text;
+
+  public String getText() {
+    return this.text;
+  }
+
+  public void setText(String text) {
+    this.text = text;
+  }
+ }
+
+
+
+

The @Entity annotation defines that instances of this class will be entities which can be stored in the database. The @Table annotation is optional and can be used to define the name of the corresponding table in the database. If it is not specified, the simple name of the entity class is used instead.

+
+
+

In order to specify how to map the attributes to columns we annotate the corresponding getter methods (technically private field annotation is also possible but approaches can not be mixed). +The @Id annotation specifies that a property should be used as primary key. +With the help of the @Column annotation it is possible to define the name of the column that an attribute is mapped to as well as other aspects such as nullable or unique. If no column name is specified, the name of the property is used as default.

+
+
+

Note that every entity class needs a constructor with public or protected visibility that does not have any arguments. Moreover, neither the class nor its getters and setters may be final.

+
+
+

Entities should be simple POJOs and not contain business logic.

+
+
+
+
Entities and Datatypes
+
+

Standard datatypes like Integer, BigDecimal, String, etc. are mapped automatically by JPA. Custom datatypes are mapped as serialized BLOB by default which is typically undesired. +In order to map atomic custom datatypes (implementations of SimpleDatatype) we implement an AttributeConverter. Here is a simple example:

+
+
+
+
@Converter(autoApply = true)
+public class MoneyAttributeConverter implements AttributeConverter<Money, BigDecimal> {
+
+  public BigDecimal convertToDatabaseColumn(Money attribute) {
+    return attribute.getValue();
+  }
+
+  public Money convertToEntityAttribute(BigDecimal dbData) {
+    return new Money(dbData);
+  }
+}
+
+
+
+

The annotation @Converter is detected by the JPA vendor if the annotated class is in the packages to scan. Further, autoApply = true implies that the converter is automatically used for all properties of the handled datatype. Therefore all entities with properties of that datatype will automatically be mapped properly (in our example Money is mapped as BigDecimal).

+
+
+

In case you have a composite datatype that you need to map to multiple columns the JPA does not offer a real solution. As a workaround you can use a bean instead of a real datatype and declare it as @Embeddable. If you are using Hibernate you can implement CompositeUserType. Via the @TypeDef annotation it can be registered to Hibernate. If you want to annotate the CompositeUserType implementation itself you also need another annotation (e.g. MappedSuperclass though not technically correct) so it is found by the scan.

+
+
+
Enumerations
+
+

By default JPA maps Enums via their ordinal. Therefore the database will only contain the ordinals (0, 1, 2, etc.). So, inside the database you can not easily understand their meaning. Using @Enumerated with EnumType.STRING allows to map the enum values to their name (Enum.name()). Both approaches are fragile when it comes to code changes and refactoring (if you change the order of the enum values or rename them) after the application is deployed to production. If you want to avoid this and get a robust mapping you can define a dedicated string in each enum value for database representation that you keep untouched. Then you treat the enum just like any other custom datatype.

+
+
+
+
BLOB
+
+

If binary or character large objects (BLOB/CLOB) should be used to store the value of an attribute, e.g. to store an icon, the @Lob annotation should be used as shown in the following listing:

+
+
+
+
@Lob
+public byte[] getIcon() {
+  return this.icon;
+}
+
+
+
+ + + + + +
+ + +Using a byte array will cause problems if BLOBs get large because the entire BLOB is loaded into the RAM of the server and has to be processed by the garbage collector. For larger BLOBs the type Blob and streaming should be used. +
+
+
+
+
public Blob getAttachment() {
+  return this.attachment;
+}
+
+
+
+
+
Date and Time
+
+

To store date and time related values, the temporal annotation can be used as shown in the listing below:

+
+
+
+
@Temporal(TemporalType.TIMESTAMP)
+public java.util.Date getStart() {
+  return start;
+}
+
+
+
+

Until Java 8, the Java data type java.util.Date (or Joda-Time) has to be used. TemporalType defines the granularity. In this case, a precision of nanoseconds is used. If this granularity is not wanted, TemporalType.DATE can be used instead, which only has a granularity of milliseconds. Mixing these two granularities can cause problems when comparing one value to another. This is why we only use TemporalType.TIMESTAMP.

+
+
+
+
QueryDSL and Custom Types
+
+

Using the Aliases API of QueryDSL might result in an InvalidDataAccessApiUsageException when using custom datatypes in entity properties. This can be circumvented in two steps:

+
+
+
    +
  1. +

    Ensure you have the following maven dependencies in your project (core module) to support custom types via the Aliases API:

    +
    +
    +
    <dependency>
    +  <groupId>org.ow2.asm</groupId>
    +  <artifactId>asm</artifactId>
    +</dependency>
    +<dependency>
    +  <groupId>cglib</groupId>
    +  <artifactId>cglib</artifactId>
    +</dependency>
    +
    +
    +
  2. +
  3. +

    Make sure that all your custom types used in entities provide a no-argument constructor with at least visibility level protected.

    +
  4. +
+
+
+
+
+
Primary Keys
+
+

We only use simple Long values as primary keys (IDs). +By default it is auto generated (@GeneratedValue(strategy=GenerationType.AUTO)). +This is already provided by the class com.devonfw.<projectName>.general.dataaccess.api.AbstractPersistenceEntity within the classic project structure respectively com.devonfw.<projectName>.general.domain.model.AbstractPersistenceEntity within the modern project structure, that you can extend.

+
+
+

The reason for this recommendation is simply because using a number (Long) is the most efficient representation for the database. You may also consider to use other types like String or UUID or even composite custom datatypes and this is technically possible. However, please consider that the primary key is used to look up the row from the database table, also in foreign keys and thus in JOINs. Please note that your project sooner or later may reach some complexity where performance really matters. Working on big data and performing JOINs when using types such as String (VARCHAR[2]) as primary and foreign keys will kill your performance. You are still free to make a different choice and devonfw only gives recommendations but does not want to dictate what to do. However, you have been warned about the consequences. If you are well aware of what you are doing, you can still use different types of primary keys. In such case, create your own entity not extending AbstractPersistenceEntity or create your own copy of AbstractPersistenceEntity with a different name and a different type of primary key.

+
+
+

In case you have business oriented keys (often as String), you can define an additional property for it and declare it as unique (@Column(unique=true)). +Be sure to include "AUTO_INCREMENT" in your sql table field ID to be able to persist data (or similar for other databases).

+
+
+
+
+

1.19. Relationships

+
+
n:1 and 1:1 Relationships
+
+

Entities often do not exist independently but are in some relation to each other. For example, for every period of time one of the StaffMember’s of the restaurant example has worked, which is represented by the class WorkingTime, there is a relationship to this StaffMember.

+
+
+

The following listing shows how this can be modeled using JPA:

+
+
+
+
...
+
+@Entity
+public class WorkingTimeEntity {
+   ...
+
+   private StaffMemberEntity staffMember;
+
+   @ManyToOne
+   @JoinColumn(name="STAFFMEMBER")
+   public StaffMemberEntity getStaffMember() {
+      return this.staffMember;
+   }
+
+   public void setStaffMember(StaffMemberEntity staffMember) {
+      this.staffMember = staffMember;
+   }
+}
+
+
+
+

To represent the relationship, an attribute of the type of the corresponding entity class that is referenced has been introduced. The relationship is a n:1 relationship, because every WorkingTime belongs to exactly one StaffMember, but a StaffMember usually worked more often than once.
+This is why the @ManyToOne annotation is used here. For 1:1 relationships the @OneToOne annotation can be used which works basically the same way. To be able to save information about the relation in the database, an additional column in the corresponding table of WorkingTime is needed which contains the primary key of the referenced StaffMember. With the name element of the @JoinColumn annotation it is possible to specify the name of this column.

+
+
+
+
1:n and n:m Relationships
+
+

The relationship of the example listed above is currently a unidirectional one, as there is a getter method for retrieving the StaffMember from the WorkingTime object, but not vice versa.

+
+
+

To make it a bidirectional one, the following code has to be added to StaffMember:

+
+
+
+
  private Set<WorkingTimeEntity> workingTimes;
+
+  @OneToMany(mappedBy="staffMember")
+  public Set<WorkingTimeEntity> getWorkingTimes() {
+    return this.workingTimes;
+  }
+
+  public void setWorkingTimes(Set<WorkingTimeEntity> workingTimes) {
+    this.workingTimes = workingTimes;
+  }
+
+
+
+

To make the relationship bidirectional, the tables in the database do not have to be changed. Instead the column that corresponds to the attribute staffMember in class WorkingTime is used, which is specified by the mappedBy element of the @OneToMany annotation. Hibernate will search for corresponding WorkingTime objects automatically when a StaffMember is loaded.

+
+
+

The problem with bidirectional relationships is that if a WorkingTime object is added to the set or list workingTimes in StaffMember, this does not have any effect in the database unless the staffMember attribute of that WorkingTime object is set. That is why devon4j advises not to use bidirectional relationships but to use queries instead. How to do this is shown here. If a bidirectional relationship should be used nevertheless, appropriate add and remove methods must be used.

+
+
+

For 1:n and n:m relations, devon4j demands that (unordered) Sets and no other collection types are used, as shown in the listing above. The only exception is when an ordering is really needed; then (sorted) lists can be used.
For example, if WorkingTime objects should be sorted by their start time, this could be done like this:

+
+
+
+
  private List<WorkingTimeEntity> workingTimes;
+
+  @OneToMany(mappedBy = "staffMember")
+  @OrderBy("startTime asc")
+  public List<WorkingTimeEntity> getWorkingTimes() {
+    return this.workingTimes;
+  }
+
+  public void setWorkingTimes(List<WorkingTimeEntity> workingTimes) {
+    this.workingTimes = workingTimes;
+  }
+
+
+
+

The value of the @OrderBy annotation consists of an attribute name of the class followed by asc (ascending) or desc (descending).

+
+
+

To store information about a n:m relationship, a separate table has to be used, as one column cannot store several values (at least if the database schema is in first normal form).
+For example if one wanted to extend the example application so that all ingredients of one FoodDrink can be saved and to model the ingredients themselves as entities (e.g. to store additional information about them), this could be modeled as follows (extract of class FoodDrink):

+
+
+
+
  private Set<IngredientEntity> ingredients;
+
+  @ManyToMany()
+  @JoinTable
+  public Set<IngredientEntity> getIngredients() {
+    return this.ingredients;
+  }
+
+  public void setOrders(Set<IngredientEntity> ingredients) {
+    this.ingredients = ingredients;
+  }
+
+
+
+

Information about the relation is stored in a table called BILL_ORDER that has to have two columns, one for referencing the Bill, the other one for referencing the Order. Note that the @JoinTable annotation is not needed in this case because a separate table is the default solution here (same for n:m relations) unless there is a mappedBy element specified.

+
+
+

For 1:n relationships this solution has the disadvantage that more joins (in the database system) are needed to get a Bill with all the Orders it refers to. This might have a negative impact on performance so that the solution to store a reference to the Bill row/entity in the Order’s table is probably the better solution in most cases.

+
+
+

Note that bidirectional n:m relationships are not allowed for applications based on devon4j. Instead a third entity has to be introduced, which "represents" the relationship (it has two n:1 relationships).

+
+
+
+
Eager vs. Lazy Loading
+
+

Using JPA it is possible to use either lazy or eager loading. Eager loading means that for entities retrieved from the database, other entities that are referenced by these entities are also retrieved, whereas lazy loading means that this is only done when they are actually needed, i.e. when the corresponding getter method is invoked.

+
+
+

Application based on devon4j are strongly advised to always use lazy loading. The JPA defaults are:

+
+
+
    +
  • +

    @OneToMany: LAZY

    +
  • +
  • +

    @ManyToMany: LAZY

    +
  • +
  • +

    @ManyToOne: EAGER

    +
  • +
  • +

    @OneToOne: EAGER

    +
  • +
+
+
+

So at least for @ManyToOne and @OneToOne you always need to override the default by providing fetch = FetchType.LAZY.

+
+
+ + + + + +
+ + +Please read the performance guide. +
+
+
+
+
Cascading Relationships
+
+

For relations it is also possible to define whether operations are cascaded (like a recursion) to the related entity. By default, nothing is done in these situations. This can be changed by using the cascade property of the annotation that specifies the relation type (@OneToOne, @ManyToOne, @OneToMany, @ManyToMany). This property accepts a CascadeType that offers the following options:

+
+
+
    +
  • +

    PERSIST (for EntityManager.persist, relevant to inserted transient entities into DB)

    +
  • +
  • +

    REMOVE (for EntityManager.remove to delete entity from DB)

    +
  • +
  • +

    MERGE (for EntityManager.merge)

    +
  • +
  • +

    REFRESH (for EntityManager.refresh)

    +
  • +
  • +

    DETACH (for EntityManager.detach)

    +
  • +
  • +

    ALL (cascade all of the above operations)

    +
  • +
+
+
+

See here for more information.

+
+
+
+
Typesafe Foreign Keys using IdRef
+
+

For simple usage you can use Long for all your foreign keys. +However, as an optional pattern for advanced and type-safe usage, we offer IdRef.

+
+
+
+
+

1.20. Embeddable

+
+

An embeddable Object is a way to group properties of an entity into a separate Java (child) object. Unlike with relationships, the embeddable is not a separate entity and its properties are stored (embedded) in the same table together with the entity. This is helpful to structure and reuse groups of properties.

+
+
+

The following example shows an Address implemented as an embeddable class:

+
+
+
+
@Embeddable
+public class AddressEmbeddable {
+
+  private String street;
+  private String number;
+  private Integer zipCode;
+  private String city;
+
+  @Column(name="STREETNUMBER")
+  public String getNumber() {
+    return number;
+  }
+
+  public void setNumber(String number) {
+    this.number = number;
+  }
+
+  ...  // other getter and setter methods, equals, hashCode
+}
+
+
+
+

As you can see an embeddable is similar to an entity class, but with an @Embeddable annotation instead of the @Entity annotation and without primary key or modification counter. +An Embeddable does not exist on its own but in the context of an entity. +As a simplification Embeddables do not require a separate interface and ETO as the bean-mapper will create a copy automatically when converting the owning entity to an ETO. +However, in this case the embeddable becomes part of your api module that therefore needs a dependency on the JPA.

+
+
+

In addition to that the methods equals(Object) and hashCode() need to be implemented as this is required by Hibernate (it is not required for entities because they can be unambiguously identified by their primary key). For some hints on how to implement the hashCode() method please have a look here.

+
+
+

Using this AddressEmbeddable inside an entity class can be done like this:

+
+
+
+
  private AddressEmbeddable address;
+
+  @Embedded
+  public AddressEmbeddable getAddress() {
+    return this.address;
+  }
+
+  public void setAddress(AddressEmbeddable address) {
+    this.address = address;
+  }
+}
+
+
+
+

The @Embedded annotation needs to be used for embedded attributes. Note that if in all columns of the embeddable (here Address) are null, then the embeddable object itself is also null inside the entity. This has to be considered to avoid NullPointerException’s. Further this causes some issues with primitive types in embeddable classes that can be avoided by only using object types instead.

+
+
+
+

1.21. Inheritance

+
+

Just like normal java classes, entity classes can inherit from others. The only difference is that you need to specify how to map a class hierarchy to database tables. Generic abstract super-classes for entities can simply be annotated with @MappedSuperclass.

+
+
+

For all other cases the JPA offers the annotation @Inheritance with the property strategy taking an InheritanceType that has the following options:

+
+
+
+
+
    +
  • +

    SINGLE_TABLE: This strategy uses a single table that contains all columns needed to store all entity-types of the entire inheritance hierarchy. If a column is not needed for an entity because of its type, there is a null value in this column. An additional column is introduced, which denotes the type of the entity (called dtype).

    +
  • +
  • +

    TABLE_PER_CLASS: For each concrete entity class there is a table in the database that can store such an entity with all its attributes. An entity is only saved in the table corresponding to its most concrete type. To get all entities of a super type, joins are needed.

    +
  • +
  • +

    JOINED: In this case there is a table for every entity class including abstract classes, which contains only the columns for the persistent properties of that particular class. Additionally there is a primary key column in every table. To get an entity of a class that is a subclass of another one, joins are needed.

    +
  • +
+
+
+
+
+

Each of the three approaches has its advantages and drawbacks, which are discussed in detail here. In most cases, the first one should be used, because it is usually the fastest way to do the mapping, as no joins are needed when retrieving, searching or persisting entities. Moreover it is rather simple and easy to understand. +One major disadvantage is that the first approach could lead to a table with a lot of null values, which might have a negative impact on the database size.

+
+
+

The inheritance strategy has to be annotated to the top-most entity of the class hierarchy (where @MappedSuperclass classes are not considered) like in the following example:

+
+
+
+
@Entity
+@Inheritance(strategy=InheritanceType.SINGLE_TABLE)
+public abstract class MyParentEntity extends ApplicationPersistenceEntity implements MyParent {
+  ...
+}
+
+@Entity
+public class MyChildEntity extends MyParentEntity implements MyChild {
+  ...
+}
+
+@Entity
+public class MyOtherEntity extends MyParentEntity implements MyChild {
+  ...
+}
+
+
+
+

As a best practice we advise you to avoid entity hierarchies at all where possible and otherwise to keep the hierarchy as small as possible. In order to just ensure reuse or establish a common API you can consider a shared interface, a @MappedSuperclass or an @Embeddable instead of an entity hierarchy.

+
+
+
+

1.22. Repositories and DAOs

+
+

For each entity a code unit is created that groups all database operations for that entity. We recommend to use spring-data repositories for that as it is most efficient for developers. As an alternative there is still the classic approach using DAOs.

+
+
+
Concurrency Control
+
+

The concurrency control defines the way concurrent access to the same data of a database is handled. When several users (or threads of application servers) concurrently access a database, anomalies may happen, e.g. a transaction is able to see changes from another transaction although that one did not yet commit these changes. Most of these anomalies are automatically prevented by the database system, depending on the isolation level (property hibernate.connection.isolation in the jpa.xml, see here, or quarkus.datasource.jdbc.transaction-isolation-level in the application.properties).

+
+
+

Another anomaly is when two stakeholders concurrently access a record, do some changes and write them back to the database. The JPA addresses this with different locking strategies (see here).

+
+
+

As a best practice we are using optimistic locking for regular end-user services (OLTP) and pessimistic locking for batches.

+
+
+
+
Optimistic Locking
+
+

The class com.devonfw.module.jpa.persistence.api.AbstractPersistenceEntity already provides optimistic locking via a modificationCounter with the @Version annotation. Therefore JPA takes care of optimistic locking for you. When entities are transferred to clients, modified and sent back for update you need to ensure the modificationCounter is part of the game. If you follow our guides about transfer-objects and services this will also work out of the box. +You only have to care about two things:

+
+
+
    +
  • +

    How to deal with optimistic locking in relationships?
    +Assume an entity A contains a collection of B entities. Should there be a locking conflict if one user modifies an instance of A while another user in parallel modifies an instance of B that is contained in the other instance? To address this, take a look at FeatureForceIncrementModificationCounter.

    +
  • +
  • +

    What should happen in the UI if an OptimisticLockException occurred?
    +According to KISS our recommendation is that the user gets an error displayed that tells him to do his change again on the recent data. Try to design your system and the work processing in a way to keep such conflicts rare and you are fine.

    +
  • +
+
+
+
+
Pessimistic Locking
+
+

For back-end services and especially for batches optimistic locking is not suitable. A human user shall not cause a large batch process to fail because he was editing the same entity. Therefore such use-cases use pessimistic locking, which gives them a kind of priority over the human users. In your DAO implementation you can provide methods that do pessimistic locking via EntityManager operations that take a LockModeType. Here is a simple example:

+
+
+
+
  getEntityManager().lock(entity, LockModeType.READ);
+
+
+
+

When using the lock(Object, LockModeType) method with LockModeType.READ, Hibernate will issue a SELECT …​ FOR UPDATE. This means that no one else can update the entity (see here for more information on the statement). If LockModeType.WRITE is specified, Hibernate issues a SELECT …​ FOR UPDATE NOWAIT instead, which has the same meaning as the statement above, but if there is already a lock, the program will not wait for this lock to be released. Instead, an exception is raised.
Use one of the types if you want to modify the entity later on, for read only access no lock is required.
+Use one of the types if you want to modify the entity later on, for read only access no lock is required.

+
+
+

As you might have noticed, the behavior of Hibernate deviates from what one would expect by looking at the LockModeType (especially LockModeType.READ should not cause a SELECT …​ FOR UPDATE to be issued). The framework actually deviates from what is specified in the JPA for unknown reasons.

+
+
+
+
+

1.23. Database Auditing

+ +
+
+

1.24. Testing Data-Access

+
+

For testing of Entities and Repositories or DAOs see testing guide.

+
+
+
+

1.25. Principles

+
+

We strongly recommend these principles:

+
+
+
    +
  • +

    Use the JPA where ever possible and use vendor (hibernate) specific features only for situations when JPA does not provide a solution. In the latter case consider first if you really need the feature.

    +
  • +
  • +

    Create your entities as simple POJOs and use JPA to annotate the getters in order to define the mapping.

    +
  • +
  • +

    Keep your entities simple and avoid putting advanced logic into entity methods.

    +
  • +
+
+
+
+

1.26. Database Configuration

+
+

For details on the configuration of the database connection and database logging of the individual framework, please refer to the respective configuration guide.

+
+
+

For spring see here.

+
+
+

For quarkus see here.

+
+
+
Database Migration
+ +
+
+
Pooling
+
+

You typically want to pool JDBC connections to boost performance by recycling previous connections. There are many libraries available to do connection pooling. We recommend to use HikariCP. For Oracle RDBMS see here.

+
+
+
+
+

1.27. Security

+
+
SQL-Injection
+
+

A common security threat is SQL-injection. Never build queries with string concatenation or your code might be vulnerable as in the following example:

+
+
+
+
  String query = "Select op from OrderPosition op where op.comment = " + userInput;
+  return getEntityManager().createQuery(query).getResultList();
+
+
+
+

Via the parameter userInput an attacker can inject SQL (JPQL) and execute arbitrary statements in the database causing extreme damage.

+
+
+

In order to prevent such injections you have to strictly follow our rules for queries:

+
+
+ +
+
+
+
Limited Permissions for Application
+
+

We suggest that you operate your application with a database user that has limited permissions so he can not modify the SQL schema (e.g. drop tables). For initializing the schema (DDL) or to do schema migrations use a separate user that is not used by the application itself.

+
+ +
+

Queries

The Java Persistence API (JPA) defines its own query language, the Java Persistence Query Language (JPQL) (see also JPQL tutorial), which is similar to SQL but operates on entities and their attributes instead of tables and columns.

+
+
+

The simplest CRUD-Queries (e.g. find an entity by its ID) are already built into the devonfw CRUD functionality (via Repository or DAO). For other cases you need to write your own query. We distinguish between static and dynamic queries. Static queries have a fixed JPQL query string that may only use parameters to customize the query at runtime. Instead, dynamic queries can change their clauses (WHERE, ORDER BY, JOIN, etc.) at runtime depending on the given search criteria.

+
+
+
+
Static Queries
+
+

E.g. to find all DishEntries (from MTS sample app) that have a price not exceeding a given maxPrice we write the following JPQL query:

+
+
+
+
SELECT dish FROM DishEntity dish WHERE dish.price <= :maxPrice
+
+
+
+

Here dish is used as alias (variable name) for our selected DishEntity (what refers to the simple name of the Java entity class). With dish.price we are referring to the Java property price (getPrice()/setPrice(…​)) in DishEntity. A named variable provided from outside (the search criteria at runtime) is specified with a colon (:) as prefix. Here with :maxPrice we reference to a variable that needs to be set via query.setParameter("maxPrice", maxPriceValue). JPQL also supports indexed parameters (?) but they are discouraged because they easily cause confusion and mistakes.

+
+
+
Using Queries to Avoid Bidirectional Relationships
+
+

With the usage of queries it is possible to avoid exposing relationships or modelling bidirectional relationships, which have some disadvantages (see relationships). This is especially desired for relationships between entities of different business components. +So for example to get all OrderLineEntities for a specific OrderEntity without using the orderLines relation from OrderEntity the following query could be used:

+
+
+
+
SELECT line FROM OrderLineEntity line WHERE line.order.id = :orderId
+
+
+
+
+
+
Dynamic Queries
+
+

For dynamic queries, we use the JPA module for Querydsl. Querydsl also supports other modules such as MongoDB and Apache Lucene. It allows you to implement queries in a powerful but readable and type-safe way (unlike Criteria API). If you already know JPQL, you will quickly be able to read and write Querydsl code. It feels like JPQL but implemented in Java instead of plain text.

+
+
+

To use Querydsl in your Maven project, add the following dependencies:

+
+
+
+
<dependencies>
+
+    <dependency>
+        <groupId>com.querydsl</groupId>
+        <artifactId>querydsl-apt</artifactId>
+        <version>${querydsl.version}</version>
+        <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+        <groupId>com.querydsl</groupId>
+        <artifactId>querydsl-jpa</artifactId>
+        <version>${querydsl.version}</version>
+    </dependency>
+
+</dependencies>
+
+
+
+

Next, configure the annotation processing tool (APT) plugin:

+
+
+
+
<project>
+  <build>
+    <plugins>
+      ...
+      <plugin>
+        <groupId>com.mysema.maven</groupId>
+        <artifactId>apt-maven-plugin</artifactId>
+        <version>1.1.3</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>process</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>target/generated-sources/java</outputDirectory>
+              <processor>com.querydsl.apt.jpa.JPAAnnotationProcessor</processor>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      ...
+    </plugins>
+  </build>
+</project>
+
+
+
+

Here is an example from our sample application:

+
+
+
+
  public List<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    QDishEntity dish = QDishEntity.dishEntity;
+    JPAQuery<DishEntity> query = new JPAQuery<OrderEntity>(getEntityManager());
+    query.from(dish);
+
+    Range<BigDecimal> priceRange = criteria.getPriceRange();
+    if (priceRange != null) {
+      BigDecimal min = priceRange.getMin();
+      if (min != null) {
+        query.where(dish.price.goe(min));
+      }
+      BigDecimal max = priceRange.getMax();
+      if (max != null) {
+        query.where(dish.price.loe(max));
+      }
+    }
+    String name = criteria.getName();
+    if ((name != null) && (!name.isEmpty())) {
+      query.where(dish.name.eq(name));
+    }
+    query.orderBy(dish.price.asc(), dish.name.asc());
+    return query.fetch();
+  }
+
+
+
+

In this example, we use the so called Q-types (QDishEntity). These are classes generated at build time by the Querydsl annotation processor from entity classes. The Q-type classes can be used as static types representative of the original entity class.

+
+
+

The query.from(dish) method call defines the query source, in this case the dish table. The where method defines a filter. For example, the first call uses the goe operator to filter out any dishes that are not greater or equal to the minimal price. Further operators can be found here.

+
+
+

The orderBy method is used to sort the query results according to certain criteria. Here, we sort the results first by their price and then by their name, both in ascending order. To sort in descending order, use .desc(). To partition query results into groups of rows, see the groupBy method.

+
+
+

For spring, devon4j provides another approach that you can use for your Spring applications to implement Querydsl logic without having to use these metaclasses. An example can be found here.

+
+
+
+
Native Queries
+
+

Spring Data supports the use of native queries. Native queries use simple native SQL syntax that is not parsed in JPQL. This allows you to use all the features that your database supports. +The downside to this is that database portability is lost due to the absence of an abstraction layer. Therefore, the queries may not work with another database because it may use a different syntax.

+
+
+

You can implement a native query using @Query annotation with the nativeQuery attribute set to true:

+
+
+
+
@Query(value="...", nativeQuery=true)
+
+
+
+ + + + + +
+ + +This will not work with Quarkus because Quarkus does not support native queries by using the @Query annotation (see here). +
+
+
+

You can also implement native queries directly using the EntityManager API and the createNativeQuery method. +This approach also works with Quarkus.

+
+
+
+
Query query = entityManager.createNativeQuery("SELECT * FROM Product", ProductEntity.class);
+List<ProductEntity> products = query.getResultList();
+
+
+
+ + + + + +
+ + +Be sure to use the name of the table when using native queries, while you must use the entity name when implementing queries with JPQL. +
+
+
+
+
Using Wildcards
+
+

For flexible queries it is often required to allow wildcards (especially in dynamic queries). While users intuitively expect glob syntax, the SQL and JPQL standards work differently. Therefore, a mapping is required. devonfw provides this on a lower level with LikePatternSyntax and on a higher level with QueryUtil (see QueryHelper.newStringClause(…​)).

+
+
+
+
Pagination
+
+

When dealing with large amounts of data, an efficient method of retrieving the data is required. Fetching the entire data set each time would be too time consuming. Instead, Paging is used to process only small subsets of the entire data set.

+
+
+

If you are using Spring Data repositories you will get pagination support out of the box by providing the interfaces Page and Pageable:

+
+
+
Listing 6. repository
+
+
Page<DishEntity> findAll(Pageable pageable);
+
+
+
+

Then you can create a Pageable object and pass it to the method call as follows:

+
+
+
+
int page = criteria.getPageNumber();
+int size = criteria.getPageSize();
+Pageable pageable = PageRequest.of(page, size);
+Page<DishEntity> dishes = dishRepository.findAll(pageable);
+
+
+
+
Paging with Querydsl
+
+

Pagination is also supported for dynamic queries with Querydsl:

+
+
+
+
  public Page<DishEntity> findDishes(DishSearchCriteriaTo criteria) {
+    QDishEntity dish = QDishEntity.dishEntity;
+    JPAQuery<DishEntity> query = new JPAQuery<DishEntity>(getEntityManager());
+    query.from(dish);
+
+    // conditions
+
+    int page = criteria.getPageNumber();
+    int size = criteria.getPageSize();
+    Pageable pageable = PageRequest.of(page, size);
+    query.offset(pageable.getOffset());
+    query.limit(pageable.getPageSize());
+
+    List<DishEntity> dishes = query.fetch();
+    return new PageImpl<>(dishes, pageable, dishes.size());
+  }
+
+
+
+
+
Pagination example
+
+

For the table entity we can make a search request by accessing the REST endpoint with pagination support like in the following examples:

+
+
+
+
POST mythaistar/services/rest/tablemanagement/v1/table/search
+{
+  "pagination": {
+    "size":2,
+    "total":true
+  }
+}
+
+//Response
+{
+    "pagination": {
+        "size": 2,
+        "page": 1,
+        "total": 11
+    },
+    "result": [
+        {
+            "id": 101,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 1,
+            "state": "OCCUPIED"
+        },
+        {
+            "id": 102,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 2,
+            "state": "FREE"
+        }
+    ]
+}
+
+
+
+ + + + + +
+ + +As we are requesting with the total property set to true the server responds with the total count of rows for the query. +
+
+
+

For retrieving a concrete page, we provide the page attribute with the desired value. Here we also left out the total property so the server doesn’t incur the effort to calculate it:

+
+
+
+
POST mythaistar/services/rest/tablemanagement/v1/table/search
+{
+  "pagination": {
+    "size":2,
+    "page":2
+  }
+}
+
+//Response
+
+{
+    "pagination": {
+        "size": 2,
+        "page": 2,
+        "total": null
+    },
+    "result": [
+        {
+            "id": 103,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 3,
+            "state": "FREE"
+        },
+        {
+            "id": 104,
+            "modificationCounter": 1,
+            "revision": null,
+            "waiterId": null,
+            "number": 4,
+            "state": "FREE"
+        }
+    ]
+}
+
+
+
+
+
Pagination in devon4j-spring
+
+

For spring applications, devon4j also offers its own solution for pagination. You can find an example of this here.

+
+
+
+
+
Query Meta-Parameters
+
+

Queries can have meta-parameters that are provided via SearchCriteriaTo. Besides paging (see above) we also get timeout support.

+
+
+
+
Advanced Queries
+
+

Writing queries can sometimes get rather complex. The current examples given above only showed very simple basics. Within this topic a lot of advanced features need to be considered like:

+
+
+ +
+
+

This list is just containing the most important aspects. As we can not cover all these topics here, they are linked to external documentation that can help and guide you.

+
+ +
+

==Spring Data +Spring Data JPA is supported by both Spring and Quarkus. However, in Quarkus this approach still has some limitations. For detailed information, see the official Quarkus Spring Data guide.

+
+
+
+
Motivation
+
+

The benefits of Spring Data are (for examples and explanations see next sections):

+
+
+
    +
  • +

    All you need is one single repository interface for each entity. No need for a separate implementation or other code artifacts like XML descriptors, NamedQueries class, etc.

    +
  • +
  • +

    You have all information together in one place (the repository interface) that actually belongs together (whereas in the classic approach you have the static queries in an XML file, constants to them in NamedQueries class and referencing usages in DAO implementation classes).

    +
  • +
  • +

    Static queries are most simple to realize as you do not need to write any method body. This means you can develop faster.

    +
  • +
  • +

    Support for paging is already built-in. Again, for static query methods there is nothing you have to do except using the paging objects in the signature.

    +
  • +
  • +

    Still you have the freedom to write custom implementations via default methods within the repository interface (e.g. for dynamic queries).

    +
  • +
+
+
+
+
Dependency
+
+

In case you want to switch to or add Spring Data support to your Spring or Quarkus application, all you need is to add the respective maven dependency:

+
+
+
Listing 7. spring
+
+
<dependency>
+  <groupId>org.springframework.boot</groupId>
+  <artifactId>spring-boot-starter-data-jpa</artifactId>
+</dependency>
+
+
+
+
Listing 8. quarkus
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-spring-data-jpa</artifactId>
+</dependency>
+
+
+
+
+
Repository
+
+

For each entity «Entity»Entity an interface is created with the name «Entity»Repository extending JpaRepository. +Such repository is the analogy to a Data-Access-Object (DAO) used in the classic approach or when Spring Data is not an option.

+
+
+
Listing 9. Repository
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long> {
+
+}
+
+
+
+

The Spring Data repository provides some basic implementations for accessing data, e.g. returning all instances of a type (findAll) or returning an instance by its ID (findById).

+
+
+
+
Custom method implementation
+
+

In addition, repositories can be enriched with additional functionality, e.g. to add QueryDSL functionality or to override the default implementations, by using so called repository fragments:

+
+
+
Example
+
+

The following example shows how to write such a repository:

+
+
+
Listing 10. Repository
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long>, ProductFragment {
+
+  @Query("SELECT product FROM ProductEntity product" //
+      + " WHERE product.title = :title")
+  List<ProductEntity> findByTitle(@Param("title") String title);
+
+  @Query("SELECT product FROM ProductEntity product" //
+      + " WHERE product.title = :title")
+  Page<ProductEntity> findByTitlePaginated(@Param("title") String title, Pageable pageable);
+}
+
+
+
+
Listing 11. Repository fragment
+
+
public interface ProductFragment {
+  Page<ProductEntity> findByCriteria(ProductSearchCriteriaTo criteria);
+}
+
+
+
+
Listing 12. Fragment implementation
+
+
public class ProductFragmentImpl implements ProductFragment {
+  @Inject
+  EntityManager entityManager;
+
+  public Page<ProductEntity> findByCriteria(ProductSearchCriteriaTo criteria) {
+    QProductEntity product = QProductEntity.productEntity;
+    JPAQuery<ProductEntity> query = new JPAQuery<ProductEntity>(this.entityManager);
+    query.from(product);
+
+    String title = criteria.getTitle();
+    if ((title != null) && !title.isEmpty()) {
+      query.where(product.title.eq(title));
+    }
+
+    List<ProductEntity> products = query.fetch();
+    return new PageImpl<>(products, PageRequest.of(criteria.getPageNumber(), criteria.getPageSize()), products.size());
+  }
+}
+
+
+
+

This ProductRepository has the following features:

+
+
+
    +
  • +

    CRUD support from Spring Data (see JavaDoc for details).

    +
  • +
  • +

    Support for QueryDSL integration, paging and more.

    +
  • +
  • +

    A static query method findByTitle to find all ProductEntity instances from DB that have the given title. Please note the @Param annotation that links the method parameter with the variable inside the query (:title).

    +
  • +
  • +

    The same with pagination support via findByTitlePaginated method.

    +
  • +
  • +

    A dynamic query method findByCriteria showing the QueryDSL and paging integration into Spring via a fragment implementation.

    +
  • +
+
+
+

You can find an implementation of this ProductRepository in our Quarkus reference application.

+
+
+ + + + + +
+ + +In Quarkus, native and named queries via the @Query annotation are currently not supported +
+
+
+
+
Integration of Spring Data in devon4j-spring
+
+

For Spring applications, devon4j offers a proprietary solution that integrates seamlessly with QueryDSL and uses default methods instead of the fragment approach. A separate guide for this can be found here.

+
+
+
+
Custom methods without fragment approach
+
+

The fragment approach is a bit laborious, as three types (repository interface, fragment interface and fragment implementation) are always needed to implement custom methods. +We cannot simply use default methods within the repository because we cannot inject the EntityManager directly into the repository interface.

+
+
+

As a workaround, you can create a GenericRepository interface, as is done in the devon4j jpa-spring-data module.

+
+
+
+
public interface GenericRepository<E> {
+
+  EntityManager getEntityManager();
+
+  ...
+}
+
+
+
+
+
public class GenericRepositoryImpl<E> implements GenericRepository<E> {
+
+  @Inject
+  EntityManager entityManager;
+
+  @Override
+  public EntityManager getEntityManager() {
+
+    return this.entityManager;
+  }
+
+  ...
+}
+
+
+
+

Then, all your repository interfaces can extend the GenericRepository and you can implement queries directly in the repository interface using default methods:

+
+
+
+
public interface ProductRepository extends JpaRepository<ProductEntity, Long>, GenericRepository<ProductEntity> {
+
+  default Page<ProductEntity> findByTitle(Title title) {
+
+    EntityManager entityManager = getEntityManager();
+    Query query = entityManager.createNativeQuery("select * from Product where title = :title", ProductEntity.class);
+    query.setParameter("title", title);
+    List<ProductEntity> products = query.getResultList();
+    return new PageImpl<>(products);
+  }
+
+  ...
+}
+
+
+
+
+
+
Drawbacks
+
+

Spring Data also has some drawbacks:

+
+
+
    +
  • +

    Some kind of magic behind the scenes that is not so easy to understand. So in case you want to extend all your repositories without providing the implementation via a default method in a parent repository interface you need to deep-dive into Spring Data. We assume that you do not need that and hope what Spring Data and devon already provides out-of-the-box is already sufficient.

    +
  • +
  • +

    The Spring Data magic also includes guessing the query from the method name. This is not easy to understand and especially to debug. Our suggestion is not to use this feature at all and either provide a @Query annotation or an implementation via default method.

    +
  • +
+
+
+
+
Limitations in Quarkus
+
+
    +
  • +

    Native and named queries are not supported using @Query annotation. You will receive something like: Build step io.quarkus.spring.data.deployment.SpringDataJPAProcessor#build threw an exception: java.lang.IllegalArgumentException: Attribute nativeQuery of @Query is currently not supported

    +
  • +
  • +

    Customizing the base repository for all repository interfaces in the code base, which is done in Spring Data by registering a class that extends SimpleJpaRepository

    +
  • +
+
+ +
+

==Data Access Object

+
+
+

The Data Access Objects (DAOs) are part of the persistence layer. +They are responsible for a specific entity and should be named «Entity»Dao and «Entity»DaoImpl. +The DAO offers the so called CRUD-functionalities (create, retrieve, update, delete) for the corresponding entity. +Additionally a DAO may offer advanced operations such as query or locking methods.

+
+
+
+
DAO Interface
+
+

For each DAO there is an interface named «Entity»Dao that defines the API. For CRUD support and common naming we derive it from the ApplicationDao interface that comes with the devon application template:

+
+
+
+
public interface MyEntityDao extends ApplicationDao<MyEntity> {
+  List<MyEntity> findByCriteria(MyEntitySearchCriteria criteria);
+}
+
+
+
+

All CRUD operations are inherited from ApplicationDao so you only have to declare the additional methods.

+
+
+
+
DAO Implementation
+
+

Implementing a DAO is quite simple. We create a class named «Entity»DaoImpl that extends ApplicationDaoImpl and implements your «Entity»Dao interface:

+
+
+
+
public class MyEntityDaoImpl extends ApplicationDaoImpl<MyEntity> implements MyEntityDao {
+
+  public List<MyEntity> findByCriteria(MyEntitySearchCriteria criteria) {
+    TypedQuery<MyEntity> query = createQuery(criteria, getEntityManager());
+    return query.getResultList();
+  }
+  ...
+}
+
+
+
+

Again you only need to implement the additional non-CRUD methods that you have declared in your «Entity»Dao interface. +In the DAO implementation you can use the method getEntityManager() to access the EntityManager from the JPA. You will need the EntityManager to create and execute queries.

+
+
+
Static queries for DAO Implementation
+
+

All static queries are declared in the file src\main\resources\META-INF\orm.xml:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<entity-mappings version="1.0" xmlns="http://java.sun.com/xml/ns/persistence/orm" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://java.sun.com/xml/ns/persistence/orm http://java.sun.com/xml/ns/persistence/orm_1_0.xsd">
+  <named-query name="find.dish.with.max.price">
+    <query><![CDATA[SELECT dish FROM DishEntity dish WHERE dish.price <= :maxPrice]]></query>
+  </named-query>
+  ...
+</entity-mappings>
+
+
+
+

When your application is started, all these static queries will be created as prepared statements. This allows better performance and also ensures that you get errors for invalid JPQL queries when you start your app rather than later when the query is used.

+
+
+

To avoid redundant occurrences of the query name (get.open.order.positions.for.order) we define a constant for each named query:

+
+
+
+
public class NamedQueries {
+  public static final String FIND_DISH_WITH_MAX_PRICE = "find.dish.with.max.price";
+}
+
+
+
+

Note that changing the name of the java constant (FIND_DISH_WITH_MAX_PRICE) can be done easily with refactoring. Further you can trace where the query is used by searching the references of the constant.

+
+
+

The following listing shows how to use this query:

+
+
+
+
public List<DishEntity> findDishByMaxPrice(BigDecimal maxPrice) {
+  Query query = getEntityManager().createNamedQuery(NamedQueries.FIND_DISH_WITH_MAX_PRICE);
+  query.setParameter("maxPrice", maxPrice);
+  return query.getResultList();
+}
+
+
+
+

Via EntityManager.createNamedQuery(String) we create an instance of Query for our predefined static query. +Next we use setParameter(String, Object) to provide a parameter (maxPrice) to the query. This has to be done for all parameters of the query.

+
+
+

Note that using the createQuery(String) method, which takes the entire query as string (that may already contain the parameter) is not allowed to avoid SQL injection vulnerabilities. +When the method getResultList() is invoked, the query is executed and the result is delivered as List. As an alternative, there is a method called getSingleResult(), which returns the entity if the query returned exactly one and throws an exception otherwise.

+
+ +
+

==JPA Performance +When using JPA the developer sometimes does not see or understand where and when statements to the database are triggered.

+
+
+
+
+

Establishing expectations Developers shouldn’t expect to sprinkle magic pixie dust on POJOs in hopes they will become persistent.

+
+
+
+— Dan Allen
+https://epdf.tips/seam-in-action.html +
+
+
+

So in case you do not understand what is going on under the hood of JPA, you will easily run into performance issues due to lazy loading and other effects.

+
+
+
+
+
N plus 1 Problem
+
+

The most prominent phenomenon is called the N+1 Problem. +We use entities from our MTS demo app as an example to explain the problem. +There is a DishEntity that has a @ManyToMany relation to +IngredientEntity. +Now we assume that we want to iterate all ingredients for a dish like this:

+
+
+
+
DishEntity dish = dao.findDishById(dishId);
+BigDecimal priceWithAllExtras = dish.getPrice();
+for (IngredientEntity ingredient : dish.getExtras()) {
+  priceWithAllExtras = priceWithAllExtras.add(ingredient.getPrice());
+}
+
+
+
+

Now dish.getExtras() is loaded lazy. Therefore the JPA vendor will provide a list with lazy initialized instances of IngredientEntity that only contain the ID of that entity. Now with every call of ingredient.getPrice() we technically trigger an SQL query statement to load the specific IngredientEntity by its ID from the database. +Now findDishById caused 1 initial query statement and for any number N of ingredients we are causing an additional query statement. This makes a total of N+1 statements. As causing statements to the database is an expensive operation with a lot of overhead (creating connection, etc.) this ends in bad performance and is therefore a problem (the N+1 Problem).

+
+
+
+
Solving N plus 1 Problem
+
+

To solve the N+1 Problem you need to change your code to only trigger a single statement instead. This can be achieved in various ways. The most universal solution is to use FETCH JOIN in order to pre-load the nested N child entities into the first level cache of the JPA vendor implementation. This will behave very similarly as if the @ManyToMany relation to IngredientEntity was having FetchType.EAGER but only for the specific query and not in general. Because changing @ManyToMany to FetchType.EAGER would cause bad performance for other usecases where only the dish but not its extra ingredients are needed. For this reason all relations, including @OneToOne should always be FetchType.LAZY. Back to our example we simply replace dao.findDishById(dishId) with dao.findDishWithExtrasById(dishId) that we implement by the following JPQL query:

+
+
+
+
SELECT dish FROM DishEntity dish
+  LEFT JOIN FETCH dish.extras
+  WHERE dish.id = :dishId
+
+
+
+

The rest of the code does not have to be changed but now dish.getExtras() will get the IngredientEntity from the first level cache where it was fetched by the initial query above.

+
+
+

Please note that if you only need the sum of the prices from the extras you can also create a query using an aggregator function:

+
+
+
+
SELECT sum(dish.extras.price) FROM DishEntity dish
+
+
+
+

As you can see you need to understand the concepts in order to get good performance.

+
+
+

There are many advanced topics such as creating database indexes or calculating statistics for the query optimizer to get the best performance. For such advanced topics we recommend to have a database expert in your team that cares about such things. However, understanding the N+1 Problem and its solutions is something that every Java developer in the team needs to understand.

+
+ +
+

==IdRef

+
+
+

IdRef can be used to reference other entities in TOs in order to make them type-safe and semantically more expressive. +It is an optional concept in devon4j for more complex applications that make intensive use of relations and foreign keys.

+
+
+
+
Motivation
+
+

Assuming you have a method signature like the following:

+
+
+
+
Long approve(Long cId, Long cuId);
+
+
+
+

So what are the parameters? What is returned?

+
+
+

IdRef is just a wrapper for a Long used as foreign key. This makes our signature much more expressive and self-explanatory:

+
+
+
+
IdRef<Contract> approve(IdRef<Contract> cId, IdRef<Customer> cuId);
+
+
+
+

Now we can easily see, that the result and the parameters are foreign-keys and which entity they are referring to via their generic type. +We can read the javadoc of these entities from the generic type and understand the context. +Finally, when passing IdRef objects to such methods, we get compile errors in case we accidentally place parameters in the wrong order.

+
+
+
+
IdRef and Mapping
+
+

In order to easily map relations from entities to transfer-objects and back, we can easily also put according getters and setters into our entities:

+
+
+
+
public class ContractEntity extends ApplicationPersistenceEntity implements Contract {
+
+  private CustomerEntity customer;
+
+  ...
+
+  @ManyToOne(fetch = FetchType.LAZY)
+  @JoinColumn(name = "CUSTOMER_ID")
+  public CustomerEntity getCustomer() {
+    return this.customer;
+  }
+
+  public void setCustomer(CustomerEntity customer) {
+    this.customer = customer;
+  }
+
+  @Transient
+  public IdRef<Customer> getCustomerId() {
+    return IdRef.of(this.customer);
+  }
+
+  public void setCustomerId(IdRef<Customer> customerId) {
+    this.customer = JpaHelper.asEntity(customerId, CustomerEntity.class);
+  }
+}
+
+
+
+

Now, ensure that you have the same getters and setters for customerId in your Eto:

+
+
+
+
public class ContractEto extends AbstractEto implements Contract {
+
+  private IdRef<Customer> customerId;
+
+  ...
+
+  public IdRef<Customer> getCustomerId() {
+    return this.customerId;
+  }
+
+  public void setCustomerId(IdRef<Customer> customerId) {
+    this.customerId = customerId;
+  }
+}
+
+
+
+

This way the bean-mapper can automatically map from your entity (ContractEntity) to your Eto (ContractEto) and vice-versa.

+
+
+
+
JpaHelper and EntityManager access
+
+

In the above example we used JpaHelper.asEntity to convert the foreign key (IdRef<Customer>) to the according entity (CustomerEntity). +This will internally use EntityManager.getReference to properly create a JPA entity. +The alternative "solution" that may be used with Long instead of IdRef is typically:

+
+
+
+
  public void setCustomerId(IdRef<Customer> customerId) {
+    Long id = null;
+    if (customerId != null) {
+      id = customerId.getId();
+    }
+    if (id == null) {
+      this.customer = null;
+    } else {
+      this.customer = new CustomerEntity();
+      this.customer.setId(id);
+    }
+  }
+
+
+
+

While this "solution" works in most cases, we discovered some more complex cases, where it fails with very strange hibernate exceptions. +When cleanly creating the entity via EntityManager.getReference instead it is working in all cases. +So how can JpaHelper.asEntity as a static method access the EntityManager? +Therefore we need to initialize this as otherwise you may see this exception:

+
+
+
+
java.lang.IllegalStateException: EntityManager has not yet been initialized!
+	at com.devonfw.module.jpa.dataaccess.api.JpaEntityManagerAccess.getEntityManager(JpaEntityManagerAccess.java:38)
+	at com.devonfw.module.jpa.dataaccess.api.JpaHelper.asEntity(JpaHelper.java:49)
+
+
+
+

For main usage in your application we assume that there is only one instance of EntityManager. +Therefore we can initialize this instance during the spring boot setup. +This is what we provide for you in JpaInitializer +when creating a devon4j app.

+
+
+
JpaHelper and spring-test
+
+

Further, you also want your code to work in integration tests. +Spring-test provides a lot of magic under the hood to make integration testing easy for you. +To boost the performance when running multiple tests, spring is smart and avoids creating the same spring-context multiple times. +Therefore it stores these contexts so that if a test-case is executed with a specific spring-configuration that has already been setup before, +the same spring-context can be reused instead of creating it again. +However, your tests may have multiple spring configurations leading to multiple spring-contexts. +Even worse these tests can run in any order leading to switching between spring-contexts forth and back. +Therefore, a static initializer during the spring boot setup can lead to strange errors as you can get the wrong EntityManager instance. +In order to fix such problems, we provide a solution pattern via DbTest ensuring for every test, +that the proper instance of EntityManager is initialized. +Therefore you should derive directly or indirectly (e.g. via ComponentDbTest and SubsystemDbTest) from DbTest or adopt your own way to apply this pattern to your tests, when using JpaHelper. +This already happens if you are extending ApplicationComponentTest or ApplicationSubsystemTest.

+
+
+ +
+

==Transaction Handling

+
+
+

For transaction handling we use AOP to add transaction control via annotations as aspect. +This is done by annotating your code with the @Transactional annotation. +You can either annotate your container bean at class level to make all methods transactional or you can annotate individual methods to make them transactional:

+
+
+
+
  @Transactional
+  public Output getData(Input input) {
+    ...
+  }
+
+
+
+
+
+
JTA Imports
+
+

Here are the import statements for transaction support:

+
+
+
+
import javax.transaction.Transactional;
+
+
+
+ + + + + +
+ + +Use the above import statement to follow JEE and avoid using org.springframework.transaction.annotation.Transactional. +
+
+
+
+
JTA Dependencies
+
+

Please note that with Jakarta EE the dependencies have changed. +When you want to start with Jakarta EE you should use these dependencies to get the annotations for dependency injection:

+
+
+
+
<!-- Java Transaction API (JTA) -->
+<dependency>
+  <groupId>jakarta.transaction</groupId>
+  <artifactId>jakarta.transaction-api</artifactId>
+</dependency>
+
+
+
+

Please note that with Quarkus you will get them as transitive dependencies out of the box. +The above Jakarta EE dependencies replace these JEE dependencies:

+
+
+
+
<!-- Java Transaction API (JTA) -->
+<dependency>
+  <groupId>javax.transaction</groupId>
+  <artifactId>javax.transaction-api</artifactId>
+</dependency>
+
+
+
+
+
Handling constraint violations
+
+

Using @Transactional magically wraps transaction handling around your code. +As constraints are checked by the database at the end when the transaction gets committed, a constraint violation will be thrown by this aspect outside your code. +In case you have to handle constraint violations manually, you have to do that in code outside the logic that is annotated with @Transactional. +This may be done in a service operation by catching a ConstraintViolationException (org.hibernate.exception.ConstraintViolationException for hibernate). +As a generic approach you can solve this via REST exception handling.

+
+
+
+
Batches
+
+

Transaction control for batches is a lot more complicated and is described in the batch layer.

+
+
+ +
+

==SQL

+
+
+

For general guides on dealing or avoiding SQL, preventing SQL-injection, etc. you should study domain layer.

+
+
+
+
+

1.28. Naming Conventions

+
+

Here we define naming conventions that you should follow whenever you write SQL files:

+
+
+
    +
  • +

    All SQL-Keywords in UPPER CASE

    +
  • +
  • +

    Indentation should be 2 spaces as suggested by devonfw for every format.

    +
  • +
+
+
+
DDL
+
+

The naming conventions for database constructs (tables, columns, triggers, constraints, etc.) should be aligned with your database product and their operators. +However, when you have the freedom of choice and a modern case-sensitive database, you can simply use your code conventions also for database constructs to avoid explicitly mapping each and every property (e.g. RestaurantTable vs. RESTAURANT_TABLE).

+
+
+
    +
  • +

    Define columns and constraints inline in the statement to create the table

    +
  • +
  • +

    Indent column types so they all start in the same text column

    +
  • +
  • +

    Constraints should be named explicitly (to get a reasonable hint in error messages) with:

    +
    +
      +
    • +

      PK_«table» for primary key (name optional here as PK constraint are fundamental)

      +
    • +
    • +

      FK_«table»_«property» for foreign keys («table» and «property» are both on the source where the foreign key is defined)

      +
    • +
    • +

      UC_«table»_«property»[_«propertyN»]* for unique constraints

      +
    • +
    • +

      CK_«table»_«check» for check constraints («check» describes the check, if it is defined on a single property it should start with the property).

      +
    • +
    +
    +
  • +
  • +

    Old RDBMS had hard limitations for names (e.g. 30 characters). Please note that recent databases have overcome this very low length limitations. However, keep your names short but precise and try to define common abbreviations in your project for according (business) terms. Especially do not just truncate the names at the limit.

    +
  • +
  • +

    If possible add comments on table and columns to help DBAs understanding your schema. This is also honored by many tools (not only DBA-tools).

    +
  • +
+
+
+

Here is a brief example of a DDL:

+
+
+
+
CREATE SEQUENCE HIBERNATE_SEQUENCE START WITH 1000000;
+
+-- *** Table ***
+CREATE TABLE RESTAURANT_TABLE (
+  ID                   NUMBER(19) NOT NULL,
+  MODIFICATION_COUNTER INTEGER NOT NULL,
+  SEATS                INTEGER NOT NULL,
+  CONSTRAINT PK_TABLE PRIMARY KEY(ID)
+);
+COMMENT ON TABLE RESTAURANT_TABLE IS 'The physical tables inside the restaurant.';
+-- *** Order ***
+CREATE TABLE RESTAURANT_ORDER (
+  ID                   NUMBER(19) NOT NULL,
+  MODIFICATION_COUNTER INTEGER NOT NULL,
+  TABLE_ID             NUMBER(19) NOT NULL,
+  TOTAL                DECIMAL(5, 2) NOT NULL,
+  CREATION_DATE        TIMESTAMP NOT NULL,
+  PAYMENT_DATE         TIMESTAMP,
+  STATUS               VARCHAR2(10 CHAR) NOT NULL,
+  CONSTRAINT PK_ORDER PRIMARY KEY(ID),
+  CONSTRAINT FK_ORDER_TABLE_ID FOREIGN KEY(TABLE_ID) REFERENCES RESTAURANT_TABLE(ID)
+);
+COMMENT ON TABLE RESTAURANT_ORDER IS 'An order and bill at the restaurant.';
+...
+
+
+
+

ATTENTION: Please note that TABLE and ORDER are reserved keywords in SQL and you should avoid using such keywords to prevent problems.

+
+
+
+
Data
+
+

For insert, update, delete, etc. of data SQL scripts should additionally follow these guidelines:

+
+
+
    +
  • +

    Inserts always with the same order of columns in blocks for each table.

    +
  • +
  • +

    Insert column values always starting with ID, MODIFICATION_COUNTER, [DTYPE, ] …​

    +
  • +
  • +

    List columns with fixed length values (boolean, number, enums, etc.) before columns with free text to support alignment of multiple insert statements

    +
  • +
  • +

    Pro Tip: Get familiar with column mode of advanced editors such as notepad++ when editing large blocks of similar insert statements.

    +
  • +
+
+
+
+
INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (0, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (1, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (2, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (3, 1, 4);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (4, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (5, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (6, 1, 6);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (7, 1, 8);
+INSERT INTO RESTAURANT_TABLE(ID, MODIFICATION_COUNTER, SEATS) VALUES (8, 1, 8);
+...
+
+
+
+

See also Database Migrations.

+
+
+ +
+

==Database Migration

+
+
+

When you have a schema-based database, +you need a solution for schema versioning and migration for your database. +A specific release of your app requires a corresponding version of the schema in the database to run. +As you want simple and continuous deployment you should automate the schema versioning and database migration.

+
+
+

The general idea is that your software product contains "scripts" to migrate the database from schema version X to version X+1. +When you begin your project you start with version 1 and with every increment of your app that needs a change to the database schema (e.g. a new table, a new column to an existing table, a new index, etc.) you add another "script" that migrates from the current to the next version. +For simplicity these versions are just sequential numbers or timestamps. +Now, the solution you choose will automatically manage the schema version in a separate metadata table in your database that stores the current schema version. +When your app is started, it will check the current version inside the database from that metadata table. +As long as there are "scripts" that migrate from there to a higher version, they will be automatically applied to the database and this process is recorded in the metadata table in your database, which also updates the current schema version there. +Using this approach, you can start with an empty database, which will result in all "scripts" being applied sequentially. +Also any version of your database schema can be present and you will always end up in a controlled migration to the latest schema version.

+
+
+
+
+

1.29. Options for database migration

+
+

For database migration you can choose between the following options:

+
+
+
    +
  • +

    flyway (KISS based approach with migrations as SQL)

    +
  • +
  • +

    liquibase (more complex approach with database abstraction)

    +
  • +
+
+ +
+

==Flyway

+
+
+

Flyway is a tool for database migration and schema versioning. +See why for a motivation for using flyway.

+
+
+

Flyway can be used standalone e.g. via flyway-maven-plugin or can be integrated directly into your app to make sure the database migration takes place on startup. +For simplicity we recommend to integrate flyway into your app. +However, you need to be aware that therefore your app needs database access with full schema owner permissions.

+
+
+
Organizational Advice
+
+

A few considerations with respect to project organization will help to implement maintainable Flyway migrations.

+
+
+

At first, testing and production environments must be clearly and consistently distinguished. Use the following directory structure to achieve this distinction:

+
+
+
+
  src/main/resources/db
+  src/test/resources/db
+
+
+
+

Although this structure introduces redundancies, the benefit outweighs this disadvantage. +An even more fine-grained production directory structure which contains one sub folder per release should be implemented:

+
+
+
+
  src/main/resources/db/migration/releases/X.Y/x.sql
+
+
+
+

Emphasizing that migration scripts below the current version must never be changed will aid the second advantage of migrations: it will always be clearly reproducible in which state the database currently is. +Here, it is important to mention that, if test data is required, it must be managed separately from the migration data in the following directory:

+
+
+
+
  src/test/resources/db/migration/
+
+
+
+

The migration directory is added to aid easy usage of Flyway defaults. +Of course, test data should also be managed per release as like production data.

+
+
+

With regard to content, separation of concerns (SoC) is an important goal. SoC can be achieved by distinguishing and writing multiple scripts with respect to business components/use cases (or database tables in case of large volumes of master data [1]). Comprehensible file names aid this separation.

+
+
+

It is important to have clear responsibilities regarding the database, the persistence layer (JPA), and migrations. Therefore a dedicated database expert should be in charge of any migrations performed or she should at least be informed before any change to any of the mentioned parts is applied.

+
+
+
+
Technical Configuration
+
+

Database migrations can be SQL based or Java based.

+
+
+

To enable auto migration on startup (not recommended for productive environment) set the following property in the application.properties file for an environment.

+
+
+
+
flyway.enabled=true
+flyway.clean-on-validation-error=false
+
+
+
+

For development environment it is helpful to set both properties to true in order to simplify development. For regular environments flyway.clean-on-validation-error should be false.

+
+
+

If you want to use Flyway set the following property in any case to prevent Hibernate from doing changes on the database (pre-configured by default in devonfw):

+
+
+
+
spring.jpa.hibernate.ddl-auto=validate
+
+
+
+

The setting must be communicated to and coordinated with the customer and their needs. +In acceptance testing the same configuration as for the production environment should be enabled.

+
+
+

Since migration scripts will also be versioned the end-of-line (EOL) style must be fixated according to this issue. This is however solved in flyway 4.0+ and the latest devonfw release. +Also, the version numbers of migration scripts should not consist of simple ascending integer numbers like V0001…​, V0002…​, …​ This naming may lead to problems when merging branches. Instead the usage of timestamps as version numbers will help to avoid such problems.

+
+
+
+
Naming Conventions
+
+

Database migrations should follow this naming convention: +V<version>__<description> (e.g.: V12345__Add_new_table.sql).

+
+
+

It is also possible to use Flyway for test data. To do so place your test data migrations in src/main/resources/db/testdata/ and set property

+
+
+
+
flyway.locations=classpath:db/migration/releases,classpath:db/migration/testdata
+
+
+
+

Then Flyway scans the additional location for migrations and applies all in the order specified by their version. If migrations V0001__... and V0002__... exist and a test data migration should be applied in between you can name it V0001_1__....

+
+ +
+

==Liquibase

+
+ +
+

See devon4j#303 for details and status.

+
+
+
+
Spring-boot usage
+
+

For using liquibase in spring see Using Liquibase with Spring Boot.

+
+
+
+
Quarkus usage
+
+

For using liquibase in quarkus see Using Liquibase.

+
+
+ +
+

==REST +REST (REpresentational State Transfer) is an inter-operable protocol for services that is more lightweight than SOAP. +However, it is no real standard and can cause confusion (see REST philosophy). +Therefore we define best practices here to guide you.

+
+
+
+
+

1.30. URLs

+
+

URLs are not case sensitive. Hence, we follow the best practice to use only lower-case-letters-with-hyphen-to-separate-words. +For operations in REST we distinguish the following types of URLs:

+
+
+
    +
  • +

    A collection URL is build from the rest service URL by appending the name of a collection. This is typically the name of an entity. Such URL identifies the entire collection of all elements of this type. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity

    +
  • +
  • +

    An element URL is build from a collection URL by appending an element ID. It identifies a single element (entity) within the collection. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/42

    +
  • +
+
+
+

To follow KISS avoid using plural forms (…​/productmanagement/v1/products vs. …​/productmanagement/v1/product/42). Always use singular forms and avoid confusions (except for the rare cases where no singular exists).

+
+
+

The REST URL scheme fits perfect for CRUD operations. +For business operations (processing, calculation, advanced search, etc.) we simply append a collection URL with the name of the business operation. +Then we can POST the input for the business operation and get the result back. Example: https://mydomain.com/myapp/services/rest/mycomponent/v1/myentity/search

+
+
+
+

1.31. HTTP Methods

+
+

The following table defines the HTTP methods (verbs) and their meaning:

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3. Usage of HTTP methods
HTTP MethodMeaning

GET

Read data (stateless).

PUT

Create or update data.

POST

Process data.

DELETE

Delete an entity.

+
+

Please also note that for (large) bulk deletions you may be forced to use POST instead of DELETE as according to the HTTP standard DELETE must not have payload and URLs are limited in length.

+
+
+

For general recommendations on HTTP methods for collection and element URLs see REST@wikipedia.

+
+
+
+

1.32. HTTP Status Codes

+
+

Further we define how to use the HTTP status codes for REST services properly. In general the 4xx codes correspond to an error on the client side and the 5xx codes to an error on the server side.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4. Usage of HTTP status codes
HTTP CodeMeaningResponseComment

200

OK

requested result

Result of successful GET

204

No Content

none

Result of successful POST, DELETE, or PUT with empty result (void return)

400

Bad Request

error details

The HTTP request is invalid (parse error, validation failed)

401

Unauthorized

none

Authentication failed

403

Forbidden

none

Authorization failed

404

Not found

none

Either the service URL is wrong or the requested resource does not exist

500

Server Error

error code, UUID

Internal server error occurred, in case of an exception, see REST exception handling

+
+
+

1.33. JAX-RS

+
+

For implementing REST services we use the JAX-RS standard. +As payload encoding we recommend JSON bindings using Jackson. +To implement a REST service you simply add JAX-RS annotations. +Here is a simple example:

+
+
+
+
@ApplicationScoped
+@Path("/imagemanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public class ImagemanagementRestService {
+
+  @Inject
+  private Imagemanagement imagemanagement;
+
+  @GET
+  @Path("/image/{id}/")
+  public ImageDto getImage(@PathParam("id") long id) {
+
+    return this.imagemanagement.findImage(id);
+  }
+}
+
+
+
+

Here we can see a REST service for the business component imagemanagement. The method getImage can be accessed via HTTP GET (see @GET) under the URL path imagemanagement/image/{id} (see @Path annotations) where {id} is the ID of the requested image and will be extracted from the URL and provided as parameter id to the method getImage. It will return its result (ImageDto) as JSON (see @Produces annotation - you can also extend RestService marker interface that defines these annotations for JSON). As you can see it delegates to the logic component imagemanagement that contains the actual business logic while the service itself only exposes this logic via HTTP. The REST service implementation is a regular CDI bean that can use dependency injection.

+
+
+ + + + + +
+ + +With JAX-RS it is important to make sure that each service method is annotated with the proper HTTP method (@GET,@POST,etc.) to avoid unnecessary debugging. So you should take care not to forget to specify one of these annotations. +
+
+
+
Service-Interface
+
+

You may also separate API and implementation in case you want to reuse the API for service-client:

+
+
+
+
@Path("/imagemanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public interface ImagemanagementRestService {
+
+  @GET
+  @Path("/image/{id}/")
+  ImageEto getImage(@PathParam("id") long id);
+
+}
+
+@Named("ImagemanagementRestService")
+public class ImagemanagementRestServiceImpl implements ImagemanagementRestService {
+
+  @Override
+  public ImageEto getImage(long id) {
+
+    return this.imagemanagement.findImage(id);
+  }
+
+}
+
+
+
+
+
JAX-RS Configuration
+
+

Starting from CXF 3.0.0 it is possible to enable the auto-discovery of JAX-RS roots.

+
+
+

When the JAX-RS server is instantiated, all the scanned root and provider beans (beans annotated with javax.ws.rs.Path and javax.ws.rs.ext.Provider) are configured.

+
+
+
+
REST Exception Handling
+
+

For exceptions, a service needs to have an exception facade that catches all exceptions and handles them by writing proper log messages and mapping them to an HTTP response with a corresponding HTTP status code. +For this, devon4j provides a generic solution via RestServiceExceptionFacade that you can use within your Spring applications. You need to follow the exception guide in order for it to work out of the box because the facade needs to be able to distinguish between business and technical exceptions. +To implement a generic exception facade in Quarkus, follow the Quarkus exception guide.

+
+
+

Now your service may throw exceptions, but the facade will automatically handle them for you.

+
+
+

The general format for returning an error to the client is as follows:

+
+
+
+
{
+  "message": "A human-readable message describing the error",
+  "code": "A code identifying the concrete error",
+  "uuid": "An identifier (generally the correlation id) to help identify corresponding requests in logs"
+}
+
+
+
+
+
Pagination details
+
+

We recommend to use spring-data repositories for database access that already comes with pagination support. +Therefore, when performing a search, you can include a Pageable object. +Here is a JSON example for it:

+
+
+
+
{ "pageSize": 20, "pageNumber": 0, "sort": [] }
+
+
+
+

By increasing the pageNumber the client can browse and page through the hits.

+
+
+

As a result you will receive a Page. +It is a container for your search results just like a Collection but additionally contains pagination information for the client. +Here is a JSON example:

+
+
+
+
{ "totalElements": 1022,
+  pageable: { "pageSize": 20, "pageNumber": 0 },
+  content: [ ... ] }
+
+
+
+

The totalElements property contains the total number of hits. +This can be used by the client to compute the total number of pages and render the pagination links accordingly. +Via the pageable property the client gets back the Pageable properties from the search request. +The actual hits for the current page are returned as array in the content property.

+
+
+
+
+

1.34. REST Testing

+
+

For testing REST services in general consult the testing guide.

+
+
+

For manual testing REST services there are browser plugins:

+
+
+ +
+
+
+

1.35. Security

+
+

Your services are the major entry point to your application. Hence security considerations are important here.

+
+
+
CSRF
+
+

A common security threat is CSRF for REST services. Therefore all REST operations that are performing modifications (PUT, POST, DELETE, etc. - all except GET) have to be secured against CSRF attacks. See CSRF how to do this.

+
+
+
+
JSON top-level arrays
+
+

OWASP earlier suggested to never return JSON arrays at the top-level, to prevent attacks without rationale. +We dug deep and found anatomy-of-a-subtle-json-vulnerability. +To sum it up the attack is many years old and does not work in any recent or relevant browser. +Hence it is fine to use arrays as top-level result in a JSON REST service (means you can return List<Foo> in a Java JAX-RS service).

+
+
+ +
+

==JSON

+
+
+

JSON (JavaScript Object Notation) is a popular format to represent and exchange data especially for modern web-clients. For mapping Java objects to JSON and vice-versa there is no official standard API. We use the established and powerful open-source solution Jackson. +Due to problems with the wiki of fasterxml you should try this alternative link: Jackson/AltLink.

+
+
+
+
+

1.36. Configure JSON Mapping

+
+

In order to avoid polluting business objects with proprietary Jackson annotations (e.g. @JsonTypeInfo, @JsonSubTypes, @JsonProperty) we propose to create a separate configuration class. Every devonfw application (sample or any app created from our app-template) therefore has a class called ApplicationObjectMapperFactory that extends ObjectMapperFactory from the devon4j-rest module. It looks like this:

+
+
+
+
@Named("ApplicationObjectMapperFactory")
+public class ApplicationObjectMapperFactory extends ObjectMapperFactory {
+
+  public ApplicationObjectMapperFactory() {
+    super();
+    // JSON configuration code goes here
+  }
+}
+
+
+
+
+

1.37. JSON and Inheritance

+
+

If you are using inheritance for your objects mapped to JSON then polymorphism can not be supported out-of-the box. So in general avoid polymorphic objects in JSON mapping. However, this is not always possible. +Have a look at the following example from our sample application:

+
+
+
+inheritance class diagram +
+
Figure 6. Transfer-Objects using Inheritance
+
+
+

Now assume you have a REST service operation as Java method that takes a ProductEto as argument. As this is an abstract class the server needs to know the actual sub-class to instantiate. +We typically do not want to specify the classname in the JSON as this should be an implementation detail and not part of the public JSON format (e.g. in case of a service interface). Therefore we use a symbolic name for each polymorphic subtype that is provided as virtual attribute @type within the JSON data of the object:

+
+
+
+
{ "@type": "Drink", ... }
+
+
+
+

Therefore you add configuration code to the constructor of ApplicationObjectMapperFactory. Here you can see an example from the sample application:

+
+
+
+
setBaseClasses(ProductEto.class);
+addSubtypes(new NamedType(MealEto.class, "Meal"), new NamedType(DrinkEto.class, "Drink"),
+  new NamedType(SideDishEto.class, "SideDish"));
+
+
+
+

We use setBaseClasses to register all top-level classes of polymorphic objects. Further we declare all concrete polymorphic sub-classes together with their symbolic name for the JSON format via addSubtypes.

+
+
+
+

1.38. Custom Mapping

+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to define a custom mapping. If you create objects dedicated for the JSON mapping you can easily avoid such situations. When this is not suitable follow these instructions to define the mapping:

+
+
+
    +
  1. +

    As an example, the use of JSR354 (javax.money) is appreciated in order to process monetary amounts properly. However, without custom mapping, the default mapping of Jackson will produce the following JSON for a MonetaryAmount:

    +
    +
    +
    "currency": {"defaultFractionDigits":2, "numericCode":978, "currencyCode":"EUR"},
    +"monetaryContext": {...},
    +"number":6.99,
    +"factory": {...}
    +
    +
    +
    +

    As clearly can be seen, the JSON contains too much information and reveals implementation secrets that do not belong here. Instead the JSON output expected and desired would be:

    +
    +
    +
    +
    "currency":"EUR","amount":"6.99"
    +
    +
    +
    +

    Even worse, when we send the JSON data to the server, Jackson will see that MonetaryAmount is an interface and does not know how to instantiate it so the request will fail. +Therefore we need a customized Serializer.

    +
    +
  2. +
  3. +

    We implement MonetaryAmountJsonSerializer to define how a MonetaryAmount is serialized to JSON:

    +
    +
    +
    public final class MonetaryAmountJsonSerializer extends JsonSerializer<MonetaryAmount> {
    +
    +  public static final String NUMBER = "amount";
    +  public static final String CURRENCY = "currency";
    +
    +  public void serialize(MonetaryAmount value, JsonGenerator jgen, SerializerProvider provider) throws ... {
    +    if (value != null) {
    +      jgen.writeStartObject();
    +      jgen.writeFieldName(MonetaryAmountJsonSerializer.CURRENCY);
    +      jgen.writeString(value.getCurrency().getCurrencyCode());
    +      jgen.writeFieldName(MonetaryAmountJsonSerializer.NUMBER);
    +      jgen.writeString(value.getNumber().toString());
    +      jgen.writeEndObject();
    +    }
    +  }
    +
    +
    +
    +

    For composite datatypes it is important to wrap the info as an object (writeStartObject() and writeEndObject()). MonetaryAmount provides the information we need by the getCurrency() and getNumber(). So that we can easily write them into the JSON data.

    +
    +
  4. +
  5. +

    Next, we implement MonetaryAmountJsonDeserializer to define how a MonetaryAmount is deserialized back as Java object from JSON:

    +
    +
    +
    public final class MonetaryAmountJsonDeserializer extends AbstractJsonDeserializer<MonetaryAmount> {
    +  protected MonetaryAmount deserializeNode(JsonNode node) {
    +    BigDecimal number = getRequiredValue(node, MonetaryAmountJsonSerializer.NUMBER, BigDecimal.class);
    +    String currencyCode = getRequiredValue(node, MonetaryAmountJsonSerializer.CURRENCY, String.class);
    +    MonetaryAmount monetaryAmount =
    +        MonetaryAmounts.getAmountFactory().setNumber(number).setCurrency(currencyCode).create();
    +    return monetaryAmount;
    +  }
    +}
    +
    +
    +
    +

    For composite datatypes we extend from AbstractJsonDeserializer as this makes our task easier. So we already get a JsonNode with the parsed payload of our datatype. Based on this API it is easy to retrieve individual fields from the payload without taking care of their order, etc. +AbstractJsonDeserializer also provides methods such as getRequiredValue to read required fields and get them converted to the desired basis datatype. So we can easily read the amount and currency and construct an instance of MonetaryAmount via the official factory API.

    +
    +
  6. +
  7. +

    Finally we need to register our custom (de)serializers with the following configuration code in the constructor of ApplicationObjectMapperFactory:+

    +
  8. +
+
+
+
+
  SimpleModule module = getExtensionModule();
+  module.addDeserializer(MonetaryAmount.class, new MonetaryAmountJsonDeserializer());
+  module.addSerializer(MonetaryAmount.class, new MonetaryAmountJsonSerializer());
+
+
+
+

Now we can read and write MonetaryAmount from and to JSON as expected.

+
+
+ +
+

==XML

+
+
+

XML (Extensible Markup Language) is a W3C standard format for structured information. It has a large eco-system of additional standards and tools.

+
+
+

In Java there are many different APIs and frameworks for accessing, producing and processing XML. For the devonfw we recommend to use JAXB for mapping Java objects to XML and vice-versa. Further there is the popular DOM API for reading and writing smaller XML documents directly. When processing large XML documents StAX is the right choice.

+
+
+
+

1.39. JAXB

+
+

We use JAXB to serialize Java objects to XML or vice-versa.

+
+
+
JAXB and Inheritance
+
+

Use @XmlSeeAlso annotation to provide sub-classes. +See section "Collective Polymorphism" described here.

+
+
+
+
JAXB Custom Mapping
+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to define a custom mapping. If you create dedicated objects for the XML mapping you can easily avoid such situations. When this is not suitable use @XmlJavaTypeAdapter and provide an XmlAdapter implementation that handles the mapping. +For details see here.

+
+
+
+
+

1.40. Security

+
+

To prevent XML External Entity attacks, follow JAXP Security Guide and enable FSP.

+
+
+ +
+

==SOAP +SOAP is a common protocol for services that is rather complex and heavy. It allows to build inter-operable and well specified services (see WSDL). SOAP is transport neutral, which is not only an advantage. We strongly recommend to use HTTPS transport and ignore additional complex standards like WS-Security and use established HTTP-Standards such as RFC2617 (and RFC5280).

+
+
+
+

1.41. JAX-WS

+
+

For building web-services with Java we use the JAX-WS standard. +There are two approaches:

+
+
+
    +
  • +

    code first

    +
  • +
  • +

    contract first

    +
  • +
+
+
+

Here is an example in case you define a code-first service.

+
+
+
Web-Service Interface
+
+

We define a regular interface to define the API of the service and annotate it with JAX-WS annotations:

+
+
+
+
@WebService
+public interface TablemanagmentWebService {
+
+  @WebMethod
+  @WebResult(name = "message")
+  TableEto getTable(@WebParam(name = "id") String id);
+
+}
+
+
+
+
+
Web-Service Implementation
+
+

And here is a simple implementation of the service:

+
+
+
+
@Named
+@WebService(endpointInterface = "com.devonfw.application.mtsj.tablemanagement.service.api.ws.TablemanagmentWebService")
+public class TablemanagementWebServiceImpl implements TablemanagmentWebService {
+
+  private Tablemanagement tableManagement;
+
+  @Override
+  public TableEto getTable(String id) {
+
+    return this.tableManagement.findTable(id);
+  }
+}
+
+
+
+
+
+

1.42. SOAP Custom Mapping

+
+

In order to map custom datatypes or other types that do not follow the Java bean conventions, you need to write adapters for JAXB (see XML).

+
+
+
+

1.43. SOAP Testing

+
+

For testing SOAP services in general consult the testing guide.

+
+
+

For testing SOAP services manually we strongly recommend SoapUI.

+
+
+ +
+

==Logging

+
+
+

We recommend to use SLF4J as API for logging, that has become a de facto standard in Java as it has a much better design than java.util.logging offered by the JDK. +There are several implementations for SLF4J. For Spring applications our recommended implementation is Logback. Quarkus uses JBoss Logging which provides a JBoss Log Manager implementation for SLF4J. For more information on logging in Quarkus, see the Quarkus logging guide.

+
+
+
+

1.44. Logging Dependencies

+
+

To use Logback in your Spring application, you need to include the following dependencies:

+
+
+
+
<!-- SLF4J as logging API -->
+<dependency>
+  <groupId>org.slf4j</groupId>
+  <artifactId>slf4j-api</artifactId>
+</dependency>
+<!-- Logback as logging implementation  -->
+<dependency>
+  <groupId>ch.qos.logback</groupId>
+  <artifactId>logback-classic</artifactId>
+</dependency>
+<!-- JSON logging for cloud-native log monitoring -->
+<dependency>
+  <groupId>net.logstash.logback</groupId>
+  <artifactId>logstash-logback-encoder</artifactId>
+</dependency>
+
+
+
+

In devon4j these dependencies are provided by the devon4j-logging module.

+
+
+

In Quarkus, SLF4J and the slf4j-jboss-logmanager are directly included in the Quarkus core runtime and can be used out of the box.

+
+
+
+

1.45. Logger Access

+
+

The general pattern for accessing loggers from your code is a static logger instance per class using the following pattern:

+
+
+
+
import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class MyClass {
+  private static final Logger LOG = LoggerFactory.getLogger(MyClass.class);
+  ...
+}
+
+
+
+

For detailed documentation how to use the logger API check the SLF4j manual.

+
+
+ + + + + +
+ + +In case you are using devonfw-ide and Eclipse you can just type LOG and hit [ctrl][space] to insert the code pattern including the imports into your class. +
+
+
+
Lombok
+
+

In case you are using Lombok, you can simply use the @Slf4j annotation in your class. This causes Lombok to generate the logger instance for you.

+
+
+
+
+

1.46. Log-Levels

+
+

We use a common understanding of the log-levels as illustrated by the following table. +This helps for better maintenance and operation of the systems.

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 5. Log-levels
Log-levelDescriptionImpactActive Environments

FATAL

Only used for fatal errors that prevent the application to work at all (e.g. startup fails or shutdown/restart required)

Operator has to react immediately

all

ERROR

An abnormal error indicating that the processing failed due to technical problems.

Operator should check for known issue and otherwise inform development

all

WARNING

A situation where something worked not as expected. E.g. a business exception or user validation failure occurred.

No direct reaction required. Used for problem analysis.

all

INFO

Important information such as context, duration, success/failure of request or process

No direct reaction required. Used for analysis.

all

DEBUG

Development information that provides additional context for debugging problems.

No direct reaction required. Used for analysis.

development and testing

TRACE

Like DEBUG but exhaustive information and for code that is run very frequently. Will typically cause large log-files.

No direct reaction required. Used for problem analysis.

none (turned off by default)

+
+

Exceptions (with their stack trace) should only be logged on FATAL or ERROR level. For business exceptions typically a WARNING including the message of the exception is sufficient.

+
+
+
Configuration of Logback
+
+

The configuration of logback happens via the logback.xml file that you should place into src/main/resources of your app. +For details consult the logback configuration manual.

+
+
+ + + + + +
+ + +Logback also allows to overrule the configuration with a logback-test.xml file that you may put into src/test/resources or into a test-dependency. +
+
+
+
+
Configuration in Quarkus
+
+

The are several options you can set in the application.properties file to configure the behaviour of the logger in Quarkus. For a detailed overview, see the corresponding part of the Quarkus guide.

+
+
+
+
+

1.47. JSON-logging

+
+

For easy integration with log-monitoring, we recommend that your app logs to standard out in JSON following JSON Lines.

+
+
+

In Spring applications, this can be achieved via logstash-logback-encoder (see dependencies). In Quarkus, it can be easily achieved using the quarkus-logging-json extension (see here for more details).

+
+
+

This will produce log-lines with the following format (example formatted for readability):

+
+
+
+
{
+  "timestamp":"2000-12-31T23:59:59.999+00:00",
+  "@version":"1",
+  "message":"Processing 4 order(s) for shipment",
+  "logger_name":"com.myapp.order.logic.UcManageOrder",
+  "thread_name":"http-nio-8081-exec-6",
+  "level":"INFO",
+  "level_value":20000,
+  "appname":"myapp"
+}
+
+
+
+
Adding custom values to JSON log with Logstash
+
+

The JSON encoder even supports logging custom properties for your log-monitoring. +The trick is to use the class net.logstash.logback.argument.StructuredArguments for adding the arguments to you log message, e.g.

+
+
+
+
import static net.logstash.logback.argument.StructuredArguments.v;
+
+...
+    LOG.info("Request with {} and {} took {} ms.", v("url", url), v("status", statusCode), v("duration", millis));
+...
+
+
+
+

This will produce a JSON log-line with the following properties:

+
+
+
+
...
+  "message":"Request with url=https://api/service/v1/ordermanagement/order and status=200 took duration=251 ms",
+  "url":"https://api/service/v1/ordermanagement/order",
+  "status":"200",
+  "duration":"251",
+...
+
+
+
+

As you can quickly see, besides the human readable message you also have the structured properties url, status and duration that can be extremely valuable to configure dashboards in your log-monitoring that visualize success/failure ratio as well as performance of your requests.

+
+
+
+
+

1.48. Classic log-files

+
+ + + + + +
+ + +In devon4j, we strongly recommend using JSON logging instead of classic log files. The following section refers only to devon4j Spring applications that use Logback. +
+
+
+

Even though we do not recommend anymore to write classical log-files to the local disc, here you can still find our approach for it.

+
+
+
Maven-Integration
+
+

In the pom.xml of your application add this dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java</groupId>
+  <artifactId>devon4j-logging</artifactId>
+</dependency>
+
+
+
+

The above dependency already adds transitive dependencies to SLF4J and logback. +Also it comes with configuration snippets that can be included from your logback.xml file (see configuration).

+
+
+

The logback.xml to write regular log-files can look as following:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<configuration scan="true" scanPeriod="60 seconds">
+  <property resource="com/devonfw/logging/logback/application-logging.properties" />
+  <property name="appname" value="MyApp"/>
+  <property name="logPath" value="../logs"/>
+  <include resource="com/devonfw/logging/logback/appenders-file-all.xml" />
+  <include resource="com/devonfw/logging/logback/appender-console.xml" />
+
+  <root level="DEBUG">
+    <appender-ref ref="ERROR_APPENDER"/>
+    <appender-ref ref="INFO_APPENDER"/>
+    <appender-ref ref="DEBUG_APPENDER"/>
+    <appender-ref ref="CONSOLE_APPENDER"/>
+  </root>
+
+  <logger name="org.springframework" level="INFO"/>
+</configuration>
+
+
+
+

The provided logback.xml is configured to use variables defined in the config/application.properties file. +In our example, the log file path points to ../logs/ in order to log to the tomcat log directory when starting tomcat from the bin folder. +Change it according to your custom needs.

+
+
+
Listing 13. config/application.properties
+
+
log.dir=../logs/
+
+
+
+
+
Log Files
+
+

The classical approach uses the following log files:

+
+
+
    +
  • +

    Error Log: Includes log entries to detect errors.

    +
  • +
  • +

    Info Log: Used to analyze system status and to detect bottlenecks.

    +
  • +
  • +

    Debug Log: Detailed information for error detection.

    +
  • +
+
+
+

The log file name pattern is as follows:

+
+
+
+
«LOGTYPE»_log_«HOST»_«APPLICATION»_«TIMESTAMP».log
+
+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 6. Segments of Logfilename
ElementValueDescription

«LOGTYPE»

info, error, debug

Type of log file

«HOST»

e.g. mywebserver01

Name of server, where logs are generated

«APPLICATION»

e.g. myapp

Name of application, which causes logs

«TIMESTAMP»

YYYY-MM-DD_HH00

date of log file

+
+

Example: +error_log_mywebserver01_myapp_2013-09-16_0900.log

+
+
+

Error log from mywebserver01 at application myapp at 16th September 2013, 9am.

+
+
+
+
Output format
+
+

We use the following output format for all log entries to ensure that searching and filtering of log entries work consistent for all logfiles:

+
+
+
+
[D: «timestamp»] [P: «priority»] [C: «NDC»][T: «thread»][L: «logger»]-[M: «message»]
+
+
+
+
    +
  • +

    D: Date (Timestamp in ISO8601 format e.g. 2013-09-05 16:40:36,464)

    +
  • +
  • +

    P: Priority (the log level)

    +
  • +
  • +

    C: Correlation ID (ID to identify users across multiple systems, needed when application is distributed)

    +
  • +
  • +

    T: Thread (Name of thread)

    +
  • +
  • +

    L: Logger name (use class name)

    +
  • +
  • +

    M: Message (log message)

    +
  • +
+
+
+

Example:

+
+
+
+
[D: 2013-09-05 16:40:36,464] [P: DEBUG] [C: 12345] [T: main] [L: my.package.MyClass]-[M: My message...]
+
+
+
+ + + + + +
+ + +When using devon4j-logging, this format is used by default. To achieve this format in Quarkus, set quarkus.log.console.format=[D: %d] [P: %p] [C: %X] [T: %t] [L: %c] [M: %m]%n in your properties. +
+
+
+
+
Correlation ID
+
+

In order to correlate separate HTTP requests to services belonging to the same user / session, we provide a servlet filter called DiagnosticContextFilter. +This filter takes a provided correlation ID from the HTTP header X-Correlation-Id. +If none was found, it will generate a new correlation id as UUID. +This correlation ID is added as MDC to the logger. +Therefore, it will then be included to any log message of the current request (thread). +Further concepts such as service invocations will pass this correlation ID to subsequent calls in the application landscape. Hence you can find all log messages related to an initial request simply via the correlation ID even in highly distributed systems.

+
+
+
+
Security
+
+

In order to prevent log forging attacks you can simply use the suggested JSON logging format. +Otherwise you can use com.devonfw.module.logging.common.impl.SingleLinePatternLayout as demonstrated here in order to prevent such attacks.

+
+
+ +
+

==Monitoring

+
+
+

For monitoring a complex application landscape it is crucial to have an exact overview which applications are up and running and which are not and why. +In devonfw we only focus on topics which are most important when developing production-ready applications. +On a high level view we strongly suggest to separate the application to be monitored from the monitoring system itself. +Therefore, your application should concentrate on providing app specific data for the monitoring. +Aspects such as aggregation, visualization, search, alerting, etc. should be addressed outside of your app by a monitoring system product. +There are many products providing such a monitoring system like checkmk, icinga, SkyWalking, etc. +Please note that there is a huge list of such products and devonfw is not biased or aims to make a choice for you. +Instead please search and find the products that fit best for your requirements and infrastructure.

+
+
+
+
+

1.49. Types of monitoring

+
+

As monitoring covers a lot of different aspects we separate the following types of monitoring and the corresponding data:

+
+
+
    +
  • +

    Log-monitoring
    +is about collecting and monitoring the logs of all apps and containers in your IT landscape. It is suitable for events such as an HTTP request with its URL, resulting status code and duration in milliseconds. Your monitoring may not react to such data in realtime. Instead it may take a delay of one or a few seconds.

    +
  • +
  • +

    Infrastructure monitoring
    +is about monitoring the (hardware) infrastructure with measures like usage of CPU, memory, disc-space, etc. This is a pure operational task and your app should have nothing to do with this. In other words it is a waste if your app tries to monitor these aspects as existing products can do this much better and your app will only see virtual machines and is unable to see the physical infrastructure.

    +
  • +
  • +

    Health check
    +is about providing internal data about the current health of your app. Typically you provide sensors with health status per component or interface to neighbour service (database connectivity, etc.).

    +
  • +
  • +

    Application Performance Monitoring
    +is about measuring performance and tracing down performance issues.

    +
  • +
+
+
+
+

1.50. Health-Check

+
+

The idea of a health check is to provide monitoring data about the current health status of your application. +This allows you to integrate this specific data into the monitoring system used for your IT landscape. +In order to keep the monitoring simple and easy to integrate consider using the following best practices:

+
+
+
    +
  • +

    Use simple and established protocols such as REST instead of JMX via RMI.

    +
  • +
  • +

    Consider using recent standards such as microprofile-health.

    +
  • +
  • +

    Consider to drop access-control for your monitoring interfaces and for security prevent external access to it in your infrastructure (loadbalancers or gateways). Monitoring is only for usage within an IT landscape internally. It does not make sense for externals and end-users to access your app for reading monitoring data from a random node decided by a loadbalancer. Further, external access can easily lead to sensitive data exposure.

    +
  • +
  • +

    Consider to define different end-points per usage-scenario. So if you want the loadbalancer to ask your app monitoring for availability of each node then create a separate service URL that only provides OK or anything else for failure (NOK, 404, 500, timeout). Do not mix this with a health-check that needs more detailed information.

    +
  • +
  • +

    Also do not forget about basic features such as providing the name and the release version of your application.

    +
  • +
  • +

    Be careful to automate decisions based on monitoring and health checks. It easily turns out to be stupid if you automatically restart your pod or container because of some monitoring indicator. In the worst case a failure of a central component will cause your health-check to report down for all apps and as a result all your containers will be restarted frequently. Instead of curing problems such decisions will cause much more harm and trouble.

    +
  • +
  • +

    Avoid causing significant load with your monitoring and health-check itself. In many cases it is better to use log-monitoring or to collect monitoring data from use-cases that happen in your app anyway. If you create dummy read and write requests in your monitoring implementation you will easily turn it into a DOS-attack.

    +
  • +
+
+
+

For spring you can simply integrate app monitoring and health check via spring-boot-actuator.

+
+
+

For quarkus you can simply integrate app monitoring via micrometer or smallrye-metrics and health check via smallrye-health.

+
+ +
+

==Log-Monitoring

+
+
+

Log-monitoring is an aspect of monitoring with a strict focus on logging. +With trends towards IT landscapes with many but much smaller apps the classical approach to write log-files to the disc and let operators read those via SSH became entirely obsolete. +Nowadays we have up to hundreds or even thousands of apps that themselves are clustered into multiple nodes. +Therefore you should establish a centralized log monitoring system in the environment and let all your nodes log directly into that system. +This approach gives the following benefits:

+
+
+
    +
  • +

    all log information available in one place

    +
  • +
  • +

    full-text search across all logfiles

    +
  • +
  • +

    ability to automatically trigger alerts from specific log patterns

    +
  • +
  • +

    ability to do data-mining on logs and visualize in dashboards

    +
  • +
+
+
+
+

1.51. Options for log-monitoring

+
+

Typical products for such a log monitoring system are:

+
+
+ +
+
+

In devonfw we are not biased for any of these products. Therefore, feel free to make your choice according to the requirements of your project.

+
+
+

For Quarkus applications, you can get an insight into the topic by reading the guide about centralized log management.

+
+
+
+

1.52. API for log-monitoring

+
+

The "API" for logging to a log-monitoring system for your app is pretty simple:

+
+
+
    +
  • +

    Write your logs to standard out.

    +
  • +
  • +

    Use JSON logging as format.

    +
  • +
+
+
+

Then the container infrastructure can automatically collect your logs from standard out and directly feed those into the log monitoring system. +As a result, your app does not need to know anything about your log monitoring system and logging becomes most simple. +Further, if you do not write log-files anymore, you might not need to write any other files and therefore may not even need write permissions on the filesystem of your container. +In such case an attacker who may find a vulnerability in your app will have less attack surface in case he can not write any file.

+
+ +
+

==Application Performance Management

+
+
+

This guide gives hints how to manage, monitor and analyse performance of Java applications.

+
+
+
+

1.53. Temporary Analysis

+
+

If you are facing performance issues and want to do a punctual analysis we recommend you to use glowroot. It is ideal in cases where monitoring in your local development environment is suitable. However, it is also possible to use it in your test environment. It is entirely free and open-source. Still it is very powerful and helps to trace down bottlenecks. To get a first impression of the tool take a look at the demo.

+
+
+
JEE/WTP
+
+

In case you are forced to use an JEE application server and want to do a temporary analysis you can double click your server instance from the servers view in Eclipse and click on the link Open launch configuration in order to add the -javaagent JVM option.

+
+
+
+
+

1.54. Regular Analysis

+
+

In case you want to manage application performance regularly we recommend to use JavaMelody that can be integrated into your application. More information on javamelody is available on the JavaMelody Wiki

+
+
+
+

1.55. Alternatives

+
+ +
+
+ +
+

==Security +Security is today's most important cross-cutting concern of an application and an enterprise IT-landscape. We seriously care about security and give you detailed guides to prevent pitfalls, vulnerabilities, and other disasters. While many mistakes can be avoided by following our guidelines you still have to consider security and think about it in your design and implementation. The security guide will not automatically prevent you from all harm, but it will provide you with hints and best practices already used in different software products.

+
+
+

An important aspect of security is proper authentication and authorization as described in access-control. In the following we discuss about potential vulnerabilities and protection to prevent them.

+
+
+
+

1.56. Vulnerabilities and Protection

+
+

Independent from classical authentication and authorization mechanisms there are many common pitfalls that can lead to vulnerabilities and security issues in your application such as XSS, CSRF, SQL-injection, log-forging, etc. A good source of information about this is the OWASP. +We address these common threats individually in security sections of our technological guides as a concrete solution to prevent an attack typically depends on the according technology. The following table illustrates common threats and contains links to the solutions and protection-mechanisms provided by the devonfw:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 7. Security threats and protection-mechanisms
ThreatProtectionLink to details

A1 Injection

validate input, escape output, use proper frameworks

SQL Injection

A2 Broken Authentication

encrypt all channels, use a central identity management with strong password-policy

Authentication

A3 Sensitive Data Exposure

Use secured exception facade, design your data model accordingly

REST exception handling

A4 XML External Entities

Prefer JSON over XML, ensure FSP when parsing (external) XML

XML guide

A5 Broken Access Control

Ensure proper authorization for all use-cases, use @DenyAll as default to enforce

Access-control guide especially method authorization

A6 Security Misconfiguration

Use devon4j application template and guides to avoid

tutorial-newapp and sensitive configuration

A7 Cross-Site Scripting

prevent injection (see A1) for HTML, JavaScript and CSS and understand same-origin-policy

client-layer

A8 Insecure Deserialization

Use simple and established serialization formats such as JSON, prevent generic deserialization (for polymorphic types)

JSON guide especially inheritance, XML guide

A9 Using Components with Known Vulnerabilities

subscribe to security newsletters, recheck products and their versions continuously, use devonfw dependency management

CVE newsletter and dependency check

A10 Insufficient_Logging & Monitoring

Ensure to log all security related events (login, logout, errors), establish effective monitoring

Logging guide and monitoring guide

Insecure Direct Object References

Using direct object references (IDs) only with appropriate authorization

logic-layer

Cross-Site Request Forgery (CSRF)

secure mutable service operations with an explicit CSRF security token sent in HTTP header and verified on the server

CSRF guide

Log-Forging

Escape newlines in log messages

logging security

Unvalidated Redirects and Forwards

Avoid using redirects and forwards, in case you need them do a security audit on the solution.

devonfw proposes to use rich-clients (SPA/RIA). We only use redirects for login in a safe way.

+
+
+

1.57. Advanced Security

+
+

While OWASP Top 10 covers the basic aspects of application security, there are advanced standards such as ASVS. +In devonfw we address this in the +Application Security Quick Solution Guide.

+
+
+
+

1.58. Tools

+
+
Dependency Check
+
+

To address the threat Using Components with Known Vulnerabilities we recommend to use OWASP dependency check that ships with a maven plugin and can analyze your dependencies for known CVEs. +In order to run this check, you can simply call this command on any maven project:

+
+
+
+
mvn org.owasp:dependency-check-maven:6.1.5:aggregate
+
+
+
+ + + + + +
+ + +The version is just for completeness. You should check yourself for using a recent version of the plugin. +
+
+
+

If you build a devon4j spring application from our app-template you can activate the dependency check even easier with the security profile:

+
+
+
+
mvn clean install -P security
+
+
+
+

This does not run by default as it causes some overhead for the build performance. However, consider to build this in your CI at least nightly. +After the dependency check is performed, you will find the results in target/dependency-check-report.html of each module. The report will also be generated when the site is built (mvn site) even without the profile.

+
+
+
+
Penetration Testing
+
+

For penetration testing (testing for vulnerabilities) of your web application, we recommend the following tools:

+
+
+ +
+
+ +
+

==CORS support

+
+
+

When you are developing a JavaScript client and server application separately, you have to deal with cross-domain issues. We have to send requests from an origin domain distinct from the target domain, and the browser does not allow this.

+
+
+

So, we need to prepare the server side to accept requests from other domains. We need to cover the following points:

+
+
+
    +
  • +

    Accept request from other domains.

    +
  • +
  • +

    Accept devonfw used headers like X-CSRF-TOKEN or correlationId.

    +
  • +
  • +

    Be prepared to receive secured request (cookies).

    +
  • +
+
+
+

It is important to note that if you are using security in your request (sending cookies) you have to set withCredentials flag to true in your client side request and deal with special IE8 characteristics.

+
+
+

For more information about CORS see here. Information about the CORS headers can be found here.

+
+
+
+
+

1.59. Configuring CORS support

+
+

To enable CORS support for your application, see the advanced guides. For Spring applications see here. For Quarkus follow the official Quarkus guide.

+
+
+
+

1.60. Configuration with service mesh

+
+

If you are using a service mesh, you can also define your CORS policy directly there. Here is an example from Istio.

+
+
+ +
+

==Java Development Kit

+
+
+

The Java Development Kit is an implementation of the Java platform. It provides the Java Virtual Machine (JVM) and the Java Runtime Environment (JRE).

+
+
+
+

1.61. Editions

+
+

The JDK exists in different editions:

+
+
+ +
+
+

As Java is evolving and also complex, maintaining a JVM requires a lot of energy. +Therefore many alternative JDK editions are unable to cope with this and support latest Java versions and according compatibility. +Unfortunately OpenJDK only maintains a specific version of Java for a relatively short period of time before moving to the next major version. +In the end, this technically means that OpenJDK is continuous beta and can not be used in production for reasonable software projects. +As OracleJDK changed its licensing model and can not be used for commercial usage even during development, things can get tricky. +You may want to use OpenJDK for development and OracleJDK only in production. +However, e.g. OpenJDK 11 never released a version that is stable enough for reasonable development (e.g. javadoc tool is broken and fixes are not available for OpenJDK 11 - fixed in 11.0.3 which is only available as OracleJDK 11 or you need to go to OpenJDK 12+, which has other bugs) so in the end there is no working release of OpenJDK 11. +This more or less forces you to use OracleJDK which requires you to buy a subscription so you can use it for commercial development. +However, there is AdoptOpenJDK that provides forked releases of OpenJDK with bug-fixes which might be an option. +Anyhow, as you want to have your development environment close to production, the productively used JDK (most likely OracleJDK) should be preferred also for development.

+
+
+
+

1.62. Upgrading

+
+

Until Java 8 compatibility was one of the key aspects for Java version updates (after the mess on the Swing updates with Java2 many years ago). +However, Java 9 introduced a lot of breaking changes. +This documentation wants to share the experience we collected in devonfw when upgrading from Java 8 to newer versions. +First of all we separate runtime changes that you need if you want to build your software with JDK 8 but such that it can also run on newer versions (e.g. JRE 11) +from changes required to also build your software with more recent JDKs (e.g. JDK 11 or 12).

+
+
+
Runtime Changes
+
+

This section describes required changes to your software in order to make it run also with versions newer than Java 8.

+
+
+
Classes removed from JDK
+
+

The first thing that most users hit when running their software with newer Java versions is a ClassNotFoundException like this:

+
+
+
+
Caused by: java.lang.ClassNotFoundException: javax.xml.bind.JAXBException
+
+
+
+

As Java 9 introduced a module system with Jigsaw, the JDK that has been a monolithic mess is now a well-defined set of structured modules. +Some of the classes that used to come with the JDK moved to modules that were not available by default in Java 9 and have even been removed entirely in later versions of Java. +Therefore you should simply treat such code just like any other 3rd party component that you can add as a (maven) dependency. +The following table gives you the required hints to make your software work even with such classes / modules removed from the JDK (please note that the specified version is just a suggestion that worked, feel free to pick a more recent or more appropriate version):

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 8. Dependencies for classes removed from Java 8 since 9+
ClassGroupIdArtifactIdVersion

javax.xml.bind.*

javax.xml.bind

jaxb-api

2.3.1

com.sun.xml.bind.*

org.glassfish.jaxb

jaxb-runtime

2.3.1

java.activation.*

javax.activation

javax.activation-api

1.2.0

java.transaction.*

javax.transaction

javax.transaction-api

1.2

java.xml.ws.*

javax.xml.ws

jaxws-api

2.3.1

javax.jws.*

javax.jws

javax.jws-api

1.1

javax.annotation.*

javax.annotation

javax.annotation-api

1.3.2

+
+
+
3rd Party Updates
+
+

Further, internal and unofficial APIs (e.g. sun.misc.Unsafe) have been removed. +These are typically not used by your software directly but by low-level 3rd party libraries like asm that need to be updated. +Also simple things like the Java version have changed (from 1.8.x to 9.x, 10.x, 11.x, 12.x, etc.). +Some 3rd party libraries were parsing the Java version in a very naive way making them unable to be used with Java 9+:

+
+
+
+
Caused by: java.lang.NullPointerException
+   at org.apache.maven.surefire.shade.org.apache.commons.lang3.SystemUtils.isJavaVersionAtLeast (SystemUtils.java:1626)
+
+
+
+

Therefore the following table gives an overview of common 3rd party libraries that have been affected by such breaking changes and need to be updated to at least the specified version:

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 9. Minimum recommended versions of common 3rd party for Java 9+
GroupIdArtifactIdVersionIssue

org.apache.commons

commons-lang3

3.7

LANG-1365

cglib

cglib

3.2.9

102, 93, 133

org.ow2.asm

asm

7.1

2941

org.javassist

javassist

3.25.0-GA

194, 228, 246, 171

+
+
+
ResourceBundles
+
+

For internationalization (i18n) and localization (l10n) ResourceBundle is used for language and country specific texts and configurations as properties (e.g. MyResourceBundle_de.properties). With Java modules there are changes and impacts you need to know to get things working. The most important change is documented in the JavaDoc of ResourceBundle. However, instead of using ResourceBundleProvider and refactoring your entire code causing incompatibilities, you can simply put the resource bundles in a regular JAR on the classpath rather than a named module (or into the launching app). +If you want to implement (new) Java modules with i18n support, you can have a look at mmm-nls.

+
+
+
+
+
Buildtime Changes
+
+

If you also want to change your build to work with a recent JDK you also need to ensure that test frameworks and maven plugins properly support this.

+
+
+
Findbugs
+
+

Findbugs does not work with Java 9+ and is actually a dead project. +The new findbugs is SpotBugs. +For maven the new solution is spotbugs-maven-plugin:

+
+
+
+
<plugin>
+  <groupId>com.github.spotbugs</groupId>
+  <artifactId>spotbugs-maven-plugin</artifactId>
+  <version>3.1.11</version>
+</plugin>
+
+
+
+
+
Test Frameworks
+ + ++++++ + + + + + + + + + + + + + + + + +
Table 10. Minimum recommended versions of common 3rd party test frameworks for Java 9+
GroupIdArtifactIdVersionIssue

org.mockito

mockito-core

2.23.4

1419, 1696, 1607, 1594, 1577, 1482

+
+
+
Maven Plugins
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 11. Minimum recommended versions of common maven plugins for Java 9+
GroupIdArtifactId(min.) VersionIssue

org.apache.maven.plugins

maven-compiler-plugin

3.8.1

x

org.apache.maven.plugins

maven-surefire-plugin

2.22.2

SUREFIRE-1439

org.apache.maven.plugins

maven-surefire-report-plugin

2.22.2

SUREFIRE-1439

org.apache.maven.plugins

maven-archetype-plugin

3.1.0

x

org.apache.maven.plugins

maven-javadoc-plugin

3.1.0

x

org.jacoco

jacoco-maven-plugin

0.8.3

663

+
+
+
Maven Usage
+
+

With Java modules you can not run Javadoc standalone anymore or you will get this error when running mvn javadoc:javadoc:

+
+
+
+
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-javadoc-plugin:3.1.1:javadoc (default-cli) on project mmm-base: An error has occurred in Javadoc report generation:
+[ERROR] Exit code: 1 - error: module not found: io.github.mmm.base
+[ERROR]
+[ERROR] Command line was: /projects/mmm/software/java/bin/javadoc @options @packages @argfile
+
+
+
+

As a solution or workaround you need to include the compile goal into your build lifecycle so the module-path is properly configured:

+
+
+
+
mvn compile javadoc:javadoc
+
+
+
+
+
+
+ +
+

We want to give credits and say thanks to the following articles that have been there before and helped us on our way:

+
+ +
+ +
+

==JEE

+
+
+

This section is about Java Enterprise Edition (JEE). +According to our key principles, we focus on open standards. +For Java this means that we consider official standards from Java Standard and Enterprise Edition as first choice for considerations. +Therefore we also decided to recommend JAX-RS over SpringMVC as the latter is proprietary. +Only if an existing Java standard is not suitable for current demands such as Java Server Faces (JSF), we do not officially recommend it (while you are still free to use it if you have good reasons to do so). +In all other cases we officially suggest the according standard and use it in our guides, code-samples, sample application, modules, templates, etc. +Examples for such standards are JPA, JAX-RS, JAX-WS, JSR330, JSR250, JAX-B, etc.

+
+
+
+

1.64. Application-Server

+
+

We designed everything based on standards to work with different technology stacks and servlet containers. +However, we strongly encourage you to use modern and lightweight frameworks such as spring or quarkus. +You are free to decide for a JEE application server but here is a list of good reasons for our decision:

+
+
+
    +
  • +

    Up-to-date

    +
    +

    With spring or quarkus you easily keep up to date with evolving technologies (microservices, reactive, NoSQL, etc.). +Most application servers put you in a jail with old legacy technology. +In many cases you are even forced to use a totally outdated version of java (JVM/JDK). +This may even cause severe IT-Security vulnerabilities but with expensive support you might get updates. +Also with lightweight open-source frameworks you need to be aware that for IT-security you need to update regularly, which can cost quite a lot of additional maintenance effort.

    +
    +
  • +
  • +

    Development speed

    +
    +

    With spring-boot you can implement and especially test your individual logic very fast. Starting the app in your IDE is very easy, fast, and realistic (close to production). You can easily write JUnit tests that startup your server application to e.g. test calls to your remote services via HTTP fast and easy. For application servers you need to bundle and deploy your app what takes more time and limits you in various ways. We are aware that this has improved in the past but also spring continuously improves and is always way ahead in this area. Further, with spring you have your configurations bundled together with the code in version control (still with ability to handle different environments) while with application servers these are configured externally and can not be easily tested during development.

    +
    +
  • +
  • +

    Documentation

    +
    +

    Spring and also quarkus have an extremely open and active community. +There is documentation for everything available for free on the web. +You will find solutions to almost any problem on platforms like stackoverflow. +If you have a problem you are only a google search away from your solution. +This is very much different for proprietary application server products.

    +
    +
  • +
  • +

    Helpful Exception Messages

    +
    +

    Especially spring is really great for developers on exception messages. +If you do something wrong you get detailed and helpful messages that guide you to the problem or even the solution. +This is not as great in application servers.

    +
    +
  • +
  • +

    Future-proof

    +
    +

    Spring has evolved really awesome over time. +Since its 1.0 release in 2004 spring has continuously been improved and always caught up with important trends and innovations. +Even in critical situations, when the company behind it (interface21) was sold, spring went on perfectly. +Quarkus on the other hand is relatively new. +It does not have to carry a large legacy history and is therefore most state-of-the-art for modern projects esp. in cloud environments. +JEE went through a lot of trouble and crisis. +Just look at the EJB pain stories. +This happened often in the past and also recent. +See JEE 8 in crisis.

    +
    +
  • +
  • +

    Free

    +
    +

    Spring and quarkus including their ecosystems are free and open-source. +It still perfectly integrates with commercial solutions for specific needs. +Most application servers are commercial and cost a lot of money. +As of today the ROI for this is of question.

    +
    +
  • +
  • +

    Cloud-native

    +
    +

    Quarkus is designed for cloud-native projects from the start. +With spring this is also available via spring-native. +Using an application server will effectively prevent you from going to the cloud smoothly.

    +
    +
  • +
  • +

    Fun

    +
    +

    If you go to conferences or ask developers you will see that spring or quarkus is popular and fun. +If new developers are forced to use an old application server product they will be less motivated or even get frustrated. +Especially in today’s agile projects this is a very important aspect. +In the end you will get into trouble with maintenance on the long run if you rely on a proprietary application server.

    +
    +
  • +
+
+
+

Of course the vendors of application servers will tell you a different story. +This is simply because they still make a lot of money from their products. +We do not get paid from application servers nor from spring, quarkus or any other IT product company. +We are just developers who love to build great systems. +A good reason for application servers is that they combine a set of solutions to particular aspects to one product that helps to standardize your IT. +However, devonfw fills exactly this gap for the spring and quarkus ecosystems in a very open and flexible way. +That said, there is one important aspect that you need to understand and be aware of:

+
+
+

Some big companies decided for a specific application server as their IT strategy. +They may have hundreds of apps running with this application server. +All their operators and developers have learned a lot of specific skills for this product and are familiar with it. +If you are implementing yet another (small) app in this context it could make sense to stick with this application server. +However, also they have to be aware that with every additional app they increase their technical debt. +So actively help your customer and consult him to make the right choices for the future.

+
+
+ +
+

==Validation

+
+
+

Validation is about checking syntax and semantics of input data. Invalid data is rejected by the application. +Therefore validation is required in multiple places of an application. E.g. the GUI will do validation for usability reasons to assist the user, early feedback and to prevent unnecessary server requests. +On the server-side validation has to be done for consistency and security.

+
+
+

In general we distinguish these forms of validation:

+
+
+
    +
  • +

    stateless validation will produce the same result for given input at any time (for the same code/release).

    +
  • +
  • +

    stateful validation is dependent on other states and can consider the same input data as valid in one case and as invalid in another.

    +
  • +
+
+
+
+

1.65. Stateless Validation

+
+

For regular, stateless validation we use the JSR303 standard that is also called bean validation (BV). +Details can be found in the specification. +As implementation we recommend hibernate-validator.

+
+
+
Example
+
+

A description of how to enable BV for spring applications can be found in the relevant Spring documentation. A guide you can use to integrate validation in Quarkus applications can be found here. For a quick summary follow these steps:

+
+
+
    +
  • +

    Make sure that hibernate-validator is located in the classpath by adding a dependency to the pom.xml.

    +
  • +
+
+
+
Listing 14. spring
+
+
    <dependency>
+      <groupId>org.hibernate</groupId>
+      <artifactId>hibernate-validator</artifactId>
+    </dependency>
+
+
+
+
Listing 15. quarkus
+
+
    <dependency>
+      <groupId>io.quarkus</groupId>
+      <artifactId>quarkus-hibernate-validator</artifactId>
+    </dependency>
+
+
+
+
    +
  • +

    For methods to validate go to their declaration and add constraint annotations to the method parameters.

    +
    +

    In spring applications you can add the @Validated annotation to the implementation (spring bean) to be validated (this is an annotation of the spring framework, so it's not available in the Quarkus context). The standard use case is to annotate the logic layer implementation, i.e. the use case implementation or component facade in case of simple logic layer pattern. Thus, the validation will be executed for service requests as well as batch processing.

    +
    +
    +
      +
    • +

      @Valid annotation to the arguments to validate (if that class itself is annotated with constraints to check).

      +
    • +
    • +

      @NotNull for required arguments.

      +
    • +
    • +

      Other constraints (e.g. @Size) for generic arguments (e.g. of type String or Integer). However, consider to create custom datatypes and avoid adding too much validation logic (especially redundant in multiple places).

      +
    • +
    +
    +
  • +
+
+
+
Listing 16. BookingmanagementRestServiceImpl.java
+
+
@Validated
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+  ...
+  public BookingEto saveBooking(@Valid BookingCto booking) {
+  ...
+
+
+
+
    +
  • +

    Finally add appropriate validation constraint annotations to the fields of the ETO class.

    +
  • +
+
+
+
Listing 17. BookingCto.java
+
+
  @Valid
+  private BookingEto booking;
+
+
+
+
Listing 18. BookingEto.java
+
+
  @NotNull
+  @Future
+  private Timestamp bookingDate;
+
+
+
+

A list with all bean validation constraint annotations available for hibernate-validator can be found here. In addition it is possible to configure custom constraints. To do so, it is necessary to implement an annotation and a corresponding validator. A description can also be found in the Spring documentation or with more details in the hibernate documentation.

+
+
+ + + + + +
+ + +Bean Validation in Wildfly >v8: Wildfly v8 is the first version of Wildfly implementing the JEE7 specification. It comes with bean validation based on hibernate-validator out of the box. In case someone is running Spring in Wildfly for whatever reasons, the spring based annotation @Validated would duplicate bean validation at runtime and thus should be omitted. +
+
+
+
+
GUI-Integration
+
+

TODO

+
+
+
+
Cross-Field Validation
+
+

BV has poor support for this. Best practice is to create and use beans for ranges, etc. that solve this. A bean for a range could look like so:

+
+
+
+
public class Range<V extends Comparable<V>> {
+
+  private V min;
+  private V max;
+
+  public Range(V min, V max) {
+
+    super();
+    if ((min != null) && (max != null)) {
+      int delta = min.compareTo(max);
+      if (delta > 0) {
+        throw new ValueOutOfRangeException(null, min, min, max);
+      }
+    }
+    this.min = min;
+    this.max = max;
+  }
+
+  public V getMin() ...
+  public V getMax() ...
+
+
+
+
+
+

1.66. Stateful Validation

+
+

For complex and stateful business validations we do not use BV (possible with groups and context, etc.) but follow KISS and just implement this on the server in a straight forward manner. +An example is the deletion of a table in the example application. Here the state of the table must be checked first:

+
+
+

BookingmanagementImpl.java

+
+
+
+
  private void sendConfirmationEmails(BookingEntity booking) {
+
+    if (!booking.getInvitedGuests().isEmpty()) {
+      for (InvitedGuestEntity guest : booking.getInvitedGuests()) {
+        sendInviteEmailToGuest(guest, booking);
+      }
+    }
+
+    sendConfirmationEmailToHost(booking);
+  }
+
+
+
+

Implementing this small check with BV would be a lot more effort.

+
+
+ +
+

==Bean-Mapping

+
+
+

For decoupling, you sometimes need to create separate objects (beans) for a different view. E.g. for an external service, you will use a transfer-object instead of the persistence entity so internal changes to the entity do not implicitly change or break the service.

+
+
+

Therefore you have the need to map similar objects, which creates a copy. This also has the benefit that modifications to the copy have no side-effect on the original source object. However, to implement such mapping code by hand is very tedious and error-prone (if new properties are added to beans but not to mapping code):

+
+
+
+
public UserEto mapUser(UserEntity source) {
+  UserEto target = new UserEto();
+  target.setUsername(source.getUsername());
+  target.setEmail(source.getEmail());
+  ...
+  return target;
+}
+
+
+
+

Therefore we are using a BeanMapper for this purpose that makes our lives a lot easier. +There are several bean mapping frameworks with different approaches.

+
+
+

For a devon4j-spring application we recommend Orika, follow Spring Bean-Mapping for an introduction to Orika and Dozer in a devon4j-spring context application.

+
+
+ + + + + +
+ + +devon4j started with Dozer as framework for Spring applications and still supports it. However, we now recommend Orika (for new projects) as it is much faster (see Performance of Java Mapping Frameworks). +
+
+
+

For a Quarkus application we recommend Mapstruct, follow Quarkus Bean-Mapping for an introduction to Mapstruct in a quarkus context application.

+
+ +
+

==Lombok

+
+
+

Lombok is a library that works with an annotation processor and will generate code for you to save you some time and reduce the amount of boilerplate code in your project. Lombok can generate getter and setter, equals methods, automate your logging variables for your classes, and more. Follow the list of all the features provided by Lombok to get an overview.

+
+
+
+

1.67. Lombok Dependency

+
+

To get access to the Lombok library just add the following dependency to the POM.xml.

+
+
+

The Lombok dependency:

+
+
+
+
<dependency>
+	<groupId>org.projectlombok</groupId>
+	<artifactId>lombok</artifactId>
+	<version>1.18.20</version>
+</dependency>
+
+
+
+

To get Lombok working with your current IDE you should also install the Lombok addon. Follow the Eclipse installation guide, there are also guides for other supported IDEs.

+
+
+
+

1.68. Lombok with Mapstruct

+
+

MapStruct takes advantage of generated getters, setters, and constructors from Lombok and uses them to +generate the mapper implementations. Lombok is also an annotation processor and since version 1.18.14 both frameworks are working together. Just add the lombok-mapstruct-binding to your POM.xml.

+
+
+

The Lombok annotation processor and the lombok-mapstruct-binding

+
+
+
+
<dependency>
+	<groupId>org.projectlombok</groupId>
+	<artifactId>lombok-mapstruct-binding</artifactId>
+	<version>0.2.0</version>
+</dependency>
+
+<plugin>
+	<groupId>org.apache.maven.plugins</groupId>
+	<artifactId>maven-compiler-plugin</artifactId>
+	<version>3.8.1</version>
+	<configuration>
+		<source>1.8</source>
+		<target>1.8</target>
+		<annotationProcessorPaths>
+			<path>
+				<groupId>org.projectlombok</groupId>
+				<artifactId>lombok</artifactId>
+				<version>1.18.4</version>
+			</path>
+			<path>
+				<groupId>org.projectlombok</groupId>
+				<artifactId>lombok-mapstruct-binding</artifactId>
+				<version>0.2.0</version>
+			</path>
+		</annotationProcessorPaths>
+	</configuration>
+</plugin>
+
+
+
+

In our quarkus reference project you can get a look into the usage of both frameworks.

+
+
+
+

1.69. Lombok Usage

+
+

Lombok can be used like any other annotation processor and will be shown in the simple example below to generate getter and setter for a Product Entity.

+
+
+
+
@Getter
+@Setter
+public class Product{
+
+    private String title;
+    private String description;
+    private BigDecimal price;
+}
+
+
+
+

For advanced Lombok usage follow the Baeldung Lombok guide or just read the Lombok javadoc

+
+
+ +
+

==OpenAPI

+
+
+

The OpenAPI Specification (OAS) defines a standard for describing RESTful web services in a machine- and human-readable format. OpenAPI allows REST APIs to be defined in a uniform manner. +Technically, an OpenAPI document is written in YAML or JSON format. The specification defines the structure of a REST API by describing attributes such as path information, response codes, and return types. Some examples can be found here. +Apart from documenting the API, this schema then also acts as a contract between provider and consumers, guaranteeing interoperability between various technologies.

+
+
+

OpenAPI is often used in combination with Swagger. Swagger is a set of tools built around OpenAPI that help developers to design and document their REST APIs. +The most common tool is the Swagger UI, which uses the OpenAPI specification to create a graphical interface of the REST API that you can also interact with. Check out the Swagger online editor to get a feeling for it.

+
+
+ + + + + +
+ + +
+

Swagger and OpenAPI: Swagger is a former specification, based on which the OpenAPI was created. Swagger 2.0 is still commonly used for describing APIs. OpenAPI is an open-source collaboration and it started from version 3.0.0 (semver).

+
+
+
+
+

There are many tools that work with OpenAPI: code generators, documentation tools, validators etc.

+
+
+
+

1.70. OpenAPI generation

+
+

There are several extensions you can use in your project to automatically generate the OpenAPI specifications and Swagger UI from your REST API (code-first approach). devon4j recommends the following two extensions/plugins to use:

+
+
+
    +
  • +

    Smallrye OpenAPI extension

    +
  • +
  • +

    ServicedocGen maven plugin

    +
  • +
+
+
+
Smallrye OpenAPI
+
+

Quarkus provides OpenAPI support through Smallrye OpenAPI extension:

+
+
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-smallrye-openapi</artifactId>
+</dependency>
+
+
+
+

After adding the extension to your project, you can access the Swagger UI by navigating to /q/swagger-ui.

+
+
+

The OpenAPI specification can be accessed by requesting /q/openapi.

+
+
+

Smallrye OpenAPI is compliant with MicroProfile OpenAPI. You can add MicroProfile annotations to further describe your REST endpoints and extend the OpenAPI documentation. +More information for this can be found here or here.

+
+
+ + + + + +
+ + +
+

Quarkus recommends using this extension and you can document your APIs in great detail by using the MicroProfile annotations. The downside to this is that using these annotations will blow up your code and you will have some duplicate information in it. +If you don’t want to specify the REST API again with all this annotation based information, we also recommend taking a look at the ServicedocGen Maven plugin for your Quarkus applications when implementing JAX-RS APIs.

+
+
+
+
+
+
ServicedocGen Maven Plugin
+
+

The ServicedocGen maven plugin can be used within both Spring and Quarkus applications. +It works a bit differently than the Smallrye extensions mentioned above. The plugin analyzes the REST API and its JavaDoc and then generates the OpenAPI specification and the Swagger UI as static files. So no Swagger or MicroProfile annotations have to be added.

+
+
+

The plugin can be configured in the pom.xml file of your application as follows:

+
+
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>org.codehaus.mojo</groupId>
+      <artifactId>servicedocgen-maven-plugin</artifactId>
+      <version>1.0.0</version>
+      <executions>
+        <execution>
+          <goals>
+            <goal>generate</goal>
+          </goals>
+        </execution>
+      </executions>
+      <configuration>
+        <descriptor>
+          <info>
+            <title>...</title>
+            <description>...</description>
+          </info>
+          <host>...</host>
+          <port>...</port>
+          <basePath>...</basePath>
+          <schemes>
+            <scheme>...</scheme>
+          </schemes>
+        </descriptor>
+      </configuration>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+

In the configuration section you have to define additional information to generate the OpenAPI specification correctly. An example can be found in our Quarkus reference application. +When building the application, an OpenApi.yaml and a SwaggerUI.html file are created in the /target/site folder. To make the Swagger UI available in the browser, the file must be served by some servlet.

+
+
+ +
+

==Spring

+
+
+

Spring is the most famous and established Java framework. +It is fully supported by devonfw as an option and alternative to quarkus.

+
+
+
+
+

1.71. Guide to the Reader

+
+

Dependent on the intention you are reading this document, you might be most interested in the following chapters:

+
+
+
    +
  • +

    If you are not yet familiar with Spring, you may be interested in pros and cons of Spring. Also take a look at the official Spring website.

    +
  • +
  • +

    If you already have experience developing with Spring but are new to devon4j, take a look at devon4j’s recommendations on general best practices. Check out the chapters on architecture design, project structuring and coding conventions. Follow the referenced links to go deeper into a topic.

    +
  • +
  • +

    If you have already developed with devon4j and Spring and need more information on a specific topic, check out the devon4j guides for Spring. If you don’t find what you are looking for there, check out the general section. devon4j uses general solutions for Java, so solutions for both Spring and Quarkus are documented there.

    +
  • +
  • +

    If you want to get started or create your first Spring application using devon4j, check out the guide about creating a new application or the Jump the Queue and My Thai Star reference applications.

    +
  • +
+
+
+
+

1.72. Pros

+
+

Spring offers the following benefits:

+
+
+
    +
  • +

    highly flexible
    +Spring is famous for its great flexibility. You can customize and integrate nearly everything.

    +
  • +
  • +

    well established
    +While JEE application servers including very expensive commercial products turned out to be a dead-end, spring has guided projects through the changing trends of IT throughout decades. It may be the framework with the longest history track and popularity. As a result you can easily find developers, experts, books, articles, etc. about spring.

    +
  • +
  • +

    non-invasive and not biased
    +Spring became famous for its non-invasive coding based on patterns instead of hard dependencies. It gives you a lot of freedom and avoids tight coupling of your (business) code.

    +
  • +
+
+
+

See Why Spring? for details.

+
+
+
+

1.73. Cons

+
+

Spring has the following drawbacks:

+
+
+
    +
  • +

    history and legacy
    +Due to the pro of its long established history, spring also carries a lot of legacy. As a result there are many ways to do the same thing while some options may be discouraged. Developers need some guidance (e.g. via devon4j) as they may enter pitfalls and dead-ends when choosing the first solution they find on google or stackoverflow.

    +
  • +
  • +

    lost lead in cloud-native
    +While for the last decades spring was leading innovation in Java app development, it seems that with the latest trends and shift such as cloud-native, they have been overtaken by frameworks like quarkus. However, spring is trying to catch up with spring-native.

    +
  • +
+
+
+
+

1.74. Spring-Boot

+
+

Spring-boot is a project and initiative within the spring-ecosystem that brought a lot of innovation and simplification into app development on top of spring. +As of today we typically use the terms spring and spring-boot rather synonymously as we always use spring together with spring-boot.

+
+
+
+

1.75. Spring-Native

+
+

Spring-native adds cloud-native support to the spring ecosystem and allows to build a spring app as cloud-native image via GraalVM. +You may also consider Quarkus if you are interested in building cloud-native images. For a comparison of both Spring Native and Quarkus, you may refer to our Spring Native vs. Quarkus guide.

+
+ +
+

==Components

+
+
+

Following separation-of-concerns we divide an application into components using our package-conventions and project structure. +As described by the architecture each component is divided into layers as described in the project structure. +Please note that a component will only have the required layers. +So a component may have any number from one to all layers.

+
+
+

1.75.1. General Component

+
+

Cross-cutting aspects belong to the implicit component general. It contains technical configurations and very general code that is not business specific. Such code shall not have any dependencies to other components and therefore business related code.

+
+
+
+

1.75.2. Business Component

+
+

The business-architecture defines the business components with their allowed dependencies. A small application (microservice) may just have one component and no dependencies making it simple while the same architecture can scale up to large and complex applications (from bigger microservice up to modulith). +Tailoring a business domain into applications and applications into components is a tricky task that needs the skills of an experienced architect. +Also, the tailoring should follow the business and not be split by technical reasons or only by size. +Size is only an indicator but not a driver of tailoring. +Whatever hypes like microservices are telling you, never get misled in this regard: +If your system grows and reaches MAX+1 lines of code, it is not the right motivation to split it into two microservices of ~MAX/2 lines of code - such approaches will waste huge amounts of money and lead to chaos.

+
+
+
+

1.75.3. App Component

+
+

Only in case you need cross-cutting code that aggregates another component you may introduce the component app. +It is allowed to depend on all other components but no other component may depend on it. +With the modularity and flexibility of spring you typically do not need this. +However, when you need to have a class that registers all services or component-facades using direct code dependencies, you can introduce this component.

+
+
+
+

1.75.4. Component Example

+
+

The following class diagram illustrates an example of the business component Staffmanagement:

+
+
+
+logic layer component pattern +
+
+
+

In this scheme, you can see the structure and flow from the service-layer (REST service call) via the logic-layer to the dataaccess-layer (and back).

+
+ +
+

==Classic project structure

+
+
+

In this section we describe the classic project structure as initially proposed for Java in devonfw. +It is still valid and fully supported. +However, if you want to start a new project, please consider using the modern structure.

+
+
+
+

1.75.5. Modules

+
+

The structure of a devon4j application is divided into the following modules:

+
+
+
    +
  • +

    api: module containing the API of your application. The API contains the required artifacts to interact with your application via remote services. This can be REST service interfaces, transfer-objects with their interfaces and datatypes but also OpenAPI or gRPC contracts.

    +
  • +
  • +

    core: maven module containing the core of the application with service implementation, as well as entire logic layer and dataaccess layer.

    +
  • +
  • +

    batch: optional module for batch layer

    +
  • +
  • +

    server: module that bundles the entire app (core with optional batch) typically as a bootified WAR file.

    +
  • +
+
+
+
+

1.75.6. Deployment

+
+
+
+

Make jar not war

+
+
+
+— Josh Long +
+
+
+

First of all it is important to understand that the above defined modules aim to make api, core, and batch reusable artifacts, that can be used as a regular maven dependency. +On the other hand to build and deploy your application you want a final artifact that is containing all required 3rd party libraries. +This artifact is not reusable as a maven dependency. +That is exactly the purpose of the server module to build and package this final deployment artifact. +By default we first build a regular WAR file with maven in your server/target directory (*-server-«version».war) and in a second step create a bootified WAR out of this (*-server-bootified.war). +The bootified WAR file can then be started standalone (java -jar «filename».war). +However, it is also possible to deploy the same WAR file to a servlet container like tomcat or jetty. +As application servers and externally provided servlet containers are not recommended anymore for various reasons (see JEE), you may also want to create a bootified JAR file instead. +All you need to do in that case is to change the packaging in your server/pom.xml from war to jar.

+
+
+
+

1.75.7. Package Structure

+
+

The package structure of your code inside src/main/java (and src/test/java) of your modules is described in our coding conventions in the sections packages. A full mapping of the architecture and the different code elements to the packaging is described in the following section.

+
+
+
+

1.75.8. Layers

+
+

The package structure of your code inside src/main/java (and src/test/java) of your app is described in our coding conventions in the sections packages. +The following table describes our classic approach for packaging and layering:

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 12. Traditional generic devon4j layers
Layer«layer»

service

service

logic

logic

data-access

dataaccess

batch (optional)

batch

client (optional)

client

common

common

+
+
+

1.75.9. Architecture Mapping

+
+

In order to help you to map the architecture, packaging, layering, etc. to the code and see where different code elements should be placed, +we provide this architecture mapping:

+
+
+
+
«root»
+├──.«component»
+|  ├──.common
+|  |  ├──.api[.«detail»]
+|  |  |  ├──.datatype
+|  |  |  |  └──.«Datatype» (api)
+|  |  |  └──.«BusinessObject» (api)
+|  |  └──.impl[.«detail»]
+|  |     ├──.«Aspect»ConfigProperties (core)
+|  |     ├──.«Datatype»JsonSerializer (core)
+|  |     └──.«Datatype»JsonDeserializer (core)
+|  ├──.dataaccess
+|  |  ├──.api[.«detail»]
+|  |  |  ├──.repo
+|  |  |  |  └──.«BusinessObject»Repository (core)
+|  |  |  ├──.dao (core) [alternative to repo]
+|  |  |  |  └──.«BusinessObject»Dao (core) [alternative to Repository]
+|  |  |  └──.«BusinessObject»Entity (core)
+|  |  └──.impl[.«detail»]
+|  |     ├──.dao (core) [alternative to repo]
+|  |     |  └──.«BusinessObject»DaoImpl (core) [alternative to Repository]
+|  |     └──.«Datatype»AttributeConverter (core)
+|  ├──.logic
+|  |  ├──.api
+|  |  |  ├──.[«detail».]to
+|  |  |  |   ├──.«MyCustom»«To (api)
+|  |  |  |   ├──.«DataStructure»Embeddable (api)
+|  |  |  |   ├──.«BusinessObject»Eto (api)
+|  |  |  |   └──.«BusinessObject»«Subset»Cto (api)
+|  |  |  ├──.[«detail».]usecase
+|  |  |  |   ├──.UcFind«BusinessObject» (core)
+|  |  |  |   ├──.UcManage«BusinessObject» (core)
+|  |  |  |   └──.Uc«Operation»«BusinessObject» (core)
+|  |  |  └──.«Component» (core)
+|  |  ├──.base
+|  |  |  └──.[«detail».]usecase
+|  |  |     └──.Abstract«BusinessObject»Uc (core)
+|  |  └──.impl
+|  |     ├──.[«detail».]usecase
+|  |     |   ├──.UcFind«BusinessObject»Impl (core)
+|  |     |   ├──.UcManage«BusinessObject»Impl (core)
+|  |     |   └──.Uc«Operation»«BusinessObject»Impl (core)
+|  |     └──.«Component»Impl (core)
+|  └──.service
+|     ├──.api[.«detail»]
+|     |  ├──.rest
+|     |  |  └──.«Component»RestService (api)
+|     |  └──.ws
+|     |     └──.«Component»WebService (api)
+|     └──.impl[.«detail»]
+|        ├──.jms
+|        |  └──.«BusinessObject»JmsListener (core)
+|        ├──.rest
+|        |  └──.«Component»RestServiceImpl (core)
+|        └──.ws
+|           └──.«Component»WebServiceImpl (core)
+├──.general
+│  ├──.common
+│  |  ├──.api
+|  |  |  ├──.to
+|  |  |  |  ├──.AbstractSearchCriteriaTo (api)
+|  |  |  └──.ApplicationEntity
+│  |  ├──.base
+|  |  |  └──.AbstractBeanMapperSupport (core)
+│  |  └──.impl
+│  |     ├──.config
+│  |     |  └──.ApplicationObjectMapperFactory (core)
+│  |     └──.security
+│  |        └──.ApplicationWebSecurityConfig (core)
+│  ├──.dataaccess
+│  |  └──.api
+|  |     └──.ApplicationPersistenceEntity (core)
+│  ├──.logic
+│  |  └──.base
+|  |     ├──.AbstractComponentFacade (core)
+|  |     ├──.AbstractLogic (core)
+|  |     └──.AbstractUc (core)
+|  └──.service
+|     └──...
+└──.SpringBootApp (core)
+
+
+
+
+
+
+

1.76. Layers

+ +
+

==Client Layer

+
+
+

There are various technical approaches to building GUI clients. The devonfw proposes rich clients that connect to the server via data-oriented services (e.g. using REST with JSON). +In general, we have to distinguish among the following types of clients:

+
+
+
    +
  • +

    web clients

    +
  • +
  • +

    native desktop clients

    +
  • +
  • +

    (native) mobile clients

    +
  • +
+
+
+

Our main focus is on web-clients. In our sample application my-thai-star we offer a responsive web-client based on Angular following devon4ng that integrates seamlessly with the back ends of my-thai-star available for Java using devon4j as well as .NET/C# using devon4net. For building angular clients read the separate devon4ng guide.

+
+
+
JavaScript for Java Developers
+
+

In order to get started with client development as a Java developer we give you some hints to get started. Also if you are an experienced JavaScript developer and want to learn Java this can be helpful. First, you need to understand that the JavaScript ecosystem is as large as the Java ecosystem and developing a modern web client requires a lot of knowledge. The following table helps you as experienced developer to get an overview of the tools, configuration-files, and other related aspects from the new world to learn. Also it helps you to map concepts between the ecosystems. Please note that we list the tools recommended by devonfw here (and we know that there are alternatives not listed here such as gradle, grunt, bower, etc.).

+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 13. Aspects in JavaScript and Java ecosystem
TopicAspectJavaScriptJava

Programming

Language

TypeScript (extends JavaScript)

Java

Runtime

VM

nodejs (or web-browser)

jvm

Build- & Dependency-Management

Tool

npm or yarn

maven

Config

package.json

pom.xml

Repository

npm repo

maven central (repo search)

Build cmd

ng build or npm run build (goals are not standardized in npm)

mvn install (see lifecycle)

Test cmd

ng test

mvn test

Testing

Test-Tool

jasmine

junit

Test-Runner

karma

junit / surefire

E2E Testing

Protractor

Selenium

Code Analysis

Code Coverage

ng test --no-watch --code-coverage

JaCoCo

Development

IDE

MS VS Code or IntelliJ

Eclipse or IntelliJ

Framework

Angular (etc.)

Spring or Quarkus

+
+ +
+

==Service Layer

+
+
+

The service layer is responsible for exposing functionality made available by the logical layer to external consumers over a network via technical protocols.

+
+
+
+
Types of Services
+
+

Before you start creating your services you should consider some general design aspects:

+
+
+
    +
  • +

    Do you want to create a RPC service?

    +
  • +
  • +

    Or is your problem better addressed by messaging or eventing?

    +
  • +
  • +

    Who will consume your service?

    +
    +
      +
    • +

      Do you have one or multiple consumers?

      +
    • +
    • +

      Do web-browsers have to use your service?

      +
    • +
    • +

      Will apps from other vendors or parties have to consume your service that you can not influence if the service may have to change or be extended?

      +
    • +
    +
    +
  • +
+
+
+

For RPC a common choice is REST but there are also interesting alternatives like gRPC. We also have a guide for SOAP but this technology should rather be considered as legacy and is not recommended for new services.

+
+
+

When it comes to messaging in Java the typical answer will be JMS. However, a very promising alternative is Kafka.

+
+
+
+
Versioning
+
+

For RPC services consumed by other applications we use versioning to prevent incompatibilities between applications when deploying updates. This is done by the following conventions:

+
+
+
    +
  • +

    We define a version number and prefix it with v (e.g. v1).

    +
  • +
  • +

    If we support previous versions we use that version numbers as part of the Java package defining the service API (e.g. com.foo.application.component.service.api.v1)

    +
  • +
  • +

    We use the version number as part of the service name in the remote URL (e.g. https://application.foo.com/services/rest/component/v1/resource)

    +
  • +
  • +

    Whenever breaking changes are made to the API, create a separate version of the service and increment the version (e.g. v1 → v2). The implementations of the different versions of the service contain compatibility code and delegate to the same unversioned use-case of the logic layer whenever possible.

    +
  • +
  • +

    For maintenance and simplicity, avoid keeping more than one previous version.

    +
  • +
+
+
+
+
Interoperability
+
+

For services that are consumed by clients with different technology, interoperability is required. This is addressed by selecting the right protocol, following protocol-specific best practices and following our considerations especially simplicity.

+
+
+
+
Service Considerations
+
+

The term service is quite generic and therefore easily misunderstood. It is a unit exposing coherent functionality via a well-defined interface over a network. For the design of a service, we consider the following aspects:

+
+
+
    +
  • +

    self-contained
    +The entire API of the service shall be self-contained and have no dependencies on other parts of the application (other services, implementations, etc.).

    +
  • +
  • +

    idempotence
    +E.g. creation of the same master-data entity has no effect (no error)

    +
  • +
  • +

    loosely coupled
    +Service consumers have minimum knowledge and dependencies on the service provider.

    +
  • +
  • +

    normalized
    +Complete, no redundancy, minimal

    +
  • +
  • +

    coarse-grained
    +Service provides rather large operations (save entire entity or set of entities rather than individual attributes)

    +
  • +
  • +

    atomic
    +Process individual entities (for processing large sets of data, use a batch instead of a service)

    +
  • +
  • +

    simplicity
    +Avoid polymorphism, RPC methods with unique name per signature and no overloading, avoid attachments (consider separate download service), etc.

    +
  • +
+
+
+
+
Security
+
+

Your services are the major entry point to your application. Hence, security considerations are important here.

+
+
+

See REST Security.

+
+ +
+

==Service-Versioning

+
+
+

This guide describes the aspect and details about versioning of services

+
+
+
+
Motivation
+
+

Why versioning of services? First of all, you should only care about this topic if you really have to. Service versioning is complex and requires effort (time and budget). The best way to avoid this is to be smart in the first place when designing the service API. +Further, if you are creating services where the only consumer is e.g. the web-client that you deploy together with the consumed services then you can change your service without the overhead to create new service versions and keeping old service versions for compatibility.

+
+
+

However, if the following indicators are given you typically need to do service versioning:

+
+
+
    +
  • +

    Your service is part of a complex and distributed IT landscape

    +
  • +
  • +

    Your service requires incompatible changes

    +
  • +
  • +

    There are many consumers or there is at least one (relevant) consumer that can not be updated at the same time or is entirely out of control (unknown or totally different party/company)

    +
  • +
+
+
+

What are incompatible changes?

+
+
+
    +
  • +

    Almost any change when SOAP is used (as it changes the WSDL and breaks the contract). Therefore, we recommend to use REST instead. Then, only the following changes are critical.

    +
  • +
  • +

    A change where existing properties (attributes) have to change their name

    +
  • +
  • +

    A change where existing features (properties, operations, etc.) have to change their semantics (meaning)

    +
  • +
+
+
+

What changes do not cause incompatibilities?

+
+
+
    +
  • +

    Adding new service operations is entirely uncritical with REST.

    +
  • +
  • +

    Adding new properties is only a problem in the following cases:

    +
    +
      +
    • +

      Adding new mandatory properties to the input of a service is causing incompatibilities. This problem can be avoided by contract-design.

      +
    • +
    • +

      If a consumer is using a service to read data, modify it and then save it back via a service and a property is added to the data, then this property might be lost. This is not a problem with dynamic languages such as JavaScript/TypeScript but with strictly typed languages such as Java. In Java you will typically use structured typed transfer-objects (and not Map<String, Object>) so new properties that have been added but are not known to the consumer can not be mapped to the transfer-object and will be lost. When saving that transfer-object later the property will be gone. It might be impossible to determine the difference between a lost property and a property that was removed on purpose. This is a general problem that you need to be aware of and that you have to consider by your design in such situations.

      +
    • +
    +
    +
  • +
+
+
+

Even if you hit an indicator for incompatible changes you can still think about adding a new service operation instead of changing an existing one (and deprecating the old one). Be creative to simplify and avoid extra effort.

+
+
+
+
Procedure
+
+

The procedure when rolling out incompatible changes is illustrated by the following example:

+
+
+
+
+------+  +------+
+| App1 |  | App2 |
++---+--+  +--+---+
+    |        |
+    +---+----+
+        |
++-------+--------+
+|      Sv1       |
+|                |
+|      App3      |
++----------------+
+
+
+
+

So, here we see a simple example where App3 provides a Service S in Version v1 that is consumed both by App1 and App2.

+
+
+

Now for some reason the service S has to be changed in an incompatible way to make it future-proof for demands. However, upgrading all 3 applications at the same time is not possible in this case for whatever reason. Therefore, service versioning is applied for the changes of S.

+
+
+
+
+------+  +------+
+| App1 |  | App2 |
++---+--+  +--+---+
+    |        |
+    +--------+
+    |
++---+------------+
+|  Sv1  |  Sv2   |
+|                |
+|      App3*     |
++----------------+
+
+
+
+

Now, App3 has been upgraded and the new release was deployed. A new version v2 of S has been added while v1 is still kept for compatibility reasons and that version is still used by App1 and App2.

+
+
+
+
+------+  +------+
+| App1 |  | App2*|
++---+--+  +--+---+
+    |        |
+    |        |
+    |        |
++---+--------+---+
+|  Sv1  |  Sv2   |
+|                |
+|      App3      |
++----------------+
+
+
+
+

Now, App2 has been updated and deployed and it is using the new version v2 of S.

+
+
+
+
+------+  +------+
+| App1*|  | App2 |
++---+--+  +--+---+
+    |        |
+    +--------+
+             |
++------------+---+
+|  Sv1  |  Sv2   |
+|                |
+|      App3      |
++----------------+
+
+
+
+

Now, also App1 has been updated and deployed and it is using the new version v2 of S. The version v1 of S is not used anymore. This can be verified via logging and monitoring.

+
+
+
+
+------+  +------+
+| App1 |  | App2 |
++---+--+  +--+---+
+    |        |
+    +--------+
+             |
++------------+---+
+|          Sv2   |
+|                |
+|      App3*     |
++----------------+
+
+
+
+

Finally, version v1 of the service S was removed from App3 and the new release has been deployed.

+
+
+
+
Versioning Schema
+
+

In general anything can be used to differentiate versions of a service. Possibilities are:

+
+
+
    +
  • +

    Code names (e.g. Strawberry, Blueberry, Grapefruit)

    +
  • +
  • +

    Timestamps (YYYYMMDD-HHmmSS)

    +
  • +
  • +

    Sequential version numbers (e.g. v1, v2, v3)

    +
  • +
  • +

    Composed version numbers (e.g. 1.0.48-pre-alpha-3-20171231-235959-Strawberry)

    +
  • +
+
+
+

As we are following the KISS principle (see key principles) we propose to use sequential version numbers. These are short, clear, and easy while still allowing to see what version is after another one. Especially composed version numbers (even 1.1 vs. 2.0) lead to decisions and discussions that easily waste more time than adding value. It is still very easy to maintain an Excel sheet or release-notes document that is explaining the changes for each version (v1, v2, v3) of a particular service.

+
+
+

We suggest to always add the version schema to the service URL to be prepared for service versioning even if service versioning is not (yet) actively used. For simplicity it is explicitly stated that you may even do incompatible changes to the current version (typically v1) of your service if you can update the according consumers within the same deployment.

+
+
+
+
Practice
+
+

So assuming you know that you have to do service versioning, the question is how to do it practically in the code. +The approach for your devon4j project in case of code-first should be as described below:

+
+
+
    +
  • +

    Determine which types in the code need to be changed. It is likely to be the API and implementation of the according service but it may also impact transfer objects and potentially even datatypes.

    +
  • +
  • +

    Create new packages for all these concerned types containing the current version number (e.g. v1).

    +
  • +
  • +

    Copy all these types to that new packages.

    +
  • +
  • +

    Rename these copies so they carry the version number as suffix (e.g. V1).

    +
  • +
  • +

    Increase the version of the service in the unversioned package (e.g. from v1 to v2).

    +
  • +
  • +

    Now you have two versions of the same service (e.g. v1 and v2) but so far they behave exactly the same.

    +
  • +
  • +

    You start with your actual changes and modify the original files that have been copied before.

    +
  • +
  • +

    You will also ensure the links (import statements) of the copied types point to the copies with the version number

    +
  • +
  • +

    This will cause incompatibilities (and compile errors) in the copied service. Therefore, you need to fix that service implementation to map from the old API to the new API and behavior. In some cases, this may be easy (e.g. mapping x.y.z.v1.FooTo to x.y.z.FooTo using bean-mapping with some custom mapping for the incompatible changes), in other cases this can get very complex. Be aware of this complexity from the start before you make your decision about service versioning.

    +
  • +
  • +

    As far as possible this mapping should be done in the service-layer, not to pollute your business code in the core-layer with versioning-aspects. If there is no way to handle it in the service layer, e.g. you need some data from the persistence-layer, implement the "mapping" in the core-layer then, but don’t forget to remove this code, when removing the old service version.

    +
  • +
  • +

    Finally, ensure that both the old service behaves as before as well as the new service works as planned.

    +
  • +
+
+
+
Modularization
+
+

For modularization, we also follow the KISS principle (see key principles): +we suggest to have one api module per application that will contain the most recent version of your service and get released with every release-version of the application. The compatibility code with the versioned packages will be added to the core module and therefore is not exposed via the api module (because it has already been exposed in the previous release of the app). This way, you can always determine for sure which version of a service is used by another application just by its maven dependencies.

+
+
+

The KISS approach with only a single module that may contain multiple services (e.g. one for each business component) will cause problems when you want to have mixed usages of service versions: You can not use an old version of one service and a new version of another service from the same APP as then you would need to have its API module twice as a dependency on different versions, which is not possible. However, to avoid complicated overhead we always suggest to follow this easy approach. Only if you come to the point that you really need this complexity you can still solve it (even afterwards by publishing another maven artefact). As we are all on our way to build more but smaller applications (SOA, microservices, etc.) we should always start simple and only add complexity when really needed.

+
+
+

The following example gives an idea of the structure:

+
+
+
+
/«my-app»
+├──/api
+|  └──/src/main/java/
+|     └──/«rootpackage»/«application»/«component»
+|        ├──/common/api/to
+|        |  └──FooTo
+|        └──/service/api/rest
+|           └──FooRestService
+└──/core
+   └──/src/main/java/
+      └──«rootpackage»/«application»/«component»
+         ├──/common/api/to/v1
+         |  └──FooToV1
+         └──/service
+            ├──/api/rest/v1
+            |  └──FooRestServiceV1
+            └──impl/rest
+               ├──/v1
+               |  └── FooRestServiceImplV1
+               └──FooRestServiceImpl
+
+
+
+ +
+

==Logic Layer

+
+
+

The logic layer is the heart of the application and contains the main business logic. +According to our business architecture, we divide an application into components. +For each component, the logic layer defines different use-cases. Another approach is to define a component-facade, which we do not recommend for future applications. Especially for Quarkus applications, we want to simplify things and highly suggest omitting component-facade completely and using use-cases only. +It is very important that you follow the links to understand the concept of use-case in order to properly implement your business logic.

+
+
+
+
+
Responsibility
+
+

The logic layer is responsible to implement the business logic according to the specified functional demands and requirements. +Therefore, it creates the actual value of the application. The logic layer is responsible for invoking business logic in external systems. +The following additional aspects are also included in its responsibility:

+
+
+ +
+
+
+
Security
+
+

The logic layer is the heart of the application. It is also responsible for authorization and hence security is important in this current case. Every method exposed in an interface needs to be annotated with an authorization check, stating what role(s) a caller must provide in order to be allowed to make the call. The authorization concept is described here.

+
+
+
Direct Object References
+
+

Insecure Direct Object References are a security threat. This simply gives you two options:

+
+
+
    +
  • +

    avoid direct object references

    +
  • +
  • +

    ensure that direct object references are secure

    +
  • +
+
+
+

Especially when using REST, direct object references via technical IDs are common sense. This implies that you have a proper authorization in place. This is especially tricky when your authorization does not only rely on the type of the data and according to static permissions but also on the data itself. Vulnerabilities for this threat can easily happen by design flaws and inadvertence. Here is an example from our sample application:

+
+
+

We have a generic use-case to manage BLOBs. In the first place, it makes sense to write a generic REST service to load and save these BLOBs. However, the permission to read or even update such BLOB depends on the business object hosting the BLOB. Therefore, such a generic REST service would open the door for this OWASP A4 vulnerability. To solve this in a secure way, you need individual services for each hosting business object to manage the linked BLOB and have to check permissions based on the parent business object. In this example the ID of the BLOB would be the direct object reference and the ID of the business object (and a BLOB property indicator) would be the indirect object reference.

+
+ +
+

==Component Facade

+
+
+ + + + + +
+ + +Our recommended approach for implementing the logic layer is use-cases +
+
+
+

For each component of the application, the logic layer defines a component facade. +This is an interface defining all business operations of the component. +It carries the name of the component («Component») and has an implementation named «Component»Impl (see implementation).

+
+
+
+
API
+
+

The component facade interface defines the logic API of the component and has to be business oriented. +This means that all parameters and return types of all methods from this API have to be business transfer-objects, datatypes (String, Integer, MyCustomerNumber, etc.), or collections of these. +The API may also only access objects of other business components listed in the (transitive) dependencies of the business-architecture.

+
+
+

Here is an example how such an API may look like:

+
+
+
+
public interface Bookingmanagement {
+
+  BookingEto findBooking(Long id);
+
+  BookingCto findBookingCto(Long id);
+
+  Page<BookingEto> findBookingEtos(BookingSearchCriteriaTo criteria);
+
+  void approveBooking(BookingEto booking);
+
+}
+
+
+
+
+
Implementation
+
+

The implementation of an interface from the logic layer (a component facade or a use-case) carries the name of that interface with the suffix Impl and is annotated with @Named. +An implementation typically needs access to the persistent data. +This is done by injecting the corresponding repository (or DAO). +According to data-sovereignty, only repositories of the same business component may be accessed directly. +For accessing data from other components the implementation has to use the corresponding API of the logic layer (the component facade). Further, it shall not expose persistent entities from the domain layer and has to map them to transfer objects using the bean-mapper.

+
+
+
+
@Named
+@Transactional
+public class BookingmanagementImpl extends AbstractComponentFacade implements Bookingmanagement {
+
+  @Inject
+  private BookingRepository bookingRepository;
+
+  @Override
+  public BookingEto findBooking(Long id) {
+
+    LOG.debug("Get Booking with id {} from database.", id);
+    BookingEntity entity = this.bookingRepository.findOne(id);
+    return getBeanMapper().map(entity, BookingEto.class);
+  }
+}
+
+
+
+

As you can see, entities (BookingEntity) are mapped to corresponding ETOs (BookingEto). +Further details about this can be found in bean-mapping.

+
+ +
+

==UseCase +A use-case is a small unit of the logic layer responsible for an operation on a particular entity (business object). +We leave it up to you to decide whether you want to define an interface (API) for each use-case or provide an implementation directly.

+
+
+

Following our architecture-mapping (for classic and modern project), use-cases are named Uc«Operation»«BusinessObject»[Impl]. The prefix Uc stands for use-case and allows to easily find and identify them in your IDE. The «Operation» stands for a verb that is operated on the entity identified by «BusinessObject». +For CRUD we use the standard operations Find and Manage that can be generated by CobiGen. This also separates read and write operations (e.g. if you want to do CQRS, or to configure read-only transactions for read operations).

+
+
+

In our example, we choose to define an interface for each use-case. We also use *To to refer to any type of transfer object. Please follow our guide to understand more about different types of transfer object e.g. Eto, Dto, Cto

+
+
+
+
Find
+
+

The UcFind«BusinessObject» defines all read operations to retrieve and search the «BusinessObject». +Here is an example:

+
+
+
+
public interface UcFindBooking {
+  //*To = Eto, Dto or Cto
+  Booking*To findBooking(Long id);
+}
+
+
+
+
+
Manage
+
+

The UcManage«BusinessObject» defines all CRUD write operations (create, update and delete) for the «BusinessObject». +Here is an example:

+
+
+
+
public interface UcManageBooking {
+
+  //*To = Eto, Dto or Cto
+  Booking*To saveBooking(Booking*To booking);
+
+  void deleteBooking(Long id);
+
+}
+
+
+
+
+
Custom
+
+

Any other non CRUD operation Uc«Operation»«BusinessObject» uses any other custom verb for «Operation». +Typically, such custom use-cases only define a single method. +Here is an example:

+
+
+
+
public interface UcApproveBooking {
+
+  //*To = Eto, Dto or Cto
+  void approveBooking(Booking*To booking);
+
+}
+
+
+
+
+
Implementation
+
+

The implementation should carry its own name and the suffix Impl and is annotated with @Named and @ApplicationScoped. It will need access to the persistent data which is done by injecting the corresponding repository (or DAO). Furthermore, it shall not expose persistent entities from the data access layer and has to map them to transfer objects using the bean-mapper. Please refer to our bean mapping, transfer object and dependency injection documentation for more information. +Here is an example:

+
+
+
+
@ApplicationScoped
+@Named
+public class UcManageBookingImpl implements UcManageBooking {
+
+  @Inject
+  private BookingRepository bookingRepository;
+
+  @Override
+  public void deleteBooking(Long id) {
+
+    LOG.debug("Delete Booking with id {} from database.", id);
+    this.bookingRepository.deleteById(id);
+  }
+}
+
+
+
+

The use-cases can then be injected directly into the service.

+
+
+
+
@Named("BookingmanagementRestService")
+@Validated
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+
+  @Inject
+  private UcFindBooking ucFindBooking;
+
+  @Inject
+  private UcManageBooking ucManageBooking;
+
+  @Inject
+  private UcApproveBooking ucApproveBooking;
+}
+
+
+
+
+
Internal use case
+
+

Sometimes, a component with multiple related entities and many use-cases needs to reuse business logic internally. +Of course, this can be exposed as an official use-case API but this will imply using transfer-objects (ETOs) instead of entities. In some cases, this is undesired e.g. for better performance to prevent unnecessary mapping of entire collections of entities. +In the first place, you should try to use abstract base implementations providing reusable methods the actual use-case implementations can inherit from. +If your business logic is even more complex and you have multiple aspects of business logic to share and reuse but also run into multi-inheritance issues, you may also just create use-cases that have their interface located in the impl scope package right next to the implementation (or you may just skip the interface). In such a case, you may define methods that directly take or return entity objects. +To avoid confusion with regular use-cases, we recommend to add the Internal suffix to the type name leading to Uc«Operation»«BusinessObject»Internal[Impl].

+
+
+ +
+

==Data-Access Layer

+
+
+

The data-access layer is responsible for all outgoing connections to access and process data. This is mainly about accessing data from a persistent data-store. External system could also be accessed from the data-access layer if they match this definition, e.g. a mongo-db via rest services.

+
+
+

Note: In the modern project structure, this layer is replaced by the domain layer.

+
+
+
+
+
Database
+
+

You need to make your choice for a database. Options are documented here.

+
+
+

The classical approach is to use a Relational Database Management System (RDBMS). In such a case, we strongly recommend to follow our JPA Guide. Some NoSQL databases are supported by spring-data so you can consider the repository guide.

+
+
+ +
+

==Batch Layer

+
+
+

We understand batch processing as a bulk-oriented, non-interactive, typically long running execution of tasks. For simplicity, we use the term "batch" or "batch job" for such tasks in the following documentation.

+
+
+

devonfw uses Spring Batch as a batch framework.

+
+
+

This guide explains how Spring Batch is used in devonfw applications. It focuses on aspects which are special to devonfw. If you want to learn about spring-batch you should adhere to Spring's reference documentation.

+
+
+

There is an example of a simple batch implementation in the my-thai-star batch module.

+
+
+

In this chapter, we will describe the overall architecture (especially concerning layering) and how to administer batches.

+
+
+
+
Layering
+
+

Batches are implemented in the batch layer. The batch layer is responsible for batch processes, whereas the business logic is implemented in the logic layer. Compared to the service layer, you may understand the batch layer just as a different way of accessing the business logic. +From a component point of view, each batch is implemented as a subcomponent in the corresponding business component. +The business component is defined by the business architecture.

+
+
+

Let’s make an example for that. The sample application implements a batch for exporting ingredients. This ingredientExportJob belongs to the dishmanagement business component. +So the ingredientExportJob is implemented in the following package:

+
+
+
+
<basepackage>.dishmanagement.batch.impl.*
+
+
+
+

Batches should invoke use cases in the logic layer for doing their work. +Only "batch specific" technical aspects should be implemented in the batch layer.

+
+
+
+
+

Example: +For a batch, which imports product data from a CSV file, this means that all code for actually reading and parsing the CSV input file is implemented in the batch layer. +The batch calls the use case "create product" in the logic layer for actually creating the products for each line read from the CSV input file.

+
+
+
+
+
Directly accessing data access layer
+
+

In practice, it is not always appropriate to create use cases for every bit of work a batch should do. Instead, the data access layer can be used directly. +An example for that is a typical batch for data retention which deletes out-of-time data. +Often, deleting out-dated data is done by invoking a single SQL statement. It is appropriate to implement that SQL in a Repository or DAO method and call this method directly from the batch. +But be careful: this pattern is a simplification which could lead to business logic cluttered in different layers, which reduces the maintainability of your application. +It is a typical design decision you have to make when designing your specific batches.

+
+
+
+
+
Project structure and packaging
+
+

Batches will be implemented in a separate Maven module to keep the application core free of batch dependencies. The batch module includes a dependency on the application core-module to allow the reuse of the use cases, DAOs etc. +Additionally the batch module has dependencies on the required spring batch jars:

+
+
+
+
  <dependencies>
+
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>mtsj-core</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter-batch</artifactId>
+    </dependency>
+
+  </dependencies>
+
+
+
+

To allow an easy start of the batches from the command line it is advised to create a bootified jar for the batch module by adding the following to the pom.xml of the batch module:

+
+
+
+
  <build>
+    <resources>
+      <resource>
+        <directory>src/main/resources</directory>
+        <filtering>true</filtering>
+      </resource>
+    </resources>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <configuration>
+          <excludes>
+            <exclude>config/application.properties</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+      <!-- Create bootified jar for batch execution via command line.
+           Your applications spring boot app is used as main-class.
+       -->
+      <plugin>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-maven-plugin</artifactId>
+        <configuration>
+          <mainClass>com.devonfw.application.mtsj.SpringBootApp</mainClass>
+          <classifier>bootified</classifier>
+        </configuration>
+        <executions>
+          <execution>
+            <goals>
+              <goal>repackage</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+
+
+
+
Implementation
+
+

Most of the details about implementation of batches is described in the spring batch documentation. +There is nothing special about implementing batches in devonfw. You will find an easy example in my-thai-star.

+
+
+
+
Starting from command line
+
+

Devonfw advises to start batches via command line. This is most common to many ops teams and allows easy integration in existing schedulers. In general batches are started with the following command:

+
+
+
+
java -jar <app>-batch-<version>-bootified.jar --spring.main.web-application-type=none --spring.batch.job.enabled=true --spring.batch.job.names=<myJob> <params>
+
+
+ ++++ + + + + + + + + + + + + + + + + + + + + +
ParameterExplanation

--spring.main.web-application-type=none

This disables the web app (e.g. Tomcat)

--spring.batch.job.names=<myJob>

This specifies the name of the job to run. If you leave this out, ALL jobs will be executed, which probably does not make too much sense.

<params>

(Optional) additional parameters which are passed to your job

+
+

This will launch your normal spring boot app, disables the web application part and runs the designated job via Spring Boot's org.springframework.boot.autoconfigure.batch.JobLauncherCommandLineRunner.

+
+
+
+
Scheduling
+
+

In the real world, scheduling of batches is not as simple as it might first look.

+
+
+
    +
  • +

    Multiple batches have to be executed in order to achieve complex tasks. If one of those batches fails the further execution has to be stopped and operations should be notified for example.

    +
  • +
  • +

    Input files or those created by batches have to be copied from one node to another.

    +
  • +
  • +

    Scheduling batch executing could get complex easily (quarterly jobs, run job on first workday of a month, …​)

    +
  • +
+
+
+

For devonfw we propose the batches themselves should not mess around with details of scheduling. +Likewise your application should not do so. This complexity should be externalized to a dedicated batch administration service or scheduler. +This service could be a complex product or a simple tool like cron. We propose Rundeck as an open source job scheduler.

+
+
+

This gives full control to operations to choose the solution which fits best into existing administration procedures.

+
+
+
+
Handling restarts
+
+

If you start a job with the same parameters set after a failed run (BatchStatus.FAILED) a restart will occur. +In many cases your batch should then not reprocess all items it processed in the previous runs. +For that you need some logic to start at the desired offset. There are different ways to implement such logic:

+
+
+
    +
  • +

    Marking processed items in the database in a dedicated column

    +
  • +
  • +

    Write all IDs of items to process in a separate table as an initialization step of your batch. You can then delete IDs of already processed items from that table during the batch execution.

    +
  • +
  • +

    Storing restart information in Spring's ExecutionContext (see below)

    +
  • +
+
+
+
Using spring batch ExecutionContext for restarts
+
+

By implementing the ItemStream interface in your ItemReader or ItemWriter you may store information about the batch progress in the ExecutionContext. You will find an example for that in the CountJob in My Thai Star.

+
+
+

Additional hint: It is important that the bean definition methods for your ItemReader/ItemWriter declare return types that implement ItemStream (and not just ItemReader or ItemWriter alone). For that the ItemStreamReader and ItemStreamWriter interfaces are provided.

+
+
+
+
+
Exit codes
+
+

Your batches should create a meaningful exit code to allow reaction to batch errors e.g. in a scheduler. +For that spring batch automatically registers an org.springframework.boot.autoconfigure.batch.JobExecutionExitCodeGenerator. To make this mechanism work, your spring boot app main class has to populate this exit code to the JVM:

+
+
+
+
@SpringBootApplication
+public class SpringBootApp {
+
+  public static void main(String[] args) {
+    if (Arrays.stream(args).anyMatch((String e) -> e.contains("--spring.batch.job.names"))) {
+      // if executing batch job, explicitly exit jvm to report error code from batch
+      System.exit(SpringApplication.exit(SpringApplication.run(SpringBootApp.class, args)));
+    } else {
+      // normal web application start
+      SpringApplication.run(SpringBootApp.class, args);
+    }
+  }
+}
+
+
+
+
+
Stop batches and manage batch status
+
+

Spring batch uses several database tables to store the status of batch executions. +Each execution may have different status. +You may use this mechanism to gracefully stop batches. +Additionally in some edge cases (batch process crashed) the execution status may be in an undesired state. +E.g. the state will be running, despite the process crashed sometime ago. +For that cases you have to change the status of the execution in the database.

+
+
+
CLI-Tool
+
+

Devonfw provides an easy-to-use CLI tool to manage the execution status of your jobs. +The tool is implemented in the devonfw module devon4j-batch-tool. It will provide a runnable jar, which may be used as follows:

+
+
+
+
List names of all previous executed jobs
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar jobs list

+
+
Stop job named 'countJob'
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar jobs stop countJob

+
+
Show help
+
+

java -D'spring.datasource.url=jdbc:h2:~/mts;AUTO_SERVER=TRUE' -jar devon4j-batch-tool.jar

+
+
+
+
+

As you can see, each invocation includes the JDBC connection string to your database. +This means that you have to make sure that the corresponding DB driver is in the classpath (the prepared jar only contains H2).

+
+
+
+
+
Authentication
+
+

Most business applications incorporate authentication and authorization. +Your spring boot application will implement some kind of security, e.g. integrated login with username+password or in many cases authentication via an existing IAM. +For security reasons your batch should also implement an authentication mechanism and obey the authorization implemented in your application (e.g. via @RolesAllowed).

+
+
+

Since there are many different authentication mechanism we cannot provide an out-of-the-box solution in devonfw, but we describe a pattern how this can be implemented in devonfw batches.

+
+
+

We suggest to implement the authentication in a Spring Batch tasklet, which runs as the first step in your batch. This tasklet will do all of the work which is required to authenticate the batch. A simple example which authenticates the batch "locally" via username and password could be implemented like this:

+
+
+
+
@Named
+public class SimpleAuthenticationTasklet implements Tasklet {
+
+  @Override
+  public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
+
+    String username = chunkContext.getStepContext().getStepExecution().getJobParameters().getString("username");
+    String password = chunkContext.getStepContext().getStepExecution().getJobParameters().getString("password");
+    Authentication authentication = new UsernamePasswordAuthenticationToken(username, password);
+
+    SecurityContextHolder.getContext().setAuthentication(authentication);
+    return RepeatStatus.FINISHED;
+  }
+
+}
+
+
+
+

The username and password have to be supplied via two cli parameters -username and -password. This implementation creates an "authenticated" Authentication and sets it in the Spring Security context. This is just for demonstration; normally you should not provide passwords via command line. The actual authentication will be done automatically via Spring Security as in your "normal" application. +If you have a more complex authentication mechanism in your application e.g. via OpenID connect just call this in the tasklet. Naturally you may read authentication parameters (e.g. secrets) from the command line or more securely from a configuration file.

+
+
+

In your Job Configuration set this tasklet as the first step:

+
+
+
+
@Configuration
+@EnableBatchProcessing
+public class BookingsExportBatchConfig {
+  @Inject
+  private JobBuilderFactory jobBuilderFactory;
+
+  @Inject
+  private StepBuilderFactory stepBuilderFactory;
+
+  @Bean
+  public Job myBatchJob() {
+    return this.jobBuilderFactory.get("myJob").start(myAuthenticationStep()).next(...).build();
+  }
+
+  @Bean
+  public Step myAuthenticationStep() {
+    return this.stepBuilderFactory.get("myAuthenticationStep").tasklet(myAuthenticatonTasklet()).build();
+  }
+
+  @Bean
+  public Tasklet myAuthenticatonTasklet() {
+    return new SimpleAuthenticationTasklet();
+  }
+...
+
+
+
+
+
Tips & tricks
+
+
Identifying job parameters
+
+

Spring uses a job's parameters to identify job executions. Parameters starting with "-" are not considered for identifying a job execution.

+
+
+
+
+
+
+

1.77. Guides

+ +
+

==Configuration

+
+
+
Internal Application Configuration
+
+

There usually is a main configuration registered with main Spring Boot App, but differing configurations to support automated test of the application can be defined using profiles (not detailed in this guide).

+
+
+
Spring Boot Application
+
+

For a complete documentation, see the Spring Boot Reference Guide.

+
+
+

With spring-boot you provide a simple main class (also called starter class) like this: +com.devonfw.mtsj.application

+
+
+
+
@SpringBootApplication(exclude = { EndpointAutoConfiguration.class })
+@EntityScan(basePackages = { "com.devonfw.mtsj.application" }, basePackageClasses = { AdvancedRevisionEntity.class })
+@EnableGlobalMethodSecurity(jsr250Enabled = true)
+@ComponentScan(basePackages = { "com.devonfw.mtsj.application.general", "com.devonfw.mtsj.application" })
+public class SpringBootApp {
+
+  /**
+   * Entry point for spring-boot based app
+   *
+   * @param args - arguments
+   */
+  public static void main(String[] args) {
+
+    SpringApplication.run(SpringBootApp.class, args);
+  }
+}
+
+
+
+

In a devonfw application this main class is always located in the <basepackage> of the application package namespace (see package-conventions). This is because a spring boot application will automatically do a classpath scan for components (spring-beans) and entities in the package where the application main class is located including all sub-packages. You can use the @ComponentScan and @EntityScan annotations to customize this behaviour.

+
+
+

If you want to map spring configuration properties into your custom code please see configuration mapping.

+
+
+
+
Standard beans configuration
+
+

For basic bean configuration we rely on spring boot using mainly configuration classes and only occasionally XML configuration files. Some key principles to understand Spring Boot auto-configuration features:

+
+
+
    +
  • +

    Spring Boot auto-configuration attempts to automatically configure your Spring application based on the jar dependencies and annotated components found in your source code.

    +
  • +
  • +

    Auto-configuration is non-invasive, at any point you can start to define your own configuration to replace specific parts of the auto-configuration by redefining your identically named bean (see also exclude attribute of @SpringBootApplication in example code above).

    +
  • +
+
+
+

Beans are configured via annotations in your java code (see dependency-injection).

+
+
+

For technical configuration you will typically write additional spring config classes annotated with @Configuration that provide bean implementations via methods annotated with @Bean. See spring @Bean documentation for further details. Like in XML you can also use @Import to make a @Configuration class include other configurations.

+
+
+

More specific configuration files (as required) reside in an adequately named subfolder of:

+
+
+

src/main/resources/app

+
+
+
+
BeanMapper Configuration
+
+

In case you are still using dozer, you will find further details in bean-mapper configuration.

+
+
+
+
Security configuration
+
+

The abstract base class BaseWebSecurityConfig should be extended to configure web application security thoroughly. +A basic and secure configuration is provided which can be overridden or extended by subclasses. +Subclasses must use the @Profile annotation to further discriminate between beans used in production and testing scenarios. See the following example:

+
+
+
Listing 19. How to extend BaseWebSecurityConfig for Production and Test
+
+
@Configuration
+@EnableWebSecurity
+@Profile(SpringProfileConstants.JUNIT)
+public class TestWebSecurityConfig extends BaseWebSecurityConfig {...}
+
+@Configuration
+@EnableWebSecurity
+@Profile(SpringProfileConstants.NOT_JUNIT)
+public class WebSecurityConfig extends BaseWebSecurityConfig {...}
+
+
+ +
+
+
WebSocket configuration
+
+

A websocket endpoint is configured within the business package as a Spring configuration class. The annotation @EnableWebSocketMessageBroker makes Spring Boot registering this endpoint.

+
+
+
+
package your.path.to.the.websocket.config;
+...
+@Configuration
+@EnableWebSocketMessageBroker
+public class WebSocketConfig extends AbstractWebSocketMessageBrokerConfigurer {
+...
+
+
+
+
+
+
External Application Configuration
+
+
application.properties files
+
+

Here is a list of common properties provided by the Spring framework.

+
+
+

For a general understanding how spring-boot is loading and bootstrapping your application.properties see spring-boot external configuration.

+
+
+

The following properties files are used in devonfw application:

+
+
+
    +
  • +

    src/main/resources/application.properties providing a default configuration - bundled and deployed with the application package. It further acts as a template to derive a tailored minimal environment-specific configuration.

    +
  • +
  • +

    src/main/resources/config/application.properties providing additional properties only used at development time (for all local deployment scenarios). This property file is excluded from all packaging.

    +
  • +
  • +

    src/test/resources/config/application.properties providing additional properties only used for testing (JUnits based on spring test).

    +
  • +
+
+
+

For other environments where the software gets deployed such as test, acceptance and production you need to provide a tailored copy of application.properties. The location depends on the deployment strategy:

+
+
+
    +
  • +

    standalone run-able Spring Boot App using embedded tomcat: config/application.properties under the installation directory of the spring boot application.

    +
  • +
  • +

    dedicated tomcat (one tomcat per app): $CATALINA_BASE/lib/config/application.properties

    +
  • +
  • +

    tomcat serving a number of apps (requires expanding the wars): $CATALINA_BASE/webapps/<app>/WEB-INF/classes/config

    +
  • +
+
+
+

In this application.properties you only define the minimum properties that are environment specific and inherit everything else from the bundled src/main/resources/application.properties. In any case, make very sure that the classloader will find the file.

+
+
+
+
Database Configuration
+
+

The configuration for spring and Hibernate is already provided by devonfw in our sample application and the application template. So you only need to worry about a few things to customize.

+
+
+Database System and Access +
+

Obviously you need to configure which type of database you want to use as well as the location and credentials to access it. The defaults are configured in application.properties that is bundled and deployed with the release of the software. The files should therefore contain the properties as in the given example:

+
+
+
+
  database.url=jdbc:postgresql://database.enterprise.com/app
+  database.user.login=appuser01
+  database.user.password=************
+  database.hibernate.dialect = org.hibernate.dialect.PostgreSQLDialect
+  database.hibernate.hbm2ddl.auto=validate
+
+
+
+

For further details about database.hibernate.hbm2ddl.auto please see here. For production and acceptance environments we use the value validate that should be set as default. In case you want to use Oracle RDBMS you can find additional hints here.

+
+
+

If your application supports multiple database types, set spring.profiles.active=XXX in src/main/resources/config/application.properties to choose the database of your choice. Also, one has to set all the active spring profiles in this application.properties and not in any of the other application.properties.

+
+
+
+Database Logging +
+

Add the following properties to application.properties to enable logging of database queries for debugging purposes.

+
+
+
+
spring.jpa.properties.hibernate.show_sql=true
+spring.jpa.properties.hibernate.use_sql_comments=true
+spring.jpa.properties.hibernate.format_sql=true
+
+
+
+
+
+
+
Security
+
+
Password Encryption
+
+

In order to support encrypted passwords in spring-boot application.properties all you need to do is to add jasypt-spring-boot as dependency in your pom.xml (please check for recent version here):

+
+
+
+
<dependency>
+  <groupId>com.github.ulisesbocchio</groupId>
+  <artifactId>jasypt-spring-boot-starter</artifactId>
+  <version>3.0.3</version>
+</dependency>
+
+
+
+

This will smoothly integrate jasypt into your spring-boot application. Read this HOWTO to learn how to encrypt and decrypt passwords using jasypt.

+
+
+

Next, we give a simple example how to encrypt and configure a secret value. +We use the algorithm PBEWITHHMACSHA512ANDAES_256 that provides strong encryption and is the default of jasypt-spring-boot-starter. +However, different algorithms can be used if preferred (e.g. PBEWITHMD5ANDTRIPLEDES).

+
+
+
+
java -cp ${M2_REPO}/org/jasypt/jasypt/1.9.3/jasypt-1.9.3.jar org.jasypt.intf.cli.JasyptPBEStringEncryptionCLI password=masterpassword algorithm=PBEWITHHMACSHA512ANDAES_256 input=secret ivGeneratorClassName=org.jasypt.iv.RandomIvGenerator
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.5+10
+
+
+
+----ARGUMENTS-------------------
+
+input: secret
+password: masterpassword
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+PoUxkNjY2juQMCyPu6ic5KJy1XfK+bX9vu2/mPj3pmcO4iydG6mhgZRZSw50z/oC
+
+
+
+

Of course the master-password (masterpassword) and the actual password to encrypt (secret) are just examples. +Please replace them with reasonable strong passwords for your environment. +Further, if you are using devonfw-ide you can make your life much easier and just type:

+
+
+
+
devon jasypt encrypt
+
+
+
+

See jasypt commandlet for details.

+
+
+

Now the entire line after the OUTPUT block is your encrypted secret. +It even contains some random salt so that multiple encryption invocations with the same parameters (ARGUMENTS) will produce a different OUTPUT.

+
+
+

The master-password can be configured on your target environment via the property jasypt.encryptor.password. As system properties given on the command-line are visible in the process list, we recommend to use a config/application.yml file only for this purpose (as we recommended to use application.properties for regular configs):

+
+
+
+
jasypt:
+    encryptor:
+        password: masterpassword
+
+
+
+

Again masterpassword is just an example that you replace with your actual master password. +Now you are able to put encrypted passwords into your application.properties and specify the algorithm.

+
+
+
+
spring.datasource.password=ENC(PoUxkNjY2juQMCyPu6ic5KJy1XfK+bX9vu2/mPj3pmcO4iydG6mhgZRZSw50z/oC)
+jasypt.encryptor.algorithm=PBEWITHHMACSHA512ANDAES_256
+
+
+
+

This application.properties file can be version controlled (GitOps) and without knowing the masterpassword nobody is able to decrypt this to get the actual secret back.

+
+
+

To prevent jasypt to throw an exception in dev or test scenarios you can simply put this in your local config (src/main/resources/config/application.properties and same for test, see above for details):

+
+
+
+
jasypt.encryptor.password=none
+
+
+ +
+

==Mapping configuration to your code

+
+
+

If you are using spring-boot as suggested by devon4j your application can be configured by application.properties file as described in configuration. +To get a single configuration option into your code for flexibility, you can use

+
+
+
+
@Value("${my.property.name}")
+private String myConfigurableField;
+
+
+
+

Now, in your application.properties you can add the property:

+
+
+
+
my.property.name=my-property-value
+
+
+
+

You may even use @Value("${my.property.name:my-default-value}") to make the property optional.

+
+
+
+
Naming conventions for configuration properties
+
+

As a best practice your configuration properties should follow these naming conventions:

+
+
+
    +
  • +

    build the property-name as a path of segments separated by the dot character (.)

    +
  • +
  • +

    segments should get more specific from left to right

    +
  • +
  • +

    a property-name should either be a leaf value or a tree node (prefix of other property-names) but never both! So never have something like foo.bar=value and foo.bar.child=value2.

    +
  • +
  • +

    start with a segment namespace unique to your context or application

    +
  • +
  • +

    a good example would be «myapp».billing.service.email.sender for the sender address of billing service emails send by «myapp».

    +
  • +
+
+
+
+
Mapping advanced configuration
+
+

However, in many scenarios you will have features that require more than just one property. +Injecting those via @Value is not leading to good code quality. +Instead we create a class with the suffix ConfigProperties containing all configuration properties for our aspect that is annotated with @ConfigurationProperties:

+
+
+
+
@ConfigurationProperties(prefix = "myapp.billing.service")
+public class BillingServiceConfigProperties {
+
+  private final Email email = new Email();
+  private final Smtp smtp = new Smtp();
+
+  public Email getEmail() { return this.email; }
+  public Smtp getSmtp() { return this.smtp; }
+
+  public static class Email {
+
+    private String sender;
+    private String subject;
+
+    public String getSender() { return this.sender; }
+    public void setSender(String sender) { this.sender = sender; }
+    public String getSubject() { return this.subject; }
+    public void setSubject(String subject) { this.subject = subject; }
+  }
+
+  public static class Smtp {
+
+    private String host;
+    private int port = 25;
+
+    public String getHost() { return this.host; }
+    public void setHost(String host) { this.host = host; }
+    public int getPort() { return this.port; }
+    public void setPort(int port) { this.port = port; }
+  }
+
+}
+
+
+
+

Of course this is just an example to demonstrate this feature of spring-boot. +In order to send emails you would typically use the existing spring-email feature. +But as you can see this allows us to define and access our configuration in a very structured and comfortable way. +The annotation @ConfigurationProperties(prefix = "myapp.billing.service") will automatically map spring configuration properties starting with myapp.billing.service via the according getters and setters into our BillingServiceConfigProperties. +We can easily define defaults (e.g. 25 as default value for myapp.billing.service.smtp.port). +Also Email or Smtp could be top-level classes to be reused in multiple configurations. +Of course you would also add helpful JavaDoc comments to the getters and classes to document your configuration options. +Further to access this configuration, we can use standard dependency-injection:

+
+
+
+
@Inject
+private BillingServiceConfigProperties config;
+
+
+
+

For very generic cases you may also use Map<String, String> to map any kind of property in an untyped way. +An example for generic configuration from devon4j can be found in +ServiceConfigProperties.

+
+
+

For further details about this feature also consult Guide to @ConfigurationProperties in Spring Boot.

+
+
+
+
Generate configuration metadata
+
+

You should further add this dependency to your module containing the *ConfigProperties:

+
+
+
+
    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-configuration-processor</artifactId>
+      <optional>true</optional>
+    </dependency>
+
+
+
+

This will generate configuration metadata so projects using your code can benefit from autocompletion and getting your JavaDoc as tooltip when editing application.properties, which makes this approach very powerful. +For further details about this please read A Guide to Spring Boot Configuration Metadata.

+
+
+ +
+

==Auditing

+
+
+

For database auditing we use hibernate envers. If you want to use auditing ensure you have the following dependency in your pom.xml:

+
+
+
Listing 20. spring
+
+
<dependency>
+  <groupId>com.devonfw.java.modules</groupId>
+  <artifactId>devon4j-jpa-envers</artifactId>
+</dependency>
+
+
+
+
Listing 21. quarkus
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-hibernate-envers</artifactId>
+</dependency>
+
+
+
+ + + + + +
+ + +The following part applies only to spring applications. At this point, the Quarkus extension does not provide any additional configurations. For Quarkus applications, simply use the @Audited annotation to enable auditing for an entity class, as described a few lines below or seen here. +
+
+
+

Make sure that the entity manager also scans the package from the devon4j-jpa[-envers] module in order to work properly. And make sure that the correct Repository Factory Bean Class is chosen.

+
+
+
+
@EntityScan(basePackages = { "«my.base.package»" }, basePackageClasses = { AdvancedRevisionEntity.class })
+...
+@EnableJpaRepositories(repositoryFactoryBeanClass = GenericRevisionedRepositoryFactoryBean.class)
+...
+public class SpringBootApp {
+  ...
+}
+
+
+
+

Now let your [Entity]Repository extend from DefaultRevisionedRepository instead of DefaultRepository.

+
+
+

The repository now has a method getRevisionHistoryMetadata(id) and getRevisionHistoryMetadata(id, boolean lazy) available to get a list of revisions for a given entity and a method find(id, revision) to load a specific revision of an entity with the given ID or getLastRevisionHistoryMetadata(id) to load the last revision. +To enable auditing for an entity simply place the @Audited annotation to your entity and all entity classes it extends from.

+
+
+
+
@Entity(name = "Drink")
+@Audited
+public class DrinkEntity extends ProductEntity implements Drink {
+...
+
+
+
+

When auditing is enabled for an entity an additional database table is used to store all changes to the entity table and a corresponding revision number. This table is called <ENTITY_NAME>_AUD per default. Another table called REVINFO is used to store all revisions. Make sure that these tables are available. They can be generated by hibernate with the following property (only for development environments).

+
+
+
+
  database.hibernate.hbm2ddl.auto=create
+
+
+
+

Another possibility is to put them in your database migration scripts like so.

+
+
+
+
CREATE CACHED TABLE PUBLIC.REVINFO(
+  id BIGINT NOT NULL generated by default as identity (start with 1),
+  timestamp BIGINT NOT NULL,
+  user VARCHAR(255)
+);
+...
+CREATE CACHED TABLE PUBLIC.<TABLE_NAME>_AUD(
+    <ALL_TABLE_ATTRIBUTES>,
+    revtype TINYINT,
+    rev BIGINT NOT NULL
+);
+
+
+
+ +
+

==Access-Control +Access-Control is a central and important aspect of Security. It consists of two major aspects:

+
+
+ +
+
+
+
+
Authentication
+
+

Definition:

+
+
+
+
+

Authentication is the verification that somebody interacting with the system is the actual subject for whom he claims to be.

+
+
+
+
+

The one authenticated is properly called subject or principal. There are two forms of principals you need to distinguish while designing your authentication: human users and autonomous systems. While e.g. a Kerberos/SPNEGO Single-Sign-On makes sense for human users, it is pointless for authenticating autonomous systems. For simplicity, we use the common term user to refer to any principal even though it may not be a human (e.g. in case of a service call from an external system).

+
+
+

To prove the authenticity, the user provides some secret called credentials. The most simple form of credentials is a password.

+
+
+
Implementations
+
+ + + + + +
+ + +Please never implement your own authentication mechanism or credential store. You have to be aware of implicit demands such as salting and hashing credentials, password life-cycle with recovery, expiry, and renewal including email notification confirmation tokens, central password policies, etc. This is the domain of access managers and identity management systems. In a business context you will typically already find a system for this purpose that you have to integrate (e.g. via LDAP). Otherwise you should consider establishing such a system e.g. using keycloak. +
+
+
+

We recommend using JWT when possible. For KISS, also try to avoid combining multiple authentication mechanisms (form based, basic-auth, SAMLv2, OAuth, etc.) within the same application (for different URLs).

+
+
+

For spring, check the Spring Security

+
+
+

For quarkus, check the Quarkus Authentication

+
+
+
+
+
Authorization
+
+

Definition:

+
+
+
+
+

Authorization is the verification that an authenticated user is allowed to perform the operation he intends to invoke.

+
+
+
+
+
Clarification of terms
+
+

For clarification we also want to give a common understanding of related terms that have no unique definition and consistent usage in the wild.

+
+ + ++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 14. Security terms related to authorization
TermMeaning and comment

Permission

A permission is an object that allows a principal to perform an operation in the system. This permission can be granted (given) or revoked (taken away). Sometimes people also use the term right, which is actually wrong, as a right (such as the right to be free) cannot be revoked.

Group

We use the term group in this context for an object that contains permissions. A group may also contain other groups. Then the group represents the set of all recursively contained permissions.

Role

We consider a role as a specific form of group that also contains permissions. A role identifies a specific function of a principal. A user can act in a role.

+

For simple scenarios a principal has a single role associated. In more complex situations a principal can have multiple roles but has only one active role at a time that he can choose out of his assigned roles. For KISS it is sometimes sufficient to avoid this by creating multiple accounts for the few users with multiple roles. Otherwise at least avoid switching roles at run-time in clients as this may cause problems with related states. Simply restart the client with the new role as parameter in case the user wants to switch his role.

Access Control

Any permission, group, role, etc., which declares a control for access management.

+
+
+
Suggestions on the access model
+
+

For the access model we give the following suggestions:

+
+
+
    +
  • +

    Each Access Control (permission, group, role, …​) is uniquely identified by a human readable string.

    +
  • +
  • +

    We create a unique permission for each use-case.

    +
  • +
  • +

    We define groups that combine permissions to typical and useful sets for the users.

    +
  • +
  • +

    We define roles as specific groups as required by our business demands.

    +
  • +
  • +

    We allow to associate users with a list of Access Controls.

    +
  • +
  • +

    For authorization of an implemented use case we determine the required permission. Furthermore, we determine the current user and verify that the required permission is contained in the tree spanned by all his associated Access Controls. If the user does not have the permission we throw a security exception and thus abort the operation and transaction.

    +
  • +
  • +

    We avoid negative permissions, that is a user has no permission by default and only those granted to him explicitly give him additional permission for specific things. Permissions granted can not be reduced by other permissions.

    +
  • +
  • +

    Technically we consider permissions as a secret of the application. Administrators shall not fiddle with individual permissions but grant them via groups. So the access management provides a list of strings identifying the Access Controls of a user. The individual application itself contains these Access Controls in a structured way, whereas each group forms a permission tree.

    +
  • +
+
+
+
+
Naming conventions
+
+

As stated above each Access Control is uniquely identified by a human readable string. This string should follow the naming convention:

+
+
+
+
«app-id».«local-name»
+
+
+
+

For Access Control Permissions the «local-name» again follows the convention:

+
+
+
+
«verb»«object»
+
+
+
+

The segments are defined by the following table:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 15. Segments of Access Control Permission ID
SegmentDescriptionExample

«app-id»

Is a unique technical but human readable string of the application (or microservice). It shall not contain special characters and especially no dot or whitespace. We recommend to use lower-train-case-ascii-syntax. The identity and access management should be organized on enterprise level rather than application level. Therefore permissions of different apps might easily clash (e.g. two apps might both define a group ReadMasterData but some user shall get this group for only one of these two apps). Using the «app-id». prefix is a simple but powerful namespacing concept that allows you to scale and grow. You may also reserve specific «app-id»s for cross-cutting concerns that do not actually reflect a single app e.g. to grant access to a geographic region.

shop

«verb»

The action that is to be performed on «object». We use Find for searching and reading data. Save shall be used both for create and update. Only if you really have demands to separate these two you may use Create in addition to Save. Finally, Delete is used for deletions. For non CRUD actions you are free to use additional verbs such as Approve or Reject.

Find

«object»

The affected object or entity. Shall be named according to your data-model

Product

+
+

So as an example shop.FindProduct will reflect the permission to search and retrieve a Product in the shop application. The group shop.ReadMasterData may combine all permissions to read master-data from the shop. However, also a group shop.Admin may exist for the Admin role of the shop application. Here the «local-name» is Admin that does not follow the «verb»«object» schema.

+
+
+
+
devon4j-security
+
+

The module devon4j-security provides ready-to-use code based on spring-security that makes your life a lot easier.

+
+
+
+access-control +
+
Figure 7. devon4j Security Model
+
+
+

The diagram shows the model of devon4j-security that separates two different aspects:

+
+
+
    +
  • +

    The Identity- and Access-Management is provided by according products and typically already available in the enterprise landscape (e.g. an active directory). It provides a hierarchy of primary access control objects (roles and groups) of a user. An administrator can grant and revoke permissions (indirectly) via this way.

    +
  • +
  • +

    The application security defines a hierarchy of secondary access control objects (groups and permissions). This is done by configuration owned by the application (see following section). The "API" is defined by the IDs of the primary access control objects that will be referenced from the Identity- and Access-Management.

    +
  • +
+
+
+
+
Access Control Config
+
+

In your application simply extend AccessControlConfig to configure your access control objects as code and reference it from your use-cases. An example config may look like this:

+
+
+
+
@Named
+public class ApplicationAccessControlConfig extends AccessControlConfig {
+
+  public static final String APP_ID = "MyApp";
+
+  private static final String PREFIX = APP_ID + ".";
+
+  public static final String PERMISSION_FIND_OFFER = PREFIX + "FindOffer";
+
+  public static final String PERMISSION_SAVE_OFFER = PREFIX + "SaveOffer";
+
+  public static final String PERMISSION_DELETE_OFFER = PREFIX + "DeleteOffer";
+
+  public static final String PERMISSION_FIND_PRODUCT = PREFIX + "FindProduct";
+
+  public static final String PERMISSION_SAVE_PRODUCT = PREFIX + "SaveProduct";
+
+  public static final String PERMISSION_DELETE_PRODUCT = PREFIX + "DeleteProduct";
+
+  public static final String GROUP_READ_MASTER_DATA = PREFIX + "ReadMasterData";
+
+  public static final String GROUP_MANAGER = PREFIX + "Manager";
+
+  public static final String GROUP_ADMIN = PREFIX + "Admin";
+
+  public ApplicationAccessControlConfig() {
+
+    super();
+    AccessControlGroup readMasterData = group(GROUP_READ_MASTER_DATA, PERMISSION_FIND_OFFER, PERMISSION_FIND_PRODUCT);
+    AccessControlGroup manager = group(GROUP_MANAGER, readMasterData, PERMISSION_SAVE_OFFER, PERMISSION_SAVE_PRODUCT);
+    AccessControlGroup admin = group(GROUP_ADMIN, manager, PERMISSION_DELETE_OFFER, PERMISSION_DELETE_PRODUCT);
+  }
+}
+
+
+
+
+
Configuration on Java Method level
+
+

In your use-case you can now reference a permission like this:

+
+
+
+
@Named
+public class UcSafeOfferImpl extends ApplicationUc implements UcSafeOffer {
+
+  @Override
+  @RolesAllowed(ApplicationAccessControlConfig.PERMISSION_SAVE_OFFER)
+  public OfferEto save(OfferEto offer) { ... }
+  ...
+}
+
+
+
+
+
JEE Standard
+
+

Role-based Access Control (RBAC) is commonly used for authorization. +JSR 250 defines a number of common annotations to secure your application.

+
+
+
    +
  • +

    javax.annotation.security.PermitAll specifies that no access control is required to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.DenyAll specifies that no access controls are allowed to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.RolesAllowed specifies that only a list of access controls are allowed to invoke the specified method(s).

    +
  • +
  • +

    javax.annotation.security.DeclareRoles defines roles for security checking.

    +
  • +
  • +

    javax.annotation.security.RunAs specifies the RunAs role for the given components.

    +
  • +
+
+
+

@PermitAll, @DenyAll, and @RolesAllowed annotations can be applied to both class and method. +A method-level annotation will override the behaviour of class-level annotation. Using multiple annotations of those 3 is not valid.

+
+
+
+
// invalid
+@PermitAll
+@DenyAll
+public String foo()
+
+// invalid and compilation fails
+@RolesAllowed("admin")
+@RolesAllowed("user")
+public String bar()
+
+// OK
+@RolesAllowed({"admin", "user"})
+public String bar()
+
+
+
+

Please note that when specifying multiple arguments to @RolesAllowed those are combined with OR (and not with AND). +So if the user has any of the specified access controls, he will be able to access the method.

+
+
+

As a best practice avoid specifying string literals to @RolesAllowed. +Instead define a class with all access controls as constants and reference them from there. +This class is typically called ApplicationAccessControlConfig in devonfw.

+
+
+

In many complicated cases where @PermitAll @DenyAll @RolesAllowed are insufficient e.g. a method should be accessed by a user in role A and not in role B at the same time, you have to verify the user role directly in the method. You can use SecurityContext class to get further needed information.

+
+
+Spring +
+

Spring Security also supports authorization on method level. To use it, you need to add the spring-security-config dependency. If you use Spring Boot, the dependency spring-boot-starter-security already includes spring-security-config. Then you can configure as follows:

+
+
+
    +
  • +

    prePostEnabled property enables Spring Security pre/post annotations. @PreAuthorize and @PostAuthorize annotations provide expression-based access control. See more here

    +
  • +
  • +

    securedEnabled property determines if the @Secured annotation should be enabled. @Secured can be used similarly as @RolesAllowed.

    +
  • +
  • +

    jsr250Enabled property allows us to use the JSR-250 annotations such as @RolesAllowed.

    +
  • +
+
+
+
+
@Configuration
+@EnableGlobalMethodSecurity(
+  prePostEnabled = true,
+  securedEnabled = true,
+  jsr250Enabled = true)
+public class MethodSecurityConfig
+  extends GlobalMethodSecurityConfiguration {
+}
+
+
+
+

A further read about the whole concept of Spring Security Authorization can be found here.

+
+
+
+Quarkus +
+

Quarkus comes with built-in security to allow for RBAC based on the common security annotations @RolesAllowed, @DenyAll, @PermitAll on REST endpoints and CDI beans. Quarkus also provides the io.quarkus.security.Authenticated annotation that will permit any authenticated user to access the resource (equivalent to @RolesAllowed("**")).

+
+
+
+
+
Data-based Permissions
+ +
+
+
Access Control Schema (deprecated)
+
+

The access-control-schema.xml approach is deprecated. The documentation can still be found in access control schema.

+
+
+ +
+

==Data-permissions

+
+
+

In some projects there are demands for permissions and authorization that is dependent on the processed data. E.g. a user may only be allowed to read or write data for a specific region. This is adding some additional complexity to your authorization. If you can avoid this it is always best to keep things simple. However, in various cases this is a requirement. Therefore the following sections give you guidance and patterns how to solve this properly.

+
+
+
+
Structuring your data
+
+

For all your business objects (entities) that have to be secured regarding to data permissions we recommend that you create a separate interface that provides access to the relevant data required to decide about the permission. Here is a simple example:

+
+
+
+
public interface SecurityDataPermissionCountry {
+
+  /**
+   * @return the 2-letter ISO code of the country this object is associated with. Users need
+   *         a data-permission for this country in order to read and write this object.
+   */
+  String getCountry();
+}
+
+
+
+

Now related business objects (entities) can implement this interface. Often such data-permissions have to be applied to an entire object-hierarchy. For security reasons we recommend that also all child-objects implement this interface. For performance reasons we recommend that the child-objects redundantly store the data-permission properties (such as country in the example above) and this gets simply propagated from the parent, when a child object is created.

+
+
+
+
Permissions for processing data
+
+

When saving or processing objects with a data-permission, we recommend to provide dedicated methods to verify the permission in an abstract base-class such as AbstractUc and simply call this explicitly from your business code. This makes it easy to understand and debug the code. Here is a simple example:

+
+
+
+
protected void verifyPermission(SecurityDataPermissionCountry entity) throws AccessDeniedException;
+
+
+
+Beware of AOP +
+

For simple but cross-cutting data-permissions you may also use AOP. This leads to programming aspects that reflectively scan method arguments and magically decide what to do. Be aware that this quickly gets tricky:

+
+
+
    +
  • +

    What if multiple of your method arguments have data-permissions (e.g. implement SecurityDataPermission*)?

    +
  • +
  • +

    What if the object to authorize is only provided as reference (e.g. Long or IdRef) and only loaded and processed inside the implementation where the AOP aspect does not apply?

    +
  • +
  • +

    How to express advanced data-permissions in annotations?

    +
  • +
+
+
+

What we have learned is that annotations like @PreAuthorize from spring-security easily lead to the "programming in string literals" anti-pattern. We strongly discourage to use this anti-pattern. In such case writing your own verifyPermission methods that you manually call in the right places of your business-logic is much better to understand, debug and maintain.

+
+
+
+
+
Permissions for reading data
+
+

When it comes to restrictions on the data to read it becomes even more tricky. In the context of a user only entities shall be loaded from the database he is permitted to read. This is simple for loading a single entity (e.g. by its ID) as you can load it and then if not permitted throw an exception to secure your code. But what if the user is performing a search query to find many entities? For performance reasons we should only find data the user is permitted to read and filter all the rest already via the database query. But what if this is not a requirement for a single query but needs to be applied cross-cutting to tons of queries? Therefore we have the following pattern that solves your problem:

+
+
+

For each data-permission attribute (or set of such) we create an abstract base entity:

+
+
+
+
@MappedSuperclass
+@EntityListeners(PermissionCheckListener.class)
+@FilterDef(name = "country", parameters = {@ParamDef(name = "countries", type = "string")})
+@Filter(name = "country", condition = "country in (:countries)")
+public abstract class SecurityDataPermissionCountryEntity extends ApplicationPersistenceEntity
+    implements SecurityDataPermissionCountry {
+
+  private String country;
+
+  @Override
+  public String getCountry() {
+    return this.country;
+  }
+
+  public void setCountry(String country) {
+    this.country = country;
+  }
+}
+
+
+
+

There are some special hibernate annotations @EntityListeners, @FilterDef, and @Filter used here allowing to apply a filter on the country for any (non-native) query performed by hibernate. The entity listener may look like this:

+
+
+
+
public class PermissionCheckListener {
+
+  @PostLoad
+  public void read(SecurityDataPermissionCountryEntity entity) {
+    PermissionChecker.getInstance().requireReadPermission(entity);
+  }
+
+  @PrePersist
+  @PreUpdate
+  public void write(SecurityDataPermissionCountryEntity entity) {
+    PermissionChecker.getInstance().requireWritePermission(entity);
+  }
+}
+
+
+
+

This will ensure that hibernate implicitly will call these checks for every such entity when it is read from or written to the database. Further to avoid reading entities from the database the user is not permitted to (and ending up with exceptions), we create an AOP aspect that automatically activates the above declared hibernate filter:

+
+
+
+
@Named
+public class PermissionCheckerAdvice implements MethodBeforeAdvice {
+
+  @Inject
+  private PermissionChecker permissionChecker;
+
+  @PersistenceContext
+  private EntityManager entityManager;
+
+  @Override
+  public void before(Method method, Object[] args, Object target) {
+
+    Collection<String> permittedCountries = this.permissionChecker.getPermittedCountriesForReading();
+    if (permittedCountries != null) { // null is returned for admins that may access all countries
+      if (permittedCountries.isEmpty()) {
+        throw new AccessDeniedException("Not permitted for any country!");
+      }
+      Session session = this.entityManager.unwrap(Session.class);
+      session.enableFilter("country").setParameterList("countries", permittedCountries.toArray());
+    }
+  }
+}
+
+
+
+

Finally to apply this aspect to all Repositories (can easily be changed to DAOs) implement the following advisor:

+
+
+
+
@Named
+public class PermissionCheckerAdvisor implements PointcutAdvisor, Pointcut, ClassFilter, MethodMatcher {
+
+  @Inject
+  private PermissionCheckerAdvice advice;
+
+  @Override
+  public Advice getAdvice() {
+    return this.advice;
+  }
+
+  @Override
+  public boolean isPerInstance() {
+    return false;
+  }
+
+  @Override
+  public Pointcut getPointcut() {
+    return this;
+  }
+
+  @Override
+  public ClassFilter getClassFilter() {
+    return this;
+  }
+
+  @Override
+  public MethodMatcher getMethodMatcher() {
+    return this;
+  }
+
+  @Override
+  public boolean matches(Method method, Class<?> targetClass) {
+    return true; // apply to all methods
+  }
+
+  @Override
+  public boolean isRuntime() {
+    return false;
+  }
+
+  @Override
+  public boolean matches(Method method, Class<?> targetClass, Object... args) {
+    throw new IllegalStateException("isRuntime()==false");
+  }
+
+  @Override
+  public boolean matches(Class<?> clazz) {
+    // when using DAOs simply change to some class like ApplicationDao
+    return DefaultRepository.class.isAssignableFrom(clazz);
+  }
+}
+
+
+
+
+
Managing and granting the data-permissions
+
+

Following our authorization guide we can simply create a permission for each country. We might simply reserve a prefix (as virtual «app-id») for each data-permission to allow granting data-permissions to end-users across all applications of the IT landscape. In our example we could create access controls country.DE, country.US, country.ES, etc. and assign those to the users. The method permissionChecker.getPermittedCountriesForReading() would then scan for these access controls and only return the 2-letter country code from it.

+
+
+ + + + + +
+ + +Before you make your decisions how to design your access controls please clarify the following questions: +
+
+
+
    +
  • +

    Do you need to separate data-permissions independent of the functional permissions? E.g. may it be required to express that a user can read data from the countries ES and PL but is only permitted to modify data from PL? In such case a single assignment of "country-permissions" to users is insufficient.

    +
  • +
  • +

    Do you want to grant data-permissions individually for each application (higher flexibility and complexity) or for the entire application landscape (simplicity, better maintenance for administrators)? In case of the first approach you would rather have access controls like app1.country.GB and app2.country.GB.

    +
  • +
  • +

    Do your data-permissions depend on objects that can be created dynamically inside your application?

    +
  • +
  • +

    If you want to grant data-permissions on other business objects (entities), how do you want to reference them (primary keys, business keys, etc.)? What reference is most stable? Which is most readable?

    +
  • +
+
+
+ +
+

==JWT

+
+
+

JWT (JSON Web Token) is an open standard (see RFC 7519) for creating JSON based access tokens that assert some number of claims. +With an IT landscape divided into multiple smaller apps you want to avoid coupling all those apps or services tightly with your IAM (Identity & Access Management). +Instead your apps simply expects a JWT as bearer-token in the Authorization HTTP header field. +All it needs to do for authentication is validating this JWT. +The actual authentication is done centrally by an access system (IAM) that authors those JWTs. +Therefore we recommend to use strong asymmetric cryptography to sign the JWT when it is authored. +Create a keypair per environment and keep the private key as a secret only known to the access system authorizing the JWTs. +Your apps only need to know the public key in order to validate the JWT. +Any request without a JWT or with an invalid JWT will be rejected (with status code 401).

+
+
+

When using spring check the JWT Spring-Starter. +For quarkus follow Using JWT RBAC.

+
+
+ +
+

==Cross-site request forgery (CSRF)

+
+
+

CSRF is a type of malicious exploit of a web application that allows an attacker to induce users to perform actions that they do not intend to perform.

+
+
+
+csrf +
+
+
+

More details about csrf can be found at https://owasp.org/www-community/attacks/csrf.

+
+
+
+
+
Secure devon4j server against CSRF
+
+

In case your devon4j server application is not accessed by browsers or the web-client is using JWT based authentication, you are already safe against CSRF. +However, if your application is accessed from a browser and you are using form based authentication (with session cookie) or basic authentication, you need to enable CSRF protection. +This guide will tell you how to do this.

+
+
+
Dependency
+
+

To secure your devon4j application against CSRF attacks, you only need to add the following dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-security-csrf</artifactId>
+</dependency>
+
+
+
+

Starting with devon4j version 2020.12.001 application template, this is all you need to do. +However, if you have started from an older version or you want to understand more, please read on.

+
+
+
+
Pluggable web-security
+
+

To enable pluggable security via devon4j security starters you need to apply WebSecurityConfigurer to your BaseWebSecurityConfig (your class extending spring-boot’s WebSecurityConfigurerAdapter) as following:

+
+
+
+
  @Inject
+  private WebSecurityConfigurer webSecurityConfigurer;
+
+  public void configure(HttpSecurity http) throws Exception {
+    // disable CSRF protection by default, use csrf starter to override.
+	  http = http.csrf().disable();
+	  // apply pluggable web-security from devon4j security starters
+    http = this.webSecurityConfigurer.configure(http);
+    .....
+  }
+
+
+
+
+
Custom CsrfRequestMatcher
+
+

If you want to customize which HTTP requests will require a CSRF token, you can implement your own CsrfRequestMatcher and provide it to the devon4j CSRF protection via qualified injection as following:

+
+
+
+
@Named("CsrfRequestMatcher")
+public class CsrfRequestMatcher implements RequestMatcher {
+  @Override
+  public boolean matches(HttpServletRequest request) {
+    .....
+  }
+}
+
+
+
+

Please note that the exact name (@Named("CsrfRequestMatcher")) is required here to ensure your custom implementation will be injected properly.

+
+
+
+
CsrfRestService
+
+

With the devon4j-starter-security-csrf the CsrfRestService gets integrated into your app. +It provides an operation to get the CSRF token via an HTTP GET request. +The URL path to retrieve this CSRF token is services/rest/csrf/v1/token. +As a result you will get a JSON like the following:

+
+
+
+
{
+  "token":"3a8a5f66-c9eb-4494-81e1-7cc58bc3a519",
+  "parameterName":"_csrf",
+  "headerName":"X-CSRF-TOKEN"
+}
+
+
+
+

The token value is a strong random value that will differ for each user session. +It has to be sent with subsequent HTTP requests (when method is other than GET) in the specified header (X-CSRF-TOKEN).

+
+
+
+
How it works
+
+

Putting it all together, a browser client should call the CsrfRestService after successful login to receive the current CSRF token. +With every subsequent HTTP request (other than GET) the client has to send this token in the according HTTP header. +Otherwise the server will reject the request to prevent CSRF attacks. +Therefore, an attacker might make your browser perform HTTP requests towards your devon4j application backend via <image> elements, <iframes>, etc. +Your browser will then still include your session cookie if you are already logged in (e.g. from another tab). +However, in case he wants to trigger DELETE or POST requests trying your browser to make changes in the application (delete or update data, etc.) this will fail without CSRF token. +The attacker may make your browser retrieve the CSRF token but he will not be able to retrieve the result and put it into the header of other requests due to the same-origin-policy. +This way your application will be secured against CSRF attacks.

+
+
+
+
+
Configure devon4ng client for CSRF
+
+

Devon4ng client configuration for CSRF is described here

+
+
+ +
+

==Aspect Oriented Programming (AOP)

+
+
+

AOP is a powerful feature for cross-cutting concerns. However, if used extensively and for the wrong things an application can get unmaintainable. Therefore we give you the best practices where and how to use AOP properly.

+
+
+
+
AOP Key Principles
+
+

We follow these principles:

+
+
+
    +
  • +

    We use spring AOP based on dynamic proxies (and fallback to cglib).

    +
  • +
  • +

    We avoid AspectJ and other mighty and complex AOP frameworks whenever possible

    +
  • +
  • +

    We only use AOP where we consider it as necessary (see below).

    +
  • +
+
+
+
+
AOP Usage
+
+

We recommend to use AOP with care but we consider it established for the following cross cutting concerns:

+
+
+ +
+
+
+
AOP Debugging
+
+

When using AOP with dynamic proxies the debugging of your code can get nasty. As you can see by the red boxes in the call stack in the debugger there is a lot of magic happening while you often just want to step directly into the implementation skipping all the AOP clutter. When using Eclipse this can easily be achieved by enabling step filters. Therefore you have to enable the feature in the Eclipse tool bar (highlighted in red).

+
+
+
+AOP debugging +
+
+
+

In order to properly make this work you need to ensure that the step filters are properly configured:

+
+
+
+Step Filter Configuration +
+
+
+

Ensure you have at least the following step-filters configured and active:

+
+
+
+
ch.qos.logback.*
+com.devonfw.module.security.*
+java.lang.reflect.*
+java.security.*
+javax.persistence.*
+org.apache.commons.logging.*
+org.apache.cxf.jaxrs.client.*
+org.apache.tomcat.*
+org.h2.*
+org.springframework.*
+
+
+
+ +
+

==Exception Handling

+
+
+
+
Exception Principles
+
+

For exceptions we follow these principles:

+
+
+
    +
  • +

    We only use exceptions for exceptional situations and not for programming control flows, etc. Creating an exception in Java is expensive and hence should not be done for simply testing whether something is present, valid or permitted. In the latter case design your API to return this as a regular result.

    +
  • +
  • +

    We use unchecked exceptions (RuntimeException) [2]

    +
  • +
  • +

    We distinguish internal exceptions and user exceptions:

    +
    +
      +
    • +

      Internal exceptions have technical reasons. For unexpected and exotic situations, it is sufficient to throw existing exceptions such as IllegalStateException. For common scenarios a own exception class is reasonable.

      +
    • +
    • +

      User exceptions contain a message explaining the problem for end users. Therefore, we always define our own exception classes with a clear, brief, but detailed message.

      +
    • +
    +
    +
  • +
  • +

    Our own exceptions derive from an exception base class supporting

    + +
  • +
+
+
+

All this is offered by mmm-util-core, which we propose as a solution. +If you use the devon4j-rest module, this is already included. For Quarkus applications, you need to add the dependency manually.

+
+
+

If you want to avoid additional dependencies, you can implement your own solution for this by creating an abstract exception class ApplicationBusinessException extending from RuntimeException. For an example of this, see our Quarkus reference application.

+
+
+
+
Exception Example
+
+

Here is an exception class from our sample application:

+
+
+
+
public class IllegalEntityStateException extends ApplicationBusinessException {
+
+  private static final long serialVersionUID = 1L;
+
+  public IllegalEntityStateException(Object entity, Object state) {
+
+    this((Throwable) null, entity, state);
+  }
+
+
+  public IllegalEntityStateException(Object entity, Object currentState, Object newState) {
+
+    this(null, entity, currentState, newState);
+  }
+
+  public IllegalEntityStateException(Throwable cause, Object entity, Object state) {
+
+    super(cause, createBundle(NlsBundleApplicationRoot.class).errorIllegalEntityState(entity, state));
+  }
+
+  public IllegalEntityStateException(Throwable cause, Object entity, Object currentState, Object newState) {
+
+    super(cause, createBundle(NlsBundleApplicationRoot.class).errorIllegalEntityStateChange(entity, currentState,
+        newState));
+  }
+
+}
+
+
+
+

The message templates are defined in the interface NlsBundleRestaurantRoot as following:

+
+
+
+
public interface NlsBundleApplicationRoot extends NlsBundle {
+
+
+  @NlsBundleMessage("The entity {entity} is in state {state}!")
+  NlsMessage errorIllegalEntityState(@Named("entity") Object entity, @Named("state") Object state);
+
+
+  @NlsBundleMessage("The entity {entity} in state {currentState} can not be changed to state {newState}!")
+  NlsMessage errorIllegalEntityStateChange(@Named("entity") Object entity, @Named("currentState") Object currentState,
+      @Named("newState") Object newState);
+
+
+  @NlsBundleMessage("The property {property} of object {object} can not be changed!")
+  NlsMessage errorIllegalPropertyChange(@Named("object") Object object, @Named("property") Object property);
+
+  @NlsBundleMessage("There is currently no user logged in")
+  NlsMessage errorNoActiveUser();
+
+
+
+
+
Handling Exceptions
+
+

For catching and handling exceptions we follow these rules:

+
+
+
    +
  • +

    We do not catch exceptions just to wrap or to re-throw them.

    +
  • +
  • +

    If we catch an exception and throw a new one, we always have to provide the original exception as cause to the constructor of the new exception.

    +
  • +
  • +

    At the entry points of the application (e.g. a service operation) we have to catch and handle all throwables. This is done via the exception-facade-pattern via an explicit facade or aspect. The devon4j-rest module already provides ready-to-use implementations for this such as RestServiceExceptionFacade that you can use in your Spring application. For Quarkus, follow the Quarkus guide on exception handling.
    +The exception facade has to …​

    +
    +
      +
    • +

      log all errors (user errors on info and technical errors on error level)

      +
    • +
    • +

      ensure that the entire exception is passed to the logger (not only the message) so that the logger can capture the entire stacktrace and the root cause is not lost.

      +
    • +
    • +

      convert the error to a result appropriable for the client and secure for Sensitive Data Exposure. Especially for security exceptions only a generic security error code or message may be revealed but the details shall only be logged but not be exposed to the client. All internal exceptions are converted to a generic error with a message like:

      +
      +
      +
      +

      An unexpected technical error has occurred. We apologize any inconvenience. Please try again later.

      +
      +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
Common Errors
+
+

The following errors may occur in any devon application:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 16. Common Exceptions
CodeMessageLink

TechnicalError

An unexpected error has occurred! We apologize any inconvenience. Please try again later.

TechnicalErrorUserException.java

ServiceInvoke

«original message of the cause»

ServiceInvocationFailedException.java

+
+ +
+

==Internationalization +Internationalization (I18N) is about writing code independent from locale-specific information. +For I18N of text messages we are suggesting +mmm native-language-support.

+
+
+

In devonfw we have developed a solution to manage text internationalization. devonfw solution comes into two aspects:

+
+
+
    +
  • +

    Bind locale information to the user.

    +
  • +
  • +

    Get the messages in the current user locale.

    +
  • +
+
+
+
+
Binding locale information to the user
+
+

We have defined two different points to bind locale information to user, depending on user is authenticated or not.

+
+
+
    +
  • +

    User not authenticated: devonfw intercepts unsecured request and extract locale from it. At first, we try to extract a language parameter from the request and if it is not possible, we extract locale from the `Accept-Language` header.

    +
  • +
  • +

    User authenticated. During login process, application developers are responsible to fill language parameter in the UserProfile class. This language parameter could be obtained from DB, LDAP, request, etc. In devonfw sample we get the locale information from database.

    +
  • +
+
+
+

This image shows the entire process:

+
+
+
+Internationalization +
+
+
+
+
Getting internationalized messages
+
+

devonfw has a bean that manages i18n message resolution, the ApplicationLocaleResolver. This bean is responsible for getting the current user, extracting locale information from it and reading the correct properties file to get the message.

+
+
+

The i18n properties file must be called ApplicationMessages_la_CO.properties where la=language and CO=country. This is an example of a i18n properties file for English language to translate devonfw sample user roles:

+
+
+

ApplicationMessages_en_US.properties

+
+
+
+
admin=Admin
+
+
+
+

You should define an ApplicationMessages_la_CO.properties file for every language that your application needs.

+
+
+

ApplicationLocaleResolver bean is injected in AbstractComponentFacade class so you have available this bean in logic layer so you only need to put this code to get an internationalized message:

+
+
+
+
String msg = getApplicationLocaleResolver().getMessage("mymessage");
+
+
+
+ +
+

==Service Client

+
+
+

This guide is about consuming (calling) services from other applications (micro-services). For providing services, see the Service-Layer Guide. Services can be consumed by the client or the server. As the client is typically not written in Java, you should consult the according guide for your client technology. In case you want to call a service within your Java code, this guide is the right place to get help.

+
+
+
+
Motivation
+
+

Various solutions already exist for calling services, such as RestTemplate from spring or the JAX-RS client API. Furthermore, each and every service framework offers its own API as well. These solutions might be suitable for very small and simple projects (with one or two such invocations). However, with the trend of microservices, the invocation of a service becomes a very common use-case that occurs all over the place. You typically need a solution that is very easy to use but supports flexible configuration, adding headers for authentication, mapping of errors from the server, logging success/errors with duration for performance analysis, support for synchronous and asynchronous invocations, etc. This is exactly what this devon4j service-client solution brings to you.

+
+
+
+
Usage
+
+

Spring

+
+
+

For Spring, follow the Spring rest-client guide.

+
+
+

Quarkus

+
+
+

For Quarkus, we recommend to follow the official Quarkus rest-client guide

+
+
+ +
+

==Testing

+
+
+
+
General best practices
+
+

For testing please follow our general best practices:

+
+
+
    +
  • +

    Tests should have a clear goal that should also be documented.

    +
  • +
  • +

    Tests have to be classified into different integration levels.

    +
  • +
  • +

    Tests should follow a clear naming convention.

    +
  • +
  • +

    Automated tests need to properly assert the result of the tested operation(s) in a reliable way. E.g. avoid stuff like assertThat(service.getAllEntities()).hasSize(42) or even worse tests that have no assertion at all.

    +
  • +
  • +

    Tests need to be independent of each other. Never write test-cases or tests (in Java @Test methods) that depend on another test to be executed before.

    +
  • +
  • +

    Use AssertJ to write good readable and maintainable tests that also provide valuable feedback in case a test fails. Do not use legacy JUnit methods like assertEquals anymore!

    +
  • +
  • +

    For easy understanding divide your test in three commented sections:

    +
    +
      +
    • +

      //given

      +
    • +
    • +

      //when

      +
    • +
    • +

      //then

      +
    • +
    +
    +
  • +
  • +

    Plan your tests and test data management properly before implementing.

    +
  • +
  • +

    Instead of having a too strong focus on test coverage better ensure you have covered your critical core functionality properly and review the code including tests.

    +
  • +
  • +

    Test code shall NOT be seen as second class code. You shall consider design, architecture and code-style also for your test code but do not over-engineer it.

    +
  • +
  • +

    Test automation is good but should be considered in relation to cost per use. Creating full coverage via automated system tests can cause a massive amount of test-code that can turn out as a huge maintenance hell. Always consider all aspects including product life-cycle, criticality of use-cases to test, and variability of the aspect to test (e.g. UI, test-data).

    +
  • +
  • +

    Use continuous integration and establish that the entire team wants to have clean builds and running tests.

    +
  • +
  • +

    Prefer delegation over inheritance for cross-cutting testing functionality. Good places to put this kind of code can be realized and reused via the JUnit @Rule mechanism.

    +
  • +
+
+
+
+
Test Automation Technology Stack
+
+

For test automation we use JUnit. However, we are strictly doing all assertions with AssertJ. For mocking we use Mockito. +In order to mock remote connections we use WireMock.

+
+
+

For testing entire components or sub-systems we recommend to use for Spring stack spring-boot-starter-test as lightweight and fast testing infrastructure that is already shipped with devon4j-test. For Quarkus, you can add the necessary extensions manually such as quarkus-junit5, quarkus-junit5-mockito, assertj-core etc.

+
+
+

In case you have to use a full blown JEE application server, we recommend to use arquillian. To get started with arquillian, look here.

+
+
+
+
Test Doubles
+
+

We use test doubles as generic term for mocks, stubs, fakes, dummies, or spies to avoid confusion. Here is a short summary from stubs VS mocks:

+
+
+
    +
  • +

    Dummy objects specifying no logic at all. May declare data in a POJO style to be used as boiler plate code to parameter lists or even influence the control flow towards the test’s needs.

    +
  • +
  • +

    Fake objects actually have working implementations, but usually take some shortcut which makes them not suitable for production (an in memory database is a good example).

    +
  • +
  • +

    Stubs provide canned answers to calls made during the test, usually not responding at all to anything outside what’s programmed in for the test. Stubs may also record information about calls, such as an email gateway stub that remembers the messages it 'sent', or maybe only how many messages it 'sent'.

    +
  • +
  • +

    Mocks are objects pre-programmed with expectations, which form a specification of the calls they are expected to receive.

    +
  • +
+
+
+

We try to give some examples, which should make it somehow clearer:

+
+
+
Stubs
+
+

Best Practices for applications:

+
+
+
    +
  • +

    A good way to replace small to medium large boundary systems, whose impact (e.g. latency) should be ignored during load and performance tests of the application under development.

    +
  • +
  • +

    As stub implementations rely on state-based verification, there is the threat that test developers will partially reimplement the state transitions based on the replaced code. This will immediately lead to a maintenance black hole, so better use mocks to assure the expected behavior on the interface level.

    +
  • +
  • +

    Do NOT use stubs as the basis of a large amount of test cases, as due to the state-based verification of stubs, test developers will enrich the stub implementation until it becomes a large monster with its own hunger for maintenance efforts.

    +
  • +
+
+
+
+
Mocks
+
+

Best Practices for applications:

+
+
+
    +
  • +

    Replace not-needed dependencies of your system-under-test (SUT) to minimize the application context to start of your component framework.

    +
  • +
  • +

    Replace dependencies of your SUT to impact the control flow under test without establishing all the context parameters needed to match the control flow.

    +
  • +
  • +

    Remember: Not everything has to be mocked! Especially on lower levels of tests like isolated module tests you can be betrayed into a mocking delusion, where you end up in a hundred lines of code mocking the whole context and five lines executing the test and verifying the mocks behavior. Always keep in mind the benefit-cost ratio, when implementing tests using mocks.

    +
  • +
+
+
+
+
WireMock
+
+

If you need to mock remote connections such as HTTP-Servers, WireMock offers easy to use functionality. For a full description see the homepage or the github repository. Wiremock can be used either as a JUnit Rule, in Java outside of JUnit or as a standalone process. The mocked server can be configured to respond to specific requests in a given way via a fluent Java API, JSON files and JSON over HTTP. An example as an integration to JUnit can look as follows.

+
+
+
+
import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig;
+import com.github.tomakehurst.wiremock.junit.WireMockRule;
+
+public class WireMockOfferImport{
+
+  @Rule
+  public WireMockRule mockServer = new WireMockRule(wireMockConfig().dynamicPort());
+
+  @Test
+  public void requestDataTest() throws Exception {
+  int port = this.mockServer.port();
+  ...}
+
+
+
+

This creates a server on a randomly chosen free port on the running machine. You can also specify the port to be used if wanted. Other than that there are several options to further configure the server. This includes HTTPs, proxy settings, file locations, logging and extensions.

+
+
+
+
  @Test
+  public void requestDataTest() throws Exception {
+      this.mockServer.stubFor(get(urlEqualTo("/new/offers")).withHeader("Accept", equalTo("application/json"))
+      .withHeader("Authorization", containing("Basic")).willReturn(aResponse().withStatus(200).withFixedDelay(1000)
+      .withHeader("Content-Type", "application/json").withBodyFile("/wireMockTest/jsonBodyFile.json")));
+  }
+
+
+
+

This will stub the URL localhost:port/new/offers to respond with a status 200 message containing a header (Content-Type: application/json) and a body with content given in jsonBodyFile.json if the request matches several conditions. +It has to be a GET request to ../new/offers with the two given header properties.

+
+
+

Note that by default files are located in src/test/resources/__files/. When using only one WireMock server one can omit the this.mockServer before the stubFor call (static method). +You can also add a fixed delay to the response or processing delay with WireMock.addRequestProcessingDelay(time) in order to test for timeouts.

+
+
+

WireMock can also respond with different corrupted messages to simulate faulty behaviour.

+
+
+
+
@Test(expected = ResourceAccessException.class)
+public void faultTest() {
+
+    this.mockServer.stubFor(get(urlEqualTo("/fault")).willReturn(aResponse()
+    .withFault(Fault.MALFORMED_RESPONSE_CHUNK)));
+...}
+
+
+
+

A GET request to ../fault returns an OK status header, then garbage, and then closes the connection.

+
+
+
+
+
Integration Levels
+
+

There are many discussions about the right level of integration for test automation. Sometimes it is better to focus on small, isolated modules of the system - whatever a "module" may be. In other cases it makes more sense to test integrated groups of modules. Because there is no universal answer to this question, devonfw only defines a common terminology for what could be tested. Each project must make its own decision where to put the focus of test automation. There is no worldwide accepted terminology for the integration levels of testing. In general we consider ISTQB. However, with a technical focus on test automation we want to get more precise.

+
+
+

The following picture shows a simplified view of an application based on the devonfw reference architecture. We define four integration levels that are explained in detail below. +The boxes in the picture contain parenthesized numbers. These numbers depict the lowest integration level, a box belongs to. Higher integration levels also contain all boxes of lower integration levels. When writing tests for a given integration level, related boxes with a lower integration level must be replaced by test doubles or drivers.

+
+
+
+Integration Levels +
+
+
+

The main difference between the integration levels is the amount of infrastructure needed to test them. The more infrastructure you need, the more bugs you will find, but the more instable and the slower your tests will be. So each project has to make a trade-off between pros and contras of including much infrastructure in tests and has to select the integration levels that fit best to the project.

+
+
+

Consider, that more infrastructure does not automatically lead to a better bug-detection. There may be bugs in your software that are masked by bugs in the infrastructure. The best way to find those bugs is to test with very few infrastructure.

+
+
+

External systems do not belong to any of the integration levels defined here. devonfw does not recommend involving real external systems in test automation. This means, they have to be replaced by test doubles in automated tests. An exception may be external systems that are fully under control of the own development team.

+
+
+

The following chapters describe the four integration levels.

+
+
+
Level 1 Module Test
+
+

The goal of an isolated module test is to provide fast feedback to the developer. Consequently, isolated module tests must not have any interaction with the client, the database, the file system, the network, etc.

+
+
+

An isolated module test is testing a single class or at least a small set of classes in isolation. If such classes depend on other components, external resources, etc., these shall be replaced with a test double.

+
+
+
+
public class MyClassTest extends ModuleTest {
+
+  @Test
+  public void testMyClass() {
+
+    // given
+    MyClass myClass = new MyClass();
+    // when
+    String value = myClass.doSomething();
+    // then
+    assertThat(value).isEqualTo("expected value");
+  }
+
+}
+
+
+
+

For an advanced example see here.

+
+
+
+
Level 2 Component Test
+
+

A component test aims to test components or component parts as a unit. +These tests can access resources such as a database (e.g. for DAO tests). +Further, no remote communication is intended here. Access to external systems shall be replaced by a test double.

+
+
+
    +
  • +

    For Spring stack, they are typically run with a (light-weight) infrastructure such as spring-boot-starter-test. A component-test is illustrated in the following example:

    +
    +
    +
    @SpringBootTest(classes = { MySpringBootApp.class }, webEnvironment = WebEnvironment.NONE)
    +public class UcFindCountryTest extends ComponentTest {
    +  @Inject
    +  private UcFindCountry ucFindCountry;
    +
    +  @Test
    +  public void testFindCountry() {
    +
    +    // given
    +    String countryCode = "de";
    +
    +    // when
    +    TestUtil.login("user", MyAccessControlConfig.FIND_COUNTRY);
    +    CountryEto country = this.ucFindCountry.findCountry(countryCode);
    +
    +    // then
    +    assertThat(country).isNotNull();
    +    assertThat(country.getCountryCode()).isEqualTo(countryCode);
    +    assertThat(country.getName()).isEqualTo("Germany");
    +  }
    +}
    +
    +
    +
    +

    This test will start the entire spring-context of your app (MySpringBootApp). Within the test spring will inject according spring-beans into all your fields annotated with @Inject. In the test methods you can use these spring-beans and perform your actual tests. This pattern can be used for testing DAOs/Repositories, Use-Cases, or any other spring-bean with its entire configuration including database and transactions.

    +
    +
  • +
  • +

    For Quarkus, you can similarly inject the CDI beans and perform tests. An example is shown below:

    +
    +
    +
    @QuarkusTest
    +public class UcFindCountryTest {
    +  @Inject
    +  private UcFindCountry ucFindCountry;
    +  ...
    +
    +
    +
  • +
+
+
+

When you are testing use-cases your authorization will also be in place. Therefore, you have to simulate a logon in advance, which is done via the login method in the above Spring example. The test-infrastructure will automatically do a logout for you after each test method in doTearDown.

+
+
+
+
Level 3 Subsystem Test
+
+

A subsystem test runs against the external interfaces (e.g. HTTP service) of the integrated subsystem. Subsystem tests of the client subsystem are described in the devon4ng testing guide. In devon4j the server (JEE application) is the subsystem under test. The tests act as a client (e.g. service consumer) and the server has to be integrated and started in a container.

+
+
+
    +
  • +

    With devon4j and Spring you can write a subsystem-test as easy as illustrated in the following example:

    +
    +
    +
    @SpringBootTest(classes = { MySpringBootApp.class }, webEnvironment = WebEnvironment.RANDOM_PORT)
    +public class CountryRestServiceTest extends SubsystemTest {
    +
    +  @Inject
    +  private ServiceClientFactory serviceClientFactory;
    +
    +  @Test
    +  public void testFindCountry() {
    +
    +    // given
    +    String countryCode = "de";
    +
    +    // when
    +    CountryRestService service = this.serviceClientFactory.create(CountryRestService.class);
    +    CountryEto country = service.findCountry(countryCode);
    +
    +    // then
    +    assertThat(country).isNotNull();
    +    assertThat(country.getCountryCode()).isEqualTo(countryCode);
    +    assertThat(country.getName()).isEqualTo("Germany");
    +  }
    +}
    +
    +
    +
    +

    Even though not obvious on the first look this test will start your entire application as a server on a free random port (so that it works in CI with parallel builds for different branches) and tests the invocation of a (REST) service including (un)marshalling of data (e.g. as JSON) and transport via HTTP (all in the invocation of the findCountry method).

    +
    +
  • +
+
+
+

Do not confuse a subsystem test with a system integration test. A system integration test validates the interaction of several systems where we do not recommend test automation.

+
+
+
+
Level 4 System Test
+
+

A system test has the goal to test the system as a whole against its official interfaces such as its UI or batches. The system itself runs as a separate process in a way close to a regular deployment. Only external systems are simulated by test doubles.

+
+
+

The devonfw only gives advice for automated system tests (TODO see allure testing framework). In nearly every project there must be manual system tests, too. These manual system tests are out of scope here.

+
+
+
+
Classifying Integration-Levels
+
+

For Spring stack, devon4j defines Category-Interfaces that shall be used as JUnit Categories. +Also devon4j provides abstract base classes that you may extend in your test-cases if you like.

+
+
+

devon4j further pre-configures the maven build to only run integration levels 1-2 by default (e.g. for fast feedback in continuous integration). It offers the profiles subsystemtest (1-3) and systemtest (1-4). In your nightly build you can simply add -Psystemtest to run all tests.

+
+
+
+
+
Implementation
+
+

This section introduces how to implement tests on the different levels with the given devonfw infrastructure and the proposed frameworks. +For Spring, see Spring Test Implementation

+
+
+
+
Regression testing
+
+

When it comes to complex output (even binary) that you want to regression test by comparing with an expected result, you should consider Approval Tests using ApprovalTests.Java. +If applied for the right problems, it can be very helpful.

+
+
+
+
Deployment Pipeline
+
+

A deployment pipeline is a semi-automated process that gets software-changes from version control into production. It contains several validation steps, e.g. automated tests of all integration levels. +Because devon4j should fit to different project types - from agile to waterfall - it does not define a standard deployment pipeline. But we recommend to define such a deployment pipeline explicitly for each project and to find the right place in it for each type of test.

+
+
+

For that purpose, it is advisable to have fast running test suite that gives as much confidence as possible without needing too much time and too much infrastructure. This test suite should run in an early stage of your deployment pipeline. Maybe the developer should run it even before he/she checked in the code. Usually lower integration levels are more suitable for this test suite than higher integration levels.

+
+
+

Note, that the deployment pipeline always should contain manual validation steps, at least manual acceptance testing. There also may be manual validation steps that have to be executed for special changes only, e.g. usability testing. Management and execution processes of those manual validation steps are currently not in the scope of devonfw.

+
+
+
+
Test Coverage
+
+

We are using tools (SonarQube/Jacoco) to measure the coverage of the tests. Please always keep in mind that the only reliable message of a code coverage of X% is that (100-X)% of the code is entirely untested. It does not say anything about the quality of the tests or the software though it often relates to it.

+
+
+
+
Test Configuration
+
+

This section covers test configuration in general without focusing on integration levels as in the first chapter.

+
+
+ +
+
+
Configure Test Specific Beans
+
+

Sometimes it can become handy to provide other or differently configured bean implementations via CDI than those available in production. For example, when creating beans using @Bean-annotated methods they are usually configured within those methods. WebSecurityBeansConfig shows an example of such methods.

+
+
+
+
@Configuration
+public class WebSecurityBeansConfig {
+  //...
+  @Bean
+  public AccessControlSchemaProvider accessControlSchemaProvider() {
+    // actually no additional configuration is shown here
+    return new AccessControlSchemaProviderImpl();
+  }
+  //...
+}
+
+
+
+

AccessControlSchemaProvider allows to programmatically access data defined in some XML file, e.g. access-control-schema.xml. Now, one can imagine that it would be helpful if AccessControlSchemaProvider would point to some other file than the default within a test class. That file could provide content that differs from the default. +The question is: how can I change resource path of AccessControlSchemaProviderImpl within a test?

+
+
+

One very helpful solution is to use static inner classes. +Static inner classes can contain @Bean -annotated methods, and by placing them in the classes parameter in @SpringBootTest(classes = { /* place class here*/ }) annotation the beans returned by these methods are placed in the application context during test execution. Combining this feature with inheritance allows to override methods defined in other configuration classes as shown in the following listing where TempWebSecurityConfig extends WebSecurityBeansConfig. This relationship allows to override public AccessControlSchemaProvider accessControlSchemaProvider(). Here we are able to configure the instance of type AccessControlSchemaProviderImpl before returning it (and, of course, we could also have used a completely different implementation of the AccessControlSchemaProvider interface). By overriding the method the implementation of the super class is ignored, hence, only the new implementation is called at runtime. Other methods defined in WebSecurityBeansConfig which are not overridden by the subclass are still dispatched to WebSecurityBeansConfig.

+
+
+
+
//... Other testing related annotations
+@SpringBootTest(classes = { TempWebSecurityConfig.class })
+public class SomeTestClass {
+
+  public static class TempWebSecurityConfig extends WebSecurityBeansConfig {
+
+    @Override
+    @Bean
+    public AccessControlSchemaProvider accessControlSchemaProvider() {
+
+      ClassPathResource resource = new ClassPathResource(locationPrefix + "access-control-schema3.xml");
+      AccessControlSchemaProviderImpl accessControlSchemaProvider = new AccessControlSchemaProviderImpl();
+      accessControlSchemaProvider.setAccessControlSchema(resource);
+      return accessControlSchemaProvider;
+    }
+  }
+}
+
+
+
+

The following chapter of the Spring framework documentation explains this issue, but uses a slightly different way to obtain the configuration.

+
+
+
+
Test Data
+
+

It is possible to obtain test data in two different ways depending on your test’s integration level.

+
+
+
+
+
Debugging Tests
+
+

The following two sections describe two debugging approaches for tests. Tests are either run from within the IDE or from the command line using Maven.

+
+
+
Debugging with the IDE
+
+

Debugging with the IDE is as easy as always. Even if you want to execute a SubsystemTest which needs a Spring context and a server infrastructure to run properly, you just set your breakpoints and click on Debug As → JUnit Test. The test infrastructure will take care of initializing the necessary infrastructure - if everything is configured properly.

+
+
+
+
Debugging with Maven
+
+

Please refer to the following two links to find a guide for debugging tests when running them from Maven.

+
+ +
+

In essence, you first have to execute a test using the command line. Maven will halt just before the test execution and wait for your IDE to connect to the process. When receiving a connection the test will start and then pause at any breakpoint set in advance. +The first link states that tests are started through the following command:

+
+
+
+
mvn -Dmaven.surefire.debug test
+
+
+
+

Although this is correct, it will run every test class in your project and - which is time consuming and mostly unnecessary - halt before each of these tests. +To counter this problem you can simply execute a single test class through the following command (here we execute the TablemanagementRestServiceTest from the restaurant sample application):

+
+
+
+
mvn test -Dmaven.surefire.debug test -Dtest=TablemanagementRestServiceTest
+
+
+
+

It is important to notice that you first have to execute the Maven command in the according submodule, e.g. to execute the TablemanagementRestServiceTest you first have to navigate to the core module’s directory.

+
+
+ +
+

==Transfer-Objects

+
+
+

The technical data model is defined in form of persistent entities. +However, passing persistent entities via call-by-reference across the entire application will soon cause problems:

+
+
+
    +
  • +

    Changes to a persistent entity are directly written back to the persistent store when the transaction is committed. When the entity is send across the application also changes tend to take place in multiple places endangering data sovereignty and leading to inconsistency.

    +
  • +
  • +

    You want to send and receive data via services across the network and have to define what section of your data is actually transferred. If you have relations in your technical model you quickly end up loading and transferring way too much data.

    +
  • +
  • +

    Modifications to your technical data model shall not automatically have impact on your external services causing incompatibilities.

    +
  • +
+
+
+

To prevent such problems transfer-objects are used leading to a call-by-value model and decoupling changes to persistent entities.

+
+
+

In the following sections the different types of transfer-objects are explained. +You will find all according naming-conventions in the architecture-mapping

+
+
+

To structure your transfer objects, we recommend the following approaches:

+
+
+ +
+
+

Also considering the following transfer objects in specific cases:

+
+
+
+
SearchCriteriaTo
+
+

For searching we create or generate a «BusinessObject»SearchCriteriaTo representing a query to find instances of «BusinessObject».

+
+
TO
+
+

There are typically transfer-objects for data that is never persistent. +For very generic cases these just carry the suffix To.

+
+
STO
+
+

We can potentially create separate service transfer objects (STO) (if possible named «BusinessObject»Sto) to keep the service API stable and independent of the actual data-model. +However, we usually do not need this and want to keep our architecture simple. +Only create STOs if you need service versioning and support previous APIs or to provide legacy service technologies that require their own isolated data-model. +In such case you also need beanmapping between STOs and ETOs/DTOs what means extra effort and complexity that should be avoided.

+
+
+
+
+
+

==Bean Mapping in devon4j-spring

+
+
+

We have developed a solution that uses a BeanMapper that allows to abstract from the underlying implementation. As mentioned in the general bean mapping guide, we started with Dozer a Java Bean to Java Bean mapper that recursively copies data from one object to another. Now we recommend using Orika. This guide will show an introduction to Orika and Dozer bean-mapper.

+
+
+
+
+
Bean-Mapper Dependency
+
+

To get access to the BeanMapper we have to use either of the below dependency in our POM:

+
+
+
Listing 22. Orika
+
+
<dependency>
+    <groupId>com.devonfw.java.modules</groupId>
+    <artifactId>devon4j-beanmapping-orika</artifactId>
+    <version>2020.12.002</version>
+</dependency>
+
+
+
+
Listing 23. Dozer
+
+
<dependency>
+    <groupId>com.devonfw.java.modules</groupId>
+    <artifactId>devon4j-beanmapping-dozer</artifactId>
+    <version>2020.12.002</version>
+</dependency>
+
+
+
+
+
Bean-Mapper Configuration
+
+
Bean-Mapper Configuration using Dozer
+
+

The BeanMapper implementation is based on an existing open-source bean-mapping framework. +In case of Dozer the mapping is configured src/main/resources/config/app/common/dozer-mapping.xml.

+
+
+

See the my-thai-star dozer-mapping.xml as an example. +Important is that you configure all your custom datatypes as <copy-by-reference> tags and have the mapping from PersistenceEntity (ApplicationPersistenceEntity) to AbstractEto configured properly:

+
+
+
+
 <mapping type="one-way">
+    <class-a>com.devonfw.module.basic.common.api.entity.PersistenceEntity</class-a>
+    <class-b>com.devonfw.module.basic.common.api.to.AbstractEto</class-b>
+    <field custom-converter="com.devonfw.module.beanmapping.common.impl.dozer.IdentityConverter">
+      <a>this</a>
+      <b is-accessible="true">persistentEntity</b>
+    </field>
+</mapping>
+
+
+
+
+
+
Bean-Mapper Configuration using Orika
+
+

Orika with devonfw is configured by default and sets some custom mappings for GenericEntity.java to GenericEntityDto.java. To specify and customize the mappings you can create the class BeansOrikaConfig.java that extends the class BaseOrikaConfig.java from the devon4j.orika package. To register a basic mapping, register a ClassMap for the mapperFactory with your custom mapping. Watch the example below and follow the basic Orika mapping configuration guide and the Orika advanced mapping guide.

+
+
+

Register Mappings:

+
+
+
+
mapperFactory.classMap(UserEntity.class, UserEto.class)
+			.field("email", "email")
+			.field("username", "name")
+			.byDefault()
+			.register();
+
+
+
+
+
Bean-Mapper Usage
+
+

Then we can get the BeanMapper via dependency-injection, which we typically already provide by an abstract base class (e.g. AbstractUc). Now we can solve our problem very easily:

+
+
+
+
...
+UserEntity resultEntity = ...;
+...
+return getBeanMapper().map(resultEntity, UserEto.class);
+
+
+
+ +
+

==Datatypes

+
+
+
+
+

A datatype is an object representing a value of a specific type with the following aspects:

+
+
+
    +
  • +

    It has a technical or business specific semantic.

    +
  • +
  • +

    Its JavaDoc explains the meaning and semantic of the value.

    +
  • +
  • +

    It is immutable and therefore stateless (its value assigned at construction time and can not be modified).

    +
  • +
  • +

    It is serializable.

    +
  • +
  • +

    It properly implements #equals(Object) and #hashCode() (two different instances with the same value are equal and have the same hash).

    +
  • +
  • +

    It shall ensure syntactical validation so it is NOT possible to create an instance with an invalid value.

    +
  • +
  • +

    It is responsible for formatting its value to a string representation suitable for sinks such as UI, loggers, etc. Also consider cases like a Datatype representing a password where toString() should return something like "**" instead of the actual password to prevent security accidents.

    +
  • +
  • +

    It is responsible for parsing the value from other representations such as a string (as needed).

    +
  • +
  • +

    It shall provide required logical operations on the value to prevent redundancies. Due to the immutable attribute all manipulative operations have to return a new Datatype instance (see e.g. BigDecimal.add(java.math.BigDecimal)).

    +
  • +
  • +

    It should implement Comparable if a natural order is defined.

    +
  • +
+
+
+

Based on the Datatype a presentation layer can decide how to view and how to edit the value. Therefore a structured data model should make use of custom datatypes in order to be expressive. +Common generic datatypes are String, Boolean, Number and its subclasses, Currency, etc. +Please note that both Date and Calendar are mutable and have very confusing APIs. Therefore, use JSR-310 or jodatime instead. +Even if a datatype is technically nothing but a String or a Number but logically something special, it is worth defining it as a dedicated datatype class already for the purpose of having a central javadoc to explain it. On the other side, avoid introducing technical datatypes like String32 for a String with a maximum length of 32 characters, as this is not adding value in the sense of a real Datatype. +It is suitable and in most cases also recommended to use the class implementing the datatype as API omitting a dedicated interface.

+
+
+
+— mmm project
+datatype javadoc +
+
+ +
+
+
Datatype Packaging
+
+

For the devonfw we use a common packaging schema. +The specifics for datatypes are as following:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
SegmentValueExplanation

<component>

*

Here we use the (business) component defining the datatype or general for generic datatypes.

<layer>

common

Datatypes are used across all layers and are not assigned to a dedicated layer.

<scope>

api

Datatypes are always used directly as API even though they may contain (simple) implementation logic. Most datatypes are simple wrappers for generic Java types (e.g. String) but make these explicit and might add some validation.

+
+
+
Technical Concerns
+
+

Many technologies like Dozer and QueryDSL’s (alias API) are heavily based on reflection. For them to work properly with custom datatypes, the frameworks must be able to instantiate custom datatypes with no-argument constructors. It is therefore recommended to implement a no-argument constructor for each datatype of at least protected visibility.

+
+
+
+
Datatypes in Entities
+
+

The usage of custom datatypes in entities is explained in the persistence layer guide.

+
+
+
+
Datatypes in Transfer-Objects
+
+
XML
+
+

For mapping datatypes with JAXB see XML guide.

+
+
+
+
JSON
+
+

For mapping datatypes from and to JSON see JSON custom mapping.

+
+
+ +
+

==CORS configuration in Spring

+
+
+
+
+
Dependency
+
+

To enable the CORS support from the server side for your devon4j-Spring application, add the below dependency:

+
+
+
+
<dependency>
+  <groupId>com.devonfw.java.starters</groupId>
+  <artifactId>devon4j-starter-security-cors</artifactId>
+</dependency>
+
+
+
+
+
Configuration
+
+

Add the below properties in your application.properties file:

+
+
+
+
#CORS support
+security.cors.spring.allowCredentials=true
+security.cors.spring.allowedOriginPatterns=*
+security.cors.spring.allowedHeaders=*
+security.cors.spring.allowedMethods=OPTIONS,HEAD,GET,PUT,POST,DELETE,PATCH
+security.cors.pathPattern=/**
+
+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
AttributeDescriptionHTTP Header

allowCredentials

Decides whether the browser should include any cookies associated with the request (true if cookies should be included).

Access-Control-Allow-Credentials

allowedOrigins

List of allowed origins (use * to allow all origins).

Access-Control-Allow-Origin

allowedMethods

List of allowed HTTP request methods (OPTIONS, HEAD, GET, PUT, POST, DELETE, PATCH, etc.).

-

allowedHeaders

List of allowed headers that can be used during the request (use * to allow all headers requested by the client)

Access-Control-Allow-Headers

pathPattern

Ant-style pattern for the URL paths where to apply CORS. Use "/**" to match all URL paths.

+
+ +
+

==Microservices in devonfw

+
+
+

The Microservices architecture is an approach for application development based on a series of small services grouped under a business domain. Each individual service runs autonomously, communicating with the others through its API. That independence between the different services allows managing (upgrading, fixing, deploying, etc.) each one without affecting the rest of the system’s services. In addition to that, the microservices architecture allows scaling specific services when facing an increase in requests, so the applications based on microservices are more flexible and stable, and can be adapted quickly to demand changes.

+
+
+

However, this new approach, developing apps based on microservices, presents some downsides.

+
+
+

Let’s see the main challenges when working with microservices:

+
+
+
    +
  • +

    Having the applications divided in different services we will need a component (router) to redirect each request to the related microservice. These redirection rules must implement filters to guarantee a proper functionality.

    +
  • +
  • +

    In order to manage the routing process correctly, the application will also need a catalog with all the microservices and their details: IPs and ports of each of the deployed instances of each microservice, the state of each instance and some other related information. This catalog is called Service Discovery.

    +
  • +
  • +

    With all the information of the Service Discovery the application will need to calculate and select between all the available instances of a microservice which is the suitable one. This will be figured out by the library Client Side Load Balancer.

    +
  • +
  • +

    The different microservices will be likely interconnected with each other, that means that in case of failure of one of the microservices involved in a process, the application must implement a mechanism to avoid the error propagation through the rest of the services and provide an alternative as a process result. To solve this, the pattern Circuit Breaker can be implemented in the calls between microservices.

    +
  • +
  • +

    As we have mentioned, the microservices will exchange calls and information with each other so our applications will need to provide a secured context to avoid not allowed operations or intrusions. In addition, since microservices must be able to operate in an isolated way, it is not recommended to maintain a session. To meet this need without using Spring sessions, a token-based authentication is used that exchanges information using the json web token (JWT) protocol.

    +
  • +
+
+
+

In addition to all of this we will find other issues related to this particular architecture that we will address fitting the requirements of each project.

+
+
+
    +
  • +

    Distributed data bases: each instance of a microservice should have only one data base.

    +
  • +
  • +

    Centralized logs: each instance of a microservice creates a log and a trace that should be centralized to allow an easier way to read all that information.

    +
  • +
  • +

    Centralized configuration: each microservice has its own configuration, so our applications should group all those configurations in only one place to ease the configuration management.

    +
  • +
  • +

    Automatized deployments: as we are managing several components (microservices, catalogs, balancers, etc.) the deployment should be automatized to avoid errors and ease this process.

    +
  • +
+
+
+

To address the above, devonfw microservices has an alternative approach Microservices based on Netflix-Tools.

+
+
+ +
+

==Caching +Caching is a technical approach to improve performance. While it may appear easy at first sight, it is an advanced topic. In general, try to use caching only when required for performance reasons. If you come to the point that you need caching, first think about:

+
+
+
    +
  • +

    What to cache?
    +Be sure about what you want to cache. Is it static data? How often will it change? What will happen if the data changes but due to caching you might receive "old" values? Can this be tolerated? For how long? This is not a technical question but a business requirement.

    +
  • +
  • +

    Where to cache?
    +Will you cache data on client or server? Where exactly?

    +
  • +
  • +

    How to cache?
    +Is a local cache sufficient or do you need a shared cache?

    +
  • +
+
+
+
+
Local Cache
+ +
+
+
Shared Cache
+
+
Distributed Cache
+ +
+
+ +
+
Caching of Web-Resources
+ +
+ +
+

==Feature-Toggles

+
+
+

Most software development teams use Feature-Branching to be able to work in parallel and maintain a stable main branch in the VCS. However, Feature-Branching might not be the ideal tool in every case because of big merges and isolation between development groups. In many cases, Feature-Toggles can avoid some of these problems, so they should definitely be considered for use in collaborative software development.

+
+
+
+
Implementation with the devonfw
+
+

To use Feature-Toggles with the devonfw, use the Framework Togglz because it has all the features generally needed and provides a great documentation.

+
+
+

For a pretty minimal working example, also see this fork.

+
+
+
Preparation
+
+

The following example takes place in the oasp-sample-core project, so the necessary dependencies have to be added to the according pom.xml file. Required are the main Togglz project including Spring support, the Togglz console to graphically change the feature state and the Spring security package to handle authentication for the Togglz console.

+
+
+
+
<!-- Feature-Toggle-Framework togglz -->
+<dependency>
+  <groupId>org.togglz</groupId>
+  <artifactId>togglz-spring-boot-starter</artifactId>
+  <version>2.3.0.RC2</version>
+</dependency>
+
+<dependency>
+  <groupId>org.togglz</groupId>
+  <artifactId>togglz-console</artifactId>
+  <version>2.3.0.RC2</version>
+</dependency>
+
+<dependency>
+  <groupId>org.togglz</groupId>
+  <artifactId>togglz-spring-security</artifactId>
+  <version>2.3.0.RC2</version>
+</dependency>
+
+
+
+

In addition to that, the following lines have to be included in the spring configuration file application.properties

+
+
+
+
##configuration for the togglz Feature-Toggle-Framework
+togglz.enabled=true
+togglz.console.secured=false
+
+
+
+
+
Small features
+
+

For small features, a simple query of the toggle state is often enough to achieve the desired functionality. To illustrate this, a simple example follows, which implements a toggle to limit the page size returned by the staffmanagement. See here for further details.

+
+
+

This is the current implementation to toggle the feature:

+
+
+
+
// Uncomment next line in order to limit the maximum page size for the staff member search
+// criteria.limitMaximumPageSize(MAXIMUM_HIT_LIMIT);
+
+
+
+

To realise this more elegantly with Togglz, first an enum is required to configure the feature-toggle.

+
+
+
+
public enum StaffmanagementFeatures implements Feature {
+  @Label("Limit the maximum page size for the staff members")
+  LIMIT_STAFF_PAGE_SIZE;
+
+  public boolean isActive() {
+    return FeatureContext.getFeatureManager().isActive(this);
+  }
+}
+
+
+
+

To familiarize the Spring framework with the enum, add the following entry to the application.properties file.

+
+
+
+
togglz.feature-enums=io.oasp.gastronomy.restaurant.staffmanagement.featuremanager.StaffmanagementFeatures
+
+
+
+

After that, the toggle can be used easily by calling the isActive() method of the enum.

+
+
+
+
if (StaffmanagementFeatures.LIMIT_STAFF_PAGE_SIZE.isActive()) {
+  criteria.limitMaximumPageSize(MAXIMUM_HIT_LIMIT);
+}
+
+
+
+

This way, you can easily switch the feature on or off by using the administration console at http://localhost:8081/devon4j-sample-server/togglz-console. If you are getting redirected to the login page, just sign in with any valid user (e.g. admin).

+
+
+
+
Extensive features
+
+

When implementing extensive features, you might want to consider using the strategy design pattern to maintain the overview of your software. The following example is an implementation of a feature which adds a 25% discount to all products managed by the offermanagement.

+
+
+
Therefore there are two strategies needed:
+
    +
  1. +

    Return the offers with the normal price

    +
  2. +
  3. +

    Return the offers with a 25% discount

    +
  4. +
+
+
+

The implementation is pretty straightforward, so use this as a reference. Compare this for further details.

+
+
+
+
@Override
+@RolesAllowed(PermissionConstants.FIND_OFFER)
+public PaginatedListTo<OfferEto> findOfferEtos(OfferSearchCriteriaTo criteria) {
+  criteria.limitMaximumPageSize(MAXIMUM_HIT_LIMIT);
+  PaginatedListTo<OfferEntity> offers = getOfferDao().findOffers(criteria);
+
+
+  if (OffermanagementFeatures.DISCOUNT.isActive()) {
+    return getOfferEtosDiscount(offers);
+  } else {
+    return getOfferEtosNormalPrice(offers);
+  }
+
+}
+
+
+// Strategy 1: Return the OfferEtos with the normal price
+private PaginatedListTo<OfferEto> getOfferEtosNormalPrice(PaginatedListTo<OfferEntity> offers) {
+  return mapPaginatedEntityList(offers, OfferEto.class);
+}
+
+// Strategy 2: Return the OfferEtos with the new, discounted price
+private PaginatedListTo<OfferEto> getOfferEtosDiscount(PaginatedListTo<OfferEntity> offers) {
+  offers = addDiscountToOffers(offers);
+  return mapPaginatedEntityList(offers, OfferEto.class);
+}
+
+private PaginatedListTo<OfferEntity> addDiscountToOffers(PaginatedListTo<OfferEntity> offers) {
+  for (OfferEntity oe : offers.getResult()) {
+    Double oldPrice = oe.getPrice().getValue().doubleValue();
+
+    // calculate the new price and round it to two decimal places
+    BigDecimal newPrice = new BigDecimal(oldPrice * 0.75);
+    newPrice = newPrice.setScale(2, RoundingMode.HALF_UP);
+
+    oe.setPrice(new Money(newPrice));
+  }
+
+  return offers;
+}
+
+
+
+
+
+
Guidelines for a successful use of feature-toggles
+
+

The use of feature-toggles requires a specified set of guidelines to maintain the overview on the software. The following is a collection of considerations and examples for conventions that are reasonable to use.

+
+
+
Minimize the number of toggles
+
+

When using too many toggles at the same time, it is hard to maintain a good overview of the system and things like finding bugs are getting much harder. Additionally, the management of toggles in the configuration interface gets more difficult due to the amount of toggles.

+
+
+

To prevent toggles from piling up during development, a toggle and the associated obsolete source code should be removed after the completion of the corresponding feature. In addition to that, the existing toggles should be revisited periodically to verify that these are still needed and therefore remove legacy toggles.

+
+
+
+
Consistent naming scheme
+
+

A consistent naming scheme is the key to a structured and easily maintainable set of features. This should include the naming of toggles in the source code and the appropriate naming of commit messages in the VCS. The following section contains an example for a useful naming scheme including a small example.

+
+
+

Every Feature-Toggle in the system has to get its own unique name without repeating any names of features, which were removed from the system. The chosen names should be descriptive names to simplify the association between toggles and their purpose. If the feature should be split into multiple sub-features, you might want to name the feature like the parent feature with a describing addition. If for example you want to split the DISCOUNT feature into the logic and the UI part, you might want to name the sub-features DISCOUNT_LOGIC and DISCOUNT_UI.

+
+
+

The entry in the togglz configuration enum should be named identically to the aforementioned feature name. The explicitness of feature names prevents a confusion between toggles due to using multiple enums.

+
+
+

Commit messages are very important for the use of feature-toggles and also should follow a predefined naming scheme. You might want to state the feature name at the beginning of the message, followed by the actual message, describing what the commit changes to the feature. An example commit message could look like the following:

+
+
+
+
DISCOUNT: Add the feature-toggle to the offermanagement implementation.
+
+
+
+

Mentioning the feature name in the commit message has the advantage, that you can search your git log for the feature name and get every commit belonging to the feature. An example for this using the tool grep could look like this.

+
+
+
+
$ git log | grep -C 4 DISCOUNT
+
+commit 034669a48208cb946cc6ba8a258bdab586929dd9
+Author: Florian Luediger <florian.luediger@somemail.com>
+Date:   Thu Jul 7 13:04:37 2016 +0100
+
+DISCOUNT: Add the feature-toggle to the offermanagement implementation.
+
+
+
+

To keep track of all the features in your software system, a platform like GitHub offers issues. When creating an issue for every feature, you can retrace, who created the feature and who is assigned to completing its development. When referencing the issue from commits, you also have links to all the relevant commits from the issue view.

+
+
+
+
Placement of toggle points
+
+

To maintain a clean codebase, you definitely want to avoid using the same toggle in different places in the software. There should be one single query of the toggle which should be able to toggle the whole functionality of the feature. If one single toggle point is not enough to switch the whole feature on or off, you might want to think about splitting the feature into multiple ones.

+
+
+
+
Use of fine-grained features
+
+

Bigger features in general should be split into multiple sub-features to maintain the overview on the codebase. These sub-features get their own feature-toggle and get implemented independently.

+
+
+ +
+

==Accessibility

+
+
+

TODO

+
+ + + +
+ +
+ + + + + +
+ + +devon4j-kafka has been abandoned. Its main feature was the implementation of a retry pattern using multiple topics. This implementation has become an integral part of Spring Kafka. We recommend using Spring Kafka’s own implementation for retries. +
+
+
+

==Messaging Services

+
+
+

Messaging Services provide an asynchronous communication mechanism between applications. Technically this is implemented using Apache Kafka .

+
+
+

For spring, devonfw uses Spring-Kafka as kafka framework. +For more details, check the devon4j-kafka.

+
+ +
+ +
+

==Messaging

+
+
+

Messaging in Java is done using the JMS standard from JEE.

+
+
+
+
+
Products
+
+

For messaging you need to choose a JMS provider such as:

+
+
+ +
+
+
+
Receiver
+
+

As a receiver of messages is receiving data from other systems it is located in the service-layer.

+
+
+
JMS Listener
+
+

A JmsListener is a class listening and consuming JMS messages. It should carry the suffix JmsListener and implement the MessageListener interface or have its listener method annotated with @JmsListener. This is illustrated by the following example:

+
+
+
+
@Named
+@Transactional
+public class BookingJmsListener /* implements MessageListener */ {
+
+  @Inject
+  private Bookingmanagement bookingmanagement;
+
+  @Inject
+  private MessageConverter messageConverter;
+
+  @JmsListener(destination = "BOOKING_QUEUE", containerFactory = "jmsListenerContainerFactory")
+  public void onMessage(Message message) {
+    try {
+      BookingTo bookingTo = (BookingTo) this.messageConverter.fromMessage(message);
+      this.bookingmanagement.importBooking(bookingTo);
+    } catch (MessageConversionException | JMSException e) {
+      throw new InvalidMessageException(message);
+    }
+  }
+}
+
+
+
+
+
+
Sender
+
+

The sending of JMS messages is considered as any other sending of data like kafka messages or RPC calls via REST using service-client, gRPC, etc. +This will typically happen directly from a use-case in the logic-layer. +However, the technical complexity of the communication and protocols itself shall be hidden from the use-case and not be part of the logic layer. +With spring we can simply use JmsTemplate to do that.

+
+
+ +
+

==Full Text Search

+
+
+

If you want to offer all your users fast and simple searches with just a single search field (like in google), you need full text indexing and search support.

+
+
+
+
Solutions
+
+ +
+
+

Maybe you also want to use native features of your database

+
+ +
+
+
Best Practices
+
+

TODO

+
+
+
+
+

1.78. Tutorials

+ +
+

==Creating a new application

+
+
+
Running the archetype
+
+

In order to create a new application you must use the archetype provided by devon4j which uses the maven archetype functionality.

+
+
+

To create a new application, you should have installed devonfw IDE. Follow the devon ide documentation to install +the same. +You can choose between 2 alternatives, create it from command line or, in more visual manner, within eclipse.

+
+
+
From command Line
+
+

To create a new devon4j application from command line, you can simply run the following command:

+
+
+
+
devon java create com.example.application.sampleapp
+
+
+
+

For low-level creation you can also manually call this command:

+
+
+
+
mvn -DarchetypeVersion=${devon4j.version} -DarchetypeGroupId=com.devonfw.java.templates -DarchetypeArtifactId=devon4j-template-server archetype:generate -DgroupId=com.example.application -DartifactId=sampleapp -Dversion=1.0.0-SNAPSHOT -Dpackage=com.devonfw.application.sampleapp
+
+
+
+

Attention: The archetypeVersion (first argument) should be set to the latest version of devon4j. You can easily determine the version from this badge: +latest devon4j version

+
+
+

Further providing additional properties (using -D parameter) you can customize the generated app:

+
+ + +++++ + + + + + + + + + + + + + + + + + + + +
Table 17. Options for app template
propertycommentexample

dbType

Choose the type of RDBMS to use (hana, oracle, mssql, postgresql, mariadb, mysql, etc.)

-DdbType=postgresql

batch

Option to add a batch module

-Dbatch=batch

+
+
+
From Eclipse
+
+
+
After that, you should follow this Eclipse steps to create your application:
+
+
+
+
    +
  • +

    Create a new Maven Project.

    +
  • +
  • +

    Choose the devon4j-template-server archetype, just like the image.

    +
  • +
+
+
+
+Select archetype +
+
+
+
    +
  • +

    Fill the Group Id, Artifact Id, Version and Package for your project.

    +
  • +
+
+
+
+Configure archetype +
+
+
+
    +
  • +

    Finish the Eclipse assistant and you are ready to start your project.

    +
  • +
+
+
+
+
+
What is generated
+
+

The application template (archetype) generates a Maven multi-module project. It has the following modules:

+
+
+
    +
  • +

    api: module with the API (REST service interfaces, transferobjects, datatypes, etc.) to be imported by other apps as a maven dependency in order to invoke and consume the offered (micro)services.

    +
  • +
  • +

    core: maven module containing the core of the application.

    +
  • +
  • +

    batch: optional module for batch(es)

    +
  • +
  • +

    server: module that bundles the entire app (core with optional batch) as a WAR file.

    +
  • +
+
+
+

The toplevel pom.xml of the generated project has the following features:

+
+
+
    +
  • +

    Properties definition: Spring-boot version, Java version, etc.

    +
  • +
  • +

    Modules definition for the modules (described above)

    +
  • +
  • +

    Dependency management: define versions for dependencies of the technology stack that are recommended and work together in a compatible way.

    +
  • +
  • +

    Maven plugins with desired versions and configuration

    +
  • +
  • +

    Profiles for test stages

    +
  • +
+
+
+
+
How to run your app
+
+
Run app from IDE
+
+

To run your application from your favourite IDE, simply launch SpringBootApp as java application.

+
+
+
+
Run app as bootified jar or war
+
+

More details are available here.

+
+ +
+

==Quarkus

+
+
+

Quarkus is a Java framework for building cloud-native apps. +It is fully supported by devonfw as an option and alternative to spring. +Additional things like extensions will be available on the devon4quarkus GitHub repository.

+
+
+
+
+
+

1.79. Guide to the Reader

+
+

Depending on your intention of reading this document, you might be more interested in the following chapters:

+
+
+
    +
  • +

    If you are completely new to Quarkus, you may be interested in the pros and cons of Quarkus. Also, take a look at the official Quarkus website. You might also be interested in the features that GraalVM offers.

    +
  • +
  • +

    If you are new to devon4j, take a look at devon4j’s recommendations on general best practices. Check out the chapters on architecture design, project structuring, and coding conventions. Follow the referenced links to explore a topic in more depth.

    +
  • +
  • +

    If you are an experienced Spring developer and want to get in touch with Quarkus, read our Getting started with Quarkus for Spring developers guide.

    +
  • +
  • +

    If you’re looking to build your first Quarkus application, the Quarkus website offers some good getting started guides. Also, check out our Quarkus template guide, which gives you some recommendations on extensions and frameworks to use. It also provides some links to the Quarkus code generator with preselected configurations you can use to create your application.

    +
  • +
  • +

    If you want to have a Quarkus sample application using devon4j recommendations, check out our Quarkus reference application.

    +
  • +
  • +

    If you have a Spring application and want to migrate it to Quarkus, take a look at our migration guide.

    +
  • +
  • +

    If you already have some experience with devon4j and Quarkus and need more information on a specific topic, check out our Quarkus guides. If you don’t find what you are looking for there, check out the general section. devon4j uses general solutions for Java, so solutions for both Quarkus and Spring are documented there.

    +
  • +
  • +

    If you want to learn how to build native images, check out this guide.

    +
  • +
+
+
+
+

1.80. Pros

+
+

Quarkus offers the following benefits:

+
+
+
    +
  • +

    fast turn-around cycles for developers
    +Save changes in your Java code and immediately test the results without restarting or waiting

    +
  • +
  • +

    faster start-up and less memory footprint
    +When building your app as native-images via GraalVM, it gets highly optimized. As a result, it starts up lightning fast and consumes much less memory. This is a great advantage for cloud deployment as well as for sustainability. You can find a performance comparison between Spring and Quarkus here.

    +
  • +
  • +

    clean and lean +As quarkus was born as a cloud-native framework, it is very light-weight and does not carry much history and legacy.

    +
  • +
+
+
+
+

1.81. Cons

+
+

Quarkus has the following drawbacks:

+
+
+
    +
  • +

    less flexible
    +Quarkus is less flexible compared to spring, or in other words, it is more biased and coupled to specific implementations. However, the implementations work and you have less things to choose and worry about. However, in case you want to integrate a specific or custom library, you may hit limitations or lose support for native-images, especially when that library is based on reflection. Therefore, check your requirements and technology stack early on when making your choice.

    +
  • +
  • +

    less established
    +Since quarkus was born in 2019, it is modern but also less established. It will be easier to get developers for spring, but we already consider quarkus mature and established enough for building production-ready apps.

    +
  • +
+
+
+

==Quarkus Quickstart

+
+
+

This guide serves as a quickstart on how to create a Quarkus app, briefly presenting the key functionalities that Quarkus provides, both for beginners and experienced developers.

+
+
+

1.81.1. Introduction to Quarkus

+
+

To get a first introduction to Quarkus, you can read the Quarkus introduction guide. To get a brief overview of where you can find the important Quarkus related guides, follow the chapter guide to the reader. +Also, see the comparison of the advantages and disadvantages of a Quarkus application compared to the alternative framework Spring. +This comparison will be supported by our performance comparison between Spring and Quarkus, which demonstrates the lower resource consumption and startup time of Quarkus applications.

+
+
+
+

1.81.2. Installation of Tools and Dependencies

+
+

First, we need to install some dependencies and tools before we can start programming. Our tool devonfw-ide comes with many development tools for you. +We need to install the following tools for this guide:

+
+
+
    +
  • +

    Maven

    +
  • +
  • +

    Java

    +
  • +
  • +

    any IDE (devonfw-ide supports Eclipse, Intellij and VScode)

    +
  • +
  • +

    Docker

    +
  • +
+
+
+

We recommend installing the devonfw-ide with the tools, but if you already have your system configured and the tools above installed, you can skip to Bootstrap a Quarkus Project, otherwise we will show you how to set up and update your devonfw-ide.

+
+
+
devonfw-ide
+
    +
  1. +

    Install devonfw-ide
    +Follow the Setup to install the devonfw-ide with Java, Maven, Eclipse and VScode.

    +
    +
      +
    1. +

      Command to install Docker
      +devon docker setup

      +
    2. +
    +
    +
  2. +
  3. +

    Update devonfw-ide
    +As we are still working on improving devonfw-ide, we recommend to update your already installed devonfw-ide and tools in order to include essential features for cloud development with Quarkus that you could be missing.

    +
  4. +
+
+
+

Use the commands devon ide update, devon ide update software, and devon ide scripts to update devonfw-ide and all installed software.

+
+
+

Go to the main folder under workspaces of the devonfw-ide installation. +We will create the project there.

+
+
+
+

1.81.3. Bootstrap a Quarkus Project

+
+

Quarkus provides multiple ways to bootstrap a project. +The option to bootstrap a project via the command-line is shown in the Quarkus getting started guide Bootstrap the project. +Quarkus also provides a project builder where you can select some extensions, the build tool for your project, and if you want, some starter code. +This will deliver a project skeleton with the configured project dependencies and also contributes the information to compile the application natively. To get some recommendations on starter templates, follow the guide on: template recommendations.

+
+
+ + + + + +
+ + +
+

By creating a Quarkus project from the command-line or with the project builder, you get a different project structure and have to adapt it to the devon4j conventions shown in the next Chapter.

+
+
+
+
+
Project Structure
+
+

We provide a recommendation and guideline for a modern project structure to help organize your project into logically related modules. +In order to comply with the requirements of modern cloud development and microservice architectures, follow the guide and apply the modern project structure to your project. You can also find similar modules in our example projects.

+
+
+
+
+

1.81.4. Introduction to Quarkus Functionality

+
+

Before we start programming, you should first have a look at the functionality of Quarkus.

+
+
+
Quarkus functionality guides
+
    +
  1. +

    Getting started guide from Quarkus
    +This guide presents a good overview of the functionality of Quarkus. The simple Greeting Service gives a brief introduction into concepts like CDI, testing, dev mode, packaging, and running the app.

    +
  2. +
  3. +

    From Spring to Quarkus
    +For experienced Spring developers that have already followed devon4j guidelines, you can read our guide to getting started with Quarkus for Spring developer, as it goes more into the differences that can give you a more detailed comparison to Spring.

    +
    +
      +
    1. +

      Migrate a Spring app to Quarkus
      +This guide shows how to migrate a Spring application to a Quarkus application with devon4j conventions.

      +
    2. +
    +
    +
  4. +
+
+
+
+

1.81.5. Create a REST service

+
+

Now let’s create our first REST CRUD service with Quarkus. +We give you the options of using a guide to start to code the service yourself or to just download a service that’s ready to use.

+
+
+
Options
+
    +
  1. +

    Create the service yourself
    +There is a good Quarkus guide for a simple JSON REST service that will guide you through your first application and help you implement the definition of endpoints with JAX-RS and an Entity that will be managed by the service, and also show how to configure the JSON support.

    +
  2. +
  3. +

    Use an existing Quarkus project
    +You don’t want to code a service and just want to test some Quarkus functionalities? Just load a Quarkus sample project provided for every existing quickstart guide and the supported framework. +Our Team also provides some Quarkus applications that are working and can be loaded and tested.

    +
    +
      +
    • +

      reference project is a service that manages products. It contains the devon4j modern project structure, pagination, queries, a Postgres database, SwaggerUI, and support for Kubernetes deploy. To add OpenTelemetry support, see the following guide. +This project will be steadily improved and is used to showcase the abilities of Quarkus with devon4j.

      +
    • +
    • +

      minimal Quarkus project is just the Quarkus project from a getting started with Quarkus guide with a Greeting Services modified with the correct modern structure mentioned in the chapter Project Structure

      +
    • +
    +
    +
  4. +
+
+
+
+

1.81.6. OpenAPI generation

+
+

We provide a guide with a short introduction to the OpenAPI specification with two plugins that are important in a Quarkus Context.

+
+ +
+

A more detailed usage guide to the Smallrye Plugin is provided by Quarkus OpenAPI and Swagger guide.

+
+
+
+

1.81.7. How to Integrate a Database

+
+

The next step for our REST service would be to integrate a database to store the objects of the entity.

+
+
+

With Quarkus, adding a database can be easy, because Quarkus can take over the build-up and connection process. +First, you should understand our guides on the concepts of working with data. Then, we will show how to integrate a database with Quarkus.

+
+
+
Data Principles Guides
+
    +
  1. +

    General devon4j JPA guide
    +To get an insight into the general JPA usage, read the JPA guide containing a general explanation of the Java Persistence API.

    +
  2. +
  3. +

    Difference to SpringData
    +If you have already worked with SpringData, this is also partially supported with Quarkus. This is explained in more detail in this SpringData Guide.

    +
  4. +
+
+
+
Database Integration
+
    +
  1. +

    Quarkus zero config dev mode
    +Starting with the database implementation in Quarkus, we recommend for beginners to use the DEV mode Zero Config Setup (Dev Services). This is especially great for testing the code without a database set up. +Quarkus does all the work for you and configures a database and creates the database and tables (schemas) for you.

    +
    +
      +
    1. +

      Configuration Properties
      +A list of all database configuration properties for the Dev services

      +
    2. +
    +
    +
  2. +
  3. +

    Integrate a simple Hibernate ORM database
    +The zero config setup only works with the Dev mode, it’s comfortable in the first phases of the creation of your service but if the goal is to also get a deployable version, you have to create your own database and integrate it. +This Quarkus guide shows, how to integrate a Hibernate ORM database with an example service.

    +
    +
      +
    1. +

      Configuration list for JDBC
      +A list of all possible JDBC configuration properties

      +
    2. +
    +
    +
  4. +
  5. +

    Reactive CRUD application with Panache
    +Quarkus unifies reactive and imperative programming. +Reactive is an architectural principle to build robust, efficient, and concurrent applications. +For an introduction into reactive and how Quarkus enables it, follow this Quarkus reactive architecture article and also the reactive quickstart. +To get started with reactive and implement reactive methods, you can follow the Quarkus reactive guide. +The reactive guide uses the Quarkus based implementation of a Hibernate ORM called Panache. +The implementation is not our first choice with devon4j and therefore not part of our recommendations, but to understand the reactive guide you can read the Hibernate ORM with Panache guide first to prevent possible problems following the guide.

    +
  6. +
+
+
+ + + + + +
+ + +
+

You need an installed Docker version for the zero config setup.

+
+
+
+
+
Database Migration
+

For schema-based databases, we recommend migrating databases with Flyway. +In that case, our general migration guide can give you an overview if you are not familiar with migration. +.. Flyway guide for Quarkus +This Quarkus guide will show how to work with the Flyway extension in a Quarkus application. +This should be used if you start your own database and do not leave the creation to Quarkus.

+
+
+
+

1.81.8. Testing a Quarkus Application

+
+

After we have built the service, we have to verify it with some tests. +We will give you some guidelines to implement some test cases.

+
+
+
Testing Guides
+
    +
  1. +

    General testing guide
    +For users that aren’t familiar with the devon4j testing principles, we created a general best practices and recommendations guide for testing.

    +
    +
      +
    1. +

      Our guide for testing with Quarkus +In addition, we also provide a guide that specifically addresses the testing of a Quarkus application.

      +
    2. +
    +
    +
  2. +
+
+
+

Most of the Quarkus applications are already equipped with a basic test and our reference project provides some further test cases. If you want to improve and extend the tests, you can also follow the large Quarkus guide for testing.

+
+
+
+

1.81.9. Packaging of a Quarkus application and creation of a native executable

+
+

Quarkus applications can be packaged into different file types. The following link will show you how to build them and give you a short explanation of the characteristics of these files.

+
+
+
Package types
+
    +
  1. +

    fast-jar

    +
  2. +
  3. +

    mutable-jar

    +
  4. +
  5. +

    uber-jar

    +
  6. +
  7. +

    native executable

    +
  8. +
+
+
+

To package an application, use the command mvn package and Quarkus will generate the output in the /target folder. For the native executables, the command needs more parameters, which is explained in the link above.

+
+
+

Configure the Output with these configuration properties

+
+
+
+

1.81.10. Create and build a Docker Image

+
+

Quarkus supports Jib, S2I and Docker for building images. We focus on building a Quarkus App with Docker. +You get a generated Dockerfile from Quarkus in the src/main/docker folder of any project generated from Quarkus. There are multiple Dockerfiles.

+
+
+
Dockerfiles
+
    +
  1. +

    Dockerfile.jvm
    +Dockerfile for Quarkus application in the JVM mode, running in Red Hat Universal Base Image 8 Minimal Container

    +
  2. +
  3. +

    Dockerfile.legacy-jar
    +Dockerfile for Quarkus application in JVM mode with the legacy jar running in Red Hat Universal Base Image 8 Minimal Container.

    +
  4. +
  5. +

    Dockerfile.native
    +Dockerfile using the native executable running in Red Hat Universal Base Image 8 Minimal container.

    +
  6. +
  7. +

    Dockerfile.native-distroless +The native file will run in a Distroless container. Distroless images are very small containers with just the application and runtime dependencies and without the other programs that come with a Linux distribution.

    +
  8. +
+
+
+
+
+

For more information to the different executables go back to the chapter Packaging of a Quarkus application and creation of a native executable

+
+
+
+
+

To simply build and run a Docker image, you can follow the instructions Quarkus provides for every Dockerfile in the comments block.

+
+
+

Docker commands example for the JVM Dockerfile from our reference project

+
+
+
+
####
+##This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode
+#
+##Before building the container image run:
+#
+##./mvnw package
+#
+##Then, build the image with:
+#
+##docker build -f src/main/docker/Dockerfile.jvm -t quarkus/quarkus-basics-jvm .
+#
+##Then run the container using:
+#
+##docker run -i --rm -p 8080:8080 quarkus/quarkus-basics-jvm
+#
+##If you want to include the debug port into your docker image
+##you will have to expose the debug port (default 5005) like this :  EXPOSE 8080 5005
+#
+##Then run the container using :
+#
+##docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/quarkus-basics-jvm
+#
+###
+
+
+
+

Quarkus is also able to build the image while packaging the application, so you don’t have to execute the command from above. +To perform Docker builds with the generated Dockerfiles from above, you need to add the following extension to your project with the command mvn quarkus:add-extension -Dextensions="container-image-docker".

+
+
+

You also have to set the quarkus.container-image.build=true. You can add this to your application.properties or just append it to the packaging command like this: ./mvnw package -Dquarkus.container-image.build=true.

+
+
+

If your needs exceed the instructions given by the file, we recommend to follow the Docker getting started guide to get familiar with Docker and customize the Dockerfiles according to your needs. +To specify your container build, you can use the general container image configuration properties and the Docker image configuration properties when building and running Docker images.

+
+ +
+

==Migrate from Spring to Quarkus

+
+
+

This guide will cover the migration process of a Spring application to a Quarkus application. There are already articles about migrating from Spring to Quarkus (e.g. https://developers.redhat.com/blog/2020/04/10/migrating-a-spring-boot-microservices-application-to-quarkus, https://dzone.com/articles/migrating-a-spring-boot-application-to-quarkus-cha). +This guide will focus more on the devon4j specific aspects. We assume that a working Spring application exists, built in the classic devon4j specific way (e.g. Jump The Queue or My Thai Star).

+
+
+
+

1.81.11. Create the Quarkus application

+
+

We start with an empty Quarkus project. You can create the project with Maven on the command line or use the online generator. The advantage of the online generator is that you have a pre-selection of dependencies to use in your project. +For starters, let’s select the basic dependencies required to develop a REST service with database connectivity (you can use one of the links in the Quarkus template guide): RESTEasy JAX-RS, RESTEasy Jackson, Hibernate ORM, Spring Data JPA API, JDBC Driver (choose the type of database you need), Flyway (if you have database migration schemas), SmallRye Health (optional for Health Monitoring)

+
+
+

The list does not include all required dependencies. We will add more dependencies to the project later. For now, generate the application with these dependencies.

+
+
+
Migration Toolkit from Red Hat
+
+

Red Hat provides a migration toolkit (MTA, Migration Toolkit for Applications), that supports migration of a Spring to a Quarkus application. There are several versions of this toolkit (e.g., a web console, a Maven plugin, or an IDE plugin). +The MTA analyzes your existing application and generates a report with hints and instructions for migrating from Spring to Quarkus. For example, it gives you an indication of which dependencies are not supported in your project for a Quarkus application and which dependencies you need to swap them with. The analysis is rule-based, and you can also add your own rules that will be checked during analysis.

+
+
+
+
+

1.81.12. Entities

+
+

There is nothing special to consider when creating the entities. In most cases, you can simply take the code from your Spring application and use it for your Quarkus application. Usually, the entities extend a superclass ApplicationPersistenceEntity containing, for example, the id property. You can also take this class from your Spring application and reuse it.

+
+
+
+

1.81.13. Transfer objects

+
+

The next step is to create the appropriate transfer objects for the entities. In a devon4j Spring application, we would use CobiGen to create these classes. Since CobiGen is not usable for this purpose in Quarkus applications yet, we have to create the classes manually.

+
+
+

First, we create some abstract base classes for the search criteria and DTO classes. Normally, these would also be created by CobiGen.

+
+
+
Listing 24. AbstractSearchCriteriaTo
+
+
public abstract class AbstractSearchCriteriaTo extends AbstractTo {
+
+  private static final long serialVersionUID = 1L;
+
+  private Pageable pageable;
+
+  //getter + setter for pageable
+}
+
+
+
+
Listing 25. AbstractDto
+
+
public abstract class AbstractDto extends AbstractTo {
+
+  private static final long serialVersionUID = 1L;
+
+  private Long id;
+
+  private int modificationCounter;
+
+  public AbstractDto() {
+
+    super();
+  }
+
+  //getter + setter
+
+  @Override
+  protected void toString(StringBuilder buffer) {
+    ...
+  }
+}
+
+
+
+

The class AbstractTo, extended by other classes, would be provided by the devon4j-basic module in a devon4j Spring application. You can take the code from here and reuse it in your Quarkus project.

+
+
+

Now you can create your transfer objects. Most of the code of the transfer objects of your Spring application should be reusable. For Quarkus, we recommend (as mentioned here) to use *Dto instead of *Eto classes. Be sure to change the names of the classes accordingly.

+
+
+
+

1.81.14. Data Access Layer

+
+

In devon4j, we propose to use Spring Data JPA to build the data access layer using repositories and Querydsl to build dynamic queries. We will also use this approach for Quarkus applications, but we need to change the implementation because the devon4j modules are based on reflection, which is not suitable for Quarkus. +In Quarkus we will use Querydsl using code generation. So for this layer, more changes are required and we can’t just take the existing code.

+
+
+

First, create a repository interface for your entity class that extends JpaRepository (see here).

+
+
+

To add QueryDSL support to your project, add the following dependencies to your pom.xml file:

+
+
+
Listing 26. pom.xml
+
+
<dependency>
+  <groupId>com.querydsl</groupId>
+  <artifactId>querydsl-jpa</artifactId>
+  <version>4.3.1</version>
+</dependency>
+<dependency>
+  <groupId>com.querydsl</groupId>
+  <artifactId>querydsl-apt</artifactId>
+  <scope>provided</scope>
+  <version>4.3.1</version>
+</dependency>
+
+
+
+

As mentioned above, we will use QueryDSL with code generation. For this, add the QueryDSL annotation processor to your plugins:

+
+
+
Listing 27. pom.xml
+
+
<plugins>
+...
+  <plugin>
+    <groupId>com.mysema.maven</groupId>
+    <artifactId>apt-maven-plugin</artifactId>
+    <version>1.1.3</version>
+    <executions>
+      <execution>
+        <phase>generate-sources</phase>
+        <goals>
+          <goal>process</goal>
+        </goals>
+        <configuration>
+          <outputDirectory>target/generated-sources/annotations</outputDirectory>
+          <processor>com.querydsl.apt.jpa.JPAAnnotationProcessor</processor>
+        </configuration>
+      </execution>
+    </executions>
+  </plugin>
+</plugins>
+
+
+
+

To implement the queries, follow the corresponding guide.

+
+
+

Set the following properties in the application.properties file to configure the connection to your database (see also here):

+
+
+
+
quarkus.datasource.db-kind=...
+quarkus.datasource.jdbc.url=...
+quarkus.datasource.username=...
+quarkus.datasource.password=...
+
+
+
+
+

1.81.15. Logic Layer

+
+

For the logic layer, devon4j uses a use-case approach. You can reuse the use case interfaces from the api module of the Spring application. Again, make sure to rename the transfer objects.

+
+
+

Create the appropriate class that implements the interface. Follow the implementation section of the use-case guide to implement the methods. For mapping the entities to the corresponding transfer objects, see the next section.

+
+
+
+

1.81.16. Mapping

+
+

For bean mapping, we need to use a completely different approach in the Quarkus application than in the Spring application. For Quarkus, we use MapStruct, which creates the mapper at build time rather than at runtime using reflection. Add the following dependencies to your pom.xml.

+
+
+
Listing 28. pom.xml
+
+
<dependency>
+  <groupId>org.mapstruct</groupId>
+  <artifactId>mapstruct-processor</artifactId>
+  <version>1.4.2.Final</version>
+</dependency>
+<dependency>
+  <groupId>org.mapstruct</groupId>
+  <artifactId>mapstruct</artifactId>
+  <version>1.4.2.Final</version>
+</dependency>
+
+
+
+

Then you can create the mapper as follows:

+
+
+
Listing 29. Mapper
+
+
@Mapper(componentModel = "cdi")
+public interface YourEntityMapper {
+  YourEntityDto map(YourEntity entity);
+
+  YourEntity map(YourEntityDto dto);
+
+  ...
+}
+
+
+
+

Inject the mapper into your use-case implementation and simply use the methods. The method implementations of the mapper are created when the application is built.

+
+
+
+

1.81.17. Service Layer

+
+

For the implementation of the service layer, we use JAX-RS for both Quarkus and Spring applications to create the REST services. Classic devon4j Spring applications rely on Apache CXF as the implementation of JAX-RS. +For Quarkus, we use RESTEasy. Since both are implementations of JAX-RS, much of the Spring application code can be reused.

+
+
+

Take the definition of the REST endpoints from the api module of the Spring application (make sure to rename the transfer objects), inject the use-cases from the logic layer and use them in the REST service methods as follows:

+
+
+
Listing 30. REST service
+
+
@Path("/path/v1")
+public class YourComponentRestService {
+
+  @Inject
+  UcFindYourEntity ucFindYourEntity;
+
+  @Inject
+  UcManageYourEntity ucManageYourEntity;
+
+  @GET
+  @Path("/yourEntity/{id}/")
+  public YourEntityDto getYourEntity(@PathParam("id") long id) {
+
+    return this.ucFindYourEntity.findYourEntity(id);
+  }
+
+  ...
+}
+
+
+
+
+

1.81.18. Summary

+
+

As you have seen, some parts hardly differ when migrating a Spring application to a Quarkus application, while other parts differ more. The above sections describe the parts needed for simple applications that provide REST services with a data access layer. +If you add more functionality, more customization and other frameworks, dependencies may be required. If that is the case, take a look at the corresponding guide on the topic in the devon4j documentation or check if there is a tutorial on the official Quarkus website.

+
+
+

Furthermore, we can summarize that migrating from a Spring application to a Quarkus representative is not complex. Although Quarkus is a very young framework (release 1.0 was in 2019), it brings a lot of proven standards and libraries that you can integrate into your application. +This makes it easy to migrate and reuse code from existing (Spring) applications. Also, Quarkus comes with Spring API compatibility for many Spring modules (Spring Data JPA, Spring DI, etc.), which makes it easier for developers to reuse their knowledge.

+
+ +
+

==Spring Native vs Quarkus

+
+
+

Nowadays, it is very common to write an application and deploy it to a cloud. +Serverless computing and Function-as-a-Service (FaaS) have become +very popular. +While many challenges arise when deploying a Java application into the latest cloud environment, the biggest challenges facing developers are memory footprint and the startup time required +for the Java application, as both of these keep the host’s costs high in public clouds and Kubernetes clusters. With the introduction of frameworks like Micronaut and MicroProfile, Java processes are getting faster and more lightweight. In a similar context, Spring has introduced +Spring Native which aims to solve the big memory footprint of Spring and its slow startup time to potentially rival the new framework called Quarkus, by Red Hat. This document briefly discusses both of these two frameworks and their potential suitability with devonfw.

+
+
+
+

1.81.19. Quarkus

+
+

Quarkus is a full-stack, Kubernetes-native Java framework made for JVMs. With its container-first-philosophy and its native compilation with GraalVM, Quarkus optimizes Java for containers with low memory usage and fast startup times.

+
+
+

Quarkus achieves this in the following ways:

+
+
+
    +
  • +

    First Class Support for GraalVM

    +
  • +
  • +

    Build Time Metadata Processing: As much processing as possible is +done at build time, so your application will only contain the classes +that are actually needed at runtime. This results in less memory usage, +and also faster startup time, as all metadata processing has already been +done.

    +
  • +
  • +

    Reduction in Reflection Usage: Quarkus tries to avoid reflection as much as possible in order to reduce startup time and memory usage.

    +
  • +
  • +

    Native Image Pre Boot: When running in a native image, Quarkus +pre-boots as much of the framework as possible during the native image +build process. This means that the resulting native image has already +run most of the startup code and serialized the result into the +executable, resulting in an even faster startup-time.

    +
  • +
+
+
+

This gives Quarkus the potential for a great platform for serverless cloud and Kubernetes environments. For more information about Quarkus and its support for devonfw please refer to the Quarkus introduction guide.

+
+
+
+

1.81.20. Spring Native

+
+
+
+

The current version of Spring Native 0.10.5 is designed to be used with Spring Boot 2.5.6

+
+
+
+
+

Like Quarkus, Spring Native provides support for compiling Spring applications to native executables using the GraalVM native-image compiler designed to be packaged in lightweight containers.

+
+
+

Spring Native is composed of the following modules:

+
+
+
    +
  • +

    spring-native: runtime dependency required for running Spring Native, provides also Native hints API.

    +
  • +
  • +

    spring-native-configuration: configuration hints for Spring classes used by Spring AOT plugins, including various Spring Boot auto-configurations.

    +
  • +
  • +

    spring-native-docs: reference guide, in adoc format.

    +
  • +
  • +

    spring-native-tools: tools used for reviewing image building configuration and output.

    +
  • +
  • +

    spring-aot: AOT transformation infrastructure common to Maven and Gradle plugins.

    +
  • +
  • +

    spring-aot-test: Test-specific AOT transformation infrastructure.

    +
  • +
  • +

    spring-aot-gradle-plugin: Gradle plugin that invokes AOT transformations.

    +
  • +
  • +

    spring-aot-maven-plugin: Maven plugin that invokes AOT transformations.

    +
  • +
  • +

    samples: contains various samples that demonstrate features usage and are used as integration tests.

    +
  • +
+
+
+
+

1.81.21. Native compilation with GraalVM

+
+

Quarkus and Spring Native both use GraalVM for native compilation. Using a native image provides some key advantages, such as instant startup, instant peak performance, and reduced memory consumption. However, there are also some drawbacks: Creating a native image is a heavy process that is slower than building a regular application. A native image also has fewer runtime optimizations after its warmup. Furthermore, it is less mature than the JVM and comes with some different behaviors.

+
+
+

Key characteristics:

+
+
+
    +
  • +

    Static analysis of the application from the main entry point is +performed at build time.

    +
  • +
  • +

    Unused parts are removed at build time.

    +
  • +
  • +

    Configuration required for reflection, resources, and dynamic proxies.

    +
  • +
  • +

    Classpath is fixed at build time.

    +
  • +
  • +

    No class lazy loading: everything shipped in the executables will be loaded in memory on startup.

    +
  • +
  • +

    Some code will run at build time.

    +
  • +
+
+
+

There are limitations around some aspects of Java applications that are not fully supported.

+
+
+
+

1.81.22. Build time and start time for apps

+ +++++ + + + + + + + + + + + + + + + + + + + +
Frameworkbuild timestart time

Spring Native

19.615s

2.913s

Quarkus Native executable

52.818s

0.802s

+
+
+

1.81.23. Memory footprints

+ ++++ + + + + + + + + + + + + + + + + +
Frameworkmemory footprint

Spring Native

109 MB

Quarkus Native executable

75 MB

+
+
+

1.81.24. Considering devonfw best practices

+
+

As of now, devonfw actively supports Spring but not Spring Native. +Although Quarkus has been released to a stable release in early 2021, it has already been used in multiple big projects successfully, showing its potential to implement cloud native services with low resource consumption matching the needs of scalability and resilience in cloud native environments. +With major stakeholders behind the open source community like Red Hat, its development and growth from its kickoff to the current state is very impressive and really shows the market needs and focus. +Another big advantage of Quarkus is that it started on a green field and therefore did not need to circumvent main pillars of the Spring framework like reflection, being able to take clean and up-to-date design decisions not needing to cope with legacy issues. +Nonetheless, there is also experimental support for some Spring libraries already available in Quarkus, which makes switching from Spring to Quarkus much easier if needed. +We also provide a guide +for Spring developers who want to adopt or try Quarkus for their +(next) projects as it really has some game-changing advantages over +Spring.

+
+
+
+

1.81.25. General recommendations and conclusion

+
+

Quarkus and Spring Native both have their own use cases. Under the consideration of the limitations of GraalVM to be used for native images built by Quarkus and Spring Native, there is a strong recommendation towards Quarkus from devonfw. +One essential differentiation has to be made on the decision for native or against native applications - the foreseen performance optimization of the JIT compiler of the JVM, which is not available anymore in a native image deployment. +For sure, both component frameworks will also run on a JVM getting advantage again from JIT compilation, but depending on the overall landscape then, it is recommended to stay with the knowledge of the available teams, e.g. continue making use of devon4j based on spring or even if already in that state also here make use of Quarkus on JVM.

+
+ +
+

==Modern project structure

+
+
+

With trends such as cloud, microservices, lean, and agile, we decided for a more modern project structure that fits better to recent market trends. +When starting new projects with devonfw, and especially in the context of cloud-native development, we strongly recommend this modern approach over the classic structure.

+
+
+
+

1.81.26. Modules

+
+

Due to trends such as microservices, we are building smaller apps compared to moduliths. +For simplicity, we therefore do not split our app into different modules and keep everything top-level and easy.

+
+
+

In addition to java and resources, we also add helm for helm templates and docker for docker scripts (e.g. Dockerfile) in src/main:

+
+
+
+
├──/src
+|  ├──/main
+|  |  ├──/docker
+|  |  ├──/helm
+|  |  ├──/java
+|  |  └──/resources
+|  └──/test
+|     ├──/java
+|     └──/resources
+└──/pom.xml
+
+
+
+
+

1.81.27. Deployment

+
+

For modern projects, we strongly recommend that your build process generates the final deliverable as an OCI compliant container. +Further, to go fully cloud-native, you should build your app as a native image via GraalVM AOT compiler. +Therefore, we recommend to use Quarkus as your main framework. +In case you want to go with Spring, you may consider using spring-native.

+
+
+
+

1.81.28. Layers

+
+

The package structure of your code inside src/main/java (and src/test/java) of your app is described in our coding conventions in the sections packages. +For the modern project structure, the layers are defined by the following table:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Layer«layer»Description

service

service

The service layer exposing functionality via its remote API. Typical protocol is REST. May also be any other protocol you are using such as gRPC.

domain

domain

The domain with the data-model and DB access. Use sub-package (in «detail») repository for repository and dao for DAOs. Also we recommend to put entities in model sub-package.

logic

logic

The logic layer with the functionality providing the business value.

common

common

cross-cutting code not assigned to a technical layer.

+
+
+

1.81.29. Architecture Mapping

+
+

In order to help you to map the architecture, packaging, layering, etc. to the code and see where different code elements should be placed, +we provide this architecture mapping:

+
+
+
+
«root»
+├──.«component»
+|  ├──.domain
+|  |  ├──.repo
+|  |  |  ├──.«BusinessObject»Repository
+|  |  |  ├──.«BusinessObject»Fragment
+|  |  |  └──.«BusinessObject»FragmentImpl
+|  |  ├──.dao [alternative to repo]
+|  |  |  ├──.«BusinessObject»Dao
+|  |  |  └──.«BusinessObject»DaoImpl
+|  |  └──.model
+|  |     └──.«BusinessObject»Entity
+|  ├──.logic
+|  |  ├──«BusinessObject»Validator
+|  |  └──«BusinessObject»EventsEmitter
+|   |  └──.Uc«Operation»«BusinessObject»[Impl]
+|  └──.rest
+|     └──.v1
+|        ├──.«Component»RestService
+|        ├──.mapper
+|        |     └──.«BusinessObject»Mapper
+|        └──.model
+|           └──.«BusinessObject»Dto
+└──.general
+   └──.domain
+      └──.model
+         └──.ApplicationPersistenceEntity
+
+
+ +
+

==Domain Layer

+
+
+

The domain layer is responsible for the data-model and mapping it to a database. +The most common approach is to use a Relational Database Management System (RDBMS). In such a case, we strongly recommend to follow our JPA Guide. Some NoSQL databases are supported by spring-data, so you can consider the repository guide.

+
+
+

Note: The domain layer is the replacement for the data-access layer in the modern project structure.

+
+
+
+
+
+

1.82. Guides

+ +
+

==Getting started with Quarkus for Spring developers

+
+
+

As a Spring developer, you have heard more and more about Quarkus: its pros and cons, its fast growth etc. So, you decided to adopt/try Quarkus for your (next) project(s) and are wondering where to go next and what you need to pay attention to when moving from Spring to Quarkus.

+
+
+

This guide tries to address this exact concern. In the following, we will present you some main points you should be aware of when starting to develop with Quarkus, along with some useful sources.

+
+
+
    +
  1. +

    Quarkus is a fairly new Java toolkit. Thus, it is very well documented. It also provides a set of well-written technical guides that are a good starting point to get in touch and make the first steps with Quarkus. See here. It is an Open Source project licensed under the Apache License version 2.0. The source code is hosted in GitHub. If you have any questions or concerns, don’t hesitate to reach out to the Quarkus community.

    +
  2. +
  3. +

    Same as Spring Initializr, you can go to code.quarkus.io to create a new application. Also, check out our Template Quarkus Guide to see our recommendations on certain topics.

    +
  4. +
  5. +

    In Spring stack, we recommend structuring your application into multiple modules, known as our classic structure. Moving to Quarkus and the world of cloud-native microservices, where we build smaller applications compared to monoliths, we recommend keeping everything top-level and simple. Therefore, we propose the modern structure as a better fit.

    +
  6. +
  7. +

    Quarkus focuses not only on delivering top features, but also on the developer experience. The Quarkus’s Live Coding feature automatically detects changes made to Java files, application configuration, static resources, or even classpath dependency changes and recompiles and redeploys the changes. As that, it solves the problem of traditional Java development workflow, hence improves productivity.

    +
    +
    +
        Write Code → Compile → Deploy → Test Changes/ Refresh Browser/ etc → Repeat (traditional)
    +    Write Code → Test Changes/ Refresh Browser/ etc → Repeat (Quarkus)
    +
    +
    +
    +

    You can use this feature out of the box without any extra setup by running:

    +
    +
    +
    +
        mvn compile quarkus:dev
    +
    +
    +
    +

    Another highlight feature to speed up developing is the Quarkus’s Dev Mode with Dev Services, which can automatically provision unconfigured services in development and test mode. This means that if you include an extension and don’t configure it, Quarkus will automatically start the relevant service and wire up your application to use it, therefore saving you a lot of time setting up those services manually. In production mode, where the real configuration is provided, Dev Services will be disabled automatically.

    +
    +
    +

    Additionally, you can access the Dev UI at /q/dev in Dev Mode to browse endpoints offered by various extensions, conceptually similar to what a Spring Boot actuator might provide.

    +
    +
  8. +
  9. +

    Quarkus is made of a small core on which hundreds of extensions rely. In fact, the power of Quarkus is its extension mechanism. Think of these extensions as your project dependencies. You can add it per dependency manager such as maven or gradle.

    +
    +
    +
    mvn quarkus:list-extensions
    +mvn quarkus:add-extension -Dextensions="groupId:artifactId"
    +(or add it manually to pom.xml)
    +##or
    +gradle list-extensions
    +(add dependency to build.gradle)
    +
    +
    +
    +

    Like Spring Boot, Quarkus also has a vast ecosystem of extensions with commonly-used technologies.

    +
    + + ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Table 18. Example of common Quarkus extensions and the Spring Boot Starters with similar functionality (book: Quarkus for Spring Developer)
    Quarkus extensionSpring Boot Starter

    quarkus-resteasy-jackson

    spring-boot-starter-web

    +

    spring-boot-starter-webflux

    quarkus-resteasy-reactive-jackson

    spring-boot-starter-web

    +

    spring-boot-starter-webflux

    quarkus-hibernate-orm-panache

    spring-boot-starter-data-jpa

    quarkus-hibernate-orm-rest-datapanache

    spring-boot-starter-data-rest

    quarkus-hibernate-reactive-panache

    spring-boot-starter-data-r2dbc

    quarkus-mongodb-panache

    spring-boot-starter-data-mongodb

    +

    spring-boot-starter-data-mongodb-reactive

    quarkus-hibernate-validator

    spring-boot-starter-validation

    quarkus-qpid-jms

    spring-boot-starter-activemq

    quarkus-artemis-jms

    spring-boot-starter-artemis

    quarkus-cache

    spring-boot-starter-cache

    quarkus-redis-client

    spring-boot-starter-data-redis

    +

    spring-boot-starter-data-redis-reactive

    quarkus-mailer

    spring-boot-starter-mail

    quarkus-quartz

    spring-boot-starter-quartz

    quarkus-oidc

    spring-boot-starter-oauth2-resource-server

    quarkus-oidc-client

    spring-boot-starter-oauth2-client

    quarkus-smallrye-jwt

    spring-boot-starter-security

    +
    +

    A full list of all Quarkus extensions can be found here. Furthermore, you can check out the community extensions hosted by Quarkiverse Hub. Quarkus has some extensions for Spring API as well, which is helpful when migrating from Spring to Quarkus.

    +
    + +
    +

    Besides extensions, which are officially maintained by Quarkus team, Quarkus allows adding external libraries too. While extensions can be integrated seamlessly into Quarkus, as they can be processed at build time and be built in native mode with GraalVM, external dependencies might not work out of the box with native compilation. If that is the case, you have to recompile them with the right GraalVM configuration to make them work.

    +
    +
  10. +
  11. +

    Quarkus' design accounted for native compilation by default. A Quarkus native executable starts much faster and utilizes far less memory than a traditional JVM (see our performance comparison between Spring and Quarkus). To get familiar with building native executable, configuring and running it, please check out our Native Image Guide. Be sure to test your code in both JVM and native mode.

    +
  12. +
  13. +

    Both Quarkus and Spring include testing frameworks based on JUnit and Mockito. By design, Quarkus enables test-driven development by detecting affected tests as changes are made and automatically rerunning them in the background. As a result, it gives developers instant feedback and thus improves productivity. To use continuous testing, execute the following command:

    +
    +
    +
    mvn quarkus:dev
    +
    +
    +
  14. +
  15. +

    For the sake of performance optimization, Quarkus avoids reflection as much as possible, favoring static class binding instead. When building a native executable, it analyzes the call tree and removes all the classes/methods/fields that are not used directly. As a consequence, the elements used via reflection are not part of the call tree so they are dead code eliminated (if not called directly in other cases).

    +
    +

    A common example is the JSON library, which typically uses reflection to serialize the objects to JSON. If you use them out of the box, you might encounter some errors in native mode. So, be sure to register the elements for reflection explicitly. A How-to is provided by Quarkus Registering For Reflection with practical program snippets.

    +
    +
  16. +
+
+
+

A very good read on the topic is the e-book Quarkus for Spring Developers by Red Hat. Another good source for direct hands-on coding tutorial is Katacoda Quarkus for Spring Boot Developers

+
+ +
+

==Configuration

+
+
+

Quarkus provides a comprehensive guide on configuration here.

+
+
+
External Application Configuration
+
+
Database Configuration
+
+

In Quarkus, Hibernate is provided by the quarkus-hibernate-orm extension. Ensure the extension is added to your pom.xml as follows:

+
+
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-hibernate-orm</artifactId>
+</dependency>
+
+
+
+

Additionally, you have to add the respective JDBC driver extension to your pom.xml. There are different drivers for different database types. See Quarkus Hibernate guide.

+
+
+
+
Database System and Access
+
+

You need to configure which database type you want to use, as well as the location and credentials to access it. The defaults are configured in application.properties. The file should therefore contain the properties as in the given example:

+
+
+
+
quarkus.datasource.jdbc.url=jdbc:postgresql://database.enterprise.com/app
+quarkus.datasource.username=appuser01
+quarkus.datasource.password=************
+quarkus.datasource.db-kind=postgresql
+
+##drop and create the database at startup (use only for local development)
+quarkus.hibernate-orm.database.generation=drop-and-create
+
+
+
+
+
Database Logging
+
+

Add the following properties to application.properties to enable logging of database queries for debugging purposes.

+
+
+
+
quarkus.hibernate-orm.log.sql=true
+quarkus.hibernate-orm.log.format-sql=true
+
+#Logs SQL bind parameters. Setting it to true is obviously not recommended in production.
+quarkus.hibernate-orm.log.bind-parameters=true
+
+
+
+
+
+
Secrets and environment specific configurations
+
+
Environment variables
+
+

There are also some libraries to make Jasypt work with Quarkus, such as Camel Quarkus Jasypt. Unfortunately, this feature only works in JVM mode and not in native mode.

+
+
+

Quarkus supports many credential providers with official extensions, such as HashiCorp Vault.

+
+
+
+
<dependency>
+  <groupId>io.quarkus</groupId>
+  <artifactId>quarkus-vault</artifactId>
+</dependency>
+
+
+
+

Quarkus reads configuration values from several locations, ordered by a certain priority. An overview of these can be found at the official Quarkus config guide.

+
+
+

Environment variables have a higher ordinal number and are therefore higher prioritized than e.g. the application.properties file. +So instead of storing secrets in plain text in the configuration files, it is better to use environment variables for critical values to configure the application.

+
+
+

Environment variables also have the advantage that they can be easily integrated into a containerized environment. +When using Kubernetes, the secrets can be stored as Kubernetes secret and then passed to the containers as an environment variable.

+
+
+
+
Custom config sources
+
+

Quarkus provides the possibility to add custom config sources, which can be used to retrieve configuration values from custom locations. +For a description of this feature, see the corresponding Quarkus guide.

+
+
+Config interceptors +
+

Quarkus also allows you to hook into the resolution of configuration values through the concept of interceptors. This can be useful when configuration values are encrypted or need to be extracted. +To do this, you have to implement a ConfigSourceInterceptor.

+
+
+
+
public class CustomConfigInterceptor implements ConfigSourceInterceptor {
+
+  @Override
+  public ConfigValue getValue(ConfigSourceInterceptorContext context, String name) {
+
+    ConfigValue configValue = context.proceed(name);
+    if (name.equals("config-value-to-resolve")) {
+      configValue = new ConfigValue.ConfigValueBuilder()
+          .withName(name)
+          .withValue(resolveConfigurationValue(name))
+          .build();
+    }
+
+    return configValue;
+  }
+
+  private String resolveConfigurationValue(String name) {
+    ...
+  }
+}
+
+
+
+

To use the Interceptor, you must register it. To do this, create a file io.smallrye.config.ConfigSourceInterceptor in the folder src/main/resources/META-INF/services and register the interceptor by writing the fully qualified class name to this file.

+
+
+
+
+
Credential encryption
+
+

As for Spring, there are also some libraries that let Jasypt work with Quarkus such as Camel Quarkus Jasypt. Unfortunately, this feature only works in JVM mode and not in native mode, so it is not a suitable approach.

+
+
+

If you want to store usernames or passwords in encrypted form or retrieve them from a custom store, you can use a custom CredentialsProvider for this purpose. +Consider the use case where you want to store your database credentials in encrypted form rather than in plain text. Then you can implement a credentials provider as follows:

+
+
+
+
@ApplicationScoped
+@Unremovable
+public class DatabaseCredentialsProvider implements CredentialsProvider {
+
+  @Override
+  public Map<String, String> getCredentials(String credentialsProviderName) {
+
+    Map<String, String> properties = new HashMap<>();
+    properties.put(USER_PROPERTY_NAME, decryptUsername());
+    properties.put(PASSWORD_PROPERTY_NAME, decryptPassword());
+    return properties;
+  }
+}
+
+
+
+

In the application.properties file you need to set quarkus.datasource.credentials-provider=custom. +For more information about the credentials provider, see the official Quarkus guide.

+
+
+
+
HashiCorp Vault
+
+

For centralized management of secrets and other critical configuration values, you can use HashiCorp Vault as external management tool.

+
+
+

For detailed instructions on how to integrate Vault into your Quarkus application, see the official Quarkus guide.

+
+ +
+

==Quarkus template

+
+
+

Quarkus Code Generator provides many alternative technologies and libraries that can be integrated into a project. Detailed guides on multiple topics can be found here.

+
+
+

Due to the large selection, getting started can be difficult for developers. +In this guide we aim to provide a general suggestion on basic frameworks, libraries, and technologies to make it easy for developers to begin with.

+
+
+

With that said, please take this as a recommendation and not as a compulsion. Depending on your project requirements, you might have to use another stack compared to what is listed below.

+
+
+

If you are new to Quarkus, consider checking out their getting started guide to get an overview of how to create, run, test, as well as package a Quarkus application. Another recommended source to get started is the Katacoda tutorials.

+
+
+
+
Basic templates
+
+
    +
  1. +

    simple REST API (go to code.quarkus.io)

    +
  2. +
  3. +

    simple REST API with monitoring (go to code.quarkus.io)

    +
  4. +
+
+ + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 19. Topic-based suggested implementation
TopicDetailSuggested implementationNote

runtime

servlet-container

Undertow

component management

dependency injection

ArC

ArC is based on JSR 365. It also provides interceptors that can be used to implement the same functionality as AOP provides

configuration

SmallRye Config

SmallRye Config is an implementation of Eclipse MicroProfile Config. It also supports YAML configuration files

persistence

OR-mapper

Hibernate ORM, Spring Data JPA

Hibernate ORM is the de facto standard JPA implementation and works perfectly in Quarkus. Quarkus also provides a compatibility layer for Spring Data JPA repositories in the form of the spring-data-jpa extension.

batch

Quarkus JBeret Extension is a non-official extension, which is hosted in the Quarkiverse Hub. It is an implementation of JSR 352.

service

REST services

RESTEasy

RESTEasy is a portable implementation of the new JCP specification JAX-RS JSR-311. It can be documented via Swagger OpenAPI.

async messaging

SmallRye Reactive Messaging, Vert.x EventBus

SmallRye Reactive Messaging is an implementation of the Eclipse MicroProfile Reactive Messaging specification 1.0. You can also utilize SmallRye Reactive Messaging in your Quarkus application to interact with Apache Kafka.

marshalling

RESTEasy Jackson, RESTEasy JSON-B, RESTEasy JAXB, RESTEasy Multipart

cloud

kubernetes

Kubernetes

deployment

Minikube, k3d

Minikube is quite popular when a Kubernetes cluster is needed for development purposes. Quarkus supports this with the quarkus-minikube extension.

logging

framework

JBoss Log Manager and the JBoss Logging facade

Internally, Quarkus uses JBoss Log Manager and the JBoss Logging facade. Logs from other supported Logging API (JBoss Logging, SLF4J, Apache Commons Logging) will be merged.

validation

framework

Hibernate Validator/Bean Validation (JSR 380)

security

authentication & authorization

JWT authentication

Quarkus supports various security mechanisms. Depending on your protocol, identity provider you can choose the necessary extensions such as quarkus-oidc quarkus-smallrye-jwt quarkus-elytron-security-oauth2.

monitoring

framework

Micrometer Metrics, SmallRye Metrics

SmallRye Metrics is an implementation of the MicroProfile Metrics specification. Quarkus also offers various extensions to customize the metrics.

health

SmallRye Health

SmallRye Health is an implementation of the MicroProfile Health specification.

fault tolerance

SmallRye Fault Tolerance

SmallRye Fault Tolerance is an implementation of the MicroProfile Fault Tolerance specification.

+ +
+

==Building a native image

+
+
+

Quarkus provides the ability to create a native executable of the application called native image. +Unlike other Java based deployments, a native image will only run on the architecture and operating system it is compiled for. +Also, no JVM is needed to run the native-image. +This improves the startup time, performance, and efficiency. +A distribution of GraalVM is needed. +You can find the differences between the available distributions here.

+
+
+

To build your quarkus app as a native-image, you have two options that are described in the following sections.

+
+
+
+
+
Build a native executable with GraalVM
+
+

To build a Quarkus application, you can install GraalVM locally on your machine, as described below. +Therefore, read the basic Quarkus application chapter, or clone the example project provided by devonfw. +Follow this chapter from the Quarkus Guide for building a native executable.

+
+
+
Installing GraalVM
+
+

A native image can be created locally or through a container environment. +To create a native image locally, an installed and configured version of GraalVM is needed. You can follow the installation guide from Quarkus or the guide provided by GraalVM for this.

+
+
+
+
+
Build a native executable with GraalVM through container environment
+
+

In order to make the build of native images more portable, you can also use your container environment and run the GraalVM inside a container (typically Docker). +You can simply install Docker with your devonfw-ide distribution, just follow this description Docker with devonfw-ide. +Follow this chapter to build a native Linux image through container runtime.

+
+
+
+
Configuring the native executable
+
+

A list of all configuration properties for a native image can be found here.

+
+ +
+

==Bean mapping with Quarkus

+
+
+

This guide will show bean-mapping, in particular for a Quarkus application. We recommend using MapStruct with a Quarkus application because the other bean-mapper frameworks use Java reflections. They are not supported in GraalVM right now and cause problems when building native applications. MapStruct is a code generator that greatly simplifies the implementation of mappings between Java bean types based on a convention over configuration approach. The mapping code will be generated at compile-time and uses plain method invocations and is thus fast, type-safe, and easy to understand. MapStruct has to be configured to not use Java reflections, which will be shown in this guide.

+
+
+

You can find the official +MapStruct reference guide and a general introduction to MapStruct from Baeldung.

+
+
+
+
MapStruct Dependency
+
+

To get access to MapStruct, we have to add the dependency to our POM.xml:

+
+
+
+
<dependency>
+  <groupId>org.mapstruct</groupId>
+  <artifactId>mapstruct</artifactId>
+  <version>1.4.2.Final</version>
+  <scope>provided</scope>
+</dependency>
+
+
+
+

MapStruct provides an annotation processor that also has to be added to the POM.xml

+
+
+
+
<plugin>
+	<groupId>org.apache.maven.plugins</groupId>
+	<artifactId>maven-compiler-plugin</artifactId>
+	<version>3.8.1</version>
+	<configuration>
+		<source>1.8</source>
+		<target>1.8</target>
+		<annotationProcessorPaths>
+			<path>
+				<groupId>org.mapstruct</groupId>
+				<artifactId>mapstruct-processor</artifactId>
+				<version>1.4.2.Final</version>
+			</path>
+		</annotationProcessorPaths>
+	</configuration>
+</plugin>
+
+
+
+

MapStruct takes advantage of generated getters, setters, and constructors from the Lombok library, follow this Lombok with Mapstruct guide to get Lombok with Mapstruct working.

+
+
+
+
MapStruct Configuration
+
+

We already discussed the benefits of dependency injection. MapStruct supports CDI with EJB, spring, and jsr330. The default retrieving method for a mapper is a factory that uses reflections, which should be avoided. The component model should be set to CDI, as this will allow us to easily inject the generated mapper implementation. The component model can be configured in multiple ways.

+
+
+
Simple Configuration
+
+

Add the attribute componentModel to the @Mapper annotation in the mapper interface.

+
+
+
+
@Mapper(componentModel = "cdi")
+public interface ProductMapper{
+  ...
+}
+
+
+
+
+
MapperConfig Configuration
+
+

Create a shared configuration that can be used for multiple mappers. Implement an interface and use the annotation @MapperConfig for the class. You can define all configurations in this interface and pass the generated MapperConfig.class with the config attribute to the mapper. The MapperConfig also defines the InjectionStrategy and MappingInheritanceStrategy, both of which will be explained later. +A list of all configurations can be found here.

+
+
+
+
@MapperConfig(
+  componentModel = "cdi",
+  mappingInheritanceStrategy = MappingInheritanceStrategy.AUTO_INHERIT_FROM_CONFIG,
+  injectionStrategy = InjectionStrategy.CONSTRUCTOR
+)
+public interface MapperConfig{
+}
+
+
+
+
+
@Mapper( config = MapperConfig.class )
+public interface ProductMapper{
+  ...
+}
+
+
+
+

Any attributes not given via @Mapper will be inherited from the shared configuration MapperConfig.class.

+
+
+
+
Configuration via annotation processor options
+
+

The MapStruct code generator can be configured using annotation processor options. +You can pass the options to the compiler while invoking javac directly, or add the parameters to the maven configuration in the POM.xml

+
+
+

We also use the constructor injection strategy to avoid field injections and potential reflections. This will also simplify our tests.

+
+
+

The option to pass the parameter to the annotation processor in the POM.xml is used and can be inspected in our quarkus reference project.

+
+
+

A list of all annotation processor options can be found here.

+
+
+
+
+
Basic Bean-Mapper Usage
+
+

To use the mapper, we have to implement the mapper interface and the function prototypes with a @Mapper annotation.

+
+
+
+
@Mapper
+public interface ProductMapper {
+
+  ProductDto map(ProductEntity model);
+
+  ProductEntity create(NewProductDto dto);
+}
+
+
+
+

The MapStruct annotation processor will generate the implementation for us under /target/generated-sources/, we just need to tell it that we would like to have a method that accepts a ProductEntity entity and returns a new ProductDto DTO.

+
+
+

The generated mapper implementation will be marked with the @ApplicationScoped annotation and can thus be injected into fields, constructor arguments, etc. using the @Inject annotation:

+
+
+
+
public class ProductRestService{
+
+  @Inject
+  ProductMapper mapper;
+}
+
+
+
+

That is the basic usage of a Mapstruct mapper. In the next chapter, we’ll go into a bit more detail and show some more configurations.

+
+
+
+
Advanced Bean-Mapper Usage
+
+

Let's assume that our Product entity and the ProductDto have some differently named properties that should be mapped. Add a mapping annotation to map the property type from Product to kind from ProductDto. We define the source name of the property and the target name.

+
+
+
+
@Mapper
+public interface ProductMapper {
+  @Mapping(target = "kind", source = "type")
+  ProductDto map(ProductEntity entity);
+
+  @InheritInverseConfiguration(name = "map" )
+  ProductEntity create(ProductDto dto);
+}
+
+
+
+

For bi-directional mappings, we can indicate that a method shall inherit the inverse configuration of the corresponding method with the @InheritInverseConfiguration. You can omit the name parameter if the result type of method A is the same as the +single-source type of method B and if the single-source type of A is the same as the result type of B. If multiple apply, the attribute name is needed. Specific mappings from the inverse method can (optionally) be overridden, ignored, or set to constants or expressions.

+
+
+

The mappingInheritanceStrategy can be defined as shown in MapStruct Configuration. The existing options can be found here.

+
+
+

A mapped attribute does not always have the same type in the source and target objects. For instance, an attribute may be of type int in the source bean but of type Long in the target bean.

+
+
+

Another example are references to other objects which should be mapped to the corresponding types in the target model. E.g. the class ShoppingCart might have a property content of the type Product which needs to be converted into a ProductDto object when mapping a ShoppingCart object to ShoppingCartDto. For these cases, it’s useful to understand how Mapstruct converts the data types and the object references.

+
+
+

Also, the Chapter for nested bean mappings will help to configure MapStruct to map arbitrarily deep object graphs.

+
+
+

You can study running MapStruct implementation examples given by MapStruct or in our Quarkus reference project

+
+
+
+
+
+
+

2. Angular

+
+ +
+
+
+

3. Introduction

+
+ +
+

3.1. devon4ng

+
+

This guide describes an application architecture for web client development with Angular.

+
+
+
+

3.2. Motivation

+
+

Front-end development is a very difficult task since there are a lot of different frameworks, patterns and practices nowadays. For that reason, in devonfw we decided to make use of Angular since it is a full front-end framework that includes almost all the different patterns and features that any SPA may need and provides a well defined architecture to development, build and deploy.

+
+
+

The idea with devon4ng is to define an architecture which is a compromise between, on the one hand, leveraging the best practices and latest trends like reactive style development, on the other hand, providing a short on-boarding time while still using an architecture that helps us scale and be productive at the same time.

+
+
+

At the same time devon4ng aims to help developers to solve common problems that appear in many projects and provide samples and blueprints to show how to apply these solutions in real situations.

+
+
+
+

3.3. Contents

+ +
+

This section introduces in an easy way the main principles and guidelines based on Angular Style Guide.

+
+ +
+

The goal of this topic is to support the non-functional requirements for the client, i.e. mostly maintainability, scalability, efficiency and portability. As such it provides a component-oriented architecture following the same principles listed already in the devonfw architecture overview.

+
+
+
+

3.4. Layers

+
+

This section provides a condensed explanation about the different layers a good Angular application must provide.

+
+ +
+
+

3.5. Guides

+
+

This section introduces concepts to help developers with the tooling and package managers.

+
+ +
+
+

3.6. Angular

+
+

This is the main section of the documentation, where the developer will find guidelines for accessibility, how to use the Angular toolchain, how to refactor components, create libraries and, in general, maintain Angular applications. Last but not least, developers will also find solutions to common problems many of the Angular projects may have.

+
+
+ + + + + +
+ + +All the different topics are demonstrated in the samples folder with a small application. +
+
+ +
+
+

3.7. Ionic

+
+

As part of the devon4ng stack, we include a small section to explain how to develop hybrid mobile Ionic/Angular applications and create PWAs with this UI library. As the previous section, the contents are demonstrated in the samples folder.

+
+ +
+
+

3.8. Layouts

+
+

Any SPA application must have a layout. So, the purpose of this section is to explain the Angular Material approach.

+
+ +
+
+

3.9. NgRx

+
+

State Management is a big topic in big front-end application. This section explains the fundamentals of the industry standard library NgRx, showing its main components.

+
+ +
+
+

3.10. Cookbook

+
+

The Cookbook section aims to provide solutions to cross-topic challenges that at this moment do not fit in the previous sections. As the Angular section, some of the topics are demonstrated with a sample located in the samples folder.

+
+ +
+
+

3.11. devon4ng templates

+
+

In order to support CobiGen generation tool for Angular applications, devon4ng demos realization and provide more opinionated samples, the following templates are also included in devon4ng contents:

+
+
+ +
+
+
+
+
+

4. Architecture

+
+ +
+

4.1. Architecture

+
+

The following principles and guidelines are based on Angular Style Guide - especially Angular modules (see Angular Docs). +It extends those where additional guidance is needed to define an architecture which is:

+
+
+
    +
  • +

    maintainable across applications and teams

    +
  • +
  • +

    easy to understand, especially when coming from a classic Java/.Net perspective - so whenever possible the same principles apply both to the server and the client

    +
  • +
  • +

    pattern based to solve common problems

    +
  • +
  • +

    based on best of breed solutions coming from open source and Capgemini project experiences

    +
  • +
  • +

    gives as much guidance as necessary and as little as possible

    +
  • +
+
+
+
+

4.2. Overview

+
+

When using Angular the web client architecture is driven by the framework in a certain way Google and the Angular community think about web client architecture. +Angular gives an opinion on how to look at architecture. +It is a component based like devon4j but uses different terms which are common language in web application development. +The important term is module which is used instead of component. The primary reason is the naming collision with the Web Components standard (see Web Components).
+To clarify this:

+
+
+
    +
  • +

    A component describes an UI element containing HTML, CSS and JavaScript - structure, design and logic encapsulated inside a reusable container called component.

    +
  • +
  • +

    A module describes an applications feature area. The application flight-app may have a module called booking.

    +
  • +
+
+
+

An application developed using Angular consists of multiple modules. +There are feature modules and special modules described by the Angular Style Guide - core and shared. +Angular or Angular Style Guide give no guidance on how to structure a module internally. +This is where this architecture comes in.

+
+
+
+

4.3. Layers

+
+

The architecture describes two layers. The terminology is based on common language in web development.

+
+
+
+Architecture - Layers +
+
Figure 8. Layers
+
+
+
    +
  • +

    Components Layer encapsulates components which present the current application state. +Components are separated into Smart and Dumb Components. +The only logic present is view logic inside Smart Components.

    +
  • +
  • +

    Services Layer is more or less what we call 'business logic layer' on the server side. +The layer defines the applications state, the transitions between state and classic business logic. +Stores contain application state over time to which Smart Components subscribe to. +Adapters are used to perform XHR, WebSocket connections, etc. +The business model is described inside the module. +Use case services perform business logic needed for use cases. +A use case services interacts with the store and adapters. +Methods of use case services are the API for Smart Components. +Those methods are Actions in reactive terminology.

    +
  • +
+
+
+
+

4.4. Modules

+
+

Angular requires a module called app which is the main entrance to an application at runtime - this module gets bootstrapped. +Angular Style Guide defines feature modules and two special modules - core and shared.

+
+
+
+Architecture - Modules +
+
Figure 9. Modules
+
+
+

A feature module is basically a vertical cut through both layers. +The shared module consists of components shared across feature modules. +The core module holds services shared across modules. +So core module is a module only having a services layer +and shared module is a module only having a components layer.

+
+ +
+
+

4.5. Meta Architecture

+ +
+
+

4.6. Introduction

+ +
+
+

4.7. Purpose of this document

+
+

In our business applications, the client easily gets underestimated. Sometimes the client is more complex to develop and design than the server. While the server architecture is nowadays widely agreed upon as common sense, for clients this is not as obvious and stable, especially as it typically depends on the client framework used. Finding a concrete architecture applicable for all clients may therefore be difficult to accomplish.

+
+
+

This document tries to define on a high abstract level, a reference architecture which is supposed to be a mental image and frame for orientation regarding the evaluation and appliance of different client frameworks. As such it defines terms and concepts required to be provided for in any framework and thus gives a common ground of understanding for those acquainted with the reference architecture. This allows better comparison between the various frameworks out there, each having their own terms for essentially the same concepts. It also means that for each framework we need to explicitly map how it implements the concepts defined in this document.

+
+
+

The architecture proposed herein is neither new nor was it developed from scratch. Instead it is the gathered and consolidated knowledge and best practices of various projects (s. References).

+
+
+
+

4.8. Goal of the Client Architecture

+
+

The goal of the client architecture is to support the non-functional requirements for the client, i.e. mostly maintainability, scalability, efficiency and portability. As such it provides a component-oriented architecture following the same principles listed already in the devonfw architecture overview. Furthermore it ensures a homogeneity regarding how different concrete UI technologies are being applied in the projects, solving the common requirements in the same way.

+
+
+
+

4.9. Architecture Views

+
+

As for the server we distinguish between the business and the technical architecture. Where the business architecture is different from project to project and relates to the concrete design of dialog components given concrete requirements, the technical architecture can be applied to multiple projects.

+
+
+

The focus of this document is to provide a technical reference architecture on the client on a very abstract level defining required layers and components. How the architecture is implemented has to be defined for each UI technology.

+
+
+

The technical infrastructure architecture is out of scope for this document and although it needs to be considered, the concepts of the reference architecture should work across multiple TI architecture, i.e. native or web clients.

+
+
+
+

4.10. devonfw Reference Client Architecture

+
+

The following gives a complete overview of the proposed reference architecture. It will be built up incrementally in the following sections.

+
+
+
+Complete Client Architecture Overview +
+
+
+

Figure 1 Overview

+
+
+
+

4.11. Client Architecture

+
+

On the highest level of abstraction we see the need to differentiate between dialog components and their container they are managed in, as well as the access to the application server being the back-end for the client (e.g. a devon4j instance). This section gives a summary of these components and how they relate to each other. Detailed architectures for each component will be supplied in subsequent sections

+
+
+
+Client Architecture Overview +
+
+
+

Figure 2 Overview of Client Architecture

+
+
+
+

4.12. == Dialog Component

+
+

A dialog component is a logical, self-contained part of the user interface. It accepts user input and actions and controls communication with the user. Dialog components use the services provided by the dialog container in order to execute the business logic. They are self-contained, i.e. they possess their own user interface together with the associated logic, data and states.

+
+
+
    +
  • +

    Dialog components can be composed of other dialog components forming a hierarchy

    +
  • +
  • +

    Dialog components can interact with each other. This includes communication of a parent to its children, but also between components independent of each other regarding the hierarchy.

    +
  • +
+
+
+
+

4.13. == Dialog Container

+
+

Dialog components need to be managed in their life-cycle and how they can be coupled to each other. The dialog container is responsible for this along with the following:

+
+
+
    +
  • +

    Bootstrapping the client application and environment

    +
    +
      +
    • +

      Configuration of the client

      +
    • +
    • +

      Initialization of the application server access component

      +
    • +
    +
    +
  • +
  • +

    Dialog Component Management

    +
    +
      +
    • +

      Controlling the life-cycle

      +
    • +
    • +

      Controlling the dialog flow

      +
    • +
    • +

      Providing means of interaction between the dialogs

      +
    • +
    • +

      Providing application server access

      +
    • +
    • +

      Providing services to the dialog components
      +(e.g. printing, caching, data storage)

      +
    • +
    +
    +
  • +
  • +

    Shutdown of the application

    +
  • +
+
+
+
+

4.14. == Application Server Access

+
+

Dialogs will require a back-end application server in order to execute their business logic. Typically in a devonfw application the service layer will provide interfaces for the functionality exposed to the client. These business oriented interfaces should also be present on the client backed by a proxy handling the concrete call of the server over the network. This component provides the set of interfaces as well as the proxy.

+
+
+
+

4.15. Dialog Container Architecture

+
+

The dialog container can be further structured into the following components with their respective tasks described in own sections:

+
+
+
+Dialog Container Architecture Overview +
+
+
+

Figure 3 Dialog Container Architecture

+
+
+
+

4.16. == Application

+
+

The application component represents the overall client in our architecture. It is responsible for bootstrapping all other components and connecting them with each other. As such it initializes the components below and provides an environment for them to work in.

+
+
+
+

4.17. == Configuration Management

+
+

The configuration management manages the configuration of the client, so the client can be deployed in different environments. This includes configuration of the concrete application server to be called or any other environment-specific property.

+
+
+
+

4.18. == Dialog Management

+
+

The Dialog Management component provides the means to define, create and destroy dialog components. It therefore offers basic life-cycle capabilities for a component. In addition it also allows composition of dialog components in a hierarchy. The life-cycle is then managed along the hierarchy, meaning when creating/destroying a parent dialog, this affects all child components, which are created/destroyed as well.

+
+
+
+

4.19. == Service Registry

+
+

Apart from dialog components, a client application also consists of services offered to these. A service can thereby encompass among others:

+
+
+
    +
  • +

    Access to the application server

    +
  • +
  • +

    Access to the dialog container functions for managing dialogs or accessing the configuration

    +
  • +
  • +

    Dialog independent client functionality such as Printing, Caching, Logging, Encapsulated business logic such as tax calculation

    +
  • +
  • +

    Dialog component interaction

    +
  • +
+
+
+

The service registry offers the possibility to define, register and lookup these services. Note that these services could be dependent on the dialog hierarchy, meaning different child instances could obtain different instances / implementations of a service via the service registry, depending on which service implementations are registered by the parents.

+
+
+

Services should be defined as interfaces allowing for different implementations and thus loose coupling.

+
+
+
+

4.20. Dialog Component Architecture

+
+

A dialog component has to support all or a subset of the following tasks:
+(T1) Displaying the user interface incl. internationalization
+(T2) Displaying business data incl. changes made to the data due to user interactions and localization of the data
+(T3) Accepting user input including possible conversion from e.g. entered Text to an Integer
+(T4) Displaying the dialog state
+(T5) Validation of user input
+(T6) Managing the business data incl. business logic altering it due to user interactions
+(T7) Execution of user interactions
+(T8) Managing the state of the dialog (e.g. Edit vs. View)
+(T9) Calling the application server in the course of user interactions

+
+
+

Following the principle of separation of concerns, we further structure a dialog component in an own architecture allowing us the distribute responsibility for these tasks along the defined components:

+
+
+
+Dialog Component Architecture +
+
+
+

Figure 4 Overview of dialog component architecture

+
+
+
+

4.21. == Presentation Layer

+
+

The presentation layer generates and displays the user interface, accepts user input and user actions and binds these to the dialog core layer (T1-5). The tasks of the presentation layer fall into two categories:

+
+
+
    +
  • +

    Provision of the visual representation (View component)
    +The presentation layer generates and displays the user interface and accepts user input and user actions. The logical processing of the data, actions and states is performed in the dialog core layer. The data and user interface are displayed in localized and internationalized form.

    +
  • +
  • +

    Binding of the visual representation to the dialog core layer
    +The presentation layer itself does not contain any dialog logic. The data or actions entered by the user are then processed in the dialog core layer. There are three aspects to the binding to the dialog core layer. We refer to “data binding”, “state binding” and “action binding”. Syntactical and (to a certain extent) semantic validations are performed during data binding (e.g. cross-field plausibility checks). Furthermore, the formatted, localized data in the presentation layer is converted into the presentation-independent, neutral data in the dialog core layer (parsing) and vice versa (formatting).

    +
  • +
+
+
+
+

4.22. == Dialog Core Layer

+
+

The dialog core layer contains the business logic, the control logic, and the logical state of the dialog. It therefore covers tasks T5-9:

+
+
+
    +
  • +

    Maintenance of the logical dialog state and the logical data
    +The dialog core layer maintains the logical dialog state and the logical data in a form which is independent of the presentation. The states of the presentation (e.g. individual widgets) must not be maintained in the dialog core layer, e.g. the view state could lead to multiple presentation states disabling all editable widgets on the view.

    +
  • +
  • +

    Implementation of the dialog and dialog control logic
    +The component parts in the dialog core layer implement the client specific business logic and the dialog control logic. This includes, for example, the manipulation of dialog data and dialog states as well as the opening and closing of dialogs.

    +
  • +
  • +

    Communication with the application server
    +The dialog core layer calls the interfaces of the application server via the application server access component services.

    +
  • +
+
+
+

The dialog core layer should not depend on the presentation layer enforcing a strict layering and thus minimizing dependencies.

+
+
+
+

4.23. == Interactions between dialog components

+
+

Dialog components can interact in the following ways:

+
+
+
+Dialog Interactions +
+
+
+
    +
  • +

    Embedding of dialog components
    +As already said dialog components can be hierarchically composed. This composition works by embedding one dialog component within the other. Apart from the life-cycle managed by the dialog container, the embedding needs to cater for the visual embedding of the presentation and core layer.

    +
    +
      +
    • +

      Embedding dialog presentation
      +The parent dialog needs to either integrate the embedded dialog in its layout or open it in its own modal window.

      +
    • +
    • +

      Embedding dialog core
      +The parent dialog needs to be able to access the embedded instance of its children. This allows initializing and changing their data and states. On the other hand the children might require context information offered by the parent dialog by registering services in the hierarchical service registry.

      +
    • +
    +
    +
  • +
  • +

    Dialog flow
    +Apart from the embedding of dialog components representing a tight coupling, dialogs can interact with each other by passing the control of the UI, i.e. switching from one dialog to another.

    +
  • +
+
+
+

When interacting, dialog components should interact only between the same or lower layers, i.e. the dialog core should not access the presentation layer of another dialog component.

+
+
+
+

4.24. Appendix

+ +
+
+

4.25. Notes about Quasar Client

+
+

The Quasar client architecture as the consolidated knowledge of our CSD projects is the major source for the above drafted architecture. However, the above is a much simplified and more agile version thereof:

+
+
+
    +
  • +

    Quasar Client tried to abstract from the concrete UI library being used, so it could decouple the business from the technical logic of a dialog. The presentation layer should be the only one knowing the concrete UI framework used. This level of abstraction was dropped in this reference architecture, although it might of course still make sense in some projects. For fast-moving agile projects in the web however introducing such a level of abstraction takes effort with little gained benefits. With frameworks like Angular 2 we would even introduce one additional seemingly artificial and redundant layer, since it already separates the dialog core from its presentation.

    +
  • +
  • +

    In the past and in the days of Struts, JSF, etc. the concept of session handling was important for the client since part of the client was sitting on a server with a session relating it to its remote counterpart on the user's PC. Quasar Client catered for this need, by very prominently differentiating between session and application in the root of the dialog component hierarchy. However, in the current days of SPA applications and the lowered importance of server-side web clients, this prominent differentiation was dropped. When still needed the referenced documents will provide in more detail how to tailor the respective architecture to this end.

    +
  • +
+
+
+ +
+
+
+

5. Layers

+
+ +
+

5.1. Components Layer

+
+

The components layer encapsulates all components presenting the current application view state, which means data to be shown to the user. +The term component refers to a component described by the Web Components standard. +So this layer has all Angular components, directives and pipes defined for an application. +The main challenges are:

+
+
+
    +
  • +

    how to structure the components layer (see File Structure Guide)

    +
  • +
  • +

    decompose components into maintainable chunks (see Component Decomposition Guide)

    +
  • +
  • +

    handle component interaction

    +
  • +
  • +

    manage calls to the services layer

    +
  • +
  • +

    apply a maintainable data and event flow throughout the component tree

    +
  • +
+
+
+
+

5.2. Smart and Dumb Components

+
+

The architecture applies the concept of Smart and Dumb Components (syn. Containers and Presenters). +The concept means that components are divided into Smart and Dumb Components.

+
+
+

A Smart Component typically is a top-level dialog inside the component tree.

+
+
+
    +
  • +

    a component, that can be routed to

    +
  • +
  • +

    a modal dialog

    +
  • +
  • +

    a component, which is placed inside AppComponent

    +
  • +
+
+
+

A Dumb Component can be used by one to many Smart Components. +Inside the component tree a Dumb Component is a child of a Smart Component.

+
+
+
+Component Tree +
+
Figure 10. Component tree example
+
+
+

As shown the topmost component is always the AppComponent in Angular applications. +The component tree describes the hierarchy of components starting from AppComponent. +The figure shows Smart Components in blue and Dumb Components in green. +AppComponent is a Smart Component by definition. +Components placed inside the template of AppComponent are static components inside the component tree. +So they are always displayed. +In the example OverviewComponent and DetailsComponent are rendered by the Angular compiler depending on the current URL the application displays. +So OverviewComponent's sub-tree is displayed if the URL is /overview and DetailsComponent's sub-tree is displayed if the URL is /details. +To clarify this distinction further the following table shows the main differences.

+
+
+
Smart vs Dumb Components
+

|== = +|Smart Components |Dumb Components

+
+
+

|contain the current view state +|show data via binding (@Input) and contain no view state

+
+
+

|handle events emitted by Dumb Components +|pass events up the component tree to be handled by Smart Components (@Output)

+
+
+

|call the services layer +|never call the services layer

+
+
+

|use services +|do not use services

+
+
+

|consists of n Dumb Components +|is independent of Smart Components +|== =

+
+
+
+

5.3. Interaction of Smart and Dumb Components

+
+

With the usage of the Smart and Dumb Components pattern one of the most important parts is component interaction. +Angular comes with built-in support for component interaction with @Input() and @Output() Decorators. +The following figure illustrates a unidirectional data flow.

+
+
+
    +
  • +

    Data always goes down the component tree - from a Smart Component down its children.

    +
  • +
  • +

    Events bubble up, to be handled by a Smart Component.

    +
  • +
+
+
+
+Smart and Dumb Components Interaction +
+
Figure 11. Smart and Dumb Component Interaction
+
+
+

As shown a Dumb Component's role is to define a signature by declaring Input and Output Bindings.

+
+
+
    +
  • +

    @Input() defines what data is necessary for that component to work

    +
  • +
  • +

    @Output() defines which events can be listened on by the parent component

    +
  • +
+
+
+
Listing 31. Dumb Components define a signature
+
+
export class ValuePickerComponent {
+
+  @Input() columns: string[];
+  @Input() items: {}[];
+  @Input() selected: {};
+  @Input() filter: string;
+  @Input() isChunked = false;
+  @Input() showInput = true;
+  @Input() showDropdownHeader = true;
+
+  @Output() elementSelected = new EventEmitter<{}>();
+  @Output() filterChanged = new EventEmitter<string>();
+  @Output() loadNextChunk = new EventEmitter();
+  @Output() escapeKeyPressed = new EventEmitter();
+
+}
+
+
+
+

The example shows the Dumb Component ValuePickerComponent. +It describes seven input bindings with isChunked, showInput and showDropdownHeader being non-mandatory as they have a default value. +Four output bindings are present. Typically, a Dumb Component has very little to no code inside the TypeScript class.

+
+
+
Listing 32. Smart Components use the Dumb Components signature inside the template
+
+
<div>
+
+  <value-input
+    ...>
+  </value-input>
+
+  <value-picker
+    *ngIf="isValuePickerOpen"
+    [columns]="columns"
+    [items]="filteredItems"
+    [isChunked]="isChunked"
+    [filter]="filter"
+    [selected]="selectedItem"
+    [showDropdownHeader]="showDropdownHeader"
+    (loadNextChunk)="onLoadNextChunk()"
+    (elementSelected)="onElementSelected($event)"
+    (filterChanged)="onFilterChanged($event)"
+    (escapeKeyPressed)="onEscapePressedInsideChildTable()">
+  </value-picker>
+
+</div>
+
+
+
+

Inside the Smart Components template the events emitted by Dumb Components are handled. +It is a good practice to name the handlers with the prefix on* (e.g. onInputChanged()).

+
+ +
+
+

5.4. Services Layer

+
+

The services layer is more or less what we call 'business logic layer' on the server side. +It is the layer where the business logic is placed. +The main challenges are:

+
+
+
    +
  • +

    Define application state and an API for the components layer to use it

    +
  • +
  • +

    Handle application state transitions

    +
  • +
  • +

    Perform back-end interaction (XHR, WebSocket, etc.)

    +
  • +
  • +

    Handle business logic in a maintainable way

    +
  • +
  • +

    Configuration management

    +
  • +
+
+
+

All parts of the services layer are described in this chapter. +An example which puts the concepts together can be found at the end Interaction of Smart Components through the services layer.

+
+
+
+

5.5. Boundaries

+
+

There are two APIs for the components layer to interact with the services layer:

+
+
+
    +
  • +

    A store can be subscribed to for receiving state updates over time

    +
  • +
  • +

    A use case service can be called to trigger an action

    +
  • +
+
+
+

To illustrate the fact the following figure shows an abstract overview.

+
+
+
+Smart and Dumb Components Interaction +
+
Figure 12. Boundaries to components layer
+
+
+
+

5.6. Store

+
+

A store is a class which defines and handles application state with its transitions over time. +Interaction with a store is always synchronous. +A basic implementation using RxJS can look like this.

+
+
+ + + + + +
+ + +A more profound implementation taken from a real-life project can be found here (Abstract Class Store). +
+
+
+
Listing 33. Store defined using RxJS
+
+
@Injectable()
+export class ProductSearchStore {
+
+  private stateSource = new BehaviorSubject<ProductSearchState>(defaultProductSearchState);
+  state$ = this.stateSource.asObservable();
+
+  setLoading(isLoading: boolean) {
+    const currentState = this.stateSource.getValue();
+    this.stateSource.next({
+      isLoading: isLoading,
+      products: currentState.products,
+      searchCriteria: currentState.searchCriteria
+    });
+  }
+
+}
+
+
+
+

In the example ProductSearchStore handles state of type ProductSearchState. +The public API is the property state$ which is an observable of type ProductSearchState. +The state can be changed with method calls. +So every desired change to the state needs to be modeled with a method. +In reactive terminology this would be an Action. +The store does not use any services. +Subscribing to the state$ observable leads to the subscribers receiving every new state.

+
+
+

This is basically the Observer Pattern:
+The store consumer registers itself to the observable via state$.subscribe() method call. +The first parameter of subscribe() is a callback function to be called when the subject changes. +This way the consumer - the observer - is registered. +When next() is called with a new state inside the store, all callback functions are called with the new value. +So every observer is notified of the state change. +This equals the Observer Pattern push type.

+
+
+

A store is the API for Smart Components to receive state from the service layer. +State transitions are handled automatically with Smart Components registering to the state$ observable.

+
+
+
+

5.7. Use Case Service

+
+

A use case service is a service which has methods to perform asynchronous state transitions. +In reactive terminology this would be an Action of Actions - a thunk (redux) or an effect (@ngrx).

+
+
+
+Use Case Service +
+
Figure 13. Use case services are the main API to trigger state transitions
+
+
+

A use case service's method - an action - interacts with adapters, business services and stores. +So use case services orchestrate whole use cases. +For an example see use case service example.

+
+
+
+

5.8. Adapter

+
+

An adapter is used to communicate with the back-end. +This could be a simple XHR request, a WebSocket connection, etc. +An adapter is simple in the way that it does not add anything other than the pure network call. +So there is no caching or logging performed here. +The following listing shows an example.

+
+
+

For further information on back-end interaction see Consuming REST Services

+
+
+
Listing 34. Calling the back-end via an adapter
+
+
@Injectable()
+export class ProductsAdapter {
+
+  private baseUrl = environment.baseUrl;
+
+  constructor(private http: HttpClient) { }
+
+  getAll(): Observable<Product[]> {
+    return this.http.get<Product[]>(this.baseUrl + '/products');
+  }
+
+}
+
+
+
+
+

5.9. Interaction of Smart Components through the services layer

+
+

The interaction of smart components is a classic problem which has to be solved in every UI technology. +It is basically how one dialog tells the other something has changed.

+
+
+

An example is adding an item to the shopping basket. +With this action there need to be multiple state updates.

+
+
+
    +
  • +

    The small logo showing how many items are currently inside the basket needs to be updated from 0 to 1

    +
  • +
  • +

    The price needs to be recalculated

    +
  • +
  • +

    Shipping costs need to be checked

    +
  • +
  • +

    Discounts need to be updated

    +
  • +
  • +

    Ads need to be updated with related products

    +
  • +
  • +

    etc.

    +
  • +
+
+
+
+

5.10. Pattern

+
+

To handle this interaction in a scalable way we apply the following pattern.

+
+
+
+Interaction of Smart Components via services layer +
+
Figure 14. Smart Component interaction
+
+
+

The state of interest is encapsulated inside a store. All Smart Components interested in the state have to subscribe to the store’s API served by the public observable. Thus, with every update to the store the subscribed components receive the new value. The components basically react to state changes. Altering a store can be done directly if the desired change is synchronous. Most actions are of asynchronous nature so the UseCaseService comes into play. Its actions are void methods, which implement a use case, e.g., adding a new item to the basket. It calls asynchronous actions and can perform multiple store updates over time.

+
+
+

To put this pattern into perspective the UseCaseService is a programmatic alternative to redux-thunk or @ngrx/effects. The main motivation here is to use the full power of TypeScript --strictNullChecks and to let the learning curve not to become as steep as it would be when learning a new state management framework. This way actions are just void method calls.

+
+
+
+

5.11. Example

+
+
+Smart component interaction example +
+
Figure 15. Smart Components interaction example
+
+
+

The example shows two Smart Components sharing the FlightSearchState by using the FlightSearchStore. +The use case shown is started by an event in the Smart Component FlightSearchComponent. The action loadFlight() is called. This could be submitting a search form. +The UseCaseService is FlightSearchService, which handles the use case Load Flights.

+
+
+
UseCaseService example
+

+
+
+
+
export class FlightSearchService {
+
+  constructor(
+    private flightSearchAdapter: FlightSearchAdapter,
+    private store: FlightSearchStore
+  ) { }
+
+  loadFlights(criteria: FlightSearchCriteria): void {
+    this.store.setLoadingFlights(true);
+    this.store.clearFlights();
+
+    this.flightSearchAdapter.getFlights(criteria.departureDate,
+        {
+          from: criteria.departureAirport,
+          to: criteria.destinationAirport
+        })
+      .finally(() => this.store.setLoadingFlights(false))
+      .subscribe((result: FlightTo[]) => this.store.setFlights(result, criteria));
+  }
+
+}
+
+
+
+

First the loading flag is set to true and the current flights are cleared. This leads to the Smart Component showing a spinner indicating the loading action. Then the asynchronous XHR is triggered by calling the adapter. After completion the loading flag is set to false causing the loading indication no longer to be shown. If the XHR was successful, the data would be put into the store. If the XHR was not successful, this would be the place to handle a custom error. All general network issues should be handled in a dedicated class, e.g., an interceptor. So for example the basic handling of 404 errors is not done here.

+
+
+
+
+
+

6. Guides

+
+ +
+

6.1. Package Managers

+
+

There are two major package managers currently used for JavaScript / TypeScript projects which leverage NodeJS as a build platform.

+
+
+
    +
  1. +

    npm

    +
  2. +
  3. +

    yarn

    +
  4. +
+
+
+

Our recommendation is to use yarn but both package managers are fine.

+
+
+ + + + + +
+ + +When using npm it is important to use a version greater than 5.0 as npm 3 has major drawbacks compared to yarn. +The following guide assumes that you are using npm >= 5 or yarn. +
+
+
+

Before you start reading further, please take a look at the docs:

+
+ +
+

The following guide will describe best practices for working with yarn / npm.

+
+
+
+

6.2. Semantic Versioning

+
+

When working with package managers it is very important to understand the concept of semantic versioning.

+
+
+
Version example 1.2.3
+

|== == == = +|Version |1. |2. |3 +|Version name when incrementing |Major (2.0.0) |Minor (1.3.0) |Patch (1.2.4) +|Has breaking changes |yes |no |no +|Has features |yes |yes |no +|Has bug fixes |yes |yes |yes +|== == == =

+
+
+

The table gives an overview of the most important parts of semantic versioning. +In the header version 1.2.3 is displayed. +The first row shows the name and the resulting version when incrementing a part of the version. +The next rows show specifics of the resulting version - e.g. a major version can have breaking changes, features and bug fixes.

+
+
+

Packages from npm and yarn leverage semantic versioning and instead of selecting a fixed version one can specify a selector. +The most common selectors are:

+
+
+
    +
  • +

    ^1.2.3 +At least 1.2.3 - 1.2.4 or 1.3.0 can be used, 2.0.0 can not be used

    +
  • +
  • +

    ~1.2.3 +At least 1.2.3 - 1.2.4 can be used, 2.0.0 and 1.3.0 can not be used

    +
  • +
  • +

    >=1.2.3 +At least 1.2.3 - every version greater can also be used

    +
  • +
+
+
+

This achieves a lower number of duplicates. +To give an example:

+
+
+

If package A needs version 1.3.0 of package C and package B needs version 1.4.0 of package C one would end up with 4 packages.

+
+
+

If package A needs version ^1.3.0 of package C and package B needs version 1.4.0 of package C one would end up with 3 packages. +A would use the same version of C as B - 1.4.0.

+
+
+
+

6.3. Do not modify package.json and lock files by hand

+
+

Dependencies are always added using a yarn or npm command. +Altering the package.json, package-lock.json or yarn.lock file by hand is not recommended.

+
+
+

Always use a yarn or npm command to add a new dependency.

+
+
+

Adding the package express with yarn to dependencies.

+
+
+
+
yarn add express
+
+
+
+

Adding the package express with npm to dependencies.

+
+
+
+
npm install express
+
+
+
+
+

6.4. What does the lock file do

+
+

The purpose of the files yarn.lock and package-lock.json is to freeze versions for a short time.

+
+
+

The following problem is solved:

+
+
+
    +
  • +

    Developer A upgrades the dependency express to fixed version 4.16.3.

    +
  • +
  • +

    express has sub-dependency accepts with version selector ~1.3.5

    +
  • +
  • +

    His local node_modules folder receives accepts in version 1.3.5

    +
  • +
  • +

    On his machine everything is working fine

    +
  • +
  • +

    Afterward version 1.3.6 of accepts is published - it contains a major bug

    +
  • +
  • +

    Developer B now clones the repo and loads the dependencies.

    +
  • +
  • +

    He receives version 1.3.6 of accepts and blames developer A for upgrading to a broken version.

    +
  • +
+
+
+

Both yarn.lock and package-lock.json freeze all the dependencies. +For example in yarn.lock you will find:

+
+
+
Listing 35. yarn.lock example (excerpt)
+
+
accepts@~1.3.5:
+  version "1.3.5"
+  resolved "[...URL to registry]"
+  dependencies:
+    mime-types "~2.1.18"
+    negotiator "0.6.1"
+
+mime-db@~1.33.0:
+  version "1.33.0"
+  resolved "[...URL to registry]"
+
+mime-types@~2.1.18:
+  version "2.1.18"
+  resolved "[...URL to registry]"
+  dependencies:
+    mime-db "~1.33.0"
+
+negotiator@0.6.1:
+  version "0.6.1"
+  resolved "[...URL to registry]"
+
+
+
+

The described problem is solved by the example yarn.lock file.

+
+
+
    +
  • +

    accepts, selected via ~1.3.5, is frozen at version 1.3.5

    +
  • +
  • +

    All of its sub-dependencies are also frozen. +It needs mime-types at version ~2.1.18 which is frozen at 2.1.18. +mime-types needs mime-db at ~1.33.0 which is frozen at 1.33.0

    +
  • +
+
+
+

Every developer will receive the same versions of every dependency.

+
+
+ + + + + +
+ + +You have to make sure all your developers are using the same npm/yarn version - this includes the CI build. +
+
+ +
+
+

6.5. Package Managers Workflow

+ +
+
+

6.6. Introduction

+
+

This document aims to provide you the necessary documentation and sources in order to help you understand the importance of dependencies between packages.

+
+
+

Projects in NodeJS make use of modules, chunks of reusable code made by other people or teams. These small chunks of reusable code are called packages [3]. Packages are used to solve specific problems or tasks. These relations between your project and the external packages are called dependencies.

+
+
+

For example, imagine we are doing a small program that takes your birthday as an input and tells you how many days are left until your birthday. We search in the repository if someone has published a package to retrieve the actual date and manage date types, and maybe we could search for another package to show a calendar, because we want to optimize our time, and we wish the user to click a calendar button and choose the day in the calendar instead of typing it.

+
+
+

As you can see, packages are convenient. In some cases, they may be even needed, as they can manage aspects of your program you may not be proficient in, or provide an easier use of them.

+
+
+

For more comprehensive information visit npm definition

+
+
+
+

6.7. Package.json

+
+

Dependencies in your project are stored in a file called package.json. Every package.json must contain, at least, the name and version of your project.

+
+
+

Package.json is located in the root of your project.

+
+
+ + + + + +
+ + +If package.json is not on your root directory refer to Problems you may encounter section +
+
+
+

If you wish to learn more information about package.json, click on the following links:

+
+ +
+
+

6.8. Content of package.json

+
+

As you noticed, package.json is a really important file in your project. It contains essential information about our project, therefore you need to understand what’s inside.

+
+
+

The structure of package.json is divided in blocks, inside the first one you can find essential information of your project such as the name, version, license and optionally some [Scripts].

+
+
+
+
{
+  "name": "exampleproject",
+  "version": "0.0.0",
+  "license": "MIT",
+  "scripts": {
+    "ng": "ng",
+    "start": "ng serve",
+    "build": "ng build",
+    "test": "ng test",
+    "lint": "ng lint",
+    "e2e": "ng e2e"
+  }
+
+
+
+

The next block is called dependencies and contains the packages that project needs in order to be developed, compiled and executed.

+
+
+
+
"private": true,
+  "dependencies": {
+    "@angular/animations": "^4.2.4",
+    "@angular/common": "^4.2.4",
+    "@angular/forms": "^4.2.4",
+    ...
+    "zone.js": "^0.8.14"
+  }
+
+
+
+

After dependencies we find devDependencies, another kind of dependencies present in the development of the application but unnecessary for its execution. One example is typescript. Code is written in typescript, and then, transpiled to JavaScript. This means the application is not using typescript in execution and consequently not included in the deployment of our application.

+
+
+
+
"devDependencies": {
+    "@angular/cli": "1.4.9",
+    "@angular/compiler-cli": "^4.2.4",
+    ...
+    "@types/node": "~6.0.60",
+    "typescript": "~2.3.3"
+  }
+
+
+
+

Having a peer dependency means that your package needs a dependency that is the same exact dependency as the person installing your package

+
+
+
+
"peerDependencies": {
+    "package-123": "^2.7.18"
+  }
+
+
+
+

Optional dependencies are just that: optional. If they fail to install, Yarn will still say the install process was successful.

+
+
+
+
"optionalDependencies": {
+    "package-321": "^2.7.18"
+  }
+
+
+
+

Finally you can have bundled dependencies which are packages bundled together when publishing your package in a repository.

+
+
+
+
{
+  "bundledDependencies": [
+    "package-4"
+  ]
+}
+
+
+
+

Here is the link to an in-depth explanation of dependency types​.

+
+
+
+

6.9. Scripts

+
+

Scripts are a great way of automating tasks related to your package, such as simple build processes or development tools.

+
+
+

For example:

+
+
+
+
{
+  "name": "exampleproject",
+  "version": "0.0.0",
+  "license": "MIT",
+  "scripts": {
+    "build-project": "node hello-world.js",
+  }
+
+
+
+

You can run that script by running the command yarn (run) script or npm run script, check the example below:

+
+
+
+
$ yarn (run) build-project    # run is optional
+$ npm run build-project
+
+
+
+

There are special reserved words for scripts, like pre-install, which will execute the script automatically before the packages you install are installed.

+
+
+

Check different uses for scripts in the following links:

+
+ +
+

Or you can go back to +[Content of package.json]​.

+
+
+
+

6.10. Managing dependencies

+
+

In order to manage dependencies we recommend using package managers in your projects.

+
+
+

A big reason is their usability. Adding or removing a package is really easy, and by doing so, the package manager updates the package.json and copies (or removes) the package to the needed location, with a single command.

+
+
+

Another reason, closely related to the first one, is reducing human error by automating the package management process.

+
+
+

Two of the package managers you can use in NodeJS projects are "yarn" and "npm". While you can use both, we encourage you to use only one of them while working on projects. Using both may lead to different dependencies between members of the team.

+
+
+
+

6.11. npm

+
+

We’ll start by installing npm following this small guide here.

+
+
+

As stated on the web, npm comes inside of NodeJS, and must be updated after installing NodeJS; the same guide you used earlier contains the instructions to update npm.

+
+
+

How npm works

+
+
+

In order to explain how npm works, let’s take a command as an example:

+
+
+
+
$ npm install @angular/material @angular/cdk
+
+
+
+

This command tells npm to look for the packages @angular/material and @angular/cdk in the npm registry, download and decompress them in the folder node_modules along with their own dependencies. Additionally, npm will update package.json and create a new file called package-lock.json.

+
+
+

After initialization and installing the first package there will be a new folder called node_modules in your project. This folder is where your packages are unzipped and stored, following a tree scheme.

+
+
+

Take into consideration that both npm and yarn need a package.json in the root of your project in order to work properly. If you don’t have it after creating your project, download the package.json again from the repository or you’ll have to start again.

+
+
+

Brief overview of commands

+
+
+

If we need to create a package.json from scratch, we can use the command init. This command asks the user for basic information about the project and creates a brand new package.json.

+
+
+
+
$ npm init
+
+
+
+

Install (or i) installs all modules listed as dependencies in package.json locally. You can also specify a package, and install that package. Install can also be used with the parameter -g, which tells npm to install the [Global package].

+
+
+
+
$ npm install
+$ npm i
+$ npm install Package
+
+
+
+ + + + + +
+ + +Earlier versions of npm did not add dependencies to package.json unless it was used with the flag --save, so npm install package would be npm install --save package, you have one example below. +
+
+
+
+
$ npm install --save Package
+
+
+
+

Npm needs flags in order to know what kind of dependency you want in your project, in npm you need to put the flag -D or --save-dev to install devDependencies, for more information consult the links at the end of this section.

+
+
+
+
$ npm install -D package
+$ npm install --save-dev package
+
+
+
+

+
+
+

The next command uninstalls the module you specified in the command.

+
+
+
+
$ npm uninstall Package
+
+
+
+

ls command shows us the dependencies like a nested tree, useful if you have few packages, not so useful when you need a lot of packages.

+
+
+
+
$ npm ls
+
+
+
+
+
npm@@VERSION@ /path/to/npm
+└─┬ init-package-json@0.0.4
+  └── promzard@0.1.5
+
+
+
+
example tree
+

We recommend you to learn more about npm commands in the following link, navigating to the section CLI commands.

+
+
+

About Package-lock.json

+
+
+

Package-lock.json describes the dependency tree resulting of using package.json and npm. +Whenever you update, add or remove a package, package-lock.json is deleted and redone with +the new dependencies.

+
+
+
+
 "@angular/animations": {
+      "version": "4.4.6",
+      "resolved": "https://registry.npmjs.org/@angular/animations/-/animations-4.4.6.tgz",
+      "integrity": "sha1-+mYYmaik44y3xYPHpcl85l1ZKjU=",
+      "requires": {
+        "tslib": "1.8.0"
+      }
+
+
+
+

This lock file is checked every time the command npm i (or npm install) is used without specifying a package, +in the case it exists and it’s valid, npm will install the exact tree that was generated, such that subsequent +installs are able to generate identical dependency trees.

+
+
+ + + + + +
+ + +It is not recommended to modify this file yourself. It’s better to leave its management to npm. +
+
+
+

More information is provided by the npm team at package-lock.json

+
+
+
+

6.12. Yarn

+
+

Yarn is an alternative to npm. If you wish to install yarn, follow the guide getting started with yarn and download the correct version for your operating system. NodeJS is also needed; you can find it here.

+
+
+

Working with yarn

+
+
+

Yarn is used like npm, with small differences in syntax, for example npm install module is changed to yarn add module.

+
+
+
+
$ yarn add @covalent
+
+
+
+

This command is going to download the required packages, modify package.json, put the package in the folder node_modules and makes a new yarn.lock with the new dependency.

+
+
+

However, unlike npm, yarn maintains a cache with packages you download inside. You don’t need to download every file every time you do a general installation. This means installations faster than npm.

+
+
+

Similarly to npm, yarn creates and maintains its own lock file, called yarn.lock. Yarn.lock gives enough information about the project for the dependency tree to be reproduced.

+
+
+

yarn commands

+
+
+

Here we have a brief description of yarn’s most used commands:

+
+
+
+
$ yarn add Package
+$ yarn add --dev Package
+
+
+
+

Adds a package locally to use in your package. Adding the flags --dev or -D will add them to devDependencies instead of the default dependencies, if you need more information check the links at the end of the section.

+
+
+
+
$ yarn init
+
+
+
+

Initializes the development of a package.

+
+
+
+
$ yarn install
+
+
+
+

Installs all the dependencies defined in a package.json file, you can also write "yarn" to achieve the same effect.

+
+
+
+
$ yarn remove Package
+
+
+
+

You use it when you wish to remove a package from your project.

+
+
+
+
$ yarn global add Package
+
+
+
+

Installs the [Global package].

+
+
+

Please, refer to the documentation to learn more about yarn commands and their attributes: yarn commands

+
+
+

yarn.lock

+
+
+

This file has the same purpose as Package-lock.json: to guide the package manager, in this case yarn, to install the dependency tree specified in yarn.lock.

+
+
+

Yarn.lock and package.json are essential files when collaborating in a project with more co-workers, and may be a source of errors if programmers do not use the same manager.

+
+
+

Yarn.lock follows the same structure as package-lock.json, you can find an example of dependency below:

+
+
+
+
"@angular/animations@^4.2.4":
+  version "4.4.6"
+  resolved "https://registry.yarnpkg.com/@angular/animations/-/animations-4.4.6.tgz#fa661899a8a4e38cb7c583c7a5c97ce65d592a35"
+  dependencies:
+    tslib "^1.7.1"
+
+
+
+ + + + + +
+ + +As with package-lock.json, it’s strongly not advised to modify this file. Leave its management to yarn +
+
+
+

You can learn more about yarn.lock here: yarn.lock

+
+
+
+

6.13. Global package

+
+

Global packages are packages installed in your operating system instead of your local project. Global packages are useful for developer tooling that is not part of any individual project but instead is used for local commands.

+
+
+

A good example of global package is @angular/cli, a command line interface for angular used in our projects. You can install +a global package in npm with "npm install -g package" and "yarn global add package" with yarn, you have a npm example below:

+
+
+
Listing 36. npm global package
+
+
npm install -g @angular/cli
+
+
+ +
+
+

6.14. Package version

+
+

Dependencies are critical to the success of a package. You must be extra careful about +which version packages are using, one package in a different version may break your code.

+
+
+

Versioning in npm and yarn, follows a semantic called semver, following the logic +MAJOR.MINOR.PATCH, like for example, @angular/animations: 4.4.6.

+
+
+

Different versions

+
+
+

Sometimes, packages are installed with a different version from the one initially installed. +This happens because package.json also contains the range of versions we allow yarn or npm to +install or update to, example:

+
+
+
+
"@angular/animations": "^4.2.4"
+
+
+
+

And here the installed one:

+
+
+
+
 "@angular/animations": {
+      "version": "4.4.6",
+      "resolved": "https://registry.npmjs.org/@angular/animations/-/animations-4.4.6.tgz",
+      "integrity": "sha1-+mYYmaik44y3xYPHpcl85l1ZKjU=",
+      "requires": {
+        "tslib": "1.8.0"
+      }
+
+
+
+

As you can see, the version we initially added is 4.2.4, and the version finally installed after +a global installation of all packages, 4.4.6.

+
+
+

Installing packages without package-lock.json or yarn.lock using their respective packet managers, will always +end with npm or yarn installing the latest version allowed by package.json.

+
+
+

"@angular/animations": "^4.2.4" contains not only the version we added, but also the range we allow npm and yarn +to update. Here are some examples:

+
+
+
+
"@angular/animations": "<4.2.4"
+
+
+
+

The version installed must be lower than 4.2.4 .

+
+
+
+
"@angular/animations": ">=4.2.4"
+
+
+
+

The version installed must be greater than or equal to 4.2.4 .

+
+
+
+
"@angular/animations": "=4.2.4"
+
+
+
+

The version installed must be equal to 4.2.4.

+
+
+
+
"@angular/animations": "^4.2.4"
+
+
+
+

The version installed cannot modify the first non zero digit, for example in this case +it cannot surpass 5.0.0 or be lower than 4.2.4 .

+
+
+

You can learn more about this in Versions

+
+
+
+

6.15. Problems you may encounter

+
+

If you can’t find package.json, you may have deleted the one you had previously, +which means you have to download the package.json from the repository. +In the case you are creating a new project you can create a new package.json. More information +in the links below. Click on Package.json if you come from that section.

+
+ +
+ + + + + +
+ + +Using npm install or yarn without package.json in your projects will +result in compilation errors. As we mentioned earlier, +Package.json contains essential information about your project. +
+
+
+

If you have package.json, but you don’t have package-lock.json or yarn.lock the use of +command "npm install" or "yarn" may result in a different dependency tree.

+
+
+

If you are trying to import a module and Visual Studio Code is not able to find it, this is usually caused by an error adding the package to the project; try to add the module again with yarn or npm, and restart Visual Studio Code.

+
+
+

Be careful with the semantic versioning inside your package.json of the packages, +or you may find a new update on one of your dependencies breaking your code.

+
+
+ + + + + +
+ + +In the following link +there is a solution to a problematic update to one package. +
+
+
+

A list of common errors of npm can be found in: npm errors

+
+
+
+

6.16. Recommendations

+
+

Use yarn or npm in your project, reach an agreement with your team in order to choose one, this will avoid +undesired situations like forgetting to upload an updated yarn.lock or package-lock.json. +Be sure to have the latest version of your project when possible.

+
+
+ + + + + +
+ + +Pull your project every time it’s updated. Erase your node_modules folder and reinstall all +dependencies. This assures you to be working with the same dependencies your team has. +
+
+
+

AD Center recommends the use of yarn.

+
+ +
+
+

6.17. Yarn 2

+
+

Yarn v2 is a very different software from the v1. The following list contains the main new features:

+
+ +
+

Please, read them carefully to decide if your current project is suitable to use Yarn 2 as package manager.

+
+
+ + + + + +
+ + +Some features are still experimental, so please do not use them in production environments. +
+
+
+

More info at https://yarnpkg.com/

+
+
+
+

6.18. Global Install

+
+

Installing Yarn 2.x globally is discouraged as Yarn team is moving to a per-project install strategy. We advise you to keep Yarn 1.x (Classic) as your global binary by installing it via the instructions you can find here.

+
+
+

Once you’ve followed the instructions (running yarn --version from your home directory should yield something like 1.22.0), go to the next section to see how to enable Yarn 2 on your project.

+
+
+
+

6.19. Per-project install

+
+

Follow these instructions to update your current devon4ng project to Yarn 2:

+
+
+
    +
  1. +

    Follow the global install instructions.

    +
  2. +
  3. +

    Move into your project folder:

    +
    +
    +
    cd ~/path/to/project
    +
    +
    +
  4. +
  5. +

    Run the following command:

    +
    +
    +
    yarn policies set-version berry # below v1.22
    +yarn set version berry          # on v1.22+
    +
    +
    +
  6. +
  7. +

    Since Angular CLI still is not fully supported with the new Yarn architecture as it is not compatible with PnP it is necessary to include the node-modules plugin adding the following line in the .yarnrc.yml file:

    +
    +
    +
    nodeLinker: node-modules
    +
    +
    +
  8. +
  9. +

    Commit the .yarn and .yarnrc.yml changes

    +
  10. +
  11. +

    Run again yarn install.

    +
  12. +
+
+
+ + + + + +
+ + +For more advanced migration topics please refer to https://yarnpkg.com/advanced/migration +
+
+
+
+

6.20. Which files should be added to gitignore file?

+
+

If you’re using Zero-Installs:

+
+
+
+
.yarn/*
+!.yarn/cache
+!.yarn/releases
+!.yarn/plugins
+
+
+
+

If you’re not using Zero-Installs:

+
+
+
+
.yarn/*
+!.yarn/releases
+!.yarn/plugins
+.pnp.*
+
+
+ +
+
+
+
+

7. Angular

+
+ +
+

7.1. Accessibility

+
+

Multiple studies suggest that around 15-20% of the population are living with a disability of some kind. In comparison, that number is higher than any single browser demographic currently, other than Chrome. Not considering those users when developing an application means excluding a large number of people from being able to use it comfortably or at all.

+
+
+

Some people are unable to use a mouse, view a screen, see low-contrast text, or hear dialogue or music, and some people have difficulty understanding complex language. These users need support such as keyboard support, screen reader support, high-contrast text, captions and transcripts, and plain-language support. A disability may range from permanent to situational.

+
+
+
+

7.2. Key Concerns of Accessible Web Applications

+
+
    +
  • +

    Semantic Markup - Allows the application to be understood on a more general level rather than just details of what’s being rendered

    +
  • +
  • +

    Keyboard Accessibility - Applications must still be usable when using only a keyboard

    +
  • +
  • +

    Visual Assistance - color contrast, focus of elements and text representations of audio and events

    +
  • +
+
+
+
+

7.3. Semantic Markup

+
+

If you’re creating custom element directives, Web Components or HTML in general, use native elements wherever possible to utilize built-in events and properties. Alternatively, use ARIA to communicate semantic meaning.

+
+
+

HTML tags have attributes that provide extra context on what’s being displayed in the browser. For example, the <img> tag’s alt attribute lets the reader know what is being shown using a short description. However, native tags don’t cover all cases. This is where ARIA fits in. ARIA attributes can provide context on what roles specific elements have in the application or on how elements within the document relate to each other.

+
+
+

A modal component can be given the role of dialog or alertdialog to let the browser know that the component is acting as a modal. The modal component template can use the ARIA attributes aria-labelledby and aria-describedby to describe to readers what the title and purpose of the modal is.

+
+
+
+
@Component({
+    selector: 'ngc2-app',
+    template: `
+      <ngc2-notification-button
+        message="Hello!"
+        label="Greeting"
+        role="button">
+      </ngc2-notification-button>
+      <ngc2-modal
+        [title]="modal.title"
+        [description]="modal.description"
+        [visible]="modal.visible"
+        (close)="modal.close()">
+      </ngc2-modal>
+    `
+})
+export class AppComponent {
+  constructor(private modal: ModalService) { }
+}
+
+
+
+

notification-button.component.ts

+
+
+
+
@Component({
+  selector: 'ngc2-modal',
+  template: `
+    <div
+      role="dialog"
+      aria-labelledby="modal-title"
+      aria-describedby="modal-description">
+      <div id="modal-title">{{title}}</div>
+      <p id="modal-description">{{description}}</p>
+      <button (click)="close.emit()">OK</button>
+    </div>
+  `
+})
+export class ModalComponent {
+  ...
+}
+
+
+
+
+

7.4. Keyboard Accessibility

+
+

Keyboard accessibility is the ability of your application to be interacted with using just a keyboard. The more streamlined the site can be used this way, the more keyboard accessible it is. Keyboard accessibility is one of the largest aspects of web accessibility since it targets:

+
+
+
    +
  • +

    those with motor disabilities who can’t use a mouse

    +
  • +
  • +

    users who rely on screen readers and other assistive technology, which require keyboard navigation

    +
  • +
  • +

    those who prefer not to use a mouse

    +
  • +
+
+
+
+

7.5. Focus

+
+

Keyboard interaction is driven by something called focus. In web applications, only one element on a document has focus at a time, and a keypress will activate whatever function is bound to that element. The focused element’s border can be styled with CSS using the outline property, but it should not be removed. Elements can also be styled using the :focus pseudo-selector.

+
+
+
+

7.6. Tabbing

+
+

The most common way of moving focus along the page is through the tab key. Elements will be traversed in the order they appear in the document outline - so that order must be carefully considered during development. There is a way to change the default tab order; this can be done through the tabindex attribute. The tabindex can be given the values: * less than zero - to let readers know that an element should be focusable but not keyboard accessible * 0 - to let readers know that the element should be accessible by keyboard * greater than zero - to let readers know the order in which the focusable element should be reached using the keyboard. Order is calculated from lowest to highest.

+
+
+
+

7.7. Transitions

+
+

The majority of transitions that happen in an Angular application will not involve a page reload. This means that developers will need to carefully manage what happens to focus in these cases.

+
+
+

For example:

+
+
+
+
@Component({
+  selector: 'ngc2-modal',
+  template: `
+    <div
+      role="dialog"
+      aria-labelledby="modal-title"
+      aria-describedby="modal-description">
+      <div id="modal-title">{{title}}</div>
+      <p id="modal-description">{{description}}</p>
+      <button (click)="close.emit()">OK</button>
+    </div>
+  `,
+})
+export class ModalComponent {
+  constructor(private modal: ModalService, private element: ElementRef) { }
+
+  ngOnInit() {
+    this.modal.visible$.subscribe(visible => {
+      if(visible) {
+        setTimeout(() => {
+          this.element.nativeElement.querySelector('button').focus();
+        }, 0);
+      }
+    })
+  }
+}
+
+
+
+
+

7.8. Visual Assistance

+
+

One large category of disability is visual impairment. This includes not just the blind, but those who are color blind or partially sighted, and require some additional consideration.

+
+
+
+

7.9. Color Contrast

+
+

When choosing colors for text or elements on a website, the contrast between them needs to be considered. For WCAG 2.0 AA, this means that the contrast ratio for text or visual representations of text needs to be at least 4.5:1. There are tools online to measure the contrast ratio such as this color contrast checker from WebAIM or be checked with using automation tests.

+
+
+
+

7.10. Visual Information

+
+

Color can help a user’s understanding of information, but it should never be the only way to convey information to a user. For example, a user with red/green color-blindness may have trouble discerning at a glance if an alert is informing them of success or failure.

+
+
+
+

7.11. Audiovisual Media

+
+

Audiovisual elements in the application such as video, sound effects or audio (that is, podcasts) need related textual representations such as transcripts, captions or descriptions. They also should never auto-play and playback controls should be provided to the user.

+
+
+
+

7.12. Accessibility with Angular Material

+
+

The a11y package provides a number of tools to improve accessibility. Import

+
+
+
+
import { A11yModule } from '@angular/cdk/a11y';
+
+
+
+
+

7.13. ListKeyManager

+
+

ListKeyManager manages the active option in a list of items based on keyboard interaction. Intended to be used with components that correspond to a role="menu" or role="listbox" pattern . Any component that uses a ListKeyManager will generally do three things:

+
+
+
    +
  • +

    Create a @ViewChildren query for the options being managed.

    +
  • +
  • +

    Initialize the ListKeyManager, passing in the options.

    +
  • +
  • +

    Forward keyboard events from the managed component to the ListKeyManager.

    +
  • +
+
+
+

Each option should implement the ListKeyManagerOption interface:

+
+
+
+
interface ListKeyManagerOption {
+  disabled?: boolean;
+  getLabel?(): string;
+}
+
+
+
+
+

7.14. Types of ListKeyManager

+
+

There are two varieties of ListKeyManager, FocusKeyManager and ActiveDescendantKeyManager.

+
+
+
+

7.15. FocusKeyManager

+
+

Used when options will directly receive browser focus. Each item managed must implement the FocusableOption interface:

+
+
+
+
interface FocusableOption extends ListKeyManagerOption {
+  focus(): void;
+}
+
+
+
+
+

7.16. ActiveDescendantKeyManager

+
+

Used when options will be marked as active via aria-activedescendant. Each item managed must implement the Highlightable interface:

+
+
+
+
interface Highlightable extends ListKeyManagerOption {
+  setActiveStyles(): void;
+  setInactiveStyles(): void;
+}
+
+
+
+

Each item must also have an ID bound to the listbox’s or menu’s aria-activedescendant.

+
+
+
+

7.17. FocusTrap

+
+

The cdkTrapFocus directive traps Tab key focus within an element. This is intended to be used to create accessible experience for components like modal dialogs, where focus must be constrained. This directive is declared in A11yModule.

+
+
+

This directive will not prevent focus from moving out of the trapped region due to mouse interaction.

+
+
+

For example:

+
+
+
+
<div class="my-inner-dialog-content" cdkTrapFocus>
+  <!-- Tab and Shift + Tab will not leave this element. -->
+</div>
+
+
+
+
+

7.18. Regions

+
+

Regions can be declared explicitly with an initial focus element by using the cdkFocusRegionStart, cdkFocusRegionEnd and cdkFocusInitial DOM attributes. When using the tab key, focus will move through this region and wrap around on either end.

+
+
+

For example:

+
+
+
+
<a mat-list-item routerLink cdkFocusRegionStart>Focus region start</a>
+<a mat-list-item routerLink>Link</a>
+<a mat-list-item routerLink cdkFocusInitial>Initially focused</a>
+<a mat-list-item routerLink cdkFocusRegionEnd>Focus region end</a>
+
+
+
+
+

7.19. InteractivityChecker

+
+

InteractivityChecker is used to check the interactivity of an element, capturing disabled, visible, tabbable, and focusable states for accessibility purposes.

+
+
+
+

7.20. LiveAnnouncer

+
+

LiveAnnouncer is used to announce messages for screen-reader users using an aria-live region.

+
+
+

For example:

+
+
+
+
@Component({...})
+export class MyComponent {
+
+ constructor(liveAnnouncer: LiveAnnouncer) {
+   liveAnnouncer.announce("Hey Google");
+ }
+}
+
+
+
+
+

7.21. API reference for Angular CDK a11y

+ + +
+
+

7.22. Angular Elements

+ +
+
+

7.23. What are Angular Elements?

+
+

Angular elements are Angular components packaged as custom elements, a web standard for defining new HTML elements in a framework-agnostic way.

+
+
+

Custom elements are a Web Platform feature currently supported by Chrome, Firefox, Opera, and Safari, and available in other browsers through Polyfills. A custom element extends HTML by allowing you to define a tag whose content is created and controlled by JavaScript code. The browser maintains a CustomElementRegistry of defined custom elements (also called Web Components), which maps an instantiable JavaScript class to an HTML tag.

+
+
+
+

7.24. Why use Angular Elements?

+
+

Angular Elements allows Angular to work with different frameworks by using input and output elements. This allows Angular to work with many different frameworks if needed. This is an ideal situation if a slow transformation of an application to Angular is needed or some Angular needs to be added in other web applications(For example. ASP.net, JSP etc )

+
+
+
+

7.25. Negative points about Elements

+
+

Angular Elements is really powerful, but since the transition between views is going to be handled by another framework or by plain HTML/JavaScript, using the Angular Router is not possible; the view transitions have to be handled manually. This fact also eliminates the possibility of just porting an application completely.

+
+
+
+

7.26. How to use Angular Elements?

+
+

In a generalized way, a simple Angular component could be transformed to an Angular Element with these steps:

+
+
+
+

7.27. Installing Angular Elements

+
+

The first step is going to be to install the library using our preferred package manager:

+
+
+
+

7.28. NPM

+
+
+
npm install @angular/elements
+
+
+
+
+

7.29. YARN

+
+
+
yarn add @angular/elements
+
+
+
+
+

7.30. Preparing the components in the modules

+
+

Inside the app.module.ts, in addition to the normal declaration of the components inside declarations, the modules inside imports and the services inside providers, the components need to be added in entryComponents. If there are components that have their own module, the same logic is going to be applied for them, only adding in the app.module.ts the components that do not have their own module. Here is an example of this:

+
+
+
+
....
+@NgModule({
+  declarations: [
+    DishFormComponent,
+    DishViewComponent
+  ],
+  imports: [
+    CoreModule,  // Module containing Angular Materials
+    FormsModule
+  ],
+  entryComponents: [
+    DishFormComponent,
+    DishViewComponent
+  ],
+  providers: [DishShareService]
+})
+....
+
+
+
+

After that is done, the constructor of the module is going to be modified to use injector and bootstrap the application defining the components. This is going to allow the Angular Element to get the injections and to define a component tag that will be used later:

+
+
+
+
....
+})
+export class AppModule {
+  constructor(private injector: Injector) {
+
+  }
+
+  ngDoBootstrap() {
+    const el = createCustomElement(DishFormComponent, {injector: this.injector});
+    customElements.define('dish-form', el);
+
+    const elView = createCustomElement(DishViewComponent, {injector: this.injector});
+    customElements.define('dish-view', elView);
+  }
+}
+....
+
+
+
+
+

7.31. A component example

+
+

In order to be able to use a component, @Input() and @Output() variables are used. These variables are going to be the ones that will allow the Angular Element to communicate with the framework/JavaScript:

+
+
+

Component html

+
+
+
+
<mat-card>
+    <mat-grid-list cols="1" rowHeight="100px" rowWidth="50%">
+				<mat-grid-tile colspan="1" rowspan="1">
+					<span>{{ platename }}</span>
+				</mat-grid-tile>
+				<form (ngSubmit)="onSubmit(dishForm)" #dishForm="ngForm">
+					<mat-grid-tile colspan="1" rowspan="1">
+						<mat-form-field>
+							<input matInput placeholder="Name" name="name" [(ngModel)]="dish.name">
+						</mat-form-field>
+					</mat-grid-tile>
+					<mat-grid-tile colspan="1" rowspan="1">
+						<mat-form-field>
+							<textarea matInput placeholder="Description" name="description" [(ngModel)]="dish.description"></textarea>
+						</mat-form-field>
+					</mat-grid-tile>
+					<mat-grid-tile colspan="1" rowspan="1">
+						<button mat-raised-button color="primary" type="submit">Submit</button>
+					</mat-grid-tile>
+				</form>
+		</mat-grid-list>
+</mat-card>
+
+
+
+

Component ts

+
+
+
+
@Component({
+  templateUrl: './dish-form.component.html',
+  styleUrls: ['./dish-form.component.scss']
+})
+export class DishFormComponent implements OnInit {
+
+  @Input() platename;
+
+  @Input() platedescription;
+
+  @Output()
+  submitDishEvent = new EventEmitter();
+
+  submitted = false;
+  dish = {name: '', description: ''};
+
+  constructor(public dishShareService: DishShareService) { }
+
+  ngOnInit() {
+    this.dish.name = this.platename;
+    this.dish.description = this.platedescription;
+  }
+
+  onSubmit(dishForm: NgForm): void {
+    this.dishShareService.createDish(dishForm.value.name, dishForm.value.description);
+    this.submitDishEvent.emit('dishSubmited');
+  }
+
+}
+
+
+
+

In this file there are definitions of multiple variables that will be used as input and output. Since the input variables are going to be used directly by html, only lowercase and underscore strategies can be used for them. On the onSubmit(dishForm: NgForm) a service is used to pass these variables to another component. Finally, as a last thing, the selector inside @Component has been removed since a tag that will be used dynamically was already defined in the last step.

+
+
+
+

7.32. Solving the error

+
+

In order to be able to use this Angular Element a Polyfills/Browser support related error needs to be solved. This error can be solved in two ways:

+
+
+
+

7.33. == Changing the target

+
+

One solution is to change the target in tsconfig.json to es2015. This might not be doable for every application since maybe a specific target is required.

+
+
+
+

7.34. == Installing Polyfills

+
+

Another solution is to install a polyfill library. In order to do so, the library is going to be installed with a package manager:

+
+
+

Yarn

+
+
+
+
yarn add @webcomponents/webcomponentsjs
+
+
+
+

Npm

+
+
+
+
npm install @webcomponents/webcomponentsjs
+
+
+
+

After the package manager has finished, inside the src folder a new file polyfills.ts is found. To solve the error, importing the corresponding adapter (custom-elements-es5-adapter.js) is necessary:

+
+
+
+
....
+/***************************************************************************************************
+ * APPLICATION IMPORTS
+ */
+
+import '@webcomponents/webcomponentsjs/custom-elements-es5-adapter.js';
+....
+
+
+
+

If you want to learn more about polyfills in angular you can do it here

+
+
+
+

7.35. Building the Angular Element

+
+

First, before building the Angular Element, every element inside the app component except the module needs to be removed. After that, a bash script is created in the root folder. This script will bundle every necessary file into a single JS file.

+
+
+
+
ng build "projectName" --configuration production --output-hashing=none && cat dist/"projectName"/runtime.js dist/"projectName"/polyfills.js dist/"projectName"/scripts.js dist/"projectName"/main.js > ./dist/"projectName"/"nameWantedAngularElement".js
+
+
+
+

After executing the bash script, it will generate inside the path dist/"projectName" (or dist/apps/projectname in a Nx workspace) a JS file named "nameWantedAngularElement".js and a css file.

+
+
+
+ +
+

The library ngx-build-plus allows to add different options when building. In addition, it solves some errors that will occur when trying to use multiple angular elements in an application. In order to use it, yarn or npm can be used:

+
+
+

Yarn

+
+
+
+
yarn add ngx-build-plus
+
+
+
+

Npm

+
+
+
+
npm install ngx-build-plus
+
+
+
+

If you want to add it to a specific sub project in your projects folder, use the --project:

+
+
+
+
.... ngx-build-plus --project "project-name"
+
+
+
+

Using this library and the following command, an isolated Angular Element which won’t have conflicts with others can be generated. This Angular Element will not have a polyfill, so the project where we use it will need to include a polyfill with the Angular Element requirements.

+
+
+
+
ng build "projectName" --output-hashing none --single-bundle true --configuration production --bundle-styles false
+
+
+
+

This command will generate three things:

+
+
+
    +
  1. +

    The main JS bundle

    +
  2. +
  3. +

    The script JS

    +
  4. +
  5. +

    The css

    +
  6. +
+
+
+

These files will be used later instead of the single JS generated in the last step.

+
+
+
+

7.37. == == Extra parameters

+
+

Here are some extra useful parameters that ngx-build-plus provides:

+
+
+
    +
  • +

    --keep-polyfills: This parameter is going to allow us to keep the polyfills. This needs to be used with caution, avoiding using multiple different polyfills that could cause an error is necessary.

    +
  • +
  • +

    --extraWebpackConfig webpack.extra.js: This parameter allows us to create a JavaScript file inside our Angular Elements project with the name of different libraries. Using webpack these libraries will not be included in the Angular Element. This is useful to lower the size of our Angular Element by removing libraries shared. Example:

    +
  • +
+
+
+
+
const webpack = require('webpack');
+
+module.exports = {
+    "externals": {
+        "rxjs": "rxjs",
+        "@angular/core": "ng.core",
+        "@angular/common": "ng.common",
+        "@angular/common/http": "ng.common.http",
+        "@angular/platform-browser": "ng.platformBrowser",
+        "@angular/platform-browser-dynamic": "ng.platformBrowserDynamic",
+        "@angular/compiler": "ng.compiler",
+        "@angular/elements": "ng.elements",
+        "@angular/router": "ng.router",
+        "@angular/forms": "ng.forms"
+    }
+}
+
+
+
+
+

==

+
+
+
  If some libraries are excluded from the `Angular Element` you will need to add the bundled UMD files of those libraries manually.
+== ==
+
+
+
+
+

7.38. Using the Angular Element

+
+

The Angular Element that got generated in the last step can be used in almost every framework. In this case, the Angular Element is going to be used in html:

+
+
+
Listing 37. Sample index.html version without ngx-build-plus
+
+
<html>
+    <head>
+        <link rel="stylesheet" href="styles.css">
+    </head>
+    <body>
+        <div id="container">
+
+        </div>
+        <!--Use of the element non dynamically-->
+        <!--<plate-form platename="test" platedescription="test"></plate-form>-->
+        <script src="./devon4ngAngularElements.js"> </script>
+        <script>
+                var elContainer = document.getElementById('container');
+                var el= document.createElement('dish-form');
+                el.setAttribute('platename','test');
+                el.setAttribute('platedescription','test');
+                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+                elContainer.appendChild(el);
+        </script>
+    </body>
+</html>
+
+
+
+
Listing 38. Sample index.html version with ngx-build-plus
+
+
<html>
+    <head>
+        <link rel="stylesheet" href="styles.css">
+    </head>
+    <body>
+        <div id="container">
+
+        </div>
+        <!--Use of the element non dynamically-->
+        <!--<plate-form platename="test" platedescription="test"></plate-form>-->
+         <script src="./polyfills.js"> </script> <!-- Created using --keep-polyfills options -->
+        <script src="./scripts.js"> </script>
+         <script src="./main.js"> </script>
+        <script>
+                var elContainer = document.getElementById('container');
+                var el= document.createElement('dish-form');
+                el.setAttribute('platename','test');
+                el.setAttribute('platedescription','test');
+                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+                elContainer.appendChild(el);
+        </script>
+    </body>
+</html>
+
+
+
+

In this html, the css generated in the last step is going to be imported inside the <head> and then, the JavaScript element is going to be imported at the end of the body. After that is done, there are two uses of Angular Elements in the html, one directly with use of the @Input() variables as parameters commented in the html:

+
+
+
+
....
+        <!--Use of the element non dynamically-->
+        <!--<plate-form platename="test" platedescription="test"></plate-form>-->
+....
+
+
+
+

and one dynamically inside the script:

+
+
+
+
....
+        <script>
+                var elContainer = document.getElementById('container');
+                var el= document.createElement('dish-form');
+                el.setAttribute('platename','test');
+                el.setAttribute('platedescription','test');
+                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+                elContainer.appendChild(el);
+        </script>
+....
+
+
+
+

This JavaScript is an example of how to create dynamically an Angular Element, inserting attributes to fill our @Input() variables and listening to the @Output() that was defined earlier. This is done with:

+
+
+
+
                el.addEventListener('submitDishEvent',(ev)=>{
+                    var elView= document.createElement('dish-view');
+                    elContainer.innerHTML = '';
+                    elContainer.appendChild(elView);
+                });
+
+
+
+

This allows JavaScript to hook with the @Output() event emitter that was defined. When this event gets called, another component that was defined gets inserted dynamically.

+
+
+
+

7.39. Angular Element within another Angular project

+
+

In order to use an Angular Element within another Angular project the following steps need to be followed:

+
+
+
+

7.40. Copy bundled script and css to resources

+
+

First copy the generated .js and .css inside assets in the corresponding folder.

+
+
+
+

7.41. Add bundled script to angular.json

+
+

Inside angular.json both of the files that were copied in the last step are going to be included. This will be done both in test and in build. Including it in the test configuration will allow us to perform unit tests.

+
+
+
+
{
+....
+  "architect": {
+    ....
+    "build": {
+      ....
+      "styles": [
+        ....
+          "src/assets/css/devon4ngAngularElements.css"
+        ....
+      ]
+      ....
+      "scripts": [
+        "src/assets/js/devon4ngAngularElements.js"
+      ]
+      ....
+    }
+    ....
+    "test": {
+      ....
+      "styles": [
+        ....
+          "src/assets/css/devon4ngAngularElements.css"
+        ....
+      ]
+      ....
+      "scripts": [
+        "src/assets/js/devon4ngAngularElements.js"
+      ]
+      ....
+    }
+  }
+}
+
+
+
+

By declaring the files in the angular.json angular will take care of including them in a proper way.

+
+
+
+

==

+
+
+
  If you are using Nx, the configuration file `angular.json` might be named as `workspace.json`, depending on how you had setup the workspace. The structure of the file remains similar though.
+== ==
+
+
+
+
+

7.42. Using Angular Element

+
+

There are two ways that Angular Element can be used:

+
+
+
+

7.43. == Create component dynamically

+
+

In order to add the component in a dynamic way, first adding a container is necessary:

+
+
+

app.component.html

+
+
+
+
....
+<div id="container">
+</div>
+....
+
+
+
+

With this container created, inside the app.component.ts a method is going to be created. This method is going to find the container, create the dynamic element and append it into the container.

+
+
+

app.component.ts

+
+
+
+
export class AppComponent implements OnInit {
+  ....
+  ngOnInit(): void {
+    this.createComponent();
+  }
+  ....
+  createComponent(): void {
+    const container = document.getElementById('container');
+    const component = document.createElement('dish-form');
+    container.appendChild(component);
+  }
+  ....
+
+
+
+
+

7.44. == Using it directly

+
+

In order to use it directly on the templates, in the app.module.ts the CUSTOM_ELEMENTS_SCHEMA needs to be added:

+
+
+
+
....
+import { NgModule, CUSTOM_ELEMENTS_SCHEMA } from '@angular/core';
+....
+@NgModule({
+  ....
+  schemas: [ CUSTOM_ELEMENTS_SCHEMA ],
+
+
+
+

This is going to allow the use of the Angular Element in the templates directly:

+
+
+

app.component.html

+
+
+
+
....
+<div id="container">
+  <dish-form></dish-form>
+</div>
+
+
+
+

You can find a working example of Angular Elements in our devon4ts-samples repo by referring the samples named angular-elements and angular-elements-test.

+
+ +
+
+

7.45. Angular Lazy loading

+
+

When the development of an application starts, it just contains a small set of features so the app usually loads fast. However, as new features are added, the overall application size grows up and its loading speed decreases. It is in this context where Lazy loading finds its place. +Lazy loading is a design pattern that defers initialization of objects until it is needed, so, for example, users that just access to a website’s home page do not need to have other areas loaded. +Angular handles lazy loading through the routing module which redirects to requested pages. Those pages can be loaded at start or on demand.

+
+
+
+

7.46. An example with Angular

+
+

To explain how lazy loading is implemented using angular, a basic sample app is going to be developed. This app will consist in a window named "level 1" that contains two buttons that redirects to other windows in a "second level". It is a simple example, but useful to understand the relation between angular modules and lazy loading.

+
+
+
+Levels app structure +
+
Figure 16. Levels app structure.
+
+
+

This graphic shows that modules act as gates to access components "inside" them.

+
+
+

Because the objective of this guide is related mainly with logic, the html structure and SCSS styles are less relevant, but the complete code can be found as a sample here.

+
+
+
+

7.47. Implementation

+
+

First write in a console ng new level-app --routing, to generate a new project called level-app including an app-routing.module.ts file (--routing flag). If you are using Nx, the command would be nx generate @nrwl/angular:app level-app --routing in your Nx workspace.

+
+
+

In the file app.component.html delete all the content except the router-outlet tag.

+
+
+
Listing 39. File app.component.html
+
+
<router-outlet></router-outlet>
+
+
+
+

The next steps consist of creating feature modules.

+
+
+
    +
  • +

    run ng generate module first --routing to generate a module named first.

    +
  • +
  • +

    run ng generate module first/second-left --routing to generate a module named second-left under first.

    +
  • +
  • +

    run ng generate module first/second-right --routing to generate a module second-right under first.

    +
  • +
  • +

    run ng generate component first/first to generate a component named first inside the module first.

    +
  • +
  • +

    run ng generate component first/second-left/content to generate a component content inside the module second-left.

    +
  • +
  • +

    run ng generate component first/second-right/content to generate a component content inside the module second-right.

    +
  • +
+
+
+
+

==

+
+
+
  If you are using Nx, you have to specify the project name (level-app) along with the --project flag. For example, command for generating the first module will be `ng generate module first --project=level-app --routing`
+== ==
+
+
+
+

To move between components we have to configure the routes used:

+
+
+

In app-routing.module.ts add a path 'first' to FirstComponent and a redirection from '' to 'first'.

+
+
+
Listing 40. File app-routing.module.ts.
+
+
...
+import { FirstComponent } from './first/first/first.component';
+
+const routes: Routes = [
+  {
+    path: 'first',
+    component: FirstComponent
+  },
+  {
+    path: '',
+    redirectTo: 'first',
+    pathMatch: 'full',
+  },
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule],
+})
+export class AppRoutingModule {}
+
+
+
+

In app.module.ts import the module which includes FirstComponent.

+
+
+
Listing 41. File app.module.ts
+
+
....
+import { FirstModule } from './first/first.module';
+
+@NgModule({
+  ...
+  imports: [
+    ....
+    FirstModule
+  ],
+  ...
+})
+export class AppModule { }
+
+
+
+

In first-routing.module.ts add routes that direct to the content of SecondRightModule and SecondLeftModule. The content of both modules have the same name so, in order to avoid conflicts the name of the components are going to be changed using as ( original-name as new-name).

+
+
+
Listing 42. File first-routing.module.ts
+
+
...
+import { ContentComponent as ContentLeft} from './second-left/content/content.component';
+import { ContentComponent as ContentRight} from './second-right/content/content.component';
+import { FirstComponent } from './first/first.component';
+
+const routes: Routes = [
+  {
+    path: '',
+    component: FirstComponent
+  },
+  {
+    path: 'first/second-left',
+    component: ContentLeft
+  },
+  {
+    path: 'first/second-right',
+    component: ContentRight
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule]
+})
+export class FirstRoutingModule { }
+
+
+
+

In first.module.ts import SecondLeftModule and SecondRightModule.

+
+
+
Listing 43. File first.module.ts
+
+
...
+import { SecondLeftModule } from './second-left/second-left.module';
+import { SecondRightModule } from './second-right/second-right.module';
+
+@NgModule({
+  ...
+  imports: [
+    ...
+    SecondLeftModule,
+    SecondRightModule,
+  ]
+})
+export class FirstModule { }
+
+
+
+

Using the current configuration, we have a project that loads all the modules in an eager way. Run ng serve (with --project=level-app in an Nx workspace) to see what happens.

+
+
+

First, during the compilation we can see that just a main file is built.

+
+
+
+Compile eager +
+
Figure 17. Compile eager.
+
+
+

If we go to http://localhost:4200/first and open developer options (F12 on Chrome), it is found that a document named "first" is loaded.

+
+
+
+First level eager +
+
Figure 18. First level eager.
+
+
+

If we click on [Go to right module] a second level module opens, but there is no 'second-right' document.

+
+
+
+Second level right eager +
+
Figure 19. Second level right eager.
+
+
+

But, typing the URL directly will load 'second-right' but no 'first', even if we click on [Go back]

+
+
+
+Second level right eager +
+
Figure 20. Second level right eager direct URL.
+
+
+

Modifying an angular application to load its modules lazily is easy, you have to change the routing configuration of the desired module (for example FirstModule).

+
+
+
Listing 44. File app-routing.module.ts.
+
+
const routes: Routes = [
+  {
+    path: 'first',
+    loadChildren: () => import('./first/first.module').then(m => m.FirstModule),
+  },
+  {
+    path: '',
+    redirectTo: 'first',
+    pathMatch: 'full',
+  },
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule],
+})
+export class AppRoutingModule {}
+
+
+
+

Notice that instead of loading a component, you dynamically import it in a loadChildren attribute because modules act as gates to access components "inside" them. Updating the app to load lazily has four consequences:

+
+
+
    +
  1. +

    No component attribute.

    +
  2. +
  3. +

    No import of FirstComponent.

    +
  4. +
  5. +

    FirstModule import has to be removed from the imports array at app.module.ts.

    +
  6. +
  7. +

    Change of context.

    +
  8. +
+
+
+

If we check first-routing.module.ts again, we can see that the path for ContentLeft and ContentRight is set to 'first/second-left' and 'first/second-right' respectively, so writing http://localhost:4200/first/second-left will redirect us to ContentLeft. However, after loading a module with loadChildren setting the path to 'second-left' and 'second-right' is enough because it acquires the context set by AppRoutingModule.

+
+
+
Listing 45. File first-routing.module.ts
+
+
const routes: Routes = [
+  {
+    path: '',
+    component: FirstComponent
+  },
+  {
+    path: 'second-left',
+    component: ContentLeft
+  },
+  {
+    path: 'second-right',
+    component: ContentRight
+  }
+];
+
+
+
+

If we go to 'first' then FirstModule is situated in '/first' but also its children ContentLeft and ContentRight, so it is not necessary to write in their path 'first/second-left' and 'first/second-right', because that will situate the components on 'first/first/second-left' and 'first/first/second-right'.

+
+
+
+First level wrong path +
+
Figure 21. First level lazy wrong path.
+
+
+

When we compile an app with lazy loaded modules, files containing them will be generated

+
+
+
+First level lazy compilation +
+
Figure 22. First level lazy compilation.
+
+
+

And if we go to developer tools → network, we can find those modules loaded (if they are needed).

+
+
+
+First level lazy +
+
Figure 23. First level lazy.
+
+
+

To load the component ContentComponent of SecondLeftModule lazily, we have to load SecondLeftModule as a children of FirstModule:

+
+
+
    +
  • +

    Change component to loadChildren and reference SecondLeftModule.

    +
  • +
+
+
+
Listing 46. File first-routing.module.ts.
+
+
const routes: Routes = [
+  {
+    path: '',
+    component: FirstComponent
+  },
+  {
+    path: 'second-left',
+    loadChildren: () => import('./second-left/second-left.module').then(m => m.SecondLeftModule),
+  },
+  {
+    path: 'second-right',
+    component: ContentRight
+  }
+];
+
+
+
+
    +
  • +

    Remove SecondLeftModule at first.component.ts

    +
  • +
  • +

    Route the components inside SecondLeftModule. Without this step nothing would be displayed.

    +
  • +
+
+
+
Listing 47. File second-left-routing.module.ts.
+
+
...
+import { ContentComponent } from './content/content.component';
+
+const routes: Routes = [
+  {
+    path: '',
+    component: ContentComponent
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule]
+})
+export class SecondLeftRoutingModule { }
+
+
+
+
    +
  • +

    run ng serve to generate files containing the lazy modules.

    +
  • +
+
+
+
+Second level lazy +
+
Figure 24. Second level lazy loading compilation.
+
+
+

Clicking on [Go to left module] triggers the load of SecondLeftModule.

+
+
+
+Second level lazy network +
+
Figure 25. Second level lazy loading network.
+
+
+
+

7.48. Conclusion

+
+

Lazy loading is a pattern useful when new features are added, these features are usually identified as modules which can be loaded only if needed as shown in this document, reducing the time spent loading an application.

+
+ +
+
+

7.49. Angular Library

+
+

Angular CLI provides us with methods that allow the creation of a library. After that, using a package manager (either npm or yarn) the library can be built and packed, which will allow us later to install/publish it.

+
+
+
+

7.50. What's a library?

+
+

From wikipedia: a library is a collection of non-volatile resources used by computer programs, often for software development. These may include configuration data, documentation, help data, message templates, pre-written code and subroutines, classes, values or type specifications.

+
+
+
+

7.51. How to build a library

+
+

In this section, a library is going to be build step by step. Please note, we will be explaining the steps using both Angular CLI and Nx CLI. You are free to choose either one for your development.

+
+
+
+

7.52. 1. Creating an empty application

+
+

First, using Angular CLI we are going to generate an empty application which will be later filled with the generated library. In order to do so, Angular CLI allows us to add to ng new "application-name" an option (--create-application). This option is going to tell Angular CLI not to create the initial app project. This is convenient since a library is going to be generated in later steps. Using this command ng new "application-name" --create-application=false an empty project with the name wanted is created.

+
+
+
+
ng new "application-name" --create-application=false
+
+
+
+

This step is much more easier and straight-forward when using Nx. Nx allows us to work in a monorepo workspace, where you can develop a project as an application, or a library, or a tool. You can follow this guide to get started with Nx. +The command for generating a library in Nx is nx generate @nrwl/angular:library library-name --publishable --importPath=library-name. This will create an empty angular application which we can modify and publish as a library.

+
+
+
+

7.53. 2. Generating a library

+
+

After generating an empty application, a library is going to be generated. Inside the folder of the project, the Angular CLI command ng generate library "library-name" is going to generate the library as a project (projects/"library-name"). As an addition, the option --prefix="library-prefix-wanted" allows us to switch the default prefix that Angular generated with (lib). Using the option to change the prefix the command will look like this ng generate library "library-name" --prefix="library-prefix-wanted".

+
+
+
+
ng generate library "library-name" --prefix="library-prefix-wanted"
+
+
+
+

If you are using Nx, this step is not needed as it is already covered in step 1. In this case, the library project will be generated in the libs folder of a Nx workspace.

+
+
+
+

7.54. 3. Modifying our library

+
+

In the last step we generated a library. This automatically generates a module, a service and a component inside projects/"library-name" that we can modify, adding new methods, components, etc. that we want to use in other projects. We can generate other elements using the usual Angular CLI generate commands adding the option --project="library-name"; this will allow us to generate elements within our project. An example of this is: ng generate service "name" --project="library-name".

+
+
+
+
ng generate "element" "name" --project="library-name"
+
+
+
+

You can use the same command as above in a Nx workspace.

+
+
+
+

7.55. 4. Exporting the generated things

+
+

Inside the library (projects/"library-name") there’s a public_api.ts which is the file that exports the elements inside the library. (The file is named as index.ts in an Nx workspace). In case we generated other things, this file needs to be modified adding the extra exports with the generated elements. In addition, changing the library version is possible in the file package.json.

+
+
+
+

7.56. 5. Building our library

+
+

Once we added the necessary exports, in order to use the library in other applications, we need to build the library. The command ng build "library-name" is going to build the library, generating the necessary files in "project-name"/dist/"library-name".

+
+
+
+
ng build "library-name"
+
+
+
+

You can use the same command in Nx as well. Only the path for the generated files will be slightly different: "project-name"/dist/libs/"library-name"

+
+
+
+

7.57. 6. Packing the library

+
+

In this step we are going to pack the build library. In order to do so, we need to go inside dist/"library-name" (or dist/libs/"library-name") and then run either npm pack or yarn pack to generate a "library-name-version.tgz" file.

+
+
+
Listing 48. Packing using npm
+
+
npm pack
+
+
+
+
Listing 49. Packing using yarn
+
+
yarn pack
+
+
+
+
+

7.58. 7. Publishing to npm repository (optional)

+
+
    +
  • +

    Add a README.md and LICENSE file. The text inside README.md will be used in you npm package web page as documentation.

    +
  • +
  • +

    run npm adduser if you do not have a npm account to create it, otherwise run npm login and introduce your credentials.

    +
  • +
  • +

    run npm publish inside dist/"library-name" folder.

    +
  • +
  • +

    Check that the library is published: https://npmjs.com/package/library-name

    +
  • +
+
+
+
+

7.59. 8. Installing our library in other projects

+
+

In this step we are going to install/add the library on other projects.

+
+
+
+

7.60. == npm

+
+

In order to add the library in other applications, there are two ways:

+
+
+
    +
  • +

    Option 1: From inside the application where the library is going to get used, using the command npm install "path-to-tgz"/"library-name-version.tgz" allows us to install the .tgz generated in Packing the library.

    +
  • +
  • +

    Option 2: run npm install "library-name" to install it from npm repository.

    +
  • +
+
+
+
+

7.61. == yarn

+
+

To add the package using yarn:

+
+
+
    +
  • +

    Option 1: From inside the application where the library is going to get used, using the command yarn add "path-to-tgz"/"library-name-version.tgz" allows us to install the .tgz generated in Packing the library.

    +
  • +
  • +

    Option 2: run yarn add "library-name" to install it from npm repository.

    +
  • +
+
+
+
+

7.62. 9. Using the library

+
+

Finally, once the library was installed with either package manager, you can start using the elements from inside it like they would be used in a normal element inside the application. Example app.component.ts:

+
+
+
+
import { Component, OnInit } from '@angular/core';
+import { MyLibraryService } from 'my-library';
+
+@Component({
+  selector: 'app-root',
+  templateUrl: './app.component.html',
+  styleUrls: ['./app.component.scss']
+})
+export class AppComponent implements OnInit {
+
+  toUpper: string;
+
+  constructor(private myLibraryService: MyLibraryService) {}
+  title = 'devon4ng library test';
+  ngOnInit(): void {
+    this.toUpper = this.myLibraryService.firstLetterToUpper('test');
+  }
+}
+
+
+
+

Example app.component.html:

+
+
+
+
<!--The content below is only a placeholder and can be replaced.-->
+<div style="text-align:center">
+  <h1>
+    Welcome to {{ title }}!
+  </h1>
+  <img width="300" alt="Angular Logo" src="data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAyNTAgMjUwIj4KICAgIDxwYXRoIGZpbGw9IiNERDAwMzEiIGQ9Ik0xMjUgMzBMMzEuOSA2My4ybDE0LjIgMTIzLjFMMTI1IDIzMGw3OC45LTQzLjcgMTQuMi0xMjMuMXoiIC8+CiAgICA8cGF0aCBmaWxsPSIjQzMwMDJGIiBkPSJNMTI1IDMwdjIyLjItLjFWMjMwbDc4LjktNDMuNyAxNC4yLTEyMy4xTDEyNSAzMHoiIC8+CiAgICA8cGF0aCAgZmlsbD0iI0ZGRkZGRiIgZD0iTTEyNSA1Mi4xTDY2LjggMTgyLjZoMjEuN2wxMS43LTI5LjJoNDkuNGwxMS43IDI5LjJIMTgzTDEyNSA1Mi4xem0xNyA4My4zaC0zNGwxNy00MC45IDE3IDQwLjl6IiAvPgogIDwvc3ZnPg== ">
+</div>
+<h2>Here is my library service being used: {{toUpper}}</h2>
+<lib-my-library></lib-my-library>
+
+
+
+

Example app.module.ts:

+
+
+
+
import { BrowserModule } from '@angular/platform-browser';
+import { NgModule } from '@angular/core';
+
+import { AppRoutingModule } from './app-routing.module';
+import { AppComponent } from './app.component';
+
+import { MyLibraryModule } from 'my-library';
+@NgModule({
+  declarations: [
+    AppComponent
+  ],
+  imports: [
+    BrowserModule,
+    AppRoutingModule,
+    MyLibraryModule
+  ],
+  providers: [],
+  bootstrap: [AppComponent]
+})
+export class AppModule { }
+
+
+
+

The result from using the library:

+
+
+
+result +
+
+
+
+

7.63. devon4ng libraries

+
+

In devonfw/devon4ng-library you can find some useful libraries:

+
+
+
    +
  • +

    Authorization module: This devon4ng Angular module adds rights-based authorization to your Angular app.

    +
  • +
  • +

    Cache module: Use this devon4ng Angular module when you want to cache requests to server. You may configure it to store in cache only the requests you need and to set the duration you want.

    +
  • +
+
+ +
+
+

7.64. Angular Material Theming

+
+

Angular Material library offers UI components for developers. Those components follow Google Material design baselines, but characteristics like colors can be modified in order to adapt them to the needs of the client: corporate colors, corporate identity, dark themes, etc.

+
+
+
+

7.65. Theming basics

+
+

In Angular Material, a theme is created mixing multiple colors. Colors and its light and dark variants conform a palette. In general, a theme consists of the following palettes:

+
+
+
    +
  • +

    primary: Most used across screens and components.

    +
  • +
  • +

    accent: Floating action button and interactive elements.

    +
  • +
  • +

    warn: Error state.

    +
  • +
  • +

    foreground: Text and icons.

    +
  • +
  • +

    background: Element backgrounds.

    +
  • +
+
+
+
+Theme palette +
+
Figure 26. Palettes and variants.
+
+
+

In angular material, a palette is represented as a SCSS map.

+
+
+
+SCSS map +
+
Figure 27. SCSS map and palettes.
+
+
+ + + + + +
+ + +Some components can be forced to use primary, accent or warn palettes using the attribute color, for example: <mat-toolbar color="primary">. +
+
+
+
+

7.66. Pre-built themes

+
+

Available pre-built themes:

+
+
+
    +
  • +

    deeppurple-amber.css

    +
  • +
+
+
+
+`deeppurple-amber theme` +
+
Figure 28. deeppurple-amber theme.
+
+
+
    +
  • +

    indigo-pink.css

    +
  • +
+
+
+
+indigo-pink theme +
+
Figure 29. indigo-pink theme.
+
+
+
    +
  • +

    pink-bluegrey.css

    +
  • +
+
+
+
+` pink-bluegrey theme` +
+
Figure 30. pink-bluegrey theme.
+
+
+
    +
  • +

    purple-green.css

    +
  • +
+
+
+
+purple-green theme +
+
Figure 31. purple-green theme.
+
+
+

The pre-built themes can be added using @import.

+
+
+
+
@import '@angular/material/prebuilt-themes/deeppurple-amber.css';
+
+
+
+
+

7.67. Custom themes

+
+

Sometimes pre-built themes do not meet the needs of a project, because color schemas are too specific or do not incorporate branding colors, in those situations custom themes can be built to offer a better solution to the client.

+
+
+

For this topic, we are going to use a basic layout project that can be found in devon4ts-samples repository.

+
+
+
+

7.68. Basics

+
+

Before starting writing custom themes, there are some necessary things that have to be mentioned:

+
+
+
    +
  • +

    Add a default theme: The project mentioned before has just one global SCSS style sheet styles.scss that includes indigo-pink.scss which will be the default theme.

    +
  • +
  • +

    Add @import '~@angular/material/theming'; at the beginning of every style sheet to be able to use Angular Material pre-built color palettes and functions.

    +
  • +
  • +

    Add @include mat-core(); once per project, so if you are writing multiple themes in multiple files you could import those files from a 'central' one (for example styles.scss). This includes all common styles that are used by multiple components.

    +
  • +
+
+
+
+Theme files structure +
+
Figure 32. Theme files structure.
+
+
+
+

7.69. Basic custom theme

+
+

To create a new custom theme, the .scss file containing it has to have imported the angular _theming.scss file (angular/material/theming) file and mat-core included. _theming.scss includes multiple color palettes and some functions that we are going to see below. The file for this basic theme is going to be named styles-custom-dark.scss.

+
+
+

First, declare new variables for primary, accent and warn palettes. Those variables are going to store the result of the function mat-palette.

+
+
+

mat-palette accepts four arguments: base color palette, main, lighter and darker variants (See Palettes and variants.) and returns a new palette including some additional map values: default, lighter and darker ([id_scss_map]). Only the first argument is mandatory.

+
+
+
Listing 50. File styles-custom-dark.scss.
+
+
$custom-dark-theme-primary: mat-palette($mat-pink);
+$custom-dark-theme-accent: mat-palette($mat-blue);
+$custom-dark-theme-warn: mat-palette($mat-red);
+
+
+
+

In this example we are using colors available in _theming.scss: mat-pink, mat-blue, mat-red. If you want to use a custom color you need to define a new map, for instance:

+
+
+
Listing 51. File styles-custom-dark.scss custom pink.
+
+
$my-pink: (
+    50 : #fcf3f3,
+    100 : #f9e0e0,
+    200 : #f5cccc,
+    300 : #f0b8b8,
+    500 : #ea9999,
+    900 : #db6b6b,
+    A100 : #ffffff,
+    A200 : #ffffff,
+    A400 : #ffeaea,
+    A700 : #ffd0d0,
+    contrast: (
+        50 : #000000,
+        100 : #000000,
+        200 : #000000,
+        300 : #000000,
+        900 : #000000,
+        A100 : #000000,
+        A200 : #000000,
+        A400 : #000000,
+        A700 : #000000,
+    )
+);
+
+$custom-dark-theme-primary: mat-palette($my-pink);
+...
+
+
+
+ + + + + +
+ + +Some pages allow creating these palettes easily, for instance: http://mcg.mbitson.com +
+
+
+

Until now, we just have defined primary, accent and warn palettes but what about foreground and background? Angular material has two functions to change both:

+
+
+
    +
  • +

    mat-light-theme: Receives as arguments primary, accent and warn palettes and return a theme whose foreground is basically black (texts, icons, …​), the background is white and the other palettes are the received ones.

    +
  • +
+
+
+
+`deeppurple-amber theme` +
+
Figure 33. Custom light theme.
+
+
+
    +
  • +

    mat-dark-theme: Similar to mat-light-theme but returns a theme whose foreground is basically white and background black.

    +
  • +
+
+
+
+`deeppurple-amber theme` +
+
Figure 34. Custom dark theme.
+
+
+

For this example we are going to use mat-dark-theme and save its result in $custom-dark-theme.

+
+
+
Listing 52. File styles-custom-dark.scss updated with mat-dark-theme.
+
+
...
+
+$custom-dark-theme: mat-dark-theme(
+  $custom-dark-theme-primary,
+  $custom-dark-theme-accent,
+  $custom-dark-theme-warn
+);
+
+
+
+

To apply the saved theme, we have to go to styles.scss and import our styles-custom-dark.scss and include a function called angular-material-theme using the theme variable as argument.

+
+
+
Listing 53. File styles.scss.
+
+
...
+@import 'styles-custom-dark.scss';
+@include angular-material-theme($custom-dark-theme);
+
+
+
+

If we have multiple themes it is necessary to add the include statement inside a css class and use it in src/index.html → app-root component.

+
+
+
Listing 54. File styles.scss updated with custom-dark-theme class.
+
+
...
+@import 'styles-custom-dark.scss';
+
+.custom-dark-theme {
+  @include angular-material-theme($custom-dark-theme);
+}
+
+
+
+
Listing 55. File src/index.html.
+
+
...
+<app-root class="custom-dark-theme"></app-root>
+...
+
+
+
+

This will apply $custom-dark-theme theme for the entire application.

+
+
+
+

7.70. Full custom theme

+
+

Sometimes it is necessary to customize elements other than the background and foreground; in those situations we have to create a new function similar to mat-light-theme and mat-dark-theme. Let’s focus on mat-light-theme:

+
+
+
Listing 56. Source code of mat-light-theme
+
+
@function mat-light-theme($primary, $accent, $warn: mat-palette($mat-red)) {
+  @return (
+    primary: $primary,
+    accent: $accent,
+    warn: $warn,
+    is-dark: false,
+    foreground: $mat-light-theme-foreground,
+    background: $mat-light-theme-background,
+  );
+}
+
+
+
+

As we can see, mat-light-theme takes three arguments and returns a map including them as primary, accent and warn color; but there are three more keys in that map: is-dark, foreground and background.

+
+
+
    +
  • +

    is-dark: Boolean true if it is a dark theme, false otherwise.

    +
  • +
  • +

    background: Map that stores the color for multiple background elements.

    +
  • +
  • +

    foreground: Map that stores the color for multiple foreground elements.

    +
  • +
+
+
+

To show which elements can be colored, let’s create a new theme in a file styles-custom-cap.scss:

+
+
+
Listing 57. File styles-custom-cap.scss: Background and foreground variables.
+
+
@import '~@angular/material/theming';
+
+// custom background and foreground palettes
+$my-cap-theme-background: (
+  status-bar: #0070ad,
+  app-bar: map_get($mat-blue, 900),
+  background: #12abdb,
+  hover: rgba(white, 0.04),
+  card: map_get($mat-red, 800),
+  dialog: map_get($mat-grey, 800),
+  disabled-button: $white-12-opacity,
+  raised-button: map-get($mat-grey, 800),
+  focused-button: $white-6-opacity,
+  selected-button: map_get($mat-grey, 900),
+  selected-disabled-button: map_get($mat-grey, 800),
+  disabled-button-toggle: black,
+  unselected-chip: map_get($mat-grey, 700),
+  disabled-list-option: black,
+);
+
+$my-cap-theme-foreground: (
+  base: yellow,
+  divider: $white-12-opacity,
+  dividers: $white-12-opacity,
+  disabled: rgba(white, 0.3),
+  disabled-button: rgba(white, 0.3),
+  disabled-text: rgba(white, 0.3),
+  hint-text: rgba(white, 0.3),
+  secondary-text: rgba(white, 0.7),
+  icon: white,
+  icons: white,
+  text: white,
+  slider-min: white,
+  slider-off: rgba(white, 0.3),
+  slider-off-active: rgba(white, 0.3),
+);
+
+
+
+

Function which uses the variables defined before to create a new theme:

+
+
+
Listing 58. File styles-custom-cap.scss: Creating a new theme function.
+
+
// instead of creating a theme with mat-light-theme or mat-dark-theme,
+// we will create our own theme-creating function that lets us apply our own foreground and background palettes.
+@function create-my-cap-theme($primary, $accent, $warn: mat-palette($mat-red)) {
+  @return (
+    primary: $primary,
+    accent: $accent,
+    warn: $warn,
+    is-dark: false,
+    foreground: $my-cap-theme-foreground,
+    background: $my-cap-theme-background
+  );
+}
+
+
+
+

Calling the new function and storing its value in $custom-cap-theme.

+
+
+
Listing 59. File styles-custom-cap.scss: Storing the new theme.
+
+
// We use create-my-cap-theme instead of mat-light-theme or mat-dark-theme
+$custom-cap-theme-primary: mat-palette($mat-green);
+$custom-cap-theme-accent: mat-palette($mat-blue);
+$custom-cap-theme-warn: mat-palette($mat-red);
+
+$custom-cap-theme: create-my-cap-theme(
+  $custom-cap-theme-primary,
+  $custom-cap-theme-accent,
+  $custom-cap-theme-warn
+);
+
+
+
+

After defining our new theme, we can import it from styles.scss.

+
+
+
Listing 60. File styles.scss updated with custom-cap-theme class.
+
+
...
+@import 'styles-custom-cap.scss';
+.custom-cap-theme {
+  @include angular-material-theme($custom-cap-theme);
+}
+
+
+
+
+

7.71. Multiple themes and overlay-based components

+
+

Certain components (e.g. menu, select, dialog, etc.) that are inside of a global overlay container require an additional step to be affected by the theme’s CSS class selector.

+
+
+
Listing 61. File app.module.ts
+
+
import {OverlayContainer} from '@angular/cdk/overlay';
+
+@NgModule({
+  // ...
+})
+export class AppModule {
+  constructor(overlayContainer: OverlayContainer) {
+    overlayContainer.getContainerElement().classList.add('custom-cap-theme');
+  }
+}
+
+
+
+ +
+

7.73. Angular Progressive Web App

+
+

Progressive web applications (PWA) are web applications that offer a better user experience than traditional ones. In general, they solve problems related to reliability and speed:

+
+
+
    +
  • +

    Reliability: PWAs are stable. In this context, stability means that even with slow connections or even with no network at all, the application still works. To achieve this, some basic resources like styles, fonts, requests, etc. are stored; due to this caching, it is not possible to assure that the content is always up-to-date.

    +
  • +
  • +

    Speed: When a user opens an application, he or she expects it to load almost immediately (around 53% of users abandon sites that take longer than 3 seconds to load, source: https://developers.google.com/web/progressive-web-apps/#fast).

    +
  • +
+
+
+

PWAs use a script called a service worker, which runs in the background and essentially acts as a proxy between the web app and the network, intercepting requests and acting depending on the network conditions.

+
+
+
+

7.74. Assumptions

+
+

This guide assumes that you already have installed:

+
+
+
    +
  • +

    NodeJS

    +
  • +
  • +

    npm package manager

    +
  • +
  • +

    Angular CLI / Nx CLI

    +
  • +
+
+
+
+

7.75. Sample Application

+
+
+My Thai Star recommendation +
+
Figure 35. Basic angular PWA.
+
+
+

To explain how to build a PWA using Angular, a basic application is going to be built. This app will be able to ask for resources and save them in the cache in order to work even offline.

+
+
+
+

7.76. Step 1: Create a new project

+
+

This step can be completed with one simple command using the Angular CLI: ng new <name>, where <name> is the name for the app. In this case, the app is going to be named basic-ng-pwa. If you are using Nx CLI, you can use the command nx generate @nrwl/angular:app <name> in your Nx workspace. You can follow this guide if you want to get started with Nx workspace.

+
+
+
+

7.77. Step 2: Create a service

+
+

Web applications usually use external resources, making it necessary to add services that can fetch those resources. This application gets a dish from My Thai Star’s back-end and shows it. To do so, a new service is going to be created.

+
+
+
    +
  • +

    go to project folder: cd basic-ng-pwa. If using Nx, go to the root folder of the workspace.

    +
  • +
  • +

    run ng generate service data. For Nx CLI, specify the project name with --project flag. So the command becomes ng generate service data --project=basic-ng-pwa

    +
  • +
  • +

    Modify data.service.ts, environment.ts, environment.prod.ts

    +
  • +
+
+
+

To retrieve data with this service, you have to import the module HttpClient and add it to the service’s constructor. Once added, use it to create a function getDishes() that sends an HTTP request to My Thai Star’s back-end. The URL of the back-end can be stored as an environment variable MY_THAI_STAR_DISH.

+
+
+

data.service.ts

+
+
+
+
  ...
+  import { HttpClient } from '@angular/common/http';
+  import { MY_THAI_STAR_DISH } from '../environments/environment';
+  ...
+
+  export class DataService {
+    constructor(private http: HttpClient) {}
+
+    /* Get data from Back-end */
+    getDishes() {
+      return this.http.get(MY_THAI_STAR_DISH);
+    }
+    ...
+  }
+
+
+
+

environments.ts

+
+
+
+
  ...
+  export const MY_THAI_STAR_DISH =
+  'https://mts-devonfw-core.cloud.okteto.net/api/services/rest/dishmanagement/v1/dish/1';
+  ...
+
+
+
+

environments.prod.ts

+
+
+
+
  ...
+  export const MY_THAI_STAR_DISH =
+  'https://mts-devonfw-core.cloud.okteto.net/api/services/rest/dishmanagement/v1/dish/1';
+  ...
+
+
+
+
+

7.78. Step 3: Use the service

+
+

The component AppComponent implements the interface OnInit and inside its method ngOnInit() the subscription to the services is done. When a dish arrives, it is saved and shown (app.component.html).

+
+
+
+
  ...
+  import { DataService } from './data.service';
+  export class AppComponent implements OnInit {
+  dish: { name: string; description: string } = { name: '', description: ''};
+
+  ...
+  ngOnInit() {
+    this.data
+      .getDishes()
+      .subscribe(
+        (dishToday: { dish: { name: string; description: string } }) => {
+          this.dish = {
+            name: dishToday.dish.name,
+            description: dishToday.dish.description,
+          };
+        },
+      );
+  }
+}
+
+
+
+
+

7.79. Step 4: Structures, styles and updates

+
+

This step shows the interesting code inside the sample app. The complete content can be found in devon4ts-samples.

+
+
+

index.html

+
+
+

To use the Montserrat font add the following link inside the head tag of the app’s index.html file.

+
+
+
+
  <link href="https://fonts.googleapis.com/css?family=Montserrat" rel="stylesheet">
+
+
+
+

styles.scss

+
+
+
+
  body {
+    ...
+    font-family: 'Montserrat', sans-serif;
+  }
+
+
+
+

app.component.ts

+
+
+

This file is also used to reload the app if there are any changes.

+
+
+
    +
  • +

    SwUpdate: This object comes inside the @angular/pwa package and it is used to detect changes and reload the page if needed.

    +
  • +
+
+
+
+
  ...
+  import { SwUpdate } from '@angular/service-worker';
+
+  export class AppComponent implements OnInit {
+
+  ...
+    constructor(updates: SwUpdate, private data: DataService) {
+      updates.available.subscribe((event) => {
+        updates.activateUpdate().then(() => document.location.reload());
+      });
+    }
+    ...
+  }
+
+
+
+
+

7.80. Step 5: Make it Progressive.

+
+

Install the Angular PWA package with ng add @angular/pwa --project=<name>. As before, substitute <name> with basic-ng-pwa.

+
+
+

The above command completes the following actions:

+
+
+
    +
  1. +

    Adds the @angular/service-worker package to your project.

    +
  2. +
  3. +

    Enables service worker build support in the CLI.

    +
  4. +
  5. +

    Imports and registers the service worker in the app module.

    +
  6. +
  7. +

    Updates the index.html file:

    +
    +
      +
    • +

      Includes a link to add the manifest.json file.

      +
    • +
    • +

      Adds meta tags for theme-color.

      +
    • +
    • +

      Installs icon files to support the installed Progressive Web App (PWA).

      +
    • +
    • +

      Creates the service worker configuration file called ngsw-config.json, which specifies the caching behaviors and other settings.

      +
    • +
    +
    +
  8. +
+
+
+
+

7.81. manifest.json

+
+

manifest.json is a file that allows to control how the app is displayed in places where native apps are displayed.

+
+
+

Fields

+
+
+

name: Name of the web application.

+
+
+

short_name: Short version of name.

+
+
+

theme_color: Default theme color for an application context.

+
+
+

background_color: Expected background color of the web application.

+
+
+

display: Preferred display mode.

+
+
+

scope: Navigation scope of this web application’s application context.

+
+
+

start_url: URL loaded when the user launches the web application.

+
+
+

icons: Array of icons that serve as representations of the web app.

+
+
+

Additional information can be found here.

+
+
+
+

7.82. ngsw-config.json

+
+

ngsw-config.json specifies which files and data URLs have to be cached and updated by the Angular service worker.

+
+
+

Fields

+
+
+
    +
  • +

    index: File that serves as index page to satisfy navigation requests.

    +
  • +
  • +

    assetGroups: Resources that are part of the app version that update along with the app.

    +
    +
      +
    • +

      name: Identifies the group.

      +
    • +
    • +

      installMode: How the resources are cached (pre-fetch or lazy).

      +
    • +
    • +

      updateMode: Caching behavior when a new version of the app is found (pre-fetch or lazy).

      +
    • +
    • +

      resources: Resources to cache. There are three groups.

      +
      +
        +
      • +

        files: Lists patterns that match files in the distribution directory.

        +
      • +
      • +

        urls: URL patterns matched at runtime.

        +
      • +
      +
      +
    • +
    +
    +
  • +
  • +

    dataGroups: Useful for API requests.

    +
    +
      +
    • +

      name: Identifies the group.

      +
    • +
    • +

      urls: URL patterns matched at runtime.

      +
    • +
    • +

      version: Indicates that the resources being cached have been updated in a backwards-incompatible way.

      +
    • +
    • +

      cacheConfig: Policy by which matching requests will be cached

      +
      +
        +
      • +

        maxSize: The maximum number of entries, or responses, in the cache.

        +
      • +
      • +

        maxAge: How long responses are allowed to remain in the cache.

        +
        +
          +
        • +

          d: days. (5d = 5 days).

          +
        • +
        • +

          h: hours

          +
        • +
        • +

          m: minutes

          +
        • +
        • +

          s: seconds. (5m20s = 5 minutes and 20 seconds).

          +
        • +
        • +

          u: milliseconds

          +
        • +
        +
        +
      • +
      • +

        timeout: How long the Angular service worker will wait for the network to respond before using a cached response. Same data format as maxAge.

        +
      • +
      • +

        strategy: Caching strategies (performance or freshness).

        +
      • +
      +
      +
    • +
    +
    +
  • +
  • +

    navigationUrls: List of URLs that will be redirected to the index file.

    +
  • +
+
+
+

Additional information can be found here.

+
+
+
+

7.83. Step 6: Configure the app

+
+

manifest.json

+
+
+

Default configuration.

+
+
+

 

+
+
+

ngsw-config.json

+
+
+

At assetGroups → resources → urls: In this field the google fonts API is added in order to use Montserrat font even without network.

+
+
+
+
  "urls": [
+          "https://fonts.googleapis.com/**"
+        ]
+
+
+
+

At the root of the json: A data group to cache API calls.

+
+
+
+
  {
+    ...
+    "dataGroups": [{
+      "name": "mythaistar-dishes",
+      "urls": [
+        "https://mts-devonfw-core.cloud.okteto.net/api/services/rest/dishmanagement/v1/dish/1"
+      ],
+      "cacheConfig": {
+        "maxSize": 100,
+        "maxAge": "1h",
+        "timeout": "10s",
+        "strategy": "freshness"
+      }
+    }]
+  }
+
+
+
+
+

7.84. Step 7: Check that your app is a PWA

+
+

To check if an app is a PWA, let’s compare its normal behavior against itself built for production. Run the commands below in the project’s root folder:

+
+
+

ng build --prod to build the app using production settings.(nx build <name> --prod in Nx CLI)

+
+
+

npm install http-server to install an npm module that can serve your built application. Documentation here.

+
+
+

Go to the dist/basic-ng-pwa/ folder running cd dist/basic-ng-pwa. In an Nx workspace, the path will be dist/apps/basic-ng-pwa

+
+
+

http-server -o to serve your built app.

+
+
+
+Http server running +
+
Figure 36. Http server running on localhost:8081.
+
+
+

 

+
+
+

In another console instance run ng serve (or nx serve basic-ng-pwa for Nx) to open the common app (not built).

+
+
+
+.Angular server running +
+
Figure 37. Angular server running on localhost:4200.
+
+
+

 

+
+
+

The first difference can be found on Developer tools → application, here it is seen that the PWA application (left) has a service worker and the common (right) one does not.

+
+
+
+Application comparison +
+
Figure 38. Application service worker comparison.
+
+
+

 

+
+
+

If the "offline" box is checked, it will force a disconnection from the network. In situations where users do not have connectivity or have a slow one, the PWA can still be accessed and used.

+
+
+
+Online offline apps +
+
Figure 39. Offline application.
+
+
+

 

+
+
+

Finally, browser extensions like Lighthouse can be used to test whether an application is progressive or not.

+
+
+
+Lighthouse report +
+
Figure 40. Lighthouse report.
+
+ +
+
+

7.85. APP_INITIALIZER

+ +
+
+

7.86. What is the APP_INITIALIZER pattern

+
+

The APP_INITIALIZER pattern allows an application to choose which configuration is going to be used at the start of the application. This is useful because it allows setting up different configurations, for example, for Docker or a remote configuration. This provides benefits since it is done at runtime, so there’s no need to recompile the whole application to switch configurations.

+
+
+
+

7.87. What is APP_INITIALIZER

+
+

APP_INITIALIZER allows providing a service in the initialization of the application in a @NgModule. It also allows using a factory, making it possible to create a singleton in the same service. An example can be found in MyThaiStar /core/config/config.module.ts:

+
+
+
+

==

+
+

The provider expects the return of a Promise. If it is using Observables, the method toPromise() allows switching from Observable to Promise.

+
+
+
+
import { NgModule, APP_INITIALIZER } from '@angular/core';
+import { HttpClientModule } from '@angular/common/http';
+
+import { ConfigService } from './config.service';
+
+@NgModule({
+  imports: [HttpClientModule],
+  providers: [
+    ConfigService,
+    {
+      provide: APP_INITIALIZER,
+      useFactory: ConfigService.factory,
+      deps: [ConfigService],
+      multi: true,
+    },
+  ],
+})
+export class ConfigModule {}
+
+
+
+

This is going to allow the creation of a ConfigService where, using a singleton, the service is going to load an external config depending on a route. This dependence with a route, allows to setup different configuration for docker etc. This is seen in the ConfigService of MyThaiStar:

+
+
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Config, config } from './config';
+
+@Injectable()
+export class ConfigService {
+  constructor(private httpClient: HttpClient) {}
+
+  static factory(appLoadService: ConfigService) {
+    return () => appLoadService.loadExternalConfig();
+  }
+
+  // this method gets external configuration calling /config endpoint
+  //and merges into config object
+  loadExternalConfig(): Promise<any> {
+    if (!environment.loadExternalConfig) {
+      return Promise.resolve({});
+    }
+
+    const promise = this.httpClient
+      .get('/config')
+      .toPromise()
+      .then((settings) => {
+        Object.keys(settings || {}).forEach((k) => {
+          config[k] = settings[k];
+        });
+        return settings;
+      })
+      .catch((error) => {
+        return 'ok, no external configuration';
+      });
+
+    return promise;
+  }
+
+  getValues(): Config {
+    return config;
+  }
+}
+
+
+
+

As mentioned earlier, you can see the use of a factory to create a singleton at the start. After that, loadExternalConfig is going to look for a Boolean inside the corresponding environment file inside the path src/environments/. This Boolean loadExternalConfig easily allows switching to an external config. If it is true, it generates a promise that overwrites the parameters of the local config, allowing the external config to be loaded. Finally, the last method getValues() returns the config file with the values (overwritten or not). The local config file from MyThaiStar can be seen here:

+
+
+
+
export enum BackendType {
+  IN_MEMORY,
+  REST,
+  GRAPHQL,
+}
+
+interface Role {
+  name: string;
+  permission: number;
+}
+
+interface Lang {
+  label: string;
+  value: string;
+}
+
+export interface Config {
+  version: string;
+  backendType: BackendType;
+  restPathRoot: string;
+  restServiceRoot: string;
+  pageSizes: number[];
+  pageSizesDialog: number[];
+  roles: Role[];
+  langs: Lang[];
+}
+
+export const config: Config = {
+  version: 'dev',
+  backendType: BackendType.REST,
+  restPathRoot: 'http://localhost:8081/mythaistar/',
+  restServiceRoot: 'http://localhost:8081/mythaistar/services/rest/',
+  pageSizes: [8, 16, 24],
+  pageSizesDialog: [4, 8, 12],
+  roles: [
+    { name: 'CUSTOMER', permission: 0 },
+    { name: 'WAITER', permission: 1 },
+  ],
+  langs: [
+    { label: 'English', value: 'en' },
+    { label: 'Deutsch', value: 'de' },
+    { label: 'Español', value: 'es' },
+    { label: 'Català', value: 'ca' },
+    { label: 'Français', value: 'fr' },
+    { label: 'Nederlands', value: 'nl' },
+    { label: 'हिन्दी', value: 'hi' },
+    { label: 'Polski', value: 'pl' },
+    { label: 'Русский', value: 'ru' },
+    { label: 'български', value: 'bg' },
+  ],
+};
+
+
+
+

Finally, inside a environment file src/environments/environment.ts the use of the Boolean loadExternalConfig is seen:

+
+
+
+
// The file contents for the current environment will overwrite these during build.
+// The build system defaults to the dev environment which uses `environment.ts`, but if you do
+// `ng build --env=prod` then `environment.prod.ts` will be used instead.
+// The list of which env maps to which file can be found in `.angular-cli.json`.
+
+export const environment: {
+  production: boolean;
+  loadExternalConfig: boolean;
+} = { production: false, loadExternalConfig: false };
+
+
+
+
+

7.88. Creating a APP_INITIALIZER configuration

+
+

This section is going to be used to create a new APP_INITIALIZER basic example. For this, a basic app with angular is going to be generated using ng new "appname" substituting appname for the name of the app opted. +If you are using Nx, the command would be nx generate @nrwl/angular:app "appname" in your Nx workspace. Click here to get started with using Nx.

+
+
+
+

7.89. Setting up the config files

+ +
+
+

7.90. Docker external configuration (Optional)

+
+

This section only applies if the app for which you are setting up this type of configuration has a Docker configuration.

+
+
+

1.- Create in the root folder /docker-external-config.json. This external config is going to be used when the application is loaded with docker (if the Boolean to load the external configuration is set to true). Here you need to add all the config parameter you want to load with docker:

+
+
+
+
{
+    "version": "docker-version"
+}
+
+
+
+

2.- In the root, in the file /Dockerfile angular is going to copy the docker-external-config.json that was created before into the Nginx html route:

+
+
+
+
....
+COPY docker-external-config.json /usr/share/nginx/html/docker-external-config.json
+....
+
+
+
+
+

7.91. External json configuration

+
+

1.- Create a json file in the route /src/external-config.json. This external config is going to be used when the application is loaded with the start script (if the Boolean to load the external configuration is set to true). Here you need to add all the config parameter you want to load:

+
+
+
+
{
+    "version": "external-config"
+}
+
+
+
+

2.- The file named /angular.json (/workspace.json if using Nx) located at the root is going to be modified to add the file external-config.json that was just created to both "assets" inside Build and Test:

+
+
+
+
	....
+	"build": {
+          ....
+            "assets": [
+              "src/assets",
+              "src/data",
+              "src/favicon.ico",
+              "src/manifest.json",
+              "src/external-config.json"
+            ]
+	        ....
+        "test": {
+	  ....
+	   "assets": [
+              "src/assets",
+              "src/data",
+              "src/favicon.ico",
+              "src/manifest.json",
+              "src/external-config.json"
+            ]
+	  ....
+
+
+
+
+

7.92. Setting up the proxies

+
+

This step is going to set up two proxies. This allows loading the config desired by the context, whether the app is loaded with Docker or with Angular. Loading different files is made possible by the fact that the ConfigService method loadExternalConfig() looks for the path /config.

+
+
+
+

7.93. Docker (Optional)

+
+

1.- This step is going to be for docker. Add docker-external-config.json to Nginx configuration (/nginx.conf) that is in the root of the application:

+
+
+
+
....
+  location  ~ ^/config {
+        alias /usr/share/nginx/html/docker-external-config.json;
+  }
+....
+
+
+
+
+

7.94. External Configuration

+
+

1.- Now the file /proxy.conf.json needs to be created/modified; this file can be found in the root of the application. In this file you can add the route of the external configuration in target and the name of the file in ^/config:

+
+
+
+
....
+  "/config": {
+    "target": "http://localhost:4200",
+    "secure": false,
+    "pathRewrite": {
+      "^/config": "/external-config.json"
+    }
+  }
+....
+
+
+
+

2.- The file package.json found in the root of the application uses the start script to load the proxy config that was just created:

+
+
+
+
  "scripts": {
+....
+    "start": "ng serve --proxy-config proxy.conf.json -o",
+....
+
+
+
+

If using Nx, you need to run the command manually:

+
+
+

nx run angular-app-initializer:serve:development --proxyConfig=proxy.conf.json --o

+
+
+
+

7.95. Adding the loadExternalConfig Boolean to the environments

+
+

In order to load an external config we need to add the loadExternalConfig Boolean to the environments. To do so, inside the folder environments/ the files are going to get modified adding this Boolean to each environment that is going to be used. In this case, only two environments are going to be modified (environment.ts and environment.prod.ts). Down below there is an example of the modification being done in the environment.prod.ts:

+
+
+
+
export const environment: {
+  production: boolean;
+  loadExternalConfig: boolean;
+} = { production: false, loadExternalConfig: false };
+
+
+
+

In the file, in first instance, there is the declaration of the types of the variables. After that, there is the definition of those variables. This variable loadExternalConfig is going to be used by the service, allowing to set up an external config just by switching loadExternalConfig to true.

+
+
+
+

7.96. Creating core configuration service

+
+

In order to create the whole configuration module three files are going to be created:

+
+
+

1.- Create in the core app/core/config/ a config.ts

+
+
+
+
  export interface Config {
+    version: string;
+  }
+
+  export const config: Config = {
+    version: 'dev'
+  };
+
+
+
+

Taking a look at this file, it creates an interface (Config) that is going to be used by the variable that it exports (export const config: Config). This variable config is going to be used by the service that is going to be created.

+
+
+

2.- Create in the core app/core/config/ a config.service.ts:

+
+
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { environment } from '../../../environments/environment';
+import { Config, config } from './config';
+
+@Injectable()
+export class ConfigService {
+  constructor(private httpClient: HttpClient) {}
+
+  static factory(appLoadService: ConfigService) {
+    return () => appLoadService.loadExternalConfig();
+  }
+
+  // this method gets external configuration calling /config endpoint
+  // and merges into config object
+  loadExternalConfig(): Promise<any> {
+    if (!environment.loadExternalConfig) {
+      return Promise.resolve({});
+    }
+
+    const promise = this.httpClient
+      .get('/config')
+      .toPromise()
+      .then((settings) => {
+        Object.keys(settings || {}).forEach((k) => {
+          config[k] = settings[k];
+        });
+        return settings;
+      })
+      .catch((error) => {
+        return 'ok, no external configuration';
+      });
+
+    return promise;
+  }
+
+  getValues(): Config {
+    return config;
+  }
+}
+
+
+
+

As it was explained in previous steps, at first, there is a factory that uses the method loadExternalConfig(), this factory is going to be used in later steps in the module. After that, the loadExternalConfig() method checks if the Boolean in the environment is false. If it is false it will return the promise resolved with the normal config. Else, it is going to load the external config in the path (/config), and overwrite the values from the external config to the config that’s going to be used by the app, this is all returned in a promise.

+
+
+

3.- Create in the core a module for the config app/core/config/ a config.module.ts:

+
+
+
+
import { NgModule, APP_INITIALIZER } from '@angular/core';
+import { HttpClientModule } from '@angular/common/http';
+
+import { ConfigService } from './config.service';
+
+@NgModule({
+  imports: [HttpClientModule],
+  providers: [
+    ConfigService,
+    {
+      provide: APP_INITIALIZER,
+      useFactory: ConfigService.factory,
+      deps: [ConfigService],
+      multi: true,
+    },
+  ],
+})
+export class ConfigModule {}
+
+
+
+

As seen earlier, the ConfigService is added to the module. In this addition, the app is initialized(provide) and it uses the factory that was created in the ConfigService loading the config with or without the external values depending on the Boolean in the config.

+
+
+
+

7.97. Using the Config Service

+
+

As a first step, in the file /app/app.module.ts the ConfigModule created earlier in the other step is going to be imported:

+
+
+
+
  imports: [
+    ....
+    ConfigModule,
+    ....
+  ]
+
+
+
+

After that, the ConfigService is going to be injected into the app.component.ts

+
+
+
+
....
+import { ConfigService } from './core/config/config.service';
+....
+export class AppComponent {
+....
+  constructor(public configService: ConfigService) { }
+....
+
+
+
+

Finally, for this demonstration app, the component app/app.component.html is going to show the version of the config it is using at that moment.

+
+
+
+
<div style="text-align:center">
+  <h1>
+    Welcome to {{ title }}!
+  </h1>
+</div>
+<h2>Here is the configuration version that is using angular right now: {{configService.getValues().version}}</h2>
+
+
+
+
+

7.98. Final steps

+
+

The script start that was created earlier in the package.json (npm start) is going to be used to start the application. After that, modifying the Boolean loadExternalConfig inside the corresponding environment file inside /app/environments/ should show the different config versions.

+
+
+
+loadExternalConfigFalse +
+
+
+
+loadExternalConfigTrue +
+
+ +
+
+

7.99. Component Decomposition

+
+

When implementing a new requirement there are a few design decisions, which need to be considered. +A decomposition in Smart and Dumb Components should be done first. +This includes the definition of state and responsibilities. +Implementing a new dialog will most likely be done by defining a new Smart Component with multiple Dumb Component children.

+
+
+

In the component tree this would translate to the definition of a new sub-tree.

+
+
+
+Component Tree With Highlighted Sub Tree +
+
Figure 41. Component Tree with highlighted sub-tree
+
+
+
+

7.100. Defining Components

+
+

The following gives an example for component decomposition. +Shown is a screenshot from a style guide to be implemented. +It is a widget called Listpicker.

+
+
+

The basic function is an input field accepting direct input. +So typing otto puts otto inside the FormControl. +With arrow down key or by clicking the icon displayed in the inputs right edge a dropdown is opened. +Inside possible values can be selected and filtered beforehand. +After pressing arrow down key the focus should move into the filter input field. +Up and down arrow keys can be used to select an element from the list. +Typing into the filter input field filters the list from which the elements can be selected. +The current selected element is highlighted with green background color.

+
+
+
+Component Decomposition Example 1v2 +
+
Figure 42. Component decomposition example before
+
+
+

What should be done, is to define small reusable Dumb Components. +This way the complexity becomes manageable. +In the example every colored box describes a component with the purple box being a Smart Component.

+
+
+
+Component Decomposition Example 2v2 +
+
Figure 43. Component decomposition example after
+
+
+

This leads to the following component tree.

+
+
+
+Component Decomposition Example component tree +
+
Figure 44. Component decomposition example component tree
+
+
+

Note the uppermost component is a Dumb Component. +It is a wrapper for the label and the component to be displayed inside a form. +The Smart Component is Listpicker. +This way the widget can be reused without a form needed.

+
+
+

A widget is a typical Smart Component to be shared across feature modules. +So the SharedModule is the place for it to be defined.

+
+
+
+

7.101. Defining state

+
+

Every UI has state. +There are different kinds of state, for example

+
+
+
    +
  • +

    View State: e.g. is a panel open, a css transition pending, etc.

    +
  • +
  • +

    Application State: e.g. is a payment pending, current URL, user info, etc.

    +
  • +
  • +

    Business Data: e.g. products loaded from back-end

    +
  • +
+
+
+

It is good practice to base the component decomposition on the state handled by a component and to define a simplified state model beforehand. +Starting with the parent - the Smart Component:

+
+
+
    +
  • +

    What overall state does the dialog have: e.g. loading, error, valid data loaded, valid input, invalid input, etc. +Every defined value should correspond to an overall appearance of the whole dialog.

    +
  • +
  • +

    What events can occur to the dialog: e.g. submitting a form, changing a filter, pressing buttons, pressing keys, etc.

    +
  • +
+
+
+

For every Dumb Component:

+
+
+
    +
  • +

    What data does a component display: e.g. a header text, user information to be displayed, a loading flag, etc.
    +This will be a slice of the overall state of the parent Smart Component. +In general a Dumb Component presents a slice of its parent Smart Components state to the user.

    +
  • +
  • +

    What events can occur: keyboard events, mouse events, etc.
    +These events are all handled by its parent Smart Component - every event is passed up the tree to be handled by a Smart Component.

    +
  • +
+
+
+

This information should be reflected inside the modeled state. +The implementation is a TypeScript type - an interface or a class describing the model.

+
+
+

So there should be a type describing all state relevant for a Smart Component. +An instance of that type is sent down the component tree at runtime. +Not every Dumb Component will need the whole state. +For instance a single Dumb Component could only need a single string.

+
+
+

The state model for the previous Listpicker example is shown in the following listing.

+
+
+
Listing 62. Listpicker state model
+
+
export class ListpickerState {
+
+  items: {}[]|undefined;
+  columns = ['key', 'value'];
+  keyColumn = 'key';
+  displayValueColumn = 'value';
+  filteredItems: {}[]|undefined;
+  filter = '';
+  placeholder = '';
+  caseSensitive = true;
+  isDisabled = false;
+  isDropdownOpen = false;
+  selectedItem: {}|undefined;
+  displayValue = '';
+
+}
+
+
+
+

Listpicker holds an instance of ListpickerState which is passed down the component tree via @Input() bindings in the Dumb Components. +Events emitted by children - Dumb Components - create a new instance of ListpickerState based on the current instance and the event and its data. +So a state transition is just setting a new instance of ListpickerState. +Angular Bindings propagate the value down the tree after exchanging the state.

+
+
+
Listing 63. Listpicker State transition
+
+
export class ListpickerComponent {
+
+  // initial default values are set
+  state = new ListpickerState();
+
+  /** User changes filter */
+  onFilterChange(filter: string): void {
+    // apply filter ...
+    const filteredList = this.filterService.filter(...);
+
+    // important: A new instance is created, instead of altering the existing one.
+    //            This makes change detection easier and prevents hard to find bugs.
+    this.state = Object.assign({}, this.state, {
+      filteredItems: filteredList,
+      filter: filter
+    });
+  }
+
+}
+
+
+
+
Note:
+

It is not always necessary to define the model as independent type. +So there would be no state property and just properties for every state defined directly in the component class. +When complexity grows and state becomes larger this is usually a good idea. +If the state should be shared between Smart Components a store is to be used.

+
+
+
+

7.102. When are Dumb Components needed

+
+

Sometimes it is not necessary to perform a full decomposition. The architecture does not enforce it generally. What you should keep in mind is, that there is always a point when it becomes recommendable.

+
+
+

For example a template with 800 lines of code is:

+
+
+
    +
  • +

    not understandable

    +
  • +
  • +

    not maintainable

    +
  • +
  • +

    not testable

    +
  • +
  • +

    not reusable

    +
  • +
+
+
+

So when implementing a template with more than 50 lines of code you should think about decomposition.

+
+ +
+
+

7.103. Consuming REST services

+
+

A good introduction to working with Angular HttpClient can be found in Angular Docs

+
+
+

This guide will cover, how to embed Angular HttpClient in the application architecture. +For back-end request a special service with the suffix Adapter needs to be defined.

+
+
+
+

7.104. Defining Adapters

+
+

It is a good practice to have a Angular service whose single responsibility is to call the back-end and parse the received value to a transfer data model (e.g. Swagger generated TOs). +Those services need to have the suffix Adapter to make them easy to recognize.

+
+
+
+Adapters handle back-end communication +
+
Figure 45. Adapters handle back-end communication
+
+
+

As illustrated in the figure a Use Case service does not use Angular HttpClient directly but uses an adapter. +A basic adapter could look like this:

+
+
+
Listing 64. Example adapter
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Observable } from 'rxjs/Observable';
+
+import { FlightTo } from './flight-to';
+
+@Injectable({
+ providedIn: 'root',
+})
+export class FlightsAdapter {
+
+  constructor(
+    private httpClient: HttpClient
+  ) {}
+
+  getFlights(): Observable<FlightTo> {
+    return this.httpClient.get<FlightTo>('/relative/url/to/flights');
+  }
+
+}
+
+
+
+

The adapters should use a well-defined transfer data model. +This could be generated from server endpoints with CobiGen, Swagger, typescript-maven-plugin, etc. +If inside the application there is a business model defined, the adapter has to parse to the transfer model. +This is illustrated in the following listing.

+
+
+
Listing 65. Example adapter mapping from business model to transfer model
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Observable } from 'rxjs/Observable';
+import { map } from 'rxjs/operators';
+
+import { FlightTo } from './flight-to';
+import { Flight } from '../../../model/flight';
+
+@Injectable({
+ providedIn: 'root',
+})
+export class FlightsAdapter {
+
+  constructor(
+    private httpClient: HttpClient
+  ) {}
+
+  updateFlight(flight: Flight): Observable<Flight> {
+    const to = this.mapFlight(flight);
+
+    return this.httpClient.post<FlightTo>('/relative/url/to/flights', to).pipe(
+      map(to => this.mapFlightTo(to))
+    );
+  }
+
+  private mapFlight(flight: Flight): FlightTo {
+    // mapping logic
+  }
+
+  private mapFlightTo(flightTo: FlightTo): Flight {
+    // mapping logic
+  }
+
+}
+
+
+
+
+

7.105. Token management

+
+

In most cases the access to back-end API is secured using well known mechanisms as CSRF, JWT or both. In these cases the front-end application must manage the tokens that are generated when the user authenticates. More concretely it must store them to include them in every request automatically. Obviously, when user logs out these tokens must be removed from localStorage, memory, etc.

+
+
+
+

7.106. Store security token

+
+

In order to make this guide simple we are going to store the token in memory. Therefore, if we consider that we already have a login mechanism implemented, we would like to store the token using an auth.service.ts:

+
+
+
+
import { Injectable } from '@angular/core';
+import { Router } from '@angular/router';
+
+@Injectable({
+  providedIn: 'root',
+})
+export class AuthService {
+  private loggedIn = false;
+  private token: string;
+
+  constructor(public router: Router) {}
+
+  public isLogged(): boolean {
+    return this.loggedIn || false;
+  }
+
+  public setLogged(login: boolean): void {
+    this.loggedIn = login;
+  }
+
+  public getToken(): string {
+    return this.token;
+  }
+
+  public setToken(token: string): void {
+    this.token = token;
+  }
+}
+
+
+
+

Using the previous service we will be able to store the token obtained in the login request using the method setToken(token). Please consider that, if you want a more sophisticated approach using localStorage API, you will need to modify this service accordingly.

+
+
+
+

7.107. Include token in every request

+
+

Now that the token is available in the application it is necessary to include it in every request to a protected API endpoint. Instead of modifying all the HTTP requests in our application, Angular provides a class to intercept every request (and every response if we need to) called HttpInterceptor. Let’s create a service called http-interceptor.service.ts to implement the intercept method of this class:

+
+
+
+
import {
+  HttpEvent,
+  HttpHandler,
+  HttpInterceptor,
+  HttpRequest,
+} from '@angular/common/http';
+import { Injectable } from '@angular/core';
+import { Observable } from 'rxjs';
+import { environment } from '../../../environments/environment';
+import { AuthService } from './auth.service';
+
+@Injectable()
+export class HttpRequestInterceptorService implements HttpInterceptor {
+
+  constructor(private auth: AuthService) {}
+
+  intercept(
+    req: HttpRequest<any>,
+    next: HttpHandler,
+  ): Observable<HttpEvent<any>> {
+    // Get the auth header from the service.
+    const authHeader: string = this.auth.getToken();
+    if (authHeader) {
+      let authReq: HttpRequest<any>;
+
+      // CSRF
+      if (environment.security == 'csrf') {
+        authReq = req.clone({
+          withCredentials: true,
+          setHeaders: { 'x-csrf-token': authHeader },
+        });
+      }
+
+      // JWT
+      if (environment.security == 'jwt') {
+        authReq = req.clone({
+          setHeaders: { Authorization: authHeader },
+        });
+      }
+
+      return next.handle(authReq);
+    } else {
+      return next.handle(req);
+    }
+  }
+}
+
+
+
+

As you may notice, this service is making use of an environment field environment.security to determine if we are using JWT or CSRF in order to inject the token accordingly. In your application you can combine both if necessary.

+
+
+

Configure environment.ts file to use the CSRF/JWT.

+
+
+
+
security: 'csrf'
+
+
+
+

The authHeader used is obtained using the injected service AuthService already presented above.

+
+
+

In order to activate the interceptor we need to provide it in our app.module.ts or core.module.ts depending on the application structure. Let’s assume that we are using the latter and the interceptor file is inside a security folder:

+
+
+
+
...
+import { HttpRequestInterceptorService } from './security/http-request-interceptor.service';
+...
+
+@NgModule({
+  imports: [...],
+  exports: [...],
+  declarations: [],
+  providers: [
+    ...
+    {
+      provide: HTTP_INTERCEPTORS,
+      useClass: HttpRequestInterceptorService,
+      multi: true,
+    },
+  ],
+})
+export class CoreModule {}
+
+
+
+

Angular automatically will now modify every request and include in the header the token if it is convenient.

+
+ +
+
+

7.108. Error Handler in angular

+
+

Angular allows us to set up a custom error handler that can be used to control the different errors and handle them in a correct way. Using a global error handler will avoid mistakes and provide a user-friendly interface, allowing us to indicate to the user what problem is happening.

+
+
+
+

7.109. What is ErrorHandler

+
+

ErrorHandler is the class that Angular uses by default to control the errors. This means that, even if the application doesn’t have an ErrorHandler, it is going to use the one set up by default in Angular. This can be tested by trying to find a page not existing in any app; instantly Angular will print the error in the console.

+
+
+
+

7.110. Creating your custom ErrorHandler step by step

+
+

In order to create a custom ErrorHandler three steps are going to be needed:

+
+
+
+

7.111. Creating the custom ErrorHandler class

+
+

In this first step the custom ErrorHandler class is going to be created inside the folder /app/core/errors/errors-handler.ts:

+
+
+
+
import { ErrorHandler, Injectable, Injector } from '@angular/core';
+import { HttpErrorResponse } from '@angular/common/http';
+
+@Injectable()
+export class ErrorsHandler implements ErrorHandler {
+
+    constructor(private injector: Injector) {}
+
+    handleError(error: Error | HttpErrorResponse) {
+      //  To do: Use injector to get the necessary services to redirect or
+      // show a message to the user
+      const classname  = error.constructor.name;
+      switch ( classname )  {
+        case 'HttpErrorResponse':
+          console.error('HttpError:' + error.message);
+          if (!navigator.onLine) {
+            console.error('There\'s no internet connection');
+            // To do: control here in internet what you wanna do if user has no internet
+          } else {
+            console.error('Server Error:' + error.message);
+            // To do: control here if the server gave an error
+          }
+          break;
+        default:
+          console.error('Error:' + error.message);
+          // To do: control here if the client/other things gave an error
+      }
+    }
+}
+
+
+
+

This class can be used to control the different type of errors. If wanted, the classname variable could be used to add more switch cases. This would allow control of more specific situations.

+
+
+
+

7.112. Creating a ErrorInterceptor

+
+

Inside the same folder created in the last step we are going to create the ErrorInterceptor (errors-handler-interceptor.ts). This ErrorInterceptor is going to retry any failed calls to the server, to make sure the failure is not transient, before showing the error:

+
+
+
+
import { HttpInterceptor, HttpRequest, HttpHandler, HttpEvent } from '@angular/common/http';
+import { Injectable } from '@angular/core';
+import { Observable, of } from 'rxjs';
+import { retryWhen, delay, take, concatMap } from 'rxjs/operators';
+
+@Injectable()
+export class ErrorsHandlerInterceptor implements HttpInterceptor {
+
+    constructor() {}
+    intercept(req: HttpRequest<any>, next: HttpHandler): Observable<HttpEvent<any>> {
+        return next.handle(req).pipe(
+            retryWhen((errors: Observable<any>) => errors.pipe(
+                delay(500),
+                take(5),
+                concatMap((error: any, retryIndex: number) => {
+                    if (++retryIndex == 5) {
+                        throw error;
+                    }
+                    return of(error);
+                })
+            ))
+        );
+    }
+}
+
+
+
+

This custom made interceptor implements HttpInterceptor and, inside the intercept method, using the methods pipe, retryWhen, delay, take and concatMap from RxJS, it is going to do the following things if there are errors:

+
+
+
    +
  1. +

    With delay(500) do a delay to allow some time in between requests

    +
  2. +
  3. +

    With take(5) retry five times.

    +
  4. +
  5. +

    With concatMap if the index that take() gives is not 5 it returns the error, else, it throws the error.

    +
  6. +
+
+
+
+

7.113. Creating a Error Module

+
+

Finally, creating a module(errors-handler.module.ts) is necessary to include the interceptor and the custom error handler. In this case, the module is going to be created in the same folder as the last two:

+
+
+
+
import { NgModule, ErrorHandler } from '@angular/core';
+import { CommonModule } from '@angular/common';
+import { ErrorsHandler } from './errors-handler';
+import { HTTP_INTERCEPTORS } from '@angular/common/http';
+import { ErrorsHandlerInterceptor } from './errors-handler-interceptor';
+
+@NgModule({
+  declarations: [], // Declare here component if you want to use routing to error component
+  imports: [
+    CommonModule
+  ],
+  providers: [
+    {
+      provide: ErrorHandler,
+      useClass: ErrorsHandler,
+    },
+    {
+      provide: HTTP_INTERCEPTORS,
+      useClass: ErrorsHandlerInterceptor,
+      multi: true,
+    }
+  ]
+})
+export class ErrorsHandlerModule { }
+
+
+
+

This module simply provides the services that are implemented by our custom classes and then tells Angular to use our custom made classes instead of the default ones. After doing this, the module has to be included in the app module app.module.ts in order to be used.

+
+
+
+
....
+  imports: [
+    ErrorsHandlerModule,
+    ....
+
+
+
+
+

7.114. Handling Errors

+
+

As a final step, handling these errors is necessary. There are different ways that can be used to control the errors, here are a few:

+
+
+
    +
  • +

    Creating a custom page and using with Router to redirect to a page showing an error.

    +
  • +
  • +

    Creating a service in the server side or Backend to create a log with the error and calling it with HttpClient.

    +
  • +
  • +

    Showing a custom made SnackBar with the error message.

    +
  • +
+
+
+
+

7.115. Using SnackBarService and NgZone

+
+

If the SnackBar is used directly, some errors can occur, this is due to SnackBar being out of the Angular zone. In order to use this service properly, NgZone is necessary. The method run() from NgZone will allow the service to be inside the Angular Zone. An example on how to use it:

+
+
+
+
import { ErrorHandler, Injectable, Injector, NgZone } from '@angular/core';
+import { HttpErrorResponse } from '@angular/common/http';
+import { MatSnackBar } from '@angular/material';
+
+@Injectable()
+export class ErrorsHandler implements ErrorHandler {
+
+    constructor(private injector: Injector, private zone: NgZone) {}
+
+    handleError(error: Error | HttpErrorResponse) {
+      // Use injector to get the necessary services to redirect or
+      const snackBar: MatSnackBar = this.injector.get(MatSnackBar);
+      const classname  = error.constructor.name;
+      let message: string;
+      switch ( classname )  {
+        case 'HttpErrorResponse':
+          message = !(navigator.onLine) ? 'There is no internet connection' : error.message;
+          break;
+        default:
+          message = error.message;
+      }
+      this.zone.run(
+        () => snackBar.open(message, 'danger', { duration : 4000})
+      );
+    }
+}
+
+
+
+

Using Injector the MatSnackBar is obtained, then the correct message is obtained inside the switch. Finally, using NgZone and run(), we open the SnackBar passing the message, and the parameters wanted.

+
+
+

You can find a working example of this guide in devon4ts-samples.

+
+ +
+
+

7.116. File Structure

+ +
+
+

7.117. Top-level

+
+

The top-level file structure is defined by Angular CLI. You might put this "top-level file structure" into a sub-directory to facilitate your build, but this is not relevant for this guide. So the applications file structure relevant to this guide is the folder /src/app inside the part managed by Angular CLI.

+
+
+
Listing 66. Top-level file structure shows feature modules
+
+
    /src
+    └── /app
+        ├── /account-management
+        ├── /billing
+        ├── /booking
+        ├── /core
+        ├── /shared
+        ├── /status
+        |
+        ├── app.module.ts
+        ├── app.component.spec.ts
+        ├── app.component.ts
+        └── app.routing-module.ts
+
+
+
+

Besides the definition of app module the app folder has feature modules on top-level. +The special modules shared and core are present as well.

+
+
+
+

7.118. Feature Modules

+
+

A feature module contains the modules definition and two folders representing both layers.

+
+
+
Listing 67. Feature module file structure has both layers
+
+
    /src
+    └── /app
+        └── /account-management
+            ├── /components
+            ├── /services
+            |
+            ├── account-management.module.ts
+            ├── account-management.component.spec.ts
+            ├── account-management.component.ts
+            └── account-management.routing-module.ts
+
+
+
+

Additionally an entry component is possible. This would be the case in lazy loading scenarios. +So account-management.component.ts would be only present if account-management is lazy loaded. +Otherwise, the module’s routes would be defined Component-less +(see vsavkin blog post).

+
+
+
+

7.119. Components Layer

+
+

The component layer reflects the distinction between Smart Components and Dumb Components.

+
+
+
Listing 68. Components layer file structure shows Smart Components on top-level
+
+
    /src
+    └── /app
+        └── /account-management
+            └── /components
+                ├── /account-overview
+                ├── /confirm-modal
+                ├── /create-account
+                ├── /forgot-password
+                └── /shared
+
+
+
+

Every folder inside the /components folder represents a smart component. The only exception is /shared. +/shared contains Dumb Components shared across Smart Components inside the components layer.

+
+
+
Listing 69. Smart components contain Dumb components
+
+
    /src
+    └── /app
+        └── /account-management
+            └── /components
+                └── /account-overview
+                    ├── /user-info-panel
+                    |   ├── /address-tab
+                    |   ├── /last-activities-tab
+                    |   |
+                    |   ├── user-info-panel.component.html
+                    |   ├── user-info-panel.component.scss
+                    |   ├── user-info-panel.component.spec.ts
+                    |   └── user-info-panel.component.ts
+                    |
+                    ├── /user-header
+                    ├── /user-toolbar
+                    |
+                    ├── account-overview.component.html
+                    ├── account-overview.component.scss
+                    ├── account-overview.component.spec.ts
+                    └── account-overview.component.ts
+
+
+
+

Inside the folder of a Smart Component the component is defined. +Besides that are folders containing the Dumb Components the Smart Component consists of. +This can be recursive - a Dumb Component can consist of other Dumb Components. +This is reflected by the file structure as well. This way the structure of a view becomes very readable. +As mentioned before, if a Dumb Component is used by multiple Smart Components inside the components layer +it is put inside the /shared folder inside the components layer.

+
+
+

With this way of thinking the shared module makes a lot of sense. If a Dumb Component is used by multiple Smart Components +from different feature modules, the Dumb Component is placed into the shared module.

+
+
+
Listing 70. The shared module contains Dumb Components shared across Smart Components from different feature modules
+
+
    /src
+    └── /app
+        └── /shared
+            └── /user-panel
+                |
+                ├── user-panel.component.html
+                ├── user-panel.component.scss
+                ├── user-panel.component.spec.ts
+                └── user-panel.component.ts
+
+
+
+

The layer folder /components is not necessary inside the shared module. +The shared module only contains components!

+
+ +
+
+

7.120. Internationalization

+
+

Nowadays, a common scenario in front-end applications is to have the ability to translate labels and locate numbers, dates, currency and so on when the user clicks over a language selector or similar. devon4ng and specifically Angular has a default mechanism in order to fill the gap of such features, and besides there are some wide used libraries that make even easier to translate applications.

+
+ +
+
+

7.121. devon4ng i18n approach

+
+

The official approach could be a bit complicated, therefore the recommended one is to use the recommended library Transloco from https://github.com/ngneat/transloco/.

+
+
+
+

7.122. Install and configure Transloco

+
+

In order to include this library in your devon4ng Angular >= 7.2 project you will need to execute in a terminal:

+
+
+
+
$ ng add @ngneat/transloco
+
+
+
+

As part of the installation process you’ll be presented with questions; Once you answer them, everything you need will automatically be created for you.

+
+
+
    +
  • +

    First, Transloco creates boilerplate files for the requested translations.

    +
  • +
  • +

    Next, it will create a new file, transloco-root.module.ts which exposes an Angular’s module with a default configuration, and inject it into the AppModule.

    +
  • +
+
+
+
+
import { HttpClient } from '@angular/common/http';
+import {
+  TRANSLOCO_LOADER,
+  Translation,
+  TranslocoLoader,
+  TRANSLOCO_CONFIG,
+  translocoConfig,
+  TranslocoModule
+} from '@ngneat/transloco';
+import { Injectable, NgModule } from '@angular/core';
+import { environment } from '../environments/environment';
+
+@Injectable({ providedIn: 'root' })
+export class TranslocoHttpLoader implements TranslocoLoader {
+  constructor(private http: HttpClient) {}
+
+  getTranslation(lang: string) {
+    return this.http.get<Translation>(`/assets/i18n/${lang}.json`);
+  }
+}
+
+@NgModule({
+  exports: [ TranslocoModule ],
+  providers: [
+    {
+      provide: TRANSLOCO_CONFIG,
+      useValue: translocoConfig({
+        availableLangs: ['en', 'es'],
+        defaultLang: 'en',
+        // Remove this option if your application doesn't support changing language in runtime.
+        reRenderOnLangChange: true,
+        prodMode: environment.production,
+      })
+    },
+    { provide: TRANSLOCO_LOADER, useClass: TranslocoHttpLoader }
+  ]
+})
+export class TranslocoRootModule {}
+
+
+
+ + + + + +
+ + +As you might have noticed it also set an HttpLoader into the module’s providers. The HttpLoader is a class that implements the TranslocoLoader interface. It’s responsible for instructing Transloco how to load the translation files. It uses Angular HTTP client to fetch the files, based on the given path. +
+
+
+
+

7.123. Usage

+
+

In order to translate any label in any HTML template you will need to use the transloco pipe available:

+
+
+
+
{{ 'HELLO' | transloco }}
+
+
+
+

An optional parameter from the component TypeScript class could be included as follows:

+
+
+
+
{{ 'HELLO' | transloco: { value: dynamic } }}
+
+
+
+

It is possible to use with inputs:

+
+
+
+
<span [attr.alt]="'hello' | transloco">Attribute</span>
+<span [title]="'hello' | transloco">Property</span>
+
+
+
+

In order to change the language used you will need to create a button or selector that calls the this.translocoService.setActiveLang(language: string) method from TranslocoService. For example:

+
+
+
+
export class AppComponent {
+  constructor(private translocoService: TranslocoService) {}
+
+  changeLanguage(lang) {
+      this.translocoService.setActiveLang(lang);
+  }
+}
+
+
+
+

The translations will be included in the en.json, es.json, de.json, etc. files inside the /assets/i18n folder. For example en.json would be (using the previous parameter):

+
+
+
+
{
+    "HELLO": "hello"
+}
+
+
+
+

Or with an optional parameter:

+
+
+
+
{
+    "HELLO": "hello {{value}}"
+}
+
+
+
+

Transloco understands nested JSON objects. This means that you can have a translation that looks like this:

+
+
+
+
{
+    "HOME": {
+        "HELLO": "hello {{value}}"
+    }
+}
+
+
+
+

In order to access the value, use the dot notation, in this case HOME.HELLO.

+
+
+
+

7.124. Using the service, pipe or directive

+ +
+
+

7.125. == Structural Directive

+
+

Using a structural directive is the recommended approach. It’s DRY and efficient, as it creates one subscription per template:

+
+
+
+
<ng-container *transloco="let t">
+  <p>{{ t('title') }}</p>
+
+  <comp [title]="t('title')"></comp>
+</ng-container>
+
+
+
+

Note that the t function is memoized. It means that given the same key it will return the result directly from the cache.

+
+
+

We can pass a params object as the second parameter:

+
+
+
+
<ng-container *transloco="let t">
+  <p>{{ t('name', { name: 'Transloco' }) }}</p>
+</ng-container>
+
+
+
+

We can instruct the directive to use a different language in our template:

+
+
+
+
<ng-container *transloco="let t; lang: 'es'">
+  <p>{{ t('title') }}</p>
+</ng-container>
+
+
+
+
+

7.126. == Pipe

+
+

The use of pipes can be possible too:

+
+
+

template:

+
+
+
+
<div>{{ 'HELLO' | transloco:param }}</div>
+
+
+
+

component:

+
+
+
+
param = {value: 'world'};
+
+
+
+
+

7.127. == Attribute Directive

+
+

The last option available with transloco is the attribute directive:

+
+
+
+
<div transloco="HELLO" [translocoParams]="{ value: 'world' }"></div>
+
+
+
+
+

7.128. == Service

+
+

If you need to access translations in any component or service you can do it injecting the TranslocoService into them:

+
+
+
+
// Sync translation
+translocoService.translate('HELLO', {value: 'world'});
+
+// Async translation
+translocoService.selectTranslate('HELLO', { value: 'world' }).subscribe(res => {
+    console.log(res);
+    //=> 'hello world'
+});
+
+
+
+ + + + + +
+ + +You can find a complete example at https://github.com/devonfw/devon4ng-application-template. +
+
+
+

Please, visit https://github.com/ngneat/transloco/ for more info.

+
+ +
+
+

7.129. Routing

+
+

A basic introduction to the Angular Router can be found in Angular Docs.

+
+
+

This guide will show common tasks and best practices.

+
+
+
+

7.130. Defining Routes

+
+

For each feature module and the app module all routes should be defined in a separate module with the suffix RoutingModule. +This way the routing modules are the only place where routes are defined. +This pattern achieves a clear separation of concerns. +The following figure illustrates this.

+
+
+
+Routing module declaration +
+
Figure 46. Routing module declaration
+
+
+

It is important to define routes inside app routing module with .forRoot() and in feature routing modules with .forChild().

+
+
+
+

7.131. Example 1 - No Lazy Loading

+
+

In this example two modules need to be configured with routes - AppModule and FlightModule.

+
+
+

The following routes will be configured

+
+
+
    +
  • +

    / will redirect to /search

    +
  • +
  • +

    /search displays FlightSearchComponent (FlightModule)

    +
  • +
  • +

    /search/print/:flightId/:date displays FlightPrintComponent (FlightModule)

    +
  • +
  • +

    /search/details/:flightId/:date displays FlightDetailsComponent (FlightModule)

    +
  • +
  • +

    All other routes will display ErrorPage404 (AppModule)

    +
  • +
+
+
+
Listing 71. app-routing.module.ts
+
+
const routes: Routes = [
+  { path: '', redirectTo: 'search', pathMatch: 'full' },
+  { path: '**', component: ErrorPage404 }
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule]
+})
+export class AppRoutingModule { }
+
+
+
+
Listing 72. flight-search-routing.module.ts
+
+
const routes: Routes = [
+  {
+    path: 'search', children: [
+      { path: '', component: FlightSearchComponent },
+      { path: 'print/:flightId/:date', component: FlightPrintComponent },
+      { path: 'details/:flightId/:date', component: FlightDetailsComponent }
+    ]
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule],
+})
+export class FlightSearchRoutingModule { }
+
+
+
+ + + + + +
+ + +The import order inside AppModule is important. +AppRoutingModule needs to be imported after FlightModule. +
+
+
+
+

7.132. Example 2 - Lazy Loading

+
+

Lazy Loading is a good practice when the application has multiple feature areas and a user might not visit every dialog. +Or at least he might not need every dialog up front.

+
+
+

The following example will configure the same routes as example 1 but will lazy load FlightModule.

+
+
+
Listing 73. app-routing.module.ts
+
+
const routes: Routes = [
+  { path: 'search', loadChildren: 'app/flight-search/flight-search.module#FlightSearchModule' },
+  { path: '**', component: ErrorPage404 }
+];
+
+@NgModule({
+  imports: [RouterModule.forRoot(routes)],
+  exports: [RouterModule]
+})
+export class AppRoutingModule { }
+
+
+
+
Listing 74. flight-search-routing.module.ts
+
+
const routes: Routes = [
+  {
+    path: '', children: [
+      { path: '', component: FlightSearchComponent },
+      { path: 'print/:flightId/:date', component: FlightPrintComponent },
+      { path: 'details/:flightId/:date', component: FlightDetailsComponent }
+    ]
+  }
+];
+
+@NgModule({
+  imports: [RouterModule.forChild(routes)],
+  exports: [RouterModule],
+})
+export class FlightSearchRoutingModule { }
+
+
+
+
+

7.133. Triggering Route Changes

+
+

With Angular you have two ways of triggering route changes.

+
+
+
    +
  1. +

    Declarative with bindings in component HTML templates

    +
  2. +
  3. +

    Programmatic with Angular Router service inside component classes

    +
  4. +
+
+
+

On the one hand, architecture-wise it is a much cleaner solution to trigger route changes in Smart Components. +This way you have every UI event that should trigger a navigation handled in one place - in a Smart Component. +It becomes very easy to look inside the code for every navigation, that can occur. +Refactoring is also much easier, as there are no navigation events "hidden" in the HTML templates.

+
+
+

On the other hand, in terms of accessibility and SEO it is a better solution to rely on bindings in the view - e.g. by using Angular router-link directive. +This way screen readers and the Google crawler can move through the page easily.

+
+
+ + + + + +
+ + +If you do not have to support accessibility (screen readers, etc.) and to care about SEO (Google rank, etc.), +then you should aim for triggering navigation only in Smart Components. +
+
+
+
+Triggering navigation +
+
Figure 47. Triggering navigation
+
+
+
+

7.134. Guards

+
+

Guards are Angular services implemented on routes which determines whether a user can navigate to/from the route. There are examples below which will explain things better. We have the following types of Guards:

+
+
+
    +
  • +

    CanActivate: It is used to determine whether a user can visit a route. The most common scenario for this guard is to check if the user is authenticated. For example, if we want only logged in users to be able to go to a particular route, we will implement the CanActivate guard on this route.

    +
  • +
  • +

    CanActivateChild: Same as above, only implemented on child routes.

    +
  • +
  • +

    CanDeactivate: It is used to determine if a user can navigate away from a route. Most common example is when a user tries to go to a different page after filling up a form and does not save/submit the changes, we can use this guard to confirm whether the user really wants to leave the page without saving/submitting.

    +
  • +
  • +

    Resolve: For resolving dynamic data.

    +
  • +
  • +

    CanLoad: It is used to determine whether an Angular module can be loaded lazily. Example below will be helpful to understand it.

    +
  • +
+
+
+

Let’s have a look at some examples.

+
+
+
+

7.135. Example 1 - CanActivate and CanActivateChild guards

+ +
+
+

7.136. == CanActivate guard

+
+

As mentioned earlier, a guard is an Angular service and services are simply TypeScript classes. So we begin by creating a class. This class has to implement the CanActivate interface (imported from angular/router), and therefore, must have a canActivate function. The logic of this function determines whether the requested route can be navigated to or not. It returns either a Boolean value or an Observable or a Promise which resolves to a Boolean value. If it is true, the route is loaded, else not.

+
+
+
Listing 75. CanActivate example
+
+
...
+import {CanActivate} from "@angular/router";
+
+@Injectable()
+class ExampleAuthGuard implements CanActivate {
+  constructor(private authService: AuthService) {}
+
+  canActivate(route: ActivatedRouteSnapshot, state: RouterStateSnapshot) {
+	if (this.authService.isLoggedIn()) {
+      return true;
+    } else {
+	  window.alert('Please log in first');
+      return false;
+    }
+  }
+}
+
+
+
+

In the above example, let’s assume we have an AuthService which has an isLoggedIn() method which returns a Boolean value depending on whether the user is logged in. We use it to return true or false from the canActivate function. +The canActivate function accepts two parameters (provided by Angular). The first parameter of type ActivatedRouteSnapshot is the snapshot of the route the user is trying to navigate to (where the guard is implemented); we can extract the route parameters from this instance. The second parameter of type RouterStateSnapshot is a snapshot of the router state the user is trying to navigate to; we can fetch the URL from its url property.

+
+
+ + + + + +
+ + +We can also redirect the user to another page (maybe a login page) if the authService returns false. To do that, inject Router and use its navigate function to redirect to the appropriate page. +
+
+
+

Since it is a service, it needs to be provided in our module:

+
+
+
Listing 76. provide the guard in a module
+
+
@NgModule({
+  ...
+  providers: [
+    ...
+    ExampleAuthGuard
+  ]
+})
+
+
+
+

Now this guard is ready to use on our routes. We implement it where we define our array of routes in the application:

+
+
+
Listing 77. Implementing the guard
+
+
...
+const routes: Routes = [
+  { path: '', redirectTo: 'home', pathMatch: 'full' },
+  { path: 'home', component: HomeComponent },
+  { path: 'page1', component: Page1Component, canActivate: [ExampleAuthGuard] }
+];
+
+
+
+

As you can see, the canActivate property accepts an array of guards. So we can implement more than one guard on a route.

+
+
+
+

7.137. == CanActivateChild guard

+
+

To use the guard on nested (children) routes, we add it to the canActivateChild property like so:

+
+
+
Listing 78. Implementing the guard on child routes
+
+
...
+const routes: Routes = [
+  { path: '', redirectTo: 'home', pathMatch: 'full' },
+  { path: 'home', component: HomeComponent },
+  { path: 'page1', component: Page1Component, canActivateChild: [ExampleAuthGuard], children: [
+	{path: 'sub-page1', component: SubPageComponent},
+    {path: 'sub-page2', component: SubPageComponent}
+  ] }
+];
+
+
+
+
+

7.138. Example 2 - CanLoad guard

+
+

Similar to CanActivate, to use this guard we implement the CanLoad interface and override its canLoad function. Again, this function returns either a Boolean value or an Observable or a Promise which resolves to a Boolean value. The fundamental difference between CanActivate and CanLoad is that CanLoad is used to determine whether an entire module can be lazily loaded or not. If the guard returns false for a module protected by CanLoad, the entire module is not loaded.

+
+
+
Listing 79. CanLoad example
+
+
...
+import {CanLoad, Route} from "@angular/router";
+
+@Injectable()
+class ExampleCanLoadGuard implements CanLoad {
+  constructor(private authService: AuthService) {}
+
+  canLoad(route: Route) {
+	if (this.authService.isLoggedIn()) {
+      return true;
+    } else {
+	  window.alert('Please log in first');
+      return false;
+    }
+  }
+}
+
+
+
+

Again, let’s assume we have an AuthService which has an isLoggedIn() method which returns a Boolean value depending on whether the user is logged in. The canLoad function accepts a parameter of type Route which we can use to fetch the path a user is trying to navigate to (using the path property of Route).

+
+
+

This guard needs to be provided in our module like any other service.

+
+
+

To implement the guard, we use the canLoad property:

+
+
+
Listing 80. Implementing the guard
+
+
...
+const routes: Routes = [
+  { path: 'home', component: HomeComponent },
+  { path: 'admin', loadChildren: 'app/admin/admin.module#AdminModule', canLoad: [ExampleCanLoadGuard] }
+];
+
+
+ +
+
+

7.139. Testing

+
+

This guide will cover the basics of testing logic inside your code with unit test cases. +The guide assumes that you are familiar with Angular CLI (see the guide)

+
+
+

For testing your Angular application with unit test cases there are two main strategies:

+
+
+
    +
  1. +

    Isolated unit test cases
    +Isolated unit tests examine an instance of a class all by itself without any dependence on Angular or any injected values. +The amount of code and effort needed to create such tests is minimal.

    +
  2. +
  3. +

    Angular Testing Utilities
    +Let you test components including their interaction with Angular. +The amount of code and effort needed to create such tests is a little higher.

    +
  4. +
+
+
+
+

7.140. Testing Concept

+
+

The following figure shows you an overview of the application architecture divided in testing areas.

+
+
+
+Testing Areas +
+
Figure 48. Testing Areas
+
+
+

There are three areas, which need to be covered by different testing strategies.

+
+
+
    +
  1. +

    Components:
    +Smart Components need to be tested because they contain view logic. +Also the interaction with 3rd party components needs to be tested. +When a 3rd party component changes with an upgrade a test will be failing and warn you, that there is something wrong with the new version. +Most of the time Dumb Components do not need to be tested because they mainly display data and do not contain any logic. +Smart Components are always tested with Angular Testing Utilities. +For example selectors, which select data from the store and transform it further, need to be tested.

    +
  2. +
  3. +

    Stores:
    +A store contains methods representing state transitions. +If these methods contain logic, they need to be tested. +Stores are always tested using Isolated unit tests.

    +
  4. +
  5. +

    Services:
    +Services contain Business Logic, which needs to be tested. +UseCase Services represent a whole business use case. +For instance this could be initializing a store with all the data that is needed for a dialog - loading, transforming, storing. +Often Angular Testing Utilities are the optimal solution for testing UseCase Services, because they allow for an easy stubbing of the back-end. +All other services should be tested with Isolated unit tests as they are much easier to write and maintain.

    +
  6. +
+
+
+
+

7.141. Testing Smart Components

+
+

Testing Smart Components should assure the following.

+
+
+
    +
  1. +

    Bindings are correct.

    +
  2. +
  3. +

    Selectors which load data from the store are correct.

    +
  4. +
  5. +

    Asynchronous behavior is correct (loading state, error state, "normal" state).

    +
  6. +
  7. +

    Oftentimes through testing one realizes, that important edge cases are forgotten.

    +
  8. +
  9. +

    Do these test become very complex, it is often an indicator for poor code quality in the component. +Then the implementation is to be adjusted / refactored.

    +
  10. +
  11. +

    When testing values received from the native DOM, you will test also that 3rd party libraries did not change with a version upgrade. +A failing test will show you what part of a 3rd party library has changed. +This is much better than the users doing this for you. +For example a binding might fail because the property name was changed with a newer version of a 3rd party library.

    +
  12. +
+
+
+

In the function beforeEach() the TestBed imported from Angular Testing Utilities needs to be initialized. +The goal should be to define a minimal test-module with TestBed. +The following code gives you an example.

+
+
+
Listing 81. Example test setup for Smart Components
+
+
describe('PrintFlightComponent', () => {
+
+  let fixture: ComponentFixture<PrintFlightComponent>;
+  let store: FlightStore;
+  let printServiceSpy: jasmine.SpyObj<FlightPrintService>;
+
+  beforeEach(() => {
+    const urlParam = '1337';
+    const activatedRouteStub = { params: of({ id: urlParam }) };
+    printServiceSpy = jasmine.createSpyObj('FlightPrintService', ['initializePrintDialog']);
+    TestBed.configureTestingModule({
+      imports: [
+        TranslateModule.forRoot(),
+        RouterTestingModule
+      ],
+      declarations: [
+        PrintFlightComponent,
+        PrintContentComponent,
+        GeneralInformationPrintPanelComponent,
+        PassengersPrintPanelComponent
+      ],
+      providers: [
+        FlightStore,
+        {provide: FlightPrintService, useValue: printServiceSpy},
+        {provide: ActivatedRoute, useValue: activatedRouteStub}
+      ]
+    });
+    fixture = TestBed.createComponent(PrintFlightComponent);
+    store = fixture.debugElement.injector.get(FlightStore);
+    fixture.detectChanges();
+  });
+
+  // ... test cases
+})
+
+
+
+

It is important:

+
+
+
    +
  • +

    Use RouterTestingModule instead of RouterModule

    +
  • +
  • +

    Use TranslateModule.forRoot() without translations +This way you can test language-neutral without translation marks.

    +
  • +
  • +

    Do not add a whole module from your application - in declarations add the tested Smart Component with all its Dumb Components

    +
  • +
  • +

    The store should never be stubbed. +If you need a complex test setup, just use the regular methods defined on the store.

    +
  • +
  • +

    Stub all services used by the Smart Component. +These are mostly UseCase services. +They should not be tested by these tests. +Only the correct call to their functions should be assured. +The logic inside the UseCase services is tested with separate tests.

    +
  • +
  • +

    detectChanges() performs an Angular Change Detection cycle (Angular refreshes all the bindings present in the view)

    +
  • +
  • +

    tick() performs a virtual macro task, tick(1000) is equal to the virtual passing of 1s.

    +
  • +
+
+
+

The following test cases show the testing strategy in action.

+
+
+
Listing 82. Example
+
+
it('calls initializePrintDialog for url parameter 1337', fakeAsync(() => {
+  expect(printServiceSpy.initializePrintDialog).toHaveBeenCalledWith(1337);
+}));
+
+it('creates correct loading subtitle', fakeAsync(() => {
+  store.setPrintStateLoading(123);
+  tick();
+  fixture.detectChanges();
+
+  const subtitle = fixture.debugElement.query(By.css('app-header-element .print-header-container span:last-child'));
+  expect(subtitle.nativeElement.textContent).toBe('PRINT_HEADER.FLIGHT STATE.IS_LOADING');
+}));
+
+it('creates correct subtitle for loaded flight', fakeAsync(() => {
+  store.setPrintStateLoadedSuccess({
+    id: 123,
+    description: 'Description',
+    iata: 'FRA',
+    name: 'Frankfurt',
+    // ...
+  });
+  tick();
+  fixture.detectChanges();
+
+  const subtitle = fixture.debugElement.query(By.css('app-header-element .print-header-container span:last-child'));
+  expect(subtitle.nativeElement.textContent).toBe('PRINT_HEADER.FLIGHT "FRA (Frankfurt)" (ID: 123)');
+}));
+
+
+
+

The examples show the basic testing method

+
+
+
    +
  • +

    Set the store to a well-defined state

    +
  • +
  • +

    check if the component displays the correct values

    +
  • +
  • +

    …​ via checking values inside the native DOM.

    +
  • +
+
+
+
+

7.142. Testing state transitions performed by stores

+
+

Stores are always tested with Isolated unit tests.

+
+
+

Actions triggered by dispatchAction() calls are asynchronously performed to alter the state. +A good solution to test such a state transition is to use the done callback from Jasmine.

+
+
+
Listing 83. Example for testing a store
+
+
let sut: FlightStore;
+
+beforeEach(() => {
+  sut = new FlightStore();
+});
+
+it('setPrintStateLoading sets print state to loading', (done: Function) => {
+  sut.setPrintStateLoading(4711);
+
+  sut.state$.pipe(first()).subscribe(result => {
+    expect(result.print.isLoading).toBe(true);
+    expect(result.print.loadingId).toBe(4711);
+    done();
+  });
+});
+
+it('toggleRowChecked adds flight with given id to selectedValues Property', (done: Function) => {
+  const flight: FlightTO = {
+    id: 12
+    // dummy data
+  };
+  sut.setRegisterabgleichListe([flight]);
+  sut.toggleRowChecked(12);
+
+  sut.state$.pipe(first()).subscribe(result => {
+    expect(result.selectedValues).toContain(flight);
+    done();
+  });
+});
+
+
+
+
+

7.143. Testing services

+
+

When testing services both strategies - Isolated unit tests and Angular Testing Utilities - are valid options.

+
+
+

The goal of such tests are

+
+
+
    +
  • +

    assuring the behavior for valid data.

    +
  • +
  • +

    assuring the behavior for invalid data.

    +
  • +
  • +

    documenting functionality

    +
  • +
  • +

    safely performing refactoring

    +
  • +
  • +

    thinking about edge case behavior while testing

    +
  • +
+
+
+

For simple services Isolated unit tests can be written. +Writing these tests takes less effort and they can be written very fast.

+
+
+

The following listing gives an example of such tests.

+
+
+
Listing 84. Testing a simple services with Isolated unit tests
+
+
let sut: IsyDatePipe;
+
+beforeEach(() => {
+  sut = new IsyDatePipe();
+});
+
+it('transform should return empty string if input value is empty', () => {
+  expect(sut.transform('')).toBe('');
+});
+
+it('transform should return empty string if input value is null', () => {
+  expect(sut.transform(undefined)).toBe('');
+});
+
+// ...more tests
+
+
+
+

For testing Use Case services the Angular Testing Utilities should be used. +The following listing gives an example.

+
+
+
Listing 85. Test setup for testing use case services with Angular Testing Utilities
+
+
let sut: FlightPrintService;
+let store: FlightStore;
+let httpController: HttpTestingController;
+let flightCalculationServiceStub: jasmine.SpyObj<FlightCalculationService>;
+const flight: FlightTo = {
+  // ... valid dummy data
+};
+
+beforeEach(() => {
+  flightCalculationServiceStub = jasmine.createSpyObj('FlightCalculationService', ['getFlightType']);
+  flightCalculationServiceStub.getFlightType.and.callFake((catalog: string, type: string, key: string) => of(`${key}_long`));
+  TestBed.configureTestingModule({
+    imports: [
+      HttpClientTestingModule,
+      RouterTestingModule,
+    ],
+    providers: [
+      FlightPrintService,
+      FlightStore,
+      FlightAdapter,
+      {provide: FlightCalculationService, useValue: flightCalculationServiceStub}
+    ]
+  });
+
+  sut = TestBed.get(FlightPrintService);
+  store = TestBed.get(FlightStore);
+  httpController = TestBed.get(HttpTestingController);
+});
+
+
+
+

When using TestBed, it is important

+
+
+
    +
  • +

    to import HttpClientTestingModule for stubbing the back-end

    +
  • +
  • +

    to import RouterTestingModule for stubbing the Angular router

    +
  • +
  • +

    not to stub stores, adapters and business services

    +
  • +
  • +

    to stub services from libraries like FlightCalculationService - the correct implementation of libraries should not be tested by these tests.

    +
  • +
+
+
+

Testing back-end communication looks like this:

+
+
+
Listing 86. Testing back-end communication with Angular HttpTestingController
+
+
it('loads flight if not present in store', fakeAsync(() => {
+  sut.initializePrintDialog(1337);
+  const processRequest = httpController.expectOne('/path/to/flight');
+  processRequest.flush(flight);
+
+  httpController.verify();
+}));
+
+it('does not load flight if present in store', fakeAsync(() => {
+  const flight = {...flight, id: 4711};
+  store.setRegisterabgleich(flight);
+
+  sut.initializePrintDialog(4711);
+  httpController.expectNone('/path/to/flight');
+
+  httpController.verify();
+}));
+
+
+
+

The first test assures a correct XHR request is performed if initializePrintDialog() is called and no data is in the store. +The second test assures no XHR request is performed if the needed data is already in the store.

+
+
+

The next steps are checks for the correct implementation of logic.

+
+
+
Listing 87. Example testing a Use Case service
+
+
it('creates flight destination for valid key in svz', fakeAsync(() => {
+  const flightTo: FlightTo = {
+    ...flight,
+    id: 4712,
+    profile: '77'
+  };
+  store.setFlight(flightTo);
+  let result: FlightPrintContent|undefined;
+
+  sut.initializePrintDialog(4712);
+  store.select(s => s.print.content).subscribe(content => result = content);
+  tick();
+
+  expect(result!.destination).toBe('77_long (ID: 77)');
+}));
+
+
+ +
+
+

7.144. Update Angular CLI

+ +
+
+

7.145. Angular CLI common issues

+
+

There are constant updates for the official Angular framework dependencies. These dependencies are directly related with the Angular CLI package. Since this package comes installed by default inside the devonfw distribution folder for Windows OS and the distribution is updated every few months it needs to be updated in order to avoid known issues.

+
+
+
+

7.146. Angular CLI update guide

+
+

For Linux users is as easy as updating the global package:

+
+
+
+
$ npm uninstall -g @angular/cli
+$ npm install -g @angular/cli
+
+
+
+

For Windows users the process is only a bit harder. Open the devonfw bundled console and do as follows:

+
+
+
+
$ cd [devonfw_dist_folder]
+$ cd software/nodejs
+$ npm uninstall @angular/cli --no-save
+$ npm install @angular/cli --no-save
+
+
+
+

After following these steps you should have the latest Angular CLI version installed in your system. In order to check it run in the distribution console:

+
+
+ + + + + +
+ + +At the time of this writing, the Angular CLI is at 1.7.4 version. +
+
+
+
+
λ ng version
+
+     _                      _                 ____ _     ___
+    / \   _ __   __ _ _   _| | __ _ _ __     / ___| |   |_ _|
+   / △ \ | '_ \ / _` | | | | |/ _` | '__|   | |   | |    | |
+  / ___ \| | | | (_| | |_| | | (_| | |      | |___| |___ | |
+ /_/   \_\_| |_|\__, |\__,_|_|\__,_|_|       \____|_____|___|
+                |___/
+
+
+Angular CLI: 7.2.3
+Node: 10.13.0
+OS: win32 x64
+Angular:
+...
+
+
+ +
+
+

7.147. Upgrade devon4ng Angular and Ionic/Angular applications

+
+

Angular CLI provides a powerful tool to upgrade Angular based applications to the current stable release of the core framework.

+
+
+

This tool is ng update. It will not only upgrade dependencies and their related ones but also will perform some fixes in your code if available thanks to the provided schematics. It will check even if the update is not possible as there is another library or libraries that are not compatible with the versions of the upgraded dependencies. In this case it will keep your application untouched.

+
+
+ + + + + +
+ + +The repository must be in a clean state before executing a ng update. So, remember to commit your changes first. +
+
+
+
+

7.148. Basic usage

+
+

In order to perform a basic upgrade we will execute:

+
+
+
+
$ ng update @angular/cli @angular/core
+
+
+
+
+

7.149. Upgrade to new Angular version

+
+

The process will be the same, but first we need to make sure that our devon4ng application is in the latest version of Angular 8, so the ng update command can perform the upgrade not only in the dependencies but also make code changes to reflect the new features and fixes.

+
+
+
    +
  • +

    First, upgrade to latest Angular 9 version:

    +
  • +
+
+
+
+
$ ng update @angular/cli@9 @angular/core@9
+
+
+
+

Optionally the flag -C can be added to previous command to make a commit automatically. This is also valid for the next steps.

+
+
+
    +
  • +

    Then, upgrade Angular:

    +
  • +
+
+
+
+
$ ng update @angular/cli @angular/core
+
+
+
+
    +
  • +

    In case you use Angular Material:

    +
  • +
+
+
+
+
$ ng update @angular/material
+
+
+
+
    +
  • +

    If the application depends on third party libraries, the new tool ngcc can be run to make them compatible with the new Ivy compiler. In this case it is recommended to include a postinstall script in the package.json:

    +
  • +
+
+
+
+
{
+  "scripts": {
+    "postinstall": "ngcc --properties es2015 browser module main --first-only --create-ivy-entry-points"
+  }
+}
+
+
+ +
+

Important use cases:

+
+
+
    +
  • +

    To update to the next beta or pre-release version, use the --next=true option.

    +
  • +
  • +

    To update from one major version to another, use the format ng update @angular/cli@^<major_version> @angular/core@^<major_version>.

    +
  • +
  • +

    In case your Angular application uses @angular/material include it in the first command:

    +
    +
    +
    $ ng update @angular/cli @angular/core @angular/material
    +
    +
    +
  • +
+
+
+
+

7.150. Ionic/Angular applications

+
+

Just following the same procedure we can upgrade Angular applications, but we must take care of important specific Ionic dependencies:

+
+
+
+
$ ng update @angular/cli @angular/core @ionic/angular @ionic/angular-toolkit [@ionic/...]
+
+
+
+
+

7.151. Other dependencies

+
+

Every application will make use of different dependencies. Angular CLI ng update will also take care of these ones. For example, if you need to upgrade @capacitor you will perform:

+
+
+
+
$ ng update @capacitor/cli @capacitor/core [@capacitor/...]
+
+
+
+

Another example could be that you need to upgrade @ngx-translate packages. As always in this case you will execute:

+
+
+
+
$ ng update @ngx-translate/core @ngx-translate/http-loader
+
+
+
+
+

7.152. Angular Update Guide online tool

+
+

It is recommended to use the Angular Update Guide tool at https://update.angular.io/ that will provide the necessary steps to upgrade any Angular application depending on multiple criteria.

+
+ +
+
+

7.153. Working with Angular CLI

+
+

Angular CLI provides a facade for building, testing, linting, debugging and generating code. +Under the hood Angular CLI uses specific tools to achieve these tasks. +The user does not need to maintain them and can rely on Angular to keep them up to date and maybe switch to other tools which come up in the future.

+
+
+

The Angular CLI provides a wiki with common tasks you encounter when working on applications with the Angular CLI. +The Angular CLI Wiki can be found here.

+
+
+

In this guide we will go through the most important tasks. +To go into more details, please visit the Angular CLI wiki.

+
+
+
+

7.154. Installing Angular CLI

+
+

Angular CLI should be added as global and local dependency. +The following commands add Angular CLI as global Dependency.

+
+
+

yarn command

+
+
+
+
yarn global add @angular/cli
+
+
+
+

npm command

+
+
+
+
npm install -g @angular/cli
+
+
+
+

You can check a successful installation with ng --version. +This should print out the version installed.

+
+
+
+Printing Angular CLI Version +
+
Figure 49. Printing Angular CLI Version
+
+
+
+

7.155. Running a live development server

+
+

The Angular CLI can be used to start a live development server. +First your application will be compiled and then the server will be started. +If you change the code of a file, the server will reload the displayed page. +Run your application with the following command:

+
+
+
+
ng serve -o
+
+
+
+
+

7.156. Running Unit Tests

+
+

All unit tests can be executed with the command:

+
+
+
+
ng test
+
+
+
+

To make a single run and create a code coverage file use the following command:

+
+
+
+
ng test -sr -cc
+
+
+
+ + + + + +
+ + +You can configure the output format for code coverage files to match your requirements in the file karma.conf.js which can be found on toplevel of your project folder. +For instance, this can be useful for exporting the results to a SonarQube. +
+
+
+
+

7.157. Linting the code quality

+
+

You can lint your files with the command

+
+
+
+
ng lint --type-check
+
+
+
+ + + + + +
+ + +You can adjust the linting rules in the file tslint.json which can be found on toplevel of your project folder. +
+
+
+
+

7.158. Generating Code

+ +
+
+

7.159. Creating a new Angular CLI project

+
+

For creating a new Angular CLI project the command ng new is used.

+
+
+

The following command creates a new application named my-app.

+
+
+
+
ng new my-app
+
+
+
+
+

7.160. Creating a new feature module

+
+

A new feature module can be created via the ng generate module command.

+
+
+

The following command generates a new feature module named todo.

+
+
+
+
ng generate module todo
+
+
+
+
+Generate a module with Angular CLI +
+
Figure 50. Generate a module with Angular CLI
+
+
+ + + + + +
+ + +The created feature module needs to be added to the AppModule by hand. +Other option would be to define a lazy route in AppRoutingModule to make this a lazy loaded module. +
+
+
+
+

7.161. Creating a new component

+
+

To create components the command ng generate component can be used.

+
+
+

The following command will generate the component todo-details inside the components layer of todo module. +It will generate a class, an HTML file, a CSS file and a test file. +Also, it will register this component as a declaration inside the nearest module - this is TodoModule.

+
+
+
+
ng generate component todo/components/todo-details
+
+
+
+
+Generate a component with Angular CLI +
+
Figure 51. Generate a component with Angular CLI
+
+
+ + + + + +
+ + +If you want to export the component, you have to add the component to exports array of the module. +This would be the case if you generate a component inside shared module. +
+
+
+
+

7.162. Configuring an Angular CLI project

+
+

Inside an Angular CLI project the file .angular-cli.json can be used to configure the Angular CLI.

+
+
+

The following options are very important to understand.

+
+
+
    +
  • +

    The property defaults can be used to change the default style extension. +The following settings will make the Angular CLI generate .less files when a new component is generated.

    +
  • +
+
+
+
+
"defaults": {
+  "styleExt": "less",
+  "component": {}
+}
+
+
+
+
    +
  • +

    The property apps contains all applications maintained with Angular CLI. +Most of the time you will have only one.

    +
    +
      +
    • +

      assets configures all the static files, that the application needs - this can be images, fonts, json files, etc. +When you add them to assets the Angular CLI will put these files to the build target and serve them while debugging. +The following will put all files in /i18n to the output folder /i18n

      +
    • +
    +
    +
  • +
+
+
+
+
"assets": [
+  { "glob": "**/*.json", "input": "./i18n", "output": "./i18n" }
+]
+
+
+
+
    +
  • +

    styles property contains all style files that will be globally available. +The Angular CLI will create a styles bundle that goes directly into index.html with it. +The following will make all styles in styles.less globally available.

    +
  • +
+
+
+
+
"styles": [
+  "styles.less"
+]
+
+
+
+
    +
  • +

    environmentSource and environments are used to manage configuration with the Angular CLI. +Inside the code, always the file specified in environmentSource will be referenced. +You can define different environments - e.g. production, staging, etc. - which you list in environments. +At compile time the Angular CLI will override all values in environmentSource with the values from the matching environment target. +The following code will build the application for the environment staging.

    +
  • +
+
+
+
+
ng build --environment=staging
+
+
+
+
+
+
+

8. Ionic

+
+ +
+

8.1. Ionic 5 Getting started

+
+

Ionic is a front-end focused framework which offers different tools for developing hybrid mobile applications. The web technologies used for this purpose are CSS, Sass, HTML5 and Typescript.

+
+
+
+

8.2. Why Ionic?

+
+

Ionic is used for developing hybrid applications, which means not having to rely on a specific IDE such as Android Studio or Xcode. Furthermore, development of native apps requires learning different languages (Java/Kotlin for Android and Objective-C/Swift for Apple); with Ionic, a developer does not have to code the same functionality for multiple platforms, just use the adequate libraries and components.

+
+
+
+

8.3. Basic environment set up

+ +
+
+

8.4. Install Ionic CLI

+
+

Although the devonfw distribution comes with an already installed Ionic CLI, here are the steps to install it. The installation of Ionic is easy, just one command has to be written:

+
+
+

$ npm install -g @ionic/cli

+
+
+
+

8.5. Update Ionic CLI

+
+

If there was a previous installation of the Ionic CLI, it will need to be uninstalled due to a change in package name.

+
+
+
+
$ npm uninstall -g ionic
+$ npm install -g @ionic/cli
+
+
+
+

## Basic project set up +The set-up of an Ionic application is pretty immediate and can be done in one line:

+
+
+

ionic start <name> <template> --type=angular

+
+
+
    +
  • +

    ionic start: Command to create an app.

    +
  • +
  • +

    <name>: Name of the application.

    +
  • +
  • +

    <template>: Model of the application.

    +
  • +
  • +

    --type=angular: With this flag, the app produced will be based on angular.

    +
  • +
+
+
+

To create an empty project, the following command can be used:

+
+
+

ionic start MyApp blank --type=angular

+
+
+
+Ionic blank project +
+
+
+

The image above shows the directory structure generated.

+
+
+

There are more templates available that can be seen with the command +ionic start --list

+
+
+
+List of ionic templates +
+
+
+

The templates surrounded by red line are based on angular and comes with Ionic v5, while the others belong to earlier versions (before v4).

+
+
+ + + + + +
+ + +More info at https://ionicframework.com/docs. Remember to select Angular documentation, since Ionic supports React, Vue and Vanilla JS. +
+
+ +
+
+

8.6. Ionic to android

+
+

This page is written to help developers to go from the source code of an ionic application to an android one, with this in mind, topics such as: environment, commands, modifications,…​ are covered.

+
+
+
+

8.7. Assumptions

+
+

This document assumes that the reader has already:

+
+
+
    +
  • +

    Source code of an Ionic application and wants to build it on an android device,

    +
  • +
  • +

    A working installation of NodeJS

    +
  • +
  • +

    An Ionic CLI installed and up-to-date.

    +
  • +
  • +

    Android Studio and Android SDK.

    +
  • +
+
+
+
+

8.8. From Ionic to Android project

+
+

When a native application is being designed, sometimes functionalities that use the camera, geolocation, push notifications, …​ are requested. To resolve these requests, Capacitor can be used.

+
+
+

In general terms, Capacitor wraps apps made with Ionic (HTML, SCSS, Typescript) into WebViews that can be displayed in native applications (Android, IOS) and allows the developer to access native functionalities like the ones said before.

+
+
+

Installing capacitor is as easy as installing any node module, just a few commands have to be run in a console:

+
+
+
    +
  • +

    cd name-of-ionic-4-app

    +
  • +
  • +

    npm install --save @capacitor/core @capacitor/cli

    +
  • +
+
+
+

Then, it is necessary to initialize capacitor with some information: app id, name of the app and the directory where your app is stored. To fill this information, run:

+
+
+
    +
  • +

    npx cap init

    +
  • +
+
+
+
+

8.9. Modifications

+
+

Throughout the development process, usually back-end and front-end are on a local computer, so it’s a common practice to have different configuration files for each environment (commonly production and development). Ionic uses an angular.json file to store those configurations and some rules to be applied.

+
+
+

If a back-end is hosted on http://localhost:8081, and that direction is used in every environment, the application built for android will not work because computer and device do not have the same localhost. Fortunately, different configurations can be defined.

+
+
+

Android Studio uses 10.0.2.2 as an alias for 127.0.0.1 (computer’s localhost), so adding http://10.0.2.2:8081 in a new environment file and modifying angular.json accordingly will make it possible to connect front-end and back-end.

+
+
+
+Android environment and angular.json +
+
+
+
+
    "build": {
+    ...
+        "configurations": {
+            ...
+            "android": {
+                "fileReplacements": [
+                    {
+                        "replace": "src/environments/environment.ts",
+                        "with": "src/environments/environment.android.ts"
+                    }
+                ]
+            },
+        }
+    }
+
+
+
+
+

8.10. Build

+
+

Once configured, it is necessary to build the Ionic app using this new configuration:

+
+
+
    +
  • +

    ionic build --configuration=android

    +
  • +
+
+
+

The next commands copy the build application on a folder named android and open android studio.

+
+
+
    +
  • +

    npx cap add android

    +
  • +
  • +

    npx cap copy

    +
  • +
  • +

    npx cap open android

    +
  • +
+
+
+
+

8.11. From Android project to emulated device

+
+

Once Android Studio is opened, follow these steps:

+
+
+
    +
  1. +

    Click on "Build" → Make project.

    +
  2. +
  3. +

    Click on "Build" → Make Module 'app' (default name).

    +
  4. +
+
+
+

Click on make project +click on make app

+
+
+
    +
  1. +

    Click on "Build" → Build Bundle(s) / APK(s) → Build APK(s).

    +
  2. +
  3. +

    Click on run and choose a device.

    +
  4. +
+
+
+

click on build APK +click on running device

+
+
+

If there are no devices available, a new one can be created:

+
+
+
    +
  1. +

    Click on "Create new device"

    +
  2. +
  3. +

    Select hardware and click "Next". For example: Phone → Nexus 5X.

    +
  4. +
+
+
+

Create new device +Select hardware

+
+
+
    +
  1. +

    Download a system image.

    +
    +
      +
    1. +

      Click on download.

      +
    2. +
    3. +

      Wait until the installation finished and then click "Finish".

      +
    4. +
    5. +

      Click "Next".

      +
    6. +
    +
    +
  2. +
  3. +

    Verify configuration (default configuration should be enough) and click "Next".

    +
  4. +
+
+
+

Download system image +Check configuration

+
+
+
    +
  1. +

    Check that the new device is created correctly.

    +
  2. +
+
+
+
+New created device +
+
+
+
+

8.12. From Android project to real device

+
+

To test on a real android device, an easy approach to communicate a smartphone (front-end) and computer (back-end) is to configure a WiFi hotspot and connect the computer to it. A guide about this process can be found here.

+
+
+

Once connected, run ipconfig on a console if you are using windows or ifconfig on a Linux machine to get the IP address of your machine’s Wireless LAN adapter WiFi.

+
+
+
+Result of `ipconfig` command on Windows 10 +
+
+
+

This obtained IP must be used instead of "localhost" or "10.0.2.2" at environment.android.ts.

+
+
+
+Android environment file server URL +
+
+
+

After this configuration, follow the build steps in "From Ionic to Android project" and the first three steps in "From Android project to emulated device".

+
+
+
+

8.13. Send APK to Android through USB

+
+

To send the built application to a device, you can connect computer and mobile through USB, but first, it is necessary to unlock developer options.

+
+
+
    +
  1. +

    Open "Settings" and go to "System".

    +
  2. +
  3. +

    Click on "About".

    +
  4. +
  5. +

    Click "Build number" seven times to unlock developer options.

    +
  6. +
+
+
+
+Steps to enable developer options: 1, 2, 3 +
+
+
+
    +
  1. +

    Go to "System" again and then to "Developer options".

    +
  2. +
  3. +

    Check that the options are "On".

    +
  4. +
  5. +

    Check that "USB debugging" is activated.

    +
  6. +
+
+
+
+Steps to enable developer options: 4, 5, 6 +
+
+
+

After this, do the step four in "From Android project to emulated device" and choose the connected smartphone.

+
+
+
+

8.14. Send APK to Android through email

+
+

When you build an APK, a dialog gives two options: locate or analyze. If the first one is chosen, Windows file explorer will be opened showing an APK that can be sent using email. Download the APK on your phone and click it to install.

+
+
+
+Steps to enable developer options: 4, 5, 6 +
+
+
+
+

8.15. Result

+
+

If everything goes correctly, the Ionic application will be ready to be tested.

+
+
+
+Application running on a real device +
+
+ +
+
+

8.16. Ionic Progressive Web App

+
+

This guide is a continuation of the guide Angular PWAs, therefore, valid concepts explained there are still valid in this page but focused on Ionic.

+
+
+
+

8.17. Assumptions

+
+

This guide assumes that you already have installed:

+
+
+
    +
  • +

    NodeJS

    +
  • +
  • +

    npm package manager

    +
  • +
  • +

    Angular CLI / Nx CLI

    +
  • +
  • +

    Ionic 5 CLI

    +
  • +
  • +

    Capacitor

    +
  • +
+
+
+

Also, it is a good idea to read the document about PWA using Angular.

+
+
+
+

8.18. Sample Application

+
+
+Ionic 5 PWA Base +
+
Figure 52. Basic ionic PWA.
+
+
+

To explain how to build progressive web apps (PWA) using Ionic, a basic application is going to be built. This app will be able to take photos even without network using PWA elements.

+
+
+
+

8.19. Step 1: Create a new project

+
+

This step can be completed with one simple command: ionic start <name> <template>, where <name> is the name and <template> a model for the app. In this case, the app is going to be named basic-ion-pwa.

+
+
+

If you are using Nx, there is a pre-requisite to this step. And that is, you have to add the @nxtend/ionic-angular plugin to your Nx workspace. The command for that is npm install --save-dev @nxtend/ionic-angular. Once you have the plugin installed, you can generate an ionic app in your Nx workspace with the command nx generate @nxtend/ionic-angular:app basic-ion-pwa. (You can refer this guide if you want to get started with Nx).

+
+
+
+

8.20. Step 2: Structures and styles

+
+

The styles (scss) and structures (html) do not have anything specially relevant, just colors and ionic web components. The code can be found in devon4ts-samples.

+
+
+
+

8.21. Step 3: Add functionality

+
+

After this step, the app will allow users to take photos and display them in the main screen. +First we have to import three important elements:

+
+
+
    +
  • +

    DomSanitizer: Sanitizes values to be safe to use.

    +
  • +
  • +

    SafeResourceUrl: Interface for values that are safe to use as URL.

    +
  • +
  • +

    Plugins: Capacitor constant value used to access to the device’s camera and toast dialogs.

    +
  • +
+
+
+
+
  import { DomSanitizer, SafeResourceUrl } from '@angular/platform-browser';
+  import { Plugins, CameraResultType } from '@capacitor/core';
+
+
+
+

The process of taking a picture is enclosed in a takePicture() method. takePicture() calls the Camera’s getPhoto() function which returns an URL or an exception. If a photo is taken then the image displayed in the main page will be changed for the new picture, else, if the app is closed without changing it, a toast message will be displayed.

+
+
+
+
  export class HomePage {
+    image: SafeResourceUrl;
+    ...
+
+    async takePicture() {
+      try {
+        const image = await Plugins.Camera.getPhoto({
+          quality: 90,
+          allowEditing: true,
+          resultType: CameraResultType.Uri,
+        });
+
+        // Change last picture shown
+        this.image = this.sanitizer.bypassSecurityTrustResourceUrl(image.webPath);
+      } catch (e) {
+        this.show('Closing camera');
+      }
+    }
+
+    async show(message: string) {
+      await Plugins.Toast.show({
+        text: message,
+      });
+    }
+  }
+
+
+
+
+

8.22. Step 4: PWA Elements

+
+

When Ionic apps are not running natively, some resources like Camera do not work by default but can be enabled using PWA Elements. To use Capacitor’s PWA elements run npm install @ionic/pwa-elements and modify src/main.ts as shown below.

+
+
+
+
...
+
+// Import for PWA elements
+import { defineCustomElements } from '@ionic/pwa-elements/loader';
+
+if (environment.production) {
+  enableProdMode();
+}
+
+platformBrowserDynamic().bootstrapModule(AppModule)
+  .catch(err => console.log(err));
+
+// Call the element loader after the platform has been bootstrapped
+defineCustomElements(window);
+
+
+
+
+

8.23. Step 5: Make it Progressive.

+
+

Turning an Ionic 5 app into a PWA is pretty easy. The same module used to turn Angular apps into PWAs has to be added. To do so, run: ng add @angular/pwa. This command also creates an icons folder inside src/assets and contains angular icons for multiple resolutions. (Note: In an Nx workspace, you have to add it like a normal package using npm install @angular/pwa, and you have to manually add the icons). If you want to use other images, be sure that they have the same resolution, the names can be different but the file manifest.json has to be changed accordingly.

+
+
+
+

8.24. Step 6: Configure the app

+
+

manifest.json

+
+
+

Default configuration.

+
+
+

ngsw-config.json

+
+
+

At assetGroups → resources, add a urls field and a pattern to match PWA Elements scripts and other resources (images, styles, …​):

+
+
+
+
  "urls": ["https://unpkg.com/@ionic/pwa-elements@1.0.2/dist/**"]
+
+
+
+
+

8.25. Step 7: Check that your app is a PWA

+
+

To check if an app is a PWA lets compare its normal behavior against itself but built for production. Run in the project’s root folder the commands below:

+
+
+

ionic build --configuration production to build the app using production settings. (nx build basic-ion-pwa --configuration production in your Nx workspace root).

+
+
+

npm install http-server to install an npm module that can serve your built application. Documentation here. A good alternative is also npm install serve. It can be checked here.

+
+
+

Go to the www folder running cd www.

+
+
+

http-server -o or serve to serve your built app.

+
+
+ + + + + +
+ + +In order not to install anything not necessary npx can be used directly to serve the app. i.e run npx serve [folder] will automatically download and run this HTTP server without installing it in the project dependencies. +
+
+
+
+Http server running +
+
Figure 53. Http server running on localhost:8081.
+
+
+

 
+In another console instance run ionic serve (nx serve basic-ion-pwa if using Nx CLI) to open the common app (not built).

+
+
+
+Ionic serve on Visual Studio Code console +
+
Figure 54. Ionic server running on localhost:8100.
+
+
+

 
+The first difference can be found on Developer tools → application, here it is seen that the PWA application (left) has a service worker and the common one does not.

+
+
+
+Application comparison +
+
Figure 55. Application service worker comparison.
+
+
+

 
+If the "offline" box is checked, it will force a disconnection from the network. In situations where users do not have connectivity or have a slow one, the PWA can still be accessed and used.

+
+
+
+Online offline apps +
+
Figure 56. Offline application.
+
+
+

 
+Finally, plugins like Lighthouse can be used to test whether an application is progressive or not.

+
+
+
+Lighthouse report +
+
Figure 57. Lighthouse report.
+
+
+
+
+
+

9. Layouts

+
+ +
+

9.1. Angular Material Layout

+
+

The purpose of this guide is to get a basic understanding of creating layouts using Angular Material in a devon4ng application. We will create an application with a header containing some menu links and a sidenav with some navigation links.

+
+
+
+Finished application +
+
Figure 58. This is what the finished application will look like
+
+
+
+

9.2. Create a new angular application

+
+

We start with opening the devonfw IDE(right-click anywhere in your workspace and click "Open devonfw CMD shell here") and running the following command to start a project named devon4ng-mat-layout

+
+
+
    +
  • +

    ng new devon4ng-mat-layout --routing --style=scss. If you are using Nx, the command would be nx generate @nrwl/angular:app devon4ng-mat-layout --routing --style=scss in your Nx workspace. Click here to get started with using Nx.

    +
  • +
+
+
+

We are providing the routing flag so that a routing module is generated, and we are also setting the style sheet format to SCSS with --style=scss.

+
+
+

Once the creation process is complete, open your newly created application in Visual Studio Code. Try running the empty application by running the following command in the integrated terminal:

+
+
+
    +
  • +

    ng serve. (If you are using Nx, you have to specify the project name along with the --project flag, so the command becomes ng serve --project=devon4ng-mat-layout)

    +
  • +
+
+
+

Angular will spin up a server and you can check your application by visiting http://localhost:4200/ in your browser.

+
+
+
+Blank application +
+
Figure 59. Blank application
+
+
+
+

9.3. Adding Angular Material library to the project

+
+

Next we will add Angular Material to our application. In the integrated terminal, press Ctrl + C to terminate the running application and run the following command:

+
+
+
    +
  • +

    npm install --save @angular/material @angular/cdk @angular/animations

    +
  • +
+
+
+

You can also use Yarn to install the dependencies if you prefer that:

+
+
+
    +
  • +

    yarn add @angular/material @angular/cdk @angular/animations

    +
  • +
+
+
+

Once the dependencies are installed, we need to import the BrowserAnimationsModule in our AppModule for animations support.

+
+
+
Listing 88. Importing BrowserAnimationsModule in AppModule
+
+
import {BrowserAnimationsModule} from '@angular/platform-browser/animations';
+
+@NgModule({
+  ...
+  imports: [BrowserAnimationsModule],
+  ...
+})
+export class AppModule { }
+
+
+
+

Angular Material provides a host of components for designing our application. All the components are well structured into individual NgModules. For each component from the Angular Material library that we want to use, we have to import the respective NgModule.

+
+
+
Listing 89. We will be using the following components in our application:
+
+
import { MatIconModule, MatButtonModule, MatMenuModule, MatListModule, MatToolbarModule, MatSidenavModule } from '@angular/material';
+
+@NgModule({
+  ...
+  imports: [
+	...
+    MatIconModule,
+    MatButtonModule,
+    MatMenuModule,
+    MatListModule,
+    MatToolbarModule,
+    MatSidenavModule,
+	...
+	],
+  ...
+})
+export class AppModule { }
+
+
+
+

A better approach is to import and then export all the required components in a shared module. But for the sake of simplicity, we are importing all the required components in the AppModule itself.

+
+
+
+

==

+
+
+
  You can find a working copy of this application https://github.com/devonfw-sample/devon4ts-samples/tree/master/apps/angular-material-basic-layout[here]. The sample application is part of a Nx workspace, which means it is one of the many apps in a monorepo and capable of importing reusable code from a shared library. This guide describes the implementation by assuming a stand-alone single-repo application, but the pages and layout described in this sample app are similar to the ones used in another sample app in the monorepo (https://github.com/devonfw-sample/devon4ts-samples/tree/master/apps/angular-material-theming[angular-material-theming]), which is why we have exported the required components from a shared library and reused them in both the apps. As a result, the code in our monorepo will be slightly different. It would still help you in following this guide.
+== ==
+
+
+
+

Next, we include a theme in our application. Angular Material comes with four pre-defined themes: indigo-pink, deeppurple-amber, pink-bluegrey and purple-green. It is also possible to create our own custom theme, but that is beyond the scope of this guide. Including a theme is required to apply all of the core and theme styles to your application. +We will include the indigo-pink theme in our application by importing the indigo-pink.css file in our src/styles.scss:

+
+
+
Listing 90. In src/styles.scss:
+
+
@import "~@angular/material/prebuilt-themes/indigo-pink.css";
+
+
+
+

To use Material Design Icons along with the mat-icon component, we will load the Material Icons library in our src/index.html file

+
+
+
Listing 91. In src/index.html:
+
+
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
+
+
+
+
+

9.4. Development

+
+

Now that we have all the Angular Material related dependencies set up in our project, we can start coding. Let’s begin by adding a suitable margin and font to the body element of our single page application. We will add it in the src/styles.scss file to apply it globally:

+
+
+
Listing 92. In src/styles.scss:
+
+
body {
+  margin: 0;
+  font-family: "Segoe UI", Roboto, sans-serif;
+}
+
+
+
+

At this point, if we run our application, this is how it will look like:

+
+
+
+Angular Material added to the application +
+
Figure 60. Application with Angular Material set up
+
+
+

We will clear the app.component.html file and setup a header with a menu button and some navigational links. We will use mat-toolbar, mat-button, mat-menu, mat-icon and mat-icon-button for this:

+
+
+
Listing 93. app.component.html:
+
+
<mat-toolbar color="primary">
+  <button mat-icon-button aria-label="menu">
+    <mat-icon>menu</mat-icon>
+  </button>
+  <button mat-button [matMenuTriggerFor]="submenu">Menu 1</button>
+  <button mat-button>Menu 2</button>
+  <button mat-button>Menu 3</button>
+
+  <mat-menu #submenu="matMenu">
+    <button mat-menu-item>Sub-menu 1</button>
+    <button mat-menu-item [matMenuTriggerFor]="submenu2">Sub-menu 2</button>
+  </mat-menu>
+
+  <mat-menu #submenu2="matMenu">
+    <button mat-menu-item>Menu Item 1</button>
+    <button mat-menu-item>Menu Item 2</button>
+    <button mat-menu-item>Menu Item 3</button>
+  </mat-menu>
+
+</mat-toolbar>
+
+
+
+

The color attribute on the mat-toolbar element will give it the primary (indigo) color as defined by our theme. The color attribute works with most Angular Material components; the possible values are 'primary', 'accent' and 'warn'. +The mat-toolbar is a suitable component to represent a header. It serves as a placeholder for elements we want in our header. +Inside the mat-toolbar, we start with a button having mat-icon-button attribute, which itself contains a mat-icon element having the value menu. This will serve as a menu button which we can use to toggle the sidenav. +We follow it with some sample buttons having the mat-button attribute. Notice the first button has a property matMenuTriggerFor bound to a local reference submenu. As the property name suggests, the click of this button will display the mat-menu element with the specified local reference as a drop-down menu. The rest of the code is self explanatory.

+
+
+
+Header added to the application +
+
Figure 61. This is how our application looks with the first menu button (Menu 1) clicked.
+
+
+

We want to keep the sidenav toggling menu button on the left and move the rest to the right to make it look better. To do this we add a class to the menu icon button:

+
+
+
Listing 94. app.component.html:
+
+
...
+  <button mat-icon-button aria-label="menu" class="menu">
+    <mat-icon>menu</mat-icon>
+  </button>
+...
+
+
+
+

And in the app.component.scss file, we add the following style:

+
+
+
Listing 95. app.component.scss:
+
+
.menu {
+    margin-right: auto;
+}
+
+
+
+

The mat-toolbar element already has its display property set to flex. Setting the menu icon button’s margin-right property to auto keeps it on the left and pushes the other elements to the right.

+
+
+
+Final look of the header +
+
Figure 62. Final look of the header.
+
+
+

Next, we will create a sidenav. But before that, let's create a couple of components to navigate between, the links of which we will add to the sidenav. +We will use the ng generate component (or ng g c command for short) to create Home and Data components. (Append --project=devon4ng-mat-layout to the command in an Nx workspace). We nest them in the pages sub-directory since they represent our pages.

+
+
+
    +
  • +

    ng g c pages/home

    +
  • +
  • +

    ng g c pages/data;

    +
  • +
+
+
+

Let us set up the routing such that when we visit http://localhost:4200/ root url we see the HomeComponent and when we visit http://localhost:4200/data url we see the DataComponent. +We had opted for routing while creating the application, so we have the routing module app-routing.module.ts setup for us. In this file, we have the empty routes array where we set up our routes.

+
+
+
Listing 96. app-routing.module.ts:
+
+
import { HomeComponent } from './pages/home/home.component';
+import { DataComponent } from './pages/data/data.component';
+
+	const routes: Routes = [
+	  { path: '', component: HomeComponent },
+	  { path: 'data', component: DataComponent }
+	];
+
+
+
+

We need to provide a hook where the components will be loaded when their respective URLs are loaded. We do that by using the router-outlet directive in the app.component.html.

+
+
+
Listing 97. app.component.html:
+
+
...
+	</mat-toolbar>
+	<router-outlet></router-outlet>
+
+
+
+

Now when we visit the defined URLs we see the appropriate components rendered on screen.

+
+
+

Let's change the contents of the components to have something better.

+
+
+
Listing 98. home.component.html:
+
+
<h2>Home Page</h2>
+
+
+
+
Listing 99. home.component.scss:
+
+
h2 {
+    text-align: center;
+    margin-top: 50px;
+}
+
+
+
+
Listing 100. data.component.html:
+
+
<h2>Data Page</h2>
+
+
+
+
Listing 101. data.component.scss:
+
+
h2 {
+    text-align: center;
+    margin-top: 50px;
+}
+
+
+
+

The pages look somewhat better now:

+
+
+
+Home page +
+
Figure 63. Home page
+
+
+
+Data page +
+
Figure 64. Data page
+
+
+

Let us finally create the sidenav. To implement the sidenav we need to use 3 Angular Material components: mat-sidenav-container, mat-sidenav and mat-sidenav-content. +The mat-sidenav-container, as the name suggests, acts as a container for the sidenav and the associated content. So it is the parent element, and mat-sidenav and mat-sidenav-content are the children sibling elements. mat-sidenav represents the sidenav. We can put any content we want, though it is usually used to contain a list of navigational links. The mat-sidenav-content element is for containing the contents of our current page. Since we need the sidenav application-wide, we will put it in the app.component.html.

+
+
+
Listing 102. app.component.html:
+
+
...
+</mat-toolbar>
+
+<mat-sidenav-container>
+  <mat-sidenav mode="over" [disableClose]="false" #sidenav>
+    Sidenav
+  </mat-sidenav>
+  <mat-sidenav-content>
+    <router-outlet></router-outlet>
+  </mat-sidenav-content>
+</mat-sidenav-container>
+
+
+
+

The mat-sidenav has a mode property, which accepts one of the 3 values: over, push and side. It decides the behavior of the sidenav. mat-sidenav also has a disableClose property which accepts a boolean value. It toggles the behavior where we click on the backdrop or press the Esc key to close the sidenav. There are other properties which we can use to customize the appearance, behavior and position of the sidenav. You can find the properties documented online at https://material.angular.io/components/sidenav/api +We moved the router-outlet directive inside the mat-sidenav-content where it will render the routed component. +But if you check the running application in the browser, we don’t see the sidenav yet. That is because it is closed. We want to have the sidenav opened/closed at the click of the menu icon button on the left side of the header we implemented earlier. Notice we have set a local reference #sidenav on the mat-sidenav element. We can access this element and call its toggle() function to toggle open or close the sidenav.

+
+
+
Listing 103. app.component.html:
+
+
...
+  <button mat-icon-button aria-label="menu" class="menu" (click)="sidenav.toggle()">
+    <mat-icon>menu</mat-icon>
+  </button>
+...
+
+
+
+
+Sidenav works +
+
Figure 65. Sidenav is implemented
+
+
+

We can now open the sidenav by clicking the menu icon button. But it does not look right. The sidenav is only as wide as its content. Also the page does not stretch the entire viewport due to lack of content. +Let’s add the following styles to make the page fill the viewport:

+
+
+
Listing 104. app.component.scss:
+
+
...
+mat-sidenav-container {
+    position: absolute;
+    top: 64px;
+    left: 0;
+    right: 0;
+    bottom: 0;
+}
+
+
+
+

The sidenav width will be corrected when we add the navigational links to it. That is the only thing remaining to be done. Let's implement it now:

+
+
+
Listing 105. app.component.html:
+
+
...
+  <mat-sidenav [disableClose]="false" mode="over" #sidenav>
+	<mat-nav-list>
+      <a
+        id="home"
+        mat-list-item
+        [routerLink]="['./']"
+        (click)="sidenav.close()"
+        routerLinkActive="active"
+        [routerLinkActiveOptions]="{exact: true}"
+      >
+        <mat-icon matListAvatar>home</mat-icon>
+        <h3 matLine>Home</h3>
+        <p matLine>sample home page</p>
+      </a>
+      <a
+        id="sampleData"
+        mat-list-item
+        [routerLink]="['./data']"
+        (click)="sidenav.close()"
+        routerLinkActive="active"
+      >
+        <mat-icon matListAvatar>grid_on</mat-icon>
+        <h3 matLine>Data</h3>
+        <p matLine>sample data page</p>
+      </a>
+    </mat-nav-list>
+  </mat-sidenav>
+...
+
+
+
+

We use the mat-nav-list element to set a list of navigational links. We use the a tags with the mat-list-item directive. We implement a click listener on each link to close the sidenav when it is clicked. The routerLink directive is used to provide the URLs to navigate to. The routerLinkActive directive is used to provide the class name which will be added to the link when its URL is visited. Here we name the class `active`. To style it, let's modify the app.component.scss file:

+
+
+
Listing 106. app.component.scss:
+
+
...
+mat-sidenav-container {
+...
+	a.active {
+        background: #8e8d8d;
+        color: #fff;
+
+        p {
+            color: #4a4a4a;
+        }
+    }
+}
+
+
+
+

Now we have a working application with a basic layout: a header with some menu and a sidenav with some navigational links.

+
+
+
+Finished application +
+
Figure 66. Finished application
+
+
+
+

9.5. Conclusion

+
+

The purpose of this guide was to provide a basic understanding of creating layouts with Angular Material. The Angular Material library has a huge collection of ready to use components which can be found at https://material.angular.io/components/categories +It has provided documentation and example usage for each of its components. Going through the documentation will give a better understanding of using Angular Material components in our devon4ng applications.

+
+
+
+
+
+

10. NgRx

+
+ +
+

10.1. Introduction to NgRx

+
+

NgRx is a state management framework for Angular based on the Redux pattern.

+
+
+
+

10.2. The need for client side state management

+
+

You may wonder why you should bother with state management. Usually data resides in a back-end storage system, e.g. a database, and is retrieved by the client on a per-need basis. To add, update, or delete entities from this store, clients have to invoke API endpoints at the back-end. Mimicking database-like transactions on the client side may seem redundant. However, there are many use cases for which a global client-side state is appropriate:

+
+
+
    +
  • +

    the client has some kind of global state which should survive the destruction of a component, but does not warrant server side persistence, for example: volume level of media, expansion status of menus

    +
  • +
  • +

    server-side data should not be retrieved every time it is needed, either because multiple components consume it, or because it should be cached, e.g. the personal watchlist in an online streaming app

    +
  • +
  • +

    the app provides a rich experience with offline functionality, e.g. a native app built with Ionic

    +
  • +
+
+
+

Saving global states inside the services they originate from results in a data flow that is hard to follow and state becoming inconsistent due to unordered state mutations. Following the single source of truth principle, there should be a central location holding all your application’s state, just like a server-side database does. State management libraries for Angular provide tools for storing, retrieving, and updating client-side state.

+
+
+
+

10.3. Why NgRx?

+
+

As stated in the introduction, devon4ng does not stipulate a particular state library, or require using one at all. However, NgRx has proven to be a robust, mature solution for this task, with good tooling and 3rd-party library support. Albeit introducing a level of indirection that requires additional effort even for simple features, the Redux concept enforces a clear separation of concerns leading to a cleaner architecture.

+
+
+

Nonetheless, you should always compare different approaches to state management and pick the best one suiting your use case. Here’s a (non-exhaustive) list of competing state management libraries:

+
+
+
    +
  • +

    Plain RxJS using the simple store described in Abstract Class Store

    +
  • +
  • +

    NgXS reduces some boilerplate of NgRx by leveraging the power of decorators and moving side effects to the store

    +
  • +
  • +

    MobX follows a more imperative approach in contrast to the functional Redux pattern

    +
  • +
  • +

    Akita also uses an imperative approach with direct setters in the store, but keeps the concept of immutable state transitions

    +
  • +
+
+
+
+

10.4. Setup

+
+

To get a quick start, use the provided template for devon4ng + NgRx.

+
+
+

To manually install the core store package together with a set of useful extensions:

+
+
+

NPM:

+
+
+
+
`npm install @ngrx/store @ngrx/effects @ngrx/entity @ngrx/store-devtools --save`
+
+
+
+

Yarn:

+
+
+
+
`yarn add @ngrx/store @ngrx/effects @ngrx/entity @ngrx/store-devtools`
+
+
+
+

We recommend to add the NgRx schematics to your project so you can create code artifacts from the command line:

+
+
+

NPM:

+
+
+
+
`npm install @ngrx/schematics --save-dev`
+
+
+
+

Yarn:

+
+
+
+
`yarn add @ngrx/schematics --dev`
+
+
+
+

Afterwards, make NgRx your default schematics provider, so you don’t have to type the qualified package name every time:

+
+
+
+
`ng config cli.defaultCollection @ngrx/schematics`
+
+
+
+

If you have custom settings for Angular schematics, you have to configure them as described here.

+
+
+
+

10.5. Concept

+
+
+NgRx Architecture +
+
Figure 67. NgRx architecture overview
+
+
+

Figure 1 gives an overview of the NgRx data flow. The single source of truth is managed as an immutable state object by the store. Components dispatch actions to trigger state changes. Actions are handed over to reducers, which take the current state and action data to compute the next state. Actions are also consumed by effects, which perform side-effects such as retrieving data from the back-end, and may dispatch new actions as a result. Components subscribe to state changes using selectors.

+
+
+

Continue with Creating a Simple Store.

+
+ +
+
+

10.6. State, Selection and Reducers

+ +
+
+

10.7. Creating a Simple Store

+
+

In the following pages we use the example of an online streaming service. We will model a particular feature, a watchlist that can be populated by the user with movies she or he wants to see in the future.

+
+
+
+

10.8. Initializing NgRx

+
+

If you’re starting fresh, you first have to initialize NgRx and create a root state. The fastest way to do this is using the schematic:

+
+
+
+
`ng generate @ngrx/schematics:store State --root --module app.module.ts`
+
+
+
+

This will automatically generate a root store and register it in the app module. Next we generate a feature module for the watchlist:

+
+
+

` ng generate module watchlist`

+
+
+

and create a corresponding feature store:

+
+
+

` ng generate store watchlist/Watchlist -m watchlist.module.ts`

+
+
+

This generates a file watchlist/reducers/index.ts with the reducer function, and registers the store in the watchlist module declaration.

+
+
+
+

== =

+
+

If you’re getting an error Schematic "store" not found in collection "@schematics/angular", this means you forgot to register the NgRx schematics as default. +== == =

+
+
+

Next, add the WatchlistModule to the AppModule imports so the feature store is registered when the application starts. We also added the store devtools which we will use later, resulting in the following file:

+
+
+

app.module.ts

+
+
+
+
import { BrowserModule } from '@angular/platform-browser';
+import { NgModule } from '@angular/core';
+
+import { AppComponent } from './app.component';
+import { EffectsModule } from '@ngrx/effects';
+import { AppEffects } from './app.effects';
+import { StoreModule } from '@ngrx/store';
+import { reducers, metaReducers } from './reducers';
+import { StoreDevtoolsModule } from '@ngrx/store-devtools';
+import { environment } from '../environments/environment';
+import { WatchlistModule } from './watchlist/watchlist.module';
+
+@NgModule({
+  declarations: [
+    AppComponent
+  ],
+  imports: [
+    BrowserModule,
+    WatchlistModule,
+    StoreModule.forRoot(reducers, { metaReducers }),
+    // Instrumentation must be imported after importing StoreModule (config is optional)
+    StoreDevtoolsModule.instrument({
+      maxAge: 25, // Retains last 25 states
+      logOnly: environment.production, // Restrict extension to log-only mode
+    }),
+    !environment.production ? StoreDevtoolsModule.instrument() : []
+  ],
+  providers: [],
+  bootstrap: [AppComponent]
+})
+export class AppModule { }
+
+
+
+
+

10.9. Create an entity model and initial state

+
+

We need a simple model for our list of movies. Create a file watchlist/models/movies.ts and insert the following code:

+
+
+
+
export interface Movie {
+    id: number;
+    title: string;
+    releaseYear: number;
+    runtimeMinutes: number;
+    genre: Genre;
+}
+
+export type Genre = 'action' | 'fantasy' | 'sci-fi' | 'romantic' | 'comedy' | 'mystery';
+
+export interface WatchlistItem {
+    id: number;
+    movie: Movie;
+    added: Date;
+    playbackMinutes: number;
+}
+
+
+
+
+

== =

+
+

We discourage putting several types into the same file and do this only for the sake of keeping this tutorial brief. +== == =

+
+
+

Later we will learn how to retrieve data from the back-end using effects. For now we will create an initial state for the user with a default movie.

+
+
+

State is defined and transformed by a reducer function. Let’s create a watchlist reducer:

+
+
+
+
```
+cd watchlist/reducers
+ng g reducer WatchlistData --reducers index.ts
+```
+
+
+
+

Open the generated file watchlist-data.reducer.ts. You see three exports: The State interface defines the shape of the state. There is only one instance of a feature state in the store at all times. The initialState constant is the state at application creation time. The reducer function will later be called by the store to produce the next state instance based on the current state and an action object.

+
+
+

Let’s put a movie into the user’s watchlist:

+
+
+

watchlist-data.reducer.ts

+
+
+
+
export interface State {
+  items: WatchlistItem[];
+}
+
+export const initialState: State = {
+  items: [
+    {
+      id: 42,
+      movie: {
+        id: 1,
+        title: 'Die Hard',
+        genre: 'action',
+        releaseYear: 1988,
+        runtimeMinutes: 132
+      },
+      playbackMinutes: 0,
+      added: new Date(),
+    }
+  ]
+};
+
+
+
+
+

10.10. Select the current watchlist

+
+

State slices can be retrieved from the store using selectors.

+
+
+

Create a watchlist component:

+
+
+
+
`ng g c watchlist/Watchlist`
+
+
+
+

and add it to the exports of WatchlistModule. Also, replace app.component.html with

+
+
+
+
<app-watchlist></app-watchlist>
+
+
+
+

State observables are obtained using selectors. They are memoized by default, meaning that you don’t have to worry about performance if you use complicated calculations when deriving state — these are only performed once per state emission.

+
+
+

Add a selector to watchlist-data.reducer.ts:

+
+
+
+
`export const getAllItems = (state: State) => state.items;`
+
+
+
+

Next, we have to re-export the selector for this sub-state in the feature reducer. Modify the watchlist/reducers/index.ts like this:

+
+
+

watchlist/reducers/index.ts

+
+
+
+
import {
+  ActionReducer,
+  ActionReducerMap,
+  createFeatureSelector,
+  createSelector,
+  MetaReducer
+} from '@ngrx/store';
+import { environment } from 'src/environments/environment';
+import * as fromWatchlistData from './watchlist-data.reducer';
+import * as fromRoot from 'src/app/reducers/index';
+
+export interface WatchlistState { (1)
+  watchlistData: fromWatchlistData.State;
+}
+
+export interface State extends fromRoot.State { (2)
+  watchlist: WatchlistState;
+}
+
+export const reducers: ActionReducerMap<WatchlistState> = { (3)
+  watchlistData: fromWatchlistData.reducer,
+};
+
+export const metaReducers: MetaReducer<WatchlistState>[] = !environment.production ? [] : [];
+
+export const getFeature = createFeatureSelector<State, WatchlistState>('watchlist'); (4)
+
+export const getWatchlistData = createSelector( (5)
+  getFeature,
+  state => state.watchlistData
+);
+
+export const getAllItems = createSelector( (6)
+  getWatchlistData,
+  fromWatchlistData.getAllItems
+);
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + +
1The feature state, each member is managed by a different reducer
2Feature states are registered by the forFeature method. This interface provides a typesafe path from root to feature state.
3Tie sub-states of a feature state to the corresponding reducers
4Create a selector to access the 'watchlist' feature state
5select the watchlistData sub state
6re-export the selector
+
+
+

Note how createSelector allows to chain selectors. This is a powerful tool that also allows for selecting from multiple states.

+
+
+

You can use selectors as pipeable operators:

+
+
+

watchlist.component.ts

+
+
+
+
export class WatchlistComponent {
+  watchlistItems$: Observable<WatchlistItem[]>;
+
+  constructor(
+    private store: Store<fromWatchlist.State>
+  ) {
+    this.watchlistItems$ = this.store.pipe(select(fromWatchlist.getAllItems));
+  }
+}
+
+
+
+

watchlist.component.html

+
+
+
+
<h1>Watchlist</h1>
+<ul>
+    <li *ngFor="let item of watchlistItems$ | async">{{item.movie.title}} ({{item.movie.releaseYear}}): {{item.playbackMinutes}}/{{item.movie.runtimeMinutes}} min watched</li>
+</ul>
+
+
+
+
+

10.11. Dispatching an action to update watched minutes

+
+

We track the user’s current progress at watching a movie as the playbackMinutes property. After closing a video, the watched minutes have to be updated. In NgRx, state is updated by dispatching actions. An action is an object with a (globally unique) type discriminator and an optional payload.

+
+
+
+

10.12. == Creating the action

+
+

Create a file playback/actions/index.ts. In this example, we do not further separate the actions per sub state. Actions can be defined by using action creators:

+
+
+

playback/actions/index.ts

+
+
+
+
import { createAction, props, union } from '@ngrx/store';
+
+export const playbackFinished = createAction('[Playback] Playback finished', props<{ movieId: number, stoppedAtMinute: number }>());
+
+const actions = union({
+    playbackFinished
+});
+
+export type ActionsUnion = typeof actions;
+
+
+
+

First we specify the type, followed by a call to the payload definition function. Next, we create a union of all possible actions for this file using union, which allows us to access action payloads in the reducer in a typesafe way.

+
+
+
+

== =

+
+

Action types should follow the naming convention [Source] Event, e.g. [Recommended List] Hide Recommendation or [Auth API] Login Success. Think of actions rather as events than commands. You should never use the same action at two different places (you can still handle multiple actions the same way). This facilitates tracing the source of an action. For details see Good Action Hygiene with NgRx by Mike Ryan (video). +== == =

+
+
+
+

10.13. == Dispatch

+
+

We skip the implementation of an actual video playback page and simulate watching a movie in 10 minute segments by adding a link in the template:

+
+
+

watchlist-component.html

+
+
+
+
<li *ngFor="let item of watchlistItems$ | async">... <button (click)="stoppedPlayback(item.movie.id, item.playbackMinutes + 10)">Add 10 Minutes</button></li>
+
+
+
+

watchlist-component.ts

+
+
+
+
import * as playbackActions from 'src/app/playback/actions';
+...
+  stoppedPlayback(movieId: number, stoppedAtMinute: number) {
+    this.store.dispatch(playbackActions.playbackFinished({ movieId, stoppedAtMinute }));
+  }
+
+
+
+
+

10.14. == State reduction

+
+

Next, we handle the action inside the watchlistData reducer. Note that actions can be handled by multiple reducers and effects at the same time to update different states, for example if we’d like to show a rating modal after playback has finished.

+
+
+

watchlist-data.reducer.ts

+
+
+
+
export function reducer(state = initialState, action: playbackActions.ActionsUnion): State {
+  switch (action.type) {
+    case playbackActions.playbackFinished.type:
+      return {
+        ...state,
+        items: state.items.map(updatePlaybackMinutesMapper(action.movieId, action.stoppedAtMinute))
+      };
+
+    default:
+      return state;
+  }
+}
+
+export function updatePlaybackMinutesMapper(movieId: number, stoppedAtMinute: number) {
+  return (item: WatchlistItem) => {
+    if (item.movie.id == movieId) {
+      return {
+        ...item,
+        playbackMinutes: stoppedAtMinute
+      };
+    } else {
+      return item;
+    }
+  };
+}
+
+
+
+

Note how we changed the reducer’s function signature to reference the actions union. The switch-case handles all incoming actions to produce the next state. The default case handles all actions a reducer is not interested in by returning the state unchanged. Then we find the watchlist item corresponding to the movie with the given id and update the playback minutes. Since state is immutable, we have to clone all objects down to the one we would like to change using the object spread operator (…​).

+
+
+
+

== =

+
+

Selectors rely on object identity to decide whether the value has to be recalculated. Do not clone objects that are not on the path to the change you want to make. This is why updatePlaybackMinutesMapper returns the same item if the movie id does not match. +== == =

+
+
+
+

10.15. == Alternative state mapping with Immer

+
+

It can be hard to think in immutable changes, especially if your team has a strong background in imperative programming. In this case, you may find the Immer library convenient, which allows to produce immutable objects by manipulating a proxied draft. The same reducer can then be written as:

+
+
+

watchlist-data.reducer.ts with Immer

+
+
+
+
import { produce } from 'immer';
+...
+case playbackActions.playbackFinished.type:
+      return produce(state, draft => {
+        const itemToUpdate = draft.items.find(item => item.movie.id == action.movieId);
+        if (itemToUpdate) {
+          itemToUpdate.playbackMinutes = action.stoppedAtMinute;
+        }
+      });
+
+
+
+

Immer works out of the box with plain objects and arrays.

+
+
+
+

10.16. == Redux devtools

+
+

If the StoreDevToolsModule is instrumented as described above, you can use the browser extension Redux devtools to see all dispatched actions and the resulting state diff, as well as the current state, and even travel back in time by undoing actions.

+
+
+
+Redux Devtools +
+
Figure 68. Redux devtools
+
+
+

Continue with learning about effects

+
+ +
+
+

10.17. Side effects with NgRx/Effects

+
+

Reducers are pure functions, meaning they are side-effect free and deterministic. Many actions however have side effects like sending messages or displaying a toast notification. NgRx encapsulates these actions in effects.

+
+
+

Let’s build a recommended movies list so the user can add movies to their watchlist.

+
+
+
+

10.18. Obtaining the recommendation list from the server

+
+

Create a module for recommendations and add stores and states as in the previous chapter. Add EffectsModule.forRoot([]) to the imports in AppModule below StoreModule.forRoot(). Add effects to the feature module:

+
+
+
+
ng generate effect recommendation/Recommendation -m recommendation/recommendation.module.ts
+
+
+
+

We need actions for loading the movie list, success and failure cases:

+
+
+

recommendation/actions/index.ts

+
+
+
+
import { createAction, props, union } from '@ngrx/store';
+import { Movie } from 'src/app/watchlist/models/movies';
+
+export const loadRecommendedMovies = createAction('[Recommendation List] Load movies');
+export const loadRecommendedMoviesSuccess = createAction('[Recommendation API] Load movies success', props<{movies: Movie[]}>());
+export const loadRecommendedMoviesFailure = createAction('[Recommendation API] Load movies failure', props<{error: any}>());
+
+const actions = union({
+    loadRecommendedMovies,
+    loadRecommendedMoviesSuccess,
+    loadRecommendedMoviesFailure
+});
+
+export type ActionsUnion = typeof actions;
+
+
+
+

In the reducer, we use a loading flag so the UI can show a loading spinner. The store is updated with arriving data.

+
+
+

recommendation/actions/index.ts

+
+
+
+
export interface State {
+  items: Movie[];
+  loading: boolean;
+}
+
+export const initialState: State = {
+  items: [],
+  loading: false
+};
+
+export function reducer(state = initialState, action: recommendationActions.ActionsUnion): State {
+  switch (action.type) {
+    case '[Recommendation List] Load movies':
+      return {
+        ...state,
+        items: [],
+        loading: true
+      };
+
+    case '[Recommendation API] Load movies failure':
+      return {
+        ...state,
+          loading: false
+      };
+
+    case '[Recommendation API] Load movies success':
+      return {
+        ...state,
+        items: action.movies,
+        loading: false
+      };
+
+    default:
+      return state;
+  }
+}
+
+export const getAll = (state: State) => state.items;
+export const isLoading = (state: State) => state.loading;
+
+
+
+

We need an API service to talk to the server. For demonstration purposes, we simulate an answer delayed by one second:

+
+
+

recommendation/services/recommendation-api.service.ts

+
+
+
+
@Injectable({
+  providedIn: 'root'
+})
+export class RecommendationApiService {
+
+  private readonly recommendedMovies: Movie[] = [
+    {
+      id: 2,
+      title: 'The Hunger Games',
+      genre: 'sci-fi',
+      releaseYear: 2012,
+      runtimeMinutes: 144
+    },
+    {
+      id: 4,
+      title: 'Avengers: Endgame',
+      genre: 'fantasy',
+      releaseYear: 2019,
+      runtimeMinutes: 181
+    }
+  ];
+
+  loadRecommendedMovies(): Observable<Movie[]> {
+    return of(this.recommendedMovies).pipe(delay(1000));
+  }
+}
+
+
+
+

Here are the effects:

+
+
+

recommendation/services/recommendation-api.service.ts

+
+
+
+
@Injectable()
+export class RecommendationEffects {
+
+  constructor(
+    private actions$: Actions,
+    private recommendationApi: RecommendationApiService,
+  ) { }
+
+  @Effect()
+  loadBooks$ = this.actions$.pipe(
+    ofType(recommendationActions.loadRecommendedMovies.type),
+    switchMap(() => this.recommendationApi.loadRecommendedMovies().pipe(
+      map(movies => recommendationActions.loadRecommendedMoviesSuccess({ movies })),
+      catchError(error => of(recommendationActions.loadRecommendedMoviesFailure({ error })))
+    ))
+  );
+}
+
+
+
+

Effects are always observables and return actions. In this example, we consume the actions observable provided by NgRx and listen only for the loadRecommendedMovies actions by using the ofType operator. Using switchMap, we map to a new observable, one that loads movies and maps the successful result to a new loadRecommendedMoviesSuccess action or a failure to loadRecommendedMoviesFailure. In a real application we would show a notification in the error case.

+
+
+
+

==

+
+

If an effect should not dispatch another action, return an empty observable. +== ==

+
+ + +
+
+

10.19. Simplifying CRUD with NgRx/Entity

+
+

Most of the time when manipulating entries in the store, we want to create, add, update, or delete entries (CRUD). NgRx/Entity provides convenience functions if each item of a collection has an id property. Luckily all our entities already have this property.

+
+
+

Let’s add functionality to add a movie to the watchlist. First, create the required action:

+
+
+

recommendation/actions/index.ts

+
+
+
+
export const addToWatchlist = createAction('[Recommendation List] Add to watchlist',
+    props<{ watchlistItemId: number, movie: Movie, addedAt: Date }>());
+
+
+
+
+

==

+
+

You may wonder why the Date object is not created inside the reducer instead, since it should always be the current time. However, remember that reducers should be deterministic state machines — State A + Action B should always result in the same State C. This makes reducers easily testable. +== ==

+
+
+

Then, rewrite the watchlistData reducer to make use of NgRx/Entity:

+
+
+

recommendation/actions/index.ts

+
+
+
+
export interface State extends EntityState<WatchlistItem> { (1)
+}
+
+export const entityAdapter = createEntityAdapter<WatchlistItem>(); (2)
+
+export const initialState: State = entityAdapter.getInitialState(); (3)
+
+const entitySelectors = entityAdapter.getSelectors();
+
+export function reducer(state = initialState, action: playbackActions.ActionsUnion | recommendationActions.ActionsUnion): State {
+  switch (action.type) {
+    case playbackActions.playbackFinished.type:
+      const itemToUpdate = entitySelectors
+      .selectAll(state) (4)
+      .find(item => item.movie.id == action.movieId);
+      if (itemToUpdate) {
+        return entityAdapter.updateOne({ (5)
+          id: itemToUpdate.id,
+          changes: { playbackMinutes: action.stoppedAtMinute } (6)
+        }, state);
+      } else {
+        return state;
+      }
+
+    case recommendationActions.addToWatchlist.type:
+      return entityAdapter.addOne({id: action.watchlistItemId, movie: action.movie, added: action.addedAt, playbackMinutes: 0}, state);
+
+    default:
+      return state;
+  }
+}
+
+
+export const getAllItems = entitySelectors.selectAll;
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + +
1NgRx/Entity requires state to extend EntityState. It provides a list of ids and a dictionary of id ⇒ entity entries
2The entity adapter provides data manipulation operations and selectors
3The state can be initialized with getInitialState(), which accepts an optional object to define any additional state beyond EntityState
4selectAll returns an array of all entities
5All adapter operations consume the state object as the last argument and produce a new state
6Update methods accept a partial change definition; you don’t have to clone the object
+
+
+

This concludes the tutorial on NgRx. If you want to learn about advanced topics such as selectors with arguments, testing, or router state, head over to the official NgRx documentation.

+
+
+
+
+
+

11. Cookbook

+
+ +
+

11.1. Abstract Class Store

+
+

The following solution presents a base class for implementing stores which handle state and its transitions. +Working with the base class achieves:

+
+
+
    +
  • +

    common API across all stores

    +
  • +
  • +

    logging (when activated in the constructor)

    +
  • +
  • +

    state transitions are asynchronous by design - sequential order problems are avoided

    +
  • +
+
+
+
Listing 107. Usage Example
+
+
@Injectable()
+export class ModalStore extends Store<ModalState> {
+
+  constructor() {
+    super({ isOpen: false }, !environment.production);
+  }
+
+  closeDialog() {
+    this.dispatchAction('Close Dialog', (currentState) => ({...currentState, isOpen: false}));
+  }
+
+  openDialog() {
+    this.dispatchAction('Open Dialog', (currentState) => ({...currentState, isOpen: true}));
+  }
+
+}
+
+
+
+
Listing 108. Abstract Base Class Store
+
+
import { OnDestroy } from '@angular/core';
+import { BehaviorSubject } from 'rxjs/BehaviorSubject';
+import { Observable } from 'rxjs/Observable';
+import { intersection, difference } from 'lodash';
+import { map, distinctUntilChanged, observeOn } from 'rxjs/operators';
+import { Subject } from 'rxjs/Subject';
+import { queue } from 'rxjs/scheduler/queue';
+import { Subscription } from 'rxjs/Subscription';
+
+interface Action<T> {
+  name: string;
+  actionFn: (state: T) => T;
+}
+
+/** Base class for implementing stores. */
+export abstract class Store<T> implements OnDestroy {
+
+  private actionSubscription: Subscription;
+  private actionSource: Subject<Action<T>>;
+  private stateSource: BehaviorSubject<T>;
+  state$: Observable<T>;
+
+  /**
+   * Initializes a store with initial state and logging.
+   * @param initialState Initial state
+   * @param logChanges When true state transitions are logged to the console.
+   */
+  constructor(initialState: T, public logChanges = false) {
+    this.stateSource = new BehaviorSubject<T>(initialState);
+    this.state$ = this.stateSource.asObservable();
+    this.actionSource = new Subject<Action<T>>();
+
+    this.actionSubscription = this.actionSource.pipe(observeOn(queue)).subscribe(action => {
+      const currentState = this.stateSource.getValue();
+      const nextState = action.actionFn(currentState);
+
+      if (this.logChanges) {
+        this.log(action.name, currentState, nextState);
+      }
+
+      this.stateSource.next(nextState);
+    });
+  }
+
+  /**
+   * Selects a property from the stores state.
+   * Will do distinctUntilChanged() and map() with the given selector.
+   * @param selector Selector function which selects the needed property from the state.
+   * @returns Observable of return type from selector function.
+   */
+  select<TX>(selector: (state: T) => TX): Observable<TX> {
+    return this.state$.pipe(
+      map(selector),
+      distinctUntilChanged()
+    );
+  }
+
+  protected dispatchAction(name: string, action: (state: T) => T) {
+    this.actionSource.next({ name, actionFn: action });
+  }
+
+  private log(actionName: string, before: T, after: T) {
+    const result: { [key: string]: { from: any, to: any} } = {};
+    const sameProbs = intersection(Object.keys(after), Object.keys(before));
+    const newProbs = difference(Object.keys(after), Object.keys(before));
+    for (const prop of newProbs) {
+      result[prop] = { from: undefined, to: (<any>after)[prop] };
+    }
+
+    for (const prop of sameProbs) {
+      if ((<any>before)[prop] !==  (<any>after)[prop]) {
+        result[prop] = { from: (<any>before)[prop], to: (<any>after)[prop] };
+      }
+    }
+
+    console.log(this.constructor.name, actionName, result);
+  }
+
+  ngOnDestroy() {
+    this.actionSubscription.unsubscribe();
+  }
+
+}
+
+
+ +
+
+

11.2. Add Electron to an Angular application using Angular CLI

+
+

This cookbook recipe explains how to integrate Electron in an Angular 10+ application. Electron is a framework for creating native applications with web technologies like JavaScript, HTML, and CSS. As an example, very well known applications as Visual Studio Code, Atom, Slack or Skype (and many more) are using Electron too.

+
+
+ + + + + +
+ + +At the moment of this writing Angular 11.2.0, Electron 11.2.3 and Electron-builder 22.9.1 were the versions available. +
+
+
+

Here are the steps to achieve this goal. Follow them in order.

+
+
+
+

11.3. Add Electron and other relevant dependencies

+
+

There are two different approaches to add the dependencies in the package.json file:

+
+
+
    +
  • +

    Writing the dependencies directly in that file.

    +
  • +
  • +

    Installing using npm install or yarn add.

    +
  • +
+
+
+ + + + + +
+ + +Please remember if the project has a package-lock.json or yarn.lock file use npm or yarn respectively. +
+
+
+

In order to add the dependencies directly in the package.json file, include the following lines in the devDependencies section:

+
+
+
+
"devDependencies": {
+...
+    "electron": "^11.2.3",
+    "electron-builder": "^22.9.1",
+...
+},
+
+
+
+

As indicated above, instead of this npm install can be used:

+
+
+
+
$ npm install -D electron electron-builder
+
+
+
+

Or with yarn:

+
+
+
+
$ yarn add -D electron electron-builder
+
+
+
+
+

11.4. Create the necessary typescript configurations

+
+

In order to initiate electron in an angular app we need to modify the tsconfig.json file and create a tsconfig.serve.json and a tsconfig.base.json in the root folder.

+
+
+
+

11.5. == tsconfig.json

+
+

This file needs to be modified to create references to ./src/tsconfig.app.json and ./src/tsconfig.spec.json to support different configurations.

+
+
+
+
{
+  "files": [],
+  "references": [
+    {
+      "path": "./src/tsconfig.app.json"
+    },
+    {
+      "path": "./src/tsconfig.spec.json"
+    }
+  ]
+}
+
+
+
+
+

11.6. == tsconfig.app.json

+
+
+
{
+  "extends": "../tsconfig.base.json",
+  "compilerOptions": {
+    "outDir": "../app",
+    "module": "es2015",
+    "baseUrl": "",
+    "types": []
+  },
+  "include": [
+    "**/*.ts",
+  ],
+  "exclude": [
+    "**/*.spec.ts"
+  ],
+  "angularCompilerOptions": {
+    "fullTemplateTypeCheck": true,
+    "strictInjectionParameters": true,
+    "preserveWhitespaces": true
+  }
+}
+
+
+
+
+

11.7. == tsconfig.spec.json

+
+
+
{
+  "extends": "../tsconfig.base.json",
+  "compilerOptions": {
+    "outDir": "../spec",
+    "module": "commonjs",
+    "types": [
+      "jasmine",
+      "node"
+    ]
+  },
+  "files": [
+    "test.ts",
+  ],
+  "include": [
+    "**/*.spec.ts",
+    "**/*.d.ts"
+  ],
+  "exclude": [
+    "dist",
+    "release",
+    "node_modules"
+  ]
+}
+
+
+
+
+

11.8. == tsconfig.base.json

+
+

This is shared between tsconfig.app.json and tsconfig.spec.json and it will be extended on each config file.

+
+
+
+
{
+  "compileOnSave": false,
+  "compilerOptions": {
+    "outDir": "./dist",
+    "sourceMap": true,
+    "declaration": false,
+    "moduleResolution": "node",
+    "emitDecoratorMetadata": true,
+    "experimentalDecorators": true,
+    "target": "es5",
+    "typeRoots": [
+      "node_modules/@types"
+    ],
+    "lib": [
+      "es2017",
+      "es2016",
+      "es2015",
+      "dom"
+    ]
+  },
+  "files": [
+    "electron-main.ts",
+    "src/polyfills.ts"
+  ],
+  "include": [
+    "src/**/*.d.ts"
+  ],
+  "exclude": [
+    "node_modules"
+  ]
+}
+
+
+
+
+

11.9. == tsconfig.serve.json

+
+

In the root, tsconfig.serve.json needs to be created. This typescript config file is going to be used when we serve electron:

+
+
+
+
{
+  "compilerOptions": {
+    "outDir": ".",
+    "sourceMap": true,
+    "declaration": false,
+    "moduleResolution": "node",
+    "emitDecoratorMetadata": true,
+    "experimentalDecorators": true,
+    "target": "es5",
+    "typeRoots": [
+      "node_modules/@types"
+    ],
+    "lib": [
+      "es2017",
+      "dom"
+    ]
+  },
+  "include": [
+    "electron-main.ts"
+  ],
+  "exclude": [
+    "node_modules",
+    "**/*.spec.ts"
+  ]
+}
+
+
+
+
+

11.10. Add Electron build configuration

+
+

In order to configure electron builds properly we need to create a new json on our application, let’s call it electron-builder.json. For more information and fine tuning please refer to the Electron Builder official documentation.

+
+
+

The contents of the file will be something similar to the following:

+
+
+
+
{
+  "productName": "devon4ngElectron",
+  "directories":{
+    "output": "./builder-release"
+  },
+  "win": {
+    "icon": "dist/assets/icons",
+    "target": [
+      "portable"
+    ]
+  },
+  "mac": {
+    "icon": "dist/assets/icons",
+    "target": [
+      "dmg"
+    ]
+  },
+  "linux": {
+    "icon": "dist/assets/icons",
+    "target": [
+      "AppImage"
+    ]
+  }
+}
+
+
+
+

There are two important things in this file:

+
+
+
    +
  1. +

    "output": this is where electron builder is going to build our application

    +
  2. +
  3. +

    "icon": in every OS possible there is an icon parameter, the route to the icon folder that will be created after building with angular needs to be used here. This will make it so the electron builder can find the icons and build.

    +
  4. +
+
+
+
+

11.11. Modify angular.json

+
+

angular.json has to be modified so the project is built inside /dist without an intermediate folder.

+
+
+
+
{
+  "architect": {
+    "build": {
+      "outputPath": "dist"
+    }
+  }
+}
+
+
+
+
+

11.12. Create the electron window in electron-main.ts

+
+

In order to use electron, a file needs to be created at the root of the application (electron-main.ts). This file will create a window with different settings checking if we are using --serve as an argument:

+
+
+
+
import { app, BrowserWindow } from 'electron';
+import * as path from 'path';
+import * as url from 'url';
+
+let win: any;
+const args: any = process.argv.slice(1);
+const serve: any = args.some((val) => val === '--serve');
+
+const createWindow:any = ()=>{
+  // Create the browser window.
+  win = new BrowserWindow({
+    fullscreen: true,
+    webPreferences: {
+      nodeIntegration: true,
+    }
+  });
+
+  if (serve) {
+    require('electron-reload')(__dirname, {
+      electron: require(`${__dirname}/node_modules/electron`)
+    });
+    win.loadURL('http://localhost:4200');
+  } else {
+    win.loadURL(
+      url.format({
+        pathname: path.join(__dirname, 'dist/index.html'),
+        protocol: 'file:',
+        slashes: true
+      })
+    );
+  }
+
+  if (serve) {
+    win.webContents.openDevTools();
+  }
+
+  // Emitted when the window is closed.
+  win.on('closed', () => {
+    // Dereference the window object, usually you would store window
+    // in an array if your app supports multi windows, this is the time
+    // when you should delete the corresponding element.
+    // tslint:disable-next-line:no-null-keyword
+    win = null;
+  });
+}
+
+try {
+  // This method will be called when Electron has finished
+  // initialization and is ready to create browser windows.
+  // Some APIs can only be used after this event occurs.
+  app.on('ready', createWindow);
+
+   // Quit when all windows are closed.
+  app.on('window-all-closed', () => {
+    // On OS X it is common for applications and their menu bar
+    // to stay active until the user quits explicitly with Cmd + Q
+    if (process.platform !==  'darwin') {
+      app.quit();
+    }
+  });
+
+   app.on('activate', () => {
+    // On OS X it's common to re-create a window in the app when the
+    // dock icon is clicked and there are no other windows open.
+    if (win == null) {
+      createWindow();
+    }
+  });
+} catch (e) {
+  // Catch Error
+  // throw e;
+}
+
+
+
+
+

11.13. Add the electron window and improve the package.json scripts

+
+

Inside package.json we need to add a main entry pointing to electron-main.js, which is generated from electron-main.ts when building.

+
+
+
+
{
+  ....
+  "main": "electron-main.js",
+  "scripts": {...}
+  ....
+}
+
+
+
+

The scripts section in the package.json can be improved to avoid running too verbose commands. As a very complete example we can take a look to the My Thai Star’s scripts section and copy the lines useful in your project. In any case, at least we recommend to add the following lines:

+
+
+
+
  "scripts": {
+    "ng": "ng",
+    "start": "ng serve",
+    "build": "ng build",
+    "test": "ng test",
+    "lint": "ng lint",
+    "e2e": "ng e2e",
+    "electron:tsc": "tsc -p tsconfig.serve.json",
+    "electron:run": "npm run electron:tsc && ng build --base-href ./ && npx electron .",
+    "electron:serve": "npm run electron:tsc && npx electron . --serve",
+    "electron:pack": "npm run electron:tsc && electron-builder --dir --config electron-builder.json",
+    "electron:build": "npm run electron:tsc && electron-builder --config electron-builder.json build"
+  },
+
+
+
+

The electron: scripts do the following:

+
+
+
    +
  • +

    electron:tsc: Compiles electron TS files.

    +
  • +
  • +

    electron:run: Serves Angular app and runs electron.

    +
  • +
  • +

    electron:serve: Serves electron with an already running angular app (i.e. a ng serve command running on another terminal).

    +
  • +
  • +

    electron:pack: Packs electron app.

    +
  • +
  • +

    electron:build: Builds electron app.

    +
  • +
+
+
+
+

11.14. Add Electron to an Angular application using Nx CLI

+
+

Creating an Electron app is very easy and straight-forward if you are using Nx CLI. As a pre-requisite, you should already have an application in your Nx workspace which you want to run as a front-end in your Electron app. (You can follow this guide if you want to get started with Nx).

+
+
+

Follow the steps below to develop an Electron app in your Nx workspace:

+
+
+
+

11.15. Install nx-electron

+
+

Install nx-electron using the command:

+
+
+
+
  npm install -D nx-electron
+
+
+
+

This will add the packages electron and nx-electron as dev dependencies to your Nx workspace. This will help us generate our Electron app in the next step.

+
+
+
+

11.16. Generate your Electron app

+
+

Once you have installed nx-electron, you can generate your electron app using the command:

+
+
+
+
  nx g nx-electron:app <electron-app-name> --frontendProject=<frontend-app-name>
+
+
+
+

And that is it! You have generated your Electron app already. All the configuration files (tsconfig.*) are generated for you under <electron-app-name> in your Nx workspace.

+
+
+
+

11.17. Serving your app

+
+

You can use this command to serve your Electron app:

+
+
+
+
  nx run-many --target=serve --projects=<frontend-app-name>,<electron-app-name> --parallel
+
+
+
+

If you see a blank application, it is because the Electron app was served before the front-end was served. To avoid this, you can serve the front-end and back-end separately, (that is, serve the back-end only after the front-end is served).

+
+
+
+

11.18. Building your app

+
+

The command for building your Electron app in Nx is similar to the serve command above, you only change the target from serve to build:

+
+
+
+
  nx run-many --target=build --projects=<frontend-app-name>,<electron-app-name> --parallel
+
+
+
+
+

11.19. Packaging your app

+
+

Make sure you have built your app before you try to package it using the following command:

+
+
+
+
  nx run <electron-app-name>:package [--options]
+
+
+
+

The options that can be passed can be found here.

+
+
+

You can find a working example of an Electron app in devon4ts-samples.

+
+
+

Unresolved include directive in modules/ROOT/pages/devon4ng.wiki/master-devon4ng.adoc - include::guide-angular-mock-service.adoc.adoc[]

+
+ +
+
+

11.20. Testing e2e with Cypress

+
+

This guide will cover the basics of e2e testing using Cypress.

+
+
+

Cypress is a framework “all in one” that provides the necessary libraries to write specific e2e tests, without the need of Selenium.

+
+
+

Why Cypress?

+
+
+
    +
  • +

    Uses JavaScript

    +
  • +
  • +

    It works directly with the browser so the compatibility with the front-end framework the project uses (in this case Angular) is not a problem.

    +
  • +
  • +

    Easy cross browser testing

    +
  • +
+
+
+
+

11.21. Setup

+
+

Install +First of all we need to install it, we can use npm install:

+
+
+
+
$ npm install -D cypress
+
+
+
+

Or we can install it with yarn:

+
+
+
+
$ yarn add -D cypress
+
+
+
+

We need to run Cypress in order to get the folder tree downloaded, then create a tsconfig.json file inside cypress folder to add the typescript configuration.

+
+
+
+
$ . /node_modules/.bin/cypress open
+
+
+
+
Listing 109. tsconfig.json
+
+
{
+  "compilerOptions": {
+    "strict": true,
+    "baseUrl": "../node_modules",
+    "target": "es5",
+    "lib": ["es5", "dom"],
+    "types": ["cypress"]
+  },
+  "include": [
+    "**/*.ts"
+  ]
+}
+
+
+
+

BaseUrl

+
+
+

Let’s setup the base URL so when we run the tests cypress will "navigate" to the right place, go to cypress.json on the root of the project.

+
+
+
Listing 110. cypress.json
+
+
{
+  "baseUrl": "http://localhost:4200"
+}
+
+
+
+
+

11.22. Files / Structure

+
+
+
/cypress
+  tsconfig.json
+  /fixtures
+    - example.json
+  /integration
+    - button.spec.ts
+    - test.spec.ts
+    /examples
+  /plugins
+    - index.js
+  /support
+    - commands.js
+    - index.js
+
+
+
+

tsconfig.json for typescript configuration.

+
+
+

fixtures to store our mock data or files (images, mp3…​) to use on our tests.

+
+
+

integration is where our tests go, by default it comes with an examples folder with tested samples.

+
+
+

plugins is where the configuration files of the plugins go.

+
+
+

support to add custom commands.

+
+
+
+

== =

+
+

If you are using Nx, it automatically generates a e2e cypress project for every project that you generate. So you already get the configuration files like tsconfig.json and cypress.json and also get the folder structure described above. This helps you focus more on writing your tests rather than setting up Cypress.

+
+
+
+

11.23. == =

+ +
+
+

11.24. Tests

+
+

The structure is the same as Mocha's.

+
+
+

First, we create a file, for example form.spec.ts, inside we define a context to group all our tests referred to the same subject.

+
+
+
Listing 111. form.spec.ts
+
+
context('Button page', () => {
+  beforeEach(() => {
+    cy.visit('/');
+  });
+  it('should have button',()=>{
+    cy.get('button').should('exist');
+  });
+  it('should contain PRESS',()=>{
+    cy.contains('button', 'PRESS');
+  });
+});
+
+
+
+
beforeEach
+

Visit '/' before every test.

+
+
+
it
+

Inside we write the test.

+
+
+

The result:

+
+
+
+contextImg +
+
+
+

For more info check Cypress documentation

+
+
+

On kitchensink +you can find an official cypress demo with all the commands being used.

+
+
+
+

11.25. Fixtures

+
+

We use fixtures to mock data, it can be a json, an image, video…​

+
+
+
+
{
+  "name": "Dummy name",
+  "phone": "999 99 99 99",
+  "body": "Mock data"
+}
+
+
+
+

You can store multiple mocks on the same fixture file.

+
+
+
+
{
+  "create":{"name": "e2etestBox"},
+  "boxFruit":{
+    "uuid":"3376339576e33dfb9145362426a33333",
+    "name":"e2etestBox",
+    "visibility":true,
+    "items":[
+      {"name":"apple","units":3},
+      {"name":"kiwi","units":2}
+    ]
+  }
+}
+
+
+
+

To access data we don’t need to import any file, we just call cy.fixture(filename) inside the **.spec.ts. We can name it as we want.

+
+
+
+
cy.fixture('box.json').as('fruitBox')
+
+
+
+

cy.fixture('box.json') we get access to box.json +.as(fruitBox) is used to create an alias (fruitBox) to the fixture.

+
+
+

For more info check Fixtures documentation

+
+
+
+

11.26. Request / Route

+
+

With cypress you can test your application with real data or with mocks.

+
+
+

Not using mocks guarantees that your tests are real e2e test but makes them vulnerable to external issues. +When you mock data you don’t know exactly if the data and the structure received from the backend is correct because you are forcing a mock on the response, but you can avoid external issues, run test faster and have better control on the structure and status.

+
+
+

To get more information go to Testing Strategies

+
+
+
+

11.27. Route

+
+

Cypress can intercept a XHR request and interact with it.

+
+
+
+
cy.server();
+cy.route(
+  'GET',
+  '/apiUrl/list',
+  [{"name":"apple", "units":3},{"name":"kiwi", "units":2}]
+)
+
+
+
+

cy.server(options) start a server to interact with the responses.

+
+
+
cy.route(options) intercepts a XMLHttpRequests
+
    +
  • +

    method GET

    +
  • +
  • +

    URL /apiUrl/list'

    +
  • +
  • +

    response [{"name":"apple", "units":3},{"name":"kiwi", "units":2}]

    +
  • +
+
+
+

Waits

+
+
+

Every cypress action has a default await time to avoid asynchronous issues, but this time can be short for some particular actions like API calls, for those cases we can use cy.wait().

+
+
+
+
cy.server();
+cy.route('/apiUrl/list').as('list');
+cy.visit('/boxList');
+cy.wait('@list');
+
+
+
+

You can find more information about cy.wait() here

+
+
+

To mock data with fixtures:

+
+
+
+
cy.fixture('box')
+  .then(({boxFruit}) => {
+    cy.route(
+      'GET',
+      '/apiUrl/list',
+      boxFruit
+    ).as('boxFruit');
+    cy.get('#button').click();
+    cy.wait('@boxFruit');
+    cy.get('#list').contains('apple');
+  })
+
+
+
+

We get boxFruit data from the box fixture and then we mock the API call with it so now the response of the call is boxFruit object. +When the button is clicked, it waits to receive the response of the call and then checks if the list contains one of the elements of the fruitBox.

+
+
+
+

11.28. Request

+
+

Make a HTTP request.

+
+
+
+
cy.server();
+cy.request('http://localhost:4200/')
+  .its('body')
+  .should('include', '<h1>Welcome to Devon4ngAngularElementsTest!</h1>');
+
+
+
+

If we have 'http://localhost:4200' as baseUrl on cypress.json

+
+
+
+
cy.server();
+cy.request('/')
+  .its('body')
+  .should('include', '<h1>Welcome to Devon4ngAngularElementsTest!</h1>');
+// Goes to http://localhost:4200/
+
+
+
+

We can add other options, like we can send the body of a form.

+
+
+
+
cy.server();
+cy.request({
+  method: 'POST',
+  url: '/send',
+  form: true,
+  body: {
+    name: 'name task',
+    description: 'description of the task'
+  }
+});
+
+
+
+
+

11.29. Custom commands

+
+

If you see yourself writing the same test more than once (login is a common one), you can create a custom command to make things faster.

+
+
+

Cypress.Commands.add('name', ()⇒{}) to create the test.

+
+
+
Listing 112. commands.ts
+
+
Cypress.Commands.add('checkPlaceholder', (name) => {
+  cy.get(`[name='${name}']`).click();
+  cy.get('mat-form-field.mat-focused').should('exist');
+});
+
+
+
+
index.ts
+

To use the commands we need to import the files on support/index.ts

+
+
+
Listing 113. index.ts
+
+
import './commands'
+import './file1'
+import './folder/file2'
+
+
+
+

index.ts is where all our custom commands files unite so Cypress knows where to find them.

+
+
+

And as we are using typescript we need to define a namespace, interface and define our function.

+
+
+
    +
  • +

    index.d.ts

    +
  • +
+
+
+
+
declare namespace Cypress {
+  interface Chainable<Subject> {
+    checkPlaceholder(name:string):Chainable<void>
+  }
+}
+
+
+ +
+
+

11.30. Cross browser testing

+
+

By default the browser used by Cypress is Chrome; it is compatible with its family of browsers (including Microsoft Edge) and has beta support for Mozilla Firefox.

+
+
+

To change the browser on the panel we can do it by selecting the desired one on the browsers tab before running the spec file.

+
+
+

Cypress will detect and display, except electron, only the browsers that you have already installed on your machine.

+
+
+
+browserTab +
+
+
+

Once the browser is selected, you can run your tests.

+
+
+

To change the browser on the automatic test run, you can add a flag on the node command

+
+
+
+
cypress run --browser edge
+
+
+
+

Only if we use the cypress run command.

+
+
+

Or we can change the script file.

+
+
+
    +
  • +

    cypress/script.js

    +
  • +
+
+
+
+
const runTests= async ()=>{
+  ...
+  const {totalFailed} = await cypress.run({browser:'edge'});
+  ...
+};
+
+
+ +
+
+

11.31. Viewport

+
+

Cypress allow us to create tests depending on the Viewport, so we can test responsiveness.

+
+
+

There are different ways to use it:

+
+
+

Inside a test case

+
+
+
+
it('should change title when viewport is less than 320px', ()=>{
+  cy.get('.title-l').should('be.visible');
+  cy.get('.title-s').should('not.be.visible');
+  cy.viewport(320, 480);
+  cy.get('.title-l').should('not.be.visible');
+  cy.get('.title-s').should('be.visible');
+})
+
+
+
+

Passing the configuration as an option

+
+
+
+
describe('page display on medium size screen', {
+  viewportHeight: 1000,
+  viewportWidth: 400
+}, () => {
+  ...
+})
+
+
+
+

Or we can set a default

+
+
+
    +
  • +

    cypress.json

    +
  • +
+
+
+
+
...
+{
+ "viewportHeight": 1000,
+ "viewportWidth": 400
+}
+...
+
+
+ +
+
+

11.32. Test retries

+
+

We can get intermittent false negatives due to external issues that affect our tests. To handle this, we can add a retries entry in the configuration so Cypress runs a failed test again the selected number of times to verify that the error is real.

+
+
+

We can set retries for run or open mode.

+
+
+
    +
  • +

    cypress.json

    +
  • +
+
+
+
+
...
+"retries": {
+    "runMode": 3,
+    "openMode": 3
+  }
+...
+
+
+
+

The retries can be configured on the cypress.json or directly on a specific test.

+
+
+
+
it('should get button', {
+  retries: {
+    runMode: 2,
+    openMode: 2
+  }
+}, () => {
+  ...
+})
+
+
+
+

These retries are not shown in the test log.

+
+
+

Check more on retries documentation

+
+
+
+

11.33. Reporter

+
+

The tests results appear on the terminal, but to have a more friendly view we can add a reporter.

+
+
+
+reporter +
+
+
+
+

11.34. Mochawesome

+
+

In this case we are going to use Mochawesome, initially its a Mocha reporter but as Cypress uses Mocha it works the same.

+
+
+

Install

+
+
+

npm

+
+
+
+
npm install --save-dev mochawesome
+
+
+
+

yarn

+
+
+
+
yarn add -D mochawesome
+
+
+
+

To run the reporter:

+
+
+
+
cypress run --reporter mochawesome
+
+
+
+

Mochawesome saves by default the generated files on `./mochawesome-report/` but we can add options to change this behavior.

+
+
+

Options can be passed to the reporter in two ways

+
+
+

Using a flag

+
+
+
+
cypress run --reporter mochawesome --reporter-options reportDir=report
+
+
+
+

Or on cypress.json

+
+
+
+
{
+  "baseUrl": "http://localhost:4200",
+  "reporter": "mochawesome",
+  "reporterOptions": {
+    "overwrite": false,
+    "html": false,
+    "json": true,
+    "reportDir": "cypress/report"
+  }
+}
+
+
+
+

Overwrite:false to not overwrite every **:spec.ts test report, we want them to create a merged version later.

+
+
+

reportDir to set a custom directory.

+
+
+

html:false because we don’t need it.

+
+
+

json:true to save them on json.

+
+
+

Mochawesome only creates the html file of the last .spec.ts file that the tests run, that’s why we don’t generate html reports directly, in order to stack them all on the same final html we need to merge the reports.

+
+ +
+

mochawesome-merge

+
+
+

Mochawesome-merge is a library that helps us to merge the different json.

+
+
+

npm

+
+
+
+
npm install --save-dev mochawesome-merge
+npm install --save-dev mochawesome-report-generator
+
+
+
+

yarn

+
+
+
+
yarn add -D mochawesome-merge
+yarn add -D mochawesome-report-generator
+
+
+
+

To merge the files we execute this command:

+
+
+
+
mochawesome-merge cypress/report/*.json > cypress/reportFinal.json
+
+
+
+

reportFinal.json is the result of this merge; with that we have the data of all the spec files in one json.

+
+
+

We can also automate the test, merge and conversion to html using a script.

+
+
+
+
const cypress = require('cypress');
+const fse = require('fs-extra');
+const { merge } = require('mochawesome-merge');
+const generator = require('mochawesome-report-generator');
+const runTests= async ()=>{
+  await fse.remove('mochawesome-report');
+  await fse.remove('cypress/report');
+  const {totalFailed} = await cypress.run();
+  const reporterOptions = {
+    files: ["cypress/report/*.json"]
+  };
+  await generateReport(reporterOptions);
+  if(totalFailed !==  0){
+    process.exit(2);
+  };
+};
+const generateReport = (options)=> {
+  return merge(options).then((jsonReport)=>{
+    generator.create(jsonReport).then(()=>{
+      process.exit();
+    });
+  });
+};
+runTests();
+
+
+
+

fse.remove() to remove older reports data.

+
+
+

cypress.run() to run the tests.

+
+
+

merge(options) we merge the json output from running the tests.

+
+
+

generator.create(jsonReport) then we generate the html view of the report.

+
+ +
+

On kitchensink +you can find an official cypress demo with all the commands being used.

+
+ +
+
+

11.35. Angular ESLint support

+
+ + + + + +
+ + +ESLint is supported in Angular 10.1.0 onward. +
+
+
+
+

11.36. What about TSLint?

+
+

TSLint is a fantastic tool. It is a linter that was written specifically to work based on the TypeScript AST format. This has advantages and disadvantages, as with most decisions we are faced with in software engineering!

+
+
+

One advantage is there is no tooling required to reconcile differences between ESLint and TypeScript AST formats, but the major disadvantage is that the tool is therefore unable to reuse any of the previous work which has been done in the JavaScript ecosystem around linting, and it has to re-implement everything from scratch. Everything from rules to auto-fixing capabilities and more.

+
+
+

However, the backers behind TSLint announced in 2019 that they would be deprecating TSLint in favor of supporting typescript-eslint in order to benefit the community. You can read more about that here

+
+
+

The TypeScript Team themselves also announced their plans to move the TypeScript codebase from TSLint to typescript-eslint, and they have been big supporters of this project. More details at https://github.com/microsoft/TypeScript/issues/30553

+
+
+

Angular ESLint support comes from the angular-eslint tooling package. Angular documentation also links to this repository as you can check in the ng lint section of the Angular CLI documentation.

+
+
+
+

11.37. Quick start with Angular and ESLint

+
+

In order to create a brand new Angular CLI workspace which uses ESLint instead of TSLint and Codelyzer, simply run the following commands:

+
+
+
+
##Install the Angular CLI and @angular-eslint/schematics globally however you want (e.g. npm, yarn, volta etc)
+
+$ npm i -g @angular/cli @angular-devkit/core @angular-devkit/schematics @angular-eslint/schematics
+
+##Create a new Angular CLI workspace using the @angular-eslint/schematics collection (instead of the default)
+
+$ ng new --collection=@angular-eslint/schematics
+
+
+
+
+

11.38. Migrating an Angular CLI project from Codelyzer and TSLint

+ +
+
+

11.39. 1 - Add relevant dependencies

+
+

The first step is to run the schematic to add @angular-eslint to your project:

+
+
+
+
$ ng add @angular-eslint/schematics
+
+
+
+

This will handle installing the latest version of all the relevant packages for you and adding them to the devDependencies of your package.json.

+
+
+
+

11.40. 2 - Run the convert-tslint-to-eslint schematic on a project

+
+

The next thing to do is consider which "project" you want to migrate to use ESLint. If you have a single application in your workspace you will likely have just a single entry in the projects configuration object within your angular.json file. If you have a projects/ directory in your workspace, you will have multiple entries in your projects configuration and you will need to choose which one you want to migrate using the convert-tslint-to-eslint schematic.

+
+
+

You can run it like so:

+
+
+
+
$ ng g @angular-eslint/schematics:convert-tslint-to-eslint {{YOUR_PROJECT_NAME_GOES_HERE}}
+
+
+
+

From now on, ng lint will use ESLint!

+
+
+
+

11.41. 3 - Remove root TSLint configuration and use only ESLint

+
+

Once you are happy with your ESLint setup, you simply need to remove the root-level tslint.json and potentially uninstall TSLint and any TSLint-related plugins/dependencies if your Angular CLI workspace is now no longer using TSLint at all.

+
+ +
+

==.net

+
+
+
+
+
+

12. Architecture basics

+
+ +
+

12.1. Introduction

+
+

The devonfw platform provides a solution to building applications which combine best-in-class frameworks and libraries as well as industry proven practices and code conventions. +It massively speeds up development, reduces risks and helps you to deliver better results.

+
+
+
+

12.2. Overview Onion Design

+
+

This guide shows the overall proposed architecture in terms of separated layers making use of the Onion architecture pattern. Each layer represents a logical group of components and functionality. In this guide you will learn the basics of the proposed layer-based architecture in order to develop software making use of the best practices.

+
+
+
+

12.3. Layer specification

+
+
+
+

It is important to understand the distinction between layers and tiers. Layers describe the logical groupings of the functionality and components in an application; whereas tiers describe the physical distribution of the functionality and components on separate servers, computers, networks, or remote locations. Although both layers and tiers use the same set of names (presentation, business, services, and data), remember that only tiers imply a physical separation. It is quite common to locate more than one layer on the same physical machine (the same tier). You can think of the term tier as referring to physical distribution patterns such as two-tier, three-tier, and n-tier.

+
+
+
+— Layered Application Guidelines
+MSDN Microsoft +
+
+
+

The proposed architecture makes use of cooperating components called layers. To develop specific functionality each layer contains a set of components which is capable to develop such functionalities.

+
+
+

The next figure represents the different layers:

+
+
+
+technical architecture +
+
Figure 69. High level architecture representation
+
+
+

The layers are separated in physical tiers making use of interfaces. This pattern makes possible to be flexible in different kind of projects maximizing performance and deployment strategies (synchronous/asynchronous access, security, component deployment in different environments, microservices…​). Another important point is to provide automated unit testing or test-driven development (TDD) facilities.

+
+
+
+

12.4. == Application layer

+
+

The Application Layer encapsulates the different .Net projects and its resource dependencies and manages the user interaction depending on the project’s nature.

+
+
+
+technical architecture +
+
Figure 70. Net application stack
+
+
+

The provided application template implements a dotnet API application. It also integrates the Swagger client by default. This provides the possibility to share the contract with external applications (angular, mobile apps, external services…​).

+
+
+
+

12.5. == Business layer

+
+

The business layer implements the core functionality of the application and encapsulates the component’s logic. This layer provides the interface between the data transformation and the application exposition. This allows the data to be optimized and ready for different data consumers.

+
+
+

This layer may implement for each main entity the API controller, the entity related service and other classes to support the application logic.

+
+
+

In order to implement the service logic, the services class must follow the next specification:

+
+
+
+
    public class Service<TContext> : IService where TContext: DbContext
+
+
+
+

PE: devon4Net API template shows how to implement the TODOs service as follows:

+
+
+
+
    public class TodoService: Service<TodoContext>, ITodoService
+
+
+
+

Where Service is the base service class to be inherited and have full access for the Unit of work, TodoContext is the TODOs database context and ITodoService is the interface of the service, which exposes the public extended methods to be implemented.

+
+
+
+

12.6. == Data layer

+
+

The data layer orchestrates the data obtained between the Domain Layer and the Business Layer. Also transforms the data to be used more efficiently between layers.

+
+
+

So, if a service needs the help of another service or repository, the implemented Dependency Injection is the solution to accomplish the task.

+
+
+

The main aim of this layer is to implement the repository for each entity. The repository’s interface is defined in the Domain layer.

+
+
+

In order to implement the repository logic, the repository class must follow the next specification:

+
+
+
+
    Repository<T> : IRepository<T> where T : class
+
+
+
+

PE: devon4Net API template shows how to implement the TODOs repository as follows:

+
+
+
+
    public class TodoRepository : Repository<Todos>, ITodoRepository
+
+
+
+

Where Repository is the base repository class to be inherited, with full access to the basic CRUD operations, and Todos is the entity defined in the database context. ITodoRepository is the interface of the repository, which exposes the public extended methods to be implemented.

+
+
+ + + + + +
+ + +Please remember that <T> is the mapped class which reference the entity from the database context. This abstraction allows to write services implementation with different database contexts +
+
+
+
+

12.7. == Domain layer

+
+

The domain layer provides access to data directly exposed from other systems. The main source is used to be a data base system. The provided template makes use of Entity Framework solution from Microsoft in order to achieve this functionality.

+
+
+

To make good use of this technology, the Repository Pattern has been implemented with the help of the Unit Of Work pattern. Also, the use of generic types makes this solution as flexible as possible.

+
+
+

Regarding the database source, each entity is mapped as a class. The Repository pattern allows using these mapped classes to access the database via Entity Framework:

+
+
+
+
 public class UnitOfWork<TContext> : IUnitOfWork<TContext> where TContext : DbContext
+
+
+
+ + + + + +
+ + +Where <T> is the mapped class which reference the entity from the database. +
+
+
+

The repository and unit of work patterns create an abstraction layer between the data access layer and the business logic layer of an application.

+
+
+ + + + + +
+ + +Domain Layer has no dependencies with other layers. It contains the Entities, datasources and the Repository Interfaces. +
+
+
+
+

12.8. devon4Net architecture layer implementation

+
+

The next picture shows how the devon4Net API template implements the architecture described in previous points:

+
+
+
+devon4Net api template architecture implementation +
+
Figure 71. devon4Net architecture implementations
+
+
+
+

12.9. == Cross-Cutting concerns

+
+

Cross-cutting provides the implementation functionality that spans layers. Each functionality is implemented through components able to work stand alone. This approach provides better reusability and maintainability.

+
+
+

A common component set of cross cutting components include different types of functionality regarding to authentication, authorization, security, caching, configuration, logging, and communication.

+
+
+
+

12.10. Communication between Layers: Interfaces

+
+

The main target of the use of interfaces is to achieve loose coupling between layers and minimize dependencies.

+
+
+

Public interfaces allow to hide implementation details of the components within the layers making use of dependency inversion.

+
+
+

In order to make this possible, we make use of Dependency Injection Pattern (implementation of dependency inversion) given by default in .Net Core.

+
+
+

The provided Data Layer contains the abstract classes to inherit from. All new repository and service classes must inherit from them, and they must also implement their own interfaces.

+
+
+
+technical architecture +
+
Figure 72. Architecture representation in deep
+
+
+
+

12.11. Templates

+ +
+
+

12.12. State of the art

+
+

The provided bundle contains the devon4Net API template based on .net core. The template allows to create a microservice solution with minimal configuration.

+
+
+

Also, the devon4Net framework can be added to third party templates such as the Amazon API template to use lambdas in serverless environments.

+
+
+

Included features:

+
+
+
    +
  • +

    Logging:

    +
  • +
  • +

    Text File

    +
  • +
  • +

    Sqlite database support

    +
  • +
  • +

    Serilog Seq Server support

    +
  • +
  • +

    Graylog integration ready through TCP/UDP/HTTP protocols

    +
  • +
  • +

    API Call params interception (simple and compose objects)

    +
  • +
  • +

    API error exception management

    +
  • +
  • +

    Swagger:

    +
  • +
  • +

    Swagger autogenerating client from comments and annotations on controller classes

    +
  • +
  • +

    Full swagger client customization (Version, Title, Description, Terms, License, Json end point definition)

    +
  • +
  • +

    Easy configuration with just one configuration node in your settings file

    +
  • +
  • +

    JWT:

    +
  • +
  • +

    Issuer, audience, token expiration customization by external file configuration

    +
  • +
  • +

    Token generation via certificate

    +
  • +
  • +

    MVC inherited classes to access JWT user properties

    +
  • +
  • +

    API method security access based on JWT Claims

    +
  • +
  • +

    CORS:

    +
  • +
  • +

    Simple CORS definition ready

    +
  • +
  • +

    Multiple CORS domain origin definition with specific headers and verbs

    +
  • +
  • +

    Headers:

    +
  • +
  • +

    Automatic header injection with middleware.

    +
  • +
  • +

    Supported header definitions: AccessControlExposeHeader, StrictTransportSecurityHeader, XFrameOptionsHeader, XssProtectionHeader, XContentTypeOptionsHeader, ContentSecurityPolicyHeader, PermittedCrossDomainPoliciesHeader, ReferrerPolicyHeader

    +
  • +
  • +

    Reporting server:

    +
  • +
  • +

    Partial implementation of reporting server based on My-FyiReporting (now runs on linux container)

    +
  • +
  • +

    Testing:

    +
  • +
  • +

    Integration test template with sqlite support

    +
  • +
  • +

    Unit test template

    +
  • +
  • +

    Moq, xunit frameworks integrated

    +
  • +
  • +

    Circuit breaker:

    +
  • +
  • +

    Integrated with HttpClient factory

    +
  • +
  • +

    Client Certificate customization

    +
  • +
  • +

    Number of retries customizables

    +
  • +
  • +

    LiteDB:

    +
  • +
  • +

    Support for LiteDB

    +
  • +
  • +

    Provided basic repository for CRUD operations

    +
  • +
  • +

    RabbitMq:

    +
  • +
  • +

    Use of EasyQNet library to perform CQRS main functions between different microservices

    +
  • +
  • +

    Send commands / Subscribe queues with one C# sentence

    +
  • +
  • +

    Events management: Handled received commands to subscribed messages

    +
  • +
  • +

    Automatic messaging backup when sent and handled (Internal database via LiteDB and database backup via Entity Framework)

    +
  • +
  • +

    MediatR:

    +
  • +
  • +

    Use of MediatR library to perform CQRS main functions in memory

    +
  • +
  • +

    Send commands / Subscribe queues with one C# sentence

    +
  • +
  • +

    Events management: Handled received commands to subscribed messages

    +
  • +
  • +

    Automatic messaging backup when sent and handled (Internal database via LiteDB and database backup via Entity Framework)

    +
  • +
  • +

    SmaxHcm:

    +
  • +
  • +

    Component to manage Microfocus SMAX for cloud infrastructure services management

    +
  • +
  • +

    CyberArk:

    +
  • +
  • +

    Manage safe credentials with CyberArk

    +
  • +
  • +

    AnsibleTower:

    +
  • +
  • +

    Ansible automates the cloud infrastructure. devon4net integrates with Ansible Tower via API consumption endpoints

    +
  • +
  • +

    gRPC+Protobuf:

    +
  • +
  • +

    Added Client + Server basic templates sample gRPC with Google’s Protobuf protocol using devon4net

    +
  • +
  • +

    Kafka:

    +
  • +
  • +

    Added Apache Kafka support for deliver/consume messages and create/delete topics as well

    +
  • +
+
+
+
+

12.13. Software stack

+
+
Technology Stack of devon4Net
+

|== == == == == == == == == == == = +|Topic|Detail|Implementation +|runtime|language & VM|.Net Core Version 3.0 +|persistence|OR-mapper| Entity Framework Core +|service|REST services|https://www.asp.net/web-api[Web API] +|service - integration to external systems - optional|SOAP services|https://msdn.microsoft.com/en-us/library/dd456779(v=vs.110).aspx[WCF] +|logging|framework|https://github.com/serilog/serilog-extensions-logging[Serilog] +|validation|framework| NewtonSoft Json, DataAnnotations +|component management|dependency injection| Unity +|security|Authentication & Authorization| JWT .Net Security - Token based, local Authentication Provider +|unit tests|framework|https://github.com/xunit/xunit[xUnit] +|Circuit breaker|framework, allows retry pattern on http calls|https://github.com/App-vNext/Polly[Polly] +|CQRS|Memory events and queue events| MediatR - EasyNetQ - Kafka +|Kafka| Kafka support for enterprise applications| Confluent.Kafka +|Fluent Validation| Fluent validation for class instances|https://fluentvalidation.net/[Fluent validation] +|== == == == == == == == == == == =

+
+
+
+

12.14. Target platforms

+
+

Thanks to the new .Net Core platform from Microsoft, the developed software can be published on Windows, Linux, OS X and Android platforms.

+
+
+ +
+
+
+

13. User guide

+
+ +
+
+technical architecture +
+
+
+

13.1. devon4net Guide

+ +
+
+

13.2. Introduction

+
+

Welcome to devon4net framework user guide. In this document you will find the information regarding how to start and deploy your project using the guidelines proposed in our solution.

+
+
+

All the guidelines shown and used in this document are a set of rules and conventions proposed and supported by Microsoft and the industry.

+
+
+
+

13.3. The package

+
+

Devon4Net package solution contains:

+
+
+

|== == == == == == == == == == == = +|File / Folder|Content +|Documentation| User documentation in HTML format +|Modules| Contains the source code of the different devon4net modules +|Samples| Different samples implemented in .NET and .NET Core. Also includes My Thai Star Devon flagship restaurant application +|Templates| Main .net Core template to start developing from scratch +|License| License agreement +|README.md| Github main page +|TERMS_OF_USE.adoc| The devon4net terms of use +|LICENSE| The devon license +|Other files| Such the code of conduct and contributing guide +|== == == == == == == == == == == =

+
+
+
+

13.4. Application templates

+
+

The application templates given in the bundle are ready to use.

+
+
+

At the moment .net Core template is supported. The template is ready to be used as a simple console Kestrel application or being deployed in a web server like IIS.

+
+
+
+

13.5. Samples

+ +
+
+

13.6. == My Thai Star

+
+

You can find My Thai Star .NET port application at Github.

+
+
+ + + + + +
+ + +As devon4net has been migrated to the latest version of .net core, the template is not finished yet. +
+
+
+
+

13.7. Cookbook

+ +
+
+

13.8. Data management

+
+

To use Entity Framework Core, install the package for the database provider(s) you want to target. This walk-through uses SQL Server.

+
+
+

For a list of available providers see Database Providers

+
+
+
    +
  • +

    Go to Tools > NuGet Package Manager > Package Manager Console

    +
  • +
  • +

    Run Install-Package Microsoft.EntityFrameworkCore.SqlServer

    +
  • +
+
+
+

We will be using some Entity Framework Tools to create a model from the database. So we will install the tools package as well:

+
+
+
    +
  • +

    Run Install-Package Microsoft.EntityFrameworkCore.Tools

    +
  • +
+
+
+

We will be using some ASP.NET Core Scaffolding tools to create controllers and views later on. So we will install this design package as well:

+
+
+
    +
  • +

    Run Install-Package Microsoft.VisualStudio.Web.CodeGeneration.Design

    +
  • +
+
+
+
+

13.9. == Entity Framework Code first

+
+

In order to design your database model from scratch, we encourage you to follow the Microsoft guidelines described here.

+
+
+
+

13.10. == Entity Framework Database first

+
+
    +
  • +

    Go to Tools > NuGet Package Manager > Package Manager Console

    +
  • +
  • +

    Run the following command to create a model from the existing database:

    +
  • +
+
+
+
+
Scaffold-DbContext "Your connection string to existing database" Microsoft.EntityFrameworkCore.SqlServer -OutputDir Models
+
+
+
+

The command will create the database context and the mapped entities as well inside of Models folder.

+
+
+
+

13.11. == Register your context with dependency injection

+
+

Services are registered with dependency injection during application startup.

+
+
+

In order to register your database context (or multiple database context as well) you can add the following line at ConfigureDbService method at startup.cs:

+
+
+
+
       private void SetupDatabase(IServiceCollection services)
+        {
+            services.SetupDatabase<TodoContext>(Configuration, "Default", WebAPI.Configuration.Enums.DatabaseType.InMemory);
+        }
+
+
+
+

Where:

+
+
+

|== == == == == == == == == == == = +|Param|Description +|TodoContext| Is the database context definition +|Default| Is the connection string defined at ConnectionString node at the appsettings configuration file +|WebAPI.Configuration.Enums.DatabaseType.InMemory| Is the database driver selection. In this case InMemory data base is chosen +|== == == == == == == == == == == =

+
+
+

The supported databases are:

+
+
+
    +
  • +

    SqlServer

    +
  • +
  • +

    Sqlite

    +
  • +
  • +

    InMemory

    +
  • +
  • +

    Cosmos

    +
  • +
  • +

    PostgreSQL

    +
  • +
  • +

    MySql

    +
  • +
  • +

    MariaDb

    +
  • +
  • +

    FireBird

    +
  • +
  • +

    Oracle

    +
  • +
  • +

    MSAccess

    +
  • +
+
+
+
+

13.12. Repositories and Services

+
+

Services and Repositories are an important part of the devon4net proposal. To make them work properly, first of all they must be declared and injected at Startup.cs in the DI Region.

+
+
+

Services are declared in devon4net.Business.Common and injected in Controller classes when needed. Use services to build your application logic.

+
+
+
+technical architecture +
+
Figure 73. Screenshot of devon4net.Business.Common project in depth
+
+
+

For example, My Thai Star Booking controller constructor looks like this:

+
+
+
+
        public BookingController(IBookingService bookingService, IMapper mapper)
+        {
+            BookingService = bookingService;
+            Mapper = mapper;
+
+        }
+
+
+
+

Currently devon4net has a Unit of Work class in order to perform CRUD operations to database making use of your designed model context.

+
+
+

Repositories are declared at devon4net.Domain.UnitOfWork project and make use of Unit of Work class.

+
+
+

The common methods to perform CRUD operations (where <T> is an entity from your model) are:

+
+
+
    +
  • +

    Sync methods:

    +
  • +
+
+
+
+
IList<T> GetAll(Expression<Func<T, bool>> predicate = null);
+T Get(Expression<Func<T, bool>> predicate = null);
+IList<T> GetAllInclude(IList<string> include, Expression<Func<T, bool>> predicate = null);
+T Create(T entity);
+void Delete(T entity);
+void DeleteById(object id);
+void Delete(Expression<Func<T, bool>> where);
+void Edit(T entity);
+
+
+
+
    +
  • +

    Async methods:

    +
  • +
+
+
+
+
Task<IList<T>> GetAllAsync(Expression<Func<T, bool>> predicate = null);
+Task<T> GetAsync(Expression<Func<T, bool>> predicate = null);
+Task<IList<T>> GetAllIncludeAsync(IList<string> include, Expression<Func<T, bool>> predicate = null);
+
+
+
+

If you perform a Commit operation and an error happens, changes will be rolled back.

+
+
+
+

13.13. Swagger integration

+
+

The given templates allow you to specify the API contract through Swagger integration, and the controller classes are responsible for exposing methods making use of comments in the source code.

+
+
+

The next example shows how to comment the method with summaries in order to define the contract. Add (Triple Slash) XML Documentation To Swagger:

+
+
+
+
/// <summary>
+/// Method to get reservations
+/// </summary>
+/// <response code="201">Ok.</response>
+/// <response code="400">Bad request. Parser data error.</response>
+/// <response code="401">Unauthorized. Authentication fail.</response>
+/// <response code="403">Forbidden. Authorization error.</response>
+/// <response code="500">Internal Server Error. The search process ended with error.</response>
+[HttpPost]
+[Route("/mythaistar/services/rest/bookingmanagement/v1/booking/search")]
+//[Authorize(Policy = "MTSWaiterPolicy")]
+[AllowAnonymous]
+[EnableCors("CorsPolicy")]
+public async Task<IActionResult> BookingSearch([FromBody]BookingSearchDto bookingSearchDto)
+{
+
+
+
+

In order to be effective and make use of the comments to build the API contract, the project which contains the controller classes must generate the XML document file. To achieve this, the XML documentation file must be checked in project settings tab:

+
+
+
+technical architecture +
+
Figure 74. Project settings tab
+
+
+

We propose to generate the file under the XmlDocumentation folder. For example in devon4net.Domain.Entities project in My Thai Star .NET implementation the output folder is:

+
+
+
+
`XmlDocumentation\devon4net.Business.Common.xml`
+
+
+
+

The file devon4net.Business.Common.xml won’t appear until you build the project. Once the file is generated, please modify its properties as a resource and set it to be Copy always .

+
+
+
+technical architecture +
+
Figure 75. Swagger XML document file properties
+
+
+

Once you have this, the swagger user interface will show the method properties defined in your controller comments.

+
+
+

Making use of this technique, controllers are not encapsulated in the application project. Also, you can develop your controller classes in different projects to obtain code reusability.

+
+
+

Swagger comment:

+
+
+

|== == == == == == == == == == == = +|Comment|Functionality +|<summary>| Will map to the operation’s summary +|<remarks>| Will map to the operation’s description (shown as "Implementation Notes" in the UI) +|<response code="###">| Specifies the different response of the target method +|<param>| Will define the parameter(s) of the target method +| +|== == == == == == == == == == == =

+
+
+

Please check Microsoft’s site regarding to summary notations.

+
+
+
+

13.14. Logging module

+
+

An important part of the software lifecycle is the need to use logs and traces. devon4net has a log module pre-configured to achieve this important point.

+
+
+

By default Microsoft provides a logging module on .NET Core applications. This module is open and it can be extended. devon4net uses the Serilog implementation. This implementation provides a huge quantity of information about events and traces.

+
+
+
+

13.15. == Log file

+
+

devon4net can write the log information to a simple text file. You can configure the file name and folder at appsettings.json file (LogFile attribute) at devon4net.Application.WebApi project.

+
+
+
+

13.16. == Database log

+
+

devon4net can write the log information to a SQLite database. You can configure the file name and folder at appsettings.json file (LogDatabase attribute) at devon4net.Application.WebApi project.

+
+
+

With this method you can launch queries in order to search the information you are looking for.

+
+
+
+

13.17. == Seq log

+
+

devon4net can write the log information to a Seq log server. You can configure the Seq server URL at the appsettings.json file (SeqLogServerUrl attribute) in the devon4net.Application.WebApi project.

+
+
+

With this method you can make queries via HTTP.

+
+
+
+serilog seq +
+
+
+

By default you can find the log information at Logs folder.

+
+
+
+

13.18. JWT module

+
+

JSON Web Tokens are an open, industry standard RFC 7519 method for representing claims securely between two parties allowing you to decode, verify and generate JWT.

+
+
+

You should use JWT for:

+
+
+
    +
  • +

    Authentication : allowing the user to access routes, services, and resources that are permitted with that token.

    +
  • +
  • +

    Information Exchange: JSON Web Tokens are a good way of securely transmitting information between parties. Additionally, as the signature is calculated using the header and the payload, you can also verify that the content has not been tampered with.

    +
  • +
+
+
+

The JWT module is configured at Startup.cs inside devon4net.Application.WebApi project from .NET Core template. In this class you can configure the different authentication policy and JWT properties.

+
+
+

Once the user has been authenticated, the client performs the call to the backend with the attribute Bearer plus the token generated at server side.

+
+
+
+jwt +
+
+
+

On My Thai Star sample there are two predefined users: user0 and Waiter. Once they log in the application, the client (Angular/Xamarin) will manage the server call with the json web token. With this method we can manage the server authentication and authorization.

+
+
+

You can find more information about JWT at jwt.io

+
+
+
+

13.19. AOP module

+
+

AOP (Aspect Oriented Programming) tracks all information when a method is called. AOP also tracks the input and output data when a method is called.

+
+
+

By default devon4net has AOP module pre-configured and activated for controllers at Startup.cs file at devon4net.Application.WebApi:

+
+
+
+
options.Filters.Add(new Infrastructure.AOP.AopControllerAttribute(Log.Logger));
+
+options.Filters.Add(new Infrastructure.AOP.AopExceptionFilter(Log.Logger));
+
+
+
+

This configuration allows all Controller classes to be tracked. If you don’t need to track the info, comment out the lines written above.

+
+
+
+

13.20. Docker support

+
+

devon4net Core projects are ready to be integrated with docker.

+
+
+

The My Thai Star application sample is ready to be used with Linux docker containers. The Readme file explains how to launch and set up the sample application.

+
+
+
    +
  • +

    angular : Angular client to support backend. Just binaries.

    +
  • +
  • +

    database : Database scripts and .bak file

    +
  • +
  • +

    mailservice: Microservice implementation to send notifications.

    +
  • +
  • +

    netcore: Server side using .net core 2.0.x.

    +
  • +
  • +

    xamarin: Xamarin client based on Excalibur framework from The Netherlands using XForms.

    +
  • +
+
+
+

Docker configuration and docker-compose files are provided.

+
+
+
+

13.21. Testing with XUnit

+
+
+
+

xUnit.net is a free, open source, community-focused unit testing tool for the .NET Framework. Written by the original inventor of NUnit v2, xUnit.net is the latest technology for unit testing C#, F#, VB.NET and other .NET languages. xUnit.net works with ReSharper, CodeRush, TestDriven.NET and Xamarin. It is part of the .NET Foundation, and operates under their code of conduct. It is licensed under Apache 2 (an OSI approved license).

+
+
+
+— About xUnit.net
+https://xunit.github.io/#documentation +
+
+
+

Facts are tests which are always true. They test invariant conditions.

+
+
+

Theories are tests which are only true for a particular set of data.

+
+
+
+

13.22. The first test

+
+
+
using Xunit;
+
+namespace MyFirstUnitTests
+{
+    public class Class1
+    {
+        [Fact]
+        public void PassingTest()
+        {
+            Assert.Equal(4, Add(2, 2));
+        }
+
+        [Fact]
+        public void FailingTest()
+        {
+            Assert.Equal(5, Add(2, 2));
+        }
+
+        int Add(int x, int y)
+        {
+            return x + y;
+        }
+    }
+}
+
+
+
+
+

13.23. The first test with theory

+
+

Theory attribute is used to create tests with input params:

+
+
+
+
[Theory]
+[InlineData(3)]
+[InlineData(5)]
+[InlineData(6)]
+public void MyFirstTheory(int value)
+{
+    Assert.True(IsOdd(value));
+}
+
+bool IsOdd(int value)
+{
+    return value % 2 ==  1;
+}
+
+
+
+
+

13.24. Cheat Sheet

+
+

|== == == == == == == == == == == = +|Operation| Example +|Test|

+
+
+
+
public void Test()
+{
+}
+|Setup|public class TestFixture {
+public TestFixture()
+{
+
+...
+
+    }
+
+}
+|Teardown|public class TestFixture : IDisposable
+
+{
+
+public void Dispose() {
+
+ ...
+ }
+
+}
+
+
+
+

|== == == == == == == == == == == =

+
+
+
+

13.25. Console runner return codes

+
+

|== == == == == == == == == == == = +|Code| Meaning +|0|The tests ran successfully. +|1|One or more of the tests failed. +|2|The help page was shown, either because it was requested, or because the user did not provide any command line arguments. +|3| There was a problem with one of the command line options passed to the runner. +|4|There was a problem loading one or more of the test assemblies (for example, if a 64-bit only assembly is run with the 32-bit test runner). +|== == == == == == == == == == == =

+
+
+
+

13.26. Publishing

+ +
+
+

13.27. == Nginx

+
+

In order to deploy your application to a Nginx server on Linux platform you can follow the instructions from Microsoft here.

+
+
+
+

13.28. == IIS

+
+

This section shows the configuration options that the .NET Core application must implement.

+
+
+

Supported operating systems:

+
+
+
    +
  • +

    Windows 7 and newer

    +
  • +
  • +

    Windows Server 2008 R2 and newer*

    +
  • +
+
+
+

WebListener server will not work in a reverse-proxy configuration with IIS. You must use the Kestrel server.

+
+
+

IIS configuration

+
+
+

Enable the Web Server (IIS) role and establish role services.

+
+
+

Windows desktop operating systems

+
+
+

Navigate to Control Panel > Programs > Programs and Features > Turn Windows features on or off (left side of the screen). Open the group for Internet Information Services and Web Management Tools. Check the box for IIS Management Console. Check the box for World Wide Web Services. Accept the default features for World Wide Web Services or customize the IIS features to suit your needs.

+
+
+
+iis 1 +
+
+
+

*Conceptually, the IIS configuration described in this document also applies to hosting ASP.NET Core applications on Nano Server IIS, but refer to ASP.NET Core with IIS on Nano Server for specific instructions.

+
+
+

Windows Server operating systems +For server operating systems, use the Add Roles and Features wizard via the Manage menu or the link in Server Manager. On the Server Roles step, check the box for Web Server (IIS).

+
+
+
+iis 2 +
+
+
+

On the Role services step, select the IIS role services you desire or accept the default role services provided.

+
+
+
+iis 3 +
+
+
+

Proceed through the Confirmation step to install the web server role and services. A server/IIS restart is not required after installing the Web Server (IIS) role.

+
+
+

Install the .NET Core Windows Server Hosting bundle

+
+
+
    +
  1. +

    Install the .NET Core Windows Server Hosting bundle on the hosting system. The bundle will install the .NET Core Runtime, .NET Core Library, and the ASP.NET Core Module. The module creates the reverse-proxy between IIS and the Kestrel server. Note: If the system doesn’t have an Internet connection, obtain and install the Microsoft Visual C++ 2015 Re-distributable before installing the .NET Core Windows Server Hosting bundle.

    +
  2. +
  3. +

    Restart the system or execute net stop was /y followed by net start w3svc from a command prompt to pick up a change to the system PATH.

    +
  4. +
+
+
+ + + + + +
+ + +If you use an IIS Shared Configuration, see ASP.NET Core Module with IIS Shared Configuration. +
+
+
+

To configure IISIntegration service options, include a service configuration for IISOptions in ConfigureServices:

+
+
+
+
services.Configure<IISOptions>(options =>
+{
+    ...
+});
+
+
+
+

|== == == == == == == == == == == = +|Option|Default|Setting +|AutomaticAuthentication| true |If true, the authentication middleware sets the HttpContext.User and responds to generic challenges. If false, the authentication middleware only provides an identity (HttpContext.User) and responds to challenges when explicitly requested by the Authentication Scheme. Windows Authentication must be enabled in IIS for AutomaticAuthentication to function. +|AuthenticationDisplayName | null| Sets the display name shown to users on login pages. +|ForwardClientCertificate |true|If true and the MS-ASPNETCORE-CLIENTCERT request header is present, the HttpContext.Connection.ClientCertificate is populated. +|== == == == == == == == == == == =

+
+
+

web.config

+
+
+

The web.config file configures the ASP.NET Core Module and provides other IIS configuration. Creating, transforming, and publishing web.config is handled by Microsoft.NET.Sdk.Web, which is included when you set your project’s SDK at the top of your .csproj file, <Project Sdk="Microsoft.NET.Sdk.Web">. To prevent the MSBuild target from transforming your web.config file, add the <IsTransformWebConfigDisabled> property to your project file with a setting of true:

+
+
+
+
<PropertyGroup>
+  <IsTransformWebConfigDisabled>true</IsTransformWebConfigDisabled>
+</PropertyGroup>
+
+
+
+
+

13.29. == Azure

+
+

In order to deploy your application to Azure platform you can follow the instructions from Microsoft:

+
+
+

Set up the development environment

+
+
+ +
+
+

Create a web app

+
+
+

In the Visual Studio Start Page, select File > New > Project…​

+
+
+
+File menu +
+
+
+

Complete the New Project dialog:

+
+
+
    +
  • +

    In the left pane, select .NET Core.

    +
  • +
  • +

    In the center pane, select ASP.NET Core Web Application.

    +
  • +
  • +

    Select OK.

    +
  • +
+
+
+
+New Project dialog +
+
+
+

In the New ASP.NET Core Web Application dialog:

+
+
+
    +
  • +

    Select Web Application.

    +
  • +
  • +

    Select Change Authentication.

    +
  • +
+
+
+
+New Project dialog +
+
+
+

The Change Authentication dialog appears.

+
+
+
    +
  • +

    Select Individual User Accounts.

    +
  • +
  • +

    Select OK to return to the New ASP.NET Core Web Application, then select OK again.

    +
  • +
+
+
+
+New ASP.NET Core Web authentication dialog +
+
+
+

Visual Studio creates the solution.

+
+
+

Run the app locally

+
+
+
    +
  • +

    Choose Debug then Start Without Debugging to run the app locally.

    +
  • +
  • +

    Click the About and Contact links to verify the web application works.

    +
  • +
+
+
+
+Web application open in Microsoft Edge on localhost +
+
+
+
    +
  • +

    Select Register and register a new user. You can use a fictitious email address. When you submit, the page displays the following error:

    +
  • +
+
+
+

"Internal Server Error: A database operation failed while processing the request. SQL exception: Cannot open the database. Applying existing migrations for Application DB context may resolve this issue."

+
+
+
    +
  • +

    Select Apply Migrations and, once the page updates, refresh the page.

    +
  • +
+
+
+
+Internal Server Error: A database operation failed while processing the request. SQL exception: Cannot open the database. Applying existing migrations for Application DB context may resolve this issue. +
+
+
+

The app displays the email used to register the new user and a Log out link.

+
+
+
+Web application open in Microsoft Edge. The Register link is replaced by the text Hello email@domain.com! +
+
+
+

Deploy the app to Azure

+
+
+

Close the web page, return to Visual Studio, and select Stop Debugging from the Debug menu.

+
+
+

Right-click on the project in Solution Explorer and select Publish…​.

+
+
+
+Contextual menu open with Publish link highlighted +
+
+
+

In the Publish dialog, select Microsoft Azure App Service and click Publish.

+
+
+
+Publish dialog +
+
+
+
    +
  • +

    Name the app a unique name.

    +
  • +
  • +

    Select a subscription.

    +
  • +
  • +

    Select New…​ for the resource group and enter a name for the new resource group.

    +
  • +
  • +

    Select New…​ for the app service plan and select a location near you. You can keep the name that is generated by default.

    +
  • +
+
+
+
+App Service dialog +
+
+
+
    +
  • +

    Select the Services tab to create a new database.

    +
  • +
  • +

    Select the green + icon to create a new SQL Database

    +
  • +
+
+
+
+New SQL Database +
+
+
+
    +
  • +

    Select New…​ on the Configure SQL Database dialog to create a new database.

    +
  • +
+
+
+
+New SQL Database and server +
+
+
+

The Configure SQL Server dialog appears.

+
+
+
    +
  • +

    Enter an administrator user name and password, and then select OK. Don’t forget the user name and password you create in this step. You can keep the default Server Name.

    +
  • +
  • +

    Enter names for the database and connection string.

    +
  • +
+
+
+
+

13.30. == Note

+
+

"admin" is not allowed as the administrator user name.

+
+
+
+Configure SQL Server dialog +
+
+
+
    +
  • +

    Select OK.

    +
  • +
+
+
+

Visual Studio returns to the Create App Service dialog.

+
+
+
    +
  • +

    Select Create on the Create App Service dialog.

    +
  • +
+
+
+
+Configure SQL Database dialog +
+
+
+
    +
  • +

    Click the Settings link in the Publish dialog.

    +
  • +
+
+
+
+Publish dialog: Connection panel +
+
+
+

On the Settings page of the Publish dialog:

+
+
+
    +
  • +

    Expand Databases and check Use this connection string at runtime.

    +
  • +
  • +

    Expand Entity Framework Migrations and check Apply this migration on publish.

    +
  • +
  • +

    Select Save. Visual Studio returns to the Publish dialog.

    +
  • +
+
+
+
+Publish dialog: Settings panel +
+
+
+

Click Publish. Visual Studio will publish your app to Azure and launch the cloud app in your browser.

+
+
+

Test your app in Azure

+
+
+
    +
  • +

    Test the About and Contact links

    +
  • +
  • +

    Register a new user

    +
  • +
+
+
+
+Web application opened in Microsoft Edge on Azure App Service +
+
+
+

Update the app

+
+
+
    +
  • +

    Edit the Pages/About.cshtml Razor page and change its contents. For example, you can modify the paragraph to say "Hello ASP.NET Core!":

    +
    +
    +
    html<button class="action copy" data-bi-name="copy">Copy</button>
    +
    +
    +
  • +
+
+
+
+
@page
+@model AboutModel
+@{
+    ViewData["Title"] = "About";
+}
+<h2>@ViewData["Title"]</h2>
+<h3>@Model.Message</h3>
+
+    <p>Hello ASP.NET Core!</p>
+
+
+
+
    +
  • +

    Right-click on the project and select Publish…​ again.

    +
  • +
+
+
+
+Contextual menu open with Publish link highlighted +
+
+
+
    +
  • +

    After the app is published, verify the changes you made are available on Azure.

    +
  • +
+
+
+
+Verify task is complete +
+
+
+

Clean up

+
+
+

When you have finished testing the app, go to the Azure portal and delete the app.

+
+
+
    +
  • +

    Select Resource groups, then select the resource group you created.

    +
  • +
+
+
+
+Azure Portal: Resource Groups in sidebar menu +
+
+
+
    +
  • +

    In the Resource groups page, select Delete.

    +
  • +
+
+
+
+Azure Portal: Resource Groups page +
+
+
+
    +
  • +

    Enter the name of the resource group and select Delete. Your app and all other resources created in this tutorial are now deleted from Azure.

    +
  • +
+
+
+ +
+
+
+

14. How To section

+
+ +
+

14.1. Introduction

+
+

The aim of this document is to show how to get devon4net things done in an easy way.

+
+
+
+

14.2. How to

+ +
+
+

14.3. Start a new devonfw project

+
+

The .Net Core 3.1 template allows you to start developing an n-layer server application to provide the latest features. The template can be used in Visual Studio Code and Visual Studio 2019.

+
+
+

The application result can be deployed as a console application, microservice or web page.

+
+
+

To start developing with the devon4Net template, please follow these instructions:

+
+
+
+

14.4. == Using devon4Net template

+ +
+
+

14.5. == Option 1

+
+
+
Open your favourite terminal (Win/Linux/iOS)
+Go to future project's path
+Type dotnet new --install Devon4Net.WebAPI.Template
+Type dotnet new Devon4NetAPI
+Go to project's path
+You are ready to start developing with devon4Net
+
+
+
+
+

14.6. == Option 2

+
+
+
Create a new dotnet` API` project from scratch
+Add the NuGet package reference to your project:
+Install-Package Devon4Net.Application.WebAPI.Configuration
+
+
+
+

Set up your project as follows in program.cs file:

+
+
+
+
        public static void Main(string[] args)
+        {
+            // Please use
+            // Devonfw.Configure<Startup>(args);
+            // Or :
+
+            WebHost.CreateDefaultBuilder(args)
+                .UseStartup<Startup>()
+                .InitializeDevonFw()
+                .Build()
+                .Run();
+        }
+
+
+
+

Set up your project as follows in startup.cs file:

+
+
+
+
    private IConfiguration Configuration { get; }
+
+
+   public Startup(IConfiguration configuration)
+    {
+        Configuration = configuration;
+    }
+
+    public void ConfigureServices(IServiceCollection services)
+    {
+
+        services.ConfigureDevonFw(Configuration);
+        SetupDatabase(services);
+
+        ...
+    }
+
+
+    private void SetupDatabase(IServiceCollection services)
+    {
+        // Default is the database connection name in appsettings.json file
+        services.SetupDatabase<TodoContext>(Configuration, "Default", DatabaseType.InMemory);
+    }
+
+    public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
+    {
+        app.ConfigureDevonFw();
+        ...
+    }
+
+
+
+

Add the devonfw configuration options in your appsettings.json file

+
+
+
+

14.7. devon4net configuration files

+
+

To start using devon4net in your .net core application add this configuration in your appsettings.json file:

+
+
+
+
 "devonfw": {
+    "UseDetailedErrorsKey": true,
+    "UseIIS": false,
+    "UseSwagger": true,
+    "Environment": "Development",
+    "KillSwitch": {
+      "killSwitchSettingsFile": "killswitch.appsettings.json"
+    },
+    "Kestrel": {
+      "UseHttps": true,
+      "HttpProtocol": "Http2", //Http1, Http2, Http1AndHttp2, none
+      "ApplicationPort": 8082,
+      "KeepAliveTimeout": 120, //in seconds
+      "MaxConcurrentConnections": 100,
+      "MaxConcurrentUpgradedConnections": 100,
+      "MaxRequestBodySize": 28.6, //In MB. The default maximum request body size is 30,000,000 bytes, which is approximately 28.6 MB
+      "Http2MaxStreamsPerConnection": 100,
+      "Http2InitialConnectionWindowSize": 131072, // From 65,535 and less than 2^31 (2,147,483,648)
+      "Http2InitialStreamWindowSize": 98304, // From 65,535 and less than 2^31 (2,147,483,648)
+      "AllowSynchronousIO": true,
+      "SslProtocol": "Tls12", //Tls, Tls11,Tls12, Tls13, Ssl2, Ssl3, none. For Https2 Tls12 is needed
+      "ServerCertificate": {
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost"
+      },
+      "ClientCertificate": {
+        "DisableClientCertificateCheck": true,
+        "RequireClientCertificate": false,
+        "CheckCertificateRevocation": true,
+        "ClientCertificates": {
+          "Whitelist": [
+            "3A87A49460E8FE0E2A198E63D408DC58435BC501"
+          ],
+          "DisableClientCertificateCheck": false
+        }
+      }
+    },
+    "IIS": {
+      "ForwardClientCertificate": true,
+      "AutomaticAuthentication": true,
+      "AuthenticationDisplayName" : ""
+    }
+  }
+
+
+
+

Also, to start using the devon4net components, you should add the following JSON options in your appsettings.json or appsettings.Development.json file:

+
+
+
+
{
+  "ExtraSettingsFiles": [
+    "Put a directory path (relative/absolute/linux-like) like /run/secrets/global where there are many settings/secret files to load",
+    "Put a specific file name (with/without path) like /app-configs/app/extra-settings.json"
+  ],
+  "ConnectionStrings": {
+    "Default": "Todos",
+    "Employee": "Employee",
+    "RabbitMqBackup": "Add your database connection string here for messaging backup",
+    "MediatRBackup": "Add your database connection string here for messaging backup"
+  },
+  "Logging": {
+    "LogLevel": {
+      "Default": "Debug",
+      "System": "Information",
+      "Microsoft": "Information"
+    }
+  },
+  "Swagger": {
+    "Version": "v1",
+    "Title": "devon4net API",
+    "Description": "devon4net API Contract",
+    "Terms": "https://www.devonfw.com/terms-of-use/",
+    "Contact": {
+      "Name": "devonfw",
+      "Email": "sample@mail.com",
+      "Url": "https://www.devonfw.com"
+    },
+    "License": {
+      "Name": "devonfw - Terms of Use",
+      "Url": "https://www.devonfw.com/terms-of-use/"
+    },
+    "Endpoint": {
+      "Name": "V1 Docs",
+      "Url": "/swagger/v1/swagger.json",
+      "UrlUi": "swagger",
+      "RouteTemplate": "swagger/v1/{documentName}/swagger.json"
+    }
+  },
+  "JWT": {
+    "Audience": "devon4Net",
+    "Issuer": "devon4Net",
+    "TokenExpirationTime": 60,
+    "ValidateIssuerSigningKey": true,
+    "ValidateLifetime": true,
+    "ClockSkew": 5,
+    "Security": {
+      "SecretKeyLengthAlgorithm": "",
+      "SecretKeyEncryptionAlgorithm": "",
+      "SecretKey": "",
+      "Certificate": "",
+      "CertificatePassword": "",
+      "CertificateEncryptionAlgorithm": ""
+    }
+  },
+  "Cors": []
+  //[
+  //  {
+  //    "CorsPolicy": "CorsPolicy1",
+  //    "Origins": "http://example.com,http://www.contoso.com",
+  //    "Headers": "accept,content-type,origin,x-custom-header",
+  //    "Methods": "GET,POST,HEAD",
+  //    "AllowCredentials": true
+  //  },
+  //  {
+  //    "CorsPolicy": "CorsPolicy2",
+  //    "Origins": "http://example.com,http://www.contoso.com",
+  //    "Headers": "accept,content-type,origin,x-custom-header",
+  //    "Methods": "GET,POST,HEAD",
+  //    "AllowCredentials": true
+  //  }
+  //]
+  ,
+  "CircuitBreaker": {
+    "CheckCertificate": false,
+    "Endpoints": [
+      {
+        "Name": "AnsibleTower",
+        "BaseAddress": "PUT THE IP ADDRESS HERE",
+        "Headers": {
+        },
+        "WaitAndRetrySeconds": [
+          0.0001,
+          0.0005,
+          0.001
+        ],
+        "DurationOfBreak": 0.0005,
+        "UseCertificate": false,
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost",
+        "SslProtocol": "3072" //TLS12
+      },
+      {
+        "Name": "CyberArk",
+        "BaseAddress": "PUT THE IP ADDRESS HERE",
+        "Headers": {
+        },
+        "WaitAndRetrySeconds": [
+          0.0001,
+          0.0005,
+          0.001
+        ],
+        "DurationOfBreak": 0.0005,
+        "UseCertificate": false,
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost",
+        "SslProtocol": "3072" //TLS12
+      },
+      {
+        "Name": "SmaxHcm",
+        "BaseAddress": "PUT THE IP ADDRESS HERE",
+        "Headers": {
+        },
+        "WaitAndRetrySeconds": [
+          0.0001,
+          0.0005,
+          0.001
+        ],
+        "DurationOfBreak": 0.0005,
+        "UseCertificate": false,
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost",
+        "SslProtocol": "3072" //TLS12
+      }
+    ]
+  },
+  "Headers": {
+    "AccessControlExposeHeader": "Authorization",
+    "StrictTransportSecurityHeader": "",
+    "XFrameOptionsHeader": "DENY",
+    "XssProtectionHeader": "1;mode=block",
+    "XContentTypeOptionsHeader": "nosniff",
+    "ContentSecurityPolicyHeader": "",
+    "PermittedCrossDomainPoliciesHeader": "",
+    "ReferrerPolicyHeader": ""
+  },
+  "Log": {
+    "UseAOPTrace": false,
+    "LogLevel": "Debug",
+    "SqliteDatabase": "logs/log.db",
+    "LogFile": "logs/{0}_devonfw.log",
+    "SeqLogServerHost": "http://127.0.0.1:5341",
+    "GrayLog": {
+      "GrayLogHost": "127.0.0.1",
+      "GrayLogPort": "12201",
+      "GrayLogProtocol": "UDP",
+      "UseSecureConnection": true,
+      "UseAsyncLogging": true,
+      "RetryCount": 5,
+      "RetryIntervalMs": 15,
+      "MaxUdpMessageSize": 8192
+    }
+  },
+  "RabbitMq": {
+    "EnableRabbitMq": false,
+    "Hosts": [
+      {
+        "Host": "127.0.0.1",
+        "Port": 5672,
+        "Ssl": false,
+        "SslServerName": "localhost",
+        "SslCertPath": "localhost.pfx",
+        "SslCertPassPhrase": "localhost",
+        "SslPolicyErrors": "RemoteCertificateNotAvailable" //None, RemoteCertificateNotAvailable, RemoteCertificateNameMismatch, RemoteCertificateChainErrors
+      }
+    ],
+
+    "VirtualHost": "/",
+    "UserName": "admin",
+    "Password": "password",
+    "Product": "devon4net",
+    "RequestedHeartbeat": 10, //Set to zero for no heartbeat
+    "PrefetchCount": 50,
+    "PublisherConfirms": false,
+    "PersistentMessages": true,
+    "Platform": "localhost",
+    "Timeout": 10,
+    "Backup": {
+      "UseLocalBackup": false,
+      "DatabaseName": "devon4netMessageBackup.db"
+    }
+  },
+  "MediatR": {
+    "EnableMediatR": false,
+    "Backup": {
+      "UseLocalBackup": false,
+      "DatabaseName": "devon4netMessageBackup.db"
+    }
+  },
+  "LiteDb": {
+    "DatabaseLocation": "devon4net.db"
+  },
+  "AnsibleTower": {
+    "EnableAnsible": false,
+    "Name": "AnsibleTower",
+    "CircuitBreakerName": "AnsibleTower",
+    "ApiUrlBase": "/api/v2/?format=json",
+    "Version": "1.0.5.29",
+    "Username": "",
+    "Password": ""
+  },
+  "CyberArk": {
+    "EnableCyberArk": false,
+    "Username": "",
+    "Password": "",
+    "CircuitBreakerName": "CyberArk"
+  },
+  "SmaxHcm": {
+    "EnableSmax": false,
+    "Username": "",
+    "Password": "",
+    "TenantId": "",
+    "CircuitBreakerName": "SmaxHcm",
+    "ProviderId": ""
+  },
+  "Kafka": {
+    "EnableKafka": true,
+    "Administration": [
+      {
+        "AdminId": "Admin1",
+        "Servers": "127.0.0.1:9092"
+      }
+    ],
+    "Producers": [
+      {
+        "ProducerId": "Producer1", // devon identifier
+        "Servers": "127.0.0.1:9092", // Initial list of brokers as a CSV list of broker host or host:port. The application may also use `rd_kafka_brokers_add()` to add brokers during runtime
+        "ClientId": "client1", //Client identifier
+        "Topic": "devonfw", // topics to deliver the message
+        "MessageMaxBytes": 1000000, //Maximum Kafka protocol request message size. Due to differing framing overhead between protocol versions the producer is unable to reliably enforce a strict max message limit at produce time and may exceed the maximum size by one message in protocol ProduceRequests, the broker will enforce the the topic's `max.message.bytes` limit (see Apache Kafka documentation)
+        "CompressionLevel": -1, // [0-9] for gzip; [0-12] for lz4; only 0 for snappy; -1 = codec-dependent default compression level
+        "CompressionType": "None", // None, Gzip, Snappy, Lz4, Zstd
+        "ReceiveMessageMaxBytes": 100000000,
+        "EnableSslCertificateVerification": false,
+        "CancellationDelayMaxMs": 100, // The maximum length of time (in milliseconds) before a cancellation request is acted on. Low values may result in measurably higher CPU usage
+        "Ack": "None", //Zero=Broker does not send any response/ack to client, One=The leader will write the record to its local log but will respond without awaiting full acknowledgement from all followers. All=Broker will block until message is committed by all in sync replicas (ISRs). If there are less than min.insync.replicas (broker configuration) in the ISR set the produce request will fail
+        "Debug": "", //A comma-separated list of debug contexts to enable. Detailed Producer debugging: broker,topic,msg. Consumer: consumer,cgrp,topic,fetch
+        "BrokerAddressTtl": 1000, //How long to cache the broker address resolving results (milliseconds)
+        "BatchNumMessages": 1000000, // Maximum size (in bytes) of all messages batched in one MessageSet, including protocol framing overhead. This limit is applied after the first message has been added to the batch, regardless of the first message's size, this is to ensure that messages that exceed batch.size are produced. The total MessageSet size is also limited by batch.num.messages and message.max.bytes
+        "EnableIdempotence": false, //When set to `true`, the producer will ensure that messages are successfully produced exactly once and in the original produce order. The following configuration properties are adjusted automatically (if not modified by the user) when idempotence is enabled: `max.in.flight.requests.per.connection=5` (must be less than or equal to 5), `retries=INT32_MAX` (must be greater than 0), `acks=all`, `queuing.strategy=fifo`. Producer instantation will fail if user-supplied configuration is incompatible
+        "MaxInFlight": 5,
+        "MessageSendMaxRetries": 5,
+        "BatchSize": 100000000 // Maximum size (in bytes) of all messages batched in one MessageSet, including protocol framing overhead. This limit is applied after the first message has been added to the batch, regardless of the first message's size, this is to ensure that messages that exceed batch.size are produced. The total MessageSet size is also limited by batch.num.messages and message.max.bytes
+      }
+    ],
+    "Consumers": [
+      {
+        "ConsumerId": "Consumer1", // devon identifier
+        "Servers": "127.0.0.1:9092",
+        "GroupId": "group1",
+        "Topics": "devonfw", // Comma separated topics to subscribe
+        "AutoCommit": true, //Automatically and periodically commit offsets in the background. Note: setting this to false does not prevent the consumer from fetching previously committed start offsets. To circumvent this behaviour set specific start offsets per partition in the call to assign()
+        "StatisticsIntervalMs": 0, //librdkafka statistics emit interval. The application also needs to register a stats callback using `rd_kafka_conf_set_stats_cb()`. The granularity is 1000ms. A value of 0 disables statistics
+        "SessionTimeoutMs": 10000, //Client group session and failure detection timeout. The consumer sends periodic heartbeats (heartbeat.interval.ms) to indicate its liveness to the broker. If no hearts are received by the broker for a group member within the session timeout, the broker will remove the consumer from the group and trigger a rebalance. The allowed range is configured with the **broker** configuration properties `group.min.session.timeout.ms` and `group.max.session.timeout.ms`. Also see `max.poll.interval.ms`
+        "AutoOffsetReset": "Largest", //Action to take when there is no initial offset in offset store or the desired offset is out of range: 'smallest','earliest' - automatically reset the offset to the smallest offset, 'largest','latest' - automatically reset the offset to the largest offset, 'error' - trigger an error which is retrieved by consuming messages and checking 'message-&gt;err'
+        "EnablePartitionEof": true, //Verify CRC32 of consumed messages, ensuring no on-the-wire or on-disk corruption to the messages occurred. This check comes at slightly increased CPU usage
+        "IsolationLevel": "ReadCommitted", //Controls how to read messages written transactionally: `ReadCommitted` - only return transactional messages which have been committed. `ReadUncommitted` - return all messages, even transactional messages which have been aborted.
+        "EnableSslCertificateVerification": false,
+        "Debug": "" //A comma-separated list of debug contexts to enable. Detailed Producer debugging: broker,topic,msg. Consumer: consumer,cgrp,topic,fetch
+      }
+    ]
+  }
+}
+
+
+
+
+

14.8. devon4net Cobigen Guide

+ +
+
+

14.9. Overview

+
+

In this guide we will explain how to generate a new WebApi project from an OpenAPI 3.0.0 specification. This means that we are going to use a “contract first” strategy. This is possible thanks to these types of files, which contain all the information about entities, operations, etc…

+
+
+

In order to make it work we are using CobiGen, a powerful tool for generating source code. CobiGen allows users to generate all the structure and code of the components, helping to save a lot of time otherwise wasted on repetitive tasks.

+
+
+
+

14.10. Getting things ready

+ +
+
+

14.11. == devonfw Distribution

+
+

The devonfw distributions can be obtained from here. You can find all releases in maven central.

+
+
+

It is not necessary to install nor configure anything. Just extracting the zip content is enough to have a fully functional devonfw. The only thing you have to do is run create-or-update-workspace.bat and then update-all-workspaces.bat to set up all the needed tools.

+
+
+
+

14.12. == devon4net Templates

+
+

We are going to use the template of devon4net as a base to generate all the code, so what we have to do now is to download said template using the following steps.

+
+
+

First of all you have to set up all the environment for .NET, you can do this using the following tutorial. Next we are going to create a new folder where we want to have the WebAPI project, lastly we are going to open the terminal there.

+
+
+

Type the following:

+
+
+
+
dotnet new -i Devon4Net.WebAPI.Template
+
+
+
+

and then:

+
+
+
+
dotnet new Devon4NetAPI
+
+
+
+
+

14.13. == OpenAPI File

+
+

In order to let CobiGen generate all the files, we first have to make some modifications to our OpenAPI file.

+
+
+

It is obligatory to put the “x-rootpackage” tag to indicate where CobiGen will place the generated files as well as the "x-component" tags for each component, keep in mind that due to CobiGen’s limitations each component must have its own entity.

+
+
+

You can read more information about how to configure your OpenAPI file and a working example here.

+
+
+
+

14.14. Generating files

+
+

Cobigen allows us to generate the files in two different ways. One of them is using Eclipse, which can be done through its graphical interface. The other way to generate the code is using the Cobigen CLI tool.

+
+
+
+

14.15. == Generating files through Eclipse

+
+

In order to generate the files using Eclipse we need to follow some simple steps.

+
+
+

First we are going to import our basic devon4net WebAPI Project into Eclipse. To do so, open Eclipse with the “eclipse-main.bat” file that can be found in the devon distribution root folder. Once we are inside Eclipse, we go to File > Open projects from file system…​ and, under "Directory", search for your project.

+
+
+
+cobigen +
+
+
+

Next we copy our OpenAPI file into the root folder of the project.

+
+
+
+cobigen +
+
+
+

And then we right click on OpenAPI file and then select CobiGen > Generate…​ It will display a window like this:

+
+
+
+cobigen +
+
+
+

To select all .NET features choose CRUD devon4net Server otherwise you can select only those that interest you.

+
+
+
+cobigen +
+
+
+

Once you select all the files that you want to generate, click on the “Finish” button to generate all the source code.

+
+
+
+

14.16. == Generating files through Cobigen` CLI`

+
+

In order to generate the files using the Cobigen CLI, the following steps are needed:

+
+
+
    +
  1. +

    Go to devonfw distribution folder

    +
  2. +
  3. +

    Run console.bat, this will open a console.

    +
  4. +
  5. +

    Go to the folder you downloaded the devon4net template and your yml file.

    +
  6. +
  7. +

    Run the command:

    +
    +
    +
    cobigen generate {yourOpenAPIFile}.yml
    +
    +
    +
  8. +
  9. +

    A list of increments will be printed so that you can start the generation. It has to be selected CRUD devon4net Server increment.

    +
  10. +
+
+
+
+

14.17. Configuration

+ +
+
+

14.18. == Dependency Injection configuration

+
+

At this point it is needed to make some modifications in the code in order to configure correctly the server. To do so it is needed to locate the services and the repositories files that were created in Devon4Net.WebAPI.Implementation

+
+
+

Services location:

+
+
+
+cobigen +
+
+
+

Repositories location:

+
+
+
+cobigen +
+
+
+

Now, we are going to open the following file Devon4Net.WebAPI.Implementation\Configure\DevonConfiguration.cs. +In there we have to add the Dependency Injection for the services and the repositories that Cobigen has generated. The following image is an example of what is needed to add.

+
+
+
+cobigen +
+
+
+

Moreover it is needed to remove the last line in order to be able to run the application:

+
+
+
+
`throw new NotImplementedException(...);`
+
+
+
+
+

14.19. == Configure data base

+
+

Cobigen generates an empty context that has to be filled in manually in order to be able to work with the database. The context can be found in [Project_Name]/Devon4Net.WebAPI.Implementation/Domain/Database/CobigenContext.cs.

+
+
+
+cobigen +
+
+
+
+

14.20. == Configure services

+
+

In order to finish the configuration of the services it is needed to go to each service file of the managements generated.

+
+
+

In there we will see some "NotImplementedExceptions", so it is necessary to read carefully each comment inside each exception in order to be able to use the service. Below is an example of a service with its NotImplementedException comments:

+
+
+
+cobigen +
+
+
+
+

14.21. == Run the application

+
+

After doing all the steps defined above, open a terminal in path: [Project_Name]/Devon4Net.Application.WebAPI and then type:

+
+
+
+
dotnet run
+
+
+
+

This will deploy our application on localhost on port 8082, so when you click here (https://localhost:8082/swagger) you can see, in Swagger, all the services and the data model.

+
+
+
+

14.22. Use HTTP2 protocol

+
+

You can specify the HTTP protocol to be used on your devon4net application by modifying some node values at the devonfw node in your appsettings configuration file.

+
+
+
+

14.23. HttpProtocol

+
+

The supported protocols are:

+
+
+

|== == == == == == == == == == == = +|Protocol|Description +|Http1| Http1 protocol +|Http2| Http2 Protocol +|Http1AndHttp2| Both supported +|== == == == == == == == == == == =

+
+
+
+

14.24. == SSL

+
+

To activate the HTTP2, the SslProtocol node must be set to Tls12 value.

+
+
+

The SSL protocol supported version values are:

+
+
+
    +
  • +

    Tls

    +
  • +
  • +

    Tls11

    +
  • +
  • +

    Tls12

    +
  • +
  • +

    Tls13

    +
  • +
  • +

    Ssl2

    +
  • +
  • +

    Ssl3

    +
  • +
+
+
+
+

14.25. Create a certificate for development purposes

+
+

In order to create a valid certificate for development purposes, the OpenSSL tools are needed.

+
+
+
+

14.26. Certificate authority (CA)

+
+

Run the next commands in a shell:

+
+
+
+
1. openssl req -x509 -nodes -new -sha256 -days 1024 -newkey rsa:2048 -keyout RootCA.key -out RootCA.pem -subj "/C=ES/ST=Valencia/L=Valencia/O=Certificates/CN=localhost.local"
+
+2. openssl x509 -outform pem -in RootCA.pem -out RootCA.crt
+
+
+
+

If you want to convert your certificate run the command:

+
+
+
+
openssl pkcs12 -export -out localhost.pfx -inkey RootCA.key -in RootCA.crt
+
+
+
+
+

14.27. Domain name certificate

+
+

Run the next commands in a shell:

+
+
+
+
1. openssl req -new -nodes -newkey rsa:2048 -keyout localhost.key -out localhost.csr -subj "/C=ES/ST=Valencia/L=Valencia/O=Certificates/CN=localhost.local"
+
+2. openssl x509 -req -sha256 -days 1024 -in localhost.csr -CA RootCA.pem -CAkey RootCA.key -CAcreateserial -extfile domains.ext -out localhost.crt
+
+
+
+

Where the domains.ext file should contain:

+
+
+
+
authorityKeyIdentifier=keyid,issuer
+basicConstraints=CA:FALSE
+keyUsage = digitalSignature, nonRepudiation, keyEncipherment, dataEncipherment
+subjectAltName = @alt_names
+[alt_names]
+DNS.1 = localhost
+DNS.2 = localhost.local
+DNS.3 = 127.0.0.1
+DNS.4 = fake1.local
+DNS.5 = fake2.local
+
+
+
+

If you want to convert your certificate run the command:

+
+
+
+
openssl pkcs12 -export -out localhost.pfx -inkey localhost.key -in localhost.crt
+
+
+
+
+

14.28. Setup the database driver

+
+

Add the database connection on the SetupDatabase method at Startup.cs

+
+
+
+
       private void SetupDatabase(IServiceCollection services)
+        {
+            services.SetupDatabase<TodoContext>(Configuration, "Default", WebAPI.Configuration.Enums.DatabaseType.InMemory);
+        }
+
+
+
+

Where:

+
+
+

|== == == == == == == == == == == = +|Param|Description +|TodoContext| Is the database context definition +|Default| Is the connection string defined at ConnectionString node at the appsettings configuration file +|WebAPI.Configuration.Enums.DatabaseType.InMemory| Is the database driver selection. In this case InMemory data base is chosen +|== == == == == == == == == == == =

+
+
+

The supported databases are:

+
+
+
    +
  • +

    SqlServer

    +
  • +
  • +

    Sqlite

    +
  • +
  • +

    InMemory

    +
  • +
  • +

    Cosmos

    +
  • +
  • +

    PostgreSQL

    +
  • +
  • +

    MySql

    +
  • +
  • +

    MariaDb

    +
  • +
  • +

    FireBird

    +
  • +
  • +

    Oracle

    +
  • +
  • +

    MSAccess

    +
  • +
+
+
+
+

14.29. Change the JWT encryption algorithm

+
+

In the appsettings.json configuration file, you can use the next values on the SecretKeyLengthAlgorithm and SecretKeyEncryptionAlgorithm nodes at JWT configuration:

+
+
+

|== == == == == == == == == == == = +|Algorithm|Description +|Aes128Encryption|"http://www.w3.org/2001/04/xmlenc#aes128-cbc" +|Aes192Encryption|"http://www.w3.org/2001/04/xmlenc#aes192-cbc" +|Aes256Encryption|"http://www.w3.org/2001/04/xmlenc#aes256-cbc" +|DesEncryption|"http://www.w3.org/2001/04/xmlenc#des-cbc" +|Aes128KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes128" +|Aes192KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes192" +|Aes256KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes256" +|RsaV15KeyWrap|"http://www.w3.org/2001/04/xmlenc#rsa-1_5" +|Ripemd160Digest|"http://www.w3.org/2001/04/xmlenc#ripemd160" +|RsaOaepKeyWrap|"http://www.w3.org/2001/04/xmlenc#rsa-oaep" +|Aes128KW|"A128KW" +|Aes256KW|"A256KW" +|RsaPKCS1|"RSA1_5" +|RsaOAEP|"RSA-OAEP" +|ExclusiveC14n|"http://www.w3.org/2001/10/xml-exc-c14n#" +|ExclusiveC14nWithComments|"http://www.w3.org/2001/10/xml-exc-c14n#WithComments" +|EnvelopedSignature|"http://www.w3.org/2000/09/xmldsig#enveloped-signature" +|Sha256Digest|"http://www.w3.org/2001/04/xmlenc#sha256" +|Sha384Digest|"http://www.w3.org/2001/04/xmldsig-more#sha384" +|Sha512Digest|"http://www.w3.org/2001/04/xmlenc#sha512" +|Sha256|"SHA256" +|Sha384|"SHA384" +|Sha512|"SHA512" +|EcdsaSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha256" +|EcdsaSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha384" +|EcdsaSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha512" +|HmacSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha256" +|HmacSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha384" +|HmacSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha512" +|RsaSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha256" +|RsaSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha384" +|RsaSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha512" +|RsaSsaPssSha256Signature|"http://www.w3.org/2007/05/xmldsig-more#sha256-rsa-MGF1" 
+|RsaSsaPssSha384Signature|"http://www.w3.org/2007/05/xmldsig-more#sha384-rsa-MGF1" +|RsaSsaPssSha512Signature|"http://www.w3.org/2007/05/xmldsig-more#sha512-rsa-MGF1" +|EcdsaSha256|"ES256" +|EcdsaSha384|"ES384" +|EcdsaSha512|"ES512" +|HmacSha256|"HS256" +|HmacSha384|"HS384" +|HmacSha512|"HS512" +|None|"none" +|RsaSha256|"RS256" +|RsaSha384|"RS384" +|RsaSha512|"RS512" +|RsaSsaPssSha256|"PS256" +|RsaSsaPssSha384|"PS384" +|RsaSsaPssSha512|"PS512" +|Aes128CbcHmacSha256|"A128CBC-HS256" +|Aes192CbcHmacSha384|"A192CBC-HS384" +|Aes256CbcHmacSha512|"A256CBC-HS512" +|== == == == == == == == == == == =

+
+
+
+
+
+

15. Cobigen guide

+
+ +
+

15.1. devon4net Cobigen Guide

+ +
+
+

15.2. Overview

+
+

In this guide we will explain how to generate a new WebApi project from an OpenAPI 3.0.0 specification. This means that we are going to use a “contract first” strategy. This is possible because these types of files contain all the information about entities, operations, etc…

+
+
+

In order to make it work we are using CobiGen, a powerful tool for generating source code. CobiGen allows users to generate all the structure and code of the components, helping to save a lot of time otherwise wasted on repetitive tasks.

+
+
+
+

15.3. Getting things ready

+ +
+
+

15.4. devonfw Distribution

+
+

The devonfw distributions can be obtained from the TeamForge releases library and are packaged in zip files that include all the needed tools, software and configurations.

+
+
+

It is not necessary to install nor configure anything. Just extracting the zip content is enough to have a fully functional devonfw. The only thing you have to do is run create-or-update-workspace.bat and then update-all-workspaces.bat to set up all the needed tools.

+
+
+
+

15.5. devon4net Templates

+
+

We are going to use the template of devon4net as a base to generate all the code, so what we have to do now is to download said template using the following steps.

+
+
+

First of all you have to set up all the environment for .NET, you can do this using the following tutorial. Next we are going to create a new folder where we want to have the WebAPI project, lastly we are going to open the terminal there.

+
+
+

Type the following:

+
+
+
+
dotnet new -i Devon4Net.WebAPI.Template
+
+
+
+

and then:

+
+
+
+
dotnet new Devon4NetAPI
+
+
+
+
+

15.6. OpenAPI File

+
+

In order to let CobiGen generate all the files, we first have to make some modifications to our OpenAPI file.

+
+
+

It is obligatory to put the “x-rootpackage” tag to indicate where CobiGen will place the generated files as well as the "x-component" tags for each component, keep in mind that due to CobiGen’s limitations each component must have its own entity.

+
+
+

You can read more information about how to configure your OpenAPI file and a working example here.

+
+
+
+

15.7. Generating files

+
+

Cobigen allows us to generate the files in two different ways. One of them is using Eclipse, which can be done through its graphical interface. The other way to generate the code is using the Cobigen CLI tool.

+
+
+
+

15.8. Generating files through Eclipse

+
+

In order to generate the files using Eclipse we need to follow some simple steps.

+
+
+

First we are going to import our basic devon4net WebAPI Project into Eclipse. to do so open Eclipse with the “eclipse-main.bat” file that can be found in the devon distribution root folder. Once we are inside of Eclipse we go to File > Open projects from file system…​ and, under "Directory", search for your project.

+
+
+
+cobigen +
+
+
+

Next we copy our OpenAPI file into the root folder of the project.

+
+
+
+cobigen +
+
+
+

And then we right click on OpenAPI file and then select CobiGen > Generate…​ It will display a window like this:

+
+
+
+cobigen +
+
+
+

To select all .NET features choose CRUD devon4net Server otherwise you can select only those that interest you.

+
+
+
+cobigen +
+
+
+

Once you select all the files that you want to generate, click on the “Finish” button to generate all the source code.

+
+
+
+

15.9. Generating files through Cobigen CLI

+
+

In order to generate the files using the Cobigen CLI it is needed to do the following steps:

+
+
+
    +
  1. +

    Go to devonfw distribution folder

    +
  2. +
  3. +

    Run console.bat, this will open a console.

    +
  4. +
  5. +

    Go to the folder you downloaded the devon4net template and your yml file.

    +
  6. +
  7. +

    Run the command:

    +
    +
    +
    cobigen generate {yourOpenAPIFile}.yml
    +
    +
    +
  8. +
  9. +

    A list of increments will be printed so that you can start the generation. It has to be selected CRUD devon4net Server increment.

    +
  10. +
+
+
+
+

15.10. Configuration

+ +
+
+

15.11. Dependency Injection configuration

+
+

At this point it is needed to make some modifications in the code in order to configure correctly the server. To do so it is needed to locate the services and the repositories files that were created in Devon4Net.WebAPI.Implementation

+
+
+

Services location:

+
+
+
+cobigen +
+
+
+

Repositories location:

+
+
+
+cobigen +
+
+
+

Now, we are going to open the following file Devon4Net.WebAPI.Implementation\Configure\DevonConfiguration.cs. +In there we have to add the Dependency Injection for the services and the repositories that Cobigen has generated. The following image is an example of what is needed to add.

+
+
+
+cobigen +
+
+
+

Moreover it is needed to remove the last line in order to be able to run the application:

+
+
+
+
`throw new NotImplementedException(...);`
+
+
+
+
+

15.12. Configure data base

+
+

Cobigen generates an empty context that has to be filled in manually in order to be able to work with the database. The context can be found in [Project_Name]/Devon4Net.WebAPI.Implementation/Domain/Database/CobigenContext.cs.

+
+
+
+cobigen +
+
+
+
+

15.13. Configure services

+
+

In order to finish the configuration of the services it is needed to go to each service file of the managements generated.

+
+
+

In there we will see some "NotImplementedExceptions", so it is necessary to read carefully each comment inside each exception in order to be able to use the service. Below is an example of a service with its NotImplementedException comments:

+
+
+
+cobigen +
+
+
+
+

15.14. Run the application

+
+

After doing all the steps defined above, open a terminal in path: [Project_Name]/Devon4Net.Application.WebAPI and then type:

+
+
+
+
dotnet run
+
+
+
+

This will deploy our application on localhost on port 8082, so when you click here (https://localhost:8082/swagger) you can see, in Swagger, all the services and the data model.

+
+
+
+
+
+

16. Coding conventions

+
+ +
+

==Code conventions

+
+
+

16.1. Introduction

+
+

This document covers .NET Coding Standards and is recommended to be read by team leaders/sw architects and developing teams operating in the Microsoft .NET environment.

+
+
+

“All the code in the system looks as if it was written by a single – very competent – individual” (K. Beck)

+
+
+
+

16.2. Capitalization Conventions

+
+
Terminology
+
+
Camel Case (camelCase)
+
+

Each word or abbreviation in the middle of the phrase begins with a capital letter, with no intervening spaces or punctuation.

+
+
+

The camel case convention, used only for parameter names, capitalizes the first character of each word except the first word, as shown in the following examples. As the example also shows, two-letter acronyms that begin a camel-cased identifier are both lowercase.

+
+
+

use camelCasing for parameter names.

+
+
+
+
Pascal Case (PascalCase)
+
+

The first letter of each concatenated word is capitalized. No other characters are used to separate the words, like hyphens or underscores.

+
+
+

The PascalCasing convention, used for all identifiers except parameter names, capitalizes the first character of each word (including acronyms over two letters in length).

+
+
+

use PascalCasing for all public member, type, and namespace names consisting of multiple words.

+
+
+
+
Underscore Prefix (_underScore)
+
+

For underscore ( _ ), the word after _ use camelCase terminology.

+
+
+
+
+
+

16.3. General Naming Conventions

+
+

choose easily readable identifier names.

+
+
+

favor readability over brevity.

+
+
+
+
◦ e.g.: `GetLength` is a better name than GetInt.
+◦ Aim for the “ubiquitous language” (E. Evans): A language distilled from the domain language, which helps the team clarifying domain concepts and communicating with domain experts.
+
+
+
+

prefer adding a suffix rather than a prefix to indicate a new version of an existing API.

+
+
+

use a numeric suffix to indicate a new version of an existing API, particularly if the existing name of the API is the only name that makes sense (i.e., if it is an industry standard) and if adding any meaningful suffix (or changing the name) is not an appropriate option.

+
+
+

do not use underscores, hyphens, or any other non-alphanumeric characters.

+
+
+

do not use Hungarian notation.

+
+
+

avoid using identifiers that conflict with keywords of widely used programming languages.

+
+
+

do not use abbreviations or contractions as part of identifier names.

+
+
+

do not use any acronyms that are not widely accepted, and even if they are, only when necessary.

+
+
+

do not use the "Ex" (or a similar) suffix for an identifier to distinguish it from an earlier version of the same API.

+
+
+

do not use C# reserved words as names.

+
+
+

do not use Hungarian notation. Hungarian notation is the practice of including a prefix in identifiers to encode some metadata about the parameter, such as the data type of the identifier.

+
+
+
+
◦ `e.g.: iNumberOfClients, sClientName`
+
+
+
+
+

16.4. Names of Assemblies and DLLs

+
+

An assembly is the unit of deployment and identity for managed code programs. Although assemblies can span one or more files, typically an assembly maps one-to-one with a DLL. Therefore, this section describes only DLL naming conventions, which then can be mapped to assembly naming conventions.

+
+
+

choose names for your assembly DLLs that suggest large chunks of functionality, such as System.Data.

+
+
+

Assembly and DLL names don’t have to correspond to namespace names, but it is reasonable to follow the namespace name when naming assemblies. A good rule of thumb is to name the DLL based on the common prefix of the assemblies contained in the assembly. For example, an assembly with two namespaces, MyCompany.MyTechnology.FirstFeature and MyCompany.MyTechnology.SecondFeature, could be called MyCompany.MyTechnology.dll.

+
+
+

consider naming DLLs according to the following pattern:
+<Company>.<Component>.dll +where <Component> contains one or more dot-separated clauses.

+
+
+

For example: +Litware.Controls.dll.

+
+
+
+

16.5. General coding style

+
+
    +
  • +

    Source files: One Namespace per file and one class per file.

    +
  • +
  • +

    Braces: On new line. Always use braces when optional.

    +
  • +
  • +

    Indention: Use tabs with size of 4.

    +
  • +
  • +

    Comments: Use // for simple comment or /// for summaries. Do not /* … */ and do not flower box.

    +
  • +
  • +

    Use built-in C# native data types vs .NET CTS types (string instead of String)

    +
  • +
  • +

    Avoid changing default type in Enums.

    +
  • +
  • +

    Use base or this only in constructors or within an override.

    +
  • +
  • +

    Always check for null before invoking events.

    +
  • +
  • +

    Avoid using Finalize. Use C# Destructors and do not create Finalize() method.

    +
  • +
  • +

    Suggestion: Use blank lines, to make it much more readable by dividing it into small, easy-to-digest sections:

    +
    +
    +
    ◦ Use a single blank line to separate logical groups of code, such as control structures.
    +◦ Use two blank lines to separate method definitions
    +
    +
    +
  • +
+
+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
CaseConvention

Source File

Pascal case. Match class name and file name

Namespace

Pascal case

Class

Pascal case

Interface

Pascal case

Generics

Single capital letter (T or K)

Methods

Pascal case (use a Verb or Verb+Object)

Public field

Pascal case

Private field

Camel case with underscore (_) prefix

Static field

Pascal case

Property

Pascal case. Try to use get and and set convention {get;set;}

Constant

Pascal case

Enum

Pascal case

Variable (inline)

Camel case

Param

Camel case

+
+
+

16.6. Use of Region guideline

+
+

Regions can be used to collapse code inside Visual Studio .NET. Regions are ideal candidates to hide boiler plate style code that adds little value to the reader on your code. Regions can then be expanded to provide progressive disclosure of the underlying details of the class or method.

+
+
+
    +
  • +

    Do Not regionalise entire type definitions that are of an important nature. Types such as enums (which tend to be fairly static in their nature) can be regionalised – their permissible values show up in Intellisense anyway.

    +
  • +
  • +

    Do Not regionalise an entire file. When another developer opens the file, all they will see is a single line in the code editor pane.

    +
  • +
  • +

    Do regionalise boiler plate type code.

    +
  • +
+
+
+
+

16.7. Use of Comment guideline

+
+

Code is the only completely reliable documentation: write “good code” first!

+
+
+
Avoid Unnecessary comments
+
+
    +
  • +

    Choosing good names for fields, methods, parameters, etc. “let the code speak” (K. Beck) by itself reducing the need for comments and documentation

    +
  • +
  • +

    Avoid “repeating the code” and commenting the obvious

    +
  • +
  • +

    Avoid commenting “tricky code”: rewrite it! If there’s no time at present to refactor a tricky section, mark it with a TODO and schedule time to take care of it as soon as possible.

    +
  • +
+
+
+
+
Effective comments
+
+
    +
  • +

    Use comments to summarize a section of code

    +
  • +
  • +

    Use comments to clarify sensitive pieces of code

    +
  • +
  • +

    Use comments to clarify the intent of the code

    +
  • +
  • +

    Bad written or out-of-date comments are more damaging than helpful:

    +
  • +
  • +

    Write clear and effective comments

    +
  • +
  • +

    Pay attention to pre-existing comments when modifying code or copying&pasting code

    +
  • +
+
+
+
+ +
+
+
+

17. Environment

+
+ +
+

17.1. Environment

+ +
+
+

17.2. Overview

+ +
+
+

17.3. Required software

+ + + +
+
+

17.4. Setting up the environment

+
+
    +
  1. +

    Download and install Visual Studio Code

    +
  2. +
  3. +

    Download and install .Net Core SDK

    +
  4. +
  5. +

    Install the OmniSharp extension in Visual Studio Code

    +
  6. +
+
+
+
+

17.5. == Hello world

+
+
    +
  1. +

    Open a project:

    +
    +
      +
    • +

      Open Visual Studio Code.

      +
    • +
    • +

      Click on the Explorer icon on the left menu and then click Open Folder.

      +
    • +
    • +

      Select the folder you want your C# project to be in and click Select Folder. For our example, we’ll create a folder for our project named 'HelloWorld'.

      +
    • +
    +
    +
  2. +
  3. +

    Initialize a C# project:

    +
    +
      +
    • +

      Open the Integrated Terminal from Visual Studio Code by typing CTRL+(backtick). Alternatively, you can select View > Integrated Terminal from the main menu.

      +
    • +
    • +

      In the terminal window, type dotnet new console.

      +
    • +
    • +

      This creates a Program.cs file in your folder with a simple "Hello World" program already written, along with a C# project file named HelloWorld.csproj.

      +
    • +
    +
    +
  4. +
  5. +

    Resolve the build assets:

    +
    +
      +
    • +

      For .NET Core 2.0, this step is optional. The dotnet restore command executes automatically when a new project is created.

      +
    • +
    +
    +
  6. +
  7. +

    Run the "Hello World" program:

    +
    +
      +
    • +

      Type dotnet run.

      +
    • +
    +
    +
  8. +
+
+
+
+

17.6. Debug

+
+
    +
  1. +

    Open Program.cs by clicking on it. The first time you open a C# file in Visual Studio Code, OmniSharp will load in the editor.

    +
  2. +
  3. +

    Visual Studio Code will prompt you to add the missing assets to build and debug your app. Select Yes.

    +
  4. +
  5. +

    To open the Debug view, click on the Debugging icon on the left side menu.

    +
  6. +
  7. +

    Locate the green arrow at the top of the pane. Make sure the drop-down next to it has .NET Core Launch (console) selected.

    +
  8. +
  9. +

    Add a breakpoint to your project by clicking on the editor margin (the space on the left of the line numbers in the editor).

    +
  10. +
  11. +

    Select F5 or the green arrow to start debugging. The debugger stops execution of your program when it reaches the breakpoint you set in the previous step.

    +
    +
      +
    • +

      While debugging you can view your local variables in the top left pane or use the debug console.

      +
    • +
    +
    +
  12. +
  13. +

    Select the green arrow at the top to continue debugging, or select the red square at the top to stop.

    +
  14. +
+
+
+
+

==

+
+

For more information and troubleshooting tips on .NET Core debugging with OmniSharp in Visual Studio Code, see Instructions for setting up the .NET Core debugger. +== ==

+
+
+ +
+
+
+

18. Packages

+
+ +
+

18.1. Packages

+ +
+
+

18.2. Packages overview

+
+ + + + + +
+ + +devon4Net is composed of a number of packages that increase the functionality and boost development time. Each package has its own configuration to make it work properly. In appsettings.json set up your environment. On appsettings.{environment}.json you can configure each component. +
+
+
+
+

18.3. The packages

+
+

You can get the devon4Net packages on nuget.org.

+
+
+
+

18.4. Devon4Net.Application.WebAPI.Configuration

+ +
+
+

18.5. == Description

+
+

The devon4Net web API configuration core.

+
+
+
+

18.6. == Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package Devon4Net.Application.WebAPI.Configuration
    +
    +
    +
  • +
+
+
+
+

18.7. == Default configuration values

+
+
+
  "devonfw": {
+    "UseDetailedErrorsKey": true,
+    "UseIIS": false,
+    "UseSwagger": true,
+    "Environment": "Development",
+    "KillSwitch": {
+      "killSwitchSettingsFile": "killswitch.appsettings.json"
+    },
+    "Kestrel": {
+      "UseHttps": true,
+      "HttpProtocol": "Http2", //Http1, Http2, Http1AndHttp2, none
+      "ApplicationPort": 8082,
+      "KeepAliveTimeout": 120, //in seconds
+      "MaxConcurrentConnections": 100,
+      "MaxConcurrentUpgradedConnections": 100,
+      "MaxRequestBodySize": 28.6, //In MB. The default maximum request body size is 30,000,000 bytes, which is approximately 28.6 MB
+      "Http2MaxStreamsPerConnection": 100,
+      "Http2InitialConnectionWindowSize": 131072, // From 65,535 and less than 2^31 (2,147,483,648)
+      "Http2InitialStreamWindowSize": 98304, // From 65,535 and less than 2^31 (2,147,483,648)
+      "AllowSynchronousIO": true,
+      "SslProtocol": "Tls12", //Tls, Tls11,Tls12, Tls13, Ssl2, Ssl3, none. For Https2 Tls12 is needed
+      "ServerCertificate": {
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost"
+      },
+      "ClientCertificate": {
+        "DisableClientCertificateCheck": true,
+        "RequireClientCertificate": false,
+        "CheckCertificateRevocation": true,
+        "ClientCertificates": {
+          "Whitelist": [
+            "3A87A49460E8FE0E2A198E63D408DC58435BC501"
+          ],
+          "DisableClientCertificateCheck": false
+        }
+      }
+    },
+    "IIS": {
+      "ForwardClientCertificate": true,
+      "AutomaticAuthentication": true,
+      "AuthenticationDisplayName" : ""
+    }
+  }
+
+
+
+
+

18.8. Devon4Net.Infrastructure.CircuitBreaker

+ +
+
+

18.9. == Description

+
+

The Devon4Net.Infrastructure.CircuitBreaker component implements the retry pattern for HTTP/HTTPS calls.

+
+
+
+

18.10. == Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package Devon4Net.Infrastructure.CircuitBreaker
    +
    +
    +
  • +
+
+
+
+

18.11. == Default configuration values

+
+
+
  "CircuitBreaker": {
+    "CheckCertificate": true,
+    "Endpoints": [
+      {
+        "Name": "SampleService",
+        "BaseAddress": "https://localhost:5001/",
+        "Headers": {
+        },
+        "WaitAndRetrySeconds": [
+          0.0001,
+          0.0005,
+          0.001
+        ],
+        "DurationOfBreak": 0.0005,
+        "UseCertificate": true,
+        "Certificate": "localhost.pfx",
+        "CertificatePassword": "localhost",
+        "SslProtocol": "3072" //TLS12
+      }
+    ]
+  }
+
+
+
+

|== == == == == == == == == == == = +|Property|Description +|CheckCertificate| True if HTTPS is required. This is useful when developing an API Gateway that needs secured HTTP; disabling this on development we can use communications with a valid server certificate +|Endpoints| Array with predefined sites to connect with +|Name| The name key to identify the destination URL +|Headers| Not ready yet +|WaitAndRetrySeconds| Array which determines the number of retries and the lapse period between each retry. The value is in milliseconds. +|Certificate| Certificate client to use to perform the HTTP call +|SslProtocol| The secure protocol to use on the call +|== == == == == == == == == == == =

+
+
+
+

18.12. == Protocols

+
+

|== == == == == == == == == == == = +|Protocol|Key|Description +|SSl3|48| Specifies the Secure Socket Layer (SSL) 3.0 security protocol. SSL 3.0 has been superseded by the Transport Layer Security (TLS) protocol and is provided for backward compatibility only. +|TLS|192|Specifies the Transport Layer Security (TLS) 1.0 security protocol. The TLS 1.0 protocol is defined in IETF RFC 2246. +|TLS11|768| Specifies the Transport Layer Security (TLS) 1.1 security protocol. The TLS 1.1 protocol is defined in IETF RFC 4346. On Windows systems, this value is supported starting with Windows 7. +|TLS12|3072| Specifies the Transport Layer Security (TLS) 1.2 security protocol. The TLS 1.2 protocol is defined in IETF RFC 5246. On Windows systems, this value is supported starting with Windows 7. +|TLS13|12288| Specifies the TLS 1.3 security protocol. The TLS protocol is defined in IETF RFC 8446.

+
+
+

|== == == == == == == == == == == =

+
+
+
+

18.13. == Usage

+
+

Add via Dependency Injection the circuit breaker instance. PE:

+
+
+
+
    public class FooService : Service<TodosContext>, ILoginService
+    {
+ public FooService(IUnitOfWork<AUTContext> uoW,  ICircuitBreakerHttpClient circuitBreakerClient,
+            ILogger<LoginService> logger) : base(uoW)
+        {
+        ...
+        }
+    }
+
+
+
+

At this point you can use the circuit breaker functionality in your code.

+
+
+

To perform a POST call you should use your circuit breaker instance as follows:

+
+
+
+
await circuitBreakerClient.PostAsync<YourOutputClass>(NameOftheService, EndPoint, InputData, MediaType.ApplicationJson).ConfigureAwait(false);
+
+
+
+

Where:

+
+
+

|== == == == == == == == == == == = +|Property|Description +|YourOutputClass| The type of the class that you are expecting to retrieve from the POST call +|NameOftheService| The key name of the endpoint provided in the appsettings.json file at Endpoints[] node +|EndPoint|Part of the url to use with the base address. PE: /validate +|InputData| Your instance of the class with values that you want to use in the POST call +|MediaType.ApplicationJson| The media type flag for the POST call +|== == == == == == == == == == == =

+
+
+
+

18.14. devon4Net.Domain.UnitOfWork

+ +
+
+

18.15. == Description

+
+

Unit of work implementation for devon4net solution. This unit of work provides the different methods to access the data layer with an atomic context. Sync and Async repository operations are provided. Customized Eager Loading method also provided for custom entity properties.

+
+
+ + + + + +
+ + +This component will move on next releases to Infrastructure instead of being part of Domain components +
+
+
+
+

18.16. == Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Domain.UnitOfWork
    +
    +
    +
  • +
  • +

    Adding the database connection information:

    +
  • +
+
+
+

Add the database connection on the SetupDatabase method at Startup.cs

+
+
+
+
       private void SetupDatabase(IServiceCollection services)
+        {
+            services.SetupDatabase<TodoContext>(Configuration, "Default", WebAPI.Configuration.Enums.DatabaseType.InMemory);
+        }
+
+
+
+

Where:

+
+
+

|== == == == == == == == == == == = +|Param|Description +|TodoContext| Is the database context definition +|Default| Is the connection string defined at ConnectionString node at the appsettings configuration file +|WebAPI.Configuration.Enums.DatabaseType.InMemory| Is the database driver selection. In this case InMemory data base is chosen +|== == == == == == == == == == == =

+
+
+

The supported databases are:

+
+
+
    +
  • +

    SqlServer

    +
  • +
  • +

    Sqlite

    +
  • +
  • +

    InMemory

    +
  • +
  • +

    Cosmos

    +
  • +
  • +

    PostgreSQL

    +
  • +
  • +

    MySql

    +
  • +
  • +

    MariaDb

    +
  • +
  • +

    FireBird

    +
  • +
  • +

    Oracle

    +
  • +
  • +

    MSAccess

    +
  • +
+
+
+
+

18.17. == Notes

+
+

Now you can use the unit of work via dependency injection on your classes:

+
+
+
+UOW `DI` Sample +
+
Figure 76. Use of Unit of work via dependency injection
+
+
+

As you can see in the image, you can use Unit Of Work class with your defined ModelContext classes.

+
+
+

Predicate expression builder

+
+
+
    +
  • +

    Use this expression builder to generate lambda expressions dynamically.

    +
    +
    +
    var predicate =  PredicateBuilder.True<T>();
    +
    +
    +
  • +
+
+
+

Where T is a class. At this moment, you can build your expression and apply it to obtain your results in an efficient way, not retrieving data each time you apply an expression.

+
+
+
    +
  • +

    Example from My Thai Star .Net Core implementation:

    +
  • +
+
+
+
+
public async Task<PaginationResult<Dish>> GetpagedDishListFromFilter(int currentpage, int pageSize, bool isFav, decimal maxPrice, int minLikes, string searchBy, IList<long> categoryIdList, long userId)
+{
+    var includeList = new List<string>{"DishCategory","DishCategory.IdCategoryNavigation", "DishIngredient","DishIngredient.IdIngredientNavigation","IdImageNavigation"};
+
+    //Here we create our predicate builder
+    var dishPredicate = PredicateBuilder.True<Dish>();
+
+
+    //Now we start applying the different criteria:
+    if (!string.IsNullOrEmpty(searchBy))
+    {
+        var criteria = searchBy.ToLower();
+        dishPredicate = dishPredicate.And(d => d.Name.ToLower().Contains(criteria) || d.Description.ToLower().Contains(criteria));
+    }
+
+    if (maxPrice > 0) dishPredicate = dishPredicate.And(d=>d.Price<=maxPrice);
+
+    if (categoryIdList.Any())
+    {
+        dishPredicate = dishPredicate.And(r => r.DishCategory.Any(a => categoryIdList.Contains(a.IdCategory)));
+    }
+
+    if (isFav && userId >= 0)
+    {
+        var favourites = await UoW.Repository<UserFavourite>().GetAllAsync(w=>w.IdUser ==  userId);
+        var dishes = favourites.Select(s => s.IdDish);
+        dishPredicate = dishPredicate.And(r=> dishes.Contains(r.Id));
+    }
+
+    // Now we can use the predicate to retrieve data from database with just one call
+    return await UoW.Repository<Dish>().GetAllIncludePagedAsync(currentpage, pageSize, includeList, dishPredicate);
+
+}
+
+
+
+
+

18.18. devon4Net.Infrastructure.Extensions

+ +
+
+

18.19. == Description

+
+

Miscellaneous extension library which contains: +- Predicate expression builder +- DateTime formatter +- HttpClient +- HttpContext (Middleware support)

+
+
+
+

18.20. == Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.Extensions
    +
    +
    +
  • +
+
+
+

HttpContext

+
+
+
    +
  • +

    TryAddHeader method is used on devon4Net.Infrastructure.Middleware component to add automatically response header options such authorization.

    +
  • +
+
+
+
+

18.21. devon4Net.Infrastructure.JWT

+ +
+
+

18.22. == Description

+
+
+
+

JSON Web Token (JWT) is an open standard (RFC 7519) that defines a compact and self-contained way for securely transmitting information between parties as a JSON object. This information can be verified and trusted because it is digitally signed. JWTs can be signed using a secret (with the HMAC algorithm) or a public/private key pair using RSA or ECDSA.

+
+
+
+— What is JSON Web Token?
+https://jwt.io/introduction/ +
+
+
+
    +
  • +

    devon4Net component to manage JWT standard to provide security to .Net API applications.

    +
  • +
+
+
+
+

18.23. == Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.JWT
    +
    +
    +
  • +
+
+
+
+

18.24. == Default configuration values

+
+
+
"JWT": {
+    "Audience": "devon4Net",
+    "Issuer": "devon4Net",
+    "TokenExpirationTime": 60,
+    "ValidateIssuerSigningKey": true,
+    "ValidateLifetime": true,
+    "ClockSkew": 5,
+    "Security": {
+      "SecretKeyLengthAlgorithm": "",
+      "SecretKeyEncryptionAlgorithm": "",
+      "SecretKey": "",
+      "Certificate": "",
+      "CertificatePassword": "",
+      "CertificateEncryptionAlgorithm": ""
+    }
+  }
+
+
+
+
    +
  • +

    ClockSkew indicates the token expiration time in minutes

    +
  • +
  • +

    Certificate: you can specify the name of your certificate (if it is on the same path) or the full path of the certificate. If the certificate does not exist, an exception will be raised.

    +
  • +
  • +

    SecretKeyLengthAlgorithm, SecretKeyEncryptionAlgorithm and CertificateEncryptionAlgorithm supported algorithms are:

    +
  • +
+
+
+

|== == == == == == == == == == == = +|Algorithm|Description +|Aes128Encryption|"http://www.w3.org/2001/04/xmlenc#aes128-cbc" +|Aes192Encryption|"http://www.w3.org/2001/04/xmlenc#aes192-cbc" +|Aes256Encryption|"http://www.w3.org/2001/04/xmlenc#aes256-cbc" +|DesEncryption|"http://www.w3.org/2001/04/xmlenc#des-cbc" +|Aes128KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes128" +|Aes192KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes192" +|Aes256KeyWrap|"http://www.w3.org/2001/04/xmlenc#kw-aes256" +|RsaV15KeyWrap|"http://www.w3.org/2001/04/xmlenc#rsa-1_5" +|Ripemd160Digest|"http://www.w3.org/2001/04/xmlenc#ripemd160" +|RsaOaepKeyWrap|"http://www.w3.org/2001/04/xmlenc#rsa-oaep" +|Aes128KW|"A128KW" +|Aes256KW|"A256KW" +|RsaPKCS1|"RSA1_5" +|RsaOAEP|"RSA-OAEP" +|ExclusiveC14n|"http://www.w3.org/2001/10/xml-exc-c14n#" +|ExclusiveC14nWithComments|"http://www.w3.org/2001/10/xml-exc-c14n#WithComments" +|EnvelopedSignature|"http://www.w3.org/2000/09/xmldsig#enveloped-signature" +|Sha256Digest|"http://www.w3.org/2001/04/xmlenc#sha256" +|Sha384Digest|"http://www.w3.org/2001/04/xmldsig-more#sha384" +|Sha512Digest|"http://www.w3.org/2001/04/xmlenc#sha512" +|Sha256|"SHA256" +|Sha384|"SHA384" +|Sha512|"SHA512" +|EcdsaSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha256" +|EcdsaSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha384" +|EcdsaSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha512" +|HmacSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha256" +|HmacSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha384" +|HmacSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#hmac-sha512" +|RsaSha256Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha256" +|RsaSha384Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha384" +|RsaSha512Signature|"http://www.w3.org/2001/04/xmldsig-more#rsa-sha512" +|RsaSsaPssSha256Signature|"http://www.w3.org/2007/05/xmldsig-more#sha256-rsa-MGF1" 
+|RsaSsaPssSha384Signature|"http://www.w3.org/2007/05/xmldsig-more#sha384-rsa-MGF1" +|RsaSsaPssSha512Signature|"http://www.w3.org/2007/05/xmldsig-more#sha512-rsa-MGF1" +|EcdsaSha256|"ES256" +|EcdsaSha384|"ES384" +|EcdsaSha512|"ES512" +|HmacSha256|"HS256" +|HmacSha384|"HS384" +|HmacSha512|"HS512" +|None|"none" +|RsaSha256|"RS256" +|RsaSha384|"RS384" +|RsaSha512|"RS512" +|RsaSsaPssSha256|"PS256" +|RsaSsaPssSha384|"PS384" +|RsaSsaPssSha512|"PS512" +|Aes128CbcHmacSha256|"A128CBC-HS256" +|Aes192CbcHmacSha384|"A192CBC-HS384" +|Aes256CbcHmacSha512|"A256CBC-HS512" +|== == == == == == == == == == == =

+
+
+ + + + + +
+ + +Please check Microsoft documentation to get the latest updates on supported encryption algorithms +
+
+
+
    +
  • +

    Add this line of code (only if you use this component stand alone):

    +
  • +
+
+
+
+
services.AddBusinessCommonJwtPolicy();
+
+
+
+

On

+
+
+
+
Startup.cs
+
+
+
+

or on:

+
+
+
+
devon4Net.Application.Configuration.Startup/JwtApplicationConfiguration/ConfigureJwtPolicy method.
+
+
+
+
    +
  • +

    Inside the AddBusinessCommonJwtPolicy method you can add your JWT Policy like in My Thai Star application sample:

    +
  • +
+
+
+
+
 services.ConfigureJwtAddPolicy("MTSWaiterPolicy", "role", "waiter");
+
+
+
+
+

18.25. == Notes

+
+
    +
  • +

    The certificate will be used to generate the key to encrypt the json web token.

    +
  • +
+
+
+
+

18.26. devon4Net.Infrastructure.Middleware

+ +
+
+

18.27. == Description

+
+
    +
  • +

    devon4Net support for middleware classes.

    +
  • +
  • +

    In ASP.NET Core, middleware classes can handle an HTTP request or response. Middleware can either:

    +
    +
      +
    • +

      Handle an incoming HTTP request by generating an HTTP response.

      +
    • +
    • +

      Process an incoming HTTP request, modify it, and pass it on to another piece of middleware.

      +
    • +
    • +

      Process an outgoing HTTP response, modify it, and pass it on to either another piece of middleware, or the ASP.NET Core web server.

      +
    • +
    +
    +
  • +
  • +

    devon4Net supports the following automatic response headers:

    +
    +
      +
    • +

      AccessControlExposeHeader

      +
    • +
    • +

      StrictTransportSecurityHeader

      +
    • +
    • +

      XFrameOptionsHeader

      +
    • +
    • +

      XssProtectionHeader

      +
    • +
    • +

      XContentTypeOptionsHeader

      +
    • +
    • +

      ContentSecurityPolicyHeader

      +
    • +
    • +

      PermittedCrossDomainPoliciesHeader

      +
    • +
    • +

      ReferrerPolicyHeader:toc: macro

      +
    • +
    +
    +
  • +
+
+
+
+

18.28. == Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.Middleware
    +
    +
    +
  • +
  • +

    You can configure your Middleware configuration on appsettings.{environment}.json:

    +
  • +
+
+
+
+
"Middleware": {
+    "Headers": {
+      "AccessControlExposeHeader": "Authorization",
+      "StrictTransportSecurityHeader": "",
+      "XFrameOptionsHeader": "DENY",
+      "XssProtectionHeader": "1;mode=block",
+      "XContentTypeOptionsHeader": "nosniff",
+      "ContentSecurityPolicyHeader": "",
+      "PermittedCrossDomainPoliciesHeader": "",
+      "ReferrerPolicyHeader": ""
+    }
+}
+
+
+
+
    +
  • +

    On the above sample, the server application will add to response header the AccessControlExposeHeader, XFrameOptionsHeader, XssProtectionHeader and XContentTypeOptionsHeader headers.

    +
  • +
  • +

    If the header response type does not have a value, it will not be added to the response headers.

    +
  • +
+
+
+
+

18.29. devon4Net.Infrastructure.Swagger

+ +
+
+

18.30. == Description

+
+
    +
  • +

    devon4Net Swagger abstraction to provide fully externalized, easy configuration.

    +
  • +
  • +

    Swagger offers the easiest to use tools to take full advantage of all the capabilities of the OpenAPI Specification (OAS).

    +
  • +
+
+
+
+

18.31. == Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.Swagger
    +
    +
    +
  • +
  • +

    You can configure your Swagger configuration on appsettings.{environment}.json:

    +
  • +
+
+
+
+
"Swagger": {
+    "Version": "v1",
+    "Title": "devon4net API",
+    "Description": "devon4net API Contract",
+    "Terms": "https://www.devonfw.com/terms-of-use/",
+    "Contact": {
+      "Name": "devonfw",
+      "Email": "sample@mail.com",
+      "Url": "https://www.devonfw.com"
+    },
+    "License": {
+      "Name": "devonfw - Terms of Use",
+      "Url": "https://www.devonfw.com/terms-of-use/"
+    },
+    "Endpoint": {
+      "Name": "V1 Docs",
+      "Url": "/swagger/v1/swagger.json",
+      "UrlUi": "swagger",
+      "RouteTemplate": "swagger/v1/{documentName}/swagger.json"
+    }
+  }
+
+
+
+
    +
  • +

    Add this line of code (only if you use this component stand alone):

    +
  • +
+
+
+
+
services.ConfigureSwaggerService();
+
+
+
+

On

+
+
+
+
Startup.cs
+
+
+
+
    +
  • +

    Also add this line of code (only if you use this component stand alone):

    +
  • +
+
+
+
+
app.ConfigureSwaggerApplication();
+
+
+
+

On

+
+
+
+
Startup.cs/Configure(IApplicationBuilder app, IHostingEnvironment env)
+
+
+
+
    +
  • +

    Ensure your API actions and non-route parameters are decorated with explicit "Http" and "From" bindings.

    +
  • +
+
+
+
+

18.32. == Notes

+
+
    +
  • +

    To access the Swagger UI, launch your API project and type the url http://localhost:yourPort/swagger in your browser.

    +
  • +
  • +

    In order to generate the documentation annotate your actions with summary, remarks and response tags:

    +
  • +
+
+
+
+
/// <summary>
+/// Method to make a reservation with potential guests. The method returns the reservation token with the format: {(CB_|GB_)}{now.Year}{now.Month:00}{now.Day:00}{_}{MD5({Host/Guest-email}{now.Year}{now.Month:00}{now.Day:00}{now.Hour:00}{now.Minute:00}{now.Second:00})}
+/// </summary>
+/// <param name="bookingDto"></param>
+/// <response code="201">Ok.</response>
+/// <response code="400">Bad request. Parser data error.</response>
+/// <response code="401">Unauthorized. Authentication fail.</response>
+/// <response code="403">Forbidden. Authorization error.</response>
+/// <response code="500">Internal Server Error. The search process ended with error.</response>
+[HttpPost]
+[HttpOptions]
+[Route("/mythaistar/services/rest/bookingmanagement/v1/booking")]
+[AllowAnonymous]
+[EnableCors("CorsPolicy")]
+public async Task<IActionResult> Booking([FromBody]BookingDto bookingDto)
+{
+    try
+    {
+
+    ...
+
+
+
+
    +
  • +

    Ensure that your project has the Generate XML documentation file check active on the build menu:

    +
  • +
+
+
+
+Generate documentation XML check +
+
Figure 77. Swagger documentation
+
+
+
    +
  • +

    Ensure that your XML files have the attribute Copy always set to true:

    +
  • +
+
+
+
+Generate documentation XML check +
+
Figure 78. Swagger documentation
+
+
+
+

18.33. devon4Net.Infrastructure.Test

+ +
+
+

18.34. == Description

+
+

devon4Net Base classes to create unit tests and integration tests with Moq and xUnit.

+
+
+
+

18.35. == Configuration

+
+
    +
  • +

    Load the template: +> dotnet new -i devon4Net.Test.Template +> dotnet new devon4NetTest

    +
  • +
+
+
+
+

18.36. == Notes

+
+
    +
  • +

    At this point you can find these classes:

    +
    +
      +
    • +

      BaseManagementTest

      +
    • +
    • +

      DatabaseManagementTest<T> (Where T is a devon4NetBaseContext class)

      +
    • +
    +
    +
  • +
  • +

    For unit testing, inherit a class from BaseManagementTest.

    +
  • +
  • +

    For integration tests, inherit a class from DatabaseManagementTest.

    +
  • +
  • +

    The recommended databases for integration tests are the in-memory database or the SQLite database.

    +
  • +
  • +

    Please check the My Thai Star test project.

    +
  • +
+
+
+
+

18.37. Deprecated packages

+ +
+
+

18.38. devon4Net.Domain.Context

+ +
+
+

18.39. == Description

+
+

devon4Net.Domain.Context contains the extended class devon4NetBaseContext in order to make easier the process of having a model context configured against different database engines. This configuration allows an easier testing configuration against local and in memory databases.

+
+
+
+

18.40. == Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Domain.Context
    +
    +
    +
  • +
  • +

    Add to appsettings.{environment}.json file your database connections:

    +
  • +
+
+
+
+
"ConnectionStrings":
+{
+"DefaultConnection":
+"Server=localhost;Database=MyThaiStar;User Id=sa;Password=sa;MultipleActiveResultSets=True;",
+
+"AuthConnection":
+"Server=(localdb)\\mssqllocaldb;Database=aspnet-DualAuthCore-5E206A0B-D4DA-4E71-92D3-87FD6B120C5E;Trusted_Connection=True;MultipleActiveResultSets=true",
+
+"SqliteConnection": "Data Source=c:\\tmp\\membership.db;"
+}
+
+
+
+
    +
  • +

    On Startup.cs :

    +
  • +
+
+
+
+
void ConfigureServices(IServiceCollection services)
+
+
+
+
    +
  • +

    Add your database connections defined on previous point:

    +
  • +
+
+
+
+
services.ConfigureDataBase(
+new Dictionary<string, string> {
+{ConfigurationConst.DefaultConnection, Configuration.GetConnectionString(ConfigurationConst.DefaultConnection) }});
+
+
+
+
    +
  • +

    On devon4Net.Application.Configuration.Startup/DataBaseConfiguration/ConfigureDataBase configure your connections.

    +
  • +
+
+
+
+

18.41. devon4Net.Infrastructure.ApplicationUser

+ +
+
+

18.42. == Description

+
+

devon4Net application user classes to implement Microsoft’s basic authentication in order to be used with authentication methodologies such as JSON Web Token (JWT).

+
+
+
+

18.43. == Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.ApplicationUser
    +
    +
    +
  • +
  • +

    Add the database connection string for user management on appsettings.{environment}.json:

    +
  • +
+
+
+
+
"ConnectionStrings":
+{
+"AuthConnection":
+"Server=(localdb)\\mssqllocaldb;Database=aspnet-DualAuthCore-5E206A0B-D4DA-4E71-92D3-87FD6B120C5E;Trusted_Connection=True;MultipleActiveResultSets=true"
+}
+
+
+
+
    +
  • +

    Add the following line of code

    +
  • +
+
+
+
+
services.AddApplicationUserDependencyInjection();
+
+
+
+

On

+
+
+
+
Startup.cs/ConfigureServices(IServiceCollection services)
+
+
+
+

or on:

+
+
+
+
devon4Net.Application.Configuration.Startup/DependencyInjectionConfiguration/ConfigureDependencyInjectionService method.
+
+
+
+
    +
  • +

    Add the data seeder on the Configure method on the Startup.cs class:

    +
  • +
+
+
+
+
public void Configure(IApplicationBuilder app, IHostingEnvironment env, DataSeeder seeder)
+{
+    ...
+
+    app.UseAuthentication();
+    seeder.SeedAsync().Wait();
+
+    ...
+}
+
+
+
+
+

18.44. == Notes

+
+
    +
  • +

    You can use the following methods to set up the database configuration:

    +
  • +
+
+
+
+
public static void AddApplicationUserDbContextInMemoryService(this IServiceCollection services)
+
+public static void AddApplicationUserDbContextSQliteService(this IServiceCollection services, string connectionString)
+
+public static void AddApplicationUserDbContextSQlServerService(this IServiceCollection services, string connectionString)
+
+
+
+
    +
  • +

    The method AddApplicationUserDbContextInMemoryService uses the AuthContext connection string name to set up the database.

    +
  • +
  • +

    This component is used with the components devon4Net.Infrastructure.JWT and devon4Net.Infrastructure.JWT.MVC.

    +
  • +
+
+
+
+

18.45. devon4Net.Infrastructure.Communication

+ +
+
+

18.46. == Description

+
+

Basic client classes to invoke GET/POST methods asynchronously. This component has the minimal classes to send basic data. For more complex operations please use devon4Net.Infrastructure.Extensions.

+
+
+
+

18.47. == Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.Communication
    +
    +
    +
  • +
  • +

    Create an instance of RestManagementService class.

    +
  • +
  • +

    Use the following methods for basic GET/POST operations:

    +
  • +
+
+
+
+
public Task<string> CallGetMethod(string url);
+public Task<Stream> CallGetMethodAsStream(string url);
+public Task<string> CallPostMethod<T>(string url, T dataToSend);
+public Task<string> CallPutMethod<T>(string url, T dataToSend);
+
+
+
+
+

18.48. == Notes

+
+
    +
  • +

    Example:

    +
  • +
+
+
+
+
private async Task RestManagementServiceSample(EmailDto dataToSend)
+{
+    var url = Configuration["EmailServiceUrl"];
+    var restManagementService = new RestManagementService();
+    await restManagementService.CallPostMethod(url, dataToSend);
+}
+
+
+
+
+

18.49. devon4Net.Infrastructure.JWT.MVC

+ +
+
+

18.50. == Description

+
+
    +
  • +

    devon4Net Extended controller to interact with JWT features

    +
  • +
+
+
+
+

18.51. == Configuration

+
+
    +
  • +

    Extend your Microsoft.AspNetCore.Mvc.Controller class with the devon4NetJWTController class:

    +
  • +
+
+
+
+
public class LoginController : devon4NetJWTController
+{
+    private readonly ILoginService _loginService;
+
+    public LoginController(ILoginService loginService,  SignInManager<ApplicationUser>  signInManager, UserManager<ApplicationUser> userManager, ILogger<LoginController> logger, IMapper mapper) : base(logger,mapper)
+    {
+        _loginService = loginService;
+    }
+
+    ....
+
+
+
+
+

18.52. == Notes

+
+
    +
  • +

    In order to generate a JWT, you should implement the JWT generation on user login. For example, in My Thai Star is created as follows:

    +
  • +
+
+
+
+
public async Task<IActionResult> Login([FromBody]LoginDto loginDto)
+{
+    try
+    {
+        if (loginDto ==  null) return Ok();
+        var logged = await _loginService.LoginAsync(loginDto.UserName, loginDto.Password);
+
+        if (logged)
+        {
+            var user = await _loginService.GetUserByUserNameAsync(loginDto.UserName);
+
+            var encodedJwt = new JwtClientToken().CreateClientToken(_loginService.GetUserClaimsAsync(user));
+
+            Response.Headers.Add("Access-Control-Expose-Headers", "Authorization");
+
+            Response.Headers.Add("Authorization", $"{JwtBearerDefaults.AuthenticationScheme} {encodedJwt}");
+
+            return Ok(encodedJwt);
+        }
+        else
+        {
+            Response.Headers.Clear();
+            return StatusCode((int)HttpStatusCode.Unauthorized, "Login Error");
+        }
+
+    }
+    catch (Exception ex)
+    {
+        return StatusCode((int)HttpStatusCode.InternalServerError, $"{ex.Message} : {ex.InnerException}");
+    }
+}
+
+
+
+
    +
  • +

    In My Thai Star the JWT will contain the user information such as id, roles…

    +
  • +
  • +

    Once you extend your controller with devon4NetJWTController you will have available these methods to simplify user management:

    +
  • +
+
+
+
+
    public interface Idevon4NetJWTController
+    {
+        // Gets the current user
+        JwtSecurityToken GetCurrentUser();
+
+        // Gets an specific assigned claim of current user
+        Claim GetUserClaim(string claimName, JwtSecurityToken jwtUser = null);
+
+        // Gets all the assigned claims of current user
+        IEnumerable<Claim> GetUserClaims(JwtSecurityToken jwtUser = null);
+    }
+
+
+
+
+

18.53. devon4Net.Infrastructure.MVC

+ +
+
+

18.54. == Description

+
+

Common classes to extend controller functionality on API. Also provides support for paged results in devon4Net applications and automapper injected class.

+
+
+
+

18.55. == Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.MVC
    +
    +
    +
  • +
+
+
+
+

18.56. == Notes

+
+
    +
  • +

    The generic class ResultObjectDto<T> provides a typed result object with pagination.

    +
  • +
  • +

    The extended class provides the following methods:

    +
  • +
+
+
+
+
        ResultObjectDto<T> GenerateResultDto<T>(int? page, int? size, int? total);
+        ResultObjectDto<T> GenerateResultDto<T>(List<T> result, int? page = null, int? size = null);
+
+
+
+
    +
  • +

    GenerateResultDto provides a typed ResultObjectDto object or a list of typed ResultObjectDto objects. The aim of these methods is to provide clean management of result objects and avoid repeating code across the different controller classes.

    +
  • +
  • +

    The following sample from My Thai Star shows how to use it:

    +
  • +
+
+
+
+
public async Task<IActionResult> Search([FromBody] FilterDtoSearchObject filterDto)
+{
+    if (filterDto ==  null) filterDto = new FilterDtoSearchObject();
+
+    try
+    {
+        var dishList = await _dishService.GetDishListFromFilter(false, filterDto.GetMaxPrice(), filterDto.GetMinLikes(), filterDto.GetSearchBy(),filterDto.GetCategories(), -1);
+
+
+        return new OkObjectResult(GenerateResultDto(dishList).ToJson());
+    }
+    catch (Exception ex)
+    {
+        return StatusCode((int)HttpStatusCode.InternalServerError, $"{ex.Message} : {ex.InnerException}");
+    }
+}
+
+
+
+
+

18.57. devon4Net.Infrastructure.AOP

+ +
+
+

18.58. == Description

+
+

Simple AOP Exception handler for .Net Controller classes integrated with Serilog.

+
+
+
+

18.59. == Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Domain.AOP
    +
    +
    +
  • +
+
+
+

Add this line of code on ConfigureServices method on Startup.cs

+
+
+
+
services.AddAopAttributeService();
+
+
+
+
+

18.60. == Notes

+
+

Now your API methods exposed on controller classes will automatically be tracked on the following methods:

+
+
+
    +
  • +

    OnActionExecuting

    +
  • +
  • +

    OnActionExecuted

    +
  • +
  • +

    OnResultExecuting

    +
  • +
  • +

    OnResultExecuted

    +
  • +
+
+
+

If an exception occurs, a message will be displayed on log with the stack trace.

+
+
+
+

18.61. devon4Net.Infrastructure.Cors

+ +
+
+

18.62. == Description

+
+

Enables CORS configuration for a devon4Net application. Multiple domains can be configured from configuration. Mandatory for web clients (e.g. Angular), which are otherwise prevented from making AJAX requests to another domain.

+
+
+

Cross-Origin Resource Sharing (CORS) is a mechanism that uses additional HTTP headers to tell a browser to let a web application running at one origin (domain) have permission to access selected resources from a server at a different origin. A web application makes a cross-origin HTTP request when it requests a resource that has a different origin (domain, protocol, and port) than its own origin.

+
+
+

Please refer to this link to get more information about CORS and .Net core.

+
+
+
+

18.63. == Configuration

+
+
    +
  • +

    Install package on your solution:

    +
    +
    +
    PM> Install-Package devon4Net.Infrastructure.Cors
    +
    +
    +
  • +
  • +

    You can configure your Cors configuration on appsettings.{environment}.json:

    +
    +
    +
    `CorsPolicy`: indicates the name of the policy. You can use this name to add security headers on your API exposed methods.
    +
    +
    +
    +
    +
    Origins: The allowed domains
    +
    +
    +
    +
    +
    Headers: The allowed headers such accept,content-type,origin,x-custom-header
    +
    +
    +
  • +
  • +

    If you specify the CORS configuration as an empty array, a default CORS policy will be used with all origins enabled:

    +
  • +
+
+
+
+
  "Cors": []
+
+
+
+
    +
  • +

    On the other hand, you can specify different Cors policies in your solution as follows:

    +
  • +
+
+
+
+
"Cors": []
+[
+  {
+    "CorsPolicy": "CorsPolicy1",
+    "Origins": "http:example.com,http:www.contoso.com",
+    "Headers": "accept,content-type,origin,x-custom-header",
+    "Methods": "GET,POST,HEAD",
+    "AllowCredentials": true
+  },
+  {
+    "CorsPolicy": "CorsPolicy2",
+    "Origins": "http:example.com,http:www.contoso.com",
+    "Headers": "accept,content-type,origin,x-custom-header",
+    "Methods": "GET,POST,HEAD",
+    "AllowCredentials": true
+  }
+]
+
+
+
+
+

18.64. == Notes

+
+
    +
  • +

    To use CORS in your API methods, use the following annotation:

    +
  • +
+
+
+
+
[EnableCors("YourCorsPolicy")]
+public IActionResult Index() {
+    return View();
+}
+
+
+
+
    +
  • +

    if you want to disable the CORS check use the following annotation:

    +
  • +
+
+
+
+
[DisableCors]
+public IActionResult Index() {
+    return View();
+}
+
+
+
+
+

18.65. Required software

+ + + + +
+
+
+
+

19. Templates

+
+ +
+

==Templates

+
+
+

19.1. Overview

+
+

The given .Net Core and .Net Framework templates allow you to start coding an application with the following functionality ready to use:

+
+
+

Please refer to User guide in order to start developing.

+
+
+
+

19.2. Net Core 3.0

+
+

The .Net Core 3.0 template allows you to start developing an n-layer server application to provide the latest features. The template can be used in Visual Studio Code and Visual Studio 2019.

+
+
+

The application result can be deployed as a console application, microservice or web page.

+
+
+

To start developing with the devon4Net template, please follow these instructions:

+
+
+
Using devon4Net template
+ +
+
+
Option 1
+
+
    +
  1. +

    Open your favourite terminal (Win/Linux/iOS)

    +
  2. +
  3. +

    Go to future project’s path

    +
  4. +
  5. +

    Type dotnet new --install Devon4Net.WebAPI.Template

    +
  6. +
  7. +

    Type dotnet new Devon4NetAPI

    +
  8. +
  9. +

    Go to project’s path

    +
  10. +
  11. +

    You are ready to start developing with devon4Net

    +
  12. +
+
+
+
+
Option 2
+
+
    +
  1. +

    Create a new dotnet API project from scratch

    +
  2. +
  3. +

    Add the NuGet package reference to your project

    +
  4. +
  5. +

    Type dotnet new --install Devon4Net.WebAPI.Template

    +
  6. +
+
+
+
+
+

19.3. Net Core 2.1.x

+
+

The .Net Core 2.1.x template allows you to start developing an n-layer server application to provide the latest features. The template can be used in Visual Studio Code and Visual Studio 2017.

+
+
+

The application result can be deployed as a console application, microservice or web page.

+
+
+

To start developing with the devon4Net template, please follow these instructions:

+
+
+
Using devon4Net template
+
+
    +
  1. +

    Open your favourite terminal (Win/Linux/iOS)

    +
  2. +
  3. +

    Go to future project’s path

    +
  4. +
  5. +

    Type dotnet new --install Devon4Net.WebAPI.Template::1.0.8

    +
  6. +
  7. +

    Type dotnet new Devon4NetAPI

    +
  8. +
  9. +

    Go to project’s path

    +
  10. +
  11. +

    You are ready to start developing with devon4Net

    +
  12. +
+
+
+ + + + + +
+ + +For the latest updates on references packages, please get the sources from Github +
+
+
+
+
+ + +
+
+
+
+

20. Samples

+
+ +
+

20.1. Samples

+ +
+
+

20.2. My Thai Star Restaurant

+ +
+
+ +
+
+
+
+

20.3. Angular requirements

+
+ +
+
+
+

20.4. Angular client

+
+
    +
  1. +

    Install Node.js LTS version

    +
  2. +
  3. +

    Install Angular CLI from command line:

    +
    +
      +
    • +

      npm install -g @angular/cli

      +
    • +
    +
    +
  4. +
  5. +

    Install Yarn

    +
  6. +
  7. +

    Go to Angular client from command line

    +
  8. +
  9. +

    Execute : yarn install

    +
  10. +
  11. +

    Launch the app from command line: ng serve and check http://localhost:4200

    +
  12. +
  13. +

    You are ready

    +
  14. +
+
+
+
+

20.5. .Net Core server

+ +
+
+

20.6. == Basic architecture details

+
+

Following the devonfw conventions the .Net Core 2.0 My Thai Star backend is going to be developed dividing the application in Components and using a n-layer architecture.

+
+
+
+project modules +
+
+
+
+

20.7. == Components

+
+

The application is going to be divided in different components to encapsulate the different domains of the application functionalities.

+
+
+
+mtsn components +
+
+
+

As main components we will find:

+
+
+
    +
  • +

    BookingService: Manages the bookings part of the application. With this component the users (anonymous/logged in) can create new bookings or cancel an existing booking. The users with the waiter role can see all scheduled bookings.

    +
  • +
+
+
+

OrderService: This component handles the process to order dishes (related to bookings). A user (as a host or as a guest) can create orders (that contain dishes) or cancel an existing one. The users with the waiter role can see all placed orders.

+
+
+
    +
  • +

    DishService: This component groups the logic related to the menu (dishes) view. Its main feature is to provide the client with the data of the available dishes but also can be used by other components (Ordermanagement) as a data provider in some processes.

    +
  • +
  • +

    UserService: Takes care of the User Profile management, allowing to create and update the data profiles.

    +
  • +
+
+
+

As common components (that don’t exactly represent an application’s area but provide functionalities that can be used by the main components):

+
+
+
    +
  • +

    Mailservice: with this service we will provide the functionality for sending email notifications. This is a shared service between different app components such as bookingmanagement or ordercomponent.

    +
  • +
+
+
+

Other components:

+
+
+
    +
  • +

    Security (will manage the access to the private part of the application using a jwt implementation).

    +
  • +
  • +

    Twitter integration: planned as a Microservice will provide the twitter integration needed for some specific functionalities of the application.

    +
  • +
+
+
+
+

20.8. Layers

+ +
+
+

20.9. == Introduction

+
+

The .Net Core backend for My Thai Star application is going to be based on:

+
+
+
    +
  • +

    devon4NET as the .Net Core framework

    +
  • +
  • +

    VSCode as the Development environment

    +
  • +
  • +

    TOBAGO as code generation tool

    +
  • +
+
+
+
+

20.10. == Application layer

+
+

This layer will expose the REST api to exchange information with the client applications.

+
+
+

The application will expose the services on port 8081 and it can be launched as a self host console application (microservice approach) and as a Web Api application hosted on IIS/IIS Express.

+
+
+
+

20.11. == Business layer

+
+

This layer will define the controllers which will be used on the application layer to expose the different services. Also, will define the swagger contract making use of summary comments and framework attributes.

+
+
+

This layer also includes the object response classes in order to interact with external clients.

+
+
+
+

20.12. == Service layer

+
+

The layer in charge of hosting the business logic of the application. It also orchestrates the object conversion between object response and entity objects defined in the Data layer.

+
+
+
+

20.13. == Data layer

+
+

The layer to communicate with the data base.

+
+
+

Data layer makes use of Entity Framework. +The Database context is defined on DataAccessLayer assembly (ModelContext).

+
+
+

This layer makes use of the Repository pattern and Unit of work in order to encapsulate the complexity. Making use of this combined patterns we ensure an organized and easy work model.

+
+
+

As in the previous layers, the data access layer will have both interface and implementation tiers. However, in this case, the implementation will be slightly different due to the use of generics.

+
+
+
+

20.14. == Cross-Cutting concerns

+
+

The layer that makes use of transversal components such as JWT and mailing.

+
+
+
+

20.15. Jwt basics

+
+
    +
  • +

    A user will provide a username / password combination to our auth server.

    +
  • +
  • +

    The auth server will try to identify the user and, if the credentials match, will issue a token.

    +
  • +
  • +

    The user will send the token as the Authorization header to access resources on server protected by JWT Authentication.

    +
  • +
+
+
+
+jwt schema +
+
+
+
+

20.16. Jwt implementation details

+
+

The JSON Web Token pattern will be implemented based on the JWT support for .Net Core that is provided by default in the devon4Net projects.

+
+
+
+

20.17. Authentication

+
+

Based on the Microsoft approach, we will implement a class to define the security entry point and filters. Also, as My Thai Star is a mainly public application, we will define here the resources that won’t be secured.

+
+
+

A subset of Microsoft’s authorization schema database is defined in the devon4Net.Infrastructure.JWT assembly. It is initialized the first time the application launches.

+
+
+

You can read more about _Authorization on:

+
+ + +
+
+

20.18. Dependency injection

+
+

As it is explained in the Microsoft documentation we are going to implement the dependency injection pattern basing our solution on .Net Core.

+
+
+
+dependency injection +
+
+
+
    +
  • +

    Separation of API and implementation: Inside each layer we will separate the elements in different tiers: interface and implementation. The interface tier will store the interface with the methods definition and inside the implementation we will store the class that implements the interface.

    +
  • +
+
+
+
+

20.19. Layer communication method

+
+

The connection between layers, to access the functionalities of each one, will be solved using dependency injection.

+
+
+
+layer impl +
+
+
+

Connection BookingService - Logic

+
+
+
+
 public class BookingService : EntityService<Booking>, IBookingService
+    {
+        private readonly IBookingRepository _bookingRepository;
+        private readonly IRepository<Order> _orderRepository;
+        private readonly IRepository<InvitedGuest> _invitedGuestRepository;
+        private readonly IOrderLineRepository _orderLineRepository;
+        private readonly IUnitOfWork _unitOfWork;
+
+        public BookingService(IUnitOfWork unitOfWork,
+            IBookingRepository repository,
+            IRepository<Order> orderRepository,
+            IRepository<InvitedGuest> invitedGuestRepository,
+            IOrderLineRepository orderLineRepository) : base(unitOfWork, repository)
+        {
+            _unitOfWork = unitOfWork;
+            _bookingRepository = repository;
+            _orderRepository = orderRepository;
+            _invitedGuestRepository = invitedGuestRepository;
+            _orderLineRepository = orderLineRepository;
+        }
+}
+
+
+
+

To give service to the defined User Stories we will need to implement the following services:

+
+
+
    +
  • +

    provide all available dishes.

    +
  • +
  • +

    save a booking.

    +
  • +
  • +

    save an order.

    +
  • +
  • +

    provide a list of bookings (only for waiters) and allow filtering.

    +
  • +
  • +

    provide a list of orders (only for waiters) and allow filtering.

    +
  • +
  • +

    login service (see the Security section).

    +
  • +
  • +

    provide the current user data (see the Security section)

    +
  • +
+
+
+

Following the [naming conventions] proposed for devon4Net applications we will define the following end points for the listed services.

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that do not belong to the Order entity).

    +
  • +
  • +

    (POST) /mythaistar/login.

    +
  • +
  • +

    (GET) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

You can find all the details for the services implementation in the Swagger definition included in the My Thai Star project on Github.

+
+
+
+

20.20. Api Exposed

+
+

The devon4Net.Business.Controller assembly in the business layer of a component will store the definition of the service by means of an interface. In this definition of the service we will set up the endpoints of the service, the type of data expected and returned, the HTTP method for each endpoint of the service and other configurations if needed.

+
+
+
+
        /// <summary>
+        /// Method to make a reservation with potential guests. The method returns the reservation token with the format: {(CB_|GB_)}{now.Year}{now.Month:00}{now.Day:00}{_}{MD5({Host/Guest-email}{now.Year}{now.Month:00}{now.Day:00}{now.Hour:00}{now.Minute:00}{now.Second:00})}
+        /// </summary>
+
+        /// <param name="bookingView"></param>
+        /// <response code="201">Ok.</response>
+        /// <response code="400">Bad request. Parser data error.</response>
+        /// <response code="401">Unauthorized. Authentication fail.</response>
+        /// <response code="403">Forbidden. Authorization error.</response>
+        /// <response code="500">Internal Server Error. The search process ended with error.</response>
+        [HttpPost]
+        [HttpOptions]
+        [Route("/mythaistar/services/rest/bookingmanagement/v1/booking")]
+        [AllowAnonymous]
+        [EnableCors("CorsPolicy")]
+        public IActionResult BookingBooking([FromBody]BookingView bookingView)
+        {
+...
+
+
+
+

Using the summary annotations and attributes will tell to swagger the contract via the XML doc generated on compiling time. This doc will be stored in XmlDocumentation folder.

+
+
+

The Api methods will be exposed on the application layer.

+
+
+
+

20.21. Google Mail API Consumer

+ +
+

|== == == == == == == == == == == = +|Application| MyThaiStarEmailService.exe +|Config file| MyThaiStarEmailService.exe.Config +|Default port|8080 +|== == == == == == == == == == == =

+
+
+
+

20.22. Overview

+
+
    +
  1. +

    Execute MyThaiStarEmailService.exe.

    +
  2. +
  3. +

    The first time google will ask you for credentials +(just one time) in your default browser:

    +
    + +
    +
  4. +
  5. +

    Visit the url: http://localhost:8080/swagger

    +
  6. +
  7. +

    Your server is ready!

    +
  8. +
+
+
+
+GMail Service +
+
Figure 79. GMail Server Swagger contract page
+
+
+
+

20.23. JSON Example

+
+

This is the JSON example to test with swagger client. Please read the swagger documentation.

+
+
+
+
{
+   "EmailFrom":"mythaistarrestaurant@gmail.com",
+   "EmailAndTokenTo":{
+      "MD5Token1":" Email_Here!@gmail.com",
+      "MD5Token2":" Email_Here!@gmail.com"
+   },
+   "EmailType":0,
+   "DetailMenu":[
+      "Thai Spicy Basil Fried Rice x2",
+      "Thai green chicken curry x2"
+   ],
+   "BookingDate":"2017-05-31T12:53:39.7864723+02:00",
+   "Assistants":2,
+   "BookingToken":"MD5Booking",
+   "Price":20.0,
+   "ButtonActionList":{
+      "http://accept.url":"Accept",
+      "http://cancel.url":"Cancel"
+   },
+   "Host":{
+      " Email_Here!@gmail.com":"José Manuel"
+   }
+}
+
+
+
+
+

20.24. Configure the service port

+
+

If you want to change the default port, please edit the config file and +change the next entry in appSettings node:

+
+
+
+
<appSettings>
+   <add key="LocalListenPort" value="8080" />
+</appSettings>
+
+
+
+
+ + + +
+ +
+
+
+

21. NodeJS

+
+
+

devonfw is a platform which provides solutions to building business applications which combine best-in-class frameworks and libraries as well as industry proven practices and code conventions. devonfw is 100% Open Source (Apache License version 2.0) since the beginning of 2018.

+
+
+

devon4node is the NodeJS stack of devonfw. It allows you to build business applications (backends) using NodeJS technology in standardized way based on established best-practices.

+
+
+

devon4node is based on NestJS. Nest (NestJS) is a framework for building efficient, scalable Node.js server-side applications. It uses progressive TypeScript and combines elements of OOP (Object Oriented Programming), FP (Functional Programming), and FRP (Functional Reactive Programming).

+
+ +
+

21.1. devon4node Architecture

+
+

As we have mentioned in the introduction, devon4node is based on NestJS. Nest (NestJS) is a framework for building efficient, scalable Node.js server-side applications.

+
+
+
+

21.2. HTTP layer

+
+

By using NestJS, devon4node is a platform-agnostic framework. NestJS focuses only on the logical layer, and delegates the transport layer to another framework, such as ExpressJS. You can see it in the following diagram:

+
+
+
+devon4node architecture +
+
+
+

As you can see, NestJS does not listen directly for incoming requests. It has an adapter to communicate with ExpressJS, and ExpressJS is responsible for that. ExpressJS is only one of the frameworks that NestJS can work with. We also have another adapter available out-of-the-box: the Fastify adapter. With that, you can replace ExpressJS with Fastify but still use all your NestJS components. You can also create your own adapter to make NestJS work with another HTTP framework.

+
+
+

At this point, you may think: why is NestJS (and devon4node) using ExpressJS by default instead of Fastify? Because, as you can see in the previous diagram, there is a component that is dependent on the HTTP framework: the middleware. As ExpressJS is the most widely used framework, there exists a lot of middleware for it, so, in order to reuse it in our NestJS applications, NestJS uses ExpressJS by default. Anyway, you should consider which HTTP framework best fits your requirements.

+
+
+
+

21.3. devon4node layers

+
+

As other devonfw technologies, devon4node separates the application into layers.

+
+
+

Those layers are:

+
+ +
+
+layers +
+
+
+
+

21.4. devon4node application structure

+
+

Although there are many frameworks to create backend applications in NodeJS, none of them effectively solves the main problem: architecture. This is the main reason we have chosen NestJS for devon4node applications. Besides, NestJS is highly inspired by Angular, therefore a developer who knows Angular can use their already acquired knowledge to write devon4node applications.

+
+
+

NestJS adopts various Angular concepts, such as dependency injection, piping, interceptors and modularity, among others. By using modularity we can reuse some of our modules between applications. One example that devon4node provide is the mailer module.

+
+
+
+

21.5. Modules

+
+

Creating an application module is simple: you only need to create an empty class with the decorator Module:

+
+
+
+
@Module({})
+export class AppModule {}
+
+
+
+

In the module you can define:

+
+
+
    +
  • +

    Imports: the list of imported modules that export the providers which are required in this module

    +
  • +
  • +

    Controllers: the set of controllers defined in this module which have to be instantiated

    +
  • +
  • +

    Providers: the providers that will be instantiated by the Nest injector and that may be shared at least across this module

    +
  • +
  • +

    Exports: the subset of providers that are provided by this module and should be available in other modules which import this module

    +
  • +
+
+
+

The main difference between Angular and NestJS is NestJS modules encapsulates providers by default. This means that it’s impossible to inject providers that are neither directly part of the current module nor exported from the imported modules. Thus, you may consider the exported providers from a module as the module’s public interface, or API. Example of modules graph:

+
+
+
+modules +
+
+
+

In devon4node we have three different kinds of modules:

+
+
+
    +
  • +

    AppModule: this is the root module. Everything that our application need must be imported here.

    +
  • +
  • +

    Global Modules: this is a special kind of module. When you make a module global, it’s accessible to every module in your application. You can see it in the next diagram. It’s the same as the previous one, but now the CoreModule is global:

    +
    +
    +module2 +
    +
    +
    +

    One example of a global module is the CoreModule. In the CoreModule you must import every module which has providers that need to be accessible in all modules of your application

    +
    +
  • +
  • +

    Feature (or application) modules: modules which contains the logic of our application. We must import it in the AppModule.

    +
  • +
+
+
+

For more information about modules, see NestJS documentation page

+
+
+
+

21.6. Folder structure

+
+

devon4node defines a folder structure that every devon4node application must follow. The folder structure is:

+
+
+
+
├───src
+│   ├───app
+│   │   ├───core
+│   │   │   ├───auth
+│   │   │   ├───configuration
+│   │   │   ├───user
+│   │   │   └───core.module.ts
+│   │   ├───shared
+│   │   └───feature
+│   │       ├───sub-module
+│   │       │   ├───controllers
+│   │       │   ├───...
+│   │       │   ├───services
+│   │       │   └───sub-module.module.ts
+│   │       ├───controllers
+│   │       ├───interceptors
+│   │       ├───pipes
+│   │       ├───guards
+│   │       ├───filters
+│   │       ├───middlewares
+│   │       ├───model
+│   │       │   ├───dto
+│   │       │   └───entities
+│   │       ├───services
+│   │       └───feature.module.ts
+│   ├───config
+│   └───migration
+├───test
+└───package.json
+
+
+
+

devon4node schematics ensure this folder structure, so please do not create files on your own; use the devon4node schematics.

+
+
+
+

21.7. NestJS components

+
+

NestJS provides several components that you can use in your application:

+
+
+ +
+
+

In the NestJS documentation you can find all information about each component. But, something that is missing in the documentation is the execution order. Every component can be defined in different levels: globally, in the controller or in the handler. As middleware is part of the HTTP server we can define it in a different way: globally or in the module.

+
+
+
+components +
+
+
+

It is not necessary to have components defined at every level. For example, you can have an interceptor defined globally but no other at the controller or handler level. If nothing is defined at some level, the request will continue to the next component.

+
+
+

As you can see in the previous image, the first component which receives the request is the globally defined middleware. Then, it sends the request to the module middleware. Each of them can return a response to the client, without passing the request to the next level.

+
+
+

Then, the request continues to the guards: first the global guard, next the controller guard and finally the handler guard. At this point, we can throw an exception in all components and the exception filter will catch it and send a proper error message to the client. We do not paint the filters in the graphic in order to simplify it.

+
+
+

After the guards, it is the interceptors' turn: global interceptors, controller interceptors and handler interceptors. And last, before arriving at the handler inside the controller, the request passes through the pipes.

+
+
+

When the handler has the response ready to send to the client, it does not go directly to the client. It comes back through the interceptors, so we can also intercept the response. The order this time is reversed: handler interceptors, controller interceptors and global interceptors. After that, we can finally send the response to the client.

+
+
+

Now, with this in mind, you are able to create the components in a better way.

+
+
+
+
+
+

22. Layers

+
+ +
+

22.1. Controller Layer

+
+

The controller layer is responsible for handling the requests/responses to the client. This layer knows everything about the endpoints exposed, the expected input (and also validate it), the response schema, the HTTP codes for the response and the HTTP errors that every endpoint can send.

+
+
+
+

22.2. How to implement the controller layer

+
+

This layer is implemented by the NestJS controllers. Let’s see how it works with an example:

+
+
+
+
@Controller('coffee/coffees')
+export class CoffeeController {
+  constructor(private readonly coffeeService: CoffeeService) {}
+
+  @Post('search')
+  @HttpCode(200)
+  async searchCoffees(@Body() search: CoffeeSearch): Promise<Array<Coffee>> {
+    try {
+      return await this.coffeeService.searchCoffees(search);
+    } catch (error) {
+      throw new BadRequestException(error.message, error);
+    }
+  }
+}
+
+
+
+

As you can see in the example, to create a controller you only need to decorate a class with the Controller decorator. This example is handling all request to coffee/coffees.

+
+
+

Also, you have defined one handler. This handler is listening to POST request for the route coffee/coffees/search. In addition, this handler is waiting for a CoffeeSearch object and returns an array of Coffee. In order to keep it simple, that’s all that you need in order to define one route.

+
+
+

One important thing that can be observed in this example is that there is no business logic. It delegates to the service layer and return the response to the client. At this point, transformations from the value that you receive from the service layer to the desired return type are also allowed.

+
+
+

By default, every POST handler returns an HTTP 201 response with the returned value as body, but you can change it in an easy way by using decorators. As you can see in the example, the handler will return an HTTP 200 response (@HttpCode(200)).

+
+
+

Finally, if the service layer throws an error, this handler will catch it and return an HTTP 400 Bad Request response. The controller layer is the only one that knows about the answers to the client, therefore it is the only one that knows which error codes should be sent.

+
+
+
+

22.3. Validation

+
+

In order not to propagate errors in the incoming payload, we need to validate all data in the controller layer. See the validation guide for more information.

+
+
+
+

22.4. Error handling

+
+

In the previous example, we catch all errors using the try/catch statement. This is not the usual implementation. In order to catch the errors properly you must use the exception filters. Example:

+
+
+
+
@Controller('coffee/coffees')
+export class CoffeeController {
+  constructor(private readonly coffeeService: CoffeeService) {}
+
+  @Post('search')
+  @HttpCode(200)
+  @UseFilters(CaffeExceptionFilter)
+  async searchCoffees(@Body() search: CoffeeSearch): Promise<Array<Coffee>> {
+    return await this.coffeeService.searchCoffees(search);
+  }
+}
+
+
+ +
+
+

22.5. Service Layer

+
+

The logic layer is the heart of the application and contains the main business logic. It knows everything about the business logic, but it does not know about the response to the client and the HTTP errors. That’s why this layer is separated from the controller layer.

+
+
+
+

22.6. How to implement the service layer

+
+

This layer is implemented by services, a specific kind of providers. Let’s see one example:

+
+
+
+
@Injectable()
+export class CoffeeService {
+  constructor(private readonly coffeeService: CoffeeService) {}
+
+  async searchCoffees(@InjectRepository(Coffee) coffeeRepository: Repository<Coffee>): Promise<Array<Coffee>> {
+    const coffees = this.coffeeRepository.find();
+
+    return doSomeBusinessLogic(coffees);
+  }
+}
+
+
+
+

This is the CoffeeService that we injected in the controller layer example. As you can see, a service is a regular class with the Injectable decorator. Also, it injects the data access layer as a dependency (in this specific case, the Repository<Coffee>).

+
+
+

The services expose methods in order to transform the input from the controllers by applying some business logic. They can also request data from the data access layer. And that’s all.

+
+ +
+
+

22.7. Data Access Layer

+
+

The data access layer is responsible for all outgoing connections to access and process data. This is mainly about accessing data from a persistent data-store but also about invoking external services.

+
+
+

This layer is implemented using providers. Those providers could be: services, repositories and others. Although services can be used for this layer, they should not be confused with the service layer. Services in this layer are responsible for data access, while services in the service layer are responsible for business logic.

+
+
+
+

22.8. Database

+
+

We strongly recommend TypeORM for database management in devon4node applications. TypeORM supports the most commonly used relational databases, like Oracle, MySQL, MariaDB, PostgreSQL, SQLite, MSSQL and others. Also, it supports non-relational databases like MongoDB.

+
+
+

TypeORM supports Active Record and Repository patterns. We recommend to use the Repository pattern. This pattern allows you to separate the data objects from the methods to manipulate the database.

+
+
+
+

22.9. External APIs

+
+

In order to manage the data in an external API, you need to create a service for that purpose. In order to manage the connections with the external API, we strongly recommend the NestJS HTTP module

+
+
+
+
+
+

23. Guides

+
+ +
+

23.1. Key Principles

+
+

devon4node is built following some basic principles like:

+
+
+ +
+
+

But key principles that best define devon4node (and are inherited from NestJS) are:

+
+
+
    +
  • +

    Simplicity (aka KISS)

    +
  • +
  • +

    Reusability

    +
  • +
  • +

    Productivity

    +
  • +
+
+
+
+

23.2. Simplicity

+
+

In devon4node we try to do everything as simply as possible. Following this principle we will be able to build easy-to-maintain applications.

+
+
+

For example, in order to expose all CRUD operations for an entity, you only need to create a controller like:

+
+
+
+
@Crud({
+  model: {
+    type: Employee,
+  },
+})
+@CrudType(Employee)
+@Controller('employee/employees')
+export class EmployeeCrudController {
+  constructor(public service: EmployeeCrudService) {}
+}
+
+
+
+

You can find this code in the employee example. Only with this code you are exposing the full CRUD operations for the employee entity. As you can see, it’s an empty class with some decorators and the EmployeeCrudService injected as a dependency. Simple, isn’t it? The EmployeeCrudService is also simple:

+
+
+
+
@Injectable()
+export class EmployeeCrudService extends TypeOrmCrudService<Employee> {
+  constructor(@InjectRepository(Employee) repo: Repository<Employee>) {
+    super(repo);
+  }
+}
+
+
+
+

Another empty class which extends from TypeOrmCrudService<Employee> and injects the Employee Repository as dependency. Nothing else.

+
+
+

With these examples you can get an idea of how simple it can be to code a devon4node application.

+
+
+
+

23.3. Reusability

+
+

NestJS (and devon4node) applications are designed in a modular way. This allows you to isolate some functionality in a module, and then reuse it in every application that you need. This is the same behaviour that Angular has. You can see it in the NestJS modules like TypeORM, Swagger and others. Also, in devon4node we have the Mailer module.

+
+
+

In your applications, you only need to import those modules and then you will be able to use the functionality that they implement. Example

+
+
+
+
@Module({
+  imports: [ AuthModule, ConfigurationModule ],
+})
+export class SomeModule {}
+
+
+
+
+

23.4. Productivity

+
+

devon4node is designed to create secure enterprise applications. But also, it allows you to do it in a fast way. To increase productivity, devon4node provides schematics in order to generate some boilerplate code.

+
+
+

For example, to create a module you need to create a new file for the module (or copy one) and write the code, then you need to import it in the AppModule. This is an easy example, but you can introduce some errors: forget to import it in the AppModule, introduce errors with the copy/paste and so on. By using the command nest g module --name <module-name> it will do everything for you. Just a simple command. In this specific case you probably do not see any advantage, but there are other complex cases where you can generate more complex code with the nest and devon4node schematics commands.

+
+
+

See code generation in order to know how to increase your productivity creating devon4node applications.

+
+ +
+
+

23.5. Code Generation

+
+

As we mention in the page key principles, one of our key principles is Productivity. In order to provide that productivity, we have some tools to generate code. These tools will help you generate the common parts of the application so that you can focus only on the specific functionality.

+
+
+

Those tools are:

+
+ +
+
+

23.6. Nest CLI and Devon4node schematics

+
+

We are going to use the Nest CLI to generate code for our application; you can learn more about the Nest CLI in the official documentation.

+
+
+
+

23.7. Install devon4node schematics

+
+

First of all, you need to install Nest CLI

+
+
+

Execute the command yarn global add @nestjs/cli. +You can also use npm: npm install -g @nestjs/cli

+
+
+

And then Devon4node schematics globally with the following command:

+
+
+

yarn global add @devon4node/schematics or npm install -g @devon4node/schematics

+
+
+
+

==

+
+

If you get an error trying to execute any devon4node schematic related to the collection not being found, try to reinstall devon4node/schematics in the project folder or make sure that the schematics folder is inside @devon4node in node_modules. +yarn add @devon4node/schematics +== ==

+
+
+
+

23.8. Generate new devon4node application

+
+

To start creating a devon4node application, execute the command:

+
+
+

nest g -c @devon4node/schematics application [application-name]

+
+
+

If you do not put a name, the command line will ask you for one.

+
+
+
+

23.9. Generate code for TypeORM

+
+

Initializes TypeORM in your current project in the correct way.

+
+
+

nest g -c @devon4node/schematics typeorm

+
+
+

Then, you will be asked about which DB you want to use.

+
+
+

typeorm schematic

+
+
+
+

23.10. Generate CRUD

+
+

Generates CRUD methods for an entity. Requires TypeORM installed in the project.

+
+
+

It will add the @nestjsx/crud module as a project dependency. Then, it generates an entity, a CRUD controller and a CRUD service. It also registers the entity, controller and service in the module.

+
+
+

Execute nest g -c @devon4node/schematics crud and then you will need to write a name for the crud.

+
+
+
+crud schematic +
+
+
+
+

23.11. Generate TypeORM entity

+
+

Add a TypeORM entity to your project. Requires TypeORM installed in the project.

+
+
+

Execute nest g -c @devon4node/schematics entity and you will be asked for an entity name.

+
+
+
+

23.12. Add config-module

+
+

Add the config module to the project.

+
+
+

It will add the @devon4node/common module as a project dependency. Then, it will generate the configuration module into your project and add it in the core module. Also, it generates the config files for the most common environments.

+
+
+

The command to execute will be nest g -c @devon4node/schematics config-module

+
+
+
+

23.13. Add mailer module

+
+

Add @devon4node/mailer module to project.

+
+
+

It will add the @devon4node/mailer module as a project dependency. Also, it will add it to the core module and it will generate some email template examples.

+
+
+

Write the command nest g -c @devon4node/schematics mailer

+
+
+
+

23.14. Add swagger module

+
+

Add swagger module to project.

+
+
+

It will add the @nestjs/swagger module as a project dependency. Also, it will update the main.ts file in order to expose the endpoint for swagger. The default endpoint is: /v1/api

+
+
+

Execute the command nest g -c @devon4node/schematics swagger

+
+
+
+

23.15. Add auth-jwt module

+
+

Add the auth JWT module to the project.

+
+
+

It will add the auth-jwt and user modules to your project. Also, it will import those modules into the core module.

+
+
+

Execute nest g -c @devon4node/schematics auth-jwt

+
+
+
+

23.16. Add security

+
+

Add cors and helmet to your project.

+
+
+

It will add the helmet package as a project dependency and update the main.ts file in order to enable cors and helmet in your application.

+
+
+

Execute nest g -c @devon4node/schematics security

+
+
+
+

23.17. Generate database migrations

+
+
    +
  1. +

    Generate database migrations

    +
    +
      +
    1. +

      In order to create migration scripts with TypeORM, you need to install ts-node: yarn global add ts-node or npm i -g ts-node

      +
    2. +
    3. +

      Generate the tables creation migration: yarn run typeorm migration:generate -n CreateTables

      +
      +
      +insert data +
      +
      +
      +

      It will connect to the database, read all entities and then generate a migration file with all the SQL queries needed to transform the current status of the database to the status defined by the entities. If the database is empty, it will generate all the SQL queries needed to create all the tables defined in the entities. You can find an example in the todo example

      +
      +
    4. +
    +
    +
  2. +
+
+
+

As TypeORM is the tool used for the DB, you can check the official documentation for more information. +See TypeORM CLI documentation.

+
+
+
+

23.18. CobiGen

+
+

Currently, we do not have templates to generate devon4node code (we have planned to do that in the future). Instead, we have templates that read the code of a devon4node application and generate a devon4ng application. Visit the CobiGen page for more information.

+
+ +
+
+

23.19. Coding Conventions

+
+

devon4node defines some coding conventions in order to improve the readability, reduce the merge conflicts and be able to develop applications in an industrialized way.

+
+
+

In order to ensure that you are following the devon4node coding conventions, you can use the following tools:

+
+
+
    +
  • +

    ESLint: ESLint is a tool for identifying and reporting on patterns found in ECMAScript/JavaScript code, with the goal of making code more consistent and avoiding bugs. We recommend using the ESLint VSCode extension (included in the devonfw Platform Extension Pack) in order to be able to see the linting errors while you are developing.

    +
  • +
  • +

    Prettier: Prettier is a code formatter. We recommend to use the Prettier VSCode extension (included in the devonfw Platform Extension Pack) and enable the editor.formatOnSave option.

    +
  • +
  • +

    devon4node application schematic: this tool will generate code following the devon4node coding conventions. Also, when you generate a new project using the devon4node application schematic, it generates the configuration files for TSLint and Prettier that satisfy the devon4node coding conventions.

    +
  • +
+
+
+

When you combine all tools, you can be sure that you follow the devon4node coding conventions.

+
+
+
+

23.20. Detailed devon4node Coding Conventions

+
+

Here we will detail some of the most important devon4node coding conventions. To be sure that you follow all devon4node coding conventions, use the tools described before.

+
+
+
+

23.21. Indentation

+
+

All devon4node code files must be indented using spaces. The indentation width must be 2 spaces.

+
+
+
+

23.22. White space

+
+

In order to improve the readability of your code, you must introduce whitespaces. Example:

+
+
+
+
if(condition){
+
+
+
+

must be

+
+
+
+
if (condition) {
+
+
+
+
+

23.23. Naming conventions

+ +
+
+

23.24. == File naming

+
+

The file name must follow the pattern: (name in kebab case).(kind of component).(extension) +The test file name must follow the pattern: (name in kebab case).(kind of component).spec.(extension)

+
+
+

Example:

+
+
+
+
auth-jwt.service.ts
+auth-jwt.service.spec.ts
+
+
+
+
+

23.25. == Interface naming

+
+

The interface names must be in pascal case, and must start with I. There is some controversy about starting interface names with an I, but we decided to do it because in most cases you will have an interface and a class with the same name, so, to differentiate them, we decided to start the interfaces with I. Other devonfw stacks solve it by adding the suffix Impl to the class implementations.

+
+
+

Example:

+
+
+
+
interface ICoffee {}
+
+
+
+
+

23.26. == Class naming

+
+

The class names must be in pascal case.

+
+
+

Example:

+
+
+
+
class Coffee {}
+
+
+
+
+

23.27. == Variable naming

+
+

All variable names must be in camel case.

+
+
+
+
const coffeeList: Coffee[];
+
+
+
+
+

23.28. Declarations

+
+

For all variable declarations we must use const or let. var is forbidden. We prefer to use const when possible.

+
+
+
+

23.29. Programming practices

+ +
+
+

23.30. == Trailing comma

+
+

All statements must end with a trailing comma. Example:

+
+
+
+
{
+  one: 'one',
+  two: 'two'  // bad
+}
+{
+  one: 'one',
+  two: 'two', // good
+}
+
+
+
+
+

23.31. == Arrow functions

+
+

All anonymous functions must be defined with the arrow function notation. In most cases it’s not a problem, but sometimes, when you do not want to bind this when you define the function, you can use the other function definition. In these special cases you must disable the linter for those sentences.

+
+
+
+

23.32. == Comments

+
+

Comments must start with a whitespace. Example:

+
+
+
+
//This is a bad comment
+// This is OK
+
+
+
+
+

23.33. == Quotemarks

+
+

For string definitions, we must use single quotes.

+
+
+
+

23.34. == if statements

+
+

In all if statements you always must use brackets. Example:

+
+
+
+
// Bad if statement
+if (condition)
+  return true;
+
+// Good if statement
+if (condition) {
+  return true;
+}
+
+
+
+
+

23.35. Pre-commit hooks

+
+

In order to ensure that your new code follows the coding conventions, devon4node uses by default husky. Husky is a tool that allows you to configure git hooks easily in your project. When you make a git commit in your devon4node project, it will execute two actions:

+
+
+
    +
  • +

    Prettify the staged files

    +
  • +
  • +

    Execute the linter in the staged files

    +
  • +
+
+
+

If any action fails, you won’t be able to commit your new changes.

+
+
+ + + + + +
+ + +If you want to skip the git hooks, you can do a commit passing the --no-verify flag. +
+
+ +
+
+

23.36. Dependency Injection

+
+

The dependency injection is a well-known common design pattern applied by frameworks in all languages, like Spring in Java, Angular and others. The intention of this page is not to explain how dependency injection works, but instead how it is addressed by NestJS.

+
+
+

NestJS resolves the dependency injection in its modules. When you define a provider in a module, it can be injected in all components of the module. By default, those providers are only available in the module where they are defined. The only way to export a module provider to other modules which import it is adding those providers to the export array. You can also reexport modules.

+
+
+
+

23.37. Inject dependencies in NestJS

+
+

In order to inject a dependency in a NestJS component, you need to declare it in the component constructor. Example:

+
+
+
+
export class CoffeeController {
+  constructor(public readonly coffeeService: CoffeeService) {}
+}
+
+
+
+

NestJS can resolve all dependencies that are defined in the module as provider, and also the dependencies exported by the modules imported. Example:

+
+
+
+
@Module({
+  controllers: [CoffeeController],
+  providers: [CoffeeService],
+})
+export class CoffeeModule {}
+
+
+
+

Injecting dependencies in the constructor is the preferred choice, but sometimes it is not possible. For example, when you are extending another class and want to keep the constructor definition. In these specific cases we can inject dependencies in the class properties. Example:

+
+
+
+
export class CoffeeController {
+  @Inject(CoffeeService)
+  private readonly coffeeService: CoffeeService;
+}
+
+
+
+
+

23.38. Dependency Graph

+
+
+dependency injection1 +
+
+
+

In the previous image, the Module A can inject dependencies exported by Module B, Module E and Module F. If module B reexports Module C and Module D, they are also accessible by Module A.

+
+
+

If there is a conflict with the injection token, it resolves the provider with less distance with the module. For example: if the modules C and F exports a UserService provider, the Module A will resolve the UserService exported by the Module F, because the distance from Module A to Module F is 1, and the distance from Module A to Module C is 2.

+
+
+

When you define a module as global, the dependency injection system is the same. The only difference is that now all modules have a link to the global module. For example, if we make the Module C global, the dependency graph will be:

+
+
+
+dependency injection2 +
+
+
+
+

23.39. Custom providers

+
+

When you want to change the provider name, you can use a NestJS feature called custom providers. For example, if you want to define a provider called MockUserService with the provider token UserService you can define it like:

+
+
+
+
@Module({
+  providers: [{
+    provide: UserService,
+    useValue: MockUserService,
+  }],
+})
+
+
+
+

With this, when you want to inject UserService as a dependency, the MockUserService will be injected.

+
+
+

Custom provider token can be also a string:

+
+
+
+
@Module({
+  providers: [{
+    provide: 'USER_SERVICE',
+    useValue: MockUserService,
+  }],
+})
+
+
+
+

but now, when you want to inject it as dependency you need to use the @Inject decorator.

+
+
+
+
constructor(@Inject('USER_SERVICE') userService: any) {}
+
+
+ +
+
+

23.40. Configuration Module

+
+

devon4node provides a way to generate a configuration module inside your application. To generate it you only need to execute the command nest g -c @devon4node/schematics config-module. This command will generate inside your application:

+
+
+
    +
  • +

    Configuration module inside the core module.

    +
  • +
  • +

    config folder where all environment configuration are stored.

    +
    +
      +
    • +

      default configuration: configuration for your local development environment.

      +
    • +
    • +

      develop environment configuration for the develop environment.

      +
    • +
    • +

      uat environment configuration for the uat environment.

      +
    • +
    • +

      production environment configuration for the production environment.

      +
    • +
    • +

      production environment configuration for the production environment.

      +
    • +
    • +

      test environment configuration used by test.

      +
    • +
    +
    +
  • +
+
+
+ + + + + +
+ + +some code generators will add some properties to this module, so, be sure that the config module is the first module that you generate in your application. +
+
+
+
+

23.41. Use the configuration service

+
+

To use the configuration service, you only need to inject it as dependency. As configuration module is defined in the core module, it will be available everywhere in your application. Example:

+
+
+
+
export class MyProvider {
+  constructor(public readonly configService: ConfigurationService) {}
+
+  myMethod() {
+    return this.configService.isDev;
+  }
+}
+
+
+
+
+

23.42. Choose an environment file

+
+

By default, when you use the configuration service it will take the properties defined in the default.ts file. If you want to change the configuration file, you only need to set the NODE_ENV environment property with the name of the desired environment. Examples: in windows execute set NODE_ENV=develop before executing the application, in linux execute NODE_ENV=develop before executing the application or NODE_ENV=develop yarn start.

+
+
+
+

23.43. Override configuration properties

+
+

Sometimes, you want to keep some configuration property secure, and you do not want to publish it to the repository, or you want to reuse some configuration file but you need to change some properties. For those scenarios, you can override configuration properties by defining an environment variable with the same name. For example, if you want to override the property host, you can do: set host="newhost". It also works with objects. For example, if you want to change the value of secret in the property jwtConfig for this example, you can set an environment variable like this: set jwtConfig="{"secret": "newsecret"}". As you can see, this environment variable has a JSON value. It will take the object and merge the jwtConfig property with the properties defined inside the environment variable. The other properties maintain their value. The behaviour is the same for the nested objects.

+
+
+
+

23.44. Add a configuration property

+
+

In order to add a new property to the configuration module, you need to follow some steps:

+
+
+
    +
  • +

    Add the property to IConfig interface in src/app/core/configuration/types.ts file. With this, we can ensure that the ConfigurationService and the environment files have that property at compile time.

    +
  • +
  • +

    Add the new property getter to ConfigurationService. You must use the get method of ConfigurationService to ensure that the property will be loaded from the desired config file. You can also add extra logic if needed.

    +
  • +
  • +

    Add the property to all config files inside the src/config folder.

    +
  • +
+
+
+

Example:

+
+
+

We want to add the property devonfwUrl to our ConfigurationService, so:

+
+
+

We add the following code in IConfig interface:

+
+
+
+
devonfwUrl: string;
+
+
+
+

Then, we add the getter in the ConfigurationService:

+
+
+
+
get devonfwUrl(): string {
+  return this.get('devonfwUrl')!;
+}
+
+
+
+

Finally, we add the definition in all config files:

+
+
+
+
devonfwUrl: 'https://devonfw.com',
+
+
+ +
+
+

23.45. Auth JWT module

+
+

devon4node provides a way to generate a default authentication module using JWT (JSON Web Token). It uses the @nestjs/passport library described here.

+
+
+

To generate the devon4node auth-jwt module you only need to execute the command: nest generate -c @devon4node/schematics auth-jwt. We generate this module inside the applications instead of distributing an npm package because this module is prone to be modified depending on the requirements. It also generates a basic user module.

+
+
+

In this page we will explain the default implementation provided by devon4node. For more information about authentication, JWT, passport and other you can see:

+
+
+ +
+
+
+

23.46. Auth JWT endpoints

+
+

In order to execute authentication operations, the auth-jwt module exposes the following endpoints:

+
+
+
    +
  • +

    POST /auth/login: receives a username and a password and returns the token in the header if the combination of username and password is correct.

    +
  • +
  • +

    POST /auth/register: register a new user.

    +
  • +
  • +

    GET /auth/currentuser: return the user data if he is authenticated.

    +
  • +
+
+
+
+

23.47. Protect endpoints with auth-jwt

+
+

In order to protect your endpoints with auth-jwt module you only need to add the AuthGuard() in the UseGuards decorator. Example:

+
+
+
+
@Get('currentuser')
+@UseGuards(AuthGuard())
+currentUser(@Request() req: UserRequest) {
+  return req.user;
+}
+
+
+
+

Now, all requests to currentuser are protected by the AuthGuard.

+
+
+
+

23.48. Role based Access Control

+
+

The auth-jwt module provides also a way to control the access to some endpoints by using roles. For example, if you want to grant access to an endpoint only to admins, you only need to add the Roles decorator to those endpoints with the roles allowed. Example:

+
+
+
+
@Get('currentuser')
+@UseGuards(AuthGuard())
+@Roles(roles.ADMIN)
+currentUser(@Request() req: UserRequest) {
+  return req.user;
+}
+
+
+ +
+
+

23.49. Swagger

+
+

We can use swagger (OpenAPI) in order to describe the endpoints that our application exposes.

+
+
+

NestJS provides a module which will read the code of our application and will expose one endpoint where we can see the swagger.

+
+
+

Adding swagger to a devon4node application is simple, you only need to execute the command nest g -c @devon4node/schematics swagger and it will do everything for you. The next time that you start your application, you will be able to see the swagger at /v1/api endpoint.

+
+
+

The swagger module can read your code in order to create the swagger definition, but sometimes you need to help it by decorating your handlers.

+
+
+

For more information about decorators and other behaviour about swagger module, you can see the NestJS swagger documentation page

+
+
+ + + + + +
+ + +the OpenAPI specification that this module supports is v2.0. The OpenAPI v3.0 is not available yet by using this module. +
+
+ +
+
+

23.50. TypeORM

+
+

TypeORM is the default ORM provided by devon4node. It supports MySQL, MariaDB, Postgres, CockroachDB, SQLite, Microsoft SQL Server, Oracle, sql.js relational database and also supports MongoDB NoSQL database.

+
+
+

Adding TypeORM support to a devon4node application is very easy: you only need to execute the command nest g -c @devon4node/schematics typeorm and it will add all required dependencies to the project and also imports the @nestjs/typeorm module.

+
+
+

For more information about TypeORM and the integration with NestJS you can visit TypeORM webpage, TypeORM GitHub repository and NestJS TypeORM documentation page

+
+
+
+

23.51. Configuration

+
+

When you have the configuration module, the TypeORM generator will add one property in order to be able to configure the database depending on the environment. Example:

+
+
+
+
database: {
+  type: 'sqlite',
+  database: ':memory:',
+  synchronize: false,
+  migrationsRun: true,
+  logging: true,
+  entities: ['dist/**/*.entity.js'],
+  migrations: ['dist/migration/**/*.js'],
+  subscribers: ['dist/subscriber/**/*.js'],
+  cli: {
+    entitiesDir: 'src/entity',
+    migrationsDir: 'src/migration',
+    subscribersDir: 'src/subscriber',
+  },
+},
+
+
+
+

This object is a TypeORM ConnectionOptions. For more information about it visit the TypeORM Connection Options page.

+
+
+

There is also a special case: the default configuration. As the devon4node CLI needs the database configuration when you use the devon4node db command, we also provide the ormconfig.json file. In this file you must put the configuration for your local environment. In order not to duplicate the configuration for the local environment, in the default config file the database property is set up like:

+
+
+
+
database: require('../../ormconfig.json'),
+
+
+
+

So, you only need to maintain the ormconfig.json file for the local environment.

+
+
+
+

23.52. Entity

+
+

Entity is a class that maps to a database table. The devon4node schematics has a generator to create new entities. You only need to execute the command nest g -c @devon4node/schematics entity <entity-name> and it generates the entity.

+
+
+

In the entity, you must define all columns, relations, primary keys of your database table. By default, devon4node provides a class named BaseEntity. All entities created with the devon4node schematics will extend the BaseEntity. This entity provides you some common columns:

+
+
+
    +
  • +

    id: the primary key of your table

    +
  • +
  • +

    version: the version of the entry (used for auditing purposes)

    +
  • +
  • +

    createdAt: creation date of the entry (used for auditing purposes)

    +
  • +
  • +

    updatedAt: last update date of the entry (used for auditing purposes)

    +
  • +
+
+
+

For more information about Entities, please visit the TypeORM entities page

+
+
+
+

23.53. Repository

+
+

With repositories, you can manage (insert, update, delete, load, etc.) a concrete entity. Using this pattern, we have separated the data (Entities) from the methods to manage it (Repositories).

+
+
+

To use a repository you only need to:

+
+
+
    +
  • +

    Import it in the module as follows:

    +
    +
    +
    @Module({
    +  imports: [TypeOrmModule.forFeature([Employee])],
    +})
    +
    +
    +
    + + + + + +
    + + +if you generate the entities with the devon4node schematic, this step is not necessary, devon4node schematic will do it for you. +
    +
    +
  • +
  • +

    Inject the repository as dependency in your service:

    +
    +
    +
    constructor(@InjectRepository(Employee) employeeRepository: Repository<Employee>) {}
    +
    +
    +
  • +
+
+
+

You can see more details in the NestJS database and NestJS TypeORM documentation pages.

+
+ +
+
+

23.54. Serializer

+
+

Serialization is the process of translating data structures or object state into a format that can be transmitted across network and reconstructed later.

+
+
+

NestJS by default serializes all data to JSON (JSON.stringify). Sometimes this is not enough. In some situations you need to exclude some property (e.g. password). Instead of doing it manually, devon4node provides an interceptor (ClassSerializerInterceptor) that will do it for you. You only need to return a class instance as always and the interceptor will transform those classes to the expected data.

+
+
+

The ClassSerializerInterceptor takes the class-transformer decorators in order to know how to transform the class and then send the result to the client.

+
+
+

Some of class-transformer decorators are:

+
+
+
    +
  • +

    Expose

    +
  • +
  • +

    Exclude

    +
  • +
  • +

    Type

    +
  • +
  • +

    Transform

    +
  • +
+
+
+

And methods to transform data:

+
+
+
    +
  • +

    plainToClass

    +
  • +
  • +

    plainToClassFromExist

    +
  • +
  • +

    classToPlain

    +
  • +
  • +

    classToClass

    +
  • +
  • +

    serialize

    +
  • +
  • +

    deserialize

    +
  • +
  • +

    deserializeArray

    +
  • +
+
+
+

See the class-transformer page for more information.

+
+
+

See NestJS serialization page for more information about ClassSerializerInterceptor.

+
+ +
+
+

23.55. Validation

+
+

To be sure that your application will work well, you must validate any input data. devon4node by default provides a ValidationPipe. This ValidationPipe is responsible for validating the request input and, if the input does not pass the validation process, it returns a 400 Bad Request error.

+
+
+
+

23.56. Defining Validators

+
+

The ValidationPipe needs to know how to validate the input. For that purpose we use the class-validator package. This package allows you to define the validation of a class by using decorators.

+
+
+

For example:

+
+
+
+
export class Coffee {
+  @IsDefined()
+  @IsString()
+  @MaxLength(255)
+  name: string;
+
+  @IsDefined()
+  @IsString()
+  @MaxLength(25)
+  type: string;
+
+  @IsDefined()
+  @IsNumber()
+  quantity: number;
+}
+
+
+
+

As you can see in the previous example, we used some decorators in order to define the validators for every property of the Coffee class. You can find all decorators in the class-validator github repository.

+
+
+

Now, when you want to receive a Coffee as input in some endpoint, it will execute the validations before executing the handler function.

+
+
+ + + + + +
+ + +In order to be able to use the class-validator package, you must use classes instead of interfaces. As you know interfaces disappear at compiling time, and class-validator need to know the metadata of the properties in order to be able to validate. +
+
+
+ + + + + +
+ + +The ValidationPipe only works if you put a specific type in the handler definition. For example, if you define a handler like getCoffee(@Body() coffee: any): Coffee {} the ValidationPipe will not do anything. You must specify the type of the input: getCoffee(@Body() coffee: Coffee): Coffee {} +
+
+ +
+
+

23.57. Logger

+
+

When you create a new devon4node application, it already has a logger: src/app/shared/logger/winston.logger.ts. This logger provides the methods log, error and warn. All of those methods will write a log message, but with a different log level.

+
+
+

The winston logger has two transports: one to log everything inside the file logs/general.log and the other to log only the error logs inside the file logs/error.log. In addition, it uses the default NestJS logger in order to show the logs in the console.

+
+
+

As you can see it is a simple example about how to use a logger in a devon4node application. It will be updated to a complex one in the next versions.

+
+
+
+

23.58. How to use logger

+
+

In order to use the logger you only need to inject the logger as a dependency:

+
+
+
+
constructor(logger: WinstonLogger){}
+
+
+
+

and then use it

+
+
+
+
async getAll() {
+  this.service.getAll();
+  this.logger.log('Returning all data');
+}
+
+
+ +
+
+

23.59. Mailer Module

+
+

This module enables you to send emails in devon4node. It also provides a template engine using Handlebars.

+
+
+

It is a NestJS module that injects into your application a MailerService, which is responsible for sending the emails using the nodemailer library.

+
+
+
+

23.60. Installing

+
+

Execute the following command in a devon4node project:

+
+
+
+
yarn add @devon4node/mailer
+
+
+
+
+

23.61. Configuring

+
+

To configure the mailer module, you only need to import it in your application into another module. Example:

+
+
+
+
@Module({
+  ...
+  imports: [
+    MailerModule.forRoot(),
+  ],
+  ...
+})
+
+
+
+

You must pass the configuration using the forRoot or forRootAsync methods.

+
+
+
+

23.62. forRoot()

+
+

The forRoot method receives a MailerModuleOptions object as parameter. It configures the MailerModule using the input MailerModuleOptions object.

+
+
+

The structure of MailerModuleOptions is:

+
+
+
+
{
+  hbsOptions?: {
+    templatesDir: string;
+    extension?: string;
+    partialsDir?: string;
+    helpers?: IHelperFunction[];
+    compilerOptions?: ICompileOptions;
+  },
+  mailOptions?: nodemailerSmtpTransportOptions;
+  emailFrom: string;
+}
+
+
+
+

Here, you need to specify the Handlebars compile options, the nodemailer transport options and the email address which will send the emails. +Then, you need to call to forRoot function in the module imports. Example:

+
+
+
+
@Module({
+  ...
+  imports: [
+    MailerModule.forRoot({
+      mailOptions: {
+        host: 'localhost',
+        port: 1025,
+        secure: false,
+        tls: {
+          rejectUnauthorized: false,
+        },
+      },
+      emailFrom: 'noreply@capgemini.com',
+      hbsOptions: {
+        templatesDir: join(__dirname, '../..', 'templates/views'),
+        partialsDir: join(__dirname, '../..', 'templates/partials'),
+        helpers: [{
+          name: 'fullname',
+          func: person => `${person.name} ${person.surname}`,
+        }],
+      },
+    }),
+  ...
+})
+
+
+
+
+

23.63. forRootAsync()

+
+

The method forRootAsync enables you to get the mailer configuration in an asynchronous way. It is useful when you need to get the configuration using, for example, a service (e.g. ConfigurationService).

+
+
+

Example:

+
+
+
+
@Module({
+  ...
+  imports: [
+    MailerModule.forRootAsync({
+      imports: [ConfigurationModule],
+      useFactory: (config: ConfigurationService) => {
+        return config.mailerConfig;
+      },
+      inject: [ConfigurationService],
+    }),
+  ...
+})
+
+
+
+

In this example, we use the ConfigurationService in order to get the MailerModuleOptions (the same as forRoot)

+
+
+
+

23.64. Usage

+
+

In order to use it, you only need to inject the MailerService using dependency injection.

+
+
+

Example:

+
+
+
+
@Injectable()
+export class CatsService {
+  constructor(private readonly mailer: MailerService) {}
+}
+
+
+
+

Then, you only need to use the methods provided by the MailerService in your service. Take into account that you can inject it in every place that support NestJS dependency injection.

+
+
+
+

23.65. MailerService methods

+ +
+
+

23.66. == sendPlainMail

+
+

The method sendPlainMail receives a string and sends an email.

+
+
+

The method signatures are:

+
+
+
+
sendPlainMail(emailOptions: SendMailOptions): Promise<SentMessageInfo>;
+sendPlainMail(to: string, subject: string, mail: string): Promise<SentMessageInfo>;
+
+
+
+

Examples:

+
+
+
+
this.mailer.sendPlainMail({
+  to: 'example@example.com',
+  subject: 'This is a subject',
+  html: '<h1>Hello world</h1>'
+});
+this.mailer.sendPlainMail('example@example.com', 'This is a subject', '<h1>Hello world</h1>');
+
+
+
+
+

23.67. == sendTemplateMail

+
+

The method sendTemplateMail sends an email based on a Handlebars template. The templates are registered using the templatesDir option or using the addTemplate method. +The template name is the name of the template (without extension) or the first parameter of the method addTemplate.

+
+
+

The method signatures are:

+
+
+
+
sendTemplateMail(emailOptions: SendMailOptions, templateName: string, emailData: any, hbsOptions?: RuntimeOptions): Promise<SentMessageInfo>;
+sendTemplateMail(to: string, subject: string, templateName: string, emailData: any, hbsOptions?: RuntimeOptions): Promise<SentMessageInfo>;
+
+
+
+

Examples:

+
+
+
+
this.mailer.sendTemplateMail({
+  to: 'example@example.com',
+  subject: 'This is a subject',
+  html: '<h1>Hello world</h1>'
+}, 'template1', { person: {name: 'Dario', surname: 'Rodriguez'}});
+this.mailer.sendTemplateMail('example@example.com', 'This is a subject', 'template1', { person: {name: 'Dario', surname: 'Rodriguez'}});
+
+
+
+
+

23.68. == addTemplate

+
+

Adds a new template to the MailerService.

+
+
+

Method signature:

+
+
+
+
addTemplate(name: string, template: string, options?: CompileOptions): void;
+
+
+
+

Example:

+
+
+
+
this.mailer.addTemplate('newTemplate', '<html><head></head><body>{{>partial1}}</body></html>')
+
+
+
+
+

23.69. == registerPartial

+
+

Register a new partial in Handlebars.

+
+
+

Method signature:

+
+
+
+
registerPartial(name: string, partial: Handlebars.Template<any>): void;
+
+
+
+

Example:

+
+
+
+
this.mailer.registerPartial('partial', '<h1>Hello World</h1>')
+
+
+
+
+

23.70. == registerHelper

+
+

Register a new helper in Handlebars.

+
+
+

Method signature:

+
+
+
+
registerHelper(name: string, helper: Handlebars.HelperDelegate): void;
+
+
+
+

Example:

+
+
+
+
this.mailer.registerHelper('fullname', person => `${person.name} ${person.surname}`)
+
+
+
+
+

23.71. Handlebars templates

+
+

As mentioned above, this module allows you to use Handlebars as template engine, but it is optional. If you do not need Handlebars, you just need to keep the hbsOptions undefined.

+
+
+

In order to get the templates from the file system, you can specify the template folder, the partials folder and the helpers. +At the moment of module initialization, it will read the content of the template folder, and will register every file with the name (without extension) and the content as a Handlebars template. It will do the same for the partials.

+
+
+

You can specify the extension of template files using the extension parameter. The default value is .handlebars

+
+
+
+

23.72. Local development

+
+

If you want to work with this module but you don’t have a SMTP server, you can use the streamTransport. Example:

+
+
+
+
{
+  mailOptions: {
+    streamTransport: true,
+    newline: 'windows',
+  },
+  emailFrom: ...
+  hbsOptions: ...
+}
+
+
+
+

Then, you need to get the sendPlainMail or sendTemplateMail result, and print the email to the standard output (STDOUT). Example:

+
+
+
+
const mail = await this.mailer.sendTemplateMail(...);
+
+mail.message.pipe(process.stdout);
+
+
+ +
+
+

23.73. Importing your ESLint reports into SonarQube

+
+

This guide covers the import of ESLint reports into SonarQube instances in CI environments, as this is the recommended way of using ESLint and SonarQube for devon4node projects. The prerequisites for this process are a CI environment, preferably a Production Line instance, and the ESLint CLI, which is already included when generating a new devon4node project.

+
+
+
+

23.74. Configuring the ESLint analysis

+
+

You can configure the ESLint analysis parameters in the .eslintrc.js file inside the top-level directory of your project. If you created your node project using the devon4node application schematic, this file will already exist. If you want to make further adjustments to it, have a look at the ESLint documentation.

+
+
+

The ESLint analysis script lint is already configured in the scripts part of your package.json. Simply add -f json > report.json, so that the output of the analysis is saved in a .json file. Additional information to customization options for the ESLint CLI can be found here.

+
+
+

To run the analysis, execute the script with npm run lint inside the base directory of your project.

+
+
+
+

23.75. Configuring SonarQube

+
+

If you haven’t already generated your CICD-related files, follow the tutorial on the devon4node schematic of our CICDGEN project, as you will need a Jenkinsfile configured in your project to proceed.

+
+
+

Inside the script for the SonarQube code analysis in your Jenkinsfile, add the parameter -Dsonar.eslint.reportPaths=report.json. Now, whenever a SonarQube analysis is triggered by your CI environment, the generated report will be loaded into your SonarQube instance. +To avoid duplicated issues, you can associate an empty TypeScript quality profile with your project in its server configurations.

+
+
+
+
+
+

24. devon4node applications

+
+ +
+

24.1. devon4node Samples

+
+

In the folder /samples, you can find some devon4node examples that could be useful for you in order to understand the framework better.

+
+
+

The samples are:

+
+
+ +
+
+

Also, we have another, more realistic example in the My Thai Star repository. This example is the implementation of the My Thai Star backend, which is compatible with the frontend made with Angular. To achieve that, this Node.js implementation exposes the same API as the Java backend. Be careful with this example: as we need to follow the Java API, some components do not follow the devon4node patterns and code conventions.

+
+
+
+

24.2. Todo example

+
+

This example is the backend part of a TO-DO application. It exposes an API where you can create, read, update and delete a TO-DO list.

+
+
+

In order to start the application, run the following commands in the todo folder:

+
+
+
+
$ yarn
+$ yarn build
+$ yarn start
+
+
+
+

Now, you can access the application using the URL http://localhost:3000/v1/todo/todos. If you want to know all exposed endpoints, you can see the swagger at: http://localhost:3000/v1/api.

+
+
+

Also, in this example we show you how to control access to your application by implementing an authentication mechanism using JWT and a role-based strategy. In order to access the list of todos (http://localhost:3000/v1/todo/todos), you first need to call POST http://localhost:3000/v1/auth/login and send the user information in the body:

+
+
+
+
{
+  "username": "user",
+  "password": "password"
+}
+
+
+
+

It will return a JWT token for the user user. The role of this user is USER, so you can only access the methods GET, POST and DELETE of the endpoint http://localhost:3000/v1/todo/todos. If you log in with the user admin/admin, you will also be able to access the methods UPDATE and PATCH.

+
+
+
+

24.3. Employee example

+
+

This is an example of employee management application. With the application you can create, read, update and delete employees.

+
+
+

In order to start the application, run the following commands in the employee folder:

+
+
+
+
$ yarn
+$ yarn build
+$ yarn start
+
+
+
+

Now, you can access the application using the URL http://localhost:8081/v1/employee/employees. If you want to know all exposed endpoints, you can see the swagger at: http://localhost:8081/v1/api.

+
+
+

This is a simple example without authentication. With this example you can learn how to work with database migrations. You can find them in the folder /src/migrations. The TypeORM is configured in order to execute the migrations every time that you start this application at ormconfig.json with the following flag:

+
+
+
+
"migrationsRun": true
+
+
+
+

You can also execute the migration manually by typing the command devon4node db migration:run, or revert executing devon4node db migration:revert. Take into account that the database that this application is using is an in-memory sqlite, so every time that you stop the application all data is lost.

+
+
+
+

24.4. Components example

+
+

This example allows you to better understand the execution order of the components of a devon4node application (guards, pipes, interceptors, filters, middleware).

+
+
+

In order to start the application, run the following commands in the components folder:

+
+
+
+
$ yarn
+$ yarn build
+$ yarn start
+
+
+
+

In order to see the execution order, you can call to http://localhost:3000/v1. It will show you the execution order of all components except the filters. If you want to know the execution order while a filter is applied, call to the endpoint with the following queries: ?hello=error, ?hello=controller, ?hello=global.

+
+ +
+
+

24.5. Create the employee sample step by step

+ +
+
+

24.6. Application requisites

+
+

The employee application needs:

+
+
+
    +
  • +

    A configuration module

    +
  • +
  • +

    A SQLite in memory database

    +
  • +
  • +

    Security: CORS

    +
  • +
  • +

    Swagger support

    +
  • +
  • +

    Authentication using JWT

    +
  • +
  • +

    CRUD for manage employees. The employees will have the following properties:

    +
    +
      +
    • +

      name

      +
    • +
    • +

      surname

      +
    • +
    • +

      email

      +
    • +
    +
    +
  • +
+
+
+
+

24.7. Create the application

+
+
    +
  1. +

    Install Nest CLI

    +
    +

    Execute the command yarn global add @nestjs/cli

    +
    +
  2. +
  3. +

    Install devon4node schematics

    +
  4. +
  5. +

    Execute the command yarn global add @devon4node/schematics

    +
  6. +
  7. +

    Create the new application

    +
    +

    Execute the command nest g -c @devon4node/schematics application employee

    +
    +
  8. +
  9. +

    Then, we need to add some components, go inside the project folder and execute the following commands:

    +
    +

    Go inside project folder: cd employee.

    +
    +
    +

    Config module: nest g -c @devon4node/schematics config-module.

    +
    +
    +

    TypeORM database, choose sqlite DB when asked nest g -c @devon4node/schematics typeorm.

    +
    +
    +

    Add security: nest g -c @devon4node/schematics security.

    +
    +
    +

    Swagger module: nest g -c @devon4node/schematics swagger.

    +
    +
    +

    Auth-jwt authentication: nest g -c @devon4node/schematics auth-jwt.

    +
    +
    +

    Add an application module: nest g -c @devon4node/schematics module employee.

    +
    +
    +

    Add CRUD component: nest g -c @devon4node/schematics crud employee/employee.

    +
    +
    +

    With this, you will generate the following files:

    +
    +
    +
    +
    /employee/.prettierrc
    +/employee/nest-cli.json
    +/employee/package.json
    +/employee/README.md
    +/employee/tsconfig.build.json
    +/employee/tsconfig.json
    +/employee/tslint.json
    +/employee/src/main.ts
    +/employee/test/app.e2e-spec.ts
    +/employee/test/jest-e2e.json
    +/employee/src/app/app.controller.spec.ts
    +/employee/src/app/app.controller.ts
    +/employee/src/app/app.module.ts
    +/employee/src/app/app.service.ts
    +/employee/src/app/core/core.module.ts
    +/employee/src/app/shared/logger/winston.logger.ts
    +/employee/src/app/core/configuration/configuration.module.ts
    +/employee/src/app/core/configuration/model/index.ts
    +/employee/src/app/core/configuration/model/types.ts
    +/employee/src/app/core/configuration/services/configuration.service.spec.ts
    +/employee/src/app/core/configuration/services/configuration.service.ts
    +/employee/src/app/core/configuration/services/index.ts
    +/employee/src/config/default.ts
    +/employee/src/config/develop.ts
    +/employee/src/config/production.ts
    +/employee/src/config/test.ts
    +/employee/src/config/uat.ts
    +/employee/docker-compose.yml
    +/employee/ormconfig.json
    +/employee/src/app/shared/model/entities/base-entity.entity.ts
    +/employee/src/app/core/auth/auth.module.ts
    +/employee/src/app/core/auth/controllers/auth.controller.spec.ts
    +/employee/src/app/core/auth/controllers/auth.controller.ts
    +/employee/src/app/core/auth/controllers/index.ts
    +/employee/src/app/core/auth/decorators/index.ts
    +/employee/src/app/core/auth/decorators/roles.decorator.spec.ts
    +/employee/src/app/core/auth/decorators/roles.decorator.ts
    +/employee/src/app/core/auth/guards/index.ts
    +/employee/src/app/core/auth/guards/roles.guard.spec.ts
    +/employee/src/app/core/auth/guards/roles.guard.ts
    +/employee/src/app/core/auth/model/index.ts
    +/employee/src/app/core/auth/model/roles.enum.ts
    +/employee/src/app/core/auth/model/user-request.interface.ts
    +/employee/src/app/core/auth/services/auth.service.spec.ts
    +/employee/src/app/core/auth/services/auth.service.ts
    +/employee/src/app/core/auth/services/index.ts
    +/employee/src/app/core/auth/strategies/index.ts
    +/employee/src/app/core/auth/strategies/jwt.strategy.spec.ts
    +/employee/src/app/core/auth/strategies/jwt.strategy.ts
    +/employee/src/app/core/user/user.module.ts
    +/employee/src/app/core/user/model/index.ts
    +/employee/src/app/core/user/model/dto/user-payload.dto.ts
    +/employee/src/app/core/user/model/entities/user.entity.ts
    +/employee/src/app/core/user/services/index.ts
    +/employee/src/app/core/user/services/user.service.spec.ts
    +/employee/src/app/core/user/services/user.service.ts
    +/employee/test/auth/auth.service.mock.ts
    +/employee/test/user/user.repository.mock.ts
    +/employee/src/app/employee/employee.module.ts
    +/employee/src/app/employee/model/entities/employee.entity.ts
    +/employee/src/app/employee/model/index.ts
    +/employee/src/app/employee/controllers/employee.crud.controller.ts
    +/employee/src/app/employee/services/employee.crud.service.ts
    +/employee/src/app/employee/services/index.ts
    +/employee/src/app/employee/controllers/index.ts
    +
    +
    +
  10. +
  11. +

    Open the VSCode

    +
    +

    Execute the commands:

    +
    +
    +
    +
    yarn install
    +code .
    +
    +
    +
  12. +
  13. +

    Fill in the entity: src/app/employee/model/entities/employee.entity.ts

    +
    +
      +
    1. +

      Add the columns

      +
      +
      +
      @Entity()
      +export class Employee extends BaseEntity {
      +  @Column('varchar', { length: 255, nullable: true })
      +  name?: string;
      +
      +  @Column('varchar', { length: 255, nullable: true })
      +  surname?: string;
      +
      +  @Column('varchar', { length: 255, nullable: true })
      +  email?: string;
      +}
      +
      +
      +
    2. +
    3. +

      Add the validations

      +
      +
      +
      @Entity()
      +export class Employee extends BaseEntity {
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  name?: string;
      +
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  surname?: string;
      +
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @IsEmail()
      +  @Column('varchar', { length: 255, nullable: true })
      +  email?: string;
      +}
      +
      +
      +
    4. +
    5. +

      Add the transformations

      +
      +

      In this specific case, we will not transform any property, but you can see an example in the src/app/shared/model/entities/base-entity.entity.ts file.

      +
      +
      +
      +
      export abstract class BaseEntity {
      +  @PrimaryGeneratedColumn('increment')
      +  id!: number;
      +
      +  @VersionColumn({ default: 1 })
      +  @Exclude({ toPlainOnly: true })
      +  version!: number;
      +
      +  @CreateDateColumn()
      +  @Exclude({ toPlainOnly: true })
      +  createdAt!: string;
      +
      +  @UpdateDateColumn()
      +  @Exclude({ toPlainOnly: true })
      +  updatedAt!: string;
      +}
      +
      +
      +
    6. +
    7. +

      Add swagger metadata

      +
      +
      +
      @Entity()
      +export class Employee extends BaseEntity {
      +  @ApiPropertyOptional()
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  name?: string;
      +
      +  @ApiPropertyOptional()
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @Column('varchar', { length: 255, nullable: true })
      +  surname?: string;
      +
      +  @ApiPropertyOptional()
      +  @IsDefined({ groups: [CrudValidationGroups.CREATE] })
      +  @IsOptional({ groups: [CrudValidationGroups.UPDATE] })
      +  @MaxLength(255)
      +  @IsEmail()
      +  @Column('varchar', { length: 255, nullable: true })
      +  email?: string;
      +}
      +
      +
      +
    8. +
    +
    +
  14. +
  15. +

    Add swagger metadata to src/app/employee/controllers/employee.crud.controller.ts

    +
    +
    +
    @ApiTags('employee')
    +
    +
    +
  16. +
  17. +

    Generate database migrations

    +
    +
      +
    1. +

      Build the application: yarn build

      +
    2. +
    3. +

      In order to create migration scripts with TypeORM, you need to install ts-node: yarn global add ts-node

      +
    4. +
    5. +

      Generate the tables creation migration: yarn run typeorm migration:generate -n CreateTables

      +
      +
      +generate migrations +
      +
      +
      +

      The output will be something similar to:

      +
      +
      +
      +
      export class CreateTables1572480273012 implements MigrationInterface {
      +  name = 'CreateTables1572480273012';
      +
      +  public async up(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(
      +      `CREATE TABLE "user" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "version" integer NOT NULL DEFAULT (1), "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "updatedAt" datetime NOT NULL DEFAULT (datetime('now')), "username" varchar(255) NOT NULL, "password" varchar(255) NOT NULL, "role" integer NOT NULL DEFAULT (0))`,
      +      undefined,
      +    );
      +    await queryRunner.query(
      +      `CREATE TABLE "employee" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "version" integer NOT NULL DEFAULT (1), "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "updatedAt" datetime NOT NULL DEFAULT (datetime('now')), "name" varchar(255), "surname" varchar(255), "email" varchar(255))`,
      +      undefined,
      +    );
      +  }
      +
      +  public async down(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(`DROP TABLE "employee"`, undefined);
      +    await queryRunner.query(`DROP TABLE "user"`, undefined);
      +  }
      +}
      +
      +
      +
      +

      The number in the name is a timestamp, so it may change in your application.

      +
      +
    6. +
    7. +

      Create a migration to insert data: yarn run typeorm migration:generate -n InsertData

      +
      +
      +insert data +
      +
      +
      +

      and fill in with the following code:

      +
      +
      +
      +
      export class InsertData1572480830290 implements MigrationInterface {
      +  public async up(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(1, 'Santiago', 'Fowler', 'Santiago.Fowler@example.com');`,
      +    );
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(2, 'Clinton', 'Thornton', 'Clinton.Thornton@example.com');`,
      +    );
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(3, 'Lisa', 'Rodriquez', 'Lisa.Rodriquez@example.com');`,
      +    );
      +    await queryRunner.query(
      +      `INSERT INTO EMPLOYEE(id, name, surname, email) VALUES(4, 'Calvin', 'Becker', 'Calvin.Becker@example.com');`,
      +    );
      +    await queryRunner.query(`INSERT INTO USER(id, username, password, role) VALUES(?, ?, ?, ?);`, [
      +      1,
      +      'user',
      +      await hash('password', await genSalt(12)),
      +      roles.USER,
      +    ]);
      +    await queryRunner.query(`INSERT INTO USER(id, username, password, role) VALUES(?, ?, ?, ?);`, [
      +      2,
      +      'admin',
      +      await hash('admin', await genSalt(12)),
      +      roles.ADMIN,
      +    ]);
      +  }
      +
      +  public async down(queryRunner: QueryRunner): Promise<any> {
      +    await queryRunner.query(`DELETE FROM EMPLOYEE`);
      +    await queryRunner.query(`DELETE FROM USER`);
      +  }
      +}
      +
      +
      +
    8. +
    +
    +
  18. +
  19. +

    Start the application: yarn start:dev

    +
    +
    +start app +
    +
    +
  20. +
  21. +

    Check the swagger endpoint: http://localhost:3000/v1/api

    +
    +
    +swagger +
    +
    +
  22. +
  23. +

    Make petitions to the employee CRUD: http://localhost:3000/v1/employee/employees

    +
    +
    +employees +
    +
    +
  24. +
  25. +

    Write the tests

    +
    +

    As we did not create any method, and only added some properties to the entity, all the application code should be covered by the autogenerated tests. As we added some modules, you need to uncomment some lines in src/app/core/configuration/services/configuration.service.spec.ts:

    +
    +
    +
    +
    describe('ConfigurationService', () => {
    +  const configService: ConfigurationService = new ConfigurationService();
    +
    +  it('should return the values of test config file', () => {
    +    expect(configService.isDev).toStrictEqual(def.isDev);
    +    expect(configService.host).toStrictEqual(def.host);
    +    expect(configService.port).toStrictEqual(def.port);
    +    expect(configService.clientUrl).toStrictEqual(def.clientUrl);
    +    expect(configService.globalPrefix).toStrictEqual(def.globalPrefix);
    +    // Remove comments if you add those modules
    +    expect(configService.database).toStrictEqual(def.database);
    +    expect(configService.swaggerConfig).toStrictEqual(def.swaggerConfig);
    +    expect(configService.jwtConfig).toStrictEqual(def.jwtConfig);
    +    // expect(configService.mailerConfig).toStrictEqual(def.mailerConfig);
    +  });
    +  it('should take the value of environment varible if defined', () => {
    +    process.env.isDev = 'true';
    +    process.env.host = 'notlocalhost';
    +    process.env.port = '123456';
    +    process.env.clientUrl = 'http://theclienturl.net';
    +    process.env.globalPrefix = 'v2';
    +    process.env.swaggerConfig = JSON.stringify({
    +      swaggerTitle: 'Test Application',
    +    });
    +    process.env.database = JSON.stringify({
    +      type: 'oracle',
    +      cli: { entitiesDir: 'src/notentitiesdir' },
    +    });
    +    process.env.jwtConfig = JSON.stringify({ secret: 'NOTSECRET' });
    +    // process.env.mailerConfig = JSON.stringify({ mailOptions: { host: 'notlocalhost' }});
    +
    +    expect(configService.isDev).toBe(true);
    +    expect(configService.host).toBe('notlocalhost');
    +    expect(configService.port).toBe(123456);
    +    expect(configService.clientUrl).toBe('http://theclienturl.net');
    +    expect(configService.globalPrefix).toBe('v2');
    +    const database: any = { ...def.database, type: 'oracle' };
    +    database.cli.entitiesDir = 'src/notentitiesdir';
    +    expect(configService.database).toStrictEqual(database);
    +    expect(configService.swaggerConfig).toStrictEqual({
    +      ...def.swaggerConfig,
    +      swaggerTitle: 'Test Application',
    +    });
    +    expect(configService.jwtConfig).toStrictEqual({
    +      ...def.jwtConfig,
    +      secret: 'NOTSECRET',
    +    });
    +    // const mail: any = { ...def.mailerConfig };
    +    // mail.mailOptions.host = 'notlocalhost';
    +    // expect(configService.mailerConfig).toStrictEqual(mail);
    +
    +    process.env.isDev = undefined;
    +    process.env.host = undefined;
    +    process.env.port = undefined;
    +    process.env.clientUrl = undefined;
    +    process.env.globalPrefix = undefined;
    +    process.env.database = undefined;
    +    process.env.swaggerConfig = undefined;
    +    process.env.jwtConfig = undefined;
    +    // process.env.mailerConfig = undefined;
    +  });
    +});
    +
    +
    +
    +

    And the output should be:

    +
    +
    +
    +test +
    +
    +
  26. +
+
+
+
+
+
+

25. Choosing your Database

+
+ +
+

25.1. Database

+
+

For your business application with devonfw you need to choose the right database. +In devonfw we are not biased for a particular product so you have the freedom of choice.

+
+
+
+

25.2. RDBMS

+
+

The classical and well-established form of a database is a relational database management system (RDBMS). +In devonfw we recommend to use an RDBMS unless you have a specific need. +However, in case you have the need for big data, graph data, BLOB focus, or schema-less dynamic data you can have a look at NoSQL options, but be aware that these may be experimental and are not fully supported by devonfw.

+
+
+
+

25.3. Options

+
+

In devonfw we are not biased for a particular RDBMS so you have the freedom of choice. +Here are the most common options:

+
+
+
    +
  • +

    SAP Hana (high performance in-memory, many advanced features)

    +
  • +
  • +

    Oracle (most established, well featured for enterprise)

    +
  • +
  • +

    PostgreSQL (great open-source RDBMS)

    +
  • +
  • +

    MariaDB (true OSS successor of MySQL)

    +
  • +
  • +

    MS SQL Server (best choice for Microsoft and Windows dominated IT landscapes)

    +
  • +
+
+
+

Please click on any of the above choices and go to the corresponding guide to find specific details such as client/driver.

+
+
+
+

25.4. NoSQL

+
+

While not (yet) officially supported and recommended, there are also interesting NoSQL (Not Only SQL) databases that could be an interesting alternative. Please be aware that you will typically not be able to use JPA (and Hibernate). Furthermore, before choosing a NoSQL database you should check the following aspects:

+
+
+
    +
  • +

    Is the database of choice reliable and mature enough for your project?

    +
  • +
  • +

    Can the operators of your product support the database of choice properly (provisioning, administration, backup, scaling & clustering, monitoring, etc.)?

    +
  • +
  • +

    Does the database of choice meet the requirements of your project (ACID vs. eventual consistency, CAP theorem)?

    +
  • +
+
+
+

There are good reasons to choose a particular NoSQL database in specific cases (e.g. extreme demand for big data, throughput or scaling). +But as indicated by the questions above you need to be fully aware of what you are doing. +NoSQL databases can be schemaless (untyped, dynamic & flexible) and/or schemaful (typed, structured & strict). +Further, there are different types of NoSQL databases that are discussed in the following sub-sections:

+
+
+
+

25.5. Java

+ +
+
+

25.6. == Column DB

+
+

Column NoSQL databases are more related to a regular RDBMS with their tables and columns. +However, they typically do not offer relational support with joins to the same level as you expect from an RDBMS. +Therefore, you have to carefully design your data model upfront with all the knowledge of how you later want to query your data.

+
+
+

The most prominent options are:

+
+
+
    +
  • +

    Cassandra (high-performance, schema-based DB)

    +
  • +
  • +

    HBase (distributed, big-data Hadoop database)

    +
  • +
+
+
+
+

25.7. == Key-Value DB

+
+

As indicated by the name, a key-value database stores objects as key/value pairs similar to Properties or Map in Java.

+
+
+

The most prominent options are:

+
+
+
    +
  • +

    Redis (in-memory key/value store, especially used as cache or message broker)

    +
  • +
  • +

    aerospike

    +
  • +
+
+
+
+

25.8. == Document DB

+
+

A document database is similar to a key-value database, but it stores objects in standard structured formats such as XML, JSON, or BSON. +Therefore not only flat key/value pairs but even trees of hierarchical data can be stored, retrieved and queried.

+
+
+

The most prominent options are:

+
+
+ +
+
+
+

25.9. == Graph DB

+
+

If the connections (links/relations) between your data is key and an RDBMS is just not flexible or fast enough for your plans, then a graph database can help you. +They are very strong on storing and querying complex connections between entities. +For queries there are even specific standards and languages like Gremlin.

+
+
+

The most prominent options are:

+
+
+ +
+
+
+

25.10. == Hybrid DB

+
+

In addition to the above types there are some NoSQL databases that are hybrid and combine the features and aspects of these types. +While as an architect and developer you might love the idea to get all in one, you have to be careful with your choice. +If you do not exactly know your problem, you are not ready to make the right choice for your database. +Further, you might still be best off with a good old RDBMS if you need to address multiple aspects together. +Anyhow, for experiments, PoCs, or small microservices with little risk it might be a great idea to choose a hybrid NoSQL database. +If you have collected very positive, profound and productive experience with such a product you can grow on it.

+
+
+

The most prominent options are:

+
+
+
    +
  • +

    OrientDB (object-oriented, hyper-flexible, column- and graph-based)

    +
  • +
+
+ +
+
+

25.11. SAP HANA

+
+

This section contains hints for those who use SAP HANA, a very powerful and fast RDBMS. Besides general hints about the driver there are tips for more tight integration with other SAP features or products.

+
+
+
+

25.12. Java

+ +
+
+

25.13. Driver

+
+

SAP Hana is a commercial and professional product. +However, the hana JDBC driver is available in Maven Central what makes it easy to integrate. +All you need is the following maven dependency:

+
+
+
+
<dependency>
+  <groupId>com.sap.cloud.db.jdbc</groupId>
+  <artifactId>ngdbc</artifactId>
+  <version>${hana.driver.version}</version>
+</dependency>
+
+
+
+

Of course the version (${hana.driver.version}) needs to be adapted to your needs (the HANA installation in production, e.g. 2.4.64). +For an overview of available driver versions see here.

+
+
+
+

25.14. Developer Usage

+
+

For your local development environment you will love the free SAP HANA, Express Edition.

+
+
+

You can run HANA in several ways:

+
+
+ +
+
+

To get started with SAP HANA, Express Edition you can check out the tutorials at the SAP Developer Center.

+
+
+ + +
+

25.17. Oracle RDBMS

+
+

This section contains hints for those who use Oracle RDBMS. Besides general hints about the driver there are tips for more tight integration with other Oracle features or products. However, if you work for a project where Oracle RDBMS is settled and not going to be replaced (you are in a vendor lock-in anyway), you might want to use even more from Oracle technology to take advantage from a closer integration.

+
+
+
+

25.18. Java

+ +
+
+

25.19. XE

+
+

For local development you should setup Oracle XE (eXpress Edition). +You need an oracle account, then you can download it from here.

+
+
+

The most comfortable way to run it as needed is using docker. You can build your own docker image from the downloaded RPM using the instructions and dockerfile from oracle. The following commands will build and start Oracle XE 18.4.0 on your machine:

+
+
+
+
git clone https://github.com/oracle/docker-images.git
+cd docker-images/OracleDatabase/SingleInstance/dockerfiles
+./buildDockerImage.sh -x -v 18.4.0
+docker run -d -p 1521:1521 --name=oracle-xe --restart=always -e ORACLE_PWD=«my-sys-pwd» oracle/database:18.4.0-xe
+
+
+
+

Please note that the buildDockerImage.sh will take a long time. Furthermore, after docker run has completed you need to give your new container time to start up and set up the Oracle XE DB. So be patient and give it some time. +(In case the build of the docker image fails reproducibly and you want to give up on the Dockerfiles from Oracle, you can also try this unofficial docker-oracle-xe solution. However, this is not recommended and may lead to other problems.)

+
+
+

Starting with XE 18c you need to be aware that oracle introduced a multi-tenant architecture. Hence xe refers to the root CDB while you typically want to connect to the PDB (pluggable database) and XE ships with exactly one of this called xepdb1. +To connect to your local XE database you need to use xepdb1 as the Service Name (typically in SQL Developer). The hostname should be localhost and the port is by default 1521 if you did not remap it with docker to something else. +In order to create schema users, use sys as Username and change Role to SYSDBA.

+
+
+

Hint: If you happen to end up connected to xe instead of xepdb1 in some case (e.g. in sqlplus), you may switch using this statement:

+
+
+
+
ALTER SESSION SET CONTAINER = XEPDB1;
+
+
+
+

The JDBC URL for your Oracle XE Database is:

+
+
+
+
jdbc:oracle:thin:@//localhost:1521/xepdb1
+
+
+
+

To locally connect as sysdba without password use the following command (connect / as sysdba is not working anymore):

+
+
+
+
sqlplus sys/Oracle18@localhost/XE as sysdba
+
+
+
+
+

25.20. Driver

+
+

The oracle JDBC driver is available in maven central. +Oracle JDBC drivers usually are backward and forward compatible so you should be able to use an older driver with a newer Oracle DB, etc. +Your dependency for the oracle driver should look as follows:

+
+
+
+
<dependency>
+  <groupId>com.oracle.database.jdbc</groupId>
+  <artifactId>ojdbc10</artifactId>
+  <version>${oracle.driver.version}</version>
+</dependency>
+
+
+
+

For the most recent Oracle DB 19 the property oracle.driver.version should be 19.8.0.0. The number in the artifactId correlates to the minimum Java Version so for Java8 artifactId should be ojdbc8 instead. It is fine to use ojdbc10 with Java11 or higher.

+
+
+
+

25.21. Pooling

+
+

In order to boost performance JDBC connections should be pooled and reused. If you are using Oracle RDBMS and do not plan to change that you can use the Oracle specific connection pool "Universal Connection Pool (UCP)" that is perfectly integrated with the Oracle driver. According to the documentation, UCP can even be used to manage third party data sources. +Like the JDBC driver also the UCP is available in maven central. The dependency should look like this:

+
+
+
+
<dependency>
+  <groupId>com.oracle.database.jdbc</groupId>
+  <artifactId>ucp</artifactId>
+  <version>${oracle.ucp.version}</version>
+</dependency>
+
+
+
+

with property oracle.ucp.version analogue to oracle.driver.version.

+
+
+

Configuration is done via application.properties like this (example):

+
+
+
+
#Oracle UCP
+##Datasource for accessing the database
+spring.datasource.url=jdbc:oracle:thin:@192.168.58.2:1521:xe
+spring.jpa.database-platform=org.hibernate.dialect.Oracle12cDialect
+spring.datasource.user=MyUser
+spring.datasource.password=ThisIsMyPassword
+spring.datasource.driver-class-name=oracle.jdbc.OracleDriver
+spring.datasource.schema=MySchema
+
+spring.datasource.type=oracle.ucp.jdbc.PoolDataSourceImpl
+spring.datasource.factory=oracle.ucp.jdbc.PoolDataSourceFactory
+spring.datasource.factory-method=getPoolDataSource
+spring.datasource.connectionFactoryClassName=oracle.jdbc.pool.OracleDataSource
+spring.datasource.validateConnectionOnBorrow=true
+spring.datasource.connectionPoolName=MyPool
+spring.datasource.jmx-enabled=true
+
+##Optional: Set the log level to INTERNAL_ERROR, SEVERE, WARNING, INFO, CONFIG, FINE, TRACE_10, FINER, TRACE_20, TRACE_30, or FINEST
+##logging.level.oracle.ucp=INTERNAL_ERROR
+##Optional: activate tracing
+##logging.level.oracle.ucp.jdbc.oracle.OracleUniversalPooledConnection=TRACE
+
+#Optional: Configures pool size manually
+#spring.datasource.minPoolSize=10
+#spring.datasource.maxPoolSize=40
+#spring.datasource.initialPoolSize=20
+
+
+
+

Resources: FAQ, developer’s guide, Java API Reference. For an in-depth discussion on how to use JDBC and UCP, see the Oracle documentation Connection Management Strategies for Java Applications using JDBC and UCP.

+
+
+

Note: there is a bug in UCP 12.1.0.2 that results in the creation of thousands of java.lang.Timer threads over hours or days of system uptime (see article on stackoverflow). Also, Oracle has a strange bug fixing / patching policy: instead of producing a fixed version 12.1.0.3 or 12.1.0.2.x, Oracle publishes collections of *.class files that must be manually patched into the ucp.jar! Therefore, use the newest versions only.

+
+
+
+

25.22. Messaging

+
+

In case you want to do messaging based on JMS you might consider the Oracle JMS also called Oracle Streams Advanced Queuing, or Oracle Advanced Queuing, or OAQ or AQ for short. OAQ is a JMS provider based on the Oracle RDBMS and included in the DB product for no extra fee. OAQ has some features that exceed the JMS standard like a retention time (i.e. a built-in backup mechanism that allows to make messages "unread" within a configurable period of time so that these messages do not have to be resent by the sending application). Also, OAQ messages are stored in relational tables so they can easily be observed by a test driver in a system test scenario. +Capgemini has used the Spring Data JDBC Extension in order to process OAQ messages within the same technical transaction as the resulting Oracle RDBMS data changes without using 2PC and an XA-compliant transaction manager - which is not available out of the box in Tomcat. This is possible only due to the fact that OAQ queues and RDBMS tables actually reside in the same database. However, this is higher magic and should only be tried if high transaction rates must be achieved by avoiding 2PC.

+
+
+
+

25.23. General Notes on the use of Oracle products

+
+

Oracle sells commercial products and receives licence fees for them. This includes access to a support organization. Therefore, at an early stage of your project, prepare for contacting oracle support in case of technical problems. You will need the Oracle support ID of your customer [i.e. the legal entity who pays the licence fee and runs the RDBMS] and your customer must grant you permission to use it in a service request - it is not legal to use your own support ID in a customer-related project. Your customer pays for that service anyway, so use it in case of a problem!

+
+
+

Software components like the JDBC driver or the UCP may be available without a registration or fee but they are protected by the Oracle Technology Network (OTN) License Agreement. The most important aspect of this licence agreement is the fact that an IT service provider is not allowed to simply download the Oracle software component, bundle it in a software artefact and deliver it to the customer. Instead, the Oracle software component must be (from a legal point of view) provided by the owner of the Oracle DB licence (i.e. your customer). This can be achieved in two ways: Advise your customer to install the Oracle software component in the application server as a library that can be used by your custom built system. Or, in cases where this is not feasible, e.g. in a OpenShift environment where the IT service provider delivers complete Docker images, you must advise your customer to (legally, i.e. documented in a written form) provide the Oracle software component to you, i.e. you don’t download the software component from the Oracle site but receive it from your customer.

+
+
+
+

25.24. Fix for TNS-Listener issues

+
+

When switching networks (e.g. due to VPN) you might end up in a situation where your local Oracle XE stops working with this error:

+
+
+
+
Listener refused the connection with the following error:
+ORA-12505, TNS:listener does not currently know of SID given in connect descriptor
+
+
+
+

While a reboot resolves this problem, it is a huge pain to reboot every time this error occurs as this wastes a lot of time. +Therefore we suggest the following fix:

+
+
+
    +
  • +

    Go to your oracle installation and open the folder product/«version»/dbhomeXE/network/admin.

    +
  • +
  • +

    Edit the file listener.ora and change the value of the property HOST from your qualified hostname to localhost (HOST = localhost).

    +
  • +
  • +

    Edit the file tnsnames.ora and change the value of the HOST properties (two occurrences) from your qualified hostname to localhost (HOST = localhost).

    +
  • +
  • +

    Reboot your machine or (on windows) restart the service OracleServiceXE via services.msc.

    +
  • +
  • +

    Now this problem should be gone forever and you can continue your work.

    +
  • +
+
+
+

On older XE versions until 11g you could run the following SQL (sqlplus / as sysdba @reset_tns_listener.sql):

+
+
+
+
WHENEVER SQLERROR EXIT;
+ALTER SYSTEM SET local_listener = '(ADDRESS = (PROTOCOL = TCP)(HOST = 127.0.0.1)(PORT = 1521))';
+ALTER SYSTEM REGISTER;
+EXIT;
+
+
+ +
+
+

25.25. MS-SQL-Server

+
+

This section gives guidance and hints for those who use Microsoft SQL Server as RDBMS.

+
+
+
+

25.26. Java

+ +
+
+

25.27. Driver

+
+

Microsoft SQL Server is a commercial and professional product. +However, the JDBC driver is MIT licensed and available in Maven Central, which makes it easy to integrate. +Your dependency for the driver should look as follows:

+
+
+
+
<dependency>
+    <groupId>com.microsoft.sqlserver</groupId>
+    <artifactId>mssql-jdbc</artifactId>
+    <version>${mssqlserver.driver.version}</version>
+</dependency>
+
+
+
+

Of course the version (${mssqlserver.driver.version}) needs to be adapted to your needs (SQL Server installation in production and JDK version, e.g. 7.4.1.jre8). +For an overview of available driver versions see here.

+
+ +
+
+

25.28. PostgreSQL

+
+

This section gives guidance and hints for those who use PostgreSQL as RDBMS.

+
+
+
+

25.29. Java

+ +
+
+

25.30. Driver

+
+

PostgreSQL is fully open-source. The driver is therefore available in maven central. +Your dependency for the driver should look as follows:

+
+
+
+
<dependency>
+    <groupId>postgresql</groupId>
+    <artifactId>postgresql</artifactId>
+    <version>${postgresql.driver.version}</version>
+</dependency>
+
+
+
+

Of course the version (${postgresql.driver.version}) needs to be adapted to your needs (PostgreSQL installation in production and JDBC level suitable for your JDK, e.g. 9.1-901-1.jdbc4). +For an overview of available driver versions see here.

+
+ +
+
+

25.31. MariaDB

+
+

This section gives guidance and hints for those who use MariaDB as RDBMS.

+
+
+
+

25.32. Java

+ +
+
+

25.33. Driver

+
+

MariaDB is fully open-source. The driver is therefore available in maven central. +Your dependency for the driver should look as follows:

+
+
+
+
<dependency>
+    <groupId>org.mariadb.jdbc</groupId>
+    <artifactId>mariadb-java-client</artifactId>
+    <version>${mariadb.driver.version}</version>
+</dependency>
+
+
+
+

Of course the version (${mariadb.driver.version}) needs to be adapted to your needs (MariaDB installation in production and JDK version, e.g. 2.5.1). +For an overview of available driver versions see here.

+
+ +
+
+

25.34. Database

+
+

For your business application with devonfw you need to choose the right database. +In devonfw we are not biased for a particular product so you have the freedom of choice.

+
+
+
+

25.35. RDBMS

+
+

The classical and well-established form of a database is a relational database management system (RDBMS). +In devonfw we recommend to use an RDBMS unless you have specific need. +However, in case you have the need for big data, graph-data, BLOB focus, or schema-less dynamic data you can have a look at NoSQL options but be aware that these may be experimental and are not fully supported by devonfw.

+
+
+
+

25.36. Options

+
+

In devonfw we are not biased for a particular RDBMS so you have the freedom of choice. +Here are the most common options:

+
+
+
    +
  • +

    SAP Hana (high performance in-memory, many advanced features)

    +
  • +
  • +

    Oracle (most established, well featured for enterprise)

    +
  • +
  • +

    PostgreSQL (great open-source RDBMS)

    +
  • +
  • +

    MariaDB (true OSS successor of MySQL)

    +
  • +
  • +

    MS SQL Server (best choice for Microsoft and Windows dominated IT landscapes)

    +
  • +
+
+
+

Please click on any of the above choices and go to the according guide to find specific details such as client/driver.

+
+
+
+

25.37. NoSQL

+
+

While not (yet) officially supported and recommended there are also interesting NoSQL (Not Only SQL) databases that could be an interesting alternative. Please be aware that you will typically not be able to use JPA (and hibernate). Further, before choosing a NoSQL database you should check the following aspects:

+
+
+
    +
  • +

    Is the database of choice reliable and mature enough for your project?

    +
  • +
  • +

    Can the operators of your product support the database of choice properly (provisioning, administration, backup, scaling & clustering, monitoring, etc.)?

    +
  • +
  • +

    Does the database of choice meet the requirements of your project (ACID vs. eventual consistency, CAP theorem)?

    +
  • +
+
+
+

There are good reasons to choose a particular NoSQL database in specific cases (e.g. extreme demand for big-data, throughput or scaling). +But as indicated by the questions above you need to be fully aware of what you are doing. +NoSQL databases can be schemaless (untyped, dynamic & flexible) and/or schemaful (typed, structured & strict). +Further, there are different types of NoSQL databases that are discussed in the following sub-sections:

+
+
+
+

25.38. Java

+ +
+
+

25.39. == Column DB

+
+

Column NoSQL databases are more related to a regular RDBMS with their tables and columns. +However, they typically do not offer relational support with joins to the same level as you expect from an RDBMS. +Therefore, you have to carefully design your data-model upfront with all the knowledge of how you later want to query your data.

+
+
+

The most prominent options are:

+
+
+
    +
  • +

    Cassandra (high-performance, schema-based DB)

    +
  • +
  • +

    HBase (distributed, big-data Hadoop database)

    +
  • +
+
+
+
+

25.40. == Key-Value DB

+
+

As indicated by the name, a key-value database stores objects as key/value pairs similar to Properties or Map in Java.

+
+
+

The most prominent options are:

+
+
+
    +
  • +

    Redis (in-memory key/value store, especially used as cache or message broker)

    +
  • +
  • +

    aerospike

    +
  • +
+
+
+
+

25.41. == Document DB

+
+

A document database is similar to a key-value database, but it stores objects in standard structured formats such as XML, JSON, or BSON. +Therefore not only flat key/value pairs but even trees of hierarchical data can be stored, retrieved and queried.

+
+
+

The most prominent options are:

+
+
+ +
+
+
+

25.42. == Graph DB

+
+

If the connections (links/relations) between your data is key and an RDBMS is just not flexible or fast enough for your plans, then a graph database can help you. +They are very strong on storing and querying complex connections between entities. +For queries there are even specific standards and languages like Gremlin.

+
+
+

The most prominent options are:

+
+
+ +
+
+
+

25.43. == Hybrid DB

+
+

In addition to the above types there are some NoSQL databases that are hybrid and combine the features and aspects of these types. +While as an architect and developer you might love the idea to get all in one, you have to be careful with your choice. +If you do not exactly know your problem, you are not ready to make the right choice for your database. +Further, you might still be best off with a good old RDBMS if you need to address multiple aspects together. +Anyhow, for experiments, PoCs, or small microservices with little risk it might be a great idea to choose a hybrid NoSQL database. +If you have collected very positive, profound and productive experience with such a product you can grow on it.

+
+
+

The most prominent options are:

+
+
+
    +
  • +

    OrientDB (object-oriented, hyper-flexible, column- and graph-based)

    +
  • +
+
+ +
+
+

25.44. Cassandra

+
+

This section is the place to share experience for those who use Cassandra as NoSQL database.

+
+
+
+

25.45. Java

+ +
+
+

25.46. Attention

+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+

25.47. Driver

+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see here.

+
+
+
+

25.48. Spring-Data

+
+

There is spring-data support available for cassandra via spring-data-cassandra.

+
+
+ + + + + +
+ + +Please note that some time ago we had feedback from projects that had issues with spring-data-cassandra and switched back to using the driver natively. We assume the issues are meanwhile resolved. TODO: collect more feedback and update this guide. +
+
+ +
+
+

25.49. neo4j

+
+

This section is the place to share experience for those who use neo4j as NoSQL database.

+
+
+
+

25.50. Java

+ +
+
+

25.51. Attention

+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+

25.52. Driver

+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see here.

+
+
+
+

25.53. Spring-Data

+
+

There is spring-data integration available via spring-data-neo4j.

+
+ +
+
+

25.54. MongoDB

+
+

This section is the place to share experience for those who use MongoDB as NoSQL database.

+
+
+
+

25.55. Java

+ +
+
+

25.56. Attention

+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+

25.57. Driver

+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see here.

+
+ +
+
+

25.58. CouchDB

+
+

This section is the place to share experience for those who use CouchDB as NoSQL database.

+
+
+
+

25.59. Java

+ +
+
+

25.60. Attention

+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+

25.61. Driver

+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see here.

+
+ +
+
+

25.62. Redis

+
+

This section is the place to share experience for those who use Redis as NoSQL database.

+
+
+
+

25.63. Java

+ +
+
+

25.64. Attention

+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+

25.65. Driver

+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see here.

+
+ +
+
+

25.66. OrientDB

+
+

This section is the place to share experience for those who use OrientDB (see also Open-Source community edition) as NoSQL database.

+
+
+
+

25.67. Java

+ +
+
+

25.68. Attention

+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+

25.69. Driver

+
+

For driver options see here.

+
+
+
+

25.70. Administration

+
+

OrientDB comes with a powerful, impressive admin interface for your web-browser called Studio.

+
+ +
+
+

25.71. Blazegraph

+
+

This section is the place to share experience for those who use Blazegraph as NoSQL database.

+
+
+
+

25.72. Java

+ +
+
+

25.73. Attention

+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+

25.74. Driver

+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see here.

+
+ +
+
+

25.75. HBase

+
+

This section is the place to share experience for those who use HBase as NoSQL database.

+
+
+
+

25.76. Java

+ +
+
+

25.77. Attention

+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+

25.78. Driver

+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see here and +hbase-java-api tutorial.

+
+ +
+
+

25.79. RavenDB

+
+

This section is the place to share experience for those who use RavenDB as NoSQL database.

+
+
+
+

25.80. Java

+ +
+
+

25.81. Attention

+
+ + + + + +
+ + +devonfw did not focus on the integration of this database so far. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+

25.82. Driver

+
+

Please be aware that there is not a regular JDBC driver in case you are using Java (devon4j). +For driver options see ravendb-jvm-client and Java Client Features.

+
+ +
+
+

25.83. GigaSpaces XAP (Smart Cache)

+
+

This section is the place to share experience for those who use GigaSpaces XAP as NoSQL database.

+
+
+
+

25.84. Java

+ +
+
+

25.85. Attention

+
+ + + + + +
+ + +A sample for GigaSpaces integration has been contributed from a graduate work, which will be described here. No reports have been given from our users about successfully integrating this database with any devonfw tech stack. If you want to share your knowledge or report usage, please contribute by clicking on the pen next to the section headline.If you need help on devonfw tech stack knowledge to get the integration working for you, stay in contact at GitHub. +
+
+
+
+

25.86. Possible Approach

+
+

GigaSpaces is currently not in the central maven repository, therefore an additional repository needs to be added along with the dependency:

+
+
+
+
<repositories>
+    <repository>
+	<id>org.openspaces</id>
+	<url>http://maven-repository.openspaces.org</url>
+    </repository>
+</repositories>
+
+<dependency>
+    <groupId>org.gigaspaces</groupId>
+    <artifactId>xap-openspaces</artifactId>
+    <version>${gsVersion}</version>
+</dependency>
+
+
+
+

Of course the version (${gsVersion}) needs to be adapted to your needs.

+
+
+
+
@Configuration
+public class ContextConfiguration {
+  @Bean
+  public GigaSpace space() {
+    UrlSpaceConfigurer urlSpaceConfigurer = new UrlSpaceConfigurer("jini://*/*/my-space");
+    return new GigaSpaceConfigurer(urlSpaceConfigurer).gigaSpace();
+  }
+}
+
+
+
+

To establish a connection with a running instance of GigaSpaces, a Configuration Class is required. Here a Bean will be created that retrieves via URL the name of a Space e.g. my-space (a Space is equivalent to a Database Schema). Of course a Space needs to be firstly created in order to use it (see also the Example). This bean can be used for all database typical operations e.g. create, read, update and delete data (a complete list can be found here). Another possibility to execute those operations is via spring-data (see section below). The spring-data-gigaspaces automatically detects if a GigaSpaces Bean exists.

+
+
+
+

25.87. Spring-Data

+
+

There is spring-data support available for GigaSpaces XAP (Smart Cache) via spring-data-gigaspaces.

+
+
+
+

25.88. Example

+
+

There is an implementation of the sample application, My Thai Star, using GigaSpaces XAP (Smart Cache) as data storage. More details can be found on mts-gigaspaces.

+
+
+
+
+
+

26. Tools

+
+
+

==CobiGen — Code-based incremental Generator +:title-logo-image: images/logo/cobigen_logo.png

+
+
+

26.1. Document Description

+
+

This document contains the documentation of the CobiGen core module as well as all CobiGen plug-ins and the CobiGen eclipse integration.

+
+
+ + + + + +
+ + +
+

DISCLAIMER: All CobiGen plugins are compatible with the latest release of Devonfw unless otherwise denoted.

+
+
+
+
+

Current versions:

+
+
+
    +
  • +

    CobiGen - Eclipse Plug-in v7.1.0

    +
  • +
  • +

    CobiGen - Maven Build Plug-in v7.1.0

    +
  • +
  • +

    CobiGen CLI v1.2.0

    +
  • +
+
+
+
+
    +
  • +

    CobiGen v7.1.0

    +
  • +
  • +

    CobiGen - Java Plug-in v7.1.0

    +
  • +
  • +

    CobiGen - XML Plug-in v7.0.0

    +
  • +
  • +

    CobiGen - TypeScript Plug-in v7.1.0

    +
  • +
  • +

    CobiGen - Property Plug-in v7.1.0

    +
  • +
  • +

    CobiGen - Text Merger v7.1.1

    +
  • +
  • +

    CobiGen - JSON Plug-in v7.0.0

    +
  • +
  • +

    CobiGen - HTML Plug-in v7.0.0

    +
  • +
  • +

    CobiGen - Open API Plug-in v7.1.0

    +
  • +
  • +

    CobiGen - FreeMarker Template Engine v7.0.0

    +
  • +
  • +

    CobiGen - Velocity Template Engine v7.0.0

    +
  • +
+
+
+

Authors:

+
+
+
+
* Malte Brunnlieb
+* Jaime Diaz Gonzalez
+* Steffen Holzer
+* Ruben Diaz Martinez
+* Joerg Hohwiller
+* Fabian Kreis
+* Lukas Goerlach
+* Krati Shah
+* Christian Richter
+* Erik Grüner
+* Mike Schumacher
+* Marco Rose
+* Mohamed Ghanmi
+
+
+
+

==Guide to the Reader

+
+
+

Dependent on the intention you are reading this document, you might be most interested in the following chapters:

+
+
+
    +
  • +

    If this is your first contact with CobiGen, you will be interested in the general purpose of CobiGen, in the licensing of CobiGen, as well as in the Shared Service provided for CobiGen. Additionally, there are some general use cases, which are currently implemented and maintained to be used out of the box.

    +
  • +
  • +

    As a user of the CobiGen Eclipse integration, you should focus on the Installation and Usage chapters to get a good introduction about how to use CobiGen in eclipse.

    +
  • +
  • +

    As a user of the Maven integration, you should focus on the Maven configuration chapter, which guides you through the integration of CobiGen into your build configuration.

    +
  • +
  • +

    If you like to adapt the configuration of CobiGen, you have to step deeper into the configuration guide as well as into the plug-in configuration extensions for the Java Plug-in, XML-Plugin, Java Property Plug-in, as well as for the Text-Merger Plug-in.

    +
  • +
  • +

    Finally, if you want to develop your own templates, you will be thankful for helpful links in addition to the plug-ins documentation as referenced in the previous point.

    +
  • +
+
+ +
+

==CobiGen - Code-based incremental Generator

+
+
+
Overview
+
+

CobiGen is a generic incremental generator for end to end code generation tasks, mostly used in Java projects. +Due to a template-based approach, CobiGen generates any set of text-based documents and document fragments.

+
+
+

Input (currently):

+
+
+
    +
  • +

    Java classes

    +
  • +
  • +

    XML-based files

    +
  • +
  • +

    OpenAPI documents

    +
  • +
  • +

    Possibly more inputs like WSDL, which is currently not implemented.

    +
  • +
+
+
+

Output:

+
+
+
    +
  • +

    any text-based document or document fragments specified by templates

    +
  • +
+
+
+
+
Architecture
+
+

CobiGen is built as an extensible framework for incremental code generation. It provides extension points for new input readers which allow reading new input types and converting them to an internally processed model. The model is used to process templates of different kinds to generate patches. The template processing will be done by different template engines. There is an extension point for template engines to support multiple ones as well. Finally, the patch will be structurally merged into potentially already existing code. To allow structural merge on different programming languages, the extension point for structural mergers has been introduced. Here you will see an overview of the currently available extension points and plug-ins:

+
+
+
+
Features and Characteristics
+
+
    +
  • +

    Generate fresh files across all the layers of an application - ready to run.

    +
  • +
  • +

    Add on to existing files merging code into it. E.g. generate new methods into existing java classes or adding nodes to an XML file. Merging of contents into existing files will be done using structural merge mechanisms.

    +
  • +
  • +

    Structural merge mechanisms are currently implemented for Java, XML, Java Property Syntax, JSON, Basic HTML, Text Append, TypeScript.

    +
  • +
  • +

    Conflicts can be resolved individually but automatically by former configuration for each template.

    +
  • +
  • +

    CobiGen provides an Eclipse integration as well as a Maven Integration.

    +
  • +
  • +

    CobiGen comes with an extensive documentation for users and developers.

    +
  • +
  • +

    Templates can be fully tailored to project needs - this is considered as a simple task.

    +
  • +
+
+
+
+
Selection of current and past CobiGen applications
+
+

General applications:

+
+
+
    +
  • +

    Generation of a Java CRUD application based on devonfw architecture including all software-layers on the server plus code for JS-clients (Angular). You can find details here.

    +
  • +
  • +

    Generation of a Java CRUD application according to the Register Factory architecture. Persistence entities are the input for generation.

    +
  • +
  • +

    Generation of builder classes for generating test data for JUnit-Tests. Input are the persistence entities.

    +
  • +
  • +

    Generation of an EXT JS 6 client with full CRUD operations connected to a devon4j server.

    +
  • +
  • +

    Generation of an Angular 6 client with full CRUD operations connected to a devon4j server.

    +
  • +
+
+
+

Project-specific applications in the past:

+
+
+
    +
  • +

    Generation of an additional Java type hierarchy on top of existing Java classes in combination with additional methods to be integrated in the modified classes. Hibernate entities were considered as input as well as output of the generation. The rational in this case, was to generate an additional business object hierarchy on top of an existing data model for efficient business processing.

    +
  • +
  • +

    Generation of hash- and equals-methods as well as copy constructors depending on the field types of the input Java class. Furthermore, CobiGen is able to re-generate these methods/constructors triggered by the user, i.e., when fields have been changed.

    +
  • +
  • +

    Extraction of JavaDoc of test classes and their methods for generating a csv test documentation. This test documentation has been further processed manually in Excel to provide a good overview about the currently available tests in the software system, which enables further human analysis.

    +
  • +
+
+ +
+

==General use cases

+
+
+

In addition to the selection of CobiGen applications introduced before, this chapter provides a more detailed overview about the currently implemented and maintained general use cases. These can be used by any project following a supported reference architecture as e.g. the devonfw or Register Factory.

+
+
+
+
devon4j
+
+

With our templates for devon4j, you can generate a whole CRUD application from a single Entity class. You save the effort for creating, DAOs, Transfer Objects, simple CRUD use cases with REST services and even the client application can be generated.

+
+
+
CRUD server application for devon4j
+
+

For the server, the required files for all architectural layers (Data access, logic, and service layer) can be created based on your Entity class. After the generation, you have CRUD functionality for the entity from bottom to top which can be accessed via a RESTful web service. Details are provided in the devonfw wiki.

+
+
+
+
CRUD client application for devon4ng
+
+

Based on the REST services on the server, you can also generate an Angular client based on devon4ng. With the help of Node.js, you have a working client application for displaying your entities within minutes!

+
+
+
+
Test data Builder for devon4j
+
+

Generating a builder pattern for POJOs to easily create test data in your tests. CobiGen is not only able to generate a plain builder pattern but rather builder, which follow a specific concept to minimize test data generation efforts in your unit tests. The following Person class as an example:

+
+
+
Listing 114. Person class
+
+
public class Person {
+
+    private String firstname;
+    private String lastname;
+    private int birthyear;
+    @NotNull
+    private Address address;
+
+    @NotNull
+    public String getFirstname() {
+        return this.firstname;
+    }
+
+    // additional default setter and getter
+}
+
+
+
+

It is a simple POJO with a validation annotation, to indicate, that firstname should never be null. Creating this object in a test would imply to call every setter, which is kind of nasty. Therefore, the Builder Pattern has been introduced for quite a long time in software engineering, allowing to easily create POJOs with a fluent API. See below.

+
+
+
Listing 115. Builder pattern example
+
+
Person person = new PersonBuilder()
+                .firstname("Heinz")
+                .lastname("Erhardt")
+                .birthyear(1909)
+                .address(
+                    new AddressBuilder().postcode("22222")
+                        .city("Hamburg").street("Luebecker Str. 123")
+                        .createNew())
+                .addChild(
+                    new PersonBuilder()[...].createNew()).createNew();
+
+
+
+

The Builder API generated by CobiGen allows you to set any setter-accessible field of a POJO in a fluent way. But in addition, let's assume a test, which should check the birth year as precondition for any business operation. So specifying all other fields of Person, especially firstname as it is mandatory to enter business code, would not make sense. The test behavior should just depend on the specification of the birth year and on no other data. So we would like to just provide this data to the test.

+
+
+

The Builder classes generated by CobiGen try to tackle this inconvenience by providing the ability to declare default values for any mandatory field due to validation or database constraints.

+
+
+
Listing 116. Builder Outline
+
+
public class PersonBuilder {
+
+    private void fillMandatoryFields() {
+        firstname("lasdjfaöskdlfja");
+        address(new AddressBuilder().createNew());
+    };
+    private void fillMandatoryFields_custom() {...};
+
+    public PersonBuilder firstname(String value);
+    public PersonBuilder lastname(String value);
+    ...
+
+    public Person createNew();
+    public Person persist(EntityManager em);
+    public List<Person> persistAndDuplicate(EntityManager em, int count);
+}
+
+
+
+

Looking at the plotted builder API generated by CobiGen, you will find two private methods. The method fillMandatoryFields will be generated by CobiGen and regenerated every time CobiGen generation will be triggered for the Person class. This method will set every automatically detected field with not null constraints to a default value. However, by implementing fillMandatoryFields_custom on your own, you can reset these values or even specify more default values for any other field of the object. Thus, running new PersonBuilder().birthyear(1909).createNew(); will create a valid object of Person, which is already pre-filled such that it does not influence the test execution besides the fact that it circumvents database and validation issues.

+
+
+

This even holds for complex data structures as indicated by address(new AddressBuilder().createNew());. Due to the use of the AddressBuilder for setting the default value for the field address, also the default values for Address will be set automatically.

+
+
+

Finally, the builder API provides different methods to create new objects.

+
+
+
    +
  • +

    createNew() just creates a new object from the builder specification and returns it.

    +
  • +
  • +

    persist(EntityManager) will create a new object from the builder specification and persists it to the database.

    +
  • +
  • +

    persistAndDuplicate(EntityManager, int) will create the given number of objects from the builder specification and persists all of these. After the initial generation of each builder, you might want to adapt the method body as you will most probably not be able to persist more than one object with the same field assignments to the database due to unique constraints. Thus, please see the generated comment in the method to adapt unique fields accordingly before persisting to the database.

    +
  • +
+
+
+Custom Builder for Business Needs +
+

CobiGen just generates basic builders for any POJO. However, for project needs you probably would like to have even more complex builders, which enable the easy generation of more complex test data which are encoded in a large object hierarchy. Therefore, the generated builders can just be seen as a tool to achieve this. You can define your own business driven builders in the same way as the generated builders, but explicitly focusing on your business needs. Just take this example as a demonstration of that idea:

+
+
+
+
  University uni = new ComplexUniversityBuilder()
+    .withStudents(200)
+    .withProfessors(4)
+    .withExternalStudent()
+    .createNew();
+
+
+
+

E.g. the method withExternalStudent() might create a person, which is a student and is flagged to be an external student. Basing this implementation on the generated builders will even assure that you would benefit from any default values you have set before. In addition, you can even imagine more complex builder methods setting values driven by your reusable testing needs based on the specific business knowledge.

+
+
+
+
+
+
Register Factory
+
+
CRUD server application
+
+

Generates a CRUD application with persistence entities as inputs. This includes DAOs, TOs, use cases, as well as a CRUD JSF user interface if needed.

+
+
+
+
Test data Builder
+ +
+
+
Test documentation
+
+

Generate test documentation from test classes. The input are the doclet tags of several test classes, which e.g. can specify a description, a cross-reference, or a test target description. The result currently is a csv file, which lists all tests with the corresponding meta-information. Afterwards, this file might be styled and passed to the customer if needed and it will be up-to-date every time!

+
+
+
+
+
+

26.2. CobiGen

+ +
+

==Configuration

+
+
+

CobiGen is maintaining a home directory further referenced in this documentation as $cghome, which is used to maintain temporary or transient data. The home folder is determined with the following location fall-back:

+
+
+
    +
  1. +

    System environment variable COBIGEN_HOME (e.g. C:\project\ide\conf\cobigen-home)

    +
  2. +
  3. +

    .cobigen directory in OS user home (e.g. ~/.cobigen)

    +
  4. +
+
+
+

The actual configuration of CobiGen is maintained by a single folder or jar. The location can be configured with respect to the implemented configuration fall-back mechanism. CobiGen will search for the location of the configuration in the following order:

+
+
+
    +
  1. +

    A configuration jar or directory, which is passed to CobiGen by the Maven or Eclipse integration or any other program using the CobiGen programming interface: +1.1. the Maven integration allows to configure a jar dependency to be included in the currently running classpath (of interest for maven configuration +1.2. the Eclipse integration allows to specify a CobiGen_Templates project in the eclipse workspace

    +
  2. +
  3. +

    The file $cghome/.cobigen exists and the property templates is set to a valid configuration (e.g. templates=C:\project\ide\conf\templates or templates=C:\project\ide\conf\templates.jar) Hint: Check for log entry like Value of property templates in $cghome/.cobigen is invalid to identify an invalid configuration which is not taken up as expected

    +
  4. +
  5. +

    The folder $cghome/templates/CobiGen_Templates exists

    +
  6. +
  7. +

    The lexicographical sorted first configuration jar of the following path pattern $cghome/templates/templates-([^-]+)-(\\d+\\.?)+.jar if exists (e.g. templates-devon4j-2020.04.001)

    +
  8. +
  9. +

    CobiGen will automatically download the latest jar configuration from maven central with groupId com.devonfw.cobigen and artifactId templates-devon4j and take it like described in 4.

    +
  10. +
+
+
+

Within the configuration jar or directory you will find the following structure:

+
+
+
+
CobiGen_Templates
+ |- templateFolder1
+    |- templates.xml
+ |- templateFolder2
+    |- templates.xml
+ |- context.xml
+
+
+
+

Find some examples here.

+
+
+
Context Configuration
+
+

The context configuration (context.xml) always has the following root structure:

+
+
+
Listing 117. Context Configuration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<contextConfiguration xmlns="http://capgemini.com"
+                      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                      version="1.0">
+    <triggers>
+        ...
+    </triggers>
+</contextConfiguration>
+
+
+
+

The context configuration has a version attribute, which should match the XSD version the context configuration is an instance of. It should not state the version of the currently released version of CobiGen. This attribute should be maintained by the context configuration developers. If configured correctly, it will provide a better feedback for the user and thus higher user experience. Currently there is only the version v1.0. For further version there will be a changelog later on.

+
+
+
Trigger Node
+
+

As children of the <triggers> node you can define different triggers. By defining a <trigger> you declare a mapping between special inputs and a templateFolder, which contains all templates, which are worth to be generated with the given input.

+
+
+
Listing 118. trigger configuration
+
+
<trigger id="..." type="..." templateFolder="..." inputCharset="UTF-8" >
+    ...
+</trigger>
+
+
+
+
    +
  • +

    The attribute id should be unique within a context configuration. It is necessary for efficient internal processing.

    +
  • +
  • +

    The attribute type declares a specific trigger interpreter, which might be provided by additional plug-ins. A trigger interpreter has to provide an input reader, which reads specific inputs and creates a template object model out of it to be processed by the FreeMarker template engine later on. Have a look at the plug-in’s documentation of your interest and see, which trigger types and thus inputs are currently supported.

    +
  • +
  • +

    The attribute templateFolder declares the relative path to the template folder, which will be used if the trigger gets activated.

    +
  • +
  • +

    The attribute inputCharset (optional) determines the charset to be used for reading any input file.

    +
  • +
+
+
+
+
Matcher Node
+
+

A trigger will be activated if its matchers hold the following formula:

+
+
+

!(NOT || …​ || NOT) && AND && …​ && AND && (OR || …​ || OR)

+
+
+

Whereas NOT/AND/OR describes the accumulationType of a matcher (see below) and e.g. NOT means 'a matcher with accumulationType NOT matches a given input'. Thus additionally to an input reader, a trigger interpreter has to define at least one set of matchers, which are satisfiable, to be fully functional. A <matcher> node declares a specific characteristics a valid input should have.

+
+
+
Listing 119. Matcher Configuration
+
+
<matcher type="..." value="..." accumulationType="...">
+    ...
+</matcher>
+
+
+
+
    +
  • +

    The attribute type declares a specific type of matcher, which has to be provided by the surrounding trigger interpreter. Have a look at the plug-in’s documentation, which also provides the used trigger type for more information about valid matcher and their functionalities.

    +
  • +
  • +

    The attribute value might contain any information necessary for processing the matcher’s functionality. Have a look at the relevant plug-in’s documentation for more detail.

    +
  • +
  • +

    The attribute accumulationType (optional) specifies how the matcher will influence the trigger activation. Valid values are:

    +
    +
      +
    • +

      OR (default): if any matcher of accumulation type OR matches, the trigger will be activated as long as there are no further matchers with different accumulation types

      +
    • +
    • +

      AND: if any matcher with AND accumulation type does not match, the trigger will not be activated

      +
    • +
    • +

      NOT: if any matcher with NOT accumulation type matches, the trigger will not be activated

      +
    • +
    +
    +
  • +
+
+
+
+
Variable Assignment Node
+
+

Finally, a <matcher> node can have multiple <variableAssignment> nodes as children. Variable assignments allow to parametrize the generation by additional values, which will be added to the object model for template processing. The variables declared using variable assignments, will be made accessible in the templates.xml as well as in the object model for template processing via the namespace variables.*.

+
+
+
Listing 120. Complete Configuration Pattern
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<contextConfiguration xmlns="http://capgemini.com"
+                      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                      version="1.0">
+    <triggers>
+        <trigger id="..." type="..." templateFolder="...">
+            <matcher type="..." value="...">
+                <variableAssignment type="..." key="..." value="..." />
+            </matcher>
+        </trigger>
+    </triggers>
+</contextConfiguration>
+
+
+
+
    +
  • +

    The attribute type declares the type of variable assignment to be processed by the trigger interpreter providing plug-in. This attribute enables variable assignments with different dynamic value resolutions.

    +
  • +
  • +

    The attribute key declares the namespace under which the resolved value will be accessible later on.

    +
  • +
  • +

    The attribute value might declare a constant value to be assigned or any hint for value resolution done by the trigger interpreter providing plug-in. For instance, if type is regex, then on value you will assign the matched group number by the regex (1, 2, 3…​)

    +
  • +
+
+
+
+
Container Matcher Node
+
+

The <containerMatcher> node is an additional matcher for matching containers of multiple input objects. +Such a container might be a package, which encloses multiple types or---more generic---a model, which encloses multiple elements. A container matcher can be declared side by side with other matchers:

+
+
+
Listing 121. ContainerMatcher Declaration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<contextConfiguration xmlns="http://capgemini.com"
+                      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                      version="1.0">
+    <triggers>
+        <trigger id="..." type="..." templateFolder="..." >
+            <containerMatcher type="..." value="..." retrieveObjectsRecursively="..." />
+            <matcher type="..." value="...">
+                <variableAssignment type="..." variable="..." value="..." />
+            </matcher>
+        </trigger>
+    </triggers>
+</contextConfiguration>
+
+
+
+
    +
  • +

    The attribute type declares a specific type of matcher, which has to be provided by the surrounding trigger interpreter. Have a look at the plug-in’s documentation, which also provides the used trigger type for more information about valid matcher and their functionalities.

    +
  • +
  • +

    The attribute value might contain any information necessary for processing the matcher’s functionality. Have a look at the relevant plug-in’s documentation for more detail.

    +
  • +
  • +

    The attribute retrieveObjectsRecursively (optional boolean) states, whether the children of the input should be retrieved recursively to find matching inputs for generation.

    +
  • +
+
+
+

The semantics of a container matcher are the following:

+
+
+
    +
  • +

    A <containerMatcher> does not declare any <variableAssignment> nodes

    +
  • +
  • +

    A <containerMatcher> matches an input if and only if one of its enclosed elements satisfies a set of <matcher> nodes of the same <trigger>

    +
  • +
  • +

    Inputs, which match a <containerMatcher> will cause a generation for each enclosed element

    +
  • +
+
+
+
+
+
Templates Configuration
+
+

The template configuration (templates.xml) specifies which templates exist and under which circumstances they will be generated. There are two possible configuration styles:

+
+
+
    +
  1. +

    Configure the template meta-data for each template file by template nodes

    +
  2. +
  3. +

    (since cobigen-core-v1.2.0): Configure templateScan nodes to automatically retrieve a default configuration for all files within a configured folder and possibly modify the automatically configured templates using templateExtension nodes

    +
  4. +
+
+
+

To get an intuition of the idea, the following will initially describe the first (more extensive) configuration style. Such a configuration root structure looks as follows:

+
+
+
Listing 122. Extensive Templates Configuration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<templatesConfiguration xmlns="http://capgemini.com"
+                        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                        version="1.0" templateEngine="FreeMarker">
+    <templates>
+            ...
+    </templates>
+    <increments>
+            ...
+    </increments>
+</templatesConfiguration>
+
+
+
+

The root node <templatesConfiguration> specifies two attributes. The attribute version provides further usability support and will be handled analogous to the version attribute of the context configuration. The optional attribute templateEngine specifies the template engine to be used for processing the templates (since `cobigen-core-4.0.0`). By default it is set to FreeMarker.

+
+
+

The node <templatesConfiguration> allows two different grouping nodes as children. First, there is the <templates> node, which groups all declarations of templates. Second, there is the <increments> node, which groups all declarations about increments.

+
+
+
Template Node
+
+

The <templates> node groups multiple <template> declarations, which enables further generation. Each template file should be registered at least once as a template to be considered.

+
+
+
Listing 123. Example Template Configuration
+
+
<templates>
+    <template name="..." destinationPath="..." templateFile="..." mergeStrategy="..." targetCharset="..." />
+    ...
+</templates>
+
+
+
+

A template declaration consists of several pieces of information:

+
+
+
    +
  • +

    The attribute name specifies a unique ID within the templates configuration, which will later be reused in the increment definitions.

    +
  • +
  • +

    The attribute destinationPath specifies the destination path the template will be generated to. It is possible to use all variables defined by variable assignments within the path declaration using the FreeMarker syntax ${variables.*}. While resolving the variable expressions, each dot within the value will be automatically replaced by a slash. This behavior is accounted for by the transformations of Java packages to paths as CobiGen has first been developed in the context of the Java world. Furthermore, the destination path variable resolution provides the following additional built-in operators analogue to the FreeMarker syntax:

    +
    +
      +
    • +

      ?cap_first analogue to FreeMarker

      +
    • +
    • +

      ?uncap_first analogue to FreeMarker

      +
    • +
    • +

      ?lower_case analogue to FreeMarker

      +
    • +
    • +

      ?upper_case analogue to FreeMarker

      +
    • +
    • +

      ?replace(regex, replacement) - Replaces all occurrences of the regular expression regex in the variable’s value with the given replacement string. (since cobigen-core v1.1.0)

      +
    • +
    • +

      ?removeSuffix(suffix) - Removes the given suffix in the variable’s value iff the variable’s value ends with the given suffix. Otherwise nothing will happen. (since cobigen-core v1.1.0)

      +
    • +
    • +

      ?removePrefix(prefix) - Analogue to ?removeSuffix but removes the prefix of the variable’s value. (since cobigen-core v1.1.0)

      +
    • +
    +
    +
  • +
  • +

    The attribute templateFile describes the relative path dependent on the template folder specified in the trigger to the template file to be generated.

    +
  • +
  • +

    The attribute mergeStrategy (optional) can be optionally specified and declares the type of merge mechanism to be used, when the destinationPath points to an already existing file. CobiGen by itself just comes with a mergeStrategy override, which enforces file regeneration in total. Additional available merge strategies have to be obtained from the different plug-in’s documentations (see here for java, XML, properties, and text). Default: not set (means not mergeable)

    +
  • +
  • +

    The attribute targetCharset (optional) can be optionally specified and declares the encoding with which the contents will be written into the destination file. This also includes reading an existing file at the destination path for merging its contents with the newly generated ones. Default: UTF-8

    +
  • +
+
+
+

(Since version 4.1.0) It is possible to reference external template (templates defined on another trigger), thanks to using <incrementRef …​> that are explained here.

+
+
+
+
Template Scan Node
+
+

(since cobigen-core-v1.2.0)

+
+
+

The second configuration style for template meta-data is driven by initially scanning all available templates and automatically configure them with a default set of meta-data. A scanning configuration might look like this:

+
+
+
Listing 124. Example of Template-scan configuration
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<templatesConfiguration xmlns="http://capgemini.com"
+                        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                        version="1.2">
+    <templateScans>
+        <templateScan templatePath="templates" templateNamePrefix="prefix_" destinationPath="src/main/java"/>
+    </templateScans>
+</templatesConfiguration>
+
+
+
+

You can specify multiple <templateScan …​> nodes for different templatePaths and different templateNamePrefixes.

+
+
+
    +
  • +

    The name can be specified to later on reference the templates found by a template-scan within an increment. (since cobigen-core-v2.1.)

    +
  • +
  • +

    The templatePath specifies the relative path from the templates.xml to the root folder from which the template scan should be performed.

    +
  • +
  • +

    The templateNamePrefix (optional) defines a common id prefix, which will be added to all found and automatically configured templates.

    +
  • +
  • +

    The destinationPath defines the root folder all found templates should be generated to, whereas the root folder will be a prefix for all found and automatically configured templates.

    +
  • +
+
+
+

A templateScan will result in the following default configuration of templates. For each file found, a new template will be created virtually with the following default values:

+
+
+
    +
  • +

    id: file name without .ftl extension prefixed by templateNamePrefix from template-scan

    +
  • +
  • +

    destinationPath: relative file path of the file found with the prefix defined by destinationPath from template-scan. Furthermore,

    +
    +
      +
    • +

      it is possible to use the syntax for accessing and modifying variables as described for the attribute destinationPath of the template node, besides the only difference, that due to file system restrictions you have to replace all ?-signs (for built-ins) with #-signs.

      +
    • +
    • +

      the files to be scanned, should provide their final file extension by the following file naming convention: <filename>.<extension>.ftl Thus the file extension .ftl will be removed after generation.

      +
    • +
    +
    +
  • +
  • +

    templateFile: relative path to the file found

    +
  • +
  • +

    mergeStrategy: (optional) not set means not mergeable

    +
  • +
  • +

    targetCharset: (optional) defaults to UTF-8

    +
  • +
+
+
+

(Since version 4.1.0) It is possible to reference external templateScan (templateScans defined on another trigger), thanks to using <incrementRef …​> that are explained here.

+
+
+
+
Template Extension Node
+
+

(since cobigen-core-v1.2.0)

+
+
+

Additionally to the templateScan declaration it is easily possible to rewrite specific attributes for any scanned and automatically configured template.

+
+
+
Listing 125. Example Configuration of a TemplateExtension
+
+
<templates>
+    <templateExtension ref="prefix_FooClass.java" mergeStrategy="javamerge" />
+</templates>
+
+<templateScans>
+    <templateScan templatePath="foo" templateNamePrefix="prefix_" destinationPath="src/main/java/foo"/>
+</templateScans>
+
+
+
+

Let's assume that the above example declares a template-scan for the folder foo, which contains a file FooClass.java.ftl in any folder depth. Thus the template scan will automatically create a virtual template declaration with id=prefix_FooClass.java and further default configuration.

+
+
+

Using the templateExtension declaration above will reference the scanned template by the attribute ref and overrides the mergeStrategy of the automatically configured template by the value javamerge. Thus we are able to minimize the needed templates configuration.

+
+
+

(Since version 4.1.0) It is possible to reference external templateExtension (templateExtensions defined on another trigger), thanks to using <incrementRef …​> that are explained here.

+
+
+
+
Increment Node
+
+

The <increments> node groups multiple <increment> nodes, which can be seen as a collection of templates to be generated. An increment will be defined by a unique id and a human readable description.

+
+
+
+
<increments>
+    <increment id="..." description="...">
+        <incrementRef ref="..." />
+        <templateRef ref="..." />
+        <templateScanRef ref="..." />
+    </increment>
+</increments>
+
+
+
+

An increment might contain multiple increments and/or templates, which will be referenced using <incrementRef …​>, <templateRef …​>, resp. <templateScanRef …​> nodes. These nodes only declare the attribute ref, which will reference an increment, a template, or a template-scan by its id or name.

+
+
+

(Since version 4.1.0) A special case of <incrementRef …​> is the external incrementsRef. By default, <incrementRef …​> are used to reference increments defined in the same templates.xml file. So for example, we could have:

+
+
+
+
<increments>
+    <increment id="incA" description="...">
+        <incrementRef ref="incB" />
+    </increment>
+    <increment id="incB" description="...">
+        <templateRef .... />
+        <templateScan .... />
+    </increment>
+</increments>
+
+
+
+

However, if we want to reference an increment that is not defined inside our templates.xml (an increment defined for another trigger), then we can use an external incrementRef as shown below:

+
+
+
+
<increment name="..." description="...">
+    <incrementRef ref="trigger_id::increment_id"/>
+</increment>
+
+
+
+

The ref string is split using :: as a delimiter. The first part of the string is the trigger_id to reference. That trigger contains an increment_id. Currently, this functionality only works when both templates use the same kind of input file.

+
+
+
+
+
Java Template Logic
+
+

since cobigen-core-3.0.0 which is included in the Eclipse and Maven Plugin since version 2.0.0 +In addition, it is possible to implement more complex template logic by custom Java code. To enable this feature, you can simply import the CobiGen_Templates by clicking on Adapt Templates, turn it into a simple maven project (if it is not already) and implement any Java logic in the common maven layout (e.g. in the source folder src/main/java). Each Java class will be instantiated by CobiGen for each generation process. Thus, you can even store any state within a Java class instance during generation. However, there is currently no guarantee according to the template processing order.

+
+
+

As a consequence, you have to implement your Java classes with a public default (non-parameter) constructor to be used by any template. Methods of the implemented Java classes can be called within templates by the simple standard FreeMarker expression for calling Bean methods: SimpleType.methodName(param1). Until now, CobiGen will shadow multiple types with the same simple name non-deterministically. So please prevent yourself from that situation.

+
+
+

Finally, if you would like to do some reflection within your Java code accessing any type of the template project or any type referenced by the input, you should load classes by making use of the classloader of the util classes. CobiGen will take care of the correct classloader building including the classpath of the input source as well as of the classpath of the template project. If you use any other classloader or build it by your own, there will be no guarantee, that generation succeeds.

+
+
+
+
Template Properties
+
+

since cobigen-core-4.0.0` +Using a configuration with `template scan, you can make use of properties in templates specified in property files named cobigen.properties next to the templates. The property files are specified as Java property files. Property files can be nested in sub-folders. Properties will be resolved including property shading. Properties defined nearest to the template to be generated will take precedence. +In addition, a cobigen.properties file can be specified in the target folder root (in eclipse plugin, this is equal to the source project root). These properties take precedence over template properties specified in the template folder.

+
+
+ + + + + +
+ + +It is not allowed to override context variables in cobigen.properties specifications as we have not found any interesting use case. This is most probably an error of the template designer, CobiGen will raise an error in this case. +
+
+
+
Multi module support or template target path redirects
+
+

since cobigen-core-4.0.0` +One special property you can specify in the template properties is the property `relocate. It will cause the current folder and its sub-folders to be relocated at destination path resolution time. Take the following example:

+
+
+
+
folder
+  - sub1
+    Template.java.ftl
+    cobigen.properties
+
+
+
+

Let the cobigen.properties file contain the line relocate=../sub2/${cwd}. Given that, the relative destination path of Template.java.ftl will be resolved to folder/sub2/Template.java. Compare template scan configuration for more information about basic path resolution. The relocate property specifies a relative path from the location of the cobigen.properties. The ${cwd} placeholder will contain the remaining relative path from the cobigen.properties location to the template file. In this basic example it just contains Template.java.ftl, but it may even be any relative path including sub-folders of sub1 and its templates. +Given the relocate feature, you can even step out of the root path, which in general is the project/maven module the input is located in. This enables template designers to even address, e.g., maven modules located next to the module the input is coming from.

+
+
+
+
+
Basic Template Model
+
+

In addition to what is served by the different model builders of the different plug-ins, CobiGen provides a minimal model based on context variables as well as CobiGen properties. The following model is independent of the input format and will be served as a template model all the time:

+
+
+ +
+
+
+
Plugin Mechanism
+
+

Since cobigen-core 4.1.0, we changed the plug-in discovery mechanism. So far it was necessary to register new plugins programmatically, which introduced the need to let every tool integration, i.e. for eclipse or maven, be dependent on every plug-in, which should be released. This made release cycles take a long time as all plug-ins had to be integrated into a final release of the maven or eclipse integration.

+
+
+

Now, plug-ins are automatically discovered by the Java Service Loader mechanism from the classpath. This also affects the setup of the eclipse and maven integration to allow modular releases of CobiGen in future. We are now able to provide faster rollouts of bug-fixes in any of the plug-ins as they can be released completely independently.

+
+
+
+

26.2.7. Plug-ins

+ +
+

== Java Plug-in

The CobiGen Java Plug-in comes with a new input reader for java artifacts, new java related trigger and matchers, as well as a merging mechanism for Java sources.

+
+
+
Trigger extension
+
+

The Java Plug-in provides a new trigger for Java related inputs. It accepts different representations as inputs (see Java input reader) and provides additional matching and variable assignment mechanisms. The configuration in the context.xml for this trigger looks like this:

+
+
+
    +
  • +

    type 'java'

    +
    +
    Listing 126. Example of a java trigger definition
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables Java elements as inputs.

    +
    +
  • +
+
+
+Matcher types +
+

With the trigger you might define matchers, which restrict the input upon specific aspects:

+
+
+
    +
  • +

    type fqn → full qualified name matching

    +
    +
    Listing 127. Example of a java trigger definition with a full qualified name matcher
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="fqn" value="(.+)\.persistence\.([^\.]+)\.entity\.([^\.]+)">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the full qualified name (fqn) of the declaring input class matches the given regular expression (value).

    +
    +
  • +
  • +

    type 'package' → package name of the input

    +
    +
    Listing 128. Example of a java trigger definition with a package name matcher
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="package" value="(.+)\.persistence\.([^\.]+)\.entity">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the package name (package) of the declaring input class matches the given regular expression (value).

    +
    +
  • +
  • +

    type 'expression'

    +
    +
    Listing 129. Example of a java trigger definition with an expression matcher
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="expression" value="instanceof java.lang.String">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the expression evaluates to true. Valid expressions are

    +
    +
  • +
  • +

    instanceof fqn: checks an 'is a' relation of the input type

    +
  • +
  • +

    isAbstract: checks, whether the input type is declared abstract

    +
  • +
+
+
+
+Container Matcher types +
+

Additionally, the java plugin provides the ability to match packages (containers) as follows:

+
+
+
    +
  • +

    type 'package'

    +
    +
    Listing 130. Example of a java trigger definition with a container matcher for packages
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <containerMatcher type="package" value="com\.example\.app\.component1\.persistence.entity" />
    +</trigger>
    +
    +
    +
    +

    The container matcher matches packages provided by the type com.capgemini.cobigen.javaplugin.inputreader.to.PackageFolder with a regular expression stated in the value attribute. (See containerMatcher semantics to get more information about containerMatchers itself.)

    +
    +
  • +
+
+
+
+Variable Assignment types +
+

Furthermore, it provides the ability to extract information from each input for further processing in the templates. The values assigned by variable assignments will be made available in template and the destinationPath of context.xml through the namespace variables.<key>. The Java Plug-in currently provides two different mechanisms:

+
+
+
    +
  • +

    type 'regex' → regular expression group

    +
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="fqn" value="(.+)\.persistence\.([^\.]+)\.entity\.([^\.]+)">
    +        <variableAssignment type="regex" key="rootPackage" value="1" />
    +        <variableAssignment type="regex" key="component" value="2" />
    +        <variableAssignment type="regex" key="pojoName" value="3" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value of the given regular expression group number to the given key.

+
+
+
    +
  • +

    type 'constant' → constant parameter

    +
    +
    +
    <trigger id="..." type="java" templateFolder="...">
    +    <matcher type="fqn" value="(.+)\.persistence\.([^\.]+)\.entity\.([^\.]+)">
    +        <variableAssignment type="constant" key="domain" value="restaurant" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value to the key as a constant.

+
+
+
+Java input reader +
+

The CobiGen Java Plug-in implements an input reader for parsed java sources as well as for java Class<?> objects (loaded by reflection). So API users can pass Class<?> objects as well as JavaClass objects for generation. The latter depends on QDox, which will be used for parsing and merging java sources. For getting the right parsed java inputs you can easily use the JavaParserUtil, which provides static functionality to parse java files and get the appropriate JavaClass object.

+
+
+

Furthermore, due to restrictions on both inputs according to model building (see below), it is also possible to provide an array of length two as an input, which contains the Class<?> as well as the JavaClass object of the same class.

+
+
+Template object model +
+

No matter whether you use reflection objects or parsed java classes as input, you will get the following object model for template creation:

+
+
+
    +
  • +

    classObject ('Class' :: Class object of the Java input)

    +
  • +
  • +

    POJO

    +
    +
      +
    • +

      name ('String' :: Simple name of the input class)

      +
    • +
    • +

      package ('String' :: Package name of the input class)

      +
    • +
    • +

      canonicalName ('String' :: Full qualified name of the input class)

      +
    • +
    • +

      annotations ('Map<String, Object>' :: Annotations, which will be represented by a mapping of the full qualified type of an annotation to its value. To gain template compatibility, the key will be stored with '_' instead of '.' in the full qualified annotation type. Furthermore, the annotation might be recursively defined and thus be accessed using the same type of mapping. Example ${pojo.annotations.javax_persistence_Id})

      +
    • +
    • +

      JavaDoc ('Map<String, Object>') :: A generic way of addressing all available JavaDoc doclets and comments. The only fixed variable is comment (see below). All other provided variables depend on the doclets found while parsing. The value of a doclet can be accessed by the doclets name (e.g. ${…​JavaDoc.author}). In case of doclet tags that can be declared multiple times (currently @param and @throws), you will get a map, which you access in a specific way (see below).

      +
      +
        +
      • +

        comment ('String' :: JavaDoc comment, which does not include any doclets)

        +
      • +
      • +

        params ('Map<String,String> :: JavaDoc parameter info. If the comment follows proper conventions, the key will be the name of the parameter and the value being its description. You can also access the parameters by their number, as in arg0, arg1 etc, following the order of declaration in the signature, not in order of JavaDoc)

        +
      • +
      • +

        throws ('Map<String,String> :: JavaDoc exception info. If the comment follows proper conventions, the key will be the name of the thrown exception and the value being its description)

        +
      • +
      +
      +
    • +
    • +

      extendedType ('Map<String, Object>' :: The supertype, represented by a set of mappings (since cobigen-javaplugin v1.1.0)

      +
      +
        +
      • +

        name ('String' :: Simple name of the supertype)

        +
      • +
      • +

        canonicalName ('String' :: Full qualified name of the supertype)

        +
      • +
      • +

        package ('String' :: Package name of the supertype)

        +
      • +
      +
      +
    • +
    • +

      implementedTypes ('List<Map<String, Object>>' :: A list of all implementedTypes (interfaces) represented by a set of mappings (since cobigen-javaplugin v1.1.0)

      +
      +
        +
      • +

        interface ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          name ('String' :: Simple name of the interface)

          +
        • +
        • +

          canonicalName ('String' :: Full qualified name of the interface)

          +
        • +
        • +

          package ('String' :: Package name of the interface)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      fields ('List<Map<String, Object>>' :: List of fields of the input class) (renamed since cobigen-javaplugin v1.2.0; previously attributes)

      +
      +
        +
      • +

        field ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          name ('String' :: Name of the Java field)

          +
        • +
        • +

          type ('String' :: Type of the Java field)

          +
        • +
        • +

          canonicalType ('String' :: Full qualified type declaration of the Java field’s type)

          +
        • +
        • +

          'isId' (Deprecated :: boolean :: true if the Java field or its setter or its getter is annotated with the javax.persistence.Id annotation, false otherwise. Equivalent to ${pojo.attributes[i].annotations.javax_persistence_Id?has_content})

          +
        • +
        • +

          JavaDoc (see pojo.JavaDoc)

          +
        • +
        • +

          annotations (see pojo.annotations with the remark, that for fields all annotations of its setter and getter will also be collected)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      methodAccessibleFields ('List<Map<String, Object>>' :: List of fields of the input class or its inherited classes, which are accessible using setter and getter methods)

      +
      +
        +
      • +

        same as for field (but without JavaDoc!)

        +
      • +
      +
      +
    • +
    • +

      methods ('List<Map<String, Object>>' :: The list of all methods, whereas one method will be represented by a set of property mappings)

      +
      +
        +
      • +

        method ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          name ('String' :: Name of the method)

          +
        • +
        • +

          JavaDoc (see pojo.JavaDoc)

          +
        • +
        • +

          annotations (see pojo.annotations)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+

Furthermore, when providing a Class<?> object as input, the Java Plug-in will provide additional functionalities as template methods (deprecated):

+
+
+
    +
  1. +

    isAbstract(String fqn) (Checks whether the type with the given full qualified name is an abstract class. Returns a Boolean value.) (since cobigen-javaplugin v1.1.1) (deprecated)

    +
  2. +
  3. +

    isSubtypeOf(String subType, String superType) (Checks whether the subType declared by its full qualified name is a sub type of the superType declared by its full qualified name. Equals the Java expression subType instanceof superType and so also returns a Boolean value.) (since cobigen-javaplugin v1.1.1) (deprecated)

    +
  4. +
+
+
+
+Model Restrictions +
+

As stated before, both inputs (Class<?> objects and JavaClass objects) have their restrictions according to model building. In the following these restrictions are listed for both models: the ParsedJava Model, which results from a JavaClass input, and the ReflectedJava Model, which results from a Class<?> input.

+
+
+

It is important to understand, that these restrictions are only present if you work with either Parsed Model OR the Reflected Model. If you use the Maven Build Plug-in or Eclipse Plug-in these two models are merged together so that they can mutually compensate their weaknesses.

+
+
+Parsed Model +
+
    +
  • +

    annotations of the input’s supertype are not accessible due to restrictions in the QDox library. So pojo.methodAccessibleFields[i].annotations will always be empty for super type fields.

    +
  • +
  • +

    annotations' parameter values are available as Strings only (e.g. the Boolean value true is transformed into "true"). This also holds for the Reflected Model.

    +
  • +
  • +

    fields of "supertypes" of the input JavaClass are not available at all. So pojo.methodAccessibleFields will only contain the input type’s and the direct superclass’s fields.

    +
  • +
  • +

    [resolved, since cobigen-javaplugin 1.3.1] field types of supertypes are always canonical. So pojo.methodAccessibleFields[i].type will always provide the same value as pojo.methodAccessibleFields[i].canonicalType (e.g. java.lang.String instead of the expected String) for super type fields.

    +
  • +
+
+
+
+Reflected Model +
+
    +
  • +

    annotations' parameter values are available as Strings only (e.g. the Boolean value true is transformed into "true"). This also holds for the Parsed Model.

    +
  • +
  • +

    annotations are only available if the respective annotation has @Retention(value=RUNTIME), otherwise the annotations are to be discarded by the compiler or by the VM at run time. For more information see RetentionPolicy.

    +
  • +
  • +

    information about generic types is lost. E.g. a field’s/ methodAccessibleField’s type for List<String> can only be provided as List<?>.

    +
  • +
+
+
+
+
+
+
+
Merger extensions
+
+

The Java Plug-in provides two additional merging strategies for Java sources, which can be configured in the templates.xml:

+
+
+
    +
  • +

    Merge strategy javamerge (merges two Java resources and keeps the existing Java elements on conflicts)

    +
  • +
  • +

    Merge strategy javamerge_override (merges two Java resources and overrides the existing Java elements on conflicts)

    +
  • +
+
+
+

In general merging of two Java sources will be processed as follows:

+
+
+

Precondition of processing a merge of generated contents and existing ones is a common Java root class resp. surrounding class. If this is the case this class and all further inner classes will be merged recursively. Therefore, the following Java elements will be merged and conflicts will be resolved according to the configured merge strategy:

+
+
+
    +
  • +

    extends and implements relations of a class: Conflicts can only occur for the extends relation.

    +
  • +
  • +

    Annotations of a class: Conflicted if an annotation declaration already exists.

    +
  • +
  • +

    Fields of a class: Conflicted if there is already a field with the same name in the existing sources. (Will be replaced / ignored in total, also including annotations)

    +
  • +
  • +

    Methods of a class: Conflicted if there is already a method with the same signature in the existing sources. (Will be replaced / ignored in total, also including annotations)

    +
  • +
+
+ +
+

== Property Plug-in

The CobiGen Property Plug-in currently only provides different merge mechanisms for documents written in Java property syntax.

+
+
+
+
Merger extensions
+
+

There are two merge strategies for Java properties, which can be configured in the templates.xml:

+
+
+
    +
  • +

    Merge strategy propertymerge (merges two properties documents and keeps the existing properties on conflicts)

    +
  • +
  • +

    Merge strategy propertymerge_override (merges two properties documents and overrides the existing properties on conflicts)

    +
  • +
+
+
+

Both documents (base and patch) will be parsed using the Java 7 API and will be compared according to their keys. Conflicts will occur if a key in the patch already exists in the base document.

+
+ +
+

== XML Plug-in

The CobiGen XML Plug-in comes with an input reader for XML artifacts, XML related trigger and matchers and provides different merge mechanisms for XML result documents.

+
+
+
+
Trigger extension
+
+

(since cobigen-xmlplugin v2.0.0)

+
+
+

The XML Plug-in provides a trigger for XML related inputs. It accepts XML documents as input (see XML input reader) and provides additional matching and variable assignment mechanisms. The configuration in the context.xml for this trigger looks like this:

+
+
+
    +
  • +

    type 'xml'

    +
    +
    Listing 131. Example of a XML trigger definition.
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables XML documents as inputs.

    +
    +
  • +
  • +

    type xpath

    +
    +
    Listing 132. Example of a xpath trigger definition.
    +
    +
    <trigger id="..." type="xpath" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables XML documents as container inputs, which consists of several sub-documents.

    +
    +
  • +
+
+
+Container Matcher type +
+

A ContainerMatcher checks whether the input is a valid container.

+
+
+
    +
  • +

    xpath: type: xpath

    +
    +
    Listing 133. Example of a XML trigger definition with a node name matcher.
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <containerMatcher type="xpath" value="./uml:Model//packagedElement[@xmi:type='uml:Class']">
    +        ...
    +    </containerMatcher>
    +</trigger>
    +
    +
    +
    +

    Before applying any Matcher, this containerMatcher checks whether the XML file contains a node uml:Model with a child node packagedElement which contains an attribute xmi:type with the value uml:Class.

    +
    +
  • +
+
+
+
+Matcher types +
+

With the trigger you might define matchers, which restrict the input upon specific aspects:

+
+
+
    +
  • +

    XML: type nodename → document’s root name matching

    +
    +
    Listing 134. Example of a XML trigger definition with a node name matcher
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <matcher type="nodename" value="\D\w*">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the root name of the declaring input document matches the given regular expression (value).

    +
    +
  • +
  • +

    xpath: type: xpath → matching a node with a xpath value

    +
    +
    Listing 135. Example of a xpath trigger definition with a xpath matcher.
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <matcher type="xpath" value="/packagedElement[@xmi:type='uml:Class']">
    +        ...
    +    </matcher>
    +</trigger>
    +
    +
    +
    +

    This trigger will be enabled if the XML file contains a node /packagedElement where the xmi:type property equals uml:Class.

    +
    +
  • +
+
+
+
+Variable Assignment types +
+

Furthermore, it provides the ability to extract information from each input for further processing in the templates. The values assigned by variable assignments will be made available in template and the destinationPath of context.xml through the namespace variables.<key>. The XML Plug-in currently provides only one mechanism:

+
+
+
    +
  • +

    type 'constant' → constant parameter

    +
    +
    +
    <trigger id="..." type="xml" templateFolder="...">
    +    <matcher type="nodename" value="\D\w*">
    +        <variableAssignment type="constant" key="domain" value="restaurant" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the value to the key as a constant.

+
+
+
+XML input reader +
+

The CobiGen XML Plug-in implements an input reader for parsed XML documents. So API users can pass org.w3c.dom.Document objects for generation. For getting the right parsed XML inputs you can easily use the xmlplugin.util.XmlUtil, which provides static functionality to parse XML files or input streams and get the appropriate Document object.

+
+
+Template object +
+

Due to the heterogeneous structure an XML document can have, the XML input reader does not always create exactly the same model structure (in contrast to the java input reader). For example the model’s depth differs strongly, according to its input document. To allow navigational access to the nodes, the model also depends on the document’s element’s node names. All child elements with unique names are directly accessible via their names. In addition it is possible to iterate over all child elements with the help of the child list Children. So it is also possible to access child elements with non-unique names.

+
+
+

The XML input reader will create the following object model for template creation (EXAMPLEROOT, EXAMPLENODE1, EXAMPLENODE2, EXAMPLEATTR1,…​ are just used here as examples. Of course they will be replaced later by the actual node or attribute names):

+
+
+
    +
  • +

    ~EXAMPLEROOT~ ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      _nodeName_ ('String' :: Simple name of the root node)

      +
    • +
    • +

      _text_ ('String' :: Concatenated text content (PCDATA) of the root node)

      +
    • +
    • +

      TextNodes ('List<String>' :: List of all the root’s text node contents)

      +
    • +
    • +

      _at_~EXAMPLEATTR1~ ('String' :: String representation of the attribute’s value)

      +
    • +
    • +

      _at_~EXAMPLEATTR2~ ('String' :: String representation of the attribute’s value)

      +
    • +
    • +

      _at_…​

      +
    • +
    • +

      Attributes ('List<Map<String, Object>>' :: List of the root’s attributes

      +
      +
        +
      • +

        at ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          _attName_ ('String' :: Name of the attribute)

          +
        • +
        • +

          _attValue_ ('String' :: String representation of the attribute’s value)

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      Children ('List<Map<String, Object>>' :: List of the root’s child elements

      +
      +
        +
      • +

        child ('Map<String, Object>' :: List element)

        +
        +
          +
        • +

          …​common element sub structure…​

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      ~EXAMPLENODE1~ ('Map<String, Object>' :: One of the root’s child nodes)

      +
      +
        +
      • +

        …​common element structure…​

        +
      • +
      +
      +
    • +
    • +

      ~EXAMPLENODE2~ ('Map<String, Object>' :: One of the root’s child nodes)

      +
      +
        +
      • +

        …​common element sub structure…​

        +
      • +
      • +

        ~EXAMPLENODE21~ ('Map<String, Object>' :: One of the nodes' child nodes)

        +
        +
          +
        • +

          …​common element structure…​

          +
        • +
        +
        +
      • +
      • +

        ~EXAMPLENODE…​~

        +
      • +
      +
      +
    • +
    • +

      ~EXAMPLENODE…​~

      +
    • +
    +
    +
  • +
+
+
+

In contrast to the java input reader, this XML input reader does currently not provide any additional template methods.

+
+
+
+
+
+
Merger extensions
+
+

The XML plugin uses the LeXeMe merger library to produce semantically correct merge products. The merge strategies can be found in the MergeType enum and can be configured in the templates.xml as a mergeStrategy attribute:

+
+
+
    +
  • +

    mergeStrategy xmlmerge

    +
    +
    Listing 136. Example of a template using the mergeStrategy xmlmerge
    +
    +
    <templates>
    +	<template name="..." destinationPath="..." templateFile="..." mergeStrategy="xmlmerge"/>
    +</templates>
    +
    +
    +
  • +
+
+
+

Currently only the document types included in LeXeMe are supported. For details on how the merger works, consult the LeXeMe Wiki.

+
+ +
+

== Text Merger Plug-in

The Text Merger Plug-in enables merging of resulting free text documents into existing free text documents. Therefore, the algorithms are also very rudimentary.

+
+
+
+
Merger extensions
+
+

There are currently three main merge strategies that apply for the whole document:

+
+
+
    +
  • +

    merge strategy textmerge_append (appends the text directly to the end of the existing document) +_Remark_: If no anchors are defined, this will simply append the patch.

    +
  • +
  • +

    merge strategy textmerge_appendWithNewLine (appends the text after adding a new line break to the existing document) +_Remark_: empty patches will not result in appending a new line any more since v1.0.1 +Remark: Only suitable if no anchors are defined, otherwise it will simply act as textmerge_append

    +
  • +
  • +

    merge strategy textmerge_override (replaces the contents of the existing file with the patch) +_Remark_: If anchors are defined, override is set as the default mergestrategy for every text block if not redefined in an anchor specification.

    +
  • +
+
+
+
+
Anchor functionality
+
+

If a template contains text that fits the definition of anchor:${documentpart}:${mergestrategy}:anchorend or, more specifically, the regular expression (.*)anchor:([^:]+):(newline_)?([^:]+)(_newline)?:anchorend\\s*(\\r\\n|\\r|\\n), some additional functionality becomes available about specific parts of the incoming text and the way it will be merged with the existing text. These anchors always change things about the text to come up until the next anchor; text before it is ignored.

+
+
+

If no anchors are defined, the complete patch will be appended depending on your choice for the template in the file templates.xml.

+
+
+

[[anchordef]]

+
+
+Anchor Definition +
+

Anchors should always be defined as a comment of the language the template results in, as you do not want them to appear in your readable version, but cannot define them as FreeMarker comments in the template, or the merger will not know about them. +Anchors will also be read when they are not comments due to the merger being able to merge multiple types of text-based languages, thus making it practically impossible to filter for the correct comment declaration. That is why anchors have to always be followed by line breaks. That way there is a universal way to filter anchors that should have anchor functionality and ones that should appear in the text. +Remark: If the resulting language has closing tags for comments, they have to appear in the next line. +Remark: If you do not put the anchor into a new line, all the text that appears before it will be added to the anchor.

+
+
+
+Document parts +
+

In general, ${documentpart} is an id to mark a part of the document, that way the merger knows what parts of the text to merge with which parts of the patch (e.g. if the existing text contains anchor:table:${}:anchorend that part will be merged with the part tagged anchor:table:${}:anchorend of the patch).

+
+
+

If the same documentpart is defined multiple times, it can lead to errors, so instead of defining table multiple times, use table1, table2, table3 etc.

+
+
+

If a ${documentpart} is defined in the document but not in the patch and they are in the same position, it is processed in the following way: If only the documentparts header, test and footer are defined in the document in that order, and the patch contains header, order and footer, the resulting order will be header, test, order then footer.

+
+
+

The following documentparts have default functionality:

+
+
+
    +
  1. +

    anchor:header:${mergestrategy}:anchorend marks the beginning of a header, that will be added once when the document is created, but not again. +Remark: This is only done once, if you have header in another anchor, it will be ignored

    +
  2. +
  3. +

    anchor:footer:${mergestrategy}:anchorend marks the beginning of a footer, that will be added once when the document is created, but not again. Once this is invoked, all following text will be included in the footer, including other anchors.

    +
  4. +
+
+
+

[[mergestrategies]]

+
+
+
+Mergestrategies +
+

Mergestrategies are only relevant in the patch, as the merger is only interested in how text in the patch should be managed, not how it was managed in the past.

+
+
+
    +
  1. +

    anchor:${documentpart}::anchorend will use the merge strategy from templates.xml, see Merger-Extensions.

    +
  2. +
  3. +

    anchor:${}:${mergestrategy}_newline:anchorend or anchor:${}:newline_${mergestrategy}:anchorend states that a new line should be appended before or after this anchors text, depending on where the newline is (before or after the mergestrategy). anchor:${documentpart}:newline:anchorend puts a new line after the anchors text. +Remark: Only works with appending strategies, not merging/replacing ones. These strategies currently include: appendbefore, append/appendafter

    +
  4. +
  5. +

    anchor:${documentpart}:override:anchorend means that the new text of this documentpart will replace the existing one completely

    +
  6. +
  7. +

    anchor:${documentpart}:appendbefore:anchorend or anchor:${documentpart}:appendafter:anchorend/anchor:${documentpart}:append:anchorend specifies whether the text of the patch should come before the existing text or after.

    +
  8. +
+
+
+
+
+
Usage Examples
+
+General +
+

Below you can see how a file with anchors might look (using adoc comment tags), with examples of what you might want to use the different functions for.

+
+
+
+
// anchor:header:append:anchorend
+
+Table of contents
+Introduction/Header
+
+// anchor:part1:appendafter:anchorend
+
+Lists
+Table entries
+
+// anchor:part2:nomerge:anchorend
+
+Document Separators
+adoc table definitions
+
+// anchor:part3:override:anchorend
+
+Anything that you only want once but changes from time to time
+
+// anchor:footer:append:anchorend
+
+Copyright Info
+Imprint
+
+
+
+
+Merging +
+

In this section you will see a comparison on what files look like before and after merging

+
+
+override +
+
Listing 137. Before
+
+
// anchor:part:override:anchorend
+Lorem Ipsum
+
+
+
+
Listing 138. Patch
+
+
// anchor:part:override:anchorend
+Dolor Sit
+
+
+
+
Listing 139. After
+
+
// anchor:part:override:anchorend
+Dolor Sit
+
+
+
+
+Appending +
+
Listing 140. Before
+
+
// anchor:part:append:anchorend
+Lorem Ipsum
+// anchor:part2:appendafter:anchorend
+Lorem Ipsum
+// anchor:part3:appendbefore:anchorend
+Lorem Ipsum
+
+
+
+
Listing 141. Patch
+
+
// anchor:part:append:anchorend
+Dolor Sit
+// anchor:part2:appendafter:anchorend
+Dolor Sit
+// anchor:part3:appendbefore:anchorend
+Dolor Sit
+
+
+
+
Listing 142. After
+
+
// anchor:part:append:anchorend
+Lorem Ipsum
+Dolor Sit
+// anchor:part2:appendafter:anchorend
+Lorem Ipsum
+Dolor Sit
+// anchor:part3:appendbefore:anchorend
+Dolor Sit
+Lorem Ipsum
+
+
+
+
+Newline +
+
Listing 143. Before
+
+
// anchor:part:newline_append:anchorend
+Lorem Ipsum
+// anchor:part:append_newline:anchorend
+Lorem Ipsum
+(end of file)
+
+
+
+
Listing 144. Patch
+
+
// anchor:part:newline_append:anchorend
+Dolor Sit
+// anchor:part:append_newline:anchorend
+Dolor Sit
+(end of file)
+
+
+
+
Listing 145. After
+
+
// anchor:part:newline_append:anchorend
+Lorem Ipsum
+
+Dolor Sit
+// anchor:part:append_newline:anchorend
+Lorem Ipsum
+Dolor Sit
+
+(end of file)
+
+
+
+
+
+
+
Error List
+
+
    +
  • +

    If there are anchors in the text, but either base or patch do not start with one, the merging process will be aborted, as text might go missing this way.

    +
  • +
  • +

    Using _newline or newline_ with mergestrategies that don’t support it, like override, will abort the merging process. See Merge Strategies for details.

    +
  • +
  • +

    Using undefined mergestrategies will abort the merging process.

    +
  • +
  • +

    Wrong anchor definitions, for example anchor:${}:anchorend will abort the merging process, see <<`anchordef`,Anchor Definition>> for details.

    +
  • +
+
+ +
+

==JSON Plug-in +At the moment the plug-in can be used to merge generic JSON files depending on the merge strategy defined at the templates.

+
+
+
+
Merger extensions
+
+

There are currently these merge strategies:

+
+
+

Generic JSON Merge

+
+
+
    +
  • +

    merge strategy jsonmerge (add the new code respecting the existent in case of conflict)

    +
  • +
  • +

    merge strategy jsonmerge_override (add the new code overwriting the existent in case of conflict)

    +
    +
      +
    1. +

      JsonArray’s will be ignored / replaced in total

      +
    2. +
    3. +

      JsonObjects in conflict will be processed recursively ignoring adding non existent elements.

      +
    4. +
    +
    +
  • +
+
+
+
+
Merge Process
+
+Generic JSON Merging +
+

The merge process will be:

+
+
+
    +
  1. +

    Add non existent JSON Objects from patch file to base file.

    +
  2. +
  3. +

    For existent object in both files, will add non existent keys from patch to base object. This process will be done recursively for all existent objects.

    +
  4. +
  5. +

    For JSON Arrays existent in both files, the arrays will be just concatenated.

    +
  6. +
+
+ +
+

==TypeScript Plug-in

+
+
+

The TypeScript Plug-in enables merging result TS files to existing ones. This plug-in is used at the moment for generating an Angular2 client with all CRUD functionalities enabled. The plug-in also generates i18n functionality by just appending the ES or EN suffix at the end of the word, to put into the developer's knowledge that these words must be translated to the corresponding language. Currently, the generation of the Angular2 client requires an ETO java object as input so, there is no need to implement an input reader for ts artifacts for the moment.

+
+
+
+
+
Trigger Extensions
+
+

As for the Angular2 generation the input is a java object, the trigger expressions (including matchers and variable assignments) are implemented as Java.

+
+
+
+
Merger extensions
+
+

This plugin uses the TypeScript Merger to merge files. There are currently two merge strategies:

+
+
+
    +
  • +

    merge strategy tsmerge (add the new code respecting the existing in case of conflict)

    +
  • +
  • +

    merge strategy tsmerge_override (add the new code overwriting the existent in case of conflict)

    +
  • +
+
+
+

The merge algorithm mainly handles the following AST nodes:

+
+
+
    +
  • +

    ImportDeclaration

    +
    +
      +
    • +

      Will add non existent imports whatever the merge strategy is.

      +
    • +
    • +

      For different imports from same module, the import clauses will be merged.

      +
      +
      +
      import { a } from 'b';
      +import { c } from 'b';
      +//Result
      +import { a, c } from 'b';
      +
      +
      +
    • +
    +
    +
  • +
  • +

    ClassDeclaration

    +
    +
      +
    • +

      Adds non existent base properties from patch based on the name property.

      +
    • +
    • +

      Adds non existent base methods from patch based on the name signature.

      +
    • +
    • +

      Adds non existent annotations to class, properties and methods.

      +
    • +
    +
    +
  • +
  • +

    PropertyDeclaration

    +
    +
      +
    • +

      Adds non existent decorators.

      +
    • +
    • +

      Merge existent decorators.

      +
    • +
    • +

      With override strategy, the value of the property will be replaced by the patch value.

      +
    • +
    +
    +
  • +
  • +

    MethodDeclaration

    +
    +
      +
    • +

      With override strategy, the body will be replaced.

      +
    • +
    • +

      The parameters will be merged.

      +
    • +
    +
    +
  • +
  • +

    ParameterDeclaration

    +
    +
      +
    • +

      Replace type and modifiers with override merge strategy, adding non existent from patch into base.

      +
    • +
    +
    +
  • +
  • +

    ConstructorDeclaration

    +
    +
      +
    • +

      Merged in the same way as Method is.

      +
    • +
    +
    +
  • +
  • +

    FunctionDeclaration

    +
    +
      +
    • +

      Merged in the same way as Method is.

      +
    • +
    +
    +
  • +
+
+
+
+
Input reader
+
+

The TypeScript input reader is based on the one that the TypeScript merger uses. The current extensions are additional module fields giving from which library any entity originates. +module: null specifies a standard entity or type as string or number.

+
+
+Object model +
+

To get a first impression of the created object after parsing, let us start with analyzing a small example, namely the parsing of a simple type-orm model written in TypeScript.

+
+
+
+
import {Entity, PrimaryGeneratedColumn, Column} from "typeorm";
+
+@Entity()
+export class User {
+
+    @PrimaryGeneratedColumn()
+    id: number;
+
+    @Column()
+    firstName: string;
+
+    @Column()
+    lastName: string;
+
+    @Column()
+    age: number;
+
+}
+
+
+
+

The returned object has the following structure

+
+
+
+
{
+  "importDeclarations": [
+    {
+      "module": "typeorm",
+      "named": [
+        "Entity",
+        "PrimaryGeneratedColumn",
+        "Column"
+      ],
+      "spaceBinding": true
+    }
+  ],
+  "classes": [
+    {
+      "identifier": "User",
+      "modifiers": [
+        "export"
+      ],
+      "decorators": [
+        {
+          "identifier": {
+            "name": "Entity",
+            "module": "typeorm"
+          },
+          "isCallExpression": true
+        }
+      ],
+      "properties": [
+        {
+          "identifier": "id",
+          "type": {
+            "name": "number",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "PrimaryGeneratedColumn",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        },
+        {
+          "identifier": "firstName",
+          "type": {
+            "name": "string",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "Column",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        },
+        {
+          "identifier": "lastName",
+          "type": {
+            "name": "string",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "Column",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        },
+        {
+          "identifier": "age",
+          "type": {
+            "name": "number",
+            "module": null
+          },
+          "decorators": [
+            {
+              "identifier": {
+                "name": "Column",
+                "module": "typeorm"
+              },
+              "isCallExpression": true
+            }
+          ]
+        }
+      ]
+    }
+  ]
+}
+
+
+
+

If we only consider the first level of the JSON response, we spot two lists of imports and classes, providing information about the only import statement and the only User class, respectively. Moving one level deeper we observe that:

+
+
+
    +
  • +

    Every import statement is translated to an import declaration entry in the declarations list, containing the module name, as well as a list of entities imported from the given module.

    +
  • +
  • +

    Every class entry provides besides the class identifier, its decoration(s), modifier(s), as well as a list of properties that the original class contains.

    +
  • +
+
+
+

Note that, for each given type, the module from which it is imported is also given as in

+
+
+
+
  "identifier": {
+    "name": "Column",
+    "module": "typeorm"
+  }
+
+
+
+

Returning to the general case, independently from the given TypeScript file, an object having the following structure will be created.

+
+
+
    +
  • +

    importDeclarations: A list of import statement as described above

    +
  • +
  • +

    exportDeclarations: A list of export declarations

    +
  • +
  • +

    classes: A list of classes extracted from the given file, where each entry is full of class specific fields, describing its properties and decorator for example.

    +
  • +
  • +

    interfaces: A list of interfaces.

    +
  • +
  • +

    variables: A list of variables.

    +
  • +
  • +

    functions: A list of functions.

    +
  • +
  • +

    enums: A list of enumerations.

    +
  • +
+
+ +
+

==HTML Plug-in

+
+
+

The HTML Plug-in enables merging result HTML files to existing ones. This plug-in is used at the moment for generating an Angular2 client. Currently, the generation of the Angular2 client requires an ETO java object as input so, there is no need to implement an input reader for ts artifacts for the moment.

+
+
+
+
+
Trigger Extensions
+
+

As for the Angular2 generation the input is a java object, the trigger expressions (including matchers and variable assignments) are implemented as Java.

+
+
+
+
Merger extensions
+
+

There are currently two merge strategies:

+
+
+
    +
  • +

    merge strategy html-ng* (add the new code respecting the existing in case of conflict)

    +
  • +
  • +

    merge strategy html-ng*_override (add the new code overwriting the existent in case of conflict)

    +
  • +
+
+
+

The merging of two Angular2 files will be processed as follows:

+
+
+

The merge algorithm handles the following AST nodes:

+
+
+
    +
  • +

    md-nav-list

    +
  • +
  • +

    a

    +
  • +
  • +

    form

    +
  • +
  • +

    md-input-container

    +
  • +
  • +

    input

    +
  • +
  • +

    name (for name attribute)

    +
  • +
  • +

    ngIf

    +
  • +
+
+
+ + + + + +
+ + +Be aware, that the HTML merger is not generic and only handles the described tags needed for merging code of a basic Angular client implementation. For future versions, it is planned to implement a more generic solution. +
+
+ +
+

==OpenAPI Plug-in

+
+
+

The OpenAPI Plug-in enables the support for Swagger files that follow the OpenAPI 3.0 standard as input for CobiGen. Until now, CobiGen was thought to follow a "code first" generation; with this plugin, it can now also follow the "contract first" strategy.

+
+
+
    +
  • +

    Code First

    +
    +
      +
    • +

      Generating from a file with code (Java/XML code in our case)

      +
    • +
    +
    +
  • +
  • +

    Contract First

    +
    +
      +
    • +

      Generation from a full definition file (Swagger in this case). This file contains all the information about entities, operations, etc…​

      +
    • +
    +
    +
  • +
+
+
+ + + + + +
+ + +If you are not a CobiGen developer, you will be more interested in usage. +
+
+
+
+
Trigger Extensions
+
+

The OpenAPI Plug-in provides a new trigger for Swagger OpenAPI 3.0 related inputs. It accepts different representations as inputs (see OpenAPI input reader) and provides additional matching and variable assignment mechanisms. The configuration in the context.xml for this trigger looks like this:

+
+
+
    +
  • +

    type openapi

    +
    +
    Listing 146. Example of a OpenAPI trigger definition
    +
    +
    <trigger id="..." type="openapi" templateFolder="...">
    +    ...
    +</trigger>
    +
    +
    +
    +

    This trigger type enables OpenAPI elements as inputs.

    +
    +
  • +
+
+
+Matcher type +
+

With the trigger you might define matchers, which restrict the input upon specific aspects:

+
+
+
    +
  • +

    type 'element' → An object

    +
  • +
+
+
+

This trigger will be enabled if the element (Java Object) of the input file is an EntityDef (value).

+
+
+
+Container Matcher type +
+

Additionally, the OpenAPI plug-in provides the ability to match containers as follows:

+
+
+
    +
  • +

    type 'element'

    +
  • +
+
+
+

The container matcher matches elements as Java Objects, which in this case will always be an OpenAPIFile object. (See containerMatcher semantics to get more information about containerMatchers itself.)

+
+
+
+Variable Assignment types +
+

Furthermore, it provides the ability to extract information from each input for further processing in the templates. The values assigned by variable assignments will be made available in template and the destinationPath of context.xml through the namespace variables.<key>. The OpenAPI Plug-in currently provides two different mechanisms:

+
+
+
    +
  • +

    type 'constant' → constant parameter

    +
    +
    +
    <trigger id="..." type="openapi" templateFolder="...">
    +    <containerMatcher type="element" value="OpenApiFile"/>
    +    <matcher type="element" value="EntityDef">
    +        <variableAssignment type="constant" key="rootPackage" value="com.capgemini.demo" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

This variable assignment assigns the given constant value to the given key. +In this case, the constant type variableAssignment is used to specify the root package where the generator will place the generated files.

+
+
+
    +
  • +

    type 'extension' → Extraction of the info extensions and the extensions of each entity. (the tags that start with "x-…​").

    +
    +
    +
      <trigger id="..." type="openapi" templateFolder="...">
    +    <containerMatcher type="element" value="OpenApiFile"/>
    +    <matcher type="element" value="EntityDef">
    +      <variableAssignment type="extension" key="testingAttribute" value="x-test"/>
    +      <variableAssignment type="extension" key="rootPackage" value="x-rootpackage"/>
    +      <variableAssignment type="extension" key="globalVariable" value="x-global"/>
    +    </matcher>
    +  </trigger>
    +
    +
    +
  • +
+
+
+

The 'extension' variable assignment tries to find 'extensions' (tags that start with "x-…​") on the 'info' +part of your file and on the extensions of each entity. value is the extension that our plug-in will try to find on your OpenAPI file. The result will +be stored in the variable key.

+
+
+

As you will see on the figure below, there are two types of variables: The global ones, that are defined +on the 'info' part of the file, and the local ones, that are defined inside each entity.

+
+
+

Therefore, if you want to define the root package, then you will have to declare it on the 'info' part. +That way, all your entities will be generated under the same root package (e.g. com.devonfw.project).

+
+
+

Swagger at devon4j Project

+
+
+

If no extension with that name was found, then an empty string will be assigned. In the case of not defining the root package, then the code will be generated into src/main/java.

+
+
+
    +
  • +

    type 'property' → property of the Java Object

    +
    +
    +
    <trigger id="..." type="openapi" templateFolder="...">
    +    <containerMatcher type="element" value="OpenApiFile"/>
    +    <matcher type="element" value="EntityDef">
    +        <variableAssignment type="property" key="entityName" value="name" />
    +    </matcher>
    +</trigger>
    +
    +
    +
  • +
+
+
+

The 'property' variable assignment tries to find the property value of the entities defined on the schema. +The value is assigned to the key. The current properties that you will able to get are:

+
+
+
    +
  1. +

    ComponentDef component: It is an object that stores the configuration of an devon4j component. Its only +property is List<PathDef> paths which contains the paths as the ones shown here.

    +
  2. +
  3. +

    String componentName: Stores the name of the x-component tag for this entity.

    +
  4. +
  5. +

    String name: Name of this entity (as shown on the example above).

    +
  6. +
  7. +

    String description: Description of this entity.

    +
  8. +
  9. +

    List<PropertyDef> properties: List containing all the properties of this entity. PropertyDef is an object that has the next properties:

    +
    +
      +
    1. +

      String name.

      +
    2. +
    3. +

      String type.

      +
    4. +
    5. +

      String format.

      +
    6. +
    7. +

      String description.

      +
    8. +
    9. +

      Boolean isCollection.

      +
    10. +
    11. +

      Boolean isEntity.

      +
    12. +
    13. +

      Boolean required.

      +
    14. +
    15. +

      Map<String, Object> constraints

      +
    16. +
    +
    +
  10. +
+
+
+

If no property with that name was found, then it will be set to null.

+
+
+
+Full trigger configuration +
+
+
<trigger id="..." type="openapi" templateFolder="...">
+    <containerMatcher type="element" value="OpenApiFile">
+    <matcher type="element" value="EntityDef">
+        <variableAssignment type="constant" key="rootPackage" value="com.capgemini.demo" />
+        <variableAssignment type="property" key="component" value="componentName" />
+        <variableAssignment type="property" key="entityName" value="name" />
+    </matcher>
+</trigger>
+
+
+
+
+
+
Input reader
+
+

The CobiGen OpenAPI Plug-in implements an input reader for OpenAPI 3.0 files. The XML input reader will create the following object model for template creation:

+
+
+
    +
  • +

    model ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      header (HeaderDef :: Definition of the header found at the top of the file)

      +
    • +
    • +

      name ('String' :: Name of the current Entity)

      +
    • +
    • +

      componentName ('String' :: name of the component the entity belongs to)

      +
    • +
    • +

      component (ComponentDef :: Full definition of the component that entity belongs to)

      +
    • +
    • +

      description ('String' :: Description of the Entity)

      +
    • +
    • +

      properties (List<PropertyDef> :: List of properties the entity has)

      +
    • +
    • +

      relationShips (List<RelationShip> :: List of Relationships the entity has)

      +
    • +
    +
    +
  • +
  • +

    HeaderDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      info (InfoDef :: Definition of the info found in the header)

      +
    • +
    • +

      servers (List<ServerDef> :: List of servers the specification uses)

      +
    • +
    +
    +
  • +
  • +

    InfoDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      title ('String' :: The title of the specification)

      +
    • +
    • +

      description ('String' :: The description of the specification)

      +
    • +
    +
    +
  • +
  • +

    ServerDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      URI ('String' :: String representation of the Server location)

      +
    • +
    • +

      description ('String' :: description of the server)

      +
    • +
    +
    +
  • +
  • +

    ComponentDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      paths (List<PathDef> :: List of services for this component)

      +
    • +
    +
    +
  • +
  • +

    PropertyDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      name ('String' :: Name of the property)

      +
    • +
    • +

      type ('String' :: type of the property)

      +
    • +
    • +

      format ('String' :: format of the property (i.e. int64))

      +
    • +
    • +

      isCollection (boolean :: true if the property is a collection, false by default)

      +
    • +
    • +

      isEntity (boolean :: true if the property refers to another entity, false by default)

      +
    • +
    • +

      sameComponent (boolean :: true if the entity that the property refers to belongs to the same component, false by default)

      +
    • +
    • +

      description ('String' :: Description of the property)

      +
    • +
    • +

      required (boolean :: true if the property is set as required)

      +
    • +
    • +

      constraints ('Map<String, Object>')

      +
    • +
    +
    +
  • +
  • +

    RelationShip ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      type ('String' :: type of the relationship (OneToOne, ManyToMany, etc…​))

      +
    • +
    • +

      entity ('String' :: destination entity name)

      +
    • +
    • +

      sameComponent (boolean :: true if the destination entity belongs to the same component of the source entity, false by default)

      +
    • +
    • +

      unidirectional (boolean :: true if the relationship is unidirectional, false by default)

      +
    • +
    +
    +
  • +
  • +

    PathDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      rootComponent ('String' :: the first segment of the path)

      +
    • +
    • +

      version ('String' :: version of the service)

      +
    • +
    • +

      pathURI ('String' :: URI of the path, the segment after the version)

      +
    • +
    • +

      operations (List<OperationDef> :: List of operations for this path)

      +
    • +
    +
    +
  • +
  • +

    OperationDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      type ('String' :: type of the operation (GET, PUT, etc…​))

      +
    • +
    • +

      parameters (List<ParameterDef> :: List of parameters)

      +
    • +
    • +

      operationId ('String' :: name of the operation prototype)

      +
    • +
    • +

      description ('String' :: JavaDoc Description of the operation)

      +
    • +
    • +

      summary (List<PropertyDef> :: JavaDoc operation Summary)

      +
    • +
    • +

      tags ('List<String>' :: List of different tags)

      +
    • +
    • +

      responses (List<ResponseDef> :: Responses of the operation)

      +
    • +
    +
    +
  • +
  • +

    ParameterDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      isSearchCriteria (boolean :: true if the response is an SearchCriteria object)

      +
    • +
    • +

      inPath (boolean :: true if this parameter is contained in the request path)

      +
    • +
    • +

      inQuery (boolean :: true if this parameter is contained in a query)

      +
    • +
    • +

      isBody (boolean :: true if this parameter is a response body)

      +
    • +
    • +

      inHeader (boolean :: true if this parameter is contained in a header)

      +
    • +
    • +

      mediaType ('String' :: String representation of the media type of the parameter)

      +
    • +
    +
    +
  • +
  • +

    ResponseDef ('Map<String, Object>' :: common element structure)

    +
    +
      +
    • +

      isArray (boolean :: true if the type of the response is an Array)

      +
    • +
    • +

      isPaginated (boolean :: true if the type of the response is paginated)

      +
    • +
    • +

      isVoid (boolean :: true if there is no type/an empty type)

      +
    • +
    • +

      isEntity (boolean :: true if the type of the response is an Entity)

      +
    • +
    • +

      entityRef (EntityDef :: Incomplete EntityDef containing the name and properties of the referenced Entity)

      +
    • +
    • +

      type ('String' :: String representation of the attribute’s value)

      +
    • +
    • +

      code ('String' :: String representation of the HTTP status code)

      +
    • +
    • +

      mediaTypes ('List<String>' :: List of media types that can be returned)

      +
    • +
    • +

      description ('String' :: Description of the response)

      +
    • +
    +
    +
  • +
+
+
+
+
Merger extensions
+
+

This plugin only provides an input reader, there is no support for OpenAPI merging. Nevertheless, the files generated from an OpenAPI file will be Java, XML, JSON, TS, etc…​ so, +for each file to be generated defined at templates.xml, must set the mergeStrategy for the specific language (javamerge, javamerge_override, jsonmerge, etc…​)

+
+
+
+
<templates>
+    ...
+    <templateExtension ref="${variables.entityName}.java" mergeStrategy="javamerge"/>
+    ...
+    <templateExtension ref="${variables.entityName}dataGrid.component.ts" mergeStrategy="tsmerge"/>
+    ...
+    <templateExtension ref="en.json" mergeStrategy="jsonmerge"/>
+</templates>
+
+
+
+
+
Usage
+
+Writing OpenAPI 3.0 contract file +
+

The Swagger file must follow the OpenAPI 3.0 standard to be readable by CobiGen, otherwise an error will be thrown. +A full documentation about how to follow this standard can be found in the Swagger3 Docs.

+
+
+

The Swagger file must be at the core folder of your devon4j project, like shown below:

+
+
+

Swagger at devon4j Project

+
+
+

To be compatible with CobiGen and devon4j, it must follow some specific configurations. These configurations allow us to avoid redundant definitions, as SearchCriteria and PaginatedList objects are used at the services definitions.

+
+
+
+Paths +
+
    +
  • +

    Just adding the tags property at the end of the service definitions with the items `SearchCriteria` and/or paginated puts into CobiGen's knowledge that a standard devon4j SearchCriteria and/or PaginateListTo object must be generated. That way, the Swagger file will be easier to write and even more understandable.

    +
  • +
  • +

    The path must start with the component name, and define an x-component tag with the component name. That way this service will be included into the component services list.

    +
  • +
+
+
+
+
  /componentnamemanagement/v1/entityname/customOperation/:
+    x-component: componentnamemanagement
+    post:
+      summary: 'Summary of the operation'
+      description: Description of the operation.
+      operationId: customOperation
+      responses:
+        '200':
+          description: Description of the response.
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: '#/components/schemas/EntityName'
+      requestBody:
+        $ref: '#/components/requestBodies/EntityName'
+      tags:
+        - searchCriteria
+        - paginated
+
+
+
+

That way, CobiGen will be able to generate the endpoint (REST service) customOperation on componentmanagement. If you do not specify the component to generate to (the x-component tag) then this service will not be taken into account for generation.

+
+
+
+Service based generation +
+

In previous CobiGen versions, we were able to generate code from a contract-first OpenAPI specification only when we defined components like the following:

+
+
+
+
components:
+    schemas:
+        Shop:
+          x-component: shopmanagement
+          description: Entity definition of Shop
+          type: object
+          properties:
+            shopExample:
+              type: string
+              maxLength: 100
+              minLength: 5
+              uniqueItems: true
+
+
+
+

We could not generate services without the definition of those components.

+
+
+

In our current version, we have overcome it, so that now we are able to generate all the services independently. You just need to add an x-component tag with the name of the component that will make use of that service. See here.

+
+
+

A small OpenAPI example defining only services can be found below:

+
+
+
+
openapi: 3.0.0
+servers:
+  - url: 'https://localhost:8081/server/services/rest'
+    description: Just some data
+info:
+  title: Devon Example
+  description: Example of a API definition
+  version: 1.0.0
+  x-rootpackage: com.capgemini.spoc.openapi
+paths:
+  /salemanagement/v1/sale/{saleId}:
+    x-component: salemanagement
+    get:
+      operationId: findSale
+      parameters:
+        - name: saleId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Any
+  /salemanagement/v1/sale/{bla}:
+    x-component: salemanagement
+    get:
+      operationId: findSaleBla
+      parameters:
+        - name: bla
+          in: path
+          required: true
+          schema:
+            type: integer
+            format: int64
+            minimum: 10
+            maximum: 200
+      responses:
+        '200':
+          description: Any
+
+
+
+

Then, the increment that you need to select for generating those services is Crud devon4ng Service based Angular:

+
+
+

Service based generation

+
+
+
+Full example +
+

This example yaml file can be downloaded from here.

+
+
+ + + + + +
+ + +As you will see on the file, "x-component" tags are obligatory if you want to generate components (entities). They have to be defined for each one. +In addition, you will find the global variable "x-rootpackage" that are explained <<,here>>. +
+
+
+
+
openapi: 3.0.0
+servers:
+  - url: 'https://localhost:8081/server/services/rest'
+    description: Just some data
+info:
+  title: Devon Example
+  description: Example of a API definition
+  version: 1.0.0
+  x-rootpackage: com.devonfw.angular.test
+paths:
+  /shopmanagement/v1/shop/{shopId}:
+    x-component: shopmanagement
+    get:
+      operationId: findShop
+      parameters:
+        - name: shopId
+          in: path
+          required: true
+          schema:
+            type: integer
+            format: int64
+            minimum: 0
+            maximum: 50
+      responses:
+        '200':
+          description: Any
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/Shop'
+            text/plain:
+              schema:
+                type: string
+        '404':
+          description: Not found
+  /salemanagement/v1/sale/{saleId}:
+    x-component: salemanagement
+    get:
+      operationId: findSale
+      parameters:
+        - name: saleId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Any
+  /salemanagement/v1/sale/:
+    x-component: salemanagement
+    post:
+      responses:
+        '200':
+          description: Any
+      requestBody:
+        $ref: '#/components/requestBodies/SaleData'
+      tags:
+       - searchCriteria
+  /shopmanagement/v1/shop/new:
+    x-component: shopmanagement
+    post:
+      responses:
+       '200':
+          description: Any
+      requestBody:
+        $ref: '#/components/requestBodies/ShopData'
+components:
+    schemas:
+        Shop:
+          x-component: shopmanagement
+          description: Entity definition of Shop
+          type: object
+          properties:
+            shopExample:
+              type: string
+              maxLength: 100
+              minLength: 5
+              uniqueItems: true
+            sales:
+              type: array # Many to One relationship
+              items:
+                $ref: '#/components/schemas/Sale'
+        Sale:
+          x-component: salemanagement
+          description: Entity definition of Sale
+          type: object
+          properties:
+            saleExample:
+              type: number
+              format: int64
+              maximum: 100
+              minimum: 0
+          required:
+            - saleExample
+
+    requestBodies:
+        ShopData:
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/Shop'
+          required: true
+        SaleData:
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/Sale'
+          required: true
+
+
+
+
+
+
+
+

26.3. CobiGen CLI

+ +
+

==CobiGen CLI

+
+
+

The command line interface (CLI) for CobiGen enables the generation of code using few commands. This feature allows us to decouple CobiGen from Eclipse.

+
+
+
Install CobiGen CLI
+
+

In order to install the CobiGen CLI you will need to use the devonfw/ide. In a console run devon cobigen.

+
+
+
+
Commands and options
+
+

Using the following command and option you will be able to customize your generation as follows:

+
+
+
    +
  • +

    cobigen, cg: Main entry point of the CLI. If no arguments are passed, man page will be printed.

    +
  • +
  • +

    [generate, g]: Command used for code generation.

    +
    +
      +
    • +

      InputGlob: Glob pattern of the input file or the whole path of the input file from which the code will be generated.

      +
    • +
    • +

      < --increment, -i > : Specifies an increment ID to be generated. You can also search increments by name and CobiGen will output the resultant list. If an exact match found, code generation will happen.

      +
    • +
    • +

      < --template, -t > : specifies a template ID to be generated. You can also search templates by name and CobiGen will output the resultant list.

      +
    • +
    • +

      < --outputRootPath, -out >: The project file path in which you want to generate your code. If no output path is given, CobiGen will use the project of your input file.

      +
    • +
    +
    +
  • +
  • +

    [adapt-templates, a]: Generates a new templates folder next to the CobiGen CLI and stores its location inside a configuration file. After executing this command, the CLI will attempt to use the specified Templates folder.

    +
  • +
  • +

    < --verbose, -v > : Prints debug information, verbose log.

    +
  • +
  • +

    < --help, -h > : Prints man page.

    +
  • +
  • +

    < update, u> : This command compares the artificial pom plug-in versions with the latest versions available in central, and the user can update any outdated plug-in version.

    +
  • +
+
+
+
+
CLI Execution steps:
+
+

CobiGen CLI is installed inside your devonfw distribution. In order to execute it follow the next steps:

+
+
+
    +
  1. +

    Run console.bat, this will open a console.

    +
  2. +
  3. +

    Execute cobigen or cg and the man page should be printed.

    +
  4. +
  5. +

    Use a valid CobiGen input file and run cobigen generate <pathToInputFile>. Note: On the first execution of the CLI, CobiGen will download all the needed dependencies, please be patient.

    +
  6. +
  7. +

    A list of increments will be printed so that you can start the generation.

    +
  8. +
+
+
+

Preview of the man page for generate command:

+
+
+
+Generation path +
+
+
+
+
Examples
+
+

A selection of commands that you can use with the CLI:

+
+
+
    +
  • +

    cobigen generate foo\bar\EmployeeEntity.java: As no output path has been defined, CobiGen will try to find the pom.xml of the current project in order to set the generation root path.

    +
  • +
  • +

    cobigen generate foo\bar\*.java --out other\project: Will retrieve all the Java files on that input folder and generate the code on the path specified by --out.

    +
  • +
  • +

    cg g foo\bar\webServices.yml --increment TO: Performs a string search using TO and will print the closest increments like in the following image:

    +
  • +
+
+
+
+Generation path +
+
+
+
    +
  • +

    cg g foo\bar\webServices.yml -i 1,4,6: Directly generates increments with IDs 1, 4 and 6. CobiGen will not request you any other input.

    +
  • +
  • +

    cg a: Downloads the latest CobiGen_Templates and unpacks them next to the CLI. CobiGen will from now on use these unpacked Templates for generation.

    +
  • +
  • +

    cg a -cl C:\my\custom\location: Downloads the latest CobiGen_Templates and unpacks them in C:\my\custom\location. CobiGen will from now on use these unpacked Templates for generation.

    +
  • +
+
+
+
+
CLI update command
+
+

Example of Update Command :

+
+
+
+Generation path +
+
+
+

Select the plug-ins which you want to update like below :

+
+
+
+Generation path +
+
+
+
+
CLI custom templates
+
+

To use custom templates, it’s necessary to set up a custom configuration path as described here.

+
+
+
+
Troubleshooting
+
+

When generating code from a Java file, CobiGen makes use of Java reflection for generating templates. In order to do that, the CLI needs to find the compiled source code of your project.

+
+
+

If you find an error like Compiled class foo\bar\EmployeeEntity.java has not been found, it means you need to run mvn clean install on the input project so that a new target folder gets created with the needed compiled sources.

+
+
+
+
+

26.4. Maven Build Integration

+ +
+

==Maven Build Integration

+
+
+

For maven integration of CobiGen you can include the following build plugin into your build:

+
+
+
Listing 147. Build integration of CobiGen
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>com.devonfw.cobigen</groupId>
+      <artifactId>cobigen-maven-plugin</artifactId>
+      <version>VERSION-YOU-LIKE</version>
+      <executions>
+        <execution>
+          <id>cobigen-generate</id>
+          <phase>generate-resources</phase>
+          <goals>
+            <goal>generate</goal>
+          </goals>
+        </execution>
+      </executions>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+

Available goals

+
+
+
    +
  • +

    generate: Generates contents configured by the standard non-compiled configuration folder. Thus generation can be controlled/configured via a location URI of the configuration and template or increment ids to be generated for a set of inputs.

    +
  • +
+
+
+

Available phases are all phases, which already provide compiled sources such that CobiGen can perform reflection on it. Thus possible phases are for example package, site.

+
+
+
Provide Template Set
+
+

For generation using the CobiGen maven plug-in, the CobiGen configuration can be provided in two different styles:

+
+
+
    +
  1. +

    By a configurationFolder, which should be available on the file system whenever you are running the generation. The value of configurationFolder should correspond to the maven file path syntax.

    +
    +
    Listing 148. Provide CobiGen configuration by configuration folder (file)
    +
    +
    <build>
    +  <plugins>
    +    <plugin>
    +      ...
    +      <configuration>
    +        <configurationFolder>cobigen-templates</configurationFolder>
    +      </configuration>
    +       ...
    +     </plugin>
    +  </plugins>
    +</build>
    +
    +
    +
  2. +
  3. +

    By maven dependency, whereas the maven dependency should stick on the same conventions as the configuration folder. This explicitly means that it should contain non-compiled resources as well as the context.xml on top-level.

    +
    +
    Listing 149. Provide CobiGen configuration by maven dependency (jar)
    +
    +
    <build>
    +  <plugins>
    +    <plugin>
    +      ...
    +      <dependencies>
    +        <dependency>
    +          <groupId>com.devonfw.cobigen</groupId>
    +          <artifactId>templates-XYZ</artifactId>
    +          <version>VERSION-YOU-LIKE</version>
    +        </dependency>
    +      </dependencies>
    +      ...
    +    </plugin>
    +  </plugins>
    +</build>
    +
    +
    +
    +

    We currently provide a generic deployed version of the templates on the devonfw-nexus for Register Factory (<artifactId>cobigen-templates-rf</artifactId>) and for the devonfw itself (<artifactId>cobigen-templates-devonfw</artifactId>).

    +
    +
  4. +
+
+
+
+
Build Configuration
+
+

Using the following configuration you will be able to customize your generation as follows:

+
+
+
    +
  • +

    <destinationRoot> specifies the root directory the relative destinationPath of CobiGen templates configuration should depend on. Default ${basedir}

    +
  • +
  • +

    <inputPackage> declares a package name to be used as input for batch generation. This refers directly to the CobiGen Java Plug-in container matchers of type package configuration.

    +
  • +
  • +

    <inputFile> declares a file to be used as input. The CobiGen maven plug-in will try to parse this file to get an appropriate input to be interpreted by any CobiGen plug-in.

    +
  • +
  • +

    <increment> specifies an increment ID to be generated. You can specify one single increment with content ALL to generate all increments matching the input(s).

    +
  • +
  • +

    <template> specifies a template ID to be generated. You can specify one single template with content ALL to generate all templates matching the input(s).

    +
  • +
  • +

    <forceOverride> specifies an overriding behavior, which enables non-mergeable resources to be completely rewritten by generated contents. For mergeable resources this flag indicates, that conflicting fragments during merge will be replaced by generated content. Default: false

    +
  • +
  • +

    <failOnNothingGenerated> specifies whether the build should fail if the execution does not generate anything.

    +
  • +
+
+
+
Listing 150. Example for a simple build configuration
+
+
<build>
+  <plugins>
+    <plugin>
+       ...
+      <configuration>
+        <destinationRoot>${basedir}</destinationRoot>
+        <inputPackages>
+          <inputPackage>package.to.be.used.as.input</inputPackage>
+        </inputPackages>
+        <inputFiles>
+          <inputFile>path/to/file/to/be/used/as/input</inputFile>
+        </inputFiles>
+        <increments>
+          <increment>IncrementID</increment>
+        </increments>
+        <templates>
+          <template>TemplateID</template>
+        </templates>
+        <forceOverride>false</forceOverride>
+      </configuration>
+        ...
+    </plugin>
+  </plugins>
+</build>
+
+
+
+
+
Plugin Injection Since v3
+
+

Since version 3.0.0, the plug-in mechanism has changed to support modular releases of the CobiGen plug-ins. Therefore, you need to add all plug-ins to be used for generation. Take the following example to get the idea:

+
+
+
Listing 151. Example of a full configuration including plugins
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>com.devonfw.cobigen</groupId>
+      <artifactId>cobigen-maven-plugin</artifactId>
+      <version>VERSION-YOU-LIKE</version>
+      <executions>
+        ...
+      </executions>
+      <configuration>
+        ...
+      </configuration>
+      <dependencies>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>templates-devon4j</artifactId>
+          <version>2.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>tempeng-freemarker</artifactId>
+          <version>1.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>javaplugin</artifactId>
+          <version>1.6.0</version>
+        </dependency>
+      </dependencies>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+
+
A full example
+
+
    +
  1. +

    A complete maven configuration example

    +
  2. +
+
+
+
+
<build>
+  <plugins>
+    <plugin>
+      <groupId>com.devonfw.cobigen</groupId>
+      <artifactId>cobigen-maven-plugin</artifactId>
+      <version>6.0.0</version>
+      <executions>
+        <execution>
+          <id>generate</id>
+          <phase>package</phase>
+          <goals>
+            <goal>generate</goal>
+          </goals>
+        </execution>
+      </executions>
+      <configuration>
+        <inputFiles>
+          <inputFile>src/main/java/io/github/devonfw/cobigen/generator/dataaccess/api/InputEntity.java</inputFile>
+        </inputFiles>
+        <increments>
+          <increment>dataaccess_infrastructure</increment>
+          <increment>daos</increment>
+        </increments>
+        <failOnNothingGenerated>false</failOnNothingGenerated>
+      </configuration>
+      <dependencies>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>templates-devon4j</artifactId>
+          <version>2.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>tempeng-freemarker</artifactId>
+          <version>2.0.0</version>
+        </dependency>
+        <dependency>
+          <groupId>com.devonfw.cobigen</groupId>
+          <artifactId>javaplugin</artifactId>
+          <version>1.6.0</version>
+        </dependency>
+      </dependencies>
+    </plugin>
+  </plugins>
+</build>
+
+
+
+
+
+

26.5. Eclipse Integration

+ +
+

==Installation

+
+
+
+
+

Remark: CobiGen is preinstalled in the devonfw/devon-ide.

+
+
+
+
+
Preconditions
+
+
    +
  • +

    Eclipse 4.x

    +
  • +
  • +

    Java 7 Runtime (for starting eclipse with CobiGen). This is independent from the target version of your developed code.

    +
  • +
+
+
+
+
Installation steps
+
+
    +
  1. +

    Open the eclipse installation dialog
    +menu bar → HelpInstall new Software…​

    +
    +

    01 install new software

    +
    +
  2. +
  3. +

    Open CobiGen’s update site
    +Insert the update site of your interest into the field Work with and press Add …​
    +Unless you know what you are doing we recommend you install every plugin as shown in the picture below.

    +
    + +
    +
  4. +
  5. +

    Follow the installation wizard
    +Select CobiGen Eclipse Plug-inNextNext → accept the license → FinishOKYes

    +
  6. +
  7. +

    Once installed, a new menu entry named "CobiGen" will show up in the Package Explorer’s context menu. In the sub menu there will be the Generate…​ command, which may ask you to update the templates, and then you can start the generation wizard of CobiGen. You can adapt the templates by clicking on Adapt Templates, which will give you the possibility to import the CobiGen_Templates automatically so that you can modify them.

    +
  8. +
  9. +

    Checkout (clone) your project’s templates folder or use the current templates released with CobiGen (https://github.com/devonfw/cobigen/tree/master/cobigen-templates) and then choose Import -> General -> Existing Projects into Workspace to import the templates into your workspace.

    +
  10. +
  11. +

    Now you can start generating. To get an introduction of CobiGen try the devon4j templates and work on the devon4j sample application. There you might want to start with Entity objects as a selection to run CobiGen with, which will give you a good overview of what CobiGen can be used for right out of the box in devon4j based development. If you need some more introduction in how to come up with your templates and increments, please be referred to the documentation of the context configuration and the templates configuration

    +
  12. +
+
+
+

Dependent on your context configuration menu entry Generate…​ may be gray out or not. See for more information about valid selections for generation.

+
+
+
+
Updating
+
+

In general updating CobiGen for eclipse is done via the update mechanism of eclipse directly, as shown on image below:

+
+
+

03 update software

+
+
+

Upgrading eclipse CobiGen plug-in to v3.0.0 needs some more attention of the user due to a changed plug-in architecture of CobiGen’s core module and the eclipse integration. Eventually, we were able to provide any plug-in of CobiGen separately as its own eclipse bundle (fragment), which is automatically discovered by the main CobiGen Eclipse plug-in after installation.

+
+ +
+

==Usage

+
+
+

CobiGen has two different generation modes depending on the input selected for generation. The first one is the simple mode, which will be started if the input contains only one input artifact, e.g. for Java an input artifact currently is a Java file. The second one is the batch mode, which will be started if the input contains multiple input artifacts, e.g. for Java this means a list of files. In general this means also that the batch mode might be started when selecting complex models as inputs, which contain multiple input artifacts. The latter scenario has only been covered in the research group, yet.

+
+
+
+
Simple Mode
+
+

Selecting the menu entry Generate…​ the generation wizard will be opened:

+
+
+

generate wizard page1

+
+
+

The left side of the wizard shows all available increments, which can be selected to be generated. Increments are a container like concept encompassing multiple files to be generated, which should result in a semantically closed generation output. +On the right side of the wizard all files are shown, which might be affected by the generation - dependent on the increment selection of files on the left side. The type of modification of each file will be encoded into the following color scheme if the files are selected for generation:

+
+
+
    +
  • +

    green: files, which are currently non-existent in the file system. These files will be created during generation

    +
  • +
  • +

    yellow: files, which are currently existent in the file system and which are configured to be merged with generated contents.

    +
  • +
  • +

    red: files, which are currently existent in the file system. These files will be overwritten if manually selected.

    +
  • +
  • +

    no color: files, which are currently existent in the file system. Additionally files, which were deselected and thus will be ignored during generation.

    +
  • +
+
+
+

Selecting an increment on the left side will initialize the selection of all shown files to be generated on the right side, whereas green and yellow categorized files will be selected initially. A manual modification of the pre-selection can be performed by switching to the customization tree using the Customize button on the right lower corner.

+
+
+
+
+

Optional: If you want to customize the generation object model of a Java input class, you might continue with the Next > button instead of finishing the generation wizard. The next generation wizard page is currently available for Java file inputs and lists all non-static fields of the input. deselecting entries will lead to an adapted object model for generation, such that deselected fields will be removed in the object model for generation. By default all fields will be included in the object model.

+
+
+
+
+

Using the Finish button, the generation will be performed. Finally, CobiGen runs the eclipse internal organize imports and format source code for all generated sources and modified sources. Thus it is possible that — especially during organize imports — a dialog opens if some types could not be determined automatically. This dialog can be easily closed by pressing on Continue. If the generation is finished, the Success! dialog will pop up.

+
+
+
+
Batch mode
+
+

If there are multiple input elements selected, e.g., Java files, CobiGen will be started in batch mode. For the generation wizard dialog this means, that the generation preview will be constrained to the first selected input element. It does not preview the generation for each element of the selection or of a complex input. The selection of the files to be generated will be generated for each input element analogously afterwards.

+
+
+

generate wizard page1 batch

+
+
+

Thus the color encoding differs also a little bit:

+
+
+
    +
  • +

    yellow: files, which are configured to be merged.

    +
  • +
  • +

    red: files, which are not configured with any merge strategy and thus will be created if the file does not exist or overwritten if the file already exists

    +
  • +
  • +

    no color: files, which will be ignored during generation

    +
  • +
+
+
+

Initially all possible files to be generated will be selected.

+
+
+
+
Health Check
+
+

To check whether CobiGen runs appropriately for the selected element(s) the user can perform a Health Check by activating the respective menu entry as shown below.

+
+
+

health check menu entry

+
+
+

The simple Health Check includes 3 checks. As long as any of these steps fails, the Generate menu entry is grayed out.

+
+
+

The first step is to check whether the generation configuration is available at all. If this check fails you will see the following message:

+
+
+

health check no templates

+
+
+

This indicates, that there is no Project named CobiGen_Templates available in the current workspace. To run CobiGen appropriately, it is necessary to have a configuration project named CobiGen_Templates imported into your workspace. For more information see chapter Eclipse Installation.

+
+
+

The second step is to check whether the template project includes a valid context.xml. If this check fails, you will see the following message:

+
+
+

health check invalid config

+
+
+

This means that either your context.xml

+
+
+
    +
  • +

    does not exist (or has another name)

    +
  • +
  • +

    or it is not valid one in any released version of CobiGen

    +
  • +
  • +

    or there is simply no automatic routine of upgrading your context configuration to a valid state.

    +
  • +
+
+
+

If all this is not the case, such as, there is a context.xml, which can be successfully read by CobiGen, you might get the following information:

+
+
+

health check old context

+
+
+

This means that your context.xml is available with the correct name but it is outdated (belongs to an older CobiGen version). In this case just click on Upgrade Context Configuration to get the latest version.

+
+
+
+
+

Remark: This will create a backup of your current context configuration and converts your old configuration to the new format. The upgrade will remove all comments from the file, which could be retrieved later on again from the backup. +If the creation of the backup fails, you will be asked to continue or to abort.

+
+
+
+
+

The third step checks whether there are templates for the selected element(s). If this check fails, you will see the following message:

+
+
+

health check no matching triggers

+
+
+

This indicates that no trigger matching the current selection has been activated. The reason might be that your selection is faulty or that you imported the wrong template project (e.g. you are working on a devon4j project, but imported the Templates for the Register Factory). If you are a template developer, have a look at the trigger configuration and at the corresponding available plug-in implementations of triggers, like e.g., Java Plug-in or XML Plug-in.

+
+
+

If all the checks are passed you see the following message:

+
+
+

health check all OK

+
+
+

In this case everything is OK and the Generate button is not grayed out anymore so that you are able to trigger it and see the Simple Mode.

+
+
+

In addition to the basic check of the context configuration, you also have the opportunity to perform an Advanced Health Check, which will check all available templates configurations (templates.xml) of path-depth=1 from the configuration project root according to their compatibility.

+
+
+

health check advanced up to date

+
+
+

Analogous to the upgrade of the context configuration, the Advanced Health Check will also provide upgrade functionality for templates configurations if available.

+
+
+
+
Update Templates
+
+

Update Template: Select an entity file and right-click, then select CobiGen → Update Templates. After that, click on Download; once the download finishes, a success message will appear.

+
+
+
+
Adapt Templates
+
+

Adapt Template: Select any file and right-click, then select `cobigen → Adapt Templates`. If the CobiGen templates jar is not available, it will be downloaded automatically. If the CobiGen templates are already present, the existing templates in the workspace will be overridden. Click OK; once the import finishes, a success message will appear.

+
+
+

Finally, please change the Java version of the project to 1.8 so that you don’t have any compilation errors.

+
+ +
+

==Logging

+
+
+

If you have any problem with the CobiGen eclipse plug-in, you might want to enable logging to provide more information for further problem analysis. This can be done easily by adding the logback.xml to the root of the CobiGen_templates configuration folder. The file should contain at least the following contents, whereas you should specify an absolute path to the target log file (at the TODO). If you are using the cobigen-templates project, you might have the contents already specified but partially commented.

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<!-- This file is for logback classic. The file contains the configuration for sl4j logging -->
+<configuration>
+    <appender name="FILE" class="ch.qos.logback.core.FileAppender">
+        <file><!-- TODO choose your log file location --></file>
+        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
+            <Pattern>%n%date %d{HH:mm:ss.SSS} [%thread] %-5level %logger - %msg%n
+            </Pattern>
+        </encoder>
+    </appender>
+    <root level="DEBUG">
+        <appender-ref ref="FILE" />
+    </root>
+</configuration>
+
+
+
+
+
+

26.6. How to

+ +
+

==Enterprise Architect client generation

+
+
+

We are going to show you how to generate source code from an Enterprise Architect diagram +using CobiGen.

+
+
+
Prerequisites
+
+

If CobiGen_Templates is not already imported into your workspace, follow the next steps:

+
+
+
    +
  • +

    Click on the Eclipse’s menu File > Import > Existing Projects into Workspace and browse to select the workspaces/main/CobiGen_Templates directory.

    +
  • +
  • +

    Click Finish and you should have the CobiGen_Templates as a new project in Eclipse’s workspace.

    +
  • +
+
+
+

Also verify that you have the latest templates of CobiGen. Your templates folder must contain the crud_java_ea_uml folder. +If you do not see it, please follow the next steps:

+
+
+
    +
  • +

    Download the accumulative patch.

    +
  • +
  • +

    Open the zip file and extract its content inside the root folder of your Devonfw distribution Devon-dist_2.4.0/

    +
  • +
+
+
+

After following those steps correctly, you should have the latest version of the templates ready to use.

+
+
+
+
Generation
+
+

In this tutorial, we are going to generate the entity infrastructure using as input a class diagram, modelled with Enterprise Architect (EA). First, create a class diagram, an example is shown on figure below:

+
+
+
+Eclipse CobiGen generation +
+
+
+

When you are finished, you will have to export that UML diagram into an XMI version 2.1 file. This is the file format that CobiGen understands. See below a figure showing this process:

+
+
+
+Eclipse CobiGen generation +
+
+
+

To open that window, see this tutorial.

+
+
+

After having that exported file, change its extension from xmi to xml. Then create a devon4j project and import the exported file into the core of your devon4j project.

+
+
+

Now we are going to start the generation, right-click your exported file and select CobiGen > Generate, finally select the entity infrastructure increment:

+
+
+
+Eclipse CobiGen generation +
+
+
+

After following all these steps, your generated files should be inside src\main\java folder. If you want an XMI example, you will find it here.

+
+ +
+

==Angular 8 Client Generation

+
+
+

The generation can create a full Angular 8 client using the devon4ng-application-template package located at workspaces/examples folder of the distribution. For more details about this package, please refer here.

+
+
+

Take into account that the TypeScript merging for CobiGen needs Node 6 or higher to be installed at your machine.

+
+
+ + + + + +
+ + +This is a short introduction to the Angular generation. For a deeper tutorial including the generation of the backend, we strongly recommend you to follow this document. +
+
+
+
+
Requisites
+
+

Install yarn globally:

+
+
+
+
npm install -g yarn
+
+
+
+
+
Angular 8 workspace
+
+

The output location of the generation can be defined editing the cobigen.properties file located at crud_angular_client_app/templates folder of the CobiGen_Templates project.

+
+
+
+`cobigen.properties file` +
+
+
+

By default, the output path would be into the devon4ng-application-template folder at the root of the devon4j project parent folder:

+
+
+
+
root/
+ |- devon4ng-application-template/
+ |- devon4j-project-parent/
+   |- core/
+   |- server/
+
+
+
+

However, this path can be changed, for example to src/main/client folder of the devon4j project:

+
+
+

relocate: ./src/main/client/${cwd}

+
+
+
+
root/
+ |- devon4j-project-parent/
+   |- core/
+      |- src
+        |- main
+          |- client
+   |- server/
+
+
+
+

Once the output path is chosen, copy the files of DEVON4NG-APPLICATION-TEMPLATE repository into this output path.

+
+
+
+
Install Node dependencies
+
+

Open a terminal into devon4ng-application-template copied and just run the command:

+
+
+
+
yarn
+
+
+
+

This will start the installation of all node packages needed by the project into the node_modules folder.

+
+
+
+
Generating
+
+

From an ETO object, right click, CobiGen → Generate will show the CobiGen wizard relative to client generation:

+
+
+
+CobiGen Client Generation Wizard +
+
+
+

Check all the increments relative to Angular:

+
+
+ + + + + +
+ + +
+

The Angular devon4j URL increment is only needed for the first generation; however, checking it again on the next generation will not cause any problem.

+
+
+
+
+

As we have done in other generations, we click Next to choose which fields to include in the generation, or simply click Finish to start the generation.

+
+
+
+CobiGen Client Generation Wizard 3 +
+
+
+
+
Routing
+
+

Due to the nature of the TypeScript merger, it is currently not possible to properly merge the array of path objects of the routings in the app.routing.ts file, so this modification has to be done by hand in this file. However, the import related to the newly generated component is added.

+
+
+

This would be the generated app-routing.module file:

+
+
+
+
import { Routes, RouterModule } from '@angular/router';
+import { LoginComponent } from './login/login.component';
+import { AuthGuard } from './shared/security/auth-guard.service';
+import { InitialPageComponent } from './initial-page/initial-page.component';
+import { HomeComponent } from './home/home.component';
+import { SampleDataGridComponent } from './sampledata/sampledata-grid/sampledata-grid.component';
+//Routing array
+const appRoutes: Routes = [{
+    path: 'login',
+    component: LoginComponent
+}, {
+    path: 'home',
+    component: HomeComponent,
+    canActivate: [AuthGuard],
+    children: [{
+        path: '',
+        redirectTo: '/home/initialPage',
+        pathMatch: 'full',
+        canActivate: [AuthGuard]
+    }, {
+        path: 'initialPage',
+        component: InitialPageComponent,
+        canActivate: [AuthGuard]
+    }]
+}, {
+    path: '**',
+    redirectTo: '/login',
+    pathMatch: 'full'
+}];
+export const routing = RouterModule.forRoot(appRoutes);
+
+
+
+

Adding the following into the children object of home, will add into the side menu the entry for the component generated:

+
+
+
+
{
+    path: 'sampleData',
+    component: SampleDataGridComponent,
+    canActivate: [AuthGuard],
+}
+
+
+
+
+
import { Routes, RouterModule } from '@angular/router';
+import { LoginComponent } from './login/login.component';
+import { AuthGuard } from './shared/security/auth-guard.service';
+import { InitialPageComponent } from './initial-page/initial-page.component';
+import { HomeComponent } from './home/home.component';
+import { SampleDataGridComponent } from './sampledata/sampledata-grid/sampledata-grid.component';
+//Routing array
+const appRoutes: Routes = [{
+    path: 'login',
+    component: LoginComponent
+}, {
+    path: 'home',
+    component: HomeComponent,
+    canActivate: [AuthGuard],
+    children: [{
+        path: '',
+        redirectTo: '/home/initialPage',
+        pathMatch: 'full',
+        canActivate: [AuthGuard]
+    }, {
+        path: 'initialPage',
+        component: InitialPageComponent,
+        canActivate: [AuthGuard]
+    }, {
+        path: 'sampleData',
+        component: SampleDataGridComponent,
+        canActivate: [AuthGuard],
+    }]
+}, {
+    path: '**',
+    redirectTo: '/login',
+    pathMatch: 'full'
+}];
+export const routing = RouterModule.forRoot(appRoutes);
+
+
+
+
+`APP SideMenu` +
+
+
+
+
JWT Authentication
+
+

If you are using a backend server with JWT Authentication (there is a sample in workspaces/folder called sampleJwt) you have to specify the Angular application to use this kind of authentication.

+
+
+

By default the variable is set to CSRF, but you can change it to JWT by going to environment.ts and setting security: 'jwt'.

+
+
+
+
Running
+
+

First of all, run your devon4j java server by right clicking over SpringBootApp.java Run As → Java Application. This will start to run the SpringBoot server. Once you see the Started SpringBoot in XX seconds, the backend is running.

+
+
+
+Starting `SpringBoot` +
+
+
+

Once the server is running, open a Devon console at the output directory defined previously and run:

+
+
+
+
ng serve --open
+
+
+
+

This will run the Angular 8 application at:

+
+
+
+
http://localhost:4200
+
+
+
+
+Running Angular 8 app +
+
+
+

Once finished, the browser will open automatically at the previous localhost URL showing the Angular 8 application; using the credentials set at the devon4j java server, you will be able to access it.

+
+ +
+

==Ionic client generation

+
+
+

We are going to show you how to generate a CRUD Ionic application from an ETO +using CobiGen.

+
+
+ + + + + +
+ + +This is a short introduction to the Ionic generation. For a deeper tutorial including the generation of the backend, we strongly recommend you to follow this document. +
+
+
+
+
Prerequisites
+
+

Before starting, make sure you already have in your computer:

+
+
+
    +
  • +

    Ionic: by following the steps defined on that page. +It includes installing:

    +
    +
      +
    • +

      NodeJS: We have to use "NPM" for downloading packages.

      +
    • +
    • +

      Ionic CLI.

      +
    • +
    +
    +
  • +
  • +

    Capacitor: Necessary to access to native device features.

    +
  • +
+
+
+

If CobiGen_Templates are not already downloaded, follow the next steps:

+
+
+
    +
  • +

    Right click on any file of your workspace CobiGen > Update Templates and now you are able to start the generation.

    +
  • +
  • +

    If you want to adapt them, click Adapt Templates and you should have the CobiGen_Templates as a new project in Eclipse’s workspace.

    +
  • +
+
+
+

After following those steps correctly, you should have the latest version of the templates ready to use.

+
+
+
+
Generation
+
+

We are going to generate the CRUD into a sample application that we have developed for +testing this functionality. It is present on your workspaces/examples folder (devon4ng-ionic-application-template). If you do not see it, you can clone or download it from here.

+
+
+

After having that sample app, please create a devon4j project and then start implementing the ETO: You will find an example here.

+
+
+

As you can see, TableEto contains 3 attributes: 2 of them are Long and the third one TableState is an enum that you will find +here. +The Ionic generation works fine for any Java primitive attribute (Strings, floats, chars, boolean…​) and enums. However, if you want to use your own objects, you should +override the toString() method, as explained here.

+
+
+

The attributes explained above will be used for generating a page that shows a list. Each item of that list +will show the values of those attributes.

+
+
+

For generating the files:

+
+
+
    +
  • +

    Right click your ETO file and click on CobiGen > Generate as shown on the figure below.

    +
  • +
+
+
+
+Eclipse CobiGen generation +
+
+
+
    +
  • +

    Select the Ionic increments for generating as shown below. Increments group a set of templates for generating +different projects.

    +
    +
      +
    1. +

      Ionic List used for generating the page containing the list.

      +
    2. +
    3. +

      Ionic devon4ng environments is for stating the server path.

      +
    4. +
    5. +

      Ionic i18n used for generating the different language translations for the `translationService` (currently English and Spanish).

      +
    6. +
    7. +

      Ionic routing adds an app-routing.module.ts file to allow navigation similar to the one available in Angular.

      +
    8. +
    9. +

      Ionic theme generates the variables.scss file which contains variables to style the application.

      +
    10. +
    +
    +
  • +
+
+
+
+CobiGen Ionic Wizard +
+
+
+ + + + + +
+ + +By default, the generated files will be placed inside "devon4ng-ionic-application-template", next to the root of your project’s folder. +See the image below to know where they are generated. For changing the generation path and the name of the application go to CobiGen_Templates/crud_ionic_client_app/cobigen.properties. +
+
+
+
+Generation path +
+
+
+

Now that we have generated the files, let's start testing them:

+
+
+
    +
  • +

    First change the SERVER_URL of your application. For doing that, modify src/environments/environments.ts, also modify src/environments/environments.android.ts (android) and src/environments/environments.prod.ts (production) if you want to test in different environments.

    +
  • +
  • +

    Check that there are no duplicated imports. Sometimes there are duplicated imports in src/app/app.module.ts. +This happens because the merger of CobiGen prefers to duplicate rather than to delete.

    +
  • +
  • +

    Run npm install to install all the required dependencies.

    +
  • +
  • +

    Run `ionic serve` on your console.

    +
  • +
+
+
+

After following all these steps your application should start. However, remember that you will need your server to be running for access to the list page.

+
+
+
+
Running it on Android
+
+

To run the application in an android emulated device, it is necessary to have Android Studio and Android SDK. After its installation, the following commands have to be run on your console:

+
+
+
    +
  • +

    npx cap init "name-for-the-app (between quotes)" "id-for-the-app (between quotes)"

    +
  • +
  • +

    ionic build --configuration=android. To use this command, you must add an android build configuration at angular.json

    +
  • +
+
+
+
+
    "build": {
+      ...
+      "configurations": {
+        ...
+        "android": {
+          "fileReplacements": [
+            {
+              "replace": "src/environments/environment.ts",
+              "with": "src/environments/environment.android.ts"
+            }
+          ]
+        },
+      }
+    }
+
+
+
+
    +
  • +

    npx cap add android

    +
  • +
  • +

    npx cap copy

    +
  • +
  • +

    npx cap open android

    +
  • +
+
+
+

The last steps are done in Android Studio: make the project, make the app, build an APK and run it on a device.

+
+
+
+Click on make project +
+
+
+
+click on make app +
+
+
+
+click on build APK +
+
+
+
+click on running device +
+
+ +
+

==Implementing a new Plug-in

+
+
+

New plug-ins can implement an input reader, a merger, a matcher, a trigger interpreter, and/or a template engine as explained here.

+
+
+ + + + + +
+ + +
+

It is discouraged to have cobigen-core dependencies at runtime, except for cobigen-core-api which definitely must be present.

+
+
+
+
+
+
Plugin Activator
+
+

Each plug-in has to have a plug-in activator class implementing the interface GeneratorPluginActivator from the core-api. This class will be used to load the plug-in using the PluginRegistry as explained here. This class implements two methods:

+
+
+
    +
  1. +

    bindMerger() → returns a mapping of merge strategies and its implementation to be registered.

    +
  2. +
  3. +

    bindTriggerInterpreter()→ returns the trigger interpreters to be provided by this plug-in.

    +
  4. +
+
+
+

Both methods create and register instances of mergers and trigger interpreters to be provided by the new plug-in.

+
+
+
+
Adding Trigger Interpreter
+
+

The trigger interpreter has to implement the TriggerInterpreter interface from the core. The trigger interpreter defines the type for the new plugin and creates new InputReader and new Matcher objects.

+
+
+
+
Adding Input Reader
+
+

The input reader is responsible for reading the input object and parsing it into FreeMarker models. The input reader must be implemented for the type of the input file. If there is any existing plugin that has the same file type as input, there will be no need to add a new input reader to the new plug-in.

+
+
+
Input Reader Interface
+
+

The interface needed to add a new input reader is defined at the core. Each new sub plug-in must implement this interface if an input reader is needed for it.

+
+
+

The interface implements the basic methods that an input reader must have, +but if additional methods are required, the developer must add a new interface +that extends the original interface `InputReader.java` from the core-api +and implement that on the sub plug-in.

+
+
+

The methods to be implemented by the input reader of the new sub plugin are:

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
MethodReturn TypeDescription

isValidInput(Object input)

boolean

This function will be called if matching triggers or matching templates should be retrieved for a given input object.

createModel(Object input)

Map<String, Object>

This function should create the FreeMarker object model from the given input.

combinesMultipleInputObjects(Object input)

boolean

States whether the given input object combines multiple input objects to be used for generation.

getInputObjects(Object input, Charset inputCharset)

List<Object>

Will return the set of combined input objects if the given input combines multiple input objects.

getTemplateMethods(Object input)

Map<String, Object>

This method returns available template methods from the plugins as Map. If the plugin which corresponds to the input does not provide any template methods an empty Map will be returned.

getInputObjectsRecursively(Object input, Charset inputCharset)

List<Object>

Will return the set of combined input objects if the given input combines multiple input objects.

+
+
+
Model Constants
+
+

The Input reader will create a model for FreeMarker. A FreeMarker model must +have variables to use them at the .ftl template file. Refer to Java Model to see the FreeMarker model example for java input files.

+
+
+
+
Registering the Input Reader
+
+

The input reader is an object that can be retrieved using the corresponding get method of the trigger interpreter object. The trigger interpreter object is loaded at the eclipse plug-in using the load plug-in method explained here. That way, when the core needs the input reader, it only needs to call that getInputReader method.

+
+
+
+
+
Adding Matcher
+
+

The matcher implements the MatcherInterpreter interface from the core-api. It should be implemented to provide a new input matcher. Input matchers are defined as part of a trigger and provide the ability to restrict specific inputs to a set of templates. This restriction is implemented with a MatcherType enum.

+
+
+

E.g JavaPlugin

+
+
+
+
private enum MatcherType {
+    /** Full Qualified Name Matching */
+    FQN,
+    /** Package Name Matching */
+    PACKAGE,
+    /** Expression interpretation */
+    EXPRESSION
+}
+
+
+
+

Furthermore, matchers may provide several variable assignments, which might be +dependent on any information of the matched input and thus should be resolvable +by the defined matcher.

+
+
+

E.g JavaPlugin

+
+
+
+
private enum VariableType {
+    /** Constant variable assignment */
+    CONSTANT,
+    /** Regular expression group assignment */
+    REGEX
+}
+
+
+
+
+
Adding Merger
+
+

The merger is responsible for merging the new output with the existing data in the file if it already exists. It must implement the Merger interface from the core-api. The implementation of the Merger interface must override the following methods:

+
+ +++++ + + + + + + + + + + + + + + + + + + + +
MethodReturn TypeDescription

getType()

String

Returns the type, this merger should handle.

merge(File base, String patch, String targetCharset)

String

Merges the patch into the base file.

+
+

It is important to know that any exception caused by the merger must be thrown as a MergeException from the core-api so that the eclipse-plugin can handle it.

+
+
+
+
Changes since Eclipse / Maven 3.x
+
+

Since version 3.x the Eclipse and Maven plugins of CobiGen utilize the Java ServiceLoader mechanic to find and register plugins at runtime. To enable a new plugin to be discovered by this mechanic the following steps are needed:

+
+
+
    +
  • +

    create the file META-INF/services/com.devonfw.cobigen.api.extension.GeneratorPluginActivator containing just the full qualified name of the class implementing the GeneratorPluginActivator interface, if the plugin provides a Merger and/or a TriggerInterpreter

    +
  • +
  • +

    create the file META-INF/services/com.devonfw.cobigen.api.extension.TextTemplateEngine containing just the full qualified name of the class implementing the TextTemplateEngine interface, if provided by the plugin

    +
  • +
  • +

    include META-INF into the target bundle (i.e. the folder META-INF has to be present in the target jar file)

    +
  • +
+
+
+
+
Example: Java Plugin
+
+

The java plugin provides both a Merger and a TriggerInterpreter. It contains therefore a com.devonfw.cobigen.api.extension.GeneratorPluginActivator file with the following content:

+
+
+
+
com.devonfw.cobigen.javaplugin.JavaPluginActivator
+
+
+
+

This makes the JavaPluginActivator class discoverable by the ServiceLoader at runtime.

+
+
+
+
+
    +
  • +

    to properly include the plugin into the current system and use existing infrastructure, you need to add the plugin as a module in /cobigen/pom.xml (in case of a Merger/TriggerInterpreter providing plugin) and declare that as the plugin’s parent in it’s own pom.xml via

    +
  • +
+
+
+
+
<parent>
+    <groupId>com.devonfw</groupId>
+    <artifactId>cobigen-parent</artifactId>
+    <version>dev-SNAPSHOT</version>
+</parent>
+
+
+
+

or /cobigen/cobigen-templateengines/pom.xml (in case of a Merger/TriggerInterpreter providing plugin) and declare that as the plugin’s parent in it’s own pom.xml via

+
+
+
+
<parent>
+    <groupId>com.devonfw</groupId>
+    <artifactId>cobigen-tempeng-parent</artifactId>
+    <version>dev-SNAPSHOT</version>
+</parent>
+
+
+
+

If the plugin provides both just use the /cobigen/pom.xml.

+
+
+
    +
  • +

    The dependencies of the plugin are included in the bundle

    +
  • +
  • +

    To make the plugin available to the Eclipse plugin it must be included into the current compositeContent.xml and compositeArtifacts.xml files. Both files are located in https://github.com/devonfw/cobigen/tree/gh-pages/updatesite/{test|stable}. To do so, add an <child> entry to the <children> tag in both files and adapt the size attribute to match the new number of references. The location attribute of the new <child> tag needs to be the artifact id of the plugins pom.xml.

    +
  • +
+
+
+
+
Example: Java Plugin
+
+

In case of the Java plugin, the entry is

+
+
+
+
<child location="cobigen-javaplugin"/>
+
+
+
+
+
+
Deployment
+
+

If you want to create a test release of eclipse you need to run the command

+
+
+
+
sh deploy.sh
+
+
+
+

on the cloned CobiGen repository while making sure, that your current version of CobiGen cloned is a snapshot version. This will automatically be detected by the deploy script.

+
+ +
+

==Introduction to CobiGen external plug-ins

+
+
+

Since September of 2019, a major change on CobiGen has taken place. CobiGen is written in Java code and previously, it was very hard for developers to create new plug-ins in other languages.

+
+
+

Creating a new plug-in means:

+
+
+
    +
  • +

    Being able to parse a file in that language.

    +
  • +
  • +

    Create a human readable model that can be used to generate templates (by retrieving properties from the model).

    +
  • +
  • +

    Enable merging files, so that user’s code does not get removed.

    +
  • +
+
+
+

For the Java plug-in it was relatively easy. As you are inside the Java world, you can use multiple utilities or libraries in order to get the AST or to merge Java code. With this new feature, we wanted that behaviour to be possible for any programming language.

+
+
+
+
+
General intuition
+
+

Below you will find a very high level description of how CobiGen worked in previous versions:

+
+
+
+Old CobiGen +
+
+
+

Basically, when a new input file was sent to CobiGen, it called the input reader to create a model of it (see here an example of a model). That model was sent to the template engine.

+
+
+

Afterwards, the template engine generated a new file which had to be merged with the original one. All this code was implemented in Java.

+
+
+

On the new version, we have implemented a handler (ExternalProcessHandler) which connects through TCP/IP connection to a server (normally on localhost:5000). This server can be implemented in any language (.Net, Node.js, Python…​) it just needs to implement a REST API defined here. The most important services are the input reading and merging:

+
+
+
+New CobiGen +
+
+
+

CobiGen acts as a client that sends requests to the server in order to read the input file and create a model. The model is returned to the template engine so that it generates a new file. Finally, it is sent back to get merged with the original file.

+
+
+
+
How to create new external plug-in
+
+

The creation of a new plug-in consists mainly in three steps:

+
+
+
    +
  • +

    Creation of the server (external process).

    +
  • +
  • +

    Creation of a CobiGen plug-in.

    +
  • +
  • +

    Creation of templates.

    +
  • +
+
+
+
Server (external process)
+
+

The server can be programmed in any language that is able to implement REST service endpoints. The API that needs to be implemented is defined by this contract. You can paste the content into https://editor.swagger.io/ for a better look.

+
+
+

We have already created a NestJS server that implements the API defined above. You can find the code here which you can use as an example.

+
+
+

As you can see, the endpoints have the following naming convention: processmanagement/todoplugin/nameOfService where you will have to change todo to your plug-in name (e.g. rustplugin, pyplugin, goplugin…​)

+
+
+

When implementing service getInputModel which returns a model from the input file there are only two restrictions:

+
+
+
    +
  • +

    A path key must be added. Its value can be the full path of the input file or just the file name. It is needed because in CobiGen there is a batch mode, in which you can have multiple input objects inside the same input file. You do not need to worry about batch mode for now.

    +
  • +
  • +

    On the root of your model, for each found key that is an object (defined with brackets [{}]), CobiGen will try to use it as an input object. For example, this could be a valid model:

    +
    +
    +
    {
    +  "path": "example/path/employee.entity.ts"
    +  "classes": [
    +    {
    +      "identifier": "Employee",
    +      "modifiers": [
    +        "export"
    +      ],
    +      "decorators": [
    +        {
    +          "identifier": {
    +            "name": "Entity",
    +            "module": "typeorm"
    +          },
    +          "isCallExpression": true
    +        }
    +      ],
    +      "properties": [
    +        {
    +          "identifier": "id",
    +    ...
    +    ...
    +    ...
    +    }]
    +    "interfaces": [{
    +        ...
    +    }]
    +}
    +
    +
    +
  • +
+
+
+

For this model, CobiGen would use as input objects all the classes and interfaces defined. On the templates we would be able to do model.classes[0].identifier to get the class name. These input objects depend on the language, therefore you can use any key.

+
+
+

In order to test the server, you will have to deploy it on your local machine (localhost), default port is 5000. If that port is already in use, you can deploy it on higher port values (5001, 5002…​). Nevertheless, we explain later the testing process as you need to complete the next step before.

+
+
+ + + + + +
+ + +Your server must accept one argument when running it. The argument will be the port number (as an integer). This will be used for CobiGen in order to handle blocked ports when deploying your server. Check this code to see how we implemented that argument on our NestJS server. +
+
+
+
+
CobiGen plug-in
+
+

You will have to create a new CobiGen plug-in that connects to the server. But do not worry, you will not have to implement anything new. We have a CobiGen plug-in template available, the only changes needed are renaming files and setting some properties on the pom.xml. Please follow these steps:

+
+
+
    +
  • +

    Get the CobiGen plug-in template from here. It is a template repository (new GitHub feature), so you can click on "Use this template" as shown below:

    +
    +
    +Plugin CobiGen template +
    +
    +
  • +
  • +

    Name your repo as cobigen-name-plugin where name can be python, rust, go…​ In our case we will create a nest plug-in. It will create a repo with only one commit which contains all the needed files.

    +
  • +
  • +

    Clone your just created repo and import folder cobigen-todoplugin as a Maven project on any Java IDE, though we recommend you devonfw ;)

    +
    +
    +Import plugin +
    +
    +
  • +
  • +

    Rename all the todoplugin folders, files and class names to nameplugin. In our case nestplugin. In Eclipse you can easily rename by right clicking and then refactor → rename:

    +
  • +
+
+
+
+Rename plugin +
+
+
+ + + + + +
+ + +We recommend you to select all the checkboxes +
+
+
+
+Rename checkbox +
+
+
+
    +
  • +

    Remember to change in src/main/java and src/test/java all the package, files and class names to use your plug-in name. The final result would be:

    +
    +
    +Package structure +
    +
    +
  • +
  • +

    Now we just need to change some strings, this is needed for CobiGen to register all the different plugins (they need unique names). In class TodoPluginActivator (in our case NestPluginActivator), change all the todo to your plug-in name. See below the 3 strings that need to be changed:

    +
    +
    +Plugin activator +
    +
    +
  • +
  • +

    Finally, we will change some properties from the pom.xml of the project. These properties define the server (external process) that is going to be used:

    +
    +
      +
    1. +

      Inside pom.xml, press Ctrl + F to perform a find and replace operation. Replace all todo with your plugin name:

      +
      +
      +Pom properties +
      +
      +
    2. +
    3. +

      We are going to explain the server properties:

      +
      +
        +
      1. +

        artifactId: This is the name of your plug-in, that will be used for a future release on Maven Central.

        +
      2. +
      3. +

        plugin.name: does not need to be changed as it uses the property from the artifactId. When connecting to the server, it will send a request to localhost:5000/{plugin.name}plugin/isConnectionReady, that is why it is important to use an unique name for the plug-in.

        +
      4. +
      5. +

        server.name: This defines how the server executable (.exe) file will be named. This .exe file contains all the needed resources for deploying the server. You can use any name you want.

        +
      6. +
      7. +

        server.version: You will specify here the server version that needs to be used. The .exe file will be named as {server.name}-{server.version}.exe.

        +
      8. +
      9. +

        server.url: This will define from where to download the server. We really recommend you using NPM which is a package manager we know it works well. We explain here how to release the server on NPM. This will download the .exe file for Windows.

        +
      10. +
      11. +

        server.url.linux: Same as before, but this should download the .exe file for Linux systems. If you do not want to implement a Linux version of the plug-in, just use the same URL from Windows or MacOS.

        +
      12. +
      13. +

        server.url.macos: Same as before, but this should download the .exe file for MacOS systems. If you do not want to implement a MacOS version of the plug-in, just use the same URL from Linux or Windows.

        +
      14. +
      +
      +
    4. +
    +
    +
  • +
+
+
+
+
+
Testing phase
+
+

Now that you have finished with the implementation of the server and the creation of a new CobiGen plug-in, we are going to explain how you can test that everything works fine:

+
+
+
    +
  1. +

    Deploy the server on port 5000.

    +
  2. +
  3. +

    Run mvn clean test on the CobiGen-plugin or run the JUnit tests directly on Eclipse.

    +
    +
      +
    1. +

      If the server and the plug-in are working properly, some tests will pass and other will fail (we need to tweak them).

      +
    2. +
    3. +

      If every test fails, something is wrong in your code.

      +
    4. +
    +
    +
  4. +
  5. +

    In order to fix the failing tests, go to src/test/java. The failing tests make use of sample input files that we added in sake of example:

    +
    +
    +Pom properties +
    +
    +
  6. +
+
+
+

Replace those files (on src/test/resources/testdata/unittest/files/…​) with the correct input files for your server.

+
+
+
+
Releasing
+
+

Now that you have already tested that everything works fine, we are going to explain how to release the server and the plug-in.

+
+
+
Release the server
+
+

We are going to use NPM to store the executable of our server. Even though NPM is a package manager for JavaScript, it can be used for our purpose.

+
+
+
    +
  • +

    Get the CobiGen server template from here. It is a template repository (new GitHub feature), so you can click on "Use this template" as shown below:

    +
    +
    +Server CobiGen template +
    +
    +
  • +
  • +

    Name your repo as cobigen-name-server where name can be python, rust, go…​ In our case we will create a nest plug-in. It will create a repo with only one commit which contains all the needed files.

    +
  • +
  • +

    Clone your just created repo and go to folder cobigen-todo-server. It will just contain two files: ExternalProcessContract.yml is the OpenAPI definition which you can modify with your own server definition (this step is optional), and package.json is a file needed for NPM in order to define where to publish this package:

    +
    +
    +
    {
    +  "name": "@devonfw/cobigen-todo-server",
    +  "version": "1.0.0",
    +  "description": "Todo server to implement the input reader and merger for CobiGen",
    +  "author": "CobiGen Team",
    +  "license": "Apache"
    +}
    +
    +
    +
  • +
+
+
+

Those are the default properties. This would push a new package cobigen-todo-server on the devonfw organization, with version 1.0.0. We have no restrictions here, you can use any organization, though we always recommend devonfw.

+
+
+ + + + + +
+ + +Remember to change all the todo to your server name. +
+
+
+
    +
  • +

    Add your executable file into the cobigen-todo-server folder, just like below. As we said previously, this .exe is the server ready to be deployed.

    +
    +
    +
    cobigen-template-server/
    + |- cobigen-todo-server/
    +   |- ExternalProcessContract.yml
    +   |- package.json
    +   |- todoserver-1.0.0.exe
    +
    +
    +
  • +
  • +

    Finally, we have to publish to NPM. If you have never done it, you can follow this tutorial. Basically you need to login into NPM and run:

    +
    +
    +
    cd cobigen-todo-server/
    +npm publish --access=public
    +
    +
    +
  • +
+
+
+ + + + + +
+ + +To release Linux and MacOS versions of your plug-in, just add the suffix into the package name (e.g. @devonfw/cobigen-todo-server-linux) +
+
+
+

That’s it! You have published the first version of your server. Now you just need to modify the properties defined on the pom of your CobiGen plug-in. Please see next section for more information.

+
+
+
+
Releasing CobiGen plug-in
+
+
    +
  • +

    Change the pom.xml to define all the properties. You can see below a final example for nest:

    +
    +
    +
    ...
    +   <groupId>com.devonfw.cobigen</groupId>
    +   <artifactId>nestplugin</artifactId>
    +   <name>CobiGen - Nest Plug-in</name>
    +   <version>1.0.0</version>
    +   <packaging>jar</packaging>
    +   <description>CobiGen - nest Plug-in</description>
    +
    +   <properties>
    +      <!-- External server properties -->
    +      <plugin.name>${project.artifactId}</plugin.name>
    +      <server.name>nestserver</server.name>
    +      <server.version>1.0.0</server.version>
    +      <server.url>https\://registry.npmjs.org/@devonfw/cobigen-nest-server/-/cobigen-nest-server-${server.version}.tgz</server.url>
    +      <server.url.linux>https\://registry.npmjs.org/@devonfw/cobigen-nest-server-linux/-/cobigen-nest-server-linux-${server.version}.tgz</server.url.linux>
    +      <server.url.macos>https\://registry.npmjs.org/@devonfw/cobigen-nest-server-macos/-/cobigen-nest-server-macos-${server.version}.tgz</server.url.macos>
    +...
    +
    +
    +
  • +
  • +

    Deploy to Maven Central.

    +
  • +
+
+
+
+
+
Templates creation
+
+

After following above steps, we now have a CobiGen plug-in that connects to a server (external process) which reads your input files, returns a model and is able to merge files.

+
+
+

However, we need a key component for our plug-in to be useful. We need to define templates:

+
+
+
    +
  • +

    Fork our CobiGen main repository, from here and clone it into your PC. Stay in the master branch and import into your IDE cobigen-templates\templates-devon4j. Set the Java version of the project to 1.8 if needed.

    +
  • +
  • +

    Create a new folder on src/main/templates, this will contain all your templates. You can use any name, but please use underscores as separators. In our case, we created a folder crud_typescript_angular_client_app to generate an Angular client from a TypeORM entity (NodeJS entity).

    +
    +
    +Templates project +
    +
    +
  • +
  • +

    Inside your folder, create a templates folder. As you can see below, the folder structure of the generated files starts here (the sources). Also we need a configuration file templates.xml that should be on the same level as templates/ folder. For now, copy and paste a templates.xml file from any of the templates folder.

    +
    +
    +Templates project +
    +
    +
  • +
  • +

    Start creating your own templates. Our default templates language is Freemarker, but you can also use Velocity. Add the extension to the file (.ftl) and start developing templates! You can find useful documentation here.

    +
  • +
  • +

    After creating all the templates, you need to modify context.xml which is located on the root of src/main/templates. There you need to define a trigger, which is used for CobiGen to know when to trigger a plug-in. I recommend you to copy and paste the following trigger:

    +
    +
    +
      <trigger id="crud_typescript_angular_client_app" type="nest" templateFolder="crud_typescript_angular_client_app">
    +    <matcher type="fqn" value="([^\.]+).entity.ts">
    +      <variableAssignment type="regex" key="entityName" value="1"/>
    +      <variableAssignment type="regex" key="component" value="1"/>
    +      <variableAssignment type="constant" key="domain" value="demo"/>
    +    </matcher>
    +  </trigger>
    +
    +
    +
  • +
  • +

    Change templateFolder to your templates folder name. id you can use any, but it is recommendable to use the same as the template folder name. type is the TRIGGER_TYPE we defined above on the NestPluginActivator class. On matcher just change the value: ([^\.]+).entity.ts means that we will only accept input files that contain anyString.entity.ts. This improves usability, so that users only generate using the correct input files. You will find more info about variableAssignment here.

    +
  • +
  • +

    Finally, it is time to configure templates.xml. It is needed for organizing templates into increments; please take a look at this documentation.

    +
  • +
+
+
+
Testing templates
+
+
    +
  • +

    When you have finished your templates you will want to test them. On the templates-devon4j pom.xml remove the SNAPSHOT from the version (in our case the version will be 3.1.8). Run mvn clean install -DskipTests on the project. We skip tests because you need special permissions to download artifacts from our Nexus. Remember the version that has just been installed:

    +
    +
    +Templates snapshot version +
    +
    +
  • +
+
+
+ + + + + +
+ + +We always recommend using the devonfw console, which already contains a working Maven version. +
+
+
+
    +
  • +

    Now we have your last version of the templates ready to be used. We need to use that latest version in CobiGen. We will use the CobiGen CLI that you will find in your cloned repo, at cobigen-cli/cli. Import the project into your IDE.

    +
  • +
  • +

    Inside the project, go to src/main/resources/pom.xml. This pom.xml is used on runtime in order to install all the CobiGen plug-ins and templates. Add there your latest templates version and the previously created plug-in:

    +
    +
    +CLI pom +
    +
    +
  • +
  • +

    Afterwards, run mvn clean install -DskipTests and CobiGen will get your plug-ins. Now you have three options to test templates:

    +
    +
      +
    1. +

      Using Eclipse run as:

      +
      +
        +
      1. +

        Inside Eclipse, you can run the CobiGen-CLI as a Java application. Right click class CobiGenCLI.java → run as → run configurations…​ and create a new Java application as shown below:

        +
        +
        +Create configuration +
        +
        +
      2. +
      3. +

        That will create a CobiGenCLI configuration where we can set arguments to the CLI. Let’s first begin with showing the CLI version, which should print a list of all plug-ins, including ours.

        +
        +
        +Run version +
        +
        +
        +
        +
         ...
        + name:= propertyplugin version = 2.0.0
        + name:= jsonplugin version = 2.0.0
        + name:= templates-devon4j version = 3.1.8
        + name:= nestplugin version = 1.0.0
        + ...
        +
        +
        +
      4. +
      5. +

        If that worked, now you can send any arguments to the CLI in order to generate with your templates. Please follow this guide that explains all the CLI commands.

        +
      6. +
      +
      +
    2. +
    3. +

      Modify the already present JUnit tests on the CLI project: They test the generation of templates from multiple plug-ins, you can add your own tests and input files.

      +
    4. +
    5. +

      Use the CLI jar to execute commands:

      +
      +
        +
      1. +

        The mvn clean install -DskipTests command will have created a Cobigen.jar inside your target folder (cobigen-cli/cli/target). Open the jar with any unzipper and extract to the current location class-loader-agent.jar, cobigen.bat and cg.bat:

        +
        +
        +Extract files +
        +
        +
      2. +
      3. +

        Now you can run any CobiGen CLI commands using a console. This guide explains all the CLI commands.

        +
        +
        +Run CLI +
        +
        +
      4. +
      +
      +
    6. +
    +
    +
  • +
+
+ +
+
+
+
devon4net CobiGen Guide
+
+
Overview
+
+

In this guide we will explain how to generate a new WebAPI project from an OpenAPI 3.0.0 specification. This means that we are going to use a “contract first” strategy. This is going to be possible due to these type of files that contain all the information about entities, operations, etc…

+
+
+

In order to make it work we are using CobiGen, a powerful tool for generating source code. CobiGen allows users to generate all the structure and code of the components, helping to save a lot of time otherwise wasted on repetitive tasks.

+
+
+
+
Getting things ready
+
+devonfw-IDE +
+

First, we will install the devonfw-IDE. It is a tool that will set up your IDE within minutes. Please follow the install guide here.

+
+
+
+devon4net Templates +
+

We are going to use the template of devon4net as a base to generate all the code, so what we have to do now is to download said template using the following steps.

+
+
+

First of all, you have to set up the environment for .NET; you can do this using the following tutorial. Next, we are going to create a new folder where we want to have the WebAPI project; lastly, we are going to open a terminal there.

+
+
+

Type the following:

+
+
+
+
`dotnet new -i Devon4Net.WebAPI.Template`
+
+
+
+

and then:

+
+
+
+
`dotnet new Devon4NetAPI`
+
+
+
+
+OpenAPI File +
+

In order to let CobiGen generate all the files, we first have to make some modifications to our OpenAPI file.

+
+
+

It is obligatory to put the “x-rootpackage” tag to indicate where CobiGen will place the generated files, as well as the "x-component" tags for each component. Keep in mind that, due to CobiGen’s limitations, each component must have its own entity.

+
+
+

You can read more information about how to configure your OpenAPI file and a working example here.

+
+
+
+
+
Generating files
+
+

CobiGen allows us to generate the files in two different ways. One of them is using Eclipse, which can be done by using its graphical interface. The other way to generate the code is using the CobiGen CLI tool.

+
+
+Generating files through Eclipse +
+

In order to generate the files using Eclipse we need to follow some simple steps.

+
+
+

First we are going to import our basic devon4net WebAPI project into Eclipse. To do so, open Eclipse with the “eclipse-main.bat” file that can be found in the devon distribution root folder. Once we are inside Eclipse we go to File > Open projects from file system…​ and, under "Directory", search for your project.

+
+
+
+CobiGen +
+
+
+

Next we copy our OpenAPI file into the root folder of the project.

+
+
+
+CobiGen +
+
+
+

And then we right click on OpenAPI file and then select CobiGen > Generate…​ It will display a window like this:

+
+
+
+CobiGen +
+
+
+

To select all .NET features choose CRUD devon4net Server otherwise you can select only those that interest you.

+
+
+
+CobiGen +
+
+
+

Once you select all the files that you want to generate, click on the “Finish” button to generate all the source code.

+
+
+
+Generating files through CobiGen CLI +
+

In order to generate the files using the CobiGen CLI it is needed to do the following steps:

+
+
+
    +
  1. +

    Go to devonfw distribution folder

    +
  2. +
  3. +

    Run console.bat, this will open a console.

    +
  4. +
  5. +

    Go to the folder you downloaded the devon4net template and your yml file.

    +
  6. +
  7. +

    Run the command:

    +
    +
    +
    `cobigen generate {yourOpenAPIFile}.yml`
    +
    +
    +
  8. +
  9. +

    A list of increments will be printed so that you can start the generation. The CRUD devon4net Server increment has to be selected.

    +
  10. +
+
+
+
+
+
Configuration
+
+Data base +
+

CobiGen generates an empty context that has to be filled in manually in order to be able to work with the database. The context can be found in [Project_Name]/Devon4Net.WebAPI.Implementation/Domain/Database/CobigenContext.cs.

+
+
+
+CobiGen +
+
+
+
+Run the application +
+

After the configuration of the database, open a terminal in path: [Project_Name]/Devon4Net.Application.WebAPI and then type:

+
+
+
+
`dotnet run`
+
+
+
+

This will deploy our application in our localhost with the port 8082, so when you click here (https://localhost:8082/swagger) you can see, in swagger, all the services and the data model.

+
+ +
+

== How to update CobiGen

+
+
+

In order to update CobiGen from our devonfw distribution, we have two options:

+
+
+
    +
  • +

    Open Eclipse, click on HelpCheck for updates

    +
  • +
+
+
+
+Check updates +
+
+
+
    +
  • +

    Select all the CobiGen plugins listed and click on Next.

    +
  • +
+
+
+
+All the updates +
+
+
+

If this option is not working properly, then you can try the second option:

+
+
+
    +
  • +

    Open Eclipse, click on HelpAbout Eclipse IDE:

    +
  • +
+
+
+
+About Eclipse +
+
+
+
    +
  • +

    Click on Installation details:

    +
  • +
+
+
+
+Installation details +
+
+
+
    +
  • +

    Select all the CobiGen plugins and click on Update:

    +
  • +
+
+
+
+All updates details +
+
+
+

After the update process finishes, remember to restart Eclipse.

+
+
+
+
+
+
Updating templates:
+
+

To update your CobiGen templates to the latest version, you just need to do one step:

+
+
+
    +
  • +

    Right click any file on your package explorer, click on CobiGenUpdate templates, then click on download:

    +
  • +
+
+
+
+Update templates +
+
+
+

Now you will have the latest templates ready!

+
+
+

Unresolved include directive in modules/ROOT/pages/cobigen.wiki/master-cobigen.adoc - include::howto-Cobigen-CLI-generation.adoc[]

+
+ +
+

== End to End POC Code generation using Entity class

This article helps to create a sample application using CobiGen.

+
+
+
+
Prerequisites
+
+

Download and install the devonfw IDE here.

+
+
+
+
Steps to create a Sample Project using Cobigen
+
+

The HOW_TO is divided in 2 parts:

+
+
+
    +
  1. +

    BE-Back End generator (DB + DAO + services) – CONTRACT FIRST APPROACH

    +
  2. +
  3. +

    FE-Front End generator (Web App Angular + Ionic App) – CONTRACT FIRST APPROACH

    +
  4. +
+
+
+

cobigen ionic code generation

+
+
+

So, ready to go! We’re going to start from the BE part …

+
+
+
Back End
+
+

run \devonfw-ide-scripts-3.2.4\eclipse-main.bat

+
+
+

It will open eclipse

+
+
+

create a project using below command from the command prompt

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

Import the project to eclipse as maven project +eclipse devon

+
+
+

Click FINISH

+
+
+

Now We have the following 4 projects.

+
+
+

eclipse package explorer

+
+
+

BEFORE to start to create an Entity class, remember to create the tables !

+
+
+
    +
  1. +

    Create a new SQL file (i.e: V0005__CreateTables-ItaPoc.sql) inside myapp-core and insert the following script:

    +
  2. +
+
+
+
+
CREATE TABLE EMPLOYEE (
+id BIGINT auto_increment, modificationCounter INTEGER NOT NULL,
+employeeid BIGINT auto_increment,
+name VARCHAR(255),
+surname VARCHAR(255),
+email VARCHAR(255),
+PRIMARY KEY (employeeid)
+);
+
+
+
+

WARNING: please note that there are 2 underscores in the name!

+
+
+

sql file

+
+
+
    +
  1. +

    Now create another SQL file (i.e: V0006__PopulateTables-ItaPoc.sql) and add following script about the INSERT in order to populate the table created before

    +
  2. +
+
+
+

WARNING: please note that there are 2 underscores in the name!

+
+
+
+
INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (1, 1, 1, 'Albert','Miller','albert.miller@capgemini.com');
+INSERT INTO  EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (2, 2, 2, 'Wills','Smith', 'wills.smith@capgemini.com');
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (3, 3, 3, 'Jaime','Thomas', 'jaime.thomas@capgemini.com');
+
+
+
+

sql insert

+
+
+

Let’s create the Entity Class for the code generation

+
+
+
    +
  1. +

    Create a package employeemanagement.dataaccess.api under the folder myapp-core. Note: It is important to follow this naming convention for CobiGen to work properly.

    +
  2. +
+
+
+

package

+
+
+
    +
  1. +

    Now create a JPA Entity class in this package

    +
  2. +
+
+
+
+
import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Column;
+@Entity
+@javax.persistence.Table(name = "EMPLOYEE")
+public class EmployeeEntity {
+  @Column(name = "EMPLOYEEID")
+  @GeneratedValue(strategy = GenerationType.IDENTITY)
+  private Long employeeId;
+  @Column(name = "NAME")
+  private String name;
+  @Column(name = "SURNAME")
+  private String surname;
+  @Column(name = "EMAIL")
+  private String email;
+}
+
+
+
+

then generate getters and setters for all attributes …

+
+
+
    +
  1. +

    Use Cobigen to generate code. Right click on EmployeeEntity. CobiGen → Generate

    +
  2. +
+
+
+

It will ask you to download the templates, click on update:

+
+
+

cobigen generate

+
+
+

It will automatically download the latest version of CobiGen_Templates.

+
+
+

Attention: If you want to adapt the CobiGen_Templates (normally this is not necessary), you will find at the end of this document a tutorial on how to import them and adapt them!

+
+
+
    +
  1. +

    Click on all the option selected as below:

    +
  2. +
+
+
+

cobigen option selection

+
+
+
    +
  1. +

    Click on finish. Below Screen would be seen. Click on continue

    +
  2. +
+
+
+

cobigen finish

+
+
+

The entire BE layer structure having CRUD operation methods will be auto generated.

+
+
+

Some classes will be generated on the api part (myapp-api), normally it will be interfaces, as shown below:

+
+
+

be layer

+
+
+

Some other classes will be generated on the core part (myapp-core), normally it will be implementations as shown below:

+
+
+

core folder

+
+
+

BEFORE to generate the FE, please start the Tomcat server to check that BE Layer has been generated properly.

+
+
+

To start a server you just have to right click on SpringBootApp.javarun as → Spring Boot app

+
+
+

Eclipse run as

+
+
+

Spring boot run

+
+
+

Spring boot run

+
+
+

BE DONE

+
+
+

Last but not least: We make a quick REST services test !

+
+
+

See in the application.properties the TCP Port and the PATH

+
+
+

application properties

+
+
+

Now compose the Rest service URL:

+
+
+

<server>/<app>/services/rest/<rest service class path>/<service method path>

+
+
+
    +
  • +

    <server> refers to server with port no. (ie: localhost:8081)

    +
  • +
  • +

    <app> is in the application.properties (empty in our case, see above)

    +
  • +
  • +

    <rest service class path> refers to EmployeemanagementRestService: (i.e: /employeemanagement/v1)

    +
  • +
  • +

    <service method path>/employee/{id} (i.e: for getEmployee method)

    +
  • +
+
+
+

url mapping

+
+
+

URL of getEmployee for this example is:

+
+
+

for all employees

+
+
+
+
http://localhost:8081/services/rest/employeemanagement/v1/employee/search
+
+
+
+

for the specific employee

+
+
+
+
http://localhost:8081/services/rest/employeemanagement/v1/employee/1
+
+
+
+

Now download Postman to test the rest services.

+
+
+

Once done, you have to create a POST Request for the LOGIN and insert in the body the JSON containing the username and password waiter

+
+
+

postman

+
+
+

Once done with success (Status: 200 OK) …

+
+
+

… We create a NEW GET Request in order to get one employee

+
+
+

postman

+
+
+

Now you can click postman

+
+
+

Now you have to check that the response has Status: 200 OK and see the below Employee

+
+
+

postman

+
+
+

Now that we have successfully tested the BE, it is time to create the FE!

+
+
+
+
Front End
+
+

Let’s start now with angular Web and then Ionic app.

+
+
+Angular Web App +
+
    +
  1. +

    To generate angular structure, download or clone devon4ng-application-template from

    +
    +
    +
    https://github.com/devonfw/devon4ng-application-template
    +
    +
    +
  2. +
+
+
+

devon dist folder

+
+
+
    +
  1. +

    Once done, right click on EmployeeEto.java file present under the package com.devonfw.poc.employeemanagement.logic.api.to

    +
  2. +
+
+
+

eclipse generate

+
+
+
    +
  1. +

    Click on Finish

    +
  2. +
+
+
+

eclipse

+
+
+
    +
  1. +

    The entire ANGULAR structure has been auto generated. The generated code will be merged to the existing.

    +
  2. +
+
+
+

angular ee layer

+
+
+
    +
  1. +

    IMPORTANT now you have to add in the app-routing.module.ts file the next content, as a child of HomeComponent, in order to enable the route of the new generated component

    +
  2. +
+
+
+
+
,\{
+path: 'employee',
+component: EmployeeGridComponent,
+canActivate: [AuthGuard],
+},
+
+
+
+

The following picture explains where to place the above content:

+
+
+

routes

+
+
+
    +
  1. +

    Open the command prompt and execute devon yarn install from the base folder, which will download all the required libraries.

    +
  2. +
+
+
+
    +
  1. +

    Check the file environment.ts if the server path is correct. (for production you will have to change also the environment.prod.ts file)

    +
  2. +
+
+
+

environment

+
+
+

In order to do that it’s important to look at the application.properties to see the values as PATH, TCP port etc …

+
+
+

configure

+
+
+

For example, in this case, since the context path is empty, the server URLs should be like:

+
+
+
+
export const environment = {
+production: false,
+restPathRoot: 'http://localhost:8081/',
+restServiceRoot: 'http://localhost:8081/services/rest/',
+security: 'jwt'
+};
+
+
+
+

Warning: REMEMBER to set the security field to jwt, if it is not configured already.

+
+
+
    +
  1. +

    Now run the ng serve -o command to run the Angular Application.

    +
  2. +
+
+
+

image44

+
+
+
    +
  1. +

    If the command execution is successful, the below screen will appear and it would be automatically redirected to the url:

    +
    +
    +
    http://localhost:4200/login
    +
    +
    +
  2. +
+
+
+

image45

+
+
+

WebApp DONE

+
+
+
+Ionic Mobile App +
+
    +
  1. +

    To generate Ionic structure, download or clone devon4ng-application-template from

    +
    +
    +
    https://github.com/devonfw/devon4ng-ionic-application-template
    +
    +
    +
  2. +
  3. +

    Once done, Right click on the EmployeeEto as you already did before in order to use CobiGen.

    +
  4. +
  5. +

    Click on the selected options as seen in the screenshot:

    +
  6. +
+
+
+

image46

+
+
+
    +
  1. +

    Click on Finish

    +
  2. +
  3. +

    The entire ionic structure will be auto generated.

    +
  4. +
+
+
+

image47

+
+
+
    +
  1. +

    Change the server url (with the correct server url) in environment.ts, environment.prod.ts and environment.android.ts files (i.e: itapoc\devon4ng-ionic-application-template\src\environments\).

    +
  2. +
+
+
+

The angular.json file inside the project has already a build configuration for android.

+
+
+

image48

+
+
+
    +
  1. +

    Run npm install in the root folder to download the dependencies

    +
  2. +
  3. +

    Run ionic serve

    +
  4. +
+
+
+

image49

+
+
+
    +
  1. +

    +
    +

    Once the execution is successful

    +
    +
  2. +
+
+
+

image50

+
+
+
    +
  • +

    Mobile App DONE

    +
  • +
+
+
+

So: well done

+
+
+

Starting from an Entity class you’ve successfully generated the Back-End layer (REST, SOAP, DTO, Spring services, Hibernate DAO), the Angular Web App and the Ionic mobile App!

+
+
+

image51

+
+
+Build APK +
+

Since we’re going to create an APK, remember the following pre-conditions:

+
+
+ +
+
+
    +
  1. +

    Now, open cmd and type the path where your devon4ng-ionic-application-template project is present.

    +
  2. +
  3. +

    Run the following commands:

    +
    +
      +
    1. +

      npx cap init

      +
    2. +
    3. +

      ionic build --configuration=android

      +
    4. +
    5. +

      npx cap add android

      +
    6. +
    7. +

      npx cap copy

      +
    8. +
    9. +

      npx cap open android

      +
    10. +
    +
    +
  4. +
  5. +

    Build the APK using Android studio.

    +
  6. +
+
+
+

image52 +image53 +image54 +image55

+
+
+

You can find your apk file in

+
+
+

/devon4ng-ionic-application-template/android/app/build/outputs/apk/debug

+
+
+
+
+
+
+
Adapt CobiGen_Templates
+
+

After following this tutorial, you will have the CobiGen_Templates downloaded on your local machine. To import these templates you need to do the following:

+
+
+

Right click in any part of the package explorer, then click on CobiGen → Adapt templates

+
+
+

image56

+
+
+

Click Ok:

+
+
+

image57

+
+
+

Now the CobiGen_Templates project will be automatically imported into your workspace, as shown on the image below:

+
+
+

image58

+
+
+

image59

+
+
+

Now you just need to change the Java version of the project to JRE 1.8. Right click on the JRE system library, and then on Properties:

+
+
+

image60

+
+
+

Now change the version to Java 1.8 +image61

+
+
+

Now you have successfully imported the CobiGen templates. If you want to edit them, you will find them in the folder src/main/templates. For instance, the Java templates are located here:

+
+
+

image62

+
+
+

Now you can adapt the templates as much as you want. Documentation about this can be found on:

+
+
+
+
https://github.com/devonfw/tools-cobigen/wiki/Guide-to-the-Reader
+
+
+ + +
+

== End to End POC Code generation using OpenAPI

This article helps to create a sample application using CobiGen.

+
+
+
+
Prerequisites
+
+

Download and install the devonfw IDE here.

+
+
+
+
Steps to create a Sample Project using Cobigen
+
+

The HOW_TO is divided in 2 parts:

+
+
+
    +
  1. +

    BE-Back End generator (DB + DAO + services) – CONTRACT FIRST APPROACH

    +
  2. +
  3. +

    FE-Front End generator (Web App Angular + Ionic App) – CONTRACT FIRST APPROACH

    +
  4. +
+
+
+

cobigen ionic code generation

+
+
+

So, ready to go! We’re going to start from the BE part …

+
+
+
Back End
+
+

run \devonfw-ide-scripts-3.2.4\eclipse-main.bat

+
+
+

It will open eclipse

+
+
+

create a project using below command from the command prompt

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

Import the project to eclipse as maven project +eclipse devon

+
+
+

Click FINISH

+
+
+

Now We have the following 4 projects.

+
+
+

eclipse package explorer

+
+
+

BEFORE to start to create an Entity class, remember to create the tables !

+
+
+
    +
  1. +

    Create a new SQL file (i.e: V0005__CreateTables_ItaPoc.sql) inside jwtsample-core and insert the following script:

    +
  2. +
+
+
+
+
CREATE TABLE EMPLOYEE (
+id BIGINT auto_increment, modificationCounter INTEGER NOT NULL,
+employeeid BIGINT auto_increment,
+name VARCHAR(255),
+surname VARCHAR(255),
+email VARCHAR(255),
+PRIMARY KEY (employeeid)
+);
+
+
+
+

WARNING: please note that there are 2 underscores in the name!

+
+
+

sql file

+
+
+
    +
  1. +

    Now create another SQL file (i.e: V0006__PopulateTables-ItaPoc.sql) and add following script about the INSERT in order to populate the table created before

    +
  2. +
+
+
+

WARNING: please note that there are 2 underscores in the name!

+
+
+
+
INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (1, 1, 1, 'Stefano','Rossini','stefano.rossini@capgemini.com');
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (2, 2, 2, 'Angelo','Muresu', 'angelo.muresu@capgemini.com');
+INSERT INTO EMPLOYEE (id, modificationCounter, employeeid, name, surname,email) VALUES (3, 3, 3, 'Jaime','Gonzalez', 'jaime.diaz-gonzalez@capgemini.com');
+
+
+
+

sql insert

+
+
+

Let’s create the yml file for the code generation

+
+
+
    +
  1. +

    Now create a new file devonfw.yml in the root of your core folder. This will be our OpenAPI contract, like shown below. Then, copy the contents of this file into your OpenAPI. It defines some REST service endpoints and a EmployeeEntity with its properties defined.

    +
  2. +
+
+
+

Important: if you want to know how to write an OpenAPI contract compatible with CobiGen, please read this tutorial.

+
+
+

Swagger at OASP4J Project

+
+
+
    +
  1. +

    Right click devonfw.yml. CobiGen → Generate

    +
  2. +
+
+
+

It will ask you to download the templates, click on update:

+
+
+

cobigen generate

+
+
+

It will automatically download the latest version of CobiGen_Templates.

+
+
+

Attention: If you want to adapt the CobiGen_Templates (normally this is not necessary), you will find at the end of this document a tutorial on how to import them and adapt them!

+
+
+
    +
  1. +

    Click on all the option selected as below:

    +
  2. +
+
+
+

cobigen option selection

+
+
+
    +
  1. +

    Click on finish. Below Screen would be seen. Click on continue

    +
  2. +
+
+
+

cobigen finish

+
+
+

The entire BE layer structure having CRUD operation methods will be auto generated.

+
+
+

Some classes will be generated on the api part (jwtsample-api), normally it will be interfaces, as shown below:

+
+
+

be layer

+
+
+

Some other classes will be generated on the core part (jwtsample-core), normally it will be implementations as shown below:

+
+
+

core folder

+
+
+

BEFORE to generate the FE, please start the Tomcat server to check that BE Layer has been generated properly.

+
+
+

To start a server you just have to right click on SpringBootApp.javarun as → Spring Boot app

+
+
+

Eclipse run as

+
+
+

Spring boot run

+
+
+

Spring boot run

+
+
+

BE DONE

+
+
+

Last but not least: We make a quick REST services test !

+
+
+

See in the application.properties the TCP Port and the PATH

+
+
+

application properties

+
+
+

Now compose the Rest service URL:

+
+
+

<server>/<app>/services/rest/<rest service class path>/<service method path>

+
+
+
    +
  • +

    <server> refers to server with port no. (ie: localhost:8081)

    +
  • +
  • +

    <app> is in the application.properties (empty in our case, see above)

    +
  • +
  • +

    <rest service class path> refers to EmployeemanagementRestService: (i.e: /employeemanagement/v1)

    +
  • +
  • +

    <service method path>/employee/{id}  (i.e: for  getEmployee method)

    +
  • +
+
+
+

url mapping

+
+
+

URL of getEmployee for this example is:

+
+
+

For all employees

+
+
+
+
http://localhost:8081/services/rest/employeemanagement/v1/employee/search
+
+
+
+

For the specific employee

+
+
+
+
http://localhost:8081/services/rest/employeemanagement/v1/employee/1
+
+
+
+

Now download Postman to test the rest services.

+
+
+

Once done, you have to create a POST Request for the LOGIN and insert in the body the JSON containing the username and password waiter

+
+
+

postman

+
+
+

Once done with success (Status: 200 OK) …

+
+
+

postman

+
+
+

… We create a NEW POST Request and We copy the Authorization Bearer field (see above) and We paste it in the Token field (see below)

+
+
+

postman

+
+
+

and specify the JSON parameters for the pagination of the request that we’re going to send:

+
+
+

postman

+
+
+

postman

+
+
+

Now you can click postman

+
+
+

Now you have to check that the response has Status: 200 OK and see the below list of Employees

+
+
+

postman

+
+
+

Now that we have successfully tested the BE, it is time to create the FE!

+
+
+
+
Front End
+
+

Let’s start now with angular Web and then Ionic app.

+
+
+Angular Web App +
+
    +
  1. +

    To generate angular structure, download or clone devon4ng-application-template from

    +
    +
    +
    https://github.com/devonfw/devon4ng-application-template
    +
    +
    +
  2. +
+
+
+

devon dist folder

+
+
+
    +
  1. +

    Once done, right click on devonfw.yml again (the OpenAPI contract). CobiGen → Generate

    +
  2. +
  3. +

    Click on the selected options as seen in the screenshot:

    +
  4. +
+
+
+

eclipse generate

+
+
+
    +
  1. +

    Click on Finish

    +
  2. +
+
+
+

eclipse

+
+
+
    +
  1. +

    The entire ANGULAR structure has been auto generated. The generated code will be merged to the existing.

    +
  2. +
+
+
+

angular ee layer

+
+
+
    +
  1. +

    IMPORTANT now you have to add in the app-routing.module.ts file the next content, as a child of HomeComponent, in order to enable the route of the new generated component

    +
  2. +
+
+
+
+
,\{
+path: 'employee',
+component: EmployeeGridComponent,
+canActivate: [AuthGuard],
+},
+
+
+
+

The following picture explains where to place the above content:

+
+
+

routes

+
+
+
    +
  1. +

    Open the command prompt and execute devon yarn install from the base folder, which will download all the required libraries.

    +
  2. +
+
+
+
    +
  1. +

    Check the file environment.ts if the server path is correct. (for production you will have to change also the environment.prod.ts file)

    +
  2. +
+
+
+

environment

+
+
+

In order to do that it’s important to look at the application.properties to see the values as PATH, TCP port etc …

+
+
+

configure

+
+
+

For example, in this case, since the context path is empty, the server URLs should be like:

+
+
+
+
export const environment = {
+production: false,
+restPathRoot: 'http://localhost:8081/',
+restServiceRoot: 'http://localhost:8081/services/rest/',
+security: 'jwt'
+};
+
+
+
+

Warning: REMEMBER to set the security field to jwt, if it is not configured already.

+
+
+
    +
  1. +

    Now run the *ng serve -o* command to run the Angular Application.

    +
  2. +
+
+
+

image44

+
+
+
    +
  1. +

    If the command execution is successful, the below screen will appear and it would be automatically redirected to the url:

    +
    +
    +
    http://localhost:4200/login
    +
    +
    +
  2. +
+
+
+

image45

+
+
+

WebApp DONE

+
+
+
+Ionic Mobile App +
+
    +
  1. +

    To generate Ionic structure, download or clone *devon4ng-application-template* from

    +
    +
    +
    https://github.com/devonfw/devon4ng-ionic-application-template
    +
    +
    +
  2. +
  3. +

    Once done, right click on the devonfw.yml as you already did before in order to use CobiGen.

    +
  4. +
  5. +

    Click on the selected options as seen in the screenshot:

    +
  6. +
+
+
+

image46

+
+
+
    +
  1. +

    Click on Finish

    +
  2. +
  3. +

    The entire ionic structure will be auto generated.

    +
  4. +
+
+
+

image47

+
+
+
    +
  1. +

    Change the server url (with correct serve url) in environment.ts, environment.prod.ts and environment.android.ts files (i.e: itapoc\devon4ng-ionic-application-template\src\environments\).

    +
  2. +
+
+
+

The angular.json file inside the project has already a build configuration for android.

+
+
+

image48

+
+
+
    +
  1. +

    Run npm install in the root folder to download the dependencies.

    +
  2. +
  3. +

    Run ionic serve

    +
  4. +
+
+
+

image49

+
+
+
    +
  1. +

    +
    +

    Once the execution is successful

    +
    +
  2. +
+
+
+

image50

+
+
+
    +
  • +

    Mobile App DONE*

    +
  • +
+
+
+

So: well done

+
+
+

Starting from an Entity class you’ve successfully generated the Back-End layer (REST, SOAP, DTO, Spring services, Hibernate DAO), the Angular Web App and the Ionic mobile App!

+
+
+

image51

+
+
+Build APK +
+

Since we’re going to create an APK, remember the following pre-conditions:

+
+
+ +
+
+
    +
  1. +

    Now, open cmd and type the path where your devon4ng-ionic-application-template project is present.

    +
  2. +
  3. +

    Run the following commands:

    +
    +
      +
    1. +

      npx cap init

      +
    2. +
    3. +

      ionic build --configuration=android

      +
    4. +
    5. +

      npx cap add android

      +
    6. +
    7. +

      npx cap copy

      +
    8. +
    9. +

      npx cap open android

      +
    10. +
    +
    +
  4. +
  5. +

    Build the APK using Android Studio.

    +
  6. +
+
+
+

image52 +image53 +image54 +image55

+
+
+

You can find your apk file in

+
+
+

/devon4ng-ionic-application-template/android/app/build/outputs/apk/debug

+
+
+
+
+
+
+
Adapt CobiGen_Templates
+
+

After following this tutorial, you will have the CobiGen_Templates downloaded on your local machine. To import these templates you need to do the following:

+
+
+

Right click in any part of the package explorer, then click on CobiGen → Adapt templates

+
+
+

image56

+
+
+

Click Ok:

+
+
+

image57

+
+
+

Now the CobiGen_Templates project will be automatically imported into your workspace, as shown on the image below:

+
+
+

image58

+
+
+

image59

+
+
+

Now you just need to change the Java version of the project to JRE 1.8. Right click on the JRE system library, and then on Properties:

+
+
+

image60

+
+
+

Now change the version to Java 1.8 +image61

+
+
+

Now you have successfully imported the CobiGen templates. If you want to edit them, you will find them in the folder src/main/templates. For instance, the Java templates are located here:

+
+
+

image62

+
+
+

Now you can adapt the templates as much as you want. Documentation about this can be found on:

+
+
+
+
https://github.com/devonfw/tools-cobigen/wiki/Guide-to-the-Reader
+
+
+ +
+

== Adapt Templates from CobiGen

+
+
+
+
Adapt CobiGen_Templates
+
+

After following this tutorial, you will have the CobiGen_Templates downloaded on your local machine. To import these templates you need to do the following:

+
+
+

Right click in any part of the package explorer, then click on CobiGen → Adapt templates

+
+
+

image56

+
+
+

Click OK:

+
+
+

image57

+
+
+

Now the CobiGen_Templates project will be automatically imported into your workspace, as shown on the image below:

+
+
+

image58

+
+
+

image59

+
+
+

Now you just need to change the Java version of the project to JRE 1.8. Right click on the JRE system library, and then on Properties:

+
+
+

image60

+
+
+

Now change the version to Java 1.8 +image61

+
+
+

Now you have successfully imported the CobiGen templates. If you want to edit them, you will find them in the folder src/main/templates. For instance, the Java templates are located here:

+
+
+

image62

+
+
+

Now you can adapt the templates as much as you want. Documentation about this can be found on:

+
+
+
+
https://github.com/devonfw/tools-cobigen/wiki/Guide-to-the-Reader
+
+
+ +
+

== Enable Composite Primary Keys in Entity

+
+
+

In order to enable Composite Primary Keys in entity in CobiGen, the below approach is suggested

+
+
+

The templates in CobiGen have been enhanced to support Composite primary keys while still supporting the default devonfw/Cobigen values with Long id.

+
+
+

Also, the current generation from Entity still holds good - right click from an Entity object, CobiGen → Generate will show the CobiGen wizard relative to the entity generation.

+
+
+

After generating, below example shows how composite primary keys can be enabled.

+
+
+
+
@Entity
+@Table(name = "employee")
+public class EmployeeEntity {
+	private CompositeEmployeeKey id;
+	private String name;
+	private String lastName;
+	@Override
+	@EmbeddedId
+	public CompositeEmployeeKey getId() {
+		return id;
+	}
+	@Override
+	public void setId(CompositeEmployeeKey id) {
+		this.id = id;
+	}
+	.
+	.
+	.
+
+
+
+
+
public class CompositeEmployeeKey implements Serializable {
+  private String companyId;
+  private String employeeId;
+
+
+
+

Once the generation is complete, implement PersistenceEntity<ID>.java in the EmployeeEntity and pass the composite primary key object which is CompositeEmployeeKey in this case as the parameter ID.

+
+
+
+
import com.devonfw.module.basic.common.api.entity.PersistenceEntity;
+@Entity
+@Table(name = "employee")
+public class EmployeeEntity implements PersistenceEntity<CompositeEmployeeKey> {
+	private CompositeEmployeeKey id;
+	private String name;
+	private String lastName;
+
+
+
+

Also, the modificationCounter methods needs to be implemented from the interface PersistenceEntity<ID>. The sample implementation of the modification counter can be referred below.

+
+
+
+
@Override
+  public int getModificationCounter() {
+    if (this.persistentEntity != null) {
+      // JPA implementations will update modification counter only after the transaction has been committed.
+      // Conversion will typically happen before and would result in the wrong (old) modification counter.
+      // Therefore we update the modification counter here (that has to be called before serialization takes
+      // place).
+      this.modificationCounter = this.persistentEntity.getModificationCounter();
+    }
+    return this.modificationCounter;
+  }
+  @Override
+  public void setModificationCounter(int version) {
+    this.modificationCounter = version;
+  }
+
+
+
+
+
+

26.7. Template Development

+ +
+
+

26.8. MrChecker - devonfw testing tool

+ +
+
+

26.9. Who Is MrChecker

+ +
+
Who is MrChecker?
+
+

MrChecker Test Framework is an end to end test automation framework which is written in Java. +It is an automated testing framework for functional testing of web applications, API web services, Service Virtualization, Security, native mobile apps and, in the near future, databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions.

+
+
+
+
Where does MrChecker apply?
+
+

The aim of MrChecker is to achieve a standardized way to build BlackBox tests. It provides the possibility to have one common software standard in order to build Component, Integration and System tests.

+
+
+

A Test Engineer does not have access to the application source code in order to perform BlackBox tests, but they are able to attach their tests to any application interfaces, such as - IP address - Domain Name - communication protocol - Command Line Interface.

+
+
+
+
MrChecker specification:
+
+
    +
  • +

    Responsive Web Design application: Selenium Browser

    +
  • +
  • +

    REST/SOAP: RestAssure

    +
  • +
  • +

    Service Virtualization: Wiremock

    +
  • +
  • +

    Database: JDBC drivers for SQL

    +
  • +
  • +

    Security: RestAssure + RestAssure Security lib

    +
  • +
  • +

    Standalone Java application: SWING

    +
  • +
  • +

    Native mobile application for Android: Appium

    +
  • +
+
+
+
+
Benefits
+
+

Every customer may benefit from using MrChecker Test Framework. The main profits for your project are:

+
+
+
    +
  • +

    Resilient and robust building and validation process

    +
  • +
  • +

    Quality gates shifted closer to the software development process

    +
  • +
  • +

    Team quality awareness increase - including Unit Tests, Static Analysis, Security Tests, Performance in the testing process

    +
  • +
  • +

    Test execution environment transparent to any infrastructure

    +
  • +
  • +

    Touch base with the Cloud solution

    +
  • +
  • +

    Faster Quality and DevOps-driven delivery

    +
  • +
  • +

    Proven frameworks, technologies and processes.

    +
  • +
+
+ +
+
+
Test stages
+ +
+
+
Unit test
+
+

A module is the smallest compilable unit of source code. It is often too small to be tested by the functional tests (black-box tests). However, it is the appropriate candidate for white-box testing. White-box tests have to be performed as the first static tests (e.g. Lint and inspections), followed by dynamic tests in order to check boundaries, branches and paths. Usually, this kind of testing would require enabling stubs and special test tools.

+
+
+
+
Component test
+
+

This is the black-box test of modules or groups of modules which represent certain functionalities. There are no rules about what could be called a component. Whatever a tester defines as a component, should make sense and be a testable unit. Components can be integrated into bigger components step by step and tested as such.

+
+
+
+
Integration test
+
+

Functions are tested by feeding them input and examining the output, and internal program structure is rarely considered. The software is completed step by step and tested by tests covering a collaboration between modules or classes. The integration depends on the kind of system. For example, the steps could be as follows: run the operating system first and gradually add one component after another, then check if the black-box tests are still running (the test cases will be extended together with every added component). The integration is done in the laboratory. It may be also completed by using simulators or emulators. Additionally, the input signals could be stimulated.

+
+
+
+
Software / System test
+
+

System testing is a type of testing conducted on a complete integrated system to evaluate the system’s compliance with its specified requirements. This is a type of black-box testing of the complete software in the target system. The most important factor in successful system testing is that the environmental conditions for the software have to be as realistic as possible (complete original hardware in the destination environment).

+
+
+
+
+

26.10. Test Framework Modules

+
+

In this section, it is possible to find all the information regarding the main modules of MrChecker:

+
+
+
+

26.11. Core Test Module

+ +
+
Core Test Module
+ +
+
+
What is Core Test Module
+
+
+image1 new +
+
+
+ +
+
How to start?
+ +
+
+
Allure Logger → BFLogger
+
+

In the Allure E2E Test Framework you have the ability to use and log any additional information crucial for:

+
+
+
    +
  • +

    test steps

    +
  • +
  • +

    test execution

    +
  • +
  • +

    page object actions, and many more.

    +
  • +
+
+
+
+
Where to find saved logs
+
+

All logged information is saved in a separate test file, as a result of parallel test execution.

+
+
+

The places they are saved:

+
+
+
    +
  1. +

    In test folder C:\Allure_Test_Framework\allure-app-under-test\logs

    +
  2. +
  3. +

    In every Allure Test report, logs are always embedded as an attachment, according to test run.

    +
  4. +
+
+
+
+
How to use logger:
+
+
    +
  • +

    Start typing

    +
    +

    BFLogger

    +
    +
  • +
  • +

    Then type . (dot)

    +
  • +
+
+
+
+
Type of logger:
+
+
    +
  • +

    BFLogger.logInfo("Your text") - used for test steps

    +
  • +
  • +

    BFLogger.logDebug("Your text") - used for unofficial information, either during the test build process or in Page Object files

    +
  • +
  • +

    BFLogger.logError("Your text") - used to emphasize critical information

    +
  • +
+
+
+
+image13 +
+
+
+

Console output:

+
+
+
+image14 +
+
+
+
+
Allure Reports
+
+
+image15 +
+
+
+

Allure is a tool designed for test reports.

+
+
+
+
Generate report - command line
+
+

You can generate a report using one of the following commands:

+
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+
+
mvn test allure:serve -Dgroups=TestsTag1
+
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+
mvn test allure:serve -Dtest=TS_Tag1
+
+
+
+

A report will be generated into temp folder. Web server with results will start. You can additionally configure the server timeout. The default value is "3600" (one hour).

+
+
+

System property allure.serve.timeout.

+
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+
+
mvn test allure:report -Dgroups=TestsTag1
+
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+
mvn test allure:report -Dtest=TS_Tag1
+
+
+
+

A report will be generated to the directory: target/site/allure-maven/index.html

+
+
+

NOTE: Please open index.html file under Firefox. Chrome has some limitations to presenting dynamic content. If you want to open a report with a Chromium based Web Browser, you need to launch it first with --allow-file-access-from-files argument.

+
+
+
+
Generate report - Eclipse
+
+

A report is created here allure-app-under-test\target\site\allure-report\index.html

+
+
+

NOTE: Please open index.html file under Firefox. Chrome has some limitations to presenting dynamic content. If you want to open a report with a Chromium based Web Browser, you need to launch it first with --allow-file-access-from-files argument.

+
+
+
+image17 +
+
+
+
+image18 +
+
+
+
+
Generate report - Jenkins
+
+

In our case, we’ll use the Allure Jenkins plugin. When integrating Allure in a Jenkins job configuration, we’ll have direct access to the build’s test report.

+
+
+
+image19 +
+
+
+

There are several ways to access the Allure Test Reports:

+
+
+
    +
  • +

    Using the "Allure Report" button on the left navigation bar or center of the general job overview

    +
  • +
  • +

    Using the "Allure Report" button on the left navigation bar or center of a specific build overview

    +
  • +
+
+
+

Afterwards you’ll be greeted with either the general Allure Dashboard (showing the newest build) or the Allure Dashboard for a specific (older) build.

+
+
+
+
Allure dashboard
+
+
+image20 +
+
+
+

The Dashboard provides a graphical overview on how many test cases were successful, failed or broken.

+
+
+
    +
  • +

    Passed means, that the test case was executed successfully.

    +
  • +
  • +

    Broken means, that there were mistakes, usually inside of the test method or test class. As tests are being treated as code, broken code has to be expected, resulting in occasionally broken test results.

    +
  • +
  • +

    Failed means that an assertion failed.

    +
  • +
+
+
+
+
Defects
+
+

The defects tab lists out all the defects that occurred, and also descriptions thereof. Clicking on a list item displays the test case which resulted in an error. Clicking on a test case allows the user to have a look at the test case steps, as well as Log files or Screenshots of the failure.

+
+
+
+
Graph
+
+

The graph page includes a pie chart of all tests, showing their result status (failed, passed, etc.). Another graph allows insight into the time elapsed during the tests. This is a very useful information to find and eliminate possible bottlenecks in test implementations.

+
+
+
+image21 +
+
+
+
+
Why join Test Cases in groups - Test Suites
+
+
+image22 +
+
+
+
+
Regression Suite:
+
+

Regression testing is a type of software testing which verifies that software which was previously developed and tested still performs the same way after it was changed or interfaced with another software.

+
+
+
    +
  • +

    Smoke

    +
  • +
  • +

    Business vital functionalities

    +
  • +
  • +

    Full scope of test cases

    +
  • +
+
+
+
+
Functional Suite:
+
+
    +
  • +

    Smoke

    +
  • +
  • +

    Business function A

    +
  • +
  • +

    Business function B

    +
  • +
+
+
+
+
Single Responsibility Unit:
+
+
    +
  • +

    Single page

    +
  • +
  • +

    Specific test case

    +
  • +
+
+
+
+
How to build a Test Suite based on tags
+ +
+
+
Structure of the Test Suite
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+
+image23 new +
+
+
+

Where:

+
+
+
    +
  • +

    @RunWith(JUnitPlatform.class) - use Junit5 runner

    +
  • +
  • +

    @IncludeTags({"TestsTag1"}) - search all test files with the tag "TestsTag1"

    +
  • +
  • +

    @ExcludeTags({"TagToExclude"}) - exclude test files with the tag "TagToExclude"

    +
  • +
  • +

    @SelectPackages("com.capgemini.mrchecker.core.groupTestCases.testCases") - search only test files in "com.capgemini.mrchecker.core.groupTestCases.testCases" package

    +
  • +
  • +

    public class TS_Tag1 - the name of the Test Suite is "TS_Tag1"

    +
  • +
+
+
+

Most commonly used filters to build a Test Suite are ones using:

+
+
+
    +
  • +

    @IncludeTags({ })

    +
  • +
  • +

    @ExcludeTags({ })

    +
  • +
+
+
+

Example:

+
+
+
    +
  1. +

    @IncludeTags({ "TestsTag1" }) , @ExcludeTags({ }) → will execute all test cases with the tag TestsTag1

    +
  2. +
  3. +

    @IncludeTags({ "TestsTag1" }) , @ExcludeTags({ "SlowTest" }) → will execute all test cases with tag "TestsTag1" although it will exclude from this list the test cases with the tag "SlowTest"

    +
  4. +
  5. +

    @IncludeTags({ }) , @ExcludeTags({ "SlowTest" }) → It will exclude test cases with the tag "SlowTest"

    +
  6. +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+image23 +
+
+
+

Where:

+
+
+
    +
  • +

    @RunWith(WildcardPatternSuiteBF.class) - search for test files under /src/test/java

    +
  • +
  • +

    @IncludeCategories({ TestsTag1.class }) - search for all test files with the tag "TestsTag1.class"

    +
  • +
  • +

    @ExcludeCategories({ }) - exclude test files. In this example, there is no exclusion

    +
  • +
  • +

    @SuiteClasses({ "**/*Test.class" }) - search only test files, where the file name ends with "<anyChar/s>Test.class"

    +
  • +
  • +

    public class TS_Tag1 - the name of the Test Suite is "TS_Tag1"

    +
  • +
+
+
+

Most commonly used filters to build Test Suite are ones using:

+
+
+
    +
  • +

    @IncludeCategories({ })

    +
  • +
  • +

    @ExcludeCategories({ })

    +
  • +
+
+
+

Example:

+
+
+
    +
  1. +

    @IncludeCategories({ TestsTag1.class }) , @ExcludeCategories({ }) → will execute all test cases with the tag TestsTag1.class

    +
  2. +
  3. +

    @IncludeCategories({ TestsTag1.class }) , @ExcludeCategories({ SlowTest.class }) → will execute all test cases with the tag "TestsTag1.class" although it will exclude from this list the test cases with the tag "SlowTest.class"

    +
  4. +
  5. +

    @IncludeCategories({ }) , @ExcludeCategories({ SlowTest.class }) → will execute all test cases from /src/test/java, although it will exclude from this list the test cases with the tag "SlowTest.class"

    +
  6. +
+
+
+
+
Structure of Test Case
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+
+image24 new +
+
+
+

Where:

+
+
+
    +
  • +

    @TestsTag1, @TestsSmoke, @TestsSelenium - list of tags assigned to this test case - "TestsTag1, TestsSmoke, TestSelenium" annotations

    +
  • +
  • +

    public class FristTest_tag1_Test - the name of the test case is "FristTest_tag1_Test"

    +
  • +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+image24 +
+
+
+

Where:

+
+
+
    +
  • +

    @Category({ TestsTag1.class, TestsSmoke.class, TestSelenium.class }) - list of tags / categories assigned to this test case - "TestsTag1.class, TestsSmoke.class, TestSelenium.class"

    +
  • +
  • +

    public class FristTest_tag1_Test - the name of the test case is "FristTest_tag1_Test"

    +
  • +
+
+
+
+
Structure of Tags / Categories
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+

Tag name: TestsTag1 annotation

+
+
+
+image25 new +
+
+
+

Tag name: TestsSmoke annotation

+
+
+
+image26 new +
+
+
+

Tag name: TestSelenium annotation

+
+
+
+image27 new +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+

Tag name: TestsTag1.class

+
+
+
+image25 +
+
+
+

Tag name: TestsSmoke.class

+
+
+
+image26 +
+
+
+

Tag name: TestSelenium.class

+
+
+
+image27 +
+
+
+
+
How to run Test Suite
+
+

To run a Test Suite you perform the same steps as you do to run a test case

+
+
+

Command line

+
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+

JUnit5 disallows running suite classes from maven. Use -Dgroups=Tag1,Tag2 and -DexcludeGroups=Tag4,Tag5 to create test suites in maven.

+
+
+
+
mvn test site -Dgroups=TestsTag1
+
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+
mvn test site -Dtest=TS_Tag1
+
+
+
+

Eclipse

+
+
+
+image28 +
+
+
+
+
Data driven approach
+
+

Data driven approach - External data driven

+
+
+

External data driven - Data as external file injected in test case

+
+
+

Test case - Categorize functionality and severity

+
+
+

You can find more information about data driven here and here

+
+
+

There are a few ways to define parameters for tests.

+
+
+
+
Internal Data driven approach
+
+

Data as part of test case

+
+
+

The different means to pass in parameters are shown below.

+
+
+

Since mrchecker-core-module version 5.6.2.1

+
+
+

Static methods are used to provide the parameters.

+
+
+
+
A method in the test class:
+
+
+
@ParameterizedTest
+@MethodSource("argumentsStream")
+
+
+
+

OR

+
+
+
+
@ParameterizedTest
+@MethodSource("arrayStream")
+
+
+
+

In the first case the arguments are directly mapped to the test method parameters. In the second case the array is passed as the argument.

+
+
+
+image30 new +
+
+
+
+
A method in a different class:
+
+
+
@ParameterizedTest
+@MethodSource("com.capgemini.mrchecker.core.datadriven.MyContainsTestProvider#provideContainsTrueParameters")
+
+
+
+
+image32 new +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1

+
+
+

Parameters that are passed into tests using the @Parameters annotation must be _Object[]_s

+
+
+
+
In the annotation:
+
+
+
@Parameters({"1, 2, 3", "3, 4, 7", "5, 6, 11", "7, 8, 15"})
+
+
+
+
+image30 +
+
+
+

The parameters must be primitive objects such as integers, strings, or booleans. Each set of parameters is contained within a single string and will be parsed to their correct values as defined by the test method’s signature.

+
+
+
+
In a method named in the annotation:
+
+
+
@Parameters(method = "addParameters")
+
+
+
+
+image31 +
+
+
+

A separate method can be defined and referred to for parameters. This method must return an Object[] and can contain normal objects.

+
+
+
+
In a class:
+
+
+
@Parameters(source = MyContainsTestProvider.class)
+
+
+
+
+image32 +
+
+
+

A separate class can be used to define parameters for the test. This test must contain at least one static method that returns an Object[], and its name must be prefixed with provide. The class could also contain multiple methods that provide parameters to the test, as long as they also meet the required criteria.

+
+
+
+
External Data Driven
+
+

Data as external file injected in test case

+
+
+

Since mrchecker-core-module version 5.6.2.1

+
+
+

Tests use the annotation @CsvFileSource to inject CSVs file.

+
+
+
+
@CsvFileSource(resources = "/datadriven/test.csv", numLinesToSkip = 1)
+
+
+
+

A CSV can also be used to contain the parameters for the tests. It is pretty simple to set up, as it’s just a comma-separated list.

+
+
+
+
Classic CSV
+
+
+image33 new +
+
+
+

and CSV file structure

+
+
+
+image34 +
+
+
+
+
CSV with headers
+
+
+image35 new +
+
+
+

and CSV file structure

+
+
+
+image36 +
+
+
+
+
CSV with specific column mapper
+
+
+image37 new +
+
+
+

and Mapper implementation

+
+
+
+image38 new +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1

+
+
+

Tests use the annotation @FileParameters to inject CSVs file.

+
+
+
+
@FileParameters("src/test/resources/datadriven/test.csv")
+
+
+
+

A CSV can also be used to contain the parameters for the tests. It is pretty simple to set up, as it’s just a comma-separated list.

+
+
+
+
Classic CSV
+
+
+image33 +
+
+
+

and CSV file structure

+
+
+
+image34 +
+
+
+
+
CSV with headers
+
+
+image35 +
+
+
+

and CSV file structure

+
+
+
+image36 +
+
+
+
+
CSV with specific column mapper
+
+
+image37 +
+
+
+

and Mapper implementation

+
+
+
+image38 +
+
+
+
+
What is "Parallel test execution" ?
+
+

Parallel test execution means many "Test Classes" can run simultaneously.

+
+
+

"Test Class", as this is a Junit Test class, it can have one or more test cases - "Test case methods"

+
+
+
+image39 +
+
+
+
+
How many parallel test classes can run simultaneously?
+
+

Since mrchecker-core-module version 5.6.2.1

+
+
+

JUnit5 supports parallelism natively. The feature is configured using a property file located at src\test\resources\junit-platform.properties. +As per default configuration, concurrent test execution is set to run test classes in parallel using the thread count equal to a number of your CPUs.

+
+
+
+image39a +
+
+
+

Visit JUnit5 site to learn more about parallel test execution.

+
+
+

Prior to mrchecker-core-module version 5.6.2.1

+
+
+

By default, number of parallel test classes is set to 8.

+
+
+

It can be updated as you please, on demand, by command line:

+
+
+
+
mvn test site -Dtest=TS_Tag1 -Dthread.count=16
+
+
+
+

-Dthread.count=16 - increase number of parallel Test Class execution to 16.

+
+
+
+
Overview
+
+

Cucumber / Selenium

+
+
+

Business and IT don’t always understand each other. Very often misunderstandings between business and IT result in the costly failure of IT projects. With this in mind, Cucumber was developed as a tool to support human collaboration between business and IT.

+
+
+

Cucumber uses executable specifications to encourage a close collaboration. This helps teams to keep the business goal in mind at all times. With Cucumber you can merge specification and test documentation into one cohesive whole, allowing your team to maintain one single source of truth. Because these executable specifications are automatically tested by Cucumber, your single source of truth is always up-to-date.

+
+
+
+image40 +
+
+
+

Cucumber supports testers when designing test cases. To automate these test cases, several languages can be used. Cucumber also works well with Browser Automation tools such as Selenium Webdriver.

+
+
+
+
== Selenium
+
+

Selenium automates browsers and is used for automating web applications for testing purposes. Selenium offers testers and developers full access to the properties of objects and the underlying tests, via a scripting environment and integrated debugging options.

+
+
+

Selenium consists of many parts. If you want to create robust, browser-based regression automation suites and tests, Selenium Webdriver is most appropriate. With Selenium Webdriver you can also scale and distribute scripts across many environments.

+
+
+
+
Strengths
+ +
+
+
== Supports BDD
+
+

Those familiar with Behavior Driven Development (BDD) recognize Cucumber as an excellent open source tool that supports this practice.

+
+
+
+
== All in one place
+
+

With Cucumber / Selenium you can automate at the UI level. Automation at the unit or API level can also be implemented using Cucumber. This means all tests, regardless of the level at which they are implemented, can be implemented in one tool.

+
+
+
+
== Maintainable test scripts
+
+

Many teams seem to prefer UI level automation, despite huge cost of maintaining UI level tests compared to the cost of maintaining API or unit tests. To lessen the maintenance of UI testing, when designing UI level functional tests, you can try describing the test and the automation at three levels: business rule, UI workflow, technical implementation.

+
+
+

When using Cucumber combined with Selenium, you can implement these three levels for better maintenance.

+
+
+
+
== Early start
+
+

Executable specifications can and should be written before the functionality is implemented. By starting early, teams get most return on investment from their test automation.

+
+
+
+
== Supported by a large community
+
+

Cucumber and Selenium are both open source tools with a large community, online resources and mailing lists.

+
+
+
+
How to run cucumber tests in Mr.Checker
+ +
+
+
Command line / Jenkins
+
+
    +
  • +

    Run cucumber tests and generate Allure report. Please use this for Jenkins execution. Report is saved under ./target/site.

    +
    +
    +
    mvn clean -P cucumber test site
    +
    +
    +
  • +
  • +

    Run and generate report

    +
    +
    +
    mvn clean -P cucumber test site allure:report
    +
    +
    +
  • +
  • +

    Run cucumber tests, generate Allure report and start standalone report server

    +
    +
    +
    mvn clean -P cucumber test site allure:serve
    +
    +
    +
  • +
+
+
+
+
Eclipse IDE
+
+
+image41 +
+
+
+
+
Tooling
+ +
+
+
== Cucumber
+
+

Cucumber supports over a dozen different software platforms. Every Cucumber implementation provides the same overall functionality, but they also have their own installation procedure and platform-specific functionality. See https://cucumber.io/docs for all Cucumber implementations and framework implementations.

+
+
+

Also, IDEs such as Intellij offer several plugins for Cucumber support.

+
+
+
+
== Selenium
+
+

Selenium has the support of some of the largest browser vendors who have taken (or are taking) steps to make Selenium a native part of their browser. It is also the core technology in countless other browser automation tools, APIs and frameworks.

+
+
+
+
Automation process
+ +
+
+
== Write a feature file
+
+

Test automation in Cucumber starts with writing a feature file. A feature normally consists of several (test)scenarios and each scenario consists of several steps.

+
+
+

Feature: Refund item

+
+
+

Scenario: Jeff returns a faulty microwave

+
+
+

Given Jeff has bought a microwave for $100

+
+
+

And he has a receipt

+
+
+

When he returns the microwave

+
+
+

Then Jeff should be refunded $100

+
+
+

Above example shows a feature “Refund item” with one scenario “Jeff returns a faulty microwave”. The scenario consists of four steps each starting with a key word (Given, And, When, Then).

+
+
+
+
== Implementing the steps
+
+

Next the steps are implemented. Assuming we use Java to implement the steps, the Java code will look something like this.

+
+
+
+
public class MyStepdefs \{
+
+	@Given("Jeff has bought a microwave for $(\d+)")
+
+	public void Jeff_has_bought_a_microwave_for(int amount) \{
+
+		// implementation can be plain java
+
+		// or selenium
+
+		driver.findElement(By.name("test")).sendKeys("This is an example\n");
+
+		driver.findElement(By.name("button")).click();// etc
+	}
+}
+
+
+
+

Cucumber uses an annotation (highlighted) to match the step from the feature file with the function implementing the step in the Java class. The name of the class and the function can be as the developer sees fit. Selenium code can be used within the function to automate interaction with the browser.

+
+
+
+
== Running scenarios
+
+

There are several ways to run scenarios with Cucumber, for example the JUnit runner, a command line runner and several third party runners.

+
+
+
+
== Reporting test results
+
+

Cucumber can report results in several different formats, using formatter plugins

+
+
+
+
Features
+ +
+
+
== Feature files using Gherkin
+
+

Cucumber executes your feature files. As shown in the example below, feature files in Gherkin are easy to read so they can be shared between IT and business. Data tables can be used to execute a scenario with different inputs.

+
+
+
+image42 +
+
+
+
+
== Organizing tests
+
+

Feature files are placed in a directory structure and together form a feature tree.

+
+
+

Tags can be used to group features based on all kinds of categories. Cucumber can include or exclude tests with certain tags when running the tests.

+
+
+
+
Reporting test results
+
+

Cucumber can report results in several formats, using formatter plugins. +Not supported option by Shared Services: The output from Cucumber can be used to present test results in Jenkins or Hudson depending of the preference of the project.

+
+
+
+image43 +
+
+
+
+
HOW IS Cucumber / Selenium USED AT Capgemini?
+ +
+
+
Tool deployment
+
+

Cucumber and Selenium are chosen as one of Capgemini’s test automation industrial tools. We support the Java implementation of Cucumber and Selenium Webdriver. We can help with creating Cucumber, Selenium projects in Eclipse and IntelliJ.

+
+
+
+
Application in ATaaS (Automated Testing as a Service)
+
+

In the context of industrialisation, Capgemini has developed a range of services to assist and support the projects in process and tools implementation.

+
+
+

In this context a team of experts assists projects using test automation.

+
+
+

The main services provided by the center of expertise are:

+
+
+
    +
  • +

    Advise on the feasibility of automation.

    +
  • +
  • +

    Support with installation.

    +
  • +
  • +

    Coaching teams in the use of BDD.

    +
  • +
+
+
+
+
Run on independent Operation Systems
+
+

As the E2E Allure test framework is built on top of:

+
+
+
    +
  • +

    Java 1.8

    +
  • +
  • +

    Maven 3.3

    +
  • +
+
+
+

This guarantees portability to all operating systems.

+
+
+

E2E Allure test framework can run on OS:

+
+
+
    +
  • +

    Windows,

    +
  • +
  • +

    Linux and

    +
  • +
  • +

    Mac.

    +
  • +
+
+
+

Test creation and maintenance in E2E Allure test framework can be done with any type of IDE:

+
+
+
    +
  • +

    Eclipse,

    +
  • +
  • +

    IntelliJ,

    +
  • +
  • +

    WebStorm,

    +
  • +
  • +

    Visual Studio Code,

    +
  • +
  • +

    many more that support Java + Maven.

    +
  • +
+
+
+
+
System under test environments
+
+
+image44 +
+
+
+
    +
  • +

    Quality assurance or QA is a way of preventing mistakes or defects in manufactured products and avoiding problems when delivering solutions or services to customers; which ISO 9000 defines as "part of quality management focused on providing confidence that quality requirements will be fulfilled".

    +
  • +
  • +

    System integration testing or SIT is a high-level software testing process in which testers verify that all related systems maintain data integrity and can operate in coordination with other systems in the same environment. The testing process ensures that all sub-components are integrated successfully to provide expected results.

    +
  • +
  • +

    Development or Dev testing is performed by the software developer or engineer during the construction phase of the software development life-cycle. Rather than replace traditional QA focuses, it augments it. Development testing aims to eliminate construction errors before code is promoted to QA; this strategy is intended to increase the quality of the resulting software as well as the efficiency of the overall development and QA process.

    +
  • +
  • +

    Prod If the customer accepts the product, it is deployed to a production environment, making it available to all users of the system.

    +
  • +
+
+
+
+image45 +
+
+
+
+
How to use system environment
+
+

In Page classes, when you load / start a web page, it is uncommon to save a fixed main URL.

+
+
+

Value flexibility is a must when your web application under test has a different main URL depending on the environment (DEV, QA, SIT, …​, PROD)

+
+
+

Instead of a hard-coded main URL variable, you build your Page class with a dynamic variable.

+
+
+

Example of dynamic variable GetEnvironmentParam.WWW_FONT_URL

+
+
+
+image46 +
+
+
+
+
How to create / update system environment
+ +
+
+
External file with variable values
+
+

Dynamic variable values are stored under path mrchecker-app-under-test\src\resources\enviroments\environments.csv.

+
+
+

NOTE: As environments.csv is a comma-separated file, please be careful when editing and saving it in Excel.

+
+
+
+image47 +
+
+
+
+
Encrypting sensitive data
+
+

Some types of data you might want to store as environment settings are sensitive in nature (e.g. passwords). You might not want to store them (at least not in their plaintext form) in your repository. To be able to encrypt sensitive data you need to do following:

+
+
+
    +
  1. +

    Create a secret (long, random chain of characters) and store it under mrchecker-app-under-test\src\resources\secretData.txt. Example: LhwbTm9V3FUbBO5Tt5PiTUEQrXGgWrDLCMthnzLKNy1zA5FVTFiTdHRQAyPRIGXmsAjPUPlJSoSLeSBM

    +
  2. +
  3. +

    Exclude the file from being checked into the git repository by adding it to git.ignore. You will need to pass the file over a different channel among your teammates.

    +
  4. +
  5. +

    Encrypt the values before putting them into the environments.csv file by creating following script (put the script where your jasypt library resides, e.g. C:\MrChecker_Test_Framework\m2\repository\org\jasypt\jasypt\1.9.2):

    +
    +
    +
    @ECHO OFF
    +
    +set SCRIPT_NAME=encrypt.bat
    +set EXECUTABLE_CLASS=org.jasypt.intf.cli.JasyptPBEStringEncryptionCLI
    +set EXEC_CLASSPATH=jasypt-1.9.2.jar
    +if "%JASYPT_CLASSPATH%" ==  "" goto computeclasspath
    +set EXEC_CLASSPATH=%EXEC_CLASSPATH%;%JASYPT_CLASSPATH%
    +
    +:computeclasspath
    +IF "%OS%" ==  "Windows_NT" setlocal ENABLEDELAYEDEXPANSION
    +FOR %%c in (%~dp0..\lib\*.jar) DO set EXEC_CLASSPATH=!EXEC_CLASSPATH!;%%c
    +IF "%OS%" ==  "Windows_NT" setlocal DISABLEDELAYEDEXPANSION
    +
    +set JAVA_EXECUTABLE=java
    +if "%JAVA_HOME%" ==  "" goto execute
    +set JAVA_EXECUTABLE="%JAVA_HOME%\bin\java"
    +
    +:execute
    +%JAVA_EXECUTABLE% -classpath %EXEC_CLASSPATH% %EXECUTABLE_CLASS% %SCRIPT_NAME% %*
    +
    +
    +
  6. +
  7. +

    Encrypt the values by calling

    +
    +
    +
    .\encrypt.bat input=someinput password=secret
    +
    +----ENVIRONMENT-----------------
    +
    +Runtime: Oracle Corporation Java HotSpot(TM) 64-Bit Server VM 25.111-b14
    +
    +
    +
    +----ARGUMENTS-------------------
    +
    +input: someinput
    +password: secret
    +
    +
    +
    +----OUTPUT----------------------
    +
    +JN3nOFol2GMZoUxR5z2wI2qdipcNH1UD
    +
    +
    +
  8. +
  9. +

    Mark the value as encrypted by adding a prefix 'ENC(' and suffix ')' like: ENC(JN3nOFol2GMZoUxR5z2wI2qdipcNH1UD)

    +
    +
    +image48 +
    +
    +
  10. +
+
+
+
+
Bridge between external file and Page class
+
+

To map values from external file with Page class you ought to use class GetEnvironmentParam.

+
+
+

Therefore when you add new variable (row) in environments.csv you might need to add this variable to GetEnvironmentParam.

+
+
+
+image49 +
+
+
+
+
Run test case with system environment
+
+

To run test case with system environment, please use:

+
+
+
    +
  • +

    -Denv=<NameOfEnvironment>

    +
  • +
  • +

    <NameOfEnvironment> is taken as column name from file mrchecker-app-under-test\src\test\resources\enviroments\environments.csv

    +
  • +
+
+
+
+
Command Line
+
+
+
mvn test site -Dtest=RegistryPageTest -Denv=DEV
+
+
+
+
+
Eclipse
+
+
+image50 +
+
+
+
+image51 +
+
+ +
+
+
System under test environments
+
+
+image080 +
+
+
+
    +
  • +

    Quality assurance or QA is a way of preventing mistakes or defects in the manufactured products and avoiding problems when delivering solutions or services to customers which ISO 9000 defines as "part of quality management focused on providing confidence that quality requirements will be fulfilled".

    +
  • +
  • +

    System integration testing or SIT is a high-level software testing process in which testers verify that all related systems maintain data integrity and can operate in coordination with other systems in the same environment. The testing process ensures that all sub-components are integrated successfully to provide expected results.

    +
  • +
  • +

    Development or Dev testing is performed by the software developer or engineer during the construction phase of the software development life-cycle. Rather than replace traditional QA focuses, it augments it. Development testing aims to eliminate construction errors before code is promoted to QA; this strategy is intended to increase the quality of the resulting software as well as the efficiency of the overall development and QA process.

    +
  • +
  • +

    Prod If the customer accepts the product, it is deployed to a production environment, making it available to all users of the system.

    +
  • +
+
+
+
+image051 +
+
+
+
+
How to use system environment
+
+

In Page classes, when you load / start a web page, it is uncommon to save a fixed main URL.

+
+
+

Value flexibility is a must when your web application under test has a different main URL, depending on the environment (DEV, QA, SIT, …​, PROD)

+
+
+

Instead of hard coded main url variable, you build your Page classes with dynamic variable.

+
+
+

An example of dynamic variable GetEnvironmentParam.WWW_FONT_URL

+
+
+
+image081 +
+
+
+
+
How to create / update system environment
+ +
+
+
External file with variable values
+
+

Dynamic variable values are stored under mrchecker-app-under-test\src\resources\enviroments\environments.csv.

+
+
+

NOTE: As environments.csv is a comma-separated file, please be careful while editing and saving it in Excel.

+
+
+
+image082 +
+
+
+
+
Encrypting sensitive data
+
+

Some types of data you might want to store as environment settings are sensitive in nature (e.g. passwords). You might not want to store them (at least not in their plaintext form) in your repository. To be able to encrypt sensitive data you need to do following:

+
+
+
    +
  1. +

    Create a secret (long, random chain of characters) and store it under mrchecker-app-under-test\src\resources\secretData.txt. Example: LhwbTm9V3FUbBO5Tt5PiTUEQrXGgWrDLCMthnzLKNy1zA5FVTFiTdHRQAyPRIGXmsAjPUPlJSoSLeSBM

    +
  2. +
  3. +

    Exclude the file from being checked into the git repository by adding it to git.ignore. You will need to pass the file over a different channel among your teammates.

    +
  4. +
  5. +

    Encrypt the values before putting them into the environments.csv file by creating following script (put the script where your jasypt library resides, e.g. C:\MrChecker_Test_Framework\m2\repository\org\jasypt\jasypt\1.9.2):

    +
  6. +
+
+
+
+
@ECHO OFF
+
+set SCRIPT_NAME=encrypt.bat
+set EXECUTABLE_CLASS=org.jasypt.intf.cli.JasyptPBEStringEncryptionCLI
+set EXEC_CLASSPATH=jasypt-1.9.2.jar
+if "%JASYPT_CLASSPATH%" ==  "" goto computeclasspath
+set EXEC_CLASSPATH=%EXEC_CLASSPATH%;%JASYPT_CLASSPATH%
+
+:computeclasspath
+IF "%OS%" ==  "Windows_NT" setlocal ENABLEDELAYEDEXPANSION
+FOR %%c in (%~dp0..\lib\*.jar) DO set EXEC_CLASSPATH=!EXEC_CLASSPATH!;%%c
+IF "%OS%" ==  "Windows_NT" setlocal DISABLEDELAYEDEXPANSION
+
+set JAVA_EXECUTABLE=java
+if "%JAVA_HOME%" ==  "" goto execute
+set JAVA_EXECUTABLE="%JAVA_HOME%\bin\java"
+
+:execute
+%JAVA_EXECUTABLE% -classpath %EXEC_CLASSPATH% %EXECUTABLE_CLASS% %SCRIPT_NAME% %*
+
+
+
+
    +
  1. +

    Encrypt the values by calling

    +
  2. +
+
+
+
+
.\encrypt.bat input=someinput password=secret
+
+----ENVIRONMENT-----------------
+
+Runtime: Oracle Corporation Java HotSpot(TM) 64-Bit Server VM 25.111-b14
+
+
+
+----ARGUMENTS-------------------
+
+input: someinput
+password: secret
+
+
+
+----OUTPUT----------------------
+
+JN3nOFol2GMZoUxR5z2wI2qdipcNH1UD
+
+
+
+
    +
  1. +

    Mark the value as encrypted by adding a prefix 'ENC(' and suffix ')' like: ENC(JN3nOFol2GMZoUxR5z2wI2qdipcNH1UD)

    +
  2. +
+
+
+
+image083 +
+
+
+
+
Bridge between external file and Page class
+
+

To map values from external file with Page class you ought to use class GetEnvironmentParam.

+
+
+

Therefore when you add new variable (row) in environments.csv you might need to add this variable to GetEnvironmentParam.

+
+
+
+image084 +
+
+
+
+
Run test case with system environment
+
+

To run test case with system environment, please use: +* -Denv=\<NameOfEnvironment\> +* \<NameOfEnvironment\> is taken as column name from file mrchecker-app-under-test\src\test\resources\enviroments\environments.csv

+
+
+

Since mrchecker-core-module version 5.6.2.1 +== Command Line

+
+
+
+
mvn test site -Dgroups=RegistryPageTestTag -Denv=DEV
+
+
+
+
+
Eclipse
+
+
+image085 +
+
+
+
+image086 new +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1 +== Command Line

+
+
+
+
mvn test site -Dtest=RegistryPageTest -Denv=DEV
+
+
+
+
+
Eclipse
+
+
+image085 +
+
+
+
+image086 +
+
+
+
+
+

26.12. Selenium Module

+ +
+
Selenium Test Module
+ +
+
+
What is MrChecker E2E Selenium Test Module
+
+
+image2 +
+
+
+
+
Selenium Structure
+ +
+
+
Framework Features
+
+ +
+
+
+
How to start?
+ +
+
+
Selenium Best Practices
+ +
+
+
Selenium UFT Comparison
+ +
+
+
+

26.13. Selenium Structure

+
+
What is Selenium
+
+

Selenium is a framework for testing browser applications. The test automation supports:

+
+
+
    +
  • +

    Frequent regression testing

    +
  • +
  • +

    Repeating test case executions

    +
  • +
  • +

    Documentation of test cases

    +
  • +
  • +

    Finding defects

    +
  • +
  • +

    Multiple Browsers

    +
  • +
+
+
+

The Selenium testing framework consists of multiple tools:

+
+
+
    +
  • +

    Selenium IDE

    +
    +

    The Selenium Integrated Development Environment is a prototyping tool for building test scripts. It is a Firefox Plugin and provides an easy-to-use interface for developing test cases. Additionally, Selenium IDE contains a recording feature, that allows the user to record user inputs that can be automatically re-executed in future.

    +
    +
  • +
  • +

    Selenium 1

    +
    +

    Selenium 1, also known as Selenium RC, commands a Selenium Server to launch and kill browsers, interpreting the Selenese commands passed from the test program. The Server acts as an HTTP proxy. This tool is deprecated.

    +
    +
  • +
  • +

    Selenium 2

    +
    +

    Selenium 2, also known as Selenium WebDriver, is designed to supply a well-designed, object-oriented API that provides improved support for modern advanced web-app testing problems.

    +
    +
  • +
  • +

    Selenium 3.0

    +
    +

    The major change in Selenium 3.0 is removing the original Selenium Core implementation and replacing it with one backed by WebDriver. There is now a W3C specification for browser automation, based on the Open Source WebDriver.

    +
    +
  • +
  • +

    Selenium Grid

    +
    +

    Selenium Grid allows the scaling of Selenium RC test cases, that must be run in multiple and potentially variable environments. The tests can be run in parallel on different remote machines.

    +
    +
  • +
+
+
+
+
Selenium on the Production Line
+
+

More information on Selenium on the Production Line can be found here.

+
+
+

tl;dr

+
+
+

The Production Line has containers running Chrome and Firefox Selenium Nodes. The communication with these nodes is accomplished using Selenium Grid.

+
+
+

Having issues using Selenium on the Production Line? Check the Production Line issue list, maybe it’s a known issue that can be worked around.

+
+
+
+
What is WebDriver
+
+

On the one hand, it is a very convenient API for a programmer that allows for interaction with the browser, on the other hand it is a driver concept that enables this direct communication.

+
+
+
+image53 +
+
+
+
+
== How does it work?
+
+
+image54 +
+
+
+

A tester, through their test script, can command WebDriver to perform certain actions on the WAUT on a certain browser. The way the user can command WebDriver to perform something is by using the client libraries or language bindings provided by WebDriver.

+
+
+

By using the language-binding client libraries, a tester can invoke browser-specific implementations of WebDriver, such as Firefox Driver, IE Driver, Opera Driver, and so on, to interact with the WAUT of the respective browser. These browser-specific implementations of WebDriver will work with the browser natively and execute commands from outside the browser to simulate exactly what the application user does.

+
+
+

After execution, WebDriver will send the test result back to the test script for developer’s analysis.

+
+
+
+
What is Page Object Model?
+
+
+image55 +
+
+
+

Creating Selenium test cases can result in an unmaintainable project. One of the reasons is that too much duplicated code is used. Duplicated code could result from duplicated functionality leading to duplicated usage of locators. The main disadvantage of duplicated code is that the project is less maintainable. If a locator changes, you have to walk through the whole test code to adjust locators where necessary. By using the page object model we can make non-brittle test code and reduce or eliminate duplicate test code. In addition, it improves the readability and allows us to create interactive documentation. Last but not least, we can create tests with less keystroke. An implementation of the page object model can be achieved by separating the abstraction of the test object and the test scripts.

+
+
+
+image56 +
+
+
+
+
Basic Web elements
+
+

This page will provide an overview of basic web elements.

+
+
+
+image57 +
+
+
+
+image58 +
+
+
+

|== = +|Name +|Method to use element

+
+
+

|Form: Input Text +|elementInputText()

+
+
+

|Form: Label +|elementLabel()

+
+
+

|Form: Submit Button +|elementButton()

+
+
+

|Page: Button +|elementButton()

+
+
+

|Checkbox +|elementCheckbox()

+
+
+

|Radio +|elementRadioButton()

+
+
+

|Elements (Tabs, Cards, Account, etc.) +|elementTab()

+
+
+

|Dropdown List +|elementDropdownList()

+
+
+

|Link +|-

+
+
+

|Combobox +|elementList() +|== =

+
+
+

Comparison of how picking a value from a checkbox can be done:

+
+
+
    +
  • +

    by classic Selenium atomic actions

    +
  • +
  • +

    by our enhanced Selenium wrapper

    +
  • +
+
+
+

Classic Selenium atomic actions

+
+
+
+
List<WebElement> checkboxesList = getDriver()
+                .findElements(selectorHobby);
+WebElement currentElement;
+for (int i = 0; i < checkboxesList.size(); i++) {
+    currentElement = checkboxesList.get(i);
+    if (currentElement.getAttribute("value")
+                    .equals(hobby.toString()) && currentElement.isSelected() != true)
+                        {
+        currentElement.click();
+            }
+}
+
+
+
+

Enhanced Selenium in E2E test framework

+
+
+
+
getDriver().elementCheckbox(selectorHobby)
+				.setCheckBoxByValue(hobby.toString());
+
+
+
+
+
+

26.14. Framework Features

+
+
Page Class
+
+

Page Object Models allow for the representation of a webpage as a Java Class. The class contains all required web elements like buttons, textfields, labels, etc. When initializing a new project, create a new package to store the Page Object Models in.

+
+
+
+
Initialization
+
+

Source folder: allure-app-under-test/src/main/java

+
+
+

Name: com.example.selenium.pages.YOUR_PROJECT

+
+
+

Classes being created inside of this new package have to extend the BasePage class. As a result, a few abstract methods from BasePage have to be implemented.

+
+
+
+
public class DemoPage extends BasePage {
+
+	@Override
+	public boolean isLoaded() {
+
+	}
+
+	@Override
+	public void load() {
+
+	}
+
+	@Override
+	public String pageTitle() {
+
+	}
+}
+
+
+
+

The example above demonstrates a minimum valid Page Object class with all required methods included.

+
+
+
+
BasePage method: isLoaded
+
+

The inherited method isLoaded() can be used to check if the current Page Object Model has been loaded correctly. There are multiple ways to verify a correctly loaded page. One example would be to compare the actual page title with the expected page title.

+
+
+
+
public boolean isLoaded() {
+	if(getDriver().getTitle().equals("EXPECTED_TITLE")) {
+		return true;
+	}
+	return false;
+}
+
+
+
+
+
BasePage method: load
+
+

The method load() can be used to tell the webdriver to load a specific page.

+
+
+
+
public void load() {
+	getDriver().get("http://SOME_PAGE");
+}
+
+
+
+
+
BasePage method: pageTitle
+
+

The pageTitle() method returns a String containing the page title.

+
+
+
+
Creating a selector variable
+
+

To initialize web elements, a large variety of selectors can be used.

+
+
+

We recommend creating a private and constant field for every web element you’d like to represent in Java. Use the guide above to find the preferred selector and place it in the code below at "WEB_ELEMENT_SELECTOR".

+
+
+
+
private static final By someWebElementSelector = By.CSS("WEB_ELEMENT_SELECTOR");
+
+
+
+

As soon as you create the selector above, you can make use of it to initialize a WebElement object.

+
+
+
+
WebElement someWebElement = getDriver().findDynamicElement(someWebElementSelector);
+
+
+
+

Note: The examples displayed in the cssSelector.docx file use the Selenium method driver.findElement() to find elements. However, using this framework we recommend findDynamicElement() or findQuietlyElement(). findDynamicElement() allows waiting for dynamic elements, for example buttons that pop up.

+
+
+
+
Creating a page method
+
+

To interact with the page object, we recommend creating methods for each action.

+
+
+
+
public void enterGoogleSearchInput(String query) {
+	...
+}
+
+
+
+

Creating a method like the one above allows the test case to run something like googleSearchPage.enterGoogleSearchInput("Hello") to interact with the page object.

+
+
+
+
Naming Conventions
+
+

For code uniformity and readability, we provide a few method naming conventions.

+
+
+

|== =

+
+
+

|Element +|Action +|Name (example)

+
+
+

|Form: Input text +|enter +|enterUsernameInput()

+
+
+

| +|is (label) +|isUsernameInputPresent()

+
+
+

| +|is (value) +|isUsernameEmpty()

+
+
+

| +|get +|getUsernameValue()

+
+
+

|Form: Label +|get +|getCashValue()

+
+
+

| +|is (value) +|isCashValueEmpty()

+
+
+

| +|is (label) +|isCashLabelPresent()

+
+
+

|Form: Submit Button +|submit +|submitLoginForm()

+
+
+

| +|is +|isLoginFormPresent()

+
+
+

|Page: Button +|click +|clickInfoButton()

+
+
+

| +|is +|isInfoButtonpresent()

+
+
+

|Checkbox +|set +|setRememberMeCheckbox()

+
+
+

| +|unset +|unsetRememberMeCheckbox()

+
+
+

| +|is (present) +|isRememberMeCheckboxPresent()

+
+
+

| +|is (value) +|isRememberMeCheckboxSet()

+
+
+

|Radio +|set +|setMaleRadioValue("Woman")

+
+
+

| +|is (present) +|isMaleRadioPresent()

+
+
+

| +|is (visible) +|isMaleRadioVisible()

+
+
+

| +|get +|getSelectedMaleValue()

+
+
+

|Elements (Tabs, Cards, Account, etc.) +|click +|clickPositionTab() / clickMyBilanceCard()

+
+
+

| +|is +|isMyBilanceCardPresent()

+
+
+

|Dropdown List +|select +|selectAccountTypeValue(typeName)

+
+
+

| +|unselect +|unselectAccountTypeValue(typeName)

+
+
+

| +|multiple select +|selectAccountTypesValues(List typeNames)

+
+
+

| +|is (list) +|isAccountTypeDropdownListPresent()

+
+
+

| +|is (element present) +|isAccountTypeElementPresent(typeName)

+
+
+

| +|is (element selected) +|isAccountTypeSelected(typeName)

+
+
+

|Link +|click +|clickMoreLink()

+
+
+

| +|is +|isMoreLinkPresent()

+
+
+

|Combobox +|select +|selectSortCombobox()

+
+
+

| +|is (present) +|isSortComboboxPresent(name)

+
+
+

| +|is (contain) +|selectSortComboboxContain(name)

+
+
+

|Element Attribute +|get +|getPositionTabCss()

+
+
+

| +|get +|getMoreLinkHref() / getRememberMeCheckboxName()

+
+
+

|== =

+
+
+

A css selector is used to select elements from an HTML page.

+
+
+

Selection by element tag, class or id are the most common selectors.

+
+
+
+
<p class='myText' id='123'>
+
+
+
+

This text element (p) can be found by using any one of the following selectors:

+
+
+
+
The HTML element: "p". Note: in practical use this will be too generic, if a preceding text section is added, the selected element will change.
+The class attribute preceded by ".": ".myText"
+The id attribute preceded by "#": "#123"
+
+
+
+
+
Using other attributes
+
+

When a class or an id attribute is not sufficient to identify an element, other attributes can be used as well, by using "[attribute=value]": For example:

+
+
+
+
<a href='https://ns.nl/example.html'>
+
+
+
+

This can be selected by using the entire value: "a[href='https://ns.nl/example.html'\]". For selecting links starting with, containing, ending with see the list below.

+
+
+
+
Using sub-elements
+
+

The css selectors can be stacked, by appending them:

+
+
+
+
<div id='1'><a href='ns.nl'></div>
+<div id='2'><a href='nsinternational.nl'></div>
+
+
+
+

In the example above, the link element to nsinternational can be obtained with: "#2 a".

+
+
+
+
When possible avoid
+
+
    +
  • +

    Using paths of commonly used HTML elements within the containers (HTML: div). This will cause failures when a container is added, a common occurrence during development, e.g. "div div p". Use class or id instead, if those are not available, request them to be added in the production code.

    +
  • +
  • +

    Magic order numbers. It is possible to get the second text element in its parent container by using the selector "p:nth-child(2)". If the items are representing different items, ask the developer to add specific attributes. It is also possible to request all items, with a selector similar to ".myList li", and iterate through them later.

    +
  • +
+
+
+
+
List
+
+

A good list with CSS Selectors can be found at W3Schools:
+https://www.w3schools.com/cssref/css_selectors.asp

+
+
+
+
Selenium UFT Comparison
+
+

|== =

+
+
+

|Subject +|HP UFT +|HP LeanFT +|Selenium +|Selenium IDE

+
+
+

|Language +|VBScript +|Same as Selenium +|Supports several languages. +Java +|Javascript

+
+
+

|Learning curve +|Based on VBScript which is relatively easy to learn +|Less intuitive, more coding knowledge necessary +|Less intuitive, more coding skills necessary +|Record/playback possible. Generated code difficult to maintain

+
+
+

|Project type +|Traditional +|Agile +|Agile +|Agile

+
+
+

|User oriented +|More Tester +|More Developer +|More Developer +|More Tester

+
+
+

|Object recognition +|Test object identification and storage in object repository +|Same as UFT +|With Firebug +|Same as SE

+
+
+

|Customizations +|Only the available standard. No customization +|Same as UFT +|Lots of customizations possible +|Fewer than SE

+
+
+

|Framework +|Needed. +Exists in ATaaS +| +|Needed. +Integration with Fitnesse, Cucumber, Gauge +|No Framework. Limited capabilities of the tool.

+
+
+

|Operating System support +|Runs on Windows +|Runs on Windows +|Multiple OS support. With Grid: testing on multiple devices at same time +|Plugin for Firefox

+
+
+

|Application coverage +|Many +|Many +|Web only +|Web only

+
+
+

|Multiple browsers +|In UFT 12.5 available +|In 12.5 available +|Multiple tests in multiple browser windows at once and faster support for new browser versions +|Multiple tests in multiple browser windows at once and faster support for new browser versions

+
+
+

|System Load +|High system load (RAM & CPU usage) +|Lower load than HP UFT? +|Lower load than HP UFT +|Lower load than HP UFT

+
+
+

|ALM integration +|With HP ALM – full integration +| +|Jira, Jenkins +Not with ALM tool +|Same as SE

+
+
+

|Integration with other tools +|A lot can be built, but many are already covered. +|More than UFT. +|Freeware and can be integrated with different open source tools +|Freeware and can be integrated with different open source tools

+
+
+

|Addins +|Add-ins necessary to access all capabilities of the tool – license related +|Same as UFT +|See integration with other tools +|See integration with other tools

+
+
+

|Reporting +|Complete, link to ALM +|Same as UFT +|No native mechanism for generating reports, but multiple plugins available for reporting +|No native mechanism for generating reports, but multiple plugins available for reporting

+
+
+

|Support +|HP full support +|Same as UFT +|Limited support as it is open source +|Limited support as it is open source

+
+
+

|License costs +|About 17K – Capgemini price 5K. +Included in the S2 service charge +|Same price as HP UFT +|Free +|Free +limited functionality (no iterations / conditional statements)

+
+
+

|iVAL Service +|ATaaS +|Not in a S2 service +|Not in a S2 service +|Not in a S2 service

+
+
+

|== =

+
+
+

Bold for key differentiators.

+
+
+

Projects also choose an available resource and the knowledge of that resource.

+
+
+

Both: Framework determines the quality of automation. Needs to be set up by someone with experience with the tool

+
+
+
+
Run on different browsers
+
+
+image59 +
+
+
+

To execute each test with a chosen installed browser, specific arguments are required in Run configuration.

+
+
+
+image60 +
+
+
+
+image61 +
+
+
+

It is necessary to enter -Dbrowser= with browser parameter name as an argument (in 'Arguments' tab):

+
+
+

firefox +ie +phantomjs +chrome +chromeheadless +For example: -Dbrowser=ie

+
+
+
+
_-ea_ should be entered as an argument to restore default settings.
+
+
+
+
+
Browser options
+
+

To run a browser with specific options during runtime, please use

+
+
+

-DbrowserOptions="< options >"

+
+
+
+
> mvn test -DbrowserOptions="param1"
+> mvn test -DbrowserOptions="param1=value1"
+
+
+
+

examples:

+
+
+
    +
  • +

    One parameter -DbrowserOptions="headless"

    +
  • +
  • +

    One parameter -DbrowserOptions="--incognito"

    +
  • +
  • +

    Many parameters -DbrowserOptions="headless;param1=value1;testEquals=FirstEquals=SecondEquals;--testMe"

    +
  • +
+
+
+

List of options/capabilites supported by:

+
+
+ +
+
+
+
Run with full range of resolution
+
+
+image62 +
+
+
+

In order to execute tests in different browser resolutions, it is required to provide these resolutions as a test parameter.

+
+
+

Test example with resolutions included may be found in ResolutionTest test class

+
+
+
+image63 +
+
+
+

Example of resolution notation is available in ResolutionEnum class

+
+
+
+image64 +
+
+
+

Test with given resolution parameters will be launched as many times as the number of resolutions provided.

+
+
+
+
Selenium Best Practices
+
+

The following table displays a few best practices that should be taken into consideration when developing Selenium test cases.

+
+
+

|== =

+
+
+

|Best Practices +|Description

+
+
+

|"Keep it Simple" +|Do not force use every Selenium feature available - Plan before creating the actual test cases

+
+
+

|Using Cucumber +|Cucumber can be used to create initial testcases for further decision making

+
+
+

|Supporting multiple browsers +|Test on multiple browsers (in parallel, if applicable) if the application is expected to support multiple environments

+
+
+

|Test reporting +|Make use of test reporting modules like Junit which is included in the framework

+
+
+

|Maintainability +|Always be aware of the maintainability of tests - You should always be able to adapt to changes

+
+
+

|Testing types +|Which tests should be created? Rule of thumb: 70% Unit test cases, 20% Integration test cases and 10% UI Test cases

+
+
+

|Test data +|Consider before actually developing tests and choosing tools: Where to get test data from, how to reset test data

+
+
+

|===

+
+
+
+
+

26.15. Web API Module

+ + +
+
Is it doable to keep pace in QA with today’s software agile approach?
+
+

DevOps + Microservices + Shift left + Time to Market == ? Service virtualization ?

+
+
+
+image72 +
+
+
+

Test pyramid

+
+
+
+image73 +
+
+
+
+
What is service virtualization
+
+

Service Virtualization has become recognized as one of the best ways to speed up testing and accelerate your time to market.

+
+
+

Service virtualization lets you automatically execute tests even when the application under test’s dependent system components (APIs, third-party applications, etc.) cannot be properly accessed or configured for testing. By simulating these dependencies, you can ensure that your tests will encounter the appropriate dependency behaviour and data each and every time that they execute.

+
+
+

Service virtualization is the simulation of interfaces – not the virtualization of systems.

+
+
+

According to Wikipedia’s service virtualization entry: Service virtualization emulates the behaviour of software components to remove dependency constraints on development and testing teams. Such constraints occur in complex, interdependent environments when a component connected to the application under test is:

+
+
+
    +
  • +

    Not yet completed

    +
  • +
  • +

    Still evolving

    +
  • +
  • +

    Controlled by a third-party or partner

    +
  • +
  • +

    Available for testing only in a limited capacity or at inconvenient times

    +
  • +
  • +

    Difficult to provision or configure in a test environment

    +
  • +
  • +

    Needed for simultaneous access by different teams with varied test data setup and other requirements

    +
  • +
  • +

    Restricted or costly to use for load and performance testing

    +
  • +
+
+
+

For instance, instead of virtualizing an entire database (and performing all associated test data management as well as setting up the database for every test session), you monitor how the application interacts with the database, then you emulate the related database behaviour (the SQL queries that are passed to the database, the corresponding result sets that are returned, and so forth).

+
+
+
+
Mocks, stubs and virtual services
+
+

The most commonly discussed categories of test doubles are mocks, stubs and virtual services.

+
+
+

Stub: a minimal implementation of an interface that normally returns hardcoded data that is tightly coupled to the test suite. It is most useful when the suite of tests is simple and keeping the hardcoded data in the stub is not an issue. Some stubs are handwritten; some can be generated by tools. A stub is normally written by a developer for personal use. It can be shared with testers, but wider sharing is typically limited by interoperability issues related to software platform and deployment infrastructure dependencies that were hardcoded. A common practice is when a stub works in-process directly with classes, methods, and functions for the unit, module, and acceptance testing. Some developers will say that a stub can also be primed, but you cannot verify an invocation on a stub. Stubs can also be communicating "over the wire", for example, HTTP, but some would argue that they should be called virtual services in that case.

+
+
+

Mock: a programmable interface observer, that verifies outputs against expectations defined by the test. It is frequently created using a third party library, for example in Java that is Mockito, JMock or WireMock. It is most useful when you have a large suite of tests and a stub will not be sufficient because each test needs a different data set up and maintaining them in a stub would be costly. The mock lets us keep the data set-up in the test. A mock is normally written by a developer for personal use but it can be shared with testers. However, wider sharing is typically limited by interoperability issues related to software platform and deployment infrastructure dependencies that were hardcoded. They are most often work-in-progress directly with classes, methods, and functions for a unit, module, and acceptance testing. Mock provides responses based on a given request satisfying predefined criteria (also called request or parameter matching). A mock also focuses on interactions rather than state so mocks are usually stateful. For example, you can verify how many times a given method was called or the order of calls made to a given object.

+
+
+

Virtual service: a test double often provided as a Software-as-a-Service (SaaS), is always called remotely, and is never working in-process directly with methods or functions. A virtual service is often created by recording traffic using one of the service virtualization platforms instead of building the interaction pattern from scratch based on interface or API documentation. A virtual service can be used to establish a common ground for teams to communicate and facilitate artefact sharing with other development teams as well as testing teams. A virtual service is called remotely (over HTTP, TCP, etc.) normally supports multiple protocols (e.g. HTTP, MQ, TCP, etc.), while a stub or mock normally supports only one. Sometimes virtual services will require users to authorize, especially when deployed in environments with enterprise-wide visibility. Service virtualization tools used to create virtual services will most often have user interfaces that allow less tech-savvy software testers to hit the ground running, before diving into the details of how specific protocols work. They are sometimes backed by a database. They can also simulate non-functional characteristics of systems such as response times or slow connections. You can sometimes find virtual services that provide a set of stubbed responses for given request criteria and pass every other request to a live backend system (partial stubbing). Similar to mocks, virtual services can have quite complex request matchers, that allow having one response returned for many different types of requests. Sometimes, virtual services simulate system behaviours by constructing parts of the response based on request attributes and data.

+
+
+

It is often difficult to say definitely which of the following categories a test double fits into. They should be treated as a spectrum rather than strict definitions.

+
+
+

Unresolved include directive in modules/ROOT/pages/mrchecker.wiki/master-mrchecker.adoc - include::Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module-How-plug-in-service-virtualization-into-Application-Under-Test.adoc[]

+
+
+
+
How to make a virtual asset
+
+

This can be done in four ways:

+
+
+
    +
  • +

    Record all traffic (Mappings and Responses) that comes through proxy - by UI

    +
  • +
  • +

    Record all traffic (Mappings and Responses) that comes through proxy - by Code

    +
  • +
  • +

    Create Mappings and Responses manually by text files

    +
  • +
  • +

    Create Mappings and Responses manually by code

    +
  • +
+
+
+
+
Record all traffic (Mappings and Responses) that comes through proxy - UI
+
+

Full article here Wiremock record-playback.

+
+
+

First, start an instance of WireMock running standalone. Once that’s running, visit the recorder UI page at http://localhost:8080/__admin/recorder (assuming you started WireMock on the default port of 8080).

+
+
+
+image77 +
+
+
+

Enter the URL you wish to record from in the target URL field and click the Record button. You can use http://example.mocklab.io to try it out.

+
+
+

Now you need to make a request through WireMock to the target API so that it can be recorded. If you’re using the example URL, you can generate a request using curl:

+
+
+
+
$ curl http://localhost:8080/recordables/123
+
+
+
+

Now click stop. You should see a message indicating that one stub was captured.

+
+
+

You should also see that a file has been created called something like recordables_123-40a93c4a-d378-4e07-8321-6158d5dbcb29.json under the mappings directory created when WireMock started up, and that a new mapping has appeared at http://localhost:8080/__admin/mappings.

+
+
+

Requesting the same URL again (possibly disabling your wifi first if you want a firm proof) will now serve the recorded result:

+
+
+
+
$ curl http://localhost:8080/recordables/123
+
+{
+"message": "Congratulations on your first recording!"
+}
+
+
+
+
+
Record all traffic (Mappings and Responses) that comes through proxy - by Code
+
+

An example of how such a record can be achieved

+
+
+
+
@Test
+public void startRecording() {
+
+    SnapshotRecordResult recordedMappings;
+
+    DriverManager.getDriverVirtualService()
+            .start();
+    DriverManager.getDriverVirtualService()
+            .startRecording("http://example.mocklab.io");
+    recordedMappings = DriverManager.getDriverVirtualService()
+            .stopRecording();
+
+    BFLogger.logDebug("Recorded messages: " + recordedMappings.toString());
+
+}
+
+
+
+
+
Create Mappings and Responses manually by text files
+
+

EMPTY

+
+
+
+
Create Mappings and Responses manually by code
+
+

Link to full file structure: REST_FarenheitToCelsiusMethod_Test.java

+
+
+
+
Start up Virtual Server
+
+
+
public void startVirtualServer() {
+
+    // Start Virtual Server
+    WireMockServer driverVirtualService = DriverManager.getDriverVirtualService();
+
+    // Get Virtual Server running http and https ports
+    int httpPort = driverVirtualService.port();
+    int httpsPort = driverVirtualService.httpsPort();
+
+    // Print is Virtual server running
+    BFLogger.logDebug("Is Virtual server running: " + driverVirtualService.isRunning());
+
+    String baseURI = "http://localhost";
+    endpointBaseUri = baseURI + ":" + httpPort;
+}
+
+
+
+
+
Plug in a virtual asset
+
+

REST_FarenheitToCelsiusMethod_Test.java

+
+
+
+
public void activateVirtualAsset() {
+    /*
+    * ----------
+    * Mock response. Map request with virtual asset from file
+    * -----------
+    */
+    BFLogger.logInfo("#1 Create Stub content message");
+    BFLogger.logInfo("#2 Add resource to virtual server");
+    String restResourceUrl = "/some/thing";
+    String restResponseBody = "{ \"FahrenheitToCelsiusResponse\":{\"FahrenheitToCelsiusResult\":37.7777777777778}}";
+
+    new StubREST_Builder //For active virtual server ...
+            .StubBuilder(restResourceUrl) //Activate mapping, for this Url AND
+            .setResponse(restResponseBody) //Send this response  AND
+            .setStatusCode(200) // With status code 200 FINALLY
+            .build(); //Set and save mapping.
+
+}
+
+
+
+

Link to full file structure: StubREST_Builder.java

+
+
+

Source link to How to create Stub.

+
+
+

StubREST_Builder.java

+
+
+
+
public class StubREST_Builder {
+
+    // required parameters
+    private String endpointURI;
+
+    // optional parameters
+    private int statusCode;
+
+    public String getEndpointURI() {
+        return endpointURI;
+    }
+
+    public int getStatusCode() {
+        return statusCode;
+    }
+
+    private StubREST_Builder(StubBuilder builder) {
+        this.endpointURI = builder.endpointURI;
+        this.statusCode = builder.statusCode;
+    }
+
+    // Builder Class
+    public static class StubBuilder {
+
+        // required parameters
+        private String endpointURI;
+
+        // optional parameters
+        private int     statusCode  = 200;
+        private String  response    = "{ \"message\": \"Hello\" }";
+
+        public StubBuilder(String endpointURI) {
+            this.endpointURI = endpointURI;
+        }
+
+        public StubBuilder setStatusCode(int statusCode) {
+            this.statusCode = statusCode;
+            return this;
+        }
+
+        public StubBuilder setResponse(String response) {
+            this.response = response;
+            return this;
+        }
+
+        public StubREST_Builder build() {
+
+            // GET
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            // Given that request with ...
+                            get(urlMatching(this.endpointURI))
+                                    .withHeader("Content-Type", equalTo(ContentType.JSON.toString()))
+                                    // Return given response ...
+                                    .willReturn(aResponse()
+                                            .withStatus(this.statusCode)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody(this.response)
+                                            .withTransformers("body-transformer")));
+
+            // POST
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            // Given that request with ...
+                            post(urlMatching(this.endpointURI))
+                                    .withHeader("Content-Type", equalTo(ContentType.JSON.toString()))
+                                    // Return given response ...
+                                    .willReturn(aResponse()
+                                            .withStatus(this.statusCode)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody(this.response)
+                                            .withTransformers("body-transformer")));
+
+            // PUT
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            // Given that request with ...
+                            put(urlMatching(this.endpointURI))
+                                    .withHeader("Content-Type", equalTo(ContentType.JSON.toString()))
+                                    // Return given response ...
+                                    .willReturn(aResponse()
+                                            .withStatus(this.statusCode)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody(this.response)
+                                            .withTransformers("body-transformer")));
+
+            // DELETE
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            // Given that request with ...
+                            delete(urlMatching(this.endpointURI))
+                                    .withHeader("Content-Type", equalTo(ContentType.JSON.toString()))
+                                    // Return given response ...
+                                    .willReturn(aResponse()
+                                            .withStatus(this.statusCode)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody(this.response)
+                                            .withTransformers("body-transformer")));
+
+            // CATCH any other requests
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            any(anyUrl())
+                                    .atPriority(10)
+                                    .willReturn(aResponse()
+                                            .withStatus(404)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody("{\"status\":\"Error\",\"message\":\"Endpoint not found\"}")
+                                            .withTransformers("body-transformer")));
+
+            return new StubREST_Builder(this);
+        }
+    }
+}
+
+
+
+
+
Start a virtual server
+
+

The following picture presents the process of executing Smoke Tests in a virtualized environment:

+
+
+
+image78 +
+
+
+
+
Install docker service
+
+

If docker is not already installed on machine (this should be checked during C2C creation), install docker, docker-compose, apache2-utils, openssl (You can use script to install docker & docker-compose OR refer to this post and add Alias for this machine <C2C_Alias_Name>):

+
+
+
    +
  • +

    run the script

    +
  • +
  • +

    sudo apt-get install -y apache2-utils

    +
  • +
+
+
+
+
Build a docker image
+
+

Dockerfile:

+
+
+
+
FROM docker.xxx.com/ubuntu:16.04
+MAINTAINER Maintainer Name "maintainer@email.address"
+LABEL name=ubuntu_java \
+           version=v1-8.0 \
+           base="ubuntu:16.04" \
+           build_date="03-22-2018" \
+           java="1.8.0_162" \
+           wiremock="2.14.0" \
+           description="Docker to use with Ubuntu, JAVA and WIREMOCK "
+
+##Update and install the applications needed
+COPY 80proxy /etc/apt/apt.conf.d/80proxy
+RUN apt-get update
+RUN apt-get install -y \
+            wget \
+            libfontconfig \
+            unzip \
+            zip \
+            ksh \
+            curl \
+            git
+
+COPY wgetrc /etc/wgetrc
+
+#Env parameters
+
+### JAVA PART ###
+#TO UPDATE:please verify url link to JDK http://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html
+##Download and install JAVA JDK8
+RUN mkdir /opt/jdk
+RUN wget -qq --header "Cookie: oraclelicense=accept-securebackup-cookie" http://download.oracle.com/otn-pub/java/jdk/8u162-b12/0da788060d494f509bf8624735fa2f1/jdk-8u162-linux-x64.tar.gz && tar -zxf jdk-8u162-linux-x64.tar.gz -C /opt/jdk && rm jdk-8u162-linux-x64.tar.gz && update-alternatives --install /usr/bin/javac javac /opt/jdk/jdk1.8.0_162/bin/javac 100 && java -version && chmod 755 -R /opt/jdk/jdk1.8.0_162/
+RUN java -version
+
+##Add user
+RUN useradd -u 29001 -g 100 srvpwiredev
+
+##Add app
+RUN mkdir -p -m 777 /app
+COPY wiremock-standalone-2.14.0.jar /app/wiremock-standalone-2.14.0.jar
+
+##Expose port
+EXPOSE 8080
+
+##Set workdir
+WORKDIR /app
+
+##Run app
+CMD java -jar /app/wiremock-standalone-2.14.0.jar
+
+
+
+

Execute the following steps with a specified version to build a docker image and push it to the repository :

+
+
+
+
## Build image
+sudo docker build -t docker.xxx.com/app/build/wiremock:v2.14.0 .
+
+## Push image
+sudo docker login docker.xxx.com
+sudo docker push docker.xxx.com/app/build/wiremock:v2.14.0
+
+
+
+
+
Run docker image
+
+

To run a docker image, execute the following command:

+
+
+
+
sudo docker run -td -p 8080:8080 -v /home/wiremock/repo/app/docker/QA/mappings:/app/mappings -v /home/wiremock/repo/app/docker/QA/__files:/app/__files --restart always docker.xxx.com/app/build/wiremock:v2.14.0
+
+
+
+

Where:

+
+
+

-p - publish a container’s port to the host

+
+
+

-v - bind mount a volume. WireMock server creates two directories under the current one: mappings and __files. It is necessary to mount directories with already created mappings and responses to make it work.

+
+
+

--restart always - restart policy to apply when a container exits

+
+
+

All of the parameters are described in: official docker documentation

+
+
+
+
Map requests with virtual assets
+
+

What is WireMock?

+
+
+

WireMock is an HTTP mock server. At its core it is a web server that can be primed to serve canned responses to particular requests (stubbing) and that captures incoming requests so that they can be checked later (verification). It also has an assortment of other useful features including record/playback of interactions with other APIs, injection of faults and delays, and simulation of stateful behaviour.

+
+
+

Full documentation can be found under the following link: WireMock

+
+
+
+
Record / create virtual assets mappings
+
+

Record

+
+
+

WireMock can create stub mappings from requests it has received. Combined with its proxying feature, this allows you to "record" stub mappings from interaction with existing APIs.

+
+
+

Record and playback (Legacy): documentation

+
+
+
+
java -jar wiremock-standalone-2.16.0.jar --proxy-all="http://search.twitter.com" --record-mappings --verbose
+
+
+
+

Once it’s started and request is sent to it, it will be redirected to "http://search.twitter.com" and traffic (response) is saved to files in mappings and __files directories for further use.

+
+
+

Record and playback (New): documentation

+
+
+
+
Enable mappings in a virtual server
+
+

When the WireMock server starts, it creates two directories under the current one: mappings and __files. To create a stub, it is necessary to drop a file with a .json extension under mappings.

+
+
+

Run docker with mounted volumes

+
+
+

Mappings are in a repository. It is necessary to mount directories with already created mappings and responses to make it work:

+
+
+
+
sudo docker run -td -p 8080:8080 -v /home/wiremock/repo/app/docker/QA/mappings:/app/mappings -v /home/wiremock/repo/app/docker/QA/__files:/app/__files --restart always docker.xxx.com/app/build/wiremock:v2.14.0
+
+
+
+

The description of how to build and run docker is available under: Docker run command description

+
+
+

Recorded mappings

+
+
+

Recorded mappings are kept in the project repository.

+
+
+
+
Create a user and map them to docker user
+
+

To enable the connection from Jenkins to Virtual Server (C2C), it is necessary to create a user and map them to docker group user. It can be done using the following command:

+
+
+
+
adduser -G docker -m wiremock
+
+
+
+

To set the password for a wiremock user:

+
+
+
+
passwd wiremock
+
+
+
+
+
Create SSH private and public keys for a wiremock user
+
+

SSH keys serve as a means of identifying yourself to an SSH server using public-key cryptography and challenge-response authentication. One immediate advantage this method has over traditional password is that you can be authenticated by the server without ever having to send your password over the network.

+
+
+

To create an SSH key, log in as wiremock (previously created user).

+
+
+
+
su wiremock
+
+
+
+

The .ssh directory is not by default created below user home directory. Therefore, it is necessary to create it:

+
+
+
+
mkdir ~/.ssh
+
+
+
+

Now we can proceed with creating an RSA key using ssh-keygen (a tool for creating new authentication key pairs for SSH):

+
+
+
+
ssh-keygen -t rsa
+
+
+
+

A key should be created under ~/.ssh/id_rsa +Appending the public key to authorized_keys:

+
+
+
+
wiremock@vc2crptXXXXXXXn:~/.ssh$ cat id_rsa.pub >> authorized_keys
+
+
+
+
+
Install an SSH key in Jenkins
+
+

To add an SSH key to Jenkins, go to credentials in your job location. Choose the folder within credentials, then 'global credentials', 'Add credentials'. Fill in the fields. Finally, the entry should be created.

+
+
+
+
Build a Jenkins Groovy script
+
+

The description of how to use SSH Agent plugin in Jenkins pipeline can be found under: https://www.karthikeyan.tech/2017/09/ssh-agent-blue-ocean-via-jenkins.html

+
+
+

Example of use:

+
+
+
+
sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+     sh """
+         ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "docker container restart ${env.WIREMOCK_CONTAINER_NAME}"
+     """
+}
+
+
+
+

Where: env.WIREMOCK_CREDENTIALS is a credential id of previously created wiremock credentials. Now that it is present, we can execute commands on a remote machine, where in ssh command: +env.WIREMOCK_USERNAME - user name of user connected with configured private key +env.WIREMOCK_IP_ADDRESS - ip address of the machine where this user with this private key exists

+
+
+
+
Pull repository with virtual assets
+
+

To pull the repository on a remote machine, it is necessary to use the previously described SSH Agent plugin. An example of use:

+
+
+
+
sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+withCredentials([usernamePassword(credentialsId: env.STASH_CREDENTIALS, passwordVariable: 'PASS', usernameVariable: 'USER')]) {
+     sh """
+         ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "cd ~/${env.APPLICATION_DIRECTORY_WIREMOCK}/${env.PROJECT_HOME}; git fetch https://$USER:$PASS@${env.GIT_WITHOUT_HTTPS} ${env.GIT_BRANCH}; git reset --hard FETCH_HEAD; git clean -df"
+      """
+    }
+}
+
+
+
+

Where:

+
+
+

withCredentials allows various kinds of credentials (secrets) to be used in idiosyncratic ways. Each binding will define an environment variable active within the scope of the step. Then the necessary commands are executed:

+
+
+

cd …​ - command will change from current directory to the specified directory with git repository

+
+
+

git fetch …​ ;git reset …​ ;git clean …​ - pull from GIT branch. Git pull or checkout are not used here to prevent the situation with wrong coding between Mac OSX/Linux etc.

+
+
+

PLEASE remember that when using this script for the first time, the code from previous block should be changed to:

+
+
+
+
stage("ssh-agent"){
+        sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+            withCredentials([usernamePassword(credentialsId: env.STASH_CREDENTIALS, passwordVariable: 'PASS', usernameVariable: 'USER')]) {
+                sh """
+                        ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "cd ~/${env.APPLICATION_DIRECTORY_WIREMOCK}; git clone --depth=1 --branch=develop https://$USER:$PASS@${env.GIT_WITHOUT_HTTPS}"
+                """
+    }
+}
+
+
+
+
+
Install an application with Smoke environment
+ +
+
+
Update properties settings file
+
+

New settings file is pushed to the repository. Example configuration:

+
+
+
+
...
+   <key>autocomplete</key>
+   <string>http://server:port</string>
+   <key>benefitsummary</key>
+   <string>http://server:port</string>
+   <key>checkscan</key>
+   <string>http://server:port</string>
+   <key>dpesb</key>
+   <string>http://server:port</string>
+...
+
+
+
+

Address of service (backend) should be changed to wiremock address as it is shown on listing to change the default route.

+
+
+
+
Build an application with updated properties file
+
+

New versions of application are prepared by Jenkins job.

+
+
+
+
Install an application on target properties file
+
+

Installation of an application is actually executed in a non-automated way using SeeTest environment.

+
+
+
+
UI tests
+ +
+
+
Run Jenkins job
+
+

Jenkinsfile:

+
+
+
+
// Jenkins parameters are overriding the properties below
+def properties = [
+
+          JENKINS_LABELS                                 : 'PWI_LINUX_DEV',
+          APPLICATION_FOLDER                             : 'app_dir',
+          PROJECT_HOME                                   : 'app_home_folder',
+
+          //WIREMOCK
+          WIREMOCK_CREDENTIALS                           : 'vc2crptXXXXXXn',
+          WIREMOCK_USERNAME                              : 'wiremock',
+          WIREMOCK_ADDRESS                               : 'http://vc2crptXXXXXXn.xxx.com:8080',
+          WIREMOCK_IP_ADDRESS                            : '10.196.67.XXX',
+          WIREMOCK_CONTAINER_NAME                        : 'wiremock',
+          APPLICATION_DIRECTORY_WIREMOCK                 : 'repo',
+
+          //GIT
+          GIT_CREDENTIALS                                : 'e47742cc-bb66-4321-2341-a2342er24f2',
+          GIT_BRANCH                                     : 'develop',
+          GIT_SSH                                        : 'ssh://git@stash.xxx.com/app/app.git',
+          GIT_HTTPS                                      : 'https://git@stash.xxx.com/app/app.git',
+
+          STASH_CREDENTIALS                              : 'e47742cc-bb66-4321-2341-a2342er24f2',
+
+
+          //DOCKER
+          ARTIFACTORY_USER_CREDENTIALS                   : 'e47742cc-bb66-4321-2341-a2342er24f2',
+          SEETEST_DOCKER_IMAGE                           : 'docker.xxx.com/project/images/app:v1-8.3',
+
+          //SEETEST_DOCKER_IMAGE
+          SEETEST_APPLICATION_FOLDER                     : 'seetest_dir',
+          SEETEST_PROJECT_HOME                           : 'Automated Scripts',
+          SEETEST_GIT_SSH                                : 'ssh://git@stash.xxx.com/pr/seetest_automation_cucumber.git',
+          SEETEST_GIT_BRANCH                             : 'develop',
+          SEETEST_GRID_USER_CREDENTIALS                  : 'e47742cc-bb66-4321-2341-a2342er24f2',
+          SEETEST_CUCUMBER_TAG                           : '@Virtualization',
+          SEETEST_CLOUD_NAME                             : 'Core Group',
+          SEETEST_IOS_VERSION                            : '11',
+          SEETEST_IOS_APP_URL                            : '',
+          SEETEST_INSTALL_APP                            : 'No',
+          SEETEST_APP_ENVIRONMENT                        : 'SmokeTests',
+          SEETEST_DEVICE_QUERY                           : '',
+]
+
+node(properties.JENKINS_LABELS) {
+    try {
+        prepareEnv(properties)
+        gitCheckout()
+        stageStartVirtualServer()
+        stageMapApiRequests()
+        stageInstallApplication()
+        stageUITests()
+     } catch(Exception ex) {
+        currentBuild.result = 'FAILURE'
+        error = 'Error' + ex
+     }
+}
+
+//== == == == == == == == == == == == == == == == == == END OF PIPELINE== == == == == == == == == == == == == == == == == == == == ==
+
+private void prepareEnv(properties) {
+    cleanWorkspace()
+    overrideProperties(properties)
+    setWorkspace()
+}
+
+private void gitCheckout() {
+    dir(env.APPLICATION_FOLDER) {
+        checkout([$class: 'GitSCM', branches: [[name: env.GIT_BRANCH]], doGenerateSubmoduleConfiguration: false, extensions: [[$class: 'CloneOption', depth: 0, noTags: false, reference: '', shallow: false, timeout: 50]], gitTool: 'Default', submoduleCfg: [], userRemoteConfigs: [[credentialsId: env.GIT_CREDENTIALS, url: env.GIT_SSH]]])
+     }
+}
+
+private void stageStartVirtualServer() {
+    def module = load "${env.SUBMODULES_DIR}/stageStartVirtualServer.groovy"
+    module()
+}
+
+private void stageMapApiRequests() {
+    def module = load "${env.SUBMODULES_DIR}/stageMapApiRequests.groovy"
+    module()
+}
+
+private void stageInstallApplication() {
+    def module = load "${env.SUBMODULES_DIR}/stageInstallApplication.groovy"
+    module()
+}
+
+private void stageUITests() {
+    def module = load "${env.SUBMODULES_DIR}/stageUITests.groovy"
+    module()
+}
+
+private void setWorkspace() {
+    String workspace = pwd()
+    env.APPLICATION_DIRECTORY = "/${env.APPLICATION_DIRECTORY}"
+    env.WORKSPACE_LOCAL = workspace + env.APPLICATION_DIRECTORY
+    env.SEETEST_PROJECT_HOME_ABSOLUTE_PATH = "${workspace}/${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}"
+    env.SUBMODULES_DIR = env.WORKSPACE_LOCAL + "/pipelines/SmokeTests.submodules"
+    env.COMMONS_DIR    = env.WORKSPACE_LOCAL + "/pipelines/commons"
+}
+
+/*
+    function overrides env values based on provided properties
+*/
+private void overrideProperties(properties) {
+    for (param in properties) {
+        if (env.(param.key) ==  null) {
+           echo "Adding parameter '${param.key}' with default value: '${param.value}'"
+           env.(param.key) = param.value
+        } else {
+           echo "Parameter '${param.key}' has overriden value: '${env.(param.key)}'"
+        }
+     }
+
+     echo sh(script: "env | sort", returnStdout: true)
+}
+
+private void cleanWorkspace() {
+   sh 'rm -rf *'
+}
+
+
+
+

stageStartVirtualServer.groovy:

+
+
+
+
def call () {
+    // Verifies that the WireMock virtual server answers on its admin
+    // endpoint; fails the build otherwise.
+    stage("Check virtual server") {
+        def statusCode
+
+        try {
+            def response = httpRequest "${env.WIREMOCK_ADDRESS}/__admin/"
+            statusCode = response.status
+        } catch(Exception ex) {
+            currentBuild.result = 'FAILURE'
+            // Fixed typo: 'os unreachable' -> 'is unreachable'.
+            error 'WireMock server is unreachable.'
+        }
+
+        if (statusCode != 200) {
+            currentBuild.result = 'FAILURE'
+            // Fixed: single quotes prevented ${statusCode} interpolation.
+            error "WireMock server is unreachable. Return code: ${statusCode}"
+        }
+    }
+}
+
+
+
+

stageMapApiRequests.groovy:

+
+
+
+
def call() {
+    // Refreshes the WireMock mapping repository on the remote host,
+    // restarts the WireMock container and waits until it is reachable.
+    stage("Map API requests with virtual assets") {
+        checkoutRepository()
+        restartWiremock()
+        checkWiremockStatus()
+     }
+}
+
+/*
+    Force-resets the WireMock mapping repository on the remote host to the
+    tip of env.GIT_BRANCH, fetching over HTTPS with Stash credentials.
+*/
+private checkoutRepository() {
+    extractHTTPSUrl()
+    sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+        withCredentials([usernamePassword(credentialsId: env.STASH_CREDENTIALS, passwordVariable: 'PASS', usernameVariable: 'USER')]) {
+            // Fixed: 'cd~/...' was missing the space after 'cd', so the
+            // remote command would fail with "command not found".
+            sh """
+                ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "cd ~/${env.APPLICATION_DIRECTORY_WIREMOCK}/${env.PROJECT_HOME}; git fetch https://$USER:$PASS@${env.GIT_WITHOUT_HTTPS} ${env.GIT_BRANCH}; git reset --hard FETCH_HEAD; git clean -df"
+             """
+         }
+     }
+}
+
+// Restarts the WireMock Docker container on the remote host so that the
+// freshly checked-out mappings are picked up.
+private restartWiremock() {
+    sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+            sh """
+                ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "docker container restart ${env.WIREMOCK_CONTAINER_NAME}"
+             """
+     }
+}
+
+/*
+    Polls the WireMock admin endpoint until it returns HTTP 200, retrying
+    up to 6 times with a 10 second pause between attempts. Fails the build
+    if the server never becomes reachable.
+*/
+private checkWiremockStatus() {
+    int wiremockStatusCheckCounter = 6
+    int sleepTimeInSeconds = 10
+    def wiremockStatus
+
+    for (int i = 0; i < wiremockStatusCheckCounter; i++) {
+         try {
+             wiremockStatus = getHttpRequestStatus()
+             echo "WireMock server status code: ${wiremockStatus}"
+         } catch(Exception ex) {    // Fixed typo: 'Exceprion'.
+             echo "Exception when checking connection to WireMock"
+         }
+         if (wiremockStatus == 200) break
+         // Fixed: '$(sleepTimeInSeconds}' mixed shell command substitution
+         // with a broken GString; interpolate the Groovy variable instead.
+         else sh "sleep ${sleepTimeInSeconds}"
+      }
+
+      if (wiremockStatus != 200) {
+          currentBuild.result = 'FAILURE'
+          // Fixed: single quotes prevented ${wiremockStatus} interpolation.
+          error "WireMock server is unreachable. Return code: ${wiremockStatus}"
+      }
+}
+
+// Returns the HTTP status code of the WireMock admin endpoint.
+private def getHttpRequestStatus() {
+    def response = httpRequest "${env.WIREMOCK_ADDRESS}/__admin"
+    return response.status
+}   // Fixed: the closing brace of this method was missing.
+
+// Strips the scheme from env.GIT_HTTPS so the URL can be rebuilt with
+// embedded credentials (https://user:pass@host/...) in checkoutRepository().
+private extractHTTPSUrl() {
+    env.GIT_WITHOUT_HTTPS = env.GIT_HTTPS.replace("https://", "")
+}
+
+return this
+
+
+
+

stageInstallApplication.groovy:

+
+
+
+
def call() {
+    // Checks out the SeeTest automation project used to install the
+    // application and drive the smoke tests.
+    stage('Install application with smoke tests environment') {
+        dir(env.SEETEST_APPLICATION_FOLDER) {
+            // Fixed: the checkout configuration map was missing its closing ']'.
+            checkout([$class: 'GitSCM', branches: [[name: env.SEETEST_GIT_BRANCH]], doGenerateSubmoduleConfigurations: false, extensions: [], gitTool: 'default', submoduleCfg: [], userRemoteConfigs: [[credentialsId: env.GIT_CREDENTIALS, url: env.SEETEST_GIT_SSH]]])
+        }
+     }
+}
+
+return this
+
+
+
+

stageUITests.groovy:

+
+
+
+
def call() {
+    // Runs the UI test suite inside the SeeTest Docker image and publishes
+    // all reports (JUnit, Allure, HTML, Cucumber).
+    stage('UI tests') {
+        def utils = load "${env.SUBMODULES_DIR}/utils.groovy"
+
+        try {
+            utils.generateUserIDVariable(); //Generate USER_ID and USER_GROUP
+            docker.image(env.SEETEST_DOCKER_IMAGE).inside("-u ${env.USER_ID}:${env.USER_GROUP}") {
+                // Fixed: missing closing quote after 'ARTIFACTORY_USERNAME'.
+                withCredentials([[$class: 'UsernamePasswordMultiBinding', credentialsId: "${env.ARTIFACTORY_USER_CREDENTIALS}", passwordVariable: 'ARTIFACTORY_PASSWORD', usernameVariable: 'ARTIFACTORY_USERNAME']]) {
+                    executeTests()
+                    compressArtifacts()
+                    publishJUnitTestResultReport()
+                    archiveArtifacts()
+                    publishHTMLReports()
+                    publishCucumberReports()
+                 }
+             }
+        } catch (Exception exc) {
+            throw exc
+        }
+   }
+}
+
+// Executes the Maven/Cucumber UI tests against the SeeTest grid.
+private executeTests() {
+    withCredentials([usernamePassword(credentialsId: env.SEETEST_GRID_USER_CREDENTIALS, passwordVariable: 'GRID_USER_PASSWORD', usernameVariable: 'GRID_USER_NAME')]) {
+            // Fixed: -DdeviceQuery was missing its closing quote and
+            // -DiosVersion used '$env.SEETEST_IOS_VERSION}' (broken GString).
+            sh """
+                cd ${env.SEETEST_PROJECT_HOME_ABSOLUTE_PATH}
+                mvn clean test -B -Ddriver="grid" -Dtags="${env.SEETEST_CUCUMBER_TAG}" -DcloudName="${env.SEETEST_CLOUD_NAME}" -DdeviceQuery="${env.SEETEST_DEVICE_QUERY}" -DgridUser="${GRID_USER_NAME}" -DgridPassword="${GRID_USER_PASSWORD}" -Dinstall="${env.SEETEST_INSTALL_APP}" -DiosUrl="${env.SEETEST_IOS_APP_URL}" -DdeviceType="iPhone" -DiosVersion="${env.SEETEST_IOS_VERSION}" -DparallelMode="allonall" -Denv="${env.SEETEST_APP_ENVIRONMENT}" site
+             """
+     }
+}
+
+// Compresses the Allure site output into allure_report.zip for archiving.
+// Fixed: method name case now matches the call site ('compressArtifacts'),
+// the closing brace was missing, and 'SEETEST_PROJECT_homE' had wrong case.
+private compressArtifacts() {
+    echo "Compressing artifacts from /target/site"
+    sh """
+        zip -r allure_report.zip **/${env.SEETEST_PROJECT_HOME}/target/site
+    """
+}
+
+// Publishes the surefire JUnit XML results; tolerates a missing report.
+private publishJUnitTestResultReport() {
+    def reportPattern = "${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/surefire-reports/junitreporters/*.xml"
+    echo "Publishing JUnit reports from ${reportPattern}"
+
+    try {
+        junit reportPattern
+    } catch(e) {
+        echo("No JUnit report found")
+    }
+}
+
+// Archives the zipped Allure report. NOTE(review): this no-arg method shadows
+// the Jenkins 'archiveArtifacts' step of the same name; the inner call with a
+// map argument still resolves to the pipeline step — confirm before renaming.
+private archiveArtifacts() {
+    echo "Archiving artifacts"
+
+    try {
+        archiveArtifacts allowEmptyArchive: true, artifacts: "**/allure_report.zip"
+    } catch(e) {
+        echo("No artifacts found")
+    }
+}
+
+// Publishes the Allure HTML report via the HTML Publisher plugin.
+private publishHTMLReports() {
+    echo "Publishing HTML reports from ${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/site/allure-maven-plugin"
+
+    try {
+        // Fixed: '${env.SEETEST_APPLICATION_FOLDER' was missing its closing brace.
+        publishHTML([allowMissing: false, alwaysLinkToLastBuild: true, keepAll: true, reportDir: "${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/site/allure-maven-plugin", reportFiles: 'index.html', reportName: 'Allure report', reportTitles: 'Allure report'])
+    } catch(e) {
+        echo("No artifacts found")
+    }
+}
+
+// Publishes the Cucumber JSON reports via the Cucumber Reports plugin.
+// Fixed: method name case now matches the call site ('publishCucumberReports'),
+// 'fileExcludePattern' was missing its ':' and '#{env...}' was not a GString.
+private publishCucumberReports() {
+    echo "Publishing Cucumber reports from ${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/cucumber-parallel/*.json"
+
+    try {
+        step([$class: 'CucumberReportPublisher', fileExcludePattern: '', fileIncludePattern: "${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/cucumber-parallel/*.json", ignoreFailedTests: false, jenkinsBasePath: '', jsonReportDirectory: '', missingFails: false, parallelTesting: false, pendingFails: false, skippedFails: false, undefinedFails: false])
+    } catch(e) {
+        echo("No Cucumber report found")
+    }
+}
+
+return this
+
+
+
+

Configuration

+
+
+

It is possible to configure Jenkins job in two ways. First one is to edit the Jenkinsfile. All of the properties are in properties collection as below:

+
+
+
+
def properties = [
+
+          JENKINS_LABELS                                : 'PWI_LINUX_DEV',
+
+          ...
+
+          //Docker
+          ARTIFACTORY_USER_CREDENTIALS                  : 'ba2e4f46-56f1-4467-ae97-17b356d6s643',
+          SEETEST_DOCKER_IMAGE                          : 'docker.XXX.com/app/base-images/seetest:v1-8.3',
+
+          //SeeTest
+          SEETEST_APPLICATION_FOLDER                    : 'seetest_dit',
+          SEETEST_PROJECT_HOME                          : 'Automated_Scripts',
+          SEETEST_GIT_SSH                               : 'ssh://stash.xxx.com/app/seetest_automation_cucumber.git',
+          SEETEST_GIT_BRANCH                            : 'develop',
+
+          ...
+]
+
+
+
+

The second way is to add properties in 'Configure job'. All of the properties there override the properties from the Jenkinsfile (they have the highest priority). They can then be set during the 'Build with Parameters' process.

+
+
+

Reports

+
+
+

After a job execution, the 'Allure report' and 'Cucumber-JVM' reports should be visible. If any tests fail, you can check on which screen they failed (a screenshot of the failure is attached) and why.

+
+
+
+
+

26.16. Security Module

+ +
+
Security Test Module
+ +
+
+
What is Security
+
+

Application Security is concerned with Integrity, Availability and Confidentiality of data processed, stored and transferred by the application.

+
+
+

Application Security is a cross-cutting concern which touches every aspect of the Software Development Lifecycle. You can introduce some SQL injection flaws in your application and make it exploitable, but you can also expose your secrets (which will have nothing to do with code itself) due to poor secret management process, and fail as well.

+
+
+

Because of this and many other reasons, not every aspect of security can be automatically verified. Manual tests and audits will still be needed. Nevertheless, every security requirement which is automatically verified will prevent code degeneration and misconfiguration in a continuous manner.

+
+
+
+
How to test Security
+
+

Security tests can be performed in many different ways, such as:

+
+
+
    +
  • +

    Static Code Analysis - improves the security by (usually) automated code review. A good way to search for vulnerabilities, which are 'obvious' on the code level ( e.g. SQL injection). The downside of this approach is that professional tools to perform such scans are very expensive and still produce many false positives.

    +
  • +
  • +

    Dynamic Code Analysis - tests are run against a working environment. A good way to search for vulnerabilities, which require all client- and server-side components to be present and running (like e.g. Cross-Site Scripting). Tests are performed in a semi-automated manner and require a proxy tool (like e.g. OWASP ZAP)

    +
  • +
  • +

    Unit tests - self-written and self-maintained tests. They usually work on the HTTP/REST level (this defines the trust boundary between the client and the server) and run against a working environment. Unit tests are best suited for verifying requirements which involve business knowledge of the system or which assure secure configuration on the HTTP level.

    +
  • +
+
+
+

In the current release of the Security Module, the main focus will be Unit Tests.

+
+
+

Although the most common choice of environment for running security tests on will be integration(the environment offers the right stability and should mirror the production closely), it is not uncommon for some security tests to run on production as well. This is done for e.g. TLS configuration testing to ensure proper configuration of the most relevant environment in a continuous manner.

+
+
+
+
+

26.17. Database Module

+ +
+
Database Test Module
+ +
+
+
What is MrChecker Database Test Module
+
+

Database module is based on Object-Relational Mapping programming technique. All functionalities are built using Java Persistence API but examples use Hibernate as a main provider.

+
+
+
+
JPA structure schema
+
+

This module was written to allow the use of any JPA provider. The structure is represented in the schema below.

+
+
+
+image3 +
+
+
+
+
ORM representation applied in Framework
+
+
+image4 +
+
+
+
+
+

26.18. Mobile Test Module

+ +
+
Mobile Test Module
+ +
+
+
What is MrChecker E2E Mobile Test Module
+
+

MrChecker E2E Mobile test Module is a suitable solution for testing Remote Web Design, Mobile Browsers and application. +A user can write tests suitable for all mobile browsers with a full range of resolution. The way of working is similar to Selenium and uses the same rules and patterns as the Web Driver. For more information please look in the Selenium test module.

+
+
+
+
What is Page Object Architecture
+
+

    Creating Selenium test cases can result in an unmaintainable project. One of the reasons is that too much duplicated code is used. Duplicated code could be caused by duplicated functionality, and this will result in duplicated usage of locators. The disadvantage of duplicated code is that the project is less maintainable. If some locator changes, you have to walk through the whole test code to adjust locators where necessary. By using the page object model we can write non-brittle test code and reduce or eliminate duplicate test code. Besides that, it improves the readability and allows us to create interactive documentation. Last but not least, we can create tests with fewer keystrokes. An implementation of the page object model can be achieved by separating the abstraction of the test object and the test scripts.

+
+
+
+
Page Object Pattern
+
+
+Pom +
+
+
+
+
Mobile Structure
+
+

It is build on the top of the Appium library. +Appium is an open-source tool for automating native, mobile web, and hybrid applications on iOS mobile, Android mobile, and Windows desktop platforms. Native apps are those written using iOS, Android, or Windows SDKs. Mobile web apps are web apps accessed using a mobile browser (Appium supports Safari on iOS and Chrome or the built-in 'Browser' app on Android). Hybrid apps have a wrapper around a "webview" - a native control that enables interaction with web content.

+
+
+
+
Run on different mobile devices
+
+

To execute each test with chosen connected mobile devices, it is required to use specific arguments in Run configuration.

+
+
+
+image01 +
+
+
+
+image02 +
+
+
+

Default supported arguments in MrChecker:

+
+
+
    +
  • +

    deviceUrl - http url to Appium Server, default value "http://127.0.0.1:4723"

    +
  • +
  • +

    automationName - which automation engine to use , default value "Appium"

    +
  • +
  • +

    platformName - which mobile OS platform to use , default value "Appium"

    +
  • +
  • +

    platformVersion - mobile OS version , default value ""

    +
  • +
  • +

    deviceName - the kind of mobile device or emulator to use , default value "Android Emulator"

    +
  • +
  • +

    app - the absolute local path or remote http URL to a .ipa file (IOS), .app folder (IOS Simulator), .apk file (Android) or .apks file (Android App Bundle), or a .zip file, default value "."

    +
  • +
  • +

    browserName - name of mobile web browser to automate. Should be an empty string if automating an app instead, default value ""

    +
  • +
  • +

    newCommandTimeout - how long (in seconds) Appium will wait for a new command from the client before assuming the client quit and ending the session, default value "4000"

    +
  • +
  • +

    deviceOptions - any other capabilites not covered in essential ones, default value none

    +
  • +
+
+
+

Example usage:

+
+
+
+
mvn clean test -Dtest=MyTest -DdeviceUrl="http://192.168.0.1:1234" -DplatformName="iOS" -DdeviceName="iPhone Simulator" -Dapp=".\\Simple_App.ipa"
+
+
+
+
+
mvn clean test -Dtest=MyTest -Dapp=".\\Simple_App.apk -DdeviceOptions="orientation=LANDSCAPE;appActivity=MainActivity;chromeOptions=['--disable-popup-blocking']"
+
+
+
+

Check also:

+
+ + + +
+

+ +Full list of Generic Capabilities

+
+
+

+ +List of additional capabilities for Android

+
+
+

+ +List of additional capabilities for iOS

+
+ +
+
+
How to use mobile test Module
+
+
    +
  1. +

    Install IDE with MrChecker

    +
  2. +
  3. +

    Switch branch to 'feature/Create-mobile-module-#213' - by default it is 'develop'

    +
  4. +
+
+
+
+
git checkout feature/Create-mobile-module-#213
+
+
+
+
    +
  1. +

    Install and setup git checkout feature/Create-mobile-module-#213[Appium Server]

    +
  2. +
  3. +

    Connect to local Device by Appium Server

    +
    +
    +
     1.
    +Install Android SDK    https://developer.android.com/studio/index.html#command-tools    ->
    +	2.
    +Download Platform and Build-Tools  (Android versions - >    https://en.wikipedia.org/wiki/Android_version_history   )
    +* sdkmanager "platform-tools" "platforms;android-19"
    +* sdkmanager "build-tools;19.0.0"
    +* copy from /build-tools  file "aapt.exe"  to /platform-tools
    +	3.
    +Set Environment:
    +ANDROID_SDK_ROOT = D:\sdk-tools-windows-4333796
    +PATH =  %PATH%; %ANDROID_SDK_ROOT%
    +	4.
    +Start Appium Server
    +	5.
    +Start Session in Appium Server, capabilities
    +{
    +  "platformName": "Android",
    +            "deviceName": "Android Emulator",
    +            "app": "D:\\Repo\\mrchecker-source\\mrchecker-framework-modules\\mrchecker-mobile-module\\src\\test\\resources\\Simple App_v2.0.1_apkpure.com.apk",
    +            "automationName": "UiAutomator1"
    +            }
    +
    +
    +
  4. +
  5. +

    Run Mobile tests with runtime parameters. +List of supported parameters could be found here

    +
    +
      +
    • +

      From command line (as in Jenkins):

      +
    • +
    +
    +
  6. +
+
+
+
+
mvn clean compile test  -Dapp=".\\Simple_App_v2.0.1_apkpure.com.apk" -DautomationName="UiAutomator1" -Dthread.count=1
+
+
+
+
    +
  • +

    from IDE:

    +
  • +
+
+
+
+image00100 +
+
+
+
+image00101 +
+
+
+
+
+

26.19. DevOps Test Module

+ +
+
DevOPS Test Module
+ +
+
+
What does DevOps mean for us?
+
+

DevOps consists of a mixture of three key components in a technical project:

+
+
+
    +
  • +

    People’s skills and mindset

    +
  • +
  • +

    Processes

    +
  • +
  • +

    Tools

    +
  • +
+
+
+

Using E2E MrChecker Test Framework it is possible to cover the majority of these areas.

+
+
+
+
QA Team Goal
+
+

For QA engineers, it is essential to take care of the product code quality.

+
+
+

Therefore, we have to understand, that a test case is also code which has to be validated against quality gates. As a result, we must test our developed test case like it is done during standard Software Delivery Life Cycle.

+
+
+
+
Well rounded test case production process
+
+
    +
  • +

    How do we define top-notch test cases development process in E2E MrChecker Test Framework

    +
  • +
+
+
+
+image5 +
+
+
+
+
Continuous Integration (CI) and Continuous Delivery (CD)
+
+ +
+
+
+image6 +
+
+
+
+
What should you receive from this DevOps module
+
+
+image7 +
+
+
+
+
What will you gain with our DevOps module
+
+

The CI procedure has been divided into transparent modules. This solution makes configuration and maintenance very easy because everyone is able to manage versions and customize the configuration independently for each module. A separate security module ensures the protection of your credentials and assigned access roles regardless of changes in other modules.

+
+
+
+image8 +
+
+
+

Your CI process will be matched to the current project. You can easily go back to the previous configuration, test a new one or move a selected one to other projects.

+
+
+
+image9 +
+
+
+

DevOps module supports a delivery model in which executors are made available to the user as needed. It has such advantages as:

+
+
+
    +
  • +

    Saving computing resources

    +
  • +
  • +

    Eliminating guessing on your infrastructure capacity needs

    +
  • +
  • +

    Not spending time on running and maintaining additional executors +== How to build this DevOps module

    +
  • +
+
+
+

Once you have implemented the module, you can learn more about it here:

+
+
+ +
+
+
+
Continuous Integration
+
+

Embrace quality with Continuous Integration while you produce test case(s).

+
+
+
+
Overview
+
+

There are two ways to set up your Continuous Integration environment:

+
+
+
    +
  1. +

    Create a Jenkins instance from scratch (e.g. by using the Jenkins Docker image)

    +
    +

    Using a clean Jenkins instance requires the installation of additional plugins. The plugins required and their versions can be found on this page.

    +
    +
  2. +
  3. +

    Use the pre-configured custom Docker image provided by us

    +
    +

    No more additional configuration is required (but optional) using this custom Docker image. Additionally, this Jenkins setup allows dynamical scaling across multiple machines and even cloud (AWS, Azure, Google Cloud etc.).

    +
    +
  4. +
+
+
+
+
Jenkins Overview
+
+

Jenkins is an Open Source Continuous Integration Tool. It allows the user to create automated build jobs which will run remotely on so called Jenkins Slaves. A build job can be triggered by several events, for example on new pull request on specified repositories or timed (e.g. at midnight).

+
+
+
+
Jenkins Configuration
+
+

Tests created by using the testing framework can easily be implemented on a Jenkins instance. The following chapter will describe such a job configuration. If you’re running your own Jenkins instance, you may have to install additional plugins listed on the page Jenkins Plugins for a trouble-free integration of your tests.

+
+
+
+
== Initial Configuration
+
+

The test job is configured as a so-called parameterized job. This means, after starting the job, parameters can be specified, which will then be used in the build process. In this case, branch and testname will be expected when starting the job. These parameters specify which branch in the code repository should be checked out (possibly feature branch) and the name of the test that should be executed.

+
+
+
+image79 +
+
+
+
+
== Build Process Configuration
+
+
    +
  • +

    The first step inside the build process configuration is to get the author of the commit that was made. The mail will be extracted and gets stored in a file called build.properties. This way, the author can be notified if the build fails.

    +
    +
    +image80 +
    +
    +
  • +
  • +

    Next up, Maven will be used to check if the code can be compiled, without running any tests.

    +
    +
    +image81 +
    +
    +
    +

    After making sure that the code can be compiled, the actual tests will be executed.

    +
    +
    +
    +image82 +
    +
    +
  • +
  • +

    Finally, reports will be generated.

    +
    +
    +image83 +
    +
    +
  • +
+
+
+
+
== Post Build Configuration
+
+
    +
  • +

    At first, the results will be imported to the Allure System

    +
    +
    +image84 +
    +
    +
  • +
  • +

    JUnit test results will be reported as well. Using this step, the test result trend graph will be displayed on the Jenkins job overview.

    +
    +
    +image85 +
    +
    +
  • +
  • +

    Finally, an E-Mail will be sent to the previously extracted author of the commit.

    +
    +
    +image86 +
    +
    +
  • +
+
+
+
+
Using the Pre-Configured Custom Docker Image
+
+

If you are starting a new Jenkins instance for your tests, we’d suggest using the pre-configured Docker image. This image already contains all the configurations and additional features.

+
+
+

The configurations are e.g. Plugins and Pre-Installed job setup samples. This way, you don’t have to set up the entire CI-Environment from the ground up.

+
+
+

Additional features from this docker image allow dynamic creation and deletion of Jenkins slaves, by creating Docker containers. Also, Cloud Solutions can be implemented to allow wide-spread load balancing.

+
+
+
+
Continuous Delivery
+
+

Include quality with Continuous Delivery during product release.

+
+
+
+image87 +
+
+
+
+
Overview
+
+

CD from Jenkins point of view does not change a lot from Continuous Integration one.

+
+
+
+
Jenkins Overview
+
+

Use the same Jenkins settings for Jenkins CD setup as for CI, please. link. The only difference is:

+
+
+
    +
  • +

    What type of test you will execute. Before, we have been choosing test case(s), now we will choose test suite(s)

    +
  • +
  • +

    Who will trigger the given Smoke/Integration/Performance job

    +
  • +
  • +

    What is the name of the official branch. This branch ought always to be used in every CD execution. It will be either master or develop.

    +
  • +
+
+
+
+
Jenkins for Smoke Tests
+
+

In the $TESTNAME variable, where we input the test name( link ), please input the name of a test suite assembled together of tests tagged as smoke tests -( link ) thus running all the smoke tests.

+
+
+
+
Jenkins for Performance Tests
+
+

Under construction - added when WebAPI module is included.

+
+
+
+
Pipeline structure
+ +
+
+
Pipeline configuration:
+
+

The default interaction with Jenkins required manual jobs. This keeps configuration of a job in Jenkins separate from source code. With Pipeline plugin users can implement a pipeline procedure in Jenkinsfile and store it in repository with other code. This approach is used in Mr Checker framework. More info: https://jenkins.io/solutions/pipeline/

+
+
+

Our CI & CD processes are divided into a few separate files: Jenkins_node.groovy is the file to manage all processes. It defines all operations executed on a Jenkins node, so all code in this file is closed in node closure. Workflow in Jenkinsfile:

+
+
+
    +
  • +

    Read all parameters from a Jenkins job

    +
  • +
  • +

    Execute stage to prepare the environment

    +
  • +
  • +

    Execute git pull command

    +
  • +
  • +

    Set Jenkins job description

    +
  • +
  • +

    Execute compilation of the project in a special prepared docker container

    +
  • +
  • +

    Execute unit tests

    +
  • +
  • +

    Execute integration tests

    +
  • +
  • +

    Deploy artifacts to a local repository

    +
  • +
  • +

    Deploy artifacts to an external repository (nexus/arifactory)

    +
  • +
+
+
+

Not all the steps must be present in the Jenkins files. This should be configured for particular job requirements.

+
+
+
+
Description of stages:
+ +
+
+
Stage “Prepare environment”
+
+

First thing to do in this stage is overwriting properties loaded from Jenkins job. It is defined in “overrideProperties” function. The next function, “setJenkinsJobVariables” defines environment variables such as :

+
+
+
    +
  • +

    JOB_NAME_UPSTREAM

    +
  • +
  • +

    BUILD_DISPLAY_NAME_UPSTREAM

    +
  • +
  • +

    BUILD_URL_UPSTREAM

    +
  • +
  • +

    GIT_CREDENTIALS

    +
  • +
  • +

    JENKINS_CREDENTIALS

    +
  • +
+
+
+

The last function in the stage – “setWorkspace” – creates an environment variable with the path to the local workspace. This is required because when using the pipeline plugin, Jenkins does not create the WORKSPACE env variable.

+
+
+
+
Stage "Git pull"
+
+

It pulls sources from the repository and loads “git pull” file which contains additional methods:

+
+
+
    +
  • +

    setGitAuthor – setting properties about git author to the file “build.properties” and loading created file

    +
  • +
  • +

    tryMergeWithBranch – checking if actual branch can be merged with default main branch

    +
  • +
+
+
+
+
Stage “Build compile”
+
+

Verify with maven that code builds without errors

+
+
+
+
Stage “Unit test”
+
+

Execute unit tests with mvn surefire test and publish reports in junit and allure format

+
+
+
+
Stage “Integration test”
+
+

Execute integration tests with mvn surefire test and publish reports in junit and allure format

+
+
+
+
Stage “Deploy – local repo”
+
+

Archive artifacts as a jar file in the local repository

+
+
+
+
Stage ”Deploy – nexus repo”
+
+

Deploy to the external repository with maven release deploy command with credentials stored in Jenkins machine. Additional files:

+
+
+
    +
  • +

    mailSender.groovy – contains methods for sending mail with generated content

    +
  • +
  • +

    stashNotification.groovy – send job status for bitbucket by a curl command

    +
  • +
  • +

    utils.groovy - contains additional functions to load properties, files and generate additional data

    +
  • +
+
+
+
+
Selenium Grid
+ +
+
+
What is Selenium Grid
+
+

Selenium Grid allows running web/mobile browsers test cases to fulfill basic factors, such as:

+
+
+
    +
  • +

    Independent infrastructure, similar to end-users'

    +
  • +
  • +

    Scalable infrastructure (\~50 simultaneous sessions at once)

    +
  • +
  • +

    Huge variety of web browsers (from mobile to desktop)

    +
  • +
  • +

    Continuous Integration and Continuous Delivery process

    +
  • +
  • +

    Supporting multi-type programming languages (java, javascript, python, …​).

    +
  • +
+
+
+
+image88 +
+
+
+

On a daily basis, a test automation engineer uses their local environments for test case execution/development. However, a created browser test case has to be able to run on any infrastructure. Selenium Grid enables this portability.

+
+
+
+
Selenium Grid Structure
+
+
+image89 +
+
+
+

Full documentation of Selenium Grid can be found here and here.

+
+
+

'Vanilla flavour' Selenium Grid is based on two, not very complicated ingredients:

+
+
+
    +
  1. +

    Selenium Hub - as one machine, accepting connections to grid from test cases executors. It also plays a managerial role in connection to/from Selenium Nodes

    +
  2. +
  3. +

    Selenium Node - from one to many machines, where on each machine a browser used during test case execution is installed.

    +
  4. +
+
+
+
+
How to setup
+
+

There are two options of Selenium Grid setup:

+
+
+
    +
  • +

    Classic, static solution - link

    +
  • +
  • +

    Cloud, scalable solution - link

    +
  • +
+
+
+

Advantages and disadvantages of both solutions:

+
+
+
+image90 +
+
+
+
+
How to use Selenium Grid with E2E Mr Checker Test Frameworks
+
+

Run the following command either in Eclipse or in Jenkins:

+
+
+
+
> mvn test -Dtest=com.capgemini.ntc.selenium.tests.samples.resolutions.ResolutionTest -DseleniumGrid="http://10.40.232.61:4444/wd/hub" -Dos=LINUX -Dbrowser=chrome
+
+
+
+

As a result of this command:

+
+
+
    +
  • +

    -Dtest=com.capgemini.ntc.selenium.features.samples.resolutions.ResolutionTest - name of test case to execute

    +
  • +
  • +

    -DseleniumGrid="http://10.40.232.61:4444/wd/hub" - IP address of Selenium Hub

    +
  • +
  • +

    -Dos=LINUX - what operating system must be assumed during test case execution

    +
  • +
  • +

    -Dbrowser=chrome - what type of browser will be used during test case execution

    +
  • +
+
+
+
+image91 +
+
+
+
+
List of Jenkins Plugins
+
+

|== =

+
+
+

|Plugin Name +|Version

+
+
+

|blueocean-github-pipeline +|1.1.4

+
+
+

|blueocean-display-url +|2.0

+
+
+

|blueocean +|1.1.4

+
+
+

|workflow-support +|2.14

+
+
+

|workflow-api +|2.18

+
+
+

|plain-credentials +|1.4

+
+
+

|pipeline-stage-tags-metadata +|1.1.8

+
+
+

|credentials-binding +|1.12

+
+
+

|git +|3.5.1

+
+
+

|maven-plugin +|2.17

+
+
+

|workflow-durable-task-step +|2.12

+
+
+

|job-dsl +|1.64

+
+
+

|git-server +|1.7

+
+
+

|windows-slaves +|1.3.1

+
+
+

|github +|1.27.0

+
+
+

|blueocean-personalization +|1.1.4

+
+
+

|jackson2-api +|2.7.3

+
+
+

|momentjs +|1.1.1

+
+
+

|workflow-basic-steps +|2.6

+
+
+

|workflow-aggregator +|2.5

+
+
+

|blueocean-rest +|1.1.4

+
+
+

|gradle +|1.27.1

+
+
+

|pipeline-maven +|3.0.0

+
+
+

|blueocean-pipeline-editor +|0.2.0

+
+
+

|durable-task +|1.14

+
+
+

|scm-api +|2.2.2

+
+
+

|pipeline-model-api +|1.1.8

+
+
+

|config-file-provider +|2.16.3

+
+
+

|github-api +|1.85.1

+
+
+

|pam-auth +|1.3

+
+
+

|workflow-cps-global-lib +|2.8

+
+
+

|github-organization-folder +|1.6

+
+
+

|workflow-job +|2.12.1

+
+
+

|variant +|1.1

+
+
+

|git-client +|2.5.0

+
+
+

|sse-gateway +|1.15

+
+
+

|script-security +|1.29.1

+
+
+

|token-macro +|2.1

+
+
+

|jquery-detached +|1.2.1

+
+
+

|blueocean-web +|1.1.4

+
+
+

|timestamper +|1.8.8

+
+
+

|greenballs +|1.15

+
+
+

|handlebars +|1.1.1

+
+
+

|blueocean-jwt +|1.1.4

+
+
+

|pipeline-stage-view +|2.8

+
+
+

|blueocean-i18n +|1.1.4

+
+
+

|blueocean-git-pipeline +|1.1.4

+
+
+

|ace-editor +|1.1

+
+
+

|pipeline-stage-step +|2.2

+
+
+

|email-ext +|2.58

+
+
+

|envinject-api +|1.2

+
+
+

|role-strategy +|2.5.1

+
+
+

|structs +|1.9

+
+
+

|locale +|1.2

+
+
+

|docker-workflow +|1.13

+
+
+

|ssh-credentials +|1.13

+
+
+

|blueocean-pipeline-scm-api +|1.1.4

+
+
+

|metrics +|3.1.2.10

+
+
+

|external-monitor-job +|1.7

+
+
+

|junit +|1.21

+
+
+

|github-branch-source +|2.0.6

+
+
+

|blueocean-config +|1.1.4

+
+
+

|cucumber-reports +|3.8.0

+
+
+

|pipeline-model-declarative-agent +|1.1.1

+
+
+

|blueocean-dashboard +|1.1.4

+
+
+

|subversion +|2.9

+
+
+

|blueocean-autofavorite +|1.0.0

+
+
+

|pipeline-rest-api +|2.8

+
+
+

|pipeline-input-step +|2.7

+
+
+

|matrix-project +|1.11

+
+
+

|pipeline-github-lib +|1.0

+
+
+

|workflow-multibranch +|2.16

+
+
+

|docker-plugin +|0.16.2

+
+
+

|resource-disposer +|0.6

+
+
+

|icon-shim +|2.0.3

+
+
+

|workflow-step-api +|2.12

+
+
+

|blueocean-events +|1.1.4

+
+
+

|workflow-scm-step +|2.6

+
+
+

|display-url-api +|2.0

+
+
+

|favorite +|2.3.0

+
+
+

|build-timeout +|1.18

+
+
+

|mapdb-api +|1.0.9.0

+
+
+

|pipeline-build-step +|2.5.1

+
+
+

|antisamy-markup-formatter +|1.5

+
+
+

|javadoc +|1.4

+
+
+

|blueocean-commons +|1.1.4

+
+
+

|cloudbees-folder +|6.1.2

+
+
+

|ssh-slaves +|1.20

+
+
+

|pubsub-light +|1.10

+
+
+

|pipeline-graph-analysis +|1.4

+
+
+

|allure-jenkins-plugin +|2.23

+
+
+

|mailer +|1.20

+
+
+

|ws-cleanup +|0.33

+
+
+

|authentication-tokens +|1.3

+
+
+

|blueocean-pipeline-api-impl +|1.1.4

+
+
+

|ldap +|1.16

+
+
+

|docker-commons +|1.8

+
+
+

|branch-api +|2.0.10

+
+
+

|workflow-cps +|2.36.1

+
+
+

|pipeline-model-definition +|1.1.8

+
+
+

|blueocean-rest-impl +|1.1.4

+
+
+

|ant +|1.7

+
+
+

|credentials +|2.1.14

+
+
+

|matrix-auth +|1.7

+
+
+

|pipeline-model-extensions +|1.1.8

+
+
+

|pipeline-milestone-step +|1.3.1

+
+
+

|jclouds-jenkins +|2.14

+
+
+

|bouncycastle-api +|2.16.1

+
+
+

|===

+
+
+
+
What is Docker
+
+

Docker is an open source software platform to create, deploy and manage virtualized application containers on a common operating system (OS), with an ecosystem of allied tools.

+
+
+
+
Where do we use Docker
+
+

DevOps module consists of Docker images

+
+
+
    +
  1. +

    Jenkins image

    +
  2. +
  3. +

    Jenkins job image

    +
  4. +
  5. +

    Jenkins management image

    +
  6. +
  7. +

    Security image

    +
  8. +
+
+
+

in addition, each new node is also based on Docker

+
+
+
+
Exploring basic Docker options
+
+

Let’s show some of the most important commands that are needed when working with our DevOps module based on the Docker platform. Each command given below should be preceded by a sudo call by default. If you don’t want to use sudo command create a Unix group called docker and add a user to it.

+
+
+
+
$ sudo groupadd docker
+$ sudo usermod -aG docker $USER
+
+
+
+
+
Build an image from a Dockerfile
+
+
+
##docker build [OPTIONS] PATH | URL | -
+##
+##Options:
+## --tag , -t : Name and optionally a tag in the ‘name:tag’ format
+
+$ docker build -t vc_jenkins_jobs .
+
+
+
+
+
Container start
+
+
+
##docker run [OPTIONS] IMAGE[:TAG|@DIGEST] [COMMAND] [ARG...]
+#
+##Options:
+##-d : To start a container in detached mode (background)
+##-it : interactive terminal
+##--name : assign a container name
+##--rm : clean up
+##--volumes-from="": Mount all volumes from the given container(s)
+##-p : explicitly map a single port or range of ports
+##--volume : storage associated with the image
+
+$ docker run -d --name vc_jenkins_jobs vc_jenkins_jobs
+
+
+
+
+
Remove one or more containers
+
+
+
##docker rm [OPTIONS] CONTAINER
+#
+##Options:
+##--force , -f : Force the removal of a running container
+
+$ docker rm -f jenkins
+
+
+
+
+
List containers
+
+
+
##docker ps [OPTIONS]
+##--all, -a : Show all containers (default shows just running)
+
+$ docker ps
+
+
+
+
+
Pull an image or a repository from a registry
+
+
+
##docker pull [OPTIONS] NAME[:TAG|@DIGEST]
+
+$ docker pull jenkins/jenkins:2.73.1
+
+
+
+
+
Push the image or a repository to a registry
+
+

Pushing new image takes place in two steps. First save the image by adding container ID to the commit command and next use push:

+
+
+
+
##docker push [OPTIONS] NAME[:TAG]
+
+$ docker ps
+  # copy container ID from the result
+$ docker commit b46778v943fh vc_jenkins_mng:project_x
+$ docker push vc_jenkins_mng:project_x
+
+
+
+
+
Return information on Docker object
+
+
+
##docker inspect [OPTIONS] NAME|ID [NAME|ID...]
+#
+##Options:
+##--format , -f : output format
+
+$ docker inspect -f '{{ .Mounts }}' vc_jenkins_mng
+
+
+
+
+
List images
+
+
+
##docker images [OPTIONS] [REPOSITORY[:TAG]]
+#
+##Options:
+##--all , -a : show all images with intermediate images
+
+$ docker images
+$ docker images jenkins
+
+
+
+
+
Remove one or more images
+
+
+
##docker rmi [OPTIONS] IMAGE [IMAGE...]
+#
+##Options:
+##  --force , -f : Force removal of the image
+
+$ docker rmi jenkins/jenkins:latest
+
+
+
+
+
Run a command in a running container
+
+
+
##docker exec [OPTIONS] CONTAINER COMMAND [ARG...]
+##-d : run command in the background
+##-it : interactive terminal
+##-w : working directory inside the container
+##-e : Set environment variables
+
+$ docker exec vc_jenkins_jobs sh -c "chmod 755 config.xml"
+
+
+
+
+
Advanced commands
+ +
+
+
Remove dangling images
+
+
+
$ docker rmi $(docker images -f dangling=true -q)
+
+
+
+
+
Remove all images
+
+
+
$ docker rmi $(docker images -a -q)
+
+
+
+
+
Removing images according to a pattern
+
+
+
$ docker images | grep "pattern" | awk '{print $3}' | xargs docker rmi
+
+
+
+
+
Remove all exited containers
+
+
+
$ docker rm $(docker ps -a -f status=exited -q)
+
+
+
+
+
Remove all stopped containers
+
+
+
$ docker rm $(docker ps --no-trunc -aq)
+
+
+
+
+
Remove containers according to a pattern
+
+
+
$ docker ps -a | grep "pattern" | awk '{print $1}' | xargs docker rm
+
+
+
+
+
Remove dangling volumes
+
+
+
$ docker volume rm $(docker volume ls -f dangling=true -q)
+
+
+
+
+
+

26.20. MrChecker download

+ +
+
+

26.21. Windows

+ +
+
Advanced installation
+ +
+
+
Java installation
+
+

There is one important pre-requisite for Mr Checker installation - Java has to be installed on the computer and an environmental variable has to be set in order to obtain optimal functioning of the framework.

+
+
+
    +
  1. +

    Install Java 1.8 JDK 64bit

    +
    +

    Download and install Java download link

    +
    +
    +

    (To download JDK 8 from Oracle you have to have an account. It is recommended to get a JDK build based on OpenJDK from AdoptOpenJDK)

    +
    +
  2. +
  3. +

    Windows Local Environment - How to set:

    +
    +
      +
    • +

      Variable name: JAVA_HOME | Variable value: C:\Where_You’ve_Installed_Java

      +
    • +
    • +

      Variable name: PATH | Variable value: %JAVA_HOME%\bin;%JAVA_HOME%\lib

      +
      +
      +install win03 +
      +
      +
    • +
    +
    +
  4. +
  5. +

    Next, verify it in the command line:

    +
    +
    +
    > java --version
    +
    +
    +
  6. +
+
+
+
+
Other components installation
+
+

Install each component separately, or update the existing ones on your PC.

+
+
+
    +
  1. +

    Maven 3.5

    +
    +
      +
    • +

      Download Maven

      +
    • +
    • +

      Unzip Maven in following location C:\maven

      +
    • +
    • +

      Set Windows Local Environment

      +
      +
        +
      • +

        Variable name: M2_HOME | Variable value: C:\maven\apache-maven-3.5.0

        +
      • +
      • +

        Variable name: PATH | Variable value: %M2_HOME%\bin

        +
        +
        +install win04 +
        +
        +
      • +
      +
      +
    • +
    • +

      Verify it in the command line:

      +
      +
      +
      > mvn --version
      +
      +
      +
    • +
    +
    +
  2. +
  3. +

    IDE

    +
    +
      +
    • +

      Download a most recent Eclipse

      +
    • +
    • +

      Download a MrChecker Project https://downgit.github.io//home?url=https://github.com/devonfw/mrchecker/tree/develop/template[Template] to start a new project or Mrchecker Project https://downgit.github.io//home?url=https://github.com/devonfw/mrchecker/tree/develop/example[Example] to get better understanding what we are capable of.

      +
    • +
    • +

      You should consider installing some useful plugins such as: csvedit, cucumber editor.

      +
    • +
    • +

      Import:

      +
      +
      +install win05 +
      +
      +
    • +
    • +

      Projects from folders

      +
      +
      +install win06 +
      +
      +
    • +
    • +

      Open already created projects:

      +
      +
      +install win07 +
      +
      +
    • +
    • +

      Update project structure - ALT + F5

      +
      +
      +install win08 +
      +
      +
    • +
    +
    +
  4. +
+
+
+
+
+

26.22. Mac

+ +
+
MrChecker macOS installation
+
+

On this page, you can find all the details regarding MrChecker installation on your Mac.

+
+
+
+
Java installation
+
+

There is one important pre-requisite for Mr Checker installation - Java has to be installed on the computer and an environmental variable has to be set in order to obtain optimal functioning of the framework.

+
+
+
    +
  1. +

    Install Java 1.8 JDK 64bit

    +
    +

    Download and install Java download link

    +
    +
    +

    (To download JDK 8 from Oracle you have to have an account. It is recommended to get a JDK build based on OpenJDK from AdoptOpenJDK)

    +
    +
  2. +
  3. +

    Next, verify this in the command line:

    +
    +
    +
    > java --version
    +
    +
    +
  4. +
+
+
+
+
Other components installation
+
+

Install each component separately, or update the existing ones on your Mac.

+
+
+
    +
  1. +

    Maven 3.5

    +
    +
      +
    • +

      Download Maven

      +
    • +
    • +

      Unzip Maven in the following location /maven

      +
    • +
    • +

      Add Maven to PATH

      +
      +
      +
      > $ export PATH=$PATH:/maven/apache-maven-3.5.0/bin/
      +
      +
      +
    • +
    • +

      Verify in terminal:

      +
      +
      +
      > $ mvn -version
      +
      +
      +
    • +
    +
    +
  2. +
  3. +

    Eclipse IDE

    +
    +
      +
    • +

      Download and unzip Eclipse

      +
    • +
    • +

      Download MrCheckerTestFramework source code

      +
    • +
    • +

      Import:

      +
      +
      +image9 +
      +
      +
    • +
    • +

      Select Projects from folders:

      +
      +

      image10

      +
      +
    • +
    • +

      Open already created projects:

      +
      +

      image11

      +
      +
    • +
    • +

      Update project structure - ALT + F5

      +
      +

      image12

      +
      +
    • +
    +
    +
  4. +
+
+
+
+
+

26.23. My Thai Star

+ +
+
My Thai Star application setup
+
+

My Thai Star is a reference application for DevonFW so it was used extensively in majority of our examples. To make them run properly you definitely should set it up somewhere and configure environment.csv accordingly. +You can get the app from its official repository here https://github.com/devonfw/my-thai-star.

+
+
+
+
Setting up My Thai Star app
+
+

Most of the important information is covered in https://github.com/devonfw/my-thai-star#deployment.

+
+
+
The quick summary would be:
+
    +
  1. +

    Get the machine with docker and docker-compose

    +
  2. +
  3. +

    Download the repository

    +
  4. +
  5. +

    Run docker-compose up

    +
  6. +
  7. +

    Go to your project to set up environment.csv

    +
  8. +
  9. +

    The variables we are interested in are MY_THAI_STAR_URL and MY_THAI_STAR_API_URL

    +
  10. +
  11. +

    If you set up My Thai Star application on different host adjust the values accordingly

    +
  12. +
  13. +

    The web application should be available using localhost:8081/restaurant

    +
  14. +
  15. +

    The web API should be available using localhost:8081/api

    +
  16. +
+
+
+
+
+

26.24. Tutorials

+
+

In order to learn more about MrChecker structure, start from Project Organisation section and then check out our fantastic tutorials:

+
+
+

This tutorial will guide you through the series of tests which perform basic actions on webpages using MrChecker.

+
+
+

Make sure you already have MrChecker Test Framework installed on your PC. How to install?

+
+
+

Your Product Under Test will be the following website: http://the-internet.herokuapp.com/

+
+
+
Project organization
+ +
+
+
Importing projects
+
+

Every MrChecker project should be imported as a Maven Project.

+
+
+

Example from Eclipse IDE:

+
+
+
+1 +
+
+
+
+2 +
+
+
+

Enter the project path and select projects to import.

+
+
+
+3 +
+
+
+

When the import is finished, update the project structure - ALT + F5

+
+
+
+4 +
+
+
+
+
Exporting projects
+
+

In order to create a new standalone MrChecker project, you can use template-app-under-test and export it to the new folder:

+
+
+
+5 +
+
+
+
+6 +
+
+
+

Create a new folder for the project and enter its path. Select project and files to export:

+
+
+
+7 +
+
+
+

Change project name and other properties, if necessary, in pom.xml file:

+
+
+
+8 +
+
+
+

Then you can import the project to the workspace and create new packages and classes.

+
+
+
+
Creating new packages
+
+
    +
  1. +

    You will need two new packages: one for the new page classes, the other one for test classes:

    +
    +
      +
    • +

      Create a package for page classes

      +
      +
      +
      Open Eclipse
      +Use the "Project Explorer" on the left
      +Navigate to [your-project] → src/main/java → com.capgemini.mrchecker → selenium
      +Right-click on "selenium"
      +Click on "New" → New Package
      +Name the new package "com.capgemini.mrchecker.selenium.pages.[your-product-name]"
      +
      +
      +
    • +
    • +

      Create a package for test classes

      +
      +
      +
      Navigate to [your-project] → src/test/java → com.capgemini.mrchecker → selenium
      +Right click on "selenium"
      +Click on "New" → New Package
      +Name the new package "com.capgemini.mrchecker.selenium.tests.[your-product-name]"
      +
      +
      +
    • +
    +
    +
  2. +
+
+
+

Example:

+
+
+
+9 +
+
+
+
+
Creating new Page Classes
+
+
+
Navigate to: [your-project] → src/main/java → com.capgemini.mrchecker → selenium.pages.[your-product-name]
+Click on "New" → New Class
+Enter the name "YourPage"
+
+
+
+

Every Page Class should extend BasePage class. Import all necessary packages and override all required methods:

+
+
+
    +
  • +

    public boolean isLoaded() - returns true if the page is loaded and false if not

    +
  • +
  • +

    public void load() - loads the page

    +
  • +
  • +

    public String pageTitle() - returns page title

    +
  • +
+
+
+

Example:

+
+
+
+
 public class MainPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        return false;
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Main Page'");
+    }
+
+    @Override
+    public String pageTitle() {
+        return "Main Page Title";
+    }
+ }
+
+
+
+
+
Creating new Test Classes
+
+
+
Navigate to  [your-project] → src/test/java → com.capgemini.mrchecker → selenium.tests.[your-product-name]
+Click on "New" → New Class
+Enter the name "YourCaseTest"
+
+
+
+

Test classes should extend BaseTest class, import all necessary packages and override all required methods:

+
+
+
    +
  • +

    public void setUp() - executes before each test

    +
  • +
  • +

    public void tearDown() - executes after each test

    +
  • +
+
+
+

Optionally, it is also possible to implement the following methods:

+
+
+
    +
  • +

    @BeforeClass +public static void setUpBeforeClass() - runs only once before all tests

    +
  • +
  • +

    @AfterClass +public static void tearDownAfterClass() - runs only once after performing all tests

    +
  • +
+
+
+

Every test method has to be annotated with the "@Test" annotation.

+
+
+
+
 public class YourCaseTest extends BaseTest {
+    private static MainPage mainPage = new MainPage();
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        mainPage.load();
+    }
+
+    @AfterClass
+    public static void tearDownAfterClass() {
+
+    }
+
+    @Override
+    public void setUp() {
+        if (!mainPage.isLoaded()) {
+            mainPage.load();
+        }
+    }
+
+    @Override
+    public void tearDown() {
+
+    }
+
+    @Test
+    public void shouldTestRunWithoutReturningError() {
+
+    }
+ }
+
+
+
+
+
Running Tests
+
+

Run the test by right-clicking on the test method → Run as → JUnit test.

+
+
+
+
+

26.25. Basic Tutorials

+ +
+
+

26.26. Basic Tests

+
+
+example1 +
+
+
+

The goal of this test is to open A/B Test subpage and redirect to another website.

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click A/B Testing link and go to A/B Test subpage

    +
  4. +
  5. +

    Click Elemental Selenium link and open it in new tab

    +
  6. +
  7. +

    Switch to Elemental Selenium page and check if it’s loaded

    +
  8. +
+
+
+
+example2 +
+
+
+== Page Class +
+

Create a Page class for AB Testing page. Override all the required methods:

+
+
+
+
 public class ABtestPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.ABTEST.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'A/B Test Control' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.ABTEST.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+ }
+
+
+
+
+== How to use Enum? +
+

Similarly as in environmental variables case, create an enum for storing values of subURLs:

+
+
+
+
 public enum PageSubURLsProjectYEnum {
+
+    BASIC_AUTH("basic_auth"),
+    NEW_WINDOW("windows/new"),
+    WINDOW("windows"),
+    CHECKBOX("checkboxes"),
+    CONTEXT_MENU("context_menu"),
+    KEY_PRESS("key_presses"),
+    DYNAMIC_CONTENT("dynamic_content"),
+    HOVERS("hovers"),
+    SORTABLE_DATA_TABLES("tables"),
+    REDIRECT("redirector"),
+    JAVASCRIPT_ALERTS("javascript_alerts"),
+    CHALLENGING_DOM("challenging_dom"),
+    STATUS_CODES("status_codes"),
+    LOGIN("login"),
+    ABTEST("abtest"),
+    BROKEN_IMAGES("broken_images"),
+    DROPDOWN("dropdown"),
+    HORIZONTAL_SLIDER("horizontal_slider"),
+    DOWNLOAD("download"),
+    FORGOT_PASSWORD("forgot_password"),
+    FORGOT_PASSWORD_EMAIL_SENT("email_sent"),
+    EXIT_INTENT("exit_intent"),
+    DYNAMIC_LOADING("dynamic_loading"),
+    DISAPPEARING_ELEMENTS("disappearing_elements"),
+    DRAG_AND_DROP("drag_and_drop"),
+    DYNAMIC_CONTROLS("dynamic_controls"),
+    UPLOAD("upload"),
+    FLOATING_MENU("floating_menu"),
+    FRAMES("frames"),
+    GEOLOCATION("geolocation"),
+    INFINITE_SCROLL("infinite_scroll"),
+    JQUERY_UI("jqueryui/menu"),
+    JAVASCRIPT_ERROR("javascript_error"),
+    LARGE_AND_DEEP_DOM("large"),
+    NESTED_FRAMES("nested_frames"),
+    NOTIFICATION_MESSAGE("notification_message"),
+    DOWNLOAD_SECURE("download_secure"),
+    SHIFTING_CONTENT("shifting_content"),
+    SLOW_RESOURCES("slow"),
+    TYPOS("typos"),
+    WYSIWYGEDITOR("tinymce");
+
+    /*
+     * Sub URLs are used as real locations in the test environment
+     */
+    private String subURL;
+
+    private PageSubURLsProjectYEnum(String subURL) {
+        this.subURL = subURL;
+    }
+
+    ;
+
+    private PageSubURLsProjectYEnum() {
+
+    }
+
+    @Override
+    public String toString() {
+        return getValue();
+    }
+
+    public String getValue() {
+        return subURL;
+    }
+
+}
+
+
+
+

Instead of mapping data from an external file, you can store and access them directly from the enum class:

+
+
+
+
PageSubURLsProjectYEnum.ABTEST.getValue()
+
+
+
+
+== Selector +
+

In this test case you need selector for only one page element:

+
+
+
+
private static final By elementalSeleniumLinkSelector = By.cssSelector("div > div > a");
+
+
+
+
+== Page methods +
+

You need two methods for performing page actions:

+
+
+
+
     /**
+     * Clicks 'Elemental Selenium' link at the bottom of the page.
+     *
+     * @return ElementalSeleniumPage object.
+     */
+    public ElementalSeleniumPage clickElementalSeleniumLink() {
+        getDriver().findElementDynamic(elementalSeleniumLinkSelector)
+                .click();
+        getDriver().waitForPageLoaded();
+        return new ElementalSeleniumPage();
+    }
+
+    /**
+     * Switches window to the next one - different than the current.
+     */
+    public void switchToNextTab() {
+        ArrayList<String> tabsList = new ArrayList<String>(getDriver().getWindowHandles());
+        getDriver().switchTo()
+                .window(tabsList.get(1));
+    }
+
+
+
+
+== Elemental Selenium Page Class +
+

To return new Elemental Selenium Page object, implement its class. You only need to write basic methods to check if the page is loaded. There is no need to interact with objects on the site:

+
+
+
+
 public class ElementalSeleniumPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(GetEnvironmentParam.ELEMENTAL_SELENIUM_PAGE.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Elemental Selenium' page.");
+        getDriver().get(GetEnvironmentParam.ELEMENTAL_SELENIUM_PAGE.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+}
+
+
+
+
+== Test Class +
+

Create a Test class and write a @Test method to execute the scenario:

+
+
+
+
 @Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class ABtestingTest extends TheInternetBaseTest {
+
+    private static ABtestPage abTestPage;
+
+    @Test
+    public void shouldOpenElementalSeleniumPageWhenClickElementalSeleniumLink() {
+
+        logStep("Click Elemental Selenium link");
+        ElementalSeleniumPage elementalSeleniumPage = abTestPage.clickElementalSeleniumLink();
+
+        logStep("Switch browser's tab to newly opened one");
+        abTestPage.switchToNextTab();
+
+        logStep("Verify if Elemental Selenium Page is opened");
+        assertTrue("Unable to open Elemental Selenium page", elementalSeleniumPage.isLoaded());
+    }
+
+}
+
+
+
+
+== Assert +
+

Asserts methods are used for creating test pass or fail conditions. The optional first parameter is a message which will be displayed in the test failure description.

+
+
+
    +
  • +

    assertTrue(boolean condition) - test passes if condition returns true

    +
  • +
  • +

    assertFalse(boolean condition) - test passes if condition returns false

    +
  • +
+
+
+

Also, add the @BeforeClass method to open the tested page:

+
+
+
+
 @BeforeClass
+    public static void setUpBeforeClass() {
+        abTestPage = shouldTheInternetPageBeOpened().clickABtestingLink();
+        logStep("Verify if ABTest page is opened");
+        assertTrue("Unable to open ABTest page", abTestPage.isLoaded());
+    }
+
+
+
+

@BeforeClass method executes only once before all other +@Test cases in the class. There is also a possibility to create a +@AfterClass method which is performed also once after all @Test cases.

+
+
+

You don’t need to implement @setUp and @tearDown methods because they’re already in TheInternetBaseTest class which you extend.

+
+
+
+== Categories +
+

You can group tests in categories. It’s useful when running many tests at once. Use this parameter:

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+
+
+
+

Then create an interface representing each category. Example:

+
+
+
+
public interface TestsSelenium {
+    /* For test which are testing web pages considering UI (user interface) and using selenium webdriver */
+}
+
+
+
+

To run a test from specified category create Test Suite class:

+
+
+
+
@RunWith(WildcardPatternSuite.class) //search for test files under /src/test/java
+@IncludeCategories({ TestsChrome.class }) // search all test files with category TestsChrome.class
+@ExcludeCategories({ TestsLocal.class, TestsNONParallel.class }) //exclude all test files with category TestsLocal.class and TestsNONParallel.class
+@SuiteClasses({ "../**/*Test.class" }) //search only test files, where file name ends with <anyChar/s>Test.class
+
+public class _TestSuiteChrome {
+
+}
+
+
+
+

You can run a Test Suite as a JUnit test.

+
+
+
+example3 +
+
+
+

In this test case, the goal is to pass username and password authorization and login to the next page.

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click on Basic Auth link

    +
  4. +
  5. +

    Open pop-up login window

    +
  6. +
  7. +

    Enter valid username and password

    +
  8. +
  9. +

    Open next subpage and verify if the user logged in successfully.

    +
  10. +
+
+
+
+== Page Class +
+

Create a page class which represents Basic Auth subpage after proper login.

+
+
+
+example4 +
+
+
+

Override all the required methods:

+
+
+
+
public class BasicAuthPage extends BasePage {
+
+    public BasicAuthPage() {
+
+    }
+
+    public BasicAuthPage(String login, String password) {
+        this.enterLoginAndPasswordByUrl(login, password);
+    }
+
+    @Override
+    public boolean isLoaded() {
+        return true;
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("load");
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+
+
+

In order to verify a login, create a selector to access the visible message.

+
+
+
+
 private static final By selectorTextMessage = By.cssSelector("#content > div > p");
+Then create a method to get message value:
+
+/**
+*       Returns message displayed by system after user's log in.
+*      @return String object representing message displayed by system after user's log in
+*/
+    public String getMessageValue() {
+                return getDriver().findElementDynamic(selectorTextMessage)
+                    .getText();
+}
+
+
+
+

Also, create a method to access the pop-up login window and enter user credentials:

+
+
+
+
    /**
+     * Authenticates user using standard simple authentication popup.
+     *
+     * @param login    User's login
+     * @param password User's password
+     * @throws AWTException
+     * @throws InterruptedException
+     */
+    public void enterLoginAndPassword(String login, String password) throws AWTException, InterruptedException {
+        Robot rb = new Robot();
+
+        Thread.sleep(2000);
+
+        StringSelection username = new StringSelection(login);
+        Toolkit.getDefaultToolkit()
+                .getSystemClipboard()
+                .setContents(username, null);
+        rb.keyPress(KeyEvent.VK_CONTROL);
+        rb.keyPress(KeyEvent.VK_V);
+        rb.keyRelease(KeyEvent.VK_V);
+        rb.keyRelease(KeyEvent.VK_CONTROL);
+
+        rb.keyPress(KeyEvent.VK_TAB);
+        rb.keyRelease(KeyEvent.VK_TAB);
+        Thread.sleep(2000);
+
+        StringSelection pwd = new StringSelection(password);
+        Toolkit.getDefaultToolkit()
+                .getSystemClipboard()
+                .setContents(pwd, null);
+        rb.keyPress(KeyEvent.VK_CONTROL);
+        rb.keyPress(KeyEvent.VK_V);
+        rb.keyRelease(KeyEvent.VK_V);
+        rb.keyRelease(KeyEvent.VK_CONTROL);
+
+        rb.keyPress(KeyEvent.VK_ENTER);
+        rb.keyRelease(KeyEvent.VK_ENTER);
+        Thread.sleep(2000);
+    }
+
+
+
+
+== Robot class +
+

Creating a Robot object allows performing basic system actions such as pressing keys, moving the mouse or taking screenshots. In this case, it’s used to paste login and password text from the clipboard using 'Ctrl + V' shortcut, go to the next field using 'Tab' key and submit by clicking 'Enter'.

+
+
+
+Toolkit +
+

Static class Toolkit can perform basic window actions such as scrolling to a specified position or moving context between components. In this case, it’s used to set clipboard content to username and password value.

+
+
+
+
Thread.sleep(long millis)
+
+
+
+

Web drivers like Selenium perform actions much faster than the normal user. This may cause unexpected consequences e.g. some elements may not be loaded before the driver wants to access them. To avoid this problem you can use Thread.sleep(long millis) to wait given time and let browser load wanted component.

+
+
+

BEWARE: Using Thread.sleep(long millis) is not the recommended approach. Selenium driver gives methods to wait for a specified element to be enabled or visible with a timeout parameter. This is a more stable and effective way. Also, method waitForPageLoaded() will not solve that issue because it only waits for the ready state from the browser while some javascript actions might be performed after that.

+
+
+
+== Test Class +
+

Create a Test class and write a @Test method to execute the scenario. Save parameters as class fields:

+
+
+
+
@Category({ TestsLocal.class, TestsNONParallel.class })
+public class BasicAuthTest extends TheInternetBaseTest {
+
+    private static BasicAuthPage basicAuthPage;
+
+    private String login    = "admin";
+    private String password = "admin";
+    private String message  = "Congratulations! You must have the proper credentials.";
+
+    @Test
+    public void shouldUserLogInWithValidCredentials() throws InterruptedException, AWTException {
+        basicAuthPage = shouldTheInternetPageBeOpened().clickBasicAuthLink();
+
+        logStep("Enter login and password");
+        basicAuthPage.enterLoginAndPassword(login, password);
+
+        logStep("Verify if user logged in successfully");
+        assertEquals("Unable to login user with valid credentials", message,
+            basicAuthPage.getMessageValue());
+    }
+
+    @Override
+    public void tearDown() {
+        logStep("Navigate back to The-Internet page");
+        theInternetPage.load();
+    }
+}
+
+
+
+

assertEquals(Object expected, Object actual) - test passes if parameters are equal.

+
+
+
+== Alternative scenario: +
+

There is also a possibility to log in with credentials as a part of URL: http://login:password@the-internet.herokuapp.com/basic_auth

+
+
+

Another page class method:

+
+
+
+
/**
+     * Authenticates user passing credentials into URL.
+     *
+     * @param login    User's login
+     * @param password User's password
+     */
+    private void enterLoginAndPasswordByUrl(String login, String password) {
+        getDriver().get("http://" + login + ":" + password + "@" + "the-internet.herokuapp.com/" +
+            PageSubURLsProjectYEnum.BASIC_AUTH.getValue());
+    }
+
+
+
+

Another test class method:

+
+
+
+
@Test
+    public void shouldUserLogInWithValidCredentialsSetInURL() {
+        logStep("Enter user's credentials into URL to log in");
+        basicAuthPage = new BasicAuthPage(login, password);
+
+        logStep("Verify if user logged in successfully");
+        assertEquals("Unable to login user with valid credentials", message,
+            basicAuthPage.getMessageValue());
+    }
+
+
+
+

After running test class as a JUnit test, both test cases will be performed.

+
+
+

This test goal is to check the dimensions of broken images on the subpage.

+
+
+
+example5 +
+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Broken Image link and go to Broken Image subpage

    +
  4. +
  5. +

    Get the 3 images' dimensions and compare them with expected values

    +
  6. +
+
+
+
+== Page Class +
+

In this case, create an array of selectors to access images by index number:

+
+
+
+
public class BrokenImagePage extends BasePage {
+
+    private static final By[] selectorsImages = { By.cssSelector("div > img:nth-child(2)"),
+            By.cssSelector("div > img:nth-child(3)"),
+            By.cssSelector("div > img:nth-child(4)") };
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.BROKEN_IMAGES.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Broken Images' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.BROKEN_IMAGES.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns an image height in pixels.
+     *
+     * @param imageIndex An index of given image.
+     * @return Height of an image in pixels.
+     */
+    public int getImageHeight(int imageIndex) {
+        return getImageDimension(imageIndex).getHeight();
+    }
+
+    /**
+     * Returns an image width in pixels.
+     *
+     * @param imageIndex An index of given image.
+     * @return Width of an image in pixels.
+     */
+    public int getImageWidth(int imageIndex) {
+        return getImageDimension(imageIndex).getWidth();
+    }
+
+    private Dimension getImageDimension(int imageIndex) {
+        return getDriver().findElementDynamic(selectorsImages[imageIndex])
+                .getSize();
+    }
+
+}
+
+
+
+
+== Test Class +
+

Create @Test and @BeforeClass methods. Save expected images' dimensions in class fields:

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class BrokenImagesTest extends TheInternetBaseTest {
+
+    private static BrokenImagePage brokenImagePage;
+
+    private final int expectedHeight = 90;
+    private final int expectedWidth  = 120;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        brokenImagePage = shouldTheInternetPageBeOpened().clickBrokenImageLink();
+
+        logStep("Verify if Broken Image page is opened");
+        assertTrue("Unable to open Broken Image page", brokenImagePage.isLoaded());
+    }
+
+    @Test
+    public void shouldImageSizesBeEqualToExpected() {
+        for (int i = 0; i < 3; i++) {
+            logStep("Verify size of image with index: " + i);
+            assertEquals("Height of image with index: " + i + " is incorrect", expectedHeight,
+                   brokenImagePage.getImageHeight(i));
+            assertEquals("Width of image with index: " + i + " is incorrect", expectedWidth,
+                   brokenImagePage.getImageWidth(i));
+        }
+    }
+
+}
+
+
+
+

The test will pass if every image has the correct width and height.

+
+
+

This case goal is to find out how to create stable selectors.

+
+
+

In the browser’s developer mode, you can see how the page is built. Notice, that buttons' IDs change after click and values in the table haven’t got unique attributes, which might be helpful in order to find them.

+
+
+
+example6 +
+
+
+
+== DOM - Document Object Model +
+

HTML DOM is a model of the page created by the browser. The page could be represented as the tree of objects. Read more.

+
+
+

To create locators you can use element attributes such as id, class name etc.

+
+
+

In this case, since there are no unique attributes, the best approach is to use the HTML document structure and identify page elements by their place in the object hierarchy.

+
+
+
+
Page Class
+public class ChallengingDomPage extends BasePage {
+
+    private final By selectorTableRows   = By.cssSelector(".large-10 > table > tbody > tr");
+    private final By selectorFirstButton = By.cssSelector(".large-2.columns > .button:nth-
+            child(1)");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.CHALLENGING_DOM.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Challenging DOM' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.CHALLENGING_DOM.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns table text content as a list of String objects.
+     *
+     * @return A list of table values.
+     */
+    public List<String> getTableValues() {
+        return JsoupHelper.findTexts(selectorTableRows);
+    }
+
+    /**
+     * Clicks top button on the page from available button set.
+     */
+    public void clickFirstButton() {
+        getDriver().elementButton(selectorFirstButton)
+                .click();
+        getDriver().waitForPageLoaded();
+    }
+
+}
+
+
+
+
+== Jsoup Helper +
+

Jsoup Helper is the tool which helps to parse HTML document and get searched values. This is especially useful when values are organized in a generic structure such as a table.

+
+
+

JsoupHelper.findTexts(By selector) - this method returns text content of a table as a list of Strings

+
+
+
+== Test Class +
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Challenging DOM link and go to Challenging DOM subpage

    +
  4. +
  5. +

    Get and save table values

    +
  6. +
  7. +

    Click the first button

    +
  8. +
  9. +

    Get table values again

    +
  10. +
  11. +

    Compare table values before and after button click

    +
  12. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class ChallengingDomTest extends TheInternetBaseTest {
+
+    private static ChallengingDomPage challengingDomPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        challengingDomPage = shouldTheInternetPageBeOpened().clickChallengingDomLink();
+
+        logStep("Verify if Challenging Dom page is opened");
+        assertTrue("Unable to open Challenging Dom page", challengingDomPage.isLoaded());
+    }
+
+    @Test
+    public void shouldValuesInTableCellsStayUnchangedAfterClick() {
+
+        logStep("Get table values (before click any button)");
+        List<String> tableValuesBeforeClick = challengingDomPage.getTableValues();
+
+        logStep("Click first button");
+        challengingDomPage.clickFirstButton();
+
+        logStep("Get table values (after click first button)");
+        List<String> tableValuesAfterClick = challengingDomPage.getTableValues();
+
+        logStep("Verify equality of table values before and after click");
+        assertEquals("Values from table cells were changed after click", tableValuesBeforeClick,
+                tableValuesAfterClick);
+    }
+
+}
+
+
+
+

Because values in the table don’t change, the test should pass if object locators are solid.

+
+
+

In this example, you will learn how to test checkboxes on the page.

+
+
+
+example7 +
+
+
+

A checkbox is a simple web element which can be selected or unselected by clicking on it.

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Checkboxes link and go to Checkboxes page

    +
  4. +
  5. +

    Test if the first checkbox is unchecked

    +
  6. +
  7. +

    Select the first checkbox

    +
  8. +
  9. +

    Test if the first checkbox is checked

    +
  10. +
  11. +

    Test if the second checkbox is checked

    +
  12. +
  13. +

    Unselect second checkbox

    +
  14. +
  15. +

    Test if the second checkbox is unchecked

    +
  16. +
+
+
+
+== Page Class +
+

Because both checkboxes are in one form, it’s possible to locate them by one selector and then access each individual one by index.

+
+
+
+example8 +
+
+
+
+
public class CheckboxesPage extends BasePage {
+
+    private final static By checkboxesFormSelector = By.cssSelector("#checkboxes");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.CHECKBOX.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Checkboxes' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.CHECKBOX.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Verifies if checkbox form is visible on the page.
+     *
+     * @return true if checkboxes are present and displayed on the page
+     */
+    public boolean isElementCheckboxesFormVisible() {
+        return getDriver().elementCheckbox(checkboxesFormSelector)
+                .isDisplayed();
+    }
+
+    /**
+     * Verifies if given checkbox is selected or not.
+     *
+     * @param index The index of given checkbox
+     * @return true if given checkbox is selected
+     */
+    public boolean isCheckboxSelected(int index) {
+        return getDriver().elementCheckbox(checkboxesFormSelector)
+                .isCheckBoxSetByIndex(index);
+    }
+
+    /**
+     * Selects given checkbox. Unselects, if it is already selected.
+     *
+     * @param index The index of given checkbox
+     */
+    public void selectCheckbox(int index) {
+        CheckBox checkbox = getDriver().elementCheckbox(checkboxesFormSelector);
+        if (isCheckboxSelected(index)) {
+            checkbox.unsetCheckBoxByIndex(index);
+        } else {
+            checkbox.setCheckBoxByIndex(index);
+        }
+    }
+
+}
+
+
+
+
+== CheckBox +
+

CheckBox class contains methods to perform actions on checkboxes, such as setting and unsetting or verifying if the specified box is checked. +Use method elementCheckbox(By selector) to create a CheckBox Object.

+
+
+
+== Test Class +
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class CheckboxesTest extends TheInternetBaseTest {
+
+    private static CheckboxesPage checkboxesPage;
+
+    @Override
+    public void setUp() {
+        checkboxesPage = shouldTheInternetPageBeOpened().clickCheckboxesLink();
+
+        logStep("Verify if Checkboxes page is opened");
+        assertTrue("Unable to open Checkboxes page", checkboxesPage.isLoaded());
+    }
+
+    @Test
+    public void shouldCheckboxBeSelectedAfterClick() {
+
+        logStep("Verify if first checkbox is not selected");
+        assertFalse("The checkbox is selected", checkboxesPage.isCheckboxSelected(0));
+
+        logStep("Select first checkbox");
+        checkboxesPage.selectCheckbox(0);
+
+        logStep("Verify if first checkbox is selected");
+        assertTrue("The checkbox is not selected", checkboxesPage.isCheckboxSelected(0));
+    }
+
+    @Test
+    public void shouldCheckboxBeUnselectedAfterClick() {
+
+        logStep("Verify if second checkbox is selected");
+        assertTrue("The checkbox is not selected", checkboxesPage.isCheckboxSelected(1));
+
+        logStep("Select second checkbox");
+        checkboxesPage.selectCheckbox(1);
+
+        logStep("Verify if second checkbox is not selected");
+        assertFalse("The checkbox is selected", checkboxesPage.isCheckboxSelected(1));
+    }
+
+}
+
+
+
+

After running the Test Class, both @Test cases will be performed. Before each one, the overridden setUp method will be executed.

+
+
+

This case will show how to test changing website content.

+
+
+
+example9 +
+
+
+

After refreshing page (F5) a few times, a new element should appear:

+
+
+
+example10 +
+
+
+

Then, after another couple of refreshes, it should disappear.

+
+
+

You can check in developer mode that Gallery element does not exist in HTML document either, until appearing on the page. The element is created by Javascript.

+
+
+
+example11 +
+
+
+
+example12 +
+
+
+

Steps:

+
+
+
    +
  1. +

    Load The Internet Main Page

    +
  2. +
  3. +

    Click Disappearing Elements link and go to that subpage

    +
  4. +
  5. +

    Check if Menu Buttons exist on the page

    +
  6. +
  7. +

    Refresh the page until a new element appears

    +
  8. +
  9. +

    Check if Gallery Button exists

    +
  10. +
  11. +

    Check if the number of buttons equals the expected value

    +
  12. +
  13. +

    Refresh the page until an element disappears

    +
  14. +
  15. +

    Check if Gallery Button does not exist

    +
  16. +
  17. +

    Check if the number of buttons is smaller than before

    +
  18. +
+
+
+
+== Page Class +
+
+
public class DisappearingElementsPage extends BasePage {
+
+    private static final By selectorGalleryMenuButton = By.cssSelector("li > a[href*=gallery]");
+    private static final By selectorMenuButtons       = By.cssSelector("li");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DISAPPEARING_ELEMENTS.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Disappearing Elements' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DISAPPEARING_ELEMENTS.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns a number of WebElements representing menu buttons.
+     *
+     * @return A number of WebElements.
+     */
+    public int getNumberOfMenuButtons() {
+        return getDriver().findElementDynamics(selectorMenuButtons)
+                .size();
+    }
+
+    /**
+     * Returns WebElement representing disappearing element of menu.
+     *
+     * @return Disappearing WebElement if visible, null otherwise.
+     */
+    public WebElement getGalleryMenuElement() {
+        return getDriver().findElementQuietly(selectorGalleryMenuButton);
+    }
+
+    /**
+     * Refreshes web page as many times as it is required to appear/disappear menu button
+     * WebElement.
+     *
+     * @param shouldAppear Determines if element should appear (true) or disappear (false).
+     */
+    public void refreshPageUntilWebElementAppears(boolean shouldAppear) {
+        int numberOfAttempts = 5;
+        int counter = 0;
+        while (!isVisibilityAsExpected(shouldAppear) || isMaxNumberOfAttemptsReached(counter++,
+                numberOfAttempts)) {
+            refreshPage();
+        }
+    }
+
+    /**
+     * Verify if visibility of Gallery button is the same as expected
+     *
+     * @param expected Determines if element should be visible (true) or not visible (false).
+     */
+    private boolean isVisibilityAsExpected(boolean expected) {
+        boolean isVisibilityDifferentThanExpected = isGalleryMenuElementVisible() ^ expected;
+        return !isVisibilityDifferentThanExpected;
+    }
+
+    private boolean isGalleryMenuElementVisible() {
+        boolean result = false;
+        WebElement gallery = getGalleryMenuElement();
+        if (gallery != null)
+            result = gallery.isDisplayed();
+        return result;
+    }
+
+    private boolean isMaxNumberOfAttemptsReached(int attemptNo, int maxNumberOfAttempts) {
+        return attemptNo ==  maxNumberOfAttempts;
+    }
+
+}
+
+
+
+

findElementQuietly(By selector) works similarly to findElementDynamics(By selector) but won’t throw an exception if an element wasn’t found. In this case, the searched WebElement will have a NULL value.

+
+
+
+== Test Class +
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class DisappearingElementsTest extends TheInternetBaseTest {
+
+    private static final int totalNumberOfMenuButtons = 5;
+    private static DisappearingElementsPage disappearingElementsPage;
+    private static       int numberOfMenuButtons      = 0;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        disappearingElementsPage = shouldTheInternetPageBeOpened().clickDisappearingElementsLink();
+
+        logStep("Verify if Disappearing Elements page is opened");
+        assertTrue("Unable to open Disappearing Elements page",
+                disappearingElementsPage.isLoaded());
+
+        logStep("Verify if menu button elements are visible");
+        numberOfMenuButtons = disappearingElementsPage.getNumberOfMenuButtons();
+        assertTrue("Unable to display menu", numberOfMenuButtons > 0);
+    }
+
+    @Test
+    public void shouldMenuButtonElementAppearAndDisappearAfterRefreshTest() {
+        logStep("Click refresh button until menu button appears");
+        disappearingElementsPage.refreshPageUntilWebElementAppears(true);
+
+        logStep("Verify if menu button element appeared");
+        assertNotNull("Unable to disappear menu button element",
+                disappearingElementsPage.getGalleryMenuElement());
+        assertEquals("The number of button elements after refresh is incorrect",
+                totalNumberOfMenuButtons, disappearingElementsPage.getNumberOfMenuButtons());
+
+        logStep("Click refresh button until menu button disappears");
+        disappearingElementsPage.refreshPageUntilWebElementAppears(false);
+
+        logStep("Verify if menu button element disappeared");
+        assertNull("Unable to appear menu button element",
+                disappearingElementsPage.getGalleryMenuElement());
+        assertTrue("The number of button elements after refresh is incorrect",
+                totalNumberOfMenuButtons > disappearingElementsPage.getNumberOfMenuButtons());
+    }
+
+}
+
+
+
+

assertNull(Object object) - test passes if Object returns NULL +assertNotNull(Object object) - test passes if Object does not return NULL

+
+
+

This case shows how to move draggable elements on the page.

image::images/example13.png[]

+
+
+

Try to move A to B position and see what happens. Also, open browser developer mode and see how the DOM changes.

+
+
+
+example14 +
+
+
+

The page can easily be broken. You can try to do so and check how the page structure changed in browser developer mode.

+
+
+
+example15 +
+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Drag and Drop link and open subpage

    +
  4. +
  5. +

    Check if the Drag and Drop message is visible

    +
  6. +
  7. +

    Check if element A is in container A and B in container B

    +
  8. +
  9. +

    Move element A to position B

    +
  10. +
  11. +

    Check if element A is in container B and B in container A

    +
  12. +
  13. +

    Move element B to position A

    +
  14. +
  15. +

    Again check if element A is in container A and B in container B

    +
  16. +
+
+
+
+== Page Class +
+
+
public class DragAndDropPage extends BasePage {
+
+    private static final By selectorDragAndDropText    = By.cssSelector("div#content h3");
+    private static final By selectorAElementContainer  = By.cssSelector("div#column-a");
+    private static final By selectorBElementContainer  = By.cssSelector("div#column-b");
+    private static final By selectorDescriptionElement = By.cssSelector("header");
+
+    private static final String dndHelperPath = "src/test/resources/js/drag_and_drop_helper.js";
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DRAG_AND_DROP.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Drag and Drop' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() + PageSubURLsProjectYEnum.DRAG_AND_DROP.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns information if drag and drop message is visible or not.
+     *
+     * @return true if exit drag and drop message was found on web page.
+     */
+    public boolean isDragAndDropMessageVisible() {
+        return getDriver().findElementDynamic(selectorDragAndDropText)
+                .isDisplayed();
+    }
+
+    /**
+     * Verifies if specified element is placed in designated container.
+     *
+     * @param element WebElement to be verified.
+     * @return true if element described as A exists in container A or element B exists in container B, false otherwise.
+     */
+    public boolean isElementPlacedInCorrectContainer(String element) {
+        return getDescriptionElement(findElementByDescription(element)).getText()
+                .equals(element);
+    }
+
+    private WebElement findElementByDescription(String element) {
+        WebElement result;
+        switch (element) {
+            case "A":
+                result = getContainerElement(selectorAElementContainer);
+                break;
+            case "B":
+                result = getContainerElement(selectorBElementContainer);
+                break;
+            default:
+                result = null;
+                BFLogger.logDebug("Chosen element doesn't exist on web page");
+        }
+        return result;
+    }
+
+    private WebElement getContainerElement(By container) {
+        return getDriver().findElementDynamic(container);
+    }
+
+    private WebElement getDescriptionElement(WebElement container) {
+        return container.findElement(selectorDescriptionElement);
+    }
+
+    /**
+     * Drags element to designated container and drops it.
+     *
+     * @param element         String describing WebElement expected to be dragged.
+     * @param from            String describing WebElement representing container of element expected to be dragged.
+     * @param destinationDesc String describing WebElement representing destination container where other element will be dragged.
+     */
+    public void dragElementToPosition(String element, String from, String destinationDesc) {
+        WebElement source = findElementByDescription(from);
+        WebElement description = getDescriptionElement(source);
+        WebElement destination = findElementByDescription(destinationDesc);
+        if (description.getText()
+                .equals(element))
+            dragElement(source, destination);
+    }
+
+}
+
+
+
+

Since HTML5, normal Selenium drag-and-drop action stopped working, thus it’s necessary to execute Javascript which performs the drag-and-drop. To do so, create a JavascriptExecutor object, then read the script from a file drag_and_drop_helper.js and execute it with additional arguments using method executeScript(String script).

+
+
+

An example drag-and-drop solution:

+
+
+
+
    /**
+     * Drags and drops given WebElement to it's destination location.
+     * <p>
+     * Since HTML5 all Selenium Actions performing drag and drop operations stopped working as expected, e.g.
+     * original implementation, which was:
+     * <code>
+     * BasePage.getAction()
+     * .clickAndHold(draggable)
+     * .moveToElement(target)
+     * .release()
+     * .build()
+     * .perform();
+     * </code>
+     * finishes with no effect. For this reason, there is javaScript function used, to make sure that
+     * drag and drop operation will be successful.
+     * JavaScript function is stored under the following path: 'src/test/resources/js/drag_and_drop_helper.js'.
+     * Original source of the script:
+     * <a href="https://gist.github.com/rcorreia/2362544">drag_and_drop_helper</a>
+     * </p>
+     *
+     * @param draggable A WebElement to be dragged and dropped.
+     * @param target    A destination, where element will be dropped.
+     * @see JavascriptExecutor
+     * @see Actions
+     */
+    private void dragElement(WebElement draggable, WebElement target) {
+        JavascriptExecutor js;
+        INewWebDriver driver = getDriver();
+        List<String> fileContent;
+        String draggableId = draggable.getAttribute("id");
+        String targetId = target.getAttribute("id");
+        String script = null;
+        if (draggable.getAttribute("draggable")
+                .contains("true")) {
+            if (driver instanceof JavascriptExecutor) {
+                js = (JavascriptExecutor) driver;
+                Path path = Paths.get(dndHelperPath);
+                try {
+                    fileContent = Files.readAllLines(path);
+                    script = fileContent.stream()
+                            .collect(Collectors.joining());
+                } catch (IOException e) {
+                    BFLogger.logDebug("Unable to read file content: " + e.getMessage());
+                }
+                if (script != null && !script.isEmpty()) {
+                    String arguments = "$('#%s').simulateDragDrop({ dropTarget: '#%s'});";
+                    js.executeScript(script + String.format(arguments, draggableId, targetId));
+                }
+            }
+        }
+    }
+
+
+
+

Drag and Drop helper file:

+
+
+
+
(function( $ ) {
+        $.fn.simulateDragDrop = function(options) {
+                return this.each(function() {
+                        new $.simulateDragDrop(this, options);
+                });
+        };
+        $.simulateDragDrop = function(elem, options) {
+                this.options = options;
+                this.simulateEvent(elem, options);
+        };
+        $.extend($.simulateDragDrop.prototype, {
+                simulateEvent: function(elem, options) {
+                        /*Simulating drag start*/
+                        var type = 'dragstart';
+                        var event = this.createEvent(type);
+                        this.dispatchEvent(elem, type, event);
+
+                        /*Simulating drop*/
+                        type = 'drop';
+                        var dropEvent = this.createEvent(type, {});
+                        dropEvent.dataTransfer = event.dataTransfer;
+                        this.dispatchEvent($(options.dropTarget)[0], type, dropEvent);
+
+                        /*Simulating drag end*/
+                        type = 'dragend';
+                        var dragEndEvent = this.createEvent(type, {});
+                        dragEndEvent.dataTransfer = event.dataTransfer;
+                        this.dispatchEvent(elem, type, dragEndEvent);
+                },
+                createEvent: function(type) {
+                        var event = document.createEvent("CustomEvent");
+                        event.initCustomEvent(type, true, true, null);
+                        event.dataTransfer = {
+                                data: {
+                                },
+                                setData: function(type, val){
+                                        this.data[type] = val;
+                                },
+                                getData: function(type){
+                                        return this.data[type];
+                                }
+                        };
+                        return event;
+                },
+                dispatchEvent: function(elem, type, event) {
+                        if(elem.dispatchEvent) {
+                                elem.dispatchEvent(event);
+                        }else if( elem.fireEvent ) {
+                                elem.fireEvent("on"+type, event);
+                        }
+                }
+        });
+})(jQuery);
+
+
+
+
+== Test Class +
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class DragAndDropTest extends TheInternetBaseTest {
+
+    private static final String ELEMENT_A   = "A";
+    private static final String CONTAINER_A = "A";
+    private static final String ELEMENT_B   = "B";
+    private static final String CONTAINER_B = "B";
+
+    private static DragAndDropPage dragAndDropPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        dragAndDropPage = shouldTheInternetPageBeOpened().clickDragAndDropLink();
+
+        logStep("Verify if Drag And Drop page is opened");
+        assertTrue("Unable to open Drag And Drop page", dragAndDropPage.isLoaded());
+
+        logStep("Verify if Drag And Drop message is visible");
+        assertTrue("Drag And Drop message is not visible", dragAndDropPage.isDragAndDropMessageVisible());
+    }
+
+    @Test
+    public void shouldDraggableElementBeMovedAndDropped() {
+        logStep("Verify if elements are placed in proper containers");
+        assertTrue("Element A doesn't exist in container A", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_A));
+        assertTrue("Element B doesn't exist in container B", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_B));
+
+        logStep("Step 7: Drag and drop element A into container B");
+        dragAndDropPage.dragElementToPosition(ELEMENT_A, CONTAINER_A, CONTAINER_B);
+
+        logStep("Step 8: Verify if elements are placed in improper containers");
+        assertFalse("Element A doesn't exist in container B", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_A));
+        assertFalse("Element B doesn't exist in container A", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_B));
+
+        logStep("Drag and drop element B back into container B");
+        dragAndDropPage.dragElementToPosition(ELEMENT_A, CONTAINER_B, CONTAINER_A);
+
+        logStep("Verify if elements are placed in proper containers");
+        assertTrue("Element A doesn't exist in container A", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_A));
+        assertTrue("Element B doesn't exist in container B", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_B));
+    }
+
+}
+
+
+
+

This example shows how to select an element from the dropdown list.

+
+
+
+example16 +
+
+
+

Check in the developer mode how a Dropdown List’s content has been organized.

+
+
+
+example17 +
+
+
+

Notice that the Dropdown Options have different attributes, such as "disabled" or "selected".

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click the Dropdown link and go to the subpage

    +
  4. +
  5. +

    Select first dropdown Option

    +
  6. +
  7. +

    Check if Option 1 is selected

    +
  8. +
  9. +

    Select second dropdown Option

    +
  10. +
  11. +

    Check if Option 2 is selected

    +
  12. +
+
+
+
+== Page Class +
+
+
public class DropdownPage extends BasePage {
+
+    private static final By dropdownListSelector = By.cssSelector("#dropdown");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DROPDOWN.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Dropdown List' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DROPDOWN.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Selects doropdown's value by given index.
+     *
+     * @param index Index of option to be selected
+     */
+    public void selectDropdownValueByIndex(int index) {
+        getDriver().elementDropdownList(dropdownListSelector)
+                .selectDropdownByIndex(index);
+    }
+
+    /**
+     * Returns text value of first selected dropdown's option.
+     *
+     * @return String object representing value of dropdown's option
+     */
+    public String getSelectedDropdownValue() {
+        return getDriver().elementDropdownList(dropdownListSelector)
+                .getFirstSelectedOptionText();
+    }
+}
+
+
+
+
+== DropdownListElement class +
+

DropdownListElement is MrChecker’s class, which contains methods for performing actions on dropdown lists:

+
+
+
+
elementDropdownList() - returns DropdownListElement Object
+
+
+
+
+== Test Class +
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class DropdownTest extends TheInternetBaseTest {
+
+    private static final String expectedFirstOptionValue  = "Option 1";
+    private static final String expectedSecondOptionValue = "Option 2";
+    private static DropdownPage dropdownPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        dropdownPage = shouldTheInternetPageBeOpened().clickDropdownLink();
+
+        logStep("Verify if Dropdown page is opened");
+        assertTrue("Unable to open Dropdown page", dropdownPage.isLoaded());
+    }
+
+    @Test
+    public void shouldGetExpectedDropdownTextOptionAfterSelection() {
+
+        logStep("Select first drodown option");
+        dropdownPage.selectDropdownValueByIndex(1);
+
+        logStep("Verify if selected option text is equal to the expected one");
+        assertEquals("Selected value is different than expected", expectedFirstOptionValue,
+                dropdownPage.getSelectedDropdownValue());
+
+        logStep("Select first drodown option");
+        dropdownPage.selectDropdownValueByIndex(2);
+
+        logStep("Verify if selected option text is equal to the expected one");
+        assertEquals("Selected value is different than expected", expectedSecondOptionValue,
+                dropdownPage.getSelectedDropdownValue());
+    }
+
+}
+
+
+
+

This case shows how to compare dynamic content.

+
+
+
+example18 +
+
+
+

Note that after site refresh, some of the content is different. You can see in the browser’s developer mode how the text and image sources are being changed.

+
+
+
+example19 +
+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Dynamic Content link and load subpage

    +
  4. +
  5. +

    Save page images sources and descriptions before the refresh

    +
  6. +
  7. +

    Refresh page

    +
  8. +
  9. +

    Save page image sources and their descriptions after refresh

    +
  10. +
  11. +

    Compare page content before and after refresh and verify if it’s different

    +
  12. +
+
+
+
+== Page Class +
+
+
public class DynamicContentPage extends BasePage {
+
+    private static final By imagesLinksSelector        = By.cssSelector("div#content > div.row img");
+    private static final By imagesDescriptionsSelector = By.cssSelector("div#content > div.row div.large-10");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DYNAMIC_CONTENT.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Dynamic Content' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DYNAMIC_CONTENT.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns list of picture descriptions being present on the web page.
+     *
+     * @return List of String objects representing descriptions
+     */
+    public List<String> getDescriptions() {
+        return new ListElements(imagesDescriptionsSelector).getTextList();
+    }
+
+    /**
+     * Returns a list of image links being present on the web page.
+     *
+     * @return List of String objects representing paths to pictures
+     */
+    public List<String> getImageLinks() {
+        return new ListElements(imagesLinksSelector)
+                .getList()
+                .stream()
+                .map(element -> element.getAttribute("src"))
+                .collect(Collectors.toList());
+    }
+}
+
+
+
+
+== ListElements +
+

ListElements is MrChecker collection which can store WebElement Objects. Constructing ListElements with cssSelector allows you to store every element on the page which fits the selector. Example methods:

+
+
+
+
getList() -  returns WebElements list,
+getTextList() - returns list of contents of each Element,
+getSize() - returns number of stored Elements
+The getImageLinks() example shows how to get a list of specified Elements' attributes.
+
+
+
+
+== Test Class +
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class DynamicContentTest extends TheInternetBaseTest {
+
+    private static DynamicContentPage dynamicContentPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        dynamicContentPage = shouldTheInternetPageBeOpened().clickDynamicContentLink();
+
+        logStep("Verify if Dynamic Content page is opened");
+        assertTrue("Unable to open Dynamic Content page", dynamicContentPage.isLoaded());
+    }
+
+    @Test
+    public void shouldImagesAndDescriptionsDifferAfterRefresh() {
+
+        logStep("Read images and descriptions before refresh");
+        List<String> descriptionsBeforeRefresh = dynamicContentPage.getDescriptions();
+        List<String> imagesBeforeRefresh = dynamicContentPage.getImageLinks();
+
+        logStep("Refres page");
+        dynamicContentPage.refreshPage();
+        assertTrue("The Dynamic Content page hasn't been refreshed", dynamicContentPage.isLoaded());
+
+        logStep("Read images and descriptions after refresh");
+        List<String> descriptionsAfterRefresh = dynamicContentPage.getDescriptions();
+        List<String> imagesAfterRefresh = dynamicContentPage.getImageLinks();
+
+        logStep("Verify if descriptions are different after refresh");
+        assertEquals("Different number of descriptions before and after refresh",
+                descriptionsAfterRefresh.size(), descriptionsBeforeRefresh.size());
+
+        boolean diversity = false;
+        for (int i = 0; i < descriptionsAfterRefresh.size(); i++) {
+            if (!descriptionsAfterRefresh.get(i)
+                    .equals(descriptionsBeforeRefresh.get(i))) {
+                diversity = true;
+                break;
+            }
+        }
+        assertTrue("There are no differences between descriptions before and after refresh",
+                diversity);
+
+        logStep("Verify if images are different after refresh");
+        assertEquals("Different number of descriptions before and after refresh",
+                imagesAfterRefresh.size(), imagesBeforeRefresh.size());
+
+        diversity = false;
+        for (int i = 0; i < imagesAfterRefresh.size(); i++) {
+            if (!imagesAfterRefresh.get(i)
+                    .equals(imagesBeforeRefresh.get(i))) {
+                diversity = true;
+                break;
+            }
+        }
+        assertTrue("There are no differences between images before and after refresh", diversity);
+    }
+}
+
+
+
+

In the test method, during differences verification, the goal is to compare every element from the first and second list and find first diversity.

+
+
+

This example shows how to test a page with dynamically loading content. Some elements don’t load during page loading, but during JavaScript execution.

+
+
+
+example23 +
+
+
+

Go to Example 1:

+
+
+
+example24 +
+
+
+

Click "start" and see what happens:

+
+
+
+example25 +
+
+
+

When loading ends, you should see the following message:

+
+
+
+example26 +
+
+
+

In the developer mode, you can see that the element with the "Hello World!" message exists in page DOM but it’s not displayed. However, the loading bar does not exist there - it’s created by JavaScript. The script is also visible in developer mode:

+
+
+
+example27 +
+
+
+

After clicking the "Start" button, the element "Loading" is created by the script, and the "Start" button becomes invisible. When loading ends, the "Hello World" message is displayed and the loading bar is hidden. Follow the changes in the developer mode:

+
+
+
+example28 +
+
+
+

Go to example 2: +From a user perspective, there is no difference in page functioning. However, in this case the element with the "Hello World!" message does not exist on the page before clicking "Start". It’s created by the script.

+
+
+
+example29 +
+
+
+

After clicking "Start", the element with the loading bar is created.

+
+
+
+example30 +
+
+
+

After a certain time, the loading bar becomes invisible, and then the script creates "Hello World!" element and displays it.

+
+
+
+example31 +
+
+
+
+== Page Class +
+
+
public class DynamicLoadingPage extends BasePage {
+
+    private static final By selectorExampleOneLink     =
+            By.cssSelector("a[href*='dynamic_loading/1']");
+    private static final By selectorExampleTwoLink     =
+            By.cssSelector("a[href*='dynamic_loading/2']");
+    private static final By selectorDynamicLoadingText = By.cssSelector("div#content h3");
+    private static final By selectorStartButton        = By.cssSelector("div#start button");
+    private static final By selectorLoadingBar         = By.cssSelector("div#loading");
+    private static final By selectorExampleText        = By.cssSelector("div#finish h4");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DYNAMIC_LOADING.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Dynamically Loaded Page Elements' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DYNAMIC_LOADING.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns information if dynamic loading message is visible or not.
+     *
+     * @return true if dynamic loading message was found on web page.
+     */
+    public boolean isDynamicLoadingMessageVisible() {
+        return getDriver().findElementDynamic(selectorDynamicLoadingText)
+                .isDisplayed();
+    }
+
+    /**
+     * Clicks Example 1 link.
+     */
+    public void clickExampleOneLink() {
+        getDriver().findElementDynamic(selectorExampleOneLink)
+                .click();
+    }
+
+    /**
+     * Clicks Example 2 link.
+     */
+    public void clickExampleTwoLink() {
+        getDriver().findElementDynamic(selectorExampleTwoLink)
+                .click();
+    }
+
+    /**
+     * Returns information if Start button is visible or not.
+     *
+     * @return true if Start button was found on web page.
+     */
+    public boolean isStartButtonVisible() {
+        return getDriver().findElementDynamic(selectorStartButton)
+                .isDisplayed();
+    }
+
+    /**
+     * Clicks Start button.
+     */
+    public void clickStartButton() {
+        getDriver().findElementDynamic(selectorStartButton)
+                .click();
+    }
+
+    /**
+     * Waits until WebElement representing waiting bar disappears and returns example text.
+     *
+     * @param waitTime The amount of time designated for waiting until waiting bar disappears.
+     * @return String representing example's text.
+     */
+    public String getExampleOneDynamicText(int waitTime) {
+        WebDriverWait wait = new WebDriverWait(getDriver(), waitTime);
+        wait.until((Function<? super WebDriver, Boolean>)
+                ExpectedConditions.invisibilityOfElementLocated(selectorLoadingBar));
+        return getDriver().findElementDynamic(selectorExampleText)
+                .getText();
+    }
+
+    /**
+     * Returns example text.
+     * <p>
+     * Waits until WebElement representing waiting bar disappear. Then waits until example text
+     * shows up.
+     * And after that returns example text.
+     * </p>
+     *
+     * @param waitTime The amount of time designated for waiting until waiting bar disappears and
+     * example text shows.
+     * @return String representing example's text.
+     */
+    public String getExampleTwoDynamicText(int waitTime) {
+        WebDriverWait wait = new WebDriverWait(getDriver(), waitTime);
+        wait.until((Function<? super WebDriver, Boolean>)
+                ExpectedConditions.invisibilityOfElementLocated(selectorLoadingBar));
+        wait.until((Function<? super WebDriver, WebElement>)
+                ExpectedConditions.visibilityOfElementLocated(selectorExampleText));
+        return getDriver().findElementDynamic(selectorExampleText)
+                .getText();
+    }
+
+}
+
+
+
+
+== WebDriverWait +
+

This class performs waiting for actions using Selenium Web Driver:

+
+
+
    +
  • +

    WebDriverWait(WebDriver driver, long timeOutInSeconds) - constructor, first parameter takes WebDriver, in a second you can specify a timeout in seconds. +FluentWait method:

    +
  • +
  • +

    until(Function<? super T, V> isTrue) - waits until condition function given as parameter returns expected value. If waiting time reaches timeout, it throws timeoutException.

    +
  • +
+
+
+

MrChecker implements various condition functions in the ExpectedConditions class :

+
+
+
    +
  • +

    visibilityOfElementLocated(By selector) - returns WebElement if it’s visible

    +
  • +
  • +

    invisibilityOfElementLocated(By selector) - returns true if Element under given selector is invisible

    +
  • +
+
+
+

WebDriver also has methods which wait for some conditions:

+
+
+
    +
  • +

    waitForElement(By selector)

    +
  • +
  • +

    waitForElementVisible(By selector)

    +
  • +
  • +

    waitUntilElementClickable(By selector)

    +
  • +
+
+
+

It’s possible to write your own condition function e.g.:

+
+
+
+
  public static ExpectedCondition<Boolean> invisibilityOfElementLocated(final By locator) {
+    return new ExpectedCondition<Boolean>() {
+      @Override
+      public Boolean apply(WebDriver driver) {
+        try {
+          return !(findElement(locator, driver).isDisplayed());
+        } catch (NoSuchElementException e) {
+          return true;
+        } catch (StaleElementReferenceException e) {
+          return true;
+        }
+      }
+    };
+  }
+
+
+
+

Or as a lambda expression:

+
+
+
+
        WebDriverWait wait = new WebDriverWait(getDriver(), waitTime);
+        wait.until((WebDriver driver) -> {
+            try {
+                return !(driver.findElement(selectorExampleText)
+                        .isDisplayed());
+            } catch (NoSuchElementException e) {
+                return true;
+            } catch (StaleElementReferenceException e) {
+                return true;
+            }
+        });
+
+
+
+
+== Test Class +
+

Case 1 steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Dynamic Loading link and go to a subpage with examples

    +
  4. +
  5. +

    Check if the page is loaded and "Dynamically Loaded Page Elements" header is visible

    +
  6. +
  7. +

    Click Example 1 link and load site

    +
  8. +
  9. +

    Verify if the "Start" button is visible

    +
  10. +
  11. +

    Click "Start"

    +
  12. +
  13. +

    Wait for the loading bar to disappear and check if the displayed message is as it should be

    +
  14. +
  15. +

    Go back to Dynamic Loading page

    +
  16. +
+
+
+

Case 2 steps:

+
+
+
    +
  1. +

    Check if the page is loaded and "Dynamically Loaded Page Elements" header is visible

    +
  2. +
  3. +

    Click Example 2 link and load site

    +
  4. +
  5. +

    Verify if the "Start" button is visible

    +
  6. +
  7. +

    Click "Start"

    +
  8. +
  9. +

    Wait for the loading bar to disappear

    +
  10. +
  11. +

    Wait for the message to appear and check if it is as it should be

    +
  12. +
  13. +

    Go back to Dynamic Loading page

    +
  14. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class DynamicLoadingTest extends TheInternetBaseTest {
+
+    private static final int    EXAMPLE_WAITING_TIME = 30;
+    private static final String EXAMPLE_TEXT         = "Hello World!";
+
+    private static DynamicLoadingPage dynamicLoadingPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        dynamicLoadingPage = shouldTheInternetPageBeOpened().clickDynamicLoadingLink();
+    }
+
+    @Override
+    public void setUp() {
+
+        logStep("Verify if Dynamic Loading page is opened");
+        assertTrue("Unable to open Dynamic Loading page", dynamicLoadingPage.isLoaded());
+
+        logStep("Verify if dynamic loading message is visible");
+        assertTrue("Dynamic loading message is invisible",
+                dynamicLoadingPage.isDynamicLoadingMessageVisible());
+    }
+
+    @Test
+    public void shouldExampleTextBeDisplayedAterRunExampleOne() {
+        logStep("Click Example 1 link");
+        dynamicLoadingPage.clickExampleOneLink();
+
+        logStep("Verify if Example 1 link opened content");
+        assertTrue("Fail to load Example 1 content", dynamicLoadingPage.isStartButtonVisible());
+
+        logStep("Click Start button");
+        dynamicLoadingPage.clickStartButton();
+
+        logStep("Verify if expected text is displayed on the screen");
+        assertEquals("Fail to display example text", EXAMPLE_TEXT,
+                dynamicLoadingPage.getExampleOneDynamicText(EXAMPLE_WAITING_TIME));
+    }
+
+    @Test
+    public void shouldExampleTextBeDisplayedAterRunExampleTwo() {
+        logStep("Click Example 2 link");
+        dynamicLoadingPage.clickExampleTwoLink();
+
+        logStep("Verify if Example 2 link opened content");
+        assertTrue("Fail to load Example 2 content", dynamicLoadingPage.isStartButtonVisible());
+
+        logStep("Click Start button");
+        dynamicLoadingPage.clickStartButton();
+
+        logStep("Verify if expected text is displayed on the screen");
+        assertEquals("Fail to display example text", EXAMPLE_TEXT,
+                dynamicLoadingPage.getExampleTwoDynamicText(EXAMPLE_WAITING_TIME));
+    }
+
+    @Override
+    public void tearDown() {
+        logStep("Click back to reset Dynamic Loading page");
+        BasePage.navigateBack();
+    }
+
+}
+
+
+
+

Perform both cases running Test Class as JUnit Test.

+
+
+

WARNING: In this example, there is a visible loading bar signaling that content is loading. On many websites, elements are created by scripts without any clear indication. This may cause problems with test stability. When your tests aren’t finding page elements, try to add wait functions with a short timeout.

+
+
+
+example32 +
+
+
+

This case shows how to perform mouse actions and test modal windows.

+
+
+

After you move the mouse cursor out of the website, you should see a new window appearing:

+
+
+
+example33 +
+
+
+

Check in the browser’s developer mode if this window exists in Page DOM

+
+
+
+example34 +
+
+
+

Before you move the mouse out, the window exists, but it’s not displayed.

+
+
+

When the mouse is moved, JavaScript changes display attribute. It also hides window after clicking "Close".

+
+
+
+example35 +
+
+
+
+== Page Class +
+
+
public class ExitIntentPage extends BasePage {
+
+    private static final String MODAL_WINDOW_HIDDEN           = "display: none;";
+    private static final String MODAL_WINDOW_DISPLAYED        = "display: block;";
+    private static final String MODAL_WINDOW_STYLE_ATTRIBUTTE = "style";
+
+    private static final By selectorModalWindow            = By.cssSelector("div#ouibounce-modal");
+    private static final By selectorExitIntentText         = By.cssSelector("div#content h3");
+    private static final By selectorModalWindowTitle       = By.cssSelector("h3");
+    private static final By selectorModalWindowCloseButton = By.cssSelector("div.modal-footer > p");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.EXIT_INTENT.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Exit Intent' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.EXIT_INTENT.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns information if exit intent message is visible or not.
+     *
+     * @return true if exit intent message was found on web page.
+     */
+    public boolean isIntentMessageVisible() {
+        return getDriver().findElementDynamic(selectorExitIntentText)
+                .isDisplayed();
+    }
+
+    /**
+     * Returns information if modal window is hidden.
+     *
+     * @return true if modal window is hidden.
+     */
+    public boolean isModalWindowHidden() {
+        return getDriver().findElementDynamic(selectorModalWindow)
+                .getAttribute(MODAL_WINDOW_STYLE_ATTRIBUTTE)
+                .equals(MODAL_WINDOW_HIDDEN);
+    }
+
+    /**
+     * Returns information if modal window is showed on web page.
+     *
+     * @return true if modal window is displayed.
+     */
+    public boolean isModalWindowVisible() {
+        return getDriver().findElementDynamic(selectorModalWindow)
+                .getAttribute(MODAL_WINDOW_STYLE_ATTRIBUTTE)
+                .equals(MODAL_WINDOW_DISPLAYED);
+    }
+
+    /**
+     * Returns information if modal window title is shown and correct.
+     *
+     * @param expectedValue String representing expected value of modal window's title.
+     * @return true if modal window's title is equal to expected value.
+     */
+    public boolean verifyModalWindowTitle(String expectedValue) {
+        return getDriver().elementLabel(new ByChained(selectorModalWindow,
+                selectorModalWindowTitle))
+                .getText()
+                .equals(expectedValue);
+    }
+
+    /**
+     * Closes modal window by pressing 'close' button.
+     */
+    public void closeModalWindow() {
+        getDriver().elementButton(new ByChained(selectorModalWindow,
+                selectorModalWindowCloseButton))
+                .click();
+    }
+
+    /**
+     * Moves mouse pointer to the top middle of screen, then to the centre of screen and
+     * again to the top.
+     * <p>
+     * This move simulates leaving the viewport and encourages the modal to show up. There is
+     * java.awt.Robot used
+     * to move mouse pointer out of the viewport. There are timeouts used to let the browser detect
+     * mouse move.
+     * </p>
+     *
+     * @see java.awt.Robot
+     */
+    public void moveMouseOutOfViewport() {
+        Robot robot;
+        Dimension screenSize = getDriver().manage()
+                .window()
+                .getSize();
+        int halfWidth = new BigDecimal(screenSize.getWidth() / 2).intValue();
+        int halfHeight = new BigDecimal(screenSize.getHeight() / 2).intValue();
+
+        try {
+            robot = new Robot();
+            robot.mouseMove(halfWidth, 1);
+            getDriver().manage()
+                    .timeouts()
+                    .implicitlyWait(1, TimeUnit.SECONDS);
+            robot.mouseMove(halfWidth, halfHeight);
+            getDriver().manage()
+                    .timeouts()
+                    .implicitlyWait(1, TimeUnit.SECONDS);
+            robot.mouseMove(halfWidth, 1);
+        } catch (AWTException e) {
+            BFLogger.logError("Unable to connect with remote mouse");
+            e.printStackTrace();
+        }
+    }
+}
+
+
+
+
+== Attributes +
+

Elements on pages have attributes like "id", "class", "name", "style" etc. In order to check them, use method getAttribute(String name). In this case attribute "style" determinates if the element is displayed.

+
+
+
+== Robot +
+

Robot class can perform mouse movement. Method mouseMove(int x, int y) moves the remote mouse to given coordinates.

+
+
+
+== Manage Timeouts +
+

manage().timeouts() methods allows you to change WebDriver timeouts values such as:

+
+
+
    +
  • +

    pageLoadTimeout(long time, TimeUnit unit) - the amount of time to wait for a page to load before throwing an exception

    +
  • +
  • +

    setScriptTimeout(long time, TimeUnit unit) - the amount of time to wait for finish execution of a script before throwing an exception

    +
  • +
  • +

    implicitlyWait(long time, TimeUnit unit) - the amount of time the driver should wait when searching for an element if it is not immediately present. After that time, it throws an exception.

    +
  • +
+
+
+

Changing timeouts can improve test stability but can also make them run slower.

+
+
+
+== Test Class +
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Exit Intent link and load subpage

    +
  4. +
  5. +

    Check if the page is loaded and "Exit Intent" message is visible

    +
  6. +
  7. +

    Verify if Modal Window is hidden

    +
  8. +
  9. +

    Move mouse out of the viewport

    +
  10. +
  11. +

    Check if Modal Window is visible

    +
  12. +
  13. +

    Verify if Modal Window title is correct

    +
  14. +
  15. +

    Click 'close' button

    +
  16. +
  17. +

    Again verify if Modal Window is hidden

    +
  18. +
+
+
+
+
@Category({ TestsLocal.class, TestsNONParallel.class })
+public class ExitIntentTest extends TheInternetBaseTest {
+
+    private static final String MODAL_WINDOW_TITLE = "This is a modal window";
+
+    private static ExitIntentPage exitIntentPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        exitIntentPage = shouldTheInternetPageBeOpened().clickExitIntentLink();
+
+        logStep("Verify if Exit Intent page is opened");
+        assertTrue("Unable to open Exit Intent page", exitIntentPage.isLoaded());
+
+        logStep("Verify if exit intent message is visible");
+        assertTrue("Exit intent message is not visible", exitIntentPage.isIntentMessageVisible());
+    }
+
+    @Test
+    public void shouldModalWindowAppearWhenMouseMovedOutOfViewportTest() {
+
+        logStep("Verify if modal window is hidden");
+        assertTrue("Fail to hide modal window", exitIntentPage.isModalWindowHidden());
+
+        logStep("Move mouse pointer out of viewport");
+        exitIntentPage.moveMouseOutOfViewport();
+
+        logStep("Verify if modal window showed up");
+        assertTrue("Fail to show up modal window", exitIntentPage.isModalWindowVisible());
+
+        logStep("Verify if modal window title displays properly");
+        assertTrue("Fail to display modal window's title",
+                exitIntentPage.verifyModalWindowTitle(MODAL_WINDOW_TITLE.toUpperCase()));
+
+        logStep("Close modal window");
+        exitIntentPage.closeModalWindow();
+
+        logStep("Verify if modal window is hidden again");
+        assertTrue("Fail to hide modal window", exitIntentPage.isModalWindowHidden());
+    }
+}
+
+
+
+

Remember not to move mouse manually during test execution.

+
+
+
+example36 +
+
+
+

This example shows how to check if file downloads properly.

+
+
+

After clicking on one of these links, a specific file should be downloaded to your computer.

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click on the File Download link and open subpage

    +
  4. +
  5. +

    Click on "some-file.txt" download link and download file

    +
  6. +
  7. +

    Check if the file exists in the appropriate folder

    +
  8. +
  9. +

    Delete the file

    +
  10. +
  11. +

    Check if the file doesn’t exist in the folder

    +
  12. +
+
+
+
+== Page Class +
+
+
public class FileDownloadPage extends BasePage {
+
+    private static final By selectorSomeFileTxt = By.cssSelector("a[href*=some-file]");
+
+    private final String DOWNLOAD_DIR = System.getProperty("java.io.tmpdir");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DOWNLOAD.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'File Downloader' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DOWNLOAD.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Verifies if the chosen file is already downloaded and if not, downloads it .
+     * Throws RuntimeException otherwise.
+     *
+     * @return Downloaded file
+     */
+    public File downloadTextFile() {
+        String nameOfDownloadFile = getNameOfDownloadFile();
+        File fileToDownload = new File(DOWNLOAD_DIR + nameOfDownloadFile);
+
+        if (fileToDownload.exists()) {
+            throw new RuntimeException("The file that you want to download already exists. "
+                    + "Please remove it manually. Path to the file: " + fileToDownload.getPath());
+        }
+
+        getDriver().elementButton(selectorSomeFileTxt)
+                .click();
+
+        waitForFileDownload(2000, fileToDownload);
+        return fileToDownload;
+    }
+
+    private void waitForFileDownload(int totalTimeoutInMillis, File expectedFile) {
+        FluentWait<WebDriver> wait = new FluentWait<WebDriver>(getDriver())
+                .withTimeout(totalTimeoutInMillis, TimeUnit.MILLISECONDS)
+                .pollingEvery(200, TimeUnit.MILLISECONDS);
+
+        wait.until((WebDriver wd) -> expectedFile.exists());
+    }
+
+    private String getNameOfDownloadFile() {
+        String urlToDownload = getDriver().findElementDynamic(selectorSomeFileTxt)
+                .getAttribute("href");
+        String[] urlHierachy = urlToDownload.split("/");
+        return urlHierachy[urlHierachy.length - 1];
+    }
+}
+
+
+
+

Use FluentWait class and create an expected condition using a lambda expression to wait until the file downloads.

+
+
+

To perform operations on files, use java File class. To get a file name, find it in download URL.

+
+
+
+== Test Class +
+
+
@Category({ TestsLocal.class, TestsNONParallel.class })
+public class FileDownloadTest extends TheInternetBaseTest {
+
+    private static FileDownloadPage fileDownloadPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        fileDownloadPage = shouldTheInternetPageBeOpened().clickFileDownloadLink();
+
+        logStep("Verify if File Download page is opened");
+        assertTrue("Unable to open File Download page", fileDownloadPage.isLoaded());
+    }
+
+    @Test
+    public void shouldfileBeDownloaded() {
+
+        logStep("Download the some-file.txt");
+        File downloadedFile = fileDownloadPage.downloadTextFile();
+
+        logStep("Verify if downloaded file exists");
+        assertTrue("Downloaded file does not exist", downloadedFile.exists());
+
+        logStep("Remove downloaded file");
+        downloadedFile.delete();
+
+        logStep("Verify if downloaded file has been removed");
+        assertFalse("Downloaded file still exists", downloadedFile.exists());
+    }
+}
+
+
+
+
+example37 +
+
+
+

This case shows how to pass through the standard authentication page.

+
+
+

When you enter the correct credentials, you should see the next page:

+
+
+
+example38 +
+
+
+

If user data is wrong, an appropriate message appears:

+
+
+
+example39 +
+
+
+
+== Page Class +
+
+
public class FormAuthenticationPage extends BasePage {
+
+    private final static By selectorInputUsername     = By.cssSelector("#username");
+    private final static By selectorInputUserPassword = By.cssSelector("#password");
+    private final static By selectorLoginMessage      = By.cssSelector("#flash");
+    private final static By selectorLoginButton       = By.cssSelector("#login > button > i");
+    private final static By selectorLogoutButton      = By.cssSelector("#content > div > a ");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.LOGIN.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Login Page' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() + PageSubURLsProjectYEnum.LOGIN.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Sets user name to designated form's field.
+     *
+     * @param username String representing a user's name
+     * @return FormAuthenticationPage object with user name set to the given one
+     */
+    public FormAuthenticationPage setUsername(String username) {
+        InputTextElement elementInputUsername = new InputTextElement(selectorInputUsername);
+        elementInputUsername.clearInputText();
+        elementInputUsername.setInputText(username);
+        return this;
+    }
+
+    /**
+     * Sets user password to designated form's field.
+     *
+     * @param userPassword String representing a user's password
+     * @return FormAuthenticationPage object with user's password set to the given one
+     */
+    public FormAuthenticationPage setUserPassword(String userPassword) {
+        InputTextElement elementInputPassword = new InputTextElement(selectorInputUserPassword);
+        elementInputPassword.clearInputText();
+        elementInputPassword.setInputText(userPassword);
+        return this;
+    }
+
+    /**
+     * Returns login message.
+     *
+     * @return String object representing the message returned after login operation is performed
+     */
+    public String getLoginMessageText() {
+        return new LabelElement(selectorLoginMessage).getText();
+    }
+
+    /**
+     * Clicks 'Login' button.
+     */
+    public void clickLoginButton() {
+        new Button(selectorLoginButton).click();
+    }
+
+    /**
+     * Clicks 'Logout' button.
+     */
+    public void clickLogoutButton() {
+        new Button(selectorLogoutButton).click();
+    }
+}
+
+
+
+
+== == InputTextElement +
+

Use methods from this class to perform actions on text fields:

+
+
+
    +
  • +

    clearInputText() - remove all text from selected input field

    +
  • +
  • +

    setInputText(String text) - enter given text

    +
  • +
+
+
+
+== == LabelElement +
+
    +
  • +

    String getText() method returns visible text from label

    +
  • +
+
+
+
+== TestClass +
+

Prepare six test cases:

+
+
+
    +
  1. +

    Try to login with empty user data and check if the error message appears

    +
  2. +
  3. +

    Try to login with empty username and valid password and check if the error message appears

    +
  4. +
  5. +

    Try to login with a valid username and empty password and check if the error message appears

    +
  6. +
  7. +

    Try to login with invalid username and invalid password and check if the error message appears

    +
  8. +
  9. +

    Try to login with a valid username and valid password and check if success login message appears, then log out

    +
  10. +
  11. +

    Try to login with a valid username and valid password and check if success login message appears, then log out and check if success logout message is displayed

    +
  12. +
+
+
+

Before all tests: Open The Internet Main Page

+
+
+

Before each case: Click on the Form Authentication link and open login page

+
+
+

After each case: Go back to The Internet Main Page

+
+
+
+
@Category({ TestsLocal.class, TestsNONParallel.class })
+public class FormAuthenticationTest extends TheInternetBaseTest {
+
+    private static FormAuthenticationPage formAuthenticationPage;
+
+    private String errorUsernameMessage = "Your username is invalid!\n" + "×";
+    private String errorPasswordMessage = "Your password is invalid!\n" + "×";
+    private String loginMessage         = "You logged into a secure area!\n" + "×";
+    private String logoutMessage        = "You logged out of the secure area!\n" + "×";
+    private String emptyUsername        = "";
+    private String emptyUserPassword    = "";
+    private String validUsername        = "tomsmith";
+    private String validPassword        = "SuperSecretPassword!";
+    private String randomUsername       = UUID.randomUUID()
+            .toString();
+    private String randomUserPassword   = UUID.randomUUID()
+            .toString();
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        logStep("Open the Url http://the-internet.herokuapp.com/");
+        theInternetPage = new TheInternetPage();
+        theInternetPage.load();
+
+        logStep("Verify if Url http://the-internet.herokuapp.com/ is opened");
+        assertTrue("Unable to load The Internet Page", theInternetPage.isLoaded());
+    }
+
+    @Override
+    public void setUp() {
+        logStep("Click subpage link");
+        formAuthenticationPage = theInternetPage.clickFormAuthenticationLink();
+
+        logStep("Verify if subpage is opened");
+        assertTrue("The Internet subpage: FormAuthenticationPage was not open", formAuthenticationPage.isLoaded());
+    }
+
+    @Test
+    public void shouldErrorMessageBeDisplayedWhenUserLogsWithEmptyData() {
+        logStep("Log user with empty username and password");
+        formAuthenticationPage.setUsername(emptyUsername)
+                .setUserPassword(emptyUserPassword)
+                .clickLoginButton();
+        assertEquals("Unexpectedly user logged in with empty data", errorUsernameMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Test
+    public void shouldErrorMessageBeDisplayedWhenUserLogsWithEmptyUsernameAndValidPassword() {
+        logStep("Log user with empty username and valid password");
+        formAuthenticationPage.setUsername(emptyUsername)
+                .setUserPassword(validPassword)
+                .clickLoginButton();
+        assertEquals("Unexpectedly user logged in with empty username", errorUsernameMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Test
+    public void shouldErrorMessageBeDisplayedWhenUserLogsWithValidUsernameAndEmptyPassword() {
+        logStep("Log user with valid username and empty password");
+        formAuthenticationPage.setUsername(validUsername)
+                .setUserPassword(emptyUserPassword)
+                .clickLoginButton();
+        assertEquals("Unexpectedly user logged in with empty password", errorPasswordMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Test
+    public void shouldErrorMessageBeDisplayedWhenUserLogsWithInvalidUsernameAndInvalidPassword() {
+        logStep("Log user with invalid username and invalid password");
+        formAuthenticationPage.setUsername(randomUsername)
+                .setUserPassword(randomUserPassword)
+                .clickLoginButton();
+        assertEquals("Unexpectedly user logged in with random credentials", errorUsernameMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Test
+    public void shouldUserLogInWithValidCredentials() {
+        logStep("Log user with valid username and valid password");
+        formAuthenticationPage.setUsername(validUsername)
+                .setUserPassword(validPassword)
+                .clickLoginButton();
+        assertEquals("Unable to login user with valid credentials", loginMessage,
+                formAuthenticationPage.getLoginMessageText());
+        logStep("Log out user");
+        formAuthenticationPage.clickLogoutButton();
+    }
+
+    @Test
+    public void shouldUserLogOutAfterProperLogInAndClickLogoutButon() {
+        logStep("Log user with valid username and valid password");
+        formAuthenticationPage.setUsername(validUsername)
+                .setUserPassword(validPassword)
+                .clickLoginButton();
+        assertEquals("Unable to login user with valid credentials", loginMessage,
+                formAuthenticationPage.getLoginMessageText());
+        logStep("Log out user");
+        formAuthenticationPage.clickLogoutButton();
+        assertEquals("User cannot log out after prper log in", logoutMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Override
+    public void tearDown() {
+        logStep("Navigate back to The-Internet page");
+        theInternetPage.load();
+    }
+}
+
+
+
+

After running Test Class, cases might be performed in a different order.

+
+
+
+example40 +
+
+
+

This example shows how to approach elements dynamically appearing after the user’s action.

+
+
+

Move the mouse over an image to see the additional label.

+
+
+
+example41 +
+
+
+

Labels exist in page DOM all the time but their display attributes change. In this case, there is no JavaScript. Elements' visibility is managed by CSS.

+
+
+
+example42 +
+
+
+
+== Page Class +
+
+
public class HoversPage extends BasePage {
+
+    private final static By selectorImages = By.cssSelector("div.figure > img");
+    private final static By selectorNames  = By.cssSelector("div.figcaption h5");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.HOVERS.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Hovers' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.HOVERS.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Moves mouse pointer over an image with given index.
+     *
+     * @param index An index of the picture, where mouse pointer should be moved
+     */
+    public void hoverOverAvatar(int index) {
+        Actions action = new Actions(getDriver());
+        WebElement avatarImage = getDriver().findElementDynamics(selectorImages)
+                .get(index);
+        action.moveToElement(avatarImage)
+                .perform();
+    }
+
+    /**
+     * Returns the information displayed under a picture with given index.
+     *
+     * @param index An index of the picture, where the information should be read
+     * @return String object representing picture's information
+     */
+    public String getAvatarsInformation(int index) {
+        return getDriver().findElementDynamics(selectorNames)
+                .get(index)
+                .getText();
+    }
+}
+
+
+
+
+== == Actions +
+

Actions class contains methods used to execute basic user actions such as mouse moving and clicking or keys sending. Action or actions series will be performed after calling perform() method.

+
+
+
+== Test Class +
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Go to Hovers page

    +
  4. +
  5. +

    Move mouse over random image

    +
  6. +
  7. +

    Check if displayed text is equal to expected.

    +
  8. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class HoversTest extends TheInternetBaseTest {
+
+    private static HoversPage    hoversPage;
+    private final String        names[]    = { "name: user1", "name: user2", "name: user3" };
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        hoversPage = shouldTheInternetPageBeOpened().clickHoversLink();
+
+        logStep("Verify if Hovers page is opened");
+        assertTrue("Unable to open Hovers page", hoversPage.isLoaded());
+    }
+
+    @Test
+    public void shouldProperInformationBeDisplayedWhenMousePointerHoveredOverRandomElement() {
+        logStep("Hover mouse pointer over random element");
+        int randomIndex = new Random().nextInt(names.length);
+        hoversPage.hoverOverAvatar(randomIndex);
+        assertEquals("Picture's information is different than expected", names[randomIndex],
+                hoversPage.getAvatarsInformation(randomIndex));
+    }
+}
+
+
+
+

Because in this case the tested content is being chosen randomly, each test run could check a different element.

+
+
+
+example43 +
+
+
+

This case shows how to test pop-up JS alerts.

+
+
+

After clicking one of the buttons, an adequate alert should appear.

+
+
+
+example44 +
+
+
+

Performed action will be displayed under "Result" label.

+
+
+

In developer mode, you can view JavaScript which creates alerts.

+
+
+
+example45 +
+
+
+
+== Page Class +
+
+
public class JavaScriptAlertsPage extends BasePage {
+
+    private static final By selectorAlertButton   = By.cssSelector("button[onclick*=jsAlert]");
+    private static final By selectorConfirmButton = By.cssSelector("button[onclick*=jsConfirm]");
+    private static final By selectorPromptButton  = By.cssSelector("button[onclick*=jsPrompt]");
+    private static final By resultLabelSelector   = By.cssSelector("p#result");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.JAVASCRIPT_ALERTS.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'JavaScript Alerts' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.JAVASCRIPT_ALERTS.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Clicks 'JS alert' button.
+     */
+    public void clickAlertButton() {
+        new Button(selectorAlertButton).click();
+        WebDriverWait wait = new WebDriverWait(getDriver(), 2);
+        wait.until(ExpectedConditions.alertIsPresent());
+    }
+
+    /**
+     * Clicks 'JS confirm' button.
+     */
+    public void clickConfirmButton() {
+        new Button(selectorConfirmButton).click();
+        WebDriverWait wait = new WebDriverWait(getDriver(), 2);
+        wait.until(ExpectedConditions.alertIsPresent());
+    }
+
+    /**
+     * Clicks 'JS prompt' button.
+     */
+    public void clickPromptButton() {
+        new Button(selectorPromptButton).click();
+        WebDriverWait wait = new WebDriverWait(getDriver(), 2);
+        wait.until(ExpectedConditions.alertIsPresent());
+    }
+
+    /**
+     * Returns message displayed by popup.
+     *
+     * @return String object representing message displayed by popup
+     */
+    public String readResultLabel() {
+        return new LabelElement(resultLabelSelector).getText();
+    }
+
+    /**
+     * Clicks alert's 'OK' button.
+     */
+    public void clickAlertAccept() {
+        getDriver().switchTo()
+                .alert()
+                .accept();
+    }
+
+    /**
+     * Clicks alert's 'Cancel' button.
+     */
+    public void clickAlertDismiss() {
+        getDriver().switchTo()
+                .alert()
+                .dismiss();
+    }
+
+    /**
+     * Types text into alert's text field.
+     *
+     * @param text String object sent into alert's text field
+     */
+    public void writeTextInAlert(String text) {
+        getDriver().switchTo()
+                .alert()
+                .sendKeys(text);
+    }
+}
+
+
+
+
+== == alert() +
+

Using switchTo() method you can change processed content. switchTo().alert() allows performing actions on appearing alerts such as accepting, dismissing or entering keys.

+
+
+
+== Test Class +
+

Before all tests: Open The Internet Main Page and go to JavaScript Alert page

+
+
+
    +
  1. +

    Click JS Alert button, accept the alert and check if the Result message reflects the performed action

    +
  2. +
  3. +

    Click JS Confirm button, accept alert and check if Result message returns performed action

    +
  4. +
  5. +

    Click JS Confirm button, dismiss alert and check if Result message returns performed action

    +
  6. +
  7. +

    Click JS Prompt button, write random text, accept alert and check if Result message returns performed action with written text

    +
  8. +
  9. +

    Click JS Prompt button, dismiss the alert and check if Result message returns performed action

    +
  10. +
+
+
+

After each case: Refresh Page

+
+
+

After all tests: Navigate back to The Internet Main Page

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class JavaScriptAlertsTest extends TheInternetBaseTest {
+
+    private static JavaScriptAlertsPage javaScriptAlertsPage;
+
+    private final String jsAlertCofirmMessage    = "You successfuly clicked an alert";
+    private final String jsConfirmConfirmMessage = "You clicked: Ok";
+    private final String jsConfirmCancelMessage  = "You clicked: Cancel";
+    private final String jsPromptConfirmMessage  = "You entered: ";
+    private final String jsPromptCancelMessage   = "You entered: null";
+    private final String randomString            = "random";
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        javaScriptAlertsPage = shouldTheInternetPageBeOpened().clickJavaScriptAlertLink();
+
+        logStep("Verify if JavaScript Alerts page is opened");
+        assertTrue("Unable to open JavaScript Alerts page", javaScriptAlertsPage.isLoaded());
+    }
+
+    @AfterClass
+    public static void tearDownAfterClass() {
+        logStep("Navigate back to The-Internet page");
+        BasePage.navigateBack();
+    }
+
+    @Test
+    public void shouldJSAlertCloseWithProperMessageAfterPressOkButton() {
+        logStep("Click Alert button");
+        javaScriptAlertsPage.clickAlertButton();
+
+        logStep("Click 'OK' button on alert");
+        javaScriptAlertsPage.clickAlertAccept();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsAlertCofirmMessage, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Test
+    public void shouldJSConfirmCloseWithProperMessageAfterPressOkButton() {
+        logStep("Click Confirm button");
+        javaScriptAlertsPage.clickConfirmButton();
+
+        logStep("Click 'OK' button on alert");
+        javaScriptAlertsPage.clickAlertAccept();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsConfirmConfirmMessage, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Test
+    public void shouldJSConfirmCloseWithProperMessageAfterPressCancelButton() {
+        logStep("Click Confirm button");
+        javaScriptAlertsPage.clickConfirmButton();
+
+        logStep("Click 'Cancel' button on alert");
+        javaScriptAlertsPage.clickAlertDismiss();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsConfirmCancelMessage, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Test
+    public void shouldJSPromptCloseWithProperMessageAfterPressOKButton() {
+        logStep("Click Prompt button");
+        javaScriptAlertsPage.clickPromptButton();
+
+        logStep("Insert text to alert: " + randomString);
+        javaScriptAlertsPage.writeTextInAlert(randomString);
+
+        logStep("Click 'OK' button on alert");
+        javaScriptAlertsPage.clickAlertAccept();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsPromptConfirmMessage + randomString, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Test
+    public void shouldJSPromptCloseWithProperMessageAfterPressCancelButton() {
+        logStep("Click Prompt button");
+        javaScriptAlertsPage.clickPromptButton();
+
+        logStep("Click 'Cancel' button on alert");
+        javaScriptAlertsPage.clickAlertDismiss();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsPromptCancelMessage, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Override
+    public void tearDown() {
+        logStep("Refresh JavaScriptAlersPage");
+        javaScriptAlertsPage.refreshPage();
+    }
+
+}
+
+
+
+
+example46 +
+
+
+

This simple case shows how to test key presses.

+
+
+

This site uses JavaScript to read the key pressed and display its value.

+
+
+
+example47 +
+
+
+
+== Page Class +
+
+
public class KeyPressesPage extends BasePage {
+
+    private static final By selectorResult = By.cssSelector("#result");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.KEY_PRESS.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Key Presses' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.KEY_PRESS.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Presses given keyboard key.
+     *
+     * @param keyToPress Key to be pressed on keyboard
+     */
+    public void pressKey(String keyToPress) {
+        getAction().sendKeys(keyToPress)
+                .perform();
+    }
+
+    /**
+     * Returns information from web page about pressed keyboard key.
+     *
+     * @return Information from web page about pressed key
+     */
+    public String getPressedKeyInformation() {
+        return getDriver().findElementDynamic(selectorResult)
+                .getText();
+    }
+}
+
+
+
+
+== Test Class +
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Go to Key Presses site

    +
  4. +
  5. +

    Press a key

    +
  6. +
  7. +

    Check if a displayed message contains the pressed key

    +
  8. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class KeyPressesTest extends TheInternetBaseTest {
+
+    private static KeyPressesPage keyPressesPage;
+
+    private final String keyToBePressed  = "Q";
+    private final String expectedMessage = "You entered: Q";
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        keyPressesPage = shouldTheInternetPageBeOpened().clickKeyPressesLink();
+
+        logStep("Verify if Key Presses page is opened");
+        assertTrue("Unable to open Key Presses page", keyPressesPage.isLoaded());
+    }
+
+    @Test
+    public void shouldWebsiteReturnInformationAboutPressedKey() {
+        logStep("Press a keyboard key");
+        keyPressesPage.pressKey(keyToBePressed);
+
+        logStep("Verify if website give valid information about pressed keyboard key");
+        assertEquals("Information about the pressed key is invalid", expectedMessage,
+                keyPressesPage.getPressedKeyInformation());
+    }
+}
+
+
+
+
+example48 +
+
+
+

This simple example shows how to operate on many browser tabs.

+
+
+

When you click the link, a new website will be opened in the second tab.

+
+
+
+example49 +
+
+
+
+== Page Class +
+
+
public class MultipleWindowsPage extends BasePage {
+
+    private final static By selectorLink = By.cssSelector("#content > div > a");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.WINDOW.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Opening a new window' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.WINDOW.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Clicks 'click here' link.
+     *
+     * @return NewWindowPage object
+     */
+    public NewWindowPage clickHereLink() {
+        getDriver().findElementDynamic(selectorLink)
+                .click();
+        getDriver().waitForPageLoaded();
+        return new NewWindowPage();
+    }
+}
+
+
+
+

You also need a second page class for New Window Page. Implement only the required methods.

+
+
+
+
public class NewWindowPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.NEW_WINDOW.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'New window' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.NEW_WINDOW.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+}
+
+
+
+
+== Test Class +
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Go to Multiple Windows Page

    +
  4. +
  5. +

    Click the link

    +
  6. +
  7. +

    Check if a new page is opened in the second tab

    +
  8. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class MultipleWindowsTest extends TheInternetBaseTest {
+
+    private static MultipleWindowsPage    multipleWindowsPage;
+    private static NewWindowPage        newWindowPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        multipleWindowsPage = shouldTheInternetPageBeOpened().clickmultipleWindowsLink();
+
+        logStep("Verify if Multiple Windows page is opened");
+        assertTrue("Unable to open Multiple Windows page", multipleWindowsPage.isLoaded());
+    }
+
+    @Test
+    public void verifyIfNewBrowserWindowOpen() {
+        logStep("Click 'Click here' link");
+        newWindowPage = multipleWindowsPage.clickHereLink();
+
+        logStep("Verify if 'New window page' is opened");
+        assertTrue("Unable to open a new browser window", newWindowPage.isLoaded());
+    }
+}
+
+
+
+
+example50 +
+
+
+

This simple case shows how to approach redirecting links.

+
+
+

After clicking on the link, you will be redirected to Status Codes Page.

+
+
+
+example51 +
+
+
+
+== Page Class + +
+
+== == Redirect Link Page +
+
+
public class RedirectLinkPage extends BasePage {
+
+    private static final By selectorRedirectHere = By.cssSelector("a#redirect");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.REDIRECT.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Redirection' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.REDIRECT.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Clicks 'Redirect here' link.
+     *
+     * @return StatusCodesHomePage object
+     */
+    public StatusCodesHomePage clickRedirectHereLink() {
+        new Button(selectorRedirectHere).click();
+        return new StatusCodesHomePage();
+    }
+}
+
+
+
+
+== == Status Codes Page +
+
+
public class StatusCodesHomePage extends BasePage {
+
+    private static final By selectorLink200Code = By.linkText("200");
+    private static final By selectorLink301Code = By.linkText("301");
+    private static final By selectorLink404Code = By.linkText("404");
+    private static final By selectorLink500Code = By.linkText("500");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.STATUS_CODES.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Status Codes' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.STATUS_CODES.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+}
+
+
+
+
+== Test Class +
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Go to Redirection Page

    +
  4. +
  5. +

    Click the link

    +
  6. +
  7. +

    Check if Status Codes Page is loaded

    +
  8. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class RedirectLinkTest extends TheInternetBaseTest {
+
+    private static RedirectLinkPage    redirectLinkPage;
+    private static StatusCodesHomePage statusCodesHomePage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        redirectLinkPage = shouldTheInternetPageBeOpened().clickRedirectLink();
+
+        logStep("Verify if Redirect Link page is opened");
+        assertTrue("Unable to open Redirect Link page", redirectLinkPage.isLoaded());
+    }
+
+    @Test
+    public void shouldUserBeRedirectedToStatusCodePage() {
+        logStep("Click 'Redirect here' link");
+        statusCodesHomePage = redirectLinkPage.clickRedirectHereLink();
+
+        logStep("Verify redirection to Status Code page");
+        assertTrue("User hasn't been redirected to the expected website",
+                statusCodesHomePage.isLoaded());
+    }
+}
+
+
+
+
+example52 +
+
+
+

This case shows how to move horizontal slider.

+
+
+

You can move the slider by dragging it with a mouse or using arrow keys. The page uses a simple script to get slider position and display set value.

+
+
+
+example53 +
+
+
+
+== Page Class +
+
+
public class HorizontalSliderPage extends BasePage {
+
+    private static final By selectorHorizontalSlider = By.cssSelector("div.sliderContainer");
+    private static final By sliderSelector           = By.cssSelector("input");
+    private static final By valueSelector            = By.cssSelector("#range");
+
+    private HorizontalSliderElement horizontalSlider;
+
+    public HorizontalSliderPage() {
+        horizontalSlider = getDriver().elementHorizontalSlider(selectorHorizontalSlider,
+                sliderSelector, valueSelector, BigDecimal.ZERO, new BigDecimal(5),
+                new BigDecimal(0.5));
+    }
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.HORIZONTAL_SLIDER.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Horizontal Slider' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.HORIZONTAL_SLIDER.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Validates if WebElement representing horizontal slider is visible on the page.
+     *
+     * @return true if horizontal slider is visible, false otherwise.
+     */
+    public boolean isElementHorizontalSliderVisible() {
+        return getDriver().elementHorizontalSlider(selectorHorizontalSlider)
+                .isDisplayed();
+    }
+
+    /**
+     * Returns the value of slider's start position.
+     *
+     * @return BigDecimal representing the lowest possible value of slider.
+     */
+    public BigDecimal getStartPosition() {
+        return horizontalSlider.getMinRange();
+    }
+
+    /**
+     * Returns the value of slider's middle position.
+     *
+     * @return BigDecimal representing the average value between start and end position.
+     */
+    public BigDecimal getMiddlePosition() {
+        return horizontalSlider.getMaxRange()
+                .subtract(horizontalSlider.getMinRange())
+                .divide(new BigDecimal(2));
+    }
+
+    /**
+     * Returns the value of slider's end position.
+     *
+     * @return BigDecimal representing the highest possible value of slider.
+     */
+    public BigDecimal getEndPosition() {
+        return horizontalSlider.getMaxRange();
+    }
+
+    /**
+     * Returns current value of slider's position.
+     *
+     * @return BigDecimal representing current value of slider.
+     */
+    public BigDecimal getCurrentPosition() {
+        return horizontalSlider.getCurrentSliderValue();
+    }
+
+    /**
+     * Sets horizontal slider to a given position using one of the available methods: using keyboard
+     * or using mouse move.
+     *
+     * @param position
+     * @param method
+     */
+    public void setSliderPositionTo(BigDecimal position, int method) {
+        horizontalSlider.setSliderPositionTo(position, method);
+    }
+
+    /**
+     * Verifies the correctness of the given position value and rounds it when necessary.
+     *
+     * @param position
+     * @return Correct value of horizontal slider's position.
+     */
+    public BigDecimal verifyAndCorrectPositionValue(BigDecimal position) {
+        return horizontalSlider.verifyAndCorrectPositionValue(position);
+    }
+}
+
+
+
+
+== == Horizontal Slider Element +
+

This class implements methods which can perform actions on the slider:

+
+
+

Create Slider Object using method:

+
+
+
    +
  • +

    getDriver().elementHorizontalSlider(By sliderContainerSelector, By sliderSelector, By valueSelector, BigDecimal minRange, BigDecimal maxRange, BigDecimal step)

    +
  • +
+
+
+

And use:

+
+
+
    +
  • +

    BigDecimal getMaxRange()

    +
  • +
  • +

    BigDecimal getMinRange()

    +
  • +
  • +

    BigDecimal getCurrentSliderValue()

    +
  • +
  • +

    setSliderPositionTo(BigDecimal position, int method) - moves the slider to a given position. If the position is not valid, it changes it to the nearest proper value. The second parameter determines the movement method: 0 - Keyboard, 1 - Mouse

    +
  • +
  • +

    BigDecimal verifyAndCorrectPositionValue(BigDecimal position) - returns nearest correct position

    +
  • +
+
+
+
+== Test Class +
+

Before all tests: Open The Internet Main Page

+
+
+

Before each case:

+
+
+
    +
  1. +

    Go to Horizontal Slider Page

    +
  2. +
  3. +

    Check if the slider is visible

    +
  4. +
  5. +

    Save start, middle and end position

    +
  6. +
+
+
+

Case 1 - Moving with the keyboard:

+
+
+
    +
  1. +

    Move slider to start position, and check if the current position equals the beginning value

    +
  2. +
  3. +

    Move the slider to middle position, and check if the current position equals the middle value

    +
  4. +
  5. +

    Move slider to end position, and check if the current position equals the end value

    +
  6. +
  7. +

    Try to move slider before start position, and check if the current position equals the beginning value

    +
  8. +
  9. +

    Try to move slider after end position, and check if the current position equals the end value

    +
  10. +
  11. +

    Try to move the slider to an improperly defined position between start and middle, and check if the current position equals the corrected value

    +
  12. +
  13. +

    Try to move the slider to an improperly defined random position, and check if the current position equals the corrected value

    +
  14. +
  15. +

    Move the slider back to start position, and check if the current position equals the beginning value

    +
  16. +
+
+
+

Case 2 - Moving with a mouse: Repeat each Case 1 step using a mouse instead of keyboard

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class SliderTest extends TheInternetBaseTest {
+
+    private static HorizontalSliderPage horizontalSliderPage;
+
+    BigDecimal startPosition;
+    BigDecimal middlePosition;
+    BigDecimal endPosition;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        logStep("Open the Url http://the-internet.herokuapp.com/");
+        theInternetPage = new TheInternetPage();
+        theInternetPage.load();
+
+        logStep("Verify if Url http://the-internet.herokuapp.com/ is opened");
+        assertTrue("Unable to load The Internet Page", theInternetPage.isLoaded());
+    }
+
+    @Override
+    public void setUp() {
+        logStep("Click Horizontal Slider link");
+        horizontalSliderPage = theInternetPage.clickHorizontalSliderLink();
+
+        logStep("Verify if Horizontal Slider page is opened");
+        assertTrue("Unable to load Horizontal Slider page", horizontalSliderPage.isLoaded());
+
+        logStep("Verify if horizontal slider element is visible");
+        assertTrue("Horizontal slider is not visible",
+                horizontalSliderPage.isElementHorizontalSliderVisible());
+
+        startPosition = horizontalSliderPage.getStartPosition();
+        middlePosition = horizontalSliderPage.getMiddlePosition();
+        endPosition = horizontalSliderPage.getEndPosition();
+    }
+
+    @Test
+    public void shouldHorizontalSliderMoveWhenKeyboardArrowButtonsArePressed() {
+        BigDecimal position;
+        logStep("Move slider to start position: " + startPosition);
+        horizontalSliderPage.setSliderPositionTo(startPosition, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider to middle position: " + middlePosition);
+        horizontalSliderPage.setSliderPositionTo(middlePosition, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(middlePosition),
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider to end position: " + endPosition);
+        horizontalSliderPage.setSliderPositionTo(endPosition, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", endPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = startPosition.subtract(BigDecimal.ONE);
+        logStep("Move slider to position before start position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = endPosition.add(BigDecimal.ONE);
+        logStep("Move slider to position after end position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", endPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = middlePosition.divide(new BigDecimal(2));
+        logStep("Move slider to improperly defined position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(position),
+                horizontalSliderPage.getCurrentPosition());
+
+        position = new BigDecimal(new BigInteger("233234"), 5);
+        logStep("Move slider to improperly defined random position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(position),
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider back to start position: " + startPosition);
+        horizontalSliderPage.setSliderPositionTo(startPosition, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+    }
+
+    @Test
+    public void shouldHorizontalSliderMoveWhenMouseButtonIsPressedAndMouseIsMoving() {
+        BigDecimal position;
+        logStep("Move slider to start position: " + startPosition);
+        horizontalSliderPage.setSliderPositionTo(startPosition, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider to middle position: " + middlePosition);
+        horizontalSliderPage.setSliderPositionTo(middlePosition, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(middlePosition),
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider to end position: " + endPosition);
+        horizontalSliderPage.setSliderPositionTo(endPosition, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", endPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = startPosition.subtract(BigDecimal.ONE);
+        logStep("Move slider to position before start position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = endPosition.add(BigDecimal.ONE);
+        logStep("Move slider to position after end position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", endPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = middlePosition.divide(new BigDecimal(2));
+        logStep("Move slider to improperly defined position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(position),
+                horizontalSliderPage.getCurrentPosition());
+
+        position = new BigDecimal(new BigInteger("212348"), 5);
+        logStep("Move slider to improperly defined random position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(position),
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider back to start position: " + startPosition);
+        horizontalSliderPage.setSliderPositionTo(startPosition, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+    }
+}
+
+
+
+
+example54 +
+
+
+

This example shows how to sort and read data from tables.

+
+
+

After clicking on a column header, the data will be sorted descending and after another click sorted ascending by the selected attribute. Watch how both tables' content changes in the page DOM. Sorting is performed by JavaScript functions.

+
+
+
+example55 +
+
+
+
+== Page Class +
+
+
public class SortableDataTablesPage extends BasePage {
+
+    private static final By selectorTable  = By.cssSelector("table.tablesorter");
+    private static final By selectorHeader = By.cssSelector("th");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.SORTABLE_DATA_TABLES.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Data Tables' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.SORTABLE_DATA_TABLES.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Sorts data in given column using ascending order.
+     *
+     * @param columnNumber The number of column where data should be sorted
+     * @param tableNumber  The number of table where data should be sorted
+     */
+    public void sortColumnAscending(int columnNumber, int tableNumber) {
+        WebElement header = this.getTableHeaders(columnNumber, tableNumber);
+        String className = header.getAttribute("class");
+        if (className.contains("headerSortUp") || !className.contains("headerSortDown")) {
+            header.click();
+        }
+    }
+
+    /**
+     * Sorts data in given column using descending order.
+     *
+     * @param columnNumber The number of the column where data should be sorted
+     * @param tableNumber  The number of the table where data should be sorted
+     */
+    public void sortColumnDescending(int columnNumber, int tableNumber) {
+        WebElement header = this.getTableHeaders(columnNumber, tableNumber);
+        String className = header.getAttribute("class");
+        if (!className.contains("headerSortUp")) {
+            header.click();
+            if (!className.contains("headerSortDown")) {
+                header.click();
+            }
+        }
+    }
+
+    /**
+     * Return given column values from chosen table.
+     *
+     * @param columnNumber The number of the column the data should be retrieved from
+     * @param tableNumber  The number of the table the data should be retrieved from
+     * @return list of values from given column
+     */
+    public List<String> getColumnValues(int columnNumber, int tableNumber) {
+        WebElement table = getTable(tableNumber);
+        return JsoupHelper.findTexts(table, By.cssSelector("tr > td:nth-child(" + (columnNumber + 1)
+                + ")"));
+    }
+
+    /**
+     * Returns column's class name.
+     *
+     * @param columnNumber The number of the column to get class number from
+     * @param tableNumber  The number of the table to get column class name from
+     * @return String object representing column's class name
+     */
+    public String readColumnClass(int columnNumber, int tableNumber) {
+        return this.getTableHeaders(columnNumber, tableNumber)
+                .getAttribute("class");
+    }
+
+    private WebElement getTable(int tableNumber) {
+        return new ListElements(selectorTable).getList()
+                .get(tableNumber);
+    }
+
+    private WebElement getTableHeaders(int columnNumber, int tableNumber) {
+        return getTable(tableNumber).findElements(selectorHeader)
+                .get(columnNumber);
+    }
+}
+
+
+
+
+== == Finding values +
+

Using proper selectors, save elements such as tables and their column headers as Web Element lists. Afterwards, you can get the desired element by finding it by index (e.g. table or column number). To get column values, use JsoupHelper, and to check if the column is sorted, get its class attribute.

+
+
+
+== Test Class +
+

Before all tests: Open The Internet Main Page

+
+
+

Before each case: Go to Sortable Data Tables Page

+
+
+

Case 1:

+
+
+
    +
  1. +

    Choose a random table

    +
  2. +
  3. +

    Sort first column "Last Name" in ascending order

    +
  4. +
  5. +

    Check if column header class contains "headerSortDown"

    +
  6. +
  7. +

    Save column content to the List

    +
  8. +
  9. +

    Create List copy and sort it

    +
  10. +
  11. +

    Compare sorted values and values from the table

    +
  12. +
+
+
+

Case 2:

+
+
+
    +
  1. +

    Choose a random table

    +
  2. +
  3. +

    Sort second column "First Name" in descending order

    +
  4. +
  5. +

    Check if column header class contains "headerSortUp"

    +
  6. +
  7. +

    Save column content to the List

    +
  8. +
  9. +

    Create List copy and sort it then reverse it

    +
  10. +
  11. +

    Compare reversed sorted values and values from the table

    +
  12. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class SortableDataTablesTest extends TheInternetBaseTest {
+
+    private static SortableDataTablesPage sortableDataTablesPage;
+
+    private List<String> actualValues;
+    private List<String> expectedValues;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        logStep("Open the Url http://the-internet.herokuapp.com/");
+        theInternetPage = new TheInternetPage();
+        theInternetPage.load();
+
+        logStep("Verify if Url http://the-internet.herokuapp.com/ is opened");
+        assertTrue("Unable to load The Internet Page", theInternetPage.isLoaded());
+    }
+
+    @Override
+    public void setUp() {
+        logStep("Click subpage link");
+        sortableDataTablesPage = theInternetPage.clickSortableDataTablesLink();
+
+        logStep("Verify if subpage is opened");
+        assertTrue("Unable to open Sortable Data Tables page", sortableDataTablesPage.isLoaded());
+    }
+
+    @Test
+    public void shouldLastNameColumnBeOrderedAscendingAfterSort() {
+        int columnNumber = 0;
+        int tableNumber = new Random().nextInt(2);
+
+        logStep("Sort 'Last Name' column");
+        sortableDataTablesPage.sortColumnAscending(columnNumber, tableNumber);
+        assertTrue("Unable to set ascending order for 'Last Name' column",
+                sortableDataTablesPage.readColumnClass(columnNumber, tableNumber)
+                        .contains("headerSortDown"));
+
+        logStep("Verify data order for 'Last Name' column");
+        actualValues = sortableDataTablesPage.getColumnValues(columnNumber, tableNumber);
+        expectedValues = new ArrayList<String>(actualValues);
+        Collections.sort(expectedValues);
+        assertEquals("'Last Name' column is not sorted in ascending order",
+                expectedValues, actualValues);
+    }
+
+    @Test
+    public void shouldFirstNameColumnBeOrderedDescendingAfterSort() {
+        int columnNumber = 1;
+        int tableNumber = new Random().nextInt(2);
+
+        logStep("Sort 'First Name' column");
+        sortableDataTablesPage.sortColumnDescending(columnNumber, tableNumber);
+        assertTrue("Unable to set descending order for 'First Name' column",
+                sortableDataTablesPage.readColumnClass(columnNumber, tableNumber)
+                        .contains("headerSortUp"));
+
+        logStep("Verify data order for 'First Name' column");
+        actualValues = sortableDataTablesPage.getColumnValues(columnNumber, tableNumber);
+        expectedValues = new ArrayList<String>(actualValues);
+        Collections.sort(expectedValues);
+        Collections.reverse(expectedValues);
+        assertEquals("'First Name' column is not sorted in descending order",
+                expectedValues, actualValues);
+    }
+}
+
+
+
+
+example56 +
+
+
+

This example shows how to process HTTP status codes returned by a page

+
+
+

When you click status code link, you will be redirected to the subpage which returns the proper HTTP status code. In order to check what code was returned:

+
+
+
    +
  1. +

    Open developer tools

    +
  2. +
  3. +

    Go to Network tab

    +
  4. +
  5. +

    Click request name

    +
  6. +
  7. +

    Find a code number in Headers section

    +
  8. +
+
+
+
+example57 +
+
+
+
+== Page Class +
+

Add new methods to existing Status Codes Home Page Class

+
+
+
+
public class StatusCodesHomePage extends BasePage {
+
+    private static final By selectorLink200Code = By.linkText("200");
+    private static final By selectorLink301Code = By.linkText("301");
+    private static final By selectorLink404Code = By.linkText("404");
+    private static final By selectorLink500Code = By.linkText("500");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.STATUS_CODES.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Status Codes' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.STATUS_CODES.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Verifies if given link is displayed.
+     *
+     * @param selector Selector of the given link
+     * @return true if link is displayed
+     */
+    public boolean isLinkCodeDisplayed(By selector) {
+        return getDriver().findElementDynamic(selector)
+                .isDisplayed();
+
+    }
+
+    /**
+     * Clicks '200' link.
+     *
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCode200Link() {
+        return clickCodeLink(selectorLink200Code);
+    }
+
+    /**
+     * Clicks '301' link.
+     *
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCode301Link() {
+        return clickCodeLink(selectorLink301Code);
+    }
+
+    /**
+     * Clicks '404' link.
+     *
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCode404Link() {
+        return clickCodeLink(selectorLink404Code);
+    }
+
+    /**
+     * Clicks '500' link.
+     *
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCode500Link() {
+        return clickCodeLink(selectorLink500Code);
+    }
+
+    /**
+     * Clicks code link according to given code number.
+     *
+     * @param code Given code
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCodeLink(String code) {
+        return clickCodeLink(By.linkText(code));
+    }
+
+    private StatusCodesCodePage clickCodeLink(By selector) {
+        String codeNumber = getCodeNumberToCheck(selector);
+        getDriver().findElementDynamic(selector)
+                .click();
+        return new StatusCodesCodePage(codeNumber);
+    }
+
+    private String getCodeNumberToCheck(By selector) {
+        return getDriver().findElementDynamic(selector)
+                .getText();
+    }
+}
+
+
+
+

Create a page class for status codes subpages as well. In the class constructor specify which code number should be returned.

+
+
+
+
public class StatusCodesCodePage extends BasePage {
+
+    private static final By selectorDisplayedText   = By.cssSelector("#content > div > p");
+    private static final By selectorLinkToCodesPage = By.cssSelector("#content > div > p > a");
+
+    private String codeNumber;
+
+    public StatusCodesCodePage(String codeNumber) {
+        this.codeNumber = codeNumber;
+    }
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.STATUS_CODES.getValue() + '/');
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Status Codes' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.STATUS_CODES.getValue() + '/' + codeNumber);
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    public String getCodeNumber() {
+        return codeNumber;
+    }
+
+    /**
+     * Verifies if page is loaded with given code number.
+     *
+     * @param codeNumber Expected code number
+     * @return true if expected code number is loaded with web page
+     */
+    public boolean isLoadedWithStatusCode(String codeNumber) {
+        return getDriver().getCurrentUrl()
+                .equals(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.STATUS_CODES.getValue() + "/" + codeNumber);
+    }
+
+    /**
+     * Returns displayed code number.
+     * <p>
+     * Code number is retrieved from following text displayed on the page:<b>
+     * 'This page returned a *** status code.', where *** represent the code number to be
+     * returned.
+     * </p>
+     *
+     * @return String object representing the displayed code number retrieved from specific sentence.
+     */
+    public String getDisplayedCodeNumber() {
+        return getDriver().findElementDynamic(selectorDisplayedText)
+                .getText()
+                .substring(21, 24);
+    }
+
+    /**
+     * Clicks link to return to 'Code Page'.
+     *
+     * @return StatusCodesHomePage object
+     */
+    public StatusCodesHomePage clickLinkToCodePage() {
+        getDriver().findElementDynamic(selectorLinkToCodesPage)
+                .click();
+        return new StatusCodesHomePage();
+    }
+}
+
+
+
+
+== Test Class +
+

Before all tests: Open The Internet Main Page, go to Status Codes page

+
+
+

Steps:

+
+
+

For each status code

+
+
+
    +
  1. +

    Click code link

    +
  2. +
  3. +

    Check if the page is loaded with an expected code number

    +
  4. +
  5. +

    Check if the displayed code number equals the expected number

    +
  6. +
  7. +

    Go back to Status Codes Home Page

    +
  8. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class StatusCodeTest extends TheInternetBaseTest {
+
+    private static StatusCodesHomePage statusCodesHomePage;
+    private        StatusCodesCodePage statusCodesCodePage;
+
+    private String[] codes = { "200", "301", "404", "500" };
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        statusCodesHomePage = shouldTheInternetPageBeOpened().clickStatusCodesLink();
+
+        logStep("Verify if Status Codes Home page is opened");
+        assertTrue("Unable to open Status Codes Home page", statusCodesHomePage.isLoaded());
+    }
+
+    @Test
+    public void shouldProperCodeBeDisplayedAfterClickCodeLink() {
+
+        for (String code : codes) {
+            logStep("Click link to " + code + " code");
+            statusCodesCodePage = statusCodesHomePage.clickCodeLink(code);
+
+            logStep("Verify if proper web page corresponding to the code is opened");
+            assertTrue("Unable to open proper web page",
+                    statusCodesCodePage.isLoadedWithStatusCode(code));
+
+            logStep("Verify if the displayed code is equal to the expected one");
+            assertEquals(code, statusCodesCodePage.getDisplayedCodeNumber());
+
+            logStep("Click link to come back to 'Status Codes' page");
+            statusCodesCodePage.clickLinkToCodePage();
+        }
+    }
+}
+
+
+
+
+
+

26.27. == First Steps

+
+Page Object +
+

Your Product Under Test will be the following website: http://the-internet.herokuapp.com/

+
+
+

At first, create an Object to represent The Internet Main Page:

+
+
+
+
public class TheInternetPage extends BasePage
+
+
+
+

Each class which extends BasePage class must override three methods:

+
+
+
    +
  • +

    public boolean isLoaded() - returns true if the page is loaded and false if not

    +
  • +
  • +

    public void load() - loads the page

    +
  • +
  • +

    public String pageTitle() - returns page title

    +
  • +
+
+
+
+
public class TheInternetPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        BFLogger.logDebug("The internet page is loaded: " + getDriver().getCurrentUrl());
+        return getDriver().getCurrentUrl()
+                .equals(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'The internet' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+}
+
+
+
+
+Environment Variables +
+

In Page classes, when you load/start a web page, it is bad practice to hardcode a fixed main URL.

+
+
+

Instead of hardcoded main URL variable, you build your Page class with a dynamic variable.

+
+
+
+How to create / update system environment +
+

Dynamic variable values are stored under the path \src\resources\environments\environments.csv.

+
+
+
+image1 +
+
+
+

By default, the environment takes value from DEV column.

+
+
+
+== Access to the external file variables +
+

Create a class GetEnvironmentParam to map values from an external file with Page class:

+
+
+
+
public enum GetEnvironmentParam {
+
+    // Name of enum must be in line with cell name in /src/resources/environments/environment.csv
+    WWW_FONT_URL,
+    TOOLS_QA,
+    WEB_SERVICE,
+    THE_INTERNET_MAIN_PAGE,
+    ELEMENTAL_SELENIUM_PAGE;
+
+    public String getValue() {
+
+        if (null ==  BaseTest.getEnvironmentService()) {
+            throw new BFInputDataException("Environment Parameters class wasn't initialized properly");
+        }
+
+        return BaseTest.getEnvironmentService()
+                .getValue(this.name());
+
+    }
+
+    @Override
+    public String toString() {
+
+        return this.getValue();
+
+    }
+}
+
+
+
+

When you add a new row to environments.csv, also add a new variable to the GetEnvironmentParam class.

+
+
+

In Page class access environmental variable using this method:

+
+
+
+
GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue();
+
+
+
+
+Selectors + +
+
+Create selectors +
+

Create a selector for every interactable element on a webpage using the By type. Find elements and their attributes using the browser developer mode (F12).

+
+
+
+image2 +
+
+
+
+
private static final By abTestLinkSelector               = By.cssSelector("li >
+            a[href*='abtest']");
+    private static final By basicAuthLinkSelector            = By.cssSelector("li >
+            a[href*='basic_auth']");
+    private static final By brokenImageLinkSelector          = By.cssSelector("li >
+            a[href*='broken_images']");
+    private static final By challengingDomLinkSelector       = By.cssSelector("li >
+            a[href*='challenging_dom']");
+    private static final By checkboxesLinkSelector           = By.cssSelector("li >
+            a[href*='checkboxes']");
+    private static final By contextMenuLinkSelector          = By.cssSelector("li >
+            a[href*='context_menu']");
+    private static final By disappearingElementsLinkSelector = By.cssSelector("li >
+            a[href*='disappearing_elements']");
+    private static final By dragAndDropLinkSelector          = By.cssSelector("li >
+            a[href*='drag_and_drop']");
+    private static final By dropdownLinkSelector             = By.cssSelector("li >
+            a[href*='dropdown']");
+    private static final By dynamicContentLinkSelector       = By.cssSelector("li >
+            a[href*='dynamic_content']");
+    private static final By dynamicControlsLinkSelector      = By.cssSelector("li >
+            a[href*='dynamic_controls']");
+    private static final By dynamicLoadingLinkSelector       = By.cssSelector("li >
+            a[href*='dynamic_loading']");
+    private static final By exitIntentLinkSelector           = By.cssSelector("li >
+            a[href*='exit_intent']");
+    private static final By fileDownloadLinkSelector         = By.cssSelector("li >
+            a[href$='download']");
+    private static final By fileUploadLinkSelector           = By.cssSelector("li >
+           a[href*='upload']");
+    private static final By floatingMenuLinkSelector         = By.cssSelector("li >
+           a[href*='floating_menu']");
+    private static final By forgotPasswordLinkSelector       = By.cssSelector("li >
+           a[href*='forgot_password']");
+    private static final By formAuthenticationLinkSelector   = By.cssSelector("li >
+           a[href*='login']");
+    private static final By framesLinkSelector               = By.cssSelector("li >
+           a[href*='frames']");
+    private static final By geolocationLinkSelector          = By.cssSelector("li >
+           a[href*='geolocation']");
+    private static final By horizontalSliderLinkSelector     = By.cssSelector("li >
+           a[href*='horizontal_slider']");
+    private static final By hoversLinkSelector               = By.cssSelector("li >
+           a[href*='hovers']");
+    private static final By infiniteScrollLinkSelector       = By.cssSelector("li >
+           a[href*='infinite_scroll']");
+    private static final By javaScriptAlertLinkSelector      = By.cssSelector("li >
+           a[href*='javascript_alerts']");
+    private static final By javaScriptErrorLinkSelector      = By.cssSelector("li >
+           a[href*='javascript_error']");
+    private static final By jQueryUIMenuLinkSelector         = By.cssSelector("li >
+           a[href*='jqueryui/menu']");
+    private static final By keyPressesLinkSelector           = By.cssSelector("li >
+           a[href*='key_presses']");
+    private static final By largeAndDeepDOMLinkSelector      = By.cssSelector("li >
+           a[href*='large']");
+    private static final By multipleWindowsLinkSelector      = By.cssSelector("li >
+           a[href*='windows']");
+    private static final By nestedFramesLinkSelector         = By.cssSelector("li >
+           a[href*='nested_frames']");
+    private static final By notificationMessagesLinkSelector = By.cssSelector("li >
+           a[href*='notification_message']");
+    private static final By redirectLinkSelector             = By.cssSelector("li >
+           a[href*='redirector']");
+    private static final By secureFileDownloadLinkSelector   = By.cssSelector("li >
+           a[href*='download_secure']");
+    private static final By shiftingContentLinkSelector      = By.cssSelector("li >
+           a[href*='shifting_content']");
+    private static final By slowResourcesLinkSelector        = By.cssSelector("li >
+           a[href*='slow']");
+    private static final By sortableDataTablesLinkSelector   = By.cssSelector("li >
+           a[href*='tables']");
+    private static final By statusCodesLinkSelector          = By.cssSelector("li >
+           a[href*='status_codes']");
+    private static final By typosLinkSelector                = By.cssSelector("li >
+           a[href*='typos']");
+    private static final By wYSIWYGEditorLinkSelector        = By.cssSelector("li >
+           a[href*='tinymce']");
+
+
+
+
+Implement methods +
+

Then use these selectors to create Objects and perform actions on page elements:

+
+
+
+
public ABtestPage clickABtestingLink() {
+        new Button(abTestLinkSelector).click();
+        return new ABtestPage();
+    }
+
+    public BasicAuthPage clickBasicAuthLink() {
+        getDriver().waitForPageLoaded();
+        WebElement link = getDriver().findElementDynamic(basicAuthLinkSelector);
+        JavascriptExecutor executor = (JavascriptExecutor) getDriver();
+        executor.executeScript("var elem=arguments[0]; setTimeout(function() {elem.click();}, 100)",
+                link);
+        return new BasicAuthPage();
+    }
+
+    public BrokenImagePage clickBrokenImageLink() {
+        new Button(brokenImageLinkSelector).click();
+        return new BrokenImagePage();
+    }
+
+    public ChallengingDomPage clickChallengingDomLink() {
+        new Button(challengingDomLinkSelector).click();
+        return new ChallengingDomPage();
+    }
+
+    public CheckboxesPage clickCheckboxesLink() {
+        new Button(checkboxesLinkSelector).click();
+        return new CheckboxesPage();
+    }
+
+    public ContextMenuPage clickContextMenuLink() {
+        new Button(contextMenuLinkSelector).click();
+        return new ContextMenuPage();
+    }
+
+    public DisappearingElementsPage clickDisappearingElementsLink() {
+        new Button(disappearingElementsLinkSelector).click();
+        return new DisappearingElementsPage();
+    }
+
+    public DragAndDropPage clickDragAndDropLink() {
+        new Button(dragAndDropLinkSelector).click();
+        return new DragAndDropPage();
+    }
+
+    public DropdownPage clickDropdownLink() {
+        new Button(dropdownLinkSelector).click();
+        return new DropdownPage();
+    }
+
+    public DynamicContentPage clickDynamicContentLink() {
+        new Button(dynamicContentLinkSelector).click();
+        return new DynamicContentPage();
+    }
+
+    public DynamicControlsPage clickDynamicControlsLink() {
+        new Button(dynamicControlsLinkSelector).click();
+        return new DynamicControlsPage();
+    }
+
+    public DynamicLoadingPage clickDynamicLoadingLink() {
+        new Button(dynamicLoadingLinkSelector).click();
+        return new DynamicLoadingPage();
+    }
+
+    public ExitIntentPage clickExitIntentLink() {
+        new Button(exitIntentLinkSelector).click();
+        return new ExitIntentPage();
+    }
+
+    public FileDownloadPage clickFileDownloadLink() {
+        new Button(fileDownloadLinkSelector).click();
+        return new FileDownloadPage();
+    }
+
+    public FileUploadPage clickFileUploadLink() {
+        new Button(fileUploadLinkSelector).click();
+        return new FileUploadPage();
+    }
+
+    public FloatingMenuPage clickFloatingMenuLink() {
+        new Button(floatingMenuLinkSelector).click();
+        return new FloatingMenuPage();
+    }
+
+    public ForgotPasswordPage clickForgotPasswordLink() {
+        new Button(forgotPasswordLinkSelector).click();
+        return new ForgotPasswordPage();
+    }
+
+    public FormAuthenticationPage clickFormAuthenticationLink() {
+        new Button(formAuthenticationLinkSelector).click();
+        return new FormAuthenticationPage();
+    }
+
+    public FramesPage clickFramesLink() {
+        new Button(framesLinkSelector).click();
+        return new FramesPage();
+    }
+
+    public GeolocationPage clickGeolocationLink() {
+        new Button(geolocationLinkSelector).click();
+        return new GeolocationPage();
+    }
+
+    public HorizontalSliderPage clickHorizontalSliderLink() {
+        new Button(horizontalSliderLinkSelector).click();
+        return new HorizontalSliderPage();
+    }
+
+    public HoversPage clickHoversLink() {
+        new Button(hoversLinkSelector).click();
+        return new HoversPage();
+    }
+
+    public InfiniteScrollPage clickInfiniteScrollLink() {
+        new Button(infiniteScrollLinkSelector).click();
+        return new InfiniteScrollPage();
+    }
+
+    public JavaScriptAlertsPage clickJavaScriptAlertLink() {
+        new Button(javaScriptAlertLinkSelector).click();
+        return new JavaScriptAlertsPage();
+    }
+
+    public JavaScriptErrorPage clickJavaScriptErrorLink() {
+        new Button(javaScriptErrorLinkSelector).click();
+        return new JavaScriptErrorPage();
+    }
+
+    public JQueryUIMenuPage clickJQueryUIMenuLink() {
+        new Button(jQueryUIMenuLinkSelector).click();
+        return new JQueryUIMenuPage();
+    }
+
+    public KeyPressesPage clickKeyPressesLink() {
+        new Button(keyPressesLinkSelector).click();
+        return new KeyPressesPage();
+    }
+
+    public LargeAndDeepDOMPage clickLargeAndDeepDOMLink() {
+        new Button(largeAndDeepDOMLinkSelector).click();
+        return new LargeAndDeepDOMPage();
+    }
+
+    public MultipleWindowsPage clickmultipleWindowsLink() {
+        new Button(multipleWindowsLinkSelector).click();
+        return new MultipleWindowsPage();
+    }
+
+    public NestedFramesPage clickNestedFramesLink() {
+        new Button(nestedFramesLinkSelector).click();
+        return new NestedFramesPage();
+    }
+
+    public NotificationMessagesPage clickNotificationMessagesLink() {
+        new Button(notificationMessagesLinkSelector).click();
+        return new NotificationMessagesPage();
+    }
+
+    public RedirectLinkPage clickRedirectLink() {
+        new Button(redirectLinkSelector).click();
+        return new RedirectLinkPage();
+    }
+
+    public SecureFileDownloadPage clickSecureFileDownloadLink() {
+        new Button(secureFileDownloadLinkSelector).click();
+        return new SecureFileDownloadPage();
+    }
+
+    public ShiftingContentPage clickShiftingContentLink() {
+        new Button(shiftingContentLinkSelector).click();
+        return new ShiftingContentPage();
+    }
+
+    public SlowResourcesPage clickSlowResourcesLink() {
+        new Button(slowResourcesLinkSelector).click();
+        return new SlowResourcesPage();
+    }
+
+    public SortableDataTablesPage clickSortableDataTablesLink() {
+        new Button(sortableDataTablesLinkSelector).click();
+        return new SortableDataTablesPage();
+    }
+
+    public StatusCodesHomePage clickStatusCodesLink() {
+        new Button(statusCodesLinkSelector).click();
+        return new StatusCodesHomePage();
+    }
+
+    public TyposPage clickTyposLink() {
+        new Button(typosLinkSelector).click();
+        return new TyposPage();
+    }
+
+    public WYSIWYGEditorPage clickWYSIWYGEditorLink() {
+        new Button(wYSIWYGEditorLinkSelector).click();
+        return new WYSIWYGEditorPage();
+    }
+
+
+
+

These methods create a Button object for every link on The Internet Page and click it to redirect on a different subpage.

+
+
+
+Elements types +
+

MrChecker includes Object types for various elements existing on webpages such as Button, TextBox etc. There is also WebElement class and getDriver().findElementDynamic(By selector) method for creating webpage objects dynamically and performing basic actions:

+
+
+

Instead of using static types you can use:

+
+
+
+
    public TyposPage clickTyposLink() {
+        WebElement checkboxesLink = getDriver().findElementDynamic(checkboxesLinkSelector);
+        checkboxesLink.click();
+        return new TyposPage();
+    }
+
+
+
+

Or perform actions without creating a variable:

+
+
+
+
    public TyposPage clickTyposLink() {
+        getDriver().findElementDynamic(checkboxesLinkSelector).click();
+        return new TyposPage();
+    }
+
+
+
+
+The Internet Base Test + +
+
+Test Class +
+

Create Test class and override methods:

+
+
+
    +
  • +

    public void setUp() - executes before each test

    +
  • +
  • +

    public void tearDown() - executes after each test

    +
  • +
+
+
+
+
public class TheInternetBaseTest extends BaseTest {
+    @Override
+    public void setUp() {
+
+    }
+
+    @Override
+    public void tearDown() {
+        logStep("Navigate back to The-Internet page");
+        BasePage.navigateBack();
+    }
+}
+
+
+
+

logStep(String message) method doesn’t exist yet so you should create it:

+
+
+
+
    protected static int             step = 0;
+
+     /**
+     * Logs test step including step number calculated individually for each test.
+     *
+     * @param message Text message representing step description.
+     */
+    public static void logStep(String message) {
+        BFLogger.logInfo("Step " + ++step + ": " + message);
+    }
+
+
+
+

Write a method for loading The Internet Page and checking if it is properly opened:

+
+
+
+
    protected static TheInternetPage theInternetPage;
+
+    /**
+     * Performs operations required for verifying if The Internet Page is properly opened.
+     *
+     * @return TheInternetPage
+     */
+    public static TheInternetPage shouldTheInternetPageBeOpened() {
+
+        logStep("Open the Url http://the-internet.herokuapp.com/");
+        theInternetPage = new TheInternetPage();
+        theInternetPage.load();
+
+        logStep("Verify if Url http://the-internet.herokuapp.com/ is opened");
+        assertTrue("Unable to load The Internet Page", theInternetPage.isLoaded());
+
+        return theInternetPage;
+    }
+
+
+
+

This Test class can’t be launched because it doesn’t contain any @Test methods. It’s been created only for supporting other Test classes.

+
+
+
+BFLogger +
+

BFLogger is a default MrChecker logging tool. Use it to communicate important information from test execution. There are three basic logging methods:

+
+
+
    +
  • +

    logInfo(String message) - used for test steps

    +
  • +
  • +

    logDebug(String message) - used for non-official information, either during the test build process or in Page Object files

    +
  • +
  • +

    logError(String message) - used to emphasize critical information

    +
  • +
+
+
+

Logs will be visible in the console and in the log file under path: MrChecker_Test_Framework\workspace\project-folder\logs

+
+
+
+
+

26.28. E2E Tutorials

+ +
+
MrChecker E2E tutorials
+
+

In order to learn more about MrChecker structure, start from Project Organisation section and then check out our fantastic tutorials:

+
+
+
+
How to create a basic test in MrChecker
+ +
+
+
Example: Booking a table
+
+

As an example to test we will use MyThaiStar booking page.
+In order to book a table, do the following steps:

+
+
+
    +
  1. +

    Open MyThaiStar Book Table Page

    +
  2. +
  3. +

    Enter booking data: Date and time, Name, Email and number of Table guests

    +
  4. +
  5. +

    Click Accept terms

    +
  6. +
  7. +

    Click Book table

    +
  8. +
  9. +

    Display confirmation box and send booking

    +
  10. +
  11. +

    Check if the booking was successful.

    +
  12. +
+
+
+
+image1 +
+
+
+
+image2 +
+
+
+

You can go through these steps manually and double-check the result.

+
+
+
+
How to prepare a test
+ +
+
+
== 1. Create BookTablePage class
+
+

You will need a class which will represent the MyThaiStar booking page.
+Fill the required methods with the following code:

+
+
+
+
public class BookTablePage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded(); //waits until the page is loaded
+        return getDriver().getCurrentUrl()
+                .equals("https://mts-devonfw-core.cloud.okteto.net/bookTable"); //checks if current page address equals MyThaiStar booking page address
+    }
+
+    @Override
+    public void load() {
+        getDriver().get("https://mts-devonfw-core.cloud.okteto.net/bookTable"); //loads page under specified address
+        getDriver().waitForPageLoaded(); //waits until the page is loaded
+    }
+
+    @Override
+    public String pageTitle() {
+        return "My Thai Star"; //returns page title
+    }
+}
+
+
+
+

getDriver() method allows accessing Selenium Web Driver which performs actions on the webpage.

+
+
+

As this page class represents the MyThaiStar booking page, you have to set up selectors for web elements required in the test case. In the example you have to create selectors for elements we’ll interact with:

+
+
+
    +
  • +

    Date and time input field

    +
  • +
  • +

    Name input field

    +
  • +
  • +

    Email input field

    +
  • +
  • +

    Table guests input field

    +
  • +
  • +

    Accept terms checkbox

    +
  • +
  • +

    Book table button

    +
  • +
+
+
+

Selectors will be implemented as fields.

+
+
+

Example of the selector for Date and time input field:

+
+
+
+
/** Date field search criteria */
+private static final By dateSearch = By.cssSelector("input[formcontrolname='bookingDate']");
+
+
+
+

The input field’s name "bookingDate" was found by using the developer console in Google Chrome. How to prepare an everlasting selector?

+
+
+
+image3 +
+
+
+

This selector can be used to create a WebElement object of the said input field. Therefore, you will create a new method and call it "enterTimeAndDate".

+
+
+
+
public void enterTimeAndDate(String date) {
+    WebElement dateInput = getDriver().findElementDynamic(dateSearch); //creates a new WebElement to access Date and time input field
+    dateInput.sendKeys(date); //enters date value
+}
+
+
+
+

Now you can create other selectors and objects and methods for every element on the webpage:

+
+
+
+
/** Name input field search criteria */
+private static final By nameSearch = By.cssSelector("input[formcontrolname='name']");
+
+/** Email input field search criteria */
+private static final By emailSearch = By.cssSelector("input[formcontrolname='email']");
+
+/** Number of guests search criteria */
+private static final By guestsSearch = By.cssSelector("input[formcontrolname='assistants']");
+
+/** Check box search criteria */
+private static final By checkboxSearch = By.cssSelector("mat-checkbox[data-name='bookTableTerms']");
+
+/** Book table button search criteria */
+private static By bookTableSearch = By.name("bookTableSubmit");
+
+
+
+
+
public void enterName(String name) {
+    WebElement nameInput = getDriver().findElementDynamic(nameSearch); //creates a new WebElement to access name input field
+    nameInput.sendKeys(name); //enters name value
+}
+
+public void enterEmail(String email) {
+    WebElement emailInput = getDriver().findElementDynamic(emailSearch); //creates a new WebElement to access email input field
+    emailInput.sendKeys(email); //enters email value
+}
+
+public void enterGuests(int amountOfGuests) {
+    WebElement guestsInput = getDriver().findElementDynamic(guestsSearch); //creates a new WebElement to access amount of guests input field
+    guestsInput.sendKeys(Integer.toString(amountOfGuests)); //enters the number of guests value converted from integer to string
+}
+
+public void acceptTerms() {
+    WebElement checkbox = getDriver().findElementDynamic(checkboxSearch); //creates a new WebElement to access accept terms checkbox
+    WebElement square = checkbox.findElement(By.className("mat-checkbox-inner-container")); //creates a new WebElement to access inner square
+    JavascriptExecutor js = (JavascriptExecutor) getDriver(); //creates a Javascript executor object
+    js.executeScript("arguments[0].click()", square); //executes a script which clicks the square
+
+}
+
+public void clickBookTable() {
+    WebElement buttonbutton = getDriver().findElementDynamic(bookTableSearch); //creates a new WebElement to access book table button
+    getDriver().waitUntilElementIsClickable(bookTableSearch); //waits until a button might be clicked
+    buttonbutton.click(); //clicks the button
+}
+
+
+
+

You can use those methods in order to create a new method to go through the whole booking process:

+
+
+
+
public ConfirmBookPage enterBookingData(String date, String name, String email, int guests) {
+    enterTimeAndDate(date);
+    enterName(name);
+    enterEmail(email);
+    enterGuests(guests);
+    acceptTerms();
+
+    clickBookTable();
+
+    return new ConfirmBookPage();
+}
+
+
+
+
+
== 2. Create ConfirmBookPage class
+
+

As you can see, this method returns another page object that has not yet been created. This step is required, as the booking information that you would like to check is on another webpage. This means that you will have to create another page class and call it ConfirmBookPage:

+
+
+
+
public class ConfirmBookPage extends BasePage {
+
+    /** Confirmation dialog search criteria */
+    private static final By confirmationDialogSearch = By.className("mat-dialog-container");
+
+    /** Send confirmation button search criteria */
+    private static final By sendButtonSearch = By.name("bookTableConfirm");
+
+    /** Cancel confirmation button search criteria */
+    private static final By cancelButtonSearch = By.name("bookTableCancel");
+
+    @Override
+    public boolean isLoaded() {
+        //creates a new WebElement to access confirmation dialog box
+        WebElement confirmationDialog = getDriver().findElementDynamic(confirmationDialogSearch);
+
+        return confirmationDialog.isDisplayed(); //checks if the box is displayed
+    }
+
+    //this method won't be called because the page is loaded only after clicking book table button
+    @Override
+    public void load() {
+        BFLogger.logError("MyThaiStar booking confirmation page was not loaded."); //logs error
+    }
+
+    @Override
+    public String pageTitle() {
+        return "My Thai Star";
+    }
+
+    public void confirmBookingData() {
+        WebElement sendButton = getDriver().findElementDynamic(sendButtonSearch); //creates a new WebElement to access confirmation button
+        sendButton.click(); //clicks the send button
+    }
+
+    public void cancelBookingData() {
+        WebElement cancelButton = getDriver().findElementDynamic(cancelButtonSearch); //creates a new WebElement to access resignation button
+        cancelButton.click(); //clicks the cancel button
+    }
+}
+
+
+
+
+image4 +
+
+
+

After the click on Send button - the green confirmation dialogue appears with the message "Table successfully booked":

+
+
+
+image5 +
+
+
+

To be able to check if the booking was successful, you should go back to the BookTablePage class and add one more method in order to check if the green box was displayed:

+
+
+
+
/** Dialog search criteria */
+private static final By dialogSearch = By.className("bgc-green-600");
+
+public boolean checkConfirmationDialog() {
+    WebElement greenConfirmationDialog = getDriver().findElementDynamic(dialogSearch); //creates a new WebElement to access confirmation dialog
+
+    return greenConfirmationDialog.isDisplayed(); //checks if the dialog is displayed
+}
+
+
+
+
+
== 3. Create BookTableTest class
+
+

At this point you can start creating a test class:

+
+
+
+
import static org.junit.Assert.assertTrue;
+
+public class BookTableTest extends BaseTest {
+    private static BookTablePage bookTablePage = new BookTablePage(); //the field contains book table page object
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        bookTablePage.load(); //loads book table page
+    }
+
+    @AfterClass
+    public static void tearDownAfterClass() {
+
+    }
+
+    @Override
+    public void setUp() {
+        if (!bookTablePage.isLoaded()) {
+            bookTablePage.load(); //if the page is not loaded, loads it
+        }
+    }
+
+    @Override
+    public void tearDown() {
+
+    }
+}
+
+
+
+
+
== 4. Write the first test
+
+

You can prepare your first test method using the methods from the page classes

+
+
+
+
@Test
+public void Test_BookTableAndCheckConfirmation() {
+    String date = "07/23/2019 1:00 PM"; //replace with tomorrow's date in format "MM/dd/yyyy hh:mm a"
+    String name = "Smith"; //name field
+    String email = "smith@somemail.com"; //email field
+    int guests = 3; //number of guests
+
+    //enters booking data and returns a new confirmation page
+    ConfirmBookPage confirmBookPage = bookTablePage.enterBookingData(date, name, email, guests);
+    confirmBookPage.confirmBookingData(); //confirms booking
+
+    //checks if the green dialog box appears, if it does, test is passed, if not, the test failed and displays message given in the first argument
+    assertTrue("Test failed: Table not booked", bookTablePage.checkConfirmationDialog()); //returns true if dialog box appears and false if not
+}
+
+
+
+
+
== 5. Run the test
+
+

Run the test by right-clicking on the test method → Run as → JUnit test.

+
+
+
+image6 +
+
+
+
+
+

26.29. Migration from JUnit4 to JUnit5

+ +
+
+

26.30. Migration guide

+
+
Junit4 to Junit5 migration guide
+
+

mrchecker-core-module version 5.6.2.1 features the upgrade of Junit4 to Junit5. Consequently, the Junit4 features are now obsolete and current test projects require migration +in order to use the latest revision of MrChecker. This site provides guidance on the migration.

+
+ +
+
+
POM
+
+

The project pom.xml file needs to be adjusted in the first place. An exemplary POM file for download can be found here: https://github.com/devonfw/mrchecker/blob/develop/template/pom.xml

+
+
+
+
Test Annotations
+
+

Junit5 redefines annotations defining a test flow. The annotations need to be adjusted as per the following table.

+
+
+
+migration01 +
+
+
+
+
Rule, ClassRule, TestRule and TestMethod
+
+

Junit4 @Rule and @ClassRule annotations as well as TestRule and TestMethod interfaces have been replaced +with the Junit5 extension mechanism (https://junit.org/junit5/docs/current/user-guide/#extensions). +During the migration to Junit5, all the instances of the mentioned types need to be rewritten according to the Junit5 User Guide. +The extension mechanism is far more flexible than the Junit4 functionality based on rules.

+
+
+

Note: as per Junit5 API spec: ExpectedExceptionSupport, ExternalResourceSupport, VerifierSupport +provide native support of the corresponding Junit4 rules.

+
+
+

Extension registration example:

+
+
+
+migration02 +
+
+
+
+migration arrow down +
+
+
+
+migration03 +
+
+
+

TestRule (TestWatcher and ExternalResource) to Extension (TestWatcher and AfterAllCallback) example:

+
+
+
+migration04 +
+
+
+
+migration arrow down +
+
+
+
+migration05 +
+
+
+
+
Page, BasePageAutoRegistration and PageFactory classes
+
+

Page class is a new MrChecker class. It was introduced to provide common implementation for its subpages in specific MrChecker modules. +In order to receive test lifecycle notifications, particular Pages need to be registered by calling addToTestExecutionObserver() method. +To facilitate this process, PageFactory class was designed and its usage is a recommended way of creating Page objects for tests. +Although in MrChecker based on Junit4, the registration process was done in a specific BasePage constructor, it’s been considered error prone and reimplemented. +Furthermore, to reduce migration cost BasePageAutoRegistration classes are available in MrChecker modules. They use the old way of registration. +Given that, three ways of migration are possible.

+
+
+

Migration with PageFactory class example (RECOMMENDED):

+
+
+
+migration06 +
+
+
+
+migration arrow down +
+
+
+
+migration07 +
+
+
+

Migration with calling addToTestExecutionObserver() method example:

+
+
+
+migration06 +
+
+
+
+migration arrow down +
+
+
+
+migration08 +
+
+
+

Migration with BasePageAutoRegistration class example:

+
+
+
+migration09 +
+
+
+
+migration arrow down +
+
+
+
+migration10 +
+
+
+
+
Test suites
+
+

Test suite migration example:

+
+
+
+migration11 +
+
+
+
+migration arrow down +
+
+
+
+migration12 +
+
+
+

Running tests from Maven:

+
+
+
+migration13 +
+
+
+
+migration arrow down +
+
+
+
+migration14 +
+
+
+
+
Concurrency
+
+

Junit5 provides native thread count and parallel execution control in contrast to Junit4 where it was controlled by Maven Surefire plugin. +To enable concurrent test execution, the junit-platform.properties file needs to be placed in the test/resources directory of a project.

+
+
+

Exemplary file contents:

+
+
+
+migration15 +
+
+
+

A ready-to-use file can be found here.

+
+
+

MrChecker supports only concurrent test class execution. +@ResourceLock can be used to synchronize between classes if needed:

+
+
+
+migration16 +
+
+
+
+
Cucumber
+
+

If Cucumber is used in a project, it is necessary to change a hook class. +An exemplary hook source file for download can be found here.

+
+
+
+
Data driven tests
+
+

Junit5 implements a new approach to data driven tests by various data resolution mechanisms.

+
+
+

An example of method source parameters migration version one:

+
+
+
+migration17 +
+
+
+
+migration arrow down +
+
+
+
+migration18 +
+
+
+

An example of method source parameters migration version two:

+
+
+
+migration17 +
+
+
+
+migration arrow down +
+
+
+
+migration19 +
+
+
+

An example of method source in another class parameters migration:

+
+
+
+migration20 +
+
+
+
+migration arrow down +
+
+
+
+migration21 +
+
+
+

Providing parameters directly in annotations has no analogy in Junit5 and needs to be replaced with e.g. method source:

+
+
+
+migration22 +
+
+
+
+migration arrow down +
+
+
+
+migration23 +
+
+
+

An example of csv parameters source with no header line migration:

+
+
+
+migration24 +
+
+
+
+migration arrow down +
+
+
+
+migration25 +
+
+
+

An example of csv parameters source with the header line migration:

+
+
+
+migration26 +
+
+
+
+migration arrow down +
+
+
+
+migration27 +
+
+
+

An example of csv parameters source with object mapping migration step1:

+
+
+
+migration28 +
+
+
+
+migration arrow down +
+
+
+
+migration29 +
+
+
+

An example of csv parameters source with object mapping migration step 2:

+
+
+
+migration30 +
+
+
+
+migration arrow down +
+
+
+
+migration31 +
+
+
+
+
setUp() and tearDown()
+
+

BaseTest.setUp() and BaseTest.tearDown() methods are now not abstract and need no implementation in subclasses. @Override when a custom implementation is needed.

+
+
+
+
+

26.31. FAQ

+
+

Here you can find the most frequently asked questions regarding working with MrChecker and installation problems.

+
+
+
+

26.32. Common problems

+
+
I can’t find the boilerplate module. Has it been removed?
+
+

The boilerplate module has been removed from the GitHub project on purpose.

+
+
+

There were problems with naming and communication, not everybody was aware of the meaning of the word boilerplate.

+
+
+

The name of the folder has been changed to template. It can be found in the GitHub project.

+
+
+
+
Is it possible to use Docker with MrChecker?
+
+

MrChecker works seamlessly with Docker.

+
+ +
+

Note that the structure of the folders can be changed. If that happens - search in repo for /pipeline/CI/Jenkinsfile_node.groovy

+
+
+
+
Tests are not stable
+
+

Selenium tests perform actions much faster than a normal user would. Because pages can contain dynamically changing content, some web elements can still not be loaded when Selenium driver tries to access them.

+
+
+

getDriver().waitForPageLoaded() method checks ready state in the browser, that’s why stability problems may happen in advanced frontend projects.

+
+
+

To improve test stability you can:

+
+
+
    +
  • +

    add waiting methods before dynamically loading elements e.g. getDriver().waitForElement(By selector)

    +
  • +
  • +

    add timeout parameter in method getDriver().findElementDynamic(By selector, int timeOut)

    +
  • +
  • +

    change global waiting timeout value using method getDriver().manage().timeouts().implicitlyWait(long time, TimeUnit unit)

    +
  • +
+
+
+

Furthermore, if the page displays visible loading bars or spinners, create FluentWait method to wait until they disappear.

+
+
+

Notice that by increasing timeouts you may improve stability but too long waiting time makes tests run slower.

+
+ +
+
+
+

26.33. How to

+
+
How to: Change timeouts?
+
+

If you would like to change timeouts - you don’t have to change them globally. +It is possible to add a waiting time parameter to searching methods, such as:

+
+
+

getDriver().findElementDynamic(By selector, int timeOut)
+timeout - in seconds

+
+
+

It is recommended to use methods that significantly level up the repetitiveness of the code:

+
+
+
+
getDriver().waitForElement(By selector);
+
+getDriver().waitForElementVisible(By selector);
+
+getDriver().waitForPageLoaded();
+
+getDriver().waitUntilElementIsClickable(By selector);
+
+
+
+

Or Fluent Wait methods with changed timeout and interval:

+
+
+
+
FluentWait<WebDriver> wait = new FluentWait<WebDriver>(getDriver())
+        .withTimeout(long duration, TimeUnit unit)
+        .pollingEvery(long duration, TimeUnit unit);
+wait.until((WebDriver wd) -> expectedCondition.isTrue());
+getWebDriverWait().withTimeout(millis, TimeUnit.MILLISECONDS)
+        .withTimeout(long duration, TimeUnit unit)
+        .pollingEvery(long duration, TimeUnit unit)
+        .until((WebDriver wd) -> expectedCondition.isTrue());
+
+
+
+

These methods allow you to change WebDriver timeout values such as:

+
+
+

getDriver().manage().timeouts().pageLoadTimeout(long time, TimeUnit unit)
+the amount of time to wait for a page to load before throwing an exception. This is the default timeout for method getDriver().waitForPageLoaded()

+
+
+

getDriver().manage().timeouts().setScriptTimeout(long time, TimeUnit unit)
+the amount of time to wait for execution of script to finish before throwing an exception

+
+
+

getDriver().manage().timeouts().implicitlyWait(long time, TimeUnit unit) +the amount of time the driver should wait when searching for an element if it is not immediately present. After that time, it throws an exception. This is the default timeout for methods such as getDriver().findElementDynamic(By selector) or getDriver().waitForElement(By selector)

+
+
+

Changing timeouts can improve test stability but can also make test run time longer.

+
+
+
+
How to: Start a browser in Incognito/Private mode?
+
+

In MrChecker there is a possibility of changing browser options during runtime execution.

+
+
+

To run the browser in incognito mode:

+
+
+
    +
  1. +

    In Eclipse - open Run Configurations window:

    +
    +

    ht image1

    +
    +
  2. +
  3. +

    Select a test which you want to run and switch to arguments tab:

    +
    +

    ht image2

    +
    +
  4. +
  5. +

    Add VM argument:

    +
    +
      +
    • +

      for the incognito mode in chrome:

      +
      +

      ht image3

      +
      +
    • +
    +
    +
  6. +
+
+ +
+
+
+

26.34. Installation problems

+
+
Chromedriver version is not compatible with Chrome browser
+
+

Problem:

+
+
+

During the tests your web browser window opens and immediately closes, all your tests are broken.

+
+
+

Following error message is visible in the test description:

+
+
+
+
session not created: This version of ChromeDriver only supports Chrome version 76
+Build info: version: '<build_version>', revision: '<build_revision>', time: '<time>'
+System info: host: '<your_computer_name>', ip: '<your_ip_address>', os.name: '<your_os_name>', os.arch: '<your_os_architecture>', os.version: '<your_os_version>', java.version: '<java_version_installed>'
+Driver info: driver.version: NewChromeDriver
+
+
+
+

Solution:

+
+
+
    +
  1. +

    Make a change in the following files:

    +
    +
      +
    • +

      MrChecker_Test_Framework\workspace\devonfw-testing\src\resources\settings.properties

      +
    • +
    • +

      For project template-app-under-test: MrChecker_Test_Framework\workspace\devonfw-testing\template\src\resources\settings.properties

      +
    • +
    • +

      For project example-app-under-test: MrChecker_Test_Framework\workspace\devonfw-testing\example\src\resources\settings.properties

      +
      +

      Change the value of selenium.driverAutoUpdate field from true to false

      +
      +
    • +
    +
    +
  2. +
  3. +

    Replace the following file with a version compatible with your browser: +MrChecker_Test_Framework\workspace\devonfw-testing\example\lib\webdrivers\chrome\chromedriver.exe .

    +
  4. +
+
+
+
+
My browser opens up in German by default
+
+

Problem:

+
+
+

I would like my browser to use the English language, but the default language for the browser is German. How can I change the settings?

+
+
+

Solution:

+
+
+

There is a Properties file installed together with the MrChecker installation. It is possible to set the language in which a browser is opened for testing purposes in Properties > Selenium configuration.

+
+
+
+
+

26.35. devonfw dashboard

+ +
+
+

26.36. Landing page

+ +
+

==Landing page

+
+
+

This is the entry point of the devonfw dashboard. Click on GET STARTED NOW to start using it.

+
+
+
+Get Started +
+
Figure 80. Get Started
+
+
+
Your devonfw distributions
+
+

The first time you open the application you will get a dialog with all the devonfw distributions found on your machine. Click on OK GOT IT to continue.

+
+
+
+devon-ide distributions +
+
Figure 81. devon-ide distributions
+
+
+
+
Profile form
+
+

Here you will find a screen that allows you to create a profile. This is just for the purpose of customizing your dashboard.

+
+
+
+Profile +
+
Figure 82. Profile
+
+
+

Fill the data and click on CREATE MY PROFILE if you want to create the profile at the moment or click WILL DO IT LATER to skip the creation.

+
+
+
+
+

26.37. Home

+ +
+

==Home page

+
+
+

This is the main page that you will find after your profile creation and the page where you will start from henceforth.

+
+
+

It contains three sections as below:

+
+
+
    +
  1. +

    Toolbar

    +
  2. +
  3. +

    Sidebar

    +
  4. +
  5. +

    Content

    +
  6. +
+
+
+
Topbar
+
+

This section is at the top of the page, it contains devonfw instance dropdown to select devonfw-ide that can be used as a base for the projects.

+
+
+
+Toolbar +
+
+
+

Next to the devonfw instance dropdown, there is a quick help icon, clicking on it will open a popup which gives some tips for how to use Devon Dashboard IDE.

+
+
+
+Quick Help +
+
+
+
+ +
+

The sidebar is divided into two sections:

+
+
+
    +
  1. +

    User Profile - Users can see their picture, name, and role.

    +
  2. +
  3. +

    Links to access to different sections of the dashboard.

    +
  4. +
+
+
+
+Sidebar +
+
+
+
+
Content Section
+
+

The Content section is also divided into three sections:

+
+
+
    +
  1. +

    A small introduction about the devonfw IDE

    +
  2. +
  3. +

    A button to Download latest version of devonfw IDE

    +
  4. +
  5. +

    A "Project" block which shows the total number of Projects which are available in different devonfw IDE

    +
  6. +
+
+
+
+
Steps to download and Install devonfw IDE
+
+

Step 1: Click on Download latest version button which is in the Content section. Check the below screen for the reference.

+
+
+
+Download Latest Version +
+
+
+

Step 2: By clicking Download latest version button, Installing devonfw popup will open.

+
+
+
+Installing Devonfw +
+
+
+

Step 3: Installing devonfw popup will automatically trigger one more popup to specify the location for downloading Devonfw IDE. Specify the location and click the Save button to download.

+
+
+
+Download location popup +
+
+
+

Step 3: Once the download completes successfully, the Next button will be enabled for the further installation process.

+
+
+
+Download Devonfw Completed +
+
+
+

Step 4: By Clicking Next button in the Installing devonfw pop up, two options are shown:
+1: Select the Git url for the installation setup.
+2: Skip this process.

+
+
+
+Installation Options +
+
+
+

Step 5: Select one of the above options.

+
+
+
    +
  • +

    If the selection is Git url, then Configuration file url should be filled in the input box and needs to click Next button to start the further installation process.

    +
  • +
  • +

    In case the user doesn’t have Git url, then simply Skip the process and click the Next button to start the further installation process.

    +
  • +
+
+
+

Step 6: Click on the Next button for the final installation process. Wait for some time to complete the installation setup. Once the installation setup completes, the Close button will appear. Just click on it and go to the specified folder location.

+
+
+
+Installation Setup +
+
+
+
+
+

26.38. Projects

+ +
+
Introduction to project management in the dashboard
+
+
    +
  • +

    The dashboard manages multiple projects in multiple workspaces that include Angular, JAVA, and Node.

    +
  • +
  • +

    The dashboard provides rich UI for creating multiple projects, abstracting all the functionality which is usually required while creating an application like opening a command terminal, specifying workspace, and executing commands.

    +
  • +
  • +

    The dashboard makes it easy to see all the projects which are in different devonfw-ide workspace, just by changing the "devonfw Instance" dropdown.

    +
  • +
  • +

    The dashboard makes it very easy to open a project in a different IDE like Visual Studio or Eclipse respectively just by right click on the Project folder and open option.

    +
  • +
  • +

    The dashboard also makes it easy to delete the project, explore the project location.

    +
  • +
+
+
+
+
Projects
+
+

Click on the Projects link on the sidebar to navigate to the project’s screen. The screen displays all the projects in the currently selected devonfw-ide, grouped by the workspaces in which they exist.
+Note: Currently it only displays projects created through the dashboard.

+
+
+
+Project Screen +
+
+
+
    +
  • +

    It shows the total number of projects available in each devonfw-ide.

    +
  • +
  • +

    Filtering and searching the projects.

    +
  • +
  • +

    Add New Project - For creating a Project.

    +
  • +
  • +

    Project folder which gives information about the project like which technology the project belongs to, the name of the project, and when it was created.

    +
  • +
  • +

    There are many operations available on right-click on the Project folder:

    +
    +
      +
    1. +

      Opening a project in different IDE ( Visual Studio or Eclipse )

      +
    2. +
    3. +

      Enclosing Folder, and

      +
    4. +
    5. +

      Deleting the project.

      +
    6. +
    +
    +
  • +
  • +

    Users can see projects of different devonfw-ide workspace just by changing the option in the devonfw instance dropdown which is set globally at the top of the screen.

    +
  • +
+
+
+

Click on Add New Project to start creating a new project.

+
+
+
+
How to create a project
+
+

Three main steps are involved in creating any devonfw project. They are:

+
+
+

Step 1. Project Type

+
+
+

In this first step the user has to choose the language technology to start the project with, e.g. Angular, Java or Node, and click the Next button to continue to the next step.

+
+
+
+Project Type +
+
+
+

Step 2. Project Data

+
+
+

After the Project type selection, the second screen will appear for the user to fill up all the required fields. User can select the workspace in the active devonfw-ide for the project in this step. Once the user enters all the required fields, the Next button will be enabled for the final step.

+
+
+
+Project Data +
+
+
+

User can change the devonfw-ide workspace where the project is going to generate, just by changing the option in the devonfw instance dropdown which is set globally in the header of the dashboard.

+
+
+
+Toolbar +
+
+
+

Step 3. Execution

+
+
+

The execution step takes all the user entered data from the Project Data step and executes the respective commands to generate the project.

+
+
+

Execution is divided into two sections:
+- Creation
+- Setup Installation

+
+
+3.1 Creation +
+
    +
  • +

    Creates only the source code and notifies the user whether the project creation failed or succeeded.

    +
  • +
+
+
+
+Creation +
+
+
+
    +
  • +

    In case of any network or technical issue, if the user wants to re-run the project execution process, the Retry button will help to start the process again.

    +
  • +
+
+
+
+Retry +
+
+
+
+3.2 Setup installation +
+

Allows user to install the dependencies of application (maven modules for java, node modules for node, angular) by clicking Proceed button.

+
+
+

The installation can be skipped by clicking cancel button.

+
+
+
+Installation +
+
+
+

Step 4. Click on Finish button to go to Project Details Screen.

+
+
+
+
+
+

26.39. Repositories

+ +
+

==Repositories

+
+
+

This page lists the different repositories under devonfw organization.

+
+
+
+Repositories +
+
Figure 83. Repositories
+
+
+

The list updates as you type in the search bar.

+
+
+
+Search Repositories +
+
Figure 84. Search Repositories
+
+
+
    +
  • +

    You can click COPY GITHUB URL for any of the repository list item to copy its github URL to your clipboard and clone it locally.

    +
  • +
  • +

    You can also click the OPEN REPOSITORY button to view its github repository page in your default browser.

    +
  • +
+
+
+
+

26.40. Wiki

+ +
+

==Wiki page.

+
+
+

This page displays the documentation of devonfw. You can also find it at https://devonfw.com/

+
+
+
+Wiki +
+
Figure 85. Wiki
+
+
+
+

26.41. Settings

+ +
+

==Settings

+
+
+
Account settings
+
+

Here you get a screen that allows you to create a profile. This is the same screen which you see during the initial setup of the dashboard. It is completely optional.

+
+
+
+Account settings +
+
Figure 86. Account settings
+
+
+

Fill the data and click on Save if you want to create the profile.

+
+
+
+
Installed versions
+
+

The installed versions subsection allows you to manage the different versions of devonfw-ide available.

+
+
+
+Installed versions +
+
Figure 87. Installed versions
+
+
+
    +
  • +

    It lists the devonfw-ide you have installed in your system, along with the ones available for download from our maven repository

    +
  • +
  • +

    If you want to install specific version, you can search it here and DOWNLOAD it

    +
  • +
  • +

    To check the release notes for a version, simply click on Consolidated list of features

    +
  • +
  • +

    For the installed versions:

    +
    +
      +
    • +

      Hovering over the eye icon shows you the path for the devonfw-ide in a tooltip

      +
    • +
    • +

      You can view it in your system explorer by clicking the eye icon

      +
    • +
    • +

      You can update its settings and softwares by clicking on UPDATE

      +
    • +
    • +

      You can also UNINSTALL an installed version, after which the dashboard will no longer keep track of the projects and IDEs belonging to that devonfw-ide

      +
    • +
    +
    +
  • +
+
+
+
+
+

26.42. Solicitor User Guide

+
+

SPDX-License-Identifier: Apache-2.0

+
+
+
+

26.43. Introduction

+
+

Today's software projects often make use of large amounts of Open Source software. Being +compliant with the license obligations of the used software components is a prerequisite for every such project. This results in different requirements that the project might need to fulfill. Those requirements can be grouped into two main categories:

+
+
+
    +
  • +

    Things that need to be done to actually fulfill license obligations

    +
  • +
  • +

    Things that need to be done to monitor / report fulfillment of license obligations

    +
  • +
+
+
+

Most of the above activities share common points:

+
+
+
    +
  • +

    The need to have an inventory of used (open source) components and their licenses

    +
  • +
  • +

    Some rule based evaluation and reporting based on this inventory

    +
  • +
+
+
+

In practice these easy looking tasks might get complex due to various aspects:

+
+
+
    +
  • +

    The number of open source components might be quite large (>> 100 for a typical webapplication based on state of the art programming frameworks)

    +
  • +
  • +

    Agile development and rapid changes of used components result in frequent changes of the inventory

    +
  • +
  • +

    Open Source usage scenarios and license obligations might be OK in one context (e.g. in the relation between a software developer and his client) but might be completely unacceptable in another context (e.g. when the client distributes the same software to end customers)

    +
  • +
  • +

    Legal interpretation of license conditions often differ from organisation to organisation and result in different compliance rules to be respected.

    +
  • +
  • +

    License information for components is often not available in a standardized form which would allow automatic processing

    +
  • +
  • +

    Tools for supporting the license management processes are often specific to a technology or build tool and do not support all aspects of OSS license management.

    +
  • +
+
+
+

Of course there are specific commercial tool suites which address the IP rights and license domain. But due to high complexity and license costs those tools are out of reach for most projects - at least for permanent use.

+
+
+

Solicitor tries to address some of the issues highlighted above. In its initial version it is a tool for programmatically executing a process which was originally defined as an Excel-supported manual process.

+
+
+

When running Solicitor three subsequent processing steps are executed:

+
+
+
    +
  • +

    Creating an initial component and license inventory based on technology specific input files

    +
  • +
  • +

    Rule based normalization and evaluation of licenses

    +
  • +
  • +

    Generation of output documents

    +
  • +
+
+
+ + + + + +
+ + +Solicitor comes with a set of sample rules for the normalization and evaluation of licenses. +Even though these included rules are not "intentionally wrong" they are only samples and you should never rely on these builtin rules without checking and possibly modifying their content and consulting your lawyer. Solicitor is a tool +for technically supporting the management of OSS licenses within your project. Solicitor neither gives legal advice nor is a replacement for a lawyer. +
+
+
+
+

26.44. Licensing of Solicitor

+
+

The Solicitor code and accompanying resources (including this userguide) as stored in the GIT Repository https://github.com/devonfw/solicitor are licensed as Open Source under Apache 2 license (https://www.apache.org/licenses/LICENSE-2.0).

+
+
+ + + + + +
+ + +Specifically observe the "Disclaimer of Warranty" and "Limitation of Liability" which are part of the license. +
+
+
+ + + + + +
+ + +The executable JAR file which is created by the Maven based build process includes numerous other Open Source components which are subject to different Open Source licenses. Any distribution of the Solicitor executable JAR file needs to comply with the license conditions of all those components. +If you are running Solicitor from the executable JAR you might use the -eug option to store detailed license information as file solicitor_licenseinfo.html in your current working directory (together with a copy of this user guide). +
+
+
+
+

26.45. Architecture

+
+

The following picture shows a business oriented view of Solicitor.

+
+
+

domain model

+
+
+

Raw data about the components and attached licenses within an application is gathered by scanning with technology and build chain specific tools. This happens outside Solicitor.

+
+
+

The import step reads this data and transforms it into a common technology independent internal format.

+
+
+

In the normalization step the license information is completed and unified. Information not contained in the raw data is added. Where possible the applicable licenses are expressed by SPDX-IDs.

+
+
+

Many open source components are available via multi licensing models. Within qualification the finally applicable licenses are selected.

+
+
+

In the legal assessment the compliance of applicable licenses will be checked based on generic rules defined in company wide policies and possibly project specific extensions. Defining those rules is considered as "legal advice" and possibly needs to be done by lawyers who are authorized to do so. For this step Solicitor only provides a framework / tool to support the process here but does not deliver any predefined rules.

+
+
+

The final export step produces documents based on the internal data model. This might be the list of licenses to be forwarded to the customer or a license compliance report. Data might also be fed into other systems.

+
+
+

A more technical oriented view of Solicitor is given below.

+
+
+

solution

+
+
+

There are three major technical components: The reader and writer components are performing import and export of data. The business logic - doing normalization, qualification and legal assessment is done by a rule engine. Rules are mainly defined via decision tables. Solicitor comes with a starting set of rules for normalization and qualification but these rulesets need to be extended within the projects. Rules for legal evaluation need to be completely defined by the user.

+
+
+

Solicitor is working without additional persisted data: When being executed it generates the output directly from the read input data after processing the business rules.

+
+
+
+

26.46. Data Model

+
+

datamodel

+
+
+

The internal business data model consists of 6 entities:

+
+
+
    +
  • +

    ModelRoot: root object of the business data model which holds metadata about the data processing

    +
  • +
  • +

    Engagement: the masterdata of the overall project

    +
  • +
  • +

    Application: a deliverable within the Engagement

    +
  • +
  • +

    ApplicationComponent: component within an Application

    +
  • +
  • +

    RawLicense: License info attached to an ApplicationComponent as it is read from the input data

    +
  • +
  • +

    NormalizedLicense: License info attached to an ApplicationComponent processed by the business rules

    +
  • +
+
+
+
+

26.47. == ModelRoot

+
+

|== = +| Property | Type | Description +| modelVersion | int | version number of the data model +| executionTime | String | timestamp when the data was processed +| solicitorVersion | String | Solicitor version which processed the model +| solicitorGitHash | String | buildnumber / GitHash of the Solicitor build +| solicitorBuilddate | String | build date of the Solicitor build +| extensionArtifactId | String | artifactId of the active Solicitor Extension ("NONE" if no extension) +| extensionVersion | String | Version of the active Extension (or "NONE") +| extensionGitHash | String | Buildnumber / GitHash of the Extension (or "NONE") +| extensionBuilddate | String | build date of the Extension (or "NONE") +|== =

+
+
+
+

26.48. == Engagement

+
+

|== = +| Property | Type | Description +| engagementName | String | the engagement name +| engagementType | EngagementType | the engagement type; possible values: INTERN, EXTERN +| clientName | String | name of the client +| goToMarketModel | GoToMarketModel | the go-to-market-model; possible values: LICENSE +| contractAllowsOss | boolean | does the contract explicitly allow OSS? +| ossPolicyFollowed | boolean | is the company's OSS policy followed? +| customerProvidesOss | boolean | does the customer provide the OSS? +|== =

+
+
+
+

26.49. == Application

+
+

|== = +| Property | Type | Description +| applicationName | String | the name of the application / deliverable +| releaseId | String | version identifier of the application +| releaseDate | String | release date of the application +| sourceRepo | String | URL of the source repo of the application (should be an URL) +| programmingEcosystem | String | programming ecosystem (e.g. Java8; Android/Java, iOS / Objective C) +|== =

+
+
+
+

26.50. == ApplicationComponent

+
+

|== = +| Property | Type | Description +| usagePattern | UsagePattern | possible values: DYNAMIC_LINKING, STATIC_LINKING, STANDALONE_PRODUCT +| ossModified | boolean | is the OSS modified? +| ossHomepage | String | URL of the OSS homepage +| groupId | String | component identifier: maven group +| artifactId | String | component identifier: maven artifactId +| version | String | component identifier: Version +| repoType | String | component identifier: RepoType +|== =

+
+
+
+

26.51. == RawLicense

+
+

|== = +| Property | Type | Description +| declaredLicense | String | name of the declared license +| licenseUrl | String | URL of the declared license +| trace | String | detail info of history of this data record +| specialHandling | boolean | (for controlling rule processing) +|== =

+
+
+
+

26.52. == NormalizedLicense

+
+

|== = +| Property | Type | Description +| declaredLicense | String | name of the declared license (copied from RawLicense) +| licenseUrl | String | URL of the declared license (copied from RawLicense) +| declaredLicenseContent | String | resolved content of licenseUrl +| normalizedLicenseType | String | type of the license, see [License types] +| normalizedLicense | String | name of the license in normalized form (SPDX-Id) or special "pseudo license id", see [Pseudo License Ids] +| normalizedLicenseUrl | String | URL pointing to a normalized form of the license +| normalizedLicenseType | String | type of the license, see [License types] +| effectiveNormalizedLicenseType | String | type of the effective license, see [License types] +| effectiveNormalizedLicense | String | effective normalized license (SPDX-Id) or "pseudo license id"; this is the information after selecting the right license in case of multi licensing or any license override due to a component being redistributed under a different license +| effectiveNormalizedLicenseUrl | String | URL pointing to the effective normalized license +| effectiveNormalizedLicenseContent | String | resolved content of effectiveNormalizedLicenseUrl +| legalPreApproved | String | indicates whether the license is pre approved based on company standard policy +| copyLeft | String | indicates the type of copyleft of the license +| licenseCompliance | String | indicates if the license is compliant according to the default company policy +| licenseRefUrl | String | URL to the reference license information (TBD) +| licenseRefContent | String | resolved content of licenseRefUrl +| includeLicense | String | does the license require to include the license text ? +| includeSource | String | does the license require to deliver source code of OSS component ? +| reviewedForRelease | String | for which release was the legal evaluation done? 
+| comments | String | comments on the component/license (mainly as input to legal) +| legalApproved | String | indicates whether this usage is legally approved +| legalComments | String | comments from legal, possibly indicating additional conditions to be fulfilled +| trace | String | detail info of history of this data record (rule executions) +|== =

+
+
+

For the mechanism how Solicitor resolves the content of URLs and how the result +might be influenced see Resolving of License URLs

+
+
+
+

26.53. == License types

+
+

Defines the type of license

+
+
+
    +
  • +

    OSS-SPDX - An OSS license which has a corresponding SPDX-Id

    +
  • +
  • +

    OSS-OTHER - An OSS license which has no SPDX-Id

    +
  • +
  • +

    COMMERCIAL - Commercial (non OSS) license; this might also include code which is owned by the project

    +
  • +
  • +

    UNKNOWN- License is unknown

    +
  • +
  • +

    IGNORED- license will be ignored (non selected license in multi licensing case; only to be used as "Effective Normalized License Type")

    +
  • +
+
+
+
+

26.54. == Pseudo License Ids

+
+

A "normalized" license id might be either a SPDX-Id or a "pseudo license id" which is used to indicate a specific situation. The following pseudo license ids are used:

+
+
+
    +
  • +

    OSS specific - a nonstandard OSS license which could not be mapped to a SPDX-Id

    +
  • +
  • +

    PublicDomain - any form of public domain which is not represented by an explicit SPDX-Id

    +
  • +
  • +

    Ignored - license will be ignored (non selected license in multi licensing case; only to be used as "Effective Normalized License")

    +
  • +
  • +

    NonOSS - commercial license, not OSS

    +
  • +
+
+
+
+

26.55. Usage

+ +
+
+

26.56. Executing Solicitor

+
+

Solicitor is a standalone Java (Spring Boot) application. Prerequisite for running it is an existing Java 8 or 11 runtime environment. If you do not yet have the Solicitor executable JAR (solicitor.jar) you need to build it as given on the project GitHub homepage https://github.com/devonfw/solicitor .

+
+
+

Solicitor is executed with the following command:

+
+
+
+
java -jar solicitor.jar -c <configfile>
+
+
+
+

where <configfile> is to be replaced by the location of the [Configuration File].

+
+
+

To get a first idea on what Solicitor does you might call

+
+
+
+
java -jar solicitor.jar -c classpath:samples/solicitor_sample.cfg
+
+
+
+

This executes Solicitor with default configuration on its own list of internal components and produces sample output.

+
+
+

To get an overview of the available command line options use

+
+
+
+
java -jar solicitor.jar -h
+
+
+
+
+
Addressing of resources
+
+

For unique addressing of resources to be read (configuration files, input data, rule templates and decision tables) Solicitor makes use of the Spring ResourceLoader functionality, see https://docs.spring.io/spring-framework/docs/current/spring-framework-reference/core.html#resources-resourceloader . This allows loading from the classpath, the filesystem or even via http get.

+
+
+

If you want to reference a file in the filesystem you need to write it as follows: file:path/to/file.txt

+
+
+

Note that this only applies to resources being read. Output files are addressed without that prefix.

+
+
+
+
+
+

26.57. Project Configuration File

+
+

The project configuration of Solicitor is done via a configuration file in +JSON format. This configuration file defines the engagements and applications master data, configures the readers for importing component and license information, references the business rules to be applied and defines the exports to be done.

+
+
+

The config file has the following skeleton:

+
+
+
+
{
+  "version" : 1,
+  "comment" : "Sample Solicitor configuration file",
+  "engagementName" : "DevonFW", (1)
+  .
+  .
+  .
+  "applications" : [ ... ], (2)
+  "rules" : [ ... ],  (3)
+  "writers" : [ ... ] (4)
+}
+
+
+
+ + + + + + + + + + + + + + + + + +
1The leading data defines the engagement master data, see [Header and Engagement Master Data]
2applications defines the applications within the engagement and configures the readers to import the component/license information, see [Readers / Applications]
3rules references the rules to apply to the imported data, see [Business Rules]
4writers configures how the processed data should be exported, see [Writers / Reporting]
+
+
+ + + + + +
+ + +The following section describes all sections of the Solicitor configuration file format. Often the configuration of writers and especially rules will be identical for projects. To facilitate the project specific configuration setup Solicitor internally provides a base configuration which contains reasonable defaults for the rules and writers section. If the project specific configuration file omits the rules and/or writers sections then the corresponding settings from the base configuration will be taken. For details see Default Base Configuration. +
+
+
+ + + + + +
+ + +If locations of files are specified within the configuration files as relative +pathnames then this is always evaluated relative to the current working directory (which +might differ from the location of the configuration file). If some file location +should be given relative to the location of the configuration file this might be done +using the special placeholder ${cfgdir} as described in the following. +
+
+
+
+

26.58. == Placeholders within the configuration file

+
+

Within certain parts of the configuration file (path and filenames) special placeholders might be used to parameterize the configuration. These areas are explicitly marked in the following +description.

+
+
+

These placeholders are available:

+
+
+
    +
  • +

    ${project} - A simplified project name (taking the engagement name, +removing all non-word characters and converting to lowercase).

    +
  • +
  • +

    ${cfgdir} - If the config file was loaded from the filesystem this denotes the directory where the config file resides, . otherwise. This can be used to reference locations relative to the location of the config file.

    +
  • +
+
+
+
+

26.59. == Header and Engagement Master Data

+
+

The leading section of the config file defines some metadata and the engagement master data.

+
+
+
+
  "version" : 1, (1)
+  "comment" : "Sample Solicitor configuration file", (2)
+  "engagementName" : "DevonFW", (3)
+  "engagementType" : "INTERN", (4)
+  "clientName" : "none", (5)
+  "goToMarketModel" : "LICENSE", (6)
+  "contractAllowsOss" : true, (7)
+  "ossPolicyFollowed" : true, (8)
+  "customerProvidesOss" : false, (9)
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
1version of the config file format (currently needs to be 1)
2is a free text comment (no further function at the moment)
3the engagement name (any string)
4the engagement type; possible values: INTERN, EXTERN
5name of the client (any string)
6the go-to-market-model; possible values: LICENSE
7does the contract explicitly allow OSS? (boolean)
8is the company's OSS policy followed? (boolean)
9does the customer provide the OSS? (boolean)
+
+
+
+

26.60. == Applications

+
+

Within this section the different applications (=deliverables) of the engagement are defined. Furthermore, for each application at least one reader needs to be defined which imports the component and license information.

+
+
+
+
 "applications" : [ {
+    "name" : "Devon4J", (1)
+    "releaseId" : "3.1.0-SNAPSHOT", (2)
+    "sourceRepo" : "https://github.com/devonfw/devon4j.git", (3)
+    "programmingEcosystem" : "Java8", (4)
+    "readers" : [ { (5)
+      "type" : "maven", (6)
+      "source" : "classpath:samples/licenses_devon4j.xml", (7) (10)
+      "usagePattern" : "DYNAMIC_LINKING", (8)
+      "repoType" : "maven" (9)
+    } ]
+  } ],
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
1The name of the application / deliverable (any string)
2Version identifier of the application (any string)
3URL of the source repo of the application (string; should be an URL)
4programming ecosystem (any string; e.g. Java8; Android/Java, iOS / Objective C)
5multiple readers might be defined per application
6the type of reader; for possible values see Reading License Information with Readers
7location of the source file to read (ResourceLoader-URL)
8usage pattern; possible values: DYNAMIC_LINKING, STATIC_LINKING, STANDALONE_PRODUCT
9repoType: Repository to download the sources from: currently possible values: maven, npm; if omitted then "maven" will be taken as default
10placeholder patterns might be used here
+
+
+

The different readers are described in chapter Reading License Information with Readers

+
+
+
+

26.61. == Business Rules

+
+

Business rules are executed within a Drools rule engine. They are defined as a sequence of rule templates and corresponding XLS files which together represent decision tables.

+
+
+
+
  "rules" : [ {
+    "type" : "dt", (1)
+    "optional" : false, (2)
+    "ruleSource" : "classpath:samples/LicenseAssignmentSample.xls", (3) (7)
+    "templateSource" : "classpath:com/.../rules/rule_templates/LicenseAssignment.drt", (4) (7)
+    "ruleGroup" : "LicenseAssignment", (5)
+    "description" : "setting license in case that no one was detected" (6)
+  },
+  .
+  .
+  .
+,{
+    "type" : "dt",
+    "optional" : false,
+    "ruleSource" : "classpath:samples/LegalEvaluationSample.xls",
+    "templateSource" : "classpath:com/.../rules/rule_templates/LegalEvaluation.drt",
+    "ruleGroup" : "LegalEvaluation",
+    "description" : "final legal evaluation based on the rules defined by legal"
+  } ],
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
1type of the rule; only possible value: dt which stands for "decision table"
2if set to true the processing of this group of rules will be skipped if the XLS with table +data (given by ruleSource) does not exist; if set to false a missing XLS table will result +in program termination
3location of the tabular decision table data
4location of the drools rule template to be used to define the rules together with the decision table data
5id of the group of rules; used to reference it e.g. when doing logging
6some textual description of the rule group
7placeholder patterns might be used here
+
+
+

When running, Solicitor will execute the rules of each rule group separately and in the order +given by the configuration. Only if there are no more rules to fire in a group Solicitor will +move to the next rule group and start firing those rules.

+
+
+

Normally a project will only customize (part of) the data of the decision tables and thus will only change the ruleSource and the data in the XLS. All other configuration (the different templates and processing order) is part of the Solicitor application itself and should not be changed by end users.

+
+
+

See Working with Decision Tables and Standard Business Rules for further information on the business rules.

+
+
+
+

26.62. == Writers / Reporting

+
+

The writer configuration defines how the processed data will be exported and/or reported.

+
+
+
+
  "writers" : [ {
+    "type" : "xls", (1)
+    "templateSource" : "classpath:samples/Solicitor_Output_Template_Sample.xlsx", (2) (6)
+    "target" : "OSS-Inventory-DevonFW.xlsx", (3) (6)
+    "description" : "The XLS OSS-Inventory document", (4)
+    "dataTables" : { (5)
+      "ENGAGEMENT"  : "classpath:com/devonfw/tools/solicitor/sql/allden_engagements.sql",
+      "LICENSE" : "classpath:com/devonfw/tools/solicitor/sql/allden_normalizedlicenses.sql"
+    }
+  } ]
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + +
1type of writer to be selected; possible values: xls, velo
2path to the template to be used
3location of the output file
4some textual description
5reference to SQL statements used to transform the internal data model to data tables used for reporting
6placeholder patterns might be used here
+
+
+

For details on the writer configuration see Reporting / Creating output documents.

+
+
+
+

26.63. Starting a new project

+
+

To simplify setting up a new project Solicitor provides an option to create a project starter configuration in a given directory.

+
+
+
+
java -jar solicitor.jar -wiz some/directory/path
+
+
+
+

Besides the necessary configuration file this includes also empty XLS files for defining project +specific rules which amend the builtin rules. Furthermore, a sample license.xml file is provided to +directly enable execution of Solicitor and a check of its functionality.

+
+
+

This configuration then serves as starting point for project specific configuration.

+
+
+
+

26.64. Exporting the Builtin Configuration

+
+

When working with Solicitor it might be necessary to get access to the builtin base configuration, e.g. for reviewing the builtin sample rules or using builtin reporting templates as starting point for the creation of own templates.

+
+
+

The command

+
+
+
+
java -jar solicitor.jar -ec some/directory/path
+
+
+
+

will export all internal configuration to the given directory. This includes:

+
+
+ +
+
+
+

26.65. Configuration of Technical Properties

+
+

Besides the project configuration done via the above described file there are a set of technical settings in Solicitor which are done via properties. Solicitor is implemented as a Spring Boot Application and makes use +of the standard configuration mechanism provided by the Spring Boot Platform which provides several ways to define/override properties.

+
+
+

The default property values are given in Built in Default Properties.

+
+
+

In case that a property shall be overridden when executing Solicitor this can easiest be done via the command line when executing +Solicitor:

+
+
+
+
java -Dsome.property.name1=value -Dsome.property.name2=another_value -jar solicitor.jar <any other arguments>
+
+
+
+
+

26.66. Reading License Information with Readers

+
+

Different Readers are available to import raw component / license information for different +technologies. This chapter describes how to setup the different build / dependency management systems to create the required input and how to configure the corresponding reader.

+
+
+
+

26.67. Maven

+
+

For the export of the licenses from a maven based project the license-maven-plugin is used, which can directly be called without the need to change anything in the pom.xml.

+
+
+

To generate the input file required for Solicitor the License Plugin needs to be executed with the following command:

+
+
+
+
mvn org.codehaus.mojo:license-maven-plugin:1.14:aggregate-download-licenses -Dlicense.excludedScopes=test,provided
+
+
+
+

The generated output file named licenses.xml (in the directory specified in the +plugin config) should look like the following:

+
+
+
+
Unresolved include directive in modules/ROOT/pages/solicitor.wiki/master-solicitor.adoc - include::licenses.xml[]
+
+
+
+

In Solicitor the data is read with the following reader config:

+
+
+
+
"readers" : [ {
+  "type" : "maven",
+  "source" : "file:target/generated-resources/licenses.xml",
+  "usagePattern" : "DYNAMIC_LINKING"
+} ]
+
+
+
+

(the above assumes that Solicitor is executed in the maven projects main directory)

+
+
+
+

26.68. CSV

+
+

The CSV input is normally manually generated and should look like this (The csv File is ";" separated):

+
+
+
+
Unresolved include directive in modules/ROOT/pages/solicitor.wiki/master-solicitor.adoc - include::csvlicenses.csv[]
+
+
+
+

In Solicitor the data is read with the following part of the config

+
+
+
+
"readers" : [ {
+  "type" : "csv",
+  "source" : "file:path/to/the/file.csv",
+  "usagePattern" : "DYNAMIC_LINKING"
+} ]
+
+
+
+

The following 5 columns need to be contained:

+
+
+
    +
  • +

    groupId

    +
  • +
  • +

    artifactId

    +
  • +
  • +

    version

    +
  • +
  • +

    license name

    +
  • +
  • +

    license URL

    +
  • +
+
+
+

In case that a component has multiple licenses attached, there needs to be a separate +line in the file for each license.

+
+
+
+

26.69. NPM

+
+

For NPM based projects either the NPM License Crawler (https://www.npmjs.com/package/npm-license-crawler) or the NPM License Checker (https://www.npmjs.com/package/license-checker) might be used. The NPM License Crawler can process several node packages in one run.

+
+
+
+

26.70. == NPM License Crawler

+
+

To install the NPM License Crawler the following command needs to be executed.

+
+
+
+
npm i npm-license-crawler -g
+
+
+
+

To get the licenses, the crawler needs to be executed like the following example

+
+
+
+
npm-license-crawler --dependencies --csv licenses.csv
+
+
+
+

The export should look like the following (The csv file is "," separated)

+
+
+
+
Unresolved include directive in modules/ROOT/pages/solicitor.wiki/master-solicitor.adoc - include::licenses.csv[]
+
+
+ +
+

In Solicitor the data is read with the following part of the config

+
+
+
+
"readers" : [ {
+  "type" : "npm-license-crawler-csv",
+  "source" : "file:path/to/licenses.csv",
+  "usagePattern" : "DYNAMIC_LINKING",
+  "repoType" : "npm"
+} ]
+
+
+
+
+

26.71. == NPM License Checker

+
+

To install the NPM License Checker the following command needs to be executed.

+
+
+
+
npm i license-checker -g
+
+
+
+

To get the licenses, the checker needs to be executed like the following example (we require JSON output here)

+
+
+
+
license-checker --json > /path/to/licenses.json
+
+
+
+

The export should look like the following

+
+
+
+
Unresolved include directive in modules/ROOT/pages/solicitor.wiki/master-solicitor.adoc - include::licensesNpmLicenseChecker.json[]
+
+
+ +
+

In Solicitor the data is read with the following part of the config

+
+
+
+
"readers" : [ {
+  "type" : "npm-license-checker",
+  "source" : "file:path/to/licenses.json",
+  "usagePattern" : "DYNAMIC_LINKING",
+  "repoType" : "npm"
+} ]
+
+
+
+
+

26.72. Gradle (Windows)

+
+

For the export of the licenses from a Gradle based project the Gradle License Plugin is used.

+
+
+

To install the plugin some changes need to be done in build.gradle, like following example

+
+
+
+
buildscript {
+  repositories {
+    maven { url 'https://oss.jfrog.org/artifactory/oss-snapshot-local/' }
+  }
+
+  dependencies {
+    classpath 'com.jaredsburrows:gradle-license-plugin:0.8.5-SNAPSHOT'
+  }
+}
+
+apply plugin: 'java-library'
+apply plugin: 'com.jaredsburrows.license'
+
+
+
+

Afterwards execute the following command in the console:

+
+
+

For Windows (Java Application)

+
+
+
+
gradlew licenseReport
+
+
+
+

The Export should look like this:

+
+
+
+
Unresolved include directive in modules/ROOT/pages/solicitor.wiki/master-solicitor.adoc - include::licenses.json[]
+
+
+ +
+

In Solicitor the data is read with the following part of the config

+
+
+
+
"readers" : [ {
+  "type" : "gradle2",
+  "source" : "file:path/to/licenses.json",
+  "usagePattern" : "DYNAMIC_LINKING"
+} ]
+
+
+
+ + + + + +
+ + +The former reader of type gradle is deprecated and should no longer be used. See List of Deprecated Features. +
+
+
+
+

26.73. Gradle (Android)

+
+

For the export of the licenses from a Gradle-based Android project the Gradle License Plugin is used.

+
+
+

To install the Plugin some changes need to be done in the build.gradle of the Project, like following example

+
+
+
+
buildscript {
+  repositories {
+    jcenter()
+  }
+
+  dependencies {
+    classpath 'com.jaredsburrows:gradle-license-plugin:0.8.5'
+  }
+}
+
+
+
+

There is also a change in the build.gradle of the app: add the following line as the second line.

+
+
+
+
apply plugin: 'com.android.application'
+
+
+
+

Afterwards execute the following command in the Terminal of Android studio: +For Windows(Android Application)

+
+
+
+
gradlew licenseDebugReport
+
+
+
+

The Export is in the following folder

+
+
+
+
$Projectfolder\app\build\reports\licenses
+
+
+
+

It should look like this:

+
+
+
+
Unresolved include directive in modules/ROOT/pages/solicitor.wiki/master-solicitor.adoc - include::licenseDebugReport.json[]
+
+
+ +
+

In Solicitor the Data is read with the following part of the config

+
+
+
+
"readers" : [ {
+      "type" : "gradle2",
+      "source" : "file:$/input/licenses.json",
+      "usagePattern" : "DYNAMIC_LINKING"
+   	} ]
+
+
+
+ + + + + +
+ + +The former reader of type gradle is deprecated and should no longer be used. See List of Deprecated Features. +
+
+
+
+

26.74. Working with Decision Tables

+
+

Solicitor uses the Drools rule engine to execute business rules. Business rules are +defined as "extended" decision tables. Each such decision table consists of two artifacts:

+
+
+
    +
  • +

    A rule template file in specific drools template format

    +
  • +
  • +

    An Excel (XLSX) table which defines the decision table data

    +
  • +
+
+
+

When processing, Solicitor will internally use the rule template to create one or multiple rules for every record found in the Excel sheet. The following points are important here:

+
+
+
    +
  • +

    Rule templates:

    +
    +
      +
    • +

      Rule templates should be regarded as part of the Solicitor implementation and should not be changed on an engagement level.

      +
    • +
    +
    +
  • +
  • +

    Excel decision table data

    +
    +
      +
    • +

      The Excel tables might be extended or changed on a per project level.

      +
    • +
    • +

      The rules defined by the tabular data will have decreasing "salience" (priority) from top to bottom

      +
    • +
    • +

      In general multiple rules defined within a table might fire for the same data to be processed; the definition of the rules within the rule template will normally ensure that once a rule from the decision table was processed no other rule from that table will be processed for the same data

      +
    • +
    • +

      The excel tables contain header information in the first row which is only there for documentation purposes; the first row is completely ignored when creating rules from the xls

      +
    • +
    • +

      The rows starting from the second row contain decision table data

      +
    • +
    • +

      The first "empty" row (which does not contain data in any of the defined columns) ends the decision table

      +
    • +
    • +

      Decision tables might use multiple condition columns which define the data that a rule matches. Often such conditions are optional: If left free in the Excel table the condition will be omitted from the rule conditions. This allows to define very specific rules (which only fire on exact data patterns) or quite general rules which get activated on large groups of data. Defining general rules further down in the table (with lower salience/priority) ensures that more specific rules get fired earlier. This even allows to define a default rule at the end of the table which gets fired if no other rule could be applied.

      +
    • +
    +
    +
  • +
  • +

    rule groups: Business rules are executed within groups. All rules resulting from a single decision table are assigned to the same rule group. The order of execution of the rule groups +is defined by the sequence of declaration in the config file. Processing of the current group will +be finished when there are no more rules to fire in that group. Processing of the next group will then start. Rule groups which have been finished processing will not be resumed even if rules within that group might have been activated again due to changes of the facts.

    +
  • +
+
+
+
+

26.75. Extended comparison syntax

+
+

By default any conditions given in the fields of decision tables are simple textual comparisons: The condition +is true if the property of the model is identical to the given value in the XLS sheet.

+
+
+

Depending on the configuration of the rule templates for some fields, an extended syntax might be available. For those fields the following syntax applies:

+
+
+
    +
  • +

    If the given value of the XLS field starts with the prefix NOT: then the outcome of the remaining condition is logically negated, i.e. this field condition is true if the rest of the condition is NOT fulfilled.

    +
  • +
  • +

    A prefix of REGEX: indicates that the remainder of the field defines a Java Regular Expression. For the condition to become true the whole property needs to match the given regular expression.

    +
  • +
  • +

    The prefix RANGE: indicates that the remainder of the field defines +a Maven Version Range. Using this makes only sense on the artifact version property.

    +
  • +
  • +

    If no such prefix is detected, then the behavior is identical to the normal (verbatim) comparison logic

    +
  • +
+
+
+

Fields which are subject to this extended syntax are marked explicitly in the following section.

+
+
+
+

26.76. Standard Business Rules

+
+

The processing of business rules is organized in different phases. Each phase might consist of multiple decision tables to be processed in order.

+
+
+
+

26.77. Phase 1: Determining assigned Licenses

+
+

In this phase the license data imported via the readers is cleaned and normalized. At the end of this phase the internal data model should clearly represent all components and their assigned licenses in normalized form.

+
+
+

The phase itself consists of two decision tables / rule groups:

+
+
+
+

26.78. == Decision Table: Explicitely setting Licenses

+
+

With this decision table it is possible to explicitly assign NormalizedLicenses to components. This will be used if the imported RawLicense data is either incomplete or incorrect. Items which have been processed by rules of this group will not be reprocessed by the next rule group.

+
+
+
    +
  • +

    LHS conditions:

    +
    +
      +
    • +

      Engagement.clientName

      +
    • +
    • +

      Engagement.engagementName

      +
    • +
    • +

      Application.applicationName

      +
    • +
    • +

      ApplicationComponent.groupId

      +
    • +
    • +

      ApplicationComponent.artifactId

      +
    • +
    • +

      ApplicationComponent.version

      +
    • +
    • +

      RawLicense.declaredLicense

      +
    • +
    • +

      RawLicense.url

      +
    • +
    +
    +
  • +
  • +

    RHS result:

    +
    +
      +
    • +

      NormalizedLicense.normalizedLicenseType

      +
    • +
    • +

      NormalizedLicense.normalizedLicense

      +
    • +
    • +

      NormalizedLicense.normalizedLicenseUrl

      +
    • +
    • +

      NormalizedLicense.comment

      +
    • +
    +
    +
  • +
+
+
+

: On these fields the Extended comparison syntax might be used

+
+
+

All RawLicenses which are in scope of fired rules will be marked so that they do not get reprocessed by the following decision table.

+
+
+
+

26.79. == Decision Table: Detecting Licenses from Imported Data

+
+

With this decision table the license info from the RawLicense is mapped to the NormalizedLicense. This is based on the name and/or URL of the license as imported via the readers.

+
+
+
    +
  • +

    LHS conditions:

    +
    +
      +
    • +

      RawLicense.declaredLicense

      +
    • +
    • +

      RawLicense.url

      +
    • +
    +
    +
  • +
  • +

    RHS result:

    +
    +
      +
    • +

      NormalizedLicense.normalizedLicenseType

      +
    • +
    • +

      NormalizedLicense.normalizedLicense

      +
    • +
    +
    +
  • +
+
+
+

: On these fields the Extended comparison syntax might be used

+
+
+
+

26.80. Phase 2: Selecting applicable Licenses

+
+

Within this phase the actually applicable licenses will be selected for each component.

+
+
+

This phase consists of two decision tables.

+
+
+
+

26.81. == Choosing specific License in case of Multi-Licensing

+
+

This group of rules has the speciality that it might match to a group of NormalizedLicenses associated to an ApplicationComponent. In case that multiple licenses are associated to an ApplicationComponent one of them might be selected as "effective" license and the others might be marked as Ignored.

+
+
+
    +
  • +

    LHS conditions:

    +
    +
      +
    • +

      ApplicationComponent.groupId

      +
    • +
    • +

      ApplicationComponent.artifactId

      +
    • +
    • +

      ApplicationComponent.version

      +
    • +
    • +

      NormalizedLicense.normalizedLicense (licenseToTake; mandatory)

      +
    • +
    • +

      NormalizedLicense.normalizedLicense (licenseToIgnore1; mandatory)

      +
    • +
    • +

      NormalizedLicense.normalizedLicense (licenseToIgnore2; optional)

      +
    • +
    • +

      NormalizedLicense.normalizedLicense (licenseToIgnore3; optional)

      +
    • +
    +
    +
  • +
  • +

    RHS result

    +
    +
      +
    • +

      license matching "licenseToTake" will get this value assigned to effectiveNormalizedLicense

      +
    • +
    • +

      licenses matching "licenseToIgnoreN" will get IGNORED assigned to effectiveNormalizedLicenseType Ignored assigned to effectiveNormalizedLicense

      +
    • +
    +
    +
  • +
+
+
+

: On these fields the Extended comparison syntax might be used

+
+
+

It is important to note that the rules only match, if all licenses given in the conditions actually exist and are assigned to the same ApplicationComponent.

+
+
+
+

26.82. == Selecting / Overriding applicable License

+
+

The second decision table in this group is used to define the effectiveNormalizedLicense (if not already handled by the decision table before).

+
+
+
    +
  • +

    LHS conditions:

    +
    +
      +
    • +

      ApplicationComponent.groupId

      +
    • +
    • +

      ApplicationComponent.artifactId

      +
    • +
    • +

      ApplicationComponent.version

      +
    • +
    • +

      NormalizedLicense.normalizedLicenseType

      +
    • +
    • +

      NormalizedLicense.normalizedLicense

      +
    • +
    +
    +
  • +
  • +

    RHS result:

    +
    +
      +
    • +

      NormalizedLicense.effectiveNormalizedLicenseType (if empty in the decision table then the value of normalizedLicenseType will be taken)

      +
    • +
    • +

      NormalizedLicense.effectiveNormalizedLicense (if empty in the decision table then the value of normalizedLicense will be taken)

      +
    • +
    • +

      NormalizedLicense.effectiveNormalizedLicenseUrl (if empty in the decision table then the value of normalizedLicenseUrl will be taken)

      +
    • +
    +
    +
  • +
+
+
+

: On these fields the Extended comparison syntax might be used

+
+
+
+ +
+

The third phase is the legal evaluation of the licenses and the check whether OSS usage is according to defined legal policies. Again this phase comprises two decision tables.

+
+
+
+

26.84. == Pre-Evaluation based on common rules

+
+

Within the pre-evaluation the license info is checked against standard OSS usage policies. This roughly qualifies the usage and might already determine licenses which are OK in any case or which need to be further evaluated. Furthermore, they qualify whether the license text or source code needs to be included in the distribution. The rules in this decision table are only based on the effectiveNormalizedLicense and do not consider any project, application or component information.

+
+
+
    +
  • +

    LHS condition:

    +
    +
      +
    • +

      NormalizedLicense.effectiveNormalizedLicenseType

      +
    • +
    • +

      NormalizedLicense.effectiveNormalizedLicense

      +
    • +
    +
    +
  • +
  • +

    RHS result:

    +
    +
      +
    • +

      NormalizedLicense.legalPreApproved

      +
    • +
    • +

      NormalizedLicense.copyLeft

      +
    • +
    • +

      NormalizedLicense.licenseCompliance

      +
    • +
    • +

      NormalizedLicense.licenseRefUrl

      +
    • +
    • +

      NormalizedLicense.includeLicense

      +
    • +
    • +

      NormalizedLicense.includeSource

      +
    • +
    +
    +
  • +
+
+
+
+

26.85. == Final evaluation

+
+

The decision table for final legal evaluation defines all rules which are needed +to create the result of the legal evaluation. Rules here might be general for all projects or even very specific to a project if the rule can not be applied to other projects.

+
+
+
    +
  • +

    LHS condition:

    +
    +
      +
    • +

      Engagement.clientName

      +
    • +
    • +

      Engagement.engagementName

      +
    • +
    • +

      Engagement.customerProvidesOss

      +
    • +
    • +

      Application.applicationName

      +
    • +
    • +

      ApplicationComponent.groupId

      +
    • +
    • +

      ApplicationComponent.artifactId

      +
    • +
    • +

      ApplicationComponent.version

      +
    • +
    • +

      ApplicationComponent.usagePattern

      +
    • +
    • +

      ApplicationComponent.ossModified

      +
    • +
    • +

      NormalizedLicense.effectiveNormalizedLicenseType

      +
    • +
    • +

      NormalizedLicense.effectiveNormalizedLicense

      +
    • +
    +
    +
  • +
  • +

    RHS result:

    +
    +
      +
    • +

      NormalizedLicense.legalApproved

      +
    • +
    • +

      NormalizedLicense.legalComments

      +
    • +
    +
    +
  • +
+
+
+

: On these fields the Extended comparison syntax might be used

+
+
+
+

26.86. Amending the builtin decision tables with own rules

+
+

The standard process as described before consists of 6 decision tables / rule +groups to be processed in sequence. When using the builtin base configuration all those decision tables use the internal sample data / rules as contained in Solicitor.

+
+
+

To use your own rule data there are three approaches:

+
+
+
    +
  • +

    Include your own rules section in the project configuration file (so not inheriting from +the builtin base configuration file) and reference your own decision tables there.

    +
  • +
  • +

    Create your own "Solicitor Extension" which might completely redefine/replace the builtin Solicitor setup including all decision tables and the base configuration file. See Extending Solicitor for details.

    +
  • +
  • +

    Make use of the optional project specific decision tables which are defined in the default base configuration: For every builtin decision table there is an optional external decision table (expected in the filesystem) which will be checked for existence. If such an external decision table exists it will be processed first - before processing the builtin decision table. Thus it is possible to amend / override the builtin rules by project specific rules. When you create the starter configuration of your project as described in Starting a new project, those project specific decision tables are automatically created.

    +
  • +
+
+
+
+

26.87. Reporting / Creating output documents

+
+

After applying the business rules the resulting data can be used to create reports and +other output documents.

+
+
+

Creating such reports consists of three steps:

+
+
+
    +
  • +

    transform and filter the model data by using an embedded SQL database

    +
  • +
  • +

    determining difference to previously stored model (optional)

    +
  • +
  • +

    Template based reporting via

    +
    +
      +
    • +

      Velocity templates (for textual output like e.g. HTML)

      +
    • +
    • +

      Excel templates

      +
    • +
    +
    +
  • +
+
+
+
+

26.88. SQL transformation and filtering

+ +
+
+

26.89. == Database structure

+
+

After the business rules have been processed (or a Solicitor data model has been loaded via +command line option -l) the model data is stored in a dynamically created internal SQL database.

+
+
+
    +
  • +

    For each type of model object a separate table is created. The tablename is the name of model object type written in uppercase characters. (E.g. type NormalizedLicense stored in table NORMALIZEDLICENSE)

    +
  • +
  • +

    All properties of the model objects are stored as strings in fields named like the properties within the database table. Field names are case sensitive (see note below for handling this in SQL statements).

    +
  • +
  • +

    An additional primary key is defined for each table, named ID_<TABLENAME>.

    +
  • +
  • +

    For all model elements that belong to some parent in the object hierarchy (i.e. all objects except ModelRoot) a foreign key field is added named PARENT_<TABLENAME> which contains the unique key of the corresponding parent

    +
  • +
+
+
+
+

26.90. == SQL queries for filtering and transformation

+
+

Each Writer configuration (see [Writers / Reporting]) includes a section which references SQL select statements that are applied on the database data. The result of the SQL select statements is made accessible for the subsequent processing of the Writer via the dataTable name given in the configuration.

+
+
+
+

26.91. == Postprocessing of data selected from the database tables

+
+

Before the result of the SQL select statement is handed over to the Writer the following postprocessing +is done:

+
+
+
    +
  • +

    a rowCount column is added to the result which gives the position of the entry in the result set (starting with 1).

    +
  • +
  • +

    Columns named ID_<TABLENAME> are replaced with columns named OBJ_<TABLENAME>. The fields of those columns are filled with the corresponding original model objects (java objects).

    +
  • +
+
+
+ + + + + +
+ + +The result table column OBJ_<TABLENAME> gives access to the native Solicitor data model (java objects), e.g. in the Velocity writer. As this breaks the decoupling done via the SQL database, using this feature is explicitly discouraged. It should only be used with high caution and in exceptional situations. The feature might be discontinued in future versions without prior notice. +
+
+
+
+

26.92. Determining difference to previously stored model

+
+

When using the command line option -d Solicitor can determine difference information between two different data models (e.g. the difference between the licenses of the current release and a former release.) The difference is calculated on the result of the above described SQL statements:

+
+
+
    +
  • +

    First the internal reporting database is created for the current data model and all defined SQL statements are executed

    +
  • +
  • +

    Then the internal database is recreated for the "old" data model and all defined SQL statements are executed again

    +
  • +
  • +

    Finally for each defined result table the difference between the current result and the "old" result +is calculated

    +
  • +
+
+
+

To correctly correlate corresponding rows of the two different versions of table data it is necessary to define explicit correlation keys for each table in the SQL select statement. +It is possible to define up to 10 correlation keys named CORR_KEY_X with X in the range from 0 to 9. CORR_KEY_0 has highest priority, CORR_KEY_9 has lowest priority.

+
+
+

The correlation algorithm will first try to match rows using CORR_KEY_0. It will then attempt to correlate unmatched rows using CORR_KEY_1 etc. Correlation will stop when

+
+
+
    +
  • +

    all correlation keys CORR_KEY_0 to CORR_KEY_9 have been processed OR

    +
  • +
  • +

    the required correlation key column does not exist in the SQL select result OR

    +
  • +
  • +

    there are no unmatched "new" rows OR

    +
  • +
  • +

    there are no unmatched "old" rows

    +
  • +
+
+
+

The result of the correlation / difference calculation is stored in the reporting table data structure. For each row the status is accessible, indicating whether

+
+
+
    +
  • +

    The row is "new" (did not exist in the old data)

    +
  • +
  • +

    The row is unchanged (no changes in the field values representing the properties of the Solicitor data model)

    +
  • +
  • +

    The row is changed (at least one field corresponding to the Solicitor data model changed)

    +
  • +
+
+
+

For each field of "changed" or "unchanged" rows the following status is available:

+
+
+
    +
  • +

    Field is "changed"

    +
  • +
  • +

    Field is "unchanged"

    +
  • +
+
+
+

For each field of such rows it is furthermore possible to access the new and the old field value.

+
+
+
+

26.93. Sample SQL statement

+
+

The following shows a sample SQL statement showing some join over multiple tables and the use of correlation keys.

+
+
+
+
Unresolved include directive in modules/ROOT/pages/solicitor.wiki/master-solicitor.adoc - include::../resources/com/devonfw/tools/solicitor/sql/allden_normalizedlicenses.sql[]
+
+
+
+ + + + + +
+ + +Above example also shows how the case sensitive column names have to be handled within the SQL +
+
+
+
+

26.94. Writers

+
+

The above described SQL processing is identical for all Writers. Writers only differ in the way the output document is created based on a template and the reporting table data obtained by the SQL transformation.

+
+
+
+

26.95. == Velocity Writer

+
+

The Velocity Writer uses the Apache Velocity Templating Engine to create text based reports. The reporting data tables created by the SQL transformation are directly put into the Velocity Context.

+
+
+

For further information see the

+
+
+
    +
  • +

    Velocity Documentation

    +
  • +
  • +

    The Solicitor JavaDoc (which also includes details on how to access the diff information for rows and fields of reporting data tables)

    +
  • +
  • +

    The samples included in Solicitor

    +
  • +
+
+
+
+

26.96. == Excel Writer

+ +
+
+

26.97. == Using Placeholders in Excel Spreadsheets

+
+

Within Excel spreadsheet templates there are two kinds of placeholders / markers possible, which control the processing:

+
+
+
+

26.98. == Iterator Control

+
+

The templating logic searches within the XLSX workbook for fields containing the names of the +reporting data tables as defined in the Writer configuration like e.g.:

+
+
+
    +
  • +

    #ENGAGEMENT#

    +
  • +
  • +

    #LICENSE#

    +
  • +
+
+
+

Whenever such a string is found in a cell this indicates that this row is a template row. For each entry in the respective reporting data table a copy of this row is created and the attribute replacement will be done with the data from that reporting table. (The pattern #…​# will be removed when copying.)

+
+
+
+

26.99. == Attribute replacement

+
+

Within each row which was copied in the previous step the templating logic searches for the string pattern $someAttributeName$ where someAttributeName corresponds to the column names of the reporting table. Any such occurrence is replaced with the corresponding data value.

+
+
+
+

26.100. == Representation of Diff Information

+
+

In case that a difference processing (new vs. old model data) was done this will be represented +as follows when using the XLS templating:

+
+
+
    +
  • +

    For rows that are "new" (so no corresponding old row available) an Excel note indicating that this row is new will be attached to the field that contained the #…​# placeholder.

    +
  • +
  • +

    Fields in non-new rows that have changed their value will be marked with an Excel note indicating the old value.

    +
  • +
+
+
+
+

26.101. Resolving of License URLs

+
+

Resolving of the content of license texts which are referenced by the URLs given in NormalizedLicense is done in the following way:

+
+
+
    +
  • +

    If the content is found as a resource in the classpath under licenses this will be taken. (The Solicitor application might include a set of often used license texts and thus it is not necessary to fetch those via the net.) If the classpath does not contain the content of the URL the next step is taken.

    +
  • +
  • +

    If the content is found as a file in subdirectory licenses of the current working directory this is taken. If no such file exists the content is fetched via the net. The result will be written to the file directory, so any content will only be fetched once. (The user might alter the files in that directory to change/correct its content.) A file of length zero indicates that no content could be fetched.

    +
  • +
+
+
+
+

26.102. Encoding of URLs

+
+

When creating the resource or filename for given URLs in the above steps the following encoding scheme will be applied to ensure that always a valid name can be created:

+
+
+

All "non-word" characters (i.e. characters outside the set [a-zA-Z_0-9]) are replaced by underscores (“_”).

+
+
+
+

26.103. Feature Deprecation

+
+

Within the lifecycle of the Solicitor development features might be discontinued due +to various reasons. In case that such discontinuation is expected to break existing projects +a two stage deprecation mechanism is used:

+
+
+
    +
  • +

    Stage 1: Usage of a deprecated feature will produce a warning only giving details on +what needs to be changed.

    +
  • +
  • +

    Stage 2: When a deprecated feature is used Solicitor by default will terminate with an error +message giving information about the deprecation.

    +
  • +
+
+
+

By setting the property solicitor.deprecated-features-allowed to true +(e.g. via the command line, see Configuration of Technical Properties), even in second stage +the feature will still be available and only a warning will be logged. The project setup should in any +case ASAP be changed to no longer use the feature as it might soon be removed without further +notice.

+
+
+ + + + + +
+ + +Enabling the use of deprecated feature via the above property should only be +a temporary workaround and not a standard setting. +
+
+
+ + + + + +
+ + +If usage of a feature should be discontinued immediately (e.g. because it might lead to +wrong/misleading output) the first stage of deprecation will be skipped. +
+
+
+
+

26.104. List of Deprecated Features

+
+

The following features are deprecated via the above mechanism:

+
+
+ +
+
+
+

Appendix A: Default Base Configuration

+
+

The builtin default base configuration contains settings for the rules and writers section +of the Solicitor configuration file which will be used if the project specific config file omits those sections.

+
+
+
Listing 152. Default Configuration
+
+
Unresolved include directive in modules/ROOT/pages/solicitor.wiki/master-solicitor.adoc - include::../resources/com/devonfw/tools/solicitor/config/solicitor_base.cfg[]
+
+
+
+
+

Appendix B: Built in Default Properties

+
+

The following lists the default settings of technical properties as given by the built in application.properties file.

+
+
+

If required these values might be overridden on the command line when starting Solicitor:

+
+
+
+
java -Dpropertyname1=value1 -Dpropertyname2=value2 -jar solicitor.jar <any other arguments>
+
+
+
+
Listing 153. application.properties
+
+
Unresolved include directive in modules/ROOT/pages/solicitor.wiki/master-solicitor.adoc - include::../resources/application.properties[]
+
+
+
+
+

Appendix C: Extending Solicitor

+
+

Solicitor comes with a sample rule data set and sample reporting templates. In general it will be required to correct, supplement and extend these data sets and templates. This can be done straightforwardly by creating copies of the appropriate resources (rule data XLS and template files), adapting them and then referencing those copies instead of the original resources from the project configuration file.

+
+
+

Even though this approach is possible it will result in hard to maintain configurations, +especially in the case of multiple projects using Solicitor in parallel.

+
+
+

To support such scenarios Solicitor provides an easy extension mechanism which allows +to package all those customized configurations into a single archive and reference it from the +command line when starting Solicitor.

+
+
+

This facilitates configuration management, distribution and deployment of such extensions.

+
+
+
+

26.105. Format of the extension file

+
+

The extensions might be provided as a JAR file or even as a simple ZIP file. There is only one mandatory file which contains (at least) metadata about the extension and which needs to be included in this archive in the root folder.

+
+
+
Listing 154. application-extension.properties
+
+
Unresolved include directive in modules/ROOT/pages/solicitor.wiki/master-solicitor.adoc - include::../resources/samples/application-extension.properties[]
+
+
+
+

This file is included via the standard Spring Boot profile mechanism. Besides containing +naming and version info on the extension this file might override any +property values defined within Solicitor.

+
+
+

Any other resources (like rule data or templates) which need to be part of the Extension +can be included in the archive as well - either in the root directory or any subdirectories. +If the extension is active those resources will be available on the classpath like any +resources included in the Solicitor jar.

+
+
+

Overriding / redefining the default base configuration within the Extension enables to +update all rule data and templates without the need to touch the projects configuration +file.

+
+
+
+

26.106. Activating the Extension

+
+

The Extension will be activated by referencing it as follows when starting Solicitor:

+
+
+
+
java -Dloader.path=path/to/the/extension.zip -jar solicitor.jar <any other arguments>
+
+
+
+
+

Appendix D: Release Notes

+
+
+
Changes in 1.1.1
+
+
+
    +
  • +

    Corrected order of license name mapping which prevented Unlicense, The W3C License, WTFPL, Zlib and +Zope Public License 2.1 to be mapped.

    +
  • +
+
+
+
Changes in 1.1.0
+
+
+
    +
  • +

    https://github.com/devonfw/solicitor/issues/67: Inclusion of detailed license information for the +dependencies included in the executable JAR. Use the '-eug' command line option to store this file +(together with a copy of the user guide) in the current work directory.

    +
  • +
  • +

    Additional rules for license name mappings in decision table LicenseNameMappingSample.xls.

    +
  • +
  • +

    https://github.com/devonfw/solicitor/pull/61: Solicitor can now run with Java 8 or Java 11.

    +
  • +
+
+
+
Changes in 1.0.8
+
+
+
    +
  • +

    https://github.com/devonfw/solicitor/issues/62: New Reader of type npm-license-checker +for reading component/license data collected by NPM License Checker (https://www.npmjs.com/package/license-checker). +The type of the existing Reader for reading CSV data from the NPM License Crawler has been changed from npm +to npm-license-crawler-csv. (npm is still available but deprecated.) Projects should adopt their Reader +configuration and replace type npm by npm-license-crawler-csv.

    +
  • +
+
+
+
Changes in 1.0.7
+
+
+
    +
  • +

    https://github.com/devonfw/solicitor/issues/56: Enable continuing analysis in multiapplication projects even if some license files are unavailable.

    +
  • +
  • +

    Described simplified usage of license-maven-plugin without need to change pom.xml. (Documentation only)

    +
  • +
  • +

    Ensure consistent sorting even in case that multiple "Ignored" licenses exist for a component

    +
  • +
+
+
+
+
+
+
+

26.107. devonfw shop floor

+
+

devonfw-shop-floor

+
+
+
+
+
+

27. CI/CD

+
+
+

There are different CICD tools:

+
+
+

production-line-link

+
+
+

==cicdgen

+
+ +
+

==CICDGEN

+
+
+

cicdgen is a devonfw tool for generating all code/files related to CICD. It will include/modify in your project all files that the project needs to run a Jenkins CICD pipeline, to create a docker image based on your project, etc. It’s based on angular schematics, so you can add it as a dependency into your project and generate the code using ng generate. In addition, it has its own CLI for those projects that are not angular based.

+
+
+

27.1. What is angular schematics?

+
+

Schematics are generators that transform an existing filesystem. They can create files, refactor existing files, or move files from one place to another.

+
+
+

What distinguishes Schematics from other generators, such as Yeoman or Yarn Create, is that schematics are purely descriptive; no changes are applied to the actual filesystem until everything is ready to be committed. There is no side effect, by design, in Schematics.

+
+
+
+

27.2. cicdgen CLI

+
+

In order to know more about how to use the cicdgen CLI, you can check the CLI page

+
+
+
+

27.3. cicdgen Schematics

+
+

In order to know more about how to use the cicdgen schematics, you can check the schematics page

+
+
+
+

27.4. Usage example

+
+

A specific page about how to use cicdgen is also available.

+
+
+
+

27.5. cicdgen CLI

+ +
+
CICDGEN CLI
+
+

cicdgen is a command line interface that helps you with some CICD tasks in a devonfw project. At this moment we can only generate files related to CICD in a project but we plan to add more functionality in the future.

+
+
+
Installation
+
+
+
$ npm i -g @devonfw/cicdgen
+
+
+
+
+
Usage
+
+Global arguments +
+
    +
  • +

    --version

    +
    +
    +
    Prints the cicdgen version number
    +
    +
    +
  • +
  • +

    --help

    +
    +
    +
    Shows the usage of the command
    +
    +
    +
  • +
+
+
+
+Commands +
+Generate. +
+

This command wraps the usage of the angular schematics CLI. With this we generate files in an easy way and also print better help about usage.

+
+
+

Available schematics that generate the code:

+
+
+ +
+
+
+
+Examples +
+
    +
  • +

    Generate all CICD files related to a devon4j project

    +
    +
    +
    $ cicdgen generate devon4j
    +
    +
    +
  • +
  • +

    Generate all CICD files related to a devon4ng project with docker deployment.

    +
    +
    +
    $ cicdgen generate devon4ng --groupid com.devonfw --docker --registryurl docker-registry-devon.s2-eu.capgemini.com
    +
    +
    +
  • +
  • +

    Generate all CICD files related to a devon4node project with OpenShift deployment.

    +
    +
    +
    $ cicdgen generate devon4ng --groupid com.devonfw --openshift --registryurl docker-registry-devon.s2-eu.capgemini.com --ocname default --ocn devonfw
    +
    +
    +
  • +
+
+ +
+
+
+
+
cicdgen usage example
+
+

In this example we are going to show how to use cicdgen step by step in a devon4ng project.

+
+
+
    +
  1. +

    Install cicdgen

    +
    +

    cicdgen is already included in the devonfw distribution, but if you want to use it outside the devonfw console you can execute the following command:

    +
    +
    +
    +
    $ npm i -g cicdgen
    +
    +
    +
  2. +
  3. +

    Generate a new devon4ng project using devonfw ide.

    +
    +

    Inside a devonfw ide distribution execute the command (devon ng create <app-name>):

    +
    +
    +
    +
    $ devon ng create devon4ng
    +
    +
    +
  4. +
  5. +

    Execute cicdgen generate command

    +
    +

    As we want to send notifications to MS Teams, we need to create the connector first:

    +
    +
    +
    +
    +
      +
    • +

      Go to a channel in teams and click at the connectors button. Then click at the jenkins configure button.

      +
      +

      teams 1

      +
      +
    • +
    • +

      Put a name for the connector

      +
      +

      teams 2

      +
      +
    • +
    • +

      Copy the name and the Webhook URL, we will use it later.

      +
      +

      teams 3

      +
      +
    • +
    +
    +
    +
    +
    +

    With the values that we get in the previous steps, we will execute the cicdgen command inside the project folder. If you have any doubt you can use the help.

    +
    +
    +

    help 1

    +
    +
    +

    help 2

    +
    +
    +
    +
    $ cicdgen generate devon4ng --groupid com.devonfw --docker --dockerurl tpc://127.0.0.1:2376 `--registryurl docker-registry-devon.s2-eu.capgemini.com --teams --teamsname devon4ng --teamsurl https://outlook.office.com/webhook/...`
    +
    +
    +
    +

    cicdgen command

    +
    +
  6. +
  7. +

    Create a git repository and upload the code

    +
    +

    gitlab

    +
    +
    +

    gitlab 2

    +
    +
    +
    +
    $ git remote add origin https://devon.s2-eu.capgemini.com/gitlab/darrodri/devon4ng.git
    +$ git push -u origin master
    +
    +
    +
    +

    push code

    +
    +
    +

    As you can see, no git init or git commit is required, cicdgen does it for you.

    +
    +
  8. +
  9. +

    Create a multibranch-pipeline in Jenkins

    +
    +

    new pipeline

    +
    +
    +

    When you push the save button, it will download the repository and execute the pipeline defined in the Jenkinsfile. If you get any problem, check the environment variables defined in the Jenkinsfile. Here we show all variables related with Jenkins:

    +
    +
    +
    +
    +
      +
    • +

      chrome

      +
      +

      chrome stable

      +
      +
    • +
    • +

      sonarTool

      +
      +

      sonar tool

      +
      +
    • +
    • +

      sonarEnv

      +
      +

      sonar env

      +
      +
    • +
    • +

      repositoryId

      +
      +

      repository id

      +
      +
    • +
    • +

      globalSettingsId

      +
      +

      global settings id

      +
      +
    • +
    • +

      mavenInstallation

      +
      +

      maven installation

      +
      +
    • +
    • +

      dockerTool

      +
      +

      docker global

      +
      +
    • +
    +
    +
    +
    +
  10. +
  11. +

    Add a webhook in GitLab

    +
    +

    In order to run the pipeline every time that you push code to GitLab, you need to configure a webhook in your repository.

    +
    +
    +

    gitlab webhook

    +
    +
  12. +
+
+
+

Now your project is ready to work following a CICD strategy.

+
+
+

The last thing to take into account is the branch naming. We prepare the pipeline in order to work following the git-flow strategy. So all stages of the pipeline will be executed for the branches: develop, release/*, master. For the branches: feature/*, hotfix/*, bugfix/* only the steps related to unit testing will be executed.

+
+
+
+
+

27.6. cicdgen Schematics

+ +
+
CICDGEN SCHEMATICS
+
+

We use angular schematics to create and update an existing devonfw project in order to adapt it to a CICD environment. All schematics are prepared to work with Production Line, a Capgemini CICD platform, but they can also work in other environments which have the following tools:

+
+
+
    +
  • +

    Jenkins

    +
  • +
  • +

    GitLab

    +
  • +
  • +

    Nexus 3

    +
  • +
  • +

    SonarQube

    +
  • +
+
+
+

The list of available schematics are:

+
+
+ +
+
+
How to run the schematics
+
+

You can run the schematics using the schematics CLI provided by the angular team, but the easiest way to run it is using the cicdgen CLI which is a wrapper for the schematics CLI in order to use it in an easy way.

+
+
+

To generate files you only need to run the command

+
+
+
+
$ cicdgen generate <schematic-name> [arguments]
+
+
+
+

<schematic-name> is the name of the schematic that you want to execute.

+
+
+

You can find all information about arguments in the schematic section.

+
+ +
+
+
Merge Strategies
+
+

When you execute cicdgen in a project, it is possible that you already have some files that cicdgen will generate. Until version 1.5 the behaviour in these cases was to throw an error and not create/modify any file. Since version 1.6 you can choose what to do in case of conflict. In this page we will explain how to choose a merge strategy and how it works.

+
+
+Choose a merge strategy +
+

To choose a merge strategy, you must pass to cicdgen the merge parameter followed by the name of the strategy. The strategies available are: error, keep, override, combine.

+
+
+

Example:

+
+
+
+
$ cicdgen generate devon4j --merge keep
+
+
+
+
+Merge strategies +
+
    +
  • +

    error: The error strategy is the same as until version 1.5, throwing an error and not creating/modifying any file. This is the default value; if you do not pass the merge parameter this value will be taken.

    +
  • +
  • +

    keep: The keep strategy will keep the actual content of your files in case of conflict. If there is no conflict, the file will be created with the new content.

    +
  • +
  • +

    override: The override strategy will override your current files, without throwing any error, and create a new ones with the new content. If there is no conflict, the file will be created with the new content.

    +
  • +
  • +

    combine: The combine strategy will create a new file combining the current content with the new content. In order to combine both files, it will apply a diff algorithm and it will show the conflicts in the same way that git does. If there is no conflict, the file will be created with the new content.

    +
    +

    By resolving the conflicts in the same way as git, you can use the same tools in order to solve them. For example, you can use VSCode:

    +
    +
    +

    merge combine vscode

    +
    +
  • +
+
+
+

Examples:

+
+
+

keep +Current file:

+
+
+
+
Line 1
+Line 2
+Line 3
+Line 4
+
+
+
+

New file:

+
+
+
+
Line 5
+Line 2
+Line 3
+Line 4
+
+
+
+

The result will be:

+
+
+
+
Line 1
+Line 2
+Line 3
+Line 4
+
+
+
+

override +Current file:

+
+
+
+
Line 1
+Line 2
+Line 3
+Line 4
+
+
+
+

New file:

+
+
+
+
Line 5
+Line 2
+Line 3
+Line 4
+
+
+
+

The result will be:

+
+
+
+
Line 5
+Line 2
+Line 3
+Line 4
+
+
+
+

combine +Current file:

+
+
+
+
Line 1
+Line 2
+Line 3
+Line 4
+
+
+
+

New file:

+
+
+
+
Line 5
+Line 2
+Line 3
+Line 4
+
+
+
+

The result will be:

+
+
+
+
<<<<<<< HEAD
+Line 1
+=======
+Line 5
+>>>>>>> new_content
+Line 2
+Line 3
+Line 4
+
+
+ +
+
+
+
+
devon4j schematic
+
+

With the cicdgen generate devon4j command you will be able to generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+
devon4j schematic arguments
+
+

When you execute the cicdgen generate devon4j command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will also be generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will also be generated. For more details see OpenShift section of Jenkinsfile and files generated for docker (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used for registering your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
  • +

    --merge

    +
    +

    If you have used cicdgen previously, you can choose what you want to do in case of file conflict. The default behavior is to throw an error and not modify any file. You can see the other strategies on their specific page.

    +
    +
  • +
  • +

    --commit

    +
    +

    If true, all changes will be committed at the end of the process (if possible). In order to send a false value, you need to write --commit=false

    +
    +
  • +
+
+
+
+
Devon4ng generated files
+
+

When you execute the generate devon4ng command, some files will be added/updated in your project.

+
+
+Files +
+
    +
  • +

    .gitignore

    +
    +

    Defines all files that git will ignore. e.g: compiled files, IDE configurations. It will download the content from: https://gitignore.io/api/java,maven,eclipse,intellij,intellij+all,intellij+iml,visualstudiocode

    +
    +
  • +
  • +

    pom.xml

    +
    +

    The pom.xml is modified in order to add, if needed, the distributionManagement.

    +
    +
  • +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which defines the Jenkins pipeline of our project. With this we can execute the tests, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case you may need to configure them properly.

        +
      • +
      • +

        Java 11 installed in Jenkins as a global tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker to deploy:

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        • +

          A docker network called application.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift to deploy:

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit of the syntax of the Jenkinsfile, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options to the pipeline. By default, we add a build discarded to delete old artifacts/build of the pipeline and also we disable the concurrent builds.

        +
        +

        If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update them manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: Load some custom tools that can not be loaded in the tools section. Also set some variables depending on the git branch which you are executing. Also, we properly set the version number in all pom files. It means that if your branch is develop, your version should end with the word -SNAPSHOT; otherwise, if -SNAPSHOT is present it will be removed.

          +
        • +
        • +

          Fresh Dependency Installation: install all packages needed to build/run your java project.

          +
        • +
        • +

          Unit Tests: execute the mvn test command.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Deliver application into Nexus: build the project and send all bundle files to Nexus3.

          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
            +

            +
            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+
devon4j Docker generated files
+
+

When you generate the files for a devon4j project you can also pass the option --docker. It will also generate some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4node --groupid com.devonfw --docker --registryurl docker-registry-test.s2-eu.capgemini.com. +
+
+
+Files +
+
    +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
    +

    This build is using a multi-stage build. First, it uses a maven image in order to compile the source code, then it uses a java image to run the application. With the multi-stage build we keep the final image as clean as possible.

    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling again the code, it takes the compiled war from Jenkins to the image.

    +
    +
  • +
+
+ +
+
+
+
+
devon4ng schematic
+
+

With the cicdgen generate devon4ng command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+
devon4ng schematic arguments
+
+

When you execute the cicdgen generate devon4ng command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for OpenShift (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used to register your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --groupid

    +
    +

    The project groupId. This argument is required. It will be used to store the project in a maven repository at Nexus 3. Why maven? Because it is the kind of repository where we can upload/download a zip file easily. An npm repository needs a package.json file but, as we compile the angular application to static javascript and html files, the package.json is not needed anymore.

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
  • +

    --merge

    +
    +

    If you have used cicdgen previously, you can choose what you want to do in case of file conflict. The default behavior is to throw an error and not modify any file. You can see the other strategies on their specific page.

    +
    +
  • +
  • +

    --commit

    +
    +

    If true, all changes will be committed at the end of the process (if possible). In order to send a false value, you need to write --commit=false

    +
    +
  • +
+
+
+
+
devon4ng generated files
+
+

When you execute the generate devon4ng command, some files will be added/updated in your project.

+
+
+Files +
+
    +
  • +

    angular.json

    +
    +

    The angular.json is modified in order to change the compiled files destination folder. Now, when you make a build of your project, the compiled files will be generated into dist folder instead of dist/<project-name> folder.

    +
    +
  • +
  • +

    package.json

    +
    +

    The package.json is modified in order to add a script for testing the application using Chrome Headless instead of a regular chrome. This script is called test:ci.

    +
    +
  • +
  • +

    karma.conf.js

    +
    +

    The karma.conf.js is also modified in order to add Chrome Headless as a browser to execute tests. The coverage output folder is changed to ./coverage instead of ./coverage/<project-name>

    +
    +
  • +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which defines the Jenkins pipeline of our project. With this we can execute the test, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        NodeJS installed in Jenkins as a global tool.

        +
      • +
      • +

        Google Chrome installed in Jenkins as a global custom tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker :

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        • +

          A docker network called application.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift :

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkins, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarded to delete old artifacts/build of the pipeline and also we disable the concurrent builds.

        +
        +

        [[jenkinsfile-teams]] +If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        tools: Here we define the global tools configurations. By default a version of nodejs is added here.

        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update them manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: in this stage some custom tools are loaded. Also we set some variables depending on the git branch which you are executing.

          +
        • +
        • +

          Fresh Dependency Installation: install all packages needed to build/run your angular project.

          +
        • +
        • +

          Code Linting: execute the linter analysis.

          +
        • +
        • +

          Execute Angular tests: execute the angular test in a Chrome Headless.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Build Application: compile the application to be ready to deploy in a web server.

          +
        • +
        • +

          Deliver application into Nexus: store all compiled files in Nexus3 as a zip file.

          +
          +

          [[jenkinsfile-docker]]

          +
          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
            +

            [[jenkinsfile-openshift]]

            +
            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+
devon4ng Docker generated files
+
+

When you generate the files for a devon4ng you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4ng --groupid com.devonfw --docker --registryurl docker-registry-test.s2-eu.capgemini.com. +
+
+
+Files +
+
    +
  • +

    .dockerignore

    +
    +

    In this file are defined the folders that will not be copied to the docker image. For more information read the official documentation.

    +
    +
  • +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
    +

    This build is using a multi-stage build. First, it uses a node image in order to compile the source code, then it uses a nginx image as a web server for our devon4ng application. With the multi-stage build we avoid everything related to node.js in our final image, where we only have a nginx with our application compiled.

    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling again the code, it takes all compiled files and the nginx.conf from Jenkins to the image.

    +
    +
  • +
+
+ +
+
+
+
+
devon4net schematic
+
+

With the cicdgen generate devon4net command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+
devon4net schematic arguments
+
+

When you execute the cicdgen generate devon4net command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --appname

    +
    +

    The name of your devon4net application.

    +
    +
  • +
  • +

    --appversion

    +
    +

    The initial version of your devon4net application

    +
    +
  • +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for OpenShift (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used to register your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --groupid

    +
    +

    The project groupId. This argument is required. It will be used to store the project in a maven repository at Nexus 3. Why maven? Because it is the kind of repository where we can upload/download a zip file easily. An npm repository needs a package.json file but, as we compile the angular application to static javascript and html files, the package.json is not needed anymore.

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
  • +

    --merge

    +
    +

    If you have used cicdgen previously, you can choose what you want to do in case of file conflict. The default behavior is to throw an error and not modify any file. You can see the other strategies on their specific page.

    +
    +
  • +
  • +

    --commit

    +
    +

    If true, all changes will be committed at the end of the process (if possible). In order to send a false value, you need to write --commit=false

    +
    +
  • +
+
+
+
+
devon4net generated files
+
+

When you execute the generate devon4net command, some files will be added/updated in your project.

+
+
+Files +
+
    +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which defines the Jenkins pipeline of our project. With this we can execute the test, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        dotnet core installed in Jenkins as a global tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker :

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift :

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkins, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarded to delete old artifacts/build of the pipeline and also we disable the concurrent builds.

        +
        +

        [[jenkinsfile-teams]] +If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        tools: Here we define the global tools configurations. By default a version of nodejs is added here.

        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update them manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: in this stage some custom tools are loaded. Also we set some variables depending on the git branch which you are executing.

          +
        • +
        • +

          Fresh Dependency Installation: install all dependencies needed to build/run your dotnet project.

          +
        • +
        • +

          Execute dotnet tests: execute the tests.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Build Application: compile the application to be ready to deploy in a web server.

          +
        • +
        • +

          Deliver application into Nexus: store all compiled files in Nexus3 as a zip file.

          +
          +

          [[jenkinsfile-docker]]

          +
          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
            +

            [[jenkinsfile-openshift]]

            +
            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+
devon4net Docker generated files
+
+

When you generate the files for devon4net you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4net --groupid com.devonfw --docker --registryurl docker-registry-test.s2-eu.capgemini.com. +
+
+
+Files +
+
    +
  • +

    .dockerignore

    +
    +

    In this file are defined the folders that will not be copied to the docker image. For more information read the official documentation.

    +
    +
  • +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling again the code, it takes all compiled files from Jenkins to the image.

    +
    +
  • +
+
+ +
+
+
+
+
devon4node schematic
+
+

With the cicdgen generate devon4node command you can generate some files required for CICD. In this section we will explain the arguments of this command and also the files that will be generated.

+
+
+
devon4node schematic arguments
+
+

When you execute the cicdgen generate devon4node command you can also add some arguments in order to modify the behaviour of the command. Those arguments are:

+
+
+
    +
  • +

    --docker

    +
    +

    The type of this parameter is boolean. If it is present, docker related files and pipeline stage will be also generated. For more details see docker section of Jenkinsfile and files generated for docker

    +
    +
  • +
  • +

    --dockerurl

    +
    +

    The URL of your external docker daemon. Example: tcp://127.0.0.1:2376

    +
    +
  • +
  • +

    --dockercertid

    +
    +

    The Jenkins credential id for your docker daemon certificate. It is only required when your docker daemon is secure.

    +
    +
  • +
  • +

    --registryurl

    +
    +

    Your docker registry URL. It is required when --docker is true, and it will be used to know where the docker image will be uploaded.

    +
    +
  • +
  • +

    --openshift

    +
    +

    The type of this parameter is boolean. If it is present, OpenShift related files and pipeline stage will be also generated. For more details see OpenShift section of Jenkinsfile and files generated for OpenShift (same as --docker)

    +
    +
  • +
  • +

    --ocname

    +
    +

    The name used to register your OpenShift cluster in Jenkins.

    +
    +
  • +
  • +

    --ocn

    +
    +

    OpenShift cluster namespace

    +
    +
  • +
  • +

    --groupid

    +
    +

    The project groupId. This argument is required. It will be used to store the project in a maven repository at Nexus 3. Why maven? Because it is the kind of repository where we can upload/download a zip file easily. An npm repository needs a package.json file but, as we compile the angular application to static javascript and html files, the package.json is not needed anymore.

    +
    +
  • +
  • +

    --teams

    +
    +

    With this argument we can add the teams notification option in the Jenkinsfile.

    +
    +
  • +
  • +

    --teamsname

    +
    +

    The name of the Microsoft Teams webhook. It is defined at Microsoft Teams connectors.

    +
    +
  • +
  • +

    --teamsurl

    +
    +

    The url of the Microsoft Teams webhook. It is returned by Microsoft Teams when you create a connector.

    +
    +
  • +
  • +

    --merge

    +
    +

    If you have used cicdgen previously, you can choose what you want to do in case of file conflict. The default behavior is to throw an error and not modify any file. You can see the other strategies on their specific page.

    +
    +
  • +
  • +

    --commit

    +
    +

    If true, all changes will be committed at the end of the process (if possible). In order to send a false value, you need to write --commit=false

    +
    +
  • +
+
+
+
+
devon4node generated files
+
+

When you execute the generate devon4node command, some files will be added/updated in your project.

+
+
+Files +
+
    +
  • +

    package.json

    +
    +

    The package.json is modified in order to add a script for running the linter and generating the json report. This script is called lint:ci.

    +
    +
  • +
  • +

    Jenkinsfile

    +
    +

    The Jenkinsfile is the file which defines the Jenkins pipeline of our project. With this we can execute the test, build the application and deploy it automatically following a CICD methodology. This file is prepared to work with the Production Line default values, but it is also fully configurable to your needs.

    +
    +
    +
      +
    • +

      Prerequisites

      +
      +
        +
      • +

        A Production Line instance. It can also work if you have a Jenkins, SonarQube and Nexus3, but in this case maybe you need to configure them properly.

        +
      • +
      • +

        NodeJS installed in Jenkins as a global tool.

        +
      • +
      • +

        SonarQube installed in Jenkins as a global tool.

        +
      • +
      • +

        Maven3 installed in Jenkins as a global tool.

        +
      • +
      • +

        A maven global settings properly configured in Jenkins.

        +
      • +
      • +

        If you will use docker :

        +
        +
          +
        • +

          Docker installed in Jenkins as a global custom tool.

          +
        • +
        • +

          The Nexus3 with a docker repository.

          +
        • +
        • +

          A machine with docker installed where the build and deploy will happen.

          +
        • +
        +
        +
      • +
      • +

        If you will use OpenShift :

        +
        +
          +
        • +

          An OpenShift instance

          +
        • +
        • +

          The OpenShift projects created

          +
        • +
        +
        +
      • +
      +
      +
    • +
    • +

      The Jenkins syntax

      +
      +

      In this section we will explain a little bit the syntax of the Jenkins, so if you need to change something you will be able to do it properly.

      +
      +
      +
        +
      • +

        agent: Here you can specify the Jenkins agent where the pipeline will be executed. The default value is any.

        +
      • +
      • +

        options: Here you can set global options for the pipeline. By default, we add a build discarded to delete old artifacts/build of the pipeline and also we disable the concurrent builds.

        +
        +

        [[jenkinsfile-teams]] +If the teams option is passed to cicdgen, we add a new option in order to send notifications to Microsoft Teams with the status of the pipeline executions.

        +
        +
      • +
      • +

        tools: Here we define the global tools configurations. By default a version of nodejs is added here.

        +
      • +
      • +

        environment: Here all environment variables are defined. All values defined here match the Production Line defaults. If your Jenkins has other values, you need to update them manually.

        +
      • +
      • +

        stages: Here are defined all stages that our pipeline will execute. Those stages are:

        +
        +
          +
        • +

          Loading Custom Tools: in this stage some custom tools are loaded. Also we set some variables depending on the git branch which you are executing.

          +
        • +
        • +

          Fresh Dependency Installation: install all packages needed to build/run your node project.

          +
        • +
        • +

          Code Linting: execute the linter analysis.

          +
        • +
        • +

          Execute tests: execute the tests.

          +
        • +
        • +

          SonarQube code analysis: send the project to SonarQube in order to get the static code analysis of your project.

          +
        • +
        • +

          Build Application: compile the application to be ready to deploy in a web server.

          +
        • +
        • +

          Deliver application into Nexus: store all compiled files in Nexus3 as a zip file.

          +
          +

          [[jenkinsfile-docker]]

          +
          +
        • +
        • +

          If --docker is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application using the image created in the previous stage. The previous version is removed.

            +
            +

            [[jenkinsfile-openshift]]

            +
            +
          • +
          +
          +
        • +
        • +

          If --openshift is present:

          +
          +
            +
          • +

            Create the Docker image: build a new docker image that contains the new version of the project using a OpenShift build config.

            +
          • +
          • +

            Deploy the new image: deploy a new version of the application in OpenShift.

            +
          • +
          • +

            Check pod status: checks that the application deployed in the previous stage is running properly. If the application does not run the pipeline will fail.

            +
          • +
          +
          +
        • +
        +
        +
      • +
      • +

        post: actions that will be executed after the stages. We use it to clean up all files.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+
+
devon4node Docker generated files
+
+

When you generate the files for a devon4node you can also pass the option --docker. It will generate also some extra files related to docker.

+
+
+ + + + + +
+ + +If you pass the --docker option the option --registryurl is also required. It will be used to upload the images to a docker registry. Example: if your registry url is docker-registry-test.s2-eu.capgemini.com you should execute the command in this way: cicdgen generate devon4node --groupid com.devonfw --docker `--registryurl docker-registry-test.s2-eu.capgemini.com`. +
+
+
+Files +
+
    +
  • +

    .dockerignore

    +
    +

    In this file are defined the folders that will not be copied to the docker image. For more information read the official documentation.

    +
    +
  • +
  • +

    Dockerfile

    +
    +

    This file contains the instructions to build a docker image for your project. This Dockerfile is for local development purposes, you can use it in your machine executing:

    +
    +
    +
    +
    $ cd <path-to-your-project>
    +$ docker build -t <project-name>/<tag> .
    +
    +
    +
    +

    This build installs all dependencies in order to build the project and then removes all devDependencies in order to keep only the production dependencies.

    +
    +
  • +
  • +

    .dockerignore.ci

    +
    +

    Another .dockerignore. The purpose of this one is to define the file exclusions in your CI pipeline.

    +
    +
  • +
  • +

    Dockerfile.ci

    +
    +

    This file contains the instructions to create a docker image for your project. The main difference with the Dockerfile is that this file will be only used in the Jenkins pipeline. Instead of compiling again the code, it takes all compiled files from Jenkins to the image.

    +
    +
  • +
+
+
+
+
+
+
+

27.7. Hangar

+ +
+
+
+
+

28. MyThaiStar

+
+
+

28.1. 1. My Thai Star – Agile Framework

+ +
+
+

28.2. 1.1 Team Setup

+
+

The team working on the development of the My Thai Star app and the documentation beside the technical development works distributed in various locations across Germany, the Netherlands, Spain and Poland. For the communication part the team uses the two channels Skype and Mail and for the documentation part the team makes usage mainly of GitHub and JIRA.

+
+
+
+

28.3. 1.2 Scrum events

+ +
+
+

28.4. Sprint Planning

+
+

Within the My Thai Star project we decided on having one hour Sprint Planning meetings for a four-week Sprints. This decision is based on the fact that this project is not the main project of the team members. As the backlog refinement is done during the Sprint Planning we make usage of the planningpoker.com tool for the estimation of the tasks.

+
+
+
+Screenshot of planningpoker.com +
+
Figure 88. Screenshot of planningpoker.com during Sprint 1 Planning
+
+
+

During the Sprint Planning meeting the team receives support from Devon colleagues outside the development. This feedback helps the team to focus on important functionalities and task by keeping the eyes on the overall aim which is to have a working application by the end of June 2017.

+
+
+
+

28.5. Sprint Review

+
+

The Sprint Review meetings are time boxed to one hour for the four week Sprint. Within the Sprint Review meeting the team plans to do a retrospective of the finished Sprint. As well as it is done during the Sprint Planning the team receives support from Devon colleagues.

+
+
+
+

28.6. Sprint Retrospective

+
+

For this project the team aligned on not having a specific Sprint Retrospective meeting. The team is going to have a retrospective of a finished Sprint during the Sprint Review.

+
+
+
+

28.7. Daily Stand-ups

+
+

The team aligned on having two weekly Stand-up meetings instead of a Daily Stand-up meeting. In comparison with the time boxed length of 15mins described in the CAF for this project the team extended the Stand-up meeting to 30mins. The content of the meetings remains the same.

+
+
+
+

28.8. Backlog refinement

+
+

The team decided that the backlog refinement meeting is part of the Sprint Planning meeting.

+
+
+
+

28.9. 1.3 Establish Product Backlog

+
+

For the My Thai Star project the team decided on using the JIRA agile documentation which is one of the widely used agile tools. JIRA is equipped with several useful tools regarding the agile software development (e.g. Scrum-Board). One of the big advantages of JIRA are the extensive configuration and possibilities to personalize.

+
+
+

With having a list of the Epics and User Stories for the My Thai Star development in GitHub, the team transferred the User Stories into the JIRA backlog as it is shown in the screenshot below. All User Stories are labeled colorfully with the related Epic which shapes the backlog in clearly manner.

+
+
+
+Screenshot of planningpoker.com +
+
Figure 89. Screenshot of the JIRA backlog during Sprint 2
+
+
+

We decided on working with Sub-task as a single user story comprised a number of single and separated tasks. Another benefit of working with sub-task is that every single sub-task can be assigned to a single team member whereas a user story can only be assigned to one team member. By picking single sub-task the whole process of a user story is better organized.

+
+
+
+Screenshot of Sub-tasks +
+
Figure 90. Screenshots of Sub-tasks during Sprint 2
+
+
+
+

28.10. 2. My Thai Star – Agile Diary

+
+

In parallel to the Diary Ideation we use this Agile Diary to document our Scrum events. The target of this diary is to describe the differences to the Scrum methodology as well as specific characteristics of the project. We also document the process on how we approach the Scrum methodology over the length of the project.

+
+
+
+

28.11. 24.03.2017 Sprint 1 Planning

+
+

Within the Sprint 1 Planning we used planning poker.com for the estimation of the user stories. The estimation process usually is part of the backlog refinement meeting. Regarding the project circumstances we decided to estimate the user stories during the Sprint Planning. Starting the estimation process we noticed that we had to align our interpretation of the estimation effort as these story points are not equivalent to a certain time interval. The story points are relative values to compare the effort of the user stories. With this in mind we proceeded with the estimation of the user stories. We decided to start Sprint 1 with the following user stories and the total amount of 37 story points: +• ICSDSHOW-2 Create invite for friends (8 Story Points) +• ICSDSHOW-4 Create reservation (3) +• ICSDSHOW-5 Handle invite (3) +• ICSDSHOW-6 Revoke accepted invite (5) +• ICSDSHOW-9 Cancel invite (3) +• ICSDSHOW-11 Filter menu (5) +• ICSDSHOW-12 Define order (5) +• ICSDSHOW-13 Order the order (5) +As the Sprint Planning is time boxed to one hour we managed to hold this meeting within this time window.

+
+
+
+

28.12. 27.04.2017 Sprint 1 Review

+
+

During the Sprint 1 Review we had a discussion about the data model proposal. For the discussion we extended this particular Review meeting to 90min. As this discussion took almost 2/3 of the Review meeting we only had a short time left for our review of Sprint 1. For the following scrum events we decided to focus on the primary target of these events and have discussions needed for alignments in separate meetings. +Regarding the topic of splitting user stories we had the example of a certain user story which included a functionality of a twitter integration (ICSDSHOW-17 User Profile and Twitter integration). As the twitter functionality could not have been implemented at this early point of time we thought about cutting the user story into two user stories. We aligned on mocking the twitter functionality until the dependencies are developed in order to test the components. As this user story is estimated with 13 story points it is a good example for the question whether to cut a user story into multiple user stories or not. +Unfortunately not all user stories of Sprint 1 could have been completed. Due to this situation we discussed on whether pushing all unfinished user stories into the status done or moving them to Sprint 2. We aligned on transferring the unfinished user stories into the next Sprint. During the Sprint 1 the team underestimated that a lot of holidays crossed the Sprint 1 goals. As taking holidays and absences of team members into consideration is part of a Sprint Planning we have a learning effect on setting a Sprint Scope.

+
+
+
+

28.13. 03.05.2017 Sprint 2 Planning

+
+

As we aligned during the Sprint 1 Review on transferring unfinished user stories into Sprint 2 the focus for Sprint 2 was on finishing these transferred user stories. During our discussion on how many user stories we could work on in Sprint 2 we needed to remind ourselves that the overall target is to develop an example application for the devonfw. Considering this we aligned on a clear target for Sprint 2: To focus on finishing User Stories as we need to aim for a practicable and realizable solution. Everybody aligned on the aim of having a working application at the end of Sprint 2. +For the estimation process of user stories we make again usage of planningpoker.com as the team prefers this “easy-to-use” tool. During our second estimation process we had the situation in which the estimated story points differed strongly from one team member to another. In this case the team members shortly explained how they understood and interpreted the user story. It turned out that team members misinterpreted the user stories. With having this discussion all team members got the same understanding of the specific functionality and scope of a user story. After the alignment the team members adjusted their estimations. +Beside this need for discussion the team estimated most of the user stories with very similar story points. This fact shows the increase within the effort estimation for each team member in comparison to Sprint 1 planning. Over the short time of two Sprint plannings the team received a better understanding and feeling for the estimation with story points.

+
+
+
+

28.14. 01.06.2017 Sprint 2 Review

+
+

As our Sprint 1 Review four weeks ago was not completely structured like a Sprint Review meeting we focused on the actual intention of a Sprint Review meeting during Sprint 2 Review. This means we demonstrated the completed and implemented functionalities with screen sharing and the product owner accepted the completed tasks. +Within the User Story ICSDSHOW-22 “See all orders/reservations” the functionality “filtering the list by date” could have not been implemented during Sprint 2. The team was unsure on how to proceed with this task. One team member added that especially in regards of having a coherent release, implementing less but working functionalities is much better than implementing more but not working functionalities. For this the team reminded itself focusing on completing functionalities and not working straight to a working application.

+
+ +
+
+

28.15. User Stories

+
+

The list of user stories, exported from JIRA, can be downloaded from here.

+
+
+
+

28.16. Epic: Invite friends

+ +
+
+

28.17. US: create invite for friends

+
+

Epic: Invite friends

+
+
+

As a guest I want to create a dinner event by entering date and time and adding potential guests by their emails so that each potential guest will receive an email in order to confirm or decline my invite.

+
+
+
+

28.18. == Acceptance criteria

+
+
    +
  1. +

    only date and time in future possible and both required

    +
  2. +
  3. +

    only valid email addresses: text@text.xx, one entered email-address is required

    +
  4. +
  5. +

    if AGB are not checked, an error message is shown

    +
  6. +
  7. +

    after the invite is done

    +
    +
      +
    1. +

      I see the confirmation screen of my invite (see wireframe)

      +
    2. +
    3. +

      I receive a confirmation email about my invite containing date, time and invited guests

      +
    4. +
    5. +

      all guests receive a mail with my invite

      +
    6. +
    +
    +
  8. +
+
+
+
+

28.19. US: create reservation

+
+

Epic: Invite friends

+
+
+

As a guest I want to create a reservation by entering date and time and number of adults and kids

+
+
+
+

28.20. == Acceptance criteria

+
+
    +
  1. +

    only date and time in future possible and both required

    +
  2. +
  3. +

    only valid email addresses: text@text.xx, one entered email-address is required

    +
  4. +
  5. +

    if AGB are not checked, an error message is shown

    +
  6. +
  7. +

    after the reservation is done

    +
    +
      +
    1. +

      I see a confirmation screen of my reservation with date-time, number of persons and kids

      +
    2. +
    3. +

      I receive a confirmation email about my reservation

      +
    4. +
    +
    +
  8. +
+
+
+
+

28.21. == Wireframes

+
+

see real time board

+
+
+
+

28.22. US: handle invite

+
+

As an invited guest I would like to receive an email - after somebody has invited me - with the option to accept or decline the invite so that the system knows about my participation

+
+
+
+

28.23. == AC:

+
+
    +
  1. +

    the mail contains the following information about the invite

    +
    +
      +
    1. +

      who has invited

      +
    2. +
    3. +

      who else is invited

      +
    4. +
    5. +

      date and time of the invite

      +
    6. +
    7. +

      button to accept or decline

      +
    8. +
    9. +

      after pressing the buttons the system will store the status (yes/no) of my invite

      +
    10. +
    +
    +
  2. +
+
+
+
+

28.24. US: revoke accepted invite

+
+

As an invited guest I would like to revoke my previous answer in order to inform the system and the inviter about my no showup

+
+
+
+

28.25. == AC:

+
+
    +
  1. +

    the inviter and myself receives an email about my cancellation

    +
  2. +
  3. +

    the system sets my status of my invite to no

    +
  4. +
  5. +

    in case I have placed an order, the order is also removed from the system.

    +
  6. +
  7. +

    the cancellation is only possible 10 minutes before the event takes place. The system shows a message that cancellation is not possible anymore.

    +
  8. +
+
+
+
+

28.26. US: calculate best table

+
+

As a guest I would like the system to check (1 hour before my invite) all my invites and to reserve a table fitting the number of accepted users

+
+
+
+

28.27. == Details

+
+

Pseudo-algorithm for reservation: +Find table for given date and time where seats of guests >= Count of invited guests plus one. In case no results, decline request and show error message to user. In case of any result, make a reservation for table…​. +For each decline of a guest remove guest and search with reduced number for new table. In case table is found, reserve it and remove reservation from previous table. In case not, do not change reservations.

+
+
+
+

28.28. US: find table by reservation info

+
+

As a waiter I would like to search by reference number or email address for the reserved table in order to know the table for my visit. (when arriving at the restaurant)

+
+
+
+

28.29. == AC:

+
+
    +
  1. +

    After entering the email the systems shows the number of the table. In case no reservation found, a message is shown.

    +
  2. +
  3. +

    Entered email address could be email of inviter or any invited guest.

    +
  4. +
+
+
+
+

28.30. US: cancel invite

+
+

Epic: Invite friends

+
+
+

As a guests who has sent an invite I want to be able to cancel my previous invite in order to inform the restaurant and my invited guests that I will not show up

+
+
+
+

28.31. == AC:

+
+
    +
  1. +

    the option to cancel the invite is available in the confirmation-mail about my invite

    +
  2. +
  3. +

    after my cancellation all invited guests receives a mail about the cancellation

    +
  4. +
  5. +

    I see a confirmation that my invite was canceled successfully

    +
  6. +
  7. +

    after my cancellation my invite and reservation and all orders related to it are deleted from the system and no one can accept or decline any invite for it

    +
  8. +
  9. +

    the cancellation is only possible one hour before the invite takes place. After that I am not allowed to cancel it any more.

    +
  10. +
+
+
+
+

28.32. Epic: Digital Menu

+ +
+
+

28.33. US: filter menu

+
+

As a guest I want to filter the menu so that I only see the dishes I am interested in

+
+
+
+

28.34. == AC:

+
+
    +
  1. +

    the guest can filter by

    +
    +
      +
    1. +

      type: starter | main dish | dessert; XOR; if nothing is selected all are shown (default value)

      +
    2. +
    3. +

      veggy (yes|no|does not matter (default))

      +
    4. +
    5. +

      vegan (yes|no|does not matter (default))

      +
    6. +
    7. +

      rice (yes|no|does not matter (default))

      +
    8. +
    9. +

      curry (yes|no|does not matter (default))

      +
    10. +
    11. +

      noodle (yes|no|does not matter (default))

      +
    12. +
    13. +

      price (range)

      +
    14. +
    15. +

      ratings (range)

      +
    16. +
    17. +

      my favorite (yes|no|does not matter (default)) — free text (search in title and description)

      +
    18. +
    +
    +
  2. +
  3. +

    the guest can sort by price asc, rating asc

    +
  4. +
  5. +

    after setting the filter only dishes are shown which fulfill those criteria

    +
  6. +
  7. +

    by pressing the button reset filter all filter are reset to the initial value

    +
  8. +
  9. +

    by pressing the filter button the filter is applied [or is it triggered after each change?]

    +
  10. +
+
+
+
+

28.35. US: Define order

+
+

As a guest I want to define my order by selecting dishes from the menu

+
+
+
+

28.36. == AC:

+
+
    +
  • +

    The guest can add each dish to the order

    +
  • +
  • +

    In case the guest adds the same dish multiple times, a counter in the order for this dish is increased for this dish

    +
  • +
  • +

    The guest can remove the dish from the order

    +
  • +
  • +

    The guest can add for each main dish the type of meat (pork, chicken, tofu)

    +
  • +
  • +

    The guest can add for each dish a free-text-comment

    +
  • +
  • +

    After adding/removing any dish the price is calculated including VAT

    +
  • +
+
+
+
+

28.37. US: Order the order

+
+

As a guest I want to order my selected dishes (order)

+
+
+

AC:

+
+
+
    +
  1. +

    I receive a mail containing my order with all dishes and the final price

    +
  2. +
  3. +

    precondition for ordering:

    +
    +
      +
    1. +

      Each order must be associated with a reservation / invite. Without any reference no order could be placed. The reference could be obtained from a previous reservation/invite (created during same session) or by the previous accepted invite (link in email) or by entering the reference id when asked by the system.

      +
      +
        +
      1. +

        In case precondition is not fulfilled, the guest is asked

        +
        +
          +
        1. +

          whether he/she would like to create a reservation/invite and is forwarded to US Invite Friends. Only after finalizing the reservation the order is accepted.

          +
        2. +
        3. +

          or he/she would enter previous created reservation-id he/she knows in order to associate his/her order with this reservation

          +
        4. +
        +
        +
      2. +
      +
      +
    2. +
    +
    +
  4. +
+
+
+
+

28.38. US: Cancel order

+
+

As a guest I want to cancel my order.

+
+
+

AC:

+
+
+
    +
  1. +

    in my received confirmation mail I have the option to cancel my order

    +
  2. +
  3. +

    the cancellation is only possible one hour before my reservation takes place

    +
  4. +
  5. +

    my order is deleted from the system

    +
  6. +
+
+
+

Remark: Changing the order is not possible. For that the order must be canceled and created from scratch again

+
+
+
+

28.39. US: Read twitter rating for dishes

+
+

As a guest I want to read for all dishes the rating done be twitter because I would like to know the opinion of others

+
+
+

AC:

+
+
+
    +
  1. +

    For each dish I see the latest 3 comments done by twitter for this vote (text, username, avatar)

    +
  2. +
  3. +

    For each dish I see the number of likes done by twitter

    +
  4. +
+
+
+
+

28.40. Epic: User Profile

+ +
+
+

28.41. US: User Profile

+
+

As a guest I want to have a user profile to associate it with my twitter account to be able to like/rate dishes

+
+
+

AC:

+
+
+
    +
  1. +

    Username of my profile is my email address

    +
  2. +
  3. +

    My profile is protected by password

    +
  4. +
  5. +

    I can log in and log out to my profile

    +
  6. +
  7. +

    I can reset my password by triggering the reset by mail

    +
  8. +
  9. +

    I can associate my profile with my twitter account in order to rate dishes and store my favorites by liking posts associated to dishes

    +
  10. +
+
+
+
+

28.42. Epic: Rate by twitter

+ +
+
+

28.43. US: Receive mail to rate your dish

+
+

As a guest I want to receive a mail by the system in order to rate my dish

+
+
+
+

28.44. US: Rate your dish

+
+

As a guest I want to add a comment or a like via my twitter account for a dish

+
+
+

AC:

+
+
+
    +
  1. +

    Before I write my rate I would like to be able to read all tweets of other users for this dish

    +
  2. +
  3. +

    I would like to see the number of likes for a dish

    +
  4. +
+
+
+
+

28.45. Epic: Waiter Cockpit

+ +
+
+

28.46. US: See all orders/reservations

+
+

As a waiter I want to see all orders/reservation in order to know what is going on in my restaurant

+
+
+

AC:

+
+
+
    +
  1. +

    all orders/reservations are shown in a list view (read-only). Those list can be filtered and sorted (similar to excel-data-filters)

    +
  2. +
  3. +

    orders/reservations are shown in separate lists.

    +
  4. +
  5. +

    for each order the dish, meat, comment, item, reservation-id, reservation date-time, creation-date-time is shown

    +
  6. +
  7. +

    for each reservation the inviters email, the guests-emails, the number of accepts and declines, calculated table number, the reservation-id, reservation date-time and creation-date-time are shown

    +
  8. +
  9. +

    the default filter for all lists is the today’s date for reservation date-time. this filter can be deleted.

    +
  10. +
  11. +

    only reservations and orders with reservation date in the future shall be available in this view. All other orders and reservation shall not be deleted; for data Analytics those orders and reservation shall still exist in the system.

    +
  12. +
+
+
+

checklist:

+
+
+

talk about:

+
+
+
    +
  • +

    who?

    +
  • +
  • +

    what?

    +
  • +
  • +

    why (purpose)

    +
  • +
  • +

    why (objective)

    +
  • +
  • +

    what happens outside the software

    +
  • +
  • +

    what might go wrong

    +
  • +
  • +

    any question or assumptions (write them down) , DoR should check that those sections are empty.

    +
  • +
  • +

    is there any better solution?

    +
  • +
  • +

    how (technical perspective)

    +
  • +
  • +

    do a rough estimate

    +
  • +
  • +

    check INVEST

    +
  • +
+
+
+
+
+
+

29. Technical design

+
+ +
+
+
+

30. Data Model

+
+ +
+
Data Model
+
+
+mts datamodel +
+
+ +
+
+
NoSQL Data Model
+
+
+dynamodb data model 1.4.1 +
+
+
+
+
+
+

31. Server Side

+
+ +
+
Java design
+ +
+
+
Introduction
+
+

The Java back-end for My Thai Star application is going to be based on:

+
+
+
    +
  • +

    DEVON4J as the Java framework

    +
  • +
  • +

    Devonfw as the Development environment

    +
  • +
  • +

    CobiGen as code generation tool

    +
  • +
+
+
+

To know more details about the above technologies please visit the following documentation:

+
+
+ +
+
+
+
Basic architecture details
+
+

Following the DEVON4J conventions the Java My Thai Star back-end is going to be developed dividing the application in Components and using a three layers architecture.

+
+
+
+
Project modules
+
+

Using the DEVON4J approach for the Java back-end project we will have a structure of a Maven project formed by three projects

+
+
+
+project modules +
+
+
+
    +
  • +

    api: Stores all the REST interfaces and corresponding Request/Response objects.

    +
  • +
  • +

    core: Stores all the logic and functionality of the application.

    +
  • +
  • +

    server: Configures the packaging of the application.

    +
  • +
+
+
+

We can automatically generate this project structure using the DEVON4J Maven archetype

+
+
+
+
Components
+
+

The application is going to be divided in different components to encapsulate the different domains of the application functionalities.

+
+
+
+mtsj components +
+
+
+

As main components we will find:

+
+
+
    +
  • +

    Bookingmanagement: Manages the bookings part of the application. With this component the users (anonymous/logged in) can create new bookings or cancel an existing booking. The users with waiter role can see all scheduled bookings.

    +
  • +
  • +

    Ordermanagement: This component handles the process to order dishes (related to bookings). A user (as a host or as a guest) can create orders (that contain dishes) or cancel an existing one. The users with waiter role can see all ordered orders.

    +
  • +
  • +

    Dishmanagement: This component groups the logic related to the menu (dishes) view. Its main feature is to provide the client with the data of the available dishes but also can be used by other components (Ordermanagement) as a data provider in some processes.

    +
  • +
  • +

    Usermanagement: Takes care of the User Profile management, allowing to create and update the data profiles.

    +
  • +
+
+
+

As common components (that don’t exactly represent an application’s area but provide functionalities that can be used by the main components):

+
+
+
    +
  • +

    Imagemanagement: Manages the images of the application. In a first approach the Dishmanagement component and the Usermanagement component will have an image as part of its data. The Imagemanagement component will expose the functionality to store and retrieve this kind of data.

    +
  • +
  • +

    Mailservice: with this service we will provide the functionality for sending email notifications. This is a shared service between different app components such as bookingmanagement or ordercomponent.

    +
  • +
+
+
+

Other components:

+
+
+
    +
  • +

    Security (will manage the access to the private part of the application using a jwt implementation).

    +
  • +
  • +

    Twitter integration: planned as a Microservice will provide the twitter integration needed for some specific functionalities of the application.

    +
  • +
+
+
+
+
Layers
+
+
    +
  • +

    Service Layer: this layer will expose the REST API to exchange information with the client applications.

    +
  • +
  • +

    Logic Layer: the layer in charge of hosting the business logic of the application.

    +
  • +
  • +

    Data Access Layer: the layer to communicate with the data base.

    +
  • +
+
+
+

This architecture is going to be reflected dividing each component of the application in different packages to match those three layers.

+
+
+
+
Component structure
+
+

Each one of the components defined previously are going to be structured using the three-layers architecture. In each case we will have a service package, a logic package and a dataaccess package to fit the layers definition.

+
+
+
+component structure +
+
+
+
+
Dependency injection
+
+

As it is explained in the devonfw documentation we are going to implement the dependency injection pattern basing our solution on Spring and the Java standards: java.inject (JSR330) combined with JSR250.

+
+
+
+dependency injection +
+
+
+
    +
  • +

    Separation of API and implementation: Inside each layer we will separate the elements in different packages: api and impl. The api will store the interface with the methods definition and inside the impl we will store the class that implements the interface.

    +
  • +
+
+
+
+layer api impl +
+
+
+
    +
  • +

    Usage of JSR330: The Java standard set of annotations for dependency injection (@Named, @Inject, @PostConstruct, @PreDestroy, etc.) provides us with all the needed annotations to define our beans and inject them.

    +
  • +
+
+
+
+
@Named
+public class MyBeanImpl implements MyBean {
+  @Inject
+  private MyOtherBean myOtherBean;
+
+  @PostConstruct
+  public void init() {
+    // initialization if required (otherwise omit this method)
+  }
+
+  @PreDestroy
+  public void dispose() {
+    // shutdown bean, free resources if required (otherwise omit this method)
+  }
+}
+
+
+
+
+
Layers communication
+
+

The connection between layers, to access to the functionalities of each one, will be solved using the dependency injection and the JSR330 annotations.

+
+
+
+layers impl +
+
+
+

Connection Service - Logic

+
+
+
+
@Named("DishmanagementRestService")
+public class DishmanagementRestServiceImpl implements DishmanagementRestService {
+
+  @Inject
+  private Dishmanagement dishmanagement;
+
+  // use the 'this.dishmanagement' object to access to the functionalities of the logic layer of the component
+
+  ...
+
+}
+
+
+
+

Connection Logic - Data Access

+
+
+
+
@Named
+public class DishmanagementImpl extends AbstractComponentFacade implements Dishmanagement {
+
+  @Inject
+  private DishDao dishDao;
+
+  // use the 'this.dishDao' to access to the functionalities of the data access layer of the component
+  ...
+
+}
+
+
+
+
+
Service layer
+
+

The services layer will be solved using REST services with the JAX-RS implementation.

+
+
+

To give service to the defined User Stories we will need to implement the following services:

+
+
+
    +
  • +

    provide all available dishes.

    +
  • +
  • +

    save a booking.

    +
  • +
  • +

    save an order.

    +
  • +
  • +

    provide a list of bookings (only for waiters) and allow filtering.

    +
  • +
  • +

    provide a list of orders (only for waiters) and allow filtering.

    +
  • +
  • +

    login service (see the Security section).

    +
  • +
  • +

    provide the current user data (see the Security section)

    +
  • +
+
+
+

Following the naming conventions proposed for Devon4j applications we will define the following end points for the listed services.

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that do not belong to the Order entity).

    +
  • +
  • +

    (POST) /mythaistar/login.

    +
  • +
  • +

    (GET) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

You can find all the details for the services implementation in the Swagger definition included in the My Thai Star project on Github.

+
+
+
+
Service API
+
+

The api.rest package in the service layer of a component will store the definition of the service by a Java interface. In this definition of the service we will set-up the endpoints of the service, the type of data expected and returned, the HTTP method for each endpoint of the service and other configurations if needed.

+
+
+
+
@Path("/dishmanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public interface DishmanagementRestService {
+
+  @GET
+  @Path("/dish/{id}/")
+  public DishCto getDish(@PathParam("id") long id);
+
+  ...
+
+}
+
+
+
+
+
Service impl
+
+

Once the service api is defined we need to implement it using the Java interface as reference. We will add the service implementation class to the impl.rest package and implement the RestService interface.

+
+
+
+
@Named("DishmanagementRestService")
+public class DishmanagementRestServiceImpl implements DishmanagementRestService {
+
+  @Inject
+  private Dishmanagement dishmanagement;
+
+  @Override
+  public DishCto getDish(long id) {
+    return this.dishmanagement.findDish(id);
+  }
+
+  ...
+
+}
+
+
+
+
+
==
+
+

You can see the Devon4j conventions for REST services here, and the My Thai Star services definition here as part of the My Thai Star project.

+
+
+
+
Logic layer
+
+

In the logic layer we will locate all the business logic of the application. We will keep the same schema as we have done for the service layer, having an api package with the definition of the methods and a impl package for the implementation.

+
+
+

Also, inside the api package, a to package will be the place to store the transfer objects needed to pass data through the layers of the component.

+
+
+
+logic layer +
+
+
+

The logic api definition:

+
+
+
+
public interface Dishmanagement {
+
+  DishCto findDish(Long id);
+
+  ...
+}
+
+
+
+

The logic impl class:

+
+
+
+
@Named
+public class DishmanagementImpl extends AbstractComponentFacade implements Dishmanagement {
+
+  @Inject
+  private DishDao dishDao;
+
+
+  @Override
+  public DishCto findDish(Long id) {
+
+    return getBeanMapper().map(this.dishDao.findOne(id), DishCto.class);
+  }
+
+  ...
+
+}
+
+
+
+

The BeanMapper will provide the needed transformations between entity and transfer objects.

+
+
+

Also, the logic layer is the place to add validation for Authorization based on roles as we will see later.

+
+
+
+
Data Access layer
+
+

The data-access layer is responsible for managing the connections to access and process data. The mapping between java objects to a relational database is done in Devon4j with the spring-data-jpa.

+
+
+

As in the previous layers, the data-access layer will have both api and impl packages. However, in this case, the implementation will be slightly different. The api package will store the component main entities and, inside the api package, another api.repo package will store the repositories. The repository interface will extend the DefaultRepository interface (located in the com.devonfw.module.jpa.dataaccess.api.data package of devon4j-starter-spring-data-jpa).

+
+
+

For queries we will differentiate between static queries (that will be located in a mapped file) and dynamic queries (implemented with QueryDsl). You can find all the details about how to manage queries with Devon4j here.

+
+
+

The default data base included in the project will be the H2 instance included with the Devon4j projects.

+
+
+

To get more details about pagination, database security, concurrency control, inheritance or how to solve the different relationships between entities, visit the official devon4j dataaccess documentation.

+
+
+
+
Security with Json Web Token
+
+

For the Authentication and Authorization the app will implement the json web token protocol.

+
+
+
+
JWT basics
+
+
    +
  • +

    A user will provide a username / password combination to our Auth server.

    +
  • +
  • +

    The Auth server will try to identify the user and, if the credentials match, will issue a token.

    +
  • +
  • +

    The user will send the token as the Authorization header to access resources on server protected by JWT Authentication.

    +
  • +
+
+
+
+jwt schema +
+
+
+
+
JWT implementation details
+
+

The Json Web Token pattern will be implemented based on the Spring Security framework that is provided by default in the Devon4j projects.

+
+
+
+
== Authentication
+
+

Based on the Spring Security approach, we will implement a class extending WebSecurityConfigurerAdapter (Devon4j already provides the BaseWebSecurityConfig class) to define the security entry point and filters. Also, as My Thai Star is a mainly public application, we will define here the resources that won’t be secured.

+
+
+

List of unsecured resources:

+
+
+
    +
  • +

    /services/rest/dishmanagement/**: to allow anonymous users to see the dishes info in the menu section.

    +
  • +
  • +

    /services/rest/ordermanagement/v1/order: to allow anonymous users to save an order. They will need a booking token but they won’t be authenticated to do this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking: to allow anonymous users to create a booking. Only a booking token is necessary to accomplish this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking/cancel/**: to allow canceling a booking from an email. Only the booking token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/accept/**: to allow guests to accept an invite. Only a guest token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/decline/**: to allow guests to reject an invite. Only a guest token is needed.

    +
  • +
+
+
+

To configure the login we will set up the HttpSecurity object in the configure method of the class. We will define a JWTLoginFilter class that will handle the requests to the /login endpoint.

+
+
+
+
http.[...].antMatchers(HttpMethod.POST, "/login").permitAll().[...].addFilterBefore(new JWTLoginFilter("/login", authenticationManager()), UsernamePasswordAuthenticationFilter.class);
+
+
+
+

In the same HttpSecurity object we will set up the filter for the rest of the requests, to check the presence of the JWT token in the header. First we will need to create a JWTAuthenticationFilter class extending the GenericFilterBean class. Then we can add the filter to the HttpSecurity object

+
+
+
+
http.[...].addFilterBefore(new `JWTAuthenticationFilter()`, UsernamePasswordAuthenticationFilter.class);
+
+
+
+

Finally, as default users to start using the My Thai Star app, we are going to define two profiles using the inMemoryAuthentication of the Spring Security framework. In the configure(AuthenticationManagerBuilder auth) method we will create:

+
+
+
    +
  • +

    user: waiter

    +
  • +
  • +

    password: waiter

    +
  • +
  • +

    role: Waiter

    +
  • +
  • +

    user: user0

    +
  • +
  • +

    password: password

    +
  • +
  • +

    role: Customer

    +
  • +
+
+
+
+
auth.inMemoryAuthentication().withUser("waiter").password("waiter").roles("Waiter").and().withUser("user0").password("password").roles("Customer");
+
+
+
+
+
== Token set up
+
+

Following the official documentation, the implementation details for MyThaiStar’s JWT will be:

+
+
+
    +
  • +

    Secret: Used as part of the signature of the token, acting as a private key. For the showcase purposes we will use simply "ThisIsASecret".

    +
  • +
  • +

    Token Prefix schema: Bearer. The token will look like Bearer <token>

    +
  • +
  • +

    Header: Authorization. The response header where the token will be included. Also, in the requests, when checking the token it will be expected to be in the same header.

    +
  • +
  • +

    The Authorization header should be part of the Access-Control-Expose-Headers header to allow clients access to the Authorization header content (the token).

    +
  • +
  • +

    The claims are the content of the payload of the token. The claims are statements about the user, so we will include the user info in this section.

    +
    +
      +
    • +

      subject: "sub". The username.

      +
    • +
    • +

      issuer: "iss". Who creates the token. We could use the url of our service but, as this is a showcase app, we simply will use "MyThaiStarApp"

      +
    • +
    • +

      expiration date: "exp". Defines when the token expires.

      +
    • +
    • +

      creation date: "iat". Defines when the token has been created.

      +
    • +
    • +

      scope: "scope". Array of strings to store the user roles.

      +
    • +
    +
    +
  • +
  • +

    Signature Algorithm: To sign the token we will use the default algorithm HS512.

    +
  • +
+
+
+

An example of a token claims before encryption would be:

+
+
+

{sub=waiter, scope=[ROLE_Waiter], iss=MyThaiStarApp, exp=1496920280, iat=1496916680}

+
+
+
+
== Current User request
+
+

To provide the client with the current user data, our application should expose a service to return the user details. In Devon4j applications the /general/service/impl/rest/SecurityRestServiceImpl.java class is ready to do that.

+
+
+
+
@Path("/security/v1")
+@Named("SecurityRestService")
+public class SecurityRestServiceImpl {
+
+  @Produces(MediaType.APPLICATION_JSON)
+  @GET
+  @Path("/currentuser/")
+  public UserDetailsClientTo getCurrentUserDetails(@Context HttpServletRequest request) {
+
+  }
+}
+
+
+
+

We will only need to implement the getCurrentUserDetails method.

+
+
+
+
== Authorization
+
+

We need to secure three services, that only should be accessible for users with role Waiter:

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter.

    +
  • +
+
+
+

As part of the token we are providing the user Role. So, when validating the token, we can obtain that same information and build a UsernamePasswordAuthenticationToken with username and the roles as collection of Granted Authorities.

+
+
+

Doing so, afterwards, in the implementation class of the logic layer we can set up the related methods with the Java security '@RolesAllowed' annotation to block access to the resource for users that do not match the expected roles.

+
+
+
+
`@RolesAllowed(Roles.WAITER)`
+public PaginatedListTo<BookingEto> findBookings(BookingSearchCriteriaTo criteria) {
+  return findBookings(criteria);
+}
+
+
+ +
+
+
.NET design
+
+

TODO

+
+ +
+
+
NodeJS design (deprecated)
+ +
+
+
Introduction
+
+

The NodeJS back-end for My Thai Star application is going to be based on:

+
+
+
    +
  • +

    ExpressJS as the web application framework

    +
  • +
  • +

    devon4node as data access layer framework

    +
  • +
  • +

    DynamoDB as NoSQL Database

    +
  • +
+
+
+

To know more details about the above technologies please visit the following documentation:

+
+
+ +
+
+
+
Basic architecture details
+
+

This structure can be shown in the following example image:

+
+
+
+folder organization +
+
+
+
    +
  • +

    public - All files which be exposed on the server directly

    +
  • +
  • +

    src

    +
    +
      +
    • +

      database folder - Folder with scripts to create/delete/seed the database

      +
    • +
    • +

      model - Folder with all data model

      +
    • +
    • +

      routes - Folder with all ExpressJS routers

      +
    • +
    • +

      utils - Folder with all utils like classes and functions

      +
    • +
    • +

      app.ts - File with ExpressJS declaration

      +
    • +
    • +

      config.ts - File with server configs

      +
    • +
    • +

      logic.ts - File with the business logic

      +
    • +
    +
    +
  • +
  • +

    test - Folder with all tests

    +
  • +
+
+
+
+
Layers
+
+
    +
  • +

    Service Layer: this layer will expose the REST API to exchange information with the client applications.

    +
  • +
  • +

    Logic Layer: the layer in charge of hosting the business logic of the application.

    +
  • +
  • +

    Data Access Layer: the layer to communicate with the data base.

    +
  • +
+
+
+
+
Service layer
+
+

The services layer will be solved using REST services with ExpressJS

+
+
+

To give service to the defined User Stories we will need to implement the following services:

+
+
+
    +
  • +

    provide all available dishes.

    +
  • +
  • +

    save a booking.

    +
  • +
  • +

    save an order.

    +
  • +
  • +

    provide a list of bookings (only for waiters) and allow filtering.

    +
  • +
  • +

    provide a list of orders (only for waiters) and allow filtering.

    +
  • +
  • +

    login service (see the Security section).

    +
  • +
  • +

    provide the current user data (see the Security section)

    +
  • +
+
+
+

In order to be compatible with the other back-end implementations, we must follow the naming conventions proposed for Devon4j applications. We will define the following end points for the listed services.

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that do not belong to the Order entity).

    +
  • +
  • +

    (POST) /mythaistar/login.

    +
  • +
  • +

    (GET) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

You can find all the details for the services implementation in the Swagger definition included in the My Thai Star project on Github.

+
+
+

To treat these services separately, the following routers were created:

+
+
+
    +
  • +

    bookingmanagement: will answer all requests with the prefix /mythaistar/services/rest/bookingmanagement/v1

    +
  • +
  • +

    dishmanagement: will answer all requests with the prefix /mythaistar/services/rest/dishmanagement/v1

    +
  • +
  • +

    ordermanagement: will answer all requests with the prefix /mythaistar/services/rest/ordermanagement/v1

    +
  • +
+
+
+

These routers will define the behavior for each service and use the logical layer.

+
+
+

An example of service definition:

+
+
+
+
router.post('/booking/search', (req: types.CustomRequest, res: Response) => {
+    try {
+        // body content must be SearchCriteria
+        if (!types.isSearchCriteria(req.body)) {
+            throw {code: 400, message: 'No booking token given' };
+        }
+
+        // use the searchBooking method defined at business logic
+        business.searchBooking(req.body, (err: types.Error | null, bookingEntity: types.PaginatedList) => {
+            if (err) {
+                res.status(err.code || 500).json(err.message);
+            } else {
+                res.json(bookingEntity);
+            }
+        });
+    } catch (err) {
+        res.status(err.code || 500).json({ message: err.message });
+    }
+});
+
+
+
+
+
Logic layer and Data access layer
+
+

In the logic layer we will locate all the business logic of the application. It will be located in the file logic.ts. If in this layer we need to get access to the data, we make use of data access layer directly, in this case using devon4node with the DynamoDB adapter.

+
+
+

Example:

+
+
+
+
export async function cancelOrder(orderId: string, callback: (err: types.Error | null) => void) {
+    let order: dbtypes.Order;
+
+    try {
+        // Data access
+        order = await oasp4fn.table('Order', orderId).promise() as dbtypes.Order;
+
+        [...]
+    }
+}
+
+
+
+

We could define the data access layer separately, but devon4node allows us to do this in a simple and clear way. So, we decided not to separate the data access layer from the business logic.

+
+
+
+
Security with Json Web Token
+
+

For the Authentication and Authorization the app will implement the json web token protocol.

+
+
+
+
JWT basics
+
+

Refer to JWT basics for more information.

+
+
+
+
JWT implementation details
+
+

The Json Web Token pattern will be implemented based on the JSON web token library available on npm.

+
+
+
+
== Authentication
+
+

Based on the JSON web token approach, we will implement a class Authentication to define the security entry point and filters. Also, as My Thai Star is a mainly public application, we will define here the resources that won’t be secured.

+
+
+

List of unsecured resources:

+
+
+
    +
  • +

    /services/rest/dishmanagement/\**: to allow anonymous users to see the dishes info in the menu section.

    +
  • +
  • +

    /services/rest/ordermanagement/v1/order: to allow anonymous users to save an order. They will need a booking token but they won’t be authenticated to do this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking: to allow anonymous users to create a booking. Only a booking token is necessary to accomplish this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking/cancel/\**: to allow canceling a booking from an email. Only the booking token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/accept/\**: to allow guests to accept an invite. Only a guest token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/decline/\**: to allow guests to reject an invite. Only a guest token is needed.

    +
  • +
+
+
+

To configure the login we will create an instance of Authentication in the app file and then we will use the auth method to handle the requests to the /login endpoint.

+
+
+
+
app.post('/mythaistar/login', auth.auth);
+
+
+
+

To verify the presence of the Authorization token in the headers, we will register the Authentication.registerAuthentication middleware in Express. This middleware will check if the token is correct; if so, it will place the user in the request and continue to process it. If the token is not correct it will continue processing the request normally.

+
+
+
+
app.use(auth.registerAuthentication);
+
+
+
+

Finally, we have two default users created in the database:

+
+
+
    +
  • +

    user: waiter

    +
  • +
  • +

    password: waiter

    +
  • +
  • +

    role: WAITER

    +
  • +
  • +

    user: user0

    +
  • +
  • +

    password: password

    +
  • +
  • +

    role: CUSTOMER

    +
  • +
+
+
+
+
== Token set up
+
+

Following the official documentation, the implementation details for MyThaiStar’s JWT will be:

+
+
+
    +
  • +

    Secret: Used as part of the signature of the token, acting as a private key. It can be modified at config.ts file.

    +
  • +
  • +

    Token Prefix schema: Bearer. The token will look like Bearer <token>

    +
  • +
  • +

    Header: Authorization. The response header where the token will be included. Also, in the requests, when checking the token it will be expected to be in the same header.

    +
  • +
  • +

    The Authorization header should be part of the Access-Control-Expose-Headers header to allow clients access to the Authorization header content (the token).

    +
  • +
  • +

    Signature Algorithm: To sign the token we will use the default algorithm HS512.

    +
  • +
+
+
+
+
== Current User request
+
+

To provide the client with the current user data, our application should expose a service to return the user details. In this case the Authentication class has a method called getCurrentUser which will return the user data. We only need to register it in Express.

+
+
+
+
app.get('/mythaistar/services/rest/security/v1/currentuser', auth.getCurrentUser);
+
+
+
+
+
== Authorization
+
+

We need to secure three services, that only should be accessible for users with role Waiter:

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter.

    +
  • +
+
+
+

To ensure this, the Authorization class has the securizedEndpoint method that guarantees access based on the role. This method can be used as middleware in secure services. As the role is included in the token, once validated we will have this information in the request and the middleware can guarantee access or return a 403 error.

+
+
+
+
app.use('/mythaistar/services/rest/ordermanagement/v1/order/filter', auth.securizedEndpoint('WAITER'));
+app.use('/mythaistar/services/rest/ordermanagement/v1/order/search', auth.securizedEndpoint('WAITER'));
+app.use('/mythaistar/services/rest/bookingmanagement/v1/booking/search', auth.securizedEndpoint('WAITER'));
+
+
+ +
+
+
Serverless design (deprecated)
+ +
+
+
Introduction
+
+

The NodeJS back-end for My Thai Star application is going to be based on:

+
+
+
    +
  • +

    Serverless as serverless framework

    +
  • +
  • +

    devon4node as data access layer framework

    +
  • +
  • +

    DynamoDB as NoSQL Database

    +
  • +
+
+
+

To know more details about the above technologies please visit the following documentation:

+
+
+ +
+
+
+
Basic architecture details
+
+

This structure can be shown in the following example image:

+
+
+
+folder organization +
+
+
+
    +
  • +

    handlers - All function handlers following devon4node structure

    +
  • +
  • +

    src

    +
    +
      +
    • +

      model - Folder with all data model

      +
    • +
    • +

      utils - Folder with all utils like classes and functions

      +
    • +
    • +

      config.ts - File with server configs

      +
    • +
    • +

      logic.ts - File with the business logic

      +
    • +
    +
    +
  • +
  • +

    test - Folder with all tests

    +
  • +
+
+
+
+
Layers
+
+
    +
  • +

    Service Layer: this layer will expose the REST API to exchange information with the client applications.

    +
  • +
  • +

    Logic Layer: the layer in charge of hosting the business logic of the application.

    +
  • +
  • +

    Data Access Layer: the layer to communicate with the data base.

    +
  • +
+
+
+
+
Service layer
+
+

The services layer will be solved using REST services with Serverless

+
+
+

To give service to the defined User Stories we will need to implement the following services:

+
+
+
    +
  • +

    provide all available dishes.

    +
  • +
  • +

    save a booking.

    +
  • +
  • +

    save an order.

    +
  • +
  • +

    provide a list of bookings (only for waiters) and allow filtering.

    +
  • +
  • +

    provide a list of orders (only for waiters) and allow filtering.

    +
  • +
  • +

    login service (see the Security section).

    +
  • +
  • +

    provide the current user data (see the Security section)

    +
  • +
+
+
+

In order to be compatible with the other back-end implementations, we must follow the naming conventions proposed for Devon4j applications. We will define the following end points for the listed services.

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that do not belong to the Order entity).

    +
  • +
  • +

    (POST) /mythaistar/login.

    +
  • +
  • +

    (GET) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

You can find all the details for the services implementation in the Swagger definition included in the My Thai Star project on Github.

+
+
+

To treat these HTTP services, we must define the handlers following the devon4node convention:

+
+
+
    +
  • +

    (handlers/Http/POST/dish-search-handler) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (handlers/Http/POST/booking-handler) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (handlers/Http/POST/order-handler) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (handlers/Http/POST/booking-search-handler) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (handlers/Http/POST/order-search-handler) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (handlers/Http/POST/order-filter-handler) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that does not belong to the Order entity).

    +
  • +
  • +

    (handlers/Http/POST/login-handler) /mythaistar/login.

    +
  • +
  • +

    (handlers/Http/GET/current-user-handler) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

These handlers will define the behavior for each service and use the logical layer.

+
+
+

An example of handler definition:

+
+
+
+
oasp4fn.config({ path: '/mythaistar/services/rest/bookingmanagement/v1/booking/search' });
+export async function bookingSearch(event: HttpEvent, context: Context, callback: Function) {
+    try {
+        const search = <types.SearchCriteria>event.body;
+        const authToken = event.headers.Authorization;
+        // TODO: verify the token here
+
+        auth.decode(authToken, (err, decoded) => {
+            if (err || decoded.role !==  'WAITER') {
+                throw { code: 403, message: 'Forbidden'};
+            }
+
+            // body content must be SearchCriteria
+            if (!types.isSearchCriteria(search)) {
+                throw { code: 400, message: 'No booking token given' };
+            }
+
+            business.searchBooking(search, (err: types.Error | null, bookingEntity: types.PaginatedList) => {
+                if (err) {
+                    callback(new Error(`[${err.code || 500}] ${err.message}`));
+                } else {
+                    callback(null, bookingEntity);
+                }
+            });
+        });
+    } catch (err) {
+        callback(new Error(`[${err.code || 500}] ${err.message}`));
+    }
+}
+
+
+
+

The default integration for a handler is lambda. See devon4node documentation for more information about default values and how to change it.

+
+
+
+
==
+
+

If you change the integration to lambda-proxy, you must take care that in this case the data will not be parsed. You must do JSON.parse explicitly.

+
+
+

After defining all the handlers, we must execute the fun command, which will generate the files serverless.yml and webpack.config.js.

+
+
+
+
Logic layer and Data access layer
+ +
+
+
Security with Json Web Token
+
+

For the Authentication and Authorization the app will implement the json web token protocol.

+
+
+
+
JWT basics
+
+

Refer to JWT basics for more information.

+
+
+
+
JWT implementation details
+
+

The Json Web Token pattern will be implemented based on the JSON web token library available on npm.

+
+
+
+
== Authentication
+
+

Based on the JSON web token approach, we will implement two methods: one to verify the user and generate the token, and another to decode the token and return the user data. Also, as My Thai Star is a mainly public application, we will define here the resources that won’t be secured.

+
+
+

List of unsecured resources:

+
+
+
    +
  • +

    /services/rest/dishmanagement/**: to allow anonymous users to see the dishes info in the menu section.

    +
  • +
  • +

    /services/rest/ordermanagement/v1/order: to allow anonymous users to save an order. They will need a booking token but they won’t be authenticated to do this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking: to allow anonymous users to create a booking. Only a booking token is necessary to accomplish this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking/cancel/**: to allow canceling a booking from an email. Only the booking token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/accept/**: to allow guests to accept an invite. Only a guest token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/decline/**: to allow guests to reject an invite. Only a guest token is needed.

    +
  • +
+
+
+

To configure the login we will create a handler called login and then we will use the method code to verify the user and generate the token.

+
+
+
+
app.post(oasp4fn.config({ integration: 'lambda-proxy', path: '/mythaistar/login' });
+export async function login(event: HttpEvent, context: Context, callback: Function) {
+.
+.
+.
+.
+}
+
+
+
+

We have two default users created in the database:

+
+
+
    +
  • +

    user: waiter

    +
  • +
  • +

    password: waiter

    +
  • +
  • +

    role: WAITER

    +
  • +
  • +

    user: user0

    +
  • +
  • +

    password: password

    +
  • +
  • +

    role: CUSTOMER

    +
  • +
+
+
+
+
== Token set up
+ +
+
+
== Current User request
+
+

To provide the client with the current user data our application should expose a service to return the user details. In order to do this, we must define a handler called current-user-handler. This handler must decode the Authorization token and return the user data.

+
+
+
+
oasp4fn.config({
+    path: '/mythaistar/services/rest/security/v1/currentuser',
+});
+export async function currentUser(event: HttpEvent, context: Context, callback: Function) {
+    let authToken = event.headers.Authorization;
+    try {
+        auth.decode(authToken, (err: any, decoded?: any) => {
+            if (err) {
+                callback(new Error(`[403] Forbidden`));
+            } else {
+                callback(null, decoded);
+            }
+        });
+    } catch (err) {
+        callback(new Error(`[${err.code || 500}] ${err.message}`));
+    }
+}
+
+
+
+
+
== Authorization
+
+

We need to secure three services, that only should be accessible for users with role Waiter:

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter.

    +
  • +
+
+
+

To ensure this, we must decode the Authorization token and check the result. As the role is included in the token, once validated we will have this information and can guarantee access or return a 403 error.

+
+
+
+
oasp4fn.config({ path: '/mythaistar/services/rest/bookingmanagement/v1/booking/search' });
+export async function bookingSearch(event: HttpEvent, context: Context, callback: Function) {
+    const authToken = event.headers.Authorization;
+    auth.decode(authToken, (err, decoded) => {
+        try {
+            if (err || decoded.role !==  'WAITER') {
+                throw { code: 403, message: 'Forbidden' };
+            }
+
+            [...]
+
+        } catch (err) {
+            callback(new Error(`[${err.code || 500}] ${err.message}`));
+        }
+    });
+}
+
+
+ +
+
+
GraphQL design
+
+

TODO

+
+
+
+
+
+

32. Client Side

+
+ +
+
Angular design
+ +
+
+
Introduction
+
+

MyThaiStar client side has been built using latest frameworks, component libraries and designs:

+
+
+

Angular 4 as main front-end Framework. https://angular.io/

+
+
+

Angular/CLI 1.0.5 as Angular tool helper. https://github.com/angular/angular-cli

+
+
+

Covalent Teradata 1.0.0-beta4 as Angular native component library based on Material Design. https://teradata.github.io/covalent/#/

+
+
+

Angular/Material2 1.0.0-beta5 used by Covalent Teradata. https://github.com/angular/material2

+
+
+

Note: these dependencies are evolving at the moment and, where possible, we are updating them in the project.

+
+
+
+
Basic project structure
+
+

The project is using the basic project seed that Angular/CLI provides with “ng new <project name>”. Then the app folder has been organized as Angular recommends and goes as follows:

+
+
+
    +
  • +

    app

    +
    +
      +
    • +

      components

      +
      +
        +
      • +

        sub-components

        +
      • +
      • +

        shared

        +
      • +
      • +

        component files

        +
      • +
      +
      +
    • +
    • +

      main app component

      +
    • +
    +
    +
  • +
  • +

    assets folder

    +
  • +
  • +

    environments folder

    +
  • +
  • +

    rest of angular files

    +
  • +
+
+
+

This structure can be shown in the following example image:

+
+
+
+folder organization +
+
+
+
+
Main Views and components
+
+

List of components that serve as a main view to navigate or components developed to make atomically a group of functionalities which given their nature, can be highly reusable through the app.

+
+
+
+routes +
+
+
+

Note: no-name-route corresponds to whatever URL the user introduced and does not exist, it redirects to Home-Component.

+
+
+
+
Public area
+ +
+
+
== App Component
+
+

Contains the components that are on top of all views, including:

+
+
+
+
== Order sidenav
+
+

Sidenav where selected orders are displayed with their total price and some comments.

+
+
+
+ +
+

This sidenav proposal is to let user navigate through the app when the screen is too small to show the navigation buttons on the header.

+
+
+
+ +
+

It contains the title, and some other basic functions regarding open and close sidenavs.

+
+
+
+ +
+

At the end of the page that shows only when open on desktop.

+
+
+
+
== Home-Component
+
+

Main view that shows up when the app initializes.

+
+
+
+ +
+

View where the users can view, filter and select the dishes (with their extras) they want to order. It contains a component for each menu entry:

+
+
+
+ +
+

This component composes all the data of a dish in a card. Component made to display indeterminate number of dishes easily.

+
+
+
+
== Book Table Component
+
+

View to book a table on a given date with a given number of attendees, or to create a reservation with a number of invitations via email.

+
+
+
+
== Book-table-dialog
+
+

Dialog which opens as a result of fulfilling the booking form, it displays all the data of the booking attempt, if everything is correct, the user can send the information or cancel if something is wrong.

+
+
+
+
== Invitation-dialog
+
+

Dialog which opens as a result of fulfilling the invitation form, it displays all the data of the booking with friends attempt, if everything is correct, the user can send the information or cancel if something is wrong.

+
+
+
+
== User Area
+
+

Group of dialogs with the proposal of giving some functionalities to the user, as login, register, change password or connect with Twitter.

+
+
+
+
== Login-dialog
+
+

Dialog with a tab to navigate between login and register.

+
+
+
+
== Password-dialog
+
+

Functionality reserved to already logged users, in this dialog the user can change freely their password.

+
+
+
+
== Twitter-dialog
+
+

Dialog designed specifically to connect your user account with Twitter.

+
+
+
+
Waiter cockpit area
+
+

Restricted area to workers of the restaurant, here we can see all information about booked tables with the selected orders and the reservations with all the guests and their acceptance or decline of the event.

+
+
+
+
== Order Cockpit Component
+
+

Data table with all the booked tables and a filter to search them, to show more info about that table you can click on it and open a dialog.

+
+
+
+
== Order-dialog
+
+

Complete display of data regarding the selected table and its orders.

+
+
+
+
== Reservation Cockpit Component
+
+

Data table with all the reservations and a filter to search them, to show more info about that table you can click on it and open a dialog.

+
+
+
+
== Reservation-dialog
+
+

Complete display of data regarding the selected table and its guests.

+
+
+
+
Email Management
+
+

As the application sends emails to both guests and hosts, we chose a URL-based approach where the email contains a button with a URL to a service in the app and a token; the front-end reads that token and, depending on the URL, will redirect to one service or another. For example:

+
+
+
+
`http://localhost:4200/booking/cancel/CB_20170605_8fb5bc4c84a1c5049da1f6beb1968afc`
+
+
+
+

This URL will tell the app that is a cancellation of a booking with the token CB_20170605_8fb5bc4c84a1c5049da1f6beb1968afc. The app will process this information, send it to back-end with the correct headers, show the confirmation of the event and redirect to home page.

+
+
+

The main cases at the moment are:

+
+
+
+
== Accept Invite
+
+

A guest accepts an invitation sent by a host. They will receive another email to decline if they change their mind later on.

+
+
+
+
== Reject Invite
+
+

A guest declines the invitation.

+
+
+
+
== Cancel Reservation
+
+

A host cancels the reservation; everybody who has accepted or has not yet answered will receive an email notifying them that the event is canceled. Also, all the orders related to this reservation will be removed.

+
+
+
+
== Cancel Orders
+
+

When you have a reservation, you will be assigned to a token, with that token you can save your order in the restaurant. When sent, you will receive an email confirming the order and the possibility to remove it.

+
+
+
+
Services and directives
+
+

Services are where all the main logic between components of that view should be. This includes calling a remote server, composing objects, calculate prices, etc.

+
+
+

Directives are a single functionality that are related to a component.

+
+
+

As it can be seen in the basic structure, every view that has a minimum of logic or need to call a server has its own service located in the shared folder.

+
+
+

Also, services and directives can be created to compose a reusable piece of code that will be reused in some parts of the code:

+
+
+
+
Price-calculator-service
+
+

This service located in the shared folder of sidenav contains the basic logic to calculate the price of a single order (with all the possibilities) and to calculate the price of a full list of orders for a table. As this is used in the sidenav and in the waiter cockpit, it has been exported as a service to be imported where needed and easily testable.

+
+
+
+
Authentication
+
+

The authentication service serves as a validator of roles and login and, at the same time, stores the basic data regarding security and authentication.

+
+
+

The main task of this service is to provide visibility at app level of the current user information:

+
+
+
    +
  • +

    Check if the user is logged or not.

    +
  • +
  • +

    Check the permissions of the current user.

    +
  • +
  • +

    Store the username and the JWT token.

    +
  • +
+
+
+
+
Snack Service
+
+

Service created to serve as a factory of Angular Material Snackbars, which are used commonly through the app. This service accepts some parameters to customize the snackBar and opens it with these parameters.

+
+
+
+
Window Service
+
+

For responsiveness reasons, the dialogs have to accept a width parameter to adjust to screen width and this information is given by Window object, as it is a good practice to have it in an isolated service, which also calculates the width percentage to apply on the dialogs.

+
+
+
+
Equal-validator-directive
+
+

This directive located in the shared folder of userArea is used in 2 fields to make sure they have the same value. This directive is used in confirm password fields in register and change password.

+
+
+
+
Mock Back-end
+
+

Developing against a mock while the real back-end is still being built lets us make a more realistic application and eases the adaptation once the back-end can be connected and called. Its structure is as follows:

+
+
+
+back end +
+
+
+

Contains the three main groups of functionalities in the application. Every group is composed by:

+
+
+
    +
  • +

    An interface with all the methods to implement.

    +
  • +
  • +

    A service that implements that interface, the main task of this service is to choose between real back-end and mock back-end depending on an environment variable.

    +
  • +
  • +

    Mock back-end service which implements all the methods declared in the interface using mock data stored in a local file and mainly uses Lodash to operate the arrays.

    +
  • +
  • +

    Real back-end service works as Mock back-end but in this case the methods call for server rest services through Http.

    +
  • +
+
+
+
+
Booking
+
+

The booking group of functionalities manages the calls to reserve a table with a given time and assistants or with guests, get reservations filtered, accept or decline invitations or cancel the reservation.

+
+
+
+
Orders
+
+

Management of the orders, including saving, filtering and cancel an order.

+
+
+
+
Dishes
+
+

The dishes group of functionalities manages the calls to get and filter dishes.

+
+
+
+
Login
+
+

Login manages the userArea logic: login, register and change password.

+
+
+
+
Security
+
+

My Thai Star security is composed by two main security services:

+
+
+
+
Auth-guard
+
+

Front-end security approach: this service implements an interface called CanActivate that comes from the angular/router module. The CanActivate interface forces you to implement a canActivate() function which returns a Boolean. +This service checks, using the data stored by the Auth-Service, whether the user is logged in and has enough permission to access the waiter cockpit. This prevents a forbidden user from accessing the waiter cockpit just by editing the URL in the browser.

+
+
+
+
JWT
+
+

JSON Web Token consists of a token that is generated by the server when the user logs in. Once provided, the token has to be included in an Authentication header on every Http call to the rest service, otherwise the call will be forbidden. +JWT also has an expiration date and role checking, so if a user does not have enough permissions or stays logged in for an amount of time that exceeds this expiration date, the next time they make a service call the server will return an error and forbid the call. You can log in again to restore the token.

+
+
+
+
== HttpClient
+
+

To implement this Authorization header management, an HttpClient service has been implemented. +This service works as an envelope around Http, providing some extra functionality, such as header management and automatic handling of a server token error in case the JWT has expired, is corrupted or is not permitted.

+
+ +
+
+
Xamarin design
+
+

TODO

+
+
+
+
+
+

33. Security

+
+ +
+

33.1. Two-Factor Authentication

+
+

Two-factor Authentication (2FA) provides an additional level of security to your account. Once enabled, in addition to supplying your username and password to log in, you’ll be prompted for a code generated by your authenticator — for example, Google Authenticator or a password manager on one of your devices.

+
+
+

By enabling 2FA, an additional one-time password is required to log into your account, which requires access to your paired device. This massively increases the barrier for an attacker to break into your account.

+
+
+
+

33.2. Back-end mechanism

+
+

In the back-end, we utilize Spring Security for any authentication.

+
+
+

Following the arrows, one can see all processes regarding authentication. The main idea is to check all credentials depending on their 2FA status and then either grant access to the specific user or deny access. This picture illustrates a normal authentication with username and password.

+
+
+
+security cross component +
+
+
+

When dealing with 2FA, another provider and filter is handling the request from /verify

+
+
+
+security cross component twofactor +
+
+
+

Here you can observe which filter will be used. +The JWT-Authentication-Filter intercepts any request, enforcing authentication via JWT

+
+
+
+filters png +
+
+
+ + + + + +
+ + +Whenever the secret or QR code gets transferred between two parties, one must enforce SSL/TLS or IPsec to comply with RFC 6238. +
+
+
+
+

33.3. Activating Two-Factor Authentication

+
+

In the current state, TOTP +will be used for OTP generation. For this purpose we recommend the Google Authenticator or any TOTP generator out there.

+
+
+
    +
  • +

    Login with your account

    +
  • +
  • +

    Open the 2FA settings

    +
  • +
  • +

    Activate the 2FA Status

    +
  • +
  • +

    Initialize your device with either a QR-Code or a secret

    +
  • +
+
+
+
+

33.4. Frontend

+
+

These are the two main options, which you can obtain by toggling between QR-Code and secret.

+
+
+
+2FA qr code menu +
+
+
+
+2FA secret menu +
+
+
+

After an activation and logout. This prompt will ask you to enter the OTP given from your device.

+
+
+
+otp prompt +
+
+
+
+
+
+

34. Testing

+
+ +
+
+
+

35. Server Side

+
+ +
+
Java testing
+ +
+
+
Component testing
+
+

We are going to test our components as a unit using Spring Test and Devon4j-test modules.

+
+
+

In order to test a basic component of the app first we will create a test class in the src/test/java folder and inside the main package of the test module. We will name the class following the convention.

+
+
+
+
[Component]Test
+
+
+
+

Then, in the declaration of the test class, we will use the @SpringBootTest annotation to run the application context. In addition, we will extend the ComponentTest from Devon4j-test module to have access to the main functionalities of the module, see more details here.

+
+
+

Spring Test allows us to use Dependency Injection so we can inject our component directly using the @Inject annotation.

+
+
+

Each test will be represented by a method annotated with @Test. Inside the method we will test one functionality, evaluating the result thanks to the asserts provided by the ComponentTest class that we are extending.

+
+
+

A simple test example

+
+
+
+
@SpringBootTest(classes = SpringBootApp.class)
+public class DishmanagementTest extends `ComponentTest` {
+
+  @Inject
+  private Dishmanagement dishmanagement;
+
+  @Test
+  public void findAllDishes() {
+
+    PaginatedListTo<DishCto> result = this.dishmanagement.findDishes();
+    assertThat(result).isNotNull();
+  }
+
+  ...
+}
+
+
+
+
+
Running the tests
+ +
+
+
From Eclipse
+
+

We can run the test from within Eclipse with the contextual menu Run As > JUnit Test. This functionality can be launched from method level, class level or even package level. The results will be shown in the JUnit tab.

+
+
+
+test results eclipse +
+
+
+
+
From command line using Maven
+
+

We can also run tests using Maven and the command line, using the command mvn test (or mvn clean test).

+
+
+
+
`C:\MyThaiStar>mvn clean test`
+
+
+
+

Doing this we will run all the tests of the project (recognized by the Test word at the end of the classes) and the results will be shown by sub-project.

+
+
+
+
...
+
+[D: 2017-07-17 09:30:08,457] [P: INFO ] [C: ] [T: Thread-5] [L: org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean] - [M: Closing JPA EntityManagerFactory for persistence unit 'default']
+
+Results :
+
+Tests run: 11, Failures: 0, Errors: 0, Skipped: 1
+
+...
+
+[INFO]
+[INFO] --- maven-compiler-plugin:3.1:testCompile (default-testCompile) @ mtsj-server ---
+[INFO] No sources to compile
+[INFO]
+[INFO] --- maven-surefire-plugin:2.12.4:test (default-test) @ mtsj-server ---
+[INFO] No tests to run.
+[INFO] ------------------------------------------------------------------------
+[INFO] Reactor Summary:
+[INFO]
+[INFO] mtsj ............................................... SUCCESS [  0.902 s]
+[INFO] mtsj-core .......................................... SUCCESS [02:30 min]
+[INFO] mtsj-server ........................................ SUCCESS [  1.123 s]
+[INFO] ------------------------------------------------------------------------
+[INFO] BUILD SUCCESS
+[INFO] ------------------------------------------------------------------------
+[INFO] Total time: 02:35 min
+[INFO] Finished at: 20XX-07-17T09:30:13+02:00
+[INFO] Final Memory: 39M/193M
+[INFO] ------------------------------------------------------------------------
+
+
+ +
+
+
.NET testing
+
+

TODO

+
+ +
+
+
NodeJS testing
+
+

TODO

+
+ +
+
+
GraphQL testing
+
+

TODO

+
+
+
+
+
+

36. Client Side

+
+ +
+
Angular testing
+
+
+testing +
+
+
+

MyThaiStar testing is made using Angular default testing environment and syntax language: Karma and Jasmine

+
+
+

To test an element of the application, tests are written in special files with the extension .spec.ts; then, in the MyThaiStar angular/CLI config, you can notice that there is an array with only one entry, Karma, which in turn has one entry pointing to Karma.config.js.

+
+
+

In the configuration of Karma we indicate which syntax language we are going to use (currently Jasmine, as said before) among some other configurations; the last one is remarkable: browsers. By default, the only available browser is Chrome, because Karma works by opening a Chrome window to run all the tests; in that same window, Karma shows the results or errors of the test run. But we can add some other browsers to adjust to our necessities — for example, in some automatic processes that run from the console it is not an option to open a Chrome window; in that case, MyThaiStar uses PhantomJS and ChromeHeadless.

+
+
+

Taking all of this into account, to run the tests in MyThaiStar we need to move to the project root folder and run this command: ng test --browser <browser>

+
+
+
+
==
+
+

If you run just ng test it will run the three browser options simultaneously, giving as a result three test runs and outputs, it can cause timeouts and unwanted behaviors, if you want a shortcut to run the test with chrome window you can just run yarn test so we really encourage to not use just ng test. +== ==

+
+
+

Here we are going to see how Client side testing of MyThaiStar has been done.

+
+
+
+
Testing Components
+
+

Angular components were created using angular/CLI ng create component, so they already come with a spec file to test them. The only thing left to do is to add the providers and imports needed for the component to work as the component itself; once this is done, the most basic test is to make sure that all the dependencies and the component itself can be correctly created.

+
+
+

As an example, this is the spec.ts of the menu view component:

+
+
+
+
all the imports...
+
+describe('MenuComponent', () => {
+  let component: MenuComponent;
+  let fixture: ComponentFixture<MenuComponent>;
+
+  beforeEach(async(() => {
+    TestBed.configureTestingModule({
+      declarations: [ MenuComponent, MenuCardComponent ],
+      providers: [SidenavService, MenuService, SnackBarService],
+      imports: [
+        BrowserAnimationsModule,
+        BackendModule.forRoot({environmentType: 0, restServiceRoot: 'v1'}),
+        CovalentModule,
+      ],
+    })
+    .compileComponents();
+  }));
+
+  beforeEach(() => {
+    fixture = TestBed.createComponent(MenuComponent);
+    component = fixture.componentInstance;
+    fixture.detectChanges();
+  });
+
+  it('should create', () => {
+    expect(component).toBeTruthy();
+  });
+});
+
+
+
+

First we declare the component to be tested and a Fixture object, then, we configure the testingModule right in the same way we could configure the MenuModule with the difference here that tests always have to use the mock back-end because we do not want to really depend on a server to test our components.

+
+
+

Once configured the test module, we have to prepare the context of the test, in this case we create the component, that is exactly what is going on in the beforeEach() function.

+
+
+

Finally, we are ready to use the component and its fixture to check if the component has been correctly created.

+
+
+

At this moment this is the case for most of the components, in the future, some work would be applied on this matter to have a full testing experience in MyThaiStar components.

+
+
+
+
Dialog components
+
+

Dialog components are in a special category because they cannot be tested normally. In the way Material implements the opening of dialogs, you have to create a component that will load into a dialog; to tell the module to load these components when needed, they have to be added into a special array category: EntryComponents. So, to test them, we need to import them in the test file as well.

+
+
+

Also, the testing code to open the component is a bit different too:

+
+
+
+
...
+  beforeEach(() => {
+    dialog = TestBed.get(MdDialog);
+    component = dialog.open(CommentDialogComponent).componentInstance;
+  });
+...
+
+
+
+

That is right, the beforeEach() function is slightly different from the example above; in this case we have to force the test to know that the component is only displayed in a dialog, so we have to open a dialog with this component in order to access it.

+
+
+
+
Testing Services
+
+

As well as components, services can be tested too, actually, they are even more necessary to be tested because they have inside more complex logic and data management.

+
+
+

As an example of testing services, I am going to use a well-designed service, with a specific purpose and with its logic completely tested: the price-calculator service:

+
+
+
+
...
+
+describe('PriceCalculatorService', () => {
+
+  beforeEach(() => {
+    TestBed.configureTestingModule({
+      providers: [PriceCalculatorService],
+    });
+  });
+
+  it('should be properly injected', inject([PriceCalculatorService], (service: PriceCalculatorService) => {
+    expect(service).toBeTruthy();
+  }));
+
+  describe('check getPrice method', () => {
+
+    it('should calculate price for single order without extras', inject([PriceCalculatorService], (service: PriceCalculatorService) => {
+      const order: OrderView = {
+        dish: {
+          id: 0,
+          price: 12.50,
+          name: 'Order without extras',
+        },
+        orderLine: {
+          comment: '',
+          amount: 1,
+        },
+        extras: [],
+      };
+
+      expect(service.getPrice(order)).toEqual(order.dish.price);
+    }));
+...
+
+
+
+

In service tests, we have to inject the service in order to use it; then we can define some initializing contexts to test whether the functions of the service return the expected values. In the example we can see how an imaginary order is created and the function getPrice() is expected to correctly calculate the price of that order.

+
+
+

In this same test file you can find some more test regarding all the possibilities of use in that services: orders with and without extras, single order, multiple orders and so on.

+
+
+

Some services, as well as the components, only test that they are correctly created and their dependencies properly injected; in the future there will be full coverage regarding these services' tests.

+
+
+
+
Testing in a CI environment
+ +
+
+
Xamarin testing
+
+

TODO

+
+
+
+
+
+

37. End to end

+
+ +
+
MrChecker E2E Testing
+ +
+
+
Introduction
+
+

MrChecker is a testing framework included in devonfw with several useful modules, from which we will focus on the Selenium Module, a module designed to make end-to-end testing easier to implement.

+
+
+
+
How to use it
+
+

First of all download the repository.

+
+
+

You must run My Thai Star front-end and back-end application and modify your URL to the front in mrchecker/endtoend-test/src/resources/settings.properties

+
+
+

Now you can run end to end test to check if the application works properly.

+
+
+

To run the e2e test you have two options:

+
+
+

The first option is using the command line in devonfw distribution

+
+
+
+
cd mrchecker/endtoend-test/
+mvn test -Dtest=MyThaiStarTest -Dbrowser=Chrome
+
+
+
+

optionally you can use it with a headless version or using another navigator:

+
+
+
+
// chrome headless (without visual component)
+mvn test -Dtest=MyThaiStarTest -Dbrowser=ChromeHeadless// use firefox navigator
+mvn test -Dtest=MyThaiStarTest -Dbrowser=FireFox
+
+
+
+

The second is importing the project in devonfw Eclipse and running MyThaiStarTest.java as JUnit (right click, run as JUnit)

+
+
+

They can be executed one by one or all in one go; comment or uncomment @Test before those tests to enable or disable them.

+
+
+

For more information about how to use MrChecker and build your own end to end test read: + * MrChecker documentation + * MrChecker tutorial for My Thai Star

+
+
+
+
End to end tests in My Thai Star
+
+

We have included a test suite with four tests to run in My Thai Star to verify everything works properly.

+
+
+

The included tests do the following:

+
+
+
    +
  • +

    Test_loginAndLogOut: Log in and log out.

    +
  • +
  • +

    Test_loginFake: Attempt to log in with a fake user.

    +
  • +
  • +

    Test_bookTable: Log in and book a table, then login with a waiter and check if the table was successfully booked.

    +
  • +
+
+
+

*` Test_orderMenu`: Log in and order food for a certain booked table.

+
+
+

These four tests can be found inside MyThaiStarTest.java located here.

+
+
+
+
+
+

38. UI design

+
+ +
+

38.1. Style guide

+
+
+mts styleguide +
+
+
+ +
+
+
+

39. CI/CD

+
+ +
+

39.1. My Thai Star in Production Line

+ +
+
+

39.2. What is PL?

+
+

The Production Line Project is a set of server-side collaboration tools for Capgemini engagements. It has been developed for supporting project engagements with individual tools like issue tracking, continuous integration, continuous deployment, documentation, binary storage and much more!

+
+
+
+pl logo +
+
+
+
+

39.3. Introduction

+
+

Although the PL Project is a wide set of tools, only 3 are going to be mainly used for My Thai Star projects to build a Continuous Integration and Continuous Delivery environment. All three are available in the PL instance used for this project.

+
+
+
    +
  1. +

    Jenkins

    +
    +

    This is going to be the "main tool". Jenkins helps to automate the non-human part of the development with Continuous Integration and is going to host all Pipelines (and, obviously, execute them).

    +
    +
  2. +
  3. +

    Nexus

    +
    +

    Nexus manages software "artifacts" required for development. It is possible to both download dependencies from Nexus and publish artifacts as well. It allows to share resources within an organization.

    +
    +
  4. +
  5. +

    SonarQube

    +
    +

    It is a platform for continuous inspection of the code. It is going to be used for the Java back-end.

    +
    +
  6. +
+
+
+
+

39.4. Where can I find all My Thai Star Pipelines?

+
+

They are located under the MTS folder of the PL instance:

+
+
+
+mts pipelines +
+
+
+

Those Jenkins Pipelines will not have any code to execute. They’re just pointing to all Jenkinsfiles under the /jenkins folder of the repository. They can be found here.

+
+
+
+

39.5. CI in My Thai Star stack

+
+ +
+
+
+

39.6. How to configure everything out of the box

+
+

Production Line currently has a template to integrate My Thai Star. All information can be found at devonfw production line repository

+
+ +
+
Angular CI
+
+

The Angular client-side of My Thai Star is going to have some specific needs for the CI-CD Pipeline to perform mandatory operations.

+
+
+
+
Pipeline
+
+

The Pipeline for the Angular client-side is going to be called MyThaiStar_FRONT-END_BUILD. It is located in the PL instance, under the MTS folder (as previously explained). It is going to follow a process flow like this one:

+
+
+
+angular pipeline flow +
+
+
+

Each of those steps is called a stage in the Jenkins context. Let’s see what those steps mean in the context of the Angular application:

+
+
+
    +
  1. +

    Declarative: Checkout SCM

    +
    +

    Retrieves the project from the GitHub repository which it’s located. This step is not defined directly in our pipeline, but as it is loaded from the repository this step should always be done at the beginning.

    +
    +
    +
    +pipeline config +
    +
    +
  2. +
  3. +

    Declarative: Tool Install

    +
    +

    The Pipeline needs some Tools to perform some operations with the Angular project. These tool is a correct version of NodeJS (10.17.0 LTS) with Yarn installed as global package.

    +
    +
    +
    +
    tools {
    +    nodejs "NodeJS 10.14.0"
    +}
    +
    +
    +
  4. +
  5. +

    Loading Custom Tools

    +
    +

    The Pipeline also needs a browser in order to execute the tests, so in this step the chrome-stable will be loaded. We will use it in a headless mode.

    +
    +
    +
    +
    tool chrome
    +
    +
    +
  6. +
  7. +

    Fresh Dependency Installation

    +
    +

    The script $ yarn does a package installation. As we always clean the workspace after the pipeline, all packages must be installed in every execution.

    +
    +
  8. +
  9. +

    Code Linting

    +
    +

    This script executes a linting process of TypeScript. Rules can be defined in the tslint.json file of the project. It throws an exception whenever a file contains a non-compliant piece of code.

    +
    +
  10. +
  11. +

    Execute Angular tests

    +
    +

    The CI testing of the Angular client is different than the standard local testing (adapted to CI environments, as specified in the Adaptation section of document). This script just executes the following commands:

    +
    +
    +
    +
    ng test --browsers ChromeHeadless --watch=false
    +
    +
    +
  12. +
  13. +

    Check dependencies

    +
    +

    Before continuing, we print the result of yarn audit. It shows the vulnerabilities in the dependencies. It does not process the response. The purpose is only to track the result of the command.

    +
    +
    +
    +
    yarn audit
    +
    +
    +
  14. +
  15. +

    SonarQube code analysis

    +
    +

    The script loads and executes the tool sonar-scanner. This tool is loaded here because it’s not used in any other part of the pipeline. The sonar-scanner will take all code, upload it to SonarQube and wait until SonarQube sends us a response with the quality of our code. If the code does not pass the quality gate, the pipeline will stop at this point.

    +
    +
  16. +
  17. +

    Build Application

    +
    +

    The building process of the Angular client would result in a folder called /dist in the main Angular’s directory. That folder is the one that is going to be served afterwards as an artifact. This process has also been adapted to some Deployment needs. This building script executes the following:

    +
    +
    +
    +
    ng build --configuration=docker
    +
    +
    +
  18. +
  19. +

    Deliver application into Nexus

    +
    +

    Once the scripts produce the Angular artifact (/dist folder), it’s time to package it and store into nexus.

    +
    +
  20. +
  21. +

    Declarative: Post Actions

    +
    +

    At the end, this step is always executed, even if a previous stage fails. We use this step to clean up the workspace for future executions.

    +
    +
    +
    +
    post {
    +    always {
    +        cleanWs()
    +    }
    +}
    +
    +
    +
  22. +
+
+
+
+
Adjustments
+
+

The Angular project Pipeline needed some "extra" features to complete all planned processes. Those features resulted in some additions to the project.

+
+
+
+
Pipeline Environment
+
+

In order to easily reuse the pipeline in other angular projects, all variables have been defined in the block environment. All variables have the default values that Production Line uses, so if you’re going to work in production line you won’t have to change anything. Example:

+
+
+
+
environment {
+    // Script for build the application. Defined at package.json
+    buildScript = 'build --configuration=docker'
+    // Script for lint the application. Defined at package.json
+    lintScript = 'lint'
+    // Script for test the application. Defined at package.json
+    testScript = 'test:ci'
+    // Angular directory
+    angularDir = 'angular'
+    // SRC folder. It will be angularDir/srcDir
+    srcDir = 'src'
+    // Name of the custom tool for chrome stable
+    chrome = 'Chrome-stable'
+
+    // SonarQube
+    // Name of the SonarQube tool
+    sonarTool = 'SonarQube'
+    // Name of the SonarQube environment
+    sonarEnv = "SonarQube"
+
+    // Nexus
+    // Artifact groupId
+    groupId = 'com.devonfw.mythaistar'
+    // Nexus repository ID
+    repositoryId= 'pl-nexus'
+    // Nexus internal URL
+    repositoryUrl = 'http://nexus3-core:8081/nexus3/repository/maven-snapshots'
+    // Maven global settings configuration ID
+    globalSettingsId = 'MavenSettings'
+    // Maven tool id
+    mavenInstallation = 'Maven3'
+}
+
+
+
+
+
== Description
+
+
    +
  • +

    build Script: script for build the application. It must be defined at package.json.

    +
    +

    Example (package.json):

    +
    +
    +
    +
    {
    +    "name": "mythaistar-restaurant",
    +    ...
    +    "scripts": {
    +        ...
    +        "build": "ng build",
    +        ...
    +    }
    +    ...
    +}
    +
    +
    +
    +

    This will be used as follows:

    +
    +
    +
    +
    sh """yarn ${buildScript}"""
    +
    +
    +
  • +
  • +

    lint Script: Script for lint the application. Defined at package.json

    +
    +

    Example (package.json):

    +
    +
    +
    +
    {
    +    "name": "mythaistar-restaurant",
    +    ...
    +    "scripts": {
    +        ...
    +        "lint": "ng lint",
    +        ...
    +    }
    +    ...
    +}
    +
    +
    +
    +

    This will be used as follows:

    +
    +
    +
    +
    sh """yarn ${lintScript}"""
    +
    +
    +
  • +
  • +

    test Script: Script for test the application. Defined at package.json

    +
    +

    Example (package.json):

    +
    +
    +
    +
    {
    +    "name": "mythaistar-restaurant",
    +    ...
    +    "scripts": {
    +        ...
    +        "test:ci": "npm run postinstall:web && ng test --browsers ChromeHeadless --watch=false",
    +        ...
    +    }
    +    ...
    +}
    +
    +
    +
    +

    This will be used as follows:

    +
    +
    +
    +
    sh """yarn ${testScript}"""
    +
    +
    +
  • +
  • +

    angular-Dir: Relative route to angular application. In My Thai Star this is the angular folder. The actual directory (.) is also allowed.

    +
    +
    +angular directory +
    +
    +
  • +
  • +

    srcDir: Directory where you store the source code. For angular applications the default value is src

    +
    +
    +src directory +
    +
    +
  • +
  • +

    chrome: Since you need a browser to run your tests, we must provide one. This variable contains the name of the custom tool for google chrome.

    +
    +
    +chrome installation +
    +
    +
  • +
  • +

    sonar-Tool: Name of the SonarQube scanner installation.

    +
    +
    +sonar scanner +
    +
    +
  • +
  • +

    sonar-Env: Name of the SonarQube environment. SonarQube is the default value for PL.

    +
    +
    +sonar env +
    +
    +
  • +
  • +

    group-Id: Group id of the application. It will be used to storage the application in nexus3

    +
    +
    +nexus3 groupid +
    +
    +
  • +
  • +

    repository-Id: Id of the nexus3 repository. It must be defined at maven global config file.

    +
    +
    +nexus3 id +
    +
    +
  • +
  • +

    repository URL: The URL of the repository.

    +
  • +
  • +

    global Settings Id: The id of the global settings file.

    +
    +
    +nexus3 global config +
    +
    +
  • +
  • +

    maven Installation: The name of the maven tool.

    +
    +
    +maven tool +
    +
    +
  • +
+
+ +
+
+
Java CI
+
+

The Java server-side of My Thai Star is a devon4j-based application. As long as Maven and a JDK are going to be needed, the Pipeline should have those tools available as well.

+
+
+
+
Pipeline
+
+

This Pipeline is called MyThaiStar_SERVER_BUILD, and it is located exactly in the same PL instance’s folder as MyThaiStar_FRONT-END_BUILD. Let’s see how the Pipeline’s flow behaves.

+
+
+
+java pipeline flow +
+
+
+

Check those Pipeline stages with more detail:

+
+
+
    +
  1. +

    Declarative: Checkout SCM

    +
    +

    Gets the code from https://github.com/devonfw/my-thai-star . This step is not defined directly in our pipeline, but as it is loaded from the repository this step should always be done at the beginning.

    +
    +
  2. +
  3. +

    Declarative: Tool Install

    +
    +

    The My Thai Star application works with JDK11. In this step, if JDK11 is not installed, we install it and then put the JDK folder into PATH.

    +
    +
    +
    +
    tools {
    +  jdk 'OpenJDK11'
    +}
    +
    +
    +
  4. +
  5. +

    Loading Custom Tools

    +
    +

    In this step we load the tools that can not be loaded in the previous step. As My Thai Star is delivered as docker container, in this step we load docker as custom tool.

    +
    +
    +
    +
    tool dockerTool
    +
    +
    +
  6. +
  7. +

    Install dependencies

    +
    +

    This step will download all project dependencies.

    +
    +
    +
    +
    mvn clean install -Dmaven.test.skip=true
    +
    +
    +
  8. +
  9. +

    Unit Tests

    +
    +

    This step will execute the project unit test with maven.

    +
    +
    +
    +
    mvn clean test
    +
    +
    +
  10. +
  11. +

    Dependency Checker

    +
    +

    Execute the OWASP Dependency Checker in order to validate the project dependencies. It will generate a report that can be used in SonarQube

    +
    +
    +
    +
    dependencyCheck additionalArguments: '--project "MTSJ" --scan java/mtsj --format XML', odcInstallation: 'dependency-check'
    +dependencyCheckPublisher pattern: ''
    +
    +
    +
  12. +
  13. +

    SonarQube analysis

    +
    +

    The code is evaluated using the integrated PL instance’s SonarQube. Also, it will wait for the quality gate status. If the status is failing, the pipeline execution will be stopped.

    +
    +
    +
    +
    withSonarQubeEnv(sonarEnv) {
    +    sh "mvn sonar:sonar"
    +}
    +
    +def qg = waitForQualityGate()
    +if (qg.status != 'OK') {
    +    error "Pipeline aborted due to quality gate failure: ${qg.status}"
    +}
    +
    +
    +
  14. +
  15. +

    Deliver application into Nexus

    +
    +

    Store all artifacts into nexus.

    +
    +
    +
    +
    mvn deploy -Dmaven.test.skip=true
    +
    +
    +
  16. +
  17. +

    Create the Docker image

    +
    +

    Create the docker image and then publish the image into a docker registry.

    +
    +
  18. +
+
+
+
+
Adjustments
+ +
+
+
Pipeline Environment
+
+

In order to easily reuse the pipeline in other java projects, all variables have been defined in the block environment. All variables have the default values that Production Line uses, so if you’re going to work in production line you won’t have to change anything. Example:

+
+
+
+
environment {
+    // Directory with java project
+    javaDir = 'java/mtsj'
+
+    // SonarQube
+    // Name of the SonarQube environment
+    sonarEnv = "SonarQube"
+
+    // Nexus 3
+    // Maven global settings configuration ID
+    globalSettingsId = 'MavenSettings'
+    // Maven tool id
+    mavenInstallation = 'Maven3'
+
+    // Docker
+    dockerRegistryCredentials = 'nexus-api'
+    dockerRegistryProtocol = 'https://'
+    dockerTool = 'docker-global'
+}
+
+
+
+
+
== Description
+
+
    +
  • +

    java Dir: Relative route to java application. In My Thai Star this is the java/mtsj folder. The actual directory (.) is also allowed.

    +
    +
    +java directory +
    +
    +
  • +
  • +

    sonar Env: Name of the SonarQube environment. SonarQube is the default value for PL.

    +
  • +
  • +

    global Settings Id: The id of the global settings file. MavenSettings is the default value for PL.

    +
    +
    +nexus3 global config +
    +
    +
  • +
  • +

    maven Installation: The name of the maven tool. Maven3 is the default value for PL.

    +
    +
    +maven tool +
    +
    +
  • +
+
+
+
+
Distribution management
+
+

The only extra thing that needs to be added to the Java server-side is some information that determines where the artifact of the project is going to be stored in Nexus. This is going to be a section in the main pom.xml file called <distributionManagement>. This section will point to the PL instance’s Nexus. Let’s have a look at it. It’s already configured with the PL default values.

+
+
+
+
<distributionManagement>
+    <repository>
+      <id>pl-nexus</id>
+      <name>PL Releases</name>
+      <url>http://nexus3-core:8081/nexus/content/repositories/maven-releases/</url>
+    </repository>
+    <snapshotRepository>
+      <id>pl-nexus</id>
+      <name>PL Snapshots</name>
+      <url>http://nexus3-core:8081/nexus3/repository/maven-snapshots</url>
+    </snapshotRepository>
+</distributionManagement>
+
+
+ +
+
+
+

39.7. Deployment

+
+

The main deployment tool used for My Thai Star is Docker.

+
+
+
+docker +
+
+
+

It is a tool to run applications in isolated environments. Those isolated environments will be what we call Docker containers. For instance, it won’t be necessary to install Nginx, Apache Tomcat or anything else needed to deploy, because there will be some containers that actually have those technologies inside.

+
+
+
+

39.8. Where Docker containers will be running?

+
+

Of course, it is necessary to have an external Deployment Server. Every Docker process will run in it. It will be accessed from Production Line pipelines via SSH. Thus, the pipeline itself will manage the scenario: if every previous process (like testing) passes as OK, it stops the current containers and creates new ones.

+
+
+

This external server will be located in https://mts-devonfw-core.cloud.okteto.net/

+
+
+
+

39.9. Container Schema

+
+

3 Docker containers are being used for the deployment of My Thai Star:

+
+
+
    +
  1. +

    Nginx for the Reverse Proxy

    +
  2. +
  3. +

    tomcat for the Java Server

    +
  4. +
  5. +

    Nginx for the Angular Client

    +
  6. +
+
+
+

The usage of the Reverse Proxy will allow the client to call via /api every single Java Server’s REST operation. Moreover, there will only be 1 port in usage in the remote Docker host, the one mapped for the Reverse Proxy: 8080. +Besides the deployment itself using Nginx and tomcat, both client and server are previously built using NodeJS and maven images. Artifacts produced by them will be pasted in servers' containers using multi-stage docker builds. It will all follow this schema:

+
+
+
+36028242 8998f41c 0d9e 11e8 93b3 6bfe50152bf8 +
+
+
+

This orchestration of all 3 containers will be done by using a docker-compose.yml file. To redirect traffic from one container to another (i.e. reverse-proxy to angular client or angular client to java server) will be done by using, as host names, the service name docker-compose defines for each of them, followed by the internally exposed port:

+
+ +
+ + + + + +
+ + +An implementation using Traefik as a reverse proxy instead of NGINX is also available. +
+
+
+
+

39.10. Run My Thai Star

+
+

The steps to run My Thai Star are:

+
+
+
    +
  1. +

    Clone the repository $ git clone https://github.com/devonfw/my-thai-star.git

    +
  2. +
  3. +

    Run the docker compose command: $ docker-compose up

    +
  4. +
+
+ +
+
Deployment Pipelines
+
+

As PL does not support deployments, we have created separate pipelines for this purpose. Those pipelines are: MyThaiStar_REVERSE-PROXY_DEPLOY, MyThaiStar_FRONT-END_DEPLOY and MyThaiStar_SERVER_DEPLOY.

+
+
+

The application will be deployed using docker on a remote machine. The architecture is as follows:

+
+
+
+deployment arch +
+
+
+

The parts to be deployed are: an NGINX reverse proxy, the java application and the angular application.

+
+
+
+
MyThaiStar_SERVER_DEPLOY Pipeline
+
+

Deploys on the server the Java part of My Thai Star.

+
+
+
+
Parameters
+
+
    +
  • +

    registryUrl: The URL to the docker registry where the image is stored.

    +
  • +
  • +

    registryCredentialsId: Credentials to publish/download images from registry.

    +
  • +
  • +

    dockerNetwork: Network of your My Thai Star application. You can deploy several versions of MTS in the same server by changing the dockerNetwork.

    +
  • +
  • +

    VERSION: The version that you want to deploy.

    +
  • +
+
+
+
+
Pipeline steps
+
+
    +
  • +

    Create docker network: Create the docker network with the name provided as parameter.

    +
  • +
  • +

    Deploy new image: Deploy a new java container. If it already exists, it first deletes the previous one.

    +
  • +
+
+
+
+
MyThaiStar_FRONT-END_DEPLOY
+
+

Deploys on the server the Angular part of My Thai Star

+
+
+
+
Parameters
+
+
    +
  • +

    registryUrl: The URL to the docker registry where the image is stored.

    +
  • +
  • +

    registryCredentialsId: Credentials to publish/download images from registry.

    +
  • +
  • +

    dockerNetwork: Network of your My Thai Star application. You can deploy several versions of MTS in the same server by changing the dockerNetwork.

    +
  • +
  • +

    VERSION: The version that you want to deploy.

    +
  • +
+
+
+
+
Pipeline steps
+
+
    +
  • +

    Create docker network: Create the docker network with the name provided as parameter.

    +
  • +
  • +

    Deploy new image: Deploy a new angular container. If it already exists, it first deletes the previous one.

    +
  • +
+
+
+
+
MyThaiStar_REVERSE-PROXY_DEPLOY Pipeline
+
+ + + + + +
+ + +As reverse proxy connects to the Java and Angular application, both must be deployed before you execute this pipeline. +
+
+
+

The MyThaiStar_REVERSE-PROXY_DEPLOY pipeline will deploy the My Thai Star reverse proxy into a remote machine using docker.

+
+
+
+
Parameters
+
+
    +
  • +

    registryUrl: The URL to the docker registry where the image is stored.

    +
  • +
  • +

    registryCredentialsId: Credentials to publish/download images from registry.

    +
  • +
  • +

    buildReverseProxy: If yes, it will build and publish a new version of reverse-proxy.

    +
  • +
  • +

    port: Port of the MTS application. You must ensure that this port is available in the deployment machine.

    +
  • +
  • +

    docker Network: Network of your My Thai Star application. You can deploy several versions of MTS in the same server by changing the port and the docker Network.

    +
  • +
  • +

    VERSION: The version that you want to deploy.

    +
  • +
+
+
+
+
Pipeline steps
+
+
    +
  • +

    Create docker network: Create the docker network with the name provided as parameter.

    +
  • +
  • +

    Create the Docker image: If build-Reverse-Proxy is enabled, this step will create a new docker image and publish it to the docker registry.

    +
  • +
  • +

    Deploy new image: Deploy a new reverse proxy container. If it already exists, it first deletes the previous one.

    +
  • +
+
+ +
+
+
Deployment Strategies
+
+

In this chapter different ways of deploying My Thai Star are explained. Everything will be based in Docker.

+
+
+
+
Independent Docker containers
+
+

The first way of deployment will use isolated Docker containers. That means that if the client-side container is deployed, it does not affect the server-side container’s life cycle and vice versa.

+
+
+

Let’s show how the containers will behave during their life cycle.

+
+
+
    +
  • +

    0) Copy everything you need into the Deployment Server directory

    +
  • +
  • +

    1) Remove existing container (Nginx or Tomcat)

    +
    +
    +container1 +
    +
    +
  • +
  • +

    2) Run new one from the Docker images collection of the external Deployment Server.

    +
    +
    +container2 +
    +
    +
  • +
  • +

    3) Add the artifact /dist to the "deployable" folder of the Docker container (/usr/share/nginx/html/)

    +
    +
    +container3 +
    +
    +
    +

    Now, let’s see how it’s being executed in the command line (simplified due to documentation purposes). The next block of code represents what is inside of the last stage of the Pipeline.

    +
    +
    +
    +
    sshagent (credentials: ['my_ssh_token']) {
    +    sh """
    +        // Copy artifact from workspace to deployment server
    +
    +        // Manage container:
    +        docker rm -f [mts-container]
    +        docker run -itd --name=[mts-container] [base_image]
    +        docker exec [mts-container] bash -C \\"rm [container_deployment_folder]/*\\"
    +        docker cp [artifact] [mts-container]:[container_deployment_folder]
    +    """
    +}
    +
    +
    +
    +

    For every operation performed in the external Deployment Server, it is necessary to define where those commands are going to be executed. So, for each one of previous docker commands, this should appear before:

    +
    +
    +
    +
    ssh -o StrictHostKeyChecking=no root@10.40.235.244
    +
    +
    +
  • +
+
+
+
+
Docker Compose
+
+

The second way of deployment will be by orchestrating both elements of the application: The Angular client-side and the Java server-side. Both elements will be running in Docker containers as well, but in this case they won’t be independent anymore. Docker Compose will be in charge of keeping both containers up, or to put them down.

+
+
+
+
Project adjustment
+
+

In order to perform this second way of deployment, some files will be created in the project. The first one is the Dockerfile for the Angular client-side. This file will pull (if necessary) an Nginx Docker image and copy the Angular artifact (/dist folder) inside of the deployment folder of the image. It will be located in the main directory of the Angular client-side project.

+
+
+
+dockerfile angular +
+
+
+

The second file is the Dockerfile for the Java server-side. Its function will be quite similar to the Angular one. It will run a tomcat Docker image and copy the Java artifact (mythaistar.war file) in its deployment folder.

+
+
+
+dockerfile java +
+
+
+

Finally, as long as the docker-compose is being used, a file containing its configuration will be necessary as well. A new folder in the main My Thai Star directory is created, and it’s called /docker. Inside there is just a docker-compose.yml file. It contains all the information needed to orchestrate the deployment process. For example, which port both containers are going to be published on, and so on. This way of deployment will allow the application to be published or not just with one action.

+
+
+
+
docker-compose rm -f            # down
+docker-compose up --build -d    # up fresh containers
+
+
+
+
+docker compose +
+
+
+

Let’s have a look at the file itself:

+
+
+
+
version: '3'
+services:
+  client_compose:
+    build: "angular"
+    ports:
+      - "8091:80"
+    depends_on:
+      - server_compose
+  server_compose:
+    build: "java"
+    ports:
+      - "9091:8080"
+
+
+
+

This Orchestrated Deployment will offer some interesting possibilities for the future of the application.

+
+ +
+
+
Future Deployment
+
+

The My Thai Star project is going to be built in many technologies. Thus, let’s think about one deployment schema that allow the Angular client to communicate to all three back ends: Java, Node and .NET.

+
+
+

As long as Docker containers are being used, it shouldn’t be that hard to deal with this "distributed" deployment. The schema represents 6 Docker containers that will have client-side(s) and server-side(s). Each of 3 Angular client containers (those in red) are going to communicate with different back-ends. So, when the deployment is finished, it would be possible to use all three server-sides just by changing the "port" in the URL.

+
+
+

Let’s see how it would look like:

+
+
+
+deployment schema +
+
+ +
+
+
Reverse proxy strategy using Traefik
+
+

This implementation is the same as described at My Thai Star deployment wiki page. The only thing that changes is that Traefik is used instead of NGINX.

+
+
+

Using Traefik as reverse proxy, we can define the routes using labels in the docker containers instead of using a nginx.conf file. With this, it is not necessary to modify the reverse proxy container for each application. In addition, as Traefik is listening to the docker daemon, it can detect new containers and create routes for them without rebooting.

+
+
+

Example of labels:

+
+
+
+
labels:
+    - "traefik.http.routers.angular.rule=PathPrefix(`/`)"
+    - "traefik.http.services.angular.loadBalancer.healthcheck.path=/health"
+    - "traefik.http.services.angular.loadBalancer.healthcheck.interval=10s"
+    - "traefik.http.services.angular.loadBalancer.healthcheck.scheme=http"
+
+
+
+
+
How to use it
+
+

If you want to build the images from code, change to My Thai Star root folder and execute:

+
+
+
+
$ docker-compose -f docker-compose.traefik.yml up -d --build
+
+
+
+

If you want to build the images from artifacts, change to Traefik folder (reverse-proxy/traefik) and execute:

+
+
+
+
$ docker-compose up -d --build
+
+
+
+

After a few seconds, when the healthcheck detects that containers are running, your application will be available at http://localhost:8090. Also, the Traefik dashboard is available at http://localhost:8080.

+
+
+

If you want to check the behavior of the application when you scale up the back-end, you can execute:

+
+
+
+
$ docker-compose scale java=5
+
+
+
+

With this, the access to the java back-end will be using the load balancing method: Weighted Round Robin.

+
+ +
+
+
+

39.11. MyThaiStar on Native Kubernetes as a Service (nKaaS)

+
+

The MyThaiStar sample application can be deployed on a nKaaS environment. The required Kubernetes configuration files can be found in the MyThaiStar repository. There are no additional changes required in order to deploy the application.

+
+
+
+

39.12. Setting up the environment

+ +
+
+

39.13. Following the nKaaS guide

+
+

After requesting access to the nKaaS platform you’ll be greeted with a welcome mail which contains your personal credentials. Make sure to change the given password to a personal one within the 24 hour time period, otherwise the credentials will expire.

+
+
+

After successfully following the guide mentioned in the welcome mail you should be able to establish a connection to the nKaaS VPN and have access to all their services (Jenkins, BitBucket, etc.). You should also be able to communicate with Kubernetes using kubectl.

+
+
+

Known issues: The nKaaS guide provides a download link for OpenVPN Connect. However, some users experienced connection issues with this client. If you’re having issues connecting to the VPN with OpenVPN Connect, you may try out the client by OVPN.

+
+
+
+

39.14. Requesting a namespace

+
+

Initially, you won’t be able to edit anything on Kubernetes, as you don’t have any privileges on any namespace. To request your own namespace you should raise a ticket at the Customer Support Portal containing your desired name for the namespace.

+
+
+

As soon as the namespace was created you can change your kubectl context:

+
+
+
+
kubectl config set-context --current --namespace=YOUR-NAMESPACE
+
+
+
+

On your own namespace you should have permissions to create/delete deployments/services etc. and perform other actions.

+
+
+
+

39.15. Setting up Harbor

+
+

Jenkins will build the MyThaiStar Docker images and push them to the nKaaS Harbor registry. The Jenkinsfile defaults to a Harbor project called "my-thai-star". If there’s no such project on Harbor, simply create a new one.

+
+
+
+

39.16. Setting up Jenkins

+
+

As MyThaiStar includes all required Jenkinsfiles for nKaaS, almost no configurations have to be performed by the user. +Create a new Pipeline on Jenkins and configure its definition to be a "Pipeline script from SCM". The SCM used is "Git" and the repository URL is the MyThaiStar repository https://github.com/devonfw/my-thai-star.git or your fork of it.

+
+
+

The Branch Specifier should point to */develop, the Script Path is jenkins/nKaaS/Jenkinsfile as that’s where the Jenkinsfile is located at the MyThaiStar repository. +Checking the "Lightweight checkout" could speed up the Pipeline.

+
+
+

Note: If you’re using the nKaaS Bitbucket as repository for your MyThaiStar clone you have to perform some additional configurations. First you’ll have to create a new SSH keypair, for example with ssh-keygen. Add the public key to the Bitbucket authentication methods and the private key in Jenkins to a new pair of credentials. This step is required for Jenkins to be able to authenticate against Bitbucket. +Afterwards, instead of the official MyThaiStar repository, specify your Bitbucket repository:

+
+
+
+
ssh://git@bitbucket.demo.xpaas.io:7999/YOUR-PROJECT/YOUR-MTS-REPO.git
+
+
+
+

Under "Credentials" choose the credentials that contain your Bitbucket private key you’ve created earlier.

+
+
+
+

39.17. Deploying MTS

+
+

After setting up the Jenkins Pipeline, you can simply run it by clicking on the "Build" button. This will trigger the pipeline, Jenkins will:

+
+
+
    +
  1. +

    Check out the MTS project

    +
  2. +
  3. +

    Build the docker images

    +
  4. +
  5. +

    Push the docker images to the Harbor registry

    +
  6. +
  7. +

    Deploy the MTS application onto Kubernetes

    +
  8. +
+
+
+

Finally, the applications should be available at http://my-thai-star.demo.xpaas.io.

+
+
+

The first part, my-thai-star, is specified in the MTS ingress configuration at host. The second part, demo.xpaas.io, is the host of the nKaaS you’re working on.

+
+
+
+
+
+

40. Contributing

+
+
+

Unresolved include directive in modules/ROOT/pages/general/master-contributing.adoc - include::../devonfw-github/CONTRIBUTING.adoc[]

+
+
+

Unresolved include directive in modules/ROOT/pages/general/master-contributing.adoc - include::../devonfw-github/CODE_OF_CONDUCT.adoc[]

+
+ +
+

40.1. OSS Compliance

+
+

This chapter helps you to gain transparency on OSS usage and reach OSS compliance in your project.

+
+
+
+

40.2. Preface

+
+

devonfw, as most Java software, makes strong use of Open Source Software (OSS). It is using about 150 OSS products on the server only and on the client even more. Using a platform like devonfw to develop your own custom solution requires handling contained OSS correctly, i.e. acting OSS-compliant.

+
+
+

Please read the Open Source policy of your company first, e.g. the Capgemini OSS Policy which contains a short, comprehensive and well written explanation on relevant OSS-knowledge. Make sure you:

+
+
+
    +
  • +

    understand the copyleft effect and its effect in commercial projects

    +
  • +
  • +

    understand the 3 license categories: "permissive", "weak copyleft" and "strong copyleft"

    +
  • +
  • +

    know prominent license types as e.g. "Apache-2.0" or "GPL-3.0" and what copyleft-category they are in

    +
  • +
  • +

    are aware that some OSS offer dual/multi-licenses

    +
  • +
  • +

    Understand that OSS libraries often come with sub-dependencies of other OSS carrying licenses themselves

    +
  • +
+
+
+

To define sufficient OSS compliance measures, contact your IP officer or legal team as early as possible, especially if you develop software for clients.

+
+
+
+

40.3. Obligations when using OSS

+
+

If you create a custom solution containing OSS, this in legal sense is a "derived" work. If you distribute your derived work to your business client or any other legal entity in binary packaged form, the license obligations of contained OSS get into effect. Ignoring these leads to a license infringement which can create high damage.

+
+
+

To carefully handle these obligations you must:

+
+
+
    +
  • +

    maintain an OSS inventory (to gain transparency on OSS usage and used licenses)

    +
  • +
  • +

    check license conformity depending on usage/distribution in a commercial scenario

    +
  • +
  • +

    check license compatibility between used OSS-licenses

    +
  • +
  • +

    fulfill obligations defined by the OSS-licenses

    +
  • +
+
+
+

Obligations need to be checked per license. Frequent obligations are:

+
+
+
    +
  • +

    deliver the license terms of all used versions of the OSS licenses

    +
  • +
  • +

    not to change any copyright statements or warranty exclusions contained in the used OSS components

    +
  • +
  • +

    deliver the source code of the OSS components (e.g. on a data carrier)

    +
  • +
  • +

    when modifying OSS, track any source code modification (including date and name of the employee/company)

    +
  • +
  • +

    display OSS license notice in a user frontend (if any)

    +
  • +
  • +

    other obligations depending on individual license

    +
  • +
+
+
+
+

40.4. Automate OSS handling

+
+

Carefully judging the OSS usage in your project is a MANUAL activity! However, collecting OSS information and fulfilling license obligations should be automated as much as possible. A prominent professional tool to automate OSS compliance is the commercial software "Black Duck". Unfortunately it is rather expensive - either purchased or used as SaaS.

+
+
+

The most recommended lightweight tooling is a combination of Maven plugins. We will mainly use the Mojo Maven License Plugin.

+
+
+
+

40.5. Configure the Mojo Maven License Plugin

+
+

You can use it from command line but this will limit the ability to sustainably configure it (shown later). +Therefore we add it permanently as a build-plugin to the project parent-pom like this:

+
+
+
+
<plugin>
+  <groupId>org.codehaus.mojo</groupId>
+  <artifactId>license-maven-plugin</artifactId>
+  <version>1.14</version>
+
+  <configuration>
+    <outputDirectory>${project.build.directory}/generated-resources</outputDirectory>
+    <sortArtifactByName>true</sortArtifactByName>
+    <includeTransitiveDependencies>true</includeTransitiveDependencies>
+    <!-- the "missing file" declares licenses for dependencies that could not be detected automatically -->
+    <useMissingFile>true</useMissingFile>
+    <!-- find the "missing files" in all child-projects at the following location -->
+    <missingFile>src/license/THIRD-PARTY.properties</missingFile>
+    <!-- if the "missing files" are not yet existing in child-projects they will be created automatically -->
+    <failOnMissing>false</failOnMissing>
+    <overrideFile>src/license/override-THIRD-PARTY.properties</overrideFile>
+    <!-- harmonize different ways of writing license names -->
+    <licenseMerges>
+      <licenseMerge>Apache-2.0|Apache 2.0</licenseMerge>
+      <licenseMerge>Apache-2.0|Apache License, Version 2.0</licenseMerge>
+      <licenseMerge>Apache-2.0|Apache Software License, Version 2.0</licenseMerge>
+      <licenseMerge>Apache-2.0|The Apache Software License, Version 2.0</licenseMerge>
+    </licenseMerges>
+    <encoding>utf-8</encoding>
+  </configuration>
+</plugin>
+
+
+
+

In the config above there are several settings that help to permanently improve the result of an automated OSS scan. We explain these now.

+
+
+
+

40.6. Declare additional licenses

+
+

Sometimes the licenses of used OSS cannot be resolved automatically. That is not the mistake of the maven-license-tool, but the mistake of the OSS author who didn’t make the respective license-information properly available.

+
+
+

Declare additional licenses in a "missing file" within each maven-subproject: /src/license/THIRD-PARTY.properties.

+
+
+
+
##Generated by org.codehaus.mojo.license.AddThirdPartyMojo
+#-------------------------------------------------------------------------------
+##Already used licenses in project :
+##- ASF 2.0
+##- Apache 2
+...
+#-------------------------------------------------------------------------------
+##Please fill the missing licenses for dependencies :
+...
+dom4j--dom4j--1.6.1=BSD 3-Clause
+javax.servlet--jstl--1.2=CDDL
+...
+
+
+
+

In case the use of "missing files" is activated but the THIRD-PARTY.properties file does not exist yet, the first run of an "aggregate-add-third-party" goal (see below) will fail. Luckily the license-plugin helps us out and creates the properties files automatically (in each maven-subproject), prefilled with:

+
+
+
    +
  • +

    a list of all detected licenses within the maven project

    +
  • +
  • +

    all OSS libraries where a license could not be detected automatically.

    +
  • +
+
+
+

You now need to fill in missing license information and rerun the plugin.

+
+
+
+

40.7. Redefine wrongly detected licenses

+
+

In case automatically detected licenses prove to be wrong by closer investigation, this wrong detection can be overwritten. Add a configuration to declare alternative licenses within each maven-subproject: /src/license/override-THIRD-PARTY.properties

+
+
+
+
com.sun.mail--javax.mail--1.5.6=Common Development and Distribution License 1.1
+
+
+
+

This can also be useful for OSS that provides a multi-license to make a decision which license to actually choose.

+
+
+
+

40.8. Merge licenses

+
+

You will see that many prominent licenses come in all sorts of notations, e.g. Apache-2.0 as: "Apache 2" or "ASL-2.0" or "The Apache License, Version 2.0". The Mojo Maven License Plugin allows to harmonize different forms of a license-naming like this:

+
+
+
+
    <!-- harmonize different ways of writing license names -->
+    <licenseMerges>
+      <licenseMerge>Apache-2.0|Apache 2.0</licenseMerge>
+      <licenseMerge>Apache-2.0|Apache License, Version 2.0</licenseMerge>
+      <licenseMerge>Apache-2.0|Apache Software License, Version 2.0</licenseMerge>
+      <licenseMerge>Apache-2.0|The Apache Software License, Version 2.0</licenseMerge>
+    </licenseMerges>
+
+
+
+

License-names will be harmonized in the OSS report to one common term. We propose to harmonize to short-license-IDs defined by the SPDX standard.

+
+
+
+

40.9. Retrieve licenses list

+
+

For a quick initial judgement of OSS license situation run the following maven command from command line:

+
+
+
+
$ mvn license:license-list
+
+
+
+

You receive the summary list of all used OSS licenses on the cmd-out.

+
+
+
+

40.10. Create an OSS inventory

+
+

To create an OSS inventory means to report on the overall bill of material of used OSS and corresponding licenses. +Within the parent project, run the following maven goal from command line.

+
+
+
+
$ mvn license:aggregate-download-licenses -Dlicense.excludedScopes=test,provided
+
+
+
+

Running the aggregate-download-licenses goal creates two results.

+
+
+
    +
  1. +

    a license.xml that contains all used OSS dependencies (even sub-dependencies) with respective license information

    +
  2. +
  3. +

    puts all used OSS-license-texts as html files into folder target/generated-resources

    +
  4. +
+
+
+

Carefully validate and judge the outcome of the license list. It is recommended to copy the license.xml to the project documentation and hand it over to your client. You may also import it into a spreadsheet to get a better overview.

+
+
+
+

40.11. Create a THIRD PARTY file

+
+

Within Java software it is a common practice to add a "THIRD-PARTY" text file to the distribution. Contained is a summary-list of all used OSS and respective licenses. This can also be achieved with the Mojo Maven License Plugin.

+
+
+

Within the parent project, run the following maven goal from command line.

+
+
+
+
$ mvn license:aggregate-add-third-party -Dlicense.excludedScopes=test,provided
+
+
+
+

Find the THIRD-PARTY.txt in the folder: target\generated-resources. The goal aggregate-add-third-party also profits from configuration as outlined above.

+
+
+
+

40.12. Download and package OSS SourceCode

+
+

Some OSS licenses require handing over the OSS source code which is packaged with your custom software to the client the solution is distributed to. It is a good practice to hand over the source code of all used OSS to your client. Collecting all source code can be accomplished by another Maven plugin: Apache Maven Dependency Plugin.

+
+
+

It downloads all OSS Source Jars into the folder: \target\sources across the parent and all child maven projects.

+
+
+

You configure the plugin like this:

+
+
+
+
<plugin>
+  <groupId>org.apache.maven.plugins</groupId>
+  <artifactId>maven-dependency-plugin</artifactId>
+  <version>3.0.2</version>
+
+  <configuration>
+    <classifier>sources</classifier>
+    <failOnMissingClassifierArtifact>false</failOnMissingClassifierArtifact>
+    <outputDirectory>${project.build.directory}/sources</outputDirectory>
+  </configuration>
+  <executions>
+    <execution>
+      <id>src-dependencies</id>
+      <phase>package</phase>
+      <goals>
+        <!-- use unpack-dependencies instead if you want to explode the sources -->
+        <goal>copy-dependencies</goal>
+      </goals>
+    </execution>
+  </executions>
+</plugin>
+
+
+
+

You run the plugin from command line like this:

+
+
+
+
$ mvn dependency:copy-dependencies -Dclassifier=sources
+
+
+
+

The plugin provides another goal that also unzips the jars, which is not recommended, since contents get mixed up.

+
+
+

Deliver the OSS source jars to your client with the release of your custom solution. This has been done physically - e.g. on DVD.

+
+
+
+

40.13. Handle OSS within CI-process

+
+

To automate OSS handling in the regular build-process (which is not recommended to start with) you may declare the following executions and goals in your maven-configuration:

+
+
+
+
<plugin>
+  ...
+
+  <executions>
+    <execution>
+      <id>aggregate-add-third-party</id>
+      <phase>generate-resources</phase>
+      <goals>
+        <goal>aggregate-add-third-party</goal>
+      </goals>
+    </execution>
+
+    <execution>
+      <id>aggregate-download-licenses</id>
+      <phase>generate-resources</phase>
+      <goals>
+        <goal>aggregate-download-licenses</goal>
+      </goals>
+    </execution>
+  </executions>
+</plugin>
+
+
+
+

Note that the build may fail in case the OSS information was not complete. Check the build-output to understand and resolve the issue - like e.g. add missing license information in the "missing file".

+
+
+
+
+
+

41. Release Notes

+
+ +
+

41.1. devonfw Release notes 2021.04

+ +
+
+

41.2. Introduction

+
+

We are proud to announce the release of devonfw version 2021.04.

+
+
+

This release includes lots of addition of new features, updates and bug fixes but it is very important to highlight the following improvements:

+
+
+
+

41.3. devonfw IDE

+
+

The consolidated list of features for this devonfw IDE release is as it follows.

+
+
+
+

41.4. 2021.04.001

+
+

Update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #537: Update eclipse to 2021-03

    +
  • +
  • +

    #287: Command autocompletion

    +
  • +
  • +

    #536: Improve handling of aborted downloads

    +
  • +
  • +

    #542: Support placeholders in settings.xml template

    +
  • +
  • +

    #557: minimize setup by reducing DEVON_IDE_TOOLS

    +
  • +
  • +

    #550: update maven to 3.8.1

    +
  • +
  • +

    #545: update devon4j to 2021.04.002 and add migration

    +
  • +
  • +

    #575: jasypt support for password encryption and decryption

    +
  • +
  • +

    #546: Problems with tm-terminal Eclipse plugin

    +
  • +
  • +

    #553: VSCode user-data-dir shall be part of workspace config

    +
  • +
  • +

    #513: Configurable generation of IDE start scripts

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2021.04.001.

+
+
+
+

41.5. devon4j

+
+

The consolidated list of features for this devon4j release is as it follows.

+
+
+
+

41.6. 2021.04.002

+
+

Bugfix release with the following stories: +* #389: archetype build broken with ci-friendly-maven +* #391: jasypt documentation improvements +* #387: rebuild and updated diagram with drawio

+
+
+

Documentation is available at devon4j guide 2021.04.002. +The full list of changes for this release can be found in milestone devon4j 2021.04.002.

+
+
+
+

41.7. 2021.04.001

+
+

New release of devon4j with fixes, updates and improvements:

+
+
+
    +
  • +

    #370: Minor updates (spring-boot 2.4.4, jackson 2.12.2, CXF 3.4.3, etc.)

    +
  • +
  • +

    #366: BaseTest.isInitialSetup() broken

    +
  • +
  • +

    #85: ci-friendly-maven also for archetype

    +
  • +
  • +

    #373: CORS starter not part of devon4j release

    +
  • +
  • +

    #164: Flattened pom for core project invalid

    +
  • +
  • +

    #323: Add spring integration test to archetype

    +
  • +
  • +

    #351: improved error handling of service client

    +
  • +
  • +

    #71: improve documentation for strong password encryption

    +
  • +
  • +

    #354: JMS senders should not be part of data access layer, but logical layer

    +
  • +
  • +

    #377: updated T-Architecture

    +
  • +
  • +

    #294: integrate sonarcloud analysis into devon4j CI pipeline

    +
  • +
+
+
+

Documentation is available at devon4j guide 2021.04.001. +The full list of changes for this release can be found in milestone devon4j 2021.04.001.

+
+
+
+

41.8. devon4ng

+
+

Updated template and samples to Angular 11. +Updated guide of devon4ng.

+
+
+
+

41.9. MrChecker

+
+

MrChecker Test Framework is an end to end test automation framework written in Java. It is an automated testing framework for functional testing of web applications, API web services, Service Virtualization, Security, native mobile apps and, in the near future, databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions. Two new modules are added to MrChecker:

+
+
+
    +
  • +

    DB Module - we have created a module intended to make testing efforts on DBs easier. It is founded on JPA in conjunction with Hibernate and therefore supports both high level, object based access to DB entities via the IDao interface and low level, native SQL commands via the EntityManager class.

    +
  • +
  • +

    CLI Module - we have created a module intended to make testing command line applications like compilers or batches easier and faster. Huge success here is that, team using this solution was able to prepare a test suite, without app provided, basing only on documentation and using mocking technique.

    +
  • +
+
+
+
+

41.10. Trainings/tutorials

+
+ +
+ +
+
+

41.11. devonfw Release notes 2020.12

+ +
+
+

41.12. Introduction

+
+

We are proud to announce the release of devonfw version 2020.12.

+
+
+

This release includes lots of addition of new features, updates and bug fixes but it is very important to highlight the following improvements:

+
+
+
+

41.13. devonfw IDE

+
+

The consolidated list of features for this devonfw IDE release is as it follows.

+
+
+
+

41.14. 2020.12.001

+
+

Update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #495: Documentation corrections

    +
  • +
  • +

    #491: Consider lombok support

    +
  • +
  • +

    #489: Update node to v12.19.0 and VS Code to 1.50.1

    +
  • +
  • +

    #470: reverse merge of workspace settings not sorting properties anymore

    +
  • +
  • +

    #483: Error during installation when npm is already installed

    +
  • +
  • +

    #415: documentation to customize settings

    +
  • +
  • +

    #479: Error for vscode plugin installation

    +
  • +
  • +

    #471: Preconfigure Project Explorer with Hierarchical Project Presentation

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.12.001.

+
+
+
+

41.15. 2020.08.001

+
+

Update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #454: update to eclipse 2020.06

    +
  • +
  • +

    #442: update nodejs and vscode

    +
  • +
  • +

    #432: vsCode settings are not updated

    +
  • +
  • +

    #446: intellij: doConfigureEclipse: command not found

    +
  • +
  • +

    #440: Software update may lead to inconsistent state due to windows file locks

    +
  • +
  • +

    #427: release: keep leading zeros

    +
  • +
  • +

    #450: update settings

    +
  • +
  • +

    #431: devon build command not working correct for yarn or npm

    +
  • +
  • +

    #449: update to devon4j 2020.08.001

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.08.001.

+
+
+
+

41.16. 2020.04.004

+
+

Minor update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #433: Windows: devon command line sets wrong environment variables (with tilde symbol)

    +
  • +
  • +

    #435: fix variable resolution on bash

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.04.004.

+
+
+
+

41.17. 2020.04.003

+
+

Minor update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #395: variable from devon.properites unset if value is in double quotes

    +
  • +
  • +

    #429: Added script to create a meta file in the users directory after setup

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.04.003.

+
+
+
+

41.18. 2020.04.002

+
+

Minor update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #418: Make projects optional

    +
  • +
  • +

    #421: update devon4j to 2020.04.002

    +
  • +
  • +

    #413: Update Eclipse to 2020-03

    +
  • +
  • +

    #424: Strange errors on windows if devon.properties contains mixed line endings

    +
  • +
  • +

    #399: launching of Intellij fails with No such file or directory error.

    +
  • +
  • +

    #410: fix jsonmerge for boolean and null values

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.04.002.

+
+
+
+

41.19. devon4j

+
+

The consolidated list of features for this devon4j release is as it follows.

+
+
+
+

41.20. 2020.12.001

+
+

New release of devon4j with pluggable web security (CSRF starter) and CompletableFuture support for async REST service client as well as other improvements:

+
+
+
    +
  • +

    #283: Support for CompletableFuture in async service client

    +
  • +
  • +

    #307: Fix CSRF protection support

    +
  • +
  • +

    #287: spring-boot update to 2.3.3

    +
  • +
  • +

    #288: Update jackson to 2.11.2

    +
  • +
  • +

    #293: Update owasp-dependency-check plugin version to 5.3.2

    +
  • +
  • +

    #302: added guide for project/app structure

    +
  • +
  • +

    #315: devon4j documentation correction

    +
  • +
  • +

    #306: improve documentation to launch app

    +
  • +
+
+
+

Documentation is available at devon4j guide 2020.12.001. +The full list of changes for this release can be found in milestone devon4j 2020.12.001.

+
+
+
+

41.21. 2020.08.001

+
+

New release of devon4j with async REST service client support and other improvements:

+
+
+
    +
  • +

    #279: support for async service clients

    +
  • +
  • +

    #277: Update Security-Guide to recent OWASP Top (2017)

    +
  • +
  • +

    #281: cleanup documentation

    +
  • +
+
+
+

Documentation is available at devon4j guide 2020.08.001. +The full list of changes for this release can be found in milestone devon4j 2020.08.001.

+
+
+
+

41.22. 2020.04.002

+
+

Minor update of devon4j with the following bugfixes and small improvements:

+
+
+
    +
  • +

    #261: JUnit4 backward compatibility

    +
  • +
  • +

    #267: Fix JWT permission expansion

    +
  • +
  • +

    #254: JWT Authentication support for devon4j-kafka

    +
  • +
  • +

    #258: archetype is still lacking a .gitignore

    +
  • +
  • +

    #273: Update libs

    +
  • +
  • +

    #271: Do not enable resource filtering by default

    +
  • +
  • +

    #255: Kafka: Support different retry configuration for different topics

    +
  • +
+
+
+

Documentation is available at devon4j guide 2020.04.002. +The full list of changes for this release can be found in milestone devon4j 2020.04.002.

+
+
+
+

41.23. devon4node

+
+

New devon4node version is published, the changes are:

+
+
+

In this release we have deprecated the devon4node CLI (now we use the nest CLI), and we have added a GraphQL sample.

+
+
+
    +
  • +

    #375: GraphQL Sample.

    +
  • +
  • +

    #257: D4N cli remove

    +
  • +
+
+
+
+

41.24. CobiGen

+
+

Various bugfixes were made as well as consolidating behavior of eclipse vs maven vs cli by properly sharing more code across the different clients. +Also properly takes into account a file's line delimiter instead of defaulting to those of the host system.

+
+ +
+
+

41.25. Templates

+
+
    +
  • +

    Removed environment.ts from the crud_angular_client_app/CRUD devon4ng Angular App increment since Cobigen did not make any changes in it

    +
  • +
  • +

    Removed cross referencing between template increments since there is currently no useful use case for it and it leads to a few problems

    +
  • +
  • +

    v2020.12.001

    +
  • +
+
+
+
+

41.26. Java Plug-in

+
+
    +
  • +

    Now properly merges using the input files line delimiters instead of defaulting to those of the host system.

    +
  • +
  • +

    v7.1.0

    +
  • +
+
+
+
+

41.27. TypeScript Plug-in

+
+
    +
  • +

    Fixed NPE. Added the option to read a path from an object input

    +
  • +
  • +

    v7.1.0

    +
  • +
+
+
+
+

41.28. Property Plug-in

+
+
    +
  • +

    Now properly merges using the input files line delimiters instead of defaulting to those of the host system.

    +
  • +
  • +

    v7.1.0

    +
  • +
+
+
+
+

41.29. OpenAPI Plug-in

+
+
    +
  • +

    Fixed an issue where nullable enums lead to errors

    +
  • +
  • +

    7.1.0

    +
  • +
+
+
+
+

41.30. Textmerger

+
+
    +
  • +

    Now properly merges using the input files line delimiters instead of defaulting to those of the host system.

    +
  • +
  • +

    v7.1.0

    +
  • +
  • +

    v7.1.1

    +
  • +
+
+
+
+

41.31. Sonar devon4j plugin

+
+

With this release, we made the package structure configurable and did some other improvements and fixes:

+
+
+
    +
  • +

    #117: Rule from checkstyle plugin could not be instantiated in our quality profile

    +
  • +
  • +

    #118: NPE during project analysis

    +
  • +
  • +

    #97: Custom configuration for architecture

    +
  • +
  • +

    #92: Display warnings on the 'devonfw' config page in the 'Administration' section of SonarQube

    +
  • +
  • +

    #95: Add 3rd Party rule to avoid Immutable annotation from wrong package

    +
  • +
  • +

    #94: Add 3rd Party rule to avoid legacy date types

    +
  • +
  • +

    #93: Improve devonfw Java quality profile

    +
  • +
  • +

    #114: Deleted unused architecture config from SonarQube settings to avoid confusion

    +
  • +
+
+
+

Changes for this release can be found in milestone 2020.12.001 and + milestone 2020.12.002

+
+
+
+

41.32. devon4net

+
+

The consolidated list of features for devon4net is as follows:

+
+
+
    +
  • +

    LiteDb: - Support for LiteDB - Provided basic repository for CRUD operations.

    +
  • +
  • +

    RabbitMq: - Use of EasyQNet library to perform CQRS main functions between different microservices - Send commands / Subscribe queues with one C# sentence - Events management: Handled received commands to subscribed messages - Automatic messaging backup when sent and handled (Internal database via LiteDB and database backup via Entity Framework)

    +
  • +
  • +

    MediatR: - Use of MediatR library to perform CQRS main functions in memory - Send commands / Subscribe queues with one C# sentence - Events management: Handled received commands to subscribed messages - Automatic messaging backup when sent and handled (Internal database via LiteDB and database backup via Entity Framework)

    +
  • +
  • +

    SmaxHcm: - Component to manage Microfocus SMAX for cloud infrastructure services management

    +
  • +
  • +

    CyberArk: - Manage safe credentials with CyberArk

    +
  • +
  • +

    AnsibleTower: - Ansible automates the cloud infrastructure. devon4net integrates with Ansible Tower via API consumption endpoints

    +
  • +
  • +

    gRPC+Protobuf: - Added Client + Server basic templates sample gRPC with Google’s Protobuf protocol using devon4net

    +
  • +
  • +

    Kafka: - Added Apache Kafka support for deliver/consume messages and create/delete topics as well

    +
  • +
  • +

    AWS support

    +
    +
      +
    • +

      AWS Template to create serverless applications with auto generation of an APIGateway using AWS base template

      +
    • +
    • +

      AWS template to create pure Lambda functions and manage SQS Events, SNS Events, Generic Events, CloudWatch, S3 Management, AWS Secrets management as a configuration provider in .NET life cycle

      +
    • +
    • +

      AWS CDK integration component to create/manage AWS infrastructures (Infra As Code): Database, Database cluster, VPC, Secrets, S3 buckets, Roles…

      +
    • +
    +
    +
  • +
  • +

    Minor performance and stability improvements such Entity framework migration integration

    +
  • +
  • +

    Updated to the latest .net Core 3.1 TLS

    +
  • +
+
+
+
+

41.33. dashboard (beta version)

+
+

We are adding the dashboard beta version as part of this release. Dashboard is a tool that allows you to create and manage devonfw projects. It makes it easy to onboard a new person with devonfw.

+
+
+
    +
  • +

    Dashboard lists all IDEs available on the user's system, or, if no IDE is available, it provides the option to download the latest version of the IDE.

    +
  • +
  • +

    Project creation and management: The project page lists all projects created by the user using the dashboard. Users can create devon4j, devon4ng and devon4node projects using the dashboard.

    +
  • +
  • +

    Support for Eclipse and VSCode IDE

    +
  • +
  • +

    Integrated devonfw-ide usage guide from the website

    +
  • +
+
+
+
+

41.34. Solicitor

+
+

Solicitor is a tool which helps managing Open Source Software used within projects. Below is consolidated feature list of solicitor:

+
+
+
    +
  • +

    Standalone Command Line Java Tool

    +
  • +
  • +

    Importers for component/license information from

    +
  • +
  • +

    Maven

    +
  • +
  • +

    Gradle

    +
  • +
  • +

    NPM

    +
  • +
  • +

    CSV (e.g. for manual entry of data)

    +
  • +
  • +

    Rules processing (using Drools Rule Engine) controls the different phases:

    +
  • +
  • +

    Normalizing / Enhancing of license information

    +
  • +
  • +

    Handling of multilicensing (including selection of applicable licenses) and re-licensing

    +
  • +
  • +

    Legal evaluation

    +
  • +
  • +

    Rules to be defined as Decision Tables

    +
  • +
  • +

    Sample Decision Tables included

    +
  • +
  • +

    Automatic download and file based caching of license texts

    +
  • +
  • +

    Allows manual editing / reformatting of license text

    +
  • +
  • +

    Output processing

    +
  • +
  • +

    Template based text (Velocity) and XLS generation

    +
  • +
  • +

    SQL based pre-processor (e.g. for filtering, aggregation)

    +
  • +
  • +

    Audit log which documents all applied rules for every item might be included in report

    +
  • +
  • +

    "Diff Mode" allows to mark data which has changed as compared to a previous run of Solicitor (in Velocity and XLS reporting)

    +
  • +
  • +

    Customization

    +
  • +
  • +

    Project specific configuration (containing e.g. reporting templates, decision tables) allows to override/amend builtin configuration

    +
  • +
  • +

    Builtin configuration might be overridden/extended by configuration data contained in a single extension file (ZIP format)

    +
  • +
  • +

    This allows to safely provide organization specific rules and reporting templates to all projects of an organization (e.g. to reflect the specific OSS usage policy of the organization)

    +
  • +
+
+
+
+

41.35. MrChecker

+
+

MrChecker Test Framework is an end to end test automation framework written in Java. It is an automated testing framework for functional testing of web applications, API web services, Service Virtualization, Security, native mobile apps and, in the near future, databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions. Below is consolidated list of updates in MrChecker:

+
+
+ +
+
+
+

41.36. Trainings/tutorials

+
+ +
+ +
+
+

41.37. devonfw Release notes 2020.08

+ +
+
+

41.38. Introduction

+
+

We are proud to announce the release of devonfw version 2020.08.

+
+
+

This release includes lots of addition of new features, updates and bug fixes but it is very important to highlight the following improvements:

+
+
+
+

41.39. devonfw IDE

+
+

The consolidated list of features for this devonfw IDE release is as it follows.

+
+
+
+

41.40. 2020.08.001

+
+

Update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #454: update to eclipse 2020.06

    +
  • +
  • +

    #442: update nodejs and vscode

    +
  • +
  • +

    #432: vsCode settings are not updated

    +
  • +
  • +

    #446: intellij: doConfigureEclipse: command not found

    +
  • +
  • +

    #440: Software update may lead to inconsistent state due to windows file locks

    +
  • +
  • +

    #427: release: keep leading zeros

    +
  • +
  • +

    #450: update settings

    +
  • +
  • +

    #431: devon build command not working correct for yarn or npm

    +
  • +
  • +

    #449: update to devon4j 2020.08.001

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.08.001.

+
+
+
+

41.41. 2020.04.004

+
+

Minor update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #433: Windows: devon command line sets wrong environment variables (with tilde symbol)

    +
  • +
  • +

    #435: fix variable resolution on bash

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.04.004.

+
+
+
+

41.42. 2020.04.003

+
+

Minor update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #395: variable from devon.properites unset if value is in double quotes

    +
  • +
  • +

    #429: Added script to create a meta file in the users directory after setup

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.04.003.

+
+
+
+

41.43. 2020.04.002

+
+

Minor update with the following bugfixes and improvements:

+
+
+
    +
  • +

    #418: Make projects optional

    +
  • +
  • +

    #421: update devon4j to 2020.04.002

    +
  • +
  • +

    #413: Update Eclipse to 2020-03

    +
  • +
  • +

    #424: Strange errors on windows if devon.properties contains mixed line endings

    +
  • +
  • +

    #399: launching of Intellij fails with No such file or directory error.

    +
  • +
  • +

    #410: fix jsonmerge for boolean and null values

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.04.002.

+
+
+
+

41.44. devon4j

+
+

The consolidated list of features for this devon4j release is as it follows.

+
+
+
+

41.45. 2020.08.001

+
+

New release of devon4j with async REST service client support and other improvements:

+
+
+
    +
  • +

    #279: support for async service clients

    +
  • +
  • +

    #277: Update Security-Guide to recent OWASP Top (2017)

    +
  • +
  • +

    #281: cleanup documentation

    +
  • +
+
+
+

Documentation is available at devon4j guide 2020.08.001. +The full list of changes for this release can be found in milestone devon4j 2020.08.001.

+
+
+
+

41.46. 2020.04.002

+
+

Minor update of devon4j with the following bugfixes and small improvements:

+
+
+
    +
  • +

    #261: JUnit4 backward compatibility

    +
  • +
  • +

    #267: Fix JWT permission expansion

    +
  • +
  • +

    #254: JWT Authentication support for devon4j-kafka

    +
  • +
  • +

    #258: archetype is still lacking a .gitignore

    +
  • +
  • +

    #273: Update libs

    +
  • +
  • +

    #271: Do not enable resource filtering by default

    +
  • +
  • +

    #255: Kafka: Support different retry configuration for different topics

    +
  • +
+
+
+

Documentation is available at devon4j guide 2020.04.002. +The full list of changes for this release can be found in milestone devon4j 2020.04.002.

+
+
+
+

41.47. devon4ng

+
+

This release is focused mainly on the Angular 10 upgrade:

+
+
+
    +
  • +

    #176: Template submodules updated to Angular 10 and NgRx 10.

    +
  • +
  • +

    #167, #168, #174 and #175: Updated electron (sample and documentation).

    +
  • +
  • +

    #166: Update error handler.

    +
  • +
  • +

    #165: Cypress sample.

    +
  • +
  • +

    #164: Update to Angular 10 (samples and documentation).

    +
  • +
+
+
+
+

41.48. devon4node

+
+

New devon4node version is published, the changes are:

+
+
+
    +
  • +

    Updated dependencies.

    +
  • +
  • +

    Solved bug when you introduce a name with dashes in new command.

    +
  • +
  • +

    Add more options to the non-interactive new command.

    +
  • +
+
+
+
+

41.49. CobiGen

+
+

CobiGen version numbers have been consolidated to now represent plug-in compatibility in the major release number (7.x.x).

+
+
+
+

41.50. CLI

+
+
    +
  • +

    CLI increments can be referenced by name and description.

    +
  • +
  • +

    Ability to configure logging.

    +
  • +
  • +

    Fixed error on code formatting.

    +
  • +
  • +

    Improved Performance by lazy plug-in loading.

    +
  • +
  • +

    Possibility to prefer custom plug-ins over CobiGen ones.

    +
  • +
  • +

    Fixed bug, which broke whole CobiGen execution in case a custom CobiGen Plug-in was throwing an arbitrary exception.

    +
  • +
+
+
+
+

41.51. Eclipse

+
+
    +
  • +

    Improved Performance by lazy plug-in loading.

    +
  • +
  • +

    Possibility to prefer custom plug-ins over CobiGen ones.

    +
  • +
  • +

    Fixed bug, which broke whole CobiGen execution in case a custom CobiGen Plug-in was throwing an arbitrary exception.

    +
  • +
+
+
+
+

41.52. Maven

+
+
    +
  • +

    Fixed bug to properly load template util classes.

    +
  • +
  • +

    Improved Performance by lazy plug-in loading.

    +
  • +
  • +

    Possibility to prefer custom plug-ins over CobiGen ones.

    +
  • +
  • +

    Fixed bug, which broke whole CobiGen execution in case a custom CobiGen Plug-in was throwing an arbitrary exception.

    +
  • +
+
+
+
+

41.53. XML Plug-in

+
+
    +
  • +

    Added ability to provide custom merge schemas as part of the template folder.

    +
  • +
  • +

    Added further merge strategies for merging including XML validation.

    +
  • +
+
+
+
+

41.54. Java Plug-in

+
+
    +
  • +

    Fixed NPE for annotated constructors.

    +
  • +
  • +

    Fixed line separator handling to now prefer the file’s one instead of the system ones.

    +
  • +
  • +

    Fixed unwanted new lines in constructors after merging.

    +
  • +
  • +

    Fixed annotation formatting after merge.

    +
  • +
+
+
+
+

41.55. TypeScript Plug-in

+
+
    +
  • +

    Fixed issue on automatic update of the ts-merger bundle.

    +
  • +
+
+
+
+

41.56. Sonar devon4j plugin

+
+

The consolidated list of features for this Sonar devon4j plugin release is as follows.

+
+
+

With this release, we added our own quality profile:

+
+
+
    +
  • +

    #16: Install devon4j quality profile

    +
  • +
+
+
+

Changes for this release can be found in milestone 2020.08.001

+
+
+
+

41.57. My Thai Star with Microservices and ISTIO Service Mesh Implementation

+
+

As always, our reference application, My Thai Star now has been implemented with Microservices and ISTIO Service Mesh features:

+
+
+
    +
  • +

    devon4j - Java

    +
    +
      +
    • +

      My Thai Star now has a sample version on Microservices architecture.

      +
    • +
    • +

      The github repository for the microservices version of My Thai Star is hosted at My Thai Star with Microservices

      +
    • +
    • +

      My Thai Star Microservices now has a multi stage docker build which generates the respective docker images for all the My Thai Star services.

      +
    • +
    • +

      My Thai Star microservices has the Kubernetes artifacts available to be able to deploy into Kubernetes pods.

      +
    • +
    • +

      My Thai Star microservices has ISTIO the service mesh implementation.

      +
    • +
    • +

      Check out the guides to implement or configure ISTIO features such as Traffic Routing, Network Resiliency features(RequestRouting, RequestTimeouts, Fault Injection, Circuit Breaker), Canary Deployments.

      +
    • +
    +
    +
  • +
+
+ +
+
+

41.58. devonfw Release notes 2020.04

+ +
+
+

41.59. Introduction

+
+

We are proud to announce the immediate release of devonfw version 2020.04. This version is the first one with the new versioning that will make easier to the community to identify when it was released since we use the year and month as many other software distributions.

+
+
+

This release includes lots of bug fixes and many version updates, but it is very important to highlight the following improvements:

+
+
+
    +
  • +

    New devonfw IDE auto-configure project feature.

    +
  • +
  • +

    Improved devonfw IDE plugin configuration.

    +
  • +
  • +

    New devon4j kafka module.

    +
  • +
  • +

    New devon4j JWT module.

    +
  • +
  • +

    New devon4j authorization of batches feature.

    +
  • +
  • +

    Dozer replaced with Orika in devon4j.

    +
  • +
  • +

    Support for composite keys in devon4j and CobiGen.

    +
  • +
  • +

    Multiple enhancements for project specific plugin development and usage of project specific template sets in CobiGen.

    +
  • +
  • +

    Ability to adapt your own templates by making use of CobiGen CLI.

    +
  • +
  • +

    Better responsiveness in eclipse and bugfixes in all assets in CobiGen.

    +
  • +
  • +

    devon4ng updated to Angular 9, NgRx 9 and Ionic 5, including documentation, samples and templates.

    +
  • +
  • +

    Yarn 2 support in devon4ng.

    +
  • +
  • +

    devon4node updated to NestJS 7 (packages, samples and documentation)

    +
  • +
  • +

    devon4node TSLint replaced with ESLint.

    +
  • +
  • +

    @devon4node/config package added.

    +
  • +
  • +

    devon4net updated to latest .NET Core 3.1.3 LTS version.

    +
  • +
  • +

    Update of the Production Line templates for devonfw projects in devonfw shop floor.

    +
  • +
  • +

    New merge feature included in the devonfw shop floor cicdgen tool.

    +
  • +
  • +

    Updated sonar-devon4j-plugin:

    +
    +
      +
    • +

      Improved coloring and other visual cues to our rule descriptions to highlight good and bad code examples.

      +
    • +
    • +

      Improved the locations of issues thrown on method- and class-level.

      +
    • +
    +
    +
  • +
+
+
+

Please check the detailed list below.

+
+
+

This would have not been possible without the commitment and hard work of the devonfw core team, German, Indian and ADCenter Valencia colleagues and collaborators as, among many others, the Production Line team.

+
+
+
+

41.60. devonfw IDE

+
+

The consolidated list of features for this devonfw IDE release is as follows.

+
+
+
+

41.61. 2020.04.001

+
+

Starting with this release we have changed the versioning schema in devonfw to yyyy.mm.NNN where yyyy.mm is the date of the planned milestone release and NNN is a running number increased with every bug- or security-fix update.

+
+
+
    +
  • +

    #394 variable from devon.properties not set if not terminated with newline

    +
  • +
  • +

    #399 launching of Intellij fails with No such file or directory error.

    +
  • +
  • +

    #371 Eclipse plugin installation broke

    +
  • +
  • +

    #390 maven get/set-version buggy

    +
  • +
  • +

    #397 migration support for devon4j 2020.04.001

    +
  • +
  • +

    #400 allow custom args for release

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 2020.04.001.

+
+
+
+

41.62. 3.3.1

+
+

New release with bugfixes and new ide plugin feature:

+
+
+
    +
  • +

    #343: Setup can’t find Bash nor Git

    +
  • +
  • +

    #369: Fix flattening of POMs

    +
  • +
  • +

    #386: Feature/clone recursive

    +
  • +
  • +

    #379: Use own extensions folder in devonfw-ide

    +
  • +
  • +

    #381: Add ability to configure VS Code plugins via settings

    +
  • +
  • +

    #376: Improve Eclipse plugin configuration

    +
  • +
  • +

    #373: Fix project import on windows

    +
  • +
  • +

    #374: Rework build on import

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 3.3.1.

+
+
+
+

41.63. 3.3.0

+
+

New release with bugfixes and new project import feature:

+
+
+
    +
  • +

    #343: Detect non-admin GitForWindows and Cygwin

    +
  • +
  • +

    #175: Ability to clone projects and import into Eclipse automatically

    +
  • +
  • +

    #346: devon eclipse add-plugin parameters swapped

    +
  • +
  • +

    #363: devon ide update does not pull latest project settings

    +
  • +
  • +

    #366: update java versions to latest fix releases

    +
  • +
+
+
+

The full list of changes for this release can be found in milestone 3.3.0.

+
+
+
+

41.64. devon4j

+
+

The consolidated list of features for this devon4j release is as follows.

+
+
+
+

41.65. 2020.04.001

+
+

Starting with this release we have changed the versioning schema in devonfw to yyyy.mm.NNN where yyyy.mm is the date of the planned milestone release and NNN is a running number increased with every bug- or security-fix update.

+
+
+

The following changes have been incorporated in devon4j:

+
+
+
    +
  • +

    #233: Various version updates

    +
  • +
  • +

    #241: Add module to support JWT and parts of OAuth

    +
  • +
  • +

    #147: Switch from dozer to orika

    +
  • +
  • +

    #180: Cleanup archtype

    +
  • +
  • +

    #240: Add unreferenced guides

    +
  • +
  • +

    #202: Architecture documentation needs update for components

    +
  • +
  • +

    #145: Add a microservices article in the documentation

    +
  • +
  • +

    #198: Deploy SNAPSHOTs to OSSRH in travis CI

    +
  • +
  • +

    #90: Authorization of batches

    +
  • +
  • +

    #221: Wrote monitoring guide

    +
  • +
  • +

    #213: Document logging of custom field in json

    +
  • +
  • +

    #138: Remove deprecated RevisionMetadata[Type]

    +
  • +
  • +

    #211: Archetype: security config broken

    +
  • +
  • +

    #109: LoginController not following devon4j to use JAX-RS but uses spring-webmvc instead

    +
  • +
  • +

    #52: Improve configuration

    +
  • +
  • +

    #39: Ability to log custom fields via SLF4J

    +
  • +
  • +

    #204: Slf4j version

    +
  • +
  • +

    #190: Rework of spring-batch integration

    +
  • +
  • +

    #210: Rework documentation for blob support

    +
  • +
  • +

    #191: Rework of devon4j-batch module

    +
  • +
  • +

    #209: Include performance info in separate fields

    +
  • +
  • +

    #207: Use more specific exception for not found entity

    +
  • +
  • +

    #208: Remove unnecesary clone

    +
  • +
  • +

    #116: Bug in JSON Mapping for ZonedDateTime

    +
  • +
  • +

    #184: Fixed BOMs so devon4j and archetype can be used again

    +
  • +
  • +

    #183: Error in executing the project created with devon4j

    +
  • +
  • +

    #177: Switch to new maven-parent

    +
  • +
  • +

    #169: Provide a reason, why unchecked exceptions are used in devon4j

    +
  • +
+
+
+

Documentation is available at devon4j guide 2020.04.001. +The full list of changes for this release can be found in milestone devon4j 2020.04.001.

+
+
+
+

41.66. devon4ng

+
+

The consolidated list of features for this devon4ng release is as follows.

+
+
+
+

41.67. 2020.04.001

+
+

Starting with this release we have changed the versioning schema in devonfw to yyyy.mm.NNN where yyyy.mm is the date of the planned milestone release and NNN is a running number increased with every bug- or security-fix update.

+
+
+
    +
  • +

    #111: Yarn 2 support included

    +
  • +
  • +

    #96: devon4ng upgrade to Angular 9

    +
    +
      +
    • +

      Templates and samples updated to Angular 9, NgRx 9 and Ionic 5.

      +
    • +
    • +

      New internationalization module.

      +
    • +
    • +

      Documentation updates and improvements.

      +
    • +
    +
    +
  • +
  • +

    #95: Added token management info in documentation

    +
  • +
+
+
+
+

41.68. devon4net

+
+

The consolidated list of features for this devon4net release is as follows:

+
+
+
    +
  • +

    Updated to latest .NET Core 3.1.3 LTS version

    +
  • +
  • +

    Dependency Injection Autoregistration for services and repositories

    +
  • +
  • +

    Added multiple role managing claims in JWT

    +
  • +
  • +

    Added custom headers to circuit breaker

    +
  • +
  • +

    Reviewed default log configuration

    +
  • +
  • +

    Added support to order query results from database via lambda expression

    +
  • +
  • +

    Updated template and nuget packages

    +
  • +
+
+
+
+

41.69. devon4node

+
+

The consolidated list of features for this devon4node release is as follows:

+
+
+
    +
  • +

    Upgrade to NestJS 7 (packages, samples and documentation)

    +
  • +
  • +

    TSLint replaced with ESLint

    +
  • +
  • +

    Add lerna to project to manage all the packages

    +
  • +
  • +

    Add @devon4node/config package

    +
  • +
  • +

    Add new schematics: Repository

    +
  • +
  • +

    Improve WinstonLogger

    +
  • +
  • +

    Improve documentation

    +
  • +
  • +

    Update dependencies to latest versions

    +
  • +
+
+
+
+

41.70. CobiGen

+
+

New release with updates and bugfixes:

+
+
+
    +
  • +

    devonfw templates:

    +
    +
      +
    • +

      #1063: Upgrade devon4ng Ionic template to latest version

      +
    • +
    • +

      #1065: devon4ng templates for devon4node

      +
    • +
    • +

      #1128: update java templates for composite keys

      +
    • +
    • +

      #1130: Update template for devon4ng application template

      +
    • +
    • +

      #1131: Update template for devon4ng NgRx template

      +
    • +
    • +

      #1149: .NET templates

      +
    • +
    • +

      #1146: Dev ionic template update bug fix

      +
    • +
    +
    +
  • +
  • +

    TypeScript plugin:

    +
    +
      +
    • +

      #1126: OpenApi parse/merge issues (ionic List templates)

      +
    • +
    +
    +
  • +
  • +

    Eclipse plugin:

    +
    +
      +
    • +

      #412: Write UI Test for HealthCheck use

      +
    • +
    • +

      #867: Cobigen processbar

      +
    • +
    • +

      #1069: #953 dot path

      +
    • +
    • +

      #1099: NPE on HealthCheck

      +
    • +
    • +

      #1100: 1099 NPE on health check

      +
    • +
    • +

      #1101: #867 fix import of core and api

      +
    • +
    • +

      #1102: eclipse_plugin doesn’t accept folders as input

      +
    • +
    • +

      #1134: (Eclipse-Plugin) Resolve Template utility classes from core

      +
    • +
    • +

      #1142: #1102 accept all kinds of input

      +
    • +
    +
    +
  • +
  • +

    CobiGen core:

    +
    +
      +
    • +

      #429: Reference external template files

      +
    • +
    • +

      #1143: Abort generation if external trigger does not match

      +
    • +
    • +

      #1125: Generation of templates from external increments does not work

      +
    • +
    • +

      #747: Variable assignment for external increments throws exception

      +
    • +
    • +

      #1133: Bugfix/1125 generation of templates from external increments does not work

      +
    • +
    • +

      #1127: #1119 added new TemplatesUtilsClassesUtil class to core

      +
    • +
    • +

      #953: NPE bug if foldername contains a dot

      +
    • +
    • +

      #1067: Feature/158 lat variables syntax

      +
    • +
    +
    +
  • +
  • +

    CobiGen CLI:

    +
    +
      +
    • +

      #1111: Infinity loop in mmm-code (MavenDependencyCollector.collectWithReactor)

      +
    • +
    • +

      #1113: cobigen-cli does not seem to properly resolve classes from dependencies

      +
    • +
    • +

      #1120: Feature #1108 custom templates folder

      +
    • +
    • +

      #1115: Fixing CLI bugs related to dependencies and custom templates jar

      +
    • +
    • +

      #1108: CobiGen CLI: Allow easy use of user’s templates

      +
    • +
    • +

      #1110: FileSystemNotFoundException blocking cobigen-cli

      +
    • +
    • +

      #1138: #1108 dev cli feature custom templates folder

      +
    • +
    • +

      #1136: (Cobigen-CLI) Resolve Template utility classes from core

      +
    • +
    +
    +
  • +
+
+
+
+

41.71. devonfw-shop-floor

+
+
    +
  • +

    Add documentation for deploy jenkins slaves

    +
  • +
  • +

    Improve documentation

    +
  • +
  • +

    Add devon4net Openshift template

    +
  • +
  • +

    Add nginx docker image for devon4ng

    +
  • +
  • +

    Add Openshift provisioning

    +
  • +
  • +

    Production Line:

    +
    +
      +
    • +

      Updated MTS template: add step for dependency check and change the deployment method

      +
    • +
    • +

      Add template utils: initialize instance, openshift configuration, docker configuration and install sonar plugin

      +
    • +
    • +

      Add devon4net template

      +
    • +
    • +

      Add from existing template

      +
    • +
    • +

      Improve documentation

      +
    • +
    • +

      Refactor the documentation in order to follow the devonfw wiki workflow

      +
    • +
    • +

      Update devon4j, devon4ng, devon4net and devon4node in order to be able to choose the deployment method: none, docker or openshift.

      +
    • +
    • +

      Update the tools version in order to use the latest.

      +
    • +
    +
    +
  • +
  • +

    Production Line Shared Lib

    +
    +
      +
    • +

      Add more functionality to the existing classes.

      +
    • +
    • +

      Add classes: DependencyCheckConfiguration, DockerConfiguration and OpenshiftConfiguration

      +
    • +
    +
    +
  • +
  • +

    CICDGEN

    +
    +
      +
    • +

      Add devon4net support

      +
    • +
    • +

      Update tools versions in Jenkinsfiles to align with Production Line templates

      +
    • +
    • +

      Add merge strategies: error, keep, override, combine

      +
    • +
    • +

      Add lerna to the project

      +
    • +
    • +

      Minor improvements in the code

      +
    • +
    • +

      Add GitHub actions workflow to validate the new changes

      +
    • +
    • +

      Improve documentation

      +
    • +
    • +

      Breaking changes:

      +
      +
        +
      • +

        Remove the following parameters: plurl, ocurl

        +
      • +
      • +

        Add the following parameters: dockerurl, dockercertid, registryurl, ocname and merge

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+

41.72. Sonar devon4j plugin

+
+

The consolidated list of features for this Sonar devon4j plugin release is as follows.

+
+
+
+

41.73. 2020.04.001

+
+

This is the first version using our new versioning scheme. Here, the following issues were resolved:

+
+
+
    +
  • +

    #60: Fixed a bug in the naming check for Use-Case implementation classes

    +
  • +
  • +

    #67: Fixed a bug where the whole body of a method or a class was marked as the issue location. Now only the method / class headers will be highlighted.

    +
  • +
  • +

    #68: Made our rule descriptions more accessible by using better readable colors as well as alternative visual cues

    +
  • +
  • +

    #71: Fixed a bug where a NPE could be thrown

    +
  • +
  • +

    #74: Fixed a bug where a method always returned null

    +
  • +
+
+
+

Unrelated to any specific issues, there was some refactoring and cleaning up done with the following two PRs:

+
+
+
    +
  • +

    PR #66: Refactored the prefixes of our rule names from 'Devon' to 'devonfw'

    +
  • +
  • +

    PR #65: Sorted security-related test files into their own package

    +
  • +
+
+
+

Changes for this release can be found in milestone 2020.04.001.

+
+
+
+

41.74. My Thai Star

+
+

As always, our reference application, My Thai Star, contains some interesting improvements that come from the new features and bug fixes from the other assets. The list is as follows:

+
+
+
    +
  • +

    devon4j - Java

    +
    +
      +
    • +

      Implement example batches with modified devon-batch

      +
    • +
    • +

      Upgrade spring boot version to 2.2.6 and devon4j 2020.04.001

      +
    • +
    • +

      Migrate from dozer to orika

      +
    • +
    +
    +
  • +
  • +

    devon4ng - Angular

    +
    +
      +
    • +

      Move configuration to NgRx store

      +
    • +
    +
    +
  • +
  • +

    devonfw shop floor - Jenkins

    +
    +
      +
    • +

      Update tools versions in order to align with Production Line templates

      +
    • +
    • +

      Add dependency check step (using dependency checker and yarn audit)

      +
    • +
    • +

      Send dependency checker reports to SonarQube

      +
    • +
    • +

      Changed deployment pipelines. Now pipelines are able to deploy docker containers using docker directly. No more ssh connections to execute commands in a remote machine are required.

      +
    • +
    • +

      Update documentation in order to reflect all changes

      +
    • +
    +
    +
  • +
  • +

    devon4node - Node.js

    +
    +
      +
    • +

      Upgrade to NestJS 7

      +
    • +
    • +

      Add custom repositories

      +
    • +
    • +

      Add exceptions and exception filters

      +
    • +
    • +

      Add tests (missing in the previous version)

      +
    • +
    • +

      Split logic into use cases in order to make the test process easier

      +
    • +
    • +

      Minor patches and improvements

      +
    • +
    • +

      Documentation updated in order to reflect the new implementation

      +
    • +
    +
    +
  • +
+
+ +
+
+

41.75. devonfw Release notes 3.2 “Homer”

+ +
+
+

41.76. Introduction

+
+

We are proud to announce the immediate release of devonfw version 3.2 (code named “Homer” during development). This version is the first one that contains the new devonfw IDE by default, so there is no need to download a huge ZIP with the whole distribution regardless of the use to which it will be put. The new devonfw IDE CLI will allow any user to set up a customized development environment completely configured with access to all the devonfw features, frameworks and tools. As we now have access to the official IDEs, this is also the first macOS-compatible version.

+
+
+

This release consolidates the documentation workflow, adding the contents dynamically to the new devonfw website at the same time the PDF is generated. This has been achieved using a new GitHub action that takes the contents and builds the HTML files for the documentation section of the website. The documentation workflow proposed in the following picture is now complete:

+
+
+
+documentation workflow +
+
+
+

This release also includes the first version of devon4node. We consider that node.js should be a first-class citizen inside the devonfw platform and for that reason we have included the latest development technologies for this ecosystem. The devon4node CLI, schematics and other tools will allow our users to create powerful node.js applications with the same philosophy you may find in the other languages and frameworks included. More information at its section below.

+
+
+

The new devon4net 3.2.0 version is also included in this release. Based on the .NET Core 3.0 and containing lots of new features gathered from important and recent projects, it represents a great improvement and an intermediate step to provide support for the incoming .NET Core 3.1 LTS. More information at its section below.

+
+
+

This release includes the final version of the new CobiGen CLI, completely integrated with the new devonfw IDE. Now using commands, you will be able to generate code the same way as you do with Eclipse. This means that you can use CobiGen on other IDEs like Visual Studio Code or IntelliJ. Besides, the Update command has been implemented. Now you will be able to update easily all your CobiGen plug-ins and templates inside the CLI.

+
+
+

On the other hand, the refactoring process has been completely developed, improving the mergers and including input readers for any other languages and frameworks, allowing the creation of models to generate code from them. Last, but not least, this new version includes the new templates for devon4net, devon4ng and devon4j generation.

+
+
+

And as always, My Thai Star has been updated to the latest versions of devon4j, devon4node and devon4net including completely State Management with NgRx in its devon4ng implementation upgrade.

+
+
+

This is the last release with the current semantic versioning number and without a fixed release calendar. From now on the new devonfw releases will happen in April, August and December and will be named YYYY.MM.NN, with the first release of the next year being 2020.04.00.

+
+
+
+

41.77. Changes and new features

+ +
+
+

41.78. devonfw-ide

+
+

We have entirely rewritten our automated solution for your local IDE (integrated development environment). The former oasp4j-ide and devonfw distributions with their extra-large gigabyte zip files are now entirely replaced with devonfw-ide. This new solution is provided as a small *.tar.gz file that is publicly available. It works on all platforms and has been tested on Windows, MacOS, and Linux. After extraction you only need to run a setup script. Here you provide a settings git URL for your customer project or simply hit return for testing or small projects. After reading and confirming the terms of use it will download all required tools in the proper versions for your operating system and configure them. Instead of various confusing scripts there is now only one CLI command devon for all use-cases, which gives a much better user experience.

+
+
+

To get started go to the home page. There is even a migration-guide if you are currently used to the old approach and want to quickly jump into the new solution.

+
+
+
+

41.79. My Thai Star Sample Application

+
+

The new release of My Thai Star has focused on the following improvements:

+
+
+
    +
  • +

    Release 3.2.0.

    +
  • +
  • +

    devon4j:

    +
    +
      +
    • +

      devon4j 3.2.0 integrated.

      +
    • +
    • +

      Spring Boot 2.1.9 integrated.

      +
    • +
    • +

      SAP 4/HANA prediction use case.

      +
    • +
    • +

      Bug fixes.

      +
    • +
    +
    +
  • +
  • +

    devon4ng:

    +
    +
      +
    • +

      SAP 4/HANA prediction use case.

      +
    • +
    • +

      2FA toggleable (two factor authentication).

      +
    • +
    • +

      NgRx full integrated (PR #285).

      +
    • +
    +
    +
  • +
  • +

    devon4net

    +
    +
      +
    • +

      devon4net for dotnet core 3.0 updated

      +
    • +
    • +

      Updated the API contract compatible with the other stacks

      +
    • +
    • +

      JWT implementation reviewed to increase security

      +
    • +
    • +

      ASP.NET user database dependencies removed

      +
    • +
    • +

      HTTP2 support

      +
    • +
    • +

      Clearer CRUD pattern implementation

      +
    • +
    +
    +
  • +
  • +

    devon4node

    +
    +
      +
    • +

      TypeScript 3.6.3.

      +
    • +
    • +

      Based on Nest framework.

      +
    • +
    • +

      Configuration Module

      +
    • +
    • +

      Added cors and security headers

      +
    • +
    • +

      Added mailer module and email templates.

      +
    • +
    • +

      Built in winston logger

      +
    • +
    • +

      Custom ClassSerializerInterceptor

      +
    • +
    +
    +
  • +
  • +

    MrChecker

    +
    +
      +
    • +

      Example cases for end-to-end test.

      +
    • +
    • +

      Production line configuration.

      +
    • +
    +
    +
  • +
  • +

    CICD

    +
    +
      +
    • +

      Improved integration with Production Line

      +
    • +
    • +

      New Traefik load balancer and reverse proxy

      +
    • +
    • +

      New deployment from artifact

      +
    • +
    • +

      New CICD pipelines

      +
    • +
    • +

      New deployment pipelines

      +
    • +
    • +

      Automated creation of pipelines in Jenkins

      +
    • +
    +
    +
  • +
+
+
+
+

41.80. Documentation updates

+
+

This release addresses the new documentation workflow, making it now possible to keep the documentation synced with any change. The new documentation includes the following contents:

+
+
+
    +
  • +

    Getting started

    +
  • +
  • +

    devonfw ide

    +
  • +
  • +

    devon4j documentation

    +
  • +
  • +

    devon4ng documentation

    +
  • +
  • +

    devon4net documentation

    +
  • +
  • +

    devon4node documentation

    +
  • +
  • +

    CobiGen documentation

    +
  • +
  • +

    devonfw-shop-floor documentation

    +
  • +
  • +

    cicdgen documentation

    +
  • +
  • +

    devonfw testing with MrChecker

    +
  • +
  • +

    My Thai Star documentation

    +
  • +
  • +

    Contribution guide

    +
  • +
  • +

    Release notes

    +
  • +
+
+
+
+

41.81. devon4j

+
+

The following changes have been incorporated in devon4j:

+
+
+
    +
  • +

    Completed full support from Java8 to Java11

    +
  • +
  • +

    Several security fixes

    +
  • +
  • +

    Upgrade to Spring Boot 2.1.9

    +
  • +
  • +

    Upgrade to Spring 5.1.8

    +
  • +
  • +

    Upgrade to JUnit 5 (requires migration via devonfw-ide)

    +
  • +
  • +

    Improved JPA support for IdRef

    +
  • +
  • +

    Improved auditing metadata support

    +
  • +
  • +

    Many improvements to documentation (added JDK guide, architecture-mapping, JMS, etc.)

    +
  • +
  • +

    For all details see milestone.

    +
  • +
+
+
+
+

41.82. devon4ng

+
+

The following changes have been incorporated in devon4ng:

+
+
+
    +
  • +

    Angular CLI 8.3.1,

    +
  • +
  • +

    Angular 8.2.11,

    +
  • +
  • +

    Angular Material 8.2.3,

    +
  • +
  • +

    Ionic 4.11.1,

    +
  • +
  • +

    Capacitor 1.2.1 as Cordova replacement,

    +
  • +
  • +

    NgRx 8.3 support for State Management,

    +
  • +
  • +

    devon4ng Angular application template updated to Angular 8.2.11 with visual improvements and bugfixes https://github.com/devonfw/devon4ng-application-template

    +
  • +
  • +

    devon4ng Ionic application template updated to 4.11.1 and improved https://github.com/devonfw/devon4ng-ionic-application-template

    +
  • +
  • +

    Improved devon4ng Angular application template with state management using Angular 8 and NgRx 8 https://github.com/devonfw/devon4ng-ngrx-template

    +
  • +
  • +

    Documentation and samples updated to latest versions:

    +
    +
      +
    • +

      Web Components with Angular Elements

      +
    • +
    • +

      Initial configuration with App Initializer pattern

      +
    • +
    • +

      Error Handling

      +
    • +
    • +

      PWA with Angular and Ionic

      +
    • +
    • +

      Lazy Loading

      +
    • +
    • +

      Library construction

      +
    • +
    • +

      Layout with Angular Material

      +
    • +
    • +

      Theming with Angular Material

      +
    • +
    +
    +
  • +
+
+
+
+

41.83. devon4net

+
+

The following changes have been incorporated in devon4net:

+
+
+
    +
  • +

    Updated to latest .net core 3.0 version

    +
  • +
  • +

    Template

    +
    +
      +
    • +

      Global configuration automated. devon4net can be instantiated on any .net core application template with no effort

      +
    • +
    • +

      Added support for HTTP2

      +
    • +
    • +

      Number of libraries minimized

      +
    • +
    • +

      Architecture layer review. More clear and scalable

      +
    • +
    • +

      Added red button functionality (aka killswitch) to stop serving API requests with a custom error

      +
    • +
    • +

      Improved API error management

      +
    • +
    • +

      Added support to only accept requests from clients with a specific client certificate on Kestrel server. Special thanks to Bart Roozendaal (Capgemini NL)

      +
    • +
    • +

      All components use IOptions pattern to be set up properly

      +
    • +
    • +

      Swagger generation compatible with OpenAPI v3

      +
    • +
    +
    +
  • +
  • +

    Modules

    +
    +
      +
    • +

      The devon4net netstandard libraries have been updated to netstandard 2.1

      +
    • +
    • +

      JWT:

      +
      +
        +
      • +

        Added token encryption (token cannot be decrypted anymore by external parties). Now You can choose the encryption algorithm depending on your needs

        +
      • +
      • +

        Added support for secret key or certificate encryption

        +
      • +
      • +

        Added authorization for swagger portal

        +
      • +
      +
      +
    • +
    • +

      Circuit breaker

      +
      +
        +
      • +

        Added support to bypass certificate validation

        +
      • +
      • +

        Added support to use a certificate for https communications using Microsoft’s httpclient factory

        +
      • +
      +
      +
    • +
    • +

      Unit of Work

      +
      +
        +
      • +

        Repository classes unified and reviewed to increase performance and reduce memory consumption

        +
      • +
      • +

        Added support for different database servers: In memory, Cosmos, MySQL + MariaDB, Firebird, PostgreSQL, Oracle, SQLite, Access, MS Local.

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+

41.84. devon4node

+
+

The following changes have been incorporated in devon4node:

+
+
+
    +
  • +

    TypeScript 3.6.3.

    +
  • +
  • +

    Based on Nest framework.

    +
  • +
  • +

    Complete backend implementation.

    +
  • +
  • +

    New devon4node CLI. It will provide you some commands

    +
    +
      +
    • +

      new: create a new devon4node interactively

      +
    • +
    • +

      generate: generate code based on schematics

      +
    • +
    • +

      db: manage the database

      +
    • +
    +
    +
  • +
  • +

    New devon4node schematics

    +
    +
      +
    • +

      application: create a new devon4node application

      +
    • +
    • +

      config-module: add a configuration module to the project

      +
    • +
    • +

      mailer: install and configure the devon4node mailer module

      +
    • +
    • +

      typeorm: install TypeORM in the project

      +
    • +
    • +

      auth-jwt: add users and auth-jwt modules to the project

      +
    • +
    • +

      swagger: expose an endpoint with the auto-generated swagger

      +
    • +
    • +

      security: add cors and other security headers to the project.

      +
    • +
    • +

      crud: create all CRUD for an entity

      +
    • +
    • +

      entity: create an entity

      +
    • +
    +
    +
  • +
  • +

    New mailer module

    +
  • +
  • +

    New common library

    +
  • +
  • +

    Build in winston logger

    +
  • +
  • +

    Custom ClassSerializerInterceptor

    +
  • +
  • +

    Extendable base entity

    +
  • +
  • +

    New application samples

    +
  • +
+
+
+
+

41.85. CobiGen

+
+
    +
  • +

    CobiGen core new features:

    +
    +
      +
    • +

      CobiGen CLI: Update command implemented. Now you will be able to update easily all your CobiGen plug-ins and templates inside the CLI. Please take a look into the documentation for more info.

      +
      +
        +
      • +

        CobiGen CLI is now JDK11 compatible.

        +
      • +
      • +

        CobiGen CLI commandlet for devonfw-ide has been added. You can use it to setup easily your CLI and to run CobiGen related commands.

        +
      • +
      • +

        Added a version provider so that you will be able to know all the CobiGen plug-ins versions.

        +
      • +
      • +

        Added a progress bar shown while the CLI is downloading the CobiGen plug-ins.

        +
      • +
      +
      +
    • +
    • +

      CobiGen refactoring finished: With this refactoring we have been able to decouple CobiGen completely from the target and input language. This facilitates the creation of parsers and mergers for any language. For more information please take a look here.

      +
      +
        +
      • +

        New TypeScript input reader: We are now able to parse any TypeScript class and generate code using the parsed information. We currently use TypeORM entities as a base for generation.

        +
      • +
      +
      +
    • +
    • +

      Improving CobiGen templates:

      +
      +
        +
      • +

        Updated devon4ng-NgRx templates to NgRx 8.

        +
      • +
      • +

        Generation of an Angular client using as input a TypeORM entity. This is possible thanks to the new TypeScript input reader.

        +
      • +
      • +

        .Net templates have been upgraded to .Net Core 3.0

        +
      • +
      +
      +
    • +
    • +

      CobiGen for Eclipse is now JDK11 compatible.

      +
    • +
    • +

      Fixed bugs when adapting templates and other bugs on the CobiGen core.

      +
    • +
    +
    +
  • +
+
+
+
+

41.86. devonfw shop floor

+
+ +
+
+
+

41.87. cicdgen

+
+
    +
  • +

    Patched minor bugs

    +
  • +
+
+
+
+

41.88. sonar-devon4j-plugin

+
+

sonar-devon4j-plugin is a SonarQube plugin for architecture governance of devon4j applications. It verifies the architecture and conventions of devon4j, the Java stack of devonfw. The following changes have been incorporated: the plugin was renamed from sonar-devon-plugin to sonar-devon4j-plugin; rules/checks have been added to verify naming conventions; a new rule for proper JPA datatype mapping was added; rules are now properly tagged as architecture-violation and not as bug, etc.; and several improvements have been made to prepare the plugin to enter the SonarQube marketplace, which will happen with the very next release. Details can be found here: https://github.com/devonfw/sonar-devon4j-plugin/milestone/2?closed=1

+
+ +
+
+

41.89. devonfw Release notes 3.1 “Goku”

+ +
+
+

41.90. Introduction

+
+

We are proud to announce the immediate release of devonfw version 3.1 (code named “Goku” during development). This version is the first one that implements our new documentation workflow, that will allow users to get the updated documentation at any moment and not to wait for the next devonfw release.

+
+
+

This is now possible as we have established a new workflow and rules during development of our assets. The idea behind this is that all the repositories contain a documentation folder and, in any pull request, the developer must include the related documentation change. A new Travis CI configuration added to all these repositories will automatically take the changes and publish them in the wiki section of every repository and in the new devonfw-guide repository that consolidates all the changes from all the repositories. Another pipeline will take changes from this consolidated repository and generate dynamically the devonfw guide in PDF and in the next weeks in HTML for the new planned devonfw website. The following schema explains this process:

+
+
+
+documentation workflow +
+
+
+

This release includes the very first version of the new CobiGen CLI. Now using commands, you will be able to generate code the same way as you do with Eclipse. This means that you can use CobiGen on other IDEs like Visual Studio Code or IntelliJ. Please take a look at https://github.com/devonfw/cobigen/wiki/howto_Cobigen-CLI-generation for more info.

+
+
+

The devonfw-shop-floor project has got a lot of updates in order to make even easier the creation of devonfw projects with CICD pipelines that run on the Production Line, deploy on Red Hat OpenShift Clusters and in general Docker environments. See the details below.

+
+
+

This release includes the very first version of our devonfw-ide tool that will allow users to automate devonfw setup and update the development environment. This tool will become the default devonfw setup tool in future releases. For more information please visit the repository https://github.com/devonfw/devon-ide.

+
+
+

Following the same collaboration model we used in order to improve the integration of devonfw with Red Hat OpenShift and which allowed us to get the Red Hat Open Shift Primed certification, we have been working alongside with SAP HANA developers in order to support this database in the devon4j. This model was based on the contribution and review of pull requests in our reference application My Thai Star. In this case, SAP developers collaborated with us in the following two new use cases:

+
+
+
    +
  • +

    Prediction of future demand

    +
  • +
  • +

    Geospatial analysis and clustering of customers

    +
  • +
+
+ +
+

Last but not least the devonfw extension pack for VS Code has been improved with the latest extensions and helpers for this IDE. Among many others you can now use:

+
+ +
+

Also, it is worth trying the updated support for Java and Spring Boot development in VS Code. Check it out for yourself!

+
+
+

More information at https://marketplace.visualstudio.com/items?itemName=devonfw.devonfw-extension-pack. Also, you can contribute to this extension in this GitHub repository https://github.com/devonfw/devonfw-extension-pack-vscode.

+
+
+
+

41.91. Changes and new features

+ +
+
+

41.92. Devonfw dist

+
+
    +
  • +

    Eclipse 2018.12 integrated

    +
    +
      +
    • +

      CheckStyle Plugin updated.

      +
    • +
    • +

      SonarLint Plugin updated.

      +
    • +
    • +

      Git Plugin updated.

      +
    • +
    • +

      FindBugs Plugin updated.

      +
    • +
    • +

      CobiGen plugin updated.

      +
    • +
    +
    +
  • +
  • +

    Other Software

    +
    +
      +
    • +

      Visual Studio Code latest version included and pre-configured with the devonfw Platform Extension Pack.

      +
    • +
    • +

      Ant updated to latest.

      +
    • +
    • +

      Maven updated to latest.

      +
    • +
    • +

      Java updated to latest.

      +
    • +
    • +

      Nodejs LTS updated to latest.

      +
    • +
    • +

      @angular/cli included.

      +
    • +
    • +

      @devonfw/cicdgen included.

      +
    • +
    • +

      Yarn package manager updated.

      +
    • +
    • +

      Python3 updated.

      +
    • +
    • +

      Spyder3 IDE integrated in python3 installation updated.

      +
    • +
    • +

      devon4ng-application-template for Angular 8 at workspaces/examples

      +
    • +
    • +

      devon4ng-ionic-application-template for Ionic 4 at workspace/samples

      +
    • +
    +
    +
  • +
+
+
+
+

41.93. My Thai Star Sample Application

+
+

The new release of My Thai Star has focused on the following improvements:

+
+
+
    +
  • +

    Release 3.1.0.

    +
  • +
  • +

    devon4j:

    +
    +
      +
    • +

      devon4j 3.1.0 integrated.

      +
    • +
    • +

      Spring Boot 2.1.6 integrated.

      +
    • +
    • +

      SAP 4/HANA prediction use case.

      +
    • +
    • +

      Bug fixes.

      +
    • +
    +
    +
  • +
  • +

    devon4ng:

    +
    +
      +
    • +

      SAP 4/HANA prediction use case.

      +
    • +
    • +

      2FA toggleable (two factor authentication).

      +
    • +
    • +

      NgRx integration in process (PR #234).

      +
    • +
    +
    +
  • +
  • +

    devon4node

    +
    +
      +
    • +

      TypeScript 3.1.3.

      +
    • +
    • +

      Based on Nest framework.

      +
    • +
    • +

      Aligned with devon4j.

      +
    • +
    • +

      Complete backend implementation.

      +
    • +
    • +

      TypeORM integrated with SQLite database configuration.

      +
    • +
    • +

      Webpack bundler.

      +
    • +
    • +

      Nodemon runner.

      +
    • +
    • +

      Jest unit tests.

      +
    • +
    +
    +
  • +
  • +

    Mr.Checker

    +
    +
      +
    • +

      Example cases for end-to-end test.

      +
    • +
    • +

      Production line configuration.

      +
    • +
    • +

      CICD

      +
    • +
    • +

      Improved integration with Production Line

      +
    • +
    • +

      New Traefik load balancer and reverse proxy

      +
    • +
    • +

      New deployment from artifact

      +
    • +
    • +

      New CICD pipelines

      +
    • +
    • +

      New deployment pipelines

      +
    • +
    • +

      Automated creation of pipelines in Jenkins

      +
    • +
    +
    +
  • +
+
+
+
+

41.94. Documentation updates

+
+

This release addresses the new documentation workflow, being now possible to keep the documentation synced with any change. The new documentation includes the following contents:

+
+
+
    +
  • +

    Getting started

    +
  • +
  • +

    Contribution guide

    +
  • +
  • +

    Devcon

    +
  • +
  • +

    Release notes

    +
  • +
  • +

    devon4j documentation

    +
  • +
  • +

    devon4ng documentation

    +
  • +
  • +

    devon4net documentation

    +
  • +
  • +

    devonfw-shop-floor documentation

    +
  • +
  • +

    cicdgen documentation

    +
  • +
  • +

    devonfw testing with MrChecker

    +
  • +
  • +

    My Thai Star documentation

    +
  • +
+
+
+
+

41.95. devon4j

+
+

The following changes have been incorporated in devon4j:

+
+
+
    +
  • +

    Added Support for Java8 up to Java11

    +
  • +
  • +

    Upgrade to Spring Boot 2.1.6.

    +
  • +
  • +

    Upgrade to Spring 5.1.8

    +
  • +
  • +

    Upgrade to JPA 2.2

    +
  • +
  • +

    Upgrade to Hibernate 5.3

    +
  • +
  • +

    Upgrade to Dozer 6.4.1 (ATTENTION: Requires Migration, use devon-ide for automatic upgrade)

    +
  • +
  • +

    Many improvements to documentation (added JDK guide, architecture-mapping, JMS, etc.)

    +
  • +
  • +

    Completed support (JSON, Beanmapping) for pagination, IdRef, and java.time

    +
  • +
  • +

    Added MasterCto

    +
  • +
  • +

    For all details see milestone.

    +
  • +
+
+
+
+

41.96. devon4ng

+
+

The following changes have been incorporated in devon4ng:

+
+
+
    +
  • +

    Angular CLI 8,

    +
  • +
  • +

    Angular 8,

    +
  • +
  • +

    Angular Material 8,

    +
  • +
  • +

    Ionic 4,

    +
  • +
  • +

    Capacitor 1.0 as Cordova replacement,

    +
  • +
  • +

    NgRx 8 support for State Management,

    +
  • +
  • +

    devon4ng Angular application template updated to Angular 8 with visual improvements and bugfixes https://github.com/devonfw/devon4ng-application-template

    +
  • +
  • +

    devon4ng Ionic application template updated and improved https://github.com/devonfw/devon4ng-ionic-application-template

    +
  • +
  • +

    New devon4ng Angular application template with state management using Angular 8 and NgRx 8 https://github.com/devonfw/devon4ng-ngrx-template

    +
  • +
  • +

    New devon4ng library https://github.com/devonfw/devon4ng-library that includes the following libraries:

    +
    +
      +
    • +

      Cache Module for Angular 7+ projects.

      +
    • +
    • +

      Authorization Module for Angular 7+ projects.

      +
    • +
    +
    +
  • +
  • +

    New use cases with documentation and samples:

    +
    +
      +
    • +

      Web Components with Angular Elements

      +
    • +
    • +

      Initial configuration with App Initializer pattern

      +
    • +
    • +

      Error Handling

      +
    • +
    • +

      PWA with Angular and Ionic

      +
    • +
    • +

      Lazy Loading

      +
    • +
    • +

      Library construction

      +
    • +
    • +

      Layout with Angular Material

      +
    • +
    • +

      Theming with Angular Material

      +
    • +
    +
    +
  • +
+
+
+
+

41.97. devon4net

+
+

The following changes have been incorporated in devon4net:

+
+
+
    +
  • +

    New circuit breaker component to communicate microservices via HTTP

    +
  • +
  • +

    Resolved the update packages issue

    +
  • +
+
+
+
+

41.98. AppSec Quick Solution Guide

+
+

This release incorporates a new Solution Guide for Application Security based on the state of the art in OWASP based application security. The purpose of this guide is to offer quick solutions for common application security issues for all applications based on devonfw. It’s often the case that we need our systems to comply with certain sets of security requirements and standards. Each of these requirements needs to be understood, addressed and converted to code or project activity. We want this guide to prevent the wheel from being reinvented over and over again and to give clear hints and solutions to common security problems.

+
+
+ +
+
+
+

41.99. CobiGen

+
+
    +
  • +

    CobiGen core new features:

    +
    +
      +
    • +

      CobiGen CLI: New command line interface for CobiGen. Using commands, you will be able to generate code the same way as you do with Eclipse. This means that you can use CobiGen on other IDEs like Visual Studio Code or IntelliJ. Please take a look into the documentation for more info.

      +
      +
        +
      • +

        Performance improves greatly in the CLI thanks to the lack of GUI.

        +
      • +
      • +

        You will be able to use path globs for selecting multiple input files.

        +
      • +
      • +

        We have implemented a search functionality so that you can easily search for increments or templates.

        +
      • +
      +
      +
    • +
    • +

      First steps taken on CobiGen refactoring: With the new refactoring we will be able to decouple CobiGen completely from the target and input language. This will facilitate the creation of parsers and mergers for any language.

      +
      +
        +
      • +

        NashornJS has been deprecated: It was used for executing JavaScript code inside JVM. With the refactoring, performance has improved on the TypeScript merger.

        +
      • +
      +
      +
    • +
    • +

      Improving CobiGen templates:

      +
      +
        +
      • +

        Removed Covalent from Angular templates as it is not compatible with Angular 8.

        +
      • +
      • +

        Added devon4ng-NgRx templates that implement reactive state management. Note: The TypeScript merger is currently being improved in order to accept NgRx. The current templates are set as overridable by default.

        +
      • +
      • +

        Test data builder templates now make use of Lambdas and Consumers.

        +
      • +
      • +

        CTOs and ETOs increments have been correctly separated.

        +
      • +
      +
      +
    • +
    • +

      TypeScript merger has been improved: Now it is possible to merge comments (like tsdoc) and enums.

      +
    • +
    • +

      OpenAPI parsing extended to read enums. Also fixed some bugs when no properties were set or when URLs were too short.

      +
    • +
    • +

      Java static and object initializers now get merged.

      +
    • +
    • +

      Fixed bugs when downloading and adapting templates.

      +
    • +
    +
    +
  • +
+
+
+
+

41.100. Devcon

+
+

A new version of Devcon has been released. Fixes and new features include:

+
+
+
    +
  • +

    Updated to match current devon4j

    +
  • +
  • +

    Update to download Linux distribution.

    +
  • +
  • +

    Custom modules creation improvements.

    +
  • +
  • +

    Code Migration feature added.

    +
  • +
  • +

    Bugfixes.

    +
  • +
+
+
+
+

41.101. Devonfw OSS Modules

+
+

Modules upgraded to be used in new devon4j projects:

+
+
+ +
+
+
+

41.102. devonfw shop floor

+
+
    +
  • +

    Industrialization oriented to configure the provisioning environment provided by Production Line and deploy applications on an OpenShift cluster.

    +
  • +
  • +

    Added Jenkinsfiles to configure automatically OpenShift environments to deploy devonfw applications.

    +
  • +
  • +

    Industrialization to start new projects and configure them with CICD.

    +
  • +
  • +

    Upgrade the documentation with getting started guide to configure CICD in any devonfw project and deploy it.

    +
  • +
  • +

    Added new tool cicdgen to generate CICD code/files.

    +
  • +
+
+
+
+

41.103. cicdgen

+
+

cicdgen is a devonfw tool to generate all code/files related to CICD in your project. It’s based on Angular schematics and it has its own CLI. More information here.

+
+
+
    +
  • +

    CICD configuration for devon4j, devon4ng and devon4node projects

    +
  • +
  • +

    Option to deploy devonfw projects with Docker

    +
  • +
  • +

    Option to deploy devonfw projects with OpenShift

    +
  • +
+
+
+
+

41.104. Devonfw Testing

+ +
+
+

41.105. Mr.Checker

+
+

The Mr.Checker Test Framework is an automated testing framework for functional testing of web applications, API web services, service virtualization, security and, in the coming future, native mobile apps and databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions. Mr.Checker updates and improvements:

+
+
+
    +
  • +

    Examples available under embedded project “MrChecker-App-Under-Test” and in project wiki: https://github.com/devonfw/devonfw-testing/wiki

    +
  • +
  • +

    How to install:

    + +
  • +
  • +

    Release Note:

    +
    +
      +
    • +

      module selenium - 3.8.2.1:

      +
      +
        +
      • +

        possibility to define version of driver in properties.file

        +
      • +
      • +

        automatic driver download if the version is not specified

        +
      • +
      • +

        possibility to run with different browser options

        +
      • +
      • +

        module webAPI – 1.2.1:

        +
      • +
      • +

        possibility to connect to the remote WireMock server

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+ +
+
+

41.106. devonfw Release notes 3.0 “Fry”

+ +
+
+

41.107. Introduction

+
+

We are proud to announce the immediate release of devonfw version 3.0 (code named “Fry” during development). This version is the consolidation of Open Source, focused on the major namespace change ever in the platform, removing the OASP references and adopting the new devonfw names for each technical stack or framework.

+
+
+

The new stack names are the following:

+
+
+
    +
  • +

    devon4j, former oasp4j, is the new name for Java.

    +
  • +
  • +

    devon4ng, former oasp4ng, is the new one for Angular.

    +
  • +
  • +

    devon4net, is the new .NET stack.

    +
  • +
  • +

    devon4X, is the new stack for Xamarin development.

    +
  • +
  • +

    devon4node, is the new devonfw incubator for node.js.

    +
  • +
+
+
+

The new devon4j version was created directly from the latest oasp4j version (3.0.0). Hence it brings all the features and values that oasp4j offered. However, the namespace migration was used to do some housekeeping and remove deprecated code as well as reduce dependencies. Therefore your data-access layer will no longer have to depend on any third party except for devon4j as well as of course the JPA. We also have improved the application template that now comes with modern JSON logging ready for Docker and Logstash based environments.

+
+
+

To help you with upgrading, we introduced a migration feature in devcon. This can automatically migrate your code from oasp4j (even older versions starting from 2.4.0) to the latest version of devon4j. There might be some small manual changes left to do, but 90% of the migration will be done automatically for you.

+
+
+

Besides, the first version of the devonfw plugin for SonarQube has been released. It extends SonarQube with the ability to validate your code according to the devon4j architecture. More details at https://github.com/devonfw/sonar-devon-plugin.

+
+
+

This is the first release that integrates the new devonfw .NET framework, called devon4net, and Xamarin for mobile native development, devon4X. devon4net and devon4X are the Capgemini standard frameworks for .NET and Xamarin software development. With the two new family members, devonfw provides guidance and acceleration for the major software development platforms in our industry. Their interoperability provides you the assurance that your multichannel solution will be consistent across web and mobile channels.

+
+
+

“Fry” release contains lots of improvements in our Mr.Checker E2E Testing Framework, including a complete E2E sample inside our reference application My Thai Star. Besides Mr.Checker, we include as an incubator Testar, a test tool (and framework) to test applications at the GUI level whose objective is to solve part of the maintenance problem affecting tests by automatically generating test cases based on a structure that is automatically derived from the GUI. Testar is not included to replace Mr.Checker but rather to provide development teams with a series of interesting options which go beyond what Mr.Checker already provides.

+
+
+

Apart from Mr.Checker, engagements can now use Testar as an extra option for testing. This is a tool that enables the automated system testing of desktop, web and mobile applications at the GUI level. Testar has been added as an incubator to the platform awaiting further development during 2019.

+
+
+

The new incubator for node.js, called devon4node, has been included and implemented in several internal projects. This incubator is based on the Nest framework https://www.nestjs.com/. Nest is a framework for building efficient, scalable Node.js server-side applications. It uses progressive JavaScript, is built with TypeScript (preserves compatibility with pure JavaScript) and combines elements of OOP (Object Oriented Programming), FP (Functional Programming), and FRP (Functional Reactive Programming). Under the hood, Nest makes use of Express, but also provides compatibility with a wide range of other libraries (e.g. Fastify). This allows for easy use of the myriad third-party plugins which are available.

+
+
+

In order to facilitate the utilization of Microsoft Visual Studio Code in devonfw, we have developed and included the new devonfw Platform Extension Pack with lots of features to develop and test applications with this IDE in languages and frameworks such as TypeScript, JavaScript, .NET, Java, Rust, C++ and many more. More information at https://marketplace.visualstudio.com/items?itemName=devonfw.devonfw-extension-pack. Also, you can contribute to this extension in this GitHub repository https://github.com/devonfw/devonfw-extension-pack-vscode.

+
+
+

There is a whole range of new features and improvements which can be seen in that light. The My Thai Star sample app has now been upgraded to devon4j and devon4ng, a new devon4node backend implementation has been included that is seamlessly interchangeable, an E2E MrChecker sample project, CICD and deployment scripts have been added, and lots of bugs have been fixed.

+
+
+

Last but not least, the projects’ wikis and the devonfw Guide have once again been updated accordingly before the big refactor that will be addressed in the following release in 2019.

+
+
+
+

41.108. Changes and new features

+ +
+
+

41.109. Devonfw dist

+
+
    +
  • +

    Eclipse 2018.9 integrated

    +
    +
      +
    • +

      CheckStyle Plugin updated.

      +
    • +
    • +

      SonarLint Plugin updated.

      +
    • +
    • +

      Git Plugin updated.

      +
    • +
    • +

      FindBugs Plugin updated.

      +
    • +
    • +

      CobiGen plugin updated.

      +
    • +
    +
    +
  • +
  • +

    Other Software

    +
    +
      +
    • +

      Visual Studio Code latest version included and pre-configured with the devonfw Platform Extension Pack.

      +
    • +
    • +

      Ant updated to latest.

      +
    • +
    • +

      Maven updated to latest.

      +
    • +
    • +

      Java updated to latest.

      +
    • +
    • +

      Nodejs LTS updated to latest.

      +
    • +
    • +

      @angular/cli included.

      +
    • +
    • +

      Yarn package manager updated.

      +
    • +
    • +

      Python3 updated.

      +
    • +
    • +

      Spyder3 IDE integrated in python3 installation updated.

      +
    • +
    • +

      devon4ng-application-template for Angular 7 at workspaces/examples

      +
    • +
    • +

      devon4ng-ionic-application-template for Ionic 3.20 at workspace/samples

      +
    • +
    +
    +
  • +
+
+
+
+

41.110. My Thai Star Sample Application

+
+

The new release of My Thai Star has focused on the following improvements:

+
+
+
    +
  • +

    Release 1.12.2.

    +
  • +
  • +

    devon4j:

    +
    +
      +
    • +

      devon4j 3.0.0 integrated.

      +
    • +
    • +

      Spring Boot 2.0.4 integrated.

      +
    • +
    • +

      Spring Data integration.

      +
    • +
    • +

      New pagination and search system.

      +
    • +
    • +

      Bug fixes.

      +
    • +
    +
    +
  • +
  • +

    devon4ng:

    +
    +
      +
    • +

      Client devon4ng updated to Angular 7.

      +
    • +
    • +

      Angular Material and Covalent UI frameworks updated.

      +
    • +
    • +

      Electron framework integrated.

      +
    • +
    +
    +
  • +
  • +

    devon4node

    +
    +
      +
    • +

      TypeScript 3.1.3.

      +
    • +
    • +

      Based on Nest framework.

      +
    • +
    • +

      Aligned with devon4j.

      +
    • +
    • +

      Complete backend implementation.

      +
    • +
    • +

      TypeORM integrated with SQLite database configuration.

      +
    • +
    • +

      Webpack bundler.

      +
    • +
    • +

      Nodemon runner.

      +
    • +
    • +

      Jest unit tests.

      +
    • +
    +
    +
  • +
  • +

    Mr.Checker

    +
    +
      +
    • +

      Example cases for end-to-end test.

      +
    • +
    • +

      Production line configuration.

      +
    • +
    • +

      CICD

      +
    • +
    • +

      Improved integration with Production Line

      +
    • +
    • +

      New deployment from artifact

      +
    • +
    • +

      New CICD pipelines

      +
    • +
    • +

      New deployment pipelines

      +
    • +
    • +

      Automated creation of pipelines in Jenkins

      +
    • +
    +
    +
  • +
+
+
+
+

41.111. Documentation updates

+
+

The following contents in the devonfw guide have been updated:

+
+
+
    +
  • +

    Upgrade of all the new devonfw named assets.

    +
    +
      +
    • +

      devon4j

      +
    • +
    • +

      devon4ng

      +
    • +
    • +

      Mr.Checker

      +
    • +
    +
    +
  • +
  • +

    Electron integration cookbook.

    +
  • +
  • +

    Updated cookbook about Swagger.

    +
  • +
  • +

    Removed deprecated entries.

    +
  • +
+
+
+

Apart from this the documentation has been reviewed and some typos and errors have been fixed.

+
+
+

The current development of the guide has been moved to https://github.com/devonfw-forge/devon-guide/wiki in order to be available as the rest of OSS assets.

+
+
+
+

41.112. devon4j

+
+

The following changes have been incorporated in devon4j:

+
+
+
    +
  • +

    Spring Boot 2.0.4 Integrated.

    +
  • +
  • +

    Spring Data layer Integrated.

    +
  • +
  • +

    Decouple mmm.util.*

    +
  • +
  • +

    Removed depreciated restaurant sample.

    +
  • +
  • +

    Updated Pagination support for Spring Data

    +
  • +
  • +

    Add support for hana as dbType.

    +
  • +
  • +

    Bugfixes.

    +
  • +
+
+
+
+

41.113. devon4ng

+
+

The following changes have been incorporated in devon4ng:

+
+
+ +
+
+
+

41.114. devon4net

+
+

Some of the highlights of devon4net 1.0 are:

+
+
+
    +
  • +

    External configuration file for each environment.

    +
  • +
  • +

    .NET Core 2.1.X working solution (Latest 2.1.402).

    +
  • +
  • +

    Packages and solution templates published on nuget.org.

    +
  • +
  • +

    Full components customization by config file.

    +
  • +
  • +

    Docker ready (My Thai Star sample fully working on docker).

    +
  • +
  • +

    Port specification by configuration.

    +
  • +
  • +

    Dependency injection by Microsoft .NET Core.

    +
  • +
  • +

    Automapper support.

    +
  • +
  • +

    Entity framework ORM (Unit of work, async methods).

    +
  • +
  • +

    .NET Standard library 2.0 ready.

    +
  • +
  • +

    Multi-platform support: Windows, Linux, Mac.

    +
  • +
  • +

    Samples: My Thai Star back-end, Google API integration, Azure login, AOP with Castle.

    +
  • +
  • +

    Documentation site.

    +
  • +
  • +

    SPA page support.

    +
  • +
+
+
+

And included the following features:

+
+
+
    +
  • +

    Logging:

    +
    +
      +
    • +

      Text File.

      +
    • +
    • +

      Sqlite database support.

      +
    • +
    • +

      Serilog Seq Server support.

      +
    • +
    • +

      Graylog integration ready through TCP/UDP/HTTP protocols.

      +
    • +
    • +

      API Call params interception (simple and compose objects).

      +
    • +
    • +

      API error exception management.

      +
    • +
    +
    +
  • +
  • +

    Swagger:

    +
    +
      +
    • +

      Swagger auto generating client from comments and annotations on controller classes.

      +
    • +
    • +

      Full swagger client customization (Version, Title, Description, Terms, License, Json endpoint definition).

      +
    • +
    +
    +
  • +
  • +

    JWT:

    +
    +
      +
    • +

      Issuer, audience, token expiration customization by external file configuration.

      +
    • +
    • +

      Token generation via certificate.

      +
    • +
    • +

      MVC inherited classes to access JWT user properties.

      +
    • +
    • +

      API method security access based on JWT Claims.

      +
    • +
    +
    +
  • +
  • +

    CORS:

    +
    +
      +
    • +

      Simple CORS definition ready.

      +
    • +
    • +

      Multiple CORS domain origin definition with specific headers and verbs.

      +
    • +
    +
    +
  • +
  • +

    Headers:

    +
    +
      +
    • +

      Automatic header injection with middleware.

      +
    • +
    • +

      Supported header definitions: AccessControlExposeHeader, StrictTransportSecurityHeader, XFrameOptionsHeader, XssProtectionHeader, XContentTypeOptionsHeader, ContentSecurityPolicyHeader, PermittedCrossDomainPoliciesHeader, ReferrerPolicyHeader.

      +
    • +
    +
    +
  • +
  • +

    Reporting server:

    +
    +
      +
    • +

      Partial implementation of reporting server based on My-FyiReporting (now runs on linux container).

      +
    • +
    +
    +
  • +
  • +

    Testing:

    +
    +
      +
    • +

      Integration test template with sqlite support.

      +
    • +
    • +

      Unit test template.

      +
    • +
    • +

      Moq, xunit frameworks integrated.

      +
    • +
    +
    +
  • +
+
+
+
+

41.115. devon4X

+
+

Some of the highlights of the new devonfw Xamarin framework are:

+
+
+
    +
  • +

    Based on Excalibur framework by Hans Harts (https://github.com/Xciles/Excalibur).

    +
  • +
  • +

    Updated to latest MVVMCross 6 version.

    +
  • +
  • +

    My Thai Star Excalibur forms sample.

    +
  • +
  • +

    Xamarin Forms template available on nuget.org.

    +
  • +
+
+
+
+

41.116. AppSec Quick Solution Guide

+
+

This release incorporates a new Solution Guide for Application Security based on the state of the art in OWASP based application security. The purpose of this guide is to offer quick solutions for common application security issues for all applications based on devonfw. It’s often the case that we need our systems to comply with certain sets of security requirements and standards. Each of these requirements needs to be understood, addressed and converted to code or project activity. We want this guide to prevent the wheel from being reinvented over and over again and to give clear hints and solutions to common security problems.

+
+
+ +
+
+
+

41.117. CobiGen

+
+
    +
  • +

    CobiGen core new features:

    +
    +
      +
    • +

      CobiGen_Templates will not need to be imported into the workspace anymore. However, If you want to adapt them, you can still click on a button that automatically imports them for you.

      +
    • +
    • +

      CobiGen_Templates can be updated by one-click whenever the user wants to have the latest version.

      +
    • +
    • +

      Added the possibility to reference external increments on configuration level. This is used for reducing the number of duplicated templates.

      +
    • +
    +
    +
  • +
  • +

    CobiGen_Templates project and docs updated:

    +
    +
      +
    • +

      Spring standard has been followed better than ever.

      +
    • +
    • +

      Interface templates get automatically relocated to the api project. Needed for following the new devon4j standard.

      +
    • +
    +
    +
  • +
  • +

    CobiGen Angular:

    +
    +
      +
    • +

      Angular 7 generation improved based on the updated application template.

      +
    • +
    • +

      Pagination changed to fit Spring standard.

      +
    • +
    +
    +
  • +
  • +

    CobiGen Ionic: Pagination changed to fit Spring standard.

    +
  • +
  • +

    CobiGen OpenAPI plugin released with multiple bug-fixes and other functionalities like:

    +
    +
      +
    • +

      Response and parameter types are parsed properly when they are a reference to an entity.

      +
    • +
    • +

      Parameters defined on the body of a request are being read correctly.

      +
    • +
    +
    +
  • +
+
+
+
+

41.118. Devcon

+
+

A new version of Devcon has been released. Fixes and new features include:

+
+
+
    +
  • +

    Updated to match current devon4j

    +
  • +
  • +

    Update to download Linux distribution.

    +
  • +
  • +

    Custom modules creation improvements.

    +
  • +
  • +

    Code Migration feature added

    +
  • +
  • +

    Bugfixes.

    +
  • +
+
+
+
+

41.119. Devonfw OSS Modules

+
+

Modules upgraded to be used in new devon4j projects:

+
+
+
    +
  • +

    Reporting module

    +
  • +
  • +

    WinAuth AD Module

    +
  • +
  • +

    WinAuth SSO Module

    +
  • +
  • +

    I18n Module

    +
  • +
  • +

    Async Module

    +
  • +
  • +

    Integration Module

    +
  • +
  • +

    Microservice Module

    +
  • +
  • +

    Compose for Redis Module

    +
  • +
+
+ +
+
+

41.120. Devonfw Testing

+ +
+
+

41.121. == Mr.Checker

+
+

The Mr.Checker Test Framework is an automated testing framework for functional testing of web applications, API web services, Service Virtualization, Security and, in the coming future, native mobile apps and databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions. Mr.Checker updates and improvements:

+
+
+ +
+
+
+

41.122. == Testar

+
+

We have added Test*, Testar, as an incubator to the available test tools within devonfw. This ground-breaking tool is being developed by the Technical University of Valencia (UPV). In 2019 Capgemini will co-develop Testar with the UPV.

+
+
+

Testar is a tool that enables the automated system testing of desktop, web and mobile applications at the GUI level.

+
+
+

With Testar, you can start testing immediately. It automatically generates and executes test sequences based on a structure that is automatically derived from the UI through the accessibility API. Testar can detect the violation of general-purpose system requirements and you can use plugins to customize your tests.

+
+
+

You do not need test scripts or script maintenance. The tests are random and are generated and executed automatically.

+
+
+

If you need to do directed tests you can create scripts to test specific requirements of your application.

+
+
+

Testar is included in the devonfw distro or can be downloaded from https://testar.org/download/.

+
+
+

The GitHub repository can be found at: https://github.com/TESTARtool/TESTAR.

+
+ +
+
+

41.123. devonfw Release notes 2.4 “EVE”

+ +
+
+

41.124. Introduction

+
+

We are proud to announce the immediate release of devonfw version 2.4 (code named “EVE” during development). This version is the first one that fully embraces Open Source, including components like the documentation assets and CobiGen. Most of the IP (Intellectual Property or proprietary) part of devonfw are now published under the Apache License version 2.0 (with the documentation under the Creative Commons License (Attribution-NoDerivatives)). This includes the GitHub repositories where all the code and documentation is located. All of these repositories are now open for public viewing as well.

+
+
+

“EVE” contains a slew of new features but in essence it is already driven by what we expect to be the core focus of 2018: strengthening the platform and improving quality.

+
+
+

This release is also fully focused on deepening the platform rather than expanding it. That is to say: we have worked on improving existing features rather than adding new ones and strengthen the qualitative aspects of the software development life cycle, i.e. security, testing, infrastructure (CI, provisioning) etc.

+
+
+

“EVE” already is very much an example of this. This release contains the Allure Test Framework (included as an incubator in version 2.3) update called MrChecker Test Framework. MrChecker is an automated testing framework for functional testing of web applications, API web services, Service Virtualization, Security and, in the coming future, native mobile apps and databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions.

+
+
+

Another incubator being updated is the devonfw Shop Floor which intended to be a compilation of DevOps experiences from the devonfw perspective. A new part of the release is the new Solution Guide for Application Security based on the state of the art in OWASP based application security.

+
+
+

There is a whole range of new features and improvements which can be seen in that light. devon4j 2.6 changes and improves the package structure of the core Java framework. The My Thai Star sample app has now been upgraded to Angular 6, lots of bugs have been fixed and the devonfw Guide has once again been improved.

+
+
+

Last but not least, this release contains the formal publication of the devonfw Methodology or The Accelerated Solution Design - an Industry Standards based solution design and specification (documentation) methodology for Agile (and less-than-agile) projects.

+
+
+
+

41.125. Changes and new features

+ +
+
+

41.126. devonfw 2.4 is Open Source

+
+

This version is the first release of devonfw that fully embraces Open Source, including components like the documentation assets and CobiGen. This is done in response to intensive market pressure and demands from the MUs (Public Sector France, Netherlands).

+
+
+

Most of the IP (Intellectual Property or proprietary) part of devonfw are now published under the Apache License version 2.0 (with the documentation under the Creative Commons License (Attribution-NoDerivatives)).

+
+
+

So you can now use the devonfw distribution (the "zip" file), CobiGen, the devonfw modules and all other components without any worry to expose the client unwittingly to Capgemini IP.

+
+
+

Note: there are still some components which are IP and are not published under an OSS license: the classroom trainings, the Sencha components and some CobiGen templates. But these are not included in the distribution nor the documentation and are now completely maintained separately.

+
+
+
+

41.127. devonfw dist

+
+
    +
  • +

    Eclipse Oxygen integrated

    +
    +
      +
    • +

      CheckStyle Plugin updated.

      +
    • +
    • +

      SonarLint Plugin updated.

      +
    • +
    • +

      Git Plugin updated.

      +
    • +
    • +

      FindBugs Plugin updated.

      +
    • +
    • +

      CobiGen plugin updated.

      +
    • +
    +
    +
  • +
  • +

    Other Software

    +
    +
      +
    • +

      Visual Studio Code latest version included and pre-configured with https://devonfw.com/website/pages/docs/cli.adoc.html#vscode.adoc

      +
    • +
    • +

      Ant updated to latest.

      +
    • +
    • +

      Maven updated to latest.

      +
    • +
    • +

      Java updated to latest.

      +
    • +
    • +

      Nodejs LTS updated to latest.

      +
    • +
    • +

      @angular/cli included.

      +
    • +
    • +

      Yarn package manager updated.

      +
    • +
    • +

      Python3 updated.

      +
    • +
    • +

      Spyder3 IDE integrated in python3 installation updated.

      +
    • +
    • +

      devon4ng-application-template for Angular 6 at workspaces/examples

      +
    • +
    +
    +
  • +
+
+
+
+

41.128. My Thai Star Sample Application

+
+

The new release of My Thai Star has focused on the following improvements:

+
+
+
    +
  • +

    Release 1.6.0.

    +
  • +
  • +

    Travis CI integration with Docker. Now we get a valuable feedback of the current status and when collaborators make pull requests.

    +
  • +
  • +

    Docker compose deployment.

    +
  • +
  • +

    devon4j:

    +
    +
      +
    • +

      Flyway upgrade from 3.2.1 to 4.2.0

      +
    • +
    • +

      Bug fixes.

      +
    • +
    +
    +
  • +
  • +

    devon4ng:

    +
    +
      +
    • +

      Client devon4ng updated to Angular 6.

      +
    • +
    • +

      Frontend translated into 9 languages.

      +
    • +
    • +

      Improved mobile and tablet views.

      +
    • +
    • +

      Routing fade animations.

      +
    • +
    • +

      Compodoc included to generate dynamically frontend documentation.

      +
    • +
    +
    +
  • +
+
+
+
+

41.129. Documentation updates

+
+

The following contents in the devonfw guide have been updated:

+
+
+
    +
  • +

    devonfw OSS modules documentation.

    +
  • +
  • +

    Creating a new devon4j application.

    +
  • +
  • +

    How to update Angular CLI in devonfw.

    +
  • +
  • +

    Include Angular i18n.

    +
  • +
+
+
+

Apart from this the documentation has been reviewed and some typos and errors have been fixed.

+
+
+

The current development of the guide has been moved to https://github.com/devonfw/devonfw-guide/wiki in order to be available as the rest of OSS assets.

+
+
+
+

41.130. devon4j

+
+

The following changes have been incorporated in devon4j:

+
+
+
    +
  • +

    Integrate batch with archetype.

    +
  • +
  • +

    Application module structure and dependencies improved.

    +
  • +
  • +

    Issues with Application Template fixed.

    +
  • +
  • +

    Solved issue where Eclipse maven template devon4j-template-server version 2.4.0 produced pom with missing dependency spring-boot-starter-jdbc.

    +
  • +
  • +

    Solved datasource issue with project archetype 2.4.0.

    +
  • +
  • +

    Decouple archetype from sample (restaurant).

    +
  • +
  • +

    Upgrade to Flyway 4.

    +
  • +
  • +

    Fix for issue with Java 1.8 and QueryDSL #599.

    +
  • +
+
+
+
+

41.131. devon4ng

+
+

The following changes have been incorporated in devon4ng:

+
+
+ +
+
+
+

41.132. AppSec Quick Solution Guide

+
+

This release incorporates a new Solution Guide for Application Security based on the state of the art in OWASP based application security. The purpose of this guide is to offer quick solutions for common application security issues for all applications based on devonfw. It’s often the case that we need our systems to comply with certain sets of security requirements and standards. Each of these requirements needs to be understood, addressed and converted to code or project activity. We want this guide to prevent the wheel from being reinvented over and over again and to give clear hints and solutions to common security problems.

+
+
+ +
+
+
+

41.133. CobiGen

+
+
    +
  • +

    CobiGen_Templates project and docs updated.

    +
  • +
  • +

    CobiGen Angular 6 generation improved based on the updated application template

    +
  • +
  • +

    CobiGen Ionic CRUD App generation based on Ionic application template. Although a first version was already implemented, it has been deeply improved:

    +
    +
      +
    • +

      Changed the code structure to comply with Ionic standards.

      +
    • +
    • +

      Added pagination.

      +
    • +
    • +

      Pull-to-refresh, swipe and attributes header implemented.

      +
    • +
    • +

      Code documented and JSDoc enabled (similar to Javadoc)

      +
    • +
    +
    +
  • +
  • +

    CobiGen TSPlugin Interface Merge support.

    +
  • +
  • +

    CobiGen XML plugin comes out with new cool features:

    +
    +
      +
    • +

      Enabled the use of XPath within variable assignment. You can now retrieve almost any data from an XML file and store it on a variable for further processing on the templates. Documented here.

      +
    • +
    • +

      Able to generate multiple output files per XML input file.

      +
    • +
    • +

      Generating code from UML diagrams. XMI files (standard XML for UML) can be now read and processed. This means that you can develop templates and generate code from an XMI like class diagrams.

      +
    • +
    +
    +
  • +
  • +

    CobiGen OpenAPI plugin released with multiple bug-fixes and other functionalities like:

    +
    +
      +
    • +

      Assigning global and local variables is now possible. Therefore you can set any string for further processing on the templates. For instance, changing the root package name of the generated files. Documented here.

      +
    • +
    • +

      Enabled having a class with more than one relationship to another class (more than one property of the same type).

      +
    • +
    +
    +
  • +
  • +

    CobiGen Text merger plugin has been extended and now it is able to merge text blocks. This means, for example, that the generation and merging of adoc documentation is possible. Documented here.

    +
  • +
+
+
+
+

41.134. Devcon

+
+

A new version of Devcon has been released. Fixes and new features include:

+
+
+
    +
  • +

    Now Devcon is OSS, with public repository at https://github.com/devonfw/devcon

    +
  • +
  • +

    Updated to match current devon4j

    +
  • +
  • +

    Update to download Linux distribution.

    +
  • +
  • +

    Custom modules creation improvements.

    +
  • +
  • +

    Bugfixes.

    +
  • +
+
+
+
+

41.135. devonfw OSS Modules

+
+
    +
  • +

    Existing devonfw IP modules have been moved to OSS.

    +
    +
      +
    • +

      They can now be accessed in any devon4j project as optional dependencies from Maven Central.

      +
    • +
    • +

      The repository now has public access https://github.com/devonfw/devon

      +
    • +
    +
    +
  • +
  • +

    Starters available for modules:

    +
    +
      +
    • +

      Reporting module

      +
    • +
    • +

      WinAuth AD Module

      +
    • +
    • +

      WinAuth SSO Module

      +
    • +
    • +

      I18n Module

      +
    • +
    • +

      Async Module

      +
    • +
    • +

      Integration Module

      +
    • +
    • +

      Microservice Module

      +
    • +
    • +

      Compose for Redis Module

      +
    • +
    +
    +
  • +
+
+ +
+
+

41.136. devonfw Shop Floor

+
+
    +
  • +

    devonfw Shop Floor 4 Docker

    +
    +
      +
    • +

      Docker-based CICD environment

      +
      +
        +
      • +

        docker-compose.yml (installation file)

        +
      • +
      • +

        dsf4docker.sh (installation script)

        +
      • +
      • +

        Service Integration (documentation in Wiki)

        +
      • +
      +
      +
    • +
    • +

      devonfw projects build and deployment with Docker

      +
      +
        +
      • +

        Dockerfiles (multi-stage building)

        +
        +
          +
        • +

          Build artifact (NodeJS for Angular and Maven for Java)

          +
        • +
        • +

          Deploy built artifact (NGINX for Angular and Tomcat for Java)

          +
        • +
        • +

          NGINX Reverse-Proxy to redirect traffic between both Angular client and Java server containers.

          +
        • +
        +
        +
      • +
      +
      +
    • +
    +
    +
  • +
  • +

    devonfw Shop Floor 4 OpenShift

    +
    +
      +
    • +

      devonfw projects deployment in OpenShift cluster

      +
      +
        +
      • +

        s2i images

        +
      • +
      • +

        OpenShift templates

        +
      • +
      • +

        Video showcase (OpenShift Origin 3.6)

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+

This incubator is intended to be a compilation of DevOps experiences from the devonfw perspective. “How we use our devonfw projects in DevOps environments”. Integration with the Production Line, creation and service integration of a Docker-based CI environment and deploying devonfw applications in an OpenShift Origin cluster using devonfw templates. +See: https://github.com/devonfw/devonfw-shop-floor

+
+
+
+

41.137. devonfw Testing

+
+

The MrChecker Test Framework is an automated testing framework for functional testing of web applications, API web services, Service Virtualization, Security and, in the coming future, native mobile apps and databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions.

+
+
+
    +
  • +

    Examples available under embedded project “MrChecker-App-Under-Test” and in project wiki: https://github.com/devonfw/devonfw-testing/wiki

    +
  • +
  • +

    How to install:

    + +
  • +
  • +

    Release Note:

    +
    +
      +
    • +

      module core - 4.12.0.8:

      +
      +
        +
      • +

        fixes on getting Environment values

        +
      • +
      • +

        top-notch example of how to keep sensitive data, such as passwords, in the repo

        +
      • +
      +
      +
    • +
    • +

      module selenium - 3.8.1.8:

      +
      +
        +
      • +

        browser driver auto downloader

        +
      • +
      • +

        list of out-of-the-box examples to use in any web page

        +
      • +
      +
      +
    • +
    • +

      module webAPI - ver. 1.0.2 :

      +
      +
        +
      • +

        api service virtualization with REST and SOAP examples

        +
      • +
      • +

        api service virtualization with dynamic arguments

        +
      • +
      • +

        REST working test examples with page object model

        +
      • +
      +
      +
    • +
    • +

      module security - 1.0.1 (security tests against My Thai Star)

      +
    • +
    • +

      module DevOps :

      +
      +
        +
      • +

        dockerfile for Test environment execution

        +
      • +
      • +

        CI + CD as Jenkinsfile code

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+
+

41.138. devonfw methodology: Accelerated Solution Design

+
+

One of the prime challenges in Distributed Agile Delivery is the maintenance of a common understanding and unity of intent among all participants in the process of creating a product. That is: how can you guarantee that different parties in the client, different providers, all in different locations and time zones during a particular period of time actually understand the requirements of the client, the proposed solution space and the state of implementation.

+
+
+

We offer the Accelerated Solution Design as a possible answer to these challenges. The ASD is carefully designed to be a practical guideline that fosters and ensures the collaboration and communication among all team members.

+
+
+

The Accelerated Solution Design is:

+
+
+
    +
  • +

    A practical guideline rather than a “methodology”

    +
  • +
  • +

    Based on industry standards rather than proprietary methods

    +
  • +
  • +

    Consisting of an evolving, “living”, document set rather than a static, fixed document

    +
  • +
  • +

    Encapsulating the business requirements, functional definitions as well as Architecture design

    +
  • +
  • +

    Based on the intersection of Lean, Agile, DDD and User Story Mapping

    +
  • +
+
+
+

And further it is based on the essential belief or paradigm that ASD should be:

+
+
+
    +
  • +

    Focused on the design (definition) of the “externally observable behavior of a system”

    +
  • +
  • +

    Promoting communication and collaboration between team members

    +
  • +
  • +

    Guided by prototypes

    +
  • +
+
+ + +
+
+

41.139. devonfw Release notes 2.3 "Dash"

+ +
+
+

41.140. Release: improving and strengthening the Platform

+
+

We are proud to announce the immediate release of devonfw version 2.3 (code named “Dash” during development). This release comes with a bit of a delay as we decided to wait for the publication of devon4j 2.5. “Dash” contains a slew of new features but in essence it is already driven by what we expect to be the core focus of 2018: strengthening the platform and improving quality.

+
+
+

After one year and a half of rapid expansion, we expect the next release(s) of the devonfw 2.x series to be fully focused on deepening the platform rather than expanding it. That is to say: we should work on improving existing features rather than adding new ones and strengthen the qualitative aspects of the software development life cycle, i.e. testing, infrastructure (CI, provisioning) etc.

+
+
+

“Dash” already is very much an example of this. This release contains the Allure Test Framework as an incubator. This is an automated testing framework for functional testing of web applications. Another incubator is the devonfw Shop Floor which intended to be a compilation of DevOps experiences from the devonfw perspective. And based on this devonfw has been OpenShift Primed (“certified”) by Red Hat.

+
+
+

There is a whole range of new features and improvements which can be seen in that light. devon4j 2.5 changes and improves the package structure of the core Java framework. The My Thai Star sample app has now been fully integrated in the different frameworks and the devonfw Guide has once again been significantly expanded and improved.

+
+
+
+

41.141. An industrialized platform for the ADcenter

+
+

Although less visible to the overall devonfw community, an important driving force was (meaning that lots of work has been done in the context of) the creation of the ADcenter concept towards the end of 2017. Based on a radical transformation of on/near/offshore software delivery, the focus of the ADcenters is to deliver agile & accelerated “Rightshore” services with an emphasis on:

+
+
+
    +
  • +

    Delivering Business Value and optimized User Experience

    +
  • +
  • +

    Innovative software development with state of the art technology

    +
  • +
  • +

    Highly automated devops; resulting in lower costs & shorter time-to-market

    +
  • +
+
+
+

The first two ADcenters, in Valencia (Spain) and Bangalore (India), are already servicing clients all over Europe - Germany, France, Switzerland and the Netherlands - while ADcenter aligned production teams are currently working for Capgemini UK as well (through Spain). Through the ADcenter, Capgemini establishes industrialized innovation; designed for & with the user. The availability of platforms for industrialized software delivery like devonfw and the Production Line has allowed us to train and make available over 150 people in a very short time.

+
+
+

The creation of the ADcenter in such a short time is visible proof that we're getting closer to a situation where devonfw and Production Line are turning into the default development platform for APPS2, thereby standardizing all aspects of the software development life cycle: from training and design, architecture, devops and development, all the way up to QA and deployment.

+
+
+
+

41.142. Changes and new features

+ +
+
+

41.143. devonfw dist

+
+

The devonfw dist, or distribution, i.e. the central zip file which contains the main working environment for the devonfw developer, has been significantly enhanced. New features include:

+
+
+
    +
  • +

    Eclipse Oxygen integrated

    +
    +
      +
    • +

      CheckStyle Plugin installed and configured

      +
    • +
    • +

      SonarLint Plugin installed and configured

      +
    • +
    • +

      Git Plugin installed

      +
    • +
    • +

      FindBugs replaced by SpotBugs and configured

      +
    • +
    • +

      Tomcat8 specific Oxygen configuration

      +
    • +
    • +

      CobiGen Plugin installed

      +
    • +
    +
    +
  • +
  • +

    Other Software

    +
    +
      +
    • +

      Cmder integrated (when console.bat launched)

      +
    • +
    • +

      Visual Studio Code latest version included and pre-configured with https://github.com/devonfw/extension-pack-vscode

      +
    • +
    • +

      Ant updated to latest.

      +
    • +
    • +

      Maven updated to latest.

      +
    • +
    • +

      Java updated to latest.

      +
    • +
    • +

      Nodejs LTS updated to latest.

      +
    • +
    • +

      @angular/cli included.

      +
    • +
    • +

      Yarn package manager included.

      +
    • +
    • +

      Python3 integrated

      +
    • +
    • +

      Spyder3 IDE integrated in python3 installation

      +
    • +
    • +

      devon4ng-application-template for Angular5 at workspaces/examples

      +
    • +
    • +

      Devon4sencha starter templates updated

      +
    • +
    +
    +
  • +
+
+
+
+

41.144. devon4j 2.5

+ +
+
+

41.145. == Support for JAX-RS & JAX-WS clients

+
+

With the aim to enhance the ease in consuming RESTful and SOAP web services, JAX-RS and JAX-WS clients have been introduced. They enable developers to concisely and efficiently implement portable client-side solutions that leverage existing and well-established client-side HTTP connector implementations. Furthermore, the getting started time for consuming web services has been considerably reduced with the default configuration out-of-the-box which can be tweaked as per individual project requirements.

+
+ +
+
+

41.146. == Separate security logs for devon4j log component

+
+

Based on OWASP (Open Web Application Security Project), devon4j aims to give developers more control and flexibility with the logging of security events and tracking of forensic information. Furthermore, it helps classify the information in log messages and apply masking when necessary. It provides powerful security features while being based on a set of logging APIs developers are already familiar with from over a decade of experience with Log4J and its successors.

+
+
+
+

41.147. == Support for Microservices

+
+

Integration of a devon4j application into a Microservices environment can now be leveraged with this release of devon4j. Introduction of service clients for RESTful and SOAP web services based on Java EE gives developers agility and ease to access microservices in the Devon framework. It significantly cuts down the efforts on part of developers around boilerplate code and stresses more focus on the business code improving overall efficiency and quality of deliverables.

+
+
+
+

41.148. Cobigen

+
+

A new version of Cobigen has been included. New features include:

+
+
+ +
+
+
+

41.149. My Thai Star Sample Application

+
+

From this release on the My Thai Star application has been fully integrated in the different frameworks in the platform. Further more, a more modularized approach has been followed in the current release of My Thai star application to decouple client from implementation details. Which provides better encapsulation of code and dependency management for API and implementation classes. This has been achieved with creation of a new “API” module that contain interfaces for REST services and corresponding Request/Response objects. With existing “Core” module being dependent on “API” module. To read further you can follow the link https://github.com/devonfw/my-thai-star/wiki/java-design#basic-architecture-details

+
+
+

Furthermore: an email and Twitter micro service were integrated in my-thai-star. This is just for demonstration purposes. A full micro service framework is already part of devon4j 2.5.0

+
+
+
+

41.150. Documentation refactoring

+
+

The complete devonfw guide is restructured and refactored. Getting started guides are added for an easy start with devonfw. The new Tutorial has been integrated with the existing devonfw Guide, whereby existing chapters of the previous tutorial were converted to Cookbook chapters. Asciidoctor is used for devonfw guide PDF generation. +See: https://github.com/devonfw/devonfw-guide/wiki

+
+
+
+

41.151. devon4ng

+
+

The following changes have been incorporated in devon4ng:

+
+
+
    +
  • +

    Angular CLI 1.6.0,

    +
  • +
  • +

    Angular 5.1,

    +
  • +
  • +

    Angular Material 5 and Covalent 1.0.0 RC1,

    +
  • +
  • +

    PWA enabled,

    +
  • +
  • +

    Core and Shared Modules included to follow the recommended Angular projects structure,

    +
  • +
  • +

    Yarn and NPM compliant since both lock files are included in order to get a stable installation.

    +
  • +
+
+
+
+

41.152. Admin interface for devon4j apps

+
+

The new version includes an Integration of an admin interface for devon4j apps (Spring Boot). This module is based on CodeCentric´s Spring Boot Admin (https://github.com/codecentric/spring-boot-admin).

+
+
+
+

41.153. Devcon

+
+

A new version of Devcon has been released. Fixes and new features include:

+
+
+
    +
  • +

    Renaming of system Commands.

    +
  • +
  • +

    New menu has been added - “other modules”, if menus are more than 10, other modules will display some menus.

    +
  • +
  • +

    A progress bar has been added for installing the distribution

    +
  • +
+
+
+
+

41.154. devonfw Modules

+
+

Existing devonfw modules can now be accessed with the help of starters following namespace devonfw-<module_name>-starter. Starters available for modules:

+
+
+
    +
  • +

    Reporting module

    +
  • +
  • +

    WinAuth AD Module

    +
  • +
  • +

    WinAuth SSO Module

    +
  • +
  • +

    I18n Module

    +
  • +
  • +

    Async Module

    +
  • +
  • +

    Integration Module

    +
  • +
  • +

    Microservice Module

    +
  • +
  • +

    Compose for Redis Module

    +
  • +
+
+ +
+
+

41.155. devonfw Shop Floor

+
+

This incubator is intended to be a compilation of DevOps experiences from the devonfw perspective. “How we use our devonfw projects in DevOps environments”. Integration with the Production Line, creation and service integration of a Docker-based CI environment and deploying devonfw applications in an OpenShift Origin cluster using devonfw templates.

+
+ +
+
+

41.156. devonfw-testing

+
+

The Allure Test Framework is an automated testing framework for functional testing of web applications and in coming future native mobile apps, web services and databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions.

+
+
+
    +
  • +

    Examples available under embedded project “Allure-App-Under-Test” and in project wiki: https://github.com/devonfw/devonfw-testing/wiki

    +
  • +
  • +

    How to install: https://github.com/devonfw/devonfw-testing/wiki/How-to-install

    +
  • +
  • +

    Release Notes:

    +
    +
      +
    • +

      Core Module – ver.4.12.0.3:

      +
      +
        +
      • +

        Test report with logs and/or screenshots

        +
      • +
      • +

        Test groups/tags

        +
      • +
      • +

        Data Driven (inside test case, external file)

        +
      • +
      • +

        Test case parallel execution

        +
      • +
      • +

        Run on independent Operating System (Java)

        +
      • +
      • +

        Externalize test environment (DEV, QA, PROD)

        +
      • +
      +
      +
    • +
    • +

      UI Selenium module – ver. 3.4.0.3:

      +
      +
        +
      • +

        Malleable resolution ( Remote Web Design, Mobile browsers)

        +
      • +
      • +

        Support for many browsers( Internet Explorer, Edge, Chrome, Firefox, Safari)

        +
      • +
      • +

        User friendly actions ( elementCheckBox, elementDropdown, etc. )

        +
      • +
      • +

        Ubiquitous test execution (locally, against Selenium Grid through Jenkins)

        +
      • +
      • +

        Page Object Model architecture

        +
      • +
      • +

        Selenium WebDriver library ver. 3.4.0

        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+ +
+
+

41.157. DOT.NET Framework incubators

+
+

The .NET Core and Xamarin frameworks are still under development by a workgroup from The Netherlands, Spain, Poland, Italy, Norway and Germany. The 1.0 release is expected to be coming soon but the current incubator frameworks are already being used in several engagements. Some features to highlight are:

+
+
+
    +
  • +

    Full .NET implementation with multi-platform support

    +
  • +
  • +

    Detailed documentation for developers

    +
  • +
  • +

    Docker ready

    +
  • +
  • +

    Web API server side template :

    +
    +
      +
    • +

      Swagger auto-generation

      +
    • +
    • +

      JWT security

      +
    • +
    • +

      Entity Framework Support

      +
    • +
    • +

      Advanced log features

      +
    • +
    +
    +
  • +
  • +

    Xamarin Templates based on Excalibur framework

    +
  • +
  • +

    My Thai Star implementation:

    +
    +
      +
    • +

      Backend (.NET Core)

      +
    • +
    • +

      FrontEnd (Xamarin)

      +
    • +
    +
    +
  • +
+
+
+
+

41.158. devonfw has been Primed by Red Hat for OpenShift

+
+

OpenShift is a supported distribution of Kubernetes from Red Hat for container-based software deployment and management. It is using Docker containers and DevOps tools for accelerated application development. Using OpenShift allows Capgemini to avoid Cloud Vendor lock-in. OpenShift provides devonfw with a state of the art CI/CD environment (devonfw Shop Floor), providing devonfw with a platform for the whole development life cycle: from development to staging / deploy.

+
+ +
+
+

41.159. Harvested components and modules

+
+

The devonfw Harvesting process continues to add valuable components and modules to the devonfw platform. The last months the following elements were contributed:

+
+
+
+

41.160. == Service Client support (for Micro service Projects).

+
+

This client is for consuming microservices from other applications. This solution is already very flexible and customizable. As of now, this is suitable for small and simple projects where two or three microservices are invoked. Donated by Jörg Hohwiller. See: https://github.com/devonfw-forge/devonfw-microservices

+
+
+
+

41.161. == JHipster devonfw code generation

+
+

This component was donated by the ADcenter in Valencia. It was made in order to comply with strong requirements (especially from the French BU) to use jHipster for code generation.

+
+
+

JHipster is a code generator based on Yeoman generators. Its default generator generator-jhipster generates a specific JHipster structure. The purpose of generator-jhipster-DevonModule is to generate the structure and files of a typical devon4j project. It is therefore equivalent to the standard devon4j application template based CobiGen code generation.

+
+
+
+

41.162. == Simple Jenkins task status dashboard

+
+

This component has been donated by — and harvested from a system in use by — Capgemini Valencia. This dashboard, apart from an optional gamification element, allows the display of multiple Jenkins instances.

+
+
+
+

41.163. And lots more, among others:

+
+ +
+ +
+
+

41.164. devonfw Release notes 2.2 "Courage"

+ +
+
+

41.165. Production Line Integration

+
+

devonfw is now fully supported on the Production Line v1.3 and the coming v2.0. Besides that, we now "eat our own dogfood" as the whole devonfw project, all "buildable assets", now run on the Production Line.

+
+
+
+

41.166. devon4ng 2.0

+
+

The main focus of the Courage release is the renewed introduction of "devonfw for JavaScript", or devon4ng. This new version is a completely new implementation based on Angular (version 4). This new "stack" comes with:

+
+
+
    +
  • +

    New application templates for Angular 4 application (as well as Ionic 3)

    +
  • +
  • +

    A new reference application

    +
  • +
  • +

    A new tutorial (and Architecture Guide following soon)

    +
  • +
  • +

    Component Gallery

    +
  • +
  • +

    New CobiGen templates for generation of both Angular 4 and Ionic 3 UI components ("screens")

    +
  • +
  • +

    Integration of Covalent and Bootstrap offering a large number of components

    +
  • +
  • +

    my-thai-star, a showcase and reference implementation in Angular of a real, responsive usable app using recommended architecture and patterns

    +
  • +
  • +

    A new Tutorial using my-thai-star as a starting point

    +
  • +
+
+ +
+
+

41.167. New Cobigen

+
+

Major changes in this release:

+
+
+
    +
  • +

    Support for multi-module projects

    +
  • +
  • +

    Client UI Generation:

    +
    +
      +
    • +

      New Angular 4 templates based on the latest - angular project seed

      +
    • +
    • +

      Basic Typescript Merger

      +
    • +
    • +

      Basic Angular Template Merger

      +
    • +
    • +

      JSON Merger

      +
    • +
    +
    +
  • +
  • +

    Refactored devon4j templates to make use of Java template logic feature

    +
  • +
  • +

    Bugfixes:

    +
    +
      +
    • +

      Fixed merging of nested Java annotations including array values

      +
    • +
    • +

      more minor issues

      +
    • +
    +
    +
  • +
  • +

    Under the hood:

    +
    +
      +
    • +

      Large refactoring steps towards language agnostic templates formatting sensitive placeholder descriptions automatically formatting camelCase to TrainCase to snake-case, etc.

      +
    • +
    +
    +
  • +
  • +

    Easy setup of CobiGen IDE to enable fluent contribution

    +
  • +
  • +

    CI integration improved to integrate with GitHub for more valuable feedback

    +
  • +
+
+ +
+
+

41.168. MyThaiStar: New Restaurant Example, reference implementation & Methodology showcase

+
+

A major part of the new devonfw release is the incorporation of a new application, "my-thai-star" which among others:

+
+
+
    +
  • +

    serve as an example of how to make a "real" devonfw application (i.e. the application could be used for real)

    +
  • +
  • +

    Serves as an attractive showcase

    +
  • +
  • +

    Serves as a reference application of devonfw patterns and practices as well as the standard example in the new devonfw tutorial

    +
  • +
  • +

    highlights modern security option like JWT Integration

    +
  • +
+
+
+

The application is accompanied by a substantial new documentation asset, the devonfw methodology, which describes in detail the whole lifecycle of the development of a devonfw application, from requirements gathering to technical design. Officially my-thai-star is still considered to be an incubator as especially this last part is still not as mature as it could be. But the example application and tutorial are 100% complete and functional and form a marked improvement over the "old" restaurant example app. My-Thai-Star will become the standard example app from devonfw 3.0 onwards.

+
+ +
+
+

41.169. The new devonfw Tutorial

+
+

The devonfw Tutorial is a new part of the devonfw documentation which changes the focus of how people can get started with the platform

+
+
+

There are tutorials for devon4j, devon4ng (Angular) and more to come. My-Thai-Star is used throughout the tutorial series to demonstrate the basic principles, architecture, and good practices of the different devonfw "stacks". There is an elaborated exercise where the readers get to write their own application "JumpTheQueue".

+
+
+

We hope that the new tutorial offers a better, more efficient way for people to get started with devonfw. Answering especially the question: how to make a devonfw application.

+
+ +
+
+

41.170. devon4j 2.4.0

+
+

"devonfw for Java" or devon4j now includes updated versions of the latest stable versions of Spring Boot and the Spring Framework and all related dependencies. This allows guaranteed, stable, execution of any devonfw 2.X application on the latest versions of the Industry Standard Spring stack. +Another important new feature is a new testing architecture/infrastructure. All database options are updated to the latest versions as well as guaranteed to function on all Application Servers which should cause less friction and configuration time when starting a new devon4j project.

+
+
+

Details:

+
+
+
    +
  • +

    Spring Boot Upgrade to 1.5.3

    +
  • +
  • +

    Updated all underlying dependencies

    +
  • +
  • +

    Spring version is 4.3.8

    +
  • +
  • +

    Exclude Third Party Libraries that are not needed from sample restaurant application

    +
  • +
  • +

    Bugfix: Fixed the 'WhiteLabel' error received when trying to log in to the sample restaurant application that is deployed onto an external Tomcat

    +
  • +
  • +

    Bugfix: Removed the API api.org.apache.catalina.filters.SetCharacterEncodingFilter and used the Spring Framework’s API org.springframework.web.filter.CharacterEncodingFilter instead

    +
  • +
  • +

    Bugfix: Fixed the error "class file for javax.interceptor.InterceptorBinding not found" received when executing the command 'mvn site' when trying to generate javadoc using the Maven javadoc plugin

    +
  • +
  • +

    Documentation of the usage of UserDetailsService of Spring Security

    +
  • +
+
+ + +
+
+

41.171. Microservices Netflix

+
+

devonfw now includes a microservices implementation based on Spring Cloud Netflix. It provides a Netflix OSS integrations for Spring Boot apps through auto-configuration and binding to the Spring Environment. It offers microservices archetypes and a complete user guide with all the details to start creating microservices with devonfw.

+
+ +
+
+

41.172. devonfw distribution based on Eclipse OOMPH

+
+

The new Eclipse devonfw distribution is now based on Eclipse OOMPH, which allows us, in any engagement, to create and manage the distribution more effectively by formalizing the setup instructions so they can be performed automatically (due to a blocking issue this is postponed to devonfw 2.2.1, which will be released a few weeks after 2.2.0)

+
+
+
+

41.173. Visual Studio Code or Atom

+
+

The devonfw distro now contains Visual Studio Code alongside Eclipse in order to provide a default, state of the art, environment for web based development.

+
+ +
+
+

41.174. More I18N options

+
+

The platform now contains more documentation and a conversion utility which makes it easier to share i18n resource files between the different frameworks.

+
+ +
+
+

41.175. Spring Integration as devonfw Module

+
+

This release includes a new module based on the Java Message Service (JMS) and Spring Integration which provides a communication system (sender/subscriber) out-of-the-box with simple channels (only to send and read messages), request and reply channels (to send messages and responses) and request & reply asynchronously channels.

+
+ +
+
+

41.176. devonfw Harvest contributions

+
+

devonfw contains a whole series of new components obtained through the Harvesting process. Examples are :

+
+
+
    +
  • +

    New backend IP module Compose for Redis: management component for cloud environments. Redis is an open-source, blazingly fast, key/value low maintenance store. Compose’s platform gives you a configuration pre-tuned for high availability and locked down with additional security features. The component will manage the service connection and the main methods to manage the key/values on the storage. The library used is "lettuce".

    +
  • +
  • +

    Sencha component for extending GMapPanel with the following functionality :

    +
    +
      +
    • +

      Markers management

      +
    • +
    • +

      Google Maps options management

      +
    • +
    • +

      Geoposition management

      +
    • +
    • +

      Search address and coordinates management

      +
    • +
    • +

      Map events management

      +
    • +
    • +

      Map life cycle and behavior management

      +
    • +
    +
    +
  • +
  • +

    Sencha responsive Footer that moves from horizontal to vertical layout depending on the screen resolution or the device type. It is a simple functionality but we consider it very useful and reusable.

    +
  • +
+
+ +
+
+

41.177. More Deployment options to JEE Application Servers and Docker/CloudFoundry

+
+

The platform now fully supports deployment on the latest version of Weblogic, WebSphere, Wildfly (JBoss) as well as Docker and Cloud Foundry.

+
+ +
+
+

41.178. Devcon on Linux

+
+

Devcon is now fully supported on Linux which, together with the devonfw distro running on Linux, makes devonfw fully multi-platform and Cloud compatible (as Linux is the default OS in the Cloud!)

+
+ +
+
+

41.179. New devonfw Incubators

+
+

Different Business Units (countries) have contributed "incubator" frameworks:

+
+
+
    +
  • +

    devon4NET (Stack based on .NET Core / .NET "Classic" (4.6))

    +
  • +
  • +

    devon4X (Stack based on Xamarin)

    +
  • +
  • +

    devon4node (Stack based on Node-js/Serverless): https://github.com/devonfw/devon4node

    +
  • +
+
+
+

An "incubator" status means that the frameworks are production ready, all are actually already used in production, but are still not fully compliant with the devonfw definition of a "Minimally Viable Product".

+
+
+

During this summer devon4NET will be properly installed. In the mean time, if you want to have access to the source code, please contact the devonfw Core Team.

+
+ +
+
+

41.180. Release notes devonfw 2.1.1 "Balu"

+ +
+
+

41.181. Version 2.1.2: devon4j updates and some new features

+
+

We’ve released the latest update release of devonfw in the Balu series: version 2.1.2. The next major release, code named Courage, will be released approximately the end of June. This current release contains the following items:

+
+
+
+

41.182. devon4j 2.3.0 Release

+
+

Friday the 12th of May 2017 devon4j version 2.3.0 was released. Major features added are :

+
+
+
    +
  • +

    Database Integration with PostGres, MSSQL Server, MariaDB

    +
  • +
  • +

    Added docs folder for gh pages and added oomph setups

    +
  • +
  • +

    Refactored Code

    +
  • +
  • +

    Refactored Test Infrastructure

    +
  • +
  • +

    Added Documentation on debugging tests

    +
  • +
  • +

    Added Two Batch Job tests in the restaurant sample

    +
  • +
  • +

    Bugfix: Fixed the error received when the Spring Boot Application from sample application that is created from maven archetype is launched

    +
  • +
  • +

    Bugfix: Fix for 404 error received when clicked on the link '1. Table' in index.html of the sample application created from maven archetype

    +
  • +
+
+
+

The devon4j wiki and other documents are updated for release 2.3.0.

+
+
+
+

41.183. CobiGen Enhancements

+
+

Previous versions of CobiGen are able to generate code for REST services only. Now it is possible to generate the code for SOAP services as well. There are two use cases available in CobiGen:

+
+
+
    +
  • +

    SOAP without nested data

    +
  • +
  • +

    SOAP nested data

    +
  • +
+
+
+

The "nested data" use case is when there are 3 or more entities which are interrelated with each other. CobiGen will generate code which will return the nested data. Currently CobiGen services return ETO classes, CobiGen has been enhanced as to return CTO classes (ETO + relationship).

+
+
+

Apart from the SOAP code generation, the capability to express nested relationships have been added to the existing ReST code generator as well.

+
+
+
+

41.184. Micro services module (Spring Cloud/Netflix OSS)

+
+

To make it easier for devonfw users to design and develop applications based on microservices, this release provides a series of archetypes and resources based on Spring Cloud Netflix to automate the creation and configuration of microservices.

+
+
+

New documentation in the devonfw Guide contains all the details to start creating microservices with devonfw.

+
+
+
+

41.185. Spring Integration Module

+
+

Based on the Java Message Service (JMS) and Spring Integration, the devonfw Integration module provides a communication system (sender/subscriber) out-of-the-box with simple channels (only to send and read messages), request and reply channels (to send messages and responses) and request & reply asynchronously channels.

+
+
+
+

41.186. Version 2.1.1 Updates, fixes and some new features

+ +
+
+

41.187. CobiGen code-generator fixes

+
+

The CobiGen incremental code generator released in the previous version contained a regression which has now been fixed. Generating services in Batch mode whereby a package can be given as an input, using all Entities contained in that package, works again as expected.

+
+
+

For more information see: The CobiGen documentation

+
+
+
+

41.188. Devcon enhancements

+
+

In this new release we have added devcon to the devonfw distribution itself so one can directly use devcon from the console.bat or ps-console.bat windows. It is therefore no longer necessary to independently install devcon. However, as devcon is useful outside of the devonfw distribution, this remains a viable option.

+
+
+
+

41.189. Devon4Sencha

+
+

In Devon4Sencha there are changes in the sample application. It now complies fully with the architecture which is known as "universal app", so now it has screens custom tailored for desktop and mobile devices. All the basic logic remains the same for both versions. (The StarterTemplate is still only for creating a desktop app. This will be tackled in the next release.)

+
+
+
+

41.190. New Winauth modules

+
+

The original winauth module that, in previous Devon versions, implemented the Active Directory authentication and the Single Sign-on authentication now has been divided in two independent modules. The Active Directory authentication now is included in the new Winauth-ad module whereas the Single Sign-on implementation is included in a separate module called Winauth-sso. +Also some improvements have been added to Winauth-sso module to ease the way in which the module can be injected.

+
+
+
+

41.191. General updates

+
+

There are a series of updates to the devonfw documentation, principally the devonfw Guide. Further more, from this release on, you can find the devonfw guide in the doc folder of the distribution.

+
+
+

Furthermore, the devon4j and devonfw source-code in the "examples" workspace, have been updated to the latest version.

+
+
+
+

41.192. Version 2.1 New features, improvements and updates

+ +
+
+

41.193. Introduction

+
+

We are proud to present the new release of devonfw, version "2.1" which we’ve baptized "Balu". A major focus for this release is developer productivity. So that explains the name, as Balu is not just big, friendly and cuddly but also was very happy to let Mowgli do the work for him.

+
+
+
+

41.194. Cobigen code-generator UI code generation and more

+
+

The Cobigen incremental code generator which is part of devonfw has been significantly improved. Based on a single data schema it can generate the JPA/Hibernate code for the whole service layer (from data-access code to web services) for all CRUD operations. When generating code, Cobigen is able to detect and leave untouched any code which developers have added manually.

+
+
+

In the new release it supports Spring Data for data access and it is now capable of generating the whole User Interface as well: data-grids and individual rows/records with support for filters, pagination etc. That is to say: Cobigen can now generate automatically all the code from the server-side database access layer all the way up to the UI "screens" in the web browser.

+
+
+

Currently we support Sencha Ext JS with support for Angular 2 coming soon. The code generated by Cobigen can be opened and used by Sencha Architect, the visual design tool, which enables the programmer to extend and enhance the generated UI non-programmatically. When Cobigen regenerates the code, even those additions are left intact. All these features combined allow for an iterative, incremental way of development which can be up to an order of magnitude more productive than programming manually.

+
+
+

Cobigen can now also be used for code-generation within the context of an engagement. It is easily extensible and the process of how to extend it for your own project is well documented. This becomes already worthwhile ("delivers ROI") when having 5+ identical elements within the project.

+
+
+

For more information see: The Cobigen documentation

+
+
+
+

41.195. Angular 2

+
+

With the official release of Angular 2 and TypeScript 2, we’re slowly but steadily moving to embrace these important new players in the web development scene. We keep supporting the Angular 1 based devon4ng framework and are planning a migration of this framework to Angular 2 in the near future. For "Balu" we’ve have decided to integrate "vanilla" Angular 2.

+
+
+

We have migrated the Restaurant Sample application to serve as a, documented and supported, blueprint for Angular 2 applications. Furthermore, we support three "kickstarter" projects which help engagement getting started with Angular2 - either using Bootstrap or Google´s Material Design - or, alternatively, Ionic 2 (the mobile framework on top of Angular 2).

+
+
+
+

41.196. devon4j 2.2.0 Release

+
+

A new release of devon4j, version 2.2.0, is included in this release of devonfw. This release mainly focuses on server side of devonfw. i.e devon4j.

+
+
+

Major features added are :

+
+
+
    +
  • +

    Upgrade to Spring Boot 1.3.8.RELEASE

    +
  • +
  • +

    Upgrade to Apache CXF 3.1.8

    +
  • +
  • +

    Database Integration with Oracle 11g

    +
  • +
  • +

    Added Servlet for HTTP-Debugging

    +
  • +
  • +

    Refactored code and improved JavaDoc

    +
  • +
  • +

    Bugfix: mvn spring-boot:run executes successfully for devon4j application created using devon4j template

    +
  • +
  • +

    Added subsystem tests of SalesmanagementRestService and several other tests

    +
  • +
  • +

    Added Tests to test java packages conformance to devonfw conventions

    +
  • +
+
+
+

More details on features added can be found at https://github.com/devonfw/devon4j/milestone/19?closed=1 (here). The devon4j wiki and other documents are updated for release 2.2.0.

+
+
+
+

41.197. Devon4Sencha

+
+

Devon4Sencha is an alternative view layer for web applications developed with devonfw. It is based on Sencha Ext JS. As it requires a license for commercial applications it is not provided as Open Source and is considered to be part of the IP of Capgemini.

+
+
+

These libraries provide support for creating SPA (Single Page Applications) with a very rich set of components for both desktop and mobile. In the new version we extend this functionality to support for "Universal Apps", the Sencha specific term for true multi-device applications which make it possible to develop a single application for desktop, tablet as well as mobile devices. In the latest version Devon4Sencha has been upgraded to support Ext JS 6.2 and we now support the usage of Cobigen as well as Sencha Architect as extra option to improve developer productivity.

+
+
+
+

41.198. Devcon enhancements

+
+

The Devon Console, Devcon, is a cross-platform command line tool running on the JVM that provides many automated tasks around the full life-cycle of Devon applications, from installing the basic working environment and generating a new project, to running a test server and deploying an application to production. It can be used by the engagements to integrate with their proprietary tool chain.

+
+
+

In this new release we have added an optional graphical user interface (with integrated help) which makes using Devcon even easier to use. Another new feature is that it is now possible to easily extend it with commands just by adding your own or project specific Javascript files. This makes it an attractive option for project task automation.

+
+
+
+

41.199. Ready for the Cloud

+
+

devonfw is in active use in the Cloud, with projects running on IBM Bluemix and on Amazon AWS. The focus is very much to keep Cloud-specific functionality decoupled from the devonfw core. The engagement can choose between - and easily configure the use of - either CloudFoundry or Spring Cloud (alternatively, you can run devonfw in Docker containers in the Cloud as well. See elsewhere in the release notes).

+
+
+
+

41.200. Spring Data

+
+

The java server stack within devonfw, devon4j, is built on a very solid DDD architecture which uses JPA for its data access layer. We now offer integration of Spring Data as an alternative or to be used in conjunction with JPA. Spring Data offers significant advantages over JPA through its query mechanism which allows the developer to specify complex queries in an easy way. Overall, working with Spring Data should be quite a bit more productive compared with JPA for the average or junior developer. An extra advantage is that Spring Data also allows - and comes with support for - the usage of NoSQL databases like MongoDB, Cassandra, DynamoDB etc. This becomes especially critical in the Cloud where NoSQL databases typically offer better scalability than relational databases.

+
+
+
+

41.201. Videos content in the devonfw Guide

+
+

The devonfw Guide is the single, authoritative tutorial and reference ("cookbook") for all things devonfw, targeted at the general developer working with the platform (there is another document for Architects). It is clear and concise but because of the large scope and wide reach of devonfw, it comes with a hefty 370+ pages. For the impatient - and sometimes images do indeed say more than words - we’ve added videos to the Guide which significantly speed up getting started with the diverse aspects of devonfw.

+
+
+

For more information on videos check out our devonfw Youtube channel

+
+
+
+

41.202. Containerisation with Docker and the Production Line

+
+

Docker (see: https://www.docker.com/) containers wrap a piece of software in a complete filesystem that contains everything needed to run: code, runtime, system tools, system libraries – anything that can be installed on a server. Docker containers resemble virtual machines but are far more resource efficient. Because of this, Docker and related technologies like Kubernetes are taking the Enterprise and Cloud by storm. We have certified and documented the usage of devonfw on Docker so we can now firmly state that "devonfw is Docker" ready. All the more so as the iCSD Production Line is now supporting devonfw as well. The Production Line is a Docker based set of methods and tools that make possible to develop custom software to our customers on time and with the expected quality. By having first-class support for devonfw on the Production Line, iCSD has got an unified, integral solution which covers all the phases involved on the application development cycle from requirements to testing and hand-off to the client.

+
+
+
+

41.203. Eclipse Neon

+
+

devonfw comes with its own pre-configured and enhanced Eclipse based IDE: the Open Source "devonfw IDE" and "devonfw Distr" which falls under Capgemini IP. We’ve updated both versions to the latest stable version of Eclipse, Neon. From Balu onwards we support the IDE on Linux as well and we offer downloadable versions for both Windows and Linux.

+
+
+

See: The Devon IDE

+
+
+
+

41.204. Default Java 8 with Java 7 compatibility

+
+

From version 2.1. "Balu" onwards, devonfw is using by default Java 8 for both the tool-chain as well as the integrated development environments. However, both the framework as well as the IDE and tool-set remain fully backward compatible with Java 7. We have added documentation to help configuring aspects of the framework to use Java 7 or to upgrade existing projects to Java 8. See: Compatibility guide for Java7, Java8 and Tomcat7, Tomcat8

+
+
+
+

41.205. Full Linux support

+
+

In order to fully support the move towards the Cloud, from version 2.1. "Balu" onwards, devonfw is fully supported on Linux. Linux is the de-facto standard for most Cloud providers. We currently only offer first-class support for Ubuntu 16.04 LTS onward but most aspects of devonfw should run without problems on other and older distributions as well.

+
+
+
+

41.206. Initial ATOM support

+
+

Atom is a text editor that’s modern, approachable, yet hackable to the core - a tool you can customize to do anything but also use productively without ever touching a config file. It is turning into a standard for modern web development. In devonfw 2.1 "Balu" we provide a script which installs automatically the most recent version of Atom in the devonfw distribution with a pre-configured set of essential plugins.

+
+
+
+

41.207. Database support

+
+

Through JPA (and now Spring Data as well) devonfw supports many databases. In Balu we’ve extended this support to prepared configuration, extensive documentation and supporting examples for all major "Enterprise" DB servers. So it becomes even easier for engagements to start using these standard database options. Currently we provide this extended support for Oracle, Microsoft SQL Server, MySQL and PostgreSQL. +For more information see: devonfw Database Migration Guide

+
+
+
+

41.208. Internationalisation (I18N) improvements

+
+

Likewise, existing basic Internationalisation (I18N) support has been significantly enhanced through a new devonfw module and extended to support Ext JS and Angular 2 apps as well. This means that both server as well as client side applications can be made easily to support multiple languages ("locales"), using industry standard tools and without touching programming code (essential when working with teams of translators).

+
+
+
+

41.209. Asynchronous HTTP support

+
+

Asynchronous HTTP is an important feature allowing so-called "long polling" HTTP Requests (for streaming applications, for example) or with requests sending large amounts of data. By making HTTP Requests asynchronous, devonfw server instances can better support these types of use-cases while offering far better performance.

+
+
+
+

41.210. Security and License guarantees

+
+

In devonfw security comes first. The components of the framework are designed and implemented according to the recommendations and guidelines as specified by OWASP in order to confront the top 10 security vulnerabilities.

+
+
+

From version 2.1 "Balu" onward we certify that devonfw has been scanned by software from "Black Duck". This verifies that devonfw is based on 100% Open Source Software (non Copyleft) and demonstrates that at moment of release there are no known, critical security flaws. Less critical issues are clearly documented.

+
+
+
+

41.211. Documentation improvements

+
+

Apart from the previously mentioned additions and improvements to diverse aspects of the devonfw documentation, principally the devonfw Guide, there are a number of other important changes. We’ve incorporated the Devon Modules Developer´s Guide which describes how to extend devonfw with its Spring-based module system. Furthermore we’ve significantly improved the Guide to the usage of web services. We’ve included a Compatibility Guide which details a series of considerations related with different version of the framework as well as Java 7 vs 8. And finally, we’ve extended the F.A.Q. to provide the users with direct answers to common, Frequently Asked Questions.

+
+
+
+

41.212. Contributors

+
+

Many thanks to adrianbielewicz, aferre777, amarinso, arenstedt, azzigeorge, cbeldacap, cmammado, crisjdiaz, csiwiak, Dalgar, drhoet, Drophoff, dumbNickname, EastWindShak, fawinter, fbougeno, fkreis, GawandeKunal, henning-cg, hennk, hohwille, ivanderk, jarek-jpa, jart, jensbartelheimer, jhcore, jkokoszk, julianmetzler, kalmuczakm, kiran-vadla, kowalj, lgoerlach, ManjiriBirajdar, MarcoRose, maybeec, mmatczak, nelooo, oelsabba, pablo-parra, patrhel, pawelkorzeniowski, PriyankaBelorkar, RobertoGM, sekaiser, sesslinger, SimonHuber, sjimenez77, sobkowiak, sroeger, ssarmokadam, subashbasnet, szendo, tbialecki, thoptr, tsowada, znazir and anyone who we may have forgotten to add!

+
+
+
+
+
+
+
+1. "Stammdaten" in German. +
+
+2. Whether to use checked exceptions or not is a controversial topic. Arguments for both sides can be found under The Trouble with Checked Exceptions, Unchecked Exceptions — The Controversy, and Checked Exceptions are Evil. The arguments in favor of unchecked exceptions tend to prevail for applications built with devon4j. Therefore, unchecked exceptions should be used for a consistent style. +
+
+3. A package is a file or directory that is described by a package.json. . +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/Common-problems/I-cannot-find.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/Common-problems/I-cannot-find.html new file mode 100644 index 00000000..ae1c5265 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/Common-problems/I-cannot-find.html @@ -0,0 +1,279 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

I can’t find the boilerplate module. Has it been removed?

+
+
+

The boilerplate module has been removed from the GitHub project on purpose.

+
+
+

There were problems with naming and communication, not everybody was aware of the meaning of the word boilerplate.

+
+
+

The name of the folder has been changed to template. It can be found in the GitHub project.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/Common-problems/It-is-possible.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/Common-problems/It-is-possible.html new file mode 100644 index 00000000..99e12a55 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/Common-problems/It-is-possible.html @@ -0,0 +1,279 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ + +
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/Common-problems/Tests-are-not-stable.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/Common-problems/Tests-are-not-stable.html new file mode 100644 index 00000000..0f4dde05 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/Common-problems/Tests-are-not-stable.html @@ -0,0 +1,304 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Tests are not stable

+
+
+

Selenium tests perform actions much faster than a normal user would. Because pages can contain dynamically changing content, some web elements can still not be loaded when Selenium driver tries to access them.

+
+
+

getDriver().waitForPageLoaded() method checks ready state in the browser, that’s why stability problems may happen in advanced frontend projects.

+
+
+

To improve test stability you can:

+
+
+
    +
  • +

    add waiting methods before dynamically loading elements e.g. getDriver().waitForElement(By selector)

    +
  • +
  • +

    add timeout parameter in method getDriver().findElementDynamic(By selector, int timeOut)

    +
  • +
  • +

    change global waiting timeout value using method getDriver().manage().timeouts().implicitlyWait(long time, TimeUnit unit)

    +
  • +
+
+
+

Furthermore, if the page displays visible loading bars or spinners, create FluentWait method to wait until they disappear.

+
+
+

Notice that by increasing timeouts you may improve stability but too long waiting time makes tests run slower.

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/How-to/Change-timeouts.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/How-to/Change-timeouts.html new file mode 100644 index 00000000..561cf397 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/How-to/Change-timeouts.html @@ -0,0 +1,325 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

How to: Change timeouts?

+
+
+

If you would like to change timeouts - you don’t have to change them globally. +It is possible to add waiting time parameter to searching methods, such as:

+
+
+

getDriver().findElementDynamic(By selector, int timeOut)
+timeout - in seconds

+
+
+

It is recommended to use methods that significantly level up the repetitiveness of the code:

+
+
+
+
getDriver().waitForElement(By selector);
+
+getDriver().waitForElementVisible(By selector);
+
+getDriver().waitForPageLoaded();
+
+getDriver().waitUntilElementIsClickable(By selector);
+
+
+
+

Or Fluent Wait methods with changed timeout and interval:

+
+
+
+
FluentWait<WebDriver> wait = new FluentWait<WebDriver>(getDriver())
+        .withTimeout(long duration, TimeUnit unit)
+        .pollingEvery(long duration, TimeUnit unit);
+wait.until((WebDriver wd) -> expectedCondition.isTrue());
+getWebDriverWait().withTimeout(millis, TimeUnit.MILLISECONDS)
+        .withTimeout(long duration, TimeUnit unit)
+        .pollingEvery(long duration, TimeUnit unit)
+        .until((WebDriver wd) -> expectedCondition.isTrue());
+
+
+
+

These methods allow you to change WebDriver timeouts values such as:

+
+
+

getDriver().manage().timeouts().pageLoadTimeout(long time, TimeUnit unit)
+the amount of time to wait for a page to load before throwing an exception. This is the default timeout for method getDriver().waitForPageLoaded()

+
+
+

getDriver().manage().timeouts().setScriptTimeout(long time, TimeUnit unit)
+the amount of time to wait for execution of script to finish before throwing an exception

+
+
+

getDriver().manage().timeouts().implicitlyWait(long time, TimeUnit unit) +the amount of time the driver should wait when searching for an element if it is not immediately present. After that time, it throws an exception. This is the default timeout for methods such as getDriver().findElementDynamic(By selector) or getDriver().waitForElement(By selector)

+
+
+

Changing timeouts can improve test stability but can also make test run time longer.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/How-to/Start-a-browser.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/How-to/Start-a-browser.html new file mode 100644 index 00000000..04da6618 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/How-to/Start-a-browser.html @@ -0,0 +1,308 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

How to: Start a browser in Incognito/Private mode?

+
+
+

In MrChecker there is a possibility of changing browser options during runtime execution.

+
+
+

To run the browser in incognito mode:

+
+
+
    +
  1. +

    In Eclipse - open Run Configurations window:

    +
    +

    ht image1

    +
    +
  2. +
  3. +

    Select a test which you want to run and switch to arguments tab:

    +
    +

    ht image2

    +
    +
  4. +
  5. +

    Add VM argument:

    +
    +
      +
    • +

      for the incognito mode in chrome:

      +
      +

      ht image3

      +
      +
    • +
    +
    +
  6. +
+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/Installation-problems/Chromedriver-version.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/Installation-problems/Chromedriver-version.html new file mode 100644 index 00000000..2fc879a4 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/Installation-problems/Chromedriver-version.html @@ -0,0 +1,317 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Chromedriver version is not compatible with Chrome browser

+
+
+

Problem:

+
+
+

During the tests your web browser window opens and immediately closes, all your tests are broken.

+
+
+

Following error message is visible in the test description:

+
+
+
+
session not created: This version of ChromeDriver only supports Chrome version 76
+Build info: version: '<build_version>', revision: '<build_revision>', time: '<time>'
+System info: host: '<your_computer_name>', ip: '<your_ip_address>', os.name: '<your_os_name>', os.arch: '<your_os_architecture>', os.version: '<your_os_version>', java.version: '<java_version_installed>'
+Driver info: driver.version: NewChromeDriver
+
+
+
+

Solution:

+
+
+
    +
  1. +

    Make a change in the following files:

    +
    +
      +
    • +

      MrChecker_Test_Framework\workspace\devonfw-testing\src\resources\settings.properties

      +
    • +
    • +

      For project template-app-under-test: MrChecker_Test_Framework\workspace\devonfw-testing\template\src\resources\settings.properties

      +
    • +
    • +

      For project example-app-under-test: MrChecker_Test_Framework\workspace\devonfw-testing\example\src\resources\settings.properties

      +
      +

      Change the value of selenium.driverAutoUpdate field from true to false

      +
      +
    • +
    +
    +
  2. +
  3. +

    Replace the following file with a version compatible with your browser: +MrChecker_Test_Framework\workspace\devonfw-testing\example\lib\webdrivers\chrome\chromedriver.exe .

    +
  4. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/Installation-problems/My-browser-opens-up.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/Installation-problems/My-browser-opens-up.html new file mode 100644 index 00000000..dcd24ec8 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/Installation-problems/My-browser-opens-up.html @@ -0,0 +1,282 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

My browser opens up in German by default

+
+
+

Problem:

+
+
+

I would like my browser to use the English language, but the default language for the browser is German. How can I change the settings?

+
+
+

Solution:

+
+
+

There is a Properties file installed together with MrChecker installation. It is possible to set the language in which a browser could be opened for testing purposes in Properties > Selenium configuration.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/Installation-problems/run-Mobile-Tests-with-runtime-parameters.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/Installation-problems/run-Mobile-Tests-with-runtime-parameters.html new file mode 100644 index 00000000..874e320e --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/FAQ/Installation-problems/run-Mobile-Tests-with-runtime-parameters.html @@ -0,0 +1,306 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Where should I run the command:

+
+
+
+
- mvn clean compile test  -Dapp="mio-file.apk -DautomationName="UiAutomator1" -Dthread.count=1
+
+
+
+
+
I tried from:
+----
+C:\MrChecker_Test_Framework\workspace\devonfw-testing
+----
+ but it doesn’t work because of a missing POM file .Then I tried from:
+ ----
+ C:\MrChecker_Test_Framework\workspace\devonfw-testing\example” and run “mvn clean compile test  -Dapp="mio-file.apk
+ DautomationName="UiAutomator1" -Dthread.count=1
+
+
+
+
+
 ----
+and I have the following errors:
+
+
+
+
+
image::images/imageerror.png[]
+
+
+
+

If I check the repository online http://repo1.maven.org/maven2 and I go in http://repo1.maven.org/maven2/com/capgemini/mrchecker/ - nothing is done about it

+
+
+
+
mrchecker-mobile-module:jar:7.2.0.1-SNAPSHOT
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Migration/Migration-guide.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Migration/Migration-guide.html new file mode 100644 index 00000000..b3eaa6d6 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Migration/Migration-guide.html @@ -0,0 +1,656 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Junit4 to Junit5 migration guide

+
+
+

mrchecker-core-module version 5.6.2.1 features the upgrade of Junit4 to Junit5. Consequently, the Junit4 features are now obsolete and current test projects require migration +in order to use the latest revision of MrChecker. This site provides guidance on the migration.

+
+ +
+
+
+

POM

+
+
+

The project pom.xml file needs to be adjusted in the first place. An exemplary POM file for download can be found here: https://github.com/devonfw/mrchecker/blob/develop/template/pom.xml

+
+
+
+
+

Test Annotations

+
+
+

Junit5 redefines annotations defining a test flow. The annotations need to be adjusted as per the following table.

+
+
+
+migration01 +
+
+
+
+
+

Rule, ClassRule, TestRule and TestMethod

+
+
+

Junit4 @Rule and @ClassRule annoations as well as TestRule and TestMethod interfaces have been replaced +with the Junit5 extension mechanism (https://junit.org/junit5/docs/current/user-guide/#extensions). +During the migration to Junit5, all the instances of the mentioned types need to be rewritten according to the Junit5 User Guide. +The extension mechanism is far more flexible than the Junit4 functionality based on rules.

+
+
+

Note: as per Junit5 API spec: ExpectedExceptionSupport, ExternalResourceSupport, VerifierSupport +provide native support of the corresponding Junit4 rules.

+
+
+

Extension registration example:

+
+
+
+migration02 +
+
+
+
+migration arrow down +
+
+
+
+migration03 +
+
+
+

TestRule (TestWatcher and ExternalResource) to Extension (TestWatcher and AfterAllCallback) example:

+
+
+
+migration04 +
+
+
+
+migration arrow down +
+
+
+
+migration05 +
+
+
+
+
+

Page, BasePageAutoRegistration and PageFactory classes

+
+
+

Page class is a new MrChecker class. It was introduced to provide common implementation for its subpages in specific MrChecker modules. +In order to receive test lifecycle notifications, particular Pages need to be registered by calling addToTestExecutionObserver() method. +To facilitate this process, PageFactory class was designed and its usage is a recommended way of creating Page objects for tests. +Although in MrChecker based on Junit4, the registration process was done in a specific BasePage constructor, it’s been considered error-prone and reimplemented. +Furthermore, to reduce migration cost, BasePageAutoRegistration classes are available in MrChecker modules. They use the old way of registration. +Given that, three ways of migration are possible.

+
+
+

Migration with PageFactory class example (RECOMMENDED):

+
+
+
+migration06 +
+
+
+
+migration arrow down +
+
+
+
+migration07 +
+
+
+

Migration with calling addToTestExecutionObserver() method example:

+
+
+
+migration06 +
+
+
+
+migration arrow down +
+
+
+
+migration08 +
+
+
+

Migration with BasePageAutoRegistration class example:

+
+
+
+migration09 +
+
+
+
+migration arrow down +
+
+
+
+migration10 +
+
+
+
+
+

Test suites

+
+
+

Test suite migration example:

+
+
+
+migration11 +
+
+
+
+migration arrow down +
+
+
+
+migration12 +
+
+
+

Running tests from Maven:

+
+
+
+migration13 +
+
+
+
+migration arrow down +
+
+
+
+migration14 +
+
+
+
+
+

Concurrency

+
+
+

Junit5 provides native thread count and parallel execution control in contrast to Junit4 where it was controlled by Maven Surefire plugin. +To enable concurrent test execution, junit-platform.properties file needs to be placed in the test/resources directory of a project.

+
+
+

Exemplary file contents:

+
+
+
+migration15 +
+
+
+

A ready-to-use file can be found here.

+
+
+

MrChecker supports only concurrent test class execution. +@ResourceLock can be used to synchronize between classes if needed:

+
+
+
+migration16 +
+
+
+
+
+

Cucumber

+
+
+

If Cucumber is used in a project, it is necessary to change a hook class. +An exemplary hook source file for download can be found here.

+
+
+
+
+

Data driven tests

+
+
+

Junit5 implements a new approach to data driven tests by various data resolution mechanisms.

+
+
+

An example of method source parameters migration version one:

+
+
+
+migration17 +
+
+
+
+migration arrow down +
+
+
+
+migration18 +
+
+
+

An example of method source parameters migration version two:

+
+
+
+migration17 +
+
+
+
+migration arrow down +
+
+
+
+migration19 +
+
+
+

An example of method source in another class parameters migration:

+
+
+
+migration20 +
+
+
+
+migration arrow down +
+
+
+
+migration21 +
+
+
+

Providing parameters directly in annotations has no analogy in Junit5 and needs to be replaced with e.g. method source:

+
+
+
+migration22 +
+
+
+
+migration arrow down +
+
+
+
+migration23 +
+
+
+

An example of csv parameters source with no header line migration:

+
+
+
+migration24 +
+
+
+
+migration arrow down +
+
+
+
+migration25 +
+
+
+

An example of csv parameters source with the header line migration:

+
+
+
+migration26 +
+
+
+
+migration arrow down +
+
+
+
+migration27 +
+
+
+

An example of csv parameters source with object mapping migration step1:

+
+
+
+migration28 +
+
+
+
+migration arrow down +
+
+
+
+migration29 +
+
+
+

An example of csv parameters source with object mapping migration step 2:

+
+
+
+migration30 +
+
+
+
+migration arrow down +
+
+
+
+migration31 +
+
+
+
+
+

setUp() and tearDown()

+
+
+

BaseTest.setUp() and BaseTest.tearDown() methods are now not abstract and need no implementation in subclasses. @Override when a custom implementation is needed.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/MrChecker-download/Mac.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/MrChecker-download/Mac.html new file mode 100644 index 00000000..3f3e6bf9 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/MrChecker-download/Mac.html @@ -0,0 +1,390 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

MrChecker macOS installation

+
+
+

On this page, you can find all the details regarding MrChecker installation on your Mac.

+
+
+
+
+

Java installation

+
+
+

There is one important pre-requisite for MrChecker installation - Java has to be installed on the computer and an environmental variable has to be set in order to obtain optimal functioning of the framework.

+
+
+
    +
  1. +

    Install Java 1.8 JDK 64bit

    +
    +

    Download and install Java download link

    +
    +
    +

    (To download JDK 8 from Oracle you have to have an account. It is recommended to get a JDK build based on OpenJDK from AdoptOpenJDK)

    +
    +
  2. +
  3. +

    Next, verify it in the command line:

    +
    +
    +
    > java --version
    +
    +
    +
  4. +
+
+
+
+
+

Other components installation

+
+
+

Install each component separately, or update the existing ones on your Mac.

+
+
+
    +
  1. +

    Maven 3.5

    +
    +
      +
    • +

      Download Maven

      +
    • +
    • +

      Unzip Maven in the following location /maven

      +
    • +
    • +

      Add Maven to PATH

      +
      +
      +
      > $ export PATH=$PATH:/maven/apache-maven-3.5.0/bin/
      +
      +
      +
    • +
    • +

      Verify in terminal:

      +
      +
      +
      > $ mvn -version
      +
      +
      +
    • +
    +
    +
  2. +
  3. +

    Eclipse IDE

    +
    +
      +
    • +

      Download and unzip Eclipse

      +
    • +
    • +

      Download MrCheckerTestFramework source code

      +
    • +
    • +

      Import:

      +
      +
      +image9 +
      +
      +
    • +
    • +

      Select Projects from folders:

      +
      +

      image10

      +
      +
    • +
    • +

      Open already created projects:

      +
      +

      image11

      +
      +
    • +
    • +

      Update project structure - ALT + F5

      +
      +

      image12

      +
      +
    • +
    +
    +
  4. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/MrChecker-download/MyThaiStar.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/MrChecker-download/MyThaiStar.html new file mode 100644 index 00000000..cea444d3 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/MrChecker-download/MyThaiStar.html @@ -0,0 +1,318 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

My Thai Star application setup

+
+
+

My Thai Star is a reference application for DevonFW so it was used extensively in majority of our examples. To make them run properly you definitely should set it up somewhere and configure environment.csv accordingly. +You can get the app from its official repository here https://github.com/devonfw/my-thai-star.

+
+
+
+
+

Setting up My Thai Star app

+
+
+

Most of the important information is covered in https://github.com/devonfw/my-thai-star#deployment.

+
+
+
The quick summary would be:
+
    +
  1. +

    Get the machine with docker and docker-compose

    +
  2. +
  3. +

    Download the repository

    +
  4. +
  5. +

    Run docker-compose up

    +
  6. +
  7. +

    Go to your project to set up environment.csv

    +
  8. +
  9. +

    The variables we are interested in are MY_THAI_STAR_URL and MY_THAI_STAR_API_URL

    +
  10. +
  11. +

    If you set up the My Thai Star application on a different host, adjust the values accordingly

    +
  12. +
  13. +

    The web application should be available using localhost:8081/restaurant

    +
  14. +
  15. +

    The web API should be available using localhost:8081/api

    +
  16. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/MrChecker-download/Windows/Advanced-installation.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/MrChecker-download/Windows/Advanced-installation.html new file mode 100644 index 00000000..ddcd1d71 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/MrChecker-download/Windows/Advanced-installation.html @@ -0,0 +1,425 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Advanced installation

+
+ +
+
+
+

Java installation

+
+
+

There is one important pre-requisite for MrChecker installation - Java has to be installed on the computer and an environmental variable has to be set in order to obtain optimal functioning of the framework.

+
+
+
    +
  1. +

    Install Java 1.8 JDK 64bit

    +
    +

    Download and install Java download link

    +
    +
    +

    (To download JDK 8 from Oracle you have to have an account. It is recommended to get a JDK build based on OpenJDK from AdoptOpenJDK)

    +
    +
  2. +
  3. +

    Windows Local Environment - How to set:

    +
    +
      +
    • +

      Variable name: JAVA_HOME | Variable value: C:\Where_You’ve_Installed_Java

      +
    • +
    • +

      Variable name: PATH | Variable value: %JAVA_HOME%\bin;%JAVA_HOME%\lib

      +
      +
      +install win03 +
      +
      +
    • +
    +
    +
  4. +
  5. +

    Next, verify it in the command line:

    +
    +
    +
    > java --version
    +
    +
    +
  6. +
+
+
+
+
+

Other components installation

+
+
+

Install each component separately, or update the existing ones on your PC.

+
+
+
    +
  1. +

    Maven 3.5

    +
    +
      +
    • +

      Download Maven

      +
    • +
    • +

      Unzip Maven in following location C:\maven

      +
    • +
    • +

      Set Windows Local Environment

      +
      +
        +
      • +

        Variable name: M2_HOME | Variable value: C:\maven\apache-maven-3.5.0

        +
      • +
      • +

        Variable name: PATH | Variable value: %M2_HOME%\bin

        +
        +
        +install win04 +
        +
        +
      • +
      +
      +
    • +
    • +

      Verify it in the command line:

      +
      +
      +
      > mvn --version
      +
      +
      +
    • +
    +
    +
  2. +
  3. +

    IDE

    +
    +
      +
    • +

      Download a most recent Eclipse

      +
    • +
    • +

      Download a MrChecker Project https://downgit.github.io//home?url=https://github.com/devonfw/mrchecker/tree/develop/template[Template] to start a new project or Mrchecker Project https://downgit.github.io//home?url=https://github.com/devonfw/mrchecker/tree/develop/example[Example] to get better understanding what we are capable of.

      +
    • +
    • +

      You should consider installing some useful plugins such as: csvedit, cucumber editor.

      +
    • +
    • +

      Import:

      +
      +
      +install win05 +
      +
      +
    • +
    • +

      Projects from folders

      +
      +
      +install win06 +
      +
      +
    • +
    • +

      Open already created projects:

      +
      +
      +install win07 +
      +
      +
    • +
    • +

      Update project structure - ALT + F5

      +
      +
      +install win08 +
      +
      +
    • +
    +
    +
  4. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/MrChecker-download/Windows/Easy-out-of-the-box.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/MrChecker-download/Windows/Easy-out-of-the-box.html new file mode 100644 index 00000000..a4f4c9c6 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/MrChecker-download/Windows/Easy-out-of-the-box.html @@ -0,0 +1,311 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Easy out of the Box

+
+
+
    +
  1. +

    Click on the link Ready to use MrChecker_Test_Environment for Junit4 or Ready to use MrChecker_Test_Environment for Junit5 and download the package

    +
  2. +
  3. +

    Unzip the downloaded MrChecker Test Framework into the folder C:\ on your PC - recommended tool: 7z All the necessary components, such as Eclipse, Java and Maven will be pre-installed for you. There is no need for any additional installations.

    +
    +

    Note: Please double check the place into which you have unzipped MrChecker_Test_Framework

    +
    +
  4. +
  5. +

    Go to folder C:\MrChecker_Test_Framework\ , into which Mr.Checker has been unzipped

    +
    +
    +install win01 +
    +
    +
  6. +
  7. +

    In order to run the program, double click on start-eclipse-with-java.bat

    +
    +

    (note that start-eclipse.bat won’t detect Java)

    +
    +
  8. +
  9. +

    Update project structure (ALT + F5)

    +
    +
    +install win02 +
    +
    +
  10. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/MrChecker-download/Windows/Out-of-the-box-installation.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/MrChecker-download/Windows/Out-of-the-box-installation.html new file mode 100644 index 00000000..d4094c4e --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/MrChecker-download/Windows/Out-of-the-box-installation.html @@ -0,0 +1,301 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Out of the box installation

+
+
+
    +
  1. +

    Start from Easy out of the box installation

    +
  2. +
  3. +

    Open Eclipse

    +
  4. +
  5. +

    Manually Delete folders that appear in Eclipse

    +
  6. +
  7. +

    Click inside Eclipse with a right mouse button and open Import

    +
  8. +
  9. +

    Select Maven → existing Maven project

    +
  10. +
  11. +

    Select Mr Checker → workspace → devonfw-testing and click OK

    +
  12. +
+
+
+

All test folders should be imported into Eclipse and ready to use.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-BDD-Gherkin-Cucumber-approach.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-BDD-Gherkin-Cucumber-approach.html new file mode 100644 index 00000000..d66b9ace --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-BDD-Gherkin-Cucumber-approach.html @@ -0,0 +1,587 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Overview

+
+
+

Cucumber / Selenium

+
+
+

Business and IT don’t always understand each other. Very often misunderstandings between business and IT result in the costly failure of IT projects. With this in mind, Cucumber was developed as a tool to support human collaboration between business and IT.

+
+
+

Cucumber uses executable specifications to encourage a close collaboration. This helps teams to keep the business goal in mind at all times. With Cucumber you can merge specification and test documentation into one cohesive whole, allowing your team to maintain one single source of truth. Because these executable specifications are automatically tested by Cucumber, your single source of truth is always up-to-date.

+
+
+
+image40 +
+
+
+

Cucumber supports testers when designing test cases. To automate these test cases, several languages can be used. Cucumber also works well with Browser Automation tools such as Selenium Webdriver.

+
+
+
+
+

== Selenium

+
+
+

Selenium automates browsers and is used for automating web applications for testing purposes. Selenium offers testers and developers full access to the properties of objects and the underlying tests, via a scripting environment and integrated debugging options.

+
+
+

Selenium consists of many parts. If you want to create robust, browser-based regression automation suites and tests, Selenium Webdriver is most appropriate. With Selenium Webdriver you can also scale and distribute scripts across many environments.

+
+
+
+
+

Strengths

+
+ +
+
+
+

== Supports BDD

+
+
+

Those familiar with Behavior Driven Development (BDD) recognize Cucumber as an excellent open source tool that supports this practice.

+
+
+
+
+

== All in one place

+
+
+

With Cucumber / Selenium you can automate at the UI level. Automation at the unit or API level can also be implemented using Cucumber. This means all tests, regardless of the level at which they are implemented, can be implemented in one tool.

+
+
+
+
+

== Maintainable test scripts

+
+
+

Many teams seem to prefer UI level automation, despite huge cost of maintaining UI level tests compared to the cost of maintaining API or unit tests. To lessen the maintenance of UI testing, when designing UI level functional tests, you can try describing the test and the automation at three levels: business rule, UI workflow, technical implementation.

+
+
+

When using Cucumber combined with Selenium, you can implement these three levels for better maintenance.

+
+
+
+
+

== Early start

+
+
+

Executable specifications can and should be written before the functionality is implemented. By starting early, teams get most return on investment from their test automation.

+
+
+
+
+

== Supported by a large community

+
+
+

Cucumber and Selenium are both open source tools with a large community, online resources and mailing lists.

+
+
+
+
+

How to run cucumber tests in Mr.Checker

+
+ +
+
+
+

Command line / Jenkins

+
+
+
    +
  • +

    Run cucumber tests and generate Allure report. Please use this for Jenkins execution. Report is saved under ./target/site.

    +
    +
    +
    mvn clean -P cucumber test site
    +
    +
    +
  • +
  • +

    Run and generate report

    +
    +
    +
    mvn clean -P cucumber test site allure:report
    +
    +
    +
  • +
  • +

    Run cucumber tests, generate Allure report and start standalone report server

    +
    +
    +
    mvn clean -P cucumber test site allure:serve
    +
    +
    +
  • +
+
+
+
+
+

Eclipse IDE

+
+
+
+image41 +
+
+
+
+
+

Tooling

+
+ +
+
+
+

== Cucumber

+
+
+

Cucumber supports over a dozen different software platforms. Every Cucumber implementation provides the same overall functionality, but they also have their own installation procedure and platform-specific functionality. See https://cucumber.io/docs for all Cucumber implementations and framework implementations.

+
+
+

Also, IDEs such as Intellij offer several plugins for Cucumber support.

+
+
+
+
+

== Selenium

+
+
+

Selenium has the support of some of the largest browser vendors who have taken (or are taking) steps to make Selenium a native part of their browser. It is also the core technology in countless other browser automation tools, APIs and frameworks.

+
+
+
+
+

Automation process

+
+ +
+
+
+

== Write a feature file

+
+
+

Test automation in Cucumber starts with writing a feature file. A feature normally consists of several (test)scenarios and each scenario consists of several steps.

+
+
+

Feature: Refund item

+
+
+

Scenario: Jeff returns a faulty microwave

+
+
+

Given Jeff has bought a microwave for $100

+
+
+

And he has a receipt

+
+
+

When he returns the microwave

+
+
+

Then Jeff should be refunded $100

+
+
+

The above example shows a feature “Refund item” with one scenario “Jeff returns a faulty microwave”. The scenario consists of four steps, each starting with a keyword (Given, And, When, Then).

+
+
+
+
+

== Implementing the steps

+
+
+

Next the steps are implemented. Assuming we use Java to implement the steps, the Java code will look something like this.

+
+
+
+
public class MyStepdefs \{
+
+	@Given("Jeff has bought a microwave for $(\d+)")
+
+	public void Jeff_has_bought_a_microwave_for(int amount) \{
+
+		// implementation can be plain java
+
+		// or selenium
+
+		driver.findElement(By.name("test")).sendKeys("This is an example\n");
+
+		driver.findElement(By.name("button")).click();// etc
+	}
+}
+
+
+
+

Cucumber uses an annotation (highlighted) to match the step from the feature file with the function implementing the step in the Java class. The name of the class and the function can be as the developer sees fit. Selenium code can be used within the function to automate interaction with the browser.

+
+
+
+
+

== Running scenarios

+
+
+

There are several ways to run scenarios with Cucumber, for example the JUnit runner, a command line runner and several third party runners.

+
+
+
+
+

== Reporting test results

+
+
+

Cucumber can report results in several different formats, using formatter plugins

+
+
+
+
+

Features

+
+ +
+
+
+

== Feature files using Gherkin

+
+
+

Cucumber executes your feature files. As shown in the example below, feature files in Gherkin are easy to read so they can be shared between IT and business. Data tables can be used to execute a scenario with different inputs.

+
+
+
+image42 +
+
+
+
+
+

== Organizing tests

+
+
+

Feature files are placed in a directory structure and together form a feature tree.

+
+
+

Tags can be used to group features based on all kinds of categories. Cucumber can include or exclude tests with certain tags when running the tests.

+
+
+
+
+

Reporting test results

+
+
+

Cucumber can report results in several formats, using formatter plugins. +Not supported option by Shared Services: The output from Cucumber can be used to present test results in Jenkins or Hudson depending of the preference of the project.

+
+
+
+image43 +
+
+
+
+
+

HOW IS Cucumber / Selenium USED AT Capgemini?

+
+ +
+
+
+

Tool deployment

+
+
+

Cucumber and Selenium are chosen as one of Capgemini’s test automation industrial tools. We support the Java implementation of Cucumber and Selenium Webdriver. We can help with creating Cucumber, Selenium projects in Eclipse and IntelliJ.

+
+
+
+
+

Application in ATaaS (Automated Testing as a Service)

+
+
+

In the context of industrialisation, Capgemini has developed a range of services to assist and support the projects in process and tools implementation.

+
+
+

In this context a team of experts assists projects using test automation.

+
+
+

The main services provided by the center of expertise are:

+
+
+
    +
  • +

    Advise on the feasibility of automation.

    +
  • +
  • +

    Support with installation.

    +
  • +
  • +

    Coaching teams in the use of BDD.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Data-driven-approach.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Data-driven-approach.html new file mode 100644 index 00000000..eeb409ce --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Data-driven-approach.html @@ -0,0 +1,550 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Data driven approach

+
+
+

Data driven approach - External data driven

+
+
+

External data driven - Data as external file injected in test case

+
+
+

Test case - Categorize functionality and severity

+
+
+

You can find more information about data driven here and here

+
+
+

There are a few ways to define parameters for tests.

+
+
+
+
+

Internal Data driven approach

+
+
+

Data as part of test case

+
+
+

The different means to pass in parameters are shown below.

+
+
+

Since mrchecker-core-module version 5.6.2.1

+
+
+

Static methods are used to provide the parameters.

+
+
+
+
+

A method in the test class:

+
+
+
+
@ParameterizedTest
+@MethodSource("argumentsStream")
+
+
+
+

OR

+
+
+
+
@ParameterizedTest
+@MethodSource("arrayStream")
+
+
+
+

In the first case the arguments are directly mapped to the test method parameters. In the second case the array is passed as the argument.

+
+
+
+image30 new +
+
+
+
+
+

A method in a different class:

+
+
+
+
@ParameterizedTest
+@MethodSource("com.capgemini.mrchecker.core.datadriven.MyContainsTestProvider#provideContainsTrueParameters")
+
+
+
+
+image32 new +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1

+
+
+

Parameters that are passed into tests using the @Parameters annotation must be _Object[]_s

+
+
+
+
+

In the annotation:

+
+
+
+
@Parameters({"1, 2, 3", "3, 4, 7", "5, 6, 11", "7, 8, 15"})
+
+
+
+
+image30 +
+
+
+

The parameters must be primitive objects such as integers, strings, or booleans. Each set of parameters is contained within a single string and will be parsed to their correct values as defined by the test method’s signature.

+
+
+
+
+

In a method named in the annotation:

+
+
+
+
@Parameters(method = "addParameters")
+
+
+
+
+image31 +
+
+
+

A separate method can be defined and referred to for parameters. This method must return an Object[] and can contain normal objects.

+
+
+
+
+

In a class:

+
+
+
+
@Parameters(source = MyContainsTestProvider.class)
+
+
+
+
+image32 +
+
+
+

A separate class can be used to define parameters for the test. This test must contain at least one static method that returns an Object[], and its name must be prefixed with provide. The class could also contain multiple methods that provide parameters to the test, as long as they also meet the required criteria.

+
+
+
+
+

External Data Driven

+
+
+

Data as external file injected in test case

+
+
+

Since mrchecker-core-module version 5.6.2.1

+
+
+

Tests use the annotation @CsvFileSource to inject CSVs file.

+
+
+
+
@CsvFileSource(resources = "/datadriven/test.csv", numLinesToSkip = 1)
+
+
+
+

A CSV can also be used to contain the parameters for the tests. It is pretty simple to set up, as it’s just a comma-separated list.

+
+
+
+
+

Classic CSV

+
+
+
+image33 new +
+
+
+

and CSV file structure

+
+
+
+image34 +
+
+
+
+
+

CSV with headers

+
+
+
+image35 new +
+
+
+

and CSV file structure

+
+
+
+image36 +
+
+
+
+
+

CSV with specific column mapper

+
+
+
+image37 new +
+
+
+

and Mapper implementation

+
+
+
+image38 new +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1

+
+
+

Tests use the annotation @FileParameters to inject CSVs file.

+
+
+
+
@FileParameters("src/test/resources/datadriven/test.csv")
+
+
+
+

A CSV can also be used to contain the parameters for the tests. It is pretty simple to set up, as it’s just a comma-separated list.

+
+
+
+
+

Classic CSV

+
+
+
+image33 +
+
+
+

and CSV file structure

+
+
+
+image34 +
+
+
+
+
+

CSV with headers

+
+
+
+image35 +
+
+
+

and CSV file structure

+
+
+
+image36 +
+
+
+
+
+

CSV with specific column mapper

+
+
+
+image37 +
+
+
+

and Mapper implementation

+
+
+
+image38 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Different-Environments.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Different-Environments.html new file mode 100644 index 00000000..6a6d5046 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Different-Environments.html @@ -0,0 +1,513 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

System under test environments

+
+
+
+image080 +
+
+
+
    +
  • +

    Quality assurance or QA is a way of preventing mistakes or defects in the manufactured products and avoiding problems when delivering solutions or services to customers which ISO 9000 defines as "part of quality management focused on providing confidence that quality requirements will be fulfilled".

    +
  • +
  • +

    System integration testing or SIT is a high-level software testing process in which testers verify that all related systems maintain data integrity and can operate in coordination with other systems in the same environment. The testing process ensures that all sub-components are integrated successfully to provide expected results.

    +
  • +
  • +

    Development or Dev testing is performed by the software developer or engineer during the construction phase of the software development life-cycle. Rather than replace traditional QA focuses, it augments it. Development testing aims to eliminate construction errors before code is promoted to QA; this strategy is intended to increase the quality of the resulting software as well as the efficiency of the overall development and QA process.

    +
  • +
  • +

    Prod If the customer accepts the product, it is deployed to a production environment, making it available to all users of the system.

    +
  • +
+
+
+
+image051 +
+
+
+
+
+

How to use system environment

+
+
+

In Page classes, when you load / start a web page, it is uncommon to save a fixed main URL.

+
+
+

Value flexibility is a must, when your web application under test has a different main URL, depending on the environment (DEV, QA, SIT, …​, PROD)

+
+
+

Instead of hard coded main url variable, you build your Page classes with dynamic variable.

+
+
+

An example of dynamic variable GetEnvironmentParam.WWW_FONT_URL

+
+
+
+image081 +
+
+
+
+
+

How to create / update system environment

+
+ +
+
+
+

External file with variable values

+
+
+

Dynamic variable values are stored under mrchecker-app-under-test\src\resources\enviroments\environments.csv.

+
+
+

NOTE: As environments.csv is a comma-separated file, please be careful while editing and then save it under Excel.

+
+
+
+image082 +
+
+
+
+
+

Encrypting sensitive data

+
+
+

Some types of data you might want to store as environment settings are sensitive in nature (e.g. passwords). You might not want to store them (at least not in their plaintext form) in your repository. To be able to encrypt sensitive data you need to do following:

+
+
+
    +
  1. +

    Create a secret (long, random chain of characters) and store it under mrchecker-app-under-test\src\resources\secretData.txt. Example: LhwbTm9V3FUbBO5Tt5PiTUEQrXGgWrDLCMthnzLKNy1zA5FVTFiTdHRQAyPRIGXmsAjPUPlJSoSLeSBM

    +
  2. +
  3. +

    Exclude the file from being checked into the git repository by adding it to git.ignore. You will need to pass the file over a different channel among your teammates.

    +
  4. +
  5. +

    Encrypt the values before putting them into the environments.csv file by creating following script (put the script where your jasypt library resides, e.g. C:\MrChecker_Test_Framework\m2\repository\org\jasypt\jasypt\1.9.2):

    +
  6. +
+
+
+
+
@ECHO OFF
+
+set SCRIPT_NAME=encrypt.bat
+set EXECUTABLE_CLASS=org.jasypt.intf.cli.JasyptPBEStringEncryptionCLI
+set EXEC_CLASSPATH=jasypt-1.9.2.jar
+if "%JASYPT_CLASSPATH%" ==  "" goto computeclasspath
+set EXEC_CLASSPATH=%EXEC_CLASSPATH%;%JASYPT_CLASSPATH%
+
+:computeclasspath
+IF "%OS%" ==  "Windows_NT" setlocal ENABLEDELAYEDEXPANSION
+FOR %%c in (%~dp0..\lib\*.jar) DO set EXEC_CLASSPATH=!EXEC_CLASSPATH!;%%c
+IF "%OS%" ==  "Windows_NT" setlocal DISABLEDELAYEDEXPANSION
+
+set JAVA_EXECUTABLE=java
+if "%JAVA_HOME%" ==  "" goto execute
+set JAVA_EXECUTABLE="%JAVA_HOME%\bin\java"
+
+:execute
+%JAVA_EXECUTABLE% -classpath %EXEC_CLASSPATH% %EXECUTABLE_CLASS% %SCRIPT_NAME% %*
+
+
+
+
    +
  1. +

    Encrypt the values by calling

    +
  2. +
+
+
+
+
.\encrypt.bat input=someinput password=secret
+
+----ENVIRONMENT-----------------
+
+Runtime: Oracle Corporation Java HotSpot(TM) 64-Bit Server VM 25.111-b14
+
+
+
+----ARGUMENTS-------------------
+
+input: someinput
+password: secret
+
+
+
+----OUTPUT----------------------
+
+JN3nOFol2GMZoUxR5z2wI2qdipcNH1UD
+
+
+
+
    +
  1. +

    Mark the value as encrypted by adding a prefix 'ENC(' and suffix ')' like: ENC(JN3nOFol2GMZoUxR5z2wI2qdipcNH1UD)

    +
  2. +
+
+
+
+image083 +
+
+
+
+
+

Bridge between external file and Page class

+
+
+

To map values from external file with Page class you ought to use class GetEnvironmentParam

+
+
+

Therefore when you add new variable (row) in environments.csv you might need to add this variable to GetEnvironmentParam.

+
+
+
+image084 +
+
+
+
+
+

Run test case with system environment

+
+
+

To run test case with system environment, please use: +* -Denv=\<NameOfEnvironment\> +* \<NameOfEnvironment\> is taken as column name from file mrchecker-app-under-test\src\test\resources\enviroments\environments.csv

+
+
+

Since mrchecker-core-module version 5.6.2.1 +== Command Line

+
+
+
+
mvn test site -Dgroups=RegistryPageTestTag -Denv=DEV
+
+
+
+
+
+

Eclipse

+
+
+
+image085 +
+
+
+
+image086 new +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1 +== Command Line

+
+
+
+
mvn test site -Dtest=RegistryPageTest -Denv=DEV
+
+
+
+
+
+

Eclipse

+
+
+
+image085 +
+
+
+
+image086 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Framework-Test-Class.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Framework-Test-Class.html new file mode 100644 index 00000000..56f9699b --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Framework-Test-Class.html @@ -0,0 +1,416 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Test Class

+
+ +
+
+
+

Overview

+
+
+

The following image gives a general overview of a test class "lifecycle".

+
+
+
+image52 +
+
+
+

More information on the methods and annotations used in this image can be found in the following chapter.

+
+
+
+
+

Methods and annotations

+
+
+

The actual tests that will be executed are located in the so-called Test Classes. Starting a new project, a new package should be created.

+
+
+

Source folder: mrchecker-app-under-test/src/test/java

+
+
+

Name: com.example.selenium.tests.tests.YOUR_PROJECT

+
+
+

Test classes have to extend the BaseTest class.

+
+
+
+
public class DemoTest extends BaseTest {
+
+	@Override
+	public void setUp() {
+
+	}
+
+	@Override
+	public void tearDown() {
+
+	}
+}
+
+
+
+
+
+

BasePage method: setUp

+
+
+

This method will be executed before the test. It allows objects to be instantiated, e.g. Page objects.

+
+
+
+
@Override
+public void setUp() {
+	someTestPage = new SomeTestPage();
+}
+
+
+
+
+
+

BasePage method: tearDown

+
+
+

The tearDown methods executes after the test. It allows the clean up of the testing environment.

+
+
+
+
+

Annotations

+
+
+

The @Test annotation indicates that the following method is a test method.

+
+
+

Additionally, there are two annotations that can help preparing and disassembling the test class: @BeforeClass and @AfterClass.

+
+
+

@BeforeClass will execute the following method once at the beginning, before running any test method. Compared to the setUp() method provided by the BaseTest class, this annotation will only run once, instead of before every single test method. The advantage here: things like login can be set up in @BeforeClass, as they can often be very time consuming. Logging in on a webapplication once and afterwards running all the test methods is more efficient than logging in before every test method, especially if they are being executed on the same page.

+
+
+

@AfterClass will execute after the last test method. Just like @BeforeClass this method will only run once, in contrary to the tearDown() method.

+
+
+

Initialize a new test method by using the @Test annotation.

+
+
+
+
@Test
+public void willResultBeShown() {
+
+}
+
+
+
+

This method will interact with a page object in order to test it.

+
+
+
+
+

Sample Setup

+
+
+
+
@BeforeClass
+public static void setUpBeforeClass() throws Exception {
+	BFLogger.logInfo("[Step1] Login as Account Administrator");
+}
+
+@AfterClass
+public static void tearDownAfterClass() throws Exception {
+	BFLogger.logInfo("[Step4] Logout");
+}
+
+@Override
+public void setUp() {
+	BFLogger.logInfo("Open home page before each test");
+}
+
+@Override
+public void tearDown() {
+	BFLogger.logInfo("Clean all data updated while executing each test");
+}
+
+@Test
+public void test1() {
+	BFLogger.logInfo("[Step2] Filter by \"Creation Date\" - Descending");
+	BFLogger.logInfo("[Step3] Set $1 for first 10 Users in column \"Invoice to pay\"");
+
+}
+
+@Test
+public void test2() {
+	BFLogger.logInfo("[Step2] Filter by \"Invoice to pay\" - Ascending");
+	BFLogger.logInfo("[Step3] Set $100 for first 10 Users in column \"Invoice to pay\"");
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Test-case-parallel-execution.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Test-case-parallel-execution.html new file mode 100644 index 00000000..06f5eb52 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Test-case-parallel-execution.html @@ -0,0 +1,318 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

What is "Parallel test execution" ?

+
+
+

Parallel test execution means many "Test Classes" can run simultaneously.

+
+
+

"Test Class", as this is a Junit Test class, it can have one or more test cases - "Test case methods"

+
+
+
+image39 +
+
+
+
+
+

How many parallel test classes can run simultaneously?

+
+
+

Since mrchecker-core-module version 5.6.2.1

+
+
+

JUnit5 supports parallelism natively. The feature is configured using a property file located at src\test\resources\junit-platform.properties. +As per default configuration, concurrent test execution is set to run test classes in parallel using the thread count equal to a number of your CPUs.

+
+
+
+image39a +
+
+
+

Visit JUnit5 site to learn more about parallel test execution.

+
+
+

Prior to mrchecker-core-module version 5.6.2.1

+
+
+

By default, number of parallel test classes is set to 8.

+
+
+

It can be updated as you please, on demand, by command line:

+
+
+
+
mvn test site -Dtest=TS_Tag1 -Dthread.count=16
+
+
+
+

-Dthread.count=16 - increase number of parallel Test Class execution to 16.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-test-groups-tags.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-test-groups-tags.html new file mode 100644 index 00000000..bca89b96 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-test-groups-tags.html @@ -0,0 +1,602 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Why join Test Cases in groups - Test Suites

+
+
+
+image22 +
+
+
+
+
+

Regression Suite:

+
+
+

Regression testing is a type of software testing which verifies that software which was previously developed and tested still performs the same way after it was changed or interfaced with another software.

+
+
+
    +
  • +

    Smoke

    +
  • +
  • +

    Business vital functionalities

    +
  • +
  • +

    Full scope of test cases

    +
  • +
+
+
+
+
+

Functional Suite:

+
+
+
    +
  • +

    Smoke

    +
  • +
  • +

    Business function A

    +
  • +
  • +

    Business function B

    +
  • +
+
+
+
+
+

Single Responsibility Unit:

+
+
+
    +
  • +

    Single page

    +
  • +
  • +

    Specific test case

    +
  • +
+
+
+
+
+

How to build a Test Suite based on tags

+
+ +
+
+
+

Structure of the Test Suite

+
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+
+image23 new +
+
+
+

Where:

+
+
+
    +
  • +

    @RunWith(JUnitPlatform.class) - use Junit5 runner

    +
  • +
  • +

    @IncludeTags({"TestsTag1"}) - search all test files with the tag "TestsTag1"

    +
  • +
  • +

    @ExcludeTags({"TagToExclude"}) - exclude test files with the tag "TagToExclude"

    +
  • +
  • +

    @SelectPackages("com.capgemini.mrchecker.core.groupTestCases.testCases") - search only test files in "com.capgemini.mrchecker.core.groupTestCases.testCases" package

    +
  • +
  • +

    public class TS_Tag1 - the name of the Test Suite is "TS_Tag1"

    +
  • +
+
+
+

Most commonly used filters to build a Test Suite are ones using:

+
+
+
    +
  • +

    @IncludeTags({ })

    +
  • +
  • +

    @ExcludeTags({ })

    +
  • +
+
+
+

Example:

+
+
+
    +
  1. +

    @IncludeTags({ "TestsTag1" }) , @ExcludeTags({ }) → will execute all test cases with the tag TestsTag1

    +
  2. +
  3. +

    @IncludeTags({ "TestsTag1" }) , @ExcludeTags({ "SlowTest" }) → will execute all test cases with tag "TestsTag1" although it will exclude from this list the test cases with the tag "SlowTest"

    +
  4. +
  5. +

    @IncludeTags({ }) , @ExcludeTags({ "SlowTest" }) → It will exclude test cases with the tag "SlowTest"

    +
  6. +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+image23 +
+
+
+

Where:

+
+
+
    +
  • +

    @RunWith(WildcardPatternSuiteBF.class) - search for test files under /src/test/java

    +
  • +
  • +

    @IncludeCategories({ TestsTag1.class }) - search for all test files with the tag "TestsTag1.class"

    +
  • +
  • +

    @ExcludeCategories({ }) - exclude test files. In this example, there is no exclusion

    +
  • +
  • +

    @SuiteClasses({ "**/*Test.class" }) - search only test files, where the file name ends with "<anyChar/s>Test.class"

    +
  • +
  • +

    public class TS_Tag1 - the name of the Test Suite is "TS_Tag1"

    +
  • +
+
+
+

Most commonly used filters to build Test Suite are ones using:

+
+
+
    +
  • +

    @IncludeCategories({ })

    +
  • +
  • +

    @ExcludeCategories({ })

    +
  • +
+
+
+

Example:

+
+
+
    +
  1. +

    @IncludeCategories({ TestsTag1.class }) , @ExcludeCategories({ }) → will execute all test cases with the tag TestsTag1.class

    +
  2. +
  3. +

    @IncludeCategories({ TestsTag1.class }) , @ExcludeCategories({ SlowTest.class }) → will execute all test cases with the tag "TestsTag1.class" although it will exclude from this list the test cases with the tag "SlowTest.class"

    +
  4. +
  5. +

    @IncludeCategories({ }) , @ExcludeCategories({ SlowTest.class }) → will execute all test cases from /src/test/java, although it will exclude from this list the test cases with the tag "SlowTest.class"

    +
  6. +
+
+
+
+
+

Structure of Test Case

+
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+
+image24 new +
+
+
+

Where:

+
+
+
    +
  • +

    @TestsTag1, @TestsSmoke, @TestsSelenium - list of tags assigned to this test case - "TestsTag1, TestsSmoke, TestSelenium" annotations

    +
  • +
  • +

    public class FristTest_tag1_Test - the name of the test case is "FristTest_tag1_Test"

    +
  • +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+image24 +
+
+
+

Where:

+
+
+
    +
  • +

    @Category({ TestsTag1.class, TestsSmoke.class, TestSelenium.class }) - list of tags / categories assigned to this test case - "TestsTag1.class, TestsSmoke.class, TestSelenium.class"

    +
  • +
  • +

    public class FristTest_tag1_Test - the name of the test case is "FristTest_tag1_Test"

    +
  • +
+
+
+
+
+

Structure of Tags / Categories

+
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+

Tag name: TestsTag1 annotation

+
+
+
+image25 new +
+
+
+

Tag name: TestsSmoke annotation

+
+
+
+image26 new +
+
+
+

Tag name: TestSelenium annotation

+
+
+
+image27 new +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+

Tag name: TestsTag1.class

+
+
+
+image25 +
+
+
+

Tag name: TestsSmoke.class

+
+
+
+image26 +
+
+
+

Tag name: TestSelenium.class

+
+
+
+image27 +
+
+
+
+
+

How to run Test Suite

+
+
+

To run a Test Suite you perform the same steps as you do to run a test case

+
+
+

Command line

+
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+

JUnit5 disallows running suite classes from maven. Use -Dgroups=Tag1,Tag2 and -DexcludeGroups=Tag4,Tag5 to create test suites in maven.

+
+
+
+
mvn test site -Dgroups=TestsTag1
+
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+
mvn test site -Dtest=TS_Tag1
+
+
+
+

Eclipse

+
+
+
+image28 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module.html new file mode 100644 index 00000000..441e0769 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module.html @@ -0,0 +1,331 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Core Test Module

+
+ +
+
+
+

What is Core Test Module

+
+
+
+image1 new +
+
+
+
+ +
+

How to start?

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Database-Test-Module.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Database-Test-Module.html new file mode 100644 index 00000000..e7318ef8 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Database-Test-Module.html @@ -0,0 +1,311 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Database Test Module

+
+ +
+
+
+

What is MrChecker Database Test Module

+
+
+

Database module is based on Object-Relational Mapping programming technique. All functionalities are built using Java Persistence API but examples use Hibernate as a main provider.

+
+
+
+
+

JPA structure schema

+
+
+

This module was written to allow the use of any JPA provider. The structure is represented in the schema below.

+
+
+
+image3 +
+
+
+
+
+

ORM representation applied in Framework

+
+
+
+image4 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Continuous-Delivery-CD.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Continuous-Delivery-CD.html new file mode 100644 index 00000000..a57e8330 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Continuous-Delivery-CD.html @@ -0,0 +1,323 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Continuous Delivery

+
+
+

Include quality with Continuous Delivery during product release.

+
+
+
+image87 +
+
+
+
+
+

Overview

+
+
+

CD from Jenkins point of view does not change a lot from Continuous Integration one.

+
+
+
+
+

Jenkins Overview

+
+
+

Use the same Jenkins settings for the Jenkins CD setup as for CI, please (link). The only differences are:

+
+
+
    +
  • +

    What type of test you will execute. Before, we have been choosing test case(s), now we will choose test suite(s)

    +
  • +
  • +

    Who will trigger the given Smoke/Integration/Performance job

    +
  • +
  • +

    What is the name of the official branch. This branch ought always to be used in every CD execution. It will be either master or develop.

    +
  • +
+
+
+
+
+

Jenkins for Smoke Tests

+
+
+

In the $TESTNAME variable, where we input the test name( link ), please input the name of a test suite assembled together of tests tagged as smoke tests -( link ) thus running all the smoke tests.

+
+
+
+
+

Jenkins for Performance Tests

+
+
+

Under construction - added when WebAPI module is included.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Continuous-Integration-CI.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Continuous-Integration-CI.html new file mode 100644 index 00000000..eca3837f --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Continuous-Integration-CI.html @@ -0,0 +1,414 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Continuous Integration

+
+
+

Embrace quality with Continuous Integration while you produce test case(s).

+
+
+
+
+

Overview

+
+
+

There are two ways to set up your Continuous Integration environment:

+
+
+
    +
  1. +

    Create a Jenkins instance from scratch (e.g. by using the Jenkins Docker image)

    +
    +

    Using a clean Jenkins instance requires the installation of additional plugins. The plugins required and their versions can be found on this page.

    +
    +
  2. +
  3. +

    Use the pre-configured custom Docker image provided by us

    +
    +

    No more additional configuration is required (but optional) using this custom Docker image. Additionally, this Jenkins setup allows dynamical scaling across multiple machines and even cloud (AWS, Azure, Google Cloud etc.).

    +
    +
  4. +
+
+
+
+
+

Jenkins Overview

+
+
+

Jenkins is an Open Source Continuous Integration Tool. It allows the user to create automated build jobs which will run remotely on so called Jenkins Slaves. A build job can be triggered by several events, for example on new pull request on specified repositories or timed (e.g. at midnight).

+
+
+
+
+

Jenkins Configuration

+
+
+

Tests created by using the testing framework can easily be implemented on a Jenkins instance. The following chapter will describe such a job configuration. If you’re running your own Jenkins instance, you may have to install additional plugins listed on the page Jenkins Plugins for a trouble-free integration of your tests.

+
+
+
+
+

== Initial Configuration

+
+
+

The test job is configured as a so-called parameterized job. This means, after starting the job, parameters can be specified, which will then be used in the build process. In this case, branch and testname will be expected when starting the job. These parameters specify which branch in the code repository should be checked out (possibly feature branch) and the name of the test that should be executed.

+
+
+
+image79 +
+
+
+
+
+

== Build Process Configuration

+
+
+
    +
  • +

    The first step inside the build process configuration is to get the author of the commit that was made. The mail will be extracted and gets stored in a file called build.properties. This way, the author can be notified if the build fails.

    +
    +
    +image80 +
    +
    +
  • +
  • +

    Next up, Maven will be used to check if the code can be compiled, without running any tests.

    +
    +
    +image81 +
    +
    +
    +

    After making sure that the code can be compiled, the actual tests will be executed.

    +
    +
    +
    +image82 +
    +
    +
  • +
  • +

    Finally, reports will be generated.

    +
    +
    +image83 +
    +
    +
  • +
+
+
+
+
+

== Post Build Configuration

+
+
+
    +
  • +

    At first, the results will be imported to the Allure System

    +
    +
    +image84 +
    +
    +
  • +
  • +

    JUnit test results will be reported as well. Using this step, the test result trend graph will be displayed on the Jenkins job overview.

    +
    +
    +image85 +
    +
    +
  • +
  • +

    Finally, an E-Mail will be sent to the previously extracted author of the commit.

    +
    +
    +image86 +
    +
    +
  • +
+
+
+
+
+

Using the Pre-Configured Custom Docker Image

+
+
+

If you are starting a new Jenkins instance for your tests, we’d suggest using the pre-configured Docker image. This image already contains all the configurations and additional features.

+
+
+

The configurations are e.g. Plugins and Pre-Installed job setup samples. This way, you don’t have to set up the entire CI-Environment from the ground up.

+
+
+

Additional features from this docker image allow dynamic creation and deletion of Jenkins slaves, by creating Docker containers. Also, Cloud Solutions can be implemented to allow wide-spread load balancing.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Docker-commands.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Docker-commands.html new file mode 100644 index 00000000..8f8e67e5 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Docker-commands.html @@ -0,0 +1,546 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

What is Docker

+
+
+

Docker is an open source software platform to create, deploy and manage virtualized application containers on a common operating system (OS), with an ecosystem of allied tools.

+
+
+
+
+

Where do we use Docker

+
+
+

DevOps module consists of Docker images

+
+
+
    +
  1. +

    Jenkins image

    +
  2. +
  3. +

    Jenkins job image

    +
  4. +
  5. +

    Jenkins management image

    +
  6. +
  7. +

    Security image

    +
  8. +
+
+
+

in addition, each new node is also based on Docker

+
+
+
+
+

Exploring basic Docker options

+
+
+

Let’s show some of the most important commands that are needed when working with our DevOps module based on the Docker platform. Each command given below should be preceded by a sudo call by default. If you don’t want to use the sudo command, create a Unix group called docker and add your user to it.

+
+
+
+
$ sudo groupadd docker
+$ sudo usermod -aG docker $USER
+
+
+
+
+
+

Build an image from a Dockerfile

+
+
+
+
##docker build [OPTIONS] PATH | URL | -
+##
+##Options:
+## --tag , -t : Name and optionally a tag in the ‘name:tag’ format
+
+$ docker build -t vc_jenkins_jobs .
+
+
+
+
+
+

Container start

+
+
+
+
##docker run [OPTIONS] IMAGE[:TAG|@DIGEST] [COMMAND] [ARG...]
+#
+##Options:
+##-d : To start a container in detached mode (background)
+##-it : interactive terminal
+##--name : assign a container name
+##--rm : clean up
+##--volumes-from="": Mount all volumes from the given container(s)
+##-p : explicitly map a single port or range of ports
+##--volume : storage associated with the image
+
+$ docker run -d --name vc_jenkins_jobs vc_jenkins_jobs
+
+
+
+
+
+

Remove one or more containers

+
+
+
+
##docker rm [OPTIONS] CONTAINER
+#
+##Options:
+##--force , -f : Force the removal of a running container
+
+$ docker rm -f jenkins
+
+
+
+
+
+

List containers

+
+
+
+
##docker ps [OPTIONS]
+##--all, -a : Show all containers (default shows just running)
+
+$ docker ps
+
+
+
+
+
+

Pull an image or a repository from a registry

+
+
+
+
##docker pull [OPTIONS] NAME[:TAG|@DIGEST]
+
+$ docker pull jenkins/jenkins:2.73.1
+
+
+
+
+
+

Push the image or a repository to a registry

+
+
+

Pushing a new image takes place in two steps: first save the image by adding the container ID to the commit command, and then use push:

+
+
+
+
##docker push [OPTIONS] NAME[:TAG]
+
+$ docker ps
+  # copy container ID from the result
+$ docker commit b46778v943fh vc_jenkins_mng:project_x
+$ docker push vc_jenkins_mng:project_x
+
+
+
+
+
+

Return information on Docker object

+
+
+
+
##docker inspect [OPTIONS] NAME|ID [NAME|ID...]
+#
+##Options:
+##--format , -f : output format
+
+$ docker inspect -f '{{ .Mounts }}' vc_jenkins_mng
+
+
+
+
+
+

List images

+
+
+
+
##docker images [OPTIONS] [REPOSITORY[:TAG]]
+#
+##Options:
+--all , -a : show all images with intermediate images
+
+$ docker images
+$ docker images jenkins
+
+
+
+
+
+

Remove one or more images

+
+
+
+
##docker rmi [OPTIONS] IMAGE [IMAGE...]
+#
+##Options:
+##  --force , -f : Force removal of the image
+
+$ docker rmi jenkins/jenkins:latest
+
+
+
+
+
+

Run a command in a running container

+
+
+
+
##docker exec [OPTIONS] CONTAINER COMMAND [ARG...]
+##-d : run command in the background
+##-it : interactive terminal
+##-w : working directory inside the container
+##-e : Set environment variables
+
+$ docker exec vc_jenkins_jobs sh -c "chmod 755 config.xml"
+
+
+
+
+
+

Advanced commands

+
+ +
+
+
+

Remove dangling images

+
+
+
+
$ docker rmi $(docker images -f dangling=true -q)
+
+
+
+
+
+

Remove all images

+
+
+
+
$ docker rmi $(docker images -a -q)
+
+
+
+
+
+

Removing images according to a pattern

+
+
+
+
$ docker images | grep "pattern" | awk '{print $3}' | xargs docker rmi
+
+
+
+
+
+

Remove all exited containers

+
+
+
+
$ docker rm $(docker ps -a -f status=exited -q)
+
+
+
+
+
+

Remove all stopped containers

+
+
+
+
$ docker rm $(docker ps --no-trunc -aq)
+
+
+
+
+
+

Remove containers according to a pattern

+
+
+
+
$ docker ps -a | grep "pattern" | awk '{print $1}' | xargs docker rm
+
+
+
+
+
+

Remove dangling volumes

+
+
+
+
$ docker volume rm $(docker volume ls -f dangling=true -q)
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Jenkins-Plugins.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Jenkins-Plugins.html new file mode 100644 index 00000000..730e28cb --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Jenkins-Plugins.html @@ -0,0 +1,712 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

List of Jenkins Plugins

+
+
+

|===

+
+
+

|Plugin Name +|Version

+
+
+

|blueocean-github-pipeline +|1.1.4

+
+
+

|blueocean-display-url +|2.0

+
+
+

|blueocean +|1.1.4

+
+
+

|workflow-support +|2.14

+
+
+

|workflow-api +|2.18

+
+
+

|plain-credentials +|1.4

+
+
+

|pipeline-stage-tags-metadata +|1.1.8

+
+
+

|credentials-binding +|1.12

+
+
+

|git +|3.5.1

+
+
+

|maven-plugin +|2.17

+
+
+

|workflow-durable-task-step +|2.12

+
+
+

|job-dsl +|1.64

+
+
+

|git-server +|1.7

+
+
+

|windows-slaves +|1.3.1

+
+
+

|github +|1.27.0

+
+
+

|blueocean-personalization +|1.1.4

+
+
+

|jackson2-api +|2.7.3

+
+
+

|momentjs +|1.1.1

+
+
+

|workflow-basic-steps +|2.6

+
+
+

|workflow-aggregator +|2.5

+
+
+

|blueocean-rest +|1.1.4

+
+
+

|gradle +|1.27.1

+
+
+

|pipeline-maven +|3.0.0

+
+
+

|blueocean-pipeline-editor +|0.2.0

+
+
+

|durable-task +|1.14

+
+
+

|scm-api +|2.2.2

+
+
+

|pipeline-model-api +|1.1.8

+
+
+

|config-file-provider +|2.16.3

+
+
+

|github-api +|1.85.1

+
+
+

|pam-auth +|1.3

+
+
+

|workflow-cps-global-lib +|2.8

+
+
+

|github-organization-folder +|1.6

+
+
+

|workflow-job +|2.12.1

+
+
+

|variant +|1.1

+
+
+

|git-client +|2.5.0

+
+
+

|sse-gateway +|1.15

+
+
+

|script-security +|1.29.1

+
+
+

|token-macro +|2.1

+
+
+

|jquery-detached +|1.2.1

+
+
+

|blueocean-web +|1.1.4

+
+
+

|timestamper +|1.8.8

+
+
+

|greenballs +|1.15

+
+
+

|handlebars +|1.1.1

+
+
+

|blueocean-jwt +|1.1.4

+
+
+

|pipeline-stage-view +|2.8

+
+
+

|blueocean-i18n +|1.1.4

+
+
+

|blueocean-git-pipeline +|1.1.4

+
+
+

|ace-editor +|1.1

+
+
+

|pipeline-stage-step +|2.2

+
+
+

|email-ext +|2.58

+
+
+

|envinject-api +|1.2

+
+
+

|role-strategy +|2.5.1

+
+
+

|structs +|1.9

+
+
+

|locale +|1.2

+
+
+

|docker-workflow +|1.13

+
+
+

|ssh-credentials +|1.13

+
+
+

|blueocean-pipeline-scm-api +|1.1.4

+
+
+

|metrics +|3.1.2.10

+
+
+

|external-monitor-job +|1.7

+
+
+

|junit +|1.21

+
+
+

|github-branch-source +|2.0.6

+
+
+

|blueocean-config +|1.1.4

+
+
+

|cucumber-reports +|3.8.0

+
+
+

|pipeline-model-declarative-agent +|1.1.1

+
+
+

|blueocean-dashboard +|1.1.4

+
+
+

|subversion +|2.9

+
+
+

|blueocean-autofavorite +|1.0.0

+
+
+

|pipeline-rest-api +|2.8

+
+
+

|pipeline-input-step +|2.7

+
+
+

|matrix-project +|1.11

+
+
+

|pipeline-github-lib +|1.0

+
+
+

|workflow-multibranch +|2.16

+
+
+

|docker-plugin +|0.16.2

+
+
+

|resource-disposer +|0.6

+
+
+

|icon-shim +|2.0.3

+
+
+

|workflow-step-api +|2.12

+
+
+

|blueocean-events +|1.1.4

+
+
+

|workflow-scm-step +|2.6

+
+
+

|display-url-api +|2.0

+
+
+

|favorite +|2.3.0

+
+
+

|build-timeout +|1.18

+
+
+

|mapdb-api +|1.0.9.0

+
+
+

|pipeline-build-step +|2.5.1

+
+
+

|antisamy-markup-formatter +|1.5

+
+
+

|javadoc +|1.4

+
+
+

|blueocean-commons +|1.1.4

+
+
+

|cloudbees-folder +|6.1.2

+
+
+

|ssh-slaves +|1.20

+
+
+

|pubsub-light +|1.10

+
+
+

|pipeline-graph-analysis +|1.4

+
+
+

|allure-jenkins-plugin +|2.23

+
+
+

|mailer +|1.20

+
+
+

|ws-cleanup +|0.33

+
+
+

|authentication-tokens +|1.3

+
+
+

|blueocean-pipeline-api-impl +|1.1.4

+
+
+

|ldap +|1.16

+
+
+

|docker-commons +|1.8

+
+
+

|branch-api +|2.0.10

+
+
+

|workflow-cps +|2.36.1

+
+
+

|pipeline-model-definition +|1.1.8

+
+
+

|blueocean-rest-impl +|1.1.4

+
+
+

|ant +|1.7

+
+
+

|credentials +|2.1.14

+
+
+

|matrix-auth +|1.7

+
+
+

|pipeline-model-extensions +|1.1.8

+
+
+

|pipeline-milestone-step +|1.3.1

+
+
+

|jclouds-jenkins +|2.14

+
+
+

|bouncycastle-api +|2.16.1

+
+
+

|===

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Pipeline-structure.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Pipeline-structure.html new file mode 100644 index 00000000..68ec3069 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Pipeline-structure.html @@ -0,0 +1,423 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Pipeline structure

+
+ +
+
+
+

Pipeline configuration:

+
+
+

The default interaction with Jenkins required manual jobs. This keeps configuration of a job in Jenkins separate from source code. With Pipeline plugin users can implement a pipeline procedure in Jenkinsfile and store it in repository with other code. This approach is used in Mr Checker framework. More info: https://jenkins.io/solutions/pipeline/

+
+
+

Our CI & CD processes are divided into a few separate files: Jenkins_node.groovy is the file to manage all processes. It defines all operations executed on a Jenkins node, so all code in this file is closed in node closure. Workflow in Jenkinsfile:

+
+
+
    +
  • +

    Read all parameters from a Jenkins job

    +
  • +
  • +

    Execute stage to prepare the environment

    +
  • +
  • +

    Execute git pull command

    +
  • +
  • +

    Set Jenkins job description

    +
  • +
  • +

    Execute compilation of the project in a special prepared docker container

    +
  • +
  • +

    Execute unit tests

    +
  • +
  • +

    Execute integration tests

    +
  • +
  • +

    Deploy artifacts to a local repository

    +
  • +
  • +

    Deploy artifacts to an external repository (nexus/arifactory)

    +
  • +
+
+
+

Not all the steps must be present in the Jenkins files. This should be configured for particular job requirements.

+
+
+
+
+

Description of stages:

+
+ +
+
+
+

Stage “Prepare environment”

+
+
+

First thing to do in this stage is overwriting properties loaded from Jenkins job. It is defined in “overrideProperties” function. The next function, “setJenkinsJobVariables” defines environment variables such as :

+
+
+
    +
  • +

    JOB_NAME_UPSTREAM

    +
  • +
  • +

    BUILD_DISPLAY_NAME_UPSTREAM

    +
  • +
  • +

    BUILD_URL_UPSTREAM

    +
  • +
  • +

    GIT_CREDENTIALS

    +
  • +
  • +

    JENKINS_CREDENTIALS

    +
  • +
+
+
+

The last function in the stage – “setWorkspace” – creates an environment variable with the path to the local workspace. This is required because when using the pipeline plugin, Jenkins does not create the WORKSPACE env variables.

+
+
+
+
+

Stage "Git pull"

+
+
+

It pulls sources from the repository and loads “git pull” file which contains additional methods:

+
+
+
    +
  • +

    setGitAuthor – setting properties about git author to the file “build.properties” and loading created file

    +
  • +
  • +

    tryMergeWithBranch – checking if actual branch can be merged with default main branch

    +
  • +
+
+
+
+
+

Stage “Build compile”

+
+
+

Verify with maven that code builds without errors

+
+
+
+
+

Stage “Unit test”

+
+
+

Execute unit tests with mvn surefire test and publish reports in junit and allure format

+
+
+
+
+

Stage “Integration test”

+
+
+

Execute integration tests with mvn surefire test and publish reports in junit and allure format

+
+
+
+
+

Stage “Deploy – local repo”

+
+
+

Archive artifacts as a jar file in the local repository

+
+
+
+
+

Stage ”Deploy – nexus repo”

+
+
+

Deploy to the external repository with maven release deploy command with credentials stored in Jenkins machine. Additional files:

+
+
+
    +
  • +

    mailSender.groovy – contains methods for sending mail with generated content

    +
  • +
  • +

    stashNotification.groovy – send job status for bitbucket by a curl command

    +
  • +
  • +

    utils.groovy - contains additional functions to load properties, files and generate additional data

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Selenium-Grid.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Selenium-Grid.html new file mode 100644 index 00000000..14b32256 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Selenium-Grid.html @@ -0,0 +1,395 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Selenium Grid

+
+ +
+
+
+

What is Selenium Grid

+
+
+

Selenium Grid allows running web/mobile browsers test cases to fulfill basic factors, such as:

+
+
+
    +
  • +

    Independent infrastructure, similar to end-users'

    +
  • +
  • +

    Scalable infrastructure (\~50 simultaneous sessions at once)

    +
  • +
  • +

    Huge variety of web browsers (from mobile to desktop)

    +
  • +
  • +

    Continuous Integration and Continuous Delivery process

    +
  • +
  • +

    Supporting multi-type programming languages (java, javascript, python, …​).

    +
  • +
+
+
+
+image88 +
+
+
+

On a daily basis, a test automation engineer uses their local environments for test case execution/development. However, a created browser test case has to be able to run on any infrastructure. Selenium Grid enables this portability.

+
+
+
+
+

Selenium Grid Structure

+
+
+
+image89 +
+
+
+

Full documentation of Selenium Grid can be found here and here.

+
+
+

'Vanilla flavour' Selenium Grid is based on two, not very complicated ingredients:

+
+
+
    +
  1. +

    Selenium Hub - as one machine, accepting connections to grid from test cases executors. It also plays a managerial role in connection to/from Selenium Nodes

    +
  2. +
  3. +

    Selenium Node - from one to many machines, where on each machine a browser used during test case execution is installed.

    +
  4. +
+
+
+
+
+

How to setup

+
+
+

There are two options of Selenium Grid setup:

+
+
+
    +
  • +

    Classic, static solution - link

    +
  • +
  • +

    Cloud, scalable solution - link

    +
  • +
+
+
+

Advantages and disadvantages of both solutions:

+
+
+
+image90 +
+
+
+
+
+

How to use Selenium Grid with E2E Mr Checker Test Frameworks

+
+
+

Run the following command either in Eclipse or in Jenkins:

+
+
+
+
> mvn test -Dtest=com.capgemini.ntc.selenium.tests.samples.resolutions.ResolutionTest -DseleniumGrid="http://10.40.232.61:4444/wd/hub" -Dos=LINUX -Dbrowser=chrome
+
+
+
+

As a result of this command:

+
+
+
    +
  • +

    -Dtest=com.capgemini.ntc.selenium.features.samples.resolutions.ResolutionTest - name of test case to execute

    +
  • +
  • +

    -DseleniumGrid="http://10.40.232.61:4444/wd/hub" - IP address of Selenium Hub

    +
  • +
  • +

    -Dos=LINUX - what operating system must be assumed during test case execution

    +
  • +
  • +

    -Dbrowser=chrome - what type of browser will be used during test case execution

    +
  • +
+
+
+
+image91 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module.html new file mode 100644 index 00000000..aa318201 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module.html @@ -0,0 +1,413 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

DevOPS Test Module

+
+ +
+
+
+

What does DevOps mean for us?

+
+
+

DevOps consists of a mixture of three key components in a technical project:

+
+
+
    +
  • +

    People’s skills and mindset

    +
  • +
  • +

    Processes

    +
  • +
  • +

    Tools

    +
  • +
+
+
+

Using E2E MrChecker Test Framework it is possible to cover the majority of these areas.

+
+
+
+
+

QA Team Goal

+
+
+

For QA engineers, it is essential to take care of the product code quality.

+
+
+

Therefore, we have to understand, that a test case is also code which has to be validated against quality gates. As a result, we must test our developed test case like it is done during standard Software Delivery Life Cycle.

+
+
+
+
+

Well rounded test case production process

+
+
+
    +
  • +

    How do we define top-notch test cases development process in E2E MrChecker Test Framework

    +
  • +
+
+
+
+image5 +
+
+
+
+
+

Continuous Integration (CI) and Continuous Delivery (CD)

+
+
+ +
+
+
+image6 +
+
+
+
+
+

What should you receive from this DevOps module

+
+
+
+image7 +
+
+
+
+
+

What will you gain with our DevOps module

+
+
+

The CI procedure has been divided into transparent modules. This solution makes configuration and maintenance very easy because everyone is able to manage versions and customize the configuration independently for each module. A separate security module ensures the protection of your credentials and assigned access roles regardless of changes in other modules.

+
+
+
+image8 +
+
+
+

Your CI process will be matched to the current project. You can easily go back to the previous configuration, test a new one or move a selected one to other projects.

+
+
+
+image9 +
+
+
+

DevOps module supports a delivery model in which executors are made available to the user as needed. It has such advantages as:

+
+
+
    +
  • +

    Saving computing resources

    +
  • +
  • +

    Eliminating guessing on your infrastructure capacity needs

    +
  • +
  • +

    Not spending time on running and maintaining additional executors +== How to build this DevOps module

    +
  • +
+
+
+

Once you have implemented the module, you can learn more about it here:

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Mobile-Test-Module-How-to-use.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Mobile-Test-Module-How-to-use.html new file mode 100644 index 00000000..b2a6ee67 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Mobile-Test-Module-How-to-use.html @@ -0,0 +1,359 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

How to use mobile test Module

+
+
+
    +
  1. +

    Install IDE with MrChecker

    +
  2. +
  3. +

    Switch branch to 'feature/Create-mobile-module-#213' - by default it is 'develop'

    +
  4. +
+
+
+
+
git checkout feature/Create-mobile-module-#213
+
+
+
+
    +
  1. +

    Install and setup git checkout feature/Create-mobile-module-#213[Appium Server]

    +
  2. +
  3. +

    Connect to local Device by Appium Server

    +
    +
    +
     1.
    +Install Android SDK    https://developer.android.com/studio/index.html#command-tools    ->
    +	2.
    +Download Platform and Build-Tools  (Android versions - >    https://en.wikipedia.org/wiki/Android_version_history   )
    +* sdkmanager "platform-tools" "platforms;android-19"
    +* sdkmanager "build-tools;19.0.0"
    +* copy from /build-tools  file "aapt.exe"  to /platform-tools
    +	3.
    +Set Environment:
    +ANDROID_SDK_ROOT = D:\sdk-tools-windows-4333796
    +PATH =  %PATH%; %ANDROID_SDK_ROOT%
    +	4.
    +Start Appium Server
    +	5.
    +Start Session in Appium Server, capabilities
    +{
    +  "platformName": "Android",
    +            "deviceName": "Android Emulator",
    +            "app": "D:\\Repo\\mrchecker-source\\mrchecker-framework-modules\\mrchecker-mobile-module\\src\\test\\resources\\Simple App_v2.0.1_apkpure.com.apk",
    +            "automationName": "UiAutomator1"
    +            }
    +
    +
    +
  4. +
  5. +

    Run Mobile tests with runtime parameters. +List of supported parameters could be found here

    +
    +
      +
    • +

      From command line (as in Jenkins):

      +
    • +
    +
    +
  6. +
+
+
+
+
mvn clean compile test  -Dapp=".\\Simple_App_v2.0.1_apkpure.com.apk" -DautomationName="UiAutomator1" -Dthread.count=1
+
+
+
+
    +
  • +

    from IDE:

    +
  • +
+
+
+
+image00100 +
+
+
+
+image00101 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Mobile-Test-Module.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Mobile-Test-Module.html new file mode 100644 index 00000000..11a0cda7 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Mobile-Test-Module.html @@ -0,0 +1,410 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Mobile Test Module

+
+ +
+
+
+

What is MrChecker E2E Mobile Test Module

+
+
+

MrChecker E2E Mobile test Module is a suitable solution for testing Responsive Web Design, Mobile Browsers and applications. +A user can write tests suitable for all mobile browsers with a full range of resolutions. The way of working is similar to Selenium and uses the same rules and patterns as the Web Driver. For more information please look in the Selenium test module.

+
+
+
+
+

What is Page Object Architecture

+
+
+

Creating Selenium test cases can result in an unmaintainable project. One of the reasons is that too much duplicated code is used. Duplicated code could be caused by duplicated functionality and this will result in duplicated usage of locators. The disadvantage of duplicated code is that the project is less maintainable. If some locator changes, you have to walk through the whole test code to adjust locators where necessary. By using the page object model we can make non-brittle test code and reduce or eliminate duplicate test code. Besides that, it improves the readability and allows us to create interactive documentation. Last but not least, we can create tests with fewer keystrokes. An implementation of the page object model can be achieved by separating the abstraction of the test object and the test scripts.

+
+
+
+
+

Page Object Pattern

+
+
+
+Pom +
+
+
+
+
+

Mobile Structure

+
+
+

It is built on top of the Appium library. +Appium is an open-source tool for automating native, mobile web, and hybrid applications on iOS mobile, Android mobile, and Windows desktop platforms. Native apps are those written using iOS, Android, or Windows SDKs. Mobile web apps are web apps accessed using a mobile browser (Appium supports Safari on iOS and Chrome or the built-in 'Browser' app on Android). Hybrid apps have a wrapper around a "webview" - a native control that enables interaction with web content.

+
+
+
+
+

Run on different mobile devices

+
+
+

To execute each test with chosen connected mobile devices, it is required to use specific arguments in Run configuration.

+
+
+
+image01 +
+
+
+
+image02 +
+
+
+

Default supported arguments in MrChecker:

+
+
+
    +
  • +

    deviceUrl - http url to Appium Server, default value "http://127.0.0.1:4723"

    +
  • +
  • +

    automationName - which automation engine to use , default value "Appium"

    +
  • +
  • +

    platformName - which mobile OS platform to use , default value "Appium"

    +
  • +
  • +

    platformVersion - mobile OS version , default value ""

    +
  • +
  • +

    deviceName - the kind of mobile device or emulator to use , default value "Android Emulator"

    +
  • +
  • +

    app - the absolute local path or remote http URL to a .ipa file (IOS), .app folder (IOS Simulator), .apk file (Android) or .apks file (Android App Bundle), or a .zip file, default value "."

    +
  • +
  • +

    browserName - name of mobile web browser to automate. Should be an empty string if automating an app instead, default value ""

    +
  • +
  • +

    newCommandTimeout - how long (in seconds) Appium will wait for a new command from the client before assuming the client quit and ending the session, default value "4000"

    +
  • +
  • +

    deviceOptions - any other capabilities not covered in the essential ones, default value none

    +
  • +
+
+
+

Example usage:

+
+
+
+
mvn clean test -Dtest=MyTest -DdeviceUrl="http://192.168.0.1:1234" -DplatformName="iOS" -DdeviceName="iPhone Simulator" -Dapp=".\\Simple_App.ipa"
+
+
+
+
+
mvn clean test -Dtest=MyTest -Dapp=".\\Simple_App.apk" -DdeviceOptions="orientation=LANDSCAPE;appActivity=MainActivity;chromeOptions=['--disable-popup-blocking']"
+
+
+
+

Check also:

+
+ + + +
+

+ +Full list of Generic Capabilities

+
+
+

+ +List of additional capabilities for Android

+
+
+

+ +List of additional capabilities for iOS

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Security-Test-Module.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Security-Test-Module.html new file mode 100644 index 00000000..b205d589 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Security-Test-Module.html @@ -0,0 +1,320 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Security Test Module

+
+ +
+
+
+

What is Security

+
+
+

Application Security is concerned with Integrity, Availability and Confidentiality of data processed, stored and transferred by the application.

+
+
+

Application Security is a cross-cutting concern which touches every aspect of the Software Development Lifecycle. You can introduce some SQL injection flaws in your application and make it exploitable, but you can also expose your secrets (which will have nothing to do with code itself) due to poor secret management process, and fail as well.

+
+
+

Because of this and many other reasons, not every aspect of security can be automatically verified. Manual tests and audits will still be needed. Nevertheless, every security requirement which is automatically verified will prevent code degeneration and misconfiguration in a continuous manner.

+
+
+
+
+

How to test Security

+
+
+

Security tests can be performed in many different ways, such as:

+
+
+
    +
  • +

    Static Code Analysis - improves the security by (usually) automated code review. A good way to search for vulnerabilities, which are 'obvious' on the code level ( e.g. SQL injection). The downside of this approach is that professional tools to perform such scans are very expensive and still produce many false positives.

    +
  • +
  • +

    Dynamic Code Analysis - tests are run against a working environment. A good way to search for vulnerabilities, which require all client- and server-side components to be present and running (like e.g. Cross-Site Scripting). Tests are performed in a semi-automated manner and require a proxy tool (like e.g. OWASP ZAP)

    +
  • +
  • +

    Unit tests - self-written and self-maintained tests. They usually work on the HTTP/REST level (this defines the trust boundary between the client and the server) and run against a working environment. Unit tests are best suited for verifying requirements which involve business knowledge of the system or which assure secure configuration on the HTTP level.

    +
  • +
+
+
+

In the current release of the Security Module, the main focus will be Unit Tests.

+
+
+

Although the most common choice of environment for running security tests on will be integration (the environment offers the right stability and should mirror the production closely), it is not uncommon for some security tests to run on production as well. This is done, e.g., for TLS configuration testing to ensure proper configuration of the most relevant environment in a continuous manner.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Building-basic-Selenium-Test.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Building-basic-Selenium-Test.html new file mode 100644 index 00000000..a7216d0a --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Building-basic-Selenium-Test.html @@ -0,0 +1,616 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Sample Walkthrough

+
+
+

This page will walk you through the process of creating a test case. We’ll create a very simple test for the Google search engine.

+
+
+
+
+

Test Procedure

+
+
+

We would like to open the Google search engine, enter some search query and afterwards submit the form. We hope to see some results being listed, otherwise the test will fail. Summarized, the testing process would look like this.

+
+
+
    +
  1. +

    Open google.com

    +
  2. +
  3. +

    Enter the string "Test" into the searchbox

    +
  4. +
  5. +

    Submit the form

    +
  6. +
  7. +

    Get the results and check if the result list is empty

    +
  8. +
+
+
+
+
+

Creating new packages

+
+
+

We will need two new packages, one for the new page classes, the other for our test classes.

+
+
+
+
+

Creating package for test classes

+
+
+

Open Eclipse, use the "Project Explorer" on the left to navigate to

+
+
+

mrchecker-app-under-test → src/test/java → com.example → selenium.tests → tests

+
+
+

Right click on "tests", click on "New" → New Package. We’ll name the new package "com.example.selenium.tests.googleSearch".

+
+
+
+image65 +
+
+
+
+
+

Creating package for page classes

+
+
+

Navigate to

+
+
+

mrchecker-app-under-test → src/main/java → com.example → selenium → pages

+
+
+

Right click on "pages", click on "New" → New Package. The new package will be called "com.example.selenium.pages.googleSearch".

+
+
+
+image66 +
+
+
+
+
+

Creating the test class

+
+
+

The test class will contain the entire testing-routine. At first, we’ll create a new class inside our newly created "googleSearch" package (under src/test/java) and call it "GoogleSearchTest".

+
+
+
+image67 +
+
+
+

As "GoogleSearchTest" is a test class, it has to extend the BaseTest class. You may have to import some required packages and afterwards include a few required methods.

+
+
+
+
public class GoogleSearchTest extends BaseTest {
+
+	@Override
+	public void setUp() {
+
+	}
+
+	@Override
+	public void tearDown() {
+
+	}
+}
+
+
+
+

Now, we’ll need a new Page object, which will represent the Google Search page. The page class will be named "GoogleSearchPage".

+
+
+
+
private GoogleSearchPage googleSearchPage;
+
+@Override
+public void setUp() {
+	googleSearchPage = new GoogleSearchPage();
+}
+
+
+
+
+
+

Creating the GoogleSearchPage class

+
+
+

We have created a new field for the GoogleSearchPage class and instantiated an object in the setUp() method. As this class doesn’t exist yet, we’ll have to create it inside the googleSearch page class package.

+
+
+
+image68 +
+
+
+

We extend the BasePage class with GoogleSearchPage, import all necessary packages and include all the required methods.

+
+
+
+
public class GoogleSearchPage extends BasePage {
+
+	@Override
+	public boolean isLoaded() {
+		return false;
+	}
+
+	@Override
+	public void load() {
+
+	}
+
+	@Override
+	public String pageTitle() {
+		return "";
+	}
+}
+
+
+
+

As this page class represents the Google homepage, we have to set up selectors for web elements required in our test case. In our example we have to create a selector for the search bar which we’ll interact with. The selector will be implemented as a field.

+
+
+
+
private static final By selectorGoogleSearchInput = By.cssSelector("#lst-ib");
+
+
+
+

The input field’s id #lst-ib was found by using the developer console in Google Chrome.

+
+
+

This selector can be used to create a WebElement object of said search bar. Therefore, we’ll create a new method and call it "enterGoogleSearchInput".

+
+
+
+
public GoogleResultPage enterGoogleSearchInput(String searchText) {
+	WebElement googleSearchInput = getDriver().findDynamicElement(selectorGoogleSearchInput);
+	googleSearchInput.sendKeys(searchText);
+	googleSearchInput.submit();
+
+	return new GoogleResultPage();
+}
+
+
+
+

As you can see, we return another page object that wasn’t yet created. This step is required, as the results that we would like to check are on another Google Page. This means we’ll have to create another page class, which will be shown later.

+
+
+

Finally, the empty methods inherited from the BasePage class have to be filled:

+
+
+
+
@Override
+public boolean isLoaded() {
+	if(getDriver().getTitle().equals(pageTitle())) {
+		return true;
+	}
+	return false;
+}
+
+@Override
+public void load() {
+	getDriver().get("http://google.com");
+}
+
+@Override
+public String pageTitle() {
+	return "Google";
+}
+
+
+
+

The method isLoaded() checks if the page was loaded by comparing the actual title with the expected title provided by the method pageTitle(). The load() method simply loads a given URL, in this case http://google.com.

+
+
+

The completion of these methods finalizes our GoogleSearchPage class. We still have to create the GoogleResultPage class mentioned before. This page will deal with the elements on the Google search result page.

+
+
+
+
+

Creating the GoogleResultPage class

+
+
+

By right-clicking on the "pages" package, we’ll navigate to "new" → "Class" to create a new class.

+
+
+
+image69 +
+
+
+

The GoogleResultPage class also has to extend BasePage and include all required methods. Next, a new selector for the result list will be created. By using the result list, we can finally check if the result count is bigger than zero and thus, if the search request was successful.

+
+
+
+
private static final By selectorResultList = By.cssSelector("#res");
+
+
+
+

We’ll use this selector inside a new getter-method, which will return all ListElements.

+
+
+
+
public ListElements getResultList() {
+	return getDriver().elementList(selectorResultList);
+}
+
+
+
+

This method will allow the testcase to simply get the result list and afterwards check if the list is empty or not.

+
+
+

Finally, we have to complete all inherited methods.

+
+
+
+
@Override
+public boolean isLoaded() {
+	getDriver().waitForPageLoaded();
+	if(getDriver().getCurrentUrl().contains("search")) {
+		return true;
+	}
+	return false;
+}
+
+@Override
+public void load() {
+	BFLogger.logError("Google result page was not loaded.");
+}
+
+@Override
+public String pageTitle() {
+	return "";
+}
+
+
+
+

The method isLoaded() differs from the same method in GoogleSearchPage, because this site is being loaded as a result of a previous action. That’s why we’ll have to use the method getDriver().waitForPageLoaded() to be certain that the page was loaded completely. Afterwards we check if the current URL contains the term "search", as it only occurs on the result page. This way we can check if we’re on the right page.

+
+
+

Another result of this page being loaded by another object is that we don’t have to load any specific URL. We just add a BFLogger instance to print an error message if the page was not successfully loaded.

+
+
+

As we don’t use the pageTitle() method we simply return an empty String.

+
+
+

Finally, all required page classes are complete and we can finalize the test class.

+
+
+
+
+

Finalizing the test class

+
+
+

At this point, our GoogleSearchTest class looks like this:

+
+
+
+
public class GoogleSearchTest extends BaseTest {
+
+	private GoogleSearchPage googleSearchPage;
+
+
+	@Override
+	public void setUp() {
+		googleSearchPage = new GoogleSearchPage();
+	}
+
+	@Override
+	public void tearDown() {
+
+	}
+}
+
+
+
+

Next, we’ll create the test method, let’s call it shouldResultReturn().

+
+
+
+
@Test
+public void shouldResultReturn() {
+	GoogleResultPage googleResultPage = googleSearchPage.enterGoogleSearchInput("Test");
+	ListElements results = googleResultPage.getResultList();
+	assertTrue("Number of results equals 0", results.getSize() > 0);
+}
+
+
+
+

Code explanation: At first, we will run the enterGoogleSearchInput() method on the GoogleSearchPage with the parameter "Test" to search for this exact string on Google. As this method returns a GoogleResultPage object, we will store this in the local variable googleResultPage. Afterwards, we get the result list by utilizing the getter method that we created before. Finally, we create an assertion: We expect the list size to be bigger than zero, meaning that the google search query was successful as we received results. If this assertion is wrong, a message will be printed out, stating that the number of results equals zero.

+
+
+

We can run the test by right clicking on the test method → Run as → JUnit test.

+
+
+
+image70 +
+
+
+

After starting the test, you’ll notice a browser window opening, resizing to given dimensions, opening Google, entering the query "Test" and submitting the form. After completing the test, you’ll see the test results on the right side of Eclipse. Green color indicator means that the test was successful, red means the test failed.

+
+
+
+image71 +
+
+
+

This walkthrough should’ve provided you with basic understanding on how the framework can be used to create test cases.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-CSS-selectors.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-CSS-selectors.html new file mode 100644 index 00000000..716aff76 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-CSS-selectors.html @@ -0,0 +1,343 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

A CSS selector is used to select elements from an HTML page.

+
+
+

Selection by element tag, class or id are the most common selectors.

+
+
+
+
<p class='myText' id='123'>
+
+
+
+

This text element (p) can be found by using any one of the following selectors:

+
+
+
+
The HTML element: "p". Note: in practical use this will be too generic, if a preceding text section is added, the selected element will change.
+The class attribute preceded by ".": ".myText"
+The id attribute preceded by "#": "#123"
+
+
+
+

Using other attributes

+
+
+

When a class or an id attribute is not sufficient to identify an element, other attributes can be used as well, by using "[attribute=value]": For example:

+
+
+
+
<a href='https://ns.nl/example.html'>
+
+
+
+

This can be selected by using the entire value: "a[href='https://ns.nl/example.html']". For selecting links starting with, containing, or ending with a value, see the list below.

+
+
+
+
+

Using sub-elements

+
+
+

The css selectors can be stacked, by appending them:

+
+
+
+
<div id='1'><a href='ns.nl'></div>
+<div id='2'><a href='nsinternational.nl'></div>
+
+
+
+

In the example above, the link element to nsinternational can be obtained with: "#2 a".

+
+
+
+
+

When possible avoid

+
+
+
    +
  • +

    Using paths of commonly used HTML elements within the containers (HTML: div). This will cause failures when a container is added, a common occurrence during development, e.g. "div div p". Use class or id instead, if those are not available, request them to be added in the production code.

    +
  • +
  • +

    Magic order numbers. It is possible to get the second text element in its parent container by using the selector "p:nth-child(2)". If the items represent different things, ask the developer to add specific attributes. It is also possible to request all items, with a selector similar to ".myList li", and iterate through them later.

    +
  • +
+
+
+
+
+

List

+
+
+

A good list with CSS Selectors can be found at W3Schools:
+https://www.w3schools.com/cssref/css_selectors.asp

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-List-of-web-elements.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-List-of-web-elements.html new file mode 100644 index 00000000..842577ec --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-List-of-web-elements.html @@ -0,0 +1,369 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Basic Web elements

+
+
+

This page will provide an overview of basic web elements.

+
+
+
+image57 +
+
+
+
+image58 +
+
+
+

|== = +|Name +|Method to use element

+
+
+

|Form: Input Text +|elementInputText()

+
+
+

|Form: Label +|elementLabel()

+
+
+

|Form: Submit Button +|elementButton()

+
+
+

|Page: Button +|elementButton()

+
+
+

|Checkbox +|elementCheckbox()

+
+
+

|Radio +|elementRadioButton()

+
+
+

|Elements (Tabs, Cards, Account, etc.) +|elementTab()

+
+
+

|Dropdown List +|elementDropdownList()

+
+
+

|Link +|-

+
+
+

|Combobox +|elementList() +|== =

+
+
+

Comparison of how picking a value from a checkbox can be done:

+
+
+
    +
  • +

    by classic Selenium atomic actions

    +
  • +
  • +

    by our enhanced Selenium wrapper

    +
  • +
+
+
+

Classic Selenium atomic actions

+
+
+
+
List<WebElement> checkboxesList = getDriver()
+                .findElements(selectorHobby);
+WebElement currentElement;
+for (int i = 0; i < checkboxesList.size(); i++) {
+    currentElement = checkboxesList.get(i);
+    if (currentElement.getAttribute("value")
+                    .equals(hobby.toString()) && currentElement.isSelected() != true)
+                        {
+        currentElement.click();
+            }
+}
+
+
+
+

Enhanced Selenium in E2E test framework

+
+
+
+
getDriver().elementCheckbox(selectorHobby)
+				.setCheckBoxByValue(hobby.toString());
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Run-on-different-browsers.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Run-on-different-browsers.html new file mode 100644 index 00000000..b2dd079c --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Run-on-different-browsers.html @@ -0,0 +1,304 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Run on different browsers

+
+
+
+image59 +
+
+
+

To execute each test with a chosen installed browser, specific arguments are required in Run configuration.

+
+
+
+image60 +
+
+
+
+image61 +
+
+
+

It is necessary to enter -Dbrowser= with browser parameter name as an argument (in 'Arguments' tab):

+
+
+

firefox +ie +phantomjs +chrome +chromeheadless +For example: -Dbrowser=ie

+
+
+
+
_-ea_ should be entered as an argument to restore default settings.
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Selenium-Best-Practices.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Selenium-Best-Practices.html new file mode 100644 index 00000000..6e6f3143 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Selenium-Best-Practices.html @@ -0,0 +1,311 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Selenium Best Practices

+
+
+

The following table displays a few best practices that should be taken into consideration when developing Selenium test cases.

+
+
+

|== =

+
+
+

|Best Practices +|Description

+
+
+

|"Keep it Simple" +|Do not force use every Selenium feature available - Plan before creating the actual test cases

+
+
+

|Using Cucumber +|Cucumber can be used to create initial testcases for further decision making

+
+
+

|Supporting multiple browsers +|Test on multiple browsers (in parallel, if applicable) if the application is expected to support multiple environments

+
+
+

|Test reporting +|Make use of test reporting modules like Junit which is included in the framework

+
+
+

|Maintainability +|Always be aware of the maintainability of tests - You should always be able to adapt to changes

+
+
+

|Testing types +|Which tests should be created? Rule of thumb: 70% Unit test cases, 20% Integration test cases and 10% UI Test cases

+
+
+

|Test data +|Consider before actually developing tests and choosing tools: Where to get test data from, how to reset test data

+
+
+

|== =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Selenium-UFT-Comparison.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Selenium-UFT-Comparison.html new file mode 100644 index 00000000..3e64313a --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Selenium-UFT-Comparison.html @@ -0,0 +1,424 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Selenium UFT Comparison

+
+
+

|== =

+
+
+

|Subject +|HP UFT +|HP LeanFT +|Selenium +|Selenium IDE

+
+
+

|Language +|VBScript +|Same as Selenium +|Supports several languages. +Java +|Javascript

+
+
+

|Learning curve +|Based on VBScript which is relatively easy to learn +|Less intuitive, more coding knowledge necessary +|Less intuitive, more coding skills necessary +|Record/playback possible. Generated code difficult to maintain

+
+
+

|Project type +|Traditional +|Agile +|Agile +|Agile

+
+
+

|User oriented +|More Tester +|More Developer +|More Developer +|More Tester

+
+
+

|Object recognition +|Test object identification and storage in object repository +|Same as UFT +|With Firebug +|Same as SE

+
+
+

|Customizations +|Only the available standard. No customization +|Same as UFT +|Lots of customizations possible +|Fewer than SE

+
+
+

|Framework +|Needed. +Exists in ATaaS +| +|Needed. +Integration with Fitnesse, Cucumber, Gauche +|No Framework. Limited capabilities of the tool.

+
+
+

|Operating System support +|Runs on Windows +|Runs on Windows +|Multiple OS support. With Grid: testing on multiple devices at same time +|Plugin for Firefox

+
+
+

|Application coverage +|Many +|Many +|Web only +|Web only

+
+
+

|Multiple browsers +|In UFT 12.5 available +|In 12.5 available +|Multiple tests in multiple browser windows at once and faster support for new browser versions +|Multiple tests in multiple browser windows at once and faster support for new browser versions

+
+
+

|System Load +|High system load (RAM & CPU usage) +|Lower load than HP UFT? +|Lower load than HP UFT +|Lower load than HP UFT

+
+
+

|ALM integration +|With HP ALM – full integration +| +|Jira, Jenkins +Not with ALM tool +|Same as SE

+
+
+

|Integration with other tools +|A lot can be built, but many are already covered. +|More than UFT. +|Freeware and can be integrated with different open source tools +|Freeware and can be integrated with different open source tools

+
+
+

|Addins +|Add-ins necessary to access all capabilities of the tool – license related +|Same as UFT +|See integration with other tools +|See integration with other tools

+
+
+

|Reporting +|Complete, link to ALM +|Same as UFT +|No native mechanism for generating reports, but multiple plugins available for reporting +|No native mechanism for generating reports, but multiple plugins available for reporting

+
+
+

|Support +|HP full support +|Same as UFT +|Limited support as it is open source +|Limited support as it is open source

+
+
+

|License costs +|About 17K – Capgemini price 5K. +Included in the S2 service charge +|Same price as HP UFT +|Free +|Free +limited functionality (no iterations / conditional statements)

+
+
+

|iVAL Service +|ATaaS +|Not in a S2 service +|Not in a S2 service +|Not in a S2 service

+
+
+

|== =

+
+
+

Bold for key differentiators.

+
+
+

Projects also choose an available resource and the knowledge of that resource.

+
+
+

Both: Framework determines the quality of automation. Needs to be set up by someone with experience with the tool

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-What-is-Selenium.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-What-is-Selenium.html new file mode 100644 index 00000000..1a0baf5e --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-What-is-Selenium.html @@ -0,0 +1,346 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

What is Selenium

+
+
+

Selenium is a framework for testing browser applications. The test automation supports:

+
+
+
    +
  • +

    Frequent regression testing

    +
  • +
  • +

    Repeating test case executions

    +
  • +
  • +

    Documentation of test cases

    +
  • +
  • +

    Finding defects

    +
  • +
  • +

    Multiple Browsers

    +
  • +
+
+
+

The Selenium testing framework consists of multiple tools:

+
+
+
    +
  • +

    Selenium IDE

    +
    +

    The Selenium Integrated Development Environment is a prototyping tool for building test scripts. It is a Firefox Plugin and provides an easy-to-use interface for developing test cases. Additionally, Selenium IDE contains a recording feature, that allows the user to record user inputs that can be automatically re-executed in future.

    +
    +
  • +
  • +

    Selenium 1

    +
    +

    Selenium 1, also known as Selenium RC, commands a Selenium Server to launch and kill browsers, interpreting the Selenese commands passed from the test program. The Server acts as an HTTP proxy. This tool is deprecated.

    +
    +
  • +
  • +

    Selenium 2

    +
    +

    Selenium 2, also known as Selenium WebDriver, is designed to supply a well-designed, object-oriented API that provides improved support for modern advanced web-app testing problems.

    +
    +
  • +
  • +

    Selenium 3.0

    +
    +

    The major change in Selenium 3.0 is removing the original Selenium Core implementation and replacing it with one backed by WebDriver. There is now a W3C specification for browser automation, based on the Open Source WebDriver.

    +
    +
  • +
  • +

    Selenium Grid

    +
    +

    Selenium Grid allows the scaling of Selenium RC test cases, that must be run in multiple and potentially variable environments. The tests can be run in parallel on different remote machines.

    +
    +
  • +
+
+
+
+
+

Selenium on the Production Line

+
+
+

More information on Selenium on the Production Line can be found here.

+
+
+

tl;dr

+
+
+

The Production Line has containers running Chrome and Firefox Selenium Nodes. The communication with these nodes is accomplished using Selenium Grid.

+
+
+

Having issues using Selenium on the Production Line? Check the Production Line issue list, maybe it’s a known issue that can be worked around.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-What-is-WebDriver.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-What-is-WebDriver.html new file mode 100644 index 00000000..3cf5a95c --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-What-is-WebDriver.html @@ -0,0 +1,297 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

What is WebDriver

+
+
+

On the one hand, it is a very convenient API for a programmer that allows for interaction with the browser, on the other hand it is a driver concept that enables this direct communication.

+
+
+
+image53 +
+
+
+
+
+

How does it work?

+
+
+
+image54 +
+
+
+

A tester, through their test script, can command WebDriver to perform certain actions on the WAUT on a certain browser. The way the user can command WebDriver to perform something is by using the client libraries or language bindings provided by WebDriver.

+
+
+

By using the language-binding client libraries, a tester can invoke browser-specific implementations of WebDriver, such as Firefox Driver, IE Driver, Opera Driver, and so on, to interact with the WAUT of the respective browser. These browser-specific implementations of WebDriver will work with the browser natively and execute commands from outside the browser to simulate exactly what the application user does.

+
+
+

After execution, WebDriver will send the test result back to the test script for developer’s analysis.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module.html new file mode 100644 index 00000000..f38c0478 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module.html @@ -0,0 +1,392 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Selenium Test Module

+
+ +
+
+
+

What is MrChecker E2E Selenium Test Module

+
+
+
+image2 +
+
+
+
+
+

Selenium Structure

+ +
+
+

Framework Features

+
+
+ +
+
+
+
+

How to start?

+ +
+
+

Selenium Best Practices

+
+ +
+
+
+

Selenium UFT Comparison

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Standalone-Test-Module.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Standalone-Test-Module.html new file mode 100644 index 00000000..bd2ef6c9 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Standalone-Test-Module.html @@ -0,0 +1,279 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Standalone Test Module

+
+
+

The inspiring content will be here soon.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module-How-to-make-virtual-asset.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module-How-to-make-virtual-asset.html new file mode 100644 index 00000000..780476a8 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module-How-to-make-virtual-asset.html @@ -0,0 +1,558 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

How to make a virtual asset

+
+
+

This can be done in four ways:

+
+
+
    +
  • +

    Record all traffic (Mappings and Responses) that comes through proxy - by UI

    +
  • +
  • +

    Record all traffic (Mappings and Responses) that comes through proxy - by Code

    +
  • +
  • +

    Create Mappings and Responses manually by text files

    +
  • +
  • +

    Create Mappings and Responses manually by code

    +
  • +
+
+
+
+
+

Record all traffic (Mappings and Responses) that comes through proxy - UI

+
+
+

Full article here Wiremock record-playback.

+
+
+

First, start an instance of WireMock running standalone. Once that’s running, visit the recorder UI page at http://localhost:8080/__admin/recorder (assuming you started WireMock on the default port of 8080).

+
+
+
+image77 +
+
+
+

Enter the URL you wish to record from in the target URL field and click the Record button. You can use http://example.mocklab.io to try it out.

+
+
+

Now you need to make a request through WireMock to the target API so that it can be recorded. If you’re using the example URL, you can generate a request using curl:

+
+
+
+
$ curl http://localhost:8080/recordables/123
+
+
+
+

Now click stop. You should see a message indicating that one stub was captured.

+
+
+

You should also see that a file has been created called something like recordables_123-40a93c4a-d378-4e07-8321-6158d5dbcb29.json under the mappings directory created when WireMock started up, and that a new mapping has appeared at http://localhost:8080/__admin/mappings.

+
+
+

Requesting the same URL again (possibly disabling your wifi first if you want a firm proof) will now serve the recorded result:

+
+
+
+
$ curl http://localhost:8080/recordables/123
+
+{
+"message": "Congratulations on your first recording!"
+}
+
+
+
+
+
+

Record all traffic (Mappings and Responses) that comes through proxy - by Code

+
+
+

An example of how such a record can be achieved

+
+
+
+
@Test
+public void startRecording() {
+
+    SnapshotRecordResult recordedMappings;
+
+    DriverManager.getDriverVirtualService()
+            .start();
+    DriverManager.getDriverVirtualService()
+            .startRecording("http://example.mocklab.io");
+    recordedMappings = DriverManager.getDriverVirtualService()
+            .stopRecording();
+
+    BFLogger.logDebug("Recorded messages: " + recordedMappings.toString());
+
+}
+
+
+
+
+
+

Create Mappings and Responses manually by text files

+
+
+

EMPTY

+
+
+
+
+

Create Mappings and Responses manually by code

+
+
+

Link to full file structure: REST_FarenheitToCelsiusMethod_Test.java

+
+
+
+
+

Start up Virtual Server

+
+
+
+
public void startVirtualServer() {
+
+    // Start Virtual Server
+    WireMockServer driverVirtualService = DriverManager.getDriverVirtualService();
+
+    // Get Virtual Server running http and https ports
+    int httpPort = driverVirtualService.port();
+    int httpsPort = driverVirtualService.httpsPort();
+
+    // Print is Virtual server running
+    BFLogger.logDebug("Is Virtual server running: " + driverVirtualService.isRunning());
+
+    String baseURI = "http://localhost";
+    endpointBaseUri = baseURI + ":" + httpPort;
+}
+
+
+
+
+
+

Plug in a virtual asset

+
+
+

REST_FarenheitToCelsiusMethod_Test.java

+
+
+
+
public void activateVirtualAsset() {
+    /*
+    * ----------
+    * Mock response. Map request with virtual asset from file
+    * -----------
+    */
+    BFLogger.logInfo("#1 Create Stub content message");
+    BFLogger.logInfo("#2 Add resource to virtual server");
+    String restResourceUrl = "/some/thing";
+    String restResponseBody = "{ \"FahrenheitToCelsiusResponse\":{\"FahrenheitToCelsiusResult\":37.7777777777778}}";
+
+    new StubREST_Builder //For active virtual server ...
+            .StubBuilder(restResourceUrl) //Activate mapping, for this Url AND
+            .setResponse(restResponseBody) //Send this response  AND
+            .setStatusCode(200) // With status code 200 FINALLY
+            .build(); //Set and save mapping.
+
+}
+
+
+
+

Link to full file structure: StubREST_Builder.java

+
+
+

Source link to How to create Stub.

+
+
+

StubREST_Builder.java

+
+
+
+
public class StubREST_Builder {
+
+    // required parameters
+    private String endpointURI;
+
+    // optional parameters
+    private int statusCode;
+
+    public String getEndpointURI() {
+        return endpointURI;
+    }
+
+    public int getStatusCode() {
+        return statusCode;
+    }
+
+    private StubREST_Builder(StubBuilder builder) {
+        this.endpointURI = builder.endpointURI;
+        this.statusCode = builder.statusCode;
+    }
+
+    // Builder Class
+    public static class StubBuilder {
+
+        // required parameters
+        private String endpointURI;
+
+        // optional parameters
+        private int     statusCode  = 200;
+        private String  response    = "{ \"message\": \"Hello\" }";
+
+        public StubBuilder(String endpointURI) {
+            this.endpointURI = endpointURI;
+        }
+
+        public StubBuilder setStatusCode(int statusCode) {
+            this.statusCode = statusCode;
+            return this;
+        }
+
+        public StubBuilder setResponse(String response) {
+            this.response = response;
+            return this;
+        }
+
+        public StubREST_Builder build() {
+
+            // GET
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            // Given that request with ...
+                            get(urlMatching(this.endpointURI))
+                                    .withHeader("Content-Type", equalTo(ContentType.JSON.toString()))
+                                    // Return given response ...
+                                    .willReturn(aResponse()
+                                            .withStatus(this.statusCode)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody(this.response)
+                                            .withTransformers("body-transformer")));
+
+            // POST
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            // Given that request with ...
+                            post(urlMatching(this.endpointURI))
+                                    .withHeader("Content-Type", equalTo(ContentType.JSON.toString()))
+                                    // Return given response ...
+                                    .willReturn(aResponse()
+                                            .withStatus(this.statusCode)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody(this.response)
+                                            .withTransformers("body-transformer")));
+
+            // PUT
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            // Given that request with ...
+                            put(urlMatching(this.endpointURI))
+                                    .withHeader("Content-Type", equalTo(ContentType.JSON.toString()))
+                                    // Return given response ...
+                                    .willReturn(aResponse()
+                                            .withStatus(this.statusCode)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody(this.response)
+                                            .withTransformers("body-transformer")));
+
+            // DELETE
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            // Given that request with ...
+                            delete(urlMatching(this.endpointURI))
+                                    .withHeader("Content-Type", equalTo(ContentType.JSON.toString()))
+                                    // Return given response ...
+                                    .willReturn(aResponse()
+                                            .withStatus(this.statusCode)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody(this.response)
+                                            .withTransformers("body-transformer")));
+
+            // CATCH any other requests
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            any(anyUrl())
+                                    .atPriority(10)
+                                    .willReturn(aResponse()
+                                            .withStatus(404)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody("{\"status\":\"Error\",\"message\":\"Endpoint not found\"}")
+                                            .withTransformers("body-transformer")));
+
+            return new StubREST_Builder(this);
+        }
+    }
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module-Smoke-Tests-virtualization.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module-Smoke-Tests-virtualization.html new file mode 100644 index 00000000..1cca2e8a --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module-Smoke-Tests-virtualization.html @@ -0,0 +1,1017 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Start a virtual server

+
+
+

The following picture presents the process of executing Smoke Tests in a virtualized environment:

+
+
+
+image78 +
+
+
+
+
+

Install docker service

+
+
+

If docker is not already installed on machine (this should be checked during C2C creation), install docker, docker-compose, apache2-utils, openssl (You can use script to install docker & docker-compose OR refer to this post and add Alias for this machine <C2C_Alias_Name>):

+
+
+
    +
  • +

    run the script

    +
  • +
  • +

    sudo apt-get install -y apache2-utils

    +
  • +
+
+
+
+
+

Build a docker image

+
+
+

Dockerfile:

+
+
+
+
FROM docker.xxx.com/ubuntu:16.04
+MAINTAINER Maintainer Name "maintainer@email.address"
+LABEL name=ubuntu_java \
+           version=v1-8.0 \
+           base="ubuntu:16.04" \
+           build_date="03-22-2018" \
+           java="1.8.0_162" \
+           wiremock="2.14.0" \
+           description="Docker to use with Ubuntu, JAVA and WIREMOCK "
+
+##Update and install the applications needed
+COPY 80proxy /etc/apt/apt.conf.d/80proxy
+RUN apt-get update
+RUN apt-get install -y \
+            wget \
+            libfontconfig \
+            unzip \
+            zip \
+            ksh \
+            curl \
+            git
+
+COPY wgetrc /etc/wgetrc
+
+#Env parameters
+
+### JAVA PART ###
+#TO UPDATE:please verify url link to JDK http://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html
+##Download and install JAVA JDK8
+RUN mkdir /opt/jdk
+RUN wget -qq --header "Cookie: oraclelicense=accept-securebackup-cookie" http://download.oracle.com/otn-pub/java/jdk/8u162-b12/0da788060d494f509bf8624735fa2f1/jdk-8u162-linux-x64.tar.gz && tar -zxf jdk-8u162-linux-x64.tar.gz -C /opt/jdk && rm jdk-8u162-linux-x64.tar.gz && update-alternatives --install /usr/bin/javac javac /opt/jdk/jdk1.8.0_162/bin/javac 100 && java -version && chmod 755 -R /opt/jdk/jdk1.8.0_162/
+RUN java -version
+
+##Add user
+RUN useradd -u 29001 -g 100 srvpwiredev
+
+##Add app
+RUN mkdir -p -m 777 /app
+COPY wiremock-standalone-2.14.0.jar /app/wiremock-standalone-2.14.0.jar
+
+##Expose port
+EXPOSE 8080
+
+##Set workdir
+WORKDIR /app
+
+##Run app
+CMD java -jar /app/wiremock-standalone-2.14.0.jar
+
+
+
+

Execute the following steps with a specified version to build a docker image and push it to the repository :

+
+
+
+
## Build image
+sudo docker build -t docker.xxx.com/app/build/wiremock:v2.14.0 .
+
+## Push image
+sudo docker login docker.xxx.com
+sudo docker push docker.xxx.com/app/build/wiremock:v2.14.0
+
+
+
+
+
+

Run docker image

+
+
+

To run a docker image, execute the following command:

+
+
+
+
sudo docker run -td -p 8080:8080 -v /home/wiremock/repo/app/docker/QA/mappings:/app/mappings -v /home/wiremock/repo/app/docker/QA/__files:/app/__files --restart always docker.xxx.com/app/build/wiremock:v2.14.0
+
+
+
+

Where:

+
+
+

-p - publish a container’s port to the host

+
+
+

-v - bind mount a volume. WireMock server creates two directories under the current one: mappings and __files. It is necessary to mount directories with already created mappings and responses to make it work.

+
+
+

--restart always - restart policy to apply when a container exits

+
+
+

All of the parameters are described in: official docker documentation

+
+
+
+
+

Map requests with virtual assets

+
+
+

What is WireMock?

+
+
+

WireMock is an HTTP mock server. At its core it is a web server that can be primed to serve canned responses to particular requests (stubing) and that captures incoming requests so that they can be checked later (verification). It also has an assortment of other useful features including record/playback of interactions with other APIs, injection of faults and delays, simulation of stateful behaviour.

+
+
+

Full documentation can be found under the following link: WireMock

+
+
+
+
+

Record / create virtual assets mappings

+
+
+

Record

+
+
+

WireMock can create stub mappings from requests it has received. Combined with its proxying feature, this allows you to "record" stub mappings from interaction with existing APIs.

+
+
+

Record and playback (Legacy): documentation

+
+
+
+
java -jar wiremock-standalone-2.16.0.jar --proxy-all="http://search.twitter.com" --record-mappings --verbose
+
+
+
+

Once it’s started and a request is sent to it, it will be redirected to "http://search.twitter.com" and traffic (response) is saved to files in mappings and __files directories for further use.

+
+
+

Record and playback (New): documentation

+
+
+
+
+

Enable mappings in a virtual server

+
+
+

When the WireMock server starts, it creates two directories under the current one: mappings and __files. To create a stub, it is necessary to drop a file with a .json extension under mappings.

+
+
+

Run docker with mounted volumes

+
+
+

Mappings are in a repository. It is necessary to mount directories with already created mappings and responses to make it work:

+
+
+
+
sudo docker run -td -p 8080:8080 -v /home/wiremock/repo/app/docker/QA/mappings:/app/mappings -v /home/wiremock/repo/app/docker/QA/__files:/app/__files --restart always docker.xxx.com/app/build/wiremock:v2.14.0
+
+
+
+

The description of how to build and run docker is available under: Docker run command description

+
+
+

Recorded mappings

+
+
+

Recorded mappings are kept in the project repository.

+
+
+
+
+

Create a user and map them to docker user

+
+
+

To enable the connection from Jenkins to Virtual Server (C2C), it is necessary to create a user and map them to docker group user. It can be done using the following command:

+
+
+
+
adduser -G docker -m wiremock
+
+
+
+

To set the password for a wiremock user:

+
+
+
+
passwd wiremock
+
+
+
+
+
+

Create SSH private and public keys for a wiremock user

+
+
+

SSH keys serve as a means of identifying yourself to an SSH server using public-key cryptography and challenge-response authentication. One immediate advantage this method has over traditional password is that you can be authenticated by the server without ever having to send your password over the network.

+
+
+

To create an SSH key, log in as wiremock (previously created user).

+
+
+
+
su wiremock
+
+
+
+

The .ssh directory is not by default created below user home directory. Therefore, it is necessary to create it:

+
+
+
+
mkdir ~/.ssh
+
+
+
+

Now we can proceed with creating an RSA key using ssh-keygen (a tool for creating new authentication key pairs for SSH):

+
+
+
+
ssh-keygen -t rsa
+
+
+
+

A key should be created under ~/.ssh/id_rsa. Appending the public keys to authorized_keys:

+
+
+
+
wiremock@vc2crptXXXXXXXn:~/ssh$ cat id_rsa.pub >> authorized_keys
+
+
+
+
+
+

Install an SSH key in Jenkins

+
+
+

To add an SSH key to Jenkins, go to credentials in your job location. Choose the folder within credentials, then 'global credentials', 'Add credentials'. Fill in the fields. Finally, the entry should be created.

+
+
+
+
+

Build a Jenkins Groovy script

+
+
+

The description of how to use SSH Agent plugin in Jenkins pipeline can be found under: https://www.karthikeyan.tech/2017/09/ssh-agent-blue-ocean-via-jenkins.html

+
+
+

Example of use:

+
+
+
+
sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+     sh """
+         ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "docker container restart ${env.WIREMOCK_CONTAINER_NAME}"
+     """
+}
+
+
+
+

Where: env.WIREMOCK_CREDENTIALS is a credential id of previously created wiremock credentials. Now that it is present, we can execute commands on a remote machine, where in ssh command: +env.WIREMOCK_USERNAME - user name of user connected with configured private key +env.WIREMOCK_IP_ADDRESS - ip address of the machine where this user with this private key exists

+
+
+
+
+

Pull repository with virtual assets

+
+
+

To pull the repository on a remote machine, it is necessary to use the previously described SSH Agent plugin. An example of use:

+
+
+
+
sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+withCredentials([usernamePassword(credentialsId: env.STASH_CREDENTIALS, passwordVariable: 'PASS', usernameVariable: 'USER')]) {
+     sh """
+         ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "cd ~/${env.APPLICATION_DIRECTORY_WIREMOCK}/${env.PROJECT_HOME}; git fetch https://$USER:$PASS@${env.GIT_WITHOUT_HTTPS} ${env.GIT_BRANCH}; git reset --hard FETCH_HEAD; git clean -df"
+      """
+    }
+}
+
+
+
+

Where:

+
+
+

withCredentials allows various kinds of credentials (secrets) to be used in idiosyncratic ways. Each binding will define an environment variable active within the scope of the step. Then the necessary commands are executed:

+
+
+

cd …​ - command will change from current directory to the specified directory with git repository

+
+
+

git fetch …​ ;git reset …​ ;git clean …​ - pull from GIT branch. Git pull or checkout are not used here to prevent the situation with wrong coding between Mac OSX/Linux etc.

+
+
+

PLEASE remember that when using this script for the first time, the code from previous block should be changed to:

+
+
+
+
stage("ssh-agent"){
+        sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+            withCredentials([usernamePassword(credentialsId: env.STASH_CREDENTIALS, passwordVariable: 'PASS', usernameVariable: 'USER')]) {
+                sh """
+                        ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "cd ~/${env.APPLICATION_DIRECTORY_WIREMOCK} ;git clone --depth=1 --branch=develop https://$USER:$PASS@${env.GIT_WITHOUT_HTTPS}"
+                """
+    }
+}
+
+
+
+
+
+

Install an application with Smoke environment

+
+ +
+
+
+

Update properties settings file

+
+
+

New settings file is pushed to the repository. Example configuration:

+
+
+
+
...
+   <key>autocomplete</key>
+   <string>http://server:port</string>
+   <key>benefitsummary</key>
+   <string>http://server:port</string>
+   <key>checkscan</key>
+   <string>http://server:port</string>
+   <key>dpesb</key>
+   <string>http://server:port</string>
+...
+
+
+
+

Address of service (backend) should be changed to wiremock address as it is shown on listing to change the default route.

+
+
+
+
+

Build an application with updated properties file

+
+
+

New versions of application are prepared by Jenkins job.

+
+
+
+
+

Install an application on target properties file

+
+
+

Installation of an application is actually executed in a non-automated way using SeeTest environment.

+
+
+
+
+

UI tests

+
+ +
+
+
+

Run Jenkins job

+
+
+

Jenkinsfile:

+
+
+
+
// Jenkins parameters are overriding the properties below
+def properties = [
+
+          JENKINS_LABELS                                 : 'PWI_LINUX_DEV',
+          APPLICATION_FOLDER                             : 'app_dir',
+          PROJECT_HOME                                   : 'app_home_folder',
+
+          //WIREMOCK
+          WIREMOCK_CREDENTIALS                           : 'vc2crptXXXXXXn',
+          WIREMOCK_USERNAME                              : 'wiremock',
+          WIREMOCK_ADDRESS                               : 'http://vc2crptXXXXXXn.xxx.com:8080',
+          WIREMOCK_IP_ADDRESS                            : '10.196.67.XXX',
+          WIREMOCK_CONTAINER_NAME                        : 'wiremock',
+          APPLICATION_DIRECTORY_WIREMOCK                 : 'repo',
+
+          //GIT
+          GIT_CREDENTIALS                                : 'e47742cc-bb66-4321-2341-a2342er24f2',
+          GIT_BRANCH                                     : 'develop',
+          GIT_SSH                                        : 'ssh://git@stash.xxx.com/app/app.git',
+          GIT_HTTPS                                      : 'https://git@stash.xxx.com/app/app.git',
+
+          STASH_CREDENTIALS                              : 'e47742cc-bb66-4321-2341-a2342er24f2',
+
+
+          //DOCKER
+          ARTIFACTORY_USER_CREDENTIALS                   : 'e47742cc-bb66-4321-2341-a2342er24f2',
+          SEETEST_DOCKER_IMAGE                           : 'docker.xxx.com/project/images/app:v1-8.3',
+
+          //SEETEST_DOCKER_IMAGE
+          SEETEST_APPLICATION_FOLDER                     : 'seetest_dir',
+          SEETEST_PROJECT_HOME                           : 'Automated Scripts',
+          SEETEST_GIT_SSH                                : 'ssh://git@stash.xxx.com/pr/seetest_automation_cucumber.git',
+          SEETEST_GIT_BRANCH                             : 'develop',
+          SEETEST_GRID_USER_CREDENTIALS                  : 'e47742cc-bb66-4321-2341-a2342er24f2',
+          SEETEST_CUCUMBER_TAG                           : '@Virtualization',
+          SEETEST_CLOUD_NAME                             : 'Core Group',
+          SEETEST_IOS_VERSION                            : '11',
+          SEETEST_IOS_APP_URL                            : '',
+          SEETEST_INSTALL_APP                            : 'No',
+          SEETEST_APP_ENVIRONMENT                        : 'SmokeTests',
+          SEETEST_DEVICE_QUERY                           : '',
+]
+
+node(properties.JENKINS_LABELS) {
+    try {
+        prepareEnv(properties)
+        gitCheckout()
+        stageStartVirtualServer()
+        stageMapApiRequests()
+        stageInstallApplication()
+        stageUITests()
+     } catch(Exception ex) {
+        currentBuild.result = 'FAILURE'
+        error = 'Error' + ex
+     }
+}
+
+//== == == == == == == == == == == == == == == == == == END OF PIPELINE== == == == == == == == == == == == == == == == == == == == ==
+
+private void prepareEnv(properties) {
+    cleanWorkspace()
+    overrideProperties(properties)
+    setWorkspace()
+}
+
+private void gitCheckout() {
+    dir(env.APPLICATION_FOLDER) {
+        checkout([$class: 'GitSCM', branches: [[name: env.GIT_BRANCH]], doGenerateSubmoduleConfigurations: false, extensions: [[$class: 'CloneOption', depth: 0, noTags: false, reference: '', shallow: false, timeout: 50]], gitTool: 'Default', submoduleCfg: [], userRemoteConfigs: [[credentialsId: env.GIT_CREDENTIALS, url: env.GIT_SSH]]])
+     }
+}
+
+private void stageStartVirtualServer() {
+    def module = load "${env.SUBMODULES_DIR}/stageStartVirtualServer.groovy"
+    module()
+}
+
+private void stageMapApiRequests() {
+    def module = load "${env.SUBMODULES_DIR}/stageMapApiRequests.groovy"
+    module()
+}
+
+private void stageInstallApplication() {
+    def module = load "${env.SUBMODULES_DIR}/stageInstallApplication.groovy"
+    module()
+}
+
+private void stageUITests() {
+    def module = load "${env.SUBMODULES_DIR}/stageUITests.groovy"
+    module()
+}
+
+private void setWorkspace() {
+    String workspace = pwd()
+    env.APPLICATION_DIRECTORY = "/${env.APPLICATION_DIRECTORY}"
+    env.WORKSPACE_LOCAL = workspace + env.APPLICATION_DIRECTORY
+    env.SEETEST_PROJECT_HOME_ABSOLUTE_PATH = "${workspace}/${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}"
+    env.SUBMODULES_DIR = env.WORKSPACE_LOCAL + "/pipelines/SmokeTests.submodules"
+    env.COMMONS_DIR    = env.WORKSPACE_LOCAL + "/pipelines/commons"
+}
+
+/*
+    function overrides env values based on provided properties
+*/
+private void overrideProperties(properties) {
+    for (param in properties) {
+        if (env.(param.key) ==  null) {
+           echo "Adding parameter '${param.key}' with default value: '${param.value}'"
+           env.(param.key) = param.value
+        } else {
+           echo "Parameter '${param.key}' has overriden value: '${env.(param.key)}'"
+        }
+     }
+
+     echo sh(script: "env | sort", returnStdout: true)
+}
+
+private void cleanWorkspace() {
+   sh 'rm -rf *'
+}
+
+
+
+

stageStartVirtualServer.groovy:

+
+
+
+
def call () {
+    stage("Check virtual server") {
+        def statusCode
+
+        try {
+            def response = httpRequest "${env.WIREMOCK_ADDRESS}/__admin/"
+            statusCode = response.status
+        } catch(Exception ex) {
+            currentBuild.result = 'FAILURE'
+            error 'WireMock server is unreachable.'
+        }
+
+        if(statusCode !=200) {
+            currentBuild.result = 'FAILURE'
+            error "WireMock server is unreachable. Return code: ${statusCode}"
+        }
+    }
+}
+
+
+
+

stageMapApiRequests.groovy:

+
+
+
+
def call() {
+    stage("Map API requests with virtual assets") {
+        checkoutRepository()
+        restartWiremock()
+        checkWiremockStatus()
+     }
+}
+
+private checkoutRepository() {
+    extractHTTPSUrl()
+    sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+        withCredentials([usernamePassword(credentialsId: env.STASH_CREDENTIALS, passwordVariable: 'PASS', usernameVariable: 'USER')]) {
+            sh """
+                ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "cd ~/${env.APPLICATION_DIRECTORY_WIREMOCK}/${env.PROJECT_HOME}; git fetch https://$USER:$PASS@${env.GIT_WITHOUT_HTTPS} ${env.GIT_BRANCH}; git reset --hard FETCH_HEAD; git clean -df"
+             """
+         }
+     }
+}
+
+private restartWiremock() {
+    sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+            sh """
+                ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "docker container restart ${env.WIREMOCK_CONTAINER_NAME}"
+             """
+     }
+}
+
+private checkWiremockStatus() {
+    int wiremockStatusCheckCounter =6
+    int sleepTimeInSeconds = 10
+    def wiremockStatus
+
+    for (i = 0; i < wiremockStatusCheckCounter; i++) {
+         try {
+             wiremockStatus = getHttpRequestStatus()
+             echo "WireMock server status code: ${wiremockStatus}"
+         } catch(Exception ex) {
+             echo "Exception when checking connection to WireMock"
+         }
+         if(wiremockStatus ==  200) break
+         else sh "sleep ${sleepTimeInSeconds}"
+      }
+
+      if(wiremockStatus != 200) {
+          currentBuild.result = 'FAILURE'
+          error "WireMock server is unreachable. Return code: ${wiremockStatus}"
+      }
+}
+
+private def getHttpRequestStatus() {
+    def response = httpRequest "${env.WIREMOCK_ADDRESS}/__admin"
+    return response.status
+}
+
+private extractHTTPSUrl() {
+    env.GIT_WITHOUT_HTTPS = env.GIT_HTTPS.replace("https://", "")
+}
+
+return this
+
+
+
+

stageInstallApplication.groovy:

+
+
+
+
def call() {
+    stage('Install application with smoke tests environment') {
+        dir(env.SEETEST_APPLICATION_FOLDER) {
+            // Fix: the checkout() argument list was missing a closing "]" after userRemoteConfigs
+            checkout([$class: 'GitSCM', branches: [[name: env.SEETEST_GIT_BRANCH]], doGenerateSubmoduleConfigurations: false, extensions: [], gitTool: 'default', submoduleCfg: [], userRemoteConfigs: [[credentialsId: env.GIT_CREDENTIALS, url: env.SEETEST_GIT_SSH]]])
+        }
+     }
+}
+
+return this
+
+
+
+

stageUITests.groovy:

+
+
+
+
def call() {
+    stage('UI tests') {
+        def utils = load "${env.SUBMODULES_DIR}/utils.groovy"
+
+        try {
+            utils.generateUserIDVariable() //Generate USER_ID and USER_GROUP
+            docker.image(env.SEETEST_DOCKER_IMAGE).inside("-u ${env.USER_ID}:${env.USER_GROUP}") {
+                // Fix: 'ARTIFACTORY_USERNAME was missing its closing quote
+                withCredentials([[$class: 'UsernamePasswordMultiBinding', credentialsId: "${env.ARTIFACTORY_USER_CREDENTIALS}", passwordVariable: 'ARTIFACTORY_PASSWORD', usernameVariable: 'ARTIFACTORY_USERNAME']]) {
+                    executeTests()
+                    compressArtifacts()
+                    publishJUnitTestResultReport()
+                    archiveArtifacts()
+                    publishHTMLReports()
+                    publishCucumberReports()
+                }
+            }
+        } catch (Exception exc) {
+            throw exc
+        }
+   }
+}
+
+// Runs the SeeTest grid Maven build. Fixes: -DdeviceQuery was missing its closing quote
+// and -DiosVersion used "$env....}" instead of "${env....}".
+private executeTests() {
+    withCredentials([usernamePassword(credentialsId: env.SEETEST_GRID_USER_CREDENTIALS, passwordVariable: 'GRID_USER_PASSWORD', usernameVariable: 'GRID_USER_NAME')]) {
+            sh """
+                cd ${env.SEETEST_PROJECT_HOME_ABSOLUTE_PATH}
+                mvn clean test -B -Ddriver="grid" -Dtags="${env.SEETEST_CUCUMBER_TAG}" -DcloudName="${env.SEETEST_CLOUD_NAME}" -DdeviceQuery="${env.SEETEST_DEVICE_QUERY}" -DgridUser="${GRID_USER_NAME}" -DgridPassword="${GRID_USER_PASSWORD}" -Dinstall="${env.SEETEST_INSTALL_APP}" -DiosUrl="${env.SEETEST_IOS_APP_URL}" -DdeviceType="iPhone" -DiosVersion="${env.SEETEST_IOS_VERSION}" -DparallelMode="allonall" -Denv="${env.SEETEST_APP_ENVIRONMENT}" site
+             """
+     }
+}
+
+// Fixes: renamed "compressartifacts" to match the call site compressArtifacts() above;
+// "SEETEST_PROJECT_homE" env-var typo corrected; missing closing brace added.
+private compressArtifacts() {
+    echo "Compressing artifacts from /target/site"
+    sh "zip -r allure_report.zip **/${env.SEETEST_PROJECT_HOME}/target/site"
+}
+// Publishes JUnit surefire reports; tolerates runs that produced no report.
+private publishJUnitTestResultReport() {
+    def reportPattern = "${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/surefire-reports/junitreporters/*.xml"
+    echo "Publishing JUnit reports from ${reportPattern}"
+    try {
+        junit reportPattern
+    } catch (ignored) {
+        echo "No JUnit report found"
+    }
+}
+
+// Archives the zipped Allure report; tolerates runs that produced no artifacts.
+private archiveArtifacts() {
+    echo "Archiving artifacts"
+
+    try {
+        // NOTE(review): this wrapper shares its name with the Jenkins 'archiveArtifacts' step it invokes here - confirm Groovy dispatches this Map-argument call to the pipeline step rather than recursively to this no-arg method
+        archiveArtifacts allowEmptyArchive: true, artifacts: "**/allure_report.zip"
+    } catch(e) {
+        echo("No artifacts found")
+    }
+}
+
+// Publishes the Allure HTML report. Fix: "${env.SEETEST_APPLICATION_FOLDER/" was missing its closing "}".
+private publishHTMLReports() {
+    echo "Publishing HTML reports from ${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/site/allure-maven-plugin"
+
+    try {
+        publishHTML([allowMissing: false, alwaysLinkToLastBuild: true, keepAll: true, reportDir: "${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/site/allure-maven-plugin", reportFiles: 'index.html', reportName: 'Allure report', reportTitles: 'Allure report'])
+    } catch(e) {
+        echo("No artifacts found")
+    }
+}
+
+// Fixes: renamed "publishCucumberREPORTS" to match the call site publishCucumberReports() above;
+// "fileExcludePattern ''" was missing its colon; "#{env...}" used the wrong interpolation sigil.
+private publishCucumberReports() {
+    echo "Publishing Cucumber reports from ${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/cucumber-parallel/*.json"
+
+    try {
+        step([$class: 'CucumberReportPublisher', fileExcludePattern: '', fileIncludePattern: "${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/cucumber-parallel/*.json", ignoreFailedTests: false, jenkinsBasePath: '', jsonReportDirectory: '', missingFails: false, parallelTesting: false, pendingFails: false, skippedFails: false, undefinedFails: false])
+    } catch(e) {
+        echo("No Cucumber report found")
+    }
+}
+
+return this
+
+
+
+

Configuration

+
+
+

It is possible to configure Jenkins job in two ways. First one is to edit the Jenkinsfile. All of the properties are in properties collection as below:

+
+
+
+
def properties = [
+
+          JENKINS_LABELS                                : 'PWI_LINUX_DEV'
+
+          ...
+
+          //Docker
+          ARTIFACTORY_USER_CREDENTIALS                  : 'ba2e4f46-56f1-4467-ae97-17b356d6s643',
+          SEETEST_DOCKER_IMAGE                          : 'docker.XXX.com/app/base-images/seetest:v1-8.3',
+
+          //SeeTest
+          SEETEST_APPLICATION_FOLDER                    : 'seetest_dit',
+          SEETEST_PROJECT_HOME                          : 'Automated_Scripts',
+          SEETEST_GIT_SSH                               : 'ssh://stash.xxx.com/app/seetest_automation_cucumber.git',
+          SEETEST_GIT_BRANCH                            : 'develop',
+
+          ...
+]
+
+
+
+

The second way is to add properties in 'Configure job'. All of the properties there override the properties from the Jenkinsfile (they have the highest priority). They can then be set during the 'Build with Parameters' process.

+
+
+

Reports

+
+
+

After a job execution, the 'Allure report' and 'Cucumber-JVM' reports should be visible. If any tests fail, you can check on which screen they failed and why (a screenshot of each failure is attached).

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module-What-is-service-virtualization.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module-What-is-service-virtualization.html new file mode 100644 index 00000000..b760e01d --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module-What-is-service-virtualization.html @@ -0,0 +1,351 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Is it doable to keep pace in QA with today’s software agile approach?

+
+
+

DevOps + Microservices + Shift left + Time to Market == ? Service virtualization ?

+
+
+
+image72 +
+
+
+

Test pyramid

+
+
+
+image73 +
+
+
+
+
+

What is service virtualization

+
+
+

Service Virtualization has become recognized as one of the best ways to speed up testing and accelerate your time to market.

+
+
+

Service virtualization lets you automatically execute tests even when the application under test’s dependent system components (APIs, third-party applications, etc.) cannot be properly accessed or configured for testing. By simulating these dependencies, you can ensure that your tests will encounter the appropriate dependency behaviour and data each and every time that they execute.

+
+
+

Service virtualization is the simulation of interfaces – not the virtualization of systems.

+
+
+

According to Wikipedia’s service virtualization entry: Service virtualization emulates the behaviour of software components to remove dependency constraints on development and testing teams. Such constraints occur in complex, interdependent environments when a component connected to the application under test is:

+
+
+
    +
  • +

    Not yet completed

    +
  • +
  • +

    Still evolving

    +
  • +
  • +

    Controlled by a third-party or partner

    +
  • +
  • +

    Available for testing only in a limited capacity or at inconvenient times

    +
  • +
  • +

    Difficult to provision or configure in a test environment

    +
  • +
  • +

    Needed for simultaneous access by different teams with varied test data setup and other requirements

    +
  • +
  • +

    Restricted or costly to use for load and performance testing

    +
  • +
+
+
+

For instance, instead of virtualizing an entire database (and performing all associated test data management as well as setting up the database for every test session), you monitor how the application interacts with the database, then you emulate the related database behaviour (the SQL queries that are passed to the database, the corresponding result sets that are returned, and so forth).

+
+
+
+
+

Mocks, stubs and virtual services

+
+
+

The most commonly discussed categories of test doubles are mocks, stubs and virtual services.

+
+
+

Stub: a minimal implementation of an interface that normally returns hardcoded data that is tightly coupled to the test suite. It is most useful when the suite of tests is simple and keeping the hardcoded data in the stub is not an issue. Some stubs are handwritten; some can be generated by tools. A stub is normally written by a developer for personal use. It can be shared with testers, but wider sharing is typically limited by interoperability issues related to software platform and deployment infrastructure dependencies that were hardcoded. A common practice is when a stub works in-process directly with classes, methods, and functions for the unit, module, and acceptance testing. Some developers will say that a stub can also be primed, but you cannot verify an invocation on a stub. Stubs can also be communicating "over the wire", for example, HTTP, but some would argue that they should be called virtual services in that case.

+
+
+

Mock: a programmable interface observer, that verifies outputs against expectations defined by the test. It is frequently created using a third party library, for example in Java that is Mockito, JMock or WireMock. It is most useful when you have a large suite of tests and a stub will not be sufficient because each test needs a different data set up and maintaining them in a stub would be costly. The mock lets us keep the data set-up in the test. A mock is normally written by a developer for personal use but it can be shared with testers. However, wider sharing is typically limited by interoperability issues related to software platform and deployment infrastructure dependencies that were hardcoded. They are most often work-in-progress directly with classes, methods, and functions for a unit, module, and acceptance testing. Mock provides responses based on a given request satisfying predefined criteria (also called request or parameter matching). A mock also focuses on interactions rather than state so mocks are usually stateful. For example, you can verify how many times a given method was called or the order of calls made to a given object.

+
+
+

Virtual service: a test double often provided as a Software-as-a-Service (SaaS), is always called remotely, and is never working in-process directly with methods or functions. A virtual service is often created by recording traffic using one of the service virtualization platforms instead of building the interaction pattern from scratch based on interface or API documentation. A virtual service can be used to establish a common ground for teams to communicate and facilitate artefact sharing with other development teams as well as testing teams. A virtual service is called remotely (over HTTP, TCP, etc.) normally supports multiple protocols (e.g. HTTP, MQ, TCP, etc.), while a stub or mock normally supports only one. Sometimes virtual services will require users to authorize, especially when deployed in environments with enterprise-wide visibility. Service virtualization tools used to create virtual services will most often have user interfaces that allow less tech-savvy software testers to hit the ground running, before diving into the details of how specific protocols work. They are sometimes backed by a database. They can also simulate non-functional characteristics of systems such as response times or slow connections. You can sometimes find virtual services that provide a set of stubbed responses for given request criteria and pass every other request to a live backend system (partial stubbing). Similar to mocks, virtual services can have quite complex request matchers, that allow having one response returned for many different types of requests. Sometimes, virtual services simulate system behaviours by constructing parts of the response based on request attributes and data.

+
+
+

It is often difficult to say definitely which of the following categories a test double fits into. They should be treated as a spectrum rather than strict definitions.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module.html new file mode 100644 index 00000000..301aca98 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module.html @@ -0,0 +1,292 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ + +
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Stages.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Stages.html new file mode 100644 index 00000000..287002ad --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Test-Stages.html @@ -0,0 +1,313 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Test stages

+
+ +
+
+
+

Unit test

+
+
+

A module is the smallest compilable unit of source code. It is often too small to be tested by the functional tests (black-box tests). However, it is the appropriate candidate for white-box testing. White-box tests have to be performed as the first static tests (e.g. Lint and inspections), followed by dynamic tests in order to check boundaries, branches and paths. Usually, this kind of testing would require enabling stubs and special test tools.

+
+
+
+
+

Component test

+
+
+

This is the black-box test of modules or groups of modules which represent certain functionalities. There are no rules about what could be called a component. Whatever a tester defines as a component, should make sense and be a testable unit. Components can be integrated into bigger components step by step and tested as such.

+
+
+
+
+

Integration test

+
+
+

Functions are tested by feeding them input and examining the output, and internal program structure is rarely considered. The software is completed step by step and tested by tests covering a collaboration between modules or classes. The integration depends on the kind of system. For example, the steps could be as follows: run the operating system first and gradually add one component after another, then check if the black-box tests are still running (the test cases will be extended together with every added component). The integration is done in the laboratory. It may be also completed by using simulators or emulators. Additionally, the input signals could be stimulated.

+
+
+
+
+

Software / System test

+
+
+

System testing is a type of testing conducted on a complete integrated system to evaluate the system’s compliance with its specified requirements. This is a type of black-box testing of the complete software in the target system. The most important factor in successful system testing is that the environmental conditions for the software have to be as realistic as possible (complete original hardware in the destination environment).

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-01-AB-Test-Control.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-01-AB-Test-Control.html new file mode 100644 index 00000000..c1c8bcbd --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-01-AB-Test-Control.html @@ -0,0 +1,609 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example1 +
+
+
+

The goal of this test is to open A/B Test subpage and redirect to another website.

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click A/B Testing link and go to A/B Test subpage

    +
  4. +
  5. +

    Click Elemental Selenium link and open it in new tab

    +
  6. +
  7. +

    Switch to Elemental Selenium page and check if it’s loaded

    +
  8. +
+
+
+
+example2 +
+
+
+

== Page Class

+
+
+

Create a Page class for AB Testing page. Override all the required methods:

+
+
+
+
 public class ABtestPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.ABTEST.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'A/B Test Control' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.ABTEST.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+ }
+
+
+
+
+
+

== How to use Enum?

+
+
+

Similarly as in environmental variables case, create an enum for storing values of subURLs:

+
+
+
+
 public enum PageSubURLsProjectYEnum {
+
+    BASIC_AUTH("basic_auth"),
+    NEW_WINDOW("windows/new"),
+    WINDOW("windows"),
+    CHECKBOX("checkboxes"),
+    CONTEXT_MENU("context_menu"),
+    KEY_PRESS("key_presses"),
+    DYNAMIC_CONTENT("dynamic_content"),
+    HOVERS("hovers"),
+    SORTABLE_DATA_TABLES("tables"),
+    REDIRECT("redirector"),
+    JAVASCRIPT_ALERTS("javascript_alerts"),
+    CHALLENGING_DOM("challenging_dom"),
+    STATUS_CODES("status_codes"),
+    LOGIN("login"),
+    ABTEST("abtest"),
+    BROKEN_IMAGES("broken_images"),
+    DROPDOWN("dropdown"),
+    HORIZONTAL_SLIDER("horizontal_slider"),
+    DOWNLOAD("download"),
+    FORGOT_PASSWORD("forgot_password"),
+    FORGOT_PASSWORD_EMAIL_SENT("email_sent"),
+    EXIT_INTENT("exit_intent"),
+    DYNAMIC_LOADING("dynamic_loading"),
+    DISAPPEARING_ELEMENTS("disappearing_elements"),
+    DRAG_AND_DROP("drag_and_drop"),
+    DYNAMIC_CONTROLS("dynamic_controls"),
+    UPLOAD("upload"),
+    FLOATING_MENU("floating_menu"),
+    FRAMES("frames"),
+    GEOLOCATION("geolocation"),
+    INFINITE_SCROLL("infinite_scroll"),
+    JQUERY_UI("jqueryui/menu"),
+    JAVASCRIPT_ERROR("javascript_error"),
+    LARGE_AND_DEEP_DOM("large"),
+    NESTED_FRAMES("nested_frames"),
+    NOTIFICATION_MESSAGE("notification_message"),
+    DOWNLOAD_SECURE("download_secure"),
+    SHIFTING_CONTENT("shifting_content"),
+    SLOW_RESOURCES("slow"),
+    TYPOS("typos"),
+    WYSIWYGEDITOR("tinymce");
+
+    /*
+     * Sub URLs are used as real locations in the test environment
+     */
+    private String subURL;
+
+    private PageSubURLsProjectYEnum(String subURL) {
+        this.subURL = subURL;
+    }
+
+    ;
+
+    private PageSubURLsProjectYEnum() {
+
+    }
+
+    @Override
+    public String toString() {
+        return getValue();
+    }
+
+    public String getValue() {
+        return subURL;
+    }
+
+}
+
+
+
+

Instead of mapping data from an external file, you can store and access them directly from the enum class:

+
+
+
+
PageSubURLsProjectYEnum.ABTEST.getValue()
+
+
+
+
+
+

== Selector

+
+
+

In this test case you need selector for only one page element:

+
+
+
+
private static final By elementalSeleniumLinkSelector = By.cssSelector("div > div > a");
+
+
+
+
+
+

== Page methods

+
+
+

You need two methods for performing page actions:

+
+
+
+
     /**
+     * Clicks 'Elemental Selenium' link at the bottom of the page.
+     *
+     * @return ElementalSeleniumPage object.
+     */
+    public ElementalSeleniumPage clickElementalSeleniumLink() {
+        getDriver().findElementDynamic(elementalSeleniumLinkSelector)
+                .click();
+        getDriver().waitForPageLoaded();
+        return new ElementalSeleniumPage();
+    }
+
+    /**
+     * Switches window to the next one - different than the current.
+     */
+    public void switchToNextTab() {
+        ArrayList<String> tabsList = new ArrayList<String>(getDriver().getWindowHandles());
+        getDriver().switchTo()
+                .window(tabsList.get(1));
+    }
+
+
+
+
+
+

== Elemental Selenium Page Class

+
+
+

To return new Elemental Selenium Page object, implement its class. You only need to write basic methods to check if the page is loaded. There is no need to interact with objects on the site:

+
+
+
+
 public class ElementalSeleniumPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(GetEnvironmentParam.ELEMENTAL_SELENIUM_PAGE.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Elemental Selenium' page.");
+        getDriver().get(GetEnvironmentParam.ELEMENTAL_SELENIUM_PAGE.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+}
+
+
+
+
+
+

== Test Class

+
+
+

Create a Test class and write a @Test method to execute the scenario:

+
+
+
+
 @Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class ABtestingTest extends TheInternetBaseTest {
+
+    private static ABtestPage abTestPage;
+
+    @Test
+    public void shouldOpenElementalSeleniumPageWhenClickElementalSeleniumLink() {
+
+        logStep("Click Elemental Selenium link");
+        ElementalSeleniumPage elementalSeleniumPage = abTestPage.clickElementalSeleniumLink();
+
+        logStep("Switch browser's tab to newly opened one");
+        abTestPage.switchToNextTab();
+
+        logStep("Verify if Elemental Selenium Page is opened");
+        assertTrue("Unable to open Elemental Selenium page", elementalSeleniumPage.isLoaded());
+    }
+
+}
+
+
+
+
+
+

== Assert

+
+
+

Asserts methods are used for creating test pass or fail conditions. The optional first parameter is a message which will be displayed in the test failure description.

+
+
+
    +
  • +

    assertTrue(boolean condition) - test passes if condition returns true

    +
  • +
  • +

    assertFalse(boolean condition) - test passes if condition returns false

    +
  • +
+
+
+

Also, add the @BeforeClass method to open the tested page:

+
+
+
+
 @BeforeClass
+    public static void setUpBeforeClass() {
+        abTestPage = shouldTheInternetPageBeOpened().clickABtestingLink();
+        logStep("Verify if ABTest page is opened");
+        assertTrue("Unable to open ABTest page", abTestPage.isLoaded());
+    }
+
+
+
+

@BeforeClass method executes only once before all other +@Test cases in the class. There is also a possibility to create a +@AfterClass method which is performed also once after all @Test cases.

+
+
+

You don’t need to implement @setUp and @tearDown methods because they’re already in TheInternetBaseTest class which you extend.

+
+
+
+
+

== Categories

+
+
+

You can group tests in categories. It’s useful when running many tests at once. Use this parameter:

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+
+
+
+

Then create an interface representing each category. Example:

+
+
+
+
public interface TestsSelenium {
+    /* For test which are testing web pages considering UI (user interface) and using selenium webdriver */
+}
+
+
+
+

To run a test from specified category create Test Suite class:

+
+
+
+
@RunWith(WildcardPatternSuite.class) //search for test files under /src/test/java
+@IncludeCategories({ TestsChrome.class }) // search all test files with category TestsChrome.class
+@ExcludeCategories({ TestsLocal.class, TestsNONParallel.class }) //exclude all test files with category TestsLocal.class and TestsNONParallel.class
+@SuiteClasses({ "../**/*Test.class" }) //search only test files, where file name ends with <anyChar/s>Test.class
+
+public class _TestSuiteChrome {
+
+}
+
+
+
+

You can run a Test Suite as a JUnit test.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-02-Basic-Auth-Test.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-02-Basic-Auth-Test.html new file mode 100644 index 00000000..1d21d566 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-02-Basic-Auth-Test.html @@ -0,0 +1,516 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example3 +
+
+
+

In this test case, the goal is to pass username and password authorization and login to the next page.

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click on Basic Auth link

    +
  4. +
  5. +

    Open pop-up login window

    +
  6. +
  7. +

    Enter valid username and password

    +
  8. +
  9. +

    Open next subpage and verify if the user logged in successfully.

    +
  10. +
+
+
+

== Page Class

+
+
+

Create a page class which represents Basic Auth subpage after proper login.

+
+
+
+example4 +
+
+
+

Override all the required methods:

+
+
+
+
public class BasicAuthPage extends BasePage {
+
+    public BasicAuthPage() {
+
+    }
+
+    public BasicAuthPage(String login, String password) {
+        this.enterLoginAndPasswordByUrl(login, password);
+    }
+
+    @Override
+    public boolean isLoaded() {
+        return true;
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("load");
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+
+
+

In order to verify a login, create a selector to access the visible message.

+
+
+
+
 private static final By selectorTextMessage = By.cssSelector("#content > div > p");
+Then create a method to get message value:
+
+/**
+*       Returns message displayed by system after user's log in.
+*      @return String object representing message displayed by system after user's log in
+*/
+    public String getMessageValue() {
+                return getDriver().findElementDynamic(selectorTextMessage)
+                    .getText();
+}
+
+
+
+

Also, create a method to access the pop-up login window and enter user credentials:

+
+
+
+
    /**
+     * Authenticates user using standard simple authentication popup.
+     *
+     * @param login    User's login
+     * @param password User's password
+     * @throws AWTException
+     * @throws InterruptedException
+     */
+    public void enterLoginAndPassword(String login, String password) throws AWTException, InterruptedException {
+        Robot rb = new Robot();
+
+        Thread.sleep(2000);
+
+        StringSelection username = new StringSelection(login);
+        Toolkit.getDefaultToolkit()
+                .getSystemClipboard()
+                .setContents(username, null);
+        rb.keyPress(KeyEvent.VK_CONTROL);
+        rb.keyPress(KeyEvent.VK_V);
+        rb.keyRelease(KeyEvent.VK_V);
+        rb.keyRelease(KeyEvent.VK_CONTROL);
+
+        rb.keyPress(KeyEvent.VK_TAB);
+        rb.keyRelease(KeyEvent.VK_TAB);
+        Thread.sleep(2000);
+
+        StringSelection pwd = new StringSelection(password);
+        Toolkit.getDefaultToolkit()
+                .getSystemClipboard()
+                .setContents(pwd, null);
+        rb.keyPress(KeyEvent.VK_CONTROL);
+        rb.keyPress(KeyEvent.VK_V);
+        rb.keyRelease(KeyEvent.VK_V);
+        rb.keyRelease(KeyEvent.VK_CONTROL);
+
+        rb.keyPress(KeyEvent.VK_ENTER);
+        rb.keyRelease(KeyEvent.VK_ENTER);
+        Thread.sleep(2000);
+    }
+
+
+
+
+
+

== Robot class

+
+
+

Creating a Robot object allows performing basic system actions such as pressing keys, moving the mouse or taking screenshots. In this case, it’s used to paste login and password text from the clipboard using 'Ctrl + V' shortcut, go to the next field using 'Tab' key and submit by clicking 'Enter'.

+
+
+
+
+

Toolkit

+
+
+

Static class Toolkit can perform basic window actions such as scrolling to a specified position or moving context between components. In this case, it’s used to set clipboard content to username and password value.

+
+
+
+
Thread.sleep(long millis)
+
+
+
+

Web drivers like Selenium perform actions much faster than the normal user. This may cause unexpected consequences e.g. some elements may not be loaded before the driver wants to access them. To avoid this problem you can use Thread.sleep(long millis) to wait given time and let browser load wanted component.

+
+
+

BEWARE: Using Thread.sleep(long millis) is not the recommended approach. Selenium driver gives methods to wait for a specified element to be enabled or visible with a timeout parameter. This is a more stable and effective way. Also, method waitForPageLoaded() will not solve that issue because it only waits for the ready state from the browser while some javascript actions might be performed after that.

+
+
+
+
+

== Test Class

+
+
+

Create a Test class and write a @Test method to execute the scenario. Save parameters as class fields:

+
+
+
+
@Category({ TestsLocal.class, TestsNONParallel.class })
+public class BasicAuthTest extends TheInternetBaseTest {
+
+    private static BasicAuthPage basicAuthPage;
+
+    private String login    = "admin";
+    private String password = "admin";
+    private String message  = "Congratulations! You must have the proper credentials.";
+
+    @Test
+    public void shouldUserLogInWithValidCredentials() throws InterruptedException, AWTException {
+        basicAuthPage = shouldTheInternetPageBeOpened().clickBasicAuthLink();
+
+        logStep("Enter login and password");
+        basicAuthPage.enterLoginAndPassword(login, password);
+
+        logStep("Verify if user logged in successfully");
+        assertEquals("Unable to login user with valid credentials", message,
+            basicAuthPage.getMessageValue());
+    }
+
+    @Override
+    public void tearDown() {
+        logStep("Navigate back to The-Internet page");
+        theInternetPage.load();
+    }
+}
+
+
+
+

assertEquals(Object expected, Object actual) - test passes if parameters are equal .

+
+
+
+
+

== Alternative scenario:

+
+
+

There is also a possibility to log in with credentials as a part of URL: http://login:password@the-internet.herokuapp.com/basic_auth

+
+
+

Another page class method:

+
+
+
+
/**
+     * Authenticates user passing credentials into URL.
+     *
+     * @param login    User's login
+     * @param password User's password
+     */
+    private void enterLoginAndPasswordByUrl(String login, String password) {
+        getDriver().get("http://" + login + ":" + password + "@" + "the-internet.herokuapp.com/" +
+            PageSubURLsProjectYEnum.BASIC_AUTH.getValue());
+    }
+
+
+
+

Another test class method:

+
+
+
+
@Test
+    public void shouldUserLogInWithValidCredentialsSetInURL() {
+        logStep("Enter user's credentials into URL to log in");
+        basicAuthPage = new BasicAuthPage(login, password);
+
+        logStep("Verify if user logged in successfully");
+        assertEquals("Unable to login user with valid credentials", message,
+            basicAuthPage.getMessageValue());
+    }
+
+
+
+

After running test class as a JUnit test, both test cases will be performed.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-03-Broken-Images-Test.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-03-Broken-Images-Test.html new file mode 100644 index 00000000..e980f7fb --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-03-Broken-Images-Test.html @@ -0,0 +1,396 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

This test goal is to check the dimensions of broken images on the subpage.

+
+
+
+example5 +
+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Broken Image link and go to Broken Image subpage

    +
  4. +
  5. +

    Get the 3 images' dimensions and compare them with expected values

    +
  6. +
+
+
+

== Page Class

+
+
+

In this case, create an array of selectors to access images by index number:

+
+
+
+
public class BrokenImagePage extends BasePage {
+
+    private static final By[] selectorsImages = { By.cssSelector("div > img:nth-child(2)"),
+            By.cssSelector("div > img:nth-child(3)"),
+            By.cssSelector("div > img:nth-child(4)") };
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.BROKEN_IMAGES.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Broken Images' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.BROKEN_IMAGES.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns an image height in pixels.
+     *
+     * @param imageIndex An index of given image.
+     * @return Height of an image in pixels.
+     */
+    public int getImageHeight(int imageIndex) {
+        return getImageDimension(imageIndex).getHeight();
+    }
+
+    /**
+     * Returns an image width in pixels.
+     *
+     * @param imageIndex An index of given image.
+     * @return Width of an image in pixels.
+     */
+    public int getImageWidth(int imageIndex) {
+        return getImageDimension(imageIndex).getWidth();
+    }
+
+    private Dimension getImageDimension(int imageIndex) {
+        return getDriver().findElementDynamic(selectorsImages[imageIndex])
+                .getSize();
+    }
+
+}
+
+
+
+
+
+

== Test Class

+
+
+

Create @Test and @BeforeClass methods. Save expected images' dimensions in class fields:

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class BrokenImagesTest extends TheInternetBaseTest {
+
+    private static BrokenImagePage brokenImagePage;
+
+    private final int expectedHeight = 90;
+    private final int expectedWidth  = 120;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        brokenImagePage = shouldTheInternetPageBeOpened().clickBrokenImageLink();
+
+        logStep("Verify if Broken Image page is opened");
+        assertTrue("Unable to open Broken Image page", brokenImagePage.isLoaded());
+    }
+
+    @Test
+    public void shouldImageSizesBeEqualToExpected() {
+        for (int i = 0; i < 3; i++) {
+            logStep("Verify size of image with index: " + i);
+            assertEquals("Height of image with index: " + i + " is incorrect", expectedHeight,
+                   brokenImagePage.getImageHeight(i));
+            assertEquals("Width of image with index: " + i + " is incorrect", expectedWidth,
+                   brokenImagePage.getImageWidth(i));
+        }
+    }
+
+}
+
+
+
+

The test will pass if every image has the correct width and height.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-04-Challenging-DOM-Test.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-04-Challenging-DOM-Test.html new file mode 100644 index 00000000..18f27a5c --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-04-Challenging-DOM-Test.html @@ -0,0 +1,419 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

This case goal is to find out how to create stable selectors.

+
+
+

In the browser’s developer mode, you can see how the page is built. Notice, that buttons' IDs change after click and values in the table haven’t got unique attributes, which might be helpful in order to find them.

+
+
+
+example6 +
+
+
+

== DOM - Document Object Model

+
+
+

HTML DOM is a model of the page created by the browser. The page could be represented as the tree of objects. Read more.

+
+
+

To create locators you can use element attributes such as id, class name etc.

+
+
+

In this case, since there are no unique attributes, the best approach is to use the HTML document structure and identify page elements by their place in the object hierarchy.

+
+
+
+
Page Class
+public class ChallengingDomPage extends BasePage {
+
+    private final By selectorTableRows   = By.cssSelector(".large-10 > table > tbody > tr");
+    private final By selectorFirstButton = By.cssSelector(".large-2.columns > .button:nth-
+            child(1)");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.CHALLENGING_DOM.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Challenging DOM' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.CHALLENGING_DOM.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns table text content as a list of String objects.
+     *
+     * @return A list of table values.
+     */
+    public List<String> getTableValues() {
+        return JsoupHelper.findTexts(selectorTableRows);
+    }
+
+    /**
+     * Clicks top button on the page from available button set.
+     */
+    public void clickFirstButton() {
+        getDriver().elementButton(selectorFirstButton)
+                .click();
+        getDriver().waitForPageLoaded();
+    }
+
+}
+
+
+
+
+
+

== Jsoup Helper

+
+
+

Jsoup Helper is the tool which helps to parse HTML document and get searched values. This is especially useful when values are organized in a generic structure such as a table.

+
+
+

JsoupHelper.findTexts(By selector) - this method returns text content of a table as a list of Strings

+
+
+
+
+

== Test Class

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Challenging DOM link and go to Challenging DOM subpage

    +
  4. +
  5. +

    Get and save table values

    +
  6. +
  7. +

    Click the first button

    +
  8. +
  9. +

    Get table values again

    +
  10. +
  11. +

    Compare table values before and after button click

    +
  12. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class ChallengingDomTest extends TheInternetBaseTest {
+
+    private static ChallengingDomPage challengingDomPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        challengingDomPage = shouldTheInternetPageBeOpened().clickChallengingDomLink();
+
+        logStep("Verify if Challenging Dom page is opened");
+        assertTrue("Unable to open Challenging Dom page", challengingDomPage.isLoaded());
+    }
+
+    @Test
+    public void shouldValuesInTableCellsStayUnchangedAfterClick() {
+
+        logStep("Get table values (before click any button)");
+        List<String> tableValuesBeforeClick = challengingDomPage.getTableValues();
+
+        logStep("Click first button");
+        challengingDomPage.clickFirstButton();
+
+        logStep("Get table values (after click first button)");
+        List<String> tableValuesAfterClick = challengingDomPage.getTableValues();
+
+        logStep("Verify equality of table values before and after click");
+        assertEquals("Values from table cells were changed after click", tableValuesBeforeClick,
+                tableValuesAfterClick);
+    }
+
+}
+
+
+
+

Because values in the table don’t change, the test should pass if object locators are solid.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-05-Checkboxes.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-05-Checkboxes.html new file mode 100644 index 00000000..0e5327ab --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-05-Checkboxes.html @@ -0,0 +1,445 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

In this example, you will learn how to test checkboxes on the page.

+
+
+
+example7 +
+
+
+

A checkbox is a simple web element which can be selected or unselected by clicking on it.

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Checkboxes link and go to Checkboxes page

    +
  4. +
  5. +

    Test if the first checkbox is unchecked

    +
  6. +
  7. +

    Select the first checkbox

    +
  8. +
  9. +

    Test if the first checkbox is checked

    +
  10. +
  11. +

    Test if the second checkbox is checked

    +
  12. +
  13. +

    Unselect second checkbox

    +
  14. +
  15. +

    Test if the second checkbox is unchecked

    +
  16. +
+
+
+

== Page Class

+
+
+

Because both checkboxes are in one form, it’s possible to locate them by one selector and then access each individual one by index.

+
+
+
+example8 +
+
+
+
+
public class CheckboxesPage extends BasePage {
+
+    private final static By checkboxesFormSelector = By.cssSelector("#checkboxes");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.CHECKBOX.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Checkboxes' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.CHECKBOX.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Verifies if checkbox form is visible on the page.
+     *
+     * @return true if checkboxes are present and displayed on the page
+     */
+    public boolean isElementCheckboxesFormVisible() {
+        return getDriver().elementCheckbox(checkboxesFormSelector)
+                .isDisplayed();
+    }
+
+    /**
+     * Verifies if given checkbox is selected or not.
+     *
+     * @param index The index of given checkbox
+     * @return true if given checkbox is selected
+     */
+    public boolean isCheckboxSelected(int index) {
+        return getDriver().elementCheckbox(checkboxesFormSelector)
+                .isCheckBoxSetByIndex(index);
+    }
+
+    /**
+     * Selects given checkbox. Unselects, if it is already selected.
+     *
+     * @param index The index of given checkbox
+     */
+    public void selectCheckbox(int index) {
+        CheckBox checkbox = getDriver().elementCheckbox(checkboxesFormSelector);
+        if (isCheckboxSelected(index)) {
+            checkbox.unsetCheckBoxByIndex(index);
+        } else {
+            checkbox.setCheckBoxByIndex(index);
+        }
+    }
+
+}
+
+
+
+
+
+

== CheckBox

+
+
+

CheckBox class contains a method to perform actions on checkboxes such as setting and unsetting or verifying if the specified box is checked. +Use method elementCheckbox(By selector) to create CheckBox Object.

+
+
+
+
+

== Test Class

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class CheckboxesTest extends TheInternetBaseTest {
+
+    private static CheckboxesPage checkboxesPage;
+
+    @Override
+    public void setUp() {
+        checkboxesPage = shouldTheInternetPageBeOpened().clickCheckboxesLink();
+
+        logStep("Verify if Checkboxes page is opened");
+        assertTrue("Unable to open Checkboxes page", checkboxesPage.isLoaded());
+    }
+
+    @Test
+    public void shouldCheckboxBeSelectedAfterClick() {
+
+        logStep("Verify if first checkbox is not selected");
+        assertFalse("The checkbox is selected", checkboxesPage.isCheckboxSelected(0));
+
+        logStep("Select first checkbox");
+        checkboxesPage.selectCheckbox(0);
+
+        logStep("Verify if first checkbox is selected");
+        assertTrue("The checkbox is not selected", checkboxesPage.isCheckboxSelected(0));
+    }
+
+    @Test
+    public void shouldCheckboxBeUnselectedAfterClick() {
+
+        logStep("Verify if second checkbox is selected");
+        assertTrue("The checkbox is not selected", checkboxesPage.isCheckboxSelected(1));
+
+        logStep("Select second checkbox");
+        checkboxesPage.selectCheckbox(1);
+
+        logStep("Verify if second checkbox is not selected");
+        assertFalse("The checkbox is selected", checkboxesPage.isCheckboxSelected(1));
+    }
+
+}
+
+
+
+

After running the Test Class, both @Test cases will be performed. Before each one, the overridden setUp method will be executed.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-06-Disappearing-Elements.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-06-Disappearing-Elements.html new file mode 100644 index 00000000..4b3b2ac6 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-06-Disappearing-Elements.html @@ -0,0 +1,480 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

This case will show how to test changing website content.

+
+
+
+example9 +
+
+
+

After refreshing page (F5) a few times, a new element should appear:

+
+
+
+example10 +
+
+
+

Then, after another couple of refreshes, it should disappear.

+
+
+

You can check in developer mode that the Gallery element does not exist in the HTML document until it appears on the page. The element is created by Javascript.

+
+
+
+example11 +
+
+
+
+example12 +
+
+
+

Steps:

+
+
+
    +
  1. +

    Load The Internet Main Page

    +
  2. +
  3. +

    Click Disappearing Elements link and go to that subpage

    +
  4. +
  5. +

    Check if Menu Buttons exist on the page

    +
  6. +
  7. +

    Refresh the page until a new element appears

    +
  8. +
  9. +

    Check if Gallery Button exists

    +
  10. +
  11. +

    Check if the number of buttons equals the expected value

    +
  12. +
  13. +

    Refresh the page until an element disappears

    +
  14. +
  15. +

    Check if Gallery Button does not exist

    +
  16. +
  17. +

    Check if the number of buttons is smaller than before

    +
  18. +
+
+
+

== Page Class

+
+
+
+
public class DisappearingElementsPage extends BasePage {
+
+    private static final By selectorGalleryMenuButton = By.cssSelector("li > a[href*=gallery]");
+    private static final By selectorMenuButtons       = By.cssSelector("li");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DISAPPEARING_ELEMENTS.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Disappearing Elements' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DISAPPEARING_ELEMENTS.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns a number of WebElements representing menu buttons.
+     *
+     * @return A number of WebElements.
+     */
+    public int getNumberOfMenuButtons() {
+        return getDriver().findElementDynamics(selectorMenuButtons)
+                .size();
+    }
+
+    /**
+     * Returns WebElement representing disappearing element of menu.
+     *
+     * @return Disappearing WebElement if visible, null otherwise.
+     */
+    public WebElement getGalleryMenuElement() {
+        return getDriver().findElementQuietly(selectorGalleryMenuButton);
+    }
+
+    /**
+     * Refreshes web page as many times as it is required to appear/disappear menu button
+     * WebElement.
+     *
+     * @param shouldAppear Determines if element should appear (true) or disappear (false).
+     */
+    public void refreshPageUntilWebElementAppears(boolean shouldAppear) {
+        int numberOfAttempts = 5;
+        int counter = 0;
+        while (!isVisibilityAsExpected(shouldAppear) || isMaxNumberOfAttemptsReached(counter++,
+                numberOfAttempts)) {
+            refreshPage();
+        }
+    }
+
+    /**
+     * Verify if visibility of Gallery button is the same as expected
+     *
+     * @param expected Determines if element should be visible (true) or not visible (false).
+     */
+    private boolean isVisibilityAsExpected(boolean expected) {
+        boolean isVisibilityDifferentThanExpected = isGalleryMenuElementVisible() ^ expected;
+        return !isVisibilityDifferentThanExpected;
+    }
+
+    private boolean isGalleryMenuElementVisible() {
+        boolean result = false;
+        WebElement gallery = getGalleryMenuElement();
+        if (gallery != null)
+            result = gallery.isDisplayed();
+        return result;
+    }
+
+    private boolean isMaxNumberOfAttemptsReached(int attemptNo, int maxNumberOfAttempts) {
+        return attemptNo ==  maxNumberOfAttempts;
+    }
+
+}
+
+
+
+

findElementQuietly(By selector) works similarly to findElementDynamics(By selector) but won’t throw an exception if an element wasn’t found. In this case, the searched WebElement will have a NULL value.

+
+
+
+
+

== Test Class

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class DisappearingElementsTest extends TheInternetBaseTest {
+
+    private static final int totalNumberOfMenuButtons = 5;
+    private static DisappearingElementsPage disappearingElementsPage;
+    private static       int numberOfMenuButtons      = 0;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        disappearingElementsPage = shouldTheInternetPageBeOpened().clickDisappearingElementsLink();
+
+        logStep("Verify if Disappearing Elements page is opened");
+        assertTrue("Unable to open Disappearing Elements page",
+                disappearingElementsPage.isLoaded());
+
+        logStep("Verify if menu button elements are visible");
+        numberOfMenuButtons = disappearingElementsPage.getNumberOfMenuButtons();
+        assertTrue("Unable to display menu", numberOfMenuButtons > 0);
+    }
+
+    @Test
+    public void shouldMenuButtonElementAppearAndDisappearAfterRefreshTest() {
+        logStep("Click refresh button until menu button appears");
+        disappearingElementsPage.refreshPageUntilWebElementAppears(true);
+
+        logStep("Verify if menu button element appeared");
+        assertNotNull("Unable to disappear menu button element",
+                disappearingElementsPage.getGalleryMenuElement());
+        assertEquals("The number of button elements after refresh is incorrect",
+                totalNumberOfMenuButtons, disappearingElementsPage.getNumberOfMenuButtons());
+
+        logStep("Click refresh button until menu button disappears");
+        disappearingElementsPage.refreshPageUntilWebElementAppears(false);
+
+        logStep("Verify if menu button element disappeared");
+        assertNull("Unable to appear menu button element",
+                disappearingElementsPage.getGalleryMenuElement());
+        assertTrue("The number of button elements after refresh is incorrect",
+                totalNumberOfMenuButtons > disappearingElementsPage.getNumberOfMenuButtons());
+    }
+
+}
+
+
+
+

assertNull(Object object) - test passes if Object returns NULL +assertNotNull(Object object) - test passes if Object does not return NULL

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-07-Drag-and-Drop.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-07-Drag-and-Drop.html new file mode 100644 index 00000000..e67655f3 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-07-Drag-and-Drop.html @@ -0,0 +1,588 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

This case shows how to move draggable elements on the page. +image::images/example13.png[]

+
+
+

Try to move A to B position and see what happens. Also, open browser developer mode and see how the DOM changes.

+
+
+
+example14 +
+
+
+

The page can easily be broken. You can try to do so and check how the page structure changed in browser developer mode.

+
+
+
+example15 +
+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Drag and Drop link and open subpage

    +
  4. +
  5. +

    Check if the Drag and Drop message is visible

    +
  6. +
  7. +

    Check if element A is in container A and B in container B

    +
  8. +
  9. +

    Move element A to position B

    +
  10. +
  11. +

    Check if element A is in container B and B in container A

    +
  12. +
  13. +

    Move element B to position A

    +
  14. +
  15. +

    Again check if element A is in container A and B in container B

    +
  16. +
+
+
+

== Page Class

+
+
+
+
public class DragAndDropPage extends BasePage {
+
+    private static final By selectorDragAndDropText    = By.cssSelector("div#content h3");
+    private static final By selectorAElementContainer  = By.cssSelector("div#column-a");
+    private static final By selectorBElementContainer  = By.cssSelector("div#column-b");
+    private static final By selectorDescriptionElement = By.cssSelector("header");
+
+    private static final String dndHelperPath = "src/test/resources/js/drag_and_drop_helper.js";
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DRAG_AND_DROP.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Drag and Drop' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() + PageSubURLsProjectYEnum.DRAG_AND_DROP.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns information if drag and drop message is visible or not.
+     *
+     * @return true if exit drag and drop message was found on web page.
+     */
+    public boolean isDragAndDropMessageVisible() {
+        return getDriver().findElementDynamic(selectorDragAndDropText)
+                .isDisplayed();
+    }
+
+    /**
+     * Verifies if specified element is placed in designated container.
+     *
+     * @param element WebElement to be verified.
+     * @return true if element described as A exists in container A or element B exists in container B, false otherwise.
+     */
+    public boolean isElementPlacedInCorrectContainer(String element) {
+        return getDescriptionElement(findElementByDescription(element)).getText()
+                .equals(element);
+    }
+
+    private WebElement findElementByDescription(String element) {
+        WebElement result;
+        switch (element) {
+            case "A":
+                result = getContainerElement(selectorAElementContainer);
+                break;
+            case "B":
+                result = getContainerElement(selectorBElementContainer);
+                break;
+            default:
+                result = null;
+                BFLogger.logDebug("Chosen element doesn't exist on web page");
+        }
+        return result;
+    }
+
+    private WebElement getContainerElement(By container) {
+        return getDriver().findElementDynamic(container);
+    }
+
+    private WebElement getDescriptionElement(WebElement container) {
+        return container.findElement(selectorDescriptionElement);
+    }
+
+    /**
+     * Drags element to designated container and drops it.
+     *
+     * @param element         String describing WebElement expected to be dragged.
+     * @param from            String describing WebElement representing container of element expected to be dragged.
+     * @param destinationDesc String describing WebElement representing destination container where other element will be dragged.
+     */
+    public void dragElementToPosition(String element, String from, String destinationDesc) {
+        WebElement source = findElementByDescription(from);
+        WebElement description = getDescriptionElement(source);
+        WebElement destination = findElementByDescription(destinationDesc);
+        if (description.getText()
+                .equals(element))
+            dragElement(source, destination);
+    }
+
+}
+
+
+
+

Since HTML5, normal Selenium drag-and-drop action stopped working, thus it’s necessary to execute Javascript which performs the drag-and-drop. To do so, create a JavascriptExecutor object, then read the script from a file drag_and_drop_helper.js and execute it with additional arguments using method executeScript(String script).

+
+
+

An example drag-and-drop solution:

+
+
+
+
    /**
+     * Drags and drops given WebElement to it's destination location.
+     * <p>
+     * Since HTML5 all Selenium Actions performing drag and drop operations stopped working as expected, e.g.
+     * original implementation, which was:
+     * <code>
+     * BasePage.getAction()
+     * .clickAndHold(draggable)
+     * .moveToElement(target)
+     * .release()
+     * .build()
+     * .perform();
+     * </code>
+     * finishes with no effect. For this reason, there is javaScript function used, to make sure that
+     * drag and drop operation will be successful.
+     * JavaScript function is stored under the following path: 'src/test/resources/js/drag_and_drop_helper.js'.
+     * Original source of the script:
+     * <a href="https://gist.github.com/rcorreia/2362544">drag_and_drop_helper</a>
+     * </p>
+     *
+     * @param draggable A WebElement to be dragged and dropped.
+     * @param target    A destination, where element will be dropped.
+     * @see JavascriptExecutor
+     * @see Actions
+     */
+    private void dragElement(WebElement draggable, WebElement target) {
+        JavascriptExecutor js;
+        INewWebDriver driver = getDriver();
+        List<String> fileContent;
+        String draggableId = draggable.getAttribute("id");
+        String targetId = target.getAttribute("id");
+        String script = null;
+        if (draggable.getAttribute("draggable")
+                .contains("true")) {
+            if (driver instanceof JavascriptExecutor) {
+                js = (JavascriptExecutor) driver;
+                Path path = Paths.get(dndHelperPath);
+                try {
+                    fileContent = Files.readAllLines(path);
+                    script = fileContent.stream()
+                            .collect(Collectors.joining());
+                } catch (IOException e) {
+                    BFLogger.logDebug("Unable to read file content: " + e.getMessage());
+                }
+                if (script != null && !script.isEmpty()) {
+                    String arguments = "$('#%s').simulateDragDrop({ dropTarget: '#%s'});";
+                    js.executeScript(script + String.format(arguments, draggableId, targetId));
+                }
+            }
+        }
+    }
+
+
+
+

Drag and Drop helper file:

+
+
+
+
(function( $ ) {
+        $.fn.simulateDragDrop = function(options) {
+                return this.each(function() {
+                        new $.simulateDragDrop(this, options);
+                });
+        };
+        $.simulateDragDrop = function(elem, options) {
+                this.options = options;
+                this.simulateEvent(elem, options);
+        };
+        $.extend($.simulateDragDrop.prototype, {
+                simulateEvent: function(elem, options) {
+                        /*Simulating drag start*/
+                        var type = 'dragstart';
+                        var event = this.createEvent(type);
+                        this.dispatchEvent(elem, type, event);
+
+                        /*Simulating drop*/
+                        type = 'drop';
+                        var dropEvent = this.createEvent(type, {});
+                        dropEvent.dataTransfer = event.dataTransfer;
+                        this.dispatchEvent($(options.dropTarget)[0], type, dropEvent);
+
+                        /*Simulating drag end*/
+                        type = 'dragend';
+                        var dragEndEvent = this.createEvent(type, {});
+                        dragEndEvent.dataTransfer = event.dataTransfer;
+                        this.dispatchEvent(elem, type, dragEndEvent);
+                },
+                createEvent: function(type) {
+                        var event = document.createEvent("CustomEvent");
+                        event.initCustomEvent(type, true, true, null);
+                        event.dataTransfer = {
+                                data: {
+                                },
+                                setData: function(type, val){
+                                        this.data[type] = val;
+                                },
+                                getData: function(type){
+                                        return this.data[type];
+                                }
+                        };
+                        return event;
+                },
+                dispatchEvent: function(elem, type, event) {
+                        if(elem.dispatchEvent) {
+                                elem.dispatchEvent(event);
+                        }else if( elem.fireEvent ) {
+                                elem.fireEvent("on"+type, event);
+                        }
+                }
+        });
+})(jQuery);
+
+
+
+
+
+

== Test Class

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class DragAndDropTest extends TheInternetBaseTest {
+
+    private static final String ELEMENT_A   = "A";
+    private static final String CONTAINER_A = "A";
+    private static final String ELEMENT_B   = "B";
+    private static final String CONTAINER_B = "B";
+
+    private static DragAndDropPage dragAndDropPage;
+
+    /**
+     * Opens the Drag And Drop subpage once for all tests and verifies that
+     * both the page and its header message are visible.
+     */
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        dragAndDropPage = shouldTheInternetPageBeOpened().clickDragAndDropLink();
+
+        logStep("Verify if Drag And Drop page is opened");
+        assertTrue("Unable to open Drag And Drop page", dragAndDropPage.isLoaded());
+
+        logStep("Verify if Drag And Drop message is visible");
+        assertTrue("Drag And Drop message is not visible", dragAndDropPage.isDragAndDropMessageVisible());
+    }
+
+    /**
+     * Moves element A into container B, verifies both elements ended up in the
+     * wrong (swapped) containers, then moves A back and verifies the original
+     * layout is restored.
+     */
+    @Test
+    public void shouldDraggableElementBeMovedAndDropped() {
+        logStep("Verify if elements are placed in proper containers");
+        assertTrue("Element A doesn't exist in container A", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_A));
+        assertTrue("Element B doesn't exist in container B", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_B));
+
+        logStep("Drag and drop element A into container B");
+        dragAndDropPage.dragElementToPosition(ELEMENT_A, CONTAINER_A, CONTAINER_B);
+
+        logStep("Verify if elements are placed in improper containers");
+        // BUGFIX: the failure messages below were inverted — a failure here
+        // means the element is STILL in its original container (the drag had
+        // no effect), so the messages now say exactly that.
+        assertFalse("Element A is still placed in container A", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_A));
+        assertFalse("Element B is still placed in container B", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_B));
+
+        // BUGFIX: the original log said "element B back into container B" but
+        // the code moves ELEMENT_A from CONTAINER_B back to CONTAINER_A.
+        logStep("Drag and drop element A back into container A");
+        dragAndDropPage.dragElementToPosition(ELEMENT_A, CONTAINER_B, CONTAINER_A);
+
+        logStep("Verify if elements are placed in proper containers");
+        assertTrue("Element A doesn't exist in container A", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_A));
+        assertTrue("Element B doesn't exist in container B", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_B));
+    }
+
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-08-Dropdown-List.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-08-Dropdown-List.html new file mode 100644 index 00000000..c7ffd566 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-08-Dropdown-List.html @@ -0,0 +1,418 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

This example shows how to select an element from the dropdown list.

+
+
+
+example16 +
+
+
+

Check in the developer mode how a Dropdown List’s content has been organized.

+
+
+
+example17 +
+
+
+

Notice that the Dropdown Options have different attributes, such as "disabled" or "selected".

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click the Dropdown link and go to the subpage

    +
  4. +
  5. +

    Select first dropdown Option

    +
  6. +
  7. +

    Check if Option 1 is selected

    +
  8. +
  9. +

    Select second dropdown Option

    +
  10. +
  11. +

    Check if Option 2 is selected

    +
  12. +
+
+
+

== Page Class

+
+
+
+
public class DropdownPage extends BasePage {
+
+    private static final By dropdownListSelector = By.cssSelector("#dropdown");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DROPDOWN.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Dropdown List' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DROPDOWN.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Selects doropdown's value by given index.
+     *
+     * @param index Index of option to be selected
+     */
+    public void selectDropdownValueByIndex(int index) {
+        getDriver().elementDropdownList(dropdownListSelector)
+                .selectDropdownByIndex(index);
+    }
+
+    /**
+     * Returns text value of first selected dropdown's option.
+     *
+     * @return String object representing value of dropdown's option
+     */
+    public String getSelectedDropdownValue() {
+        return getDriver().elementDropdownList(dropdownListSelector)
+                .getFirstSelectedOptionText();
+    }
+}
+
+
+
+
+
+

== DropdownListElement class

+
+
+

DropdownListElement is MrChecker’s class, which contains methods for performing the dropdown list of actions:

+
+
+
+
elementDropdownList() - returns DropdownListElement Object
+
+
+
+
+
+

== Test Class

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class DropdownTest extends TheInternetBaseTest {
+
+    private static final String expectedFirstOptionValue  = "Option 1";
+    private static final String expectedSecondOptionValue = "Option 2";
+    private static DropdownPage dropdownPage;
+
+    /** Opens the Dropdown subpage once for all tests in this class. */
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        dropdownPage = shouldTheInternetPageBeOpened().clickDropdownLink();
+
+        logStep("Verify if Dropdown page is opened");
+        assertTrue("Unable to open Dropdown page", dropdownPage.isLoaded());
+    }
+
+    /**
+     * Selects the first and then the second dropdown option and verifies the
+     * selected option text matches the expected value each time.
+     */
+    @Test
+    public void shouldGetExpectedDropdownTextOptionAfterSelection() {
+
+        logStep("Select first dropdown option");
+        dropdownPage.selectDropdownValueByIndex(1);
+
+        logStep("Verify if selected option text is equal to the expected one");
+        assertEquals("Selected value is different than expected", expectedFirstOptionValue,
+                dropdownPage.getSelectedDropdownValue());
+
+        // BUGFIX: the original log message repeated "Select first drodown
+        // option" although the SECOND option is selected here.
+        logStep("Select second dropdown option");
+        dropdownPage.selectDropdownValueByIndex(2);
+
+        logStep("Verify if selected option text is equal to the expected one");
+        assertEquals("Selected value is different than expected", expectedSecondOptionValue,
+                dropdownPage.getSelectedDropdownValue());
+    }
+
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-09-Dynamic-Content.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-09-Dynamic-Content.html new file mode 100644 index 00000000..fa5ea43d --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-09-Dynamic-Content.html @@ -0,0 +1,448 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

This case shows how to compare dynamic content.

+
+
+
+example18 +
+
+
+

Note that after site refresh, some of the content is different. You can see in the browser’s developer mode how the text and image sources are being changed.

+
+
+
+example19 +
+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Dynamic Content link and load subpage

    +
  4. +
  5. +

    Save page images sources and descriptions before the refresh

    +
  6. +
  7. +

    Refresh page

    +
  8. +
  9. +

    Save page images sources and its descriptions after refresh

    +
  10. +
  11. +

    Compare page content before and after refresh and verify if it’s different

    +
  12. +
+
+
+

== Page Class

+
+
+
+
public class DynamicContentPage extends BasePage {
+
+    private static final By imagesLinksSelector        = By.cssSelector("div#content > div.row img");
+    private static final By imagesDescriptionsSelector = By.cssSelector("div#content > div.row div.large-10");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DYNAMIC_CONTENT.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Dynamic Content' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DYNAMIC_CONTENT.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns list of picture descriptions being present on the web page.
+     *
+     * @return List of String objects representing descriptions
+     */
+    public List<String> getDescriptions() {
+        return new ListElements(imagesDescriptionsSelector).getTextList();
+    }
+
+    /**
+     * Returns a list of image links being present on the web page.
+     *
+     * @return List of String objects representing paths to pictures
+     */
+    public List<String> getImageLinks() {
+        return new ListElements(imagesLinksSelector)
+                .getList()
+                .stream()
+                .map(element -> element.getAttribute("src"))
+                .collect(Collectors.toList());
+    }
+}
+
+
+
+
+
+

== ListElements

+
+
+

ListElements is MrChecker collection which can store WebElement Objects. Constructing ListElements with cssSelector allows you to store every element on the page which fits the selector. Example methods:

+
+
+
+
getList() -  returns WebElements list,
+getTextList() - returns list of contents of each Element,
+getSize() - returns number of stored Elements
+In getImageLinks() example it's shown how to get a list of specified Elements' attributes.
+
+
+
+
+
+

== Test Class

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class DynamicContentTest extends TheInternetBaseTest {
+
+    private static DynamicContentPage dynamicContentPage;
+
+    /** Opens the Dynamic Content subpage once for all tests in this class. */
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        dynamicContentPage = shouldTheInternetPageBeOpened().clickDynamicContentLink();
+
+        logStep("Verify if Dynamic Content page is opened");
+        assertTrue("Unable to open Dynamic Content page", dynamicContentPage.isLoaded());
+    }
+
+    /**
+     * Captures image links and descriptions, refreshes the page, captures them
+     * again and verifies that at least one entry of each collection changed.
+     */
+    @Test
+    public void shouldImagesAndDescriptionsDifferAfterRefresh() {
+
+        logStep("Read images and descriptions before refresh");
+        List<String> descriptionsBeforeRefresh = dynamicContentPage.getDescriptions();
+        List<String> imagesBeforeRefresh = dynamicContentPage.getImageLinks();
+
+        // BUGFIX: typo "Refres page" in the original log message.
+        logStep("Refresh page");
+        dynamicContentPage.refreshPage();
+        assertTrue("The Dynamic Content page hasn't been refreshed", dynamicContentPage.isLoaded());
+
+        logStep("Read images and descriptions after refresh");
+        List<String> descriptionsAfterRefresh = dynamicContentPage.getDescriptions();
+        List<String> imagesAfterRefresh = dynamicContentPage.getImageLinks();
+
+        logStep("Verify if descriptions are different after refresh");
+        assertEquals("Different number of descriptions before and after refresh",
+                descriptionsAfterRefresh.size(), descriptionsBeforeRefresh.size());
+        assertTrue("There are no differences between descriptions before and after refresh",
+                listsDiffer(descriptionsBeforeRefresh, descriptionsAfterRefresh));
+
+        logStep("Verify if images are different after refresh");
+        // BUGFIX: this message wrongly said "descriptions" for the image count.
+        assertEquals("Different number of images before and after refresh",
+                imagesAfterRefresh.size(), imagesBeforeRefresh.size());
+        assertTrue("There are no differences between images before and after refresh",
+                listsDiffer(imagesBeforeRefresh, imagesAfterRefresh));
+    }
+
+    /**
+     * Returns true when at least one position differs between the two
+     * equally-sized lists; stops at the first mismatch found.
+     */
+    private static boolean listsDiffer(List<String> before, List<String> after) {
+        for (int i = 0; i < after.size(); i++) {
+            if (!after.get(i)
+                    .equals(before.get(i))) {
+                return true;
+            }
+        }
+        return false;
+    }
+}
+
+
+
+

In the test method, the difference check compares every element of the first and second list and stops at the first mismatch found.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-11-Exit-Intent.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-11-Exit-Intent.html new file mode 100644 index 00000000..656b90a8 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-11-Exit-Intent.html @@ -0,0 +1,558 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example32 +
+
+
+

This case shows how to perform mouse actions and test modal windows.

+
+
+

After you move the mouse cursor out of the website, you should see a new window appearing:

+
+
+
+example33 +
+
+
+

Check in the browser’s developer mode if this window exists in Page DOM

+
+
+
+example34 +
+
+
+

Before you move the mouse out, the window exists, but it’s not displayed.

+
+
+

When the mouse is moved, JavaScript changes display attribute. It also hides window after clicking "Close".

+
+
+
+example35 +
+
+
+

== Page Class

+
+
+
+
public class ExitIntentPage extends BasePage {
+
+    // Exact values of the modal's "style" attribute in its two states.
+    private static final String MODAL_WINDOW_HIDDEN          = "display: none;";
+    private static final String MODAL_WINDOW_DISPLAYED       = "display: block;";
+    // BUGFIX: constant renamed from MODAL_WINDOW_STYLE_ATTRIBUTTE (typo);
+    // it is private, so the rename is invisible to callers.
+    private static final String MODAL_WINDOW_STYLE_ATTRIBUTE = "style";
+
+    private static final By selectorModalWindow            = By.cssSelector("div#ouibounce-modal");
+    private static final By selectorExitIntentText         = By.cssSelector("div#content h3");
+    private static final By selectorModalWindowTitle       = By.cssSelector("h3");
+    private static final By selectorModalWindowCloseButton = By.cssSelector("div.modal-footer > p");
+
+    /** Checks whether the browser is currently on the Exit Intent subpage. */
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.EXIT_INTENT.getValue());
+    }
+
+    /** Navigates to the Exit Intent subpage and waits until it has loaded. */
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Exit Intent' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.EXIT_INTENT.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    /** Returns the actual browser page title. */
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns information if exit intent message is visible or not.
+     *
+     * @return true if exit intent message was found on web page.
+     */
+    public boolean isIntentMessageVisible() {
+        return getDriver().findElementDynamic(selectorExitIntentText)
+                .isDisplayed();
+    }
+
+    /**
+     * Returns information if modal window is hidden.
+     *
+     * @return true if modal window is hidden.
+     */
+    public boolean isModalWindowHidden() {
+        return getDriver().findElementDynamic(selectorModalWindow)
+                .getAttribute(MODAL_WINDOW_STYLE_ATTRIBUTE)
+                .equals(MODAL_WINDOW_HIDDEN);
+    }
+
+    /**
+     * Returns information if modal window is shown on web page.
+     *
+     * @return true if modal window is displayed.
+     */
+    public boolean isModalWindowVisible() {
+        return getDriver().findElementDynamic(selectorModalWindow)
+                .getAttribute(MODAL_WINDOW_STYLE_ATTRIBUTE)
+                .equals(MODAL_WINDOW_DISPLAYED);
+    }
+
+    /**
+     * Returns information if modal window title is shown and correct.
+     *
+     * @param expectedValue String representing expected value of modal window's title.
+     * @return true if modal window's title is equal to expected value.
+     */
+    public boolean verifyModalWindowTitle(String expectedValue) {
+        return getDriver().elementLabel(new ByChained(selectorModalWindow,
+                selectorModalWindowTitle))
+                .getText()
+                .equals(expectedValue);
+    }
+
+    /**
+     * Closes modal window by pressing 'close' button.
+     */
+    public void closeModalWindow() {
+        getDriver().elementButton(new ByChained(selectorModalWindow,
+                selectorModalWindowCloseButton))
+                .click();
+    }
+
+    /**
+     * Moves mouse pointer to the top middle of screen, then to the centre of screen and
+     * again to the top.
+     * <p>
+     * This move simulates leaving the viewport and encourages the modal to show up. There is
+     * java.awt.Robot used
+     * to move mouse pointer out of the viewport.
+     * </p>
+     *
+     * @see java.awt.Robot
+     */
+    public void moveMouseOutOfViewport() {
+        Robot robot;
+        Dimension screenSize = getDriver().manage()
+                .window()
+                .getSize();
+        // BUGFIX: getWidth()/getHeight() return int, so wrapping the already
+        // integer result in new BigDecimal(...).intValue() was a no-op detour.
+        int halfWidth = screenSize.getWidth() / 2;
+        int halfHeight = screenSize.getHeight() / 2;
+
+        try {
+            robot = new Robot();
+            robot.mouseMove(halfWidth, 1);
+            // NOTE(review): implicitlyWait only configures the driver's
+            // element-lookup timeout — it does not pause execution. It looks
+            // like these calls were intended as delays between mouse moves;
+            // confirm whether an explicit sleep was meant instead.
+            getDriver().manage()
+                    .timeouts()
+                    .implicitlyWait(1, TimeUnit.SECONDS);
+            robot.mouseMove(halfWidth, halfHeight);
+            getDriver().manage()
+                    .timeouts()
+                    .implicitlyWait(1, TimeUnit.SECONDS);
+            robot.mouseMove(halfWidth, 1);
+        } catch (AWTException e) {
+            BFLogger.logError("Unable to connect with remote mouse");
+            e.printStackTrace();
+        }
+    }
+}
+
+
+
+
+
+

== Attributes

+
+
+

Elements on pages have attributes like "id", "class", "name", "style" etc. In order to check them, use method getAttribute(String name). In this case attribute "style" determines if the element is displayed.

+
+
+
+
+

== Robot

+
+
+

Robot class can perform mouse movement. Method mouseMove(int x, int y) moves the remote mouse to given coordinates.

+
+
+
+
+

== Manage Timeouts

+
+
+

manage().timeouts() methods allow you to change WebDriver timeout values such as:

+
+
+
    +
  • +

    pageLoadTimeout(long time, TimeUnit unit) - the amount of time to wait for a page to load before throwing an exception

    +
  • +
  • +

    setScriptTimeout(long time, TimeUnit unit) - the amount of time to wait for finish execution of a script before throwing an exception

    +
  • +
  • +

    implicitlyWait(long time, TimeUnit unit) - the amount of time the driver should wait when searching for an element if it is not immediately present. After that time, it throws an exception.

    +
  • +
+
+
+

Changing timeouts can improve test stability but can also make them run slower.

+
+
+
+
+

== Test Class

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Exit Intent link and load subpage

    +
  4. +
  5. +

    Check if the page is loaded and "Exit Intent" message is visible

    +
  6. +
  7. +

    Verify if Modal Window is hidden

    +
  8. +
  9. +

    Move mouse out of the viewport

    +
  10. +
  11. +

    Check if Modal Window is visible

    +
  12. +
  13. +

    Verify if Modal Window title is correct

    +
  14. +
  15. +

    Click 'close' button

    +
  16. +
  17. +

    Again verify if Modal Window is hidden

    +
  18. +
+
+
+
+
@Category({ TestsLocal.class, TestsNONParallel.class })
+public class ExitIntentTest extends TheInternetBaseTest {
+
+    private static final String MODAL_WINDOW_TITLE = "This is a modal window";
+
+    private static ExitIntentPage exitIntentPage;
+
+    /**
+     * Opens the Exit Intent subpage once for all tests and verifies that both
+     * the page and the exit-intent message are visible.
+     */
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        exitIntentPage = shouldTheInternetPageBeOpened().clickExitIntentLink();
+
+        logStep("Verify if Exit Intent page is opened");
+        assertTrue("Unable to open Exit Intent page", exitIntentPage.isLoaded());
+
+        logStep("Verify if exit intent message is visible");
+        assertTrue("Exit intent message is not visible", exitIntentPage.isIntentMessageVisible());
+    }
+
+    /**
+     * Verifies the modal is initially hidden, moves the mouse out of the
+     * viewport to trigger it, checks its visibility and title, then closes it
+     * and verifies it is hidden again. Must not be run in parallel and the
+     * physical mouse must not be moved during execution.
+     */
+    @Test
+    public void shouldModalWindowAppearWhenMouseMovedOutOfViewportTest() {
+
+        logStep("Verify if modal window is hidden");
+        assertTrue("Fail to hide modal window", exitIntentPage.isModalWindowHidden());
+
+        logStep("Move mouse pointer out of viewport");
+        exitIntentPage.moveMouseOutOfViewport();
+
+        logStep("Verify if modal window showed up");
+        assertTrue("Fail to show up modal window", exitIntentPage.isModalWindowVisible());
+
+        logStep("Verify if modal window title displays properly");
+        // The page renders the title in upper case, hence toUpperCase() here.
+        assertTrue("Fail to display modal window's title",
+                exitIntentPage.verifyModalWindowTitle(MODAL_WINDOW_TITLE.toUpperCase()));
+
+        logStep("Close modal window");
+        exitIntentPage.closeModalWindow();
+
+        logStep("Verify if modal window is hidden again");
+        assertTrue("Fail to hide modal window", exitIntentPage.isModalWindowHidden());
+    }
+}
+
+
+
+

Remember not to move the mouse manually during test execution.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-12-File-download-test.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-12-File-download-test.html new file mode 100644 index 00000000..73b33028 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-12-File-download-test.html @@ -0,0 +1,417 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example36 +
+
+
+

This example shows how to check if file downloads properly.

+
+
+

After clicking on one of these links, a specific file should be downloaded to your computer.

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click on the File Download link and open subpage

    +
  4. +
  5. +

    Click on "some-file.txt" download link and download file

    +
  6. +
  7. +

    Check if the file exists in the appropriate folder

    +
  8. +
  9. +

    Delete the file

    +
  10. +
  11. +

    Check if the file doesn’t exist in the folder

    +
  12. +
+
+
+

== Page Class

+
+
+
+
public class FileDownloadPage extends BasePage {
+
+    private static final By selectorSomeFileTxt = By.cssSelector("a[href*=some-file]");
+
+    // Download target directory. NOTE: the value of "java.io.tmpdir" is not
+    // guaranteed to end with a file separator, so paths must be built with
+    // the two-argument File constructor, never by string concatenation.
+    private final String DOWNLOAD_DIR = System.getProperty("java.io.tmpdir");
+
+    /** Checks whether the browser is currently on the File Download subpage. */
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DOWNLOAD.getValue());
+    }
+
+    /** Navigates to the File Download subpage and waits until it has loaded. */
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'File Downloader' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DOWNLOAD.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    /** Returns the actual browser page title. */
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Verifies if the chosen file is already downloaded and if not, downloads it.
+     * Throws RuntimeException otherwise.
+     *
+     * @return Downloaded file
+     */
+    public File downloadTextFile() {
+        String nameOfDownloadFile = getNameOfDownloadFile();
+        // BUGFIX: use File(parent, child) instead of DOWNLOAD_DIR + name —
+        // concatenation yields a wrong path when tmpdir lacks a trailing
+        // separator.
+        File fileToDownload = new File(DOWNLOAD_DIR, nameOfDownloadFile);
+
+        if (fileToDownload.exists()) {
+            throw new RuntimeException("The file that you want to download already exists. "
+                    + "Please remove it manually. Path to the file: " + fileToDownload.getPath());
+        }
+
+        getDriver().elementButton(selectorSomeFileTxt)
+                .click();
+
+        waitForFileDownload(2000, fileToDownload);
+        return fileToDownload;
+    }
+
+    /**
+     * Polls every 200 ms until the expected file appears on disk, up to the
+     * given total timeout in milliseconds.
+     */
+    private void waitForFileDownload(int totalTimeoutInMillis, File expectedFile) {
+        FluentWait<WebDriver> wait = new FluentWait<WebDriver>(getDriver())
+                .withTimeout(totalTimeoutInMillis, TimeUnit.MILLISECONDS)
+                .pollingEvery(200, TimeUnit.MILLISECONDS);
+
+        wait.until((WebDriver wd) -> expectedFile.exists());
+    }
+
+    /** Extracts the file name from the last segment of the download link URL. */
+    private String getNameOfDownloadFile() {
+        String urlToDownload = getDriver().findElementDynamic(selectorSomeFileTxt)
+                .getAttribute("href");
+        String[] urlHierarchy = urlToDownload.split("/");
+        return urlHierarchy[urlHierarchy.length - 1];
+    }
+}
+
+
+
+

Use FluentWait class and create an expected condition using a lambda expression to wait until the file downloads.

+
+
+

To perform operations on files, use java File class. To get a file name, find it in download URL.

+
+
+
+
+

== Test Class

+
+
+
+
@Category({ TestsLocal.class, TestsNONParallel.class })
+public class FileDownloadTest extends TheInternetBaseTest {
+
+    private static FileDownloadPage fileDownloadPage;
+
+    /** Opens the File Download subpage once for all tests in this class. */
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        fileDownloadPage = shouldTheInternetPageBeOpened().clickFileDownloadLink();
+
+        logStep("Verify if File Download page is opened");
+        assertTrue("Unable to open File Download page", fileDownloadPage.isLoaded());
+    }
+
+    /**
+     * Downloads some-file.txt, verifies it exists on disk, removes it, and
+     * verifies the removal succeeded.
+     */
+    @Test
+    public void shouldfileBeDownloaded() {
+
+        logStep("Download the some-file.txt");
+        File downloadedFile = fileDownloadPage.downloadTextFile();
+
+        logStep("Verify if downloaded file exists");
+        assertTrue("Downloaded file does not exist", downloadedFile.exists());
+
+        logStep("Remove downloaded file");
+        // BUGFIX: File.delete() returns false on failure; the original
+        // discarded that result, hiding the root cause until the next assert.
+        assertTrue("Unable to remove downloaded file", downloadedFile.delete());
+
+        logStep("Verify if downloaded file has been removed");
+        assertFalse("Downloaded file still exists", downloadedFile.exists());
+    }
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-13-Form-Authentication-Test.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-13-Form-Authentication-Test.html new file mode 100644 index 00000000..6c416f46 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-13-Form-Authentication-Test.html @@ -0,0 +1,559 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example37 +
+
+
+

This case shows how to pass through the standard authentication page.

+
+
+

When you enter the correct credentials, you should see the next page:

+
+
+
+example38 +
+
+
+

If user data is wrong, an appropriate message appears:

+
+
+
+example39 +
+
+
+

== Page Class

+
+
+
+
public class FormAuthenticationPage extends BasePage {
+
+    private final static By selectorInputUsername     = By.cssSelector("#username");
+    private final static By selectorInputUserPassword = By.cssSelector("#password");
+    private final static By selectorLoginMessage      = By.cssSelector("#flash");
+    private final static By selectorLoginButton       = By.cssSelector("#login > button > i");
+    private final static By selectorLogoutButton      = By.cssSelector("#content > div > a ");
+
+    /** Checks whether the browser is currently on the login subpage. */
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        String currentUrl = getDriver().getCurrentUrl();
+        return currentUrl.contains(PageSubURLsProjectYEnum.LOGIN.getValue());
+    }
+
+    /** Navigates to the login subpage and waits until it has loaded. */
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Login Page' page.");
+        String loginPageUrl = GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue()
+                + PageSubURLsProjectYEnum.LOGIN.getValue();
+        getDriver().get(loginPageUrl);
+        getDriver().waitForPageLoaded();
+    }
+
+    /** Returns the actual browser page title. */
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Clears the username field and types the given user name into it.
+     *
+     * @param username String representing a user's name
+     * @return this FormAuthenticationPage, for fluent chaining
+     */
+    public FormAuthenticationPage setUsername(String username) {
+        InputTextElement usernameField = new InputTextElement(selectorInputUsername);
+        usernameField.clearInputText();
+        usernameField.setInputText(username);
+        return this;
+    }
+
+    /**
+     * Clears the password field and types the given password into it.
+     *
+     * @param userPassword String representing a user's password
+     * @return this FormAuthenticationPage, for fluent chaining
+     */
+    public FormAuthenticationPage setUserPassword(String userPassword) {
+        InputTextElement passwordField = new InputTextElement(selectorInputUserPassword);
+        passwordField.clearInputText();
+        passwordField.setInputText(userPassword);
+        return this;
+    }
+
+    /**
+     * Returns the flash message shown after a login/logout attempt.
+     *
+     * @return String object representing the message returned after login operation is performed
+     */
+    public String getLoginMessageText() {
+        return new LabelElement(selectorLoginMessage).getText();
+    }
+
+    /**
+     * Clicks 'Login' button.
+     */
+    public void clickLoginButton() {
+        new Button(selectorLoginButton).click();
+    }
+
+    /**
+     * Clicks 'Logout' button.
+     */
+    public void clickLogoutButton() {
+        new Button(selectorLogoutButton).click();
+    }
+}
+
+
+
+
+
+

=== InputTextElement

+
+
+

Use methods from this class to perform actions on text fields:

+
+
+
    +
  • +

    clearInputText() - remove all text from selected input field

    +
  • +
  • +

    setInputText(String text) - enter given text

    +
  • +
+
+
+
+
+

=== LabelElement

+
+
+
    +
  • +

    String getText() method returns visible text from label

    +
  • +
+
+
+
+
+

== TestClass

+
+
+

Prepare six test cases:

+
+
+
    +
  1. +

    Try to login with empty user data and check if the error message appears

    +
  2. +
  3. +

    Try to login with empty username and valid password and check if the error message appears

    +
  4. +
  5. +

    Try to login with a valid username and empty password and check if the error message appears

    +
  6. +
  7. +

    Try to login with invalid username and invalid password and check if the error message appears

    +
  8. +
  9. +

    Try to login with a valid username and valid password and check if success login message appears, then log out

    +
  10. +
  11. +

    Try to login with a valid username and valid password and check if success login message appears, then log out and check if success logout message is displayed

    +
  12. +
+
+
+

Before all tests: Open The Internet Main Page

+
+
+

Before each case: Click on the Form Authentication link and open login page

+
+
+

After each case: Go back to The Internet Main Page

+
+
+
+
@Category({ TestsLocal.class, TestsNONParallel.class })
+public class FormAuthenticationTest extends TheInternetBaseTest {
+
+    private static FormAuthenticationPage formAuthenticationPage;
+
+    private String errorUsernameMessage = "Your username is invalid!\n" + "×";
+    private String errorPasswordMessage = "Your password is invalid!\n" + "×";
+    private String loginMessage         = "You logged into a secure area!\n" + "×";
+    private String logoutMessage        = "You logged out of the secure area!\n" + "×";
+    private String emptyUsername        = "";
+    private String emptyUserPassword    = "";
+    private String validUsername        = "tomsmith";
+    private String validPassword        = "SuperSecretPassword!";
+    private String randomUsername       = UUID.randomUUID()
+            .toString();
+    private String randomUserPassword   = UUID.randomUUID()
+            .toString();
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        logStep("Open the Url http://the-internet.herokuapp.com/");
+        theInternetPage = new TheInternetPage();
+        theInternetPage.load();
+
+        logStep("Verify if Url http://the-internet.herokuapp.com/ is opened");
+        assertTrue("Unable to load The Internet Page", theInternetPage.isLoaded());
+    }
+
+    @Override
+    public void setUp() {
+        logStep("Click subpage link");
+        formAuthenticationPage = theInternetPage.clickFormAuthenticationLink();
+
+        logStep("Verify if subpage is opened");
+        assertTrue("The Internet subpage: FormAuthenticationPage was not open", formAuthenticationPage.isLoaded());
+    }
+
+    @Test
+    public void shouldErrorMessageBeDisplayedWhenUserLogsWithEmptyData() {
+        logStep("Log user with empty username and password");
+        formAuthenticationPage.setUsername(emptyUsername)
+                .setUserPassword(emptyUserPassword)
+                .clickLoginButton();
+        assertEquals("Unexpectedly user logged in with empty data", errorUsernameMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Test
+    public void shouldErrorMessageBeDisplayedWhenUserLogsWithEmptyUsernameAndValidPassword() {
+        logStep("Log user with empty username and valid password");
+        formAuthenticationPage.setUsername(emptyUsername)
+                .setUserPassword(validPassword)
+                .clickLoginButton();
+        assertEquals("Unexpectedly user logged in with empty username", errorUsernameMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Test
+    public void shouldErrorMessageBeDisplayedWhenUserLogsWithValidUsernameAndEmptyPassword() {
+        logStep("Log user with valid username and empty password");
+        formAuthenticationPage.setUsername(validUsername)
+                .setUserPassword(emptyUserPassword)
+                .clickLoginButton();
+        assertEquals("Unexpectedly user logged in with empty password", errorPasswordMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Test
+    public void shouldErrorMessageBeDisplayedWhenUserLogsWithInvalidUsernameAndInvalidPassword() {
+        logStep("Log user with invalid username and invalid password");
+        formAuthenticationPage.setUsername(randomUsername)
+                .setUserPassword(randomUserPassword)
+                .clickLoginButton();
+        assertEquals("Unexpectedly user logged in with random credentials", errorUsernameMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Test
+    public void shouldUserLogInWithValidCredentials() {
+        logStep("Log user with valid username and valid password");
+        formAuthenticationPage.setUsername(validUsername)
+                .setUserPassword(validPassword)
+                .clickLoginButton();
+        assertEquals("Unable to login user with valid credentials", loginMessage,
+                formAuthenticationPage.getLoginMessageText());
+        logStep("Log out user");
+        formAuthenticationPage.clickLogoutButton();
+    }
+
+    @Test
+    public void shouldUserLogOutAfterProperLogInAndClickLogoutButon() {
+        logStep("Log user with valid username and valid password");
+        formAuthenticationPage.setUsername(validUsername)
+                .setUserPassword(validPassword)
+                .clickLoginButton();
+        assertEquals("Unable to login user with valid credentials", loginMessage,
+                formAuthenticationPage.getLoginMessageText());
+        logStep("Log out user");
+        formAuthenticationPage.clickLogoutButton();
+        assertEquals("User cannot log out after prper log in", logoutMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Override
+    public void tearDown() {
+        logStep("Navigate back to The-Internet page");
+        theInternetPage.load();
+    }
+}
+
+
+
+

After running Test Class, cases might be performed in a different order.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-14-Hovers-Test.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-14-Hovers-Test.html new file mode 100644 index 00000000..3fec024b --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-14-Hovers-Test.html @@ -0,0 +1,410 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example40 +
+
+
+

This example shows how to approach elements dynamically appearing after the user’s action.

+
+
+

Move the mouse over an image to see the additional label.

+
+
+
+example41 +
+
+
+

Labels exist in page DOM all the time but their display attributes change. In this case, there is no JavaScript. Elements' visibility is managed by CSS.

+
+
+
+example42 +
+
+
+

== Page Class

+
+
+
+
public class HoversPage extends BasePage {
+
+    private final static By selectorImages = By.cssSelector("div.figure > img");
+    private final static By selectorNames  = By.cssSelector("div.figcaption h5");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.HOVERS.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Hovers' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.HOVERS.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Moves mouse pointer over an image with given index.
+     *
+     * @param index An index of the picture, where mouse pointer should be moved
+     */
+    public void hoverOverAvatar(int index) {
+        Actions action = new Actions(getDriver());
+        WebElement avatarImage = getDriver().findElementDynamics(selectorImages)
+                .get(index);
+        action.moveToElement(avatarImage)
+                .perform();
+    }
+
+    /**
+     * Returns the information displayed under a picture with given index.
+     *
+     * @param index An index of the picture, where the information should be read
+     * @return String object representing picture's information
+     */
+    public String getAvatarsInformation(int index) {
+        return getDriver().findElementDynamics(selectorNames)
+                .get(index)
+                .getText();
+    }
+}
+
+
+
+
+
+

== == Actions

+
+
+

Actions class contains methods used to execute basic user actions such as mouse moving and clicking or keys sending. Action or actions series will be performed after calling perform() method.

+
+
+
+
+

== Test Class

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Go to Hovers page

    +
  4. +
  5. +

    Move mouse over random image

    +
  6. +
  7. +

    Check if displayed text is equal to expected.

    +
  8. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class HoversTest extends TheInternetBaseTest {
+
+    private static HoversPage    hoversPage;
+    private final String        names[]    = { "name: user1", "name: user2", "name: user3" };
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        hoversPage = shouldTheInternetPageBeOpened().clickHoversLink();
+
+        logStep("Verify if Hovers page is opened");
+        assertTrue("Unable to open Hovers page", hoversPage.isLoaded());
+    }
+
+    @Test
+    public void shouldProperInformationBeDisplayedWhenMousePointerHoveredOverRandomElement() {
+        logStep("Hover mouse pointer over random element");
+        int randomIndex = new Random().nextInt(names.length);
+        hoversPage.hoverOverAvatar(randomIndex);
+        assertEquals("Picture's information is different than expected", names[randomIndex],
+                hoversPage.getAvatarsInformation(randomIndex));
+    }
+}
+
+
+
+

Because in this case the tested content is being chosen randomly, each test run could check a different element.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-15-JavaScript-Alerts.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-15-JavaScript-Alerts.html new file mode 100644 index 00000000..d1f9b34c --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-15-JavaScript-Alerts.html @@ -0,0 +1,539 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example43 +
+
+
+

This case shows how to test pop-up JS alerts.

+
+
+

After clicking one of the buttons, an adequate alert should appear.

+
+
+
+example44 +
+
+
+

The performed action will be displayed under the "Result" label.

+
+
+

In developer mode, you can view JavaScript which creates alerts.

+
+
+
+example45 +
+
+
+

== Page Class

+
+
+
+
public class JavaScriptAlertsPage extends BasePage {
+
+    private static final By selectorAlertButton   = By.cssSelector("button[onclick*=jsAlert]");
+    private static final By selectorConfirmButton = By.cssSelector("button[onclick*=jsConfirm]");
+    private static final By selectorPromptButton  = By.cssSelector("button[onclick*=jsPrompt]");
+    private static final By resultLabelSelector   = By.cssSelector("p#result");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.JAVASCRIPT_ALERTS.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'JavaScript Alerts' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.JAVASCRIPT_ALERTS.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Clicks 'JS alert' button.
+     */
+    public void clickAlertButton() {
+        new Button(selectorAlertButton).click();
+        WebDriverWait wait = new WebDriverWait(getDriver(), 2);
+        wait.until(ExpectedConditions.alertIsPresent());
+    }
+
+    /**
+     * Clicks 'JS confirm' button.
+     */
+    public void clickConfirmButton() {
+        new Button(selectorConfirmButton).click();
+        WebDriverWait wait = new WebDriverWait(getDriver(), 2);
+        wait.until(ExpectedConditions.alertIsPresent());
+    }
+
+    /**
+     * Clicks 'JS prompt' button.
+     */
+    public void clickPromptButton() {
+        new Button(selectorPromptButton).click();
+        WebDriverWait wait = new WebDriverWait(getDriver(), 2);
+        wait.until(ExpectedConditions.alertIsPresent());
+    }
+
+    /**
+     * Returns message displayed by popup.
+     *
+     * @return String object representing message displayed by popup
+     */
+    public String readResultLabel() {
+        return new LabelElement(resultLabelSelector).getText();
+    }
+
+    /**
+     * Clicks alert's 'OK' button.
+     */
+    public void clickAlertAccept() {
+        getDriver().switchTo()
+                .alert()
+                .accept();
+    }
+
+    /**
+     * Clicks alert's 'Cancel' button.
+     */
+    public void clickAlertDismiss() {
+        getDriver().switchTo()
+                .alert()
+                .dismiss();
+    }
+
+    /**
+     * Types text into alert's text field.
+     *
+     * @param text String object sent into alert's text field
+     */
+    public void writeTextInAlert(String text) {
+        getDriver().switchTo()
+                .alert()
+                .sendKeys(text);
+    }
+}
+
+
+
+
+
+

== == alert()

+
+
+

Using switchTo() method you can change processed content. switchTo().alert() allows performing actions on appearing alerts such as accepting, dismissing or entering keys.

+
+
+
+
+

== Test Class

+
+
+

Before all tests: Open The Internet Main Page and go to JavaScript Alert page

+
+
+
    +
  1. +

    Click JS Alert button, accept the alert and check if the Result message reflects the performed action

    +
  2. +
  3. +

    Click JS Confirm button, accept alert and check if Result message returns performed action

    +
  4. +
  5. +

    Click JS Confirm button, dismiss alert and check if Result message returns performed action

    +
  6. +
  7. +

    Click JS Prompt button, write random text, accept alert and check if Result message returns performed action with written text

    +
  8. +
  9. +

    Click JS Prompt button, dismiss the alert and check if Result message returns performed action

    +
  10. +
+
+
+

After each case: Refresh Page

+
+
+

After all tests: Navigate back to The Internet Main Page

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class JavaScriptAlertsTest extends TheInternetBaseTest {
+
+    private static JavaScriptAlertsPage javaScriptAlertsPage;
+
+    private final String jsAlertCofirmMessage    = "You successfuly clicked an alert";
+    private final String jsConfirmConfirmMessage = "You clicked: Ok";
+    private final String jsConfirmCancelMessage  = "You clicked: Cancel";
+    private final String jsPromptConfirmMessage  = "You entered: ";
+    private final String jsPromptCancelMessage   = "You entered: null";
+    private final String randomString            = "random";
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        javaScriptAlertsPage = shouldTheInternetPageBeOpened().clickJavaScriptAlertLink();
+
+        logStep("Verify if JavaScript Alerts page is opened");
+        assertTrue("Unable to open JavaScript Alerts page", javaScriptAlertsPage.isLoaded());
+    }
+
+    @AfterClass
+    public static void tearDownAfterClass() {
+        logStep("Navigate back to The-Internet page");
+        BasePage.navigateBack();
+    }
+
+    @Test
+    public void shouldJSAlertCloseWithProperMessageAfterPressOkButton() {
+        logStep("Click Alert button");
+        javaScriptAlertsPage.clickAlertButton();
+
+        logStep("Click 'OK' button on alert");
+        javaScriptAlertsPage.clickAlertAccept();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsAlertCofirmMessage, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Test
+    public void shouldJSConfirmCloseWithProperMessageAfterPressOkButton() {
+        logStep("Click Confirm button");
+        javaScriptAlertsPage.clickConfirmButton();
+
+        logStep("Click 'OK' button on alert");
+        javaScriptAlertsPage.clickAlertAccept();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsConfirmConfirmMessage, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Test
+    public void shouldJSConfirmCloseWithProperMessageAfterPressCancelButton() {
+        logStep("Click Confirm button");
+        javaScriptAlertsPage.clickConfirmButton();
+
+        logStep("Click 'Cancel' button on alert");
+        javaScriptAlertsPage.clickAlertDismiss();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsConfirmCancelMessage, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Test
+    public void shouldJSPromptCloseWithProperMessageAfterPressOKButton() {
+        logStep("Click Prompt button");
+        javaScriptAlertsPage.clickPromptButton();
+
+        logStep("Insert text to alert: " + randomString);
+        javaScriptAlertsPage.writeTextInAlert(randomString);
+
+        logStep("Click 'OK' button on alert");
+        javaScriptAlertsPage.clickAlertAccept();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsPromptConfirmMessage + randomString, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Test
+    public void shouldJSPromptCloseWithProperMessageAfterPressCancelButton() {
+        logStep("Click Prompt button");
+        javaScriptAlertsPage.clickPromptButton();
+
+        logStep("Click 'Cancel' button on alert");
+        javaScriptAlertsPage.clickAlertDismiss();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsPromptCancelMessage, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Override
+    public void tearDown() {
+        logStep("Refresh JavaScriptAlersPage");
+        javaScriptAlertsPage.refreshPage();
+    }
+
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-16-Key-Presses-test.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-16-Key-Presses-test.html new file mode 100644 index 00000000..1c0ae9ee --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-16-Key-Presses-test.html @@ -0,0 +1,388 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example46 +
+
+
+

This simple case shows how to test key pressing

+
+
+

This site uses JavaScript to read the key pressed and display its value.

+
+
+
+example47 +
+
+
+

== Page Class

+
+
+
+
public class KeyPressesPage extends BasePage {
+
+    private static final By selectorResult = By.cssSelector("#result");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.KEY_PRESS.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Key Presses' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.KEY_PRESS.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Presses given keyboard key.
+     *
+     * @param keyToPress Key to be pressed on keyboard
+     */
+    public void pressKey(String keyToPress) {
+        getAction().sendKeys(keyToPress)
+                .perform();
+    }
+
+    /**
+     * Returns information from web page about pressed keyboard key.
+     *
+     * @return Information from web page about pressed key
+     */
+    public String getPressedKeyInformation() {
+        return getDriver().findElementDynamic(selectorResult)
+                .getText();
+    }
+}
+
+
+
+
+
+

== Test Class

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Go to Key Presses site

    +
  4. +
  5. +

    Press a key

    +
  6. +
  7. +

    Check if a displayed message contains the pressed key

    +
  8. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class KeyPressesTest extends TheInternetBaseTest {
+
+    private static KeyPressesPage keyPressesPage;
+
+    private final String keyToBePressed  = "Q";
+    private final String expectedMessage = "You entered: Q";
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        keyPressesPage = shouldTheInternetPageBeOpened().clickKeyPressesLink();
+
+        logStep("Verify if Key Presses page is opened");
+        assertTrue("Unable to open Key Presses page", keyPressesPage.isLoaded());
+    }
+
+    @Test
+    public void shouldWebsiteReturnInformationAboutPressedKey() {
+        logStep("Press a keyboard key");
+        keyPressesPage.pressKey(keyToBePressed);
+
+        logStep("Verify if website give valid information about pressed keyboard key");
+        assertEquals("Information about the pressed key is invalid", expectedMessage,
+                keyPressesPage.getPressedKeyInformation());
+    }
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-17-Multiple-Windows.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-17-Multiple-Windows.html new file mode 100644 index 00000000..ea6818b4 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-17-Multiple-Windows.html @@ -0,0 +1,406 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example48 +
+
+
+

This simple example shows how to operate on multiple browser tabs.

+
+
+

When you click the link, a new website will be opened in the second tab.

+
+
+
+example49 +
+
+
+

== Page Class

+
+
+
+
public class MultipleWindowsPage extends BasePage {
+
+    private final static By selectorLink = By.cssSelector("#content > div > a");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.WINDOW.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Opening a new window' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.WINDOW.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Clicks 'click here' link.
+     *
+     * @return NewWindowPage object
+     */
+    public NewWindowPage clickHereLink() {
+        getDriver().findElementDynamic(selectorLink)
+                .click();
+        getDriver().waitForPageLoaded();
+        return new NewWindowPage();
+    }
+}
+
+
+
+

You also need a second page class for New Window Page. Implement only the required methods.

+
+
+
+
public class NewWindowPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.NEW_WINDOW.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'New window' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.NEW_WINDOW.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+}
+
+
+
+
+
+

== Test Class

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Go to Multiple Windows Page

    +
  4. +
  5. +

    Click the link

    +
  6. +
  7. +

    Check if a new page is opened in the second tab

    +
  8. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class MultipleWindowsTest extends TheInternetBaseTest {
+
+    private static MultipleWindowsPage    multipleWindowsPage;
+    private static NewWindowPage        newWindowPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        multipleWindowsPage = shouldTheInternetPageBeOpened().clickmultipleWindowsLink();
+
+        logStep("Verify if Multiple Windows page is opened");
+        assertTrue("Unable to open Multiple Windows page", multipleWindowsPage.isLoaded());
+    }
+
+    @Test
+    public void verifyIfNewBrowserWindowOpen() {
+        logStep("Click 'Click here' link");
+        newWindowPage = multipleWindowsPage.clickHereLink();
+
+        logStep("Verify if 'New window page' is opened");
+        assertTrue("Unable to open a new browser window", newWindowPage.isLoaded());
+    }
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-18-Redirection.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-18-Redirection.html new file mode 100644 index 00000000..e77b6718 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-18-Redirection.html @@ -0,0 +1,418 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example50 +
+
+
+

This simple case shows how to approach redirecting links.

+
+
+

After clicking on the link, you will be redirected to Status Codes Page.

+
+
+
+example51 +
+
+
+

== Page Class

+
+ +
+
+
+ +
+
+
+
public class RedirectLinkPage extends BasePage {
+
+    private static final By selectorRedirectHere = By.cssSelector("a#redirect");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.REDIRECT.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Redirection' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.REDIRECT.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Clicks 'Redirect here' link.
+     *
+     * @return StatusCodesHomePage object
+     */
+    public StatusCodesHomePage clickRedirectHereLink() {
+        new Button(selectorRedirectHere).click();
+        return new StatusCodesHomePage();
+    }
+}
+
+
+
+
+
+

== == Status Codes Page

+
+
+
+
public class StatusCodesHomePage extends BasePage {
+
+    private static final By selectorLink200Code = By.linkText("200");
+    private static final By selectorLink301Code = By.linkText("301");
+    private static final By selectorLink404Code = By.linkText("404");
+    private static final By selectorLink500Code = By.linkText("500");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.STATUS_CODES.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Status Codes' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.STATUS_CODES.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+}
+
+
+
+
+
+

== Test Class

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Go to Redirection Page

    +
  4. +
  5. +

    Click the link

    +
  6. +
  7. +

    Check if Status Codes Page is loaded

    +
  8. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class RedirectLinkTest extends TheInternetBaseTest {
+
+    private static RedirectLinkPage    redirectLinkPage;
+    private static StatusCodesHomePage statusCodesHomePage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        redirectLinkPage = shouldTheInternetPageBeOpened().clickRedirectLink();
+
+        logStep("Verify if Redirect Link page is opened");
+        assertTrue("Unable to open Redirect Link page", redirectLinkPage.isLoaded());
+    }
+
+    @Test
+    public void shouldUserBeRedirectedToStatusCodePage() {
+        logStep("Click 'Redirect here' link");
+        statusCodesHomePage = redirectLinkPage.clickRedirectHereLink();
+
+        logStep("Verify redirection to Status Code page");
+        assertTrue("User hasn't been redirected to the expected website",
+                statusCodesHomePage.isLoaded());
+    }
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-19-Slider-Test.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-19-Slider-Test.html new file mode 100644 index 00000000..9b0caa42 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-19-Slider-Test.html @@ -0,0 +1,633 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example52 +
+
+
+

This case shows how to move horizontal slider.

+
+
+

You can move the slider by dragging it with a mouse or using arrow keys. The page uses a simple script to get slider position and display set value.

+
+
+
+example53 +
+
+
+

== Page Class

+
+
+
+
public class HorizontalSliderPage extends BasePage {
+
+    private static final By selectorHorizontalSlider = By.cssSelector("div.sliderContainer");
+    private static final By sliderSelector           = By.cssSelector("input");
+    private static final By valueSelector            = By.cssSelector("#range");
+
+    private HorizontalSliderElement horizontalSlider;
+
+    public HorizontalSliderPage() {
+        horizontalSlider = getDriver().elementHorizontalSlider(selectorHorizontalSlider,
+                sliderSelector, valueSelector, BigDecimal.ZERO, new BigDecimal(5),
+                new BigDecimal(0.5));
+    }
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.HORIZONTAL_SLIDER.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Horizontal Slider' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.HORIZONTAL_SLIDER.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Validates if WebElement representing horizontal slider is visible on the page.
+     *
+     * @return true if horizontal slider is visible, false otherwise.
+     */
+    public boolean isElementHorizontalSliderVisible() {
+        return getDriver().elementHorizontalSlider(selectorHorizontalSlider)
+                .isDisplayed();
+    }
+
+    /**
+     * Returns the value of slider's start position.
+     *
+     * @return BigDecimal representing the lowest possible value of slider.
+     */
+    public BigDecimal getStartPosition() {
+        return horizontalSlider.getMinRange();
+    }
+
+    /**
+     * Returns the value of slider's middle position.
+     *
+     * @return BigDecimal representing the average value between start and end position.
+     */
+    public BigDecimal getMiddlePosition() {
+        return horizontalSlider.getMaxRange()
+                .subtract(horizontalSlider.getMinRange())
+                .divide(new BigDecimal(2));
+    }
+
+    /**
+     * Returns the value of slider's end position.
+     *
+     * @return BigDecimal representing the highest possible value of slider.
+     */
+    public BigDecimal getEndPosition() {
+        return horizontalSlider.getMaxRange();
+    }
+
+    /**
+     * Returns current value of slider's position.
+     *
+     * @return BigDecimal representing current value of slider.
+     */
+    public BigDecimal getCurrentPosition() {
+        return horizontalSlider.getCurrentSliderValue();
+    }
+
+    /**
+     * Sets horizontal slider to a given position using one of the available methods: using keyboard
+     * or using mouse move.
+     *
+     * @param position
+     * @param method
+     */
+    public void setSliderPositionTo(BigDecimal position, int method) {
+        horizontalSlider.setSliderPositionTo(position, method);
+    }
+
+    /**
+     * Verifies the correctness of the given position value and rounds it when necessary.
+     *
+     * @param position
+     * @return Correct value of horizontal slider's position.
+     */
+    public BigDecimal verifyAndCorrectPositionValue(BigDecimal position) {
+        return horizontalSlider.verifyAndCorrectPositionValue(position);
+    }
+}
+
+
+
+
+
+

== == Horizontal Slider Element

+
+
+

This class implements methods wich can perform actions on slider:

+
+
+

Create Slider Object using method:

+
+
+
    +
  • +

    getDriver().elementHorizontalSlider(By sliderContainerSelector, By sliderSelector, By valueSelector, BigDecimal minRange, BigDecimal maxRange, BigDecimal step)

    +
  • +
+
+
+

And use:

+
+
+
    +
  • +

    BigDecimal getMaxRange()

    +
  • +
  • +

    BigDecimal getMinRange()

    +
  • +
  • +

    BigDecimal getCurrentSliderValue()

    +
  • +
  • +

    setSliderPositionTo(BigDecimal position, int method) - moves slider to a given position. If the position is not valid, it changes it to the nearest proper value. Second parameter determinates movement method: 0 - Keyboard, 1 - Mouse

    +
  • +
  • +

    BigDecimal verifyAndCorrectPositionValue(BigDecimal position) - returns nearest correct position

    +
  • +
+
+
+
+
+

== Test Class

+
+
+

Before all tests: Open The Internet Main Page

+
+
+

Before each case:

+
+
+
    +
  1. +

    Go to Horizontal Slider Page

    +
  2. +
  3. +

    Check if the slider is visible

    +
  4. +
  5. +

    Save start, middle and end position

    +
  6. +
+
+
+

Case 1 - Moving with the keyboard:

+
+
+
    +
  1. +

    Move slider to start position, and check if the current position equals the beginning value

    +
  2. +
  3. +

    Move the slider to middle position, and check if the current position equals the middle value

    +
  4. +
  5. +

    Move slider to end position, and check if the current position equals the end value

    +
  6. +
  7. +

    Try to move slider before start position, and check if the current position equals the beginning value

    +
  8. +
  9. +

    Try to move slider after end position, and check if the current position equals the end value

    +
  10. +
  11. +

    Try to move the slider to an improperly defined position between start and middle, and check if the current position equals the corrected value

    +
  12. +
  13. +

    Try to move the slider to an improperly defined random position, and check if the current position equals the corrected value

    +
  14. +
  15. +

    Move the slider back to start position, and check if the current position equals the beginning value

    +
  16. +
+
+
+

Case 2 - Moving with a mouse: Repeat each Case 1 step using a mouse instead of keyboard

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class SliderTest extends TheInternetBaseTest {
+
+    private static HorizontalSliderPage horizontalSliderPage;
+
+    BigDecimal startPosition;
+    BigDecimal middlePosition;
+    BigDecimal endPosition;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        logStep("Open the Url http://the-internet.herokuapp.com/");
+        theInternetPage = new TheInternetPage();
+        theInternetPage.load();
+
+        logStep("Verify if Url http://the-internet.herokuapp.com/ is opened");
+        assertTrue("Unable to load The Internet Page", theInternetPage.isLoaded());
+    }
+
+    @Override
+    public void setUp() {
+        logStep("Click Horizontal Slider link");
+        horizontalSliderPage = theInternetPage.clickHorizontalSliderLink();
+
+        logStep("Verify if Horizontal Slider page is opened");
+        assertTrue("Unable to load Horizontal Slider page", horizontalSliderPage.isLoaded());
+
+        logStep("Verify if horizontal slider element is visible");
+        assertTrue("Horizontal slider is not visible",
+                horizontalSliderPage.isElementHorizontalSliderVisible());
+
+        startPosition = horizontalSliderPage.getStartPosition();
+        middlePosition = horizontalSliderPage.getMiddlePosition();
+        endPosition = horizontalSliderPage.getEndPosition();
+    }
+
+    @Test
+    public void shouldHorizontalSliderMoveWhenKeyboardArrowButtonsArePressed() {
+        BigDecimal position;
+        logStep("Move slider to start position: " + startPosition);
+        horizontalSliderPage.setSliderPositionTo(startPosition, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider to middle position: " + middlePosition);
+        horizontalSliderPage.setSliderPositionTo(middlePosition, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(middlePosition),
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider to end position: " + endPosition);
+        horizontalSliderPage.setSliderPositionTo(endPosition, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", endPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = startPosition.subtract(BigDecimal.ONE);
+        logStep("Move slider to position before start position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = endPosition.add(BigDecimal.ONE);
+        logStep("Move slider to position after end position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", endPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = middlePosition.divide(new BigDecimal(2));
+        logStep("Move slider to improperly defined position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(position),
+                horizontalSliderPage.getCurrentPosition());
+
+        position = new BigDecimal(new BigInteger("233234"), 5);
+        logStep("Move slider to improperly defined random position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(position),
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider back to start position: " + startPosition);
+        horizontalSliderPage.setSliderPositionTo(startPosition, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+    }
+
+    @Test
+    public void shouldHorizontalSliderMoveWhenMouseButtonIsPressedAndMouseIsMoving() {
+        BigDecimal position;
+        logStep("Move slider to start position: " + startPosition);
+        horizontalSliderPage.setSliderPositionTo(startPosition, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider to middle position: " + middlePosition);
+        horizontalSliderPage.setSliderPositionTo(middlePosition, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(middlePosition),
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider to end position: " + endPosition);
+        horizontalSliderPage.setSliderPositionTo(endPosition, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", endPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = startPosition.subtract(BigDecimal.ONE);
+        logStep("Move slider to position before start position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = endPosition.add(BigDecimal.ONE);
+        logStep("Move slider to position after end position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", endPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = middlePosition.divide(new BigDecimal(2));
+        logStep("Move slider to improperly defined position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(position),
+                horizontalSliderPage.getCurrentPosition());
+
+        position = new BigDecimal(new BigInteger("212348"), 5);
+        logStep("Move slider to improperly defined random position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(position),
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider back to start position: " + startPosition);
+        horizontalSliderPage.setSliderPositionTo(startPosition, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+    }
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-20-Sortable-Data-Tables.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-20-Sortable-Data-Tables.html new file mode 100644 index 00000000..9d7d6160 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-20-Sortable-Data-Tables.html @@ -0,0 +1,520 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example54 +
+
+
+

This example shows how to sort and read data from tables.

+
+
+

After clicking on a column header, the data will be sorted descending and after another click sorted ascending by selected attribute. Watch how both tables' content changes on page DOM. Sorting is performed by JavaScript functions.

+
+
+
+example55 +
+
+
+

== Page Class

+
+
+
+
public class SortableDataTablesPage extends BasePage {
+
+    private static final By selectorTable  = By.cssSelector("table.tablesorter");
+    private static final By selectorHeader = By.cssSelector("th");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.SORTABLE_DATA_TABLES.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Data Tables' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.SORTABLE_DATA_TABLES.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Sorts data in given column using ascending order.
+     *
+     * @param columnNumber The number of column where data should be sorted
+     * @param tableNumber  The number of table where data should be sorted
+     */
+    public void sortColumnAscending(int columnNumber, int tableNumber) {
+        WebElement header = this.getTableHeaders(columnNumber, tableNumber);
+        String className = header.getAttribute("class");
+        if (className.contains("headerSortUp") || !className.contains("headerSortDown")) {
+            header.click();
+        }
+    }
+
+    /**
+     * Sorts data in given column using descending order.
+     *
+     * @param columnNumber The number of the column where data should be sorted
+     * @param tableNumber  The number of the table where data should be sorted
+     */
+    public void sortColumnDescending(int columnNumber, int tableNumber) {
+        WebElement header = this.getTableHeaders(columnNumber, tableNumber);
+        String className = header.getAttribute("class");
+        if (!className.contains("headerSortUp")) {
+            header.click();
+            if (!className.contains("headerSortDown")) {
+                header.click();
+            }
+        }
+    }
+
+    /**
+     * Return given column values from chosen table.
+     *
+     * @param columnNumber The number of the column the data should be retrieved from
+     * @param tableNumber  The number of the table the data should be retrieved from
+     * @return list of values from given column
+     */
+    public List<String> getColumnValues(int columnNumber, int tableNumber) {
+        WebElement table = getTable(tableNumber);
+        return JsoupHelper.findTexts(table, By.cssSelector("tr > td:nth-child(" + (columnNumber + 1)
+                + ")"));
+    }
+
+    /**
+     * Returns column's class name.
+     *
+     * @param columnNumber The number of the column to get class number from
+     * @param tableNumber  The number of the table to get column class name from
+     * @return String object representing column's class name
+     */
+    public String readColumnClass(int columnNumber, int tableNumber) {
+        return this.getTableHeaders(columnNumber, tableNumber)
+                .getAttribute("class");
+    }
+
+    private WebElement getTable(int tableNumber) {
+        return new ListElements(selectorTable).getList()
+                .get(tableNumber);
+    }
+
+    private WebElement getTableHeaders(int columnNumber, int tableNumber) {
+        return getTable(tableNumber).findElements(selectorHeader)
+                .get(columnNumber);
+    }
+}
+
+
+
+
+
+

== == Finding values

+
+
+

Using proper selectors, save elements such as tables and their columns' headers as Web Element Lists. Afterwards, you can get the desired element finding it by index (e. g. table or column number). To get column values, use JsoupHelper and to check if the column is sorted get its class attribute.

+
+
+
+
+

== Test Class

+
+
+

Before all tests: Open The Internet Main Page

+
+
+

Before each case: Go to Sortable Data Tables Page

+
+
+

Case 1:

+
+
+
    +
  1. +

    Choose a random table

    +
  2. +
  3. +

    Sort first column "Last Name" in ascending order

    +
  4. +
  5. +

    Check if column header class contains "headerSortDown"

    +
  6. +
  7. +

    Save column content to the List

    +
  8. +
  9. +

    Create List copy and sort it

    +
  10. +
  11. +

    Compare sorted values and values from the table

    +
  12. +
+
+
+

Case 2:

+
+
+
    +
  1. +

    Choose a random table

    +
  2. +
  3. +

    Sort second column "First Name" in descending order

    +
  4. +
  5. +

    Check if column header class contains "headerSortUp"

    +
  6. +
  7. +

    Save column content to the List

    +
  8. +
  9. +

    Create List copy and sort it then reverse it

    +
  10. +
  11. +

    Compare reversed sorted values and values from the table

    +
  12. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class SortableDataTablesTest extends TheInternetBaseTest {
+
+    private static SortableDataTablesPage sortableDataTablesPage;
+
+    private List<String> actualValues;
+    private List<String> expectedValues;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        logStep("Open the Url http://the-internet.herokuapp.com/");
+        theInternetPage = new TheInternetPage();
+        theInternetPage.load();
+
+        logStep("Verify if Url http://the-internet.herokuapp.com/ is opened");
+        assertTrue("Unable to load The Internet Page", theInternetPage.isLoaded());
+    }
+
+    @Override
+    public void setUp() {
+        logStep("Click subpage link");
+        sortableDataTablesPage = theInternetPage.clickSortableDataTablesLink();
+
+        logStep("Verify if subpage is opened");
+        assertTrue("Unable to open Sortable Data Tables page", sortableDataTablesPage.isLoaded());
+    }
+
+    @Test
+    public void shouldLastNameColumnBeOrderedAscendingAfterSort() {
+        int columnNumber = 0;
+        int tableNumber = new Random().nextInt(2);
+
+        logStep("Sort 'Last Name' column");
+        sortableDataTablesPage.sortColumnAscending(columnNumber, tableNumber);
+        assertTrue("Unable to set ascending order for 'Last Name' column",
+                sortableDataTablesPage.readColumnClass(columnNumber, tableNumber)
+                        .contains("headerSortDown"));
+
+        logStep("Verify data order for 'Last Name' column");
+        actualValues = sortableDataTablesPage.getColumnValues(columnNumber, tableNumber);
+        expectedValues = new ArrayList<String>(actualValues);
+        Collections.sort(expectedValues);
+        assertEquals("'Last Name' column is not sorted in ascending order",
+                expectedValues, actualValues);
+    }
+
+    @Test
+    public void shouldFirstNameColumnBeOrderedDescendingAfterSort() {
+        int columnNumber = 1;
+        int tableNumber = new Random().nextInt(2);
+
+        logStep("Sort 'First Name' column");
+        sortableDataTablesPage.sortColumnDescending(columnNumber, tableNumber);
+        assertTrue("Unable to set descending order for 'First Name' column",
+                sortableDataTablesPage.readColumnClass(columnNumber, tableNumber)
+                        .contains("headerSortUp"));
+
+        logStep("Verify data order for 'First Name' column");
+        actualValues = sortableDataTablesPage.getColumnValues(columnNumber, tableNumber);
+        expectedValues = new ArrayList<String>(actualValues);
+        Collections.sort(expectedValues);
+        Collections.reverse(expectedValues);
+        assertEquals("'First Name' column is not sorted in descending order",
+                expectedValues, actualValues);
+    }
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-21-Status-Codes.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-21-Status-Codes.html new file mode 100644 index 00000000..f6de7347 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-21-Status-Codes.html @@ -0,0 +1,556 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example56 +
+
+
+

This example shows how to process HTTP status codes returned by page

+
+
+

When you click status code link, you will be redirected to the subpage which returns the proper HTTP status code. In order to check what code was returned:

+
+
+
    +
  1. +

    Open developer tools

    +
  2. +
  3. +

    Go to Network tab

    +
  4. +
  5. +

    Click request name

    +
  6. +
  7. +

    Find a code number in Headers section

    +
  8. +
+
+
+
+example57 +
+
+
+

== Page Class

+
+
+

Add new methods to existing Status Codes Home Page Class

+
+
+
+
public class StatusCodesHomePage extends BasePage {
+
+    private static final By selectorLink200Code = By.linkText("200");
+    private static final By selectorLink301Code = By.linkText("301");
+    private static final By selectorLink404Code = By.linkText("404");
+    private static final By selectorLink500Code = By.linkText("500");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.STATUS_CODES.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Status Codes' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.STATUS_CODES.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Verifies if given link is displayed.
+     *
+     * @param selector Selector of the given link
+     * @return true if link is displayed
+     */
+    public boolean isLinkCodeDisplayed(By selector) {
+        return getDriver().findElementDynamic(selector)
+                .isDisplayed();
+
+    }
+
+    /**
+     * Clicks '200' link.
+     *
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCode200Link() {
+        return clickCodeLink(selectorLink200Code);
+    }
+
+    /**
+     * Clicks '301' link.
+     *
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCode301Link() {
+        return clickCodeLink(selectorLink301Code);
+    }
+
+    /**
+     * Clicks '404' link.
+     *
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCode404Link() {
+        return clickCodeLink(selectorLink404Code);
+    }
+
+    /**
+     * Clicks '500' link.
+     *
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCode500Link() {
+        return clickCodeLink(selectorLink500Code);
+    }
+
+    /**
+     * Clicks code link according to given code number.
+     *
+     * @param code Given code
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCodeLink(String code) {
+        return clickCodeLink(By.linkText(code));
+    }
+
+    private StatusCodesCodePage clickCodeLink(By selector) {
+        String codeNumber = getCodeNumberToCheck(selector);
+        getDriver().findElementDynamic(selector)
+                .click();
+        return new StatusCodesCodePage(codeNumber);
+    }
+
+    private String getCodeNumberToCheck(By selector) {
+        return getDriver().findElementDynamic(selector)
+                .getText();
+    }
+}
+
+
+
+

Create a page class for status codes subpages as well. In the class constructor specify which code number should be returned.

+
+
+
+
public class StatusCodesCodePage extends BasePage {
+
+    private static final By selectorDisplayedText   = By.cssSelector("#content > div > p");
+    private static final By selectorLinkToCodesPage = By.cssSelector("#content > div > p > a");
+
+    private String codeNumber;
+
+    public StatusCodesCodePage(String codeNumber) {
+        this.codeNumber = codeNumber;
+    }
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.STATUS_CODES.getValue() + '/');
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Status Codes' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.STATUS_CODES.getValue() + '/' + codeNumber);
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    public String getCodeNumber() {
+        return codeNumber;
+    }
+
+    /**
+     * Verifies if page is loaded with given code number.
+     *
+     * @param codeNumber Expected code number
+     * @return true if expected code number is loaded with web page
+     */
+    public boolean isLoadedWithStatusCode(String codeNumber) {
+        return getDriver().getCurrentUrl()
+                .equals(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.STATUS_CODES.getValue() + "/" + codeNumber);
+    }
+
+    /**
+     * Returns displayed code number.
+     * <p>
+     * Code number is retrieved from following text displayed on the page:<b>
+     * 'This page returned a *** status code.', where *** represent the code number to be
+     * returned.
+     * </p>
+     *
+     * @return String object representing the displayed code number retrieved from specific sentence.
+     */
+    public String getDisplayedCodeNumber() {
+        return getDriver().findElementDynamic(selectorDisplayedText)
+                .getText()
+                .substring(21, 24);
+    }
+
+    /**
+     * Clicks link to return to 'Code Page'.
+     *
+     * @return StatusCodesHomePage object
+     */
+    public StatusCodesHomePage clickLinkToCodePage() {
+        getDriver().findElementDynamic(selectorLinkToCodesPage)
+                .click();
+        return new StatusCodesHomePage();
+    }
+}
+
+
+
+
+
+

== Test Class

+
+
+

Before all tests: Open The Internet Main Page, go to Status Codes page

+
+
+

Steps:

+
+
+

For each status code

+
+
+
    +
  1. +

    Click code link

    +
  2. +
  3. +

    Check if the page is loaded with an expected code number

    +
  4. +
  5. +

    Check if the displayed code number equals the expected number

    +
  6. +
  7. +

    Go back to Status Codes Home Page

    +
  8. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class StatusCodeTest extends TheInternetBaseTest {
+
+    private static StatusCodesHomePage statusCodesHomePage;
+    private        StatusCodesCodePage statusCodesCodePage;
+
+    private String[] codes = { "200", "301", "404", "500" };
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        statusCodesHomePage = shouldTheInternetPageBeOpened().clickStatusCodesLink();
+
+        logStep("Verify if Status Codes Home page is opened");
+        assertTrue("Unable to open Status Codes Home page", statusCodesHomePage.isLoaded());
+    }
+
+    @Test
+    public void shouldProperCodeBeDisplayedAfterClickCodeLink() {
+
+        for (String code : codes) {
+            logStep("Click link to " + code + " code");
+            statusCodesCodePage = statusCodesHomePage.clickCodeLink(code);
+
+            logStep("Verify if proper web page corresponding to the code is opened");
+            assertTrue("Unable to open proper web page",
+                    statusCodesCodePage.isLoadedWithStatusCode(code));
+
+            logStep("Verify if the displayed code is equal to the expected one");
+            assertEquals(code, statusCodesCodePage.getDisplayedCodeNumber());
+
+            logStep("Click link to come back to 'Status Codes' page");
+            statusCodesCodePage.clickLinkToCodePage();
+        }
+    }
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/BFLogger.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/BFLogger.html new file mode 100644 index 00000000..4cbf8bf5 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/BFLogger.html @@ -0,0 +1,289 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

BFLogger

+
+
+

BFLogger is a default MrChecker logging tool. Use it to communicate important information from test execution. There are three basic logging methods:

+
+
+
    +
  • +

    logInfo(String message) - used for test steps

    +
  • +
  • +

    logDebug(String message) - used for non-official information, either during the test build process or in Page Object files

    +
  • +
  • +

    logError(String message) - used to emphasize critical information

    +
  • +
+
+
+

Logs will be visible in the console and in the log file under path: MrChecker_Test_Framework\workspace\project-folder\logs

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Elements-Types.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Elements-Types.html new file mode 100644 index 00000000..d0d62a9c --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Elements-Types.html @@ -0,0 +1,296 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Elements types

+
+
+

MrChecker includes Object types for various elements existing on webpages such as Button, TextBox etc. There is also WebElement class and getDriver().findElementDynamic(By selector) method for creating webpage objects dynamically and performing basic actions:

+
+
+

Instead of using static types you can use:

+
+
+
+
    public TyposPage clickTyposLink() {
+        WebElement checkboxesLink = getDriver().findElementDynamic(checkboxesLinkSelector);
+        checkboxesLink.click();
+        return new TyposPage();
+    }
+
+
+
+

Or perform actions without creating a variable:

+
+
+
+
    public TyposPage clickTyposLink() {
+        getDriver().findElementDynamic(checkboxesLinkSelector).click();
+        return new TyposPage();
+    }
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Environment-variables.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Environment-variables.html new file mode 100644 index 00000000..39c486f2 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Environment-variables.html @@ -0,0 +1,342 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Environment Variables

+
+
+

In Page classes, when you load/start a web page, it is bad practice to hard-code the main URL.

+
+
+

Instead of hardcoded main URL variable, you build your Page class with a dynamic variable.

+
+
+
+
+

How to create / update system environment

+
+
+

Dynamic variable values are stored under path \src\resources\enviroments\environments.csv.

+
+
+
+image1 +
+
+
+

By default, the environment takes value from DEV column.

+
+
+
+
+

== Access to the external file variables

+
+
+

Create a class GetEnvironmentParam to map values from an external file with Page class:

+
+
+
+
public enum GetEnvironmentParam {
+
+    // Name of enum must be in line with cell name in /src/resources/environments/environment.csv
+    WWW_FONT_URL,
+    TOOLS_QA,
+    WEB_SERVICE,
+    THE_INTERNET_MAIN_PAGE,
+    ELEMENTAL_SELENIUM_PAGE;
+
+    public String getValue() {
+
+        if (null ==  BaseTest.getEnvironmentService()) {
+            throw new BFInputDataException("Environment Parameters class wasn't initialized properly");
+        }
+
+        return BaseTest.getEnvironmentService()
+                .getValue(this.name());
+
+    }
+
+    @Override
+    public String toString() {
+
+        return this.getValue();
+
+    }
+}
+
+
+
+

When you add a new row to environments.csv also add a new variable to GetEnvironmentParam class.

+
+
+

In Page class access environmental variable using this method:

+
+
+
+
GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue();
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Page-object.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Page-object.html new file mode 100644 index 00000000..c2211d51 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Page-object.html @@ -0,0 +1,322 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Page Object

+
+
+

Your Product Under Test will be the following website: http://the-internet.herokuapp.com/

+
+
+

At first, create an Object to represent The Internet Main Page:

+
+
+
+
public class TheInternetPage extends BasePage
+
+
+
+

Each class which extends BasePage class must override three methods:

+
+
+
    +
  • +

    public boolean isLoaded() - returns true if the page is loaded and false if not

    +
  • +
  • +

    public void load() - loads the page

    +
  • +
  • +

    public String pageTitle() - returns page title

    +
  • +
+
+
+
+
public class TheInternetPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        BFLogger.logDebug("The internet page is loaded: " + getDriver().getCurrentUrl());
+        return getDriver().getCurrentUrl()
+                .equals(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'The internet' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Selectors.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Selectors.html new file mode 100644 index 00000000..bc92560a --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Selectors.html @@ -0,0 +1,579 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Selectors

+
+ +
+
+
+

Create selectors

+
+
+

Create a selector for every interactable element on a webpage using By type. Find elements and their attributes using browser developer mode (F12).

+
+
+
+image2 +
+
+
+
+
private static final By abTestLinkSelector               = By.cssSelector("li >
+            a[href*='abtest']");
+    private static final By basicAuthLinkSelector            = By.cssSelector("li >
+            a[href*='basic_auth']");
+    private static final By brokenImageLinkSelector          = By.cssSelector("li >
+            a[href*='broken_images']");
+    private static final By challengingDomLinkSelector       = By.cssSelector("li >
+            a[href*='challenging_dom']");
+    private static final By checkboxesLinkSelector           = By.cssSelector("li >
+            a[href*='checkboxes']");
+    private static final By contextMenuLinkSelector          = By.cssSelector("li >
+            a[href*='context_menu']");
+    private static final By disappearingElementsLinkSelector = By.cssSelector("li >
+            a[href*='disappearing_elements']");
+    private static final By dragAndDropLinkSelector          = By.cssSelector("li >
+            a[href*='drag_and_drop']");
+    private static final By dropdownLinkSelector             = By.cssSelector("li >
+            a[href*='dropdown']");
+    private static final By dynamicContentLinkSelector       = By.cssSelector("li >
+            a[href*='dynamic_content']");
+    private static final By dynamicControlsLinkSelector      = By.cssSelector("li >
+            a[href*='dynamic_controls']");
+    private static final By dynamicLoadingLinkSelector       = By.cssSelector("li >
+            a[href*='dynamic_loading']");
+    private static final By exitIntentLinkSelector           = By.cssSelector("li >
+            a[href*='exit_intent']");
+    private static final By fileDownloadLinkSelector         = By.cssSelector("li >
+            a[href$='download']");
+    private static final By fileUploadLinkSelector           = By.cssSelector("li >
+           a[href*='upload']");
+    private static final By floatingMenuLinkSelector         = By.cssSelector("li >
+           a[href*='floating_menu']");
+    private static final By forgotPasswordLinkSelector       = By.cssSelector("li >
+           a[href*='forgot_password']");
+    private static final By formAuthenticationLinkSelector   = By.cssSelector("li >
+           a[href*='login']");
+    private static final By framesLinkSelector               = By.cssSelector("li >
+           a[href*='frames']");
+    private static final By geolocationLinkSelector          = By.cssSelector("li >
+           a[href*='geolocation']");
+    private static final By horizontalSliderLinkSelector     = By.cssSelector("li >
+           a[href*='horizontal_slider']");
+    private static final By hoversLinkSelector               = By.cssSelector("li >
+           a[href*='hovers']");
+    private static final By infiniteScrollLinkSelector       = By.cssSelector("li >
+           a[href*='infinite_scroll']");
+    private static final By javaScriptAlertLinkSelector      = By.cssSelector("li >
+           a[href*='javascript_alerts']");
+    private static final By javaScriptErrorLinkSelector      = By.cssSelector("li >
+           a[href*='javascript_error']");
+    private static final By jQueryUIMenuLinkSelector         = By.cssSelector("li >
+           a[href*='jqueryui/menu']");
+    private static final By keyPressesLinkSelector           = By.cssSelector("li >
+           a[href*='key_presses']");
+    private static final By largeAndDeepDOMLinkSelector      = By.cssSelector("li >
+           a[href*='large']");
+    private static final By multipleWindowsLinkSelector      = By.cssSelector("li >
+           a[href*='windows']");
+    private static final By nestedFramesLinkSelector         = By.cssSelector("li >
+           a[href*='nested_frames']");
+    private static final By notificationMessagesLinkSelector = By.cssSelector("li >
+           a[href*='notification_message']");
+    private static final By redirectLinkSelector             = By.cssSelector("li >
+           a[href*='redirector']");
+    private static final By secureFileDownloadLinkSelector   = By.cssSelector("li >
+           a[href*='download_secure']");
+    private static final By shiftingContentLinkSelector      = By.cssSelector("li >
+           a[href*='shifting_content']");
+    private static final By slowResourcesLinkSelector        = By.cssSelector("li >
+           a[href*='slow']");
+    private static final By sortableDataTablesLinkSelector   = By.cssSelector("li >
+           a[href*='tables']");
+    private static final By statusCodesLinkSelector          = By.cssSelector("li >
+           a[href*='status_codes']");
+    private static final By typosLinkSelector                = By.cssSelector("li >
+           a[href*='typos']");
+    private static final By wYSIWYGEditorLinkSelector        = By.cssSelector("li >
+           a[href*='tinymce']");
+
+
+
+
+
+

Implement methods

+
+
+

Then use these selectors to create Objects and perform actions on page elements:

+
+
+
+
public ABtestPage clickABtestingLink() {
+        new Button(abTestLinkSelector).click();
+        return new ABtestPage();
+    }
+
+    public BasicAuthPage clickBasicAuthLink() {
+        getDriver().waitForPageLoaded();
+        WebElement link = getDriver().findElementDynamic(basicAuthLinkSelector);
+        JavascriptExecutor executor = (JavascriptExecutor) getDriver();
+        executor.executeScript("var elem=arguments[0]; setTimeout(function() {elem.click();}, 100)",
+                link);
+        return new BasicAuthPage();
+    }
+
+    public BrokenImagePage clickBrokenImageLink() {
+        new Button(brokenImageLinkSelector).click();
+        return new BrokenImagePage();
+    }
+
+    public ChallengingDomPage clickChallengingDomLink() {
+        new Button(challengingDomLinkSelector).click();
+        return new ChallengingDomPage();
+    }
+
+    public CheckboxesPage clickCheckboxesLink() {
+        new Button(checkboxesLinkSelector).click();
+        return new CheckboxesPage();
+    }
+
+    public ContextMenuPage clickContextMenuLink() {
+        new Button(contextMenuLinkSelector).click();
+        return new ContextMenuPage();
+    }
+
+    public DisappearingElementsPage clickDisappearingElementsLink() {
+        new Button(disappearingElementsLinkSelector).click();
+        return new DisappearingElementsPage();
+    }
+
+    public DragAndDropPage clickDragAndDropLink() {
+        new Button(dragAndDropLinkSelector).click();
+        return new DragAndDropPage();
+    }
+
+    public DropdownPage clickDropdownLink() {
+        new Button(dropdownLinkSelector).click();
+        return new DropdownPage();
+    }
+
+    public DynamicContentPage clickDynamicContentLink() {
+        new Button(dynamicContentLinkSelector).click();
+        return new DynamicContentPage();
+    }
+
+    public DynamicControlsPage clickDynamicControlsLink() {
+        new Button(dynamicControlsLinkSelector).click();
+        return new DynamicControlsPage();
+    }
+
+    public DynamicLoadingPage clickDynamicLoadingLink() {
+        new Button(dynamicLoadingLinkSelector).click();
+        return new DynamicLoadingPage();
+    }
+
+    public ExitIntentPage clickExitIntentLink() {
+        new Button(exitIntentLinkSelector).click();
+        return new ExitIntentPage();
+    }
+
+    public FileDownloadPage clickFileDownloadLink() {
+        new Button(fileDownloadLinkSelector).click();
+        return new FileDownloadPage();
+    }
+
+    public FileUploadPage clickFileUploadLink() {
+        new Button(fileUploadLinkSelector).click();
+        return new FileUploadPage();
+    }
+
+    public FloatingMenuPage clickFloatingMenuLink() {
+        new Button(floatingMenuLinkSelector).click();
+        return new FloatingMenuPage();
+    }
+
+    public ForgotPasswordPage clickForgotPasswordLink() {
+        new Button(forgotPasswordLinkSelector).click();
+        return new ForgotPasswordPage();
+    }
+
+    public FormAuthenticationPage clickFormAuthenticationLink() {
+        new Button(formAuthenticationLinkSelector).click();
+        return new FormAuthenticationPage();
+    }
+
+    public FramesPage clickFramesLink() {
+        new Button(framesLinkSelector).click();
+        return new FramesPage();
+    }
+
+    public GeolocationPage clickGeolocationLink() {
+        new Button(geolocationLinkSelector).click();
+        return new GeolocationPage();
+    }
+
+    public HorizontalSliderPage clickHorizontalSliderLink() {
+        new Button(horizontalSliderLinkSelector).click();
+        return new HorizontalSliderPage();
+    }
+
+    public HoversPage clickHoversLink() {
+        new Button(hoversLinkSelector).click();
+        return new HoversPage();
+    }
+
+    public InfiniteScrollPage clickInfiniteScrollLink() {
+        new Button(infiniteScrollLinkSelector).click();
+        return new InfiniteScrollPage();
+    }
+
+    public JavaScriptAlertsPage clickJavaScriptAlertLink() {
+        new Button(javaScriptAlertLinkSelector).click();
+        return new JavaScriptAlertsPage();
+    }
+
+    public JavaScriptErrorPage clickJavaScriptErrorLink() {
+        new Button(javaScriptErrorLinkSelector).click();
+        return new JavaScriptErrorPage();
+    }
+
+    public JQueryUIMenuPage clickJQueryUIMenuLink() {
+        new Button(jQueryUIMenuLinkSelector).click();
+        return new JQueryUIMenuPage();
+    }
+
+    public KeyPressesPage clickKeyPressesLink() {
+        new Button(keyPressesLinkSelector).click();
+        return new KeyPressesPage();
+    }
+
+    public LargeAndDeepDOMPage clickLargeAndDeepDOMLink() {
+        new Button(largeAndDeepDOMLinkSelector).click();
+        return new LargeAndDeepDOMPage();
+    }
+
+    public MultipleWindowsPage clickmultipleWindowsLink() {
+        new Button(multipleWindowsLinkSelector).click();
+        return new MultipleWindowsPage();
+    }
+
+    public NestedFramesPage clickNestedFramesLink() {
+        new Button(nestedFramesLinkSelector).click();
+        return new NestedFramesPage();
+    }
+
+    public NotificationMessagesPage clickNotificationMessagesLink() {
+        new Button(notificationMessagesLinkSelector).click();
+        return new NotificationMessagesPage();
+    }
+
+    public RedirectLinkPage clickRedirectLink() {
+        new Button(redirectLinkSelector).click();
+        return new RedirectLinkPage();
+    }
+
+    public SecureFileDownloadPage clickSecureFileDownloadLink() {
+        new Button(secureFileDownloadLinkSelector).click();
+        return new SecureFileDownloadPage();
+    }
+
+    public ShiftingContentPage clickShiftingContentLink() {
+        new Button(shiftingContentLinkSelector).click();
+        return new ShiftingContentPage();
+    }
+
+    public SlowResourcesPage clickSlowResourcesLink() {
+        new Button(slowResourcesLinkSelector).click();
+        return new SlowResourcesPage();
+    }
+
+    public SortableDataTablesPage clickSortableDataTablesLink() {
+        new Button(sortableDataTablesLinkSelector).click();
+        return new SortableDataTablesPage();
+    }
+
+    public StatusCodesHomePage clickStatusCodesLink() {
+        new Button(statusCodesLinkSelector).click();
+        return new StatusCodesHomePage();
+    }
+
+    public TyposPage clickTyposLink() {
+        new Button(typosLinkSelector).click();
+        return new TyposPage();
+    }
+
+    public WYSIWYGEditorPage clickWYSIWYGEditorLink() {
+        new Button(wYSIWYGEditorLinkSelector).click();
+        return new WYSIWYGEditorPage();
+    }
+
+
+
+

These methods create a Button object for every link on The Internet Page and click it to redirect to a different subpage.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/The-Internet-Base-Test.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/The-Internet-Base-Test.html new file mode 100644 index 00000000..9ee8a31b --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/The-Internet-Base-Test.html @@ -0,0 +1,350 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

The Internet Base Test

+
+ +
+
+
+

Test Class

+
+
+

Create Test class and override methods:

+
+
+
    +
  • +

    public void setUp() - executes before each test

    +
  • +
  • +

    public void tearDown() - executes after each test

    +
  • +
+
+
+
+
public class TheInternetBaseTest extends BaseTest {
+    @Override
+    public void setUp() {
+
+    }
+
+    @Override
+    public void tearDown() {
+        logStep("Navigate back to The-Internet page");
+        BasePage.navigateBack();
+    }
+}
+
+
+
+

logStep(String message) method doesn’t exist yet so you should create it:

+
+
+
+
    protected static int             step = 0;
+
+     /**
+     * Logs test step including step number calculated individually for each test.
+     *
+     * @param message Text message representing step description.
+     */
+    public static void logStep(String message) {
+        BFLogger.logInfo("Step " + ++step + ": " + message);
+    }
+
+
+
+

Write a method for loading The Internet Page and checking if it is properly opened:

+
+
+
+
    protected static TheInternetPage theInternetPage;
+
+    /**
+     * Performs operations required for verifying if The Internet Page is properly opened.
+     *
+     * @return TheInternetPage
+     */
+    public static TheInternetPage shouldTheInternetPageBeOpened() {
+
+        logStep("Open the Url http://the-internet.herokuapp.com/");
+        theInternetPage = new TheInternetPage();
+        theInternetPage.load();
+
+        logStep("Verify if Url http://the-internet.herokuapp.com/ is opened");
+        assertTrue("Unable to load The Internet Page", theInternetPage.isLoaded());
+
+        return theInternetPage;
+    }
+
+
+
+

This Test class can’t be launched because it doesn’t contain any @Test methods. It’s been created only for supporting other Test classes.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/E2E-Tutorials/Tutorial1.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/E2E-Tutorials/Tutorial1.html new file mode 100644 index 00000000..a7a3f321 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/E2E-Tutorials/Tutorial1.html @@ -0,0 +1,664 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

MrChecker E2E tutorials

+
+
+

In order to learn more about MrChecker structure, start from Project Organisation section and then check out our fantastic tutorials:

+
+
+
+
+

How to create a basic test in MrChecker

+
+ +
+
+
+

Example: Booking a table

+
+
+

As an example to test, we will use the MyThaiStar booking page.
+In order to book a table, do the following steps:

+
+
+
    +
  1. +

    Open MyThaiStar Book Table Page

    +
  2. +
  3. +

    Enter booking data: Date and time, Name, Email and number of Table guests

    +
  4. +
  5. +

    Click Accept terms

    +
  6. +
  7. +

    Click Book table

    +
  8. +
  9. +

    Display confirmation box and send booking

    +
  10. +
  11. +

    Check if the booking was successful.

    +
  12. +
+
+
+
+image1 +
+
+
+
+image2 +
+
+
+

You can go through these steps manually and double-check the result.

+
+
+
+
+

How to prepare a test

+
+ +
+
+
+

== 1. Create BookTablePage class

+
+
+

You will need a class which will represent the MyThaiStar booking page.
+Fill the required methods with the following code:

+
+
+
+
public class BookTablePage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded(); //waits until the page is loaded
+        return getDriver().getCurrentUrl()
+                .equals("https://mts-devonfw-core.cloud.okteto.net/bookTable"); //checks if current page address equals MyThaiStar booking page address
+    }
+
+    @Override
+    public void load() {
+        getDriver().get("https://mts-devonfw-core.cloud.okteto.net/bookTable"); //loads page under specified address
+        getDriver().waitForPageLoaded(); //waits until the page is loaded
+    }
+
+    @Override
+    public String pageTitle() {
+        return "My Thai Star"; //returns page title
+    }
+}
+
+
+
+

getDriver() method allows accessing Selenium Web Driver which performs actions on the webpage.

+
+
+

As this page class represents the MyThaiStar booking page, you have to set up selectors for web elements required in the test case. In the example you have to create selectors for elements we’ll interact with:

+
+
+
    +
  • +

    Date and time input field

    +
  • +
  • +

    Name input field

    +
  • +
  • +

    Email input field

    +
  • +
  • +

    Table guests input field

    +
  • +
  • +

    Accept terms checkbox

    +
  • +
  • +

    Book table button

    +
  • +
+
+
+

Selectors will be implemented as fields.

+
+
+

Example of the selector for Date and time input field:

+
+
+
+
/** Date field search criteria */
+private static final By dateSearch = By.cssSelector("input[formcontrolname='bookingDate']");
+
+
+
+

The input field’s name "bookingDate" was found by using the developer console in Google Chrome. How to prepare an everlasting selector?

+
+
+
+image3 +
+
+
+

This selector can be used to create a WebElement object of the said input field. Therefore, you will create a new method and call it "enterTimeAndDate".

+
+
+
+
public void enterTimeAndDate(String date) {
+    WebElement dateInput = getDriver().findElementDynamic(dateSearch); //creates a new WebElement to access Date and time input field
+    dateInput.sendKeys(date); //enters date value
+}
+
+
+
+

Now you can create other selectors and objects and methods for every element on the webpage:

+
+
+
+
/** Name input field search criteria */
+private static final By nameSearch = By.cssSelector("input[formcontrolname='name']");
+
+/** Email input field search criteria */
+private static final By emailSearch = By.cssSelector("input[formcontrolname='email']");
+
+/** Number of guests search criteria */
+private static final By guestsSearch = By.cssSelector("input[formcontrolname='assistants']");
+
+/** Check box search criteria */
+private static final By checkboxSearch = By.cssSelector("mat-checkbox[data-name='bookTableTerms']");
+
+/** Book table button search criteria */
+private static By bookTableSearch = By.name("bookTableSubmit");
+
+
+
+
+
public void enterName(String name) {
+    WebElement nameInput = getDriver().findElementDynamic(nameSearch); //creates a new WebElement to access name input field
+    nameInput.sendKeys(name); //enters name value
+}
+
+public void enterEmail(String email) {
+    WebElement emailInput = getDriver().findElementDynamic(emailSearch); //creates a new WebElement to access email input field
+    emailInput.sendKeys(email); //enters email value
+}
+
+public void enterGuests(int amountOfGuests) {
+    WebElement guestsInput = getDriver().findElementDynamic(guestsSearch); //creates a new WebElement to access amount of guests input field
+    guestsInput.sendKeys(Integer.toString(amountOfGuests)); //enters the number of guests value converted from integer to string
+}
+
+public void acceptTerms() {
+    WebElement checkbox = getDriver().findElementDynamic(checkboxSearch); //creates a new WebElement to access accept terms checkbox
+    WebElement square = checkbox.findElement(By.className("mat-checkbox-inner-container")); //creates a new WebElement to access inner square
+    JavascriptExecutor js = (JavascriptExecutor) getDriver(); //creates a Javascript executor object
+    js.executeScript("arguments[0].click()", square); //executes a script which clicks the square
+
+}
+
+public void clickBookTable() {
+    WebElement buttonbutton = getDriver().findElementDynamic(bookTableSearch); //creates a new WebElement to access book table button
+    getDriver().waitUntilElementIsClickable(bookTableSearch); //waits until a button might be clicked
+    buttonbutton.click(); //clicks the button
+}
+
+
+
+

You can use those methods in order to create a new method to go through the whole booking process:

+
+
+
+
public ConfirmBookPage enterBookingData(String date, String name, String email, int guests) {
+    enterTimeAndDate(date);
+    enterName(name);
+    enterEmail(email);
+    enterGuests(guests);
+    acceptTerms();
+
+    clickBookTable();
+
+    return new ConfirmBookPage();
+}
+
+
+
+
+
+

== 2. Create ConfirmBookPage class

+
+
+

As you can see, this method returns another page object that has not yet been created. This step is required, as the booking information that you would like to check is on another webpage. This means that you will have to create another page class and call it ConfirmBookPage:

+
+
+
+
public class ConfirmBookPage extends BasePage {
+
+    /** Confirmation dialog search criteria */
+    private static final By confirmationDialogSearch = By.className("mat-dialog-container");
+
+    /** Send confirmation button search criteria */
+    private static final By sendButtonSearch = By.name("bookTableConfirm");
+
+    /** Cancel confirmation button search criteria */
+    private static final By cancelButtonSearch = By.name("bookTableCancel");
+
+    @Override
+    public boolean isLoaded() {
+        //creates a new WebElement to access confirmation dialog box
+        WebElement confirmationDialog = getDriver().findElementDynamic(confirmationDialogSearch);
+
+        return confirmationDialog.isDisplayed(); //checks if the box is displayed
+    }
+
+    //this method won't be called because the page is loaded only after clicking book table button
+    @Override
+    public void load() {
+        BFLogger.logError("MyThaiStar booking confirmation page was not loaded."); //logs error
+    }
+
+    @Override
+    public String pageTitle() {
+        return "My Thai Star";
+    }
+
+    public void confirmBookingData() {
+        WebElement sendButton = getDriver().findElementDynamic(sendButtonSearch); //creates a new WebElement to access confirmation button
+        sendButton.click(); //clicks the send button
+    }
+
+    public void cancelBookingData() {
+        WebElement cancelButton = getDriver().findElementDynamic(cancelButtonSearch); //creates a new WebElement to access resignation button
+        cancelButton.click(); //clicks the cancel button
+    }
+}
+
+
+
+
+image4 +
+
+
+

After the click on Send button - the green confirmation dialogue appears with the message "Table successfully booked":

+
+
+
+image5 +
+
+
+

To be able to check if the booking was successful, you should go back to the BookTablePage class and add one more method in order to check if the green box was displayed:

+
+
+
+
/** Dialog search criteria */
+private static final By dialogSearch = By.className("bgc-green-600");
+
+public boolean checkConfirmationDialog() {
+    WebElement greenConfirmationDialog = getDriver().findElementDynamic(dialogSearch); //creates a new WebElement to access confirmation dialog
+
+    return greenConfirmationDialog.isDisplayed(); //checks if the dialog is displayed
+}
+
+
+
+
+
+

== 3. Create BookTableTest class

+
+
+

At this point you can start creating a test class:

+
+
+
+
import static org.junit.Assert.assertTrue;
+
+public class BookTableTest extends BaseTest {
+    private static BookTablePage bookTablePage = new BookTablePage(); //the field contains book table page object
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        bookTablePage.load(); //loads book table page
+    }
+
+    @AfterClass
+    public static void tearDownAfterClass() {
+
+    }
+
+    @Override
+    public void setUp() {
+        if (!bookTablePage.isLoaded()) {
+            bookTablePage.load(); //if the page is not loaded, loads it
+        }
+    }
+
+    @Override
+    public void tearDown() {
+
+    }
+}
+
+
+
+
+
+

== 4. Write the first test

+
+
+

You can prepare your first test method using the methods from the page classes:

+
+
+
+
@Test
+public void Test_BookTableAndCheckConfirmation() {
+    String date = "07/23/2019 1:00 PM"; //replace with tomorrow's date in format "MM/dd/yyyy hh:mm a"
+    String name = "Smith"; //name field
+    String email = "smith@somemail.com"; //email field
+    int guests = 3; //number of guests
+
+    //enters booking data and returns a new confirmation page
+    ConfirmBookPage confirmBookPage = bookTablePage.enterBookingData(date, name, email, guests);
+    confirmBookPage.confirmBookingData(); //confirms booking
+
+    //checks if the green dialog box appears, if it does, test is passed, if not, the test failed and displays message given in the first argument
+    assertTrue("Test failed: Table not booked", bookTablePage.checkConfirmationDialog()); //returns true if dialog box appears and false if not
+}
+
+
+
+
+
+

== 5. Run the test

+
+
+

Run the test by right-clicking on the test method → Run as → JUnit test.

+
+
+
+image6 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Project-Organisation.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Project-Organisation.html new file mode 100644 index 00000000..93b5462f --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/Project-Organisation.html @@ -0,0 +1,530 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Project organization

+
+ +
+
+
+

Importing projects

+
+
+

Every MrChecker project should be imported as a Maven Project.

+
+
+

Example from Eclipse IDE:

+
+
+
+1 +
+
+
+
+2 +
+
+
+

Enter the project path and select projects to import.

+
+
+
+3 +
+
+
+

When the import is finished, update the project structure - ALT + F5

+
+
+
+4 +
+
+
+
+
+

Exporting projects

+
+
+

In order to create a new standalone MrChecker project, you can use template-app-under-test and export it to the new folder:

+
+
+
+5 +
+
+
+
+6 +
+
+
+

Create a new folder for the project and enter its path. Select project and files to export:

+
+
+
+7 +
+
+
+

Change project name and other properties, if necessary, in pom.xml file:

+
+
+
+8 +
+
+
+

Then you can import the project to the workspace and create new packages and classes.

+
+
+
+
+

Creating new packages

+
+
+
    +
  1. +

    You will need two new packages: one for the new page classes, the other one for test classes:

    +
    +
      +
    • +

      Create a package for page classes

      +
      +
      +
      Open Eclipse
      +Use the "Project Explorer" on the left
      +Navigate to [your-project] → src/main/java → com.capgemini.mrchecker → selenium
      +Right-click on "selenium"
      +Click on "New" → New Package
      +Name the new package "com.capgemini.mrchecker.selenium.pages.[your-product-name]"
      +
      +
      +
    • +
    • +

      Create a package for test classes

      +
      +
      +
      Navigate to [your-project] → src/test/java → com.capgemini.mrchecker → selenium
      +Right click on "selenium"
      +Click on "New" → New Package
      +Name the new package "com.capgemini.mrchecker.selenium.tests.[your-product-name]"
      +
      +
      +
    • +
    +
    +
  2. +
+
+
+

Example:

+
+
+
+9 +
+
+
+
+
+

Creating new Page Classes

+
+
+
+
Navigate to: [your-project] → src/main/java → com.capgemini.mrchecker → selenium.pages.[your-product-name]
+Click on "New" → New Class
+Enter the name "YourPage"
+
+
+
+

Every Page Class should extend BasePage class. Import all necessary packages and override all required methods:

+
+
+
    +
  • +

    public boolean isLoaded() - returns true if the page is loaded and false if not

    +
  • +
  • +

    public void load() - loads the page

    +
  • +
  • +

    public String pageTitle() - returns page title

    +
  • +
+
+
+

Example:

+
+
+
+
 public class MainPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        return false;
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Main Page'");
+    }
+
+    @Override
+    public String pageTitle() {
+        return "Main Page Title";
+    }
+ }
+
+
+
+
+
+

Creating new Test Classes

+
+
+
+
Navigate to  [your-project] → src/test/java → com.capgemini.mrchecker → selenium.tests.[your-product-name]
+Click on "New" → New Class
+Enter the name "YourCaseTest"
+
+
+
+

Test classes should extend BaseTest class, import all necessary packages and override all required methods:

+
+
+
    +
  • +

    public void setUp() - executes before each test

    +
  • +
  • +

    public void tearDown() - executes after each test

    +
  • +
+
+
+

Optionally, it is also possible to implement the following methods:

+
+
+
    +
  • +

    @BeforeClass +public static void setUpBeforeClass() - runs only once before all tests

    +
  • +
  • +

    @AfterClass +public static void tearDownAfterClass() - runs only once after performing all tests

    +
  • +
+
+
+

Every test method has to be signed with "@Test" parameter.

+
+
+
+
 public class YourCaseTest extends BaseTest {
+    private static MainPage mainPage = new MainPage();
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        mainPage.load();
+    }
+
+    @AfterClass
+    public static void tearDownAfterClass() {
+
+    }
+
+    @Override
+    public void setUp() {
+        if (!mainPage.isLoaded()) {
+            mainPage.load();
+        }
+    }
+
+    @Override
+    public void tearDown() {
+
+    }
+
+    @Test
+    public void shouldTestRunWithoutReturningError() {
+
+    }
+ }
+
+
+
+
+
+

Running Tests

+
+
+

Run the test by right-clicking on the test method → Run as → JUnit test.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/tutorials.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/tutorials.html new file mode 100644 index 00000000..6660aa7e --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/Who-Is-MrChecker/Tutorials/tutorials.html @@ -0,0 +1,277 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

In order to learn more about MrChecker structure, start from Project Organisation section and then check out our fantastic tutorials:

+
+
+

This tutorial will guide you through a series of tests which perform basic actions on webpages using MrChecker.

+
+
+

Make sure you already have MrChecker Test Framework installed on your PC. How to install?

+
+
+

Your Product Under Test will be the following website: http://the-internet.herokuapp.com/

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/benefits.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/benefits.html new file mode 100644 index 00000000..6fec6422 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/benefits.html @@ -0,0 +1,298 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Benefits

+
+
+

Every customer may benefit from using MrChecker Test Framework. The main profits for your project are:

+
+
+
    +
  • +

    Resilient and robust building and validation process

    +
  • +
  • +

    Quality gates shifted closer to the software development process

    +
  • +
  • +

    Team quality awareness increase - including Unit Tests, Static Analysis, Security Tests, Performance in the testing process

    +
  • +
  • +

    Test execution environment transparent to any infrastructure

    +
  • +
  • +

    Touch base with the Cloud solution

    +
  • +
  • +

    Faster Quality and DevOps-driven delivery

    +
  • +
  • +

    Proven frameworks, technologies and processes.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/home.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/home.html new file mode 100644 index 00000000..6309fdc4 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/home.html @@ -0,0 +1,323 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Who is MrChecker?

+
+
+

MrChecker Test Framework is an end to end test automation framework which is written in Java. +It is an automated testing framework for functional testing of web applications, API web services, Service Virtualization, Security, native mobile apps and, in the near future, databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions.

+
+
+
+
+

Where does MrChecker apply?

+
+
+

The aim of MrChecker is to achieve a standardized way to build BlackBox tests. It provides the possibility to have one common software standard in order to build Component, Integration and System tests.

+
+
+

A Test Engineer does not have access to the application source code in order to perform BlackBox tests, but they are able to attach their tests to any application interfaces, such as - IP address - Domain Name - communication protocol - Command Line Interface.

+
+
+
+
+

MrChecker specification:

+
+
+
    +
  • +

    Responsive Web Design application: Selenium Browser

    +
  • +
  • +

    REST/SOAP: RestAssure

    +
  • +
  • +

    Service Virtualization: Wiremock

    +
  • +
  • +

    Database: JDBC drivers for SQL

    +
  • +
  • +

    Security: RestAssure + RestAssure Security lib

    +
  • +
  • +

    Standalone Java application: SWING

    +
  • +
  • +

    Native mobile application for Android: Appium

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/mrchecker.wiki/master-mrchecker.html b/docs/devonfw.github.io/1.0/mrchecker.wiki/master-mrchecker.html new file mode 100644 index 00000000..380aed41 --- /dev/null +++ b/docs/devonfw.github.io/1.0/mrchecker.wiki/master-mrchecker.html @@ -0,0 +1,12598 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

MrChecker - devonfw testing tool

+
+ +
+
+
+

Who Is MrChecker

+
+ +
+
Who is MrChecker?
+
+

MrChecker Test Framework is an end to end test automation framework which is written in Java. +It is an automated testing framework for functional testing of web applications, API web services, Service Virtualization, Security, native mobile apps and, in the near future, databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions.

+
+
+
+
Where does MrChecker apply?
+
+

The aim of MrChecker is to achieve a standardized way to build BlackBox tests. It provides the possibility to have one common software standard in order to build Component, Integration and System tests.

+
+
+

A Test Engineer does not have access to the application source code in order to perform BlackBox tests, but they are able to attach their tests to any application interfaces, such as - IP address - Domain Name - communication protocol - Command Line Interface.

+
+
+
+
MrChecker specification:
+
+
    +
  • +

    Responsive Web Design application: Selenium Browser

    +
  • +
  • +

    REST/SOAP: RestAssure

    +
  • +
  • +

    Service Virtualization: Wiremock

    +
  • +
  • +

    Database: JDBC drivers for SQL

    +
  • +
  • +

    Security: RestAssure + RestAssure Security lib

    +
  • +
  • +

    Standalone Java application: SWING

    +
  • +
  • +

    Native mobile application for Android: Appium

    +
  • +
+
+
+
+
Benefits
+
+

Every customer may benefit from using MrChecker Test Framework. The main profits for your project are:

+
+
+
    +
  • +

    Resilient and robust building and validation process

    +
  • +
  • +

    Quality gates shifted closer to the software development process

    +
  • +
  • +

    Team quality awareness increase - including Unit Tests, Static Analysis, Security Tests, Performance in the testing process

    +
  • +
  • +

    Test execution environment transparent to any infrastructure

    +
  • +
  • +

    Touch base with the Cloud solution

    +
  • +
  • +

    Faster Quality and DevOps-driven delivery

    +
  • +
  • +

    Proven frameworks, technologies and processes.

    +
  • +
+
+ +
+
+
Test stages
+ +
+
+
Unit test
+
+

A module is the smallest compilable unit of source code. It is often too small to be tested by the functional tests (black-box tests). However, it is the appropriate candidate for white-box testing. White-box tests have to be performed as the first static tests (e.g. Lint and inspections), followed by dynamic tests in order to check boundaries, branches and paths. Usually, this kind of testing would require enabling stubs and special test tools.

+
+
+
+
Component test
+
+

This is the black-box test of modules or groups of modules which represent certain functionalities. There are no rules about what could be called a component. Whatever a tester defines as a component, should make sense and be a testable unit. Components can be integrated into bigger components step by step and tested as such.

+
+
+
+
Integration test
+
+

Functions are tested by feeding them input and examining the output, and internal program structure is rarely considered. The software is completed step by step and tested by tests covering a collaboration between modules or classes. The integration depends on the kind of system. For example, the steps could be as follows: run the operating system first and gradually add one component after another, then check if the black-box tests are still running (the test cases will be extended together with every added component). The integration is done in the laboratory. It may be also completed by using simulators or emulators. Additionally, the input signals could be stimulated.

+
+
+
+
Software / System test
+
+

System testing is a type of testing conducted on a complete integrated system to evaluate the system’s compliance with its specified requirements. This is a type of black-box testing of the complete software in the target system. The most important factor in successful system testing is that the environmental conditions for the software have to be as realistic as possible (complete original hardware in the destination environment).

+
+
+
+
+
+

Test Framework Modules

+
+
+

In this section, it is possible to find all the information regarding the main modules of MrChecker:

+
+
+
+
+

Core Test Module

+
+ +
+
Core Test Module
+ +
+
+
What is Core Test Module
+
+
+image1 new +
+
+
+ +
+
How to start?
+ +
+
+
Allure Logger → BFLogger
+
+

In Allure E2E Test Framework you have ability to use and log any additional information crucial for:

+
+
+
    +
  • +

    test steps

    +
  • +
  • +

    test execution

    +
  • +
  • +

    page object actions, and many more.

    +
  • +
+
+
+
+
Where to find saved logs
+
+

Every logged information is saved in a separate test file, as a result of parallel tests execution.

+
+
+

The places they are saved:

+
+
+
    +
  1. +

    In test folder C:\Allure_Test_Framework\allure-app-under-test\logs

    +
  2. +
  3. +

    In every Allure Test report, logs are always embedded as an attachment, according to test run.

    +
  4. +
+
+
+
+
How to use logger:
+
+
    +
  • +

    Start typing

    +
    +

    BFLogger

    +
    +
  • +
  • +

    Then type . (dot)

    +
  • +
+
+
+
+
Type of logger:
+
+
    +
  • +

    BFLogger.logInfo("Your text") - used for test steps

    +
  • +
  • +

    BFLogger.logDebug("Your text") - used for non official information, either during test build process or in Page Object files

    +
  • +
  • +

    BFLogger.logError("Your text") - used to emphasize critical information

    +
  • +
+
+
+
+image13 +
+
+
+

Console output:

+
+
+
+image14 +
+
+
+
+
Allure Reports
+
+
+image15 +
+
+
+

Allure is a tool designed for test reports.

+
+
+
+
Generate report - command line
+
+

You can generate a report using one of the following commands:

+
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+
+
mvn test allure:serve -Dgroups=TestsTag1
+
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+
mvn test allure:serve -Dtest=TS_Tag1
+
+
+
+

A report will be generated into temp folder. Web server with results will start. You can additionally configure the server timeout. The default value is "3600" (one hour).

+
+
+

System property allure.serve.timeout.

+
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+
+
mvn test allure:report -Dgroups=TestsTag1
+
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+
mvn test allure:report -Dtest=TS_Tag1
+
+
+
+

A report will be generated to the directory: target/site/allure-maven/index.html

+
+
+

NOTE: Please open index.html file under Firefox. Chrome has some limitations to presenting dynamic content. If you want to open a report with a Chromium based Web Browser, you need to launch it first with --allow-file-access-from-files argument.

+
+
+
+
Generate report - Eclipse
+
+

A report is created here allure-app-under-test\target\site\allure-report\index.html

+
+
+

NOTE: Please open index.html file under Firefox. Chrome has some limitations to presenting dynamic content. If you want to open a report with a Chromium based Web Browser, you need to launch it first with --allow-file-access-from-files argument.

+
+
+
+image17 +
+
+
+
+image18 +
+
+
+
+
Generate report - Jenkins
+
+

In our case, we’ll use the Allure Jenkins plugin. When integrating Allure in a Jenkins job configuration, we’ll have direct access to the build’s test report.

+
+
+
+image19 +
+
+
+

There are several ways to access the Allure Test Reports:

+
+
+
    +
  • +

    Using the "Allure Report" button on the left navigation bar or center of the general job overview

    +
  • +
  • +

    Using the "Allure Report" button on the left navigation bar or center of a specific build overview

    +
  • +
+
+
+

Afterwards you’ll be greeted with either the general Allure Dashboard (showing the newest build) or the Allure Dashboard for a specific (older) build.

+
+
+
+
Allure dashboard
+
+
+image20 +
+
+
+

The Dashboard provides a graphical overview on how many test cases were successful, failed or broken.

+
+
+
    +
  • +

    Passed means, that the test case was executed successfully.

    +
  • +
  • +

    Broken means, that there were mistakes, usually inside of the test method or test class. As tests are being treated as code, broken code has to be expected, resulting in occasionally broken test results.

    +
  • +
  • +

    Failed means that an assertion failed.

    +
  • +
+
+
+
+
Defects
+
+

The defects tab lists out all the defects that occurred, and also descriptions thereof. Clicking on a list item displays the test case which resulted in an error. Clicking on a test case allows the user to have a look at the test case steps, as well as Log files or Screenshots of the failure.

+
+
+
+
Graph
+
+

The graph page includes a pie chart of all tests, showing their result status (failed, passed, etc.). Another graph allows insight into the time elapsed during the tests. This is a very useful information to find and eliminate possible bottlenecks in test implementations.

+
+
+
+image21 +
+
+
+
+
Why join Test Cases in groups - Test Suites
+
+
+image22 +
+
+
+
+
Regression Suite:
+
+

Regression testing is a type of software testing which verifies that software which was previously developed and tested still performs the same way after it was changed or interfaced with another software.

+
+
+
    +
  • +

    Smoke

    +
  • +
  • +

    Business vital functionalities

    +
  • +
  • +

    Full scope of test cases

    +
  • +
+
+
+
+
Functional Suite:
+
+
    +
  • +

    Smoke

    +
  • +
  • +

    Business function A

    +
  • +
  • +

    Business function B

    +
  • +
+
+
+
+
Single Responsibility Unit:
+
+
    +
  • +

    Single page

    +
  • +
  • +

    Specific test case

    +
  • +
+
+
+
+
How to build a Test Suite based on tags
+ +
+
+
Structure of the Test Suite
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+
+image23 new +
+
+
+

Where:

+
+
+
    +
  • +

    @RunWith(JUnitPlatform.class) - use Junit5 runner

    +
  • +
  • +

    @IncludeTags({"TestsTag1"}) - search all test files with the tag "TestsTag1"

    +
  • +
  • +

    @ExcludeTags({"TagToExclude"}) - exclude test files with the tag "TagToExclude"

    +
  • +
  • +

    @SelectPackages("com.capgemini.mrchecker.core.groupTestCases.testCases") - search only test files in "com.capgemini.mrchecker.core.groupTestCases.testCases" package

    +
  • +
  • +

    public class TS_Tag1 - the name of the Test Suite is "TS_Tag1"

    +
  • +
+
+
+

Most commonly used filters to build a Test Suite are ones using:

+
+
+
    +
  • +

    @IncludeTags({ })

    +
  • +
  • +

    @ExcludeTags({ })

    +
  • +
+
+
+

Example:

+
+
+
    +
  1. +

    @IncludeTags({ "TestsTag1" }) , @ExcludeTags({ }) → will execute all test cases with the tag TestsTag1

    +
  2. +
  3. +

    @IncludeTags({ "TestsTag1" }) , @ExcludeTags({ "SlowTest" }) → will execute all test cases with tag "TestsTag1" although it will exclude from this list the test cases with the tag "SlowTest"

    +
  4. +
  5. +

    @IncludeTags({ }) , @ExcludeTags({ "SlowTest" }) → It will exclude test cases with the tag "SlowTest"

    +
  6. +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+image23 +
+
+
+

Where:

+
+
+
    +
  • +

    @RunWith(WildcardPatternSuiteBF.class) - search for test files under /src/test/java

    +
  • +
  • +

    @IncludeCategories({ TestsTag1.class }) - search for all test files with the tag "TestsTag1.class"

    +
  • +
  • +

    @ExcludeCategories({ }) - exclude test files. In this example, there is no exclusion

    +
  • +
  • +

    @SuiteClasses({ "**/*Test.class" }) - search only test files, where the file name ends with "<anyChar/s>Test.class"

    +
  • +
  • +

    public class TS_Tag1 - the name of the Test Suite is "TS_Tag1"

    +
  • +
+
+
+

Most commonly used filters to build Test Suite are ones using:

+
+
+
    +
  • +

    @IncludeCategories({ })

    +
  • +
  • +

    @ExcludeCategories({ })

    +
  • +
+
+
+

Example:

+
+
+
    +
  1. +

    @IncludeCategories({ TestsTag1.class }) , @ExcludeCategories({ }) → will execute all test cases with the tag TestsTag1.class

    +
  2. +
  3. +

    @IncludeCategories({ TestsTag1.class }) , @ExcludeCategories({ SlowTest.class }) → will execute all test cases with the tag "TestsTag1.class" although it will exclude from this list the test cases with the tag "SlowTest.class"

    +
  4. +
  5. +

    @IncludeCategories({ }) , @ExcludeCategories({ SlowTest.class }) → will execute all test cases from /src/test/java, although it will exclude from this list the test cases with the tag "SlowTest.class"

    +
  6. +
+
+
+
+
Structure of Test Case
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+
+image24 new +
+
+
+

Where:

+
+
+
    +
  • +

    @TestsTag1, @TestsSmoke, @TestsSelenium - list of tags assigned to this test case - "TestsTag1, TestsSmoke, TestSelenium" annotations

    +
  • +
  • +

    public class FristTest_tag1_Test - the name of the test case is "FristTest_tag1_Test"

    +
  • +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+image24 +
+
+
+

Where:

+
+
+
    +
  • +

    @Category({ TestsTag1.class, TestsSmoke.class, TestSelenium.class }) - list of tags / categories assigned to this test case - "TestsTag1.class, TestsSmoke.class, TestSelenium.class"

    +
  • +
  • +

    public class FristTest_tag1_Test - the name of the test case is "FristTest_tag1_Test"

    +
  • +
+
+
+
+
Structure of Tags / Categories
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+

Tag name: TestsTag1 annotation

+
+
+
+image25 new +
+
+
+

Tag name: TestsSmoke annotation

+
+
+
+image26 new +
+
+
+

Tag name: TestSelenium annotation

+
+
+
+image27 new +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+

Tag name: TestsTag1.class

+
+
+
+image25 +
+
+
+

Tag name: TestsSmoke.class

+
+
+
+image26 +
+
+
+

Tag name: TestSelenium.class

+
+
+
+image27 +
+
+
+
+
How to run Test Suite
+
+

To run a Test Suite you perform the same steps as you do to run a test case

+
+
+

Command line

+
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+

JUnit5 disallows running suite classes from maven. Use -Dgroups=Tag1,Tag2 and -DexcludeGroups=Tag4,Tag5 to create test suites in maven.

+
+
+
+
mvn test site -Dgroups=TestsTag1
+
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+
mvn test site -Dtest=TS_Tag1
+
+
+
+

Eclipse

+
+
+
+image28 +
+
+
+
+
Data driven approach
+
+

Data driven approach - External data driven

+
+
+

External data driven - Data as external file injected in test case

+
+
+

Test case - Categorize functionality and severity

+
+
+

You can find more information about data driven here and here

+
+
+

There are a few ways to define parameters for tests.

+
+
+
+
Internal Data driven approach
+
+

Data as part of test case

+
+
+

The different means to pass in parameters are shown below.

+
+
+

Since mrchecker-core-module version 5.6.2.1

+
+
+

Static methods are used to provide the parameters.

+
+
+
+
A method in the test class:
+
+
+
@ParameterizedTest
+@MethodSource("argumentsStream")
+
+
+
+

OR

+
+
+
+
@ParameterizedTest
+@MethodSource("arrayStream")
+
+
+
+

In the first case the arguments are directly mapped to the test method parameters. In the second case the array is passed as the argument.

+
+
+
+image30 new +
+
+
+
+
A method in a different class:
+
+
+
@ParameterizedTest
+@MethodSource("com.capgemini.mrchecker.core.datadriven.MyContainsTestProvider#provideContainsTrueParameters")
+
+
+
+
+image32 new +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1

+
+
+

Parameters that are passed into tests using the @Parameters annotation must be _Object[]_s

+
+
+
+
In the annotation:
+
+
+
@Parameters({"1, 2, 3", "3, 4, 7", "5, 6, 11", "7, 8, 15"})
+
+
+
+
+image30 +
+
+
+

The parameters must be primitive objects such as integers, strings, or booleans. Each set of parameters is contained within a single string and will be parsed to their correct values as defined by the test method’s signature.

+
+
+
+
In a method named in the annotation:
+
+
+
@Parameters(method = "addParameters")
+
+
+
+
+image31 +
+
+
+

A separate method can be defined and referred to for parameters. This method must return an Object[] and can contain normal objects.

+
+
+
+
In a class:
+
+
+
@Parameters(source = MyContainsTestProvider.class)
+
+
+
+
+image32 +
+
+
+

A separate class can be used to define parameters for the test. This class must contain at least one static method that returns an Object[], and its name must be prefixed with provide. The class could also contain multiple methods that provide parameters to the test, as long as they also meet the required criteria.

+
+
+
+
External Data Driven
+
+

Data as external file injected in test case

+
+
+

Since mrchecker-core-module version 5.6.2.1

+
+
+

Tests use the annotation @CsvFileSource to inject CSVs file.

+
+
+
+
@CsvFileSource(resources = "/datadriven/test.csv", numLinesToSkip = 1)
+
+
+
+

A CSV can also be used to contain the parameters for the tests. It is pretty simple to set up, as it’s just a comma-separated list.

+
+
+
+
Classic CSV
+
+
+image33 new +
+
+
+

and CSV file structure

+
+
+
+image34 +
+
+
+
+
CSV with headers
+
+
+image35 new +
+
+
+

and CSV file structure

+
+
+
+image36 +
+
+
+
+
CSV with specific column mapper
+
+
+image37 new +
+
+
+

and Mapper implementation

+
+
+
+image38 new +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1

+
+
+

Tests use the annotation @FileParameters to inject CSVs file.

+
+
+
+
@FileParameters("src/test/resources/datadriven/test.csv")
+
+
+
+

A CSV can also be used to contain the parameters for the tests. It is pretty simple to set up, as it’s just a comma-separated list.

+
+
+
+
Classic CSV
+
+
+image33 +
+
+
+

and CSV file structure

+
+
+
+image34 +
+
+
+
+
CSV with headers
+
+
+image35 +
+
+
+

and CSV file structure

+
+
+
+image36 +
+
+
+
+
CSV with specific column mapper
+
+
+image37 +
+
+
+

and Mapper implementation

+
+
+
+image38 +
+
+
+
+
What is "Parallel test execution" ?
+
+

Parallel test execution means many "Test Classes" can run simultaneously.

+
+
+

"Test Class", as this is a Junit Test class, it can have one or more test cases - "Test case methods"

+
+
+
+image39 +
+
+
+
+
How many parallel test classes can run simultaneously?
+
+

Since mrchecker-core-module version 5.6.2.1

+
+
+

JUnit5 supports parallelism natively. The feature is configured using a property file located at src\test\resources\junit-platform.properties. +As per default configuration, concurrent test execution is set to run test classes in parallel using the thread count equal to a number of your CPUs.

+
+
+
+image39a +
+
+
+

Visit JUnit5 site to learn more about parallel test execution.

+
+
+

Prior to mrchecker-core-module version 5.6.2.1

+
+
+

By default, number of parallel test classes is set to 8.

+
+
+

It can be updated as you please, on demand, by command line:

+
+
+
+
mvn test site -Dtest=TS_Tag1 -Dthread.count=16
+
+
+
+

-Dthread.count=16 - increase number of parallel Test Class execution to 16.

+
+
+
+
Overview
+
+

Cucumber / Selenium

+
+
+

Business and IT don’t always understand each other. Very often misunderstandings between business and IT result in the costly failure of IT projects. With this in mind, Cucumber was developed as a tool to support human collaboration between business and IT.

+
+
+

Cucumber uses executable specifications to encourage a close collaboration. This helps teams to keep the business goal in mind at all times. With Cucumber you can merge specification and test documentation into one cohesive whole, allowing your team to maintain one single source of truth. Because these executable specifications are automatically tested by Cucumber, your single source of truth is always up-to-date.

+
+
+
+image40 +
+
+
+

Cucumber supports testers when designing test cases. To automate these test cases, several languages can be used. Cucumber also works well with Browser Automation tools such as Selenium Webdriver.

+
+
+
+
== Selenium
+
+

Selenium automates browsers and is used for automating web applications for testing purposes. Selenium offers testers and developers full access to the properties of objects and the underlying tests, via a scripting environment and integrated debugging options.

+
+
+

Selenium consists of many parts. If you want to create robust, browser-based regression automation suites and tests, Selenium Webdriver is most appropriate. With Selenium Webdriver you can also scale and distribute scripts across many environments.

+
+
+
+
Strengths
+ +
+
+
== Supports BDD
+
+

Those familiar with Behavior Driven Development (BDD) recognize Cucumber as an excellent open source tool that supports this practice.

+
+
+
+
== All in one place
+
+

With Cucumber / Selenium you can automate at the UI level. Automation at the unit or API level can also be implemented using Cucumber. This means all tests, regardless of the level at which they are implemented, can be implemented in one tool.

+
+
+
+
== Maintainable test scripts
+
+

Many teams seem to prefer UI level automation, despite huge cost of maintaining UI level tests compared to the cost of maintaining API or unit tests. To lessen the maintenance of UI testing, when designing UI level functional tests, you can try describing the test and the automation at three levels: business rule, UI workflow, technical implementation.

+
+
+

When using Cucumber combined with Selenium, you can implement these three levels for better maintenance.

+
+
+
+
== Early start
+
+

Executable specifications can and should be written before the functionality is implemented. By starting early, teams get most return on investment from their test automation.

+
+
+
+
== Supported by a large community
+
+

Cucumber and Selenium are both open source tools with a large community, online resources and mailing lists.

+
+
+
+
How to run cucumber tests in Mr.Checker
+ +
+
+
Command line / Jenkins
+
+
    +
  • +

    Run cucumber tests and generate Allure report. Please use this for Jenkins execution. Report is saved under ./target/site.

    +
    +
    +
    mvn clean -P cucumber test site
    +
    +
    +
  • +
  • +

    Run and generate report

    +
    +
    +
    mvn clean -P cucumber test site allure:report
    +
    +
    +
  • +
  • +

    Run cucumber tests, generate Allure report and start standalone report server

    +
    +
    +
    mvn clean -P cucumber test site allure:serve
    +
    +
    +
  • +
+
+
+
+
Eclipse IDE
+
+
+image41 +
+
+
+
+
Tooling
+ +
+
+
== Cucumber
+
+

Cucumber supports over a dozen different software platforms. Every Cucumber implementation provides the same overall functionality, but they also have their own installation procedure and platform-specific functionality. See https://cucumber.io/docs for all Cucumber implementations and framework implementations.

+
+
+

Also, IDEs such as Intellij offer several plugins for Cucumber support.

+
+
+
+
== Selenium
+
+

Selenium has the support of some of the largest browser vendors who have taken (or are taking) steps to make Selenium a native part of their browser. It is also the core technology in countless other browser automation tools, APIs and frameworks.

+
+
+
+
Automation process
+ +
+
+
== Write a feature file
+
+

Test automation in Cucumber starts with writing a feature file. A feature normally consists of several (test)scenarios and each scenario consists of several steps.

+
+
+

Feature: Refund item

+
+
+

Scenario: Jeff returns a faulty microwave

+
+
+

Given Jeff has bought a microwave for $100

+
+
+

And he has a receipt

+
+
+

When he returns the microwave

+
+
+

Then Jeff should be refunded $100

+
+
+

The above example shows a feature “Refund item” with one scenario “Jeff returns a faulty microwave”. The scenario consists of four steps, each starting with a keyword (Given, And, When, Then).

+
+
+
+
== Implementing the steps
+
+

Next the steps are implemented. Assuming we use Java to implement the steps, the Java code will look something like this.

+
+
+
+
public class MyStepdefs \{
+
+	@Given("Jeff has bought a microwave for $(\d+)")
+
+	public void Jeff_has_bought_a_microwave_for(int amount) \{
+
+		// implementation can be plain java
+
+		// or selenium
+
+		driver.findElement(By.name("test")).sendKeys("This is an example\n");
+
+		driver.findElement(By.name("button")).click();// etc
+	}
+}
+
+
+
+

Cucumber uses an annotation (highlighted) to match the step from the feature file with the function implementing the step in the Java class. The name of the class and the function can be as the developer sees fit. Selenium code can be used within the function to automate interaction with the browser.

+
+
+
+
== Running scenarios
+
+

There are several ways to run scenarios with Cucumber, for example the JUnit runner, a command line runner and several third party runners.

+
+
+
+
== Reporting test results
+
+

Cucumber can report results in several different formats, using formatter plugins

+
+
+
+
Features
+ +
+
+
== Feature files using Gherkin
+
+

Cucumber executes your feature files. As shown in the example below, feature files in Gherkin are easy to read so they can be shared between IT and business. Data tables can be used to execute a scenario with different inputs.

+
+
+
+image42 +
+
+
+
+
== Organizing tests
+
+

Feature files are placed in a directory structure and together form a feature tree.

+
+
+

Tags can be used to group features based on all kinds of categories. Cucumber can include or exclude tests with certain tags when running the tests.

+
+
+
+
Reporting test results
+
+

Cucumber can report results in several formats, using formatter plugins. +Not supported option by Shared Services: The output from Cucumber can be used to present test results in Jenkins or Hudson depending on the preference of the project.

+
+
+
+image43 +
+
+
+
+
HOW IS Cucumber / Selenium USED AT Capgemini?
+ +
+
+
Tool deployment
+
+

Cucumber and Selenium are chosen as one of Capgemini’s test automation industrial tools. We support the Java implementation of Cucumber and Selenium Webdriver. We can help with creating Cucumber, Selenium projects in Eclipse and IntelliJ.

+
+
+
+
Application in ATaaS (Automated Testing as a Service)
+
+

In the context of industrialisation, Capgemini has developed a range of services to assist and support the projects in process and tools implementation.

+
+
+

In this context a team of experts assists projects using test automation.

+
+
+

The main services provided by the center of expertise are:

+
+
+
    +
  • +

    Advise on the feasibility of automation.

    +
  • +
  • +

    Support with installation.

    +
  • +
  • +

    Coaching teams in the use of BDD.

    +
  • +
+
+
+
+
Run on independent Operation Systems
+
+

As E2E Allure test framework is built on top of:

+
+
+
    +
  • +

    Java 1.8

    +
  • +
  • +

    Maven 3.3

    +
  • +
+
+
+

This guarantees portability to all operating systems.

+
+
+

E2E Allure test framework can run on OS:

+
+
+
    +
  • +

    Windows,

    +
  • +
  • +

    Linux and

    +
  • +
  • +

    Mac.

    +
  • +
+
+
+

Test creation and maintenance in E2E Allure test framework can be done with any type of IDE:

+
+
+
    +
  • +

    Eclipse,

    +
  • +
  • +

    IntelliJ,

    +
  • +
  • +

    WebStorm,

    +
  • +
  • +

    Visual Studio Code,

    +
  • +
  • +

    many more that support Java + Maven.

    +
  • +
+
+
+
+
System under test environments
+
+
+image44 +
+
+
+
    +
  • +

    Quality assurance or QA is a way of preventing mistakes or defects in manufactured products and avoiding problems when delivering solutions or services to customers, which ISO 9000 defines as "part of quality management focused on providing confidence that quality requirements will be fulfilled".

    +
  • +
  • +

    System integration testing or SIT is a high-level software testing process in which testers verify that all related systems maintain data integrity and can operate in coordination with other systems in the same environment. The testing process ensures that all sub-components are integrated successfully to provide expected results.

    +
  • +
  • +

    Development or Dev testing is performed by the software developer or engineer during the construction phase of the software development life-cycle. Rather than replace traditional QA focuses, it augments it. Development testing aims to eliminate construction errors before code is promoted to QA; this strategy is intended to increase the quality of the resulting software as well as the efficiency of the overall development and QA process.

    +
  • +
  • +

    Prod If the customer accepts the product, it is deployed to a production environment, making it available to all users of the system.

    +
  • +
+
+
+
+image45 +
+
+
+
+
How to use system environment
+
+

In Page classes, when you load / start web, it is uncommon to save fixed main url.

+
+
+

Value flexibility is a must when your web application under test has a different main url depending on the environment (DEV, QA, SIT, …​, PROD)

+
+
+

Instead of a hard coded main url variable, you build your Page classes with a dynamic variable.

+
+
+

Example of dynamic variable GetEnvironmentParam.WWW_FONT_URL

+
+
+
+image46 +
+
+
+
+
How to create / update system environment
+ +
+
+
External file with variable values
+
+

Dynamic variable values are stored under path mrchecker-app-under-test\src\resources\enviroments\environments.csv.

+
+
+

NOTE: As environments.csv is a comma-separated file, please be careful while editing and then save it under Excel.

+
+
+
+image47 +
+
+
+
+
Encrypting sensitive data
+
+

Some types of data you might want to store as environment settings are sensitive in nature (e.g. passwords). You might not want to store them (at least not in their plaintext form) in your repository. To be able to encrypt sensitive data you need to do following:

+
+
+
    +
  1. +

    Create a secret (long, random chain of characters) and store it under mrchecker-app-under-test\src\resources\secretData.txt. Example: LhwbTm9V3FUbBO5Tt5PiTUEQrXGgWrDLCMthnzLKNy1zA5FVTFiTdHRQAyPRIGXmsAjPUPlJSoSLeSBM

    +
  2. +
  3. +

    Exclude the file from being checked into the git repository by adding it to git.ignore. You will need to pass the file over a different channel among your teammates.

    +
  4. +
  5. +

    Encrypt the values before putting them into the environments.csv file by creating following script (put the script where your jasypt library resides, e.g. C:\MrChecker_Test_Framework\m2\repository\org\jasypt\jasypt\1.9.2):

    +
    +
    +
    @ECHO OFF
    +
    +set SCRIPT_NAME=encrypt.bat
    +set EXECUTABLE_CLASS=org.jasypt.intf.cli.JasyptPBEStringEncryptionCLI
    +set EXEC_CLASSPATH=jasypt-1.9.2.jar
    +if "%JASYPT_CLASSPATH%" ==  "" goto computeclasspath
    +set EXEC_CLASSPATH=%EXEC_CLASSPATH%;%JASYPT_CLASSPATH%
    +
    +:computeclasspath
    +IF "%OS%" ==  "Windows_NT" setlocal ENABLEDELAYEDEXPANSION
    +FOR %%c in (%~dp0..\lib\*.jar) DO set EXEC_CLASSPATH=!EXEC_CLASSPATH!;%%c
    +IF "%OS%" ==  "Windows_NT" setlocal DISABLEDELAYEDEXPANSION
    +
    +set JAVA_EXECUTABLE=java
    +if "%JAVA_HOME%" ==  "" goto execute
    +set JAVA_EXECUTABLE="%JAVA_HOME%\bin\java"
    +
    +:execute
    +%JAVA_EXECUTABLE% -classpath %EXEC_CLASSPATH% %EXECUTABLE_CLASS% %SCRIPT_NAME% %*
    +
    +
    +
  6. +
  7. +

    Encrypt the values by calling

    +
    +
    +
    .\encrypt.bat input=someinput password=secret
    +
    +----ENVIRONMENT-----------------
    +
    +Runtime: Oracle Corporation Java HotSpot(TM) 64-Bit Server VM 25.111-b14
    +
    +
    +
    +----ARGUMENTS-------------------
    +
    +input: someinput
    +password: secret
    +
    +
    +
    +----OUTPUT----------------------
    +
    +JN3nOFol2GMZoUxR5z2wI2qdipcNH1UD
    +
    +
    +
  8. +
  9. +

    Mark the value as encrypted by adding a prefix 'ENC(' and suffix ')' like: ENC(JN3nOFol2GMZoUxR5z2wI2qdipcNH1UD)

    +
    +
    +image48 +
    +
    +
  10. +
+
+
+
+
Bridge between external file and Page class
+
+

To map values from external file with Page class you ought to use class GetEnvironmentParam.

+
+
+

Therefore, when you add a new variable (row) in environments.csv you might need to add this variable to GetEnvironmentParam.

+
+
+
+image49 +
+
+
+
+
Run test case with system environment
+
+

To run test case with system environment, please use:

+
+
+
    +
  • +

    -Denv=<NameOfEnvironment>

    +
  • +
  • +

    <NameOfEnvironment> is taken as column name from file mrchecker-app-under-test\src\test\resources\enviroments\environments.csv

    +
  • +
+
+
+
+
Command Line
+
+
+
mvn test site -Dtest=RegistryPageTest -Denv=DEV
+
+
+
+
+
Eclipse
+
+
+image50 +
+
+
+
+image51 +
+
+ +
+
+
System under test environments
+
+
+image080 +
+
+
+
    +
  • +

    Quality assurance or QA is a way of preventing mistakes or defects in the manufactured products and avoiding problems when delivering solutions or services to customers, which ISO 9000 defines as "part of quality management focused on providing confidence that quality requirements will be fulfilled".

    +
  • +
  • +

    System integration testing or SIT is a high-level software testing process in which testers verify that all related systems maintain data integrity and can operate in coordination with other systems in the same environment. The testing process ensures that all sub-components are integrated successfully to provide expected results.

    +
  • +
  • +

    Development or Dev testing is performed by the software developer or engineer during the construction phase of the software development life-cycle. Rather than replace traditional QA focuses, it augments it. Development testing aims to eliminate construction errors before code is promoted to QA; this strategy is intended to increase the quality of the resulting software as well as the efficiency of the overall development and QA process.

    +
  • +
  • +

    Prod If the customer accepts the product, it is deployed to a production environment, making it available to all users of the system.

    +
  • +
+
+
+
+image051 +
+
+
+
+
How to use system environment
+
+

In Page classes, when you load / start web, it is uncommon to save fixed main url.

+
+
+

Value flexibility is a must when your web application under test has a different main url, depending on the environment (DEV, QA, SIT, …​, PROD)

+
+
+

Instead of hard coded main url variable, you build your Page classes with dynamic variable.

+
+
+

An example of dynamic variable GetEnvironmentParam.WWW_FONT_URL

+
+
+
+image081 +
+
+
+
+
How to create / update system environment
+ +
+
+
External file with variable values
+
+

Dynamic variable values are stored under mrchecker-app-under-test\src\resources\enviroments\environments.csv.

+
+
+

NOTE: As environments.csv is a comma-separated file, please be careful while editing and then save it under Excel.

+
+
+
+image082 +
+
+
+
+
Encrypting sensitive data
+
+

Some types of data you might want to store as environment settings are sensitive in nature (e.g. passwords). You might not want to store them (at least not in their plaintext form) in your repository. To be able to encrypt sensitive data you need to do following:

+
+
+
    +
  1. +

    Create a secret (long, random chain of characters) and store it under mrchecker-app-under-test\src\resources\secretData.txt. Example: LhwbTm9V3FUbBO5Tt5PiTUEQrXGgWrDLCMthnzLKNy1zA5FVTFiTdHRQAyPRIGXmsAjPUPlJSoSLeSBM

    +
  2. +
  3. +

    Exclude the file from being checked into the git repository by adding it to git.ignore. You will need to pass the file over a different channel among your teammates.

    +
  4. +
  5. +

    Encrypt the values before putting them into the environments.csv file by creating following script (put the script where your jasypt library resides, e.g. C:\MrChecker_Test_Framework\m2\repository\org\jasypt\jasypt\1.9.2):

    +
  6. +
+
+
+
+
@ECHO OFF
+
+set SCRIPT_NAME=encrypt.bat
+set EXECUTABLE_CLASS=org.jasypt.intf.cli.JasyptPBEStringEncryptionCLI
+set EXEC_CLASSPATH=jasypt-1.9.2.jar
+if "%JASYPT_CLASSPATH%" ==  "" goto computeclasspath
+set EXEC_CLASSPATH=%EXEC_CLASSPATH%;%JASYPT_CLASSPATH%
+
+:computeclasspath
+IF "%OS%" ==  "Windows_NT" setlocal ENABLEDELAYEDEXPANSION
+FOR %%c in (%~dp0..\lib\*.jar) DO set EXEC_CLASSPATH=!EXEC_CLASSPATH!;%%c
+IF "%OS%" ==  "Windows_NT" setlocal DISABLEDELAYEDEXPANSION
+
+set JAVA_EXECUTABLE=java
+if "%JAVA_HOME%" ==  "" goto execute
+set JAVA_EXECUTABLE="%JAVA_HOME%\bin\java"
+
+:execute
+%JAVA_EXECUTABLE% -classpath %EXEC_CLASSPATH% %EXECUTABLE_CLASS% %SCRIPT_NAME% %*
+
+
+
+
    +
  1. +

    Encrypt the values by calling

    +
  2. +
+
+
+
+
.\encrypt.bat input=someinput password=secret
+
+----ENVIRONMENT-----------------
+
+Runtime: Oracle Corporation Java HotSpot(TM) 64-Bit Server VM 25.111-b14
+
+
+
+----ARGUMENTS-------------------
+
+input: someinput
+password: secret
+
+
+
+----OUTPUT----------------------
+
+JN3nOFol2GMZoUxR5z2wI2qdipcNH1UD
+
+
+
+
    +
  1. +

    Mark the value as encrypted by adding a prefix 'ENC(' and suffix ')' like: ENC(JN3nOFol2GMZoUxR5z2wI2qdipcNH1UD)

    +
  2. +
+
+
+
+image083 +
+
+
+
+
Bridge between external file and Page class
+
+

To map values from external file with Page class you ought to use class GetEnvironmentParam

+
+
+

Therefore, when you add a new variable (row) in environments.csv you might need to add this variable to GetEnvironmentParam.

+
+
+
+image084 +
+
+
+
+
Run test case with system environment
+
+

To run test case with system environment, please use: +* -Denv=\<NameOfEnvironment\> +* \<NameOfEnvironment\> is taken as column name from file mrchecker-app-under-test\src\test\resources\enviroments\environments.csv

+
+
+

Since mrchecker-core-module version 5.6.2.1 +== Command Line

+
+
+
+
mvn test site -Dgroups=RegistryPageTestTag -Denv=DEV
+
+
+
+
+
Eclipse
+
+
+image085 +
+
+
+
+image086 new +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1 +== Command Line

+
+
+
+
mvn test site -Dtest=RegistryPageTest -Denv=DEV
+
+
+
+
+
Eclipse
+
+
+image085 +
+
+
+
+image086 +
+
+
+
+
+
+

Selenium Module

+
+ +
+
Selenium Test Module
+ +
+
+
What is MrChecker E2E Selenium Test Module
+
+
+image2 +
+
+
+
+
Selenium Structure
+ +
+
+
Framework Features
+
+ +
+
+
+
How to start?
+ +
+
+
Selenium Best Practices
+ +
+
+
Selenium UFT Comparison
+ +
+
+
+
+

Selenium Structure

+
+
+
What is Selenium
+
+

Selenium is a framework for testing browser applications. The test automation supports:

+
+
+
    +
  • +

    Frequent regression testing

    +
  • +
  • +

    Repeating test case executions

    +
  • +
  • +

    Documentation of test cases

    +
  • +
  • +

    Finding defects

    +
  • +
  • +

    Multiple Browsers

    +
  • +
+
+
+

The Selenium testing framework consists of multiple tools:

+
+
+
    +
  • +

    Selenium IDE

    +
    +

    The Selenium Integrated Development Environment is a prototyping tool for building test scripts. It is a Firefox Plugin and provides an easy-to-use interface for developing test cases. Additionally, Selenium IDE contains a recording feature, that allows the user to record user inputs that can be automatically re-executed in future.

    +
    +
  • +
  • +

    Selenium 1

    +
    +

    Selenium 1, also known as Selenium RC, commands a Selenium Server to launch and kill browsers, interpreting the Selenese commands passed from the test program. The Server acts as an HTTP proxy. This tool is deprecated.

    +
    +
  • +
  • +

    Selenium 2

    +
    +

    Selenium 2, also known as Selenium WebDriver, is designed to supply a well-designed, object-oriented API that provides improved support for modern advanced web-app testing problems.

    +
    +
  • +
  • +

    Selenium 3.0

    +
    +

    The major change in Selenium 3.0 is removing the original Selenium Core implementation and replacing it with one backed by WebDriver. There is now a W3C specification for browser automation, based on the Open Source WebDriver.

    +
    +
  • +
  • +

    Selenium Grid

    +
    +

    Selenium Grid allows the scaling of Selenium RC test cases, that must be run in multiple and potentially variable environments. The tests can be run in parallel on different remote machines.

    +
    +
  • +
+
+
+
+
Selenium on the Production Line
+
+

More information on Selenium on the Production Line can be found here.

+
+
+

tl;dr

+
+
+

The Production Line has containers running Chrome and Firefox Selenium Nodes. The communication with these nodes is accomplished using Selenium Grid.

+
+
+

Having issues using Selenium on the Production Line? Check the Production Line issue list, maybe it’s a known issue that can be worked around.

+
+
+
+
What is WebDriver
+
+

On the one hand, it is a very convenient API for a programmer that allows for interaction with the browser, on the other hand it is a driver concept that enables this direct communication.

+
+
+
+image53 +
+
+
+
+
== How does it work?
+
+
+image54 +
+
+
+

A tester, through their test script, can command WebDriver to perform certain actions on the WAUT on a certain browser. The way the user can command WebDriver to perform something is by using the client libraries or language bindings provided by WebDriver.

+
+
+

By using the language-binding client libraries, a tester can invoke browser-specific implementations of WebDriver, such as Firefox Driver, IE Driver, Opera Driver, and so on, to interact with the WAUT of the respective browser. These browser-specific implementations of WebDriver will work with the browser natively and execute commands from outside the browser to simulate exactly what the application user does.

+
+
+

After execution, WebDriver will send the test result back to the test script for developer’s analysis.

+
+
+
+
What is Page Object Model?
+
+
+image55 +
+
+
+

Creating Selenium test cases can result in an unmaintainable project. One of the reasons is that too much duplicated code is used. Duplicated code could result from duplicated functionality leading to duplicated usage of locators. The main disadvantage of duplicated code is that the project is less maintainable. If a locator changes, you have to walk through the whole test code to adjust locators where necessary. By using the page object model we can make non-brittle test code and reduce or eliminate duplicate test code. In addition, it improves the readability and allows us to create interactive documentation. Last but not least, we can create tests with fewer keystrokes. An implementation of the page object model can be achieved by separating the abstraction of the test object and the test scripts.

+
+
+
+image56 +
+
+
+
+
Basic Web elements
+
+

This page will provide an overview of basic web elements.

+
+
+
+image57 +
+
+
+
+image58 +
+
+
+

|== = +|Name +|Method to use element

+
+
+

|Form: Input Text +|elementInputText()

+
+
+

|Form: Label +|elementLabel()

+
+
+

|Form: Submit Button +|elementButton()

+
+
+

|Page: Button +|elementButton()

+
+
+

|Checkbox +|elementCheckbox()

+
+
+

|Radio +|elementRadioButton()

+
+
+

|Elements (Tabs, Cards, Account, etc.) +|elementTab()

+
+
+

|Dropdown List +|elementDropdownList()

+
+
+

|Link +|-

+
+
+

|Combobox +|elementList() +|== =

+
+
+

Comparison of how picking a value from a checkbox can be done:

+
+
+
    +
  • +

    by classic Selenium atomic actions

    +
  • +
  • +

    by our enhanced Selenium wrapper

    +
  • +
+
+
+

Classic Selenium atomic actions

+
+
+
+
List<WebElement> checkboxesList = getDriver()
+                .findElements(selectorHobby);
+WebElement currentElement;
+for (int i = 0; i < checkboxesList.size(); i++) {
+    currentElement = checkboxesList.get(i);
+    if (currentElement.getAttribute("value")
+                    .equals(hobby.toString()) && currentElement.isSelected() != true)
+                        {
+        currentElement.click();
+            }
+}
+
+
+
+

Enhanced Selenium in E2E test framework

+
+
+
+
getDriver().elementCheckbox(selectorHobby)
+				.setCheckBoxByValue(hobby.toString());
+
+
+
+
+
+
+

Framework Features

+
+
+
Page Class
+
+

Page Object Models allow for the representation of a webpage as a Java Class. The class contains all required web elements like buttons, textfields, labels, etc. When initializing a new project, create a new package to store the Page Object Models in.

+
+
+
+
Initialization
+
+

Source folder: allure-app-under-test/src/main/java

+
+
+

Name: com.example.selenium.pages.YOUR_PROJECT

+
+
+

Classes being created inside of this new package have to extend the BasePage class. As a result, a few abstract methods from BasePage have to be implemented.

+
+
+
+
public class DemoPage extends BasePage {
+
+	@Override
+	public boolean isLoaded() {
+
+	}
+
+	@Override
+	public void load() {
+
+	}
+
+	@Override
+	public String pageTitle() {
+
+	}
+}
+
+
+
+

The example above demonstrates a minimum valid Page Object class with all required methods included.

+
+
+
+
BasePage method: isLoaded
+
+

The inherited method isLoaded() can be used to check if the current Page Object Model has been loaded correctly. There are multiple ways to verify a correctly loaded page. One example would be to compare the actual page title with the expected page title.

+
+
+
+
public boolean isLoaded() {
+	if(getDriver().getTitle().equals("EXPECTED_TITLE")) {
+		return true;
+	}
+	return false;
+}
+
+
+
+
+
BasePage method: load
+
+

The method load() can be used to tell the webdriver to load a specific page.

+
+
+
+
public void load() {
+	getDriver().get("http://SOME_PAGE");
+}
+
+
+
+
+
BasePage method: pageTitle
+
+

The pageTitle() method returns a String containing the page title.

+
+
+
+
Creating a selector variable
+
+

To initialize web elements, a large variety of selectors can be used.

+
+
+

We recommend creating a private and constant field for every web element you’d like to represent in Java. Use the guide above to find the preferred selector and place it in the code below at "WEB_ELEMENT_SELECTOR".

+
+
+
+
private static final By someWebElementSelector = By.CSS("WEB_ELEMENT_SELECTOR");
+
+
+
+

As soon as you create the selector above, you can make use of it to initialize a WebElement object.

+
+
+
+
WebElement someWebElement = getDriver().findDynamicElement(someWebElementSelector);
+
+
+
+

Note: The examples displayed in the cssSelector.docx file use the Selenium method driver.findElement() to find elements. However, using this framework we recommend findDynamicElement() or findQuietlyElement(). findDynamicElement() allows waiting for dynamic elements, for example buttons that pop up.

+
+
+
+
Creating a page method
+
+

To interact with the page object, we recommend creating methods for each action.

+
+
+
+
public void enterGoogleSearchInput(String query) {
+	...
+}
+
+
+
+

Creating a method like the one above allows the test case to run something like googleSearchPage.enterGoogleSearchInput("Hello") to interact with the page object.

+
+
+
+
Naming Conventions
+
+

For code uniformity and readability, we provide a few method naming conventions.

+
+
+

|== =

+
+
+

|Element +|Action +|Name (example)

+
+
+

|Form: Input text +|enter +|enterUsernameInput()

+
+
+

| +|is (label) +|isUsernameInputPresent()

+
+
+

| +|is (value) +|isUsernameEmpty()

+
+
+

| +|get +|getUsernameValue()

+
+
+

|Form: Label +|get +|getCashValue()

+
+
+

| +|is (value) +|isCashValueEmpty()

+
+
+

| +|is (label) +|isCashLabelPresent()

+
+
+

|Form: Submit Button +|submit +|submitLoginForm()

+
+
+

| +|is +|isLoginFormPresent()

+
+
+

|Page: Button +|click +|clickInfoButton()

+
+
+

| +|is +|isInfoButtonPresent()

+
+
+

|Checkbox +|set +|setRememberMeCheckbox()

+
+
+

| +|unset +|unsetRememberMeCheckbox()

+
+
+

| +|is (present) +|isRememberMeCheckboxPresent()

+
+
+

| +|is (value) +|isRememberMeCheckboxSet()

+
+
+

|Radio +|set +|setMaleRadioValue("Woman")

+
+
+

| +|is (present) +|isMaleRadioPresent()

+
+
+

| +|is (visible) +|isMaleRadioVisible()

+
+
+

| +|get +|getSelectedMaleValue()

+
+
+

|Elements (Tabs, Cards, Account, etc.) +|click +|clickPositionTab() / clickMyBilanceCard()

+
+
+

| +|is +|isMyBilanceCardPresent()

+
+
+

|Dropdown List +|select +|selectAccountTypeValue(typeName)

+
+
+

| +|unselect +|unselectAccountTypeValue(typeName)

+
+
+

| +|multiple select +|selectAccountTypesValues(List typeNames)

+
+
+

| +|is (list) +|isAccountTypeDropdownListPresent()

+
+
+

| +|is (element present) +|isAccountTypeElementPresent(typeName)

+
+
+

| +|is (element selected) +|isAccountTypeSelected(typeName)

+
+
+

|Link +|click +|clickMoreLink()

+
+
+

| +|is +|isMoreLinkPresent()

+
+
+

|Combobox +|select +|selectSortCombobox()

+
+
+

| +|is (present) +|isSortComboboxPresent(name)

+
+
+

| +|is (contain) +|selectSortComboboxContain(name)

+
+
+

|Element Attribute +|get +|getPositionTabCss()

+
+
+

| +|get +|getMoreLinkHref() / getRememberMeCheckboxName()

+
+
+

|== =

+
+
+

A css selector is used to select elements from an HTML page.

+
+
+

Selectors by element tag, class or id are the most common.

+
+
+
+
<p class='myText' id='123'>
+
+
+
+

This text element (p) can be found by using any one of the following selectors:

+
+
+
+
The HTML element: "p". Note: in practical use this will be too generic, if a preceding text section is added, the selected element will change.
+The class attribute preceded by ".": ".myText"
+The id attribute preceded by "#": "#123"
+
+
+
+
+
Using other attributes
+
+

When a class or an id attribute is not sufficient to identify an element, other attributes can be used as well, by using "[attribute=value]": For example:

+
+
+
+
<a href='https://ns.nl/example.html'>
+
+
+
+

This can be selected by using the entire value: "a[href='https://ns.nl/example.html']". For selecting links starting with, containing, ending with see the list below.

+
+
+
+
Using sub-elements
+
+

The css selectors can be stacked, by appending them:

+
+
+
+
<div id='1'><a href='ns.nl'></div>
+<div id='2'><a href='nsinternational.nl'></div>
+
+
+
+

In the example above, the link element to nsinternational can be obtained with: "#2 a".

+
+
+
+
When possible avoid
+
+
    +
  • +

    Using paths of commonly used HTML elements within the containers (HTML: div). This will cause failures when a container is added, a common occurrence during development, e.g. "div div p". Use class or id instead, if those are not available, request them to be added in the production code.

    +
  • +
  • +

    Magic order numbers. It is possible to get the second text element in its parent container by using the selector "p:nth-child(2)". If the items are representing different items, ask the developer to add specific attributes. It is also possible to request all items, with a selector similar to ".myList li", and iterate through them later.

    +
  • +
+
+
+
+
List
+
+

A good list with CSS Selectors can be found at W3Schools:
+https://www.w3schools.com/cssref/css_selectors.asp

+
+
+
+
Selenium UFT Comparison
+
+

|===

+
+
+

|Subject +|HP UFT +|HP LeanFT +|Selenium +|Selenium IDE

+
+
+

|Language +|VBScript +|Same as Selenium +|Supports several languages. +Java +|Javascript

+
+
+

|Learning curve +|Based on VBScript which is relatively easy to learn +|Less intuitive, more coding knowledge necessary +|Less intuitive, more coding skills necessary +|Record/playback possible. Generated code difficult to maintain

+
+
+

|Project type +|Traditional +|Agile +|Agile +|Agile

+
+
+

|User oriented +|More Tester +|More Developer +|More Developer +|More Tester

+
+
+

|Object recognition +|Test object identification and storage in object repository +|Same as UFT +|With Firebug +|Same as SE

+
+
+

|Customizations +|Only the available standard. No customization +|Same as UFT +|Lots of customizations possible +|Fewer than SE

+
+
+

|Framework +|Needed. +Exists in ATaaS +| +|Needed. +Integration with Fitnesse, Cucumber, Gauge +|No Framework. Limited capabilities of the tool.

+
+
+

|Operating System support +|Runs on Windows +|Runs on Windows +|Multiple OS support. With Grid: testing on multiple devices at same time +|Plugin for Firefox

+
+
+

|Application coverage +|Many +|Many +|Web only +|Web only

+
+
+

|Multiple browsers +|In UFT 12.5 available +|In 12.5 available +|Multiple tests in multiple browser windows at once and faster support for new browser versions +|Multiple tests in multiple browser windows at once and faster support for new browser versions

+
+
+

|System Load +|High system load (RAM & CPU usage) +|Lower load than HP UFT? +|Lower load than HP UFT +|Lower load than HP UFT

+
+
+

|ALM integration +|With HP ALM – full integration +| +|Jira, Jenkins +Not with ALM tool +|Same as SE

+
+
+

|Integration with other tools +|A lot can be built, but many are already covered. +|More than UFT. +|Freeware and can be integrated with different open source tools +|Freeware and can be integrated with different open source tools

+
+
+

|Addins +|Add-ins necessary to access all capabilities of the tool – license related +|Same as UFT +|See integration with other tools +|See integration with other tools

+
+
+

|Reporting +|Complete, link to ALM +|Same as UFT +|No native mechanism for generating reports, but multiple plugins available for reporting +|No native mechanism for generating reports, but multiple plugins available for reporting

+
+
+

|Support +|HP full support +|Same as UFT +|Limited support as it is open source +|Limited support as it is open source

+
+
+

|License costs +|About 17K – Capgemini price 5K. +Included in the S2 service charge +|Same price as HP UFT +|Free +|Free +limited functionality (no iterations / conditional statements)

+
+
+

|iVAL Service +|ATaaS +|Not in a S2 service +|Not in a S2 service +|Not in a S2 service

+
+
+

|===

+
+
+

Bold for key differentiators.

+
+
+

Projects also choose an available resource and the knowledge of that resource.

+
+
+

Both: Framework determines the quality of automation. Needs to be set up by someone with experience with the tool

+
+
+
+
Run on different browsers
+
+
+image59 +
+
+
+

To execute each test with a chosen installed browser, specific arguments are required in Run configuration.

+
+
+
+image60 +
+
+
+
+image61 +
+
+
+

It is necessary to enter -Dbrowser= with browser parameter name as an argument (in 'Arguments' tab):

+
+
+

firefox +ie +phantomjs +chrome +chromeheadless +For example: -Dbrowser=ie

+
+
+
+
_-ea_ should be entered as an argument to restore default settings.
+
+
+
+
+
Browser options
+
+

To run a browser with specific options during runtime, please use

+
+
+

-DbrowserOptions="< options >"

+
+
+
+
> mvn test -DbrowserOptions="param1"
+> mvn test -DbrowserOptions="param1=value1"
+
+
+
+

examples:

+
+
+
    +
  • +

    One parameter -DbrowserOptions="headless"

    +
  • +
  • +

    One parameter -DbrowserOptions="--incognito"

    +
  • +
  • +

    Many parameters -DbrowserOptions="headless;param1=value1;testEquals=FirstEquals=SecondEquals;--testMe"

    +
  • +
+
+
+

List of options/capabilities supported by:

+
+
+ +
+
+
+
Run with full range of resolution
+
+
+image62 +
+
+
+

In order to execute tests in different browser resolutions, it is required to provide these resolutions as a test parameter.

+
+
+

Test example with resolutions included may be found in ResolutionTest test class

+
+
+
+image63 +
+
+
+

Example of resolution notation is available in ResolutionEnum class

+
+
+
+image64 +
+
+
+

Test with given resolution parameters will be launched as many times as the number of resolutions provided.

+
+
+
+
Selenium Best Practices
+
+

The following table displays a few best practices that should be taken into consideration when developing Selenium test cases.

+
+
+

|===

+
+
+

|Best Practices +|Description

+
+
+

|"Keep it Simple" +|Do not force the use of every Selenium feature available - Plan before creating the actual test cases

+
+
+

|Using Cucumber +|Cucumber can be used to create initial testcases for further decision making

+
+
+

|Supporting multiple browsers +|Test on multiple browsers (in parallel, if applicable) if the application is expected to support multiple environments

+
+
+

|Test reporting +|Make use of test reporting modules like Junit which is included in the framework

+
+
+

|Maintainability +|Always be aware of the maintainability of tests - You should always be able to adapt to changes

+
+
+

|Testing types +|Which tests should be created? Rule of thumb: 70% Unit test cases, 20% Integration test cases and 10% UI Test cases

+
+
+

|Test data +|Consider before actually developing tests and choosing tools: Where to get test data from, how to reset test data

+
+
+

|===

+
+
+
+
+
+

Web API Module

+
+ + +
+
Is it doable to keep pace in QA with today’s software agile approach?
+
+

DevOps + Microservices + Shift left + Time to Market == ? Service virtualization ?

+
+
+
+image72 +
+
+
+

Test pyramid

+
+
+
+image73 +
+
+
+
+
What is service virtualization
+
+

Service Virtualization has become recognized as one of the best ways to speed up testing and accelerate your time to market.

+
+
+

Service virtualization lets you automatically execute tests even when the application under test’s dependent system components (APIs, third-party applications, etc.) cannot be properly accessed or configured for testing. By simulating these dependencies, you can ensure that your tests will encounter the appropriate dependency behaviour and data each and every time that they execute.

+
+
+

Service virtualization is the simulation of interfaces – not the virtualization of systems.

+
+
+

According to Wikipedia’s service virtualization entry: Service virtualization emulates the behaviour of software components to remove dependency constraints on development and testing teams. Such constraints occur in complex, interdependent environments when a component connected to the application under test is:

+
+
+
    +
  • +

    Not yet completed

    +
  • +
  • +

    Still evolving

    +
  • +
  • +

    Controlled by a third-party or partner

    +
  • +
  • +

    Available for testing only in a limited capacity or at inconvenient times

    +
  • +
  • +

    Difficult to provision or configure in a test environment

    +
  • +
  • +

    Needed for simultaneous access by different teams with varied test data setup and other requirements

    +
  • +
  • +

    Restricted or costly to use for load and performance testing

    +
  • +
+
+
+

For instance, instead of virtualizing an entire database (and performing all associated test data management as well as setting up the database for every test session), you monitor how the application interacts with the database, then you emulate the related database behaviour (the SQL queries that are passed to the database, the corresponding result sets that are returned, and so forth).

+
+
+
+
Mocks, stubs and virtual services
+
+

The most commonly discussed categories of test doubles are mocks, stubs and virtual services.

+
+
+

Stub: a minimal implementation of an interface that normally returns hardcoded data that is tightly coupled to the test suite. It is most useful when the suite of tests is simple and keeping the hardcoded data in the stub is not an issue. Some stubs are handwritten; some can be generated by tools. A stub is normally written by a developer for personal use. It can be shared with testers, but wider sharing is typically limited by interoperability issues related to software platform and deployment infrastructure dependencies that were hardcoded. A common practice is when a stub works in-process directly with classes, methods, and functions for the unit, module, and acceptance testing. Some developers will say that a stub can also be primed, but you cannot verify an invocation on a stub. Stubs can also be communicating "over the wire", for example, HTTP, but some would argue that they should be called virtual services in that case.

+
+
+

Mock: a programmable interface observer, that verifies outputs against expectations defined by the test. It is frequently created using a third party library, for example in Java that is Mockito, JMock or WireMock. It is most useful when you have a large suite of tests and a stub will not be sufficient because each test needs a different data set up and maintaining them in a stub would be costly. The mock lets us keep the data set-up in the test. A mock is normally written by a developer for personal use but it can be shared with testers. However, wider sharing is typically limited by interoperability issues related to software platform and deployment infrastructure dependencies that were hardcoded. They most often work in-process directly with classes, methods, and functions for a unit, module, and acceptance testing. Mock provides responses based on a given request satisfying predefined criteria (also called request or parameter matching). A mock also focuses on interactions rather than state so mocks are usually stateful. For example, you can verify how many times a given method was called or the order of calls made to a given object.

+
+
+

Virtual service: a test double often provided as a Software-as-a-Service (SaaS), is always called remotely, and is never working in-process directly with methods or functions. A virtual service is often created by recording traffic using one of the service virtualization platforms instead of building the interaction pattern from scratch based on interface or API documentation. A virtual service can be used to establish a common ground for teams to communicate and facilitate artefact sharing with other development teams as well as testing teams. A virtual service is called remotely (over HTTP, TCP, etc.) normally supports multiple protocols (e.g. HTTP, MQ, TCP, etc.), while a stub or mock normally supports only one. Sometimes virtual services will require users to authorize, especially when deployed in environments with enterprise-wide visibility. Service virtualization tools used to create virtual services will most often have user interfaces that allow less tech-savvy software testers to hit the ground running, before diving into the details of how specific protocols work. They are sometimes backed by a database. They can also simulate non-functional characteristics of systems such as response times or slow connections. You can sometimes find virtual services that provide a set of stubbed responses for given request criteria and pass every other request to a live backend system (partial stubbing). Similar to mocks, virtual services can have quite complex request matchers, that allow having one response returned for many different types of requests. Sometimes, virtual services simulate system behaviours by constructing parts of the response based on request attributes and data.

+
+
+

It is often difficult to say definitely which of the following categories a test double fits into. They should be treated as a spectrum rather than strict definitions.

+
+
+

Unresolved include directive in modules/ROOT/pages/mrchecker.wiki/master-mrchecker.adoc - include::Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module-How-plug-in-service-virtualization-into-Application-Under-Test.adoc[]

+
+
+
+
How to make a virtual asset
+
+

This can be done in four ways:

+
+
+
    +
  • +

    Record all traffic (Mappings and Responses) that comes through proxy - by UI

    +
  • +
  • +

    Record all traffic (Mappings and Responses) that comes through proxy - by Code

    +
  • +
  • +

    Create Mappings and Responses manually by text files

    +
  • +
  • +

    Create Mappings and Responses manually by code

    +
  • +
+
+
+
+
Record all traffic (Mappings and Responses) that comes through proxy - UI
+
+

Full article here Wiremock record-playback.

+
+
+

First, start an instance of WireMock running standalone. Once that’s running, visit the recorder UI page at http://localhost:8080/__admin/recorder (assuming you started WireMock on the default port of 8080).

+
+
+
+image77 +
+
+
+

Enter the URL you wish to record from in the target URL field and click the Record button. You can use http://example.mocklab.io to try it out.

+
+
+

Now you need to make a request through WireMock to the target API so that it can be recorded. If you’re using the example URL, you can generate a request using curl:

+
+
+
+
$ curl http://localhost:8080/recordables/123
+
+
+
+

Now click stop. You should see a message indicating that one stub was captured.

+
+
+

You should also see that a file has been created called something like recordables_123-40a93c4a-d378-4e07-8321-6158d5dbcb29.json under the mappings directory created when WireMock started up, and that a new mapping has appeared at http://localhost:8080/__admin/mappings.

+
+
+

Requesting the same URL again (possibly disabling your wifi first if you want a firm proof) will now serve the recorded result:

+
+
+
+
$ curl http://localhost:8080/recordables/123
+
+{
+"message": "Congratulations on your first recording!"
+}
+
+
+
+
+
Record all traffic (Mappings and Responses) that comes through proxy - by Code
+
+

An example of how such a record can be achieved

+
+
+
+
@Test
+public void startRecording() {
+
+    SnapshotRecordResult recordedMappings;
+
+    DriverManager.getDriverVirtualService()
+            .start();
+    DriverManager.getDriverVirtualService()
+            .startRecording("http://example.mocklab.io");
+    recordedMappings = DriverManager.getDriverVirtualService()
+            .stopRecording();
+
+    BFLogger.logDebug("Recorded messages: " + recordedMappings.toString());
+
+}
+
+
+
+
+
Create Mappings and Responses manually by text files
+
+

EMPTY

+
+
+
+
Create Mappings and Responses manually by code
+
+

Link to full file structure: REST_FarenheitToCelsiusMethod_Test.java

+
+
+
+
Start up Virtual Server
+
+
+
public void startVirtualServer() {
+
+    // Start Virtual Server
+    WireMockServer driverVirtualService = DriverManager.getDriverVirtualService();
+
+    // Get Virtual Server running http and https ports
+    int httpPort = driverVirtualService.port();
+    int httpsPort = driverVirtualService.httpsPort();
+
+    // Print is Virtual server running
+    BFLogger.logDebug("Is Virtual server running: " + driverVirtualService.isRunning());
+
+    String baseURI = "http://localhost";
+    endpointBaseUri = baseURI + ":" + httpPort;
+}
+
+
+
+
+
Plug in a virtual asset
+
+

REST_FarenheitToCelsiusMethod_Test.java

+
+
+
+
public void activateVirtualAsset() {
+    /*
+    * ----------
+    * Mock response. Map request with virtual asset from file
+    * -----------
+    */
+    BFLogger.logInfo("#1 Create Stub content message");
+    BFLogger.logInfo("#2 Add resource to virtual server");
+    String restResourceUrl = "/some/thing";
+    String restResponseBody = "{ \"FahrenheitToCelsiusResponse\":{\"FahrenheitToCelsiusResult\":37.7777777777778}}";
+
+    new StubREST_Builder //For active virtual server ...
+            .StubBuilder(restResourceUrl) //Activate mapping, for this Url AND
+            .setResponse(restResponseBody) //Send this response  AND
+            .setStatusCode(200) // With status code 200 FINALLY
+            .build(); //Set and save mapping.
+
+}
+
+
+
+

Link to full file structure: StubREST_Builder.java

+
+
+

Source link to How to create Stub.

+
+
+

StubREST_Builder.java

+
+
+
+
public class StubREST_Builder {
+
+    // required parameters
+    private String endpointURI;
+
+    // optional parameters
+    private int statusCode;
+
+    public String getEndpointURI() {
+        return endpointURI;
+    }
+
+    public int getStatusCode() {
+        return statusCode;
+    }
+
+    private StubREST_Builder(StubBuilder builder) {
+        this.endpointURI = builder.endpointURI;
+        this.statusCode = builder.statusCode;
+    }
+
+    // Builder Class
+    public static class StubBuilder {
+
+        // required parameters
+        private String endpointURI;
+
+        // optional parameters
+        private int     statusCode  = 200;
+        private String  response    = "{ \"message\": \"Hello\" }";
+
+        public StubBuilder(String endpointURI) {
+            this.endpointURI = endpointURI;
+        }
+
+        public StubBuilder setStatusCode(int statusCode) {
+            this.statusCode = statusCode;
+            return this;
+        }
+
+        public StubBuilder setResponse(String response) {
+            this.response = response;
+            return this;
+        }
+
+        public StubREST_Builder build() {
+
+            // GET
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            // Given that request with ...
+                            get(urlMatching(this.endpointURI))
+                                    .withHeader("Content-Type", equalTo(ContentType.JSON.toString()))
+                                    // Return given response ...
+                                    .willReturn(aResponse()
+                                            .withStatus(this.statusCode)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody(this.response)
+                                            .withTransformers("body-transformer")));
+
+            // POST
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            // Given that request with ...
+                            post(urlMatching(this.endpointURI))
+                                    .withHeader("Content-Type", equalTo(ContentType.JSON.toString()))
+                                    // Return given response ...
+                                    .willReturn(aResponse()
+                                            .withStatus(this.statusCode)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody(this.response)
+                                            .withTransformers("body-transformer")));
+
+            // PUT
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            // Given that request with ...
+                            put(urlMatching(this.endpointURI))
+                                    .withHeader("Content-Type", equalTo(ContentType.JSON.toString()))
+                                    // Return given response ...
+                                    .willReturn(aResponse()
+                                            .withStatus(this.statusCode)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody(this.response)
+                                            .withTransformers("body-transformer")));
+
+            // DELETE
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            // Given that request with ...
+                            delete(urlMatching(this.endpointURI))
+                                    .withHeader("Content-Type", equalTo(ContentType.JSON.toString()))
+                                    // Return given response ...
+                                    .willReturn(aResponse()
+                                            .withStatus(this.statusCode)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody(this.response)
+                                            .withTransformers("body-transformer")));
+
+            // CATCH any other requests
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            any(anyUrl())
+                                    .atPriority(10)
+                                    .willReturn(aResponse()
+                                            .withStatus(404)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody("{\"status\":\"Error\",\"message\":\"Endpoint not found\"}")
+                                            .withTransformers("body-transformer")));
+
+            return new StubREST_Builder(this);
+        }
+    }
+}
+
+
+
+
+
Start a virtual server
+
+

The following picture presents the process of executing Smoke Tests in a virtualized environment:

+
+
+
+image78 +
+
+
+
+
Install docker service
+
+

If docker is not already installed on machine (this should be checked during C2C creation), install docker, docker-compose, apache2-utils, openssl (You can use script to install docker & docker-compose OR refer to this post and add Alias for this machine <C2C_Alias_Name>):

+
+
+
    +
  • +

    run the script

    +
  • +
  • +

    sudo apt-get install -y apache2-utils

    +
  • +
+
+
+
+
Build a docker image
+
+

Dockerfile:

+
+
+
+
FROM docker.xxx.com/ubuntu:16.04
+MAINTAINER Maintainer Name "maintainer@email.address"
+LABEL name=ubuntu_java \
+           version=v1-8.0 \
+           base="ubuntu:16.04" \
+           build_date="03-22-2018" \
+           java="1.8.0_162" \
+           wiremock="2.14.0" \
+           description="Docker to use with Ubuntu, JAVA and WIREMOCK "
+
+##Update and install the applications needed
+COPY 80proxy /etc/apt/apt.conf.d/80proxy
+RUN apt-get update
+RUN apt-get install -y \
+            wget \
+            libfontconfig \
+            unzip \
+            zip \
+            ksh \
+            curl \
+            git
+
+COPY wgetrc /etc/wgetrc
+
+#Env parameters
+
+### JAVA PART ###
+#TO UPDATE:please verify url link to JDK http://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html
+##Download and install JAVA JDK8
+RUN mkdir /opt/jdk
+RUN wget -qq --header "Cookie: oraclelicense=accept-securebackup-cookie" http://download.oracle.com/otn-pub/java/jdk/8u162-b12/0da788060d494f509bf8624735fa2f1/jdk-8u162-linux-x64.tar.gz && tar -zxf jdk-8u162-linux-x64.tar.gz -C /opt/jdk && rm jdk-8u162-linux-x64.tar.gz && update-alternatives --install /usr/bin/javac javac /opt/jdk/jdk1.8.0_162/bin/javac 100 && java -version && chmod 755 -R /opt/jdk/jdk1.8.0_162/
+RUN java -version
+
+##Add user
+RUN useradd -u 29001 -g 100 srvpwiredev
+
+##Add app
+RUN mkdir -p -m 777 /app
+COPY wiremock-standalone-2.14.0.jar /app/wiremock-standalone-2.14.0.jar
+
+##Expose port
+EXPOSE 8080
+
+##Set workdir
+WORKDIR /App
+
+##Run app
+CDM java -jar /app/wiremock-standalone-2.14.0.jar
+
+
+
+

Execute the following steps with a specified version to build a docker image and push it to the repository :

+
+
+
+
## Build image
+sudo docker build -t docker.xxx.com/app/build/wiremock:v2.14.0 .
+
+## Push image
+sudo docker login docker.xxx.com
+sudo docker push docker.xxx.com/app/build/wiremock:v2.14.0
+
+
+
+
+
Run docker image
+
+

To run a docker image, execute the following command:

+
+
+
+
sudo docker run -td -p 8080:8080 -v /home/wiremock/repo/app/docker/QA/mappings:/app/mappings -v /home/wiremock/repo/app/docker/QA/__files:/app/__files --restart always docker.xxx.com/app/build/wiremock:v2.14.0
+
+
+
+

Where:

+
+
+

-p - publish a container’s port to the host

+
+
+

-v - bind mount a volume. WireMock server creates two directories under the current one: mappings and __files. It is necessary to mount directories with already created mappings and responses to make it work.

+
+
+

--restart always - restart policy to apply when a container exits

+
+
+

All of the parameters are described in: official docker documentation

+
+
+
+
Map requests with virtual assets
+
+

What is WireMock?

+
+
+

WireMock is an HTTP mock server. At its core it is a web server that can be primed to serve canned responses to particular requests (stubbing) and that captures incoming requests so that they can be checked later (verification). It also has an assortment of other useful features including record/playback of interactions with other APIs, injection of faults and delays, simulation of stateful behaviour.

+
+
+

Full documentation can be found under the following link: WireMock

+
+
+
+
Record / create virtual assets mappings
+
+

Record

+
+
+

WireMock can create stub mappings from requests it has received. Combined with its proxying feature, this allows you to "record" stub mappings from interaction with existing APIs.

+
+
+

Record and playback (Legacy): documentation

+
+
+
+
java -jar wiremock-standalone-2.16.0.jar --proxy-all="http://search.twitter.com" --record-mappings --verbose
+
+
+
+

Once it’s started and request is sent to it, it will be redirected to "http://search.twitter.com" and traffic (response) is saved to files in mappings and __files directories for further use.

+
+
+

Record and playback (New): documentation

+
+
+
+
Enable mappings in a virtual server
+
+

When the WireMock server starts, it creates two directories under the current one: mappings and __files. To create a stub, it is necessary to drop a file with a .json extension under mappings.

+
+
+

Run docker with mounted volumes

+
+
+

Mappings are in a repository. It is necessary to mount directories with already created mappings and responses to make it work:

+
+
+
+
sudo docker run -td -p 8080:8080 -v /home/wiremock/repo/app/docker/QA/mappings:/app/mappings -v /home/wiremock/repo/app/docker/QA/__files:/app/__files --restart always docker.xxx.com/app/build/wiremock:v2.14.0
+
+
+
+

The description of how to build and run docker is available under: Docker run command description

+
+
+

Recorded mappings

+
+
+

Recorded mappings are kept in the project repository.

+
+
+
+
Create a user and map them to docker user
+
+

To enable the connection from Jenkins to Virtual Server (C2C), it is necessary to create a user and map them to docker group user. It can be done using the following command:

+
+
+
+
useradd -G docker -m wiremock
+
+
+
+

To set the password for a wiremock user:

+
+
+
+
passwd wiremock
+
+
+
+
+
Create SSH private and public keys for a wiremock user
+
+

SSH keys serve as a means of identifying yourself to an SSH server using public-key cryptography and challenge-response authentication. One immediate advantage this method has over traditional password authentication is that you can be authenticated by the server without ever having to send your password over the network.

+
+
+

To create an SSH key, log in as wiremock (previously created user).

+
+
+
+
su wiremock
+
+
+
+

The .ssh directory is not by default created below user home directory. Therefore, it is necessary to create it:

+
+
+
+
mkdir ~/.ssh
+
+
+
+

Now we can proceed with creating an RSA key using ssh-keygen (a tool for creating new authentication key pairs for SSH):

+
+
+
+
ssh-keygen -t rsa
+
+
+
+

A key should be created under ~/.ssh/id_rsa +Appending the public keys to authorized_keys:

+
+
+
+
wiremock@vc2crptXXXXXXXn:~/.ssh$ cat id_rsa.pub >> authorized_keys
+
+
+
+
+
Install an SSH key in Jenkins
+
+

To add an SSH key to Jenkins, go to credentials in your job location. Choose the folder within credentials, then 'global credentials', 'Add credentials'. Fill in the fields. Finally, the entry should be created.

+
+
+
+
Build a Jenkins Groovy script
+
+

The description of how to use SSH Agent plugin in Jenkins pipeline can be found under: https://www.karthikeyan.tech/2017/09/ssh-agent-blue-ocean-via-jenkins.html

+
+
+

Example of use:

+
+
+
+
sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+     sh """
+         ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "docker container restart ${env.WIREMOCK_CONTAINER_NAME}"
+     """
+}
+
+
+
+

Where: env.WIREMOCK_CREDENTIALS is a credential id of previously created wiremock credentials. Now that it is present, we can execute commands on a remote machine, where in ssh command: +env.WIREMOCK_USERNAME - user name of user connected with configured private key +env.WIREMOCK_IP_ADDRESS - ip address of the machine where this user with this private key exists

+
+
+
+
Pull repository with virtual assets
+
+

To pull the repository on a remote machine, it is necessary to use the previously described SSH Agent plugin. An example of use:

+
+
+
+
sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+withCredentials([usernamePassword(credentialsId: env.STASH_CREDENTIALS, passwordVariable: 'PASS', usernameVariable: 'USER')]) {
+     sh """
+         ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "cd ~/${env.APPLICATION_DIRECTORY_WIREMOCK}/${env.PROJECT_HOME}; git fetch https://$USER:$PASS@${env.GIT_WITHOUT_HTTPS} ${env.GIT_BRANCH}; git reset --hard FETCH_HEAD; git clean -df"
+      """
+    }
+}
+
+
+
+

Where:

+
+
+

withCredentials allows various kinds of credentials (secrets) to be used in idiosyncratic ways. Each binding will define an environment variable active within the scope of the step. Then the necessary commands are executed:

+
+
+

cd …​ - command will change from current directory to the specified directory with git repository

+
+
+

git fetch …​ ;git reset …​ ;git clean …​ - pull from GIT branch. Git pull or checkout are not used here to prevent the situation with wrong coding between Mac OSX/Linux etc.

+
+
+

PLEASE remember that when using this script for the first time, the code from previous block should be changed to:

+
+
+
+
stage("ssh-agent"){
+        sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+            withCredentials([usernamePassword(credentialsId: env.STASH_CREDENTIALS, passwordVariable: 'PASS', usernameVariable: 'USER')]) {
+                sh """
+                        ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "cd ~/${env.APPLICATION_DIRECTORY_WIREMOCK} ;git clone --depth=1 --branch=develop https://$USER:$PASS@${env.GIT_WITHOUT_HTTPS}"
+                """
+    }
+}
+
+
+
+
+
Install an application with Smoke environment
+ +
+
+
Update properties settings file
+
+

New settings file is pushed to the repository. Example configuration:

+
+
+
+
...
+   <key>autocomplete</key>
+   <string>http://server:port</string>
+   <key>benefitsummary</key>
+   <string>http://server:port</string>
+   <key>checkscan</key>
+   <string>http://server:port</string>
+   <key>dpesb</key>
+   <string>http://server:port</string>
+...
+
+
+
+

Address of service (backend) should be changed to wiremock address as it is shown on listing to change the default route.

+
+
+
+
Build an application with updated properties file
+
+

New versions of application are prepared by Jenkins job.

+
+
+
+
Install an application on target properties file
+
+

Installation of an application is actually executed in a non-automated way using SeeTest environment.

+
+
+
+
UI tests
+ +
+
+
Run Jenkins job
+
+

Jenkinsfile:

+
+
+
+
// Jenkins parameters are overriding the properties below
+def properties = [
+
+          JENKINS_LABELS                                 : 'PWI_LINUX_DEV',
+          APPLICATION_FOLDER                             : 'app_dir',
+          PROJECT_HOME                                   : 'app_home_folder',
+
+          //WIREMOCK
+          WIREMOCK_CREDENTIALS                           : 'vc2crptXXXXXXn',
+          WIREMOCK_USERNAME                              : 'wiremock',
+          WIREMOCK_ADDRESS                               : 'http://vc2crptXXXXXXn.xxx.com:8080',
+          WIREMOCK_IP_ADDRESS                            : '10.196.67.XXX',
+          WIREMOCK_CONTAINER_NAME                        : 'wiremock',
+          APPLICATION_DIRECTORY_WIREMOCK                 : 'repo',
+
+          //GIT
+          GIT_CREDENTIALS                                : 'e47742cc-bb66-4321-2341-a2342er24f2',
+          GIT_BRANCH                                     : 'develop',
+          // Fixed: missing comma after GIT_SSH and lowercase 'https://' scheme
+          // (extractHTTPSUrl strips the lowercase prefix).
+          GIT_SSH                                        : 'ssh://git@stash.xxx.com/app/app.git',
+          GIT_HTTPS                                      : 'https://git@stash.xxx.com/app/app.git',
+
+          STASH_CREDENTIALS                              : 'e47742cc-bb66-4321-2341-a2342er24f2',
+
+
+          //DOCKER
+          ARTIFACTORY_USER_CREDENTIALS                   : 'e47742cc-bb66-4321-2341-a2342er24f2',
+          SEETEST_DOCKER_IMAGE                           : 'docker.xxx.com/project/images/app:v1-8.3',
+
+          //SEETEST
+          SEETEST_APPLICATION_FOLDER                     : 'seetest_dir',
+          SEETEST_PROJECT_HOME                           : 'Automated Scripts',
+          // Fixed: missing comma after SEETEST_GIT_SSH.
+          SEETEST_GIT_SSH                                : 'ssh://git@stash.xxx.com/pr/seetest_automation_cucumber.git',
+          SEETEST_GIT_BRANCH                             : 'develop',
+          SEETEST_GRID_USER_CREDENTIALS                  : 'e47742cc-bb66-4321-2341-a2342er24f2',
+          SEETEST_CUCUMBER_TAG                           : '@Virtualization',
+          SEETEST_CLOUD_NAME                             : 'Core Group',
+          SEETEST_IOS_VERSION                            : '11',
+          SEETEST_IOS_APP_URL                            : '',
+          SEETEST_INSTALL_APP                            : 'No',
+          SEETEST_APP_ENVIRONMENT                        : 'SmokeTests',
+          SEETEST_DEVICE_QUERY                           : '',
+]
+
+// Entry point: run every pipeline stage on the selected Jenkins node.
+node(properties.JENKINS_LABELS) {
+    try {
+        prepareEnv(properties)
+        gitCheckout()
+        stageStartVirtualServer()
+        stageMapApiRequests()
+        stageInstallApplication()
+        stageUITests()
+     } catch(Exception ex) {
+        currentBuild.result = 'FAILURE'
+        // Fixed: invoke the 'error' step (the original assigned a string to a
+        // variable named 'error', which reported nothing).
+        error('Error: ' + ex)
+     }
+}
+
+//== == == == == == == == == == == == == == == == == == END OF PIPELINE== == == == == == == == == == == == == == == == == == == == ==
+
+// Prepares the build environment: cleans the workspace, applies property
+// overrides from the Jenkins job, then derives workspace-related env variables.
+// Order matters: setWorkspace() reads values set by overrideProperties().
+private void prepareEnv(properties) {
+    cleanWorkspace()
+    overrideProperties(properties)
+    setWorkspace()
+}
+
+// Checks out the application repository (GIT_BRANCH via GIT_SSH) into APPLICATION_FOLDER.
+private void gitCheckout() {
+    dir(env.APPLICATION_FOLDER) {
+        // Fixed: key name 'doGenerateSubmoduleConfigurations' (was missing the
+        // trailing 's') and the missing closing ']' of the checkout map.
+        checkout([$class: 'GitSCM', branches: [[name: env.GIT_BRANCH]], doGenerateSubmoduleConfigurations: false, extensions: [[$class: 'CloneOption', depth: 0, noTags: false, reference: '', shallow: false, timeout: 50]], gitTool: 'Default', submoduleCfg: [], userRemoteConfigs: [[credentialsId: env.GIT_CREDENTIALS, url: env.GIT_SSH]]])
+     }
+}
+
+// Each stage below lives in its own submodule script under SUBMODULES_DIR;
+// 'load' compiles the script and returns its script object, which is then
+// invoked through its call() method.
+private void stageStartVirtualServer() {
+    load("${env.SUBMODULES_DIR}/stageStartVirtualServer.groovy").call()
+}
+
+private void stageMapApiRequests() {
+    load("${env.SUBMODULES_DIR}/stageMapApiRequests.groovy").call()
+}
+
+private void stageInstallApplication() {
+    load("${env.SUBMODULES_DIR}/stageInstallApplication.groovy").call()
+}
+
+private void stageUITests() {
+    load("${env.SUBMODULES_DIR}/stageUITests.groovy").call()
+}
+
+// Derives absolute workspace paths and exposes them as environment variables
+// (the Pipeline plugin does not provide WORKSPACE automatically).
+private void setWorkspace() {
+    String workspace = pwd()
+    env.APPLICATION_DIRECTORY = "/${env.APPLICATION_DIRECTORY}"
+    // Fixed: assignment '=' (the original '-' was a no-op subtraction expression).
+    env.WORKSPACE_LOCAL = workspace + env.APPLICATION_DIRECTORY
+    // Fixed: variable name case — executeTests() reads SEETEST_PROJECT_HOME_ABSOLUTE_PATH.
+    env.SEETEST_PROJECT_HOME_ABSOLUTE_PATH = "${workspace}/${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}"
+    env.SUBMODULES_DIR = env.WORKSPACE_LOCAL + "/pipelines/SmokeTests.submodules"
+    env.COMMONS_DIR    = env.WORKSPACE_LOCAL + "/pipelines/commons"
+}
+
+/*
+    Overrides env values based on the provided properties: every property not
+    already present in the environment is added with its default value; values
+    already set (e.g. via Jenkins job parameters) take precedence.
+*/
+private void overrideProperties(properties) {
+    for (param in properties) {
+        // Fixed: dynamic property access must use env."${...}" — 'env.(param.key)'
+        // is not valid Groovy.
+        if (env."${param.key}" == null) {
+           // Fixed: '${param.value}' (the opening '{' was missing).
+           echo "Adding parameter '${param.key}' with default value: '${param.value}'"
+           env."${param.key}" = param.value
+        } else {
+           def currentValue = env."${param.key}"
+           echo "Parameter '${param.key}' has overriden value: '${currentValue}'"
+        }
+     }
+
+     echo sh(script: "env | sort", returnStdout: true)
+}
+
+// Removes everything from the current workspace directory before a build.
+private void cleanWorkspace() {
+   // Fixed: missing space — 'rm-rf' is not a command.
+   sh 'rm -rf *'
+}
+
+
+
+

stageStartVirtualServer.groovy:

+
+
+
+
def call () {
+    // Verifies the WireMock virtual server is up by querying its admin endpoint;
+    // fails the build when it is unreachable or returns a non-200 status.
+    stage("Check virtual server") {
+        def statusCode
+
+        try {
+            def response = httpRequest "${env.WIREMOCK_ADDRESS}/__admin/"
+            statusCode = response.status
+        } catch(Exception ex) {
+            currentBuild.result = 'FAILURE'
+            // Fixed typo: 'is unreachable' (was 'os unreachable').
+            error 'WireMock server is unreachable.'
+        }
+
+        if(statusCode != 200) {
+            currentBuild.result = 'FAILURE'
+            // Fixed: double quotes so ${statusCode} is interpolated.
+            error "WireMock server is unreachable. Return code: ${statusCode}"
+        }
+    }
+}
+
+// Fixed: 'return this' was missing — without it 'load' yields null and the
+// caller's module() invocation fails (the other submodule scripts return this).
+return this
+
+
+
+

stageMapApiRequests.groovy:

+
+
+
+
def call() {
+    // Maps API requests to virtual assets: refresh the stub repository on the
+    // WireMock host, restart the WireMock container, then poll until it responds.
+    stage("Map API requests with virtual assets") {
+        checkoutRepository()
+        restartWiremock()
+        checkWiremockStatus()
+     }
+}
+
+// Refreshes the stub repository on the remote WireMock host over SSH.
+// fetch + reset + clean is used instead of pull/checkout to avoid
+// encoding/line-ending issues between Mac OSX and Linux.
+private checkoutRepository() {
+    extractHTTPSUrl()
+    sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+        withCredentials([usernamePassword(credentialsId: env.STASH_CREDENTIALS, passwordVariable: 'PASS', usernameVariable: 'USER')]) {
+            // Fixed: missing space in 'cd ~/...' ('cd~/...' is not a valid shell command).
+            sh """
+                ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "cd ~/${env.APPLICATION_DIRECTORY_WIREMOCK}/${env.PROJECT_HOME}; git fetch https://$USER:$PASS@${env.GIT_WITHOUT_HTTPS} ${env.GIT_BRANCH}; git reset --hard FETCH_HEAD; git clean -df"
+             """
+         }
+     }
+}
+
+// Restarts the WireMock docker container on the remote host so the freshly
+// pulled stub mappings are picked up.
+private restartWiremock() {
+    sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+            sh """
+                ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "docker container restart ${env.WIREMOCK_CONTAINER_NAME}"
+             """
+     }
+}
+
+// Polls the WireMock admin endpoint until it answers with HTTP 200, retrying a
+// fixed number of times with a pause in between; fails the build otherwise.
+private checkWiremockStatus() {
+    int wiremockStatusCheckCounter = 6
+    int sleepTimeInSeconds = 10
+    def wiremockStatus
+
+    for (i = 0; i < wiremockStatusCheckCounter; i++) {
+         try {
+             wiremockStatus = getHttpRequestStatus()
+             echo "WireMock server status code: ${wiremockStatus}"
+         } catch(Exception ex) {   // Fixed typo: was 'Exceprion', which would not compile.
+             echo "Exception when checking connection to WireMock"
+         }
+         if(wiremockStatus == 200) break
+         else sh "sleep ${sleepTimeInSeconds}"   // Fixed: '${...}' (was '$(...}').
+      }
+
+      if(wiremockStatus != 200) {
+          currentBuild.result = 'FAILURE'
+          // Fixed: double quotes so ${wiremockStatus} is interpolated.
+          error "WireMock server is unreachable. Return code: ${wiremockStatus}"
+      }
+}
+
+// Returns the HTTP status code of WireMock's admin endpoint.
+private def getHttpRequestStatus() {
+    def response = httpRequest "${env.WIREMOCK_ADDRESS}/__admin"
+    return response.status
+}   // Fixed: this closing brace was missing.
+
+// Strips the scheme from GIT_HTTPS so user credentials can be prepended to the URL.
+private extractHTTPSUrl() {
+    env.GIT_WITHOUT_HTTPS = env.GIT_HTTPS.replace("https://", "")
+}
+
+return this
+
+
+
+

stageInstallApplication.groovy:

+
+
+
+
def call() {
+    // Checks out the SeeTest automation repository; the actual installation on
+    // devices is performed manually in the SeeTest environment.
+    stage('Install application with smoke tests environment') {
+        dir(env.SEETEST_APPLICATION_FOLDER) {
+            // Fixed: missing closing ']' of the checkout configuration map.
+            checkout([$class: 'GitSCM', branches: [[name: env.SEETEST_GIT_BRANCH]], doGenerateSubmoduleConfigurations: false, extensions: [], gitTool: 'default', submoduleCfg: [], userRemoteConfigs: [[credentialsId: env.GIT_CREDENTIALS, url: env.SEETEST_GIT_SSH]]])
+        }
+     }
+}
+
+return this
+
+
+
+

stageUITests.groovy:

+
+
+
+
def call() {
+    // Runs the UI test suite inside the SeeTest docker image and publishes all reports.
+    stage('UI tests') {
+        def utils = load "${env.SUBMODULES_DIR}/utils.groovy"
+
+        try {
+            utils.generateUserIDVariable(); //Generate USER_ID and USER_GROUP
+            docker.image(env.SEETEST_DOCKER_IMAGE).inside("-u ${env.USER_ID}:${env.USER_GROUP}") {
+                // Fixed: missing closing quote after 'ARTIFACTORY_USERNAME'.
+                withCredentials([[$class: 'UsernamePasswordMultiBinding', credentialsId: "${env.ARTIFACTORY_USER_CREDENTIALS}", passwordVariable: 'ARTIFACTORY_PASSWORD', usernameVariable: 'ARTIFACTORY_USERNAME']]) {
+                    executeTests()
+                    compressArtifacts()
+                    publishJUnitTestResultReport()
+                    archiveArtifacts()
+                    publishHTMLReports()
+                    publishCucumberReports()
+                 }
+             }
+        } catch (Exception exc) {
+            throw exc
+        }
+   }
+}
+
+// Runs the Cucumber/SeeTest suite via Maven with grid credentials injected.
+private executeTests() {
+    withCredentials([usernamePassword(credentialsId: env.SEETEST_GRID_USER_CREDENTIALS, passwordVariable: 'GRID_USER_PASSWORD', usernameVariable: 'GRID_USER_NAME')]) {
+            // Fixed: missing closing quote after -DdeviceQuery="${env.SEETEST_DEVICE_QUERY}"
+            // and '${env.SEETEST_IOS_VERSION}' (was '$env.SEETEST_IOS_VERSION}').
+            sh """
+                cd ${env.SEETEST_PROJECT_HOME_ABSOLUTE_PATH}
+                mvn clean test -B -Ddriver="grid" -Dtags="${env.SEETEST_CUCUMBER_TAG}" -DcloudName="${env.SEETEST_CLOUD_NAME}" -DdeviceQuery="${env.SEETEST_DEVICE_QUERY}" -DgridUser="${GRID_USER_NAME}" -DgridPassword="${GRID_USER_PASSWORD}" -Dinstall="${env.SEETEST_INSTALL_APP}" -DiosUrl="${env.SEETEST_IOS_APP_URL}" -DdeviceType="iPhone" -DiosVersion="${env.SEETEST_IOS_VERSION}" -DparallelMode="allonall" -Denv="${env.SEETEST_APP_ENVIRONMENT}" site
+             """
+     }
+}
+
+// Zips the generated site reports into allure_report.zip for archiving.
+// Fixed: renamed to 'compressArtifacts' (Groovy is case-sensitive and the stage
+// calls compressArtifacts()), env key case (was SEETEST_PROJECT_homE), and the
+// missing closing brace.
+private compressArtifacts() {
+    echo "Compressing artifacts from /target/site"
+    sh """
+        zip -r allure_report.zip **/${env.SEETEST_PROJECT_HOME}/target/site
+    """
+}
+
+// Publishes JUnit XML results; a missing report is tolerated and only logged.
+private publishJUnitTestResultReport() {
+    String junitPattern = "${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/surefire-reports/junitreporters/*.xml"
+    echo "Publishing JUnit reports from ${junitPattern}"
+
+    try {
+        junit junitPattern
+    } catch(e) {
+        echo("No JUnit report found")
+    }
+}
+
+// Archives the zipped allure report; a missing artifact is tolerated and only logged.
+// NOTE(review): this private method shares its name with the 'archiveArtifacts'
+// pipeline step it invokes below — confirm the step (and not this no-arg method)
+// is resolved for the Map-argument call, or rename the method to avoid shadowing.
+private archiveArtifacts() {
+    echo "Archiving artifacts"
+
+    try {
+        archiveArtifacts allowEmptyArchive: true, artifacts: "**/allure_report.zip"
+    } catch(e) {
+        echo("No artifacts found")
+    }
+}
+
+// Publishes the Allure HTML report generated by the Maven build.
+private publishHTMLReports() {
+    echo "Publishing HTML reports from ${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/site/allure-maven-plugin"
+
+    try {
+        // Fixed: missing '}' after ${env.SEETEST_APPLICATION_FOLDER} in reportDir.
+        publishHTML([allowMissing: false, alwaysLinkToLastBuild: true, keepAll: true, reportDir: "${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/site/allure-maven-plugin", reportFiles: 'index.html', reportName: 'Allure report', reportTitles: 'Allure report'])
+    } catch(e) {
+        echo("No artifacts found")
+    }
+}
+
+// Publishes Cucumber JSON reports.
+// Fixed: renamed to 'publishCucumberReports' (the stage calls that exact name),
+// added ':' after fileExcludePattern, and '${...}' (was '#{...}') in the pattern.
+private publishCucumberReports() {
+    echo "Publishing Cucumber reports from ${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/cucumber-parallel/*.json"
+
+    try {
+        step([$class: 'CucumberReportPublisher', fileExcludePattern: '', fileIncludePattern: "${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/cucumber-parallel/*.json", ignoreFailedTests: false, jenkinsBasePath: '', jsonReportDirectory: '', missingFails: false, parallelTesting: false, pendingFails: false, skippedFails: false, undefinedFails: false])
+    } catch(e) {
+        echo("No Cucumber report found")
+    }
+}
+
+return this
+
+
+
+

Configuration

+
+
+

It is possible to configure Jenkins job in two ways. First one is to edit the Jenkinsfile. All of the properties are in properties collection as below:

+
+
+
+
def properties = [
+
+          // Fixed: missing trailing comma after the first entry.
+          JENKINS_LABELS                                : 'PWI_LINUX_DEV',
+
+          ...
+
+          //Docker
+          ARTIFACTORY_USER_CREDENTIALS                  : 'ba2e4f46-56f1-4467-ae97-17b356d6s643',
+          SEETEST_DOCKER_IMAGE                          : 'docker.XXX.com/app/base-images/seetest:v1-8.3',
+
+          //SeeTest
+          // Fixed: 'seetest_dir' for consistency with the main Jenkinsfile example.
+          SEETEST_APPLICATION_FOLDER                    : 'seetest_dir',
+          SEETEST_PROJECT_HOME                          : 'Automated_Scripts',
+          SEETEST_GIT_SSH                               : 'ssh://stash.xxx.com/app/seetest_automation_cucumber.git',
+          SEETEST_GIT_BRANCH                            : 'develop',
+
+          ...
+]
+
+
+
+

Second way is to add properties in 'Configure job'. All of the properties there are overriding properties from Jenkinsfile (they have the highest priority). They can then be set during the 'Build with Parameters' process.

+
+
+

Reports

+
+
+

After a job execution, 'Allure report' and 'Cucumber-JVM' reports should be visible. If any tests fail, you can check on which screen they failed and why (a screenshot of the failure is attached).

+
+
+
+
+
+

Security Module

+
+ +
+
Security Test Module
+ +
+
+
What is Security
+
+

Application Security is concerned with Integrity, Availability and Confidentiality of data processed, stored and transferred by the application.

+
+
+

Application Security is a cross-cutting concern which touches every aspect of the Software Development Lifecycle. You can introduce some SQL injection flaws in your application and make it exploitable, but you can also expose your secrets (which will have nothing to do with code itself) due to poor secret management process, and fail as well.

+
+
+

Because of this and many other reasons, not every aspect of security can be automatically verified. Manual tests and audits will still be needed. Nevertheless, every security requirement which is automatically verified will prevent code degeneration and misconfiguration in a continuous manner.

+
+
+
+
How to test Security
+
+

Security tests can be performed in many different ways, such as:

+
+
+
    +
  • +

    Static Code Analysis - improves the security by (usually) automated code review. A good way to search for vulnerabilities, which are 'obvious' on the code level ( e.g. SQL injection). The downside of this approach is that professional tools to perform such scans are very expensive and still produce many false positives.

    +
  • +
  • +

    Dynamic Code Analysis - tests are run against a working environment. A good way to search for vulnerabilities, which require all client- and server-side components to be present and running (like e.g. Cross-Site Scripting). Tests are performed in a semi-automated manner and require a proxy tool (like e.g. OWASP ZAP)

    +
  • +
  • +

    Unit tests - self-written and self-maintained tests. They usually work on the HTTP/REST level (this defines the trust boundary between the client and the server) and run against a working environment. Unit tests are best suited for verifying requirements which involve business knowledge of the system or which assure secure configuration on the HTTP level.

    +
  • +
+
+
+

In the current release of the Security Module, the main focus will be Unit Tests.

+
+
+

Although the most common choice of environment for running security tests on will be integration(the environment offers the right stability and should mirror the production closely), it is not uncommon for some security tests to run on production as well. This is done for e.g. TLS configuration testing to ensure proper configuration of the most relevant environment in a continuous manner.

+
+
+
+
+
+

Database Module

+
+ +
+
Database Test Module
+ +
+
+
What is MrChecker Database Test Module
+
+

Database module is based on Object-Relational Mapping programming technique. All functionalities are built using Java Persistence API but examples use Hibernate as a main provider.

+
+
+
+
JPA structure schema
+
+

This module was written to allow the use of any JPA provider. The structure is represented in the schema below.

+
+
+
+image3 +
+
+
+
+
ORM representation applied in Framework
+
+
+image4 +
+
+
+
+
+
+

Mobile Test Module

+
+ +
+
Mobile Test Module
+ +
+
+
What is MrChecker E2E Mobile Test Module
+
+

MrChecker E2E Mobile test Module is a suitable solution for testing Remote Web Design, Mobile Browsers and application. +A user can write tests suitable for all mobile browsers with a full range of resolution. The way of working is similar to Selenium and uses the same rules and patterns as the Web Driver. For more information please look in the Selenium test module.

+
+
+
+
What is Page Object Architecture
+
+

Creating Selenium test cases can result in an unmaintainable project. One of the reasons is that too much duplicated code is used. Duplicated code could be caused by duplicated functionality, and this will result in duplicated usage of locators. The disadvantage of duplicated code is that the project is less maintainable. If some locator changes, you have to walk through the whole test code to adjust locators where necessary. By using the page object model we can make non-brittle test code and reduce or eliminate duplicate test code. Besides that, it improves the readability and allows us to create interactive documentation. Last but not least, we can create tests with fewer keystrokes. An implementation of the page object model can be achieved by separating the abstraction of the test object and the test scripts.

+
+
+
+
Page Object Pattern
+
+
+Pom +
+
+
+
+
Mobile Structure
+
+

It is build on the top of the Appium library. +Appium is an open-source tool for automating native, mobile web, and hybrid applications on iOS mobile, Android mobile, and Windows desktop platforms. Native apps are those written using iOS, Android, or Windows SDKs. Mobile web apps are web apps accessed using a mobile browser (Appium supports Safari on iOS and Chrome or the built-in 'Browser' app on Android). Hybrid apps have a wrapper around a "webview" - a native control that enables interaction with web content.

+
+
+
+
Run on different mobile devices
+
+

To execute each test with chosen connected mobile devices, it is required to use specific arguments in Run configuration.

+
+
+
+image01 +
+
+
+
+image02 +
+
+
+

Default supported arguments in MrChecker:

+
+
+
    +
  • +

    deviceUrl - http url to Appium Server, default value "http://127.0.0.1:4723"

    +
  • +
  • +

    automationName - which automation engine to use , default value "Appium"

    +
  • +
  • +

    platformName - which mobile OS platform to use , default value "Appium"

    +
  • +
  • +

    platformVersion - mobile OS version , default value ""

    +
  • +
  • +

    deviceName - the kind of mobile device or emulator to use , default value "Android Emulator"

    +
  • +
  • +

    app - the absolute local path or remote http URL to a .ipa file (IOS), .app folder (IOS Simulator), .apk file (Android) or .apks file (Android App Bundle), or a .zip file, default value "."

    +
  • +
  • +

    browserName - name of mobile web browser to automate. Should be an empty string if automating an app instead, default value ""

    +
  • +
  • +

    newCommandTimeout - how long (in seconds) Appium will wait for a new command from the client before assuming the client quit and ending the session, default value "4000"

    +
  • +
  • +

    deviceOptions - any other capabilites not covered in essential ones, default value none

    +
  • +
+
+
+

Example usage:

+
+
+
+
mvn clean test -Dtest=MyTest -DdeviceUrl="http://192.168.0.1:1234" -DplatformName="iOS" -DdeviceName="iPhone Simulator" -Dapp=".\\Simple_App.ipa"
+
+
+
+
+
mvn clean test -Dtest=MyTest -Dapp=".\\Simple_App.apk -DdeviceOptions="orientation=LANDSCAPE;appActivity=MainActivity;chromeOptions=['--disable-popup-blocking']"
+
+
+
+

Check also:

+
+ + + +
+

+ +Full list of Generic Capabilities

+
+
+

+ +List of additional capabilities for Android

+
+
+

+ +List of additional capabilities for iOS

+
+ +
+
+
How to use mobile test Module
+
+
    +
  1. +

    Install IDE with MrChecker

    +
  2. +
  3. +

    Switch branch to 'feature/Create-mobile-module-#213' - by default it is 'develop'

    +
  4. +
+
+
+
+
git checkout feature/Create-mobile-module-#213
+
+
+
+
    +
  1. +

    Install and setup git checkout feature/Create-mobile-module-#213[Appium Server]

    +
  2. +
  3. +

    Connect to local Device by Appium Server

    +
    +
    +
     1.
    +Install Android SDK    https://developer.android.com/studio/index.html#command-tools    ->
    +	2.
    +Download Platform and Build-Tools  (Android versions - >    https://en.wikipedia.org/wiki/Android_version_history   )
    +* sdkmanager "platform-tools" "platforms;android-19"
    +* sdkmanager "build-tools;19.0.0"
    +* copy from /build-tools  file "aapt.exe"  to /platform-tools
    +	3.
    +Set Environment:
    +ANDROID_SDK_ROOT = D:\sdk-tools-windows-4333796
    +PATH =  %PATH%; %ANDROID_SDK_ROOT%
    +	4.
    +Start Appium Server
    +	5.
    +Start Session in Appium Server, capabilities
    +{
    +  "platformName": "Android",
    +            "deviceName": "Android Emulator",
    +            "app": "D:\\Repo\\mrchecker-source\\mrchecker-framework-modules\\mrchecker-mobile-module\\src\\test\\resources\\Simple App_v2.0.1_apkpure.com.apk",
    +            "automationName": "UiAutomator1"
    +            }
    +
    +
    +
  4. +
  5. +

    Run Mobile tests with runtime parameters. +List of supported parameters could be found here

    +
    +
      +
    • +

      From command line (as in Jenkins):

      +
    • +
    +
    +
  6. +
+
+
+
+
mvn clean compile test  -Dapp=".\\Simple_App_v2.0.1_apkpure.com.apk" -DautomationName="UiAutomator1" -Dthread.count=1
+
+
+
+
    +
  • +

    from IDE:

    +
  • +
+
+
+
+image00100 +
+
+
+
+image00101 +
+
+
+
+
+
+

DevOps Test Module

+
+ +
+
DevOPS Test Module
+ +
+
+
What does DevOps mean for us?
+
+

DevOps consists of a mixture of three key components in a technical project:

+
+
+
    +
  • +

    People’s skills and mindset

    +
  • +
  • +

    Processes

    +
  • +
  • +

    Tools

    +
  • +
+
+
+

Using E2E MrChecker Test Framework it is possible to cover the majority of these areas.

+
+
+
+
QA Team Goal
+
+

For QA engineers, it is essential to take care of the product code quality.

+
+
+

Therefore, we have to understand, that a test case is also code which has to be validated against quality gates. As a result, we must test our developed test case like it is done during standard Software Delivery Life Cycle.

+
+
+
+
Well rounded test case production process
+
+
    +
  • +

    How do we define top-notch test cases development process in E2E MrChecker Test Framework

    +
  • +
+
+
+
+image5 +
+
+
+
+
Continuous Integration (CI) and Continuous Delivery (CD)
+
+ +
+
+
+image6 +
+
+
+
+
What should you receive from this DevOps module
+
+
+image7 +
+
+
+
+
What will you gain with our DevOps module
+
+

The CI procedure has been divided into transparent modules. This solution makes configuration and maintenance very easy because everyone is able to manage versions and customize the configuration independently for each module. A separate security module ensures the protection of your credentials and assigned access roles regardless of changes in other modules.

+
+
+
+image8 +
+
+
+

Your CI process will be matched to the current project. You can easily go back to the previous configuration, test a new one or move a selected one to other projects.

+
+
+
+image9 +
+
+
+

DevOps module supports a delivery model in which executors are made available to the user as needed. It has such advantages as:

+
+
+
    +
  • +

    Saving computing resources

    +
  • +
  • +

    Eliminating guessing on your infrastructure capacity needs

    +
  • +
  • +

    Not spending time on running and maintaining additional executors +== How to build this DevOps module

    +
  • +
+
+
+

Once you have implemented the module, you can learn more about it here:

+
+
+ +
+
+
+
Continuous Integration
+
+

Embrace quality with Continuous Integration while you produce test case(s).

+
+
+
+
Overview
+
+

There are two ways to set up your Continuous Integration environment:

+
+
+
    +
  1. +

    Create a Jenkins instance from scratch (e.g. by using the Jenkins Docker image)

    +
    +

    Using a clean Jenkins instance requires the installation of additional plugins. The plugins required and their versions can be found on this page.

    +
    +
  2. +
  3. +

    Use the pre-configured custom Docker image provided by us

    +
    +

    No more additional configuration is required (but optional) using this custom Docker image. Additionally, this Jenkins setup allows dynamical scaling across multiple machines and even cloud (AWS, Azure, Google Cloud etc.).

    +
    +
  4. +
+
+
+
+
Jenkins Overview
+
+

Jenkins is an Open Source Continuous Integration Tool. It allows the user to create automated build jobs which will run remotely on so called Jenkins Slaves. A build job can be triggered by several events, for example on new pull request on specified repositories or timed (e.g. at midnight).

+
+
+
+
+Jenkins Configuration
+
+

Tests created by using the testing framework can easily be implemented on a Jenkins instance. The following chapter will describe such a job configuration. If you’re running your own Jenkins instance, you may have to install additional plugins listed on the page Jenkins Plugins for a trouble-free integration of your tests.

+
+
+
+
== Initial Configuration
+
+

The test job is configured as a so-called parameterized job. This means, after starting the job, parameters can be specified, which will then be used in the build process. In this case, branch and testname will be expected when starting the job. These parameters specify which branch in the code repository should be checked out (possibly feature branch) and the name of the test that should be executed.

+
+
+
+image79 +
+
+
+
+
== Build Process Configuration
+
+
    +
  • +

    The first step inside the build process configuration is to get the author of the commit that was made. The mail will be extracted and gets stored in a file called build.properties. This way, the author can be notified if the build fails.

    +
    +
    +image80 +
    +
    +
  • +
  • +

    Next up, Maven will be used to check if the code can be compiled, without running any tests.

    +
    +
    +image81 +
    +
    +
    +

    After making sure that the code can be compiled, the actual tests will be executed.

    +
    +
    +
    +image82 +
    +
    +
  • +
  • +

    Finally, reports will be generated.

    +
    +
    +image83 +
    +
    +
  • +
+
+
+
+
== Post Build Configuration
+
+
    +
  • +

    At first, the results will be imported to the Allure System

    +
    +
    +image84 +
    +
    +
  • +
  • +

    JUnit test results will be reported as well. Using this step, the test result trend graph will be displayed on the Jenkins job overview.

    +
    +
    +image85 +
    +
    +
  • +
  • +

    Finally, an E-Mail will be sent to the previously extracted author of the commit.

    +
    +
    +image86 +
    +
    +
  • +
+
+
+
+
Using the Pre-Configured Custom Docker Image
+
+

If you are starting a new Jenkins instance for your tests, we’d suggest using the pre-configured Docker image. This image already contains all the configurations and additional features.

+
+
+

The configurations are e.g. Plugins and Pre-Installed job setup samples. This way, you don’t have to set up the entire CI-Environment from the ground up.

+
+
+

Additional features from this docker image allow dynamic creation and deletion of Jenkins slaves, by creating Docker containers. Also, Cloud Solutions can be implemented to allow wide-spread load balancing.

+
+
+
+
Continuous Delivery
+
+

Include quality with Continuous Delivery during product release.

+
+
+
+image87 +
+
+
+
+
Overview
+
+

CD from Jenkins point of view does not change a lot from Continuous Integration one.

+
+
+
+
Jenkins Overview
+
+

Use the same Jenkins settings for Jenkins CD setup as for CI, please. link. The only difference is:

+
+
+
    +
  • +

    What type of test you will execute. Before, we have been choosing test case(s), now we will choose test suite(s)

    +
  • +
  • +

    Who will trigger the given Smoke/Integration/Performance job

    +
  • +
  • +

    What is the name of the official branch. This branch ought always to be used in every CD execution. It will be either master or develop.

    +
  • +
+
+
+
+
Jenkins for Smoke Tests
+
+

In the $TESTNAME variable, where we input the test name( link ), please input the name of a test suite assembled together of tests tagged as smoke tests -( link ) thus running all the smoke tests.

+
+
+
+
Jenkins for Performance Tests
+
+

Under construction - added when WebAPI module is included.

+
+
+
+
Pipeline structure
+ +
+
+
Pipeline configuration:
+
+

The default interaction with Jenkins requires manually configured jobs. This keeps the configuration of a job in Jenkins separate from the source code. With the Pipeline plugin, users can implement a pipeline procedure in a Jenkinsfile and store it in the repository with other code. This approach is used in the Mr Checker framework. More info: https://jenkins.io/solutions/pipeline/

+
+
+

Our CI & CD processes are divided into a few separate files: Jenkins_node.groovy is the file to manage all processes. It defines all operations executed on a Jenkins node, so all code in this file is closed in node closure. Workflow in Jenkinsfile:

+
+
+
    +
  • +

    Read all parameters from a Jenkins job

    +
  • +
  • +

    Execute stage to prepare the environment

    +
  • +
  • +

    Execute git pull command

    +
  • +
  • +

    Set Jenkins job description

    +
  • +
  • +

    Execute compilation of the project in a special prepared docker container

    +
  • +
  • +

    Execute unit tests

    +
  • +
  • +

    Execute integration tests

    +
  • +
  • +

    Deploy artifacts to a local repository

    +
  • +
  • +

    Deploy artifacts to an external repository (nexus/arifactory)

    +
  • +
+
+
+

Not all the steps must be present in the Jenkins files. This should be configured for particular job requirements.

+
+
+
+
Description of stages:
+ +
+
+
Stage “Prepare environment”
+
+

First thing to do in this stage is overwriting properties loaded from Jenkins job. It is defined in “overrideProperties” function. The next function, “setJenkinsJobVariables” defines environment variables such as :

+
+
+
    +
  • +

    JOB_NAME_UPSTREAM

    +
  • +
  • +

    BUILD_DISPLAY_NAME_UPSTREAM

    +
  • +
  • +

    BUILD_URL_UPSTREAM

    +
  • +
  • +

    GIT_CREDENTIALS

    +
  • +
  • +

    JENKINS_CREDENTIALS

    +
  • +
+
+
+

The last function in the stage – "setWorkspace" – creates an environment variable with the path to the local workspace. This is required because, when using the Pipeline plugin, Jenkins does not create the WORKSPACE env variable.

+
+
+
+
Stage "Git pull"
+
+

It pulls sources from the repository and loads “git pull” file which contains additional methods:

+
+
+
    +
  • +

    setGitAuthor – setting properties about git author to the file “build.properties” and loading created file

    +
  • +
  • +

    tryMergeWithBranch – checking if actual branch can be merged with default main branch

    +
  • +
+
+
+
+
Stage “Build compile”
+
+

Verify with maven that code builds without errors

+
+
+
+
Stage “Unit test”
+
+

Execute unit tests with mvn surefire test and publish reports in junit and allure format

+
+
+
+
Stage “Integration test”
+
+

Execute integration tests with mvn surefire test and publish reports in junit and allure format

+
+
+
+
Stage “Deploy – local repo”
+
+

Archive artifacts as a jar file in the local repository

+
+
+
+
Stage “Deploy – nexus repo”
+
+

Deploy to the external repository with maven release deploy command with credentials stored in Jenkins machine. Additional files:

+
+
+
    +
  • +

    mailSender.groovy – contains methods for sending mail with generated content

    +
  • +
  • +

    stashNotification.groovy – send job status for bitbucket by a curl command

    +
  • +
  • +

    utils.groovy - contains additional functions to load properties, files and generate additional data

    +
  • +
+
+
+
+
Selenium Grid
+ +
+
+
What is Selenium Grid
+
+

Selenium Grid allows running web/mobile browsers test cases to fulfill basic factors, such as:

+
+
+
    +
  • +

    Independent infrastructure, similar to end-users'

    +
  • +
  • +

    Scalable infrastructure (~50 simultaneous sessions at once)

    +
  • +
  • +

    Huge variety of web browsers (from mobile to desktop)

    +
  • +
  • +

    Continuous Integration and Continuous Delivery process

    +
  • +
  • +

    Supporting multi-type programming languages (java, javascript, python, …​).

    +
  • +
+
+
+
+image88 +
+
+
+

On a daily basis, a test automation engineer uses their local environments for test case execution/development. However, a created browser test case has to be able to run on any infrastructure. Selenium Grid enables this portability.

+
+
+
+
Selenium Grid Structure
+
+
+image89 +
+
+
+

Full documentation of Selenium Grid can be found here and here.

+
+
+

'Vanilla flavour' Selenium Grid is based on two, not very complicated ingredients:

+
+
+
    +
  1. +

    Selenium Hub - as one machine, accepting connections to grid from test cases executors. It also plays a managerial role in connection to/from Selenium Nodes

    +
  2. +
  3. +

    Selenium Node - from one to many machines, where on each machine a browser used during test case execution is installed.

    +
  4. +
+
+
+
+
How to setup
+
+

There are two options of Selenium Grid setup:

+
+
+
    +
  • +

    Classic, static solution - link

    +
  • +
  • +

    Cloud, scalable solution - link

    +
  • +
+
+
+

Advantages and disadvantages of both solutions:

+
+
+
+image90 +
+
+
+
+
How to use Selenium Grid with E2E Mr Checker Test Frameworks
+
+

Run the following command either in Eclipse or in Jenkins:

+
+
+
+
> mvn test -Dtest=com.capgemini.ntc.selenium.tests.samples.resolutions.ResolutionTest -DseleniumGrid="http://10.40.232.61:4444/wd/hub" -Dos=LINUX -Dbrowser=chrome
+
+
+
+

As a result of this command:

+
+
+
    +
  • +

    -Dtest=com.capgemini.ntc.selenium.features.samples.resolutions.ResolutionTest - name of test case to execute

    +
  • +
  • +

    -DseleniumGrid="http://10.40.232.61:4444/wd/hub" - IP address of Selenium Hub

    +
  • +
  • +

    -Dos=LINUX - what operating system must be assumed during test case execution

    +
  • +
  • +

    -Dbrowser=chrome - what type of browser will be used during test case execution

    +
  • +
+
+
+
+image91 +
+
+
+
+
List of Jenkins Plugins
+
+

|===

+
+
+

|Plugin Name +|Version

+
+
+

|blueocean-github-pipeline +|1.1.4

+
+
+

|blueocean-display-url +|2.0

+
+
+

|blueocean +|1.1.4

+
+
+

|workflow-support +|2.14

+
+
+

|workflow-api +|2.18

+
+
+

|plain-credentials +|1.4

+
+
+

|pipeline-stage-tags-metadata +|1.1.8

+
+
+

|credentials-binding +|1.12

+
+
+

|git +|3.5.1

+
+
+

|maven-plugin +|2.17

+
+
+

|workflow-durable-task-step +|2.12

+
+
+

|job-dsl +|1.64

+
+
+

|git-server +|1.7

+
+
+

|windows-slaves +|1.3.1

+
+
+

|github +|1.27.0

+
+
+

|blueocean-personalization +|1.1.4

+
+
+

|jackson2-api +|2.7.3

+
+
+

|momentjs +|1.1.1

+
+
+

|workflow-basic-steps +|2.6

+
+
+

|workflow-aggregator +|2.5

+
+
+

|blueocean-rest +|1.1.4

+
+
+

|gradle +|1.27.1

+
+
+

|pipeline-maven +|3.0.0

+
+
+

|blueocean-pipeline-editor +|0.2.0

+
+
+

|durable-task +|1.14

+
+
+

|scm-api +|2.2.2

+
+
+

|pipeline-model-api +|1.1.8

+
+
+

|config-file-provider +|2.16.3

+
+
+

|github-api +|1.85.1

+
+
+

|pam-auth +|1.3

+
+
+

|workflow-cps-global-lib +|2.8

+
+
+

|github-organization-folder +|1.6

+
+
+

|workflow-job +|2.12.1

+
+
+

|variant +|1.1

+
+
+

|git-client +|2.5.0

+
+
+

|sse-gateway +|1.15

+
+
+

|script-security +|1.29.1

+
+
+

|token-macro +|2.1

+
+
+

|jquery-detached +|1.2.1

+
+
+

|blueocean-web +|1.1.4

+
+
+

|timestamper +|1.8.8

+
+
+

|greenballs +|1.15

+
+
+

|handlebars +|1.1.1

+
+
+

|blueocean-jwt +|1.1.4

+
+
+

|pipeline-stage-view +|2.8

+
+
+

|blueocean-i18n +|1.1.4

+
+
+

|blueocean-git-pipeline +|1.1.4

+
+
+

|ace-editor +|1.1

+
+
+

|pipeline-stage-step +|2.2

+
+
+

|email-ext +|2.58

+
+
+

|envinject-api +|1.2

+
+
+

|role-strategy +|2.5.1

+
+
+

|structs +|1.9

+
+
+

|locale +|1.2

+
+
+

|docker-workflow +|1.13

+
+
+

|ssh-credentials +|1.13

+
+
+

|blueocean-pipeline-scm-api +|1.1.4

+
+
+

|metrics +|3.1.2.10

+
+
+

|external-monitor-job +|1.7

+
+
+

|junit +|1.21

+
+
+

|github-branch-source +|2.0.6

+
+
+

|blueocean-config +|1.1.4

+
+
+

|cucumber-reports +|3.8.0

+
+
+

|pipeline-model-declarative-agent +|1.1.1

+
+
+

|blueocean-dashboard +|1.1.4

+
+
+

|subversion +|2.9

+
+
+

|blueocean-autofavorite +|1.0.0

+
+
+

|pipeline-rest-api +|2.8

+
+
+

|pipeline-input-step +|2.7

+
+
+

|matrix-project +|1.11

+
+
+

|pipeline-github-lib +|1.0

+
+
+

|workflow-multibranch +|2.16

+
+
+

|docker-plugin +|0.16.2

+
+
+

|resource-disposer +|0.6

+
+
+

|icon-shim +|2.0.3

+
+
+

|workflow-step-api +|2.12

+
+
+

|blueocean-events +|1.1.4

+
+
+

|workflow-scm-step +|2.6

+
+
+

|display-url-api +|2.0

+
+
+

|favorite +|2.3.0

+
+
+

|build-timeout +|1.18

+
+
+

|mapdb-api +|1.0.9.0

+
+
+

|pipeline-build-step +|2.5.1

+
+
+

|antisamy-markup-formatter +|1.5

+
+
+

|javadoc +|1.4

+
+
+

|blueocean-commons +|1.1.4

+
+
+

|cloudbees-folder +|6.1.2

+
+
+

|ssh-slaves +|1.20

+
+
+

|pubsub-light +|1.10

+
+
+

|pipeline-graph-analysis +|1.4

+
+
+

|allure-jenkins-plugin +|2.23

+
+
+

|mailer +|1.20

+
+
+

|ws-cleanup +|0.33

+
+
+

|authentication-tokens +|1.3

+
+
+

|blueocean-pipeline-api-impl +|1.1.4

+
+
+

|ldap +|1.16

+
+
+

|docker-commons +|1.8

+
+
+

|branch-api +|2.0.10

+
+
+

|workflow-cps +|2.36.1

+
+
+

|pipeline-model-definition +|1.1.8

+
+
+

|blueocean-rest-impl +|1.1.4

+
+
+

|ant +|1.7

+
+
+

|credentials +|2.1.14

+
+
+

|matrix-auth +|1.7

+
+
+

|pipeline-model-extensions +|1.1.8

+
+
+

|pipeline-milestone-step +|1.3.1

+
+
+

|jclouds-jenkins +|2.14

+
+
+

|bouncycastle-api +|2.16.1

+
+
+

|===

+
+
+
+
What is Docker
+
+

Docker is an open source software platform to create, deploy and manage virtualized application containers on a common operating system (OS), with an ecosystem of allied tools.

+
+
+
+
Where do we use Docker
+
+

DevOps module consists of Docker images

+
+
+
    +
  1. +

    Jenkins image

    +
  2. +
  3. +

    Jenkins job image

    +
  4. +
  5. +

    Jenkins management image

    +
  6. +
  7. +

    Security image

    +
  8. +
+
+
+

in addition, each new node is also based on Docker

+
+
+
+
Exploring basic Docker options
+
+

Let’s show some of the most important commands that are needed when working with our DevOps module based on the Docker platform. Each command given below should be preceded by a sudo call by default. If you don’t want to use sudo command create a Unix group called docker and add a user to it.

+
+
+
+
$ sudo groupadd docker
+$ sudo usermod -aG docker $USER
+
+
+
+
+
Build an image from a Dockerfile
+
+
+
##docker build [OPTIONS] PATH | URL | -
+##
+##Options:
+## --tag , -t : Name and optionally a tag in the ‘name:tag’ format
+
+$ docker build -t vc_jenkins_jobs .
+
+
+
+
+
Container start
+
+
+
##docker run [OPTIONS] IMAGE[:TAG|@DIGEST] [COMMAND] [ARG...]
+#
+##Options:
+##-d : To start a container in detached mode (background)
+##-it : interactive terminal
+##--name : assign a container name
+##--rm : clean up
+##--volumes-from="": Mount all volumes from the given container(s)
+##-p : explicitly map a single port or range of ports
+##--volume : storage associated with the image
+
+$ docker run -d --name vc_jenkins_jobs vc_jenkins_jobs
+
+
+
+
+
Remove one or more containers
+
+
+
##docker rm [OPTIONS] CONTAINER
+#
+##Options:
+##--force , -f : Force the removal of a running container
+
+$ docker rm -f jenkins
+
+
+
+
+
List containers
+
+
+
##docker ps [OPTIONS]
+##--all, -a : Show all containers (default shows just running)
+
+$ docker ps
+
+
+
+
+
Pull an image or a repository from a registry
+
+
+
##docker pull [OPTIONS] NAME[:TAG|@DIGEST]
+
+$ docker pull jenkins/jenkins:2.73.1
+
+
+
+
+
Push the image or a repository to a registry
+
+

Pushing new image takes place in two steps. First save the image by adding container ID to the commit command and next use push:

+
+
+
+
##docker push [OPTIONS] NAME[:TAG]
+
+$ docker ps
+  # copy container ID from the result
+$ docker commit b46778v943fh vc_jenkins_mng:project_x
+$ docker push vc_jenkins_mng:project_x
+
+
+
+
+
Return information on Docker object
+
+
+
##docker inspect [OPTIONS] NAME|ID [NAME|ID...]
+#
+##Options:
+##--format , -f : output format
+
+$ docker inspect -f '{{ .Mounts }}' vc_jenkins_mng
+
+
+
+
+
List images
+
+
+
##docker images [OPTIONS] [REPOSITORY[:TAG]]
+#
+##Options:
+--all , -a : show all images with intermediate images
+
+$ docker images
+$ docker images jenkins
+
+
+
+
+
Remove one or more images
+
+
+
##docker rmi [OPTIONS] IMAGE [IMAGE...]
+#
+##Options:
+##  --force , -f : Force removal of the image
+
+$ docker rmi jenkins/jenkins:latest
+
+
+
+
+
Run a command in a running container
+
+
+
##docker exec [OPTIONS] CONTAINER COMMAND [ARG...]
+##-d : run command in the background
+##-it : interactive terminal
+##-w : working directory inside the container
+##-e : Set environment variables
+
+$ docker exec vc_jenkins_jobs sh -c "chmod 755 config.xml"
+
+
+
+
+
Advanced commands
+ +
+
+
Remove dangling images
+
+
+
$ docker rmi $(docker images -f dangling=true -q)
+
+
+
+
+
Remove all images
+
+
+
$ docker rmi $(docker images -a -q)
+
+
+
+
+
Removing images according to a pattern
+
+
+
$ docker images | grep "pattern" | awk '{print $3}' | xargs docker rmi
+
+
+
+
+
Remove all exited containers
+
+
+
$ docker rm $(docker ps -a -f status=exited -q)
+
+
+
+
+
Remove all stopped containers
+
+
+
$ docker rm $(docker ps --no-trunc -aq)
+
+
+
+
+
Remove containers according to a pattern
+
+
+
$ docker ps -a | grep "pattern" | awk '{print $1}' | xargs docker rm
+
+
+
+
+
Remove dangling volumes
+
+
+
$ docker volume rm $(docker volume ls -f dangling=true -q)
+
+
+
+
+
+
+

MrChecker download

+
+ +
+
+
+

Windows

+
+ +
+
Advanced installation
+ +
+
+
Java installation
+
+

There is one important pre-requisite for Mr Checker installation - Java has to be installed on the computer and an environmental variable has to be set in order to obtain optimal functioning of the framework.

+
+
+
    +
  1. +

    Install Java 1.8 JDK 64bit

    +
    +

    Download and install Java download link

    +
    +
    +

    (To download JDK 8 from Oracle you have to have an account. It is recommended to get a JDK build based on OpenJDK from AdoptOpenJDK)

    +
    +
  2. +
  3. +

    Windows Local Environment - How to set:

    +
    +
      +
    • +

      Variable name: JAVA_HOME | Variable value: C:\Where_You’ve_Installed_Java

      +
    • +
    • +

      Variable name: PATH | Variable value: %JAVA_HOME%\bin;%JAVA_HOME%\lib

      +
      +
      +install win03 +
      +
      +
    • +
    +
    +
  4. +
  5. +

    Next, verify it in the command line:

    +
    +
    +
    > java --version
    +
    +
    +
  6. +
+
+
+
+
Other components installation
+
+

Install each component separately, or update the existing ones on your PC.

+
+
+
    +
  1. +

    Maven 3.5

    +
    +
      +
    • +

      Download Maven

      +
    • +
    • +

      Unzip Maven in following location C:\maven

      +
    • +
    • +

      Set Windows Local Environment

      +
      +
        +
      • +

        Variable name: M2_HOME | Variable value: C:\maven\apache-maven-3.5.0

        +
      • +
      • +

        Variable name: PATH | Variable value: %M2_HOME%\bin

        +
        +
        +install win04 +
        +
        +
      • +
      +
      +
    • +
    • +

      Verify it in the command line:

      +
      +
      +
      > mvn --version
      +
      +
      +
    • +
    +
    +
  2. +
  3. +

    IDE

    +
    +
      +
    • +

      Download a most recent Eclipse

      +
    • +
    • +

      Download a MrChecker Project https://downgit.github.io//home?url=https://github.com/devonfw/mrchecker/tree/develop/template[Template] to start a new project or Mrchecker Project https://downgit.github.io//home?url=https://github.com/devonfw/mrchecker/tree/develop/example[Example] to get better understanding what we are capable of.

      +
    • +
    • +

      You should consider installing some useful plugins such as: csvedit, cucumber editor.

      +
    • +
    • +

      Import:

      +
      +
      +install win05 +
      +
      +
    • +
    • +

      Projects from folders

      +
      +
      +install win06 +
      +
      +
    • +
    • +

      Open already created projects:

      +
      +
      +install win07 +
      +
      +
    • +
    • +

      Update project structure - ALT + F5

      +
      +
      +install win08 +
      +
      +
    • +
    +
    +
  4. +
+
+
+
+
+
+

Mac

+
+ +
+
MrChecker macOS installation
+
+

On this page, you can find all the details regarding MrChecker installation on your Mac.

+
+
+
+
Java installation
+
+

There is one important pre-requisite for Mr Checker installation - Java has to be installed on the computer and an environmental variable has to be set in order to obtain optimal functioning of the framework.

+
+
+
    +
  1. +

    Install Java 1.8 JDK 64bit

    +
    +

    Download and install Java download link

    +
    +
    +

    (To download JDK 8 from Oracle you have to have an account. It is recommended to get a JDK build based on OpenJDK from AdoptOpenJDK)

    +
    +
  2. +
  3. +

    Next, verify it in the command line:

    +
    +
    +
    > java --version
    +
    +
    +
  4. +
+
+
+
+
Other components installation
+
+

Install each component separately, or update the existing ones on your Mac.

+
+
+
    +
  1. +

    Maven 3.5

    +
    +
      +
    • +

      Download Maven

      +
    • +
    • +

      Unzip Maven in the following location /maven

      +
    • +
    • +

      Add Maven to PATH

      +
      +
      +
      > $ export PATH=$PATH:/maven/apache-maven-3.5.0/bin/
      +
      +
      +
    • +
    • +

      Verify in terminal:

      +
      +
      +
      > $ mvn -version
      +
      +
      +
    • +
    +
    +
  2. +
  3. +

    Eclipse IDE

    +
    +
      +
    • +

      Download and unzip Eclipse

      +
    • +
    • +

      Download MrCheckerTestFramework source code

      +
    • +
    • +

      Import:

      +
      +
      +image9 +
      +
      +
    • +
    • +

      Select Projects from folders:

      +
      +

      image10

      +
      +
    • +
    • +

      Open already created projects:

      +
      +

      image11

      +
      +
    • +
    • +

      Update project structure - ALT + F5

      +
      +

      image12

      +
      +
    • +
    +
    +
  4. +
+
+
+
+
+
+

My Thai Star

+
+ +
+
My Thai Star application setup
+
+

My Thai Star is a reference application for DevonFW, so it is used extensively in the majority of our examples. To make them run properly you should set it up somewhere and configure environment.csv accordingly. +You can get the app from its official repository here https://github.com/devonfw/my-thai-star.

+
+
+
+
Setting up My Thai Star app
+
+

Most of the important information is covered in https://github.com/devonfw/my-thai-star#deployment.

+
+
+
The quick summary would be:
+
    +
  1. +

    Get the machine with docker and docker-compose

    +
  2. +
  3. +

    Download the repository

    +
  4. +
  5. +

    Run docker-compose up

    +
  6. +
  7. +

    Go to your project to set up environment.csv

    +
  8. +
  9. +

    The variables we are interested in are MY_THAI_STAR_URL and MY_THAI_STAR_API_URL

    +
  10. +
  11. +

    If you set up My Thai Star application on different host adjust the values accordingly

    +
  12. +
  13. +

    The web application should be available using localhost:8081/restaurant

    +
  14. +
  15. +

    The web API should be available using localhost:8081/api

    +
  16. +
+
+
+
+
+
+

Tutorials

+
+
+

In order to learn more about MrChecker structure, start from Project Organisation section and then check out our fantastic tutorials:

+
+
+

This tutorial will guide you through the series of test which perform basic actions on webpages using MrChecker.

+
+
+

Make sure you already have MrChecker Test Framework installed on your PC. How to install?

+
+
+

Your Product Under Test will be the following website: http://the-internet.herokuapp.com/

+
+
+
Project organization
+ +
+
+
Importing projects
+
+

Every MrChecker project should be imported as a Maven Project.

+
+
+

Example from Eclipse IDE:

+
+
+
+1 +
+
+
+
+2 +
+
+
+

Enter the project path and select projects to import.

+
+
+
+3 +
+
+
+

When the import is finished, update the project structure - ALT + F5

+
+
+
+4 +
+
+
+
+
Exporting projects
+
+

In order to create a new standalone MrChecker project, you can use template-app-under-test and export it to the new folder:

+
+
+
+5 +
+
+
+
+6 +
+
+
+

Create a new folder for the project and enter its path. Select project and files to export:

+
+
+
+7 +
+
+
+

Change project name and other properties, if necessary, in pom.xml file:

+
+
+
+8 +
+
+
+

Then you can import the project to the workspace and create new packages and classes.

+
+
+
+
Creating new packages
+
+
    +
  1. +

    You will need two new packages: one for the new page classes, the other one for test classes:

    +
    +
      +
    • +

      Create a package for page classes

      +
      +
      +
      Open Eclipse
      +Use the "Project Explorer" on the left
      +Navigate to [your-project] → src/main/java → com.capgemini.mrchecker → selenium
      +Right-click on "selenium"
      +Click on "New" → New Package
      +Name the new package "com.capgemini.mrchecker.selenium.pages.[your-product-name]"
      +
      +
      +
    • +
    • +

      Create a package for test classes

      +
      +
      +
      Navigate to [your-project] → src/test/java → com.capgemini.mrchecker → selenium
      +Right click on "selenium"
      +Click on "New" → New Package
      +Name the new package "com.capgemini.mrchecker.selenium.tests.[your-product-name]"
      +
      +
      +
    • +
    +
    +
  2. +
+
+
+

Example:

+
+
+
+9 +
+
+
+
+
Creating new Page Classes
+
+
+
Navigate to: [your-project] → src/main/java → com.capgemini.mrchecker → selenium.pages.[your-product-name]
+Click on "New" → New Class
+Enter the name "YourPage"
+
+
+
+

Every Page Class should extend BasePage class. Import all necessary packages and override all required methods:

+
+
+
    +
  • +

    public boolean isLoaded() - returns true if the page is loaded and false if not

    +
  • +
  • +

    public void load() - loads the page

    +
  • +
  • +

    public String pageTitle() - returns page title

    +
  • +
+
+
+

Example:

+
+
+
+
 public class MainPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        return false;
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Main Page'");
+    }
+
+    @Override
+    public String pageTitle() {
+        return "Main Page Title";
+    }
+ }
+
+
+
+
+
Creating new Test Classes
+
+
+
Navigate to  [your-project] → src/test/java → com.capgemini.mrchecker → selenium.tests.[your-product-name]
+Click on "New" → New Class
+Enter the name "YourCaseTest"
+
+
+
+

Test classes should extend BaseTest class, import all necessary packages and override all required methods:

+
+
+
    +
  • +

    public void setUp() - executes before each test

    +
  • +
  • +

    public void tearDown() - executes after each test

    +
  • +
+
+
+

Optionally, it is also possible to implement the following methods:

+
+
+
    +
  • +

    @BeforeClass +public static void setUpBeforeClass() - runs only once before all tests

    +
  • +
  • +

    @AfterClass +public static void tearDownAfterClass() - runs only once after performing all tests

    +
  • +
+
+
+

Every test method has to be signed with "@Test" parameter.

+
+
+
+
 public class YourCaseTest extends BaseTest {
+    private static MainPage mainPage = new MainPage();
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        mainPage.load();
+    }
+
+    @AfterClass
+    public static void tearDownAfterClass() {
+
+    }
+
+    @Override
+    public void setUp() {
+        if (!mainPage.isLoaded()) {
+            mainPage.load();
+        }
+    }
+
+    @Override
+    public void tearDown() {
+
+    }
+
+    @Test
+    public void shouldTestRunWithoutReturningError() {
+
+    }
+ }
+
+
+
+
+
Running Tests
+
+

Run the test by right-clicking on the test method → Run as → JUnit test.

+
+
+
+
+
+

Basic Tutorials

+
+ +
+
+
+

== Basic Tests

+
+
+
+example1 +
+
+
+

The goal of this test is to open A/B Test subpage and redirect to another website.

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click A/B Testing link and go to A/B Test subpage

    +
  4. +
  5. +

    Click Elemental Selenium link and open it in new tab

    +
  6. +
  7. +

    Switch to Elemental Selenium page and check if it’s loaded

    +
  8. +
+
+
+
+example2 +
+
+
+== Page Class +
+

Create a Page class for AB Testing page. Override all the required methods:

+
+
+
+
 public class ABtestPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.ABTEST.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'A/B Test Control' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.ABTEST.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+ }
+
+
+
+
+== How to use Enum? +
+

Similarly as in environmental variables case, create an enum for storing values of subURLs:

+
+
+
+
 public enum PageSubURLsProjectYEnum {
+
+    BASIC_AUTH("basic_auth"),
+    NEW_WINDOW("windows/new"),
+    WINDOW("windows"),
+    CHECKBOX("checkboxes"),
+    CONTEXT_MENU("context_menu"),
+    KEY_PRESS("key_presses"),
+    DYNAMIC_CONTENT("dynamic_content"),
+    HOVERS("hovers"),
+    SORTABLE_DATA_TABLES("tables"),
+    REDIRECT("redirector"),
+    JAVASCRIPT_ALERTS("javascript_alerts"),
+    CHALLENGING_DOM("challenging_dom"),
+    STATUS_CODES("status_codes"),
+    LOGIN("login"),
+    ABTEST("abtest"),
+    BROKEN_IMAGES("broken_images"),
+    DROPDOWN("dropdown"),
+    HORIZONTAL_SLIDER("horizontal_slider"),
+    DOWNLOAD("download"),
+    FORGOT_PASSWORD("forgot_password"),
+    FORGOT_PASSWORD_EMAIL_SENT("email_sent"),
+    EXIT_INTENT("exit_intent"),
+    DYNAMIC_LOADING("dynamic_loading"),
+    DISAPPEARING_ELEMENTS("disappearing_elements"),
+    DRAG_AND_DROP("drag_and_drop"),
+    DYNAMIC_CONTROLS("dynamic_controls"),
+    UPLOAD("upload"),
+    FLOATING_MENU("floating_menu"),
+    FRAMES("frames"),
+    GEOLOCATION("geolocation"),
+    INFINITE_SCROLL("infinite_scroll"),
+    JQUERY_UI("jqueryui/menu"),
+    JAVASCRIPT_ERROR("javascript_error"),
+    LARGE_AND_DEEP_DOM("large"),
+    NESTED_FRAMES("nested_frames"),
+    NOTIFICATION_MESSAGE("notification_message"),
+    DOWNLOAD_SECURE("download_secure"),
+    SHIFTING_CONTENT("shifting_content"),
+    SLOW_RESOURCES("slow"),
+    TYPOS("typos"),
+    WYSIWYGEDITOR("tinymce");
+
+    /*
+     * Sub URLs are used as real locations in the test environment
+     */
+    private String subURL;
+
+    private PageSubURLsProjectYEnum(String subURL) {
+        this.subURL = subURL;
+    }
+
+    ;
+
+    private PageSubURLsProjectYEnum() {
+
+    }
+
+    @Override
+    public String toString() {
+        return getValue();
+    }
+
+    public String getValue() {
+        return subURL;
+    }
+
+}
+
+
+
+

Instead of mapping data from an external file, you can store and access them directly from the enum class:

+
+
+
+
PageSubURLsProjectYEnum.ABTEST.getValue()
+
+
+
+
+== Selector +
+

In this test case you need selector for only one page element:

+
+
+
+
private static final By elementalSeleniumLinkSelector = By.cssSelector("div > div > a");
+
+
+
+
+== Page methods +
+

You need two methods for performing page actions:

+
+
+
+
     /**
+     * Clicks 'Elemental Selenium' link at the bottom of the page.
+     *
+     * @return ElementalSeleniumPage object.
+     */
+    public ElementalSeleniumPage clickElementalSeleniumLink() {
+        getDriver().findElementDynamic(elementalSeleniumLinkSelector)
+                .click();
+        getDriver().waitForPageLoaded();
+        return new ElementalSeleniumPage();
+    }
+
+    /**
+     * Switches window to the next one - different than the current.
+     */
+    public void switchToNextTab() {
+        ArrayList<String> tabsList = new ArrayList<String>(getDriver().getWindowHandles());
+        getDriver().switchTo()
+                .window(tabsList.get(1));
+    }
+
+
+
+
+== Elemental Selenium Page Class +
+

To return new Elemental Selenium Page object, implement its class. You only need to write basic methods to check if the page is loaded. There is no need to interact with objects on the site:

+
+
+
+
 public class ElementalSeleniumPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(GetEnvironmentParam.ELEMENTAL_SELENIUM_PAGE.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Elemental Selenium' page.");
+        getDriver().get(GetEnvironmentParam.ELEMENTAL_SELENIUM_PAGE.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+}
+
+
+
+
+== Test Class +
+

Create a Test class and write a @Test method to execute the scenario:

+
+
+
+
 @Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class ABtestingTest extends TheInternetBaseTest {
+
+    private static ABtestPage abTestPage;
+
+    @Test
+    public void shouldOpenElementalSeleniumPageWhenClickElementalSeleniumLink() {
+
+        logStep("Click Elemental Selenium link");
+        ElementalSeleniumPage elementalSeleniumPage = abTestPage.clickElementalSeleniumLink();
+
+        logStep("Switch browser's tab to newly opened one");
+        abTestPage.switchToNextTab();
+
+        logStep("Verify if Elemental Selenium Page is opened");
+        assertTrue("Unable to open Elemental Selenium page", elementalSeleniumPage.isLoaded());
+    }
+
+}
+
+
+
+
+== Assert +
+

Asserts methods are used for creating test pass or fail conditions. The optional first parameter is a message which will be displayed in the test failure description.

+
+
+
    +
  • +

    assertTrue(boolean condition) - test passes if condition returns true

    +
  • +
  • +

    assertFalse(boolean condition) - test passes if condition returns false

    +
  • +
+
+
+

Also, add the @BeforeClass method to open the tested page:

+
+
+
+
 @BeforeClass
+    public static void setUpBeforeClass() {
+        abTestPage = shouldTheInternetPageBeOpened().clickABtestingLink();
+        logStep("Verify if ABTest page is opened");
+        assertTrue("Unable to open ABTest page", abTestPage.isLoaded());
+    }
+
+
+
+

@BeforeClass method executes only once before all other +@Test cases in the class. There is also a possibility to create a +@AfterClass method which is performed also once after all @Test cases.

+
+
+

You don’t need to implement @setUp and @tearDown methods because they’re already in TheInternetBaseTest class which you extend.

+
+
+
+== Categories +
+

You can group tests in categories. It’s useful when running many tests at once. Use this parameter:

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+
+
+
+

Then create an interface representing each category. Example:

+
+
+
+
public interface TestsSelenium {
+    /* For test which are testing web pages considering UI (user interface) and using selenium webdriver */
+}
+
+
+
+

To run a test from specified category create Test Suite class:

+
+
+
+
@RunWith(WildcardPatternSuite.class) //search for test files under /src/test/java
+@IncludeCategories({ TestsChrome.class }) // search all test files with category TestsChrome.class
+@ExcludeCategories({ TestsLocal.class, TestsNONParallel.class }) //exclude all test files with category TestsLocal.class and TestsNONParallel.class
+@SuiteClasses({ "../**/*Test.class" }) //search only test files, where file name ends with <anyChar/s>Test.class
+
+public class _TestSuiteChrome {
+
+}
+
+
+
+

You can run a Test Suite as a JUnit test.

+
+
+
+example3 +
+
+
+

In this test case, the goal is to pass username and password authorization and login to the next page.

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click on Basic Auth link

    +
  4. +
  5. +

    Open pop-up login window

    +
  6. +
  7. +

    Enter valid username and password

    +
  8. +
  9. +

    Open next subpage and verify if the user logged in successfully.

    +
  10. +
+
+
+
+== Page Class +
+

Create a page class which represents Basic Auth subpage after proper login.

+
+
+
+example4 +
+
+
+

Override all the required methods:

+
+
+
+
public class BasicAuthPage extends BasePage {
+
+    public BasicAuthPage() {
+
+    }
+
+    public BasicAuthPage(String login, String password) {
+        this.enterLoginAndPasswordByUrl(login, password);
+    }
+
+    @Override
+    public boolean isLoaded() {
+        return true;
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("load");
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+
+
+

In order to verify a login, create a selector to access the visible message.

+
+
+
+
 private static final By selectorTextMessage = By.cssSelector("#content > div > p");
+Then create a method to get message value:
+
+/**
+*       Returns message displayed by system after user's log in.
+*      @return String object representing message displayed by system after user's log in
+*/
+    public String getMessageValue() {
+                return getDriver().findElementDynamic(selectorTextMessage)
+                    .getText();
+}
+
+
+
+

Also, create a method to access the pop-up login window and enter user credentials:

+
+
+
+
    /**
+     * Authenticates user using standard simple authentication popup.
+     *
+     * @param login    User's login
+     * @param password User's password
+     * @throws AWTException
+     * @throws InterruptedException
+     */
+    public void enterLoginAndPassword(String login, String password) throws AWTException, InterruptedException {
+        Robot rb = new Robot();
+
+        Thread.sleep(2000);
+
+        StringSelection username = new StringSelection(login);
+        Toolkit.getDefaultToolkit()
+                .getSystemClipboard()
+                .setContents(username, null);
+        rb.keyPress(KeyEvent.VK_CONTROL);
+        rb.keyPress(KeyEvent.VK_V);
+        rb.keyRelease(KeyEvent.VK_V);
+        rb.keyRelease(KeyEvent.VK_CONTROL);
+
+        rb.keyPress(KeyEvent.VK_TAB);
+        rb.keyRelease(KeyEvent.VK_TAB);
+        Thread.sleep(2000);
+
+        StringSelection pwd = new StringSelection(password);
+        Toolkit.getDefaultToolkit()
+                .getSystemClipboard()
+                .setContents(pwd, null);
+        rb.keyPress(KeyEvent.VK_CONTROL);
+        rb.keyPress(KeyEvent.VK_V);
+        rb.keyRelease(KeyEvent.VK_V);
+        rb.keyRelease(KeyEvent.VK_CONTROL);
+
+        rb.keyPress(KeyEvent.VK_ENTER);
+        rb.keyRelease(KeyEvent.VK_ENTER);
+        Thread.sleep(2000);
+    }
+
+
+
+
+== Robot class +
+

Creating a Robot object allows performing basic system actions such as pressing keys, moving the mouse or taking screenshots. In this case, it’s used to paste login and password text from the clipboard using 'Ctrl + V' shortcut, go to the next field using 'Tab' key and submit by clicking 'Enter'.

+
+
+
+Toolkit +
+

Static class Toolkit can perform basic window actions such as scrolling to a specified position or moving context between components. In this case, it’s used to set clipboard content to username and password value.

+
+
+
+
Thread.sleep(long millis)
+
+
+
+

Web drivers like Selenium perform actions much faster than a normal user. This may cause unexpected consequences, e.g. some elements may not be loaded before the driver wants to access them. To avoid this problem you can use Thread.sleep(long millis) to wait a given amount of time and let the browser load the wanted component.

+
+
+

BEWARE: Using Thread.sleep(long millis) is not the recommended approach. Selenium driver gives methods to wait for a specified element to be enabled or visible with a timeout parameter. This is a more stable and effective way. Also, method waitForPageLoaded() will not solve that issue because it only waits for the ready state from the browser while some javascript actions might be performed after that.

+
+
+
+== Test Class +
+

Create a Test class and write a @Test method to execute the scenario. Save parameters as class fields:

+
+
+
+
@Category({ TestsLocal.class, TestsNONParallel.class })
+public class BasicAuthTest extends TheInternetBaseTest {
+
+    private static BasicAuthPage basicAuthPage;
+
+    private String login    = "admin";
+    private String password = "admin";
+    private String message  = "Congratulations! You must have the proper credentials.";
+
+    @Test
+    public void shouldUserLogInWithValidCredentials() throws InterruptedException, AWTException {
+        basicAuthPage = shouldTheInternetPageBeOpened().clickBasicAuthLink();
+
+        logStep("Enter login and password");
+        basicAuthPage.enterLoginAndPassword(login, password);
+
+        logStep("Verify if user logged in successfully");
+        assertEquals("Unable to login user with valid credentials", message,
+            basicAuthPage.getMessageValue());
+    }
+
+    @Override
+    public void tearDown() {
+        logStep("Navigate back to The-Internet page");
+        theInternetPage.load();
+    }
+}
+
+
+
+

assertEquals(Object expected, Object actual) - test passes if parameters are equal.

+
+
+
+== Alternative scenario: +
+

There is also a possibility to log in with credentials as a part of URL: http://login:password@the-internet.herokuapp.com/basic_auth

+
+
+

Another page class method:

+
+
+
+
/**
+     * Authenticates user passing credentials into URL.
+     *
+     * @param login    User's login
+     * @param password User's password
+     */
+    private void enterLoginAndPasswordByUrl(String login, String password) {
+        getDriver().get("http://" + login + ":" + password + "@" + "the-internet.herokuapp.com/" +
+            PageSubURLsProjectYEnum.BASIC_AUTH.getValue());
+    }
+
+
+
+

Another test class method:

+
+
+
+
@Test
+    public void shouldUserLogInWithValidCredentialsSetInURL() {
+        logStep("Enter user's credentials into URL to log in");
+        basicAuthPage = new BasicAuthPage(login, password);
+
+        logStep("Verify if user logged in successfully");
+        assertEquals("Unable to login user with valid credentials", message,
+            basicAuthPage.getMessageValue());
+    }
+
+
+
+

After running test class as a JUnit test, both test cases will be performed.

+
+
+

This test goal is to check the dimensions of broken images on the subpage.

+
+
+
+example5 +
+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Broken Image link and go to Broken Image subpage

    +
  4. +
  5. +

    Get the 3 images' dimensions and compare them with expected values

    +
  6. +
+
+
+
+== Page Class +
+

In this case, create an array of selectors to access images by index number:

+
+
+
+
public class BrokenImagePage extends BasePage {
+
+    private static final By[] selectorsImages = { By.cssSelector("div > img:nth-child(2)"),
+            By.cssSelector("div > img:nth-child(3)"),
+            By.cssSelector("div > img:nth-child(4)") };
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.BROKEN_IMAGES.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Broken Images' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.BROKEN_IMAGES.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns an image height in pixels.
+     *
+     * @param imageIndex An index of given image.
+     * @return Height of an image in pixels.
+     */
+    public int getImageHeight(int imageIndex) {
+        return getImageDimension(imageIndex).getHeight();
+    }
+
+    /**
+     * Returns an image width in pixels.
+     *
+     * @param imageIndex An index of given image.
+     * @return Width of an image in pixels.
+     */
+    public int getImageWidth(int imageIndex) {
+        return getImageDimension(imageIndex).getWidth();
+    }
+
+    private Dimension getImageDimension(int imageIndex) {
+        return getDriver().findElementDynamic(selectorsImages[imageIndex])
+                .getSize();
+    }
+
+}
+
+
+
+
+== Test Class +
+

Create @Test and @BeforeClass methods. Save expected images' dimensions in class fields:

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class BrokenImagesTest extends TheInternetBaseTest {
+
+    private static BrokenImagePage brokenImagePage;
+
+    private final int expectedHeight = 90;
+    private final int expectedWidth  = 120;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        brokenImagePage = shouldTheInternetPageBeOpened().clickBrokenImageLink();
+
+        logStep("Verify if Broken Image page is opened");
+        assertTrue("Unable to open Broken Image page", brokenImagePage.isLoaded());
+    }
+
+    @Test
+    public void shouldImageSizesBeEqualToExpected() {
+        for (int i = 0; i < 3; i++) {
+            logStep("Verify size of image with index: " + i);
+            assertEquals("Height of image with index: " + i + " is incorrect", expectedHeight,
+                   brokenImagePage.getImageHeight(i));
+            assertEquals("Width of image with index: " + i + " is incorrect", expectedWidth,
+                   brokenImagePage.getImageWidth(i));
+        }
+    }
+
+}
+
+
+
+

The test will pass if every image has the correct width and height.

+
+
+

The goal of this case is to find out how to create stable selectors.

+
+
+

In the browser’s developer mode, you can see how the page is built. Notice that the buttons' IDs change after each click, and that the values in the table don’t have unique attributes which could help to locate them.

+
+
+
+example6 +
+
+
+
+== DOM - Document Object Model +
+

HTML DOM is a model of the page created by the browser. The page can be represented as a tree of objects. Read more.

+
+
+

To create locators you can use element attributes such as id, class name etc.

+
+
+

In this case, since there are no unique attributes, the best approach is to use the HTML document structure and identify page elements by their place in the object hierarchy.

+
+
+
+
Page Class
+public class ChallengingDomPage extends BasePage {
+
+    private final By selectorTableRows   = By.cssSelector(".large-10 > table > tbody > tr");
+    private final By selectorFirstButton = By.cssSelector(".large-2.columns > .button:nth-
+            child(1)");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.CHALLENGING_DOM.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Challenging DOM' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.CHALLENGING_DOM.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns table text content as a list of String objects.
+     *
+     * @return A list of table values.
+     */
+    public List<String> getTableValues() {
+        return JsoupHelper.findTexts(selectorTableRows);
+    }
+
+    /**
+     * Clicks top button on the page from available button set.
+     */
+    public void clickFirstButton() {
+        getDriver().elementButton(selectorFirstButton)
+                .click();
+        getDriver().waitForPageLoaded();
+    }
+
+}
+
+
+
+
+== Jsoup Helper +
+

Jsoup Helper is the tool which helps to parse HTML document and get searched values. This is especially useful when values are organized in a generic structure such as a table.

+
+
+

JsoupHelper.findTexts(By selector) - this method returns text content of a table as a list of Strings

+
+
+
+== Test Class +
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Challenging DOM link and go to Challenging DOM subpage

    +
  4. +
  5. +

    Get and save table values

    +
  6. +
  7. +

    Click the first button

    +
  8. +
  9. +

    Get table values again

    +
  10. +
  11. +

    Compare table values before and after button click

    +
  12. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class ChallengingDomTest extends TheInternetBaseTest {
+
+    private static ChallengingDomPage challengingDomPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        challengingDomPage = shouldTheInternetPageBeOpened().clickChallengingDomLink();
+
+        logStep("Verify if Challenging Dom page is opened");
+        assertTrue("Unable to open Challenging Dom page", challengingDomPage.isLoaded());
+    }
+
+    @Test
+    public void shouldValuesInTableCellsStayUnchangedAfterClick() {
+
+        logStep("Get table values (before click any button)");
+        List<String> tableValuesBeforeClick = challengingDomPage.getTableValues();
+
+        logStep("Click first button");
+        challengingDomPage.clickFirstButton();
+
+        logStep("Get table values (after click first button)");
+        List<String> tableValuesAfterClick = challengingDomPage.getTableValues();
+
+        logStep("Verify equality of table values before and after click");
+        assertEquals("Values from table cells were changed after click", tableValuesBeforeClick,
+                tableValuesAfterClick);
+    }
+
+}
+
+
+
+

Because values in the table don’t change, the test should pass if object locators are solid.

+
+
+

In this example, you will learn how to test checkboxes on the page.

+
+
+
+example7 +
+
+
+

A checkbox is a simple web element which can be selected or unselected by clicking on it.

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Checkboxes link and go to Checkboxes page

    +
  4. +
  5. +

    Test if the first checkbox is unchecked

    +
  6. +
  7. +

    Select the first checkbox

    +
  8. +
  9. +

    Test if the first checkbox is checked

    +
  10. +
  11. +

    Test if the second checkbox is checked

    +
  12. +
  13. +

    Unselect second checkbox

    +
  14. +
  15. +

    Test if the second checkbox is unchecked

    +
  16. +
+
+
+
+== Page Class +
+

Because both checkboxes are in one form, it’s possible to locate them by one selector and then access each individual one by index.

+
+
+
+example8 +
+
+
+
+
public class CheckboxesPage extends BasePage {
+
+    private final static By checkboxesFormSelector = By.cssSelector("#checkboxes");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.CHECKBOX.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Checkboxes' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.CHECKBOX.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Verifies if checkbox form is visible on the page.
+     *
+     * @return true if checkboxes are present and displayed on the page
+     */
+    public boolean isElementCheckboxesFormVisible() {
+        return getDriver().elementCheckbox(checkboxesFormSelector)
+                .isDisplayed();
+    }
+
+    /**
+     * Verifies if given checkbox is selected or not.
+     *
+     * @param index The index of given checkbox
+     * @return true if given checkbox is selected
+     */
+    public boolean isCheckboxSelected(int index) {
+        return getDriver().elementCheckbox(checkboxesFormSelector)
+                .isCheckBoxSetByIndex(index);
+    }
+
+    /**
+     * Selects given checkbox. Unselects, if it is already selected.
+     *
+     * @param index The index of given checkbox
+     */
+    public void selectCheckbox(int index) {
+        CheckBox checkbox = getDriver().elementCheckbox(checkboxesFormSelector);
+        if (isCheckboxSelected(index)) {
+            checkbox.unsetCheckBoxByIndex(index);
+        } else {
+            checkbox.setCheckBoxByIndex(index);
+        }
+    }
+
+}
+
+
+
+
+== CheckBox +
+

CheckBox class contains methods to perform actions on checkboxes, such as setting, unsetting, or verifying if the specified box is checked. +Use the method elementCheckbox(By selector) to create a CheckBox object.

+
+
+
+== Test Class +
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class CheckboxesTest extends TheInternetBaseTest {
+
+    private static CheckboxesPage checkboxesPage;
+
+    @Override
+    public void setUp() {
+        checkboxesPage = shouldTheInternetPageBeOpened().clickCheckboxesLink();
+
+        logStep("Verify if Checkboxes page is opened");
+        assertTrue("Unable to open Checkboxes page", checkboxesPage.isLoaded());
+    }
+
+    @Test
+    public void shouldCheckboxBeSelectedAfterClick() {
+
+        logStep("Verify if first checkbox is not selected");
+        assertFalse("The checkbox is selected", checkboxesPage.isCheckboxSelected(0));
+
+        logStep("Select first checkbox");
+        checkboxesPage.selectCheckbox(0);
+
+        logStep("Verify if first checkbox is selected");
+        assertTrue("The checkbox is not selected", checkboxesPage.isCheckboxSelected(0));
+    }
+
+    @Test
+    public void shouldCheckboxBeUnselectedAfterClick() {
+
+        logStep("Verify if second checkbox is selected");
+        assertTrue("The checkbox is not selected", checkboxesPage.isCheckboxSelected(1));
+
+        logStep("Select second checkbox");
+        checkboxesPage.selectCheckbox(1);
+
+        logStep("Verify if second checkbox is not selected");
+        assertFalse("The checkbox is selected", checkboxesPage.isCheckboxSelected(1));
+    }
+
+}
+
+
+
+

After running the Test Class, both @Test cases will be performed. Before each one, the overridden setUp method will be executed.

+
+
+

This case will show how to test changing website content.

+
+
+
+example9 +
+
+
+

After refreshing page (F5) a few times, a new element should appear:

+
+
+
+example10 +
+
+
+

Then, after another couple of refreshes, it should disappear.

+
+
+

You can check in developer mode that the Gallery element does not exist in the HTML document either, until it appears on the page. The element is created by JavaScript.

+
+
+
+example11 +
+
+
+
+example12 +
+
+
+

Steps:

+
+
+
    +
  1. +

    Load The Internet Main Page

    +
  2. +
  3. +

    Click Disappearing Elements link and go to that subpage

    +
  4. +
  5. +

    Check if Menu Buttons exist on the page

    +
  6. +
  7. +

    Refresh the page until a new element appears

    +
  8. +
  9. +

    Check if Gallery Button exists

    +
  10. +
  11. +

    Check if the number of buttons equals the expected value

    +
  12. +
  13. +

    Refresh the page until an element disappears

    +
  14. +
  15. +

    Check if Gallery Button does not exist

    +
  16. +
  17. +

    Check if the number of buttons is smaller than before

    +
  18. +
+
+
+
+== Page Class +
+
+
public class DisappearingElementsPage extends BasePage {
+
+    private static final By selectorGalleryMenuButton = By.cssSelector("li > a[href*=gallery]");
+    private static final By selectorMenuButtons       = By.cssSelector("li");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DISAPPEARING_ELEMENTS.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Disappearing Elements' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DISAPPEARING_ELEMENTS.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns a number of WebElements representing menu buttons.
+     *
+     * @return A number of WebElements.
+     */
+    public int getNumberOfMenuButtons() {
+        return getDriver().findElementDynamics(selectorMenuButtons)
+                .size();
+    }
+
+    /**
+     * Returns WebElement representing disappearing element of menu.
+     *
+     * @return Disappearing WebElement if visible, null otherwise.
+     */
+    public WebElement getGalleryMenuElement() {
+        return getDriver().findElementQuietly(selectorGalleryMenuButton);
+    }
+
+    /**
+     * Refreshes web page as many times as it is required to appear/disappear menu button
+     * WebElement.
+     *
+     * @param shouldAppear Determines if element should appear (true) or disappear (false).
+     */
+    public void refreshPageUntilWebElementAppears(boolean shouldAppear) {
+        int numberOfAttempts = 5;
+        int counter = 0;
+        while (!isVisibilityAsExpected(shouldAppear) || isMaxNumberOfAttemptsReached(counter++,
+                numberOfAttempts)) {
+            refreshPage();
+        }
+    }
+
+    /**
+     * Verify if visibility of Gallery button is the same as expected
+     *
+     * @param expected Determines if element should be visible (true) or not visible (false).
+     */
+    private boolean isVisibilityAsExpected(boolean expected) {
+        boolean isVisibilityDifferentThanExpected = isGalleryMenuElementVisible() ^ expected;
+        return !isVisibilityDifferentThanExpected;
+    }
+
+    private boolean isGalleryMenuElementVisible() {
+        boolean result = false;
+        WebElement gallery = getGalleryMenuElement();
+        if (gallery != null)
+            result = gallery.isDisplayed();
+        return result;
+    }
+
+    private boolean isMaxNumberOfAttemptsReached(int attemptNo, int maxNumberOfAttempts) {
+        return attemptNo ==  maxNumberOfAttempts;
+    }
+
+}
+
+
+
+

findElementQuietly(By selector) works similarly to findElementDynamics(By selector) but won’t throw an exception if an element wasn’t found. In this case, the searched WebElement will have a NULL value.

+
+
+
+== Test Class +
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class DisappearingElementsTest extends TheInternetBaseTest {
+
+    private static final int totalNumberOfMenuButtons = 5;
+    private static DisappearingElementsPage disappearingElementsPage;
+    private static       int numberOfMenuButtons      = 0;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        disappearingElementsPage = shouldTheInternetPageBeOpened().clickDisappearingElementsLink();
+
+        logStep("Verify if Disappearing Elements page is opened");
+        assertTrue("Unable to open Disappearing Elements page",
+                disappearingElementsPage.isLoaded());
+
+        logStep("Verify if menu button elements are visible");
+        numberOfMenuButtons = disappearingElementsPage.getNumberOfMenuButtons();
+        assertTrue("Unable to display menu", numberOfMenuButtons > 0);
+    }
+
+    @Test
+    public void shouldMenuButtonElementAppearAndDisappearAfterRefreshTest() {
+        logStep("Click refresh button until menu button appears");
+        disappearingElementsPage.refreshPageUntilWebElementAppears(true);
+
+        logStep("Verify if menu button element appeared");
+        assertNotNull("Unable to disappear menu button element",
+                disappearingElementsPage.getGalleryMenuElement());
+        assertEquals("The number of button elements after refresh is incorrect",
+                totalNumberOfMenuButtons, disappearingElementsPage.getNumberOfMenuButtons());
+
+        logStep("Click refresh button until menu button disappears");
+        disappearingElementsPage.refreshPageUntilWebElementAppears(false);
+
+        logStep("Verify if menu button element disappeared");
+        assertNull("Unable to appear menu button element",
+                disappearingElementsPage.getGalleryMenuElement());
+        assertTrue("The number of button elements after refresh is incorrect",
+                totalNumberOfMenuButtons > disappearingElementsPage.getNumberOfMenuButtons());
+    }
+
+}
+
+
+
+

assertNull(Object object) - test passes if Object returns NULL +assertNotNull(Object object) - test passes if Object does not return NULL

+
+
+

This case shows how to move draggable elements on the page.

+example13 +

+
+
+

Try to move A to B position and see what happens. Also, open browser developer mode and see how the DOM changes.

+
+
+
+example14 +
+
+
+

The page can easily be broken. You can try to do so and check how the page structure changed in browser developer mode.

+
+
+
+example15 +
+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Drag and Drop link and open subpage

    +
  4. +
  5. +

    Check if the Drag and Drop message is visible

    +
  6. +
  7. +

    Check if element A is in container A and B in container B

    +
  8. +
  9. +

    Move element A to position B

    +
  10. +
  11. +

    Check if element A is in container B and B in container A

    +
  12. +
  13. +

    Move element B to position A

    +
  14. +
  15. +

    Again check if element A is in container A and B in container B

    +
  16. +
+
+
+
+== Page Class +
+
+
public class DragAndDropPage extends BasePage {
+
+    private static final By selectorDragAndDropText    = By.cssSelector("div#content h3");
+    private static final By selectorAElementContainer  = By.cssSelector("div#column-a");
+    private static final By selectorBElementContainer  = By.cssSelector("div#column-b");
+    private static final By selectorDescriptionElement = By.cssSelector("header");
+
+    private static final String dndHelperPath = "src/test/resources/js/drag_and_drop_helper.js";
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DRAG_AND_DROP.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Drag and Drop' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() + PageSubURLsProjectYEnum.DRAG_AND_DROP.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns information if drag and drop message is visible or not.
+     *
+     * @return true if exit drag and drop message was found on web page.
+     */
+    public boolean isDragAndDropMessageVisible() {
+        return getDriver().findElementDynamic(selectorDragAndDropText)
+                .isDisplayed();
+    }
+
+    /**
+     * Verifies if specified element is placed in designated container.
+     *
+     * @param element WebElement to be verified.
+     * @return true if element described as A exists in container A or element B exists in container B, false otherwise.
+     */
+    public boolean isElementPlacedInCorrectContainer(String element) {
+        return getDescriptionElement(findElementByDescription(element)).getText()
+                .equals(element);
+    }
+
+    private WebElement findElementByDescription(String element) {
+        WebElement result;
+        switch (element) {
+            case "A":
+                result = getContainerElement(selectorAElementContainer);
+                break;
+            case "B":
+                result = getContainerElement(selectorBElementContainer);
+                break;
+            default:
+                result = null;
+                BFLogger.logDebug("Chosen element doesn't exist on web page");
+        }
+        return result;
+    }
+
+    private WebElement getContainerElement(By container) {
+        return getDriver().findElementDynamic(container);
+    }
+
+    private WebElement getDescriptionElement(WebElement container) {
+        return container.findElement(selectorDescriptionElement);
+    }
+
+    /**
+     * Drags element to designated container and drops it.
+     *
+     * @param element         String describing WebElement expected to be dragged.
+     * @param from            String describing WebElement representing container of element expected to be dragged.
+     * @param destinationDesc String describing WebElement representing destination container where other element will be dragged.
+     */
+    public void dragElementToPosition(String element, String from, String destinationDesc) {
+        WebElement source = findElementByDescription(from);
+        WebElement description = getDescriptionElement(source);
+        WebElement destination = findElementByDescription(destinationDesc);
+        if (description.getText()
+                .equals(element))
+            dragElement(source, destination);
+    }
+
+}
+
+
+
+

Since HTML5, the normal Selenium drag-and-drop action stopped working, thus it’s necessary to execute JavaScript which performs the drag-and-drop. To do so, create a JavascriptExecutor object, then read the script from the file drag_and_drop_helper.js and execute it with additional arguments using the method executeScript(String script).

+
+
+

An example drag-and-drop solution:

+
+
+
+
    /**
+     * Drags and drops given WebElement to it's destination location.
+     * <p>
+     * Since HTML5 all Selenium Actions performing drag and drop operations stopped working as expected, e.g.
+     * original implementation, which was:
+     * <code>
+     * BasePage.getAction()
+     * .clickAndHold(draggable)
+     * .moveToElement(target)
+     * .release()
+     * .build()
+     * .perform();
+     * </code>
+     * finishes with no effect. For this reason, there is javaScript function used, to make sure that
+     * drag and drop operation will be successful.
+     * JavaScript function is stored under the following path: 'src/test/resources/js/drag_and_drop_helper.js'.
+     * Original source of the script:
+     * <a href="https://gist.github.com/rcorreia/2362544">drag_and_drop_helper</a>
+     * </p>
+     *
+     * @param draggable A WebElement to be dragged and dropped.
+     * @param target    A destination, where element will be dropped.
+     * @see JavascriptExecutor
+     * @see Actions
+     */
+    private void dragElement(WebElement draggable, WebElement target) {
+        JavascriptExecutor js;
+        INewWebDriver driver = getDriver();
+        List<String> fileContent;
+        String draggableId = draggable.getAttribute("id");
+        String targetId = target.getAttribute("id");
+        String script = null;
+        if (draggable.getAttribute("draggable")
+                .contains("true")) {
+            if (driver instanceof JavascriptExecutor) {
+                js = (JavascriptExecutor) driver;
+                Path path = Paths.get(dndHelperPath);
+                try {
+                    fileContent = Files.readAllLines(path);
+                    script = fileContent.stream()
+                            .collect(Collectors.joining());
+                } catch (IOException e) {
+                    BFLogger.logDebug("Unable to read file content: " + e.getMessage());
+                }
+                if (script != null && !script.isEmpty()) {
+                    String arguments = "$('#%s').simulateDragDrop({ dropTarget: '#%s'});";
+                    js.executeScript(script + String.format(arguments, draggableId, targetId));
+                }
+            }
+        }
+    }
+
+
+
+

Drag and Drop helper file:

+
+
+
+
(function( $ ) {
+        $.fn.simulateDragDrop = function(options) {
+                return this.each(function() {
+                        new $.simulateDragDrop(this, options);
+                });
+        };
+        $.simulateDragDrop = function(elem, options) {
+                this.options = options;
+                this.simulateEvent(elem, options);
+        };
+        $.extend($.simulateDragDrop.prototype, {
+                simulateEvent: function(elem, options) {
+                        /*Simulating drag start*/
+                        var type = 'dragstart';
+                        var event = this.createEvent(type);
+                        this.dispatchEvent(elem, type, event);
+
+                        /*Simulating drop*/
+                        type = 'drop';
+                        var dropEvent = this.createEvent(type, {});
+                        dropEvent.dataTransfer = event.dataTransfer;
+                        this.dispatchEvent($(options.dropTarget)[0], type, dropEvent);
+
+                        /*Simulating drag end*/
+                        type = 'dragend';
+                        var dragEndEvent = this.createEvent(type, {});
+                        dragEndEvent.dataTransfer = event.dataTransfer;
+                        this.dispatchEvent(elem, type, dragEndEvent);
+                },
+                createEvent: function(type) {
+                        var event = document.createEvent("CustomEvent");
+                        event.initCustomEvent(type, true, true, null);
+                        event.dataTransfer = {
+                                data: {
+                                },
+                                setData: function(type, val){
+                                        this.data[type] = val;
+                                },
+                                getData: function(type){
+                                        return this.data[type];
+                                }
+                        };
+                        return event;
+                },
+                dispatchEvent: function(elem, type, event) {
+                        if(elem.dispatchEvent) {
+                                elem.dispatchEvent(event);
+                        }else if( elem.fireEvent ) {
+                                elem.fireEvent("on"+type, event);
+                        }
+                }
+        });
+})(jQuery);
+
+
+
+
+== Test Class +
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+/**
+ * Verifies drag-and-drop on the "Drag and Drop" subpage: element A is dragged
+ * into container B and back again, with placement checks after each move.
+ */
+public class DragAndDropTest extends TheInternetBaseTest {
+
+    private static final String ELEMENT_A   = "A";
+    private static final String CONTAINER_A = "A";
+    private static final String ELEMENT_B   = "B";
+    private static final String CONTAINER_B = "B";
+
+    private static DragAndDropPage dragAndDropPage;
+
+    /** Opens the Drag And Drop page once for all tests and verifies it loaded. */
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        dragAndDropPage = shouldTheInternetPageBeOpened().clickDragAndDropLink();
+
+        logStep("Verify if Drag And Drop page is opened");
+        assertTrue("Unable to open Drag And Drop page", dragAndDropPage.isLoaded());
+
+        logStep("Verify if Drag And Drop message is visible");
+        assertTrue("Drag And Drop message is not visible", dragAndDropPage.isDragAndDropMessageVisible());
+    }
+
+    @Test
+    public void shouldDraggableElementBeMovedAndDropped() {
+        logStep("Verify if elements are placed in proper containers");
+        assertTrue("Element A doesn't exist in container A", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_A));
+        assertTrue("Element B doesn't exist in container B", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_B));
+
+        // Fixed: dropped the stray "Step 7:" prefix absent from every other logStep.
+        logStep("Drag and drop element A into container B");
+        dragAndDropPage.dragElementToPosition(ELEMENT_A, CONTAINER_A, CONTAINER_B);
+
+        // Fixed: dropped the stray "Step 8:" prefix absent from every other logStep.
+        logStep("Verify if elements are placed in improper containers");
+        assertFalse("Element A doesn't exist in container B", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_A));
+        assertFalse("Element B doesn't exist in container A", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_B));
+
+        // Fixed: the original log said "element B back into container B" although
+        // the call below moves ELEMENT_A from CONTAINER_B back to CONTAINER_A.
+        logStep("Drag and drop element A back into container A");
+        dragAndDropPage.dragElementToPosition(ELEMENT_A, CONTAINER_B, CONTAINER_A);
+
+        logStep("Verify if elements are placed in proper containers");
+        assertTrue("Element A doesn't exist in container A", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_A));
+        assertTrue("Element B doesn't exist in container B", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_B));
+    }
+
+}
+
+
+
+

This example shows how to select an element from the dropdown list.

+
+
+
+example16 +
+
+
+

Check in the developer mode how a Dropdown List’s content has been organized.

+
+
+
+example17 +
+
+
+

Notice that the Dropdown Options have different attributes, such as "disabled" or "selected".

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click the Dropdown link and go to the subpage

    +
  4. +
  5. +

    Select first dropdown Option

    +
  6. +
  7. +

    Check if Option 1 is selected

    +
  8. +
  9. +

    Select second dropdown Option

    +
  10. +
  11. +

    Check if Option 2 is selected

    +
  12. +
+
+
+
+== Page Class +
+
+
public class DropdownPage extends BasePage {
+
+    // CSS selector of the single <select id="dropdown"> element on the page.
+    private static final By dropdownListSelector = By.cssSelector("#dropdown");
+
+    /** Checks that the current URL points at the Dropdown subpage. */
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DROPDOWN.getValue());
+    }
+
+    /** Navigates to the Dropdown subpage and waits for it to load. */
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Dropdown List' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DROPDOWN.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    /** Returns the actual page title reported by the browser. */
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Selects dropdown's value by given index.
+     *
+     * @param index Index of option to be selected
+     */
+    public void selectDropdownValueByIndex(int index) {
+        getDriver().elementDropdownList(dropdownListSelector)
+                .selectDropdownByIndex(index);
+    }
+
+    /**
+     * Returns text value of first selected dropdown's option.
+     *
+     * @return String object representing value of dropdown's option
+     */
+    public String getSelectedDropdownValue() {
+        return getDriver().elementDropdownList(dropdownListSelector)
+                .getFirstSelectedOptionText();
+    }
+}
+
+
+
+
+== DropdownListElement class +
+

DropdownListElement is MrChecker’s class, which contains methods for performing actions on dropdown lists:

+
+
+
+
elementDropdownList() - returns DropdownListElement Object
+
+
+
+
+== Test Class +
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+/**
+ * Verifies option selection on the "Dropdown" subpage: selects the first and
+ * second options by index and checks the selected option text each time.
+ */
+public class DropdownTest extends TheInternetBaseTest {
+
+    private static final String expectedFirstOptionValue  = "Option 1";
+    private static final String expectedSecondOptionValue = "Option 2";
+    private static DropdownPage dropdownPage;
+
+    /** Opens the Dropdown page once for all tests and verifies it loaded. */
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        dropdownPage = shouldTheInternetPageBeOpened().clickDropdownLink();
+
+        logStep("Verify if Dropdown page is opened");
+        assertTrue("Unable to open Dropdown page", dropdownPage.isLoaded());
+    }
+
+    @Test
+    public void shouldGetExpectedDropdownTextOptionAfterSelection() {
+
+        // Fixed: "drodown" typo in the original log message.
+        logStep("Select first dropdown option");
+        dropdownPage.selectDropdownValueByIndex(1);
+
+        logStep("Verify if selected option text is equal to the expected one");
+        assertEquals("Selected value is different than expected", expectedFirstOptionValue,
+                dropdownPage.getSelectedDropdownValue());
+
+        // Fixed: the original log said "first" although the second option is selected here.
+        logStep("Select second dropdown option");
+        dropdownPage.selectDropdownValueByIndex(2);
+
+        logStep("Verify if selected option text is equal to the expected one");
+        assertEquals("Selected value is different than expected", expectedSecondOptionValue,
+                dropdownPage.getSelectedDropdownValue());
+    }
+
+}
+
+
+
+

This case shows how to compare dynamic content.

+
+
+
+example18 +
+
+
+

Note that after site refresh, some of the content is different. You can see in the browser’s developer mode how the text and image sources are being changed.

+
+
+
+example19 +
+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Dynamic Content link and load subpage

    +
  4. +
  5. +

    Save page images sources and descriptions before the refresh

    +
  6. +
  7. +

    Refresh page

    +
  8. +
  9. +

    Save page images sources and their descriptions after refresh

    +
  10. +
  11. +

    Compare page content before and after refresh and verify if it’s different

    +
  12. +
+
+
+
+== Page Class +
+
+
public class DynamicContentPage extends BasePage {
+
+    // Rows of dynamic content: each row holds an image and a text description.
+    private static final By imagesLinksSelector        = By.cssSelector("div#content > div.row img");
+    private static final By imagesDescriptionsSelector = By.cssSelector("div#content > div.row div.large-10");
+
+    /** Checks that the current URL points at the Dynamic Content subpage. */
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DYNAMIC_CONTENT.getValue());
+    }
+
+    /** Navigates to the Dynamic Content subpage and waits for it to load. */
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Dynamic Content' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DYNAMIC_CONTENT.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    /** Returns the actual page title reported by the browser. */
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns list of picture descriptions being present on the web page.
+     *
+     * @return List of String objects representing descriptions
+     */
+    public List<String> getDescriptions() {
+        return new ListElements(imagesDescriptionsSelector).getTextList();
+    }
+
+    /**
+     * Returns a list of image links being present on the web page.
+     *
+     * @return List of String objects representing paths to pictures
+     */
+    public List<String> getImageLinks() {
+        return new ListElements(imagesLinksSelector)
+                .getList()
+                .stream()
+                .map(element -> element.getAttribute("src"))
+                .collect(Collectors.toList());
+    }
+}
+
+
+
+
+== ListElements +
+

ListElements is MrChecker collection which can store WebElement Objects. Constructing ListElements with cssSelector allows you to store every element on the page which fits the selector. Example methods:

+
+
+
+
getList() -  returns WebElements list,
+getTextList() - returns list of contents of each Element,
+getSize() - returns number of stored Elements
+In getImageLinks() example it's shown how to get a list of specified Elements' attributes.
+
+
+
+
+== Test Class +
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+/**
+ * Verifies that the "Dynamic Content" subpage serves different images and
+ * descriptions after a refresh while keeping the same number of rows.
+ */
+public class DynamicContentTest extends TheInternetBaseTest {
+
+    private static DynamicContentPage dynamicContentPage;
+
+    /** Opens the Dynamic Content page once for all tests and verifies it loaded. */
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        dynamicContentPage = shouldTheInternetPageBeOpened().clickDynamicContentLink();
+
+        logStep("Verify if Dynamic Content page is opened");
+        assertTrue("Unable to open Dynamic Content page", dynamicContentPage.isLoaded());
+    }
+
+    @Test
+    public void shouldImagesAndDescriptionsDifferAfterRefresh() {
+
+        logStep("Read images and descriptions before refresh");
+        List<String> descriptionsBeforeRefresh = dynamicContentPage.getDescriptions();
+        List<String> imagesBeforeRefresh = dynamicContentPage.getImageLinks();
+
+        // Fixed: "Refres" typo in the original log message.
+        logStep("Refresh page");
+        dynamicContentPage.refreshPage();
+        assertTrue("The Dynamic Content page hasn't been refreshed", dynamicContentPage.isLoaded());
+
+        logStep("Read images and descriptions after refresh");
+        List<String> descriptionsAfterRefresh = dynamicContentPage.getDescriptions();
+        List<String> imagesAfterRefresh = dynamicContentPage.getImageLinks();
+
+        logStep("Verify if descriptions are different after refresh");
+        assertEquals("Different number of descriptions before and after refresh",
+                descriptionsAfterRefresh.size(), descriptionsBeforeRefresh.size());
+
+        // List.equals compares both lists element by element in order, which is
+        // exactly what the original index loop with early break established:
+        // any positional mismatch means the content changed after refresh.
+        assertTrue("There are no differences between descriptions before and after refresh",
+                !descriptionsAfterRefresh.equals(descriptionsBeforeRefresh));
+
+        logStep("Verify if images are different after refresh");
+        // Fixed: this message said "descriptions" although it checks image counts.
+        assertEquals("Different number of images before and after refresh",
+                imagesAfterRefresh.size(), imagesBeforeRefresh.size());
+
+        assertTrue("There are no differences between images before and after refresh",
+                !imagesAfterRefresh.equals(imagesBeforeRefresh));
+    }
+}
+
+
+
+

In the test method, during differences verification, the goal is to compare every element from the first and second list and find the first difference.

+
+
+

This example shows how to test a page with dynamically loading content. Some elements don’t load during page loading, but during JavaScript execution.

+
+
+
+example23 +
+
+
+

Go to Example 1:

+
+
+
+example24 +
+
+
+

Click "start" and see what happens:

+
+
+
+example25 +
+
+
+

When loading ends, you should see the following message:

+
+
+
+example26 +
+
+
+

In the developer mode, you can see that the element with the "Hello World!" message exists in page DOM but it’s not displayed. However, the loading bar does not exist there - it’s created by JavaScript. The script is also visible in developer mode:

+
+
+
+example27 +
+
+
+

After clicking the "Start" button, the element "Loading" is created by the script, and the "Start" button becomes invisible. When loading ends, the "Hello World" message is displayed and the loading bar is hidden. Follow the changes in the developer mode:

+
+
+
+example28 +
+
+
+

Go to example 2: +From a user perspective, there is no difference in page functioning. However, in this case the element with the "Hello World!" message does not exist on the page before clicking "Start". It’s created by the script.

+
+
+
+example29 +
+
+
+

After clicking "Start", the element with the loading bar is created.

+
+
+
+example30 +
+
+
+

After a certain time, the loading bar becomes invisible, and then the script creates "Hello World!" element and displays it.

+
+
+
+example31 +
+
+
+
+== Page Class +
+
+
public class DynamicLoadingPage extends BasePage {
+
+    // Links to the two examples, plus the dynamic widgets they reveal:
+    // header text, Start button, loading bar, and the final example text.
+    private static final By selectorExampleOneLink     =
+            By.cssSelector("a[href*='dynamic_loading/1']");
+    private static final By selectorExampleTwoLink     =
+            By.cssSelector("a[href*='dynamic_loading/2']");
+    private static final By selectorDynamicLoadingText = By.cssSelector("div#content h3");
+    private static final By selectorStartButton        = By.cssSelector("div#start button");
+    private static final By selectorLoadingBar         = By.cssSelector("div#loading");
+    private static final By selectorExampleText        = By.cssSelector("div#finish h4");
+
+    /** Checks that the current URL points at the Dynamic Loading subpage. */
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DYNAMIC_LOADING.getValue());
+    }
+
+    /** Navigates to the Dynamic Loading subpage and waits for it to load. */
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Dynamically Loaded Page Elements' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DYNAMIC_LOADING.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    /** Returns the actual page title reported by the browser. */
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns information if dynamic loading message is visible or not.
+     *
+     * @return true if dynamic loading message was found on web page.
+     */
+    public boolean isDynamicLoadingMessageVisible() {
+        return getDriver().findElementDynamic(selectorDynamicLoadingText)
+                .isDisplayed();
+    }
+
+    /**
+     * Clicks Example 1 link.
+     */
+    public void clickExampleOneLink() {
+        getDriver().findElementDynamic(selectorExampleOneLink)
+                .click();
+    }
+
+    /**
+     * Clicks Example 2 link.
+     */
+    public void clickExampleTwoLink() {
+        getDriver().findElementDynamic(selectorExampleTwoLink)
+                .click();
+    }
+
+    /**
+     * Returns information if Start button is visible or not.
+     *
+     * @return true if Start button was found on web page.
+     */
+    public boolean isStartButtonVisible() {
+        return getDriver().findElementDynamic(selectorStartButton)
+                .isDisplayed();
+    }
+
+    /**
+     * Clicks Start button.
+     */
+    public void clickStartButton() {
+        getDriver().findElementDynamic(selectorStartButton)
+                .click();
+    }
+
+    /**
+     * Waits until WebElement representing waiting bar disappears and returns example text.
+     *
+     * @param waitTime The amount of time designated for waiting until waiting bar disappears.
+     * @return String representing example's text.
+     */
+    public String getExampleOneDynamicText(int waitTime) {
+        WebDriverWait wait = new WebDriverWait(getDriver(), waitTime);
+        // NOTE(review): the cast disambiguates FluentWait.until overloads —
+        // presumably Guava Function vs java.util.function.Function in this
+        // Selenium version; confirm before removing it.
+        wait.until((Function<? super WebDriver, Boolean>)
+                ExpectedConditions.invisibilityOfElementLocated(selectorLoadingBar));
+        return getDriver().findElementDynamic(selectorExampleText)
+                .getText();
+    }
+
+    /**
+     * Returns example text.
+     * <p>
+     * Waits until WebElement representing waiting bar disappears. Then waits until example text
+     * shows up.
+     * And after that returns example text.
+     * </p>
+     *
+     * @param waitTime The amount of time designated for waiting until waiting bar disappears and
+     * example text shows.
+     * @return String representing example's text.
+     */
+    public String getExampleTwoDynamicText(int waitTime) {
+        WebDriverWait wait = new WebDriverWait(getDriver(), waitTime);
+        // Example 2 creates the text element only after loading finishes, so a
+        // second, explicit visibility wait follows the invisibility wait.
+        wait.until((Function<? super WebDriver, Boolean>)
+                ExpectedConditions.invisibilityOfElementLocated(selectorLoadingBar));
+        wait.until((Function<? super WebDriver, WebElement>)
+                ExpectedConditions.visibilityOfElementLocated(selectorExampleText));
+        return getDriver().findElementDynamic(selectorExampleText)
+                .getText();
+    }
+
+}
+
+
+
+
+== WebDriverWait +
+

This class performs waiting for actions using Selenium Web Driver:

+
+
+
    +
  • +

    WebDriverWait(WebDriver driver, long timeOutInSeconds) - constructor, first parameter takes WebDriver, in a second you can specify a timeout in seconds. +FluentWait method:

    +
  • +
  • +

    until(Function<? super T, V> isTrue) - waits until condition function given as parameter returns expected value. If waiting time reaches timeout, it throws timeoutException.

    +
  • +
+
+
+

MrChecker implements various condition functions in the ExpectedConditions class :

+
+
+
    +
  • +

    visibilityOfElementLocated(By selector) - returns WebElement if it’s visible

    +
  • +
  • +

    invisibilityOfElementLocated(By selector) - returns true if Element under given selector is invisible

    +
  • +
+
+
+

WebDriver also has methods which wait for some conditions:

+
+
+
    +
  • +

    waitForElement(By selector)

    +
  • +
  • +

    waitForElementVisible(By selector)

    +
  • +
  • +

    waitUntilElementClickable(By selector)

    +
  • +
+
+
+

It’s possible to write your own condition function e.g.:

+
+
+
+
  public static ExpectedCondition<Boolean> invisibilityOfElementLocated(final By locator) {
+    return new ExpectedCondition<Boolean>() {
+      @Override
+      public Boolean apply(WebDriver driver) {
+        try {
+          return !(findElement(locator, driver).isDisplayed());
+        } catch (NoSuchElementException e) {
+          return true;
+        } catch (StaleElementReferenceException e) {
+          return true;
+        }
+      }
+    };
+  }
+
+
+
+

Or as a lambda expression:

+
+
+
+
        WebDriverWait wait = new WebDriverWait(getDriver(), waitTime);
+        wait.until((WebDriver driver) -> {
+            try {
+                return !(driver.findElement(selectorExampleText)
+                        .isDisplayed());
+            } catch (NoSuchElementException e) {
+                return true;
+            } catch (StaleElementReferenceException e) {
+                return true;
+            }
+        });
+
+
+
+
+== Test Class +
+

Case 1 steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Dynamic Loading link and go to a subpage with examples

    +
  4. +
  5. +

    Check if the page is loaded and "Dynamically Loaded Page Elements" header is visible

    +
  6. +
  7. +

    Click Example 1 link and load site

    +
  8. +
  9. +

    Verify if the "Start" button is visible

    +
  10. +
  11. +

    Click "Start"

    +
  12. +
  13. +

    Wait for the loading bar to disappear and check if the displayed message is as it should be

    +
  14. +
  15. +

    Go back to Dynamic Loading page

    +
  16. +
+
+
+

Case 2 steps:

+
+
+
    +
  1. +

    Check if the page is loaded and "Dynamically Loaded Page Elements" header is visible

    +
  2. +
  3. +

    Click Example 2 link and load site

    +
  4. +
  5. +

    Verify if the "Start" button is visible

    +
  6. +
  7. +

    Click "Start"

    +
  8. +
  9. +

    Wait for the loading bar to disappear

    +
  10. +
  11. +

    Wait for the message to appear and check if it is as it should be

    +
  12. +
  13. +

    Go back to Dynamic Loading page

    +
  14. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+/**
+ * Verifies both "Dynamic Loading" examples: after clicking Start, the
+ * "Hello World!" text must appear once the loading bar disappears.
+ */
+public class DynamicLoadingTest extends TheInternetBaseTest {
+
+    // Maximum number of seconds to wait for the loading bar to finish.
+    private static final int    EXAMPLE_WAITING_TIME = 30;
+    private static final String EXAMPLE_TEXT         = "Hello World!";
+
+    private static DynamicLoadingPage dynamicLoadingPage;
+
+    /** Opens the Dynamic Loading page once before any test runs. */
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        dynamicLoadingPage = shouldTheInternetPageBeOpened().clickDynamicLoadingLink();
+    }
+
+    // Overrides the per-test hook inherited from TheInternetBaseTest —
+    // presumably invoked before each @Test; confirm in the base class.
+    @Override
+    public void setUp() {
+
+        logStep("Verify if Dynamic Loading page is opened");
+        assertTrue("Unable to open Dynamic Loading page", dynamicLoadingPage.isLoaded());
+
+        logStep("Verify if dynamic loading message is visible");
+        assertTrue("Dynamic loading message is invisible",
+                dynamicLoadingPage.isDynamicLoadingMessageVisible());
+    }
+
+    // NOTE(review): "AterRun" looks like a typo for "AfterRun"; renaming would
+    // change the published test name, so it is left untouched here.
+    @Test
+    public void shouldExampleTextBeDisplayedAterRunExampleOne() {
+        logStep("Click Example 1 link");
+        dynamicLoadingPage.clickExampleOneLink();
+
+        logStep("Verify if Example 1 link opened content");
+        assertTrue("Fail to load Example 1 content", dynamicLoadingPage.isStartButtonVisible());
+
+        logStep("Click Start button");
+        dynamicLoadingPage.clickStartButton();
+
+        logStep("Verify if expected text is displayed on the screen");
+        assertEquals("Fail to display example text", EXAMPLE_TEXT,
+                dynamicLoadingPage.getExampleOneDynamicText(EXAMPLE_WAITING_TIME));
+    }
+
+    @Test
+    public void shouldExampleTextBeDisplayedAterRunExampleTwo() {
+        logStep("Click Example 2 link");
+        dynamicLoadingPage.clickExampleTwoLink();
+
+        logStep("Verify if Example 2 link opened content");
+        assertTrue("Fail to load Example 2 content", dynamicLoadingPage.isStartButtonVisible());
+
+        logStep("Click Start button");
+        dynamicLoadingPage.clickStartButton();
+
+        logStep("Verify if expected text is displayed on the screen");
+        assertEquals("Fail to display example text", EXAMPLE_TEXT,
+                dynamicLoadingPage.getExampleTwoDynamicText(EXAMPLE_WAITING_TIME));
+    }
+
+    // Overrides the per-test teardown hook from the base class.
+    @Override
+    public void tearDown() {
+        logStep("Click back to reset Dynamic Loading page");
+        BasePage.navigateBack();
+    }
+
+}
+
+
+
+

Perform both cases running Test Class as JUnit Test.

+
+
+

WARNING: In this example, there is a visible loading bar signaling that content is loading. On many websites, elements are created by scripts without any clear indication. This may cause problems with test stability. When your tests aren’t finding page elements, try to add wait functions with a short timeout.

+
+
+
+example32 +
+
+
+

This case shows how to perform mouse actions and test modal windows.

+
+
+

After you move the mouse cursor out of the website, you should see a new window appearing:

+
+
+
+example33 +
+
+
+

Check in the browser’s developer mode if this window exists in Page DOM

+
+
+
+example34 +
+
+
+

Before you move the mouse out, the window exists, but it’s not displayed.

+
+
+

When the mouse is moved, JavaScript changes the display attribute. It also hides the window after clicking "Close".

+
+
+
+example35 +
+
+
+
+== Page Class +
+
+
public class ExitIntentPage extends BasePage {
+
+    // Inline "style" attribute values that the page's script toggles on the modal.
+    private static final String MODAL_WINDOW_HIDDEN          = "display: none;";
+    private static final String MODAL_WINDOW_DISPLAYED       = "display: block;";
+    // Fixed typo in constant name (was MODAL_WINDOW_STYLE_ATTRIBUTTE);
+    // the constant is private, so no external callers are affected.
+    private static final String MODAL_WINDOW_STYLE_ATTRIBUTE = "style";
+
+    private static final By selectorModalWindow            = By.cssSelector("div#ouibounce-modal");
+    private static final By selectorExitIntentText         = By.cssSelector("div#content h3");
+    private static final By selectorModalWindowTitle       = By.cssSelector("h3");
+    private static final By selectorModalWindowCloseButton = By.cssSelector("div.modal-footer > p");
+
+    /** Checks that the current URL points at the Exit Intent subpage. */
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.EXIT_INTENT.getValue());
+    }
+
+    /** Navigates to the Exit Intent subpage and waits for it to load. */
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Exit Intent' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.EXIT_INTENT.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    /** Returns the actual page title reported by the browser. */
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns information if exit intent message is visible or not.
+     *
+     * @return true if exit intent message was found on web page.
+     */
+    public boolean isIntentMessageVisible() {
+        return getDriver().findElementDynamic(selectorExitIntentText)
+                .isDisplayed();
+    }
+
+    /**
+     * Returns information if modal window is hidden.
+     *
+     * @return true if modal window is hidden.
+     */
+    public boolean isModalWindowHidden() {
+        return getDriver().findElementDynamic(selectorModalWindow)
+                .getAttribute(MODAL_WINDOW_STYLE_ATTRIBUTE)
+                .equals(MODAL_WINDOW_HIDDEN);
+    }
+
+    /**
+     * Returns information if modal window is shown on web page.
+     *
+     * @return true if modal window is displayed.
+     */
+    public boolean isModalWindowVisible() {
+        return getDriver().findElementDynamic(selectorModalWindow)
+                .getAttribute(MODAL_WINDOW_STYLE_ATTRIBUTE)
+                .equals(MODAL_WINDOW_DISPLAYED);
+    }
+
+    /**
+     * Returns information if modal window title is shown and correct.
+     *
+     * @param expectedValue String representing expected value of modal window's title.
+     * @return true if modal window's title is equal to expected value.
+     */
+    public boolean verifyModalWindowTitle(String expectedValue) {
+        return getDriver().elementLabel(new ByChained(selectorModalWindow,
+                selectorModalWindowTitle))
+                .getText()
+                .equals(expectedValue);
+    }
+
+    /**
+     * Closes modal window by pressing 'close' button.
+     */
+    public void closeModalWindow() {
+        getDriver().elementButton(new ByChained(selectorModalWindow,
+                selectorModalWindowCloseButton))
+                .click();
+    }
+
+    /**
+     * Moves mouse pointer to the top middle of screen, then to the centre of screen and
+     * again to the top.
+     * <p>
+     * This move simulates leaving the viewport and encourages the modal to show up. There is
+     * java.awt.Robot used
+     * to move mouse pointer out of the viewport. There are timeouts used to let the browser detect
+     * mouse move.
+     * </p>
+     *
+     * @see java.awt.Robot
+     */
+    public void moveMouseOutOfViewport() {
+        Robot robot;
+        Dimension screenSize = getDriver().manage()
+                .window()
+                .getSize();
+        int halfWidth = new BigDecimal(screenSize.getWidth() / 2).intValue();
+        int halfHeight = new BigDecimal(screenSize.getHeight() / 2).intValue();
+
+        try {
+            robot = new Robot();
+            // Top edge -> centre -> top edge; the implicit waits between moves
+            // give the browser time to register each pointer position.
+            robot.mouseMove(halfWidth, 1);
+            getDriver().manage()
+                    .timeouts()
+                    .implicitlyWait(1, TimeUnit.SECONDS);
+            robot.mouseMove(halfWidth, halfHeight);
+            getDriver().manage()
+                    .timeouts()
+                    .implicitlyWait(1, TimeUnit.SECONDS);
+            robot.mouseMove(halfWidth, 1);
+        } catch (AWTException e) {
+            BFLogger.logError("Unable to connect with remote mouse");
+            e.printStackTrace();
+        }
+    }
+}
+
+
+
+
+== Attributes +
+

Elements on pages have attributes like "id", "class", "name", "style" etc. In order to check them, use method getAttribute(String name). In this case attribute "style" determinates if the element is displayed.

+
+
+
+== Robot +
+

Robot class can perform mouse movement. Method mouseMove(int x, int y) moves the remote mouse to given coordinates.

+
+
+
+== Manage Timeouts +
+

manage().timeouts() methods allow you to change WebDriver timeout values such as:

+
+
+
    +
  • +

    pageLoadTimeout(long time, TimeUnit unit) - the amount of time to wait for a page to load before throwing an exception

    +
  • +
  • +

    setScriptTimeout(long time, TimeUnit unit) - the amount of time to wait for finish execution of a script before throwing an exception

    +
  • +
  • +

    implicitlyWait(long time, TimeUnit unit) - the amount of time the driver should wait when searching for an element if it is not immediately present. After that time, it throws an exception.

    +
  • +
+
+
+

Changing timeouts can improve test stability but can also make them run slower.

+
+
+
+== Test Class +
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Exit Intent link and load subpage

    +
  4. +
  5. +

    Check if the page is loaded and "Exit Intent" message is visible

    +
  6. +
  7. +

    Verify if Modal Window is hidden

    +
  8. +
  9. +

    Move mouse out of the viewport

    +
  10. +
  11. +

    Check if Modal Window is visible

    +
  12. +
  13. +

    Verify if Modal Window title is correct

    +
  14. +
  15. +

    Click 'close' button

    +
  16. +
  17. +

    Again verify if Modal Window is hidden

    +
  18. +
+
+
+
+
@Category({ TestsLocal.class, TestsNONParallel.class })
+/**
+ * Verifies the "Exit Intent" modal window: hidden at first, shown after the
+ * mouse pointer leaves the viewport, titled as expected, and hidden again
+ * after being closed. Categorised as local/non-parallel — presumably because
+ * the page object moves the real OS mouse pointer; confirm in the framework.
+ */
+public class ExitIntentTest extends TheInternetBaseTest {
+
+    private static final String MODAL_WINDOW_TITLE = "This is a modal window";
+
+    private static ExitIntentPage exitIntentPage;
+
+    /** Opens the Exit Intent page once for all tests and verifies it loaded. */
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        exitIntentPage = shouldTheInternetPageBeOpened().clickExitIntentLink();
+
+        logStep("Verify if Exit Intent page is opened");
+        assertTrue("Unable to open Exit Intent page", exitIntentPage.isLoaded());
+
+        logStep("Verify if exit intent message is visible");
+        assertTrue("Exit intent message is not visible", exitIntentPage.isIntentMessageVisible());
+    }
+
+    @Test
+    public void shouldModalWindowAppearWhenMouseMovedOutOfViewportTest() {
+
+        logStep("Verify if modal window is hidden");
+        assertTrue("Fail to hide modal window", exitIntentPage.isModalWindowHidden());
+
+        logStep("Move mouse pointer out of viewport");
+        exitIntentPage.moveMouseOutOfViewport();
+
+        logStep("Verify if modal window showed up");
+        assertTrue("Fail to show up modal window", exitIntentPage.isModalWindowVisible());
+
+        logStep("Verify if modal window title displays properly");
+        // Upper-cased comparison — presumably the page renders the title in
+        // capitals; confirm against the live page.
+        assertTrue("Fail to display modal window's title",
+                exitIntentPage.verifyModalWindowTitle(MODAL_WINDOW_TITLE.toUpperCase()));
+
+        logStep("Close modal window");
+        exitIntentPage.closeModalWindow();
+
+        logStep("Verify if modal window is hidden again");
+        assertTrue("Fail to hide modal window", exitIntentPage.isModalWindowHidden());
+    }
+}
+
+
+
+

Remember not to move mouse manually during test execution.

+
+
+
+example36 +
+
+
+

This example shows how to check if file downloads properly.

+
+
+

After clicking on one of these links, a specific file should be downloaded to your computer.

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click on the File Download link and open subpage

    +
  4. +
  5. +

    Click on "some-file.txt" download link and download file

    +
  6. +
  7. +

    Check if the file exists in the appropriate folder

    +
  8. +
  9. +

    Delete the file

    +
  10. +
  11. +

    Check if the file doesn’t exist in the folder

    +
  12. +
+
+
+
+== Page Class +
+
+
public class FileDownloadPage extends BasePage {
+
+    private static final By selectorSomeFileTxt = By.cssSelector("a[href*=some-file]");
+
+    private final String DOWNLOAD_DIR = System.getProperty("java.io.tmpdir");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DOWNLOAD.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'File Downloader' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DOWNLOAD.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Verifies if the chosen file is already downloaded and if not, downloads it .
+     * Throws RuntimeException otherwise.
+     *
+     * @return Downloaded file
+     */
+    public File downloadTextFile() {
+        String nameOfDownloadFile = getNameOfDownloadFile();
+        File fileToDownload = new File(DOWNLOAD_DIR + nameOfDownloadFile);
+
+        if (fileToDownload.exists()) {
+            throw new RuntimeException("The file that you want to download already exists. "
+                    + "Please remove it manually. Path to the file: " + fileToDownload.getPath());
+        }
+
+        getDriver().elementButton(selectorSomeFileTxt)
+                .click();
+
+        waitForFileDownload(2000, fileToDownload);
+        return fileToDownload;
+    }
+
+    private void waitForFileDownload(int totalTimeoutInMillis, File expectedFile) {
+        FluentWait<WebDriver> wait = new FluentWait<WebDriver>(getDriver())
+                .withTimeout(totalTimeoutInMillis, TimeUnit.MILLISECONDS)
+                .pollingEvery(200, TimeUnit.MILLISECONDS);
+
+        wait.until((WebDriver wd) -> expectedFile.exists());
+    }
+
+    private String getNameOfDownloadFile() {
+        String urlToDownload = getDriver().findElementDynamic(selectorSomeFileTxt)
+                .getAttribute("href");
+        String[] urlHierachy = urlToDownload.split("/");
+        return urlHierachy[urlHierachy.length - 1];
+    }
+}
+
+
+
+

Use FluentWait class and create an expected condition using a lambda expression to wait until the file downloads.

+
+
+

To perform operations on files, use java File class. To get a file name, find it in download URL.

+
+
+
+== Test Class +
+
+
@Category({ TestsLocal.class, TestsNONParallel.class })
+public class FileDownloadTest extends TheInternetBaseTest {
+
+    private static FileDownloadPage fileDownloadPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        fileDownloadPage = shouldTheInternetPageBeOpened().clickFileDownloadLink();
+
+        logStep("Verify if File Download page is opened");
+        assertTrue("Unable to open File Download page", fileDownloadPage.isLoaded());
+    }
+
+    @Test
+    public void shouldfileBeDownloaded() {
+
+        logStep("Download the some-file.txt");
+        File downloadedFile = fileDownloadPage.downloadTextFile();
+
+        logStep("Verify if downloaded file exists");
+        assertTrue("Downloaded file does not exist", downloadedFile.exists());
+
+        logStep("Remove downloaded file");
+        downloadedFile.delete();
+
+        logStep("Verify if downloaded file has been removed");
+        assertFalse("Downloaded file still exists", downloadedFile.exists());
+    }
+}
+
+
+
+
+example37 +
+
+
+

This case shows how to pass through the standard authentication page.

+
+
+

When you enter the correct credentials, you should see the next page:

+
+
+
+example38 +
+
+
+

If user data is wrong, an appropriate message appears:

+
+
+
+example39 +
+
+
+
+== Page Class +
+
+
public class FormAuthenticationPage extends BasePage {
+
+    private final static By selectorInputUsername     = By.cssSelector("#username");
+    private final static By selectorInputUserPassword = By.cssSelector("#password");
+    private final static By selectorLoginMessage      = By.cssSelector("#flash");
+    private final static By selectorLoginButton       = By.cssSelector("#login > button > i");
+    private final static By selectorLogoutButton      = By.cssSelector("#content > div > a ");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.LOGIN.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Login Page' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() + PageSubURLsProjectYEnum.LOGIN.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Sets user name to designated form's field.
+     *
+     * @param username String representing a user's name
+     * @return FormAuthenticationPage object with user name set to the given one
+     */
+    public FormAuthenticationPage setUsername(String username) {
+        InputTextElement elementInputUsername = new InputTextElement(selectorInputUsername);
+        elementInputUsername.clearInputText();
+        elementInputUsername.setInputText(username);
+        return this;
+    }
+
+    /**
+     * Sets user password to designated form's field.
+     *
+     * @param userPassword String representing a user's password
+     * @return FormAuthenticationPage object with user's password set to the given one
+     */
+    public FormAuthenticationPage setUserPassword(String userPassword) {
+        InputTextElement elementInputPassword = new InputTextElement(selectorInputUserPassword);
+        elementInputPassword.clearInputText();
+        elementInputPassword.setInputText(userPassword);
+        return this;
+    }
+
+    /**
+     * Returns login message.
+     *
+     * @return String object representing the message returned after login operation is performed
+     */
+    public String getLoginMessageText() {
+        return new LabelElement(selectorLoginMessage).getText();
+    }
+
+    /**
+     * Clicks 'Login' button.
+     */
+    public void clickLoginButton() {
+        new Button(selectorLoginButton).click();
+    }
+
+    /**
+     * Clicks 'Logout' button.
+     */
+    public void clickLogoutButton() {
+        new Button(selectorLogoutButton).click();
+    }
+}
+
+
+
+
+== == InputTextElement +
+

Use methods from this class to perform actions on text fields:

+
+
+
    +
  • +

    clearInputText() - remove all text from selected input field

    +
  • +
  • +

    setInputText(String text) - enter given text

    +
  • +
+
+
+
+== == LabelElement +
+
    +
  • +

    String getText() method returns visible text from label

    +
  • +
+
+
+
+== TestClass +
+

Prepare six test cases:

+
+
+
    +
  1. +

    Try to login with empty user data and check if the error message appears

    +
  2. +
  3. +

    Try to login with empty username and valid password and check if the error message appears

    +
  4. +
  5. +

    Try to login with a valid username and empty password and check if the error message appears

    +
  6. +
  7. +

    Try to login with invalid username and invalid password and check if the error message appears

    +
  8. +
  9. +

    Try to login with a valid username and valid password and check if success login message appears, then log out

    +
  10. +
  11. +

    Try to login with a valid username and valid password and check if success login message appears, then log out and check if success logout message is displayed

    +
  12. +
+
+
+

Before all tests: Open The Internet Main Page

+
+
+

Before each case: Click on the Form Authentication link and open login page

+
+
+

After each case: Go back to The Internet Main Page

+
+
+
+
@Category({ TestsLocal.class, TestsNONParallel.class })
+public class FormAuthenticationTest extends TheInternetBaseTest {
+
+    private static FormAuthenticationPage formAuthenticationPage;
+
+    private String errorUsernameMessage = "Your username is invalid!\n" + "×";
+    private String errorPasswordMessage = "Your password is invalid!\n" + "×";
+    private String loginMessage         = "You logged into a secure area!\n" + "×";
+    private String logoutMessage        = "You logged out of the secure area!\n" + "×";
+    private String emptyUsername        = "";
+    private String emptyUserPassword    = "";
+    private String validUsername        = "tomsmith";
+    private String validPassword        = "SuperSecretPassword!";
+    private String randomUsername       = UUID.randomUUID()
+            .toString();
+    private String randomUserPassword   = UUID.randomUUID()
+            .toString();
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        logStep("Open the Url http://the-internet.herokuapp.com/");
+        theInternetPage = new TheInternetPage();
+        theInternetPage.load();
+
+        logStep("Verify if Url http://the-internet.herokuapp.com/ is opened");
+        assertTrue("Unable to load The Internet Page", theInternetPage.isLoaded());
+    }
+
+    @Override
+    public void setUp() {
+        logStep("Click subpage link");
+        formAuthenticationPage = theInternetPage.clickFormAuthenticationLink();
+
+        logStep("Verify if subpage is opened");
+        assertTrue("The Internet subpage: FormAuthenticationPage was not open", formAuthenticationPage.isLoaded());
+    }
+
+    @Test
+    public void shouldErrorMessageBeDisplayedWhenUserLogsWithEmptyData() {
+        logStep("Log user with empty username and password");
+        formAuthenticationPage.setUsername(emptyUsername)
+                .setUserPassword(emptyUserPassword)
+                .clickLoginButton();
+        assertEquals("Unexpectedly user logged in with empty data", errorUsernameMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Test
+    public void shouldErrorMessageBeDisplayedWhenUserLogsWithEmptyUsernameAndValidPassword() {
+        logStep("Log user with empty username and valid password");
+        formAuthenticationPage.setUsername(emptyUsername)
+                .setUserPassword(validPassword)
+                .clickLoginButton();
+        assertEquals("Unexpectedly user logged in with empty username", errorUsernameMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Test
+    public void shouldErrorMessageBeDisplayedWhenUserLogsWithValidUsernameAndEmptyPassword() {
+        logStep("Log user with valid username and empty password");
+        formAuthenticationPage.setUsername(validUsername)
+                .setUserPassword(emptyUserPassword)
+                .clickLoginButton();
+        assertEquals("Unexpectedly user logged in with empty password", errorPasswordMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Test
+    public void shouldErrorMessageBeDisplayedWhenUserLogsWithInvalidUsernameAndInvalidPassword() {
+        logStep("Log user with invalid username and invalid password");
+        formAuthenticationPage.setUsername(randomUsername)
+                .setUserPassword(randomUserPassword)
+                .clickLoginButton();
+        assertEquals("Unexpectedly user logged in with random credentials", errorUsernameMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Test
+    public void shouldUserLogInWithValidCredentials() {
+        logStep("Log user with valid username and valid password");
+        formAuthenticationPage.setUsername(validUsername)
+                .setUserPassword(validPassword)
+                .clickLoginButton();
+        assertEquals("Unable to login user with valid credentials", loginMessage,
+                formAuthenticationPage.getLoginMessageText());
+        logStep("Log out user");
+        formAuthenticationPage.clickLogoutButton();
+    }
+
+    @Test
+    public void shouldUserLogOutAfterProperLogInAndClickLogoutButon() {
+        logStep("Log user with valid username and valid password");
+        formAuthenticationPage.setUsername(validUsername)
+                .setUserPassword(validPassword)
+                .clickLoginButton();
+        assertEquals("Unable to login user with valid credentials", loginMessage,
+                formAuthenticationPage.getLoginMessageText());
+        logStep("Log out user");
+        formAuthenticationPage.clickLogoutButton();
+        assertEquals("User cannot log out after prper log in", logoutMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Override
+    public void tearDown() {
+        logStep("Navigate back to The-Internet page");
+        theInternetPage.load();
+    }
+}
+
+
+
+

After running Test Class, cases might be performed in a different order.

+
+
+
+example40 +
+
+
+

This example shows how to approach elements dynamically appearing after the user’s action.

+
+
+

Move the mouse over an image to see the additional label.

+
+
+
+example41 +
+
+
+

Labels exist in the page DOM all the time, but their display attributes change. In this case, there is no JavaScript involved: the elements' visibility is managed entirely by CSS.

+
+
+
+example42 +
+
+
+
+== Page Class +
+
+
public class HoversPage extends BasePage {
+
+    private final static By selectorImages = By.cssSelector("div.figure > img");
+    private final static By selectorNames  = By.cssSelector("div.figcaption h5");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.HOVERS.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Hovers' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.HOVERS.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Moves mouse pointer over an image with given index.
+     *
+     * @param index An index of the picture, where mouse pointer should be moved
+     */
+    public void hoverOverAvatar(int index) {
+        Actions action = new Actions(getDriver());
+        WebElement avatarImage = getDriver().findElementDynamics(selectorImages)
+                .get(index);
+        action.moveToElement(avatarImage)
+                .perform();
+    }
+
+    /**
+     * Returns the information displayed under a picture with given index.
+     *
+     * @param index An index of the picture, where the information should be read
+     * @return String object representing picture's information
+     */
+    public String getAvatarsInformation(int index) {
+        return getDriver().findElementDynamics(selectorNames)
+                .get(index)
+                .getText();
+    }
+}
+
+
+
+
+== == Actions +
+

The Actions class contains methods used to execute basic user interactions, such as moving the mouse, clicking, or sending keys. An action, or a series of actions, is performed only after calling the perform() method.

+
+
+
+== Test Class +
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Go to Hovers page

    +
  4. +
  5. +

    Move mouse over random image

    +
  6. +
  7. +

    Check if displayed text is equal to expected.

    +
  8. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class HoversTest extends TheInternetBaseTest {
+
+    private static HoversPage    hoversPage;
+    private final String        names[]    = { "name: user1", "name: user2", "name: user3" };
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        hoversPage = shouldTheInternetPageBeOpened().clickHoversLink();
+
+        logStep("Verify if Hovers page is opened");
+        assertTrue("Unable to open Hovers page", hoversPage.isLoaded());
+    }
+
+    @Test
+    public void shouldProperInformationBeDisplayedWhenMousePointerHoveredOverRandomElement() {
+        logStep("Hover mouse pointer over random element");
+        int randomIndex = new Random().nextInt(names.length);
+        hoversPage.hoverOverAvatar(randomIndex);
+        assertEquals("Picture's information is different than expected", names[randomIndex],
+                hoversPage.getAvatarsInformation(randomIndex));
+    }
+}
+
+
+
+

Because in this case the tested content is being chosen randomly, each test run could check a different element.

+
+
+
+example43 +
+
+
+

This case shows how to test pop-up JS alerts.

+
+
+

After clicking one of the buttons, an adequate alert should appear.

+
+
+
+example44 +
+
+
+

Performed action will be displayed under "Result" label.

+
+
+

In developer mode, you can view JavaScript which creates alerts.

+
+
+
+example45 +
+
+
+
+== Page Class +
+
+
public class JavaScriptAlertsPage extends BasePage {
+
+    private static final By selectorAlertButton   = By.cssSelector("button[onclick*=jsAlert]");
+    private static final By selectorConfirmButton = By.cssSelector("button[onclick*=jsConfirm]");
+    private static final By selectorPromptButton  = By.cssSelector("button[onclick*=jsPrompt]");
+    private static final By resultLabelSelector   = By.cssSelector("p#result");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.JAVASCRIPT_ALERTS.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'JavaScript Alerts' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.JAVASCRIPT_ALERTS.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Clicks 'JS alert' button.
+     */
+    public void clickAlertButton() {
+        new Button(selectorAlertButton).click();
+        WebDriverWait wait = new WebDriverWait(getDriver(), 2);
+        wait.until(ExpectedConditions.alertIsPresent());
+    }
+
+    /**
+     * Clicks 'JS confirm' button.
+     */
+    public void clickConfirmButton() {
+        new Button(selectorConfirmButton).click();
+        WebDriverWait wait = new WebDriverWait(getDriver(), 2);
+        wait.until(ExpectedConditions.alertIsPresent());
+    }
+
+    /**
+     * Clicks 'JS prompt' button.
+     */
+    public void clickPromptButton() {
+        new Button(selectorPromptButton).click();
+        WebDriverWait wait = new WebDriverWait(getDriver(), 2);
+        wait.until(ExpectedConditions.alertIsPresent());
+    }
+
+    /**
+     * Returns message displayed by popup.
+     *
+     * @return String object representing message displayed by popup
+     */
+    public String readResultLabel() {
+        return new LabelElement(resultLabelSelector).getText();
+    }
+
+    /**
+     * Clicks alert's 'OK' button.
+     */
+    public void clickAlertAccept() {
+        getDriver().switchTo()
+                .alert()
+                .accept();
+    }
+
+    /**
+     * Clicks alert's 'Cancel' button.
+     */
+    public void clickAlertDismiss() {
+        getDriver().switchTo()
+                .alert()
+                .dismiss();
+    }
+
+    /**
+     * Types text into alert's text field.
+     *
+     * @param text String object sent into alert's text field
+     */
+    public void writeTextInAlert(String text) {
+        getDriver().switchTo()
+                .alert()
+                .sendKeys(text);
+    }
+}
+
+
+
+
+== == alert() +
+

Using switchTo() method you can change processed content. switchTo().alert() allows performing actions on appearing alerts such as accepting, dismissing or entering keys.

+
+
+
+== Test Class +
+

Before all tests: Open The Internet Main Page and go to JavaScript Alert page

+
+
+
    +
  1. +

    Click JS Alert button, accept alert and check if Result message returns performed an action

    +
  2. +
  3. +

    Click JS Confirm button, accept alert and check if Result message returns performed action

    +
  4. +
  5. +

    Click JS Confirm button, dismiss alert and check if Result message returns performed action

    +
  6. +
  7. +

    Click JS Prompt button, write random text, accept alert and check if Result message returns performed action with written text

    +
  8. +
  9. +

    Click JS Prompt button, dismiss the alert and check if Result message returns performed action

    +
  10. +
+
+
+

After each case: Refresh Page

+
+
+

After all tests: Navigate back to The Internet Main Page

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class JavaScriptAlertsTest extends TheInternetBaseTest {
+
+    private static JavaScriptAlertsPage javaScriptAlertsPage;
+
+    private final String jsAlertCofirmMessage    = "You successfuly clicked an alert";
+    private final String jsConfirmConfirmMessage = "You clicked: Ok";
+    private final String jsConfirmCancelMessage  = "You clicked: Cancel";
+    private final String jsPromptConfirmMessage  = "You entered: ";
+    private final String jsPromptCancelMessage   = "You entered: null";
+    private final String randomString            = "random";
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        javaScriptAlertsPage = shouldTheInternetPageBeOpened().clickJavaScriptAlertLink();
+
+        logStep("Verify if JavaScript Alerts page is opened");
+        assertTrue("Unable to open JavaScript Alerts page", javaScriptAlertsPage.isLoaded());
+    }
+
+    @AfterClass
+    public static void tearDownAfterClass() {
+        logStep("Navigate back to The-Internet page");
+        BasePage.navigateBack();
+    }
+
+    @Test
+    public void shouldJSAlertCloseWithProperMessageAfterPressOkButton() {
+        logStep("Click Alert button");
+        javaScriptAlertsPage.clickAlertButton();
+
+        logStep("Click 'OK' button on alert");
+        javaScriptAlertsPage.clickAlertAccept();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsAlertCofirmMessage, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Test
+    public void shouldJSConfirmCloseWithProperMessageAfterPressOkButton() {
+        logStep("Click Confirm button");
+        javaScriptAlertsPage.clickConfirmButton();
+
+        logStep("Click 'OK' button on alert");
+        javaScriptAlertsPage.clickAlertAccept();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsConfirmConfirmMessage, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Test
+    public void shouldJSConfirmCloseWithProperMessageAfterPressCancelButton() {
+        logStep("Click Confirm button");
+        javaScriptAlertsPage.clickConfirmButton();
+
+        logStep("Click 'Cancel' button on alert");
+        javaScriptAlertsPage.clickAlertDismiss();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsConfirmCancelMessage, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Test
+    public void shouldJSPromptCloseWithProperMessageAfterPressOKButton() {
+        logStep("Click Prompt button");
+        javaScriptAlertsPage.clickPromptButton();
+
+        logStep("Insert text to alert: " + randomString);
+        javaScriptAlertsPage.writeTextInAlert(randomString);
+
+        logStep("Click 'OK' button on alert");
+        javaScriptAlertsPage.clickAlertAccept();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsPromptConfirmMessage + randomString, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Test
+    public void shouldJSPromptCloseWithProperMessageAfterPressCancelButton() {
+        logStep("Click Prompt button");
+        javaScriptAlertsPage.clickPromptButton();
+
+        logStep("Click 'Cancel' button on alert");
+        javaScriptAlertsPage.clickAlertDismiss();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsPromptCancelMessage, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Override
+    public void tearDown() {
+        logStep("Refresh JavaScriptAlersPage");
+        javaScriptAlertsPage.refreshPage();
+    }
+
+}
+
+
+
+
+example46 +
+
+
+

This simple case shows how to test key presses.

+
+
+

This site uses JavaScript to read the key pressed and display its value.

+
+
+
+example47 +
+
+
+
+== Page Class +
+
+
public class KeyPressesPage extends BasePage {
+
+    private static final By selectorResult = By.cssSelector("#result");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.KEY_PRESS.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Key Presses' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.KEY_PRESS.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Presses given keyboard key.
+     *
+     * @param keyToPress Key to be pressed on keyboard
+     */
+    public void pressKey(String keyToPress) {
+        getAction().sendKeys(keyToPress)
+                .perform();
+    }
+
+    /**
+     * Returns information from web page about pressed keyboard key.
+     *
+     * @return Information from web page about pressed key
+     */
+    public String getPressedKeyInformation() {
+        return getDriver().findElementDynamic(selectorResult)
+                .getText();
+    }
+}
+
+
+
+
+== Test Class +
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Go to Key Presses site

    +
  4. +
  5. +

    Press a key

    +
  6. +
  7. +

    Check if a displayed message contains the pressed key

    +
  8. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class KeyPressesTest extends TheInternetBaseTest {
+
+    private static KeyPressesPage keyPressesPage;
+
+    private final String keyToBePressed  = "Q";
+    private final String expectedMessage = "You entered: Q";
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        keyPressesPage = shouldTheInternetPageBeOpened().clickKeyPressesLink();
+
+        logStep("Verify if Key Presses page is opened");
+        assertTrue("Unable to open Key Presses page", keyPressesPage.isLoaded());
+    }
+
+    @Test
+    public void shouldWebsiteReturnInformationAboutPressedKey() {
+        logStep("Press a keyboard key");
+        keyPressesPage.pressKey(keyToBePressed);
+
+        logStep("Verify if website give valid information about pressed keyboard key");
+        assertEquals("Information about the pressed key is invalid", expectedMessage,
+                keyPressesPage.getPressedKeyInformation());
+    }
+}
+
+
+
+
+example48 +
+
+
+

This simple example shows how to operate on multiple browser tabs.

+
+
+

When you click the link, a new website will be opened in the second tab.

+
+
+
+example49 +
+
+
+
+== Page Class +
+
+
public class MultipleWindowsPage extends BasePage {
+
+    private final static By selectorLink = By.cssSelector("#content > div > a");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.WINDOW.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Opening a new window' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.WINDOW.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Clicks 'click here' link.
+     *
+     * @return NewWindowPage object
+     */
+    public NewWindowPage clickHereLink() {
+        getDriver().findElementDynamic(selectorLink)
+                .click();
+        getDriver().waitForPageLoaded();
+        return new NewWindowPage();
+    }
+}
+
+
+
+

You also need a second page class for New Window Page. Implement only the required methods.

+
+
+
+
public class NewWindowPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.NEW_WINDOW.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'New window' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.NEW_WINDOW.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+}
+
+
+
+
+== Test Class +
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Go to Multiple Windows Page

    +
  4. +
  5. +

    Click the link

    +
  6. +
  7. +

    Check if a new page is opened in the second tab

    +
  8. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class MultipleWindowsTest extends TheInternetBaseTest {
+
+    private static MultipleWindowsPage    multipleWindowsPage;
+    private static NewWindowPage        newWindowPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        multipleWindowsPage = shouldTheInternetPageBeOpened().clickmultipleWindowsLink();
+
+        logStep("Verify if Multiple Windows page is opened");
+        assertTrue("Unable to open Multiple Windows page", multipleWindowsPage.isLoaded());
+    }
+
+    @Test
+    public void verifyIfNewBrowserWindowOpen() {
+        logStep("Click 'Click here' link");
+        newWindowPage = multipleWindowsPage.clickHereLink();
+
+        logStep("Verify if 'New window page' is opened");
+        assertTrue("Unable to open a new browser window", newWindowPage.isLoaded());
+    }
+}
+
+
+
+
+example50 +
+
+
+

This simple case shows how to approach redirecting links.

+
+
+

After clicking on the link, you will be redirected to Status Codes Page.

+
+
+
+example51 +
+
+
+
+== Page Class + +
+
+== == Redirect Link Page +
+
+
public class RedirectLinkPage extends BasePage {
+
+    private static final By selectorRedirectHere = By.cssSelector("a#redirect");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.REDIRECT.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Redirection' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.REDIRECT.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Clicks 'Redirect here' link.
+     *
+     * @return StatusCodesHomePage object
+     */
+    public StatusCodesHomePage clickRedirectHereLink() {
+        new Button(selectorRedirectHere).click();
+        return new StatusCodesHomePage();
+    }
+}
+
+
+
+
+== == Status Codes Page +
+
+
public class StatusCodesHomePage extends BasePage {
+
+    private static final By selectorLink200Code = By.linkText("200");
+    private static final By selectorLink301Code = By.linkText("301");
+    private static final By selectorLink404Code = By.linkText("404");
+    private static final By selectorLink500Code = By.linkText("500");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.STATUS_CODES.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Status Codes' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.STATUS_CODES.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+}
+
+
+
+
+== Test Class +
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Go to Redirection Page

    +
  4. +
  5. +

    Click the link

    +
  6. +
  7. +

    Check if Status Codes Page is loaded

    +
  8. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class RedirectLinkTest extends TheInternetBaseTest {
+
+    private static RedirectLinkPage    redirectLinkPage;
+    private static StatusCodesHomePage statusCodesHomePage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        redirectLinkPage = shouldTheInternetPageBeOpened().clickRedirectLink();
+
+        logStep("Verify if Redirect Link page is opened");
+        assertTrue("Unable to open Redirect Link page", redirectLinkPage.isLoaded());
+    }
+
+    @Test
+    public void shouldUserBeRedirectedToStatusCodePage() {
+        logStep("Click 'Redirect here' link");
+        statusCodesHomePage = redirectLinkPage.clickRedirectHereLink();
+
+        logStep("Verify redirection to Status Code page");
+        assertTrue("User hasn't been redirected to the expected website",
+                statusCodesHomePage.isLoaded());
+    }
+}
+
+
+
+
+example52 +
+
+
+

This case shows how to move horizontal slider.

+
+
+

You can move the slider by dragging it with a mouse or using arrow keys. The page uses a simple script to get slider position and display set value.

+
+
+
+example53 +
+
+
+
+== Page Class +
+
+
public class HorizontalSliderPage extends BasePage {
+
+    private static final By selectorHorizontalSlider = By.cssSelector("div.sliderContainer");
+    private static final By sliderSelector           = By.cssSelector("input");
+    private static final By valueSelector            = By.cssSelector("#range");
+
+    private HorizontalSliderElement horizontalSlider;
+
+    public HorizontalSliderPage() {
+        horizontalSlider = getDriver().elementHorizontalSlider(selectorHorizontalSlider,
+                sliderSelector, valueSelector, BigDecimal.ZERO, new BigDecimal(5),
+                new BigDecimal(0.5));
+    }
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.HORIZONTAL_SLIDER.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Horizontal Slider' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.HORIZONTAL_SLIDER.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Validates if WebElement representing horizontal slider is visible on the page.
+     *
+     * @return true if horizontal slider is visible, false otherwise.
+     */
+    public boolean isElementHorizontalSliderVisible() {
+        return getDriver().elementHorizontalSlider(selectorHorizontalSlider)
+                .isDisplayed();
+    }
+
+    /**
+     * Returns the value of slider's start position.
+     *
+     * @return BigDecimal representing the lowest possible value of slider.
+     */
+    public BigDecimal getStartPosition() {
+        return horizontalSlider.getMinRange();
+    }
+
+    /**
+     * Returns the value of slider's middle position.
+     *
+     * @return BigDecimal representing the average value between start and end position.
+     */
+    public BigDecimal getMiddlePosition() {
+        return horizontalSlider.getMaxRange()
+                .subtract(horizontalSlider.getMinRange())
+                .divide(new BigDecimal(2));
+    }
+
+    /**
+     * Returns the value of slider's end position.
+     *
+     * @return BigDecimal representing the highest possible value of slider.
+     */
+    public BigDecimal getEndPosition() {
+        return horizontalSlider.getMaxRange();
+    }
+
+    /**
+     * Returns current value of slider's position.
+     *
+     * @return BigDecimal representing current value of slider.
+     */
+    public BigDecimal getCurrentPosition() {
+        return horizontalSlider.getCurrentSliderValue();
+    }
+
+    /**
+     * Sets horizontal slider to a given position using one of the available methods: using keyboard
+     * or using mouse move.
+     *
+     * @param position
+     * @param method
+     */
+    public void setSliderPositionTo(BigDecimal position, int method) {
+        horizontalSlider.setSliderPositionTo(position, method);
+    }
+
+    /**
+     * Verifies the correctness of the given position value and rounds it when necessary.
+     *
+     * @param position
+     * @return Correct value of horizontal slider's position.
+     */
+    public BigDecimal verifyAndCorrectPositionValue(BigDecimal position) {
+        return horizontalSlider.verifyAndCorrectPositionValue(position);
+    }
+}
+
+
+
+
+== == Horizontal Slider Element +
+

This class implements methods wich can perform actions on slider:

+
+
+

Create Slider Object using method:

+
+
+
    +
  • +

    getDriver().elementHorizontalSlider(By sliderContainerSelector, By sliderSelector, By valueSelector, BigDecimal minRange, BigDecimal maxRange, BigDecimal step)

    +
  • +
+
+
+

And use:

+
+
+
    +
  • +

    BigDecimal getMaxRange()

    +
  • +
  • +

    BigDecimal getMinRange()

    +
  • +
  • +

    BigDecimal getCurrentSliderValue()

    +
  • +
  • +

    setSliderPositionTo(BigDecimal position, int method) - moves slider to a given position. If the position is not valid, it changes it to the nearest proper value. Second parameter determinates movement method: 0 - Keyboard, 1 - Mouse

    +
  • +
  • +

    BigDecimal verifyAndCorrectPositionValue(BigDecimal position) - returns nearest correct position

    +
  • +
+
+
+
+== Test Class +
+

Before all tests: Open The Internet Main Page

+
+
+

Before each case:

+
+
+
    +
  1. +

    Go to Horizontal Slider Page

    +
  2. +
  3. +

    Check if the slider is visible

    +
  4. +
  5. +

    Save start, middle and end position

    +
  6. +
+
+
+

Case 1 - Moving with the keyboard:

+
+
+
    +
  1. +

    Move slider to start position, and check if the current position equals the beginning value

    +
  2. +
  3. +

    Move the slider to middle position, and check if the current position equals the middle value

    +
  4. +
  5. +

    Move slider to end position, and check if the current position equals the end value

    +
  6. +
  7. +

    Try to move slider before start position, and check if the current position equals the beginning value

    +
  8. +
  9. +

    Try to move slider after end position, and check if the current position equals the end value

    +
  10. +
  11. +

    Try to move the slider to an improperly defined position between start and middle, and check if the current position equals the corrected value

    +
  12. +
  13. +

    Try to move the slider to an improperly defined random position, and check if the current position equals the corrected value

    +
  14. +
  15. +

    Move the slider back to start position, and check if the current position equals the beginning value

    +
  16. +
+
+
+

Case 2 - Moving with a mouse: Repeat each Case 1 step using a mouse instead of keyboard

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class SliderTest extends TheInternetBaseTest {
+
+    private static HorizontalSliderPage horizontalSliderPage;
+
+    BigDecimal startPosition;
+    BigDecimal middlePosition;
+    BigDecimal endPosition;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        logStep("Open the Url http://the-internet.herokuapp.com/");
+        theInternetPage = new TheInternetPage();
+        theInternetPage.load();
+
+        logStep("Verify if Url http://the-internet.herokuapp.com/ is opened");
+        assertTrue("Unable to load The Internet Page", theInternetPage.isLoaded());
+    }
+
+    @Override
+    public void setUp() {
+        logStep("Click Horizontal Slider link");
+        horizontalSliderPage = theInternetPage.clickHorizontalSliderLink();
+
+        logStep("Verify if Horizontal Slider page is opened");
+        assertTrue("Unable to load Horizontal Slider page", horizontalSliderPage.isLoaded());
+
+        logStep("Verify if horizontal slider element is visible");
+        assertTrue("Horizontal slider is not visible",
+                horizontalSliderPage.isElementHorizontalSliderVisible());
+
+        startPosition = horizontalSliderPage.getStartPosition();
+        middlePosition = horizontalSliderPage.getMiddlePosition();
+        endPosition = horizontalSliderPage.getEndPosition();
+    }
+
+    @Test
+    public void shouldHorizontalSliderMoveWhenKeyboardArrowButtonsArePressed() {
+        BigDecimal position;
+        logStep("Move slider to start position: " + startPosition);
+        horizontalSliderPage.setSliderPositionTo(startPosition, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider to middle position: " + middlePosition);
+        horizontalSliderPage.setSliderPositionTo(middlePosition, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(middlePosition),
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider to end position: " + endPosition);
+        horizontalSliderPage.setSliderPositionTo(endPosition, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", endPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = startPosition.subtract(BigDecimal.ONE);
+        logStep("Move slider to position before start position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = endPosition.add(BigDecimal.ONE);
+        logStep("Move slider to position after end position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", endPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = middlePosition.divide(new BigDecimal(2));
+        logStep("Move slider to improperly defined position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(position),
+                horizontalSliderPage.getCurrentPosition());
+
+        position = new BigDecimal(new BigInteger("233234"), 5);
+        logStep("Move slider to improperly defined random position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(position),
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider back to start position: " + startPosition);
+        horizontalSliderPage.setSliderPositionTo(startPosition, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+    }
+
+    @Test
+    public void shouldHorizontalSliderMoveWhenMouseButtonIsPressedAndMouseIsMoving() {
+        BigDecimal position;
+        logStep("Move slider to start position: " + startPosition);
+        horizontalSliderPage.setSliderPositionTo(startPosition, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider to middle position: " + middlePosition);
+        horizontalSliderPage.setSliderPositionTo(middlePosition, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(middlePosition),
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider to end position: " + endPosition);
+        horizontalSliderPage.setSliderPositionTo(endPosition, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", endPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = startPosition.subtract(BigDecimal.ONE);
+        logStep("Move slider to position before start position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = endPosition.add(BigDecimal.ONE);
+        logStep("Move slider to position after end position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", endPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = middlePosition.divide(new BigDecimal(2));
+        logStep("Move slider to improperly defined position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(position),
+                horizontalSliderPage.getCurrentPosition());
+
+        position = new BigDecimal(new BigInteger("212348"), 5);
+        logStep("Move slider to improperly defined random position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(position),
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider back to start position: " + startPosition);
+        horizontalSliderPage.setSliderPositionTo(startPosition, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+    }
+}
+
+
+
+
+example54 +
+
+
+

This example shows how to sort and read data from tables.

+
+
+

After clicking on a column header, the data will be sorted descending and after another click sorted ascending by selected attribute. Watch how both tables' content changes on page DOM. Sorting is performed by JavaScript functions.

+
+
+
+example55 +
+
+
+
+== Page Class +
+
+
public class SortableDataTablesPage extends BasePage {
+
+    private static final By selectorTable  = By.cssSelector("table.tablesorter");
+    private static final By selectorHeader = By.cssSelector("th");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.SORTABLE_DATA_TABLES.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Data Tables' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.SORTABLE_DATA_TABLES.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Sorts data in given column using ascending order.
+     *
+     * @param columnNumber The number of column where data should be sorted
+     * @param tableNumber  The number of table where data should be sorted
+     */
+    public void sortColumnAscending(int columnNumber, int tableNumber) {
+        WebElement header = this.getTableHeaders(columnNumber, tableNumber);
+        String className = header.getAttribute("class");
+        if (className.contains("headerSortUp") || !className.contains("headerSortDown")) {
+            header.click();
+        }
+    }
+
+    /**
+     * Sorts data in given column using descending order.
+     *
+     * @param columnNumber The number of the column where data should be sorted
+     * @param tableNumber  The number of the table where data should be sorted
+     */
+    public void sortColumnDescending(int columnNumber, int tableNumber) {
+        WebElement header = this.getTableHeaders(columnNumber, tableNumber);
+        String className = header.getAttribute("class");
+        if (!className.contains("headerSortUp")) {
+            header.click();
+            if (!className.contains("headerSortDown")) {
+                header.click();
+            }
+        }
+    }
+
+    /**
+     * Return given column values from chosen table.
+     *
+     * @param columnNumber The number of the column the data should be retrieved from
+     * @param tableNumber  The number of the table the data should be retrieved from
+     * @return list of values from given column
+     */
+    public List<String> getColumnValues(int columnNumber, int tableNumber) {
+        WebElement table = getTable(tableNumber);
+        return JsoupHelper.findTexts(table, By.cssSelector("tr > td:nth-child(" + (columnNumber + 1)
+                + ")"));
+    }
+
+    /**
+     * Returns column's class name.
+     *
+     * @param columnNumber The number of the column to get class number from
+     * @param tableNumber  The number of the table to get column class name from
+     * @return String object representing column's class name
+     */
+    public String readColumnClass(int columnNumber, int tableNumber) {
+        return this.getTableHeaders(columnNumber, tableNumber)
+                .getAttribute("class");
+    }
+
+    private WebElement getTable(int tableNumber) {
+        return new ListElements(selectorTable).getList()
+                .get(tableNumber);
+    }
+
+    private WebElement getTableHeaders(int columnNumber, int tableNumber) {
+        return getTable(tableNumber).findElements(selectorHeader)
+                .get(columnNumber);
+    }
+}
+
+
+
+
+== == Finding values +
+

Using proper selectors, save elements such as tables and their columns' headers as Web Element Lists. Afterwards, you can get the desired element finding it by index (e. g. table or column number). To get column values, use JsoupHelper and to check if the column is sorted get its class attribute.

+
+
+
+== Test Class +
+

Before all tests: Open The Internet Main Page

+
+
+

Before each case: Go to Sortable Data Tables Page

+
+
+

Case 1:

+
+
+
    +
  1. +

    Choose a random table

    +
  2. +
  3. +

    Sort first column "Last Name" in ascending order

    +
  4. +
  5. +

    Check if column header class contains "headerSortDown"

    +
  6. +
  7. +

    Save column content to the List

    +
  8. +
  9. +

    Create List copy and sort it

    +
  10. +
  11. +

    Compare sorted values and values from the table

    +
  12. +
+
+
+

Case 2:

+
+
+
    +
  1. +

    Choose a random table

    +
  2. +
  3. +

    Sort second column "First Name" in descending order

    +
  4. +
  5. +

    Check if column header class contains "headerSortUp"

    +
  6. +
  7. +

    Save column content to the List

    +
  8. +
  9. +

    Create List copy and sort it then reverse it

    +
  10. +
  11. +

    Compare reversed sorted values and values from the table

    +
  12. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class SortableDataTablesTest extends TheInternetBaseTest {
+
+    private static SortableDataTablesPage sortableDataTablesPage;
+
+    private List<String> actualValues;
+    private List<String> expectedValues;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        logStep("Open the Url http://the-internet.herokuapp.com/");
+        theInternetPage = new TheInternetPage();
+        theInternetPage.load();
+
+        logStep("Verify if Url http://the-internet.herokuapp.com/ is opened");
+        assertTrue("Unable to load The Internet Page", theInternetPage.isLoaded());
+    }
+
+    @Override
+    public void setUp() {
+        logStep("Click subpage link");
+        sortableDataTablesPage = theInternetPage.clickSortableDataTablesLink();
+
+        logStep("Verify if subpage is opened");
+        assertTrue("Unable to open Sortable Data Tables page", sortableDataTablesPage.isLoaded());
+    }
+
+    @Test
+    public void shouldLastNameColumnBeOrderedAscendingAfterSort() {
+        int columnNumber = 0;
+        int tableNumber = new Random().nextInt(2);
+
+        logStep("Sort 'Last Name' column");
+        sortableDataTablesPage.sortColumnAscending(columnNumber, tableNumber);
+        assertTrue("Unable to set ascending order for 'Last Name' column",
+                sortableDataTablesPage.readColumnClass(columnNumber, tableNumber)
+                        .contains("headerSortDown"));
+
+        logStep("Verify data order for 'Last Name' column");
+        actualValues = sortableDataTablesPage.getColumnValues(columnNumber, tableNumber);
+        expectedValues = new ArrayList<String>(actualValues);
+        Collections.sort(expectedValues);
+        assertEquals("'Last Name' column is not sorted in ascending order",
+                expectedValues, actualValues);
+    }
+
+    @Test
+    public void shouldFirstNameColumnBeOrderedDescendingAfterSort() {
+        int columnNumber = 1;
+        int tableNumber = new Random().nextInt(2);
+
+        logStep("Sort 'First Name' column");
+        sortableDataTablesPage.sortColumnDescending(columnNumber, tableNumber);
+        assertTrue("Unable to set descending order for 'First Name' column",
+                sortableDataTablesPage.readColumnClass(columnNumber, tableNumber)
+                        .contains("headerSortUp"));
+
+        logStep("Verify data order for 'First Name' column");
+        actualValues = sortableDataTablesPage.getColumnValues(columnNumber, tableNumber);
+        expectedValues = new ArrayList<String>(actualValues);
+        Collections.sort(expectedValues);
+        Collections.reverse(expectedValues);
+        assertEquals("'First Name' column is not sorted in descending order",
+                expectedValues, actualValues);
+    }
+}
+
+
+
+
+example56 +
+
+
+

This example shows how to process HTTP status codes returned by page

+
+
+

When you click status code link, you will be redirected to the subpage which returns the proper HTTP status code. In order to check what code was returned:

+
+
+
    +
  1. +

    Open developer tools

    +
  2. +
  3. +

    Go to Network tab

    +
  4. +
  5. +

    Click request name

    +
  6. +
  7. +

    Find a code number in Headers section

    +
  8. +
+
+
+
+example57 +
+
+
+
+== Page Class +
+

Add new methods to existing Status Codes Home Page Class

+
+
+
+
public class StatusCodesHomePage extends BasePage {
+
+    private static final By selectorLink200Code = By.linkText("200");
+    private static final By selectorLink301Code = By.linkText("301");
+    private static final By selectorLink404Code = By.linkText("404");
+    private static final By selectorLink500Code = By.linkText("500");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.STATUS_CODES.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Status Codes' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.STATUS_CODES.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Verifies if given link is displayed.
+     *
+     * @param selector Selector of the given link
+     * @return true if link is displayed
+     */
+    public boolean isLinkCodeDisplayed(By selector) {
+        return getDriver().findElementDynamic(selector)
+                .isDisplayed();
+
+    }
+
+    /**
+     * Clicks '200' link.
+     *
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCode200Link() {
+        return clickCodeLink(selectorLink200Code);
+    }
+
+    /**
+     * Clicks '301' link.
+     *
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCode301Link() {
+        return clickCodeLink(selectorLink301Code);
+    }
+
+    /**
+     * Clicks '404' link.
+     *
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCode404Link() {
+        return clickCodeLink(selectorLink404Code);
+    }
+
+    /**
+     * Clicks '500' link.
+     *
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCode500Link() {
+        return clickCodeLink(selectorLink500Code);
+    }
+
+    /**
+     * Clicks code link according to given code number.
+     *
+     * @param code Given code
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCodeLink(String code) {
+        return clickCodeLink(By.linkText(code));
+    }
+
+    private StatusCodesCodePage clickCodeLink(By selector) {
+        String codeNumber = getCodeNumberToCheck(selector);
+        getDriver().findElementDynamic(selector)
+                .click();
+        return new StatusCodesCodePage(codeNumber);
+    }
+
+    private String getCodeNumberToCheck(By selector) {
+        return getDriver().findElementDynamic(selector)
+                .getText();
+    }
+}
+
+
+
+

Create a page class for status codes subpages as well. In the class constructor specify which code number should be returned.

+
+
+
+
public class StatusCodesCodePage extends BasePage {
+
+    private static final By selectorDisplayedText   = By.cssSelector("#content > div > p");
+    private static final By selectorLinkToCodesPage = By.cssSelector("#content > div > p > a");
+
+    private String codeNumber;
+
+    public StatusCodesCodePage(String codeNumber) {
+        this.codeNumber = codeNumber;
+    }
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.STATUS_CODES.getValue() + '/');
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Status Codes' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.STATUS_CODES.getValue() + '/' + codeNumber);
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    public String getCodeNumber() {
+        return codeNumber;
+    }
+
+    /**
+     * Verifies if page is loaded with given code number.
+     *
+     * @param codeNumber Expected code number
+     * @return true if expected code number is loaded with web page
+     */
+    public boolean isLoadedWithStatusCode(String codeNumber) {
+        return getDriver().getCurrentUrl()
+                .equals(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.STATUS_CODES.getValue() + "/" + codeNumber);
+    }
+
+    /**
+     * Returns displayed code number.
+     * <p>
+     * Code number is retrieved from following text displayed on the page:<b>
+     * 'This page returned a *** status code.', where *** represent the code number to be
+     * returned.
+     * </p>
+     *
+     * @return String object representing the displayed code number retrieved from specific sentence.
+     */
+    public String getDisplayedCodeNumber() {
+        return getDriver().findElementDynamic(selectorDisplayedText)
+                .getText()
+                .substring(21, 24);
+    }
+
+    /**
+     * Clicks link to return to 'Code Page'.
+     *
+     * @return StatusCodesHomePage object
+     */
+    public StatusCodesHomePage clickLinkToCodePage() {
+        getDriver().findElementDynamic(selectorLinkToCodesPage)
+                .click();
+        return new StatusCodesHomePage();
+    }
+}
+
+
+
+
+== Test Class +
+

Before all tests: Open The Internet Main Page, go to Status Codes page

+
+
+

Steps:

+
+
+

For each status code

+
+
+
    +
  1. +

    Click code link

    +
  2. +
  3. +

    Check if the page is loaded with an expected code number

    +
  4. +
  5. +

    Check if the displayed code number equals the expected number

    +
  6. +
  7. +

    Go back to Status Codes Home Page

    +
  8. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class StatusCodeTest extends TheInternetBaseTest {
+
+    private static StatusCodesHomePage statusCodesHomePage;
+    private        StatusCodesCodePage statusCodesCodePage;
+
+    private String[] codes = { "200", "301", "404", "500" };
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        statusCodesHomePage = shouldTheInternetPageBeOpened().clickStatusCodesLink();
+
+        logStep("Verify if Status Codes Home page is opened");
+        assertTrue("Unable to open Status Codes Home page", statusCodesHomePage.isLoaded());
+    }
+
+    @Test
+    public void shouldProperCodeBeDisplayedAfterClickCodeLink() {
+
+        for (String code : codes) {
+            logStep("Click link to " + code + " code");
+            statusCodesCodePage = statusCodesHomePage.clickCodeLink(code);
+
+            logStep("Verify if proper web page corresponding to the code is opened");
+            assertTrue("Unable to open proper web page",
+                    statusCodesCodePage.isLoadedWithStatusCode(code));
+
+            logStep("Verify if the displayed code is equal to the expected one");
+            assertEquals(code, statusCodesCodePage.getDisplayedCodeNumber());
+
+            logStep("Click link to come back to 'Status Codes' page");
+            statusCodesCodePage.clickLinkToCodePage();
+        }
+    }
+}
+
+
+
+
+
+
+

== First Steps

+
+
+Page Object +
+

Your Product Under Test will be the following website: http://the-internet.herokuapp.com/

+
+
+

At first, create an Object to represent The Internet Main Page:

+
+
+
+
public class TheInternetPage extends BasePage
+
+
+
+

Each class which extends BasePage class must override three methods:

+
+
+
    +
  • +

    public boolean isLoaded() - returns true if the page is loaded and false if not

    +
  • +
  • +

    public void load() - loads the page

    +
  • +
  • +

    public String pageTitle() - returns page title

    +
  • +
+
+
+
+
public class TheInternetPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        BFLogger.logDebug("The internet page is loaded: " + getDriver().getCurrentUrl());
+        return getDriver().getCurrentUrl()
+                .equals(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'The internet' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+}
+
+
+
+
+Environment Variables +
+

In Page classes, when you load/start a web page, it is uncommon to save a fixed main URL.

+
+
+

Instead of hardcoded main URL variable, you build your Page class with a dynamic variable.

+
+
+
+How to create / update system environment +
+

Dynamic variable values are stored under path \src\resources\environments\environments.csv.

+
+
+
+image1 +
+
+
+

By default, the environment takes value from DEV column.

+
+
+
+== Access to the external file variables +
+

Create a class GetEnvironmentParam to map values from an external file with Page class:

+
+
+
+
public enum GetEnvironmentParam {
+
+    // Name of enum must be in line with cell name in /src/resources/environments/environment.csv
+    WWW_FONT_URL,
+    TOOLS_QA,
+    WEB_SERVICE,
+    THE_INTERNET_MAIN_PAGE,
+    ELEMENTAL_SELENIUM_PAGE;
+
+    public String getValue() {
+
+        if (null ==  BaseTest.getEnvironmentService()) {
+            throw new BFInputDataException("Environment Parameters class wasn't initialized properly");
+        }
+
+        return BaseTest.getEnvironmentService()
+                .getValue(this.name());
+
+    }
+
+    @Override
+    public String toString() {
+
+        return this.getValue();
+
+    }
+}
+
+
+
+

When you add a new row to environments.csv also add a new variable to GetEnvironmentParam class.

+
+
+

In Page class access environmental variable using this method:

+
+
+
+
GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue();
+
+
+
+
+Selectors + +
+
+Create selectors +
+

Create a selector for every interactable element on a webpage using By type. Find elements and their attributes using browser developer mode (F12).

+
+
+
+image2 +
+
+
+
+
private static final By abTestLinkSelector               = By.cssSelector("li >
+            a[href*='abtest']");
+    private static final By basicAuthLinkSelector            = By.cssSelector("li >
+            a[href*='basic_auth']");
+    private static final By brokenImageLinkSelector          = By.cssSelector("li >
+            a[href*='broken_images']");
+    private static final By challengingDomLinkSelector       = By.cssSelector("li >
+            a[href*='challenging_dom']");
+    private static final By checkboxesLinkSelector           = By.cssSelector("li >
+            a[href*='checkboxes']");
+    private static final By contextMenuLinkSelector          = By.cssSelector("li >
+            a[href*='context_menu']");
+    private static final By disappearingElementsLinkSelector = By.cssSelector("li >
+            a[href*='disappearing_elements']");
+    private static final By dragAndDropLinkSelector          = By.cssSelector("li >
+            a[href*='drag_and_drop']");
+    private static final By dropdownLinkSelector             = By.cssSelector("li >
+            a[href*='dropdown']");
+    private static final By dynamicContentLinkSelector       = By.cssSelector("li >
+            a[href*='dynamic_content']");
+    private static final By dynamicControlsLinkSelector      = By.cssSelector("li >
+            a[href*='dynamic_controls']");
+    private static final By dynamicLoadingLinkSelector       = By.cssSelector("li >
+            a[href*='dynamic_loading']");
+    private static final By exitIntentLinkSelector           = By.cssSelector("li >
+            a[href*='exit_intent']");
+    private static final By fileDownloadLinkSelector         = By.cssSelector("li >
+            a[href$='download']");
+    private static final By fileUploadLinkSelector           = By.cssSelector("li >
+           a[href*='upload']");
+    private static final By floatingMenuLinkSelector         = By.cssSelector("li >
+           a[href*='floating_menu']");
+    private static final By forgotPasswordLinkSelector       = By.cssSelector("li >
+           a[href*='forgot_password']");
+    private static final By formAuthenticationLinkSelector   = By.cssSelector("li >
+           a[href*='login']");
+    private static final By framesLinkSelector               = By.cssSelector("li >
+           a[href*='frames']");
+    private static final By geolocationLinkSelector          = By.cssSelector("li >
+           a[href*='geolocation']");
+    private static final By horizontalSliderLinkSelector     = By.cssSelector("li >
+           a[href*='horizontal_slider']");
+    private static final By hoversLinkSelector               = By.cssSelector("li >
+           a[href*='hovers']");
+    private static final By infiniteScrollLinkSelector       = By.cssSelector("li >
+           a[href*='infinite_scroll']");
+    private static final By javaScriptAlertLinkSelector      = By.cssSelector("li >
+           a[href*='javascript_alerts']");
+    private static final By javaScriptErrorLinkSelector      = By.cssSelector("li >
+           a[href*='javascript_error']");
+    private static final By jQueryUIMenuLinkSelector         = By.cssSelector("li >
+           a[href*='jqueryui/menu']");
+    private static final By keyPressesLinkSelector           = By.cssSelector("li >
+           a[href*='key_presses']");
+    private static final By largeAndDeepDOMLinkSelector      = By.cssSelector("li >
+           a[href*='large']");
+    private static final By multipleWindowsLinkSelector      = By.cssSelector("li >
+           a[href*='windows']");
+    private static final By nestedFramesLinkSelector         = By.cssSelector("li >
+           a[href*='nested_frames']");
+    private static final By notificationMessagesLinkSelector = By.cssSelector("li >
+           a[href*='notification_message']");
+    private static final By redirectLinkSelector             = By.cssSelector("li >
+           a[href*='redirector']");
+    private static final By secureFileDownloadLinkSelector   = By.cssSelector("li >
+           a[href*='download_secure']");
+    private static final By shiftingContentLinkSelector      = By.cssSelector("li >
+           a[href*='shifting_content']");
+    private static final By slowResourcesLinkSelector        = By.cssSelector("li >
+           a[href*='slow']");
+    private static final By sortableDataTablesLinkSelector   = By.cssSelector("li >
+           a[href*='tables']");
+    private static final By statusCodesLinkSelector          = By.cssSelector("li >
+           a[href*='status_codes']");
+    private static final By typosLinkSelector                = By.cssSelector("li >
+           a[href*='typos']");
+    private static final By wYSIWYGEditorLinkSelector        = By.cssSelector("li >
+           a[href*='tinymce']");
+
+
+
+
+Implement methods +
+

Then use these selectors to create Objects and perform actions on page elements:

+
+
+
+
public ABtestPage clickABtestingLink() {
+        new Button(abTestLinkSelector).click();
+        return new ABtestPage();
+    }
+
+    public BasicAuthPage clickBasicAuthLink() {
+        getDriver().waitForPageLoaded();
+        WebElement link = getDriver().findElementDynamic(basicAuthLinkSelector);
+        JavascriptExecutor executor = (JavascriptExecutor) getDriver();
+        executor.executeScript("var elem=arguments[0]; setTimeout(function() {elem.click();}, 100)",
+                link);
+        return new BasicAuthPage();
+    }
+
+    public BrokenImagePage clickBrokenImageLink() {
+        new Button(brokenImageLinkSelector).click();
+        return new BrokenImagePage();
+    }
+
+    public ChallengingDomPage clickChallengingDomLink() {
+        new Button(challengingDomLinkSelector).click();
+        return new ChallengingDomPage();
+    }
+
+    public CheckboxesPage clickCheckboxesLink() {
+        new Button(checkboxesLinkSelector).click();
+        return new CheckboxesPage();
+    }
+
+    public ContextMenuPage clickContextMenuLink() {
+        new Button(contextMenuLinkSelector).click();
+        return new ContextMenuPage();
+    }
+
+    public DisappearingElementsPage clickDisappearingElementsLink() {
+        new Button(disappearingElementsLinkSelector).click();
+        return new DisappearingElementsPage();
+    }
+
+    public DragAndDropPage clickDragAndDropLink() {
+        new Button(dragAndDropLinkSelector).click();
+        return new DragAndDropPage();
+    }
+
+    public DropdownPage clickDropdownLink() {
+        new Button(dropdownLinkSelector).click();
+        return new DropdownPage();
+    }
+
+    public DynamicContentPage clickDynamicContentLink() {
+        new Button(dynamicContentLinkSelector).click();
+        return new DynamicContentPage();
+    }
+
+    public DynamicControlsPage clickDynamicControlsLink() {
+        new Button(dynamicControlsLinkSelector).click();
+        return new DynamicControlsPage();
+    }
+
+    public DynamicLoadingPage clickDynamicLoadingLink() {
+        new Button(dynamicLoadingLinkSelector).click();
+        return new DynamicLoadingPage();
+    }
+
+    public ExitIntentPage clickExitIntentLink() {
+        new Button(exitIntentLinkSelector).click();
+        return new ExitIntentPage();
+    }
+
+    public FileDownloadPage clickFileDownloadLink() {
+        new Button(fileDownloadLinkSelector).click();
+        return new FileDownloadPage();
+    }
+
+    public FileUploadPage clickFileUploadLink() {
+        new Button(fileUploadLinkSelector).click();
+        return new FileUploadPage();
+    }
+
+    public FloatingMenuPage clickFloatingMenuLink() {
+        new Button(floatingMenuLinkSelector).click();
+        return new FloatingMenuPage();
+    }
+
+    public ForgotPasswordPage clickForgotPasswordLink() {
+        new Button(forgotPasswordLinkSelector).click();
+        return new ForgotPasswordPage();
+    }
+
+    public FormAuthenticationPage clickFormAuthenticationLink() {
+        new Button(formAuthenticationLinkSelector).click();
+        return new FormAuthenticationPage();
+    }
+
+    public FramesPage clickFramesLink() {
+        new Button(framesLinkSelector).click();
+        return new FramesPage();
+    }
+
+    public GeolocationPage clickGeolocationLink() {
+        new Button(geolocationLinkSelector).click();
+        return new GeolocationPage();
+    }
+
+    public HorizontalSliderPage clickHorizontalSliderLink() {
+        new Button(horizontalSliderLinkSelector).click();
+        return new HorizontalSliderPage();
+    }
+
+    public HoversPage clickHoversLink() {
+        new Button(hoversLinkSelector).click();
+        return new HoversPage();
+    }
+
+    public InfiniteScrollPage clickInfiniteScrollLink() {
+        new Button(infiniteScrollLinkSelector).click();
+        return new InfiniteScrollPage();
+    }
+
+    public JavaScriptAlertsPage clickJavaScriptAlertLink() {
+        new Button(javaScriptAlertLinkSelector).click();
+        return new JavaScriptAlertsPage();
+    }
+
+    public JavaScriptErrorPage clickJavaScriptErrorLink() {
+        new Button(javaScriptErrorLinkSelector).click();
+        return new JavaScriptErrorPage();
+    }
+
+    public JQueryUIMenuPage clickJQueryUIMenuLink() {
+        new Button(jQueryUIMenuLinkSelector).click();
+        return new JQueryUIMenuPage();
+    }
+
+    public KeyPressesPage clickKeyPressesLink() {
+        new Button(keyPressesLinkSelector).click();
+        return new KeyPressesPage();
+    }
+
+    public LargeAndDeepDOMPage clickLargeAndDeepDOMLink() {
+        new Button(largeAndDeepDOMLinkSelector).click();
+        return new LargeAndDeepDOMPage();
+    }
+
+    public MultipleWindowsPage clickmultipleWindowsLink() {
+        new Button(multipleWindowsLinkSelector).click();
+        return new MultipleWindowsPage();
+    }
+
+    public NestedFramesPage clickNestedFramesLink() {
+        new Button(nestedFramesLinkSelector).click();
+        return new NestedFramesPage();
+    }
+
+    public NotificationMessagesPage clickNotificationMessagesLink() {
+        new Button(notificationMessagesLinkSelector).click();
+        return new NotificationMessagesPage();
+    }
+
+    public RedirectLinkPage clickRedirectLink() {
+        new Button(redirectLinkSelector).click();
+        return new RedirectLinkPage();
+    }
+
+    public SecureFileDownloadPage clickSecureFileDownloadLink() {
+        new Button(secureFileDownloadLinkSelector).click();
+        return new SecureFileDownloadPage();
+    }
+
+    public ShiftingContentPage clickShiftingContentLink() {
+        new Button(shiftingContentLinkSelector).click();
+        return new ShiftingContentPage();
+    }
+
+    public SlowResourcesPage clickSlowResourcesLink() {
+        new Button(slowResourcesLinkSelector).click();
+        return new SlowResourcesPage();
+    }
+
+    public SortableDataTablesPage clickSortableDataTablesLink() {
+        new Button(sortableDataTablesLinkSelector).click();
+        return new SortableDataTablesPage();
+    }
+
+    public StatusCodesHomePage clickStatusCodesLink() {
+        new Button(statusCodesLinkSelector).click();
+        return new StatusCodesHomePage();
+    }
+
+    public TyposPage clickTyposLink() {
+        new Button(typosLinkSelector).click();
+        return new TyposPage();
+    }
+
+    public WYSIWYGEditorPage clickWYSIWYGEditorLink() {
+        new Button(wYSIWYGEditorLinkSelector).click();
+        return new WYSIWYGEditorPage();
+    }
+
+
+
+

These methods create a Button object for every link on The Internet Page and click it to redirect on a different subpage.

+
+
+
+Elements types +
+

MrChecker includes Object types for various elements existing on webpages such as Button, TextBox etc. There is also WebElement class and getDriver().findElementDynamic(By selector) method for creating webpage objects dynamically and performing basic actions:

+
+
+

Instead of using static types you can use:

+
+
+
+
    public TyposPage clickTyposLink() {
+        WebElement checkboxesLink = getDriver().findElementDynamic(checkboxesLinkSelector);
+        checkboxesLink.click();
+        return new TyposPage();
+    }
+
+
+
+

Or perform actions without creating a variable:

+
+
+
+
    public TyposPage clickTyposLink() {
+        getDriver().findElementDynamic(checkboxesLinkSelector).click();
+        return new TyposPage();
+    }
+
+
+
+
+The Internet Base Test + +
+
+Test Class +
+

Create Test class and override methods:

+
+
+
    +
  • +

    public void setUp() - executes before each test

    +
  • +
  • +

    public void tearDown() - executes after each test

    +
  • +
+
+
+
+
public class TheInternetBaseTest extends BaseTest {
+    @Override
+    public void setUp() {
+
+    }
+
+    @Override
+    public void tearDown() {
+        logStep("Navigate back to The-Internet page");
+        BasePage.navigateBack();
+    }
+}
+
+
+
+

logStep(String message) method doesn’t exist yet so you should create it:

+
+
+
+
    protected static int             step = 0;
+
+     /**
+     * Logs test step including step number calculated individually for each test.
+     *
+     * @param message Text message representing step description.
+     */
+    public static void logStep(String message) {
+        BFLogger.logInfo("Step " + ++step + ": " + message);
+    }
+
+
+
+

Write a method for loading The Internet Page and checking if it is properly opened:

+
+
+
+
    protected static TheInternetPage theInternetPage;
+
+    /**
+     * Performs operations required for verifying if The Internet Page is properly opened.
+     *
+     * @return TheInternetPage
+     */
+    public static TheInternetPage shouldTheInternetPageBeOpened() {
+
+        logStep("Open the Url http://the-internet.herokuapp.com/");
+        theInternetPage = new TheInternetPage();
+        theInternetPage.load();
+
+        logStep("Verify if Url http://the-internet.herokuapp.com/ is opened");
+        assertTrue("Unable to load The Internet Page", theInternetPage.isLoaded());
+
+        return theInternetPage;
+    }
+
+
+
+

This Test class can’t be launched because it doesn’t contain any @Test methods. It’s been created only for supporting other Test classes.

+
+
+
+BFLogger +
+

BFLogger is a default MrChecker logging tool. Use it to communicate important information from test execution. There are three basic logging methods:

+
+
+
    +
  • +

    logInfo(String message) - used for test steps

    +
  • +
  • +

    logDebug(String message) - used for non-official information, either during the test build process or in Page Object files

    +
  • +
  • +

    logError(String message) - used to emphasize critical information

    +
  • +
+
+
+

Logs will be visible in the console and in the log file under path: MrChecker_Test_Framework\workspace\project-folder\logs

+
+
+
+
+
+

E2E Tutorials

+
+ +
+
MrChecker E2E tutorials
+
+

In order to learn more about MrChecker structure, start from Project Organisation section and then check out our fantastic tutorials:

+
+
+
+
How to create a basic test in MrChecker
+ +
+
+
Example: Booking a table
+
+

As an example to test we will use MyThaiStar booking page.
+In order to book a table, do the following steps:

+
+
+
    +
  1. +

    Open MyThaiStar Book Table Page

    +
  2. +
  3. +

    Enter booking data: Date and time, Name, Email and number of Table guests

    +
  4. +
  5. +

    Click Accept terms

    +
  6. +
  7. +

    Click Book table

    +
  8. +
  9. +

    Display confirmation box and send booking

    +
  10. +
  11. +

    Check if the booking was successful.

    +
  12. +
+
+
+
+image1 +
+
+
+
+image2 +
+
+
+

You can go through these steps manually and doublecheck the result.

+
+
+
+
How to prepare a test
+ +
+
+
== 1. Create BookTablePage class
+
+

You will need a class which will represent the MyThaiStar booking page.
+Fill the required methods with the following code:

+
+
+
+
public class BookTablePage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded(); //waits until the page is loaded
+        return getDriver().getCurrentUrl()
+                .equals("https://mts-devonfw-core.cloud.okteto.net/bookTable"); //checks if current page address equals MyThaiStar booking page address
+    }
+
+    @Override
+    public void load() {
+        getDriver().get("https://mts-devonfw-core.cloud.okteto.net/bookTable"); //loads page under specified address
+        getDriver().waitForPageLoaded(); //waits until the page is loaded
+    }
+
+    @Override
+    public String pageTitle() {
+        return "My Thai Star"; //returns page title
+    }
+}
+
+
+
+

getDriver() method allows accessing Selenium Web Driver which performs actions on the webpage.

+
+
+

As this page class represents the MyThaiStar booking page, you have to set up selectors for web elements required in the test case. In the example you have to create selectors for elements we’ll interact with:

+
+
+
    +
  • +

    Date and time input field

    +
  • +
  • +

    Name input field

    +
  • +
  • +

    Email input field

    +
  • +
  • +

    Table guests input field

    +
  • +
  • +

    Accept terms checkbox

    +
  • +
  • +

    Book table button

    +
  • +
+
+
+

Selectors will be implemented as fields.

+
+
+

Example of the selector for Date and time input field:

+
+
+
+
/** Date field search criteria */
+private static final By dateSearch = By.cssSelector("input[formcontrolname='bookingDate']");
+
+
+
+

The input field’s name "bookingDate" was found by using the developer console in Google Chrome. How to prepare an everlasting selector?

+
+
+
+image3 +
+
+
+

This selector can be used to create a WebElement object of the said input field. Therefore, you will create a new method and call it "enterTimeAndDate".

+
+
+
+
public void enterTimeAndDate(String date) {
+    WebElement dateInput = getDriver().findElementDynamic(dateSearch); //creates a new WebElement to access Date and time input field
+    dateInput.sendKeys(date); //enters date value
+}
+
+
+
+

Now you can create other selectors and objects and methods for every element on the webpage:

+
+
+
+
/** Name input field search criteria */
+private static final By nameSearch = By.cssSelector("input[formcontrolname='name']");
+
+/** Email input field search criteria */
+private static final By emailSearch = By.cssSelector("input[formcontrolname='email']");
+
+/** Number of guests search criteria */
+private static final By guestsSearch = By.cssSelector("input[formcontrolname='assistants']");
+
+/** Check box search criteria */
+private static final By checkboxSearch = By.cssSelector("mat-checkbox[data-name='bookTableTerms']");
+
+/** Book table button search criteria */
+private static By bookTableSearch = By.name("bookTableSubmit");
+
+
+
+
+
public void enterName(String name) {
+    WebElement nameInput = getDriver().findElementDynamic(nameSearch); //creates a new WebElement to access name input field
+    nameInput.sendKeys(name); //enters name value
+}
+
+public void enterEmail(String email) {
+    WebElement emailInput = getDriver().findElementDynamic(emailSearch); //creates a new WebElement to access email input field
+    emailInput.sendKeys(email); //enters email value
+}
+
+public void enterGuests(int amountOfGuests) {
+    WebElement guestsInput = getDriver().findElementDynamic(guestsSearch); //creates a new WebElement to access amount of guests input field
+    guestsInput.sendKeys(Integer.toString(amountOfGuests)); //enters the number of guests value converted from integer to string
+}
+
+public void acceptTerms() {
+    WebElement checkbox = getDriver().findElementDynamic(checkboxSearch); //creates a new WebElement to access accept terms checkbox
+    WebElement square = checkbox.findElement(By.className("mat-checkbox-inner-container")); //creates a new WebElement to access inner square
+    JavascriptExecutor js = (JavascriptExecutor) getDriver(); //creates a Javascript executor object
+    js.executeScript("arguments[0].click()", square); //executes a script which clicks the square
+
+}
+
+public void clickBookTable() {
+    WebElement buttonbutton = getDriver().findElementDynamic(bookTableSearch); //creates a new WebElement to access book table button
+    getDriver().waitUntilElementIsClickable(bookTableSearch); //waits until a button might be clicked
+    buttonbutton.click(); //clicks the button
+}
+
+
+
+

You can use those methods in order to create a new method to go through the whole booking process:

+
+
+
+
public ConfirmBookPage enterBookingData(String date, String name, String email, int guests) {
+    enterTimeAndDate(date);
+    enterName(name);
+    enterEmail(email);
+    enterGuests(guests);
+    acceptTerms();
+
+    clickBookTable();
+
+    return new ConfirmBookPage();
+}
+
+
+
+
+
== 2. Create ConfirmBookPage class
+
+

As you can see, this method returns another page object that has not yet been created. This step is required, as the booking information that you would like to check is on another webpage. This means that you will have to create another page class and call it ConfirmBookPage:

+
+
+
+
public class ConfirmBookPage extends BasePage {
+
+    /** Confirmation dialog search criteria */
+    private static final By confirmationDialogSearch = By.className("mat-dialog-container");
+
+    /** Send confirmation button search criteria */
+    private static final By sendButtonSearch = By.name("bookTableConfirm");
+
+    /** Cancel confirmation button search criteria */
+    private static final By cancelButtonSearch = By.name("bookTableCancel");
+
+    @Override
+    public boolean isLoaded() {
+        //creates a new WebElement to access confirmation dialog box
+        WebElement confirmationDialog = getDriver().findElementDynamic(confirmationDialogSearch);
+
+        return confirmationDialog.isDisplayed(); //checks if the box is displayed
+    }
+
+    //this method won't be called because the page is loaded only after clicking book table button
+    @Override
+    public void load() {
+        BFLogger.logError("MyThaiStar booking confirmation page was not loaded."); //logs error
+    }
+
+    @Override
+    public String pageTitle() {
+        return "My Thai Star";
+    }
+
+    public void confirmBookingData() {
+        WebElement sendButton = getDriver().findElementDynamic(sendButtonSearch); //creates a new WebElement to access confirmation button
+        sendButton.click(); //clicks the send button
+    }
+
+    public void cancelBookingData() {
+        WebElement cancelButton = getDriver().findElementDynamic(cancelButtonSearch); //creates a new WebElement to access resignation button
+        cancelButton.click(); //clicks the cancel button
+    }
+}
+
+
+
+
+image4 +
+
+
+

After the click on Send button - the green confirmation dialogue appears with the message "Table successfully booked":

+
+
+
+image5 +
+
+
+

To be able to check if the booking was successful, you should go back to the BookTablePage class and add one more method in order to check if the green box was displayed:

+
+
+
+
/** Dialog search criteria */
+private static final By dialogSearch = By.className("bgc-green-600");
+
+public boolean checkConfirmationDialog() {
+    WebElement greenConfirmationDialog = getDriver().findElementDynamic(dialogSearch); //creates a new WebElement to access confirmation dialog
+
+    return greenConfirmationDialog.isDisplayed(); //checks if the dialog is displayed
+}
+
+
+
+
+
== 3. Create BookTableTest class
+
+

At this point you can start creating a test class:

+
+
+
+
import static org.junit.Assert.assertTrue;
+
+public class BookTableTest extends BaseTest {
+    private static BookTablePage bookTablePage = new BookTablePage(); //the field contains book table page object
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        bookTablePage.load(); //loads book table page
+    }
+
+    @AfterClass
+    public static void tearDownAfterClass() {
+
+    }
+
+    @Override
+    public void setUp() {
+        if (!bookTablePage.isLoaded()) {
+            bookTablePage.load(); //if the page is not loaded, loads it
+        }
+    }
+
+    @Override
+    public void tearDown() {
+
+    }
+}
+
+
+
+
+
== 4. Write the first test
+
+

You can prepare our first test method using the methods from page classes

+
+
+
+
@Test
+public void Test_BookTableAndCheckConfirmation() {
+    String date = "07/23/2019 1:00 PM"; //replace with tommorow's date in format "MM/dd/yyyy hh:mm a"
+    String name = "Smith"; //name field
+    String email = "smith@somemail.com"; //email field
+    int guests = 3; //number of guests
+
+    //enters booking data and returns a new confirmation page
+    ConfirmBookPage confirmBookPage = bookTablePage.enterBookingData(date, name, email, guests);
+    confirmBookPage.confirmBookingData(); //confirms booking
+
+    //checks if the green dialog box appears, if it does, test is passed, if not, the test failed and displays message given in the first argument
+    assertTrue("Test failed: Table not booked", bookTablePage.checkConfirmationDialog()); //returns true if dialog box appears and false if not
+}
+
+
+
+
+
== 5. Run the test
+
+

Run the test by right-clicking on the test method → Run as → JUnit test.

+
+
+
+image6 +
+
+
+
+
+
+

Migration from JUnit4 to JUnit5

+
+ +
+
+
+

Migration guide

+
+
+
Junit4 to Junit5 migration guide
+
+

mrchecker-core-module version 5.6.2.1 features the upgrade of Junit4 to Junit5. Consequently, the Junit4 features are now obsolete and current test projects require migration +in order to use the latest revision of MrChecker. This site provides guidance on the migration.

+
+ +
+
+
POM
+
+

The project pom.xml file needs to be adjusted in the first place. An exemplary POM file for download can be found here: https://github.com/devonfw/mrchecker/blob/develop/template/pom.xml

+
+
+
+
Test Annotations
+
+

Junit5 redefines annotations defining a test flow. The annotations need to be adjusted as per the following table.

+
+
+
+migration01 +
+
+
+
+
Rule, ClassRule, TestRule and TestMethod
+
+

Junit4 @Rule and @ClassRule annotations as well as TestRule and TestMethod interfaces have been replaced +with the Junit5 extension mechanism (https://junit.org/junit5/docs/current/user-guide/#extensions). +During the migration to Junit5, all the instances of the mentioned types need to be rewritten according to the Junit5 User Guide. +The extension mechanism is far more flexible than the Junit4 functionality based on rules.

+
+
+

Note: as per Junit5 API spec: ExpectedExceptionSupport, ExternalResourceSupport, VerifierSupport +provide native support of the corresponding Junit4 rules.

+
+
+

Extension registration example:

+
+
+
+migration02 +
+
+
+
+migration arrow down +
+
+
+
+migration03 +
+
+
+

TestRule (TestWatcher and ExternalResource) to Extension (TestWatcher and AfterAllCallback) example:

+
+
+
+migration04 +
+
+
+
+migration arrow down +
+
+
+
+migration05 +
+
+
+
+
Page, BasePageAutoRegistration and PageFactory classes
+
+

Page class is a new MrChecker class. It was introduced to provide common implementation for its subpages in specific MrChecker modules. +In order to receive test lifecycle notifications, particular Pages need to be registered by calling addToTestExecutionObserver() method. +To facilitate this process, PageFactory class was designed and its usage is the recommended way of creating Page objects for tests. +Although in MrChecker based on Junit4, the registration process was done in a specific BasePage constructor, it’s been considered error prone and reimplemented. +Furthermore, to reduce migration cost, BasePageAutoRegistration classes are available in MrChecker modules. They use the old way of registration. +Given that, three ways of migration are possible.

+
+
+

Migration with PageFactory class example (RECOMMENDED):

+
+
+
+migration06 +
+
+
+
+migration arrow down +
+
+
+
+migration07 +
+
+
+

Migration with calling addToTestExecutionObserver() method example:

+
+
+
+migration06 +
+
+
+
+migration arrow down +
+
+
+
+migration08 +
+
+
+

Migration with BasePageAutoRegistration class example:

+
+
+
+migration09 +
+
+
+
+migration arrow down +
+
+
+
+migration10 +
+
+
+
+
Test suites
+
+

Test suite migration example:

+
+
+
+migration11 +
+
+
+
+migration arrow down +
+
+
+
+migration12 +
+
+
+

Running tests from Maven:

+
+
+
+migration13 +
+
+
+
+migration arrow down +
+
+
+
+migration14 +
+
+
+
+
Concurrency
+
+

Junit5 provides native thread count and parallel execution control in contrast to Junit4 where it was controlled by the Maven Surefire plugin. +To enable concurrent test execution, a junit-platform.properties file needs to be placed in the test/resources directory of a project.

+
+
+

Exemplary file contents:

+
+
+
+migration15 +
+
+
+

A ready-to-use file can be found here.

+
+
+

MrChecker supports only concurrent test class execution. +@ResourceLock can be used to synchronize between classes if needed:

+
+
+
+migration16 +
+
+
+
+
Cucumber
+
+

If Cucumber is used in a project, it is necessary to change a hook class. +An exemplary hook source file for download can be found here.

+
+
+
+
Data driven tests
+
+

Junit5 implements a new approach to data driven tests by various data resolution mechanisms.

+
+
+

An example of method source parameters migration version one:

+
+
+
+migration17 +
+
+
+
+migration arrow down +
+
+
+
+migration18 +
+
+
+

An example of method source parameters migration version two:

+
+
+
+migration17 +
+
+
+
+migration arrow down +
+
+
+
+migration19 +
+
+
+

An example of method source in another class parameters migration:

+
+
+
+migration20 +
+
+
+
+migration arrow down +
+
+
+
+migration21 +
+
+
+

Providing parameters directly in annotations has no analogy in Junit5 and needs to be replaced with e.g. method source:

+
+
+
+migration22 +
+
+
+
+migration arrow down +
+
+
+
+migration23 +
+
+
+

An example of csv parameters source with no header line migration:

+
+
+
+migration24 +
+
+
+
+migration arrow down +
+
+
+
+migration25 +
+
+
+

An example of csv parameters source with the header line migration:

+
+
+
+migration26 +
+
+
+
+migration arrow down +
+
+
+
+migration27 +
+
+
+

An example of csv parameters source with object mapping migration step1:

+
+
+
+migration28 +
+
+
+
+migration arrow down +
+
+
+
+migration29 +
+
+
+

An example of csv parameters source with object mapping migration step 2:

+
+
+
+migration30 +
+
+
+
+migration arrow down +
+
+
+
+migration31 +
+
+
+
+
setUp() and tearDown()
+
+

BaseTest.setUp() and BaseTest.tearDown() methods are now not abstract and need no implementation in subclasses. @Override when a custom implementation is needed.

+
+
+
+
+
+

FAQ

+
+
+

Here you can find the most frequently asked questions regarding working with MrChecker and installation problems.

+
+
+
+
+

Common problems

+
+
+
I can’t find the boilerplate module. Has it been removed?
+
+

The boilerplate module has been removed from the GitHub project on purpose.

+
+
+

There were problems with naming and communication, not everybody was aware of the meaning of the word boilerplate.

+
+
+

The name of the folder has been changed to template. It can be found in the GitHub project.

+
+
+
+
Is it possible to use Docker with MrChecker?
+
+

MrChecker works seamlessly with Docker.

+
+ +
+

Note that the structure of the folders can be changed. If that happens - search in repo for /pipeline/CI/Jenkinsfile_node.groovy

+
+
+
+
Tests are not stable
+
+

Selenium tests perform actions much faster than a normal user would. Because pages can contain dynamically changing content, some web elements can still not be loaded when Selenium driver tries to access them.

+
+
+

getDriver().waitForPageLoaded() method checks ready state in the browser, that’s why stability problems may happen in advanced frontend projects.

+
+
+

To improve test stability you can:

+
+
+
    +
  • +

    add waiting methods before dynamically loading elements e.g. getDriver().waitForElement(By selector)

    +
  • +
  • +

    add timeout parameter in method getDriver().findElementDynamic(By selector, int timeOut)

    +
  • +
  • +

    change global waiting timeout value using method getDriver().manage().timeouts().implicitlyWait(long time, TimeUnit unit)

    +
  • +
+
+
+

Furthermore, if the page displays visible loading bars or spinners, create FluentWait method to wait until they disappear.

+
+
+

Notice that by increasing timeouts you may improve stability but too long waiting time makes tests run slower.

+
+ +
+
+
+
+

How to

+
+
+
How to: Change timeouts?
+
+

If you would like to change timeouts - you don’t have to change them globally. +It is possible to add waiting time parameter to searching methods, such as:

+
+
+

getDriver().findElementDynamic(By selector, int timeOut)
+timeout - in seconds

+
+
+

It is recommended to use methods that significantly reduce the repetitiveness of the code:

+
+
+
+
getDriver().waitForElement(By selector);
+
+getDriver().waitForElementVisible(By selector);
+
+getDriver().waitForPageLoaded();
+
+getDriver().waitUntilElementIsClickable(By selector);
+
+
+
+

Or Fluent Wait methods with changed timeout and interval:

+
+
+
+
FluentWait<WebDriver> wait = new FluentWait<WebDriver>(getDriver())
+        .withTimeout(long duration, TimeUnit unit)
+        .pollingEvery(long duration, TimeUnit unit);
+wait.until((WebDriver wd) -> expectedCondition.isTrue());
+getWebDriverWait().withTimeout(millis, TimeUnit.MILLISECONDS)
+        .withTimeout(long duration, TimeUnit unit)
+        .pollingEvery(long duration, TimeUnit unit)
+        .until((WebDriver wd) -> expectedCondition.isTrue());
+
+
+
+

These methods allow You to change WebDriver timeouts values such as:

+
+
+

getDriver().manage().timeouts().pageLoadTimeout(long time, TimeUnit unit)
+the amount of time to wait for a page to load before throwing an exception. This is the default timeout for method getDriver().waitForPageLoaded()

+
+
+

getDriver().manage().timeouts().setScriptTimeout(long time, TimeUnit unit)
+the amount of time to wait for execution of script to finish before throwing an exception

+
+
+

getDriver().manage().timeouts().implicitlyWait(long time, TimeUnit unit) +the amount of time the driver should wait when searching for an element if it is not immediately present. After that time, it throws an exception. This is the default timeout for methods such as getDriver().findElementDynamic(By selector) or getDriver().waitForElement(By selector)

+
+
+

Changing timeouts can improve test stability but can also make test run time longer.

+
+
+
+
How to: Start a browser in Incognito/Private mode?
+
+

In MrChecker there is a possibility of changing browser options during runtime execution.

+
+
+

To run the browser in incognito mode:

+
+
+
    +
  1. +

    In Eclipse - open Run Configurations window:

    +
    +

    ht image1

    +
    +
  2. +
  3. +

    Select a test which you want to run and switch to arguments tab:

    +
    +

    ht image2

    +
    +
  4. +
  5. +

    Add VM argument:

    +
    +
      +
    • +

      for the incognito mode in chrome:

      +
      +

      ht image3

      +
      +
    • +
    +
    +
  6. +
+
+ +
+
+
+
+

Installation problems

+
+
+
Chromedriver version is not compatible with Chrome browser
+
+

Problem:

+
+
+

During the tests your web browser window opens and immediately closes, all your tests are broken.

+
+
+

Following error message is visible in the test description:

+
+
+
+
session not created: This version of ChromeDriver only supports Chrome version 76
+Build info: version: '<build_version>', revision: '<build_revision>', time: '<time>'
+System info: host: '<your_computer_name>', ip: '<your_ip_address>', os.name: '<your_os_name>', os.arch: '<your_os_architecture>', os.version: '<your_os_version>', java.version: '<java_version_installed>'
+Driver info: driver.version: NewChromeDriver
+
+
+
+

Solution:

+
+
+
    +
  1. +

    Make a change in the following files:

    +
    +
      +
    • +

      MrChecker_Test_Framework\workspace\devonfw-testing\src\resources\settings.properties

      +
    • +
    • +

      For project template-app-under-test: MrChecker_Test_Framework\workspace\devonfw-testing\template\src\resources\settings.properties

      +
    • +
    • +

      For project example-app-under-test: MrChecker_Test_Framework\workspace\devonfw-testing\example\src\resources\settings.properties

      +
      +

      Change the value of selenium.driverAutoUpdate field from true to false

      +
      +
    • +
    +
    +
  2. +
  3. +

    Replace the following file with a version compatible with your browser: +MrChecker_Test_Framework\workspace\devonfw-testing\example\lib\webdrivers\chrome\chromedriver.exe .

    +
  4. +
+
+
+
+
My browser opens up in German by default
+
+

Problem:

+
+
+

I would like my browser to use the English language, but the default language for the browser is German. How can I change the settings?

+
+
+

Solution:

+
+
+

There is a Properties file installed together with MrChecker installation. It is possible to set the language in which a browser could be opened for testing purposes in Properties > Selenium configuration.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/Home.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/Home.html new file mode 100644 index 00000000..684c96b5 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/Home.html @@ -0,0 +1,467 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

MyThaiStar Wiki

+
+ +
+
+
+

User Stories

+
+
+ +
+
+
+
+

Technical design

+
+ +
+
+
+

Data Model

+
+
+ +
+
+
+
+

Server Side

+ +
+
+

Client Side

+
+ +
+
+
+

SAP HANA Integration

+
+
+ +
+
+
+
+

Security

+ +
+
+

Testing

+
+ +
+
+
+

Server Side

+ +
+
+

Client Side

+
+ +
+
+
+

End to end

+
+
+ +
+
+
+
+

UI design

+
+
+ +
+
+
+
+

CI/CD

+ +
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/My-Thai-Star-data-model.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/My-Thai-Star-data-model.html new file mode 100644 index 00000000..249138de --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/My-Thai-Star-data-model.html @@ -0,0 +1,282 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Data Model

+
+
+
+mts datamodel +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/User-Stories.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/User-Stories.html new file mode 100644 index 00000000..46d331a5 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/User-Stories.html @@ -0,0 +1,906 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

User Stories

+
+
+

The list of user stories, exported from JIRA, can be downloaded from here.

+
+
+
+
+

Epic: Invite friends

+
+ +
+
+
+

US: create invite for friends

+
+
+

Epic: Invite friends

+
+
+

As a guest I want to create a dinner event by entering date and time and adding potential guests by their emails so that each potential guest will receive an email in order to confirm or decline my invite.

+
+
+
+
+

== Acceptance criteria

+
+
+
    +
  1. +

    only date and time in future possible and both required

    +
  2. +
  3. +

    only valid email addresses: text@text.xx, one entered email-address is required

    +
  4. +
  5. +

    if AGB are not checked, an error message is shown

    +
  6. +
  7. +

    after the invite is done

    +
    +
      +
    1. +

      I see the confirmation screen of my invite (see wireframe)

      +
    2. +
    3. +

      I receive a confirmation email about my invite containing date, time and invited guests

      +
    4. +
    5. +

      all guests receive a mail with my invite

      +
    6. +
    +
    +
  8. +
+
+
+
+
+

US: create reservation

+
+
+

Epic: Invite friends

+
+
+

As a guest I want to create a reservation by entering date and time and number of adults and kids

+
+
+
+
+

== Acceptance criteria

+
+
+
    +
  1. +

    only date and time in future possible and both required

    +
  2. +
  3. +

    only valid email addresses: text@text.xx, one entered email-address is required

    +
  4. +
  5. +

    if AGB are not checked, an error message is shown

    +
  6. +
  7. +

    after the reservation is done

    +
    +
      +
    1. +

      I see a confirmation screen of my reservation with date-time, number of persons and kids

      +
    2. +
    3. +

      I receive a confirmation email about my reservation

      +
    4. +
    +
    +
  8. +
+
+
+
+
+

== Wireframes

+
+
+

see real time board

+
+
+
+
+

US: handle invite

+
+
+

As an invited guest I would like to receive an email - after somebody has invited me - with the option to accept or decline the invite so that the system knows about my participation

+
+
+
+
+

== AC:

+
+
+
    +
  1. +

    the mail contains the following information about the invite

    +
    +
      +
    1. +

      who has invited

      +
    2. +
    3. +

      who else is invited

      +
    4. +
    5. +

      date and time of the invite

      +
    6. +
    7. +

      button to accept or decline

      +
    8. +
    9. +

      after pressing the buttons the system will store the status (yes/no) of my invite

      +
    10. +
    +
    +
  2. +
+
+
+
+
+

US: revoke accepted invite

+
+
+

As an invited guest I would like to revoke my previous answer in order to inform the system and the inviter about my no showup

+
+
+
+
+

== AC:

+
+
+
    +
  1. +

    the inviter and I receive an email about my cancellation

    +
  2. +
  3. +

    the system sets my status of my invite to no

    +
  4. +
  5. +

    in case I have placed an order, the order is also removed from the system.

    +
  6. +
  7. +

    the cancellation is only possible 10 minutes before the event takes place. The system shows a message that cancellation is not possible anymore.

    +
  8. +
+
+
+
+
+

US: calculate best table

+
+
+

As a guest I would like the system to check (1 hour before my invite) all my invites and to reserve a table fitting the number of accepted users

+
+
+
+
+

== Details

+
+
+

Pseudo-algorithm for reservation: +Find table for given date and time where seats of guests >= Count of invited guests plus one. In case no results, decline request and show error message to user. In case of any result, make a reservation for table…​. +For each decline of a guest remove guest and search with reduced number for new table. In case table is found, reserve it and remove reservation from previous table. In case not, do not change reservations.

+
+
+
+
+

US: find table by reservation info

+
+
+

As a waiter I would like to search by reference number or email address for the reserved table in order to know the table for my visit. (when arriving at the restaurant)

+
+
+
+
+

== AC:

+
+
+
    +
  1. +

    After entering the email the systems shows the number of the table. In case no reservation found, a message is shown.

    +
  2. +
  3. +

    Entered email address could be email of inviter or any invited guest.

    +
  4. +
+
+
+
+
+

US: cancel invite

+
+
+

Epic: Invite friends

+
+
+

As a guests who has sent an invite I want to be able to cancel my previous invite in order to inform the restaurant and my invited guests that I will not show up

+
+
+
+
+

== AC:

+
+
+
    +
  1. +

    the option to cancel the invite is available in the confirmation-mail about my invite

    +
  2. +
  3. +

    after my cancellation all invited guests receive a mail about the cancellation

    +
  4. +
  5. +

    I see a confirmation that my invite was canceled successfully

    +
  6. +
  7. +

    after my cancellation my invite and reservation and all orders related to it are deleted from the system and no one can accept or decline any invite for it

    +
  8. +
  9. +

    the cancellation is only possible one hour before the invite takes place. After that I am not allowed to cancel it any more.

    +
  10. +
+
+
+
+
+

Epic: Digital Menu

+
+ +
+
+
+

US: filter menu

+
+
+

As a guest I want to filter the menu so that I only see the dishes I am interested in

+
+
+
+
+

== AC:

+
+
+
    +
  1. +

    the guest can filter by

    +
    +
      +
    1. +

      type: starter | main dish | dessert; XOR; if nothing is selected all are shown (default value)

      +
    2. +
    3. +

      veggy (yes|no|does not matter (default))

      +
    4. +
    5. +

      vegan (yes|no|does not matter (default))

      +
    6. +
    7. +

      rice (yes|no|does not matter (default))

      +
    8. +
    9. +

      curry (yes|no|does not matter (default))

      +
    10. +
    11. +

      noodle (yes|no|does not matter (default))

      +
    12. +
    13. +

      price (range)

      +
    14. +
    15. +

      ratings (range)

      +
    16. +
    17. +

      my favorite (yes|no|does not matter (default)) — free text (search in title and description)

      +
    18. +
    +
    +
  2. +
  3. +

    the guest can sort by price asc, rating asc

    +
  4. +
  5. +

    after setting the filter only dishes are shown which fulfills those criteria

    +
  6. +
  7. +

    by pressing the button reset filter all filter are reset to the initial value

    +
  8. +
  9. +

    by pressing the filter button the filter is applied [or is it triggered after each change?]

    +
  10. +
+
+
+
+
+

US: Define order

+
+
+

As a guest I want to define my order by selecting dishes from the menu

+
+
+
+
+

== AC:

+
+
+
    +
  • +

    The guest can add each dish to the order

    +
  • +
  • +

    In case the guest adds the same dish multiple times, a counter in the order for this dish is increased for this dish

    +
  • +
  • +

    The guest can remove the dish from the order

    +
  • +
  • +

    The guest can add for each main dish the type of meat (pork, chicken, tofu)

    +
  • +
  • +

    The guest can add for each dish a free-text-comment

    +
  • +
  • +

    After adding/removing any dish the price is calculated including VAT

    +
  • +
+
+
+
+
+

US: Order the order

+
+
+

As a guest I want to order my selected dishes (order)

+
+
+

AC:

+
+
+
    +
  1. +

    I receive a mail containing my order with all dishes and the final price

    +
  2. +
  3. +

    precondition for ordering:

    +
    +
      +
    1. +

      Each order must be associated with a reservation / invite. Without any reference no order could be placed. The reference could be obtained from a previous reservation/invite (created during same session) or by the previous accepted invite (link in email) or by entering the reference id when asked by the system.

      +
      +
        +
      1. +

        In case precondition is not fulfilled, the guest is asked

        +
        +
          +
        1. +

          whether he/she would like to create a reservation/invite and is forwarded to US Invite Friends. Only after finalizing the reservation the order is accepted.

          +
        2. +
        3. +

          or he/she would enter previous created reservation-id he/she knows in order to associate his/her order with this reservation

          +
        4. +
        +
        +
      2. +
      +
      +
    2. +
    +
    +
  4. +
+
+
+
+
+

US: Cancel order

+
+
+

As a guest I want to cancel my order.

+
+
+

AC:

+
+
+
    +
  1. +

    in my received confirmation mail I have the option to cancel my order

    +
  2. +
  3. +

    the cancellation is only possible one hour before my reservation takes place

    +
  4. +
  5. +

    my order is deleted from the system

    +
  6. +
+
+
+

Remark: Changing the order is not possible. For that the order must be canceled and created from scratch again

+
+
+
+
+

US: Read twitter rating for dishes

+
+
+

As a guest I want to read for all dishes the rating done by twitter because I would like to know the opinion of others

+
+
+

AC:

+
+
+
    +
  1. +

    For each dish I see the latest 3 comments done by twitter for this vote (text, username, avatar)

    +
  2. +
  3. +

    For each dish I see the number of likes done by twitter

    +
  4. +
+
+
+
+
+

Epic: User Profile

+
+ +
+
+
+

US: User Profile

+
+
+

As a guest I want to have a user profile to associate it with my twitter account to be able to like/rate dishes

+
+
+

AC:

+
+
+
    +
  1. +

    Username of my profile is my email address

    +
  2. +
  3. +

    My profile is protected by password

    +
  4. +
  5. +

    I can log in and log out to my profile

    +
  6. +
  7. +

    I can reset my password by triggering the reset by mail

    +
  8. +
  9. +

    I can associate my profile with my twitter account in order to rate dishes and store my favorites by liking posts associated to dishes

    +
  10. +
+
+
+
+
+

Epic: Rate by twitter

+
+ +
+
+
+

US: Receive mail to rate your dish

+
+
+

As a guest I want to receive a mail by the system in order to rate my dish

+
+
+
+
+

US: Rate your dish

+
+
+

As a guest I want to add a comment or a like via my twitter account for a dish

+
+
+

AC:

+
+
+
    +
  1. +

    Before I write my rate I would like to be able to read all tweets of other users for this dish

    +
  2. +
  3. +

    I would like to see the number of likes for a dish

    +
  4. +
+
+
+
+
+

Epic: Waiter Cockpit

+
+ +
+
+
+

US: See all orders/reservations

+
+
+

As a waiter I want to see all orders/reservation in order to know what is going on in my restaurant

+
+
+

AC:

+
+
+
    +
  1. +

    all orders/reservations are shown in a list view (read-only). Those list can be filtered and sorted (similar to excel-data-filters)

    +
  2. +
  3. +

    orders/reservations are shown in separate lists.

    +
  4. +
  5. +

    for each order the dish, meat, comment, item, reservation-id, reservation date-time, creation-date-time is shown

    +
  6. +
  7. +

    for each reservation the inviters email, the guests-emails, the number of accepts and declines, calculated table number, the reservation-id, reservation date-time and creation-date-time are shown

    +
  8. +
  9. +

    the default filter for all lists is the today’s date for reservation date-time. this filter can be deleted.

    +
  10. +
  11. +

    only reservations and orders with reservation date in the future shall be available in this view. All other orders and reservation shall not be deleted; for data Analytics those orders and reservation shall still exist in the system.

    +
  12. +
+
+
+

checklist:

+
+
+

talk about:

+
+
+
    +
  • +

    who?

    +
  • +
  • +

    what?

    +
  • +
  • +

    why (purpose)

    +
  • +
  • +

    why (objective)

    +
  • +
  • +

    what happens outside the software

    +
  • +
  • +

    what might go wrong

    +
  • +
  • +

    any question or assumptions (write them down) , DoR should check that those sections are empty.

    +
  • +
  • +

    is there any better solution?

    +
  • +
  • +

    how (technical perspective)

    +
  • +
  • +

    do a rough estimate

    +
  • +
  • +

    check INVEST

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/agile.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/agile.html new file mode 100644 index 00000000..d9856636 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/agile.html @@ -0,0 +1,414 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

1. My Thai Star – Agile Framework

+
+ +
+
+
+

1.1 Team Setup

+
+
+

The team working on the development of the My Thai Star app and the documentation beside the technical development works distributed in various locations across Germany, the Netherlands, Spain and Poland. For the communication part the team uses the two channels Skype and Mail and for the documentation part the team mainly makes use of GitHub and JIRA.

+
+
+
+
+

1.2 Scrum events

+
+ +
+
+
+

Sprint Planning

+
+
+

Within the My Thai Star project we decided on having one hour Sprint Planning meetings for four-week Sprints. This decision is based on the fact that this project is not the main project of the team members. As the backlog refinement is done during the Sprint Planning we make use of the planningpoker.com tool for the estimation of the tasks.

+
+
+
+Screenshot of planningpoker.com +
+
Figure 1. Screenshot of planningpoker.com during Sprint 1 Planning
+
+
+

During the Sprint Planning meeting the team receives support from Devon colleagues outside the development. This feedback helps the team to focus on important functionalities and task by keeping the eyes on the overall aim which is to have a working application by the end of June 2017.

+
+
+
+
+

Sprint Review

+
+
+

The Sprint Review meetings are time boxed to one hour for the four week Sprint. Within the Sprint Review meeting the team plans to do a retrospective of the finished Sprint. As well as it is done during the Sprint Planning the team receives support from Devon colleagues.

+
+
+
+
+

Sprint Retrospective

+
+
+

For this project the team aligned on not having a specific Sprint Retrospective meeting. The team is going to have a retrospective of a finished Sprint during the Sprint Review.

+
+
+
+
+

Daily Stand-ups

+
+
+

The team aligned on having two weekly Stand-up meetings instead of a Daily Stand-up meeting. In comparison with the time boxed length of 15mins described in the CAF for this project the team extended the Stand-up meeting to 30mins. The content of the meetings remains the same.

+
+
+
+
+

Backlog refinement

+
+
+

The team decided that the backlog refinement meeting is part of the Sprint Planning meeting.

+
+
+
+
+

1.3 Establish Product Backlog

+
+
+

For the My Thai Star project the team decided on using the JIRA agile documentation which is one of the widely used agile tools. JIRA is equipped with several useful tools regarding the agile software development (e.g. Scrum-Board). One of the big advantages of JIRA are the extensive configuration and possibilities to personalize.

+
+
+

With having a list of the Epics and User Stories for the My Thai Star development in GitHub, the team transferred the User Stories into the JIRA backlog as it is shown in the screenshot below. All User Stories are labeled colorfully with the related Epic which shapes the backlog in clearly manner.

+
+
+
+Screenshot of planningpoker.com +
+
Figure 2. Screenshot of the JIRA backlog during Sprint 2
+
+
+

We decided on working with Sub-task as a single user story comprised a number of single and separated tasks. Another benefit of working with sub-task is that every single sub-task can be assigned to a single team member whereas a user story can only be assigned to one team member. By picking single sub-task the whole process of a user story is better organized.

+
+
+
+Screenshot of Sub-tasks +
+
Figure 3. Screenshots of Sub-tasks during Sprint 2
+
+
+
+
+

2. My Thai Star – Agile Diary

+
+
+

In parallel to the Diary Ideation we use this Agile Diary to document our Scrum events. The target of this diary is to describe the differences to the Scrum methodology as well as specific characteristics of the project. We also document the process on how we approach the Scrum methodology over the length of the project.

+
+
+
+
+

24.03.2017 Sprint 1 Planning

+
+
+

Within the Sprint 1 Planning we used planning poker.com for the estimation of the user stories. The estimation process usually is part of the backlog refinement meeting. Regarding the project circumstances we decided to estimate the user stories during the Sprint Planning. Starting the estimation process we noticed that we had to align our interpretation of the estimation effort as these story points are not equivalent to a certain time interval. The story points are relative values to compare the effort of the user stories. With this in mind we proceeded with the estimation of the user stories. We decided to start Sprint 1 with the following user stories and the total amount of 37 story points: +• ICSDSHOW-2 Create invite for friends (8 Story Points) +• ICSDSHOW-4 Create reservation (3) +• ICSDSHOW-5 Handle invite (3) +• ICSDSHOW-6 Revoke accepted invite (5) +• ICSDSHOW-9 Cancel invite (3) +• ICSDSHOW-11 Filter menu (5) +• ICSDSHOW-12 Define order (5) +• ICSDSHOW-13 Order the order (5) +As the Sprint Planning is time boxed to one hour we managed to hold this meeting within this time window.

+
+
+
+
+

27.04.2017 Sprint 1 Review

+
+
+

During the Sprint 1 Review we had a discussion about the data model proposal. For the discussion we extended this particular Review meeting to 90min. As this discussion took almost 2/3 of the Review meeting we only had a short time left for our review of Sprint 1. For the following scrum events we decided to focus on the primary target of these events and have discussions needed for alignments in separate meetings. +Regarding the topic of splitting user stories we had the example of a certain user story which included a functionality of a twitter integration (ICSDSHOW-17 User Profile and Twitter integration). As the twitter functionality could not have been implemented at this early point of time we thought about cutting the user story into two user stories. We aligned on mocking the twitter functionality until the dependencies are developed in order to test the components. As this user story is estimated with 13 story points it is a good example for the question whether to cut a user story into multiple user stories or not. +Unfortunately not all user stories of Sprint 1 could have been completed. Due this situation we discussed on whether pushing all unfinished user stories into the status done or moving them to Sprint 2. We aligned on transferring the unfinished user stories into the next Sprint. During the Sprint 1 the team underestimated that a lot of holidays crossed the Sprint 1 goals. As taking holidays and absences of team members into consideration is part of a Sprint Planning we have a learning effect on setting a Sprint Scope.

+
+
+
+
+

03.05.2017 Sprint 2 Planning

+
+
+

As we aligned during the Sprint 1 Review on transferring unfinished user stories into Sprint 2 the focus for Sprint 2 was on finishing these transferred user stories. During our discussion on how many user stories we could work on in Sprint 2 we needed to remind ourselves that the overall target is to develop an example application for the devonfw. Considering this we aligned on a clear target for Sprint 2: To focus on finishing User Stories as we need to aim for a practicable and realizable solution. Everybody aligned on the aim of having a working application at the end of Sprint 2. +For the estimation process of user stories we again made use of planningpoker.com as the team prefers this “easy-to-use” tool. During our second estimation process we had the situation in which the estimated story points differed strongly from one team member to another. In this case the team members shortly explained how they understood and interpreted the user story. It turned out that team members misinterpreted the user stories. With having this discussion all team members got the same understanding of the specific functionality and scope of a user story. After the alignment the team members adjusted their estimations. +Beside this need for discussion the team estimated most of the user stories with very similar story points. This fact shows the increase within the effort estimation for each team member in comparison to Sprint 1 planning. Over the short time of two Sprint planning the team received a better understanding and feeling for the estimation with story points.

+
+
+
+
+

01.06.2017 Sprint 2 Review

+
+
+

As our Sprint 1 Review four weeks ago was not completely structured like a Sprint Review meeting we focused on the actual intention of a Sprint Review meeting during Sprint 2 Review. This means we demonstrated the completed and implemented functionalities with screen sharing and the product owner accepted the completed tasks. +Within the User Story ICSDSHOW-22 “See all orders/reservations” the functionality “filtering the list by date” could not be implemented during Sprint 2. The team was unsure on how to proceed with this task. One team member added that especially in regards of having a coherent release, implementing less but working functionalities is much better than implementing more but not working functionalities. For this the team reminded itself focusing on completing functionalities and not working straight to a working application.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/angular-ci.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/angular-ci.html new file mode 100644 index 00000000..94e9b036 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/angular-ci.html @@ -0,0 +1,630 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular CI

+
+
+

The Angular client-side of My Thai Star is going to have some specific needs for the CI-CD Pipeline to perform mandatory operations.

+
+
+
+
+

Pipeline

+
+
+

The Pipeline for the Angular client-side is going to be called MyThaiStar_FRONT-END_BUILD. It is located in the PL instance, under the MTS folder (as previously explained). It is going to follow a process flow like this one:

+
+
+
+angular pipeline flow +
+
+
+

Each of those steps is called a stage in the Jenkins context. Let’s see what those steps mean in the context of the Angular application:

+
+
+
    +
  1. +

    Declarative: Checkout SCM

    +
    +

    Retrieves the project from the GitHub repository in which it is located. This step is not defined directly in our pipeline, but as it is loaded from the repository this step should always be done at the beginning.

    +
    +
    +
    +pipeline config +
    +
    +
  2. +
  3. +

    Declarative: Tool Install

    +
    +

    The Pipeline needs some Tools to perform some operations with the Angular project. This tool is a correct version of NodeJS (10.17.0 LTS) with Yarn installed as a global package.

    +
    +
    +
    +
    tools {
    +    nodejs "NodeJS 10.14.0"
    +}
    +
    +
    +
  4. +
  5. +

    Loading Custom Tools

    +
    +

    The Pipeline also needs a browser in order to execute the tests, so in this step the chrome-stable will be loaded. We will use it in a headless mode.

    +
    +
    +
    +
    tool chrome
    +
    +
    +
  6. +
  7. +

    Fresh Dependency Installation

    +
    +

    The script $ yarn does a package installation. As we always clean the workspace after the pipeline, all packages must be installed in every execution.

    +
    +
  8. +
  9. +

    Code Linting

    +
    +

    This script executes a linting process of TypeScript. Rules can be defined in the tslint.json file of the project. It throws an exception whenever a file contains a non-compliant piece of code.

    +
    +
  10. +
  11. +

    Execute Angular tests

    +
    +

    The CI testing of the Angular client is different from the standard local testing (adapted to CI environments, as specified in the Adaptation section of the document). This script just executes the following commands:

    +
    +
    +
    +
    ng test --browsers ChromeHeadless --watch=false
    +
    +
    +
  12. +
  13. +

    Check dependencies

    +
    +

    Before continuing, we print the result of yarn audit. It shows the vulnerabilities in the dependencies. It does not process the response. The purpose is only to track the result of the command.

    +
    +
    +
    +
    yarn audit
    +
    +
    +
  14. +
  15. +

    SonarQube code analysis

    +
    +

    The script loads and executes the tool sonar-scanner. This tool is loaded here because it’s not used in any other part of the pipeline. The sonar-scanner will take all code, upload it to SonarQube and wait until SonarQube sends us a response with the quality of our code. If the code does not pass the quality gate, the pipeline will stop at this point.

    +
    +
  16. +
  17. +

    Build Application

    +
    +

    The building process of the Angular client would result in a folder called /dist in the main Angular’s directory. That folder is the one that is going to be served afterwards as an artifact. This process has also been adapted to some Deployment needs. This building script executes the following:

    +
    +
    +
    +
    ng build --configuration=docker
    +
    +
    +
  18. +
  19. +

    Deliver application into Nexus

    +
    +

    Once the scripts produce the Angular artifact (/dist folder), it’s time to package it and store into nexus.

    +
    +
  20. +
  21. +

    Declarative: Post Actions

    +
    +

    At the end, this step is always executed, even if a previous stage fails. We use this step to clean up the workspace for future executions.

    +
    +
    +
    +
    post {
    +    always {
    +        cleanWs()
    +    }
    +}
    +
    +
    +
  22. +
+
+
+
+
+

Adjustments

+
+
+

The Angular project Pipeline needed some "extra" features to complete all planned processes. Those features resulted in some additions to the project.

+
+
+
+
+

Pipeline Environment

+
+
+

In order to easily reuse the pipeline in other angular projects, all variables have been defined in the block environment. All variables have the default values that Production Line uses, so if you’re going to work in production line you won’t have to change anything. Example:

+
+
+
+
environment {
+    // Script for build the application. Defined at package.json
+    buildScript = 'build --configuration=docker'
+    // Script for lint the application. Defined at package.json
+    lintScript = 'lint'
+    // Script for test the application. Defined at package.json
+    testScript = 'test:ci'
+    // Angular directory
+    angularDir = 'angular'
+    // SRC folder. It will be angularDir/srcDir
+    srcDir = 'src'
+    // Name of the custom tool for chrome stable
+    chrome = 'Chrome-stable'
+
+    // SonarQube
+    // Name of the SonarQube tool
+    sonarTool = 'SonarQube'
+    // Name of the SonarQube environment
+    sonarEnv = "SonarQube"
+
+    // Nexus
+    // Artifact groupId
+    groupId = 'com.devonfw.mythaistar'
+    // Nexus repository ID
+    repositoryId= 'pl-nexus'
+    // Nexus internal URL
+    repositoryUrl = 'http://nexus3-core:8081/nexus3/repository/maven-snapshots'
+    // Maven global settings configuration ID
+    globalSettingsId = 'MavenSettings'
+    // Maven tool id
+    mavenInstallation = 'Maven3'
+}
+
+
+
+
+
+

== Description

+
+
+
    +
  • +

    build Script: script for build the application. It must be defined at package.json.

    +
    +

    Example (package.json):

    +
    +
    +
    +
    {
    +    "name": "mythaistar-restaurant",
    +    ...
    +    "scripts": {
    +        ...
    +        "build": "ng build",
    +        ...
    +    }
    +    ...
    +}
    +
    +
    +
    +

    This will be used as follows:

    +
    +
    +
    +
    sh """yarn ${buildScript}"""
    +
    +
    +
  • +
  • +

    lint Script: Script for lint the application. Defined at package.json

    +
    +

    Example (package.json):

    +
    +
    +
    +
    {
    +    "name": "mythaistar-restaurant",
    +    ...
    +    "scripts": {
    +        ...
    +        "lint": "ng lint",
    +        ...
    +    }
    +    ...
    +}
    +
    +
    +
    +

    This will be used as follows:

    +
    +
    +
    +
    sh """yarn ${lintScript}"""
    +
    +
    +
  • +
  • +

    test Script: Script for test the application. Defined at package.json

    +
    +

    Example (package.json):

    +
    +
    +
    +
    {
    +    "name": "mythaistar-restaurant",
    +    ...
    +    "scripts": {
    +        ...
    +        "test:ci": "npm run postinstall:web && ng test --browsers ChromeHeadless --watch=false",
    +        ...
    +    }
    +    ...
    +}
    +
    +
    +
    +

    This will be used as follows:

    +
    +
    +
    +
    sh """yarn ${testScript}"""
    +
    +
    +
  • +
  • +

    angular-Dir: Relative route to angular application. In My Thai Star this is the angular folder. The actual directory (.) is also allowed.

    +
    +
    +angular directory +
    +
    +
  • +
  • +

    srcDir: Directory where you store the source code. For angular applications the default value is src

    +
    +
    +src directory +
    +
    +
  • +
  • +

    chrome: Since you need a browser to run your tests, we must provide one. This variable contains the name of the custom tool for google chrome.

    +
    +
    +chrome installation +
    +
    +
  • +
  • +

    sonar-Tool: Name of the SonarQube scanner installation.

    +
    +
    +sonar scanner +
    +
    +
  • +
  • +

    sonar-Env: Name of the SonarQube environment. SonarQube is the default value for PL.

    +
    +
    +sonar env +
    +
    +
  • +
  • +

    group-Id: Group id of the application. It will be used to store the application in nexus3

    +
    +
    +nexus3 groupid +
    +
    +
  • +
  • +

    repository-Id: Id of the nexus3 repository. It must be defined at maven global config file.

    +
    +
    +nexus3 id +
    +
    +
  • +
  • +

    repository URL: The URL of the repository.

    +
  • +
  • +

    global Settings Id: The id of the global settings file.

    +
    +
    +nexus3 global config +
    +
    +
  • +
  • +

    maven Installation: The name of the maven tool.

    +
    +
    +maven tool +
    +
    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/angular-design.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/angular-design.html new file mode 100644 index 00000000..9c656656 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/angular-design.html @@ -0,0 +1,804 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular design

+
+ +
+
+
+

Introduction

+
+
+

MyThaiStar client side has been built using latest frameworks, component libraries and designs:

+
+
+

Angular 4 as main front-end Framework. https://angular.io/

+
+
+

Angular/CLI 1.0.5 as Angular tool helper. https://github.com/angular/angular-cli

+
+
+

Covalent Teradata 1.0.0-beta4 as Angular native component library based on Material Design. https://teradata.github.io/covalent/#/

+
+
+

Angular/Material2 1.0.0-beta5 used by Covalent Teradata. https://github.com/angular/material2

+
+
+

Note: these dependencies are evolving at this moment and, if possible, we are updating them in the project.

+
+
+
+
+

Basic project structure

+
+
+

The project is using the basic project seed that Angular/CLI provides with “ng new <project name>”. Then the app folder has been organized as Angular recommends and goes as follows:

+
+
+
    +
  • +

    app

    +
    +
      +
    • +

      components

      +
      +
        +
      • +

        sub-components

        +
      • +
      • +

        shared

        +
      • +
      • +

        component files

        +
      • +
      +
      +
    • +
    • +

      main app component

      +
    • +
    +
    +
  • +
  • +

    assets folder

    +
  • +
  • +

    environments folder

    +
  • +
  • +

    rest of angular files

    +
  • +
+
+
+

This structure can be shown in the following example image:

+
+
+
+folder organization +
+
+
+
+
+

Main Views and components

+
+
+

List of components that serve as a main view to navigate or components developed to make atomically a group of functionalities which given their nature, can be highly reusable through the app.

+
+
+
+routes +
+
+
+

Note: no-name-route corresponds to any URL the user entered that does not exist; it redirects to Home-Component.

+
+
+
+
+

Public area

+
+ +
+
+
+

== App Component

+
+
+

Contains the components that are on top of all views, including:

+
+
+
+
+

== Order sidenav

+
+
+

Sidenav where selected orders are displayed with their total price and some comments.

+
+
+
+
+

== Navigation sidenav (only for mobile)

+
+
+

The purpose of this sidenav is to let the user navigate through the app when the screen is too small to show the navigation buttons on the header.

+
+
+
+
+

== Header

+
+
+

It contains the title, and some other basic functions regarding open and close sidenavs.

+
+
+
+
+ +
+
+

At the end of the page that shows only when open on desktop.

+
+
+
+
+

== Home-Component

+
+
+

Main view that shows up when the app initializes.

+
+
+
+
+

== Menu-Component

+
+
+

View where the users can view, filter and select the dishes (with their extras) they want to order. It contains a component for each menu entry:

+
+
+
+
+

== Menu-card

+
+
+

This component composes all the data of a dish in a card. Component made to display indeterminate number of dishes easily.

+
+
+
+
+

== Book Table Component

+
+
+

View to book a table on a given date with a given number of attendees or create a reservation with a number of invitations via email.

+
+
+
+
+

== Book-table-dialog

+
+
+

Dialog which opens as a result of fulfilling the booking form, it displays all the data of the booking attempt, if everything is correct, the user can send the information or cancel if something is wrong.

+
+
+
+
+

== Invitation-dialog

+
+
+

Dialog which opens as a result of fulfilling the invitation form, it displays all the data of the booking with friends attempt, if everything is correct, the user can send the information or cancel if something is wrong.

+
+
+
+
+

== User Area

+
+
+

Group of dialogs with the proposal of giving some functionalities to the user, as login, register, change password or connect with Twitter.

+
+
+
+
+

== Login-dialog

+
+
+

Dialog with a tab to navigate between login and register.

+
+
+
+
+

== Password-dialog

+
+
+

Functionality reserved to already logged users, in this dialog the user can change freely their password.

+
+
+
+
+

== Twitter-dialog

+
+
+

Dialog designed specifically to connect your user account with Twitter.

+
+
+
+
+

Waiter cockpit area

+
+
+

Restricted area to workers of the restaurant, here we can see all information about booked tables with the selected orders and the reservations with all the guests and their acceptance or decline of the event.

+
+
+
+
+

== Order Cockpit Component

+
+
+

Data table with all the booked tables and a filter to search them, to show more info about that table you can click on it and open a dialog.

+
+
+
+
+

== Order-dialog

+
+
+

Complete display of data regarding the selected table and its orders.

+
+
+
+
+

== Reservation Cockpit Component

+
+
+

Data table with all the reservations and a filter to search them, to show more info about that table you can click on it and open a dialog.

+
+
+
+
+

== Reservation-dialog

+
+
+

Complete display of data regarding the selected table and its guests.

+
+
+
+
+

Email Management

+
+
+

As the application sends emails to both guests and hosts, we chose a URL-based approach where the email contains a button with a URL to a service in the app and a token; the front-end reads that token and, depending on the URL, will redirect to one service or another. For example:

+
+
+
+
`http://localhost:4200/booking/cancel/CB_20170605_8fb5bc4c84a1c5049da1f6beb1968afc`
+
+
+
+

This URL will tell the app that is a cancellation of a booking with the token CB_20170605_8fb5bc4c84a1c5049da1f6beb1968afc. The app will process this information, send it to back-end with the correct headers, show the confirmation of the event and redirect to home page.

+
+
+

The main cases at the moment are:

+
+
+
+
+

== Accept Invite

+
+
+

A guest accepts an invitation sent by a host. They will receive another email to decline if they change their mind later on.

+
+
+
+
+

== Reject Invite

+
+
+

A guest declines the invitation.

+
+
+
+
+

== Cancel Reservation

+
+
+

A host cancels the reservation; everybody who has accepted or has not yet answered will receive an email notifying them that this event is canceled. Also, all the orders related to this reservation will be removed.

+
+
+
+
+

== Cancel Orders

+
+
+

When you have a reservation, you will be assigned a token; with that token you can save your order in the restaurant. Once sent, you will receive an email confirming the order and the possibility to remove it.

+
+
+
+
+

Services and directives

+
+
+

Services are where all the main logic between components of that view should be. This includes calling a remote server, composing objects, calculate prices, etc.

+
+
+

Directives are a single functionality that are related to a component.

+
+
+

As it can be seen in the basic structure, every view that has a minimum of logic or need to call a server has its own service located in the shared folder.

+
+
+

Also, services and directives can be created to compose a reusable piece of code that will be reused in some parts of the code:

+
+
+
+
+

Price-calculator-service

+
+
+

This service located in the shared folder of sidenav contains the basic logic to calculate the price of a single order (with all the possibilities) and to calculate the price of a full list of orders for a table. As this is used in the sidenav and in the waiter cockpit, it has been exported as a service to be imported where needed and easily testable.

+
+
+
+
+

Authentication

+
+
+

Authentication services serves as a validator of roles and login and, at the same time, stores the basic data regarding security and authentication.

+
+
+

Main task of this services is to provide visibility at app level of the current user information:

+
+
+
    +
  • +

    Check if the user is logged or not.

    +
  • +
  • +

    Check the permissions of the current user.

    +
  • +
  • +

    Store the username and the JWT token.

    +
  • +
+
+
+
+
+

Snack Service

+
+
+

Service created to serve as a factory of Angular Material Snackbars, which are used commonly through the app. This service accepts some parameters to customize the snackBar and opens it with this parameters.

+
+
+
+
+

Window Service

+
+
+

For responsiveness reasons, the dialogs have to accept a width parameter to adjust to screen width and this information is given by Window object, as it is a good practice to have it in an isolated service, which also calculates the width percentage to apply on the dialogs.

+
+
+
+
+

Equal-validator-directive

+
+
+

This directive located in the shared folder of userArea is used in 2 fields to make sure they have the same value. This directive is used in confirm password fields in register and change password.

+
+
+
+
+

Mock Back-end

+
+
+

Developing against a mock while the real back-end was being developed let us make a more realistic application and made the adaptation easier once the back-end was able to be connected and called. Its structure is as follows:

+
+
+
+back end +
+
+
+

Contains the three main groups of functionalities in the application. Every group is composed by:

+
+
+
    +
  • +

    An interface with all the methods to implement.

    +
  • +
  • +

    A service that implements that interface, the main task of this service is to choose between real back-end and mock back-end depending on an environment variable.

    +
  • +
  • +

    Mock back-end service which implements all the methods declared in the interface using mock data stored in a local file and mainly uses Lodash to operate the arrays.

    +
  • +
  • +

    Real back-end service works as Mock back-end but in this case the methods call for server rest services through Http.

    +
  • +
+
+
+
+
+

Booking

+
+
+

The booking group of functionalities manages the calls to reserve a table with a given time and assistants or with guests, get reservations filtered, accept or decline invitations or cancel the reservation.

+
+
+
+
+

Orders

+
+
+

Management of the orders, including saving, filtering and cancel an order.

+
+
+
+
+

Dishes

+
+
+

The dishes group of functionalities manages the calls to get and filter dishes.

+
+
+
+
+

Login

+
+
+

Login manages the userArea logic: login, register and change password.

+
+
+
+
+

Security

+
+
+

My Thai Star security is composed by two main security services:

+
+
+
+
+

Auth-guard

+
+
+

Front-end security approach, this service implements an interface called CanActivate that comes from the angular/router module. The CanActivate interface forces you to implement a canActivate() function which returns a Boolean. +This service checks, with the data stored by the Auth-Service, if the user is logged in and if he has enough permission to access the waiter cockpit. This prevents a forbidden user from accessing the waiter cockpit just by editing the URL in the browser.

+
+
+
+
+

JWT

+
+
+

JSON Web Token consists of a token that is generated by the server when the user logs in. Once provided, the token has to be included in an Authentication header on every Http call to the rest service, otherwise the call will be forbidden. +JWT also has an expiration date and a role checking, so if a user has not enough permissions or keeps logged for a long certain amount of time that exceeds this expiration date, the next time he calls for a service call, the server will return an error and forbid the call. You can log again to restore the token.

+
+
+
+
+

== HttpClient

+
+
+

To implement this Authorization header management, an HttpClient service has been implemented. +This services works as an envelope of Http, providing some more functionalities, likes a header management and an automatically management of a server token error in case the JWT has expired, corrupted or not permitted.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/angular-testing.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/angular-testing.html new file mode 100644 index 00000000..b0775cd3 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/angular-testing.html @@ -0,0 +1,453 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular testing

+
+
+
+testing +
+
+
+

MyThaiStar testing is made using Angular default testing environment and syntax language: Karma and Jasmine

+
+
+

To test an element of the application, you indicate that tests are a special type of file with the extension .spec.ts; then, in the MyThaiStar angular/CLI config you can notice that there is an array with only one entry, Karma, which at the same time has one entry pointing to Karma.config.js.

+
+
+

In the configuration of Karma we indicate which syntax language we are going to use (currently Jasmine as said before) between some other configurations, it is remarkable the last one: browsers. By default, the only available browser is chrome, that is because Karma works opening a chrome view to run all the tests, in that same window, Karma shows the result or errors of the test run. But we can add some other browser to adjust to our necessities, for example, in some automatic processes that run from console, it is not an option to open a chrome window, in that case, MyThaiStar used PhantomJS and ChromeHeadless.

+
+
+

Taking all of this into account, to run the test in MyThaiStar we need to move to project root folder and run this command : ng test --browser <browser>

+
+
+
+
+

==

+
+
+

If you run just ng test it will run the three browser options simultaneously, giving as a result three test runs and outputs; it can cause timeouts and unwanted behaviors. If you want a shortcut to run the tests with a chrome window you can just run yarn test, so we really encourage you not to use just ng test. +== ==

+
+
+

Here we are going to see how Client side testing of MyThaiStar has been done.

+
+
+
+
+

Testing Components

+
+
+

Angular components were created using angular/CLI ng create component so they already come with a spec file to test them. The only thing left to do is to add the providers and imports needed for the component to work as the component itself; once this is done, the most basic test is to be sure that all the dependencies and the component itself can be correctly created.

+
+
+

As an example, this is the spec.ts of the menu view component:

+
+
+
+
all the imports...
+
+describe('MenuComponent', () => {
+  let component: MenuComponent;
+  let fixture: ComponentFixture<MenuComponent>;
+
+  beforeEach(async(() => {
+    TestBed.configureTestingModule({
+      declarations: [ MenuComponent, MenuCardComponent ],
+      providers: [SidenavService, MenuService, SnackBarService],
+      imports: [
+        BrowserAnimationsModule,
+        BackendModule.forRoot({environmentType: 0, restServiceRoot: 'v1'}),
+        CovalentModule,
+      ],
+    })
+    .compileComponents();
+  }));
+
+  beforeEach(() => {
+    fixture = TestBed.createComponent(MenuComponent);
+    component = fixture.componentInstance;
+    fixture.detectChanges();
+  });
+
+  it('should create', () => {
+    expect(component).toBeTruthy();
+  });
+});
+
+
+
+

First we declare the component to be tested and a Fixture object, then, we configure the testingModule right in the same way we could configure the MenuModule with the difference here that tests always have to use the mock back-end because we do not want to really depend on a server to test our components.

+
+
+

Once configured the test module, we have to prepare the context of the test, in this case we create the component, that is exactly what is going on in the beforeEach() function.

+
+
+

Finally, we are ready to use the component and its fixture to check if the component has been correctly created.

+
+
+

At this moment this is the case for most of the components, in the future, some work would be applied on this matter to have a full testing experience in MyThaiStar components.

+
+
+
+
+

Dialog components

+
+
+

Dialog components are in a special category because they cannot be tested normally. In the way Material implements the opening of dialogs, you have to create a component that will load into a dialog; to tell the module to load these components when needed, they have to be added into a special array category: EntryComponents. So, to test them, we need to import them in the test file as well.

+
+
+

Also, the testing code to open the component is a bit different too:

+
+
+
+
...
+  beforeEach(() => {
+    dialog = TestBed.get(MdDialog);
+    component = dialog.open(CommentDialogComponent).componentInstance;
+  });
+...
+
+
+
+

That is right, the beforeEach() function is slightly different from the example above; in this case we have to force the test to know that the component is only displayed in a dialog, so we have to open a dialog with this component in order to access it.

+
+
+
+
+

Testing Services

+
+
+

As well as components, services can be tested too, actually, they are even more necessary to be tested because they have inside more complex logic and data management.

+
+
+

As an example of testing services I am going to use a well-crafted service, with a specific purpose and with its logic completely tested, the price-calculator service:

+
+
+
+
...
+
+describe('PriceCalculatorService', () => {
+
+  beforeEach(() => {
+    TestBed.configureTestingModule({
+      providers: [PriceCalculatorService],
+    });
+  });
+
+  it('should be properly injected', inject([PriceCalculatorService], (service: PriceCalculatorService) => {
+    expect(service).toBeTruthy();
+  }));
+
+  describe('check getPrice method', () => {
+
+    it('should calculate price for single order without extras', inject([PriceCalculatorService], (service: PriceCalculatorService) => {
+      const order: OrderView = {
+        dish: {
+          id: 0,
+          price: 12.50,
+          name: 'Order without extras',
+        },
+        orderLine: {
+          comment: '',
+          amount: 1,
+        },
+        extras: [],
+      };
+
+      expect(service.getPrice(order)).toEqual(order.dish.price);
+    }));
+...
+
+
+
+

In service tests, we have to inject the service in order to use it; then we can define some initializing contexts to test if the functions of the service return the expected values. In the example we can see how an imaginary order is created and the function getPrice() is expected to correctly calculate the price of that order.

+
+
+

In this same test file you can find some more test regarding all the possibilities of use in that services: orders with and without extras, single order, multiple orders and so on.

+
+
+

For some services, as well as the components, we have only tested that they are correctly created and their dependencies properly injected; in the future, these services will be fully covered by tests.

+
+
+
+
+

Testing in a CI environment

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/clientserver-ci.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/clientserver-ci.html new file mode 100644 index 00000000..32cb5934 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/clientserver-ci.html @@ -0,0 +1,296 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Client and Server CI - deprecated

+
+
+

The fact that there are 2 different pipelines dedicated to 2 different technologies ( my_thai_star_angular and my_thai_star_java ) does not mean that both cannot be fused into another, different one. That is the case of the MTS pipeline. Basically the greatest difference is the way of deploying at the end of it. Both single-part pipelines use the first deployment strategy (deploying independent Docker containers) but this one uses the second one: Docker Compose.

+
+
+
+
+

Pipeline

+
+
+

The flow of processes is going to be almost exactly a merge of other 2 pipelines.

+
+
+
+clientserver pipeline flow +
+
+
+

The result is going to be exactly the same at the end of MTS. It will be possible to know if any aspect of both Angular client-side and Java server-side fails and there will be a complete application deployed in [serverPath]:8091 (client) and [serverPath]:9091.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/deployment-pipelines.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/deployment-pipelines.html new file mode 100644 index 00000000..7890899f --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/deployment-pipelines.html @@ -0,0 +1,452 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Deployment Pipelines

+
+
+

As PL does not support deployments, we have created separate pipelines for this purpose. Those pipelines are: MyThaiStar_REVERSE-PROXY_DEPLOY, MyThaiStar_FRONT-END_DEPLOY and MyThaiStar_SERVER_DEPLOY.

+
+
+

The application will be deployed using docker on a remote machine. The architecture is as follows:

+
+
+
+deployment arch +
+
+
+

The parts to be deployed are: an NGINX reverse proxy, the java application and the angular application.

+
+
+
+
+

MyThaiStar_SERVER_DEPLOY Pipeline

+
+
+

Deploys on the server the Java part of My Thai Star.

+
+
+
+
+

Parameters

+
+
+
    +
  • +

    registryUrl: The URL to the docker registry where the image is stored.

    +
  • +
  • +

    registryCredentialsId: Credentials to publish/download images from registry.

    +
  • +
  • +

    dockerNetwork: Network of your My Thai Star application. You can deploy several versions of MTS in the same server by changing the dockerNetwork.

    +
  • +
  • +

    VERSION: The version that you want to deploy.

    +
  • +
+
+
+
+
+

Pipeline steps

+
+
+
    +
  • +

    Create docker network: Create the docker network with the name provided as parameter.

    +
  • +
  • +

    Deploy new image: Deploy a new java container. If it already exists, it first deletes the previous one.

    +
  • +
+
+
+
+
+

MyThaiStar_FRONT-END_DEPLOY

+
+
+

Deploys on the server the Angular part of My Thai Star

+
+
+
+
+

Parameters

+
+
+
    +
  • +

    registryUrl: The URL to the docker registry where the image is stored.

    +
  • +
  • +

    registryCredentialsId: Credentials to publish/download images from registry.

    +
  • +
  • +

    dockerNetwork: Network of your My Thai Star application. You can deploy several versions of MTS in the same server by changing the dockerNetwork.

    +
  • +
  • +

    VERSION: The version that you want to deploy.

    +
  • +
+
+
+
+
+

Pipeline steps

+
+
+
    +
  • +

    Create docker network: Create the docker network with the name provided as parameter.

    +
  • +
  • +

    Deploy new image: Deploy a new angular container. If it already exists, it first deletes the previous one.

    +
  • +
+
+
+
+
+

MyThaiStar_REVERSE-PROXY_DEPLOY Pipeline

+
+
+ + + + + +
+ + +As reverse proxy connects to the Java and Angular application, both must be deployed before you execute this pipeline. +
+
+
+

The MyThaiStar_REVERSE-PROXY_DEPLOY pipeline will deploy the My Thai Star reverse proxy into a remote machine using docker.

+
+
+
+
+

Parameters

+
+
+
    +
  • +

    registryUrl: The URL to the docker registry where the image is stored.

    +
  • +
  • +

    registryCredentialsId: Credentials to publish/download images from registry.

    +
  • +
  • +

    buildReverseProxy: If yes, it will build and publish a new version of reverse-proxy.

    +
  • +
  • +

    port: Port of the MTS application. You must ensure that this port is available in the deployment machine.

    +
  • +
  • +

    dockerNetwork: Network of your My Thai Star application. You can deploy several versions of MTS in the same server by changing the port and the dockerNetwork.

    +
  • +
  • +

    VERSION: The version that you want to deploy.

    +
  • +
+
+
+
+
+

Pipeline steps

+
+
+
    +
  • +

    Create docker network: Create the docker network with the name provided as parameter.

    +
  • +
  • +

    Create the Docker image: If build-Reverse-Proxy is enabled, this step will create a new docker image and publish it to the docker registry.

    +
  • +
  • +

    Deploy new image: Deploy a new reverse proxy container. If it already exists, it first deletes the previous one.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/deployment-strategies.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/deployment-strategies.html new file mode 100644 index 00000000..f1b7d602 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/deployment-strategies.html @@ -0,0 +1,415 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Deployment Strategies

+
+
+

In this chapter different ways of deploying My Thai Star are explained. Everything will be based on Docker.

+
+
+
+
+

Independent Docker containers

+
+
+

The first way of deployment will use isolated Docker containers. That means that if the client-side container is deployed, it does not affect the server-side container’s life cycle and vice versa.

+
+
+

Let’s show how the containers will behave during their life cycle.

+
+
+
    +
  • +

    0) Copy everything you need into the Deployment Server directory

    +
  • +
  • +

    1) Remove existing container (Nginx or Tomcat)

    +
    +
    +container1 +
    +
    +
  • +
  • +

    2) Run new one from the Docker images collection of the external Deployment Server.

    +
    +
    +container2 +
    +
    +
  • +
  • +

    3) Add the artifact /dist to the "deployable" folder of the Docker container (/usr/share/nginx/html/)

    +
    +
    +container3 +
    +
    +
    +

    Now, let’s see how it’s being executed in the command line (simplified due to documentation purposes). The next block of code represents what is inside of the last stage of the Pipeline.

    +
    +
    +
    +
    sshagent (credentials: ['my_ssh_token']) {
    +    sh """
    +        // Copy artifact from workspace to deployment server
    +
    +        // Manage container:
    +        docker rm -f [mts-container]
    +        docker run -itd --name=[mts-container] [base_image]
    +        docker exec [mts-container] bash -C \\"rm [container_deployment_folder]/*\\"
    +        docker cp [artifact] [mts-container]:[container_deployment_folder]
    +    """
    +}
    +
    +
    +
    +

    For every operation performed in the external Deployment Server, it is necessary to define where those commands are going to be executed. So, for each one of previous docker commands, this should appear before:

    +
    +
    +
    +
    `ssh -o StrictHostKeyChecking=no root@10.40.235.244`
    +
    +
    +
  • +
+
+
+
+
+

Docker Compose

+
+
+

The second way of deployment will be by orchestrating both elements of the application: The Angular client-side and the Java server-side. Both elements will be running in Docker containers as well, but in this case they won’t be independent anymore. Docker Compose will be in charge of keeping both containers up, or to put them down.

+
+
+
+
+

Project adjustment

+
+
+

In order to perform this second way of deployment, some files will be created in the project. The first one is the Dockerfile for the Angular client-side. This file will pull (if necessary) an Nginx Docker image and copy the Angular artifact (/dist folder) inside of the deployment folder of the image. It will be located in the main directory of the Angular client-side project.

+
+
+
+dockerfile angular +
+
+
+

The second file is the Dockerfile for the Java server-side. Its function will be quite similar to the Angular one. It will run a tomcat Docker image and copy the Java artifact (mythaistar.war file) in its deployment folder.

+
+
+
+dockerfile java +
+
+
+

Finally, as long as the docker-compose is being used, a file containing its configuration will be necessary as well. A new folder in the main My Thai Star directory is created, and it’s called /docker. Inside there is just a docker-compose.yml file. It contains all the information needed to orchestrate the deployment process. For example, which port both containers are going to be published on, and so on. This way of deployment will allow the application to be published or not just with one action.

+
+
+
+
docker-compose rm -f            # down
+docker-compose up --build -d    # up fresh containers
+
+
+
+
+docker compose +
+
+
+

Let’s have a look at the file itself:

+
+
+
+
version: '3'
+services:
+  client_compose:
+    build: "angular"
+    ports:
+      - "8091:80"
+    depends_on:
+      - server_compose
+  server_compose:
+    build: "java"
+    ports:
+      - "9091:8080"
+
+
+
+

This Orchestrated Deployment will offer some interesting possibilities for the future of the application.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/deployment.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/deployment.html new file mode 100644 index 00000000..217ca491 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/deployment.html @@ -0,0 +1,377 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Deployment

+
+
+

The main deployment tool used for My Thai Star is Docker.

+
+
+
+docker +
+
+
+

It is a tool to run applications in isolated environments. Those isolated environments will be what we call Docker containers. For instance, no installation of Nginx or Apache Tomcat will be necessary to deploy, because there will be containers that actually have those technologies inside.

+
+
+
+
+

Where will Docker containers be running?

+
+
+

Of course, it is necessary to have an external Deployment Server. Every Docker process will run in it. It will be accessed from Production Line pipelines via SSH. Thus, the pipeline itself will manage the scenario of, if every previous process like testing passes as OK, stop actual containers and create new ones.

+
+
+

This external server will be located in https://mts-devonfw-core.cloud.okteto.net/

+
+
+
+
+

Container Schema

+
+
+

3 Docker containers are being used for the deployment of My Thai Star:

+
+
+
    +
  1. +

    Nginx for the Reverse Proxy

    +
  2. +
  3. +

    tomcat for the Java Server

    +
  4. +
  5. +

    Nginx for the Angular Client

    +
  6. +
+
+
+

The usage of the Reverse Proxy will allow the client to call via /api every single Java Server’s REST operation. Moreover, there will only be 1 port in usage in the remote Docker host, the one mapped for the Reverse Proxy: 8080. +Besides the deployment itself using Nginx and tomcat, both client and server are previously built using NodeJS and maven images. Artifacts produced by them will be pasted in servers' containers using multi-stage docker builds. It will all follow this schema:

+
+
+
+36028242 8998f41c 0d9e 11e8 93b3 6bfe50152bf8 +
+
+
+

This orchestration of all 3 containers will be done by using a docker-compose.yml file. To redirect traffic from one container to another (i.e. reverse-proxy to angular client or angular client to java server) will be done by using, as host names, the service name docker-compose defines for each of them, followed by the internally exposed port:

+
+ +
+ + + + + +
+ + +An implementation using Traefik as reverse proxy instead of NGINX is also available. +
+
+
+
+
+

Run My Thai Star

+
+
+

The steps to run My Thai Star are:

+
+
+
    +
  1. +

    Clone the repository $ git clone https://github.com/devonfw/my-thai-star.git

    +
  2. +
  3. +

    Run the docker compose command: $ docker-compose up

    +
  4. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/future-deployment.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/future-deployment.html new file mode 100644 index 00000000..52f9a086 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/future-deployment.html @@ -0,0 +1,291 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Future Deployment

+
+
+

The My Thai Star project is going to be built in many technologies. Thus, let’s think about one deployment schema that allows the Angular client to communicate to all three back ends: Java, Node and .NET.

+
+
+

As long as Docker containers are being used, it shouldn’t be that hard to deal with this "distributed" deployment. The schema represents 6 Docker containers that will have client-side(s) and server-side(s). Each of 3 Angular client containers (those in red) are going to communicate with different back-ends. So, when the deployment is finished, it would be possible to use all three server-sides just by changing the "port" in the URL.

+
+
+

Let’s see how it would look like:

+
+
+
+deployment schema +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/graphql-design.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/graphql-design.html new file mode 100644 index 00000000..1145345a --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/graphql-design.html @@ -0,0 +1,280 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

GraphQL design

+
+
+

TODO

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/graphql-testing.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/graphql-testing.html new file mode 100644 index 00000000..68a89d0a --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/graphql-testing.html @@ -0,0 +1,280 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

GraphQL testing

+
+
+

TODO

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/java-ci.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/java-ci.html new file mode 100644 index 00000000..65e89b85 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/java-ci.html @@ -0,0 +1,500 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Java CI

+
+
+

The Java server-side of My Thai Star is a devon4j-based application. As Maven and Java 8 are going to be needed, the Pipeline should have those tools available as well.

+
+
+
+
+

Pipeline

+
+
+

This Pipeline is called MyThaiStar_SERVER_BUILD, and it is located exactly in the same PL instance’s folder than MyThaiStar_FRONTEND_BUILD. Let’s see how the Pipeline’s flow behaves.

+
+
+
+java pipeline flow +
+
+
+

Check those Pipeline stages with more detail:

+
+
+
    +
  1. +

    Declarative: Checkout SCM

    +
    +

    Gets the code from https://github.com/devonfw/my-thai-star . This step is not defined directly in our pipeline, but as it is loaded from the repository this step should always be done at the beginning.

    +
    +
  2. +
  3. +

    Declarative: Tool Install

    +
    +

    The My Thai Star application works with JDK11. In this step, if JDK11 is not installed, we install it and then put the JDK folder into PATH.

    +
    +
    +
    +
    tools {
    +  jdk 'OpenJDK11'
    +}
    +
    +
    +
  4. +
  5. +

    Loading Custom Tools

    +
    +

    In this step we load the tools that can not be loaded in the previous step. As My Thai Star is delivered as docker container, in this step we load docker as custom tool.

    +
    +
    +
    +
    tool dockerTool
    +
    +
    +
  6. +
  7. +

    Install dependencies

    +
    +

    This step will download all project dependencies.

    +
    +
    +
    +
    mvn clean install -Dmaven.test.skip=true
    +
    +
    +
  8. +
  9. +

    Unit Tests

    +
    +

    This step will execute the project unit test with maven.

    +
    +
    +
    +
    mvn clean test
    +
    +
    +
  10. +
  11. +

    Dependency Checker

    +
    +

    Execute the OWASP Dependency Checker in order to validate the project dependencies. It will generate a report that can be used in SonarQube

    +
    +
    +
    +
    dependencyCheck additionalArguments: '--project "MTSJ" --scan java/mtsj --format XML', odcInstallation: 'dependency-check'
    +dependencyCheckPublisher pattern: ''
    +
    +
    +
  12. +
  13. +

    SonarQube analysis

    +
    +

    The code is evaluated using the integrated PL instance’s SonarQube. Also, it will wait for the quality gate status. If the status is failing, the pipeline execution will be stopped.

    +
    +
    +
    +
    withSonarQubeEnv(sonarEnv) {
    +    sh "mvn sonar:sonar"
    +}
    +
    +def qg = waitForQualityGate()
    +if (qg.status != 'OK') {
    +    error "Pipeline aborted due to quality gate failure: ${qg.status}"
    +}
    +
    +
    +
  14. +
  15. +

    Deliver application into Nexus

    +
    +

    Store all artifacts into nexus.

    +
    +
    +
    +
    mvn deploy -Dmaven.test.skip=true
    +
    +
    +
  16. +
  17. +

    Create the Docker image

    +
    +

    Create the docker image and then publish the image into a docker registry.

    +
    +
  18. +
+
+
+
+
+

Adjustments

+
+ +
+
+
+

Pipeline Environment

+
+
+

In order to easily reuse the pipeline in other java projects, all variables have been defined in the block environment. All variables have the default values that Production Line uses, so if you’re going to work in production line you won’t have to change anything. Example:

+
+
+
+
environment {
+    // Directory with java project
+    javaDir = 'java/mtsj'
+
+    // SonarQube
+    // Name of the SonarQube environment
+    sonarEnv = "SonarQube"
+
+    // Nexus 3
+    // Maven global settings configuration ID
+    globalSettingsId = 'MavenSettings'
+    // Maven tool id
+    mavenInstallation = 'Maven3'
+
+    // Docker
+    dockerRegistryCredentials = 'nexus-api'
+    dockerRegistryProtocol = 'https://\'
+    dockerTool = 'docker-global'
+}
+
+
+
+
+
+

Description

+
+
+
    +
  • +

    java Dir: Relative route to java application. In My Thai Star this is the java/mtsj folder. The actual directory (.) is also allowed.

    +
    +
    +java directory +
    +
    +
  • +
  • +

    sonar Env: Name of the SonarQube environment. SonarQube is the default value for PL.

    +
  • +
  • +

    global Settings Id: The id of the global settings file. MavenSettings is the default value for PL.

    +
    +
    +nexus3 global config +
    +
    +
  • +
  • +

    maven Installation: The name of the maven tool. Maven3 is the default value for PL.

    +
    +
    +maven tool +
    +
    +
  • +
+
+
+
+
+

Distribution management

+
+
+

The only extra thing that needs to be added to the Java server-side is some information that determines where the artifact of the project is going to be stored in Nexus. This is going to be a section in the main pom.xml file called <distributionManagement>. This section will point to the PL instance’s Nexus. Let’s have a look at it. It’s already configured with the PL default values.

+
+
+
+
<distributionManagement>
+    <repository>
+      <id>pl-nexus</id>
+      <name>PL Releases</name>
+      <url>http://nexus3-core:8081/nexus/content/repositories/maven-releases/</url>
+    </repository>
+    <snapshotRepository>
+      <id>pl-nexus</id>
+      <name>PL Snapshots</name>
+      <url>http://nexus3-core:8081/nexus3/repository/maven-snapshots</url>
+    </snapshotRepository>
+</distributionManagement>
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/java-design.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/java-design.html new file mode 100644 index 00000000..4b0bd78b --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/java-design.html @@ -0,0 +1,1004 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Java design

+
+ +
+
+
+

Introduction

+
+
+

The Java back-end for My Thai Star application is going to be based on:

+
+
+
    +
  • +

    DEVON4J as the Java framework

    +
  • +
  • +

    Devonfw as the Development environment

    +
  • +
  • +

    CobiGen as code generation tool

    +
  • +
+
+
+

To know more details about the above technologies please visit the following documentation:

+
+
+ +
+
+
+
+

Basic architecture details

+
+
+

Following the DEVON4J conventions the Java My Thai Star back-end is going to be developed dividing the application in Components and using a three layers architecture.

+
+
+
+
+

Project modules

+
+
+

Using the DEVON4J approach for the Java back-end project we will have a structure of a Maven project formed by three projects

+
+
+
+project modules +
+
+
+
    +
  • +

    api: Stores all the REST interfaces and corresponding Request/Response objects.

    +
  • +
  • +

    core: Stores all the logic and functionality of the application.

    +
  • +
  • +

    server: Configures the packaging of the application.

    +
  • +
+
+
+

We can automatically generate this project structure using the DEVON4J Maven archetype

+
+
+
+
+

Components

+
+
+

The application is going to be divided in different components to encapsulate the different domains of the application functionalities.

+
+
+
+mtsj components +
+
+
+

As main components we will find:

+
+
+
    +
  • +

    Bookingmanagement: Manages the bookings part of the application. With this component the users (anonymous/logged in) can create new bookings or cancel an existing booking. The users with waiter role can see all scheduled bookings.

    +
  • +
  • +

    Ordermanagement: This component handles the process to order dishes (related to bookings). A user (as a host or as a guest) can create orders (that contain dishes) or cancel an existing one. The users with waiter role can see all ordered orders.

    +
  • +
  • +

    Dishmanagement: This component groups the logic related to the menu (dishes) view. Its main feature is to provide the client with the data of the available dishes but also can be used by other components (Ordermanagement) as a data provider in some processes.

    +
  • +
  • +

    Usermanagement: Takes care of the User Profile management, allowing to create and update the data profiles.

    +
  • +
+
+
+

As common components (that don’t exactly represent an application’s area but provide functionalities that can be used by the main components):

+
+
+
    +
  • +

    Imagemanagement: Manages the images of the application. In a first approach the Dishmanagement component and the Usermanagement component will have an image as part of its data. The Imagemanagement component will expose the functionality to store and retrieve this kind of data.

    +
  • +
  • +

    Mailservice: with this service we will provide the functionality for sending email notifications. This is a shared service between different app components such as bookingmanagement or ordercomponent.

    +
  • +
+
+
+

Other components:

+
+
+
    +
  • +

    Security (will manage the access to the private part of the application using a jwt implementation).

    +
  • +
  • +

    Twitter integration: planned as a Microservice will provide the twitter integration needed for some specific functionalities of the application.

    +
  • +
+
+
+
+
+

Layers

+
+
+
    +
  • +

    Service Layer: this layer will expose the REST API to exchange information with the client applications.

    +
  • +
  • +

    Logic Layer: the layer in charge of hosting the business logic of the application.

    +
  • +
  • +

    Data Access Layer: the layer to communicate with the data base.

    +
  • +
+
+
+

This architecture is going to be reflected dividing each component of the application in different packages to match those three layers.

+
+
+
+
+

Component structure

+
+
+

Each one of the components defined previously are going to be structured using the three-layers architecture. In each case we will have a service package, a logic package and a dataaccess package to fit the layers definition.

+
+
+
+component structure +
+
+
+
+
+

Dependency injection

+
+
+

As it is explained in the devonfw documentation we are going to implement the dependency injection pattern basing our solution on Spring and the Java standards: javax.inject (JSR330) combined with JSR250.

+
+
+
+dependency injection +
+
+
+
    +
  • +

    Separation of API and implementation: Inside each layer we will separate the elements in different packages: api and impl. The api will store the interface with the methods definition and inside the impl we will store the class that implements the interface.

    +
  • +
+
+
+
+layer api impl +
+
+
+
    +
  • +

    Usage of JSR330: The Java standard set of annotations for dependency injection (@Named, @Inject, @PostConstruct, @PreDestroy, etc.) provides us with all the needed annotations to define our beans and inject them.

    +
  • +
+
+
+
+
@Named
+public class MyBeanImpl implements MyBean {
+  @Inject
+  private MyOtherBean myOtherBean;
+
+  @PostConstruct
+  public void init() {
+    // initialization if required (otherwise omit this method)
+  }
+
+  @PreDestroy
+  public void dispose() {
+    // shutdown bean, free resources if required (otherwise omit this method)
+  }
+}
+
+
+
+
+
+

Layers communication

+
+
+

The connection between layers, to access to the functionalities of each one, will be solved using the dependency injection and the JSR330 annotations.

+
+
+
+layers impl +
+
+
+

Connection Service - Logic

+
+
+
+
@Named("DishmanagementRestService")
+public class DishmanagementRestServiceImpl implements DishmanagementRestService {
+
+  @Inject
+  private Dishmanagement dishmanagement;
+
+  // use the 'this.dishmanagement' object to access to the functionalities of the logic layer of the component
+
+  ...
+
+}
+
+
+
+

Connection Logic - Data Access

+
+
+
+
@Named
+public class DishmanagementImpl extends AbstractComponentFacade implements Dishmanagement {
+
+  @Inject
+  private DishDao dishDao;
+
+  // use the 'this.dishDao' to access to the functionalities of the data access layer of the component
+  ...
+
+}
+
+
+
+
+
+

Service layer

+
+
+

The services layer will be solved using REST services with the JAX-RS implementation.

+
+
+

To give service to the defined User Stories we will need to implement the following services:

+
+
+
    +
  • +

    provide all available dishes.

    +
  • +
  • +

    save a booking.

    +
  • +
  • +

    save an order.

    +
  • +
  • +

    provide a list of bookings (only for waiters) and allow filtering.

    +
  • +
  • +

    provide a list of orders (only for waiters) and allow filtering.

    +
  • +
  • +

    login service (see the Security section).

    +
  • +
  • +

    provide the current user data (see the Security section)

    +
  • +
+
+
+

Following the naming conventions proposed for Devon4j applications we will define the following end points for the listed services.

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that does not belong to the Order entity).

    +
  • +
  • +

    (POST) /mythaistar/login.

    +
  • +
  • +

    (GET) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

You can find all the details for the services implementation in the Swagger definition included in the My Thai Star project on Github.

+
+
+
+
+

Service API

+
+
+

The api.rest package in the service layer of a component will store the definition of the service by a Java interface. In this definition of the service we will set-up the endpoints of the service, the type of data expected and returned, the HTTP method for each endpoint of the service and other configurations if needed.

+
+
+
+
@Path("/dishmanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public interface DishmanagementRestService {
+
+  @GET
+  @Path("/dish/{id}/")
+  public DishCto getDish(@PathParam("id") long id);
+
+  ...
+
+}
+
+
+
+
+
+

Service impl

+
+
+

Once the service api is defined we need to implement it using the Java interface as reference. We will add the service implementation class to the impl.rest package and implement the RestService interface.

+
+
+
+
@Named("DishmanagementRestService")
+public class DishmanagementRestServiceImpl implements DishmanagementRestService {
+
+  @Inject
+  private Dishmanagement dishmanagement;
+
+  @Override
+  public DishCto getDish(long id) {
+    return this.dishmanagement.findDish(id);
+  }
+
+  ...
+
+}
+
+
+
+
+
+

==

+
+
+

You can see the Devon4j conventions for REST services here. And the My Thai Star services definition here as part of the My Thai Star project.

+
+
+
+
+

Logic layer

+
+
+

In the logic layer we will locate all the business logic of the application. We will keep the same schema as we have done for the service layer, having an api package with the definition of the methods and a impl package for the implementation.

+
+
+

Also, inside the api package, a to package will be the place to store the transfer objects needed to pass data through the layers of the component.

+
+
+
+logic layer +
+
+
+

The logic api definition:

+
+
+
+
public interface Dishmanagement {
+
+  DishCto findDish(Long id);
+
+  ...
+}
+
+
+
+

The logic impl class:

+
+
+
+
@Named
+public class DishmanagementImpl extends AbstractComponentFacade implements Dishmanagement {
+
+  @Inject
+  private DishDao dishDao;
+
+
+  @Override
+  public DishCto findDish(Long id) {
+
+    return getBeanMapper().map(this.dishDao.findOne(id), DishCto.class);
+  }
+
+  ...
+
+}
+
+
+
+

The BeanMapper will provide the needed transformations between entity and transfer objects.

+
+
+

Also, the logic layer is the place to add validation for Authorization based on roles as we will see later.

+
+
+
+
+

Data Access layer

+
+
+

The data-access layer is responsible for managing the connections to access and process data. The mapping between java objects to a relational database is done in Devon4j with the spring-data-jpa.

+
+
+

As in the previous layers, the data-access layer will have both api and impl packages. However, in this case, the implementation will be slightly different. The api package will store the component's main entities and, inside the api package, another api.repo package will store the Repositories. The repository interface will extend the DefaultRepository interface (located in the com.devonfw.module.jpa.dataaccess.api.data package of devon4j-starter-spring-data-jpa).

+
+
+

For queries we will differentiate between static queries (that will be located in a mapped file) and dynamic queries (implemented with QueryDsl). You can find all the details about how to manage queries with Devon4j here.

+
+
+

The default database included in the project will be the H2 instance provided with the Devon4j projects.

+
+
+

To get more details about pagination, database security, concurrency control, inheritance or how to solve the different relationships between entities, visit the official devon4j dataaccess documentation.

+
+
+
+
+

Security with Json Web Token

+
+
+

For the Authentication and Authorization the app will implement the json web token protocol.

+
+
+
+
+

JWT basics

+
+
+
    +
  • +

    A user will provide a username / password combination to our Auth server.

    +
  • +
  • +

    The Auth server will try to identify the user and, if the credentials match, will issue a token.

    +
  • +
  • +

    The user will send the token as the Authorization header to access resources on server protected by JWT Authentication.

    +
  • +
+
+
+
+jwt schema +
+
+
+
+
+

JWT implementation details

+
+
+

The Json Web Token pattern will be implemented based on the Spring Security framework that is provided by default in the Devon4j projects.

+
+
+
+
+

== Authentication

+
+
+

Based on the Spring Security approach, we will implement a class extending WebSecurityConfigurerAdapter (Devon4j already provides the BaseWebSecurityConfig class) to define the security entry point and filters. Also, as My Thai Star is a mainly public application, we will define here the resources that won’t be secured.

+
+
+

List of unsecured resources:

+
+
+
    +
  • +

    /services/rest/dishmanagement/**: to allow anonymous users to see the dishes info in the menu section.

    +
  • +
  • +

    /services/rest/ordermanagement/v1/order: to allow anonymous users to save an order. They will need a booking token but they won’t be authenticated to do this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking: to allow anonymous users to create a booking. Only a booking token is necessary to accomplish this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking/cancel/**: to allow canceling a booking from an email. Only the booking token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/accept/**: to allow guests to accept an invite. Only a guest token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/decline/**: to allow guests to reject an invite. Only a guest token is needed.

    +
  • +
+
+
+

To configure the login we will set up the HttpSecurity object in the configure method of the class. We will define a JWTLoginFilter class that will handle the requests to the /login endpoint.

+
+
+
+
http.[...].antMatchers(HttpMethod.POST, "/login").permitAll().[...].addFilterBefore(new JWTLoginFilter("/login", authenticationManager()), UsernamePasswordAuthenticationFilter.class);
+
+
+
+

In the same HttpSecurity object we will set up the filter for the rest of the requests, to check the presence of the JWT token in the header. First we will need to create a JWTAuthenticationFilter class extending the GenericFilterBean class. Then we can add the filter to the HttpSecurity object

+
+
+
+
http.[...].addFilterBefore(new JWTAuthenticationFilter(), UsernamePasswordAuthenticationFilter.class);
+
+
+
+

Finally, as default users to start using the My Thai Star app we are going to define two profiles using the inMemoryAuthentication of the Spring Security framework. In the configure(AuthenticationManagerBuilder Auth) method we will create:

+
+
+
    +
  • +

    user: waiter

    +
  • +
  • +

    password: waiter

    +
  • +
  • +

    role: Waiter

    +
  • +
  • +

    user: user0

    +
  • +
  • +

    password: password

    +
  • +
  • +

    role: Customer

    +
  • +
+
+
+
+
auth.inMemoryAuthentication().withUser("waiter").password("waiter").roles("Waiter").and().withUser("user0").password("password").roles("Customer");
+
+
+
+
+
+

== Token set up

+
+
+

Following the official documentation the implementation details for the MyThaiStar’s JWT will be:

+
+
+
    +
  • +

    Secret: Used as part of the signature of the token, acting as a private key. For the showcase purposes we will use simply "ThisIsASecret".

    +
  • +
  • +

    Token Prefix schema: Bearer. The token will look like Bearer <token>

    +
  • +
  • +

    Header: Authorization. The response header where the token will be included. Also, in the requests, when checking the token it will be expected to be in the same header.

    +
  • +
  • +

    The Authorization header should be part of the Access-Control-Expose-Headers header to allow clients access to the Authorization header content (the token);

    +
  • +
  • +

    The claims are the content of the payload of the token. The claims are statements about the user, so we will include the user info in this section.

    +
    +
      +
    • +

      subject: "sub". The username.

      +
    • +
    • +

      issuer: "iss". Who creates the token. We could use the url of our service but, as this is a showcase app, we simply will use "MyThaiStarApp"

      +
    • +
    • +

      expiration date: "exp". Defines when the token expires.

      +
    • +
    • +

      creation date: "iat". Defines when the token has been created.

      +
    • +
    • +

      scope: "scope". Array of strings to store the user roles.

      +
    • +
    +
    +
  • +
  • +

    Signature Algorithm: To encrypt the token we will use the default algorithm HS512.

    +
  • +
+
+
+

An example of a token claims before encryption would be:

+
+
+

{sub=waiter, scope=[ROLE_Waiter], iss=MyThaiStarApp, exp=1496920280, iat=1496916680}

+
+
+
+
+

== Current User request

+
+
+

To provide the client with the current user data, our application should expose a service to return the user details. In Devon4j applications the /general/service/impl/rest/SecurityRestServiceImpl.java class is ready to do that.

+
+
+
+
@Path("/security/v1")
+@Named("SecurityRestService")
+public class SecurityRestServiceImpl {
+
+  @Produces(MediaType.APPLICATION_JSON)
+  @GET
+  @Path("/currentuser/")
+  public UserDetailsClientTo getCurrentUserDetails(@Context HttpServletRequest request) {
+
+  }
+}
+
+
+
+

We will only need to implement the getCurrentUserDetails method.

+
+
+
+
+

== Authorization

+
+
+

We need to secure three services, that only should be accessible for users with role Waiter:

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter.

    +
  • +
+
+
+

As part of the token we are providing the user Role. So, when validating the token, we can obtain that same information and build a UsernamePasswordAuthenticationToken with username and the roles as collection of Granted Authorities.

+
+
+

Doing so, afterwards, in the implementation class of the logic layer we can set up the related methods with the Java security '@RolesAllowed' annotation to block access to the resource for users that do not match the expected roles.

+
+
+
+
@RolesAllowed(Roles.WAITER)
+public PaginatedListTo<BookingEto> findBookings(BookingSearchCriteriaTo criteria) {
+  return findBookings(criteria);
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/java-testing.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/java-testing.html new file mode 100644 index 00000000..12b41b72 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/java-testing.html @@ -0,0 +1,396 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Java testing

+
+ +
+
+
+

Component testing

+
+
+

We are going to test our components as a unit using Spring Test and Devon4j-test modules.

+
+
+

In order to test a basic component of the app first we will create a test class in the src/test/java folder and inside the main package of the test module. We will name the class following the convention.

+
+
+
+
[Component]Test
+
+
+
+

Then, in the declaration of the test class, we will use the @SpringBootTest annotation to run the application context. In addition, we will extend the ComponentTest from Devon4j-test module to have access to the main functionalities of the module, see more details here.

+
+
+

Spring Test allows us to use Dependency Injection so we can inject our component directly using the @Inject annotation.

+
+
+

Each test will be represented by a method annotated with @Test. Inside the method we will test one functionality, evaluating the result thanks to the asserts provided by the ComponentTest class that we are extending.

+
+
+

A simple test example

+
+
+
+
@SpringBootTest(classes = SpringBootApp.class)
+public class DishmanagementTest extends ComponentTest {
+
+  @Inject
+  private Dishmanagement dishmanagement;
+
+  @Test
+  public void findAllDishes() {
+
+    PaginatedListTo<DishCto> result = this.dishmanagement.findDishes();
+    assertThat(result).isNotNull();
+  }
+
+  ...
+}
+
+
+
+
+
+

Running the tests

+
+ +
+
+
+

From Eclipse

+
+
+

We can run the test from within Eclipse with the contextual menu Run As > JUnit Test. This functionality can be launched from method level, class level or even package level. The results will be shown in the JUnit tab.

+
+
+
+test results eclipse +
+
+
+
+
+

From command line using Maven

+
+
+

We can also run tests using Maven and the command line, using the command mvn test (or mvn clean test).

+
+
+
+
`C:\MyThaiStar>mvn clean test`
+
+
+
+

Doing this we will run all the tests of the project (recognized by the Test word at the end of the classes) and the results will be shown by sub-project.

+
+
+
+
...
+
+[D: 2017-07-17 09:30:08,457] [P: INFO ] [C: ] [T: Thread-5] [L: org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean] - [M: Closing JPA EntityManagerFactory for persistence unit 'default']
+
+Results :
+
+Tests run: 11, Failures: 0, Errors: 0, Skipped: 1
+
+...
+
+[INFO]
+[INFO] --- maven-compiler-plugin:3.1:testCompile (default-testCompile) @ mtsj-server ---
+[INFO] No sources to compile
+[INFO]
+[INFO] --- maven-surefire-plugin:2.12.4:test (default-test) @ mtsj-server ---
+[INFO] No tests to run.
+[INFO] ------------------------------------------------------------------------
+[INFO] Reactor Summary:
+[INFO]
+[INFO] mtsj ............................................... SUCCESS [  0.902 s]
+[INFO] mtsj-core .......................................... SUCCESS [02:30 min]
+[INFO] mtsj-server ........................................ SUCCESS [  1.123 s]
+[INFO] ------------------------------------------------------------------------
+[INFO] BUILD SUCCESS
+[INFO] ------------------------------------------------------------------------
+[INFO] Total time: 02:35 min
+[INFO] Finished at: 20XX-07-17T09:30:13+02:00
+[INFO] Final Memory: 39M/193M
+[INFO] ------------------------------------------------------------------------
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/master-my-thai-star.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/master-my-thai-star.html new file mode 100644 index 00000000..865b1beb --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/master-my-thai-star.html @@ -0,0 +1,11384 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

MyThaiStar

+
+
+

1. My Thai Star – Agile Framework

+ +
+
+

1.1 Team Setup

+
+

The team working on the development of the My Thai Star app and the documentation beside the technical development works distributed in various locations across Germany, the Netherlands, Spain and Poland. For the communication part the team uses the two channels Skype and Mail and for the documentation part the team makes usage mainly of GitHub and JIRA.

+
+
+
+

1.2 Scrum events

+ +
+
+

Sprint Planning

+
+

Within the My Thai Star project we decided on having one-hour Sprint Planning meetings for a four-week Sprint. This decision is based on the fact that this project is not the main project of the team members. As the backlog refinement is done during the Sprint Planning we make use of the planningpoker.com tool for the estimation of the tasks.

+
+
+
+Screenshot of planningpoker.com +
+
Figure 1. Screenshot of planningpoker.com during Sprint 1 Planning
+
+
+

During the Sprint Planning meeting the team receives support from Devon colleagues outside the development. This feedback helps the team to focus on important functionalities and task by keeping the eyes on the overall aim which is to have a working application by the end of June 2017.

+
+
+
+

Sprint Review

+
+

The Sprint Review meetings are time boxed to one hour for the four week Sprint. Within the Sprint Review meeting the team plans to do a retrospective of the finished Sprint. As well as it is done during the Sprint Planning the team receives support from Devon colleagues.

+
+
+
+

Sprint Retrospective

+
+

For this project the team aligned on not having a specific Sprint Retrospective meeting. The team is going to have a retrospective of a finished Sprint during the Sprint Review.

+
+
+
+

Daily Stand-ups

+
+

The team aligned on having two weekly Stand-up meetings instead of a Daily Stand-up meeting. In comparison with the time boxed length of 15mins described in the CAF for this project the team extended the Stand-up meeting to 30mins. The content of the meetings remains the same.

+
+
+
+

Backlog refinement

+
+

The team decided that the backlog refinement meeting is part of the Sprint Planning meeting.

+
+
+
+

1.3 Establish Product Backlog

+
+

For the My Thai Star project the team decided on using the JIRA agile documentation which is one of the widely used agile tools. JIRA is equipped with several useful tools regarding agile software development (e.g. Scrum-Board). One of the big advantages of JIRA is the extensive configuration and personalization possibilities.

+
+
+

With having a list of the Epics and User Stories for the My Thai Star development in GitHub, the team transferred the User Stories into the JIRA backlog as it is shown in the screenshot below. All User Stories are labeled colorfully with the related Epic which shapes the backlog in clearly manner.

+
+
+
+Screenshot of planningpoker.com +
+
Figure 2. Screenshot of the JIRA backlog during Sprint 2
+
+
+

We decided on working with sub-tasks as a single user story comprises a number of single and separate tasks. Another benefit of working with sub-tasks is that every single sub-task can be assigned to a single team member whereas a user story can only be assigned to one team member. By picking single sub-tasks the whole process of a user story is better organized.

+
+
+
+Screenshot of Sub-tasks +
+
Figure 3. Screenshots of Sub-tasks during Sprint 2
+
+
+
+

2. My Thai Star – Agile Diary

+
+

In parallel to the Diary Ideation we use this Agile Diary to document our Scrum events. The target of this diary is to describe the differences to the Scrum methodology as well as specific characteristics of the project. We also document the process on how we approach the Scrum methodology over the length of the project.

+
+
+
+

24.03.2017 Sprint 1 Planning

+
+

Within the Sprint 1 Planning we used planningpoker.com for the estimation of the user stories. The estimation process usually is part of the backlog refinement meeting. Regarding the project circumstances we decided to estimate the user stories during the Sprint Planning. Starting the estimation process we noticed that we had to align our interpretation of the estimation effort as these story points are not equivalent to a certain time interval. The story points are relative values to compare the effort of the user stories. With this in mind we proceeded with the estimation of the user stories. We decided to start Sprint 1 with the following user stories and the total amount of 37 story points: +• ICSDSHOW-2 Create invite for friends (8 Story Points) +• ICSDSHOW-4 Create reservation (3) +• ICSDSHOW-5 Handle invite (3) +• ICSDSHOW-6 Revoke accepted invite (5) +• ICSDSHOW-9 Cancel invite (3) +• ICSDSHOW-11 Filter menu (5) +• ICSDSHOW-12 Define order (5) +• ICSDSHOW-13 Order the order (5) +As the Sprint Planning is time boxed to one hour we managed to hold this meeting within this time window.

+
+
+
+

27.04.2017 Sprint 1 Review

+
+

During the Sprint 1 Review we had a discussion about the data model proposal. For the discussion we extended this particular Review meeting to 90min. As this discussion took almost 2/3 of the Review meeting we only had a short time left for our review of Sprint 1. For the following scrum events we decided to focus on the primary target of these events and have discussions needed for alignments in separate meetings. +Regarding the topic of splitting user stories we had the example of a certain user story which included a functionality of a twitter integration (ICSDSHOW-17 User Profile and Twitter integration). As the twitter functionality could not be implemented at this early point of time we thought about cutting the user story into two user stories. We aligned on mocking the twitter functionality until the dependencies are developed in order to test the components. As this user story is estimated with 13 story points it is a good example for the question whether to cut a user story into multiple user stories or not. +Unfortunately not all user stories of Sprint 1 could be completed. Due to this situation we discussed whether to push all unfinished user stories into the status done or to move them to Sprint 2. We aligned on transferring the unfinished user stories into the next Sprint. During Sprint 1 the team underestimated the number of holidays that crossed the Sprint 1 goals. As taking holidays and absences of team members into consideration is part of a Sprint Planning we have a learning effect on setting a Sprint Scope.

+
+
+
+

03.05.2017 Sprint 2 Planning

+
+

As we aligned during the Sprint 1 Review on transferring unfinished user stories into Sprint 2 the focus for Sprint 2 was on finishing these transferred user stories. During our discussion on how many user stories we could work on in Sprint 2 we needed to remind ourselves that the overall target is to develop an example application for the devonfw. Considering this we aligned on a clear target for Sprint 2: To focus on finishing User Stories as we need to aim for a practicable and realizable solution. Everybody aligned on the aim of having a working application at the end of Sprint 2. +For the estimation process of user stories we again made use of planningpoker.com as the team prefers this “easy-to-use” tool. During our second estimation process we had the situation in which the estimated story points differed strongly from one team member to another. In this case the team members briefly explained how they understood and interpreted the user story. It turned out that team members misinterpreted the user stories. Through this discussion all team members got the same understanding of the specific functionality and scope of a user story. After the alignment the team members adjusted their estimations. +Beside this need for discussion the team estimated most of the user stories with very similar story points. This fact shows the increase within the effort estimation for each team member in comparison to Sprint 1 planning. Over the short time of two Sprint Planning meetings the team received a better understanding and feeling for the estimation with story points.

+
+
+
+

01.06.2017 Sprint 2 Review

+
+

As our Sprint 1 Review four weeks ago was not completely structured like a Sprint Review meeting we focused on the actual intention of a Sprint Review meeting during Sprint 2 Review. This means we demonstrated the completed and implemented functionalities with screen sharing and the product owner accepted the completed tasks. +Within the User Story ICSDSHOW-22 “See all orders/reservations” the functionality “filtering the list by date” could not be implemented during Sprint 2. The team was unsure on how to proceed with this task. One team member added that especially in regard to having a coherent release, implementing fewer but working functionalities is much better than implementing more but not working functionalities. For this reason the team reminded itself to focus on completing functionalities and not rush straight to a working application.

+
+
+
Table of Contents
+ +
+
+
+

User Stories

+
+

The list of user stories, exported from JIRA, can be downloaded from here.

+
+
+
+

Epic: Invite friends

+ +
+
+

US: create invite for friends

+
+

Epic: Invite friends

+
+
+

As a guest I want to create a dinner event by entering date and time and adding potential guests by their emails so that each potential guest will receive an email in order to confirm or decline my invite.

+
+
+
+

== Acceptance criteria

+
+
    +
  1. +

    only date and time in future possible and both required

    +
  2. +
  3. +

    only valid email addresses: text@text.xx, one entered email-address is required

    +
  4. +
  5. +

    if AGB are not checked, an error message is shown

    +
  6. +
  7. +

    after the invite is done

    +
    +
      +
    1. +

      I see the confirmation screen of my invite (see wireframe)

      +
    2. +
    3. +

      I receive a confirmation email about my invite containing date, time and invited guests

      +
    4. +
    5. +

      all guests receive a mail with my invite

      +
    6. +
    +
    +
  8. +
+
+
+
+

US: create reservation

+
+

Epic: Invite friends

+
+
+

As a guest I want to create a reservation by entering date and time and number of adults and kids

+
+
+
+

== Acceptance criteria

+
+
    +
  1. +

    only date and time in future possible and both required

    +
  2. +
  3. +

    only valid email addresses: text@text.xx, one entered email-address is required

    +
  4. +
  5. +

    if AGB are not checked, an error message is shown

    +
  6. +
  7. +

    after the reservation is done

    +
    +
      +
    1. +

      I see a confirmation screen of my reservation with date-time, number of persons and kids

      +
    2. +
    3. +

      I receive a confirmation email about my reservation

      +
    4. +
    +
    +
  8. +
+
+
+
+

== Wireframes

+
+

see real time board

+
+
+
+

US: handle invite

+
+

As an invited guest I would like to receive an email - after somebody has invited me - with the option to accept or decline the invite so that the system knows about my participation

+
+
+
+

== AC:

+
+
    +
  1. +

    the mail contains the following information about the invite

    +
    +
      +
    1. +

      who has invited

      +
    2. +
    3. +

      who else is invited

      +
    4. +
    5. +

      date and time of the invite

      +
    6. +
    7. +

      button to accept or decline

      +
    8. +
    9. +

      after pressing the buttons the system will store the status (yes/no) of my invite

      +
    10. +
    +
    +
  2. +
+
+
+
+

US: revoke accepted invite

+
+

As an invited guest I would like to revoke my previous answer in order to inform the system and the inviter about my no-show

+
+
+
+

== AC:

+
+
    +
  1. +

    the inviter and myself receives an email about my cancellation

    +
  2. +
  3. +

    the system sets my status of my invite to no

    +
  4. +
  5. +

    in case I have placed an order, the order is also removed from the system.

    +
  6. +
  7. +

    the cancellation is only possible 10 minutes before the event takes place. The system shows a message that cancellation is not possible anymore.

    +
  8. +
+
+
+
+

US: calculate best table

+
+

As a guest I would like the system to check (1 hour before my invite) all my invites and to reserve a table fitting the number of accepted users

+
+
+
+

== Details

+
+

Pseudo-algorithm for reservation: +Find table for given date and time where seats of guests >= Count of invited guests plus one. In case no results, decline request and show error message to user. In case of any result, make a reservation for table…​. +For each decline of a guest remove guest and search with reduced number for new table. In case table is found, reserve it and remove reservation from previous table. In case not, do not change reservations.

+
+
+
+

US: find table by reservation info

+
+

As a waiter I would like to search by reference number or email address for the reserved table in order to know the table for my visit. (when arriving at the restaurant)

+
+
+
+

== AC:

+
+
    +
  1. +

    After entering the email the systems shows the number of the table. In case no reservation found, a message is shown.

    +
  2. +
  3. +

    Entered email address could be email of inviter or any invited guest.

    +
  4. +
+
+
+
+

US: cancel invite

+
+

Epic: Invite friends

+
+
+

As a guests who has sent an invite I want to be able to cancel my previous invite in order to inform the restaurant and my invited guests that I will not show up

+
+
+
+

== AC:

+
+
    +
  1. +

    the option to cancel the invite is available in the confirmation-mail about my invite

    +
  2. +
  3. +

    after my cancellation all invited guests receives a mail about the cancellation

    +
  4. +
  5. +

    I see a confirmation that my invite was canceled successfully

    +
  6. +
  7. +

    after my cancellation my invite and reservation and all orders related to it are deleted from the system and no one can accept or decline any invite for it

    +
  8. +
  9. +

    the cancellation is only possible one hour before the invite takes place. After that I am not allowed to cancel it any more.

    +
  10. +
+
+
+
+

Epic: Digital Menu

+ +
+
+

US: filter menu

+
+

As a guest I want to filter the menu so that I only see the dishes I am interested in

+
+
+
+

== AC:

+
+
    +
  1. +

    the guest can filter by

    +
    +
      +
    1. +

      type: starter | main dish | dessert; XOR; if nothing is selected all are shown (default value)

      +
    2. +
    3. +

      veggy (yes|no|does not matter (default))

      +
    4. +
    5. +

      vegan (yes|no|does not matter (default))

      +
    6. +
    7. +

      rice (yes|no|does not matter (default))

      +
    8. +
    9. +

      curry (yes|no|does not matter (default))

      +
    10. +
    11. +

      noodle (yes|no|does not matter (default))

      +
    12. +
    13. +

      price (range)

      +
    14. +
    15. +

      ratings (range)

      +
    16. +
    17. +

      my favorite (yes|no|does not matter (default)) — free text (search in title and description)

      +
    18. +
    +
    +
  2. +
  3. +

    the guest can sort by price asc, rating asc

    +
  4. +
  5. +

    after setting the filter only dishes are shown which fulfills those criteria

    +
  6. +
  7. +

    by pressing the button reset filter all filter are reset to the initial value

    +
  8. +
  9. +

    by pressing the filter button the filter is applied [or is it triggered after each change?]

    +
  10. +
+
+
+
+

US: Define order

+
+

As a guest I want to define my order by selecting dishes from the menu

+
+
+
+

== AC:

+
+
    +
  • +

    The guest can add each dish to the order

    +
  • +
  • +

    In case the guest adds the same dish multiple times, a counter in the order for this dish is increased for this dish

    +
  • +
  • +

    The guest can remove the dish from the order

    +
  • +
  • +

    The guest can add for each main dish the type of meat (pork, chicken, tofu)

    +
  • +
  • +

    The guest can add for each dish a free-text-comment

    +
  • +
  • +

    After adding/removing any dish the price is calculated including VAT

    +
  • +
+
+
+
+

US: Order the order

+
+

As a guest I want to order my selected dishes (order)

+
+
+

AC:

+
+
+
    +
  1. +

    I receive a mail containing my order with all dishes and the final price

    +
  2. +
  3. +

    precondition for ordering:

    +
    +
      +
    1. +

      Each order must be associated with a reservation / invite. Without any reference no order could be placed. The reference could be obtained from a previous reservation/invite (created during same session) or by the previous accepted invite (link in email) or by entering the reference id when asked by the system.

      +
      +
        +
      1. +

        In case precondition is not fulfilled, the guest is asked

        +
        +
          +
        1. +

          whether he/she would like to create a reservation/invite and is forwarded to US Invite Friends. Only after finalizing the reservation the order is accepted.

          +
        2. +
        3. +

          or he/she would enter previous created reservation-id he/she knows in order to associate his/her order with this reservation

          +
        4. +
        +
        +
      2. +
      +
      +
    2. +
    +
    +
  4. +
+
+
+
+

US: Cancel order

+
+

As a guest I want to cancel my order.

+
+
+

AC:

+
+
+
    +
  1. +

    in my received confirmation mail I have the option to cancel my order

    +
  2. +
  3. +

    the cancellation is only possible one hour before my reservation takes place

    +
  4. +
  5. +

    my order is deleted from the system

    +
  6. +
+
+
+

Remark: Changing the order is not possible. For that the order must be canceled and created from scratch again

+
+
+
+

US: Read twitter rating for dishes

+
+

As a guest I want to read for all dishes the rating done be twitter because I would like to know the opinion of others

+
+
+

AC:

+
+
+
    +
  1. +

    For each dish I see the latest 3 comments done by twitter for this vote (text, username, avatar)

    +
  2. +
  3. +

    For each dish I see the number of likes done by twitter

    +
  4. +
+
+
+
+

Epic: User Profile

+ +
+
+

US: User Profile

+
+

As a guest I want to have a user profile to associate it with my twitter account to be able to like/rate dishes

+
+
+

AC:

+
+
+
    +
  1. +

    Username of my profile is my email address

    +
  2. +
  3. +

    My profile is protected by password

    +
  4. +
  5. +

    I can log in and log out to my profile

    +
  6. +
  7. +

    I can reset my password by triggering the reset by mail

    +
  8. +
  9. +

    I can associate my profile with my twitter account in order to rate dishes and store my favorites by liking posts associated to dishes

    +
  10. +
+
+
+
+

Epic: Rate by twitter

+ +
+
+

US: Receive mail to rate your dish

+
+

As a guest I want to receive a mail by the system in order to rate my dish

+
+
+
+

US: Rate your dish

+
+

As a guest I want to add a comment or a like via my twitter account for a dish

+
+
+

AC:

+
+
+
    +
  1. +

    Before I write my rate I would like to be able to read all tweets of other users for this dish

    +
  2. +
  3. +

    I would like to see the number of likes for a dish

    +
  4. +
+
+
+
+

Epic: Waiter Cockpit

+ +
+
+

US: See all orders/reservations

+
+

As a waiter I want to see all orders/reservations in order to know what is going on in my restaurant

+
+
+

AC:

+
+
+
    +
  1. +

    all orders/reservations are shown in a list view (read-only). These lists can be filtered and sorted (similar to excel-data-filters)

    +
  2. +
  3. +

    orders/reservations are shown in separate lists.

    +
  4. +
  5. +

    for each order the dish, meat, comment, item, reservation-id, reservation date-time, creation-date-time is shown

    +
  6. +
  7. +

    for each reservation the inviters email, the guests-emails, the number of accepts and declines, calculated table number, the reservation-id, reservation date-time and creation-date-time are shown

    +
  8. +
  9. +

    the default filter for all lists is the today’s date for reservation date-time. this filter can be deleted.

    +
  10. +
  11. +

    only reservations and orders with reservation date in the future shall be available in this view. All other orders and reservations shall not be deleted; for data analytics those orders and reservations shall still exist in the system.

    +
  12. +
+
+
+

checklist:

+
+
+

talk about:

+
+
+
    +
  • +

    who?

    +
  • +
  • +

    what?

    +
  • +
  • +

    why (purpose)

    +
  • +
  • +

    why (objective)

    +
  • +
  • +

    what happens outside the software

    +
  • +
  • +

    what might go wrong

    +
  • +
  • +

    any questions or assumptions (write them down); DoR should check that those sections are empty.

    +
  • +
  • +

    is there any better solution?

    +
  • +
  • +

    how (technical perspective)

    +
  • +
  • +

    do a rough estimate

    +
  • +
  • +

    check INVEST

    +
  • +
+
+
+
+
+
+

Technical design

+
+ +
+
+
+

Data Model

+
+
+
Table of Contents
+ +
+
+
Data Model
+
+
+mts datamodel +
+
+
+
Table of Contents
+ +
+
+
+
NoSQL Data Model
+
+
+dynamodb data model 1.4.1 +
+
+
+
+
+
+

Server Side

+
+
+
Table of Contents
+ +
+
+
Java design
+ +
+
+
Introduction
+
+

The Java back-end for My Thai Star application is going to be based on:

+
+
+
    +
  • +

    DEVON4J as the Java framework

    +
  • +
  • +

    Devonfw as the Development environment

    +
  • +
  • +

    CobiGen as code generation tool

    +
  • +
+
+
+

To know more details about the above technologies please visit the following documentation:

+
+
+ +
+
+
+
Basic architecture details
+
+

Following the DEVON4J conventions the Java My Thai Star back-end is going to be developed dividing the application in Components and using a three layers architecture.

+
+
+
+
Project modules
+
+

Using the DEVON4J approach for the Java back-end project we will have a structure of a Maven project formed by three projects

+
+
+
+project modules +
+
+
+
    +
  • +

    api: Stores all the REST interfaces and corresponding Request/Response objects.

    +
  • +
  • +

    core: Stores all the logic and functionality of the application.

    +
  • +
  • +

    server: Configures the packaging of the application.

    +
  • +
+
+
+

We can automatically generate this project structure using the DEVON4J Maven archetype

+
+
+
+
Components
+
+

The application is going to be divided in different components to encapsulate the different domains of the application functionalities.

+
+
+
+mtsj components +
+
+
+

As main components we will find:

+
+
+
    +
  • +

    Bookingmanagement: Manages the bookings part of the application. With this component the users (anonymous/logged in) can create new bookings or cancel an existing booking. The users with waiter role can see all scheduled bookings.

    +
  • +
  • +

    Ordermanagement: This component handles the process to order dishes (related to bookings). A user (as a host or as a guest) can create orders (that contain dishes) or cancel an existing one. The users with waiter role can see all ordered orders.

    +
  • +
  • +

    Dishmanagement: This component groups the logic related to the menu (dishes) view. Its main feature is to provide the client with the data of the available dishes but also can be used by other components (Ordermanagement) as a data provider in some processes.

    +
  • +
  • +

    Usermanagement: Takes care of the User Profile management, allowing to create and update the data profiles.

    +
  • +
+
+
+

As common components (that don’t exactly represent an application’s area but provide functionalities that can be used by the main components):

+
+
+
    +
  • +

    Imagemanagement: Manages the images of the application. In a first approach the `Dishmanagement` component and the Usermanagement component will have an image as part of its data. The Imagemanagement component will expose the functionality to store and retrieve this kind of data.

    +
  • +
  • +

    Mailservice: with this service we will provide the functionality for sending email notifications. This is a shared service between different app components such as bookingmanagement or ordercomponent.

    +
  • +
+
+
+

Other components:

+
+
+
    +
  • +

    Security (will manage the access to the private part of the application using a jwt implementation).

    +
  • +
  • +

    Twitter integration: planned as a Microservice will provide the twitter integration needed for some specific functionalities of the application.

    +
  • +
+
+
+
+
Layers
+
+
    +
  • +

    Service Layer: this layer will expose the REST API to exchange information with the client applications.

    +
  • +
  • +

    Logic Layer: the layer in charge of hosting the business logic of the application.

    +
  • +
  • +

    Data Access Layer: the layer to communicate with the data base.

    +
  • +
+
+
+

This architecture is going to be reflected dividing each component of the application in different packages to match those three layers.

+
+
+
+
Component structure
+
+

Each one of the components defined previously is going to be structured using the three-layers architecture. In each case we will have a service package, a logic package and a dataaccess package to fit the layers definition.

+
+
+
+component structure +
+
+
+
+
Dependency injection
+
+

As it is explained in the devonfw documentation we are going to implement the dependency injection pattern basing our solution on Spring and the Java standards: java.inject (JSR330) combined with JSR250.

+
+
+
+dependency injection +
+
+
+
    +
  • +

    Separation of API and implementation: Inside each layer we will separate the elements in different packages: api and impl. The api will store the interface with the methods definition and inside the impl we will store the class that implements the interface.

    +
  • +
+
+
+
+layer api impl +
+
+
+
    +
  • +

    Usage of JSR330: The Java standard set of annotations for dependency injection (@Named, @Inject, @PostConstruct, @PreDestroy, etc.) provides us with all the needed annotations to define our beans and inject them.

    +
  • +
+
+
+
+
@Named
+public class MyBeanImpl implements MyBean {
+  @Inject
+  private MyOtherBean myOtherBean;
+
+  @PostConstruct
+  public void init() {
+    // initialization if required (otherwise omit this method)
+  }
+
+  @PreDestroy
+  public void dispose() {
+    // shutdown bean, free resources if required (otherwise omit this method)
+  }
+}
+
+
+
+
+
Layers communication
+
+

The connection between layers, to access to the functionalities of each one, will be solved using the dependency injection and the JSR330 annotations.

+
+
+
+layers impl +
+
+
+

Connection Service - Logic

+
+
+
+
@Named("DishmanagementRestService")
+public class DishmanagementRestServiceImpl implements DishmanagementRestService {
+
+  @Inject
+  private Dishmanagement dishmanagement;
+
+  // use the 'this.dishmanagement' object to access to the functionalities of the logic layer of the component
+
+  ...
+
+}
+
+
+
+

Connection Logic - Data Access

+
+
+
+
@Named
+public class DishmanagementImpl extends AbstractComponentFacade implements Dishmanagement {
+
+  @Inject
+  private DishDao dishDao;
+
+  // use the 'this.dishDao' to access to the functionalities of the data access layer of the component
+  ...
+
+}
+
+
+
+
+
Service layer
+
+

The services layer will be solved using REST services with the JAX-RS implementation.

+
+
+

To give service to the defined User Stories we will need to implement the following services:

+
+
+
    +
  • +

    provide all available dishes.

    +
  • +
  • +

    save a booking.

    +
  • +
  • +

    save an order.

    +
  • +
  • +

    provide a list of bookings (only for waiters) and allow filtering.

    +
  • +
  • +

    provide a list of orders (only for waiters) and allow filtering.

    +
  • +
  • +

    login service (see the Security section).

    +
  • +
  • +

    provide the current user data (see the Security section)

    +
  • +
+
+
+

Following the naming conventions proposed for Devon4j applications we will define the following end points for the listed services.

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that do not belong to the Order entity).

    +
  • +
  • +

    (POST) /mythaistar/login.

    +
  • +
  • +

    (GET) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

You can find all the details for the services implementation in the Swagger definition included in the My Thai Star project on Github.

+
+
+
+
Service API
+
+

The api.rest package in the service layer of a component will store the definition of the service by a Java interface. In this definition of the service we will set-up the endpoints of the service, the type of data expected and returned, the HTTP method for each endpoint of the service and other configurations if needed.

+
+
+
+
@Path("/dishmanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public interface DishmanagementRestService {
+
+  @GET
+  @Path("/dish/{id}/")
+  public DishCto getDish(@PathParam("id") long id);
+
+  ...
+
+}
+
+
+
+
+
Service impl
+
+

Once the service api is defined we need to implement it using the Java interface as reference. We will add the service implementation class to the impl.rest package and implement the RestService interface.

+
+
+
+
@Named("DishmanagementRestService")
+public class DishmanagementRestServiceImpl implements DishmanagementRestService {
+
+  @Inject
+  private Dishmanagement dishmanagement;
+
+  @Override
+  public DishCto getDish(long id) {
+    return this.dishmanagement.findDish(id);
+  }
+
+  ...
+
+}
+
+
+
+
+
==
+
+

You can see the Devon4j conventions for REST services here. And the My Thai Star services definition here as part of the My Thai Star project.

+
+
+
+
Logic layer
+
+

In the logic layer we will locate all the business logic of the application. We will keep the same schema as we have done for the service layer, having an api package with the definition of the methods and a impl package for the implementation.

+
+
+

Also, inside the api package, a to package will be the place to store the transfer objects needed to pass data through the layers of the component.

+
+
+
+logic layer +
+
+
+

The logic api definition:

+
+
+
+
public interface Dishmanagement {
+
+  DishCto findDish(Long id);
+
+  ...
+}
+
+
+
+

The logic impl class:

+
+
+
+
@Named
+public class DishmanagementImpl extends AbstractComponentFacade implements Dishmanagement {
+
+  @Inject
+  private DishDao dishDao;
+
+
+  @Override
+  public DishCto findDish(Long id) {
+
+    return getBeanMapper().map(this.dishDao.findOne(id), DishCto.class);
+  }
+
+  ...
+
+}
+
+
+
+

The BeanMapper will provide the needed transformations between entity and transfer objects.

+
+
+

Also, the logic layer is the place to add validation for Authorization based on roles as we will see later.

+
+
+
+
Data Access layer
+
+

The data-access layer is responsible for managing the connections to access and process data. The mapping between java objects to a relational database is done in Devon4j with the spring-data-jpa.

+
+
+

As in the previous layers, the data-access layer will have both api and impl packages. However, in this case, the implementation will be slightly different. The api package will store the component main entities and, inside the api package, another api.repo package will store the Repositories. The repository interface will extend DefaultRepository interface (located in com.devonfw.module.jpa.dataaccess.api.data package of devon4j-starter-spring-data-jpa ).

+
+
+

For queries we will differentiate between static queries (that will be located in a mapped file) and dynamic queries (implemented with QueryDsl). You can find all the details about how to manage queries with Devon4j here.

+
+
+

The default data base included in the project will be the H2 instance included with the Devon4j projects.

+
+
+

To get more details about pagination, database security, concurrency control, inheritance or how to solve the different relationships between entities visit the official devon4j dataaccess documentation.

+
+
+
+
Security with Json Web Token
+
+

For the Authentication and Authorization the app will implement the json web token protocol.

+
+
+
+
JWT basics
+
+
    +
  • +

    A user will provide a username / password combination to our Auth server.

    +
  • +
  • +

    The Auth server will try to identify the user and, if the credentials match, will issue a token.

    +
  • +
  • +

    The user will send the token as the Authorization header to access resources on server protected by JWT Authentication.

    +
  • +
+
+
+
+jwt schema +
+
+
+
+
JWT implementation details
+
+

The Json Web Token pattern will be implemented based on the Spring Security framework that is provided by default in the Devon4j projects.

+
+
+
+
== Authentication
+
+

Based on the Spring Security approach, we will implement a class extending WebSecurityConfigurerAdapter (Devon4j already provides the `BaseWebSecurityConfig` class) to define the security entry point and filters. Also, as My Thai Star is a mainly public application, we will define here the resources that won’t be secured.

+
+
+

List of unsecured resources:

+
+
+
    +
  • +

    /services/rest/dishmanagement/**: to allow anonymous users to see the dishes info in the menu section.

    +
  • +
  • +

    /services/rest/ordermanagement/v1/order: to allow anonymous users to save an order. They will need a booking token but they won’t be authenticated to do this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking: to allow anonymous users to create a booking. Only a booking token is necessary to accomplish this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking/cancel/**: to allow canceling a booking from an email. Only the booking token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/accept/**: to allow guests to accept an invite. Only a guest token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/decline/**: to allow guests to reject an invite. Only a guest token is needed.

    +
  • +
+
+
+

To configure the login we will set up the HttpSecurity object in the configure method of the class. We will define a JWTLoginFilter class that will handle the requests to the /login endpoint.

+
+
+
+
http.[...].antMatchers(HttpMethod.POST, "/login").permitAll().[...].addFilterBefore(new JWTLoginFilter("/login", authenticationManager()), UsernamePasswordAuthenticationFilter.class);
+
+
+
+

In the same HttpSecurity object we will set up the filter for the rest of the requests, to check the presence of the JWT token in the header. First we will need to create a JWTAuthenticationFilter class extending the GenericFilterBean class. Then we can add the filter to the HttpSecurity object

+
+
+
+
http.[...].addFilterBefore(new `JWTAuthenticationFilter()`, UsernamePasswordAuthenticationFilter.class);
+
+
+
+

Finally, as default users to start using the My Thai Star app we are going to define two profiles using the inMemoryAuthentication of the Spring Security framework. In the configure(AuthenticationManagerBuilder Auth) method we will create:

+
+
+
    +
  • +

    user: waiter

    +
  • +
  • +

    password: waiter

    +
  • +
  • +

    role: Waiter

    +
  • +
  • +

    user: user0

    +
  • +
  • +

    password: password

    +
  • +
  • +

    role: Customer

    +
  • +
+
+
+
+
auth.inMemoryAuthentication().withUser("waiter").password("waiter").roles("Waiter").and().withUser("user0").password("password").roles("Customer");
+
+
+
+
+
== Token set up
+
+

Following the official documentation, the implementation details for MyThaiStar’s JWT will be:

+
+
+
    +
  • +

    Secret: Used as part of the signature of the token, acting as a private key. For the showcase purposes we will use simply "ThisIsASecret".

    +
  • +
  • +

    Token Prefix schema: Bearer. The token will look like Bearer <token>

    +
  • +
  • +

    Header: Authorization. The response header where the token will be included. Also, in the requests, when checking the token it will be expected to be in the same header.

    +
  • +
  • +

    The Authorization header should be part of the Access-Control-Expose-Headers header to allow clients access to the Authorization header content (the token);

    +
  • +
  • +

    The claims are the content of the payload of the token. The claims are statements about the user, so we will include the user info in this section.

    +
    +
      +
    • +

      subject: "sub". The username.

      +
    • +
    • +

      issuer: "iss". Who creates the token. We could use the url of our service but, as this is a showcase app, we simply will use "MyThaiStarApp"

      +
    • +
    • +

      expiration date: "exp". Defines when the token expires.

      +
    • +
    • +

      creation date: "iat". Defines when the token has been created.

      +
    • +
    • +

      scope: "scope". Array of strings to store the user roles.

      +
    • +
    +
    +
  • +
  • +

    Signature Algorithm: To encrypt the token we will use the default algorithm HS512.

    +
  • +
+
+
+

An example of a token claims before encryption would be:

+
+
+

{sub=waiter, scope=[ROLE_Waiter], iss=MyThaiStarApp, exp=1496920280, iat=1496916680}

+
+
+
+
== Current User request
+
+

To provide the client with the current user data, our application should expose a service to return the user details. In Devon4j applications the /general/service/impl/rest/SecurityRestServiceImpl.java class is ready to do that.

+
+
+
+
@Path("/security/v1")
+@Named("SecurityRestService")
+public class SecurityRestServiceImpl {
+
+  @Produces(MediaType.APPLICATION_JSON)
+  @GET
+  @Path("/currentuser/")
+  public UserDetailsClientTo getCurrentUserDetails(@Context HttpServletRequest request) {
+
+  }
+}
+
+
+
+

We will only need to implement the getCurrentUserDetails method.

+
+
+
+
== Authorization
+
+

We need to secure three services, that only should be accessible for users with role Waiter:

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter.

    +
  • +
+
+
+

As part of the token we are providing the user Role. So, when validating the token, we can obtain that same information and build a UsernamePasswordAuthenticationToken with username and the roles as collection of Granted Authorities.

+
+
+

Doing so, afterwards, in the implementation class of the logic layer we can set up the related methods with the java security '@RolesAllowed' annotation to block the access to the resource for users that do not match the expected roles.

+
+
+
+
`@RolesAllowed(Roles.WAITER)`
+public PaginatedListTo<BookingEto> findBookings(BookingSearchCriteriaTo criteria) {
+  return findBookings(criteria);
+}
+
+
+
+
Table of Contents
+ +
+
+
+
.NET design
+
+

TODO

+
+
+
Table of Contents
+ +
+
+
+
NodeJS design (deprecated)
+ +
+
+
Introduction
+
+

The NodeJS back-end for My Thai Star application is going to be based on:

+
+
+
    +
  • +

    ExpressJS as the web application framework

    +
  • +
  • +

    devon4node as data access layer framework

    +
  • +
  • +

    DynamoDB as NoSQL Database

    +
  • +
+
+
+

To know more details about the above technologies please visit the following documentation:

+
+
+ +
+
+
+
Basic architecture details
+
+

This structure can be shown in the following example image:

+
+
+
+folder organization +
+
+
+
    +
  • +

    public - All files which be exposed on the server directly

    +
  • +
  • +

    src

    +
    +
      +
    • +

      database folder - Folder with scripts to create/delete/seed the database

      +
    • +
    • +

      model - Folder with all data model

      +
    • +
    • +

      routes - Folder with all ExpressJS routers

      +
    • +
    • +

      utils - Folder with all utils like classes and functions

      +
    • +
    • +

      app.ts - File with ExpressJS declaration

      +
    • +
    • +

      config.ts - File with server configs

      +
    • +
    • +

      logic.ts - File with the business logic

      +
    • +
    +
    +
  • +
  • +

    test - Folder with all tests

    +
  • +
+
+
+
+
Layers
+
+
    +
  • +

    Service Layer: this layer will expose the REST API to exchange information with the client applications.

    +
  • +
  • +

    Logic Layer: the layer in charge of hosting the business logic of the application.

    +
  • +
  • +

    Data Access Layer: the layer to communicate with the data base.

    +
  • +
+
+
+
+
Service layer
+
+

The services layer will be solved using REST services with ExpressJS

+
+
+

To give service to the defined User Stories we will need to implement the following services:

+
+
+
    +
  • +

    provide all available dishes.

    +
  • +
  • +

    save a booking.

    +
  • +
  • +

    save an order.

    +
  • +
  • +

    provide a list of bookings (only for waiters) and allow filtering.

    +
  • +
  • +

    provide a list of orders (only for waiters) and allow filtering.

    +
  • +
  • +

    login service (see the Security section).

    +
  • +
  • +

    provide the current user data (see the Security section)

    +
  • +
+
+
+

In order to be compatible with the other back-end implementations, we must follow the naming conventions proposed for Devon4j applications. We will define the following end points for the listed services.

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that do not belong to the Order entity).

    +
  • +
  • +

    (POST) /mythaistar/login.

    +
  • +
  • +

    (GET) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

You can find all the details for the services implementation in the Swagger definition included in the My Thai Star project on Github.

+
+
+

To treat these services separately, the following routers were created:

+
+
+
    +
  • +

    bookingmanagement: will answer all requests with the prefix /mythaistar/services/rest/bookingmanagement/v1

    +
  • +
  • +

    dishmanagement: will answer all requests with the prefix /mythaistar/services/rest/dishmanagement/v1

    +
  • +
  • +

    ordermanagement: will answer all requests with the prefix /mythaistar/services/rest/ordermanagement/v1

    +
  • +
+
+
+

These routers will define the behavior for each service and use the logical layer.

+
+
+

An example of service definition:

+
+
+
+
router.post('/booking/search', (req: types.CustomRequest, res: Response) => {
+    try {
+        // body content must be SearchCriteria
+        if (!types.isSearchCriteria(req.body)) {
+            throw {code: 400, message: 'No booking token given' };
+        }
+
+        // use the searchBooking method defined at business logic
+        business.searchBooking(req.body, (err: types.Error | null, bookingEntity: types.PaginatedList) => {
+            if (err) {
+                res.status(err.code || 500).json(err.message);
+            } else {
+                res.json(bookingEntity);
+            }
+        });
+    } catch (err) {
+        res.status(err.code || 500).json({ message: err.message });
+    }
+});
+
+
+
+
+
Logic layer and Data access layer
+
+

In the logic layer we will locate all the business logic of the application. It will be located in the file logic.ts. If in this layer we need to get access to the data, we make use of data access layer directly, in this case using devon4node with the DynamoDB adapter.

+
+
+

Example:

+
+
+
+
export async function cancelOrder(orderId: string, callback: (err: types.Error | null) => void) {
+    let order: dbtypes.Order;
+
+    try {
+        // Data access
+        order = await oasp4fn.table('Order', orderId).promise() as dbtypes.Order;
+
+        [...]
+    }
+}
+
+
+
+

We could define the data access layer separately, but devon4node allows us to do this in a simple and clear way. So, we decided not to separate the data access layer from the business logic.

+
+
+
+
Security with Json Web Token
+
+

For the Authentication and Authorization the app will implement the json web token protocol.

+
+
+
+
JWT basics
+
+

Refer to JWT basics for more information.

+
+
+
+
JWT implementation details
+
+

The Json Web Token pattern will be implemented based on the JSON web token library available on npm.

+
+
+
+
== Authentication
+
+

Based on the JSON web token approach, we will implement a class Authentication to define the security entry point and filters. Also, as My Thai Star is a mainly public application, we will define here the resources that won’t be secured.

+
+
+

List of unsecured resources:

+
+
+
    +
  • +

    /services/rest/dishmanagement/\**: to allow anonymous users to see the dishes info in the menu section.

    +
  • +
  • +

    /services/rest/ordermanagement/v1/order: to allow anonymous users to save an order. They will need a booking token but they won’t be authenticated to do this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking: to allow anonymous users to create a booking. Only a booking token is necessary to accomplish this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking/cancel/\**: to allow canceling a booking from an email. Only the booking token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/accept/\**: to allow guests to accept an invite. Only a guest token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/decline/\**: to allow guests to reject an invite. Only a guest token is needed.

    +
  • +
+
+
+

To configure the login we will create an instance of Authentication in the app file and then we will use the method auth to handle the requests to the /login endpoint.

+
+
+
+
app.post('/mythaistar/login', auth.auth);
+
+
+
+

To verify the presence of the Authorization token in the headers, we will register in the express the Authentication.registerAuthentication middleware. This middleware will check if the token is correct, if so, it will place the user in the request and continue to process it. If the token is not correct it will continue processing the request normally.

+
+
+
+
app.use(auth.registerAuthentication);
+
+
+
+

Finally, we have two default users created in the database:

+
+
+
    +
  • +

    user: waiter

    +
  • +
  • +

    password: waiter

    +
  • +
  • +

    role: WAITER

    +
  • +
  • +

    user: user0

    +
  • +
  • +

    password: password

    +
  • +
  • +

    role: CUSTOMER

    +
  • +
+
+
+
+
== Token set up
+
+

Following the official documentation, the implementation details for MyThaiStar’s JWT will be:

+
+
+
    +
  • +

    Secret: Used as part of the signature of the token, acting as a private key. It can be modified at config.ts file.

    +
  • +
  • +

    Token Prefix schema: Bearer. The token will look like Bearer <token>

    +
  • +
  • +

    Header: Authorization. The response header where the token will be included. Also, in the requests, when checking the token it will be expected to be in the same header.

    +
  • +
  • +

    The Authorization header should be part of the Access-Control-Expose-Headers header to allow clients access to the Authorization header content (the token);

    +
  • +
  • +

    Signature Algorithm: To encrypt the token we will use the default algorithm HS512.

    +
  • +
+
+
+
+
== Current User request
+
+

To provide the client with the current user data, our application should expose a service to return the user details. In this case the Authentication class has a method called getCurrentUser which will return the user data. We only need to register it with Express.

+
+
+
+
app.get('/mythaistar/services/rest/security/v1/currentuser', auth.getCurrentUser);
+
+
+
+
+
== Authorization
+
+

We need to secure three services, that only should be accessible for users with role Waiter:

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter.

    +
  • +
+
+
+

To ensure this, the Authorization class has the securizedEndpoint method that guarantees access based on the role. This method can be used as middleware in secure services. As the role is included in the token, once validated we will have this information in the request and the middleware can guarantee access or return a 403 error.

+
+
+
+
app.use('/mythaistar/services/rest/ordermanagement/v1/order/filter', auth.securizedEndpoint('WAITER'));
+app.use('/mythaistar/services/rest/ordermanagement/v1/order/search', auth.securizedEndpoint('WAITER'));
+app.use('/mythaistar/services/rest/bookingmanagement/v1/booking/search', auth.securizedEndpoint('WAITER'));
+
+
+
+
Table of Contents
+ +
+
+
+
Serverless design (deprecated)
+ +
+
+
Introduction
+
+

The NodeJS back-end for My Thai Star application is going to be based on:

+
+
+
    +
  • +

    Serverless as serverless framework

    +
  • +
  • +

    devon4node as data access layer framework

    +
  • +
  • +

    DynamoDB as NoSQL Database

    +
  • +
+
+
+

To know more details about the above technologies please visit the following documentation:

+
+
+ +
+
+
+
Basic architecture details
+
+

This structure can be shown in the following example image:

+
+
+
+folder organization +
+
+
+
    +
  • +

    handlers - All function handlers following devon4node structure

    +
  • +
  • +

    src

    +
    +
      +
    • +

      model - Folder with all data model

      +
    • +
    • +

      utils - Folder with all utils like classes and functions

      +
    • +
    • +

      config.ts - File with server configs

      +
    • +
    • +

      logic.ts - File with the business logic

      +
    • +
    +
    +
  • +
  • +

    test - Folder with all tests

    +
  • +
+
+
+
+
Layers
+
+
    +
  • +

    Service Layer: this layer will expose the REST API to exchange information with the client applications.

    +
  • +
  • +

    Logic Layer: the layer in charge of hosting the business logic of the application.

    +
  • +
  • +

    Data Access Layer: the layer to communicate with the data base.

    +
  • +
+
+
+
+
Service layer
+
+

The services layer will be solved using REST services with Serverless

+
+
+

To give service to the defined User Stories we will need to implement the following services:

+
+
+
    +
  • +

    provide all available dishes.

    +
  • +
  • +

    save a booking.

    +
  • +
  • +

    save an order.

    +
  • +
  • +

    provide a list of bookings (only for waiters) and allow filtering.

    +
  • +
  • +

    provide a list of orders (only for waiters) and allow filtering.

    +
  • +
  • +

    login service (see the Security section).

    +
  • +
  • +

    provide the current user data (see the Security section)

    +
  • +
+
+
+

In order to be compatible with the other back-end implementations, we must follow the naming conventions proposed for Devon4j applications. We will define the following end points for the listed services.

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that do not belong to the Order entity).

    +
  • +
  • +

    (POST) /mythaistar/login.

    +
  • +
  • +

    (GET) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

You can find all the details for the services implementation in the Swagger definition included in the My Thai Star project on Github.

+
+
+

To treat these Http services, we must define the handlers following the devon4node convention:

+
+
+
    +
  • +

    (handlers/Http/POST/dish-search-handler) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (handlers/Http/POST/booking-handler) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (handlers/Http/POST/order-handler) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (handlers/Http/POST/booking-search-handler) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (handlers/Http/POST/order-search-handler) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (handlers/Http/POST/order-filter-handler) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that do not belong to the Order entity).

    +
  • +
  • +

    (handlers/Http/POST/login-handler) /mythaistar/login.

    +
  • +
  • +

    (handlers/Http/GET/current-user-handler) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

These handlers will define the behavior for each service and use the logical layer.

+
+
+

An example of handler definition:

+
+
+
+
oasp4fn.config({ path: '/mythaistar/services/rest/bookingmanagement/v1/booking/search' });
+export async function bookingSearch(event: HttpEvent, context: Context, callback: Function) {
+    try {
+        const search = <types.SearchCriteria>event.body;
+        const authToken = event.headers.Authorization;
+        // TODO: the token still needs to be checked and validated here
+
+        auth.decode(authToken, (err, decoded) => {
+            if (err || decoded.role !==  'WAITER') {
+                throw { code: 403, message: 'Forbidden'};
+            }
+
+            // body content must be SearchCriteria
+            if (!types.isSearchCriteria(search)) {
+                throw { code: 400, message: 'No booking token given' };
+            }
+
+            business.searchBooking(search, (err: types.Error | null, bookingEntity: types.PaginatedList) => {
+                if (err) {
+                    callback(new Error(`[${err.code || 500}] ${err.message}`));
+                } else {
+                    callback(null, bookingEntity);
+                }
+            });
+        });
+    } catch (err) {
+        callback(new Error(`[${err.code || 500}] ${err.message}`));
+    }
+}
+
+
+
+

The default integration for a handler is lambda. See devon4node documentation for more information about default values and how to change it.

+
+
+
+
==
+
+

If you change the integration to lambda-proxy, you must take care that in this case the data will not be parsed. You must do JSON.parse explicitly +== ==

+
+
+

After defining all the handlers, we must execute the fun command, which will generate the files serverless.yml and webpack.config.js.

+
+
+
+
Logic layer and Data access layer
+ +
+
+
Security with Json Web Token
+
+

For the Authentication and Authorization the app will implement the json web token protocol.

+
+
+
+
JWT basics
+
+

Refer to JWT basics for more information.

+
+
+
+
JWT implementation details
+
+

The Json Web Token pattern will be implemented based on the JSON web token library available on npm.

+
+
+
+
== Authentication
+
+

Based on the JSON web token approach, we will implement two methods: one to verify a user and generate the token, and another to decode the token and return the user data. Also, as My Thai Star is a mainly public application, we will define here the resources that won’t be secured.

+
+
+

List of unsecured resources:

+
+
+
    +
  • +

    /services/rest/dishmanagement/**: to allow anonymous users to see the dishes info in the menu section.

    +
  • +
  • +

    /services/rest/ordermanagement/v1/order: to allow anonymous users to save an order. They will need a booking token but they won’t be authenticated to do this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking: to allow anonymous users to create a booking. Only a booking token is necessary to accomplish this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking/cancel/**: to allow canceling a booking from an email. Only the booking token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/accept/**: to allow guests to accept an invite. Only a guest token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/decline/**: to allow guests to reject an invite. Only a guest token is needed.

    +
  • +
+
+
+

To configure the login we will create a handler called login and then we will use the method code to verify the user and generate the token.

+
+
+
+
oasp4fn.config({ integration: 'lambda-proxy', path: '/mythaistar/login' });
+export async function login(event: HttpEvent, context: Context, callback: Function) {
+.
+.
+.
+.
+}
+
+
+
+

We have two default users created in the database:

+
+
+
    +
  • +

    user: waiter

    +
  • +
  • +

    password: waiter

    +
  • +
  • +

    role: WAITER

    +
  • +
  • +

    user: user0

    +
  • +
  • +

    password: password

    +
  • +
  • +

    role: CUSTOMER

    +
  • +
+
+
+
+
== Token set up
+ +
+
+
== Current User request
+
+

To provide the client with the current user data our application should expose a service to return the user details. In order to do this, we must define a handler called current-user-handler. This handler must decode the Authorization token and return the user data.

+
+
+
+
oasp4fn.config({
+    path: '/mythaistar/services/rest/security/v1/currentuser',
+});
+export async function currentUser(event: HttpEvent, context: Context, callback: Function) {
+    let authToken = event.headers.Authorization;
+    try {
+        auth.decode(authToken, (err: any, decoded?: any) => {
+            if (err) {
+                callback(new Error(`[403] Forbidden`));
+            } else {
+                callback(null, decoded);
+            }
+        });
+    } catch (err) {
+        callback(new Error(`[${err.code || 500}] ${err.message}`));
+    }
+}
+
+
+
+
+
== Authorization
+
+

We need to secure three services, that only should be accessible for users with role Waiter:

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter.

    +
  • +
+
+
+

To ensure this, we must decode the Authorization token and check the result. As the role is included in the token, once validated we will have this information and can guarantee access or return a 403 error.

+
+
+
+
oasp4fn.config({ path: '/mythaistar/services/rest/bookingmanagement/v1/booking/search' });
+export async function bookingSearch(event: HttpEvent, context: Context, callback: Function) {
+    const authToken = event.headers.Authorization;
+    auth.decode(authToken, (err, decoded) => {
+        try {
+            if (err || decoded.role !==  'WAITER') {
+                throw { code: 403, message: 'Forbidden' };
+            }
+
+            [...]
+
+        } catch (err) {
+            callback(new Error(`[${err.code || 500}] ${err.message}`));
+        }
+    });
+}
+
+
+
+
Table of Contents
+ +
+
+
+
GraphQL design
+
+

TODO

+
+
+
+
+
+

Client Side

+
+
+
Table of Contents
+ +
+
+
Angular design
+ +
+
+
Introduction
+
+

MyThaiStar client side has been built using latest frameworks, component libraries and designs:

+
+
+

Angular 4 as main front-end Framework. https://angular.io/

+
+
+

Angular/CLI 1.0.5 as Angular tool helper. https://github.com/angular/angular-cli

+
+
+

Covalent Teradata 1.0.0-beta4 as Angular native component library based on Material Design. https://teradata.github.io/covalent/#/

+
+
+

Angular/Material2 1.0.0-beta5 used by Covalent Teradata. https://github.com/angular/material2

+
+
+

Note: these dependencies are evolving at this moment and, when possible, we are updating them in the project.

+
+
+
+
Basic project structure
+
+

The project is using the basic project seed that Angular/CLI provides with “ng new <project name>”. Then the app folder has been organized as Angular recommends and goes as follows:

+
+
+
    +
  • +

    app

    +
    +
      +
    • +

      components

      +
      +
        +
      • +

        sub-components

        +
      • +
      • +

        shared

        +
      • +
      • +

        component files

        +
      • +
      +
      +
    • +
    • +

      main app component

      +
    • +
    +
    +
  • +
  • +

    assets folder

    +
  • +
  • +

    environments folder

    +
  • +
  • +

    rest of angular files

    +
  • +
+
+
+

This structure can be shown in the following example image:

+
+
+
+folder organization +
+
+
+
+
Main Views and components
+
+

List of components that serve as a main view to navigate or components developed to make atomically a group of functionalities which given their nature, can be highly reusable through the app.

+
+
+
+routes +
+
+
+

Note: no-name-route corresponds to whatever URL the user introduced and does not exist, it redirects to Home-Component.

+
+
+
+
Public area
+ +
+
+
== App Component
+
+

Contains the components that are on top of all views, including:

+
+
+
+
== Order sidenav
+
+

Sidenav where selected orders are displayed with their total price and some comments.

+
+
+
+
== Navigation sidenav (only for mobile)
+
+

This sidenav proposal is to let user navigate through the app when the screen is too small to show the navigation buttons on the header.

+
+
+
+
== Header
+
+

It contains the title, and some other basic functions regarding open and close sidenavs.

+
+
+
+ +
+

At the end of the page that shows only when open on desktop.

+
+
+
+
== Home-Component
+
+

Main view that shows up when the app initializes.

+
+
+
+
== Menu-Component
+
+

View where the users can view, filter and select the dishes (with their extras) they want to order it contains a component to each menu entry:

+
+
+
+
== Menu-card
+
+

This component composes all the data of a dish in a card. Component made to display indeterminate number of dishes easily.

+
+
+
+
== Book Table Component
+
+

View to book a table on a given date with a given number of attendees, or to create a reservation with a number of invitations via email.

+
+
+
+
== Book-table-dialog
+
+

Dialog which opens as a result of fulfilling the booking form, it displays all the data of the booking attempt, if everything is correct, the user can send the information or cancel if something is wrong.

+
+
+
+
== Invitation-dialog
+
+

Dialog which opens as a result of fulfilling the invitation form, it displays all the data of the booking with friends attempt, if everything is correct, the user can send the information or cancel if something is wrong.

+
+
+
+
== User Area
+
+

Group of dialogs with the proposal of giving some functionalities to the user, as login, register, change password or connect with Twitter.

+
+
+
+
== Login-dialog
+
+

Dialog with a tab to navigate between login and register.

+
+
+
+
== Password-dialog
+
+

Functionality reserved to already logged users, in this dialog the user can change freely their password.

+
+
+
+
== Twitter-dialog
+
+

Dialog designed specifically to connect your user account with Twitter.

+
+
+
+
Waiter cockpit area
+
+

Restricted area to workers of the restaurant, here we can see all information about booked tables with the selected orders and the reservations with all the guests and their acceptance or decline of the event.

+
+
+
+
== Order Cockpit Component
+
+

Data table with all the booked tables and a filter to search them, to show more info about that table you can click on it and open a dialog.

+
+
+
+
== Order-dialog
+
+

Complete display of data regarding the selected table and its orders.

+
+
+
+
== Reservation Cockpit Component
+
+

Data table with all the reservations and a filter to search them, to show more info about that table you can click on it and open a dialog.

+
+
+
+
== Reservation-dialog
+
+

Complete display of data regarding the selected table and its guests.

+
+
+
+
Email Management
+
+

As the application sends emails to both guests and hosts, we chose a URL-based approach where the email contains a button with a URL to a service in the app and a token; the front-end reads that token and, depending on the URL, will redirect to one service or another. For example:

+
+
+
+
`http://localhost:4200/booking/cancel/CB_20170605_8fb5bc4c84a1c5049da1f6beb1968afc`
+
+
+
+

This URL will tell the app that is a cancellation of a booking with the token CB_20170605_8fb5bc4c84a1c5049da1f6beb1968afc. The app will process this information, send it to back-end with the correct headers, show the confirmation of the event and redirect to home page.

+
+
+

The main cases at the moment are:

+
+
+
+
== Accept Invite
+
+

A guest accepts an invitation sent by a host. They will receive another email to decline if they change their mind later on.

+
+
+
+
== Reject Invite
+
+

A guest declines the invitation.

+
+
+
+
== Cancel Reservation
+
+

A host cancels the reservation; everybody that has accepted or has not yet answered will receive an email notifying them that the event is canceled. Also, all the orders related to this reservation will be removed.

+
+
+
+
== Cancel Orders
+
+

When you have a reservation, you will be assigned to a token, with that token you can save your order in the restaurant. When sent, you will receive an email confirming the order and the possibility to remove it.

+
+
+
+
Services and directives
+
+

Services are where all the main logic between components of that view should be. This includes calling a remote server, composing objects, calculate prices, etc.

+
+
+

Directives are a single functionality that are related to a component.

+
+
+

As it can be seen in the basic structure, every view that has a minimum of logic or need to call a server has its own service located in the shared folder.

+
+
+

Also, services and directives can be created to compose a reusable piece of code that will be reused in some parts of the code:

+
+
+
+
Price-calculator-service
+
+

This service located in the shared folder of sidenav contains the basic logic to calculate the price of a single order (with all the possibilities) and to calculate the price of a full list of orders for a table. As this is used in the sidenav and in the waiter cockpit, it has been exported as a service to be imported where needed and easily testable.

+
+
+
+
Authentication
+
+

Authentication services serves as a validator of roles and login and, at the same time, stores the basic data regarding security and authentication.

+
+
+

Main task of this services is to provide visibility at app level of the current user information:

+
+
+
    +
  • +

    Check if the user is logged or not.

    +
  • +
  • +

    Check the permissions of the current user.

    +
  • +
  • +

    Store the username and the JWT token.

    +
  • +
+
+
+
+
Snack Service
+
+

Service created to serve as a factory of Angular Material Snackbars, which are used commonly through the app. This service accepts some parameters to customize the snackBar and opens it with this parameters.

+
+
+
+
Window Service
+
+

For responsiveness reasons, the dialogs have to accept a width parameter to adjust to screen width and this information is given by Window object, as it is a good practice to have it in an isolated service, which also calculates the width percentage to apply on the dialogs.

+
+
+
+
Equal-validator-directive
+
+

This directive located in the shared folder of userArea is used in 2 fields to make sure they have the same value. This directive is used in confirm password fields in register and change password.

+
+
+
+
Mock Back-end
+
+

Developing against a mock back-end while the real back-end was still being developed let us build a more realistic application and made the adaptation easier once the real back-end was ready to be connected and called. Its structure is as follows:

+
+
+
+back end +
+
+
+

Contains the three main groups of functionalities in the application. Every group is composed by:

+
+
+
    +
  • +

    An interface with all the methods to implement.

    +
  • +
  • +

    A service that implements that interface, the main task of this service is to choose between real back-end and mock back-end depending on an environment variable.

    +
  • +
  • +

    Mock back-end service which implements all the methods declared in the interface using mock data stored in a local file and mainly uses Lodash to operate the arrays.

    +
  • +
  • +

    Real back-end service works as Mock back-end but in this case the methods call for server rest services through Http.

    +
  • +
+
+
+
+
Booking
+
+

The booking group of functionalities manages the calls to reserve a table with a given time and assistants or with guests, get reservations filtered, accept or decline invitations or cancel the reservation.

+
+
+
+
Orders
+
+

Management of the orders, including saving, filtering and cancel an order.

+
+
+
+
Dishes
+
+

The dishes group of functionalities manages the calls to get and filter dishes.

+
+
+
+
Login
+
+

Login manages the userArea logic: login, register and change password.

+
+
+
+
Security
+
+

My Thai Star security is composed by two main security services:

+
+
+
+
Auth-guard
+
+

Front-end security approach: this service implements an interface called CanActivate that comes from the angular/router module. The CanActivate interface forces you to implement a canActivate() function which returns a Boolean. +This service checks, using the data stored by the Auth-Service, whether the user is logged in and has enough permission to access the waiter cockpit. This prevents a forbidden user from accessing the waiter cockpit just by editing the URL in the browser.

+
+
+
+
JWT
+
+

JSON Web Token consists of a token that is generated by the server when the user logs in. Once provided, the token has to be included in an Authentication header on every Http call to the rest service, otherwise the call will be forbidden. +JWT also has an expiration date and a role checking, so if a user has not enough permissions or keeps logged for a long certain amount of time that exceeds this expiration date, the next time he calls for a service call, the server will return an error and forbid the call. You can log again to restore the token.

+
+
+
+
== HttpClient
+
+

To implement this Authorization header management, an HttpClient service has been implemented. +This service works as a wrapper around Http, providing some more functionalities, like header management and automatic handling of a server token error in case the JWT has expired, is corrupted or is not permitted.

+
+
+
Table of Contents
+ +
+
+
+
Xamarin design
+
+

TODO

+
+
+
+
+
+

Security

+
+
+
Table of Contents
+ +
+
+

Two-Factor Authentication

+
+

Two-factor Authentication (2FA) provides an additional level of security to your account. Once enabled, in addition to supplying your username and password to login, you’ll be prompted for a code generated by your Google Authenticator. For example, a password manager on one of your devices.

+
+
+

By enabling 2FA, an additional one-time password is required to log into your account, which requires access to your paired device. This massively increases the barrier for an attacker to break into your account.

+
+
+
+

Back-end mechanism

+
+

In the back-end, we utilize Spring Security for any authentication.

+
+
+

Following the arrows, one can see all processes regarding authentication. The main idea is to check all credentials depending on their 2FA status and then either grant access to the specific user or deny access. This picture illustrates a normal authentication with username and password.

+
+
+
+security cross component +
+
+
+

When dealing with 2FA, another provider and filter is handling the request from /verify

+
+
+
+security cross component twofactor +
+
+
+

Here you can observe which filter will be used. +JWT-Authentication-Filter does intercept any request, which enforces being authenticated via JWT

+
+
+
+filters png +
+
+
+ + + + + +
+ + +Whenever the secret or QR code gets transferred between two parties, one must enforce SSL/TLS or IPsec to comply with RFC 6238. +
+
+
+
+

Activating Two-Factor Authentication

+
+

In the current state, TOTP +will be used for OTP generation. For this purpose we recommend the Google Authenticator or any TOTP generator out there.

+
+
+
    +
  • +

    Login with your account

    +
  • +
  • +

    Open the 2FA settings

    +
  • +
  • +

    Activate the 2FA Status

    +
  • +
  • +

    Initialize your device with either a QR-Code or a secret

    +
  • +
+
+
+
+

Frontend

+
+

These are the two main options, which you can obtain by toggling between QR-Code and secret.

+
+
+
+2FA qr code menu +
+
+
+
+2FA secret menu +
+
+
+

After an activation and logout. This prompt will ask you to enter the OTP given from your device.

+
+
+
+otp prompt +
+
+
+
+
+
+

Testing

+
+ +
+
+
+

Server Side

+
+
+
Table of Contents
+ +
+
+
Java testing
+ +
+
+
Component testing
+
+

We are going to test our components as a unit using Spring Test and Devon4j-test modules.

+
+
+

In order to test a basic component of the app first we will create a test class in the src/test/java folder and inside the main package of the test module. We will name the class following the convention.

+
+
+
+
[Component]Test
+
+
+
+

Then, in the declaration of the test class, we will use the @SpringBootTest annotation to run the application context. In addition, we will extend the ComponentTest from Devon4j-test module to have access to the main functionalities of the module, see more details here.

+
+
+

Spring Test allows us to use Dependency Injection so we can inject our component directly using the @Inject annotation.

+
+
+

Each test will be represented by a method annotated with @Test. Inside the method we will test one functionality, evaluating the result thanks to the asserts provided by the ComponentTest class that we are extending.

+
+
+

A simple test example

+
+
+
+
@SpringBootTest(classes = SpringBootApp.class)
+public class DishmanagementTest extends `ComponentTest` {
+
+  @Inject
+  private Dishmanagement dishmanagement;
+
+  @Test
+  public void findAllDishes() {
+
+    PaginatedListTo<DishCto> result = this.dishmanagement.findDishes();
+    assertThat(result).isNotNull();
+  }
+
+  ...
+}
+
+
+
+
+
Running the tests
+ +
+
+
From Eclipse
+
+

We can run the test from within Eclipse with the contextual menu Run As > JUnit Test. This functionality can be launched from method level, class level or even package level. The results will be shown in the JUnit tab.

+
+
+
+test results eclipse +
+
+
+
+
From command line using Maven
+
+

We can also run tests using Maven and the command line, using the command mvn test (or mvn clean test).

+
+
+
+
`C:\MyThaiStar>mvn clean test`
+
+
+
+

Doing this we will run all the tests of the project (recognized by the Test word at the end of the classes) and the results will be shown by sub-project.

+
+
+
+
...
+
+[D: 2017-07-17 09:30:08,457] [P: INFO ] [C: ] [T: Thread-5] [L: org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean] - [M: Closing JPA EntityManagerFactory for persistence unit 'default']
+
+Results :
+
+Tests run: 11, Failures: 0, Errors: 0, Skipped: 1
+
+...
+
+[INFO]
+[INFO] --- maven-compiler-plugin:3.1:testCompile (default-testCompile) @ mtsj-server ---
+[INFO] No sources to compile
+[INFO]
+[INFO] --- maven-surefire-plugin:2.12.4:test (default-test) @ mtsj-server ---
+[INFO] No tests to run.
+[INFO] ------------------------------------------------------------------------
+[INFO] Reactor Summary:
+[INFO]
+[INFO] mtsj ............................................... SUCCESS [  0.902 s]
+[INFO] mtsj-core .......................................... SUCCESS [02:30 min]
+[INFO] mtsj-server ........................................ SUCCESS [  1.123 s]
+[INFO] ------------------------------------------------------------------------
+[INFO] BUILD SUCCESS
+[INFO] ------------------------------------------------------------------------
+[INFO] Total time: 02:35 min
+[INFO] Finished at: 20XX-07-17T09:30:13+02:00
+[INFO] Final Memory: 39M/193M
+[INFO] ------------------------------------------------------------------------
+
+
+
+
Table of Contents
+ +
+
+
+
.NET testing
+
+

TODO

+
+
+
Table of Contents
+ +
+
+
+
NodeJS testing
+
+

TODO

+
+
+
Table of Contents
+ +
+
+
+
GraphQL testing
+
+

TODO

+
+
+
+
+
+

Client Side

+
+
+
Table of Contents
+ +
+
+
Angular testing
+
+
+testing +
+
+
+

MyThaiStar testing is made using Angular default testing environment and syntax language: Karma and Jasmine

+
+
+

To test an element of the application, tests are marked as a special type of file with the extension .spec.ts; then, in the MyThaiStar angular/CLI config you can notice that there is an array with only one entry, Karma, which in turn has one entry pointing to Karma.config.js.

+
+
+

In the configuration of Karma we indicate which syntax language we are going to use (currently Jasmine as said before) between some other configurations, it is remarkable the last one: browsers. By default, the only available browser is chrome, that is because Karma works opening a chrome view to run all the tests, in that same window, Karma shows the result or errors of the test run. But we can add some other browser to adjust to our necessities, for example, in some automatic processes that run from console, it is not an option to open a chrome window, in that case, MyThaiStar used PhantomJS and ChromeHeadless.

+
+
+

Taking all of this into account, to run the test in MyThaiStar we need to move to project root folder and run this command : ng test --browser <browser>

+
+
+
+
==
+
+

If you run just ng test it will run the three browser options simultaneously, resulting in three test runs and outputs, which can cause timeouts and unwanted behaviors. If you want a shortcut to run the tests with a chrome window you can just run yarn test, so we strongly encourage you not to use just ng test. +== ==

+
+
+

Here we are going to see how Client side testing of MyThaiStar has been done.

+
+
+
+
Testing Components
+
+

Angular components were created using angular/CLI ng create component so they already come with an spec file to test them. The only thing left to do is to add the providers and imports needed in the component to work as the component itself, once this is done, the most basic test is to be sure that all the dependencies and the component itself can be correctly created.

+
+
+

As an example, this is the spec.ts of the menu view component:

+
+
+
+
all the imports...
+
+describe('MenuComponent', () => {
+  let component: MenuComponent;
+  let fixture: ComponentFixture<MenuComponent>;
+
+  beforeEach(async(() => {
+    TestBed.configureTestingModule({
+      declarations: [ MenuComponent, MenuCardComponent ],
+      providers: [SidenavService, MenuService, SnackBarService],
+      imports: [
+        BrowserAnimationsModule,
+        BackendModule.forRoot({environmentType: 0, restServiceRoot: 'v1'}),
+        CovalentModule,
+      ],
+    })
+    .compileComponents();
+  }));
+
+  beforeEach(() => {
+    fixture = TestBed.createComponent(MenuComponent);
+    component = fixture.componentInstance;
+    fixture.detectChanges();
+  });
+
+  it('should create', () => {
+    expect(component).toBeTruthy();
+  });
+});
+
+
+
+

First we declare the component to be tested and a Fixture object, then, we configure the testingModule right in the same way we could configure the MenuModule with the difference here that tests always have to use the mock back-end because we do not want to really depend on a server to test our components.

+
+
+

Once configured the test module, we have to prepare the context of the test, in this case we create the component, that is exactly what is going on in the beforeEach() function.

+
+
+

Finally, we are ready to use the component and its fixture to check if the component has been correctly created.

+
+
+

At this moment this is the case for most of the components, in the future, some work would be applied on this matter to have a full testing experience in MyThaiStar components.

+
+
+
+
Dialog components
+
+

Dialog components are in a special category because they can not be tested normally. In the way Material implements the opening of dialogs, you have to create a component that will load into a dialog, to tell the module to load this components when needed, they have to be added into a special array category: EntryComponents. So, to test them, we need to import them in the test file as well.

+
+
+

Also, the testing code to open the component is a bit different too:

+
+
+
+
...
+  beforeEach(() => {
+    dialog = TestBed.get(MdDialog);
+    component = dialog.open(CommentDialogComponent).componentInstance;
+  });
+...
+
+
+
+

That is right, the beforeEach() function is slightly different from the example above; in this case we have to force the test to know that the component is only displayed in a dialog, so we have to open a dialog with this component in order to access it.

+
+
+
+
Testing Services
+
+

As well as components, services can be tested too, actually, they are even more necessary to be tested because they have inside more complex logic and data management.

+
+
+

As an example of testing services, I am going to use a well-designed service, with a specific purpose and with its logic completely tested: the price-calculator service:

+
+
+
+
...
+
+describe('PriceCalculatorService', () => {
+
+  beforeEach(() => {
+    TestBed.configureTestingModule({
+      providers: [PriceCalculatorService],
+    });
+  });
+
+  it('should be properly injected', inject([PriceCalculatorService], (service: PriceCalculatorService) => {
+    expect(service).toBeTruthy();
+  }));
+
+  describe('check getPrice method', () => {
+
+    it('should calculate price for single order without extras', inject([PriceCalculatorService], (service: PriceCalculatorService) => {
+      const order: OrderView = {
+        dish: {
+          id: 0,
+          price: 12.50,
+          name: 'Order without extras',
+        },
+        orderLine: {
+          comment: '',
+          amount: 1,
+        },
+        extras: [],
+      };
+
+      expect(service.getPrice(order)).toEqual(order.dish.price);
+    }));
+...
+
+
+
+

In services test, we have to inject the service in order to use it, then we can define some initializing contexts to test if the functions of the services returns the expected values, in the example we can see how an imaginary order is created and expected the function getPrice() to correctly calculate the price of that order.

+
+
+

In this same test file you can find some more test regarding all the possibilities of use in that services: orders with and without extras, single order, multiple orders and so on.

+
+
+

For some services, as well as the components, we have only tested that they are correctly created and their dependencies properly injected; in the future, these services will get full test coverage.

+
+
+
+
Testing in a CI environment
+
+
Table of Contents
+ +
+
+
+
Xamarin testing
+
+

TODO

+
+
+
+
+
+

End to end

+
+
+
Table of Contents
+ +
+
+
MrChecker E2E Testing
+ +
+
+
Introduction
+
+

MrChecker is a testing framework included in devonfw with several useful modules, from which we will focus on the Selenium Module, a module designed to make end-to-end testing easier to implement.

+
+
+
+
How to use it
+
+

First of all download the repository.

+
+
+

You must run My Thai Star front-end and back-end application and modify your URL to the front in mrchecker/endtoend-test/src/resources/settings.properties

+
+
+

Now you can run end to end test to check if the application works properly.

+
+
+

To run the e2e test you have two options:

+
+
+

The first option is using the command line in devonfw distribution

+
+
+
+
cd mrchecker/endtoend-test/
+mvn test -Dtest=MyThaiStarTest -Dbrowser=Chrome
+
+
+
+

optionally you can use it with a headless version or using another navigator:

+
+
+
+
// chrome headless (without visual component)
+mvn test -Dtest=MyThaiStarTest -Dbrowser=ChromeHeadless
+// use firefox navigator
+mvn test -Dtest=MyThaiStarTest -Dbrowser=FireFox
+
+
+
+

The second option is importing the project in devonfw Eclipse and running MyThaiStarTest.java as JUnit (right click, run as JUnit)

+
+
+

They can be executed one by one or all in one go, comment or uncomment @Test before those tests to enable or disable them.

+
+
+

For more information about how to use MrChecker and build your own end to end test read: + * MrChecker documentation + * MrChecker tutorial for My Thai Star

+
+
+
+
End to end tests in My Thai Star
+
+

We have included a test suite with four tests to run in My Thai Star to verify everything works properly.

+
+
+

The included tests do the following:

+
+
+
    +
  • +

    Test_loginAndLogOut: Log in and log out.

    +
  • +
  • +

    Test_loginFake: Attempt to log in with a fake user.

    +
  • +
  • +

    Test_bookTable: Log in and book a table, then login with a waiter and check if the table was successfully booked.

    +
  • +
+
+
+

Test_orderMenu: Log in and order food for a certain booked table.

+
+
+

These four tests can be found inside MyThaiStarTest.java located here.

+
+
+
+
+
+

UI design

+
+
+
Table of Contents
+ +
+
+

Style guide

+
+
+mts styleguide +
+
+
+ +
+
+
+

CI/CD

+
+
+
Table of Contents
+ +
+
+

My Thai Star in Production Line

+ +
+
+

What is PL?

+
+

The Production Line Project is a set of server-side collaboration tools for Capgemini engagements. It has been developed for supporting project engagements with individual tools like issue tracking, continuous integration, continuous deployment, documentation, binary storage and much more!

+
+
+
+pl logo +
+
+
+
+

Introduction

+
+

Although the PL Project is a wide set of tools, only 3 are going to be mainly used for My Thai Star projects to build a Continuous Integration and Continuous Delivery environment. All three are available in the PL instance used for this project.

+
+
+
    +
  1. +

    Jenkins

    +
    +

    This is going to be the "main tool". Jenkins helps to automate the non-human part of the development with Continuous Integration and is going to host all Pipelines (and, obviously, execute them).

    +
    +
  2. +
  3. +

    Nexus

    +
    +

    Nexus manages software "artifacts" required for development. It is possible to both download dependencies from Nexus and publish artifacts as well. It allows to share resources within an organization.

    +
    +
  4. +
  5. +

    SonarQube

    +
    +

    It is a platform for continuous inspection of the code. It is going to be used for the Java back-end.

    +
    +
  6. +
+
+
+
+

Where can I find all My Thai Star Pipelines?

+
+

They are located under the MTS folder of the PL instance:

+
+
+
+mts pipelines +
+
+
+

Those Jenkins Pipelines will not have any code to execute. They’re just pointing to all Jenkinsfiles under the /jenkins folder of the repository. They can be found here.

+
+
+
+

CI in My Thai Star stack

+
+ +
+
+
+

How to configure everything out of the box

+
+

Production Line currently has a template to integrate My Thai Star. All information can be found at devonfw production line repository

+
+
+
Table of Contents
+ +
+
+
Angular CI
+
+

The Angular client-side of My Thai Star is going to have some specific needs for the CI-CD Pipeline to perform mandatory operations.

+
+
+
+
Pipeline
+
+

The Pipeline for the Angular client-side is going to be called MyThaiStar_FRONT-END_BUILD. It is located in the PL instance, under the MTS folder (as previously explained). It is going to follow a process flow like this one:

+
+
+
+angular pipeline flow +
+
+
+

Each of those steps is called a stage in the Jenkins context. Let’s see what those steps mean in the context of the Angular application:

+
+
+
    +
  1. +

    Declarative: Checkout SCM

    +
    +

    Retrieves the project from the GitHub repository which it’s located. This step is not defined directly in our pipeline, but as it is loaded from the repository this step should always be done at the beginning.

    +
    +
    +
    +pipeline config +
    +
    +
  2. +
  3. +

    Declarative: Tool Install

    +
    +

    The Pipeline needs some tools to perform some operations with the Angular project. This tool is a correct version of NodeJS (10.17.0 LTS) with Yarn installed as a global package.

    +
    +
    +
    +
    tools {
    +    nodejs "NodeJS 10.14.0"
    +}
    +
    +
    +
  4. +
  5. +

    Loading Custom Tools

    +
    +

    The Pipeline also needs a browser in order to execute the tests, so in this step the chrome-stable will be loaded. We will use it in a headless mode.

    +
    +
    +
    +
    tool chrome
    +
    +
    +
  6. +
  7. +

    Fresh Dependency Installation

    +
    +

    The script $ yarn does a package installation. As we always clean the workspace after the pipeline, all packages must be installed in every execution.

    +
    +
  8. +
  9. +

    Code Linting

    +
    +

    This script executes a linting process of TypeScript. Rules can be defined in the tslint.json file of the project. It throws an exception whenever a file contains a non-compliant piece of code.

    +
    +
  10. +
  11. +

    Execute Angular tests

    +
    +

    The CI testing of the Angular client is different than the standard local testing (adapted to CI environments, as specified in the Adaptation section of document). This script just executes the following commands:

    +
    +
    +
    +
    ng test --browsers ChromeHeadless --watch=false
    +
    +
    +
  12. +
  13. +

    Check dependencies

    +
    +

    Before continuing, we print the result of yarn audit. It shows the vulnerabilities in the dependencies. It does not process the response; the purpose is only to track the result of the command.

    +
    +
    +
    +
    yarn audit
    +
    +
    +
  14. +
  15. +

    SonarQube code analysis

    +
    +

    The script loads and executes the tool sonar-scanner. This tool is loaded here because it’s not used in any other part of the pipeline. The sonar-scanner will take all the code, upload it to SonarQube and wait until SonarQube sends us a response with the quality of our code. If the code does not pass the quality gate, the pipeline will stop at this point.

    +
    +
  16. +
  17. +

    Build Application

    +
    +

    The building process of the Angular client would result in a folder called /dist in the main Angular’s directory. That folder is the one that is going to be served afterwards as an artifact. This process has also been adapted to some Deployment needs. This building script executes the following:

    +
    +
    +
    +
    ng build --configuration=docker
    +
    +
    +
  18. +
  19. +

    Deliver application into Nexus

    +
    +

    Once the scripts produce the Angular artifact (/dist folder), it’s time to package it and store it into Nexus.

    +
    +
  20. +
  21. +

    Declarative: Post Actions

    +
    +

    At the end, this step is always executed, even if a previous stage fails. We use this step to clean up the workspace for future executions.

    +
    +
    +
    +
    post {
    +    always {
    +        cleanWs()
    +    }
    +}
    +
    +
    +
  22. +
+
+
+
+
Adjustments
+
+

The Angular project Pipeline needed some "extra" features to complete all planned processes. Those features resulted in some additions to the project.

+
+
+
+
Pipeline Environment
+
+

In order to easily reuse the pipeline in other angular projects, all variables have been defined in the block environment. All variables have the default values that Production Line uses, so if you’re going to work in production line you won’t have to change anything. Example:

+
+
+
+
environment {
+    // Script for build the application. Defined at package.json
+    buildScript = 'build --configuration=docker'
+    // Script for lint the application. Defined at package.json
+    lintScript = 'lint'
+    // Script for test the application. Defined at package.json
+    testScript = 'test:ci'
+    // Angular directory
+    angularDir = 'angular'
+    // SRC folder. It will be angularDir/srcDir
+    srcDir = 'src'
+    // Name of the custom tool for chrome stable
+    chrome = 'Chrome-stable'
+
+    // SonarQube
+    // Name of the SonarQube tool
+    sonarTool = 'SonarQube'
+    // Name of the SonarQube environment
+    sonarEnv = "SonarQube"
+
+    // Nexus
+    // Artifact groupId
+    groupId = 'com.devonfw.mythaistar'
+    // Nexus repository ID
+    repositoryId= 'pl-nexus'
+    // Nexus internal URL
+    repositoryUrl = 'http://nexus3-core:8081/nexus3/repository/maven-snapshots'
+    // Maven global settings configuration ID
+    globalSettingsId = 'MavenSettings'
+    // Maven tool id
+    mavenInstallation = 'Maven3'
+}
+
+
+
+
+
== Description
+
+
    +
  • +

    build Script: script for build the application. It must be defined at package.json.

    +
    +

    Example (package.json):

    +
    +
    +
    +
    {
    +    "name": "mythaistar-restaurant",
    +    ...
    +    "scripts": {
    +        ...
    +        "build": "ng build",
    +        ...
    +    }
    +    ...
    +}
    +
    +
    +
    +

    This will be used as follows:

    +
    +
    +
    +
    sh """yarn ${buildScript}"""
    +
    +
    +
  • +
  • +

    lint Script: Script for lint the application. Defined at package.json

    +
    +

    Example (package.json):

    +
    +
    +
    +
    {
    +    "name": "mythaistar-restaurant",
    +    ...
    +    "scripts": {
    +        ...
    +        "lint": "ng lint",
    +        ...
    +    }
    +    ...
    +}
    +
    +
    +
    +

    This will be used as follows:

    +
    +
    +
    +
    sh """yarn ${lintScript}"""
    +
    +
    +
  • +
  • +

    test Script: Script for test the application. Defined at package.json

    +
    +

    Example (package.json):

    +
    +
    +
    +
    {
    +    "name": "mythaistar-restaurant",
    +    ...
    +    "scripts": {
    +        ...
    +        "test:ci": "npm run postinstall:web && ng test --browsers ChromeHeadless --watch=false",
    +        ...
    +    }
    +    ...
    +}
    +
    +
    +
    +

    This will be used as follows:

    +
    +
    +
    +
    sh """yarn ${testScript}"""
    +
    +
    +
  • +
  • +

    angular-Dir: Relative route to angular application. In My Thai Star this is the angular folder. The actual directory (.) is also allowed.

    +
    +
    +angular directory +
    +
    +
  • +
  • +

    srcDir: Directory where you store the source code. For angular applications the default value is src

    +
    +
    +src directory +
    +
    +
  • +
  • +

    chrome: Since you need a browser to run your tests, we must provide one. This variable contains the name of the custom tool for google chrome.

    +
    +
    +chrome installation +
    +
    +
  • +
  • +

    sonar-Tool: Name of the SonarQube scanner installation.

    +
    +
    +sonar scanner +
    +
    +
  • +
  • +

    sonar-Env: Name of the SonarQube environment. SonarQube is the default value for PL.

    +
    +
    +sonar env +
    +
    +
  • +
  • +

    group-Id: Group id of the application. It will be used to storage the application in nexus3

    +
    +
    +nexus3 groupid +
    +
    +
  • +
  • +

    repository-Id: Id of the nexus3 repository. It must be defined at maven global config file.

    +
    +
    +nexus3 id +
    +
    +
  • +
  • +

    repository URL: The URL of the repository.

    +
  • +
  • +

    global Settings Id: The id of the global settings file.

    +
    +
    +nexus3 global config +
    +
    +
  • +
  • +

    maven Installation: The name of the maven tool.

    +
    +
    +maven tool +
    +
    +
  • +
+
+
+
Table of Contents
+ +
+
+
+
Java CI
+
+

The Java server-side of My Thai Star is a devon4j-based application. As long as Maven and Java 8 are going to be needed, the Pipeline should have those tools available as well.

+
+
+
+
Pipeline
+
+

This Pipeline is called MyThaiStar_SERVER_BUILD, and it is located exactly in the same PL instance’s folder than MyThaiStar_FRONTEND_BUILD. Let’s see how the Pipeline’s flow behaves.

+
+
+
+java pipeline flow +
+
+
+

Check those Pipeline stages with more detail:

+
+
+
    +
  1. +

    Declarative: Checkout SCM

    +
    +

    Gets the code from https://github.com/devonfw/my-thai-star . This step is not defined directly in our pipeline, but as it is loaded from the repository this step should always be done at the beginning.

    +
    +
  2. +
  3. +

    Declarative: Tool Install

    +
    +

    The My Thai Star application works with JDK11. In this step, if JDK11 is not installed, we install it and then put the JDK folder into PATH.

    +
    +
    +
    +
    tools {
    +  jdk 'OpenJDK11'
    +}
    +
    +
    +
  4. +
  5. +

    Loading Custom Tools

    +
    +

    In this step we load the tools that cannot be loaded in the previous step. As My Thai Star is delivered as a docker container, in this step we load docker as a custom tool.

    +
    +
    +
    +
    tool dockerTool
    +
    +
    +
  6. +
  7. +

    Install dependencies

    +
    +

    This step will download all project dependencies.

    +
    +
    +
    +
    mvn clean install -Dmaven.test.skip=true
    +
    +
    +
  8. +
  9. +

    Unit Tests

    +
    +

    This step will execute the project unit test with maven.

    +
    +
    +
    +
    mvn clean test
    +
    +
    +
  10. +
  11. +

    Dependency Checker

    +
    +

    Execute the OWASP Dependency Checker in order to validate the project dependencies. It will generate a report that can be used in SonarQube

    +
    +
    +
    +
    dependencyCheck additionalArguments: '--project "MTSJ" --scan java/mtsj --format XML', odcInstallation: 'dependency-check'
    +dependencyCheckPublisher pattern: ''
    +
    +
    +
  12. +
  13. +

    SonarQube analysis

    +
    +

    The code is evaluated using the integrated PL instance’s SonarQube. Also, it will wait for the quality gate status. If the status is failing, the pipeline execution will be stopped.

    +
    +
    +
    +
    withSonarQubeEnv(sonarEnv) {
    +    sh "mvn sonar:sonar"
    +}
    +
    +def qg = waitForQualityGate()
    +if (qg.status != 'OK') {
    +    error "Pipeline aborted due to quality gate failure: ${qg.status}"
    +}
    +
    +
    +
  14. +
  15. +

    Deliver application into Nexus

    +
    +

    Store all artifacts into nexus.

    +
    +
    +
    +
    mvn deploy -Dmaven.test.skip=true
    +
    +
    +
  16. +
  17. +

    Create the Docker image

    +
    +

    Create the docker image and then publish the image into a docker registry.

    +
    +
  18. +
+
+
+
+
Adjustments
+ +
+
+
Pipeline Environment
+
+

In order to easily reuse the pipeline in other java projects, all variables have been defined in the block environment. All variables have the default values that Production Line uses, so if you’re going to work in production line you won’t have to change anything. Example:

+
+
+
+
environment {
+    // Directory with java project
+    javaDir = 'java/mtsj'
+
+    // SonarQube
+    // Name of the SonarQube environment
+    sonarEnv = "SonarQube"
+
+    // Nexus 3
+    // Maven global settings configuration ID
+    globalSettingsId = 'MavenSettings'
+    // Maven tool id
+    mavenInstallation = 'Maven3'
+
+    // Docker
+    dockerRegistryCredentials = 'nexus-api'
+    dockerRegistryProtocol = 'https://'
+    dockerTool = 'docker-global'
+}
+
+
+
+
+
== Description
+
+
    +
  • +

    java Dir: Relative route to java application. In My Thai Star this is the java/mtsj folder. The actual directory (.) is also allowed.

    +
    +
    +java directory +
    +
    +
  • +
  • +

    sonar Env: Name of the SonarQube environment. SonarQube is the default value for PL.

    +
  • +
  • +

    global Settings Id: The id of the global settings file. MavenSettings is the default value for PL.

    +
    +
    +nexus3 global config +
    +
    +
  • +
  • +

    maven Installation: The name of the maven tool. Maven3 is the default value for PL.

    +
    +
    +maven tool +
    +
    +
  • +
+
+
+
+
Distribution management
+
+

The only extra thing that needs to be added to the Java server-side is some information that determines where the artifact of the project is going to be stored in Nexus. This is going to be a section in the main pom.xml file called <distributionManagement>. This section will point to the PL instance’s Nexus. Let’s have a look at it. It’s already configured with the PL default values.

+
+
+
+
<distributionManagement>
+    <repository>
+      <id>pl-nexus</id>
+      <name>PL Releases</name>
+      <url>http://nexus3-core:8081/nexus/content/repositories/maven-releases/</url>
+    </repository>
+    <snapshotRepository>
+      <id>pl-nexus</id>
+      <name>PL Snapshots</name>
+      <url>http://nexus3-core:8081/nexus3/repository/maven-snapshots</url>
+    </snapshotRepository>
+</distributionManagement>
+
+
+
+
Table of Contents
+ +
+
+
+
+

Deployment

+
+

The main deployment tool used for My Thai Star is Docker.

+
+
+
+docker +
+
+
+

It is a tool to run applications in isolated environments. Those isolated environments are what we call Docker containers. For instance, it won’t be necessary to install Nginx, Apache Tomcat or anything else required to deploy, because there will be some containers that actually have those technologies inside.

+
+
+
+

Where Docker containers will be running?

+
+

Of course, it is necessary to have an external Deployment Server. Every Docker process will run in it. It will be accessed from Production Line pipelines via SSH. Thus, the pipeline itself will manage the scenario: if every previous process (such as testing) passes as OK, it stops the current containers and creates new ones.

+
+
+

This external server will be located in https://mts-devonfw-core.cloud.okteto.net/

+
+
+
+

Container Schema

+
+

3 Docker containers are being used for the deployment of My Thai Star:

+
+
+
    +
  1. +

    Nginx for the Reverse Proxy

    +
  2. +
  3. +

    tomcat for the Java Server

    +
  4. +
  5. +

    Nginx for the Angular Client

    +
  6. +
+
+
+

The usage of the Reverse Proxy will allow the client to call via /api every single Java Server’s REST operation. Moreover, there will only be 1 port in usage in the remote Docker host, the one mapped for the Reverse Proxy: 8080. +Besides the deployment itself using Nginx and tomcat, both client and server are previously built using NodeJS and maven images. Artifacts produced by them will be pasted in servers' containers using multi-stage docker builds. It will all follow this schema:

+
+
+
+36028242 8998f41c 0d9e 11e8 93b3 6bfe50152bf8 +
+
+
+

This orchestration of all 3 containers will be done by using a docker-compose.yml file. To redirect traffic from one container to another (i.e. reverse-proxy to angular client or angular client to java server) will be done by using, as host names, the service name docker-compose defines for each of them, followed by the internally exposed port:

+
+ +
+ + + + + +
+ + +A implementation using Traefik as reverse proxy instead of NGINX is also available. +
+
+
+
+

Run My Thai Star

+
+

The steps to run My Thai Star are:

+
+
+
    +
  1. +

    Clone the repository $ git clone https://github.com/devonfw/my-thai-star.git

    +
  2. +
  3. +

    Run the docker compose command: $ docker-compose up

    +
  4. +
+
+
+
Table of Contents
+ +
+
+
Deployment Pipelines
+
+

As PL does not support deployments, we have created separate pipelines for this purpose. Those pipelines are: MyThaiStar_REVERSE-PROXY_DEPLOY, MyThaiStar_FRONT-END_DEPLOY and MyThaiStar_SERVER_DEPLOY.

+
+
+

The application will be deployed using docker on a remote machine. The architecture is as follows:

+
+
+
+deployment arch +
+
+
+

The parts to be deployed are: an NGINX reverse proxy, the java application and the angular application.

+
+
+
+
MyThaiStar_SERVER_DEPLOY Pipeline
+
+

Deploys on the server the Java part of My Thai Star.

+
+
+
+
Parameters
+
+
    +
  • +

    registryUrl: The URL to the docker registry where the image is stored.

    +
  • +
  • +

    registryCredentialsId: Credentials to publish/download images from registry.

    +
  • +
  • +

    dockerNetwork: Network of your My Thai Star application. You can deploy several versions of MTS in the same server by changing the dockerNetwork.

    +
  • +
  • +

    VERSION: The version that you want to deploy.

    +
  • +
+
+
+
+
Pipeline steps
+
+
    +
  • +

    Create docker network: Create the docker network with the name provided as parameter.

    +
  • +
  • +

    Deploy new image: Deploy a new java container. If it already exists, it first deletes the previous one.

    +
  • +
+
+
+
+
MyThaiStar_FRONT-END_DEPLOY
+
+

Deploys on the server the Angular part of My Thai Star

+
+
+
+
Parameters
+
+
    +
  • +

    registryUrl: The URL to the docker registry where the image is stored.

    +
  • +
  • +

    registryCredentialsId: Credentials to publish/download images from registry.

    +
  • +
  • +

    dockerNetwork: Network of your My Thai Star application. You can deploy several versions of MTS in the same server by changing the dockerNetwork.

    +
  • +
  • +

    VERSION: The version that you want to deploy.

    +
  • +
+
+
+
+
Pipeline steps
+
+
    +
  • +

    Create docker network: Create the docker network with the name provided as parameter.

    +
  • +
  • +

    Deploy new image: Deploy a new angular container. If it already exists, it first deletes the previous one.

    +
  • +
+
+
+
+
MyThaiStar_REVERSE-PROXY_DEPLOY Pipeline
+
+ + + + + +
+ + +As reverse proxy connects to the Java and Angular application, both must be deployed before you execute this pipeline. +
+
+
+

The MyThaiStar_REVERSE-PROXY_DEPLOY pipeline will deploy the My Thai Star reverse proxy into a remote machine using docker.

+
+
+
+
Parameters
+
+
    +
  • +

    registryUrl: The URL to the docker registry where the image is stored.

    +
  • +
  • +

    registryCredentialsId: Credentials to publish/download images from registry.

    +
  • +
  • +

    buildReverseProxy: If yes, it will build and publish a new version of reverse-proxy.

    +
  • +
  • +

    port: Port of the MTS application. You must ensure that this port is available in the deployment machine.

    +
  • +
  • +

    docker Network: Network of your My Thai Star application. You can deploy several versions of MTS in the same server by changing the port and the docker Network.

    +
  • +
  • +

    VERSION: The version that you want to deploy.

    +
  • +
+
+
+
+
Pipeline steps
+
+
    +
  • +

    Create docker network: Create the docker network with the name provided as parameter.

    +
  • +
  • +

    Create the Docker image: If build-Reverse-Proxy is enabled, this step will create a new docker image and publish it to the docker registry.

    +
  • +
  • +

    Deploy new image: Deploy a new reverse proxy container. If it already exists, it first deletes the previous one.

    +
  • +
+
+
+
Table of Contents
+ +
+
+
+
Deployment Strategies
+
+

In this chapter different way of deploying My Thai Star are explained. Everything will be based in Docker.

+
+
+
+
Independent Docker containers
+
+

The first way of deployment will use isolated Docker containers. That means that if the client-side container is deployed, it does not affect the server-side container’s life cycle and vice versa.

+
+
+

Let’s show how the containers will behave during their life cycle.

+
+
+
    +
  • +

    0) Copy everything you need into the Deployment Server directory

    +
  • +
  • +

    1) Remove existing container (Nginx or Tomcat)

    +
    +
    +container1 +
    +
    +
  • +
  • +

    2) Run new one from the Docker images collection of the external Deployment Server.

    +
    +
    +container2 +
    +
    +
  • +
  • +

    3) Add the artifact /dist to the "deployable" folder of the Docker container (/usr/share/nginx/html/)

    +
    +
    +container3 +
    +
    +
    +

    Now, let’s see how it’s being executed in the command line (simplified due to documentation purposes). The next block of code represents what is inside of the last stage of the Pipeline.

    +
    +
    +
    +
    sshagent (credentials: ['my_ssh_token']) {
    +    sh """
    +        // Copy artifact from workspace to deployment server
    +
    +        // Manage container:
    +        docker rm -f [mts-container]
    +        docker run -itd --name=[mts-container] [base_image]
    +        docker exec [mts-container] bash -C \\"rm [container_deployment_folder]/*\\"
    +        docker cp [artifact] [mts-container]:[container_deployment_folder]
    +    """
    +}
    +
    +
    +
    +

    For every operation performed in the external Deployment Server, it is necessary to define where those commands are going to be executed. So, for each one of previous docker commands, this should appear before:

    +
    +
    +
    +
    `ssh -o StrictHostKeyChecking=no root@10.40.235.244`
    +
    +
    +
  • +
+
+
+
+
Docker Compose
+
+

The second way of deployment will be by orchestrating both elements of the application: The Angular client-side and the Java server-side. Both elements will be running in Docker containers as well, but in this case they won’t be independent anymore. Docker Compose will be in charge of keeping both containers up, or to put them down.

+
+
+
+
Project adjustment
+
+

In order to perform this second way of deployment, some files will be created in the project. The first one is the Dockerfile for the Angular client-side. This file will pull (if necessary) an Nginx Docker image and copy the Angular artifact (/dist folder) inside of the deployment folder of the image. It will be located in the main directory of the Angular client-side project.

+
+
+
+dockerfile angular +
+
+
+

The second file is the Dockerfile for the Java server-side. Its function will be quite similar to the Angular one. It will run a tomcat Docker image and copy the Java artifact (mythaistar.war file) in its deployment folder.

+
+
+
+dockerfile java +
+
+
+

Finally, as long as docker-compose is being used, a file containing its configuration will be necessary as well. A new folder in the main My Thai Star directory is created, and it’s called /docker. Inside there is just a docker-compose.yml file. It contains all the information needed to orchestrate the deployment process; for example, which port both containers are going to be published on, and so on. This way of deployment will allow the application to be published (or not) with just one action.

+
+
+
+
docker-compose rm -f            # down
+docker-compose up --build -d    # up fresh containers
+
+
+
+
+docker compose +
+
+
+

Let’s have a look at the file itself:

+
+
+
+
version: '3'
+services:
+  client_compose:
+    build: "angular"
+    ports:
+      - "8091:80"
+    depends_on:
+      - server_compose
+  server_compose:
+    build: "java"
+    ports:
+      - "9091:8080"
+
+
+
+

This Orchestrated Deployment will offer some interesting possibilities for the future of the application.

+
+
+
Table of Contents
+ +
+
+
+
Future Deployment
+
+

The My Thai Star project is going to be built in many technologies. Thus, let’s think about one deployment schema that allow the Angular client to communicate to all three back ends: Java, Node and .NET.

+
+
+

As long as Docker containers are being used, it shouldn’t be that hard to deal with this "distributed" deployment. The schema represents 6 Docker containers that will have client-side(s) and server-side(s). Each of 3 Angular client containers (those in red) are going to communicate with different back-ends. So, when the deployment is finished, it would be possible to use all three server-sides just by changing the "port" in the URL.

+
+
+

Let’s see how it would look like:

+
+
+
+deployment schema +
+
+
+
Table of Contents
+ +
+
+
+
Reverse proxy strategy using Traefik
+
+

This implementation is the same as described at My Thai Star deployment wiki page. The only thing that changes is that Traefik is used instead of NGINX.

+
+
+

Using Traefik as reverse proxy, we can define the routes using labels in the docker containers instead of using a nginx.conf file. With this, it is not necessary to modify the reverse proxy container for each application. In addition, as Traefik is listening to the docker daemon, it can detect new containers and create routes for them without rebooting.

+
+
+

Example of labels:

+
+
+
+
labels:
+    - "traefik.http.routers.angular.rule=PathPrefix(`/`)"
+    - "traefik.http.services.angular.loadBalancer.healthcheck.path=/health"
+    - "traefik.http.services.angular.loadBalancer.healthcheck.interval=10s"
+    - "traefik.http.services.angular.loadBalancer.healthcheck.scheme=http"
+
+
+
+
+
How to use it
+
+

If you want to build the images from code, change to My Thai Star root folder and execute:

+
+
+
+
$ docker-compose -f docker-compose.traefik.yml up -d --build
+
+
+
+

If you want to build the images from artifacts, change to Traefik folder (reverse-proxy/traefik) and execute:

+
+
+
+
$ docker-compose up -d --build
+
+
+
+

After a few seconds, when the healthcheck detects that the containers are running, your application will be available at http://localhost:8090. Also, the Traefik dashboard is available at http://localhost:8080.

+
+
+

If you want to check the behavior of the application when you scale up the back-end, you can execute:

+
+
+
+
$ docker-compose scale java=5
+
+
+
+

With this, the access to the java back-end will be using the load balancing method: Weighted Round Robin.

+
+
+
Table of Contents
+ +
+
+
+
+

MyThaiStar on Native Kubernetes as a Service (nKaaS)

+
+

The MyThaiStar sample application can be deployed on a nKaaS environment. The required Kubernetes configuration files can be found in the MyThaiStar repository. There are no additional changes required in order to deploy the application.

+
+
+
+

Setting up the environment

+ +
+
+

Following the nKaaS guide

+
+

After requesting access to the nKaaS platform you’ll be greeted with a welcome mail which contains your personal credentials. Make sure to change the given password to a personal one within the 24 hour time period, otherwise the credentials will expire.

+
+
+

After successfully following the guide mentioned in the welcome mail you should be able to establish a connection to the nKaaS VPN and have access to all their services (Jenkins, BitBucket, etc.). You should also be able to communicate with Kubernetes using kubectl.

+
+
+

Known issues: The nKaaS guide provides a download link for OpenVPN Connect. However, some users experienced connection issues with this client. If you’re having issues connecting to the VPN with OpenVPN Connect, you may try out the client by OVPN.

+
+
+
+

Requesting a namespace

+
+

Initially, you won’t be able to edit anything on Kubernetes, as you don’t have any privileges on any namespace. To request your own namespace you should raise a ticket at the Customer Support Portal containing your desired name for the namespace.

+
+
+

As soon as the namespace was created you can change your kubectl context:

+
+
+
+
kubectl config set-context --current --namespace=YOUR-NAMESPACE
+
+
+
+

On your own namespace you should have permissions to create/delete deployments/services etc. and perform other actions.

+
+
+
+

Setting up Harbor

+
+

Jenkins will build the MyThaiStar Docker images and push them to the nKaaS Harbor registry. The Jenkinsfile defaults to a Harbor project called "my-thai-star". If there’s no such project on Harbor, simply create a new one.

+
+
+
+

Setting up Jenkins

+
+

As MyThaiStar includes all required Jenkinsfiles for nKaaS, almost no configurations have to be performed by the user. +Create a new Pipeline on Jenkins and configure its definition to be a "Pipeline script from SCM". The SCM used is "Git" and the repository URL is the MyThaiStar repository https://github.com/devonfw/my-thai-star.git or your fork of it.

+
+
+

The Branch Specifier should point to */develop, the Script Path is jenkins/nKaaS/Jenkinsfile as that’s where the Jenkinsfile is located at the MyThaiStar repository. +Checking the "Lightweight checkout" could speed up the Pipeline.

+
+
+

Note: If you’re using the nKaaS Bitbucket as repository for your MyThaiStar clone you have to perform some additional configurations. First you’ll have to create a new SSH keypair, for example with ssh-keygen. Add the public key to the Bitbucket authentication methods and the private key in Jenkins to a new pair of credentials. This step is required for Jenkins to be able to authenticate against Bitbucket. +Afterwards, instead of the official MyThaiStar repository, specify your Bitbucket repository:

+
+
+
+
ssh://git@bitbucket.demo.xpaas.io:7999/YOUR-PROJECT/YOUR-MTS-REPO.git
+
+
+
+

Under "Credentials" choose the credentials that contain your Bitbucket private key you’ve created earlier.

+
+
+
+

Deploying MTS

+
+

After setting up the Jenkins Pipeline, you can simply run it by clicking on the "Build" button. This will trigger the pipeline, Jenkins will:

+
+
+
    +
  1. +

    Check out the MTS project

    +
  2. +
  3. +

    Build the docker images

    +
  4. +
  5. +

    Push the docker images to the Harbor registry

    +
  6. +
  7. +

    Deploy the MTS application onto Kubernetes

    +
  8. +
+
+
+

Finally, the applications should be available at http://my-thai-star.demo.xpaas.io.

+
+
+

The first part, my-thai-star, is specified in the MTS ingress configuration at host. The second part, demo.xpaas.io, is the host of the nKaaS you’re working on.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/mrchecker.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/mrchecker.html new file mode 100644 index 00000000..7a43cb56 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/mrchecker.html @@ -0,0 +1,365 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

MrChecker E2E Testing

+
+ +
+
+
+

Introduction

+
+
+

MrChecker is a testing framework included in devonfw with several useful modules, from which we will focus on the Selenium Module, a module designed to make end-to-end testing easier to implement.

+
+
+
+
+

How to use it

+
+
+

First of all download the repository.

+
+
+

You must run My Thai Star front-end and back-end application and modify your URL to the front in mrchecker/endtoend-test/src/resources/settings.properties

+
+
+

Now you can run end to end test to check if the application works properly.

+
+
+

To run the e2e test you have two options:

+
+
+

The first option is using the command line in devonfw distribution

+
+
+
+
cd mrchecker/endtoend-test/
+mvn test -Dtest=MyThaiStarTest -Dbrowser=Chrome
+
+
+
+

optionally you can use it with a headless version or using another navigator:

+
+
+
+
// chrome headless (without visual component)
+mvn test -Dtest=MyThaiStarTest -Dbrowser=ChromeHeadless// use firefox navigator
+mvn test -Dtest=MyThaiStarTest -Dbrowser=FireFox
+
+
+
+

The second is importing the project in devonfw Eclipse and running MyThaiStarTest.java as JUnit (right click, run as JUnit)

+
+
+

They can be executed one by one or all in one go, comment or uncomment @Test before those tests to enable or disable them.

+
+
+

For more information about how to use MrChecker and build your own end to end test read: + * MrChecker documentation + * MrChecker tutorial for My Thai Star

+
+
+
+
+

End to end tests in My Thai Star

+
+
+

We have included a test suite with four tests to run in My Thai Star to verify everything works properly.

+
+
+

The included tests do the following:

+
+
+
    +
  • +

    Test_loginAndLogOut: Log in and log out.

    +
  • +
  • +

    Test_loginFake: Attempt to log in with a fake user.

    +
  • +
  • +

    Test_bookTable: Log in and book a table, then login with a waiter and check if the table was successfully booked.

    +
  • +
+
+
+

Test_orderMenu: Log in and order food for a certain booked table.

+
+
+

These four tests can be found inside MyThaiStarTest.java located here.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/my-thai-star-nosql-data-model.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/my-thai-star-nosql-data-model.html new file mode 100644 index 00000000..047db157 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/my-thai-star-nosql-data-model.html @@ -0,0 +1,282 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

NoSQL Data Model

+
+
+
+dynamodb data model 1.4.1 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/my-thai-star-publish.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/my-thai-star-publish.html new file mode 100644 index 00000000..85cec1f5 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/my-thai-star-publish.html @@ -0,0 +1,536 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Publishing the MyThaiStar Application

+
+
+

This page will explain how to build and deploy the application.

+
+
+
+
+

Production Line Instance

+
+
+

The Production Line instance being used can be found here. After logging in you’ll see a list of existing jobs and pipelines. +However, only a folder is relevant to this topic: MTS

+
+
+

Note: A user account is required for authentication. Contact a devon team member to request a new account.

+
+
+
+
+

Pipeline Script

+
+
+

We’ll have a closer look at the pipeline configuration script and its stages:

+
+
+

Note: Have a look at this wiki over here to get a basic idea on how to write pipeline scripts.

+
+
+
    +
  1. +

    Checking out MyThaiStar form GitHub

    +
    +

    This stage will check out the source code directly from GitHub:

    +
    +
    +
    +
    git credentialsId:'github-devonfw-ci', url:'https://github.com/devonfw/my-thai-star/'
    +
    +
    +
    +

    Credentials are required for authentication as we’re checking out a private repository. 'github-devonfw-ci' is a pair of credentials that was created for this sole purpose.

    +
    +
  2. +
  3. +

    Loading custom tools

    +
    +

    To build the application, two tools are required: Node 6 and Angular CLI.

    +
    +
    +

    They just have to be referenced, as the Custom Tool Plugin will handle the installation process:

    +
    +
    +
    +
    tool 'Node 6'
    +tool 'Angular CLI'
    +
    +
    +
  4. +
  5. +

    Fresh Dependency installation +To ensure that we get fresh dependencies, we’ll have to make sure that our dependencies folder node_modules is removed and the installation process is run again.

    +
    +
    +
    find . -name "node_modules" -exec rm -rf '{}' +
    +npm i
    +
    +
    +
  6. +
  7. +

    Code Linting

    +
    +

    By "linting" our Angular code we check the quality of the code. TypeScript provides a useful tool for that. It is called TSLint. This process is triggered via Angular CLI.

    +
    +
    +
    +
    ng lint --format checkstyle
    +
    +
    +
  8. +
  9. +

    Execute Unit Tests

    +
    +

    By default, Angular tests are executed using the Chrome browser. That can be a problem when they need to be executed in a CI environment, such as Jenkins (which is the case) or Travis. Angular projects can be prepared to deal with it, using the PhantomJS browser instead of chrome.

    +
    +
    +

    We can prepare a script for that in our package.json file, or we can directly write it in the command line. Also, it is necessary to make sure that those tests will be executed just once, because by default they will be watching for changes.

    +
    +
    +
    +
    ng test --browsers PhantomJS --single-run
    +
    +
    +
    +

    or

    +
    +
    +
    +
    yarn test:ci
    +
    +
    +
  10. +
  11. +

    Build application

    +
    +

    The building process needs to be sufficiently flexible to be adapted for different deployments. As long as the My Thai Star Angular client needs (or will need) to point to different servers (devon4j, Node and .NET), it is mandatory to have the chance to separately "prepare" the artifact for all of them.

    +
    +
    +

    What does that mean? There are some files dedicated to those situations. They’re called environment. They’ll define some data to be used under different circumstances.

    +
    +
    +
    +
    ng build --aot --environment=prod
    +
    +
    +
    +

    or

    +
    +
    +
    +
    yarn build:prod
    +
    +
    +
    +

    The ng build command creates a dist folder which contains the application.

    +
    +
  12. +
  13. +

    Deployment

    +
    +

    The deployment step has to be approved by a human. Otherwise it won’t proceed.

    +
    +
    +

    The user can decide on whether to proceed and deploy the application or to abort and just keep the generated files inside the dist directory.

    +
    +
    +

    After clicking on proceed, the following lines will be executed:

    +
    +
    +
    +
    ##Change to Angular directory
    +cd angular
    +
    +##Copy "dist" folder from workspace to deployment server
    +scp -o StrictHostKeyChecking=no -r dist root@10.40.235.244:/root/mythaistar/
    +
    +##Launch application in Docker container
    +ssh -o StrictHostKeyChecking=no root@10.40.235.244 docker rm -f mythaistar
    +ssh -o StrictHostKeyChecking=no root@10.40.235.244 docker run -itd --name=mythaistar -p 8090:80 nginx
    +ssh -o StrictHostKeyChecking=no root@10.40.235.244 docker exec mythaistar bash -c \\"rm /usr/share/nginx/html/*\\"
    +ssh -o StrictHostKeyChecking=no root@10.40.235.244 docker cp mythaistar/dist/. mythaistar:/usr/share/nginx/html/
    +
    +
    +
    +

    After deploying the application will be available at http://de-mucdevondepl01:8090

    +
    +
  14. +
+
+
+
+
+

Complete Pipeline Script:

+
+
+

The complete Groovy script:

+
+
+
+
node {
+    stage 'Checking out my-thai-star from GitHub'
+        node {
+            git branch: 'develop', credentialsId: 'github-devonfw-ci', url: 'https://github.com/devonfw/my-thai-star/'
+        }
+
+    stage 'Loading Custom Tools'
+        node {
+            tool 'Node 6'
+            tool 'Angular CLI'
+        }
+
+    stage 'Fresh Dependency Installation'
+        node {
+            sh """
+                cd angular
+                find . -name "node_modules" -exec rm -rf '{}' +
+                npm i
+            """
+        }
+
+    stage 'Code Linting'
+        node {
+            sh """
+                cd angular
+                ng lint --format checkstyle
+            """
+        }
+
+    stage 'Execute Angular tests'
+        node {
+            sh """
+                cd angular
+                ng test --browsers PhantomJS --single-run
+            """
+        }
+
+    stage 'Build Application'
+        node {
+            sh """
+                cd angular
+                ng build --aot --prod
+            """
+        }
+
+    stage 'Deployment'
+        input 'Should this build be deployed?'
+            node {
+                sshagent (credentials: ['3d0fa2a4-5cf0-4cf5-a3fd-23655eb33c11']) {
+                    sh """
+                        cd angular
+                        # Copy resulting "dist" folder from workspace to deployment server
+                        scp -o StrictHostKeyChecking=no -r dist root@10.40.235.244:/root/mythaistar/
+
+                        # Launch application in Docker container
+                        ssh -o StrictHostKeyChecking=no root@10.40.235.244 docker rm -f mythaistar
+                        ssh -o StrictHostKeyChecking=no root@10.40.235.244 docker run -itd --name=mythaistar -p 8090:80 nginx
+                        ssh -o StrictHostKeyChecking=no root@10.40.235.244 docker exec mythaistar bash -c \\"rm /usr/share/nginx/html/*\\"
+                        ssh -o StrictHostKeyChecking=no root@10.40.235.244 docker cp mythaistar/dist/. mythaistar:/usr/share/nginx/html/
+
+                    """
+                }
+                sh 'echo \\"Application available at http://de-mucdevondepl01:8090\\"'
+            }
+}
+
+
+
+
+
+

Accessing MyThaiStar

+
+
+

Finally, the application will be available at this URL: http://de-mucdevondepl01:8090.

+
+
+
+
+

Notes

+
+
+

Make sure not to launch multiple instances of this pipeline in parallel. While a pipeline is waiting for approval it’ll still be blocking a build executor. +This PL instance is set up to have two build executors.

+
+
+

This means: When launching this pipeline two times in parallel without approving the build, other jobs/pipeline won’t be able +to run properly.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/net-design.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/net-design.html new file mode 100644 index 00000000..db120363 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/net-design.html @@ -0,0 +1,280 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

.NET design

+
+
+

TODO

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/net-testing.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/net-testing.html new file mode 100644 index 00000000..745e9fc8 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/net-testing.html @@ -0,0 +1,280 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

.NET testing

+
+
+

TODO

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/nkaas.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/nkaas.html new file mode 100644 index 00000000..f7ae9663 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/nkaas.html @@ -0,0 +1,387 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

MyThaiStar on Native Kubernetes as a Service (nKaaS)

+
+
+

The MyThaiStar sample application can be deployed on a nKaaS environment. The required Kubernetes configuration files can be found in the MyThaiStar repository. There are no additional changes required in order to deploy the application.

+
+
+
+
+

Setting up the environment

+
+ +
+
+
+

Following the nKaaS guide

+
+
+

After requesting access to the nKaaS platform you’ll be greeted with a welcome mail which contains your personal credentials. Make sure to change the given password to a personal one within the 24 hour time period, otherwise the credentials will expire.

+
+
+

After successfully following the guide mentioned in the welcome mail you should be able to establish a connection to the nKaaS VPN and have access to all their services (Jenkins, BitBucket, etc.). You should also be able to communicate with Kubernetes using kubectl.

+
+
+

Known issues: The nKaaS guide provides a download link for OpenVPN Connect. However, some users experienced connection issues with this client. If you’re having issues connecting to the VPN with OpenVPN Connect, you may try out the client by OVPN.

+
+
+
+
+

Requesting a namespace

+
+
+

Initially, you won’t be able to edit anything on Kubernetes, as you don’t have any privileges on any namespace. To request your own namespace you should raise a ticket at the Customer Support Portal containing your desired name for the namespace.

+
+
+

As soon as the namespace was created you can change your kubectl context:

+
+
+
+
kubectl config set-context --current --namespace=YOUR-NAMESPACE
+
+
+
+

On your own namespace you should have permissions to create/delete deployments/services etc. and perform other actions.

+
+
+
+
+

Setting up Harbor

+
+
+

Jenkins will build the MyThaiStar Docker images and push them to the nKaaS Harbor registry. The Jenkinsfile defaults to a Harbor project called "my-thai-star". If there’s no such project on Harbor, simply create a new one.

+
+
+
+
+

Setting up Jenkins

+
+
+

As MyThaiStar includes all required Jenkinsfiles for nKaaS, almost no configurations have to be performed by the user. +Create a new Pipeline on Jenkins and configure its definition to be a "Pipeline script from SCM". The SCM used is "Git" and the repository URL is the MyThaiStar repository https://github.com/devonfw/my-thai-star.git or your fork of it.

+
+
+

The Branch Specifier should point to */develop, the Script Path is jenkins/nKaaS/Jenkinsfile as that’s where the Jenkinsfile is located at the MyThaiStar repository. +Checking the "Lightweight checkout" could speed up the Pipeline.

+
+
+

Note: If you’re using the nKaaS Bitbucket as repository for your MyThaiStar clone you have to perform some additional configurations. First you’ll have to create a new SSH keypair, for example with ssh-keygen. Add the public key to the Bitbucket authentication methods and the private key in Jenkins to a new pair of credentials. This step is required for Jenkins to be able to authenticate against Bitbucket. +Afterwards, instead of the official MyThaiStar repository, specify your Bitbucket repository:

+
+
+
+
ssh://git@bitbucket.demo.xpaas.io:7999/YOUR-PROJECT/YOUR-MTS-REPO.git
+
+
+
+

Under "Credentials" choose the credentials that contain your Bitbucket private key you’ve created earlier.

+
+
+
+
+

Deploying MTS

+
+
+

After setting up the Jenkins Pipeline, you can simply run it by clicking on the "Build" button. This will trigger the pipeline, Jenkins will:

+
+
+
    +
  1. +

    Check out the MTS project

    +
  2. +
  3. +

    Build the docker images

    +
  4. +
  5. +

    Push the docker images to the Harbor registry

    +
  6. +
  7. +

    Deploy the MTS application onto Kubernetes

    +
  8. +
+
+
+

Finally, the applications should be available at http://my-thai-star.demo.xpaas.io.

+
+
+

The first part, my-thai-star, is specified in the MTS ingress configuration at host. The second part, demo.xpaas.io, is the host of the nKaaS you’re working on.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/nodejs-design.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/nodejs-design.html new file mode 100644 index 00000000..8f229977 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/nodejs-design.html @@ -0,0 +1,707 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

NodeJS design (deprecated)

+
+ +
+
+
+

Introduction

+
+
+

The NodeJS back-end for My Thai Star application is going to be based on:

+
+
+
    +
  • +

    ExpressJS as the web application framework

    +
  • +
  • +

    devon4node as data access layer framework

    +
  • +
  • +

    DynamoDB as NoSQL Database

    +
  • +
+
+
+

To know more details about the above technologies please visit the following documentation:

+
+
+ +
+
+
+
+

Basic architecture details

+
+
+

This structure can be shown in the following example image:

+
+
+
+folder organization +
+
+
+
    +
  • +

    public - All files which will be exposed on the server directly

    +
  • +
  • +

    src

    +
    +
      +
    • +

      database folder - Folder with scripts to create/delete/seed the database

      +
    • +
    • +

      model - Folder with all data model

      +
    • +
    • +

      routes - Folder with all ExpressJS routers

      +
    • +
    • +

      utils - Folder with all utils like classes and functions

      +
    • +
    • +

      app.ts - File with ExpressJS declaration

      +
    • +
    • +

      config.ts - File with server configs

      +
    • +
    • +

      logic.ts - File with the business logic

      +
    • +
    +
    +
  • +
  • +

    test - Folder with all tests

    +
  • +
+
+
+
+
+

Layers

+
+
+
    +
  • +

    Service Layer: this layer will expose the REST API to exchange information with the client applications.

    +
  • +
  • +

    Logic Layer: the layer in charge of hosting the business logic of the application.

    +
  • +
  • +

    Data Access Layer: the layer to communicate with the data base.

    +
  • +
+
+
+
+
+

Service layer

+
+
+

The services layer will be solved using REST services with ExpressJS

+
+
+

To give service to the defined User Stories we will need to implement the following services:

+
+
+
    +
  • +

    provide all available dishes.

    +
  • +
  • +

    save a booking.

    +
  • +
  • +

    save an order.

    +
  • +
  • +

    provide a list of bookings (only for waiters) and allow filtering.

    +
  • +
  • +

    provide a list of orders (only for waiters) and allow filtering.

    +
  • +
  • +

    login service (see the Security section).

    +
  • +
  • +

    provide the current user data (see the Security section)

    +
  • +
+
+
+

In order to be compatible with the other back-end implementations, we must follow the naming conventions proposed for Devon4j applications. We will define the following end points for the listed services.

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that do not belong to the Order entity).

    +
  • +
  • +

    (POST) /mythaistar/login.

    +
  • +
  • +

    (GET) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

You can find all the details for the services implementation in the Swagger definition included in the My Thai Star project on Github.

+
+
+

To treat these services separately, the following routers were created:

+
+
+
    +
  • +

    bookingmanagement: will answer all requests with the prefix /mythaistar/services/rest/bookingmanagement/v1

    +
  • +
  • +

    dishmanagement: will answer all requests with the prefix /mythaistar/services/rest/dishmanagement/v1

    +
  • +
  • +

    ordermanagement: will answer all requests with the prefix /mythaistar/services/rest/ordermanagement/v1

    +
  • +
+
+
+

These routers will define the behavior for each service and use the logical layer.

+
+
+

An example of service definition:

+
+
+
+
router.post('/booking/search', (req: types.CustomRequest, res: Response) => {
+    try {
+        // body content must be SearchCriteria
+        if (!types.isSearchCriteria(req.body)) {
+            throw {code: 400, message: 'No booking token given' };
+        }
+
+        // use the searchBooking method defined at business logic
+        business.searchBooking(req.body, (err: types.Error | null, bookingEntity: types.PaginatedList) => {
+            if (err) {
+                res.status(err.code || 500).json(err.message);
+            } else {
+                res.json(bookingEntity);
+            }
+        });
+    } catch (err) {
+        res.status(err.code || 500).json({ message: err.message });
+    }
+});
+
+
+
+
+
+

Logic layer and Data access layer

+
+
+

In the logic layer we will locate all the business logic of the application. It will be located in the file logic.ts. If in this layer we need to get access to the data, we make use of the data access layer directly, in this case using devon4node with the DynamoDB adapter.

+
+
+

Example:

+
+
+
+
export async function cancelOrder(orderId: string, callback: (err: types.Error | null) => void) {
+    let order: dbtypes.Order;
+
+    try {
+        // Data access
+        order = await oasp4fn.table('Order', orderId).promise() as dbtypes.Order;
+
+        [...]
+    }
+}
+
+
+
+

We could define the data access layer separately, but devon4node allows us to do this in a simple and clear way. So, we decided not to separate the data access layer from the business logic.

+
+
+
+
+

Security with Json Web Token

+
+
+

For the Authentication and Authorization the app will implement the json web token protocol.

+
+
+
+
+

JWT basics

+
+
+

Refer to JWT basics for more information.

+
+
+
+
+

JWT implementation details

+
+
+

The Json Web Token pattern will be implemented based on the JSON web token library available on npm.

+
+
+
+
+

== Authentication

+
+
+

Based on the JSON web token approach, we will implement a class Authentication to define the security entry point and filters. Also, as My Thai Star is a mainly public application, we will define here the resources that won’t be secured.

+
+
+

List of unsecured resources:

+
+
+
    +
  • +

    /services/rest/dishmanagement/\**: to allow anonymous users to see the dishes info in the menu section.

    +
  • +
  • +

    /services/rest/ordermanagement/v1/order: to allow anonymous users to save an order. They will need a booking token but they won’t be authenticated to do this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking: to allow anonymous users to create a booking. Only a booking token is necessary to accomplish this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking/cancel/\**: to allow canceling a booking from an email. Only the booking token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/accept/\**: to allow guests to accept an invite. Only a guest token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/decline/\**: to allow guests to reject an invite. Only a guest token is needed.

    +
  • +
+
+
+

To configure the login we will create an instance of Authentication in the app file and then we will use the method auth to handle the requests to the /login endpoint.

+
+
+
+
app.post('/mythaistar/login', auth.auth);
+
+
+
+

To verify the presence of the Authorization token in the headers, we will register the Authentication.registerAuthentication middleware in Express. This middleware will check if the token is correct; if so, it will place the user in the request and continue to process it. If the token is not correct it will continue processing the request normally.

+
+
+
+
app.use(auth.registerAuthentication);
+
+
+
+

Finally, we have two default users created in the database:

+
+
+
    +
  • +

    user: waiter

    +
  • +
  • +

    password: waiter

    +
  • +
  • +

    role: WAITER

    +
  • +
  • +

    user: user0

    +
  • +
  • +

    password: password

    +
  • +
  • +

    role: CUSTOMER

    +
  • +
+
+
+
+
+

== Token set up

+
+
+

Following the official documentation the implementation details for the MyThaiStar’s JWT will be:

+
+
+
    +
  • +

    Secret: Used as part of the signature of the token, acting as a private key. It can be modified at config.ts file.

    +
  • +
  • +

    Token Prefix schema: Bearer. The token will look like Bearer <token>

    +
  • +
  • +

    Header: Authorization. The response header where the token will be included. Also, in the requests, when checking the token it will be expected to be in the same header.

    +
  • +
  • +

    The Authorization header should be part of the Access-Control-Expose-Headers header to allow clients access to the Authorization header content (the token);

    +
  • +
  • +

    Signature Algorithm: To encrypt the token we will use the default algorithm HS512.

    +
  • +
+
+
+
+
+

== Current User request

+
+
+

To provide the client with the current user data our application should expose a service to return the user details. In this case the Authentication has a method called getCurrentUser which will return the user data. We only need to register it in Express.

+
+
+
+
app.get('/mythaistar/services/rest/security/v1/currentuser', auth.getCurrentUser);
+
+
+
+
+
+

== Authorization

+
+
+

We need to secure three services, that only should be accessible for users with role Waiter:

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter.

    +
  • +
+
+
+

To ensure this, the Authorization class has the securizedEndpoint method that guarantees access based on the role. This method can be used as middleware in secure services. As the role is included in the token, once validated we will have this information in the request and the middleware can guarantee access or return a 403 error.

+
+
+
+
app.use('/mythaistar/services/rest/ordermanagement/v1/order/filter', auth.securizedEndpoint('WAITER'));
+app.use('/mythaistar/services/rest/ordermanagement/v1/order/search', auth.securizedEndpoint('WAITER'));
+app.use('/mythaistar/services/rest/bookingmanagement/v1/booking/search', auth.securizedEndpoint('WAITER'));
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/nodejs-testing.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/nodejs-testing.html new file mode 100644 index 00000000..7062b8a5 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/nodejs-testing.html @@ -0,0 +1,280 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

NodeJS testing

+
+
+

TODO

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/production-line-ci.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/production-line-ci.html new file mode 100644 index 00000000..69f75923 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/production-line-ci.html @@ -0,0 +1,364 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

My Thai Star in Production Line

+
+ +
+
+
+

What is PL?

+
+
+

The Production Line Project is a set of server-side collaboration tools for Capgemini engagements. It has been developed for supporting project engagements with individual tools like issue tracking, continuous integration, continuous deployment, documentation, binary storage and much more!

+
+
+
+pl logo +
+
+
+
+
+

Introduction

+
+
+

Although the PL Project is a wide set of tools, only 3 are going to be mainly used for My Thai Star projects to build a Continuous Integration and Continuous Delivery environment. All three are available in the PL instance used for this project.

+
+
+
    +
  1. +

    Jenkins

    +
    +

    This is going to be the "main tool". Jenkins helps to automate the non-human part of the development with Continuous Integration and is going to host all Pipelines (and, obviously, execute them).

    +
    +
  2. +
  3. +

    Nexus

    +
    +

    Nexus manages software "artifacts" required for development. It is possible to both download dependencies from Nexus and publish artifacts as well. It allows to share resources within an organization.

    +
    +
  4. +
  5. +

    SonarQube

    +
    +

    It is a platform for continuous inspection of the code. It is going to be used for the Java back-end.

    +
    +
  6. +
+
+
+
+
+

Where can I find all My Thai Star Pipelines?

+
+
+

They are located under the MTS folder of the PL instance:

+
+
+
+mts pipelines +
+
+
+

Those Jenkins Pipelines will not have any code to execute. They’re just pointing to all Jenkinsfiles under the /jenkins folder of the repository. They can be found here.

+
+
+
+
+

CI in My Thai Star stack

+
+
+ +
+
+
+
+

How to configure everything out of the box

+
+
+

Production Line currently has a template to integrate My Thai Star. All information can be found at devonfw production line repository

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/sap-hana-guide.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/sap-hana-guide.html new file mode 100644 index 00000000..2fd1a7ed --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/sap-hana-guide.html @@ -0,0 +1,538 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

SAP HANA

+
+ +
+
+
+

Download/Install VMware/SAP HANA

+
+
+ +
+
+
+
+

Run SAP HANA Database Server

+
+
+
    +
  • +

    Once the .ova file has been opened inside VMware Workstation, click on the image and go to Edit Virtual Machine Settings. Set the memory allocation to 5GB and Network Connection to NAT. NAT shows the IP for the virtual machine which will be used to establish the JDBC connection

    +
  • +
  • +

    Click Play Virtual Machine. When the virtual machine runs for the first time it displays the following. Copy the IP address, which will be used for the JDBC connection

    +
  • +
  • +

    Type hxeadm, which is the username and hit Enter. Next it will ask for password which is HXEHana1. Once successfully logged in it will ask to set a new password. Choose a new password and remember.

    +
  • +
  • +

    You need to set Master password for SAP HANA database. Set it as you like and remember.

    +
  • +
  • +

    For “proceed with configuration” type y and hit Enter. HANA database has started in the background.

    +
  • +
  • +

    Try connecting with following command, replace the password with the master password

    +
  • +
+
+
+
+
hxehost:hxeadm>hdbsql
+   \c -d SYSTEMDB -n localhost:39013 -u SYSTEM -p <>
+
+
+
+
+
+

Setting up Database for MTSJ

+
+
+

Once you have installed SAP HANA with VMware, you need to set up the DB.

+
+
+
+
+

Connect to DB

+
+
+
    +
  • +

    After you start VMware, log in with hxeadm as the login and the password. +At the prompt - hxehost:hxeadm>hdbsql +Please note the IP address, which needs to be put in the MTSJ java back-end

    +
  • +
  • +

    On prompt hdbsql> type below to connect to the DB

    +
  • +
+
+
+
+
\c -d SYSTEMDB -n localhost:39013 -u SYSTEM -p <password>
+
+
+
+
    +
  • +

    Type below query to see, if you have access to tenant database i.e. HXE

    +
  • +
+
+
+
+
SELECT DATABASE_NAME,  ACTIVE_STATUS FROM SYS.M_DATABASES ORDER BY 1;
+
+
+
+
+
+

Enabling the script server

+
+
+

Run the below for enabling the script server

+
+
+
+
ALTER DATABASE HXE ADD 'scriptserver'
+
+
+
+

To check if the script server is enabled, execute the below statement

+
+
+
+
SELECT SERVICE_NAME, PORT, ACTIVE_STATUS FROM SYS.M_SERVICES ORDER BY 1;
+
+
+
+

You should see the scriptserver in it.

+
+
+
+
+

Creating a User on HXE

+
+
+
    +
  • +

    Connect using the below

    +
  • +
+
+
+
+
\c -d hxe -n localhost:39013 -u system -p <password>
+
+
+
+
    +
  • +

    To create a user

    +
  • +
+
+
+
+
Create user hanauser1 password <password> no force_first_password_change
+
+
+
+
    +
  • +

    Grant below permission to the user

    +
  • +
+
+
+
+
GRANT AFLPM_CREATOR_ERASER_EXECUTE TO hanauser1
+GRANT AFL__SYS_AFL_AFLPAL_EXECUTE TO hanauser1 – here we have 2 underscore
+grant AFL__SYS_AFL_AFLPAL_EXECUTE_WITH_GRANT_OPTION to hanauser1
+grant AFLPM_CREATOR_ERASER_EXECUTE to hanauser
+GRANT DATA ADMIN TO hanauser1
+GRANT IMPORT TO hanauser1
+
+GRANT EXECUTE on _SYS_REPO.GRANT_ACTIVATED_ROLE TO hanauser1
+GRANT EXECUTE ON system.afl_wrapper_generator to hanauser1
+
+GRANT EXECUTE ON system.afl_wrapper_eraser to hanauser1
+GRANT MODELING TO hanauser1
+
+
+
+
    +
  • +

    Now connect to HXE tenant using below

    +
  • +
+
+
+
+
\c -d hxe -n localhost:39013 -u hanauser1 -p <password>
+
+
+
+
+
+

Setting up MTSJ Java back-end

+
+
+
    +
  • +

    Update application.properties file

    +
  • +
+
+
+
+
##update the below
+`spring.flyway.locations=classpath:db/migration,classpath:db/specific/hana`
+##Add the below
+spring.jpa.database=default
+spring.jpa.database-platform=org.hibernate.dialect.HANAColumnStoreDialect
+spring.datasource.driver-class-name=com.sap.db.jdbc.Driver
+spring.jpa.properties.hibernate.jdbc.lob.non_contextual_creation=true
+
+#Comment the below
+#spring.profiles.active=h2mem
+
+spring.profiles.active=hana
+
+
+
+
    +
  • +

    Update config/application.properties file

    +
  • +
+
+
+
+
##update the below
+spring.flyway.locations=classpath:db/migration,classpath:db/specific/hana
+spring.datasource.url=jdbc:sap://ip:port/?databaseName=hxe
+spring.datasource.username=username
+spring.datasource.password=password
+
+
+
+
+
+

Enabling prediction UseCase in MTSJ

+
+ +
+
+
+

Setting up MTSJ angular

+
+
+

update the following property in config file in my-thai-star\angular\src\app\core\config

+
+
+
+
enablePrediction: true,
+
+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/serverless-design.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/serverless-design.html new file mode 100644 index 00000000..ed2c17ac --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/serverless-design.html @@ -0,0 +1,727 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Serverless design (deprecated)

+
+ +
+
+
+

Introduction

+
+
+

The NodeJS back-end for My Thai Star application is going to be based on:

+
+
+
    +
  • +

    Serverless as serverless framework

    +
  • +
  • +

    devon4node as data access layer framework

    +
  • +
  • +

    DynamoDB as NoSQL Database

    +
  • +
+
+
+

To know more details about the above technologies please visit the following documentation:

+
+
+ +
+
+
+
+

Basic architecture details

+
+
+

This structure can be shown in the following example image:

+
+
+
+folder organization +
+
+
+
    +
  • +

    handlers - All function handlers following devon4node structure

    +
  • +
  • +

    src

    +
    +
      +
    • +

      model - Folder with all data model

      +
    • +
    • +

      utils - Folder with all utils like classes and functions

      +
    • +
    • +

      config.ts - File with server configs

      +
    • +
    • +

      logic.ts - File with the business logic

      +
    • +
    +
    +
  • +
  • +

    test - Folder with all tests

    +
  • +
+
+
+
+
+

Layers

+
+
+
    +
  • +

    Service Layer: this layer will expose the REST API to exchange information with the client applications.

    +
  • +
  • +

    Logic Layer: the layer in charge of hosting the business logic of the application.

    +
  • +
  • +

    Data Access Layer: the layer to communicate with the data base.

    +
  • +
+
+
+
+
+

Service layer

+
+
+

The services layer will be solved using REST services with Serverless

+
+
+

To give service to the defined User Stories we will need to implement the following services:

+
+
+
    +
  • +

    provide all available dishes.

    +
  • +
  • +

    save a booking.

    +
  • +
  • +

    save an order.

    +
  • +
  • +

    provide a list of bookings (only for waiters) and allow filtering.

    +
  • +
  • +

    provide a list of orders (only for waiters) and allow filtering.

    +
  • +
  • +

    login service (see the Security section).

    +
  • +
  • +

    provide the current user data (see the Security section)

    +
  • +
+
+
+

In order to be compatible with the other back-end implementations, we must follow the naming conventions proposed for Devon4j applications. We will define the following end points for the listed services.

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that does not belong to the Order entity).

    +
  • +
  • +

    (POST) /mythaistar/login.

    +
  • +
  • +

    (GET) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

You can find all the details for the services implementation in the Swagger definition included in the My Thai Star project on Github.

+
+
+

To treat these Http services, we must define the handlers following the devon4node convention:

+
+
+
    +
  • +

    (handlers/Http/POST/dish-search-handler) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (handlers/Http/POST/booking-handler) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (handlers/Http/POST/order-handler) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (handlers/Http/POST/booking-search-handler) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (handlers/Http/POST/order-search-handler) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (handlers/Http/POST/order-filter-handler) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that does not belong to the Order entity).

    +
  • +
  • +

    (handlers/Http/POST/login-handler) /mythaistar/login.

    +
  • +
  • +

    (handlers/Http/GET/current-user-handler) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

These handlers will define the behavior for each service and use the logical layer.

+
+
+

An example of handler definition:

+
+
+
+
oasp4fn.config({ path: '/mythaistar/services/rest/bookingmanagement/v1/booking/search' });
+export async function bookingSearch(event: HttpEvent, context: Context, callback: Function) {
+    try {
+        const search = <types.SearchCriteria>event.body;
+        const authToken = event.headers.Authorization;
+        // falta lo que viene siendo comprobar el token y eso
+
+        auth.decode(authToken, (err, decoded) => {
+            if (err || decoded.role !==  'WAITER') {
+                throw { code: 403, message: 'Forbidden'};
+            }
+
+            // body content must be SearchCriteria
+            if (!types.isSearchCriteria(search)) {
+                throw { code: 400, message: 'No booking token given' };
+            }
+
+            business.searchBooking(search, (err: types.Error | null, bookingEntity: types.PaginatedList) => {
+                if (err) {
+                    callback(new Error(`[${err.code || 500}] ${err.message}`));
+                } else {
+                    callback(null, bookingEntity);
+                }
+            });
+        });
+    } catch (err) {
+        callback(new Error(`[${err.code || 500}] ${err.message}`));
+    }
+}
+
+
+
+

The default integration for a handler is lambda. See devon4node documentation for more information about default values and how to change it.

+
+
+
+
+

==

+
+
+

If you change the integration to lambda-proxy, you must take care that in this case the data will not be parsed. You must do JSON.parse explicitly +== ==

+
+
+

After defining all the handlers, we must execute the fun command, which will generate the files serverless.yml and webpack.config.js.

+
+
+
+
+

Logic layer and Data access layer

+ +
+
+

Security with Json Web Token

+
+
+

For the Authentication and Authorization the app will implement the json web token protocol.

+
+
+
+
+

JWT basics

+
+
+

Refer to JWT basics for more information.

+
+
+
+
+

JWT implementation details

+
+
+

The Json Web Token pattern will be implemented based on the JSON web token library available on npm.

+
+
+
+
+

== Authentication

+
+
+

Based on the JSON web token approach, we will implement two methods in order to verify and user + generate the token and decode the token + return the user data. Also, as My Thai Star is a mainly public application, we will define here the resources that won’t be secured.

+
+
+

List of unsecured resources:

+
+
+
    +
  • +

    /services/rest/dishmanagement/**: to allow anonymous users to see the dishes info in the menu section.

    +
  • +
  • +

    /services/rest/ordermanagement/v1/order: to allow anonymous users to save an order. They will need a booking token but they won’t be authenticated to do this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking: to allow anonymous users to create a booking. Only a booking token is necessary to accomplish this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking/cancel/**: to allow canceling a booking from an email. Only the booking token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/accept/**: to allow guests to accept an invite. Only a guest token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/decline/**: to allow guests to reject an invite. Only a guest token is needed.

    +
  • +
+
+
+

To configure the login we will create a handler called login and then we will use the method code to verify the user and generate the token.

+
+
+
+
app.post(oasp4fn.config({ integration: 'lambda-proxy', path: '/mythaistar/login' });
+export async function login(event: HttpEvent, context: Context, callback: Function) {
+.
+.
+.
+.
+}
+
+
+
+

We have two default users created in the database:

+
+
+
    +
  • +

    user: waiter

    +
  • +
  • +

    password: waiter

    +
  • +
  • +

    role: WAITER

    +
  • +
  • +

    user: user0

    +
  • +
  • +

    password: password

    +
  • +
  • +

    role: CUSTOMER

    +
  • +
+
+
+
+
+

== Token set up

+ +
+
+

== Current User request

+
+
+

To provide the client with the current user data our application should expose a service to return the user details. In order to do this, we must define a handler called current-user-handler. This handler must decode the Authorization token and return the user data.

+
+
+
+
oasp4fn.config({
+    path: '/mythaistar/services/rest/security/v1/currentuser',
+});
+export async function currentUser(event: HttpEvent, context: Context, callback: Function) {
+    let authToken = event.headers.Authorization;
+    try {
+        auth.decode(authToken, (err: any, decoded?: any) => {
+            if (err) {
+                callback(new Error(`[403] Forbidden`));
+            } else {
+                callback(null, decoded);
+            }
+        });
+    } catch (err) {
+        callback(new Error(`[${err.code || 500}] ${err.message}`));
+    }
+}
+
+
+
+
+
+

== Authorization

+
+
+

We need to secure three services, that only should be accessible for users with role Waiter:

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter.

    +
  • +
+
+
+

To ensure this, we must decode the Authorization token and check the result. As the role is included in the token, once validated we will have this information and can guarantee access or return a 403 error.

+
+
+
+
oasp4fn.config({ path: '/mythaistar/services/rest/bookingmanagement/v1/booking/search' });
+export async function bookingSearch(event: HttpEvent, context: Context, callback: Function) {
+    const authToken = event.headers.Authorization;
+    auth.decode(authToken, (err, decoded) => {
+        try {
+            if (err || decoded.role !==  'WAITER') {
+                throw { code: 403, message: 'Forbidden' };
+            }
+
+            [...]
+
+        } catch (err) {
+            callback(new Error(`[${err.code || 500}] ${err.message}`));
+        }
+    });
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/style-guide.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/style-guide.html new file mode 100644 index 00000000..3634ac73 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/style-guide.html @@ -0,0 +1,313 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ + +
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/traefik-reverse-proxy.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/traefik-reverse-proxy.html new file mode 100644 index 00000000..d491cbc1 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/traefik-reverse-proxy.html @@ -0,0 +1,330 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Reverse proxy strategy using Traefik

+
+
+

This implementation is the same as described at My Thai Star deployment wiki page. The only thing that changes is that Traefik is used instead of NGINX.

+
+
+

Using Traefik as reverse proxy, we can define the routes using labels in the docker containers instead of using a nginx.conf file. With this, it is not necessary to modify the reverse proxy container for each application. In addition, as Traefik is listening to the docker daemon, it can detect new containers and create routes for them without rebooting.

+
+
+

Example of labels:

+
+
+
+
labels:
+    - "traefik.http.routers.angular.rule=PathPrefix(`/`)"
+    - "traefik.http.services.angular.loadBalancer.healthcheck.path=/health"
+    - "traefik.http.services.angular.loadBalancer.healthcheck.interval=10s"
+    - "traefik.http.services.angular.loadBalancer.healthcheck.scheme=http"
+
+
+
+
+
+

How to use it

+
+
+

If you want to build the images from code, change to My Thai Star root folder and execute:

+
+
+
+
$ docker-compose -f docker-compose.traefik.yml up -d --build
+
+
+
+

If you want to build the images from artifacts, change to Traefik folder (reverse-proxy/traefik) and execute:

+
+
+
+
$ docker-compose up -d --build
+
+
+
+

After a few seconds, when the healthcheck detects that the containers are running, your application will be available at http://localhost:8090. Also, the Traefik dashboard is available at http://localhost:8080.

+
+
+

If you want to check the behavior of the application when you scale up the back-end, you can execute:

+
+
+
+
$ docker-compose scale java=5
+
+
+
+

With this, the access to the java back-end will be using the load balancing method: Weighted Round Robin.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/twofactor.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/twofactor.html new file mode 100644 index 00000000..e8f3b44a --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/twofactor.html @@ -0,0 +1,381 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Two-Factor Authentication

+
+
+

Two-factor Authentication (2FA) provides an additional level of security to your account. Once enabled, in addition to supplying your username and password to login, you’ll be prompted for a code generated by an authenticator app — for example, Google Authenticator or a password manager on one of your devices.

+
+
+

By enabling 2FA, to log into your account an additional one-time password is required, which requires access to your paired device. This massively increases the barrier for an attacker to break into your account.

+
+
+
+
+

Back-end mechanism

+
+
+

In the back-end, we utilize Spring Security for any authentication.

+
+
+

Following the arrows, one can see all processes regarding authentication. The main idea is to check all credentials depending on their 2FA status and then either grant access to the specific user or deny access. This picture illustrates a normal authentication with username and password.

+
+
+
+security cross component +
+
+
+

When dealing with 2FA, another provider and filter is handling the request from /verify

+
+
+
+security cross component twofactor +
+
+
+

Here you can observe which filter will be used. +JWT-Authentication-Filter does intercept any request, which enforces being authenticated via JWT

+
+
+
+filters png +
+
+
+ + + + + +
+ + +Whenever the secret or QR code gets transferred between two parties, one must enforce SSL/TLS or IPsec to comply with RFC 6238. +
+
+
+
+
+

Activating Two-Factor Authentication

+
+
+

In the current state, TOTP +will be used for OTP generation. For this purpose we recommend the Google Authenticator or any TOTP generator out there.

+
+
+
    +
  • +

    Login with your account

    +
  • +
  • +

    Open the 2FA settings

    +
  • +
  • +

    Activate the 2FA Status

    +
  • +
  • +

    Initialize your device with either a QR-Code or a secret

    +
  • +
+
+
+
+
+

Frontend

+
+
+

These are the two main options, which you can obtain by toggling between QR-Code and secret.

+
+
+
+2FA qr code menu +
+
+
+
+2FA secret menu +
+
+
+

After an activation and logout. This prompt will ask you to enter the OTP given from your device.

+
+
+
+otp prompt +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/xamarin-design.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/xamarin-design.html new file mode 100644 index 00000000..77dd8ab8 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/xamarin-design.html @@ -0,0 +1,280 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Xamarin design

+
+
+

TODO

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/my-thai-star.wiki/xamarin-testing.html b/docs/devonfw.github.io/1.0/my-thai-star.wiki/xamarin-testing.html new file mode 100644 index 00000000..ec65df97 --- /dev/null +++ b/docs/devonfw.github.io/1.0/my-thai-star.wiki/xamarin-testing.html @@ -0,0 +1,280 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Xamarin testing

+
+
+

TODO

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/production-line.wiki/Home.html b/docs/devonfw.github.io/1.0/production-line.wiki/Home.html new file mode 100644 index 00000000..40f5c024 --- /dev/null +++ b/docs/devonfw.github.io/1.0/production-line.wiki/Home.html @@ -0,0 +1,368 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Production Line Templates

+
+
+

This repository contains a collection of templates that can be used inside a Production Line Jenkins to setup/configure and execute certain tasks.

+
+
+
+ + +
+

MrChecker

+
+
+ +
+
+
+
+

Samples

+
+ +
+
+
+

Troubleshooting

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/production-line.wiki/devon4j-mts.html b/docs/devonfw.github.io/1.0/production-line.wiki/devon4j-mts.html new file mode 100644 index 00000000..7ebb57b3 --- /dev/null +++ b/docs/devonfw.github.io/1.0/production-line.wiki/devon4j-mts.html @@ -0,0 +1,711 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4j My-Thai-Star Sample Application Template for Production Line

+
+ +
+
+
+

Introduction

+
+
+

Please read all of the following sections carefully.

+
+
+
+
+

Overview

+
+
+

This template will configure your PL instance to have a 'ready to use' My-Thai-Star devonfw application. It is only an example. In order to start a new project, please use the other templates. This includes:

+
+
+
    +
  • +

    Cloning the official My-Thai-Star (https://github.com/devonfw/my-thai-star) repository into your GitLab, which allows you to do customizations on your own.

    +
  • +
  • +

    Adding a build job for the Angular front-end, including a SonarQube analysis and a delivery to Nexus as zip and docker image.

    +
  • +
  • +

    Adding a build job for the Java back-end, including a SonarQube analysis and a deployment to Nexus as zip and docker image.

    +
  • +
  • +

    Adding a deployment job for the Angular front-end

    +
  • +
  • +

    Adding a deployment job for the Java back-end

    +
  • +
  • +

    Adding a deployment job for the reverse proxy. Please see My Thai Star deployment documentation

    +
  • +
+
+
+

Especially the build and deployment jobs require several additional Jenkins plugins, which are not part of the PL by default. The Template will also take care of those installations.

+
+
+

All build and deployment jobs are taken from the official My-Thai-Star (https://github.com/devonfw/my-thai-star) repository. The created build and deployment jobs inside Jenkins will use the Jenkinsfiles from the cloned repo in Gitlab. These are currently the following Jenkinsfiles:

+
+
+
+
+

Jenkins Jobs

+
+
+
Jenkins Jobs
+

|== == == == == == == == == == +| Jenkins job name | Path to Jenkinsfile in repo | Description +| MyThaiStar_FRONTEND_BUILD | jenkins/angular/cicd/Jenkinsfile | Builds and tests the Angular frontend. Pushes artifacts to Nexus. +| MyThaiStar_SERVER_BUILD | jenkins/java/cicd/Jenkinsfile | Builds and tests the Java backend. Pushes artifacts to Nexus. +| MyThaiStar_FRONTEND_DEPLOY | jenkins/angular/deployment/Jenkinsfile | Frontend deployment job. Downloads the docker images from Nexus3 and starts a new container using that image. +| MyThaiStar_SERVER_DEPLOY | jenkins/java/deployment/Jenkinsfile | Backend deployment job. Downloads the docker images from Nexus3 and starts a new container using that image. +| MyThaiStar_REVERSE-PROXY_DEPLOY | jenkins/deployment/Jenkinsfile | Reverse proxy deployment job. Downloads the docker images from Nexus3 and starts a new container using that image. With this job you can also build the reverse proxy image. +|== == == == == == == == == ==

+
+
+
+
+

How to report Issues

+
+
+

This template is independent from PL and devonfw releases and is also not really connected to one of the projects. Therefore issues that occur during the template setup or execution should be tracked in the issue section of this GitHub project.

+
+
+
+
+

How to contribute

+
+
+

In case you see improvements we would love to see a Pull Request.

+
+
+
+
+

Prerequisites before running the template

+
+ +
+
+
+

Production Line Components

+
+
+

To use the template you need to make sure that your PL has the following components installed:

+
+
+
    +
  • +

    Jenkins (required to run the template and to execute the build/deployment Jobs)

    +
  • +
  • +

    SonarQube (required for a static code analysis)

    +
  • +
  • +

    GitLab (required as a repository)

    +
  • +
  • +

    Nexus3 (required to store the build artifacts)

    +
  • +
+
+
+
+
+

==

+
+
+

Additional components can be ordered from the ProductionLine service team. +== ==

+
+
+
+
+

Technical User Setup

+
+
+

In order to configure the services, we need technical users for the following components:

+
+
+
    +
  • +

    Gitlab

    +
  • +
  • +

    Nexus3

    +
  • +
  • +

    SonarQube

    +
  • +
+
+
+

The following sections describe how to configure the components to enable technical users and tokens.

+
+
+
+
+

== Manual configuration

+
+
+

In order to configure the Production Line components manually you can follow this guide

+
+
+
+
+

== Automatic configuration

+
+
+

In order to configure the Production Line components automatically you can follow this guide

+
+
+

There is one thing that initialize-template cannot do automatically: the GitLab token creation.

+
+
+

The creation of the GitLab Group and Project will require a private GitLab token which has to be created manually. The token can be obtained like this:

+
+
+
    +
  1. +

    Go to your Profile in Gitlab

    +
  2. +
+
+
+
+500 +
+
+
+
    +
  1. +

    Next click on the pen icon

    +
  2. +
+
+
+
+500 +
+
+
+
    +
  1. +

    On the left menu choose Access Tokens and put token name and check fields like below

    +
  2. +
+
+
+
+600 +
+
+
+
    +
  1. +

    Click "Create personal access token", you should receive notification about created token and token string. Copy the token string.

    +
  2. +
+
+
+
+600 +
+
+
+
+
+

==

+
+
+

The GitLab API user needs to have API access and the rights to create a new group. To set this permission follow the next steps: +== ==

+
+
+
    +
  1. +

    Enter the Admin control panel

    +
  2. +
  3. +

    Select 'Users'

    +
  4. +
  5. +

    Select the user(s) in question and click 'Edit'

    +
  6. +
  7. +

    Scroll down to 'Access' and un-tick 'Can Create Group'

    +
  8. +
+
+
+
+
+

Build/Deployment Requirements

+
+
+

The My Thai Star CICD pipelines will create a docker image and then the deployment pipelines will use it in order to deploy the application. As Production Line does not include a docker daemon, you need an additional server to do it. That server needs:

+
+
+ +
+
+
+
+

How to run it

+
+ +
+
+
+

==

+
+
+

If Jenkins needs to install plugins, a restart will be performed. +So please make sure, that nothing important is running. +== ==

+
+
+
+
+

==

+
+
+

We have job-parameters inside the template Jenkinsfile that will only be active if Jenkins has run the job at least once! +== ==

+
+
+
+
+

Setup template job in Jenkins

+
+
+

The guide on how to add a template to your Jenkins can be found in the root directory of the template repository: https://github.com/devonfw/production-line.git

+
+
+
+
+

Execute the Jenkins job in your Jenkins

+
+
+
    +
  • +

    Go to the Jenkins job.

    +
  • +
  • +

    Execute job.

    +
  • +
  • +

    It will try to configure and setup the PL components such as Jenkins/Gitlab and Nexus.

    +
  • +
+
+
+
+
+

==

+
+
+

If a restart was needed, you need to trigger the job again! +== ==

+
+
+
    +
  • +

    The job should now show the required parameters, you only need to change the GITLAB PRIVATE TOKEN that you should have generated in the prerequisite section

    +
  • +
+
+
+
+600 +
+
+
+

When everything is "green" the template is done and you can have a look in the created "MTS" folder in Jenkins.

+
+
+
+
+

==

+
+
+

It will take a few minutes to clone the official MTS repository to the internal Gitlab. So you need to wait before executing the build jobs at the first time. +== ==

+
+
+
+
+

== Build Jobs

+
+
+

You can now execute the build for the frontend and also the backend. They do not require any parameters to run. The expected result is, that both jobs can run without any errors. They will build, test and deploy the artifacts to Nexus3.

+
+
+
+
+

== Deployment Jobs

+
+
+

All deployment jobs have several parameters configured in their Jenkinsfile. Unfortunately, Jenkins does not pick them up immediately, so you need to execute the job once, by pressing the "Build now" button. +The run should fail quite fast and once you refresh the page, the "Build now" button should have changed to "Build with Parameters". If you now click on the button you should see the parameters below:

+
+
+
+Jenkins Deployment Parameters +
+
+
+

You need to set the following parameters in order to get it running:

+
+
+
Required Parameters
+

|== == == == == == == == == == +| Parameter | Description +| registryUrl | The docker registry URL where image is stored. +| registryCredentialsId | The nexus credentials to access to the docker registry. +| VERSION | The version of the image that was built in the build jobs. For example "1.12.3-SNAPSHOT". +| dockerNetwork | The docker network where the container will be deployed. +|== == == == == == == == == ==

+
+
+

Also, the reverse proxy deployment has two more parameters:

+
+
+
Reverse Proxy extra parameters
+

|== == == == == == == == == == +| Parameter | Description +| buildReverseProxy | If true, it will build a new reverse proxy docker image and then deploy that image. +| port | The port where the application will be listening. It’s a host port, not a container port. +|== == == == == == == == == ==

+
+
+
+
+

==

+
+
+

You can deploy multiple versions of My Thai Star in the same machine by changing the docker network in all deployments and the port in the reverse proxy deployment. +== ==

+
+
+
+
+

==

+
+
+

You must choose the same docker network for all deployments +== ==

+
+
+
+
+

==

+
+
+

You need to deploy the angular and java applications before the reverse proxy. Also, the first you need to check the buildReverseProxy parameter in order to create the reverse proxy image and then deploy the container. +== ==

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/production-line.wiki/devon4j-pl.html b/docs/devonfw.github.io/1.0/production-line.wiki/devon4j-pl.html new file mode 100644 index 00000000..c7d2b0de --- /dev/null +++ b/docs/devonfw.github.io/1.0/production-line.wiki/devon4j-pl.html @@ -0,0 +1,513 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+
+400 +
+
+
+
+400 +
+
+
+

devon4j Template for Production Line

+
+ +
+
+
+

Overview

+
+
+

This template will configure your PL instance to have a 'ready to use' devon4j template. It can be used as a starting point for your Java projects.
+This includes CICD files for a devonfw technology stack with configuration for:

+
+
+
    +
  • +

    docker or openshift deployment

    +
  • +
  • +

    pushing artifacts to nexus3

    +
  • +
+
+
+
+
+

Prerequisites

+
+
+

To be able to run the Jenkins devon4j job under ProductionLine you need to configure the settings below in Jenkins and GitLab

+
+
+ +
+
+
+500 +
+
+
+

Next click on the pen icon

+
+
+
+500 +
+
+
+

On the left menu choose Access Tokens and put token name and check fields like below

+
+
+
+600 +
+
+
+

Click "Create personal access token", you should receive notification about created token and token string. Copy the token string.

+
+
+
+800 +
+
+
+

The GitLab API user needs to have API access and the rights to create a new group. To set this permission follow the next steps:

+
+
+
    +
  • +

    Enter the Admin control panel

    +
  • +
  • +

    Select 'Users'

    +
  • +
  • +

    Select the user(s) in question and click 'Edit'

    +
  • +
  • +

    Scroll down to 'Access' and enable 'Can Create Group'

    +
  • +
+
+
+
+
+

How to insert the Template

+
+
+

In order to add the template, you can follow the guide.

+
+
+
+
+

How to run the Template

+
+
+
    +
  • +

    Build the job with parameters:

    +
    +
      +
    • +

      PROJECT_NAME: The project name.

      +
    • +
    • +

      PROJECT_SUFFIX: The project name suffix. As your project can have multiple assets (backend, frontend, middleware…​), you can define a suffix in order to identify each one with a different name

      +
    • +
    • +

      DB_TYPE: The type of the database. Possible values: h2|postgresql|mysql|mariadb|oracle|hana|db2

      +
    • +
    • +

      GROUP_ID: The group id of the project.

      +
    • +
    • +

      GITLAB_USER_PRIVATE_TOKEN: Private Token of a Production Line Gitlab User that can be used to create repositories. Created as prerequisite, you only need to add it as credential with GitLab API token Kind.

      +
    • +
    • +

      GITLAB_CREATE_GROUP_NAME: Name of the GitLab group. The repository will be created inside this group.

      +
    • +
    • +

      GITLAB_CREATE_PROJECT_DESCRIPTION: Description of the repository.

      +
    • +
    • +

      DEPLOY: Choose the environment where you want to deploy. The deployment could be none, docker or openshift. If docker or openshift were selected, extra parameters will be required in their dedicated steps:

      +
      +
        +
      • +

        Configuring DOCKER:

        +
        +
          +
        • +

          DOCKER_URL: The remote docker daemon URL

          +
        • +
        • +

          DOCKER_CERT: Credentials to access docker daemon. If the daemon is not secure, you can leave this empty.

          +
        • +
        +
        +
      • +
      • +

        Configuring Openshift:

        +
        +
          +
        • +

          OC_NAME: Openshift cluster name. It was defined in the Openshift Configuration template

          +
        • +
        • +

          DOCKER_REGISTRY_CREDENTIALS: Nexus docker registry user credentials. It was created in the initialize instance pipeline. The default username is nexus-api, the default password is the same as your service account.

          +
        • +
        +
        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+

After executing this template, you will have:

+
+
+
    +
  • +

    A new GitLab repository.

    +
    +
      +
    • +

      The repository group is the value passed in the GITLAB_CREATE_GROUP_NAME parameter.

      +
    • +
    • +

      The repository name is PROJECT_NAME-PROJECT_SUFFIX

      +
    • +
    • +

      The repository contains a clean devon4j project.

      +
    • +
    • +

      The repository contains a Jenkinsfile.

      +
    • +
    • +

      The repository has already set up the Jenkins webhook.

      +
    • +
    • +

      The repository protects the branches master and release/* so that only maintainers can push. Develop is the default branch.

      +
    • +
    +
    +
  • +
  • +

    A new multibranch pipeline in jenkins inside the folder PROJECT_NAME with the name PROJECT_NAME-PROJECT_SUFFIX. As the webhook is already configured, it should be executed on every push to GitLab repository.

    +
  • +
  • +

    If you choose docker for deployment, your Jenkinsfile should contain two extra stages in order to build and deploy the docker image. Also, the repository should contain the Dockerfiles to create the docker images.

    +
  • +
  • +

    If you choose OpenShift for deployment, three new applications should be created in your OpenShift. Those applications represent three environments of your application: develop, uat and stage. Also, your Jenkinsfile should contain three extra stages in order to build and deploy the docker image and check that the pod is running without errors. Also, the repository should contain the Dockerfiles to create the docker images.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/production-line.wiki/devon4net.html b/docs/devonfw.github.io/1.0/production-line.wiki/devon4net.html new file mode 100644 index 00000000..711afbc4 --- /dev/null +++ b/docs/devonfw.github.io/1.0/production-line.wiki/devon4net.html @@ -0,0 +1,513 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+
+400 +
+
+
+
+400 +
+
+
+

devon4net Template for Production Line

+
+ +
+
+
+

Overview

+
+
+

This template will configure your PL instance to have a 'ready to use' devon4net template. It can be used as a starting point for your .NET projects.
+This includes CICD files for a devonfw technology stack with configuration for:

+
+
+
    +
  • +

    ProductionLine instance

    +
  • +
  • +

    docker or openshift deployment

    +
  • +
  • +

    pushing artifacts to nexus3

    +
  • +
+
+
+
+
+

Prerequisites

+
+
+

To be able to run the Jenkins devon4net job under ProductionLine you need to configure the settings below in Jenkins and GitLab

+
+
+ +
+
+
+500 +
+
+
+

Next click on the pen icon

+
+
+
+500 +
+
+
+

On the left menu choose Access Tokens and put token name and check fields like below

+
+
+
+600 +
+
+
+

Click "Create personal access token", you should receive notification about created token and token string. Copy the token string.

+
+
+
+600 +
+
+
+

The GitLab API user needs to have API access and the rights to create a new group. To set this permission follow the next steps:

+
+
+
    +
  • +

    Enter the Admin control panel

    +
  • +
  • +

    Select 'Users'

    +
  • +
  • +

    Select the user(s) in question and click 'Edit'

    +
  • +
  • +

    Scroll down to 'Access' and enable 'Can Create Group'

    +
  • +
+
+
+
+
+

How to insert the Template

+
+
+

In order to add the template, you can follow the guide.

+
+
+
+
+

How to run the Template

+
+
+
    +
  • +

    Build the job with parameters:

    +
    +
      +
    • +

      PROJECT_NAME: The project name.

      +
    • +
    • +

      PROJECT_SUFFIX: The project name suffix. As your project can have multiple assets (backend, frontend, middleware…​), you can define a suffix in order to identify each one with a different name

      +
    • +
    • +

      GROUP_ID: The group id of the project.

      +
    • +
    • +

      GITLAB_USER_PRIVATE_TOKEN: Private Token of a Production Line Gitlab User that can be used to create repositories. Created as prerequisite, you only need to add it as credential with GitLab API token Kind.

      +
    • +
    • +

      GITLAB_CREATE_GROUP_NAME: Name of the GitLab group. The repository will be created inside this group.

      +
    • +
    • +

      GITLAB_CREATE_PROJECT_DESCRIPTION: Description of the repository.

      +
    • +
    • +

      DEPLOY: Choose the environment where you want to deploy. The deployment could be none, docker or openshift. If docker or openshift were selected, extra parameters will be required in their dedicated steps:

      +
      +
        +
      • +

        Configuring DOCKER:

        +
        +
          +
        • +

          DOCKER_URL: The remote docker daemon URL

          +
        • +
        • +

          DOCKER_CERT: Credentials to access docker daemon. If the daemon is not secure, you can leave this empty.

          +
        • +
        +
        +
      • +
      • +

        Configuring Openshift:

        +
        +
          +
        • +

          OC_NAME: Openshift cluster name. It was defined in the Openshift Configuration template

          +
        • +
        • +

          DOCKER_REGISTRY_CREDENTIALS: Nexus docker registry user credentials. It was created in the initialize instance pipeline. The default username is nexus-api, the default password is the same as your service account.

          +
        • +
        +
        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+

After executing this template, you will have:

+
+
+
    +
  • +

    A new GitLab repository.

    +
    +
      +
    • +

      The repository group is the value passed in the GITLAB_CREATE_GROUP_NAME parameter.

      +
    • +
    • +

      The repository name is PROJECT_NAME-PROJECT_SUFFIX

      +
    • +
    • +

      The repository contains a clean devon4net project.

      +
    • +
    • +

      The repository contains a Jenkinsfile.

      +
    • +
    • +

      The repository has already configured the jenkins webhook.

      +
    • +
    • +

      The repository protects the branches master and release/* so that only maintainers can push. Develop is the default branch.

      +
    • +
    +
    +
  • +
  • +

    A new multibranch pipeline in jenkins inside the folder PROJECT_NAME with the name PROJECT_NAME-PROJECT_SUFFIX. As the webhook is already configured, it should be executed on every push to GitLab repository.

    +
  • +
  • +

    If you choose docker for deployment, your Jenkinsfile should contain two extra stages in order to build and deploy the docker image. Also, the repository should contain the Dockerfiles to create the docker images.

    +
  • +
  • +

    If you choose OpenShift for deployment, three new applications should be created in your OpenShift. Those applications represent three environments of your application: develop, uat and stage. Also, your Jenkinsfile should contain three extra stages in order to build and deploy the docker image and check that the pod is running without errors. Also, the repository should contain the Dockerfiles to create the docker images.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/production-line.wiki/devon4ng-pl.html b/docs/devonfw.github.io/1.0/production-line.wiki/devon4ng-pl.html new file mode 100644 index 00000000..a8b0afec --- /dev/null +++ b/docs/devonfw.github.io/1.0/production-line.wiki/devon4ng-pl.html @@ -0,0 +1,513 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+
+400 +
+
+
+
+400 +
+
+
+

devon4ng Template for Production Line

+
+ +
+
+
+

Overview

+
+
+

This template will configure your PL instance to have a 'ready to use' devon4ng template. It can be used as a starting point for your Angular projects.
+This includes CICD files for a devonfw technology stack with configuration for:

+
+
+
    +
  • +

    ProductionLine instance

    +
  • +
  • +

    docker or openshift deployment

    +
  • +
  • +

    pushing artifacts to nexus3

    +
  • +
+
+
+
+
+

Prerequisites

+
+
+

To be able to run the Jenkins Angular job under ProductionLine you need to configure the settings below in Jenkins and GitLab

+
+
+ +
+
+
+500 +
+
+
+

Next click on the pen icon

+
+
+
+500 +
+
+
+

On the left menu choose Access Tokens and put token name and check fields like below

+
+
+
+600 +
+
+
+

Click "Create personal access token", you should receive notification about created token and token string. Copy the token string.

+
+
+
+600 +
+
+
+

The GitLab API user needs to have API access and the rights to create a new group. To set this permission follow the next steps:

+
+
+
    +
  • +

    Enter the Admin control panel

    +
  • +
  • +

    Select 'Users'

    +
  • +
  • +

    Select the user(s) in question and click 'Edit'

    +
  • +
  • +

    Scroll down to 'Access' and enable 'Can Create Group'

    +
  • +
+
+
+
+
+

How to insert the Template

+
+
+

In order to add the template, you can follow the guide.

+
+
+
+
+

How to run the Template

+
+
+
    +
  • +

    Build the job with parameters:

    +
    +
      +
    • +

      PROJECT_NAME: The project name.

      +
    • +
    • +

      PROJECT_SUFFIX: The project name suffix. As your project can have multiple assets (backend, frontend, middleware…​), you can define a suffix in order to identify each one with a different name

      +
    • +
    • +

      GROUP_ID: The group id of the project.

      +
    • +
    • +

      GITLAB_USER_PRIVATE_TOKEN: Private Token of a Production Line Gitlab User that can be used to create repositories. Created as prerequisite, you only need to add it as credential with GitLab API token Kind.

      +
    • +
    • +

      GITLAB_CREATE_GROUP_NAME: Name of the GitLab group. The repository will be created inside this group.

      +
    • +
    • +

      GITLAB_CREATE_PROJECT_DESCRIPTION: Description of the repository.

      +
    • +
    • +

      DEPLOY: Choose the environment where you want to deploy. The deployment could be none, docker or openshift. If docker or openshift were selected, extra parameters will be required in their dedicated steps:

      +
      +
        +
      • +

        Configuring DOCKER:

        +
        +
          +
        • +

          DOCKER_URL: The remote docker daemon URL

          +
        • +
        • +

          DOCKER_CERT: Credentials to access docker daemon. If the daemon is not secure, you can leave this empty.

          +
        • +
        +
        +
      • +
      • +

        Configuring Openshift:

        +
        +
          +
        • +

          OC_NAME: Openshift cluster name. It was defined in the Openshift Configuration template

          +
        • +
        • +

          DOCKER_REGISTRY_CREDENTIALS: Nexus docker registry user credentials. It was created in the initialize instance pipeline. The default username is nexus-api, the default password is the same as your service account.

          +
        • +
        +
        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+

After executing this template, you will have:

+
+
+
    +
  • +

    A new GitLab repository.

    +
    +
      +
    • +

      The repository group is the value passed in the GITLAB_CREATE_GROUP_NAME parameter.

      +
    • +
    • +

      The repository name is PROJECT_NAME-PROJECT_SUFFIX

      +
    • +
    • +

      The repository contains a clean devon4ng project.

      +
    • +
    • +

      The repository contains a Jenkinsfile.

      +
    • +
    • +

      The repository has already configured the jenkins webhook.

      +
    • +
    • +

      The repository protects the branches master and release/* so that only maintainers can push. Develop is the default branch.

      +
    • +
    +
    +
  • +
  • +

    A new multibranch pipeline in jenkins inside the folder PROJECT_NAME with the name PROJECT_NAME-PROJECT_SUFFIX. As the webhook is already configured, it should be executed on every push to GitLab repository.

    +
  • +
  • +

    If you choose docker for deployment, your Jenkinsfile should contain two extra stages in order to build and deploy the docker image. Also, the repository should contain the Dockerfiles to create the docker images.

    +
  • +
  • +

    If you choose OpenShift for deployment, three new applications should be created in your OpenShift. Those applications represent three environments of your application: develop, uat and stage. Also, your Jenkinsfile should contain three extra stages in order to build and deploy the docker image and check that the pod is running without errors. Also, the repository should contain the Dockerfiles to create the docker images.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/production-line.wiki/devon4node-pl.html b/docs/devonfw.github.io/1.0/production-line.wiki/devon4node-pl.html new file mode 100644 index 00000000..2d4efa68 --- /dev/null +++ b/docs/devonfw.github.io/1.0/production-line.wiki/devon4node-pl.html @@ -0,0 +1,513 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+
+400 +
+
+
+
+400 +
+
+
+

devon4node Template for Production Line

+
+ +
+
+
+

Overview

+
+
+

This template will configure your PL instance to have a 'ready to use' devon4node template. It can be used as a starting point for your Node projects.
+This includes CICD files for a devonfw technology stack with configuration for:

+
+
+
    +
  • +

    ProductionLine instance

    +
  • +
  • +

    docker or openshift deployment

    +
  • +
  • +

    pushing artifacts to nexus3

    +
  • +
+
+
+
+
+

Prerequisites

+
+
+

To be able to run the Jenkins Node job under ProductionLine you need to configure the settings below in Jenkins and GitLab

+
+
+ +
+
+
+500 +
+
+
+

Next click on the pen icon

+
+
+
+500 +
+
+
+

On the left menu choose Access Tokens and put token name and check fields like below

+
+
+
+600 +
+
+
+

Click "Create personal access token", you should receive notification about created token and token string. Copy the token string.

+
+
+
+600 +
+
+
+

The GitLab API user needs to have API access and the rights to create a new group. To set this permission follow the next steps:

+
+
+
    +
  • +

    Enter the Admin control panel

    +
  • +
  • +

    Select 'Users'

    +
  • +
  • +

    Select the user(s) in question and click 'Edit'

    +
  • +
  • +

    Scroll down to 'Access' and enable 'Can Create Group'

    +
  • +
+
+
+
+
+

How to insert the Template

+
+
+

In order to add the template, you can follow the guide.

+
+
+
+
+

How to run the Template

+
+
+
    +
  • +

    Build the job with parameters:

    +
    +
      +
    • +

      PROJECT_NAME: The project name.

      +
    • +
    • +

      PROJECT_SUFFIX: The project name suffix. As your project can have multiple assets (backend, frontend, middleware…​), you can define a suffix in order to identify each one with a different name

      +
    • +
    • +

      GROUP_ID: The group id of the project.

      +
    • +
    • +

      GITLAB_USER_PRIVATE_TOKEN: Private Token of a Production Line Gitlab User that can be used to create repositories. Created as prerequisite, you only need to add it as credential with GitLab API token Kind.

      +
    • +
    • +

      GITLAB_CREATE_GROUP_NAME: Name of the GitLab group. The repository will be created inside this group.

      +
    • +
    • +

      GITLAB_CREATE_PROJECT_DESCRIPTION: Description of the repository.

      +
    • +
    • +

      DEPLOY: Choose the environment where you want to deploy. The deployment could be none, docker or openshift. If docker or openshift were selected, extra parameters will be required in their dedicated steps:

      +
      +
        +
      • +

        Configuring DOCKER:

        +
        +
          +
        • +

          DOCKER_URL: The remote docker daemon URL

          +
        • +
        • +

          DOCKER_CERT: Credentials to access docker daemon. If the daemon is not secure, you can leave this empty.

          +
        • +
        +
        +
      • +
      • +

        Configuring Openshift:

        +
        +
          +
        • +

          OC_NAME: Openshift cluster name. It was defined in the Openshift Configuration template

          +
        • +
        • +

          DOCKER_REGISTRY_CREDENTIALS: Nexus docker registry user credentials. It was created in the initialize instance pipeline. The default username is nexus-api, the default password is the same as your service account.

          +
        • +
        +
        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+

After executing this template, you will have:

+
+
+
    +
  • +

    A new GitLab repository.

    +
    +
      +
    • +

      The repository group is the value passed in the GITLAB_CREATE_GROUP_NAME parameter.

      +
    • +
    • +

      The repository name is PROJECT_NAME-PROJECT_SUFFIX

      +
    • +
    • +

      The repository contains a clean devon4node project.

      +
    • +
    • +

      The repository contains a Jenkinsfile.

      +
    • +
    • +

      The repository has already configured the jenkins webhook.

      +
    • +
    • +

      The repository protects the branches master and release/* so that only maintainers can push. Develop is the default branch.

      +
    • +
    +
    +
  • +
  • +

    A new multibranch pipeline in jenkins inside the folder PROJECT_NAME with the name PROJECT_NAME-PROJECT_SUFFIX. As the webhook is already configured, it should be executed on every push to GitLab repository.

    +
  • +
  • +

    If you choose docker for deployment, your Jenkinsfile should contain two extra stages in order to build and deploy the docker image. Also, the repository should contain the Dockerfiles to create the docker images.

    +
  • +
  • +

    If you choose OpenShift for deployment, three new applications should be created in your OpenShift. Those applications represent three environments of your application: develop, uat and stage. Also, your Jenkinsfile should contain three extra stages in order to build and deploy the docker image and check that the pod is running without errors. Also, the repository should contain the Dockerfiles to create the docker images.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/production-line.wiki/docker-configuration.html b/docs/devonfw.github.io/1.0/production-line.wiki/docker-configuration.html new file mode 100644 index 00000000..b24672f8 --- /dev/null +++ b/docs/devonfw.github.io/1.0/production-line.wiki/docker-configuration.html @@ -0,0 +1,453 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Docker Configuration

+
+ +
+
+
+

Introduction

+
+
+

Docker is the most popular container technology. It allows you to build your application in an image and then deploy it into a container.

+
+
+
+
+

Overview

+
+
+

This template allows you to configure Jenkins in order to work with docker.

+
+
+

It will:

+
+
+
    +
  • +

    Add docker client as custom tool.

    +
  • +
  • +

    Configure docker to work with an external docker daemon.

    +
  • +
+
+
+
+
+

Prerequisites

+
+
+

In order to execute this template, you need the following plugins installed in your Jenkins:

+
+ +
+ + + + + +
+ + +The initialize instance template will install all plugins if you select 'Docker' or 'Docker+Openshift' in the installDeploymentPlugins parameter +
+
+
+
+
+

Template

+
+
+

This template will be automatically created in your jenkins after executing the Initialize_Instance template inside the UTILS folder with the name Docker_Configuration.

+
+
+

For manual creation see: How to add a Template

+
+
+ + + + + +
+ + +This template needs the devonfw Production Line Shared Lib +
+
+
+
+
+

Parameters

+
+
+

The only parameter required is remote docker daemon URL. Example: tcp://127.0.0.1:2367

+
+
+ + + + + +
+ + +You need to expose the docker daemon manually in your machine. Here you can find how to do it +
+
+
+ + + + + +
+ + +This configuration requires that the docker daemon has no security. It’s prepared for development environments, for production environments please add security to your docker daemon. +
+
+
+
+
+

Execution

+
+
+
    +
  1. +

    Press the Build with Parameters button

    +
  2. +
  3. +

    Insert remote docker daemon URL.

    +
  4. +
  5. +

    Press the Build button.

    +
  6. +
  7. +

    Wait until the pipeline ends.

    +
  8. +
+
+
+
+docker configuration +
+
+
+
+docker configuration2 +
+
+
+

Then, you can see that the docker is configured and the remote docker daemon environment variable is set:

+
+
+
+docker env var +
+
+
+
+docker custom tool +
+
+
+

The environment variable is configured globally, if you want to use another remote docker daemon for a specific build, you can override the DOCKER_HOST environment variable in your job.

+
+
+

If the DOCKER_HOST is already configured globally, when you execute again this template the value will not be changed. You need to change the value manually at: Jenkins → Manage Jenkins → Configure System → Global properties

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/production-line.wiki/from-existing-devonfw.html b/docs/devonfw.github.io/1.0/production-line.wiki/from-existing-devonfw.html new file mode 100644 index 00000000..15ed8523 --- /dev/null +++ b/docs/devonfw.github.io/1.0/production-line.wiki/from-existing-devonfw.html @@ -0,0 +1,484 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+
+400 +
+
+
+
+400 +
+
+
+

From existing devonfw Template for Production Line

+
+ +
+
+
+

Overview

+
+
+

The From existing devonfw template is very similar to the devon4j, devon4ng, devon4net and devon4node templates. The main difference is that the From existing devonfw template will not create a new devonfw project; it takes an existing project from GitLab and then adds/creates everything needed in order to apply a CICD strategy to your project.

+
+
+
+
+

Prerequisites

+
+
+

To be able to run the Jenkins job under ProductionLine you need to configure the settings below in Jenkins and GitLab

+
+
+
    +
  • +

    Jenkins

    +
    + +
    +
  • +
  • +

    Gitlab

    +
    +
      +
    • +

      Create a project and upload your current code. In order to start a new project in your local machine, you can use the devonfw-ide. The project must be a devon4j, devon4ng, devon4net or devon4node project.

      +
    • +
    • +

      Generate User Private Token
      +Go to your Profile in Gitlab

      +
    • +
    +
    +
  • +
+
+
+
+500 +
+
+
+

Next click on the pen icon

+
+
+
+500 +
+
+
+

On the left menu choose Access Tokens and put token name and check fields like below

+
+
+
+600 +
+
+
+

Click "Create personal access token", you should receive notification about created token and token string. Copy the token string.

+
+
+
+600 +
+
+
+

The GitLab API user needs to have API access and the rights to create a new group. To set this permission follow the next steps:

+
+
+
    +
  • +

    Enter the Admin control panel

    +
  • +
  • +

    Select 'Users'

    +
  • +
  • +

    Select the user(s) in question and click 'Edit'

    +
  • +
  • +

    Scroll down to 'Access' and enable 'Can Create Group'

    +
  • +
+
+
+
+
+

How to insert the Template

+
+
+

In order to add the template, you can follow the guide.

+
+
+
+
+

How to run the Template

+
+
+
    +
  • +

    Build the job with parameters:

    +
    +
      +
    • +

      REPOSITORY_URL: The internal repository URL. Without protocol. Example: gitlab-core:80/gitlab/mygroup/myproject-frontend.

      +
    • +
    • +

      GIT_BRANCH: The branch where you want to apply the CICD changes.

      +
    • +
    • +

      MERGE_STRATEGY: Choose the merge strategy for cicdgen. For more information see the CICDGEN merge documentation page

      +
    • +
    • +

      GITLAB_USER_PRIVATE_TOKEN: Private Token of a Production Line Gitlab User that can be used to create/update repositories. The token proprietary user must have admin rights in the repository. Created as prerequisite, you only need to add it as credential with GitLab API token Kind.

      +
    • +
    • +

      DEPLOY: Choose the environment where you want to deploy. The deployment could be none, docker or openshift. If docker or openshift were selected, extra parameters will be required in their dedicated steps:

      +
      +
        +
      • +

        Configuring DOCKER:

        +
        +
          +
        • +

          DOCKER_URL: The remote docker daemon URL

          +
        • +
        • +

          DOCKER_CERT: Credentials to access docker daemon. If the daemon is not secure, you can leave this empty.

          +
        • +
        +
        +
      • +
      • +

        Configuring Openshift:

        +
        +
          +
        • +

          OC_NAME: Openshift cluster name. It was defined in the Openshift Configuration template

          +
        • +
        • +

          DOCKER_REGISTRY_CREDENTIALS: Nexus docker registry user credentials. It was created in the initialize instance pipeline. The default username is nexus-api, the default password is the same as your service account.

          +
        • +
        +
        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+

After executing this template, you will have:

+
+
+
    +
  • +

    Your GitLab project updated.

    +
    +
      +
    • +

      Added a Jenkinsfile with all CICD stages.

      +
    • +
    • +

      The repository is updated in order to have the jenkins webhook.

      +
    • +
    +
    +
  • +
  • +

    A new multibranch pipeline in jenkins inside the folder PROJECT_NAME with the name PROJECT_NAME-PROJECT_SUFFIX. As the webhook is already configured, it should be executed on every push to GitLab repository.

    +
  • +
  • +

    If you choose docker for deployment, your Jenkinsfile should contain two extra stages in order to build and deploy the docker image. Also, the repository should contain the Dockerfiles to create the docker images.

    +
  • +
  • +

    If you choose OpenShift for deployment, three new applications should be created in your OpenShift. Those applications represent three environments of your application: develop, uat and stage. Also, your Jenkinsfile should contain three extra stages in order to build and deploy the docker image and check that the pod is running without errors. Also, the repository should contain the Dockerfiles to create the docker images.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/production-line.wiki/how-to-add-a-template.html b/docs/devonfw.github.io/1.0/production-line.wiki/how-to-add-a-template.html new file mode 100644 index 00000000..65dfe1c1 --- /dev/null +++ b/docs/devonfw.github.io/1.0/production-line.wiki/how-to-add-a-template.html @@ -0,0 +1,325 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

How to add a Template to your PL instance

+
+
+
    +
  • +

    Go to Jenkins.

    +
  • +
  • +

    On the upper left click on "New Element" to create a new Jenkins job.

    +
  • +
  • +

    Choose a name for the job such as "MTS-template-seed-job". The job type has to be "Pipeline". Click on OK.

    +
  • +
+
+
+
+newjenkinsjob +
+
+
+
    +
  • +

    Scroll down to the bottom of the job creation page where you will find the "Pipeline" section.

    +
    +
      +
    • +

      Switch to "Pipeline script from SCM".

      +
    • +
    • +

      Set "SCM" to "Git".

      +
    • +
    • +

      Set "Repository URL" to: https://github.com/devonfw/production-line.git

      +
    • +
    • +

      Credentials can be left empty, because the repository is public.

      +
    • +
    • +

      Set "Script Path" to the template that you want to use e.g. "devon4j-mts/Jenkinsfile".

      +
    • +
    +
    +
  • +
+
+
+
+pipelinesettings +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/production-line.wiki/initialize-instance-manually.html b/docs/devonfw.github.io/1.0/production-line.wiki/initialize-instance-manually.html new file mode 100644 index 00000000..d481032d --- /dev/null +++ b/docs/devonfw.github.io/1.0/production-line.wiki/initialize-instance-manually.html @@ -0,0 +1,606 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Initialize Instance Template for Production Line

+
+ +
+
+
+

Technical User Setup

+
+
+

In order to configure the services, we need technical users for the following components:

+
+
+
    +
  • +

    Gitlab

    +
  • +
  • +

    Nexus3

    +
  • +
  • +

    SonarQube

    +
  • +
+
+
+

The following sections describe how to configure the components to enable technical users and tokens.

+
+
+
+
+

Technical Gitlab User and settings

+
+
+

The creation of the GitLab Group and Project will require a private GitLab token which has to be created manually. The token can be obtained like this:

+
+
+
    +
  1. +

    Go to your Profile in Gitlab

    +
  2. +
+
+
+
+500 +
+
+
+
    +
  1. +

    Next click on the pen icon

    +
  2. +
+
+
+
+500 +
+
+
+
    +
  1. +

    On the left menu choose Access Tokens and put token name and check fields like below

    +
  2. +
+
+
+
+600 +
+
+
+
    +
  1. +

    Click "Create personal access token", you should receive notification about created token and token string. Copy the token string.

    +
  2. +
+
+
+
+600 +
+
+
+
+
+

==

+
+
+

The GitLab API user needs to have API access and the rights to create a new group. To set this permission follow the next steps: +== ==

+
+
+
    +
  1. +

    Enter the Admin control panel

    +
  2. +
  3. +

    Select 'Users'

    +
  4. +
  5. +

    Select the user(s) in question and click 'Edit'

    +
  6. +
  7. +

    Scroll down to 'Access' and un-tick 'Can Create Group'

    +
  8. +
+
+
+
+
+

Technical Nexus3 user and settings

+
+
+

Nexus3 is used to store build artifacts such as the frontend and the backend. In the future it might also be used to store docker images of MTS.

+
+
+
+
+

== Create the technical Nexus User

+
+
+
    +
  1. +

    The nexus3-api user should be created in section Administration

    +
  2. +
+
+
+
+600 +
+
+
+
    +
  1. +

    New user should have added roles: Admins, nx-admins

    +
  2. +
+
+
+
+600 +
+
+
+
+
+

== Add it as credential in Jenkins

+
+
+

The credentials of the 'nexus-api' user should be added to Jenkins +Jenkins → Credentials → System → Global credentials (unrestricted) → Add Credentials

+
+
+
+800 +
+
+
+
+
+

== Add the user to maven global settings in Jenkins:

+
+
+
    +
  1. +

    Jenkins → Settings → Managed Files → Edit Global Maven Settings XML

    +
  2. +
+
+
+
+600 +
+
+
+
    +
  1. +

    Add the credential to the settings xml, use the ID "pl-nexus"

    +
  2. +
+
+
+
+600 +
+
+
+
+
+

Jenkins Preparation

+
+
+
    +
  • +

    Install required plugins:
    +HTTP Request Plugin
    +Allure Jenkins Plugin

    +
  • +
  • +

    In Jenkins Global Tool Configuration configure Allure Commandline and Maven like

    +
  • +
+
+
+
+500 +
+
+
+
+
+

== Sonarqube Server configuration in Jenkins

+
+
+

SonarQube must be configured in Jenkins, so that we can easily use the SonarQube server in our builds.

+
+
+

Go to Jenkins → Settings → Configuration → SonarQube Servers

+
+
+

Add the following data

+
+
+
+600 +
+
+
+
+
+

Technical SonarQube user and settings

+
+ +
+
+
+

== User Token for SonarQube

+
+
+
    +
  • +

    Go to SonarQube.

    +
  • +
  • +

    Go to your account.

    +
  • +
+
+
+
+600 +
+
+
+
    +
  • +

    Go to Security tab.

    +
  • +
  • +

    Generate the token.

    +
  • +
+
+
+
+
+

== Install SonarQube plugins from Marketplace

+
+
+

In order to analyze devonfw projects in SonarQube properly, you need to install manually some plugins. To do that you only need to open your SonarQube website and go to Administration → Marketplace. Then, you need to install the following plugins:

+
+
+
    +
  • +

    Checkstyle

    +
  • +
  • +

    Cobertura

    +
  • +
+
+
+
+
+

== SonarQube Webhook to inform Jenkins

+
+
+

A part of the Build Job will ask SonarQube if the quality gate has been passed. For this step a so called "webhook" has to be configured in SonarQube. To do so,

+
+
+
    +
  1. +

    Go to SonarQube

    +
  2. +
  3. +

    Select 'Administration'

    +
  4. +
  5. +

    Select 'Configuration', 'General Settings' and select 'Webhooks' in the left menu

    +
  6. +
  7. +

    Add the following webhook

    +
  8. +
+
+
+
+SonarqubeWebhook +
+
+
+
    +
  1. +

    Press 'Save'

    +
  2. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/production-line.wiki/initialize-instance.html b/docs/devonfw.github.io/1.0/production-line.wiki/initialize-instance.html new file mode 100644 index 00000000..d090081d --- /dev/null +++ b/docs/devonfw.github.io/1.0/production-line.wiki/initialize-instance.html @@ -0,0 +1,708 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Initialize Instance Template for Production Line

+
+ +
+
+
+

Introduction

+
+
+

Production Line Templates allow you to create/configure certain tasks. In order to work properly, Production Line Templates need some previous configuration. You can do it manually or by executing the Initialize Instance Template.

+
+
+
+
+

Prerequisites

+
+
+

In order to be able to start this template, you need:

+
+
+ +
+
+

Production Line provides by default the Shared Lib and the plugins, so no actions are required. The only thing that you need to do manually is the creation of the service account.

+
+
+

In order to create the service account you need:

+
+
+
    +
  1. +

    Open the LAM

    +
  2. +
  3. +

    Press the New User button

    +
    +
    +create account +
    +
    +
  4. +
  5. +

    Enter the required parameters

    +
    +
    +create account2 +
    +
    +
  6. +
  7. +

    Change to Unix tab and enter the required parameters

    +
    +
    +create account3 +
    +
    +
    +

    The user name will be used later in order to login. As this user will do some configuration changes, its primary group must be admins.

    +
    +
  8. +
  9. +

    Set a password for the user.

    +
    +
    +create account4 +
    +
    +
  10. +
  11. +

    Press the Save button

    +
    +
    +create account5 +
    +
    +
  12. +
+
+
+
+
+

Template

+
+
+

In order to execute this template, you need to add it into Jenkins manually. In order to do that, you can follow this guide

+
+
+
+
+

Parameters

+
+
+

The required parameters are:

+
+
+
    +
  • +

    svcaccount: The service account created as prerequisite. It must be added as a Jenkins credential.

    +
  • +
  • +

    installDeploymentPlugins: With this parameter you can install extra plugins into Jenkins. Also, you can add extra template utils.

    +
  • +
+
+
+
+
+

Execution

+
+
+
    +
  1. +

    Press the Build with Parameters button

    +
  2. +
  3. +

    Insert the parameters.

    +
  4. +
  5. +

    If the service account is not added as credential, please add a new entry.

    +
  6. +
  7. +

    Press the Build button.

    +
  8. +
  9. +

    Wait until the pipeline ends.

    +
  10. +
+
+
+ + + + + +
+ + +if any plugin is installed, Jenkins will be restarted and the pipeline will fail. You need to execute it again with the same parameters. +
+
+
+
+initialize instance +
+
+
+
+initialize instance2 +
+
+
+
+
+

The result

+
+
+
    +
  • +

    Install plugins stage

    +
    +

    In this stage the following plugins will be installed:

    +
    +
    + +
    +
  • +
  • +

    Configure SonarQube stage

    +
    +

    This stage is responsible for configuring the Jenkins-SonarQube integration. It will:

    +
    +
    +
      +
    • +

      Generate a SonarQube API token for the user Admin

      +
    • +
    • +

      Register the token in Jenkins as credential with the id sonar-token

      +
    • +
    • +

      Add the SonarQube server in Jenkins → Manage Jenkins → Configure System → SonarQube servers. The values used are:

      +
      +
        +
      • +

        Name: SonarQube

        +
      • +
      • +

        Server URL: http://sonarqube-core:9000/sonarqube (default Production Line SonarQube URL)

        +
      • +
      • +

        Server authentication token: sonar-token (generated in the previous step)

        +
      • +
      +
      +
    • +
    • +

      Add a webhook in SonarQube:

      +
      + +
      +
    • +
    • +

      Install the following SonarQube plugins:

      +
      +
        +
      • +

        java

        +
      • +
      • +

        javascript

        +
      • +
      • +

        typescript

        +
      • +
      • +

        csharp

        +
      • +
      • +

        web

        +
      • +
      • +

        cssfamily

        +
      • +
      • +

        jacoco

        +
      • +
      • +

        checkstyle

        +
      • +
      • +

        cobertura

        +
      • +
      • +

        smells

        +
      • +
      • +

        findbugs

        +
      • +
      • +

        scmgit

        +
      • +
      • +

        ansible

        +
      • +
      • +

        sonar-dependency-check-plugin

        +
      • +
      +
      +
    • +
    • +

      Restart the SonarQube server in order to enable the plugins installed.

      +
    • +
    +
    +
  • +
  • +

    Create UTIL templates stage

    +
    +

    Some templates need Jenkins to have certain plugins installed. If the plugins are not installed, the template will fail. In order to prevent this behaviour, we use the initialize-instance to install all plugins required by other templates. Then, we create other templates that will use the plugins installed by initialize-instance. In this stage we create some template utils to configure Jenkins after all required plugins are installed. Those templates are:

    +
    +
    + +
    +
  • +
  • +

    Configure Nexus 3 stage

    +
    +

    This stage will configure the Production Line Nexus3

    +
    +
    +
      +
    • +

      Enable anonymous access

      +
    • +
    • +

      Add an internal user to download/upload docker images

      +
      +
        +
      • +

        username: nexus-api

        +
      • +
      • +

        password: The same as the service account created in LAM

        +
      • +
      +
      +
    • +
    • +

      Create the maven repositories: maven-central, maven-snapshots, maven-release, maven-plugin

      +
    • +
    • +

      Create the docker repository

      +
    • +
    • +

      Create the npmjs repositories: npmjs, npm-registry, npm

      +
    • +
    • +

      Create in Jenkins a new credential with the id nexus-api with the username and password created in nexus3

      +
    • +
    +
    +
  • +
  • +

    Configure Maven File stage

    +
    +

    This stage adds the nexus3 credentials created in the previous stage to the maven global configuration file with the id pl-nexus

    +
    +
    +
    +maven config +
    +
    +
  • +
+
+
+

Now, you are able to execute other templates adding them manually or using the Production Line Market Place.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/production-line.wiki/install-sonar-plugin.html b/docs/devonfw.github.io/1.0/production-line.wiki/install-sonar-plugin.html new file mode 100644 index 00000000..6830b40f --- /dev/null +++ b/docs/devonfw.github.io/1.0/production-line.wiki/install-sonar-plugin.html @@ -0,0 +1,396 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Install SonarQube Plugin

+
+ +
+
+
+

Introduction

+
+
+

SonarQube can extend its behaviour by adding plugins. Some of them can be installed by using the SonarQube Marketplace; others can be installed by copying the .jar into the SonarQube plugins folder.

+
+
+
+
+

Overview

+
+
+

This template will help you to install SonarQube plugins by copying the .jar into the SonarQube plugins folder. As you do not have access to the Production Line volumes, it will help you when you want to install a plugin that is not installed in the SonarQube Marketplace.

+
+
+

It will:

+
+
+
    +
  • +

    Download the .jar file from a provided URL.

    +
  • +
  • +

    Copy the .jar file to the plugins folder.

    +
  • +
  • +

    Restart the SonarQube server in order to enable the plugin.

    +
  • +
+
+
+ + + + + +
+ + +this template only works in a Production Line instance. +
+
+
+
+
+

Template

+
+
+

This template will be automatically created in your jenkins after executing the Initialize_Instance template inside the UTILS folder with the name Install_SonarQube_Plugin.

+
+
+

For manual creation see: How to add a Template

+
+
+ + + + + +
+ + +This template needs the devonfw Production Line Shared Lib +
+
+
+
+
+

Parameters

+
+
+

The only parameter required is the plugin download URL.

+
+
+
+
+

Execution

+
+
+
    +
  1. +

    Press the Build with Parameters button

    +
  2. +
  3. +

    Insert the plugin download URL. Example: https://github.com/dependency-check/dependency-check-sonar-plugin/releases/download/1.2.6/sonar-dependency-check-plugin-1.2.6.jar

    +
  4. +
  5. +

    Press the Build button.

    +
  6. +
  7. +

    Wait until the pipeline ends.

    +
  8. +
+
+
+
+build install sonar plugin +
+
+
+
+build install sonar plugin2 +
+
+
+

After the execution, when the SonarQube is restarted, you can check that your plugin is installed visiting the Marketplace.

+
+
+
+sonar plugin +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/production-line.wiki/master-production-line.html b/docs/devonfw.github.io/1.0/production-line.wiki/master-production-line.html new file mode 100644 index 00000000..24ae9a7f --- /dev/null +++ b/docs/devonfw.github.io/1.0/production-line.wiki/master-production-line.html @@ -0,0 +1,273 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Production Line Templates

+
+
+

production-line

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/production-line.wiki/mrchecker.html b/docs/devonfw.github.io/1.0/production-line.wiki/mrchecker.html new file mode 100644 index 00000000..91659282 --- /dev/null +++ b/docs/devonfw.github.io/1.0/production-line.wiki/mrchecker.html @@ -0,0 +1,521 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

MrChecker under ProductionLine

+
+
+
+500 +
+
+
+
+
+

Introduction

+
+
+

MrChecker is an end-to-end automation test framework written in Java. It has been released +by devonfw but it is not supported by the devonfw core team.

+
+
+

This framework consists of eight test modules:

+
+
+
    +
  • +

    Core test module

    +
  • +
  • +

    Selenium test module

    +
  • +
  • +

    WebAPI test module

    +
  • +
  • +

    Security test module

    +
  • +
  • +

    DataBase test module

    +
  • +
  • +

    Standalone test module

    +
  • +
  • +

    DevOps module

    +
  • +
+
+
+
+
+

Prerequisites

+
+
+

To be able to run Jenkins MrChecker job under ProductionLine you need to configure below settings in Jenkins and Gitlab

+
+
+
    +
  • +

    Jenkins

    +
    +
      +
    • +

      Add Jenkins Shared Library using documentation https://github.com/devonfw/production-line-shared-lib

      +
    • +
    • +

      Install required plugins:
      +HTTP Request Plugin
      +Allure Jenkins Plugin

      +
    • +
    • +

      In Jenkins Global Tool Configuration configure Allure Commandline and Maven like

      +
    • +
    +
    +
  • +
+
+
+
+500 +
+
+
+
+500 +
+
+
+
    +
  • +

    Gitlab

    +
    +
      +
    • +

      Generate User Private Token
      +Go to your Profile in Gitlab

      +
    • +
    +
    +
  • +
+
+
+
+500 +
+
+
+

Next click on the pen icon

+
+
+
+500 +
+
+
+

On the left menu choose Access Tokens and put token name and check fields like below

+
+
+
+600 +
+
+
+

Click "Create personal access token", you should receive notification about created token and token string. Copy the token string.

+
+
+
+600 +
+
+
+

The GitLab API user needs to have API access and the rights to create a new group. To set this permission follow the next steps:

+
+
+
    +
  • +

    Enter the Admin control panel

    +
  • +
  • +

    Select 'Users'

    +
  • +
  • +

    Select the user(s) in question and click 'Edit'

    +
  • +
  • +

    Scroll down to 'Access' and un-tick 'Can Create Group'

    +
  • +
+
+
+
+
+

How to insert the Template

+
+
+
    +
  • +

    Create new Jenkins Pipeline Job

    +
  • +
  • +

    In job configuration check "This project is parametrized", choose "String parameter" and provide
    +Name: GITLAB_USER_PRIVATE_TOKEN
    +Default Value: <GITLAB_TOKEN_STRING_YOU_JUST_CREATED>

    +
  • +
  • +

    Add the template
    +The guide on how to add a template to your Jenkins can be found in the root directory of the template repository: https://github.com/devonfw/production-line.git

    +
  • +
  • +

    Save job configuration

    +
  • +
+
+
+
+
+

How to run the Template

+
+
+
    +
  • +

    Build the job

    +
  • +
  • +

    After job ends with success wait few seconds for repository import to Gitlab

    +
  • +
  • +

    As output of the build a new Jenkins Pipeline job is created with the name "MrChecker_Example_Tests"; also a new repository "Mrchecker" will be created in Gitlab

    +
  • +
  • +

    Build "MrChecker_Example_Tests" job

    +
  • +
+
+
+
+500 +
+
+
+
+
+

Expected Result

+
+
+
    +
  • +

    As output of this job Allure Report will be generated

    +
  • +
+
+
+
+500 +
+
+
+
+
+

Summary

+
+
+

Using this documentation you should be able to run the MrChecker test framework on ProductionLine.
+MrChecker offers two projects at your disposal:

+
+
+
    +
  • +

    First project "mrchecker-app-under-test/pipelines/CI/Jenkinsfile_ProductionLine.groovy" has all tests included in the project and is the default project used in "MrChecker_Example_Tests" job.

    +
  • +
  • +

    Second project "mrchecker-app-under-testboilerplate/pipelines/CI/Jenkinsfile_ProductionLine.groovy" here tests are not included, therefore if you choose to run "MrChecker_Example_Tests" job Allure report will be not generated.

    +
  • +
+
+
+

To change the project change script path at the bottom of the "MrChecker_Example_Tests" job.

+
+
+
+500 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/production-line.wiki/openshift-configuration.html b/docs/devonfw.github.io/1.0/production-line.wiki/openshift-configuration.html new file mode 100644 index 00000000..2c6e7de8 --- /dev/null +++ b/docs/devonfw.github.io/1.0/production-line.wiki/openshift-configuration.html @@ -0,0 +1,445 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Docker Configuration

+
+ +
+
+
+

Introduction

+
+
+

OpenShift is a docker container orchestrator built on top of Kubernetes.

+
+
+
+
+

Overview

+
+
+

This template allows you to configure Jenkins in order to work with OpenShift.

+
+
+

It will:

+
+
+
    +
  • +

    Add OpenShift client as custom tool.

    +
  • +
  • +

    Configure an OpenShift cluster to work with.

    +
  • +
+
+
+
+
+

Prerequisites

+
+
+

In order to execute this template, you need the following plugins installed in your Jenkins:

+
+ +
+ + + + + +
+ + +The initialize instance template will install all plugins if you select Openshift or Docker+Openshift in the installDeploymentPlugins parameter +
+
+
+
+
+

Template

+
+
+

This template will be automatically created in your jenkins after executing the Initialize_Instance template inside the UTILS folder with the name Openshift_Configuration.

+
+
+

For manual creation see: How to add a Template

+
+
+ + + + + +
+ + +This template needs the devonfw Production Line Shared Lib +
+
+
+
+
+

Parameters

+
+
+

The required parameters are:

+
+
+
    +
  • +

    ocName: The name of the OpenShift connection. You can define multiple OpenShift connections by changing the name.

    +
  • +
  • +

    ocUrl: The OpenShift URL.

    +
  • +
  • +

    ocProject: The OpenShift Project.

    +
  • +
  • +

    ocToken: The OpenShift token. In order to have a long-term token, this token should be a service account token.

    +
  • +
+
+
+
+
+

Execution

+
+
+
    +
  1. +

    Press the Build with Parameters button

    +
  2. +
  3. +

    Insert the parameters.

    +
  4. +
  5. +

    If the OpenShift token is not added as credential, please add a new entry.

    +
  6. +
  7. +

    Press the Build button.

    +
  8. +
  9. +

    Wait until the pipeline ends.

    +
  10. +
+
+
+ + + + + +
+ + +If a cluster already exists with the provided name, it will not modify anything. +
+
+
+
+openshift configuration +
+
+
+
+openshift configuration2 +
+
+
+
+openshift configuration3 +
+
+
+

You can add more clusters by executing the template again or in Jenkins → Manage Jenkins → Configure System

+
+
+
+openshift clusters +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/production-line.wiki/troubleshoot.html b/docs/devonfw.github.io/1.0/production-line.wiki/troubleshoot.html new file mode 100644 index 00000000..187d0840 --- /dev/null +++ b/docs/devonfw.github.io/1.0/production-line.wiki/troubleshoot.html @@ -0,0 +1,335 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Troubleshooting

+
+ +
+
+
+

Introduction

+
+
+

In this section you can find the solution of the most common errors using the templates.

+
+
+
+
+

Template startup failed

+
+
+

Sometimes, when you execute a template you will see an error like:

+
+
+
+
+

==

+
+
+

org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed: +/home/pl/jobs/devon4j-mts_PL_Template/builds/8/libs/ProductionLineTemplateLib/src/com/capgemini/productionline/configuration/JenkinsConfiguration.groovy: 38: unable to resolve class ru.yandex.qatools.allure.jenkins.tools.AllureCommandlineInstaller + @ line 38, column 1. + import ru.yandex.qatools.allure.jenkins.tools.AllureCommandlineInstaller +== ==

+
+
+

In most of our templates we use the Production Line Shared Lib. In order to work, the Shared Lib needs some plugins installed in your Jenkins, so to solve this error you need to install those plugins manually using the Manage Plugins.

+
+
+

In this specific case the problem is the Allure plugin is not installed. Just install it, restart Jenkins and execute again the template.

+
+
+
+
+

Build Now instead Build with Parameters

+
+
+

Sometimes, when you go to execute a template, mostly the first time, the Build Now button is available instead of the Build with Parameters button. The root cause of this problem is that the parameters are defined in the Jenkinsfile and, as you have never executed it before, Jenkins does not have the Jenkinsfile yet. For this reason it does not know the required parameters.

+
+
+

To solve this problem, you only need to press the Build Now button. Then, the execution will start and fail. It’s not a problem, as you did not enter any parameters. Now you only need to reload the page and the Build with Parameters button will be available.

+
+
+
+
+

Error at Install plugins stage

+
+
+

In some templates you can see the Install plugins stage. In this stage some plugins required for the template will be installed. In order to properly load the plugins, Jenkins needs to be restarted; for that reason the pipeline fails on that stage. It is not a bug or problem, so do not worry about that. You only need to wait until Jenkins is restarted and execute the template again.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/Home.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/Home.html new file mode 100644 index 00000000..e9dcb1bb --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/Home.html @@ -0,0 +1,302 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

What is devonfw shop floor?

+
+
+
+devonfw shop floor +
+
+
+

devonfw shop floor is a platform to industrialize continuous delivery and continuous integration processes.

+
+
+

devonfw shop floor is a set of documentation, tools and methodologies used to configure the provisioning, development and uat environments used in your projects. devonfw shop floor allows the administrators of those environments to apply CI/CD operations and enables automated application deployment.

+
+
+

devonfw shop floor is mainly oriented to configure the provisioning environment provided by Production Line and deploy applications on an OpenShift cluster. In the cases where Production Line or OpenShift cluster are not available, there will be alternatives to achieve similar goals.

+
+
+

The devonfw shop floor 4 OpenShift is a solution based on the experience of priming devonfw for OpenShift by RedHat.

+
+
+
+primed +
+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/TODO-dsf-provisioning-dsf4openshift.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/TODO-dsf-provisioning-dsf4openshift.html new file mode 100644 index 00000000..9d2d7b45 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/TODO-dsf-provisioning-dsf4openshift.html @@ -0,0 +1,278 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

TODO

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/devonfw-shop-floor-doc.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/devonfw-shop-floor-doc.html new file mode 100644 index 00000000..f838e207 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/devonfw-shop-floor-doc.html @@ -0,0 +1,3312 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw shop floor ${project.version}

+
+
+

The devonfw community +${project.version}, ${buildtime}: Subtitle {doctitle} +:description: comprehensive documentation for the devonfw shop floor. +:sectnums: +:toc: +:toc-title: Table of Contents +:toclevels: 3 +:imagesdir: ./ +:footnote: test footnote +:productname: test productname

+
+ +
+
+
+

What is devonfw shop floor?

+
+
+
+devonfw shop floor +
+
+
+

devonfw shop floor is a platform to industrialize continuous delivery and continuous integration processes.

+
+
+

devonfw shop floor is a set of documentation, tools and methodologies used to configure the provisioning, development and uat environments used in your projects. devonfw shop floor allows the administrators of those environments to apply CI/CD operations and enables automated application deployment.

+
+
+

devonfw shop floor is mainly oriented to configure the provisioning environment provided by Production Line and deploy applications on an OpenShift cluster. In the cases where Production Line or OpenShift cluster are not available, there will be alternatives to achieve similar goals.

+
+
+

The devonfw shop floor 4 OpenShift is a solution based on the experience of priming devonfw for OpenShift by RedHat.

+
+
+
+primed +
+
+ +
+

How to use it

+
+

This is the documentation about shop floor and its different tools. Here you are going to learn how to create new projects, so that they can include continuous integration and continuous delivery processes, and be deployed automatically in different environments.

+
+
+
+

Prerequisites - Provisioning environment

+
+

To start working you need to have some services running in your provisioning environment, such as Jenkins (automation server), GitLab (git repository), SonarQube (program analysis), Nexus (software repository) or similar.

+
+
+

To host those services we recommend to have a Production Line instance but you can use other platforms. Here is the list for the different options:

+
+
+ +
+
+
+

Step 1 - Configuration and services integration

+
+

The first step is configuring your services and integrate them with jenkins. Here you have an example about how to manually configure the next services:

+
+
+ +
+
+
+

Step 2 - Create the project

+ +
+
+

Create and integrate git repository

+
+

The second step is to create our git repository and integrate it with Jenkins.

+
+
+

Here you can find a manual guide about how to do it:

+
+
+ +
+
+
+

Start new devonfw project

+
+

It is time to create your devonfw project:

+
+
+

You can find all that you need about how to create a new devonfw project

+
+
+
+

cicd configuration

+
+

Now you need to add cicd files in your project.

+
+
+
+

== Manual configuration

+ +
+
+

== Jenkinsfile

+
+

Here you can find all that you need to know to do your Jenkinsfile.

+
+
+
+

== Dockerfile

+
+

Here you can find all that you need to know to do your Dockerfile.

+
+
+
+

== Automatic configuration

+ +
+
+

== cicdgen

+
+

If you are using production line for provisioning you could use cicdgen to configure automatically almost everything explained in the manual configuration. To do it see the cicdgen documentation.

+
+
+
+

Step 3 - Deployment

+
+

The third step is to configure our deployment environment. Here is the list of the different options:

+
+
+ +
+
+
+

Step 4 - Monitoring

+
+

Here you can find information about tools for monitoring:

+
+
+
    +
  • +

    build monitor view for Jenkins. With this tool you will be able to see in real time what is the state of your Jenkins pipelines.

    +
  • +
+
+
+
+
+
+

Provisioning environments

+
+ +
+

Production Line provisioning environment

+
+

pl

+
+
+

The Production Line Project is a set of server-side collaboration tools for Capgemini engagements. It has been developed for supporting project engagements with individual tools like issue tracking, continuous integration, continuous deployment, documentation, binary storage and much more!

+
+
+

For additional information use the official documentation.

+
+
+
+

How to obtain your Production Line

+
+

You can order your Production Line environment instance following the official guide. Remember that you need to order at least the next tools: + * Jenkins + * GitLab + * SonarQube + * Nexus

+
+
+
+

Back.

+
+
+
+

dsf4docker provisioning environment

+
+
+docker +
+
+
+
+

Architecture overview

+
+

dsf docker arch

+
+
+
+

Prerequisite

+
+

To use dsf4docker provisioning environment you need a remote server and you must clone or download devonfw shop floor.

+
+
+
+

How to use it

+
+

Navigate to ./devonfw-shop-floor/dsf4docker/environment and here you can find one script to install it, and another one to uninstall it.

+
+
+
+

Install devonfw shop floor 4 Docker

+
+

There is an installation script to do so, so the complete installation should be completed by running it. Make sure this script has execution permissions in the Docker Host:

+
+
+
+
 chmod +x dsf4docker-install.sh
+ sudo ./dsf4docker-install.sh
+
+
+
+

This script, besides the container "installation" itself, will also adapt the docker-compose.yml file to your host (using sed to replace the IP_ADDRESS word of the file for your real Docker Host’s IP address).

+
+
+
+

Uninstall devonfw shop floor 4 Docker

+
+

As well as for the installation, if we want to remove everything concerning devonfw shop floor 4 Docker from our Docker Host, we’ll run this script:

+
+
+
+
 chmod +x dsf4docker-uninstall.sh
+ sudo ./dsf4docker-uninstall.sh
+
+
+
+
+

Troubleshooting

+
+

When trying to execute the install or uninstall .sh there may be some problems related to the windows/linux format file, so if you see this error log while executing the script:

+
+
+
+
./dsf4docker-install.sh: line 16: $'\r': command not found
+
+
+
+

You need to do a file conversion with this command:

+
+
+
+
dos2unix dsf4docker-install.sh
+
+
+
+

or

+
+
+
+
dos2unix dsf4docker-uninstall.sh
+
+
+
+
+

A little history

+
+

The Docker part of the shop floor is created based on the experience of the environment setup of the project Mirabaud Advisory, and intended to be updated to latest versions. Mirabaud Advisory is a web service developed with devonfw (Java) that, alongside its own implementation, it needed an environment both for the team to follow CICD rules through their 1-week-long sprints and for the client (Mirabaud) to check the already done work.

+
+
+

There is a practical experience about the Mirabaud Case.

+
+
+
+

Back.

+
+
+
+
+
+

Configuration and services integration

+
+
+

Nexus Configuration

+
+

In this document you will see how you can configure Nexus repository and how to integrate it with jenkins.

+
+
+
+

Login in Nexus

+
+

The first time you enter in Nexus you need to log in with the user 'admin' and the password that is inside the path: /volumes/nexus/nexus-data +Then you can change that password and create a new one.

+
+
+
+

Prerequisites

+ +
+
+

Repositories

+
+

You need to have one repository for snapshots, another for releases and another one for release-candidates. Normally you use maven2 (hosted) repositories and if you are going to use a docker registry, you need docker (hosted) too.

+
+
+

To create a repository in Nexus go to the administration clicking on the gear icon at top menu bar. Then on the left menu click on Repositories and press the Create repository button.

+
+
+
+nexus create repository +
+
+
+

Now you must choose the type of the repository and configure it. This is an example for Snapshot:

+
+
+
+nexus create repository form +
+
+
+
+

Create user to upload/download content

+
+

Once you have the repositories, you need a user to upload/download content. To do it go to the administration clicking on the gear icon at top menu bar. Then on the left menu click on Users and press the Create local user button.

+
+
+
+nexus create user +
+
+
+

Now you need to fill a form like this:

+
+
+
+nexus create user form +
+
+
+
+

Jenkins integration

+
+

To use Nexus in our pipelines you need to configure Jenkins.

+
+
+
+

Customize jenkins

+
+

The first time you enter jenkins, you are asked for the plugins to be installed. +We select install suggested plugins and later we can add the plugins that we need depending on the project necessities.

+
+
+
+plugins jenkins +
+
+
+

Then we need to create our first admin user, we can do it like this:

+
+
+
+jenkins first admin user +
+
+
+

The next step is the jenkins URL:

+
+
+
+jenkins url +
+
+
+

Your jenkins setup is ready!

+
+
+
+

Add nexus user credentials

+
+

First of all you need to add the user created in the step before to Jenkins. To do it (on the left menu) click on Credentials, then on System. Now you could access to Global credentials (unrestricted).

+
+
+
+nexus jenkins credentials +
+
+
+

Enter on it and you could see a button on the left to Add credentials. Click on it and fill a form like this:

+
+
+
+nexus jenkins credentials form +
+
+
+
+

Add the nexus user to maven global settings

+
+

In order to do this, you will need the Config File Provider plugin, so we need to download it. Go to Jenkins→Manage jenkins→Manage plugins and "available" tab and search for it:

+
+
+
+jenkins config fp +
+
+
+

Click on "Download now and install after restart".

+
+
+

Now you need to go to Manage Jenkins clicking on left menu and enter in Managed files.

+
+
+

Click on Add a new config/Global Maven settings.xml, change the id for a new one more readable:

+
+
+
+jenkins maven settings +
+
+
+

Then click on "Submit"

+
+
+
+jenkins global maven +
+
+
+

Edit the Global Maven settings.xml to add your nexus repositories credentials(the ones you created before) as you could see in the next image:

+
+
+
+nexus jenkins global maven form +
+
+
+

And you are done.

+
+
+
+

SonarQube Configuration

+
+

To use SonarQube you need to use a token to connect, and to know the results of the analysis you need a webhook. Also, you need to install and configure SonarQube in Jenkins.

+
+
+
+

Generate user token

+
+

To generate the user token, go to your account clicking in the left icon on the top menu bar.

+
+
+ + + + + +
+ + +If you don’t have any account, you can use the admin/admin user/pass +
+
+
+
+sonarqube administration +
+
+
+

Go to security tab and generate the token.

+
+
+
+sonarqube token +
+
+
+
+

Webhook

+
+

When you execute our SonarQube scanner in our pipeline job, you need to ask SonarQube if the quality gate has been passed. To do it you need to create a webhook.

+
+
+

Go to administration clicking the option on the top bar menu and select the tab for Configuration.

+
+
+

Then search in the left menu to go to webhook section and create your webhook.

+
+
+

An example for Production Line:

+
+
+
+sonarqube webhook +
+
+
+
+

Jenkins integration

+
+

To use SonarQube in our pipelines you need to configure Jenkins to integrate SonarQube.

+
+
+
+

SonarQube Scanner

+
+

First, you need to configure the scanner. Go to Manage Jenkins clicking on left menu and enter in Global Tool Configuration.

+
+
+

Go to SonarQube Scanner section and add a new SonarQube scanner like this.

+
+
+
+sonarqube jenkins scanner +
+
+
+
+

SonarQube Server

+
+

Now you need to configure where our SonarQube server is, using the user token that you created before. Go to Manage Jenkins clicking on left menu and enter in Configure System.

+
+
+

For example, in Production Line the server is the next:

+
+
+
+sonarqube jenkins server +
+
+
+ + + + + +
+ + +Remember, the token was created at the beginning of this SonarQube configuration. +
+
+
+
+

SonarQube configuration

+
+

Now is time to configure your sonar in order to measure the quality of your code. To do it, please follow the official documentation about our plugins and Quality Gates and Profiles here.

+
+
+
+

How to ignore files

+
+

Usually the developers need to ignore some files from Sonar analysis. To do that, they must add the next line as a parameter of the sonar execution to their Jenkinsfile in the SonarQube code analysis step.

+
+
+
+
-Dsonar.exclusions='**/*.spec.ts, **/*.model.ts, **/*mock.ts'
+
+
+
+
+
+
+

Create project

+
+ +
+
+
+

Create and integrate git repository

+
+
+

include::dsf-configure-gitlab.adoc[leveloffset=2].

+
+
+
+
+

start new devonfw project

+
+
+

It is time to create your devonfw project:

+
+
+ +
+
+
+
+

cicd configuration

+
+ +
+
+
+

== Manual configuration

+
+ +
+
+
+

== Jenkinsfile

+
+
+

include::dsf-configure-jenkins.adoc[leveloffset=2].

+
+
+
+
+

Deployment

+
+
+

include::dsf-deployment-dsf4openshift.adoc[leveloffset=2].

+
+
+
+
+

Annexes

+
+ +
+
+
+

Custom Services

+
+ +
+
+
+

== BitBucket

+
+
+

[Under construction]

+
+
+
+

The purpose of the present document is to provide the basic steps carried out to setup a BitBucket server in OpenShift.

+
+
+
Introduction
+
+

BitBucket is the Atlassian tool that extends the Git functionality, by adding integration with JIRA, Confluence, or Trello, as well as incorporates extra features for security or management of user accounts (See BitBucket).

+
+
+

BitBucket server is the Atlassian tool that runs the BitBucket services (See BitBucket server).

+
+
+

The followed approach has been not using command line, but OpenShift Web Console, by deploying the Docker image atlassian/bitbucket-server (available in Docker Hub) in the existing project Deployment.

+
+
+

The procedure below exposed consists basically in three main steps:

+
+
+
    +
  1. +

    Deploy the BitBucket server image (from OpenShift web console)

    +
  2. +
  3. +

    Add a route for the external traffic (from OpenShift web console)

    +
  4. +
  5. +

    Configure the BitBucket server (from BitBucket server web console)

    +
  6. +
+
+
+
+
Prerequisites
+
+
    +
  • +

    OpenShift up & running

    +
  • +
  • +

    Atlassian account (with personal account key). Not required for OpenShift, but for the initial BitBucket server configuration.

    +
  • +
+
+
+
+
Procedure
+ +
+
+
Step 0: Log into our OpenShift Web console
+
+
+step0 +
+
+
+
+
Step 1: Get into Development project
+
+
+] +
+
+
+
+
Step 2.1: Deploy a new image to the project
+
+
+step2.1 +
+
+
+
+ +
+

Image name: atlassian/bitbucket-server

+
+
+
+step2.2 +
+
+
+
+
Step 2.3: Leave by the moment the default config. since it is enough for the basic setup. Press Create
+
+
+step2.3 +
+
+
+
+
Step 2.4: Copy the oc commands in case it is required to work via command line, and Go to overview
+
+
+step2.4 +
+
+
+
+
Step 2.5: Wait until OpenShift deploys and starts up the image. All the info will be available.
+
+

Please notice that there are no pre-configured routes, hence the application is not accessible from outside the cluster.

+
+
+
+step2.5 +
+
+
+
+
Step 3: Create a route in order for the application to be accessible from outside the cluster (external traffic). Press Create
+
+

Please notice that there are different fields that can be specified (hostname, port). If required, the value of those fields can be modified later.

+
+
+
+step3a +
+
+
+

Leave by the moment the default config. as it is enough for the basic setup.

+
+
+

The route for external traffic is now available.

+
+
+
+step3b +
+
+
+
+

Now the BitBucket server container is up & running in our cluster.

+
+
+

The below steps correspond to the basic configuration of our BitBucket server.

+
+
+
+
+ + +
+
+
Step 4.2: Leave by the moment the Internal database since it is enough for the basic setup (and it can be modified later), and click Next
+
+
+step4.2 +
+
+
+
+
Step 4.3: Select the evaluation license, and click I have an account
+
+
+step4.3 +
+
+
+
+
Step 4.4: Select the option Bitbucket (Server)
+
+
+step4.4 +
+
+
+
+
Step 4.5: Introduce your organization (Capgemini), and click Generate License
+ +
+
+
Step 4.6: Confirm that you want to install the license on the BitBucket server
+
+
+step4.6 +
+
+
+

The license key will be automatically generated. Click Next

+
+
+
+
Step 4.7: Introduce the details of the Administration account.
+
+

Since our BitBucket server is not going to be integrated with JIRA, click on Go to Bitbucket. The integration with JIRA can be configured later.

+
+
+
+step4.7 +
+
+
+
+
Step 4.8: Log in with the admin account that has been just created
+ +
+
+
DONE !!
+
+
+done +
+
+
+

[Under construction]

+
+
+
+

The purpose of the present document is to provide the basic steps carried out to improve the configuration of BitBucket server in OpenShift.

+
+
+

The improved configuration consists on:

+
+
+
    +
  • +

    Persistent Volume Claims

    +
  • +
  • +

    Health Checks (pending to be completed)

    +
  • +
+
+
+
+
Persistent Volume Claims.
+
+

Please notice that the BitBucket server container does not use persistent volume claims by default, which means that the data (e.g.: BitBucket server config.) will be lost from one deployment to another.

+
+
+
+pvc0 +
+
+
+

It is very important to create a persistent volume claim in order to prevent the mentioned loss of data.

+
+
+
+
Step 1: Add storage
+
+
+pvc1 +
+
+
+
+
Step 2: Select the appropriate storage, or create it from scratch if necessary
+
+
+pvc2 +
+
+
+
+
Step 3: Introduce the required information
+
+
    +
  • +

    Path as it is specified in the BitBucket server Docker image (/var/atlassian/application-data/bitbucket)

    +
  • +
  • +

    Volume name with a unique name to clearly identify the volume

    +
  • +
+
+
+
+pvc3 +
+
+
+
+
The change will be immediately applied
+
+
+done +
+
+
+
+

Mirabaud CICD Environment Setup

+
+

Initial requirements:

+
+
+
    +
  • +

    OS: RHEL 6.5

    +
  • +
+
+
+

Remote setup in CI machine (located in the Netherlands)

+
+
+
+
    - Jenkins
+    - Nexus
+    - GitLab
+    - Mattermost
+    - Atlassian Crucible
+    - SonarQube
+
+
+
+
+

1. Install Docker and Docker Compose in RHEL 6.5

+ +
+
+

Docker

+
+

Due to that OS version, the only way to have Docker running in the CI machine is by installing it from the EPEL repository (Extra Packages for Enterprise Linux).

+
+
+
    +
  1. +

    Add EPEL

    +
  2. +
+
+
+
+
##rpm -iUvh http://dl.fedoraproject.org/pub/epel/6/x86_64/epel-release-6-8.noarch.rpm
+
+
+
+
    +
  1. +

    Install docker.io from that repository

    +
  2. +
+
+
+
+
##yum -y install docker-io
+
+
+
+
    +
  1. +

    Start Docker daemon

    +
  2. +
+
+
+
+
##service docker start
+
+
+
+
    +
  1. +

    Check the installation

    +
  2. +
+
+
+
+
##docker -v
+Docker version 1.7.1, build 786b29d/1.7.1
+
+
+
+
+

Docker Compose

+
+

Download and install it via curl. It will use this site.

+
+
+
+
##curl -L https://github.com/docker/compose/releases/download/1.5.0/docker-compose-`uname -s`-`uname -m` > /usr/local/bin/docker-compose
+
+##chmod +x /usr/local/bin/docker-compose
+
+
+
+

Add it to your sudo path:

+
+
+
    +
  1. +

    Find out where it is:

    +
  2. +
+
+
+
+
##echo $PATH
+
+
+
+
    +
  1. +

    Copy the docker-compose file from /usr/local/bin/ to your sudo PATH.

    +
  2. +
+
+
+
+
##docker-compose -v
+docker-compose version 1.5.2, build 7240ff3
+
+
+
+
+

2. Directories structure

+
+

Several directories had been added to organize some files related to docker (like docker-compose.yml) and docker volumes for each service. Here’s how it looks:

+
+
+
+
/home
+    /[username]
+        /jenkins
+            /volumes
+                /jenkins_home
+        /sonarqube
+            /volumes
+                /conf
+                /data
+                /extensions
+                /lib
+                    /bundled-plugins
+        /nexus
+            /volumes
+                /nexus-data
+        /crucible
+            /volumes
+                /
+        /gitlab
+            docker-compose.yml
+            /volumes
+                /etc
+                    /gitlab
+                /var
+                    /log
+                    /opt
+        /mattermost
+            docker-compose.yml
+            /volumes
+                /db
+                    /var
+                        /lib
+                            /postgresql
+                                /data
+                /app
+                    /mattermost
+                        /config
+                        /data
+                        /logs
+                /web
+                    /cert
+
+
+
+
+

3. CICD Services with Docker

+
+

Some naming conventions have been followed, such as naming containers as mirabaud_[service].

+
+
+

Several folders have been created to store each service’s volumes, docker-compose.yml(s), extra configuration settings and so on:

+
+
+
+

Jenkins

+ +
+
+

== Command

+
+
+
##docker run -d -p 8080:8080 -p 50000:50000 --name=mirabaud_jenkins \
+    -v /home/[username]/jenkins/volumes/jenkins_home:/var/jenkins_home \
+    jenkins
+
+
+
+
+

== Generate keystore

+
+
+
keytool -importkeystore -srckeystore server.p12 -srcstoretype pkcs12 -srcalias 1 -destkeystore newserver.jks -deststoretype jks -destalias server
+
+
+
+
+

== Start jekins with SSL (TODO: make a docker-compose.yml for this):

+
+
+
sudo docker run -d --name mirabaud_jenkins -v /jenkins:/var/jenkins_home -p 8080:8443 jenkins --httpPort=-1 --httpsPort=8443 --httpsKeyStore=/var/jenkins_home/certs/keystore.jks --httpsKeyStorePassword=Mirabaud2017
+
+
+
+
+

== Volumes

+
+
+
volumes/jenkins_home:/var/jenkins_home
+
+
+
+
+

SonarQube

+ +
+
+

== Command

+
+
+
##docker run -d -p 9000:9000 -p 9092:9092 --name=mirabaud_sonarqube \
+    -v /home/[username]/sonarqube/volumes/conf:/opt/sonarqube/conf \
+    -v /home/[username]/sonarqube/volumes/data:/opt/sonarqube/data \
+    -v /home/[username]/sonarqube/volumes/extensions:/opt/sonarqube/extensions \
+    -v /home/[username]/sonarqube/volumes/lib/bundled-plugins:/opt/sonarqube//lib/bundled-plugins \
+    sonarqube
+
+
+
+
+

== Volumes

+
+
+
volumes/conf:/opt/sonarqube/conf
+volumes/data:/opt/sonarqube/data
+volumes/extensions:/opt/sonarqube/extensions
+volumes/lib/bundled-plugins:/opt/sonarqube/lib/bundled-plugins
+
+
+
+
+

Nexus

+ +
+
+

== Command

+
+
+
##docker run -d -p 8081:8081 --name=mirabaud_nexus\
+    -v /home/[username]/nexus/nexus-data:/sonatype-work
+    sonatype/nexus
+
+
+
+
+

== Volumes

+
+
+
volumes/nexus-data/:/sonatype-work
+
+
+
+
+

Atlassian Crucible

+ +
+
+

== Command

+
+
+
##docker run -d -p 8084:8080 --name=mirabaud_crucible \
+    -v /home/[username]/crucible/volumes/data:/atlassian/data/crucible
+    mswinarski/atlassian-crucible:latest
+
+
+
+
+

== Volumes

+
+
+
volumes/data:/atlassian/data/crucible
+
+
+
+
+

4. CICD Services with Docker Compose

+
+

Both services had been deployed by using the # docker-compose up -d command from their root directories (/gitlab and /mattermost). The syntax of the two docker-compose.yml files is the one corresponding with the 1st version (due to the docker-compose v1.5).

+
+
+
+

GitLab

+ +
+
+

== docker-compose.yml

+
+
+
mirabaud:
+    image: 'gitlab/gitlab-ce:latest'
+    restart: always
+    ports:
+            - '8888:80'
+    volumes:
+            - '/home/[username]/gitlab/volumes/etc/gilab:/etc/gitlab'
+            - '/home/[username]/gitlab/volumes/var/log:/var/log/gitlab'
+            - '/home/[username]/gitlab/volumes/var/opt:/var/opt/gitlab'
+
+
+
+
+

== Command (docker)

+
+
+
docker run -d -p 8888:80 --name=mirabaud_gitlab \
+    -v /home/[username]/gitlab/volumes/etc/gitlab/:/etc/gitlab \
+    -v /home/[username]/gitlab/volumes/var/log:/var/log/gitlab \
+    -v /home/[username]/gitlab/volumes/var/opt:/var/opt/gitlab \
+    gitlab/gitlab-ce
+
+
+
+
+

== Volumes

+
+
+
volumes/etc/gitlab:/etc/gitlab
+volumes/var/opt:/var/log/gitlab
+volumes/var/log:/var/log/gitlab
+
+
+
+
+

Mattermost

+ +
+
+

== docker-compose.yml:

+
+
+
db:
+  image: mattermost/mattermost-prod-db
+  restart: unless-stopped
+  volumes:
+    - ./volumes/db/var/lib/postgresql/data:/var/lib/postgresql/data
+    - /etc/localtime:/etc/localtime:ro
+  environment:
+    - POSTGRES_USER=mmuser
+    - POSTGRES_PASSWORD=mmuser_password
+    - POSTGRES_DB=mattermost
+
+app:
+  image: mattermost/mattermost-prod-app
+  links:
+    - db:db
+  restart: unless-stopped
+  volumes:
+    - ./volumes/app/mattermost/config:/mattermost/config:rw
+    - ./volumes/app/mattermost/data:/mattermost/data:rw
+    - ./volumes/app/mattermost/logs:/mattermost/logs:rw
+    - /etc/localtime:/etc/localtime:ro
+  environment:
+    - MM_USERNAME=mmuser
+    - MM_PASSWORD=mmuser_password
+    - MM_DBNAME=mattermost
+
+web:
+  image: mattermost/mattermost-prod-web
+  ports:
+    - "8088:80"
+    - "8089:443"
+  links:
+    - app:app
+  restart: unless-stopped
+  volumes:
+    - ./volumes/web/cert:/cert:ro
+    - /etc/localtime:/etc/localtime:ro
+
+
+
+
+

== SSL Certificate

+
+

How to generate the certificates:

+
+
+

Get the crt and key from CA or generate a new one self-signed. Then:

+
+
+
+
// 1. create the p12 keystore
+##openssl pkcs12 -export -in cert.crt -inkey mycert.key -out certkeystore.p12
+
+// 2. export the pem certificate with password
+##openssl pkcs12 -in certkeystore.p12 -out cert.pem
+
+// 3. export the pem certificate without password
+##openssl rsa -in cert.pem -out key-no-password.pem
+
+
+
+

SSL:

+
+
+

Copy the cert and the key without password at:

+
+
+

./volumes/web/cert/cert.pem

+
+
+

and

+
+
+

./volumes/web/cert/key-no-password.pem

+
+
+

Restart the server and the SSL should be enabled at port 8089 using HTTPS.

+
+
+
+

== Volumes

+
+
+
-- db --
+volumes/db/var/lib/postgresql/data:/var/lib/postgresql/data
+/etc/localtime:/etc/localtime:ro                                # absolute path
+
+-- app --
+volumes/app/mattermost/config:/mattermost/config:rw
+volumes/app/mattermost/data:/mattermost/data:rw
+volumes/app/mattermost/logs:/mattermost/logs:rw
+/etc/localtime:/etc/localtime:ro                                # absolute path
+
+-- web --
+volumes/web/cert:/cert:ro
+/etc/localtime:/etc/localtime:ro                                # absolute path
+
+
+
+
+

5. Service Integration

+
+

All integrations had been done following CICD Services Integration guides:

+
+ +
+ + + + + +
+ + +These guides may be obsolete. You can find here the official configuration guides, +
+
+
+
Jenkins - GitLab integration
+
+

The first step to have a Continuous Integration system for your development is to make sure that all your changes to your team’s remote repository are evaluated by the time they are pushed. That usually implies the usage of so-called webhooks. You’ll find a fancy explanation about what Webhooks are in here.

+
+
+

To resume what we’re doing here, we are going to prepare our Jenkins and our GitLab so when a developer pushes some changes to the GitLab repository, a pipeline in Jenkins gets triggered. Just like that, in an automatic way.

+
+
+
+
1. Jenkins GitLab plugin
+
+

As it usually happens, some Jenkins plug-in(s) must be installed. In this case, let’s install those related with GitLab:

+
+
+
+jenkins gitlab plugins +
+
+
+
+
2. GitLab API Token
+
+

To communicate with GitLab from Jenkins, we will need to create an authentication token from your GitLab user settings. A good practice for this would be to create it from a machine user. Something like (i.e.) devonfw-ci/******.

+
+
+
+gitlab access token +
+
+
+

Simply adding a name to it and a date for it to expire is enough:

+
+
+
+gitlab access token generation +
+
+
+
+gitlab access token generated +
+
+
+

As GitLab said, you should make sure you don’t lose your token. Otherwise you would need to create a new one.

+
+
+

This will allow Jenkins to connect with right permissions to our GitLab server.

+
+
+
+
3. Create "GitLab API" Token credentials
+
+

Those credentials will use that token already generated in GitLab to connect once we declare the GitLab server in the Global Jenkins configuration. Obviously, those credentials must be GitLab API token-like.

+
+
+
+jenkins gitlab api token credentials kind +
+
+
+

Then, we add the generated token in the API token field:

+
+
+
+jenkins gitlab api token credentials complete +
+
+
+

Look in your Global credentials if they had been correctly created:

+
+
+
+jenkins gitlab api token credentials +
+
+
+
+
4. Create GitLab connection in Jenkins
+
+

Specify a GitLab connection in your Jenkins’s Manage Jenkins > Configure System configuration. This will tell Jenkins where is our GitLab server, a user to access it from and so on.

+
+
+

You’ll need to give it a name, for example, related with what this GitLab is dedicated for (specific clients, internal projects…​). Then, the Gitlab host URL is just where your GitLab server is. If you have it locally, that field should look similar to:

+
+
+
    +
  • +

    Connection name: my-local-gitlab

    +
  • +
  • +

    Gitlab host URL: http://localhost:${PORT_NUMBER}

    +
  • +
+
+
+

Finally, we select our recently GitLab API token as credentials.

+
+
+
+jenkins gitlab connection +
+
+
+
+
5. Jenkins Pipeline changes
+ +
+
+
5.1 Choose GitLab connection in Pipeline’s General configuration
+
+

First, our pipeline should allow us to add a GitLab connection to connect to (the already created one).

+
+
+
+jenkins pipeline gitlab connection +
+
+
+

In the case of the local example, could be like this:

+
+
+
    +
  • +

    GitLab connection: my-local-gitlab

    +
  • +
  • +

    GitLab Repository Name: myusername/webhook-test (for example)

    +
  • +
+
+
+
+
5.2 Create a Build Trigger
+
+
    +
  1. +

    You should already see your GitLab project’s URL (as you stated in the General settings of the Pipeline).

    +
  2. +
  3. +

    Write .*build.* in the comment for triggering a build

    +
  4. +
  5. +

    Specify or filter the branch of your repo you want use as target. That means, whenever a git action is done to that branch (for example, master), this Pipeline is going to be built.

    +
  6. +
  7. +

    Generate a Secret token (to be added in the yet-to-be-created GitLab webhook).

    +
  8. +
+
+
+
+jenkins pipeline build trigger +
+
+
+
+
6. GitLab Webhook
+
+
    +
  1. +

    Go to you GitLab project’s Settings > Integration section.

    +
  2. +
  3. +

    Add the path to your Jenkins Pipeline. Make sure you add project instead of job in the path.

    +
  4. +
  5. +

    Paste the generated Secret token of your Jenkins pipeline

    +
  6. +
  7. +

    Select your git action that will trigger the build.

    +
  8. +
+
+
+
+gitlab webhook +
+
+
+
+
7. Results
+
+

After all those steps you should have a result similar to this in your Pipeline:

+
+
+
+jenkins pipeline result +
+
+
+

Enjoy the Continuous Integration! :)

+
+
+
+
Jenkins - Nexus integration
+
+

Nexus is used to both host dependencies for devonfw projects to download (common Maven ones, custom ones such as ojdb and even devonfw so-far-IP modules). Moreover, it will host our projects' build artifacts (.jar, .war, …​) and expose them for us to download, wget and so on. A team should have a bidirectional relation with its Nexus repository.

+
+
+
+
1. Jenkins credentials to access Nexus
+
+

By default, when Nexus is installed, it contains 3 user credentials for different purposes. The admin ones look like this: admin/admin123. There are also other 2: deployment/deployment123 and TODO.

+
+
+
+
// ADD USER TABLE IMAGE FROM NEXUS
+
+
+
+

In this case, let’s use the ones with the greater permissions: admin/admin123.

+
+
+

Go to Credentials > System (left sidebar of Jenkins) then to Global credentials (unrestricted) on the page table and on the left sidebar again click on Add Credentials.

+
+
+

This should be shown in your Jenkins:

+
+
+
+jenkins new nexus credentials +
+
+
+

Fill the form like this:

+
+
+
+jenkins new nexus credentials filled +
+
+
+

And click in OK to create them. Check if the whole thing went as expected:

+
+
+
+jenkins new nexus credentials completed +
+
+
+
+
2. Jenkins Maven Settings
+
+

Those settings are also configured (or maybe not-yet-configured) in our devonfw distributions in:

+
+
+
+
/${devonfw-dist-path}
+    /software
+        /maven
+            /conf
+                settings.xml
+
+
+
+

Go to Manage Jenkins > Managed files and select Add a new Config in the left sidebar.

+
+
+
+jenkins new maven settings +
+
+
+

The ID field will get automatically filled with a unique value if you don’t set it up. No problems about that. Click on Submit and let’s create some Servers Credentials:

+
+
+
+jenkins new maven settings servers +
+
+
+

Those Server Credentials will allow Jenkins to access to the different repositories/servers that are going to be declared afterwards.

+
+
+

Let’s create 4 server credentials.

+
+
+
    +
  • +

    my.nexus: Will serve as general profile for Maven.

    +
  • +
  • +

    mynexus.releases: When a mvn deploy process is executed, this will tell Maven where to push releases to.

    +
  • +
  • +

    mynexus.snapshots: The same as before, but with snapshots instead.

    +
  • +
  • +

    mynexus.central: Just in case we want to install a specific dependency that is not by default in the Maven Central repository (such as ojdbc), Maven will point to it instead.

    +
  • +
+
+
+
+jenkins new maven settings servers credentials +
+
+
+

A more or less complete Jenkins Maven settings would look like this:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
+          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+          xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">
+
+    <mirrors>
+        <mirror>
+            <id>mynexus.central</id>
+            <mirrorOf>central</mirrorOf>
+            <name>central</name>
+            <url>http://${URL-TO-YOUR-NEXUS-REPOS}/central</url>
+        </mirror>
+    </mirrors>
+
+    <profiles>
+        <profile>
+            <id>my.nexus</id>
+            <!-- 3 REPOS ARE DECLARED -->
+            <repositories>
+                <repository>
+                    <id>mynexus.releases</id>
+                    <name>mynexus Releases</name>
+                    <url>http://${URL-TO-YOUR-NEXUS-REPOS}/releases</url>
+                    <releases>
+                        <enabled>true</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </releases>
+                    <snapshots>
+                        <enabled>false</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </snapshots>
+                </repository>
+                <repository>
+                    <id>mynexus.snapshots</id>
+                    <name>mynexus Snapshots</name>
+                    <url>http://${URL-TO-YOUR-NEXUS-REPOS}/snapshots</url>
+                    <releases>
+                        <enabled>false</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </releases>
+                    <snapshots>
+                        <enabled>true</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </snapshots>
+                </repository>
+            </repositories>
+            <pluginRepositories>
+                <pluginRepository>
+                    <id>public</id>
+                    <name>Public Repositories</name>
+                    <url>http://${URL-TO-YOUR-NEXUS}/nexus/content/groups/public/</url>
+                    <releases>
+                        <enabled>true</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </releases>
+                    <snapshots>
+                        <enabled>true</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </snapshots>
+                </pluginRepository>
+            </pluginRepositories>
+        </profile>
+    </profiles>
+    <!-- HERE IS WHERE WE TELL MAVEN TO CHOOSE THE my.nexus PROFILE -->
+    <activeProfiles>
+        <activeProfile>my.nexus</activeProfile>
+    </activeProfiles>
+</settings>
+
+
+
+
+
3. Use it in Jenkins Pipelines
+ +
+
+
Jenkins - SonarQube integration
+
+

First thing is installing both tools by, for example, Docker or Docker Compose. Then, we have to think about how they should collaborate to create a more efficient Continuous Integration process.

+
+
+

Once our project’s pipeline is triggered (it could also be triggered in a fancy way, such as when a merge to the develop branch is done).

+
+
+
+
1. Jenkins SonarQube plugin
+
+

Typically in those integration cases, Jenkins plug-in installations become a must. Let’s look for some available SonarQube plug-in(s) for Jenkins:

+
+
+
+jenkins sonarqube plugin +
+
+
+
+
2. SonarQube token
+
+

Once installed let’s create a token in SonarQube so that Jenkins can communicate with it to trigger their Jobs. Once we install SonarQube in our CI/CD machine (ideally a remote machine) let’s login with admin/admin credentials:

+
+
+
+sonarqube login +
+
+
+

Afterwards, SonarQube itself asks you to create this token we talked about (the name is up to you):

+
+
+
+sonarqube token name +
+
+
+

Then a token is generated:

+
+
+
+sonarqube token generation +
+
+
+

You click in "continue" and the token’s generation is completed:

+
+
+
+sonarqube token done +
+
+
+
+
3. Jenkins SonarQube Server setup
+
+

Now we need to tell Jenkins where is SonarQube and how to communicate with it. In Manage Jenkins > Configure Settings. We add a name for the server (up to you), where it is located (URL), version and the Server authentication token created in point 2.

+
+
+
+jenkins sonarqube server setup +
+
+
+
+
4. Jenkins SonarQube Scanner
+
+

Install a SonarQube Scanner as a Global tool in Jenkins to be used in the project’s pipeline.

+
+
+
+jenkins sonarqube scanner +
+
+
+
+
5. Pipeline code
+
+

Last step is to add the SonarQube process in our project’s Jenkins pipeline. The following code will trigger a SonarQube process that will evaluate our code’s quality looking for bugs, duplications, and so on.

+
+
+
+
    stage 'SonarQube Analysis'
+        def scannerHome = tool 'SonarQube scanner';
+        sh "${scannerHome}/bin/sonar-scanner \
+             -Dsonar.host.url=http://url-to-your-sq-server:9000/ \
+             -Dsonar.login=[SONAR_USER] -Dsonar.password=[SONAR_PASS] \
+             -Dsonar.projectKey=[PROJECT_KEY] \
+             -Dsonar.projectName=[PROJECT_NAME] -Dsonar.projectVersion=[PROJECT_VERSION] \
+             -Dsonar.sources=. -Dsonar.java.binaries=. \
+             -Dsonar.java.source=1.8 -Dsonar.language=java"
+
+
+
+
+
6. Results
+
+

After all this, you should end up having something like this in Jenkins:

+
+
+
+jenkins sonarqube feedback +
+
+
+

And in SonarQube:

+
+
+
+sonarqube project result +
+
+
+
+
7. Changes in a devonfw project to execute SonarQube tests with Coverage
+
+

The plugin used to have Coverage reports in the SonarQube for devonfw projects is Jacoco. There are some changes in the project’s parent pom.xml that are mandatory to use it.

+
+
+

Inside of the <properties> tag:

+
+
+
+
<properties>
+
+    (...)
+
+    <sonar.jacoco.version>3.8</sonar.jacoco.version>
+    <sonar.java.coveragePlugin>jacoco</sonar.java.coveragePlugin>
+    <sonar.core.codeCoveragePlugin>jacoco</sonar.core.codeCoveragePlugin>
+    <sonar.dynamicAnalysis>reuseReports</sonar.dynamicAnalysis>
+    <sonar.language>java</sonar.language>
+    <sonar.java.source>1.7</sonar.java.source>
+    <sonar.junit.reportPaths>target/surefire-reports</sonar.junit.reportPaths>
+    <sonar.jacoco.reportPaths>target/jacoco.exec</sonar.jacoco.reportPaths>
+    <sonar.sourceEncoding>UTF-8</sonar.sourceEncoding>
+    <sonar.exclusions>
+        **/generated-sources/**/*,
+        **io/oasp/mirabaud/general/**/*,
+        **/*Dao.java,
+        **/*Entity.java,
+        **/*Cto.java,
+        **/*Eto.java,
+        **/*SearchCriteriaTo.java,
+        **/*management.java,
+        **/*SpringBootApp.java,
+        **/*SpringBootBatchApp.java,
+        **/*.xml,
+        **/*.jsp
+    </sonar.exclusions>
+    <sonar.coverage.exclusions>
+        **io/oasp/mirabaud/general/**/*,
+        **/*Dao.java,
+        **/*Entity.java,
+        **/*Cto.java,
+        **/*Eto.java,
+        **/*SearchCriteriaTo.java,
+        **/*management.java,
+        **/*SpringBootApp.java,
+        **/*SpringBootBatchApp.java,
+        **/*.xml,
+        **/*.jsp
+    </sonar.coverage.exclusions>
+    <sonar.host.url>http://${YOUR_SONAR_SERVER_URL}/</sonar.host.url>
+    <jacoco.version>0.7.9</jacoco.version>
+
+    <war.plugin.version>3.2.0</war.plugin.version>
+    <assembly.plugin.version>3.1.0</assembly.plugin.version>
+</properties>
+
+
+
+

Of course, those sonar and sonar.coverage can/must be changed to fit with other projects.

+
+
+

Now add the Jacoco Listener as a dependency:

+
+
+
+
<dependencies>
+    <dependency>
+        <groupId>org.sonarsource.java</groupId>
+        <artifactId>sonar-jacoco-listeners</artifactId>
+        <scope>test</scope>
+    </dependency>
+</dependencies>
+
+
+
+

Plugin Management declarations:

+
+
+
+
<pluginManagement>
+    <plugins>
+        <plugin>
+            <groupId>org.sonarsource.scanner.maven</groupId>
+            <artifactId>sonar-maven-plugin</artifactId>
+            <version>3.2</version>
+        </plugin>
+        <plugin>
+            <groupId>org.jacoco</groupId>
+            <artifactId>jacoco-maven-plugin</artifactId>
+            <version>${jacoco.version}</version>
+        </plugin>
+    </plugins>
+</pluginManagement>
+
+
+
+

Plugins:

+
+
+
+
<plugins>
+
+    (...)
+
+    <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <version>2.20.1</version>
+        <configuration>
+            <argLine>-XX:-UseSplitVerifier -Xmx2048m ${surefireArgLine}</argLine>
+            <testFailureIgnore>false</testFailureIgnore>
+            <useFile>false</useFile>
+            <reportsDirectory>${project.basedir}/${sonar.junit.reportPaths}</reportsDirectory>
+            <argLine>${jacoco.agent.argLine}</argLine>
+            <excludedGroups>${oasp.test.excluded.groups}</excludedGroups>
+            <alwaysGenerateSurefireReport>true</alwaysGenerateSurefireReport>
+            <aggregate>true</aggregate>
+            <properties>
+                <property>
+                    <name>listener</name>
+                    <value>org.sonar.java.jacoco.JUnitListener</value>
+                </property>
+            </properties>
+        </configuration>
+    </plugin>
+    <plugin>
+        <groupId>org.jacoco</groupId>
+        <artifactId>jacoco-maven-plugin</artifactId>
+        <configuration>
+            <argLine>-Xmx128m</argLine>
+            <append>true</append>
+            <propertyName>jacoco.agent.argLine</propertyName>
+            <destFile>${sonar.jacoco.reportPath}</destFile>
+            <excludes>
+                <exclude>**/generated-sources/**/*,</exclude>
+                <exclude>**io/oasp/${PROJECT_NAME}/general/**/*</exclude>
+                <exclude>**/*Dao.java</exclude>
+                <exclude>**/*Entity.java</exclude>
+                <exclude>**/*Cto.java</exclude>
+                <exclude>**/*Eto.java</exclude>
+                <exclude>**/*SearchCriteriaTo.java</exclude>
+                <exclude>**/*management.java</exclude>
+                <exclude>**/*SpringBootApp.java</exclude>
+                <exclude>**/*SpringBootBatchApp.java</exclude>
+                <exclude>**/*.class</exclude>
+            </excludes>
+        </configuration>
+        <executions>
+            <execution>
+                <id>prepare-agent</id>
+                <phase>initialize</phase>
+                <goals>
+                    <goal>prepare-agent</goal>
+                </goals>
+                <configuration>
+                    <destFile>${sonar.jacoco.reportPath}</destFile>
+                    <append>true</append>
+                </configuration>
+            </execution>
+            <execution>
+                <id>report-aggregate</id>
+                <phase>verify</phase>
+                <goals>
+                    <goal>report-aggregate</goal>
+                </goals>
+            </execution>
+            <execution>
+                <id>jacoco-site</id>
+                <phase>verify</phase>
+                <goals>
+                    <goal>report</goal>
+                </goals>
+            </execution>
+        </executions>
+    </plugin>
+</plugins>
+
+
+
+
+
Jenkins SonarQube execution
+
+

If the previous configuration is already set up, once Jenkins executes the sonar maven plugin, it will automatically execute coverage as well.

+
+
+

This is an example of a block of code from a devonfw project’s Jenkinsfile:

+
+
+
+
    withMaven(globalMavenSettingsConfig: 'YOUR_GLOBAL_MAVEN_SETTINGS', jdk: 'OpenJDK 1.8', maven: 'Maven_3.3.9') {
+        sh "mvn sonar:sonar -Dsonar.login=[USERNAME] -Dsonar.password=[PASSWORD]"
+    }
+
+
+
+
+
+

OKD (OpenShift Origin)

+ +
+
+

What is OKD

+
+

OKD is a distribution of Kubernetes optimized for continuous application development and multi-tenant deployment. OKD is the upstream Kubernetes distribution embedded in Red Hat OpenShift.

+
+
+

OKD embeds Kubernetes and extends it with security and other integrated concepts. OKD is also referred to as Origin in github and in the documentation.

+
+
+

OKD provides a complete open source container application platform. If you are looking for enterprise-level support, or information on partner certification, Red Hat also offers Red Hat OpenShift Container Platform.

+
+
+

Continue reading…​

+ +
+
Install OKD (Openshift Origin)
+ +
+
+
Pre-requisites
+ +
+
+
Install docker
+ +
+
+
$ sudo groupadd docker
+$ sudo usermod -aG docker $USER
+
+
+
+
+
Download Openshift Origin Client
+
+

Download Openshift Origin Client from here

+
+
+

When the download is complete, simply extract it to the directory that you want, for example /home/administrador/oc

+
+
+
+
Add oc to path
+
+
+
$ export PATH=$PATH:/home/administrador/oc
+
+
+
+
+
Install Openshift Cluster
+ +
+
+
Add the insecure registry
+
+

Create file /etc/docker/daemon.json with the next content:

+
+
+
+
{
+    "insecure-registries" : [ "172.30.0.0/16" ]
+}
+
+
+
+
+
Download docker images for openshift
+
+
+
$ oc cluster up
+
+
+
+
+
Install Oc Cluster Wrapper
+
+

To manage the cluster persistence more easily, we are going to use oc cluster wrapper.

+
+
+
+
cd /home/administrador/oc
+wget https://raw.githubusercontent.com/openshift-evangelists/oc-cluster-wrapper/master/oc-cluster
+
+
+
+

oc-cluster up devonfw-shop-floor --public-hostname X.X.X.X

+
+
+
+
Configure iptables
+
+

We must create iptables rules to allow traffic from other machines.

+
+
+
+
- The next commands allow all traffic; don't do this on a real server.
+
+- $ iptables -F
+- $ iptables -X
+- $ iptables -t nat -F
+- $ iptables -t nat -X
+- $ iptables -t mangle -F
+- $ iptables -t mangle -X
+- $ iptables -P INPUT ACCEPT
+- $ iptables -P OUTPUT ACCEPT
+- $ iptables -P FORWARD ACCEPT
+
+
+
+
+
How to use Oc Cluster Wrapper
+
+

With oc cluster wrapper we could have different clusters with different context.

+
+
+
+
Cluster up
+
+
+
$ oc-cluster up devonfw-shop-floor --public-hostname X.X.X.X
+
+
+
+
+
Cluster down
+
+
+
$ oc-cluster down
+
+
+
+
+
Use non-persistent cluster
+
+
+
oc cluster up --image openshift/origin --public-hostname X.X.X.X --routing-suffix apps.X.X.X.X.nip.io
+
+
+
+
+
devonfw Openshift Origin Initial Setup
+
+

These are scripts to customize an Openshift cluster to be a devonfw Openshift.

+
+
+
+
How to use
+ +
+
+
Prerequisite: Customize Openshift
+
+

devonfw Openshift Origin use custom icons, and we need to add it to openshift. More information:

+
+ +
+
+
Script initial-setup
+
+

Download this script and execute it.

+
+
+

More information about what this script does here.

+
+
+
+
Known issues
+ +
+
+
Failed to push image
+
+

If you receive an error like this:

+
+
+
+
error: build error: Failed to push image: After retrying 6 times, Push image still failed due to error: Get http://172.30.1.1:5000/v2/:  dial tcp 172.30.1.1:5000: getsockopt: connection refused
+
+
+
+

It’s because the registry isn’t working. Go to the openshift console and enter the default project https://x.x.x.x:8443/console/project/default/overview; you must see two resources, docker-registry and router, and they must be running. If they don’t work, try to deploy them and look at the logs to see what is happening.

+
+
+
+
s2i devonfw
+
+

These are the s2i sources and templates to build s2i images. It provides OpenShift builder images for components of the devonfw (at this moment only for angular and java).

+
+
+

This work is totally based on the implementation of Michael Kuehl from RedHat for Oasp s2i.

+
+
+

All this information is used as a part of the initial setup for openshift.

+
+
+
+
Previous setup
+
+

In order to build all of this, it will be necessary, first, to have a running OpenShift cluster. How to install it here.

+
+
+
+
Usage
+
+

Before using the builder images, add them to the OpenShift cluster.

+
+
+
+
Deploy the Source-2-Image builder images
+
+

First, create a dedicated devonfw project as admin.

+
+
+
+
$ oc new-project devonfw --display-name='devonfw' --description='devonfw Application Standard Platform'
+
+
+
+

Now add the builder image configuration and start their build.

+
+
+
+
oc create -f https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-devonfw-deployment/s2i/java/s2i-devonfw-java-imagestream.json --namespace=devonfw
+oc create -f https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-devonfw-deployment/s2i/angular/s2i-devonfw-angular-imagestream.json --namespace=devonfw
+oc start-build s2i-devonfw-java --namespace=devonfw
+oc start-build s2i-devonfw-angular --namespace=devonfw
+
+
+
+

Make sure other projects can access the builder images:

+
+
+
+
oc policy add-role-to-group system:image-puller system:authenticated --namespace=devonfw
+
+
+
+

That’s all!

+
+
+
+
Deploy devonfw templates
+
+

Now, it’s time to create devonfw templates to use this s2i and add it to the browse catalog. More information here.

+
+
+
+
Build All
+
+

Use this script to automatically install and build all image streams. The script also creates templates devonfw-angular and devonfw-java inside the project 'openshift' to be used by everyone.

+
+
+
    +
  1. +

    Open a bash shell as Administrator

    +
  2. +
  3. +

    Execute shell file:

    +
  4. +
+
+
+
+
$ /PATH/TO/BUILD/FILE/initial-setup.sh
+
+
+
+

More information about what this script does here.

+
+
+
+ +
+

This is a list of useful articles, etc, that I found while creating the templates.

+
+ +
+
+
devonfw templates
+
+

These are the devonfw templates to build devonfw apps for Openshift using the s2i images. They are based on the work of Mickuehl in Oasp templates/mythaistar for deploying My Thai Star.

+
+
+
    +
  • +

    Inside the example-mythaistar we have an example to deploy My Thai Star application using devonfw templates.

    +
  • +
+
+
+

All this information is used as a part of the initial setup for openshift.

+
+
+
+
How to use
+ +
+
+
Previous requirements
+ +
+
+
== Deploy the Source-2-Image builder images
+
+

Remember that these templates need a build image from s2i-devonfw-angular and s2i-devonfw-java. More information:

+
+ +
+
+
== Customize Openshift
+
+

Remember that these templates also have custom icons, and to use them, we must modify the master-config.yml inside openshift. More information:

+
+
+ +
+
+
+
Deploy devonfw templates
+
+

Now, it’s time to create devonfw templates to use this s2i and add it to the browse catalog.

+
+
+

To let all users use these templates in all openshift projects, we should create them in an openshift namespace. To do that, we must log in as an admin.

+
+
+
+
oc create -f https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-devonfw-deployment/templates/devonfw-java-template.json --namespace=openshift
+oc create -f https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-devonfw-deployment/templates/devonfw-angular-template.json --namespace=openshift
+
+
+
+

When it finishes, remember to logout as an admin and enter with our normal user.

+
+
+
+
$ oc login
+
+
+
+
+
How to use devonfw templates in openshift
+
+

To use these templates with openshift, we can override any parameter values defined in the file by adding the --param-file=paramfile option.

+
+
+

This file must be a list of <name>=<value> pairs. A parameter reference may appear in any text field inside the template items.

+
+
+

The parameters that we must override are the following

+
+
+
+
$ cat paramfile
+  APPLICATION_NAME=app-Name
+  APPLICATION_GROUP_NAME=group-Name
+  GIT_URI=Git uri
+  GIT_REF=master
+  CONTEXT_DIR=/context
+
+
+
+

The following parameters are optional

+
+
+
+
$ cat paramfile
+  APPLICATION_HOSTNAME=Custom hostname for service routes. Leave blank for default hostname, e.g.: <application-name>.<project>.<default-domain-suffix>,
+  # Only for angular
+  REST_ENDPOINT_URL=The URL of the backend's REST API endpoint. This can be declared after,
+  REST_ENDPOINT_PATTERN=The pattern URL of the backend's REST API endpoint that must be modify by the REST_ENDPOINT_URL variable,
+
+
+
+

For example, to deploy My Thai Star Java

+
+
+
+
$ cat paramfile
+  APPLICATION_NAME="mythaistar-java"
+  APPLICATION_GROUP_NAME="My-Thai-Star"
+  GIT_URI="https://github.com/devonfw/my-thai-star.git"
+  GIT_REF="develop"
+  CONTEXT_DIR="/java/mtsj"
+
+$ oc new-app --template=devonfw-java --namespace=mythaistar --param-file=paramfile
+
+
+
+
+
Customize Openshift Origin for devonfw
+
+

This is a guide to customize Openshift cluster.

+
+
+
+
Images Styles
+
+

The icons for templates must measure the same as below or the images don’t show right:

+
+
+
    +
  • +

    Openshift logo: 230px x 40px.

    +
  • +
  • +

    Template logo: 50px x 50px.

    +
  • +
  • +

    Category logo: 110px x 36px.

    +
  • +
+
+
+
+
How to use
+
+

To use it, we need to enter in openshift as an admin and use the next command:

+
+
+
+
$ oc login
+
+$ oc edit configmap/webconsole-config -n openshift-web-console
+
+
+
+

After this, we can see in our shell the webconsole-config.yaml, we only need to navigate until extensions and add the url for our own css in the stylesheetURLs and javascript in the scriptURLs section.

+
+
+

IMPORTANT: Scripts and stylesheets must be served with the correct content type or they will not be run by the browser. Scripts must be served with Content-Type: application/javascript and stylesheets with Content-Type: text/css.

+
+
+

In git repositories, the content type of raw is text/plain. You can use rawgit to convert a raw from a git repository to the correct content type.

+
+
+

Example:

+
+
+
+
webconsole-config.yaml: |
+  [...]
+    extensions:
+      scriptURLs:
+        - https://cdn.rawgit.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/scripts/catalog-categories.js
+      stylesheetURLs:
+        - https://cdn.rawgit.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/stylesheet/icons.css
+  [...]
+
+
+
+
+
More information
+
+ +
+
+
+
Old versions
+
+ +
+
+How to add Custom Icons inside openshift +
+

This is a guide to add custom icons into an Openshift cluster.

+
+
+

Here we can find an icons.css example to use the devonfw icons.

+
+
+
+Images Styles +
+

The icons for templates must measure the same as below or the images don’t show right:

+
+
+
    +
  • +

    Openshift logo: 230px x 40px.

    +
  • +
  • +

    Template logo: 50px x 50px.

    +
  • +
  • +

    Category logo: 110px x 36px.

    +
  • +
+
+
+
+Create a css + +
+
+Custom logo for openshift cluster +
+

For this example, we are going to call the css icons.css, but you can name it as you wish. +The Openshift cluster draws its icon by the id header-logo, so we only need to add to our icons.css the next Style Attribute ID

+
+
+
+
#header-logo {
+  background-image: url("https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/images/devonfw-openshift.png");
+  width: 230px;
+  height: 40px;
+}
+
+
+
+
+Custom icons for templates +
+

To apply a custom icon to a template, openshift uses a class name. Then, we need to insert the next Style Class inside our icons.css

+
+
+
+
.devonfw-logo {
+  background-image: url("https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/images/devonfw.png");
+  width: 50px;
+  height: 50px;
+}
+
+
+
+

To show that custom icon on a template, we only need to write the name of our class in the tag "iconClass" of our template.

+
+
+
+
{
+    ...
+    "items": [
+        {
+            ...
+            "metadata": {
+                ...
+                "annotations": {
+                    ...
+                    "iconClass": "devonfw-logo",
+                    ...
+                }
+            },
+            ...
+        }
+    ]
+}
+
+
+
+
+Use our own css inside openshift +
+

To do that, we need to enter in openshift as an admin and use the next command:

+
+
+
+
$ oc login
+$ oc edit configmap/webconsole-config -n openshift-web-console
+
+
+
+

After this, we can see in our shell the webconsole-config.yaml, we only need to navigate until extensions and add the url for our own css in the stylesheetURLs section.

+
+
+

IMPORTANT: Scripts and stylesheets must be served with the correct content type or they will not be run by the browser. stylesheets must be served with Content-Type: text/css.

+
+
+

In git repositories, the content type of raw is text/plain. You can use rawgit to convert a raw from a git repository to the correct content type.

+
+
+

Example:

+
+
+
+
webconsole-config.yaml: |
+	[...]
+    extensions:
+      stylesheetURLs:
+		- https://cdn.rawgit.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/stylesheet/icons.css
+    [...]
+
+
+
+
+How to add custom catalog categories inside openshift +
+

This is a guide to add custom Catalog Categories into an Openshift cluster.

+
+
+

Here we can find a catalog-categories.js example to use the devonfw catalog categories.

+
+
+
+Create a script to add custom languages and custom catalog categories + +
+
+Custom language +
+

For this example, we are going to add a new language into the languages category. To do that we must create a script, which we name catalog-categories.js

+
+
+
+
// Find the Languages category.
+var category = _.find(window.OPENSHIFT_CONSTANTS.SERVICE_CATALOG_CATEGORIES,
+                      { id: 'languages' });
+// Add Go as a new subcategory under Languages.
+category.subCategories.splice(2,0,{ // Insert at the third spot.
+  // Required. Must be unique.
+  id: "devonfw-languages",
+  // Required.
+  label: "devonfw",
+  // Optional. If specified, defines a unique icon for this item.
+  icon: "devonfw-logo-language",
+  // Required. Items matching any tag will appear in this subcategory.
+  tags: [
+    "devonfw",
+    "devonfw-angular",
+    "devonfw-java"
+  ]
+});
+
+
+
+
+Custom category +
+

For this example, we are going to add a new category into the category tab. To do that we must create a script, which we name catalog-categories.js

+
+
+
+
// Add a Featured category as the first category tab.
+window.OPENSHIFT_CONSTANTS.SERVICE_CATALOG_CATEGORIES.unshift({
+  // Required. Must be unique.
+  id: "devonfw-featured",
+  // Required
+  label: "devonfw",
+  subCategories: [
+    {
+      // Required. Must be unique.
+      id: "devonfw-languages",
+      // Required.
+      label: "devonfw",
+      // Optional. If specified, defines a unique icon for this item.
+      icon: "devonfw-logo-language",
+      // Required. Items matching any tag will appear in this subcategory.
+      tags: [
+        "devonfw",
+        "devonfw-angular",
+        "devonfw-java"
+      ]
+    }
+  ]
+});
+
+
+
+
+Use our own javascript inside openshift +
+

To do that, we need to enter in openshift as an admin and use the next command:

+
+
+
+
$ oc login
+$ oc edit configmap/webconsole-config -n openshift-web-console
+
+
+
+

After this, we can see in our shell the webconsole-config.yaml, we only need to navigate until extensions and add the url for our own javascript in the scriptURLs section.

+
+
+

IMPORTANT: Scripts and stylesheets must be served with the correct content type or they will not be run by the browser. Scripts must be served with Content-Type: application/javascript.

+
+
+

In git repositories, the content type of raw is text/plain. You can use rawgit to convert a raw from a git repository to the correct content type.

+
+
+

Example:

+
+
+
+
webconsole-config.yaml: |
+  [...]
+    extensions:
+      scriptURLs:
+        - https://cdn.rawgit.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/scripts/catalog-categories.js
+  [...]
+
+
+
+
+Customize Openshift Origin v3.7 for devonfw +
+

This is a guide to customize Openshift cluster. For more information read the next:

+
+
+ +
+
+
+Images Styles +
+

The icons for templates must have the exact dimensions listed below or the images won’t show right:

+
+
+
    +
  • +

    Openshift logo: 230px x 40px.

    +
  • +
  • +

    Template logo: 50px x 50px.

    +
  • +
  • +

    Category logo: 110px x 36px.

    +
  • +
+
+
+
+Quick Use +
+

This is a quick example to add custom icons and categories inside openshift.

+
+
+

To modify the icons inside openshift, we must modify the master-config.yaml of our openshift cluster. This file is inside the openshift container and, to obtain a copy of it, we must know our openshift container name.

+
+
+
+Obtain the master-config.yaml of our openshift cluster + +
+
+== Obtain the name of our openshift container +
+

To obtain it, we can know it executing the next:

+
+
+
+
$ docker container ls
+CONTAINER ID        IMAGE                                           COMMAND                  CREATED             STATUS              PORTS                                     NAMES
+83a4e3acda5b        openshift/origin:v3.7.0                         "/usr/bin/openshift …"   6 days ago          Up 6 days                                                     origin
+
+
+
+

Here we can see that the name of the container is origin. Normally the container is called origin.

+
+
+
+== Copy the master-config.yaml of our openshift container to our directory +
+

This file is inside the openshift container in the next directory: /var/lib/origin/openshift.local.config/master/master-config.yaml and we can copy it with the next command:

+
+
+
+
$ docker cp origin:/var/lib/origin/openshift.local.config/master/master-config.yaml ./
+
+
+
+

Now we have a file with the configuration of our openshift cluster.

+
+
+
+Copy all customize files inside the openshift container +
+

To use our customization of devonfw Openshift, we need to copy our files inside the openshift container.

+
+
+

To do this we need to copy the images, scripts and stylesheets from here inside openshift +container, for example, we could put it all inside a folder called openshift.local.devonfw. In step one we obtained the name of this container; for this example we assume that it’s called origin. Then our images are located inside the openshift container and we can see and access them in /var/lib/origin/openshift.local.devonfw/images.

+
+
+
+
$ docker cp ./openshift.local.devonfw origin:/var/lib/origin/
+
+
+
+
+Edit and copy the master-config.yaml to use our customize files +
+

The master-config.yaml has sections to load our custom files. All these sections are inside the assetConfig and their names are the following:

+
+
+
    +
  • +

    The custom stylesheets are into extensionStylesheets.

    +
  • +
  • +

    The custom scripts are into extensionScripts.

    +
  • +
  • +

    The custom images are into extensions.

    +
  • +
+
+
+

To use all our custom elements we only need to add the directory routes of each element in their appropriate section of the master-config.yaml

+
+
+
+
...
+assetConfig:
+  ...
+  extensionScripts:
+  - /var/lib/origin/openshift.local.devonfw/scripts/catalog-categories.js
+  extensionStylesheets:
+  - /var/lib/origin/openshift.local.devonfw/stylesheet/icons.css
+  extensions:
+  - name: images
+    sourceDirectory: /var/lib/origin/openshift.local.devonfw/images
+  ...
+...
+
+
+
+

Now we only need to copy that master-config.yaml inside openshift, and restart it to load the new configuration. To do that execute the next:

+
+
+
+
$ docker cp ./master-config.yaml origin:/var/lib/origin/openshift.local.config/master/master-config.yaml
+
+
+
+

To re-start openshift do oc cluster down and start again your persistent openshift cluster.

+
+
+
+More information +
+ +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-azure-connection-strings.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-azure-connection-strings.html new file mode 100644 index 00000000..0e7bb9a1 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-azure-connection-strings.html @@ -0,0 +1,340 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

CONNECTION STRINGS

+
+
+

Once your database is created, you will need to connect that DB to your backend application, this can be made using connection strings.

+
+
+
+
+

== CREATE THE CONNECTION STRING

+
+
+

Go to the Azure portal and select the App Service that you want to connect with the DB. To be able to establish this connection, both your DB and your App Service must be under the same resource group.

+
+
+

E.g.

+
+
+
+resource group +
+
+
+

As we can see here, both the app service and the DB exist under the same resource group 'BW-dev'

+
+
+

Select your app service and go to 'settings > Configuration', scroll down looking for 'Connection strings' and click on "New connection string"

+
+
+
+appservice +
+
+
+

Put the name you want (we’ve put the name 'Context', this name will be used later in your appSettings.json) and select the DB type. To fill the value box go to 'Home>SQL databases', click on the target DB and click on 'Show database connection strings', copy the value that appears there and paste it in the value box.

+
+
+
+db +
+
+
+
+connection string +
+
+
+

Paste the connection string in the 'value' box and click OK

+
+
+

Your connection string has been created.

+
+
+
+
+

== USE THE CONNECTION STRING

+
+
+

Go to your project, open the file AppSettings.json and add the connection string

+
+
+
+
  "ConnectionStrings": {
+    "Context": "Source=(localdb)\\MSSQLLocalDB;Initial Catalog=my-db;Integrated Security=True;"
+  }
+
+
+
+

Context is the name that we choose for the connection string that we’ve created before and that value is only for local purposes.

+
+
+

When the application is deployed, the value for context will be replaced with the value of the connection string that we’ve created in the earlier steps. Using this we avoid putting the user and the password into the code and we use them as secrets that will be replaced in the deployment.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-azure-install-sonar-with-docker-in-a-virtual-machine.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-azure-install-sonar-with-docker-in-a-virtual-machine.html new file mode 100644 index 00000000..321c4fd2 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-azure-install-sonar-with-docker-in-a-virtual-machine.html @@ -0,0 +1,447 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Connect to a Virtual Machine(VM) in Azure

+
+ +
+
+
+

== Pre-requisites

+
+
+

Have a VM created and a private key in order to connect to it

+
+
+
+
+

== Establish a connection

+
+
+

1- Open the client of your choice (putty, cmder, bash)

+
+
+

2- Ensure you have read-only access to the private key.

+
+
+
+
chmod 400 azureuser.pem
+
+
+
+

3- Run this command to connect to your VM

+
+
+
+
ssh -i <private key path> azureuser@51.103.78.61
+
+
+
+

Note: To get the IP go to your Azure portal, click on your VM, click on Networking and you will find the IP needed to establish the connection

+
+
+

You are connected:

+
+
+
+vm connection +
+
+
+
+
+

Install Sonar using Docker and Docker-compose

+
+
+

As an example we will use the practical case of Bad Weather, a project where we were asked to install Sonar inside a VM in Azure portal

+
+
+
+install sonar +
+
+
+

We had 2 possible scenarios, we went for the case A since no other service will be installed in this VM

+
+
+
+
+

== Steps

+
+
+

1- Install docker and docker compose in the VM

+
+
+
+
sudo dnf config-manager --add-repo=https://download.docker.com/linux/centos/docker-ce.repo
+sudo dnf list docker-ce
+sudo dnf install docker-ce --nobest -y
+sudo systemctl start docker
+sudo systemctl enable docker
+docker --version
+sudo dnf install curl -y
+sudo curl -L "https://github.com/docker/compose/releases/download/1.25.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
+sudo chmod +x /usr/local/bin/docker-compose
+sudo ln -s /usr/local/bin/docker-compose /usr/bin/docker-compose
+docker-compose --version
+
+
+
+

2- Deploy SonarQube and Postgress

+
+
+

2.1- Set necessary parameters for SonarQube

+
+
+
+
sudo sysctl -w vm.max_map_count=262144
+sudo sysctl -w fs.file-max=65536
+sudo ulimit -n 65536
+sudo ulimit -u 4096
+
+
+
+

2.2- Use docker-compose with the next definition to deploy it:

+
+
+

vim /home/sonar/docker-compose.yaml

+
+
+
+
version: "3"
+
+services:
+  sonarqube:
+    image: "sonarqube:7.9-community"
+    networks:
+      - sonar
+    environment:
+      - sonar.jdbc.username=user
+      - sonar.jdbc.password=pass
+      - sonar.jdbc.url=jdbc:postgresql://sonarqube-db:5432/sonar
+    ports:
+      - "80:9000"
+	depends_on:
+      - "sonarqube-db"
+    volumes:
+      - "$PWD/volumes/sonarqube/conf:/opt/sonarqube/conf"
+      - "$PWD/volumes/sonarqube/data:/opt/sonarqube/data"
+      - "$PWD/volumes/sonarqube/extensions:/opt/sonarqube/extensions"
+      - "$PWD/volumes/sonarqube/logs:/opt/sonarqube/logs"
+    ulimits:
+      nofile:
+        soft: 65536
+        hard: 65536
+  sonarqube-db:
+    image: "postgres:12-alpine"
+    networks:
+      - sonar
+    volumes:
+      - "$PWD/volumes/sonarqube-db/data:/var/lib/postgresql/data"
+    environment:
+      - POSTGRES_USER=youruser
+      - POSTGRES_PASSWORD=yourpass
+      - POSTGRES_DB=sonar
+      - PGDATA=/var/lib/postgresql/data
+
+networks:
+  sonar:
+    driver: bridge
+
+
+
+

3- Update the start configuration to set automatically the correct values and run the docker-compose

+
+
+
+
vim /usr/local/sbin/start.sh
+
+sysctl -w vm.max_map_count=262144
+sysctl -w fs.file-max=65536
+ulimit -n 65536
+ulimit -u 4096
+
+cd /home/sonar && docker-compose up -d
+
+
+
+

4- Add this to execute the docker-compose file every time the machine turns on

+
+
+
+
crontab -e
+@reboot /usr/local/sbin/start.sh
+
+vim /etc/sysctl.conf
+vm.max_map_count=262144
+fs.file-max=65536
+
+
+
+

Your Sonar is Up and running in your VM

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-azure-pipelines.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-azure-pipelines.html new file mode 100644 index 00000000..02ca15fc --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-azure-pipelines.html @@ -0,0 +1,514 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Create an Azure pipeline from scratch

+
+
+

The following steps will allow you to create a basic pipeline in Azure Devops from scratch

+
+
+

In order to deploy in Azure, we’ve created an automatic pipeline in Azure Devops that will be executed automatically when developers make a push to the Azure repositories. The pipeline will compile the code, build the application and ensure with automatic tests that the build is not going to break the application. To ensure good quality code, the code will be analyzed by Sonar, as well as your code coverage, and last but not least, your application will be deployed using Azure App Services.

+
+
+
+
+

Steps

+
+
+

1- Sign in to your Azure DevOps organization and navigate to your project.

+
+
+

2- Go to Pipelines, and then select New Pipeline.

+
+
+

3- Choose the location of your source code (GitHub, Bitbucket, Azure repos, etc.), in this case we have our code in Azure Repos Git.

+
+
+

A list of your repositories will be shown here:

+
+
+

4- When the list of repositories appears, select your repository.

+
+
+

Depending on your project type (Java, .NET, Python or JavaScript) the following configuration will change. In this case our project is .NET; for more types of projects please follow the official documentation.

+
+
+

5- When the Configure tab appears, select ASP.NET Core(or the one according to your project)

+
+
+
+configuration +
+
+
+

6- A .yaml file in your ./ location will be generated with all the required steps to run your pipeline. +The name of this .yaml file is 'azure-pipelines.yaml' which is the default name that will be used in your pipeline settings.

+
+
+

Note: If you change the name or the location, you will need to specify in the pipeline settings the new name or location:

+
+
+
+pipeline settings +
+
+
+

The pipeline is created with the minimum required steps to run it which are the following:

+
+
+
+
+

TRIGGERS

+
+
+

Triggers that will activate the pipeline execution

+
+
+
+
trigger:
+- master
+- develop
+
+
+
+
+
+

VARIABLES

+
+
+

Variables that will be used in the next steps

+
+
+
+
variables:
+  solution: '**/*.sln'
+  buildPlatform: 'Any CPU'
+  buildConfiguration: 'Release'
+
+
+
+
+
+

TOOLS AND LIBRARIES

+
+
+

For .NET:

+
+
+

-NuGet Tool Installer task:

+
+
+
+
- task: NuGetToolInstaller@1
+
+
+
+

Use this task to find, download, and cache a specified version of NuGet and add it to the PATH.

+
+
+

-The NuGet command to run:

+
+
+
+
- task: NuGetCommand@2
+  inputs:
+    restoreSolution: '$(solution)'
+
+
+
+

The NuGet command to run.

+
+
+

For more info use the official documentation.

+
+
+
+
+

BUILD

+
+
+

-Visual Studio Build task:

+
+
+
+
- task: VSBuild@1
+  inputs:
+    solution: '$(solution)'
+    msbuildArgs: '/p:DeployOnBuild=true /p:WebPublishMethod=Package /p:PackageAsSingleFile=true /p:SkipInvalidConfigurations=true /p:DesktopBuildPackageLocation="$(build.artifactStagingDirectory)\WebApp.zip" /p:DeployIisAppPath="Default Web Site"'
+    platform: '$(buildPlatform)'
+    configuration: '$(buildConfiguration)'
+
+
+
+

Use this task to build with MSBuild and set the Visual Studio version property.

+
+
+

For more info use the official documentation

+
+
+
+
+

TEST

+
+
+

-Visual Studio Test task:

+
+
+
+
- task: DotNetCoreCLI@2
+  inputs:
+    command: 'test'
+    arguments: '/p:CollectCoverage=true /p:CoverletOutputFormat=opencover /p:CoverletOutput=$(Agent.TempDirectory)/'
+    projects: '$(solution)'
+    publishTestResults: true
+  continueOnError: false
+  displayName: 'Dot Net Core CLI Test'
+
+
+
+

Use this task to run unit and functional tests (Selenium, Appium, Coded UI test, and more) using the Visual Studio Test Runner.

+
+
+

For more info use the official documentation

+
+
+

These steps are the ones generated when your pipeline is created; we can create the ones we need using the Azure Devops wizard in an easy way.

+
+
+

In our case, apart from build and test, we also need to deploy

+
+
+
+
+

DEPLOY

+
+ +
+
+
+

App Services

+
+
+

While deploying with App Services, 2 steps are required:

+
+
+
+
+

== Step 1: Publish

+
+
+

Use this task in a pipeline to publish artifacts for the Azure Pipeline

+
+
+
+
- task: PublishPipelineArtifact@0
+  inputs:
+    artifactName: 'Bad_Weather_Backend'
+    targetPath: '$(Build.ArtifactStagingDirectory)'
+
+
+
+

To know more about the use of predefined variables in azure take a look at the documentation

+
+
+
+
+

== Step 2: Deployment

+
+
+

Use this task to deploy to a range of App Services on Azure

+
+
+
+
- task: AzureRmWebAppDeployment@4
+  inputs:
+    ConnectionType: 'AzureRM'
+    azureSubscription: 'bad-weather-poc-rs-bw-dev'
+    appType: 'webApp'
+    WebAppName: 'bwbackendbe'
+    packageForLinux: '$(build.artifactStagingDirectory)\WebApp.zip'
+
+
+
+

This task has 2 prerequisites:

+
+
+

1-App Service instance:

+
+
+

The task is used to deploy a Web App project or Azure Function project to an existing Azure App Service instance, which must exist before the task runs.

+
+
+

2-Azure Subscription:

+
+
+

In order to deploy to Azure, an Azure subscription must be linked to the pipeline.

+
+
+

To know more about the input arguments for this task, make use of the official documentation

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-azure-sonarqube-integration.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-azure-sonarqube-integration.html new file mode 100644 index 00000000..efd4f314 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-azure-sonarqube-integration.html @@ -0,0 +1,380 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Integrate the SonarQube plugin in an Azure DevOps pipeline

+
+
+

The purpose of this readme is that you can configure your Azure Devops pipeline in order to be able to run a code analysis, analyse the code coverage and publish the results through the Sonar plugin.

+
+
+
+
+

How to do it

+
+ +
+
+
+

== Step 1: Create a service connection

+
+
+

The first thing to do is to declare your SonarQube server as a service endpoint in your Azure DevOps project settings.

+
+
+

Go to project settings → pipelines → service connections and create and choose 'SonarQube'.

+
+
+

Create service connection

+
+
+

Specify the server url and the connection name of your SonarQube server and the token Auth +Go to your SonarQube server and log in as admin, +once inside, go to administration → Security → Users → Administrator → Tokens → and generate the token. +Copy the generated token (once created it will never appear again so don’t lose it), paste it and click on save.

+
+
+
+ServiceConnection +
+
+
+

The service connection has been created. +Once this step is done your service connection will now appear in the service connections side bar.

+
+
+

For more info regarding the Authentication part please read the official documentation

+
+
+
+
+

== Step 2: Add the required tasks in the azure pipeline

+
+
+

In order to integrate SonarQube in the pipeline, 3 steps or tasks are required (depending on the different solutions like .NET, Java, C, etc. some of these tasks can be optional). These tasks are:

+
+
+

Prepare Analysis configuration +Run Code Analysis +Publish Quality Gate result

+
+
+

We can use the wizard to create this in an easy way, search "SonarQube" and let’s configure the tasks one by one.

+
+
+

Prepare Analysis configuration:

+
+
+

Fill the required fields and click on add

+
+
+

The prepare task will be now shown in the pipeline code:

+
+
+
+sonarprepare +
+
+
+

Follow the official documentation if you have doubts while filling the fields:

+
+
+

Once the prepare is done, continue with the code analysis.

+
+
+

Run Code Analysis

+
+
+

Select this from the task assistant and just like happened with the first task, the code will appear in your pipeline.

+
+
+
+runAnalysis +
+
+
+

Now, let’s publish the result of the analysis.

+
+
+

Publish quality gate result

+
+
+

Same as we did before, select in the display the publish extension and add it

+
+
+
+publish +
+
+
+
+
+

== Step 3: Run the pipeline

+
+
+

With this, all the required steps to integrate SonarQube in your Azure DevOps pipeline are done, the last thing you need to do is run your pipeline and your code will be analyzed and the results published.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-configure-dockerfile.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-configure-dockerfile.html new file mode 100644 index 00000000..e2d88e12 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-configure-dockerfile.html @@ -0,0 +1,308 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Dockerfile

+
+
+

You have examples of dockerfiles in cicdgen repository.

+
+
+

Inside these folders you could find all the files that you need to use those dockerfiles. Two dockerfiles are provided, Dockerfile and Dockerfile.ci; the first one is to compile the code and create the docker image, normally used locally, and Dockerfile.ci is to be used in Jenkins or similar, after building the application.

+
+
+ +
+
+ + + + + +
+ + +Dockerfile.ci should be copied to the artifacts and renamed as Dockerfile to work. In the case of devon4ng and devon4node this is the dist folder; in the case of devon4j it is the server/target folder. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-configure-gitlab.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-configure-gitlab.html new file mode 100644 index 00000000..bb018c31 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-configure-gitlab.html @@ -0,0 +1,312 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

GitLab Configuration

+
+ +
+
+
+

Create new repository

+
+
+

To create a new project in GitLab, go to your dashboard and click the green New project button or use the plus icon in the navigation bar.

+
+
+
+gitlab new prject +
+
+
+

This opens the New project page. Choose your group and fill the name of your project, the description and the visibility level in the next form:

+
+
+
+gitlab new prject form +
+
+
+ + + + + +
+ + +more information about how to create projects in GitLab in the official documentation +
+
+
+
+
+

Service integration

+
+
+

To learn how to configure the integration between GitLab and Jenkins see the next example

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-configure-jenkins-build-monitor-view.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-configure-jenkins-build-monitor-view.html new file mode 100644 index 00000000..6089be8a --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-configure-jenkins-build-monitor-view.html @@ -0,0 +1,341 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Build monitor view

+
+
+

With this tool you will be able to see in real time the state of your Jenkins pipelines.

+
+
+
+
+

Prerequisites

+
+ +
+
+
+

Add build monitor view plugin

+
+
+

To integrate it, you need to have installed the build monitor view. To install it go to Manage Jenkins clicking on left menu and enter in Manage Plugins. Go to Available tab and search it using the filter textbox in the top right corner and install it.

+
+
+
+
+

How to use it

+
+
+

When you have build monitor view installed, you could add a new view clicking on the + tab in the top bar.

+
+
+
+jenkins new view +
+
+
+

Now you need to fill in the name that you are going to give to your view and select the Build Monitor View option.

+
+
+
+jenkins build monitor view add +
+
+
+

Then you can see the configuration.

+
+
+
+jenkins build monitor view configuration +
+
+
+

In Job Filters section you can specify which resources are going to be showed and whether subfolders should be included in the search.

+
+
+

In Build Monitor - View Settings you could specify which is the name at the top of the view and what is the ordering criterion.

+
+
+

In Build Monitor - Widget Settings you could specify if you want to show the committers and which is the field to display if it fails.

+
+
+

And this is the output:

+
+
+
+jenkins build monitor view output +
+
+
+

You could limit the columns and the text scale by clicking on the gear button at the top right corner.

+
+
+
+jenkins build monitor view output config +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-configure-jenkins.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-configure-jenkins.html new file mode 100644 index 00000000..2142d1ae --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-configure-jenkins.html @@ -0,0 +1,904 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Jenkinsfile

+
+ +
+
+
+

Introduction

+
+
+
+jenkinsfile cicd activity diagram +
+
+
+

Here you are going to learn how you should configure the jenkinsfile of your project to apply CI/CD operations and enables automated application deployment.

+
+
+

Here you can find examples of the Jenkinsfile generated by cicdgen:

+
+
+ +
+
+

Next you could find an explanation about what is done in these Jenkinsfiles.

+
+
+
+
+

Environment values

+
+
+

At the top of the pipeline you should add the environment variables. In this tutorial you need the following variables:

+
+
+
+
    // sonarQube
+    // Name of the sonarQube tool
+    sonarTool = 'SonarQube'
+    // Name of the sonarQube environment
+    sonarEnv = "SonarQube"
+
+    // Nexus
+    // Artifact groupId
+    groupId = '<%= groupid %>'
+    // Nexus repository ID
+    repositoryId = 'pl-nexus'
+    // Nexus internal URL
+    repositoryUrl = 'http://nexus3-core:8081/nexus3/repository/'
+    // Maven global settings configuration ID
+    globalSettingsId = 'MavenSettings'
+    // Maven tool id
+    mavenInstallation = 'Maven3'
+
+    // Docker registry
+    dockerRegistry = 'docker-registry-<%= plurl %>'
+    dockerRegistryCredentials = 'nexus-docker'
+    dockerTool = 'docker-global'
+
+    // OpenShift
+    openshiftUrl = '<%= ocurl %>'
+    openShiftCredentials = 'openshift'
+    openShiftNamespace = '<%= ocn %>'
+
+
+
+
+
+

Stages

+
+
+

The pipeline consists of stages, and at the beginning of each stage it is declared for which branches the step will be executed.

+
+
+
+jenkinsfile stages +
+
+
+

Now it is time to create the stages.

+
+
+
+
+

Setup Jenkins tools

+
+
+

The first stage is one of the most dangerous, because in it on one hand the tools are added to the pipeline and to the path and on the other hand the values are tagged depending on the branch that is being executed. If you are going to create a ci/cd for a new branch or you are going to modify something, be very careful with everything that this first step declares.

+
+
+

This is an example of this stage:

+
+
+
+
script {
+    tool yarn
+    tool Chrome-stable
+    tool dockerTool
+
+    if (env.BRANCH_NAME.startsWith('release')) {
+        dockerTag = "release"
+        repositoryName = 'maven-releases'
+        dockerEnvironment = "_uat"
+        openShiftNamespace += "-uat"
+        sonarProjectKey = '-release'
+    }
+
+    if (env.BRANCH_NAME ==  'develop') {
+        dockerTag = "latest"
+        repositoryName = 'maven-snapshots'
+        dockerEnvironment = "_dev"
+        openShiftNamespace += "-dev"
+        sonarProjectKey = '-develop'
+    }
+
+    if (env.BRANCH_NAME ==  'master') {
+        dockerTag = "production"
+        repositoryName = 'maven-releases'
+        dockerEnvironment = '_prod'
+        openShiftNamespace += "-prod"
+        sonarProjectKey = ''
+    }
+
+    sh "yarn"
+}
+
+
+
+
+
+

Code lint analysis

+
+
+

The next stage is to analyze the code making a lint analysis. To do it your project should have a tslint file with the configuration (tslint.json).

+
+
+

Analyzing the code in your pipeline is as simple as executing the following command:

+
+
+
+
sh """yarn lint"""
+
+
+
+ + + + + +
+ + +Your project needs to have a script with tslint configuration (tslint.json). +
+
+
+
+
+

Execute tests

+
+
+

To test you application first of all your application should have created the tests and you should use one of the next two options:

+
+
+

Execute test with maven (It should be used by devon4j).

+
+
+
+
withMaven(globalMavenSettingsConfig: globalSettingsId, maven: mavenInstallation) {
+    sh "mvn clean test"
+}
+
+
+
+

Execute test with yarn (It should be used by devon4ng or devon4node).

+
+
+
+
sh """yarn test:ci"""
+
+
+
+ + + + + +
+ + +Remember that your project should have the tests created and, in case you do it with yarn or npm, your package.json should have the script declared. This is an example: "test:ci": "ng test --browsers ChromeHeadless --watch=false". +
+
+
+
+
+

SonarQube Analysis

+
+
+

It is time to see if your application complies the requirements of the sonar analysis.

+
+
+

To do it you could use one of the next two options:

+
+
+

Execute Sonar with sonarTool (It should be used by devon4ng or devon4node).

+
+
+
+
script {
+    def scannerHome = tool sonarTool
+    def props = readJSON file: 'package.json'
+    withSonarQubeEnv(sonarEnv) {
+        sh """
+            ${scannerHome}/bin/sonar-scanner \
+                -Dsonar.projectKey=${props.name}${sonarProjectKey} \
+                -Dsonar.projectName=${props.name}${sonarProjectKey} \
+                -Dsonar.projectVersion=${props.version} \
+                -Dsonar.sources=${srcDir} \
+                -Dsonar.typescript.lcov.reportPaths=coverage/lcov.info
+        """
+    }
+    timeout(time: 1, unit: 'HOURS') {
+        def qg = waitForQualityGate()
+        if (qg.status != 'OK') {
+            error "Pipeline aborted due to quality gate failure: ${qg.status}"
+        }
+    }
+}
+
+
+
+

Execute Sonar with maven (It should be used by devon4j).

+
+
+
+
script {
+    withMaven(globalMavenSettingsConfig: globalSettingsId, maven: mavenInstallation) {
+        withSonarQubeEnv(sonarEnv) {
+            // Change the project name (in order to simulate branches with the free version)
+            sh "cp pom.xml pom.xml.bak"
+            sh "cp api/pom.xml api/pom.xml.bak"
+            sh "cp core/pom.xml core/pom.xml.bak"
+            sh "cp server/pom.xml server/pom.xml.bak"
+
+            def pom = readMavenPom file: './pom.xml';
+            pom.artifactId = "${pom.artifactId}${sonarProjectKey}"
+            writeMavenPom model: pom, file: 'pom.xml'
+
+            def apiPom = readMavenPom file: 'api/pom.xml'
+            apiPom.parent.artifactId = pom.artifactId
+            apiPom.artifactId = "${pom.artifactId}-api"
+            writeMavenPom model: apiPom, file: 'api/pom.xml'
+
+            def corePom = readMavenPom file: 'core/pom.xml'
+            corePom.parent.artifactId = pom.artifactId
+            corePom.artifactId = "${pom.artifactId}-core"
+            writeMavenPom model: corePom, file: 'core/pom.xml'
+
+            def serverPom = readMavenPom file: 'server/pom.xml'
+            serverPom.parent.artifactId = pom.artifactId
+            serverPom.artifactId = "${pom.artifactId}-server"
+            writeMavenPom model: serverPom, file: 'server/pom.xml'
+
+            sh "mvn sonar:sonar"
+
+            sh "mv pom.xml.bak pom.xml"
+            sh "mv api/pom.xml.bak api/pom.xml"
+            sh "mv core/pom.xml.bak core/pom.xml"
+            sh "mv server/pom.xml.bak server/pom.xml"
+        }
+    }
+    timeout(time: 1, unit: 'HOURS') {
+        def qg = waitForQualityGate()
+        if (qg.status != 'OK') {
+            error "Pipeline aborted due to quality gate failure: ${qg.status}"
+        }
+    }
+}
+
+
+
+
+
+

Build

+
+
+

If SonarQube is passed, you could build your application. To do it, if you are using devon4ng or devon4node you only need to add the next command:

+
+
+

sh """yarn build"""

+
+
+ + + + + +
+ + +If you are using devon4j this and the next step Store in Nexus are making together using mvn deploy. +
+
+
+
+
+

Store in Nexus

+
+
+

Once the application has been built, you could find the artifacts stored in the dist folder. You should push these artifacts to store them in Nexus.

+
+
+

You can do it following one of the next options:

+
+
+

Use maven deploy config of your project (It should be used by devon4j).

+
+
+
+
withMaven(globalMavenSettingsConfig: globalSettingsId, maven: mavenInstallation) {
+    sh "mvn deploy -Dmaven.test.skip=true"
+}
+
+
+
+

Configure maven deploy in your pipeline (It should be used by devon4ng and devon4node).

+
+
+
+
script {
+    def props = readJSON file: 'package.json'
+    zip dir: 'dist/', zipFile: """${props.name}.zip"""
+    version = props.version
+    if (!version.endsWith("-SNAPSHOT") && env.BRANCH_NAME ==  'develop') {
+        version = "${version}-SNAPSHOT"
+        version = version.replace("-RC", "")
+    }
+
+    if (!version.endsWith("-RC") && env.BRANCH_NAME.startsWith('release')) {
+        version = "${version}-RC"
+        version = version.replace("-SNAPSHOT", "")
+    }
+
+    if (env.BRANCH_NAME ==  'master' && (version.endsWith("-RC") || version.endsWith("-SNAPSHOT"))){
+        version = version.replace("-RC", "")
+        version = version.replace("-SNAPSHOT", "")
+    }
+
+    withMaven(globalMavenSettingsConfig: globalSettingsId, maven: mavenInstallation) {
+        sh """
+            mvn deploy:deploy-file \
+                -DgroupId=${groupId} \
+                -DartifactId=${props.name} \
+                -Dversion=${version} \
+                -Dpackaging=zip \
+                -Dfile=${props.name}.zip \
+                -DrepositoryId=${repositoryId} \
+                -Durl=${repositoryUrl}${repositoryName}
+        """
+    }
+}
+
+
+
+
+
+

Create docker image

+
+
+

Now we need to use these artifacts to create a Docker image. To create the docker image you need an external server to do it. You could do it using one of the next:

+
+
+

Create docker image using OpenShift cluster

+
+
+

To create the docker image with this option you need to configure your OpenShift. You could read how to configure it here.

+
+
+
+
props = readJSON file: 'package.json'
+withCredentials([usernamePassword(credentialsId: "${openShiftCredentials}", passwordVariable: 'pass', usernameVariable: 'user')]) {
+    sh "oc login -u ${user} -p ${pass} ${openshiftUrl} --insecure-skip-tls-verify"
+    try {
+        sh "oc start-build ${props.name} --namespace=${openShiftNamespace} --from-dir=dist --wait"
+        sh "oc import-image ${props.name} --namespace=${openShiftNamespace} --from=${dockerRegistry}/${props.name}:${dockerTag} --confirm"
+    } catch (e) {
+        sh """
+            oc logs \$(oc get builds -l build=${props.name} --namespace=${openShiftNamespace} --sort-by=.metadata.creationTimestamp -o name | tail -n 1) --namespace=${openShiftNamespace}
+            throw e
+        """
+    }
+}
+
+
+
+ + + + + +
+ + +if your project is a maven project you should read the pom.xml file instead of the package.json, you could do it with the next command def pom = readMavenPom file: 'pom.xml'. Due to the fact that there are different variable names between those two files, remember to modify ${props.name} for ${pom.artifactId} in the code. +
+
+
+

Create docker image using docker server

+
+
+

To create the docker image with this option you need to install docker and configure where is the docker host in your jenkins.

+
+
+
+
docker.withRegistry("""${dockerRegistryProtocol}${dockerRegistry}""", dockerRegistryCredentials) {
+    def props = readJSON file: 'package.json'
+    def customImage = docker.build("${props.name}:${props.version}", "-f ${dockerFileName} .")
+    customImage.push()
+    customImage.push(dockerTag);
+}
+
+
+
+

here

+
+
+ + + + + +
+ + +if your project is a maven project you should read the pom.xml file instead of the package.json, you could do it with the next command def pom = readMavenPom file: 'pom.xml'. Due to the fact that there are different variable names between those two files, remember to modify ${props.name} for ${pom.artifactId} and ${props.version} for ${pom.version} in the code. +
+
+
+
+
+

Deploy docker image

+
+
+

Once you have the docker image in the registry we only need to import it into your deployment environment. We can do it executing one of the next commands:

+
+
+

Deploy docker image in OpenShift cluster

+
+
+

To deploy the docker image with this option you need to configure your OpenShift. You could read how to configure it here.

+
+
+
+
script {
+    props = readJSON file: 'package.json'
+    withCredentials([usernamePassword(credentialsId: "${openShiftCredentials}", passwordVariable: 'pass', usernameVariable: 'user')]) {
+        sh "oc login -u ${user} -p ${pass} ${openshiftUrl} --insecure-skip-tls-verify"
+        try {
+            sh "oc import-image ${props.name} --namespace=${openShiftNamespace} --from=${dockerRegistry}/${props.name}:${dockerTag} --confirm"
+        } catch (e) {
+            sh """
+                oc logs \$(oc get builds -l build=${props.name} --namespace=${openShiftNamespace} --sort-by=.metadata.creationTimestamp -o name | tail -n 1) --namespace=${openShiftNamespace}
+                throw e
+            """
+        }
+    }
+}
+
+
+
+ + + + + +
+ + +if your project is a maven project you should read the pom.xml file instead of the package.json, you could do it with the next command def pom = readMavenPom file: 'pom.xml'. Due to the fact that there are different variable names between those two files, remember to modify ${props.name} for ${pom.artifactId} in the code. +
+
+
+

Deploy docker image using docker server

+
+
+

To deploy the docker image with this option you need to install docker and configure your docker server and also integrate it with Jenkins.

+
+
+
+
script {
+    docker.withRegistry("""${dockerRegistryProtocol}${dockerRegistry}""", dockerRegistryCredentials) {
+        def props = readJSON file: 'package.json'
+        docker.image("${props.name}:${props.version}").pull()
+
+        def containerId = sh returnStdout: true, script: """docker ps -aqf "name=${containerName}${dockerEnvironment}" """
+        if (containerId?.trim()) {
+            sh "docker rm -f ${containerId.trim()}"
+        }
+
+        println """docker run -d --name ${containerName}${dockerEnvironment} --network=${networkName} ${dockerRegistry}/${props.name}:${props.version}"""
+        sh """docker run -d --name ${containerName}${dockerEnvironment} --network=${networkName} ${dockerRegistry}/${props.name}:${props.version}"""
+    }
+}
+
+
+
+ + + + + +
+ + +if your project is a maven project you should read the pom.xml file instead of the package.json, you could do it with the next command def pom = readMavenPom file: 'pom.xml'. Due to the fact that there are different variable names between those two files, remember to modify ${props.name} for ${pom.artifactId} and ${props.version} for ${pom.version} in the code. +
+
+
+
+
+

Check status

+
+
+

Now it is time to check if your pods are running ok.

+
+
+

To check if your pods are ok in OpenShift you should add the next code to your pipeline:

+
+
+
+
script {
+    props = readJSON file: 'package.json'
+    sleep 30
+    withCredentials([usernamePassword(credentialsId: "${openShiftCredentials}", passwordVariable: 'pass', usernameVariable: 'user')]) {
+        sh "oc login -u ${user} -p ${pass} ${openshiftUrl} --insecure-skip-tls-verify"
+        sh "oc project ${openShiftNamespace}"
+
+        def oldRetry = -1;
+        def oldState = "";
+
+        sh "oc get pods -l app=${props.name} > out"
+        def status = sh (
+            script: "sed 's/[\t ][\t ]*/ /g' < out | sed '2q;d' | cut -d' ' -f3",
+            returnStdout: true
+        ).trim()
+
+        def retry = sh (
+            script: "sed 's/[\t ][\t ]*/ /g' < out | sed '2q;d' | cut -d' ' -f4",
+            returnStdout: true
+        ).trim().toInteger();
+
+        while (retry < 5 && (oldRetry != retry || oldState != status)) {
+            sleep 30
+            oldRetry = retry
+            oldState = status
+
+            sh """oc get pods -l app=${props.name} > out"""
+            status = sh (
+                script: "sed 's/[\t ][\t ]*/ /g' < out | sed '2q;d' | cut -d' ' -f3",
+                returnStdout: true
+            ).trim()
+
+            retry = sh (
+                script: "sed 's/[\t ][\t ]*/ /g' < out | sed '2q;d' | cut -d' ' -f4",
+                returnStdout: true
+            ).trim().toInteger();
+        }
+
+        if(status != "Running"){
+            try {
+                sh """oc logs \$(oc get pods -l app=${props.name} --sort-by=.metadata.creationTimestamp -o name | tail -n 1)"""
+            } catch (e) {
+                sh "echo error reading logs"
+            }
+            error("The pod is not running, cause: " + status)
+        }
+    }
+}
+
+
+
+
+
+

Post operations

+
+
+

When all is finished, remember to clean your workspace.

+
+
+

post { + cleanup { + cleanWs() + } +}

+
+
+ + + + + +
+ + +You could also delete your dir adding the next command deleteDir(). +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-configure-jenkinsfile.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-configure-jenkinsfile.html new file mode 100644 index 00000000..5573e384 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-configure-jenkinsfile.html @@ -0,0 +1,904 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Jenkinsfile

+
+ +
+
+
+

Introduction

+
+
+
+jenkinsfile cicd activity diagram +
+
+
+

Here you are going to learn how you should configure the jenkinsfile of your project to apply CI/CD operations and enables automated application deployment.

+
+
+

Here you can find examples of the Jenkinsfile generated by cicdgen:

+
+
+ +
+
+

Next you could find an explanation about what is done in these Jenkinsfiles.

+
+
+
+
+

Environment values

+
+
+

At the top of the pipeline you should add the environment variables. In this tutorial you need the following variables:

+
+
+
+
    // sonarQube
+    // Name of the sonarQube tool
+    sonarTool = 'SonarQube'
+    // Name of the sonarQube environment
+    sonarEnv = "SonarQube"
+
+    // Nexus
+    // Artifact groupId
+    groupId = '<%= groupid %>'
+    // Nexus repository ID
+    repositoryId = 'pl-nexus'
+    // Nexus internal URL
+    repositoryUrl = 'http://nexus3-core:8081/nexus3/repository/'
+    // Maven global settings configuration ID
+    globalSettingsId = 'MavenSettings'
+    // Maven tool id
+    mavenInstallation = 'Maven3'
+
+    // Docker registry
+    dockerRegistry = 'docker-registry-<%= plurl %>'
+    dockerRegistryCredentials = 'nexus-docker'
+    dockerTool = 'docker-global'
+
+    // OpenShift
+    openshiftUrl = '<%= ocurl %>'
+    openShiftCredentials = 'openshift'
+    openShiftNamespace = '<%= ocn %>'
+
+
+
+
+
+

Stages

+
+
+

The pipeline consists of stages, and at the beginning of each stage it is declared for which branches the step will be executed.

+
+
+
+jenkinsfile stages +
+
+
+

Now it is time to create the stages.

+
+
+
+
+

Setup Jenkins tools

+
+
+

The first stage is one of the most dangerous, because in it on one hand the tools are added to the pipeline and to the path and on other hand the values are tagged depending on the branch that is being executed. If you are going to create a ci/cd for a new branch or you are going to modify something, be very careful with everything that this first step declares.

+
+
+

This is an example of this stage:

+
+
+
+
script {
+    tool yarn
+    tool Chrome-stable
+    tool dockerTool
+
+    if (env.BRANCH_NAME.startsWith('release')) {
+        dockerTag = "release"
+        repositoryName = 'maven-releases'
+        dockerEnvironment = "_uat"
+        openShiftNamespace += "-uat"
+        sonarProjectKey = '-release'
+    }
+
+    if (env.BRANCH_NAME ==  'develop') {
+        dockerTag = "latest"
+        repositoryName = 'maven-snapshots'
+        dockerEnvironment = "_dev"
+        openShiftNamespace += "-dev"
+        sonarProjectKey = '-develop'
+    }
+
+    if (env.BRANCH_NAME ==  'master') {
+        dockerTag = "production"
+        repositoryName = 'maven-releases'
+        dockerEnvironment = '_prod'
+        openShiftNamespace += "-prod"
+        sonarProjectKey = ''
+    }
+
+    sh "yarn"
+}
+
+
+
+
+
+

Code lint analysis

+
+
+

The next stage is to analyze the code making a lint analysis. To do it your project should have a tslint file with the configuration (tslint.json).

+
+
+

Analyzing the code in your pipeline is as simple as executing the following command:

+
+
+
+
sh """yarn lint"""
+
+
+
+ + + + + +
+ + +Your project needs to have a script with tslint configuration (tslint.json). +
+
+
+
+
+

Execute tests

+
+
+

To test your application, first of all your application should have the tests created, and you should use one of the next two options:

+
+
+

Execute test with maven (It should be used by devon4j).

+
+
+
+
withMaven(globalMavenSettingsConfig: globalSettingsId, maven: mavenInstallation) {
+    sh "mvn clean test"
+}
+
+
+
+

Execute test with yarn (It should be used by devon4ng or devon4node).

+
+
+
+
sh """yarn test:ci"""
+
+
+
+ + + + + +
+ + +Remember that your project should have the tests created and, in case you do it with yarn or npm, your package.json should have the script declared. This is an example "test:ci": "ng test --browsers ChromeHeadless --watch=false". +
+
+
+
+
+

SonarQube Analysis

+
+
+

It is time to see if your application complies with the requirements of the sonar analysis.

+
+
+

To do it you could use one of the next two options:

+
+
+

Execute Sonar with sonarTool (It should be used by devon4ng or devon4node).

+
+
+
+
script {
+    def scannerHome = tool sonarTool
+    def props = readJSON file: 'package.json'
+    withSonarQubeEnv(sonarEnv) {
+        sh """
+            ${scannerHome}/bin/sonar-scanner \
+                -Dsonar.projectKey=${props.name}${sonarProjectKey} \
+                -Dsonar.projectName=${props.name}${sonarProjectKey} \
+                -Dsonar.projectVersion=${props.version} \
+                -Dsonar.sources=${srcDir} \
+                -Dsonar.typescript.lcov.reportPaths=coverage/lcov.info
+        """
+    }
+    timeout(time: 1, unit: 'HOURS') {
+        def qg = waitForQualityGate()
+        if (qg.status != 'OK') {
+            error "Pipeline aborted due to quality gate failure: ${qg.status}"
+        }
+    }
+}
+
+
+
+

Execute Sonar with maven (It should be used by devon4j).

+
+
+
+
script {
+    withMaven(globalMavenSettingsConfig: globalSettingsId, maven: mavenInstallation) {
+        withSonarQubeEnv(sonarEnv) {
+            // Change the project name (in order to simulate branches with the free version)
+            sh "cp pom.xml pom.xml.bak"
+            sh "cp api/pom.xml api/pom.xml.bak"
+            sh "cp core/pom.xml core/pom.xml.bak"
+            sh "cp server/pom.xml server/pom.xml.bak"
+
+            def pom = readMavenPom file: './pom.xml';
+            pom.artifactId = "${pom.artifactId}${sonarProjectKey}"
+            writeMavenPom model: pom, file: 'pom.xml'
+
+            def apiPom = readMavenPom file: 'api/pom.xml'
+            apiPom.parent.artifactId = pom.artifactId
+            apiPom.artifactId = "${pom.artifactId}-api"
+            writeMavenPom model: apiPom, file: 'api/pom.xml'
+
+            def corePom = readMavenPom file: 'core/pom.xml'
+            corePom.parent.artifactId = pom.artifactId
+            corePom.artifactId = "${pom.artifactId}-core"
+            writeMavenPom model: corePom, file: 'core/pom.xml'
+
+            def serverPom = readMavenPom file: 'server/pom.xml'
+            serverPom.parent.artifactId = pom.artifactId
+            serverPom.artifactId = "${pom.artifactId}-server"
+            writeMavenPom model: serverPom, file: 'server/pom.xml'
+
+            sh "mvn sonar:sonar"
+
+            sh "mv pom.xml.bak pom.xml"
+            sh "mv api/pom.xml.bak api/pom.xml"
+            sh "mv core/pom.xml.bak core/pom.xml"
+            sh "mv server/pom.xml.bak server/pom.xml"
+        }
+    }
+    timeout(time: 1, unit: 'HOURS') {
+        def qg = waitForQualityGate()
+        if (qg.status != 'OK') {
+            error "Pipeline aborted due to quality gate failure: ${qg.status}"
+        }
+    }
+}
+
+
+
+
+
+

Build

+
+
+

If SonarQube is passed, you could build your application. To do it, if you are using devon4ng or devon4node you only need to add the next command:

+
+
+

sh """yarn build"""

+
+
+ + + + + +
+ + +If you are using devon4j this and the next step Store in Nexus are making together using mvn deploy. +
+
+
+
+
+

Store in Nexus

+
+
+

Once the application has been built, you could find the artifacts stored in the dist folder. You should push these artifacts to store them in Nexus.

+
+
+

You can do it following one of the next options:

+
+
+

Use maven deploy config of your project (It should be used by devon4j).

+
+
+
+
withMaven(globalMavenSettingsConfig: globalSettingsId, maven: mavenInstallation) {
+    sh "mvn deploy -Dmaven.test.skip=true"
+}
+
+
+
+

Configure maven deploy in your pipeline (It should be used by devon4ng and devon4node).

+
+
+
+
script {
+    def props = readJSON file: 'package.json'
+    zip dir: 'dist/', zipFile: """${props.name}.zip"""
+    version = props.version
+    if (!version.endsWith("-SNAPSHOT") && env.BRANCH_NAME ==  'develop') {
+        version = "${version}-SNAPSHOT"
+        version = version.replace("-RC", "")
+    }
+
+    if (!version.endsWith("-RC") && env.BRANCH_NAME.startsWith('release')) {
+        version = "${version}-RC"
+        version = version.replace("-SNAPSHOT", "")
+    }
+
+    if (env.BRANCH_NAME ==  'master' && (version.endsWith("-RC") || version.endsWith("-SNAPSHOT"))){
+        version = version.replace("-RC", "")
+        version = version.replace("-SNAPSHOT", "")
+    }
+
+    withMaven(globalMavenSettingsConfig: globalSettingsId, maven: mavenInstallation) {
+        sh """
+            mvn deploy:deploy-file \
+                -DgroupId=${groupId} \
+                -DartifactId=${props.name} \
+                -Dversion=${version} \
+                -Dpackaging=zip \
+                -Dfile=${props.name}.zip \
+                -DrepositoryId=${repositoryId} \
+                -Durl=${repositoryUrl}${repositoryName}
+        """
+    }
+}
+
+
+
+
+
+

Create docker image

+
+
+

Now we need to use these artifacts to create a Docker image. To create the docker image you need an external server to do it. You could do it using one of the next:

+
+
+

Create docker image using OpenShift cluster

+
+
+

To create the docker image with this option you need to configure your OpenShift. You could read how to configure it here.

+
+
+
+
props = readJSON file: 'package.json'
+withCredentials([usernamePassword(credentialsId: "${openShiftCredentials}", passwordVariable: 'pass', usernameVariable: 'user')]) {
+    sh "oc login -u ${user} -p ${pass} ${openshiftUrl} --insecure-skip-tls-verify"
+    try {
+        sh "oc start-build ${props.name} --namespace=${openShiftNamespace} --from-dir=dist --wait"
+        sh "oc import-image ${props.name} --namespace=${openShiftNamespace} --from=${dockerRegistry}/${props.name}:${dockerTag} --confirm"
+    } catch (e) {
+        sh """
+            oc logs \$(oc get builds -l build=${props.name} --namespace=${openShiftNamespace} --sort-by=.metadata.creationTimestamp -o name | tail -n 1) --namespace=${openShiftNamespace}
+            throw e
+        """
+    }
+}
+
+
+
+ + + + + +
+ + +if your project is a maven project you should read the pom.xml file instead of the package.json, you could do it with the next command def pom = readMavenPom file: 'pom.xml'. Due to the fact that there are different variable names between those two files, remember to modify ${props.name} for ${pom.artifactId} in the code. +
+
+
+

Create docker image using docker server

+
+
+

To create the docker image with this option you need to install docker and configure where is the docker host in your jenkins.

+
+
+
+
docker.withRegistry("""${dockerRegistryProtocol}${dockerRegistry}""", dockerRegistryCredentials) {
+    def props = readJSON file: 'package.json'
+    def customImage = docker.build("${props.name}:${props.version}", "-f ${dockerFileName} .")
+    customImage.push()
+    customImage.push(dockerTag);
+}
+
+
+
+

here

+
+
+ + + + + +
+ + +if your project is a maven project you should read the pom.xml file instead of the package.json, you could do it with the next command def pom = readMavenPom file: 'pom.xml'. Due to the fact that there are different variable names between those two files, remember to modify ${props.name} for ${pom.artifactId} and ${props.version} for ${pom.version} in the code. +
+
+
+
+
+

Deploy docker image

+
+
+

Once you have the docker image in the registry we only need to import it into your deployment environment. We can do it executing one of the next commands:

+
+
+

Deploy docker image in OpenShift cluster

+
+
+

To deploy the docker image with this option you need to configure your OpenShift. You could read how to configure it here.

+
+
+
+
script {
+    props = readJSON file: 'package.json'
+    withCredentials([usernamePassword(credentialsId: "${openShiftCredentials}", passwordVariable: 'pass', usernameVariable: 'user')]) {
+        sh "oc login -u ${user} -p ${pass} ${openshiftUrl} --insecure-skip-tls-verify"
+        try {
+            sh "oc import-image ${props.name} --namespace=${openShiftNamespace} --from=${dockerRegistry}/${props.name}:${dockerTag} --confirm"
+        } catch (e) {
+            sh """
+                oc logs \$(oc get builds -l build=${props.name} --namespace=${openShiftNamespace} --sort-by=.metadata.creationTimestamp -o name | tail -n 1) --namespace=${openShiftNamespace}
+                throw e
+            """
+        }
+    }
+}
+
+
+
+ + + + + +
+ + +if your project is a maven project you should read the pom.xml file instead of the package.json, you could do it with the next command def pom = readMavenPom file: 'pom.xml'. Due to the fact that there are different variable names between those two files, remember to modify ${props.name} for ${pom.artifactId} in the code. +
+
+
+

Deploy docker image using docker server

+
+
+

To deploy the docker image with this option you need to install docker and configure your docker server and also integrate it with Jenkins.

+
+
+
+
script {
+    docker.withRegistry("""${dockerRegistryProtocol}${dockerRegistry}""", dockerRegistryCredentials) {
+        def props = readJSON file: 'package.json'
+        docker.image("${props.name}:${props.version}").pull()
+
+        def containerId = sh returnStdout: true, script: """docker ps -aqf "name=${containerName}${dockerEnvironment}" """
+        if (containerId?.trim()) {
+            sh "docker rm -f ${containerId.trim()}"
+        }
+
+        println """docker run -d --name ${containerName}${dockerEnvironment} --network=${networkName} ${dockerRegistry}/${props.name}:${props.version}"""
+        sh """docker run -d --name ${containerName}${dockerEnvironment} --network=${networkName} ${dockerRegistry}/${props.name}:${props.version}"""
+    }
+}
+
+
+
+ + + + + +
+ + +if your project is a maven project you should read the pom.xml file instead of the package.json, you could do it with the next command def pom = readMavenPom file: 'pom.xml'. Due to the fact that there are different variable names between those two files, remember to modify ${props.name} for ${pom.artifactId} and ${props.version} for ${pom.version} in the code. +
+
+
+
+
+

Check status

+
+
+

Now it is time to check if your pods are running ok.

+
+
+

To check if your pods are ok in OpenShift you should add the next code to your pipeline:

+
+
+
+
script {
+    props = readJSON file: 'package.json'
+    sleep 30
+    withCredentials([usernamePassword(credentialsId: "${openShiftCredentials}", passwordVariable: 'pass', usernameVariable: 'user')]) {
+        sh "oc login -u ${user} -p ${pass} ${openshiftUrl} --insecure-skip-tls-verify"
+        sh "oc project ${openShiftNamespace}"
+
+        def oldRetry = -1;
+        def oldState = "";
+
+        sh "oc get pods -l app=${props.name} > out"
+        def status = sh (
+            script: "sed 's/[\t ][\t ]*/ /g' < out | sed '2q;d' | cut -d' ' -f3",
+            returnStdout: true
+        ).trim()
+
+        def retry = sh (
+            script: "sed 's/[\t ][\t ]*/ /g' < out | sed '2q;d' | cut -d' ' -f4",
+            returnStdout: true
+        ).trim().toInteger();
+
+        while (retry < 5 && (oldRetry != retry || oldState != status)) {
+            sleep 30
+            oldRetry = retry
+            oldState = status
+
+            sh """oc get pods -l app=${props.name} > out"""
+            status = sh (
+                script: "sed 's/[\t ][\t ]*/ /g' < out | sed '2q;d' | cut -d' ' -f3",
+                returnStdout: true
+            ).trim()
+
+            retry = sh (
+                script: "sed 's/[\t ][\t ]*/ /g' < out | sed '2q;d' | cut -d' ' -f4",
+                returnStdout: true
+            ).trim().toInteger();
+        }
+
+        if(status != "Running"){
+            try {
+                sh """oc logs \$(oc get pods -l app=${props.name} --sort-by=.metadata.creationTimestamp -o name | tail -n 1)"""
+            } catch (e) {
+                sh "echo error reading logs"
+            }
+            error("The pod is not running, cause: " + status)
+        }
+    }
+}
+
+
+
+
+
+

Post operations

+
+
+

When all is finished, remember to clean your workspace.

+
+
+

post { + cleanup { + cleanWs() + } +}

+
+
+ + + + + +
+ + +You could also delete your dir adding the next command deleteDir(). +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-configure-nexus.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-configure-nexus.html new file mode 100644 index 00000000..d1490bb3 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-configure-nexus.html @@ -0,0 +1,441 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Nexus Configuration

+
+
+

In this document you will see how you can configure Nexus repository and how to integrate it with jenkins.

+
+
+
+
+

Login in Nexus

+
+
+

The first time you enter in Nexus you need to log in with the user 'admin' and the password that is inside the path: /volumes/nexus/nexus-data +Then you can change that password and create a new one.

+
+
+
+
+

Prerequisites

+
+ +
+
+
+

Repositories

+
+
+

You need to have one repository for snapshots, another for releases and another one for release-candidates. Normally you use maven2 (hosted) repositories and if you are going to use a docker registry, you need docker (hosted) too.

+
+
+

To create a repository in Nexus go to the administration clicking on the gear icon at top menu bar. Then on the left menu click on Repositories and press the Create repository button.

+
+
+
+nexus create repository +
+
+
+

Now you must choose the type of the repository and configure it. This is an example for Snapshot:

+
+
+
+nexus create repository form +
+
+
+
+
+

Create user to upload/download content

+
+
+

Once you have the repositories, you need a user to upload/download content. To do it go to the administration clicking on the gear icon at top menu bar. Then on the left menu click on Users and press the Create local user button.

+
+
+
+nexus create user +
+
+
+

Now you need to fill a form like this:

+
+
+
+nexus create user form +
+
+
+
+
+

Jenkins integration

+
+
+

To use Nexus in our pipelines you need to configure Jenkins.

+
+
+
+
+

Customize jenkins

+
+
+

The first time you enter jenkins, you are asked for the plugins to be installed. +We select install suggested plugins and later we can add the plugins that we need depending on the project necessities.

+
+
+
+plugins jenkins +
+
+
+

Then we need to create our first admin user, we can do it like this:

+
+
+
+jenkins first admin user +
+
+
+

The next step is the jenkins URL:

+
+
+
+jenkins url +
+
+
+

Your jenkins setup is ready!

+
+
+
+
+

Add nexus user credentials

+
+
+

First of all you need to add the user created in the step before to Jenkins. To do it (on the left menu) click on Credentials, then on System. Now you could access to Global credentials (unrestricted).

+
+
+
+nexus jenkins credentials +
+
+
+

Enter on it and you could see a button on the left to Add credentials. Click on it and fill a form like this:

+
+
+
+nexus jenkins credentials form +
+
+
+
+
+

Add the nexus user to maven global settings

+
+
+

In order to do this, you will need the Config File Provider plugin, so we need to download it. Go to Jenkins→Manage jenkins→Manage plugins and "available" tab and search for it:

+
+
+
+jenkins config fp +
+
+
+

Click on "Download now and install after restart".

+
+
+

Now you need to go to Manage Jenkins clicking on left menu and enter in Managed files.

+
+
+

Click on Add a new config/Global Maven settings.xml, change the id for a new one more readable:

+
+
+
+jenkins maven settings +
+
+
+

Then click on "Submit"

+
+
+
+jenkins global maven +
+
+
+

Edit the Global Maven settings.xml to add your nexus repositories credentials(the ones you created before) as you could see in the next image:

+
+
+
+nexus jenkins global maven form +
+
+
+

And you are done.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-configure-sonarqube.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-configure-sonarqube.html new file mode 100644 index 00000000..5eaa6039 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-configure-sonarqube.html @@ -0,0 +1,401 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

SonarQube Configuration

+
+
+

To use SonarQube you need to use a token to connect, and to know the results of the analysis you need a webhook. Also, you need to install and configure SonarQube in Jenkins.

+
+
+
+
+

Generate user token

+
+
+

To generate the user token, go to your account clicking in the left icon on the top menu bar.

+
+
+ + + + + +
+ + +If you don’t have any account, you can use the admin/admin user/pass +
+
+
+
+sonarqube administration +
+
+
+

Go to security tab and generate the token.

+
+
+
+sonarqube token +
+
+
+
+
+

Webhook

+
+
+

When you execute our SonarQube scanner in our pipeline job, you need to ask SonarQube if the quality gate has been passed. To do it you need to create a webhook.

+
+
+

Go to administration clicking the option on the top bar menu and select the tab for Configuration.

+
+
+

Then search in the left menu to go to webhook section and create your webhook.

+
+
+

An example for Production Line:

+
+
+
+sonarqube webhook +
+
+
+
+
+

Jenkins integration

+
+
+

To use SonarQube in our pipelines you need to configure Jenkins to integrate SonarQube.

+
+
+
+
+

SonarQube Scanner

+
+
+

First, you need to configure the scanner. Go to Manage Jenkins clicking on left menu and enter in Global Tool Configuration.

+
+
+

Go to SonarQube Scanner section and add a new SonarQube scanner like this.

+
+
+
+sonarqube jenkins scanner +
+
+
+
+
+

SonarQube Server

+
+
+

Now you need to configure where is our SonarQube server using the user token that you create before. Go to Manage Jenkins clicking on left menu and enter in Configure System.

+
+
+

For example, in Production Line the server is the next:

+
+
+
+sonarqube jenkins server +
+
+
+ + + + + +
+ + +Remember, the token was created at the beginning of this SonarQube configuration. +
+
+
+
+
+

SonarQube configuration

+
+
+

Now is time to configure your sonar in order to measure the quality of your code. To do it, please follow the official documentation about our plugins and Quality Gates and Profiles here.

+
+
+
+
+

How to ignore files

+
+
+

Usually the developers need to ignore some files from Sonar analysis. To do that, they must add the next line as a parameter of the sonar execution to their Jenkinsfile in the SonarQube code analysis step.

+
+
+
+
-Dsonar.exclusions='**/*.spec.ts, **/*.model.ts, **/*mock.ts'
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-create-new-devonfw-project.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-create-new-devonfw-project.html new file mode 100644 index 00000000..8c11116e --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-create-new-devonfw-project.html @@ -0,0 +1,290 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

How to create new devonfw project

+
+
+

Here you can find the official guides to start new devonfw projects:

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-custom-plugin-for-sonar-AzureDevops.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-custom-plugin-for-sonar-AzureDevops.html new file mode 100644 index 00000000..c3d0d2e4 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-custom-plugin-for-sonar-AzureDevops.html @@ -0,0 +1,335 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Install and use custom sonar plugin in Azure Devops

+
+
+

By default, the sonar plugin is not capable of being used in every branch you want; to do this you need to purchase a license or customize the current plugin in order to satisfy your needs.

+
+
+

How to customize the plugin is not the purpose of this documentation; this documentation covers the installation and use of it.

+
+
+

If you want to install a custom plugin, sign into your Azure Devops organization and once you are in, click on the marketplace icon:

+
+
+
+marketplace +
+
+
+

Select Browse marketplace > Publish extension

+
+
+

Choose the extension you want to install and click on the options

+
+
+
+extension +
+
+
+

Important:

+
+
+

You need to choose the organization for which you are going to use the extension and share it, if not, you won’t be able to install it.

+
+
+
+share unshare +
+
+
+

Once you’ve done this click on View extension and 'Get it free', the extension will be downloaded and you will be able to use it in the next screen

+
+
+
+install +
+
+
+

If there are no organizations you can see the possible causes here.

+
+
+

Another cause might be that you forgot to share the extension.

+
+
+

Note: If the install button does not appear, it’s possible that you don’t have permissions to install it, so you will need to talk with the owner of the org. +Another possibility is that you can request an installation.

+
+
+

Once installed, in the pipeline wizard it will appear and you will be able to select it.

+
+
+
+wizard +
+
+
+

We can see in the image the default plugin and the customized one.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-deployment-dsf4openshift-automatic-configuration.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-deployment-dsf4openshift-automatic-configuration.html new file mode 100644 index 00000000..75449e7b --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-deployment-dsf4openshift-automatic-configuration.html @@ -0,0 +1,556 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

OpenShift deployment environment automatic configuration

+
+
+

In this section you will see how you can create a new environment instance in your OpenShift cluster to deploy devonfw projects using docker images.

+
+
+
+
+

Prerequisites

+
+ +
+
+
+

Add OpenShift Client to Jenkins

+
+
+

To integrate it, you need to have installed the plugin OpenShift Client. To install it go to Manage Jenkins clicking on left menu and enter in Manage Plugins. Go to Available tab and search it using the filter textbox in the top right corner and install it.

+
+
+
+
+

Configuration OpenShift Client in Jenkins

+
+
+

Second, you need to configure the OC Client. Go to Manage Jenkins clicking on left menu and enter in Global Tool Configuration.

+
+
+

Go to OpenShift Client Tools section and add a new one like this.

+
+
+
+openshift jenkins plugin +
+
+
+
+
+

devonfw project

+
+
+

You need to have a devonfw project in a git repository or a docker image uploaded to a docker registry.

+
+
+
+
+

Communication between components

+
+
+

Jenkins must have access to git, docker registry and OpenShift.

+
+
+

Openshift must have access to docker registry.

+
+
+
+
+

Jenkinsfiles to Configure OpenShift

+
+
+

You can find one Jenkinsfile per devonfw technology in devonfw shop floor repository to configure automatically your OpenShift cluster.

+
+
+
+
+

How to use it

+
+
+

To use it you need to follow the next steps

+
+
+
+
+

Create a new pipeline

+
+
+

You need to create a new pipeline in your repository and point it to Jenkinsfile in devonfw shop floor repository.

+
+
+
+openshift jenkins configure environments repo +
+
+
+

Note: In the script path section you should use the Jenkinsfile of the technology that you need.

+
+
+
+
+

Build with parameters

+
+
+

The first time that you execute the pipeline it is going to fail, because Jenkins does not know that this pipeline needs parameters to execute. The best thing you can do is stop it manually when Declarative: Checkout SCM is over.

+
+
+

Then you could see a button to Build with Parameters, click on it and fill the next form, these are the parameters:

+
+
+

Docker registry credentials for OpenShift

+
+
+

CREATE_SECRET: This option allows you to add the credentials of your docker registry in your OpenShift and store them as a secret called docker-registry + registry_secret_name_suffix value.

+
+
+

Remember that you only need one secret to connect with your registry per namespace, if you are going to add more than one application in the same namespace that use the same registry, use the same name suffix and please do not create more than one secret in the same namespace. The namespace is the OpenShift project when you are going to deploy your application.

+
+
+

You can see your secrets stored in OpenShift going to OpenShift and click on the left menu:

+
+
+
+openshift secrets menu +
+
+
+ + + + + +
+ + +If the secret exists, you should uncheck the checkbox and fill the name suffix to use it. +
+
+
+

REGISTRY_SECRET_NAME_SUFFIX: This is the suffix of the name for your docker registry credentials stored in OpenShift as a secret. The name is going to be docker-registry + this suffix, if you use more than one docker-registry in the same namespace you need to add a suffix. For example you could add the name of your project, then to have the name as docker-registry-myprojectname you should use -myprojectname value.

+
+
+

Build your docker image using OpenShift and store it in your docker registry

+
+
+

CREATE_DOCKER_BUILDER: This option allows you to create a build configuration in your OpenShift to create the docker images of your project and store them in your docker registry. If you are going to create the builder, your application is needed, you need to specify where is your git repository and which is the branch and credentials to use it.

+
+
+

The following parameters of this section are only necessary if a builder is to be created.

+
+
+

GIT_REPOSITORY: This is the url of your git repository.

+
+
+ + + + + +
+ + +If you are using production line, remember to use the internal route of your repository. To use it you must change the base url of your production line for the internal route http://gitlab-core:80/gitlab. For example, if your production line repository is https://shared-services.pl.s2-eu.capgemini.com/gitlab/boat/boat-frontend.git use http://gitlab-core:80/gitlab/boat/boat-frontend.git)
+
+
+

GIT_BRANCH: This is the branch that we are going to use for creating the first docker image. The next time that you are going to use the builder you could use another branches.

+
+
+

GIT_CREDENTIALS: This is the credentials id stored in your jenkins to download the code from your git repository.

+
+
+

BUILD_SCRIPT: In case of use devon4ng or devon4node you could specify which is the build script used to build and create the first docker image with this builder.

+
+
+

JAVA_VERSION In case of use devon4j this is the java version used for your docker image.

+
+
+

Docker registry information

+
+
+

DOCKER_REGISTRY: This is the url of your docker registry.

+
+
+ + + + + +
+ + +If you are using production line, the url of your registry is docker-registry- + your production line url. For example, if your production line is shared-services.pl.s2-eu.capgemini.com your docker registry is docker-registry-shared-services.pl.s2-eu.capgemini.com. +
+
+
+

If you cannot access to your docker registry, please open an incident in i4u.

+
+
+

DOCKER_REGISTRY_CREDENTIALS: This is the credentials id stored in your jenkins to download or upload docker images in your docker registry.

+
+
+

DOCKER_TAG: This is the tag that is going to be used for the builder to push the docker image and for the deployment config to pull and deploy it.

+
+
+

OpenShift cluster information

+
+
+

OPENSHIFT_URL: This is the url of your OpenShift cluster.

+
+
+

OPENSHIFT_CREDENTIALS: This is the credentials id stored in your jenkins to use OpenShift.

+
+
+

OPENSHIFT_NAMESPACE: This is the name of the project in your OpenShift where you are going to use. The name of the project in OpenShift is called namespace.

+
+
+

Take care because although you see at the top of your OpenShift interface the name of the project that you are using, this name is the display-name and not the value that you need. To obtain the correct value you must check your OpenShift url like you see in the next image:

+
+
+
+openshift namespace name +
+
+
+

APP_NAME_SUFFIX: The name of all things created in your OpenShift project are going to be called as the configuration of your application says. Normally, our projects use a suffix that depends on the environment. You can see the values in the next list:

+
+
+
    +
  • +

    For develop branch we use -dev

    +
  • +
  • +

    For release branch we use -uat

    +
  • +
  • +

    For master branch we use -prod

    +
  • +
+
+
+

HOSTNAME: If you do not specify anything, OpenShift is going to autogenerate a valid url for your application. You can modify the default value, but be sure that you configure everything to serve your application at the route that you specify.

+
+
+

SECURED_PROTOCOL: If true, the protocol for the route will be https otherwise will be http.

+
+
+

Jenkins tools

+
+
+

All those parameters are the name of the tools in your Jenkinsfile.

+
+
+

To obtain it you need enter in your Jenkins and go to Manage Jenkins clicking on left menu and enter in Global Tool Configuration or in Managed files.

+
+
+

OPENSHIFT_TOOL: Is located in Global tool configuration.

+
+
+
+openshift jenkins plugin name +
+
+
+

NODEJS_TOOL: Is located in Global tool configuration.

+
+
+
+jenkins openshift tool +
+
+
+

YARN_TOOL: Is located in Global tool configuration, inside the custom tools.

+
+
+
+jenkins yarn tool name +
+
+
+

GLOBAL_SETTINGS_ID Is located in Managed files. You need to click on edit button and take the id.

+
+
+
+jenkins config file management +
+
+
+
+jenkins edit configuration file +
+
+
+

MAVEN_INSTALLATION Is located in Global tool configuration.

+
+
+
+jenkins mave tool name +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-deployment-dsf4openshift-manual-configuration.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-deployment-dsf4openshift-manual-configuration.html new file mode 100644 index 00000000..49012435 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-deployment-dsf4openshift-manual-configuration.html @@ -0,0 +1,773 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

OpenShift deployment environment manual configuration

+
+
+

In this section you will see how you can create a new environment instance in your OpenShift cluster to deploy devonfw projects using docker images.

+
+
+
+
+

Prerequisites

+
+ +
+
+
+

devonfw project

+
+
+

You need to have a devonfw project in a git repository or a docker image uploaded to a docker registry.

+
+
+
+
+

Communication between components

+
+
+

Openshift must have access to docker registry.

+
+
+
+
+

Download OpenShift Client Tools

+
+
+

First of all you need to download the OpenShift client, you can find it here.

+
+
+

Remember that what you need to download oc Client Tools and not OKD Server.

+
+
+ + + + + +
+ + +This tutorial has been made with the version 3.10.0 of the client, it is recommended to use the most current client, but if it does not work, it is possible that the instructions have become obsolete or that the OpenShift used needs another older/newer version of the client. To download a specific version of the client you can find here the older versions and the version 3.10.0. +
+
+
+
+
+

Add oc client to path

+
+
+

Once you have downloaded the client you have to add it to the PATH environment variable.

+
+
+
+
+

Log into OpenShift with admin account

+
+
+

You can log using a terminal and executing the next instructions:

+
+
+
+
oc login $OpenShiftUrl
+
+
+
+ + + + + +
+ + +You need a valid user to log in. +
+
+
+
+
+

Select the project where you are going to create the environment

+
+
+
+
oc project $projectName
+
+
+
+
+
+

Add all the secrets that you need

+
+
+

For example, to create a secret for a nexus repository you should execute the next commands:

+
+
+
+
oc create secret docker-registry $nameForSecret --docker-server=${dockerRegistry} --docker-username=${user} --docker-password=${pass} --docker-email=no-reply@email.com
+
+
+
+
+
+

Configure OpenShift

+
+ +
+
+
+

Configure builds to create docker image using OpenShift

+
+
+

If you need to create docker images of your projects you could use OpenShift to do it (of course, only if you have enough rights).

+
+
+

To do it, follow the next steps.

+
+
+
+
+

== Create new builds configs

+
+
+

The first thing you need to do to create a new environment is prepare the buildconfigs for the front and for the middleware, and raise the default memory limits for the middleware. You can do it using a terminal and executing the next instructions:

+
+
+

These are a summary about the parameters used in our commands:

+
+
+
    +
  • +

    ${dockerRegistry}: The url of the docker repository.

    +
  • +
  • +

    ${props.name}: The name of the project (for example could be find on package.json)

    +
  • +
  • +

    ${dockerTag}: The tag of the image

    +
  • +
+
+
+ + + + + +
+ + +From now on we will refer to the name that you are going to give to the environment as $environment. Remember to modify it for the correct value in all instructions.
+
+
+
+
+

== devon4ng build config

+
+
+

You need to create nginx build config with docker.

+
+
+
+
oc new-build --strategy docker --binary --docker-image nginx:alpine-perl --name=${props.name}-$environment --to=${dockerRegistry}/${props.name}:${dockerTag} --to-docker=true
+
+
+
+ + + + + +
+ + +You need nginx:alpine-perl to read the environment config file in openshift, if you are not going to use it, you could use nginx:latest instead. +
+
+
+
+
+

== devon4node build config

+
+
+
+
oc new-build --strategy docker --binary --docker-image node:lts --name=${props.name}-$environment --to=${dockerRegistry}/${props.name}:${dockerTag} --to-docker=true
+
+
+
+
+
+

== devon4j build config

+
+
+
+
oc new-build --strategy docker --binary --docker-image openjdk:<version> --name=${props.name}-$environment --to=${dockerRegistry}/${props.name}:${dockerTag} --to-docker=true
+
+
+
+ + + + + +
+ + +You need to specify the <version> of java used for your project. Also you can use the -alpine image. This image is based on the popular Alpine Linux project. Alpine Linux is much smaller than most distribution base images (~5MB), and thus leads to much slimmer images in general. More information on docker hub. +
+
+
+
+
+

== How to use the build

+
+
+

In this step is where you will build a docker image from a compiled application.

+
+
+
+
+

== == Prerequisite

+
+
+

To build the source in OpenShift, first of all you need to compile your source and obtain the artifacts "dist folder" or download it from a repository. Normally the artifacts have been built on Jenkins and have been stored in Nexus.

+
+
+

To download it, you can access to your registry, select the last version and download the ".tar". The next image shows an example of where is the link to download it, marked in yellow:

+
+
+
+nexus stored artifacts +
+
+
+
+
+

== == Build in OpenShift

+
+
+

When you have the artifacts, you can send them to your openshift and build them using your buildconfig that you created on the previous step. This is going to create a new docker image and push it to your registry.

+
+
+

If your docker registry need credentials you should use a secret. You could add it to your buildconfig using the next command:

+
+
+
+
oc set build-secret --push bc/${props.name}-$environment ${nameForSecret}
+
+
+
+

Now you can use your build config and push the docker image to your registry. To do it you need to use a terminal and execute the following:

+
+
+
+
oc start-build ${props.name}-$environment --from-dir=${artifactsPath} --follow
+
+
+
+ + + + + +
+ + +${artifactsPath} is the path where you have the artifacts of the prerequisite (On jenkins is the dist folder generated by the build). +
+
+
+ + + + + +
+ + +Maybe you need to raise your memory or CPU limits. +
+
+
+
+
+

Configure new environment

+
+
+

Now it is time to configure the environment.

+
+
+
+
+

== Prerequisite

+
+
+

You need a docker image of your application. You could create it using OpenShift as you see in the last step.

+
+
+
+
+

== Create new app on OpenShift

+
+
+

To create new app you need to use the next command.

+
+
+
+
oc new-app --docker-image=${artifactsPath} --name=${props.name}-$environment --source-secret=${nameForSecret}
+
+
+
+ + + + + +
+ + +You could add environment variables using -e $name=$value +
+
+
+ + + + + +
+ + +If you do not need to use a secret remove the end part of the command --source-secret=${nameForSecret} +
+
+
+
+
+

== Create routes

+
+
+

Finally, you need to add a route to access the service.

+
+
+

Add http route

+
+
+

If you want to create an http route execute the following command in a terminal:

+
+
+
+
oc expose svc/${props.name}-$environment
+
+
+
+

Add https route

+
+
+

If you want to create an https route you can do it executing the following command:

+
+
+
+
oc create route edge --service=${props.name}-$environment
+
+
+
+

If you want to change the default route path you can use the command --hostname=$url. For example:

+
+
+
+
oc expose svc/${props.name}-$environment --hostname=$url
+
+oc create route edge --service=${props.name}-$environment --hostname=$url
+
+
+
+
+
+

Import new images from registry

+
+
+

When you have new images in the registry you must import them to OpenShift. You could do it executing the next commands:

+
+
+
+
oc import-image ${props.name}-$environment --from=${dockerRegistry}/${props.name}:${dockerTag} --confirm
+
+
+
+ + + + + +
+ + +Maybe you need to raise your memory or CPU limits. It is explained below. +
+
+
+
+
+

Raise/decrease memory or CPU limits

+
+
+

If you need to raise (or decrease) the memory or CPU limits that you need you could do it for your deployments and builders configurations following the next steps.

+
+
+
+
+

== For deployments

+
+
+

You could do it in OpenShift using the user interface. To do it you should enter in OpenShift and go to deployments.

+
+
+
+openshift deployments menu +
+
+
+

At the right top, you could see a drop down actions, click on it and you could edit the resource limits of the container.

+
+
+
+openshift deployments actions +
+
+
+
+openshift deployments resource limits +
+
+
+

Maybe you should modify the resource limits of the pod too. To do it you should click on drop down actions and go to edit YAML. Then you could see something like the next image.

+
+
+
+openshift deployments yaml resources +
+
+
+

In the image, you could see that appear resources two times. One at the bottom of the image, this are the container resources that you modified on the previous paragraph and another one at the top of the image. The resources of the top are for the pod, you should give to it at least the same of the sum for all containers that the pod use.

+
+
+

Also you could do it using command line interface and executing the next command:

+
+
+

To modify pod limits

+
+
+
+
oc patch dc/boat-frontend-test --patch '{"spec":{"strategy":{"resources":{"limits":{"cpu": "100m", "memory": "100Mi"}, "requests":{"cpu": "100m", "memory": "100Mi"}}}}}'
+
+
+
+

To modify container limits

+
+
+

When this guide was written, OpenShift had a bug, so you cannot do it from the command line interface.

+
+
+ + + + + +
+ + +If that command did not work and you received an error like this error: unable to parse "'{spec:…​": yaml: found unexpected end of stream, try to use the patch using "" instead of ''. It looks like this: --patch "{\"spec\":…​\"}}}}" +
+
+
+
+
+

== For builders

+
+
+

You could do it using command line interface and executing the next command:

+
+
+
+
oc patch bc/${props.name}${APP_NAME_SUFFIX} --patch '{"spec":{"resources":{"limits":{"cpu": "125m", "memory": "400Mi"},"requests":{"cpu": "125m", "memory": "400Mi"}}}}'
+
+
+
+ + + + + +
+ + +If that command did not work and you received an error like this error: unable to parse "'{spec:…​": yaml: found unexpected end of stream, try to use the patch using "" instead of ''. It looks like this: --patch "{\"spec\":…​\"}}}}" +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-deployment-dsf4openshift.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-deployment-dsf4openshift.html new file mode 100644 index 00000000..651e05d6 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-deployment-dsf4openshift.html @@ -0,0 +1,371 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

dsf4openshift deployment environment

+
+
+

In this section you will see how you can create a new environment instance in OpenShift and the things that you must add to the Jenkinsfiles of your repository to deploy a branch in this new environment. To conclude you are going to see how to add config files for environment in the source code of the applications.

+
+
+
+
+

Configure your OpenShift to deploy your devonfw projects

+
+ +
+
+
+

Prerequisites

+
+ +
+
+
+

== OpenShift Cluster

+
+
+

To have your deployment environment with OpenShift you need to have an OpenShift Cluster.

+
+
+
+
+

Manual configuration

+
+
+

Here you can find all that you need to know to configure OpenShift manually.

+
+
+
+
+

Automatic configuration

+
+
+

Here you can find all that you need to know to configure OpenShift automatically.

+
+
+
+
+

Service integration with jenkins

+
+ +
+
+
+

Prerequisites

+
+
+

To integrate it, you need to have installed the plugin OpenShift Client. To install it go to Manage Jenkins clicking on left menu and enter in Manage Plugins. Go to Available tab and search it using the filter textbox in the top right corner and install it.

+
+
+
+
+

Configuration

+
+
+

Second, you need to configure the OC Client. Go to Manage Jenkins clicking on left menu and enter in Global Tool Configuration.

+
+
+

Go to OpenShift Client Tools section and add a new one like this.

+
+
+
+openshift jenkins plugin +
+
+
+
+
+

Upgrade your Jenkinsfile

+
+
+

Now it is time to add/upgrade the next stages in to your Jenkinsfile:

+
+
+

Add create docker image stage.

+
+
+

Add deploy docker image stage.

+
+
+

Add check status stage.

+
+
+

Upgrade Setup Jenkins tools stage.

+
+
+ + + + + +
+ + +Remember to upgrade your parameters to difference which environment is used per branch. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-how-to-use.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-how-to-use.html new file mode 100644 index 00000000..e7b4f369 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-how-to-use.html @@ -0,0 +1,421 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

How to use it

+
+
+

This is the documentation about shop floor and its different tools. Here you are going to learn how to create new projects, so that they can include continuous integration and continuous delivery processes, and be deployed automatically in different environments.

+
+
+
+
+

Prerequisites - Provisioning environment

+
+
+

To start working you need to have some services running in your provisioning environment, such as Jenkins (automation server), GitLab (git repository), SonarQube (program analysis), Nexus (software repository) or similar.

+
+
+

To host those services we recommend to have a Production Line instance but you can use other platforms. Here is the list for the different options:

+
+
+ +
+
+
+
+

Step 1 - Configuration and services integration

+
+
+

The first step is configuring your services and integrate them with jenkins. Here you have an example about how to manually configure the next services:

+
+
+ +
+
+
+
+

Step 2 - Create the project

+
+ +
+
+
+

Create and integrate git repository

+
+
+

The second step is to create your git repository and integrate it with Jenkins.

+
+
+

Here you can find a manual guide about how to do it:

+
+
+ +
+
+
+
+

Start new devonfw project

+
+
+

It is time to create your devonfw project:

+
+
+

You can find all that you need about how to create a new devonfw project

+
+
+
+
+

cicd configuration

+
+
+

Now you need to add cicd files in your project.

+
+
+
+
+

== Manual configuration

+
+ +
+
+
+

== Jenkinsfile

+
+
+

Here you can find all that you need to know to do your Jenkinsfile.

+
+
+
+
+

== Dockerfile

+
+
+

Here you can find all that you need to know to do your Dockerfile.

+
+
+
+
+

== Automatic configuration

+
+ +
+
+
+

== cicdgen

+
+
+

If you are using production line for provisioning you could use cicdgen to configure automatically almost everything explained in the manual configuration. To do it see the cicdgen documentation.

+
+
+
+
+

Step 3 - Deployment

+
+
+

The third step is to configure your deployment environment. Here is the list of the different options:

+
+
+ +
+
+
+
+

Step 4 - Monitoring

+
+
+

Here you can find information about tools for monitoring:

+
+
+
    +
  • +

    build monitor view for Jenkins. With this tool you will be able to see in real time what is the state of your Jenkins pipelines.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-istio-guide.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-istio-guide.html new file mode 100644 index 00000000..1fd47891 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-istio-guide.html @@ -0,0 +1,855 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

ISTIO Service Mesh Implementation Guide

+
+ +
+
+
+

Introduction

+
+
+

A service mesh separates applications from network functions such as resilience and fault tolerance.

+
+
+

A service mesh addresses the below functions without changing the application code.

+
+
+
    +
  • +

    Test the new versions of services without impacting the users.

    +
  • +
  • +

    Scale the services.

    +
  • +
  • +

    Find the services with the help of service registry.

    +
  • +
  • +

    Test against failures.

    +
  • +
  • +

    Secure service-to-service communication.

    +
  • +
  • +

    Route traffic to a specific way.

    +
  • +
  • +

    Circuit breaking and fault injection.

    +
  • +
  • +

    Monitor the services and collect matrices.

    +
  • +
  • +

    Tracing.

    +
  • +
+
+
+

ISTIO service mesh is an open environment for Connecting, Securing, Monitoring services across the environments.

+
+
+
+
+

ISTIO Architecture

+
+
+

image

+
+
+

ISTIO is split into data plane and control plane. Refer ISTIO Architecture

+
+
+
+
+

Data Plane

+
+
+

The data plane is a set of intelligent proxies (Envoy) deployed as sidecars that mediate and control all network communication among microservices.

+
+
+

image

+
+
+
+
+

Control Plane

+
+
+

The control plane is managing and configuring proxies to route traffic and enforcing policies.

+
+
+
    +
  • +

    Pilot manages all the proxies and responsible for routing

    +
  • +
  • +

    Mixer collects telemetry and performs policy checks

    +
  • +
  • +

    Citadel does Certificate management (TLS certs to Envoys)

    +
  • +
+
+
+
+
+

ISTIO installation

+
+
+

Download ISTIO from releases

+
+
+

istioctl install --set profile=demo

+
+
+

Here the demo profile is used; there are other profiles intended for production.

+
+
+

Verify installation:

+
+
+

kubectl get all -n istio-system

+
+
+

Inject the sidecar container automatically by issuing the below command.

+
+
+

kubectl label namespace default istio-injection=enabled

+
+
+

Verify:

+
+
+

kubectl get namespace -L istio-injection

+
+
+

For more installation guides, refer ISTIO Installation

+
+
+
+
+

Traffic Management

+
+
+

ISTIO’s traffic management model relies on the Envoy proxies, which are deployed as sidecars to services.

+
+
+

Below are the traffic management API resources

+
+
+
    +
  • +

    Virtual Services

    +
  • +
  • +

    Destination Rules

    +
  • +
  • +

    Gateways

    +
  • +
  • +

    Service Entries

    +
  • +
  • +

    Sidecars

    +
  • +
+
+
+

A virtual service, a higher-level abstraction of a Kubernetes Service, lets you configure how requests are routed to a service within an Istio service mesh. Your mesh may have multiple virtual services or none. A virtual service consists of routing rules that are evaluated in order.

+
+
+
+
+

Dark Launch

+
+
+

The following virtual service routes requests to different versions of a service depending on whether the request comes from a testuser. If the testuser calls then version v1 will be used, and for others version v2.

+
+
+

image

+
+
+
+
+

Blue/Green deployment

+
+
+

In a blue/green deployment, two versions of the application are running. Both versions are live on different domain names; in this example they are mtsj.com and test.mtsj.com.

+
+
+
    +
  1. +

    Define 2 virtual services for mtsj v1 and v2 versions.

    +
  2. +
  3. +

    Define DestinationRule and configure the subsets for v1 and v2.

    +
  4. +
+
+
+

image

+
+
+

When end user browses mtsj.com, the gateway call goes to subset v1 of the virtual service and redirects to destination version v1, and for test.mtsj.com to version v2.

+
+
+
+
+

Canary Deployment (Traffic Splitting)

+
+
+

In a canary deployment, the old and the new version of the application are both live. ISTIO can be configured to control what percentage of the traffic goes to each version.

+
+
+

image

+
+
+

Here, the traffic is divided with 75% going to version V1 and 25% to version V2. As we gain confidence, the percentage routed to the latest version can be increased, and the traffic to the old version can be gradually reduced and finally removed.

+
+
+

You may refer ISTIO Traffic Management for more details.

+
+
+
+
+

MyThaiStar Implementation

+
+
+

In this example dish will have two versions and the traffic will be routed alternately using the ISTIO configuration.

+
+
+

Find all configuration files in istio/trafficmanagement/canary directory under mythaistarmicroservices example.

+
+
+
    +
  1. +

    MyThaiStar defines below

    +
    +
      +
    1. +

      Service

      +
    2. +
    3. +

      Service Account

      +
    4. +
    5. +

      Deployment

      +
    6. +
    +
    +
  2. +
+
+
+

The above configurations are defined in a single yaml file for all the different services like angular, dish, image etc.

+
+
+
    +
  1. +

    dish-v2: Dish Version 2 can be kept separately in different yaml file.

    +
  2. +
  3. +

    mts-gateway defines the ingress gateway which routes the outbound request to each service.

    +
  4. +
  5. +

    destination-rule-all defines the subsets here for later traffic routing

    +
  6. +
  7. +

    dish-50-50: traffic routing for different versions of dishmanagement.

    +
  8. +
+
+
+
+
+

Network Resilience

+
+ +
+
+
+

Timeout

+
+
+

Istio lets you adjust the timeouts using virtual services. The default timeout is 15 seconds.

+
+
+

image

+
+
+
+
+

Retry

+
+
+

A retry setting specifies the maximum number of times an Envoy proxy attempts to connect to a service if the initial call fails.

+
+
+

image

+
+
+

Retries can also be configured on Gateway Error, Connection failure, Connection Refused or any 5xx error from the application.

+
+
+

retryOn: gateway-error,connect-failure,refused-stream,5xx

+
+
+
+
+

Circuit Breakers

+
+
+

By defining a destination rule, you can set limits for calls to individual hosts within a service, such as the number of concurrent connections or how many times calls to this host may fail before the limit is reached.

+
+
+
    +
  • +

    Outlier Detection is an ISTIO Resiliency strategy to detect unusual host behaviour and evict the unhealthy hosts from the set of load balanced healthy hosts inside a cluster.

    +
  • +
  • +

    If a request is sent to a service instance and it fails (returns a 50X error code), then ISTIO ejects the instance from the load balanced pool for a specified duration.

    +
  • +
+
+
+

image

+
+
+
+
+

Fault Injection

+
+
+

Two types of faults can be generated using ISTIO. This is useful for testing.

+
+
+

Delays: timing failures.

+
+
+

Aborts: crash failures.

+
+
+

The below example is a crash-failure virtual service. It is configured so that the testuser receives an HTTP status 500 error. The application works fine for all other users.

+
+
+

image

+
+
+

The below virtual service is configured to inject a 10s delay for all requests.

+
+
+

image

+
+
+
+
+

Security

+
+
+

ISTIO provides a security solution that has the below functions.

+
+
+
    +
  • +

    Traffic encryption

    +
  • +
  • +

    Mutual TLS and fine-grained access policies.

    +
  • +
  • +

    Auditing tools

    +
  • +
+
+
+
+
+

Authentication

+
+
+

ISTIO provides two types of authentication.

+
+
+
    +
  • +

    Peer authentication, secures service to service authentication

    +
  • +
  • +

    Request authentication is end user authentication to verify credential attached to the request.

    +
  • +
+
+
+
+
+

Mutual TLS Authentication

+
+
+

By default, the TLS protocol only proves the identity of the server to the client. Mutual TLS authentication ensures that traffic is secure and trusted in both directions between the client and server.

+
+
+

All traffic between services with proxies uses mutual TLS by default.

+
+
+
+
+

Peer Authentication

+
+
+

Peer authentication has Permissive, Strict and Disabled modes. With permissive mode, a service accepts both plain text and mutual TLS traffic. Permissive mode is good at the time of onboarding, and you should switch to Strict mode later.

+
+
+

The authentication policy can be applied to mesh-wide, namespace wide or workload specific using the selector field.

+
+
+

image

+
+
+

Here the policy is applied to the workload bookings.

+
+
+

Check the default mesh policy:

+
+
+

kubectl describe meshpolicy default

+
+
+
+
+

Request authentication

+
+
+

Request authentication policies specify the values needed to validate JWT tokens.

+
+
+

|===
|Authentication |Applies to |Uses |Identity
|Peer authentication |Service to service |mTLS |source.principal
|Request authentication |End user authentication |JWT |request.auth.principal
|===

+
+
+
+
+

Authorization

+
+
+

Apply an authorization policy to the workload/namespace/mesh to enforce the access control. Supports ALLOW and DENY actions.

+
+
+
+
+

Deny All

+
+
+

The below example authorization policy, having no rules, denies access to all workloads in the admin namespace.

+
+
+

image

+
+
+

Example below allowing the GET methods from order service.

+
+
+

image

+
+
+

Example below denies the request to the /registered path for requests without request principals.

+
+
+

image

+
+
+

You may refer ISTIO Security for more details.

+
+
+
+
+

Observability

+
+
+

ISTIO generates

+
+
+
    +
  • +

    Metrics - for monitoring latency, traffic, errors and saturation.

    +
  • +
  • +

    Distributed Traces to identify call flows and service dependencies

    +
  • +
  • +

    Access Logs enable auditing of service behaviour down to the individual service level.

    +
  • +
+
+
+
+
+

Grafana dashboard

+
+
+

Grafana and Prometheus are preconfigured addons on ISTIO. To enable, choose the configuration profile which has Prometheus and Grafana enabled. Eg: Demo profile

+
+
+

Verify Prometheus and Grafana running in the cluster.

+
+
+

kubectl get pods -n istio-system

+
+
+
+
+

Kiali dashboard

+
+
+

The Kiali dashboard helps you understand the structure of your service mesh by displaying the topology. The demo profile enables Kiali dashboard also.

+
+
+

Access the Kiali dashboard. The default user name is admin and default password is admin.

+
+
+

istioctl dashboard kiali

+
+
+

You may refer ISTIO Observability

+
+
+
+
+

Minikube Troubleshooting Tips

+
+
+

This documentation provides troubleshooting tips for working with minikube on a local machine.

+
+
+
    +
  1. +

    Always start minikube with a minimum of 4GB of memory or more if available. Using command minikube start --memory=4096

    +
  2. +
  3. +

    If minikube is not starting or throwing any error even after multiple attempts. Try the below tips:

    +
    +
      +
    1. +

      Delete the minikube in your local machine using minikube delete and do a fresh minikube start.

      +
    2. +
    3. +

      In any case, if minikube is not starting even after the above step, go to .minikube folder under the users directory and delete it manually. Now try starting minikube.

      +
    4. +
    +
    +
  4. +
  5. +

    Set docker environment in minikube using minikube docker-env. Now all the docker commands that are run will be on the docker inside minikube. So building your application after executing the above command will have the application docker images available to minikube.

    +
    +
      +
    1. +

      To exit minikube docker environment use minikube docker-env -u

      +
    2. +
    +
    +
  6. +
  7. +

    In any case, if you face any error related to docker image such as Failed to pull image, or image not found errors we will have to manually push the application docker image to minikube docker cache using the below commands.

    +
  8. +
  9. +

    For better results - stop minikube using minikube stop command.

    +
  10. +
  11. +

    Execute the command minikube cache add imageName/tagName.

    +
  12. +
  13. +

    Now start the minikube. To verify if the docker image has been added to minikube docker execute minikube ssh docker images.

    +
  14. +
  15. +

    To remove any docker image from minikube docker stop any containers running that docker image and then execute minikube cache delete imageName/tagName.

    +
  16. +
  17. +

    To reload any docker image to minikube docker environment, execute minikube cache reload.

    +
  18. +
  19. +

    In any case, if the docker images are not getting removed from minikube docker environment then navigate to .minikube/cache/images and then delete the particular image.

    +
  20. +
+
+
+

Execute the below command to make the Grafana available.

+
+
+

kubectl -n istio-system port-forward $(kubectl -n istio-system get pod -l app=grafana -o jsonpath='{.items[0].metadata.name}') 3000:3000 &

+
+
+

Use the below URLs to view the dashboard in local machine.

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-mirabaud-cicd-environment-setup.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-mirabaud-cicd-environment-setup.html new file mode 100644 index 00000000..957be91f --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-mirabaud-cicd-environment-setup.html @@ -0,0 +1,807 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Mirabaud CICD Environment Setup

+
+
+

Initial requirements:

+
+
+
    +
  • +

    OS: RHEL 6.5

    +
  • +
+
+
+

Remote setup in CI machine (located in the Netherlands)

+
+
+
+
    - Jenkins
+    - Nexus
+    - GitLab
+    - Mattermost
+    - Atlassian Crucible
+    - SonarQube
+
+
+
+
+
+

1. Install Docker and Docker Compose in RHEL 6.5

+
+ +
+
+
+

Docker

+
+
+

Due to that OS version, the only way to have Docker running in the CI machine is by installing it from the EPEL repository (Extra Packages for Enterprise Linux).

+
+
+
    +
  1. +

    Add EPEL

    +
  2. +
+
+
+
+
##rpm -iUvh http://dl.fedoraproject.org/pub/epel/6/x86_64/epel-release-6-8.noarch.rpm
+
+
+
+
    +
  1. +

    Install docker.io from that repository

    +
  2. +
+
+
+
+
##yum -y install docker-io
+
+
+
+
    +
  1. +

    Start Docker daemon

    +
  2. +
+
+
+
+
##service docker start
+
+
+
+
    +
  1. +

    Check the installation

    +
  2. +
+
+
+
+
##docker -v
+Docker version 1.7.1, build 786b29d/1.7.1
+
+
+
+
+
+

Docker Compose

+
+
+

Download and install it via curl. It will use this site.

+
+
+
+
##curl -L https://github.com/docker/compose/releases/download/1.5.0/docker-compose-`uname -s`-`uname -m` > /usr/local/bin/docker-compose
+
+##chmod +x /usr/local/bin/docker-compose
+
+
+
+

Add it to your sudo path:

+
+
+
    +
  1. +

    Find out where it is:

    +
  2. +
+
+
+
+
##echo $PATH
+
+
+
+
    +
  1. +

    Copy the docker-compose file from /usr/local/bin/ to your sudo PATH.

    +
  2. +
+
+
+
+
##docker-compose -v
+docker-compose version 1.5.2, build 7240ff3
+
+
+
+
+
+

2. Directories structure

+
+
+

Several directories have been added to organize files related to Docker (like docker-compose.yml) and the Docker volumes for each service. Here’s how it looks:

+
+
+
+
/home
+    /[username]
+        /jenkins
+            /volumes
+                /jenkins_home
+        /sonarqube
+            /volumes
+                /conf
+                /data
+                /extensions
+                /lib
+                    /bundled-plugins
+        /nexus
+            /volumes
+                /nexus-data
+        /crucible
+            /volumes
+                /
+        /gitlab
+            docker-compose.yml
+            /volumes
+                /etc
+                    /gitlab
+                /var
+                    /log
+                    /opt
+        /mattermost
+            docker-compose.yml
+            /volumes
+                /db
+                    /var
+                        /lib
+                            /postgresql
+                                /data
+                /app
+                    /mattermost
+                        /config
+                        /data
+                        /logs
+                /web
+                    /cert
+
+
+
+
+
+

3. CICD Services with Docker

+
+
+

Some naming conventions have been followed, such as naming containers mirabaud_[service].

+
+
+

Several folders have been created to store each service’s volumes, docker-compose.yml(s), extra configuration settings and so on:

+
+
+
+
+

Jenkins

+
+ +
+
+
+

Command

+
+
+
+
##docker run -d -p 8080:8080 -p 50000:50000 --name=mirabaud_jenkins \
+    -v /home/[username]/jenkins/volumes/jenkins_home:/var/jenkins_home \
+    jenkins
+
+
+
+
+
+

Generate keystore

+
+
+
+
keytool -importkeystore -srckeystore server.p12 -srcstoretype pkcs12 -srcalias 1 -destkeystore newserver.jks -deststoretype jks -destalias server
+
+
+
+
+
+

Start Jenkins with SSL (TODO: make a docker-compose.yml for this):

+
+
+
+
sudo docker run -d --name mirabaud_jenkins -v /jenkins:/var/jenkins_home -p 8080:8443 jenkins --httpPort=-1 --httpsPort=8443 --httpsKeyStore=/var/jenkins_home/certs/keystore.jks --httpsKeyStorePassword=Mirabaud2017
+
+
+
+
+
+

Volumes

+
+
+
+
volumes/jenkins_home:/var/jenkins_home
+
+
+
+
+
+

SonarQube

+
+ +
+
+
+

Command

+
+
+
+
##docker run -d -p 9000:9000 -p 9092:9092 --name=mirabaud_sonarqube \
+    -v /home/[username]/sonarqube/volumes/conf:/opt/sonarqube/conf \
+    -v /home/[username]/sonarqube/volumes/data:/opt/sonarqube/data \
+    -v /home/[username]/sonarqube/volumes/extensions:/opt/sonarqube/extensions \
+    -v /home/[username]/sonarqube/volumes/lib/bundled-plugins:/opt/sonarqube/lib/bundled-plugins \
+    sonarqube
+
+
+
+
+
+

Volumes

+
+
+
+
volumes/conf:/opt/sonarqube/conf
+volumes/data:/opt/sonarqube/data
+volumes/extensions:/opt/sonarqube/extensions
+volumes/lib/bundled-plugins:/opt/sonarqube/lib/bundled-plugins
+
+
+
+
+
+

Nexus

+
+ +
+
+
+

Command

+
+
+
+
##docker run -d -p 8081:8081 --name=mirabaud_nexus\
+    -v /home/[username]/nexus/nexus-data:/sonatype-work
+    sonatype/nexus
+
+
+
+
+
+

Volumes

+
+
+
+
volumes/nexus-data/:/sonatype-work
+
+
+
+
+
+

Atlassian Crucible

+
+ +
+
+
+

Command

+
+
+
+
##docker run -d -p 8084:8080 --name=mirabaud_crucible \
+    -v /home/[username]/crucible/volumes/data:/atlassian/data/crucible
+    mswinarski/atlassian-crucible:latest
+
+
+
+
+
+

Volumes

+
+
+
+
volumes/data:/atlassian/data/crucible
+
+
+
+
+
+

4. CICD Services with Docker Compose

+
+
+

Both services have been deployed by using the # docker-compose up -d command from their root directories (/gitlab and /mattermost). The syntax of the two docker-compose.yml files is the one corresponding to the 1st version (due to docker-compose v1.5).

+
+
+
+
+

GitLab

+
+ +
+
+
+

docker-compose.yml

+
+
+
+
mirabaud:
+    image: 'gitlab/gitlab-ce:latest'
+    restart: always
+    ports:
+            - '8888:80'
+    volumes:
+            - '/home/[username]/gitlab/volumes/etc/gitlab:/etc/gitlab'
+            - '/home/[username]/gitlab/volumes/var/log:/var/log/gitlab'
+            - '/home/[username]/gitlab/volumes/var/opt:/var/opt/gitlab'
+
+
+
+
+
+

Command (docker)

+
+
+
+
docker run -d -p 8888:80 --name=mirabaud_gitlab \
+    -v /home/[username]/gitlab/volumes/etc/gitlab/:/etc/gitlab \
+    -v /home/[username]/gitlab/volumes/var/log:/var/log/gitlab \
+    -v /home/[username]/gitlab/volumes/var/opt:/var/opt/gitlab \
+    gitlab/gitlab-ce
+
+
+
+
+
+

Volumes

+
+
+
+
volumes/etc/gitlab:/etc/gitlab
+volumes/var/opt:/var/opt/gitlab
+volumes/var/log:/var/log/gitlab
+
+
+
+
+
+

Mattermost

+
+ +
+
+
+

docker-compose.yml:

+
+
+
+
db:
+  image: mattermost/mattermost-prod-db
+  restart: unless-stopped
+  volumes:
+    - ./volumes/db/var/lib/postgresql/data:/var/lib/postgresql/data
+    - /etc/localtime:/etc/localtime:ro
+  environment:
+    - POSTGRES_USER=mmuser
+    - POSTGRES_PASSWORD=mmuser_password
+    - POSTGRES_DB=mattermost
+
+app:
+  image: mattermost/mattermost-prod-app
+  links:
+    - db:db
+  restart: unless-stopped
+  volumes:
+    - ./volumes/app/mattermost/config:/mattermost/config:rw
+    - ./volumes/app/mattermost/data:/mattermost/data:rw
+    - ./volumes/app/mattermost/logs:/mattermost/logs:rw
+    - /etc/localtime:/etc/localtime:ro
+  environment:
+    - MM_USERNAME=mmuser
+    - MM_PASSWORD=mmuser_password
+    - MM_DBNAME=mattermost
+
+web:
+  image: mattermost/mattermost-prod-web
+  ports:
+    - "8088:80"
+    - "8089:443"
+  links:
+    - app:app
+  restart: unless-stopped
+  volumes:
+    - ./volumes/web/cert:/cert:ro
+    - /etc/localtime:/etc/localtime:ro
+
+
+
+
+
+

SSL Certificate

+
+
+

How to generate the certificates:

+
+
+

Get the crt and key from CA or generate a new one self-signed. Then:

+
+
+
+
// 1. create the p12 keystore
+##openssl pkcs12 -export -in cert.crt -inkey mycert.key -out certkeystore.p12
+
+// 2. export the pem certificate with password
+##openssl pkcs12 -in certkeystore.p12 -out cert.pem
+
+// 3. export the pem certificate without password
+##openssl rsa -in cert.pem -out key-no-password.pem
+
+
+
+

SSL:

+
+
+

Copy the cert and the key without password at:

+
+
+

./volumes/web/cert/cert.pem

+
+
+

and

+
+
+

./volumes/web/cert/key-no-password.pem

+
+
+

Restart the server and the SSL should be enabled at port 8089 using HTTPS.

+
+
+
+
+

Volumes

+
+
+
+
-- db --
+volumes/db/var/lib/postgresql/data:/var/lib/postgresql/data
+/etc/localtime:/etc/localtime:ro                                # absolute path
+
+-- app --
+volumes/app/mattermost/config:/mattermost/config:rw
+volumes/app/mattermost/data:/mattermost/data:rw
+volumes/app/mattermost/logs:/mattermost/logs:rw
+/etc/localtime:/etc/localtime:ro                                # absolute path
+
+-- web --
+volumes/web/cert:/cert:ro
+/etc/localtime:/etc/localtime:ro                                # absolute path
+
+
+
+
+
+

5. Service Integration

+
+
+

All integrations have been done following the CICD Services Integration guides:

+
+ +
+ + + + + +
+ + +These guides may be obsolete. You can find the official configuration guides here. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-mirabaud-jenkins-gitLab-integration.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-mirabaud-jenkins-gitLab-integration.html new file mode 100644 index 00000000..350cc480 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-mirabaud-jenkins-gitLab-integration.html @@ -0,0 +1,479 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Jenkins - GitLab integration

+
+
+

The first step to have a Continuous Integration system for your development is to make sure that all your changes to your team’s remote repository are evaluated by the time they are pushed. That usually implies the usage of so-called webhooks. You’ll find a fancy explanation about what Webhooks are in here.

+
+
+

To resume what we’re doing here, we are going to prepare our Jenkins and our GitLab so when a developer pushes some changes to the GitLab repository, a pipeline in Jenkins gets triggered. Just like that, in an automatic way.

+
+
+
+
+

1. Jenkins GitLab plugin

+
+
+

As it usually happens, some Jenkins plug-in(s) must be installed. In this case, let’s install those related with GitLab:

+
+
+
+jenkins gitlab plugins +
+
+
+
+
+

2. GitLab API Token

+
+
+

To communicate with GitLab from Jenkins, we will need to create an authentication token from your GitLab user settings. A good practice for this would be to create it from a machine user. Something like (i.e.) devonfw-ci/******.

+
+
+
+gitlab access token +
+
+
+

Simply adding a name to it and a date for it to expire is enough:

+
+
+
+gitlab access token generation +
+
+
+
+gitlab access token generated +
+
+
+

As GitLab said, you should make sure you don’t lose your token. Otherwise you would need to create a new one.

+
+
+

This will allow Jenkins to connect with the right permissions to our GitLab server.

+
+
+
+
+

3. Create "GitLab API" Token credentials

+
+
+

Those credentials will use that token already generated in GitLab to connect once we declare the GitLab server in the Global Jenkins configuration. Obviously, those credentials must be GitLab API token-like.

+
+
+
+jenkins gitlab api token credentials kind +
+
+
+

Then, we add the generated token in the API token field:

+
+
+
+jenkins gitlab api token credentials complete +
+
+
+

Look in your Global credentials if they had been correctly created:

+
+
+
+jenkins gitlab api token credentials +
+
+
+
+
+

4. Create GitLab connection in Jenkins

+
+
+

Specify a GitLab connection in your Jenkins’s Manage Jenkins > Configure System configuration. This will tell Jenkins where is our GitLab server, a user to access it from and so on.

+
+
+

You’ll need to give it a name, for example, related with what this GitLab is dedicated for (specific clients, internal projects…​). Then, the Gitlab host URL is just where your GitLab server is. If you have it locally, that field should look similar to:

+
+
+
    +
  • +

    Connection name: my-local-gitlab

    +
  • +
  • +

    Gitlab host URL: http://localhost:${PORT_NUMBER}

    +
  • +
+
+
+

Finally, we select our recently GitLab API token as credentials.

+
+
+
+jenkins gitlab connection +
+
+
+
+
+

5. Jenkins Pipeline changes

+
+ +
+
+
+

5.1 Choose GitLab connection in Pipeline’s General configuration

+
+
+

First, our pipeline should allow us to add a GitLab connection to connect to (the already created one).

+
+
+
+jenkins pipeline gitlab connection +
+
+
+

In the case of the local example, could be like this:

+
+
+
    +
  • +

    GitLab connection: my-local-gitlab

    +
  • +
  • +

    GitLab Repository Name: myusername/webhook-test (for example)

    +
  • +
+
+
+
+
+

5.2 Create a Build Trigger

+
+
+
    +
  1. +

    You should already see your GitLab project’s URL (as you stated in the General settings of the Pipeline).

    +
  2. +
  3. +

    Write .*build.* in the comment for triggering a build

    +
  4. +
  5. +

    Specify or filter the branch of your repo you want use as target. That means, whenever a git action is done to that branch (for example, master), this Pipeline is going to be built.

    +
  6. +
  7. +

    Generate a Secret token (to be added in the yet-to-be-created GitLab webhook).

    +
  8. +
+
+
+
+jenkins pipeline build trigger +
+
+
+
+
+

6. GitLab Webhook

+
+
+
    +
  1. +

    Go to you GitLab project’s Settings > Integration section.

    +
  2. +
  3. +

    Add the path to your Jenkins Pipeline. Make sure you add project instead of job in the path.

    +
  4. +
  5. +

    Paste the generated Secret token of your Jenkins pipeline

    +
  6. +
  7. +

    Select your git action that will trigger the build.

    +
  8. +
+
+
+
+gitlab webhook +
+
+
+
+
+

7. Results

+
+
+

After all those steps you should have a result similar to this in your Pipeline:

+
+
+
+jenkins pipeline result +
+
+
+

Enjoy the Continuous Integration! :)

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-mirabaud-jenkins-nexus-integration.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-mirabaud-jenkins-nexus-integration.html new file mode 100644 index 00000000..f3cde6d0 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-mirabaud-jenkins-nexus-integration.html @@ -0,0 +1,457 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Jenkins - Nexus integration

+
+
+

Nexus is used to both host dependencies for devonfw projects to download (common Maven ones, custom ones such as ojdb and even devonfw so-far-IP modules). Moreover, it will host our projects' build artifacts (.jar, .war, …​) and expose them for us to download, wget and so on. A team should have a bidirectional relation with its Nexus repository.

+
+
+
+
+

1. Jenkins credentials to access Nexus

+
+
+

By default, when Nexus is installed, it contains 3 user credentials for different purposes. The admin ones look like this: admin/admin123. There are also other 2: deployment/deployment123 and TODO.

+
+
+
+
// ADD USER TABLE IMAGE FROM NEXUS
+
+
+
+

In this case, let’s use the ones with the greater permissions: admin/admin123.

+
+
+

Go to Credentials > System (left sidebar of Jenkins) then to Global credentials (unrestricted) on the page table and on the left sidebar again click on Add Credentials.

+
+
+

This should be shown in your Jenkins:

+
+
+
+jenkins new nexus credentials +
+
+
+

Fill the form like this:

+
+
+
+jenkins new nexus credentials filled +
+
+
+

And click in OK to create them. Check if the whole thing went as expected:

+
+
+
+jenkins new nexus credentials completed +
+
+
+
+
+

2. Jenkins Maven Settings

+
+
+

Those settings are also configured (or maybe not-yet-configured) in our devonfw distributions in:

+
+
+
+
/${devonfw-dist-path}
+    /software
+        /maven
+            /conf
+                settings.xml
+
+
+
+

Go to Manage Jenkins > Managed files and select Add a new Config in the left sidebar.

+
+
+
+jenkins new maven settings +
+
+
+

The ID field will get automatically filled with a unique value if you don’t set it up. No problems about that. Click on Submit and let’s create some Servers Credentials:

+
+
+
+jenkins new maven settings servers +
+
+
+

Those Server Credentials will allow Jenkins to access to the different repositories/servers that are going to be declared afterwards.

+
+
+

Let’s create 4 server credentials.

+
+
+
    +
  • +

    my.nexus: Will serve as general profile for Maven.

    +
  • +
  • +

    mynexus.releases: When a mvn deploy process is executed, this will tell Maven where to push releases to.

    +
  • +
  • +

    mynexus.snapshots: The same as before, but with snapshots instead.

    +
  • +
  • +

    mynexus.central: Just in case we want to install a specific dependency that is not by default in the Maven Central repository (such as ojdbc), Maven will point to it instead.

    +
  • +
+
+
+
+jenkins new maven settings servers credentials +
+
+
+

A more or less complete Jenkins Maven settings would look like this:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
+          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+          xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">
+
+    <mirrors>
+        <mirror>
+            <id>mynexus.central</id>
+            <mirrorOf>central</mirrorOf>
+            <name>central</name>
+            <url>http://${URL-TO-YOUR-NEXUS-REPOS}/central</url>
+        </mirror>
+    </mirrors>
+
+    <profiles>
+        <profile>
+            <id>my.nexus</id>
+            <!-- 3 REPOS ARE DECLARED -->
+            <repositories>
+                <repository>
+                    <id>mynexus.releases</id>
+                    <name>mynexus Releases</name>
+                    <url>http://${URL-TO-YOUR-NEXUS-REPOS}/releases</url>
+                    <releases>
+                        <enabled>true</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </releases>
+                    <snapshots>
+                        <enabled>false</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </snapshots>
+                </repository>
+                <repository>
+                    <id>mynexus.snapshots</id>
+                    <name>mynexus Snapshots</name>
+                    <url>http://${URL-TO-YOUR-NEXUS-REPOS}/snapshots</url>
+                    <releases>
+                        <enabled>false</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </releases>
+                    <snapshots>
+                        <enabled>true</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </snapshots>
+                </repository>
+            </repositories>
+            <pluginRepositories>
+                <pluginRepository>
+                    <id>public</id>
+                    <name>Public Repositories</name>
+                    <url>http://${URL-TO-YOUR-NEXUS}/nexus/content/groups/public/</url>
+                    <releases>
+                        <enabled>true</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </releases>
+                    <snapshots>
+                        <enabled>true</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </snapshots>
+                </pluginRepository>
+            </pluginRepositories>
+        </profile>
+    </profiles>
+    <!-- HERE IS WHERE WE TELL MAVEN TO CHOOSE THE my.nexus PROFILE -->
+    <activeProfiles>
+        <activeProfile>my.nexus</activeProfile>
+    </activeProfiles>
+</settings>
+
+
+
+
+
+

3. Use it in Jenkins Pipelines

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-mirabaud-jenkins-sonarqube-integration.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-mirabaud-jenkins-sonarqube-integration.html new file mode 100644 index 00000000..c21d432a --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-mirabaud-jenkins-sonarqube-integration.html @@ -0,0 +1,594 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Jenkins - SonarQube integration

+
+
+

First thing is installing both tools by, for example, Docker or Docker Compose. Then, we have to think about how they should collaborate to create a more efficient Continuous Integration process.

+
+
+

Once our project’s pipeline is triggered (it could also be triggered in a fancy way, such as when a merge to the develop branch is done).

+
+
+
+
+

1. Jenkins SonarQube plugin

+
+
+

Typically in those integration cases, Jenkins plug-in installations become a must. Let’s look for some available SonarQube plug-in(s) for Jenkins:

+
+
+
+jenkins sonarqube plugin +
+
+
+
+
+

2. SonarQube token

+
+
+

Once installed let’s create a token in SonarQube so that Jenkins can communicate with it to trigger their Jobs. Once we install SonarQube in our CI/CD machine (ideally a remote machine) let’s login with admin/admin credentials:

+
+
+
+sonarqube login +
+
+
+

Afterwards, SonarQube itself asks you to create this token we talked about (the name is up to you):

+
+
+
+sonarqube token name +
+
+
+

Then a token is generated:

+
+
+
+sonarqube token generation +
+
+
+

You click in "continue" and the token’s generation is completed:

+
+
+
+sonarqube token done +
+
+
+
+
+

3. Jenkins SonarQube Server setup

+
+
+

Now we need to tell Jenkins where is SonarQube and how to communicate with it. In Manage Jenkins > Configure Settings. We add a name for the server (up to you), where it is located (URL), version and the Server authentication token created in point 2.

+
+
+
+jenkins sonarqube server setup +
+
+
+
+
+

4. Jenkins SonarQube Scanner

+
+
+

Install a SonarQube Scanner as a Global tool in Jenkins to be used in the project’s pipeline.

+
+
+
+jenkins sonarqube scanner +
+
+
+
+
+

5. Pipeline code

+
+
+

Last step is to add the SonarQube process in our project’s Jenkins pipeline. The following code will trigger a SonarQube process that will evaluate our code’s quality looking for bugs, duplications, and so on.

+
+
+
+
    stage 'SonarQube Analysis'
+        def scannerHome = tool 'SonarQube scanner';
+        sh "${scannerHome}/bin/sonar-scanner \
+             -Dsonar.host.url=http://url-to-your-sq-server:9000/ \
+             -Dsonar.login=[SONAR_USER] -Dsonar.password=[SONAR_PASS] \
+             -Dsonar.projectKey=[PROJECT_KEY] \
+             -Dsonar.projectName=[PROJECT_NAME] -Dsonar.projectVersion=[PROJECT_VERSION] \
+             -Dsonar.sources=. -Dsonar.java.binaries=. \
+             -Dsonar.java.source=1.8 -Dsonar.language=java"
+
+
+
+
+
+

6. Results

+
+
+

After all this, you should end up having something like this in Jenkins:

+
+
+
+jenkins sonarqube feedback +
+
+
+

And in SonarQube:

+
+
+
+sonarqube project result +
+
+
+
+
+

7. Changes in a devonfw project to execute SonarQube tests with Coverage

+
+
+

The plugin used to have Coverage reports in the SonarQube for devonfw projects is Jacoco. There are some changes in the project’s parent pom.xml that are mandatory to use it.

+
+
+

Inside of the <properties> tag:

+
+
+
+
<properties>
+
+    (...)
+
+    <sonar.jacoco.version>3.8</sonar.jacoco.version>
+    <sonar.java.coveragePlugin>jacoco</sonar.java.coveragePlugin>
+    <sonar.core.codeCoveragePlugin>jacoco</sonar.core.codeCoveragePlugin>
+    <sonar.dynamicAnalysis>reuseReports</sonar.dynamicAnalysis>
+    <sonar.language>java</sonar.language>
+    <sonar.java.source>1.7</sonar.java.source>
+    <sonar.junit.reportPaths>target/surefire-reports</sonar.junit.reportPaths>
+    <sonar.jacoco.reportPaths>target/jacoco.exec</sonar.jacoco.reportPaths>
+    <sonar.sourceEncoding>UTF-8</sonar.sourceEncoding>
+    <sonar.exclusions>
+        **/generated-sources/**/*,
+        **io/oasp/mirabaud/general/**/*,
+        **/*Dao.java,
+        **/*Entity.java,
+        **/*Cto.java,
+        **/*Eto.java,
+        **/*SearchCriteriaTo.java,
+        **/*management.java,
+        **/*SpringBootApp.java,
+        **/*SpringBootBatchApp.java,
+        **/*.xml,
+        **/*.jsp
+    </sonar.exclusions>
+    <sonar.coverage.exclusions>
+        **io/oasp/mirabaud/general/**/*,
+        **/*Dao.java,
+        **/*Entity.java,
+        **/*Cto.java,
+        **/*Eto.java,
+        **/*SearchCriteriaTo.java,
+        **/*management.java,
+        **/*SpringBootApp.java,
+        **/*SpringBootBatchApp.java,
+        **/*.xml,
+        **/*.jsp
+    </sonar.coverage.exclusions>
+    <sonar.host.url>http://${YOUR_SONAR_SERVER_URL}/</sonar.host.url>
+    <jacoco.version>0.7.9</jacoco.version>
+
+    <war.plugin.version>3.2.0</war.plugin.version>
+    <assembly.plugin.version>3.1.0</assembly.plugin.version>
+</properties>
+
+
+
+

Of course, those sonar and sonar.coverage properties can/must be changed to fit other projects.

+
+
+

Now add the Jacoco Listener as a dependency:

+
+
+
+
<dependencies>
+    <dependency>
+        <groupId>org.sonarsource.java</groupId>
+        <artifactId>sonar-jacoco-listeners</artifactId>
+        <scope>test</scope>
+    </dependency>
+</dependencies>
+
+
+
+

Plugin Management declarations:

+
+
+
+
<pluginManagement>
+    <plugins>
+        <plugin>
+            <groupId>org.sonarsource.scanner.maven</groupId>
+            <artifactId>sonar-maven-plugin</artifactId>
+            <version>3.2</version>
+        </plugin>
+        <plugin>
+            <groupId>org.jacoco</groupId>
+            <artifactId>jacoco-maven-plugin</artifactId>
+            <version>${jacoco.version}</version>
+        </plugin>
+    </plugins>
+</pluginManagement>
+
+
+
+

Plugins:

+
+
+
+
<plugins>
+
+    (...)
+
+    <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <version>2.20.1</version>
+        <configuration>
+            <argLine>-XX:-UseSplitVerifier -Xmx2048m ${surefireArgLine}</argLine>
+            <testFailureIgnore>false</testFailureIgnore>
+            <useFile>false</useFile>
+            <reportsDirectory>${project.basedir}/${sonar.junit.reportPaths}</reportsDirectory>
+            <argLine>${jacoco.agent.argLine}</argLine>
+            <excludedGroups>${oasp.test.excluded.groups}</excludedGroups>
+            <alwaysGenerateSurefireReport>true</alwaysGenerateSurefireReport>
+            <aggregate>true</aggregate>
+            <properties>
+                <property>
+                    <name>listener</name>
+                    <value>org.sonar.java.jacoco.JUnitListener</value>
+                </property>
+            </properties>
+        </configuration>
+    </plugin>
+    <plugin>
+        <groupId>org.jacoco</groupId>
+        <artifactId>jacoco-maven-plugin</artifactId>
+        <configuration>
+            <argLine>-Xmx128m</argLine>
+            <append>true</append>
+            <propertyName>jacoco.agent.argLine</propertyName>
+            <destFile>${sonar.jacoco.reportPath}</destFile>
+            <excludes>
+                <exclude>**/generated-sources/**/*,</exclude>
+                <exclude>**io/oasp/${PROJECT_NAME}/general/**/*</exclude>
+                <exclude>**/*Dao.java</exclude>
+                <exclude>**/*Entity.java</exclude>
+                <exclude>**/*Cto.java</exclude>
+                <exclude>**/*Eto.java</exclude>
+                <exclude>**/*SearchCriteriaTo.java</exclude>
+                <exclude>**/*management.java</exclude>
+                <exclude>**/*SpringBootApp.java</exclude>
+                <exclude>**/*SpringBootBatchApp.java</exclude>
+                <exclude>**/*.class</exclude>
+            </excludes>
+        </configuration>
+        <executions>
+            <execution>
+                <id>prepare-agent</id>
+                <phase>initialize</phase>
+                <goals>
+                    <goal>prepare-agent</goal>
+                </goals>
+                <configuration>
+                    <destFile>${sonar.jacoco.reportPath}</destFile>
+                    <append>true</append>
+                </configuration>
+            </execution>
+            <execution>
+                <id>report-aggregate</id>
+                <phase>verify</phase>
+                <goals>
+                    <goal>report-aggregate</goal>
+                </goals>
+            </execution>
+            <execution>
+                <id>jacoco-site</id>
+                <phase>verify</phase>
+                <goals>
+                    <goal>report</goal>
+                </goals>
+            </execution>
+        </executions>
+    </plugin>
+</plugins>
+
+
+
+
+
+

Jenkins SonarQube execution

+
+
+

If the previous configuration is already set up, once Jenkins executes the sonar maven plugin, it will automatically execute coverage as well.

+
+
+

This is an example of a block of code from a devonfw project’s Jenkinsfile:

+
+
+
+
    withMaven(globalMavenSettingsConfig: 'YOUR_GLOBAL_MAVEN_SETTINGS', jdk: 'OpenJDK 1.8', maven: 'Maven_3.3.9') {
+        sh "mvn sonar:sonar -Dsonar.login=[USERNAME] -Dsonar.password=[PASSWORD]"
+    }
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-customize-catalog.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-customize-catalog.html new file mode 100644 index 00000000..3b923971 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-customize-catalog.html @@ -0,0 +1,383 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

How to add custom catalog categories inside openshift

+
+
+

This is a guide to add custom Catalog Categories into an Openshift cluster.

+
+
+

Here we can find a catalog-categories.js example to use the devonfw catalog categories.

+
+
+
+
+

Create a script to add custom languages and custom catalog categories

+
+ +
+
+
+

Custom language

+
+
+

For this example, we are going to add a new language to the languages category. To do that we must create a script, which we name catalog-categories.js

+
+
+
+
// Find the Languages category.
+var category = _.find(window.OPENSHIFT_CONSTANTS.SERVICE_CATALOG_CATEGORIES,
+                      { id: 'languages' });
+// Add Go as a new subcategory under Languages.
+category.subCategories.splice(2,0,{ // Insert at the third spot.
+  // Required. Must be unique.
+  id: "devonfw-languages",
+  // Required.
+  label: "devonfw",
+  // Optional. If specified, defines a unique icon for this item.
+  icon: "devonfw-logo-language",
+  // Required. Items matching any tag will appear in this subcategory.
+  tags: [
+    "devonfw",
+    "devonfw-angular",
+    "devonfw-java"
+  ]
+});
+
+
+
+
+
+

Custom category

+
+
+

For this example, we are going to add a new category to the category tab. To do that we must create a script, which we name catalog-categories.js

+
+
+
+
// Add a Featured category as the first category tab.
+window.OPENSHIFT_CONSTANTS.SERVICE_CATALOG_CATEGORIES.unshift({
+  // Required. Must be unique.
+  id: "devonfw-featured",
+  // Required
+  label: "devonfw",
+  subCategories: [
+    {
+      // Required. Must be unique.
+      id: "devonfw-languages",
+      // Required.
+      label: "devonfw",
+      // Optional. If specified, defines a unique icon for this item.
+      icon: "devonfw-logo-language",
+      // Required. Items matching any tag will appear in this subcategory.
+      tags: [
+        "devonfw",
+        "devonfw-angular",
+        "devonfw-java"
+      ]
+    }
+  ]
+});
+
+
+
+
+
+

Use our own javascript inside openshift

+
+
+

To do that, we need to enter in openshift as an admin and use the next command:

+
+
+
+
$ oc login
+$ oc edit configmap/webconsole-config -n openshift-web-console
+
+
+
+

After this, we can see in our shell the webconsole-config.yaml, we only need to navigate until extensions and add the url for our own javascript in the scriptURLs section.

+
+
+

IMPORTANT: Scripts and stylesheets must be served with the correct content type or they will not be run by the browser. Scripts must be served with Content-Type: application/javascript.

+
+
+

In git repositories, the content type of raw is text/plain. You can use rawgit to convert a raw from a git repository to the correct content type.

+
+
+

Example:

+
+
+
+
webconsole-config.yaml: |
+  [...]
+    extensions:
+      scriptURLs:
+        - https://cdn.rawgit.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/scripts/catalog-categories.js
+  [...]
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-customize-icons.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-customize-icons.html new file mode 100644 index 00000000..edb4a718 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-customize-icons.html @@ -0,0 +1,398 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

How to add Custom Icons inside openshift

+
+
+

This is a guide to add custom icons into an Openshift cluster.

+
+
+

Here we can find an icons.css example to use the devonfw icons.

+
+
+
+
+

Images Styles

+
+
+

The icons for templates must measure the same as below or the images don’t show right:

+
+
+
    +
  • +

    Openshift logo: 230px x 40px.

    +
  • +
  • +

    Template logo: 50px x 50px.

    +
  • +
  • +

    Category logo: 110px x 36px.

    +
  • +
+
+
+
+
+

Create a css

+
+ +
+
+
+

Custom logo for openshift cluster

+
+
+

For this example, we are going to call the css icons.css, but you can name it as you wish. +The Openshift cluster draws its logo by the id header-logo, so we only need to add the following Style Attribute ID to our icons.css

+
+
+
+
#header-logo {
+  background-image: url("https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/images/devonfw-openshift.png");
+  width: 230px;
+  height: 40px;
+}
+
+
+
+
+
+

Custom icons for templates

+
+
+

To apply a custom icon to a template, Openshift uses a class name. We therefore need to insert the following Style Class into our icons.css

+
+
+
+
.devonfw-logo {
+  background-image: url("https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/images/devonfw.png");
+  width: 50px;
+  height: 50px;
+}
+
+
+
+

To show that custom icon on a template, we only need to write the name of our class in the tag "iconClass" of our template.

+
+
+
+
{
+    ...
+    "items": [
+        {
+            ...
+            "metadata": {
+                ...
+                "annotations": {
+                    ...
+                    "iconClass": "devonfw-logo",
+                    ...
+                }
+            },
+            ...
+        }
+    ]
+}
+
+
+
+
+
+

Use our own css inside openshift

+
+
+

To do that, we need to enter in openshift as an admin and use the next command:

+
+
+
+
$ oc login
+$ oc edit configmap/webconsole-config -n openshift-web-console
+
+
+
+

After this, we can see in our shell the webconsole-config.yaml, we only need to navigate until extensions and add the url for our own css in the stylesheetURLs section.

+
+
+

IMPORTANT: Scripts and stylesheets must be served with the correct content type or they will not be run by the browser. stylesheets must be served with Content-Type: text/css.

+
+
+

In git repositories, the content type of raw is text/plain. You can use rawgit to convert a raw from a git repository to the correct content type.

+
+
+

Example:

+
+
+
+
webconsole-config.yaml: |
+  [...]
+    extensions:
+      stylesheetURLs:
+        - https://cdn.rawgit.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/stylesheet/icons.css
+    [...]
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-customize-v3-7.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-customize-v3-7.html new file mode 100644 index 00000000..b4308989 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-customize-v3-7.html @@ -0,0 +1,438 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Customize Openshift Origin v3.7 for devonfw

+
+
+

This is a guide to customize Openshift cluster. For more information read the next:

+
+
+ +
+
+
+
+

Images Styles

+
+
+

The icons for templates must measure the same as below or the images don’t show right:

+
+
+
    +
  • +

    Openshift logo: 230px x 40px.

    +
  • +
  • +

    Template logo: 50px x 50px.

    +
  • +
  • +

    Category logo: 110px x 36px.

    +
  • +
+
+
+
+
+

Quick Use

+
+
+

This is a quick example to add custom icons and categories inside openshift.

+
+
+

To modify the icons inside openshift, we must modify the master-config.yaml of our openshift cluster. This file is inside the openshift container, and to obtain a copy of it we must know our openshift container’s name.

+
+
+
+
+

Obtain the master-config.yaml of our openshift cluster

+
+ +
+
+
+

== Obtain the name of our openshift container

+
+
+

To obtain it, we can know it executing the next:

+
+
+
+
$ docker container ls
+CONTAINER ID        IMAGE                                           COMMAND                  CREATED             STATUS              PORTS                                     NAMES
+83a4e3acda5b        openshift/origin:v3.7.0                         "/usr/bin/openshift …"   6 days ago          Up 6 days                                                     origin
+
+
+
+

Here we can see that the name of the container is origin. Normally the container is called origin.

+
+
+
+
+

== Copy the master-config.yaml of our openshift container to our directory

+
+
+

This file is inside the openshift container in the next directory: /var/lib/origin/openshift.local.config/master/master-config.yaml and we can copy it with the next command:

+
+
+
+
$ docker cp origin:/var/lib/origin/openshift.local.config/master/master-config.yaml ./
+
+
+
+

Now we have a file with the configuration of our openshift cluster.

+
+
+
+
+

Copy all customize files inside the openshift container

+
+
+

To use our customization of devonfw Openshift, we need to copy our files inside the openshift container.

+
+
+

To do this we need to copy the images, scripts and stylesheets from here into the openshift +container; for example, we could put them all inside a folder called openshift.local.devonfw. In step one we obtained the name of this container; for this example we assume that it’s called origin. Then our images are located inside the openshift container, and we can see and access them in /var/lib/origin/openshift.local.devonfw/images.

+
+
+
+
$ docker cp ./openshift.local.devonfw origin:/var/lib/origin/
+
+
+
+
+
+

Edit and copy the master-config.yaml to use our customize files

+
+
+

The master-config.yaml have a sections to charge our custom files. All these sections are inside the assetConfig and their names are the next:

+
+
+
    +
  • +

    The custom stylessheets are into extensionStylesheets.

    +
  • +
  • +

    The custom scripts are into extensionScripts.

    +
  • +
  • +

    The custom images are into extensions.

    +
  • +
+
+
+

To use all our custom elements only need to add the directory routes of each element in their appropriate section of the master-config.yaml

+
+
+
+
...
+assetConfig:
+  ...
+  extensionScripts:
+  - /var/lib/origin/openshift.local.devonfw/scripts/catalog-categories.js
+  extensionStylesheets:
+  - /var/lib/origin/openshift.local.devonfw/stylesheet/icons.css
+  extensions:
+  - name: images
+    sourceDirectory: /var/lib/origin/openshift.local.devonfw/images
+  ...
+...
+
+
+
+

Now we only need to copy that master-config.yaml inside openshift, and restart it to load the new configuration. To do that execute the next:

+
+
+
+
$ docker cp ./master-config.yaml origin:/var/lib/origin/openshift.local.config/master/master-config.yaml
+
+
+
+

To re-start openshift do oc cluster down and start again your persistent openshift cluster.

+
+
+
+
+

More information

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-customize.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-customize.html new file mode 100644 index 00000000..7f2258c3 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-customize.html @@ -0,0 +1,363 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Customize Openshift Origin for devonfw

+
+
+

This is a guide to customize Openshift cluster.

+
+
+
+
+

Images Styles

+
+
+

The icons for templates must measure the same as below or the images don’t show right:

+
+
+
    +
  • +

    Openshift logo: 230px x 40px.

    +
  • +
  • +

    Template logo: 50px x 50px.

    +
  • +
  • +

    Category logo: 110px x 36px.

    +
  • +
+
+
+
+
+

How to use

+
+
+

To use it, we need to enter in openshift as an admin and use the next command:

+
+
+
+
$ oc login
+
+$ oc edit configmap/webconsole-config -n openshift-web-console
+
+
+
+

After this, we can see in our shell the webconsole-config.yaml, we only need to navigate until extensions and add the url for our own css in the stylesheetURLs and javascript in the scriptURLs section.

+
+
+

IMPORTANT: Scripts and stylesheets must be served with the correct content type or they will not be run by the browser. Scripts must be served with Content-Type: application/javascript and stylesheets with Content-Type: text/css.

+
+
+

In git repositories, the content type of raw is text/plain. You can use rawgit to convert a raw from a git repository to the correct content type.

+
+
+

Example:

+
+
+
+
webconsole-config.yaml: |
+  [...]
+    extensions:
+      scriptURLs:
+        - https://cdn.rawgit.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/scripts/catalog-categories.js
+      stylesheetURLs:
+        - https://cdn.rawgit.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/stylesheet/icons.css
+  [...]
+
+
+
+
+
+

More information

+
+
+ +
+
+
+
+

Old versions

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-how-to-install.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-how-to-install.html new file mode 100644 index 00000000..382ae87e --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-how-to-install.html @@ -0,0 +1,421 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Install OKD (Openshift Origin)

+
+ +
+
+
+

Pre-requisites

+
+ +
+
+
+

Install docker

+
+ +
+
+
$ sudo groupadd docker
+$ sudo usermod -aG docker $USER
+
+
+
+
+
+

Download Openshift Origin Client

+
+
+

Download Openshift Origin Client from here

+
+
+

When the download is complete, just extract it to the directory that you want, for example /home/administrador/oc

+
+
+
+
+

Add oc to path

+
+
+
+
$ export PATH=$PATH:/home/administrador/oc
+
+
+
+
+
+

Install Openshift Cluster

+
+ +
+
+
+

Add the insecure registry

+
+
+

Create file /etc/docker/daemon.json with the next content:

+
+
+
+
{
+    "insecure-registries" : [ "172.30.0.0/16" ]
+}
+
+
+
+
+
+

Download docker images for openshift

+
+
+
+
$ oc cluster up
+
+
+
+
+
+

Install Oc Cluster Wrapper

+
+
+

To manage the persistent cluster more easily, we are going to use the oc cluster wrapper.

+
+
+
+
cd /home/administrador/oc
+wget https://raw.githubusercontent.com/openshift-evangelists/oc-cluster-wrapper/master/oc-cluster
+
+
+
+

oc-cluster up devonfw-shop-floor --public-hostname X.X.X.X

+
+
+
+
+

Configure iptables

+
+
+

We must create iptables rules to allow traffic from other machines.

+
+
+
+
- The next commands it's to let all traffic, don't do it on a real server.
+
+- $ iptables -F
+- $ iptables -X
+- $ iptables -t nat -F
+- $ iptables -t nat -X
+- $ iptables -t mangle -F
+- $ iptables -t mangle -X
+- $ iptables -P INPUT ACCEPT
+- $ iptables -P OUTPUT ACCEPT
+- $ iptables -P FORWARD ACCEPT
+
+
+
+
+
+

How to use Oc Cluster Wrapper

+
+
+

With oc cluster wrapper we could have different clusters with different context.

+
+
+
+
+

Cluster up

+
+
+
+
$ oc-cluster up devonfw-shop-floor --public-hostname X.X.X.X
+
+
+
+
+
+

Cluster down

+
+
+
+
$ oc-cluster down
+
+
+
+
+
+

Use non-persistent cluster

+
+
+
+
oc cluster up --image openshift/origin --public-hostname X.X.X.X --routing-suffix apps.X.X.X.X.nip.io
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-initial-setup.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-initial-setup.html new file mode 100644 index 00000000..dc8863ad --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-initial-setup.html @@ -0,0 +1,327 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw Openshift Origin Initial Setup

+
+
+

These are scripts to customize an Openshift cluster to be a devonfw Openshift.

+
+
+
+
+

How to use

+
+ +
+
+
+

Prerequisite: Customize Openshift

+
+
+

devonfw Openshift Origin uses custom icons, and we need to add them to openshift. More information:

+
+ +
+
+
+

Script initial-setup

+
+
+

Download this script and execute it.

+
+
+

More information about what this script does here.

+
+
+
+
+

Known issues

+
+ +
+
+
+

Failed to push image

+
+
+

If you receive an error like this:

+
+
+
+
error: build error: Failed to push image: After retrying 6 times, Push image still failed due to error: Get http://172.30.1.1:5000/v2/:  dial tcp 172.30.1.1:5000: getsockopt: connection refused
+
+
+
+

It’s because the registry isn’t working, go to openshift console and enter into the default project https://x.x.x.x:8443/console/project/default/overview and you must see two resources, docker-registry and router; they must be running. If they don’t work, try to deploy them and look at the logs to see what is happening.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-s2i.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-s2i.html new file mode 100644 index 00000000..1d302f39 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-s2i.html @@ -0,0 +1,385 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

s2i devonfw

+
+
+

These are the s2i sources and templates to build s2i images. It provides OpenShift builder images for components of the devonfw (at this moment only for angular and java).

+
+
+

This work is totally based on the implementation of Michael Kuehl from RedHat for Oasp s2i.

+
+
+

All this information is used as a part of the initial setup for openshift.

+
+
+
+
+

Previous setup

+
+
+

In order to build all of this, it will be necessary, first, to have a running OpenShift cluster. How to install it here.

+
+
+
+
+

Usage

+
+
+

Before using the builder images, add them to the OpenShift cluster.

+
+
+
+
+

Deploy the Source-2-Image builder images

+
+
+

First, create a dedicated devonfw project as admin.

+
+
+
+
$ oc new-project devonfw --display-name='devonfw' --description='devonfw Application Standard Platform'
+
+
+
+

Now add the builder image configuration and start their build.

+
+
+
+
oc create -f https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-devonfw-deployment/s2i/java/s2i-devonfw-java-imagestream.json --namespace=devonfw
+oc create -f https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-devonfw-deployment/s2i/angular/s2i-devonfw-angular-imagestream.json --namespace=devonfw
+oc start-build s2i-devonfw-java --namespace=devonfw
+oc start-build s2i-devonfw-angular --namespace=devonfw
+
+
+
+

Make sure other projects can access the builder images:

+
+
+
+
oc policy add-role-to-group system:image-puller system:authenticated --namespace=devonfw
+
+
+
+

That’s all!

+
+
+
+
+

Deploy devonfw templates

+
+
+

Now, it’s time to create devonfw templates to use this s2i and add it to the browse catalog. More information here.

+
+
+
+
+

Build All

+
+
+

Use this script to automatically install and build all image streams. The script also creates templates devonfw-angular and devonfw-java inside the project 'openshift' to be used by everyone.

+
+
+
    +
  1. +

    Open a bash shell as Administrator

    +
  2. +
  3. +

    Execute shell file:

    +
  4. +
+
+
+
+
$ /PATH/TO/BUILD/FILE/initial-setup.sh
+
+
+
+

More information about what this script does here.

+
+
+
+
+ +
+
+

This is a list of useful articles, etc, that I found while creating the templates.

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-templates.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-templates.html new file mode 100644 index 00000000..7630b982 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd-templates.html @@ -0,0 +1,401 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw templates

+
+
+

These are the devonfw templates to build devonfw apps for Openshift using the s2i images. They are based on the work of Mickuehl in Oasp templates/mythaistar to deploy My Thai Star.

+
+
+
    +
  • +

    Inside the example-mythaistar we have an example to deploy My Thai Star application using devonfw templates.

    +
  • +
+
+
+

All this information is used as a part of the initial setup for openshift.

+
+
+
+
+

How to use

+
+ +
+
+
+

Previous requirements

+
+ +
+
+
+

== Deploy the Source-2-Image builder images

+
+
+

Remember that these templates need a build image from s2i-devonfw-angular and s2i-devonfw-java. More information:

+
+ +
+
+
+

== Customize Openshift

+
+
+

Remember that these templates also have custom icons, and to use them, we must modify the master-config.yml inside openshift. More information:

+
+
+ +
+
+
+
+

Deploy devonfw templates

+
+
+

Now, it’s time to create devonfw templates to use this s2i and add it to the browse catalog.

+
+
+

To let all users use these templates in all openshift projects, we should create them in an openshift namespace. To do that, we must log in as an admin.

+
+
+
+
oc create -f https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-devonfw-deployment/templates/devonfw-java-template.json --namespace=openshift
+oc create -f https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-devonfw-deployment/templates/devonfw-angular-template.json --namespace=openshift
+
+
+
+

When it finishes, remember to logout as an admin and enter with our normal user.

+
+
+
+
$ oc login
+
+
+
+
+
+

How to use devonfw templates in openshift

+
+
+

To use these templates with openshift, we can override any parameter values defined in the file by adding the --param-file=paramfile option.

+
+
+

This file must be a list of <name>=<value> pairs. A parameter reference may appear in any text field inside the template items.

+
+
+

The parameters that we must override are the following

+
+
+
+
$ cat paramfile
+  APPLICATION_NAME=app-Name
+  APPLICATION_GROUP_NAME=group-Name
+  GIT_URI=Git uri
+  GIT_REF=master
+  CONTEXT_DIR=/context
+
+
+
+

The following parameters are optional

+
+
+
+
$ cat paramfile
+  APPLICATION_HOSTNAME=Custom hostname for service routes. Leave blank for default hostname, e.g.: <application-name>.<project>.<default-domain-suffix>,
+  # Only for angular
+  REST_ENDPOINT_URL=The URL of the backend's REST API endpoint. This can be declared after,
+  REST_ENDPOINT_PATTERN=The pattern URL of the backend's REST API endpoint that must be modify by the REST_ENDPOINT_URL variable,
+
+
+
+

For example, to deploy My Thai Star Java

+
+
+
+
$ cat paramfile
+  APPLICATION_NAME="mythaistar-java"
+  APPLICATION_GROUP_NAME="My-Thai-Star"
+  GIT_URI="https://github.com/devonfw/my-thai-star.git"
+  GIT_REF="develop"
+  CONTEXT_DIR="/java/mtsj"
+
+$ oc new-app --template=devonfw-java --namespace=mythaistar --param-file=paramfile
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd.html new file mode 100644 index 00000000..30d8a769 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-okd.html @@ -0,0 +1,320 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

OKD (OpenShift Origin)

+
+ +
+
+
+

What is OKD

+
+
+

OKD is a distribution of Kubernetes optimized for continuous application development and multi-tenant deployment. OKD is the upstream Kubernetes distribution embedded in Red Hat OpenShift.

+
+
+

OKD embeds Kubernetes and extends it with security and other integrated concepts. OKD is also referred to as Origin in github and in the documentation.

+
+
+

OKD provides a complete open source container application platform. If you are looking for enterprise-level support, or information on partner certification, Red Hat also offers Red Hat OpenShift Container Platform.

+
+
+

Continue reading…​

+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-openshift-service-account.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-openshift-service-account.html new file mode 100644 index 00000000..6e240753 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-openshift-service-account.html @@ -0,0 +1,345 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

OpenShift Service Accounts

+
+
+

The service accounts are special system users associated with projects. As the regular users, service accounts have a token to connect with the OpenShift API. The main difference is service account tokens are long-lived. By using the service account tokens you can access the OpenShift API without sharing/exposing your user password/token.

+
+
+
+
+

How to create a Service Account

+
+
+

The process to create a service account is very simple, you only need to execute the following command:

+
+
+
+
oc create sa <service account name>
+
+
+
+

This command will create a service account in your current project. You can create a service account in another project by using the -n parameter.

+
+
+
+
+

How to give rights to a Service Account

+
+
+

By default the service account has no rights. In order to give rights to edit the project, you need to execute the command:

+
+
+
+
oc policy add-role-to-user edit -z <service account name>
+
+
+
+
+
+

Get the service account token

+
+
+

In order to get the service account token, you only need to:

+
+
+
+
oc describe sa <service account name>
+
+
+
+

Get the secret name for the token:

+
+
+
+sa secret +
+
+
+

Then:

+
+
+
+
oc describe secret <previous step token secret name>
+
+
+
+
+sa secret2 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-openshift-services-bitbucket-basic-server-setup.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-openshift-services-bitbucket-basic-server-setup.html new file mode 100644 index 00000000..553eadf1 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-openshift-services-bitbucket-basic-server-setup.html @@ -0,0 +1,515 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

[Under construction]

+
+
+
+

The purpose of the present document is to provide the basic steps carried out to setup a BitBucket server in OpenShift.

+
+
+

Introduction

+
+
+

BitBucket is the Atlassian tool that extends the Git functionality, by adding integration with JIRA, Confluence, or Trello, as well as incorporates extra features for security or management of user accounts (See BitBucket).

+
+
+

BitBucket server is the Atlassian tool that runs the BitBucket services (See BitBucket server).

+
+
+

The approach followed does not use the command line, but the OpenShift Web Console, by deploying the Docker image atlassian/bitbucket-server (available in Docker Hub) in the existing project Deployment.

+
+
+

The procedure below exposed consists basically in three main steps:

+
+
+
    +
  1. +

    Deploy the BitBucket server image (from OpenShift web console)

    +
  2. +
  3. +

    Add a route for the external traffic (from OpenShift web console)

    +
  4. +
  5. +

    Configure the BitBucket server (from BitBucket server web console)

    +
  6. +
+
+
+
+
+

Prerequisites

+
+
+
    +
  • +

    OpenShift up & running

    +
  • +
  • +

    Atlassian account (with personal account key). Not required for OpenShift, but for the initial BitBucket server configuration.

    +
  • +
+
+
+
+
+

Procedure

+
+ +
+
+
+

Step 0: Log into our OpenShift Web console

+
+
+
+step0 +
+
+
+
+
+

Step 1: Get into Development project

+
+
+
+] +
+
+
+
+
+

Step 2.1: Deploy a new image to the project

+
+
+
+step2.1 +
+
+
+
+
+ +
+
+

Image name: atlassian/bitbucket-server

+
+
+
+step2.2 +
+
+
+
+
+

Step 2.3: Leave by the moment the default config. since it is enough for the basic setup. Press Create

+
+
+
+step2.3 +
+
+
+
+
+

Step 2.4: Copy the oc commands in case it is required to work via command line, and Go to overview

+
+
+
+step2.4 +
+
+
+
+
+

Step 2.5: Wait until OpenShift deploys and starts up the image. All the info will be available.

+
+
+

Please notice that there are no pre-configured routes, hence the application is not accessible from outside the cluster.

+
+
+
+step2.5 +
+
+
+
+
+

Step 3: Create a route in order for the application to be accessible from outside the cluster (external traffic). Press Create

+
+
+

Please notice that there are different fields that can be specified (hostname, port). If required, the value of those fields can be modified later.

+
+
+
+step3a +
+
+
+

Leave by the moment the default config. as it is enough for the basic setup.

+
+
+

The route for external traffic is now available.

+
+
+
+step3b +
+
+
+
+

Now the BitBucket server container is up & running in our cluster.

+
+
+

The below steps correspond to the basic configuration of our BitBucket server.

+
+
+
+
+
+ +
+ +
+
+
+

Step 4.2: Leave by the moment the Internal database since it is enough for the basic setup (and it can be modified later), and click Next

+
+
+
+step4.2 +
+
+
+
+
+

Step 4.3: Select the evaluation license, and click I have an account

+
+
+
+step4.3 +
+
+
+
+
+

Step 4.4: Select the option Bitbucket (Server)

+
+
+
+step4.4 +
+
+
+
+
+

Step 4.5: Introduce your organization (Capgemini), and click Generate License

+
+ +
+
+
+

Step 4.6: Confirm that you want to install the license on the BitBucket server

+
+
+
+step4.6 +
+
+
+

The license key will be automatically generated. Click Next

+
+
+
+
+

Step 4.7: Introduce the details of the Administration account.

+
+
+

Since our BitBucket server is not going to be integrated with JIRA, click on Go to Bitbucket. The integration with JIRA can be configured later.

+
+
+
+step4.7 +
+
+
+
+
+

Step 4.8: Log in with the admin account that has been just created

+
+ +
+
+
+

DONE !!

+
+
+
+done +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-openshift-services-bitbucket-extra-server-configuration.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-openshift-services-bitbucket-extra-server-configuration.html new file mode 100644 index 00000000..a777d6fc --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-openshift-services-bitbucket-extra-server-configuration.html @@ -0,0 +1,351 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

[Under construction]

+
+
+
+

The purpose of the present document is to provide the basic steps carried out to improve the configuration of BitBucket server in OpenShift.

+
+
+

The improved configuration consists of:

+
+
+
    +
  • +

    Persistent Volume Claims

    +
  • +
  • +

    Health Checks (pending to be completed)

    +
  • +
+
+
+

Persistent Volume Claims.

+
+
+

Please notice that the BitBucket server container does not use persistent volume claims by default, which means that the data (e.g.: BitBucket server config.) will be lost from one deployment to another.

+
+
+
+pvc0 +
+
+
+

It is very important to create a persistent volume claim in order to prevent the mentioned loss of data.

+
+
+
+
+

Step 1: Add storage

+
+
+
+pvc1 +
+
+
+
+
+

Step 2: Select the appropriate storage, or create it from scratch if necessary

+
+
+
+pvc2 +
+
+
+
+
+

Step 3: Introduce the required information

+
+
+
    +
  • +

    Path as it is specified in the BitBucket server Docker image (/var/atlassian/application-data/bitbucket)

    +
  • +
  • +

    Volume name with a unique name to clearly identify the volume

    +
  • +
+
+
+
+pvc3 +
+
+
+
+
+

The change will be immediately applied

+
+
+
+done +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-openshift-services-selenium-basic-grid.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-openshift-services-selenium-basic-grid.html new file mode 100644 index 00000000..c6c19952 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-openshift-services-selenium-basic-grid.html @@ -0,0 +1,563 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Basic Selenium Grid setup in OpenShift

+
+
+

[Under construction]

+
+
+
+

The purpose of the present document is to provide the basic steps carried out to setup a Selenium Grid (Hub + Nodes) in OpenShift.

+
+
+
+
+

Introduction

+
+
+

Selenium is a tool to automate web browsers across many platforms. It allows the automation of the testing in many different browsers, operating systems, programming languages, or testing frameworks. (for further information please see Selenium)

+
+
+

Selenium Grid is the platform provided by Selenium in order to perform the execution of tests in parallel and in a distributed way.

+
+
+

It basically consists of a Selenium Server (also known as hub or simply server) which redirects the requests it receives to the appropriate node (Firefox node, Chrome node, …​) depending on how the Selenium WebDriver is configured or implemented (See Selenium Doc.)

+
+
+
+ +
+

Prerequisites

+
+
+
    +
  • +

    OpenShift up & running

    +
  • +
+
+
+
+
+

Procedure

+
+
+

The present procedure is divided into three main parts: +* First part: Selenium Hub (server) installation +* Second part: Selenium node installation (Firefox & Chrome) +* Create persistent volumes for the hub and the node(s)

+
+
+
+
+

Selenium Hub installation

+
+
+

The approach followed consists of deploying a new image from the OpenShift WebConsole.

+
+
+

The image as well as its documentation and details can be found at Selenium Hub Docker Image

+
+
+
+
+

== Step 1: Deploy Image

+
+
+
+step1 +
+
+
+
+
+

== Step 2: Image Name

+
+
+

As it is specified in the documentation (selenium/hub)

+
+
+

(Please notice that, as it is described in the additional documentation of the above links, the server will run by default on 4444 port)

+
+
+
+step2 +
+
+
+
+
+

== Step 3: Introduce the appropriate resource name

+
+
+

(selenium-hub in this case)

+
+
+

(No additional config. is required by the moment)

+
+
+
+step3a +
+
+
+

Once the image is deployed, you will be able to check & review the config. of the container. Please notice that, by default, no route is created for external traffic, hence the application (the selenium server or hub) is not reachable from outside the cluster

+
+
+
+step3b +
+
+
+
+
+

== Step 4: Create a route for external traffic

+
+
+
+step4 +
+
+
+
+
+

== Step 5: Change the default config. if necessary

+
+
+
+step5 +
+
+
+
+
+

== DONE !!

+
+
+

The Selenium Server is now accessible from outside the cluster. Click on the link of the route and you will be able to see the server home page.

+
+
+
+done1 +
+
+
+
+
+

== console/view config to see the default server config.

+
+
+

Please notice that the server is not detecting any node up & running, since we have not yet installed any of them.

+
+
+
+done2 +
+
+
+
+
+
+

Selenium Node Firefox installation

+
+
+

(Same steps apply for Selenium Node Chrome with the selenium/node-chrome Docker image)

+
+
+

The key point of the nodes installation is to specify the host name and port of the hub. If this step is not correctly done, the container will be set up but the application will not run.

+
+
+

The approach followed consists of deploying a new image from the OpenShift WebConsole.

+
+
+

The image as well as its documentation and details can be found at Selenium Hub Docker Image (firefox node in this case)

+
+
+
+
+

== Step 1: Deploy Image

+
+
+

Introduce the appropriate Docker Image name as it is specified in the documentation (selenium/node-firefox)

+
+
+
+step1 +
+
+
+
+
+

== Step 2: Introduce the appropriate resource name

+
+
+

(selenium-node-firefox in this case)

+
+
+
+step2 +
+
+
+
+
+

== Step 3: Introduce, as environment variables, the host name and port of the selenium hub previously created

+
+
+

Env. var. for selenium hub host name

+
+
+
    +
  • +

    Name: HUB_PORT_4444_TCP_ADDR

    +
  • +
  • +

    Value: The Selenium hub host name. It’s recommended to use the service name of the internal OpenShift service.

    +
  • +
+
+
+

Env. var. for host selenium hub host port

+
+
+
    +
  • +

    Name: HUB_PORT_4444_TCP_PORT

    +
  • +
  • +

    Value: 4444 (by default), or the appropriate one if it was changed during the installation.

    +
  • +
+
+
+
+step3 +
+
+
+
+
+

== DONE !!

+
+
+

If the creation of the container was correct, we will be able to see our new selenium-node-firefox application up & running, as well as we will be able to see that the firefox node has correctly detected the selenium hub (in the log of the POD)

+
+
+
+done1 +
+
+
+
+done2 +
+
+
+

If we go back to the configuration of the SeleniumHub through the WebConsole, we will also be able to see our new firefox node

+
+
+
+done3 +
+
+
+
+
+
+

Persistent Volumes

+
+
+

Last part of the installation of the Selenium Grid consists of creating persistent volumes for both, the hub container and the node container.

+
+
+

Persistent Volumes can be easily created following the BitBucket Extra server configuration

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-provisioning-dsf4docker.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-provisioning-dsf4docker.html new file mode 100644 index 00000000..a6e216e0 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-provisioning-dsf4docker.html @@ -0,0 +1,374 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

dsf4docker provisioning environment

+
+
+
+docker +
+
+
+
+
+

Architecture overview

+
+
+

dsf docker arch

+
+
+
+
+

Prerequisite

+
+
+

To use dsf4docker provisioning environment you need a remote server and you must clone or download devonfw shop floor.

+
+
+
+
+

How to use it

+
+
+

Navigate to ./devonfw-shop-floor/dsf4docker/environment and here you can find one script to install it, and another one to uninstall it.

+
+
+
+
+

Install devonfw shop floor 4 Docker

+
+
+

There is an installation script to do so, so the complete installation should be completed by running it. Make sure this script has execution permissions in the Docker Host:

+
+
+
+
 chmod +x dsf4docker-install.sh
+ sudo ./dsf4docker-install.sh
+
+
+
+

This script, besides the container "installation" itself, will also adapt the docker-compose.yml file to your host (using sed to replace the IP_ADDRESS word of the file for your real Docker Host’s IP address).

+
+
+
+
+

Uninstall devonfw shop floor 4 Docker

+
+
+

As well as for the installation, if we want to remove everything concerning devonfw shop floor 4 Docker from our Docker Host, we’ll run this script:

+
+
+
+
 chmod +x dsf4docker-uninstall.sh
+ sudo ./dsf4docker-uninstall.sh
+
+
+
+
+
+

Troubleshooting

+
+
+

When trying to execute the install or uninstall .sh there may be some problems related to the windows/linux format file, so if you see this error log while executing the script:

+
+
+
+
./dsf4docker-install.sh: line 16: $'\r': command not found
+
+
+
+

You need to do a file conversion with this command:

+
+
+
+
dos2unix dsf4docker-install.sh
+
+
+
+

or

+
+
+
+
dos2unix dsf4docker-uninstall.sh
+
+
+
+
+
+

A little history

+
+
+

The Docker part of the shop floor is created based on the experience of the environment setup of the project Mirabaud Advisory, and intended to be updated to latest versions. Mirabaud Advisory is a web service developed with devonfw (Java) that, alongside its own implementation, it needed an environment both for the team to follow CICD rules through their 1-week-long sprints and for the client (Mirabaud) to check the already done work.

+
+
+

There is a practical experience about the Mirabaud Case.

+
+
+
+

Back.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-provisioning-production-line.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-provisioning-production-line.html new file mode 100644 index 00000000..24f52992 --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/dsf-provisioning-production-line.html @@ -0,0 +1,303 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

Production Line provisioning environment

+
+
+

pl

+
+
+

The Production Line Project is a set of server-side collaboration tools for Capgemini engagements. It has been developed for supporting project engagements with individual tools like issue tracking, continuous integration, continuous deployment, documentation, binary storage and much more!

+
+
+

For additional information use the official documentation.

+
+
+
+
+

How to obtain your Production Line

+
+
+

You can order your Production Line environment instance following the official guide. Remember that you need to order at least the next tools: + * Jenkins + * GitLab + * SonarQube + * Nexus

+
+
+
+

Back.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/jenkins-slave.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/jenkins-slave.html new file mode 100644 index 00000000..57cd7b8a --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/jenkins-slave.html @@ -0,0 +1,341 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Slaves creation for jenkins

+
+
+

Click on "Manage Jenkins" → "Manage Nodes" → "New Node": +We define a name and select "Permanent Agent"

+
+
+
+1 +
+
+
+

As we can see in the following image, we need to fill in the fields:

+
+
+

-Name: The slave name

+
+
+

-# of executors: The maximum number of concurrent builds that Jenkins may perform on this node. +(can be modified later)

+
+
+

-Remote root directory :An agent needs to have a directory dedicated to Jenkins. Specify the path to this directory on the agent.

+
+
+

We have this path in our docker-compose.yml

+
+
+
+root directory +
+
+
+

-Usage:In this mode, Jenkins uses this node freely. Whenever there is a build that can be done by using this node, Jenkins will use it.

+
+
+

Launch method: Controls how Jenkins starts this agent.

+
+
+
+2 +
+
+
+

Once is filled, we click on 'save' button and we can see our slave created:

+
+
+
+3 +
+
+
+

If we click in our slave we can see the 'secret' that we need to add to our docker-compose.yml:

+
+
+
+4 +
+
+
+

We add it to the docker-compose.yml file:

+
+
+
+7 +
+
+
+

Once it’s done, we go to the command line and write:

+
+
+

$docker-compose up -d

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/shop-floor.wiki/master-devonfw-shop-floor.html b/docs/devonfw.github.io/1.0/shop-floor.wiki/master-devonfw-shop-floor.html new file mode 100644 index 00000000..fe153dae --- /dev/null +++ b/docs/devonfw.github.io/1.0/shop-floor.wiki/master-devonfw-shop-floor.html @@ -0,0 +1,273 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw shop floor

+
+
+

devonfw-shop-floor

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/devonfw.github.io/1.0/solicitor.wiki/master-solicitor.html b/docs/devonfw.github.io/1.0/solicitor.wiki/master-solicitor.html new file mode 100644 index 00000000..f6845b6c --- /dev/null +++ b/docs/devonfw.github.io/1.0/solicitor.wiki/master-solicitor.html @@ -0,0 +1,2581 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Solicitor User Guide

+
+
+

SPDX-License-Identifier: Apache-2.0

+
+
+
+
+

1. Introduction

+
+
+

Todays software projects often make use of large amounts of Open Source software. Being +compliant with the license obligations of the used software components is a prerequisite for every such project. This results in different requirements that the project might need to fulfill. Those requirements can be grouped into two main categories:

+
+
+
    +
  • +

    Things that need to be done to actually fulfill license obligations

    +
  • +
  • +

    Things that need to be done to monitor / report fulfillment of license obligations

    +
  • +
+
+
+

Most of the above activities share common points:

+
+
+
    +
  • +

    The need to have an inventory of used (open source) components and their licenses

    +
  • +
  • +

    Some rule based evaluation and reporting based on this inventory

    +
  • +
+
+
+

In practice these easy looking tasks might get complex due to various aspects:

+
+
+
    +
  • +

    The number of open source components might be quite large (>> 100 for a typical webapplication based on state of the art programming frameworks)

    +
  • +
  • +

    Agile development and rapid changes of used components result in frequent changes of the inventory

    +
  • +
  • +

    Open Source usage scenarios and license obligations might be OK in one context (e.g. in the relation between a software developer and his client) but might be completely inacceptable in another context (e.g. when the client distributes the same software to end customers)

    +
  • +
  • +

    Legal interpretation of license conditions often differ from organisation to organisation and result in different compliance rules to be respected.

    +
  • +
  • +

    License information for components is often not available in a standardized form which would allow automatic processing

    +
  • +
  • +

    Tools for supporting the license management processes are often specific to a technology or build tool and do not support all aspects of OSS license management.

    +
  • +
+
+
+

Of course there are specific commercial tool suites which address the IP rights and license domain. But due to high complexity and license costs those tools are out of reach for most projects - at least for permanent use.

+
+
+

Solicitor tries to address some of the issues hightlighted above. In its initial version it is a tool for programmatically executing a process which was originally defined as an Excel-supported manual process.

+
+
+

When running Solicitor three subsequent processing steps are executed:

+
+
+
    +
  • +

    Creating an initial component and license inventory based on technology specific input files

    +
  • +
  • +

    Rule based normalization and evaluation of licenses

    +
  • +
  • +

    Generation of output documents

    +
  • +
+
+
+ + + + + +
+ + +Solicitor comes with a set of sample rules for the normalization and evaluation of licenses. +Even though these included rules are not "intentionally wrong" they are only samples and you should never rely on these builtin rules without checking and possibly modifying their content and consulting your lawyer. Solicitor is a tool +for technically supporting the management of OSS licenses within your project. Solicitor neither gives legal advice nor is a replacement for a lawyer. +
+
+
+
+
+

2. Licensing of Solicitor

+
+
+

The Solicitor code and accompanying resources (including this userguide) as stored in the GIT Repository https://github.com/devonfw/solicitor are licensed as Open Source under Apache 2 license (https://www.apache.org/licenses/LICENSE-2.0).

+
+
+ + + + + +
+ + +Specifically observe the "Disclaimer of Warranty" and "Limitation of Liability" which are part of the license. +
+
+
+ + + + + +
+ + +The executable JAR file which is created by the Maven based build process includes numerous other Open Source components which are subject to different Open Source licenses. Any distribution of the Solicitor executable JAR file needs to comply with the license conditions of all those components. +If you are running Solicitor from the executable JAR you might use the -eug option to store detailed license information as file solicitor_licenseinfo.html in your current working directory (together with a copy of this user guide). +
+
+
+
+
+

3. Architecture

+
+
+

The following picture shows a business oriented view of Solicitor.

+
+
+

domain model

+
+
+

Raw data about the components and attached licenses within an application is gathered by scanning with technology and build chain specific tools. This happens outside Solicitor.

+
+
+

The import step reads this data and transforms it into a common technology independent internal format.

+
+
+

In the normalization step the license information is completed and unified. Information not contained in the raw data is added. Where possible the applicable licenses are expressed by SPDX-IDs.

+
+
+

Many open source components are available via multi licensing models. Within qualification the finally applicable licenses are selected.

+
+
+

In the legal assessment the compliance of applicable licenses will be checked based on generic rules defined in company wide policies and possibly project specific extensions. Defining those rules is considered as "legal advice" and possibly needs to be done by lawyers who are authorized to do so. For this step Solicitor only provides a framework / tool to support the process here but does not deliver any predefined rules.

+
+
+

The final export step produces documents based on the internal data model. This might be the list of licenses to be forwarded to the customer or a license compliance report. Data might also be fed into other systems.

+
+
+

A more technical oriented view of Solicitor is given below.

+
+
+

solution

+
+
+

There are three major technical components: The reader and writer components are performing import and export of data. The business logic - doing normalization, qualification and legal assessment is done by a rule engine. Rules are mainly defined via decision tables. Solicitor comes with a starting set of rules for normalization and qualification but these rulesets need to be extended within the projects. Rules for legal evaluation need to be completely defined by the user.

+
+
+

Solicitor is working without additional persisted data: When being executed it generates the output directly from the read input data after processing the business rules.

+
+
+
+
+

4. Data Model

+
+
+

datamodel

+
+
+

The internal business data model consists of 6 entities:

+
+
+
    +
  • +

    ModelRoot: root object of the business data model which holds metadata about the data processing

    +
  • +
  • +

    Engagement: the masterdata of the overall project

    +
  • +
  • +

    Application: a deliverable within the Engagement

    +
  • +
  • +

    ApplicationComponent: component within an Application

    +
  • +
  • +

    RawLicense: License info attached to an ApplicationComponent as it is read from the input data

    +
  • +
  • +

    NormalizedLicense: License info attached to an ApplicationComponent processed by the business rules

    +
  • +
+
+
+
+
+

5. == ModelRoot

+
+
+

|== = +| Property | Type | Description +| modelVersion | int | version number of the data model +| executionTime | String | timestamp when the data was processed +| solicitorVersion | String | Solicitor version which processed the model +| solicitorGitHash | String | buildnumber / GitHash of the Solicitor build +| solicitorBuilddate | String | build date of the Solicitor build +| extensionArtifactId | String | artifactId of the active Solicitor Extension ("NONE" if no extension) +| extensionVersion | String | Version of the active Extension (or "NONE") +| extensionGitHash | String | Buildnumber / GitHash of the Extension (or "NONE") +| extensionBuilddate | String build date of the Extension (or "NONE") +|== =

+
+
+
+
+

6. == Engagement

+
+
+

|== = +| Property | Type | Description +| engagementName | String | the engagement name +| engagementType | EngagementType | the engagement type; possible values: INTERN, EXTERN +| clientName | String | name of the client +| goToMarketModel | GoToMarketModel | the go-to-market-model; possible values: LICENSE +| contractAllowsOss | boolean | does the contract explicitely allow OSS? +| ossPolicyFollowed | boolean | is the companies OSS policy followed? +| customerProvidesOss | boolean | does the customer provide the OSS? +|== =

+
+
+
+
+

7. == Application

+
+
+

|== = +| Property | Type | Description +| applicationName | String | the name of the application / deliverable +| releaseId | String | version identifier of the application +| releaseDate | String | release date of the application +| sourceRepo | String | URL of the source repo of the application (should be an URL) +| programmingEcosystem | String | programming ecosystem (e.g. Java8; Android/Java, iOS / Objective C) +|== =

+
+
+
+
+

8. == ApplicationComponent

+
+
+

|== = +| Property | Type | Description +| usagePattern | UsagePattern | possible values: DYNAMIC_LINKING, STATIC_LINKING, STANDALONE_PRODUCT +| ossModified | boolean | is the OSS modified? +| ossHomepage | String | URL of the OSS homepage +| groupId | String | component identifier: maven group +| artifactId | String | component identifier: maven artifactId +| version | String | component identifier: Version +| repoType | String | component identifier: RepoType +|== =

+
+
+
+
+

9. == RawLicense

+
+
+

|== = +| Property | Type | Description +| declaredLicense | String | name of the declared license +| licenseUrl | String | URL of the declared license +| trace | String | detail info of history of this data record +| specialHandling | boolean | (for controlling rule processing) +|== =

+
+
+
+
+

10. == NormalizedLicense

+
+
+

|== = +| Property | Type | Description +| declaredLicense | String | name of the declared license (copied from RawLicense) +| licenseUrl | String | URL of the declared license (copied from RawLicense +| declaredLicenseContent | String | resolved content of licenseUrl +| normalizedLicenseType | String | type of the license, see [License types] +| normalizedLicense | String | name of the license in normalized form (SPDX-Id) or special "pseudo license id", see [Pseudo License Ids] +| normalizedLicenseUrl | String | URL pointing to a normalized form of the license +| normalizedLicenseType | String | type of the license, see [License types] +| effectiveNormalizedLicenseType | String | type of the effective license, see [License types]| effectiveNormalizedLicense | String | effective normalized license (SPDX-Id) or "pseudo license id"; this is the information after selecting the right license in case of multi licensing or any license override due to a component being redistributed under a different license +| effectiveNormalizedLicenseUrl | String | URL pointing to the effective normalized license +| effectiveNormalizedLicenseContent | String | resolved content of effectiveNormalizedLicenseUrl +| legalPreApproved | String | indicates whether the license is pre approved based on company standard policy +| copyLeft | String | indicates the type of copyleft of the license +| licenseCompliance | String | indicates if the license is compliant according to the default company policy +| licenseRefUrl | String | URL to the reference license information (TBD) +| licenseRefContent | String | resolved content of licenseRefUrl +| includeLicense | String | does the license require to include the license text ? +| includeSource | String | does the license require to deliver source code of OSS component ? +| reviewedForRelease | String | for which release was the legal evaluation done? 
+| comments | String | comments on the component/license (mainly as input to legal) +| legalApproved | String | indicates whether this usage is legally approved +| legalComments | String | comments from legal, possibly indicating additional conditions to be fulfilled +| trace | String | detail info of history of this data record (rule executions) +|== =

+
+
+

For the mechanism how Solicitor resolves the content of URLs and how the result +might be influenced see Resolving of License URLs

+
+
+
+
+

11. == License types

+
+
+

Defines the type of license

+
+
+
    +
  • +

    OSS-SPDX - An OSS license which has a corresponding SPDX-Id

    +
  • +
  • +

    OSS-OTHER - An OSS license which has no SPDX-Id

    +
  • +
  • +

    COMMERCIAL - Commercial (non OSS) license; this might also include code which is owned by the project

    +
  • +
  • +

    UNKNOWN - License is unknown

    +
  • +
  • +

    IGNORED - license will be ignored (non selected license in multi licensing case; only to be used as "Effective Normalized License Type")

    +
  • +
+
+
+
+
+

12. == Pseudo License Ids

+
+
+

A "normalized" license id might be either a SPDX-Id or a "pseudo license id" which is used to indicate a specific situation. The following pseudo license ids are used:

+
+
+
    +
  • +

    OSS specific - a nonstandard OSS license which could not be mapped to a SPDX-Id

    +
  • +
  • +

    PublicDomain - any form of public domain which is not represented by an explicit SPDX-Id

    +
  • +
  • +

    Ignored - license will be ignored (non selected license in multi licensing case; only to be used as "Effective Normalized License")

    +
  • +
  • +

    NonOSS - commercial license, not OSS

    +
  • +
+
+
+
+
+

13. Usage

+
+ +
+
+
+

14. Executing Solicitor

+
+
+

Solicitor is a standalone Java (Spring Boot) application. Prerequisite for running it is an existing Java 8 or 11 runtime environment. If you do not yet have the Solicitor executable JAR (solicitor.jar) you need to build it as given on the project GitHub homepage https://github.com/devonfw/solicitor .

+
+
+

Solicitor is executed with the following command:

+
+
+
+
java -jar solicitor.jar -c <configfile>
+
+
+
+

where <configfile> is to be replaced by the location of the [Configuration File].

+
+
+

To get a first idea on what Solicitor does you might call

+
+
+
+
java -jar solicitor.jar -c classpath:samples/solicitor_sample.cfg
+
+
+
+

This executes Solicitor with default configuration on its own list of internal components and produces sample output.

+
+
+

To get an overview of the available command line options use

+
+
+
+
java -jar solicitor.jar -h
+
+
+
+
+
Addressing of resources
+
+

For unique addressing of resources to be read (configuration files, input data, rule templates and decision tables) Solicitor makes use of the Spring ResourceLoader functionality, see https://docs.spring.io/spring-framework/docs/current/spring-framework-reference/core.html#resources-resourceloader . This allows to load from the classpath, the filesystem or even via http get.

+
+
+

If you want to reference a file in the filesystem you need to write it as follows: file:path/to/file.txt

+
+
+

Note that this only applies to resources being read. Output files are addressed without that prefix.

+
+
+
+
+
+
+

15. Project Configuration File

+
+
+

The project configuration of Solicitor is done via a configuration file in +JSON format. This configuration file defines the engagements and applications master data, configures the readers for importing component and license information, references the business rules to be applied and defines the exports to be done.

+
+
+

The config file has the following skeleton:

+
+
+
+
{
+  "version" : 1,
+  "comment" : "Sample Solicitor configuration file",
+  "engagementName" : "DevonFW", (1)
+  .
+  .
+  .
+  "applications" : [ ... ], (2)
+  "rules" : [ ... ],  (3)
+  "writers" : [ ... ] (4)
+}
+
+
+
+ + + + + + + + + + + + + + + + + +
1The leading data defines the engagement master data, see [Header and Engagement Master Data]
2applications defines the applications within the engagement and configures the readers to import the component/license information, see [Readers / Applications]
3rules references the rules to apply to the imported data, see [Business Rules]
4writers configures how the processed data should be exported, see [Writers / Reporting]
+
+
+ + + + + +
+ + +The following section describes all sections of the Solicitor configuration file format. Often the configuration of writers and especially rules will be identical for projects. To facilitate the project specific configuration setup Solicitor internally provides a base configuration which contains reasonable defaults for the rules and writers section. If the project specific configuration file omits the rules and/or writers sections then the corresponding settings from the base configuration will be taken. For details see Default Base Configuration. +
+
+
+ + + + + +
+ + +If locations of files are specified within the configuration files as relative +pathnames then this is always evaluated relative to the current working directory (which +might differ from the location of the configuration file). If some file location +should be given relative to the location of the configuration file this might be done +using the special placeholder ${cfgdir} as described in the following. +
+
+
+
+
+

16. == Placeholders within the configuration file

+
+
+

Within certain parts of the configuration file (path and filenames) special placeholders might be used to parameterize the configuration. These areas are explicitly marked in the following +description.

+
+
+

These placeholders are available:

+
+
+
    +
  • +

    ${project} - A simplified project name (taking the engagement name, +removing all non-word characters and converting to lowercase).

    +
  • +
  • +

    ${cfgdir} - If the config file was loaded from the filesystem this denotes the directory where the config file resides, or "." otherwise. This can be used to reference locations relative to the location of the config file.

    +
  • +
+
+
+
+
+

17. == Header and Engagement Master Data

+
+
+

The leading section of the config file defines some metadata and the engagement master data.

+
+
+
+
  "version" : 1, (1)
+  "comment" : "Sample Solicitor configuration file", (2)
+  "engagementName" : "DevonFW", (3)
+  "engagementType" : "INTERN", (4)
+  "clientName" : "none", (5)
+  "goToMarketModel" : "LICENSE", (6)
+  "contractAllowsOss" : true, (7)
+  "ossPolicyFollowed" : true, (8)
+  "customerProvidesOss" : false, (9)
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
1version of the config file format (currently needs to be 1)
2is a free text comment (no further function at the moment)
3the engagement name (any string)
4the engagement type; possible values: INTERN, EXTERN
5name of the client (any string)
6the go-to-market-model; possible values: LICENSE
7does the contract explicitely allow OSS? (boolean)
8is the companies OSS policy followed? (boolean)
9does the customer provide the OSS? (boolean)
+
+
+
+
+

18. == Applications

+
+
+

Within this section the different applications (=deliverables) of the engagement are defined. Furthermore, for each application at least one reader needs to be defined which imports the component and license information.

+
+
+
+
 "applications" : [ {
+    "name" : "Devon4J", (1)
+    "releaseId" : "3.1.0-SNAPSHOT", (2)
+    "sourceRepo" : "https://github.com/devonfw/devon4j.git", (3)
+    "programmingEcosystem" : "Java8", (4)
+    "readers" : [ { (5)
+      "type" : "maven", (6)
+      "source" : "classpath:samples/licenses_devon4j.xml", (7) (10)
+      "usagePattern" : "DYNAMIC_LINKING", (8)
+      "repoType" : "maven" (9)
+    } ]
+  } ],
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
1The name of the application / deliverable (any string)
2Version identifier of the application (any string)
3URL of the source repo of the application (string; should be an URL)
4programming ecosystem (any string; e.g. Java8; Android/Java, iOS / Objective C)
5multiple readers might be defined per application
6the type of reader; for possible values see Reading License Information with Readers
7location of the source file to read (ResourceLoader-URL)
8usage pattern; possible values: DYNAMIC_LINKING, STATIC_LINKING, STANDALONE_PRODUCT
9repoType: Repository to download the sources from: currently possible values: maven, npm; if omitted then "maven" will be taken as default
10placeholder patterns might be used here
+
+
+

The different readers are described in chapter Reading License Information with Readers

+
+
+
+
+

19. == Business Rules

+
+
+

Business rules are executed within a Drools rule engine. They are defined as a sequence of rule templates and corresponding XLS files which together represent decision tables.

+
+
+
+
  "rules" : [ {
+    "type" : "dt", (1)
+    "optional" : false, (2)
+    "ruleSource" : "classpath:samples/LicenseAssignmentSample.xls", (3) (7)
+    "templateSource" : "classpath:com/.../rules/rule_templates/LicenseAssignment.drt", (4) (7)
+    "ruleGroup" : "LicenseAssignment", (5)
+    "description" : "setting license in case that no one was detected" (6)
+  },
+  .
+  .
+  .
+,{
+    "type" : "dt",
+    "optional" : false,
+    "ruleSource" : "classpath:samples/LegalEvaluationSample.xls",
+    "templateSource" : "classpath:com/.../rules/rule_templates/LegalEvaluation.drt",
+    "ruleGroup" : "LegalEvaluation",
+    "description" : "final legal evaluation based on the rules defined by legal"
+  } ],
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
1type of the rule; only possible value: dt which stands for "decision table"
2if set to true the processing of this group of rules will be skipped if the XLS with table +data (given by ruleSource) does not exist; if set to false a missing XLS table will result +in program termination
3location of the tabular decision table data
4location of the drools rule template to be used to define the rules together with the decision table data
5id of the group of rules; used to reference it e.g. when doing logging
6some textual description of the rule group
7placeholder patterns might be used here
+
+
+

When running, Solicitor will execute the rules of each rule group separately and in the order +given by the configuration. Only if there are no more rules to fire in a group Solicitor will +move to the next rule group and start firing those rules.

+
+
+

Normally a project will only customize (part of) the data of the decision tables and thus will only change the ruleSource and the data in the XLS. All other configuration (the different templates and processing order) is part of the Solicitor application itself and should not be changed by end users.

+
+
+

See Working with Decision Tables and Standard Business Rules for further information on the business rules.

+
+
+
+
+

20. == Writers / Reporting

+
+
+

The writer configuration defines how the processed data will be exported and/or reported.

+
+
+
+
  "writers" : [ {
+    "type" : "xls", (1)
+    "templateSource" : "classpath:samples/Solicitor_Output_Template_Sample.xlsx", (2) (6)
+    "target" : "OSS-Inventory-DevonFW.xlsx", (3) (6)
+    "description" : "The XLS OSS-Inventory document", (4)
+    "dataTables" : { (5)
+      "ENGAGEMENT"  : "classpath:com/devonfw/tools/solicitor/sql/allden_engagements.sql",
+      "LICENSE" : "classpath:com/devonfw/tools/solicitor/sql/allden_normalizedlicenses.sql"
+    }
+  } ]
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + +
1type of writer to be selected; possible values: xls, velo
2path to the template to be used
3location of the output file
4some textual description
5reference to SQL statements used to transform the internal data model to data tables used for reporting
6placeholder patterns might be used here
+
+
+

For details on the writer configuration see Reporting / Creating output documents.

+
+
+
+
+

21. Starting a new project

+
+
+

To simplify setting up a new project Solicitor provides an option to create a project starter configuration in a given directory.

+
+
+
+
java -jar solicitor.jar -wiz some/directory/path
+
+
+
+

Besides the necessary configuration file this includes also empty XLS files for defining project +specific rules which amend the builtin rules. Furthermore, a sample license.xml file is provided to +directly enable execution of Solicitor and check functionality.

+
+
+

This configuration then serves as starting point for project specific configuration.

+
+
+
+
+

22. Exporting the Builtin Configuration

+
+
+

When working with Solicitor it might be necessary to get access to the builtin base configuration, e.g. for reviewing the builtin sample rules or using builtin reporting templates as starting point for the creation of own templates.

+
+
+

The command

+
+
+
+
java -jar solicitor.jar -ec some/directory/path
+
+
+
+

will export all internal configuration to the given directory. This includes:

+
+
+ +
+
+
+
+

23. Configuration of Technical Properties

+
+
+

Besides the project configuration done via the above described file there are a set of technical settings in Solicitor which are done via properties. Solicitor is implemented as a Spring Boot Application and makes use +of the standard configuration mechanism provided by the Spring Boot Platform which provides several ways to define/override properties.

+
+
+

The default property values are given in Built in Default Properties.

+
+
+

In case that a property shall be overridden when executing Solicitor this can easiest be done via the command line when executing +Solicitor:

+
+
+
+
java -Dsome.property.name1=value -Dsome.property.name2=another_value -jar solicitor.jar <any other arguments>
+
+
+
+
+
+

24. Reading License Information with Readers

+
+
+

Different Readers are available to import raw component / license information for different +technologies. This chapter describes how to setup the different build / dependency management systems to create the required input and how to configure the corresponding reader.

+
+
+
+
+

25. Maven

+
+
+

For the export of the licenses from a maven based project the license-maven-plugin is used, which can directly be called without the need to change anything in the pom.xml.

+
+
+

To generate the input file required for Solicitor the License Plugin needs to be executed with the following command:

+
+
+
+
mvn org.codehaus.mojo:license-maven-plugin:1.14:aggregate-download-licenses -Dlicense.excludedScopes=test,provided
+
+
+
+

The generated output file named licenses.xml (in the directory specified in the +plugin config) should look like the following:

+
+
+
+
Unresolved include directive in modules/ROOT/pages/solicitor.wiki/master-solicitor.adoc - include::licenses.xml[]
+
+
+
+

In Solicitor the data is read with the following reader config:

+
+
+
+
"readers" : [ {
+  "type" : "maven",
+  "source" : "file:target/generated-resources/licenses.xml",
+  "usagePattern" : "DYNAMIC_LINKING"
+} ]
+
+
+
+

(the above assumes that Solicitor is executed in the maven projects main directory)

+
+
+
+
+

26. CSV

+
+
+

The CSV input is normally manually generated and should look like this (The csv File is ";" separated):

+
+
+
+
Unresolved include directive in modules/ROOT/pages/solicitor.wiki/master-solicitor.adoc - include::csvlicenses.csv[]
+
+
+
+

In Solicitor the data is read with the following part of the config

+
+
+
+
"readers" : [ {
+  "type" : "csv",
+  "source" : "file:path/to/the/file.csv",
+  "usagePattern" : "DYNAMIC_LINKING"
+} ]
+
+
+
+

The following 5 columns need to be contained:

+
+
+
    +
  • +

    groupId

    +
  • +
  • +

    artifactId

    +
  • +
  • +

    version

    +
  • +
  • +

    license name

    +
  • +
  • +

    license URL

    +
  • +
+
+
+

In case that a component has multiple licenses attached, there needs to be a separate +line in the file for each license.

+
+
+
+
+

27. NPM

+
+
+

For NPM based projects either the NPM License Crawler (https://www.npmjs.com/package/npm-license-crawler) or the NPM License Checker (https://www.npmjs.com/package/license-checker) might be used. The NPM License Crawler can process several node packages in one run.

+
+
+
+
+

28. == NPM License Crawler

+
+
+

To install the NPM License Crawler the following command needs to be executed.

+
+
+
+
npm i npm-license-crawler -g
+
+
+
+

To get the licenses, the crawler needs to be executed like the following example

+
+
+
+
npm-license-crawler --dependencies --csv licenses.csv
+
+
+
+

The export should look like the following (The csv file is "," separated)

+
+
+
+
Unresolved include directive in modules/ROOT/pages/solicitor.wiki/master-solicitor.adoc - include::licenses.csv[]
+
+
+ +
+

In Solicitor the data is read with the following part of the config

+
+
+
+
"readers" : [ {
+  "type" : "npm-license-crawler-csv",
+  "source" : "file:path/to/licenses.csv",
+  "usagePattern" : "DYNAMIC_LINKING",
+  "repoType" : "npm"
+} ]
+
+
+
+
+
+

29. == NPM License Checker

+
+
+

To install the NPM License Checker the following command needs to be executed.

+
+
+
+
npm i license-checker -g
+
+
+
+

To get the licenses, the checker needs to be executed like the following example (we require JSON output here)

+
+
+
+
license-checker --json > /path/to/licenses.json
+
+
+
+

The export should look like the following

+
+
+
+
Unresolved include directive in modules/ROOT/pages/solicitor.wiki/master-solicitor.adoc - include::licensesNpmLicenseChecker.json[]
+
+
+ +
+

In Solicitor the data is read with the following part of the config

+
+
+
+
"readers" : [ {
+  "type" : "npm-license-checker",
+  "source" : "file:path/to/licenses.json",
+  "usagePattern" : "DYNAMIC_LINKING",
+  "repoType" : "npm"
+} ]
+
+
+
+
+
+

30. Gradle (Windows)

+
+
+

For the export of the licenses from a Gradle based project the Gradle License Plugin is used.

+
+
+

To install the plugin some changes need to be done in build.gradle, like following example

+
+
+
+
buildscript {
+  repositories {
+    maven { url 'https://oss.jfrog.org/artifactory/oss-snapshot-local/' }
+  }
+
+  dependencies {
+    classpath 'com.jaredsburrows:gradle-license-plugin:0.8.5-SNAPSHOT'
+  }
+}
+
+apply plugin: 'java-library'
+apply plugin: 'com.jaredsburrows.license'
+
+
+
+

Afterwards execute the following command in the console:

+
+
+

For Windows (Java Application)

+
+
+
+
gradlew licenseReport
+
+
+
+

The Export should look like this:

+
+
+
+
Unresolved include directive in modules/ROOT/pages/solicitor.wiki/master-solicitor.adoc - include::licenses.json[]
+
+
+ +
+

In Solicitor the data is read with the following part of the config

+
+
+
+
"readers" : [ {
+  "type" : "gradle2",
+  "source" : "file:path/to/licenses.json",
+  "usagePattern" : "DYNAMIC_LINKING"
+} ]
+
+
+
+ + + + + +
+ + +The former reader of type gradle is deprecated and should no longer be used. See List of Deprecated Features. +
+
+
+
+
+

31. Gradle (Android)

+
+
+

For the export of the licenses from a Gradle based Android project the Gradle License Plugin is used.

+
+
+

To install the Plugin some changes need to be done in the build.gradle of the Project, like following example

+
+
+
+
buildscript {
+  repositories {
+    jcenter()
+  }
+
+  dependencies {
+    classpath 'com.jaredsburrows:gradle-license-plugin:0.8.5'
+  }
+}
+
+
+
+

There is also a change in the build.gradle of the app: add the following line as the second line

+
+
+
+
apply plugin: 'com.android.application'
+
+
+
+

Afterwards execute the following command in the Terminal of Android studio: +For Windows(Android Application)

+
+
+
+
gradlew licenseDebugReport
+
+
+
+

The Export is in the following folder

+
+
+
+
$Projectfolder\app\build\reports\licenses
+
+
+
+

It should look like this:

+
+
+
+
Unresolved include directive in modules/ROOT/pages/solicitor.wiki/master-solicitor.adoc - include::licenseDebugReport.json[]
+
+
+ +
+

In Solicitor the Data is read with the following part of the config

+
+
+
+
"readers" : [ {
+      "type" : "gradle2",
+      "source" : "file:$/input/licenses.json",
+      "usagePattern" : "DYNAMIC_LINKING"
+   	} ]
+
+
+
+ + + + + +
+ + +The former reader of type gradle is deprecated and should no longer be used. See List of Deprecated Features. +
+
+
+
+
+

32. Working with Decision Tables

+
+
+

Solicitor uses the Drools rule engine to execute business rules. Business rules are +defined as "extended" decision tables. Each such decision table consists of two artifacts:

+
+
+
    +
  • +

    A rule template file in specific drools template format

    +
  • +
  • +

    An Excel (XLSX) table which defines the decision table data

    +
  • +
+
+
+

When processing, Solicitor will internally use the rule template to create one or multiple rules for every record found in the Excel sheet. The following points are important here:

+
+
+
    +
  • +

    Rule templates:

    +
    +
      +
    • +

      Rule templates should be regarded as part of the Solicitor implementation and should not be changed on an engagement level.

      +
    • +
    +
    +
  • +
  • +

    Excel decision table data

    +
    +
      +
    • +

      The Excel tables might be extended or changed on a per project level.

      +
    • +
    • +

      The rules defined by the tabular data will have decreasing "salience" (priority) from top to bottom

      +
    • +
    • +

      In general multiple rules defined within a table might fire for the same data to be processed; the definition of the rules within the rule template will normally ensure that once a rule from the decision table was processed no other rule from that table will be processed for the same data

      +
    • +
    • +

      The excel tables contain header information in the first row which is only there for documentation purposes; the first row is completely ignored when creating rules from the xls

      +
    • +
    • +

      The rows starting from the second row contain decision table data

      +
    • +
    • +

      The first "empty" row (which does not contain data in any of the defined columns) ends the decision table

      +
    • +
    • +

      Decision tables might use multiple condition columns which define the data that a rule matches. Often such conditions are optional: If left free in the Excel table the condition will be omitted from the rule conditions. This allows to define very specific rules (which only fire on exact data patterns) or quite general rules which get activated on large groups of data. Defining general rules further down in the table (with lower salience/priority) ensures that more specific rules get fired earlier. This even allows to define a default rule at the end of the table which gets fired if no other rule could be applied.

      +
    • +
    +
    +
  • +
  • +

    rule groups: Business rules are executed within groups. All rules resulting from a single decision table are assigned to the same rule group. The order of execution of the rule groups +is defined by the sequence of declaration in the config file. Processing of the current group will +be finished when there are no more rules to fire in that group. Processing of the next group will then start. Rule groups which have been finished processing will not be resumed even if rules within that group might have been activated again due to changes of the facts.

    +
  • +
+
+
+
+
+

33. Extended comparison syntax

+
+
+

By default any conditions given in the fields of decision tables are simple textual comparisons: The condition is true if the property of the model is identical to the given value in the XLS sheet.

+
+
+

Depending on the configuration of the rule templates for some fields, an extended syntax might be available. For those fields the following syntax applies:

+
+
+
    +
  • +

    If the given value of the XLS field starts with the prefix NOT: then the outcome of the remaining condition is logically negated, i.e. this field condition is true if the rest of the condition is NOT fulfilled.

    +
  • +
  • +

    A prefix of REGEX: indicates that the remainder of the field defines a Java Regular Expression. For the condition to become true the whole property needs to match the given regular expression.

    +
  • +
  • +

    The prefix RANGE: indicates that the remainder of the field defines a Maven Version Range. Using this only makes sense on the artifact version property.

    +
  • +
  • +

    If no such prefix is detected, then the behavior is identical to the normal (verbatim) comparison logic

    +
  • +
+
+
+

Fields which are subject to this extended syntax are marked explicitly in the following section.

+
+
+
+
+

34. Standard Business Rules

+
+
+

The processing of business rules is organized in different phases. Each phase might consist of multiple decision tables to be processed in order.

+
+
+
+
+

35. Phase 1: Determining assigned Licenses

+
+
+

In this phase the license data imported via the readers is cleaned and normalized. At the end of this phase the internal data model should clearly represent all components and their assigned licenses in normalized form.

+
+
+

The phase itself consists of two decision tables / rule groups:

+
+
+
+
+

36. == Decision Table: Explicitely setting Licenses

+
+
+

With this decision table it is possible to explicitly assign NormalizedLicenses to components. This will be used if the imported RawLicense data is either incomplete or incorrect. Items which have been processed by rules of this group will not be reprocessed by the next rule group.

+
+
+
    +
  • +

    LHS conditions:

    +
    +
      +
    • +

      Engagement.clientName

      +
    • +
    • +

      Engagement.engagementName

      +
    • +
    • +

      Application.applicationName

      +
    • +
    • +

      ApplicationComponent.groupId

      +
    • +
    • +

      ApplicationComponent.artifactId

      +
    • +
    • +

      ApplicationComponent.version

      +
    • +
    • +

      RawLicense.declaredLicense

      +
    • +
    • +

      RawLicense.url

      +
    • +
    +
    +
  • +
  • +

    RHS result:

    +
    +
      +
    • +

      NormalizedLicense.normalizedLicenseType

      +
    • +
    • +

      NormalizedLicense.normalizedLicense

      +
    • +
    • +

      NormalizedLicense.normalizedLicenseUrl

      +
    • +
    • +

      NormalizedLicense.comment

      +
    • +
    +
    +
  • +
+
+
+

: On these fields the Extended comparison syntax might be used

+
+
+

All RawLicenses which are in scope of fired rules will be marked so that they do not get reprocessed by the following decision table.

+
+
+
+
+

37. == Decision Table: Detecting Licenses from Imported Data

+
+
+

With this decision table the license info from the RawLicense is mapped to the NormalizedLicense. This is based on the name and/or URL of the license as imported via the readers.

+
+
+
    +
  • +

    LHS conditions:

    +
    +
      +
    • +

      RawLicense.declaredLicense

      +
    • +
    • +

      RawLicense.url

      +
    • +
    +
    +
  • +
  • +

    RHS result:

    +
    +
      +
    • +

      NormalizedLicense.normalizedLicenseType

      +
    • +
    • +

      NormalizedLicense.normalizedLicense

      +
    • +
    +
    +
  • +
+
+
+

: On these fields the Extended comparison syntax might be used

+
+
+
+
+

38. Phase 2: Selecting applicable Licenses

+
+
+

Within this phase the actually applicable licenses will be selected for each component.

+
+
+

This phase consists of two decision tables.

+
+
+
+
+

39. == Choosing specific License in case of Multi-Licensing

+
+
+

This group of rules has the speciality that it might match to a group of NormalizedLicenses associated to an ApplicationComponent. In case that multiple licenses are associated to an ApplicationComponent one of them might be selected as "effective" license and the others might be marked as Ignored.

+
+
+
    +
  • +

    LHS conditions:

    +
    +
      +
    • +

      ApplicationComponent.groupId

      +
    • +
    • +

      ApplicationComponent.artifactId

      +
    • +
    • +

      ApplicationComponent.version

      +
    • +
    • +

      NormalizedLicense.normalizedLicense (licenseToTake; mandatory)

      +
    • +
    • +

      NormalizedLicense.normalizedLicense (licenseToIgnore1; mandatory)

      +
    • +
    • +

      NormalizedLicense.normalizedLicense (licenseToIgnore2; optional)

      +
    • +
    • +

      NormalizedLicense.normalizedLicense (licenseToIgnore3; optional)

      +
    • +
    +
    +
  • +
  • +

    RHS result

    +
    +
      +
    • +

      license matching "licenseToTake" will get this value assigned to effectiveNormalizedLicense

      +
    • +
    • +

      licenses matching "licenseToIgnoreN" will get IGNORED assigned to effectiveNormalizedLicenseType and Ignored assigned to effectiveNormalizedLicense

      +
    • +
    +
    +
  • +
+
+
+

: On these fields the Extended comparison syntax might be used

+
+
+

It is important to note that the rules only match, if all licenses given in the conditions actually exist and are assigned to the same ApplicationComponent.

+
+
+
+
+

40. == Selecting / Overriding applicable License

+
+
+

The second decision table in this group is used to define the effectiveNormalizedLicense (if not already handled by the decision table before).

+
+
+
    +
  • +

    LHS conditions:

    +
    +
      +
    • +

      ApplicationComponent.groupId

      +
    • +
    • +

      ApplicationComponent.artifactId

      +
    • +
    • +

      ApplicationComponent.version

      +
    • +
    • +

      NormalizedLicense.normalizedLicenseType

      +
    • +
    • +

      NormalizedLicense.normalizedLicense

      +
    • +
    +
    +
  • +
  • +

    RHS result:

    +
    +
      +
    • +

      NormalizedLicense.effectiveNormalizedLicenseType (if empty in the decision table then the value of normalizedLicenseType will be taken)

      +
    • +
    • +

      NormalizedLicense.effectiveNormalizedLicense (if empty in the decision table then the value of normalizedLicense will be taken)

      +
    • +
    • +

      NormalizedLicense.effectiveNormalizedLicenseUrl (if empty in the decision table then the value of normalizedLicenseUrl will be taken)

      +
    • +
    +
    +
  • +
+
+
+

: On these fields the Extended comparison syntax might be used

+
+
+
+
+ +
+
+

The third phase is the legal evaluation of the licenses and the check whether OSS usage is according to defined legal policies. Again this phase comprises two decision tables.

+
+
+
+
+

42. == Pre-Evaluation based on common rules

+
+
+

Within the pre evaluation the license info is checked against standard OSS usage policies. This roughly qualifies the usage and might already determine licenses which are OK in any case or which need to be further evaluated. Furthermore they qualify whether the license text or source code needs to be included in the distribution. The rules in this decision table are only based on the effectiveNormalizedLicense and do not consider any project, application or component information.

+
+
+
    +
  • +

    LHS condition:

    +
    +
      +
    • +

      NormalizedLicense.effectiveNormalizedLicenseType

      +
    • +
    • +

      NormalizedLicense.effectiveNormalizedLicense

      +
    • +
    +
    +
  • +
  • +

    RHS result:

    +
    +
      +
    • +

      NormalizedLicense.legalPreApproved

      +
    • +
    • +

      NormalizedLicense.copyLeft

      +
    • +
    • +

      NormalizedLicense.licenseCompliance

      +
    • +
    • +

      NormalizedLicense.licenseRefUrl

      +
    • +
    • +

      NormalizedLicense.includeLicense

      +
    • +
    • +

      NormalizedLicense.includeSource

      +
    • +
    +
    +
  • +
+
+
+
+
+

43. == Final evaluation

+
+
+

The decision table for final legal evaluation defines all rules which are needed to create the result of the legal evaluation. Rules here might be general for all projects or even very specific to a project if the rule cannot be applied to other projects.

+
+
+
    +
  • +

    LHS condition:

    +
    +
      +
    • +

      Engagement.clientName

      +
    • +
    • +

      Engagement.engagementName

      +
    • +
    • +

      Engagement.customerProvidesOss

      +
    • +
    • +

      Application.applicationName

      +
    • +
    • +

      ApplicationComponent.groupId

      +
    • +
    • +

      ApplicationComponent.artifactId

      +
    • +
    • +

      ApplicationComponent.version

      +
    • +
    • +

      ApplicationComponent.usagePattern

      +
    • +
    • +

      ApplicationComponent.ossModified

      +
    • +
    • +

      NormalizedLicense.effectiveNormalizedLicenseType

      +
    • +
    • +

      NormalizedLicense.effectiveNormalizedLicense

      +
    • +
    +
    +
  • +
  • +

    RHS result:

    +
    +
      +
    • +

      NormalizedLicense.legalApproved

      +
    • +
    • +

      NormalizedLicense.legalComments

      +
    • +
    +
    +
  • +
+
+
+

: On these fields the Extended comparison syntax might be used

+
+
+
+
+

44. Amending the builtin decision tables with own rules

+
+
+

The standard process as described before consists of 6 decision tables / rule +groups to be processed in sequence. When using the builtin base configuration all those decision tables use the internal sample data / rules as contained in Solicitor.

+
+
+

To use your own rule data there are three approaches:

+
+
+
    +
  • +

    Include your own rules section in the project configuration file (so not inheriting from +the builtin base configuration file) and reference your own decision tables there.

    +
  • +
  • +

    Create your own "Solicitor Extension" which might completely redefine/replace the builtin Solicitor setup including all decision tables and the base configuration file. See Extending Solicitor for details.

    +
  • +
  • +

    Make use of the optional project specific decision tables which are defined in the default base configuration: For every builtin decision table there is an optional external decision table (expected in the filesystem) which will be checked for existence. If such external decision table exists it will be processed first - before processing the builtin decision table. Thus it is possible to amend / override the builtin rules by project specific rules. When you create the starter configuration of your project as described in Starting a new project, those project specific decision tables are automatically created.

    +
  • +
+
+
+
+
+

45. Reporting / Creating output documents

+
+
+

After applying the business rules the resulting data can be used to create reports and other output documents.

+
+
+

Creating such reports consists of three steps:

+
+
+
    +
  • +

    transform and filter the model data by using an embedded SQL database

    +
  • +
  • +

    determining difference to previously stored model (optional)

    +
  • +
  • +

    Template based reporting via

    +
    +
      +
    • +

      Velocity templates (for textual output like e.g. HTML)

      +
    • +
    • +

      Excel templates

      +
    • +
    +
    +
  • +
+
+
+
+
+

46. SQL transformation and filtering

+
+ +
+
+
+

47. == Database structure

+
+
+

After the business rules have been processed (or a Solicitor data model has been loaded via +command line option -l) the model data is stored in a dynamically created internal SQL database.

+
+
+
    +
  • +

    For each type of model object a separate table is created. The tablename is the name of model object type written in uppercase characters. (E.g. type NormalizedLicense stored in table NORMALIZEDLICENSE)

    +
  • +
  • +

    All properties of the model objects are stored as strings in fields named like the properties within the database table. Field names are case sensitive (see note below for handling this in SQL statements).

    +
  • +
  • +

    An additional primary key is defined for each table, named ID_<TABLENAME>.

    +
  • +
  • +

    For all model elements that belong to some parent in the object hierarchy (i.e. all objects except ModelRoot) a foreign key field is added named PARENT_<TABLENAME> which contains the unique key of the corresponding parent

    +
  • +
+
+
+
+
+

48. == SQL queries for filtering and transformation

+
+
+

Each Writer configuration (see [Writers / Reporting]) includes a section which references SQL select statements that are applied on the database data. The result of the SQL select statements is made accessible for the subsequent processing of the Writer via the dataTable name given in the configuration.

+
+
+
+
+

49. == Postprocessing of data selected from the database tables

+
+
+

Before the result of the SQL select statement is handed over to the Writer the following postprocessing +is done:

+
+
+
    +
  • +

    a rowCount column is added to the result which gives the position of the entry in the result set (starting with 1).

    +
  • +
  • +

    Columns named ID_<TABLENAME> are replaced with columns named OBJ_<TABLENAME>. The fields of those columns are filled with the corresponding original model objects (java objects).

    +
  • +
+
+
+ + + + + +
+ + +The result table column OBJ_<TABLENAME> gives access to the native Solicitor data model (java objects), e.g. in the Velocity writer. As this breaks the decoupling done via the SQL database using this feature is explicitly discouraged. It should only be used with high caution and in exceptional situations. The feature might be discontinued in future versions without prior notice. +
+
+
+
+
+

50. Determining difference to previously stored model

+
+
+

When using the command line option -d Solicitor can determine difference information between two different data models (e.g. the difference between the licenses of the current release and a former release.) The difference is calculated on the result of the above described SQL statements:

+
+
+
    +
  • +

    First the internal reporting database is created for the current data model and all defined SQL statements are executed

    +
  • +
  • +

    Then the internal database is recreated for the "old" data model and all defined SQL statements are executed again

    +
  • +
  • +

    Finally for each defined result table the difference between the current result and the "old" result +is calculated

    +
  • +
+
+
+

To correctly correlate corresponding rows of the two different versions of table data it is necessary to define explicit correlation keys for each table in the SQL select statement. +It is possible to define up to 10 correlation keys named CORR_KEY_X with X in the range from 0 to 9. CORR_KEY_0 has highest priority, CORR_KEY_9 has lowest priority.

+
+
+

The correlation algorithm will first try to match rows using CORR_KEY_0. It will then attempt to correlate unmatched rows using CORR_KEY_1 etc. Correlation will stop when

+
+
+
    +
  • +

    all correlations keys CORR_KEY_0 to CORR_KEY_9 have been processed OR

    +
  • +
  • +

    the required correlation key column does not exist in the SQL select result OR

    +
  • +
  • +

    there are no unmatched "new" rows OR

    +
  • +
  • +

    there are no unmatched "old" rows

    +
  • +
+
+
+

The result of the correlation / difference calculation is stored in the reporting table data structure. For each row the status is accessible, indicating whether

+
+
+
    +
  • +

    The row is "new" (did not exist in the old data)

    +
  • +
  • +

    The row is unchanged (no changes in the field values representing the properties of the Solicitor data model)

    +
  • +
  • +

    The row is changed (at least one field corresponding to the Solicitor data model changed)

    +
  • +
+
+
+

For each field of "changed" or "unchanged" rows the following status is available:

+
+
+
    +
  • +

    Field is "changed"

    +
  • +
  • +

    Field is "unchanged"

    +
  • +
+
+
+

For each field of such rows it is furtheron possible to access the new and the old field value.

+
+
+
+
+

51. Sample SQL statement

+
+
+

The following shows a sample SQL statement showing some join over multiple tables and the use of correlation keys.

+
+
+
+
Unresolved include directive in modules/ROOT/pages/solicitor.wiki/master-solicitor.adoc - include::../resources/com/devonfw/tools/solicitor/sql/allden_normalizedlicenses.sql[]
+
+
+
+ + + + + +
+ + +Above example also shows how the case sensitive column names have to be handled within the SQL +
+
+
+
+
+

52. Writers

+
+
+

The above described SQL processing is identical for all Writers. Writers only differ in the way the output document is created based on a template and the reporting table data obtained by the SQL transformation.

+
+
+
+
+

53. == Velocity Writer

+
+
+

The Velocity Writer uses the Apache Velocity Templating Engine to create text based reports. The reporting data tables created by the SQL transformation are directly put into the Velocity Context.

+
+
+

For further information see the

+
+
+
    +
  • +

    Velocity Documentation

    +
  • +
  • +

    The Solicitor JavaDoc (which also includes details on how to access the diff information for rows and fields of reporting data tables)

    +
  • +
  • +

    The samples included in Solicitor

    +
  • +
+
+
+
+
+

54. == Excel Writer

+
+ +
+
+
+

55. == Using Placeholders in Excel Spreadsheets

+
+
+

Within Excel spreadsheet templates there are two kinds of placeholders / markers possible, which control the processing:

+
+
+
+
+

56. == Iterator Control

+
+
+

The templating logic searches within the XLSX workbook for fields containing the names of the +reporting data tables as defined in the Writer configuration like e.g.:

+
+
+
    +
  • +

    #ENGAGEMENT#

    +
  • +
  • +

    #LICENSE#

    +
  • +
+
+
+

Whenever such a string is found in a cell this indicates that this row is a template row. For each entry in the respective reporting data table a copy of this row is created and the attribute replacement will be done with the data from that reporting table. (The pattern #…​# will be removed when copying.)

+
+
+
+
+

57. == Attribute replacement

+
+
+

Within each row which was copied in the previous step the templating logic searches for the string pattern $someAttributeName$ where someAttributeName corresponds to the column names of the reporting table. Any such occurrence is replaced with the corresponding data value.

+
+
+
+
+

58. == Representation of Diff Information

+
+
+

In case that a difference processing (new vs. old model data) was done this will be represented +as follows when using the XLS templating:

+
+
+
    +
  • +

    For rows that are "new" (so no corresponding old row available) an Excel note indicating that this row is new will be attached to the field that contained the #…​# placeholder.

    +
  • +
  • +

    Fields in non-new rows that have changed their value will be marked with an Excel note indicating the old value.

    +
  • +
+
+
+
+
+

59. Resolving of License URLs

+
+
+

Resolving of the content of license texts which are referenced by the URLs given in NormalizedLicense is done in the following way:

+
+
+
    +
  • +

    If the content is found as a resource in the classpath under licenses this will be taken. (The Solicitor application might include a set of often used license texts and thus it is not necessary to fetch those via the net.) If the classpath does not contain the content of the URL the next step is taken.

    +
  • +
  • +

    If the content is found as a file in subdirectory licenses of the current working directory this is taken. If no such file exists the content is fetched via the net. The result will be written to the file directory, so any content will only be fetched once. (The user might alter the files in that directory to change/correct its content.) A file of length zero indicates that no content could be fetched.

    +
  • +
+
+
+
+
+

60. Encoding of URLs

+
+
+

When creating the resource or filename for given URLs in the above steps the following encoding scheme will be applied to ensure that always a valid name can be created:

+
+
+

All "non-word" characters (i.e. characters outside the set [a-zA-Z_0-9]) are replaced by underscores (“_”).

+
+
+
+
+

61. Feature Deprecation

+
+
+

Within the lifecycle of the Solicitor development features might be discontinued due +to various reasons. In case that such discontinuation is expected to break existing projects +a two stage deprecation mechanism is used:

+
+
+
    +
  • +

    Stage 1: Usage of a deprecated feature will produce a warning only giving details on +what needs to be changed.

    +
  • +
  • +

    Stage 2: When a deprecated feature is used Solicitor by default will terminate with an error +message giving information about the deprecation.

    +
  • +
+
+
+

By setting the property solicitor.deprecated-features-allowed to true +(e.g. via the command line, see Configuration of Technical Properties), even in second stage +the feature will still be available and only a warning will be logged. The project setup should in any +case ASAP be changed to no longer use the feature as it might soon be removed without further +notice.

+
+
+ + + + + +
+ + +Enabling the use of deprecated feature via the above property should only be +a temporary workaround and not a standard setting. +
+
+
+ + + + + +
+ + +If usage of a feature should be discontinued immediately (e.g. because it might lead to +wrong/misleading output) the first stage of deprecation will be skipped. +
+
+
+
+
+

62. List of Deprecated Features

+
+
+

The following features are deprecated via the above mechanism:

+
+
+ +
+
+
+
+

Appendix A: Default Base Configuration

+
+
+

The builtin default base configuration contains settings for the rules and writers section +of the Solicitor configuration file which will be used if the project specific config file omits those sections.

+
+
+
Default Configuration
+
+
Unresolved include directive in modules/ROOT/pages/solicitor.wiki/master-solicitor.adoc - include::../resources/com/devonfw/tools/solicitor/config/solicitor_base.cfg[]
+
+
+
+
+
+

Appendix B: Built in Default Properties

+
+
+

The following lists the default settings of technical properties as given by the built in application.properties file.

+
+
+

If required these values might be overridden on the command line when starting Solicitor:

+
+
+
+
java -Dpropertyname1=value1 -Dpropertyname2=value2 -jar solicitor.jar <any other arguments>
+
+
+
+
application.properties
+
+
Unresolved include directive in modules/ROOT/pages/solicitor.wiki/master-solicitor.adoc - include::../resources/application.properties[]
+
+
+
+
+
+

Appendix C: Extending Solicitor

+
+
+

Solicitor comes with a sample rule data set and sample reporting templates. In general it will +be required to correct, supplement and extend this data sets and templates. This can be done straightforward +by creating copies of the appropriate resources (rule data XLS and template files), adopting them and furtheron referencing those copies instead of the original resources from the project configuration file.

+
+
+

Even though this approach is possible it will result in hard to maintain configurations, +especially in the case of multiple projects using Solicitor in parallel.

+
+
+

To support such scenarios Solicitor provides an easy extension mechanism which allows +to package all those customized configurations into a single archive and reference it from the +command line when starting Solicitor.

+
+
+

This facilitates configuration management, distribution and deployment of such extensions.

+
+
+
+
+

63. Format of the extension file

+
+
+

The extensions might be provided as JAR file or even as a simple ZIP file. There is only +one mandatory file which contains (at least metadata) about the extension and which needs +to be included in this archive in the root folder.

+
+
+
application-extension.properties
+
+
Unresolved include directive in modules/ROOT/pages/solicitor.wiki/master-solicitor.adoc - include::../resources/samples/application-extension.properties[]
+
+
+
+

This file is included via the standard Spring Boot profile mechanism. Besides containing +naming and version info on the extension this file might override any +property values defined within Solicitor.

+
+
+

Any other resources (like rule data or templates) which need to be part of the Extension +can be included in the archive as well - either in the root directory or any subdirectories. +If the extension is active those resources will be available on the classpath like any +resources included in the Solicitor jar.

+
+
+

Overriding / redefining the default base configuration within the Extension enables to +update all rule data and templates without the need to touch the projects configuration +file.

+
+
+
+
+

64. Activating the Extension

+
+
+

The Extension will be activated by referencing it as follows when starting Solicitor:

+
+
+
+
java -Dloader.path=path/to/the/extension.zip -jar solicitor.jar <any other arguments>
+
+
+
+
+
+

Appendix D: Release Notes

+
+
+
+
Changes in 1.1.1
+
+
+
    +
  • +

    Corrected order of license name mapping which prevented Unlicense, The W3C License, WTFPL, Zlib and +Zope Public License 2.1 to be mapped.

    +
  • +
+
+
+
Changes in 1.1.0
+
+
+
    +
  • +

    https://github.com/devonfw/solicitor/issues/67: Inclusion of detailed license information for the +dependencies included in the executable JAR. Use the '-eug' command line option to store this file +(together with a copy of the user guide) in the current work directory.

    +
  • +
  • +

    Additional rules for license name mappings in decision table LicenseNameMappingSample.xls.

    +
  • +
  • +

    https://github.com/devonfw/solicitor/pull/61: Solicitor can now run with Java 8 or Java 11.

    +
  • +
+
+
+
Changes in 1.0.8
+
+
+
    +
  • +

    https://github.com/devonfw/solicitor/issues/62: New Reader of type npm-license-checker +for reading component/license data collected by NPM License Checker (https://www.npmjs.com/package/license-checker). +The type of the existing Reader for reading CSV data from the NPM License Crawler has been changed from npm +to npm-license-crawler-csv. (npm is still available but deprecated.) Projects should adopt their Reader +configuration and replace type npm by npm-license-crawler-csv.

    +
  • +
+
+
+
Changes in 1.0.7
+
+
+
    +
  • +

    https://github.com/devonfw/solicitor/issues/56: Enable continuing analysis in multiapplication projects even if some license files are unavailable.

    +
  • +
  • +

    Described simplified usage of license-maven-plugin without need to change pom.xml. (Documentation only)

    +
  • +
  • +

    Ensure consistent sorting even in case that multiple "Ignored" licenses exist for a component

    +
  • +
+
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/getting-started/1.0/_images/images/architecture.png b/docs/getting-started/1.0/_images/images/architecture.png new file mode 100644 index 00000000..f0f57a0f Binary files /dev/null and b/docs/getting-started/1.0/_images/images/architecture.png differ diff --git a/docs/getting-started/1.0/_images/images/devonfw-org-old.drawio b/docs/getting-started/1.0/_images/images/devonfw-org-old.drawio new file mode 100644 index 00000000..53ce818d --- /dev/null +++ b/docs/getting-started/1.0/_images/images/devonfw-org-old.drawio @@ -0,0 +1 @@ +7L1Zk6tIli38a+oxy5iHR2aQmMUkXtqYQcwz6Nd/uCKyqjJPftbdt+r0zb5WkWYnJAc5sIe119ruivwLyrWHNEVDqfVp1vwFgdLjLyj/FwRBcJy8foGR82uEhtGvgWKq0q8h+O8Dj+qdfQ9C36NrlWbzb05c+r5ZquG3g0nfdVmy/GYsmqZ+/+1ped/89qpDVGQ/DDySqPlx1K/SpfwapRDy7+NyVhXlr1eGCfrrSBv9evL3k8xllPb7Pwyhwl9Qbur75etVe3BZA4z3q13ewnPL1Sa/ucewqkIi9w38y9dk4n/nI397hCnrlv/jqccSF07WSWX15vf/scXZuoXfH4G2qFm/7fX9rMv5qwGnfu3SDEwC/wVl97JasscQJeDofoXMNVYubfN9OK+ahuubfvp8Fs1x8B8Y77tFjNqqAVGkrkmVRtdVuL6be3AdNpqS79ABZ8/L1NfZP8wDfX6+5/mH8a+fa/y/aKFvS27ZtGTHP8THt8WkrG+zZTqvU76PYt8G+g5+GP1+v/89lK6I+R4s/yGO8F8Ho+/4Lf429999dL34dtN/w2XwH7iMaJZv4/zGd8S49r8e+GX+2Je5TrgMdfz94PWqAL/TbOu7fL+OT9nQz9XST9WVuN9zX7f6Nf3XyT9EyWXP5beh8Fsndn2X/S4+voeipiq6621yuS67xlngnepKYOb7QFul6SdI/ij2/h6d0O/DTKymryBLs+9Dj+/7hf504fLTogX5Fyf4z7IcDv3WdDjyB6ZD/sByfxv8l5sO/V9iOvR3UYcS/9dNh/3npruq6QBeVu2ngLOf38w8fHEAkMzRr2/y6gA2/hswqFGcNSaAqKoHABH3y9K3f4AcSz/8WJMYiBUF6r9Qk77vjC+XBXAVBpgEEfd9/+vcd9E09+uUZH9Nrisj4vXvEHWX4cSymi/kBK+qKzQunBU/Z49rnP116IqfGAcIhf0V/20k0PAPkYD8ynx+EwnQz4oE/OcSDFEU6I8zf5pRf4dLf3v/fy+5iP8luET86SxH/ueWS6O5/Bud6NelqbqLw/wqTaDfWu4H6tL8Dpl+4DT/Tej6FSTbowDq7K97Fjd90c9/fUVb9LP8BkPobx2H/eg4FPsr9gcsBv9JjqP++/XkH9z0x7b9T13ycSYbJXXxSad/lDWfn/9a0frjKpKkHfrX6tK8OagS03cVSaMlAoXjGr+It/hxNaAh4kWPrn+jrlibaPoFRqhPKflZkIf8toogKPlrXfmHCCD/IHPJn5W49A/+/6gV7PVDGPzpdcifDW2xn6ZTf+wtfDnt18j9t9f+CXrx89z2R/2FfyFj43mBEcX/QTn0J+Ad8I8q/DsVsuXfufBPO+7n5cK/ugXwu1wQKFEUsf9Bk/4Juirwj72B9vxlKaPql3mJpn+nwz/tu5+XDv9q5fl7MS8I7E8tDb+vtn+
GdPhRk77WdrgSIvtlXLNr7N8J8c967+clxP+LuhT6T3XpOmfT/EvUpVNfpb80vzR901RDP/wCTvjlipoaSNYL50Vwh8Pf9OqP9Y8QRYH4L7Rif1pIIeTvWqY4/qPU/bWt8T/S6oB/1Lr/HMj+T6Xjn2DZ4deJ/8F2SR9XRdb9G0X/abf9NBT9NcB/GstGLpSBfqJJMepPpziRH5XLt+IE0fXvZPhnPffzkuHPvHQqYAL6Iej/Z0unIPZe81/7qQCrokt0XRCwCnDu39rdfzvtly7bLy7RLVeg/RI3F6n4ycunGP17LoBgxA/RQP0BF8DRv/7qpX99PPy4gPpZT/7l14058fSbDTzY65ehWYvq3xXvn98f8fOS/H/pEu6fgeL9qJerf1e0f95jPy/W/1/UyNh/qpGXLCm766rF+QsFVnBhsH8V7BNKs/+I1yb+jytqo/+4tMn3iwm44uv1558mGq5HBa///+XzRWt5gf+/KJ8p7LfBRv8YbCTyY6z9Ovavj7X/Ldr597zzTwCs6I/auZ1+ScosqS8Y/De+/rOe+2kAi/4XFmx/D7D/Cvz8Y2T+Ywz9Dcz+iqofxESZr7eI+NnRwlUea9g7dJeKnrl+9IdbCm7BMCyLXW9FiGOe129OoPLxcwIrpazjCgyjStnTPVh6B4fZ10O86dcL+TrCvFiFYUwYvGfNay4/mxnGVszrEIVf76WEIM6HcoLpVKnhLZhlQ4muwgf7sCGluPMCbjjFcB3v34hePPnbnsriGdq34qm99PtZ35MufVRKpbkt8OKu8eyuQcVb4rXnDXkG4ruQWL7Yd0bI8gYfrf2J6TPM1k8r9+ogZJ989GSmNeJ6VnhdMzD+/dXoThUyT8kutMIgV2oA138V8smGt7OI3jrGP+2HyL5rHhEeXKweKLcVt+oGyU8H4mdlZLrC2DfFDIYn/3RYukg4cZYODr9NFh9escdySQCzYXNrMze6FaHEFr3BPevhOuSOJZv43vlom3XGbm3QibRbi+CIf7tHTGOpiU09OUFxi/qm7TT/gOq7UyQ9IzKUBa8pk5jFWQ0wW3Eik7SNLYXO7ZRsrVWoywCYwGDm9axu8ZSaF6VooZ6x/VM0il27hkXNfrajxDP7ZVL2M3alNXv5XzM2V6ILIvGBexkuYBU/0K4XWHW9XZVCGrEFeVzuJHq3ESzPxoIVTdmgSGDalTBV68Ninh9I2KbzbiCWJNfYE3dcXYf6KNKFenjr9i4h59OIxVfYhq1PRs8kSK5sFANTglFKmyd3T9Gwpbs1t+kgm1P0OdJtuGbXD0Ysvvu5ua8fVn0eQfL39xpUUn9/x0AtN7EWzzzt0RuR+ytpO0h/dHmLT8MuBI+w0s97Tr8uMchShnR2+WvlG5MwKT/H39c9mfS7MNX4jDrKCvPzCKjsMzHHPj5xzkqyik5k1B1usNBKhM1xJFyTwQ0aBtnYYtMgjZuL1vjKMXlA31sdRXyMA9/7gE8nmbuaxEkpXF3YDR3ertE7cl+BR4ICYs+Ha14Q3SEmNV4feJN5MgGyIzAvuPU7/57pyByRo7CeLTq269jor5tG3NTWqorn50abXmWYnRXKO9rIzZAjNfNOl8B61f12i1+LZ6TTUEQG0z8U7aixnmXu794pnsJjWbC26uRn1NgwfyMf5nAQQ7wJ5JdteY17CjKjcnMXehfQsG3JsxF03UTS0aWf3YbbcaZaRenMg68d9zoDH4eEvMdxeT4fK+QE2pUO4gPkdfWZUmFvH6ve4jcdPkcwJx6LjtuDhY/n6BAadu+v0XnXTLYvDyTNN3lQyCMV2i2UlZJhvmLj1svgqZ9smwCSqHJLOx3623tsu/2859cUtZ3xFRNB6f5M5vdaOdYQI49D/I4tQ+eway5HuU5FPX1/jxBLT26glzU/SEGB7drTU1kP5f2FV0j9BevfUShEws5oTKtPqbbP/USPk9i6WoMyglyQBVK6OPfAiVDaW9vZ7OHQPh97MRnL7sw+OIH
I5w43l8o4+Wt9vNj8mJUw44viuHyuZXBFRBE7ZoryZTPm+Njs/nw/0WTJ40fZMcOYGzclY+uS0rhHfyBuCzCpyjHSEITvO9UUs7ASRmOD1bvMeNTjGN/4fEpuuV3UlKbmvL10h9pNCPjwcbBfnyu2T9FgnVGi56HIO1FX1zvw1io/NuoZOczF6FmTq+nucLphtMpQNGAeZaxP3HAcjhZMwrj5AocWOWuSbO1Kj3X42W1wCL2p2bW/no3freTJM+wrk1EyfJD+Ne9tnBCJN/un+tDKXcOoU3jDdWu+qI9brwL5/rpPS+6KB2uxfIaU1yEhaN1HDNvFjktHJzd70+fg21dkZwp3Qfq+NxbENM/Ny8PVF1XWLJCK8FHVdeSE+juBR8iQ4MYPSH7/ugzBnF+eUwKSWAs9jG8J6cqW4jnZ0cYA2aKzGfDMrbgvXzPmwILo4u/5HYLsbaqIVxe6WWLZ3OHVL4TqaV2mt84puM/5d8YEJt9l/mNXI3+sq0VEHLylXqZHPF/Y0NfMKlQA/Cs3dUW6+0i9L7ljNUxGeLD58q+7/IBXIe5WdgXpvjQpFuXjKo3T8ay97Dbj2OVa8zuysitkrzgxscu0ok17OsGodnTqeRUReprq8eMNVavxjQfKdWngKw53QB9SCTf9Vj8LH/F45Q1CaLyVd1eY+S8EtXlgg30l3k7o1Qvd9lLgpunNyzFCB2jHza7ybWPDvh5f4fZrEoSly7jnLkuIU2UJ15vH5Sg3jwGbuaa1WOzyIFNNLRFfRgp5iXchfRFz/LBGAAbCd3gwOHi6QmXxSBBsMY2Jg3lx5XMBF289v7CLr4CXXircn41dMEqBGuAWavcWF5fpE/B1lCO5DQsubac9BlAFakZc0dlXrLM9c9Qmc+fIKF7Gu44vC+RYuztiIwWatfiR2RNUfyckb11GzBiBvQLpuszUS+Bi3qBecO2K0805duN2qWRQA2h2P5jiO0ssBVwjPkkyt0A12W5RdFONiQ2xpygaBgpBUgKs8knfdGl6ppAfyQri/7m3N5UtkbkqFgPd1MlcRs4b5i8w427X7XNMqn5J9OwCa9bTHfvRQz7PN7EGcElPwtv7IHL72azHvWa/bqrE6sJiRvsKfMH2uir+XGWaHMOGc7rlUon5LqUgx5ir7jj4s9Oa6UEPvRJKi1Dv65vW30e2fXuM+0SY8GIXC9AlRAx13bvcnZxGer23QJD3XRw0WPFlmsKkLFCw34E0YzYIqmqs6neVGYXeeddHwqHHYozwm+2hKNh3dhYywEVIP7r2nKwtuXN045FE4jU3X/kqEAaTg4y0ZDOPCfiBvCWODp1Ah0WRQQnk+5zrgp+zRNNJ6dTKH8YpR+edw0AxXOLy+fp+ML/PQb3qicH1Br9Ko0CO76/DdZWNkL+QqZBBzgrMG3RBcleo5PYptzBaPeH9K9gYjKGukGYZDSTbeq91+7SqY0pP1S++WBHGJp8zBHxTTaFEKo730CqCs73+gkxp/tjLCc596++jqcAVK8YBqh+aiZyTWH0hWHHXP9lrzWGqqpGrY4VtKwPW4LAvpr4qfEU/cx6fvL0IbbspuI1U/Ol1dUxE9epJ37V6+cJPBUlPyp8Ch3EfGZyUX85QWP9T3Ti5vACr7XBh0pKBjbDxeFtfDOdVEJ/Pn0/szkMuHkuKUEfRvA3SdS+P29e98Ez9hSEWJQ1oZhmI71oj2RJHwlLcF5aze8pchhJYhS4z35aC20MMXfw0hK748kHEfs1B3jukahJrDKrJCV25t75SOGJrMJe1yEOukUpasKPnIiy5utk39DNGmVw3c50y0YOqvub7A4oIo6WcK+7FcJ9/O480XHWq6yzqyQsGstB9E3yngmytl4EVVaopmJCvmIgRhpbXx+IQX5bjfCUHKSzHLhbcbusRuIA7aezXs/YSQA2Wg4pm0PRW+nwP4gF/f/ZlGWD2uyT2w17oS9mfEQjar/rBiBWwGMtSqJLbFieiz5j5ri0Qr3z
wN+Lfui5IO9JXMdAZXyHK2JBaMBbHlW3dyhIPJBkl3L4DePm6qtou981xLHaM12hm8f2rHslM/TG/VNp8vUPP+PqshLcxR3zPrbryZ25xvY5wN8tBqLeFCMftCwzuTNPyLmPp/ONZvivRfRA+8yi/c7Wn7pZyWUutOuPmHfzgjopVMsrneZtK/jwvhu0ywEF1skIheX08CS5saQAxOHFYi36tOhBFomR6aop9PTe735krsTi2H/1RZ69MVgaUD0dtPa3v1BR49VOPrrtcV9jFkfmhYKWrPbB4cX36m21gXPE1T2esVnm/c71WKUdQfwcF/JWVi+Ld86c9UWTsyMCDX8dtTOhs68qUiNMiVQ1dIynrzlP211NlvgKc+A5w2h21Xh63VPBhy+6Pvz3HpUf9yyA1ad8TXFpWsTIUnRdWY0e1iv0GNIV5fWKPEw8EgVyMejQ38XbcKcRApP6rqmoZg1wGL6UDExt1UY4WQb6yjJkzdhAsltNfCik2kK4sj12Vzvc3Irx2aL8gTPXVN3InysXGT/lh892iq43al99V+1efsDUKIWdeyCigh1GEr2EX3a7Xasd/XU8u5+uiUnmxiQjy28JS6myGIPwq9aL2NVvPugqog7eanAWBurlxVdtMb2gDGorp/JW0dxZuOZcpuPDEBrSODoWf1ePZ4IgRH1RnF98ZgH1hPb6eK5cpni8iFQKRt1Nmvu4cUJtitAVGl/UkrfZmZQEKh81MX4V7UtIv+11E8AqKQoqjw1kHfpsXFbRIsjBKDtJXRe7bpcgXW9dfCco+pten6o3x/bEZd/cLQS9fSECzRNIgIEd5Iqn4gpLhKkbRzftmYJNrXrnFQPCDzGxPNFiWmCcm+UZOkL+gOrFOE+ISZY3zODrGDcbYUkzxKcx8kqsa/siXQabpBXuOxPSuwNyVMo4SclX08kbElADo1TKjI9Hifr5zjPEy4HG3pv2K6i6WX/I78lbM7qlXseibAh8W3tDKZWM0MxxfbiSKWedqm+65zno+tMbcfSk9D4LjxiCp1X08PRZfLjOhp3N/9Luv7JzC8qdzPtatPxGxyQb42c5HcBpjEwhekyZA277Ok06lN6cuLJMo/IcfaG/A9viBiH0vPO6IYsNTsWf+BU0VMgeODPH2wXIsWJ9g5Sjwxhdb2hg1X0DSkXqNZRG7fSo1wACxCFAkRZvl0cO7Be93NlFAIFxcySOB6F8R+0JH5UMDLkaT7oga0ojnhv5SGOqiMXyFBiERwQ6CC7FzqS1FoF/g4kSx14LD7th+eWG9XWKaVcigsK95dnPHKkoNylpaBaobushC8cZY5PsnaoovlsPqlJJ3qU7Or7hQDeUl7x9PIRBNy6HHBaGaYMzCEQDN3+GjB79nQEJl+KMhLi0p5/AgTxSaWNO9ve3XEyuWma4FV3DxnTjmAe4b9TE+etq5iNTLvzvCrYheiA71isFIoEMF56AERS6oL8X2ZpXrzkzAX86n7V5kyhp4LBYJCfvKIxlnABrqmjwJWPFr14LB7K98sK+UES9WfXtzsa2m/MZ8CWqD77840ls02qtGS+edRxDpxfLpqnxxCct47TDTM+2zORBqvPWBZu/AIKcrKr7GvmFMwtfhvSwKH2MtayafKzc6awg7A1HksqXE6gxC4VQZBAmEltCGg6naqNj5BYkKer8mi+/TcosQEc6dZbyNZ7VuQ37es9WfXvMwmp6TYqIxregcGWm5E2pzIUTdI6tfDGwR3PJbsCjzKRCmN5HhTUst3AwJ4i5S+D2Wa0185SubdXF06/Km3uxl4A0IYhJ0K0Y4AIHTq8tztOhF1wSO/eaWZGqeRJY2NnZU5WlLcqUk6TRc0XGeik8ZoCrPI1G97HRRo6wzhGZ0PEieaNiztjgFLPzeJ66LyWlC33uVXOq2YV7b3WshNdUOE4jchdMBIsnMfCEM7w9uVzz9xdYDhNRA/GoLn63Z/tTjgarsJUIM0H9I8gbOFSGO1rbE2PdVW73
Hpa7PV0emQeXJdaU5jEW+Y+diCGzaVLqU8C/Du4FEXsLy4hJnQdjzPZLS2yuvPEoZ1WRRd6etobGeiVeG+BetOIX5sRGMAXgnJoKKfD/UmuJ5UDKMKys+HVXRf3gSsq5R7X6vcIIKJJwwBXgYlx5HP78jA824ThT8k871/jDIcF7sgo9acnyXzaWdgkGILtS2fbXoveUu8tBhNoZWmDJSedcNry3pBTUUehCQZqlHHhH19C7G6fPj4B1RClcglsou1Gr00mgZYFjgphOGoJX6EYTyI/awW3IBVzURsVavWnxfYzhuH9ZMzCwqHk15gI7OdncC2zZldVOg+xe7eXoenTsJrFz1chOGVEM/ZK9Sw3jRhcAuMt+HaY2g4sApI1TvcQ60oKET/DuJwbZ+8YoIah9SLyR3ItAdnSi8LULuHrTm7eO2ic8ivKVuMHHzBPetVzKFhce2M5FWrz2HxBQAe8VJzdPN+LjnDyoBCFTtA9seheTqSLj0z850xkwNcFp16bzrsfmjPyPh7uyasdbNdDxKbH2bAbuXcBXySK4ubbMp9MnsmmIq7PzwRGTc7KcOMaAvSLlEnALtqgXKW+6iAF7uudkQrbMAf/vx3THZp1ptuj9aQVytHUqeZ/1IaN0jM0Nhvup9bxU4pTGmQm4qfeus7XXQNT2pqNRjINQNMXhn99p5J2sBM8uM39RNuE/3KYOJo10gK4o7+eYfopGZQwB6phNO9lUfxne8v6lfzX1NeLTbu+Jv43twi+SLzZXaqUhyMQ4E6oVkNc4xocM4VUxGySxdslZ2Py+hQuOSKq5sIBjv51y2O4qpd2jZEaoOn5FLrUxsci5oSYwtLlK8JwVNbK4T5KVmG4Om3EvFg0341mHXiWX+7k+u7bJuWOA2Gmmv6AhMJleVOp+k9jwTf0rQrjnlnlmtsDlEhuPqI0ImDtGyu0eHgZsZL4k+cie2qbWP7mPWCZDWiUzivWSPFz7kh7pRUmGlNLFH7h04ig2US1cveD4LZlDCLc1ArpU5fbNSlcpnl17MjBNCxeVJBQpShvOdkJ3rJg2TQMM7hgvLLKRuVXc3BzBIGjC6O7fUjGZm1AR1FEg1s51VOCipZ2tnwR3E4RJ2RMjPiaAQuQhZ/kLD0ukJhmZd2vkRj02r0vugbG+61BoSwwFFQyHGXFPTYeXvuqQ4F78uyR20nNJVbtKUIFQUx8HCAEyplGjQD0IMpqpbIWbevKWAI9Wbg+Vi8gk8oiXmN7P7WnzT7ByjHUZrjQ3mQcjdI0bj87sbowhHmDz5DDF9r9HR1bjCbzjx3klrU2Eyts0lV3zUnv2JKR51Jva6wDiho+W6q6+JsjRQr/24v+pAlj6MtzQItjafj8H3xcnppafW6gFYhlqHGDQqJMbMAiIkSgZn26c8IutLMeD1ZuXE2qYC5Sh+PUym7NwkVB+g/PlILMtIt5gJTOH+uWmZKdmDwGzfwBLtyYQ78eSj/CQaNMaeHH1eDER2LTK2H+KpmBefhhCinUBeFNgFmQTczUa0ybeDZTD/0tKsX9Xo0FU26FBcY4JyN1nb03Gme7Gf3mT5wkJTNDTnbjpXOPWzeqdYa77D/CUd4tnfn+lLKyU6WCcsiK7Stnr+ghb9CQifhlMIeZ9dYER2B2sg9lIRsPORgbe71VUOIj1YQFNpP/Spsuv7GgJN1YlvMpRZxTKywoqTSzY81kAYRyjYnUEK50tO0DEDX/PqsdmUHAiOyK0eplVPmtyHs/jxCBdBu6A9F7hOZjWjmgmD3Ie0A8yHMvJBSTiqaarJR+eWIb2rxJ1krLckfY1lHu9PPwHovTWwrr0Tv2Il6MZpR1uHyMNOPgqwMLinufFMMCBHGw66Nou4nbdkuSyZeCsE2blkGws9XCvI72BlJnVSa3NTs2nP87gtre+SCcQY3Oxynz6SoGgmW7zCgI5dJ6AXOdoktgsaOcAXtQatVHNV6lxgslUwOPV48Gsc3E/0rpWmleYXIfHpfDq
Ul8GG8/ylWZaSt14nn18kMm6ITDZvUqULsZL6Ks4ydVjk3rtjrgL8BkQ8jky8KfngQca1A7VQro4iERMa9mzet9kn784TeXUt5EQuu3JX/jXQSSNVaA/v+GhumOX45K0jkFWyOvf2Xm5fUpnjQmTxKz1hP00L2HjGY5aTWRrpvSOmL7PcN0QMOZEESGpjb47AQRMWyZ17eFU+TuBWGI58KLRSOLX7ZoMtK2BGAdXeYlzrnqLGFlAwGVwT2ttASpKOLbACdgXDQ39VskJEIk+Khjs1VlzfxpUtw9P1lcFDt9yH5ZKKXly40Pxyv7dllwTewoF6Wy/xPEXc3Qe1FgsSCU/g27ZuHRa39/pl/yOzHOOcpuqzHjXQQdRus7w1g0iChXJr2eGVIPTumc9P61YJzSYWs4Ep01GAfcKs7GlydBq309aaANCQkZxb4tAmmQv0czaM5u0jxwVB84I2NKBZOII54Ka4+wtV7cw1I4hYA55IGx7H7zL4pqzYrte/7GCnOrEY/tomYIGM9aCnLK431Lu9iOxAU3WgVo2ANsQBcK+SMIZU4mcNFqct0hlt+V3CCshgb7L3S+KnXoyl0RDuqKAX80cr2PWF29CdZ22yjR9IBQdd82ytBIil1IPGQy99xc3bquOC8Z4uZW/r8zNpudsePqrOiSJElyn0cWJpFuYbXN6Uh4NHzzuqvg9QKSo2yk2qztaRs/yx2FOqsAgGU2T5oBgSIqFVHmdB5+sy0eP+kmcRgpYh/FBS96zbw9sW9xOJoKQVzP5FI3GeHAT4UV4KlISQmlxWiUv0leRG9GL2lsZEcYkkdtXZSRUGip5NAmmKd1Qn048H/AkQRqt/Q/D7OZQDQUbci6bGxCHknEcaA3rQIGsZhJKlBYK7HEfsF5B3IDZpsIa+8iggyi3XZ9BwrJALsFO37lx/A/frF+MlG822qMYI0KQd0hzFAFl02+lsTm+wKi98edzUN9LogV1yWTfel8WeVBAcjdZf0H1/MXyNwO+2z7KgadjBOjX4s8g4nK5qq8TmZUJlog4VmlEaKwManfk75IcANksQBwdrjTwpt5bDYid19/r3ZAehTjyoZeYY/zpDx6kZHbt1TPHwqsVZ8u7mxai2AWbPyYyXRJRfSn53YbnqB3Vkx3VCVbjEJXxTnnHmQIjomKVuPPoScZshEblA7E3kbLbK90BFFUn0bFf67MDX6kV0M2EzzVefPqgsFfDrRqvOc5L+iAEKnLL51mzdJuYSbHowDQFIoU0jJrV5v9fk5ThvsHmEtU/tBceGlblUfDe1B8xZqD5y/OutXekhNCSEO9ip4aFiy1lXJnukuMEL+DTcPFrDqCdoFw/mUqILhO1p7J1588pcOLwqzjLBEfSyPVf37nHyNBusDG42qFYmNgo4vhuSKtE01VKPuhaTUAB4ww3vw6Iw3chqxQ0fgeiqWy1JYy7VeXh26cyF/PYuO3y5o1xElFovss97zlv6govpfuj9Sd4urgwmY0FLJbIRWrqKomHgdyo9RAspKXo/jkN9YSBEXjwxoRAJYjAeRfjOq/BxWK/jTvf3T+0uU6RepnQJeh8ncTOOs2Sk+yuyg7Mwdx1rdascF7pzI9B81BBIuLkP/F4FxsRTIgXCnSUZhI6JKEi0Q3YNftieb2O5AlY60cgfZ0/XD8+FerKSQEa9n2Sn3C9mwPLG2W16muCYn7jq9LrTSIIZEwknghzi8XKPIvO8slNKz64zpezT/nwUu00lNw123y21xtoCVgB5s5/yzy6gyVOHxmkoggmuG9Om2JEsaHWjnctf6FvHc8MGm+pewCTkW75iPjMN0r31OBFWXRdA2Mknh675TmGWbywbhiAY8+wgX75SpECg8mi3c6SZI31Ezx8A0Cbqkl0A6HFn0l4ztt+M8Q0WNABMDzHboXkDoHqMpXFBIF15EawOYYYMVEX/PqgN1DEs6ZpOpTWPi3wsgHe8eL1HDawKuo+ctTczHzHihaFLWw3iAQd
91FhZmGx+mUT6yaRL5AmXRhvXwTFb68wuinhveWqVp4l/Y6mor+sLeQvj548SLvL0vDGaNK69dxLCBRji+bRR90v2A1C+v7R9LJ7JE4/CN375WOdfjjEY09z05ivA52olg2dMSJscwA8peEjVarQM+N7OoVVRpBWGqZ1keiqe986tToPFRS7lyulveJ8cU87wCM0Zo/OokmMmrLiVpconteaNBbf2jEvAHefq0hvtvngrGSIdaE1krKq9z43KQXivbSspXt7s2O4HF7Fae7iNOTxAt+djlEiLPK5aIMrMszPi7DZkRgIZNYaT79a6Ho88HiPLwmQIk0bswXfNQHa2iT0StK6qTs8XAN59SOFBS7ubLt4CDJEJhqrA327MNWxu6Rl/dIDWLK+BGrLLm8jrAPslGt16XOKrMRvTjQMCCbMj0rI82YmZIjUVJtQRdkI7zuTUwmuZcozV6xXDVHck2WeaXvXoYuaZD3pYsxVQxBolzzJJ1SstfehQLv2nHohc80RyK+XkKht8yW0oG1f5xJyAQlCWs1qTzRqyIqQoT4h2590GrKGRYTi1VoBoClt7Bmp4/1bHzfNmyhyIb5BlHY5zXrduuDS/g+cLPR4rcjEH8pGGgaVKliy/SsfywmKtAqqQ4xUTdyKHSlJ35krYuCbeRonZhvp0iWjodjMUXdqT961bgPiTPYjoR5N4ssctY4M3gns4bXaPIJnert89tSIEDFE0CpR9wJpBDucM+k0z/IBCJWB22gEVLnDoFVvAPR9tqj04OZhKrGJfQ3GkoItgTzsR3tLl7u+P8u3ZwJbNDDaCBJcCcVUaG0PG8+iLiqyTITRVlZPuR+9KOdx1Nx4W1foIdsOSeYuKnMz3e+bO2uyl1DSE2ein+d61jNoE1N0w2WMKgAG34/VGKnI6n7WNOsYhLvjbCp5vroSTF45AwzA1cUU7i0FugtwaL42+IfKkR65Xmu+Tmvr8RmCHP/ChdU1JJWRIWBKUoXyNeTZ1bsdqvOon8HA/Pgti9NSsuNHJrD1ZFjGsLi4U75040xbl26PFyCXWi5XrEGDCYDoddDhpOwW6VWgJSpB5tUrhYTBvTCz5lGKaLXlHISy5p+5Li4tZRvmMYHbMG40HV9vOSCGWZgL+bEyEegThWA8k2j/RqtXRaVkvowHQWmwJkyBqosjEEakVOVCkcoQAVJ1ngZvBax36ldK2AIKC+EE/cB6lj3BKbLEZ4v2oLRP8iVJ2YwtRXV/Mjlx6JfI2b91tBTEJnK55WCVbLOfLfoAh9EimQyuxqXyFVY4fXkcdUAHweWji9R5pobRPbmPfjDetTMp4Nug5B2u+GMSpS8J9V7elWSp3Z++7ldWjktB6hTnaeI/ITi3sltZR6oL9F3x/SY/Xfil+mqKCBdq4B/KOVr5k7phDxUspZy+jiqFZQN8VtlS5qjfiG0LDXiC4lx9qy34SHlnnNBk+2cLeAMuQIhB2dDwLxuIhHkR5wtoCvMttwyMjM7G0PcnWGz8iIDF0T1ot4im8pxfg4Em0r4B54AnQVehwzzaPZ7KXE7J4nXeR5qH9FTWkmx628XLDPY2q6OXFsTlJTDOePWZRt+VYTbWk7x2wOgQXB9e8LSwWKobGKX7LcsRb3UcEPQkFLHm35F49pYd4Fp5wkYo96uNTiTcSRJkLkXcpJ3QAk29CCfIFVsA3YNS7YzfQUzxmzr2hfkbXaeIKe0XhZysNPaOTIU5QJZ3zVABQVRQ8XggeSsIfN8AL+ZJ/TeUYddARqyk2lriaLp496Hl3F9aBLa685MFqeq198AMjv1qHPqGv9psh+RZYWfNn9RXQ1X2D0XcTOix73RLx1PSsLMKBlG7TVG6uT7A2ghkGG3sH1ZnZi4HfoAEnnwllhO4nlxynvm9nJ3esFY9ON+CSS8dNRZjSLN9Mw0ER7N1HEwnscHuj9P2NfrahjdFCBEojCPIxg5msQ+QwTcYy0sD1tHLpfMwL9n3DCqWwxzgQLyx
osaUnhE/nefE21eNeg/6OTmvEMO4hvBqTfOK7YiqP+KonUEd3nj/7UXajapEwvRP1XivNP+O1qdibZHDwlFFJfa5bwA/+aIeqWT5Sh0cBgjUUCIHQsTfEe46vY/YdJSRqB8XLchhukRcmjrlCOJJbyvs6c9bTE6mWSCPbaVFBCAJ2l2zdnba9mEf3i+wD0O2QxO9K79YQJaDneeNnE36/6/ukx+f9s10M1J4kgJpvv7EyonCOWVMJZdaBXsjNgrexE3Pe5uQFx+/REgQclkhWJvFoXGS1lxmMJ1z6Ojq95G03qTo9bFHyO/+WfE16qCWhX+wcQ7VlCUFAZAkxvSLUIzA54BMZk/l8Ws4XueXtY7Xb/TmDr9qKB9ls/fwko9kWScgAX0xokNwZoAatKr7ble3+dt+xtSZ9HaMGS1karV5FVw8UfoLfTCvNGaFIKGBIKFmP8gPschJxeR5Zk8PPiI+SNpED7Y7LwgNLRkPyGzh2TMp5q2PR5TWR6tRSJ5uMCC+fm9jMWEBA5H51SEfjzUW/e8CE6jiNXFr78GtOoeqtFnQXuhfwVzDd517mKzWKGc6XOTqQKMekVY4mvbsLApcLzLMCA8Oy1iDxwyj1js95v1TBvq3AxYEz/dPwqztVy7lNoKarRlUCt7iEGt2VfSCmP0KY9IIOet8s/Vk5+UZhmaK6qQm+QcAB2r4QYorip3PjU9FV4DmmHq+JRN+mR/r34uVVaE0OdXl7phL9cNxOgjuWqOb1FeZPbeLxmQV9C/a4VJgS3MvB7rieQvv6OS2iZApIRWykv64JfnG+rAjkNVKxKkZr03pwes7f4dPb3qKvvo/aC/cmAcn55E7EurgcvjlTjVCrNqpwlNLZTUtDWXurlTiLQSQVEXcjPTSfbsnyru2UMMhMrShvK5LIfNHrvg1o3hGwmz0qb54CYkjSd1twozxBJzbg2n22rOvgOOqgtaXPJwCXiaZBHnBbPfHxuWDmAhrpI3KWydhPDqHzm5sut6BeGsNlMElOlAZB2rJvppBGNH05p4HGBo/KqacPtlIwDlCJMiiDzqzCzz3p0YZEXxEB1ppw3izQE3I7+UZPkEC+NDT26z3YXqEMUjB1kLiCo8gpleGWOBC3YcDgHa8R3jr6JgLROZ0+ubaMJKQrzjERXDyeVPUxl7o4zOReExglAl0mRCNYNnVWNOji01+rFr30R0GzRJqsdrxzk/W+qJCIVG++PO4pVtFB+Gmd0UVQNCKZedIiDM5zZ/wppbyzpqkNBhxZoo947mWamukRCqIwAl6Uqy53ILDlc2z2bbkwLOTWbQS45DnO0zy5PMeesi/DcjSNuT/ZqYpkPdk0JGt2/KG8yFwGW/bZqUFJfQeaSUKrIuMLSEM3NXvCQ1u4d8KOYnmXWqewPFKy0OjsmFsnn2R0jwPu7tOgYyHxNv+SH6A9yoYnVGliZS5KzliGQTodrldT6pXKu5oi7A76QBZPYC3kA8zJdg+0g0gycWFP5Ib3PlXNBbQVnIAFutUQa76W/AAl09bPVuCdmGiIaGlkejl6NnwPF3Hy38uikZ2+DAMipC4VgLW/qTAO8OecxUtOxNwWDO3sxL5I0Dlb4BYFH+Y+rkTAg6Y9HlvKoG80UuvxW+7eSvq4ND+qvfsLLnp5TT0O7R8egDNBiwAVodnorODayNXQC5IwJtNcz5QKSWFuVbPJ04XLBqK31kvkZ6GY2K/mgXMPEsaLnKg6OSQyy6AuZcJbC+RPcqkY1pCn6114gvU3Q70id6aDUkUNaFMvWeIE4OGnxQHdPzTDHbTTZ74szSA1XzzQCY8P82dAxXrb5Jw8W5u+Ch5dreGsltZrjcsUb5bgRVjYAmsJlAL5vsGwDr3TZ4RuIpTty0QEsQn3o11ywIpcEaRuDwItqI9V4/iDaaZHs7uB09wvlz4Q1GzEgASxtIjRigxQCI239LnadD3o3rjxnMjnVoi91+rSU1hgrDdoyOH2Fowz8t2XRTB57qz
CUPevVXjTCzF/pd+On2V6kD5Ks4AXG8+Nsd6PrL3zOUpnedXF7zOqfHvR0IrPXPDklMjFsRG/zjDMcvxrQR8fbdDdvz3x/nWkOoq36JVnFTDkWy9zJ19DGY7pkEPR8Ha+DEJSHCZha3NMepmMAaNHwofdm5APmtvk9javVNOe4oO0jTW7Zo8q5FaPl5hKsZptmHkEJcCIXpuirdabLe9zWtSIVmHg4apn2t2Rpfed4IWCpSwHEwVlCkKEKKsXC9fmyzmotZ/1W0LHIzRz0dbpRDKBJTmmpDcrIXi5AnmDjxxhQ278Pt4eSaRCDmOtHdwOlgI3ML1GbSVy5RReHQlsQWwT8KQ+Vh6ocdPDoIdHuBHV1E3OSLC565IkqLiONFA+AQPvnM2Buq5NOnDTX7BHJrWtEavQXLybESqAwU+ctbAQRO0NenR3+7aKLiWsGgQRXbq3i0yadiIGa5wEl1zuXv2wQxNn3B/1rc8MP16hrnci6Og9Y1xZiaaM+XLQcjNNVM2aERnhhzYpxlZa+jnFVHEl/PsrTN7HVcCRWy5Vu0ptshvxCLWQAdkY7KU+DGbuhU44nBSvmI10og1zAi26uHZwwvrr08CGLwJjuxwd9q9gxaOpk2M4HVQXjXsJEunIfuo4RN+FlO4e/YHtEltSXH2DbKeZ7aqDGOgSl/6WtVwuH7gTs08dm/Vr4nOAM4eve0aTT0bk6BynyD5qgGwfPlyxTs0odsYRtinfmSQbIQPunVwJ78eOe/KAfbl2M9kWYRY4uq+PuXK8tMEAyNYvfCdqAR3aqtLPjW/IO4FG7hlK9bOLY7BVJnvGcNFzb1pPJoxfxwFwlrmqilStAn9A6U2w6U8FWFEXhqfqUkaonhwWZlLbZNOQekw+llMqnVQz5XHSZD9Z4rlx9XJPkUfAJoNeKtiAIGt3N5cwHtL7lOhvi7tPBd/djunpDUYZIl8NtxkdzxvS5HoRnO98cper8iQWkHYXiz6PeNfNiwW879ELhmAQ551LNMabWkCH4bYdWxT07xr8bQGb+vL92kiasqyOCaWIoIf5wxHWCET9+E7jpL+RexgR7g10+qAdKE2Y4MiqiYEaDcgovq8Lv13D4lWsL+ei2a12IqzM4kZ5XwiVk5U23d17eZUTQCMRP6kCYUWK0HBV3LjNJSyHcDurK4zIlWXz6bFZF67dNazOVpb0YDkgPFApOs/cnawN7C7UCYhaAz4Ce2AuZKchwso/ioShX4TaquVyz50Vvz3JpSH0PuyjdvAj5FDSddLeucMPyPrAxN3IdhcPHkKg8+HZu43hv4TWwgnUy1ZJ98MYqGZ2PV43YKzSJFDLNNeoPRZR3N/bw6Soq/rQiYYDNrbKNaDsAR9TueOpmTls2SKNXp2ZRj51o7dlX04Uwf+/jnDx2oxt6cS8wEcN5ukBJqyYH1oCzwe0ECcLUXR4C74cJeYdpCsp0iTDFxxvQ561xBwJor4Q8VV/lpEwGrA9ZWih8xIZecs4lufpRhdG/r2ennYvEh87IU11YxI5y0gqvNXPM9HQN366kjEm3chKuKM4T9rhOqE91H6wZSSccWkZlZZ+CXDgZ1eq9msvLbGCofADo6RyMmMfh8Dq8UzzSYE03TBYeSc2t6ZA23EKjDGKi/Eau/fB4b9B+tkPNocbPRt1+FhzmQLyN7bb6bI3UBAXuyUs11IwQYWu6gHwlJobQsMNdWk75GJ23oiK0FsYywW6CkzRbMtDv9emSboZvvarOmbpELuQeUvDbEDeyu0Ovnd+TYPZ5Jqrs4Ka1duliZDcMQtBu5EBDakN5FoCCdCyr9O2YXNQ8ni4iUY6afP87DCm+zQsSByqY3eZ8talZTBt7YnwgxY3bn96V+Sa7uQLVJKUXtej0S3Jxj3i+AWbDg/k14kUcWPmqRV0LeCbTfdsRFr8rAzdI19nB3usI7cZA9Za8oxxtVQ1Phseln5LgphNXqQP0zR3XMr/s4EW17fLioA+xzekexcVOLm
jEVpSxc9aZ7CGo+NhYpd3D0eEzsSsQDlTdpbMXvc6hPuEAQ8y7xaKi9nog43hrD/hcgPkQYfl2jrlfBcpKljPThPexGjI+ahxvkWrQ4YufIUflQg0NOg4HW/MrE2wauD37kgczSUr7yMZmV8xXU4UejdVA3ZeM+qspZGC+3k8sIMTUL4oQI+gVoC1cf95QgdYZDSukmmYxoOjTGTcAkdlsU188AvFhPgBU+0SdKeBR49uoGVfaRHSD95Q+RagftE3OwGL4WKq7iVKPRQEBbhGogPmGPWnJeXIQYWFl7ahE9UfEehh58IZDzzGUDVbsT554v6mrS673CzkXoKENzuS6bfF7HMbyxJQHzAnRd/aRauyoD2T4gBjHCu/weaNC9JVrzrsRAP2CYk2D/AW+3wpfMypQr+USEhqPJ8YwYf74GQ5TWinmi4zQRvFavYCL2S8DhLtEAXAgSvSCFEuy3IgkfQiTV4q+g81dbZBJXJkqFeTOByIAnchgq9VigQGT4SrZEgZUPgCqgf9DI7gii8/IOnkAqrpeGr6W0DKyxaoy6WuadYKm1/pt+5973X3EFOnNoMft0aTKcnBB3eVqkFEXuCLI9LnK/m4iaXv4hkCbrzvwzNLn8aa+0npePMG+jrrkKLPhxkA6qYY4ytnKJ8lCYy5x35gaSZ+scsow3NnCSnNFRHMkLFuaKDJUWSseGsHC6QNDr/JEgEJRLFH3+SKtgQ3MP1DDLWITvTmxLvJ8nqaEUzRcNYE4VDyxat7TPEUfIdeWJPJgKtAOnbhYAQbJV4Lcvfac1m2i01SNo7KkgzMGbUhmqnwSp/P5yOVLpS4sUGkYXldz2aXIvk20XDxul+FDSw3bDKhtZV6PxzMTnXelPfsKgNQYIlzEt/eiZFID8vKpSKQQ2RMw2n3h8YB4U55DBwEzfOgHwVZbHcbsvRAPg4IrogcveXoc0ovsRbdRPJGRYqRkT05aIGto1QyIVjAZ97ywEoEqi6VemeC5kY18Z5Zj2xb5iLfQSSeoKXDOlR+0PpCHYLuo/SJbzL0DHijt5JU4imQEsztDfZnGYSQvMn3eMunN52p0zLcSHd2ESqM03Dj7aVGcLaE5AVEewvfsXgSOqeI7/kglTkf722eASBITZ64+Jz54hxvOzOFaSAAJkcVE9lWZ2CfGfYeURf0nfjeeqY+WPvmRzh+x2mGDFuYYm9/RiQc2fBPn3SeSABPDHqHEaS3KRHBA+Mm6HEr6hgNtlIUeWDq/MM4BbcNjm7bnsFgaYsOAkgiIuaIx+QZRVTSPRDjktm9rCe4E7QK6cZm8CoFmXHpVDigT8tKFtVFqY9p6YU2xU1X5Mmsum8tyx4tm/BMG9QwQdk3I8fB8ss95C7YBXvpIlKGVDtKG6Xrw3dmDA9yQBo4Ny5m9nos8sNo9rTgtmEkc+9sJGTrLlqHjnCN0ZTZ4Dtmr0Sv3iStY4sbHlaBQz4cGODvAHVQ4T+qk5P2M1XTRXiDfHeGTFNhXidA1X9dAO0VADFaqwRcZOaCt5YboN4VYS+4fEm+xcF5l1RBwyQeto80YzJtDgqSCvh2rPHJO/QZXVh1h72jtbH1JXp3KpRL+R7MOY5QE1+NYklqzaJgEfX5+q6qixyFMgvrP6DnFmksya0lldZuuROBVEkyhMvhnFrheFKW9MQ8J8f4pY5u7kIwr3uuFO3GdNLCLJ2PN2AP1iXLmkzYeKfBFiwM4IWgwjF4viB6esZrB02kJlc3IdkYWHygOwCH7nU4L+lRYdr5himoNDB8iBOwGsI+5Rnu47cHIFikWRrg7sspnheXJWmUdEt9blRqdwJ9a6gmRRV6dulYdxctOt4o7jNgu4LIq3MVbYEGP5it5t5o1X/U1V6z78lYx9pkbSoJarTSdlD5XnoU4CVm0qQN1bcmnZ799F4Qy5aYxGh9bIyMR66CTfZuCcWfDZFLylAFs0lELbYVou1Ds5gkJ1zsN/Ax3DibC+dsrVlBknW7YWlwRAa
+0GLbabr3UJdBwcmb9rCJMQr5XH9yZK/C2OkwsLbb3IzY8ugN5mdDWZG6YaWb6LYuEiFWgJs+12Yd7Rq72DI9WrNU4TJimBuNPFdpVDw7zIToVrE5pkiy39VmSqUREtOEx6bw4eLTpVjiyY2AhkgCr3LotG+h2qdhI1ne9oLrOya4fustlyC+wSgqD6JxwWBt9JkUjPQVKJ9IP6SK4IYiWdWitctLEecv2nAKmSuoBnRacvr8LOUM9dmuJSpeeNmwhfX/sXUVi44CWfSXcFlCcHcIO5wQ3OHrh8rrmdUsW4LVlXOuGpjyAO222jZQOMfW74K7MCoiYCnBaBoEVQ1NQNu6ZhetJlEp2jXjBYKbUK32CjAiOQ0NRmGwHqnNTQ4iLNubTjjI7WbdGbZv+9loEe+OXVBxZvzoL26X+2mxxy+t1+qg8C7UHZi6oCyL0vQWXdbYZMO5tQqStVIEwcpnFNGjTdiD0d+hentOrZPnd1HMb+KOr0pqsu3MW4lnR1BKoBXrBy3n5TyCxoxyW+sF0h5bpG1dLaBnPNi1AveWPsmNz+wFAKrwd2iBEKX5fjhIFG8I6a1qsnoTpSnnkeV2SGkAIXzUCqjDYym31KJdtBb29IFaq6qF7qYguDVFPnBTsnG+y1v5VisjLQzMklW+VJeWd/XNvdlGh71qSEPaawvTmjVsqNOiLwDCzIcVxTvwKOA+2gT9Ws8K156Ea40Xw381JpGUS8cvvHVgK/zQ40fjNcRxFjGYyoiZ06yiHtQpp+X02NN0RtpxvtXx27Z+1nNkoPlMF2CbmAvObmiFPsDJSM0P76UwtLxP6kgU3nY1EP5xciqm8UBgxhLMGhQErO/t8eGs/SCfHyqBbwZHH/MlVi9I4XFcoqhkstCwoN4wp2VB8nPZta8DzWbsVr/mN7KAIiY3RS71lQOJg32yqTwTyRVCJZbn+4/DmySQKWbpNl4AghEgQhi/q5FKAbPc1bZjYyt/Anzg4tSLJ+EvEfPRlj58oGAFh8KuzUZie9EB4rPNJhZqIHGrllD+83nud/h6wb5B4DaqCSqKHqMMp/ny0BDQbIEXrJ9oLTOIRJEOVL0m4kA3kWcgxzqEzzt+3AcVggvfV81Do26eQHWD9pu3ry8JxMnLE/ysPigU5+3qpxHjfYiD0wuFTaJUhNseoHk05iOA2/X2+1WkNRN9TfNsS7mjxg41g6tea28MY7+k/dlJmvNm3fd5Q4hW63qqIkoNTV/PLL2KNx/XsW0u7ZVDiKi/hPbmL+FexxaxGmFj2fc4bhmmUwTnKdzGb+9bS4ZG3ozoggDnunM3UyRBpOnysXcAdBPH26POco8OrCkGt59Obu5E1AQRnSJWfrVxQHHc100gsfSSAnmEQW1Ku8+jYcdHZsbJR68RKyi4pjo6/oGSXoiLU1dQk7mv0+Gh36rTFBNB3RbWqmEnBudCLlS+TZtVmfYB97Vj3CxoJxO237ikezWOIPwYJ7JHMbNJzhS/eREE2sr827oVqGfMfhmfts73Lxn03fPhLqQrkLhlSTOGcdNabtgqSgLy5HQJMCQSc5VPXzrKfLfC9GLPmcFLnq+qPR4AH2n1d7dabgjJ9V0vZLNQHgpFdFPNOgjEmZGKFHo/aIwrvZr7OTtl2n9lfeCJMRo/HtsyI7tbttU+51V9VjkRoS1Mer9wMnZhW/UPcbFh+7UjQdEVk3QIxwsHYLKhyL+zZl0ekbi7dRadDCfPHGFpkzuJwNCY4ZIU9MzCAAlN4AkssbEJEHuSZ4Lohqkl9JXLLu+1YZFem23LeeTsT770hSTLJTLlkzHj5jgyRUjCoRFqoNU0r1iZmWZudSOYaBD1VBAQZ828d7rtSqQaouQML0mdh7h7gT4SFwAnE367CKEKsdDIyMPsguMcP7PcyuUkXN4t/63db0vhuKvaXu77CthW72FkPZzrre+CHgVybpilZoDK7AcbbfVN9o4CmVpeMhs3Id/AaDD55RJVeqqNe9HXJCU
kTdS3g6NRZ7ZlNHEEOjnsbWDW+6FfU1alKbhc+bhrLHsPazje5sIAqdpH9BUXj360xD1RK3mFuTHBYsq/npM8WOHsehjYn+Rsj4RCGwVI8K9zJYXfmWj3dNqGRbjBvkuU0F3GJJ65paewVZBwIoM3Oq5jigAmSZnCgLUnsoH4DpZzoippGlKyuEmgC9vxW3fKhxRs+/kJihpxDzxO6yi8v3MzofW0p5nsM2I6QW9NIZZE4Hus/ZgksDzsxz0y8QBDYdbhpSCTZeppld1zpV4LA+yUjGypIL5RQfx5ryEMEbxikJENQjg35Yfxh+DsoLV8TJ87oDfAbTNxIzsK2eiuOSCb55nepO0pDlXOvoFwUK7od2KKPX0c6xpsDzI6OBr3enzZnHX+wBpscDhVz4FG4tSKgHIHjdUbMgLRFwz+wBBz8u3e/gbdNELsBn6MvyBQO05SohoNmPJaxF+hNlPpVi/8AG4t4DSRcabYUFcXcSdTvehChbzS00Gf2y640n6T+PJ9eD7/V10xenmXOvxzZ6S3JHqAIm0HKs5GEPIwAuzr/Ii29guB0beEfwS2MsPhxEk2O7+bdXdUMoY2HdIUGwFk3NCPNKGbV6gMJEpXicJkqRyGAaa4zfCh5W5yg2FKafQWCcsyJWUK1e8erRYBPbQJPNTkbG/H1xOONKLT1t5+Ju7G+v2e78nOm7a+9fjNtRs0Qfc9HDpvlJhuemYcvjCMvADKYpmL4z7m8c7Z/KGDWmo7z4NotLWx0uFOVeF20yHWmhKzMRm35xkdC+kiOw9KNthI1f12PUsNdXbTsE1seGXvLy2+uOFCOZcbeK7ZvMq6WDbGvlzNNpFMMOvUXMCnXxfiwvAIgHxI73qOhBDEXIfzDV+48IZ1IJYbw2lHnayoSH7HG4l9MkSAjwZJ3Dccc3kmY0N+I2UifmXMwOusVLzyGITohPZDLd5+O7uTj20tLueJlPBoT0tNTqw185aftxZfGWt/2AON1c0+vEJu0IXkvoeLFXcL1atIzbRhJmXgMUIizmCQ0keCo13wQ+h7pvN4yyIbxfdnGE83RD/16CZvKHtMvGcW9wt3qwMDIRhtDbTTzgrjNVHH6+1hZ5p+Hl4/QFN3Bh+oFg/fHOdLIb+5D1u/z8skPLm0YYgBPl/YB+9NC2HSrwuM0qI+yUD54kJM2W+qXaMw5G9clrUkoAUFFO6+0NKuqSkKus5i4QObGUNg6Q/XJeZYlURsi6+ZjWunmG5vRhPo69q0MmvkRi2TKfkHOpJicWe1vnRTTdgtOd8uxlto63NBzXE1pA7plUuy1g3qUa4t6T2Q6y7m51QMObb1/luHGCXC/pVyMXfwIVVWV/tg0HY9IFnkOHi61IP6QYU9tnfj0Oc8fauL0AlCsspG9uGWTP7kbFpPGBT6HTHqwgIF13kwhzaUwIKwRT71cwZhVk3bgJVnuEtXiumooP97eC1Sk+btX0S0c+MhHCaMG2zpRV3i5KsEgydqyX/xDeKPgn1Uwbh/g6Kv6wcrVx4PLE3ohS38slZeSeZNYgfFS8tLL8zI8dbmeePX+6VTzGWj4Ef1NLSxRtJv4+xcrq6SIf2+howTaI9idRMBYBwnjeHDa9PeAlvemTceOw8ptwIvwO7B5lh7a6/taJhCiVz41QX1/brs+/XySRtJq8GR4ecJ+E3HwhQEHaTsm2NUKmat0JzuRLYbSs4l41SGKi4oX1xjzV54GFjBpNpol6kRQHhvG/3LdhTqt67AdajUOV4nvYx2TDJMRYkgYi4cx2Ohl6s1ahKhh+KTfB8KsR1pGQLb6D+2FAa9kQsEMjUHIekLO8w+lj1OXA0i4/yIyfJ5C249G6ek94vspGcLpFXLzN5bmBA+KFPabjzR/LJ+/CulvoAQayz6fW2jHGfJImDHfp2ZTn5G/owc4QMG+7K4aFOyJT8+goOClQ64whmAaTzRsZ0ECy9zIr30FYX9zqk4sapCEuNdwdfd9INxGi6jR0Lucan
hEBAQdN7u4CO8EoxnbMH2CJypFspVrqqepw/oKGpPKQnKXzJJAOnPxy7H8uwsAntWFjM48yE/Vibp6O+MOogrbx7Uqbji6Y/LrCNGjz+wmNUDthhyyVUtd8ZUwmqfoqdyCg+LTQIjK77WrftHE35C/bt+LyO3xaK9mXby5GaVZ90dkNbtkePUx/SuN8ftuQ06eH7Ra4zhFnEkq4qWfiMuj2k5KfSaCxAtY0MbMLA5/85Rxb8/57rWg+5KiIMabvwYG/WRdCCPtHn53INBsBb+jC86YzHZS5VcoHiaiYlIfyWP/z/a75q/9TqDu4FaOVRMWXvo+RVKO6EemDCqYRT6rBN2VfMj8NtRfhMQqGunF+iQGMfcIYgQ485Lv+xVU3nC5EoD79N30Dr528DYML5RPJsTM+nsRnI5AuMOu2/Nh9W8rvv50uJLrI2PUzEpqHv+q2gxGtHQ4T6dvjVMHp6YigwZJxXbB8W4yxXFcbI7neo3pTPcEoChBW6cWw1VWF7nW/c3U/7Wu0grekie7kB+1A5NN6i4YK6mP7R0M0tr/HoOuzVa5pMV6J9F0c492795YT7mejn9+f4Y0Orq7R4ctQ8i9RLejuXviMzJzlKOLyv6E3nPnxIFYCqFXsIDZR58b1oXxb+g06XV6dup3IPRoU+sp4VRx+FnLewEdIhvYrcgXWt6pF9IMIreX3maVnnPLn+NNgxPgkiRg0MaJFDAxp7ERPSwW+C7mIzceehY0rGWe/uGBtKuOa9ZGserXlLB8kC0r4GXdFWgDY98aAw2TFD5KFsBmfqofmq+2YrJr6kvGiNUIB9c4wIGJf9SbEx0deeR0FD88dQSVF+hQvu84/maUMMGOHq/usxzly+Ks8kYl75fRju52WL/HM3bmW2JdXnpY19rusS38+uPaqL18wF5GGCXZ2fVMLJuP4IySiV8SjMQ7EzjU5A1YumxLBoiC1BB5dxSW2EhR6kJDPMWDgJIeKZ1wFcgD03IVzPvr+M3ohWmy814i+tRB++j+jYXPcpeR6pfByBI9uGA8LRk65FdwKP8huKYnIpwTR1lQ3DeJruyHHmhBRXnysMBCgn1cgqdzrBLCaUt4WFz6C0tDei7Gh9UW6/9JgzQt/Zx6IbAysY1SOMUG1lBL2e9k5IM34/ko1OGNoLfGeubwO+vYNENjfqgvUsWZgoL5bv99vSDVrL/YcscoNg1lW5Y03xPT2NhLwESOckaUe/6Y9sXnVm8q+NG6wXfvAGtUrTQaDa6inpiRRErc+LFbYQ5vsNc8XwIy7oRzHHRiuJd7DKJQc8H7IkhciEz39uzz6oC72O3N2MMSxnzXduEBnx0+aj8/SANw3WgSwJ/cJJvmCSxhQxJaXlf4AvkDqo49jdSa7oz64aiT9qttpS/VroOFH/u4WSWqvZ+kJxSyEEiiyUE/yX935WbI6ADETizIyLzzS+KLgTtEXrkkKb9nkK4MN9mMwLxcJCUZrRN56XGXd7xy+O/ek9ER23mZ1yh8qD6Ccl8giT1fVxccZVBmK51cH0Bocw+bjFo2EhwLxaLcUXLgPbSVa/wSIlvFByGjC22DmyN72A7Jv5XPRG7Dv9wTXVFmJSp6CS724W2lPZA6lcDnafD0r96p7Moprcwg6TLBX+NmFrmGRIhgalju7GLeFXwd3SH04JWgdQWJmz9qvpvtGEivzhe9iSO4+BFPEcmq7iY55Fufk3MXasEzhs5G6ezf54+PH0Ytc/vQKOaQeJXJPtd5XGEQgSqQD7WL7+/USWwVLWFXA3QXRt+dHFgO/SAiD16FMzp30r53jwQJ/gU2eO+XKRNs9TH5G9mffIMHE5T6mfkM8wGkufnYH3Za1rVm7tZ30OaMh/ouYT/3uL5+TkdERdUyyGOGIbTJZYI4ENLKYJvw7n1eGzh6AV/EJLOHYre5pfc7QyfD65af+MkbFnp0Th5/NUgIT7yh/wQf7ceG16L6TJPYd4G5OUIzHN+WQKvVkG1E0EmZnv
135GrXrwf08uWoIKAHPvYQ0EeyvwsUuRW0Pg78GktU5DX1f1KJMlSdmBhv0DYKK4io6HrPQtB9g96q9ZCUGZwleEwVpHPfy2+KpJNqNmInijTpyPur+Inir/xfT1v7b9PoAy9QHw9fADvgPog59AFAXNfxzYA9IgGn68Xg1w7QeCR6qVDmLqm9RkAfRVYHG+d9sgSxIo3ZBviImzzF5cd8zj9iuUITdtpGvzNLpBksrkRmvjD/dXCSerI7qxYEksm/828bUu495bYkct49RGweWVHnkzztuDru/hzkRO5ApDk8J68RqzpjOCw3okIuqkexAN7Lgocn6L+WgAnDc2wbb3X2nPgj4qHzlpp2Vux+PoNxlKwKXHdA1hB0Uca200VRYmjkQ+QlGqhXV9Ln0GJIqD7HqLRBpPrGEUiZ3ZSR2NrRLFLwoe2HIQWoJQdy/dpoSSju8N+BOeW5T+nENZCJKh74YOClwnlL4suNfYAYoreyIP9WJD3Zo97VvvIpXSESum+KmOxArazgOISJkjgTaGLRdXvCZVOSVCE1090Mxj0fo1W2O5dlfaKvy15ZnNXF1DIhxUY4jgdjBK6BRYh4Ht2hgHCWW7SB8tH0G0hqAICK5zt8yAm+OBJ5EJAyJK9gWHYpTeILRShJkSemtHmCB9WOQUUyCnsPJeDf52N28Mi8P6uAxTKGhMQCwn9h8firmYd0UOu/M8tLSIfNvZJ45MGtsIII85N5QO6PgJ0TqOBjaAGGsU7orVAL+LVcAfWXqIeGxk+lRFG6OIeDF8nFwdvPJO4VVunKyqaKnVCt+V46/wOem0R55pSs6KntNPj48fy0B1FZBPwr3mF1C+yck/WCAy1s7r23MXvj87SeBrHFsD3Ka6fdALehAH/xxXUChivJREf+BwMu53xb449TdAIwjr9tom0U3rnMCKQntLuHb2xUOiM+ow3lUiisUM+C/AZiZtktAVZ6+v8vbw9diTRtWp3lRAL99Z9nYg256IEeDRxcgoHwxLIC4UXGnlnglrBtdKdT+TZShPxioM0b0QGkcTwt7hmqw2j72/R9UYEehBA7ky46SrnStwJH0x7t7tKzsPFuURyD3TBg1y2HmsWTNf8il5HzU9BDLlrU2yPryOVjH5ACtJaYgj8u23uFNfcsqeHkZm0W3s+jwhglR/u1mKnXbKuDNXJ16oyOYe9BCy6yNI+zaKbBpGnxpl5tGc26MhOgYPeZM8sH2OZusTAf7PmJi96DbxY/sUh+NnVdegMBSPRvl6Um6pIFipIO8TpqhKp14+03rtooshUHPpCa3l/nviq/PfPM+6/fv41oHWSepQyOuTb5QlleIuEKur9fSN0NOEgIWhrExQX3kzXSKnxeiRl3SYanaLVc+G/7+8FfOkw0KCGLZxm3F49Ht213n5FAffLr2yDw2sVq94CLyxrLFLNJqmQ9fW7OE0P3CIpfQWVV1O9HbCr2LuKfryT2ocbPOQueEbMVIIHc2YpfrsyDajmrtzQn2c+smUBzy+BZC8Fe3sFErgehpAcE74WQgLGOjx+DQMEbKaeOhjbLHTH7AdlejYg2lYHRLLuAGnNIGo4Om75ABMGNsIcGR+st5rSm/z7eMKvozkahdgABpbr49cNXep3ZoH8SgzlpqC+SvCjKTsamQL/G+jTd9kh6XMg8aIRcRh3DzgX6bsLomQOqETQP7RL9gbpxU0LrN5wnsJbQ5Idl+8N/eaz1vYSedd4nIHUzFt3cZx1E4dymPe+tr/0YvybkD9zwVeltLeF1ikFA2KnhGICUZTnE94S/ga1W3PvGOQHLusFrclXjNIk3P/6m7Noqi0iMzULIUEFIZBqrBcsTw21CP8Fc5umyYEh5KhsnvIHKT+ORyfkizVzFQ1vYKdz0E31c0uzJyypo9PK1p++jyUoBH6aNFu6k8E7CgOxed4kYrB0j+5Y5yetORUJl/burzqMDlotEs+Sg5ffCoKPLiClO/OoSMaX8BZgTHStQ+b
6pgqgcUvDjrSAvCiNwVTnGr1NksqVloDoDNTfsY/MwNt3TNAp1HWFW6y+NyiUGa3PLD+wKd9kv2yO4obhyMNzyhwXc49wovTE+YVAIwRK5+poLP3bXO/PGhF3CpCrhX/WrFnB/HTBGtq8T9U0H73ok5PRssG+bCl1HHQqpPXbzzE0e6/Cv3Ip/R6No6wogV33Pv2s4fpyM4pOFkxdy73exgiW7qL9yHB4q9mZTLg2iMGG7wdoAfoIH+xxNoB2mjpWYy+EaOvfVlD8IE04gd9x1xoMjMJwpcE6UvgUoVLwd3kI1jT+5pOWHTZskil8PyoULtXSL73+Xdf0289UT+NJeX+au/Q1sfqzHj/x5/vcCW3lyv/+LjoFx0R+j2FQ5VFpHmLcDgOC79LYZAGdTXiCxgsIRLnS3tba++F0D52VC8U0FTBBxRdf2gd73yw2Ri2Z2ajxYNeKbaKf/l6C+enHv7tzaG4mmk6HBBcc+vJSmbBxKcv2fgVxl7Eh6PgJMlpD20ZrUeJsThgPTHwsOlW5Hydg9duNpx4kAxs78mvRQ2upGasBJpeMjreb9MVLdee3jt04u7aqKtFlQL+LscRJupwWao8zMGWBxTsrkR0UD+fPHkafI4YTHyhdKfJSJ38jGI81Dz1gfzhfLZzi0UG5yQPPsKsENvh7GxCB73+9r8Gck5dA3UHXoyLAFjqSzVGZdhY7IxRPQzOqWlHakZqqzZPTsZ73BXZNG9qM35nj2qEEC+fz16+ButX53UpHokuHOOe9DzW0XxNpBq4zCh6b3ZXNkku+1dr36nfKeDdQ/8EFxGqzzZLAt1m+l75aJd6EZrseA9EJRTzK8I/ZzeuEgw4OsHsDeZwDOqb3GAY20YNdGJBy0M9Xxw1P3Y5Mna8u5X9GyrqpI5CRlSS0BSYGU4YiYSoldXER1GhPbR4X+93Sk9ZHdNjoFOx1yE577qTZCH4Jp+K9TWLarF7zoCrHUbdDiATiEd3erc/Kw5M682CMARCUdFRx+1pUTwsIozVDjoCKGYFM+FRL/hSLfrJFNtNro0ki0EMdWQPO4bHxYnonN5gY+yW9d4N/90HBJo9XqiMtxyPHeLDaY9qswwfyL/J6Y06h8vUOYAtMixlWDNv0QdBACmBuIVSiqdOfH8iECQMMYFMeQrq2KuHYax1JExw8uAeMfq09QkoQHbGtUgtVtKWqGbZU+Ftar23IcGF/PDmIbsBemOIkqPpXYobeFD/8hCrA6jcFE7HmpwTWLwmL0GaQkRqPrm0SEQbejrykn+UQHFywoXF5Qo+8SCiI/nR9hLqh/L0pwhXH+cAF+4Seo8yAlQKa5QQYJhdZuue0DKUJhlvH7a6g5oxCzRGYa8XwJr7nXrbSy+5pS73zzVMde6R8v0EGWfA5DVRhPhB9x6HOWacz3F7scsIgRAA1BcCQJTg54wQzr0ZixJ0JBVnayA7ofWpfjX5LtyxKMFuBwowwd9e6jkUAlX0PbTexTQ+G8oRfITFFRtX5qqDX46o6cH+O2STu7G+8UL9pzRNldpONBwYTsYibu6fn3O24uNOf+OR4tdypa1N+vDmKFpYH/OpKxFNaZhG9l46HmBJVuxPAxJ6tdR5N8pepjhRQq96Dmc71NBL2GNEWspkLbfKlrlSeMNLYKbBK5/oemd6kIpcn+lN0INCmZYLqU79WLSQml0sO78NIBu2D3/FtUndOlvUMI62Rus3Gvw1c4thDNOmb0skxl6TyrBpn4s/k+apCkAa/Bm6bcPlIsLLIwvefBu5nsiQvoam8y27Sgau9UgQT4ia3/75yZHFn/ApQrPhNwocpDpVJ+oa9Kc+4aBTjFmgXiLHvOq452WzJIMIY3ygvQhfZRgvGwv7rWpuvvGxEOUK7tX7Lagh/0U4Hx4Qwsi2UskFkD8nRmcr3yFN8TnQBohpRgRb9PJ/aWhLn3i8tyWXFrAWLFtn2gKHPwaHEC7F6tRPFXkoGUSJoPHZBini7T4CP2JjAM/l
DeFkzxdu1jYBrB2zzW1xkO53WHbz1kIyayNWbEnpWPNwVRkXhdR0spCg1cWK9aiMqJTK+Vb3z5ns+0m9YNM0+UJ3YXt/iUv3H5klIOX2hHruG26B1xyIoT0b6aqawQfLOsIFYMP311aVubfeHz72ij2Av25LpmGxaZYukM42+0WmDl94HNmhRnu8Aupcfugov6UTYBYisljeiuifJEMWafNcW1JZeA2pRvE7MVfSbQW6+kEzYFNHsuBeUC9VwvlnIJ43fFhtQaMPCHv4q59D/2vfz0B9Uyp+vRRnZwxCCIWlu51fBAO2d7G9pOmwgakEmW4Dr94IHw7U0nk6a7FI9Sv6AiRW+lAF/q+QlWeCjVzX8EdjZNMY5i/BtFZFENAFoqDG4rOwolRi6H7WqbCw/TuhUp70O9dSUQslKcrz5tyNPtKVOv8wBz4kdyh++Md2/QucQWWyMCyJMg5Xj6sQo5lohx9CFHg1i2rfqAGozpB24yJvcWuj8crNCs/l1pYoRKUkFjDFCg/DM9qbeIXBw3UuZdSoc7vS07Iv4TS02woU55Sj8EnIzHUJNGSvXmjTguQyJUTxR2aU4RXECcSM5NA/zvAoI5i4EXmfzcpixoKUGjRKV0sGXX+T6RnK38gj3RN6fYkcqOc/jioBe34d8+bOTRAxqkJQMUhfJF7DpINtTSp5g3CTj1fg5EZlEx45l91tGspQDjMKy0N15SzJBm718+iG/+ZsjzJ/w2BFPvnfy5XwtrDBKjBeuiQqGPtjHyFMp5oJJynDt/UoniNfutyFs25uXEJvvQO27pVLRNPsR1sUTlPNy/R4xKOGmyebKuTo6GefTNMAqZns/7gp2zdGzHeuoVGFmyht8X4v+lLo05a359ecdzgjprr82nxexV3aOEYM+TgglyHpNaTKU9xEiQPvLgu71nPS/bi7RaaWx3T4A5NquRHW7GCHh2TC5SRRv9fpVBhIRWjZ1GvRd6Eghc21HXBavVIwknuA3RyQYG+vNKcjuYXWAG5lnXDg6oFDoXBMj+xGr/G8ej8mxr/4DKMIAJaufwTLcrtdv/Ex1YHvEnb5WKGsc81iXPgDWxKfzfNGWdT8wBUyHXLJKvv1fj2wep0FdEImVy6lccI1tVZ3pRQnD1l/rPN/tlqUi5HsqThiNv3mBWFUouaT1x+JcsUdBSo+EMfxF6by166DVmg0xXIthNnqwL8n7PZ1h7kBVbrOjuUxu0FCu5oImdA19JipiGBQyf0Mk4CrbOuhRUq2wyL8sqjLxyaugs8KXV9dQRlMEI73bTZsWX6af51SwyANYtlw026zs4vgsI8eaXgj6a1JBYDqbIl4ULK55ATdU33kkOPSwVj2ci+qVn/YWYWcmmzH5NoSLKtzBSgi6MJFP81pBcRaJNEJuqMsZf52shl4zM9Qd1OjzNyTdLFuE/PwkAyxJC4Wv2NYZKpQOfd9urNrcPIuJSDg4/Rh8F1yksQ9GHqKDbEl7YmPww6O54HyWra1dwU25AuulH4gAuShgKjeto+IR3iN5mvGyf+evhU3ZL4ag89n+kKDhPjRUfqyrSv+G7QyA+OXmXUeNevT8rBeikROEIgYqn15WyQXqgiCv6p47deysr5NQoqDJxUDVsn7qNXHAmbwvH0olxPFMWdTFPgAJuSsNHjh92CyYfB0JH+eyI4tZtN1ApIvvlMGld+mb9QR8YAh/qIBCrSNWvl3dVu9HbRBo36Mm0NIKylRryOYDZpZu51dy+WrsagnyPaky/XbdGFq5Lb4QszmpoxyDJW8clId3b7yX8yOk3zvKtajwXRdGfE+00QYTos/QNkfN5WVRpKYjRTVzLuZJlfDXMTNIzzVJNYOOZ7pp51EImmU3WWJ3mjCk5QB9KgHHzKTPgN/QjzxzRIF7xbYhKZ0vkIG+wv6z0/fzDpN7/ybbyM/TRvd0SOUYY7hysLmxKpYLRekPRDZMhsivIp7XGOycmfU79fpZJnR7zrNsvskkQxs1QNR
YdBJnNxOI+VK/HGMlH5L9Yy2wreRaMN5nxdHScmCQ5F7nGwiJkGcwVG/ztT2IulxkMO1fODYr7S1tdEAhN4uCI+Pu8n79hmuCy2mFOcbwTkNdaWBLGRTE92TWmXJqFT0g0oqaDy81n0a6v41ssl1tRCZYdyxMtkuX5vdGPX6K5Rk2MAfxilY0vdpCi45qrPKijcHvQNnMx2l9Duh1xL/ArgL5jb9hgNd3tWrSHaBobSJhqfqyJ3HcdLUvjV74wnXlSQlfRIK4nvBRjEI47mmYCDqkzW0T57nq6F80gnRoWjcRdZohQ5EtHu8OTEIArfeCYutzhAnQC3Vw4h18xzYPQK9YBO/O9ChvqVZ0Xd4dzFEhNpdtzLHuXjfI4PEErnkP47zDxqTNyMCQRtv6cO6DT/t8B2DvJ2R06K1DXrE80umLWBrzU8XDffUW6pnMg4lIIoMLpC/gTj6hb3zN8l5s4Rs4xzKC9gfiiWZ65+xCgC2SlSnaVCfvK320iQbvAKJaX5JtPxAOfwGDtTRREoWe6zZZ3Lo8ZGL0hl2cKMNwFLvPQLlakrJ2Vqm1t3fpNk9SCgIQpVehukbEWQ4XrkLJjP0wQhzkUT4bbS65QnO4TY/pXoODgqKX0WFFAOR6HnMc5+MDXhSMeAmRqP1qjoKrfK1Xg8ppLq4LnyYojqTiQ91Rrw774hNbGRdj4vbtEx9+12589qUFSVEKfa1ydsXHh5oUALYfYgnQoaaXflLharZHWNq9PqISDmcdCpaaA6C0W+dFUG7CyvmuKrp0SfuBN0jZ4BD6NbTPg2PTzta1HiGW0aDpBd+KSDj1DhpS2DQLp7MfoBx42W0ERuO5xKQQRb+uZoDSb5wkFnrNhbxNda/xiAxYIMlagfXcPe285U4TeoIWBvJxS39Tm39fpcXuYWLi8ihnbCtOCxjh4gsMNg4xI/oc4UILxM0n1ga8d6S9+BsyMwWjVaP0AUAsupK/OxJk0jIPhhlosYjSiSAiVvP3ZK/pXCpzApxPWXwPo6wX44G8AwesO3PQl0K4DrzIbajJt9QNtAvIEwLDesQpb5ctEqmxs44mAqXMzjwAuOQGdYdEHvgceFBSjZ4Xte2cAC0esRHrSkY8uDJQATe+qW4e9/X78K/AKkrvbrq5xa9fbP3wPvm4W5Hn8vEK0/r+G+Ao5CoYj05Jr+cMLP9+gRc3UE16UGjxwFchNjWFXTMQgDHnLvMcxwR9rbo0cBV+1YVlslVXC2dlWfHrKGbkok0i3M2KobqMsDdJ4sUPTAAZDEMOjL1mQxq7BQS7erQXP8jcNe/H/Wa5SxX6/mB0g4TysKHdRXztO/74Bnt3NNN5Iw+KUjUXmjY85MHbalYFf01Kn/iAWxiaInd2nCE7/aAoJKzztUYJsGNsFkHVQ432x+UsRBDU3JoUyrUt6aOGQRx5OI3uAwyuiEJO2sYnMT5C2RDar8B+fQHkcqXOB8/00TGIOH4syBGDSD7L03hPh9LigtOwvsAA7GT4CsfTC7H9tXAEfnleQo3f/hfBvaAcGdrKS+2+ejMghGfVQDbSx45vO+Wv76rLL+ZYRjz+/E1n/4UJfmlsU0c/tEJhDIBq14BfWOKuhUVck+nM6Q2OyJvWL1Iv6gSoc7P23dkoR76Y5V1AVgk/jLOdMBlPKWUYdSWJRd6MpLVQLBCwYw39bxgGi7gE/va2VlW84KGZ5xTrr/aBO6ULMBi8AxJh2mvmohQBdmDJVSjfn6Qp2yDXxxuiHLk4D7aXLOsK1W8D3j0ikzdpwd/OWX/1DXpyGlrNFfJylWn7i1ej80YHYAgnpYjXQa2chVzb872nfTDKTgdQhl7Uk+c81Xe62rFnrHYQ5bzZT98XDy17/kPve7sE/X00bMS65jtNNwrlDhB3pBHPr5rsj+25us0Q8ebXrUkMcwcPpZXr4ko1NplJC/t/J8/79H2fZDZQYGqfAmr1ooeCT18KYpc8hurrna57a+Uge7K+PxN4b7kqFCV
j6BZbME/GMR2YRdcCI1CIWEzoriTpaUZBYIEMpIJEtEucmMeTqrqLIpaZNJyUfqBteFOU4U8FiEixOagLWmPppuW1QtY0835Mcz2hyMPKTzZbFWmBjlqz+LQn1BqlK1DjbfVvAhFft6e4UuK8rWSkX4O2faG1X0Q6T3D7DNA7bv7icitBA0Pnn3HMWfNvL++ZxjESoePnccIEmvE5w9i1maR4XXJFgmpU3rlIk61kEfbjlNmtYCLw5PbY90N7zHEIDtcssAp6VV5Aph152A1SUZR0JjLit/HZq8qy0Fbcsb8r2q7aZBUhNJE2orEts3OzYftwsRZEX0pitUC/qc+50R5OhYWoZuIfFd4a3Pr039ew5vg9dq/dQhTmgCmgS8hcPogj3WPmIAUfLnf/AOumWJrc5LUuIwJ4BXa8ch7CGXPKZljGQjFHcCCg+dC+jwXERjnKtJc7CdS6YM540w581F3cTgu1maS1G1Zg6LaBvriCJaXokd6yN3V+fWGMwNj04XXAqlC7qeLvthiLDzAd/ssUfFrDsKXyKpHPIdoOmBlnugV5zvXxMsPo5qlmRA7HdDIvcZVC7VPrak4yCjA+4irHfRz0QyhKcS2gxZVHSxoER3g+zcwfExiTgEdIF1nHAPkaoJx/wLWJ/YzhPs0TAxBsdRUUPqPemXyMXi+5r/xaxq5XPIyo6WQ6E0m+yWvTWc7yXiAL0Cc7MyYzfs8mNE3JO7v0HE99PxPaMvB5nwl5g7qL0kfkW0j2KL+Uat2zQru3FL7K3qJfCaHwZt8bXZ7M+Zra8aLSLZ3mtRP5JHlFVUmUuwnXzMNu+l0lb2A7qWjPbLy3wQ6wlffYhOYsgNHDeUMjE8qWAMIVtIjgAJyp40GBumVtDiokTgEtQ/eNaP6Gr34x0M1Ca9Y3syJGJ9ahu1NzxVWwZ9RnrhTR78IZUjVLW8NEJ0agvuVah14JtgjFWw3B624DjdaDucIS26QizztYhygfT1Sm4ujXd8BB1Ps9Qoj5MMl2IocvdCnp6p3Uu0zec8qOKH4mTdj7qLlnHmniJ7gi6KR5gDV18xuLTsbbLzeW9ZnH92HctQNYWtABeCOdw2+byJDHKYNscwaP0NygDGbsZJZB6jVQsEbbBYVa2Hp4DpBPrB7eV8pvnoEzBnkGEyzjILTQFicwUl4J0ElDuJW0nyeCl2SXfZqIeecEV3D7m4Vk6zEKfiRF10WhAa1I4jX77Ombw/JyGtbBLrmGod6SYqJN39KtgoB5v3V2CA2XTK12jA31vS8tkFX69SHWllJHP20Y+7evqOL/dqZKFGcjEJWlgJc25JxH/iQcn4ZTR+MzNSyLYwNgSBZU9L5ywar0khK/aIPq+EseCc89thr1lSww/kYlDMmBzerx257lsI8yVRxQFDrAgBf5NTBS63huWELtnwROGG7VsaF6Z/f28Ww4QqyUtdCzeD8Wmc435yM8XNCvSK3pI+8+Hhflgo0YYHvRi3F/+4cFz4F4Z5+vrlHsAH6Uhm2xiBFf0vFVrGqYsi7W0rf/ludsP+H8I6yaiL5LCqyWfTnChlTdUbT+mH+FF/TbGruARbkV8wklIZdJBJ+GO2xvx24jkDPKHEqOtIrjDiJtHCxvISFvbpFF4b0Z7q0TiHrVMOh7FEhVaAX4SVNhbTSB159j4W8F4mthsN9NIA/v+Oy76uhOl0jx2053GUOYPnc4S8RTZa24XBXKJXrTkARt60D5piK/0cjEwt7v6kNrx50jb3+La0RGAMs/HeWVtwFOFL4/4nn53I2zfe65KnkaWCqcXeLpSIleNNoKqzoN0XDpCoW3AbZ+0cY+Wj/uTuzz75ps97umzBnxwyGT6OGQXgbJG1PZafXiLkJqYizpoRBnHGfU1HYeja3HYdOA4TrVKizCSMUDlE+JdlGoXl9UbhxD+u0X1l86+BaLxsCnjULQLrvT7iM+7j//nsdHHWCbOu5gp6viiyDUy+VNfiE/3NLIx3p1bYSQawG
yfqzgbUv22PvfprFK+VYUw1Z2tqjE1gu4G02x1iQgPHgFqbvIINP9qnN4AGQ2XHV27wAnZGU5p6BlUNFWavjLmgC8wgTDxLOVUGghjhxDKbLld5zHywbLf4+v1uJrOBgsHM+U4BAqzZMUyzWrod8nkjvxdHIS0+vIKItFz25ftyM77ePT9NjJ93Qnl5JDL/M8y6T+d5ZgzSIT6q99Q+ZhdcIWF5juXYaUxlwE1OxHZlpMwE24jbIwiDSM9B1ORk0pSX/XH289X62mV17LFfUDPpTAXPiD/1uoqRqP55RACQX4IY2G6j7vRKOq2foZB4dna5T7DbJiDoUzQRi9HzPx5lVUX4DCTXc3nDrAZVvFSv2ySscsPFy2ox4jPK9Y+7d7+/itRmYOEaTXrk8Jf/wroLdZeHlejxrfNHTI2pgVS8Qpx/UGTSPPxF3WfkVlgd0ozmp72SpTs3V/wTyeoPUJzBQQ9g/SOkyGbgHu+r9AgfaqXBnZa4Z3qQvCaQTynLWEPfX1cFYRGv7em6HB9lRPVeslBr0KaMuyYIv6CDYGECuq+3pjqgItWXBtv4aRgnYNqzAzp45h0M5pvpuruzdxnSSHDDVe5Viw7ZQof3OK2zxId3JVhnVlo0DLcrDhWBD0SaWAEAXq375TucoYrtKlQGUtkabjADIP2ijmP5itvkHELGuQ1DGM8hfD9NrYGiX6zr2eXkxUTuUS3H1sJmF81y424okPaRcVNqLKya3DLZOZ0YpRwfJoUHzbVH87s9H6eXnuNXyGd1d0qyG60DoJBcfhgH35ETx4cpqABpKgzn5lP0InDLOTwupHKr3sFXQ+L7rTiT/36sfOfX/fjkOaj2Mxspx77qeOCamkEvNbX8Ts7iPCInM4UWjQZjLp62hD31cgbVt/lhtnnjVViok5pvoDVPr01BxKnTByWUHwR8iK9regRE9AL4XQ0jwNIaVDBwXMUoYurb75WCxOHuBSYf/WZTPNI3Ds8eIQYJThR2tWzXJd3DdeXJU1r0IjcTLxhmshsqgdvediGASKOor02+wChGGMr9vAYbx9EOpTX2bDv86t8BCEaMLjMXTJS8cY+W+juwmgN5Co4wOg3vktUC9fJcjHSKbCpXcZMQehUWY/Lu/0V5jovhv4C1w4EJee3dkVcEss9jNjT6crkO8q0/hto4cAHxJ2Cufa/+3se9nANo0wUjH6h1x9S7uqj0kkYcIi+pezZU8JCmUeWido37udphWNAzDP+YOsm2z6nICVJlkjvtrVITqqeQ71Dv25/2438uBMQN0E5k9NEuYbcwdbU4Yogik/2enDhqiVO/A61BU1WOEXRjjYt2SeiHj7ApLjkBPy9OwCrcoWhYzXjN+m0VrqSqh3qqMpahwtv+XKxap+Y1EjKVUbyXKrS7/V11fN/u1Mn4HUqK9mLkDnwQP78yHkT4r+4+1fr9Dqk2iwaeK6S1f1opfA3nn2LeuW5vLhDcCW4Ypa1Z2fXwlCoqBfE7INBmd6Y8CrjBzb7+e0wyT7t7d7X1SbGxSrb8dkmGgWTmToN0rydQAO/0Hk8DEUJ4yA7oR+qA4oeFcei+lXPN9xBAG635NTSBE7w//tsnstAAYxeosEUT9W3WOQiANUgu4H/VZNLb23K3fxy41EJuyWuAcGJPTbZBH4FwP3i+I9R6Zy2TEOoS1M1t9VeWZ4riqDr4wENQi54XU68fjna/rMXHQuKbw5jpI+FAKnm1/oNrNkpVRPrgdwBJxIDP+zBfaDk5jXA34DqCET9IQSipzoUHlB7AyqB8Lr8lNIiv2NqLU7Dj61MHzSXNrTkGhMIg4euRuOv2sdwFW9OBbEFbe+V6PPtJwlflBgUna7x/Ko0NjoDNJpcJFHUQ/qSEVhVqZNDcnz9dXY1CvQtu+H+vkCWPW3lv35LfOg8+NlWOl0dBz+KxMA3APTwBmniTJmY44BfQVYMw58gQgCDJwbqK3QZFRtar95sb6U1+k+Eda909+1+dMnxjo
fxTU4O8aX6YG3Qo3N89dsK2WVTEqA1DcOxJ2eEM6/+vc4gKGp+7AhsATXtPyJUpu2Dfd8kNwxfi2UI+G/qNfH1X/PD7FgazUDLRPxrSIyD9/W+ui/R5KpB4IJj9R7+vb4JnNIJyK91fa8G+BR4L2Lf+bpgwVaehJvt+pZenP5NzIfRiAI9mnSWzrq/z3LNyV46psRC+A56PexaSoImnaX5KC6ls6/kKedz9d/UWN4IW9LN6tGh1QsNoma6Z/TIUl8vNRuqCEpcGu0Snr2uzoflhuTwmm3NJrOb/+3x5v9oZNEZmfzZ01s+RYVf4OrZECvE+1aJxeTiLXAskIt4+bxfKvDmw1xxU7RxCiCQ0wFc5vifak/rMeeQ8mSYA3lb/Nw/fhHW246kkCautyncDsGRk8FwwFwJRMqo+YBohG24buBOZuAg3xHP1PKSv1S3p5eVADGMbLH+U2jF/VBpnbHv+/vBAPDK3ypTavf/xBupTBg9Xk0ql3kLROYNE9adSqDZZvcQWkpmxTuceEI1lCIMsBSrIeyvwl51LYCbhP6z38kurYLE16lw0iTkSPsmhU7UZMP2+8ufPF8L+NRkiQq3Cas86AFeXs+efGm5VdmneGq9ps2Wqh8mrgYjECAxpEsSQXvPi607KHY1yruVKtkhtjuKSfI9aEm5z6CHAf8WKTfOl+mhqyKcRkeWFxfMB96elPTWzjsMueJcf4IH2NFvWNoC5UjudgJV1qeCYpaRv0VI01mfRzjzivg2iIh4nN8C94v0/hTyvXo9MlWGOTRVp8ZhEpqDw57g/XaMM9VvZOAvT7NCllpqiSKVBijz6cH6gITvSeqnrTD8F6GWByvkSkEL1jomIMykAA6/p4f+NNqT9f2M0xTpqsj3upkfVpleS1VzHgk7UAEpHS9Ym6vSUpPnSKTrsbt+OfJk1hb2n2Ca+74jcUdwxTq/uz8ywEK+TLUV/kQ/+UoG79bKprH3J/iPHdRrDIf+J4QjsPCNx75+lJoi05EGA2NF+GFi/U9zcmhjL1SsbY/HgjSkqw9Gz9uyh4g+Q623wYFjn+r51M4yqrkGJere41SoBpby94pstzE7R4a7FbIFlcPi5Xqs3BBkosVpk0Jf8N0O7ji2LBQximtMebnHA+TP7JDlpm6cCekZny/u135MdXkjmHiUI16hhHmYK/f5qvM8/BtCTYOhoxUlvUhPeBz8KJ+6B/sMv/T3pVtuYks26/pte59KC/m4REEQhOSQKDpxYt5nme+/maCqtquqj52d7t6ONdlP0gIEhSxM2LHzgBpMq6dg7kq5VawphbXytLB2vNJMW5c787q9fzfiEsL9tgdDpGQishmUUvhcVzhp3sj9T0DKL99PtOUtcKW0lq8o8fEGPaHtdHBWCgACgicCkpGbUgx1ADEep8yTXkwYl/TA3TlmX5xk1LuGsEsjy52tOmLDIncSfcSlc4m6oPRUuOalRwj7tz5kikJzihvW7CaqtiABEMZFDaoXWPSPsUnh9hp61y/GlQL71PdSrtLYxAFSLzVxd05G220L0h724Z9wF9cAc3U0DZnlB88O9KUtYCdV9HGUuxxm7BZDmc84FeEaOABJfYHcxnYK7WBtw4QNZgJl6qFrxOmYlj5kpwAc4QJUMKSotl5cLpXU9W74wJtw3BcpaFck5FCT8qGSzuXvEBhQg8PHDmiEajn6AUy3Sl2PuSAHGsgOYSic292VFduFEPXj2J93HbopO4IIHaCMMad1g2a6ivqgq7voSBR7baYfsJGv3ULcfblAG8s1p0QRN8E86j2emAk7tq0pn4eKu28RQAiizPa3VFzssbSWE6WXmSGvI26ps9bkcaQdgkSXWgRMpkldtgNDah3q1p2Qa45RUJVGatzz+HwxqxSL+RuO7UZcY9KBERWjl94l4A9EVvUg9wRLtwMoZp1Cxu9une5kA+4QvfadVu5p1sn5O5qKaRGR+QSt5M8nZjp4toGtazoKTJaKAjWeaUL4vK
eN6/cfbcXp+IV7fVFeTeP7SgR97i3FxvaOeoosyJgU1Y2bIt8aSpcqayiwyyUyT4wpbLeNTdjvSP3Lijx1ud4o6rVLR3beuFRFVItNrpCUvI9BIwgGw3A8/MCm0S+ajFyl+tiLraF7ArIwIKTk+30MB1YVkB5vy1OfFtHJ3cJygI8ixEHE5DBI7dluDOMSxqcSVu8Djtt5ByV5tnzlO25ajHRCqHftu6a5v2tzQHuNJEekjmiS/NUbkv/IAZ0vK2LNjw3Mte6xRCuC5WBzyz2B5AN9+Hm2BwH0p45BMgaFcy5HLUx06xM4E/ZEH1z5aYnWayQhg89Bql3xXmVtm40sqx28koMvXlqEjc8hog6CzlpsD/ZabGf9UbOzY7wmytOYVCUBeoM9kDggg9QvMM8Nq6n51Eqm74qex02LERtLjmpfAah/3Y1NU1L8PDqakFwwXN6S2yIYJ6eK26ASULBtYwA89FKdPLQIDjLtUfqFsuxeaLHYAMf7rxESZBur85Nrg3PX4lNjyKOw0ZOm0SgGNXva+Q4ezzgNoPOCVd1Y9eHFLIE8wTV/8hFvVuG70dXW1TxbtjLqQ0HXo2hnxv62WvVhMPkE0Hbuo8gVJXW1FU7VM4umxg73+3hvJTCzPfEnN0vAD2/slh2rXj0CO+PAQ5zMGAB0YeLxRMzhGvzDSBemHUi6ZVat71K0ZlSsdpcV9ccedQUzhgQtENujgYXxgJk9NK1Gy5ymrbPiFMGPHzOA5RfK2I9bPmp6f+y8fZ8n1C5rqkJeTSQW1oTjKpOwx6BYcGlRvF43pORI5FOfi5r/EAK7E24tVMYiQ/5NM508xiA3VJCClTAAu4qLA+QsEy6CStrlJblOLHjJi4sLqaqJTX6MM1xG5z3HNsiuQLjbpa1RlMjVtZNRFcFOIvat5HpiBxtXcBXOaWadmItNDuEFrZZ6taUsNYgkBzBzCI2G+fkaoi/6Lfh9a7EvOwe6MOtLhop9k/i1TudxMFxLmi+vVbxcouI5j5v9pnfY01yVpK4GHlDn+tAwL37IeJS87SxbCjFaEm8WuLqMRcVvIK6t0w0gJafFZPIjV042jqyJraunSNK4hMGq3GhFFLwJk/FkmoRU/1u+nV3bwntuoApYHmgNuxm3NBGPAK6VPmsnO0GPzLMquClsNvr+L4+Xi9UCXLZuDXLg4pgVHjs2H3W2avcP26raRLwGU/LDiAI68DcacVgpwh1YaPAczDYki+xgBTJoA4z1VPYCUNfrFVGc/zgdjlZnnZiTKVnrcRokgaZ0+k+n+JoKNNZFHFtyg4Xy99YRnNm5C2xInujoDVOkK+suzKQrOTlUAb030p4PMR8ge3gDyL4wzTWBuzXY5ws1h3mpEuRbcCxDhmExE3HpDocSYvG2Wvg5608YmSxDORO8oiBEOnztSfttC2i/vaiWwmbMOb4UOzXdM/uS9GuFRLTsX673benDajwC80+a0R/KKHQOAbMqWT49WplmEmTdii5yu1jo+xX4jx9JG1BcyanSFt7X15rstUFPYikTrEFqDeI2gVDwPSjCsLA4vEC6Iy3hKpaWPiu2rNufGtsne0Daq7z1nydAHYs7QUijev5N8xxVFvjC9ZrhOIydsi9KQfmmp0kfa9dsp0Wcgc6Nn27DrMVJY/U1uALZ509uMdS7rMp8G43SHEM5fyy0fPSsGs5WTRLmMGPGNPeMizGW5ESHGtxXOGNDXx7Olmktsj1EVylCK+yRwJjvspsYcG0IZuJigk7uGrjeDRqMMc7mL2soSrW7cg5aGMG1ICf611bDGtOnNqr42opWQNCgUvBItjvX8TBdh5V5AgwDxc3b7+XjLwkUsKIIqtuYFOLIXC2RiZNuPTou77cKxhS1sAOyrrLknEl01VUeBK9L5cH5jxHIM67TvqspHb3UGVt6dYIZ1gabAaBXjtjQm8T/6zr+34dHQWGqPEVo3SBub0ZTrTlrCnu7JrG3qznVQBl5/lwFUBuB3/6WQv
C3uWlwzfo7gRnuOJ6TFUsbmkMTBBdoFnK650XohOuEFSNV0F7RYsDU8jq/TatEXkbcVpX0ONx9OxqEVbD4XyybZGnnDwwjJa2pVDkW6gB1rcYRUo5ve9XrROcto+MMBecm9BKreMJ5bp12baecrwJKi84Zqqq46BSN2yN5foNSh7cbToK/ngqt/S5EFVw97q87DL8GJ5lwAaXt8tt1LQ67Xdp2ee7O3KuLdFS53oq5Aqe7zjYEuEcvfMq7+5CRtLafdguGe7sH9u6PsbufZfjg4PmJy3ezwfeViRtcIshOdFb8xgiYzyA+lB0MQ71pVCuDyt1Z7OHcoNd1SLH/X458x2Zg8rXugG8m9ketS7REXRnDyua07sKTS7pjCFkBSwDkMX4M/BH7nwEgfgS50cv55iEZPk8nfujlEXSIZJEF7HKblgNq0XI4ER9vs5OQptRKTuJ41oT9nkeQge7XXRxH4p4ZIzupSdxnJ2V395gm/GkB0Vp9kro2nG9T7ItQl2RLqc26MKqT9lwYPeXyxakKtOMbolip1TdIdvNVarX1VmdvChwJYTq2utWGJVlm0mQOx/izThorCQQIh5wcR7VDC+0drxYqbzLmmdFup2tOScIV69SCK+LR0smEuegUTtAk+3l4SpulIXcXYwxsmtfH5vxKCiSosxJastI4UrnxFNW98WxTVzWCiPgh+KIcMT9AjjAQZCLXd4BA+6FrnzOQdy25mB898CX2t0l50iL9d26kaiGo2pywRJflZBE8FiCLG9jWO3bckQEgEPpRMn+9NiA6W9x8SamqO7C8ou8w17O9+EYdPLRkXLmppyANVbhcosdfW1TqeblmCHjfjQuvFQL3vZRV4gd4AeFSLfTL8DDJecyqsmTQWxlAz6lgIf1FLa81dcM8vhFkpnGOeZT1bt7zAE7Urif+oCrHmjfVPyNMGVG4eJTxIVbLcRCq0tqg6H3s1Yb6mbVTT8mKbAEdgC+JI0FOz3hrJj6x22Yt60z4oKKcPROLqj0oE8rEheISFPntW3IE+E119vwgCLdcVwknjtxtqLVjU3H9WsNuLIhbhqb9lq69UGghMxLyh3ikkTx7Yqy8jXZBkUuMyaoBNwBsrepcREG2CI2g7iyeGVKLNNKEceP2ihgKMh8W73IRv0o8MbKD+hLu8hNJU4PeieHzcX0A2WdEZNplS0suEXuUl2XKxeqpSUJPIJH+9rFF1uA3AO/STNcuAaVu7qj25W6sYjFrIyqCzh/j4LppSg463gSQ13x9HZl9vA4NT1cuTpMWnr9WAWdJJv1pNjelbKPgFmwuDxcIE/er4swUuanKKKW4Ddk7LpB4C7vhoccHpiSoE156Xy/aQhbF/xa0FVYrCa66N1c9qIQqU0E6YBn6bO6yh0rWKNxirMvBIJpd9vbfqOgumSt7gIpWs4a1DmLVBpM3aoCL0QD6cDN+GCgMAvi4p1q1hBkdWspzS6JI0R1xGSDyrwAV/AAHY0QTmfFSp+z05oL11B72OSkE0Wb8lRUIR1WPuLtjqJC06oGIqmsEeM636j5fUYj35mTvLhI9vmdpPbknTc0uTeiXglMMHNV4rTiaGo3NsFVvaCd2s8mFTz4SCTwIgeM0zll7UhRwdC5Jfx2Cvh2PXaK0CGSAGgaGADs51U3g0egOOuNAMk7FhllKIwQ6+QYB2Z65mAPR29mrlFeNqceRO3H+Ti3g9fJIybVFRkdk/VZ9RGnWxXdzQGJDBsyH0WifSgH/sFRkEf+7wCr4Lkl3hTuVcPD1Ths75ZlUgd0F9YXQeg75LAy3CBGlLYiLGUO2WvBg7yBK2vyfkFoza7Pt6I1hFhZH/EtFlzNAYq6IUF51NrGF9x5Bgo3f7UDPcAli5HpFkXCVuuMWUJ+zldiGnr7PMEFEeQXBw16da5uvT03oeXmHlXfKynr2qCUKbsjTxLEYaum9w0gD8JaENc5P8d3RYKXyPOXzfWyt49OxRVlv48loUQia686I++T1T5
vB3twKAP1TCm/ndeP7gNO8nYeiNjyWmn3qHs6k01Z7Ddh23QrGdgTEOMDlZtBXjJOjNDba8/M83ZaMxA4eZEZgDkBLn/WrzW7tk/m1g15jZCXTdWvVpJDuIlOCDK1w1cP3TTiApjnuxS2puFn+7iShiIbAg0wvNvmuDhRK1vBj3ppX7zZKhlvTWeLtgWjXeHiAC2ui6Ldq6m7M4Z15zgr4WbuT9n1kLnS3d60523YL+ejuRUCBSfOD+/ovcPYChQcRYhLgkbcDu5CkQ4XT1FT9cG954NE3p09OD1I717mnZksaxj+jAKHWUbms5D3aYKefiAqY8Bpz7vr/QjVSK+bT3yDVKxb3PqiY2sXJKF4J5gm0s13JvfSvXJPAlyb4R/eP/JTNNTkS6gdYeuhbDpr2CRT2JnXn5y9mPrd9socp9sxoAq782UqF+ATXObpIa9nf271x/MUyqwx9JiU4ipPebfPOsU5cJXfJaR1VVEce5nHnDTNY0FjgvhaEm66ZKWLFbG3Cia3hXT1uwM2FvCkR0AmPXoMkkek4m58BzAo8dTQtSV1F5eqDXwh4wtLj3bQgPD7dol9G1KoIL3ERtkQYES9cBt34EnzIo7lqS+yU+NWPI0IAioeDsyFaDZ3YZlcxUP1UABAkaU0HM8t+li7SgKWllpe9g3SpEOeuN7aXtWZACrYWiBBWbwUlYc/4IqDwlXUPYo35mkoNkQBA6uawh5RTwXRCsOClDfPFn0uVHHNPPKG3AnQqtW2399VmhqoblHVICgHusQnympaOwqhRqIO8FcIz/yzWiVzzIRcJiaNa1RM64FNdJ8efrcIR7E5DLrUYJ7K3BO92Jm77DEvN94OhmORl0A2DPM0Tsl+aTHlTRZlMN+BUeirfaMvDmpMWi3g3XdvOkB0LvuzZOH7gtK3AogdNdcx/a6sAB8pc9hAsR8MhViS7vqhqXf7DjYHCQG9TTHEBuGKPXueafWs2HhnMNGFsGQgR9asYC8i1sN33RHaJFiGbRlg6RV3z/5ypNe2Nj3lii8N3m2zdFpALMAUBll4Po43YIHnrYPL9ZkDn/X1oU1UEEsYwGA4xXR2wt2UCo+4Ta0BWKzbRffI+7y3U6YJnR3rXFE0tik7Amr+g3TrxAODIZIdxSNCyJ5MQtKye56Py0yD85HTl7trIcFFmPOqMQt5lcL7ae7CGvCJMd102C1tVjl5UEYQRc6PKHKAXbygjhR41yyoAK6n68V1BW9B61YuIJTBhVjZeo0iOBIiUUquH2cVFQmuDnPLWEFA+clWXZO1O0ql4nLjJl7NSAscu4WhAHnJ4VTACEJZD4ViLawhjwVA6l1YGdWXAvbq6CtMBqaGzZbEQC1bx9fsPVzJ4GZFU1xGsKOJM7jr1d1BTmMH4hltk0wDJWSzuS3m7sDiDBvSqpAYt/kZK/TDo7NCXM3HM/ujnmpXhM2zAfAjuDaw4kHuEPshWgpycszQSsqfV8KVgzrZaM0J09rwTSJ82oC//7dMGY6IBIAqMMLYM/epnxHeE5cZc8YUThNn4Rcidq0L6q6f80vpnxSCgNjdC2s6TohbtIAFDP+wrMzBi1wsQHQr8rQGAXpxWHgM62IbE/7m5tITS2aeT4+VIzD+IjR0J0iP9+3J5bc34jGUwENUvvxJaUWE1ctb73Cb7PH8Z0zrRM9/fP71sdHH7CtOD5rw2LXgM3I1iJE/1+kbVSfFMtp4nvcLLkz/QeQHVkdyo3TSGm7BsFG8te4udjd6nzc70VplMfqEzvu1Tlk7/bzftAkXf8EXSS85WeLU8CGqyONTgn0cMjzeo9j8vgtAEfgYAUHmbb4TeP7j9Dj1CWHmzUY1b/JeRp8e2TidE65L9Qsnjp8vYXqNIYE9H1P4pDjwmr3abS7Z59Z0mvb+hD+uoTXixnmM3QS2M2+t6iF+bAVfElwO79cJGF9AwcuqLrPIWWRxVoItaZaCPXnAAuNXm4w48FLw1gL2dMB2HpossIyYe3y
QBLYNT8N3flA7p9yw4Dm7EuAQh0k4tR34FRA4fJbWSyMJYmjDZVACOoEsMtv5QLeRzy55uI3EkLduw95xG4F8lM/wNz57467KN3L4MkgMz/nab6/NX2f5F1t3hunEx6wK6iCDn5qgTsoSsEMMP+ANK/Imlzw7+RcMd6c/sMt0Mq7KHat++Mt4fuMGPXQi/7gewa/rHNiFmzUBy06xT4GVpW4AnF1+sjJ4e5htTM+Wh9uhrG6UZdZVT0ZqPzVpAK63MuInw4LXWT3BnZ4qB4ZFDDbFLZOsdJ5QjPmUp94rXIJLFin47zWgdo0V2DOk0iqLPxJVFEp/Ir/CFc68xRXzDqyYD4MV8W1Y/Tob0d+Yr18A7aNshyIU8sp43z0pXzb+ePORb8xnZ5YHbz/7GUr/vOM+LppS/xLY08w/EfX0G+tVfpY/uXEGgPsT+X/edx8HfOZfAnyK+IMM7ANhz76xnWFZTuyURu3YT79gVAyBDvtyKQ++AnSigUzlyXYqCNifM+PPOvfD5sXzwP/4ecFi6B8z3cfNCwJ9Y7vKsZoyqIefkP/Tfvs4yL9VAd64619XUeLfrCi7fMyyBITk1omz3Cm/rCKnsn+JItg/uIhkyVexk2TeAInC3uLoeduPx9F/ozLxbRx58ZD72ZNbOvAOOwo2QQM0uVmZGKnlPFlxZkVP4HyAGfxzwURR1NdgotC3YGLeAdNHSZPEd+gR/zowod8EU5BVTyj6NGEKvodP2F1ONdVvYgeZ/v5G7DDP+vRLIHpRt75ED/kOesiPQs9bOeYNer4Jhm/C6Q0T+SaF+dJdz+hNeg9QE/+TMTal86myyiCvP4OD4LF5FsChxRacoXpcy7fc/AaOH+V3FGFfOZ56S2WId9xOUp+wD/P8jxZ0XrlvuVxyIvLG11869sNi9D+uECbe6j+eU9dB6j1VtQG+l/2T9/9Z730c7f8OBej/S4YtAUdz6t/Isc+Q/LvS6+u1ovfC7F+bXd/qX8+aFzTSVxCiiiZ7/uCpCkZn8hYCDNX/+uGzTlY7lp8Cq3vDNAZASfU8MLjQeex5zw+IK+VsuQ8KK/Cj0+NykY+CCgD211EGw9+JMvg7UYb6qChDvqen/QisxI5RpiDTgB1Kp8qa0nJ+guV3guWVEkX+7WB5KyD+GLBUTZ5nJRwA5IQcuBLS6Z9g+RORBWff4y/sXwmW/0bZEvkmf0mM/CnOLGNaTwLUI7Cf5i+MLWGL2VI28s9Pn3ePPT6f4B6fJ3x8RvAvGM5v1y/zJ5eHX7G3NdDLN/1mKfqwbey4v06tD2jSQrHXFInA3pEg2HdIEvtRJIn8DjXUSW0O9jX9GjK+LDy/Ydw3DmSnv1cRAf19Rndsz/mPJv/CnOQ7qeF5W+nEAH/tl2O9b+PHGY5Q6PgP4ealD+95jDnjPw771VVvRkKxb40EClVQtL4ZaXL6yxf/Ezj41zRWvTSXPYf4d+qMv1hgIN/qeJ1jgqj8s0X1B/jtw6QF8jtEuPekTZCu7SkaQlPaRuW/2PX3uPXLXBa/yuNvfPybif59fpA1dRyk4Oxp+kV6f6MDv5J2M9cNLAfk89Ry8rr69Izhv09UQHGU+Ao0BEK+kzHJdwjdc/X/41HzVk38ezImDh/QUg7XL9/coK8/0eTze6F/OH9+Nzze/T5/zannP5mE/UelZeJVpCFfB/7vzcqvB6Jeh6KPTsrfoX3+JGffS87w1977w+TszUgfjYOPUjLB9ZaZ3Vh1Bi35VwkNH8xgvlYa5vfvQfyjkhbxG0Hjy+VG4j/g/odnrGei9cPhY9gtbB6xf0LnR0GHQb+JHZr6S7Hzntb5DfnqO5SlP8hr3xekKseJ4szLHkrUtBPUoOBDo5LAKrMqc+un1Kmf4F5PFI6KS5EmCO7pyyMfgtNHuRanXi+QYc+daN9aIMM+YcSfdy/7mQoKRbDEm9KY/JrWw+j49B1txK+rBDNP0k+VU7agVvhcG1X09Yx+t/T4WqvD30PER9k
dw1+rbgTytmmMfK//4wdobu8a/b9REf72irabZfZ0Y6RdBmn01Gbx0+TOJQnvMV6ib0TfZ7gQHzkr6VeLS++IEuQ7DYXkD2go1AjlsmOsi846NmLKn7fUnXl3Qv7OVE2+l6pf7vZ43rA/aOJ0wEvqNn/dFxH7x0IU+IIYctxLv2CLaWxwfWBaILAned5Ul0ZaPRyDIeYLwMCb/wGhN//febfKiZ3pNljwOkvj4dO/izP8Bq6+F5LfyxUZ5B1NDH0v4f8BUWxqIIGg+LU0gQFdBtQH7vF/ \ No newline at end of file diff --git a/docs/getting-started/1.0/_images/images/devonfw-org-old.png b/docs/getting-started/1.0/_images/images/devonfw-org-old.png new file mode 100644 index 00000000..9491742a Binary files /dev/null and b/docs/getting-started/1.0/_images/images/devonfw-org-old.png differ diff --git a/docs/getting-started/1.0/_images/images/further-info/devonfw-org-old.drawio b/docs/getting-started/1.0/_images/images/further-info/devonfw-org-old.drawio new file mode 100644 index 00000000..7082aecd --- /dev/null +++ b/docs/getting-started/1.0/_images/images/further-info/devonfw-org-old.drawio @@ -0,0 +1 @@ +7LzZkrPMki34NHW5jzEPl8wgZsQkbsqYQcwz6OmbUOZfe6q2U6erdveutsrPLFMKUADuy5cv9wh9/4Jy3SnN8VjpQ5a3/4JA2fkvKP8vCIKgMHT/ASPXzwhJoT8D5VxnP0Pwnwee9Sf/Hfz9XLnVWb781YnrMLRrPf71YDr0fZ6ufzUWz/Nw/PVpxdD+9VXHuMz/buCZxu3fjwZ1tlY/oxRC/nlczuuy+uPKMEH/HOniP07+fZKlirPh+IshVPgXlJuHYf151Z1c3gLj/WGXj/DaC60tHt45bpqQykML/+lnMvH/5CP/9ghz3q//j6eeKly4WDeTtUcw/Oue5Nse/X4E2uN2+7XX77Ou1x8GnIetz3IwCfwvKHtU9Zo/xzgFR48bMvdYtXbt7+GibltuaIf5+1m0wME/MD70qxh3dQtQpG1pncX3VbihXwZwHTae01/ogLOXdR6a/C/mgb4/v/P8xfjPzz3+H7TQryX3fF7z8y/w8WsxKR+6fJ2v+5Tfo9ivgX7BD6O/748/Q+lGzO9g9Rc4wv8YjH/xW/7b3H/20f3i103/By6D/x2XEe36a5y/8h0xbcMfB/60fO3L3Ccg2Hj++eD9qgR/s3wf+uK4j8/5OCz1Osz1Hbi/c9+3+jP9z8l/h5LbnutfQ+GvndgPff43+Pgditu67O+36e26/B5ngXfqO4CZ3wNdnWVfkPx72PszOqG/hZlYzz8gy/LfQ8/f+4X+6eDyD0ML8l8c4P8oy+HQX5sOR/4d0yH/juX+bfC/3HTofxPToX+DOpT4/9x0+H+x6X645I/8/Y80JvI3OPy39//fGZP4b2tM4p/OlvC/lz3/exjzb8P8n8Ga/10p8p8gu8DY39muu/60VnH9p2WN57+z4z+9wPqn8x32D3PdfzUj/7+VzP4ZYE/+ne3eWzfewM//NG35PfY/wP/Peu8fB3zqfw/8pYpH8LLuvo2Zv/Tb35p/Hca/GNXiJG8tUHjWAziaDOs6dPcJLTjAxmlTfl3yl+2F7899yvdizDL+NJCAv+I/3hT1CZzI/t4PX60r6DwxwBKImGY99L/q9C5869vZ8/9K7
ysiYhavt39FMH4XwOK25PPyp7jP5qHO/tT+qR3ath6H8U/ghD/dqGnuk5Cbz0Vwh+OfYIT6X2Nf/n07RCBEUSD+A+2QfxikEPJ/4X8NKhz/O1AR+N9j6o+x/3pM0f9NyfSfoMz6Y+K/sF06JHWZ9//Dov9pt/3DWPQPgP83rEEw6p+uBkH+vtewDH08/+mPzmUy/1WHE3v/aWy3sv6fEPnPN5D+cSHy30VhE/98OeHvBXad/Y+q/k977B+H9f8/imrsfyuq1zyt+vuq5fUnsKIswmCB7/7s7f1/TbY2+dcbtfG/3mLm98UMXPHz+vurjcf7UcHr/3u9fattXuD/A3r7+xx/tsbfGOIfBUUK+2so0n8PRRL5eyT+MfZfj8T/LlL8b2XIPwHton8vxbv5T2mVp03+P828/7zn/mH0i/4HVgT+ln7/K9j13+ftf59h/4qE/+DcL5+izM9bRAQEiHC1z5rOAalSOTD3j/H0KsErGYZlsfutCHHM6/7LCVQxfU9gpYx1PYFhNCl/eSdLH+Aw+36KD+N+Id9HmDerMIwFg/esdc8V5AvDOIp1H6Lw+72UEsT1VC4wnSa1vA2zbCTRdfRknw6klCov4KZbjvfx4YMY5Yt/HJksXpHzKF/621CvRk377Fkrte51wIuHzrOHDpUfiddfD+QVip9SYvnyOBghL1p8so8XZiww27zswm/CiH3x8YuZt5gbWOF9z8AE6rs13DpiXpJT6qVJbtQIrv8u5YuNHlcZfwyMfzlPkf00PCI8uUQ7UW4vH/UDkl8uxC/KxPSleeyKFY4v/uWydJly4iKdHP6YbT66scdyaQizUfvoci9+lJHEloPJvZrxPuRNFZsG/vXs2m3BHl3Yi7TXiOBI8FBjprW11KFenKB4ZfPQD5p/Qo3qlunAiAxlw1vGpFZ51SPM1pzIpF3rSJH7uCRH7xTqNgAmMJh1P6tXvqT2TSl6ZOTs8BLN8tDvYVF3Xt0k8cxxm5T9jt1hzd7+183dk+iSSAPgXoYLWSUI9fsFVt9vN6WUJmxFnrc7icFrBdt3sHBDMzYsU5j2JEzTh6hclicSddlymIgtyQ32wl3PMKAhjg2hGT+Gc0jI9TIT8R11UReQ8SsN0zsaxdCSYJTSl9k7MjTq6H4rHDrMlwx9TXQXbfn9gxFr4H1v7ueH1V5nmP75vQ5V1J/fMVDHzazNMy9n8idEfaddDxnPvujweTyE8BnVxqUW9PsuFVnKlK6+eG98axEWFRT4574ni/6UlpZccU/ZUXGdIZV/J+bY5xfnrCRr6EzG/emFK63E2JLEwj0Z3KJRmE8dNo/StHtog28cU4S02hkoEmAc2DYDX2669A2Jk1K0ebAXubzToCqibsAjYQmx19OzboruEYua7g98yCKdgRQSmDfcBX2g5gayxOQkbFeHTt02tcb7oRMPrbPr8vW90XbQGOZghUpFW7kdC6RhPtka2u9m2B/Je/XNbB7L2GSGp6KfDTawjPoZ3PIlPNcV6+pefsWtA/MP8mmNJzEmu0D+2JbXuZcgMxq39JF/Ew3bVTwbQ/dNpD1dBfljfJxXpteUwTz5xvXuM/BpTEk1Sarr9dwgN9TvcBCfIK7r75QK+/ha9ZF86Og1gTnxRHS9AfRRX5NL6Jg63KPLoVvsUJ1IVuzyqJBnJnR7JCsVw/xg4zHI4KlfbJcCCalxazefxsd/7ofzUot7isbJ+ZqJoex4pctnq117TJDnKf5iyzQ47J7LVe5TUd84PhPE0rMXGlXDj1JYYof+8jXWR/lg5RXSeMPGLwqFWDgYnemMOdOPZZjpaRY7T29RRpBLskQqD+eeOBFJR+e4uzOe+vdjbyZn2YM5RjcU+cLllkqZ5mBrzjdbnIsS5XxZnrfP9RyuiThmp1xRfmzGnF+bqa/PC03XInlWPTNOhflQcrapKJ17DifidYCT6gIjTUH4vVNdsUo7ZXQ23PzbjGczTcmDL+b0UThlQ+lawTtrf2r9j
IAPnyf787ly/yYN1p0kehnLohcNbVOBtzb5uVOv2GVuvc9aXEP3p9uPk11FognzKGN/ccNxOFoyKeMVKxzZ5KJLsn0oA9bjV7/DEfShFs/5eTb+sNMXz7DvXEbJ6EkG97yPaUYk3hpe2lOvDh2jLuEDN531pr5uvRPk5+c+bbkvn6zN8jlS3YeEsPOeCeyUBy6dvdwe7VCAzWtkbwmqIP3eGwswzXPL+vSMVZN1G4QifNZNE7uR8UnhCTIluA1Ckj9+LkMw14/nlJAkttKIkkdKerKt+G5+dglgtvhqRzz3au7H14w1sgBdvFqoEOTsc028+8jLU9vhTr95I9RAGzK9927Jfc9XGQuY/JD5r13N4rltNhFz8J75uRHzfOlAPzNrUAn4r9q1DenVifrcxZDdMjnhw9Y7uO/yS16leNj5DdJjbTMsLqZNmubz1fj5Y8Gx27XWL7LyG7I3TizsNq3o0L5BMJoTX0ZRx4SRZUby/ED1Zv7ygXJfGviKw9379Buzu/FoXmWA+LzyARCaHpXqCQv/w6AOD2xwbMTHjfxmpbtBCr0se/gFRhiA7bjFU35tbDr34yvccU+CsHSVDNxtCXGubeF+87wd5RUJUDP3tDaL3R5k6rkjkttIES/xHmSsYoGf9gTIQPiFB4ODpys1Fo8FwRGzhDiZN1e9VnDxzg9Kp/wBvPTW4OFqnZJRStQEt9B4j6S8TZ/emV4808e44tJ+OVMI1SBnJDWd/2CdHZizsRiVI+NknVQDX1fItQ9vwibqVmUifubODDW/AcnbtxFzRmBvIN2XmQcJXMwftZuuPXF+uOdhPu4aGuQAmj1OpvyNElsB10gukixskE32Rxw/NHNmI+wliqaJQpCUAqt8wzdb24Ep5We6Afy/ju6hsRWy1OVqors2W+vE+ePyQ2bc4759jsm0nwI+v8ma9Q3XeQ5QwPNtogNeMtLo8TmJwnm126k27M9NVVhT2szk3MAXHL+vk+9V5tk1HbigOy6TmN9UCmKMufOOi796vZ2f9DgokbQKzbF9aONz5vuvx7gvwoQ3u9pALiFiZBj+7e70MrP7vQ1APvRJ2GLlj2lKi7JBwv6E0oI5AFT1VDefOjdLo/fvj0TjgCUYEbT7U1Gw3+gsZcCLkHH23TXbe6pydOuTROq3j0D5SRAmU4CItGWrSAj4iXwkjo7c0IBFkUEJ5Pec+4Lfs0TLzejMLp7mJceXymEgGa5J9Xr/PlgwFCBfDcTo+WNQZ3EoJ+r79DxlJ+QfZiplELMC8wE9ksITarl7yR2M1i/4+AEbgzHUDWmW0UGwbWpjOJddn3N2aUH5o4owNv2eIeC7ZgkVUnO8j9YxnB/ND2VKy9debngd+6BOlgLXrJiEqHHqFnLNYv3DYKVqfKPXXqJM02LPwErHUUasxeFAzAJN+EE/c53fuL0FbbcruIPU/OX3TULEzeZLv7l6/eFPBckuKphDl/GeOZxWP85Q2OCb3Ti5ugmr63Fh1tORjbHp/Ng/CuddEt/PXy9M5SEPTyRFaOJ42Ufpvpfn4+deeKb54RCbkkY0t00k8OyJ7IgzZSnuh8vZI2NuQwmsQld54Ejh4ylGHn6ZQl/++CBmf+Yg1R6p29Sewnp2I08e7J8QjtkGzGWv8ljopJKV7OR7CEtuXv5L/YxZpffN3KfM9Khp70V9QjFhdpR7416MjuWv55HGO0/1vU29eMFEVnpow99QkO3tNrCiSQ0FE/KNiQRhaHl7ri7xYzkuUAoQwnLiYeHjsZ2hB7STzv486yAB1mA5qGxH3egkkKb6J/z72bdtgtlVSRzGozTWarhiANqf/MGINbAYy1KoUjg2J6KvhPnNLRCvfPk35j+GIUgHMtQJqDN+IMo4kFYyNsdVXdPJEg9KMkp4/AJ4/bmq1q3q7ro2OyVbvLD48ZOPZKb5ml+qHL45oFdyf1bCu4QjfufWPPk7t7jdR7iH7SLUx0aE8/FDBirTdrzH2Ab/fFWfWvSeR
MA8q99YHSjVVm5raXVvPvyTH71JsStG+T5vW8vf58WwQwY8qM12JKTvryfBhW0dMAYnjls5bHUPUCRKlq9l2M9zs4fK3IHFscMUTAZ7R7Iyonw06dtl/4amwGvffHTf5bbBHo4sTwWrPP2JJasX0L9qA+PKn3l6c7MrVeUGvVbOsPkFBfwTlaviq8XLmSkycWXgwZ/jDib0jn1HSszpsaZFnplWTe8rx/ulMT8AJ34BTnuTPsjTngkBbDvD+W/PcdejwW2QhnTUFJfWTaxNxeCFzTxQvWZ/CU1h3l/sceKJIJCHUc/2IT5OlUJMRBp+sqqeM8ht8Eo6MbHVVuXsEOQnypglZ0fBZjnjrZBiCxnK+jw06fr8MsL7gI6bwrRA+yAqUa0OfslPh+9XQ2u1ofrN2n/4hG1QCLmKUkaBPIxjfIv6+HG/1nr+53pytdwXlapbTcRQ0JW20uQLBOF3qhf1n9kG1lNAHnw05CII1MNL6sZhBlMf0UjMlp+gVVm44zym5KILG9EmPhV+0c5XiyNmclK9U/5GAPbD9fh2bVyu+IGI1AhEPi6Z+blzIG3KyREYQzbSrD7ajQUsHLULfSfuWcl+7HcLwRsUpZTEp7uN/L6sGmiR5FGcnmSgidyvS5EftW68U5R9zu9v1psS9bmbqvfDoLcvJFCzxNIoIGd1IZn4htLxTkbxw/9VYLNn3bHFQPCTzB1fNFmWWGYm/WVOEL8gO7FuG+ESZU/LNLnmA8bYSszwOcoDkqtb/izWUabpFXtNxPypwdy1Mk0Scmf06kEklADk1bqgE9HhQXFwjPk24emw5+NGdZ/Ib/kT+xvmDNS7XI1dgU8bb2nltjGam24gtxLFbEu9z2phsH4AbQmnrpXvQ3DSmiS1ec+Xz+LrbSb0ctXncATKwSksf7nXc9uHCxHbfIRf3XKGlzm1oeC3WQpq2/d10Zn04bSVZVKF/+oD/QPUHj8SSeBHp4ooDjyXRx7c1FQjS+jKEO+cLMeC1QtWjkN/erOVg1HLTSQ9aTRYHrP7N1MDDhDLEEUytF2fA3zY8KGyqQKAcGslnwRF/4Y4NzsqXxlwK5rsQLSIRnwvCtbS1Fad4Ws0jIgYdhFcSNy72lIE+g0uTpRHI7jsgR23F7bHXUyzChmWzj3PYR1YTWlh1UibQPVjH9so3pqrrH5RU/6oHNaglKLPDHJ5J6VmKm/5+HoKgWhajnwujLQUY1aOAGz+iZ4D+LsAESrD3xririXlAh7lmUJTe1a7x3E/sWJb2VZyJZeoxLmM8NBqz+k50O4tpN6B6gqPMn4jBjQoJiOBDhVcgBQUeyC/lPuHVe47s4B+uV6Od4spe+SxRCQk7CeOZJwBbGjo8ixg5R9dCwZzfuLBuUNGvFX148MljpbxO/NTUJv88KORPqLZ3TlaulQeQaQ3y2eb8qMlbPN9wMzAdK/2RKjpMYS6cwCDXJ6oBDr7gTEJ38bPuip8gnWslX6v3BqsKRwMRJHrnhGbOwqlW+cQJBB6SpsupumT4hQ3JSqoek+WqPP6iBERLtx1ekxXve1jcan5FszvZZws380w0Zw3dInNrDoIrb0ZohmQLShHtgwfxSNcleUSCMufyeihZzZuRQShihSuJnKji+9iY/M+iR990Ta7s468CUFMiu7lBIcAOIO2viabXg1d4NhfbUlm1kXkWetgZ11djiTXSprN442O61ICygRZeZmI+u1kqxbnvSm0k+tD8kzDvr0nGVDh6pB6HiZnKa0OGrk2Xcu8d9XvIC3TTwsUuStnAEaSmeVmGD4Yvb58BatjhAipA/zqK59v+fEykpGqnTVGTNB/SIsWLhQhibeuwtjPnVv9511dX++ezMLal5tadxmb/CTurRDYrK0NKeXfpv8AgbxG1a0lrpJwFjWWsse7qH1KmbR01Q63a6CpWYh3jgS3rLiE5bkTjAl0JyaCjKyeWkPxPEgZ5h0V346qGDx9Cdm2uPF+1z9BBhIumAI6jMvOc
1g+sYnmXC8KwUUXxnCaZLSsTsnHHTl9qvauncJRiG/WdgKtHPxVFXnotFpTLy0Zqf37hreO9MMGinwIlGaZT54x9fJvxRnw0+ifcQbXAEtVH+kNetdoOVBY4KZThqCV5hlG8jPxsUd6E1c9E4nebHqibgmcdE97IRYWFc+2OkFHZ1fd0HEsWdsVSP1RNy/fpws3hZU7X+7CmOnoV+zVWpSshhA6ZR4EMK0TVBK6VYwaA86BFjR0gd+zGO7bj66Ioe4pDUKqEqHhGkTp7zGi+tBWdM/HLr7K6JF54cwtMzx0fsWUNp447kzag/4aU0sA6hUndd+wklMtnlQKGKg+RrY7S8kzkGgdXr3lTrkW4rTm0UU/YMu3/owF1T10c2va+XxW2PaxQvao4DrikUJbu3ZX6Is5dMVS2OXpi8i0Oy8DYkBfkPKIJAO1qx4qH7mPQ3hVC6slOncF/g4S1bXYl1bvRjDZYVJvPUpeV/NMacMnc1NhfvL9YJc4pTOWQu4a/ejt/X3SDT1rqDRgAOqmGH5ytXE/6VbCzLrgD20X1Fmdc5g4uxWy46SXH8Epmrk1hqBnOuPkUA9RouLDQ/tp7uvCs9s/Nf+YPqNXpj9qrtIvRZLLaSRQPyLraUkIA8apcjYrZu3TrXaGZY0UGpc0cWNDwfy8lqo7UExTofVAqCZ6xR61MYnFeaAlMXW4SPG+FLaJtc2Qn1ldAppybw0Pd+G3DrtPrIrPcHFdn/fjCnfxRPtlT2AyuWnU9SL115UGc4r27SUPzGZH7SkyHNecMTJziJ6rPh2FXm6+Jfos3MShtiFWp7wXIL0XmdR/yz4vfMUP9aCk0s5o4og9FTiKDZW7rl7xYhGssII7moE8O3eHdqNqjc/vejE3LwgV1xcVKkgVLSohu/dNmhaBRiqGC+siZF7d9A8XKEgaKDqVWxtGt3JqhnoKhJrVLRocVtSrc/JQBThco56I+CUVFKIQITtYaVi6fMHU7bt2fiZT22n0MSr7h670lsRwINFQiLG2zHJZ+TcvKe6tryvyAC2nbJPbLCMIDcVxsDAAUxolmvSTEMO57jeIWXZ/LeFY85dwvZV8Ck9ohQXt4r3XwLJ61+zGyd4Sk3kScv9M0OT67cYowhmlLz5HrMBvDXQzb/iNFz64WWMpTM52heSJz8Z3vpjiUXdm7wtMMzrZnrcFuihLI/U+TvXdhLL0VbyVSbCN9XqOQSDO7iC99M4IwTLUNiagUSExVh4SEVExONu95AnZ3ooJbw+7ILYuEyhXCZpxtmT3IaHGCBWvZ2rbZrYnTGgJ6vemZaZiTwJzAhNL9RcTHcSLj4uLaNEEe3H0dSsQ2bPJxHmKl2LdehpCiG4GcVFiN2UScL+Y8S4/TpbBgruWZoO6Qce+dkCH4h4TFNViHd/Amf7NfnuT1RuLLNHUXdVybzgNi6ZSrL2oMH+XDskSHK/srVcSHW4zFsZ3atv8YEXL4QKCT8cphFQXDxiRPcAaiLPWBOx+y8CHave1i0hPFshUOogCquqHoYFAU3Xm2xxlNrGK7ajm5IqNzi0UpgkKD3eUouUuJ+iEge95jcRqKw6AI/bqp2U3sy4P0SJ+PcLF0CHorxVu0kXLqXbGIO8pHYDzoZx8UhKO6rpm8fG158jgKUkvmdsjzd5TVSTHK0gBe+8tbOifNKhZCXpw+tk1EfJ00m8FWJrcy9p5JhyRs4tGQ19E3Ck6slrXXHyUguzeZRsLPT07LFSwMpO5mb17mdV213U+1i7wyBRiTG7xuG8fSVB0iy3fUUgnnhvSqxzvEtuHrRziq9aAVqq1KU0hMPkmmJx2PvktCdULVfXKsrPiFiQBXcyn8jbZaFl+apa14u33xRe3iExaIpeth1QbQqJkgYazTBOVhf/pmTsBf4AQT2ILbys+fJJJ40IdVGiTSCSEjr3az2MJSNV9Ie++g9zYYzfujr8Wumikjpzxk5ztA7PdgHz0BLJJdu89Puvjp1TmuAhZg9pI2
W/TAjZfyZQXZJ7FxuCK2duqjh0RI04kAZM62IcjcNCERQpXje7MxwncBsNxAEV2BmfO0O6wbYfMJKD6R0waw1e0xAYVTA43hP4xkYqkExusgN1geBrvWlaIWORJ0fTm1k6ax7SxVXR5gTL66F4EsFxR8ZuLVppfVbWr+jT0Vw7k22ZNljnm1ADkWixMJTyFH/u291jSqc3b+UtlOSUFTTVXM+mgg6g/FnlvR5EEC+X2esAbQRj9q1he9qMW2l0sFxNT5rPEQBtc9nU5vszH5ehtCGTIRC4dceqzzIXGtZhm+wmQ86agZUVbGsgsHMFccFOc+kY1J/esGCK2kCeylsdxVQZfsBO77f7Njk5mEKsZbF0KFshYH3rJ4vZA/cebyE8000Zq0wloR1xA9xoJY0gtftdgcdom3cmRPxWsgAj2Z+e4S/zMT7AsHqMDFYxy+dYKTnPzNqTyrEN2yROp4bBvX52dgmIp86HpNKpA8Yqu7rlwUrO1GhxjeaUd9ziiZ927cYwYMoU+LyzLo2KHq4fydPH4paLa5wSZombjwqKafJs4O5jKI6NKm2AwRZZPiiEhEtrkaREMvqlSIxnu8ixG0CqCn0rmXU13+vvqfZEIUlrJHD8yEufJUYCf1V2BkhDSkOsmcamxkdyE3sre1pk4qZDUqXsnraNQMfJZIC1RRQ0y+3ogmIFgtIcPBH9eYzUSZMy9aWpKXUIueKQ1oScNopZBKFlaIbgvcMR5g/IOYJMGa+gbjwKh3HFDDo3nBnmAOw1b5YYHuN+gnO6y0erKeoqBTDog3VVMEEWPg86X7AFr8spX50P7IK0ROhWX95O6rs6sAXC0+nBTt/pm+AaBP92Q52HbsqN96fB3kXG8PM3RiN3PhdpCXSqy4ixRRjS+ik/EjyFsVQAHJ2tPPCl3tstiF6X6w2d2wsggntS6cExwn2Hg1IJO/TZleHTn4jz99Mtq1vsIs9dsJWsqym+lUD1YrodRm9hpm1ENrnAJ35VXkrsQIrpWZZjPoUK8dkxFLhQHC7navQ58kFFFEr26jb568K1bEd0t2MqKLaBPKs8E/L7RuvfddDgTwAKXbH10x3CIpQKbHixTAKXQrhOz1n4+W/p23Q/YPMI6l/6GE9POPSpRLf0JczZqTBz//uh3eAgtCeEudul4pDhy3lfpESte+AY+jXaf1jHqBdrFo7VW6AphR5b4V9G+cw+O7oyzznAMvR3fM3w1SV9Wi1XhwwHZysImAccPU9IkmqY66tk0YhoJgG+48XPaFGaYeaN40TMUPW1vJGkqpKaIrj5buIjfP1WPryrKxUSlDyL7UgveNlZczI7TGC7ycWtlMBkLWiqxg9DSnRRNE1ep7BRtpKLo4zxP7Y0BiLx5YkYhEmAwmURY5TX4PO33qdKD+s3dVYY065yt4RDgJG4lSZ5O9HAjO7xK6zCwzrCraaV7LwbNRx2BhIf3xNU6NGeeEikAd5ZkEDoh4jDVT9kz+XF/fcz1Bqx0oXEwLb5hnL4HDWQtgYj6vMheUW9lwPLm1e9GluJYkHra/FZpJMXMmYRTQY7wZFXj2Lru6JSyq+8tKf+2P5/l4VDpQ4e9T0dtib6CFUDeGubiuwto9rWxdVuKYML7xvQ5cSUb2rz44Io3+jHwwnTApro3MAn5kW/M55ZJeo8BJ6K670MIu/j0NPTALa3qg+XjGIZTkZ/kO1DKDBSoPNofHGkVyBDTy5cA9Jm6yy5A9Lg76+8FOx7m9AELGoCmx4Tt0aIFVD0l0rQikKG8CdaAMFMGVcXwOakd5DEs7dteo3WfiwMshA+8fH8mHawKes+CdXarmDDijaFrV4/iCYdD3Np5lO5BlcbGxWRr7At3jTZto2t19pXfElHteGqT55n/YJlobNsb+QgT2LCcrvL8ejC6NG2DfxHCTRji9XJQ76fsB6SsvvVjKl/pC4+jD3772ODfrjma89IO1jvEl3ojw1dCSLscwk8pfEr1ZnZMCj6u13Gsl
6alX2R2Kb7/Kexeh8VVruTaHR74kJ5zwfAIzZmT+6zTcyHspJOlOiD19oOFj+5KKqAdl/quN7pj9TcyQnrQmshZTf9cO1UAeG9dJyl+0R7YEYS3sNoGuEs4PET313OSSJs871wgysyrN5P8MeZmCpkNhpOfzr4fjzyfE8vCZASTZuLDqm4iB9smPglaV3VvFCsg7yGi8LCjvd0QHyGGyARD1SdYwNSxpaMX/NkDWbO+R2rMb28i7xPsl2gN+3kXX63VWl4SEkiUn7GeF+lBLBSpazChTbAbOUkuZzbeyJRrbv6gmJZ2IOmx0PRmxLcyzwPQw1rskCK2OH1VaabdYRlAp3LXf9qJyA1PpI9KTu+0wVfcjrJJXczMBSQEZbubPTusKStChvKE6PT+Y8RaGhnHS+8EiKawbWCglg8eTdK+HpbMAXyDKOtxnPP7bcel5RO+3uj53JBbOZDPLAptTbJl+V25th+VWx1SpZxsmHgQBVSRhrvUws61yT5JzD42l0fEY39YkejRvnzs/QqKP9mHiGGyiBd7PnI2/CC4j9NW/wzT+eMF/UsvI6AQRbNE2Sesm+R4LaDftMBPKFJC5qBdkOFCl96wFdzz2WX6k5PDucJq9j2WZwa6CM58ENEjW9XgeFYf3wG2bBewESS8KxBPo7EpYnyfvqXINptCW9cF6X3rXamA+/7Bw6LWnOFh2jJvU7GbB8HAqKzD3pWajjA7/bI+h55Tu4B6Oyb7TAk44HG+P0hNztercVDXPMUV/9jh68NVcPrGEWgc5zapaXc1yV2QO/Ot0w9Eno3Y8yvrc1HzUDwI7AxGPrLvKamUjAhbgnKUbzDfoa793Mx38wIeHqZXSUy+lpcPOl30F8sipt0npeJ/Unfe42J/dhi5Jka5cT0CTBjOl4uOF+1koG4VOoISZF6rM3gcrQeTSAGlWFZHqiiEpWrmvfWkXGSUzwnmwPzJfHKN404UYusW0M/mTGhnGE3NSKLDC607A53X7TYaIK3VkTAJomaKTF2R2pATRWpXCEHWeZW4Fb63cdgofQ8hKEye9BPnUfqM5tQR2zE5zsa2aODknS1FbXszB3LXK7G/+9vhKIhF4HTDwxrZYQVfDSMMoWc6n3qFzdU7qgv89HvqhErAz2ObbGqsR9Ixe63zMD+0MivT1aLXEm7FahKXIQnqoe1ru9bewaqHnTeTktJGjbn6pMZkr5VORxsoddP+G1bf0vN93BU/TVHhCu3cE/nEG18xKuZSyVrJ+dusE2gR0E+NrXWhGa34gdBoEAjuHUT6elyETzYFTUYvtnR2oDKkGMCOThbBXH3Ehyhf2DrAd4Vj+mRspbZ+pPn24CcEBIbhS5tNvITP/AYaPI2PDSgPPAV1FTqq+e7zTP52IxZvij7WfXS4UUN62emYby86sriO336SWLPEtNM1YDb1WM/N0ipa7YHVIbg8ufZjY4lQMzRO8XteIP7mPWPoRShgybsjj/olPcWr9IVbVBzxkFxKspMAZR5EqlJBGIAmP4QSFiusgO/HaKrrtNBLPBfOe6BBTjdZ6glHTeFXJ40DY5ARTlAVXfBUCFhVFHxeCJ9Kyp8PoAv5in/P1RT30JloGTZVuJatvjMaRa8K28iWd1zyYDW90b/8gZE/rcOAMDbnw5B8B6ysB4v2Dula3WH000Yuy963RLx0I6/KaCSlxzxXuxcQrINgpskm/kn1Vv5m4A9owMlXSpmR940l123U/erlnrWTye1HXPLopK0JS1rkh2W6KIJ9hngmgR0eH5RWP+h3G9oUr0SotIIgnwuYyT5FDtNlLCdN3Mhqjy6momQ/D6xUSmdKQvHmgg5bB0L4dp5Xf9d87j0an/iyJwzjnsK7tcgXfiiW8kzufAL1dO8HSxDnD6oRCcu/UP+90fwr2dqafUgmB885lTbXtof8GExOpFnVM3N5FDBYSwEIRK6zI/5rep9L4CoR0bgoXlXj+Ij9KHWtDcKRwlY+95mLkV1IvcY62
c2rBiAI1F269yrt+AmPHrfYB6TbI2nQV/6jJSogz4s2yGdcVY1jNpJL/W4XA7knDaH212+sjCicazVUSllNaJRyu+Jd4iacv7tFyfFHvIYhh6WSnUs8mpR54+cm4wt3fR1ffvpx2kybn44oBX3wSH8mPbWKMG51jqH6ukYAEHlKzO8Y9QlMDvlUxmS+mNfrTe5F99yc7ngt4Iu44km2+7C8yHhxRBIywRcTWqRwR6hF65rvD2VXP94nsbd0aBLUZClbp7U76Rqhws/wh+mkJScUCQUKCSWbSX6CXU4iLi8Ta3H4FfNx2qVyqKu4LDyxdDKloIUT16LcjzaVfdEQmUGtTbrLiPAOuJnNzRUAogjqUzpbfymHwwcm1KZ54rImgN9LBtUfraT7yLuJv4bpofDzQGlQzHR/zNGDQDlnvXZ16dPfFLjeZJ6XGBiW9RZJnmZl9HzBB5UG9m2FHg6cGVxmUKtUIxcOgVqeFtcp3OESavZ39AFMfwth0g976POwjVftFjuF5YrmZRb4BgEHZPtKiBmKX+6Dz0RPgZeEer5nEv1YPhmo5duv0YYcm+rxyiT66Xq9BPcsUS/bOype+szjCwv6Fux5V2FKqFaj03MDhQ7Na15FyRKQmtjJYNtS/NZ8eRnKW6xhdYI2lv3kjIJX4cvfP2Kgfc7Gj442BcH54i7EvrUcvrtzg1CbPmlwnNH5Q88iWf9otbiIYSyVMfcgfbSYH+n6aZyMMMlcqyl/L9PYetPbsY9o0ROwlz9rf5lDYkyzT1dykzxDFzbiurrY9n1wmgzQ2jKWC5DLTNMgDri9mfnkWjFrBY30CbmqdBpmlzD43cvWR9isrekxmCSnSosgXTW0c0QjurFe80hjo08V1CsAWykYF1SJMkiD7qLBryMd0JZE3zEB1ppw3irRC/J6+UHPkEC+dTQJmiPc35EMQjBzkaSG49itlPGRuhC3Y8DgPa8T/jYFFgLRBZ29uK6KJaQvrykVPDyZNe25VIY4LuTREBglgrpMiCewbOpuaNgnV7DVHXrXHyXNElm6OcnBzfbnlkIiUn/46lQzrKbD6Ns6o8uwbEUy96VVGN3XwQRzRvlXQ1M7DDSyRJ/JMsg0tdATFMZRDLwo133hQmDL59Qe+3pzWMRt+wR4yXfdl3VxRYG95ECG5XieimB2Mg3JB7JtSdbq+VN5k4UMtuyzc4uSxgFqJgmty5wvIR3dtfwFj13pqYQTJ/IhdW5p+6Rko/HVM49evshYTUJODWjQsZB4h3/LT9AeZaMLqnWxtlalYGzTJN0eN+o58yvlU88xpoI+kM0TWAcFgHPywwftIJJMPdgXufFzzHV7E20Np2CBbjPFhm+kIETJrAvyDXgnIVoiXluZXs+BjT7jLZyCz7rqZG+s44gImUeFYO1vLs0TAu2Wu5xIuD0cu8VNApGgC7bEbQo+rWPaiJAHTXs8sZXR2GmkMZKP3H+U7HnX/Kj+GW66GOQt8zl0ePqAzgQ9BlKEZuOrhhuz0CI/TKOEzAojV2okg7lNy2ffEG4biP7WrHGQR2LqvNsnzj1JGC8Lou7liMhtk7orE95eoWCWK8W0xyLbVOEF1t9M7UbuQoeVhprQrt1liRuCh59XF3T/0Bx30d5Y+Kqywsx686BOeH6VPwMy1schl/TVOfSd8Oh6ixatst9bUmV4u4ZvwsZWWE+hDJTvOwwb0Cd7xeguQvmxzkSYWPAwORUHrMiVYeYNAGhhc246x59MOz/bwwvdVr1d+kRQqxVDEmBpFeMNGaEImh7Za3PoZjT8aec5kS/sCPts9V1PYaG5PaCxgLtHOC3Ib18WweSlt0tTO35W4S0/woKN/rhBnhth9qysEl4dvDCn5jjzTuULlM6Luk8+V1wHzqqjNZ974MkpkUsSM3lfUZQX+M+CPj45oLv/eOHD+8wMFO/QO85qYMiPURVusUUynNARh6LR43qbhKS4TMo21pQOMpkARY9ET2ewoAA0t8n9Y92hp
r/EJ+mYW37PHtfIo5nuYirDGrZllgmkADN+74q+2R+2UpesbBC9xsDD1a+sV5F1CNzwjYKlLBcTBWUOI4So6jcLN9bbPaltWIxHSicTtHDx3htEOoMlOaaidzsleLkGcYNPHOFAXvI5Pz5JZEIBY50TPk6WAjcwvyd9IwrlEt49CWxB7DPwpDHVPshx89Okx2e0E/Xcz+5EsIXnkSTIuK40UgEBA+9c7Yl6nkO6cDvctEemjaMTm9DeupsRasDBL5y1sQig9gE9e9V5bKJHCZsOQUSfHd0qk5aTiuGWpOFdLvfvYTygmTPVZ/MYcjNINqgf3Bg6B9+cNlaiKXO5HbQ+LAvV8nZCJvipz4q5V7ZxzQlV3gH/+YHJ57wTOPIopPrQqF32Yh6hVjIkW5O9qw+TWQahF043w2tmJ914x9xQj2+tHV6w8f42sOFbwDgeR0fDO9zweO7lBM5GzUOTQYJEOnZeBg7RqpDR/XM4sUNiK4prHpDjtotT9xAD3cVlsOcdV8gn7ibsy8AW4574GuHc5ZuB0eWLETm6wClyiFtQto9frdhkVpy40wQ7VODOkoOQIfdJ74APEte7eKC+PKedHZuwShw9tudSu37WYoBkmzd+EI2Ajl1dG9fOt6RKoLF3RVLz6pMEbJXJXwlcDtyHNtIZ47dpBJplqesy0+owGFF6Fxz6mwE21IPhub4rI9RITxuzqH12aEg75wArKI1O64XyOWl2Xizx2rlmVTPkGbLpaFQKNiLI1qvWGiVjps6p8bE5dS75/nHOL380qwj5abgt6HQ9kLYwyvD6FLO33pkntUFpd6vo60wOw7pVwEeN3zAEA5z3HtGaH2oFHYbHfu5xOHwa8D8PONSP77dW0pV1cy0oQwQjKp6usMUA9dMnS9LhQR5RTHgP0OmDDlBpwgRH1m0CqtGQjBN1W/n9HhbvZH07F80fjRtjVZ60yudmqIKs9Vn11OpOJ0BGIkFah8KGlJHpabj5WCpYjuBu0TYYkWvb4bNzt29eU3WsyTeW9GE5JHyQKXrfOty8C50+MgiI2kI+BntgbmanIcIuvhUJQ78JrdOqVS3cDX+8yLUljCEa4m4MYuRUsm3WP4XLj8j2xMTDzA8PD59CaPDRNXitGbyFzsYJ1M83yQiiBFTN7Ha+H8BYlUWgtmVtcXeuonh89qdFUXf2oVMdB2pskxsg2UM+oQrX13Jr3PNVmvwmt8xi7id/z3+cKIL//p/w8MZKHOnC/DBATeblAyWsWF9ZAi8ntBIXC1F09Ah/HCUWPWQoGdKm4w8d72ORd8QSC6KxEsmdf9aJMFuwPWXsoOsuMoqOcW3fN8w+igO1mV/OIBJfOyFt/WBSOc9JKno0ryvV0Q9+eZI5pf3ESriruC/a5XqhO7VhdGQkWnBpnZSOfgtwGOR3qA7bIK2JgqHwE6OkaraSAIfA6vFC82mJtP042kUvto+2RLtpDs0pTsrpHlOH8Aw+IPycJ1vArZFPBnxuhUyB8jdxuvm2N6ggbnVL2J6tYIIG3dkD8Cm1tISOm9ra9cit7PwJFaGPMFUrdCeYst3Xp6E2lkV6Ob4Nmzbl2Zh4kPXIonxEPspDBd87v6fBHHIrtEVBrfrj0UREHpiNoP3EgIbUDmIthQRoPbZ537ElrHg82kUzm/VlefUY038bFiQONYm3zkXn0TKYtvFF+EmLO3e8/Bu5ljcHApWmld8PaPxI8+mIOX7F5tMH8XUhZdJaRWaHfQf0Ztu/WpEWvytDahwY7OhMTey1U8jaa5Eznp5p5nfDwzrsaZiw6ZsMYJrmzrvy/26gxY39tiKQz8kD6T9lDU7uaYSWNPG71hlu0eT6mNgX/dMVoSu1apDOlIMl87faRPCQMuBBlsNGcTGfArAxnA1mXG5BedBjhb7NBd/HigbWs7OUtzAacr/VON+h9SlDN7/Cz1oENTToOJ0fzGossGoQDN5EnO1dVqoTGVs/mK5mClUtzYTd94K6W2Vm4H6eT
+zkBJQvS9AjaBRgbTx4XdAJFhnNO2WalvnkKAuZ9tDVWGwXn/xKMRF+wlS3hv1l4vGzH2k5UDqEDMIPVH0EaFiN3UnBYriYaUeFUk8FQQGvkeiIuWbzbUm5clhj0V3b0KkWTAj0dArhSkYeY6iGrdmAvPBg1zePXR82olYg4K2eZIZ9tYbCwfIU5AfMzdCPfsuqPOyutDzBGMfKH7B546Z0za9PJ9WBfSKiK0K8w75fCp8KqjTuSiQidZ5PzfCrfXCymme01yyPmaGdYnVnhVcy2UaJdokS8MCNNEKUq6oaSSS7RZOficFTy9x91IgCGZvNIk4XosBdiOBrlSKBwTPhKTlShRS+guxBv8IzvPEVhCSd3kQ1ny/d+AhIddsC9bjMs6xGYYs7/LZjGPxejTBt7nL4+Wh1mZJcfPQ2qR5F5A2+OCJ9v5KPW1j2KV8R0MbHMb7y7GVuRZBWrr/soK+zjRn6elohkG6KOb0LhgpYksAYNQlCW7fwW13GOV64a0Tpnohgpoz1YwvNriJj5Uc/WVDa4PCHrBAQQBR7Dm2h6Gv4ANM/xUiP6dRoL7yfbX+gGcESTXdLEQ4l37x2JBRPwSr0xtpcBloFMrCbB2PYrPBGkPv3UciyU+6SsnNUnuZgzriL0FyDN/p6vZ6ZdLPEgw1jHSuaZrH6DCn2mYbLt3onNrDcsMuE3tWaerqYkxm8JR/5nQag0BaXNHl8UjOVnrZdSGUoR8iURfMRjK0L4E75DByG7euknyVZ7qoD2UYonycE10SBPgr0NWd3sRY/RPJBxYqZkwM56qFjoFQ6I1jI5/76xCoEqu8qVWXC9kG1yZHbz3xfl7I4ABIv0NJhXao4aWOlTsEIUPrCdxl6hbw52Gkm8RQICebxAfuzTEJIP+RnehTzh861eR0fpLd4CBUlWbTzztogOFtB8grQ3sEqlsxC75aJWoxSVfDJ0RU5IILM4olbz1lvzvX3K1eYFgJkctYJke9NDvaZYZ8J9UDfiR/sVxaAtW9+gpNPkuXIuEcZ9gkWRMKRHf/2SZeZBPTEoCqMIINDiQgemg/BSDrRwGiwlaIsQsvgn+YleF149vv+CkdbXw0AIImImTOZ0lccU2n/RMy7zB5kI8XdsFNIL7HCdyXIjEdnwgl9W1ayqK1Kc87rIHQZbnkiT+a1uncse3ZsyjNd2MAE5TzMAgfLL2rE3bQL9tLFpAxpTpy1Sj9En9wcn+SItHBh3srs/Vzlp9keWcnt40QW/tVKyN7fsg6d4AajKavFD8zZiEF7SHrPlg88qkOXfLow4N8R6qEyeNYXJx1XpmWr8AHx7o65rsG8QYCs/74J2i8BY3R2BbTIwoUfvTBBviujQfD4ivyIo/upqJKGSTzqnlnO5PoSliQV8t3U4LN/Ggu6stoB+2fnYNtb9FUqkitZDZcCR6iZryexIvV2VbCY+n59VzNEjkKZlQ2e0GuPdZbktorKGq86iFCqJRnC5WjJ7Gi6KFt6Yb5bYPzaxA9vJZi3WihltzO9tDJrH+At2IN1l2VtLuy822IrFoXwSlDRFL7eED2/kq2HZlKX64eQ7gwsPtEDkEP/Pt239Kwx/frAFFSZGD4mKVgNYV/yAg/JxwcULNIsDXj37ZavW8uSNEp6lbG0GnW4obG3VJuhCr14dGJ4qx6fHxQPGLBdQeS1pY73UIefzN5wH7QevtXV0bCf2dymxmIdKg0btNYPkPneRhziFWbRpAM1jzabX8P8WRHbkZjU7AJsis1noYFN9l4FJd8NkWvGUCWzS0QjdjWiH2O7WiQn3Oo3DDDcvNqb5xy93UCQ9Ydp63BMhoHQYftleWpkyCDhFG13OsQUR3xhvDhy0GDschlYPxxuQRx58kfru6GszLyoNix031aJEGugTV9bu01Og91qmZ7sRapxGTGtnUZemzQpvhPlQvyo2QJTJDnoGyujshhJaMJnM/j08PmuWJLZi0ENkYZ+7dLZ0EFNQMNmun6cFTcOT
PCCzl/vgvgBo6g8iuZNg4055FI40TdQvkg/pZrgxjLdtLJzqrsiLt606ZYyV1It6LQU9PVdyhmbq9sqVLz5smVL28Aet9Buy20DG+fY6pXzF0aFBCzHGE2DpqqhiWhbVeyiVSQqh7tmcKC5CVVq/wAkktHQYOQG65La/M5Ah2V70TEPPbtZd4ataeuNlvDu2EUVZ8Za5/hd6afFHhtar9ThITyh7sDUBWVZlKa38LLGdzqcW/tA0lYOIfhRjxJ6tDF7MPorUD+uU+nk2SwPs4mfI1fK73Q7s1YW2BFsJdDytUaLeTkP/22Gma31ImmPLdK2T82nZ/z/YusqtiQFsugv4bKExN0h2eEkiTt8/RBZPbOac3rT3VVYPLn3abBrBe4tfZIbn9kLAFTh79ACIUrz/XCQKN4Q0lvVZPUmSlPOI8vtkNIAQvioFVCHx1JuqUW7aC3s6QO1VlUL3U1BcGuKfOCmZON8l7fyrVZGWhiYJat8qS4t7+qbe7ONDnvVkIa01xamNWvYUKdFXwCEmQ8rinfgUcB9tAn6tZ4Vrj0J1xovhv9qTCIpl45feOvAVvihx4/Ga4jjLGIwlREzp1lFPahTTsvpsafpjLTjfKvjt239rOfIQPOZLsA2MRec3dAKfYCTkZof3kthaHmf1JEovO1qIPzj5FRM44HAjCWYRCgIWN/b48NZ+0E+P1QC3wyOPuZLrF6QwuO4RFHJZKFhQb1hTsuC5Oeya18Hms3YrX7Nb2QBRUxuilzqKwcSB/tkU3kmkiuESizP9x+HN0kgU8zSbbwABCNAhDB+VyOVAma5q23Hxlb+BPjAxakXT8JfIuajLX34QMEKDoVdm43E9qIDxGebTSzUQOJWLaH85/Pc7/D1gn2DwG1UE1QUPUYZTvPloSGg2QIvWD/RWmYQiSIdqHpNxIFuIs9AjnUIn3f8uA8qBBe+r5qHRt08geoG7TdvX18SiJOXJ/hZfVAoztvVTyPG+xAHpxcKm0SpCLc9QPNozEcAt+vt96tIayb6mubZlnJHjR1qBle91t4Yxn5J+7OTNOfNuu/zhhCt1vVURZQamr6eWXoVbz6uY9tc2iuHEFF/Ce3NX8K9ji1iNcLGsu9x3DJMpwjOU7iN3963lgyNvBnRBQHOdedupkiCSNPlY+8A6CaOt0ed5R4dWFMMbj+d3NyJqAkiOkWs/GrjgOK4r5tAYuklBfIIg9qUdp9Hw46PzIyTj14jVlBwTXV0/AMlvRAXp66gJnNfp8NDv1WnKSaCui2sVcNODM6FXKh8mzarMu0D7mvHuFnQTiZsv3FJ92ocQfgxTmSPYmaTnCl+8yIItJX5t3UrUM+Y/TI+bZ3vXzLou+fDXUhXIHHLkmYM46a13LBVlATkyekSYEgk5iqfvnSU+W6F6cWeM4OXPF9VezwAPtLq72613BCS67teyGahPBSK6KaadRCIMyMVKfR+0BhXejX3c3bKtP/K+sATYzR+PLZlRna3bKt9zqv6rHIiQluY9H7hZOzCtuof4mLD9mtHgqIrJukQjhcOwGRDkX9nzbo8InF36yw6GU6eOcLSJncSgaExwyUp6JmFARKawBNYYmMTIPYkzwTRDVNL6CuXXd5rwyK9NtuW88jZn3zpC0mWS2TKJ2PGzXFkipCEQyPUQKtpXrEyM83c6kYw0SDqqSAgzpp573TblUg1RMkZXpI6D3H3An0kLgBOJvx2EUIVYqGRkYfZBcc5fma5lctJuLxb/lu735bCcVe1vdz3FbCt3sPIejjXW98FPQrk3DBLzQCV2Q822uqb7B0FMrW8ZDZuQr6B0WDyyyWq9FQb96KvSUpImqhvB0ejzmzLaOIIdHLY28Cs90O/pqxKU3C58nHXWPYe1nC8zYUBUrWP6CsuHv1oiXuiVvIKc2OCxZR/PSd5sMLZ9TCwP8nZHgmFNgqQ4F/nSgq/M9Hu6bQNi3CDfZcoobuMSTxzS09hqyDhRAZvdFzHFAFMkjKFAWtPZ
APxHSznRFXSNKRkcZNAF7bjt+6UDynY9vMTFDXiHnic1lF4f+dmQutpTzPZZ8R0gt6aQiyJwPdY+zFJYHnYj3tk4gGGwqzDS0Emy9TTKrvnSr0WBtgpGdlSQXyjgvjzXkMYInjFICMbhHBuyg/jD8HZQWv5mD53QG+A22biRnYUstFdc0A2zzO9SdtTHKqcfQPhoFzR78QUe/o41jXYHmR0cDTu9fiyOev8gTXY4HCqngONxKkVAeUOGqs3ZASiLxj8gSHm5Nu9/Q26aYTYDfwYf0GgdpykRDUaMOW1iL9CbabSrV74AdxawGki40yxoa4u4k6metGFCnmlp4M+t11wpf0m8eX78Hz+r7pi9PIudfjnzkhvSfQARdoOVJyNIORhBNjX+RFt7RcCo28J/whsZYbDiZNsdn436+6oZAxtOqQpNgLIuKEfaUI3r1AZSJSuEoXJUjkMA0xxm+FDy93kBsOU0ugtEpZlSsoUqt89Wi0CemgTeKjJ2d6OryccaUSnrb39TNyN9fs935OdN2196/Gbazdogu57OHTeKDHd9Mw4fGEYeQGUxTIXx33M452z+UMHtdR2ngfRaGtjpcOdqsLtpkOsNSVmYzJuzzM6FtJFdh6UbLCRqvvtepYa6uymYZvY8MreX1p8ccOFci438FyzeZV1sWyMfbmabSKZYNapuYBPvy7EheERAPmQ3vUcCSGIuQ7nG75w4Q3rQCw3htOOOllRkfyONxL7ZIgAHw2SuG845vJMxob8RspE/MqYgddZqXjlMQjRCe2HWrz9dnYnH9taXM4TKeHRnpaanFhr5i0/by2+Mtb+sAcaq5t9eIXcoAvJfQ8XK+4WqleRmmnDTMrAY4REnMEgpY8ER7vgh9D3TOfxlkU2iu/PMJ5uiH7q0U3eUPaYeM8s7hfuVgcGQjDaGminnRXGa6KO19vDzjT9PLx+gKbuDD5QLR6+Oc6XQn5zH7Z+n5dJeHJpwxADfL6wD96bFsKkXxcYpUV9koHyxYWYst9Uu0ZhyN+4LGtJQAsKKNx9oaVdU1MUdJ3Fwgc2M4bA0h+uS8yxKonYFl8zG9dOMd3ejCbQ17VpZdbIjVomU/IPdCTF4s5qfemmmrBbcr5djLfQ1ueCmuNqSB3SK5dkrRvUo1xb0nsg113Mz6kYcmzr/bcOMUqE/SvlYu7gQ6qsrvbBoO16QLLIcfB0qQf1gwp7bO/Goc95+lYXoROEZJWN7MMtmfzJ2bSeMCj0O2LUhQUKrvNgDm0ogQVhi3zq5wzCrJq2ASvPcJeuFNNRQf/38FqkJs3bv4ho58ZDOEwYN9jSi7rEyVcJBk/Ukv/iG8QfBfuognH/xkhf1w9WrjweWJrQC1v4Za28ksybxA6Kl5aXXpiR463N88av90unmMtGwS/V09DGGkm/jbNzubpKhvT7GjJOoD2K1U0EgHGcNIYPr017C2x5Z9547Dyk3Aq8ALsHm2Ptrb22o2EKJXLhVxfU9+uy79fLJ20krQZHhp8n4DcdC1MQdJCyb45RqZi1QnO6E9luKDmXjFMZqrigfHGNNXvhYWAFk2qjXaZGAOG9bfQv21Go37oC16FS53id9DLaMckwFSWCiLlwHI+FXq7WqEmEHopP8n0oxHakZQhso//YUhj0Ri4QyNQchKQv7DD7WPY4cTWIjPMjJsvnLbj1bJyS3i+yk54tkFYtM3tvYUL4oExpu/FE88v68a+U+gJCrLHo97WNcpwli4Ad+3VmOvkZ+TNyhA8Y7Mviok3Jlvz4CA4KVjrgCmcApvFEx3YSLLzMifTSVxT2O6fixKoKSYx3BV930w/GabiMHgm5x6WGQ0BA0Hm7g4/wSjCesQXbI3CmWihXuap6nj6go6g9pSQof8kkAaQ/H7scy7OzCOxZWczgzIf8WJmko78z6iCuvHlQp+KKpz8us44YPf7AYlYP2GLIJVe13BlTCat9ip7KKTwsNgmMrPhat+4fTfgJ9e/6vYzcFov2ZtrJk
5tVnnV3QFq3R45TH9O73hy35zbo4PlFrzGGW8SRrCpa+o24PKblpNBrLkC0jA1twMDm/DtHFf/+nOtaD7orIQ5quPFjbNRH0oE80ublcw8GwVr4M77ojMVkL1VygeJpJiYi/ZU8/v9ov2v+1usM7gZq5VAxZe2h51co7YR6YMKohlHos07YVc2PwG9H+U1AoK6dXqBDYhxzhyBCjDsv/bJXTeUJkysNvE/fQevkbwNjw/hG8WxOzKSzG8nlCIw77L41H1bzuu7nS4svsTY+TsWkoO75r6LFaERDh/t0+tYweXhiKjJknFRsHxTjLlcUx8nudKrflM5wSwCGFrhxbjVUYXmdb93fTPlb7yKt6CF5ugP5UTs03aDigrma/tDSzSyt8es57NZomU9WoH8WRTv3bP/mhfmY6+X05/tjQKurt3tw1D6I1Et4O5a/IzInO0s5vqzoT+Q9f0sUgKkUegkPlHnwvWldFP+CTpdWp2+ncg9Ghz6xnhZGHYeftbAT0CG+id2CdK3pkX4hwSh6f+VpWuU9u/w12jA8CSJFDg5pkEABG3sSE9HDboHvYjJy56FjScda7u0bGki75rxmaRyvekkFywPRvgZe0lWBNjzyoTHYMEHlo2wFZOqj+qn5Zismv6a+aIxQgXxwjQsYlPxLsTHR1Z1HQkPxx1NLUH2FCu3zjudrQg0b4Oj96jLPXb4oziZjXPp+Ge3kZov9czRvZ7Yl1uWlj32t6RLfzq8/qonWzwfkYYBdnp1Vw8i6/QjKKJXwKc1AsDONT0HWiKXHsmiILEAFlXNLbYWFHKUmMMxbOAgg4ZnWAV+BPDQhX828v47fiFaYLjfjLa5HHbyP6ttc9Ch7Hal+HYAg2YcDwtOSrUd2AY/yG4pjcirCNXWUDcF5m+zKcuSFFlScKw8HKCTUyyl0OsMuJZS2hIfNobe0NKDvanxQbb32mzBA39rHoRsCKxvXII1TbGQFvZz1TkoyfD+Sj04Z2gh+Z6xvAr+/gkU3NOqD9i5ZmCkslO/229MPWsn+hy1zgGLXVLphTfM9PY2FvQRI5CRrRL3rj21fdGbxro4brRd88wa0StFCo9noKuqJFUWszIkXtxHm+A5zxfMhLOtGMMdFK4p3scskBj0fsCeGyIXMfG/PPqsKvI/d3owxLGXMd20TGvDR5aPy94M0DNeBLgn8xUm+YZLEFjIkpeV9gS+QO6ji2N9IrenOrBuKPmm32lL+Wuk6UPy5h5NZqtr7QXJKIQeJLJYQ/Jf0f1dujoAORODMjojMN78ouhC0R+iRQ5r2ewrhwnybzQjEw0FSmtE2nZcad3nHL4//6j0RHbWZn3GFyoPqJyTzCZLU93FxxVUGYbrWwfUFhDL7uMWgYSPBvVgsxhUtA9pLV73CIyW+UXAYMrbYOrA1voPtmPhf9UTsOvzDNdUVYVKmopPsbhfaUtoDqV8NdJ4OS//qnc6imN7CDJIuF/w1YmqZZ0iEBKaO7cYu4lXB39EdTgtaBVJbmLD1q+q/0YaJ/OJ42ZM4joMX8RyZrOJinke6+TUxd60SOG/kbJzO/nn68PRh1D6/A41qBolfkex3lccRChGoAvlYv/z+RpXAUtUWcjVAd2340cWB7dADIvboUTCnfyvle/NAnOBTZI/7cpE2zVIfk7+Z9ckzcDhNqZ+RzzAbSJ6fg/Vlr2lVb+5mfQ9pynyg5xL+e4vn18/piLigWg5xxDCcLrFEAB9aShF8G86tx2MLRy/4g5B07lD0Nr/kbmf4fHDV+hsnYctKj8bJ468GCfGRP+SH+Lv12PBaTJd5CvM2IC9HYJ7zyxJ4tQqqnQgyMdur/45c9eL9mF62BBUE5NjHHgryUOZnkSK3gsbfgU9rmYK8ru5XIkmWsgML+wXCRnEVGQ1d71kIsn/QW7UWgjKDqwyHsYp8/mvxVZFsQs1G9ESZPh1xfxU/UfyN7+t5a/99AmXoBeLr4QN4B9QHOYcuCJj7OrYBoEc0+Hy9G
OTaCQKPVC8dwtQ1rc8A6KvA4njrtEeWIFa8IdsQF2Gbv7jsmMfpVyxHaNpO0+BfdoEkk82N0MQf7q8WTlJHdmfFklgy+W/mbVvCvbfEjlzGq4+AzSs78mSatwVf38Wfi5zIFYAkh/fkNWJNZwSH9U5E0E31IB7Yc1Hg+BT11wI4aWiGbeu91p4Df1Q8dNZKy96KxddvMJaCTYnrHsAKij7S2G6qKEocjXyApFQL7fpa+gxKFAHd9xCNNphcxygSObOTOhpbI4pdEj605SC0AKXsWL5PCyUZ3R32Izi3LP85hbAWIkHdCx8UvEwof1l0qbEHEFP0Rh7sx4K8N3vcs9pHLqUjVEr3VRmLFbCdBRSXMEECbwpdLKp+T6h0SoIivH6im8Gg92u0wnbvqrRX/G3JM5u7uoBCPqzAEMfpYJTQLbAIAd+zMwwQznKTPlg+gm4LQRUQWOFsnwcxwQdPIhcCQpbsDQzDLr1BbKEINSHy1Iw2R/iwyimgQE5h57kc/O9s3B4Wgfd3HaBQ1piAWEjoPzwWdzXriB5y5X9uaRH5sLFPGp80sBVGGHFuKh/Q9RGgcxoNbAQ10CjeEa0FehGvhjuw9hL12MjwqYwwQhf3YPg6uTh445nErdo6XVHRVKkTui3HW+d30GuLONeUmhU9pZ0eHz+Wh+4oIpuAf80rpH6RlXuyRmCondW15y5+f3SWxtM4tgC+T3H9pBPwJgz4GVdQK2C8lkR84HMw7HbGvzn2NEEjCOv02ybSTumdw4hAekq7d/TGQqEz6jPeVCKJxg75LMBnJG6S0RZkra/z9/L22JFE16rdVUIs3Fv3dSLanIsS4NHEySkcDEsgLxReaOSdCWoF10p3PpFnK03EKw7SvBEZRBLD3+KarTaMvr9F1xsR6EEAuTPhpqucK3EnfDDt3e4qOQ8X5xLJPdAFD3LZeqxZMF3zK3odNT8FMeSuTbE9vo5UMvoBKUhriSHw77a5U1xzy54eRmbSbu35PCKAVX64W4uddsm6MlQnX6vK5Bz2ErDoIkv7NItuGkSeGmfm0Z7ZoCM7BQ56kz2zfIxl6hID/82am7zoNfBi+ReH4GdX16EzFIxE+3pRbqoiWagg7RCnq0qkXj/Seu+iiSJTcegLreX9eeKr8t8/z7j/+vnXgNZJ6lHK6JBvlyeU4S0Sqqj3943Q0YSDhKCtTVBceDNdI6XG65GUdZtodIpWz4X/vr8X8KXDQIMatnCacXv1eHTXevsVBdwvv7INDq9VrHoLvLCssUg1m6RC1tfv4jQ9cIuk9BVUXk31dsCuYu8q+vFOah9u8JC74BkxUwkezJml+O3KNKCau3JDf575yJYFPL8Ekr0U7O0VSOB6GEJyTPhaCAkY6/D4NQwQsJl66mBss9Adsx+U6dmAaFsdEMm6A6Q1g6jh6LjlA0wY2AhzZHyw3mpKb/Lv4wm/juZoFGIDGFiuj183dKnfmQXyKzGUm4L6KsGPpuxoZAr8NNCn77JD0udA4kUj4jDuHnAu0ncXRMkcUImgf2iX7A3Si5sWWL3hPIW3hiQ7Lt8b+s1nre0l8q7xOAOpmbfu4jjrJg7lMO99bX/pxfg3IX/mgq9KaW8LrVMKBsROCcUEoijPJ7wl/A1qt+beMcgPXNYLWpOvGKVJuP/1N2fRVFtEZmoWQoIKQiDVWC9YnhpqEf4L5jZNkwNDyFHZPOUPUn4cj07IF2vmKhrewE7noJvq55ZmT1hSR6eVrT99H0tQCPxq0mzpTgbvKAzE5nmTiMHSPbpjnZ+05lQkXNq7v+owOmi1SDxLDl5+Kwg+uoCU7syjIhlfwluAMdG1DpnrmyqAxi0NO9IC8qI0BlOda/Q2SSpXWgKiM1B/xz4yA2/fMUGnUNcVbrH63qBQZrQ+s/zApnyT/bI5ihuGIw/PKXNczD3CidIT5xcCjRAonaujsfRvc70/a0TcKUCuFv5Zs2YF89MFa2jzPlXTfPSiT05Gywb7sqXUcdCpk
NZvP8fQ7L0K/8ql9Hs0jrKiBHbd+/SzhuvLzSg6WTB1Lfd6GyNYuov2I8PhrWZnMuHaIAYbvh+gBegjfLDH2QDaaepYjb0Qoq2BErb4QZpwAr/jrjUYGIXhSoN1pPApQqXg7/IQrGn8zSctO2zYJFP4flQoXKqlX3r9u67pt5+pnsaT8v40d+lrYvVnPX7iz/e5E9rKlf/9W3QKjon8HsOgyqPSPMS4HQYE36WxyQI6m/AEjRcQiHKlva2198PpHjorF4ppKmCCii++tA/2vllsjFoys1Hjwa4V20Q//b0E89OPf3fn0NxMNJ0OCS449OWlMmHjUpbt/QriLmND0PETZLSGto3WosTZnDAemPhYdKpyP07A6rcbTz1IBjZ25Neih9ZSM1YDTC4ZHW836YuX6s5vHbtxdm1VVaLLgH4XY4mTdDkt1B5nYMoCi3dWIjsoHs6fPYw+RwwnPlC6UuSlTv5GMB5rHnrA/nC+WjjFo4NykweeYVcJbPD3NiAC3/96X4M5Jy+BuoOuR0WALXQkm6My7Sx2RiiehmZUtaK0IzVVmyenYz3vC+yaNrQZvzPHtUMJFs7nr18Ddavzu5WORJcOcc57H2povybSDFxnFDw2uyubJZd8q7Xv1e+U8W6g/oMLiNVmmyWBb7N8L321SrwJzXY9BqITiniU4R+zm9cJBx0cYPcG8jgHdEzvMQxsoge7MCDloJ+vjhueuh2ZOl9dyv+MlHVTRyAjK0loC0wMpgxFwlRK6uIiqNGe2jwu9rulJ62P6LDRKdjrkJ323EmzEfwSTsV7m8S0Wb3mQVWOo26HEAnEI7q9W5+Vhyd15sEYAyAo6aji9rWonhYQRmuGHAEVMwKZ8KmW/CkW/WSLbKbXRpNEoIc6sgacw2PjxfRObjAx9kt67wb/7oOCTR6vVEdajkeO8WC1x7RZhw/kX+T1xpxC5esdwBaYFjOsGLbpg6CBFMDcQqhEU6c/P5AJEwYYwKY8hHRtVcKx1zqSJjh4cA8Y/Vp7hJQgOmJbpRaqaEtVM2yp8Le0XtuQ4cL+eHIQ3YC9MMVJUPWvxAy9KX74CVWA1W8KJmLNTwmsXxIWoc0gIzUeXdskIgy8HXlJP8shOLhgQ+PyhB55kVAQ/en6CHVD+XtThCuO84EL9gk9R5kBKwU0ywkwTC6ydM9pGUoTDLeO211BzRmFmiMw14rhTXzPvWyll93Tlnrnm6c69kj5foMMsuBzGqjCfCD6jkOds05nuL3Y5YRBiABqCoAhS3ByxglmXo3EiDsTCrK0kR3Q+9S+Gv2WblmUYLYChRlh7q51HYsAKvse2m5imx4M5Qm/QmKKjKrzVUGvx1V14P4cs0nc2d94oX7TmifK7CYbDwwmYhE3d0/Pudtxcac/8cnxarlT16b8eHMULSwP+NWViKe0zCJ6Lx0PMSWqdieAiT1b6zya5C9THSmgVr0HM53raSTsMaItZDMX2uRLXak8YaSxU2CVzvU9Mr1JRS5P9KfoQKBNywTVp36tWkhMLpcc3oeRDNoHv+PbpO6cLOsZRlojdZuNfxu4xLGHaNI3pZNjLknlWTXOxJ/J81WFIA1+Ddw24fKRYGWRhe8/DdzPZEleQlN5l92kA1d7pQgmxE1u/33lyOLO+BWgWPGbhA9THCqT9A17U55x0SjGLdAuEGPfdVxzstmSQYQxvlFehC6yjRaMhf3XtTZfedmIcoR2a/2W1RD+op0OjglhZFsoZYPIHpKjM5Xvkaf4nOgCRDWiAi36eT61tSTOvV9aksuKWQsWLbLtAUOfg0OJF2L1aieKvZQMokTQeOyCFPF2nwAfsTGBZ/KH8LJmirdrGwHXDtjmt7jIdjqtO3jrIRk1kas3JfSseLgrjIrC6zpYSFFq4sR61UZUSmR8q3rnzfd8pN+waJp9oDqxvb7FpfqPzZOQcvpCPXYNt0HrjkVQnoz01Uxhg+SdYQOxYPrrq0vd2u4Pn3tFH8FetiXTMdm0yhZJZ
xp9o9MGL70PbNCiPN8BdC8/dBVe0omwCxBZLW9EdU+SIYo1+a4tqC29BtSieJ2Yq+g3g9x8IZmwKaLZcS8oF6rhfLOQTxq/LTag0IaFPfxVzqH/te/noT+olD9fizKyhyEEQ9Lczq+CAdo72d/SdNhA1IJMtgDX7wUPhmtpPJ002aV6lPwBEyt8KQP+VslLssBHr2r4I7CzaYxzFuHbKiKJaALQUGNwWdlRKjF0P2pV2Vh+nNCpTnsd6qkphZKV5Hjzb0eeaEudfpkDnhM7lD98Y7p/hc4hstgYF0SYBivH1YlRzLVCjqELPRrEtG/VAdRmSDtwkTe5tdD55WaFZvPrShUjUpIKGGOEBuGZ7U29Q+Dgupcy61Q43Olp2Rfxm1pshAtzylH4JeRmOoSaMlauNWnAcxkSo3iisktxiuIE4kZyaB7meRUQzF0IvM7m5TBjQUsNGiUqpYMvv8j1jeRu5RHuibw/xY5Ucp7HFQG9vg/58mcniRjUICkZpC6SL2DTQbanlDzBuEnGq/FzIjKJjh3L7reMZCkHGIVlobvzlmSCNnv59EN+8zdHmD/hsSOefO/ky/laWGGUGC9cExUMfbCPkadSzAWTlOHa+5VOEK/db0PYtjcvITbfgdp3S6WiafYjrIsnKOfl+j1iUMJNk82Vc3V0Ms6naYBVzPZ+3BXsmqNnO9ZRqcLMlDf4vhb9KXVpylvz6887nBHSXX9tPi9ir+wcIwZ9nBBKkPWa0mQo7yNEgPaXBd3rOel/3Vyi00pju30AyLVdiep2MULCs2Fykyje6vWrDCQitGzqNOi70JFC5tqOuCxeqRhJPMFvjkgwNtabU5Ddw+oANzLPuHB0QKHQuSZG9iNW+d88HpNjX/0HUIQBSlY/g2W4Xa/f+JnqwPaIO32tUNY45rEufQCsiU/n+aIt635gCpgOuWSVfPu/Htk8ToO6IBIrl1O54BrbqjrTixKGrb/Web7bLUtFyPdUnDAaf/MCsapQcknrj8W5Yo+ClB4JY/iL0nlr10GrNRtiuBbDbPRgX5L3ezrD3IGq3GZHc5ncoKFczQVN6Br6TFTEMChk/oZIwFW2ddCjpFphkX9ZVGXik1dBZ4Uvr66hjKYIRnq3mzYtvkw/z6lgkQewbLlotlnZxfFZRo41vRD016SCwHQ2RbwoWFzzAm6ovvNIcOhhrXo4F9UrP+0tws5MNmPybQgXVbiDlRB0YSKf5rWC4iwSaYTcUJcz/jpZDb1mZqg7qNHnb0i6WbYI+flJBliSFgpfsa0zVCgd+r7dWLW5eRYTkXBw+jH4LrhIYx+MPEQH2ZL2xMbgh0dzwfksW1u7gptyBdZLPxABclHAVG5aR8UjvEfyNONl/85fC5uyXwxB57P9IUHDfWio/FhXlf4N2xkA8cvNu44a9ej5WS9EIycIRQxUPr2skgvUBUFe1T136thZXyehREGTi4GqZf3Ua+KAM3lfPpRKiOOZsqiLfQASclcaPHD6sFkw+ToSPs5lRxazaLuBSBffKYNL79I36wn4wBD+UAGFWkesfLu6rd6P2iDQvkdNoKUVlKnWkM0HzCzdzq/k8tXY1RLke1Jl+u26MbRyW3whZnNSRzkGS944KA/v3ngv50dIv3eUa1Hhuy6M+J5oow0mRJ+hbY6ay8uiSE1HimrmXMyTKuGvY2aQnmuSagYdz3TTzqMQNMtussTuNGFIywH6VAKOmUmfAb+hH3nmiAL3im1DUjpfIAN9hf1np+/nHSb3/k22kZ+nje7pkMoxxnDlYHNjVSwXitIfiGyYDJFfRTyvMdg5M+t36vWzTOj2nGfZfJNJhjZqgKix6CTObiYQ86V+OcZKPiT7x1pgW8m1YLzPiqOl5cAgyb3ONxASIc9gqN7ma3sQdbnIYNq/cGxW2lva6IBCbhYFR8bd5f36DdcEl9MKc4zhnYa60sCWMiiI78msM+XUKnpApBU1H15qPo10fxvZZLvaiEyw7liYbJcuze+Ne
vwUyzNsYA7iFa1oerWFFh3VWOVFG4PfgbKZj9P6HNDriH+BXQXyG3/DAK/vatWkO0DR2kTCUvVlT+K46WpfGr3whevKkxK+iARxPeGjGIVw3NMwEXRIm9smznPV0b9oBOnQtG4i6jRDhiJbPN4dmIQAWu8FxdbnCBOgF+rgxDv4jm0egF6xCN6d6VHeUq3ourw7mKNCbC7bmGPdvW6QweMJXPMexnmHjUmbkYEhjbb14dwHn/b5DsDeT8jo0FuHvGJ5pNMXsTTmp4qH++ot1DOZBxORRAYXSF/AnXxC3/ia5b3YwjdwjmUE7Q/EE830ztmFAFskK1O0qU7eV/poEw3eAUS1viTbfiAc/gIGa2miJAo9122yuHV5yMToDbs4UYbhKHafgXK1JGXtrFJrb+/SbZ6kFAQgSq9CdY2IsxwuXIWSGfthhDjIo3w22lxyheZwmx7TvQYHBUUvo8OKAMj1POY4zscHvCgY8RIiUfvVHAVX+VqvBpXTXFwXPk1QHEnFh7qjXh32xSe2Mi7GxO3bJz78rt347EsLkqIU+lrl7IqPDzUpAGw/xBKgQ00v/aTC1WyPsLR7fUQlHM46FCw1B0Bpt86LoNyElfNdVXTpkvYDb5CywSH0a2ifB8emna1rPUIso0HTC74VkXDqHTSksGkWTmc/QDnwstsIjMZziUkhin5dzQCl3zhJLPSaC3mb6l7jERmwQJK1Auu5e9p5y50m9AQtDOTjlv6mNv++Sovdw8TE5VHO2FacFjDCxRcYbBxiRvQ5woUWiJtPrA1470h78TdkZgpGq0bpA4BYdCV/dyTIpGUeDDPQYhGlE0FErObvyV7TuVTmBDifsvgeRlkvxgN5Bw5Yd+agL4VwHXiR21CTb6kbaBeQJwSG9YhT3i5bJFJjZx1NBEqZnXkAcMkN6g6JPPA58KCkGj0vats5AVo8YiPWlYx4cGWgAm58U9087uv34V+BVZTe3XRzi1+/2PrhffJxtyLP5eMVpvX9N8BRyFUwHp2SXs8ZWP79Ai9uoJr0oNDiga9CbGoKu2YgAGPOXeY5jgn6WnVp4Cr8qgvLZKuuFs7KsuLXUczIRZtEuJsVQ3UZYW+SxIsfmAAyGIYcGHvNhjR2Cwh29WgvfpC5a96P+81ylyr0/cHoBgnlYUO7i/jad/zxDfbuaKbzRh4UpWouNG14yIO31awK/pqUPvEBtzA0Re7sOEN2+kFRSFjna40SYMfYLIKqhxrtj8tZiCCouTUplGtb0kcNgzjycBrdBxhcEYWctI1PYnyEsiG0X4H9+gLI5UqdD57po2MQcfxYkCMGkXyWp/GeDqXFBadhfYEB2MnwFY6nF2L7a+EI/PK8hBq//S+Ce0E5MrSVl9p99WZACM+qgWykjx3fdspf31WXX8yxjHj8+ZvO/gsT/NLYpo5+aIXCGADVrgG/sMRdC4u4JtOZ0xsckTetX6Re1AlQ52btu7NRjnwxy7uArBJ+GGc7YTKeUsow6koSi7wZSWuhWCBgxxr63zAMFnEJ/O1trap4wUMzzynWX+0Dd0oXYDB4ByTCtNfMRSkC7MCSq1C+P0lTtkGujzdEOXJxHmwvWdYVqt8GvHtEJm/Sgr+ds/7qG/TkNLSaK+TlKtP2F69G540OwBBOShGvg1o5C7m253tP+2CUnQ6gDL2oJ895qu90tWPPWO0gynmzn74vHlr2/EDve7sE/X00bMS65jtNNwrlDhB3pBHPr5rsj+25us0Q8ebXrUkMcwcPpZXr4ko1NplJC/t/J8/79H2fZDZQYGqfAmr1ooeCT18KYpc8hurrna57a+Uge7K+PxN4b7kqFCVj6BZbME/GMR2YRdcCI1CIWEzoriTpaUZBYIEMpIJEtEucmMeTqrqLIpaZNJyUfqBteFOU4U8FiEixOagLWmPppuW1QtY0835Mcz2hyMPKTzZbFWmBjlqz+LQn1BqlK1DjbfVvAhFft6e4UuK8rWSkX4O2faG1X0Q6T3D7D
NA7bv7icitBA0Pnn3HMWfNvL++ZxjESoePnccIEmvE5w9i1maR4XXJFgmpU3rlIk61kEfbjlNmtYCLw5PbY90N7zHEIDtcssAp6VV5Aph152A1SUZR0JjLit/HZq8qy0Fbcsb8r2q7aZBUhNJE2orEts3OzYftwsRZEX0pitUC/qc+50R5OhYWoZuIfFd4a3Pr039ew5vg9dq/dQhTmgCmgS8hcPogj3WPmIAUfLnf/AOumWJrc5LUuIwJ4BXa8ch7CGXPKZljGQjFHcCCg+dC+jwXERjnKtJc7CdS6YM540w581F3cTgu1maS1G1Zg6LaBvriCJaXokd6yN3V+fWGMwNj04XXAqlC7qeLvthiLDzAd/ssUfFrDsKXyKpHPIdoOmBlnugV5zvXxMsPo5qlmRA7HdDIvcZVC7VPrak4yCjA+4irHfRz0QyhKcS2gxZVHSxoER3g+zcwfExiTgEdIF1nHAPkaoJx/wLWJ/YzhPs0TAxBsdRUUPqPemXyMXi+5r/xaxq5XPIyo6WQ6E0m+yWvTWc7yXiAL0Cc7MyYzfs8mNE3JO7v0HE99PxPaMvB5nwl5g7qL0kfkW0j2KL+Uat2zQru3FL7K3qJfCaHwZt8bXZ7M+Zra8aLSLZ3mtRP5JHlFVUmUuwnXzMNu+l0lb2A7qWjPbLy3wQ6wlffYhOYsgNHDeUMjE8qWAMIVtIjgAJyp40GBumVtDiokTgEtQ/eNaP6Gr34x0M1Ca9Y3syJGJ9ahu1NzxVWwZ9RnrhTR78IZUjVLW8NEJ0agvuVah14JtgjFWw3B624DjdaDucIS26QizztYhygfT1Sm4ujXd8BB1Ps9Qoj5MMl2IocvdCnp6p3Uu0zec8qOKH4mTdj7qLlnHmniJ7gi6KR5gDV18xuLTsbbLzeW9ZnH92HctQNYWtABeCOdw2+byJDHKYNscwaP0NygDGbsZJZB6jVQsEbbBYVa2Hp4DpBPrB7eV8pvnoEzBnkGEyzjILTQFicwUl4J0ElDuJW0nyeCl2SXfZqIeecEV3D7m4Vk6zEKfiRF10WhAa1I4jX77Ombw/JyGtbBLrmGod6SYqJN39KtgoB5v3V2CA2XTK12jA31vS8tkFX69SHWllJHP20Y+7evqOL/dqZKFGcjEJWlgJc25JxH/iQcn4ZTR+MzNSyLYwNgSBZU9L5ywar0khK/aIPq+EseCc89thr1lSww/kYlDMmBzerx257lsI8yVRxQFDrAgBf5NTBS63huWELtnwROGG7VsaF6Z/f28Ww4QqyUtdCzeD8Wmc435yM8XNCvSK3pI+8+Hhflgo0YYHvRi3F/+4cFz4F4Z5+vrlHsAH6Uhm2xiBFf0vFVrGqYsi7W0rf/ludsP+H8I6yaiL5LCqyWfTnChlTdUbT+mH+FF/TbGruARbkV8wklIZdJBJ+GO2xvx24jkDPKHEqOtIrjDiJtHCxvISFvbpFF4b0Z7q0TiHrVMOh7FEhVaAX4labC2mgCrz/Hwt8KxNfCYL+bQB7e8dl31dGdLpHit53uMoYwfe5wloinylpxuSqUS/SmIQna1oHyTUV+o5GJhb3f1YfWjjtH3v4W14iMAJZ/OsorbwOcKHx/xPPyuRtn+9xzVfI0sFQ4u8TTkRK9aLQVVnUaouHSFQpvA2z9oo19tH7cndjn3zXZ7ndNmTPih0Mm0cMhvQySN6ay0+rFXYTUxFjSQyHOOM6oqe08GluPw6YBw3WqVViEkYoHKJ8S7aJQvb6o3DiG9NsvrL908C0WjYFPG4WgXXan3Ud83H/+P4+POsA2ddzBTlfFF0Gol8ub/EJ+uKWRj/Xq2ggh1wJk/VjB25bssfe/TWOV8q0ohq3sbFGJrRdwN5pirUlAePAKUneRQab7VefwAMhsuOrs3gFOyMpyTkHLoKKt1PCXNQF4hQmGiWcrodBCHDmGUmTL7ziPlw2W/x5frcXXcDBYOJ4pwSFUmicplmtWQ79PJHfi6eQkpteRURaLn
t2+bkd22sen6bGT7+lOLiWHXuZ5lkn97yzBmkUm1F/7hszD6oQtLjDduwwpjbkIqNmPzLSYgJtwG2VhEGkY6TucjJpSkv6uP956vlpNr7yWK+oHfCiBufAH/7dQUzUezymBEgrwizQaqvu8E42qZutnHByerVHuN8iKORTOBGH0fszEm1dRfQEKN93dcOoAl20VK/XLKh2z8HDZjnqM8Lxi7d/u7eO3Gpk5RJBeuz4l/PGvgN5m4eV5PWp809Aha2NWLBGnHNcbNI08E3dZ+xWVBXajOKvtZatMzdb9BfN4gtYnMFNA2D9I6zAZugW46/8CBdqrcmVkrxnepS4IpxHIc9YS9tTXw1lFaPh7b4YG21M9Va2XGPQqoC3Lgi3qI9gYQKyo7uuNqQq0ZMG1/RpGCto1rMLMnDqGQTun+W6u7t7EdZIcMtR4lWPBtlOi/M0pbvMg3clVGdaVjQIty8GGY0HQJ5UCQhSof/tO5SpjuEqXApW1RJqOA8g8aKOY/2C2+gYRs6xBUscwyl8M02tja5ToO/d6ejFROZVLcPexmYTxXbvYiCc+pF1U2IgqJ7cOt0xmRitGBcujQfFtU/3tzEbr5+W51/AZ3l3RrYboQuskFByHA/blR/DgyWkCGkiCOvuV/QidMMxOCqsfqfSyV9D5vOhOJ/7cqx879/19Ow5pPo7FyHLuuZ86JqSSSsxvfRGzu48Ii8zhRKFBm8mkr6MNfV+BtG39WW6cedZUKSbmmOoPUOnTU3ModcLIZQXBHyEr2t+CEj0BvRRCS/M0hJQOHRQwSxm6tPrmY7E4eYBLhf1bl800j8Cxx4tDgFGGH61ZNct1cd94cVXWvAqNxMnEG66FyKJ29J6LYRAo6ijSb7MLEIYxvm4Dh/H2QahPfZkN/zq3wkMQogmPx9AlLx1j5L+N7iaA3kCijg+Aeue3QL18lSAfI5kKl95lxByERpn9uLzTX2Gi+27gL3DhQFx6dmdXwC2x2M+MPZ2uQL6rTOO3jR4CfEjYKZxr/7ez72UD2zTCSMXoH3L1Le2qPiaRhAmL6F/Olj0lKJR5aJ2gfe92mlY0DsA85w+ybrLpcwJWmmSN+GpXh+io5jnUO/Tn/rvdyIMzAXUTmD81SZhvzB1sTRmiCKb8ZKcPG6JW7sDrUFfUYIVfGOFg35J5IuLtC0iOQ07I07MLtCpbFDJeM36bRmupK6HeqY6mqHG0/JYrF6v6jUWNpFRtJMutLv1WX181+7czfQZSo76auQCdBw/sz4eQPyn6j7d/vUKrT6LBponrLl3Vi14Ce+fZt6xbmsuHNwBbhitqVXd+fiUIiYJ+Tcg2GJzpjQGvMnJsv5/TDpPs397ufVFtblCsvh2TYaJZOJGh3yjJ1wE4/AeRw8dQnDACuhP6oTqg4F15LKZf8XzHEQTofk9OIUXsDP+3y+61ABjE6C0SRP1YdY9BIg5QCbof9Fs1tfTertzFLzcSmbBb4h4YkNBvk0XgXwzcL4r3HJnKZcc4hLYwWX9X5ZnhuaoMvjIS1CDkhtfpxOOfr+kzc9G5pPDmOEr6UAicbn6h28ySlVI9uR7AEXAiMfzPFtgPTmJeD/gNoIZM0BNKKHKiQ+UFsTOoHgivy08hKfY3otbuOPjUwvBJc2lPQ6IxiTh45G44/q51AFf14lgQV9z6Xo0+03KW+EGBSdntHsujQmOjM0inwUUeRT2oIxWFWZk2NSTP11djU69A274f6ucLYNXfWvbnd5kHnR8vw0qno+PwX5kA4B6YBs44TZQxG3MM6CvAmnHgC0QQYODcQG2FJqNqU/vNi/WlvE73ibDunf6uzZ8+Mdb5KK7B2TG+TA+8FWpsnr9mWymrZFICpL5xIO70hHD+1b/HAQxN3YcNgSW4puVPlNq0bbjng+SO8WuhHAn/Rb0+rv57fogFW6sZaJmIbxWRefi21kf/PZJMPRBMeKTe07fHN5lDOhHprbbn3QCPAu9d/DNPHyzQ0pN4u1XP0
pvLv5H5MAJBsE+T3tJR/+9ZvinBU9+MWADPQb+PTVNB0LS7JAfVtXT+hTztfL7+ixrDC3lbulk1OqRisUnUTP+cDkni46V2Qw1JgVujVdKz39X5sNyYFE67pdF0fvu/Pd7sD50kMjubP2tiy7eo+BtcJQN6nWjXOrmYRKwFlhVqGTeP51sd3myIK3aKJkYRHGIqmNsU70v9YT32HEqWBGsof5uH68c/2nLTkQTS1OU+hdsxMHoqGA6AK5lQGTUPEI2wDd8NzNkEHOQ7+plSVuqX8vb0ogIwjpE9zm8avagPMrU7/n1/JxgYXuFLbVr9/odwK4UBq8+jUe0ib5nApHnSqlMZLNvkDkpL2aRwjwtHsIZClAGWYj2U/U3Io7YVcJvQf/4j0bVdmPAqHUaajBxh16zYiZp82H534YvnexmPkiRR4TZhnQctyNvzyYs3Lb8y6wxXtd+00ULl08TFYAQCNI5kSSp493GhZQ/FvlZxp1olM8R2TzlBrg81OfcR5DjgxyL91vkyNWRVjMvwwOL6gvnQ05ua3sJhlzlPjPNH+Bgr6h1DW6gcycVOuNLyTFDUMuqvGGky6+MYd14B1xYJEZ/jW/B+mcafUq5Hp0+2wiCPtvrMIFRSe3DYG6zXhnmu6p0E7PVpVshKUyVRpMIYfT49UBeY6D1R9aQdhvcyxOJ4jUwheMFCxxyUgQTQ8ff8wJ9We7q2n2GaMl0d8VYn69Mqy2upYsYjaQciIKXrFXN7TVJ66hSZdDVuxz9PnsTa0u4TXHPHbyzuGKZQ92fnXw5QyJehvsqH+C9H2fjdUtE85v4U57mLYpX5wPeEcBwWvvHI15dCW3QiwmhovAgvXKzvaU4OZeyVirX98UCQlmTt2fhxU/YAyXew/TYocPxbPZ/CUVYlx7hc3WuUAtXYWvZOkeUmbvfQYLdCtrh6WKxUn4ULklysMG1K+Bum28EVx4aFMk5pjTE/53iY/JEdsszUhTshNeP73e3Kj6kmdwwTh2rUM4wwB3v9Nl9lnodvS7BxMGSksqwP6QGfgxf1Q/9gl+npqBd8/lgpIwFOzcu2UCB74NrJmznLv+j135+knTNQY2eaX67nIeW1io11S6gbb+J5Ug/kz4OAJDIJEUSZj2GrSy7DlJMD2ELugYDPoT6U0bt6BE4eYG301DabSVt7/geWqrSe3mLPRF/g5eGXRqY1T+FQjJfhdy6U7/m5M6ddabFI2qP8e2RCBBpVqRPtOXb+gGAQBgUFalGL527rFpjmyaMfJcQO+lRVUQu3BJsex7uEpVYo3p2H0P5Wm/PDhiUHD06Tp39Sbv6nvevqchMJ1r9mz7n3YeaQwyMIhBKSkEDpxYccRA4i/PrbDdKsPZLX9u5od73XYz9ICBqo+rrqq0DjWmdVmQrYbnKemYrVzSM2SeGMB/yKEHXcp8RmZYx9a7Kp4KMDRAlmwr64wM8RUzCsvI+2gDlCByhhUVYtXDjdiz7qXXC+OmM4rlBRrkpIoSFl3aHtfZqh0KEHK47s0DOI5+gR0j8ptlulgByrwDkEon2qFlSdzxRd09ZiuZ7XaJ/dEYDtBGaM204rNNYm1B6dngJBoi7zrH+FjXasR+KgyxY+WKzZAbC+EeZSl8OKkbhDdTG0XVuouzkCEJnt0PqEGr00xvq4l/Qo0eX5ua6a9CLSGHIZA0cXmIRMJpEV1G0F4t2ilB3ga7ZnoSj0ya7hcPhgVq5lcj3v24y4ayQCLCvHj9y9z26JOepC7ggLN22wSeqRhR6ck5zJK1yhG/UwL5ztsRZSZzIWYr0mUolbSK5GDHRxaoFYVnQVGc0UBKvd3AF2eckbB+60WIp98Io22ig/GetLJxGnsLFGM9peaygzIWBTVtLOs3RsKFyuTM6rIVEme0CUynRRHfXpglw6IMSb7sLZZlMc4+5SjlyqQIrRTFNISj4FgBEknQ54fpphfZKvGHXc/jAagm0hOQAyMOLkaN4vpgPDCpjev2Rb/lKet
84YhAV4EiI2JiCtS87zYKHr+9jfkZZ4aBdqx9kbmmd3vbfnilFPK4RmfnGmNO/NLQ5wp570kMwaHRvbfJ57K9Gnw3mZXYJdJXMXJ2uDabZh4JrFXgu84TKYrat1S1oDhwBeo4A+l6NmRpzkEXyVDdFUB65fyWKCVHzgMki5yHaT+OKcO5ZVt26OoUd3E4UVjyGixkJO6i+3Vpwth3wj5yRreOeKnekUZYI4g10RuOABFC8wlw3Lfj1KZdYUeaPBhoXzJZXsWN4B0388GKqqRnhwcFTf3+MpPSdmhD9MzwnXQieh4GpCgPloRhq5qhCc5S5r6hjKobGlO38GF3ceoyRwtwf7KJe6603EqkER22bP9iU6g2BUO02R9aBxn5u1GiccNjOrXMWQJRhbmP0/O6h7TPBl56ijIly0Szm24MCTLvBSXdu5l03EYfKWoC3NQxCqiEvqoK4Ke5H0jJ2vl3BeSkHiuWLKLkeAnh9YLDkUPLqGz8cAhdkYkIDowWJxzwxhbb4CxAsztyQ92ZSXZkPRiVKw6hBXlxy5VhVObxG0Ro62CgtjPtK58dQJRilNWzvEzn0ervMA068FMW3nfN/0v5+5S76JqFRTNxG51pFjXBLMZtMPuwaCBZd6DrvdkjzbEmmnu7zEV6TAHoXjpTcj4Srtx+kfHgOwG0tIhgqYzx2E8QoSlj5vwsoqpSYpTiy4nguLoz5qifUmiFPcAufdhZZITsC4s3Gp0lSH5WV1posMnGXTXM6GLXK0uQe3so1VdcuaaLIKTGw21szeYU2BIVmDmUXMZvbWURFv1MyDw0kJedlZ0atjmVVS6G3Fg7vdiq1t79F0fijC8RwRjWVaLROvwapop0Rh1vG6NsSBgHs37ZmLje3MtGAqRo3CyRjfrFNRwQuY95aJCtDynWIQqb4IOktDpsTcsVJEiTxCZ1UukAIKPuSpmFIpYhuv7t/u7o6hXEfQBYxX1IyddTNaDztAlwqPlZNF6511o8h4KaiXGr4s14c9lQNf1s2NfLVBMCpY1+wyqa1J6q3nRT8J+ISnZRsQhKlvLNSstWKE2rNn37Ux2JIvsYAUySAOMzbboBbaJptuGNX2/ON+a7rqljGUhjUjvYoqZHCny7S3o4FMJ+czd4nZdm96M1Ovdow8JyZko2e0ygnygXUmOpLkvBzIgP6bEY8HmCewNXwhgtf2Y83Afg3GyWJZY3Y8FtkKHGuTfkAcNUwqg440aZw9+F56kTuMzMa+XEsu0RIivTs0pBVfsnNzfMtbCbMg5PhAbKZ0wy5z0SoVEtOwZj5fXrYzEOFnqrVTiWaVw0Rj5zPbnOGnk4luRFVco+QktdaVspyIw/SR1BHNGZwiza1lfijJiyZo/lmqFUuA+QZR3WMImH5URuhY2O0BnXHHMKsWZJ6zaVgnPFaWxjY+NcR5U76MADuWlgIRh+XwDnMcVaf4iHUrIdt3NXKq8pY5JFtJW6r7ZKEG3IoODc8qg2RCyR011/nMniZX7jGWm6Q3vPMZkq0DOd3PtDTXrVKORtUYevA1xlyOCRbiF5ESbHO0nuCVBXS73ZqkOkq1DlylCK+yQXx9uMpkZEK3IRvRBhMWsGpjuzSqM+sTmL2svlHM45qz0crwqRbflYtL1k45sW+vDouxZLYIBS4FO8N+/yz058OoIkeAeTg6usulpKc5ERP6+WyWFWxq0QXOUsmoCsYufdLGSwVD8hLIQZnWSdRNZLo4Z65EL/PxitkNFohzD31+VtrUp2DDWtKxEnYwNJi1Aj21u4ieR95O05bN9LwWGKLEJ4xS+8b8qNvnOWf2dmdRVdZsOlQBlIXrwSqAfGm9/rUWhLVIc5uv0MUWznDFcZkiGx3jEIjgvIdiyQ8nXjhvcYWgSrzwLwc0WzGZvDkd+xqROxP7uoIWdp1rFaOgaFe7rWWJPGWnvq5faEsKRP4Cc4DlMUSRXI5Py8nF9rfzq0cYAs5ZYMbmeoty9TS/XFxlfRQ2vGAb8
WbTtRvqiE2xVDvClAd37I+CL0/lxh4XoAruHMb7RYKvg50M2OD4uD92qlrGzSLOm3RxQnalKZqbIZ4KuIznaw62RNhrdzdJ65OQkLR6audjhtt560tZrkPntEjx1kbTrRouhwOPE5LWuVEbbem5sQ6QLmxBfCg6GId6UiCXq8lmYbGrfIYdNlmKe8144DsyBzNf0wrwbma+VutIQ9CF1U5oTqsLNNrHA4aQCZAMQBbjDcDvuN0aGOJ9mK7dlGMikuXTeOiPUkZRjUgSnYUbdsaqWClCBidqw3XWElp1Sl5LHHcxYJ/nKrCx414Tl4GIn/XO2TckjrND5rfR2arban6WG40SOFZYLqNkjlAHpE6pGToyy23Srtjlfj8HrsowzsdIsWKqrJH57CCV02K36bUocDmE6tStJxiVJLM+IbdbhbOuVVlJIETc58L0XDK8cLHC0WTDO6yxU6Tjzhx8gnBwC4Vw67AzZSKyVyq1ADTZGq8O4kwZyfVe785W6Wld1a0FRVKUwUnNGSmYaJy4TcomW18ihzWDM9BDtkY44rQHHGAlyNkirYEAl0Kd33wQNy85aN9dcFOLk2SvabE8mUcSVXF0E+2xyNtISCS4LEHmxy4olpe8QwSAQ2lLyV6/bED/N9q7PVPcLIL8M7/D7nendu3X8tqWUuaobIE0JsF4jq09dVZsjP06Qbplp+95qRTc+TWuEGvADzKRvvRvgIcl5/xckludmMs6XKWAh/EUNj6WhwTy+FGUGPou5OONe3KZFbamcC/2AFdd0Z6heDOh94zC3qOIPTcZiZla5tQMQ087tdQ3s0ndv0xSYAlsBXRJ6iO2X+Es6/vHLei3zR3igIiwc7cOiPSgTgsSF4izuhlq25Anwmsu58EKRep1N4pcp+ds2UXTZzXXTFWgyoo4qmzcqPHcA4YSMi8ptYl9dA6PB5SVD9Hcz1KZMUAk4LSQvfWNi9DAZqHhh4XJK71j6StFHN+pnYChwPPNtSzptLXA6xPPp/eXUWooYbzSajmo9obnK9OE6EWrzGHALXL74jCeODBbmpNAI/h5WTr4aA6Qu+JncYILB79wJid0PtnMTGI0ZEY3Izh/14Lhxig4a7cVA01xtcvEaOBxm3h14MogutDTaxW0T9lM+4ztScmbMxALFuarPeTJy2kWnJVhFUXUFLyKDB3H953xSXeR1RVTEpQpL+1ORxVhy4yfCtoGBquRJrpHh90rRGwRftziSXzLrnLrAsZonGIvM4FgLov5cTlTUE0yJyeBFE17CuKcUSy1hmYWvhugvrTiBnwwMDEL7OKJqqYQZOXFVKpFFJ6RjS1GM1TmBVjBA3T0jHAaKxba4J2mXDCFuYdZStrn8yzfZkVAB4WHuIu1qND0RgWWVFaJbprONulpQCNfG316cRQt0xNJLckTr6tyo58bxTfAzN0Q2wlHU4uu8g+bPVpvmkGkgguXRAIfUsA47W1y6SjKb2snh3engLtrsO0Zbc8SAE0FDYB1q7rpPAKTs24HkLxgkU6GiRFiGq1D34h3HOzhaIzE0fP9bNsAq309H+fU8Dp5xKDqLKFDstxtPMSuJ1l9tIEjw9rEQ5HzMpB9b2UryNX/14BV8NwYrzLnoOLBpGvnJ9M0qBW6CMq9IDQ1sprojh8iyqUgTGUw2VPBhbyBy0vytEdo1Sp3x+yiC6EyXeNzzD8YLUzqBgTlUlMLH3G7ASjccGsruoUli46pR1nEFtOEGUN+zhdiHLjLNMIFEfgXG/WbzRDdukuuR8vRWW88N6fMQ4VShux0PEkQq/kmPs0AeRCmgjhN+cG+KxK8RJ7fzw77pbW2Cy7Lm2UoCTlyNpcbu+M9sliml9ZqbUpHXUNKj7vptfuAk9yFCyy2PFUuS9TZ7sgqz5az4FLVExnIExDjFZUafpozdojQ80PDDPO2rxkInDxKdMCcAJffaYeSnVpbY+4EvErI46poJhPJJpxIIwSZWuCTa970zPnQz9cxbE3Dd
9Z6IrVZ0voqYHjH2Xq0pSaWgq+13Nq7g1QS3uzPdp5njHqAxQFanGbZZbmJnYXeTmvbnghHY7lNDqvEkU7W7LKbB814OJqbIDDhxHnBCT3VGFuAgCMLcElQiePKGSnSau8qm3hz5d7DQSLvDBrsF9I75WltROMSmj89w6GXkfkk4D2aoPsXRCUMOO1ucTitYTbSrYcTHyEVq0fHJqvZ0gFOKFwIhoHUw5PJjXQqnK0AazP8VftrvreGqrwP1DVsPZQNewqbZDIrcZutvRRjr54fmHX/OAbMwi48mUoFuILLMD3k6aDPuXZdTyFPKl0LSSks0ph3mqRW7BVXeHVEmocNimNv85iT+nksqIwfHnLCicestDfP7LGAzm0kHbx6hXUZPOkakEmX7vzoaqm4I18DDEo81daXnDqJ440FdCHjI1M7L6AA4f3WkXVsY5hBerONsi5Ai7rnZk7Lk8Ze7PJtkyXbyil4GhEEVFytmD1RzU7CODqIq+KaAQBBllJxPDdqQvUgCVicq2neVEgVt2nkuFNrUiYCiGBLgQRh8VhUrvqAFQeFK6jTOZwZ2zabERk0rJsY9oi6G2CtMMyPeWNn0rtsI06Zq9+QawFKtZg3y9OGplqqHhUlMMq+JvGRMulrRwHMkWxa+BbCHX/LVskc0yOXCUn9cM76emB1PvWL342CTqxWrSZVmLthTpGWLYxFcp2XM3cBzbHIS8AbBmkcxmQzNpn8KIsymO9AKPTBOtJ7G9X7XC3g3Se3P0C098udZOLLjNLmArAdJVczzSIvAB/JU9hAsWx1hRiTzvSaU6+XNWwOEnx6HmOIBcwVu3Ndw2xYsXJ3YKILQc5Ajqya/lJEzKvu6jWUiT8OLrmPxQfc2Xnjjp5aar/KFZ/rvHNJ4r6AmIEpDLzwcByvwwDPnfr7w40D77Tp6hJtgC1hAIPhFMNeCCdDylzi2LcGYKFmZfXV7/PuQukndLIuU0VR2SqvCZjzb6VjLa4YDJGsc9ghhOzKJCQti9t8HCcqnI+cNl4cMgkWYXaTysjkSQyfpzkJU8AnunhWY8e4mqTkSumAFdldrcgKdvGCOFLgHSOjfFhP17LDBD6CVk8cQCj9PTGxtBJFcCRAzjE5vZ5VVCRYHebGoYKA8JMt6iq5LKgNFeYzJ3JLRhrh2DEIBMhLVtsMWhDKvGYopsIU8lgApMaBkVG5z2CvjjbBZCBq2GxJtNT4YnuqtYSVDG7IaIrjM+xo4nTucHAWkNNYvrhDL1GighCymh1HQ3dgtoMNaUVAdPN0h2Xa6tpZIU6G45nlWovVA8KmSQv4EawNTHjgO8SmPY8FOVonaCGlt0q4str0MppyQl8bPkqER+vw/X/jmOGIswBQBUboGubU9zPCZ+ISffCYwrbnLPxIxA5lRp20XbrPva1CEBC7S2FKhxFxPI9gAMNfJStz8CJHI2DdsjQugYEerUYuwzrYzIDv3By7Ys4M8+laOQLjjwJds/14fZpvHX5+JK5DCTxE5dufFBdEULx9dVfHXh63P72vE93++PTLY8/P2VfsF5pw2angMXLRimdviNNnG40U8/PMdd3fcKH/Dyw/kDqS6rkdl3ALhnXi8eIsQmemNWm1EM1JEqIv6LDfxc5Luxn26zfh4m/4KGokO4nsEi6iilx/JdjrIe31O0oN32sfBIHXERBk2ObZvutdT49TrwgzbNaLYZP7Nnq/ZGN/TliXakZ2GN4uof+MIb41HJN5pNjyqjVZzPbJp4thV5fTCyze9Lehh5V9HbvyLXvYWpRteN0KbhJcDu+VERhfQMHHosyTsz1KwiQHW+IkBnvygAWG7zbpoe/G4KsJ5GmD7TwUmW/qIXf9IfItC56Grz2/tLepbsJz1jnAIQ6dcGzZ8BYQOHwSl2M98kMow7GfAzqBjBLLfqLayJtKrmojMeRebdgDtRHIs3SG3+nsTl2Fp6fwox/prv2l3t6Lv0zSz7YudMMO10nhl34CfzVAnJREY
IcQ/sDr5tntVXJT8m8Y7vR/YJf+ZFyR2mZ51Zd+++L4DVQif70ewSvLFMiFG3ICphVjr76ZxI4PlJ2/mgl8PMzS+7Xl4XaYVtfzPKmLFz22XqrYB9db6OGLbsLrLF7gTi+FDc0iBpvixlGS2y8oxrymsfsOl+CSRQr+ew+oRWX61gCpuEjCZ6KKQulX8gtc4cw9rpgHsGKeBivi27D6fTaiX5mvnwHtWbJDEQp5J7zvnpRvGz9efOSd+KzEdOHjZ79M6V9X3POsKfWTwJ5m/o2op++kV3hJ+uKECQDuL+T/dd09D/jMTwJ8iviTDOyJsGfvZKebph3auV7a1stvGBVCoMO+XMqFnwCdqCBTebHsAgL218z4q8p92ry4Dfyvnxcshv450T1vXhDonewK26xyv2x/Qf4v6+15kL/PAtyp66eLKPFvRpR12iVJBEzyxQ6T1M4/jyL7sH+MIti/OIhkyXe2k2TugERh9zi6bft4HP0XMxPfxpEbtqmXvDi5DZ+wo2ATNECTk+SRHpv2ixkm5vkFnA8wg38vmCiK+hJMFHoPJuYBmJ6VmiS+Ix/x04EJ/SaY/KR4QdGXHlPwO1xhd9zHVF/FDtL//YPYYW756TdD9Jbd+hw95AP0kM9Cz3065g493wTDN+F0x0S+SWE+V9cNvVHjAmrivepdlduvhZn7afkJHASPTRMfDi1ewBmK67V8S813cHyW3lGEfad46p7KEA/UTlKv2NM0/7MkdKh/XVhL3GdzXLss/dh9KUod3Jf1i8X/Ve09j8R/Rz7n/4u/zAHjssuveMwbJP8pZ/m+8vPIaP69vvI+m3XLYEEhfQEhKquS2w8vhd/ZvbYQIKjm9x9vWa/SNr0YSN1t+zEASorbwOBCh7GHPZ9gV/JBck8yK/Cn7fVykWdBBQD7SyuD4Q+sDP7AylDPsjLko+zYR2AltPU8Bp4G7JDbRVLlpv0LLD8Ilnd5JfIfB8t9OvBjwFJUaZrkcADgE1KgSkiOf4HlL1gWnH3EX9i/Eyz/xSQk8k3+EunpS5iYel8dAtTDt16GG8bGsGFsLOvpp5dPi+sen7Zwj089Pj4h+GcM50sQfxFm9r/sr3rF7gPStzv9ZmB5lW1oO79PrSe0XKHYe4pEYA8SCuwDksQ+iySR35HbtGOLg11Kv5uMzwD6LeHeKZDt/95ZBPTHhG5brv2HIv9MnOQD13DbltshwN/l87Eey/h6hjVMW/yBuXnrqruNMXj862G/q+puJBT71kggUAVB691IvdLfbvwv4OCnaZN6axW7mfgHccbfnGAg77NytW0Aq/yr4fQD9Pa01AL5HSm1R4lK4K6t3hpCUVp64b3J9UfU+rkvC9/58Tsdf9XRP+YHSVWGfgzOHsefufe7rO67RG3iOL5pA38em3ZaFq83DP9zSQUUR4kvQEMg5AOPST4gdLfo/+NRc59N/Gc8Jg6XW8nbw+dfjlDXrzR5+y40V+UP39rrtx/T1+B6/kgk7L/KLRPvLA353vB/r1d+PxD13hQ92yl/R+7zFzn7XnKGv9fenyZndyM9GwfPymSC680TqzLLBEry70o0PJnBfJlpGL4/gviznBbxFaPxefGQ+APcf7jHuhGtD4ePbl1gK4j1CzofBR0G/SZ2aPQB2/kI7LCfKD9TBFM8KpXBT2ktOK9fvqNr9D2NNNIofi3s/ALI5KdSL85fqvwhN/0ymYM/SkY9S+YY/j4tQyD3PULko3L/ByRlHgr9v5gy/HbJ00kSq38Ozsr9+PxyScKXXp1jEoXyRO+ygje4EE9EB06/qz48iFrJB/1j5Af0j6mEsl8w5l5jbQsx5E9z6sQ8nJA/aMvJR7b8rbn/tmG5UsX+gDfbbvy+LyI210oFuEEMWS+l37BRPza4PjAtENiCOmwqcz0urorBEOMNYODL/5h5kv7vsFthh3b/1CP4nMRh+/pzOZW/iUwwyIOkCUo9KWsyt7GTg5kyO8ePEf8ywptC/BGPcDNOH
2F7vtKp9tD+fGGiHlsky74Aa1RfLVG/T3G1RS8zwAmuhuYr6vpeTX+9zeZLq4Kh+AO1PvDzbxs/XK2Papr/LbXqsVuFev5kzaLsl2QCQ+671x+Wq982fjj//3r0COxyfDOsveiI4DOz+/mv/1Kz+08XnN+1y2GPzPPfmtNGv84P7pUN21J+afsHQgXkH1T3Q6v9JyKFn8tqv8ZAv0822cQ7jo+yD5aweZSxQZ+m1z/xxNHPpdeX19gun6xX/B15xlDs+6br8yrF6KMJ+1XzDB/Z+2WffyBYf6/wf9wdY48m8lf1DUX1S+E/lC/9tznkj26j+TLThT7RWlLYn5Tlh1jLh8L8jv6Mn9wLNnqk53789GzD+xQ3hjxIOOAPctxvGz9cuY+eZPyaZTz8l83iv2gqP80sot/x3CqckOlXRWIO9SLduO3+Q07lD/L7X4qKQIg7UVH0K42hDMLSLMOyFI3fC46kXkmKYCkCQQgUxemnZem+IzC4M1Qf28P20b1qdxPve58rjgpTt1/7o9LcL+zX2s/t0C6KT1es9Kd/X8MlTMO0zX7Klvr1Cl8w5Hut7x+g++sYY19pCqVwjCJxBNji++oiRr+yFILSNEqQLE6SNxB+sV4q/koT4BcUZ3Dk93UMPhxi39He9qDY+FWl36n3Tzvg+4b/P3g44Kvwfo8iPynovjRZvJqVYT8FB1fbAmBAYiQDL5FGWOzBsiYEMCMkgiEEwgx4eWCiqVcGxXAGWBoQ7hDk7bnwH8BB/yQpLP69/SZBUcjAQ8E9/g8= \ No newline at end of file diff --git a/docs/getting-started/1.0/_images/images/further-info/devonfw-org-old.png b/docs/getting-started/1.0/_images/images/further-info/devonfw-org-old.png new file mode 100644 index 00000000..23bca7be Binary files /dev/null and b/docs/getting-started/1.0/_images/images/further-info/devonfw-org-old.png differ diff --git a/docs/getting-started/1.0/_images/images/further-info/devonfw-org.drawio b/docs/getting-started/1.0/_images/images/further-info/devonfw-org.drawio new file mode 100644 index 00000000..7082aecd --- /dev/null +++ b/docs/getting-started/1.0/_images/images/further-info/devonfw-org.drawio @@ -0,0 +1 @@ 
+7LzZkrPMki34NHW5jzEPl8wgZsQkbsqYQcwz6OmbUOZfe6q2U6erdveutsrPLFMKUADuy5cv9wh9/4Jy3SnN8VjpQ5a3/4JA2fkvKP8vCIKgMHT/ASPXzwhJoT8D5VxnP0Pwnwee9Sf/Hfz9XLnVWb781YnrMLRrPf71YDr0fZ6ufzUWz/Nw/PVpxdD+9VXHuMz/buCZxu3fjwZ1tlY/oxRC/nlczuuy+uPKMEH/HOniP07+fZKlirPh+IshVPgXlJuHYf151Z1c3gLj/WGXj/DaC60tHt45bpqQykML/+lnMvH/5CP/9ghz3q//j6eeKly4WDeTtUcw/Oue5Nse/X4E2uN2+7XX77Ou1x8GnIetz3IwCfwvKHtU9Zo/xzgFR48bMvdYtXbt7+GibltuaIf5+1m0wME/MD70qxh3dQtQpG1pncX3VbihXwZwHTae01/ogLOXdR6a/C/mgb4/v/P8xfjPzz3+H7TQryX3fF7z8y/w8WsxKR+6fJ2v+5Tfo9ivgX7BD6O/748/Q+lGzO9g9Rc4wv8YjH/xW/7b3H/20f3i103/By6D/x2XEe36a5y/8h0xbcMfB/60fO3L3Ccg2Hj++eD9qgR/s3wf+uK4j8/5OCz1Osz1Hbi/c9+3+jP9z8l/h5LbnutfQ+GvndgPff43+Pgditu67O+36e26/B5ngXfqO4CZ3wNdnWVfkPx72PszOqG/hZlYzz8gy/LfQ8/f+4X+6eDyD0ML8l8c4P8oy+HQX5sOR/4d0yH/juX+bfC/3HTofxPToX+DOpT4/9x0+H+x6X645I/8/Y80JvI3OPy39//fGZP4b2tM4p/OlvC/lz3/exjzb8P8n8Ga/10p8p8gu8DY39muu/60VnH9p2WN57+z4z+9wPqn8x32D3PdfzUj/7+VzP4ZYE/+ne3eWzfewM//NG35PfY/wP/Peu8fB3zqfw/8pYpH8LLuvo2Zv/Tb35p/Hca/GNXiJG8tUHjWAziaDOs6dPcJLTjAxmlTfl3yl+2F7899yvdizDL+NJCAv+I/3hT1CZzI/t4PX60r6DwxwBKImGY99L/q9C5869vZ8/9K7ysiYhavt39FMH4XwOK25PPyp7jP5qHO/tT+qR3ath6H8U/ghD/dqGnuk5Cbz0Vwh+OfYIT6X2Nf/n07RCBEUSD+A+2QfxikEPJ/4X8NKhz/O1AR+N9j6o+x/3pM0f9NyfSfoMz6Y+K/sF06JHWZ9//Dov9pt/3DWPQPgP83rEEw6p+uBkH+vtewDH08/+mPzmUy/1WHE3v/aWy3sv6fEPnPN5D+cSHy30VhE/98OeHvBXad/Y+q/k977B+H9f8/imrsfyuq1zyt+vuq5fUnsKIswmCB7/7s7f1/TbY2+dcbtfG/3mLm98UMXPHz+vurjcf7UcHr/3u9fattXuD/A3r7+xx/tsbfGOIfBUUK+2so0n8PRRL5eyT+MfZfj8T/LlL8b2XIPwHton8vxbv5T2mVp03+P828/7zn/mH0i/4HVgT+ln7/K9j13+ftf59h/4qE/+DcL5+izM9bRAQEiHC1z5rOAalSOTD3j/H0KsErGYZlsfutCHHM6/7LCVQxfU9gpYx1PYFhNCl/eSdLH+Aw+36KD+N+Id9HmDerMIwFg/esdc8V5AvDOIp1H6Lw+72UEsT1VC4wnSa1vA2zbCTRdfRknw6klCov4KZbjvfx4YMY5Yt/HJksXpHzKF/621CvRk377Fkrte51wIuHzrOHDpUfiddfD+QVip9SYvnyOBghL1p8so8XZiww27zswm/CiH3x8YuZt5gbWOF9z8AE6rs13DpiXpJT6qVJbtQIrv8u5YuNHlcZfwyMfzlPkf00PCI8uUQ7UW4vH/UDkl8uxC/KxPSleeyKFY4v/uWydJly4iKdHP6YbT66scdyaQizUfvoci9+lJHEloPJvZrxPuRNFZsG/vXs2m3BHl3Yi7TXiOBI8FBjprW11KFenKB4ZfPQD5p/Qo3qlun
AiAxlw1vGpFZ51SPM1pzIpF3rSJH7uCRH7xTqNgAmMJh1P6tXvqT2TSl6ZOTs8BLN8tDvYVF3Xt0k8cxxm5T9jt1hzd7+183dk+iSSAPgXoYLWSUI9fsFVt9vN6WUJmxFnrc7icFrBdt3sHBDMzYsU5j2JEzTh6hclicSddlymIgtyQ32wl3PMKAhjg2hGT+Gc0jI9TIT8R11UReQ8SsN0zsaxdCSYJTSl9k7MjTq6H4rHDrMlwx9TXQXbfn9gxFr4H1v7ueH1V5nmP75vQ5V1J/fMVDHzazNMy9n8idEfaddDxnPvujweTyE8BnVxqUW9PsuFVnKlK6+eG98axEWFRT4574ni/6UlpZccU/ZUXGdIZV/J+bY5xfnrCRr6EzG/emFK63E2JLEwj0Z3KJRmE8dNo/StHtog28cU4S02hkoEmAc2DYDX2669A2Jk1K0ebAXubzToCqibsAjYQmx19OzboruEYua7g98yCKdgRQSmDfcBX2g5gayxOQkbFeHTt02tcb7oRMPrbPr8vW90XbQGOZghUpFW7kdC6RhPtka2u9m2B/Je/XNbB7L2GSGp6KfDTawjPoZ3PIlPNcV6+pefsWtA/MP8mmNJzEmu0D+2JbXuZcgMxq39JF/Ew3bVTwbQ/dNpD1dBfljfJxXpteUwTz5xvXuM/BpTEk1Sarr9dwgN9TvcBCfIK7r75QK+/ha9ZF86Og1gTnxRHS9AfRRX5NL6Jg63KPLoVvsUJ1IVuzyqJBnJnR7JCsVw/xg4zHI4KlfbJcCCalxazefxsd/7ofzUot7isbJ+ZqJoex4pctnq117TJDnKf5iyzQ47J7LVe5TUd84PhPE0rMXGlXDj1JYYof+8jXWR/lg5RXSeMPGLwqFWDgYnemMOdOPZZjpaRY7T29RRpBLskQqD+eeOBFJR+e4uzOe+vdjbyZn2YM5RjcU+cLllkqZ5mBrzjdbnIsS5XxZnrfP9RyuiThmp1xRfmzGnF+bqa/PC03XInlWPTNOhflQcrapKJ17DifidYCT6gIjTUH4vVNdsUo7ZXQ23PzbjGczTcmDL+b0UThlQ+lawTtrf2r9jIAPnyf787ly/yYN1p0kehnLohcNbVOBtzb5uVOv2GVuvc9aXEP3p9uPk11FognzKGN/ccNxOFoyKeMVKxzZ5KJLsn0oA9bjV7/DEfShFs/5eTb+sNMXz7DvXEbJ6EkG97yPaUYk3hpe2lOvDh2jLuEDN531pr5uvRPk5+c+bbkvn6zN8jlS3YeEsPOeCeyUBy6dvdwe7VCAzWtkbwmqIP3eGwswzXPL+vSMVZN1G4QifNZNE7uR8UnhCTIluA1Ckj9+LkMw14/nlJAkttKIkkdKerKt+G5+dglgtvhqRzz3au7H14w1sgBdvFqoEOTsc028+8jLU9vhTr95I9RAGzK9927Jfc9XGQuY/JD5r13N4rltNhFz8J75uRHzfOlAPzNrUAn4r9q1DenVifrcxZDdMjnhw9Y7uO/yS16leNj5DdJjbTMsLqZNmubz1fj5Y8Gx27XWL7LyG7I3TizsNq3o0L5BMJoTX0ZRx4SRZUby/ED1Zv7ygXJfGviKw9379Buzu/FoXmWA+LzyARCaHpXqCQv/w6AOD2xwbMTHjfxmpbtBCr0se/gFRhiA7bjFU35tbDr34yvccU+CsHSVDNxtCXGubeF+87wd5RUJUDP3tDaL3R5k6rkjkttIES/xHmSsYoGf9gTIQPiFB4ODpys1Fo8FwRGzhDiZN1e9VnDxzg9Kp/wBvPTW4OFqnZJRStQEt9B4j6S8TZ/emV4808e44tJ+OVMI1SBnJDWd/2CdHZizsRiVI+NknVQDX1fItQ9vwibqVmUifubODDW/AcnbtxFzRmBvIN2XmQcJXMwftZuuPXF+uOdhPu4aGuQAmj1OpvyNElsB10gukixskE32Rxw/NHNmI+wliqaJQpCUAqt8wzdb24Ep5We6Afy/ju6hsRWy1OVqors2W+v
E+ePyQ2bc4759jsm0nwI+v8ma9Q3XeQ5QwPNtogNeMtLo8TmJwnm126k27M9NVVhT2szk3MAXHL+vk+9V5tk1HbigOy6TmN9UCmKMufOOi796vZ2f9DgokbQKzbF9aONz5vuvx7gvwoQ3u9pALiFiZBj+7e70MrP7vQ1APvRJ2GLlj2lKi7JBwv6E0oI5AFT1VDefOjdLo/fvj0TjgCUYEbT7U1Gw3+gsZcCLkHH23TXbe6pydOuTROq3j0D5SRAmU4CItGWrSAj4iXwkjo7c0IBFkUEJ5Pec+4Lfs0TLzejMLp7mJceXymEgGa5J9Xr/PlgwFCBfDcTo+WNQZ3EoJ+r79DxlJ+QfZiplELMC8wE9ksITarl7yR2M1i/4+AEbgzHUDWmW0UGwbWpjOJddn3N2aUH5o4owNv2eIeC7ZgkVUnO8j9YxnB/ND2VKy9debngd+6BOlgLXrJiEqHHqFnLNYv3DYKVqfKPXXqJM02LPwErHUUasxeFAzAJN+EE/c53fuL0FbbcruIPU/OX3TULEzeZLv7l6/eFPBckuKphDl/GeOZxWP85Q2OCb3Ti5ugmr63Fh1tORjbHp/Ng/CuddEt/PXy9M5SEPTyRFaOJ42Ufpvpfn4+deeKb54RCbkkY0t00k8OyJ7IgzZSnuh8vZI2NuQwmsQld54Ejh4ylGHn6ZQl/++CBmf+Yg1R6p29Sewnp2I08e7J8QjtkGzGWv8ljopJKV7OR7CEtuXv5L/YxZpffN3KfM9Khp70V9QjFhdpR7416MjuWv55HGO0/1vU29eMFEVnpow99QkO3tNrCiSQ0FE/KNiQRhaHl7ri7xYzkuUAoQwnLiYeHjsZ2hB7STzv486yAB1mA5qGxH3egkkKb6J/z72bdtgtlVSRzGozTWarhiANqf/MGINbAYy1KoUjg2J6KvhPnNLRCvfPk35j+GIUgHMtQJqDN+IMo4kFYyNsdVXdPJEg9KMkp4/AJ4/bmq1q3q7ro2OyVbvLD48ZOPZKb5ml+qHL45oFdyf1bCu4QjfufWPPk7t7jdR7iH7SLUx0aE8/FDBirTdrzH2Ab/fFWfWvSeRMA8q99YHSjVVm5raXVvPvyTH71JsStG+T5vW8vf58WwQwY8qM12JKTvryfBhW0dMAYnjls5bHUPUCRKlq9l2M9zs4fK3IHFscMUTAZ7R7Iyonw06dtl/4amwGvffHTf5bbBHo4sTwWrPP2JJasX0L9qA+PKn3l6c7MrVeUGvVbOsPkFBfwTlaviq8XLmSkycWXgwZ/jDib0jn1HSszpsaZFnplWTe8rx/ulMT8AJ34BTnuTPsjTngkBbDvD+W/PcdejwW2QhnTUFJfWTaxNxeCFzTxQvWZ/CU1h3l/sceKJIJCHUc/2IT5OlUJMRBp+sqqeM8ht8Eo6MbHVVuXsEOQnypglZ0fBZjnjrZBiCxnK+jw06fr8MsL7gI6bwrRA+yAqUa0OfslPh+9XQ2u1ofrN2n/4hG1QCLmKUkaBPIxjfIv6+HG/1nr+53pytdwXlapbTcRQ0JW20uQLBOF3qhf1n9kG1lNAHnw05CII1MNL6sZhBlMf0UjMlp+gVVm44zym5KILG9EmPhV+0c5XiyNmclK9U/5GAPbD9fh2bVyu+IGI1AhEPi6Z+blzIG3KyREYQzbSrD7ajQUsHLULfSfuWcl+7HcLwRsUpZTEp7uN/L6sGmiR5FGcnmSgidyvS5EftW68U5R9zu9v1psS9bmbqvfDoLcvJFCzxNIoIGd1IZn4htLxTkbxw/9VYLNn3bHFQPCTzB1fNFmWWGYm/WVOEL8gO7FuG+ESZU/LNLnmA8bYSszwOcoDkqtb/izWUabpFXtNxPypwdy1Mk0Scmf06kEklADk1bqgE9HhQXFwjPk24emw5+NGdZ/Ib/kT+xvmDNS7XI1dgU8bb2nltjGam24gtxLFbEu9z2phsH4AbQmnrpXvQ3DSmiS1ec+Xz+LrbSb0ctXncATKwSk
sf7nXc9uHCxHbfIRf3XKGlzm1oeC3WQpq2/d10Zn04bSVZVKF/+oD/QPUHj8SSeBHp4ooDjyXRx7c1FQjS+jKEO+cLMeC1QtWjkN/erOVg1HLTSQ9aTRYHrP7N1MDDhDLEEUytF2fA3zY8KGyqQKAcGslnwRF/4Y4NzsqXxlwK5rsQLSIRnwvCtbS1Fad4Ws0jIgYdhFcSNy72lIE+g0uTpRHI7jsgR23F7bHXUyzChmWzj3PYR1YTWlh1UibQPVjH9so3pqrrH5RU/6oHNaglKLPDHJ5J6VmKm/5+HoKgWhajnwujLQUY1aOAGz+iZ4D+LsAESrD3xririXlAh7lmUJTe1a7x3E/sWJb2VZyJZeoxLmM8NBqz+k50O4tpN6B6gqPMn4jBjQoJiOBDhVcgBQUeyC/lPuHVe47s4B+uV6Od4spe+SxRCQk7CeOZJwBbGjo8ixg5R9dCwZzfuLBuUNGvFX148MljpbxO/NTUJv88KORPqLZ3TlaulQeQaQ3y2eb8qMlbPN9wMzAdK/2RKjpMYS6cwCDXJ6oBDr7gTEJ38bPuip8gnWslX6v3BqsKRwMRJHrnhGbOwqlW+cQJBB6SpsupumT4hQ3JSqoek+WqPP6iBERLtx1ekxXve1jcan5FszvZZws380w0Zw3dInNrDoIrb0ZohmQLShHtgwfxSNcleUSCMufyeihZzZuRQShihSuJnKji+9iY/M+iR990Ta7s468CUFMiu7lBIcAOIO2viabXg1d4NhfbUlm1kXkWetgZ11djiTXSprN442O61ICygRZeZmI+u1kqxbnvSm0k+tD8kzDvr0nGVDh6pB6HiZnKa0OGrk2Xcu8d9XvIC3TTwsUuStnAEaSmeVmGD4Yvb58BatjhAipA/zqK59v+fEykpGqnTVGTNB/SIsWLhQhibeuwtjPnVv9511dX++ezMLal5tadxmb/CTurRDYrK0NKeXfpv8AgbxG1a0lrpJwFjWWsse7qH1KmbR01Q63a6CpWYh3jgS3rLiE5bkTjAl0JyaCjKyeWkPxPEgZ5h0V346qGDx9Cdm2uPF+1z9BBhIumAI6jMvOc1g+sYnmXC8KwUUXxnCaZLSsTsnHHTl9qvauncJRiG/WdgKtHPxVFXnotFpTLy0Zqf37hreO9MMGinwIlGaZT54x9fJvxRnw0+ifcQbXAEtVH+kNetdoOVBY4KZThqCV5hlG8jPxsUd6E1c9E4nebHqibgmcdE97IRYWFc+2OkFHZ1fd0HEsWdsVSP1RNy/fpws3hZU7X+7CmOnoV+zVWpSshhA6ZR4EMK0TVBK6VYwaA86BFjR0gd+zGO7bj66Ioe4pDUKqEqHhGkTp7zGi+tBWdM/HLr7K6JF54cwtMzx0fsWUNp447kzag/4aU0sA6hUndd+wklMtnlQKGKg+RrY7S8kzkGgdXr3lTrkW4rTm0UU/YMu3/owF1T10c2va+XxW2PaxQvao4DrikUJbu3ZX6Is5dMVS2OXpi8i0Oy8DYkBfkPKIJAO1qx4qH7mPQ3hVC6slOncF/g4S1bXYl1bvRjDZYVJvPUpeV/NMacMnc1NhfvL9YJc4pTOWQu4a/ejt/X3SDT1rqDRgAOqmGH5ytXE/6VbCzLrgD20X1Fmdc5g4uxWy46SXH8Epmrk1hqBnOuPkUA9RouLDQ/tp7uvCs9s/Nf+YPqNXpj9qrtIvRZLLaSRQPyLraUkIA8apcjYrZu3TrXaGZY0UGpc0cWNDwfy8lqo7UExTofVAqCZ6xR61MYnFeaAlMXW4SPG+FLaJtc2Qn1ldAppybw0Pd+G3DrtPrIrPcHFdn/fjCnfxRPtlT2AyuWnU9SL115UGc4r27SUPzGZH7SkyHNecMTJziJ6rPh2FXm6+Jfos3MShtiFWp7wXIL0XmdR/yz4vfMUP9aCk0s5o4og9FTiKDZW7rl7xYhGssII7moE8O3eHdqNqjc/vejE3LwgV1xcVKkgVLSohu/dNmhaBRiq
GC+siZF7d9A8XKEgaKDqVWxtGt3JqhnoKhJrVLRocVtSrc/JQBThco56I+CUVFKIQITtYaVi6fMHU7bt2fiZT22n0MSr7h670lsRwINFQiLG2zHJZ+TcvKe6tryvyAC2nbJPbLCMIDcVxsDAAUxolmvSTEMO57jeIWXZ/LeFY85dwvZV8Ck9ohQXt4r3XwLJ61+zGyd4Sk3kScv9M0OT67cYowhmlLz5HrMBvDXQzb/iNFz64WWMpTM52heSJz8Z3vpjiUXdm7wtMMzrZnrcFuihLI/U+TvXdhLL0VbyVSbCN9XqOQSDO7iC99M4IwTLUNiagUSExVh4SEVExONu95AnZ3ooJbw+7ILYuEyhXCZpxtmT3IaHGCBWvZ2rbZrYnTGgJ6vemZaZiTwJzAhNL9RcTHcSLj4uLaNEEe3H0dSsQ2bPJxHmKl2LdehpCiG4GcVFiN2UScL+Y8S4/TpbBgruWZoO6Qce+dkCH4h4TFNViHd/Amf7NfnuT1RuLLNHUXdVybzgNi6ZSrL2oMH+XDskSHK/srVcSHW4zFsZ3atv8YEXL4QKCT8cphFQXDxiRPcAaiLPWBOx+y8CHave1i0hPFshUOogCquqHoYFAU3Xm2xxlNrGK7ajm5IqNzi0UpgkKD3eUouUuJ+iEge95jcRqKw6AI/bqp2U3sy4P0SJ+PcLF0CHorxVu0kXLqXbGIO8pHYDzoZx8UhKO6rpm8fG158jgKUkvmdsjzd5TVSTHK0gBe+8tbOifNKhZCXpw+tk1EfJ00m8FWJrcy9p5JhyRs4tGQ19E3Ck6slrXXHyUguzeZRsLPT07LFSwMpO5mb17mdV213U+1i7wyBRiTG7xuG8fSVB0iy3fUUgnnhvSqxzvEtuHrRziq9aAVqq1KU0hMPkmmJx2PvktCdULVfXKsrPiFiQBXcyn8jbZaFl+apa14u33xRe3iExaIpeth1QbQqJkgYazTBOVhf/pmTsBf4AQT2ILbys+fJJJ40IdVGiTSCSEjr3az2MJSNV9Ie++g9zYYzfujr8Wumikjpzxk5ztA7PdgHz0BLJJdu89Puvjp1TmuAhZg9pI2W/TAjZfyZQXZJ7FxuCK2duqjh0RI04kAZM62IcjcNCERQpXje7MxwncBsNxAEV2BmfO0O6wbYfMJKD6R0waw1e0xAYVTA43hP4xkYqkExusgN1geBrvWlaIWORJ0fTm1k6ax7SxVXR5gTL66F4EsFxR8ZuLVppfVbWr+jT0Vw7k22ZNljnm1ADkWixMJTyFH/u291jSqc3b+UtlOSUFTTVXM+mgg6g/FnlvR5EEC+X2esAbQRj9q1he9qMW2l0sFxNT5rPEQBtc9nU5vszH5ehtCGTIRC4dceqzzIXGtZhm+wmQ86agZUVbGsgsHMFccFOc+kY1J/esGCK2kCeylsdxVQZfsBO77f7Njk5mEKsZbF0KFshYH3rJ4vZA/cebyE8000Zq0wloR1xA9xoJY0gtftdgcdom3cmRPxWsgAj2Z+e4S/zMT7AsHqMDFYxy+dYKTnPzNqTyrEN2yROp4bBvX52dgmIp86HpNKpA8Yqu7rlwUrO1GhxjeaUd9ziiZ927cYwYMoU+LyzLo2KHq4fydPH4paLa5wSZombjwqKafJs4O5jKI6NKm2AwRZZPiiEhEtrkaREMvqlSIxnu8ixG0CqCn0rmXU13+vvqfZEIUlrJHD8yEufJUYCf1V2BkhDSkOsmcamxkdyE3sre1pk4qZDUqXsnraNQMfJZIC1RRQ0y+3ogmIFgtIcPBH9eYzUSZMy9aWpKXUIueKQ1oScNopZBKFlaIbgvcMR5g/IOYJMGa+gbjwKh3HFDDo3nBnmAOw1b5YYHuN+gnO6y0erKeoqBTDog3VVMEEWPg86X7AFr8spX50P7IK0ROhWX95O6rs6sAXC0+nBTt/pm+AaBP92Q52HbsqN96fB3kXG8PM3RiN3PhdpCXSqy4ixRRjS+ik/EjyFsVQAHJ2t
PPCl3tstiF6X6w2d2wsggntS6cExwn2Hg1IJO/TZleHTn4jz99Mtq1vsIs9dsJWsqym+lUD1YrodRm9hpm1ENrnAJ35VXkrsQIrpWZZjPoUK8dkxFLhQHC7navQ58kFFFEr26jb568K1bEd0t2MqKLaBPKs8E/L7RuvfddDgTwAKXbH10x3CIpQKbHixTAKXQrhOz1n4+W/p23Q/YPMI6l/6GE9POPSpRLf0JczZqTBz//uh3eAgtCeEudul4pDhy3lfpESte+AY+jXaf1jHqBdrFo7VW6AphR5b4V9G+cw+O7oyzznAMvR3fM3w1SV9Wi1XhwwHZysImAccPU9IkmqY66tk0YhoJgG+48XPaFGaYeaN40TMUPW1vJGkqpKaIrj5buIjfP1WPryrKxUSlDyL7UgveNlZczI7TGC7ycWtlMBkLWiqxg9DSnRRNE1ep7BRtpKLo4zxP7Y0BiLx5YkYhEmAwmURY5TX4PO33qdKD+s3dVYY065yt4RDgJG4lSZ5O9HAjO7xK6zCwzrCraaV7LwbNRx2BhIf3xNU6NGeeEikAd5ZkEDoh4jDVT9kz+XF/fcz1Bqx0oXEwLb5hnL4HDWQtgYj6vMheUW9lwPLm1e9GluJYkHra/FZpJMXMmYRTQY7wZFXj2Lru6JSyq+8tKf+2P5/l4VDpQ4e9T0dtib6CFUDeGubiuwto9rWxdVuKYML7xvQ5cSUb2rz44Io3+jHwwnTApro3MAn5kW/M55ZJeo8BJ6K670MIu/j0NPTALa3qg+XjGIZTkZ/kO1DKDBSoPNofHGkVyBDTy5cA9Jm6yy5A9Lg76+8FOx7m9AELGoCmx4Tt0aIFVD0l0rQikKG8CdaAMFMGVcXwOakd5DEs7dteo3WfiwMshA+8fH8mHawKes+CdXarmDDijaFrV4/iCYdD3Np5lO5BlcbGxWRr7At3jTZto2t19pXfElHteGqT55n/YJlobNsb+QgT2LCcrvL8ejC6NG2DfxHCTRji9XJQ76fsB6SsvvVjKl/pC4+jD3772ODfrjma89IO1jvEl3ojw1dCSLscwk8pfEr1ZnZMCj6u13Gsl6alX2R2Kb7/Kexeh8VVruTaHR74kJ5zwfAIzZmT+6zTcyHspJOlOiD19oOFj+5KKqAdl/quN7pj9TcyQnrQmshZTf9cO1UAeG9dJyl+0R7YEYS3sNoGuEs4PET313OSSJs871wgysyrN5P8MeZmCpkNhpOfzr4fjzyfE8vCZASTZuLDqm4iB9smPglaV3VvFCsg7yGi8LCjvd0QHyGGyARD1SdYwNSxpaMX/NkDWbO+R2rMb28i7xPsl2gN+3kXX63VWl4SEkiUn7GeF+lBLBSpazChTbAbOUkuZzbeyJRrbv6gmJZ2IOmx0PRmxLcyzwPQw1rskCK2OH1VaabdYRlAp3LXf9qJyA1PpI9KTu+0wVfcjrJJXczMBSQEZbubPTusKStChvKE6PT+Y8RaGhnHS+8EiKawbWCglg8eTdK+HpbMAXyDKOtxnPP7bcel5RO+3uj53JBbOZDPLAptTbJl+V25th+VWx1SpZxsmHgQBVSRhrvUws61yT5JzD42l0fEY39YkejRvnzs/QqKP9mHiGGyiBd7PnI2/CC4j9NW/wzT+eMF/UsvI6AQRbNE2Sesm+R4LaDftMBPKFJC5qBdkOFCl96wFdzz2WX6k5PDucJq9j2WZwa6CM58ENEjW9XgeFYf3wG2bBewESS8KxBPo7EpYnyfvqXINptCW9cF6X3rXamA+/7Bw6LWnOFh2jJvU7GbB8HAqKzD3pWajjA7/bI+h55Tu4B6Oyb7TAk44HG+P0hNztercVDXPMUV/9jh68NVcPrGEWgc5zapaXc1yV2QO/Ot0w9Eno3Y8yvrc1HzUDwI7AxGPrLvKamUjAhbgnKUbzDfoa793Mx38wIeHqZXSUy+lpcPOl30F8sipt0npeJ/Unfe42J/dhi5Jka5cT0CTBjOl4uOF+1
koG4VOoISZF6rM3gcrQeTSAGlWFZHqiiEpWrmvfWkXGSUzwnmwPzJfHKN404UYusW0M/mTGhnGE3NSKLDC607A53X7TYaIK3VkTAJomaKTF2R2pATRWpXCEHWeZW4Fb63cdgofQ8hKEye9BPnUfqM5tQR2zE5zsa2aODknS1FbXszB3LXK7G/+9vhKIhF4HTDwxrZYQVfDSMMoWc6n3qFzdU7qgv89HvqhErAz2ObbGqsR9Ixe63zMD+0MivT1aLXEm7FahKXIQnqoe1ru9bewaqHnTeTktJGjbn6pMZkr5VORxsoddP+G1bf0vN93BU/TVHhCu3cE/nEG18xKuZSyVrJ+dusE2gR0E+NrXWhGa34gdBoEAjuHUT6elyETzYFTUYvtnR2oDKkGMCOThbBXH3Ehyhf2DrAd4Vj+mRspbZ+pPn24CcEBIbhS5tNvITP/AYaPI2PDSgPPAV1FTqq+e7zTP52IxZvij7WfXS4UUN62emYby86sriO336SWLPEtNM1YDb1WM/N0ipa7YHVIbg8ufZjY4lQMzRO8XteIP7mPWPoRShgybsjj/olPcWr9IVbVBzxkFxKspMAZR5EqlJBGIAmP4QSFiusgO/HaKrrtNBLPBfOe6BBTjdZ6glHTeFXJ40DY5ARTlAVXfBUCFhVFHxeCJ9Kyp8PoAv5in/P1RT30JloGTZVuJatvjMaRa8K28iWd1zyYDW90b/8gZE/rcOAMDbnw5B8B6ysB4v2Dula3WH000Yuy963RLx0I6/KaCSlxzxXuxcQrINgpskm/kn1Vv5m4A9owMlXSpmR940l123U/erlnrWTye1HXPLopK0JS1rkh2W6KIJ9hngmgR0eH5RWP+h3G9oUr0SotIIgnwuYyT5FDtNlLCdN3Mhqjy6momQ/D6xUSmdKQvHmgg5bB0L4dp5Xf9d87j0an/iyJwzjnsK7tcgXfiiW8kzufAL1dO8HSxDnD6oRCcu/UP+90fwr2dqafUgmB885lTbXtof8GExOpFnVM3N5FDBYSwEIRK6zI/5rep9L4CoR0bgoXlXj+Ij9KHWtDcKRwlY+95mLkV1IvcY62c2rBiAI1F269yrt+AmPHrfYB6TbI2nQV/6jJSogz4s2yGdcVY1jNpJL/W4XA7knDaH212+sjCicazVUSllNaJRyu+Jd4iacv7tFyfFHvIYhh6WSnUs8mpR54+cm4wt3fR1ffvpx2kybn44oBX3wSH8mPbWKMG51jqH6ukYAEHlKzO8Y9QlMDvlUxmS+mNfrTe5F99yc7ngt4Iu44km2+7C8yHhxRBIywRcTWqRwR6hF65rvD2VXP94nsbd0aBLUZClbp7U76Rqhws/wh+mkJScUCQUKCSWbSX6CXU4iLi8Ta3H4FfNx2qVyqKu4LDyxdDKloIUT16LcjzaVfdEQmUGtTbrLiPAOuJnNzRUAogjqUzpbfymHwwcm1KZ54rImgN9LBtUfraT7yLuJv4bpofDzQGlQzHR/zNGDQDlnvXZ16dPfFLjeZJ6XGBiW9RZJnmZl9HzBB5UG9m2FHg6cGVxmUKtUIxcOgVqeFtcp3OESavZ39AFMfwth0g976POwjVftFjuF5YrmZRb4BgEHZPtKiBmKX+6Dz0RPgZeEer5nEv1YPhmo5duv0YYcm+rxyiT66Xq9BPcsUS/bOype+szjCwv6Fux5V2FKqFaj03MDhQ7Na15FyRKQmtjJYNtS/NZ8eRnKW6xhdYI2lv3kjIJX4cvfP2Kgfc7Gj442BcH54i7EvrUcvrtzg1CbPmlwnNH5Q88iWf9otbiIYSyVMfcgfbSYH+n6aZyMMMlcqyl/L9PYetPbsY9o0ROwlz9rf5lDYkyzT1dykzxDFzbiurrY9n1wmgzQ2jKWC5DLTNMgDri9mfnkWjFrBY30CbmqdBpmlzD43cvWR9isrekxmCSnSosgXTW0c0QjurFe80hjo08V1CsAWykYF1SJMkiD7qLBryMd0JZE3zEB1pp
w3irRC/J6+UHPkEC+dTQJmiPc35EMQjBzkaSG49itlPGRuhC3Y8DgPa8T/jYFFgLRBZ29uK6KJaQvrykVPDyZNe25VIY4LuTREBglgrpMiCewbOpuaNgnV7DVHXrXHyXNElm6OcnBzfbnlkIiUn/46lQzrKbD6Ns6o8uwbEUy96VVGN3XwQRzRvlXQ1M7DDSyRJ/JMsg0tdATFMZRDLwo133hQmDL59Qe+3pzWMRt+wR4yXfdl3VxRYG95ECG5XieimB2Mg3JB7JtSdbq+VN5k4UMtuyzc4uSxgFqJgmty5wvIR3dtfwFj13pqYQTJ/IhdW5p+6Rko/HVM49evshYTUJODWjQsZB4h3/LT9AeZaMLqnWxtlalYGzTJN0eN+o58yvlU88xpoI+kM0TWAcFgHPywwftIJJMPdgXufFzzHV7E20Np2CBbjPFhm+kIETJrAvyDXgnIVoiXluZXs+BjT7jLZyCz7rqZG+s44gImUeFYO1vLs0TAu2Wu5xIuD0cu8VNApGgC7bEbQo+rWPaiJAHTXs8sZXR2GmkMZKP3H+U7HnX/Kj+GW66GOQt8zl0ePqAzgQ9BlKEZuOrhhuz0CI/TKOEzAojV2okg7lNy2ffEG4biP7WrHGQR2LqvNsnzj1JGC8Lou7liMhtk7orE95eoWCWK8W0xyLbVOEF1t9M7UbuQoeVhprQrt1liRuCh59XF3T/0Bx30d5Y+Kqywsx686BOeH6VPwMy1schl/TVOfSd8Oh6ixatst9bUmV4u4ZvwsZWWE+hDJTvOwwb0Cd7xeguQvmxzkSYWPAwORUHrMiVYeYNAGhhc246x59MOz/bwwvdVr1d+kRQqxVDEmBpFeMNGaEImh7Za3PoZjT8aec5kS/sCPts9V1PYaG5PaCxgLtHOC3Ib18WweSlt0tTO35W4S0/woKN/rhBnhth9qysEl4dvDCn5jjzTuULlM6Luk8+V1wHzqqjNZ974MkpkUsSM3lfUZQX+M+CPj45oLv/eOHD+8wMFO/QO85qYMiPURVusUUynNARh6LR43qbhKS4TMo21pQOMpkARY9ET2ewoAA0t8n9Y92hpr/EJ+mYW37PHtfIo5nuYirDGrZllgmkADN+74q+2R+2UpesbBC9xsDD1a+sV5F1CNzwjYKlLBcTBWUOI4So6jcLN9bbPaltWIxHSicTtHDx3htEOoMlOaaidzsleLkGcYNPHOFAXvI5Pz5JZEIBY50TPk6WAjcwvyd9IwrlEt49CWxB7DPwpDHVPshx89Okx2e0E/Xcz+5EsIXnkSTIuK40UgEBA+9c7Yl6nkO6cDvctEemjaMTm9DeupsRasDBL5y1sQig9gE9e9V5bKJHCZsOQUSfHd0qk5aTiuGWpOFdLvfvYTygmTPVZ/MYcjNINqgf3Bg6B9+cNlaiKXO5HbQ+LAvV8nZCJvipz4q5V7ZxzQlV3gH/+YHJ57wTOPIopPrQqF32Yh6hVjIkW5O9qw+TWQahF043w2tmJ914x9xQj2+tHV6w8f42sOFbwDgeR0fDO9zweO7lBM5GzUOTQYJEOnZeBg7RqpDR/XM4sUNiK4prHpDjtotT9xAD3cVlsOcdV8gn7ibsy8AW4574GuHc5ZuB0eWLETm6wClyiFtQto9frdhkVpy40wQ7VODOkoOQIfdJ74APEte7eKC+PKedHZuwShw9tudSu37WYoBkmzd+EI2Ajl1dG9fOt6RKoLF3RVLz6pMEbJXJXwlcDtyHNtIZ47dpBJplqesy0+owGFF6Fxz6mwE21IPhub4rI9RITxuzqH12aEg75wArKI1O64XyOWl2Xizx2rlmVTPkGbLpaFQKNiLI1qvWGiVjps6p8bE5dS75/nHOL380qwj5abgt6HQ9kLYwyvD6FLO33pkntUFpd6vo60wOw7pVwEeN3zAEA5z3HtGaH2oFHYbHfu5xOHwa8D8PONSP77dW0pV1cy0oQwQjKp6usMUA9dMnS9LhQR5RTHgP0Om
DDlBpwgRH1m0CqtGQjBN1W/n9HhbvZH07F80fjRtjVZ60yudmqIKs9Vn11OpOJ0BGIkFah8KGlJHpabj5WCpYjuBu0TYYkWvb4bNzt29eU3WsyTeW9GE5JHyQKXrfOty8C50+MgiI2kI+BntgbmanIcIuvhUJQ78JrdOqVS3cDX+8yLUljCEa4m4MYuRUsm3WP4XLj8j2xMTDzA8PD59CaPDRNXitGbyFzsYJ1M83yQiiBFTN7Ha+H8BYlUWgtmVtcXeuonh89qdFUXf2oVMdB2pskxsg2UM+oQrX13Jr3PNVmvwmt8xi7id/z3+cKIL//p/w8MZKHOnC/DBATeblAyWsWF9ZAi8ntBIXC1F09Ah/HCUWPWQoGdKm4w8d72ORd8QSC6KxEsmdf9aJMFuwPWXsoOsuMoqOcW3fN8w+igO1mV/OIBJfOyFt/WBSOc9JKno0ryvV0Q9+eZI5pf3ESriruC/a5XqhO7VhdGQkWnBpnZSOfgtwGOR3qA7bIK2JgqHwE6OkaraSAIfA6vFC82mJtP042kUvto+2RLtpDs0pTsrpHlOH8Aw+IPycJ1vArZFPBnxuhUyB8jdxuvm2N6ggbnVL2J6tYIIG3dkD8Cm1tISOm9ra9cit7PwJFaGPMFUrdCeYst3Xp6E2lkV6Ob4Nmzbl2Zh4kPXIonxEPspDBd87v6fBHHIrtEVBrfrj0UREHpiNoP3EgIbUDmIthQRoPbZ537ElrHg82kUzm/VlefUY038bFiQONYm3zkXn0TKYtvFF+EmLO3e8/Bu5ljcHApWmld8PaPxI8+mIOX7F5tMH8XUhZdJaRWaHfQf0Ztu/WpEWvytDahwY7OhMTey1U8jaa5Eznp5p5nfDwzrsaZiw6ZsMYJrmzrvy/26gxY39tiKQz8kD6T9lDU7uaYSWNPG71hlu0eT6mNgX/dMVoSu1apDOlIMl87faRPCQMuBBlsNGcTGfArAxnA1mXG5BedBjhb7NBd/HigbWs7OUtzAacr/VON+h9SlDN7/Cz1oENTToOJ0fzGossGoQDN5EnO1dVqoTGVs/mK5mClUtzYTd94K6W2Vm4H6eT+zkBJQvS9AjaBRgbTx4XdAJFhnNO2WalvnkKAuZ9tDVWGwXn/xKMRF+wlS3hv1l4vGzH2k5UDqEDMIPVH0EaFiN3UnBYriYaUeFUk8FQQGvkeiIuWbzbUm5clhj0V3b0KkWTAj0dArhSkYeY6iGrdmAvPBg1zePXR82olYg4K2eZIZ9tYbCwfIU5AfMzdCPfsuqPOyutDzBGMfKH7B546Z0za9PJ9WBfSKiK0K8w75fCp8KqjTuSiQidZ5PzfCrfXCymme01yyPmaGdYnVnhVcy2UaJdokS8MCNNEKUq6oaSSS7RZOficFTy9x91IgCGZvNIk4XosBdiOBrlSKBwTPhKTlShRS+guxBv8IzvPEVhCSd3kQ1ny/d+AhIddsC9bjMs6xGYYs7/LZjGPxejTBt7nL4+Wh1mZJcfPQ2qR5F5A2+OCJ9v5KPW1j2KV8R0MbHMb7y7GVuRZBWrr/soK+zjRn6elohkG6KOb0LhgpYksAYNQlCW7fwW13GOV64a0Tpnohgpoz1YwvNriJj5Uc/WVDa4PCHrBAQQBR7Dm2h6Gv4ANM/xUiP6dRoL7yfbX+gGcESTXdLEQ4l37x2JBRPwSr0xtpcBloFMrCbB2PYrPBGkPv3UciyU+6SsnNUnuZgzriL0FyDN/p6vZ6ZdLPEgw1jHSuaZrH6DCn2mYbLt3onNrDcsMuE3tWaerqYkxm8JR/5nQag0BaXNHl8UjOVnrZdSGUoR8iURfMRjK0L4E75DByG7euknyVZ7qoD2UYonycE10SBPgr0NWd3sRY/RPJBxYqZkwM56qFjoFQ6I1jI5/76xCoEqu8qVWXC9kG1yZHbz3xfl7I4ABIv0NJhXao4aWOlTsEIUPrCdxl6hbw52Gkm8RQICebxAfuzTEJIP+RnehTzh861eR0
fpLd4CBUlWbTzztogOFtB8grQ3sEqlsxC75aJWoxSVfDJ0RU5IILM4olbz1lvzvX3K1eYFgJkctYJke9NDvaZYZ8J9UDfiR/sVxaAtW9+gpNPkuXIuEcZ9gkWRMKRHf/2SZeZBPTEoCqMIINDiQgemg/BSDrRwGiwlaIsQsvgn+YleF149vv+CkdbXw0AIImImTOZ0lccU2n/RMy7zB5kI8XdsFNIL7HCdyXIjEdnwgl9W1ayqK1Kc87rIHQZbnkiT+a1uncse3ZsyjNd2MAE5TzMAgfLL2rE3bQL9tLFpAxpTpy1Sj9En9wcn+SItHBh3srs/Vzlp9keWcnt40QW/tVKyN7fsg6d4AajKavFD8zZiEF7SHrPlg88qkOXfLow4N8R6qEyeNYXJx1XpmWr8AHx7o65rsG8QYCs/74J2i8BY3R2BbTIwoUfvTBBviujQfD4ivyIo/upqJKGSTzqnlnO5PoSliQV8t3U4LN/Ggu6stoB+2fnYNtb9FUqkitZDZcCR6iZryexIvV2VbCY+n59VzNEjkKZlQ2e0GuPdZbktorKGq86iFCqJRnC5WjJ7Gi6KFt6Yb5bYPzaxA9vJZi3WihltzO9tDJrH+At2IN1l2VtLuy822IrFoXwSlDRFL7eED2/kq2HZlKX64eQ7gwsPtEDkEP/Pt239Kwx/frAFFSZGD4mKVgNYV/yAg/JxwcULNIsDXj37ZavW8uSNEp6lbG0GnW4obG3VJuhCr14dGJ4qx6fHxQPGLBdQeS1pY73UIefzN5wH7QevtXV0bCf2dymxmIdKg0btNYPkPneRhziFWbRpAM1jzabX8P8WRHbkZjU7AJsis1noYFN9l4FJd8NkWvGUCWzS0QjdjWiH2O7WiQn3Oo3DDDcvNqb5xy93UCQ9Ydp63BMhoHQYftleWpkyCDhFG13OsQUR3xhvDhy0GDschlYPxxuQRx58kfru6GszLyoNix031aJEGugTV9bu01Og91qmZ7sRapxGTGtnUZemzQpvhPlQvyo2QJTJDnoGyujshhJaMJnM/j08PmuWJLZi0ENkYZ+7dLZ0EFNQMNmun6cFTcOTPCCzl/vgvgBo6g8iuZNg4055FI40TdQvkg/pZrgxjLdtLJzqrsiLt606ZYyV1It6LQU9PVdyhmbq9sqVLz5smVL28Aet9Buy20DG+fY6pXzF0aFBCzHGE2DpqqhiWhbVeyiVSQqh7tmcKC5CVVq/wAkktHQYOQG65La/M5Ah2V70TEPPbtZd4ataeuNlvDu2EUVZ8Za5/hd6afFHhtar9ThITyh7sDUBWVZlKa38LLGdzqcW/tA0lYOIfhRjxJ6tDF7MPorUD+uU+nk2SwPs4mfI1fK73Q7s1YW2BFsJdDytUaLeTkP/22Gma31ImmPLdK2T82nZ/z/YusqtiQFsugv4bKExN0h2eEkiTt8/RBZPbOac3rT3VVYPLn3abBrBe4tfZIbn9kLAFTh79ACIUrz/XCQKN4Q0lvVZPUmSlPOI8vtkNIAQvioFVCHx1JuqUW7aC3s6QO1VlUL3U1BcGuKfOCmZON8l7fyrVZGWhiYJat8qS4t7+qbe7ONDnvVkIa01xamNWvYUKdFXwCEmQ8rinfgUcB9tAn6tZ4Vrj0J1xovhv9qTCIpl45feOvAVvihx4/Ga4jjLGIwlREzp1lFPahTTsvpsafpjLTjfKvjt239rOfIQPOZLsA2MRec3dAKfYCTkZof3kthaHmf1JEovO1qIPzj5FRM44HAjCWYRCgIWN/b48NZ+0E+P1QC3wyOPuZLrF6QwuO4RFHJZKFhQb1hTsuC5Oeya18Hms3YrX7Nb2QBRUxuilzqKwcSB/tkU3kmkiuESizP9x+HN0kgU8zSbbwABCNAhDB+VyOVAma5q23Hxlb+BPjAxakXT8JfIuajLX34QMEKDoVdm43E9qIDxGebTSzUQOJWLaH85/Pc7/D1gn2DwG1UE1QUPUYZTvPloSGg2QIvWD/
RWmYQiSIdqHpNxIFuIs9AjnUIn3f8uA8qBBe+r5qHRt08geoG7TdvX18SiJOXJ/hZfVAoztvVTyPG+xAHpxcKm0SpCLc9QPNozEcAt+vt96tIayb6mubZlnJHjR1qBle91t4Yxn5J+7OTNOfNuu/zhhCt1vVURZQamr6eWXoVbz6uY9tc2iuHEFF/Ce3NX8K9ji1iNcLGsu9x3DJMpwjOU7iN3963lgyNvBnRBQHOdedupkiCSNPlY+8A6CaOt0ed5R4dWFMMbj+d3NyJqAkiOkWs/GrjgOK4r5tAYuklBfIIg9qUdp9Hw46PzIyTj14jVlBwTXV0/AMlvRAXp66gJnNfp8NDv1WnKSaCui2sVcNODM6FXKh8mzarMu0D7mvHuFnQTiZsv3FJ92ocQfgxTmSPYmaTnCl+8yIItJX5t3UrUM+Y/TI+bZ3vXzLou+fDXUhXIHHLkmYM46a13LBVlATkyekSYEgk5iqfvnSU+W6F6cWeM4OXPF9VezwAPtLq72613BCS67teyGahPBSK6KaadRCIMyMVKfR+0BhXejX3c3bKtP/K+sATYzR+PLZlRna3bKt9zqv6rHIiQluY9H7hZOzCtuof4mLD9mtHgqIrJukQjhcOwGRDkX9nzbo8InF36yw6GU6eOcLSJncSgaExwyUp6JmFARKawBNYYmMTIPYkzwTRDVNL6CuXXd5rwyK9NtuW88jZn3zpC0mWS2TKJ2PGzXFkipCEQyPUQKtpXrEyM83c6kYw0SDqqSAgzpp573TblUg1RMkZXpI6D3H3An0kLgBOJvx2EUIVYqGRkYfZBcc5fma5lctJuLxb/lu735bCcVe1vdz3FbCt3sPIejjXW98FPQrk3DBLzQCV2Q822uqb7B0FMrW8ZDZuQr6B0WDyyyWq9FQb96KvSUpImqhvB0ejzmzLaOIIdHLY28Cs90O/pqxKU3C58nHXWPYe1nC8zYUBUrWP6CsuHv1oiXuiVvIKc2OCxZR/PSd5sMLZ9TCwP8nZHgmFNgqQ4F/nSgq/M9Hu6bQNi3CDfZcoobuMSTxzS09hqyDhRAZvdFzHFAFMkjKFAWtPZAPxHSznRFXSNKRkcZNAF7bjt+6UDynY9vMTFDXiHnic1lF4f+dmQutpTzPZZ8R0gt6aQiyJwPdY+zFJYHnYj3tk4gGGwqzDS0Emy9TTKrvnSr0WBtgpGdlSQXyjgvjzXkMYInjFICMbhHBuyg/jD8HZQWv5mD53QG+A22biRnYUstFdc0A2zzO9SdtTHKqcfQPhoFzR78QUe/o41jXYHmR0cDTu9fiyOev8gTXY4HCqngONxKkVAeUOGqs3ZASiLxj8gSHm5Nu9/Q26aYTYDfwYf0GgdpykRDUaMOW1iL9CbabSrV74AdxawGki40yxoa4u4k6metGFCnmlp4M+t11wpf0m8eX78Hz+r7pi9PIudfjnzkhvSfQARdoOVJyNIORhBNjX+RFt7RcCo28J/whsZYbDiZNsdn436+6oZAxtOqQpNgLIuKEfaUI3r1AZSJSuEoXJUjkMA0xxm+FDy93kBsOU0ugtEpZlSsoUqt89Wi0CemgTeKjJ2d6OryccaUSnrb39TNyN9fs935OdN2196/Gbazdogu57OHTeKDHd9Mw4fGEYeQGUxTIXx33M452z+UMHtdR2ngfRaGtjpcOdqsLtpkOsNSVmYzJuzzM6FtJFdh6UbLCRqvvtepYa6uymYZvY8MreX1p8ccOFci438FyzeZV1sWyMfbmabSKZYNapuYBPvy7EheERAPmQ3vUcCSGIuQ7nG75w4Q3rQCw3htOOOllRkfyONxL7ZIgAHw2SuG845vJMxob8RspE/MqYgddZqXjlMQjRCe2HWrz9dnYnH9taXM4TKeHRnpaanFhr5i0/by2+Mtb+sAcaq5t9eIXcoAvJfQ8XK+4WqleRmmnDTMrAY4REnMEgpY8ER7vgh9D3TOfxlkU2iu/PMJ5uiH7q0U3eUPaYeM8
s7hfuVgcGQjDaGminnRXGa6KO19vDzjT9PLx+gKbuDD5QLR6+Oc6XQn5zH7Z+n5dJeHJpwxADfL6wD96bFsKkXxcYpUV9koHyxYWYst9Uu0ZhyN+4LGtJQAsKKNx9oaVdU1MUdJ3Fwgc2M4bA0h+uS8yxKonYFl8zG9dOMd3ejCbQ17VpZdbIjVomU/IPdCTF4s5qfemmmrBbcr5djLfQ1ueCmuNqSB3SK5dkrRvUo1xb0nsg113Mz6kYcmzr/bcOMUqE/SvlYu7gQ6qsrvbBoO16QLLIcfB0qQf1gwp7bO/Goc95+lYXoROEZJWN7MMtmfzJ2bSeMCj0O2LUhQUKrvNgDm0ogQVhi3zq5wzCrJq2ASvPcJeuFNNRQf/38FqkJs3bv4ho58ZDOEwYN9jSi7rEyVcJBk/Ukv/iG8QfBfuognH/xkhf1w9WrjweWJrQC1v4Za28ksybxA6Kl5aXXpiR463N88av90unmMtGwS/V09DGGkm/jbNzubpKhvT7GjJOoD2K1U0EgHGcNIYPr017C2x5Z9547Dyk3Aq8ALsHm2Ptrb22o2EKJXLhVxfU9+uy79fLJ20krQZHhp8n4DcdC1MQdJCyb45RqZi1QnO6E9luKDmXjFMZqrigfHGNNXvhYWAFk2qjXaZGAOG9bfQv21Go37oC16FS53id9DLaMckwFSWCiLlwHI+FXq7WqEmEHopP8n0oxHakZQhso//YUhj0Ri4QyNQchKQv7DD7WPY4cTWIjPMjJsvnLbj1bJyS3i+yk54tkFYtM3tvYUL4oExpu/FE88v68a+U+gJCrLHo97WNcpwli4Ad+3VmOvkZ+TNyhA8Y7Mviok3Jlvz4CA4KVjrgCmcApvFEx3YSLLzMifTSVxT2O6fixKoKSYx3BV930w/GabiMHgm5x6WGQ0BA0Hm7g4/wSjCesQXbI3CmWihXuap6nj6go6g9pSQof8kkAaQ/H7scy7OzCOxZWczgzIf8WJmko78z6iCuvHlQp+KKpz8us44YPf7AYlYP2GLIJVe13BlTCat9ip7KKTwsNgmMrPhat+4fTfgJ9e/6vYzcFov2ZtrJk5tVnnV3QFq3R45TH9O73hy35zbo4PlFrzGGW8SRrCpa+o24PKblpNBrLkC0jA1twMDm/DtHFf/+nOtaD7orIQ5quPFjbNRH0oE80ublcw8GwVr4M77ojMVkL1VygeJpJiYi/ZU8/v9ov2v+1usM7gZq5VAxZe2h51co7YR6YMKohlHos07YVc2PwG9H+U1AoK6dXqBDYhxzhyBCjDsv/bJXTeUJkysNvE/fQevkbwNjw/hG8WxOzKSzG8nlCIw77L41H1bzuu7nS4svsTY+TsWkoO75r6LFaERDh/t0+tYweXhiKjJknFRsHxTjLlcUx8nudKrflM5wSwCGFrhxbjVUYXmdb93fTPlb7yKt6CF5ugP5UTs03aDigrma/tDSzSyt8es57NZomU9WoH8WRTv3bP/mhfmY6+X05/tjQKurt3tw1D6I1Et4O5a/IzInO0s5vqzoT+Q9f0sUgKkUegkPlHnwvWldFP+CTpdWp2+ncg9Ghz6xnhZGHYeftbAT0CG+id2CdK3pkX4hwSh6f+VpWuU9u/w12jA8CSJFDg5pkEABG3sSE9HDboHvYjJy56FjScda7u0bGki75rxmaRyvekkFywPRvgZe0lWBNjzyoTHYMEHlo2wFZOqj+qn5Zismv6a+aIxQgXxwjQsYlPxLsTHR1Z1HQkPxx1NLUH2FCu3zjudrQg0b4Oj96jLPXb4oziZjXPp+Ge3kZov9czRvZ7Yl1uWlj32t6RLfzq8/qonWzwfkYYBdnp1Vw8i6/QjKKJXwKc1AsDONT0HWiKXHsmiILEAFlXNLbYWFHKUmMMxbOAgg4ZnWAV+BPDQhX828v47fiFaYLjfjLa5HHbyP6ttc9Ch7Hal+HYAg2YcDwtOSrUd2AY/yG4pjcirCNXWUDcF5m+zKcuSFFlS
cKw8HKCTUyyl0OsMuJZS2hIfNobe0NKDvanxQbb32mzBA39rHoRsCKxvXII1TbGQFvZz1TkoyfD+Sj04Z2gh+Z6xvAr+/gkU3NOqD9i5ZmCkslO/229MPWsn+hy1zgGLXVLphTfM9PY2FvQRI5CRrRL3rj21fdGbxro4brRd88wa0StFCo9noKuqJFUWszIkXtxHm+A5zxfMhLOtGMMdFK4p3scskBj0fsCeGyIXMfG/PPqsKvI/d3owxLGXMd20TGvDR5aPy94M0DNeBLgn8xUm+YZLEFjIkpeV9gS+QO6ji2N9IrenOrBuKPmm32lL+Wuk6UPy5h5NZqtr7QXJKIQeJLJYQ/Jf0f1dujoAORODMjojMN78ouhC0R+iRQ5r2ewrhwnybzQjEw0FSmtE2nZcad3nHL4//6j0RHbWZn3GFyoPqJyTzCZLU93FxxVUGYbrWwfUFhDL7uMWgYSPBvVgsxhUtA9pLV73CIyW+UXAYMrbYOrA1voPtmPhf9UTsOvzDNdUVYVKmopPsbhfaUtoDqV8NdJ4OS//qnc6imN7CDJIuF/w1YmqZZ0iEBKaO7cYu4lXB39EdTgtaBVJbmLD1q+q/0YaJ/OJ42ZM4joMX8RyZrOJinke6+TUxd60SOG/kbJzO/nn68PRh1D6/A41qBolfkex3lccRChGoAvlYv/z+RpXAUtUWcjVAd2340cWB7dADIvboUTCnfyvle/NAnOBTZI/7cpE2zVIfk7+Z9ckzcDhNqZ+RzzAbSJ6fg/Vlr2lVb+5mfQ9pynyg5xL+e4vn18/piLigWg5xxDCcLrFEAB9aShF8G86tx2MLRy/4g5B07lD0Nr/kbmf4fHDV+hsnYctKj8bJ468GCfGRP+SH+Lv12PBaTJd5CvM2IC9HYJ7zyxJ4tQqqnQgyMdur/45c9eL9mF62BBUE5NjHHgryUOZnkSK3gsbfgU9rmYK8ru5XIkmWsgML+wXCRnEVGQ1d71kIsn/QW7UWgjKDqwyHsYp8/mvxVZFsQs1G9ESZPh1xfxU/UfyN7+t5a/99AmXoBeLr4QN4B9QHOYcuCJj7OrYBoEc0+Hy9GOTaCQKPVC8dwtQ1rc8A6KvA4njrtEeWIFa8IdsQF2Gbv7jsmMfpVyxHaNpO0+BfdoEkk82N0MQf7q8WTlJHdmfFklgy+W/mbVvCvbfEjlzGq4+AzSs78mSatwVf38Wfi5zIFYAkh/fkNWJNZwSH9U5E0E31IB7Yc1Hg+BT11wI4aWiGbeu91p4Df1Q8dNZKy96KxddvMJaCTYnrHsAKij7S2G6qKEocjXyApFQL7fpa+gxKFAHd9xCNNphcxygSObOTOhpbI4pdEj605SC0AKXsWL5PCyUZ3R32Izi3LP85hbAWIkHdCx8UvEwof1l0qbEHEFP0Rh7sx4K8N3vcs9pHLqUjVEr3VRmLFbCdBRSXMEECbwpdLKp+T6h0SoIivH6im8Gg92u0wnbvqrRX/G3JM5u7uoBCPqzAEMfpYJTQLbAIAd+zMwwQznKTPlg+gm4LQRUQWOFsnwcxwQdPIhcCQpbsDQzDLr1BbKEINSHy1Iw2R/iwyimgQE5h57kc/O9s3B4Wgfd3HaBQ1piAWEjoPzwWdzXriB5y5X9uaRH5sLFPGp80sBVGGHFuKh/Q9RGgcxoNbAQ10CjeEa0FehGvhjuw9hL12MjwqYwwQhf3YPg6uTh445nErdo6XVHRVKkTui3HW+d30GuLONeUmhU9pZ0eHz+Wh+4oIpuAf80rpH6RlXuyRmCondW15y5+f3SWxtM4tgC+T3H9pBPwJgz4GVdQK2C8lkR84HMw7HbGvzn2NEEjCOv02ybSTumdw4hAekq7d/TGQqEz6jPeVCKJxg75LMBnJG6S0RZkra/z9/L22JFE16rdVUIs3Fv3dSLanIsS4NHEySkcDEsgLxReaOSdCWoF10p3PpFnK03EKw7SvBEZRBLD3+KarTaMvr9F1xsR6EEAuTPhpqu
cK3EnfDDt3e4qOQ8X5xLJPdAFD3LZeqxZMF3zK3odNT8FMeSuTbE9vo5UMvoBKUhriSHw77a5U1xzy54eRmbSbu35PCKAVX64W4uddsm6MlQnX6vK5Bz2ErDoIkv7NItuGkSeGmfm0Z7ZoCM7BQ56kz2zfIxl6hID/82am7zoNfBi+ReH4GdX16EzFIxE+3pRbqoiWagg7RCnq0qkXj/Seu+iiSJTcegLreX9eeKr8t8/z7j/+vnXgNZJ6lHK6JBvlyeU4S0Sqqj3943Q0YSDhKCtTVBceDNdI6XG65GUdZtodIpWz4X/vr8X8KXDQIMatnCacXv1eHTXevsVBdwvv7INDq9VrHoLvLCssUg1m6RC1tfv4jQ9cIuk9BVUXk31dsCuYu8q+vFOah9u8JC74BkxUwkezJml+O3KNKCau3JDf575yJYFPL8Ekr0U7O0VSOB6GEJyTPhaCAkY6/D4NQwQsJl66mBss9Adsx+U6dmAaFsdEMm6A6Q1g6jh6LjlA0wY2AhzZHyw3mpKb/Lv4wm/juZoFGIDGFiuj183dKnfmQXyKzGUm4L6KsGPpuxoZAr8NNCn77JD0udA4kUj4jDuHnAu0ncXRMkcUImgf2iX7A3Si5sWWL3hPIW3hiQ7Lt8b+s1nre0l8q7xOAOpmbfu4jjrJg7lMO99bX/pxfg3IX/mgq9KaW8LrVMKBsROCcUEoijPJ7wl/A1qt+beMcgPXNYLWpOvGKVJuP/1N2fRVFtEZmoWQoIKQiDVWC9YnhpqEf4L5jZNkwNDyFHZPOUPUn4cj07IF2vmKhrewE7noJvq55ZmT1hSR6eVrT99H0tQCPxq0mzpTgbvKAzE5nmTiMHSPbpjnZ+05lQkXNq7v+owOmi1SDxLDl5+Kwg+uoCU7syjIhlfwluAMdG1DpnrmyqAxi0NO9IC8qI0BlOda/Q2SSpXWgKiM1B/xz4yA2/fMUGnUNcVbrH63qBQZrQ+s/zApnyT/bI5ihuGIw/PKXNczD3CidIT5xcCjRAonaujsfRvc70/a0TcKUCuFv5Zs2YF89MFa2jzPlXTfPSiT05Gywb7sqXUcdCpkNZvP8fQ7L0K/8ql9Hs0jrKiBHbd+/SzhuvLzSg6WTB1Lfd6GyNYuov2I8PhrWZnMuHaIAYbvh+gBegjfLDH2QDaaepYjb0Qoq2BErb4QZpwAr/jrjUYGIXhSoN1pPApQqXg7/IQrGn8zSctO2zYJFP4flQoXKqlX3r9u67pt5+pnsaT8v40d+lrYvVnPX7iz/e5E9rKlf/9W3QKjon8HsOgyqPSPMS4HQYE36WxyQI6m/AEjRcQiHKlva2198PpHjorF4ppKmCCii++tA/2vllsjFoys1Hjwa4V20Q//b0E89OPf3fn0NxMNJ0OCS449OWlMmHjUpbt/QriLmND0PETZLSGto3WosTZnDAemPhYdKpyP07A6rcbTz1IBjZ25Neih9ZSM1YDTC4ZHW836YuX6s5vHbtxdm1VVaLLgH4XY4mTdDkt1B5nYMoCi3dWIjsoHs6fPYw+RwwnPlC6UuSlTv5GMB5rHnrA/nC+WjjFo4NykweeYVcJbPD3NiAC3/96X4M5Jy+BuoOuR0WALXQkm6My7Sx2RiiehmZUtaK0IzVVmyenYz3vC+yaNrQZvzPHtUMJFs7nr18Ddavzu5WORJcOcc57H2povybSDFxnFDw2uyubJZd8q7Xv1e+U8W6g/oMLiNVmmyWBb7N8L321SrwJzXY9BqITiniU4R+zm9cJBx0cYPcG8jgHdEzvMQxsoge7MCDloJ+vjhueuh2ZOl9dyv+MlHVTRyAjK0loC0wMpgxFwlRK6uIiqNGe2jwu9rulJ62P6LDRKdjrkJ323EmzEfwSTsV7m8S0Wb3mQVWOo26HEAnEI7q9W5+Vhyd15sEYAyAo6aji9rWonhYQRmuGHAEVMwKZ8KmW/CkW/WSLbKbXRpNEoIc6sgacw2PjxfRObjAx9kt67wb
/7oOCTR6vVEdajkeO8WC1x7RZhw/kX+T1xpxC5esdwBaYFjOsGLbpg6CBFMDcQqhEU6c/P5AJEwYYwKY8hHRtVcKx1zqSJjh4cA8Y/Vp7hJQgOmJbpRaqaEtVM2yp8Le0XtuQ4cL+eHIQ3YC9MMVJUPWvxAy9KX74CVWA1W8KJmLNTwmsXxIWoc0gIzUeXdskIgy8HXlJP8shOLhgQ+PyhB55kVAQ/en6CHVD+XtThCuO84EL9gk9R5kBKwU0ywkwTC6ydM9pGUoTDLeO211BzRmFmiMw14rhTXzPvWyll93Tlnrnm6c69kj5foMMsuBzGqjCfCD6jkOds05nuL3Y5YRBiABqCoAhS3ByxglmXo3EiDsTCrK0kR3Q+9S+Gv2WblmUYLYChRlh7q51HYsAKvse2m5imx4M5Qm/QmKKjKrzVUGvx1V14P4cs0nc2d94oX7TmifK7CYbDwwmYhE3d0/Pudtxcac/8cnxarlT16b8eHMULSwP+NWViKe0zCJ6Lx0PMSWqdieAiT1b6zya5C9THSmgVr0HM53raSTsMaItZDMX2uRLXak8YaSxU2CVzvU9Mr1JRS5P9KfoQKBNywTVp36tWkhMLpcc3oeRDNoHv+PbpO6cLOsZRlojdZuNfxu4xLGHaNI3pZNjLknlWTXOxJ/J81WFIA1+Ddw24fKRYGWRhe8/DdzPZEleQlN5l92kA1d7pQgmxE1u/33lyOLO+BWgWPGbhA9THCqT9A17U55x0SjGLdAuEGPfdVxzstmSQYQxvlFehC6yjRaMhf3XtTZfedmIcoR2a/2W1RD+op0OjglhZFsoZYPIHpKjM5Xvkaf4nOgCRDWiAi36eT61tSTOvV9aksuKWQsWLbLtAUOfg0OJF2L1aieKvZQMokTQeOyCFPF2nwAfsTGBZ/KH8LJmirdrGwHXDtjmt7jIdjqtO3jrIRk1kas3JfSseLgrjIrC6zpYSFFq4sR61UZUSmR8q3rnzfd8pN+waJp9oDqxvb7FpfqPzZOQcvpCPXYNt0HrjkVQnoz01Uxhg+SdYQOxYPrrq0vd2u4Pn3tFH8FetiXTMdm0yhZJZxp9o9MGL70PbNCiPN8BdC8/dBVe0omwCxBZLW9EdU+SIYo1+a4tqC29BtSieJ2Yq+g3g9x8IZmwKaLZcS8oF6rhfLOQTxq/LTag0IaFPfxVzqH/te/noT+olD9fizKyhyEEQ9Lczq+CAdo72d/SdNhA1IJMtgDX7wUPhmtpPJ002aV6lPwBEyt8KQP+VslLssBHr2r4I7CzaYxzFuHbKiKJaALQUGNwWdlRKjF0P2pV2Vh+nNCpTnsd6qkphZKV5Hjzb0eeaEudfpkDnhM7lD98Y7p/hc4hstgYF0SYBivH1YlRzLVCjqELPRrEtG/VAdRmSDtwkTe5tdD55WaFZvPrShUjUpIKGGOEBuGZ7U29Q+Dgupcy61Q43Olp2Rfxm1pshAtzylH4JeRmOoSaMlauNWnAcxkSo3iisktxiuIE4kZyaB7meRUQzF0IvM7m5TBjQUsNGiUqpYMvv8j1jeRu5RHuibw/xY5Ucp7HFQG9vg/58mcniRjUICkZpC6SL2DTQbanlDzBuEnGq/FzIjKJjh3L7reMZCkHGIVlobvzlmSCNnv59EN+8zdHmD/hsSOefO/ky/laWGGUGC9cExUMfbCPkadSzAWTlOHa+5VOEK/db0PYtjcvITbfgdp3S6WiafYjrIsnKOfl+j1iUMJNk82Vc3V0Ms6naYBVzPZ+3BXsmqNnO9ZRqcLMlDf4vhb9KXVpylvz6887nBHSXX9tPi9ir+wcIwZ9nBBKkPWa0mQo7yNEgPaXBd3rOel/3Vyi00pju30AyLVdiep2MULCs2Fykyje6vWrDCQitGzqNOi70JFC5tqOuCxeqRhJPMFvjkgwNtabU5Ddw+oANzLPuHB0QKHQuSZG9iNW+d88HpNjX/0HUIQBSlY/g2W4Xa/f+JnqwPaIO32tUNY
45rEufQCsiU/n+aIt635gCpgOuWSVfPu/Htk8ToO6IBIrl1O54BrbqjrTixKGrb/Web7bLUtFyPdUnDAaf/MCsapQcknrj8W5Yo+ClB4JY/iL0nlr10GrNRtiuBbDbPRgX5L3ezrD3IGq3GZHc5ncoKFczQVN6Br6TFTEMChk/oZIwFW2ddCjpFphkX9ZVGXik1dBZ4Uvr66hjKYIRnq3mzYtvkw/z6lgkQewbLlotlnZxfFZRo41vRD016SCwHQ2RbwoWFzzAm6ovvNIcOhhrXo4F9UrP+0tws5MNmPybQgXVbiDlRB0YSKf5rWC4iwSaYTcUJcz/jpZDb1mZqg7qNHnb0i6WbYI+flJBliSFgpfsa0zVCgd+r7dWLW5eRYTkXBw+jH4LrhIYx+MPEQH2ZL2xMbgh0dzwfksW1u7gptyBdZLPxABclHAVG5aR8UjvEfyNONl/85fC5uyXwxB57P9IUHDfWio/FhXlf4N2xkA8cvNu44a9ej5WS9EIycIRQxUPr2skgvUBUFe1T136thZXyehREGTi4GqZf3Ua+KAM3lfPpRKiOOZsqiLfQASclcaPHD6sFkw+ToSPs5lRxazaLuBSBffKYNL79I36wn4wBD+UAGFWkesfLu6rd6P2iDQvkdNoKUVlKnWkM0HzCzdzq/k8tXY1RLke1Jl+u26MbRyW3whZnNSRzkGS944KA/v3ngv50dIv3eUa1Hhuy6M+J5oow0mRJ+hbY6ay8uiSE1HimrmXMyTKuGvY2aQnmuSagYdz3TTzqMQNMtussTuNGFIywH6VAKOmUmfAb+hH3nmiAL3im1DUjpfIAN9hf1np+/nHSb3/k22kZ+nje7pkMoxxnDlYHNjVSwXitIfiGyYDJFfRTyvMdg5M+t36vWzTOj2nGfZfJNJhjZqgKix6CTObiYQ86V+OcZKPiT7x1pgW8m1YLzPiqOl5cAgyb3ONxASIc9gqN7ma3sQdbnIYNq/cGxW2lva6IBCbhYFR8bd5f36DdcEl9MKc4zhnYa60sCWMiiI78msM+XUKnpApBU1H15qPo10fxvZZLvaiEyw7liYbJcuze+NevwUyzNsYA7iFa1oerWFFh3VWOVFG4PfgbKZj9P6HNDriH+BXQXyG3/DAK/vatWkO0DR2kTCUvVlT+K46WpfGr3whevKkxK+iARxPeGjGIVw3NMwEXRIm9smznPV0b9oBOnQtG4i6jRDhiJbPN4dmIQAWu8FxdbnCBOgF+rgxDv4jm0egF6xCN6d6VHeUq3ourw7mKNCbC7bmGPdvW6QweMJXPMexnmHjUmbkYEhjbb14dwHn/b5DsDeT8jo0FuHvGJ5pNMXsTTmp4qH++ot1DOZBxORRAYXSF/AnXxC3/ia5b3YwjdwjmUE7Q/EE830ztmFAFskK1O0qU7eV/poEw3eAUS1viTbfiAc/gIGa2miJAo9122yuHV5yMToDbs4UYbhKHafgXK1JGXtrFJrb+/SbZ6kFAQgSq9CdY2IsxwuXIWSGfthhDjIo3w22lxyheZwmx7TvQYHBUUvo8OKAMj1POY4zscHvCgY8RIiUfvVHAVX+VqvBpXTXFwXPk1QHEnFh7qjXh32xSe2Mi7GxO3bJz78rt347EsLkqIU+lrl7IqPDzUpAGw/xBKgQ00v/aTC1WyPsLR7fUQlHM46FCw1B0Bpt86LoNyElfNdVXTpkvYDb5CywSH0a2ifB8emna1rPUIso0HTC74VkXDqHTSksGkWTmc/QDnwstsIjMZziUkhin5dzQCl3zhJLPSaC3mb6l7jERmwQJK1Auu5e9p5y50m9AQtDOTjlv6mNv++Sovdw8TE5VHO2FacFjDCxRcYbBxiRvQ5woUWiJtPrA1470h78TdkZgpGq0bpA4BYdCV/dyTIpGUeDDPQYhGlE0FErObvyV7TuVTmBDifsvgeRlkvxgN5Bw5Yd+agL4VwHXiR21CTb6kbaBeQJwSG9YhT3i5bJFJjZx1NBEq
ZnXkAcMkN6g6JPPA58KCkGj0vats5AVo8YiPWlYx4cGWgAm58U9087uv34V+BVZTe3XRzi1+/2PrhffJxtyLP5eMVpvX9N8BRyFUwHp2SXs8ZWP79Ai9uoJr0oNDiga9CbGoKu2YgAGPOXeY5jgn6WnVp4Cr8qgvLZKuuFs7KsuLXUczIRZtEuJsVQ3UZYW+SxIsfmAAyGIYcGHvNhjR2Cwh29WgvfpC5a96P+81ylyr0/cHoBgnlYUO7i/jad/zxDfbuaKbzRh4UpWouNG14yIO31awK/pqUPvEBtzA0Re7sOEN2+kFRSFjna40SYMfYLIKqhxrtj8tZiCCouTUplGtb0kcNgzjycBrdBxhcEYWctI1PYnyEsiG0X4H9+gLI5UqdD57po2MQcfxYkCMGkXyWp/GeDqXFBadhfYEB2MnwFY6nF2L7a+EI/PK8hBq//S+Ce0E5MrSVl9p99WZACM+qgWykjx3fdspf31WXX8yxjHj8+ZvO/gsT/NLYpo5+aIXCGADVrgG/sMRdC4u4JtOZ0xsckTetX6Re1AlQ52btu7NRjnwxy7uArBJ+GGc7YTKeUsow6koSi7wZSWuhWCBgxxr63zAMFnEJ/O1trap4wUMzzynWX+0Dd0oXYDB4ByTCtNfMRSkC7MCSq1C+P0lTtkGujzdEOXJxHmwvWdYVqt8GvHtEJm/Sgr+ds/7qG/TkNLSaK+TlKtP2F69G540OwBBOShGvg1o5C7m253tP+2CUnQ6gDL2oJ895qu90tWPPWO0gynmzn74vHlr2/EDve7sE/X00bMS65jtNNwrlDhB3pBHPr5rsj+25us0Q8ebXrUkMcwcPpZXr4ko1NplJC/t/J8/79H2fZDZQYGqfAmr1ooeCT18KYpc8hurrna57a+Uge7K+PxN4b7kqFCVj6BZbME/GMR2YRdcCI1CIWEzoriTpaUZBYIEMpIJEtEucmMeTqrqLIpaZNJyUfqBteFOU4U8FiEixOagLWmPppuW1QtY0835Mcz2hyMPKTzZbFWmBjlqz+LQn1BqlK1DjbfVvAhFft6e4UuK8rWSkX4O2faG1X0Q6T3D7DNA7bv7icitBA0Pnn3HMWfNvL++ZxjESoePnccIEmvE5w9i1maR4XXJFgmpU3rlIk61kEfbjlNmtYCLw5PbY90N7zHEIDtcssAp6VV5Aph152A1SUZR0JjLit/HZq8qy0Fbcsb8r2q7aZBUhNJE2orEts3OzYftwsRZEX0pitUC/qc+50R5OhYWoZuIfFd4a3Pr039ew5vg9dq/dQhTmgCmgS8hcPogj3WPmIAUfLnf/AOumWJrc5LUuIwJ4BXa8ch7CGXPKZljGQjFHcCCg+dC+jwXERjnKtJc7CdS6YM540w581F3cTgu1maS1G1Zg6LaBvriCJaXokd6yN3V+fWGMwNj04XXAqlC7qeLvthiLDzAd/ssUfFrDsKXyKpHPIdoOmBlnugV5zvXxMsPo5qlmRA7HdDIvcZVC7VPrak4yCjA+4irHfRz0QyhKcS2gxZVHSxoER3g+zcwfExiTgEdIF1nHAPkaoJx/wLWJ/YzhPs0TAxBsdRUUPqPemXyMXi+5r/xaxq5XPIyo6WQ6E0m+yWvTWc7yXiAL0Cc7MyYzfs8mNE3JO7v0HE99PxPaMvB5nwl5g7qL0kfkW0j2KL+Uat2zQru3FL7K3qJfCaHwZt8bXZ7M+Zra8aLSLZ3mtRP5JHlFVUmUuwnXzMNu+l0lb2A7qWjPbLy3wQ6wlffYhOYsgNHDeUMjE8qWAMIVtIjgAJyp40GBumVtDiokTgEtQ/eNaP6Gr34x0M1Ca9Y3syJGJ9ahu1NzxVWwZ9RnrhTR78IZUjVLW8NEJ0agvuVah14JtgjFWw3B624DjdaDucIS26QizztYhygfT1Sm4ujXd8BB1Ps9Qoj5MMl2IocvdCnp6p3Uu0zec8qOKH4mTdj7qLlnHmniJ7gi6KR5gDV18xuLTsbbLzeW9Zn
H92HctQNYWtABeCOdw2+byJDHKYNscwaP0NygDGbsZJZB6jVQsEbbBYVa2Hp4DpBPrB7eV8pvnoEzBnkGEyzjILTQFicwUl4J0ElDuJW0nyeCl2SXfZqIeecEV3D7m4Vk6zEKfiRF10WhAa1I4jX77Ombw/JyGtbBLrmGod6SYqJN39KtgoB5v3V2CA2XTK12jA31vS8tkFX69SHWllJHP20Y+7evqOL/dqZKFGcjEJWlgJc25JxH/iQcn4ZTR+MzNSyLYwNgSBZU9L5ywar0khK/aIPq+EseCc89thr1lSww/kYlDMmBzerx257lsI8yVRxQFDrAgBf5NTBS63huWELtnwROGG7VsaF6Z/f28Ww4QqyUtdCzeD8Wmc435yM8XNCvSK3pI+8+Hhflgo0YYHvRi3F/+4cFz4F4Z5+vrlHsAH6Uhm2xiBFf0vFVrGqYsi7W0rf/ludsP+H8I6yaiL5LCqyWfTnChlTdUbT+mH+FF/TbGruARbkV8wklIZdJBJ+GO2xvx24jkDPKHEqOtIrjDiJtHCxvISFvbpFF4b0Z7q0TiHrVMOh7FEhVaAX4labC2mgCrz/Hwt8KxNfCYL+bQB7e8dl31dGdLpHit53uMoYwfe5wloinylpxuSqUS/SmIQna1oHyTUV+o5GJhb3f1YfWjjtH3v4W14iMAJZ/OsorbwOcKHx/xPPyuRtn+9xzVfI0sFQ4u8TTkRK9aLQVVnUaouHSFQpvA2z9oo19tH7cndjn3zXZ7ndNmTPih0Mm0cMhvQySN6ay0+rFXYTUxFjSQyHOOM6oqe08GluPw6YBw3WqVViEkYoHKJ8S7aJQvb6o3DiG9NsvrL908C0WjYFPG4WgXXan3Ud83H/+P4+POsA2ddzBTlfFF0Gol8ub/EJ+uKWRj/Xq2ggh1wJk/VjB25bssfe/TWOV8q0ohq3sbFGJrRdwN5pirUlAePAKUneRQab7VefwAMhsuOrs3gFOyMpyTkHLoKKt1PCXNQF4hQmGiWcrodBCHDmGUmTL7ziPlw2W/x5frcXXcDBYOJ4pwSFUmicplmtWQ79PJHfi6eQkpteRURaLnt2+bkd22sen6bGT7+lOLiWHXuZ5lkn97yzBmkUm1F/7hszD6oQtLjDduwwpjbkIqNmPzLSYgJtwG2VhEGkY6TucjJpSkv6uP956vlpNr7yWK+oHfCiBufAH/7dQUzUezymBEgrwizQaqvu8E42qZutnHByerVHuN8iKORTOBGH0fszEm1dRfQEKN93dcOoAl20VK/XLKh2z8HDZjnqM8Lxi7d/u7eO3Gpk5RJBeuz4l/PGvgN5m4eV5PWp809Aha2NWLBGnHNcbNI08E3dZ+xWVBXajOKvtZatMzdb9BfN4gtYnMFNA2D9I6zAZugW46/8CBdqrcmVkrxnepS4IpxHIc9YS9tTXw1lFaPh7b4YG21M9Va2XGPQqoC3Lgi3qI9gYQKyo7uuNqQq0ZMG1/RpGCto1rMLMnDqGQTun+W6u7t7EdZIcMtR4lWPBtlOi/M0pbvMg3clVGdaVjQIty8GGY0HQJ5UCQhSof/tO5SpjuEqXApW1RJqOA8g8aKOY/2C2+gYRs6xBUscwyl8M02tja5ToO/d6ejFROZVLcPexmYTxXbvYiCc+pF1U2IgqJ7cOt0xmRitGBcujQfFtU/3tzEbr5+W51/AZ3l3RrYboQuskFByHA/blR/DgyWkCGkiCOvuV/QidMMxOCqsfqfSyV9D5vOhOJ/7cqx879/19Ow5pPo7FyHLuuZ86JqSSSsxvfRGzu48Ii8zhRKFBm8mkr6MNfV+BtG39WW6cedZUKSbmmOoPUOnTU3ModcLIZQXBHyEr2t+CEj0BvRRCS/M0hJQOHRQwSxm6tPrmY7E4eYBLhf1bl800j8Cxx4tDgFGGH61ZNct1cd94cVXWvAqNxMnEG66FyKJ29J6LYRAo6ijSb7MLEIYxvm4Dh/H2QahPfZkN/zq3wkMQogm
Px9AlLx1j5L+N7iaA3kCijg+Aeue3QL18lSAfI5kKl95lxByERpn9uLzTX2Gi+27gL3DhQFx6dmdXwC2x2M+MPZ2uQL6rTOO3jR4CfEjYKZxr/7ez72UD2zTCSMXoH3L1Le2qPiaRhAmL6F/Olj0lKJR5aJ2gfe92mlY0DsA85w+ybrLpcwJWmmSN+GpXh+io5jnUO/Tn/rvdyIMzAXUTmD81SZhvzB1sTRmiCKb8ZKcPG6JW7sDrUFfUYIVfGOFg35J5IuLtC0iOQ07I07MLtCpbFDJeM36bRmupK6HeqY6mqHG0/JYrF6v6jUWNpFRtJMutLv1WX181+7czfQZSo76auQCdBw/sz4eQPyn6j7d/vUKrT6LBponrLl3Vi14Ce+fZt6xbmsuHNwBbhitqVXd+fiUIiYJ+Tcg2GJzpjQGvMnJsv5/TDpPs397ufVFtblCsvh2TYaJZOJGh3yjJ1wE4/AeRw8dQnDACuhP6oTqg4F15LKZf8XzHEQTofk9OIUXsDP+3y+61ABjE6C0SRP1YdY9BIg5QCbof9Fs1tfTertzFLzcSmbBb4h4YkNBvk0XgXwzcL4r3HJnKZcc4hLYwWX9X5ZnhuaoMvjIS1CDkhtfpxOOfr+kzc9G5pPDmOEr6UAicbn6h28ySlVI9uR7AEXAiMfzPFtgPTmJeD/gNoIZM0BNKKHKiQ+UFsTOoHgivy08hKfY3otbuOPjUwvBJc2lPQ6IxiTh45G44/q51AFf14lgQV9z6Xo0+03KW+EGBSdntHsujQmOjM0inwUUeRT2oIxWFWZk2NSTP11djU69A274f6ucLYNXfWvbnd5kHnR8vw0qno+PwX5kA4B6YBs44TZQxG3MM6CvAmnHgC0QQYODcQG2FJqNqU/vNi/WlvE73ibDunf6uzZ8+Mdb5KK7B2TG+TA+8FWpsnr9mWymrZFICpL5xIO70hHD+1b/HAQxN3YcNgSW4puVPlNq0bbjng+SO8WuhHAn/Rb0+rv57fogFW6sZaJmIbxWRefi21kf/PZJMPRBMeKTe07fHN5lDOhHprbbn3QCPAu9d/DNPHyzQ0pN4u1XP0pvLv5H5MAJBsE+T3tJR/+9ZvinBU9+MWADPQb+PTVNB0LS7JAfVtXT+hTztfL7+ixrDC3lbulk1OqRisUnUTP+cDkni46V2Qw1JgVujVdKz39X5sNyYFE67pdF0fvu/Pd7sD50kMjubP2tiy7eo+BtcJQN6nWjXOrmYRKwFlhVqGTeP51sd3myIK3aKJkYRHGIqmNsU70v9YT32HEqWBGsof5uH68c/2nLTkQTS1OU+hdsxMHoqGA6AK5lQGTUPEI2wDd8NzNkEHOQ7+plSVuqX8vb0ogIwjpE9zm8avagPMrU7/n1/JxgYXuFLbVr9/odwK4UBq8+jUe0ib5nApHnSqlMZLNvkDkpL2aRwjwtHsIZClAGWYj2U/U3Io7YVcJvQf/4j0bVdmPAqHUaajBxh16zYiZp82H534YvnexmPkiRR4TZhnQctyNvzyYs3Lb8y6wxXtd+00ULl08TFYAQCNI5kSSp493GhZQ/FvlZxp1olM8R2TzlBrg81OfcR5DjgxyL91vkyNWRVjMvwwOL6gvnQ05ua3sJhlzlPjPNH+Bgr6h1DW6gcycVOuNLyTFDUMuqvGGky6+MYd14B1xYJEZ/jW/B+mcafUq5Hp0+2wiCPtvrMIFRSe3DYG6zXhnmu6p0E7PVpVshKUyVRpMIYfT49UBeY6D1R9aQdhvcyxOJ4jUwheMFCxxyUgQTQ8ff8wJ9We7q2n2GaMl0d8VYn69Mqy2upYsYjaQciIKXrFXN7TVJ66hSZdDVuxz9PnsTa0u4TXHPHbyzuGKZQ92fnXw5QyJehvsqH+C9H2fjdUtE85v4U57mLYpX5wPeEcBwWvvHI15dCW3QiwmhovAgvXKzvaU4OZeyVirX98UCQlmTt2fhxU/YAyXew/TYocPxbPZ/CUVYlx7hc3Wu
UAtXYWvZOkeUmbvfQYLdCtrh6WKxUn4ULklysMG1K+Bum28EVx4aFMk5pjTE/53iY/JEdsszUhTshNeP73e3Kj6kmdwwTh2rUM4wwB3v9Nl9lnodvS7BxMGSksqwP6QGfgxf1Q/9gl+npqBd8/lgpIwFOzcu2UCB74NrJmznLv+j135+knTNQY2eaX67nIeW1io11S6gbb+J5Ug/kz4OAJDIJEUSZj2GrSy7DlJMD2ELugYDPoT6U0bt6BE4eYG301DabSVt7/geWqrSe3mLPRF/g5eGXRqY1T+FQjJfhdy6U7/m5M6ddabFI2qP8e2RCBBpVqRPtOXb+gGAQBgUFalGL527rFpjmyaMfJcQO+lRVUQu3BJsex7uEpVYo3p2H0P5Wm/PDhiUHD06Tp39Sbv6nvevqchMJ1r9mz7n3YeaQwyMIhBKSkEDpxYccRA4i/PrbDdKsPZLX9u5od73XYz9ICBqo+rrqq0DjWmdVmQrYbnKemYrVzSM2SeGMB/yKEHXcp8RmZYx9a7Kp4KMDRAlmwr64wM8RUzCsvI+2gDlCByhhUVYtXDjdiz7qXXC+OmM4rlBRrkpIoSFl3aHtfZqh0KEHK47s0DOI5+gR0j8ptlulgByrwDkEon2qFlSdzxRd09ZiuZ7XaJ/dEYDtBGaM204rNNYm1B6dngJBoi7zrH+FjXasR+KgyxY+WKzZAbC+EeZSl8OKkbhDdTG0XVuouzkCEJnt0PqEGr00xvq4l/Qo0eX5ua6a9CLSGHIZA0cXmIRMJpEV1G0F4t2ilB3ga7ZnoSj0ya7hcPhgVq5lcj3v24y4ayQCLCvHj9y9z26JOepC7ggLN22wSeqRhR6ck5zJK1yhG/UwL5ztsRZSZzIWYr0mUolbSK5GDHRxaoFYVnQVGc0UBKvd3AF2eckbB+60WIp98Io22ig/GetLJxGnsLFGM9peaygzIWBTVtLOs3RsKFyuTM6rIVEme0CUynRRHfXpglw6IMSb7sLZZlMc4+5SjlyqQIrRTFNISj4FgBEknQ54fpphfZKvGHXc/jAagm0hOQAyMOLkaN4vpgPDCpjev2Rb/lKet84YhAV4EiI2JiCtS87zYKHr+9jfkZZ4aBdqx9kbmmd3vbfnilFPK4RmfnGmNO/NLQ5wp570kMwaHRvbfJ57K9Gnw3mZXYJdJXMXJ2uDabZh4JrFXgu84TKYrat1S1oDhwBeo4A+l6NmRpzkEXyVDdFUB65fyWKCVHzgMki5yHaT+OKcO5ZVt26OoUd3E4UVjyGixkJO6i+3Vpwth3wj5yRreOeKnekUZYI4g10RuOABFC8wlw3Lfj1KZdYUeaPBhoXzJZXsWN4B0388GKqqRnhwcFTf3+MpPSdmhD9MzwnXQieh4GpCgPloRhq5qhCc5S5r6hjKobGlO38GF3ceoyRwtwf7KJe6603EqkER22bP9iU6g2BUO02R9aBxn5u1GiccNjOrXMWQJRhbmP0/O6h7TPBl56ijIly0Szm24MCTLvBSXdu5l03EYfKWoC3NQxCqiEvqoK4Ke5H0jJ2vl3BeSkHiuWLKLkeAnh9YLDkUPLqGz8cAhdkYkIDowWJxzwxhbb4CxAsztyQ92ZSXZkPRiVKw6hBXlxy5VhVObxG0Ro62CgtjPtK58dQJRilNWzvEzn0ervMA068FMW3nfN/0v5+5S76JqFRTNxG51pFjXBLMZtMPuwaCBZd6DrvdkjzbEmmnu7zEV6TAHoXjpTcj4Srtx+kfHgOwG0tIhgqYzx2E8QoSlj5vwsoqpSYpTiy4nguLoz5qifUmiFPcAufdhZZITsC4s3Gp0lSH5WV1posMnGXTXM6GLXK0uQe3so1VdcuaaLIKTGw21szeYU2BIVmDmUXMZvbWURFv1MyDw0kJedlZ0atjmVVS6G3Fg7vdiq1t79F0fijC8RwRjWVaLROvwapop0Rh1vG6NsSBgHs37ZmLje3MtGAqRo3CyRjfrFNRwQuY95aJCtD
ynWIQqb4IOktDpsTcsVJEiTxCZ1UukAIKPuSpmFIpYhuv7t/u7o6hXEfQBYxX1IyddTNaDztAlwqPlZNF6511o8h4KaiXGr4s14c9lQNf1s2NfLVBMCpY1+wyqa1J6q3nRT8J+ISnZRsQhKlvLNSstWKE2rNn37Ux2JIvsYAUySAOMzbboBbaJptuGNX2/ON+a7rqljGUhjUjvYoqZHCny7S3o4FMJ+czd4nZdm96M1Ovdow8JyZko2e0ygnygXUmOpLkvBzIgP6bEY8HmCewNXwhgtf2Y83Afg3GyWJZY3Y8FtkKHGuTfkAcNUwqg440aZw9+F56kTuMzMa+XEsu0RIivTs0pBVfsnNzfMtbCbMg5PhAbKZ0wy5z0SoVEtOwZj5fXrYzEOFnqrVTiWaVw0Rj5zPbnOGnk4luRFVco+QktdaVspyIw/SR1BHNGZwiza1lfijJiyZo/lmqFUuA+QZR3WMImH5URuhY2O0BnXHHMKsWZJ6zaVgnPFaWxjY+NcR5U76MADuWlgIRh+XwDnMcVaf4iHUrIdt3NXKq8pY5JFtJW6r7ZKEG3IoODc8qg2RCyR011/nMniZX7jGWm6Q3vPMZkq0DOd3PtDTXrVKORtUYevA1xlyOCRbiF5ESbHO0nuCVBXS73ZqkOkq1DlylCK+yQXx9uMpkZEK3IRvRBhMWsGpjuzSqM+sTmL2svlHM45qz0crwqRbflYtL1k45sW+vDouxZLYIBS4FO8N+/yz058OoIkeAeTg6usulpKc5ERP6+WyWFWxq0QXOUsmoCsYufdLGSwVD8hLIQZnWSdRNZLo4Z65EL/PxitkNFohzD31+VtrUp2DDWtKxEnYwNJi1Aj21u4ieR95O05bN9LwWGKLEJ4xS+8b8qNvnOWf2dmdRVdZsOlQBlIXrwSqAfGm9/rUWhLVIc5uv0MUWznDFcZkiGx3jEIjgvIdiyQ8nXjhvcYWgSrzwLwc0WzGZvDkd+xqROxP7uoIWdp1rFaOgaFe7rWWJPGWnvq5faEsKRP4Cc4DlMUSRXI5Py8nF9rfzq0cYAs5ZYMbmeoty9TS/XFxlfRQ2vGAb8WbTtRvqiE2xVDvClAd37I+CL0/lxh4XoAruHMb7RYKvg50M2OD4uD92qlrGzSLOm3RxQnalKZqbIZ4KuIznaw62RNhrdzdJ65OQkLR6audjhtt560tZrkPntEjx1kbTrRouhwOPE5LWuVEbbem5sQ6QLmxBfCg6GId6UiCXq8lmYbGrfIYdNlmKe8144DsyBzNf0wrwbma+VutIQ9CF1U5oTqsLNNrHA4aQCZAMQBbjDcDvuN0aGOJ9mK7dlGMikuXTeOiPUkZRjUgSnYUbdsaqWClCBidqw3XWElp1Sl5LHHcxYJ/nKrCx414Tl4GIn/XO2TckjrND5rfR2arban6WG40SOFZYLqNkjlAHpE6pGToyy23Srtjlfj8HrsowzsdIsWKqrJH57CCV02K36bUocDmE6tStJxiVJLM+IbdbhbOuVVlJIETc58L0XDK8cLHC0WTDO6yxU6Tjzhx8gnBwC4Vw67AzZSKyVyq1ADTZGq8O4kwZyfVe785W6Wld1a0FRVKUwUnNGSmYaJy4TcomW18ihzWDM9BDtkY44rQHHGAlyNkirYEAl0Kd33wQNy85aN9dcFOLk2SvabE8mUcSVXF0E+2xyNtISCS4LEHmxy4olpe8QwSAQ2lLyV6/bED/N9q7PVPcLIL8M7/D7nendu3X8tqWUuaobIE0JsF4jq09dVZsjP06Qbplp+95qRTc+TWuEGvADzKRvvRvgIcl5/xckludmMs6XKWAh/EUNj6WhwTy+FGUGPou5OONe3KZFbamcC/2AFdd0Z6heDOh94zC3qOIPTcZiZla5tQMQ087tdQ3s0ndv0xSYAlsBXRJ6iO2X+Es6/vHLei3zR3igIiwc7cOiPSgTgsSF4izuhlq25Anwmsu58EKRep1N4pcp+ds2UXTZzXXTFW
gyoo4qmzcqPHcA4YSMi8ptYl9dA6PB5SVD9Hcz1KZMUAk4LSQvfWNi9DAZqHhh4XJK71j6StFHN+pnYChwPPNtSzptLXA6xPPp/eXUWooYbzSajmo9obnK9OE6EWrzGHALXL74jCeODBbmpNAI/h5WTr4aA6Qu+JncYILB79wJid0PtnMTGI0ZEY3Izh/14Lhxig4a7cVA01xtcvEaOBxm3h14MogutDTaxW0T9lM+4ztScmbMxALFuarPeTJy2kWnJVhFUXUFLyKDB3H953xSXeR1RVTEpQpL+1ORxVhy4yfCtoGBquRJrpHh90rRGwRftziSXzLrnLrAsZonGIvM4FgLov5cTlTUE0yJyeBFE17CuKcUSy1hmYWvhugvrTiBnwwMDEL7OKJqqYQZOXFVKpFFJ6RjS1GM1TmBVjBA3T0jHAaKxba4J2mXDCFuYdZStrn8yzfZkVAB4WHuIu1qND0RgWWVFaJbprONulpQCNfG316cRQt0xNJLckTr6tyo58bxTfAzN0Q2wlHU4uu8g+bPVpvmkGkgguXRAIfUsA47W1y6SjKb2snh3engLtrsO0Zbc8SAE0FDYB1q7rpPAKTs24HkLxgkU6GiRFiGq1D34h3HOzhaIzE0fP9bNsAq309H+fU8Dp5xKDqLKFDstxtPMSuJ1l9tIEjw9rEQ5HzMpB9b2UryNX/14BV8NwYrzLnoOLBpGvnJ9M0qBW6CMq9IDQ1sprojh8iyqUgTGUw2VPBhbyBy0vytEdo1Sp3x+yiC6EyXeNzzD8YLUzqBgTlUlMLH3G7ASjccGsruoUli46pR1nEFtOEGUN+zhdiHLjLNMIFEfgXG/WbzRDdukuuR8vRWW88N6fMQ4VShux0PEkQq/kmPs0AeRCmgjhN+cG+KxK8RJ7fzw77pbW2Cy7Lm2UoCTlyNpcbu+M9sliml9ZqbUpHXUNKj7vptfuAk9yFCyy2PFUuS9TZ7sgqz5az4FLVExnIExDjFZUafpozdojQ80PDDPO2rxkInDxKdMCcAJffaYeSnVpbY+4EvErI46poJhPJJpxIIwSZWuCTa970zPnQz9cxbE3Dd9Z6IrVZ0voqYHjH2Xq0pSaWgq+13Nq7g1QS3uzPdp5njHqAxQFanGbZZbmJnYXeTmvbnghHY7lNDqvEkU7W7LKbB814OJqbIDDhxHnBCT3VGFuAgCMLcElQiePKGSnSau8qm3hz5d7DQSLvDBrsF9I75WltROMSmj89w6GXkfkk4D2aoPsXRCUMOO1ucTitYTbSrYcTHyEVq0fHJqvZ0gFOKFwIhoHUw5PJjXQqnK0AazP8VftrvreGqrwP1DVsPZQNewqbZDIrcZutvRRjr54fmHX/OAbMwi48mUoFuILLMD3k6aDPuXZdTyFPKl0LSSks0ph3mqRW7BVXeHVEmocNimNv85iT+nksqIwfHnLCicestDfP7LGAzm0kHbx6hXUZPOkakEmX7vzoaqm4I18DDEo81daXnDqJ440FdCHjI1M7L6AA4f3WkXVsY5hBerONsi5Ai7rnZk7Lk8Ze7PJtkyXbyil4GhEEVFytmD1RzU7CODqIq+KaAQBBllJxPDdqQvUgCVicq2neVEgVt2nkuFNrUiYCiGBLgQRh8VhUrvqAFQeFK6jTOZwZ2zabERk0rJsY9oi6G2CtMMyPeWNn0rtsI06Zq9+QawFKtZg3y9OGplqqHhUlMMq+JvGRMulrRwHMkWxa+BbCHX/LVskc0yOXCUn9cM76emB1PvWL342CTqxWrSZVmLthTpGWLYxFcp2XM3cBzbHIS8AbBmkcxmQzNpn8KIsymO9AKPTBOtJ7G9X7XC3g3Se3P0C098udZOLLjNLmArAdJVczzSIvAB/JU9hAsWx1hRiTzvSaU6+XNWwOEnx6HmOIBcwVu3Ndw2xYsXJ3YKILQc5Ajqya/lJEzKvu6jWUiT8OLrmPxQfc2Xnjjp5aar/KFZ/rvHNJ4r6AmIEpDLzwcBy
vwwDPnfr7w40D77Tp6hJtgC1hAIPhFMNeCCdDylzi2LcGYKFmZfXV7/PuQukndLIuU0VR2SqvCZjzb6VjLa4YDJGsc9ghhOzKJCQti9t8HCcqnI+cNl4cMgkWYXaTysjkSQyfpzkJU8AnunhWY8e4mqTkSumAFdldrcgKdvGCOFLgHSOjfFhP17LDBD6CVk8cQCj9PTGxtBJFcCRAzjE5vZ5VVCRYHebGoYKA8JMt6iq5LKgNFeYzJ3JLRhrh2DEIBMhLVtsMWhDKvGYopsIU8lgApMaBkVG5z2CvjjbBZCBq2GxJtNT4YnuqtYSVDG7IaIrjM+xo4nTucHAWkNNYvrhDL1GighCymh1HQ3dgtoMNaUVAdPN0h2Xa6tpZIU6G45nlWovVA8KmSQv4EawNTHjgO8SmPY8FOVonaCGlt0q4str0MppyQl8bPkqER+vw/X/jmOGIswBQBUboGubU9zPCZ+ISffCYwrbnLPxIxA5lRp20XbrPva1CEBC7S2FKhxFxPI9gAMNfJStz8CJHI2DdsjQugYEerUYuwzrYzIDv3By7Ys4M8+laOQLjjwJds/14fZpvHX5+JK5DCTxE5dufFBdEULx9dVfHXh63P72vE93++PTLY8/P2VfsF5pw2angMXLRimdviNNnG40U8/PMdd3fcKH/Dyw/kDqS6rkdl3ALhnXi8eIsQmemNWm1EM1JEqIv6LDfxc5Luxn26zfh4m/4KGokO4nsEi6iilx/JdjrIe31O0oN32sfBIHXERBk2ObZvutdT49TrwgzbNaLYZP7Nnq/ZGN/TliXakZ2GN4uof+MIb41HJN5pNjyqjVZzPbJp4thV5fTCyze9Lehh5V9HbvyLXvYWpRteN0KbhJcDu+VERhfQMHHosyTsz1KwiQHW+IkBnvygAWG7zbpoe/G4KsJ5GmD7TwUmW/qIXf9IfItC56Grz2/tLepbsJz1jnAIQ6dcGzZ8BYQOHwSl2M98kMow7GfAzqBjBLLfqLayJtKrmojMeRebdgDtRHIs3SG3+nsTl2Fp6fwox/prv2l3t6Lv0zSz7YudMMO10nhl34CfzVAnJREYIcQ/sDr5tntVXJT8m8Y7vR/YJf+ZFyR2mZ51Zd+++L4DVQif70ewSvLFMiFG3ICphVjr76ZxI4PlJ2/mgl8PMzS+7Xl4XaYVtfzPKmLFz22XqrYB9db6OGLbsLrLF7gTi+FDc0iBpvixlGS2y8oxrymsfsOl+CSRQr+ew+oRWX61gCpuEjCZ6KKQulX8gtc4cw9rpgHsGKeBivi27D6fTaiX5mvnwHtWbJDEQp5J7zvnpRvGz9efOSd+KzEdOHjZ79M6V9X3POsKfWTwJ5m/o2op++kV3hJ+uKECQDuL+T/dd09D/jMTwJ8iviTDOyJsGfvZKebph3auV7a1stvGBVCoMO+XMqFnwCdqCBTebHsAgL218z4q8p92ry4Dfyvnxcshv450T1vXhDonewK26xyv2x/Qf4v6+15kL/PAtyp66eLKPFvRpR12iVJBEzyxQ6T1M4/jyL7sH+MIti/OIhkyXe2k2TugERh9zi6bft4HP0XMxPfxpEbtqmXvDi5DZ+wo2ATNECTk+SRHpv2ixkm5vkFnA8wg38vmCiK+hJMFHoPJuYBmJ6VmiS+Ix/x04EJ/SaY/KR4QdGXHlPwO1xhd9zHVF/FDtL//YPYYW756TdD9Jbd+hw95AP0kM9Cz3065g493wTDN+F0x0S+SWE+V9cNvVHjAmrivepdlduvhZn7afkJHASPTRMfDi1ewBmK67V8S813cHyW3lGEfad46p7KEA/UTlKv2NM0/7MkdKh/XVhL3GdzXLss/dh9KUod3Jf1i8X/Ve09j8R/Rz7n/4u/zAHjssuveMwbJP8pZ/m+8vPIaP69vvI+m3XLYEEhfQEhKquS2w8vhd/ZvbYQIKjm9x9vWa/SNr0YSN1t+zEASorbwOBCh7GHPZ9gV/JBck8yK/Cn7fVykWd
BBQD7SyuD4Q+sDP7AylDPsjLko+zYR2AltPU8Bp4G7JDbRVLlpv0LLD8Ilnd5JfIfB8t9OvBjwFJUaZrkcADgE1KgSkiOf4HlL1gWnH3EX9i/Eyz/xSQk8k3+EunpS5iYel8dAtTDt16GG8bGsGFsLOvpp5dPi+sen7Zwj089Pj4h+GcM50sQfxFm9r/sr3rF7gPStzv9ZmB5lW1oO79PrSe0XKHYe4pEYA8SCuwDksQ+iySR35HbtGOLg11Kv5uMzwD6LeHeKZDt/95ZBPTHhG5brv2HIv9MnOQD13DbltshwN/l87Eey/h6hjVMW/yBuXnrqruNMXj862G/q+puJBT71kggUAVB691IvdLfbvwv4OCnaZN6axW7mfgHccbfnGAg77NytW0Aq/yr4fQD9Pa01AL5HSm1R4lK4K6t3hpCUVp64b3J9UfU+rkvC9/58Tsdf9XRP+YHSVWGfgzOHsefufe7rO67RG3iOL5pA38em3ZaFq83DP9zSQUUR4kvQEMg5AOPST4gdLfo/+NRc59N/Gc8Jg6XW8nbw+dfjlDXrzR5+y40V+UP39rrtx/T1+B6/kgk7L/KLRPvLA353vB/r1d+PxD13hQ92yl/R+7zFzn7XnKGv9fenyZndyM9GwfPymSC680TqzLLBEry70o0PJnBfJlpGL4/gviznBbxFaPxefGQ+APcf7jHuhGtD4ePbl1gK4j1CzofBR0G/SZ2aPQB2/kI7LCfKD9TBFM8KpXBT2ktOK9fvqNr9D2NNNIofi3s/ALI5KdSL85fqvwhN/0ymYM/SkY9S+YY/j4tQyD3PULko3L/ByRlHgr9v5gy/HbJ00kSq38Ozsr9+PxyScKXXp1jEoXyRO+ygje4EE9EB06/qz48iFrJB/1j5Af0j6mEsl8w5l5jbQsx5E9z6sQ8nJA/aMvJR7b8rbn/tmG5UsX+gDfbbvy+LyI210oFuEEMWS+l37BRPza4PjAtENiCOmwqcz0urorBEOMNYODL/5h5kv7vsFthh3b/1CP4nMRh+/pzOZW/iUwwyIOkCUo9KWsyt7GTg5kyO8ePEf8ywptC/BGPcDNOH2F7vtKp9tD+fGGiHlsky74Aa1RfLVG/T3G1RS8zwAmuhuYr6vpeTX+9zeZLq4Kh+AO1PvDzbxs/XK2Papr/LbXqsVuFev5kzaLsl2QCQ+671x+Wq982fjj//3r0COxyfDOsveiI4DOz+/mv/1Kz+08XnN+1y2GPzPPfmtNGv84P7pUN21J+afsHQgXkH1T3Q6v9JyKFn8tqv8ZAv0822cQ7jo+yD5aweZSxQZ+m1z/xxNHPpdeX19gun6xX/B15xlDs+6br8yrF6KMJ+1XzDB/Z+2WffyBYf6/wf9wdY48m8lf1DUX1S+E/lC/9tznkj26j+TLThT7RWlLYn5Tlh1jLh8L8jv6Mn9wLNnqk53789GzD+xQ3hjxIOOAPctxvGz9cuY+eZPyaZTz8l83iv2gqP80sot/x3CqckOlXRWIO9SLduO3+Q07lD/L7X4qKQIg7UVH0K42hDMLSLMOyFI3fC46kXkmKYCkCQQgUxemnZem+IzC4M1Qf28P20b1qdxPve58rjgpTt1/7o9LcL+zX2s/t0C6KT1es9Kd/X8MlTMO0zX7Klvr1Cl8w5Hut7x+g++sYY19pCqVwjCJxBNji++oiRr+yFILSNEqQLE6SNxB+sV4q/koT4BcUZ3Dk93UMPhxi39He9qDY+FWl36n3Tzvg+4b/P3g44Kvwfo8iPynovjRZvJqVYT8FB1fbAmBAYiQDL5FGWOzBsiYEMCMkgiEEwgx4eWCiqVcGxXAGWBoQ7hDk7bnwH8BB/yQpLP69/SZBUcjAQ8E9/g8= \ No newline at end of file diff --git a/docs/getting-started/1.0/_images/images/further-info/devonfw-org.png 
b/docs/getting-started/1.0/_images/images/further-info/devonfw-org.png new file mode 100644 index 00000000..23bca7be Binary files /dev/null and b/docs/getting-started/1.0/_images/images/further-info/devonfw-org.png differ diff --git a/docs/getting-started/1.0/_images/images/further-info/teams.png b/docs/getting-started/1.0/_images/images/further-info/teams.png new file mode 100644 index 00000000..b486cac5 Binary files /dev/null and b/docs/getting-started/1.0/_images/images/further-info/teams.png differ diff --git a/docs/getting-started/1.0/_images/images/further-info/yammer.png b/docs/getting-started/1.0/_images/images/further-info/yammer.png new file mode 100644 index 00000000..8fb51547 Binary files /dev/null and b/docs/getting-started/1.0/_images/images/further-info/yammer.png differ diff --git a/docs/getting-started/1.0/_images/images/guide/jtq-screens.png b/docs/getting-started/1.0/_images/images/guide/jtq-screens.png new file mode 100644 index 00000000..d4c93cdb Binary files /dev/null and b/docs/getting-started/1.0/_images/images/guide/jtq-screens.png differ diff --git a/docs/getting-started/1.0/_images/images/guide/run-mythaistar.png b/docs/getting-started/1.0/_images/images/guide/run-mythaistar.png new file mode 100644 index 00000000..1a139198 Binary files /dev/null and b/docs/getting-started/1.0/_images/images/guide/run-mythaistar.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/Capgemini_Logo_Small.png b/docs/getting-started/1.0/_images/images/introduction/Capgemini_Logo_Small.png new file mode 100644 index 00000000..92281a7e Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/Capgemini_Logo_Small.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/cobigen.png b/docs/getting-started/1.0/_images/images/introduction/cobigen.png new file mode 100644 index 00000000..0d0ecfc3 Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/cobigen.png differ diff 
--git a/docs/getting-started/1.0/_images/images/introduction/devon-ide/FindBugs1.png b/docs/getting-started/1.0/_images/images/introduction/devon-ide/FindBugs1.png new file mode 100644 index 00000000..55229a0e Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/devon-ide/FindBugs1.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/devon-ide/FindBugs2.png b/docs/getting-started/1.0/_images/images/introduction/devon-ide/FindBugs2.png new file mode 100644 index 00000000..5133a861 Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/devon-ide/FindBugs2.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/devon-ide/FindBugs3.png b/docs/getting-started/1.0/_images/images/introduction/devon-ide/FindBugs3.png new file mode 100644 index 00000000..299a686b Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/devon-ide/FindBugs3.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/devon-ide/FindBugs4.png b/docs/getting-started/1.0/_images/images/introduction/devon-ide/FindBugs4.png new file mode 100644 index 00000000..0c48e491 Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/devon-ide/FindBugs4.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/devon-ide/Sonar_add_server.png b/docs/getting-started/1.0/_images/images/introduction/devon-ide/Sonar_add_server.png new file mode 100644 index 00000000..23f7b1c5 Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/devon-ide/Sonar_add_server.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/devon-ide/associate-sonarqube.png b/docs/getting-started/1.0/_images/images/introduction/devon-ide/associate-sonarqube.png new file mode 100644 index 00000000..b7afaaf8 Binary files /dev/null and 
b/docs/getting-started/1.0/_images/images/introduction/devon-ide/associate-sonarqube.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/devon-ide/change-link-with-project.png b/docs/getting-started/1.0/_images/images/introduction/devon-ide/change-link-with-project.png new file mode 100644 index 00000000..c9a4a51f Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/devon-ide/change-link-with-project.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/devon-ide/checkstyle.png b/docs/getting-started/1.0/_images/images/introduction/devon-ide/checkstyle.png new file mode 100644 index 00000000..48cf00a3 Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/devon-ide/checkstyle.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/devon-ide/checkstyle2.png b/docs/getting-started/1.0/_images/images/introduction/devon-ide/checkstyle2.png new file mode 100644 index 00000000..cf43b2b4 Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/devon-ide/checkstyle2.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/devon-ide/checkstyle3.png b/docs/getting-started/1.0/_images/images/introduction/devon-ide/checkstyle3.png new file mode 100644 index 00000000..2918c1a6 Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/devon-ide/checkstyle3.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/devon-ide/checkstyle4.png b/docs/getting-started/1.0/_images/images/introduction/devon-ide/checkstyle4.png new file mode 100644 index 00000000..422cdbfd Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/devon-ide/checkstyle4.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/devon-ide/checkstyle5.png b/docs/getting-started/1.0/_images/images/introduction/devon-ide/checkstyle5.png new file mode 100644 
index 00000000..90b4b772 Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/devon-ide/checkstyle5.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/devon-ide/cobigen.png b/docs/getting-started/1.0/_images/images/introduction/devon-ide/cobigen.png new file mode 100644 index 00000000..a8849d0a Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/devon-ide/cobigen.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/devon-ide/eclipse-settings.png b/docs/getting-started/1.0/_images/images/introduction/devon-ide/eclipse-settings.png new file mode 100644 index 00000000..65cd5773 Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/devon-ide/eclipse-settings.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/devon-ide/integrated-ide.png b/docs/getting-started/1.0/_images/images/introduction/devon-ide/integrated-ide.png new file mode 100644 index 00000000..bb2068dc Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/devon-ide/integrated-ide.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/devon-ide/link-with-project.png b/docs/getting-started/1.0/_images/images/introduction/devon-ide/link-with-project.png new file mode 100644 index 00000000..fe9e9f88 Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/devon-ide/link-with-project.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/devon-ide/sonarQube-issues-view.png b/docs/getting-started/1.0/_images/images/introduction/devon-ide/sonarQube-issues-view.png new file mode 100644 index 00000000..27050c25 Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/devon-ide/sonarQube-issues-view.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/devon-ide/unlink-with-project.png 
b/docs/getting-started/1.0/_images/images/introduction/devon-ide/unlink-with-project.png new file mode 100644 index 00000000..91b17bd7 Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/devon-ide/unlink-with-project.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/devon_quality_agility.png b/docs/getting-started/1.0/_images/images/introduction/devon_quality_agility.png new file mode 100644 index 00000000..5ee9a433 Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/devon_quality_agility.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/devonfw-ide.png b/docs/getting-started/1.0/_images/images/introduction/devonfw-ide.png new file mode 100644 index 00000000..99c0db1a Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/devonfw-ide.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/devonfw-small.png b/docs/getting-started/1.0/_images/images/introduction/devonfw-small.png new file mode 100644 index 00000000..a8c61b09 Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/devonfw-small.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/devonfwcatalog.png b/docs/getting-started/1.0/_images/images/introduction/devonfwcatalog.png new file mode 100644 index 00000000..311fc2fe Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/devonfwcatalog.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/download-install/create_update_ws.png b/docs/getting-started/1.0/_images/images/introduction/download-install/create_update_ws.png new file mode 100644 index 00000000..92d4bce9 Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/download-install/create_update_ws.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/download-install/run_env_sh.png 
b/docs/getting-started/1.0/_images/images/introduction/download-install/run_env_sh.png new file mode 100644 index 00000000..cf29ec09 Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/download-install/run_env_sh.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/download-install/setup_1.png b/docs/getting-started/1.0/_images/images/introduction/download-install/setup_1.png new file mode 100644 index 00000000..cb973e08 Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/download-install/setup_1.png differ diff --git a/docs/getting-started/1.0/_images/images/introduction/download-install/setup_2.png b/docs/getting-started/1.0/_images/images/introduction/download-install/setup_2.png new file mode 100644 index 00000000..fdace6d2 Binary files /dev/null and b/docs/getting-started/1.0/_images/images/introduction/download-install/setup_2.png differ diff --git a/docs/getting-started/1.0/further-info-community-links.html b/docs/getting-started/1.0/further-info-community-links.html new file mode 100644 index 00000000..223825cd --- /dev/null +++ b/docs/getting-started/1.0/further-info-community-links.html @@ -0,0 +1,318 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+ +
+
+

We strive to foster an active, diverse and dynamic community around devonfw and are relying on modern collaboration tools to do so. Please note that some resources listed here might only be accessible to members or partners of Capgemini.

+
+
+
+
+

Microsoft Teams

+
+
+

The devonfw public channel is accessible to everyone who has a Microsoft Teams account. You can find the latest discussions on ongoing development topics here, as well as new commits and pull requests to our repos.

+
+
+

Join us to stay in the loop, and feel free to post your questions regarding devonfw here.

+
+ +
+
+
+

Yammer

+
+
+

Our corporate Yammer channel is accessible to Capgemini employees and members. If you are looking for information or feedback on current and planned projects regarding devonfw, we reccomend you ask around here first.

+
+ +
+
+
+

E-Mail

+
+
+

You can reach our dedicated iCSD Support Team via e-mail at:

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/getting-started/1.0/further-info-repo-overview.html b/docs/getting-started/1.0/further-info-repo-overview.html new file mode 100644 index 00000000..be063e0e --- /dev/null +++ b/docs/getting-started/1.0/further-info-repo-overview.html @@ -0,0 +1,351 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Repository Overview

+
+
+

The GitHub repositories within the devonfw organization contain the source code and documentation for official devonfw projects.

+
+
+
+devonfw Repository Overview +
+
An overview of the devonfw organization repositories.
+
+
+

The most relevant repositories here are the individual devonfw technology stacks:

+
+
+ +
+
+

Our framework also delivers a number of tools and plug-ins that aim to accelerate and streamline the development process, for example:

+
+
+ +
+
+

We also provide educational material and reference implementations to aid new users and drive the adoption of our framework, for example:

+
+
+ +
+
+

Projects in early development and prototypes are located in the devonfw forge repository. They usually remain there until they are ready for broader release or use in production.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/getting-started/1.0/getting-started.html b/docs/getting-started/1.0/getting-started.html new file mode 100644 index 00000000..bccbf64b --- /dev/null +++ b/docs/getting-started/1.0/getting-started.html @@ -0,0 +1,901 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Getting Started

+
+ +
+
+
+

Introduction

+
+ +
+

What is devonfw?

+
+
+devonfw small +
+
+
+

Welcome to the devonfw platform. This is a product of the CSD (Custom Solution Development) industrialization effort to establish a standardized platform for custom software development within Capgemini APPS2. This platform is aimed at engagements, in which clients don’t specify the use of a predefined technology stack. In these cases we can offer a proven alternative as a result of our experience as a group.

+
+
+

devonfw is a development platform aiming for the standardization of processes and the boosting of productivity. It provides an architecture blueprint for server and client applications, alongside a set of tools to deliver a fully functional, out-of-the-box development environment.

+
+
+ + + + + +
+ + +The devonfw name is a registered trademark of Capgemini Logo Small, but the software and documentation included in devonfw are fully open source. Please refer to our OSS Compliance section for more information. +
+
+
+
+

Building Blocks of the Platform

+
+
+devonfwcatalog +
+
+
+

devonfw uses a state-of-the-art, open source, core reference architecture for the server (these days considered a commodity in the IT-industry) and on top of that an ever increasing number of high-value assets, which are developed by Capgemini.

+
+
+
+

The devonfw Technology Stack

+
+

devonfw is fully open source and consists of the following technology stacks:

+
+
+
+

Back-End Solutions

+
+

For server applications, devonfw includes the following solutions:

+
+
+ +
+
+
+

Front-End solutions

+
+

For client applications, devonfw includes two solutions based on TypeScript, JavaScript, C# and .NET:

+
+
+ +
+
+
+

Custom Tools

+ +
+
+

devonfw-ide

+
+

The devonfw-ide is not one monolithic program that is installed with a traditional executable; rather it’s a collection of scripts which are invoked via command line to automate several, repetetive development tasks. These scripts then interact with other tools, frameworks, and third-party IDEs to streamline the development workflow.

+
+
+
+devonfw ide +
+
+
+

The advantage of this approach is, that you can have as many instances of the devonfw-ide on your machine as you need — for different projects with different tools, tool versions and configurations. No need for a physical installation and no tweaking of your operating system required!

+
+
+

Instances of the devonfw-ide do not interfere with each other, nor with other installed software. The package size of the devonfw-ide is initally very small, the setup is simple, and the included software is portable.

+
+
+
+

== IDEs

+
+

It supports the following IDEs:

+
+ +
+
+

== Platforms

+
+

It supports the following platforms:

+
+
+ +
+
+
+

== Build-Systems

+
+

It supports the following build-systems:

+
+
+ +
+
+ + + + + +
+ + +Other IDEs, platforms, or tools can easily be integrated as commandlets. +
+
+
+
+

CobiGen

+
+

CobiGen is a code generator included in the devonfw-ide, that allows users to generate the project structure and large parts of the application component code. This saves a lot of time, which is usually wasted on repetitive engineering tasks and/or writing boilerplate code.

+
+
+
+cobigen +
+
+
+

Following the same philosophy as the devonfw-ide, CobiGen bundles a new command line interface (CLI), that enables the generation of code using only a few commands. This approach also allows us to decouple CobiGen from Eclipse and use it alongside VS Code or IntelliJ IDEA.

+
+ +
+
+

Why should I use devonfw?

+
+

devonfw aims to provide a framework for the development of web applications based on the Java EE programming model. It uses the Spring framework as its Java EE default implementation.

+
+
+
+

Objectives

+ +
+
+

Standardization

+
+

We don’t want to keep reinventing the wheel for thousands of projects, for hundreds of customers, across dozens of countries. For this reason, we aim to rationalize, harmonize and standardize the development assets for software projects and industrialize the software development process.

+
+
+
+

Industrialization of Innovative Technologies & “Digital”

+
+

devonfw’s goal is to standardize & industrialize. But this applies not only to large volume, “traditional” custom software development projects. devonfw also aims to offer a standardized platform which contains a range of state-of-the-art methodologies and technology stacks. devonfw supports agile development by small teams utilizing the latest technologies for projects related to Mobile, IoT and the Cloud.

+
+
+
+

Deliver & Improve Business Value

+
+
+devon quality agility +
+
+
+
+

Efficiency

+
+
    +
  • +

    Up to 20% reduction in time to market, with faster delivery due to automation and reuse.

    +
  • +
  • +

    Up to 25% less implementation efforts due to code generation and reuse.

    +
  • +
  • +

    Flat pyramid and rightshore, ready for junior developers.

    +
  • +
+
+
+
+

Quality

+
+
    +
  • +

    State-of-the-art architecture and design.

    +
  • +
  • +

    Lower cost on maintenance and warranty.

    +
  • +
  • +

    Technical debt reduction by reuse.

    +
  • +
  • +

    Risk reduction due to continuous improvement of individual assets.

    +
  • +
  • +

    Standardized, automated quality checks.

    +
  • +
+
+
+
+

Agility

+
+
    +
  • +

    Focus on business functionality, not on technicalities.

    +
  • +
  • +

    Shorter release cycles.

    +
  • +
  • +

    DevOps by design — Infrastructure as Code.

    +
  • +
  • +

    Continuous Delivery pipeline.

    +
  • +
  • +

    On- and off-premise flexibility.

    +
  • +
  • +

    PoCs and prototypes in days not months.

    +
  • +
+
+
+
+

Features

+ +
+
+

Everything in a Single ZIP

+
+

The devonfw distributions is packaged in a ZIP file that includes all the custom tools, software and configurations.

+
+
+

Having all the dependencies self-contained in the distribution’s ZIP file, users don’t need to install or configure anything. Just extracting the ZIP content is enough to have a fully functional devonfw.

+
+
+
+

devonfw — The Package

+
+

The devonfw platform provides:

+
+
+
    +
  • +

    Implementation blueprints for a modern cloud-ready server and a choice on JS-Client technologies (either open source Angular or a very rich and impressive solution based on commercial Sencha UI).

    +
  • +
  • +

    Quality documentation and step-by-step quick start guides.

    +
  • +
  • +

    Highly integrated and packaged development environment based around Eclipse and Jenkins. You will be ready to start implementing your first customer-specific use case in 2h time.

    +
  • +
  • +

    Iterative eclipse-based code-generator that understands "Java" and works on higher architectural concepts than Java-classes.

    +
  • +
  • +

    An example application as a reference implementation.

    +
  • +
  • +

    Support through a large community + industrialization services (Standard Platform as a Service) available in the iProd service catalog.

    +
  • +
+
+
+
+
+
+

devonfw-ide Download and Setup

+
+
+

Please refer to our devonfw-ide Setup section.

+
+
+
+
+

Guides

+
+
+

Our goal is to provide a smooth starting experience to all users of devonfw, no matter how experienced they are or what their stakeholder role is. To achieve this, we provide a list of recommended guides here:

+
+
+

For Students and Junior Engineers:

+
+ +
+

For Senior Engineers and Architects:

+
+ +
+

For Team Leaders and Product Ambassadors:

+
+ + +
+

Build Your First devonfw Application

+
+

JumpTheQueue is a small application based on the devonfw framework, which you can create yourself by following our simple step-by-step tutorial. By doing so, you will learn about the app development workflow and gain insight into the design of a professional business information system. Please visit the JumpTheQueue wiki and start working trough the tutorial HERE.

+
+
+ + + + + +
+ + +The tutorial assumes you have successfully set up the devonfw-ide previously. +
+
+
+

You can also clone the project and explore the finished source code via:

+
+
+
+
git clone https://github.com/devonfw/jump-the-queue.git
+
+
+
+
+JumpTheQueue Screenshots +
+
+
+

Another way to check out the JumpTheQueue-Application is to try our interactive katacoda scenario where you set up the application step by step.

+
+ + +
+
+

Explore Our devonfw Sample Application

+
+

MyThaiStar is a complex sample app, that demonstrates the full capabilities of our framework. On this page we will describe how to download and launch the app on your system, so you can test the various functionalities it offers and explore its code.

+
+
+

You can also check out the interactive katacoda scenario for setting up and trying out the MyThaiStar-Application.

+
+ +
+ + + + + +
+ + +We assume you have successfully set up the devonfw-ide previously. +
+
+
+
    +
  1. +

    In the root directory of a devonfw-ide directory, right-click and select "Open Devon CMD shell here" from the Windows Explorer context menu. Then navigate to the main workspace and check out the MyThaiStar Git repository like this:

    +
    +
    +
    cd workspaces/main
    +git clone https://github.com/devonfw/my-thai-star.git
    +
    +
    +
  2. +
  3. +

    Perform: cd my-thai-star

    +
  4. +
  5. +

    Execute: devon eclipse ws-up

    +
  6. +
  7. +

    Execute: devon eclipse create-script

    +
  8. +
  9. +

    Go to the root folder of the distribution and run eclipse-main.bat

    +
  10. +
  11. +

    In Eclipse navigate to File > Import > Maven > Existing Maven Projects, then import the cloned project from your workspace by clicking the "Browse" button and selecting /workspaces/my-thai-star/java/mtsj/.

    +
  12. +
  13. +

    Run the backend by right-clicking SpringBootApp.java and selecting Run as > Java Application in the context menu. The backend will start up and create log entries in the Eclipse Console tab.

    +
    +

    Running the MyThaiStar Backend

    +
    +
  14. +
  15. +

    Return to your command shell and perform: cd angular

    +
  16. +
  17. +

    Execute: npm install

    +
  18. +
  19. +

    Execute: ng serve

    +
  20. +
  21. +

    Once started, the frontend will be available at localhost:4200/restaurant. Login with the username and password waiter and take a look at the various functionalities provided by MyThaiStar.

    +
  22. +
+
+
+

You should now take a look at both the front- and backend code and familiarize yourself with its structure and concepts, since most devonfw projects follow this exemplary implementation. Please visit the architecture overview pages of devon4ng and devon4j to learn more about the internal workings of front- and backend.

+
+
+
+
+
+

Further Information

+
+ +
+

Repository Overview

+
+

The GitHub repositories within the devonfw organization contain the source code and documentation for official devonfw projects.

+
+
+
+devonfw Repository Overview +
+
An overview of the devonfw organization repositories.
+
+
+

The most relevant repositories here are the individual devonfw technology stacks:

+
+
+ +
+
+

Our framework also delivers a number of tools and plug-ins that aim to accelerate and streamline the development process, for example:

+
+
+ +
+
+

We also provide educational material and reference implementations to aid new users and drive the adoption of our framework, for example:

+
+
+ +
+
+

Projects in early development and prototypes are located in the devonfw forge repository. They usually remain there until they are ready for broader release or use in production.

+
+ +
+
+ +
+

We strive to foster an active, diverse and dynamic community around devonfw and are relying on modern collaboration tools to do so. Please note that some resources listed here might only be accessible to members or partners of Capgemini.

+
+
+
+

Microsoft Teams

+
+

The devonfw public channel is accessible to everyone who has a Microsoft Teams account. You can find the latest discussions on ongoing development topics here, as well as new commits and pull requests to our repos.

+
+
+

Join us to stay in the loop, and feel free to post your questions regarding devonfw here.

+
+ +
+
+

Yammer

+
+

Our corporate Yammer channel is accessible to Capgemini employees and members. If you are looking for information or feedback on current and planned projects regarding devonfw, we recommend you ask around here first.

+
+ +
+
+

E-Mail

+
+

You can reach our dedicated iCSD Support Team via e-mail at:

+
+ +
+
+
+
+

Contributing

+
+
+

Please refer to our Contributing section.

+
+
+
+
+

Code of Conduct

+
+
+

Please refer to our Code of Conduct section.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/getting-started/1.0/guide-enterprise-dev-basics.html b/docs/getting-started/1.0/guide-enterprise-dev-basics.html new file mode 100644 index 00000000..ddceb515 --- /dev/null +++ b/docs/getting-started/1.0/guide-enterprise-dev-basics.html @@ -0,0 +1,311 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Basics of enterprise software development with devonfw

+
+
+

Many students or young engineers starting their first job in software development are not acquainted with enterprise software development technologies or higher concepts of programming.

+
+
+

We aim to introduce some of these topics here …​

+
+
+
+
+

Contents

+
+
+
    +
  • +

    Dependency Injection (DI)

    +
  • +
  • +

    Object Relational Mapping (ORM)

    +
  • +
  • +

    Java Enterprise Edition (Java EE) Concepts

    +
    +
      +
    • +

      Java Enterprise Beans (EJB)

      +
    • +
    • +

      Java Persistence API (JPA)

      +
    • +
    +
    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/getting-started/1.0/guide-first-application.html b/docs/getting-started/1.0/guide-first-application.html new file mode 100644 index 00000000..ee17de82 --- /dev/null +++ b/docs/getting-started/1.0/guide-first-application.html @@ -0,0 +1,310 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Build Your First devonfw Application

+
+
+

JumpTheQueue is a small application based on the devonfw framework, which you can create yourself by following our simple step-by-step tutorial. By doing so, you will learn about the app development workflow and gain insight into the design of a professional business information system. Please visit the JumpTheQueue wiki and start working through the tutorial HERE.

+
+
+ + + + + +
+ + +The tutorial assumes you have successfully set up the devonfw-ide previously. +
+
+
+

You can also clone the project and explore the finished source code via:

+
+
+
+
git clone https://github.com/devonfw/jump-the-queue.git
+
+
+
+
+JumpTheQueue Screenshots +
+
+
+

Another way to check out the JumpTheQueue-Application is to try our interactive katacoda scenario where you set up the application step by step.

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/getting-started/1.0/guide-sample-application.html b/docs/getting-started/1.0/guide-sample-application.html new file mode 100644 index 00000000..e1a0ecbb --- /dev/null +++ b/docs/getting-started/1.0/guide-sample-application.html @@ -0,0 +1,346 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Explore Our devonfw Sample Application

+
+
+

MyThaiStar is a complex sample app, that demonstrates the full capabilities of our framework. On this page we will describe how to download and launch the app on your system, so you can test the various functionalities it offers and explore its code.

+
+
+

You can also check out the interactive katacoda scenario for setting up and trying out the MyThaiStar-Application.

+
+ +
+ + + + + +
+ + +We assume you have successfully set up the devonfw-ide previously. +
+
+
+
    +
  1. +

    In the root directory of a devonfw-ide directory, right-click and select "Open Devon CMD shell here" from the Windows Explorer context menu. Then navigate to the main workspace and check out the MyThaiStar Git repository like this:

    +
    +
    +
    cd workspaces/main
    +git clone https://github.com/devonfw/my-thai-star.git
    +
    +
    +
  2. +
  3. +

    Perform: cd my-thai-star

    +
  4. +
  5. +

    Execute: devon eclipse ws-up

    +
  6. +
  7. +

    Execute: devon eclipse create-script

    +
  8. +
  9. +

    Go to the root folder of the distribution and run eclipse-main.bat

    +
  10. +
  11. +

    In Eclipse navigate to File > Import > Maven > Existing Maven Projects, then import the cloned project from your workspace by clicking the "Browse" button and selecting /workspaces/my-thai-star/java/mtsj/.

    +
  12. +
  13. +

    Run the backend by right-clicking SpringBootApp.java and selecting Run as > Java Application in the context menu. The backend will start up and create log entries in the Eclipse Console tab.

    +
    +

    Running the MyThaiStar Backend

    +
    +
  14. +
  15. +

    Return to your command shell and perform: cd angular

    +
  16. +
  17. +

    Execute: npm install

    +
  18. +
  19. +

    Execute: ng serve

    +
  20. +
  21. +

    Once started, the frontend will be available at localhost:4200/restaurant. Login with the username and password waiter and take a look at the various functionalities provided by MyThaiStar.

    +
  22. +
+
+
+

You should now take a look at both the front- and backend code and familiarize yourself with its structure and concepts, since most devonfw projects follow this exemplary implementation. Please visit the architecture overview pages of devon4ng and devon4j to learn more about the internal workings of front- and backend.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/getting-started/1.0/guide-team-start.html b/docs/getting-started/1.0/guide-team-start.html new file mode 100644 index 00000000..b177b29f --- /dev/null +++ b/docs/getting-started/1.0/guide-team-start.html @@ -0,0 +1,298 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Get a team set up and running with devonfw

+
+
+

One of the goals of devonfw is to accelerate development and get new projects off the ground quickly. Here we provide resources for team leaders who want to use devonfw to achieve this task.

+
+
+
+
+

Contents

+
+
+
    +
  • +

    Creating a unified project environment and workspaces with devonfw

    +
  • +
  • +

    Distributing updates under devonfw

    +
  • +
  • +

    Licensing checks for devonfw projects

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/getting-started/1.0/index.html b/docs/getting-started/1.0/index.html new file mode 100644 index 00000000..081229a6 --- /dev/null +++ b/docs/getting-started/1.0/index.html @@ -0,0 +1,280 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Home

+
+
+

Welcome to the wiki pages of the devonfw getting-started guide!
+Please select a topic from the sidebar. ⇒

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/getting-started/1.0/introduction-download-and-setup.html b/docs/getting-started/1.0/introduction-download-and-setup.html new file mode 100644 index 00000000..a6155895 --- /dev/null +++ b/docs/getting-started/1.0/introduction-download-and-setup.html @@ -0,0 +1,426 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Download and Setup

+
+
+

In this section, you will learn how to set up the devonfw environment and start working on your first project based on devonfw.

+
+
+

The devonfw environment contains all software and tools necessary to develop the applications with devonfw.

+
+
+
+
+

Prerequisites

+
+
+

In order to setup the environment, following are the prerequisites:

+
+
+
    +
  • +

    internet connection (including details of your proxy configuration, if necessary)

    +
  • +
  • +

    more than 1GB of free disk space to install customized environment

    +
  • +
  • +

    command line tool devonfw IDE is already installed

    +
  • +
+
+
+
+
+

Download

+
+
+ + + + + +
+ + +Please refer to the devonfw-ide documentation for the current installation process. +
+
+
+

Older devonfw distributions can be obtained from the FTP releases library and are packaged in a ZIP file that includes all the needed tools, software and configurations. Browse to the corresponding version folder in order to get the latest version.

+
+
+
+
+

Setup the Workspace for Older Versions

+
+ +
+
+
+

Windows

+
+
+
    +
  1. +

    Unzip the devonfw distribution into a directory of your choice. The path to the devonfw distribution directory should contain no spaces, to prevent problems with some of the tools.

    +
  2. +
  3. +

    Run the batch file "create-or-update-workspace.bat".

    +
  4. +
+
+
+
+setup 1 +
+
+
+

This will configure the included tools like Eclipse with the default settings of the devonfw distribution.

+
+
+

The result should be as seen below

+
+
+
+setup 2 +
+
+
+

The working devonfw environment is ready!

+
+
+

Note: If you use a proxy to connect to the Internet, you have to manually configure it in Maven, Sencha Cmd and Eclipse. The next section explains how.

+
+
+
+
+

Linux

+
+
+
    +
  • +

    Unzip the devonfw distribution into a directory of your choice. The path to the devonfw distribution directory should contain no spaces, to prevent problems with some of the tools.

    +
  • +
  • +

    Run the script: . env.sh

    +
  • +
+
+
+
+run env sh +
+
+
+
    +
  • +

    Run the script: . create-or-update-workspace

    +
  • +
+
+
+
+create update ws +
+
+
+

Both . env.sh and . create-or-update-workspace will set the PATH for all the software included with the devon distribution, like: eclipse, maven, java etc. This will also generate some files like eclipse_main, used to invoke eclipse.

+
+
+
    +
  • +

    For vscode setup we have to execute create-or-update-workspace-vs

    +
  • +
  • +

    There are also scripts initialize.sh and uninstallUI.sh.

    +
    +
      +
    • +

      initialize.sh: installs angular, node, python, ant, and subversion

      +
    • +
    • +

      uninstallUI.sh: is used to uninstall the above software

      +
    • +
    +
    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/getting-started/1.0/introduction-the-devon-ide.html b/docs/getting-started/1.0/introduction-the-devon-ide.html new file mode 100644 index 00000000..978fcf12 --- /dev/null +++ b/docs/getting-started/1.0/introduction-the-devon-ide.html @@ -0,0 +1,631 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Devon IDE

+
+
+

"Devon IDE" is the general name for two distinct variants of a customized Eclipse IDE. The Open Source variant — called devonfw-ide — is publicly available for everyone. A more extended variant is included in the "Devon Dist", which is only available to Capgemini employees.

+
+
+
+
+

Features and Advantages

+
+
+

devonfw comes with a fully featured IDE in order to simplify the installation, configuration and maintenance of this instrumental part of the development environment. As it is being included in the distribution, the IDE is ready to be used and some specific configuration of certain plugins only takes a few minutes.

+
+
+
+Integrated IDE +
+
+
+

As with the remainder of the distribution, the advantage of this approach is that you can have as many instances of the -ide "installed" on your machine for different projects with different tools, tool versions and configurations. No physical installation and no tweaking of your operating system required. "Installations" of the Devon distribution do not interfere with each other nor with other installed software.

+
+
+
+
+

Multiple Workspaces

+
+
+

There is inbuilt support for working with different workspaces on different branches. Create and update new workspaces with a few clicks. You can see the workspace name in the title-bar of your IDE so you do not get confused and work on the right branch.

+
+
+
+
+

CobiGen

+
+
+

In the Devon distribution we have a code generator to create CRUD code, called CobiGen. This is a generic incremental generator for end to end code generation tasks, mostly used in Java projects. Due to a template-based approach, CobiGen generates any set of text-based documents and document fragments.

+
+
+
+cobigen +
+
+
+

CobiGen is distributed in the Devon distribution as an Eclipse plugin, and is available to all Devon developers for Capgemini engagements. Due to the importance of this component and the scope of its functionality, it is fully described here.

+
+
+
+
+

IDE Plugins

+
+
+

Since an application’s code can greatly vary, and every program can be written in lots of ways without being semantically different, IDE comes with pre-installed and pre-configured plugins that use some kind of a probabilistic approach, usually based on pattern matching, to determine which pieces of code should be reviewed. These hints are a real time-saver, helping you to review incoming changes and prevent bugs from propagating into the released artifacts. Apart from CobiGen mentioned in the previous paragraph, the IDE provides CheckStyle, SonarQube, FindBugs and SOAP-UI. Details of each can be found in subsequent sections.

+
+
+
+
+

CheckStyle

+
+ +
+
+
+

== What is CheckStyle?

+
+
+

CheckStyle is an Open Source development tool to help you ensure that your Java code adheres to a set of coding standards. CheckStyle does this by inspecting your Java source code and pointing out items that deviate from a defined set of coding rules.

+
+
+

With the CheckStyle IDE Plugin, your code is constantly inspected for coding standard deviations. Within the Eclipse workbench, you are immediately notified of the problems via the Eclipse Problems View and source code annotations similar to compiler errors or warnings. +This ensures an extremely short feedback loop right at the developers' fingertips.

+
+
+
+
+

== Why use CheckStyle?

+
+
+

If your development team consists of more than one person, then obviously a common ground for coding standards (formatting rules, line lengths etc.) must be agreed upon - even if it is just for practical reasons to avoid superficial, format related merge conflicts. +CheckStyle Plugin helps you define and easily apply those common rules.

+
+
+

The plugin uses a project builder to check your project files with CheckStyle. Assuming the IDE Auto-Build feature is enabled, each modification of a project file will immediately get checked by CheckStyle on file save - giving you immediate feedback about the changes you made. To use a simple analogy, the CheckStyle Plug-in works very much like a compiler but instead of producing .class files, it produces warnings where the code violates CheckStyle rules. The discovered deviations are accessible in the Eclipse Problems View, as code editor annotations and via additional CheckStyle violations views.

+
+
+
+
+

== Installation of CheckStyle

+
+
+

After IDE installation, the IDE provides a default CheckStyle configuration file which has certain check rules specified. +The set of rules used to check the code is highly configurable. A CheckStyle configuration specifies which check rules are validated against the code and with which severity violations will be reported. Once defined, a CheckStyle configuration can be used across multiple projects. The IDE comes with several pre-defined CheckStyle configurations. +You can create custom configurations using the plugin’s CheckStyle configuration editor or even use an existing CheckStyle configuration file from an external location.

+
+
+

You can see violations in your workspace as shown in below figure.

+
+
+
+checkstyle +
+
+
+
+
+

== Usage

+
+
+

So, once projects are created, follow steps mentioned below, to activate CheckStyle:

+
+
+
    +
  1. +

    Open the properties of the project you want to get checked.

    +
  2. +
+
+
+
+checkstyle2 +
+
+
+
    +
  1. +

    Select the CheckStyle section within the properties dialog.

    +
  2. +
+
+
+
+checkstyle3 +
+
+
+
    +
  1. +

    Activate CheckStyle for your project by selecting the CheckStyle active for this project check box and press OK

    +
  2. +
+
+
+
+checkstyle4 +
+
+
+

Now CheckStyle should begin checking your code. This may take a while depending on how many source files your project contains. +The CheckStyle Plug-in uses background jobs to do its work - so while CheckStyle audits your source files you should be able to continue your work. +After CheckStyle has finished checking your code, please look into your Eclipse Problems View. +There should be some warnings from CheckStyle. These warnings point to the code locations where your code violates the pre-configured Checks configuration.

+
+
+
+checkstyle5 +
+
+
+

You can navigate to the problems in your code by double-clicking the problem in you problems view. +On the left hand side of the editor an icon is shown for each line that contains a CheckStyle violation. Hovering with your mouse above this icon will show you the problem message. +Also note the editor annotations - they are there to make it even easier to see where the problems are.

+
+
+
+
+

FindBugs

+
+ +
+
+
+

== What is FindBugs?

+
+
+

FindBugs is an open source project for a static analysis of the Java bytecode to identify potential software bugs. FindBugs provides early feedback about potential errors in the code.

+
+
+
+
+

== Why use FindBugs?

+
+
+

It scans your code for bugs, breaking down the list of bugs in your code into a ranked list on a 20-point scale. The lower the number, the more hardcore the bug. This helps the developer to address these problems early in the development phase.

+
+
+
+
+

== Installation and Usage of FindBugs

+
+
+

IDE comes preinstalled with FindBugs plugin.

+
+
+

You can configure FindBugs to run automatically for a selected project. For this, right-click on a project and select Properties from the popup menu to open the project properties. Select FindBugs → Run automatically as shown below.

+
+
+
+configure FindBugs +
+
+
+

To run the error analysis of FindBugs on a project, right-click on it and select the Find Bugs…​ → Find Bugs menu entry.

+
+
+
+error analysis +
+
+
+

Plugin provides specialized views to see the reported error messages. Select Window → Show View → Other…​ to access the views. +The FindBugs error messages are also displayed in the Problems view or as decorators in the Package Explorer view.

+
+
+
+ShowView bug Explorer +
+
+
+
+bug Explorer +
+
+
+
+
+

SonarLint

+
+ +
+
+
+

== What is SonarLint?

+
+
+

SonarLint is an open platform to manage code quality. +It provides on-the-fly feedback to developers on new bugs and quality issues injected into their code.

+
+
+
+
+

== Why use SonarLint?

+
+
+

It covers seven aspects of code quality: unit tests, coding rules, comments, complexity, duplications, architecture and design, and potential bugs. +SonarLint has a very efficient way of navigating, a balance between high-level views, dashboards and defect hunting tools. This enables you to quickly uncover projects and/or components that are in analysis to establish action plans.

+
+
+
+
+

== Installation and Usage of SonarLint

+
+
+

IDE comes preinstalled with SonarLint. +To configure it, please follow the steps below:

+
+
+

First of all, you need to start the sonar service. For that, go to the software folder which is extracted from the Devon-dist zip, choose sonarqube→bin→<choose the appropriate folder according to your OS>→ and execute the startSonar batch file.

+
+
+

If your project is not already under analysis, you’ll need to declare it through the SonarQube web interface as described here. +Once your project exists in SonarQube, you’re ready to get started with SonarQube in Eclipse.

+
+
+

SonarLint in Eclipse is pre-configured to access a local SonarQube server listening on http://localhost:9000/. +You can edit this server, delete it or add new ones. By default, the user and password are "admin". If the sonar service is started properly, testing the connection will give you a successful result.

+
+
+
+Sonar_add_server +
+
+
+

For getting a project analysed on sonar, refer to this link: http://docs.sonarqube.org/display/SONAR/Analyzing+Source+Code.

+
+
+

Linking a project to one analysed on sonar server.

+
+
+
+associate-sonarqube +
+
+
+

In the SonarQube project text field, start typing the name of the project and select it in the list box:

+
+
+
+link-with-project +
+
+
+

Click on Finish. Your project is now associated to one analyzed on your SonarQube server.

+
+
+

Changing Binding

+
+
+

At any time, it is possible to change the project association.

+
+
+

To do so, right-click on the project in the Project Explorer, and then SonarQube > Change Project Association.

+
+
+
+change-link-with-project +
+
+
+

Unbinding a Project

+
+
+

To do so, right-click on the project in the Project Explorer, and then SonarQube > Remove SonarQube Nature.

+
+
+
+unlink-with-project +
+
+
+

Advanced Configuration

+
+
+

Additional settings (such as markers for new issues) are available through Window > Preferences > SonarLint

+
+
+
+eclipse-settings +
+
+
+

To look for SonarQube analysed issues, go to Window→Show View→Others→SonarLint→SonarLint Issues. +Now you can see the issues in the SonarQube issues tab as shown

+
+
+
+sonarQube-issues-view +
+
+
+

Or you can go to http://localhost:9000, log in with admin as id and admin as password, and go to the Dashboard. You can see all the statistics of the analysis of the configured projects on the sonar server.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/getting-started/1.0/introduction-what-is-devonfw.html b/docs/getting-started/1.0/introduction-what-is-devonfw.html new file mode 100644 index 00000000..0d6937a3 --- /dev/null +++ b/docs/getting-started/1.0/introduction-what-is-devonfw.html @@ -0,0 +1,488 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

What is devonfw?

+
+
+
+devonfw small +
+
+
+

Welcome to the devonfw platform. This is a product of the CSD (Custom Solution Development) industrialization effort to establish a standardized platform for custom software development within Capgemini APPS2. This platform is aimed at engagements, in which clients don’t specify the use of a predefined technology stack. In these cases we can offer a proven alternative as a result of our experience as a group.

+
+
+

devonfw is a development platform aiming for the standardization of processes and the boosting of productivity. It provides an architecture blueprint for server and client applications, alongside a set of tools to deliver a fully functional, out-of-the-box development environment.

+
+
+ + + + + +
+ + +The devonfw name is a registered trademark of Capgemini Logo Small, but the software and documentation included in devonfw are fully open source. Please refer to our OSS Compliance section for more information. +
+
+
+
+
+

Building Blocks of the Platform

+
+
+
+devonfwcatalog +
+
+
+

devonfw uses a state-of-the-art, open source, core reference architecture for the server (these days considered a commodity in the IT-industry) and on top of that an ever increasing number of high-value assets, which are developed by Capgemini.

+
+
+
+
+

The devonfw Technology Stack

+
+
+

devonfw is fully open source and consists of the following technology stacks:

+
+
+
+
+

Back-End Solutions

+
+
+

For server applications, devonfw includes the following solutions:

+
+
+ +
+
+
+
+

Front-End solutions

+
+
+

For client applications, devonfw includes two solutions based on TypeScript, JavaScript, C# and .NET:

+
+
+ +
+
+
+
+

Custom Tools

+
+ +
+
+
+

devonfw-ide

+
+
+

The devonfw-ide is not one monolithic program that is installed with a traditional executable; rather it’s a collection of scripts which are invoked via command line to automate several repetitive development tasks. These scripts then interact with other tools, frameworks, and third-party IDEs to streamline the development workflow.

+
+
+
+devonfw ide +
+
+
+

The advantage of this approach is, that you can have as many instances of the devonfw-ide on your machine as you need — for different projects with different tools, tool versions and configurations. No need for a physical installation and no tweaking of your operating system required!

+
+
+

Instances of the devonfw-ide do not interfere with each other, nor with other installed software. The package size of the devonfw-ide is initially very small, the setup is simple, and the included software is portable.

+
+
+
+
+

== IDEs

+
+
+

It supports the following IDEs:

+
+ +
+
+
+

== Platforms

+
+
+

It supports the following platforms:

+
+
+ +
+
+
+
+

== Build-Systems

+
+
+

It supports the following build-systems:

+
+
+ +
+
+ + + + + +
+ + +Other IDEs, platforms, or tools can easily be integrated as commandlets. +
+
+
+
+
+

CobiGen

+
+
+

CobiGen is a code generator included in the devonfw-ide, that allows users to generate the project structure and large parts of the application component code. This saves a lot of time, which is usually wasted on repetitive engineering tasks and/or writing boilerplate code.

+
+
+
+cobigen +
+
+
+

Following the same philosophy as the devonfw-ide, CobiGen bundles a new command line interface (CLI), that enables the generation of code using only a few commands. This approach also allows us to decouple CobiGen from Eclipse and use it alongside VS Code or IntelliJ IDEA.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/getting-started/1.0/introduction-why-should-i-use-devonfw.html b/docs/getting-started/1.0/introduction-why-should-i-use-devonfw.html new file mode 100644 index 00000000..60b804c4 --- /dev/null +++ b/docs/getting-started/1.0/introduction-why-should-i-use-devonfw.html @@ -0,0 +1,437 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Why should I use devonfw?

+
+
+

devonfw aims to provide a framework for the development of web applications based on the Java EE programming model. It uses the Spring framework as its Java EE default implementation.

+
+
+
+
+

Objectives

+
+ +
+
+
+

Standardization

+
+
+

We don’t want to keep reinventing the wheel for thousands of projects, for hundreds of customers, across dozens of countries. For this reason, we aim to rationalize, harmonize and standardize the development assets for software projects and industrialize the software development process.

+
+
+
+
+

Industrialization of Innovative Technologies & “Digital”

+
+
+

devonfw’s goal is to standardize & industrialize. But this applies not only to large volume, “traditional” custom software development projects. devonfw also aims to offer a standardized platform which contains a range of state-of-the-art methodologies and technology stacks. devonfw supports agile development by small teams utilizing the latest technologies for projects related to Mobile, IoT and the Cloud.

+
+
+
+
+

Deliver & Improve Business Value

+
+
+
+devon quality agility +
+
+
+
+
+

Efficiency

+
+
+
    +
  • +

    Up to 20% reduction in time to market, with faster delivery due to automation and reuse.

    +
  • +
  • +

    Up to 25% less implementation efforts due to code generation and reuse.

    +
  • +
  • +

    Flat pyramid and rightshore, ready for junior developers.

    +
  • +
+
+
+
+
+

Quality

+
+
+
    +
  • +

    State-of-the-art architecture and design.

    +
  • +
  • +

    Lower cost on maintenance and warranty.

    +
  • +
  • +

    Technical debt reduction by reuse.

    +
  • +
  • +

    Risk reduction due to continuous improvement of individual assets.

    +
  • +
  • +

    Standardized, automated quality checks.

    +
  • +
+
+
+
+
+

Agility

+
+
+
    +
  • +

    Focus on business functionality, not on technicalities.

    +
  • +
  • +

    Shorter release cycles.

    +
  • +
  • +

    DevOps by design — Infrastructure as Code.

    +
  • +
  • +

    Continuous Delivery pipeline.

    +
  • +
  • +

    On- and off-premise flexibility.

    +
  • +
  • +

    PoCs and prototypes in days not months.

    +
  • +
+
+
+
+
+

Features

+
+ +
+
+
+

Everything in a Single ZIP

+
+
+

The devonfw distribution is packaged in a ZIP file that includes all the custom tools, software and configurations.

+
+
+

Having all the dependencies self-contained in the distribution’s ZIP file, users don’t need to install or configure anything. Just extracting the ZIP content is enough to have a fully functional devonfw.

+
+
+
+
+

devonfw — The Package

+
+
+

The devonfw platform provides:

+
+
+
    +
  • +

    Implementation blueprints for a modern cloud-ready server and a choice on JS-Client technologies (either open source Angular or a very rich and impressive solution based on commercial Sencha UI).

    +
  • +
  • +

    Quality documentation and step-by-step quick start guides.

    +
  • +
  • +

    Highly integrated and packaged development environment based around Eclipse and Jenkins. You will be ready to start implementing your first customer-specific use case in 2h time.

    +
  • +
  • +

    Iterative eclipse-based code-generator that understands "Java" and works on higher architectural concepts than Java-classes.

    +
  • +
  • +

    An example application as a reference implementation.

    +
  • +
  • +

    Support through a large community + industrialization services (Standard Platform as a Service) available in the iProd service catalog.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/github/1.0/CODE_OF_CONDUCT.html b/docs/github/1.0/CODE_OF_CONDUCT.html new file mode 100644 index 00000000..9d4f1407 --- /dev/null +++ b/docs/github/1.0/CODE_OF_CONDUCT.html @@ -0,0 +1,379 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Contributor Covenant Code of Conduct

+
+ +
+
+
+

Our Pledge

+
+
+

In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.

+
+
+
+
+

Our Standards

+
+
+

Examples of behavior that contributes to creating a positive environment include:

+
+
+
    +
  • +

    Using welcoming and inclusive language

    +
  • +
  • +

    Being respectful of differing viewpoints and experiences

    +
  • +
  • +

    Gracefully accepting constructive criticism

    +
  • +
  • +

    Focusing on what is best for the community

    +
  • +
  • +

    Showing empathy towards other community members

    +
  • +
+
+
+

Examples of unacceptable behavior by participants include:

+
+
+
    +
  • +

    The use of sexualized language or imagery and unwelcome sexual attention or advances

    +
  • +
  • +

    Trolling, insulting/derogatory comments, and personal or political attacks

    +
  • +
  • +

    Public or private harassment

    +
  • +
  • +

    Publishing others' private information, such as a physical or electronic address, without explicit permission

    +
  • +
  • +

    Other conduct which could reasonably be considered inappropriate in a professional setting

    +
  • +
+
+
+
+
+

Our Responsibilities

+
+
+

Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.

+
+
+

Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.

+
+
+
+
+

Scope

+
+
+

This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.

+
+
+
+
+

Enforcement

+
+
+

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at icsddevonfwsupport.apps2@capgemini.com. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.

+
+
+

Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project’s leadership.

+
+
+
+
+

Attribution

+
+
+

This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/github/1.0/CONTRIBUTING.html b/docs/github/1.0/CONTRIBUTING.html new file mode 100644 index 00000000..2e724e6d --- /dev/null +++ b/docs/github/1.0/CONTRIBUTING.html @@ -0,0 +1,677 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Contributing

+
+
+

devonfw is truly free and open. +We are looking forward to your contribution and are more than happy to receive your feedback and improvements to code and documentation. +This page describes the few conventions to follow. +Please note that this is an open and international project and all content has to be in English language. +Also read our code of conduct.

+
+
+
+
+

Using GitHub

+
+
+

We are using GitHub as our social coding platform. Hence, we follow the principles of GitHub to deal with changes. If you are a first time contributor to GitHub projects, read first contributions

+
+
+
+
+

Account

+
+
+

In case you do not have an account please first join GitHub. +If you have a CORP username use it also as GitHub username. +If possible and suitable also provide your real name in your profile. +Now that you are logged into GitHub you are ready to go.

+
+
+
+
+

Organizations

+
+
+

For devonfw we have the following organizations on GitHub:

+
+
+
    +
  • +

    devonfw

    +
    +

    The official devonfw Platform organization.

    +
    +
  • +
  • +

    devonfw-sample

    +
    +

    The organization used for sample and demo repositories. Here you can find things working in action that can give you a jumpstart. We do not claim every sample to be up-to-date. However, it needs to build and run out-of-the-box without errors.

    +
    +
  • +
  • +

    devonfw-training

    +
    +

    The organization used as a starting point for trainings about devonfw. We do not claim the repositories to be self-explanatory. In case you need to attend a training please contact us.

    +
    +
  • +
  • +

    devonfw-forge

    +
    +

    The organization used for work on incubators and other research projects. +New projects start here and in case they evolve properly and get mature, they are moved to the official devonfw organization.

    +
    +
  • +
+
+
+
+
+

Repositories

+
+
+

Within the organization we have many different repositories. +In case you want to give feedback or provide contributions you need to know the corresponding repository.

+
+
+

The major technology stacks have their own repository carrying the prefix devon4 followed by a shortcut for their stack or programming language:

+
+
+ +
+
+

Tools we provide typically have a repository named like the tool they provide (omitting prefixes like devon[fw]):

+
+
+ +
+
+

There is much more to discover. +Browse our organization to find out.

+
+
+
+
+

Trivial Changes

+
+
+

Please note that for trivial changes like a typo in the documentation you do not need to follow a complex process. Please do the following:

+
+
+
    +
  • +

    Just browse to the file online on GitHub.

    +
  • +
  • +

    Click on the small pencil icon on the top right of the file content.

    +
  • +
  • +

    Make the required changes.

    +
  • +
  • +

    When editing documentation, verify your change by switching to the Preview tab at the top.

    +
  • +
  • +

    When your change is complete, select Create a new branch for this commit and start a pull request at the bottom.

    +
  • +
  • +

    Commit your change by clicking the green Propose file change button at the bottom.

    +
  • +
  • +

    Now fill summary and description and click on the green Create pull request button.

    +
  • +
  • +

    That is all. Thank you very much! For details about pull requests read here.

    +
  • +
+
+
+

For non-trivial changes please read on.

+
+
+
+
+

Issues

+
+
+

We are working issue-based, so check if there is already an issue in our tracker for the task you want to work on. +Otherwise first create a new issue for it (e.g. a bug report or a feature request).

+
+
+

If you want to contribute actively to resolve the issue (by providing code, documentation, etc.), +please ensure via communication in that issue (comments, assigned user, etc.) that this is recognized and accepted by somebody from the community. +Especially in case of more complex issues, please make sure not to miss out on such consensus with the community +and ensure that there is a common understanding of what and potentially even how to do it. +You surely do not want to invest your valuable work and time into something that will later be rejected by the community. +When you have been assigned to the issue (see Assignees on the right) you can be sure that nobody else will work on the same issue in parallel and you are ready to go.

+
+
+
+
+

Code Changes

+
+
+

Before you start with your code changes, please check the following conventions:

+
+
+
    +
  • +

    For each programming language we have a stack repository (see repositories) containing documentation about the coding conventions (example: Java). Please read and follow these conventions before making (bigger) changes.

    +
  • +
  • +

    Use devon-ide to setup your development environment and get code formatters, etc. configured properly as we do not like "diff-wars" because of inconsistent formatter settings.

    +
  • +
  • +

    Thank you, happy coding!

    +
  • +
+
+
+
+
+

Documentation Changes

+
+
+

Before you start with your documentation changes, please check the following conventions:

+
+
+
    +
  • +

    Documentation will always be found in the documentation folder at the root of a repository.

    +
  • +
  • +

    All our documentation is written in the adoc format.

    +
  • +
  • +

    All documentation files need to carry the .adoc extension and should be named in lower-train-case style.

    +
  • +
  • +

    Common prefixes help to categorize documentation files: tutorial- is used for step-by-step instructions, guide- is used for guidelines on a particular aspect, coding- is for specific conventions or details about source-code, alternative- is for less official options that are not recommended but to still offer knowledge for people using that option, decision- is for rationales why a complex (technology) decision was made.

    +
  • +
  • +

    For automatic spellcheck we have integrated PySpelling in repositories. In case the spellcheck fails, you can refer to the document here

    +
  • +
  • +

    Please read and follow our documentation guidelines.

    +
  • +
+
+
+

contributing-internal-snippets

+
+
+
+
+

Testing Changes

+
+
+

To test your changes all you need to do is run the following command:

+
+
+
+
devon build
+
+
+
+

If the build failed, you need to rework your changes.

+
+
+
+
+

Committing Changes

+
+
+

Always commit your changes in small logical units associated with an issue (see above section) using the commit message format:

+
+
+
+
#«issue-id»: «describe your change»
+Then GitHub will automatically link the commit with the issue.
+
+
+
+

Example:

+
+
+
+
#1: added REST service for tablemanagement
+
+
+
+

In case you worked on an issue from a different repository (e.g. change in ide-settings due to issue in ide), we use this commit message format:

+
+
+
+
«organization»/«repository»#«issue-id»: «describe your change»
+
+
+
+

Example:

+
+
+
+
devonfw/devon4j#1: added REST service for tablemanagement
+
+
+
+
+
+

Definition of Done

+
+
+

To complete your changes ensure the following aspects:

+
+
+
    +
  • +

    You have tested your changes and the build succeeds.

    +
  • +
  • +

    Code and documentation are in sync (if you coded new features you also extended documentation, etc.).

    +
  • +
  • +

    You followed the coding conventions and documentation guidelines.

    +
  • +
  • +

    For new features you have added automated unit tests.

    +
  • +
+
+
+

Do not worry; we will assist you in case you are unsure or missed out on something. +However, you make your and our life easier, if you follow this Definition of Done (DoD) before providing your pull request.

+
+
+

Please ensure the following aspects:

+
+
+
    +
  • +

    When selecting a title for your pull request, follow the same conventions that apply to commit messages.

    +
  • +
  • +

    Also add the related issue(s) to the description of the pull request (e.g. fixes #«issue-id»).

    +
  • +
  • +

    If you are providing a PR that is not yet ready for merging, please use GitHub’s draft pull request feature:

    +
    +
      +
    • +

      Expand the drop-down menu of the green Create Pull Request button and select Create Draft Pull Request

      +
    • +
    • +

      You can make further code changes to your PR by pushing commits to the corresponding feature branch.

      +
    • +
    • +

      When you’re ready to get feedback on your PR, click the Ready for review button.

      +
    • +
    +
    +
  • +
  • +

    If you are providing a PR that is ready for merging, click on the green Create Pull Request button.

    +
  • +
+
+
+

Your pull request will automatically be checked for these requirements:

+
+
+
    +
  • +

    Can be merged without conflicts.

    +
  • +
  • +

    Builds correctly (no compile or test errors).

    +
  • +
  • +

    CLA has been signed. If you contribute for the first time, you need to sign the CLA once.

    +
  • +
+
+
+

Please do the required tasks and rework until all checks are satisfied. +From here a reviewer should take over and give feedback. +In the best case, your contribution gets merged and everything is completed. +You might also get review feedback and requests for changes. +In that case walk through the review feedback and try to resolve it. +Once you push your new commits, the PR gets updated automatically and all checks will verify again. +Also GitHub will automatically mark resolved review comments as outdated. +If you do not plan to put any further work into your PR before it is completed and merged, please let us know by writing a comment accordingly. +We might find resources to get the PR done for you if it is already valuable. +In case you should not get feedback for weeks, do not hesitate to ask the community.

+
+
+ + + + + +
+ + +If one (typically the reviewer) has to change the base branch (because the wrong develop branch was used, see above) onto which the changes will be merged, one can do the same by following the instructions at here. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/github/1.0/DEFINITION_OF_DONE.html b/docs/github/1.0/DEFINITION_OF_DONE.html new file mode 100644 index 00000000..d1b7b242 --- /dev/null +++ b/docs/github/1.0/DEFINITION_OF_DONE.html @@ -0,0 +1,414 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Definition Of Done

+
+
+

A user story is considered Done in the sprint if the below conditions are met:

+
+
+
    +
  1. +

    Implementation of task following guidelines

    +
    +
      +
    • +

      Follow coding convention and standard guidelines to complete your task implementation. Check out here

      +
    • +
    • +

      TODO - Check if automation is possible

      +
    • +
    +
    +
  2. +
  3. +

    Add unit test cases +You should add unit test cases or integration tests (if required) for your topic. Unit/integration tests should pass.

    +
    +
      +
    • +

      Enhancements require unit test cases to be added.

      +
    • +
    • +

      For bug fixes, existing unit test cases need to be updated.

      +
    • +
    +
    +
  4. +
  5. +

    Build success on local

    +
    +
      +
    • +

      Build on local should pass without any errors.

      +
    • +
    +
    +
  6. +
  7. +

    Create/modify documentation

    +
    +
      +
    • +

      Follow documentation guideline and add/modify documentation for your topic.

      +
    • +
    +
    +
  8. +
  9. +

    Raise Pull Request for changes done

    +
    + +
    +
  10. +
  11. +

    Pull Request is reviewed and merged

    +
    +
      +
    • +

      Points to be checked in the review

      +
      +
        +
      1. +

        Followed coding convention while implementation. For devon4j you can find details here

        +
      2. +
      3. +

        If working on devon4j, add javadocs wherever necessary

        +
      4. +
      5. +

        Added unit tests

        +
      6. +
      7. +

        Build is successful
        +format the code and pass the local linter (check for indentation, clean your code, remove unnecessary blank lines)

        +
      8. +
      9. +

        Removed the warnings

        +
      10. +
      11. +

        Removed unnecessary comments and do not mention author name etc

        +
      12. +
      13. +

        A reference to a related issue (mention Github issue number in PR name) in your repository while raising PR

        +
      14. +
      15. +

        Add or modify documentation for topic

        +
      16. +
      +
      +
    • +
    • +

      The code should be reviewed by one or more people with a similar competence. If there are any review comments incorporate those changes. Once this is complete reviewer can merge PR. At least 2 rounds of peer review are recommended.

      +
    • +
    +
    +
  12. +
  13. +

    Peer test

    +
    +
      +
    • +

      Technical implementation and documentation are peer tested

      +
    • +
    • +

      Take screen shots and add it to the JIRA story.

      +
    • +
    +
    +
  14. +
  15. +

    Create tutorial/videos

    +
    +
      +
    • +

      If required, add a task for YouTube/Katacoda tutorials for the topic.

      +
    • +
    +
    +
  16. +
+
+
+

|=== +|Accountability|Responsibility +|Peer reviewer |Developer +|===

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/github/1.0/SECURITY.html b/docs/github/1.0/SECURITY.html new file mode 100644 index 00000000..c5e7f5f9 --- /dev/null +++ b/docs/github/1.0/SECURITY.html @@ -0,0 +1,285 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Security

+
+
+

The devonfw team is committed to keeping its projects and – by extension – the users of these projects safe from information security risks. For this reason, all our GitHub repositories are monitored by an instance of Dependabot, which notifies the responsible product/project owners as soon as known security vulnerabilities are detected.

+
+
+

Although this covers most vulnerabilities introduced by external or third-party dependencies, there is still a chance that flaws in the code of our framework components inadvertently introduce other vulnerabilities or exposures.

+
+
+

If you suspect to have found such an issue, we implore you to directly contact our support team at icsddevonfwsupport.apps2@capgemini.com. In less severe cases you may open a new issue report in the affected repository. If applicable, please provide the related CVE identifier(s) in your report.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/github/1.0/SPELLCHECK_HELP.html b/docs/github/1.0/SPELLCHECK_HELP.html new file mode 100644 index 00000000..8d5132c6 --- /dev/null +++ b/docs/github/1.0/SPELLCHECK_HELP.html @@ -0,0 +1,395 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Spellcheck

+
+ +
+
+
+

What is Spellcheck and how it is configured?

+
+
+

Spellcheck is set up in each repository to automatically detect misspelled words in the documents of the respective repository. We are using the PySpelling module for automatic spellcheck. Basic usage can be found here. The PySpelling GitHub repository can be found here. +devonfw-spellcheck.yml sets up this workflow for each respective repository. It reuses a common workflow from the .github repository.

+
+
+

Spellcheck workflow has 2 parts:

+
+
+
    +
  • +

    Configuration

    +
  • +
  • +

    Dictionary

    +
  • +
+
+
+
+
+

Configuration

+
+
+

Configuration file from .github contains all configuration for spellcheck workflow.

+
+
+

In configuration file you can configure folder path, file type to be scanned as well as filters to ignore some format of text. Currently, spellcheck is following en_US as configured in this file.

+
+
+
+
+

Dictionary

+
+
+

Spellcheck has option to add your customized spellings which you think are correct but detected as mistakes in spellcheck report.

+
+
+

You can add technical terms or nouns detected as spell mistake. For example you can add names like FreeMarker or Angular etc. Also you can add standard terms like localhost, IDE etc. +You can find dictionary file here.

+
+
+
+
+

How to fix your workflow?

+
+
+

After you have understood how spellcheck is configured, let's see how we can fix a failing workflow:

+
+
+
    +
  • +

    NOTE : Spellcheck is following en_US. Do not change this setting. It will affect setting in all repositories causing workflow to fail.

    +
  • +
  • +

    Check for actual misspelled words and fix it in the document.

    +
  • +
  • +

    Check if any unnecessary special character is present in file and remove it.

    +
  • +
  • +

    Put your single line command in backticks like

    +
  • +
+
+
+
+
`command`
+
+
+
+
    +
  • +

    If you have multiline command put it in triple backticks like below

    +
  • +
+
+
+
+
``` command ```
+
+
+
+

or

+
+
+

put it in format like

+
+
+
+
[source]
+----
+command
+----
+
+
+
+

Be careful if you have nested backticks. Make sure you have formatted it correctly.

+
+
+
    +
  • +

    By default, URLs starting with http or https are ignored. The dictionary path is also ignored. But in case you find that some path like /sample/text etc. is not ignored, put it into backticks. You can find out more about what is ignored in spellcheck by looking at the configuration file https://github.com/devonfw/.github/blob/master/.github/workflows/.spellcheck.yml.

    +
  • +
  • +

    Dictionary file is a common repository used by spellchecker. Check if your word already exists for example you have url in your document but if you check in dictionary it will be URL all in uppercase letter. Please follow dictionary standard and change your word from url to URL. +If you want to add any word to dictionary add its official word for example. FreeMarker can be written like FreeMarker or freemarker etc. But as official word is FreeMarker, we should not add all lowercase word freemarker to dictionary but only use FreeMarker in camel case.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/github/1.0/SUPPORT.html b/docs/github/1.0/SUPPORT.html new file mode 100644 index 00000000..f000cb27 --- /dev/null +++ b/docs/github/1.0/SUPPORT.html @@ -0,0 +1,280 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Support

+
+
+

We strive to support all community members and users of devonfw as best we can, whether that be in their efforts to contribute to the framework directly, or to develop applications based on the framework. +If you have any questions about these topics, please don’t hesitate to contact us via e-mail at icsddevonfwsupport.apps2@capgemini.com.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/github/1.0/index.html b/docs/github/1.0/index.html new file mode 100644 index 00000000..06a60186 --- /dev/null +++ b/docs/github/1.0/index.html @@ -0,0 +1,277 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw default community health files

+
+
+

Apache License, Version 2.0

+
+
+

devonfw is a standard development platform and consists of many repositories. +This repository is for default community health files of this entire devonfw github organization.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/github/1.0/key-principles.html b/docs/github/1.0/key-principles.html new file mode 100644 index 00000000..66689c6f --- /dev/null +++ b/docs/github/1.0/key-principles.html @@ -0,0 +1,323 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Key Principles

+
+
+

For devonfw we follow these fundamental key principles for all decisions about architecture, design, or choosing standards, libraries, and frameworks:

+
+
+
    +
  • +

    KISS
    +Keep it small and simple - prefer easy solutions, do not over-engineer.

    +
  • +
  • +

    Open
    +Commitment to open standards and solutions (no required dependencies to commercial or vendor-specific standards or solutions)

    +
  • +
  • +

    Patterns
    +We concentrate on providing patterns, best-practices and examples rather than writing framework code.

    +
  • +
  • +

    Solid
    +We pick solutions that are established and have been proven to be solid and robust in real-life (business) projects.

    +
  • +
  • +

    Component Oriented Design
    +We follow a strictly component oriented design to address the following sub-principles:

    +
    +
      +
    • +

      Separation of concerns

      +
    • +
    • +

      Reusability and avoiding redundant code

      +
    • +
    • +

      Information hiding via component API and its exchangeable implementation treated as secret.

      +
    • +
    • +

      Design by Contract for self-contained, descriptive, and stable component APIs.

      +
    • +
    • +

      Layering as well as separation of business logic from technical code for better maintenance.

      +
    • +
    • +

      Data Sovereignty (and high cohesion with low coupling) says that a component is responsible for its data and changes to this data shall only happen via the component. Otherwise maintenance problems will arise to ensure that data remains consistent. Therefore APIs of a component that may be used by other components are designed call-by-value and not call-by-reference.

      +
    • +
    +
    +
  • +
  • +

    Homogeneity
    +Solve similar problems in similar ways and establish a uniform code-style. For each programming language addressed by devonfw we therefore define clear coding-conventions.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/hangar/1.0/aws/setup-aws-account-iam-for-eks.html b/docs/hangar/1.0/aws/setup-aws-account-iam-for-eks.html new file mode 100644 index 00000000..2f5c3ac8 --- /dev/null +++ b/docs/hangar/1.0/aws/setup-aws-account-iam-for-eks.html @@ -0,0 +1,555 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Setup AWS account IAM for deployment in EKS

+
+
+

The scope of this section is to prepare an AWS account to be ready for deploying in AWS EKS. By the end of this guide, a new IAM user belonging to a group with the required permissions will be created.

+
+
+
+
+

Preparing environment

+
+
+

First of all, install AWS CLI and Python for your OS.

+
+
+
+
+

Prerequisites

+
+
+
    +
  • +

    An AWS account with IAM full access permission.

    +
  • +
+
+
+
+
+

Alternative

+
+
+

In case you do not have an account or permission to create new IAM users, request it to your AWS administrator asking for the following policies being attached. Then go to Check IAM user permissions.

+
+
+
+
+

== Required managed policies

+
+
+
+
AmazonEC2FullAccess
+IAMReadOnlyAccess
+AmazonEKSServicePolicy
+AmazonS3FullAccess
+AmazonEC2ContainerRegistryFullAccess
+
+
+
+
+
+

== Required custom policies

+
+
+

Find them on /scripts/accounts/aws/eks-custom-policies.json.

+
+
+
+
+

Creating IAM user using provided script

+
+
+

The script located at /scripts/accounts/aws/create-user.sh will automatically create a user, also enrolling it in a newly created group with the required policies attached.

+
+
+

In case you do not have an AWS access key (needed to authenticate through API), follow this guide to create it.

+
+
+
+
+

Usage

+
+
+
+
create-user.sh \
+  -u <username> \
+  -g <group> \
+  [-p <policies...>] \
+  [-f <policies file path>] \
+  [-c <custom policies file path>] \
+  [-a <AWS access key>] \
+  [-s <AWS secret key>] \
+  [-r <region>]
+
+
+
+
+
+

Flags

+
+
+
+
-u      [Required] Username for the new user
+-g      [Required] Group name for the group to be created or used
+-p      [Optional] Policies to be attached to the group, splitted by comma
+-f      [Optional] Path to a file containing the policies to be attached to the group
+-c      [Optional] Path to a json file containing the custom policies to be attached to the group.
+-a      [Optional] AWS administrator access key
+-s      [Optional] AWS administrator secret key
+-r      [Optional] AWS region
+
+
+
+
+
+

Example

+
+
+
+
./create-user.sh -u Bob -g DevOps -f ./eks-managed-policies.txt -c ./eks-custom-policies.json -a "myAccessKey" -s "mySecretKey" -r eu-west-1
+
+
+
+ + + + + +
+ + +If the "DevOps" group does not exist, it will be created. +
+
+
+ + + + + +
+ + +Required policies for using EKS are located at /scripts/accounts/aws/eks-managed-policies.txt and /scripts/accounts/aws/eks-custom-policies.json +
+
+
+
+
+

After execution

+
+
+

On success, the newly created user access data will be shown as output:

+
+
+
+
Access key ID: <accessKeyID>
+Secret access key: <secretAccessKey>
+
+
+
+ + + + + +
+ + +It is mandatory to store the access key ID and the secret access key securely at this point, as they will not be retrievable again. +
+
+
+
+
+

Check IAM user permissions

+
+
+

The script located at /scripts/accounts/aws/verify-account-policies.sh will check that the necessary policies were attached to the IAM user.

+
+
+
+
+

Usage

+
+
+
+
verify-account-policies.sh \
+  -u <username> \
+  [-p <policies...>] \
+  [-f <policies file path>] \
+  [-c <custom policies file path>] \
+  [-a <AWS access key>] \
+  [-s <AWS secret key>] \
+  [-r <region>]
+
+
+
+
+
+

Flags

+
+
+
+
-u      [Required] Username whose policies will be checked
+-p      [Optional] Policies to be checked, splitted by comma
+-f      [Optional] Path to a file containing the policies to be checked
+-c      [Optional] Path to a file containing the custom policies to be checked
+-a      [Optional] AWS administrator access key
+-s      [Optional] AWS administrator secret key
+-r      [Optional] AWS region
+
+
+
+ + + + + +
+ + +At least one policies flag (-p, -f or -c) is required. +
+
+
+
+
+

Example

+
+
+
+
./verify-account-policies.sh -u Bob -f ./eks-managed-policies.txt -c ./eks-custom-policies.json -a "myAccessKey" -s "mySecretKey" -r eu-west-1
+
+
+
+

After execution, provided policies will be shown preceded by an OK or FAILED depending on the attachment status.

+
+
+ + + + + +
+ + +Required policies for using EKS are located at /scripts/accounts/aws/eks-managed-policies.txt and /scripts/accounts/aws/eks-custom-policies.json +
+
+
+
+
+

Configure AWS CLI

+
+
+

Once you have been provided with an IAM user with the required policies attached, setup the AWS CLI using the following command:

+
+
+
+
aws configure
+
+
+
+

Fill the prompted fields with your data:

+
+
+
+
AWS Access Key ID [None]: <accessKeyID>
+AWS Secret Access Key [None]: <secretAccessKey>
+Default region name [None]: eu-west-1
+Default output format [None]: json
+
+
+
+

Now you have AWS CLI ready to use.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/hangar/1.0/aws/setup-sonarqube-instance.html b/docs/hangar/1.0/aws/setup-sonarqube-instance.html new file mode 100644 index 00000000..b6f57794 --- /dev/null +++ b/docs/hangar/1.0/aws/setup-sonarqube-instance.html @@ -0,0 +1,438 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Setting up a SonarQube instance in AWS

+
+
+

The scope of this section is to deploy an AWS EC2 instance running SonarQube for further usage from a CI pipeline. A set of scripts and a Terraform recipe have been created in order to assist you in the launch of a SonarQube instance with an embedded database.

+
+
+
+
+

Getting Started

+
+ +
+
+
+

Prerequisites

+
+
+
    +
  • +

    Install Terraform.

    +
  • +
  • +

    Install AWS CLI.

    +
  • +
  • +

    Have a SSH keypair for the SonarQube instance. You can use an existing one or create a new one with the following command:

    +
  • +
+
+
+
+
aws ec2 create-key-pair --key-name sonarqube --query 'KeyMaterial' --output text > sonarqube.pem
+
+
+
+ + + + + +
+ + +This will create a public key, directly stored in AWS (current region only), and a private key stored in the sonarqube.pem file, that will be necessary if you ever need to access the instance, so be sure you store it securely. +
+
+
+
+
+

Relevant files

+
+
+
    +
  • +

    main.tf contains declarative definition written in HCL of AWS infrastructure.

    +
  • +
  • +

    setup_sonarqube.sh script to be run on EC2 instance that installs and deploys a container running SonarQube.

    +
  • +
  • +

    variables.tf contains variable definition for main.tf.

    +
  • +
  • +

    terraform.tfvars contains values (user-changeable) for the variables defined in variables.tf.

    +
  • +
  • +

    terraform.tfstate contains current state of the created infrastructure. Should be stored securely.

    +
  • +
  • +

    set-config.sh assists user in setting the values of terraform.tfvars.

    +
  • +
+
+
+
+
+

Usage

+
+
+

First, you need to initialize the working directory containing Terraform configuration files (located at /scripts/sonarqube) and install any required plugins:

+
+
+
+
terraform init
+
+
+
+

Then, you may need to customize some input variables about the environment. To do so, you can either edit terraform.tfvars file or take advantage of the set-config.sh script, which allows you to create or update values for the required variables, passing them as flags. As a full example:

+
+
+
+
./set-config.sh --aws_region eu-west-1 --vpc_cidr_block 10.0.0.0/16 --subnet_cidr_block 10.0.1.0/24 --nic_private_ip 10.0.1.50 --instance_type t3a.small --keypair_name sonarqube
+
+
+
+ + + + + +
+ + +Unless changed, the keypair name expected by default is sonarqube. +
+
+
+

Finally, deploy SonarQube instance:

+
+
+
+
terraform apply --auto-approve
+
+
+
+ + + + + +
+ + +terraform apply command performs a plan and actually carries out the planned changes to each resource using the relevant infrastructure provider’s API. You can use it to perform changes on the created resources later on. Remember to securely store terraform.tfstate file, otherwise you will not be able to perform any changes, including detroying them, from Terraform. More insights here. +
+
+
+

In particular, this will create an Ubuntu-based EC2 instance in AWS and deploy a Docker container running SonarQube.

+
+
+

You will get the public IP address of the EC2 instance as output. Take note of it, you will need it later on.

+
+
+

After a few minutes, you will be able to access SonarQube web interface on http://sonarqube_public_ip:9000 (replace with actual IP) with the following credentials:

+
+
+
    +
  • +

    Username: admin

    +
  • +
  • +

    Password: admin

    +
  • +
+
+
+ + + + + +
+ + +Change the default password promptly. +
+
+
+
+
+

Appendix: Destroy SonarQube instance

+
+
+

As long as you keep the terraform.tfstate file generated when creating the SonarQube instance, you can easily destroy it and all associated resources by executing:

+
+
+
+
terraform destroy
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/hangar/1.0/azure-devops/setup-build-pipeline.html b/docs/hangar/1.0/azure-devops/setup-build-pipeline.html new file mode 100644 index 00000000..8061f20f --- /dev/null +++ b/docs/hangar/1.0/azure-devops/setup-build-pipeline.html @@ -0,0 +1,367 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Setting up a Build Pipeline on Azure DevOps

+
+
+

In this section we will create a build pipeline for compiling project code. This pipeline will be configured to be triggered every time there is a commit to the Azure DevOps repository, regardless of which branch it is made on.

+
+
+

The creation of the pipeline will follow the project workflow, so a new branch named feature/build-pipeline will be created and the YAML file for the pipeline will be pushed to it.

+
+
+

Then, a Pull Request (PR) will be created in order to merge the new branch into the appropriate branch (provided in -b flag). The PR will be automatically merged if the repository policies are met. If the merge is not possible, either the PR URL will be shown as output, or it will be opened in your web browser if using -w flag.

+
+
+

The script located at /scripts/pipelines/azure-devops/pipeline_generator.sh will automatically create this new branch, create a build pipeline based on a YAML template appropriate for the project programming language or framework, create the Pull Request and, if it is possible, merge this new branch into the specified branch.

+
+
+
+
+

Prerequisites

+
+
+

This script will commit and push the corresponding YAML template into your repository, so please be sure your local repository is up-to-date (i.e you have pulled latest changes with git pull).

+
+
+
+
+

Creating the pipeline using provided script

+
+ +
+
+
+

Usage

+
+
+
+
pipeline_generator.sh \
+  -c <config file path> \
+  -n <pipeline name> \
+  -l <language or framework> \
+  -d <project local path> \
+  [-b <branch>] \
+  [-w]
+
+
+
+ + + + + +
+ + +The config file for the build pipeline is located at /scripts/pipelines/azure-devops/templates/build/build-config.cfg. +
+
+
+
+
+

Flags

+
+
+
+
-c    [Required] Configuration file containing pipeline definition.
+-n    [Required] Name that will be set to the pipeline.
+-l    [Required] Language or framework of the project.
+-d    [Required] Local directory of your project (the path should always be using '/' and not '\').
+-b               Name of the branch to which the Pull Request will target. PR is not created if the flag is not provided.
+-w               Open the Pull Request on the web browser if it cannot be automatically merged. Requires -b flag.
+
+
+
+
+
+

Examples

+
+ +
+
+
+

== Quarkus project

+
+
+
+
./pipeline_generator.sh -c ./templates/build/build-config.cfg -n quarkus-project-build -l quarkus -d C:/Users/$USERNAME/Desktop/quarkus-project -b develop -w
+
+
+
+ + + + + +
+ + +Remember to write the path to the local repository with '/' and not '\' on Windows. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/hangar/1.0/azure-devops/setup-project.html b/docs/hangar/1.0/azure-devops/setup-project.html new file mode 100644 index 00000000..0acaae98 --- /dev/null +++ b/docs/hangar/1.0/azure-devops/setup-project.html @@ -0,0 +1,368 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Setup Azure DevOps project

+
+
+

By the end of this guide a new project in Azure DevOps will be created.

+
+
+
+
+

Prerequisites

+
+
+
    +
  1. +

    Sign up into Azure DevOps (just follow the section Sign up with a personal Microsoft account).

    +
  2. +
  3. +

    Install the Azure CLI.

    +
  4. +
  5. +

    Create an Azure DevOps Personal Access Token (PAT).

    +
  6. +
+
+
+
+
+

Creating the Azure DevOps project

+
+
+

There are two ways of creating an Azure DevOps project:

+
+
+
    +
  1. +

    Create it manually using the web interface following Microsoft official guide.

    +
  2. +
  3. +

    Create it in an automated way using the provided script, as shown below.

    +
  4. +
+
+
+ + + + + +
+ + +Both when done manually and when using the script with -p flag, you will need to choose a process workflow. Learn more about the different options in the official documentation. By default, "Basic" workflow is chosen. +
+
+
+
+
+

Creating Azure DevOps project using provided script

+
+
+

The script located at scripts/accounts/azure-devops/create-project.sh enables you to create a new Azure DevOps project or configure an existing one.

+
+
+
+
+

Usage

+
+
+
+
create-project.sh \
+  -n <name> \
+  -d <description> \
+  -o <organization> \
+  -v <visibility> \
+  -t <PAT> \
+  [-w <process workflow>]
+
+
+
+
+
+

Flags

+
+
+
+
-n    [Required] Name of the new project.
+-d    [Required] Description for the new project.
+-o    [Required] Name of the organization for which the project will be configured.
+-v    [Required] Visibility. Accepted values: private, public.
+-t    [Required] PAT token to login Azure DevOps.
+-w               Process workflow that will be used. Accepted values: basic, agile, scrum, cmmi. Default: basic.
+
+
+
+
+
+

Example

+
+
+

./create-project.sh -n "Hello World" -d "This is a sample application" -o devon-hangar -v public -t myToken -w agile

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/hangar/1.0/azure-devops/setup-quality-pipeline.html b/docs/hangar/1.0/azure-devops/setup-quality-pipeline.html new file mode 100644 index 00000000..ee6a6d62 --- /dev/null +++ b/docs/hangar/1.0/azure-devops/setup-quality-pipeline.html @@ -0,0 +1,380 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Setting up a Quality Pipeline on Azure DevOps

+
+
+

In this section we will create a quality pipeline for analyzing project code with SonarQube. This pipeline will be configured in order to be triggered every time a commit to the Azure DevOps repository is done, regardless of which branch it is made on.

+
+
+

The creation of this pipeline will follow the project workflow, so a new branch named feature/quality-pipeline will be created and the YAML file for the pipeline will be pushed to it.

+
+
+

Then, a Pull Request (PR) will be created in order to merge the new branch into the appropriate branch (provided in -b flag). The PR will be automatically merged if the repository policies are met. If the merge is not possible, either the PR URL will be shown as output, or it will be opened in your web browser if using -w flag.

+
+
+

The script located at /scripts/pipelines/azure-devops/pipeline_generator.sh will automatically create this new branch, create a quality pipeline based on a YAML template appropriate for the project programming language or framework, create the Pull Request, and if it is possible, merge this new branch into the specified branch.

+
+
+
+
+

Prerequisites

+
+
+
    +
  • +

    This script will commit and push the corresponding YAML template into your repository, so please be sure your local repository is up-to-date (i.e you have pulled the latest changes with git pull).

    +
  • +
  • +

    Generate a SonarQube token (just follow the section 'Generating a token`).

    +
  • +
+
+
+
+
+

Creating the pipeline using provided script

+
+ +
+
+
+

Usage

+
+
+
+
pipeline_generator.sh \
+  -c <config file path> \
+  -n <pipeline name> \
+  -l <language or framework> \
+  -p <build pipeline name> \
+  -u <sonarqube url> \
+  -t <sonarqube token> \
+  -d <project local path> \
+  [-b <branch>] \
+  [-w]
+
+
+
+ + + + + +
+ + +The config file for the quality pipeline is located at /scripts/pipelines/azure-devops/templates/quality/quality-config.cfg. +
+
+
+
+
+

Flags

+
+
+
+
-c    [Required] Configuration file containing pipeline definition.
+-n    [Required] Name that will be set to the pipeline.
+-l    [Required] Language or framework of the project.
+-p    [Required] Build pipeline name.
+-u    [Required] SonarQube URL.
+-t    [Required] SonarQube token.
+-d    [Required] Local directory of your project (the path should always be using '/' and not '\').
+-b               Name of the branch to which the Pull Request will target. PR is not created if the flag is not provided.
+-w               Open the Pull Request on the web browser if it cannot be automatically merged. Requires -b flag.
+
+
+
+
+
+

Examples

+
+ +
+
+
+

== Quarkus project

+
+
+
+
./pipeline_generator.sh -c ./templates/quality/quality-config.cfg -n quarkus-project-quality -l quarkus -p quarkus-project-build -u http://52.17.210.4:9000 -t 6ce6663b63fc02881c6ea4c7cBa6563b8247a04e -d C:/Users/$USERNAME/Desktop/quarkus-project -b develop -w
+
+
+
+ + + + + +
+ + +Remember to write the path to the local repository with '/' and not '\' on Windows. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/hangar/1.0/azure-devops/setup-test-pipeline.html b/docs/hangar/1.0/azure-devops/setup-test-pipeline.html new file mode 100644 index 00000000..eb5a26cc --- /dev/null +++ b/docs/hangar/1.0/azure-devops/setup-test-pipeline.html @@ -0,0 +1,388 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Setting up a Test Pipeline on Azure DevOps

+
+
+

In this section we will create a Test pipeline on Azure DevOps for running project test cases. This pipeline will be configured in order to be triggered every time a commit to the Azure DevOps repository is done, regardless of which branch it is made on.

+
+
+

The creation of this pipeline will follow the project workflow, so a new branch named feature/test-pipeline will be created and the YAML file for the pipeline will be pushed to it.

+
+
+

Then, a Pull Request (PR) will be created in order to merge the new branch into the appropriate branch (provided in -b flag). The PR will be automatically merged if the repository policies are met. If the merge is not possible, either the PR URL will be shown as output, or it will be opened in your web browser if using -w flag.

+
+
+

The script located at /scripts/pipelines/azure-devops/pipeline_generator.sh will automatically create new branch, create a test pipeline based on a YAML template appropriate for the project programming language or framework, create the Pull Request, and if it is possible, merge this new branch into the specified branch.

+
+
+
+
+

Prerequisites

+
+
+
    +
  • +

    This script will commit and push the corresponding YAML template into your repository, so please be sure your local repository is up-to-date (i.e you have pulled latest changes with git pull).

    +
  • +
  • +

    [Optional] Having some knowledge about the application, in particular knowing if, when tested, it produces a log file or some other blob (e.g. performance profiling data) interesting to be kept as an artifact.

    +
  • +
+
+
+
+
+

Creating the pipeline using provided script

+
+ +
+
+
+

Usage

+
+
+
+
pipeline_generator.sh \
+  -c <config file path> \
+  -n <pipeline name> \
+  -l <language or framework> \
+  -d <project local path> \
+  [-a <artifact source path>] \
+  [-b <branch>] \
+  [-w]
+
+
+
+ + + + + +
+ + +The config file for the test pipeline is located at /scripts/pipelines/azure-devops/templates/test/test-config.cfg. +
+
+
+ + + + + +
+ + +If the test pipeline failed, check the logs for the failed test case(s) summary. In More actions (three dots button) you can download complete logs which include additional diagnostic information. Also, you can retrieve the "additional pipeline output" artifact containing the application logs stored in the path specified in -a flag (if applicable). +
+
+
+
+
+

Flags

+
+
+
+
-c    [Required] Configuration file containing pipeline definition.
+-n    [Required] Name that will be set to the pipeline.
+-l    [Required] Language or framework of the project.
+-d    [Required] Local directory of your project (the path should always be using '/' and not '\').
+-a               Path to be persisted as an artifact after pipeline execution, e.g. where the application stores logs or any other blob on runtime.
+-b               Name of the branch to which the Pull Request will target. PR is not created if the flag is not provided.
+-w               Open the Pull Request on the web browser if it cannot be automatically merged. Requires -b flag.
+
+
+
+
+
+

Examples

+
+ +
+
+
+

== Quarkus project

+
+
+
+
./pipeline_generator.sh -c ./templates/test/test-config.cfg -n quarkus-project-test -l quarkus -d C:/Users/$USERNAME/Desktop/quarkus-project -b develop -w
+
+
+
+ + + + + +
+ + +Remember to write the path to the local repository with '/' and not '\' on Windows. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/hangar/1.0/index.html b/docs/hangar/1.0/index.html new file mode 100644 index 00000000..e7b24958 --- /dev/null +++ b/docs/hangar/1.0/index.html @@ -0,0 +1,271 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Hangar

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/LICENSE.html b/docs/ide/1.0/LICENSE.html new file mode 100644 index 00000000..ddb1f515 --- /dev/null +++ b/docs/ide/1.0/LICENSE.html @@ -0,0 +1,2957 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

License

+
+
+

The product devonfw-ide is licensed under the following terms.

+
+
+

Binaries of this product have been made available to you by devonfw under the Apache Public License 2.0.

+
+
+

The documentation of this product is licensed under the terms of the Creative Commons License (Attribution-No Derivatives 4.0 International).

+
+
+

All of the source code to this product is available under licenses which are both free and open source.

+
+
+

More specifically, most of the source code is available under the Apache Public License 2.0. The remainder of the software which is not under the Apache license is available under one of a variety of other free and open source licenses. Those that require reproduction of the license text in the distribution are given below. (Note: your copy of this product may not contain code covered by one or more of the licenses listed here, depending on the exact product and version you choose.)

+
+
+

The following table shows the components that may be used. The column inclusion indicates the way the component is included:

+
+
+
    +
  • +

    directly included means the component is directly contained in the download package of devonfw-ide we provide

    +
  • +
  • +

    default setup means the component is not initially included but will be downloaded during the setup by default

    +
  • +
  • +

    optional means the component is neither initially included nor downloaded by default, but only gets downloaded and installed if explicitly triggered by you when invoking additional commands or if explicitly configured by your project.

    +
  • +
+
+
+
Third party components
+

|== == == == == == == == == == == = +|Component|Inclusion|License +|https://github.com/devonfw/ide[devonfw-ide] | Directly included |https://github.com/devonfw/ide/blob/master/LICENSE[ASL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] API | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] Implementation | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://openjdk.java.net/[OpenJDK] / AdoptOpenJDK (Java) |Default Setup| GPLv2 +|https://maven.apache.org/[Maven] | Default Setup|https://www.apache.org/licenses/LICENSE-2.0[ASL 2.0] +|https://code.visualstudio.com/[VS Code] |Optional| MIT (Terms) +|https://github.com/devonfw/extension-pack-vscode[extension-pack-vscode] |Optional|https://github.com/devonfw/extension-pack-vscode/blob/master/LICENSE[ASL 2.0] +|https://www.eclipse.org/[Eclipse] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] +|https://github.com/devonfw/cobigen[CobiGen] |Optional|https://github.com/devonfw/cobigen/blob/master/LICENSE.txt[ASL 2.0] +|https://marketplace.eclipse.org/content/tm-terminal[TM Terminal] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] (see here) +|https://github.com/iloveeclipse/anyedittools/[AnyEdit] |Optional|https://github.com/iloveeclipse/anyedittools/blob/master/LICENSE.md[EPL 1.0] +|https://checkstyle.org/eclipse-cs/[EclipseCS] |Optional|https://github.com/checkstyle/eclipse-cs/blob/master/LICENSE[LGPL 2.1] +|https://marketplace.eclipse.org/content/spotbugs-eclipse-plugin[SpotBugs Eclipse plugin] |Optional|https://github.com/spotbugs/spotbugs/blob/master/LICENSE[LGPL 2.1] +|https://www.eclemma.org/[EclEmma] |Optional|https://www.eclemma.org/license.html[EPL 1.0] +|https://basti1302.github.io/startexplorer/[StartExplorer] |Optional|http://www.wtfpl.net/txt/copying/[WTFPL 2] +|http://myregexp.com/eclipsePlugin.html[regex tester] 
|Optional|http://www.gnu.org/licenses/gpl-2.0.html[GPL 2.0] (see here) +|https://github.com/m-m-m/eclipse-templatevariables/[eclipse-templatevariables] |Optional|https://github.com/m-m-m/eclipse-templatevariables/blob/master/LICENSE.txt[ASL 2.0] +|https://nodejs.org/[Node.js] |Default Setup|https://raw.githubusercontent.com/nodejs/node/master/LICENSE[License] +|https://www.npmjs.com/[NPM] |Default Setup|https://github.com/npm/cli/blob/latest/LICENSE[Artistic License 2.0] (Terms) +|https://cli.angular.io/[Angular CLI] (ng) |Optional|https://cli.angular.io/license.html[MIT] +|http://groovy-lang.org/[Groovy]|Optional|https://github.com/apache/groovy/blob/master/LICENSE[ASL 2.0] +|https://ant.apache.org/[Apache Ant]|Optional|https://github.com/apache/ant/blob/master/LICENSE[ASL 2.0] +|https://gradle.org/[Gradle] |Optional|https://github.com/gradle/gradle/blob/master/LICENSE[ASL 2.0] +|https://jenkins.io/[Jenkins] |Optional|https://github.com/jenkinsci/jenkins/blob/master/LICENSE.txt[MIT] +|https://www.sonarsource.com/plans-and-pricing/community/[SonarQube (Community Edition)] |Optional|https://github.com/SonarSource/sonarqube/blob/master/LICENSE.txt[LGPL 3.0] +|https://www.sonarlint.org/eclipse/[SonarLint] |Optional|https://github.com/SonarSource/sonarlint-eclipse/blob/master/LICENSE.txt[LGPL 3+] +|https://github.com/devonfw/cicdgen[cicdgen] |Optional|https://github.com/devonfw/cicdgen/blob/develop/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4j[devon4j] |Optional|https://github.com/devonfw/devon4j/blob/develop/LICENSE[ASL 2.0] +|https://github.com/devonfw/devon4ng[devon4ng] |Optional|https://github.com/devonfw/devon4ng/blob/master/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4node[devon4node] |Optional|https://github.com/devonfw/devon4node/blob/develop/LICENSE.txt[ASL 2.0] +|https://www.jetbrains.com/idea/[IntelliJ IDEA] |Optional|https://www.jetbrains.com/opensource/idea/[ASL 2.0] +|http://www.jasypt.org/[jasypt] 
|Optional|http://www.jasypt.org/license.html[ASL 2.0] +|https://www.docker.com/[docker]|Optional|https://docs.docker.com/engine/#licensing[ASL 2.0] and EULA +|https://kubernetes.io/[kubernetes]|Optional|https://github.com/kubernetes/kubernetes/blob/master/LICENSE[ASL 2.0] +|== == == == == == == == == == == =

+
+
+
+
+

Apache Software License - Version 2.0

+
+
+
+
                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+
+
+
+

Eclipse Public License - Version 1.0

+
+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+
+1. DEFINITIONS
+
+"Contribution" means:
+
+a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and
+
+b) in the case of each subsequent Contributor:
+
+i) changes to the Program, and
+
+ii) additions to the Program;
+
+where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program.
+
+"Contributor" means any person or entity that distributes the Program.
+
+"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+"Program" means the Contributions distributed in accordance with this Agreement.
+
+"Recipient" means anyone who receives the Program under this Agreement, including all Contributors.
+
+2. GRANT OF RIGHTS
+
+a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form.
+
+b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+
+c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+
+d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+
+3. REQUIREMENTS
+
+A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that:
+
+a) it complies with the terms and conditions of this Agreement; and
+
+b) its license agreement:
+
+i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+
+ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+
+iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and
+
+iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange.
+
+When the Program is made available in source code form:
+
+a) it must be made available under this Agreement; and
+
+b) a copy of this Agreement must be included with each copy of the Program.
+
+Contributors may not remove or alter any copyright notices contained within the Program.
+
+Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution.
+
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved.
+
+This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation.
+
+
+
+
+
+

Eclipse Public License - Version 2.0

+
+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE (“AGREEMENT”). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+1. DEFINITIONS
+
+“Contribution” means:
+
+    a) in the case of the initial Contributor, the initial content Distributed under this Agreement, and
+    b) in the case of each subsequent Contributor:
+        i) changes to the Program, and
+        ii) additions to the Program;
+    where such changes and/or additions to the Program originate from and are Distributed by that particular Contributor. A Contribution “originates” from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include changes or additions to the Program that are not Modified Works.
+
+“Contributor” means any person or entity that Distributes the Program.
+
+“Licensed Patents” mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+“Program” means the Contributions Distributed in accordance with this Agreement.
+
+“Recipient” means anyone who receives the Program under this Agreement or any Secondary License (as applicable), including Contributors.
+
+“Derivative Works” shall mean any work, whether in Source Code or other form, that is based on (or derived from) the Program and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship.
+
+“Modified Works” shall mean any work in Source Code or other form that results from an addition to, deletion from, or modification of the contents of the Program, including, for purposes of clarity any new file in Source Code form that contains any contents of the Program. Modified Works shall not include works that contain only declarations, interfaces, types, classes, structures, or files of the Program solely in each case in order to link to, bind by name, or subclass the Program or Modified Works thereof.
+
+“Distribute” means the acts of a) distributing or b) making available in any manner that enables the transfer of a copy.
+
+“Source Code” means the form of a Program preferred for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+“Secondary License” means either the GNU General Public License, Version 2.0, or any later versions of that license, including any exceptions or additional permissions as identified by the initial Contributor.
+2. GRANT OF RIGHTS
+
+    a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, Distribute and sublicense the Contribution of such Contributor, if any, and such Derivative Works.
+    b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in Source Code or other form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+    c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to Distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+    d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+    e) Notwithstanding the terms of any Secondary License, no Contributor makes additional grants to any Recipient (other than those set forth in this Agreement) as a result of such Recipient's receipt of the Program under the terms of a Secondary License (if permitted under the terms of Section 3).
+
+3. REQUIREMENTS
+
+3.1 If a Contributor Distributes the Program in any form, then:
+
+    a) the Program must also be made available as Source Code, in accordance with section 3.2, and the Contributor must accompany the Program with a statement that the Source Code for the Program is available under this Agreement, and informs Recipients how to obtain it in a reasonable manner on or through a medium customarily used for software exchange; and
+    b) the Contributor may Distribute the Program under a license different than this Agreement, provided that such license:
+        i) effectively disclaims on behalf of all other Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+        ii) effectively excludes on behalf of all other Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+        iii) does not attempt to limit or alter the recipients' rights in the Source Code under section 3.2; and
+        iv) requires any subsequent distribution of the Program by any party to be under a license that satisfies the requirements of this section 3.
+
+3.2 When the Program is Distributed as Source Code:
+
+    a) it must be made available under this Agreement, or if the Program (i) is combined with other material in a separate file or files made available under a Secondary License, and (ii) the initial Contributor attached to the Source Code the notice described in Exhibit A of this Agreement, then the Program may be made available under the terms of such Secondary Licenses, and
+    b) a copy of this Agreement must be included with each copy of the Program.
+
+3.3 Contributors may not remove or alter any copyright, patent, trademark, attribution notices, disclaimers of warranty, or limitations of liability (‘notices’) contained within the Program from any copy of the Program which they Distribute, provided that Contributors may add their own appropriate notices.
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor (“Commercial Contributor”) hereby agrees to defend and indemnify every other Contributor (“Indemnified Contributor”) against any losses, damages and costs (collectively “Losses”) arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement, including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be Distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to Distribute the Program (including its Contributions) under the new version.
+
+Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. Nothing in this Agreement is intended to be enforceable by any entity that is not a Contributor or Recipient. No third-party beneficiary rights are created under this Agreement.
+Exhibit A – Form of Secondary Licenses Notice
+
+“This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License, v. 2.0 are satisfied: {name license(s), version(s), and exceptions or additional permissions here}.”
+
+    Simply including a copy of this Agreement, including this Exhibit A is not sufficient to license the Source Code under Secondary Licenses.
+
+    If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice.
+
+    You may add additional accurate notices of copyright ownership.
+
+
+
+
+
+

MIT License

+
+
+
+
Copyright <YEAR> <COPYRIGHT HOLDER>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+
+
+
+

Artistic License - Version 2.0

+
+
+
+
Copyright (c) 2000-2006, The Perl Foundation.
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+Preamble
+
+This license establishes the terms under which a given free software Package may be copied, modified, distributed, and/or redistributed. The intent is that the Copyright Holder maintains some artistic control over the development of that Package while still keeping the Package available as open source and free software.
+
+You are always permitted to make arrangements wholly outside of this license directly with the Copyright Holder of a given Package. If the terms of this license do not permit the full use that you propose to make of the Package, you should contact the Copyright Holder and seek a different licensing arrangement.
+Definitions
+
+"Copyright Holder" means the individual(s) or organization(s) named in the copyright notice for the entire Package.
+
+"Contributor" means any party that has contributed code or other material to the Package, in accordance with the Copyright Holder's procedures.
+
+"You" and "your" means any person who would like to copy, distribute, or modify the Package.
+
+"Package" means the collection of files distributed by the Copyright Holder, and derivatives of that collection and/or of those files. A given Package may consist of either the Standard Version, or a Modified Version.
+
+"Distribute" means providing a copy of the Package or making it accessible to anyone else, or in the case of a company or organization, to others outside of your company or organization.
+
+"Distributor Fee" means any fee that you charge for Distributing this Package or providing support for this Package to another party. It does not mean licensing fees.
+
+"Standard Version" refers to the Package if it has not been modified, or has been modified only in ways explicitly requested by the Copyright Holder.
+
+"Modified Version" means the Package, if it has been changed, and such changes were not explicitly requested by the Copyright Holder.
+
+"Original License" means this Artistic License as Distributed with the Standard Version of the Package, in its current version or as it may be modified by The Perl Foundation in the future.
+
+"Source" form means the source code, documentation source, and configuration files for the Package.
+
+"Compiled" form means the compiled bytecode, object code, binary, or any other form resulting from mechanical transformation or translation of the Source form.
+Permission for Use and Modification Without Distribution
+
+(1) You are permitted to use the Standard Version and create and use Modified Versions for any purpose without restriction, provided that you do not Distribute the Modified Version.
+Permissions for Redistribution of the Standard Version
+
+(2) You may Distribute verbatim copies of the Source form of the Standard Version of this Package in any medium without restriction, either gratis or for a Distributor Fee, provided that you duplicate all of the original copyright notices and associated disclaimers. At your discretion, such verbatim copies may or may not include a Compiled form of the Package.
+
+(3) You may apply any bug fixes, portability changes, and other modifications made available from the Copyright Holder. The resulting Package will still be considered the Standard Version, and as such will be subject to the Original License.
+Distribution of Modified Versions of the Package as Source
+
+(4) You may Distribute your Modified Version as Source (either gratis or for a Distributor Fee, and with or without a Compiled form of the Modified Version) provided that you clearly document how it differs from the Standard Version, including, but not limited to, documenting any non-standard features, executables, or modules, and provided that you do at least ONE of the following:
+
+(a) make the Modified Version available to the Copyright Holder of the Standard Version, under the Original License, so that the Copyright Holder may include your modifications in the Standard Version.
+(b) ensure that installation of your Modified Version does not prevent the user installing or running the Standard Version. In addition, the Modified Version must bear a name that is different from the name of the Standard Version.
+(c) allow anyone who receives a copy of the Modified Version to make the Source form of the Modified Version available to others under
+(i) the Original License or
+(ii) a license that permits the licensee to freely copy, modify and redistribute the Modified Version using the same licensing terms that apply to the copy that the licensee received, and requires that the Source form of the Modified Version, and of any works derived from it, be made freely available in that license fees are prohibited but Distributor Fees are allowed.
+Distribution of Compiled Forms of the Standard Version or Modified Versions without the Source
+
+(5) You may Distribute Compiled forms of the Standard Version without the Source, provided that you include complete instructions on how to get the Source of the Standard Version. Such instructions must be valid at the time of your distribution. If these instructions, at any time while you are carrying out such distribution, become invalid, you must provide new instructions on demand or cease further distribution. If you provide valid instructions or cease distribution within thirty days after you become aware that the instructions are invalid, then you do not forfeit any of your rights under this license.
+
+(6) You may Distribute a Modified Version in Compiled form without the Source, provided that you comply with Section 4 with respect to the Source of the Modified Version.
+Aggregating or Linking the Package
+
+(7) You may aggregate the Package (either the Standard Version or Modified Version) with other packages and Distribute the resulting aggregation provided that you do not charge a licensing fee for the Package. Distributor Fees are permitted, and licensing fees for other components in the aggregation are permitted. The terms of this license apply to the use and Distribution of the Standard or Modified Versions as included in the aggregation.
+
+(8) You are permitted to link Modified and Standard Versions with other works, to embed the Package in a larger work of your own, or to build stand-alone binary or bytecode versions of applications that include the Package, and Distribute the result without restriction, provided the result does not expose a direct interface to the Package.
+Items That are Not Considered Part of a Modified Version
+
+(9) Works (including, but not limited to, modules and scripts) that merely extend or make use of the Package, do not, by themselves, cause the Package to be a Modified Version. In addition, such works are not considered parts of the Package itself, and are not subject to the terms of this license.
+General Provisions
+
+(10) Any use, modification, and distribution of the Standard or Modified Versions is governed by this Artistic License. By using, modifying or distributing the Package, you accept this license. Do not use, modify, or distribute the Package, if you do not accept this license.
+
+(11) If your Modified Version has been derived from a Modified Version made by someone other than you, you are nevertheless required to ensure that your Modified Version complies with the requirements of this license.
+
+(12) This license does not grant you the right to use any trademark, service mark, tradename, or logo of the Copyright Holder.
+
+(13) This license includes the non-exclusive, worldwide, free-of-charge patent license to make, have made, use, offer to sell, sell, import and otherwise transfer the Package with respect to any patent claims licensable by the Copyright Holder that are necessarily infringed by the Package. If you institute patent litigation (including a cross-claim or counterclaim) against any party alleging that the Package constitutes direct or contributory patent infringement, then this Artistic License to you shall terminate on the date that such litigation is filed.
+
+(14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+
+
+

Creative Commons License - Attribution-NoDerivatives 4.0 International

+
+
+
+
By exercising the Licensed Rights (defined below), You accept and agree to be bound by the terms and conditions of this Creative Commons Attribution-NoDerivatives 4.0 International Public License ("Public License"). To the extent this Public License may be interpreted as a contract, You are granted the Licensed Rights in consideration of Your acceptance of these terms and conditions, and the Licensor grants You such rights in consideration of benefits the Licensor receives from making the Licensed Material available under these terms and conditions.
+
+Section 1 – Definitions.
+
+    Adapted Material means material subject to Copyright and Similar Rights that is derived from or based upon the Licensed Material and in which the Licensed Material is translated, altered, arranged, transformed, or otherwise modified in a manner requiring permission under the Copyright and Similar Rights held by the Licensor. For purposes of this Public License, where the Licensed Material is a musical work, performance, or sound recording, Adapted Material is always produced where the Licensed Material is synched in timed relation with a moving image.
+    Copyright and Similar Rights means copyright and/or similar rights closely related to copyright including, without limitation, performance, broadcast, sound recording, and Sui Generis Database Rights, without regard to how the rights are labeled or categorized. For purposes of this Public License, the rights specified in Section 2(b)(1)-(2) are not Copyright and Similar Rights.
+    Effective Technological Measures means those measures that, in the absence of proper authority, may not be circumvented under laws fulfilling obligations under Article 11 of the WIPO Copyright Treaty adopted on December 20, 1996, and/or similar international agreements.
+    Exceptions and Limitations means fair use, fair dealing, and/or any other exception or limitation to Copyright and Similar Rights that applies to Your use of the Licensed Material.
+    Licensed Material means the artistic or literary work, database, or other material to which the Licensor applied this Public License.
+    Licensed Rights means the rights granted to You subject to the terms and conditions of this Public License, which are limited to all Copyright and Similar Rights that apply to Your use of the Licensed Material and that the Licensor has authority to license.
+    Licensor means the individual(s) or entity(ies) granting rights under this Public License.
+    Share means to provide material to the public by any means or process that requires permission under the Licensed Rights, such as reproduction, public display, public performance, distribution, dissemination, communication, or importation, and to make material available to the public including in ways that members of the public may access the material from a place and at a time individually chosen by them.
+    Sui Generis Database Rights means rights other than copyright resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other essentially equivalent rights anywhere in the world.
+    You means the individual or entity exercising the Licensed Rights under this Public License. Your has a corresponding meaning.
+
+Section 2 – Scope.
+
+    License grant.
+        Subject to the terms and conditions of this Public License, the Licensor hereby grants You a worldwide, royalty-free, non-sublicensable, non-exclusive, irrevocable license to exercise the Licensed Rights in the Licensed Material to:
+            reproduce and Share the Licensed Material, in whole or in part; and
+            produce and reproduce, but not Share, Adapted Material.
+        Exceptions and Limitations. For the avoidance of doubt, where Exceptions and Limitations apply to Your use, this Public License does not apply, and You do not need to comply with its terms and conditions.
+        Term. The term of this Public License is specified in Section 6(a).
+        Media and formats; technical modifications allowed. The Licensor authorizes You to exercise the Licensed Rights in all media and formats whether now known or hereafter created, and to make technical modifications necessary to do so. The Licensor waives and/or agrees not to assert any right or authority to forbid You from making technical modifications necessary to exercise the Licensed Rights, including technical modifications necessary to circumvent Effective Technological Measures. For purposes of this Public License, simply making modifications authorized by this Section 2(a)(4) never produces Adapted Material.
+        Downstream recipients.
+            Offer from the Licensor – Licensed Material. Every recipient of the Licensed Material automatically receives an offer from the Licensor to exercise the Licensed Rights under the terms and conditions of this Public License.
+            No downstream restrictions. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, the Licensed Material if doing so restricts exercise of the Licensed Rights by any recipient of the Licensed Material.
+        No endorsement. Nothing in this Public License constitutes or may be construed as permission to assert or imply that You are, or that Your use of the Licensed Material is, connected with, or sponsored, endorsed, or granted official status by, the Licensor or others designated to receive attribution as provided in Section 3(a)(1)(A)(i).
+
+    Other rights.
+        Moral rights, such as the right of integrity, are not licensed under this Public License, nor are publicity, privacy, and/or other similar personality rights; however, to the extent possible, the Licensor waives and/or agrees not to assert any such rights held by the Licensor to the limited extent necessary to allow You to exercise the Licensed Rights, but not otherwise.
+        Patent and trademark rights are not licensed under this Public License.
+        To the extent possible, the Licensor waives any right to collect royalties from You for the exercise of the Licensed Rights, whether directly or through a collecting society under any voluntary or waivable statutory or compulsory licensing scheme. In all other cases the Licensor expressly reserves any right to collect such royalties.
+
+Section 3 – License Conditions.
+
+Your exercise of the Licensed Rights is expressly made subject to the following conditions.
+
+    Attribution.
+
+        If You Share the Licensed Material, You must:
+            retain the following if it is supplied by the Licensor with the Licensed Material:
+                identification of the creator(s) of the Licensed Material and any others designated to receive attribution, in any reasonable manner requested by the Licensor (including by pseudonym if designated);
+                a copyright notice;
+                a notice that refers to this Public License;
+                a notice that refers to the disclaimer of warranties;
+                a URI or hyperlink to the Licensed Material to the extent reasonably practicable;
+            indicate if You modified the Licensed Material and retain an indication of any previous modifications; and
+            indicate the Licensed Material is licensed under this Public License, and include the text of, or the URI or hyperlink to, this Public License.
+        For the avoidance of doubt, You do not have permission under this Public License to Share Adapted Material.
+        You may satisfy the conditions in Section 3(a)(1) in any reasonable manner based on the medium, means, and context in which You Share the Licensed Material. For example, it may be reasonable to satisfy the conditions by providing a URI or hyperlink to a resource that includes the required information.
+        If requested by the Licensor, You must remove any of the information required by Section 3(a)(1)(A) to the extent reasonably practicable.
+
+Section 4 – Sui Generis Database Rights.
+
+Where the Licensed Rights include Sui Generis Database Rights that apply to Your use of the Licensed Material:
+
+    for the avoidance of doubt, Section 2(a)(1) grants You the right to extract, reuse, reproduce, and Share all or a substantial portion of the contents of the database, provided You do not Share Adapted Material;
+    if You include all or a substantial portion of the database contents in a database in which You have Sui Generis Database Rights, then the database in which You have Sui Generis Database Rights (but not its individual contents) is Adapted Material; and
+    You must comply with the conditions in Section 3(a) if You Share all or a substantial portion of the contents of the database.
+
+For the avoidance of doubt, this Section 4 supplements and does not replace Your obligations under this Public License where the Licensed Rights include other Copyright and Similar Rights.
+
+Section 5 – Disclaimer of Warranties and Limitation of Liability.
+
+    Unless otherwise separately undertaken by the Licensor, to the extent possible, the Licensor offers the Licensed Material as-is and as-available, and makes no representations or warranties of any kind concerning the Licensed Material, whether express, implied, statutory, or other. This includes, without limitation, warranties of title, merchantability, fitness for a particular purpose, non-infringement, absence of latent or other defects, accuracy, or the presence or absence of errors, whether or not known or discoverable. Where disclaimers of warranties are not allowed in full or in part, this disclaimer may not apply to You.
+    To the extent possible, in no event will the Licensor be liable to You on any legal theory (including, without limitation, negligence) or otherwise for any direct, special, indirect, incidental, consequential, punitive, exemplary, or other losses, costs, expenses, or damages arising out of this Public License or use of the Licensed Material, even if the Licensor has been advised of the possibility of such losses, costs, expenses, or damages. Where a limitation of liability is not allowed in full or in part, this limitation may not apply to You.
+
+    The disclaimer of warranties and limitation of liability provided above shall be interpreted in a manner that, to the extent possible, most closely approximates an absolute disclaimer and waiver of all liability.
+
+Section 6 – Term and Termination.
+
+    This Public License applies for the term of the Copyright and Similar Rights licensed here. However, if You fail to comply with this Public License, then Your rights under this Public License terminate automatically.
+
+    Where Your right to use the Licensed Material has terminated under Section 6(a), it reinstates:
+        automatically as of the date the violation is cured, provided it is cured within 30 days of Your discovery of the violation; or
+        upon express reinstatement by the Licensor.
+    For the avoidance of doubt, this Section 6(b) does not affect any right the Licensor may have to seek remedies for Your violations of this Public License.
+    For the avoidance of doubt, the Licensor may also offer the Licensed Material under separate terms or conditions or stop distributing the Licensed Material at any time; however, doing so will not terminate this Public License.
+    Sections 1, 5, 6, 7, and 8 survive termination of this Public License.
+
+Section 7 – Other Terms and Conditions.
+
+    The Licensor shall not be bound by any additional or different terms or conditions communicated by You unless expressly agreed.
+    Any arrangements, understandings, or agreements regarding the Licensed Material not stated herein are separate from and independent of the terms and conditions of this Public License.
+
+Section 8 – Interpretation.
+
+    For the avoidance of doubt, this Public License does not, and shall not be interpreted to, reduce, limit, restrict, or impose conditions on any use of the Licensed Material that could lawfully be made without permission under this Public License.
+    To the extent possible, if any provision of this Public License is deemed unenforceable, it shall be automatically reformed to the minimum extent necessary to make it enforceable. If the provision cannot be reformed, it shall be severed from this Public License without affecting the enforceability of the remaining terms and conditions.
+    No term or condition of this Public License will be waived and no failure to comply consented to unless expressly agreed to by the Licensor.
+    Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority.
+
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 2.1

+
+
+
+
 Version 2.1, February 1999
+
+Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL.  It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users.
+
+This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below.
+
+When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things.
+
+To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it.
+
+For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights.
+
+We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library.
+
+To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others.
+
+Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license.
+
+Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs.
+
+When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library.
+
+We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances.
+
+For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License.
+
+In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system.
+
+Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library.
+
+The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you".
+
+A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables.
+
+The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".)
+
+"Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library.
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does.
+
+1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) The modified work must itself be a software library.
+    b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change.
+    c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License.
+    d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful.
+
+    (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices.
+
+Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy.
+
+This option is useful when you wish to copy part of the code of the Library into a program that is not a library.
+
+4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange.
+
+If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code.
+
+5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License.
+
+However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables.
+
+When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law.
+
+If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.)
+
+Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself.
+
+6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications.
+
+You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things:
+
+    a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.)
+    b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with.
+    c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution.
+    d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place.
+    e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy.
+
+For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute.
+
+7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above.
+    b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it.
+
+10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License.
+
+11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation.
+
+14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Libraries
+
+If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License).
+
+To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the library's name and an idea of what it does.
+Copyright (C) year  name of author
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2.1 of the License, or (at your option) any later version.
+
+This library is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with this library; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the library, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright interest in
+the library `Frob' (a library for tweaking knobs) written
+by James Random Hacker.
+
+signature of Ty Coon, 1 April 1990
+Ty Coon, President of Vice
+
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 3

+
+
+
+
Version 3, 29 June 2007
+
+Copyright © 2007 Free Software Foundation, Inc. <https://fsf.org/>
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+
+This version of the GNU Lesser General Public License incorporates the terms and conditions of version 3 of the GNU General Public License, supplemented by the additional permissions listed below.
+0. Additional Definitions.
+
+As used herein, “this License” refers to version 3 of the GNU Lesser General Public License, and the “GNU GPL” refers to version 3 of the GNU General Public License.
+
+“The Library” refers to a covered work governed by this License, other than an Application or a Combined Work as defined below.
+
+An “Application” is any work that makes use of an interface provided by the Library, but which is not otherwise based on the Library. Defining a subclass of a class defined by the Library is deemed a mode of using an interface provided by the Library.
+
+A “Combined Work” is a work produced by combining or linking an Application with the Library. The particular version of the Library with which the Combined Work was made is also called the “Linked Version”.
+
+The “Minimal Corresponding Source” for a Combined Work means the Corresponding Source for the Combined Work, excluding any source code for portions of the Combined Work that, considered in isolation, are based on the Application, and not on the Linked Version.
+
+The “Corresponding Application Code” for a Combined Work means the object code and/or source code for the Application, including any data and utility programs needed for reproducing the Combined Work from the Application, but excluding the System Libraries of the Combined Work.
+1. Exception to Section 3 of the GNU GPL.
+
+You may convey a covered work under sections 3 and 4 of this License without being bound by section 3 of the GNU GPL.
+2. Conveying Modified Versions.
+
+If you modify a copy of the Library, and, in your modifications, a facility refers to a function or data to be supplied by an Application that uses the facility (other than as an argument passed when the facility is invoked), then you may convey a copy of the modified version:
+
+    a) under this License, provided that you make a good faith effort to ensure that, in the event an Application does not supply the function or data, the facility still operates, and performs whatever part of its purpose remains meaningful, or
+    b) under the GNU GPL, with none of the additional permissions of this License applicable to that copy.
+
+3. Object Code Incorporating Material from Library Header Files.
+
+The object code form of an Application may incorporate material from a header file that is part of the Library. You may convey such object code under terms of your choice, provided that, if the incorporated material is not limited to numerical parameters, data structure layouts and accessors, or small macros, inline functions and templates (ten or fewer lines in length), you do both of the following:
+
+    a) Give prominent notice with each copy of the object code that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the object code with a copy of the GNU GPL and this license document.
+
+4. Combined Works.
+
+You may convey a Combined Work under terms of your choice that, taken together, effectively do not restrict modification of the portions of the Library contained in the Combined Work and reverse engineering for debugging such modifications, if you also do each of the following:
+
+    a) Give prominent notice with each copy of the Combined Work that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the Combined Work with a copy of the GNU GPL and this license document.
+    c) For a Combined Work that displays copyright notices during execution, include the copyright notice for the Library among these notices, as well as a reference directing the user to the copies of the GNU GPL and this license document.
+    d) Do one of the following:
+        0) Convey the Minimal Corresponding Source under the terms of this License, and the Corresponding Application Code in a form suitable for, and under terms that permit, the user to recombine or relink the Application with a modified version of the Linked Version to produce a modified Combined Work, in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.
+        1) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (a) uses at run time a copy of the Library already present on the user's computer system, and (b) will operate properly with a modified version of the Library that is interface-compatible with the Linked Version.
+    e) Provide Installation Information, but only if you would otherwise be required to provide such information under section 6 of the GNU GPL, and only to the extent that such information is necessary to install and execute a modified version of the Combined Work produced by recombining or relinking the Application with a modified version of the Linked Version. (If you use option 4d0, the Installation Information must accompany the Minimal Corresponding Source and Corresponding Application Code. If you use option 4d1, you must provide the Installation Information in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.)
+
+5. Combined Libraries.
+
+You may place library facilities that are a work based on the Library side by side in a single library together with other library facilities that are not Applications and are not covered by this License, and convey such a combined library under terms of your choice, if you do both of the following:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities, conveyed under the terms of this License.
+    b) Give prominent notice with the combined library that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+6. Revised Versions of the GNU Lesser General Public License.
+
+The Free Software Foundation may publish revised and/or new versions of the GNU Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library as you received it specifies that a certain numbered version of the GNU Lesser General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that published version or of any later version published by the Free Software Foundation. If the Library as you received it does not specify a version number of the GNU Lesser General Public License, you may choose any version of the GNU Lesser General Public License ever published by the Free Software Foundation.
+
+If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library.
+
+
+
+
+
+

GNU GENERAL PUBLIC LICENSE - Version 2

+
+
+
+
 Version 2, June 1991
+
+Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA
+
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too.
+
+When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things.
+
+To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it.
+
+For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights.
+
+We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software.
+
+Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations.
+
+Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all.
+
+The precise terms and conditions for copying, distribution and modification follow.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does.
+
+1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change.
+    b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License.
+    c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code.
+
+4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it.
+
+6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License.
+
+7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation.
+
+10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Programs
+
+If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms.
+
+To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the program's name and an idea of what it does.
+Copyright (C) yyyy  name of author
+
+This program is free software; you can redistribute it and/or
+modify it under the terms of the GNU General Public License
+as published by the Free Software Foundation; either version 2
+of the License, or (at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this when it starts in an interactive mode:
+
+Gnomovision version 69, Copyright (C) year name of author
+Gnomovision comes with ABSOLUTELY NO WARRANTY; for details
+type `show w'.  This is free software, and you are welcome
+to redistribute it under certain conditions; type `show c'
+for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright
+interest in the program `Gnomovision'
+(which makes passes at compilers) written
+by James Hacker.
+
+signature of Ty Coon, 1 April 1989
+Ty Coon, President of Vice
+
+
+
+
+
+

DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2

+
+
+
+
            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+                    Version 2, December 2004
+
+ Copyright (C) 2004 Sam Hocevar
+  14 rue de Plaisance, 75014 Paris, France
+ Everyone is permitted to copy and distribute verbatim or modified
+ copies of this license document, and changing it is allowed as long
+ as the name is changed.
+
+            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. You just DO WHAT THE FUCK YOU WANT TO.
+
+
+
+
+
+

License of Node.js

+
+
+
+
Node.js is licensed for use as follows:
+
+"""
+Copyright Node.js contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+This license applies to parts of Node.js originating from the
+https://github.com/joyent/node repository:
+
+"""
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+The Node.js license applies to all parts of Node.js that are not externally
+maintained libraries.
+
+The externally maintained libraries used by Node.js are:
+
+- Acorn, located at deps/acorn, is licensed as follows:
+  """
+    Copyright (C) 2012-2018 by various contributors (see AUTHORS)
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- Acorn plugins, located at deps/acorn-plugins, is licensed as follows:
+  """
+    Copyright (C) 2017-2018 by Adrian Heine
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- c-ares, located at deps/cares, is licensed as follows:
+  """
+    Copyright (c) 2007 - 2018, Daniel Stenberg with many contributors, see AUTHORS
+    file.
+
+    Copyright 1998 by the Massachusetts Institute of Technology.
+
+    Permission to use, copy, modify, and distribute this software and its
+    documentation for any purpose and without fee is hereby granted, provided that
+    the above copyright notice appear in all copies and that both that copyright
+    notice and this permission notice appear in supporting documentation, and that
+    the name of M.I.T. not be used in advertising or publicity pertaining to
+    distribution of the software without specific, written prior permission.
+    M.I.T. makes no representations about the suitability of this software for any
+    purpose.  It is provided "as is" without express or implied warranty.
+  """
+
+- ICU, located at deps/icu-small, is licensed as follows:
+  """
+    COPYRIGHT AND PERMISSION NOTICE (ICU 58 and later)
+
+    Copyright © 1991-2019 Unicode, Inc. All rights reserved.
+    Distributed under the Terms of Use in https://www.unicode.org/copyright.html.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of the Unicode data files and any associated documentation
+    (the "Data Files") or Unicode software and any associated documentation
+    (the "Software") to deal in the Data Files or Software
+    without restriction, including without limitation the rights to use,
+    copy, modify, merge, publish, distribute, and/or sell copies of
+    the Data Files or Software, and to permit persons to whom the Data Files
+    or Software are furnished to do so, provided that either
+    (a) this copyright and permission notice appear with all copies
+    of the Data Files or Software, or
+    (b) this copyright and permission notice appear in associated
+    Documentation.
+
+    THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
+    ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT OF THIRD PARTY RIGHTS.
+    IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
+    NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
+    DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+    DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+    TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+    PERFORMANCE OF THE DATA FILES OR SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale,
+    use or other dealings in these Data Files or Software without prior
+    written authorization of the copyright holder.
+
+    ---------------------
+
+    Third-Party Software Licenses
+
+    This section contains third-party software notices and/or additional
+    terms for licensed third-party software components included within ICU
+    libraries.
+
+    1. ICU License - ICU 1.8.1 to ICU 57.1
+
+    COPYRIGHT AND PERMISSION NOTICE
+
+    Copyright (c) 1995-2016 International Business Machines Corporation and others
+    All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, and/or sell copies of the Software, and to permit persons
+    to whom the Software is furnished to do so, provided that the above
+    copyright notice(s) and this permission notice appear in all copies of
+    the Software and that both the above copyright notice(s) and this
+    permission notice appear in supporting documentation.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
+    OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
+    HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY
+    SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER
+    RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
+    CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+    CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale, use
+    or other dealings in this Software without prior written authorization
+    of the copyright holder.
+
+    All trademarks and registered trademarks mentioned herein are the
+    property of their respective owners.
+
+    2. Chinese/Japanese Word Break Dictionary Data (cjdict.txt)
+
+     #     The Google Chrome software developed by Google is licensed under
+     # the BSD license. Other software included in this distribution is
+     # provided under other licenses, as set forth below.
+     #
+     #  The BSD License
+     #  http://opensource.org/licenses/bsd-license.php
+     #  Copyright (C) 2006-2008, Google Inc.
+     #
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     # modification, are permitted provided that the following conditions are met:
+     #
+     #  Redistributions of source code must retain the above copyright notice,
+     # this list of conditions and the following disclaimer.
+     #  Redistributions in binary form must reproduce the above
+     # copyright notice, this list of conditions and the following
+     # disclaimer in the documentation and/or other materials provided with
+     # the distribution.
+     #  Neither the name of  Google Inc. nor the names of its
+     # contributors may be used to endorse or promote products derived from
+     # this software without specific prior written permission.
+     #
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+     # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+     # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+     # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+     # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+     # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+     #
+     #
+     #  The word list in cjdict.txt are generated by combining three word lists
+     # listed below with further processing for compound word breaking. The
+     # frequency is generated with an iterative training against Google web
+     # corpora.
+     #
+     #  * Libtabe (Chinese)
+     #    - https://sourceforge.net/project/?group_id=1519
+     #    - Its license terms and conditions are shown below.
+     #
+     #  * IPADIC (Japanese)
+     #    - http://chasen.aist-nara.ac.jp/chasen/distribution.html
+     #    - Its license terms and conditions are shown below.
+     #
+     #  ---------COPYING.libtabe ---- BEGIN--------------------
+     #
+     #  /*
+     #   * Copyright (c) 1999 TaBE Project.
+     #   * Copyright (c) 1999 Pai-Hsiang Hsiao.
+     #   * All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the TaBE Project nor the names of its
+     #   *   contributors may be used to endorse or promote products derived
+     #   *   from this software without specific prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  /*
+     #   * Copyright (c) 1999 Computer Systems and Communication Lab,
+     #   *                    Institute of Information Science, Academia
+     #       *                    Sinica. All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the Computer Systems and Communication Lab
+     #   *   nor the names of its contributors may be used to endorse or
+     #   *   promote products derived from this software without specific
+     #   *   prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  Copyright 1996 Chih-Hao Tsai @ Beckman Institute,
+     #      University of Illinois
+     #  c-tsai4@uiuc.edu  http://casper.beckman.uiuc.edu/~c-tsai4
+     #
+     #  ---------------COPYING.libtabe-----END--------------------------------
+     #
+     #
+     #  ---------------COPYING.ipadic-----BEGIN-------------------------------
+     #
+     #  Copyright 2000, 2001, 2002, 2003 Nara Institute of Science
+     #  and Technology.  All Rights Reserved.
+     #
+     #  Use, reproduction, and distribution of this software is permitted.
+     #  Any copy of this software, whether in its original form or modified,
+     #  must include both the above copyright notice and the following
+     #  paragraphs.
+     #
+     #  Nara Institute of Science and Technology (NAIST),
+     #  the copyright holders, disclaims all warranties with regard to this
+     #  software, including all implied warranties of merchantability and
+     #  fitness, in no event shall NAIST be liable for
+     #  any special, indirect or consequential damages or any damages
+     #  whatsoever resulting from loss of use, data or profits, whether in an
+     #  action of contract, negligence or other tortuous action, arising out
+     #  of or in connection with the use or performance of this software.
+     #
+     #  A large portion of the dictionary entries
+     #  originate from ICOT Free Software.  The following conditions for ICOT
+     #  Free Software applies to the current dictionary as well.
+     #
+     #  Each User may also freely distribute the Program, whether in its
+     #  original form or modified, to any third party or parties, PROVIDED
+     #  that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear
+     #  on, or be attached to, the Program, which is distributed substantially
+     #  in the same form as set out herein and that such intended
+     #  distribution, if actually made, will neither violate or otherwise
+     #  contravene any of the laws and regulations of the countries having
+     #  jurisdiction over the User or the intended distribution itself.
+     #
+     #  NO WARRANTY
+     #
+     #  The program was produced on an experimental basis in the course of the
+     #  research and development conducted during the project and is provided
+     #  to users as so produced on an experimental basis.  Accordingly, the
+     #  program is provided without any warranty whatsoever, whether express,
+     #  implied, statutory or otherwise.  The term "warranty" used herein
+     #  includes, but is not limited to, any warranty of the quality,
+     #  performance, merchantability and fitness for a particular purpose of
+     #  the program and the nonexistence of any infringement or violation of
+     #  any right of any third party.
+     #
+     #  Each user of the program will agree and understand, and be deemed to
+     #  have agreed and understood, that there is no warranty whatsoever for
+     #  the program and, accordingly, the entire risk arising from or
+     #  otherwise connected with the program is assumed by the user.
+     #
+     #  Therefore, neither ICOT, the copyright holder, or any other
+     #  organization that participated in or was otherwise related to the
+     #  development of the program and their respective officials, directors,
+     #  officers and other employees shall be held liable for any and all
+     #  damages, including, without limitation, general, special, incidental
+     #  and consequential damages, arising out of or otherwise in connection
+     #  with the use or inability to use the program or any product, material
+     #  or result produced or otherwise obtained by using the program,
+     #  regardless of whether they have been advised of, or otherwise had
+     #  knowledge of, the possibility of such damages at any time during the
+     #  project or thereafter.  Each user will be deemed to have agreed to the
+     #  foregoing by his or her commencement of use of the program.  The term
+     #  "use" as used herein includes, but is not limited to, the use,
+     #  modification, copying and distribution of the program and the
+     #  production of secondary products from the program.
+     #
+     #  In the case where the program, whether in its original form or
+     #  modified, was distributed or delivered to or received by a user from
+     #  any person, organization or entity other than ICOT, unless it makes or
+     #  grants independently of ICOT any specific warranty to the user in
+     #  writing, such person, organization or entity, will also be exempted
+     #  from and not be held liable to the user for any such damages as noted
+     #  above as far as the program is concerned.
+     #
+     #  ---------------COPYING.ipadic-----END----------------------------------
+
+    3. Lao Word Break Dictionary Data (laodict.txt)
+
+     #  Copyright (c) 2013 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     # Project: http://code.google.com/p/lao-dictionary/
+     # Dictionary: http://lao-dictionary.googlecode.com/git/Lao-Dictionary.txt
+     # License: http://lao-dictionary.googlecode.com/git/Lao-Dictionary-LICENSE.txt
+     #              (copied below)
+     #
+     #  This file is derived from the above dictionary, with slight
+     #  modifications.
+     #  ----------------------------------------------------------------------
+     #  Copyright (C) 2013 Brian Eugene Wilson, Robert Martin Campbell.
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification,
+     #  are permitted provided that the following conditions are met:
+     #
+     #
+     # Redistributions of source code must retain the above copyright notice, this
+     #  list of conditions and the following disclaimer. Redistributions in
+     #  binary form must reproduce the above copyright notice, this list of
+     #  conditions and the following disclaimer in the documentation and/or
+     #  other materials provided with the distribution.
+     #
+     #
+     # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
+     # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     # OF THE POSSIBILITY OF SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    4. Burmese Word Break Dictionary Data (burmesedict.txt)
+
+     #  Copyright (c) 2014 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     #  This list is part of a project hosted at:
+     #    github.com/kanyawtech/myanmar-karen-word-lists
+     #
+     #  --------------------------------------------------------------------------
+     #  Copyright (c) 2013, LeRoy Benjamin Sharon
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification, are permitted provided that the following conditions
+     #  are met: Redistributions of source code must retain the above
+     #  copyright notice, this list of conditions and the following
+     #  disclaimer.  Redistributions in binary form must reproduce the
+     #  above copyright notice, this list of conditions and the following
+     #  disclaimer in the documentation and/or other materials provided
+     #  with the distribution.
+     #
+     #    Neither the name Myanmar Karen Word Lists, nor the names of its
+     #    contributors may be used to endorse or promote products derived
+     #    from this software without specific prior written permission.
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     #  CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     #  INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     #  MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     #  DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
+     #  BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+     #  EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+     #  TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+     #  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+     #  ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
+     #  TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
+     #  THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+     #  SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    5. Time Zone Database
+
+      ICU uses the public domain data and code derived from Time Zone
+    Database for its time zone support. The ownership of the TZ database
+    is explained in BCP 175: Procedure for Maintaining the Time Zone
+    Database section 7.
+
+     # 7.  Database Ownership
+     #
+     #    The TZ database itself is not an IETF Contribution or an IETF
+     #    document.  Rather it is a pre-existing and regularly updated work
+     #    that is in the public domain, and is intended to remain in the
+     #    public domain.  Therefore, BCPs 78 [RFC5378] and 79 [RFC3979] do
+     #    not apply to the TZ Database or contributions that individuals make
+     #    to it.  Should any claims be made and substantiated against the TZ
+     #    Database, the organization that is providing the IANA
+     #    Considerations defined in this RFC, under the memorandum of
+     #    understanding with the IETF, currently ICANN, may act in accordance
+     #    with all competent court orders.  No ownership claims will be made
+     #    by ICANN or the IETF Trust on the database or the code.  Any person
+     #    making a contribution to the database or code waives all rights to
+     #    future claims in that contribution or in the TZ Database.
+
+    6. Google double-conversion
+
+    Copyright 2006-2011, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- libuv, located at deps/uv, is licensed as follows:
+  """
+    libuv is licensed for use as follows:
+
+    == ==
+    Copyright (c) 2015-present libuv project contributors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+    == ==
+
+    This license applies to parts of libuv originating from the
+    https://github.com/joyent/libuv repository:
+
+    == ==
+
+    Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+
+    == ==
+
+    This license applies to all parts of libuv that are not externally
+    maintained libraries.
+
+    The externally maintained libraries used by libuv are:
+
+      - tree.h (from FreeBSD), copyright Niels Provos. Two clause BSD license.
+
+      - inet_pton and inet_ntop implementations, contained in src/inet.c, are
+        copyright the Internet Systems Consortium, Inc., and licensed under the ISC
+        license.
+
+      - stdint-msvc2008.h (from msinttypes), copyright Alexander Chemeris. Three
+        clause BSD license.
+
+      - pthread-fixes.c, copyright Google Inc. and Sony Mobile Communications AB.
+        Three clause BSD license.
+
+      - android-ifaddrs.h, android-ifaddrs.c, copyright Berkeley Software Design
+        Inc, Kenneth MacKay and Emergya (Cloud4all, FP7/2007-2013, grant agreement
+        n° 289016). Three clause BSD license.
+  """
+
+- llhttp, located at deps/llhttp, is licensed as follows:
+  """
+    This software is licensed under the MIT License.
+
+    Copyright Fedor Indutny, 2018.
+
+    Permission is hereby granted, free of charge, to any person obtaining a
+    copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to permit
+    persons to whom the Software is furnished to do so, subject to the
+    following conditions:
+
+    The above copyright notice and this permission notice shall be included
+    in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+    OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+    NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+    DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+    OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+    USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- OpenSSL, located at deps/openssl, is licensed as follows:
+  """
+    Copyright (c) 1998-2019 The OpenSSL Project.  All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    1. Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+    2. Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+
+    3. All advertising materials mentioning features or use of this
+    software must display the following acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
+
+    4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
+    endorse or promote products derived from this software without
+    prior written permission. For written permission, please contact
+    openssl-core@openssl.org.
+
+    5. Products derived from this software may not be called "OpenSSL"
+    nor may "OpenSSL" appear in their names without prior written
+    permission of the OpenSSL Project.
+
+    6. Redistributions of any form whatsoever must retain the following
+    acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
+
+    THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
+    EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+    PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
+    ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+    NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+    LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+    HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+    STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+    ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+    OF THE POSSIBILITY OF SUCH DAMAGE.
+    == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == ==
+
+    This product includes cryptographic software written by Eric Young
+    (eay@cryptsoft.com).  This product includes software written by Tim
+    Hudson (tjh@cryptsoft.com).
+  """
+
+- Punycode.js, located at lib/punycode.js, is licensed as follows:
+  """
+    Copyright Mathias Bynens <https://mathiasbynens.be/>
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- V8, located at deps/v8, is licensed as follows:
+  """
+    This license applies to all parts of V8 that are not externally
+    maintained libraries.  The externally maintained libraries used by V8
+    are:
+
+      - PCRE test suite, located in
+        test/mjsunit/third_party/regexp-pcre/regexp-pcre.js.  This is based on the
+        test suite from PCRE-7.3, which is copyrighted by the University
+        of Cambridge and Google, Inc.  The copyright notice and license
+        are embedded in regexp-pcre.js.
+
+      - Layout tests, located in test/mjsunit/third_party/object-keys.  These are
+        based on layout tests from webkit.org which are copyrighted by
+        Apple Computer, Inc. and released under a 3-clause BSD license.
+
+      - Strongtalk assembler, the basis of the files assembler-arm-inl.h,
+        assembler-arm.cc, assembler-arm.h, assembler-ia32-inl.h,
+        assembler-ia32.cc, assembler-ia32.h, assembler-x64-inl.h,
+        assembler-x64.cc, assembler-x64.h, assembler-mips-inl.h,
+        assembler-mips.cc, assembler-mips.h, assembler.cc and assembler.h.
+        This code is copyrighted by Sun Microsystems Inc. and released
+        under a 3-clause BSD license.
+
+      - Valgrind client API header, located at src/third_party/valgrind/valgrind.h
+        This is released under the BSD license.
+
+      - The Wasm C/C++ API headers, located at third_party/wasm-api/wasm.{h,hh}
+        This is released under the Apache license. The API's upstream prototype
+        implementation also formed the basis of V8's implementation in
+        src/wasm/c-api.cc.
+
+    These libraries have their own licenses; we recommend you read them,
+    as their terms may differ from the terms below.
+
+    Further license information can be found in LICENSE files located in
+    sub-directories.
+
+    Copyright 2014, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- SipHash, located at deps/v8/src/third_party/siphash, is licensed as follows:
+  """
+    SipHash reference C implementation
+
+    Copyright (c) 2016 Jean-Philippe Aumasson <jeanphilippe.aumasson@gmail.com>
+
+    To the extent possible under law, the author(s) have dedicated all
+    copyright and related and neighboring rights to this software to the public
+    domain worldwide. This software is distributed without any warranty.
+  """
+
+- zlib, located at deps/zlib, is licensed as follows:
+  """
+    zlib.h -- interface of the 'zlib' general purpose compression library
+    version 1.2.11, January 15th, 2017
+
+    Copyright (C) 1995-2017 Jean-loup Gailly and Mark Adler
+
+    This software is provided 'as-is', without any express or implied
+    warranty.  In no event will the authors be held liable for any damages
+    arising from the use of this software.
+
+    Permission is granted to anyone to use this software for any purpose,
+    including commercial applications, and to alter it and redistribute it
+    freely, subject to the following restrictions:
+
+    1. The origin of this software must not be misrepresented; you must not
+    claim that you wrote the original software. If you use this software
+    in a product, an acknowledgment in the product documentation would be
+    appreciated but is not required.
+    2. Altered source versions must be plainly marked as such, and must not be
+    misrepresented as being the original software.
+    3. This notice may not be removed or altered from any source distribution.
+
+    Jean-loup Gailly        Mark Adler
+    jloup@gzip.org          madler@alumni.caltech.edu
+  """
+
+- npm, located at deps/npm, is licensed as follows:
+  """
+    The npm application
+    Copyright (c) npm, Inc. and Contributors
+    Licensed on the terms of The Artistic License 2.0
+
+    Node package dependencies of the npm application
+    Copyright (c) their respective copyright owners
+    Licensed on their respective license terms
+
+    The npm public registry at https://registry.npmjs.org
+    and the npm website at https://www.npmjs.com
+    Operated by npm, Inc.
+    Use governed by terms published on https://www.npmjs.com
+
+    "Node.js"
+    Trademark Joyent, Inc., https://joyent.com
+    Neither npm nor npm, Inc. are affiliated with Joyent, Inc.
+
+    The Node.js application
+    Project of Node Foundation, https://nodejs.org
+
+    The npm Logo
+    Copyright (c) Mathias Pettersson and Brian Hammond
+
+    "Gubblebum Blocky" typeface
+    Copyright (c) Tjarda Koster, https://jelloween.deviantart.com
+    Used with permission
+
+    --------
+
+    The Artistic License 2.0
+
+    Copyright (c) 2000-2006, The Perl Foundation.
+
+    Everyone is permitted to copy and distribute verbatim copies
+    of this license document, but changing it is not allowed.
+
+    Preamble
+
+    This license establishes the terms under which a given free software
+    Package may be copied, modified, distributed, and/or redistributed.
+    The intent is that the Copyright Holder maintains some artistic
+    control over the development of that Package while still keeping the
+    Package available as open source and free software.
+
+    You are always permitted to make arrangements wholly outside of this
+    license directly with the Copyright Holder of a given Package.  If the
+    terms of this license do not permit the full use that you propose to
+    make of the Package, you should contact the Copyright Holder and seek
+    a different licensing arrangement.
+
+    Definitions
+
+        "Copyright Holder" means the individual(s) or organization(s)
+        named in the copyright notice for the entire Package.
+
+        "Contributor" means any party that has contributed code or other
+        material to the Package, in accordance with the Copyright Holder's
+        procedures.
+
+        "You" and "your" means any person who would like to copy,
+        distribute, or modify the Package.
+
+        "Package" means the collection of files distributed by the
+        Copyright Holder, and derivatives of that collection and/or of
+        those files. A given Package may consist of either the Standard
+        Version, or a Modified Version.
+
+        "Distribute" means providing a copy of the Package or making it
+        accessible to anyone else, or in the case of a company or
+        organization, to others outside of your company or organization.
+
+        "Distributor Fee" means any fee that you charge for Distributing
+        this Package or providing support for this Package to another
+        party.  It does not mean licensing fees.
+
+        "Standard Version" refers to the Package if it has not been
+        modified, or has been modified only in ways explicitly requested
+        by the Copyright Holder.
+
+        "Modified Version" means the Package, if it has been changed, and
+        such changes were not explicitly requested by the Copyright
+        Holder.
+
+        "Original License" means this Artistic License as Distributed with
+        the Standard Version of the Package, in its current version or as
+        it may be modified by The Perl Foundation in the future.
+
+        "Source" form means the source code, documentation source, and
+        configuration files for the Package.
+
+        "Compiled" form means the compiled bytecode, object code, binary,
+        or any other form resulting from mechanical transformation or
+        translation of the Source form.
+
+    Permission for Use and Modification Without Distribution
+
+    (1)  You are permitted to use the Standard Version and create and use
+    Modified Versions for any purpose without restriction, provided that
+    you do not Distribute the Modified Version.
+
+    Permissions for Redistribution of the Standard Version
+
+    (2)  You may Distribute verbatim copies of the Source form of the
+    Standard Version of this Package in any medium without restriction,
+    either gratis or for a Distributor Fee, provided that you duplicate
+    all of the original copyright notices and associated disclaimers.  At
+    your discretion, such verbatim copies may or may not include a
+    Compiled form of the Package.
+
+    (3)  You may apply any bug fixes, portability changes, and other
+    modifications made available from the Copyright Holder.  The resulting
+    Package will still be considered the Standard Version, and as such
+    will be subject to the Original License.
+
+    Distribution of Modified Versions of the Package as Source
+
+    (4)  You may Distribute your Modified Version as Source (either gratis
+    or for a Distributor Fee, and with or without a Compiled form of the
+    Modified Version) provided that you clearly document how it differs
+    from the Standard Version, including, but not limited to, documenting
+    any non-standard features, executables, or modules, and provided that
+    you do at least ONE of the following:
+
+        (a)  make the Modified Version available to the Copyright Holder
+        of the Standard Version, under the Original License, so that the
+        Copyright Holder may include your modifications in the Standard
+        Version.
+
+        (b)  ensure that installation of your Modified Version does not
+        prevent the user installing or running the Standard Version. In
+        addition, the Modified Version must bear a name that is different
+        from the name of the Standard Version.
+
+        (c)  allow anyone who receives a copy of the Modified Version to
+        make the Source form of the Modified Version available to others
+        under
+
+            (i)  the Original License or
+
+            (ii)  a license that permits the licensee to freely copy,
+            modify and redistribute the Modified Version using the same
+            licensing terms that apply to the copy that the licensee
+            received, and requires that the Source form of the Modified
+            Version, and of any works derived from it, be made freely
+            available in that license fees are prohibited but Distributor
+            Fees are allowed.
+
+    Distribution of Compiled Forms of the Standard Version
+    or Modified Versions without the Source
+
+    (5)  You may Distribute Compiled forms of the Standard Version without
+    the Source, provided that you include complete instructions on how to
+    get the Source of the Standard Version.  Such instructions must be
+    valid at the time of your distribution.  If these instructions, at any
+    time while you are carrying out such distribution, become invalid, you
+    must provide new instructions on demand or cease further distribution.
+    If you provide valid instructions or cease distribution within thirty
+    days after you become aware that the instructions are invalid, then
+    you do not forfeit any of your rights under this license.
+
+    (6)  You may Distribute a Modified Version in Compiled form without
+    the Source, provided that you comply with Section 4 with respect to
+    the Source of the Modified Version.
+
+    Aggregating or Linking the Package
+
+    (7)  You may aggregate the Package (either the Standard Version or
+    Modified Version) with other packages and Distribute the resulting
+    aggregation provided that you do not charge a licensing fee for the
+    Package.  Distributor Fees are permitted, and licensing fees for other
+    components in the aggregation are permitted. The terms of this license
+    apply to the use and Distribution of the Standard or Modified Versions
+    as included in the aggregation.
+
+    (8) You are permitted to link Modified and Standard Versions with
+    other works, to embed the Package in a larger work of your own, or to
+    build stand-alone binary or bytecode versions of applications that
+    include the Package, and Distribute the result without restriction,
+    provided the result does not expose a direct interface to the Package.
+
+    Items That are Not Considered Part of a Modified Version
+
+    (9) Works (including, but not limited to, modules and scripts) that
+    merely extend or make use of the Package, do not, by themselves, cause
+    the Package to be a Modified Version.  In addition, such works are not
+    considered parts of the Package itself, and are not subject to the
+    terms of this license.
+
+    General Provisions
+
+    (10)  Any use, modification, and distribution of the Standard or
+    Modified Versions is governed by this Artistic License. By using,
+    modifying or distributing the Package, you accept this license. Do not
+    use, modify, or distribute the Package, if you do not accept this
+    license.
+
+    (11)  If your Modified Version has been derived from a Modified
+    Version made by someone other than you, you are nevertheless required
+    to ensure that your Modified Version complies with the requirements of
+    this license.
+
+    (12)  This license does not grant you the right to use any trademark,
+    service mark, tradename, or logo of the Copyright Holder.
+
+    (13)  This license includes the non-exclusive, worldwide,
+    free-of-charge patent license to make, have made, use, offer to sell,
+    sell, import and otherwise transfer the Package with respect to any
+    patent claims licensable by the Copyright Holder that are necessarily
+    infringed by the Package. If you institute patent litigation
+    (including a cross-claim or counterclaim) against any party alleging
+    that the Package constitutes direct or contributory patent
+    infringement, then this Artistic License to you shall terminate on the
+    date that such litigation is filed.
+
+    (14)  Disclaimer of Warranty:
+    THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS
+    IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR
+    NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL
+    LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL
+    BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+    DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF
+    ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+    --------
+  """
+
+- GYP, located at tools/gyp, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- inspector_protocol, located at tools/inspector_protocol, is licensed as follows:
+  """
+    // Copyright 2016 The Chromium Authors. All rights reserved.
+    //
+    // Redistribution and use in source and binary forms, with or without
+    // modification, are permitted provided that the following conditions are
+    // met:
+    //
+    //    * Redistributions of source code must retain the above copyright
+    // notice, this list of conditions and the following disclaimer.
+    //    * Redistributions in binary form must reproduce the above
+    // copyright notice, this list of conditions and the following disclaimer
+    // in the documentation and/or other materials provided with the
+    // distribution.
+    //    * Neither the name of Google Inc. nor the names of its
+    // contributors may be used to endorse or promote products derived from
+    // this software without specific prior written permission.
+    //
+    // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- jinja2, located at tools/inspector_protocol/jinja2, is licensed as follows:
+  """
+    Copyright (c) 2009 by the Jinja Team, see AUTHORS for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+
+        * The names of the contributors may not be used to endorse or
+          promote products derived from this software without specific
+          prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- markupsafe, located at tools/inspector_protocol/markupsafe, is licensed as follows:
+  """
+    Copyright (c) 2010 by Armin Ronacher and contributors.  See AUTHORS
+    for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms of the software as well
+    as documentation, with or without modification, are permitted provided
+    that the following conditions are met:
+
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+
+    * The names of the contributors may not be used to endorse or
+      promote products derived from this software without specific
+      prior written permission.
+
+    THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+    CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
+    NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+    OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+    EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+    PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+    PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+    NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+    SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+    DAMAGE.
+  """
+
+- cpplint.py, located at tools/cpplint.py, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- ESLint, located at tools/node_modules/eslint, is licensed as follows:
+  """
+    Copyright JS Foundation and other contributors, https://js.foundation
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- babel-eslint, located at tools/node_modules/babel-eslint, is licensed as follows:
+  """
+    Copyright (c) 2014-2016 Sebastian McKenzie <sebmck@gmail.com>
+
+    MIT License
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- gtest, located at test/cctest/gtest, is licensed as follows:
+  """
+    Copyright 2008, Google Inc.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+        * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- nghttp2, located at deps/nghttp2, is licensed as follows:
+  """
+    The MIT License
+
+    Copyright (c) 2012, 2014, 2015, 2016 Tatsuhiro Tsujikawa
+    Copyright (c) 2012, 2014, 2015, 2016 nghttp2 contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- node-inspect, located at deps/node-inspect, is licensed as follows:
+  """
+    Copyright Node.js contributors. All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+  """
+
+- large_pages, located at src/large_pages, is licensed as follows:
+  """
+     Copyright (C) 2018 Intel Corporation
+
+     Permission is hereby granted, free of charge, to any person obtaining a copy
+     of this software and associated documentation files (the "Software"),
+     to deal in the Software without restriction, including without limitation
+     the rights to use, copy, modify, merge, publish, distribute, sublicense,
+     and/or sell copies of the Software, and to permit persons to whom
+     the Software is furnished to do so, subject to the following conditions:
+
+     The above copyright notice and this permission notice shall be included
+     in all copies or substantial portions of the Software.
+
+     THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+     OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+     FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
+     THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES
+     OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+     ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
+     OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- caja, located at lib/internal/freeze_intrinsics.js, is licensed as follows:
+  """
+     Adapted from SES/Caja - Copyright (C) 2011 Google Inc.
+     Copyright (C) 2018 Agoric
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+  """
+
+- brotli, located at deps/brotli, is licensed as follows:
+  """
+    Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- HdrHistogram, located at deps/histogram, is licensed as follows:
+  """
+    The code in this repository code was Written by Gil Tene, Michael Barker,
+    and Matt Warren, and released to the public domain, as explained at
+    http://creativecommons.org/publicdomain/zero/1.0/
+
+    For users of this code who wish to consume it under the "BSD" license
+    rather than under the public domain or CC0 contribution text mentioned
+    above, the code found under this directory is *also* provided under the
+    following license (commonly referred to as the BSD 2-Clause License). This
+    license does not detract from the above stated release of the code into
+    the public domain, and simply represents an additional license granted by
+    the Author.
+
+    -----------------------------------------------------------------------------
+    ** Beginning of "BSD 2-Clause License" text. **
+
+     Copyright (c) 2012, 2013, 2014 Gil Tene
+     Copyright (c) 2014 Michael Barker
+     Copyright (c) 2014 Matt Warren
+     All rights reserved.
+
+     Redistribution and use in source and binary forms, with or without
+     modification, are permitted provided that the following conditions are met:
+
+     1. Redistributions of source code must retain the above copyright notice,
+        this list of conditions and the following disclaimer.
+
+     2. Redistributions in binary form must reproduce the above copyright notice,
+        this list of conditions and the following disclaimer in the documentation
+        and/or other materials provided with the distribution.
+
+     THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+     AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+     IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+     ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+     LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+     INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+     CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+     THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- node-heapdump, located at src/heap_utils.cc, is licensed as follows:
+  """
+    ISC License
+
+    Copyright (c) 2012, Ben Noordhuis <info@bnoordhuis.nl>
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    === src/compat.h src/compat-inl.h ===
+
+    ISC License
+
+    Copyright (c) 2014, StrongLoop Inc.
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- rimraf, located at lib/internal/fs/rimraf.js, is licensed as follows:
+  """
+    The ISC License
+
+    Copyright (c) Isaac Z. Schlueter and Contributors
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+    IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- uvwasi, located at deps/uvwasi, is licensed as follows:
+  """
+    MIT License
+
+    Copyright (c) 2019 Colin Ihrig and Contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in all
+    copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+    SOFTWARE.
+  """
+
+
+
+
+
+

MICROSOFT SOFTWARE LICENSE TERMS

+
+
+
+
MICROSOFT VISUAL STUDIO CODE
+
+These license terms are an agreement between you and Microsoft Corporation (or based on where you live, one of its affiliates). They apply to the software named above. The terms also apply to any Microsoft services or updates for the software, except to the extent those have different terms.
+
+IF YOU COMPLY WITH THESE LICENSE TERMS, YOU HAVE THE RIGHTS BELOW.
+
+    1. INSTALLATION AND USE RIGHTS.
+        a. General. You may use any number of copies of the software to develop and test your applications, including deployment within your internal corporate network.
+        b. Demo use. The uses permitted above include use of the software in demonstrating your applications.
+        c. Third Party Components. The software may include third party components with separate legal notices or governed by other agreements, as may be described in the ThirdPartyNotices file accompanying the software.
+        d. Extensions. The software gives you the option to download other Microsoft and third party software packages from our extension marketplace or package managers. Those packages are under their own licenses, and not this agreement. Microsoft does not distribute, license or provide any warranties for any of the third party packages. By accessing or using our extension marketplace, you agree to the extension marketplace terms located at https://aka.ms/vsmarketplace-ToU.
+    2. DATA.
+        a. Data Collection. The software may collect information about you and your use of the software, and send that to Microsoft. Microsoft may use this information to provide services and improve our products and services. You may opt-out of many of these scenarios, but not all, as described in the product documentation located at https://code.visualstudio.com/docs/supporting/faq#_how-to-disable-telemetry-reporting. There may also be some features in the software that may enable you and Microsoft to collect data from users of your applications. If you use these features, you must comply with applicable law, including providing appropriate notices to users of your applications together with Microsoft’s privacy statement. Our privacy statement is located at https://go.microsoft.com/fwlink/?LinkID=824704. You can learn more about data collection and use in the help documentation and our privacy statement. Your use of the software operates as your consent to these practices.
+        b. Processing of Personal Data. To the extent Microsoft is a processor or subprocessor of personal data in connection with the software, Microsoft makes the commitments in the European Union General Data Protection Regulation Terms of the Online Services Terms to all customers effective May 25, 2018, at https://go.microsoft.com/?linkid=9840733.
+    3. UPDATES. The software may periodically check for updates and download and install them for you. You may obtain updates only from Microsoft or authorized sources. Microsoft may need to update your system to provide you with updates. You agree to receive these automatic updates without any additional notice. Updates may not include or support all existing software features, services, or peripheral devices. If you do not want automatic updates, you may turn them off by following the instructions in the documentation at https://go.microsoft.com/fwlink/?LinkID=616397.
+    4. FEEDBACK. If you give feedback about the software to Microsoft, you give to Microsoft, without charge, the right to use, share and commercialize your feedback in any way and for any purpose. You will not give feedback that is subject to a license that requires Microsoft to license its software or documentation to third parties because we include your feedback in them. These rights survive this agreement.
+    5. SCOPE OF LICENSE. This license applies to the Visual Studio Code product. Source code for Visual Studio Code is available at https://github.com/Microsoft/vscode under the MIT license agreement. The software is licensed, not sold. This agreement only gives you some rights to use the software. Microsoft reserves all other rights. Unless applicable law gives you more rights despite this limitation, you may use the software only as expressly permitted in this agreement. In doing so, you must comply with any technical limitations in the software that only allow you to use it in certain ways. You may not
+        reverse engineer, decompile or disassemble the software, or otherwise attempt to derive the source code for the software except and solely to the extent required by third party licensing terms governing use of certain open source components that may be included in the software;
+        remove, minimize, block or modify any notices of Microsoft or its suppliers in the software;
+        use the software in any way that is against the law;
+        share, publish, rent or lease the software, or provide the software as a stand-alone offering for others to use.
+    6. SUPPORT SERVICES. Because this software is “as is,” we may not provide support services for it.
+    7. ENTIRE AGREEMENT. This agreement, and the terms for supplements, updates, Internet-based services and support services that you use, are the entire agreement for the software and support services.
+    8. EXPORT RESTRICTIONS. You must comply with all domestic and international export laws and regulations that apply to the software, which include restrictions on destinations, end-users, and end use. For further information on export restrictions, see https://www.microsoft.com/exporting.
+    9. APPLICABLE LAW. If you acquired the software in the United States, Washington law applies to interpretation of and claims for breach of this agreement, and the laws of the state where you live apply to all other claims. If you acquired the software in any other country, its laws apply.
+    10. CONSUMER RIGHTS; REGIONAL VARIATIONS. This agreement describes certain legal rights. You may have other rights, including consumer rights, under the laws of your state or country. Separate and apart from your relationship with Microsoft, you may also have rights with respect to the party from which you acquired the software. This agreement does not change those other rights if the laws of your state or country do not permit it to do so. For example, if you acquired the software in one of the below regions, or mandatory country law applies, then the following provisions apply to you:
+        a. Australia. You have statutory guarantees under the Australian Consumer Law and nothing in this agreement is intended to affect those rights.
+        b. Canada. If you acquired this software in Canada, you may stop receiving updates by turning off the automatic update feature, disconnecting your device from the Internet (if and when you re-connect to the Internet, however, the software will resume checking for and installing updates), or uninstalling the software. The product documentation, if any, may also specify how to turn off updates for your specific device or software.
+        c. Germany and Austria.
+            Warranty. The properly licensed software will perform substantially as described in any Microsoft materials that accompany the software. However, Microsoft gives no contractual guarantee in relation to the licensed software.
+            Limitation of Liability. In case of intentional conduct, gross negligence, claims based on the Product Liability Act, as well as, in case of death or personal or physical injury, Microsoft is liable according to the statutory law.
+
+        Subject to the foregoing clause (ii), Microsoft will only be liable for slight negligence if Microsoft is in breach of such material contractual obligations, the fulfillment of which facilitate the due performance of this agreement, the breach of which would endanger the purpose of this agreement and the compliance with which a party may constantly trust in (so-called "cardinal obligations"). In other cases of slight negligence, Microsoft will not be liable for slight negligence.
+    11. DISCLAIMER OF WARRANTY. The software is licensed “as-is.” You bear the risk of using it. Microsoft gives no express warranties, guarantees or conditions. To the extent permitted under your local laws, Microsoft excludes the implied warranties of merchantability, fitness for a particular purpose and non-infringement.
+
+    12. LIMITATION ON AND EXCLUSION OF DAMAGES. You can recover from Microsoft and its suppliers only direct damages up to U.S. $5.00. You cannot recover any other damages, including consequential, lost profits, special, indirect or incidental damages.
+
+    This limitation applies to (a) anything related to the software, services, content (including code) on third party Internet sites, or third party applications; and (b) claims for breach of contract, breach of warranty, guarantee or condition, strict liability, negligence, or other tort to the extent permitted by applicable law.
+
+    It also applies even if Microsoft knew or should have known about the possibility of the damages. The above limitation or exclusion may not apply to you because your state or country may not allow the exclusion or limitation of incidental, consequential or other damages.
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/_images/images/devonfw.png b/docs/ide/1.0/_images/images/devonfw.png new file mode 100644 index 00000000..94dbda05 Binary files /dev/null and b/docs/ide/1.0/_images/images/devonfw.png differ diff --git a/docs/ide/1.0/_images/images/eclipse-spellcheck.png b/docs/ide/1.0/_images/images/eclipse-spellcheck.png new file mode 100644 index 00000000..ccad7e5b Binary files /dev/null and b/docs/ide/1.0/_images/images/eclipse-spellcheck.png differ diff --git a/docs/ide/1.0/_images/images/finder-integration.png b/docs/ide/1.0/_images/images/finder-integration.png new file mode 100644 index 00000000..989351aa Binary files /dev/null and b/docs/ide/1.0/_images/images/finder-integration.png differ diff --git a/docs/ide/1.0/_images/images/keyboard-layouts.png b/docs/ide/1.0/_images/images/keyboard-layouts.png new file mode 100644 index 00000000..3964c038 Binary files /dev/null and b/docs/ide/1.0/_images/images/keyboard-layouts.png differ diff --git a/docs/ide/1.0/_images/images/keyboard-modifier-keys.png b/docs/ide/1.0/_images/images/keyboard-modifier-keys.png new file mode 100644 index 00000000..191aca29 Binary files /dev/null and b/docs/ide/1.0/_images/images/keyboard-modifier-keys.png differ diff --git a/docs/ide/1.0/_images/images/keyboard-shortcuts.png b/docs/ide/1.0/_images/images/keyboard-shortcuts.png new file mode 100644 index 00000000..9b31daa5 Binary files /dev/null and b/docs/ide/1.0/_images/images/keyboard-shortcuts.png differ diff --git a/docs/ide/1.0/_images/images/tools-tabs-cmd.png b/docs/ide/1.0/_images/images/tools-tabs-cmd.png new file mode 100644 index 00000000..e7e149f6 Binary files /dev/null and b/docs/ide/1.0/_images/images/tools-tabs-cmd.png differ diff --git a/docs/ide/1.0/_images/images/tools-tabs-explorer.png b/docs/ide/1.0/_images/images/tools-tabs-explorer.png new file mode 100644 index 00000000..c79cf21d Binary files /dev/null and b/docs/ide/1.0/_images/images/tools-tabs-explorer.png differ diff --git 
a/docs/ide/1.0/_images/images/tools-tabs-firefox.png b/docs/ide/1.0/_images/images/tools-tabs-firefox.png new file mode 100644 index 00000000..da31539d Binary files /dev/null and b/docs/ide/1.0/_images/images/tools-tabs-firefox.png differ diff --git a/docs/ide/1.0/_images/images/tools-tabs-ssh.png b/docs/ide/1.0/_images/images/tools-tabs-ssh.png new file mode 100644 index 00000000..96f6055f Binary files /dev/null and b/docs/ide/1.0/_images/images/tools-tabs-ssh.png differ diff --git a/docs/ide/1.0/advanced-tooling-generic.html b/docs/ide/1.0/advanced-tooling-generic.html new file mode 100644 index 00000000..4f98fad8 --- /dev/null +++ b/docs/ide/1.0/advanced-tooling-generic.html @@ -0,0 +1,318 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Cross-Platform Tooling

+
+ +
+
+
+

Git Client

+
+
+

If you are looking for a git client that works cross-platform, we recommend using Fork.

+
+
+
+
+

Draw Diagrams

+
+
+

To draw diagrams for your project or for blueprints in devonfw, we recommend the following cross-platform tools:

+
+
+
    +
  • +

    draw.io is a powerful generic vector painting program (similar to visio). You can get a free open-source edition for your desktop from here.

    +
  • +
  • +

    ObjectAid is a nice and easy to use eclipse plugin that you can use to quickly create UML diagrams from existing code. While class-diagrams are supported for free, you need to buy a license if you want to use the other diagram types.

    +
  • +
  • +

    PlantUML is a great tool that can render UML diagrams from simple markup that can be easily managed in git or other version-control systems together with your code. Its simplicity allows branching and merging unlike other greedy binary UML data-formats.

    +
  • +
+
+
+
+
+

Browser Plugins

+
+
+

There are tons of helpful browser plugins out there and it might be a matter of personal taste what you like to have installed. However, as we are heavily using github we want to promote octotree. +In case you also work with ZenHub you might want to install the Zenhub Browser Extension.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/advanced-tooling-linux.html b/docs/ide/1.0/advanced-tooling-linux.html new file mode 100644 index 00000000..0d8049d3 --- /dev/null +++ b/docs/ide/1.0/advanced-tooling-linux.html @@ -0,0 +1,279 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Linux Tooling

+
+
+

There is nothing in this section so far. If you are a Linux user, please share your experience and provide your valuable hints.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/advanced-tooling-mac.html b/docs/ide/1.0/advanced-tooling-mac.html new file mode 100644 index 00000000..2c40fbe5 --- /dev/null +++ b/docs/ide/1.0/advanced-tooling-mac.html @@ -0,0 +1,468 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

MacOS Tooling

+
+ +
+
+
+

Finder

+
+
+

If you want to open a terminal from a folder in Finder and automatically get your environment set properly for devonfw-ide you will find the perfect solution here.

+
+
+
+devonfw-ide integration in MacOS Finder +
+
+
+

So after installing (see below) the integration(s) provided here, you can easily open a terminal ready for your devonfw-ide:

+
+
+
    +
  • +

    right click ([control] + click) on file or folder in Finder

    +
  • +
  • +

    Expand the Quick-Actions sub-menu

    +
  • +
  • +

    Click on the desired action (e.g. Open devonfw-Terminal here)

    +
  • +
  • +

    Verify that your environment is properly initialized by invoking:

    +
    +
    +
    mvn -v
    +
    +
    +
  • +
+
+
+

To get this feature for macOS Terminal.app open Finder and run the workflow system/mac/terminal/Open_devonfw-Terminal_here.workflow (in ${DEVON_IDE_HOME}). For iTerm2.app (that can be installed from App Store) do the same with system/mac/iterm/Open_devonfw-iTerm_here.workflow.

+
+
+
+
+

Keyboard

+
+
+

Keyboard support is not an integration however, some users coming from other platforms may struggle with the way macOS deals with (external non-apple) keyboards. +So to make it short: if you are happy with your keyboard and shortcuts, you can skip all the following. +Otherwise, if you think that pressing keys like Home, End, etc. should just work as expected or pressing Alt Gr should allow you to type the special characters as printed on your German keyboard then here you will find a solution to your problems! +To get all automated you can just run the script system/mac/keyboard/install-mac-keyboard-support.sh (in ${DEVON_IDE_HOME}). +If you would like to understand what is going on, you want to customize the keyboard settings to your needs, or you want a keyboard layout other than German ISO, please read on.

+
+
+
+
+

Keyboard Layouts

+
+
+

Keyboard layouts allow a fine-grained mapping of each key on your keyboard to its resulting input character or behaviour. +They are macOS native features and do not need to have software running as a background service to make the keyboard mapping work (see Karabiner section below as an alternative). +They are provided as a so-called bundle (white lego brick icon). Like a macOS app this is a folder containing a Contents folder with a specific sub-folder structure. +In the Resources subfolder *.keylayout files are placed and define the exact mapping for the keyboard. +As an example we provide a Keyboard Layouts folder containing a bundle for a German keyboard mapping.

+
+
+

To install keyboard layouts simply double-click the bundle or copy it to ~/Library/Keyboard Layouts. +To actually use them go to System Preferences and select Keyboard. +Then, select the tab Input Sources. +With the + button you can add a keyboard layout for your daily usage with your Mac. +Please note that the keyboard layout shipped with devonfw-ide is called German-ISO and can be found in the Others section at the end of the list. +It can be used as an example or template, if you want to create your own layout.

+
+
+
+Keyboard Preferences / Input Sources +
+
+
+

When you have multiple mappings in place, on the top menu bar you will find a little icon next to the current time that allows you to switch between the keyboard layouts, which is very handy when you switch from your native MacBook keyboard to an external USB keyboard or vice versa. +Even for a pure MacOS geek this can be helpful in case a friend coming from Windows/Linux is supposed to type something on the Mac in a pair-programming session.

+
+
+

In our German keyboard mapping example you can use the keys like Alt Gr, etc. to type special characters as you would expect and as printed on your keyboard. +To make Pos1, End, etc. work properly across all apps please read on to the next section(s).

+
+
+

In case you would like to create your own keyboard layout you can of course edit the *.keylayout files in a text editor. +However, to make this much more comfortable, you can use the graphical editor tool Ukelele. +Besides the app itself, the Ukelele dmg file also contains a Documentation and a Resources folder. +The latter contains many keyboard layouts that you can use as a starting point.

+
+
+
+
+

Key Bindings

+
+
+

Still, various keyboard shortcuts might not work as expected for you. +Therefore, we provide you with an advanced configuration in the folder system/mac/keyboard/KeyBindings that you can copy to your ~/Library folder:

+
+
+
+
cd system/mac/keyboard/
+cp -r KeyBindings ~/Library
+
+
+
+

To make the changes work you need to log out and log in again or you can reboot. +After that, your Home (Pos1) and End buttons should work as expected including with selection via Shift and/or Command. +Also, you can use Command together with the left or right arrow key to move between words and combine it with Shift for selection. +As an example, for further customization you can press Command + < to type the unicode character «.

+
+
+

However, still some apps listen to keyboard events on a lower level and come with their own keyboard mappings. +In these apps you might still experience unexpected behaviour. +Solutions can be found in the following sub-sections.

+
+
+
+
+

Switch Control and Command

+
+
+

If you are used to windows or linux and get easily confused by the apple keyboard behaviour you might want to switch the Control and the Option key. +Open System Preferences and select Keyboard. +Then, in the first tab, click on the button Modifier Keys…​. +For every keyboard you can customize the behaviour of your modifier keys and therefore switch Control and Option as illustrated in the screenshot:

+
+
+
+Keyboard Preferences / Modifier Keys +
+
+
+

Programmers now should also disable that Control + Space is opening Spotlight Search as otherwise this shortcut can not be redefined in other apps like common IDEs.

+
+
+
+Keyboard Preferences / Shortcuts +
+
+
+
+
+

== Eclipse

+
+
+

In Eclipse, move and select by word as described above does not work. +Even worse, the most important shortcut does not work: Control + Space for code completion (content assist). +You can manually redefine the key bindings in Preferences under General > Keys. +However, with multiple IDE installations and workspaces this will quickly get tedious. +Therefore, you can Export and Import specific Preferences such as Keys Preferences to/from a *.epf (Eclipse PreFerences) file. +We have done all this for you so you can just import the file located in system/mac/keyboard/Eclipse/eclipse-mac-keybindings.epf into your Eclipse. +Happy coding.

+
+
+
+
+

Karabiner

+
+
+

If you want more dynamics and do not worry about an app that has to run in the background to make your keyboard work as you like (no relevant performance overhead), you can try Karabiner Elements. +This is a powerful tool to remap your keyboard shortcuts. +In the UI you can only directly create and edit Simple Modifications that are too limited for most use-cases. +However, using Complex Modifications you can do a lot of magic to customize the keyboard behaviour to your personal needs. +A key with any combination of modifiers can be mapped to any key with arbitrary modifiers. +This can also be bound to conditions based on the frontmost application or the keyboard model. +These complex modifications are configured as *.json files. +We have included a set with useful rules for external keyboards, programmer shortcuts, etc. +If you have Karabiner installed, you only need to copy the contents of the karabiner folder located in this directory to your ~/.config folder:

+
+
+
+
cd system/mac/keyboard/
+cp karabiner/assets/complex_modifications/*.json ~/.config/karabiner/assets/complex_modifications/
+
+
+
+

Now, if you open the Complex Modifications in the Karabiner app, you can click on the + Add rule button and will see these mappings in the pop up. +Select the rules you want to add (e.g. add all) and you are done. +Unlike other solutions, you can quickly tweak your keyboard without the need to log out and restart apps, which gives faster trial and error turnarounds. +Further, if you want to tweak your own configs, Karabiner comes with a secondary app called Karabiner-EventViewer that shows you the names of the keys, modifiers, and apps for the events you are triggering. +This is very helpful to get the config right.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/advanced-tooling-windows.html b/docs/ide/1.0/advanced-tooling-windows.html new file mode 100644 index 00000000..43183a5d --- /dev/null +++ b/docs/ide/1.0/advanced-tooling-windows.html @@ -0,0 +1,558 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Windows Tooling

+
+ +
+
+
+

Installing software

+
+
+

The devon IDE already contains a lot of software. But if you need more, here are some ways to get it easily:

+
+
+
+
+

Chocolatey

+
+
+

Chocolatey is a repository for free and open source software similar to the repositories known from Linux, like apt, apk, pacman, …​

+
+
+
+
+

Winget

+
+
+

Microsoft is also working on a repository for Windows called winget. It is currently in alpha state, but is expected to be integrated in the upcoming Windows 11.

+
+
+
+
+

Integration into Windows-Explorer

+
+
+

After you have set up your devonfw-ide on a windows machine, +you already have windows-explorer integration out-of-the-box. +Just right-click on the folder you would like to open in a terminal and choose from the context menu:

+
+
+
    +
  • +

    Git Bash

    +
  • +
  • +

    Open devonfw cmd shell here

    +
  • +
  • +

    Open devonfw PowerShell here

    +
  • +
  • +

    Open devonfw Cygwin Bash here (only if cygwin was installed during setup)

    +
  • +
+
+
+
+
+

Tabs everywhere

+
+
+

Many people got used to tabs that have been introduced by all major browsers:

+
+
+
+tabs in firefox +
+
Figure 1. Tabs in Firefox
+
+
+

This nice feature can be added to many other tools.

+
+
+
+
+

Tabs for Windows Explorer

+
+
+

If you want to have tabs for windows explorer simply install Clover

+
+
+
+tabs in windows explorer +
+
Figure 2. Tabs in Windows Explorer
+
+
+
+
+

Tabs for SSH

+
+
+

If you want to have tabs for your SSH client Putty (or even better Kitty that comes with WinSCP integration) you simply install SuperPutty +BTW: Windows 10 has already an SSH client included.

+
+
+
+tabs for SSH sessions +
+
Figure 3. Tabs for SSH
+
+
+
+
+

Tabs for CMD

+
+
+

If you want to have tabs for your windows command-line you simply install ConEmu. Here you can also add other shells like Putty. +Also you should have a look at the new Windows Terminal which also supports tabs.

+
+
+
+tabs for windows shells +
+
Figure 4. Tabs for CMD
+
+
+

See integration to make ConEmu work flawless with devonfw-ide.

+
+
+
+
+

Windows Helpers

+
+ +
+
+
+

Handle passwords

+
+
+

Do you want complex passwords that differ for each account for security? Do you only want to remember a single password for simplicity? Do you want to have both? Then, you need to install KeePass right now.

+
+
+
+
+

Real text editor

+
+
+

A real developer needs a real text editor and not windows built in notepad. +The most common choice is Notepad++.

+
+
+
+
+

Real compression tool

+
+
+

Do you need to deal with ZIP files, TGZ, dpkg, etc.? Just install 7zip and forget about windows build-in ZIP support (that is buggy with long file paths, etc.).

+
+
+
+
+

Smarter clipboard

+
+
+

Do you want to paste something from the clipboard but meanwhile you had to copy something else? Just, one of the many things you can easily do with ditto.

+
+
+
+
+

PowerToys

+
+
+

Microsoft provides some extensions to improve the workflow in windows called PowerToys. They include tools like a file renamer, a way to order your windows on the screen, a color picker and more.

+
+
+
+
+

Sysinternals Tools

+
+
+

A real developer will quickly notice that windows build in tools to analyze processes, network connections, autostarts, etc. are quite poor. So, what you really would like is the Sysinternals-Suite. You can make process-explorer your default task manager. Use autoruns to prevent nasty background things to be started automatically. Use tcpview to figure out which process is blocking port 8080, etc.

+
+
+
+
+

Cope with file locks

+
+
+

Did you ever fail to delete a file or directory that was locked by some process and you did not even know which one it was? +Then you might love IoBit Unlocker. +See also this article.

+
+
+
+
+ +
+
+

Are you are used to symbolic and hard links in Linux? Do you have to work with Windows? Would you also like to have such links in Windows? Why not? Windows supports real links (not shortcuts like in other cases). +If you even want to have it integrated in windows explorer you might want to install linkshellextension. However, you might want to disable SmartMove in the configuration if you face strange performance issues when moving folders.

+
+
+
+
+

Linux

+
+
+

Install Cygwin and get your bash in windows with ssh-agent, awk, sed, tar, and all the tools you love (or hate). Windows 10 already includes Linux as an installable feature: WSL and, from Version 2004 on, WSL2, which is a native Linux Kernel running on Windows (in a lightweight VM).

+
+
+
+
+

X11

+
+
+

Do you want to connect via SSH and need to open an X11 app from the server? Do you want to see the GUI on your windows desktop? +No problem: Install VcXsrv.

+
+
+
+
+

Keyboard Freak

+
+
+

Are you a keyboard shortcut person? Do you want to have shortcuts for things like « and » ? +Then you should try AutoHotKey. +For the example (« and ») you can simply use this script to get started:

+
+
+
+
^<::Send {U+00AB}
+^+<::Send {U+00BB}
+
+
+
+

First, just press [ctrl][<] and [ctrl][>] ([ctrl][shift][<]). Next, create shortcuts to launch your IDE, to open your favorite tool, etc. +If you like a GUI to easily configure the scripts, that comes with a lot of extensions preinstalled, you should have a look at Ac’tive Aid.

+
+
+
+
+

Paint anywhere on your desktop

+
+
+

Do you collaborate sharing your screen, and want to mark a spot on top of what you see? Use Epic Pen to do just that.

+
+
+
+
+

Analyze graphs

+
+
+

Do you need to visualize complex graph structures? Convert them to Trivial Graph Format (.tgf), and run yEd to get an interactive visualization of your graph.

+
+
+
+
+

Up your screen capture game

+
+
+

Capture any part of your screen with a single click, directly upload to dropbox, or run a svn commit all in one go with Greenshot. Another screen capture tool where you can easily manage and edit your screenshots and also do screen recordings with is Screenpresso.

+
+
+
+
+

Fast Search in Windows

+
+
+

Everything is a desktop search utility for Windows that can rapidly find files and folders by name.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/advanced-tooling.html b/docs/ide/1.0/advanced-tooling.html new file mode 100644 index 00000000..ef52cb24 --- /dev/null +++ b/docs/ide/1.0/advanced-tooling.html @@ -0,0 +1,295 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ + +
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/build.html b/docs/ide/1.0/build.html new file mode 100644 index 00000000..b55b53b2 --- /dev/null +++ b/docs/ide/1.0/build.html @@ -0,0 +1,290 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

build

+
+
+

The build commandlet is an abstraction of build systems like maven, gradle, yarn, npm, etc. +It will auto-detect your build-system (via existence of files like pom.xml, package.json, etc.). According to this detection, it will simply delegate to the according commandlet of the specific build system. If that build-system is not yet available it will be downloaded and installed automatically.

+
+
+

So devon build allows users to build any project without bothering about the build-system. Further specific build options can be configured per project. This makes devon build a universal part of every definition of done. Before pushing your changes, please always run the following command to verify the build:

+
+
+

devon build

+
+
+

You may also supply additional arguments as devon build «args». This will simply delegate these arguments to the detected build command (e.g. call mvn «args»).

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/cicdgen.html b/docs/ide/1.0/cicdgen.html new file mode 100644 index 00000000..2bc71ed2 --- /dev/null +++ b/docs/ide/1.0/cicdgen.html @@ -0,0 +1,293 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

cicdgen

+
+
+

The cicdgen commandlet allows to install and setup cicdgen. +The arguments (devon cicdgen «args») are explained by the following table:

+
+
+
Usage of devon cicdgen
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup cicdgen (install and verify) +|update |update cicdgen (reinstall with @latest version and verify) +|java «args» |generate cicd files for the current devon4java project +|ng «args» |generate cicd files for the current devon4ng project +|node «args» |generate cicd files for the current devon4node project +|«args» |call cicdgen with the specified arguments +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/cli.html b/docs/ide/1.0/cli.html new file mode 100644 index 00000000..516484d2 --- /dev/null +++ b/docs/ide/1.0/cli.html @@ -0,0 +1,417 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

Devon CLI

+
+
+

The devonfw-ide is shipped with a central command devon. The setup will automatically register this command so it is available in any shell on your system. This page describes the Command Line Interface (CLI) of this command.

+
+
+
+
+

Devon

+
+
+

Without any argument the devon command will determine your DEVON_IDE_HOME and setup your environment variables automatically. In case you are not inside of a devonfw-ide folder the command will echo a message and do nothing.

+
+
+
+
[/]$ devon
+You are not inside a devon IDE installation: /
+[/]$ cd /projects/my-project/workspaces/test/my-git-repo
+[my-git-repo]$ devon
+devonfw-ide has environment variables have been set for /projects/my-project in workspace main
+[my-git-repo]$ echo $DEVON_IDE_HOME
+/projects/devon
+[my-git-repo]$ echo $JAVA_HOME
+/projects/my-project/software/java
+
+
+
+
+
+

Commandlets

+
+
+

The devon command supports a pluggable set of commandlets. Such commandlet is provided as first argument to the devon command and may take additional arguments:

+
+
+

devon «commandlet» [«arg»]*

+
+
+

Technically, a commandlet is a bash script located in $DEVON_IDE_HOME/scripts/command. So if you want to integrate another tool with devonfw-ide we are awaiting your pull-request. +Every commandlet takes the following generic arguments:

+
+
+
Generic arguments of every commandlet
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|-b or --batch |run in non-interactive mode (do not ask any questions). +|-q or --quiet |be quiet and avoid output. +|== == == == == == == == == == == =

+
+
+
+
+

Command-wrapper

+
+
+

For many commandlets the devon command acts as a wrapper. +Similar to mvnw or gradlew you can use it as a proxy command. +Therefore devon mvn clean install will be the same as mvn clean install. +The benefit when using devon as wrapper is that it will even work when the command (mvn, node, npm, etc.) is not on your PATH variable or even not yet installed. +We see the main benefit in this for writing portable scripts that you may commit to your git repository and that will then run everywhere and will lazily install the required tools on the fly. +In your daily usage you can and surely should avoid to always type devon as prefix to every command. +However, when you automate and want to avoid "command not found" errors, you can simply prefix the command with devon.

+
+
+
+
+

Commandlet overview

+
+
+

The following commandlets are currently available:

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/cobigen.html b/docs/ide/1.0/cobigen.html new file mode 100644 index 00000000..f72f5a09 --- /dev/null +++ b/docs/ide/1.0/cobigen.html @@ -0,0 +1,294 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

cobigen

+
+
+

The cobigen commandlet allows to install, configure, and launch CobiGen via CLI. Calling devon cobigen «args» is more or less the same as calling cobigen «args» (or cg «args») but with the benefit that the version of CobiGen preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon cobigen «args») are explained by the following table:

+
+
+
Usage of devon cobigen
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup CobiGen (install and verify), configurable via COBIGEN_VERSION +|«args» |run CobiGen with the given arguments («args») +|== == == == == == == == == == == =

+
+
+

Please read the actual documentation of CobiGen CLI for further details.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/conf.html b/docs/ide/1.0/conf.html new file mode 100644 index 00000000..1e2217b7 --- /dev/null +++ b/docs/ide/1.0/conf.html @@ -0,0 +1,313 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

conf

+
+
+

This folder contains configurations for your IDE:

+
+
+
File structure of the conf folder
+
+
/ conf
+├──/ .m2
+│  ├──/ repository
+│  │  ├──/ ant
+│  │  ├──/ ...
+│  │  └──/ zw
+│  ├── settings-security.xml
+│  └── settings.xml
+├──/ .sonar
+├──/ ...
+└── variables
+
+
+
+

The .m2 folder is used for configurations of maven. It contains the local repository folder used as cache for artifacts downloaded and installed by maven (see also maven repositories). +Further, there are two configuration files for maven:

+
+
+
    +
  • +

    settings.xml initialized from a template from your devonfw-ide [settings]. You may customize this to your needs (configuring HTTP proxies, credentials, or other user-specific settings). Secrets can be specified as $[«variable.name»] and will be prompted, encrypted and replaced automatically during the setup (unless in batch mode). Please note that this process is skipped in batch mode and also if you use the default settings URL (for simplicity of testing). To make use of this feature simply fork or copy the settings to your own git repo. In case your credentials have changed or you made a typo, you can simply redo this step by first moving your ${DEVON_IDE_HOME}/conf/.m2/settings.xml file to a temporary folder and then calling devon mvn setup.

    +
  • +
  • +

    settings-security.xml is auto-generated for you by devonfw-ide with a random password. This should make it easier for devonfw-ide users to use password encryption and never add passwords in plain text for better security.

    +
  • +
+
+
+

Finally, there is a file variables for the user-specific configuration of devonfw-ide.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/configuration.html b/docs/ide/1.0/configuration.html new file mode 100644 index 00000000..a36aec32 --- /dev/null +++ b/docs/ide/1.0/configuration.html @@ -0,0 +1,369 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Configuration

+
+
+

The devonfw-ide aims to be highly configurable and flexible. The configuration of the devon command and environment variables takes place via devon.properties files. The following list shows these configuration files in the order they are loaded so files can override variables from files above in the list:

+
+
+
    +
  1. +

    build in defaults (for JAVA_VERSION, ECLIPSE_PLUGINS, etc.)

    +
  2. +
  3. +

    ~/devon.properties - user specific global defaults (on windows in %USERPROFILE%/devon.properties)

    +
  4. +
  5. +

    scripts/devon.properties - defaults provided by devonfw-ide. Never directly modify this file!

    +
  6. +
  7. +

    devon.properties - vendor variables for custom distributions of devonfw-ide-scripts, may e.g. tweak SETTINGS_PATH or predefine SETTINGS_URL.

    +
  8. +
  9. +

    settings/devon.properties (${SETTINGS_PATH}/devon.properties) - project specific configurations from settings.

    +
  10. +
  11. +

    workspaces/${WORKSPACE}/devon.properties - optional workspace specific configurations (especially helpful in projects using docker).

    +
  12. +
  13. +

    conf/devon.properties - user specific configurations (e.g. M2_REPO=~/.m2/repository). During setup this file is created by copying a template from ${SETTINGS_PATH}/devon/conf/devon.properties.

    +
  14. +
  15. +

    settings/projects/*.properties - properties to configure project checkout and import

    +
  16. +
+
+
+
+
+

devon.properties

+
+
+

The devon.properties files allow to define environment variables in a simple and OS independent way:

+
+
+
    +
  • +

    # comments begin with a hash sign (#) and are ignored

    +
  • +
  • +

    variable_name=variable_value with space etc.

    +
  • +
  • +

    variable_name=${predefined_variable}/folder_name

    +
    +

    variable values can refer to other variables that are already defined, which will be resolved to their value. You have to use ${…​} syntax to make it work on all platforms (never use %…​%, $…​, or $(…​) syntax in devon.properties files).

    +
    +
  • +
  • +

    export exported_variable=this value will be exported in bash, in windows CMD the export prefix is ignored

    +
  • +
  • +

    variable_name=

    +
    +

    this will unset the specified variable

    +
    +
  • +
  • +

    variable_name=~/some/path/and.file

    +
    +

    tilde is resolved to your personal home directory on any OS including windows.

    +
    +
  • +
  • +

    array_variable=(value1 value2 value3)

    +
    +

    This will only work properly in bash worlds but as no arrays are used in CMD world of devonfw-ide it does not hurt on windows.

    +
    +
  • +
  • +

    Please never surround values with quotes (var="value")

    +
  • +
  • +

    This format is similar to Java *.properties but does not support advanced features as unicode literals, multi-lined values, etc.

    +
  • +
+
+
+

In order to know what to configure, have a look at the available variables.

+
+
+

Please only tweak configurations that you need to change and take according responsibility. There is a price to pay for flexibility, which means you have to be careful what you do.

+
+
+

Further, you can configure maven via conf/settings.xml. To configure your IDE such as eclipse or vscode you can tweak the settings.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/configurator.html b/docs/ide/1.0/configurator.html new file mode 100644 index 00000000..48a08b4a --- /dev/null +++ b/docs/ide/1.0/configurator.html @@ -0,0 +1,390 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

Configurator

+
+
+

The devonfw-ide maintains and includes a tool called devonfw-ide-configurator. This allows to synchronize and manage complex configurations. Initially it was written for Eclipse that stores its information in a .metadata folder of your workspace. Unfortunately it contains different file-formats (including XML as String value inside properties files), temporary data as well as important configurations with sometimes mixtures of project specific, developer specific, and UI specific settings. To make it short it is a mess. Instead of bashing on Eclipse we want to make this IDE more usable and created a way to manage important parts of such configuration structures.

+
+
+
+
+

How to use

+
+
+

The easiest way is that you do not care. When you launch the IDE of your choice (e.g. via devon eclipse, devon vscode or by running eclipse-main script), this will happen automatically. +If you want to explicitly update your workspace without launching the IDE, you can append ws-update (e.g. devon eclipse ws-update). Instead, if you want to launch your IDE without touching its configuration you can append run or start (e.g. devon eclipse run), which will skip this configurator.

+
+
+
+
+

How it works

+
+
+

For every tool managed with our configurator we distinguish the following file structures:

+
+
+
    +
  1. +

    The actual configuration location of the tool itself. We configure the tool to relocate this to a specific workspace (so by default workspaces/main/).

    +
  2. +
  3. +

    A configuration location with the configuration only used during the setup: $SETTINGS_PATH/«tool»/workspace/setup. Contains settings to setup a workspace. After that the user remains control over these settings.

    +
  4. +
  5. +

    A configuration location with the configuration used to update and override settings: $SETTINGS_PATH/«tool»/workspace/update. Contains settings that are overridden with every update and enforced for every team member.

    +
  6. +
+
+
+

The configurator will recursively traverse the directory structure of 2. and 3. together. For each located file «relative-path»/«file» it will create or update 1. according to the following rules:

+
+
+
    +
  • +

    If «relative-path»/«file» is present in 1. it will be loaded and used as basis.

    +
  • +
  • +

    Otherwise if «relative-path»/«file» is present in 2. it will be loaded and used as basis.

    +
  • +
  • +

    If «relative-path»/«file» is present in 3. it will be loaded and merged with the current basis.

    +
  • +
  • +

    Variables in the form ${«variable-name»} get resolved if «variable-name» is defined.

    +
  • +
  • +

    If this caused any change the result is written to «relative-path»/«file» in 1.

    +
  • +
+
+
+

In other words this means:

+
+
+
    +
  • +

    When your workspace configuration is initially created, 1. is empty. Hence, settings from 2. are used and merged with 3.

    +
  • +
  • +

    Settings in 2. are therefore used as initial defaults and suggestions but can be changed by the end-user (developer). Hence, use 2. for things such as themes, UI tweaks, etc. Once the workspace is configured 2. typically is not relevant anymore.

    +
  • +
  • +

    Settings in 3. are applied on every update. By default this happens every time you start your IDE; these settings are managed by the settings and under the control of the configurator. If the user modifies such settings and reopens his IDE his changes are reverted. Hence, use 3. for things such as code-formatters, compiler options, paths to tools shipped with devonfw-ide, etc. that should be consistent and homogeneous for every team-member.

    +
  • +
+
+
+
+
+

How to customize

+
+
+

Many fundamental settings for Eclipse can be found in the sub-folder .metadata/.plugins/org.eclipse.core.runtime/.settings. Of course you could manually edit these settings with a text editor. However, this requires a lot of knowledge. As we want to provide a great user-experience with devonfw-ide you can also do the following:

+
+
+
    +
  • +

    Launch the IDE to configure (e.g. devon eclipse).

    +
  • +
  • +

    In case of a non-trivial tweak you may first create a backup copy of your workspace folder (for eclipse this would be workspaces/main/.metadata) to some temporary location.

    +
  • +
  • +

    Do the desired modification of the configuration via the GUI of your IDE (e.g. in Eclipse preferences).

    +
  • +
  • +

    Exit your IDE and wait till it is shutdown

    +
  • +
  • +

    Call ws-reverse command for your IDE (e.g. devon eclipse ws-reverse) - ensure you do this in the same workspace where you launched and tweaked the config (without intermediate cd commands).

    +
  • +
  • +

    Review the changes to your settings with a git and diff tool of your choice (e.g. call git diff).

    +
  • +
  • +

    If all looks as expected commit these changes and push them - consider using a feature branch and ask a colleague to test these changes before you apply this to the main branch.

    +
  • +
  • +

    In case you could not find the expected changes, you may have tweaked a property that is not yet managed. Therefore, you can try again with ws-reverse-add instead of ws-reverse (e.g. devon eclipse ws-reverse-add) but be aware to revert undesired changes. Be sure not to add undesired settings that should not be managed.

    +
  • +
  • +

    In case your changes are in an entirely new configuration file that is currently not managed, you can simply diff the current workspace folder with the previously created backup copy using a recursive diff tool (such as winmerge or maybe just diff -R). Once you figured out the relevant change from that diff, you can manually apply it to the corresponding «ide»/workspace/update folder in your ide-settings git repository.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/devonfw-ide-advanced.html b/docs/ide/1.0/devonfw-ide-advanced.html new file mode 100644 index 00000000..e56ddddc --- /dev/null +++ b/docs/ide/1.0/devonfw-ide-advanced.html @@ -0,0 +1,749 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Advanced Features

+
+ +
+

Cross-Platform Tooling

+ +
+
+

Git Client

+
+

If you are looking for a git client that works cross-platform we recommend to use Fork.

+
+
+
+

Draw Diagrams

+
+

To draw diagrams for your project or for blueprints in devonfw, we recommend the following cross-platform tools:

+
+
+
    +
  • +

    draw.io is a powerful generic vector painting program (similar to visio). You can get a free open-source edition for your desktop from here.

    +
  • +
  • +

    ObjectAid is a nice and easy to use eclipse plugin that you can use to quickly create UML diagrams from existing code. While class-diagrams are supported for free, you need to buy a license if you want to use the other diagram types.

    +
  • +
  • +

    PlantUML is a great tool that can render UML diagrams from simple markup that can be easily managed in git or other version-control systems together with your code. Its simplicity allows branching and merging unlike other greedy binary UML data-formats.

    +
  • +
+
+
+
+

Browser Plugins

+
+

There are tons of helpful browser plugins out there and it might be a matter of personal taste what you like to have installed. However, as we are heavily using github we want to promote octotree. +In case you also work with ZenHub you might want to install the Zenhub Browser Extension.

+
+ +
+
+

Windows Tooling

+ +
+
+

Installing software

+
+

The devon IDE already contains a lot of software. But if you need more, here are some ways to get it easily:

+
+
+
+

Chocolatey

+
+

Chocolatey is a repository for free and open source software similar to the repositories known from Linux, like apt, apk, pacman, …​

+
+
+
+

Winget

+
+

Microsoft is also working on a repository for Windows called winget. It is currently in alpha state, but is expected to be integrated in the upcoming Windows 11.

+
+
+
+

Integration into Windows-Explorer

+
+

After you have set up your devonfw-ide on a windows machine, +you already have windows-explorer integration out-of-the-box. +Just right-click on the folder you would like to open in a terminal and choose from the context menu:

+
+
+
    +
  • +

    Git Bash

    +
  • +
  • +

    Open devonfw cmd shell here

    +
  • +
  • +

    Open devonfw PowerShell here

    +
  • +
  • +

    Open devonfw Cygwin Bash here (only if cygwin was installed during setup)

    +
  • +
+
+
+
+

Tabs everywhere

+
+

Many people got used to tabs that have been introduced by all major browsers:

+
+
+
+tabs in firefox +
+
Figure 1. Tabs in Firefox
+
+
+

This nice feature can be added to many other tools.

+
+
+
+

Tabs for Windows Explorer

+
+

If you want to have tabs for windows explorer simply install Clover

+
+
+
+tabs in windows explorer +
+
Figure 2. Tabs in Windows Explorer
+
+
+
+

Tabs for SSH

+
+

If you want to have tabs for your SSH client Putty (or even better Kitty that comes with WinSCP integration) you simply install SuperPutty +BTW: Windows 10 has already an SSH client included.

+
+
+
+tabs for SSH sessions +
+
Figure 3. Tabs for SSH
+
+
+
+

Tabs for CMD

+
+

If you want to have tabs for your windows command-line you simply install ConEmu. Here you can also add other shells like Putty. +Also you should have a look at the new Windows Terminal which also supports tabs.

+
+
+
+tabs for windows shells +
+
Figure 4. Tabs for CMD
+
+
+

See integration to make ConEmu work flawless with devonfw-ide.

+
+
+
+

Windows Helpers

+ +
+
+

Handle passwords

+
+

Do you want complex passwords that differ for each account for security? Do you only want to remember a single password for simplicity? Do you want to have both? Then, you need to install KeePass right now.

+
+
+
+

Real text editor

+
+

A real developer needs a real text editor and not the built-in Windows Notepad. +The most common choice is Notepad++.

+
+
+
+

Real compression tool

+
+

Do you need to deal with ZIP files, TGZ, dpkg, etc.? Just install 7zip and forget about the built-in Windows ZIP support (that is buggy with long file paths, etc.).

+
+
+
+

Smarter clipboard

+
+

Do you want to paste something from the clipboard but meanwhile you had to copy something else? Just, one of the many things you can easily do with ditto.

+
+
+
+

PowerToys

+
+

Microsoft provides some extensions to improve the workflow in windows called PowerToys. They include tools like a file renamer, a way to order your windows on the screen, a color picker and more.

+
+
+
+

Sysinternals Tools

+
+

A real developer will quickly notice that the built-in Windows tools to analyze processes, network connections, autostarts, etc. are quite poor. So, what you really would like is the Sysinternals-Suite. You can make process-explorer your default task manager. Use autoruns to prevent nasty background things to be started automatically. Use tcpview to figure out which process is blocking port 8080, etc.

+
+
+
+

Cope with file locks

+
+

Did you ever fail to delete a file or directory that was locked by some process and you did not even know which one it was? +Then you might love IoBit Unlocker. +See also this article.

+
+
+
+ +
+

Are you used to symbolic and hard links in Linux? Do you have to work with Windows? Would you also like to have such links in Windows? Why not? Windows supports real links (not shortcuts like in other cases). +If you even want to have it integrated in windows explorer you might want to install linkshellextension. However, you might want to disable SmartMove in the configuration if you face strange performance issues when moving folders.

+
+
+
+

Linux

+
+

Install Cygwin and get your bash in windows with ssh-agent, awk, sed, tar, and all the tools you love (or hate). Windows 10 has already a Linux as an installable feature included: WSL and from Version 2004 on WSL2, which is a native Linux Kernel running on Windows (in a light weight VM).

+
+
+
+

X11

+
+

Do you want to connect via SSH and need to open an X11 app from the server? Do you want to see the GUI on your windows desktop? +No problem: Install VcXsrv.

+
+
+
+

Keyboard Freak

+
+

Are you a keyboard shortcut person? Do you want to have shortcuts for things like « and » ? +Then you should try AutoHotKey. +For the example (« and ») you can simply use this script to get started:

+
+
+
+
^<::Send {U+00AB}
+^+<::Send {U+00BB}
+
+
+
+

First, just press [ctrl][<] and [ctrl][>] ([ctrl][shift][<]). Next, create shortcuts to launch your IDE, to open your favorite tool, etc. +If you like a GUI to easily configure the scrips, that comes with a lot of extensions preinstalled, you should have a look at Ac’tive Aid.

+
+
+
+

Paint anywhere on your desktop

+
+

Do you collaborate sharing your screen, and want to mark a spot on top of what you see? Use Epic Pen to do just that.

+
+
+
+

Analyze graphs

+
+

Do you need to visualize complex graph structures? Convert them to Trivial Graph Format (.tgf) and run yEd to get an interactive visualization of your graph.

+
+
+
+

Up your screen capture game

+
+

Capture any part of your screen with a single click, directly upload to dropbox, or run a svn commit all in one go with Greenshot. Another screen capture tool where you can easily manage and edit your screenshots and also do screen recordings with is Screenpresso.

+
+
+
+

Fast Search in Windows

+
+

Everything is a desktop search utility for Windows that can rapidly find files and folders by name.

+
+ +
+
+

MacOS Tooling

+ +
+
+

Finder

+
+

If you want to open a terminal from a folder in Finder and automatically get your environment set properly for devonfw-ide you will find the perfect solution here.

+
+
+
+devonfw-ide integration in MacOS Finder +
+
+
+

So after installing (see below) the integration(s) provided here, you can easily open a terminal ready for your devonfw-ide:

+
+
+
    +
  • +

    right click ([control] + click) on file or folder in Finder

    +
  • +
  • +

    Expand the Quick-Actions sub-menu

    +
  • +
  • +

    Click on the desired action (e.g. Open devonfw-Terminal here)

    +
  • +
  • +

    Verify that your environment is properly initialized by invoking:

    +
    +
    +
    mvn -v
    +
    +
    +
  • +
+
+
+

To get this feature for macOS Terminal.app open Finder and run the workflow system/mac/terminal/Open_devonfw-Terminal_here.workflow (in ${DEVON_IDE_HOME}). For iTerm2.app (that can be installed from App Store) do the same with system/mac/iterm/Open_devonfw-iTerm_here.workflow.

+
+
+
+

Keyboard

+
+

Keyboard support is not an integration however, some users coming from other platforms may struggle with the way macOS deals with (external non-apple) keyboards. +So to make it short: if you are happy with your keyboard and shortcuts, you can skip all the following. +Otherwise, if you think that pressing keys like Home, End, etc. should just work as expected or pressing Alt Gr should allow you to type the special characters as printed on your German keyboard then here you will find a solution to your problems! +To get all automated you can just run the script system/mac/keyboard/install-mac-keyboard-support.sh (in ${DEVON_IDE_HOME}). +If you would like to understand what is going on, you want to customize the keyboard settings to your needs, or you want a keyboard layout other than German ISO, please read on.

+
+
+
+

Keyboard Layouts

+
+

Keyboard layouts allow a fine-grained mapping of each key on your keyboard to its resulting input character or behaviour. +They are macOS native features and do not need to have software running as a background service to make the keyboard mapping work (see Karabiner section below as an alternative). +They are provided as so called bundle (white lego brick icon). Like a macOS app this is a folder containing a Contents folder with a specific sub-folder structure. +In the Resources subfolder *.keylayout files are placed and define the exact mapping for the keyboard. +As an example we provide a Keyboard Layouts folder containing a bundle for a German keyboard mapping.

+
+
+

To install keyboard layouts simply double-click the bundle or copy it to ~/Library/Keyboard Layouts. +To actually use them go to System Preferences and select Keyboard. +Then, select the tab Input Sources. +With the + button you can add a keyboard layout for your daily usage with your Mac. +Please note that the keyboard layout shipped with devonfw-ide is called German-ISO and can be found in the Others section at the end of the list. +It can be used as an example or template, if you want to create your own layout.

+
+
+
+Keyboard Preferences / Input Sources +
+
+
+

When you have multiple mappings in place, on the top menu bar you will find a little icon next to the current time that allows you to switch between the keyboard layouts, which is very handy when you switch from your native MacBook keyboard to an external USB keyboard or vice versa. +Even for a pure MacOS geek this can be helpful in case a friend coming from Windows/Linux is supposed to type something on the Mac in a pair-programming session.

+
+
+

In our German keyboard mapping example you can use the keys like Alt Gr, etc. to type special characters as you would expect and as printed on your keyboard. +To make Pos1, End, etc. work properly across all apps please read on to the next section(s).

+
+
+

In case you would like to create your own keyboard layout you can of course edit the *.keylayout files in a text editor. +However, to make this much more comfortable, you can use the graphical editor tool Ukelele. +Besides, the app itself, the Ukelele dmg file, also contains a Documentation and a Resources folder. +The latter contains many keyboard layouts that you can use as a starting point.

+
+
+
+

Key Bindings

+
+

Still, various keyboard shortcuts might not work as expected for you. +Therefore, we provide you with an advanced configuration in the folder system/mac/keyboard/KeyBindings that you can copy to your ~/Library folder:

+
+
+
+
cd system/mac/keyboard/
+cp -r KeyBindings ~/Library
+
+
+
+

To make the changes work you need to log out and log in again or you can reboot. +After that, your Home (Pos1) and End buttons should work as expected including with selection via Shift and/or Command. +Also, you can use Command together with the left or right arrow key to move between words and combine it with Shift for selection. +As an example, for further customization you can press Command + < to type the unicode character «.

+
+
+

However, still some apps listen to keyboard events on a lower level and come with their own keyboard mappings. +In these apps you might still experience unexpected behaviour. +Solutions can be found in the following sub-sections.

+
+
+
+

Switch Control and Command

+
+

If you are used to windows or linux and get easily confused by the apple keyboard behaviour you might want to switch the Control and the Option key. +Open System Preferences and select Keyboard. +Then, in the first tab, click on the button Modifier Keys…​. +For every keyboard you can customize the behaviour of your modifier keys and therefore switch Control and Option as illustrated in the screenshot:

+
+
+
+Keyboard Preferences / Modifier Keys +
+
+
+

Programmers now should also disable that Control + Space is opening Spotlight Search as otherwise this shortcut can not be redefined in other apps like common IDEs.

+
+
+
+Keyboard Preferences / Shortcuts +
+
+
+
+

== Eclipse

+
+

In Eclipse, move and select by word as described above does not work. +Even worse, the most important shortcut does not work: Control + Space for code completion (content assist). +You can manually redefine the key bindings in Preferences under General > Keys. +However, with multiple IDE installations and workspaces this will quickly get tedious. +Therefore, you can Export and Import specific Preferences such as Keys Preferences to/from a *.epf (Eclipse PreFerences) file. +We have done all this for you so you can just import the file located in system/mac/keyboard/Eclipse/eclipse-mac-keybindings.epf into your Eclipse. +Happy coding.

+
+
+
+

Karabiner

+
+

If you want more dynamics and do not worry about an app that has to run in the background to make your keyboard work as you like (no relevant performance overhead), you can try Karabiner Elements. +This is a powerful tool to remap your keyboard shortcuts. +In the UI you can only directly create and edit Simple Modifications that are too limited for most use-cases. +However, using Complex Modifications you can do a lot of magic to customize the keyboard behaviour to your personal needs. +A key with any combination of modifiers can be mapped to any key with arbitrary modifiers. +This can also be bound to conditions based on the frontmost application or the keyboard model. +These complex modifications are configured as *.json files. +We have included a set with useful rules for external keyboards, programmer shortcuts, etc. +If you have Karabiner installed, you only need to copy the contents of the karabiner folder located in this directory to your ~/.config folder:

+
+
+
+
cd system/mac/keyboard/
+cp karabiner/assets/complex_modifications/*.json ~/.config/karabiner/assets/complex_modifications/
+
+
+
+

Now, if you open the Complex Modifications in the Karabiner app, you can click on the + Add rule button and will see these mappings in the pop up. +Select the rules you want to add (e.g. add all) and you are done. +Unlike other solutions, you can quickly tweak your keyboard without the need to log out and restart apps, which gives faster trial and error turnarounds. +Further, if you want to tweak your own configs, Karabiner comes with a secondary app called Karabiner-EventViewer that shows you the names of the keys, modifiers, and apps for the events you are triggering. +This is very helpful to get the config right.

+
+ +
+
+

Linux Tooling

+
+

There is nothing in this section so far. If you are a Linux user, please share your experience and provide your valuable hints.

+
+ +
+
+

Lombok

+
+

Even though not officially recommended by devon4j some projects want to use lombok in their project. +As this requires some tweaks for IDEs we do support you with this guide in case you want to use it.

+
+
+
+

Lombok in Eclipse

+
+

For eclipse there is a plugin to activate lombok support in eclipse. +We have this already configured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon eclipse add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for lombok based projects you only need to activate this plugin in your project specific settings in lombok.properties for eclipse (replace false with true for plugin_active).

+
+
+
+

Lombok for VS-Code

+
+

For VisualStudio Code there is an extension to activate lombok support in VS-Code. +We have this already preconfigured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon vscode add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for lombok based projects you only need to activate this plugin in your project specific settings in lombok.properties for vscode (replace false with true for plugin_active).

+
+
+
+

Lombok for IntelliJ

+
+

For IntelliJ there is a plugin to activate lombok support in IntelliJ. +Currently we have not yet configured or automated this in devonfw-ide. +Please contribute to change this. See issues #453 and #491.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/devonfw-ide-doc.html b/docs/ide/1.0/devonfw-ide-doc.html new file mode 100644 index 00000000..734a56a0 --- /dev/null +++ b/docs/ide/1.0/devonfw-ide-doc.html @@ -0,0 +1,5744 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw-ide

+
+
+

The devonfw community +${project.version}, ${buildtime} +:description: comprehensive documentation of the devonfw-ide tool to manage your development tools. +:doctype: book +:toc: +:toc-title: Table of Contents +:idprefix: +:idseparator: - +:sectnums: +:reproducible: +:source-highlighter: rouge +:listing-caption: Listing +:chapter-label: +:partnums: +:imagesdir: ./

+
+
+

Introduction

+
+

devonfw provides a solution to building applications which combine best-in-class frameworks and libraries +as well as industry proven practices and code conventions. +It massively speeds up development, reduces risks and helps deliver better results.

+
+
+

This document contains the instructions for the tool devonfw-ide to set up and maintain your development tools including your favorite IDE (integrated development environment).

+
+ +
+

Features

+
+

Every developer needs great tools to work efficiently. Setting up these tools manually can be tedious and error-prone. Furthermore, some projects may require different versions and configurations of such tools. Especially configurations like code-formatters should be consistent within a project to avoid diff-wars.

+
+
+

The devonfw-ide will solve these issues. Here are the features you will find through devonfw-ide:

+
+
+
    +
  • +

    Efficient
    +Set up your IDE within minutes tailored for the requirements of your project.

    +
  • +
  • +

    Automated
    +Automate the setup and update, avoid manual steps and mistakes.

    +
  • +
  • +

    Simple
    +KISS (Keep It Small and Simple), no native installers that globally mess your OS or tool-integration that break with every release. Instead, use templates and simple shell scripts.

    +
  • +
  • +

    Configurable
    +You can change the configuration depending on your needs. Furthermore, the settings contain configuration templates for the different tools (see configurator).

    +
  • +
  • +

    Maintainable
    +For your project you should copy these settings to an own git repository that can be maintained and updated to manage the tool configurations during the project lifecycle. If you use GitHub or GitLab every developer can easily suggest changes and improvements to these settings via pull/merge requests, which is easier to manage with big teams.

    +
  • +
  • +

    Customizable
    +Do you need an additional tool you had never heard of before? Put it in the software folder of the structure. The devon CLI will then automatically add it to your PATH variable.
    +Further you can create your own commandlet for your additional tool. For closed-source tools you can create your own archive and distribute it to your team members as long as you care about the terms and licenses of these tools.

    +
  • +
  • +

    Multi-platform
    +It works on all major platforms: Windows, Mac and Linux.

    +
  • +
  • +

    Multi-tenancy
    +You can have several instances of the devonfw-ide "installed" on your machine for different projects with different tools, tool versions and configurations. You won’t need to set up any physical installation nor changing your operating system. "Installations" of devonfw-ide do not interfere with each other nor with other installed software.

    +
  • +
  • +

    Multiple Workspaces
    +It supports working with different workspaces on different branches. You can create and update new workspaces with a few clicks. You can see the workspace name in the title-bar of your IDE so you do not get confused and work on the right branch.

    +
  • +
  • +

    Free
    +The devonfw-ide is free just like everything from devonfw. See LICENSE for details.

    +
  • +
+
+
+
+

IDEs

+
+

We support the following IDEs:

+
+
+ +
+
+
+

Platforms

+
+

We support the following platforms:

+
+
+ +
+
+
+

Build-Systems

+
+

We support the following build-systems:

+
+
+ +
+
+

However, also other IDEs, platforms, or tools can be easily integrated as commandlet.

+
+
+
+

Motivation

+
+

TL;DR? Let's talk to developers in the right language. Here are some examples with devonfw-ide:

+
+
+
+
[/]$ devon
+You are not inside a devonfw-ide installation: /
+[/]$ cd /projects/devonfw
+[devonfw]$ mvn
+zsh: command not found: mvn
+[devonfw]$ devon
+devonfw-ide environment variables have been set for /projects/devonfw in workspace main
+[devonfw]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/devonfw/software/maven
+Java version: 1.8.0_191, vendor: Oracle Corporation, runtime: /projects/devonfw/software/java
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[devonfw]$ cd /projects/ide-test/workspaces/test/my-project
+[my-project]$ devon
+devonfw-ide environment variables have been set for /projects/ide-test in workspace test
+[my-project]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/ide-test/software/maven
+Java version: 11.0.2, vendor: Oracle Corporation, runtime: /projects/ide-test/software/jdk/Contents/Home
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[ide-test]$ devon eclipse
+launching Eclipse for workspace test...
+[my-project]$ devon build
+[INFO] Scanning for projects...
+...
+[INFO] BUILD SUCCESS
+
+
+
+

This was just a very simple demo of what devonfw-ide can do. For further details have a look at our CLI documentation.

+
+
+

Now you might ask:

+
+
+
    +
  • +

    But I use Windows/Linux/MacOS/… - it works on all platforms!

    +
  • +
  • +

    But how about Windows CMD or Power-Shell? - it works!

    +
  • +
  • +

    But what if I use cygwin or git-bash on windows? - it works!

    +
  • +
  • +

    But I love to use ConEmu or Commander - it works with full integration!

    +
  • +
  • +

    How about macOS Terminal or iTerm2? - it works with full integration!

    +
  • +
  • +

    But I use Zsh - it works!

    +
  • +
  • +

    …​? - it works!

    +
  • +
+
+
+

Wow! So let’s get started with download & setup.

+
+ +
+
+

Setup

+ +
+
+

Prerequisites

+
+

We try to make it as simple as possible for you. However, there are some minimal prerequisites:

+
+
+
    +
  • +

    You need to have a tool to extract *.tar.gz files (tar and gzip). On Windows before Version 10 (1803) use 7-zip. On all other platforms this comes out of the box.

    +
  • +
  • +

    You need to have git and curl installed.

    +
    +
      +
    • +

      On Windows you only need to download and install git for windows. This also ships with bash and curl.

      +
    • +
    • +

      On Linux you might need to install the above tools in case they are not present (e.g. sudo apt-get install git curl or sudo yum install git-core curl)

      +
    • +
    • +

      On MacOS you only need to download and install git for mac.

      +
    • +
    +
    +
  • +
+
+
+
+

Download

+
+

The latest release of devonfw-ide can be downloaded from here (You can find all releases in maven central).

+
+
+
+

Install

+
+

Create a central folder like C:\projects or /projects. Inside this folder, create a sub-folder for your new project such as my-project and extract the contents of the downloaded archive (devonfw-ide-scripts-*.tar.gz) to this new folder. Run the command setup in this folder (on windows double clicking on setup.bat). +That’s all. To get started read the usage.

+
+
+
+

Uninstall

+
+

To "uninstall" your devonfw-ide you only need to call the following command:

+
+
+
+
devon ide uninstall
+
+
+
+

Then you can delete the devonfw-ide top-level folder(s) (${DEVON_IDE_HOME}).

+
+
+

The devonfw-ide is designed to be non-invasive to your operating system and computer. Therefore it is not "installed" on your system in a classical way. Instead you just create a folder and extract the downloaded archive to it. You only have to install some specific prerequisites like git regularly in advance. All the other software remains locally in your devonfw-ide folder. However, there are the following exceptions (which are reverted by devon ide uninstall):

+
+
+
    +
  • +

    The devon command is copied to your home directory (~/.devon/devon)

    +
  • +
  • +

    The devon alias is added to your shell config (~/.bashrc and ~/.zshrc, search for alias devon="source ~/.devon/devon").

    +
  • +
  • +

    On Windows the devon.bat command is copied to your home directory (%USERPROFILE%\scripts\devon.bat)

    +
  • +
  • +

    On Windows this %USERPROFILE%\scripts directory is added to the PATH of your user.

    +
  • +
  • +

    The devonfw-ide will download third-party software to your ~/Downloads/devonfw-ide folder to reduce redundant storage. You have to delete this folder manually as we do not want to be responsible for data-loss in case users manually put files here.

    +
  • +
+
+
+
+

Testing SNAPSHOT releases

+
+

Whenever a story in devonfw-ide is completed by merging a PR, +our github actions will build a new SNAPSHOT release and on success deploy it to nexus on OSSRH. +You can therefore find the latest devonfw SNAPSHOT releases here. +Simply choose the latest SNAPSHOT version folder and then inside the *.tar.gz file for the latest version. +Once downloaded, you can proceed as with official releases (see install).

+
+
+

If you test the latest SNAPSHOT please also give feedback to bug or feature tickets to let us know if things are working or not. +Thanks for your testing, support and help to make devonfw better!

+
+
+
+
+
+

Usage

+ +
+

This section explains the usage of devonfw-ide according to your role:

+
+
+
    +
  • +

    Everybody should read and follow the usage for a developer.

    +
  • +
  • +

    In case you want to administrate devonfw-ide settings for your project, you should also read the usage for the ide-admin.

    +
  • +
+
+
+

Developer

+
+

As a developer you are supported to setup your IDE automated and fast while you can have a nice cup of coffee (after you provided settings-URL and accepted the license). +You only need the settings URL from your ide-admin. +Experienced developers can directly call setup «settings-URL». +Otherwise if you just call setup (e.g. by double-clicking it), you can enter it when you are prompted for Settings URL (using copy&paste to avoid typos).

+
+
+

Note: devonfw-ide supports autocompletion (since 2021.04.001). Currently this only works in bash (on windows use git bash). Simply type devon and hit [Tab] to get completion.

+
+
+
+

Update

+
+

To update your IDE (if instructed by your ide-admin), you only need to run the following command:

+
+
+
+
devon ide update
+
+
+
+

Please note that Windows uses file-locking, which can have ugly side-effects. +To be safe, you should have your IDE tools shut down before invoking the above update command. +E.g. if a tool needs to be updated, the old installation folder will be moved to a backup and the new version is installed on top. +If there are windows file locks in place this can fail and mess up things. +You can still delete the according installation from your software folder and rerun devon ide update if you ran into this error.

+
+
+
+

Working with multiple workspaces

+
+

If you are working on different branches in parallel you typically want to use multiple workspaces.

+
+
+
    +
  1. +

    Go to the workspaces folder in your ${DEVON_IDE_HOME} and create a new folder with the name of your choice (e.g. release2.1).

    +
  2. +
  3. +

    Check out (git clone …​) the according projects and branch into that workspace folder.

    +
  4. +
  5. +

    Open a shell in that new workspace folder (cd to it) and according to your IDE run e.g. eclipse, vscode, or intellij to create your workspace and launch the IDE. You can also add the parameter create-script to the IDE commandlet in order to create a launch-script for your IDE.

    +
  6. +
+
+
+

You can have multiple instances of eclipse running for each workspace in parallel. To distinguish these instances you will find the workspace name in the title of eclipse.

+
+
+
+

Admin

+
+

You can easily customize and configure devonfw-ide for the requirements of your project. +In order to do so, you need to create your own project-specific settings git repository and provide the URL to all developers for the setup. +With tools such as gitlab, bitbucket or github every developer can easily propose changes and improvements. +However, we suggest that one team member is responsible to ensure that everything stays consistent and works. +We will call this person the ide-admin of your project.

+
+
+

The following are the suggested step-by-step instructions how an ide-admin should prepare devonfw-ide for his new project:

+
+
+
    +
  1. +

    Fork ide-settings to a git repository specific for your project (e.g. a new project in the gitlab of your production-line instance). In case you are using github, all you need to do is use the Fork button. In other cases simply create a new and empty git repository and clone this to your machine. Then add the default ide-settings as origin, fetch and pull from it:

    +
    +
    +
    git remote add upstream https://github.com/devonfw/ide-settings.git
    +git fetch upstream
    +git pull upstream master
    +git push
    +
    +
    +
    +

    Now you should have a full fork as a copy of the settings git repo with all its history that is ready for upstream merges.

    +
    +
  2. +
  3. +

    Study the structure of this git repository to understand where to find which configuration.

    +
  4. +
  5. +

    Study the configuration and understand that general settings can be tweaked in the toplevel devon.properties file of your settings git repository.

    +
  6. +
  7. +

    Configure the tools and their versions for your project. Here is an example:

    +
    +
    +
    DEVON_IDE_TOOLS=(java mvn eclipse)
    +ECLIPSE_VERSION=2020-06
    +##use e.g. 8u242b08 for Java 8
    +#JAVA_VERSION=8u242b08
    +JAVA_VERSION=11.0.5_10
    +MAVEN_VERSION=3.6.2
    +
    +
    +
    +

    This way you will take over control of the tools and their versions for every developer in your project team and ensure that things get reproducible.

    +
    +
  8. +
  9. +

    In case you need a proprietary or unsupported tool, you can study how to include custom tools.

    +
  10. +
  11. +

    In case you have very restrictive policies about downloading tools from the internet, you can create and configure a software repository for your project or company.

    +
  12. +
  13. +

    Some of the tools (especially the actual IDEs) allow extensions via plugins. You can customize them to your needs for eclipse, VS code, or intelliJ.

    +
  14. +
  15. +

    In your settings git repository you will find a projects folder. Here you will find configurations files for every git project relevant for your actual project. Feel free to create new projects for your needs and delete the devonfw specific default projects. The projects documentation will explain you how to do this.

    +
  16. +
  17. +

    For every IDE you will also find an according folder in your settings git repository. Here are the individual configuration settings for that IDE. You can change them by directly editing the according configuration files directly with a text-editor in your settings git repository. However, this is a really complex way and will take you a lot of time to find the right file and property to tweak for your actual need. Instead we suggest to study +how to customize IDE specific settings.

    +
  18. +
  19. +

    You may also create new sub-folders in your settings git repository and put individual things according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth to share in your team. However, to share and maintain knowledge we recommend to use a wiki instead.

    +
  20. +
  21. +

    You may want to customize the Eclipse spellchecker dictionary for your project and your language.

    +
  22. +
+
+
+

Everything described in the above steps (except the first one) can be used to manage and update the configuration during the project lifecycle. +However, when you have done changes especially in a larger project, please consider the following best-practices to avoid that a large team gets blocked by a non-functional IDE:

+
+
+
    +
  • +

    Commit your changes to a feature-branch.

    +
  • +
  • +

    First test the changes yourself.

    +
  • +
  • +

    If all works as expected, pick a pilot user of the team to test the changes from the feature branch (go to settings folder, git fetch, git checkout -t origin/feature/«name», devon ide update).

    +
  • +
  • +

    Only after that works well for a couple of days, inform the entire team to update.

    +
  • +
+
+
+
+

Announce changes to your team

+
+

In order to roll out the perfectly configured devonfw-ide to your project initially or when new members join, you only have to provide the Settings URL to the developers of your team. +You can also provide a specific branch with Settings URL#branch to use variations of common settings or to test new settings before making them public to the team.

+
+
+

After you changed and tested your settings git repository (main branch), you only need to announce this to your developers (e.g. via email or some communication tool) so that they can run devon ide update and automatically get up-to-date with the latest changes (see update).

+
+
+

In case you want to go to a new version of devonfw-ide itself, developers have to call devon ide update scripts.

+
+
+ +
+
+

Configuration

+
+

The devonfw-ide aims to be highly configurable and flexible. The configuration of the devon command and environment variables takes place via devon.properties files. The following list shows these configuration files in the order they are loaded so files can override variables from files above in the list:

+
+
+
    +
  1. +

    built-in defaults (for JAVA_VERSION, ECLIPSE_PLUGINS, etc.)

    +
  2. +
  3. +

    ~/devon.properties - user specific global defaults (on windows in %USERPROFILE%/devon.properties)

    +
  4. +
  5. +

    scripts/devon.properties - defaults provided by devonfw-ide. Never directly modify this file!

    +
  6. +
  7. +

    devon.properties - vendor variables for custom distributions of devonfw-ide-scripts, may e.g. tweak SETTINGS_PATH or predefine SETTINGS_URL.

    +
  8. +
  9. +

    settings/devon.properties (${SETTINGS_PATH}/devon.properties) - project specific configurations from settings.

    +
  10. +
  11. +

    workspaces/${WORKSPACE}/devon.properties - optional workspace specific configurations (especially helpful in projects using docker).

    +
  12. +
  13. +

    conf/devon.properties - user specific configurations (e.g. M2_REPO=~/.m2/repository). During setup this file is created by copying a template from ${SETTINGS_PATH}/devon/conf/devon.properties.

    +
  14. +
  15. +

    settings/projects/*.properties - properties to configure project checkout and import

    +
  16. +
+
+
+
+

devon.properties

+
+

The devon.properties files allow to define environment variables in a simple and OS independent way:

+
+
+
    +
  • +

    # comments begin with a hash sign (#) and are ignored

    +
  • +
  • +

    variable_name=variable_value with space etc.

    +
  • +
  • +

    variable_name=${predefined_variable}/folder_name

    +
    +

    variable values can refer to other variables that are already defined, which will be resolved to their value. You have to use ${…​} syntax to make it work on all platforms (never use %…​%, $…​, or $(…​) syntax in devon.properties files).

    +
    +
  • +
  • +

    export exported_variable=this value will be exported in bash, in windows CMD the export prefix is ignored

    +
  • +
  • +

    variable_name=

    +
    +

    this will unset the specified variable

    +
    +
  • +
  • +

    variable_name=~/some/path/and.file

    +
    +

    tilde is resolved to your personal home directory on any OS including windows.

    +
    +
  • +
  • +

    array_variable=(value1 value2 value3)

    +
    +

    This will only work properly in bash worlds but as no arrays are used in CMD world of devonfw-ide it does not hurt on windows.

    +
    +
  • +
  • +

    Please never surround values with quotes (var="value")

    +
  • +
  • +

    This format is similar to Java *.properties but does not support advanced features as unicode literals, multi-lined values, etc.

    +
  • +
+
+
+

In order to know what to configure, have a look at the available variables.

+
+
+

Please only tweak configurations that you need to change and take according responsibility. There is a price to pay for flexibility, which means you have to be careful what you do.

+
+
+

Further, you can configure maven via conf/settings.xml. To configure your IDE such as eclipse or vscode you can tweak the settings.

+
+
+ +
+
+

Variables

+
+

The devonfw-ide defines a set of standard variables to your environment for configuration via variables[.bat] files. +These environment variables are described by the following table. +Those variables printed bold are also exported in your shell (except for windows CMD that does not have such concept). Variables with the value - are not set by default but may be set via configuration to override defaults. +Please note that we are trying to minimize any potential side-effect from devonfw-ide to the outside world by reducing the number of variables and only exporting those that are required.

+
+
+
Variables of devonfw-ide
+

|== == == == == == == == == == == = +|Variable|Value|Meaning +|DEVON_IDE_HOME|e.g. /projects/my-project|The top level directory of your devonfw-ide structure. +|PATH|$PATH:$DEVON_IDE_HOME/software/java:…​|You system path is adjusted by devon command. +|DEVON_HOME_DIR|~|The platform independent home directory of the current user. In some edge-cases (e.g. in cygwin) this differs from ~ to ensure a central home directory for the user on a single machine in any context or environment. +|DEVON_IDE_TOOLS|(java mvn node npm)|List of tools that should be installed and upgraded by default for your current IDE. +|DEVON_IDE_CUSTOM_TOOLS|-|List of custom tools that should be installed additionally. See software for further details. +|DEVON_CREATE_START_SCRIPTS|(eclipse vscode)|List of IDEs that shall be used by developers in the project and therefore start-scripts are created on setup. +|DEVON_OLD_PATH|…​|A "backup" of PATH before it was extended by devon to allow recovering it. Internal variable that should never be set or tweaked. +|WORKSPACE|main|The workspace you are currently in. Defaults to main if you are not inside a workspace. Never touch this variable in any variables file. +|WORKSPACE_PATH|$DEVON_IDE_HOME/workspaces/$WORKSPACE|Absolute path to current workspace. Never touch this variable in any variables file. +|JAVA_HOME|$DEVON_IDE_HOME/software/java|Path to JDK +|SETTINGS_PATH|$DEVON_IDE_HOME/settings|Path to your settings. To keep oasp4j-ide legacy behaviour set this to $DEVON_IDE_HOME/workspaces/main/development/settings. +|M2_REPO|$DEVON_IDE_HOME/conf/.m2/repository|Path to your local maven repository. For projects without high security demands, you may change this to the maven default ~/.m2/repository and share your repository among multiple projects. +|MAVEN_HOME|$DEVON_IDE_HOME/software/maven|Path to Maven +|MAVEN_OPTS|-Xmx512m -Duser.home=$DEVON_IDE_HOME/conf|Maven options +|DEVON_SOFTWARE_REPOSITORY|-|Project specific or custom software-repository. 
+|DEVON_SOFTWARE_PATH|-|Globally shared user-specific local software installation location. +|ECLIPSE_VMARGS|-Xms128M -Xmx768M -XX:MaxPermSize=256M|JVM options for Eclipse +|deprecated: ECLIPSE_PLUGINS|-|Array with "feature groups" and "update site URLs" to customize required eclipse plugins. Deprecated - see Eclipse plugins. +|«TOOL»_VERSION|-|The version of the tool «TOOL» to install and use (e.g. ECLIPSE_VERSION or MAVEN_VERSION). +|EXTRA_JAVA_VERSION|-|An additional (newer) version of java that will be used to run java-based IDEs (e.g. eclipse or intellij). +|«TOOL»_BUILD_OPTS|e.g.clean install|The arguments provided to the build-tool «TOOL» in order to run a build. +|«TOOL»_RELEASE_OPTS|e.g.clean deploy -Dchangelist= -Pdeploy|The arguments provided to the build-tool «TOOL» in order to perform a release build. +|DEVON_IDE_TRACE||If value is not an empty string, the devonfw-ide scripts will trace each script line executed. For bash two lines output: before and again after expansion. ATTENTION: This is not a regular variable working via devon.properties. Instead manually do export DEVON_IDE_TRACE=true in bash or DEVON_IDE_TRACE=true in windows CMD before running a devon command to get a trace log that you can provide to experts in order to trace down a bug and see what went wrong. +|== == == == == == == == == == == =

+
+
+ +
+
+

Devon CLI

+
+

The devonfw-ide is shipped with a central command devon. The setup will automatically register this command so it is available in any shell on your system. This page describes the Command Line Interface (CLI) of this command.

+
+
+
+

Devon

+
+

Without any argument the devon command will determine your DEVON_IDE_HOME and setup your environment variables automatically. In case you are not inside of a devonfw-ide folder the command will echo a message and do nothing.

+
+
+
+
[/]$ devon
+You are not inside a devon IDE installation: /
+[/]$ cd /projects/my-project/workspaces/test/my-git-repo
+[my-git-repo]$ devon
+devonfw-ide environment variables have been set for /projects/my-project in workspace test
+[my-git-repo]$ echo $DEVON_IDE_HOME
+/projects/my-project
+[my-git-repo]$ echo $JAVA_HOME
+/projects/my-project/software/java
+
+
+
+
+

Commandlets

+
+

The devon command supports a pluggable set of commandlets. Such a commandlet is provided as the first argument to the devon command and may take additional arguments:

+
+
+

devon «commandlet» [«arg»]*

+
+
+

Technically, a commandlet is a bash script located in $DEVON_IDE_HOME/scripts/command. So if you want to integrate another tool with devonfw-ide we are awaiting your pull-request. +Every commandlet takes the following generic arguments:

+
+
+
Generic arguments of every commandlet
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|-b or --batch |run in non-interactive mode (do not ask any questions). +|-q or --quiet |be quiet and avoid output. +|== == == == == == == == == == == =

+
+
+
+

Command-wrapper

+
+

For many commandlets the devon command acts as a wrapper. +Similar to mvnw or gradlew you can use it as a proxy command. +Therefore devon mvn clean install will be the same as mvn clean install. +The benefit when using devon as wrapper is that it will even work when the command (mvn, node, npm, etc.) is not on your PATH variable or even not yet installed. +We see the main benefit in this for writing portable scripts that you may commit to your git repository and that will then run everywhere and will lazily install the required tools on the fly. +In your daily usage you can and surely should avoid to always type devon as prefix to every command. +However, when you automate and want to avoid "command not found" errors, you can simply prefix the command with devon.

+
+
+
+

Commandlet overview

+
+

The following commandlets are currently available:

+
+
+ +
+ +
+
build
+
+

The build commandlet is an abstraction of build systems like maven, gradle, yarn, npm, etc. +It will auto-detect your build-system (via existence of files like pom.xml, package.json, etc.). According to this detection, it will simply delegate to the according commandlet of the specific build system. If that build-system is not yet available it will be downloaded and installed automatically.

+
+
+

So devon build allows users to build any project without bothering about the build-system. Further specific build options can be configured per project. This makes devon build a universal part of every definition of done. Before pushing your changes, please always run the following command to verify the build:

+
+
+

devon build

+
+
+

You may also supply additional arguments as devon build «args». This will simply delegate these arguments to the detected build command (e.g. call mvn «args»).

+
+ +
+
+
Docker
+
+

The Docker commandlet allows you to install and use Docker. +On Windows, WSL 2 (Windows Subsystem for Linux) has to be installed properly as a prerequisite.

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon docker «args») are explained by the following table:

+
+
+
Usage of devon docker
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Docker (install and verify) as per above flow. +|«args» |call docker with the specified arguments. Call docker help for details or use docker directly as preferred." («args») +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

The Docker commandlet will install Docker automatically. +Please note that besides the sandbox concept of devonfw-ide this is a global installation on your system. +When uninstalling devonfw-ide, you may have to manually uninstall Docker and Kubernetes if you do not need them anymore.

+
+
+
+
requirements
+
+

Running Docker and especially Kubernetes on your machine in the background will require quite some resources. +This will allocate at least 2GB of additional RAM.

+
+
+

You will need at least 8GB of total RAM while we recommend to use 16GB+.

+
+
+

You may also tune and scale it to your needs. +When using Docker Desktop (Windows or MacOS) simply go to the resources tab in the settings. +It will depend on your usage frequency if you want to have it running in the background all the time. +This is a balance between resource utilization and convenience. +If you use Docker and Kubernetes on your local machine on a daily basis this makes sense.

+
+
+

In case you only use Docker rarely, you can save resources by stopping it when not needed after it has been installed.

+
+
+
+
Windows and macOS
+
+

To enable or disable autostart, launch Docker Desktop on Windows or MacOS and go to the Preferences (gear icon in the title bar). Then in the General tab you can check or uncheck the option Start Docker Desktop when you login (see also here). When autostart is disabled and you launch Docker Desktop it will notice and ask you to start the service or do this automatically for you. +On Windows you can also manually tweak this:

+
+
+
    +
  • +

    Hit [windows][r]

    +
  • +
  • +

    Enter services.msc

    +
  • +
  • +

    Confirm with OK

    +
  • +
  • +

    In the services app search for the Docker Desktop Service in the list and select it.

    +
  • +
  • +

    Now you can start or stop the service by clicking on the according link text.

    +
  • +
  • +

    Also when right clicking on Docker Desktop Service and selecting Options from the context-menu, you can change the start type to automatic or manual.

    +
  • +
+
+
+
+
== Mac M1
+
+

In case you have a new Mac with an M1 CPU, we automatically download and install the according ARM version of Docker Desktop for macOS. +However, if you use Docker and search for images you may end up with errors like:

+
+
+
+
docker: no matching manifest for linux/arm64/v8 in the manifest list entries.
+
+
+
+

So with an M1 CPU you may need to add --platform linux/x86_64 as an option to your Docker command to find the expected container image.

+
+
+
+
Linux
+
+

There is no Docker Desktop for Linux. +As Docker initially comes from the Linux world, it is easy to set it up on a Linux machine and use it from the commandline. +Therefore we do not install a GUI for you in case you are a Linux user. +In case you need a GUI for Docker and Kubernetes on Linux you can choose from the following options:

+
+
+ +
+
+
+
usage
+
+

Once installed via setup, you can run Docker directly from any shell of your OS directly. +Run docker help to get started and use the online documentations and resources on the web to get familiar with Docker. +It is not our intention to repeat this here.

+
+
+

Please note that the docker commandlet is a command wrapper.

+
+ +
+
+
eclipse
+
+

The eclipse commandlet allows to install, configure, and launch the Eclipse IDE. +To launch eclipse for your current workspace and devonfw-ide installation simply run: +devon eclipse

+
+
+

You may also supply additional arguments as devon eclipse «args». These are explained by the following table:

+
+
+
Usage of devon eclipse
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then to command will be invoked for each workspace +|setup |setup Eclipse (install or update) +|add-plugin «id» [«url»]|install an additional plugin +|run |launch Eclipse (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add |reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+

There are variables that can be used for Eclipse. These are explained by the following table:

+
+
+
Variables of devonfw-ide for Eclipse
+

|== == == == == == == == == == == = +|Variable|Meaning +|ECLIPSE_VERSION|The version of the tool Eclipse to install and use. +|ECLIPSE_EDITION_TYPE|The edition of the tool Eclipse to install and use. You can choose between Java for standard edition or JEE for enterprise edition. +|*EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with Eclipse you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder eclipse/plugins (click on this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example tmterminal.properties:

+
+
+
+
plugin_url=http://download.eclipse.org/tm/terminal/marketplace
+plugin_id=org.eclipse.tm.terminal.feature.feature.group,org.eclipse.tm.terminal.view.feature.feature.group,org.eclipse.tm.terminal.control.feature.feature.group,org.eclipse.tm.terminal.connector.ssh.feature.feature.group,org.eclipse.tm.terminal.connector.telnet.feature.feature.group
+plugin_active=true
+
+
+
+

The variables are defined as follows:

+
+
+
    +
  • +

    plugin_url defines the URL of the Eclipse update site of the plugin

    +
  • +
  • +

    plugin_id defines the feature group ID(s) to install. To install multiple features/plugins provide a comma-separated list of IDs. If you want to customize devonfw-ide with new plugins you can first install them manually and then go to About Eclipse > Installation Details. There you can filter for your newly installed plugin and find the values in the Id column. Copy & paste them from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise, developers can still install the plugin manually via devon eclipse add-plugin «plugin-name» from the config file settings/eclipse/plugins/«plugin-name».properties. See the settings/eclipse/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. This e.g. applies for devstyle that allows a real dark mode for eclipse and tunes the theming and layout of Eclipse in general. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually.

+
+
+

As the maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
legacy plugin config
+
+

For downward compatibility we still support the deprecated legacy configuration if the folder settings/eclipse/plugins does not exist: +The project configuration typically defines the plugins that will be installed via ECLIPSE_PLUGINS variable. Otherwise defaults from this eclipse commandlet will apply. +Be aware that this comes at your own risk and sometimes plugins can conflict and break your IDE.

+
+
+

Here is an example how a project can configure the plugins in its devon.properties inside the settings:

+
+
+
+
ECLIPSE_PLUGINS=("AnyEditTools.feature.group" "https://raw.githubusercontent.com/iloveeclipse/plugins/latest/" "com.ess.regexutil.feature.group" "http://regex-util.sourceforge.net/update/")
+
+
+
+

For the above listed plugins you can also use the short form:

+
+
+
+
ECLIPSE_PLUGINS=("anyedit" "" "regexutil" "")
+
+
+
+

Of course you may also mix plugin IDs with fully qualified plugins.

+
+
+
+
dictionary
+
+

Eclipse already comes with a built-in spellchecker. This is very helpful when writing comments. The default settings of devonfw-ide ship with a project specific dictionary file and according configurations to enable spellchecking and configuring this dictionary. +When typing JavaDoc, inline comments or other texts the spellchecker will underline unknown words in red. +If your cursor is located at such a word you can hit [Ctrl][1] to get a context menu with additional options. +There you can either choose similar correct words to correct a typo or you may even add the word (maybe a new business term) to your local dictionary.

+
+
+
+"Eclipse spellchecker" +
+
+
+

In the latter case, you should commit the changes to your settings so that it will be available to your entire team. +For further details about committing changes to the settings please consult the admin usage.

+
+
+
+
non-english dictionary
+
+

In case your project has to write documentation or text in languages other than English, you might want to prefill your project dictionary for that language. +Here we collect a list of such dictionaries that you can download and merge into your project dictionary:

+
+
+ +
+ +
+
+
gradle
+
+

The gradle commandlet allows to install, configure, and launch gradle. It is similar to gradle-wrapper. So calling devon gradle «args» is more or less the same as calling gradle «args» but with the benefit that the version of gradle preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon gradle «args») are explained by the following table:

+
+
+
Usage of devon gradle
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup gradle (install and verify), configurable via GRADLE_VERSION +|«args» |run gradle with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
help
+
+

The help commandlet provides help for the CLI.

+
+
+
Usage of devon help
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |Print general help +|«command» |Print help for the commandlet «command». +|== == == == == == == == == == == =

+
+
+

Please note that devon help «command» will do the same as devon «command» help.

+
+ +
+
+
ide
+
+

The ide commandlet manages your devonfw-ide. +You need to supply additional arguments as devon ide «args». These are explained by the following table:

+
+
+
Usage of devon ide
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup [«SETTINGS_URL»] |setup devonfw-ide (cloning the settings from the given URL, optionally from specific branch URL#branch) +|update [«package»] |update devonfw-ide +|update scripts [to «version»] |update devonfw-ide +|uninstall |uninstall devonfw-ide (if you want to remove it entirely from your system) +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

Run devon ide setup to initially setup your devonfw-ide. It is recommended to run the setup script in the top-level directory ($DEVON_IDE_HOME). However, in case you want to skip some system specific integration, you may also run this command directly instead. The setup only needs to be called once after a new devonfw-ide instance has been created. It will follow this process:

+
+
+
    +
  • +

    install the devon command on your system (if not already installed).

    +
  • +
  • +

    clone the settings (you may provide a git URL directly as argument or you will be prompted for it).

    +
  • +
  • +

    install all required software from DEVON_IDE_TOOLS variable (if not already installed).

    +
  • +
  • +

    configure all these tools

    +
  • +
  • +

    create IDE launch scripts

    +
  • +
  • +

    perform OS specific system integration such as Windows Explorer integration (only done from setup script and not from devon ide setup)

    +
  • +
+
+
+
+
update
+
+

Run devon ide update to update your devonfw-ide. This will check for updates and install them automatically. +The optional extra argument («package») behaves as follows:

+
+
+
    +
  • +

    scripts: check if a new version of devonfw-ide-scripts is available. If so it will be downloaded and installed. As Windows is using file-locks, it is tricky to update a script while it is executed. Therefore, we update the scripts folder as an async background task and have to abort further processing at this point on windows as a workaround.

    +
  • +
  • +

    settings: update the settings (git pull).

    +
  • +
  • +

    software: update the software (e.g. if versions have changed via scripts or settings update).

    +
  • +
  • +

    projects: update the projects (checkout and import repositories into workspace/IDEs).

    +
  • +
  • +

    all: do all the above sequentially.

    +
  • +
  • +

    none: settings and software are updated by default if no extra argument is given. This is the regular usage for project developers. Only perform an update of scripts when you are requested to do so by your technical lead. Bigger projects especially need to test updates before rolling them out to the entire team. If developers always updated the latest release of the scripts which is released globally, some project functionality would break causing problems and extra efforts in the teams.

    +
  • +
+
+
+

In order to update to a specific version of scripts an explicit version can be specified after the additional to argument:

+
+
+
+
devon ide update scripts to 3.1.99
+
+
+
+

The above example will update to the exact version 3.1.99 no matter if this is an upgrade or a downgrade of your current installed version. +If you just use devon ide update scripts then the latest available version will be installed. In larger teams it is recommended to communicate exact version updates to avoid that a new release can interfere and break anything. Therefore, some pilot user will test a new version for the entire team and, only after a successful test, they will communicate to the team to update to that exact version by providing the complete command as in the above example.

+
+
+
+
uninstall
+
+

We hope you love devonfw-ide. However, if you don’t and want to get rid of it entirely and completely remove all integration, you can use this command:

+
+
+
+
devon ide uninstall
+
+
+
+

This will remove devonfw-ide from all central places of your OS (user home directory such as scripts, .devon, .bashrc, as well as windows registry, etc.). +However, it will not remove your current installations (or shared software folder). So after running this uninstall, simply remove your DEVON_IDE_HOME directory of all devonfw-ide installations and potential shared software folder. You may also want to clean up your ~/Downloads directory from files downloaded by devonfw-ide. We do not automate this as deleting a directory is a very simple manual step and we do not want to take responsibility for severe data loss if your workspaces contained valuable work.

+
+ +
+
+
intellij
+
+

The intellij commandlet allows to install, configure, and launch IntelliJ. +To launch IntelliJ for your current workspace and devonfw-ide installation, simply run: +devon intellij

+
+
+

You may also supply additional arguments as devon intellij «args». These are explained by the following table:

+
+
+
Usage of devon intellij
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then the command will be invoked for each workspace +|setup |setup IntelliJ (install or update) +|add-plugin «id»|install an additional plugin +|run |launch IntelliJ (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+

There are variables that can be used for IntelliJ. These are explained by the following table:

+
+
+
Variables of devonfw-ide for intelliJ
+

|== == == == == == == == == == == = +|Variable|Meaning +|INTELLIJ_VERSION|The version of the tool IntelliJ to install and use. +|INTELLIJ_EDITION_TYPE|The edition of the tool IntelliJ to install and use. The value C means Community edition and the value U means Ultimate edition. The Ultimate edition requires a license. The user has to buy the license separately and it is not part of devonfw-ide. The devonfw-ide only supports download and installation. +|*EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with IntelliJ you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder intellij/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example scala.properties:

+
+
+
+
plugin_id=org.intellij.scala
+plugin_active=false
+
+
+
+

The variables are defined as follows:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins use the search on https://plugins.jetbrains.com/idea_ce to find the plugin of your choice. Select the tab Versions and click on a version in the list. The plugin ID is displayed in the upper right corner. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon intellij add-plugin «plugin_id».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of IntelliJ. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+ +
+
+
ionic
+
+

The ionic commandlet allows to install, configure, and launch ionic (ionic-cli). Calling devon ionic «args» is more or less the same as calling ionic «args» but with some advanced features and ensuring that ionic is properly set up for your project.

+
+
+

The arguments (devon ionic «args») are explained by the following table:

+
+
+
Usage of devon ionic
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup ionic (install and verify), configurable via IONIC_VERSION +|create |Create a new devon4ng ionic project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ionic with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
jasypt
+
+

The jasypt commandlet allows to install jasypt and encrypt or decrypt secrets using strong encryption given a secure masterpassword. See also devon4j password encryption guide for further details.

+
+
+

The arguments (devon jasypt «args») are explained by the following table:

+
+
+
Usage of devon jasypt
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup jasypt (install and verify), configurable via JASYPT_VERSION +|encrypt |Encrypt a secret with a masterpassword +|decrypt |Decrypt an encrypted secret with a masterpassword +|== == == == == == == == == == == =

+
+
+
+
example
+
+
+
devon jasypt encrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: secret
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: secret
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+devon jasypt decrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+secret
+
+
+ +
+
+
java
+
+

The java commandlet allows to install and setup Java. Also it supports devon4j. +The arguments (devon java «args») are explained by the following table:

+
+
+
Usage of devon java
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup OpenJDK (install or update and verify), configurable via JAVA_VERSION (e.g. 8u242b08 or 11.0.6_10) +|create «args» |create a new Java project based on devon4j application template. If a single argument is provided, this is the package name and is automatically split into groupId and artifactId. Use -DdbType=«db» to choose the database (hana, oracle, mssql, postgresql, mariadb, mysql, h2, hsqldb). Any option starting with dash is passed as is. +|migrate [from «version»] [single] |migrate a devon4j project to the latest version. If for some reason the current devonfw version can not be auto-detected you may provide it manually after the 'from' argument. Also the 'single' option allows to migrate only to the next available version. +|cicd «args» |generate cicd files for the current devon4java project +|== == == == == == == == == == == =

+
+
+

Since 2021.12.003 an extra version of Java can be configured via EXTRA_JAVA_VERSION variable. This can be used to launch your IDE with a different (newer) version of Java but keeping the build of your project stable.

+
+
+
+
create
+
+

Examples for create a new devon4j application:

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create -Dversion=0.0.1-alpha1 com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 0.0.1-alpha1, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group demo-app
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId demo-app, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp -DartifactId=demo-app -DdbType=hana
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId demo-app, version 1.0.0-SNAPSHOT, and SAP hana database.

+
+
+
+
devon java create com.example.domain.myapp -DdbType=oracle -Dversion=0.0.1 com.example.group -Dbatch=batch
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 0.0.1, oracle database, and with a batch module.

+
+
+
+
migrate
+
+

Example for migrating a devon4j application:

+
+
+
+
devon java migrate
+
+
+
+

Will migrate current devon4j application to the latest version available.

+
+ +
+
+
jenkins
+
+

The jenkins commandlet allows to install, configure, and launch Jenkins.

+
+
+
Usage of devon jenkins
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup Jenkins (install and verify) +|start |Start your local Jenkins server +|stop |Stop your local Jenkins server +|add |Add current project as CI job to your local Jenkins +|== == == == == == == == == == == =

+
+ +
+
+
Kubernetes
+
+

The kubectl commandlet allows to install and use kubernetes. +On Windows WSL 2 (Windows Subsystem for Linux) has to be installed properly as a prerequisite. +The setup on Windows will then install kubernetes with K3D. K3D will create a cluster with a single node with the default name "devonfw-cluster".

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon kubectl «args») are explained by the following table:

+
+
+
Usage of devon kubectl
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Kubernetes (install and verify) as per above flow. +|«args» |call kubectl with the specified arguments. Call kubectl help for details or use kubectl directly as preferred. +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

Please note that on Windows and macOS, Kubernetes support comes together with Docker Desktop that is installed via docker commandlet. +When you have installed and launched Docker Desktop, you can once enable Kubernetes in the Preferences.

+
+
+

On Linux however, Kubernetes is installed separately by this commandlet.

+
+
+
+
usage
+
+

Once installed via setup, you can run kubectl directly from any shell of your OS. +Run kubectl help to get started and use the online documentation and resources on the web to get familiar with Kubernetes. +It is not our intention to repeat this here.

+
+
+

Please note that the kubectl commandlet is a command wrapper.

+
+ +
+
+
mvn
+
+

The mvn commandlet allows to install, configure, and launch maven. It is similar to maven-wrapper and mdub. So calling devon mvn «args» is more or less the same as calling mvn «args» but with the benefit that the version of maven preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon mvn «args») are explained by the following table:

+
+
+
Usage of devon mvn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via MVN_BUILD_OPTS +|setup |setup Maven (install and verify), configurable via MAVEN_VERSION +|get-version |Print the version of your current project. Will consolidate the version for multi-module projects ignoring dev[-SNAPSHOT] versions and fail on mixed versions. +|set-version «nv» [«cv»] |Set the version of your current project to «nv» (assuming your current version is «cv»). +|check-no-snapshots |Check if no «version»-SNAPSHOT dependencies are used. +|check-top-level-project |Check if you are running on a top-level project or fail if in a module or no maven project at all. +|release |Start a clean deploy release build, configurable via MVN_RELEASE_OPTS +|«args» |run maven with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
ng
+
+

The ng commandlet allows to install, configure, and launch ng (angular-cli). Calling devon ng «args» is more or less the same as calling ng «args» but with some advanced features and ensuring that ng is properly set up for your project.

+
+
+

The arguments (devon ng «args») are explained by the following table:

+
+
+
Usage of devon ng
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup ng (install and verify), configurable via NG_VERSION +|create |Create a new devon4ng project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ng with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
node
+
+

The node commandlet allows to install and setup node.js. +The arguments (devon node «args») are explained by the following table:

+
+
+
Usage of devon node
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup node.js (install and verify), configurable via NODE_VERSION +|create «name» [«args»] | create a new devon4node application (same as devon4node new) +|generate «s» [«args»] | generate devon4node components using the schematic «s» (same as devon4node generate) +|db «c» [«args»] | execute a TypeORM command «c» (same as devon4node db) +|cicd «args» |generate cicd files for the current devon4node project +|«args» | call NodeJS with the specified arguments +|== == == == == == == == == == == =

+
+ +
+
+
npm
+
+

The npm commandlet allows to install, configure, and launch npm. Calling devon npm «args» is more or less the same as calling npm «args» but with the benefit that the version of npm preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon npm «args») are explained by the following table:

+
+
+
Usage of devon npm
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via NPM_BUILD_OPTS +|setup |setup NPM (install and verify), configurable via NPM_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |Start a clean deploy release build, configurable via NPM_RELEASE_OPTS +|«args» |run NPM with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
release
+
+

Create a release in a standardized way including the following steps:

+
+
+
    +
  • +

    verify the current project (no local changes, etc.)

    +
  • +
  • +

    warn if «version»-SNAPSHOT dependencies are used

    +
  • +
  • +

    determine «version» (if currently «version»-SNAPSHOT) and print out release information.

    +
  • +
  • +

    ask user for confirmation

    +
  • +
  • +

    bump release to «version» in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    create annotated tag for your release as release/«version»

    +
  • +
  • +

    invoke deployment on build-system

    +
  • +
  • +

    set next version as («version»+1)-SNAPSHOT in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    push your changes

    +
  • +
+
+
+
Usage of devon release
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|…​ |any optional argument will directly be passed to the actual command to build the deployment +|== == == == == == == == == == == =

+
+
+
+
Build-Tools
+
+

This release commandlet utilizes the build commandlet to support multiple build-tools such as maven, gradle, or npm. Each of those commandlets should respect the variable «TOOL»_RELEASE_OPTS to customize the parameters for the release build.

+
+
+

So e.g. if a pom.xml is detected, maven will be used. In this example the variable MVN_RELEASE_OPTS is used that defaults to clean deploy -Dchangelist= -Pdeploy. +If you provide a specific argument this will be passed additionally. +So if you invoke the command devon release -P myProfile, the above step invoke deployment on build-system would technically call this:

+
+
+
+
mvn clean deploy -Dchangelist= -Pdeploy -P myProfile
+
+
+
+

Please also note that it is very tricky to determine and modify the version of a project in a fully generic way. +Even though we try our best to support different scenarios, we can not ensure this is working for edge-cases. +Therefore, we strongly encourage to follow best practices such as ci-friendly maven. +Further, sticking to the defaults and follow the devonfw standard to name the profile for custom goals in deployment simply deploy is recommended.

+
+ +
+
+
sonar
+
+

The sonar commandlet allows to install, configure, and launch SonarQube.

+
+
+
Usage of devon sonar
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup SonarQube (install and verify) +|start |Start your local SonarQube server +|stop |Stop your local SonarQube server +|analyze |Analyze current project with SonarQube +|== == == == == == == == == == == =

+
+ +
+
+
vscode
+
+

The vscode commandlet allows to install, configure, and launch Visual Studio Code. +To launch VSCode for your current workspace and devonfw-ide installation, simply run: +devon vscode

+
+
+

You may also supply additional arguments as devon vscode «args». These are explained by the following table:

+
+
+
Usage of devon vscode
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then the command will be invoked for each workspace +|setup |setup VSCode (install or update) +|add-plugin «id»|install an additional plugin (extension) +|run |launch VSCode (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with VS Code you need plugins (called extensions in VS Code). Of course devonfw-ide can automate this for you: +In your settings git repository create a folder vscode/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example devonfw-extension-pack.properties:

+
+
+
+
plugin_id=devonfw.devonfw-extension-pack
+plugin_active=true
+
+
+
+

The variables are defined as follows:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins click on Extensions at the bottom of the left navigation icon bar in VS code. Then use the search to find the plugin of your choice. If you click on it the plugin ID is displayed in grey beside the official title at the top of the plugin details page. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon vscode add-plugin «plugin-name» from the config file settings/vscode/plugins/«plugin-name».properties. See the settings/vscode/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of VS code. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
cleaning plugins on update
+
+

If you want to strictly manage the plugins for VS code in your project, you can create or edit the file settings/vscode/plugins in your settings and add this variable:

+
+
+
+
clean_plugins_on_update=true
+
+
+
+

This will wipe all plugins when an update of VS code is performed (e.g. via devon ide update) and reinstall all configured plugins. This gives you more control over the governance of the plugins and allows you to remove a plugin later during the project lifecycle. However, it will delete all manually installed plugins automatically without asking.

+
+ +
+
+
yarn
+
+

The yarn commandlet allows to install, configure, and launch yarn. Calling devon yarn «args» is more or less the same as calling yarn «args» but with the benefit that the version of yarn preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon yarn «args») are explained by the following table:

+
+
+
Usage of devon yarn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via YARN_BUILD_OPTS +|setup |setup yarn (install and verify), configurable via YARN_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |start a clean deploy release build, configurable via YARN_RELEASE_OPTS +|«args» |run yarn with the given arguments («args») +|== == == == == == == == == == == =

+
+
+ +
+
+
+

Structure

+
+

The directory layout of your devonfw-ide will look like this:

+
+
+
File structure of your devonfw-ide
+
+
/ projects (or C:\Projects, etc.)
+└──/ my-project ($DEVON_IDE_HOME)
+    ├──/ conf
+    ├──/ log
+    ├──/ scripts
+    ├──/ settings
+    ├──/ software
+    ├──/ system
+    ├──/ updates
+    ├──/ workspaces
+    ├── setup
+    ├── setup.bat
+    └── devon-ide-doc.pdf
+
+
+
+

The elements of the above structure are described in the individual sections. As they are hyperlinks you can simply click on them to get more details.

+
+ +
+
conf
+
+

This folder contains configurations for your IDE:

+
+
+
File structure of the conf folder
+
+
/ conf
+├──/ .m2
+│  ├──/ repository
+│  │  ├──/ ant
+│  │  ├──/ ...
+│  │  └──/ zw
+│  ├── settings-security.xml
+│  └── settings.xml
+├──/ .sonar
+├──/ ...
+└── variables
+
+
+
+

The .m2 folder is used for configurations of maven. It contains the local repository folder used as cache for artifacts downloaded and installed by maven (see also maven repositories). +Further, there are two configuration files for maven:

+
+
+
    +
  • +

    settings.xml initialized from a template from your devonfw-ide [settings]. You may customize this to your needs (configuring HTTP proxies, credentials, or other user-specific settings). Secrets can be specified as $[«variable.name»] and will be prompted, encrypted and replaced automatically during the setup (unless in batch mode). Please note that this process is skipped in batch mode and also if you use the default settings URL (for simplicity of testing). To make use of this feature simply fork or copy the settings to your own git repo. In case your credentials have changed or you made a typo, you can simply redo this step by first moving your ${DEVON_IDE_HOME}/conf/.m2/settings.xml file to a temporary folder and then calling devon mvn setup.

    +
  • +
  • +

    settings-security.xml is auto-generated for you by devonfw-ide with a random password. This should make it easier for devonfw-ide users to use password encryption and never add passwords in plain text for better security.

    +
  • +
+
+
+

Finally, there is a file variables for the user-specific configuration of devonfw-ide.

+
+ +
+
+
log
+
+

The log directory is used to store log files e.g. for the IDE configurator. You may look here for debug information if something goes wrong.

+
+ +
+
+
scripts
+
+

This directory is the heart of the devonfw-ide and contains the required scripts.

+
+
+
File structure of the scripts folder
+
+
/scripts
+├──/ command
+│  ├── build
+│  ├── docker
+│  ├── eclipse
+│  ├── gradle
+│  ├── help
+│  ├── ide
+│  ├── intellij
+│  ├── ionic
+│  ├── jasypt
+│  ├── java
+│  ├── jenkins
+│  ├── kubectl
+│  ├── mvn
+│  ├── ng
+│  ├── node
+│  ├── npm
+│  ├── project
+│  ├── release
+│  ├── sonar
+│  ├── vscode
+│  └── yarn
+├── devon
+├── devon.bat
+├── environment-project
+├── environment-project.bat
+├── functions
+└── devon.properties
+
+
+
+

The command folder contains the commandlets. +The devon script is the key command line interface for devonfw-ide. +There is also devon.bat that can be used in cmd or PowerShell. +As the devon CLI can be used as a global command on your computer from any directory and gets installed centrally, it aims to be stable, minimal, and lightweight. +The key logic to set up the environment variables is therefore in a separate script environment-project and its Windows variant environment-project.bat inside this scripts folder. +The file functions contains a collection of reusable bash functions. +These are sourced and used by the commandlets. +Finally the devon.properties file contains defaults for the general configuration of devonfw-ide.

+
+ +
+
+
settings
+
+

The devonfw-ide requires settings with configuration templates for the arbitrary tools.

+
+
+

To get an initial set of these settings we provide the default ide-settings as an initial package. These are also released so you can download the latest stable or any history version at maven central.

+
+
+

To test devonfw-ide or for very small projects you can also use the latest default settings (just hit return when setup is asking for the Settings URL). +However, for collaborative projects we strongly encourage you to distribute and maintain the settings via a dedicated and project specific git repository. +This gives you the freedom to control and manage the tools with their versions and configurations during the project lifecycle. +Therefore simply follow the admin usage guide.

+
+
+
+
Structure
+
+

The settings folder (see SETTINGS_PATH) has to follow this file structure:

+
+
+
File structure of settings
+
+
/settings
+├──/ devon
+│  ├──/ conf
+│  │  ├──/ .m2
+│  │  │  └── settings.xml
+│  │  ├──/ npm
+│  │  │  └── .npmrc
+│  │  └── devon.properties
+├──/ eclipse
+│  ├──/ workspace
+│  │  ├──/ setup
+│  │  └──/ update
+│  ├── lifecycle-mapping-metadata.xml
+│  └── project.dictionary
+├──/ ...
+├──/ sonarqube
+│  └──/ profiles
+│     ├── Devon-C#.xml
+│     ├── ...
+│     └── Devon-XML.xml
+├──/ vscode
+│  └──/ workspace
+│     ├──/ setup
+│     └──/ update
+└── devon.properties
+
+
+
+

As you can see, the settings folder contains sub-folders for tools of the IDE. +So the devon folder contains devon.properties files for the configuration of your environment. +Further, for the IDEs such as eclipse or vscode, the according folders contain the templates to manage the workspace via our configurator.

+
+
+
+
Configuration Philosophy
+
+

Different tools and configuration files require a different handling:

+
+
+
    +
  • +

    Where suitable, we directly use these configurations from your settings (e.g. for eclipse/lifecycle-mapping-metadata.xml, or eclipse/project.dictionary).

    +
  • +
  • +

    The devon folder in settings contains templates for configuration files. These are copied to the devonfw-ide installation during setup (if no such file already exists). In this way the settings repository can provide reasonable defaults but allows the user to take over control and customize to his personal needs (e.g. .m2/settings.xml).

    +
  • +
  • +

    Other configurations need to be imported manually. To avoid manual steps and simplify use we try to automate as much as possible. This currently applies to sonarqube profiles but will be automated with sonar-devon4j-plugin in the future.

    +
  • +
  • +

    For tools with complex configuration structures like eclipse, intellij, or vscode we provide a smart mechanism via our configurator.

    +
  • +
+
+
+
+
Customize Settings
+
+

You can easily customize these settings for the requirements of your project. We suggest that one team member is responsible to ensure that everything stays consistent and works.

+
+
+

You may also create new sub-folders in settings and put individual items according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth to share in your team. However, to share and maintain knowledge we recommend to use a wiki.

+
+ +
+
+
software
+
+

The software folder contains the third party tools for your IDE such as maven, npm, java, etc. +With respect to the licensing terms you may create a custom archive containing a devonfw-ide together with the required software. +However, to be platform independent and allow lightweight updates, the devonfw-ide is capable to download and install the software automatically for you.

+
+
+
+
Repository
+
+

By default, software is downloaded via the internet from public download URLs of the according tools. However, some projects may need specific tools or tool versions that are not publicly available. +In such case, they can create their own software repository (e.g. in a VPN) and configure the base URL of it via DEVON_SOFTWARE_REPOSITORY variable. +Then, devonfw-ide will download all software from this repository only instead of the default public download URLs. +This repository (URL) should be accessible within your network via HTTPS (or HTTP) and without any authentication. +The repository needs to have the following structure:

+
+
+
+
${DEVON_SOFTWARE_REPOSITORY}/«tool»/«version»/«tool»-«version»[-«os»].tgz
+
+
+
+

So for every tool «tool» (java, maven, vscode, eclipse, etc.) you need to provide a folder in your repository. +Within this folder for every supported version «version» you need a subfolder. +This subfolder needs to contain the tool in that version for every operating system «os» (windows, linux, or mac - omitted if platform independent, e.g. for maven).

+
+
+
+
Shared
+
+

By default, each installation of devonfw-ide has its own physical installations of the required tools in the desired versions stored in its local software folder. +While this is great for isolation of devonfw-ide installations and to prevent side-effects, it can cause a huge waste of disc resources in case you are having many installations of devonfw-ide. +If you are a power-user of devonfw-ide with more than ten or even up to hundreds of installations on your machine, you might love to share installations of a software tool in a particular version between multiple devonfw-ide installations.

+
+
+ + + + + +
+ + +If you use this power-feature you are taking responsibility for side-effects and should not expect support. Also if you are using Windows please read Symlinks in Windows and make your mind if you really want to do so. You might also use this hint and maintain it manually without enabling the following feature. +
+
+
+

In order to do so, you only need to configure the variable DEVON_SOFTWARE_PATH in your ~/devon.properties pointing to an existing directory on your disc (e.g. /projects/software or C:\projects\software). +Then devonfw-ide will install required software into ${DEVON_SOFTWARE_PATH}/${software_name}/${software_version} as needed and create a symbolic link to it in ${DEVON_IDE_HOME}/software/${software_name}.

+
+
+

As a benefit, another devonfw-ide installation using the same software with the same version can re-use the existing installation and only needs to create the symbolic link. No more waste of having many identical JDK installations on your disc.

+
+
+

As a drawback, you need to be aware that specific tools may be "manipulated" after installation. +The most common case is that a tool allows to install plugins or extensions such as all IDEs do. Such "manipulations" will cause side-effects between the different devonfw-ide installations sharing the same version of that tool. +While this can also be a benefit it may also cause trouble. +If you have a sensitive project that should not be affected by such side-effects, you may again override the DEVON_SOFTWARE_PATH variable to the empty value in your ${DEVON_IDE_HOME}/conf/devon.properties of that sensitive installation:

+
+
+
+
DEVON_SOFTWARE_PATH=
+
+
+
+

This will disable this feature particularly for that specific sensitive devonfw-ide installation but let you use it for all other ones.

+
+
+
+
Custom
+
+

In some cases, a project might need a (proprietary) tool(s) that (are) not supported by devonfw-ide. A very simple solution is to get a release of devonfw-ide and add the tool(s) to the software folder and then distribute this modified release to your team. However, this has several drawbacks as you then have a fork of devonfw-ide and will lose your tool(s) when updating to a new release.

+
+
+

As a solution for this need, devonfw-ide lets you configure custom tools via the DEVON_IDE_CUSTOM_TOOLS variable. It can be defined in devon.properties of your settings git repository as an array of the custom tools you need to add. +For each entry the following applies:

+
+
+
    +
  • +

    It needs to have the form «tool»:«version»[:all][:«repository-url»]

    +
  • +
  • +

    The first entry must have the «repository-url» included which is used as default

    +
  • +
  • +

    Further entries will inherit this default if omitted

    +
  • +
  • +

    This URL is used in the same way as described above for a software repository.

    +
  • +
  • +

    The DEVON_SOFTWARE_REPOSITORY variable is ignored by this feature.

    +
  • +
  • +

    The optional infix :all is used to indicate that the tool is platform independent. Otherwise, an OS specific infix is appended to the URL file to download for your platform (windows, linux, or mac).

    +
  • +
+
+
+

As an example, we define it in ${DEVON_IDE_HOME}/settings/devon.properties:

+
+
+
+
DEVON_IDE_CUSTOM_TOOLS=(jboss-eap:7.1.4.GA:all:https://host.tld/projects/my-project firefox:70.0.1)
+
+
+
+

This will download and extract the following content to your software folder:

+
+ +
+

Please note that if you are not using windows, the -windows suffix will be -mac or -linux.

+
+ +
+
+
system
+
+

The system folder contains documentation and solutions for operation system specific integration. Please have a look to get the maximum out of devonfw-ide and become a very efficient power user.

+
+ +
+
+
updates
+
+

The updates folder is used for temporary data. This includes:

+
+
+
    +
  • +

    extracted archives for installation and updates

    +
  • +
  • +

    backups of old content on updates to prevent data loss

    +
  • +
+
+
+

If all works fine you may clean this folder to save some kilo- or mega-bytes. Otherwise, you can ignore it unless you are looking for a backup after a failed or unplanned upgrade.

+
+ +
+
+
workspaces
+
+

The workspaces folder contains folders for your active work. There is a workspace folder main dedicated for your primary work. You may do all your work inside the main workspace. Also, you are free to create any number of additional workspace folders named as you like (e.g. test, release, testing, my-sub-project, etc.). Using multiple workspaces is especially relevant for Eclipse as each workspace has its own Eclipse runtime instance and configuration.

+
+
+

Within the workspace folder (e.g. workspaces/main) you are again free to create sub-folders for (sub-)projects according to your needs. We assume that in most cases you clone git repositories here. The following structure shows an example layout for devonfw:

+
+
+
File structure of workspaces
+
+
/ workspaces
+├──/ main
+│  ├──/ .metadata
+│  ├──/ ide
+│  ├──/ devon4j
+│  └──/ my-thai-star
+└──/ stable
+   ├──/ .metadata
+   ├──/ ide
+   └──/ devon4j
+
+
+
+

In the main workspace you may find the cloned forks for regular work (in the example e.g. devon4j) as a base to create pull-requests while in the stable workspace there is a clone of devon4j from the official devon4j. +However, this is just an example. Some people like to create separate workspaces for development and maintenance branches with git. Other people just switch between those via git checkout.

+
+ +
+
+
Project import
+
+

The devonfw-ide supports to automatically check out and import required projects into your IDE during setup. To configure this you put a .properties file for each desired project into the projects sub-folder in your settings. Each .properties file describes one "project" which you would like to check out and (potentially) import:

+
+
+
+
path=myproject
+workingsets=Set1,Set2
+workspace=example
+git.url=http://github.com/someorg/someproject
+git.branch=develop
+build.path=.
+build.cmd=mvn -DskipTests=true -Darchetype.test.skip=true clean install
+eclipse=import
+active=true
+
+
+
+
+
.Variables of project import
+
+
+
+

|=== +|Variable|Value|Meaning +|path|e.g. myproject, will clone into ${WORKSPACE_PATH}/myproject|(required) Path into which the project is cloned. This path is relative to the workspace. +|workingsets|e.g. ws1,ws2|(optional) This will create working sets (in eclipse). Each module (eclipse project) of this project will be part of all these working sets. Working sets will be automatically created if necessary. +|workspace|main|Workspace to use for checkout and import. Default is main. +|git.url|e.g. http://github.com/someorg/someproject|(required) Git URL to use for cloning the project. +|git.branch|e.g. develop|(optional) Git branch to checkout. Git default branch is default. +|build.path|e.g. . (default)|(optional) The directory inside path where to trigger an initial build after clone or pull (if build.cmd is set). For a regular project use . to build top-level project. +|build.cmd +|e.g. mvn -DskipTests=true -Darchetype.test.skip=true clean install +|(optional) The devonfw command to invoke to build the project after clone or pull. If omitted no build is triggered. +|eclipse|e.g. import|(optional) Desired action for eclipse IDE. If you put import here all modules (eclipse projects) in the current project will be imported into eclipse. If you leave this out or put any other value for this parameter, no change in eclipse is done. +|active|true|(optional) If set to false the project is skipped during the setup. +|===

+
+
+

Please note that the .properties file is parsed via shell and not via java. So be careful with "advanced" features .properties files normally support.

+
+
+
+
+
+
+

Advanced Features

+ +
+

Cross-Platform Tooling

+ +
+
+

Git Client

+
+

If you are looking for a git client that works cross-platform we recommend to use Fork.

+
+
+
+

Draw Diagrams

+
+

To draw diagrams for your project or for blueprints in devonfw, we recommend the following cross-platform tools:

+
+
+
    +
  • +

    draw.io is a powerful generic vector painting program (similar to visio). You can get a free open-source edition for your desktop from here.

    +
  • +
  • +

    ObjectAid is a nice and easy to use eclipse plugin that you can use to quickly create UML diagrams from existing code. While class-diagrams are supported for free, you need to buy a license if you want to use the other diagram types.

    +
  • +
  • +

    PlantUML is a great tool that can render UML diagrams from simple markup that can be easily managed in git or other version-control systems together with your code. Its simplicity allows branching and merging unlike other greedy binary UML data-formats.

    +
  • +
+
+
+
+

Browser Plugins

+
+

There are tons of helpful browser plugins out there and it might be a matter of personal taste what you like to have installed. However, as we are heavily using github we want to promote octotree. +In case you also work with ZenHub you might want to install the Zenhub Browser Extension.

+
+ +
+
+

Windows Tooling

+ +
+
+

Installing software

+
+

The devon IDE already contains a lot of software. But if you need more, here are some ways to get it easily:

+
+
+
+

Chocolatey

+
+

Chocolatey is a repository for free and open source software similar to the repositories known from Linux like apt, apk, pacman, …​

+
+
+
+

Winget

+
+

Microsoft is also working on a repository for Windows called winget. It is currently in alpha state, but is expected to be integrated in the upcoming Windows 11.

+
+
+
+

Integration into Windows-Explorer

+
+

After you have set up your devonfw-ide on a windows machine, +you already have windows-explorer integration out-of-the-box. +Just right-click on the folder you would like to open in a terminal and choose from the context menu:

+
+
+
    +
  • +

    Git Bash

    +
  • +
  • +

    Open devonfw cmd shell here

    +
  • +
  • +

    Open devonfw PowerShell here

    +
  • +
  • +

    Open devonfw Cygwin Bash here (only if cygwin was installed during setup)

    +
  • +
+
+
+
+

Tabs everywhere

+
+

Many people got used to tabs that have been introduced by all major browsers:

+
+
+
+tabs in firefox +
+
Figure 1. Tabs in Firefox
+
+
+

This nice feature can be added to many other tools.

+
+
+
+

Tabs for Windows Explorer

+
+

If you want to have tabs for windows explorer simply install Clover

+
+
+
+tabs in windows explorer +
+
Figure 2. Tabs in Windows Explorer
+
+
+
+

Tabs for SSH

+
+

If you want to have tabs for your SSH client Putty (or even better Kitty that comes with WinSCP integration) you simply install SuperPutty. +BTW: Windows 10 already has an SSH client included.

+
+
+
+tabs for SSH sessions +
+
Figure 3. Tabs for SSH
+
+
+
+

Tabs for CMD

+
+

If you want to have tabs for your windows command-line you simply install ConEmu. Here you can also add other shells like Putty. +Also you should have a look at the new Windows Terminal which also supports tabs.

+
+
+
+tabs for windows shells +
+
Figure 4. Tabs for CMD
+
+
+

See integration to make ConEmu work flawless with devonfw-ide.

+
+
+
+

Windows Helpers

+ +
+
+

Handle passwords

+
+

Do you want complex passwords that differ for each account for security? Do you only want to remember a single password for simplicity? Do you want to have both? Then, you need to install KeePass right now.

+
+
+
+

Real text editor

+
+

A real developer needs a real text editor and not windows built in notepad. +The most common choice is Notepad++.

+
+
+
+

Real compression tool

+
+

Do you need to deal with ZIP files, TGZ, dpkg, etc.? Just install 7zip and forget about windows built-in ZIP support (that is buggy with long file paths, etc.).

+
+
+
+

Smarter clipboard

+
+

Do you want to paste something from the clipboard but meanwhile you had to copy something else? Just, one of the many things you can easily do with ditto.

+
+
+
+

PowerToys

+
+

Microsoft provides some extensions to improve the workflow in windows called PowerToys. They include tools like a file renamer, a way to order your windows on the screen, a color picker and more.

+
+
+
+

Sysinternals Tools

+
+

A real developer will quickly notice that the windows built-in tools to analyze processes, network connections, autostarts, etc. are quite poor. So, what you really would like is the Sysinternals-Suite. You can make process-explorer your default task manager. Use autoruns to prevent nasty background things from being started automatically. Use tcpview to figure out which process is blocking port 8080, etc.

+
+
+
+

Cope with file locks

+
+

Did you ever fail to delete a file or directory that was locked by some process and you did not even know which one it was? +Then you might love IoBit Unlocker. +See also this article.

+
+
+
+ +
+

Are you used to symbolic and hard links in Linux? Do you have to work with Windows? Would you also like to have such links in Windows? Why not? Windows supports real links (not shortcuts like in other cases). +If you even want to have it integrated in windows explorer you might want to install linkshellextension. However, you might want to disable SmartMove in the configuration if you face strange performance issues when moving folders.

+
+
+
+

Linux

+
+

Install Cygwin and get your bash in windows with ssh-agent, awk, sed, tar, and all the tools you love (or hate). Windows 10 already includes Linux as an installable feature: WSL and from Version 2004 on WSL2, which is a native Linux Kernel running on Windows (in a light weight VM).

+
+
+
+

X11

+
+

Do you want to connect via SSH and need to open an X11 app from the server? Do you want to see the GUI on your windows desktop? +No problem: Install VcXsrv.

+
+
+
+

Keyboard Freak

+
+

Are you a keyboard shortcut person? Do you want to have shortcuts for things like « and » ? +Then you should try AutoHotKey. +For the example (« and ») you can simply use this script to get started:

+
+
+
+
^<::Send {U+00AB}
+^+<::Send {U+00BB}
+
+
+
+

First, just press [ctrl][<] and [ctrl][>] ([ctrl][shift][<]). Next, create shortcuts to launch your IDE, to open your favorite tool, etc. +If you like a GUI to easily configure the scripts, that comes with a lot of extensions preinstalled, you should have a look at Ac’tive Aid.

+
+
+
+

Paint anywhere on your desktop

+
+

Do you collaborate sharing your screen, and want to mark a spot on top of what you see? Use Epic Pen to do just that.

+
+
+
+

Analyze graphs

+
+

Do you need to visualize complex graph structures? Convert them to Trivial Graph Format (.tgf), and run yEd to get an interactive visualization of your graph.

+
+
+
+

Up your screen capture game

+
+

Capture any part of your screen with a single click, directly upload to dropbox, or run a svn commit all in one go with Greenshot. Another screen capture tool where you can easily manage and edit your screenshots and also do screen recordings with is Screenpresso.

+
+
+
+

Fast Search in Windows

+
+

Everything is a desktop search utility for Windows that can rapidly find files and folders by name.

+
+ +
+
+

MacOS Tooling

+ +
+
+

Finder

+
+

If you want to open a terminal from a folder in Finder and automatically get your environment set properly for devonfw-ide you will find the perfect solution here.

+
+
+
+devonfw-ide integration in MacOS Finder +
+
+
+

So after installing (see below) the integration(s) provided here, you can easily open a terminal ready for your devonfw-ide:

+
+
+
    +
  • +

    right click ([control] + click) on file or folder in Finder

    +
  • +
  • +

    Expand the Quick-Actions sub-menu

    +
  • +
  • +

    Click on the desired action (e.g. Open devonfw-Terminal here)

    +
  • +
  • +

    Verify that you environment is properly initialized by invoking:

    +
    +
    +
    mvn -v
    +
    +
    +
  • +
+
+
+

To get this feature for macOS Terminal.app open Finder and run the workflow system/mac/terminal/Open_devonfw-Terminal_here.workflow (in ${DEVON_IDE_HOME}). For iTerm2.app (that can be installed from App Store) do the same with system/mac/iterm/Open_devonfw-iTerm_here.workflow.

+
+
+
+

Keyboard

+
+

Keyboard support is not an integration however, some users coming from other platforms may struggle with the way macOS deals with (external non-apple) keyboards. +So to make it short: if you are happy with your keyboard and shortcuts, you can skip all the following. +Otherwise, if you think that pressing keys like Home, End, etc. should just work as expected or pressing Alt Gr should allow you to type the special characters as printed on your German keyboard then here you will find a solution to your problems! +To get all automated you can just run the script system/mac/keyboard/install-mac-keyboard-support.sh (in ${DEVON_IDE_HOME}). +If you would like to understand what is going on, you want to customize the keyboard settings to your needs, or you want a keyboard layout other than German ISO, please read on.

+
+
+
+

Keyboard Layouts

+
+

Keyboard layouts allow a fine-grained mapping of each key on your keyboard to its resulting input character or behaviour. +They are macOS native features and do not need to have software running as a background service to make the keyboard mapping work (see Karabiner section below as an alternative). +They are provided as a so-called bundle (white lego brick icon). Like a macOS app this is a folder containing a Contents folder with a specific sub-folder structure. +In the Resources subfolder *.keylayout files are placed and define the exact mapping for the keyboard. +As an example we provide a Keyboard Layouts folder containing a bundle for a German keyboard mapping.

+
+
+

To install keyboard layouts simply double-click the bundle or copy it to ~/Library/Keyboard Layouts. +To actually use them go to System Preferences and select Keyboard. +Then, select the tab Input Sources. +With the + button you can add a keyboard layout for your daily usage with your Mac. +Please note that the keyboard layout shipped with devonfw-ide is called German-ISO and can be found in the Others section at the end of the list. +It can be used as an example or template, if you want to create your own layout.

+
+
+
+Keyboard Preferences / Input Sources +
+
+
+

When you have multiple mappings in place, on the top menu bar you will find a little icon next to the current time that allows you to switch between the keyboard layouts, which is very handy when you switch from your native MacBook keyboard to an external USB keyboard or vice versa. +Even for a pure MacOS geek this can be helpful in case a friend coming from Windows/Linux is supposed to type something on the Mac in a pair-programming session.

+
+
+

In our German keyboard mapping example you can use the keys like Alt Gr, etc. to type special characters as you would expect and as printed on your keyboard. +To make Pos1, End, etc. work properly across all apps please read on to the next section(s).

+
+
+

In case you would like to create your own keyboard layout you can of course edit the *.keylayout files in a text editor. +However, to make this much more comfortable, you can use the graphical editor tool Ukelele. +Besides the app itself, the Ukelele dmg file also contains a Documentation and a Resources folder. +The latter contains many keyboard layouts that you can use as a starting point.

+
+
+
+

Key Bindings

+
+

Still, various keyboard shortcuts might not work as expected for you. +Therefore, we provide you with an advanced configuration in the folder system/mac/keyboard/KeyBindings that you can copy to your ~/Library folder:

+
+
+
+
cd system/mac/keyboard/
+cp -r KeyBindings ~/Library
+
+
+
+

To make the changes work you need to log out and log in again or you can reboot. +After that, your Home (Pos1) and End buttons should work as expected including with selection via Shift and/or Command. +Also, you can use Command together with the left or right arrow key to move between words and combine it with Shift for selection. +As an example, for further customization you can press Command + < to type the unicode character «.

+
+
+

However, still some apps listen to keyboard events on a lower level and come with their own keyboard mappings. +In these apps you might still experience unexpected behaviour. +Solutions can be found in the following sub-sections.

+
+
+
+

Switch Control and Command

+
+

If you are used to windows or linux and get easily confused by the apple keyboard behaviour you might want to switch the Control and the Option key. +Open System Preferences and select Keyboard. +Then, in the first tab, click on the button Modifier Keys…​. +For every keyboard you can customize the behaviour of your modifier keys and therefore switch Control and Option as illustrated in the screenshot:

+
+
+
+Keyboard Preferences / Modifier Keys +
+
+
+

Programmers now should also disable that Control + Space is opening Spotlight Search as otherwise this shortcut can not be redefined in other apps like common IDEs.

+
+
+
+Keyboard Preferences / Shortcuts +
+
+
+
+

== Eclipse

+
+

In Eclipse, move and select by word as described above does not work. +Even worse, the most important shortcut does not work: Control + Space for code completion (content assist). +You can manually redefine the key bindings in Preferences under General > Keys. +However, with multiple IDE installations and workspaces this will quickly get tedious. +Therefore, you can Export and Import specific Preferences such as Keys Preferences to/from a *.epf (Eclipse PreFerences) file. +We have done all this for you so you can just import the file located in system/mac/keyboard/Eclipse/eclipse-mac-keybindings.epf into your Eclipse. +Happy coding.

+
+
+
+

Karabiner

+
+

If you want more dynamics and do not worry about an app that has to run in the background to make your keyboard work as you like (no relevant performance overhead), you can try Karabiner Elements. +This is a powerful tool to remap your keyboard shortcuts. +In the UI you can only directly create and edit Simple Modifications that are too limited for most use-cases. +However, using Complex Modifications you can do a lot of magic to customize the keyboard behaviour to your personal needs. +A key with any combination of modifiers can be mapped to any key with arbitrary modifiers. +This can also be bound to conditions based on the frontmost application or the keyboard model. +These complex modifications are configured as *.json files. +We have included a set with useful rules for external keyboards, programmer shortcuts, etc. +If you have Karabiner installed, you only need to copy the contents of the karabiner folder located in this directory to your ~/.config folder:

+
+
+
+
cd system/mac/keyboard/
+cp karabiner/assets/complex_modifications/*.json ~/.config/karabiner/assets/complex_modifications/
+
+
+
+

Now, if you open the Complex Modifications in the Karabiner app, you can click on the + Add rule button and will see these mappings in the pop up. +Select the rules you want to add (e.g. add all) and you are done. +Unlike other solutions, you can quickly tweak your keyboard without the need to log out and restart apps, which gives faster trial and error turnarounds. +Further, if you want to tweak your own configs, Karabiner comes with a secondary app called Karabiner-EventViewer that shows you the names of the keys, modifiers, and apps for the events you are triggering. +This is very helpful to get the config right.

+
+ +
+
+

Linux Tooling

+
+

There is nothing in this section so far. If you are a Linux user, please share your experience and provide your valuable hints.

+
+ +
+
+

Lombok

+
+

Even though not officially recommended by devon4j, some projects want to use Lombok in their project. +As this requires some tweaks for IDEs, we support you with this guide in case you want to use it.

+
+
+
+

Lombok in Eclipse

+
+

For Eclipse there is a plugin to activate Lombok support in Eclipse. +We have this already configured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon eclipse add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for Lombok-based projects, you only need to activate this plugin in your project-specific settings in lombok.properties for eclipse (replace false with true for plugin_active).

+
+
+
+

Lombok for VS-Code

+
+

For Visual Studio Code there is an extension to activate Lombok support in VS-Code. +We have this already preconfigured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon vscode add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for Lombok-based projects, you only need to activate this plugin in your project-specific settings in lombok.properties for vscode (replace false with true for plugin_active).

+
+
+
+

Lombok for IntelliJ

+
+

For IntelliJ there is a plugin to activate lombok support in IntelliJ. +Currently we have not yet configured or automated this in devonfw-ide. +Please contribute to change this. See issues #453 and #491.

+
+
+
+
+
+

Support

+ +
+

Migration from oasp4j-ide

+
+

The devonfw-ide is a completely new and innovative solution for managing the local development environment that has been created from scratch. +Releases of OASP as well as releases of devonfw until version 3.1.x are based on the old oasp4j-ide that is now considered deprecated. As devonfw-ide is a complete redesign, this will have some impact for the users. This section should assist you so you do not get lost.

+
+
+
+

Get familiar with devonfw-ide

+
+

First of all you should roughly get familiar with the new devonfw-ide. The key features and changes are:

+
+
+
    +
  • +

    platform-agnostic (supports Windows, Mac, and Linux in a single distribution)

    +
  • +
  • +

    small core (reduced the download package from ~2 gigabyte to ~2 megabyte)

    +
  • +
  • +

    fast and easy updates (built in update support)

    +
  • +
  • +

    minimum number of scripts (removed tons of end-user scripts making things much simpler)

    +
  • +
  • +

    fully automated setup (run setup script and you are ready - even for advanced features that had to be configured manually before)

    +
  • +
  • +

    single command for everything (entire CLI available via new devon command)

    +
  • +
+
+
+

For all the details you should study the documentation starting from the beginning.

+
+
+
+

Migration of existing oasp4j-ide installation

+
+
    +
  • +

    extract new devonfw-ide-scripts on top of your existing installation

    +
  • +
  • +

    run setup

    +
  • +
  • +

    done

    +
  • +
+
+
+

If you get errors:

+
+
+
    +
  • +

    ask your technical lead to fix the settings git repo for devonfw-ide, or offer to do it for them.

    +
  • +
  • +

    you need to merge the devon folder into your settings

    +
  • +
  • +

    you need to merge the devon.properties into your settings

    +
  • +
  • +

    you should check your variables[-customized][.bat] and merge required customizations into the proper configuration

    +
  • +
+
+
+
+

Hints for users after migration

+
+

Getting used to all the new commands might be tedious when starting after a migration.

+
+
+
Comparison of commands
+

|== == == == == == == == == == == = +|oasp4j-ide command|devonfw-ide command|Comment +|create-or-update-workspace|devon eclipse ws-update +.4+|actually not needed anymore as workspace is updated automatically when IDE is launched. To launch your IDE simply run devon eclipse, devon intellij, or devon vscode. If you like to get launch scripts for your IDE e.g. Eclipse just call devon eclipse --all create-script. +|create-or-update-workspace «workspace»|cd «workspace» && devon eclipse ws-update +|update-all-workspaces|devon eclipse --all ws-update +|create-or-update-workspace-vs|devon vscode ws-update

+
+
+

|devcon workspace create «workspace»|Simply create the «workspace» directory (e.g. cd workspaces && mkdir examples)|

+
+
+

|scripts/update-eclipse-workspace-settings|devon eclipse ws-reverse|To add new properties (old option --new) use devon eclipse ws-reverse-add

+
+
+

|devcon project build
+devcon devon4j build
+devcon devon4ng build +|devon build|

+
+
+

|devcon devon4j create|devon java create|

+
+
+

|devcon devon4ng create|devon ng create|

+
+
+

|devcon system *
+devcon dist * +|setup or devon ide setup|

+
+
+

|console.bat|-|Simply open terminal in selected folder. On Windows right-click folder in windows-explorer and select open devonfw CMD here.

+
+
+

|devcon help|devon help|

+
+
+

|devcon doc|Read the documentation from devonfw.com| +|== == == == == == == == == == == =

+
+
+ +
+
+

License

+
+

The product devonfw-ide is licensed under the following terms.

+
+
+

Binaries of this product have been made available to you by devonfw under the Apache Public License 2.0.

+
+
+

The documentation of this product is licensed under the terms of the Creative Commons License (Attribution-No Derivatives 4.0 International).

+
+
+

All of the source code to this product is available under licenses which are both free and open source.

+
+
+

More specifically, most of the source code is available under the Apache Public License 2.0. The remainder of the software which is not under the Apache license is available under one of a variety of other free and open source licenses. Those that require reproduction of the license text in the distribution are given below. (Note: your copy of this product may not contain code covered by one or more of the licenses listed here, depending on the exact product and version you choose.)

+
+
+

The following table shows the components that may be used. The column inclusion indicates the way the component is included:

+
+
+
    +
  • +

    directly included means the component is directly contained in the download package of devonfw-ide we provide

    +
  • +
  • +

    default setup means the component is not initially included but will be downloaded during the setup by default

    +
  • +
  • +

    optional means the component is neither initially included nor downloaded by default, but only gets downloaded and installed if explicitly triggered by you when invoking additional commands or if explicitly configured by your project.

    +
  • +
+
+
+
Third party components
+

|== == == == == == == == == == == = +|Component|Inclusion|License +|https://github.com/devonfw/ide[devonfw-ide] | Directly included |https://github.com/devonfw/ide/blob/master/LICENSE[ASL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] API | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] Implementation | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://openjdk.java.net/[OpenJDK] / AdoptOpenJDK (Java) |Default Setup| GPLv2 +|https://maven.apache.org/[Maven] | Default Setup|https://www.apache.org/licenses/LICENSE-2.0[ASL 2.0] +|https://code.visualstudio.com/[VS Code] |Optional| MIT (Terms) +|https://github.com/devonfw/extension-pack-vscode[extension-pack-vscode] |Optional|https://github.com/devonfw/extension-pack-vscode/blob/master/LICENSE[ASL 2.0] +|https://www.eclipse.org/[Eclipse] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] +|https://github.com/devonfw/cobigen[CobiGen] |Optional|https://github.com/devonfw/cobigen/blob/master/LICENSE.txt[ASL 2.0] +|https://marketplace.eclipse.org/content/tm-terminal[TM Terminal] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] (see here) +|https://github.com/iloveeclipse/anyedittools/[AnyEdit] |Optional|https://github.com/iloveeclipse/anyedittools/blob/master/LICENSE.md[EPL 1.0] +|https://checkstyle.org/eclipse-cs/[EclipseCS] |Optional|https://github.com/checkstyle/eclipse-cs/blob/master/LICENSE[LGPL 2.1] +|https://marketplace.eclipse.org/content/spotbugs-eclipse-plugin[SpotBugs Eclipse plugin] |Optional|https://github.com/spotbugs/spotbugs/blob/master/LICENSE[LGPL 2.1] +|https://www.eclemma.org/[EclEmma] |Optional|https://www.eclemma.org/license.html[EPL 1.0] +|https://basti1302.github.io/startexplorer/[StartExplorer] |Optional|http://www.wtfpl.net/txt/copying/[WTFPL 2] +|http://myregexp.com/eclipsePlugin.html[regex tester] 
|Optional|http://www.gnu.org/licenses/gpl-2.0.html[GPL 2.0] (see here) +|https://github.com/m-m-m/eclipse-templatevariables/[eclipse-templatevariables] |Optional|https://github.com/m-m-m/eclipse-templatevariables/blob/master/LICENSE.txt[ASL 2.0] +|https://nodejs.org/[Node.js] |Default Setup|https://raw.githubusercontent.com/nodejs/node/master/LICENSE[License] +|https://www.npmjs.com/[NPM] |Default Setup|https://github.com/npm/cli/blob/latest/LICENSE[Artistic License 2.0] (Terms) +|https://cli.angular.io/[Angular CLI] (ng) |Optional|https://cli.angular.io/license.html[MIT] +|http://groovy-lang.org/[Groovy]|Optional|https://github.com/apache/groovy/blob/master/LICENSE[ASL 2.0] +|https://ant.apache.org/[Apache Ant]|Optional|https://github.com/apache/ant/blob/master/LICENSE[ASL 2.0] +|https://gradle.org/[Gradle] |Optional|https://github.com/gradle/gradle/blob/master/LICENSE[ASL 2.0] +|https://jenkins.io/[Jenkins] |Optional|https://github.com/jenkinsci/jenkins/blob/master/LICENSE.txt[MIT] +|https://www.sonarsource.com/plans-and-pricing/community/[SonarQube (Community Edition)] |Optional|https://github.com/SonarSource/sonarqube/blob/master/LICENSE.txt[LGPL 3.0] +|https://www.sonarlint.org/eclipse/[SonarLint] |Optional|https://github.com/SonarSource/sonarlint-eclipse/blob/master/LICENSE.txt[LGPL 3+] +|https://github.com/devonfw/cicdgen[cicdgen] |Optional|https://github.com/devonfw/cicdgen/blob/develop/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4j[devon4j] |Optional|https://github.com/devonfw/devon4j/blob/develop/LICENSE[ASL 2.0] +|https://github.com/devonfw/devon4ng[devon4ng] |Optional|https://github.com/devonfw/devon4ng/blob/master/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4node[devon4node] |Optional|https://github.com/devonfw/devon4node/blob/develop/LICENSE.txt[ASL 2.0] +|https://www.jetbrains.com/idea/[IntelliJ IDEA] |Optional|https://www.jetbrains.com/opensource/idea/[ASL 2.0] +|http://www.jasypt.org/[jasypt] 
|Optional|http://www.jasypt.org/license.html[ASL 2.0] +|https://www.docker.com/[docker]|Optional|https://docs.docker.com/engine/#licensing[ASL 2.0] and EULA +|https://kubernetes.io/[kubernetes]|Optional|https://github.com/kubernetes/kubernetes/blob/master/LICENSE[ASL 2.0] +|== == == == == == == == == == == =

+
+
+
+

Apache Software License - Version 2.0

+
+
+
                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+
+
+

Eclipse Public License - Version 1.0

+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+
+1. DEFINITIONS
+
+"Contribution" means:
+
+a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and
+
+b) in the case of each subsequent Contributor:
+
+i) changes to the Program, and
+
+ii) additions to the Program;
+
+where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program.
+
+"Contributor" means any person or entity that distributes the Program.
+
+"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+"Program" means the Contributions distributed in accordance with this Agreement.
+
+"Recipient" means anyone who receives the Program under this Agreement, including all Contributors.
+
+2. GRANT OF RIGHTS
+
+a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form.
+
+b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+
+c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+
+d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+
+3. REQUIREMENTS
+
+A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that:
+
+a) it complies with the terms and conditions of this Agreement; and
+
+b) its license agreement:
+
+i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+
+ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+
+iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and
+
+iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange.
+
+When the Program is made available in source code form:
+
+a) it must be made available under this Agreement; and
+
+b) a copy of this Agreement must be included with each copy of the Program.
+
+Contributors may not remove or alter any copyright notices contained within the Program.
+
+Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution.
+
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved.
+
+This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation.
+
+
+
+
+

Eclipse Public License - Version 2.0

+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE (“AGREEMENT”). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+1. DEFINITIONS
+
+“Contribution” means:
+
+    a) in the case of the initial Contributor, the initial content Distributed under this Agreement, and
+    b) in the case of each subsequent Contributor:
+        i) changes to the Program, and
+        ii) additions to the Program;
+    where such changes and/or additions to the Program originate from and are Distributed by that particular Contributor. A Contribution “originates” from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include changes or additions to the Program that are not Modified Works.
+
+“Contributor” means any person or entity that Distributes the Program.
+
+“Licensed Patents” mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+“Program” means the Contributions Distributed in accordance with this Agreement.
+
+“Recipient” means anyone who receives the Program under this Agreement or any Secondary License (as applicable), including Contributors.
+
+“Derivative Works” shall mean any work, whether in Source Code or other form, that is based on (or derived from) the Program and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship.
+
+“Modified Works” shall mean any work in Source Code or other form that results from an addition to, deletion from, or modification of the contents of the Program, including, for purposes of clarity any new file in Source Code form that contains any contents of the Program. Modified Works shall not include works that contain only declarations, interfaces, types, classes, structures, or files of the Program solely in each case in order to link to, bind by name, or subclass the Program or Modified Works thereof.
+
+“Distribute” means the acts of a) distributing or b) making available in any manner that enables the transfer of a copy.
+
+“Source Code” means the form of a Program preferred for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+“Secondary License” means either the GNU General Public License, Version 2.0, or any later versions of that license, including any exceptions or additional permissions as identified by the initial Contributor.
+2. GRANT OF RIGHTS
+
+    a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, Distribute and sublicense the Contribution of such Contributor, if any, and such Derivative Works.
+    b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in Source Code or other form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+    c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to Distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+    d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+    e) Notwithstanding the terms of any Secondary License, no Contributor makes additional grants to any Recipient (other than those set forth in this Agreement) as a result of such Recipient's receipt of the Program under the terms of a Secondary License (if permitted under the terms of Section 3).
+
+3. REQUIREMENTS
+
+3.1 If a Contributor Distributes the Program in any form, then:
+
+    a) the Program must also be made available as Source Code, in accordance with section 3.2, and the Contributor must accompany the Program with a statement that the Source Code for the Program is available under this Agreement, and informs Recipients how to obtain it in a reasonable manner on or through a medium customarily used for software exchange; and
+    b) the Contributor may Distribute the Program under a license different than this Agreement, provided that such license:
+        i) effectively disclaims on behalf of all other Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+        ii) effectively excludes on behalf of all other Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+        iii) does not attempt to limit or alter the recipients' rights in the Source Code under section 3.2; and
+        iv) requires any subsequent distribution of the Program by any party to be under a license that satisfies the requirements of this section 3.
+
+3.2 When the Program is Distributed as Source Code:
+
+    a) it must be made available under this Agreement, or if the Program (i) is combined with other material in a separate file or files made available under a Secondary License, and (ii) the initial Contributor attached to the Source Code the notice described in Exhibit A of this Agreement, then the Program may be made available under the terms of such Secondary Licenses, and
+    b) a copy of this Agreement must be included with each copy of the Program.
+
+3.3 Contributors may not remove or alter any copyright, patent, trademark, attribution notices, disclaimers of warranty, or limitations of liability (‘notices’) contained within the Program from any copy of the Program which they Distribute, provided that Contributors may add their own appropriate notices.
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor (“Commercial Contributor”) hereby agrees to defend and indemnify every other Contributor (“Indemnified Contributor”) against any losses, damages and costs (collectively “Losses”) arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement, including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be Distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to Distribute the Program (including its Contributions) under the new version.
+
+Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. Nothing in this Agreement is intended to be enforceable by any entity that is not a Contributor or Recipient. No third-party beneficiary rights are created under this Agreement.
+Exhibit A – Form of Secondary Licenses Notice
+
+“This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License, v. 2.0 are satisfied: {name license(s), version(s), and exceptions or additional permissions here}.”
+
+    Simply including a copy of this Agreement, including this Exhibit A is not sufficient to license the Source Code under Secondary Licenses.
+
+    If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice.
+
+    You may add additional accurate notices of copyright ownership.
+
+
+
+
+

MIT License

+
+
+
Copyright <YEAR> <COPYRIGHT HOLDER>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+
+
+

Artistic License - Version 2.0

+
+
+
Copyright (c) 2000-2006, The Perl Foundation.
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+Preamble
+
+This license establishes the terms under which a given free software Package may be copied, modified, distributed, and/or redistributed. The intent is that the Copyright Holder maintains some artistic control over the development of that Package while still keeping the Package available as open source and free software.
+
+You are always permitted to make arrangements wholly outside of this license directly with the Copyright Holder of a given Package. If the terms of this license do not permit the full use that you propose to make of the Package, you should contact the Copyright Holder and seek a different licensing arrangement.
+Definitions
+
+"Copyright Holder" means the individual(s) or organization(s) named in the copyright notice for the entire Package.
+
+"Contributor" means any party that has contributed code or other material to the Package, in accordance with the Copyright Holder's procedures.
+
+"You" and "your" means any person who would like to copy, distribute, or modify the Package.
+
+"Package" means the collection of files distributed by the Copyright Holder, and derivatives of that collection and/or of those files. A given Package may consist of either the Standard Version, or a Modified Version.
+
+"Distribute" means providing a copy of the Package or making it accessible to anyone else, or in the case of a company or organization, to others outside of your company or organization.
+
+"Distributor Fee" means any fee that you charge for Distributing this Package or providing support for this Package to another party. It does not mean licensing fees.
+
+"Standard Version" refers to the Package if it has not been modified, or has been modified only in ways explicitly requested by the Copyright Holder.
+
+"Modified Version" means the Package, if it has been changed, and such changes were not explicitly requested by the Copyright Holder.
+
+"Original License" means this Artistic License as Distributed with the Standard Version of the Package, in its current version or as it may be modified by The Perl Foundation in the future.
+
+"Source" form means the source code, documentation source, and configuration files for the Package.
+
+"Compiled" form means the compiled bytecode, object code, binary, or any other form resulting from mechanical transformation or translation of the Source form.
+Permission for Use and Modification Without Distribution
+
+(1) You are permitted to use the Standard Version and create and use Modified Versions for any purpose without restriction, provided that you do not Distribute the Modified Version.
+Permissions for Redistribution of the Standard Version
+
+(2) You may Distribute verbatim copies of the Source form of the Standard Version of this Package in any medium without restriction, either gratis or for a Distributor Fee, provided that you duplicate all of the original copyright notices and associated disclaimers. At your discretion, such verbatim copies may or may not include a Compiled form of the Package.
+
+(3) You may apply any bug fixes, portability changes, and other modifications made available from the Copyright Holder. The resulting Package will still be considered the Standard Version, and as such will be subject to the Original License.
+Distribution of Modified Versions of the Package as Source
+
+(4) You may Distribute your Modified Version as Source (either gratis or for a Distributor Fee, and with or without a Compiled form of the Modified Version) provided that you clearly document how it differs from the Standard Version, including, but not limited to, documenting any non-standard features, executables, or modules, and provided that you do at least ONE of the following:
+
+(a) make the Modified Version available to the Copyright Holder of the Standard Version, under the Original License, so that the Copyright Holder may include your modifications in the Standard Version.
+(b) ensure that installation of your Modified Version does not prevent the user installing or running the Standard Version. In addition, the Modified Version must bear a name that is different from the name of the Standard Version.
+(c) allow anyone who receives a copy of the Modified Version to make the Source form of the Modified Version available to others under
+(i) the Original License or
+(ii) a license that permits the licensee to freely copy, modify and redistribute the Modified Version using the same licensing terms that apply to the copy that the licensee received, and requires that the Source form of the Modified Version, and of any works derived from it, be made freely available in that license fees are prohibited but Distributor Fees are allowed.
+Distribution of Compiled Forms of the Standard Version or Modified Versions without the Source
+
+(5) You may Distribute Compiled forms of the Standard Version without the Source, provided that you include complete instructions on how to get the Source of the Standard Version. Such instructions must be valid at the time of your distribution. If these instructions, at any time while you are carrying out such distribution, become invalid, you must provide new instructions on demand or cease further distribution. If you provide valid instructions or cease distribution within thirty days after you become aware that the instructions are invalid, then you do not forfeit any of your rights under this license.
+
+(6) You may Distribute a Modified Version in Compiled form without the Source, provided that you comply with Section 4 with respect to the Source of the Modified Version.
+Aggregating or Linking the Package
+
+(7) You may aggregate the Package (either the Standard Version or Modified Version) with other packages and Distribute the resulting aggregation provided that you do not charge a licensing fee for the Package. Distributor Fees are permitted, and licensing fees for other components in the aggregation are permitted. The terms of this license apply to the use and Distribution of the Standard or Modified Versions as included in the aggregation.
+
+(8) You are permitted to link Modified and Standard Versions with other works, to embed the Package in a larger work of your own, or to build stand-alone binary or bytecode versions of applications that include the Package, and Distribute the result without restriction, provided the result does not expose a direct interface to the Package.
+Items That are Not Considered Part of a Modified Version
+
+(9) Works (including, but not limited to, modules and scripts) that merely extend or make use of the Package, do not, by themselves, cause the Package to be a Modified Version. In addition, such works are not considered parts of the Package itself, and are not subject to the terms of this license.
+General Provisions
+
+(10) Any use, modification, and distribution of the Standard or Modified Versions is governed by this Artistic License. By using, modifying or distributing the Package, you accept this license. Do not use, modify, or distribute the Package, if you do not accept this license.
+
+(11) If your Modified Version has been derived from a Modified Version made by someone other than you, you are nevertheless required to ensure that your Modified Version complies with the requirements of this license.
+
+(12) This license does not grant you the right to use any trademark, service mark, tradename, or logo of the Copyright Holder.
+
+(13) This license includes the non-exclusive, worldwide, free-of-charge patent license to make, have made, use, offer to sell, sell, import and otherwise transfer the Package with respect to any patent claims licensable by the Copyright Holder that are necessarily infringed by the Package. If you institute patent litigation (including a cross-claim or counterclaim) against any party alleging that the Package constitutes direct or contributory patent infringement, then this Artistic License to you shall terminate on the date that such litigation is filed.
+
+(14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+
+

Creative Commons License - Attribution-NoDerivatives 4.0 International

+
+
+
By exercising the Licensed Rights (defined below), You accept and agree to be bound by the terms and conditions of this Creative Commons Attribution-NoDerivatives 4.0 International Public License ("Public License"). To the extent this Public License may be interpreted as a contract, You are granted the Licensed Rights in consideration of Your acceptance of these terms and conditions, and the Licensor grants You such rights in consideration of benefits the Licensor receives from making the Licensed Material available under these terms and conditions.
+
+Section 1 – Definitions.
+
+    Adapted Material means material subject to Copyright and Similar Rights that is derived from or based upon the Licensed Material and in which the Licensed Material is translated, altered, arranged, transformed, or otherwise modified in a manner requiring permission under the Copyright and Similar Rights held by the Licensor. For purposes of this Public License, where the Licensed Material is a musical work, performance, or sound recording, Adapted Material is always produced where the Licensed Material is synched in timed relation with a moving image.
+    Copyright and Similar Rights means copyright and/or similar rights closely related to copyright including, without limitation, performance, broadcast, sound recording, and Sui Generis Database Rights, without regard to how the rights are labeled or categorized. For purposes of this Public License, the rights specified in Section 2(b)(1)-(2) are not Copyright and Similar Rights.
+    Effective Technological Measures means those measures that, in the absence of proper authority, may not be circumvented under laws fulfilling obligations under Article 11 of the WIPO Copyright Treaty adopted on December 20, 1996, and/or similar international agreements.
+    Exceptions and Limitations means fair use, fair dealing, and/or any other exception or limitation to Copyright and Similar Rights that applies to Your use of the Licensed Material.
+    Licensed Material means the artistic or literary work, database, or other material to which the Licensor applied this Public License.
+    Licensed Rights means the rights granted to You subject to the terms and conditions of this Public License, which are limited to all Copyright and Similar Rights that apply to Your use of the Licensed Material and that the Licensor has authority to license.
+    Licensor means the individual(s) or entity(ies) granting rights under this Public License.
+    Share means to provide material to the public by any means or process that requires permission under the Licensed Rights, such as reproduction, public display, public performance, distribution, dissemination, communication, or importation, and to make material available to the public including in ways that members of the public may access the material from a place and at a time individually chosen by them.
+    Sui Generis Database Rights means rights other than copyright resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other essentially equivalent rights anywhere in the world.
+    You means the individual or entity exercising the Licensed Rights under this Public License. Your has a corresponding meaning.
+
+Section 2 – Scope.
+
+    License grant.
+        Subject to the terms and conditions of this Public License, the Licensor hereby grants You a worldwide, royalty-free, non-sublicensable, non-exclusive, irrevocable license to exercise the Licensed Rights in the Licensed Material to:
+            reproduce and Share the Licensed Material, in whole or in part; and
+            produce and reproduce, but not Share, Adapted Material.
+        Exceptions and Limitations. For the avoidance of doubt, where Exceptions and Limitations apply to Your use, this Public License does not apply, and You do not need to comply with its terms and conditions.
+        Term. The term of this Public License is specified in Section 6(a).
+        Media and formats; technical modifications allowed. The Licensor authorizes You to exercise the Licensed Rights in all media and formats whether now known or hereafter created, and to make technical modifications necessary to do so. The Licensor waives and/or agrees not to assert any right or authority to forbid You from making technical modifications necessary to exercise the Licensed Rights, including technical modifications necessary to circumvent Effective Technological Measures. For purposes of this Public License, simply making modifications authorized by this Section 2(a)(4) never produces Adapted Material.
+        Downstream recipients.
+            Offer from the Licensor – Licensed Material. Every recipient of the Licensed Material automatically receives an offer from the Licensor to exercise the Licensed Rights under the terms and conditions of this Public License.
+            No downstream restrictions. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, the Licensed Material if doing so restricts exercise of the Licensed Rights by any recipient of the Licensed Material.
+        No endorsement. Nothing in this Public License constitutes or may be construed as permission to assert or imply that You are, or that Your use of the Licensed Material is, connected with, or sponsored, endorsed, or granted official status by, the Licensor or others designated to receive attribution as provided in Section 3(a)(1)(A)(i).
+
+    Other rights.
+        Moral rights, such as the right of integrity, are not licensed under this Public License, nor are publicity, privacy, and/or other similar personality rights; however, to the extent possible, the Licensor waives and/or agrees not to assert any such rights held by the Licensor to the limited extent necessary to allow You to exercise the Licensed Rights, but not otherwise.
+        Patent and trademark rights are not licensed under this Public License.
+        To the extent possible, the Licensor waives any right to collect royalties from You for the exercise of the Licensed Rights, whether directly or through a collecting society under any voluntary or waivable statutory or compulsory licensing scheme. In all other cases the Licensor expressly reserves any right to collect such royalties.
+
+Section 3 – License Conditions.
+
+Your exercise of the Licensed Rights is expressly made subject to the following conditions.
+
+    Attribution.
+
+        If You Share the Licensed Material, You must:
+            retain the following if it is supplied by the Licensor with the Licensed Material:
+                identification of the creator(s) of the Licensed Material and any others designated to receive attribution, in any reasonable manner requested by the Licensor (including by pseudonym if designated);
+                a copyright notice;
+                a notice that refers to this Public License;
+                a notice that refers to the disclaimer of warranties;
+                a URI or hyperlink to the Licensed Material to the extent reasonably practicable;
+            indicate if You modified the Licensed Material and retain an indication of any previous modifications; and
+            indicate the Licensed Material is licensed under this Public License, and include the text of, or the URI or hyperlink to, this Public License.
+        For the avoidance of doubt, You do not have permission under this Public License to Share Adapted Material.
+        You may satisfy the conditions in Section 3(a)(1) in any reasonable manner based on the medium, means, and context in which You Share the Licensed Material. For example, it may be reasonable to satisfy the conditions by providing a URI or hyperlink to a resource that includes the required information.
+        If requested by the Licensor, You must remove any of the information required by Section 3(a)(1)(A) to the extent reasonably practicable.
+
+Section 4 – Sui Generis Database Rights.
+
+Where the Licensed Rights include Sui Generis Database Rights that apply to Your use of the Licensed Material:
+
+    for the avoidance of doubt, Section 2(a)(1) grants You the right to extract, reuse, reproduce, and Share all or a substantial portion of the contents of the database, provided You do not Share Adapted Material;
+    if You include all or a substantial portion of the database contents in a database in which You have Sui Generis Database Rights, then the database in which You have Sui Generis Database Rights (but not its individual contents) is Adapted Material; and
+    You must comply with the conditions in Section 3(a) if You Share all or a substantial portion of the contents of the database.
+
+For the avoidance of doubt, this Section 4 supplements and does not replace Your obligations under this Public License where the Licensed Rights include other Copyright and Similar Rights.
+
+Section 5 – Disclaimer of Warranties and Limitation of Liability.
+
+    Unless otherwise separately undertaken by the Licensor, to the extent possible, the Licensor offers the Licensed Material as-is and as-available, and makes no representations or warranties of any kind concerning the Licensed Material, whether express, implied, statutory, or other. This includes, without limitation, warranties of title, merchantability, fitness for a particular purpose, non-infringement, absence of latent or other defects, accuracy, or the presence or absence of errors, whether or not known or discoverable. Where disclaimers of warranties are not allowed in full or in part, this disclaimer may not apply to You.
+    To the extent possible, in no event will the Licensor be liable to You on any legal theory (including, without limitation, negligence) or otherwise for any direct, special, indirect, incidental, consequential, punitive, exemplary, or other losses, costs, expenses, or damages arising out of this Public License or use of the Licensed Material, even if the Licensor has been advised of the possibility of such losses, costs, expenses, or damages. Where a limitation of liability is not allowed in full or in part, this limitation may not apply to You.
+
+    The disclaimer of warranties and limitation of liability provided above shall be interpreted in a manner that, to the extent possible, most closely approximates an absolute disclaimer and waiver of all liability.
+
+Section 6 – Term and Termination.
+
+    This Public License applies for the term of the Copyright and Similar Rights licensed here. However, if You fail to comply with this Public License, then Your rights under this Public License terminate automatically.
+
+    Where Your right to use the Licensed Material has terminated under Section 6(a), it reinstates:
+        automatically as of the date the violation is cured, provided it is cured within 30 days of Your discovery of the violation; or
+        upon express reinstatement by the Licensor.
+    For the avoidance of doubt, this Section 6(b) does not affect any right the Licensor may have to seek remedies for Your violations of this Public License.
+    For the avoidance of doubt, the Licensor may also offer the Licensed Material under separate terms or conditions or stop distributing the Licensed Material at any time; however, doing so will not terminate this Public License.
+    Sections 1, 5, 6, 7, and 8 survive termination of this Public License.
+
+Section 7 – Other Terms and Conditions.
+
+    The Licensor shall not be bound by any additional or different terms or conditions communicated by You unless expressly agreed.
+    Any arrangements, understandings, or agreements regarding the Licensed Material not stated herein are separate from and independent of the terms and conditions of this Public License.
+
+Section 8 – Interpretation.
+
+    For the avoidance of doubt, this Public License does not, and shall not be interpreted to, reduce, limit, restrict, or impose conditions on any use of the Licensed Material that could lawfully be made without permission under this Public License.
+    To the extent possible, if any provision of this Public License is deemed unenforceable, it shall be automatically reformed to the minimum extent necessary to make it enforceable. If the provision cannot be reformed, it shall be severed from this Public License without affecting the enforceability of the remaining terms and conditions.
+    No term or condition of this Public License will be waived and no failure to comply consented to unless expressly agreed to by the Licensor.
+    Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority.
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 2.1

+
+
+
 Version 2.1, February 1999
+
+Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL.  It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users.
+
+This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below.
+
+When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things.
+
+To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it.
+
+For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights.
+
+We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library.
+
+To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others.
+
+Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license.
+
+Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs.
+
+When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library.
+
+We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances.
+
+For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License.
+
+In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system.
+
+Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library.
+
+The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you".
+
+A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables.
+
+The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".)
+
+"Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library.
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does.
+
+1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) The modified work must itself be a software library.
+    b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change.
+    c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License.
+    d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful.
+
+    (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices.
+
+Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy.
+
+This option is useful when you wish to copy part of the code of the Library into a program that is not a library.
+
+4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange.
+
+If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code.
+
+5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License.
+
+However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables.
+
+When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law.
+
+If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.)
+
+Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself.
+
+6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications.
+
+You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things:
+
+    a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.)
+    b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with.
+    c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution.
+    d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place.
+    e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy.
+
+For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute.
+
+7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above.
+    b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it.
+
+10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License.
+
+11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation.
+
+14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Libraries
+
+If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License).
+
+To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the library's name and an idea of what it does.
+Copyright (C) year  name of author
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2.1 of the License, or (at your option) any later version.
+
+This library is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with this library; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the library, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright interest in
+the library `Frob' (a library for tweaking knobs) written
+by James Random Hacker.
+
+signature of Ty Coon, 1 April 1990
+Ty Coon, President of Vice
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 3

+
+
+
Version 3, 29 June 2007
+
+Copyright © 2007 Free Software Foundation, Inc. <https://fsf.org/>
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+
+This version of the GNU Lesser General Public License incorporates the terms and conditions of version 3 of the GNU General Public License, supplemented by the additional permissions listed below.
+0. Additional Definitions.
+
+As used herein, “this License” refers to version 3 of the GNU Lesser General Public License, and the “GNU GPL” refers to version 3 of the GNU General Public License.
+
+“The Library” refers to a covered work governed by this License, other than an Application or a Combined Work as defined below.
+
+An “Application” is any work that makes use of an interface provided by the Library, but which is not otherwise based on the Library. Defining a subclass of a class defined by the Library is deemed a mode of using an interface provided by the Library.
+
+A “Combined Work” is a work produced by combining or linking an Application with the Library. The particular version of the Library with which the Combined Work was made is also called the “Linked Version”.
+
+The “Minimal Corresponding Source” for a Combined Work means the Corresponding Source for the Combined Work, excluding any source code for portions of the Combined Work that, considered in isolation, are based on the Application, and not on the Linked Version.
+
+The “Corresponding Application Code” for a Combined Work means the object code and/or source code for the Application, including any data and utility programs needed for reproducing the Combined Work from the Application, but excluding the System Libraries of the Combined Work.
+1. Exception to Section 3 of the GNU GPL.
+
+You may convey a covered work under sections 3 and 4 of this License without being bound by section 3 of the GNU GPL.
+2. Conveying Modified Versions.
+
+If you modify a copy of the Library, and, in your modifications, a facility refers to a function or data to be supplied by an Application that uses the facility (other than as an argument passed when the facility is invoked), then you may convey a copy of the modified version:
+
+    a) under this License, provided that you make a good faith effort to ensure that, in the event an Application does not supply the function or data, the facility still operates, and performs whatever part of its purpose remains meaningful, or
+    b) under the GNU GPL, with none of the additional permissions of this License applicable to that copy.
+
+3. Object Code Incorporating Material from Library Header Files.
+
+The object code form of an Application may incorporate material from a header file that is part of the Library. You may convey such object code under terms of your choice, provided that, if the incorporated material is not limited to numerical parameters, data structure layouts and accessors, or small macros, inline functions and templates (ten or fewer lines in length), you do both of the following:
+
+    a) Give prominent notice with each copy of the object code that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the object code with a copy of the GNU GPL and this license document.
+
+4. Combined Works.
+
+You may convey a Combined Work under terms of your choice that, taken together, effectively do not restrict modification of the portions of the Library contained in the Combined Work and reverse engineering for debugging such modifications, if you also do each of the following:
+
+    a) Give prominent notice with each copy of the Combined Work that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the Combined Work with a copy of the GNU GPL and this license document.
+    c) For a Combined Work that displays copyright notices during execution, include the copyright notice for the Library among these notices, as well as a reference directing the user to the copies of the GNU GPL and this license document.
+    d) Do one of the following:
+        0) Convey the Minimal Corresponding Source under the terms of this License, and the Corresponding Application Code in a form suitable for, and under terms that permit, the user to recombine or relink the Application with a modified version of the Linked Version to produce a modified Combined Work, in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.
+        1) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (a) uses at run time a copy of the Library already present on the user's computer system, and (b) will operate properly with a modified version of the Library that is interface-compatible with the Linked Version.
+    e) Provide Installation Information, but only if you would otherwise be required to provide such information under section 6 of the GNU GPL, and only to the extent that such information is necessary to install and execute a modified version of the Combined Work produced by recombining or relinking the Application with a modified version of the Linked Version. (If you use option 4d0, the Installation Information must accompany the Minimal Corresponding Source and Corresponding Application Code. If you use option 4d1, you must provide the Installation Information in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.)
+
+5. Combined Libraries.
+
+You may place library facilities that are a work based on the Library side by side in a single library together with other library facilities that are not Applications and are not covered by this License, and convey such a combined library under terms of your choice, if you do both of the following:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities, conveyed under the terms of this License.
+    b) Give prominent notice with the combined library that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+6. Revised Versions of the GNU Lesser General Public License.
+
+The Free Software Foundation may publish revised and/or new versions of the GNU Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library as you received it specifies that a certain numbered version of the GNU Lesser General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that published version or of any later version published by the Free Software Foundation. If the Library as you received it does not specify a version number of the GNU Lesser General Public License, you may choose any version of the GNU Lesser General Public License ever published by the Free Software Foundation.
+
+If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library.
+
+
+
+
+

GNU GENERAL PUBLIC LICENSE - Version 2

+
+
+
 Version 2, June 1991
+
+Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA
+
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too.
+
+When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things.
+
+To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it.
+
+For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights.
+
+We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software.
+
+Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations.
+
+Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all.
+
+The precise terms and conditions for copying, distribution and modification follow.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does.
+
+1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change.
+    b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License.
+    c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code.
+
+4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it.
+
+6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License.
+
+7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation.
+
+10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Programs
+
+If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms.
+
+To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the program's name and an idea of what it does.
+Copyright (C) yyyy  name of author
+
+This program is free software; you can redistribute it and/or
+modify it under the terms of the GNU General Public License
+as published by the Free Software Foundation; either version 2
+of the License, or (at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this when it starts in an interactive mode:
+
+Gnomovision version 69, Copyright (C) year name of author
+Gnomovision comes with ABSOLUTELY NO WARRANTY; for details
+type `show w'.  This is free software, and you are welcome
+to redistribute it under certain conditions; type `show c'
+for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright
+interest in the program `Gnomovision'
+(which makes passes at compilers) written
+by James Hacker.
+
+signature of Ty Coon, 1 April 1989
+Ty Coon, President of Vice
+
+
+
+
+

DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2

+
+
+
            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+                    Version 2, December 2004
+
+ Copyright (C) 2004 Sam Hocevar
+  14 rue de Plaisance, 75014 Paris, France
+ Everyone is permitted to copy and distribute verbatim or modified
+ copies of this license document, and changing it is allowed as long
+ as the name is changed.
+
+            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. You just DO WHAT THE FUCK YOU WANT TO.
+
+
+
+
+

License of Node.js

+
+
+
Node.js is licensed for use as follows:
+
+"""
+Copyright Node.js contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+This license applies to parts of Node.js originating from the
+https://github.com/joyent/node repository:
+
+"""
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+The Node.js license applies to all parts of Node.js that are not externally
+maintained libraries.
+
+The externally maintained libraries used by Node.js are:
+
+- Acorn, located at deps/acorn, is licensed as follows:
+  """
+    Copyright (C) 2012-2018 by various contributors (see AUTHORS)
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- Acorn plugins, located at deps/acorn-plugins, is licensed as follows:
+  """
+    Copyright (C) 2017-2018 by Adrian Heine
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- c-ares, located at deps/cares, is licensed as follows:
+  """
+    Copyright (c) 2007 - 2018, Daniel Stenberg with many contributors, see AUTHORS
+    file.
+
+    Copyright 1998 by the Massachusetts Institute of Technology.
+
+    Permission to use, copy, modify, and distribute this software and its
+    documentation for any purpose and without fee is hereby granted, provided that
+    the above copyright notice appear in all copies and that both that copyright
+    notice and this permission notice appear in supporting documentation, and that
+    the name of M.I.T. not be used in advertising or publicity pertaining to
+    distribution of the software without specific, written prior permission.
+    M.I.T. makes no representations about the suitability of this software for any
+    purpose.  It is provided "as is" without express or implied warranty.
+  """
+
+- ICU, located at deps/icu-small, is licensed as follows:
+  """
+    COPYRIGHT AND PERMISSION NOTICE (ICU 58 and later)
+
+    Copyright © 1991-2019 Unicode, Inc. All rights reserved.
+    Distributed under the Terms of Use in https://www.unicode.org/copyright.html.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of the Unicode data files and any associated documentation
+    (the "Data Files") or Unicode software and any associated documentation
+    (the "Software") to deal in the Data Files or Software
+    without restriction, including without limitation the rights to use,
+    copy, modify, merge, publish, distribute, and/or sell copies of
+    the Data Files or Software, and to permit persons to whom the Data Files
+    or Software are furnished to do so, provided that either
+    (a) this copyright and permission notice appear with all copies
+    of the Data Files or Software, or
+    (b) this copyright and permission notice appear in associated
+    Documentation.
+
+    THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
+    ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT OF THIRD PARTY RIGHTS.
+    IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
+    NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
+    DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+    DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+    TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+    PERFORMANCE OF THE DATA FILES OR SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale,
+    use or other dealings in these Data Files or Software without prior
+    written authorization of the copyright holder.
+
+    ---------------------
+
+    Third-Party Software Licenses
+
+    This section contains third-party software notices and/or additional
+    terms for licensed third-party software components included within ICU
+    libraries.
+
+    1. ICU License - ICU 1.8.1 to ICU 57.1
+
+    COPYRIGHT AND PERMISSION NOTICE
+
+    Copyright (c) 1995-2016 International Business Machines Corporation and others
+    All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, and/or sell copies of the Software, and to permit persons
+    to whom the Software is furnished to do so, provided that the above
+    copyright notice(s) and this permission notice appear in all copies of
+    the Software and that both the above copyright notice(s) and this
+    permission notice appear in supporting documentation.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
+    OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
+    HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY
+    SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER
+    RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
+    CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+    CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale, use
+    or other dealings in this Software without prior written authorization
+    of the copyright holder.
+
+    All trademarks and registered trademarks mentioned herein are the
+    property of their respective owners.
+
+    2. Chinese/Japanese Word Break Dictionary Data (cjdict.txt)
+
+     #     The Google Chrome software developed by Google is licensed under
+     # the BSD license. Other software included in this distribution is
+     # provided under other licenses, as set forth below.
+     #
+     #  The BSD License
+     #  http://opensource.org/licenses/bsd-license.php
+     #  Copyright (C) 2006-2008, Google Inc.
+     #
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     # modification, are permitted provided that the following conditions are met:
+     #
+     #  Redistributions of source code must retain the above copyright notice,
+     # this list of conditions and the following disclaimer.
+     #  Redistributions in binary form must reproduce the above
+     # copyright notice, this list of conditions and the following
+     # disclaimer in the documentation and/or other materials provided with
+     # the distribution.
+     #  Neither the name of  Google Inc. nor the names of its
+     # contributors may be used to endorse or promote products derived from
+     # this software without specific prior written permission.
+     #
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+     # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+     # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+     # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+     # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+     # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+     #
+     #
+     #  The word list in cjdict.txt are generated by combining three word lists
+     # listed below with further processing for compound word breaking. The
+     # frequency is generated with an iterative training against Google web
+     # corpora.
+     #
+     #  * Libtabe (Chinese)
+     #    - https://sourceforge.net/project/?group_id=1519
+     #    - Its license terms and conditions are shown below.
+     #
+     #  * IPADIC (Japanese)
+     #    - http://chasen.aist-nara.ac.jp/chasen/distribution.html
+     #    - Its license terms and conditions are shown below.
+     #
+     #  ---------COPYING.libtabe ---- BEGIN--------------------
+     #
+     #  /*
+     #   * Copyright (c) 1999 TaBE Project.
+     #   * Copyright (c) 1999 Pai-Hsiang Hsiao.
+     #   * All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the TaBE Project nor the names of its
+     #   *   contributors may be used to endorse or promote products derived
+     #   *   from this software without specific prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  /*
+     #   * Copyright (c) 1999 Computer Systems and Communication Lab,
+     #   *                    Institute of Information Science, Academia
+     #       *                    Sinica. All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the Computer Systems and Communication Lab
+     #   *   nor the names of its contributors may be used to endorse or
+     #   *   promote products derived from this software without specific
+     #   *   prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  Copyright 1996 Chih-Hao Tsai @ Beckman Institute,
+     #      University of Illinois
+     #  c-tsai4@uiuc.edu  http://casper.beckman.uiuc.edu/~c-tsai4
+     #
+     #  ---------------COPYING.libtabe-----END--------------------------------
+     #
+     #
+     #  ---------------COPYING.ipadic-----BEGIN-------------------------------
+     #
+     #  Copyright 2000, 2001, 2002, 2003 Nara Institute of Science
+     #  and Technology.  All Rights Reserved.
+     #
+     #  Use, reproduction, and distribution of this software is permitted.
+     #  Any copy of this software, whether in its original form or modified,
+     #  must include both the above copyright notice and the following
+     #  paragraphs.
+     #
+     #  Nara Institute of Science and Technology (NAIST),
+     #  the copyright holders, disclaims all warranties with regard to this
+     #  software, including all implied warranties of merchantability and
+     #  fitness, in no event shall NAIST be liable for
+     #  any special, indirect or consequential damages or any damages
+     #  whatsoever resulting from loss of use, data or profits, whether in an
+     #  action of contract, negligence or other tortuous action, arising out
+     #  of or in connection with the use or performance of this software.
+     #
+     #  A large portion of the dictionary entries
+     #  originate from ICOT Free Software.  The following conditions for ICOT
+     #  Free Software applies to the current dictionary as well.
+     #
+     #  Each User may also freely distribute the Program, whether in its
+     #  original form or modified, to any third party or parties, PROVIDED
+     #  that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear
+     #  on, or be attached to, the Program, which is distributed substantially
+     #  in the same form as set out herein and that such intended
+     #  distribution, if actually made, will neither violate or otherwise
+     #  contravene any of the laws and regulations of the countries having
+     #  jurisdiction over the User or the intended distribution itself.
+     #
+     #  NO WARRANTY
+     #
+     #  The program was produced on an experimental basis in the course of the
+     #  research and development conducted during the project and is provided
+     #  to users as so produced on an experimental basis.  Accordingly, the
+     #  program is provided without any warranty whatsoever, whether express,
+     #  implied, statutory or otherwise.  The term "warranty" used herein
+     #  includes, but is not limited to, any warranty of the quality,
+     #  performance, merchantability and fitness for a particular purpose of
+     #  the program and the nonexistence of any infringement or violation of
+     #  any right of any third party.
+     #
+     #  Each user of the program will agree and understand, and be deemed to
+     #  have agreed and understood, that there is no warranty whatsoever for
+     #  the program and, accordingly, the entire risk arising from or
+     #  otherwise connected with the program is assumed by the user.
+     #
+     #  Therefore, neither ICOT, the copyright holder, or any other
+     #  organization that participated in or was otherwise related to the
+     #  development of the program and their respective officials, directors,
+     #  officers and other employees shall be held liable for any and all
+     #  damages, including, without limitation, general, special, incidental
+     #  and consequential damages, arising out of or otherwise in connection
+     #  with the use or inability to use the program or any product, material
+     #  or result produced or otherwise obtained by using the program,
+     #  regardless of whether they have been advised of, or otherwise had
+     #  knowledge of, the possibility of such damages at any time during the
+     #  project or thereafter.  Each user will be deemed to have agreed to the
+     #  foregoing by his or her commencement of use of the program.  The term
+     #  "use" as used herein includes, but is not limited to, the use,
+     #  modification, copying and distribution of the program and the
+     #  production of secondary products from the program.
+     #
+     #  In the case where the program, whether in its original form or
+     #  modified, was distributed or delivered to or received by a user from
+     #  any person, organization or entity other than ICOT, unless it makes or
+     #  grants independently of ICOT any specific warranty to the user in
+     #  writing, such person, organization or entity, will also be exempted
+     #  from and not be held liable to the user for any such damages as noted
+     #  above as far as the program is concerned.
+     #
+     #  ---------------COPYING.ipadic-----END----------------------------------
+
+    3. Lao Word Break Dictionary Data (laodict.txt)
+
+     #  Copyright (c) 2013 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     # Project: http://code.google.com/p/lao-dictionary/
+     # Dictionary: http://lao-dictionary.googlecode.com/git/Lao-Dictionary.txt
+     # License: http://lao-dictionary.googlecode.com/git/Lao-Dictionary-LICENSE.txt
+     #              (copied below)
+     #
+     #  This file is derived from the above dictionary, with slight
+     #  modifications.
+     #  ----------------------------------------------------------------------
+     #  Copyright (C) 2013 Brian Eugene Wilson, Robert Martin Campbell.
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification,
+     #  are permitted provided that the following conditions are met:
+     #
+     #
+     # Redistributions of source code must retain the above copyright notice, this
+     #  list of conditions and the following disclaimer. Redistributions in
+     #  binary form must reproduce the above copyright notice, this list of
+     #  conditions and the following disclaimer in the documentation and/or
+     #  other materials provided with the distribution.
+     #
+     #
+     # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
+     # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     # OF THE POSSIBILITY OF SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    4. Burmese Word Break Dictionary Data (burmesedict.txt)
+
+     #  Copyright (c) 2014 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     #  This list is part of a project hosted at:
+     #    github.com/kanyawtech/myanmar-karen-word-lists
+     #
+     #  --------------------------------------------------------------------------
+     #  Copyright (c) 2013, LeRoy Benjamin Sharon
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification, are permitted provided that the following conditions
+     #  are met: Redistributions of source code must retain the above
+     #  copyright notice, this list of conditions and the following
+     #  disclaimer.  Redistributions in binary form must reproduce the
+     #  above copyright notice, this list of conditions and the following
+     #  disclaimer in the documentation and/or other materials provided
+     #  with the distribution.
+     #
+     #    Neither the name Myanmar Karen Word Lists, nor the names of its
+     #    contributors may be used to endorse or promote products derived
+     #    from this software without specific prior written permission.
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     #  CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     #  INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     #  MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     #  DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
+     #  BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+     #  EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+     #  TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+     #  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+     #  ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
+     #  TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
+     #  THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+     #  SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    5. Time Zone Database
+
+      ICU uses the public domain data and code derived from Time Zone
+    Database for its time zone support. The ownership of the TZ database
+    is explained in BCP 175: Procedure for Maintaining the Time Zone
+    Database section 7.
+
+     # 7.  Database Ownership
+     #
+     #    The TZ database itself is not an IETF Contribution or an IETF
+     #    document.  Rather it is a pre-existing and regularly updated work
+     #    that is in the public domain, and is intended to remain in the
+     #    public domain.  Therefore, BCPs 78 [RFC5378] and 79 [RFC3979] do
+     #    not apply to the TZ Database or contributions that individuals make
+     #    to it.  Should any claims be made and substantiated against the TZ
+     #    Database, the organization that is providing the IANA
+     #    Considerations defined in this RFC, under the memorandum of
+     #    understanding with the IETF, currently ICANN, may act in accordance
+     #    with all competent court orders.  No ownership claims will be made
+     #    by ICANN or the IETF Trust on the database or the code.  Any person
+     #    making a contribution to the database or code waives all rights to
+     #    future claims in that contribution or in the TZ Database.
+
+    6. Google double-conversion
+
+    Copyright 2006-2011, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- libuv, located at deps/uv, is licensed as follows:
+  """
+    libuv is licensed for use as follows:
+
+    == ==
+    Copyright (c) 2015-present libuv project contributors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+    == ==
+
+    This license applies to parts of libuv originating from the
+    https://github.com/joyent/libuv repository:
+
+    == ==
+
+    Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+
+    == ==
+
+    This license applies to all parts of libuv that are not externally
+    maintained libraries.
+
+    The externally maintained libraries used by libuv are:
+
+      - tree.h (from FreeBSD), copyright Niels Provos. Two clause BSD license.
+
+      - inet_pton and inet_ntop implementations, contained in src/inet.c, are
+        copyright the Internet Systems Consortium, Inc., and licensed under the ISC
+        license.
+
+      - stdint-msvc2008.h (from msinttypes), copyright Alexander Chemeris. Three
+        clause BSD license.
+
+      - pthread-fixes.c, copyright Google Inc. and Sony Mobile Communications AB.
+        Three clause BSD license.
+
+      - android-ifaddrs.h, android-ifaddrs.c, copyright Berkeley Software Design
+        Inc, Kenneth MacKay and Emergya (Cloud4all, FP7/2007-2013, grant agreement
+        n° 289016). Three clause BSD license.
+  """
+
+- llhttp, located at deps/llhttp, is licensed as follows:
+  """
+    This software is licensed under the MIT License.
+
+    Copyright Fedor Indutny, 2018.
+
+    Permission is hereby granted, free of charge, to any person obtaining a
+    copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to permit
+    persons to whom the Software is furnished to do so, subject to the
+    following conditions:
+
+    The above copyright notice and this permission notice shall be included
+    in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+    OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+    NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+    DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+    OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+    USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- OpenSSL, located at deps/openssl, is licensed as follows:
+  """
+    Copyright (c) 1998-2019 The OpenSSL Project.  All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    1. Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+    2. Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+
+    3. All advertising materials mentioning features or use of this
+    software must display the following acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
+
+    4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
+    endorse or promote products derived from this software without
+    prior written permission. For written permission, please contact
+    openssl-core@openssl.org.
+
+    5. Products derived from this software may not be called "OpenSSL"
+    nor may "OpenSSL" appear in their names without prior written
+    permission of the OpenSSL Project.
+
+    6. Redistributions of any form whatsoever must retain the following
+    acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
+
+    THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
+    EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+    PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
+    ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+    NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+    LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+    HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+    STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+    ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+    OF THE POSSIBILITY OF SUCH DAMAGE.
+    == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == ==
+
+    This product includes cryptographic software written by Eric Young
+    (eay@cryptsoft.com).  This product includes software written by Tim
+    Hudson (tjh@cryptsoft.com).
+  """
+
+- Punycode.js, located at lib/punycode.js, is licensed as follows:
+  """
+    Copyright Mathias Bynens <https://mathiasbynens.be/>
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- V8, located at deps/v8, is licensed as follows:
+  """
+    This license applies to all parts of V8 that are not externally
+    maintained libraries.  The externally maintained libraries used by V8
+    are:
+
+      - PCRE test suite, located in
+        test/mjsunit/third_party/regexp-pcre/regexp-pcre.js.  This is based on the
+        test suite from PCRE-7.3, which is copyrighted by the University
+        of Cambridge and Google, Inc.  The copyright notice and license
+        are embedded in regexp-pcre.js.
+
+      - Layout tests, located in test/mjsunit/third_party/object-keys.  These are
+        based on layout tests from webkit.org which are copyrighted by
+        Apple Computer, Inc. and released under a 3-clause BSD license.
+
+      - Strongtalk assembler, the basis of the files assembler-arm-inl.h,
+        assembler-arm.cc, assembler-arm.h, assembler-ia32-inl.h,
+        assembler-ia32.cc, assembler-ia32.h, assembler-x64-inl.h,
+        assembler-x64.cc, assembler-x64.h, assembler-mips-inl.h,
+        assembler-mips.cc, assembler-mips.h, assembler.cc and assembler.h.
+        This code is copyrighted by Sun Microsystems Inc. and released
+        under a 3-clause BSD license.
+
+      - Valgrind client API header, located at src/third_party/valgrind/valgrind.h
+        This is released under the BSD license.
+
+      - The Wasm C/C++ API headers, located at third_party/wasm-api/wasm.{h,hh}
+        This is released under the Apache license. The API's upstream prototype
+        implementation also formed the basis of V8's implementation in
+        src/wasm/c-api.cc.
+
+    These libraries have their own licenses; we recommend you read them,
+    as their terms may differ from the terms below.
+
+    Further license information can be found in LICENSE files located in
+    sub-directories.
+
+    Copyright 2014, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- SipHash, located at deps/v8/src/third_party/siphash, is licensed as follows:
+  """
+    SipHash reference C implementation
+
+    Copyright (c) 2016 Jean-Philippe Aumasson <jeanphilippe.aumasson@gmail.com>
+
+    To the extent possible under law, the author(s) have dedicated all
+    copyright and related and neighboring rights to this software to the public
+    domain worldwide. This software is distributed without any warranty.
+  """
+
+- zlib, located at deps/zlib, is licensed as follows:
+  """
+    zlib.h -- interface of the 'zlib' general purpose compression library
+    version 1.2.11, January 15th, 2017
+
+    Copyright (C) 1995-2017 Jean-loup Gailly and Mark Adler
+
+    This software is provided 'as-is', without any express or implied
+    warranty.  In no event will the authors be held liable for any damages
+    arising from the use of this software.
+
+    Permission is granted to anyone to use this software for any purpose,
+    including commercial applications, and to alter it and redistribute it
+    freely, subject to the following restrictions:
+
+    1. The origin of this software must not be misrepresented; you must not
+    claim that you wrote the original software. If you use this software
+    in a product, an acknowledgment in the product documentation would be
+    appreciated but is not required.
+    2. Altered source versions must be plainly marked as such, and must not be
+    misrepresented as being the original software.
+    3. This notice may not be removed or altered from any source distribution.
+
+    Jean-loup Gailly        Mark Adler
+    jloup@gzip.org          madler@alumni.caltech.edu
+  """
+
+- npm, located at deps/npm, is licensed as follows:
+  """
+    The npm application
+    Copyright (c) npm, Inc. and Contributors
+    Licensed on the terms of The Artistic License 2.0
+
+    Node package dependencies of the npm application
+    Copyright (c) their respective copyright owners
+    Licensed on their respective license terms
+
+    The npm public registry at https://registry.npmjs.org
+    and the npm website at https://www.npmjs.com
+    Operated by npm, Inc.
+    Use governed by terms published on https://www.npmjs.com
+
+    "Node.js"
+    Trademark Joyent, Inc., https://joyent.com
+    Neither npm nor npm, Inc. are affiliated with Joyent, Inc.
+
+    The Node.js application
+    Project of Node Foundation, https://nodejs.org
+
+    The npm Logo
+    Copyright (c) Mathias Pettersson and Brian Hammond
+
+    "Gubblebum Blocky" typeface
+    Copyright (c) Tjarda Koster, https://jelloween.deviantart.com
+    Used with permission
+
+    --------
+
+    The Artistic License 2.0
+
+    Copyright (c) 2000-2006, The Perl Foundation.
+
+    Everyone is permitted to copy and distribute verbatim copies
+    of this license document, but changing it is not allowed.
+
+    Preamble
+
+    This license establishes the terms under which a given free software
+    Package may be copied, modified, distributed, and/or redistributed.
+    The intent is that the Copyright Holder maintains some artistic
+    control over the development of that Package while still keeping the
+    Package available as open source and free software.
+
+    You are always permitted to make arrangements wholly outside of this
+    license directly with the Copyright Holder of a given Package.  If the
+    terms of this license do not permit the full use that you propose to
+    make of the Package, you should contact the Copyright Holder and seek
+    a different licensing arrangement.
+
+    Definitions
+
+        "Copyright Holder" means the individual(s) or organization(s)
+        named in the copyright notice for the entire Package.
+
+        "Contributor" means any party that has contributed code or other
+        material to the Package, in accordance with the Copyright Holder's
+        procedures.
+
+        "You" and "your" means any person who would like to copy,
+        distribute, or modify the Package.
+
+        "Package" means the collection of files distributed by the
+        Copyright Holder, and derivatives of that collection and/or of
+        those files. A given Package may consist of either the Standard
+        Version, or a Modified Version.
+
+        "Distribute" means providing a copy of the Package or making it
+        accessible to anyone else, or in the case of a company or
+        organization, to others outside of your company or organization.
+
+        "Distributor Fee" means any fee that you charge for Distributing
+        this Package or providing support for this Package to another
+        party.  It does not mean licensing fees.
+
+        "Standard Version" refers to the Package if it has not been
+        modified, or has been modified only in ways explicitly requested
+        by the Copyright Holder.
+
+        "Modified Version" means the Package, if it has been changed, and
+        such changes were not explicitly requested by the Copyright
+        Holder.
+
+        "Original License" means this Artistic License as Distributed with
+        the Standard Version of the Package, in its current version or as
+        it may be modified by The Perl Foundation in the future.
+
+        "Source" form means the source code, documentation source, and
+        configuration files for the Package.
+
+        "Compiled" form means the compiled bytecode, object code, binary,
+        or any other form resulting from mechanical transformation or
+        translation of the Source form.
+
+    Permission for Use and Modification Without Distribution
+
+    (1)  You are permitted to use the Standard Version and create and use
+    Modified Versions for any purpose without restriction, provided that
+    you do not Distribute the Modified Version.
+
+    Permissions for Redistribution of the Standard Version
+
+    (2)  You may Distribute verbatim copies of the Source form of the
+    Standard Version of this Package in any medium without restriction,
+    either gratis or for a Distributor Fee, provided that you duplicate
+    all of the original copyright notices and associated disclaimers.  At
+    your discretion, such verbatim copies may or may not include a
+    Compiled form of the Package.
+
+    (3)  You may apply any bug fixes, portability changes, and other
+    modifications made available from the Copyright Holder.  The resulting
+    Package will still be considered the Standard Version, and as such
+    will be subject to the Original License.
+
+    Distribution of Modified Versions of the Package as Source
+
+    (4)  You may Distribute your Modified Version as Source (either gratis
+    or for a Distributor Fee, and with or without a Compiled form of the
+    Modified Version) provided that you clearly document how it differs
+    from the Standard Version, including, but not limited to, documenting
+    any non-standard features, executables, or modules, and provided that
+    you do at least ONE of the following:
+
+        (a)  make the Modified Version available to the Copyright Holder
+        of the Standard Version, under the Original License, so that the
+        Copyright Holder may include your modifications in the Standard
+        Version.
+
+        (b)  ensure that installation of your Modified Version does not
+        prevent the user installing or running the Standard Version. In
+        addition, the Modified Version must bear a name that is different
+        from the name of the Standard Version.
+
+        (c)  allow anyone who receives a copy of the Modified Version to
+        make the Source form of the Modified Version available to others
+        under
+
+            (i)  the Original License or
+
+            (ii)  a license that permits the licensee to freely copy,
+            modify and redistribute the Modified Version using the same
+            licensing terms that apply to the copy that the licensee
+            received, and requires that the Source form of the Modified
+            Version, and of any works derived from it, be made freely
+            available in that license fees are prohibited but Distributor
+            Fees are allowed.
+
+    Distribution of Compiled Forms of the Standard Version
+    or Modified Versions without the Source
+
+    (5)  You may Distribute Compiled forms of the Standard Version without
+    the Source, provided that you include complete instructions on how to
+    get the Source of the Standard Version.  Such instructions must be
+    valid at the time of your distribution.  If these instructions, at any
+    time while you are carrying out such distribution, become invalid, you
+    must provide new instructions on demand or cease further distribution.
+    If you provide valid instructions or cease distribution within thirty
+    days after you become aware that the instructions are invalid, then
+    you do not forfeit any of your rights under this license.
+
+    (6)  You may Distribute a Modified Version in Compiled form without
+    the Source, provided that you comply with Section 4 with respect to
+    the Source of the Modified Version.
+
+    Aggregating or Linking the Package
+
+    (7)  You may aggregate the Package (either the Standard Version or
+    Modified Version) with other packages and Distribute the resulting
+    aggregation provided that you do not charge a licensing fee for the
+    Package.  Distributor Fees are permitted, and licensing fees for other
+    components in the aggregation are permitted. The terms of this license
+    apply to the use and Distribution of the Standard or Modified Versions
+    as included in the aggregation.
+
+    (8) You are permitted to link Modified and Standard Versions with
+    other works, to embed the Package in a larger work of your own, or to
+    build stand-alone binary or bytecode versions of applications that
+    include the Package, and Distribute the result without restriction,
+    provided the result does not expose a direct interface to the Package.
+
+    Items That are Not Considered Part of a Modified Version
+
+    (9) Works (including, but not limited to, modules and scripts) that
+    merely extend or make use of the Package, do not, by themselves, cause
+    the Package to be a Modified Version.  In addition, such works are not
+    considered parts of the Package itself, and are not subject to the
+    terms of this license.
+
+    General Provisions
+
+    (10)  Any use, modification, and distribution of the Standard or
+    Modified Versions is governed by this Artistic License. By using,
+    modifying or distributing the Package, you accept this license. Do not
+    use, modify, or distribute the Package, if you do not accept this
+    license.
+
+    (11)  If your Modified Version has been derived from a Modified
+    Version made by someone other than you, you are nevertheless required
+    to ensure that your Modified Version complies with the requirements of
+    this license.
+
+    (12)  This license does not grant you the right to use any trademark,
+    service mark, tradename, or logo of the Copyright Holder.
+
+    (13)  This license includes the non-exclusive, worldwide,
+    free-of-charge patent license to make, have made, use, offer to sell,
+    sell, import and otherwise transfer the Package with respect to any
+    patent claims licensable by the Copyright Holder that are necessarily
+    infringed by the Package. If you institute patent litigation
+    (including a cross-claim or counterclaim) against any party alleging
+    that the Package constitutes direct or contributory patent
+    infringement, then this Artistic License to you shall terminate on the
+    date that such litigation is filed.
+
+    (14)  Disclaimer of Warranty:
+    THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS
+    IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR
+    NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL
+    LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL
+    BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+    DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF
+    ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+    --------
+  """
+
+- GYP, located at tools/gyp, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- inspector_protocol, located at tools/inspector_protocol, is licensed as follows:
+  """
+    // Copyright 2016 The Chromium Authors. All rights reserved.
+    //
+    // Redistribution and use in source and binary forms, with or without
+    // modification, are permitted provided that the following conditions are
+    // met:
+    //
+    //    * Redistributions of source code must retain the above copyright
+    // notice, this list of conditions and the following disclaimer.
+    //    * Redistributions in binary form must reproduce the above
+    // copyright notice, this list of conditions and the following disclaimer
+    // in the documentation and/or other materials provided with the
+    // distribution.
+    //    * Neither the name of Google Inc. nor the names of its
+    // contributors may be used to endorse or promote products derived from
+    // this software without specific prior written permission.
+    //
+    // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- jinja2, located at tools/inspector_protocol/jinja2, is licensed as follows:
+  """
+    Copyright (c) 2009 by the Jinja Team, see AUTHORS for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+
+        * The names of the contributors may not be used to endorse or
+          promote products derived from this software without specific
+          prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- markupsafe, located at tools/inspector_protocol/markupsafe, is licensed as follows:
+  """
+    Copyright (c) 2010 by Armin Ronacher and contributors.  See AUTHORS
+    for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms of the software as well
+    as documentation, with or without modification, are permitted provided
+    that the following conditions are met:
+
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+
+    * The names of the contributors may not be used to endorse or
+      promote products derived from this software without specific
+      prior written permission.
+
+    THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+    CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
+    NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+    OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+    EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+    PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+    PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+    NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+    SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+    DAMAGE.
+  """
+
+- cpplint.py, located at tools/cpplint.py, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- ESLint, located at tools/node_modules/eslint, is licensed as follows:
+  """
+    Copyright JS Foundation and other contributors, https://js.foundation
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- babel-eslint, located at tools/node_modules/babel-eslint, is licensed as follows:
+  """
+    Copyright (c) 2014-2016 Sebastian McKenzie <sebmck@gmail.com>
+
+    MIT License
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- gtest, located at test/cctest/gtest, is licensed as follows:
+  """
+    Copyright 2008, Google Inc.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+        * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- nghttp2, located at deps/nghttp2, is licensed as follows:
+  """
+    The MIT License
+
+    Copyright (c) 2012, 2014, 2015, 2016 Tatsuhiro Tsujikawa
+    Copyright (c) 2012, 2014, 2015, 2016 nghttp2 contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- node-inspect, located at deps/node-inspect, is licensed as follows:
+  """
+    Copyright Node.js contributors. All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+  """
+
+- large_pages, located at src/large_pages, is licensed as follows:
+  """
+     Copyright (C) 2018 Intel Corporation
+
+     Permission is hereby granted, free of charge, to any person obtaining a copy
+     of this software and associated documentation files (the "Software"),
+     to deal in the Software without restriction, including without limitation
+     the rights to use, copy, modify, merge, publish, distribute, sublicense,
+     and/or sell copies of the Software, and to permit persons to whom
+     the Software is furnished to do so, subject to the following conditions:
+
+     The above copyright notice and this permission notice shall be included
+     in all copies or substantial portions of the Software.
+
+     THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+     OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+     FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
+     THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES
+     OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+     ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
+     OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- caja, located at lib/internal/freeze_intrinsics.js, is licensed as follows:
+  """
+     Adapted from SES/Caja - Copyright (C) 2011 Google Inc.
+     Copyright (C) 2018 Agoric
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+  """
+
+- brotli, located at deps/brotli, is licensed as follows:
+  """
+    Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- HdrHistogram, located at deps/histogram, is licensed as follows:
+  """
+    The code in this repository code was Written by Gil Tene, Michael Barker,
+    and Matt Warren, and released to the public domain, as explained at
+    http://creativecommons.org/publicdomain/zero/1.0/
+
+    For users of this code who wish to consume it under the "BSD" license
+    rather than under the public domain or CC0 contribution text mentioned
+    above, the code found under this directory is *also* provided under the
+    following license (commonly referred to as the BSD 2-Clause License). This
+    license does not detract from the above stated release of the code into
+    the public domain, and simply represents an additional license granted by
+    the Author.
+
+    -----------------------------------------------------------------------------
+    ** Beginning of "BSD 2-Clause License" text. **
+
+     Copyright (c) 2012, 2013, 2014 Gil Tene
+     Copyright (c) 2014 Michael Barker
+     Copyright (c) 2014 Matt Warren
+     All rights reserved.
+
+     Redistribution and use in source and binary forms, with or without
+     modification, are permitted provided that the following conditions are met:
+
+     1. Redistributions of source code must retain the above copyright notice,
+        this list of conditions and the following disclaimer.
+
+     2. Redistributions in binary form must reproduce the above copyright notice,
+        this list of conditions and the following disclaimer in the documentation
+        and/or other materials provided with the distribution.
+
+     THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+     AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+     IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+     ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+     LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+     INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+     CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+     THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- node-heapdump, located at src/heap_utils.cc, is licensed as follows:
+  """
+    ISC License
+
+    Copyright (c) 2012, Ben Noordhuis <info@bnoordhuis.nl>
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    === src/compat.h src/compat-inl.h ===
+
+    ISC License
+
+    Copyright (c) 2014, StrongLoop Inc.
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- rimraf, located at lib/internal/fs/rimraf.js, is licensed as follows:
+  """
+    The ISC License
+
+    Copyright (c) Isaac Z. Schlueter and Contributors
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+    IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- uvwasi, located at deps/uvwasi, is licensed as follows:
+  """
+    MIT License
+
+    Copyright (c) 2019 Colin Ihrig and Contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in all
+    copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+    SOFTWARE.
+  """
+
+
+
+
+

MICROSOFT SOFTWARE LICENSE TERMS

+
+
+
MICROSOFT VISUAL STUDIO CODE
+
+These license terms are an agreement between you and Microsoft Corporation (or based on where you live, one of its affiliates). They apply to the software named above. The terms also apply to any Microsoft services or updates for the software, except to the extent those have different terms.
+
+IF YOU COMPLY WITH THESE LICENSE TERMS, YOU HAVE THE RIGHTS BELOW.
+
+    1. INSTALLATION AND USE RIGHTS.
+        a. General. You may use any number of copies of the software to develop and test your applications, including deployment within your internal corporate network.
+        b. Demo use. The uses permitted above include use of the software in demonstrating your applications.
+        c. Third Party Components. The software may include third party components with separate legal notices or governed by other agreements, as may be described in the ThirdPartyNotices file accompanying the software.
+        d. Extensions. The software gives you the option to download other Microsoft and third party software packages from our extension marketplace or package managers. Those packages are under their own licenses, and not this agreement. Microsoft does not distribute, license or provide any warranties for any of the third party packages. By accessing or using our extension marketplace, you agree to the extension marketplace terms located at https://aka.ms/vsmarketplace-ToU.
+    2. DATA.
+        a. Data Collection. The software may collect information about you and your use of the software, and send that to Microsoft. Microsoft may use this information to provide services and improve our products and services. You may opt-out of many of these scenarios, but not all, as described in the product documentation located at https://code.visualstudio.com/docs/supporting/faq#_how-to-disable-telemetry-reporting. There may also be some features in the software that may enable you and Microsoft to collect data from users of your applications. If you use these features, you must comply with applicable law, including providing appropriate notices to users of your applications together with Microsoft’s privacy statement. Our privacy statement is located at https://go.microsoft.com/fwlink/?LinkID=824704. You can learn more about data collection and use in the help documentation and our privacy statement. Your use of the software operates as your consent to these practices.
+        c. Processing of Personal Data. To the extent Microsoft is a processor or subprocessor of personal data in connection with the software, Microsoft makes the commitments in the European Union General Data Protection Regulation Terms of the Online Services Terms to all customers effective May 25, 2018, at https://go.microsoft.com/?linkid=9840733.
+    3. UPDATES. The software may periodically check for updates and download and install them for you. You may obtain updates only from Microsoft or authorized sources. Microsoft may need to update your system to provide you with updates. You agree to receive these automatic updates without any additional notice. Updates may not include or support all existing software features, services, or peripheral devices. If you do not want automatic updates, you may turn them off by following the instructions in the documentation at https://go.microsoft.com/fwlink/?LinkID=616397.
+    4. FEEDBACK. If you give feedback about the software to Microsoft, you give to Microsoft, without charge, the right to use, share and commercialize your feedback in any way and for any purpose. You will not give feedback that is subject to a license that requires Microsoft to license its software or documentation to third parties because we include your feedback in them. These rights survive this agreement.
+    5. SCOPE OF LICENSE. This license applies to the Visual Studio Code product. Source code for Visual Studio Code is available at https://github.com/Microsoft/vscode under the MIT license agreement. The software is licensed, not sold. This agreement only gives you some rights to use the software. Microsoft reserves all other rights. Unless applicable law gives you more rights despite this limitation, you may use the software only as expressly permitted in this agreement. In doing so, you must comply with any technical limitations in the software that only allow you to use it in certain ways. You may not
+        reverse engineer, decompile or disassemble the software, or otherwise attempt to derive the source code for the software except and solely to the extent required by third party licensing terms governing use of certain open source components that may be included in the software;
+        remove, minimize, block or modify any notices of Microsoft or its suppliers in the software;
+        use the software in any way that is against the law;
+        share, publish, rent or lease the software, or provide the software as a stand-alone offering for others to use.
+    6. SUPPORT SERVICES. Because this software is “as is,” we may not provide support services for it.
+    7. ENTIRE AGREEMENT. This agreement, and the terms for supplements, updates, Internet-based services and support services that you use, are the entire agreement for the software and support services.
+    8. EXPORT RESTRICTIONS. You must comply with all domestic and international export laws and regulations that apply to the software, which include restrictions on destinations, end-users, and end use. For further information on export restrictions, see https://www.microsoft.com/exporting.
+    9. APPLICABLE LAW. If you acquired the software in the United States, Washington law applies to interpretation of and claims for breach of this agreement, and the laws of the state where you live apply to all other claims. If you acquired the software in any other country, its laws apply.
+    10. CONSUMER RIGHTS; REGIONAL VARIATIONS. This agreement describes certain legal rights. You may have other rights, including consumer rights, under the laws of your state or country. Separate and apart from your relationship with Microsoft, you may also have rights with respect to the party from which you acquired the software. This agreement does not change those other rights if the laws of your state or country do not permit it to do so. For example, if you acquired the software in one of the below regions, or mandatory country law applies, then the following provisions apply to you:
+        a. Australia. You have statutory guarantees under the Australian Consumer Law and nothing in this agreement is intended to affect those rights.
+        b. Canada. If you acquired this software in Canada, you may stop receiving updates by turning off the automatic update feature, disconnecting your device from the Internet (if and when you re-connect to the Internet, however, the software will resume checking for and installing updates), or uninstalling the software. The product documentation, if any, may also specify how to turn off updates for your specific device or software.
+        c. Germany and Austria.
+            Warranty. The properly licensed software will perform substantially as described in any Microsoft materials that accompany the software. However, Microsoft gives no contractual guarantee in relation to the licensed software.
+            Limitation of Liability. In case of intentional conduct, gross negligence, claims based on the Product Liability Act, as well as, in case of death or personal or physical injury, Microsoft is liable according to the statutory law.
+
+        Subject to the foregoing clause (ii), Microsoft will only be liable for slight negligence if Microsoft is in breach of such material contractual obligations, the fulfillment of which facilitate the due performance of this agreement, the breach of which would endanger the purpose of this agreement and the compliance with which a party may constantly trust in (so-called "cardinal obligations"). In other cases of slight negligence, Microsoft will not be liable for slight negligence.
+    11. DISCLAIMER OF WARRANTY. The software is licensed “as-is.” You bear the risk of using it. Microsoft gives no express warranties, guarantees or conditions. To the extent permitted under your local laws, Microsoft excludes the implied warranties of merchantability, fitness for a particular purpose and non-infringement.
+
+    12. LIMITATION ON AND EXCLUSION OF DAMAGES. You can recover from Microsoft and its suppliers only direct damages up to U.S. $5.00. You cannot recover any other damages, including consequential, lost profits, special, indirect or incidental damages.
+
+    This limitation applies to (a) anything related to the software, services, content (including code) on third party Internet sites, or third party applications; and (b) claims for breach of contract, breach of warranty, guarantee or condition, strict liability, negligence, or other tort to the extent permitted by applicable law.
+
+    It also applies even if Microsoft knew or should have known about the possibility of the damages. The above limitation or exclusion may not apply to you because your state or country may not allow the exclusion or limitation of incidental, consequential or other damages.
+
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/devonfw-ide-introduction.html b/docs/ide/1.0/devonfw-ide-introduction.html new file mode 100644 index 00000000..fb87e541 --- /dev/null +++ b/docs/ide/1.0/devonfw-ide-introduction.html @@ -0,0 +1,562 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Introduction

+
+
+

devonfw provides a solution to building applications which combine best-in-class frameworks and libraries +as well as industry proven practices and code conventions. +It massively speeds up development, reduces risks and helps deliver better results.

+
+
+

This document contains the instructions for the tool devonfw-ide to set up and maintain your development tools including your favorite IDE (integrated development environment).

+
+ +
+

Features

+
+

Every developer needs great tools to work efficiently. Setting up these tools manually can be tedious and error-prone. Furthermore, some projects may require different versions and configurations of such tools. Especially configurations like code-formatters should be consistent within a project to avoid diff-wars.

+
+
+

The devonfw-ide will solve these issues. Here are the features you will find through devonfw-ide:

+
+
+
    +
  • +

    Efficient
    +Set up your IDE within minutes tailored for the requirements of your project.

    +
  • +
  • +

    Automated
    +Automate the setup and update, avoid manual steps and mistakes.

    +
  • +
  • +

    Simple
    +KISS (Keep It Small and Simple), no native installers that globally mess your OS or tool-integration that break with every release. Instead, use templates and simple shell scripts.

    +
  • +
  • +

    Configurable
    +You can change the configuration depending on your needs. Furthermore, the settings contain configuration templates for the different tools (see configurator).

    +
  • +
  • +

    Maintainable
    +For your project you should copy these settings to an own git repository that can be maintained and updated to manage the tool configurations during the project lifecycle. If you use GitHub or GitLab every developer can easily suggest changes and improvements to these settings via pull/merge requests, which is easier to manage with big teams.

    +
  • +
  • +

    Customizable
    +Do you need an additional tool you had never heard of before? Put it in the software folder of the structure. The devon CLI will then automatically add it to your PATH variable.
    +Furthermore, you can create your own commandlet for your additional tool. For closed-source tools you can create your own archive and distribute it to your team members as long as you comply with the terms and licenses of these tools.

    +
  • +
  • +

    Multi-platform
    +It works on all major platforms: Windows, Mac and Linux.

    +
  • +
  • +

    Multi-tenancy
    +You can have several instances of the devonfw-ide "installed" on your machine for different projects with different tools, tool versions and configurations. You won’t need to set up any physical installation nor changing your operating system. "Installations" of devonfw-ide do not interfere with each other nor with other installed software.

    +
  • +
  • +

    Multiple Workspaces
    +It supports working with different workspaces on different branches. You can create and update new workspaces with a few clicks. You can see the workspace name in the title-bar of your IDE so you do not get confused and work on the right branch.

    +
  • +
  • +

    Free
    +The devonfw-ide is free just like everything from devonfw. See LICENSE for details.

    +
  • +
+
+
+
+

IDEs

+
+

We support the following IDEs:

+
+
+ +
+
+
+

Platforms

+
+

We support the following platforms:

+
+
+ +
+
+
+

Build-Systems

+
+

We support the following build-systems:

+
+
+ +
+
+

However, also other IDEs, platforms, or tools can be easily integrated as commandlet.

+
+
+
+

Motivation

+
+

TL;DR? Lets talk to developers a correct language. Here are some examples with devonfw-ide:

+
+
+
+
[/]$ devon
+You are not inside a devonfw-ide installation: /
+[/]$ cd /projects/devonfw
+[devonfw]$ mvn
+zsh: command not found: mvn
+[devonfw]$ devon
+devonfw-ide environment variables have been set for /projects/devonfw in workspace main
+[devonfw]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/devonfw/software/maven
+Java version: 1.8.0_191, vendor: Oracle Corporation, runtime: /projects/devonfw/software/java
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[devonfw]$ cd /projects/ide-test/workspaces/test/my-project
+[my-project]$ devon
+devonfw-ide environment variables have been set for /projects/ide-test in workspace test
+[my-project]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/ide-test/software/maven
+Java version: 11.0.2, vendor: Oracle Corporation, runtime: /projects/ide-test/software/jdk/Contents/Home
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[ide-test]$ devon eclipse
+launching Eclipse for workspace test...
+[my-project]$ devon build
+[INFO] Scanning for projects...
+...
+[INFO] BUILD SUCCESS
+
+
+
+

This was just a very simple demo of what devonfw-ide can do. For further details have a look at our CLI documentation.

+
+
+

Now you might ask:

+
+
+
    +
  • +

    But I use Windows/Linux/MacOS/… - it works on all platforms!

    +
  • +
  • +

    But how about Windows CMD or Power-Shell? - it works!

    +
  • +
  • +

    But what if I use cygwin or git-bash on windows? - it works!

    +
  • +
  • +

    But I love to use ConEmu or Commander - it works with full integration!

    +
  • +
  • +

    How about macOS Terminal or iTerm2? - it works with full integration!

    +
  • +
  • +

    But I use Zsh - it works!

    +
  • +
  • +

    …​? - it works!

    +
  • +
+
+
+

Wow! So let’s get started with download & setup.

+
+ +
+
+

Setup

+ +
+
+

Prerequisites

+
+

We try to make it as simple as possible for you. However, there are some minimal prerequisites:

+
+
+
    +
  • +

    You need to have a tool to extract *.tar.gz files (tar and gzip). On Windows before Version 10 (1803) use 7-zip. On all other platforms this comes out of the box.

    +
  • +
  • +

    You need to have git and curl installed.

    +
    +
      +
    • +

      On Windows you only need to download and install git for windows. This also ships with bash and curl.

      +
    • +
    • +

      On Linux you might need to install the above tools in case they are not present (e.g. sudo apt-get install git curl or sudo yum install git-core curl)

      +
    • +
    • +

      On MacOS you only need to download and install git for mac.

      +
    • +
    +
    +
  • +
+
+
+
+

Download

+
+

The latest release of devonfw-ide can be downloaded from here (You can find all releases in maven central).

+
+
+
+

Install

+
+

Create a central folder like C:\projects or /projects. Inside this folder, create a sub-folder for your new project such as my-project and extract the contents of the downloaded archive (devonfw-ide-scripts-*.tar.gz) to this new folder. Run the command setup in this folder (on Windows by double-clicking setup.bat). +That’s all. To get started, read the usage.

+
+
+
+

Uninstall

+
+

To "uninstall" your devonfw-ide you only need to call the following command:

+
+
+
+
devon ide uninstall
+
+
+
+

Then you can delete the devonfw-ide top-level folder(s) (${DEVON_IDE_HOME}).

+
+
+

The devonfw-ide is designed to be non-invasive to your operating system and computer. Therefore it is not "installed" on your system in a classical way. Instead you just create a folder and extract the downloaded archive to it. You only have to install some specific prerequisites like git in advance in the regular way. All the other software remains locally in your devonfw-ide folder. However, there are the following exceptions (which are reverted by devon ide uninstall):

+
+
+
    +
  • +

    The devon command is copied to your home directory (~/.devon/devon)

    +
  • +
  • +

    The devon alias is added to your shell config (~/.bashrc and ~/.zshrc, search for alias devon="source ~/.devon/devon").

    +
  • +
  • +

    On Windows the devon.bat command is copied to your home directory (%USERPROFILE%\scripts\devon.bat)

    +
  • +
  • +

    On Windows this %USERPROFILE%\scripts directory is added to the PATH of your user.

    +
  • +
  • +

    The devonfw-ide will download third-party software to your ~/Downloads/devonfw-ide folder to reduce redundant storage. You have to delete this folder manually, as we do not want to be responsible for data loss in case users manually put files there.

    +
  • +
+
+
+
+

Testing SNAPSHOT releases

+
+

Whenever a story in devonfw-ide is completed by merging a PR, +our GitHub Actions will build a new SNAPSHOT release and on success deploy it to Nexus on OSSRH. +You can therefore find the latest devonfw SNAPSHOT releases here. +Simply choose the latest SNAPSHOT version folder and then, inside it, the *.tar.gz file for the latest version. +Once downloaded, you can proceed as with official releases (see install).

+
+
+

If you test the latest SNAPSHOT please also give feedback to bug or feature tickets to let us know if things are working or not. +Thanks for your testing, support and help to make devonfw better!

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/devonfw-ide-support.html b/docs/ide/1.0/devonfw-ide-support.html new file mode 100644 index 00000000..b3b14c5c --- /dev/null +++ b/docs/ide/1.0/devonfw-ide-support.html @@ -0,0 +1,3042 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Support

+
+ +
+

Migration from oasp4j-ide

+
+

The devonfw-ide is a completely new and innovative solution for managing the local development environment that has been created from scratch. +Releases of OASP as well as releases of devonfw until version 3.1.x are based on the old oasp4j-ide that is now considered deprecated. As devonfw-ide is a complete redesign, this will have some impact on the users. This section should assist you so you do not get lost.

+
+
+
+

Get familiar with devonfw-ide

+
+

First of all you should roughly get familiar with the new devonfw-ide. The key features and changes are:

+
+
+
    +
  • +

    platform-agnostic (supports Windows, Mac, and Linux in a single distribution)

    +
  • +
  • +

    small core (reduced the download package from ~2 gigabyte to ~2 megabyte)

    +
  • +
  • +

    fast and easy updates (built in update support)

    +
  • +
  • +

    minimum number of scripts (removed tons of end-user scripts making things much simpler)

    +
  • +
  • +

    fully automated setup (run setup script and you are ready - even for advanced features that had to be configured manually before)

    +
  • +
  • +

    single command for everything (entire CLI available via new devon command)

    +
  • +
+
+
+

For all the details you should study the documentation starting from the beginning.

+
+
+
+

Migration of existing oasp4j-ide installation

+
+
    +
  • +

    extract new devonfw-ide-scripts on top of your existing installation

    +
  • +
  • +

    run setup

    +
  • +
  • +

    done

    +
  • +
+
+
+

If you get errors:

+
+
+
    +
  • +

    ask your technical lead to fix the settings git repo for devonfw-ide or offer him to do it for you.

    +
  • +
  • +

    you need to merge the devon folder into your settings

    +
  • +
  • +

    you need to merge the devon.properties into your settings

    +
  • +
  • +

    you should check your variables[-customized][.bat] and merge required customizations into the proper configuration

    +
  • +
+
+
+
+

Hints for users after migration

+
+

Getting used to all the new commands might be tedious when starting after a migration.

+
+
+
Comparison of commands
+

|=== +|oasp4j-ide command|devonfw-ide command|Comment +|create-or-update-workspace|devon eclipse ws-update +.4+|actually not needed anymore as workspace is updated automatically when IDE is launched. To launch your IDE simply run devon eclipse, devon intellij, or devon vscode. If you like to get launch scripts for your IDE e.g. Eclipse just call devon eclipse --all create-script. +|create-or-update-workspace «workspace»|cd «workspace» && devon eclipse ws-update +|update-all-workspaces|devon eclipse --all ws-update +|create-or-update-workspace-vs|devon vscode ws-update

+
+
+

|devcon workspace create «workspace»|Simply create the «workspace» directory (e.g. cd workspaces && mkdir examples)|

+
+
+

|scripts/update-eclipse-workspace-settings|devon eclipse ws-reverse|To add new properties (old option --new) use devon eclipse ws-reverse-add

+
+
+

|devcon project build
+devcon devon4j build
+devcon devon4ng build +|devon build|

+
+
+

|devcon devon4j create|devon java create|

+
+
+

|devcon devon4ng create|devon ng create|

+
+
+

|devcon system *
+devcon dist * +|setup or devon ide setup|

+
+
+

|console.bat|-|Simply open a terminal in the selected folder. On Windows, right-click the folder in Windows Explorer and select "open devonfw CMD here".

+
+
+

|devcon help|devon help|

+
+
+

|devcon doc|Read the documentation from devonfw.com| +|== == == == == == == == == == == =

+
+
+ +
+
+

License

+
+

The product devonfw-ide is licensed under the following terms.

+
+
+

Binaries of this product have been made available to you by devonfw under the Apache Public License 2.0.

+
+
+

The documentation of this product is licensed under the terms of the Creative Commons License (Attribution-NoDerivatives 4.0 International).

+
+
+

All of the source code to this product is available under licenses which are both free and open source.

+
+
+

More specifically, most of the source code is available under the Apache Public License 2.0. The remainder of the software which is not under the Apache license is available under one of a variety of other free and open source licenses. Those that require reproduction of the license text in the distribution are given below. (Note: your copy of this product may not contain code covered by one or more of the licenses listed here, depending on the exact product and version you choose.)

+
+
+

The following table shows the components that may be used. The column inclusion indicates the way the component is included:

+
+
+
    +
  • +

    directly included means the component is directly contained in the download package of devonfw-ide we provide

    +
  • +
  • +

    default setup means the component is not initially included but will be downloaded during the setup by default

    +
  • +
  • +

    optional means the component is neither initially included nor downloaded by default, but only gets downloaded and installed if explicitly triggered by you when invoking additional commands or if explicitly configured by your project.

    +
  • +
+
+
+
Third party components
+

|== == == == == == == == == == == = +|Component|Inclusion|License +|https://github.com/devonfw/ide[devonfw-ide] | Directly included |https://github.com/devonfw/ide/blob/master/LICENSE[ASL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] API | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] Implementation | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://openjdk.java.net/[OpenJDK] / AdoptOpenJDK (Java) |Default Setup| GPLv2 +|https://maven.apache.org/[Maven] | Default Setup|https://www.apache.org/licenses/LICENSE-2.0[ASL 2.0] +|https://code.visualstudio.com/[VS Code] |Optional| MIT (Terms) +|https://github.com/devonfw/extension-pack-vscode[extension-pack-vscode] |Optional|https://github.com/devonfw/extension-pack-vscode/blob/master/LICENSE[ASL 2.0] +|https://www.eclipse.org/[Eclipse] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] +|https://github.com/devonfw/cobigen[CobiGen] |Optional|https://github.com/devonfw/cobigen/blob/master/LICENSE.txt[ASL 2.0] +|https://marketplace.eclipse.org/content/tm-terminal[TM Terminal] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] (see here) +|https://github.com/iloveeclipse/anyedittools/[AnyEdit] |Optional|https://github.com/iloveeclipse/anyedittools/blob/master/LICENSE.md[EPL 1.0] +|https://checkstyle.org/eclipse-cs/[EclipseCS] |Optional|https://github.com/checkstyle/eclipse-cs/blob/master/LICENSE[LGPL 2.1] +|https://marketplace.eclipse.org/content/spotbugs-eclipse-plugin[SpotBugs Eclipse plugin] |Optional|https://github.com/spotbugs/spotbugs/blob/master/LICENSE[LGPL 2.1] +|https://www.eclemma.org/[EclEmma] |Optional|https://www.eclemma.org/license.html[EPL 1.0] +|https://basti1302.github.io/startexplorer/[StartExplorer] |Optional|http://www.wtfpl.net/txt/copying/[WTFPL 2] +|http://myregexp.com/eclipsePlugin.html[regex tester] 
|Optional|http://www.gnu.org/licenses/gpl-2.0.html[GPL 2.0] (see here) +|https://github.com/m-m-m/eclipse-templatevariables/[eclipse-templatevariables] |Optional|https://github.com/m-m-m/eclipse-templatevariables/blob/master/LICENSE.txt[ASL 2.0] +|https://nodejs.org/[Node.js] |Default Setup|https://raw.githubusercontent.com/nodejs/node/master/LICENSE[License] +|https://www.npmjs.com/[NPM] |Default Setup|https://github.com/npm/cli/blob/latest/LICENSE[Artistic License 2.0] (Terms) +|https://cli.angular.io/[Angular CLI] (ng) |Optional|https://cli.angular.io/license.html[MIT] +|http://groovy-lang.org/[Groovy]|Optional|https://github.com/apache/groovy/blob/master/LICENSE[ASL 2.0] +|https://ant.apache.org/[Apache Ant]|Optional|https://github.com/apache/ant/blob/master/LICENSE[ASL 2.0] +|https://gradle.org/[Gradle] |Optional|https://github.com/gradle/gradle/blob/master/LICENSE[ASL 2.0] +|https://jenkins.io/[Jenkins] |Optional|https://github.com/jenkinsci/jenkins/blob/master/LICENSE.txt[MIT] +|https://www.sonarsource.com/plans-and-pricing/community/[SonarQube (Community Edition)] |Optional|https://github.com/SonarSource/sonarqube/blob/master/LICENSE.txt[LGPL 3.0] +|https://www.sonarlint.org/eclipse/[SonarLint] |Optional|https://github.com/SonarSource/sonarlint-eclipse/blob/master/LICENSE.txt[LGPL 3+] +|https://github.com/devonfw/cicdgen[cicdgen] |Optional|https://github.com/devonfw/cicdgen/blob/develop/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4j[devon4j] |Optional|https://github.com/devonfw/devon4j/blob/develop/LICENSE[ASL 2.0] +|https://github.com/devonfw/devon4ng[devon4ng] |Optional|https://github.com/devonfw/devon4ng/blob/master/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4node[devon4node] |Optional|https://github.com/devonfw/devon4node/blob/develop/LICENSE.txt[ASL 2.0] +|https://www.jetbrains.com/idea/[IntelliJ IDEA] |Optional|https://www.jetbrains.com/opensource/idea/[ASL 2.0] +|http://www.jasypt.org/[jasypt] 
|Optional|http://www.jasypt.org/license.html[ASL 2.0] +|https://www.docker.com/[docker]|Optional|https://docs.docker.com/engine/#licensing[ASL 2.0] and EULA +|https://kubernetes.io/[kubernetes]|Optional|https://github.com/kubernetes/kubernetes/blob/master/LICENSE[ASL 2.0] +|== == == == == == == == == == == =

+
+
+
+

Apache Software License - Version 2.0

+
+
+
                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+
+
+

Eclipse Public License - Version 1.0

+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+
+1. DEFINITIONS
+
+"Contribution" means:
+
+a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and
+
+b) in the case of each subsequent Contributor:
+
+i) changes to the Program, and
+
+ii) additions to the Program;
+
+where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program.
+
+"Contributor" means any person or entity that distributes the Program.
+
+"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+"Program" means the Contributions distributed in accordance with this Agreement.
+
+"Recipient" means anyone who receives the Program under this Agreement, including all Contributors.
+
+2. GRANT OF RIGHTS
+
+a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form.
+
+b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+
+c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+
+d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+
+3. REQUIREMENTS
+
+A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that:
+
+a) it complies with the terms and conditions of this Agreement; and
+
+b) its license agreement:
+
+i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+
+ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+
+iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and
+
+iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange.
+
+When the Program is made available in source code form:
+
+a) it must be made available under this Agreement; and
+
+b) a copy of this Agreement must be included with each copy of the Program.
+
+Contributors may not remove or alter any copyright notices contained within the Program.
+
+Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution.
+
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved.
+
+This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation.
+
+
+
+
+

Eclipse Public License - Version 2.0

+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE (“AGREEMENT”). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+1. DEFINITIONS
+
+“Contribution” means:
+
+    a) in the case of the initial Contributor, the initial content Distributed under this Agreement, and
+    b) in the case of each subsequent Contributor:
+        i) changes to the Program, and
+        ii) additions to the Program;
+    where such changes and/or additions to the Program originate from and are Distributed by that particular Contributor. A Contribution “originates” from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include changes or additions to the Program that are not Modified Works.
+
+“Contributor” means any person or entity that Distributes the Program.
+
+“Licensed Patents” mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+“Program” means the Contributions Distributed in accordance with this Agreement.
+
+“Recipient” means anyone who receives the Program under this Agreement or any Secondary License (as applicable), including Contributors.
+
+“Derivative Works” shall mean any work, whether in Source Code or other form, that is based on (or derived from) the Program and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship.
+
+“Modified Works” shall mean any work in Source Code or other form that results from an addition to, deletion from, or modification of the contents of the Program, including, for purposes of clarity any new file in Source Code form that contains any contents of the Program. Modified Works shall not include works that contain only declarations, interfaces, types, classes, structures, or files of the Program solely in each case in order to link to, bind by name, or subclass the Program or Modified Works thereof.
+
+“Distribute” means the acts of a) distributing or b) making available in any manner that enables the transfer of a copy.
+
+“Source Code” means the form of a Program preferred for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+“Secondary License” means either the GNU General Public License, Version 2.0, or any later versions of that license, including any exceptions or additional permissions as identified by the initial Contributor.
+2. GRANT OF RIGHTS
+
+    a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, Distribute and sublicense the Contribution of such Contributor, if any, and such Derivative Works.
+    b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in Source Code or other form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+    c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to Distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+    d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+    e) Notwithstanding the terms of any Secondary License, no Contributor makes additional grants to any Recipient (other than those set forth in this Agreement) as a result of such Recipient's receipt of the Program under the terms of a Secondary License (if permitted under the terms of Section 3).
+
+3. REQUIREMENTS
+
+3.1 If a Contributor Distributes the Program in any form, then:
+
+    a) the Program must also be made available as Source Code, in accordance with section 3.2, and the Contributor must accompany the Program with a statement that the Source Code for the Program is available under this Agreement, and informs Recipients how to obtain it in a reasonable manner on or through a medium customarily used for software exchange; and
+    b) the Contributor may Distribute the Program under a license different than this Agreement, provided that such license:
+        i) effectively disclaims on behalf of all other Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+        ii) effectively excludes on behalf of all other Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+        iii) does not attempt to limit or alter the recipients' rights in the Source Code under section 3.2; and
+        iv) requires any subsequent distribution of the Program by any party to be under a license that satisfies the requirements of this section 3.
+
+3.2 When the Program is Distributed as Source Code:
+
+    a) it must be made available under this Agreement, or if the Program (i) is combined with other material in a separate file or files made available under a Secondary License, and (ii) the initial Contributor attached to the Source Code the notice described in Exhibit A of this Agreement, then the Program may be made available under the terms of such Secondary Licenses, and
+    b) a copy of this Agreement must be included with each copy of the Program.
+
+3.3 Contributors may not remove or alter any copyright, patent, trademark, attribution notices, disclaimers of warranty, or limitations of liability (‘notices’) contained within the Program from any copy of the Program which they Distribute, provided that Contributors may add their own appropriate notices.
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor (“Commercial Contributor”) hereby agrees to defend and indemnify every other Contributor (“Indemnified Contributor”) against any losses, damages and costs (collectively “Losses”) arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement, including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be Distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to Distribute the Program (including its Contributions) under the new version.
+
+Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. Nothing in this Agreement is intended to be enforceable by any entity that is not a Contributor or Recipient. No third-party beneficiary rights are created under this Agreement.
+Exhibit A – Form of Secondary Licenses Notice
+
+“This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License, v. 2.0 are satisfied: {name license(s), version(s), and exceptions or additional permissions here}.”
+
+    Simply including a copy of this Agreement, including this Exhibit A is not sufficient to license the Source Code under Secondary Licenses.
+
+    If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice.
+
+    You may add additional accurate notices of copyright ownership.
+
+
+
+
+

MIT License

+
+
+
Copyright <YEAR> <COPYRIGHT HOLDER>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+
+
+

Artistic License - Version 2.0

+
+
+
Copyright (c) 2000-2006, The Perl Foundation.
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+Preamble
+
+This license establishes the terms under which a given free software Package may be copied, modified, distributed, and/or redistributed. The intent is that the Copyright Holder maintains some artistic control over the development of that Package while still keeping the Package available as open source and free software.
+
+You are always permitted to make arrangements wholly outside of this license directly with the Copyright Holder of a given Package. If the terms of this license do not permit the full use that you propose to make of the Package, you should contact the Copyright Holder and seek a different licensing arrangement.
+Definitions
+
+"Copyright Holder" means the individual(s) or organization(s) named in the copyright notice for the entire Package.
+
+"Contributor" means any party that has contributed code or other material to the Package, in accordance with the Copyright Holder's procedures.
+
+"You" and "your" means any person who would like to copy, distribute, or modify the Package.
+
+"Package" means the collection of files distributed by the Copyright Holder, and derivatives of that collection and/or of those files. A given Package may consist of either the Standard Version, or a Modified Version.
+
+"Distribute" means providing a copy of the Package or making it accessible to anyone else, or in the case of a company or organization, to others outside of your company or organization.
+
+"Distributor Fee" means any fee that you charge for Distributing this Package or providing support for this Package to another party. It does not mean licensing fees.
+
+"Standard Version" refers to the Package if it has not been modified, or has been modified only in ways explicitly requested by the Copyright Holder.
+
+"Modified Version" means the Package, if it has been changed, and such changes were not explicitly requested by the Copyright Holder.
+
+"Original License" means this Artistic License as Distributed with the Standard Version of the Package, in its current version or as it may be modified by The Perl Foundation in the future.
+
+"Source" form means the source code, documentation source, and configuration files for the Package.
+
+"Compiled" form means the compiled bytecode, object code, binary, or any other form resulting from mechanical transformation or translation of the Source form.
+Permission for Use and Modification Without Distribution
+
+(1) You are permitted to use the Standard Version and create and use Modified Versions for any purpose without restriction, provided that you do not Distribute the Modified Version.
+Permissions for Redistribution of the Standard Version
+
+(2) You may Distribute verbatim copies of the Source form of the Standard Version of this Package in any medium without restriction, either gratis or for a Distributor Fee, provided that you duplicate all of the original copyright notices and associated disclaimers. At your discretion, such verbatim copies may or may not include a Compiled form of the Package.
+
+(3) You may apply any bug fixes, portability changes, and other modifications made available from the Copyright Holder. The resulting Package will still be considered the Standard Version, and as such will be subject to the Original License.
+Distribution of Modified Versions of the Package as Source
+
+(4) You may Distribute your Modified Version as Source (either gratis or for a Distributor Fee, and with or without a Compiled form of the Modified Version) provided that you clearly document how it differs from the Standard Version, including, but not limited to, documenting any non-standard features, executables, or modules, and provided that you do at least ONE of the following:
+
+(a) make the Modified Version available to the Copyright Holder of the Standard Version, under the Original License, so that the Copyright Holder may include your modifications in the Standard Version.
+(b) ensure that installation of your Modified Version does not prevent the user installing or running the Standard Version. In addition, the Modified Version must bear a name that is different from the name of the Standard Version.
+(c) allow anyone who receives a copy of the Modified Version to make the Source form of the Modified Version available to others under
+(i) the Original License or
+(ii) a license that permits the licensee to freely copy, modify and redistribute the Modified Version using the same licensing terms that apply to the copy that the licensee received, and requires that the Source form of the Modified Version, and of any works derived from it, be made freely available in that license fees are prohibited but Distributor Fees are allowed.
+Distribution of Compiled Forms of the Standard Version or Modified Versions without the Source
+
+(5) You may Distribute Compiled forms of the Standard Version without the Source, provided that you include complete instructions on how to get the Source of the Standard Version. Such instructions must be valid at the time of your distribution. If these instructions, at any time while you are carrying out such distribution, become invalid, you must provide new instructions on demand or cease further distribution. If you provide valid instructions or cease distribution within thirty days after you become aware that the instructions are invalid, then you do not forfeit any of your rights under this license.
+
+(6) You may Distribute a Modified Version in Compiled form without the Source, provided that you comply with Section 4 with respect to the Source of the Modified Version.
+Aggregating or Linking the Package
+
+(7) You may aggregate the Package (either the Standard Version or Modified Version) with other packages and Distribute the resulting aggregation provided that you do not charge a licensing fee for the Package. Distributor Fees are permitted, and licensing fees for other components in the aggregation are permitted. The terms of this license apply to the use and Distribution of the Standard or Modified Versions as included in the aggregation.
+
+(8) You are permitted to link Modified and Standard Versions with other works, to embed the Package in a larger work of your own, or to build stand-alone binary or bytecode versions of applications that include the Package, and Distribute the result without restriction, provided the result does not expose a direct interface to the Package.
+Items That are Not Considered Part of a Modified Version
+
+(9) Works (including, but not limited to, modules and scripts) that merely extend or make use of the Package, do not, by themselves, cause the Package to be a Modified Version. In addition, such works are not considered parts of the Package itself, and are not subject to the terms of this license.
+General Provisions
+
+(10) Any use, modification, and distribution of the Standard or Modified Versions is governed by this Artistic License. By using, modifying or distributing the Package, you accept this license. Do not use, modify, or distribute the Package, if you do not accept this license.
+
+(11) If your Modified Version has been derived from a Modified Version made by someone other than you, you are nevertheless required to ensure that your Modified Version complies with the requirements of this license.
+
+(12) This license does not grant you the right to use any trademark, service mark, tradename, or logo of the Copyright Holder.
+
+(13) This license includes the non-exclusive, worldwide, free-of-charge patent license to make, have made, use, offer to sell, sell, import and otherwise transfer the Package with respect to any patent claims licensable by the Copyright Holder that are necessarily infringed by the Package. If you institute patent litigation (including a cross-claim or counterclaim) against any party alleging that the Package constitutes direct or contributory patent infringement, then this Artistic License to you shall terminate on the date that such litigation is filed.
+
+(14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS IS" AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+
+

Creative Commons License - Attribution-NoDerivatives 4.0 International

+
+
+
By exercising the Licensed Rights (defined below), You accept and agree to be bound by the terms and conditions of this Creative Commons Attribution-NoDerivatives 4.0 International Public License ("Public License"). To the extent this Public License may be interpreted as a contract, You are granted the Licensed Rights in consideration of Your acceptance of these terms and conditions, and the Licensor grants You such rights in consideration of benefits the Licensor receives from making the Licensed Material available under these terms and conditions.
+
+Section 1 – Definitions.
+
+    Adapted Material means material subject to Copyright and Similar Rights that is derived from or based upon the Licensed Material and in which the Licensed Material is translated, altered, arranged, transformed, or otherwise modified in a manner requiring permission under the Copyright and Similar Rights held by the Licensor. For purposes of this Public License, where the Licensed Material is a musical work, performance, or sound recording, Adapted Material is always produced where the Licensed Material is synched in timed relation with a moving image.
+    Copyright and Similar Rights means copyright and/or similar rights closely related to copyright including, without limitation, performance, broadcast, sound recording, and Sui Generis Database Rights, without regard to how the rights are labeled or categorized. For purposes of this Public License, the rights specified in Section 2(b)(1)-(2) are not Copyright and Similar Rights.
+    Effective Technological Measures means those measures that, in the absence of proper authority, may not be circumvented under laws fulfilling obligations under Article 11 of the WIPO Copyright Treaty adopted on December 20, 1996, and/or similar international agreements.
+    Exceptions and Limitations means fair use, fair dealing, and/or any other exception or limitation to Copyright and Similar Rights that applies to Your use of the Licensed Material.
+    Licensed Material means the artistic or literary work, database, or other material to which the Licensor applied this Public License.
+    Licensed Rights means the rights granted to You subject to the terms and conditions of this Public License, which are limited to all Copyright and Similar Rights that apply to Your use of the Licensed Material and that the Licensor has authority to license.
+    Licensor means the individual(s) or entity(ies) granting rights under this Public License.
+    Share means to provide material to the public by any means or process that requires permission under the Licensed Rights, such as reproduction, public display, public performance, distribution, dissemination, communication, or importation, and to make material available to the public including in ways that members of the public may access the material from a place and at a time individually chosen by them.
+    Sui Generis Database Rights means rights other than copyright resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other essentially equivalent rights anywhere in the world.
+    You means the individual or entity exercising the Licensed Rights under this Public License. Your has a corresponding meaning.
+
+Section 2 – Scope.
+
+    License grant.
+        Subject to the terms and conditions of this Public License, the Licensor hereby grants You a worldwide, royalty-free, non-sublicensable, non-exclusive, irrevocable license to exercise the Licensed Rights in the Licensed Material to:
+            reproduce and Share the Licensed Material, in whole or in part; and
+            produce and reproduce, but not Share, Adapted Material.
+        Exceptions and Limitations. For the avoidance of doubt, where Exceptions and Limitations apply to Your use, this Public License does not apply, and You do not need to comply with its terms and conditions.
+        Term. The term of this Public License is specified in Section 6(a).
+        Media and formats; technical modifications allowed. The Licensor authorizes You to exercise the Licensed Rights in all media and formats whether now known or hereafter created, and to make technical modifications necessary to do so. The Licensor waives and/or agrees not to assert any right or authority to forbid You from making technical modifications necessary to exercise the Licensed Rights, including technical modifications necessary to circumvent Effective Technological Measures. For purposes of this Public License, simply making modifications authorized by this Section 2(a)(4) never produces Adapted Material.
+        Downstream recipients.
+            Offer from the Licensor – Licensed Material. Every recipient of the Licensed Material automatically receives an offer from the Licensor to exercise the Licensed Rights under the terms and conditions of this Public License.
+            No downstream restrictions. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, the Licensed Material if doing so restricts exercise of the Licensed Rights by any recipient of the Licensed Material.
+        No endorsement. Nothing in this Public License constitutes or may be construed as permission to assert or imply that You are, or that Your use of the Licensed Material is, connected with, or sponsored, endorsed, or granted official status by, the Licensor or others designated to receive attribution as provided in Section 3(a)(1)(A)(i).
+
+    Other rights.
+        Moral rights, such as the right of integrity, are not licensed under this Public License, nor are publicity, privacy, and/or other similar personality rights; however, to the extent possible, the Licensor waives and/or agrees not to assert any such rights held by the Licensor to the limited extent necessary to allow You to exercise the Licensed Rights, but not otherwise.
+        Patent and trademark rights are not licensed under this Public License.
+        To the extent possible, the Licensor waives any right to collect royalties from You for the exercise of the Licensed Rights, whether directly or through a collecting society under any voluntary or waivable statutory or compulsory licensing scheme. In all other cases the Licensor expressly reserves any right to collect such royalties.
+
+Section 3 – License Conditions.
+
+Your exercise of the Licensed Rights is expressly made subject to the following conditions.
+
+    Attribution.
+
+        If You Share the Licensed Material, You must:
+            retain the following if it is supplied by the Licensor with the Licensed Material:
+                identification of the creator(s) of the Licensed Material and any others designated to receive attribution, in any reasonable manner requested by the Licensor (including by pseudonym if designated);
+                a copyright notice;
+                a notice that refers to this Public License;
+                a notice that refers to the disclaimer of warranties;
+                a URI or hyperlink to the Licensed Material to the extent reasonably practicable;
+            indicate if You modified the Licensed Material and retain an indication of any previous modifications; and
+            indicate the Licensed Material is licensed under this Public License, and include the text of, or the URI or hyperlink to, this Public License.
+        For the avoidance of doubt, You do not have permission under this Public License to Share Adapted Material.
+        You may satisfy the conditions in Section 3(a)(1) in any reasonable manner based on the medium, means, and context in which You Share the Licensed Material. For example, it may be reasonable to satisfy the conditions by providing a URI or hyperlink to a resource that includes the required information.
+        If requested by the Licensor, You must remove any of the information required by Section 3(a)(1)(A) to the extent reasonably practicable.
+
+Section 4 – Sui Generis Database Rights.
+
+Where the Licensed Rights include Sui Generis Database Rights that apply to Your use of the Licensed Material:
+
+    for the avoidance of doubt, Section 2(a)(1) grants You the right to extract, reuse, reproduce, and Share all or a substantial portion of the contents of the database, provided You do not Share Adapted Material;
+    if You include all or a substantial portion of the database contents in a database in which You have Sui Generis Database Rights, then the database in which You have Sui Generis Database Rights (but not its individual contents) is Adapted Material; and
+    You must comply with the conditions in Section 3(a) if You Share all or a substantial portion of the contents of the database.
+
+For the avoidance of doubt, this Section 4 supplements and does not replace Your obligations under this Public License where the Licensed Rights include other Copyright and Similar Rights.
+
+Section 5 – Disclaimer of Warranties and Limitation of Liability.
+
+    Unless otherwise separately undertaken by the Licensor, to the extent possible, the Licensor offers the Licensed Material as-is and as-available, and makes no representations or warranties of any kind concerning the Licensed Material, whether express, implied, statutory, or other. This includes, without limitation, warranties of title, merchantability, fitness for a particular purpose, non-infringement, absence of latent or other defects, accuracy, or the presence or absence of errors, whether or not known or discoverable. Where disclaimers of warranties are not allowed in full or in part, this disclaimer may not apply to You.
+    To the extent possible, in no event will the Licensor be liable to You on any legal theory (including, without limitation, negligence) or otherwise for any direct, special, indirect, incidental, consequential, punitive, exemplary, or other losses, costs, expenses, or damages arising out of this Public License or use of the Licensed Material, even if the Licensor has been advised of the possibility of such losses, costs, expenses, or damages. Where a limitation of liability is not allowed in full or in part, this limitation may not apply to You.
+
+    The disclaimer of warranties and limitation of liability provided above shall be interpreted in a manner that, to the extent possible, most closely approximates an absolute disclaimer and waiver of all liability.
+
+Section 6 – Term and Termination.
+
+    This Public License applies for the term of the Copyright and Similar Rights licensed here. However, if You fail to comply with this Public License, then Your rights under this Public License terminate automatically.
+
+    Where Your right to use the Licensed Material has terminated under Section 6(a), it reinstates:
+        automatically as of the date the violation is cured, provided it is cured within 30 days of Your discovery of the violation; or
+        upon express reinstatement by the Licensor.
+    For the avoidance of doubt, this Section 6(b) does not affect any right the Licensor may have to seek remedies for Your violations of this Public License.
+    For the avoidance of doubt, the Licensor may also offer the Licensed Material under separate terms or conditions or stop distributing the Licensed Material at any time; however, doing so will not terminate this Public License.
+    Sections 1, 5, 6, 7, and 8 survive termination of this Public License.
+
+Section 7 – Other Terms and Conditions.
+
+    The Licensor shall not be bound by any additional or different terms or conditions communicated by You unless expressly agreed.
+    Any arrangements, understandings, or agreements regarding the Licensed Material not stated herein are separate from and independent of the terms and conditions of this Public License.
+
+Section 8 – Interpretation.
+
+    For the avoidance of doubt, this Public License does not, and shall not be interpreted to, reduce, limit, restrict, or impose conditions on any use of the Licensed Material that could lawfully be made without permission under this Public License.
+    To the extent possible, if any provision of this Public License is deemed unenforceable, it shall be automatically reformed to the minimum extent necessary to make it enforceable. If the provision cannot be reformed, it shall be severed from this Public License without affecting the enforceability of the remaining terms and conditions.
+    No term or condition of this Public License will be waived and no failure to comply consented to unless expressly agreed to by the Licensor.
+    Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority.
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 2.1

+
+
+
 Version 2.1, February 1999
+
+Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL.  It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users.
+
+This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below.
+
+When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things.
+
+To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it.
+
+For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights.
+
+We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library.
+
+To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others.
+
+Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license.
+
+Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs.
+
+When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library.
+
+We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances.
+
+For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License.
+
+In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system.
+
+Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library.
+
+The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you".
+
+A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables.
+
+The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".)
+
+"Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library.
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does.
+
+1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) The modified work must itself be a software library.
+    b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change.
+    c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License.
+    d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful.
+
+    (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices.
+
+Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy.
+
+This option is useful when you wish to copy part of the code of the Library into a program that is not a library.
+
+4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange.
+
+If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code.
+
+5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License.
+
+However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables.
+
+When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law.
+
+If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.)
+
+Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself.
+
+6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications.
+
+You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things:
+
+    a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.)
+    b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with.
+    c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution.
+    d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place.
+    e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy.
+
+For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute.
+
+7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above.
+    b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it.
+
+10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License.
+
+11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation.
+
+14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Libraries
+
+If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License).
+
+To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the library's name and an idea of what it does.
+Copyright (C) year  name of author
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2.1 of the License, or (at your option) any later version.
+
+This library is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with this library; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the library, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright interest in
+the library `Frob' (a library for tweaking knobs) written
+by James Random Hacker.
+
+signature of Ty Coon, 1 April 1990
+Ty Coon, President of Vice
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 3

+
+
+
Version 3, 29 June 2007
+
+Copyright © 2007 Free Software Foundation, Inc. <https://fsf.org/>
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+
+This version of the GNU Lesser General Public License incorporates the terms and conditions of version 3 of the GNU General Public License, supplemented by the additional permissions listed below.
+0. Additional Definitions.
+
+As used herein, “this License” refers to version 3 of the GNU Lesser General Public License, and the “GNU GPL” refers to version 3 of the GNU General Public License.
+
+“The Library” refers to a covered work governed by this License, other than an Application or a Combined Work as defined below.
+
+An “Application” is any work that makes use of an interface provided by the Library, but which is not otherwise based on the Library. Defining a subclass of a class defined by the Library is deemed a mode of using an interface provided by the Library.
+
+A “Combined Work” is a work produced by combining or linking an Application with the Library. The particular version of the Library with which the Combined Work was made is also called the “Linked Version”.
+
+The “Minimal Corresponding Source” for a Combined Work means the Corresponding Source for the Combined Work, excluding any source code for portions of the Combined Work that, considered in isolation, are based on the Application, and not on the Linked Version.
+
+The “Corresponding Application Code” for a Combined Work means the object code and/or source code for the Application, including any data and utility programs needed for reproducing the Combined Work from the Application, but excluding the System Libraries of the Combined Work.
+1. Exception to Section 3 of the GNU GPL.
+
+You may convey a covered work under sections 3 and 4 of this License without being bound by section 3 of the GNU GPL.
+2. Conveying Modified Versions.
+
+If you modify a copy of the Library, and, in your modifications, a facility refers to a function or data to be supplied by an Application that uses the facility (other than as an argument passed when the facility is invoked), then you may convey a copy of the modified version:
+
+    a) under this License, provided that you make a good faith effort to ensure that, in the event an Application does not supply the function or data, the facility still operates, and performs whatever part of its purpose remains meaningful, or
+    b) under the GNU GPL, with none of the additional permissions of this License applicable to that copy.
+
+3. Object Code Incorporating Material from Library Header Files.
+
+The object code form of an Application may incorporate material from a header file that is part of the Library. You may convey such object code under terms of your choice, provided that, if the incorporated material is not limited to numerical parameters, data structure layouts and accessors, or small macros, inline functions and templates (ten or fewer lines in length), you do both of the following:
+
+    a) Give prominent notice with each copy of the object code that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the object code with a copy of the GNU GPL and this license document.
+
+4. Combined Works.
+
+You may convey a Combined Work under terms of your choice that, taken together, effectively do not restrict modification of the portions of the Library contained in the Combined Work and reverse engineering for debugging such modifications, if you also do each of the following:
+
+    a) Give prominent notice with each copy of the Combined Work that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the Combined Work with a copy of the GNU GPL and this license document.
+    c) For a Combined Work that displays copyright notices during execution, include the copyright notice for the Library among these notices, as well as a reference directing the user to the copies of the GNU GPL and this license document.
+    d) Do one of the following:
+        0) Convey the Minimal Corresponding Source under the terms of this License, and the Corresponding Application Code in a form suitable for, and under terms that permit, the user to recombine or relink the Application with a modified version of the Linked Version to produce a modified Combined Work, in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.
+        1) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (a) uses at run time a copy of the Library already present on the user's computer system, and (b) will operate properly with a modified version of the Library that is interface-compatible with the Linked Version.
+    e) Provide Installation Information, but only if you would otherwise be required to provide such information under section 6 of the GNU GPL, and only to the extent that such information is necessary to install and execute a modified version of the Combined Work produced by recombining or relinking the Application with a modified version of the Linked Version. (If you use option 4d0, the Installation Information must accompany the Minimal Corresponding Source and Corresponding Application Code. If you use option 4d1, you must provide the Installation Information in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.)
+
+5. Combined Libraries.
+
+You may place library facilities that are a work based on the Library side by side in a single library together with other library facilities that are not Applications and are not covered by this License, and convey such a combined library under terms of your choice, if you do both of the following:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities, conveyed under the terms of this License.
+    b) Give prominent notice with the combined library that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+6. Revised Versions of the GNU Lesser General Public License.
+
+The Free Software Foundation may publish revised and/or new versions of the GNU Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library as you received it specifies that a certain numbered version of the GNU Lesser General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that published version or of any later version published by the Free Software Foundation. If the Library as you received it does not specify a version number of the GNU Lesser General Public License, you may choose any version of the GNU Lesser General Public License ever published by the Free Software Foundation.
+
+If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library.
+
+
+
+
+

GNU GENERAL PUBLIC LICENSE - Version 2

+
+
+
 Version 2, June 1991
+
+Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA
+
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too.
+
+When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things.
+
+To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it.
+
+For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights.
+
+We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software.
+
+Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations.
+
+Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all.
+
+The precise terms and conditions for copying, distribution and modification follow.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does.
+
+1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change.
+    b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License.
+    c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code.
+
+4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it.
+
+6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License.
+
+7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation.
+
+10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Programs
+
+If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms.
+
+To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the program's name and an idea of what it does.
+Copyright (C) yyyy  name of author
+
+This program is free software; you can redistribute it and/or
+modify it under the terms of the GNU General Public License
+as published by the Free Software Foundation; either version 2
+of the License, or (at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this when it starts in an interactive mode:
+
+Gnomovision version 69, Copyright (C) year name of author
+Gnomovision comes with ABSOLUTELY NO WARRANTY; for details
+type `show w'.  This is free software, and you are welcome
+to redistribute it under certain conditions; type `show c'
+for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright
+interest in the program `Gnomovision'
+(which makes passes at compilers) written
+by James Hacker.
+
+signature of Ty Coon, 1 April 1989
+Ty Coon, President of Vice
+
+
+
+
+

DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2

+
+
+
            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+                    Version 2, December 2004
+
+ Copyright (C) 2004 Sam Hocevar
+  14 rue de Plaisance, 75014 Paris, France
+ Everyone is permitted to copy and distribute verbatim or modified
+ copies of this license document, and changing it is allowed as long
+ as the name is changed.
+
+            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. You just DO WHAT THE FUCK YOU WANT TO.
+
+
+
+
+

License of Node.js

+
+
+
Node.js is licensed for use as follows:
+
+"""
+Copyright Node.js contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+This license applies to parts of Node.js originating from the
+https://github.com/joyent/node repository:
+
+"""
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+The Node.js license applies to all parts of Node.js that are not externally
+maintained libraries.
+
+The externally maintained libraries used by Node.js are:
+
+- Acorn, located at deps/acorn, is licensed as follows:
+  """
+    Copyright (C) 2012-2018 by various contributors (see AUTHORS)
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- Acorn plugins, located at deps/acorn-plugins, is licensed as follows:
+  """
+    Copyright (C) 2017-2018 by Adrian Heine
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- c-ares, located at deps/cares, is licensed as follows:
+  """
+    Copyright (c) 2007 - 2018, Daniel Stenberg with many contributors, see AUTHORS
+    file.
+
+    Copyright 1998 by the Massachusetts Institute of Technology.
+
+    Permission to use, copy, modify, and distribute this software and its
+    documentation for any purpose and without fee is hereby granted, provided that
+    the above copyright notice appear in all copies and that both that copyright
+    notice and this permission notice appear in supporting documentation, and that
+    the name of M.I.T. not be used in advertising or publicity pertaining to
+    distribution of the software without specific, written prior permission.
+    M.I.T. makes no representations about the suitability of this software for any
+    purpose.  It is provided "as is" without express or implied warranty.
+  """
+
+- ICU, located at deps/icu-small, is licensed as follows:
+  """
+    COPYRIGHT AND PERMISSION NOTICE (ICU 58 and later)
+
+    Copyright © 1991-2019 Unicode, Inc. All rights reserved.
+    Distributed under the Terms of Use in https://www.unicode.org/copyright.html.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of the Unicode data files and any associated documentation
+    (the "Data Files") or Unicode software and any associated documentation
+    (the "Software") to deal in the Data Files or Software
+    without restriction, including without limitation the rights to use,
+    copy, modify, merge, publish, distribute, and/or sell copies of
+    the Data Files or Software, and to permit persons to whom the Data Files
+    or Software are furnished to do so, provided that either
+    (a) this copyright and permission notice appear with all copies
+    of the Data Files or Software, or
+    (b) this copyright and permission notice appear in associated
+    Documentation.
+
+    THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
+    ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT OF THIRD PARTY RIGHTS.
+    IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
+    NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
+    DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+    DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+    TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+    PERFORMANCE OF THE DATA FILES OR SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale,
+    use or other dealings in these Data Files or Software without prior
+    written authorization of the copyright holder.
+
+    ---------------------
+
+    Third-Party Software Licenses
+
+    This section contains third-party software notices and/or additional
+    terms for licensed third-party software components included within ICU
+    libraries.
+
+    1. ICU License - ICU 1.8.1 to ICU 57.1
+
+    COPYRIGHT AND PERMISSION NOTICE
+
+    Copyright (c) 1995-2016 International Business Machines Corporation and others
+    All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, and/or sell copies of the Software, and to permit persons
+    to whom the Software is furnished to do so, provided that the above
+    copyright notice(s) and this permission notice appear in all copies of
+    the Software and that both the above copyright notice(s) and this
+    permission notice appear in supporting documentation.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
+    OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
+    HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY
+    SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER
+    RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
+    CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+    CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale, use
+    or other dealings in this Software without prior written authorization
+    of the copyright holder.
+
+    All trademarks and registered trademarks mentioned herein are the
+    property of their respective owners.
+
+    2. Chinese/Japanese Word Break Dictionary Data (cjdict.txt)
+
+     #     The Google Chrome software developed by Google is licensed under
+     # the BSD license. Other software included in this distribution is
+     # provided under other licenses, as set forth below.
+     #
+     #  The BSD License
+     #  http://opensource.org/licenses/bsd-license.php
+     #  Copyright (C) 2006-2008, Google Inc.
+     #
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     # modification, are permitted provided that the following conditions are met:
+     #
+     #  Redistributions of source code must retain the above copyright notice,
+     # this list of conditions and the following disclaimer.
+     #  Redistributions in binary form must reproduce the above
+     # copyright notice, this list of conditions and the following
+     # disclaimer in the documentation and/or other materials provided with
+     # the distribution.
+     #  Neither the name of  Google Inc. nor the names of its
+     # contributors may be used to endorse or promote products derived from
+     # this software without specific prior written permission.
+     #
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+     # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+     # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+     # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+     # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+     # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+     #
+     #
+     #  The word list in cjdict.txt are generated by combining three word lists
+     # listed below with further processing for compound word breaking. The
+     # frequency is generated with an iterative training against Google web
+     # corpora.
+     #
+     #  * Libtabe (Chinese)
+     #    - https://sourceforge.net/project/?group_id=1519
+     #    - Its license terms and conditions are shown below.
+     #
+     #  * IPADIC (Japanese)
+     #    - http://chasen.aist-nara.ac.jp/chasen/distribution.html
+     #    - Its license terms and conditions are shown below.
+     #
+     #  ---------COPYING.libtabe ---- BEGIN--------------------
+     #
+     #  /*
+     #   * Copyright (c) 1999 TaBE Project.
+     #   * Copyright (c) 1999 Pai-Hsiang Hsiao.
+     #   * All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the TaBE Project nor the names of its
+     #   *   contributors may be used to endorse or promote products derived
+     #   *   from this software without specific prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  /*
+     #   * Copyright (c) 1999 Computer Systems and Communication Lab,
+     #   *                    Institute of Information Science, Academia
+     #       *                    Sinica. All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the Computer Systems and Communication Lab
+     #   *   nor the names of its contributors may be used to endorse or
+     #   *   promote products derived from this software without specific
+     #   *   prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  Copyright 1996 Chih-Hao Tsai @ Beckman Institute,
+     #      University of Illinois
+     #  c-tsai4@uiuc.edu  http://casper.beckman.uiuc.edu/~c-tsai4
+     #
+     #  ---------------COPYING.libtabe-----END--------------------------------
+     #
+     #
+     #  ---------------COPYING.ipadic-----BEGIN-------------------------------
+     #
+     #  Copyright 2000, 2001, 2002, 2003 Nara Institute of Science
+     #  and Technology.  All Rights Reserved.
+     #
+     #  Use, reproduction, and distribution of this software is permitted.
+     #  Any copy of this software, whether in its original form or modified,
+     #  must include both the above copyright notice and the following
+     #  paragraphs.
+     #
+     #  Nara Institute of Science and Technology (NAIST),
+     #  the copyright holders, disclaims all warranties with regard to this
+     #  software, including all implied warranties of merchantability and
+     #  fitness, in no event shall NAIST be liable for
+     #  any special, indirect or consequential damages or any damages
+     #  whatsoever resulting from loss of use, data or profits, whether in an
+     #  action of contract, negligence or other tortuous action, arising out
+     #  of or in connection with the use or performance of this software.
+     #
+     #  A large portion of the dictionary entries
+     #  originate from ICOT Free Software.  The following conditions for ICOT
+     #  Free Software applies to the current dictionary as well.
+     #
+     #  Each User may also freely distribute the Program, whether in its
+     #  original form or modified, to any third party or parties, PROVIDED
+     #  that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear
+     #  on, or be attached to, the Program, which is distributed substantially
+     #  in the same form as set out herein and that such intended
+     #  distribution, if actually made, will neither violate or otherwise
+     #  contravene any of the laws and regulations of the countries having
+     #  jurisdiction over the User or the intended distribution itself.
+     #
+     #  NO WARRANTY
+     #
+     #  The program was produced on an experimental basis in the course of the
+     #  research and development conducted during the project and is provided
+     #  to users as so produced on an experimental basis.  Accordingly, the
+     #  program is provided without any warranty whatsoever, whether express,
+     #  implied, statutory or otherwise.  The term "warranty" used herein
+     #  includes, but is not limited to, any warranty of the quality,
+     #  performance, merchantability and fitness for a particular purpose of
+     #  the program and the nonexistence of any infringement or violation of
+     #  any right of any third party.
+     #
+     #  Each user of the program will agree and understand, and be deemed to
+     #  have agreed and understood, that there is no warranty whatsoever for
+     #  the program and, accordingly, the entire risk arising from or
+     #  otherwise connected with the program is assumed by the user.
+     #
+     #  Therefore, neither ICOT, the copyright holder, or any other
+     #  organization that participated in or was otherwise related to the
+     #  development of the program and their respective officials, directors,
+     #  officers and other employees shall be held liable for any and all
+     #  damages, including, without limitation, general, special, incidental
+     #  and consequential damages, arising out of or otherwise in connection
+     #  with the use or inability to use the program or any product, material
+     #  or result produced or otherwise obtained by using the program,
+     #  regardless of whether they have been advised of, or otherwise had
+     #  knowledge of, the possibility of such damages at any time during the
+     #  project or thereafter.  Each user will be deemed to have agreed to the
+     #  foregoing by his or her commencement of use of the program.  The term
+     #  "use" as used herein includes, but is not limited to, the use,
+     #  modification, copying and distribution of the program and the
+     #  production of secondary products from the program.
+     #
+     #  In the case where the program, whether in its original form or
+     #  modified, was distributed or delivered to or received by a user from
+     #  any person, organization or entity other than ICOT, unless it makes or
+     #  grants independently of ICOT any specific warranty to the user in
+     #  writing, such person, organization or entity, will also be exempted
+     #  from and not be held liable to the user for any such damages as noted
+     #  above as far as the program is concerned.
+     #
+     #  ---------------COPYING.ipadic-----END----------------------------------
+
+    3. Lao Word Break Dictionary Data (laodict.txt)
+
+     #  Copyright (c) 2013 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     # Project: http://code.google.com/p/lao-dictionary/
+     # Dictionary: http://lao-dictionary.googlecode.com/git/Lao-Dictionary.txt
+     # License: http://lao-dictionary.googlecode.com/git/Lao-Dictionary-LICENSE.txt
+     #              (copied below)
+     #
+     #  This file is derived from the above dictionary, with slight
+     #  modifications.
+     #  ----------------------------------------------------------------------
+     #  Copyright (C) 2013 Brian Eugene Wilson, Robert Martin Campbell.
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification,
+     #  are permitted provided that the following conditions are met:
+     #
+     #
+     # Redistributions of source code must retain the above copyright notice, this
+     #  list of conditions and the following disclaimer. Redistributions in
+     #  binary form must reproduce the above copyright notice, this list of
+     #  conditions and the following disclaimer in the documentation and/or
+     #  other materials provided with the distribution.
+     #
+     #
+     # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
+     # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     # OF THE POSSIBILITY OF SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    4. Burmese Word Break Dictionary Data (burmesedict.txt)
+
+     #  Copyright (c) 2014 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     #  This list is part of a project hosted at:
+     #    github.com/kanyawtech/myanmar-karen-word-lists
+     #
+     #  --------------------------------------------------------------------------
+     #  Copyright (c) 2013, LeRoy Benjamin Sharon
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification, are permitted provided that the following conditions
+     #  are met: Redistributions of source code must retain the above
+     #  copyright notice, this list of conditions and the following
+     #  disclaimer.  Redistributions in binary form must reproduce the
+     #  above copyright notice, this list of conditions and the following
+     #  disclaimer in the documentation and/or other materials provided
+     #  with the distribution.
+     #
+     #    Neither the name Myanmar Karen Word Lists, nor the names of its
+     #    contributors may be used to endorse or promote products derived
+     #    from this software without specific prior written permission.
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     #  CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     #  INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     #  MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     #  DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
+     #  BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+     #  EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+     #  TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+     #  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+     #  ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
+     #  TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
+     #  THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+     #  SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    5. Time Zone Database
+
+      ICU uses the public domain data and code derived from Time Zone
+    Database for its time zone support. The ownership of the TZ database
+    is explained in BCP 175: Procedure for Maintaining the Time Zone
+    Database section 7.
+
+     # 7.  Database Ownership
+     #
+     #    The TZ database itself is not an IETF Contribution or an IETF
+     #    document.  Rather it is a pre-existing and regularly updated work
+     #    that is in the public domain, and is intended to remain in the
+     #    public domain.  Therefore, BCPs 78 [RFC5378] and 79 [RFC3979] do
+     #    not apply to the TZ Database or contributions that individuals make
+     #    to it.  Should any claims be made and substantiated against the TZ
+     #    Database, the organization that is providing the IANA
+     #    Considerations defined in this RFC, under the memorandum of
+     #    understanding with the IETF, currently ICANN, may act in accordance
+     #    with all competent court orders.  No ownership claims will be made
+     #    by ICANN or the IETF Trust on the database or the code.  Any person
+     #    making a contribution to the database or code waives all rights to
+     #    future claims in that contribution or in the TZ Database.
+
+    6. Google double-conversion
+
+    Copyright 2006-2011, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- libuv, located at deps/uv, is licensed as follows:
+  """
+    libuv is licensed for use as follows:
+
+    == ==
+    Copyright (c) 2015-present libuv project contributors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+    == ==
+
+    This license applies to parts of libuv originating from the
+    https://github.com/joyent/libuv repository:
+
+    == ==
+
+    Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+
+    == ==
+
+    This license applies to all parts of libuv that are not externally
+    maintained libraries.
+
+    The externally maintained libraries used by libuv are:
+
+      - tree.h (from FreeBSD), copyright Niels Provos. Two clause BSD license.
+
+      - inet_pton and inet_ntop implementations, contained in src/inet.c, are
+        copyright the Internet Systems Consortium, Inc., and licensed under the ISC
+        license.
+
+      - stdint-msvc2008.h (from msinttypes), copyright Alexander Chemeris. Three
+        clause BSD license.
+
+      - pthread-fixes.c, copyright Google Inc. and Sony Mobile Communications AB.
+        Three clause BSD license.
+
+      - android-ifaddrs.h, android-ifaddrs.c, copyright Berkeley Software Design
+        Inc, Kenneth MacKay and Emergya (Cloud4all, FP7/2007-2013, grant agreement
+        n° 289016). Three clause BSD license.
+  """
+
+- llhttp, located at deps/llhttp, is licensed as follows:
+  """
+    This software is licensed under the MIT License.
+
+    Copyright Fedor Indutny, 2018.
+
+    Permission is hereby granted, free of charge, to any person obtaining a
+    copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to permit
+    persons to whom the Software is furnished to do so, subject to the
+    following conditions:
+
+    The above copyright notice and this permission notice shall be included
+    in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+    OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+    NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+    DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+    OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+    USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- OpenSSL, located at deps/openssl, is licensed as follows:
+  """
+    Copyright (c) 1998-2019 The OpenSSL Project.  All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    1. Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+    2. Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+
+    3. All advertising materials mentioning features or use of this
+    software must display the following acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
+
+    4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
+    endorse or promote products derived from this software without
+    prior written permission. For written permission, please contact
+    openssl-core@openssl.org.
+
+    5. Products derived from this software may not be called "OpenSSL"
+    nor may "OpenSSL" appear in their names without prior written
+    permission of the OpenSSL Project.
+
+    6. Redistributions of any form whatsoever must retain the following
+    acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
+
+    THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
+    EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+    PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
+    ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+    NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+    LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+    HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+    STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+    ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+    OF THE POSSIBILITY OF SUCH DAMAGE.
+    == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == ==
+
+    This product includes cryptographic software written by Eric Young
+    (eay@cryptsoft.com).  This product includes software written by Tim
+    Hudson (tjh@cryptsoft.com).
+  """
+
+- Punycode.js, located at lib/punycode.js, is licensed as follows:
+  """
+    Copyright Mathias Bynens <https://mathiasbynens.be/>
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- V8, located at deps/v8, is licensed as follows:
+  """
+    This license applies to all parts of V8 that are not externally
+    maintained libraries.  The externally maintained libraries used by V8
+    are:
+
+      - PCRE test suite, located in
+        test/mjsunit/third_party/regexp-pcre/regexp-pcre.js.  This is based on the
+        test suite from PCRE-7.3, which is copyrighted by the University
+        of Cambridge and Google, Inc.  The copyright notice and license
+        are embedded in regexp-pcre.js.
+
+      - Layout tests, located in test/mjsunit/third_party/object-keys.  These are
+        based on layout tests from webkit.org which are copyrighted by
+        Apple Computer, Inc. and released under a 3-clause BSD license.
+
+      - Strongtalk assembler, the basis of the files assembler-arm-inl.h,
+        assembler-arm.cc, assembler-arm.h, assembler-ia32-inl.h,
+        assembler-ia32.cc, assembler-ia32.h, assembler-x64-inl.h,
+        assembler-x64.cc, assembler-x64.h, assembler-mips-inl.h,
+        assembler-mips.cc, assembler-mips.h, assembler.cc and assembler.h.
+        This code is copyrighted by Sun Microsystems Inc. and released
+        under a 3-clause BSD license.
+
+      - Valgrind client API header, located at src/third_party/valgrind/valgrind.h
+        This is released under the BSD license.
+
+      - The Wasm C/C++ API headers, located at third_party/wasm-api/wasm.{h,hh}
+        This is released under the Apache license. The API's upstream prototype
+        implementation also formed the basis of V8's implementation in
+        src/wasm/c-api.cc.
+
+    These libraries have their own licenses; we recommend you read them,
+    as their terms may differ from the terms below.
+
+    Further license information can be found in LICENSE files located in
+    sub-directories.
+
+    Copyright 2014, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- SipHash, located at deps/v8/src/third_party/siphash, is licensed as follows:
+  """
+    SipHash reference C implementation
+
+    Copyright (c) 2016 Jean-Philippe Aumasson <jeanphilippe.aumasson@gmail.com>
+
+    To the extent possible under law, the author(s) have dedicated all
+    copyright and related and neighboring rights to this software to the public
+    domain worldwide. This software is distributed without any warranty.
+  """
+
+- zlib, located at deps/zlib, is licensed as follows:
+  """
+    zlib.h -- interface of the 'zlib' general purpose compression library
+    version 1.2.11, January 15th, 2017
+
+    Copyright (C) 1995-2017 Jean-loup Gailly and Mark Adler
+
+    This software is provided 'as-is', without any express or implied
+    warranty.  In no event will the authors be held liable for any damages
+    arising from the use of this software.
+
+    Permission is granted to anyone to use this software for any purpose,
+    including commercial applications, and to alter it and redistribute it
+    freely, subject to the following restrictions:
+
+    1. The origin of this software must not be misrepresented; you must not
+    claim that you wrote the original software. If you use this software
+    in a product, an acknowledgment in the product documentation would be
+    appreciated but is not required.
+    2. Altered source versions must be plainly marked as such, and must not be
+    misrepresented as being the original software.
+    3. This notice may not be removed or altered from any source distribution.
+
+    Jean-loup Gailly        Mark Adler
+    jloup@gzip.org          madler@alumni.caltech.edu
+  """
+
+- npm, located at deps/npm, is licensed as follows:
+  """
+    The npm application
+    Copyright (c) npm, Inc. and Contributors
+    Licensed on the terms of The Artistic License 2.0
+
+    Node package dependencies of the npm application
+    Copyright (c) their respective copyright owners
+    Licensed on their respective license terms
+
+    The npm public registry at https://registry.npmjs.org
+    and the npm website at https://www.npmjs.com
+    Operated by npm, Inc.
+    Use governed by terms published on https://www.npmjs.com
+
+    "Node.js"
+    Trademark Joyent, Inc., https://joyent.com
+    Neither npm nor npm, Inc. are affiliated with Joyent, Inc.
+
+    The Node.js application
+    Project of Node Foundation, https://nodejs.org
+
+    The npm Logo
+    Copyright (c) Mathias Pettersson and Brian Hammond
+
+    "Gubblebum Blocky" typeface
+    Copyright (c) Tjarda Koster, https://jelloween.deviantart.com
+    Used with permission
+
+    --------
+
+    The Artistic License 2.0
+
+    Copyright (c) 2000-2006, The Perl Foundation.
+
+    Everyone is permitted to copy and distribute verbatim copies
+    of this license document, but changing it is not allowed.
+
+    Preamble
+
+    This license establishes the terms under which a given free software
+    Package may be copied, modified, distributed, and/or redistributed.
+    The intent is that the Copyright Holder maintains some artistic
+    control over the development of that Package while still keeping the
+    Package available as open source and free software.
+
+    You are always permitted to make arrangements wholly outside of this
+    license directly with the Copyright Holder of a given Package.  If the
+    terms of this license do not permit the full use that you propose to
+    make of the Package, you should contact the Copyright Holder and seek
+    a different licensing arrangement.
+
+    Definitions
+
+        "Copyright Holder" means the individual(s) or organization(s)
+        named in the copyright notice for the entire Package.
+
+        "Contributor" means any party that has contributed code or other
+        material to the Package, in accordance with the Copyright Holder's
+        procedures.
+
+        "You" and "your" means any person who would like to copy,
+        distribute, or modify the Package.
+
+        "Package" means the collection of files distributed by the
+        Copyright Holder, and derivatives of that collection and/or of
+        those files. A given Package may consist of either the Standard
+        Version, or a Modified Version.
+
+        "Distribute" means providing a copy of the Package or making it
+        accessible to anyone else, or in the case of a company or
+        organization, to others outside of your company or organization.
+
+        "Distributor Fee" means any fee that you charge for Distributing
+        this Package or providing support for this Package to another
+        party.  It does not mean licensing fees.
+
+        "Standard Version" refers to the Package if it has not been
+        modified, or has been modified only in ways explicitly requested
+        by the Copyright Holder.
+
+        "Modified Version" means the Package, if it has been changed, and
+        such changes were not explicitly requested by the Copyright
+        Holder.
+
+        "Original License" means this Artistic License as Distributed with
+        the Standard Version of the Package, in its current version or as
+        it may be modified by The Perl Foundation in the future.
+
+        "Source" form means the source code, documentation source, and
+        configuration files for the Package.
+
+        "Compiled" form means the compiled bytecode, object code, binary,
+        or any other form resulting from mechanical transformation or
+        translation of the Source form.
+
+    Permission for Use and Modification Without Distribution
+
+    (1)  You are permitted to use the Standard Version and create and use
+    Modified Versions for any purpose without restriction, provided that
+    you do not Distribute the Modified Version.
+
+    Permissions for Redistribution of the Standard Version
+
+    (2)  You may Distribute verbatim copies of the Source form of the
+    Standard Version of this Package in any medium without restriction,
+    either gratis or for a Distributor Fee, provided that you duplicate
+    all of the original copyright notices and associated disclaimers.  At
+    your discretion, such verbatim copies may or may not include a
+    Compiled form of the Package.
+
+    (3)  You may apply any bug fixes, portability changes, and other
+    modifications made available from the Copyright Holder.  The resulting
+    Package will still be considered the Standard Version, and as such
+    will be subject to the Original License.
+
+    Distribution of Modified Versions of the Package as Source
+
+    (4)  You may Distribute your Modified Version as Source (either gratis
+    or for a Distributor Fee, and with or without a Compiled form of the
+    Modified Version) provided that you clearly document how it differs
+    from the Standard Version, including, but not limited to, documenting
+    any non-standard features, executables, or modules, and provided that
+    you do at least ONE of the following:
+
+        (a)  make the Modified Version available to the Copyright Holder
+        of the Standard Version, under the Original License, so that the
+        Copyright Holder may include your modifications in the Standard
+        Version.
+
+        (b)  ensure that installation of your Modified Version does not
+        prevent the user installing or running the Standard Version. In
+        addition, the Modified Version must bear a name that is different
+        from the name of the Standard Version.
+
+        (c)  allow anyone who receives a copy of the Modified Version to
+        make the Source form of the Modified Version available to others
+        under
+
+            (i)  the Original License or
+
+            (ii)  a license that permits the licensee to freely copy,
+            modify and redistribute the Modified Version using the same
+            licensing terms that apply to the copy that the licensee
+            received, and requires that the Source form of the Modified
+            Version, and of any works derived from it, be made freely
+            available in that license fees are prohibited but Distributor
+            Fees are allowed.
+
+    Distribution of Compiled Forms of the Standard Version
+    or Modified Versions without the Source
+
+    (5)  You may Distribute Compiled forms of the Standard Version without
+    the Source, provided that you include complete instructions on how to
+    get the Source of the Standard Version.  Such instructions must be
+    valid at the time of your distribution.  If these instructions, at any
+    time while you are carrying out such distribution, become invalid, you
+    must provide new instructions on demand or cease further distribution.
+    If you provide valid instructions or cease distribution within thirty
+    days after you become aware that the instructions are invalid, then
+    you do not forfeit any of your rights under this license.
+
+    (6)  You may Distribute a Modified Version in Compiled form without
+    the Source, provided that you comply with Section 4 with respect to
+    the Source of the Modified Version.
+
+    Aggregating or Linking the Package
+
+    (7)  You may aggregate the Package (either the Standard Version or
+    Modified Version) with other packages and Distribute the resulting
+    aggregation provided that you do not charge a licensing fee for the
+    Package.  Distributor Fees are permitted, and licensing fees for other
+    components in the aggregation are permitted. The terms of this license
+    apply to the use and Distribution of the Standard or Modified Versions
+    as included in the aggregation.
+
+    (8) You are permitted to link Modified and Standard Versions with
+    other works, to embed the Package in a larger work of your own, or to
+    build stand-alone binary or bytecode versions of applications that
+    include the Package, and Distribute the result without restriction,
+    provided the result does not expose a direct interface to the Package.
+
+    Items That are Not Considered Part of a Modified Version
+
+    (9) Works (including, but not limited to, modules and scripts) that
+    merely extend or make use of the Package, do not, by themselves, cause
+    the Package to be a Modified Version.  In addition, such works are not
+    considered parts of the Package itself, and are not subject to the
+    terms of this license.
+
+    General Provisions
+
+    (10)  Any use, modification, and distribution of the Standard or
+    Modified Versions is governed by this Artistic License. By using,
+    modifying or distributing the Package, you accept this license. Do not
+    use, modify, or distribute the Package, if you do not accept this
+    license.
+
+    (11)  If your Modified Version has been derived from a Modified
+    Version made by someone other than you, you are nevertheless required
+    to ensure that your Modified Version complies with the requirements of
+    this license.
+
+    (12)  This license does not grant you the right to use any trademark,
+    service mark, tradename, or logo of the Copyright Holder.
+
+    (13)  This license includes the non-exclusive, worldwide,
+    free-of-charge patent license to make, have made, use, offer to sell,
+    sell, import and otherwise transfer the Package with respect to any
+    patent claims licensable by the Copyright Holder that are necessarily
+    infringed by the Package. If you institute patent litigation
+    (including a cross-claim or counterclaim) against any party alleging
+    that the Package constitutes direct or contributory patent
+    infringement, then this Artistic License to you shall terminate on the
+    date that such litigation is filed.
+
+    (14)  Disclaimer of Warranty:
+    THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS
+    IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR
+    NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL
+    LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL
+    BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+    DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF
+    ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+    --------
+  """
+
+- GYP, located at tools/gyp, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- inspector_protocol, located at tools/inspector_protocol, is licensed as follows:
+  """
+    // Copyright 2016 The Chromium Authors. All rights reserved.
+    //
+    // Redistribution and use in source and binary forms, with or without
+    // modification, are permitted provided that the following conditions are
+    // met:
+    //
+    //    * Redistributions of source code must retain the above copyright
+    // notice, this list of conditions and the following disclaimer.
+    //    * Redistributions in binary form must reproduce the above
+    // copyright notice, this list of conditions and the following disclaimer
+    // in the documentation and/or other materials provided with the
+    // distribution.
+    //    * Neither the name of Google Inc. nor the names of its
+    // contributors may be used to endorse or promote products derived from
+    // this software without specific prior written permission.
+    //
+    // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- jinja2, located at tools/inspector_protocol/jinja2, is licensed as follows:
+  """
+    Copyright (c) 2009 by the Jinja Team, see AUTHORS for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+
+        * The names of the contributors may not be used to endorse or
+          promote products derived from this software without specific
+          prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- markupsafe, located at tools/inspector_protocol/markupsafe, is licensed as follows:
+  """
+    Copyright (c) 2010 by Armin Ronacher and contributors.  See AUTHORS
+    for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms of the software as well
+    as documentation, with or without modification, are permitted provided
+    that the following conditions are met:
+
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+
+    * The names of the contributors may not be used to endorse or
+      promote products derived from this software without specific
+      prior written permission.
+
+    THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+    CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
+    NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+    OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+    EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+    PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+    PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+    NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+    SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+    DAMAGE.
+  """
+
+- cpplint.py, located at tools/cpplint.py, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- ESLint, located at tools/node_modules/eslint, is licensed as follows:
+  """
+    Copyright JS Foundation and other contributors, https://js.foundation
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- babel-eslint, located at tools/node_modules/babel-eslint, is licensed as follows:
+  """
+    Copyright (c) 2014-2016 Sebastian McKenzie <sebmck@gmail.com>
+
+    MIT License
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- gtest, located at test/cctest/gtest, is licensed as follows:
+  """
+    Copyright 2008, Google Inc.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+        * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- nghttp2, located at deps/nghttp2, is licensed as follows:
+  """
+    The MIT License
+
+    Copyright (c) 2012, 2014, 2015, 2016 Tatsuhiro Tsujikawa
+    Copyright (c) 2012, 2014, 2015, 2016 nghttp2 contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- node-inspect, located at deps/node-inspect, is licensed as follows:
+  """
+    Copyright Node.js contributors. All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+  """
+
+- large_pages, located at src/large_pages, is licensed as follows:
+  """
+     Copyright (C) 2018 Intel Corporation
+
+     Permission is hereby granted, free of charge, to any person obtaining a copy
+     of this software and associated documentation files (the "Software"),
+     to deal in the Software without restriction, including without limitation
+     the rights to use, copy, modify, merge, publish, distribute, sublicense,
+     and/or sell copies of the Software, and to permit persons to whom
+     the Software is furnished to do so, subject to the following conditions:
+
+     The above copyright notice and this permission notice shall be included
+     in all copies or substantial portions of the Software.
+
+     THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+     OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+     FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
+     THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES
+     OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+     ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
+     OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- caja, located at lib/internal/freeze_intrinsics.js, is licensed as follows:
+  """
+     Adapted from SES/Caja - Copyright (C) 2011 Google Inc.
+     Copyright (C) 2018 Agoric
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+  """
+
+- brotli, located at deps/brotli, is licensed as follows:
+  """
+    Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- HdrHistogram, located at deps/histogram, is licensed as follows:
+  """
+    The code in this repository code was Written by Gil Tene, Michael Barker,
+    and Matt Warren, and released to the public domain, as explained at
+    http://creativecommons.org/publicdomain/zero/1.0/
+
+    For users of this code who wish to consume it under the "BSD" license
+    rather than under the public domain or CC0 contribution text mentioned
+    above, the code found under this directory is *also* provided under the
+    following license (commonly referred to as the BSD 2-Clause License). This
+    license does not detract from the above stated release of the code into
+    the public domain, and simply represents an additional license granted by
+    the Author.
+
+    -----------------------------------------------------------------------------
+    ** Beginning of "BSD 2-Clause License" text. **
+
+     Copyright (c) 2012, 2013, 2014 Gil Tene
+     Copyright (c) 2014 Michael Barker
+     Copyright (c) 2014 Matt Warren
+     All rights reserved.
+
+     Redistribution and use in source and binary forms, with or without
+     modification, are permitted provided that the following conditions are met:
+
+     1. Redistributions of source code must retain the above copyright notice,
+        this list of conditions and the following disclaimer.
+
+     2. Redistributions in binary form must reproduce the above copyright notice,
+        this list of conditions and the following disclaimer in the documentation
+        and/or other materials provided with the distribution.
+
+     THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+     AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+     IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+     ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+     LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+     INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+     CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+     THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- node-heapdump, located at src/heap_utils.cc, is licensed as follows:
+  """
+    ISC License
+
+    Copyright (c) 2012, Ben Noordhuis <info@bnoordhuis.nl>
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    === src/compat.h src/compat-inl.h ===
+
+    ISC License
+
+    Copyright (c) 2014, StrongLoop Inc.
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- rimraf, located at lib/internal/fs/rimraf.js, is licensed as follows:
+  """
+    The ISC License
+
+    Copyright (c) Isaac Z. Schlueter and Contributors
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+    IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- uvwasi, located at deps/uvwasi, is licensed as follows:
+  """
+    MIT License
+
+    Copyright (c) 2019 Colin Ihrig and Contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in all
+    copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+    SOFTWARE.
+  """
+
+
+
+
+

MICROSOFT SOFTWARE LICENSE TERMS

+
+
+
MICROSOFT VISUAL STUDIO CODE
+
+These license terms are an agreement between you and Microsoft Corporation (or based on where you live, one of its affiliates). They apply to the software named above. The terms also apply to any Microsoft services or updates for the software, except to the extent those have different terms.
+
+IF YOU COMPLY WITH THESE LICENSE TERMS, YOU HAVE THE RIGHTS BELOW.
+
+    1. INSTALLATION AND USE RIGHTS.
+        a. General. You may use any number of copies of the software to develop and test your applications, including deployment within your internal corporate network.
+        b. Demo use. The uses permitted above include use of the software in demonstrating your applications.
+        c. Third Party Components. The software may include third party components with separate legal notices or governed by other agreements, as may be described in the ThirdPartyNotices file accompanying the software.
+        d. Extensions. The software gives you the option to download other Microsoft and third party software packages from our extension marketplace or package managers. Those packages are under their own licenses, and not this agreement. Microsoft does not distribute, license or provide any warranties for any of the third party packages. By accessing or using our extension marketplace, you agree to the extension marketplace terms located at https://aka.ms/vsmarketplace-ToU.
+    2. DATA.
+        a. Data Collection. The software may collect information about you and your use of the software, and send that to Microsoft. Microsoft may use this information to provide services and improve our products and services. You may opt-out of many of these scenarios, but not all, as described in the product documentation located at https://code.visualstudio.com/docs/supporting/faq#_how-to-disable-telemetry-reporting. There may also be some features in the software that may enable you and Microsoft to collect data from users of your applications. If you use these features, you must comply with applicable law, including providing appropriate notices to users of your applications together with Microsoft’s privacy statement. Our privacy statement is located at https://go.microsoft.com/fwlink/?LinkID=824704. You can learn more about data collection and use in the help documentation and our privacy statement. Your use of the software operates as your consent to these practices.
+        b. Processing of Personal Data. To the extent Microsoft is a processor or subprocessor of personal data in connection with the software, Microsoft makes the commitments in the European Union General Data Protection Regulation Terms of the Online Services Terms to all customers effective May 25, 2018, at https://go.microsoft.com/?linkid=9840733.
+    3. UPDATES. The software may periodically check for updates and download and install them for you. You may obtain updates only from Microsoft or authorized sources. Microsoft may need to update your system to provide you with updates. You agree to receive these automatic updates without any additional notice. Updates may not include or support all existing software features, services, or peripheral devices. If you do not want automatic updates, you may turn them off by following the instructions in the documentation at https://go.microsoft.com/fwlink/?LinkID=616397.
+    4. FEEDBACK. If you give feedback about the software to Microsoft, you give to Microsoft, without charge, the right to use, share and commercialize your feedback in any way and for any purpose. You will not give feedback that is subject to a license that requires Microsoft to license its software or documentation to third parties because we include your feedback in them. These rights survive this agreement.
+    5. SCOPE OF LICENSE. This license applies to the Visual Studio Code product. Source code for Visual Studio Code is available at https://github.com/Microsoft/vscode under the MIT license agreement. The software is licensed, not sold. This agreement only gives you some rights to use the software. Microsoft reserves all other rights. Unless applicable law gives you more rights despite this limitation, you may use the software only as expressly permitted in this agreement. In doing so, you must comply with any technical limitations in the software that only allow you to use it in certain ways. You may not
+        reverse engineer, decompile or disassemble the software, or otherwise attempt to derive the source code for the software except and solely to the extent required by third party licensing terms governing use of certain open source components that may be included in the software;
+        remove, minimize, block or modify any notices of Microsoft or its suppliers in the software;
+        use the software in any way that is against the law;
+        share, publish, rent or lease the software, or provide the software as a stand-alone offering for others to use.
+    6. SUPPORT SERVICES. Because this software is “as is,” we may not provide support services for it.
+    7. ENTIRE AGREEMENT. This agreement, and the terms for supplements, updates, Internet-based services and support services that you use, are the entire agreement for the software and support services.
+    8. EXPORT RESTRICTIONS. You must comply with all domestic and international export laws and regulations that apply to the software, which include restrictions on destinations, end-users, and end use. For further information on export restrictions, see https://www.microsoft.com/exporting.
+    9. APPLICABLE LAW. If you acquired the software in the United States, Washington law applies to interpretation of and claims for breach of this agreement, and the laws of the state where you live apply to all other claims. If you acquired the software in any other country, its laws apply.
+    10. CONSUMER RIGHTS; REGIONAL VARIATIONS. This agreement describes certain legal rights. You may have other rights, including consumer rights, under the laws of your state or country. Separate and apart from your relationship with Microsoft, you may also have rights with respect to the party from which you acquired the software. This agreement does not change those other rights if the laws of your state or country do not permit it to do so. For example, if you acquired the software in one of the below regions, or mandatory country law applies, then the following provisions apply to you:
+        a. Australia. You have statutory guarantees under the Australian Consumer Law and nothing in this agreement is intended to affect those rights.
+        b. Canada. If you acquired this software in Canada, you may stop receiving updates by turning off the automatic update feature, disconnecting your device from the Internet (if and when you re-connect to the Internet, however, the software will resume checking for and installing updates), or uninstalling the software. The product documentation, if any, may also specify how to turn off updates for your specific device or software.
+        c. Germany and Austria.
+            Warranty. The properly licensed software will perform substantially as described in any Microsoft materials that accompany the software. However, Microsoft gives no contractual guarantee in relation to the licensed software.
+            Limitation of Liability. In case of intentional conduct, gross negligence, claims based on the Product Liability Act, as well as, in case of death or personal or physical injury, Microsoft is liable according to the statutory law.
+
+        Subject to the foregoing clause (ii), Microsoft will only be liable for slight negligence if Microsoft is in breach of such material contractual obligations, the fulfillment of which facilitate the due performance of this agreement, the breach of which would endanger the purpose of this agreement and the compliance with which a party may constantly trust in (so-called "cardinal obligations"). In other cases of slight negligence, Microsoft will not be liable for slight negligence.
+    11. DISCLAIMER OF WARRANTY. The software is licensed “as-is.” You bear the risk of using it. Microsoft gives no express warranties, guarantees or conditions. To the extent permitted under your local laws, Microsoft excludes the implied warranties of merchantability, fitness for a particular purpose and non-infringement.
+
+    12. LIMITATION ON AND EXCLUSION OF DAMAGES. You can recover from Microsoft and its suppliers only direct damages up to U.S. $5.00. You cannot recover any other damages, including consequential, lost profits, special, indirect or incidental damages.
+
+    This limitation applies to (a) anything related to the software, services, content (including code) on third party Internet sites, or third party applications; and (b) claims for breach of contract, breach of warranty, guarantee or condition, strict liability, negligence, or other tort to the extent permitted by applicable law.
+
+    It also applies even if Microsoft knew or should have known about the possibility of the damages. The above limitation or exclusion may not apply to you because your state or country may not allow the exclusion or limitation of incidental, consequential or other damages.
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/devonfw-ide-usage.html b/docs/ide/1.0/devonfw-ide-usage.html new file mode 100644 index 00000000..10738a76 --- /dev/null +++ b/docs/ide/1.0/devonfw-ide-usage.html @@ -0,0 +1,2169 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Usage

+
+ +
+

This section explains the usage of devonfw-ide according to your role:

+
+
+
    +
  • +

    Everybody should read and follow the usage for a developer.

    +
  • +
  • +

    In case you want to administrate devonfw-ide settings for your project, you should also read the usage for the ide-admin.

    +
  • +
+
+
+

Developer

+
+

As a developer you are supported to setup your IDE automated and fast while you can have a nice cup of coffee (after you provided settings-URL and accepted the license). +You only need the settings URL from your ide-admin. +Experienced developers can directly call setup «settings-URL». +Otherwise if you just call setup (e.g. by double-clicking it), you can enter it when you are prompted for Settings URL (using copy&paste to avoid typos).

+
+
+

Note: devonfw-ide supports autocompletion (since 2021.04.001). Currently this only works in bash (on windows use git bash). Simply type devon and hit [Tab] to get completion.

+
+
+
+

Update

+
+

To update your IDE (if instructed by your ide-admin), you only need to run the following command:

+
+
+
+
devon ide update
+
+
+
+

Please note that windows is using file-locking which can have ugly side-effects. +To be safe, you should have your IDE tools shut down before invoking the above update command. +E.g. if a tool needs to be updated, the old installation folder will be moved to a backup and the new version is installed on top. +If there are windows file locks in place this can fail and mess up things. +You can still delete the according installation from your software folder and rerun devon ide update if you ran into this error.

+
+
+
+

Working with multiple workspaces

+
+

If you are working on different branches in parallel you typically want to use multiple workspaces.

+
+
+
    +
  1. +

    Go to the workspaces folder in your ${DEVON_IDE_HOME} and create a new folder with the name of your choice (e.g. release2.1).

    +
  2. +
  3. +

    Check out (git clone …​) the according projects and branch into that workspace folder.

    +
  4. +
  5. +

    Open a shell in that new workspace folder (cd to it) and according to your IDE run e.g. eclipse, vscode, or intellij to create your workspace and launch the IDE. You can also add the parameter create-script to the IDE commandlet in order to create a launch-script for your IDE.

    +
  6. +
+
+
+

You can have multiple instances of eclipse running for each workspace in parallel. To distinguish these instances you will find the workspace name in the title of eclipse.

+
+
+
+

Admin

+
+

You can easily customize and configure devonfw-ide for the requirements of your project. +In order to do so, you need to create your own project-specific settings git repository and provide the URL to all developers for the setup. +With tools such as gitlab, bitbucket or github every developer can easily propose changes and improvements. +However, we suggest that one team member is responsible to ensure that everything stays consistent and works. +We will call this person the ide-admin of your project.

+
+
+

The following are the suggested step-by-step instructions how an ide-admin should prepare devonfw-ide for his new project:

+
+
+
    +
  1. +

    Fork ide-settings to a git repository specific for your project (e.g. a new project in the gitlab of your production-line instance). In case you are using github, all you need to do is use the Fork button. In other cases simply create a new and empty git repository and clone this to your machine. Then add the default ide-settings as origin, fetch and pull from it:

    +
    +
    +
    git remote add upstream https://github.com/devonfw/ide-settings.git
    +git fetch upstream
    +git pull upstream master
    +git push
    +
    +
    +
    +

    Now you should have a full fork as a copy of the settings git repo with all its history that is ready for upstream merges.

    +
    +
  2. +
  3. +

    Study the structure of this git repository to understand where to find which configuration.

    +
  4. +
  5. +

    Study the configuration and understand that general settings can be tweaked in the toplevel devon.properties file of your settings git repository.

    +
  6. +
  7. +

    Configure the tools and their versions for your project. Here is an example:

    +
    +
    +
    DEVON_IDE_TOOLS=(java mvn eclipse)
    +ECLIPSE_VERSION=2020-06
    +##use e.g. 8u242b08 for Java 8
    +#JAVA_VERSION=8u242b08
    +JAVA_VERSION=11.0.5_10
    +MAVEN_VERSION=3.6.2
    +
    +
    +
    +

    This way you will take over control of the tools and their versions for every developer in your project team and ensure that things get reproducible.

    +
    +
  8. +
  9. +

    In case you need a proprietary or unsupported tool, you can study how to include custom tools.

    +
  10. +
  11. +

    In case you have very restrictive policies about downloading tools from the internet, you can create and configure a software repository for your project or company.

    +
  12. +
  13. +

    Some of the tools (especially the actual IDEs) allow extensions via plugins. You can customize them to your needs for eclipse, VS code, or intelliJ.

    +
  14. +
  15. +

    In your settings git repository you will find a projects folder. Here you will find configurations files for every git project relevant for your actual project. Feel free to create new projects for your needs and delete the devonfw specific default projects. The projects documentation will explain you how to do this.

    +
  16. +
  17. +

    For every IDE you will also find an according folder in your settings git repository. Here are the individual configuration settings for that IDE. You can change them by directly editing the according configuration files directly with a text-editor in your settings git repository. However, this is a really complex way and will take you a lot of time to find the right file and property to tweak for your actual need. Instead we suggest to study +how to customize IDE specific settings.

    +
  18. +
  19. +

    You may also create new sub-folders in your settings git repository and put individual things according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth to share in your team. However, to share and maintain knowledge we recommend to use a wiki instead.

    +
  20. +
  21. +

    You may want to customize the Eclipse spellchecker dictionary for your project and your language.

    +
  22. +
+
+
+

All described in the above steps (except the first one) can be used to manage and update the configuration during the project lifecycle. +However, when you have made changes, especially in a larger project, please consider the following best-practices to avoid that a large team gets blocked by a non-functional IDE:

+
+
+
    +
  • +

    Commit your changes to a feature-branch.

    +
  • +
  • +

    First test the changes yourself.

    +
  • +
  • +

    If all works as expected, pick a pilot user of the team to test the changes from the feature branch (go to settings folder, git fetch, git checkout -t origin/feature/«name», devon ide update).

    +
  • +
  • +

    Only after that works well for a couple of days, inform the entire team to update.

    +
  • +
+
+
+
+

Announce changes to your team

+
+

In order to roll out the perfectly configured devonfw-ide to your project initially or when new members join, you only have to provide the Settings URL to the developers of your team. +You can also provide a specific branch with Settings URL#branch to use variations of common settings or to test new settings before making them public to the team.

+
+
+

After you changed and tested your settings git repository (main branch), you only need to announce this to your developers (e.g. via email or some communication tool) so that they can run devon ide update and automatically get up-to-date with the latest changes (see update).

+
+
+

In case you want to go to a new version of devonfw-ide itself, developers have to call devon ide update scripts.

+
+
+ +
+
+

Configuration

+
+

The devonfw-ide aims to be highly configurable and flexible. The configuration of the devon command and environment variables takes place via devon.properties files. The following list shows these configuration files in the order they are loaded so files can override variables from files above in the list:

+
+
+
    +
  1. +

    build in defaults (for JAVA_VERSION, ECLIPSE_PLUGINS, etc.)

    +
  2. +
  3. +

    ~/devon.properties - user specific global defaults (on windows in %USERPROFILE%/devon.properties)

    +
  4. +
  5. +

    scripts/devon.properties - defaults provided by devonfw-ide. Never directly modify this file!

    +
  6. +
  7. +

    devon.properties - vendor variables for custom distributions of devonfw-ide-scripts, may e.g. tweak SETTINGS_PATH or predefine SETTINGS_URL.

    +
  8. +
  9. +

    settings/devon.properties (${SETTINGS_PATH}/devon.properties) - project specific configurations from settings.

    +
  10. +
  11. +

    workspaces/${WORKSPACE}/devon.properties - optional workspace specific configurations (especially helpful in projects using docker).

    +
  12. +
  13. +

    conf/devon.properties - user specific configurations (e.g. M2_REPO=~/.m2/repository). During setup this file is created by copying a template from ${SETTINGS_PATH}/devon/conf/devon.properties.

    +
  14. +
  15. +

    settings/projects/*.properties - properties to configure project checkout and import

    +
  16. +
+
+
+
+

devon.properties

+
+

The devon.properties files allow to define environment variables in a simple and OS independent way:

+
+
+
    +
  • +

    # comments begin with a hash sign (#) and are ignored

    +
  • +
  • +

    variable_name=variable_value with space etc.

    +
  • +
  • +

    variable_name=${predefined_variable}/folder_name

    +
    +

    variable values can refer to other variables that are already defined, which will be resolved to their value. You have to use ${…​} syntax to make it work on all platforms (never use %…​%, $…​, or $(…​) syntax in devon.properties files).

    +
    +
  • +
  • +

    export exported_variable=this value will be exported in bash, in windows CMD the export prefix is ignored

    +
  • +
  • +

    variable_name=

    +
    +

    this will unset the specified variable

    +
    +
  • +
  • +

    variable_name=~/some/path/and.file

    +
    +

    tilde is resolved to your personal home directory on any OS including windows.

    +
    +
  • +
  • +

    array_variable=(value1 value2 value3)

    +
    +

    This will only work properly in bash worlds but as no arrays are used in CMD world of devonfw-ide it does not hurt on windows.

    +
    +
  • +
  • +

    Please never surround values with quotes (var="value")

    +
  • +
  • +

    This format is similar to Java *.properties but does not support advanced features as unicode literals, multi-lined values, etc.

    +
  • +
+
+
+

In order to know what to configure, have a look at the available variables.

+
+
+

Please only tweak configurations that you need to change and take according responsibility. There is a price to pay for flexibility, which means you have to be careful what you do.

+
+
+

Further, you can configure maven via conf/settings.xml. To configure your IDE such as eclipse or vscode you can tweak the settings.

+
+
+ +
+
+

Variables

+
+

The devonfw-ide defines a set of standard variables to your environment for configuration via variables[.bat] files. +These environment variables are described by the following table. +Those variables printed bold are also exported in your shell (except for windows CMD that does not have such concept). Variables with the value - are not set by default but may be set via configuration to override defaults. +Please note that we are trying to minimize any potential side-effect from devonfw-ide to the outside world by reducing the number of variables and only exporting those that are required.

+
+
+
Variables of devonfw-ide
+

|=== +|Variable|Value|Meaning +|DEVON_IDE_HOME|e.g. /projects/my-project|The top level directory of your devonfw-ide structure. +|PATH|$PATH:$DEVON_IDE_HOME/software/java:…​|Your system path is adjusted by devon command. +|DEVON_HOME_DIR|~|The platform independent home directory of the current user. In some edge-cases (e.g. in cygwin) this differs from ~ to ensure a central home directory for the user on a single machine in any context or environment. +|DEVON_IDE_TOOLS|(java mvn node npm)|List of tools that should be installed and upgraded by default for your current IDE. +|DEVON_IDE_CUSTOM_TOOLS|-|List of custom tools that should be installed additionally. See software for further details. +|DEVON_CREATE_START_SCRIPTS|(eclipse vscode)|List of IDEs that shall be used by developers in the project and therefore start-scripts are created on setup. +|DEVON_OLD_PATH|…​|A "backup" of PATH before it was extended by devon to allow recovering it. Internal variable that should never be set or tweaked. +|WORKSPACE|main|The workspace you are currently in. Defaults to main if you are not inside a workspace. Never touch this variable in any variables file. +|WORKSPACE_PATH|$DEVON_IDE_HOME/workspaces/$WORKSPACE|Absolute path to current workspace. Never touch this variable in any variables file. +|JAVA_HOME|$DEVON_IDE_HOME/software/java|Path to JDK +|SETTINGS_PATH|$DEVON_IDE_HOME/settings|Path to your settings. To keep oasp4j-ide legacy behaviour set this to $DEVON_IDE_HOME/workspaces/main/development/settings. +|M2_REPO|$DEVON_IDE_HOME/conf/.m2/repository|Path to your local maven repository. For projects without high security demands, you may change this to the maven default ~/.m2/repository and share your repository among multiple projects. +|MAVEN_HOME|$DEVON_IDE_HOME/software/maven|Path to Maven +|MAVEN_OPTS|-Xmx512m -Duser.home=$DEVON_IDE_HOME/conf|Maven options +|DEVON_SOFTWARE_REPOSITORY|-|Project specific or custom software-repository. 
+|DEVON_SOFTWARE_PATH|-|Globally shared user-specific local software installation location. +|ECLIPSE_VMARGS|-Xms128M -Xmx768M -XX:MaxPermSize=256M|JVM options for Eclipse +|deprecated: ECLIPSE_PLUGINS|-|Array with "feature groups" and "update site URLs" to customize required eclipse plugins. Deprecated - see Eclipse plugins. +|«TOOL»_VERSION|-|The version of the tool «TOOL» to install and use (e.g. ECLIPSE_VERSION or MAVEN_VERSION). +|EXTRA_JAVA_VERSION|-|An additional (newer) version of java that will be used to run java-based IDEs (e.g. eclipse or intellij). +|«TOOL»_BUILD_OPTS|e.g.clean install|The arguments provided to the build-tool «TOOL» in order to run a build. +|«TOOL»_RELEASE_OPTS|e.g.clean deploy -Dchangelist= -Pdeploy|The arguments provided to the build-tool «TOOL» in order to perform a release build. +|DEVON_IDE_TRACE||If value is not an empty string, the devonfw-ide scripts will trace each script line executed. For bash two lines output: before and again after expansion. ATTENTION: This is not a regular variable working via devon.properties. Instead manually do export DEVON_IDE_TRACE=true in bash or DEVON_IDE_TRACE=true in windows CMD before running a devon command to get a trace log that you can provide to experts in order to trace down a bug and see what went wrong. +|===

+
+
+ +
+
+

Devon CLI

+
+

The devonfw-ide is shipped with a central command devon. The setup will automatically register this command so it is available in any shell on your system. This page describes the Command Line Interface (CLI) of this command.

+
+
+
+

Devon

+
+

Without any argument the devon command will determine your DEVON_IDE_HOME and setup your environment variables automatically. In case you are not inside of a devonfw-ide folder the command will echo a message and do nothing.

+
+
+
+
[/]$ devon
+You are not inside a devon IDE installation: /
+[/]$ cd /projects/my-project/workspaces/test/my-git-repo
+[my-git-repo]$ devon
+devonfw-ide environment variables have been set for /projects/my-project in workspace main
+[my-git-repo]$ echo $DEVON_IDE_HOME
+/projects/devon
+[my-git-repo]$ echo $JAVA_HOME
+/projects/my-project/software/java
+
+
+
+
+

Commandlets

+
+

The devon command supports a pluggable set of commandlets. Such commandlet is provided as first argument to the devon command and may take additional arguments:

+
+
+

devon «commandlet» [«arg»]*

+
+
+

Technically, a commandlet is a bash script located in $DEVON_IDE_HOME/scripts/command. So if you want to integrate another tool with devonfw-ide we are awaiting your pull-request. +Every commandlet takes the following generic arguments:

+
+
+
Generic arguments of every commandlet
+

|=== +|Argument(s) |Meaning +|-b or --batch |run in non-interactive mode (do not ask any questions). +|-q or --quiet |be quiet and avoid output. +|===

+
+
+
+

Command-wrapper

+
+

For many commandlets the devon command acts as a wrapper. +Similar to mvnw or gradlew you can use it as a proxy command. +Therefore devon mvn clean install will be the same as mvn clean install. +The benefit when using devon as wrapper is that it will even work when the command (mvn, node, npm, etc.) is not on your PATH variable or even not yet installed. +We see the main benefit in this for writing portable scripts that you may commit to your git repository and that will then run everywhere and will lazily install the required tools on the fly. +In your daily usage you can and surely should avoid to always type devon as prefix to every command. +However, when you automate and want to avoid "command not found" errors, you can simply prefix the command with devon.

+
+
+
+

Commandlet overview

+
+

The following commandlets are currently available:

+
+
+ +
+ +
+
build
+
+

The build commandlet is an abstraction of build systems like maven, gradle, yarn, npm, etc. +It will auto-detect your build-system (via existence of files like pom.xml, package.json, etc.). According to this detection, it will simply delegate to the according commandlet of the specific build system. If that build-system is not yet available it will be downloaded and installed automatically.

+
+
+

So devon build allows users to build any project without bothering about the build-system. Further specific build options can be configured per project. This makes devon build a universal part of every definition of done. Before pushing your changes, please always run the following command to verify the build:

+
+
+

devon build

+
+
+

You may also supply additional arguments as devon build «args». This will simply delegate these arguments to the detected build command (e.g. call mvn «args»).

+
+ +
+
+
Docker
+
+

The Docker commandlet allows to install and use Docker. +On Windows WSL 2 (Windows Subsystem for Linux) has to be installed properly as a prerequisite.

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon docker «args») are explained by the following table:

+
+
+
Usage of devon docker
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Docker (install and verify) as per above flow. +|«args» |call docker with the specified arguments. Call docker help for details or use docker directly as preferred. («args») +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

The Docker commandlet will install Docker automatically. +Please note that besides the sandbox concept of devonfw-ide this is a global installation on your system. +When uninstalling devonfw-ide, you may have to manually uninstall Docker and Kubernetes if you do not need it anymore.

+
+
+
+
requirements
+
+

Running Docker and especially Kubernetes on your machine in the background will require quite some resources. +This will allocate at least 2GB of additional RAM.

+
+
+

You will need at least 8GB of total RAM while we recommend to use 16GB+.

+
+
+

You may also tune and scale it to your needs. +When using Docker Desktop (Windows or MacOS) simply go to the resources tab in the settings. +It will depend on your usage frequency if you want to have it running in the background all the time. +This is a balance between resource utilization and convenience. +If you use Docker and Kubernetes on your local machine on a daily basis this makes sense.

+
+
+

In case you only use Docker rarely, you can save resources by stopping it when not needed after it has been installed.

+
+
+
+
Windows and macOS
+
+

To enable or disable autostart, you can launch Docker Desktop on Windows or MacOS and go to the Preferences (gear icon in the title bar). Then in the General tab you can check or uncheck the option Start Docker Desktop when you login (see also here). When autostart is disabled and you launch Docker Desktop it will notice and ask you to start the service or do this automatically for you. +On Windows you can also manually tweak this:

+
+
+
    +
  • +

    Hit [windows][r]

    +
  • +
  • +

    Enter services.msc

    +
  • +
  • +

    Confirm with OK

    +
  • +
  • +

    In the services app search for the Docker Desktop Service in the list and select it.

    +
  • +
  • +

    Now you can start or stop the service by clicking on the according link text.

    +
  • +
  • +

    Also when right clicking on Docker Desktop Service and selecting Options from the context-menu, you can change the start type to automatic or manual.

    +
  • +
+
+
+
+
== Mac M1
+
+

In case you have a new Mac with M1 CPU, we automatically download and install the according ARM version of Docker Desktop for macOS. +However, if you use Docker and search for images you may end up with errors like:

+
+
+
+
docker: no matching manifest for linux/arm64/v8 in the manifest list entries.
+
+
+
+

So with M1 CPU you may need to add --platform linux/x86_64 as option to your Docker command to find the expected container image.

+
+
+
+
Linux
+
+

There is no Docker Desktop for Linux. +As Docker initially comes from the Linux world, it is easy to set it up on a Linux machine and use it from the commandline. +Therefore we do not install a GUI for you in case you are a Linux user. +In case you need a GUI for Docker and Kubernetes on Linux you can choose from the following options:

+
+
+ +
+
+
+
usage
+
+

Once installed via setup, you can run Docker directly from any shell of your OS directly. +Run docker help to get started and use the online documentations and resources on the web to get familiar with Docker. +It is not our intention to repeat this here.

+
+
+

Please note that the docker commandlet is a command wrapper.

+
+ +
+
+
eclipse
+
+

The eclipse commandlet allows to install, configure, and launch the Eclipse IDE. +To launch eclipse for your current workspace and devonfw-ide installation simply run: +devon eclipse

+
+
+

You may also supply additional arguments as devon eclipse «args». These are explained by the following table:

+
+
+
Usage of devon eclipse
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then the command will be invoked for each workspace +|setup |setup Eclipse (install or update) +|add-plugin «id» [«url»]|install an additional plugin +|run |launch Eclipse (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add |reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+

There are variables that can be used for Eclipse. These are explained by the following table:

+
+
+
Variables of devonfw-ide for Eclipse
+

|== == == == == == == == == == == = +|Variable|Meaning +|ECLIPSE_VERSION|The version of the tool Eclipse to install and use. +|ECLIPSE_EDITION_TYPE|The edition of the tool Eclipse to install and use. You can choose between Java for standard edition or JEE for enterprise edition. +|*EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with Eclipse you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder eclipse/plugins (click on this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example tmterminal.properties:

+
+
+
+
plugin_url=http://download.eclipse.org/tm/terminal/marketplace
+plugin_id=org.eclipse.tm.terminal.feature.feature.group,org.eclipse.tm.terminal.view.feature.feature.group,org.eclipse.tm.terminal.control.feature.feature.group,org.eclipse.tm.terminal.connector.ssh.feature.feature.group,org.eclipse.tm.terminal.connector.telnet.feature.feature.group
+plugin_active=true
+
+
+
+

The variables are defined as follows:

+
+
+
    +
  • +

    plugin_url defines the URL of the Eclipse update site of the plugin

    +
  • +
  • +

    plugin_id defines the feature group ID(s) to install. To install multiple features/plugins provide a comma-separated list of IDs. If you want to customize devonfw-ide with new plugins you can first install them manually and then go to About Eclipse > Installation Details then you can filter for your newly installed plugin and find the values in the Id column. Copy & paste them from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise, developers can still install the plugin manually via devon eclipse add-plugin «plugin-name» from the config file settings/eclipse/plugins/«plugin-name».properties. See the settings/eclipse/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. This e.g. applies for devstyle that allows a real dark mode for eclipse and tunes the theming and layout of Eclipse in general. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually.

+
+
+

As the maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
legacy plugin config
+
+

For downward compatibility we still support the deprecated legacy configuration if the folder settings/eclipse/plugins does not exist: +The project configuration typically defines the plugins that will be installed via ECLIPSE_PLUGINS variable. Otherwise defaults from this eclipse commandlet will apply. +Be aware that this comes at your own risk and sometimes plugins can conflict and break your IDE.

+
+
+

Here is an example how a project can configure the plugins in its devon.properties inside the settings:

+
+
+
+
ECLIPSE_PLUGINS=("AnyEditTools.feature.group" "https://raw.githubusercontent.com/iloveeclipse/plugins/latest/" "com.ess.regexutil.feature.group" "http://regex-util.sourceforge.net/update/")
+
+
+
+

For the above listed plugins you can also use the short form:

+
+
+
+
ECLIPSE_PLUGINS=("anyedit" "" "regexutil" "")
+
+
+
+

Of course you may also mix plugin IDs with fully qualified plugins.

+
+
+
+
dictionary
+
+

Eclipse already comes with a build-in spellchecker. This is very helpful when writing comments. The default settings of devonfw-ide ship with a project specific dictionary file and according configurations to enable spellchecking and configuring this dictionary. +When typing JavaDoc, inline comments or other texts the spellchecker will underline unknown words in red. +If your cursor is located at such a word you can hit [Ctrl][1] to get a context menu with additional options. +There you can either choose similar correct words to correct a typo or you may even add the word (maybe a new business term) to your local dictionary.

+
+
+
+"Eclipse spellchecker" +
+
+
+

In the latter case, you should commit the changes to your settings so that it will be available to your entire team. +For further details about committing changes to the settings please consult the admin usage.

+
+
+
+
non-english dictionary
+
+

In case your project has to write documentation or text in languages other than English, you might want to prefill your project dictionary for that language. +Here we collect a list of such dictionaries that you can download and merge into your project dictionary:

+
+
+ +
+ +
+
+
gradle
+
+

The gradle commandlet allows to install, configure, and launch gradle. It is similar to gradle-wrapper. So calling devon gradle «args» is more or less the same as calling gradle «args» but with the benefit that the version of gradle preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon gradle «args») are explained by the following table:

+
+
+
Usage of devon gradle
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup gradle (install and verify), configurable via GRADLE_VERSION +|«args» |run gradle with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
help
+
+

The help commandlet provides help for the CLI.

+
+
+
Usage of devon help
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |Print general help +|«command» |Print help for the commandlet «command». +|== == == == == == == == == == == =

+
+
+

Please note that devon help «command» will do the same as devon «command» help.

+
+ +
+
+
ide
+
+

The ide commandlet manages your devonfw-ide. +You need to supply additional arguments as devon ide «args». These are explained by the following table:

+
+
+
Usage of devon ide
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup [«SETTINGS_URL»] |setup devonfw-ide (cloning the settings from the given URL, optionally from specific branch URL#branch) +|update [«package»] |update devonfw-ide +|update scripts [to «version»] |update devonfw-ide +|uninstall |uninstall devonfw-ide (if you want to remove it entirely from your system) +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

Run devon ide setup to initially setup your devonfw-ide. It is recommended to run the setup script in the top-level directory ($DEVON_IDE_HOME). However, in case you want to skip some system specific integration, you may also run this command directly instead. The setup only needs to be called once after a new devonfw-ide instance has been created. It will follow this process:

+
+
+
    +
  • +

    install the devon command on your system (if not already installed).

    +
  • +
  • +

    clone the settings (you may provide a git URL directly as argument or you will be prompted for it).

    +
  • +
  • +

    install all required software from DEVON_IDE_TOOLS variable (if not already installed).

    +
  • +
  • +

    configure all these tools

    +
  • +
  • +

    create IDE launch scripts

    +
  • +
  • +

    perform OS specific system integration such as Windows Explorer integration (only done from setup script and not from devon ide setup)

    +
  • +
+
+
+
+
update
+
+

Run devon ide update to update your devonfw-ide. This will check for updates and install them automatically. +The optional extra argument («package») behaves as follows:

+
+
+
    +
  • +

    scripts: check if a new version of devonfw-ide-scripts is available. If so it will be downloaded and installed. As Windows is using file-locks, it is tricky to update a script while it is executed. Therefore, we update the scripts folder as an async background task and have to abort further processing at this point on windows as a workaround.

    +
  • +
  • +

    settings: update the settings (git pull).

    +
  • +
  • +

    software: update the software (e.g. if versions have changed via scripts or settings update).

    +
  • +
  • +

    projects: update the projects (checkout and import repositories into workspace/IDEs).

    +
  • +
  • +

    all: do all the above sequentially.

    +
  • +
  • +

    none: settings and software are updated by default if no extra argument is given. This is the regular usage for project developers. Only perform an update of scripts when you are requested to do so by your technical lead. Bigger projects especially need to test updates before rolling them out to the entire team. If developers always updated the latest release of the scripts which is released globally, some project functionality would break causing problems and extra efforts in the teams.

    +
  • +
+
+
+

In order to update to a specific version of scripts an explicit version can be specified after the additional to argument:

+
+
+
+
devon ide update scripts to 3.1.99
+
+
+
+

The above example will update to the exact version 3.1.99 no matter if this is an upgrade or a downgrade of your current installed version. +If you just use devon ide update scripts then the latest available version will be installed. In larger teams it is recommended to communicate exact version updates to avoid that a new release can interfere and break anything. Therefore, some pilot user will test a new version for the entire team and, only after a successful test, they will communicate to the team to update to that exact version by providing the complete command as in the above example.

+
+
+
+
uninstall
+
+

We hope you love devonfw-ide. However, if you don’t and want to get rid of it entirely and completely remove all integration, you can use this command:

+
+
+
+
devon ide uninstall
+
+
+
+

This will remove devonfw-ide from all central places of your OS (user home directory such as scripts, .devon, .bashrc, as well as windows registry, etc.). +However, it will not remove your current installations (or shared software folder). So after running this uninstall, simply remove your DEVON_IDE_HOME directory of all devonfw-ide installations and potential shared software folder. You may also want to clean up your ~/Downloads directory from files downloaded by devonfw-ide. We do not automate this as deleting a directory is a very simple manual step and we do not want to take responsibility for severe data loss if your workspaces contained valuable work.

+
+ +
+
+
intellij
+
+

The intellij commandlet allows to install, configure, and launch IntelliJ. +To launch IntelliJ for your current workspace and devonfw-ide installation, simply run: +devon intellij

+
+
+

You may also supply additional arguments as devon intellij «args». These are explained by the following table:

+
+
+
Usage of devon intellij
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then the command will be invoked for each workspace +|setup |setup IntelliJ (install or update) +|add-plugin «id»|install an additional plugin +|run |launch IntelliJ (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+

There are variables that can be used for IntelliJ. These are explained by the following table:

+
+
+
Variables of devonfw-ide for intelliJ
+

|== == == == == == == == == == == = +|Variable|Meaning +|INTELLIJ_VERSION|The version of the tool IntelliJ to install and use. +|INTELLIJ_EDITION_TYPE|The edition of the tool IntelliJ to install and use. The value C means Community edition and the value U means Ultimate edition. The Ultimate edition requires a license. The user has to buy the license separately and it is not part of devonfw-ide. The devonfw-ide only supports download and installation. +|*EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with IntelliJ you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder intellij/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example scala.properties:

+
+
+
+
plugin_id=org.intellij.scala
+plugin_active=false
+
+
+
+

The variables are defined as follows:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins use the search on https://plugins.jetbrains.com/idea_ce to find the plugin of your choice. Select the tab Versions and click on a version in the list. The plugin ID is displayed in the upper right corner. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon intellij add-plugin «plugin_id».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of IntelliJ. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+ +
+
+
ionic
+
+

The ionic commandlet allows to install, configure, and launch ionic (ionic-cli). Calling devon ionic «args» is more or less the same as calling ionic «args» but with some advanced features and ensuring that ionic is properly set up for your project.

+
+
+

The arguments (devon ionic «args») are explained by the following table:

+
+
+
Usage of devon ionic
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup yarn (install and verify), configurable via YARN_VERSION +|create |Create a new devon4ng ionic project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ionic with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
jasypt
+
+

The jasypt commandlet allows to install jasypt and encrypt or decrypt secrets using strong encryption given a secure masterpassword. See also devon4j password encryption guide for further details.

+
+
+

The arguments (devon jasypt «args») are explained by the following table:

+
+
+
Usage of devon jasypt
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup jasypt (install and verify), configurable via JASYPT_VERSION +|encrypt |Encrypt a secret with a masterpassword +|decrypt |Decrypt an encrypted secret with a masterpassword +|== == == == == == == == == == == =

+
+
+
+
example
+
+
+
devon jasypt encrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: secret
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: secret
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+devon jasypt decrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+secret
+
+
+ +
+
+
java
+
+

The java commandlet allows to install and setup Java. Also it supports devon4j. +The arguments (devon java «args») are explained by the following table:

+
+
+
Usage of devon java
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup OpenJDK (install or update and verify), configurable via JAVA_VERSION (e.g. 8u242b08 or 11.0.6_10) +|create «args» |create a new Java project based on devon4j application template. If a single argument is provided, this is the package name and is automatically split into groupId and artifactId. Use -DdbType=«db» to choose the database (hana, oracle, mssql, postgresql, mariadb, mysql, h2, hsqldb). Any option starting with dash is passed as is. +|migrate [from «version»] [single] |migrate a devon4j project to the latest version. If for some reasons the current devonfw version can not be auto-detected you may provide it manually after the 'from' argument. Also the 'single' option allows to migrate only to the next available version. +|cicd «args» |generate cicd files for the current devon4java project +|== == == == == == == == == == == =

+
+
+

Since 2021.12.003 an extra version of Java can be configured via EXTRA_JAVA_VERSION variable. This can be used to launch your IDE with a different (newer) version of Java but keeping the build of your project stable.

+
+
+
+
create
+
+

Examples for create a new devon4j application:

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create -Dversion=0.0.1-alpha1 com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 0.0.1-alpha1, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group demo-app
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId demo-app, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp -DartifactId=demo-app -DdbType=hana
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId demo-app, version 1.0.0-SNAPSHOT, and SAP hana database.

+
+
+
+
devon java create com.example.domain.myapp -DdbType=oracle -Dversion=0.0.1 com.example.group -Dbatch=batch
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 0.0.1, oracle database, and with a batch module.

+
+
+
+
migrate
+
+

Example for migrating a devon4j application:

+
+
+
+
devon java migrate
+
+
+
+

Will migrate current devon4j application to the latest version available.

+
+ +
+
+
jenkins
+
+

The jenkins commandlet allows to install, configure, and launch Jenkins.

+
+
+
Usage of devon jenkins
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup Jenkins (install and verify) +|start |Start your local Jenkins server +|stop |Stop your local Jenkins server +|add |Add current project as CI job to your local Jenkins +|== == == == == == == == == == == =

+
+ +
+
+
Kubernetes
+
+

The kubectl commandlet allows to install and use kubernetes. +On Windows WSL 2 (Windows Subsystem for Linux) has to be installed properly as a prerequisite. +The setup on windows will then install kubernetes with K3D. K3D will create a cluster with a single node with a default name as "devonfw-cluster".

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon kubectl «args») are explained by the following table:

+
+
+
Usage of devon kubectl
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Kubernetes (install and verify) as per above flow. +|«args» |call kubectl with the specified arguments. Call kubectl help for details or use kubectl directly as preferred. +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

Please note that on Windows and macOS, Kubernetes support comes together with Docker Desktop that is installed via docker commandlet. +When you have installed and launched Docker Desktop, you can once enable Kubernetes in the Preferences.

+
+
+

On Linux however, Kubernetes is installed separately by this commandlet.

+
+
+
+
usage
+
+

Once installed via setup, you can run kubectl directly from any shell of your OS directly. +Run kubectl help to get started and use the online documentations and resources on the web to get familiar with Kubernetes. +It is not our intention to repeat this here.

+
+
+

Please note that the kubectl commandlet is a command wrapper.

+
+ +
+
+
mvn
+
+

The mvn commandlet allows to install, configure, and launch maven. It is similar to maven-wrapper and mdub. So calling devon mvn «args» is more or less the same as calling mvn «args» but with the benefit that the version of maven preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon mvn «args») are explained by the following table:

+
+
+
Usage of devon mvn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via MVN_BUILD_OPTS +|setup |setup Maven (install and verify), configurable via MAVEN_VERSION +|get-version |Print the version of your current project. Will consolidate the version for multi-module projects ignoring dev[-SNAPSHOT] versions and fail on mixed versions. +|set-version «nv» [«cv»] |Set the version of your current project to «nv» (assuming your current version is «cv»). +|check-no-snapshots |Check if no «version»-SNAPSHOT dependencies are used. +|check-top-level-project |Check if you are running on a top-level project or fail if in a module or no maven project at all. +|release |Start a clean deploy release build, configurable via MVN_RELEASE_OPTS +|«args» |run maven with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
ng
+
+

The ng commandlet allows to install, configure, and launch ng (angular-cli). Calling devon ng «args» is more or less the same as calling ng «args» but with some advanced features and ensuring that ng is properly set up for your project.

+
+
+

The arguments (devon ng «args») are explained by the following table:

+
+
+
Usage of devon ng
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup yarn (install and verify), configurable via NG_VERSION +|create |Create a new devon4ng project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ng with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
node
+
+

The node commandlet allows to install and setup node.js. +The arguments (devon node «args») are explained by the following table:

+
+
+
Usage of devon node
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup node.js (install and verify), configurable via NODE_VERSION +|create «name» [«args»] | create a new devon4node application (same as devon4node new) +|generate «s» [«args»] | generate devon4node components using the schematic «s» (same as devon4node generate) +|db «c» [«args»] | execute a TypeORM command «c» (same as devon4node db) +|cicd «args» |generate cicd files for the current devon4node project +|«args» | call NodeJS with the specified arguments +|== == == == == == == == == == == =

+
+ +
+
+
npm
+
+

The npm commandlet allows to install, configure, and launch npm. Calling devon npm «args» is more or less the same as calling npm «args» but with the benefit that the version of npm preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon npm «args») are explained by the following table:

+
+
+
Usage of devon npm
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via NPM_BUILD_OPTS +|setup |setup NPM (install and verify), configurable via NPM_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |Start a clean deploy release build, configurable via NPM_RELEASE_OPTS +|«args» |run NPM with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
release
+
+

Create a release in a standardized way including the following steps:

+
+
+
    +
  • +

    verify the current project (no local changes, etc.)

    +
  • +
  • +

    warn if «version»-SNAPSHOT dependencies are used

    +
  • +
  • +

    determine «version» (if currently «version»-SNAPSHOT) and print out release information.

    +
  • +
  • +

    ask user for confirmation

    +
  • +
  • +

    bump release to «version» in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    create annotated tag for your release as release/«version»

    +
  • +
  • +

    invoke deployment on build-system

    +
  • +
  • +

    set next version as («version»+1)-SNAPSHOT in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    push your changes

    +
  • +
+
+
+
Usage of devon release
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|…​ |any optional argument will directly be passed to the actual command to build the deployment +|== == == == == == == == == == == =

+
+
+
+
Build-Tools
+
+

This release commandlet utilizes the build commandlet to support multiple build-tools such as maven, gradle, or npm. Each of those commandlets should respect the variable «TOOL»_RELEASE_OPTS to customize the parameters for the release build.

+
+
+

So e.g. if a pom.xml is detected, maven will be used. In this example the variable MVN_RELEASE_OPTS is used that defaults to clean deploy -Dchangelist= -Pdeploy. +If you provide a specific argument this will be passed additionally. +So if you invoke the command devon release -P myProfile, the above step invoke deployment on build-system would technically call this:

+
+
+
+
mvn clean deploy -Dchangelist= -Pdeploy -P myProfile
+
+
+
+

Please also note that it is very tricky to determine and modify the version of a project in a fully generic way. +Even though we try our best to support different scenarios, we can not ensure this is working for edge-cases. +Therefore, we strongly encourage to follow best practices such as ci-friendly maven. +Further, sticking to the defaults and follow the devonfw standard to name the profile for custom goals in deployment simply deploy is recommended.

+
+ +
+
+
sonar
+
+

The sonar commandlet allows to install, configure, and launch SonarQube.

+
+
+
Usage of devon sonar
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup SonarQube (install and verify) +|start |Start your local SonarQube server +|stop |Stop your local SonarQube server +|analyze |Analyze current project with SonarQube +|== == == == == == == == == == == =

+
+ +
+
+
vscode
+
+

The vscode commandlet allows to install, configure, and launch Visual Studio Code. +To launch VSCode for your current workspace and devonfw-ide installation, simply run: +devon vscode

+
+
+

You may also supply additional arguments as devon vscode «args». These are explained by the following table:

+
+
+
Usage of devon vscode
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then to command will be invoked for each workspace +|setup |setup VSCode (install or update) +|add-plugin «id»|install an additional plugin (extension) +|run |launch VSCode (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with VS Code you need plugins (called extensions in VS Code). Of course devonfw-ide can automate this for you: +In your settings git repository create a folder vscode/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example devonfw-extension-pack.properties:

+
+
+
+
plugin_id=devonfw.devonfw-extension-pack
+plugin_active=true
+
+
+
+

The variables are defined as following:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins click on Extensions at the bottom of the left navigation icon bar in VS code. Then use the search to find the plugin of your choice. If you click on it the plugin ID is displayed in grey beside the official title at the top of the plugin details page. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon vscode add-plugin «plugin-name» from the config file settings/vscode/plugins/«plugin-name».properties. See the settings/vscode/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of VS code. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
cleaning plugins on update
+
+

If you want to strictly manage the plugins for VS code in your project, you can create or edit the file settings/vscode/plugins in your settings and add this variable:

+
+
+
+
clean_plugins_on_update=true
+
+
+
+

This will wipe all plugins when an update of VS code is performed (e.g. via devon ide update) and reinstall all configured plugins. While this gives you more control over the governance of the plugins and allows you to remove a plugin later during the project lifecycle, it will also delete all manually installed plugins automatically without asking.

+
+ +
+
+
yarn
+
+

The yarn commandlet allows to install, configure, and launch yarn. Calling devon yarn «args» is more or less the same as calling yarn «args» but with the benefit that the version of yarn preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon yarn «args») are explained by the following table:

+
+
+
Usage of devon yarn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via YARN_BUILD_OPTS +|setup |setup yarn (install and verify), configurable via YARN_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |start a clean deploy release build, configurable via YARN_RELEASE_OPTS +|«args» |run yarn with the given arguments («args») +|== == == == == == == == == == == =

+
+
+ +
+
+
+

Structure

+
+

The directory layout of your devonfw-ide will look like this:

+
+
+
File structure of your devonfw-ide
+
+
/ projects (or C:\Projects, etc.)
+└──/ my-project ($DEVON_IDE_HOME)
+    ├──/ conf
+    ├──/ log
+    ├──/ scripts
+    ├──/ settings
+    ├──/ software
+    ├──/ system
+    ├──/ updates
+    ├──/ workspaces
+    ├── setup
+    ├── setup.bat
+    └── devon-ide-doc.pdf
+
+
+
+

The elements of the above structure are described in the individual sections. As they are hyperlinks you can simply click on them to get more details.

+
+ +
+
conf
+
+

This folder contains configurations for your IDE:

+
+
+
File structure of the conf folder
+
+
/ conf
+├──/ .m2
+│  ├──/ repository
+│  │  ├──/ ant
+│  │  ├──/ ...
+│  │  └──/ zw
+│  ├── settings-security.xml
+│  └── settings.xml
+├──/ .sonar
+├──/ ...
+└── variables
+
+
+
+

The .m2 folder is used for configurations of maven. It contains the local repository folder used as cache for artifacts downloaded and installed by maven (see also maven repositories). +Further, there are two configuration files for maven:

+
+
+
    +
  • +

    settings.xml initialized from a template from your devonfw-ide [settings]. You may customize this to your needs (configuring HTTP proxies, credentials, or other user-specific settings). Secrets can be specified as $[«variable.name»] and will be prompted, encrypted and replaced automatically during the setup (unless in batch mode). Please note that this process is skipped in batch mode and also if you use the default settings URL (for simplicity of testing). To make use of this feature simply fork or copy the settings to your own git repo. In case your credentials have changed or you made a typo, you can simply redo this step by first moving your ${DEVON_IDE_HOME}/conf/.m2/settings.xml file to a temporary folder and then calling devon mvn setup.

    +
  • +
  • +

    settings-security.xml is auto-generated for you by devonfw-ide with a random password. This should make it easier for devonfw-ide users to use password encryption and never add passwords in plain text for better security.

    +
  • +
+
+
+

Finally, there is a file variables for the user-specific configuration of devonfw-ide.

+
+ +
+
+
log
+
+

The log directory is used to store log files e.g. for the IDE configurator. You may look here for debug information if something goes wrong.

+
+ +
+
+
scripts
+
+

This directory is the heart of the devonfw-ide and contains the required scripts.

+
+
+
File structure of the scripts folder
+
+
/scripts
+├──/ command
+│  ├── build
+│  ├── docker
+│  ├── eclipse
+│  ├── gradle
+│  ├── help
+│  ├── ide
+│  ├── intellij
+│  ├── ionic
+│  ├── jasypt
+│  ├── java
+│  ├── jenkins
+│  ├── kubectl
+│  ├── mvn
+│  ├── ng
+│  ├── node
+│  ├── npm
+│  ├── project
+│  ├── release
+│  ├── sonar
+│  ├── vscode
+│  └── yarn
+├── devon
+├── devon.bat
+├── environment-project
+├── environment-project.bat
+├── functions
+└── devon.properties
+
+
+
+

The command folder contains the commandlets. +The devon script is the key command line interface for devonfw-ide. +There is also devon.bat that can be used in cmd or PowerShell. +As the devon CLI can be used as a global command on your computer from any directory and gets installed centrally, it aims to be stable, minimal, and lightweight. +The key logic to set up the environment variables is therefore in a separate script environment-project and its Windows variant environment-project.bat inside this scripts folder. +The file functions contains a collection of reusable bash functions. +These are sourced and used by the commandlets. +Finally the devon.properties file contains defaults for the general configuration of devonfw-ide.

+
+ +
+
+
settings
+
+

The devonfw-ide requires settings with configuration templates for the arbitrary tools.

+
+
+

To get an initial set of these settings we provide the default ide-settings as an initial package. These are also released so you can download the latest stable or any history version at maven central.

+
+
+

To test devonfw-ide or for very small projects you can also use the latest default settings (just hit return when setup is asking for the Settings URL). +However, for collaborative projects we strongly encourage you to distribute and maintain the settings via a dedicated and project specific git repository. +This gives you the freedom to control and manage the tools with their versions and configurations during the project lifecycle. +Therefore simply follow the admin usage guide.

+
+
+
+
Structure
+
+

The settings folder (see SETTINGS_PATH) has to follow this file structure:

+
+
+
File structure of settings
+
+
/settings
+├──/ devon
+│  ├──/ conf
+│  │  ├──/ .m2
+│  │  │  └── settings.xml
+│  │  ├──/ npm
+│  │  │  └── .npmrc
+│  │  └── devon.properties
+├──/ eclipse
+│  ├──/ workspace
+│  │  ├──/ setup
+│  │  └──/ update
+│  ├── lifecycle-mapping-metadata.xml
+│  └── project.dictionary
+├──/ ...
+├──/ sonarqube
+│  └──/ profiles
+│     ├── Devon-C#.xml
+│     ├── ...
+│     └── Devon-XML.xml
+├──/ vscode
+│  └──/ workspace
+│     ├──/ setup
+│     └──/ update
+└── devon.properties
+
+
+
+

As you can see, the settings folder contains sub-folders for tools of the IDE. +So the devon folder contains devon.properties files for the configuration of your environment. +Further, for the IDEs such as eclipse or vscode, the according folders contain the templates to manage the workspace via our configurator.

+
+
+
+
Configuration Philosophy
+
+

Different tools and configuration files require a different handling:

+
+
+
    +
  • +

    Where suitable, we directly use these configurations from your settings (e.g. for eclipse/lifecycle-mapping-metadata.xml, or eclipse/project.dictionary).

    +
  • +
  • +

    The devon folder in settings contains templates for configuration files. These are copied to the devonfw-ide installation during setup (if no such file already exists). In this way the settings repository can provide reasonable defaults but allows the user to take over control and customize to his personal needs (e.g. .m2/settings.xml).

    +
  • +
  • +

    Other configurations need to be imported manually. To avoid manual steps and simplify use we try to automate as much as possible. This currently applies to sonarqube profiles but will be automated with sonar-devon4j-plugin in the future.

    +
  • +
  • +

    For tools with complex configuration structures like eclipse, intellij, or vscode we provide a smart mechanism via our configurator.

    +
  • +
+
+
+
+
Customize Settings
+
+

You can easily customize these settings for the requirements of your project. We suggest that one team member is responsible to ensure that everything stays consistent and works.

+
+
+

You may also create new sub-folders in settings and put individual items according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth to share in your team. However, to share and maintain knowledge we recommend to use a wiki.

+
+ +
+
+
software
+
+

The software folder contains the third party tools for your IDE such as maven, npm, java, etc. +With respect to the licensing terms you may create a custom archive containing a devonfw-ide together with the required software. +However, to be platform independent and allow lightweight updates, the devonfw-ide is capable to download and install the software automatically for you.

+
+
+
+
Repository
+
+

By default, software is downloaded via the internet from public download URLs of the according tools. However, some projects may need specific tools or tool versions that are not publicly available. +In such case, they can create their own software repository (e.g. in a VPN) and configure the base URL of it via DEVON_SOFTWARE_REPOSITORY variable. +Then, devonfw-ide will download all software from this repository only instead of the default public download URLs. +This repository (URL) should be accessible within your network via HTTPS (or HTTP) and without any authentication. +The repository needs to have the following structure:

+
+
+
+
${DEVON_SOFTWARE_REPOSITORY}/«tool»/«version»/«tool»-«version»[-«os»].tgz
+
+
+
+

So for every tool «tool» (java, maven, vscode, eclipse, etc.) you need to provide a folder in your repository. +Within this folder for every supported version «version» you need a subfolder. +This subfolder needs to contain the tool in that version for every operating system «os» (windows, linux, or mac - omitted if platform independent, e.g. for maven).

+
+
+
+
Shared
+
+

By default, each installation of devonfw-ide has its own physical installations of the required tools in the desired versions stored in its local software folder. +While this is great for isolation of devonfw-ide installations and to prevent side-effects, it can cause a huge waste of disc resources in case you are having many installations of devonfw-ide. +If you are a power-user of devonfw-ide with more than ten or even up to hundreds of installations on your machine, you might love to share installations of a software tool in a particular version between multiple devonfw-ide installations.

+
+
+ + + + + +
+ + +If you use this power-feature you are taking responsibility for side-effects and should not expect support. Also if you are using Windows please read Symlinks in Windows and make your mind if you really want to do so. You might also use this hint and maintain it manually without enabling the following feature. +
+
+
+

In order to do so, you only need to configure the variable DEVON_SOFTWARE_PATH in your ~/devon.properties pointing to an existing directory on your disc (e.g. /projects/software or C:\projects\software). +Then devonfw-ide will install required software into ${DEVON_SOFTWARE_PATH}/${software_name}/${software_version} as needed and create a symbolic link to it in ${DEVON_IDE_HOME}/software/${software_name}.

+
+
+

As a benefit, another devonfw-ide installation using the same software with the same version can re-use the existing installation and only needs to create the symbolic link. No more waste of having many identical JDK installations on your disc.

+
+
+

As a drawback, you need to be aware that specific tools may be "manipulated" after installation. +The most common case is that a tool allows to install plugins or extensions such as all IDEs do. Such "manipulations" will cause side-effects between the different devonfw-ide installations sharing the same version of that tool. +While this can also be a benefit it may also cause trouble. +If you have a sensitive project that should not be affected by such side-effects, you may again override the DEVON_SOFTWARE_PATH variable to the empty value in your ${DEVON_IDE_HOME}/conf/devon.properties of that sensitive installation:

+
+
+
+
DEVON_SOFTWARE_PATH=
+
+
+
+

This will disable this feature particularly for that specific sensitive devonfw-ide installation but let you use it for all other ones.

+
+
+
+
Custom
+
+

In some cases, a project might need a (proprietary) tool(s) that (are) not supported by devonfw-ide. A very simple solution is to get a release of devonfw-ide and add the tool(s) to the software folder and then distribute this modified release to your team. However, this has several drawbacks as you then have a fork of devonfw-ide and will lose your tool(s) when updating to a new release.

+
+
+

As a solution for this need, devonfw-ide lets you configure custom tools via the DEVON_IDE_CUSTOM_TOOLS variable. It can be defined in devon.properties of your settings git repository as an array of the custom tools you need to add. +The following applies to each entry:

+
+
+
    +
  • +

    It needs to have the form «tool»:«version»[:all][:«repository-url»]

    +
  • +
  • +

    The first entry must have the «repository-url» included which is used as default

    +
  • +
  • +

    Further entries will inherit this default if omitted

    +
  • +
  • +

    This URL is used in the same way as described above for a software repository.

    +
  • +
  • +

    The DEVON_SOFTWARE_REPOSITORY variable is ignored by this feature.

    +
  • +
  • +

    The optional infix :all is used to indicate that the tool is platform independent. Otherwise, an OS specific infix is appended to the URL file to download for your platform (windows, linux, or mac).

    +
  • +
+
+
+

As an example, we define it in ${DEVON_IDE_HOME}/settings/devon.properties:

+
+
+
+
DEVON_IDE_CUSTOM_TOOLS=(jboss-eap:7.1.4.GA:all:https://host.tld/projects/my-project firefox:70.0.1)
+
+
+
+

This will download and extract the following content to your software folder:

+
+ +
+

Please note that if you are not using windows, the -windows suffix will be -mac or -linux.

+
+ +
+
+
system
+
+

The system folder contains documentation and solutions for operation system specific integration. Please have a look to get the maximum out of devonfw-ide and become a very efficient power user.

+
+ +
+
+
updates
+
+

The updates folder is used for temporary data. This includes:

+
+
+
    +
  • +

    extracted archives for installation and updates

    +
  • +
  • +

    backups of old content on updates to prevent data loss

    +
  • +
+
+
+

If all works fine you may clean this folder to save some kilo- or mega-bytes. Otherwise, you can ignore it unless you are looking for a backup after a failed or unplanned upgrade.

+
+ +
+
+
workspaces
+
+

The workspaces folder contains folders for your active work. There is a workspace folder main dedicated for your primary work. You may do all your work inside the main workspace. Also, you are free to create any number of additional workspace folders named as you like (e.g. test, release, testing, my-sub-project, etc.). Using multiple workspaces is especially relevant for Eclipse as each workspace has its own Eclipse runtime instance and configuration.

+
+
+

Within the workspace folder (e.g. workspaces/main) you are again free to create sub-folders for (sub-)projects according to your needs. We assume that in most cases you clone git repositories here. The following structure shows an example layout for devonfw:

+
+
+
File structure of workspaces
+
+
/ workspaces
+├──/ main
+│  ├──/ .metadata
+│  ├──/ ide
+│  ├──/ devon4j
+│  └──/ my-thai-star
+└──/ stable
+   ├──/ .metadata
+   ├──/ ide
+   └──/ devon4j
+
+
+
+

In the main workspace you may find the cloned forks for regular work (in the example e.g. devon4j) as a base to create pull-requests while in the stable workspace there is a clone of devon4j from the official devon4j. +However, this is just an example. Some people like to create separate workspaces for development and maintenance branches with git. Other people just switch between those via git checkout.

+
+ +
+
+
Project import
+
+

The devonfw-ide supports to automatically check out and import required projects into your IDE during setup. To configure this you put a .properties file for each desired project into the projects sub-folder in your settings. Each .properties file describes one "project" which you would like to check out and (potentially) import:

+
+
+
+
path=myproject
+workingsets=Set1,Set2
+workspace=example
+git.url=http://github.com/someorg/someproject
+git.branch=develop
+build.path=.
+build.cmd=mvn -DskipTests=true -Darchetype.test.skip=true clean install
+eclipse=import
+active=true
+
+
+
+
+
.Variables of project import
+
+
+
+

|== = +|Variable|Value|Meaning +|path|e.g. myproject, will clone into ${WORKSPACE_PATH}/myproject|(required) Path into which the project is cloned. This path is relative to the workspace. +|workingsets|e.g. ws1,ws2|(optional) This will create working sets (in eclipse). Each module (eclipse project) of this project will be part of all these working sets. Working sets will be automatically created if necessary. +|workspace|main|Workspace to use for checkout and import. Default is main. +|git.url|e.g. http://github.com/someorg/someproject|(required) Git URL to use for cloning the project. +|git.branch|e.g. develop|(optional) Git branch to checkout. Git default branch is default. +|build.path|e.g. . (default)|(optional) The directory inside path where to trigger an initial build after clone or pull (if build.cmd is set). For a regular project use . to build top-level project. +|build.cmd +|e.g. mvn -DskipTests=true -Darchetype.test.skip=true clean install +|(optional) The devonfw command to invoke to build the project after clone or pull. If omitted no build is triggered. +|eclipse|e.g. import|(optional) Desired action for eclipse IDE. If you put import here all modules (eclipse projects) in the current project will be imported into eclipse. If you leave this out or put any other value for this parameter, no change in eclipse is done. +|active|true|(optional) If set to false the project is skipped during the setup. +|== =

+
+
+

Please note that the .properties file is parsed via shell and not via java. So be careful with "advanced" features .properties files normally support.

+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/devonfw-ide.html b/docs/ide/1.0/devonfw-ide.html new file mode 100644 index 00000000..1b8e358b --- /dev/null +++ b/docs/ide/1.0/devonfw-ide.html @@ -0,0 +1,5727 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw-ide

+
+
+

Introduction

+
+

devonfw provides a solution to building applications which combine best-in-class frameworks and libraries +as well as industry proven practices and code conventions. +It massively speeds up development, reduces risks and helps deliver better results.

+
+
+

This document contains the instructions for the tool devonfw-ide to set up and maintain your development tools including your favorite IDE (integrated development environment).

+
+ +
+

Features

+
+

Every developer needs great tools to work efficiently. Setting up these tools manually can be tedious and error-prone. Furthermore, some projects may require different versions and configurations of such tools. Especially configurations like code-formatters should be consistent within a project to avoid diff-wars.

+
+
+

The devonfw-ide will solve these issues. Here are the features you will find through devonfw-ide:

+
+
+
    +
  • +

    Efficient
    +Set up your IDE within minutes tailored for the requirements of your project.

    +
  • +
  • +

    Automated
    +Automate the setup and update, avoid manual steps and mistakes.

    +
  • +
  • +

    Simple
    +KISS (Keep It Small and Simple), no native installers that globally mess your OS or tool-integration that break with every release. Instead, use templates and simple shell scripts.

    +
  • +
  • +

    Configurable
    +You can change the configuration depending on your needs. Furthermore, the settings contain configuration templates for the different tools (see configurator).

    +
  • +
  • +

    Maintainable
    +For your project you should copy these settings to an own git repository that can be maintained and updated to manage the tool configurations during the project lifecycle. If you use GitHub or GitLab every developer can easily suggest changes and improvements to these settings via pull/merge requests, which is easier to manage with big teams.

    +
  • +
  • +

    Customizable
    +Do you need an additional tool you had never heard of before? Put it in the software folder of the structure. The devon CLI will then automatically add it to your PATH variable.
    +Further you can create your own commandlet for your additional tool. For closed-source tools you can create your own archive and distribute it to your team members as long as you care about the terms and licenses of these tools.

    +
  • +
  • +

    Multi-platform
    +It works on all major platforms: Windows, Mac and Linux.

    +
  • +
  • +

    Multi-tenancy
    +You can have several instances of the devonfw-ide "installed" on your machine for different projects with different tools, tool versions and configurations. You won’t need to set up any physical installation nor changing your operating system. "Installations" of devonfw-ide do not interfere with each other nor with other installed software.

    +
  • +
  • +

    Multiple Workspaces
    +It supports working with different workspaces on different branches. You can create and update new workspaces with a few clicks. You can see the workspace name in the title-bar of your IDE so you do not get confused and work on the right branch.

    +
  • +
  • +

    Free
    +The devonfw-ide is free just like everything from devonfw. See LICENSE for details.

    +
  • +
+
+
+
+

IDEs

+
+

We support the following IDEs:

+
+
+ +
+
+
+

Platforms

+
+

We support the following platforms:

+
+
+ +
+
+
+

Build-Systems

+
+

We support the following build-systems:

+
+
+ +
+
+

However, also other IDEs, platforms, or tools can be easily integrated as commandlet.

+
+
+
+

Motivation

+
+

TL;DR? Let's talk to developers in the correct language. Here are some examples with devonfw-ide:

+
+
+
+
[/]$ devon
+You are not inside a devonfw-ide installation: /
+[/]$ cd /projects/devonfw
+[devonfw]$ mvn
+zsh: command not found: mvn
+[devonfw]$ devon
+devonfw-ide environment variables have been set for /projects/devonfw in workspace main
+[devonfw]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/devonfw/software/maven
+Java version: 1.8.0_191, vendor: Oracle Corporation, runtime: /projects/devonfw/software/java
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[devonfw]$ cd /projects/ide-test/workspaces/test/my-project
+[my-project]$ devon
+devonfw-ide environment variables have been set for /projects/ide-test in workspace test
+[my-project]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/ide-test/software/maven
+Java version: 11.0.2, vendor: Oracle Corporation, runtime: /projects/ide-test/software/jdk/Contents/Home
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[ide-test]$ devon eclipse
+launching Eclipse for workspace test...
+[my-project]$ devon build
+[INFO] Scanning for projects...
+...
+[INFO] BUILD SUCCESS
+
+
+
+

This was just a very simple demo of what devonfw-ide can do. For further details have a look at our CLI documentation.

+
+
+

Now you might ask:

+
+
+
    +
  • +

    But I use Windows/Linux/MacOS/… - it works on all platforms!

    +
  • +
  • +

    But how about Windows CMD or Power-Shell? - it works!

    +
  • +
  • +

    But what if I use cygwin or git-bash on windows? - it works!

    +
  • +
  • +

    But I love to use ConEmu or Commander - it works with full integration!

    +
  • +
  • +

    How about macOS Terminal or iTerm2? - it works with full integration!

    +
  • +
  • +

    But I use Zsh - it works!

    +
  • +
  • +

    …​? - it works!

    +
  • +
+
+
+

Wow! So let’s get started with download & setup.

+
+ +
+
+

Setup

+ +
+
+

Prerequisites

+
+

We try to make it as simple as possible for you. However, there are some minimal prerequisites:

+
+
+
    +
  • +

    You need to have a tool to extract *.tar.gz files (tar and gzip). On Windows before Version 10 (1803) use 7-zip. On all other platforms this comes out of the box.

    +
  • +
  • +

    You need to have git and curl installed.

    +
    +
      +
    • +

      On Windows you only need to download and install git for windows. This also ships with bash and curl.

      +
    • +
    • +

      On Linux you might need to install the above tools in case they are not present (e.g. sudo apt-get install git curl or sudo yum install git-core curl)

      +
    • +
    • +

      On MacOS you only need to download and install git for mac.

      +
    • +
    +
    +
  • +
+
+
+
+

Download

+
+

The latest release of devonfw-ide can be downloaded from here (You can find all releases in maven central).

+
+
+
+

Install

+
+

Create a central folder like C:\projects or /projects. Inside this folder, create a sub-folder for your new project such as my-project and extract the contents of the downloaded archive (devonfw-ide-scripts-*.tar.gz) to this new folder. Run the command setup in this folder (on windows double clicking on setup.bat). +That’s all. To get started read the usage.

+
+
+
+

Uninstall

+
+

To "uninstall" your devonfw-ide you only need to call the following command:

+
+
+
+
devon ide uninstall
+
+
+
+

Then you can delete the devonfw-ide top-level folder(s) (${DEVON_IDE_HOME}).

+
+
+

The devonfw-ide is designed to be non-invasive to your operating system and computer. Therefore it is not "installed" on your system in a classical way. Instead you just create a folder and extract the downloaded archive to it. You only have to install some specific prerequisites like git in advance in the regular way. All the other software remains locally in your devonfw-ide folder. However, there are the following exceptions (which are reverted by devon ide uninstall):

+
+
+
    +
  • +

    The devon command is copied to your home directory (~/.devon/devon)

    +
  • +
  • +

    The devon alias is added to your shell config (~/.bashrc and ~/.zshrc, search for alias devon="source ~/.devon/devon").

    +
  • +
  • +

    On Windows the devon.bat command is copied to your home directory (%USERPROFILE%\scripts\devon.bat)

    +
  • +
  • +

    On Windows this %USERPROFILE%\scripts directory is added to the PATH of your user.

    +
  • +
  • +

    The devonfw-ide will download a third party software to your ~/Downloads/devonfw-ide folder to reduce redundant storage. You have to delete this folder manually as we do not want to be responsible for data-loss in case users manually put files here.

    +
  • +
+
+
+
+

Testing SNAPSHOT releases

+
+

Whenever a story in devonfw-ide is completed by merging a PR, +our github actions will build a new SNAPSHOT release and on success deploy it to nexus on OSSRH. +You can therefore find the latest devonfw SNAPSHOT releases here. +Simply choose the latest SNAPSHOT version folder and then inside the *.tar.gz file for the latest version. +Once downloaded, you can proceed as with official releases (see install).

+
+
+

If you test the latest SNAPSHOT please also give feedback to bug or feature tickets to let us know if things are working or not. +Thanks for your testing, support and help to make devonfw better!

+
+
+
+
+
+

Usage

+ +
+

This section explains the usage of devonfw-ide according to your role:

+
+
+
    +
  • +

    Everybody should read and follow the usage for a developer.

    +
  • +
  • +

    In case you want to administrate devonfw-ide settings for your project, you should also read the usage for the ide-admin.

    +
  • +
+
+
+

Developer

+
+

As a developer you are supported to set up your IDE in an automated and fast way while you can have a nice cup of coffee (after you provided the settings-URL and accepted the license). +You only need the settings URL from your ide-admin. +Experienced developers can directly call setup «settings-URL». +Otherwise if you just call setup (e.g. by double-clicking it), you can enter it when you are prompted for Settings URL (using copy&paste to avoid typos).

+
+
+

Note: devonfw-ide supports autocompletion (since 2021.04.001). Currently this only works in bash (on windows use git bash). Simply type devon and hit [Tab] to get completion.

+
+
+
+

Update

+
+

To update your IDE (if instructed by your ide-admin), you only need to run the following command:

+
+
+
+
devon ide update
+
+
+
+

Please note that Windows uses file-locking, which can have ugly side-effects. +To be safe, you should have your IDE tools shut down before invoking the above update command. +E.g. if a tool needs to be updated, the old installation folder will be moved to a backup and the new version is installed on top. +If there are windows file locks in place this can fail and mess up things. +You can still delete the according installation from your software folder and rerun devon ide update if you ran into this error.

+
+
+
+

Working with multiple workspaces

+
+

If you are working on different branches in parallel you typically want to use multiple workspaces.

+
+
+
    +
  1. +

    Go to the workspaces folder in your ${DEVON_IDE_HOME} and create a new folder with the name of your choice (e.g. release2.1).

    +
  2. +
  3. +

    Check out (git clone …​) the according projects and branch into that workspace folder.

    +
  4. +
  5. +

    Open a shell in that new workspace folder (cd to it) and according to your IDE run e.g. eclipse, vscode, or intellij to create your workspace and launch the IDE. You can also add the parameter create-script to the IDE commandlet in order to create a launch-script for your IDE.

    +
  6. +
+
+
+

You can have multiple instances of eclipse running for each workspace in parallel. To distinguish these instances you will find the workspace name in the title of eclipse.

+
+
+
+

Admin

+
+

You can easily customize and configure devonfw-ide for the requirements of your project. +In order to do so, you need to create your own project-specific settings git repository and provide the URL to all developers for the setup. +With tools such as gitlab, bitbucket or github every developer can easily propose changes and improvements. +However, we suggest that one team member is responsible to ensure that everything stays consistent and works. +We will call this person the ide-admin of your project.

+
+
+

The following are the suggested step-by-step instructions how an ide-admin should prepare devonfw-ide for his new project:

+
+
+
    +
  1. +

    Fork ide-settings to a git repository specific for your project (e.g. a new project in the gitlab of your production-line instance). In case you are using github, all you need to do is use the Fork button. In other cases simply create a new and empty git repository and clone this to your machine. Then add the default ide-settings as origin, fetch and pull from it:

    +
    +
    +
    git remote add upstream https://github.com/devonfw/ide-settings.git
    +git fetch upstream
    +git pull upstream master
    +git push
    +
    +
    +
    +

    Now you should have a full fork as a copy of the settings git repo with all its history that is ready for upstream merges.

    +
    +
  2. +
  3. +

    Study the structure of this git repository to understand where to find which configuration.

    +
  4. +
  5. +

    Study the configuration and understand that general settings can be tweaked in the toplevel devon.properties file of your settings git repository.

    +
  6. +
  7. +

    Configure the tools and their versions for your project. Here is an example:

    +
    +
    +
    DEVON_IDE_TOOLS=(java mvn eclipse)
    +ECLIPSE_VERSION=2020-06
    +##use e.g. 8u242b08 for Java 8
    +#JAVA_VERSION=8u242b08
    +JAVA_VERSION=11.0.5_10
    +MAVEN_VERSION=3.6.2
    +
    +
    +
    +

    This way you will take over control of the tools and their versions for every developer in your project team and ensure that things get reproducible.

    +
    +
  8. +
  9. +

    In case you need a proprietary or unsupported tool, you can study how to include custom tools.

    +
  10. +
  11. +

    In case you have very restrictive policies about downloading tools from the internet, you can create and configure a software repository for your project or company.

    +
  12. +
  13. +

    Some of the tools (especially the actual IDEs) allow extensions via plugins. You can customize them to your needs for eclipse, VS code, or intelliJ.

    +
  14. +
  15. +

    In your settings git repository you will find a projects folder. Here you will find configurations files for every git project relevant for your actual project. Feel free to create new projects for your needs and delete the devonfw specific default projects. The projects documentation will explain you how to do this.

    +
  16. +
  17. +

    For every IDE you will also find an according folder in your settings git repository. Here are the individual configuration settings for that IDE. You can change them by directly editing the according configuration files directly with a text-editor in your settings git repository. However, this is a really complex way and will take you a lot of time to find the right file and property to tweak for your actual need. Instead we suggest to study +how to customize IDE specific settings.

    +
  18. +
  19. +

    You may also create new sub-folders in your settings git repository and put individual things according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth to share in your team. However, to share and maintain knowledge we recommend to use a wiki instead.

    +
  20. +
  21. +

    You may want to customize the Eclipse spellchecker dictionary for your project and your language.

    +
  22. +
+
+
+

Everything described in the above steps (except the first one) can be used to manage and update the configuration during the project lifecycle. +However, when you have done changes especially in a larger project, please consider the following best-practices to avoid that a large team gets blocked by a non-functional IDE:

+
+
+
    +
  • +

    Commit your changes to a feature-branch.

    +
  • +
  • +

    First test the changes yourself.

    +
  • +
  • +

    If all works as expected, pick a pilot user of the team to test the changes from the feature branch (go to settings folder, git fetch, git checkout -t origin/feature/«name», devon ide update).

    +
  • +
  • +

    Only after that works well for a couple of days, inform the entire team to update.

    +
  • +
+
+
+
+

Announce changes to your team

+
+

In order to roll out the perfectly configured devonfw-ide to your project initially or when new members join, you only have to provide the Settings URL to the developers of your team. +You can also provide a specific branch with Settings URL#branch to use variations of common settings or to test new settings before making them public to the team.

+
+
+

After you changed and tested your settings git repository (main branch), you only need to announce this to your developers (e.g. via email or some communication tool) so that they can run devon ide update and automatically get up-to-date with the latest changes (see update).

+
+
+

In case you want to go to a new version of devonfw-ide itself, developers have to call devon ide update scripts.

+
+
+ +
+
+

Configuration

+
+

The devonfw-ide aims to be highly configurable and flexible. The configuration of the devon command and environment variables takes place via devon.properties files. The following list shows these configuration files in the order they are loaded so files can override variables from files above in the list:

+
+
+
    +
  1. +

    build in defaults (for JAVA_VERSION, ECLIPSE_PLUGINS, etc.)

    +
  2. +
  3. +

    ~/devon.properties - user specific global defaults (on windows in %USERPROFILE%/devon.properties)

    +
  4. +
  5. +

    scripts/devon.properties - defaults provided by devonfw-ide. Never directly modify this file!

    +
  6. +
  7. +

    devon.properties - vendor variables for custom distributions of devonfw-ide-scripts, may e.g. tweak SETTINGS_PATH or predefine SETTINGS_URL.

    +
  8. +
  9. +

    settings/devon.properties (${SETTINGS_PATH}/devon.properties) - project specific configurations from settings.

    +
  10. +
  11. +

    workspaces/${WORKSPACE}/devon.properties - optional workspace specific configurations (especially helpful in projects using docker).

    +
  12. +
  13. +

    conf/devon.properties - user specific configurations (e.g. M2_REPO=~/.m2/repository). During setup this file is created by copying a template from ${SETTINGS_PATH}/devon/conf/devon.properties.

    +
  14. +
  15. +

    settings/projects/*.properties- properties to configure project checkout and import

    +
  16. +
+
+
+
+

devon.properties

+
+

The devon.properties files allow to define environment variables in a simple and OS independent way:

+
+
+
    +
  • +

    # comments begin with a hash sign (#) and are ignored

    +
  • +
  • +

    variable_name=variable_value with space etc.

    +
  • +
  • +

    variable_name=${predefined_variable}/folder_name

    +
    +

    variable values can refer to other variables that are already defined, which will be resolved to their value. You have to use ${…​} syntax to make it work on all platforms (never use %…​%, $…​, or $(…​) syntax in devon.properties files).

    +
    +
  • +
  • +

    export exported_variable=this value will be exported in bash, in windows CMD the export prefix is ignored

    +
  • +
  • +

    variable_name=

    +
    +

    this will unset the specified variable

    +
    +
  • +
  • +

    variable_name=~/some/path/and.file

    +
    +

    tilde is resolved to your personal home directory on any OS including windows.

    +
    +
  • +
  • +

    array_variable=(value1 value2 value3)

    +
    +

    This will only work properly in bash worlds but as no arrays are used in CMD world of devonfw-ide it does not hurt on windows.

    +
    +
  • +
  • +

    Please never surround values with quotes (var="value")

    +
  • +
  • +

    This format is similar to Java *.properties but does not support advanced features as unicode literals, multi-lined values, etc.

    +
  • +
+
+
+

In order to know what to configure, have a look at the available variables.

+
+
+

Please only tweak configurations that you need to change and take according responsibility. There is a price to pay for flexibility, which means you have to be careful what you do.

+
+
+

Further, you can configure maven via conf/settings.xml. To configure your IDE such as eclipse or vscode you can tweak the settings.

+
+
+ +
+
+

Variables

+
+

The devonfw-ide defines a set of standard variables to your environment for configuration via variables[.bat] files. +These environment variables are described by the following table. +Those variables printed bold are also exported in your shell (except for windows CMD that does not have such concept). Variables with the value - are not set by default but may be set via configuration to override defaults. +Please note that we are trying to minimize any potential side-effect from devonfw-ide to the outside world by reducing the number of variables and only exporting those that are required.

+
+
+
Variables of devonfw-ide
+

|== == == == == == == == == == == = +|Variable|Value|Meaning +|DEVON_IDE_HOME|e.g. /projects/my-project|The top level directory of your devonfw-ide structure. +|PATH|$PATH:$DEVON_IDE_HOME/software/java:…​|You system path is adjusted by devon command. +|DEVON_HOME_DIR|~|The platform independent home directory of the current user. In some edge-cases (e.g. in cygwin) this differs from ~ to ensure a central home directory for the user on a single machine in any context or environment. +|DEVON_IDE_TOOLS|(java mvn node npm)|List of tools that should be installed and upgraded by default for your current IDE. +|DEVON_IDE_CUSTOM_TOOLS|-|List of custom tools that should be installed additionally. See software for further details. +|DEVON_CREATE_START_SCRIPTS|(eclipse vscode)|List of IDEs that shall be used by developers in the project and therefore start-scripts are created on setup. +|DEVON_OLD_PATH|…​|A "backup" of PATH before it was extended by devon to allow recovering it. Internal variable that should never be set or tweaked. +|WORKSPACE|main|The workspace you are currently in. Defaults to main if you are not inside a workspace. Never touch this variable in any variables file. +|WORKSPACE_PATH|$DEVON_IDE_HOME/workspaces/$WORKSPACE|Absolute path to current workspace. Never touch this variable in any variables file. +|JAVA_HOME|$DEVON_IDE_HOME/software/java|Path to JDK +|SETTINGS_PATH|$DEVON_IDE_HOME/settings|Path to your settings. To keep oasp4j-ide legacy behaviour set this to $DEVON_IDE_HOME/workspaces/main/development/settings. +|M2_REPO|$DEVON_IDE_HOME/conf/.m2/repository|Path to your local maven repository. For projects without high security demands, you may change this to the maven default ~/.m2/repository and share your repository among multiple projects. +|MAVEN_HOME|$DEVON_IDE_HOME/software/maven|Path to Maven +|MAVEN_OPTS|-Xmx512m -Duser.home=$DEVON_IDE_HOME/conf|Maven options +|DEVON_SOFTWARE_REPOSITORY|-|Project specific or custom software-repository. 
+|DEVON_SOFTWARE_PATH|-|Globally shared user-specific local software installation location. +|ECLIPSE_VMARGS|-Xms128M -Xmx768M -XX:MaxPermSize=256M|JVM options for Eclipse +|deprecated: ECLIPSE_PLUGINS|-|Array with "feature groups" and "update site URLs" to customize required eclipse plugins. Deprecated - see Eclipse plugins. +|«TOOL»_VERSION|-|The version of the tool «TOOL» to install and use (e.g. ECLIPSE_VERSION or MAVEN_VERSION). +|EXTRA_JAVA_VERSION|-|An additional (newer) version of java that will be used to run java-based IDEs (e.g. eclipse or intellij). +|«TOOL»_BUILD_OPTS|e.g.clean install|The arguments provided to the build-tool «TOOL» in order to run a build. +|«TOOL»_RELEASE_OPTS|e.g.clean deploy -Dchangelist= -Pdeploy|The arguments provided to the build-tool «TOOL» in order to perform a release build. +|DEVON_IDE_TRACE||If value is not an empty string, the devonfw-ide scripts will trace each script line executed. For bash two lines output: before and again after expansion. ATTENTION: This is not a regular variable working via devon.properties. Instead manually do export DEVON_IDE_TRACE=true in bash or DEVON_IDE_TRACE=true in windows CMD before running a devon command to get a trace log that you can provide to experts in order to trace down a bug and see what went wrong. +|== == == == == == == == == == == =

+
+
+ +
+
+

Devon CLI

+
+

The devonfw-ide is shipped with a central command devon. The setup will automatically register this command so it is available in any shell on your system. This page describes the Command Line Interface (CLI) of this command.

+
+
+
+

Devon

+
+

Without any argument the devon command will determine your DEVON_IDE_HOME and setup your environment variables automatically. In case you are not inside of a devonfw-ide folder the command will echo a message and do nothing.

+
+
+
+
[/]$ devon
+You are not inside a devon IDE installation: /
+[/]$ cd /projects/my-project/workspaces/test/my-git-repo
+[my-git-repo]$ devon
+devonfw-ide environment variables have been set for /projects/my-project in workspace main
+[my-git-repo]$ echo $DEVON_IDE_HOME
+/projects/my-project
+[my-git-repo]$ echo $JAVA_HOME
+/projects/my-project/software/java
+
+
+
+
+

Commandlets

+
+

The devon command supports a pluggable set of commandlets. Such commandlet is provided as first argument to the devon command and may take additional arguments:

+
+
+

devon «commandlet» [«arg»]*

+
+
+

Technically, a commandlet is a bash script located in $DEVON_IDE_HOME/scripts/command. So if you want to integrate another tool with devonfw-ide we are awaiting your pull-request. +Every commandlet takes the following generic arguments:

+
+
+
Generic arguments of every commandlet
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|-b or --batch |run in non-interactive mode (do not ask any questions). +|-q or --quiet |be quiet and avoid output. +|== == == == == == == == == == == =

+
+
+
+

Command-wrapper

+
+

For many commandlets the devon command acts as a wrapper. +Similar to mvnw or gradlew you can use it as a proxy command. +Therefore devon mvn clean install will be the same as mvn clean install. +The benefit when using devon as wrapper is that it will even work when the command (mvn, node, npm, etc.) is not on your PATH variable or even not yet installed. +We see the main benefit in this for writing portable scripts that you may commit to your git repository and that will then run everywhere and will lazily install the required tools on the fly. +In your daily usage you can and surely should avoid to always type devon as prefix to every command. +However, when you automate and want to avoid "command not found" errors, you can simply prefix the command with devon.

+
+
+
+

Commandlet overview

+
+

The following commandlets are currently available:

+
+
+ +
+ +
+
build
+
+

The build commandlet is an abstraction of build systems like maven, gradle, yarn, npm, etc. +It will auto-detect your build-system (via existence of files like pom.xml, package.json, etc.). According to this detection, it will simply delegate to the according commandlet of the specific build system. If that build-system is not yet available it will be downloaded and installed automatically.

+
+
+

So devon build allows users to build any project without bothering about the build-system. Further specific build options can be configured per project. This makes devon build a universal part of every definition of done. Before pushing your changes, please always run the following command to verify the build:

+
+
+

devon build

+
+
+

You may also supply additional arguments as devon build «args». This will simply delegate these arguments to the detected build command (e.g. call mvn «args»).

+
+ +
+
+
Docker
+
+

The Docker commandlet allows to install and use Docker. +On Windows, WSL 2 (Windows Subsystem for Linux) has to be installed properly as a prerequisite.

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon docker «args») are explained by the following table:

+
+
+
Usage of devon docker
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Docker (install and verify) as per above flow. +|«args» |call docker with the specified arguments. Call docker help for details or use docker directly as preferred. («args») +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

The Docker commandlet will install Docker automatically. +Please note that besides the sandbox concept of devonfw-ide this is a global installation on your system. +When uninstalling devonfw-ide, you may have to manually uninstall Docker and Kubernetes if you do not need it anymore.

+
+
+
+
requirements
+
+

Running Docker and especially Kubernetes on your machine in the background will require quite some resources. +This will allocate at least 2GB of additional RAM.

+
+
+

You will need at least 8GB of total RAM while we recommend to use 16GB+.

+
+
+

You may also tune and scale it to your needs. +When using Docker Desktop (Windows or MacOS) simply go to the resources tab in the settings. +It will depend on your usage frequency if you want to have it running in the background all the time. +This is a balance between resource utilization and convenience. +If you use Docker and Kubernetes on your local machine on a daily basis this makes sense.

+
+
+

In case you only use Docker rarely, you can save resources by stopping it when not needed after it has been installed.

+
+
+
+
Windows and macOS
+
+

To enable or disable autostart, you can launch Docker Desktop on Windows or MacOS and go to the Preferences (gear icon in the title bar). Then in the General tab you can check or uncheck the option Start Docker Desktop when you login (see also here). When autostart is disabled and you launch Docker Desktop it will notice and ask you to start the service or do this automatically for you. +On Windows you can also manually tweak this:

+
+
+
    +
  • +

    Hit [windows][r]

    +
  • +
  • +

    Enter services.msc

    +
  • +
  • +

    Confirm with OK

    +
  • +
  • +

    In the services app search for the Docker Desktop Service in the list and select it.

    +
  • +
  • +

    Now you can start or stop the service by clicking on the according link text.

    +
  • +
  • +

    Also when right clicking on Docker Desktop Service and selecting Options from the context-menu, you can change the start type to automatic or manual.

    +
  • +
+
+
+
+
== Mac M1
+
+

In case you have a new Mac with M1 CPU, we automatically download and install the according ARM version of Docker Desktop for macOS. +However, if you use Docker and search for images you may end up with errors like:

+
+
+
+
docker: no matching manifest for linux/arm64/v8 in the manifest list entries.
+
+
+
+

So with M1 CPU you may need to add --platform linux/x86_64 as option to your Docker command to find the expected container image.

+
+
+
+
Linux
+
+

There is no Docker Desktop for Linux. +As Docker initially comes from the Linux world, it is easy to set it up on a Linux machine and use it from the commandline. +Therefore we do not install a GUI for you in case you are a Linux user. +In case you need a GUI for Docker and Kubernetes on Linux you can choose from the following options:

+
+
+ +
+
+
+
usage
+
+

Once installed via setup, you can run Docker directly from any shell of your OS directly. +Run docker help to get started and use the online documentations and resources on the web to get familiar with Docker. +It is not our intention to repeat this here.

+
+
+

Please note that the docker commandlet is a command wrapper.

+
+ +
+
+
eclipse
+
+

The eclipse commandlet allows to install, configure, and launch the Eclipse IDE. +To launch eclipse for your current workspace and devonfw-ide installation simply run: +devon eclipse

+
+
+

You may also supply additional arguments as devon eclipse «args». These are explained by the following table:

+
+
+
Usage of devon eclipse
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then the command will be invoked for each workspace +|setup |setup Eclipse (install or update) +|add-plugin «id» [«url»]|install an additional plugin +|run |launch Eclipse (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add |reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+

There are variables that can be used for Eclipse. These are explained by the following table:

+
+
+
Variables of devonfw-ide for Eclipse
+

|== == == == == == == == == == == = +|Variable|Meaning +|ECLIPSE_VERSION|The version of the tool Eclipse to install and use. +|ECLIPSE_EDITION_TYPE|The edition of the tool Eclipse to install and use. You can choose between Java for standard edition or JEE for enterprise edition. +|*EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with Eclipse you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder eclipse/plugins (click on this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example tmterminal.properties:

+
+
+
+
plugin_url=http://download.eclipse.org/tm/terminal/marketplace
+plugin_id=org.eclipse.tm.terminal.feature.feature.group,org.eclipse.tm.terminal.view.feature.feature.group,org.eclipse.tm.terminal.control.feature.feature.group,org.eclipse.tm.terminal.connector.ssh.feature.feature.group,org.eclipse.tm.terminal.connector.telnet.feature.feature.group
+plugin_active=true
+
+
+
+

The variables are defined as follows:

+
+
+
    +
  • +

    plugin_url defines the URL of the Eclipse update site of the plugin

    +
  • +
  • +

    plugin_id defines the feature group ID(s) to install. To install multiple features/plugins provide a comma-separated list of IDs. If you want to customize devonfw-ide with new plugins you can first install them manually and then go to About Eclipse > Installation Details. There you can filter for your newly installed plugin and find the values in the Id column. Copy & paste them from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise, developers can still install the plugin manually via devon eclipse add-plugin «plugin-name» from the config file settings/eclipse/plugins/«plugin-name».properties. See the settings/eclipse/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. This e.g. applies for devstyle that allows a real dark mode for eclipse and tunes the theming and layout of Eclipse in general. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually.

+
+
+

As the maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
legacy plugin config
+
+

For downward compatibility we still support the deprecated legacy configuration if the folder settings/eclipse/plugins does not exist: +The project configuration typically defines the plugins that will be installed via ECLIPSE_PLUGINS variable. Otherwise defaults from this eclipse commandlet will apply. +Be aware that this comes at your own risk and sometimes plugins can conflict and break your IDE.

+
+
+

Here is an example how a project can configure the plugins in its devon.properties inside the settings:

+
+
+
+
ECLIPSE_PLUGINS=("AnyEditTools.feature.group" "https://raw.githubusercontent.com/iloveeclipse/plugins/latest/" "com.ess.regexutil.feature.group" "http://regex-util.sourceforge.net/update/")
+
+
+
+

For the above listed plugins you can also use the short form:

+
+
+
+
ECLIPSE_PLUGINS=("anyedit" "" "regexutil" "")
+
+
+
+

Of course you may also mix plugin IDs with fully qualified plugins.

+
+
+
+
dictionary
+
+

Eclipse already comes with a built-in spellchecker. This is very helpful when writing comments. The default settings of devonfw-ide ship with a project specific dictionary file and according configurations to enable spellchecking and configuring this dictionary. +When typing JavaDoc, inline comments or other texts the spellchecker will underline unknown words in red. +If your cursor is located at such a word you can hit [Ctrl][1] to get a context menu with additional options. +There you can either choose similar correct words to correct a typo or you may even add the word (maybe a new business term) to your local dictionary.

+
+
+
+"Eclipse spellchecker” +
+
+
+

In the latter case, you should commit the changes to your settings so that it will be available to your entire team. +For further details about committing changes to the settings please consult the admin usage.

+
+
+
+
non-english dictionary
+
+

In case your project has to write documentation or text in languages other than English, you might want to prefill your project dictionary for that language. +Here we collect a list of such dictionaries that you can download and merge into your project dictionary:

+
+
+ +
+ +
+
+
gradle
+
+

The gradle commandlet allows to install, configure, and launch gradle. It is similar to gradle-wrapper. So calling devon gradle «args» is more or less the same as calling gradle «args» but with the benefit that the version of gradle preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon gradle «args») are explained by the following table:

+
+
+
Usage of devon gradle
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup gradle (install and verify), configurable via GRADLE_VERSION +|«args» |run gradle with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
help
+
+

The help commandlet provides help for the CLI.

+
+
+
Usage of devon help
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |Print general help +|«command» |Print help for the commandlet «command». +|== == == == == == == == == == == =

+
+
+

Please note that devon help «command» will do the same as devon «command» help.

+
+ +
+
+
ide
+
+

The ide commandlet manages your devonfw-ide. +You need to supply additional arguments as devon ide «args». These are explained by the following table:

+
+
+
Usage of devon ide
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup [«SETTINGS_URL»] |setup devonfw-ide (cloning the settings from the given URL, optionally from specific branch URL#branch) +|update [«package»] |update devonfw-ide +|update scripts [to «version»] |update devonfw-ide +|uninstall |uninstall devonfw-ide (if you want to remove it entirely from your system) +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

Run devon ide setup to initially setup your devonfw-ide. It is recommended to run the setup script in the top-level directory ($DEVON_IDE_HOME). However, in case you want to skip some system specific integration, you may also run this command directly instead. The setup only needs to be called once after a new devonfw-ide instance has been created. It will follow this process:

+
+
+
    +
  • +

    install the devon command on your system (if not already installed).

    +
  • +
  • +

    clone the settings (you may provide a git URL directly as argument or you will be prompted for it).

    +
  • +
  • +

    install all required software from DEVON_IDE_TOOLS variable (if not already installed).

    +
  • +
  • +

    configure all these tools

    +
  • +
  • +

    create IDE launch scripts

    +
  • +
  • +

    perform OS specific system integration such as Windows Explorer integration (only done from setup script and not from devon ide setup)

    +
  • +
+
+
+
+
update
+
+

Run devon ide update to update your devonfw-ide. This will check for updates and install them automatically. +The optional extra argument («package») behaves as follows:

+
+
+
    +
  • +

    scripts: check if a new version of devonfw-ide-scripts is available. If so it will be downloaded and installed. As Windows is using file-locks, it is tricky to update a script while it is executed. Therefore, we update the scripts folder as an async background task and have to abort further processing at this point on windows as a workaround.

    +
  • +
  • +

    settings: update the settings (git pull).

    +
  • +
  • +

    software: update the software (e.g. if versions have changed via scripts or settings update).

    +
  • +
  • +

    projects: update the projects (checkout and import repositories into workspace/IDEs).

    +
  • +
  • +

    all: do all the above sequentially.

    +
  • +
  • +

    none: settings and software are updated by default if no extra argument is given. This is the regular usage for project developers. Only perform an update of scripts when you are requested to do so by your technical lead. Bigger projects especially need to test updates before rolling them out to the entire team. If developers always updated the latest release of the scripts which is released globally, some project functionality would break causing problems and extra efforts in the teams.

    +
  • +
+
+
+

In order to update to a specific version of scripts an explicit version can be specified after the additional to argument:

+
+
+
+
devon ide update scripts to 3.1.99
+
+
+
+

The above example will update to the exact version 3.1.99 no matter if this is an upgrade or a downgrade of your current installed version. +If you just use devon ide update scripts then the latest available version will be installed. In larger teams it is recommended to communicate exact version updates to avoid that a new release can interfere and break anything. Therefore, some pilot user will test a new version for the entire team and, only after a successful test, they will communicate to the team to update to that exact version by providing the complete command as in the above example.

+
+
+
+
uninstall
+
+

We hope you love devonfw-ide. However, if you don’t and want to get rid of it entirely and completely remove all integration, you can use this command:

+
+
+
+
devon ide uninstall
+
+
+
+

This will remove devonfw-ide from all central places of your OS (user home directory such as scripts, .devon, .bashrc, as well as windows registry, etc.). +However, it will not remove your current installations (or shared software folder). So after running this uninstall, simply remove your DEVON_IDE_HOME directory of all devonfw-ide installations and potential shared software folder. You may also want to clean up your ~/Downloads directory from files downloaded by devonfw-ide. We do not automate this as deleting a directory is a very simple manual step and we do not want to take responsibility for severe data loss if your workspaces contained valuable work.

+
+ +
+
+
intellij
+
+

The intellij commandlet allows to install, configure, and launch IntelliJ. +To launch IntelliJ for your current workspace and devonfw-ide installation, simply run: +devon intellij

+
+
+

You may also supply additional arguments as devon intellij «args». These are explained by the following table:

+
+
+
Usage of devon intellij
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then to command will be invoked for each workspace +|setup |setup IntelliJ (install or update) +|add-plugin «id»|install an additional plugin +|run |launch IntelliJ (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+

There are variables that can be used for IntelliJ. These are explained by the following table:

+
+
+
Variables of devonfw-ide for intelliJ
+

|== == == == == == == == == == == = +|Variable|Meaning +|INTELLIJ_VERSION|The version of the tool IntelliJ to install and use. +|INTELLIJ_EDITION_TYPE|The edition of the tool IntelliJ to install and use. The value C means Community edition and the value U means Ultimate edition. The Ultimate edition requires a license. The user has to buy the license separately and it is not part of devonfw-ide. The devonfw-ide only supports download and installation. +|*EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with IntelliJ you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder intellij/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example scala.properties:

+
+
+
+
plugin_id=org.intellij.scala
+plugin_active=false
+
+
+
+

The variables are defined as following:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins use the search on https://plugins.jetbrains.com/idea_ce to find the plugin of your choice. Select the tab Versions and click on a version in the list. The plugin ID is displayed in the upper right corner. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon intellij add-plugin «plugin_id».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of IntelliJ. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+ +
+
+
ionic
+
+

The ionic commandlet allows to install, configure, and launch ionic (ionic-cli). Calling devon ionic «args» is more or less the same as calling ionic «args» but with some advanced features and ensuring that ionic is properly set up for your project.

+
+
+

The arguments (devon ionic «args») are explained by the following table:

+
+
+
Usage of devon ionic
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup yarn (install and verify), configurable via YARN_VERSION +|create |Create a new devon4ng ionic project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ionic with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
jasypt
+
+

The jasypt commandlet allows to install jasypt and encrypt or decrypt secrets using strong encryption given a secure masterpassword. See also devon4j password encryption guide for further details.

+
+
+

The arguments (devon jasypt «args») are explained by the following table:

+
+
+
Usage of devon jasypt
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup jasypt (install and verify), configurable via JASYPT_VERSION +|encrypt |Encrypt a secret with a masterpassword +|decrypt |Decrypt an encrypted secret with a masterpassword +|== == == == == == == == == == == =

+
+
+
+
example
+
+
+
devon jasypt encrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: secret
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: secret
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+devon jasypt decrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+secret
+
+
+ +
+
+
java
+
+

The java commandlet allows to install and setup Java. Also it supports devon4j. +The arguments (devon java «args») are explained by the following table:

+
+
+
Usage of devon java
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup OpenJDK (install or update and verify), configurable via JAVA_VERSION (e.g. 8u242b08 or 11.0.6_10) +|create «args» |create a new Java project based on devon4j application template. If a single argument is provided, this is the package name and is automatically split into groupId and artifactId. Use -DdbType=«db» to choose the database (hana, oracle, mssql, postgresql, mariadb, mysql, h2, hsqldb). Any option starting with dash is passed as is." +|migrate [from «version»] [single] |migrate a devon4j project to the latest version. If for some reasons the current devonfw version can not be auto-detected you may provide it manually after the 'from' argument. Also the 'single' option allows to migrate only to the next available version." +|cicd «args» |generate cicd files for the current devon4java project +|== == == == == == == == == == == =

+
+
+

Since 2021.12.003 an extra version of Java can be configured via EXTRA_JAVA_VERSION variable. This can be used to launch your IDE with a different (newer) version of Java but keeping the build of your project stable.

+
+
+
+
create
+
+

Examples for create a new devon4j application:

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create -Dversion=0.0.1-alpha1 com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 0.0.1-alpha1, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group demo-app
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId demo-app, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp -DartifactId=demo-app -DdbType=hana
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId demo-app, version 1.0.0-SNAPSHOT, and SAP hana database.

+
+
+
+
devon java create com.example.domain.myapp -DdbType=oracle -Dversion=0.0.1 com.example.group -Dbatch=batch
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 0.0.1, oracle database, and with a batch module.

+
+
+
+
migrate
+
+

Example for migrating a devon4j application:

+
+
+
+
devon java migrate
+
+
+
+

Will migrate current devon4j application to the latest version available.

+
+ +
+
+
jenkins
+
+

The jenkins commandlet allows to install, configure, and launch Jenkins.

+
+
+
Usage of devon jenkins
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup Jenkins (install and verify) +|start |Start your local Jenkins server +|stop |Stop your local Jenkins server +|add |Add current project as CI job to your local Jenkins +|== == == == == == == == == == == =

+
+ +
+
+
Kubernetes
+
+

The kubectl commandlet allows to install and use kubernetes. +On Windows WSL 2 (Windows Subsystem for Linux) has to be installed properly as a prerequisite. +The setup on windows will then install kubernetes with K3D. K3D will create a cluster with a single node with the default name "devonfw-cluster"

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon kubectl «args») are explained by the following table:

+
+
+
Usage of devon kubectl
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Kubernetes (install and verify) as per above flow. +|«args» |call kubectl with the specified arguments. Call kubectl help for details or use kubectl directly as preferred. +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

Please note that on Windows and macOS, Kubernetes support comes together with Docker Desktop that is installed via docker commandlet. +When you have installed and launched Docker Desktop, you can once enable Kubernetes in the Preferences.

+
+
+

On Linux however, Kubernetes is installed separately by this commandlet.

+
+
+
+
usage
+
+

Once installed via setup, you can run kubectl directly from any shell of your OS directly. +Run kubectl help to get started and use the online documentations and resources on the web to get familiar with Kubernetes. +It is not our intention to repeat this here.

+
+
+

Please note that the kubectl commandlet is a command wrapper.

+
+ +
+
+
mvn
+
+

The mvn commandlet allows to install, configure, and launch maven. It is similar to maven-wrapper and mdub. So calling devon mvn «args» is more or less the same as calling mvn «args» but with the benefit that the version of maven preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon mvn «args») are explained by the following table:

+
+
+
Usage of devon mvn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via MVN_BUILD_OPTS +|setup |setup Maven (install and verify), configurable via MAVEN_VERSION +|get-version |Print the version of your current project. Will consolidate the version for multi-module projects ignoring dev[-SNAPSHOT] versions and fail on mixed versions. +|set-version «nv» [«cv»] |Set the version of your current project to «nv» (assuming your current version is «cv»). +|check-no-snapshots |Check if no «version»-SNAPSHOT dependencies are used. +|check-top-level-project |Check if you are running on a top-level project or fail if in a module or no maven project at all. +|release |Start a clean deploy release build, configurable via MVN_RELEASE_OPTS +|«args» |run maven with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
ng
+
+

The ng commandlet allows to install, configure, and launch ng (angular-cli). Calling devon ng «args» is more or less the same as calling ng «args» but with some advanced features and ensuring that ng is properly set up for your project.

+
+
+

The arguments (devon ng «args») are explained by the following table:

+
+
+
Usage of devon ng
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup yarn (install and verify), configurable via NG_VERSION +|create |Create a new devon4ng project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ng with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
node
+
+

The node commandlet allows to install and setup node.js. +The arguments (devon node «args») are explained by the following table:

+
+
+
Usage of devon node
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup node.js (install and verify), configurable via NODE_VERSION +|create «name» [«args»] | create a new devon4node application (same as devon4node new) +|generate «s» [«args»] | generate devon4node components using the schematic «s» (same as devon4node generate) +|db «c» [«args»] | execute a TypeORM command «c» (same as devon4node db) +|cicd «args» |generate cicd files for the current devon4node project +|«args» | call NodeJS with the specified arguments +|== == == == == == == == == == == =

+
+ +
+
+
npm
+
+

The npm commandlet allows to install, configure, and launch npm. Calling devon npm «args» is more or less the same as calling npm «args» but with the benefit that the version of npm preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon npm «args») are explained by the following table:

+
+
+
Usage of devon npm
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via NPM_BUILD_OPTS +|setup |setup NPM (install and verify), configurable via NPM_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |Start a clean deploy release build, configurable via NPM_RELEASE_OPTS +|«args» |run NPM with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
release
+
+

Create a release in a standardized way including the following steps:

+
+
+
    +
  • +

    verify the current project (no local changes, etc.)

    +
  • +
  • +

    warn if «version»-SNAPSHOT dependencies are used

    +
  • +
  • +

    determine «version» (if currently «version»-SNAPSHOT) and print out release information.

    +
  • +
  • +

    ask user for confirmation

    +
  • +
  • +

    bump release to «version» in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    create annotated tag for your release as release/«version»

    +
  • +
  • +

    invoke deployment on build-system

    +
  • +
  • +

    set next version as («version»+1)-SNAPSHOT in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    push your changes

    +
  • +
+
+
+
Usage of devon release
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|…​ |any optional argument will directly be passed to the actual command to build the deployment +|== == == == == == == == == == == =

+
+
+
+
Build-Tools
+
+

This release commandlet utilizes the build commandlet to support multiple build-tools such as maven, gradle, or npm. Each of those commandlets should respect the variable «TOOL»_RELEASE_OPTS to customize the parameters for the release build.

+
+
+

So e.g. if a pom.xml is detected, maven will be used. In this example the variable MVN_RELEASE_OPTS is used that defaults to clean deploy -Dchangelist= -Pdeploy. +If you provide a specific argument this will be passed additionally. +So if you invoke the command devon release -P myProfile, the above step invoke deployment on build-system would technically call this:

+
+
+
+
mvn clean deploy -Dchangelist= -Pdeploy -P myProfile
+
+
+
+

Please also note that it is very tricky to determine and modify the version of a project in a fully generic way. +Even though we try our best to support different scenarios, we can not ensure this is working for edge-cases. +Therefore, we strongly encourage to follow best practices such as ci-friendly maven. +Further, sticking to the defaults and follow the devonfw standard to name the profile for custom goals in deployment simply deploy is recommended.

+
+ +
+
+
sonar
+
+

The sonar commandlet allows to install, configure, and launch SonarQube.

+
+
+
Usage of devon sonar
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup SonarQube (install and verify) +|start |Start your local SonarQube server +|stop |Stop your local SonarQube server +|analyze |Analyze current project with SonarQube +|== == == == == == == == == == == =

+
+ +
+
+
vscode
+
+

The vscode commandlet allows to install, configure, and launch Visual Studio Code. +To launch VSCode for your current workspace and devonfw-ide installation, simply run: +devon vscode

+
+
+

You may also supply additional arguments as devon vscode «args». These are explained by the following table:

+
+
+
Usage of devon vscode
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then to command will be invoked for each workspace +|setup |setup VSCode (install or update) +|add-plugin «id»|install an additional plugin (extension) +|run |launch VSCode (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with VS Code you need plugins (called extensions in VS Code). Of course devonfw-ide can automate this for you: +In your settings git repository create a folder vscode/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example devonfw-extension-pack.properties:

+
+
+
+
plugin_id=devonfw.devonfw-extension-pack
+plugin_active=true
+
+
+
+

The variables are defined as following:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins click on Extensions at the bottom of the left navigation icon bar in VS code. Then use the search to find the plugin of your choice. If you click on it the plugin ID is displayed in grey beside the official title at the top of the plugin details page. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon vscode add-plugin «plugin-name» from the config file settings/vscode/plugins/«plugin-name».properties. See the settings/vscode/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of VS code. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
cleaning plugins on update
+
+

If you want to strictly manage the plugins for VS code in your project, you can create or edit the file settings/vscode/plugins in your settings and add this variable:

+
+
+
+
clean_plugins_on_update=true
+
+
+
+

This will wipe all plugins when an update of VS code is performed (e.g. via devon ide update) and reinstall all configured plugins. This gives you more control over the governance of the plugins and allows you to remove a plugin later during the project lifecycle. However, this will delete all manually installed plugins automatically without asking.

+
+ +
+
+
yarn
+
+

The yarn commandlet allows to install, configure, and launch yarn. Calling devon yarn «args» is more or less the same as calling yarn «args» but with the benefit that the version of yarn preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon yarn «args») are explained by the following table:

+
+
+
Usage of devon yarn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via YARN_BUILD_OPTS +|setup |setup yarn (install and verify), configurable via YARN_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |start a clean deploy release build, configurable via YARN_RELEASE_OPTS +|«args» |run yarn with the given arguments («args») +|== == == == == == == == == == == =

+
+
+ +
+
+
+

Structure

+
+

The directory layout of your devonfw-ide will look like this:

+
+
+
File structure of your devonfw-ide
+
+
/ projects (or C:\Projects, etc.)
+└──/ my-project ($DEVON_IDE_HOME)
+    ├──/ conf
+    ├──/ log
+    ├──/ scripts
+    ├──/ settings
+    ├──/ software
+    ├──/ system
+    ├──/ updates
+    ├──/ workspaces
+    ├── setup
+    ├── setup.bat
+    └── devon-ide-doc.pdf
+
+
+
+

The elements of the above structure are described in the individual sections. As they are hyperlinks you can simply click on them to get more details.

+
+ +
+
conf
+
+

This folder contains configurations for your IDE:

+
+
+
File structure of the conf folder
+
+
/ conf
+├──/ .m2
+│  ├──/ repository
+│  │  ├──/ ant
+│  │  ├──/ ...
+│  │  └──/ zw
+│  ├── settings-security.xml
+│  └── settings.xml
+├──/ .sonar
+├──/ ...
+└── variables
+
+
+
+

The .m2 folder is used for configurations of maven. It contains the local repository folder used as cache for artifacts downloaded and installed by maven (see also maven repositories). +Further, there are two configuration files for maven:

+
+
+
    +
  • +

    settings.xml initialized from a template from your devonfw-ide [settings]. You may customize this to your needs (configuring HTTP proxies, credentials, or other user-specific settings). Secrets can be specified as $[«variable.name»] and will be prompted, encrypted and replaced automatically during the setup (unless in batch mode). Please note that this process is skipped in batch mode and also if you use the default settings URL (for simplicity of testing). To make use of this feature simply fork or copy the settings to your own git repo. In case your credentials have changed or you made a typo, you can simply redo this step by first moving your ${DEVON_IDE_HOME}/conf/.m2/settings.xml file to a temporary folder and then calling devon mvn setup.

    +
  • +
  • +

    settings-security.xml is auto-generated for you by devonfw-ide with a random password. This should make it easier for devonfw-ide users to use password encryption and never add passwords in plain text for better security.

    +
  • +
+
+
+

Finally, there is a file variables for the user-specific configuration of devonfw-ide.

+
+ +
+
+
log
+
+

The log directory is used to store log files e.g. for the IDE configurator. You may look here for debug information if something goes wrong.

+
+ +
+
+
scripts
+
+

This directory is the heart of the devonfw-ide and contains the required scripts.

+
+
+
File structure of the conf folder
+
+
/scripts
+├──/ command
+│  ├── build
+│  ├── docker
+│  ├── eclipse
+│  ├── gradle
+│  ├── help
+│  ├── ide
+│  ├── intellij
+│  ├── ionic
+│  ├── jasypt
+│  ├── java
+│  ├── jenkins
+│  ├── kubectl
+│  ├── mvn
+│  ├── ng
+│  ├── node
+│  ├── npm
+│  ├── project
+│  ├── release
+│  ├── sonar
+│  ├── vscode
+│  └── yarn
+├── devon
+├── devon.bat
+├── environment-project
+├── environment-project.bat
+├── functions
+└── devon.properties
+
+
+
+

The command folder contains the commandlets. +The devon script is the key command line interface for devonfw-ide. +There is also devon.bat that can be used in cmd or PowerShell. +As the devon CLI can be used as a global command on your computer from any directory and gets installed centrally, it aims to be stable, minimal, and lightweight. +The key logic to set up the environment variables is therefore in a separate script environment-project and its Windows variant environment-project.bat inside this scripts folder. +The file functions contains a collection of reusable bash functions. +These are sourced and used by the commandlets. +Finally the devon.properties file contains defaults for the general configuration of devonfw-ide.

+
+ +
+
+
settings
+
+

The devonfw-ide requires settings with configuration templates for the arbitrary tools.

+
+
+

To get an initial set of these settings we provide the default ide-settings as an initial package. These are also released so you can download the latest stable or any history version at maven central.

+
+
+

To test devonfw-ide or for very small projects you can also use the latest default settings (just hit return when setup is asking for the Settings URL). +However, for collaborative projects we strongly encourage you to distribute and maintain the settings via a dedicated and project specific git repository. +This gives you the freedom to control and manage the tools with their versions and configurations during the project lifecycle. +Therefore simply follow the admin usage guide.

+
+
+
+
Structure
+
+

The settings folder (see SETTINGS_PATH) has to follow this file structure:

+
+
+
File structure of settings
+
+
/settings
+├──/ devon
+│  ├──/ conf
+│  │  ├──/ .m2
+│  │  │  └── settings.xml
+│  │  ├──/ npm
+│  │  │  └── .npmrc
+│  │  └── devon.properties
+├──/ eclipse
+│  ├──/ workspace
+│  │  ├──/ setup
+│  │  └──/ update
+│  ├── lifecycle-mapping-metadata.xml
+│  └── project.dictionary
+├──/ ...
+├──/ sonarqube
+│  └──/ profiles
+│     ├── Devon-C#.xml
+│     ├── ...
+│     └── Devon-XML.xml
+├──/ vscode
+│  └──/ workspace
+│     ├──/ setup
+│     └──/ update
+└── devon.properties
+
+
+
+

As you can see, the settings folder contains sub-folders for tools of the IDE. +So the devon folder contains devon.properties files for the configuration of your environment. +Further, for the IDEs such as eclipse or vscode, the according folders contain the templates to manage the workspace via our configurator.

+
+
+
+
Configuration Philosophy
+
+

Different tools and configuration files require a different handling:

+
+
+
    +
  • +

    Where suitable, we directly use these configurations from your settings (e.g. for eclipse/lifecycle-mapping-metadata.xml, or eclipse/project.dictionary).

    +
  • +
  • +

    The devon folder in settings contains templates for configuration files. These are copied to the devonfw-ide installation during setup (if no such file already exists). In this way the settings repository can provide reasonable defaults but allows the user to take over control and customize to his personal needs (e.g. .m2/settings.xml).

    +
  • +
  • +

    Other configurations need to be imported manually. To avoid manual steps and simplify use we try to automate as much as possible. This currently applies to sonarqube profiles but will be automated with sonar-devon4j-plugin in the future.

    +
  • +
  • +

    For tools with complex configuration structures like eclipse, intellij, or vscode we provide a smart mechanism via our configurator.

    +
  • +
+
+
+
+
Customize Settings
+
+

You can easily customize these settings for the requirements of your project. We suggest that one team member is responsible to ensure that everything stays consistent and works.

+
+
+

You may also create new sub-folders in settings and put individual items according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth to share in your team. However, to share and maintain knowledge we recommend to use a wiki.

+
+ +
+
+
software
+
+

The software folder contains the third party tools for your IDE such as maven, npm, java, etc. +With respect to the licensing terms you may create a custom archive containing a devonfw-ide together with the required software. +However, to be platform independent and allow lightweight updates, the devonfw-ide is capable to download and install the software automatically for you.

+
+
+
+
Repository
+
+

By default, software is downloaded via the internet from public download URLs of the according tools. However, some projects may need specific tools or tool versions that are not publicly available. +In such case, they can create their own software repository (e.g. in a VPN) and configure the base URL of it via DEVON_SOFTWARE_REPOSITORY variable. +Then, devonfw-ide will download all software from this repository only instead of the default public download URLs. +This repository (URL) should be accessible within your network via HTTPS (or HTTP) and without any authentication. +The repository needs to have the following structure:

+
+
+
+
${DEVON_SOFTWARE_REPOSITORY}/«tool»/«version»/«tool»-«version»[-«os»].tgz
+
+
+
+

So for every tool «tool» (java, maven, vscode, eclipse, etc.) you need to provide a folder in your repository. +Within this folder for every supported version «version» you need a subfolder. +This subfolder needs to contain the tool in that version for every operating system «os» (windows, linux, or mac - omitted if platform independent, e.g. for maven).

+
+
+
+
Shared
+
+

By default, each installation of devonfw-ide has its own physical installations of the required tools in the desired versions stored in its local software folder. +While this is great for isolation of devonfw-ide installations and to prevent side-effects, it can cause a huge waste of disc resources in case you are having many installations of devonfw-ide. +If you are a power-user of devonfw-ide with more than ten or even up to hundreds of installations on your machine, you might love to share installations of a software tool in a particular version between multiple devonfw-ide installations.

+
+
+ + + + + +
+ + +If you use this power-feature you are taking responsibility for side-effects and should not expect support. Also if you are using Windows please read Symlinks in Windows and make up your mind if you really want to do so. You might also use this hint and maintain it manually without enabling the following feature. +
+
+
+

In order to do so, you only need to configure the variable DEVON_SOFTWARE_PATH in your ~/devon.properties pointing to an existing directory on your disc (e.g. /projects/software or C:\projects\software). +Then devonfw-ide will install required software into ${DEVON_SOFTWARE_PATH}/${software_name}/${software_version} as needed and create a symbolic link to it in ${DEVON_IDE_HOME}/software/${software_name}.

+
+
+

As a benefit, another devonfw-ide installation using the same software with the same version can re-use the existing installation and only needs to create the symbolic link. No more waste of having many identical JDK installations on your disc.

+
+
+

As a drawback, you need to be aware that specific tools may be "manipulated" after installation. +The most common case is that a tool allows to install plugins or extensions such as all IDEs do. Such "manipulations" will cause side-effects between the different devonfw-ide installations sharing the same version of that tool. +While this can also be a benefit it may also cause trouble. +If you have a sensitive project that should not be affected by such side-effects, you may again override the DEVON_SOFTWARE_PATH variable to the empty value in your ${DEVON_IDE_HOME}/conf/devon.properties of that sensitive installation:

+
+
+
+
DEVON_SOFTWARE_PATH=
+
+
+
+

This will disable this feature particularly for that specific sensitive devonfw-ide installation but let you use it for all other ones.

+
+
+
+
Custom
+
+

In some cases, a project might need a (proprietary) tool(s) that (are) not supported by devonfw-ide. A very simple solution is to get a release of devonfw-ide and add the tool(s) to the software folder and then distribute this modified release to your team. However, this has several drawbacks as you then have a fork of devonfw-ide and will lose your tool(s) when updating to a new release.

+
+
+

As a solution for this need, devonfw-ide lets you configure custom tools via the DEVON_IDE_CUSTOM_TOOLS variable. It can be defined in devon.properties of your settings git repository as an array of the custom tools you need to add. +For each entry the following applies:

+
+
+
    +
  • +

    It needs to have the form «tool»:«version»[:all][:«repository-url»]

    +
  • +
  • +

    The first entry must have the «repository-url» included which is used as default

    +
  • +
  • +

    Further entries will inherit this default if omitted

    +
  • +
  • +

    This URL is used in the same way as described above for a software repository.

    +
  • +
  • +

    The DEVON_SOFTWARE_REPOSITORY variable is ignored by this feature.

    +
  • +
  • +

    The optional infix :all is used to indicate that the tool is platform independent. Otherwise, an OS specific infix is appended to the URL file to download for your platform (windows, linux, or mac).

    +
  • +
+
+
+

As an example, we define it in ${DEVON_IDE_HOME}/settings/devon.properties:

+
+
+
+
DEVON_IDE_CUSTOM_TOOLS=(jboss-eap:7.1.4.GA:all:https://host.tld/projects/my-project firefox:70.0.1)
+
+
+
+

This will download and extract the following content to your software folder:

+
+ +
+

Please note that if you are not using windows, the -windows suffix will be -mac or -linux.

+
+ +
+
+
system
+
+

The system folder contains documentation and solutions for operation system specific integration. Please have a look to get the maximum out of devonfw-ide and become a very efficient power user.

+
+ +
+
+
updates
+
+

The updates folder is used for temporary data. This includes:

+
+
+
    +
  • +

    extracted archives for installation and updates

    +
  • +
  • +

    backups of old content on updates to prevent data loss

    +
  • +
+
+
+

If all works fine you may clean this folder to save some kilo- or mega-bytes. Otherwise, you can ignore it unless you are looking for a backup after a failed or unplanned upgrade.

+
+ +
+
+
workspaces
+
+

The workspaces folder contains folders for your active work. There is a workspace folder main dedicated for your primary work. You may do all your work inside the main workspace. Also, you are free to create any number of additional workspace folders named as you like (e.g. test, release, testing, my-sub-project, etc.). Using multiple workspaces is especially relevant for Eclipse as each workspace has its own Eclipse runtime instance and configuration.

+
+
+

Within the workspace folder (e.g. workspaces/main) you are again free to create sub-folders for (sub-)projects according to your needs. We assume that in most cases you clone git repositories here. The following structure shows an example layout for devonfw:

+
+
+
File structure of workspaces
+
+
/ workspaces
+├──/ main
+│  ├──/ .metadata
+│  ├──/ ide
+│  ├──/ devon4j
+│  └──/ my-thai-star
+└──/ stable
+   ├──/ .metadata
+   ├──/ ide
+   └──/ devon4j
+
+
+
+

In the main workspace you may find the cloned forks for regular work (in the example e.g. devon4j) as a base to create pull-requests while in the stable workspace there is a clone of devon4j from the official devon4j. +However, this is just an example. Some people like to create separate workspaces for development and maintenance branches with git. Other people just switch between those via git checkout.

+
+ +
+
+
Project import
+
+

The devonfw-ide supports to automatically check out and import required projects into your IDE during setup. To configure this you put a .properties file for each desired project into the projects sub-folder in your settings. Each .properties file describes one "project" which you would like to check out and (potentially) import:

+
+
+
+
path=myproject
+workingsets=Set1,Set2
+workspace=example
+git.url=http://github.com/someorg/someproject
+git.branch=develop
+build.path=.
+build.cmd=mvn -DskipTests=true -Darchetype.test.skip=true clean install
+eclipse=import
+active=true
+
+
+
+
+
.Variables of project import
+
+
+
+

|=== +|Variable|Value|Meaning +|path|e.g. myproject, will clone into ${WORKSPACE_PATH}/myproject|(required) Path into which the project is cloned. This path is relative to the workspace. +|workingsets|e.g. ws1,ws2|(optional) This will create working sets (in eclipse). Each module (eclipse project) of this project will be part of all these working sets. Working sets will be automatically created if necessary. +|workspace|main|Workspace to use for checkout and import. Default is main. +|git.url|e.g. http://github.com/someorg/someproject|(required) Git URL to use for cloning the project. +|git.branch|e.g. develop|(optional) Git branch to checkout. Git default branch is default. +|build.path|e.g. . (default)|(optional) The directory inside path where to trigger an initial build after clone or pull (if build.cmd is set). For a regular project use . to build top-level project. +|build.cmd +|e.g. mvn -DskipTests=true -Darchetype.test.skip=true clean install +|(optional) The devonfw command to invoke to build the project after clone or pull. If omitted no build is triggered. +|eclipse|e.g. import|(optional) Desired action for eclipse IDE. If you put import here all modules (eclipse projects) in the current project will be imported into eclipse. If you leave this out or put any other value for this parameter, no change in eclipse is done. +|active|true|(optional) If set to false the project is skipped during the setup. +|===

+
+
+

Please note that the .properties file is parsed via shell and not via java. So be careful with "advanced" features .properties files normally support.

+
+
+
+
+
+
+

Advanced Features

+ +
+

Cross-Platform Tooling

+ +
+
+

Git Client

+
+

If you are looking for a git client that works cross-platform we recommend to use Fork.

+
+
+
+

Draw Diagrams

+
+

To draw diagrams for your project or for blueprints in devonfw, we recommend the following cross-platform tools:

+
+
+
    +
  • +

    draw.io is a powerful generic vector painting program (similar to visio). You can get a free open-source edition for your desktop from here.

    +
  • +
  • +

    ObjectAid is a nice and easy to use eclipse plugin that you can use to quickly create UML diagrams from existing code. While class-diagrams are supported for free, you need to buy a license if you want to use the other diagram types.

    +
  • +
  • +

    PlantUML is a great tool that can render UML diagrams from simple markup that can be easily managed in git or other version-control systems together with your code. Its simplicity allows branching and merging unlike other greedy binary UML data-formats.

    +
  • +
+
+
+
+

Browser Plugins

+
+

There are tons of helpful browser plugins out there and it might be a matter of personal taste what you like to have installed. However, as we are heavily using github we want to promote octotree. +In case you also work with ZenHub you might want to install the Zenhub Browser Extension.

+
+ +
+
+

Windows Tooling

+ +
+
+

Installing software

+
+

The devon IDE already contains a lot of software. But if you need more, here are some ways to get it easily:

+
+
+
+

Chocolatey

+
+

Chocolatey is a repository for free and open source software similar to the repositories known from Linux like apt, apk, pacman, …

+
+
+
+

Winget

+
+

Microsoft is also working on a repository for Windows called winget. It is currently in alpha state, but is expected to be integrated in the upcoming Windows 11.

+
+
+
+

Integration into Windows-Explorer

+
+

After you have set up your devonfw-ide on a windows machine, +you already have windows-explorer integration out-of-the-box. +Just right-click on the folder you would like to open in a terminal and choose from the context menu:

+
+
+
    +
  • +

    Git Bash

    +
  • +
  • +

    Open devonfw cmd shell here

    +
  • +
  • +

    Open devonfw PowerShell here

    +
  • +
  • +

    Open devonfw Cygwin Bash here (only if cygwin was installed during setup)

    +
  • +
+
+
+
+

Tabs everywhere

+
+

Many people got used to tabs that have been introduced by all major browsers:

+
+
+
+tabs in firefox +
+
Figure 1. Tabs in Firefox
+
+
+

This nice feature can be added to many other tools.

+
+
+
+

Tabs for Windows Explorer

+
+

If you want to have tabs for windows explorer simply install Clover

+
+
+
+tabs in windows explorer +
+
Figure 2. Tabs in Windows Explorer
+
+
+
+

Tabs for SSH

+
+

If you want to have tabs for your SSH client Putty (or even better Kitty that comes with WinSCP integration) you simply install SuperPutty +BTW: Windows 10 has already an SSH client included.

+
+
+
+tabs for SSH sessions +
+
Figure 3. Tabs for SSH
+
+
+
+

Tabs for CMD

+
+

If you want to have tabs for your windows command-line you simply install ConEmu. Here you can also add other shells like Putty. +Also you should have a look at the new Windows Terminal which also supports tabs.

+
+
+
+tabs for windows shells +
+
Figure 4. Tabs for CMD
+
+
+

See integration to make ConEmu work flawless with devonfw-ide.

+
+
+
+

Windows Helpers

+ +
+
+

Handle passwords

+
+

Do you want complex passwords that differ for each account for security? Do you only want to remember a single password for simplicity? Do you want to have both? Then, you need to install KeePass right now.

+
+
+
+

Real text editor

+
+

A real developer needs a real text editor and not windows built in notepad. +The most common choice is Notepad++.

+
+
+
+

Real compression tool

+
+

Do you need to deal with ZIP files, TGZ, dpkg, etc.? Just install 7zip and forget about windows build-in ZIP support (that is buggy with long file paths, etc.).

+
+
+
+

Smarter clipboard

+
+

Do you want to paste something from the clipboard but meanwhile you had to copy something else? Just, one of the many things you can easily do with ditto.

+
+
+
+

PowerToys

+
+

Microsoft provides some extensions to improve the workflow in windows called PowerToys. They include tools like a file renamer, a way to order your windows on the screen, a color picker and more.

+
+
+
+

Sysinternals Tools

+
+

A real developer will quickly notice that windows build in tools to analyze processes, network connections, autostarts, etc. are quite poor. So, what you really would like is the Sysinternals-Suite. You can make process-explorer your default task manager. Use autoruns to prevent nasty background things to be started automatically. Use tcpview to figure out which process is blocking port 8080, etc.

+
+
+
+

Cope with file locks

+
+

Did you ever fail to delete a file or directory that was locked by some process and you did not even know which one it was? +Then you might love IoBit Unlocker. +See also this article.

+
+
+
+ +
+

Are you used to symbolic and hard links in Linux? Do you have to work with Windows? Would you also like to have such links in Windows? Why not? Windows supports real links (not shortcuts like in other cases). +If you even want to have it integrated in windows explorer you might want to install linkshellextension. However, you might want to disable SmartMove in the configuration if you face strange performance issues when moving folders.

+
+
+
+

Linux

+
+

Install Cygwin and get your bash in windows with ssh-agent, awk, sed, tar, and all the tools you love (or hate). Windows 10 has already a Linux as an installable feature included: WSL and from Version 2004 on WSL2, which is a native Linux Kernel running on Windows (in a light weight VM).

+
+
+
+

X11

+
+

Do you want to connect via SSH and need to open an X11 app from the server? Do you want to see the GUI on your windows desktop? +No problem: Install VcXsrv.

+
+
+
+

Keyboard Freak

+
+

Are you a keyboard shortcut person? Do you want to have shortcuts for things like « and » ? +Then you should try AutoHotKey. +For the example (« and ») you can simply use this script to get started:

+
+
+
+
^<::Send {U+00AB}
+^+<::Send {U+00BB}
+
+
+
+

First, just press [ctrl][<] and [ctrl][>] ([ctrl][shift][<]). Next, create shortcuts to launch your IDE, to open your favorite tool, etc. +If you like a GUI to easily configure the scrips, that comes with a lot of extensions preinstalled, you should have a look at Ac’tive Aid.

+
+
+
+

Paint anywhere on your desktop

+
+

Do you collaborate sharing your screen, and want to mark a spot on top of what you see? Use Epic Pen to do just that.

+
+
+
+

Analyze graphs

+
+

Do you need to visualize complex graph structures? Convert them to Trivial Graph Format (.tgf), and run yEd to get an interactive visualization of your graph.

+
+
+
+

Up your screen capture game

+
+

Capture any part of your screen with a single click, directly upload to dropbox, or run a svn commit all in one go with Greenshot. Another screen capture tool where you can easily manage and edit your screenshots and also do screen recordings with is Screenpresso.

+
+
+
+

Fast Search in Windows

+
+

Everything is a desktop search utility for Windows that can rapidly find files and folders by name.

+
+ +
+
+

MacOS Tooling

+ +
+
+

Finder

+
+

If you want to open a terminal from a folder in Finder and automatically get your environment set properly for devonfw-ide you will find the perfect solution here.

+
+
+
+devonfw-ide integration in MacOS Finder +
+
+
+

So after installing (see below) the integration(s) provided here, you can easily open a terminal ready for your devonfw-ide:

+
+
+
    +
  • +

    right click ([control] + click) on file or folder in Finder

    +
  • +
  • +

    Expand the Quick-Actions sub-menu

    +
  • +
  • +

    Click on the desired action (e.g. Open devonfw-Terminal here)

    +
  • +
  • +

    Verify that your environment is properly initialized by invoking:

    +
    +
    +
    mvn -v
    +
    +
    +
  • +
+
+
+

To get this feature for macOS Terminal.app open Finder and run the workflow system/mac/terminal/Open_devonfw-Terminal_here.workflow (in ${DEVON_IDE_HOME}). For iTerm2.app (that can be installed from App Store) do the same with system/mac/iterm/Open_devonfw-iTerm_here.workflow.

+
+
+
+

Keyboard

+
+

Keyboard support is not an integration however, some users coming from other platforms may struggle with the way macOS deals with (external non-apple) keyboards. +So to make it short: if you are happy with your keyboard and shortcuts, you can skip all the following. +Otherwise, if you think that pressing keys like Home, End, etc. should just work as expected or pressing Alt Gr should allow you to type the special characters as printed on your German keyboard then here you will find a solution to your problems! +To get all automated you can just run the script system/mac/keyboard/install-mac-keyboard-support.sh (in ${DEVON_IDE_HOME}). +If you would like to understand what is going on, you want to customize the keyboard settings to your needs, or you want a keyboard layout other than German ISO, please read on.

+
+
+
+

Keyboard Layouts

+
+

Keyboard layouts allow a fine-grained mapping of each key on your keyboard to its resulting input character or behaviour. +They are macOS native features and do not need to have software running as a background service to make the keyboard mapping work (see Karabiner section below as an alternative). +They are provided as so called bundle (white lego brick icon). Like a macOS app this is a folder containing a Contents folder with a specific sub-folder structure. +In the Resources subfolder *.keylayout files are placed and define the exact mapping for the keyboard. +As an example we provide a Keyboard Layouts folder containing a bundle for a German keyboard mapping.

+
+
+

To install keyboard layouts simply double-click the bundle or copy it to ~/Library/Keyboard Layouts. +To actually use them go to System Preferences and select Keyboard. +Then, select the tab Input Sources. +With the + button you can add a keyboard layout for your daily usage with your Mac. +Please note that the keyboard layout shipped with devonfw-ide is called German-ISO and can be found in the Others section at the end of the list. +It can be used as an example or template, if you want to create your own layout.

+
+
+
+Keyboard Preferences / Input Sources +
+
+
+

When you have multiple mappings in place, on the top menu bar you will find a little icon next to the current time that allows you to switch between the keyboard layouts, which is very handy when you switch from your native MacBook keyboard to an external USB keyboard or vice versa. +Even for a pure MacOS geek this can be helpful in case a friend coming from Windows/Linux is supposed to type something on the Mac in a pair-programming session.

+
+
+

In our German keyboard mapping example you can use the keys like Alt Gr, etc. to type special characters as you would expect and as printed on your keyboard. +To make Pos1, End, etc. work properly across all apps please read on to the next section(s).

+
+
+

In case you would like to create your own keyboard layout you can of course edit the *.keylayout files in a text editor. +However, to make this much more comfortable, you can use the graphical editor tool Ukelele. +Besides, the app itself, the Ukelele dmg file, also contains a Documentation and a Resources folder. +The latter contains many keyboard layouts that you can use as a starting point.

+
+
+
+

Key Bindings

+
+

Still, various keyboard shortcuts might not work as expected for you. +Therefore, we provide you with an advanced configuration in the folder system/mac/keyboard/KeyBindings that you can copy to your ~/Library folder:

+
+
+
+
cd system/mac/keyboard/
+cp -r KeyBindings ~/Library
+
+
+
+

To make the changes work you need to log out and log in again or you can reboot. +After that, your Home (Pos1) and End buttons should work as expected including with selection via Shift and/or Command. +Also, you can use Command together with the left or right arrow key to move between words and combined it with Shift for selection. +As an example, for further customization you can press Command + < to type the unicode character «.

+
+
+

However, still some apps listen to keyboard events on a lower level and come with their own keyboard mappings. +In these apps you might still experience unexpected behaviour. +Solutions can be found in the following sub-sections.

+
+
+
+

Switch Control and Command

+
+

If you are used to windows or linux and get easily confused by the apple keyboard behaviour you might want to switch the Control and the Option key. +Open System Preferences and select Keyboard. +Then, in the first tab, click on the button Modifier Keys…​. +For every keyboard you can customize the behaviour of your modifier keys and therefore switch Control and Option as illustrated in the screenshot:

+
+
+
+Keyboard Preferences / Modifier Keys +
+
+
+

Programmers now should also disable that Control + Space is opening Spotlight Search as otherwise this shortcut can not be redefined in other apps like common IDEs.

+
+
+
+Keyboard Preferences / Shortcuts +
+
+
+
+

Eclipse

+
+

In Eclipse, move and select by word as described above does not work. +Even worse, the most important shortcut does not work: Control + Space for code completion (content assist). +You can manually redefine the key bindings in Preferences under General > Keys. +However, with multiple IDE installations and workspaces this will quickly get tedious. +Therefore, you can Export and Import specific Preferences such as Keys Preferences to/from a *.epf (Eclipse PreFerences) file. +We have done all this for you so you can just import the file located in system/mac/keyboard/Eclipse/eclipse-mac-keybindings.epf into your Eclipse. +Happy coding.

+
+
+
+

Karabiner

+
+

If you want more dynamics and do not worry about an app that has to run in the background to make your keyboard work as you like (no relevant performance overhead), you can try Karabiner Elements. +This is a powerful tool to remap your keyboard shortcuts. +In the UI you can only directly create and edit Simple Modifications that are too limited for most use-cases. +However, using Complex Modifications you can do a lot of magic to customize the keyboard behaviour to your personal needs. +A key with any combination of modifiers can be mapped to any key with arbitrary modifiers. +This can also be bound to conditions based on the frontmost application or the keyboard model. +These complex modifications are configured as *.json files. +We have included a set with useful rules for external keyboards, programmer shortcuts, etc. +If you have Karabiner installed, you only need to copy the contents of the karabiner folder located in this directory to your ~/.config folder:

+
+
+
+
cd system/mac/keyboard/
+cp karabiner/assets/complex_modifications/*.json ~/.config/karabiner/assets/complex_modifications/
+
+
+
+

Now, if you open the Complex Modifications in the Karabiner app, you can click on the + Add rule button and will see these mappings in the pop up. +Select the rules you want to add (e.g. add all) and you are done. +Unlike other solutions, you can quickly tweak your keyboard without the need to log out and restart apps, which gives faster trial and error turnarounds. +Further, if you want to tweak your own configs, Karabiner comes with a secondary app called Karabiner-EventViewer that shows you the names of the keys, modifiers, and apps for the events you are triggering. +This is very helpful to get the config right.

+
+ +
+
+

Linux Tooling

+
+

There is nothing in this section so far. If you are a Linux user, please share your experience and provide your valuable hints.

+
+ +
+
+

Lombok

+
+

Even though not officially recommended by devon4j some projects want to use lombok in their project. +As this requires some tweaks for IDEs we do support you with this guide in case you want to use it.

+
+
+
+

Lombok in Eclipse

+
+

For eclipse there is a plugin to activate lombok support in eclipse. +We have this already configured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon eclipse add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for lombok based projects you only need to activate this plugin in your project specific settings in lombok.properties for eclipse (replace false with true for plugin_active).

+
+
+
+

Lombok for VS-Code

+
+

For VisualStudio Code there is an extension to activate lombok support in VS-Code. +We have this already preconfigured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon vscode add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for lombok based projects you only need to activate this plugin in your project specific settings in lombok.properties for vscode (replace false with true for plugin_active).

+
+
+
+

Lombok for IntelliJ

+
+

For IntelliJ there is a plugin to activate lombok support in IntelliJ. +Currently we have not yet configured or automated this in devonfw-ide. +Please contribute to change this. See issues #453 and #491.

+
+
+
+
+
+

Support

+ +
+

Migration from oasp4j-ide

+
+

The devonfw-ide is a completely new and innovative solution for managing the local development environment that has been created from scratch. +Releases of OASP as well as releases of devonfw until version 3.1.x are based on the old oasp4j-ide that is now considered deprecated. As devonfw-ide is a complete redesign this will have some impact for the users. This section should help and assist so you do not get lost.

+
+
+
+

Get familiar with devonfw-ide

+
+

First of all you should roughly get familiar with the new devonfw-ide. The key features and changes are:

+
+
+
    +
  • +

    platform-agnostic (supports Windows, Mac, and Linux in a single distribution)

    +
  • +
  • +

    small core (reduced the download package from ~2 gigabyte to ~2 megabyte)

    +
  • +
  • +

    fast and easy updates (built in update support)

    +
  • +
  • +

    minimum number of scripts (removed tons of end-user scripts making things much simpler)

    +
  • +
  • +

    fully automated setup (run setup script and you are ready - even for advanced features that had to be configured manually before)

    +
  • +
  • +

    single command for everything (entire CLI available via new devon command)

    +
  • +
+
+
+

For all the details you should study the documentation starting from the beginning.

+
+
+
+

Migration of existing oasp4j-ide installation

+
+
    +
  • +

    extract new devonfw-ide-scripts on top of your existing installation

    +
  • +
  • +

    run setup

    +
  • +
  • +

    done

    +
  • +
+
+
+

If you get errors:

+
+
+
    +
  • +

    ask your technical lead to fix the settings git repo for devonfw-ide, or offer to do it for them.

    +
  • +
  • +

    you need to merge the devon folder into your settings

    +
  • +
  • +

    you need to merge the devon.properties into your settings

    +
  • +
  • +

    you should check your variables[-customized][.bat] and merge required customizations into the proper configuration

    +
  • +
+
+
+
+

Hints for users after migration

+
+

Getting used to all the new commands might be tedious when starting after a migration.

+
+
+
Comparison of commands
+

|=== +|oasp4j-ide command|devonfw-ide command|Comment +|create-or-update-workspace|devon eclipse ws-update +.4+|actually not needed anymore as workspace is updated automatically when IDE is launched. To launch your IDE simply run devon eclipse, devon intellij, or devon vscode. If you like to get launch scripts for your IDE e.g. Eclipse just call devon eclipse --all create-script. +|create-or-update-workspace «workspace»|cd «workspace» && devon eclipse ws-update +|update-all-workspaces|devon eclipse --all ws-update +|create-or-update-workspace-vs|devon vscode ws-update

+
+
+

|devcon workspace create «workspace»|Simply create the «workspace» directory (e.g. cd workspaces && mkdir examples)|

+
+
+

|scripts/update-eclipse-workspace-settings|devon eclipse ws-reverse|To add new properties (old option --new) use devon eclipse ws-reverse-add

+
+
+

|devcon project build
+devcon devon4j build
+devcon devon4ng build +|devon build|

+
+
+

|devcon devon4j create|devon java create|

+
+
+

|devcon devon4ng create|devon ng create|

+
+
+

|devcon system *
+devcon dist * +|setup or devon ide setup|

+
+
+

|console.bat|-|Simply open terminal in selected folder. On Windows right-click folder in windows-explorer and select open devonfw CMD here.

+
+
+

|devcon help|devon help|

+
+
+

|devcon doc|Read the documentation from devonfw.com| +|===

+
+
+ +
+
+

License

+
+

The product devonfw-ide is licensed under the following terms.

+
+
+

Binaries of this product have been made available to you by devonfw under the Apache Public License 2.0.

+
+
+

The documentation of this product is licensed under the terms of the Creative Commons License (Attribution-NoDerivatives 4.0 International).

+
+
+

All of the source code to this product is available under licenses which are both free and open source.

+
+
+

More specifically, most of the source code is available under the Apache Public License 2.0. The remainder of the software which is not under the Apache license is available under one of a variety of other free and open source licenses. Those that require reproduction of the license text in the distribution are given below. (Note: your copy of this product may not contain code covered by one or more of the licenses listed here, depending on the exact product and version you choose.)

+
+
+

The following table shows the components that may be used. The column inclusion indicates the way the component is included:

+
+
+
    +
  • +

    directly included means the component is directly contained in the download package of devonfw-ide we provide

    +
  • +
  • +

    default setup means the component is not initially included but will be downloaded during the setup by default

    +
  • +
  • +

    optional means the component is neither initially included nor downloaded by default, but only gets downloaded and installed if explicitly triggered by you when invoking additional commands or if explicitly configured by your project.

    +
  • +
+
+
+
Third party components
+

|== == == == == == == == == == == = +|Component|Inclusion|License +|https://github.com/devonfw/ide[devonfw-ide] | Directly included |https://github.com/devonfw/ide/blob/master/LICENSE[ASL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] API | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] Implementation | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://openjdk.java.net/[OpenJDK] / AdoptOpenJDK (Java) |Default Setup| GPLv2 +|https://maven.apache.org/[Maven] | Default Setup|https://www.apache.org/licenses/LICENSE-2.0[ASL 2.0] +|https://code.visualstudio.com/[VS Code] |Optional| MIT (Terms) +|https://github.com/devonfw/extension-pack-vscode[extension-pack-vscode] |Optional|https://github.com/devonfw/extension-pack-vscode/blob/master/LICENSE[ASL 2.0] +|https://www.eclipse.org/[Eclipse] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] +|https://github.com/devonfw/cobigen[CobiGen] |Optional|https://github.com/devonfw/cobigen/blob/master/LICENSE.txt[ASL 2.0] +|https://marketplace.eclipse.org/content/tm-terminal[TM Terminal] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] (see here) +|https://github.com/iloveeclipse/anyedittools/[AnyEdit] |Optional|https://github.com/iloveeclipse/anyedittools/blob/master/LICENSE.md[EPL 1.0] +|https://checkstyle.org/eclipse-cs/[EclipseCS] |Optional|https://github.com/checkstyle/eclipse-cs/blob/master/LICENSE[LGPL 2.1] +|https://marketplace.eclipse.org/content/spotbugs-eclipse-plugin[SpotBugs Eclipse plugin] |Optional|https://github.com/spotbugs/spotbugs/blob/master/LICENSE[LGPL 2.1] +|https://www.eclemma.org/[EclEmma] |Optional|https://www.eclemma.org/license.html[EPL 1.0] +|https://basti1302.github.io/startexplorer/[StartExplorer] |Optional|http://www.wtfpl.net/txt/copying/[WTFPL 2] +|http://myregexp.com/eclipsePlugin.html[regex tester] 
|Optional|http://www.gnu.org/licenses/gpl-2.0.html[GPL 2.0] (see here) +|https://github.com/m-m-m/eclipse-templatevariables/[eclipse-templatevariables] |Optional|https://github.com/m-m-m/eclipse-templatevariables/blob/master/LICENSE.txt[ASL 2.0] +|https://nodejs.org/[Node.js] |Default Setup|https://raw.githubusercontent.com/nodejs/node/master/LICENSE[License] +|https://www.npmjs.com/[NPM] |Default Setup|https://github.com/npm/cli/blob/latest/LICENSE[Artistic License 2.0] (Terms) +|https://cli.angular.io/[Angular CLI] (ng) |Optional|https://cli.angular.io/license.html[MIT] +|http://groovy-lang.org/[Groovy]|Optional|https://github.com/apache/groovy/blob/master/LICENSE[ASL 2.0] +|https://ant.apache.org/[Apache Ant]|Optional|https://github.com/apache/ant/blob/master/LICENSE[ASL 2.0] +|https://gradle.org/[Gradle] |Optional|https://github.com/gradle/gradle/blob/master/LICENSE[ASL 2.0] +|https://jenkins.io/[Jenkins] |Optional|https://github.com/jenkinsci/jenkins/blob/master/LICENSE.txt[MIT] +|https://www.sonarsource.com/plans-and-pricing/community/[SonarQube (Community Edition)] |Optional|https://github.com/SonarSource/sonarqube/blob/master/LICENSE.txt[LGPL 3.0] +|https://www.sonarlint.org/eclipse/[SonarLint] |Optional|https://github.com/SonarSource/sonarlint-eclipse/blob/master/LICENSE.txt[LGPL 3+] +|https://github.com/devonfw/cicdgen[cicdgen] |Optional|https://github.com/devonfw/cicdgen/blob/develop/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4j[devon4j] |Optional|https://github.com/devonfw/devon4j/blob/develop/LICENSE[ASL 2.0] +|https://github.com/devonfw/devon4ng[devon4ng] |Optional|https://github.com/devonfw/devon4ng/blob/master/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4node[devon4node] |Optional|https://github.com/devonfw/devon4node/blob/develop/LICENSE.txt[ASL 2.0] +|https://www.jetbrains.com/idea/[IntelliJ IDEA] |Optional|https://www.jetbrains.com/opensource/idea/[ASL 2.0] +|http://www.jasypt.org/[jasypt] 
|Optional|http://www.jasypt.org/license.html[ASL 2.0] +|https://www.docker.com/[docker]|Optional|https://docs.docker.com/engine/#licensing[ASL 2.0] and EULA +|https://kubernetes.io/[kubernetes]|Optional|https://github.com/kubernetes/kubernetes/blob/master/LICENSE[ASL 2.0] +|===

+
+
+
+

Apache Software License - Version 2.0

+
+
+
                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+
+
+

Eclipse Public License - Version 1.0

+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+
+1. DEFINITIONS
+
+"Contribution" means:
+
+a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and
+
+b) in the case of each subsequent Contributor:
+
+i) changes to the Program, and
+
+ii) additions to the Program;
+
+where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program.
+
+"Contributor" means any person or entity that distributes the Program.
+
+"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+"Program" means the Contributions distributed in accordance with this Agreement.
+
+"Recipient" means anyone who receives the Program under this Agreement, including all Contributors.
+
+2. GRANT OF RIGHTS
+
+a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form.
+
+b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+
+c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+
+d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+
+3. REQUIREMENTS
+
+A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that:
+
+a) it complies with the terms and conditions of this Agreement; and
+
+b) its license agreement:
+
+i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+
+ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+
+iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and
+
+iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange.
+
+When the Program is made available in source code form:
+
+a) it must be made available under this Agreement; and
+
+b) a copy of this Agreement must be included with each copy of the Program.
+
+Contributors may not remove or alter any copyright notices contained within the Program.
+
+Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution.
+
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved.
+
+This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation.
+
+
+
+
+

Eclipse Public License - Version 2.0

+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE (“AGREEMENT”). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+1. DEFINITIONS
+
+“Contribution” means:
+
+    a) in the case of the initial Contributor, the initial content Distributed under this Agreement, and
+    b) in the case of each subsequent Contributor:
+        i) changes to the Program, and
+        ii) additions to the Program;
+    where such changes and/or additions to the Program originate from and are Distributed by that particular Contributor. A Contribution “originates” from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include changes or additions to the Program that are not Modified Works.
+
+“Contributor” means any person or entity that Distributes the Program.
+
+“Licensed Patents” mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+“Program” means the Contributions Distributed in accordance with this Agreement.
+
+“Recipient” means anyone who receives the Program under this Agreement or any Secondary License (as applicable), including Contributors.
+
+“Derivative Works” shall mean any work, whether in Source Code or other form, that is based on (or derived from) the Program and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship.
+
+“Modified Works” shall mean any work in Source Code or other form that results from an addition to, deletion from, or modification of the contents of the Program, including, for purposes of clarity any new file in Source Code form that contains any contents of the Program. Modified Works shall not include works that contain only declarations, interfaces, types, classes, structures, or files of the Program solely in each case in order to link to, bind by name, or subclass the Program or Modified Works thereof.
+
+“Distribute” means the acts of a) distributing or b) making available in any manner that enables the transfer of a copy.
+
+“Source Code” means the form of a Program preferred for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+“Secondary License” means either the GNU General Public License, Version 2.0, or any later versions of that license, including any exceptions or additional permissions as identified by the initial Contributor.
+2. GRANT OF RIGHTS
+
+    a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, Distribute and sublicense the Contribution of such Contributor, if any, and such Derivative Works.
+    b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in Source Code or other form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+    c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to Distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+    d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+    e) Notwithstanding the terms of any Secondary License, no Contributor makes additional grants to any Recipient (other than those set forth in this Agreement) as a result of such Recipient's receipt of the Program under the terms of a Secondary License (if permitted under the terms of Section 3).
+
+3. REQUIREMENTS
+
+3.1 If a Contributor Distributes the Program in any form, then:
+
+    a) the Program must also be made available as Source Code, in accordance with section 3.2, and the Contributor must accompany the Program with a statement that the Source Code for the Program is available under this Agreement, and informs Recipients how to obtain it in a reasonable manner on or through a medium customarily used for software exchange; and
+    b) the Contributor may Distribute the Program under a license different than this Agreement, provided that such license:
+        i) effectively disclaims on behalf of all other Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+        ii) effectively excludes on behalf of all other Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+        iii) does not attempt to limit or alter the recipients' rights in the Source Code under section 3.2; and
+        iv) requires any subsequent distribution of the Program by any party to be under a license that satisfies the requirements of this section 3.
+
+3.2 When the Program is Distributed as Source Code:
+
+    a) it must be made available under this Agreement, or if the Program (i) is combined with other material in a separate file or files made available under a Secondary License, and (ii) the initial Contributor attached to the Source Code the notice described in Exhibit A of this Agreement, then the Program may be made available under the terms of such Secondary Licenses, and
+    b) a copy of this Agreement must be included with each copy of the Program.
+
+3.3 Contributors may not remove or alter any copyright, patent, trademark, attribution notices, disclaimers of warranty, or limitations of liability (‘notices’) contained within the Program from any copy of the Program which they Distribute, provided that Contributors may add their own appropriate notices.
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor (“Commercial Contributor”) hereby agrees to defend and indemnify every other Contributor (“Indemnified Contributor”) against any losses, damages and costs (collectively “Losses”) arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement, including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be Distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to Distribute the Program (including its Contributions) under the new version.
+
+Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. Nothing in this Agreement is intended to be enforceable by any entity that is not a Contributor or Recipient. No third-party beneficiary rights are created under this Agreement.
+Exhibit A – Form of Secondary Licenses Notice
+
+“This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License, v. 2.0 are satisfied: {name license(s), version(s), and exceptions or additional permissions here}.”
+
+    Simply including a copy of this Agreement, including this Exhibit A is not sufficient to license the Source Code under Secondary Licenses.
+
+    If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice.
+
+    You may add additional accurate notices of copyright ownership.
+
+
+
+
+

MIT License

+
+
+
Copyright <YEAR> <COPYRIGHT HOLDER>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+
+
+

Artistic License - Version 2.0

+
+
+
Copyright (c) 2000-2006, The Perl Foundation.
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+Preamble
+
+This license establishes the terms under which a given free software Package may be copied, modified, distributed, and/or redistributed. The intent is that the Copyright Holder maintains some artistic control over the development of that Package while still keeping the Package available as open source and free software.
+
+You are always permitted to make arrangements wholly outside of this license directly with the Copyright Holder of a given Package. If the terms of this license do not permit the full use that you propose to make of the Package, you should contact the Copyright Holder and seek a different licensing arrangement.
+Definitions
+
+"Copyright Holder" means the individual(s) or organization(s) named in the copyright notice for the entire Package.
+
+"Contributor" means any party that has contributed code or other material to the Package, in accordance with the Copyright Holder's procedures.
+
+"You" and "your" means any person who would like to copy, distribute, or modify the Package.
+
+"Package" means the collection of files distributed by the Copyright Holder, and derivatives of that collection and/or of those files. A given Package may consist of either the Standard Version, or a Modified Version.
+
+"Distribute" means providing a copy of the Package or making it accessible to anyone else, or in the case of a company or organization, to others outside of your company or organization.
+
+"Distributor Fee" means any fee that you charge for Distributing this Package or providing support for this Package to another party. It does not mean licensing fees.
+
+"Standard Version" refers to the Package if it has not been modified, or has been modified only in ways explicitly requested by the Copyright Holder.
+
+"Modified Version" means the Package, if it has been changed, and such changes were not explicitly requested by the Copyright Holder.
+
+"Original License" means this Artistic License as Distributed with the Standard Version of the Package, in its current version or as it may be modified by The Perl Foundation in the future.
+
+"Source" form means the source code, documentation source, and configuration files for the Package.
+
+"Compiled" form means the compiled bytecode, object code, binary, or any other form resulting from mechanical transformation or translation of the Source form.
+Permission for Use and Modification Without Distribution
+
+(1) You are permitted to use the Standard Version and create and use Modified Versions for any purpose without restriction, provided that you do not Distribute the Modified Version.
+Permissions for Redistribution of the Standard Version
+
+(2) You may Distribute verbatim copies of the Source form of the Standard Version of this Package in any medium without restriction, either gratis or for a Distributor Fee, provided that you duplicate all of the original copyright notices and associated disclaimers. At your discretion, such verbatim copies may or may not include a Compiled form of the Package.
+
+(3) You may apply any bug fixes, portability changes, and other modifications made available from the Copyright Holder. The resulting Package will still be considered the Standard Version, and as such will be subject to the Original License.
+Distribution of Modified Versions of the Package as Source
+
+(4) You may Distribute your Modified Version as Source (either gratis or for a Distributor Fee, and with or without a Compiled form of the Modified Version) provided that you clearly document how it differs from the Standard Version, including, but not limited to, documenting any non-standard features, executables, or modules, and provided that you do at least ONE of the following:
+
+(a) make the Modified Version available to the Copyright Holder of the Standard Version, under the Original License, so that the Copyright Holder may include your modifications in the Standard Version.
+(b) ensure that installation of your Modified Version does not prevent the user installing or running the Standard Version. In addition, the Modified Version must bear a name that is different from the name of the Standard Version.
+(c) allow anyone who receives a copy of the Modified Version to make the Source form of the Modified Version available to others under
+(i) the Original License or
+(ii) a license that permits the licensee to freely copy, modify and redistribute the Modified Version using the same licensing terms that apply to the copy that the licensee received, and requires that the Source form of the Modified Version, and of any works derived from it, be made freely available in that license fees are prohibited but Distributor Fees are allowed.
+Distribution of Compiled Forms of the Standard Version or Modified Versions without the Source
+
+(5) You may Distribute Compiled forms of the Standard Version without the Source, provided that you include complete instructions on how to get the Source of the Standard Version. Such instructions must be valid at the time of your distribution. If these instructions, at any time while you are carrying out such distribution, become invalid, you must provide new instructions on demand or cease further distribution. If you provide valid instructions or cease distribution within thirty days after you become aware that the instructions are invalid, then you do not forfeit any of your rights under this license.
+
+(6) You may Distribute a Modified Version in Compiled form without the Source, provided that you comply with Section 4 with respect to the Source of the Modified Version.
+Aggregating or Linking the Package
+
+(7) You may aggregate the Package (either the Standard Version or Modified Version) with other packages and Distribute the resulting aggregation provided that you do not charge a licensing fee for the Package. Distributor Fees are permitted, and licensing fees for other components in the aggregation are permitted. The terms of this license apply to the use and Distribution of the Standard or Modified Versions as included in the aggregation.
+
+(8) You are permitted to link Modified and Standard Versions with other works, to embed the Package in a larger work of your own, or to build stand-alone binary or bytecode versions of applications that include the Package, and Distribute the result without restriction, provided the result does not expose a direct interface to the Package.
+Items That are Not Considered Part of a Modified Version
+
+(9) Works (including, but not limited to, modules and scripts) that merely extend or make use of the Package, do not, by themselves, cause the Package to be a Modified Version. In addition, such works are not considered parts of the Package itself, and are not subject to the terms of this license.
+General Provisions
+
+(10) Any use, modification, and distribution of the Standard or Modified Versions is governed by this Artistic License. By using, modifying or distributing the Package, you accept this license. Do not use, modify, or distribute the Package, if you do not accept this license.
+
+(11) If your Modified Version has been derived from a Modified Version made by someone other than you, you are nevertheless required to ensure that your Modified Version complies with the requirements of this license.
+
+(12) This license does not grant you the right to use any trademark, service mark, tradename, or logo of the Copyright Holder.
+
+(13) This license includes the non-exclusive, worldwide, free-of-charge patent license to make, have made, use, offer to sell, sell, import and otherwise transfer the Package with respect to any patent claims licensable by the Copyright Holder that are necessarily infringed by the Package. If you institute patent litigation (including a cross-claim or counterclaim) against any party alleging that the Package constitutes direct or contributory patent infringement, then this Artistic License to you shall terminate on the date that such litigation is filed.
+
+(14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+
+

Creative Commons License - Attribution-NoDerivatives 4.0 International

+
+
+
By exercising the Licensed Rights (defined below), You accept and agree to be bound by the terms and conditions of this Creative Commons Attribution-NoDerivatives 4.0 International Public License ("Public License"). To the extent this Public License may be interpreted as a contract, You are granted the Licensed Rights in consideration of Your acceptance of these terms and conditions, and the Licensor grants You such rights in consideration of benefits the Licensor receives from making the Licensed Material available under these terms and conditions.
+
+Section 1 – Definitions.
+
+    Adapted Material means material subject to Copyright and Similar Rights that is derived from or based upon the Licensed Material and in which the Licensed Material is translated, altered, arranged, transformed, or otherwise modified in a manner requiring permission under the Copyright and Similar Rights held by the Licensor. For purposes of this Public License, where the Licensed Material is a musical work, performance, or sound recording, Adapted Material is always produced where the Licensed Material is synched in timed relation with a moving image.
+    Copyright and Similar Rights means copyright and/or similar rights closely related to copyright including, without limitation, performance, broadcast, sound recording, and Sui Generis Database Rights, without regard to how the rights are labeled or categorized. For purposes of this Public License, the rights specified in Section 2(b)(1)-(2) are not Copyright and Similar Rights.
+    Effective Technological Measures means those measures that, in the absence of proper authority, may not be circumvented under laws fulfilling obligations under Article 11 of the WIPO Copyright Treaty adopted on December 20, 1996, and/or similar international agreements.
+    Exceptions and Limitations means fair use, fair dealing, and/or any other exception or limitation to Copyright and Similar Rights that applies to Your use of the Licensed Material.
+    Licensed Material means the artistic or literary work, database, or other material to which the Licensor applied this Public License.
+    Licensed Rights means the rights granted to You subject to the terms and conditions of this Public License, which are limited to all Copyright and Similar Rights that apply to Your use of the Licensed Material and that the Licensor has authority to license.
+    Licensor means the individual(s) or entity(ies) granting rights under this Public License.
+    Share means to provide material to the public by any means or process that requires permission under the Licensed Rights, such as reproduction, public display, public performance, distribution, dissemination, communication, or importation, and to make material available to the public including in ways that members of the public may access the material from a place and at a time individually chosen by them.
+    Sui Generis Database Rights means rights other than copyright resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other essentially equivalent rights anywhere in the world.
+    You means the individual or entity exercising the Licensed Rights under this Public License. Your has a corresponding meaning.
+
+Section 2 – Scope.
+
+    License grant.
+        Subject to the terms and conditions of this Public License, the Licensor hereby grants You a worldwide, royalty-free, non-sublicensable, non-exclusive, irrevocable license to exercise the Licensed Rights in the Licensed Material to:
+            reproduce and Share the Licensed Material, in whole or in part; and
+            produce and reproduce, but not Share, Adapted Material.
+        Exceptions and Limitations. For the avoidance of doubt, where Exceptions and Limitations apply to Your use, this Public License does not apply, and You do not need to comply with its terms and conditions.
+        Term. The term of this Public License is specified in Section 6(a).
+        Media and formats; technical modifications allowed. The Licensor authorizes You to exercise the Licensed Rights in all media and formats whether now known or hereafter created, and to make technical modifications necessary to do so. The Licensor waives and/or agrees not to assert any right or authority to forbid You from making technical modifications necessary to exercise the Licensed Rights, including technical modifications necessary to circumvent Effective Technological Measures. For purposes of this Public License, simply making modifications authorized by this Section 2(a)(4) never produces Adapted Material.
+        Downstream recipients.
+            Offer from the Licensor – Licensed Material. Every recipient of the Licensed Material automatically receives an offer from the Licensor to exercise the Licensed Rights under the terms and conditions of this Public License.
+            No downstream restrictions. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, the Licensed Material if doing so restricts exercise of the Licensed Rights by any recipient of the Licensed Material.
+        No endorsement. Nothing in this Public License constitutes or may be construed as permission to assert or imply that You are, or that Your use of the Licensed Material is, connected with, or sponsored, endorsed, or granted official status by, the Licensor or others designated to receive attribution as provided in Section 3(a)(1)(A)(i).
+
+    Other rights.
+        Moral rights, such as the right of integrity, are not licensed under this Public License, nor are publicity, privacy, and/or other similar personality rights; however, to the extent possible, the Licensor waives and/or agrees not to assert any such rights held by the Licensor to the limited extent necessary to allow You to exercise the Licensed Rights, but not otherwise.
+        Patent and trademark rights are not licensed under this Public License.
+        To the extent possible, the Licensor waives any right to collect royalties from You for the exercise of the Licensed Rights, whether directly or through a collecting society under any voluntary or waivable statutory or compulsory licensing scheme. In all other cases the Licensor expressly reserves any right to collect such royalties.
+
+Section 3 – License Conditions.
+
+Your exercise of the Licensed Rights is expressly made subject to the following conditions.
+
+    Attribution.
+
+        If You Share the Licensed Material, You must:
+            retain the following if it is supplied by the Licensor with the Licensed Material:
+                identification of the creator(s) of the Licensed Material and any others designated to receive attribution, in any reasonable manner requested by the Licensor (including by pseudonym if designated);
+                a copyright notice;
+                a notice that refers to this Public License;
+                a notice that refers to the disclaimer of warranties;
+                a URI or hyperlink to the Licensed Material to the extent reasonably practicable;
+            indicate if You modified the Licensed Material and retain an indication of any previous modifications; and
+            indicate the Licensed Material is licensed under this Public License, and include the text of, or the URI or hyperlink to, this Public License.
+        For the avoidance of doubt, You do not have permission under this Public License to Share Adapted Material.
+        You may satisfy the conditions in Section 3(a)(1) in any reasonable manner based on the medium, means, and context in which You Share the Licensed Material. For example, it may be reasonable to satisfy the conditions by providing a URI or hyperlink to a resource that includes the required information.
+        If requested by the Licensor, You must remove any of the information required by Section 3(a)(1)(A) to the extent reasonably practicable.
+
+Section 4 – Sui Generis Database Rights.
+
+Where the Licensed Rights include Sui Generis Database Rights that apply to Your use of the Licensed Material:
+
+    for the avoidance of doubt, Section 2(a)(1) grants You the right to extract, reuse, reproduce, and Share all or a substantial portion of the contents of the database, provided You do not Share Adapted Material;
+    if You include all or a substantial portion of the database contents in a database in which You have Sui Generis Database Rights, then the database in which You have Sui Generis Database Rights (but not its individual contents) is Adapted Material; and
+    You must comply with the conditions in Section 3(a) if You Share all or a substantial portion of the contents of the database.
+
+For the avoidance of doubt, this Section 4 supplements and does not replace Your obligations under this Public License where the Licensed Rights include other Copyright and Similar Rights.
+
+Section 5 – Disclaimer of Warranties and Limitation of Liability.
+
+    Unless otherwise separately undertaken by the Licensor, to the extent possible, the Licensor offers the Licensed Material as-is and as-available, and makes no representations or warranties of any kind concerning the Licensed Material, whether express, implied, statutory, or other. This includes, without limitation, warranties of title, merchantability, fitness for a particular purpose, non-infringement, absence of latent or other defects, accuracy, or the presence or absence of errors, whether or not known or discoverable. Where disclaimers of warranties are not allowed in full or in part, this disclaimer may not apply to You.
+    To the extent possible, in no event will the Licensor be liable to You on any legal theory (including, without limitation, negligence) or otherwise for any direct, special, indirect, incidental, consequential, punitive, exemplary, or other losses, costs, expenses, or damages arising out of this Public License or use of the Licensed Material, even if the Licensor has been advised of the possibility of such losses, costs, expenses, or damages. Where a limitation of liability is not allowed in full or in part, this limitation may not apply to You.
+
+    The disclaimer of warranties and limitation of liability provided above shall be interpreted in a manner that, to the extent possible, most closely approximates an absolute disclaimer and waiver of all liability.
+
+Section 6 – Term and Termination.
+
+    This Public License applies for the term of the Copyright and Similar Rights licensed here. However, if You fail to comply with this Public License, then Your rights under this Public License terminate automatically.
+
+    Where Your right to use the Licensed Material has terminated under Section 6(a), it reinstates:
+        automatically as of the date the violation is cured, provided it is cured within 30 days of Your discovery of the violation; or
+        upon express reinstatement by the Licensor.
+    For the avoidance of doubt, this Section 6(b) does not affect any right the Licensor may have to seek remedies for Your violations of this Public License.
+    For the avoidance of doubt, the Licensor may also offer the Licensed Material under separate terms or conditions or stop distributing the Licensed Material at any time; however, doing so will not terminate this Public License.
+    Sections 1, 5, 6, 7, and 8 survive termination of this Public License.
+
+Section 7 – Other Terms and Conditions.
+
+    The Licensor shall not be bound by any additional or different terms or conditions communicated by You unless expressly agreed.
+    Any arrangements, understandings, or agreements regarding the Licensed Material not stated herein are separate from and independent of the terms and conditions of this Public License.
+
+Section 8 – Interpretation.
+
+    For the avoidance of doubt, this Public License does not, and shall not be interpreted to, reduce, limit, restrict, or impose conditions on any use of the Licensed Material that could lawfully be made without permission under this Public License.
+    To the extent possible, if any provision of this Public License is deemed unenforceable, it shall be automatically reformed to the minimum extent necessary to make it enforceable. If the provision cannot be reformed, it shall be severed from this Public License without affecting the enforceability of the remaining terms and conditions.
+    No term or condition of this Public License will be waived and no failure to comply consented to unless expressly agreed to by the Licensor.
+    Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority.
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 2.1

+
+
+
 Version 2.1, February 1999
+
+Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL.  It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users.
+
+This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below.
+
+When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things.
+
+To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it.
+
+For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights.
+
+We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library.
+
+To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others.
+
+Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license.
+
+Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs.
+
+When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library.
+
+We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances.
+
+For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License.
+
+In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system.
+
+Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library.
+
+The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you".
+
+A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables.
+
+The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".)
+
+"Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library.
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does.
+
+1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) The modified work must itself be a software library.
+    b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change.
+    c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License.
+    d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful.
+
+    (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices.
+
+Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy.
+
+This option is useful when you wish to copy part of the code of the Library into a program that is not a library.
+
+4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange.
+
+If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code.
+
+5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License.
+
+However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables.
+
+When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law.
+
+If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.)
+
+Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself.
+
+6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications.
+
+You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things:
+
+    a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.)
+    b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with.
+    c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution.
+    d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place.
+    e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy.
+
+For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute.
+
+7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above.
+    b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it.
+
+10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License.
+
+11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation.
+
+14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Libraries
+
+If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License).
+
+To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the library's name and an idea of what it does.
+Copyright (C) year  name of author
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2.1 of the License, or (at your option) any later version.
+
+This library is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with this library; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the library, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright interest in
+the library `Frob' (a library for tweaking knobs) written
+by James Random Hacker.
+
+signature of Ty Coon, 1 April 1990
+Ty Coon, President of Vice
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 3

+
+
+
Version 3, 29 June 2007
+
+Copyright © 2007 Free Software Foundation, Inc. <https://fsf.org/>
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+
+This version of the GNU Lesser General Public License incorporates the terms and conditions of version 3 of the GNU General Public License, supplemented by the additional permissions listed below.
+0. Additional Definitions.
+
+As used herein, “this License” refers to version 3 of the GNU Lesser General Public License, and the “GNU GPL” refers to version 3 of the GNU General Public License.
+
+“The Library” refers to a covered work governed by this License, other than an Application or a Combined Work as defined below.
+
+An “Application” is any work that makes use of an interface provided by the Library, but which is not otherwise based on the Library. Defining a subclass of a class defined by the Library is deemed a mode of using an interface provided by the Library.
+
+A “Combined Work” is a work produced by combining or linking an Application with the Library. The particular version of the Library with which the Combined Work was made is also called the “Linked Version”.
+
+The “Minimal Corresponding Source” for a Combined Work means the Corresponding Source for the Combined Work, excluding any source code for portions of the Combined Work that, considered in isolation, are based on the Application, and not on the Linked Version.
+
+The “Corresponding Application Code” for a Combined Work means the object code and/or source code for the Application, including any data and utility programs needed for reproducing the Combined Work from the Application, but excluding the System Libraries of the Combined Work.
+1. Exception to Section 3 of the GNU GPL.
+
+You may convey a covered work under sections 3 and 4 of this License without being bound by section 3 of the GNU GPL.
+2. Conveying Modified Versions.
+
+If you modify a copy of the Library, and, in your modifications, a facility refers to a function or data to be supplied by an Application that uses the facility (other than as an argument passed when the facility is invoked), then you may convey a copy of the modified version:
+
+    a) under this License, provided that you make a good faith effort to ensure that, in the event an Application does not supply the function or data, the facility still operates, and performs whatever part of its purpose remains meaningful, or
+    b) under the GNU GPL, with none of the additional permissions of this License applicable to that copy.
+
+3. Object Code Incorporating Material from Library Header Files.
+
+The object code form of an Application may incorporate material from a header file that is part of the Library. You may convey such object code under terms of your choice, provided that, if the incorporated material is not limited to numerical parameters, data structure layouts and accessors, or small macros, inline functions and templates (ten or fewer lines in length), you do both of the following:
+
+    a) Give prominent notice with each copy of the object code that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the object code with a copy of the GNU GPL and this license document.
+
+4. Combined Works.
+
+You may convey a Combined Work under terms of your choice that, taken together, effectively do not restrict modification of the portions of the Library contained in the Combined Work and reverse engineering for debugging such modifications, if you also do each of the following:
+
+    a) Give prominent notice with each copy of the Combined Work that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the Combined Work with a copy of the GNU GPL and this license document.
+    c) For a Combined Work that displays copyright notices during execution, include the copyright notice for the Library among these notices, as well as a reference directing the user to the copies of the GNU GPL and this license document.
+    d) Do one of the following:
+        0) Convey the Minimal Corresponding Source under the terms of this License, and the Corresponding Application Code in a form suitable for, and under terms that permit, the user to recombine or relink the Application with a modified version of the Linked Version to produce a modified Combined Work, in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.
+        1) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (a) uses at run time a copy of the Library already present on the user's computer system, and (b) will operate properly with a modified version of the Library that is interface-compatible with the Linked Version.
+    e) Provide Installation Information, but only if you would otherwise be required to provide such information under section 6 of the GNU GPL, and only to the extent that such information is necessary to install and execute a modified version of the Combined Work produced by recombining or relinking the Application with a modified version of the Linked Version. (If you use option 4d0, the Installation Information must accompany the Minimal Corresponding Source and Corresponding Application Code. If you use option 4d1, you must provide the Installation Information in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.)
+
+5. Combined Libraries.
+
+You may place library facilities that are a work based on the Library side by side in a single library together with other library facilities that are not Applications and are not covered by this License, and convey such a combined library under terms of your choice, if you do both of the following:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities, conveyed under the terms of this License.
+    b) Give prominent notice with the combined library that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+6. Revised Versions of the GNU Lesser General Public License.
+
+The Free Software Foundation may publish revised and/or new versions of the GNU Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library as you received it specifies that a certain numbered version of the GNU Lesser General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that published version or of any later version published by the Free Software Foundation. If the Library as you received it does not specify a version number of the GNU Lesser General Public License, you may choose any version of the GNU Lesser General Public License ever published by the Free Software Foundation.
+
+If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library.
+
+
+
+
+

GNU GENERAL PUBLIC LICENSE - Version 2

+
+
+
 Version 2, June 1991
+
+Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA
+
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too.
+
+When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things.
+
+To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it.
+
+For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights.
+
+We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software.
+
+Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations.
+
+Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all.
+
+The precise terms and conditions for copying, distribution and modification follow.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does.
+
+1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change.
+    b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License.
+    c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code.
+
+4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it.
+
+6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License.
+
+7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation.
+
+10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Programs
+
+If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms.
+
+To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the program's name and an idea of what it does.
+Copyright (C) yyyy  name of author
+
+This program is free software; you can redistribute it and/or
+modify it under the terms of the GNU General Public License
+as published by the Free Software Foundation; either version 2
+of the License, or (at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this when it starts in an interactive mode:
+
+Gnomovision version 69, Copyright (C) year name of author
+Gnomovision comes with ABSOLUTELY NO WARRANTY; for details
+type `show w'.  This is free software, and you are welcome
+to redistribute it under certain conditions; type `show c'
+for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright
+interest in the program `Gnomovision'
+(which makes passes at compilers) written
+by James Hacker.
+
+signature of Ty Coon, 1 April 1989
+Ty Coon, President of Vice
+
+
+
+
+

DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2

+
+
+
            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+                    Version 2, December 2004
+
+ Copyright (C) 2004 Sam Hocevar
+  14 rue de Plaisance, 75014 Paris, France
+ Everyone is permitted to copy and distribute verbatim or modified
+ copies of this license document, and changing it is allowed as long
+ as the name is changed.
+
+            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. You just DO WHAT THE FUCK YOU WANT TO.
+
+
+
+
+

License of Node.js

+
+
+
Node.js is licensed for use as follows:
+
+"""
+Copyright Node.js contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+This license applies to parts of Node.js originating from the
+https://github.com/joyent/node repository:
+
+"""
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+The Node.js license applies to all parts of Node.js that are not externally
+maintained libraries.
+
+The externally maintained libraries used by Node.js are:
+
+- Acorn, located at deps/acorn, is licensed as follows:
+  """
+    Copyright (C) 2012-2018 by various contributors (see AUTHORS)
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- Acorn plugins, located at deps/acorn-plugins, is licensed as follows:
+  """
+    Copyright (C) 2017-2018 by Adrian Heine
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- c-ares, located at deps/cares, is licensed as follows:
+  """
+    Copyright (c) 2007 - 2018, Daniel Stenberg with many contributors, see AUTHORS
+    file.
+
+    Copyright 1998 by the Massachusetts Institute of Technology.
+
+    Permission to use, copy, modify, and distribute this software and its
+    documentation for any purpose and without fee is hereby granted, provided that
+    the above copyright notice appear in all copies and that both that copyright
+    notice and this permission notice appear in supporting documentation, and that
+    the name of M.I.T. not be used in advertising or publicity pertaining to
+    distribution of the software without specific, written prior permission.
+    M.I.T. makes no representations about the suitability of this software for any
+    purpose.  It is provided "as is" without express or implied warranty.
+  """
+
+- ICU, located at deps/icu-small, is licensed as follows:
+  """
+    COPYRIGHT AND PERMISSION NOTICE (ICU 58 and later)
+
+    Copyright © 1991-2019 Unicode, Inc. All rights reserved.
+    Distributed under the Terms of Use in https://www.unicode.org/copyright.html.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of the Unicode data files and any associated documentation
+    (the "Data Files") or Unicode software and any associated documentation
+    (the "Software") to deal in the Data Files or Software
+    without restriction, including without limitation the rights to use,
+    copy, modify, merge, publish, distribute, and/or sell copies of
+    the Data Files or Software, and to permit persons to whom the Data Files
+    or Software are furnished to do so, provided that either
+    (a) this copyright and permission notice appear with all copies
+    of the Data Files or Software, or
+    (b) this copyright and permission notice appear in associated
+    Documentation.
+
+    THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
+    ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT OF THIRD PARTY RIGHTS.
+    IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
+    NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
+    DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+    DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+    TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+    PERFORMANCE OF THE DATA FILES OR SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale,
+    use or other dealings in these Data Files or Software without prior
+    written authorization of the copyright holder.
+
+    ---------------------
+
+    Third-Party Software Licenses
+
+    This section contains third-party software notices and/or additional
+    terms for licensed third-party software components included within ICU
+    libraries.
+
+    1. ICU License - ICU 1.8.1 to ICU 57.1
+
+    COPYRIGHT AND PERMISSION NOTICE
+
+    Copyright (c) 1995-2016 International Business Machines Corporation and others
+    All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, and/or sell copies of the Software, and to permit persons
+    to whom the Software is furnished to do so, provided that the above
+    copyright notice(s) and this permission notice appear in all copies of
+    the Software and that both the above copyright notice(s) and this
+    permission notice appear in supporting documentation.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
+    OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
+    HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY
+    SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER
+    RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
+    CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+    CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale, use
+    or other dealings in this Software without prior written authorization
+    of the copyright holder.
+
+    All trademarks and registered trademarks mentioned herein are the
+    property of their respective owners.
+
+    2. Chinese/Japanese Word Break Dictionary Data (cjdict.txt)
+
+     #     The Google Chrome software developed by Google is licensed under
+     # the BSD license. Other software included in this distribution is
+     # provided under other licenses, as set forth below.
+     #
+     #  The BSD License
+     #  http://opensource.org/licenses/bsd-license.php
+     #  Copyright (C) 2006-2008, Google Inc.
+     #
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     # modification, are permitted provided that the following conditions are met:
+     #
+     #  Redistributions of source code must retain the above copyright notice,
+     # this list of conditions and the following disclaimer.
+     #  Redistributions in binary form must reproduce the above
+     # copyright notice, this list of conditions and the following
+     # disclaimer in the documentation and/or other materials provided with
+     # the distribution.
+     #  Neither the name of  Google Inc. nor the names of its
+     # contributors may be used to endorse or promote products derived from
+     # this software without specific prior written permission.
+     #
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+     # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+     # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+     # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+     # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+     # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+     #
+     #
+     #  The word list in cjdict.txt are generated by combining three word lists
+     # listed below with further processing for compound word breaking. The
+     # frequency is generated with an iterative training against Google web
+     # corpora.
+     #
+     #  * Libtabe (Chinese)
+     #    - https://sourceforge.net/project/?group_id=1519
+     #    - Its license terms and conditions are shown below.
+     #
+     #  * IPADIC (Japanese)
+     #    - http://chasen.aist-nara.ac.jp/chasen/distribution.html
+     #    - Its license terms and conditions are shown below.
+     #
+     #  ---------COPYING.libtabe ---- BEGIN--------------------
+     #
+     #  /*
+     #   * Copyright (c) 1999 TaBE Project.
+     #   * Copyright (c) 1999 Pai-Hsiang Hsiao.
+     #   * All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the TaBE Project nor the names of its
+     #   *   contributors may be used to endorse or promote products derived
+     #   *   from this software without specific prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  /*
+     #   * Copyright (c) 1999 Computer Systems and Communication Lab,
+     #   *                    Institute of Information Science, Academia
+     #       *                    Sinica. All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the Computer Systems and Communication Lab
+     #   *   nor the names of its contributors may be used to endorse or
+     #   *   promote products derived from this software without specific
+     #   *   prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  Copyright 1996 Chih-Hao Tsai @ Beckman Institute,
+     #      University of Illinois
+     #  c-tsai4@uiuc.edu  http://casper.beckman.uiuc.edu/~c-tsai4
+     #
+     #  ---------------COPYING.libtabe-----END--------------------------------
+     #
+     #
+     #  ---------------COPYING.ipadic-----BEGIN-------------------------------
+     #
+     #  Copyright 2000, 2001, 2002, 2003 Nara Institute of Science
+     #  and Technology.  All Rights Reserved.
+     #
+     #  Use, reproduction, and distribution of this software is permitted.
+     #  Any copy of this software, whether in its original form or modified,
+     #  must include both the above copyright notice and the following
+     #  paragraphs.
+     #
+     #  Nara Institute of Science and Technology (NAIST),
+     #  the copyright holders, disclaims all warranties with regard to this
+     #  software, including all implied warranties of merchantability and
+     #  fitness, in no event shall NAIST be liable for
+     #  any special, indirect or consequential damages or any damages
+     #  whatsoever resulting from loss of use, data or profits, whether in an
+     #  action of contract, negligence or other tortuous action, arising out
+     #  of or in connection with the use or performance of this software.
+     #
+     #  A large portion of the dictionary entries
+     #  originate from ICOT Free Software.  The following conditions for ICOT
+     #  Free Software applies to the current dictionary as well.
+     #
+     #  Each User may also freely distribute the Program, whether in its
+     #  original form or modified, to any third party or parties, PROVIDED
+     #  that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear
+     #  on, or be attached to, the Program, which is distributed substantially
+     #  in the same form as set out herein and that such intended
+     #  distribution, if actually made, will neither violate or otherwise
+     #  contravene any of the laws and regulations of the countries having
+     #  jurisdiction over the User or the intended distribution itself.
+     #
+     #  NO WARRANTY
+     #
+     #  The program was produced on an experimental basis in the course of the
+     #  research and development conducted during the project and is provided
+     #  to users as so produced on an experimental basis.  Accordingly, the
+     #  program is provided without any warranty whatsoever, whether express,
+     #  implied, statutory or otherwise.  The term "warranty" used herein
+     #  includes, but is not limited to, any warranty of the quality,
+     #  performance, merchantability and fitness for a particular purpose of
+     #  the program and the nonexistence of any infringement or violation of
+     #  any right of any third party.
+     #
+     #  Each user of the program will agree and understand, and be deemed to
+     #  have agreed and understood, that there is no warranty whatsoever for
+     #  the program and, accordingly, the entire risk arising from or
+     #  otherwise connected with the program is assumed by the user.
+     #
+     #  Therefore, neither ICOT, the copyright holder, or any other
+     #  organization that participated in or was otherwise related to the
+     #  development of the program and their respective officials, directors,
+     #  officers and other employees shall be held liable for any and all
+     #  damages, including, without limitation, general, special, incidental
+     #  and consequential damages, arising out of or otherwise in connection
+     #  with the use or inability to use the program or any product, material
+     #  or result produced or otherwise obtained by using the program,
+     #  regardless of whether they have been advised of, or otherwise had
+     #  knowledge of, the possibility of such damages at any time during the
+     #  project or thereafter.  Each user will be deemed to have agreed to the
+     #  foregoing by his or her commencement of use of the program.  The term
+     #  "use" as used herein includes, but is not limited to, the use,
+     #  modification, copying and distribution of the program and the
+     #  production of secondary products from the program.
+     #
+     #  In the case where the program, whether in its original form or
+     #  modified, was distributed or delivered to or received by a user from
+     #  any person, organization or entity other than ICOT, unless it makes or
+     #  grants independently of ICOT any specific warranty to the user in
+     #  writing, such person, organization or entity, will also be exempted
+     #  from and not be held liable to the user for any such damages as noted
+     #  above as far as the program is concerned.
+     #
+     #  ---------------COPYING.ipadic-----END----------------------------------
+
+    3. Lao Word Break Dictionary Data (laodict.txt)
+
+     #  Copyright (c) 2013 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     # Project: http://code.google.com/p/lao-dictionary/
+     # Dictionary: http://lao-dictionary.googlecode.com/git/Lao-Dictionary.txt
+     # License: http://lao-dictionary.googlecode.com/git/Lao-Dictionary-LICENSE.txt
+     #              (copied below)
+     #
+     #  This file is derived from the above dictionary, with slight
+     #  modifications.
+     #  ----------------------------------------------------------------------
+     #  Copyright (C) 2013 Brian Eugene Wilson, Robert Martin Campbell.
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification,
+     #  are permitted provided that the following conditions are met:
+     #
+     #
+     # Redistributions of source code must retain the above copyright notice, this
+     #  list of conditions and the following disclaimer. Redistributions in
+     #  binary form must reproduce the above copyright notice, this list of
+     #  conditions and the following disclaimer in the documentation and/or
+     #  other materials provided with the distribution.
+     #
+     #
+     # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
+     # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     # OF THE POSSIBILITY OF SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    4. Burmese Word Break Dictionary Data (burmesedict.txt)
+
+     #  Copyright (c) 2014 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     #  This list is part of a project hosted at:
+     #    github.com/kanyawtech/myanmar-karen-word-lists
+     #
+     #  --------------------------------------------------------------------------
+     #  Copyright (c) 2013, LeRoy Benjamin Sharon
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification, are permitted provided that the following conditions
+     #  are met: Redistributions of source code must retain the above
+     #  copyright notice, this list of conditions and the following
+     #  disclaimer.  Redistributions in binary form must reproduce the
+     #  above copyright notice, this list of conditions and the following
+     #  disclaimer in the documentation and/or other materials provided
+     #  with the distribution.
+     #
+     #    Neither the name Myanmar Karen Word Lists, nor the names of its
+     #    contributors may be used to endorse or promote products derived
+     #    from this software without specific prior written permission.
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     #  CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     #  INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     #  MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     #  DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
+     #  BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+     #  EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+     #  TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+     #  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+     #  ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
+     #  TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
+     #  THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+     #  SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    5. Time Zone Database
+
+      ICU uses the public domain data and code derived from Time Zone
+    Database for its time zone support. The ownership of the TZ database
+    is explained in BCP 175: Procedure for Maintaining the Time Zone
+    Database section 7.
+
+     # 7.  Database Ownership
+     #
+     #    The TZ database itself is not an IETF Contribution or an IETF
+     #    document.  Rather it is a pre-existing and regularly updated work
+     #    that is in the public domain, and is intended to remain in the
+     #    public domain.  Therefore, BCPs 78 [RFC5378] and 79 [RFC3979] do
+     #    not apply to the TZ Database or contributions that individuals make
+     #    to it.  Should any claims be made and substantiated against the TZ
+     #    Database, the organization that is providing the IANA
+     #    Considerations defined in this RFC, under the memorandum of
+     #    understanding with the IETF, currently ICANN, may act in accordance
+     #    with all competent court orders.  No ownership claims will be made
+     #    by ICANN or the IETF Trust on the database or the code.  Any person
+     #    making a contribution to the database or code waives all rights to
+     #    future claims in that contribution or in the TZ Database.
+
+    6. Google double-conversion
+
+    Copyright 2006-2011, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- libuv, located at deps/uv, is licensed as follows:
+  """
+    libuv is licensed for use as follows:
+
+    == ==
+    Copyright (c) 2015-present libuv project contributors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+    == ==
+
+    This license applies to parts of libuv originating from the
+    https://github.com/joyent/libuv repository:
+
+    == ==
+
+    Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+
+    == ==
+
+    This license applies to all parts of libuv that are not externally
+    maintained libraries.
+
+    The externally maintained libraries used by libuv are:
+
+      - tree.h (from FreeBSD), copyright Niels Provos. Two clause BSD license.
+
+      - inet_pton and inet_ntop implementations, contained in src/inet.c, are
+        copyright the Internet Systems Consortium, Inc., and licensed under the ISC
+        license.
+
+      - stdint-msvc2008.h (from msinttypes), copyright Alexander Chemeris. Three
+        clause BSD license.
+
+      - pthread-fixes.c, copyright Google Inc. and Sony Mobile Communications AB.
+        Three clause BSD license.
+
+      - android-ifaddrs.h, android-ifaddrs.c, copyright Berkeley Software Design
+        Inc, Kenneth MacKay and Emergya (Cloud4all, FP7/2007-2013, grant agreement
+        n° 289016). Three clause BSD license.
+  """
+
+- llhttp, located at deps/llhttp, is licensed as follows:
+  """
+    This software is licensed under the MIT License.
+
+    Copyright Fedor Indutny, 2018.
+
+    Permission is hereby granted, free of charge, to any person obtaining a
+    copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to permit
+    persons to whom the Software is furnished to do so, subject to the
+    following conditions:
+
+    The above copyright notice and this permission notice shall be included
+    in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+    OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+    NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+    DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+    OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+    USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- OpenSSL, located at deps/openssl, is licensed as follows:
+  """
+    Copyright (c) 1998-2019 The OpenSSL Project.  All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    1. Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+    2. Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+
+    3. All advertising materials mentioning features or use of this
+    software must display the following acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
+
+    4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
+    endorse or promote products derived from this software without
+    prior written permission. For written permission, please contact
+    openssl-core@openssl.org.
+
+    5. Products derived from this software may not be called "OpenSSL"
+    nor may "OpenSSL" appear in their names without prior written
+    permission of the OpenSSL Project.
+
+    6. Redistributions of any form whatsoever must retain the following
+    acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
+
+    THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
+    EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+    PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
+    ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+    NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+    LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+    HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+    STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+    ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+    OF THE POSSIBILITY OF SUCH DAMAGE.
+    == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == == ==
+
+    This product includes cryptographic software written by Eric Young
+    (eay@cryptsoft.com).  This product includes software written by Tim
+    Hudson (tjh@cryptsoft.com).
+  """
+
+- Punycode.js, located at lib/punycode.js, is licensed as follows:
+  """
+    Copyright Mathias Bynens <https://mathiasbynens.be/>
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- V8, located at deps/v8, is licensed as follows:
+  """
+    This license applies to all parts of V8 that are not externally
+    maintained libraries.  The externally maintained libraries used by V8
+    are:
+
+      - PCRE test suite, located in
+        test/mjsunit/third_party/regexp-pcre/regexp-pcre.js.  This is based on the
+        test suite from PCRE-7.3, which is copyrighted by the University
+        of Cambridge and Google, Inc.  The copyright notice and license
+        are embedded in regexp-pcre.js.
+
+      - Layout tests, located in test/mjsunit/third_party/object-keys.  These are
+        based on layout tests from webkit.org which are copyrighted by
+        Apple Computer, Inc. and released under a 3-clause BSD license.
+
+      - Strongtalk assembler, the basis of the files assembler-arm-inl.h,
+        assembler-arm.cc, assembler-arm.h, assembler-ia32-inl.h,
+        assembler-ia32.cc, assembler-ia32.h, assembler-x64-inl.h,
+        assembler-x64.cc, assembler-x64.h, assembler-mips-inl.h,
+        assembler-mips.cc, assembler-mips.h, assembler.cc and assembler.h.
+        This code is copyrighted by Sun Microsystems Inc. and released
+        under a 3-clause BSD license.
+
+      - Valgrind client API header, located at src/third_party/valgrind/valgrind.h
+        This is released under the BSD license.
+
+      - The Wasm C/C++ API headers, located at third_party/wasm-api/wasm.{h,hh}
+        This is released under the Apache license. The API's upstream prototype
+        implementation also formed the basis of V8's implementation in
+        src/wasm/c-api.cc.
+
+    These libraries have their own licenses; we recommend you read them,
+    as their terms may differ from the terms below.
+
+    Further license information can be found in LICENSE files located in
+    sub-directories.
+
+    Copyright 2014, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- SipHash, located at deps/v8/src/third_party/siphash, is licensed as follows:
+  """
+    SipHash reference C implementation
+
+    Copyright (c) 2016 Jean-Philippe Aumasson <jeanphilippe.aumasson@gmail.com>
+
+    To the extent possible under law, the author(s) have dedicated all
+    copyright and related and neighboring rights to this software to the public
+    domain worldwide. This software is distributed without any warranty.
+  """
+
+- zlib, located at deps/zlib, is licensed as follows:
+  """
+    zlib.h -- interface of the 'zlib' general purpose compression library
+    version 1.2.11, January 15th, 2017
+
+    Copyright (C) 1995-2017 Jean-loup Gailly and Mark Adler
+
+    This software is provided 'as-is', without any express or implied
+    warranty.  In no event will the authors be held liable for any damages
+    arising from the use of this software.
+
+    Permission is granted to anyone to use this software for any purpose,
+    including commercial applications, and to alter it and redistribute it
+    freely, subject to the following restrictions:
+
+    1. The origin of this software must not be misrepresented; you must not
+    claim that you wrote the original software. If you use this software
+    in a product, an acknowledgment in the product documentation would be
+    appreciated but is not required.
+    2. Altered source versions must be plainly marked as such, and must not be
+    misrepresented as being the original software.
+    3. This notice may not be removed or altered from any source distribution.
+
+    Jean-loup Gailly        Mark Adler
+    jloup@gzip.org          madler@alumni.caltech.edu
+  """
+
+- npm, located at deps/npm, is licensed as follows:
+  """
+    The npm application
+    Copyright (c) npm, Inc. and Contributors
+    Licensed on the terms of The Artistic License 2.0
+
+    Node package dependencies of the npm application
+    Copyright (c) their respective copyright owners
+    Licensed on their respective license terms
+
+    The npm public registry at https://registry.npmjs.org
+    and the npm website at https://www.npmjs.com
+    Operated by npm, Inc.
+    Use governed by terms published on https://www.npmjs.com
+
+    "Node.js"
+    Trademark Joyent, Inc., https://joyent.com
+    Neither npm nor npm, Inc. are affiliated with Joyent, Inc.
+
+    The Node.js application
+    Project of Node Foundation, https://nodejs.org
+
+    The npm Logo
+    Copyright (c) Mathias Pettersson and Brian Hammond
+
+    "Gubblebum Blocky" typeface
+    Copyright (c) Tjarda Koster, https://jelloween.deviantart.com
+    Used with permission
+
+    --------
+
+    The Artistic License 2.0
+
+    Copyright (c) 2000-2006, The Perl Foundation.
+
+    Everyone is permitted to copy and distribute verbatim copies
+    of this license document, but changing it is not allowed.
+
+    Preamble
+
+    This license establishes the terms under which a given free software
+    Package may be copied, modified, distributed, and/or redistributed.
+    The intent is that the Copyright Holder maintains some artistic
+    control over the development of that Package while still keeping the
+    Package available as open source and free software.
+
+    You are always permitted to make arrangements wholly outside of this
+    license directly with the Copyright Holder of a given Package.  If the
+    terms of this license do not permit the full use that you propose to
+    make of the Package, you should contact the Copyright Holder and seek
+    a different licensing arrangement.
+
+    Definitions
+
+        "Copyright Holder" means the individual(s) or organization(s)
+        named in the copyright notice for the entire Package.
+
+        "Contributor" means any party that has contributed code or other
+        material to the Package, in accordance with the Copyright Holder's
+        procedures.
+
+        "You" and "your" means any person who would like to copy,
+        distribute, or modify the Package.
+
+        "Package" means the collection of files distributed by the
+        Copyright Holder, and derivatives of that collection and/or of
+        those files. A given Package may consist of either the Standard
+        Version, or a Modified Version.
+
+        "Distribute" means providing a copy of the Package or making it
+        accessible to anyone else, or in the case of a company or
+        organization, to others outside of your company or organization.
+
+        "Distributor Fee" means any fee that you charge for Distributing
+        this Package or providing support for this Package to another
+        party.  It does not mean licensing fees.
+
+        "Standard Version" refers to the Package if it has not been
+        modified, or has been modified only in ways explicitly requested
+        by the Copyright Holder.
+
+        "Modified Version" means the Package, if it has been changed, and
+        such changes were not explicitly requested by the Copyright
+        Holder.
+
+        "Original License" means this Artistic License as Distributed with
+        the Standard Version of the Package, in its current version or as
+        it may be modified by The Perl Foundation in the future.
+
+        "Source" form means the source code, documentation source, and
+        configuration files for the Package.
+
+        "Compiled" form means the compiled bytecode, object code, binary,
+        or any other form resulting from mechanical transformation or
+        translation of the Source form.
+
+    Permission for Use and Modification Without Distribution
+
+    (1)  You are permitted to use the Standard Version and create and use
+    Modified Versions for any purpose without restriction, provided that
+    you do not Distribute the Modified Version.
+
+    Permissions for Redistribution of the Standard Version
+
+    (2)  You may Distribute verbatim copies of the Source form of the
+    Standard Version of this Package in any medium without restriction,
+    either gratis or for a Distributor Fee, provided that you duplicate
+    all of the original copyright notices and associated disclaimers.  At
+    your discretion, such verbatim copies may or may not include a
+    Compiled form of the Package.
+
+    (3)  You may apply any bug fixes, portability changes, and other
+    modifications made available from the Copyright Holder.  The resulting
+    Package will still be considered the Standard Version, and as such
+    will be subject to the Original License.
+
+    Distribution of Modified Versions of the Package as Source
+
+    (4)  You may Distribute your Modified Version as Source (either gratis
+    or for a Distributor Fee, and with or without a Compiled form of the
+    Modified Version) provided that you clearly document how it differs
+    from the Standard Version, including, but not limited to, documenting
+    any non-standard features, executables, or modules, and provided that
+    you do at least ONE of the following:
+
+        (a)  make the Modified Version available to the Copyright Holder
+        of the Standard Version, under the Original License, so that the
+        Copyright Holder may include your modifications in the Standard
+        Version.
+
+        (b)  ensure that installation of your Modified Version does not
+        prevent the user installing or running the Standard Version. In
+        addition, the Modified Version must bear a name that is different
+        from the name of the Standard Version.
+
+        (c)  allow anyone who receives a copy of the Modified Version to
+        make the Source form of the Modified Version available to others
+        under
+
+            (i)  the Original License or
+
+            (ii)  a license that permits the licensee to freely copy,
+            modify and redistribute the Modified Version using the same
+            licensing terms that apply to the copy that the licensee
+            received, and requires that the Source form of the Modified
+            Version, and of any works derived from it, be made freely
+            available in that license fees are prohibited but Distributor
+            Fees are allowed.
+
+    Distribution of Compiled Forms of the Standard Version
+    or Modified Versions without the Source
+
+    (5)  You may Distribute Compiled forms of the Standard Version without
+    the Source, provided that you include complete instructions on how to
+    get the Source of the Standard Version.  Such instructions must be
+    valid at the time of your distribution.  If these instructions, at any
+    time while you are carrying out such distribution, become invalid, you
+    must provide new instructions on demand or cease further distribution.
+    If you provide valid instructions or cease distribution within thirty
+    days after you become aware that the instructions are invalid, then
+    you do not forfeit any of your rights under this license.
+
+    (6)  You may Distribute a Modified Version in Compiled form without
+    the Source, provided that you comply with Section 4 with respect to
+    the Source of the Modified Version.
+
+    Aggregating or Linking the Package
+
+    (7)  You may aggregate the Package (either the Standard Version or
+    Modified Version) with other packages and Distribute the resulting
+    aggregation provided that you do not charge a licensing fee for the
+    Package.  Distributor Fees are permitted, and licensing fees for other
+    components in the aggregation are permitted. The terms of this license
+    apply to the use and Distribution of the Standard or Modified Versions
+    as included in the aggregation.
+
+    (8) You are permitted to link Modified and Standard Versions with
+    other works, to embed the Package in a larger work of your own, or to
+    build stand-alone binary or bytecode versions of applications that
+    include the Package, and Distribute the result without restriction,
+    provided the result does not expose a direct interface to the Package.
+
+    Items That are Not Considered Part of a Modified Version
+
+    (9) Works (including, but not limited to, modules and scripts) that
+    merely extend or make use of the Package, do not, by themselves, cause
+    the Package to be a Modified Version.  In addition, such works are not
+    considered parts of the Package itself, and are not subject to the
+    terms of this license.
+
+    General Provisions
+
+    (10)  Any use, modification, and distribution of the Standard or
+    Modified Versions is governed by this Artistic License. By using,
+    modifying or distributing the Package, you accept this license. Do not
+    use, modify, or distribute the Package, if you do not accept this
+    license.
+
+    (11)  If your Modified Version has been derived from a Modified
+    Version made by someone other than you, you are nevertheless required
+    to ensure that your Modified Version complies with the requirements of
+    this license.
+
+    (12)  This license does not grant you the right to use any trademark,
+    service mark, tradename, or logo of the Copyright Holder.
+
+    (13)  This license includes the non-exclusive, worldwide,
+    free-of-charge patent license to make, have made, use, offer to sell,
+    sell, import and otherwise transfer the Package with respect to any
+    patent claims licensable by the Copyright Holder that are necessarily
+    infringed by the Package. If you institute patent litigation
+    (including a cross-claim or counterclaim) against any party alleging
+    that the Package constitutes direct or contributory patent
+    infringement, then this Artistic License to you shall terminate on the
+    date that such litigation is filed.
+
+    (14)  Disclaimer of Warranty:
+    THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS
+    IS" AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR
+    NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL
+    LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL
+    BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+    DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF
+    ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+    --------
+  """
+
+- GYP, located at tools/gyp, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- inspector_protocol, located at tools/inspector_protocol, is licensed as follows:
+  """
+    // Copyright 2016 The Chromium Authors. All rights reserved.
+    //
+    // Redistribution and use in source and binary forms, with or without
+    // modification, are permitted provided that the following conditions are
+    // met:
+    //
+    //    * Redistributions of source code must retain the above copyright
+    // notice, this list of conditions and the following disclaimer.
+    //    * Redistributions in binary form must reproduce the above
+    // copyright notice, this list of conditions and the following disclaimer
+    // in the documentation and/or other materials provided with the
+    // distribution.
+    //    * Neither the name of Google Inc. nor the names of its
+    // contributors may be used to endorse or promote products derived from
+    // this software without specific prior written permission.
+    //
+    // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- jinja2, located at tools/inspector_protocol/jinja2, is licensed as follows:
+  """
+    Copyright (c) 2009 by the Jinja Team, see AUTHORS for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+
+        * The names of the contributors may not be used to endorse or
+          promote products derived from this software without specific
+          prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- markupsafe, located at tools/inspector_protocol/markupsafe, is licensed as follows:
+  """
+    Copyright (c) 2010 by Armin Ronacher and contributors.  See AUTHORS
+    for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms of the software as well
+    as documentation, with or without modification, are permitted provided
+    that the following conditions are met:
+
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+
+    * The names of the contributors may not be used to endorse or
+      promote products derived from this software without specific
+      prior written permission.
+
+    THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+    CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
+    NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+    OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+    EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+    PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+    PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+    NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+    SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+    DAMAGE.
+  """
+
+- cpplint.py, located at tools/cpplint.py, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- ESLint, located at tools/node_modules/eslint, is licensed as follows:
+  """
+    Copyright JS Foundation and other contributors, https://js.foundation
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- babel-eslint, located at tools/node_modules/babel-eslint, is licensed as follows:
+  """
+    Copyright (c) 2014-2016 Sebastian McKenzie <sebmck@gmail.com>
+
+    MIT License
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- gtest, located at test/cctest/gtest, is licensed as follows:
+  """
+    Copyright 2008, Google Inc.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+        * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- nghttp2, located at deps/nghttp2, is licensed as follows:
+  """
+    The MIT License
+
+    Copyright (c) 2012, 2014, 2015, 2016 Tatsuhiro Tsujikawa
+    Copyright (c) 2012, 2014, 2015, 2016 nghttp2 contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- node-inspect, located at deps/node-inspect, is licensed as follows:
+  """
+    Copyright Node.js contributors. All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+  """
+
+- large_pages, located at src/large_pages, is licensed as follows:
+  """
+     Copyright (C) 2018 Intel Corporation
+
+     Permission is hereby granted, free of charge, to any person obtaining a copy
+     of this software and associated documentation files (the "Software"),
+     to deal in the Software without restriction, including without limitation
+     the rights to use, copy, modify, merge, publish, distribute, sublicense,
+     and/or sell copies of the Software, and to permit persons to whom
+     the Software is furnished to do so, subject to the following conditions:
+
+     The above copyright notice and this permission notice shall be included
+     in all copies or substantial portions of the Software.
+
+     THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+     OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+     FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
+     THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES
+     OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+     ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
+     OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- caja, located at lib/internal/freeze_intrinsics.js, is licensed as follows:
+  """
+     Adapted from SES/Caja - Copyright (C) 2011 Google Inc.
+     Copyright (C) 2018 Agoric
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+  """
+
+- brotli, located at deps/brotli, is licensed as follows:
+  """
+    Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- HdrHistogram, located at deps/histogram, is licensed as follows:
+  """
+    The code in this repository code was Written by Gil Tene, Michael Barker,
+    and Matt Warren, and released to the public domain, as explained at
+    http://creativecommons.org/publicdomain/zero/1.0/
+
+    For users of this code who wish to consume it under the "BSD" license
+    rather than under the public domain or CC0 contribution text mentioned
+    above, the code found under this directory is *also* provided under the
+    following license (commonly referred to as the BSD 2-Clause License). This
+    license does not detract from the above stated release of the code into
+    the public domain, and simply represents an additional license granted by
+    the Author.
+
+    -----------------------------------------------------------------------------
+    ** Beginning of "BSD 2-Clause License" text. **
+
+     Copyright (c) 2012, 2013, 2014 Gil Tene
+     Copyright (c) 2014 Michael Barker
+     Copyright (c) 2014 Matt Warren
+     All rights reserved.
+
+     Redistribution and use in source and binary forms, with or without
+     modification, are permitted provided that the following conditions are met:
+
+     1. Redistributions of source code must retain the above copyright notice,
+        this list of conditions and the following disclaimer.
+
+     2. Redistributions in binary form must reproduce the above copyright notice,
+        this list of conditions and the following disclaimer in the documentation
+        and/or other materials provided with the distribution.
+
+     THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+     AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+     IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+     ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+     LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+     INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+     CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+     THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- node-heapdump, located at src/heap_utils.cc, is licensed as follows:
+  """
+    ISC License
+
+    Copyright (c) 2012, Ben Noordhuis <info@bnoordhuis.nl>
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    === src/compat.h src/compat-inl.h ===
+
+    ISC License
+
+    Copyright (c) 2014, StrongLoop Inc.
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- rimraf, located at lib/internal/fs/rimraf.js, is licensed as follows:
+  """
+    The ISC License
+
+    Copyright (c) Isaac Z. Schlueter and Contributors
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+    IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- uvwasi, located at deps/uvwasi, is licensed as follows:
+  """
+    MIT License
+
+    Copyright (c) 2019 Colin Ihrig and Contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in all
+    copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+    SOFTWARE.
+  """
+
+
+
+
+

MICROSOFT SOFTWARE LICENSE TERMS

+
+
+
MICROSOFT VISUAL STUDIO CODE
+
+These license terms are an agreement between you and Microsoft Corporation (or based on where you live, one of its affiliates). They apply to the software named above. The terms also apply to any Microsoft services or updates for the software, except to the extent those have different terms.
+
+IF YOU COMPLY WITH THESE LICENSE TERMS, YOU HAVE THE RIGHTS BELOW.
+
+    1. INSTALLATION AND USE RIGHTS.
+        a. General. You may use any number of copies of the software to develop and test your applications, including deployment within your internal corporate network.
+        b. Demo use. The uses permitted above include use of the software in demonstrating your applications.
+        c. Third Party Components. The software may include third party components with separate legal notices or governed by other agreements, as may be described in the ThirdPartyNotices file accompanying the software.
+        d. Extensions. The software gives you the option to download other Microsoft and third party software packages from our extension marketplace or package managers. Those packages are under their own licenses, and not this agreement. Microsoft does not distribute, license or provide any warranties for any of the third party packages. By accessing or using our extension marketplace, you agree to the extension marketplace terms located at https://aka.ms/vsmarketplace-ToU.
+    2. DATA.
+        a. Data Collection. The software may collect information about you and your use of the software, and send that to Microsoft. Microsoft may use this information to provide services and improve our products and services. You may opt-out of many of these scenarios, but not all, as described in the product documentation located at https://code.visualstudio.com/docs/supporting/faq#_how-to-disable-telemetry-reporting. There may also be some features in the software that may enable you and Microsoft to collect data from users of your applications. If you use these features, you must comply with applicable law, including providing appropriate notices to users of your applications together with Microsoft’s privacy statement. Our privacy statement is located at https://go.microsoft.com/fwlink/?LinkID=824704. You can learn more about data collection and use in the help documentation and our privacy statement. Your use of the software operates as your consent to these practices.
+        b. Processing of Personal Data. To the extent Microsoft is a processor or subprocessor of personal data in connection with the software, Microsoft makes the commitments in the European Union General Data Protection Regulation Terms of the Online Services Terms to all customers effective May 25, 2018, at https://go.microsoft.com/?linkid=9840733.
+    3. UPDATES. The software may periodically check for updates and download and install them for you. You may obtain updates only from Microsoft or authorized sources. Microsoft may need to update your system to provide you with updates. You agree to receive these automatic updates without any additional notice. Updates may not include or support all existing software features, services, or peripheral devices. If you do not want automatic updates, you may turn them off by following the instructions in the documentation at https://go.microsoft.com/fwlink/?LinkID=616397.
+    4. FEEDBACK. If you give feedback about the software to Microsoft, you give to Microsoft, without charge, the right to use, share and commercialize your feedback in any way and for any purpose. You will not give feedback that is subject to a license that requires Microsoft to license its software or documentation to third parties because we include your feedback in them. These rights survive this agreement.
+    5. SCOPE OF LICENSE. This license applies to the Visual Studio Code product. Source code for Visual Studio Code is available at https://github.com/Microsoft/vscode under the MIT license agreement. The software is licensed, not sold. This agreement only gives you some rights to use the software. Microsoft reserves all other rights. Unless applicable law gives you more rights despite this limitation, you may use the software only as expressly permitted in this agreement. In doing so, you must comply with any technical limitations in the software that only allow you to use it in certain ways. You may not
+        reverse engineer, decompile or disassemble the software, or otherwise attempt to derive the source code for the software except and solely to the extent required by third party licensing terms governing use of certain open source components that may be included in the software;
+        remove, minimize, block or modify any notices of Microsoft or its suppliers in the software;
+        use the software in any way that is against the law;
+        share, publish, rent or lease the software, or provide the software as a stand-alone offering for others to use.
+    6. SUPPORT SERVICES. Because this software is “as is,” we may not provide support services for it.
+    7. ENTIRE AGREEMENT. This agreement, and the terms for supplements, updates, Internet-based services and support services that you use, are the entire agreement for the software and support services.
+    8. EXPORT RESTRICTIONS. You must comply with all domestic and international export laws and regulations that apply to the software, which include restrictions on destinations, end-users, and end use. For further information on export restrictions, see https://www.microsoft.com/exporting.
+    9. APPLICABLE LAW. If you acquired the software in the United States, Washington law applies to interpretation of and claims for breach of this agreement, and the laws of the state where you live apply to all other claims. If you acquired the software in any other country, its laws apply.
+    10. CONSUMER RIGHTS; REGIONAL VARIATIONS. This agreement describes certain legal rights. You may have other rights, including consumer rights, under the laws of your state or country. Separate and apart from your relationship with Microsoft, you may also have rights with respect to the party from which you acquired the software. This agreement does not change those other rights if the laws of your state or country do not permit it to do so. For example, if you acquired the software in one of the below regions, or mandatory country law applies, then the following provisions apply to you:
+        a. Australia. You have statutory guarantees under the Australian Consumer Law and nothing in this agreement is intended to affect those rights.
+        b. Canada. If you acquired this software in Canada, you may stop receiving updates by turning off the automatic update feature, disconnecting your device from the Internet (if and when you re-connect to the Internet, however, the software will resume checking for and installing updates), or uninstalling the software. The product documentation, if any, may also specify how to turn off updates for your specific device or software.
+        c. Germany and Austria.
+            Warranty. The properly licensed software will perform substantially as described in any Microsoft materials that accompany the software. However, Microsoft gives no contractual guarantee in relation to the licensed software.
+            Limitation of Liability. In case of intentional conduct, gross negligence, claims based on the Product Liability Act, as well as, in case of death or personal or physical injury, Microsoft is liable according to the statutory law.
+
+        Subject to the foregoing clause (ii), Microsoft will only be liable for slight negligence if Microsoft is in breach of such material contractual obligations, the fulfillment of which facilitate the due performance of this agreement, the breach of which would endanger the purpose of this agreement and the compliance with which a party may constantly trust in (so-called "cardinal obligations"). In other cases of slight negligence, Microsoft will not be liable for slight negligence.
+    11. DISCLAIMER OF WARRANTY. The software is licensed “as-is.” You bear the risk of using it. Microsoft gives no express warranties, guarantees or conditions. To the extent permitted under your local laws, Microsoft excludes the implied warranties of merchantability, fitness for a particular purpose and non-infringement.
+
+    12. LIMITATION ON AND EXCLUSION OF DAMAGES. You can recover from Microsoft and its suppliers only direct damages up to U.S. $5.00. You cannot recover any other damages, including consequential, lost profits, special, indirect or incidental damages.
+
+    This limitation applies to (a) anything related to the software, services, content (including code) on third party Internet sites, or third party applications; and (b) claims for breach of contract, breach of warranty, guarantee or condition, strict liability, negligence, or other tort to the extent permitted by applicable law.
+
+    It also applies even if Microsoft knew or should have known about the possibility of the damages. The above limitation or exclusion may not apply to you because your state or country may not allow the exclusion or limitation of incidental, consequential or other damages.
+
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/docker.html b/docs/ide/1.0/docker.html new file mode 100644 index 00000000..7e3335c8 --- /dev/null +++ b/docs/ide/1.0/docker.html @@ -0,0 +1,426 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

Docker

+
+
+

The Docker commandlet allows to install and use Docker. +On Windows WSL 2 (Windows Subsystem for Linux) has to be installed properly as a prerequisite.

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon docker «args») are explained by the following table:

+
+
+
Usage of devon docker
+

|=== +|Argument(s) |Meaning +|setup |setup Docker (install and verify) as per above flow. +|«args» |call docker with the specified arguments. Call docker help for details or use docker directly as preferred («args») +|===

+
+
+
+
+

setup

+
+
+

The Docker commandlet will install Docker automatically. +Please note that besides the sandbox concept of devonfw-ide this is a global installation on your system. +When uninstalling devonfw-ide, you may have to manually uninstall Docker and Kubernetes if you do not need it anymore.

+
+
+
+
+

requirements

+
+
+

Running Docker and especially Kubernetes on your machine in the background will require quite some resources. +This will allocate at least 2GB of additional RAM.

+
+
+

You will need at least 8GB of total RAM while we recommend using 16GB+.

+
+
+

You may also tune and scale it to your needs. +When using Docker Desktop (Windows or MacOS) simply go to the resources tab in the settings. +It will depend on your usage frequency if you want to have it running in the background all the time. +This is a balance between resource utilization and convenience. +If you use Docker and Kubernetes on your local machine on a daily basis this makes sense.

+
+
+

In case you only use Docker rarely, you can save resources by stopping it when not needed after it has been installed.

+
+
+
+
+

Windows and macOS

+
+
+

To enable or disable autostart, you can launch Docker Desktop on Windows or MacOS and go to the Preferences (gear icon in the title bar). Then in the General tab you can check or uncheck the option Start Docker Desktop when you login (see also here). When autostart is disabled and you launch Docker Desktop it will notice and ask you to start the service or do this automatically for you. +On Windows you can also manually tweak this:

+
+
+
    +
  • +

    Hit [windows][r]

    +
  • +
  • +

    Enter services.msc

    +
  • +
  • +

    Confirm with OK

    +
  • +
  • +

    In the services app search for the Docker Desktop Service in the list and select it.

    +
  • +
  • +

    Now you can start or stop the service by clicking on the according link text.

    +
  • +
  • +

    Also when right clicking on Docker Desktop Service and selecting Options from the context-menu, you can change the start type to automatic or manual.

    +
  • +
+
+
+
+
+

Mac M1

+
+
+

In case you have a new Mac with M1 CPU, we automatically download and install the corresponding ARM version of Docker Desktop for macOS. +However, if you use Docker and search for images you may end up with errors like:

+
+
+
+
docker: no matching manifest for linux/arm64/v8 in the manifest list entries.
+
+
+
+

So with M1 CPU you may need to add --platform linux/x86_64 as an option to your Docker command to find the expected container image.

+
+
+
+
+

Linux

+
+
+

There is no Docker Desktop for Linux. +As Docker initially comes from the Linux world, it is easy to set it up on a Linux machine and use it from the commandline. +Therefore we do not install a GUI for you in case you are a Linux user. +In case you need a GUI for Docker and Kubernetes on Linux you can choose from the following options:

+
+
+ +
+
+
+
+

usage

+
+
+

Once installed via setup, you can run Docker directly from any shell of your OS directly. +Run docker help to get started and use the online documentations and resources on the web to get familiar with Docker. +It is not our intention to repeat this here.

+
+
+

Please note that the docker commandlet is a command wrapper.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/eclipse.html b/docs/ide/1.0/eclipse.html new file mode 100644 index 00000000..786a36d6 --- /dev/null +++ b/docs/ide/1.0/eclipse.html @@ -0,0 +1,420 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

eclipse

+
+
+

The eclipse commandlet allows to install, configure, and launch the Eclipse IDE. +To launch eclipse for your current workspace and devonfw-ide installation simply run: +devon eclipse

+
+
+

You may also supply additional arguments as devon eclipse «args». These are explained by the following table:

+
+
+
Usage of devon eclipse
+

|=== +|Argument(s) |Meaning +|--all |if provided as first arg then the command will be invoked for each workspace +|setup |setup Eclipse (install or update) +|add-plugin «id» [«url»]|install an additional plugin +|run |launch Eclipse (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add |reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|===

+
+
+

There are variables that can be used for Eclipse. These are explained by the following table:

+
+
+
Variables of devonfw-ide for Eclipse
+

|=== +|Variable|Meaning +|ECLIPSE_VERSION|The version of the tool Eclipse to install and use. +|ECLIPSE_EDITION_TYPE|The edition of the tool Eclipse to install and use. You can choose between Java for standard edition or JEE for enterprise edition. +|EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|===

+
+
+
+
+

plugins

+
+
+

To be productive with Eclipse you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder eclipse/plugins (click on this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example tmterminal.properties:

+
+
+
+
plugin_url=http://download.eclipse.org/tm/terminal/marketplace
+plugin_id=org.eclipse.tm.terminal.feature.feature.group,org.eclipse.tm.terminal.view.feature.feature.group,org.eclipse.tm.terminal.control.feature.feature.group,org.eclipse.tm.terminal.connector.ssh.feature.feature.group,org.eclipse.tm.terminal.connector.telnet.feature.feature.group
+plugin_active=true
+
+
+
+

The variables are defined as follows:

+
+
+
    +
  • +

    plugin_url defines the URL of the Eclipse update site of the plugin

    +
  • +
  • +

    plugin_id defines the feature group ID(s) to install. To install multiple features/plugins provide a comma-separated list of IDs. If you want to customize devonfw-ide with new plugins you can first install them manually and then go to About Eclipse > Installation Details then you can filter for your newly installed plugin and find the values in the Id column. Copy & paste them from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise, developers can still install the plugin manually via devon eclipse add-plugin «plugin-name» from the config file settings/eclipse/plugins/«plugin-name».properties. See the settings/eclipse/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. This e.g. applies for devstyle that allows a real dark mode for eclipse and tunes the theming and layout of Eclipse in general. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually.

+
+
+

As the maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
+

legacy plugin config

+
+
+

For downward compatibility we still support the deprecated legacy configuration if the folder settings/eclipse/plugins does not exist: +The project configuration typically defines the plugins that will be installed via ECLIPSE_PLUGINS variable. Otherwise defaults from this eclipse commandlet will apply. +Be aware that this comes at your own risk and sometimes plugins can conflict and break your IDE.

+
+
+

Here is an example how a project can configure the plugins in its devon.properties inside the settings:

+
+
+
+
ECLIPSE_PLUGINS=("AnyEditTools.feature.group" "https://raw.githubusercontent.com/iloveeclipse/plugins/latest/" "com.ess.regexutil.feature.group" "http://regex-util.sourceforge.net/update/")
+
+
+
+

For the above listed plugins you can also use the short form:

+
+
+
+
ECLIPSE_PLUGINS=("anyedit" "" "regexutil" "")
+
+
+
+

Of course you may also mix plugin IDs with fully qualified plugins.

+
+
+
+
+

dictionary

+
+
+

Eclipse already comes with a built-in spellchecker. This is very helpful when writing comments. The default settings of devonfw-ide ship with a project-specific dictionary file and according configurations to enable spellchecking and configuring this dictionary. +When typing JavaDoc, inline comments or other texts the spellchecker will underline unknown words in red. +If your cursor is located at such a word you can hit [Ctrl][1] to get a context menu with additional options. +There you can either choose similar correct words to correct a typo or you may even add the word (maybe a new business term) to your local dictionary.

+
+
+
+"Eclipse spellchecker” +
+
+
+

In the latter case, you should commit the changes to your settings so that it will be available to your entire team. +For further details about committing changes to the settings please consult the admin usage.

+
+
+
+
+

non-english dictionary

+
+
+

In case your project has to write documentation or text in languages other than English, you might want to prefill your project dictionary for that language. +Here we collect a list of such dictionaries that you can download and merge into your project dictionary:

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/features.html b/docs/ide/1.0/features.html new file mode 100644 index 00000000..a415c115 --- /dev/null +++ b/docs/ide/1.0/features.html @@ -0,0 +1,472 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Features

+
+
+

Every developer needs great tools to work efficiently. Setting up these tools manually can be tedious and error-prone. Furthermore, some projects may require different versions and configurations of such tools. Especially configurations like code-formatters should be consistent within a project to avoid diff-wars.

+
+
+

The devonfw-ide will solve these issues. Here are the features you will find through devonfw-ide:

+
+
+
    +
  • +

    Efficient
    +Set up your IDE within minutes tailored for the requirements of your project.

    +
  • +
  • +

    Automated
    +Automate the setup and update, avoid manual steps and mistakes.

    +
  • +
  • +

    Simple
    +KISS (Keep It Small and Simple), no native installers that globally mess your OS or tool-integration that break with every release. Instead, use templates and simple shell scripts.

    +
  • +
  • +

    Configurable
    +You can change the configuration depending on your needs. Furthermore, the settings contain configuration templates for the different tools (see configurator).

    +
  • +
  • +

    Maintainable
    +For your project you should copy these settings to an own git repository that can be maintained and updated to manage the tool configurations during the project lifecycle. If you use GitHub or GitLab every developer can easily suggest changes and improvements to these settings via pull/merge requests, which is easier to manage with big teams.

    +
  • +
  • +

    Customizable
    +Do you need an additional tool you had never heard of before? Put it in the software folder of the structure. The devon CLI will then automatically add it to your PATH variable.
    +Further you can create your own commandlet for your additional tool. For closed-source tools you can create your own archive and distribute it to your team members as long as you care about the terms and licenses of these tools.

    +
  • +
  • +

    Multi-platform
    +It works on all major platforms: Windows, Mac and Linux.

    +
  • +
  • +

    Multi-tenancy
    +You can have several instances of the devonfw-ide "installed" on your machine for different projects with different tools, tool versions and configurations. You won’t need to set up any physical installation nor changing your operating system. "Installations" of devonfw-ide do not interfere with each other nor with other installed software.

    +
  • +
  • +

    Multiple Workspaces
    +It supports working with different workspaces on different branches. You can create and update new workspaces with a few clicks. You can see the workspace name in the title-bar of your IDE so you do not get confused and work on the right branch.

    +
  • +
  • +

    Free
    +The devonfw-ide is free just like everything from devonfw. See LICENSE for details.

    +
  • +
+
+
+
+
+

IDEs

+
+
+

We support the following IDEs:

+
+
+ +
+
+
+
+

Platforms

+
+
+

We support the following platforms:

+
+
+ +
+
+
+
+

Build-Systems

+
+
+

We support the following build-systems:

+
+
+ +
+
+

However, also other IDEs, platforms, or tools can be easily integrated as commandlet.

+
+
+
+
+

Motivation

+
+
+

TL;DR? Let's talk to developers in a correct language. Here are some examples with devonfw-ide:

+
+
+
+
[/]$ devon
+You are not inside a devonfw-ide installation: /
+[/]$ cd /projects/devonfw
+[devonfw]$ mvn
+zsh: command not found: mvn
+[devonfw]$ devon
+devonfw-ide environment variables have been set for /projects/devonfw in workspace main
+[devonfw]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/devonfw/software/maven
+Java version: 1.8.0_191, vendor: Oracle Corporation, runtime: /projects/devonfw/software/java
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[devonfw]$ cd /projects/ide-test/workspaces/test/my-project
+[my-project]$ devon
+devonfw-ide environment variables have been set for /projects/ide-test in workspace test
+[my-project]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/ide-test/software/maven
+Java version: 11.0.2, vendor: Oracle Corporation, runtime: /projects/ide-test/software/jdk/Contents/Home
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[ide-test]$ devon eclipse
+launching Eclipse for workspace test...
+[my-project]$ devon build
+[INFO] Scanning for projects...
+...
+[INFO] BUILD SUCCESS
+
+
+
+

This was just a very simple demo of what devonfw-ide can do. For further details have a look at our CLI documentation.

+
+
+

Now you might ask:

+
+
+
    +
  • +

    But I use Windows/Linux/MacOS/… - it works on all platforms!

    +
  • +
  • +

    But how about Windows CMD or Power-Shell? - it works!

    +
  • +
  • +

    But what if I use cygwin or git-bash on windows? - it works!

    +
  • +
  • +

    But I love to use ConEmu or Commander - it works with full integration!

    +
  • +
  • +

    How about macOS Terminal or iTerm2? - it works with full integration!

    +
  • +
  • +

    But I use Zsh - it works!

    +
  • +
  • +

    …​? - it works!

    +
  • +
+
+
+

Wow! So let’s get started with download & setup.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/gradle.html b/docs/ide/1.0/gradle.html new file mode 100644 index 00000000..6b2eb74e --- /dev/null +++ b/docs/ide/1.0/gradle.html @@ -0,0 +1,291 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

gradle

+
+
+

The gradle commandlet allows to install, configure, and launch gradle. It is similar to gradle-wrapper. So calling devon gradle «args» is more or less the same as calling gradle «args» but with the benefit that the version of gradle preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon gradle «args») are explained by the following table:

+
+
+
Usage of devon gradle
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup gradle (install and verify), configurable via GRADLE_VERSION +|«args» |run gradle with the given arguments («args») +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/help.html b/docs/ide/1.0/help.html new file mode 100644 index 00000000..7cb29255 --- /dev/null +++ b/docs/ide/1.0/help.html @@ -0,0 +1,291 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

help

+
+
+

The help commandlet provides help for the CLI.

+
+
+
Usage of devon help
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |Print general help +|«command» |Print help for the commandlet «command». +|== == == == == == == == == == == =

+
+
+

Please note that devon help «command» will do the same as devon «command» help.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/ide.html b/docs/ide/1.0/ide.html new file mode 100644 index 00000000..948e085b --- /dev/null +++ b/docs/ide/1.0/ide.html @@ -0,0 +1,384 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

ide

+
+
+

The ide commandlet manages your devonfw-ide. +You need to supply additional arguments as devon ide «args». These are explained by the following table:

+
+
+
Usage of devon ide
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup [«SETTINGS_URL»] |setup devonfw-ide (cloning the settings from the given URL, optionally from specific branch URL#branch) +|update [«package»] |update devonfw-ide +|update scripts [to «version»] |update devonfw-ide +|uninstall |uninstall devonfw-ide (if you want to remove it entirely from your system) +|== == == == == == == == == == == =

+
+
+
+
+

setup

+
+
+

Run devon ide setup to initially setup your devonfw-ide. It is recommended to run the setup script in the top-level directory ($DEVON_IDE_HOME). However, in case you want to skip some system specific integration, you may also run this command directly instead. The setup only needs to be called once after a new devonfw-ide instance has been created. It will follow this process:

+
+
+
    +
  • +

    install the devon command on your system (if not already installed).

    +
  • +
  • +

    clone the settings (you may provide a git URL directly as argument or you will be prompted for it).

    +
  • +
  • +

    install all required software from DEVON_IDE_TOOLS variable (if not already installed).

    +
  • +
  • +

    configure all these tools

    +
  • +
  • +

    create IDE launch scripts

    +
  • +
  • +

    perform OS specific system integration such as Windows Explorer integration (only done from setup script and not from devon ide setup)

    +
  • +
+
+
+
+
+

update

+
+
+

Run devon ide update to update your devonfw-ide. This will check for updates and install them automatically. +The optional extra argument («package») behaves as follows:

+
+
+
    +
  • +

    scripts: check if a new version of devonfw-ide-scripts is available. If so it will be downloaded and installed. As Windows is using file-locks, it is tricky to update a script while it is executed. Therefore, we update the scripts folder as an async background task and have to abort further processing at this point on windows as a workaround.

    +
  • +
  • +

    settings: update the settings (git pull).

    +
  • +
  • +

    software: update the software (e.g. if versions have changed via scripts or settings update).

    +
  • +
  • +

    projects: update the projects (checkout and import repositories into workspace/IDEs).

    +
  • +
  • +

    all: do all the above sequentially.

    +
  • +
  • +

    none: settings and software are updated by default if no extra argument is given. This is the regular usage for project developers. Only perform an update of scripts when you are requested to do so by your technical lead. Bigger projects especially need to test updates before rolling them out to the entire team. If developers always updated the latest release of the scripts which is released globally, some project functionality would break causing problems and extra efforts in the teams.

    +
  • +
+
+
+

In order to update to a specific version of scripts an explicit version can be specified after the additional to argument:

+
+
+
+
devon ide update scripts to 3.1.99
+
+
+
+

The above example will update to the exact version 3.1.99 no matter if this is an upgrade or a downgrade of your current installed version. +If you just use devon ide update scripts then the latest available version will be installed. In larger teams it is recommended to communicate exact version updates to avoid that a new release can interfere and break anything. Therefore, some pilot user will test a new version for the entire team and, only after a successful test, they will communicate to the team to update to that exact version by providing the complete command as in the above example.

+
+
+
+
+

uninstall

+
+
+

We hope you love devonfw-ide. However, if you don’t and want to get rid of it entirely and completely remove all integration, you can use this command:

+
+
+
+
devon ide uninstall
+
+
+
+

This will remove devonfw-ide from all central places of your OS (user home directory such as scripts, .devon, .bashrc, as well as windows registry, etc.). +However, it will not remove your current installations (or shared software folder). So after running this uninstall, simply remove your DEVON_IDE_HOME directory of all devonfw-ide installations and potential shared software folder. You may also want to clean up your ~/Downloads directory from files downloaded by devonfw-ide. We do not automate this as deleting a directory is a very simple manual step and we do not want to take responsibility for severe data loss if your workspaces contained valuable work.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/index.html b/docs/ide/1.0/index.html new file mode 100644 index 00000000..2ce5df50 --- /dev/null +++ b/docs/ide/1.0/index.html @@ -0,0 +1,299 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

devonfw-ide

+
+
+

Welcome to the devonfw-ide!!!

+
+
+

The devonfw-ide is a fantastic tool to automatically download, install, setup and update the IDE (integrated development environment) of your software development projects.

+
+
+

For further details visit the following links:

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/integration.html b/docs/ide/1.0/integration.html new file mode 100644 index 00000000..18ca790d --- /dev/null +++ b/docs/ide/1.0/integration.html @@ -0,0 +1,424 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Integration

+
+
+

The devonfw-ide already brings a lot of integration out of the box. This page is for users that want to get even more out of it. For instance this IDE ships with a console script to open a shell with the environment variables properly set for your devonfw-ide installation, so you get the correct version of your tools (Java, Maven, Yarn, etc.). However, you might want to open a shell from your IDE or your file manager. For some of these use-cases you need additional tweaks that are described on this page.

+
+
+
+
+

Windows

+
+
+

devonfw-ide automatically integrates with Windows-Explorer during setup.

+
+
+
+
+

CMD

+
+
+

If you want to open a CMD (MS Dos Shell) directly from Windows-Explorer simply right-click on the folder in your devonfw-ide you want to open. From the context menu click on Open Devon CMD shell here. This will open CMD and automatically initialize your environment according to the devonfw-ide project containing the folder (if any, see above).

+
+
+
+
+

Git-Bash

+
+
+

Just like for CMD you can also click Git Bash Here from Windows-Explorer context-menu to open a git bash. If you have selected a folder in your devonfw-ide installation, it will automatically initialize your environment.

+
+
+
+
+

Cygwin

+
+
+

In case you have cygwin installed on your machine, the devonfw-ide will autodetect this during setup and also install a Windows-Explorer integration. Just choose Open Devon Cygwin Bash Here to open cygwin bash and initialize your devonfw-ide environment.

+
+
+
+
+

ConEMU

+
+
+

ConEmu is a great extension that brings additional features such as tabs to your windows shells. If you like it, you will also want to have it integrated with devonfw-ide. All you need to do is follow these simple steps:

+
+
+
    +
  • +

    Copy the file CmdInit.cmd from your ConEmu installation (C:\Program Files\ConEmu\ConEmu\CmdInit.cmd) to a personal folder (e.g. C:\Users\MYLOGIN\scripts).

    +
  • +
  • +

    Modify this copy of CmdInit.cmd by adding the line devon (e.g. at line 6) and saving.

    +
  • +
  • +

    Go to ConEmu and open the settings (via context menu or [Windows][Alt][p]).

    +
  • +
  • +

    Select Startup > Tasks from the left tree.

    +
  • +
  • +

    Select the first option form Predefined tasks (command groups) ({Shells::cmd})

    +
  • +
  • +

    In the text area at the right bottom modify the location of CmdInit.cmd to your customized copy (%HOME%\scripts\CmdInit.cmd).

    +
  • +
  • +

    Select Integration from the left tree.

    +
  • +
  • +

    Click on the upper Register button (for ConEmu Here).

    +
  • +
  • +

    Click on Save settings

    +
  • +
+
+
+

Now you have the option ConEmu here if you right click on a folder in Windows Explorer that will open a new tab in ConEmu and automatically setup your environment according to the devonfw-ide project containing the folder (if any, see above).

+
+
+
+
+

ConEMU and StartExplorer

+
+
+

You can even integrate the Eclipse StartExplorer plug-in and ConEMU to open up console right from the file tree of eclipse into ConEMU. You can do this by adding a custom command to StartExplorer:

+
+
+
    +
  1. +

    Open up eclipse

    +
  2. +
  3. +

    Open Window > Preferences

    +
  4. +
  5. +

    Select StartExplorer > Custom Commands on the left

    +
  6. +
  7. +

    Add on the right and setup the following command: "C:\Program Files\ConEmu\ConEmu64.exe" -Dir ${resource_path} -runlist cmd.exe /k ""%ConEmuBaseDir%\CmdInit.cmd" & "IDEenv"" -cur_console:n +Be aware that you potentially have to adapt the 'ConEmu*.exe' path to match your installation.

    +
    +

    You can even add a shell login if you installed git bash on your machine. Please be aware to potentially adapt the sh.exe url to match your installation: "C:\Program Files\ConEmu\ConEmu64.exe" -Dir ${resource_path} -runlist cmd.exe /k ""%ConEmuBaseDir%\CmdInit.cmd" & "IDEenv" & "%SYSTEMDRIVE%\Program Files\Git\bin\sh.exe" --login" -cur_console:n

    +
    +
  8. +
  9. +

    Set both "Name for *" fields to a name of your choice, like "ConEMU"

    +
  10. +
  11. +

    OK → OK

    +
  12. +
  13. +

    Right click on any folder/file in your eclipse file explorer and select StartExplorer > Custom Commands > ConEMU.

    +
  14. +
  15. +

    You will get an initialized console at the file/folder location! Have fun!

    +
  16. +
+
+
+
+
+

Eclipse

+
+
+

You might want to open a terminal directly as view inside your Eclipse IDE. Therefore we provide eclipse with the TM terminal plugin. +Further the settings already configure that plugin so it automatically sets the environment properties correctly. In other words the integration comes out of the box.

+
+
+

To use it all you need to do is to follow these steps:

+
+
+
    +
  • +

    Open the Terminal view (Window > Show View > Other > Terminal > Terminal > OK).

    +
  • +
  • +

    Click on the monitor icon from the left of the icon bar of the Terminal view.

    +
  • +
  • +

    Choose terminal (e.g. Local Terminal) and confirm with OK

    +
  • +
  • +

    Execute mvn -v to verify your environment.

    +
  • +
+
+
+
+
+

IntelliJ or WebStorm

+
+
+

You might want to open a terminal directly as view inside your IDEA IDE, that already ships with a feature for this out of the box. +If you start your IDE via the intellij-main script generated by devonfw-ide or via devon intellij start from a shell then everything is configured and your environment is set automatically.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/intellij.html b/docs/ide/1.0/intellij.html new file mode 100644 index 00000000..a3b5e3ab --- /dev/null +++ b/docs/ide/1.0/intellij.html @@ -0,0 +1,348 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

intellij

+
+
+

The intellij commandlet allows to install, configure, and launch IntelliJ. +To launch IntelliJ for your current workspace and devonfw-ide installation, simply run: +devon intellij

+
+
+

You may also supply additional arguments as devon intellij «args». These are explained by the following table:

+
+
+
Usage of devon intellij
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then to command will be invoked for each workspace +|setup |setup IntelliJ (install or update) +|add-plugin «id»|install an additional plugin +|run |launch IntelliJ (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+

There are variables that can be used for IntelliJ. These are explained by the following table:

+
+
+
Variables of devonfw-ide for intelliJ
+

|== == == == == == == == == == == = +|Variable|Meaning +|INTELLIJ_VERSION|The version of the tool IntelliJ to install and use. +|INTELLIJ_EDITION_TYPE|The edition of the tool IntelliJ to install and use. The value C means Community edition and the value U means Ultimate edition. The Ultimate edition requires a license. The user has to buy the license separately and it is not part of devonfw-ide. The devonfw-ide only supports download and installation. +|*EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|== == == == == == == == == == == =

+
+
+
+
+

plugins

+
+
+

To be productive with IntelliJ you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder intellij/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example scala.properties:

+
+
+
+
plugin_id=org.intellij.scala
+plugin_active=false
+
+
+
+

The variables are defined as following:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins use the search on https://plugins.jetbrains.com/idea_ce to find the plugin of your choice. Select the tab Versions and click on a version in the list. The plugin ID is displayed in the upper right corner. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon intellij add-plugin «plugin_id».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of IntelliJ. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/ionic.html b/docs/ide/1.0/ionic.html new file mode 100644 index 00000000..3f0eafb5 --- /dev/null +++ b/docs/ide/1.0/ionic.html @@ -0,0 +1,293 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

ionic

+
+
+

The ionic commandlet allows to install, configure, and launch ionic (ionic-cli). Calling devon ionic «args» is more or less the same as calling ionic «args» but with some advanced features and ensuring that ionic is properly set up for your project.

+
+
+

The arguments (devon ionic «args») are explained by the following table:

+
+
+
Usage of devon ionic
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup yarn (install and verify), configurable via YARN_VERSION +|create |Create a new devon4ng ionic project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ionic with the given arguments («args») +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/jasypt.html b/docs/ide/1.0/jasypt.html new file mode 100644 index 00000000..fd3f6881 --- /dev/null +++ b/docs/ide/1.0/jasypt.html @@ -0,0 +1,347 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

jasypt

+
+
+

The jasypt commandlet allows to install jasypt and encrypt or decrypt secrets using strong encryption given a secure masterpassword. See also devon4j password encryption guide for further details.

+
+
+

The arguments (devon jasypt «args») are explained by the following table:

+
+
+
Usage of devon jasypt
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup jasypt (install and verify), configurable via JASYPT_VERSION +|encrypt |Encrypt a secret with a masterpassword +|decrypt |Decrypt an encrypted secret with a masterpassword +|== == == == == == == == == == == =

+
+
+
+
+

example

+
+
+
+
devon jasypt encrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: secret
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: secret
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+devon jasypt decrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+secret
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/java.html b/docs/ide/1.0/java.html new file mode 100644 index 00000000..9d8b524d --- /dev/null +++ b/docs/ide/1.0/java.html @@ -0,0 +1,368 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

java

+
+
+

The java commandlet allows to install and setup Java. Also it supports devon4j. +The arguments (devon java «args») are explained by the following table:

+
+
+
Usage of devon java
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup OpenJDK (install or update and verify), configurable via JAVA_VERSION (e.g. 8u242b08 or 11.0.6_10) +|create «args» |create a new Java project based on devon4j application template. If a single argument is provided, this is the package name and is automatically split into groupId and artifactId. Use -DdbType=«db» to choose the database (hana, oracle, mssql, postgresql, mariadb, mysql, h2, hsqldb). Any option starting with dash is passed as is. +|migrate [from «version»] [single] |migrate a devon4j project to the latest version. If for some reason the current devonfw version can not be auto-detected you may provide it manually after the 'from' argument. Also the 'single' option allows to migrate only to the next available version. +|cicd «args» |generate cicd files for the current devon4java project +|== == == == == == == == == == == =

+
+
+

Since 2021.12.003 an extra version of Java can be configured via EXTRA_JAVA_VERSION variable. This can be used to launch your IDE with a different (newer) version of Java but keeping the build of your project stable.

+
+
+
+
+

create

+
+
+

Examples for create a new devon4j application:

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create -Dversion=0.0.1-alpha1 com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 0.0.1-alpha1, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group demo-app
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId demo-app, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp -DartifactId=demo-app -DdbType=hana
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId demo-app, version 1.0.0-SNAPSHOT, and SAP hana database.

+
+
+
+
devon java create com.example.domain.myapp -DdbType=oracle -Dversion=0.0.1 com.example.group -Dbatch=batch
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 0.0.1, oracle database, and with a batch module.

+
+
+
+
+

migrate

+
+
+

Example for migrating a devon4j application:

+
+
+
+
devon java migrate
+
+
+
+

Will migrate current devon4j application to the latest version available.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/jenkins.html b/docs/ide/1.0/jenkins.html new file mode 100644 index 00000000..2efed8b6 --- /dev/null +++ b/docs/ide/1.0/jenkins.html @@ -0,0 +1,290 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

jenkins

+
+
+

The jenkins commandlet allows to install, configure, and launch Jenkins.

+
+
+
Usage of devon jenkins
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup Jenkins (install and verify) +|start |Start your local Jenkins server +|stop |Stop your local Jenkins server +|add |Add current project as CI job to your local Jenkins +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/kubectl.html b/docs/ide/1.0/kubectl.html new file mode 100644 index 00000000..024fbc70 --- /dev/null +++ b/docs/ide/1.0/kubectl.html @@ -0,0 +1,328 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Kubernetes

+
+
+

The kubectl commandlet allows to install and use kubernetes. +On Windows WSL 2(Windows Subsystem for Linux) has to be installed properly as a prerequisite. +The setup on windows will then install kubernetes with K3D. K3D will create a cluster with a single node with a default name as "devonfw-cluster"

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon kubectl «args») are explained by the following table:

+
+
+
Usage of devon kubectl
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Kubernetes (install and verify) as per above flow. +|«args» |call kubectl with the specified arguments. Call kubectl help for details or use kubectl directly as preferred. +|== == == == == == == == == == == =

+
+
+
+
+

setup

+
+
+

Please note that on Windows and macOS, Kubernetes support comes together with Docker Desktop that is installed via docker commandlet. +When you have installed and launched Docker Desktop, you can once enable Kubernetes in the Preferences.

+
+
+

On Linux however, Kubernetes is installed separately by this commandlet.

+
+
+
+
+

usage

+
+
+

Once installed via setup, you can run kubectl directly from any shell of your OS directly. +Run kubectl help to get started and use the online documentations and resources on the web to get familiar with Kubernetes. +It is not our intention to repeat this here.

+
+
+

Please note that the kubectl commandlet is a command wrapper.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/kubernetes.html b/docs/ide/1.0/kubernetes.html new file mode 100644 index 00000000..eefb8735 --- /dev/null +++ b/docs/ide/1.0/kubernetes.html @@ -0,0 +1,314 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

kubernetes

+
+
+

The kubernetes commandlet allows to install kubernetes. This command is implemented to currently work on Windows. Other OS are not supported yet.

+
+
+
Usage of devon kubernetes
+

On Windows

+
+
+
    +
  • +

    Checks whether Windows Subsystem for Linux(WSL) has been enabled and any linux distribution has been installed.

    +
  • +
  • +

    If yes, checks whether Kubernetes has already been installed either on Windows or on WSL.

    +
  • +
  • +

    If yes, program quits since Kubernetes is already available.

    +
  • +
  • +

    If not, this will install Kubernetes on WSL along with K3D

    +
  • +
  • +

    As part of the setup, K3D will create a cluster with a single node with a default name as "devonfw-cluster"

    +
  • +
+
+
+

The arguments (devon kubernetes «args») are explained by the following table:

+
+
+
Usage of devon kubernetes
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Kubernetes (install and verify) as per above flow. +|«args» |call kubernetes(kubectl) with the specified arguments. Call kubectl help for details or use kubectl directly as preferred. («args») +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/log.html b/docs/ide/1.0/log.html new file mode 100644 index 00000000..761b7a80 --- /dev/null +++ b/docs/ide/1.0/log.html @@ -0,0 +1,280 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

log

+
+
+

The log directory is used to store log files e.g. for the IDE configurator. You may look here for debug information if something goes wrong.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/lombok.html b/docs/ide/1.0/lombok.html new file mode 100644 index 00000000..62eceb92 --- /dev/null +++ b/docs/ide/1.0/lombok.html @@ -0,0 +1,328 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

Lombok

+
+
+

Even though not officially recommended by devon4j some projects want to use lombok in their project. +As this requires some tweaks for IDEs we do support you with this guide in case you want to use it.

+
+
+
+
+

Lombok in Eclipse

+
+
+

For eclipse there is a plugin to activate lombok support in eclipse. +We have this already configured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon eclipse add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for lombok based projects you only need to activate this plugin in your project specific settings in lombok.properties for eclipse (replace false with true for plugin_active).

+
+
+
+
+

Lombok for VS-Code

+
+
+

For VisualStudio Code there is an extension to activate lombok support in VS-Code. +We have this already preconfigured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon vscode add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for lombok based projects you only need to activate this plugin in your project specific settings in lombok.properties for vscode (replace false with true for plugin_active).

+
+
+
+
+

Lombok for IntelliJ

+
+
+

For IntelliJ there is a plugin to activate lombok support in IntelliJ. +Currently we have not yet configured or automated this in devonfw-ide. +Please contribute to change this. See issues #453 and #491.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/master-ide.html b/docs/ide/1.0/master-ide.html new file mode 100644 index 00000000..6acac8aa --- /dev/null +++ b/docs/ide/1.0/master-ide.html @@ -0,0 +1,5727 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw-ide

+
+
+

Introduction

+
+

devonfw provides a solution to building applications which combine best-in-class frameworks and libraries +as well as industry proven practices and code conventions. +It massively speeds up development, reduces risks and helps deliver better results.

+
+
+

This document contains the instructions for the tool devonfw-ide to set up and maintain your development tools including your favorite IDE (integrated development environment).

+
+ +
+

Features

+
+

Every developer needs great tools to work efficiently. Setting up these tools manually can be tedious and error-prone. Furthermore, some projects may require different versions and configurations of such tools. Especially configurations like code-formatters should be consistent within a project to avoid diff-wars.

+
+
+

The devonfw-ide will solve these issues. Here are the features you will find through devonfw-ide:

+
+
+
    +
  • +

    Efficient
    +Set up your IDE within minutes tailored for the requirements of your project.

    +
  • +
  • +

    Automated
    +Automate the setup and update, avoid manual steps and mistakes.

    +
  • +
  • +

    Simple
    +KISS (Keep It Small and Simple), no native installers that globally mess your OS or tool-integration that break with every release. Instead, use templates and simple shell scripts.

    +
  • +
  • +

    Configurable
    +You can change the configuration depending on your needs. Furthermore, the settings contain configuration templates for the different tools (see configurator).

    +
  • +
  • +

    Maintainable
    +For your project you should copy these settings to an own git repository that can be maintained and updated to manage the tool configurations during the project lifecycle. If you use GitHub or GitLab every developer can easily suggest changes and improvements to these settings via pull/merge requests, which is easier to manage with big teams.

    +
  • +
  • +

    Customizable
    +Do you need an additional tool you had never heard of before? Put it in the software folder of the structure. The devon CLI will then automatically add it to your PATH variable.
    +Further you can create your own commandlet for your additional tool. For closed-source tools you can create your own archive and distribute it to your team members as long as you care about the terms and licenses of these tools.

    +
  • +
  • +

    Multi-platform
    +It works on all major platforms: Windows, Mac and Linux.

    +
  • +
  • +

    Multi-tenancy
    +You can have several instances of the devonfw-ide "installed" on your machine for different projects with different tools, tool versions and configurations. You won’t need to set up any physical installation nor changing your operating system. "Installations" of devonfw-ide do not interfere with each other nor with other installed software.

    +
  • +
  • +

    Multiple Workspaces
    +It supports working with different workspaces on different branches. You can create and update new workspaces with a few clicks. You can see the workspace name in the title-bar of your IDE so you do not get confused and work on the right branch.

    +
  • +
  • +

    Free
    +The devonfw-ide is free just like everything from devonfw. See LICENSE for details.

    +
  • +
+
+
+
+

IDEs

+
+

We support the following IDEs:

+
+
+ +
+
+
+

Platforms

+
+

We support the following platforms:

+
+
+ +
+
+
+

Build-Systems

+
+

We support the following build-systems:

+
+
+ +
+
+

However, also other IDEs, platforms, or tools can be easily integrated as commandlet.

+
+
+
+

Motivation

+
+

TL;DR? Let's talk to developers in a language they understand. Here are some examples with devonfw-ide:

+
+
+
+
[/]$ devon
+You are not inside a devonfw-ide installation: /
+[/]$ cd /projects/devonfw
+[devonfw]$ mvn
+zsh: command not found: mvn
+[devonfw]$ devon
+devonfw-ide environment variables have been set for /projects/devonfw in workspace main
+[devonfw]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/devonfw/software/maven
+Java version: 1.8.0_191, vendor: Oracle Corporation, runtime: /projects/devonfw/software/java
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[devonfw]$ cd /projects/ide-test/workspaces/test/my-project
+[my-project]$ devon
+devonfw-ide environment variables have been set for /projects/ide-test in workspace test
+[my-project]$ mvn -v
+Apache Maven 3.6.0 (97c98ec64a1fdfee7767ce5ffb20918da4f719f3; 2018-10-24T20:41:47+02:00)
+Maven home: /projects/ide-test/software/maven
+Java version: 11.0.2, vendor: Oracle Corporation, runtime: /projects/ide-test/software/jdk/Contents/Home
+Default locale: en_DE, platform encoding: UTF-8
+OS name: "mac os x", version: "10.14.3", arch: "x86_64", family: "mac"
+[ide-test]$ devon eclipse
+launching Eclipse for workspace test...
+[my-project]$ devon build
+[INFO] Scanning for projects...
+...
+[INFO] BUILD SUCCESS
+
+
+
+

This was just a very simple demo of what devonfw-ide can do. For further details have a look at our CLI documentation.

+
+
+

Now you might ask:

+
+
+
    +
  • +

    But I use Windows/Linux/MacOS/… - it works on all platforms!

    +
  • +
  • +

    But how about Windows CMD or Power-Shell? - it works!

    +
  • +
  • +

    But what if I use cygwin or git-bash on windows? - it works!

    +
  • +
  • +

    But I love to use ConEmu or Commander - it works with full integration!

    +
  • +
  • +

    How about macOS Terminal or iTerm2? - it works with full integration!

    +
  • +
  • +

    But I use Zsh - it works!

    +
  • +
  • +

    …​? - it works!

    +
  • +
+
+
+

Wow! So let’s get started with download & setup.

+
+ +
+
+

Setup

+ +
+
+

Prerequisites

+
+

We try to make it as simple as possible for you. However, there are some minimal prerequisites:

+
+
+
    +
  • +

    You need to have a tool to extract *.tar.gz files (tar and gzip). On Windows before Version 10 (1803) use 7-zip. On all other platforms this comes out of the box.

    +
  • +
  • +

    You need to have git and curl installed.

    +
    +
      +
    • +

      On Windows you only need to download and install git for windows. This also ships with bash and curl.

      +
    • +
    • +

      On Linux you might need to install the above tools in case they are not present (e.g. sudo apt-get install git curl or sudo yum install git-core curl)

      +
    • +
    • +

      On MacOS you only need to download and install git for mac.

      +
    • +
    +
    +
  • +
+
+
+
+

Download

+
+

The latest release of devonfw-ide can be downloaded from here (You can find all releases in maven central).

+
+
+
+

Install

+
+

Create a central folder like C:\projects or /projects. Inside this folder, create a sub-folder for your new project such as my-project and extract the contents of the downloaded archive (devonfw-ide-scripts-*.tar.gz) to this new folder. Run the command setup in this folder (on windows double clicking on setup.bat). +That’s all. To get started read the usage.

+
+
+
+

Uninstall

+
+

To "uninstall" your devonfw-ide you only need to call the following command:

+
+
+
+
devon ide uninstall
+
+
+
+

Then you can delete the devonfw-ide top-level folder(s) (${DEVON_IDE_HOME}).

+
+
+

The devonfw-ide is designed to be non-invasive to your operating system and computer. Therefore it is not "installed" on your system in a classical way. Instead you just create a folder and extract the downloaded archive to it. You only have to install some specific prerequisites like git in advance. All the other software remains locally in your devonfw-ide folder. However, there are the following exceptions (which are reverted by devon ide uninstall):

+
+
+
    +
  • +

    The devon command is copied to your home directory (~/.devon/devon)

    +
  • +
  • +

    The devon alias is added to your shell config (~/.bashrc and ~/.zshrc, search for alias devon="source ~/.devon/devon").

    +
  • +
  • +

    On Windows the devon.bat command is copied to your home directory (%USERPROFILE%\scripts\devon.bat)

    +
  • +
  • +

    On Windows this %USERPROFILE%\scripts directory is added to the PATH of your user.

    +
  • +
  • +

    The devonfw-ide will download a third party software to your ~/Downloads/devonfw-ide folder to reduce redundant storage. You have to delete this folder manually as we do not want to be responsible for data-loss in case users manually put files here.

    +
  • +
+
+
+
+

Testing SNAPSHOT releases

+
+

Whenever a story in devonfw-ide is completed by merging a PR, +our github actions will build a new SNAPSHOT release and on success deploy it to nexus on OSSRH. +You can therefore find the latest devonfw SNAPSHOT releases here. +Simply choose the latest SNAPSHOT version folder and then inside the *.tar.gz file for the latest version. +Once downloaded, you can proceed as with official releases (see install).

+
+
+

If you test the latest SNAPSHOT please also give feedback to bug or feature tickets to let us know if things are working or not. +Thanks for your testing, support and help to make devonfw better!

+
+
+
+
+
+

Usage

+ +
+

This section explains the usage of devonfw-ide according to your role:

+
+
+
    +
  • +

    Everybody should read and follow the usage for a developer.

    +
  • +
  • +

    In case you want to administrate devonfw-ide settings for your project, you should also read the usage for the ide-admin.

    +
  • +
+
+
+

Developer

+
+

As a developer you are supported to setup your IDE automated and fast while you can have a nice cup of coffee (after you provided settings-URL and accepted the license). +You only need the settings URL from your ide-admin. +Experienced developers can directly call setup «settings-URL». +Otherwise if you just call setup (e.g. by double-clicking it), you can enter it when you are prompted for Settings URL (using copy&paste to avoid typos).

+
+
+

Note: devonfw-ide supports autocompletion (since 2021.04.001). Currently this only works in bash (on windows use git bash). Simply type devon and hit [Tab] to get completion.

+
+
+
+

Update

+
+

To update your IDE (if instructed by your ide-admin), you only need to run the following command:

+
+
+
+
devon ide update
+
+
+
+

Please note that Windows uses file-locking, which can have ugly side-effects. +To be safe, you should have your IDE tools shut down before invoking the above update command. +E.g. if a tool needs to be updated, the old installation folder will be moved to a backup and the new version is installed on top. +If there are Windows file locks in place this can fail and mess up things. +You can still delete the according installation from your software folder and rerun devon ide update if you ran into this error.

+
+
+
+

Working with multiple workspaces

+
+

If you are working on different branches in parallel you typically want to use multiple workspaces.

+
+
+
    +
  1. +

    Go to the workspaces folder in your ${DEVON_IDE_HOME} and create a new folder with the name of your choice (e.g. release2.1).

    +
  2. +
  3. +

    Check out (git clone …​) the according projects and branch into that workspace folder.

    +
  4. +
  5. +

    Open a shell in that new workspace folder (cd to it) and according to your IDE run e.g. eclipse, vscode, or intellij to create your workspace and launch the IDE. You can also add the parameter create-script to the IDE commandlet in order to create a launch-script for your IDE.

    +
  6. +
+
+
+

You can have multiple instances of eclipse running for each workspace in parallel. To distinguish these instances you will find the workspace name in the title of eclipse.

+
+
+
+

Admin

+
+

You can easily customize and configure devonfw-ide for the requirements of your project. +In order to do so, you need to create your own project-specific settings git repository and provide the URL to all developers for the setup. +With tools such as gitlab, bitbucket or github every developer can easily propose changes and improvements. +However, we suggest that one team member is responsible to ensure that everything stays consistent and works. +We will call this person the ide-admin of your project.

+
+
+

The following are the suggested step-by-step instructions how an ide-admin should prepare devonfw-ide for his new project:

+
+
+
    +
  1. +

    Fork ide-settings to a git repository specific for your project (e.g. a new project in the gitlab of your production-line instance). In case you are using github, all you need to do is use the Fork button. In other cases simply create a new and empty git repository and clone this to your machine. Then add the default ide-settings as origin, fetch and pull from it:

    +
    +
    +
    git remote add upstream https://github.com/devonfw/ide-settings.git
    +git fetch upstream
    +git pull upstream master
    +git push
    +
    +
    +
    +

    Now you should have a full fork as a copy of the settings git repo with all its history that is ready for upstream merges.

    +
    +
  2. +
  3. +

    Study the structure of this git repository to understand where to find which configuration.

    +
  4. +
  5. +

    Study the configuration and understand that general settings can be tweaked in the toplevel devon.properties file of your settings git repository.

    +
  6. +
  7. +

    Configure the tools and their versions for your project. Here is an example:

    +
    +
    +
    DEVON_IDE_TOOLS=(java mvn eclipse)
    +ECLIPSE_VERSION=2020-06
    +##use e.g. 8u242b08 for Java 8
    +#JAVA_VERSION=8u242b08
    +JAVA_VERSION=11.0.5_10
    +MAVEN_VERSION=3.6.2
    +
    +
    +
    +

    This way you will take over control of the tools and their versions for every developer in your project team and ensure that things get reproducible.

    +
    +
  8. +
  9. +

    In case you need a proprietary or unsupported tool, you can study how to include custom tools.

    +
  10. +
  11. +

    In case you have very restrictive policies about downloading tools from the internet, you can create and configure a software repository for your project or company.

    +
  12. +
  13. +

    Some of the tools (especially the actual IDEs) allow extensions via plugins. You can customize them to your needs for eclipse, VS code, or intelliJ.

    +
  14. +
  15. +

    In your settings git repository you will find a projects folder. Here you will find configurations files for every git project relevant for your actual project. Feel free to create new projects for your needs and delete the devonfw specific default projects. The projects documentation will explain you how to do this.

    +
  16. +
  17. +

    For every IDE you will also find an according folder in your settings git repository. Here are the individual configuration settings for that IDE. You can change them by directly editing the according configuration files directly with a text-editor in your settings git repository. However, this is a really complex way and will take you a lot of time to find the right file and property to tweak for your actual need. Instead we suggest to study +how to customize IDE specific settings.

    +
  18. +
  19. +

    You may also create new sub-folders in your settings git repository and put individual things according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth to share in your team. However, to share and maintain knowledge we recommend to use a wiki instead.

    +
  20. +
  21. +

    You may want to customize the Eclipse spellchecker dictionary for your project and your language.

    +
  22. +
+
+
+

All described in the above steps (except the first one) can be used to manage and update the configuration during the project lifecycle. +However, when you have made changes especially in a larger project, please consider the following best-practices to avoid that a large team gets blocked by a non-functional IDE:

+
+
+
    +
  • +

    Commit your changes to a feature-branch.

    +
  • +
  • +

    First test the changes yourself.

    +
  • +
  • +

    If all works as expected, pick a pilot user of the team to test the changes from the feature branch (go to settings folder, git fetch, git checkout -t origin/feature/«name», devon ide update).

    +
  • +
  • +

    Only after that works well for a couple of days, inform the entire team to update.

    +
  • +
+
+
+
+

Announce changes to your team

+
+

In order to roll out the perfectly configured devonfw-ide to your project initially or when new members join, you only have to provide the Settings URL to the developers of your team. +You can also provide a specific branch with Settings URL#branch to use variations of common settings or to test new settings before making them public to the team.

+
+
+

After you changed and tested your settings git repository (main branch), you only need to announce this to your developers (e.g. via email or some communication tool) so that they can run devon ide update and automatically get up-to-date with the latest changes (see update).

+
+
+

In case you want to go to a new version of devonfw-ide itself, developers have to call devon ide update scripts.

+
+
+ +
+
+

Configuration

+
+

The devonfw-ide aims to be highly configurable and flexible. The configuration of the devon command and environment variables takes place via devon.properties files. The following list shows these configuration files in the order they are loaded so files can override variables from files above in the list:

+
+
+
    +
  1. +

    build in defaults (for JAVA_VERSION, ECLIPSE_PLUGINS, etc.)

    +
  2. +
  3. +

    ~/devon.properties - user specific global defaults (on windows in %USERPROFILE%/devon.properties)

    +
  4. +
  5. +

    scripts/devon.properties - defaults provided by devonfw-ide. Never directly modify this file!

    +
  6. +
  7. +

    devon.properties - vendor variables for custom distributions of devonfw-ide-scripts, may e.g. tweak SETTINGS_PATH or predefine SETTINGS_URL.

    +
  8. +
  9. +

    settings/devon.properties (${SETTINGS_PATH}/devon.properties) - project specific configurations from settings.

    +
  10. +
  11. +

    workspaces/${WORKSPACE}/devon.properties - optional workspace specific configurations (especially helpful in projects using docker).

    +
  12. +
  13. +

    conf/devon.properties - user specific configurations (e.g. M2_REPO=~/.m2/repository). During setup this file is created by copying a template from ${SETTINGS_PATH}/devon/conf/devon.properties.

    +
  14. +
  15. +

    settings/projects/*.properties- properties to configure project checkout and import

    +
  16. +
+
+
+
+

devon.properties

+
+

The devon.properties files allow to define environment variables in a simple and OS independent way:

+
+
+
    +
  • +

    # comments begin with a hash sign (#) and are ignored

    +
  • +
  • +

    variable_name=variable_value with space etc.

    +
  • +
  • +

    variable_name=${predefined_variable}/folder_name

    +
    +

    variable values can refer to other variables that are already defined, which will be resolved to their value. You have to use ${…​} syntax to make it work on all platforms (never use %…​%, $…​, or $(…​) syntax in devon.properties files).

    +
    +
  • +
  • +

    export exported_variable=this value will be exported in bash, in windows CMD the export prefix is ignored

    +
  • +
  • +

    variable_name=

    +
    +

    this will unset the specified variable

    +
    +
  • +
  • +

    variable_name=~/some/path/and.file

    +
    +

    tilde is resolved to your personal home directory on any OS including windows.

    +
    +
  • +
  • +

    array_variable=(value1 value2 value3)

    +
    +

    This will only work properly in bash worlds but as no arrays are used in CMD world of devonfw-ide it does not hurt on windows.

    +
    +
  • +
  • +

    Please never surround values with quotes (var="value")

    +
  • +
  • +

    This format is similar to Java *.properties but does not support advanced features as unicode literals, multi-lined values, etc.

    +
  • +
+
+
+

In order to know what to configure, have a look at the available variables.

+
+
+

Please only tweak configurations that you need to change and take according responsibility. There is a price to pay for flexibility, which means you have to be careful what you do.

+
+
+

Further, you can configure maven via conf/settings.xml. To configure your IDE such as eclipse or vscode you can tweak the settings.

+
+
+ +
+
+

Variables

+
+

The devonfw-ide defines a set of standard variables to your environment for configuration via variables[.bat] files. +These environment variables are described by the following table. +Those variables printed bold are also exported in your shell (except for windows CMD that does not have such concept). Variables with the value - are not set by default but may be set via configuration to override defaults. +Please note that we are trying to minimize any potential side-effect from devonfw-ide to the outside world by reducing the number of variables and only exporting those that are required.

+
+
+
Variables of devonfw-ide
+

|== == == == == == == == == == == = +|Variable|Value|Meaning +|DEVON_IDE_HOME|e.g. /projects/my-project|The top level directory of your devonfw-ide structure. +|PATH|$PATH:$DEVON_IDE_HOME/software/java:…​|You system path is adjusted by devon command. +|DEVON_HOME_DIR|~|The platform independent home directory of the current user. In some edge-cases (e.g. in cygwin) this differs from ~ to ensure a central home directory for the user on a single machine in any context or environment. +|DEVON_IDE_TOOLS|(java mvn node npm)|List of tools that should be installed and upgraded by default for your current IDE. +|DEVON_IDE_CUSTOM_TOOLS|-|List of custom tools that should be installed additionally. See software for further details. +|DEVON_CREATE_START_SCRIPTS|(eclipse vscode)|List of IDEs that shall be used by developers in the project and therefore start-scripts are created on setup. +|DEVON_OLD_PATH|…​|A "backup" of PATH before it was extended by devon to allow recovering it. Internal variable that should never be set or tweaked. +|WORKSPACE|main|The workspace you are currently in. Defaults to main if you are not inside a workspace. Never touch this variable in any variables file. +|WORKSPACE_PATH|$DEVON_IDE_HOME/workspaces/$WORKSPACE|Absolute path to current workspace. Never touch this variable in any variables file. +|JAVA_HOME|$DEVON_IDE_HOME/software/java|Path to JDK +|SETTINGS_PATH|$DEVON_IDE_HOME/settings|Path to your settings. To keep oasp4j-ide legacy behaviour set this to $DEVON_IDE_HOME/workspaces/main/development/settings. +|M2_REPO|$DEVON_IDE_HOME/conf/.m2/repository|Path to your local maven repository. For projects without high security demands, you may change this to the maven default ~/.m2/repository and share your repository among multiple projects. +|MAVEN_HOME|$DEVON_IDE_HOME/software/maven|Path to Maven +|MAVEN_OPTS|-Xmx512m -Duser.home=$DEVON_IDE_HOME/conf|Maven options +|DEVON_SOFTWARE_REPOSITORY|-|Project specific or custom software-repository. 
+|DEVON_SOFTWARE_PATH|-|Globally shared user-specific local software installation location. +|ECLIPSE_VMARGS|-Xms128M -Xmx768M -XX:MaxPermSize=256M|JVM options for Eclipse +|deprecated: ECLIPSE_PLUGINS|-|Array with "feature groups" and "update site URLs" to customize required eclipse plugins. Deprecated - see Eclipse plugins. +|«TOOL»_VERSION|-|The version of the tool «TOOL» to install and use (e.g. ECLIPSE_VERSION or MAVEN_VERSION). +|EXTRA_JAVA_VERSION|-|An additional (newer) version of java that will be used to run java-based IDEs (e.g. eclipse or intellij). +|«TOOL»_BUILD_OPTS|e.g.clean install|The arguments provided to the build-tool «TOOL» in order to run a build. +|«TOOL»_RELEASE_OPTS|e.g.clean deploy -Dchangelist= -Pdeploy|The arguments provided to the build-tool «TOOL» in order to perform a release build. +|DEVON_IDE_TRACE||If value is not an empty string, the devonfw-ide scripts will trace each script line executed. For bash two lines output: before and again after expansion. ATTENTION: This is not a regular variable working via devon.properties. Instead manually do export DEVON_IDE_TRACE=true in bash or DEVON_IDE_TRACE=true in windows CMD before running a devon command to get a trace log that you can provide to experts in order to trace down a bug and see what went wrong. +|== == == == == == == == == == == =

+
+
+ +
+
+

Devon CLI

+
+

The devonfw-ide is shipped with a central command devon. The setup will automatically register this command so it is available in any shell on your system. This page describes the Command Line Interface (CLI) of this command.

+
+
+
+

Devon

+
+

Without any argument the devon command will determine your DEVON_IDE_HOME and setup your environment variables automatically. In case you are not inside of a devonfw-ide folder the command will echo a message and do nothing.

+
+
+
+
[/]$ devon
+You are not inside a devon IDE installation: /
+[/]$ cd /projects/my-project/workspaces/test/my-git-repo
+[my-git-repo]$ devon
+devonfw-ide environment variables have been set for /projects/my-project in workspace main
+[my-git-repo]$ echo $DEVON_IDE_HOME
+/projects/my-project
+[my-git-repo]$ echo $JAVA_HOME
+/projects/my-project/software/java
+
+
+
+
+

Commandlets

+
+

The devon command supports a pluggable set of commandlets. Such commandlet is provided as first argument to the devon command and may take additional arguments:

+
+
+

devon «commandlet» [«arg»]*

+
+
+

Technically, a commandlet is a bash script located in $DEVON_IDE_HOME/scripts/command. So if you want to integrate another tool with devonfw-ide we are awaiting your pull-request. +Every commandlet takes the following generic arguments:

+
+
+
Generic arguments of every commandlet
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|-b or --batch |run in non-interactive mode (do not ask any questions). +|-q or --quiet |be quiet and avoid output. +|== == == == == == == == == == == =

+
+
+
+

Command-wrapper

+
+

For many commandlets the devon command acts as a wrapper. +Similar to mvnw or gradlew you can use it as a proxy command. +Therefore devon mvn clean install will be the same as mvn clean install. +The benefit when using devon as wrapper is that it will even work when the command (mvn, node, npm, etc.) is not on your PATH variable or even not yet installed. +We see the main benefit in this for writing portable scripts that you may commit to your git repository and that will then run everywhere and will lazily install the required tools on the fly. +In your daily usage you can and surely should avoid to always type devon as prefix to every command. +However, when you automate and want to avoid "command not found" errors, you can simply prefix the command with devon.

+
+
+
+

Commandlet overview

+
+

The following commandlets are currently available:

+
+
+ +
+ +
+
build
+
+

The build commandlet is an abstraction of build systems like maven, gradle, yarn, npm, etc. +It will auto-detect your build-system (via existence of files like pom.xml, package.json, etc.). According to this detection, it will simply delegate to the according commandlet of the specific build system. If that build-system is not yet available it will be downloaded and installed automatically.

+
+
+

So devon build allows users to build any project without bothering about the build-system. Further specific build options can be configured per project. This makes devon build a universal part of every definition of done. Before pushing your changes, please always run the following command to verify the build:

+
+
+

devon build

+
+
+

You may also supply additional arguments as devon build «args». This will simply delegate these arguments to the detected build command (e.g. call mvn «args»).

+
+ +
+
+
Docker
+
+

The Docker commandlet allows you to install and use Docker. +On Windows, WSL 2 (Windows Subsystem for Linux) has to be installed properly as a prerequisite.

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon docker «args») are explained by the following table:

+
+
+
Usage of devon docker
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Docker (install and verify) as per above flow. +|«args» |call docker with the specified arguments. Call docker help for details or use docker directly as preferred. («args») +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

The Docker commandlet will install Docker automatically. +Please note that besides the sandbox concept of devonfw-ide this is a global installation on your system. +When uninstalling devonfw-ide, you may have to manually uninstall Docker and Kubernetes if you do not need it anymore.

+
+
+
+
requirements
+
+

Running Docker and especially Kubernetes on your machine in the background will require quite some resources. +This will allocate at least 2GB of additional RAM.

+
+
+

You will need at least 8GB of total RAM while we recommend to use 16GB+.

+
+
+

You may also tune and scale it to your needs. +When using Docker Desktop (Windows or MacOS) simply go to the resources tab in the settings. +It will depend on your usage frequency if you want to have it running in the background all the time. +This is a balance between resource utilization and convenience. +If you use Docker and Kubernetes on your local machine on a daily basis this makes sense.

+
+
+

In case you only use Docker rarely, you can save resources by stopping it when not needed after it has been installed.

+
+
+
+
Windows and macOS
+
+

To enable or disable autostart, you can launch Docker Desktop on Windows or MacOS and go to the Preferences (gear icon in the title bar). Then in the General tab you can check or uncheck the option Start Docker Desktop when you login (see also here). When autostart is disabled and you launch Docker Desktop it will notice and ask you to start the service or do this automatically for you. +On Windows you can also manually tweak this:

+
+
+
    +
  • +

    Hit [windows][r]

    +
  • +
  • +

    Enter services.msc

    +
  • +
  • +

    Confirm with OK

    +
  • +
  • +

    In the services app search for the Docker Desktop Service in the list and select it.

    +
  • +
  • +

    Now you can start or stop the service by clicking on the according link text.

    +
  • +
  • +

    Also when right clicking on Docker Desktop Service and selecting Options from the context-menu, you can change the start type to automatic or manual.

    +
  • +
+
+
+
+
== Mac M1
+
+

In case you have a new Mac with M1 CPU, we automatically download and install the according ARM version of Docker Desktop for macOS. +However, if you use Docker and search for images you may end up with errors like:

+
+
+
+
docker: no matching manifest for linux/arm64/v8 in the manifest list entries.
+
+
+
+

So with M1 CPU you may need to add --platform linux/x86_64 as option to your Docker command to find the expected container image.

+
+
+
+
Linux
+
+

There is no Docker Desktop for Linux. +As Docker initially comes from the Linux world, it is easy to set it up on a Linux machine and use it from the commandline. +Therefore we do not install a GUI for you in case you are a Linux user. +In case you need a GUI for Docker and Kubernetes on Linux you can choose from the following options:

+
+
+ +
+
+
+
usage
+
+

Once installed via setup, you can run Docker directly from any shell of your OS directly. +Run docker help to get started and use the online documentations and resources on the web to get familiar with Docker. +It is not our intention to repeat this here.

+
+
+

Please note that the docker commandlet is a command wrapper.

+
+ +
+
+
eclipse
+
+

The eclipse commandlet allows to install, configure, and launch the Eclipse IDE. +To launch eclipse for your current workspace and devonfw-ide installation simply run: +devon eclipse

+
+
+

You may also supply additional arguments as devon eclipse «args». These are explained by the following table:

+
+
+
Usage of devon eclipse
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then the command will be invoked for each workspace +|setup |setup Eclipse (install or update) +|add-plugin «id» [«url»]|install an additional plugin +|run |launch Eclipse (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add |reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+

There are variables that can be used for Eclipse. These are explained by the following table:

+
+
+
Variables of devonfw-ide for Eclipse
+

|== == == == == == == == == == == = +|Variable|Meaning +|ECLIPSE_VERSION|The version of the tool Eclipse to install and use. +|ECLIPSE_EDITION_TYPE|The edition of the tool Eclipse to install and use. You can choose between Java for standard edition or JEE for enterprise edition. +|*EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with Eclipse you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder eclipse/plugins (click on this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example tmterminal.properties:

+
+
+
+
plugin_url=http://download.eclipse.org/tm/terminal/marketplace
+plugin_id=org.eclipse.tm.terminal.feature.feature.group,org.eclipse.tm.terminal.view.feature.feature.group,org.eclipse.tm.terminal.control.feature.feature.group,org.eclipse.tm.terminal.connector.ssh.feature.feature.group,org.eclipse.tm.terminal.connector.telnet.feature.feature.group
+plugin_active=true
+
+
+
+

The variables are defined as follows:

+
+
+
    +
  • +

    plugin_url defines the URL of the Eclipse update site of the plugin

    +
  • +
  • +

    plugin_id defines the feature group ID(s) to install. To install multiple features/plugins provide a comma-separated list of IDs. If you want to customize devonfw-ide with new plugins you can first install them manually and then go to About Eclipse > Installation Details then you can filter for your newly installed plugin and find the values in the Id column. Copy & paste them from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise, developers can still install the plugin manually via devon eclipse add-plugin «plugin-name» from the config file settings/eclipse/plugins/«plugin-name».properties. See the settings/eclipse/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. This e.g. applies for devstyle that allows a real dark mode for eclipse and tunes the theming and layout of Eclipse in general. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually.

+
+
+

As the maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
legacy plugin config
+
+

For downward compatibility we still support the deprecated legacy configuration if the folder settings/eclipse/plugins does not exist: +The project configuration typically defines the plugins that will be installed via ECLIPSE_PLUGINS variable. Otherwise defaults from this eclipse commandlet will apply. +Be aware that this comes at your own risk and sometimes plugins can conflict and break your IDE.

+
+
+

Here is an example how a project can configure the plugins in its devon.properties inside the settings:

+
+
+
+
ECLIPSE_PLUGINS=("AnyEditTools.feature.group" "https://raw.githubusercontent.com/iloveeclipse/plugins/latest/" "com.ess.regexutil.feature.group" "http://regex-util.sourceforge.net/update/")
+
+
+
+

For the above listed plugins you can also use the short form:

+
+
+
+
ECLIPSE_PLUGINS=("anyedit" "" "regexutil" "")
+
+
+
+

Of course you may also mix plugin IDs with fully qualified plugins.

+
+
+
+
dictionary
+
+

Eclipse already comes with a built-in spellchecker. This is very helpful when writing comments. The default settings of devonfw-ide ship with a project specific dictionary file and according configurations to enable spellchecking and configuring this dictionary. +When typing JavaDoc, inline comments or other texts the spellchecker will underline unknown words in red. +If your cursor is located at such a word you can hit [Ctrl][1] to get a context menu with additional options. +There you can either choose similar correct words to correct a typo or you may even add the word (maybe a new business term) to your local dictionary.

+
+
+
+"Eclipse spellchecker" +
+
+
+

In the latter case, you should commit the changes to your settings so that it will be available to your entire team. +For further details about committing changes to the settings please consult the admin usage.

+
+
+
+
non-english dictionary
+
+

In case your project has to write documentation or text in languages other than English, you might want to prefill your project dictionary for that language. +Here we collect a list of such dictionaries that you can download and merge into your project dictionary:

+
+
+ +
+ +
+
+
gradle
+
+

The gradle commandlet allows to install, configure, and launch gradle. It is similar to gradle-wrapper. So calling devon gradle «args» is more or less the same as calling gradle «args» but with the benefit that the version of gradle preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon gradle «args») are explained by the following table:

+
+
+
Usage of devon gradle
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup gradle (install and verify), configurable via GRADLE_VERSION +|«args» |run gradle with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
help
+
+

The help commandlet provides help for the CLI.

+
+
+
Usage of devon help
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |Print general help +|«command» |Print help for the commandlet «command». +|== == == == == == == == == == == =

+
+
+

Please note that devon help «command» will do the same as devon «command» help.

+
+ +
+
+
ide
+
+

The ide commandlet manages your devonfw-ide. +You need to supply additional arguments as devon ide «args». These are explained by the following table:

+
+
+
Usage of devon ide
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup [«SETTINGS_URL»] |setup devonfw-ide (cloning the settings from the given URL, optionally from specific branch URL#branch) +|update [«package»] |update devonfw-ide +|update scripts [to «version»] |update devonfw-ide +|uninstall |uninstall devonfw-ide (if you want to remove it entirely from your system) +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

Run devon ide setup to initially setup your devonfw-ide. It is recommended to run the setup script in the top-level directory ($DEVON_IDE_HOME). However, in case you want to skip some system specific integration, you may also run this command directly instead. The setup only needs to be called once after a new devonfw-ide instance has been created. It will follow this process:

+
+
+
    +
  • +

    install the devon command on your system (if not already installed).

    +
  • +
  • +

    clone the settings (you may provide a git URL directly as argument or you will be prompted for it).

    +
  • +
  • +

    install all required software from DEVON_IDE_TOOLS variable (if not already installed).

    +
  • +
  • +

    configure all these tools

    +
  • +
  • +

    create IDE launch scripts

    +
  • +
  • +

    perform OS specific system integration such as Windows Explorer integration (only done from setup script and not from devon ide setup)

    +
  • +
+
+
+
+
update
+
+

Run devon ide update to update your devonfw-ide. This will check for updates and install them automatically. +The optional extra argument («package») behaves as follows:

+
+
+
    +
  • +

    scripts: check if a new version of devonfw-ide-scripts is available. If so it will be downloaded and installed. As Windows is using file-locks, it is tricky to update a script while it is executed. Therefore, we update the scripts folder as an async background task and have to abort further processing at this point on windows as a workaround.

    +
  • +
  • +

    settings: update the settings (git pull).

    +
  • +
  • +

    software: update the software (e.g. if versions have changed via scripts or settings update).

    +
  • +
  • +

    projects: update the projects (checkout and import repositories into workspace/IDEs).

    +
  • +
  • +

    all: do all the above sequentially.

    +
  • +
  • +

    none: settings and software are updated by default if no extra argument is given. This is the regular usage for project developers. Only perform an update of scripts when you are requested to do so by your technical lead. Bigger projects especially need to test updates before rolling them out to the entire team. If developers always updated the latest release of the scripts which is released globally, some project functionality would break causing problems and extra efforts in the teams.

    +
  • +
+
+
+

In order to update to a specific version of scripts an explicit version can be specified after the additional to argument:

+
+
+
+
devon ide update scripts to 3.1.99
+
+
+
+

The above example will update to the exact version 3.1.99 no matter if this is an upgrade or a downgrade of your current installed version. +If you just use devon ide update scripts then the latest available version will be installed. In larger teams it is recommended to communicate exact version updates to avoid that a new release can interfere and break anything. Therefore, some pilot user will test a new version for the entire team and, only after a successful test, they will communicate to the team to update to that exact version by providing the complete command as in the above example.

+
+
+
+
uninstall
+
+

We hope you love devonfw-ide. However, if you don’t and want to get rid of it entirely and completely remove all integration, you can use this command:

+
+
+
+
devon ide uninstall
+
+
+
+

This will remove devonfw-ide from all central places of your OS (user home directory such as scripts, .devon, .bashrc, as well as windows registry, etc.). +However, it will not remove your current installations (or shared software folder). So after running this uninstall, simply remove your DEVON_IDE_HOME directory of all devonfw-ide installations and potential shared software folder. You may also want to clean up your ~/Downloads directory from files downloaded by devonfw-ide. We do not automate this as deleting a directory is a very simple manual step and we do not want to take responsibility for severe data loss if your workspaces contained valuable work.

+
+ +
+
+
intellij
+
+

The intellij commandlet allows to install, configure, and launch IntelliJ. +To launch IntelliJ for your current workspace and devonfw-ide installation, simply run: +devon intellij

+
+
+

You may also supply additional arguments as devon intellij «args». These are explained by the following table:

+
+
+
Usage of devon intellij
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then the command will be invoked for each workspace +|setup |setup IntelliJ (install or update) +|add-plugin «id»|install an additional plugin +|run |launch IntelliJ (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+

There are variables that can be used for IntelliJ. These are explained by the following table:

+
+
+
Variables of devonfw-ide for intelliJ
+

|== == == == == == == == == == == = +|Variable|Meaning +|INTELLIJ_VERSION|The version of the tool IntelliJ to install and use. +|INTELLIJ_EDITION_TYPE|The edition of the tool IntelliJ to install and use. The value C means Community edition and the value U means Ultimate edition. The Ultimate edition requires a license. The user has to buy the license separately and it is not part of devonfw-ide. The devonfw-ide only supports download and installation. +|*EXTRA_JAVA_VERSION|You can set this to a different (newer) version of Java used to launch your IDE (other than JAVA_VERSION that is used to build your project) +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with IntelliJ you need plugins. Of course devonfw-ide can automate this for you: +In your settings git repository create a folder intellij/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example scala.properties:

+
+
+
+
plugin_id=org.intellij.scala
+plugin_active=false
+
+
+
+

The variables are defined as follows:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins use the search on https://plugins.jetbrains.com/idea_ce to find the plugin of your choice. Select the tab Versions and click on a version in the list. The plugin ID is displayed in the upper right corner. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon intellij add-plugin «plugin_id».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of IntelliJ. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+ +
+
+
ionic
+
+

The ionic commandlet allows to install, configure, and launch ionic (ionic-cli). Calling devon ionic «args» is more or less the same as calling ionic «args» but with some advanced features and ensuring that ionic is properly set up for your project.

+
+
+

The arguments (devon ionic «args») are explained by the following table:

+
+
+
Usage of devon ionic
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup yarn (install and verify), configurable via YARN_VERSION +|create |Create a new devon4ng ionic project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ionic with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
jasypt
+
+

The jasypt commandlet allows to install jasypt and encrypt or decrypt secrets using strong encryption given a secure masterpassword. See also devon4j password encryption guide for further details.

+
+
+

The arguments (devon jasypt «args») are explained by the following table:

+
+
+
Usage of devon jasypt
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup jasypt (install and verify), configurable via JASYPT_VERSION +|encrypt |Encrypt a secret with a masterpassword +|decrypt |Decrypt an encrypted secret with a masterpassword +|== == == == == == == == == == == =

+
+
+
+
example
+
+
+
devon jasypt encrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: secret
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: secret
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+devon jasypt decrypt
+Enter masterpassword: master
+Enter secret to encrypt/decrypt: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+
+----ENVIRONMENT-----------------
+
+Runtime: AdoptOpenJDK OpenJDK 64-Bit Server VM 11.0.9.1+1
+
+
+
+----ARGUMENTS-------------------
+
+input: fQPbaDd8wq0h0qOZw/AEKp2TD4Y07Y//M5PzaLgF3qL7YnBQjiGLtW8s5XkP3Ly9
+password: master
+ivGeneratorClassName: org.jasypt.iv.RandomIvGenerator
+algorithm: PBEWITHHMACSHA512ANDAES_256
+
+
+
+----OUTPUT----------------------
+
+secret
+
+
+ +
+
+
java
+
+

The java commandlet allows to install and setup Java. Also it supports devon4j. +The arguments (devon java «args») are explained by the following table:

+
+
+
Usage of devon java
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup OpenJDK (install or update and verify), configurable via JAVA_VERSION (e.g. 8u242b08 or 11.0.6_10) +|create «args» |create a new Java project based on devon4j application template. If a single argument is provided, this is the package name and is automatically split into groupId and artifactId. Use -DdbType=«db» to choose the database (hana, oracle, mssql, postgresql, mariadb, mysql, h2, hsqldb). Any option starting with dash is passed as is. +|migrate [from «version»] [single] |migrate a devon4j project to the latest version. If for some reasons the current devonfw version can not be auto-detected you may provide it manually after the 'from' argument. Also the 'single' option allows to migrate only to the next available version. +|cicd «args» |generate cicd files for the current devon4java project +|== == == == == == == == == == == =

+
+
+

Since 2021.12.003 an extra version of Java can be configured via EXTRA_JAVA_VERSION variable. This can be used to launch your IDE with a different (newer) version of Java but keeping the build of your project stable.

+
+
+
+
create
+
+

Examples for create a new devon4j application:

+
+
+
+
devon java create com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create -Dversion=0.0.1-alpha1 com.example.domain.myapp
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.domain, artifactId myapp, version 0.0.1-alpha1, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp com.example.group demo-app
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId demo-app, version 1.0.0-SNAPSHOT, and h2 database.

+
+
+
+
devon java create com.example.domain.myapp -DartifactId=demo-app -DdbType=hana
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId demo-app, version 1.0.0-SNAPSHOT, and SAP hana database.

+
+
+
+
devon java create com.example.domain.myapp -DdbType=oracle -Dversion=0.0.1 com.example.group -Dbatch=batch
+
+
+
+

Will create an app with package com.example.domain.myapp, groupId com.example.group, artifactId myapp, version 0.0.1, oracle database, and with a batch module.

+
+
+
+
migrate
+
+

Example for migrating a devon4j application:

+
+
+
+
devon java migrate
+
+
+
+

Will migrate current devon4j application to the latest version available.

+
+ +
+
+
jenkins
+
+

The jenkins commandlet allows to install, configure, and launch Jenkins.

+
+
+
Usage of devon jenkins
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup Jenkins (install and verify) +|start |Start your local Jenkins server +|stop |Stop your local Jenkins server +|add |Add current project as CI job to your local Jenkins +|== == == == == == == == == == == =

+
+ +
+
+
Kubernetes
+
+

The kubectl commandlet allows to install and use kubernetes. +On Windows WSL 2 (Windows Subsystem for Linux) has to be installed properly as a prerequisite. +The setup on Windows will then install kubernetes with K3D. K3D will create a cluster with a single node with the default name "devonfw-cluster".

+
+
+

ATTENTION: +Currently this feature is new and therefore experimental. +It may change in incompatible ways in the next releases until we reach a stable state. +We hope that all is working fine for you. +However, do not expect everything to work out of the box. +In case you are facing issues (e.g. network problems with Cisco AnyConnect, etc.) please give us feedback so we can improve.

+
+
+

The arguments (devon kubectl «args») are explained by the following table:

+
+
+
Usage of devon kubectl
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup Kubernetes (install and verify) as per above flow. +|«args» |call kubectl with the specified arguments. Call kubectl help for details or use kubectl directly as preferred. +|== == == == == == == == == == == =

+
+
+
+
setup
+
+

Please note that on Windows and macOS, Kubernetes support comes together with Docker Desktop that is installed via docker commandlet. +When you have installed and launched Docker Desktop, you can once enable Kubernetes in the Preferences.

+
+
+

On Linux however, Kubernetes is installed separately by this commandlet.

+
+
+
+
usage
+
+

Once installed via setup, you can run kubectl directly from any shell of your OS directly. +Run kubectl help to get started and use the online documentations and resources on the web to get familiar with Kubernetes. +It is not our intention to repeat this here.

+
+
+

Please note that the kubectl commandlet is a command wrapper.

+
+ +
+
+
mvn
+
+

The mvn commandlet allows to install, configure, and launch maven. It is similar to maven-wrapper and mdub. So calling devon mvn «args» is more or less the same as calling mvn «args» but with the benefit that the version of maven preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon mvn «args») are explained by the following table:

+
+
+
Usage of devon mvn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via MVN_BUILD_OPTS +|setup |setup Maven (install and verify), configurable via MAVEN_VERSION +|get-version |Print the version of your current project. Will consolidate the version for multi-module projects ignoring dev[-SNAPSHOT] versions and fail on mixed versions. +|set-version «nv» [«cv»] |Set the version of your current project to «nv» (assuming your current version is «cv»). +|check-no-snapshots |Check if no «version»-SNAPSHOT dependencies are used. +|check-top-level-project |Check if you are running on a top-level project or fail if in a module or no maven project at all. +|release |Start a clean deploy release build, configurable via MVN_RELEASE_OPTS +|«args» |run maven with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
ng
+
+

The ng commandlet allows to install, configure, and launch ng (angular-cli). Calling devon ng «args» is more or less the same as calling ng «args» but with some advanced features and ensuring that ng is properly set up for your project.

+
+
+

The arguments (devon ng «args») are explained by the following table:

+
+
+
Usage of devon ng
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup yarn (install and verify), configurable via NG_VERSION +|create |Create a new devon4ng project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ng with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
node
+
+

The node commandlet allows to install and setup node.js. +The arguments (devon node «args») are explained by the following table:

+
+
+
Usage of devon node
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup node.js (install and verify), configurable via NODE_VERSION +|create «name» [«args»] | create a new devon4node application (same as devon4node new) +|generate «s» [«args»] | generate devon4node components using the schematic «s» (same as devon4node generate) +|db «c» [«args»] | execute a TypeORM command «c» (same as devon4node db) +|cicd «args» |generate cicd files for the current devon4node project +|«args» | call NodeJS with the specified arguments +|== == == == == == == == == == == =

+
+ +
+
+
npm
+
+

The npm commandlet allows to install, configure, and launch npm. Calling devon npm «args» is more or less the same as calling npm «args» but with the benefit that the version of npm preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon npm «args») are explained by the following table:

+
+
+
Usage of devon npm
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via NPM_BUILD_OPTS +|setup |setup NPM (install and verify), configurable via NPM_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |Start a clean deploy release build, configurable via NPM_RELEASE_OPTS +|«args» |run NPM with the given arguments («args») +|== == == == == == == == == == == =

+
+ +
+
+
release
+
+

Create a release in a standardized way including the following steps:

+
+
+
    +
  • +

    verify the current project (no local changes, etc.)

    +
  • +
  • +

    warn if «version»-SNAPSHOT dependencies are used

    +
  • +
  • +

    determine «version» (if currently «version»-SNAPSHOT) and print out release information.

    +
  • +
  • +

    ask user for confirmation

    +
  • +
  • +

    bump release to «version» in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    create annotated tag for your release as release/«version»

    +
  • +
  • +

    invoke deployment on build-system

    +
  • +
  • +

    set next version as («version»+1)-SNAPSHOT in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    push your changes

    +
  • +
+
+
+
Usage of devon release
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|…​ |any optional argument will directly be passed to the actual command to build the deployment +|== == == == == == == == == == == =

+
+
+
+
Build-Tools
+
+

This release commandlet utilizes the build commandlet to support multiple build-tools such as maven, gradle, or npm. Each of those commandlets should respect the variable «TOOL»_RELEASE_OPTS to customize the parameters for the release build.

+
+
+

So e.g. if a pom.xml is detected, maven will be used. In this example the variable MVN_RELEASE_OPTS is used that defaults to clean deploy -Dchangelist= -Pdeploy. +If you provide a specific argument this will be passed additionally. +So if you invoke the command devon release -P myProfile, the above step invoke deployment on build-system would technically call this:

+
+
+
+
mvn clean deploy -Dchangelist= -Pdeploy -P myProfile
+
+
+
+

Please also note that it is very tricky to determine and modify the version of a project in a fully generic way. +Even though we try our best to support different scenarios, we can not ensure this is working for edge-cases. +Therefore, we strongly encourage to follow best practices such as ci-friendly maven. +Further, sticking to the defaults and follow the devonfw standard to name the profile for custom goals in deployment simply deploy is recommended.

+
+ +
+
+
sonar
+
+

The sonar commandlet allows to install, configure, and launch SonarQube.

+
+
+
Usage of devon sonar
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup SonarQube (install and verify) +|start |Start your local SonarQube server +|stop |Stop your local SonarQube server +|analyze |Analyze current project with SonarQube +|== == == == == == == == == == == =

+
+ +
+
+
vscode
+
+

The vscode commandlet allows to install, configure, and launch Visual Studio Code. +To launch VSCode for your current workspace and devonfw-ide installation, simply run: +devon vscode

+
+
+

You may also supply additional arguments as devon vscode «args». These are explained by the following table:

+
+
+
Usage of devon vscode
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then the command will be invoked for each workspace +|setup |setup VSCode (install or update) +|add-plugin «id»|install an additional plugin (extension) +|run |launch VSCode (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+
+
plugins
+
+

To be productive with VS Code you need plugins (called extensions in VS Code). Of course devonfw-ide can automate this for you: +In your settings git repository create a folder vscode/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example devonfw-extension-pack.properties:

+
+
+
+
plugin_id=devonfw.devonfw-extension-pack
+plugin_active=true
+
+
+
+

The variables are defined as follows:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins click on Extensions at the bottom of the left navigation icon bar in VS code. Then use the search to find the plugin of your choice. If you click on it the plugin ID is displayed in grey beside the official title at the top of the plugin details page. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon vscode add-plugin «plugin-name» from the config file settings/vscode/plugins/«plugin-name».properties. See the settings/vscode/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of VS code. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
cleaning plugins on update
+
+

If you want to strictly manage the plugins for VS code in your project, you can create or edit the file settings/vscode/plugins in your settings and add this variable:

+
+
+
+
clean_plugins_on_update=true
+
+
+
+

This will wipe all plugins when an update of VS code is performed (e.g. via devon ide update) and reinstall all configured plugins. This gives you more control over the governance of the plugins and allows you to remove a plugin later during the project lifecycle. However, this will delete all manually installed plugins automatically without asking.

+
+ +
+
+
yarn
+
+

The yarn commandlet allows to install, configure, and launch npm. Calling devon yarn «args» is more or less the same as calling yarn «args» but with the benefit that the version of npm preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon yarn «args») are explained by the following table:

+
+
+
Usage of devon yarn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via YARN_BUILD_OPTS +|setup |setup yarn (install and verify), configurable via YARN_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |start a clean deploy release build, configurable via YARN_RELEASE_OPTS +|«args» |run yarn with the given arguments («args») +|== == == == == == == == == == == =

+
+
+ +
+
+
+

Structure

+
+

The directory layout of your devonfw-ide will look like this:

+
+
+
File structure of your devonfw-ide
+
+
/ projects (or C:\Projects, etc.)
+└──/ my-project ($DEVON_IDE_HOME)
+    ├──/ conf
+    ├──/ log
+    ├──/ scripts
+    ├──/ settings
+    ├──/ software
+    ├──/ system
+    ├──/ updates
+    ├──/ workspaces
+    ├── setup
+    ├── setup.bat
+    └── devon-ide-doc.pdf
+
+
+
+

The elements of the above structure are described in the individual sections. As they are hyperlinks you can simply click on them to get more details.

+
+ +
+
conf
+
+

This folder contains configurations for your IDE:

+
+
+
File structure of the conf folder
+
+
/ conf
+├──/ .m2
+│  ├──/ repository
+│  │  ├──/ ant
+│  │  ├──/ ...
+│  │  └──/ zw
+│  ├── settings-security.xml
+│  └── settings.xml
+├──/ .sonar
+├──/ ...
+└── variables
+
+
+
+

The .m2 folder is used for configurations of maven. It contains the local repository folder used as cache for artifacts downloaded and installed by maven (see also maven repositories). +Further, there are two configuration files for maven:

+
+
+
    +
  • +

    settings.xml initialized from a template from your devonfw-ide [settings]. You may customize this to your needs (configuring HTTP proxies, credentials, or other user-specific settings). Secrets can be specified as $[«variable.name»] and will be prompted, encrypted and replaced automatically during the setup (unless in batch mode). Please note that this process is skipped in batch mode and also if you use the default settings URL (for simplicity of testing). To make use of this feature simply fork or copy the settings to your own git repo. In case your credentials have changed or you made a typo, you can simply redo this step by first moving your ${DEVON_IDE_HOME}/conf/.m2/settings.xml file to a temporary folder and then calling devon mvn setup.

    +
  • +
  • +

    settings-security.xml is auto-generated for you by devonfw-ide with a random password. This should make it easier for devonfw-ide users to use password encryption and never add passwords in plain text for better security.

    +
  • +
+
+
+

Finally, there is a file variables for the user-specific configuration of devonfw-ide.

+
+ +
+
+
log
+
+

The log directory is used to store log files e.g. for the IDE configurator. You may look here for debug information if something goes wrong.

+
+ +
+
+
scripts
+
+

This directory is the heart of the devonfw-ide and contains the required scripts.

+
+
+
File structure of the conf folder
+
+
/scripts
+├──/ command
+│  ├── build
+│  ├── docker
+│  ├── eclipse
+│  ├── gradle
+│  ├── help
+│  ├── ide
+│  ├── intellij
+│  ├── ionic
+│  ├── jasypt
+│  ├── java
+│  ├── jenkins
+│  ├── kubectl
+│  ├── mvn
+│  ├── ng
+│  ├── node
+│  ├── npm
+│  ├── project
+│  ├── release
+│  ├── sonar
+│  ├── vscode
+│  └── yarn
+├── devon
+├── devon.bat
+├── environment-project
+├── environment-project.bat
+├── functions
+└── devon.properties
+
+
+
+

The command folder contains the commandlets. +The devon script is the key command line interface for devonfw-ide. +There is also devon.bat that can be used in cmd or PowerShell. +As the devon CLI can be used as a global command on your computer from any directory and gets installed centrally, it aims to be stable, minimal, and lightweight. +The key logic to set up the environment variables is therefore in a separate script environment-project and its Windows variant environment-project.bat inside this scripts folder. +The file functions contains a collection of reusable bash functions. +These are sourced and used by the commandlets. +Finally the devon.properties file contains defaults for the general configuration of devonfw-ide.

+
+ +
+
+
settings
+
+

The devonfw-ide requires settings with configuration templates for the arbitrary tools.

+
+
+

To get an initial set of these settings we provide the default ide-settings as an initial package. These are also released so you can download the latest stable or any history version at maven central.

+
+
+

To test devonfw-ide or for very small projects you can also use the latest default settings (just hit return when setup is asking for the Settings URL). +However, for collaborative projects we strongly encourage you to distribute and maintain the settings via a dedicated and project specific git repository. +This gives you the freedom to control and manage the tools with their versions and configurations during the project lifecycle. +Therefore simply follow the admin usage guide.

+
+
+
+
Structure
+
+

The settings folder (see SETTINGS_PATH) has to follow this file structure:

+
+
+
File structure of settings
+
+
/settings
+├──/ devon
+│  ├──/ conf
+│  │  ├──/ .m2
+│  │  │  └── settings.xml
+│  │  ├──/ npm
+│  │  │  └── .npmrc
+│  │  └── devon.properties
+├──/ eclipse
+│  ├──/ workspace
+│  │  ├──/ setup
+│  │  └──/ update
+│  ├── lifecycle-mapping-metadata.xml
+│  └── project.dictionary
+├──/ ...
+├──/ sonarqube
+│  └──/ profiles
+│     ├── Devon-C#.xml
+│     ├── ...
+│     └── Devon-XML.xml
+├──/ vscode
+│  └──/ workspace
+│     ├──/ setup
+│     └──/ update
+└── devon.properties
+
+
+
+

As you can see, the settings folder contains sub-folders for tools of the IDE. +So the devon folder contains devon.properties files for the configuration of your environment. +Further, for the IDEs such as eclipse or vscode, the according folders contain the templates to manage the workspace via our configurator.

+
+
+
+
Configuration Philosophy
+
+

Different tools and configuration files require a different handling:

+
+
+
    +
  • +

    Where suitable, we directly use these configurations from your settings (e.g. for eclipse/lifecycle-mapping-metadata.xml, or eclipse/project.dictionary).

    +
  • +
  • +

    The devon folder in settings contains templates for configuration files. These are copied to the devonfw-ide installation during setup (if no such file already exists). In this way the settings repository can provide reasonable defaults but allows the user to take over control and customize to his personal needs (e.g. .m2/settings.xml).

    +
  • +
  • +

    Other configurations need to be imported manually. To avoid manual steps and simplify use we try to automate as much as possible. This currently applies to sonarqube profiles but will be automated with sonar-devon4j-plugin in the future.

    +
  • +
  • +

    For tools with complex configuration structures like eclipse, intellij, or vscode we provide a smart mechanism via our configurator.

    +
  • +
+
+
+
+
Customize Settings
+
+

You can easily customize these settings for the requirements of your project. We suggest that one team member is responsible to ensure that everything stays consistent and works.

+
+
+

You may also create new sub-folders in settings and put individual items according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth to share in your team. However, to share and maintain knowledge we recommend to use a wiki.

+
+ +
+
+
software
+
+

The software folder contains the third party tools for your IDE such as maven, npm, java, etc. +With respect to the licensing terms you may create a custom archive containing a devonfw-ide together with the required software. +However, to be platform independent and allow lightweight updates, the devonfw-ide is capable to download and install the software automatically for you.

+
+
+
+
Repository
+
+

By default, software is downloaded via the internet from public download URLs of the according tools. However, some projects may need specific tools or tool versions that are not publicly available. +In such case, they can create their own software repository (e.g. in a VPN) and configure the base URL of it via DEVON_SOFTWARE_REPOSITORY variable. +Then, devonfw-ide will download all software from this repository only instead of the default public download URLs. +This repository (URL) should be accessible within your network via HTTPS (or HTTP) and without any authentication. +The repository needs to have the following structure:

+
+
+
+
${DEVON_SOFTWARE_REPOSITORY}/«tool»/«version»/«tool»-«version»[-«os»].tgz
+
+
+
+

So for every tool «tool» (java, maven, vscode, eclipse, etc.) you need to provide a folder in your repository. +Within this folder for every supported version «version» you need a subfolder. +This subfolder needs to contain the tool in that version for every operating system «os» (windows, linux, or mac - omitted if platform independent, e.g. for maven).

+
+
+
+
Shared
+
+

By default, each installation of devonfw-ide has its own physical installations of the required tools in the desired versions stored in its local software folder. +While this is great for isolation of devonfw-ide installations and to prevent side-effects, it can cause a huge waste of disc resources in case you are having many installations of devonfw-ide. +If you are a power-user of devonfw-ide with more than ten or even up to hundreds of installations on your machine, you might love to share installations of a software tool in a particular version between multiple devonfw-ide installations.

+
+
+ + + + + +
+ + +If you use this power-feature you are taking responsibility for side-effects and should not expect support. Also if you are using Windows please read Symlinks in Windows and make your mind if you really want to do so. You might also use this hint and maintain it manually without enabling the following feature. +
+
+
+

In order to do so, you only need to configure the variable DEVON_SOFTWARE_PATH in your ~/devon.properties pointing to an existing directory on your disc (e.g. /projects/software or C:\projects\software). +Then devonfw-ide will install required software into ${DEVON_SOFTWARE_PATH}/${software_name}/${software_version} as needed and create a symbolic link to it in ${DEVON_IDE_HOME}/software/${software_name}.

+
+
+

As a benefit, another devonfw-ide installation using the same software with the same version can re-use the existing installation and only needs to create the symbolic link. No more waste of having many identical JDK installations on your disc.

+
+
+

As a drawback, you need to be aware that specific tools may be "manipulated" after installation. +The most common case is that a tool allows to install plugins or extensions such as all IDEs do. Such "manipulations" will cause side-effects between the different devonfw-ide installations sharing the same version of that tool. +While this can also be a benefit it may also cause trouble. +If you have a sensitive project that should not be affected by such side-effects, you may again override the DEVON_SOFTWARE_PATH variable to the empty value in your ${DEVON_IDE_HOME}/conf/devon.properties of that sensitive installation:

+
+
+
+
DEVON_SOFTWARE_PATH=
+
+
+
+

This will disable this feature particularly for that specific sensitive devonfw-ide installation but let you use it for all other ones.

+
+
+
+
Custom
+
+

In some cases, a project might need a (proprietary) tool(s) that (are) not supported by devonfw-ide. A very simple solution is to get a release of devonfw-ide and add the tool(s) to the software folder and then distribute this modified release to your team. However, this has several drawbacks as you then have a fork of devonfw-ide and will lose your tool(s) when updating to a new release.

+
+
+

As a solution for this need, devonfw-ide lets you configure custom tools via the DEVON_IDE_CUSTOM_TOOLS variable. It can be defined in devon.properties of your settings git repository as an array of the custom tools you need to add. +Each entry applies:

+
+
+
    +
  • +

    It needs to have the form «tool»:«version»[:all][:«repository-url»]

    +
  • +
  • +

    The first entry must have the «repository-url» included which is used as default

    +
  • +
  • +

    Further entries will inherit this default if omitted

    +
  • +
  • +

    This URL is used in the same way as described above for a software repository.

    +
  • +
  • +

    The DEVON_SOFTWARE_REPOSITORY variable is ignored by this feature.

    +
  • +
  • +

    The optional infix :all is used to indicate that the tool is platform independent. Otherwise, an OS specific infix is appended to the URL file to download for your platform (windows, linux, or mac).

    +
  • +
+
+
+

As an example, we define it in ${DEVON_IDE_HOME}/settings/devon.properties:

+
+
+
+
DEVON_IDE_CUSTOM_TOOLS=(jboss-eap:7.1.4.GA:all:https://host.tld/projects/my-project firefox:70.0.1)
+
+
+
+

This will download and extract the following content to your software folder:

+
+ +
+

Please note that if you are not using windows, the -windows suffix will be -mac or -linux.

+
+ +
+
+
system
+
+

The system folder contains documentation and solutions for operation system specific integration. Please have a look to get the maximum out of devonfw-ide and become a very efficient power user.

+
+ +
+
+
updates
+
+

The updates folder is used for temporary data. This includes:

+
+
+
    +
  • +

    extracted archives for installation and updates

    +
  • +
  • +

    backups of old content on updates to prevent data loss

    +
  • +
+
+
+

If all works fine you may clean this folder to save some kilo- or mega-bytes. Otherwise, you can ignore it unless you are looking for a backup after a failed or unplanned upgrade.

+
+ +
+
+
workspaces
+
+

The workspaces folder contains folders for your active work. There is a workspace folder main dedicated for your primary work. You may do all your work inside the main workspace. Also, you are free to create any number of additional workspace folders named as you like (e.g. test, release, testing, my-sub-project, etc.). Using multiple workspaces is especially relevant for Eclipse as each workspace has its own Eclipse runtime instance and configuration.

+
+
+

Within the workspace folder (e.g. workspaces/main) you are again free to create sub-folders for (sub-)projects according to your needs. We assume that in most cases you clone git repositories here. The following structure shows an example layout for devonfw:

+
+
+
File structure of workspaces
+
+
/ workspaces
+├──/ main
+│  ├──/ .metadata
+│  ├──/ ide
+│  ├──/ devon4j
+│  └──/ my-thai-star
+└──/ stable
+   ├──/ .metadata
+   ├──/ ide
+   └──/ devon4j
+
+
+
+

In the main workspace you may find the cloned forks for regular work (in the example e.g. devon4j) as a base to create pull-requests while in the stable workspace there is a clone of devon4j from the official devon4j. +However, this is just an example. Some people like to create separate workspaces for development and maintenance branches with git. Other people just switch between those via git checkout.

+
+ +
+
+
Project import
+
+

The devonfw-ide supports to automatically check out and import required projects into your IDE during setup. To configure this you put a .properties file for each desired project into the projects sub-folder in your settings. Each .properties file describes one "project" which you would like to check out and (potentially) import:

+
+
+
+
path=myproject
+workingsets=Set1,Set2
+workspace=example
+git.url=http://github.com/someorg/someproject
+git.branch=develop
+build.path=.
+build.cmd=mvn -DskipTests=true -Darchetype.test.skip=true clean install
+eclipse=import
+active=true
+
+
+
+
+
.Variables of project import
+
+
+
+

|== = +|Variable|Value|Meaning +|path|e.g. myproject, will clone into ${WORKSPACE_PATH}/myproject|(required) Path into which the project is cloned. This path is relative to the workspace. +|workingsets|e.g. ws1,ws2|(optional) This will create working sets (in eclipse). Each module (eclipse project) of this project will be part of all these working sets. Working sets will be automatically created if necessary. +|workspace|main|Workspace to use for checkout and import. Default is main. +|git.url|e.g. http://github.com/someorg/someproject|(required) Git URL to use for cloning the project. +|git.branch|e.g. develop|(optional) Git branch to checkout. Git default branch is default. +|build.path|e.g. . (default)|(optional) The directory inside path where to trigger an initial build after clone or pull (if build.cmd is set). For a regular project use . to build top-level project. +|build.cmd +|e.g. mvn -DskipTests=true -Darchetype.test.skip=true clean install +|(optional) The devonfw command to invoke to build the project after clone or pull. If omitted no build is triggered. +|eclipse|e.g. import|(optional) Desired action for eclipse IDE. If you put import here all modules (eclipse projects) in the current project will be imported into eclipse. If you leave this out or put any other value for this parameter, no change in eclipse is done. +|active|true|(optional) If set to false the project is skipped during the setup. +|== =

+
+
+

Please note that the .properties file is parsed via shell and not via java. So be careful with "advanced" features .properties files normally support.

+
+
+
+
+
+
+

Advanced Features

+ +
+

Cross-Platform Tooling

+ +
+
+

Git Client

+
+

If you are looking for a git client that works cross-platform we recommend to use Fork.

+
+
+
+

Draw Diagrams

+
+

To draw diagrams for your project or for blueprints in devonfw, we recommend the following cross-platform tools:

+
+
+
    +
  • +

    draw.io is a powerful generic vector painting program (similar to visio). You can get a free open-source edition for your desktop from here.

    +
  • +
  • +

    ObjectAid is a nice and easy to use eclipse plugin that you can use to quickly create UML diagrams from existing code. While class-diagrams are supported for free, you need to buy a license if you want to use the other diagram types.

    +
  • +
  • +

    PlantUML is a great tool that can render UML diagrams from simple markup that can be easily managed in git or other version-control systems together with your code. Its simplicity allows branching and merging unlike other greedy binary UML data-formats.

    +
  • +
+
+
+
+

Browser Plugins

+
+

There are tons of helpful browser plugins out there and it might be a matter of personal taste what you like to have installed. However, as we are heavily using github we want to promote octotree. +In case you also work with ZenHub you might want to install the Zenhub Browser Extension.

+
+ +
+
+

Windows Tooling

+ +
+
+

Installing software

+
+

The devon IDE already contains a lot of software. But if you need more, here are some ways to get it easily:

+
+
+
+

Chocolatey

+
+

Chocolatey is a repository for free and open source software similar to the repositories known from Linux like apt, apk, pacman, …​

+
+
+
+

Winget

+
+

Microsoft is also working on a repository for Windows called winget. It is currently in alpha state, but is expected to be integrated in the upcoming Windows 11.

+
+
+
+

Integration into Windows-Explorer

+
+

After you have set up your devonfw-ide on a windows machine, +you already have windows-explorer integration out-of-the-box. +Just right-click on the folder you would like to open in a terminal and choose from the context menu:

+
+
+
    +
  • +

    Git Bash

    +
  • +
  • +

    Open devonfw cmd shell here

    +
  • +
  • +

    Open devonfw PowerShell here

    +
  • +
  • +

    Open devonfw Cygwin Bash here (only if cygwin was installed during setup)

    +
  • +
+
+
+
+

Tabs everywhere

+
+

Many people got used to tabs that have been introduced by all major browsers:

+
+
+
+tabs in firefox +
+
Figure 1. Tabs in Firefox
+
+
+

This nice feature can be added to many other tools.

+
+
+
+

Tabs for Windows Explorer

+
+

If you want to have tabs for windows explorer simply install Clover

+
+
+
+tabs in windows explorer +
+
Figure 2. Tabs in Windows Explorer
+
+
+
+

Tabs for SSH

+
+

If you want to have tabs for your SSH client Putty (or even better Kitty that comes with WinSCP integration) you simply install SuperPutty +BTW: Windows 10 has already an SSH client included.

+
+
+
+tabs for SSH sessions +
+
Figure 3. Tabs for SSH
+
+
+
+

Tabs for CMD

+
+

If you want to have tabs for your windows command-line you simply install ConEmu. Here you can also add other shells like Putty. +Also you should have a look at the new Windows Terminal which also supports tabs.

+
+
+
+tabs for windows shells +
+
Figure 4. Tabs for CMD
+
+
+

See integration to make ConEmu work flawless with devonfw-ide.

+
+
+
+

Windows Helpers

+ +
+
+

Handle passwords

+
+

Do you want complex passwords that differ for each account for security? Do you only want to remember a single password for simplicity? Do you want to have both? Then, you need to install KeePass right now.

+
+
+
+

Real text editor

+
+

A real developer needs a real text editor and not windows built in notepad. +The most common choice is Notepad++.

+
+
+
+

Real compression tool

+
+

Do you need to deal with ZIP files, TGZ, dpkg, etc.? Just install 7zip and forget about windows build-in ZIP support (that is buggy with long file paths, etc.).

+
+
+
+

Smarter clipboard

+
+

Do you want to paste something from the clipboard but meanwhile you had to copy something else? Just, one of the many things you can easily do with ditto.

+
+
+
+

PowerToys

+
+

Microsoft provides some extensions to improve the workflow in windows called PowerToys. They include tools like a file renamer, a way to order your windows on the screen, a color picker and more.

+
+
+
+

Sysinternals Tools

+
+

A real developer will quickly notice that windows built-in tools to analyze processes, network connections, autostarts, etc. are quite poor. So, what you really would like is the Sysinternals-Suite. You can make process-explorer your default task manager. Use autoruns to prevent nasty background things to be started automatically. Use tcpview to figure out which process is blocking port 8080, etc.

+
+
+
+

Cope with file locks

+
+

Did you ever fail to delete a file or directory that was locked by some process and you did not even know which one it was? +Then you might love IoBit Unlocker. +See also this article.

+
+
+
+ +
+

Are you used to symbolic and hard links in Linux? Do you have to work with Windows? Would you also like to have such links in Windows? Why not? Windows supports real links (not shortcuts like in other cases). +If you even want to have it integrated in windows explorer you might want to install linkshellextension. However, you might want to disable SmartMove in the configuration if you face strange performance issues when moving folders.

+
+
+
+

Linux

+
+

Install Cygwin and get your bash in windows with ssh-agent, awk, sed, tar, and all the tools you love (or hate). Windows 10 has already a Linux as an installable feature included: WSL and from Version 2004 on WSL2, which is a native Linux Kernel running on Windows (in a light weight VM).

+
+
+
+

X11

+
+

Do you want to connect via SSH and need to open an X11 app from the server? Do you want to see the GUI on your windows desktop? +No problem: Install VcXsrv.

+
+
+
+

Keyboard Freak

+
+

Are you a keyboard shortcut person? Do you want to have shortcuts for things like « and » ? +Then you should try AutoHotKey. +For the example (« and ») you can simply use this script to get started:

+
+
+
+
^<::Send {U+00AB}
+^+<::Send {U+00BB}
+
+
+
+

First, just press [ctrl][<] and [ctrl][>] ([ctrl][shift][<]). Next, create shortcuts to launch your IDE, to open your favorite tool, etc. +If you like a GUI to easily configure the scripts, that comes with a lot of extensions preinstalled, you should have a look at Ac’tive Aid.

+
+
+
+

Paint anywhere on your desktop

+
+

Do you collaborate sharing your screen, and want to mark a spot on top of what you see? Use Epic Pen to do just that.

+
+
+
+

Analyze graphs

+
+

Do you need to visualize complex graph structures? Convert them to Trivial Graph Format (.tgf), and run yEd to get an interactive visualization of your graph.

+
+
+
+

Up your screen capture game

+
+

Capture any part of your screen with a single click, directly upload to dropbox, or run a svn commit all in one go with Greenshot. Another screen capture tool where you can easily manage and edit your screenshots and also do screen recordings with is Screenpresso.

+
+
+
+

Fast Search in Windows

+
+

Everything is a desktop search utility for Windows that can rapidly find files and folders by name.

+
+ +
+
+

MacOS Tooling

+ +
+
+

Finder

+
+

If you want to open a terminal from a folder in Finder and automatically get your environment set properly for devonfw-ide you will find the perfect solution here.

+
+
+
+devonfw-ide integration in MacOS Finder +
+
+
+

So after installing (see below) the integration(s) provided here, you can easily open a terminal ready for your devonfw-ide:

+
+
+
    +
  • +

    right click ([control] + click) on file or folder in Finder

    +
  • +
  • +

    Expand the Quick-Actions sub-menu

    +
  • +
  • +

    Click on the desired action (e.g. Open devonfw-Terminal here)

    +
  • +
  • +

    Verify that your environment is properly initialized by invoking:

    +
    +
    +
    mvn -v
    +
    +
    +
  • +
+
+
+

To get this feature for macOS Terminal.app open Finder and run the workflow system/mac/terminal/Open_devonfw-Terminal_here.workflow (in ${DEVON_IDE_HOME}). For iTerm2.app (that can be installed from App Store) do the same with system/mac/iterm/Open_devonfw-iTerm_here.workflow.

+
+
+
+

Keyboard

+
+

Keyboard support is not an integration however, some users coming from other platforms may struggle with the way macOS deals with (external non-apple) keyboards. +So to make it short: if you are happy with your keyboard and shortcuts, you can skip all the following. +Otherwise, if you think that pressing keys like Home, End, etc. should just work as expected or pressing Alt Gr should allow you to type the special characters as printed on your German keyboard then here you will find a solution to your problems! +To get all automated you can just run the script system/mac/keyboard/install-mac-keyboard-support.sh (in ${DEVON_IDE_HOME}). +If you would like to understand what is going on, you want to customize the keyboard settings to your needs, or you want a keyboard layout other than German ISO, please read on.

+
+
+
+

Keyboard Layouts

+
+

Keyboard layouts allow a fine-grained mapping of each key on your keyboard to its resulting input character or behaviour. +They are macOS native features and do not need to have software running as a background service to make the keyboard mapping work (see Karabiner section below as an alternative). +They are provided as so called bundle (white lego brick icon). Like a macOS app this is a folder containing a Contents folder with a specific sub-folder structure. +In the Resources subfolder *.keylayout files are placed and define the exact mapping for the keyboard. +As an example we provide a Keyboard Layouts folder containing a bundle for a German keyboard mapping.

+
+
+

To install keyboard layouts simply double-click the bundle or copy it to ~/Library/Keyboard Layouts. +To actually use them go to System Preferences and select Keyboard. +Then, select the tab Input Sources. +With the + button you can add a keyboard layout for your daily usage with your Mac. +Please note that the keyboard layout shipped with devonfw-ide is called German-ISO and can be found in the Others section at the end of the list. +It can be used as an example or template, if you want to create your own layout.

+
+
+
+Keyboard Preferences / Input Sources +
+
+
+

When you have multiple mappings in place, on the top menu bar you will find a little icon next to the current time that allows you to switch between the keyboard layouts, which is very handy when you switch from your native MacBook keyboard to an external USB keyboard or vice versa. +Even for a pure macOS geek this can be helpful in case a friend coming from Windows/Linux is supposed to type something on the Mac in a pair-programming session.

+
+
+

In our German keyboard mapping example you can use the keys like Alt Gr, etc. to type special characters as you would expect and as printed on your keyboard. +To make Pos1, End, etc. work properly across all apps please read on to the next section(s).

+
+
+

In case you would like to create your own keyboard layout you can of course edit the *.keylayout files in a text editor. +However, to make this much more comfortable, you can use the graphical editor tool Ukelele. +Besides the app itself, the Ukelele dmg file also contains a Documentation and a Resources folder. +The latter contains many keyboard layouts that you can use as a starting point.

+
+
+
+

Key Bindings

+
+

Still, various keyboard shortcuts might not work as expected for you. +Therefore, we provide you with an advanced configuration in the folder system/mac/keyboard/KeyBindings that you can copy to your ~/Library folder:

+
+
+
+
cd system/mac/keyboard/
+cp -r KeyBindings ~/Library
+
+
+
+

To make the changes work you need to log out and log in again or you can reboot. +After that, your Home (Pos1) and End buttons should work as expected including with selection via Shift and/or Command. +Also, you can use Command together with the left or right arrow key to move between words and combine it with Shift for selection. +As an example, for further customization you can press Command + < to type the Unicode character «.

+
+
+

However, still some apps listen to keyboard events on a lower level and come with their own keyboard mappings. +In these apps you might still experience unexpected behaviour. +Solutions can be found in the following sub-sections.

+
+
+
+

Switch Control and Command

+
+

If you are used to Windows or Linux and get easily confused by the Apple keyboard behaviour, you might want to switch the Control and the Option key. +Open System Preferences and select Keyboard. +Then, in the first tab, click on the button Modifier Keys…​. +For every keyboard you can customize the behaviour of your modifier keys and therefore switch Control and Option as illustrated in the screenshot:

+
+
+
+Keyboard Preferences / Modifier Keys +
+
+
+

Programmers should now also disable the Control + Space shortcut for opening Spotlight Search, as otherwise this shortcut cannot be redefined in other apps like common IDEs.

+
+
+
+Keyboard Preferences / Shortcuts +
+
+
+
+

== Eclipse

+
+

In Eclipse, move and select by word as described above does not work. +Even worse, the most important shortcut does not work: Control + Space for code completion (content assist). +You can manually redefine the key bindings in Preferences under General > Keys. +However, with multiple IDE installations and workspaces this will quickly get tedious. +Therefore, you can Export and Import specific Preferences such as Keys Preferences to/from a *.epf (Eclipse PreFerences) file. +We have done all this for you so you can just import the file located in system/mac/keyboard/Eclipse/eclipse-mac-keybindings.epf into your Eclipse. +Happy coding.

+
+
+
+

Karabiner

+
+

If you want more dynamics and do not worry about an app that has to run in the background to make your keyboard work as you like (no relevant performance overhead), you can try Karabiner Elements. +This is a powerful tool to remap your keyboard shortcuts. +In the UI you can only directly create and edit Simple Modifications that are too limited for most use-cases. +However, using Complex Modifications you can do a lot of magic to customize the keyboard behaviour to your personal needs. +A key with any combination of modifiers can be mapped to any key with arbitrary modifiers. +This can also be bound to conditions based on the frontmost application or the keyboard model. +These complex modifications are configured as *.json files. +We have included a set with useful rules for external keyboards, programmer shortcuts, etc. +If you have Karabiner installed, you only need to copy the contents of the karabiner folder located in this directory to your ~/.config folder:

+
+
+
+
cd system/mac/keyboard/
+cp karabiner/assets/complex_modifications/*.json ~/.config/karabiner/assets/complex_modifications/
+
+
+
+

Now, if you open the Complex Modifications in the Karabiner app, you can click on the + Add rule button and will see these mappings in the pop up. +Select the rules you want to add (e.g. add all) and you are done. +Unlike other solutions, you can quickly tweak your keyboard without the need to log out and restart apps, which gives faster trial and error turnarounds. +Further, if you want to tweak your own configs, Karabiner comes with a secondary app called Karabiner-EventViewer that shows you the names of the keys, modifiers, and apps for the events you are triggering. +This is very helpful to get the config right.

+
+ +
+
+

Linux Tooling

+
+

There is nothing in this section so far. If you are a Linux user, please share your experience and provide your valuable hints.

+
+ +
+
+

Lombok

+
+

Even though not officially recommended by devon4j some projects want to use lombok in their project. +As this requires some tweaks for IDEs we do support you with this guide in case you want to use it.

+
+
+
+

Lombok in Eclipse

+
+

For Eclipse there is a plugin to activate Lombok support. +We have this already configured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon eclipse add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for lombok based projects you only need to activate this plugin in your project specific settings in lombok.properties for eclipse (replace false with true for plugin_active).

+
+
+
+

Lombok for VS-Code

+
+

For Visual Studio Code there is an extension to activate Lombok support. +We have this already preconfigured for you in our default settings. So for manual installation after setup, you can get it via this command:

+
+
+
+
devon vscode add-plugin lombok
+
+
+
+

However, to avoid manual extra effort for lombok based projects you only need to activate this plugin in your project specific settings in lombok.properties for vscode (replace false with true for plugin_active).

+
+
+
+

Lombok for IntelliJ

+
+

For IntelliJ there is a plugin to activate Lombok support. +Currently we have not yet configured or automated this in devonfw-ide. +Please contribute to change this. See issues #453 and #491.

+
+
+
+
+
+

Support

+ +
+

Migration from oasp4j-ide

+
+

The devonfw-ide is a completely new and innovative solution for managing the local development environment that has been created from scratch. +Releases of OASP as well as releases of devonfw until version 3.1.x are based on the old oasp4j-ide that is now considered deprecated. As devonfw-ide is a complete redesign this will have some impact for the users. This section should help and assist so you do not get lost.

+
+
+
+

Get familiar with devonfw-ide

+
+

First of all you should roughly get familiar with the new devonfw-ide. The key features and changes are:

+
+
+
    +
  • +

    platform-agnostic (supports Windows, Mac, and Linux in a single distribution)

    +
  • +
  • +

    small core (reduced the download package from ~2 gigabyte to ~2 megabyte)

    +
  • +
  • +

    fast and easy updates (built in update support)

    +
  • +
  • +

    minimum number of scripts (removed tons of end-user scripts making things much simpler)

    +
  • +
  • +

    fully automated setup (run setup script and you are ready - even for advanced features that had to be configured manually before)

    +
  • +
  • +

    single command for everything (entire CLI available via new devon command)

    +
  • +
+
+
+

For all the details you should study the documentation starting from the beginning.

+
+
+
+

Migration of existing oasp4j-ide installation

+
+
    +
  • +

    extract new devonfw-ide-scripts on top of your existing installation

    +
  • +
  • +

    run setup

    +
  • +
  • +

    done

    +
  • +
+
+
+

If you get errors:

+
+
+
    +
  • +

    ask your technical lead to fix the settings git repo for devonfw-ide or offer him to do it for you.

    +
  • +
  • +

    you need to merge the devon folder into your settings

    +
  • +
  • +

    you need to merge the devon.properties into your settings

    +
  • +
  • +

    you should check your variables[-customized][.bat] and merge required customizations into the proper configuration

    +
  • +
+
+
+
+

Hints for users after migration

+
+

Getting used to all the new commands might be tedious when starting after a migration.

+
+
+
Comparison of commands
+

|== == == == == == == == == == == = +|oasp4j-ide command|devonfw-ide command|Comment +|create-or-update-workspace|devon eclipse ws-update +.4+|actually not needed anymore as workspace is updated automatically when IDE is launched. To launch your IDE simply run devon eclipse, devon intellij, or devon vscode. If you like to get launch scripts for your IDE e.g. Eclipse just call devon eclipse --all create-script. +|create-or-update-workspace «workspace»|cd «workspace» && devon eclipse ws-update +|update-all-workspaces|devon eclipse --all ws-update +|create-or-update-workspace-vs|devon vscode ws-update

+
+
+

|devcon workspace create «workspace»|Simply create the «workspace» directory (e.g. cd workspaces && mkdir examples)|

+
+
+

|scripts/update-eclipse-workspace-settings|devon eclipse ws-reverse|To add new properties (old option --new) use devon eclipse ws-reverse-add

+
+
+

|devcon project build
+devcon devon4j build
+devcon devon4ng build +|devon build|

+
+
+

|devcon devon4j create|devon java create|

+
+
+

|devcon devon4ng create|devon ng create|

+
+
+

|devcon system *
+devcon dist * +|setup or devon ide setup|

+
+
+

|console.bat|-|Simply open terminal in selected folder. On Windows right-click folder in windows-explorer and select open devonfw CMD here.

+
+
+

|devcon help|devon help|

+
+
+

|devcon doc|Read the documentation from devonfw.com| +|== == == == == == == == == == == =

+
+
+ +
+
+

License

+
+

The product devonfw-ide is licensed under the following terms.

+
+
+

Binaries of this product have been made available to you by devonfw under the Apache Public License 2.0.

+
+
+

The documentation of this product is licensed under the terms of the Creative Commons License (Attribution-No Derivatives 4.0 International).

+
+
+

All of the source code to this product is available under licenses which are both free and open source.

+
+
+

More specifically, most of the source code is available under the Apache Public License 2.0. The remainder of the software which is not under the Apache license is available under one of a variety of other free and open source licenses. Those that require reproduction of the license text in the distribution are given below. (Note: your copy of this product may not contain code covered by one or more of the licenses listed here, depending on the exact product and version you choose.)

+
+
+

The following table shows the components that may be used. The column inclusion indicates the way the component is included:

+
+
+
    +
  • +

    directly included means the component is directly contained in the download package of devonfw-ide we provide

    +
  • +
  • +

    default setup means the component is not initially included but will be downloaded during the setup by default

    +
  • +
  • +

    optional means the component is neither initially included nor downloaded by default, but only gets downloaded and installed if explicitly triggered by you when invoking additional commands or if explicitly configured by your project.

    +
  • +
+
+
+
Third party components
+

|== == == == == == == == == == == = +|Component|Inclusion|License +|https://github.com/devonfw/ide[devonfw-ide] | Directly included |https://github.com/devonfw/ide/blob/master/LICENSE[ASL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] API | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://github.com/eclipse-ee4j/jsonp[JSON-P] Implementation | Directly included |https://github.com/eclipse-ee4j/jsonp/blob/master/LICENSE.md[EPL 2.0] +|https://openjdk.java.net/[OpenJDK] / AdoptOpenJDK (Java) |Default Setup| GPLv2 +|https://maven.apache.org/[Maven] | Default Setup|https://www.apache.org/licenses/LICENSE-2.0[ASL 2.0] +|https://code.visualstudio.com/[VS Code] |Optional| MIT (Terms) +|https://github.com/devonfw/extension-pack-vscode[extension-pack-vscode] |Optional|https://github.com/devonfw/extension-pack-vscode/blob/master/LICENSE[ASL 2.0] +|https://www.eclipse.org/[Eclipse] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] +|https://github.com/devonfw/cobigen[CobiGen] |Optional|https://github.com/devonfw/cobigen/blob/master/LICENSE.txt[ASL 2.0] +|https://marketplace.eclipse.org/content/tm-terminal[TM Terminal] |Optional|https://www.eclipse.org/legal/epl-2.0/[EPL 2.0] (see here) +|https://github.com/iloveeclipse/anyedittools/[AnyEdit] |Optional|https://github.com/iloveeclipse/anyedittools/blob/master/LICENSE.md[EPL 1.0] +|https://checkstyle.org/eclipse-cs/[EclipseCS] |Optional|https://github.com/checkstyle/eclipse-cs/blob/master/LICENSE[LGPL 2.1] +|https://marketplace.eclipse.org/content/spotbugs-eclipse-plugin[SpotBugs Eclipse plugin] |Optional|https://github.com/spotbugs/spotbugs/blob/master/LICENSE[LGPL 2.1] +|https://www.eclemma.org/[EclEmma] |Optional|https://www.eclemma.org/license.html[EPL 1.0] +|https://basti1302.github.io/startexplorer/[StartExplorer] |Optional|http://www.wtfpl.net/txt/copying/[WTFPL 2] +|http://myregexp.com/eclipsePlugin.html[regex tester] 
|Optional|http://www.gnu.org/licenses/gpl-2.0.html[GPL 2.0] (see here) +|https://github.com/m-m-m/eclipse-templatevariables/[eclipse-templatevariables] |Optional|https://github.com/m-m-m/eclipse-templatevariables/blob/master/LICENSE.txt[ASL 2.0] +|https://nodejs.org/[Node.js] |Default Setup|https://raw.githubusercontent.com/nodejs/node/master/LICENSE[License] +|https://www.npmjs.com/[NPM] |Default Setup|https://github.com/npm/cli/blob/latest/LICENSE[Artistic License 2.0] (Terms) +|https://cli.angular.io/[Angular CLI] (ng) |Optional|https://cli.angular.io/license.html[MIT] +|http://groovy-lang.org/[Groovy]|Optional|https://github.com/apache/groovy/blob/master/LICENSE[ASL 2.0] +|https://ant.apache.org/[Apache Ant]|Optional|https://github.com/apache/ant/blob/master/LICENSE[ASL 2.0] +|https://gradle.org/[Gradle] |Optional|https://github.com/gradle/gradle/blob/master/LICENSE[ASL 2.0] +|https://jenkins.io/[Jenkins] |Optional|https://github.com/jenkinsci/jenkins/blob/master/LICENSE.txt[MIT] +|https://www.sonarsource.com/plans-and-pricing/community/[SonarQube (Community Edition)] |Optional|https://github.com/SonarSource/sonarqube/blob/master/LICENSE.txt[LGPL 3.0] +|https://www.sonarlint.org/eclipse/[SonarLint] |Optional|https://github.com/SonarSource/sonarlint-eclipse/blob/master/LICENSE.txt[LGPL 3+] +|https://github.com/devonfw/cicdgen[cicdgen] |Optional|https://github.com/devonfw/cicdgen/blob/develop/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4j[devon4j] |Optional|https://github.com/devonfw/devon4j/blob/develop/LICENSE[ASL 2.0] +|https://github.com/devonfw/devon4ng[devon4ng] |Optional|https://github.com/devonfw/devon4ng/blob/master/LICENSE.txt[ASL 2.0] +|https://github.com/devonfw/devon4node[devon4node] |Optional|https://github.com/devonfw/devon4node/blob/develop/LICENSE.txt[ASL 2.0] +|https://www.jetbrains.com/idea/[IntelliJ IDEA] |Optional|https://www.jetbrains.com/opensource/idea/[ASL 2.0] +|http://www.jasypt.org/[jasypt] 
|Optional|http://www.jasypt.org/license.html[ASL 2.0] +|https://www.docker.com/[docker]|Optional|https://docs.docker.com/engine/#licensing[ASL 2.0] and EULA +|https://kubernetes.io/[kubernetes]|Optional|https://github.com/kubernetes/kubernetes/blob/master/LICENSE[ASL 2.0] +|== == == == == == == == == == == =

+
+
+
+

Apache Software License - Version 2.0

+
+
+
                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+
+
+

Eclipse Public License - Version 1.0

+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+
+1. DEFINITIONS
+
+"Contribution" means:
+
+a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and
+
+b) in the case of each subsequent Contributor:
+
+i) changes to the Program, and
+
+ii) additions to the Program;
+
+where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program.
+
+"Contributor" means any person or entity that distributes the Program.
+
+"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+"Program" means the Contributions distributed in accordance with this Agreement.
+
+"Recipient" means anyone who receives the Program under this Agreement, including all Contributors.
+
+2. GRANT OF RIGHTS
+
+a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form.
+
+b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+
+c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+
+d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+
+3. REQUIREMENTS
+
+A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that:
+
+a) it complies with the terms and conditions of this Agreement; and
+
+b) its license agreement:
+
+i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+
+ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+
+iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and
+
+iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange.
+
+When the Program is made available in source code form:
+
+a) it must be made available under this Agreement; and
+
+b) a copy of this Agreement must be included with each copy of the Program.
+
+Contributors may not remove or alter any copyright notices contained within the Program.
+
+Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution.
+
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved.
+
+This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation.
+
+
+
+
+

Eclipse Public License - Version 2.0

+
+
+
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE (“AGREEMENT”). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+1. DEFINITIONS
+
+“Contribution” means:
+
+    a) in the case of the initial Contributor, the initial content Distributed under this Agreement, and
+    b) in the case of each subsequent Contributor:
+        i) changes to the Program, and
+        ii) additions to the Program;
+    where such changes and/or additions to the Program originate from and are Distributed by that particular Contributor. A Contribution “originates” from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include changes or additions to the Program that are not Modified Works.
+
+“Contributor” means any person or entity that Distributes the Program.
+
+“Licensed Patents” mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
+
+“Program” means the Contributions Distributed in accordance with this Agreement.
+
+“Recipient” means anyone who receives the Program under this Agreement or any Secondary License (as applicable), including Contributors.
+
+“Derivative Works” shall mean any work, whether in Source Code or other form, that is based on (or derived from) the Program and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship.
+
+“Modified Works” shall mean any work in Source Code or other form that results from an addition to, deletion from, or modification of the contents of the Program, including, for purposes of clarity any new file in Source Code form that contains any contents of the Program. Modified Works shall not include works that contain only declarations, interfaces, types, classes, structures, or files of the Program solely in each case in order to link to, bind by name, or subclass the Program or Modified Works thereof.
+
+“Distribute” means the acts of a) distributing or b) making available in any manner that enables the transfer of a copy.
+
+“Source Code” means the form of a Program preferred for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+“Secondary License” means either the GNU General Public License, Version 2.0, or any later versions of that license, including any exceptions or additional permissions as identified by the initial Contributor.
+2. GRANT OF RIGHTS
+
+    a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, Distribute and sublicense the Contribution of such Contributor, if any, and such Derivative Works.
+    b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in Source Code or other form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
+    c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to Distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+    d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+    e) Notwithstanding the terms of any Secondary License, no Contributor makes additional grants to any Recipient (other than those set forth in this Agreement) as a result of such Recipient's receipt of the Program under the terms of a Secondary License (if permitted under the terms of Section 3).
+
+3. REQUIREMENTS
+
+3.1 If a Contributor Distributes the Program in any form, then:
+
+    a) the Program must also be made available as Source Code, in accordance with section 3.2, and the Contributor must accompany the Program with a statement that the Source Code for the Program is available under this Agreement, and informs Recipients how to obtain it in a reasonable manner on or through a medium customarily used for software exchange; and
+    b) the Contributor may Distribute the Program under a license different than this Agreement, provided that such license:
+        i) effectively disclaims on behalf of all other Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+        ii) effectively excludes on behalf of all other Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
+        iii) does not attempt to limit or alter the recipients' rights in the Source Code under section 3.2; and
+        iv) requires any subsequent distribution of the Program by any party to be under a license that satisfies the requirements of this section 3.
+
+3.2 When the Program is Distributed as Source Code:
+
+    a) it must be made available under this Agreement, or if the Program (i) is combined with other material in a separate file or files made available under a Secondary License, and (ii) the initial Contributor attached to the Source Code the notice described in Exhibit A of this Agreement, then the Program may be made available under the terms of such Secondary Licenses, and
+    b) a copy of this Agreement must be included with each copy of the Program.
+
+3.3 Contributors may not remove or alter any copyright, patent, trademark, attribution notices, disclaimers of warranty, or limitations of liability (‘notices’) contained within the Program from any copy of the Program which they Distribute, provided that Contributors may add their own appropriate notices.
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor (“Commercial Contributor”) hereby agrees to defend and indemnify every other Contributor (“Indemnified Contributor”) against any losses, damages and costs (collectively “Losses”) arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement, including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be Distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to Distribute the Program (including its Contributions) under the new version.
+
+Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. Nothing in this Agreement is intended to be enforceable by any entity that is not a Contributor or Recipient. No third-party beneficiary rights are created under this Agreement.
+Exhibit A – Form of Secondary Licenses Notice
+
+“This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License, v. 2.0 are satisfied: {name license(s), version(s), and exceptions or additional permissions here}.”
+
+    Simply including a copy of this Agreement, including this Exhibit A is not sufficient to license the Source Code under Secondary Licenses.
+
+    If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice.
+
+    You may add additional accurate notices of copyright ownership.
+
+
+
+
+

MIT License

+
+
+
Copyright <YEAR> <COPYRIGHT HOLDER>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+
+
+

Artistic License - Version 2.0

+
+
+
Copyright (c) 2000-2006, The Perl Foundation.
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+Preamble
+
+This license establishes the terms under which a given free software Package may be copied, modified, distributed, and/or redistributed. The intent is that the Copyright Holder maintains some artistic control over the development of that Package while still keeping the Package available as open source and free software.
+
+You are always permitted to make arrangements wholly outside of this license directly with the Copyright Holder of a given Package. If the terms of this license do not permit the full use that you propose to make of the Package, you should contact the Copyright Holder and seek a different licensing arrangement.
+Definitions
+
+"Copyright Holder" means the individual(s) or organization(s) named in the copyright notice for the entire Package.
+
+"Contributor" means any party that has contributed code or other material to the Package, in accordance with the Copyright Holder's procedures.
+
+"You" and "your" means any person who would like to copy, distribute, or modify the Package.
+
+"Package" means the collection of files distributed by the Copyright Holder, and derivatives of that collection and/or of those files. A given Package may consist of either the Standard Version, or a Modified Version.
+
+"Distribute" means providing a copy of the Package or making it accessible to anyone else, or in the case of a company or organization, to others outside of your company or organization.
+
+"Distributor Fee" means any fee that you charge for Distributing this Package or providing support for this Package to another party. It does not mean licensing fees.
+
+"Standard Version" refers to the Package if it has not been modified, or has been modified only in ways explicitly requested by the Copyright Holder.
+
+"Modified Version" means the Package, if it has been changed, and such changes were not explicitly requested by the Copyright Holder.
+
+"Original License" means this Artistic License as Distributed with the Standard Version of the Package, in its current version or as it may be modified by The Perl Foundation in the future.
+
+"Source" form means the source code, documentation source, and configuration files for the Package.
+
+"Compiled" form means the compiled bytecode, object code, binary, or any other form resulting from mechanical transformation or translation of the Source form.
+Permission for Use and Modification Without Distribution
+
+(1) You are permitted to use the Standard Version and create and use Modified Versions for any purpose without restriction, provided that you do not Distribute the Modified Version.
+Permissions for Redistribution of the Standard Version
+
+(2) You may Distribute verbatim copies of the Source form of the Standard Version of this Package in any medium without restriction, either gratis or for a Distributor Fee, provided that you duplicate all of the original copyright notices and associated disclaimers. At your discretion, such verbatim copies may or may not include a Compiled form of the Package.
+
+(3) You may apply any bug fixes, portability changes, and other modifications made available from the Copyright Holder. The resulting Package will still be considered the Standard Version, and as such will be subject to the Original License.
+Distribution of Modified Versions of the Package as Source
+
+(4) You may Distribute your Modified Version as Source (either gratis or for a Distributor Fee, and with or without a Compiled form of the Modified Version) provided that you clearly document how it differs from the Standard Version, including, but not limited to, documenting any non-standard features, executables, or modules, and provided that you do at least ONE of the following:
+
+(a) make the Modified Version available to the Copyright Holder of the Standard Version, under the Original License, so that the Copyright Holder may include your modifications in the Standard Version.
+(b) ensure that installation of your Modified Version does not prevent the user installing or running the Standard Version. In addition, the Modified Version must bear a name that is different from the name of the Standard Version.
+(c) allow anyone who receives a copy of the Modified Version to make the Source form of the Modified Version available to others under
+(i) the Original License or
+(ii) a license that permits the licensee to freely copy, modify and redistribute the Modified Version using the same licensing terms that apply to the copy that the licensee received, and requires that the Source form of the Modified Version, and of any works derived from it, be made freely available in that license fees are prohibited but Distributor Fees are allowed.
+Distribution of Compiled Forms of the Standard Version or Modified Versions without the Source
+
+(5) You may Distribute Compiled forms of the Standard Version without the Source, provided that you include complete instructions on how to get the Source of the Standard Version. Such instructions must be valid at the time of your distribution. If these instructions, at any time while you are carrying out such distribution, become invalid, you must provide new instructions on demand or cease further distribution. If you provide valid instructions or cease distribution within thirty days after you become aware that the instructions are invalid, then you do not forfeit any of your rights under this license.
+
+(6) You may Distribute a Modified Version in Compiled form without the Source, provided that you comply with Section 4 with respect to the Source of the Modified Version.
+Aggregating or Linking the Package
+
+(7) You may aggregate the Package (either the Standard Version or Modified Version) with other packages and Distribute the resulting aggregation provided that you do not charge a licensing fee for the Package. Distributor Fees are permitted, and licensing fees for other components in the aggregation are permitted. The terms of this license apply to the use and Distribution of the Standard or Modified Versions as included in the aggregation.
+
+(8) You are permitted to link Modified and Standard Versions with other works, to embed the Package in a larger work of your own, or to build stand-alone binary or bytecode versions of applications that include the Package, and Distribute the result without restriction, provided the result does not expose a direct interface to the Package.
+Items That are Not Considered Part of a Modified Version
+
+(9) Works (including, but not limited to, modules and scripts) that merely extend or make use of the Package, do not, by themselves, cause the Package to be a Modified Version. In addition, such works are not considered parts of the Package itself, and are not subject to the terms of this license.
+General Provisions
+
+(10) Any use, modification, and distribution of the Standard or Modified Versions is governed by this Artistic License. By using, modifying or distributing the Package, you accept this license. Do not use, modify, or distribute the Package, if you do not accept this license.
+
+(11) If your Modified Version has been derived from a Modified Version made by someone other than you, you are nevertheless required to ensure that your Modified Version complies with the requirements of this license.
+
+(12) This license does not grant you the right to use any trademark, service mark, tradename, or logo of the Copyright Holder.
+
+(13) This license includes the non-exclusive, worldwide, free-of-charge patent license to make, have made, use, offer to sell, sell, import and otherwise transfer the Package with respect to any patent claims licensable by the Copyright Holder that are necessarily infringed by the Package. If you institute patent litigation (including a cross-claim or counterclaim) against any party alleging that the Package constitutes direct or contributory patent infringement, then this Artistic License to you shall terminate on the date that such litigation is filed.
+
+(14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+
+

Creative Commons License - Attribution-NoDerivatives 4.0 International

+
+
+
By exercising the Licensed Rights (defined below), You accept and agree to be bound by the terms and conditions of this Creative Commons Attribution-NoDerivatives 4.0 International Public License ("Public License"). To the extent this Public License may be interpreted as a contract, You are granted the Licensed Rights in consideration of Your acceptance of these terms and conditions, and the Licensor grants You such rights in consideration of benefits the Licensor receives from making the Licensed Material available under these terms and conditions.
+
+Section 1 – Definitions.
+
+    Adapted Material means material subject to Copyright and Similar Rights that is derived from or based upon the Licensed Material and in which the Licensed Material is translated, altered, arranged, transformed, or otherwise modified in a manner requiring permission under the Copyright and Similar Rights held by the Licensor. For purposes of this Public License, where the Licensed Material is a musical work, performance, or sound recording, Adapted Material is always produced where the Licensed Material is synched in timed relation with a moving image.
+    Copyright and Similar Rights means copyright and/or similar rights closely related to copyright including, without limitation, performance, broadcast, sound recording, and Sui Generis Database Rights, without regard to how the rights are labeled or categorized. For purposes of this Public License, the rights specified in Section 2(b)(1)-(2) are not Copyright and Similar Rights.
+    Effective Technological Measures means those measures that, in the absence of proper authority, may not be circumvented under laws fulfilling obligations under Article 11 of the WIPO Copyright Treaty adopted on December 20, 1996, and/or similar international agreements.
+    Exceptions and Limitations means fair use, fair dealing, and/or any other exception or limitation to Copyright and Similar Rights that applies to Your use of the Licensed Material.
+    Licensed Material means the artistic or literary work, database, or other material to which the Licensor applied this Public License.
+    Licensed Rights means the rights granted to You subject to the terms and conditions of this Public License, which are limited to all Copyright and Similar Rights that apply to Your use of the Licensed Material and that the Licensor has authority to license.
+    Licensor means the individual(s) or entity(ies) granting rights under this Public License.
+    Share means to provide material to the public by any means or process that requires permission under the Licensed Rights, such as reproduction, public display, public performance, distribution, dissemination, communication, or importation, and to make material available to the public including in ways that members of the public may access the material from a place and at a time individually chosen by them.
+    Sui Generis Database Rights means rights other than copyright resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other essentially equivalent rights anywhere in the world.
+    You means the individual or entity exercising the Licensed Rights under this Public License. Your has a corresponding meaning.
+
+Section 2 – Scope.
+
+    License grant.
+        Subject to the terms and conditions of this Public License, the Licensor hereby grants You a worldwide, royalty-free, non-sublicensable, non-exclusive, irrevocable license to exercise the Licensed Rights in the Licensed Material to:
+            reproduce and Share the Licensed Material, in whole or in part; and
+            produce and reproduce, but not Share, Adapted Material.
+        Exceptions and Limitations. For the avoidance of doubt, where Exceptions and Limitations apply to Your use, this Public License does not apply, and You do not need to comply with its terms and conditions.
+        Term. The term of this Public License is specified in Section 6(a).
+        Media and formats; technical modifications allowed. The Licensor authorizes You to exercise the Licensed Rights in all media and formats whether now known or hereafter created, and to make technical modifications necessary to do so. The Licensor waives and/or agrees not to assert any right or authority to forbid You from making technical modifications necessary to exercise the Licensed Rights, including technical modifications necessary to circumvent Effective Technological Measures. For purposes of this Public License, simply making modifications authorized by this Section 2(a)(4) never produces Adapted Material.
+        Downstream recipients.
+            Offer from the Licensor – Licensed Material. Every recipient of the Licensed Material automatically receives an offer from the Licensor to exercise the Licensed Rights under the terms and conditions of this Public License.
+            No downstream restrictions. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, the Licensed Material if doing so restricts exercise of the Licensed Rights by any recipient of the Licensed Material.
+        No endorsement. Nothing in this Public License constitutes or may be construed as permission to assert or imply that You are, or that Your use of the Licensed Material is, connected with, or sponsored, endorsed, or granted official status by, the Licensor or others designated to receive attribution as provided in Section 3(a)(1)(A)(i).
+
+    Other rights.
+        Moral rights, such as the right of integrity, are not licensed under this Public License, nor are publicity, privacy, and/or other similar personality rights; however, to the extent possible, the Licensor waives and/or agrees not to assert any such rights held by the Licensor to the limited extent necessary to allow You to exercise the Licensed Rights, but not otherwise.
+        Patent and trademark rights are not licensed under this Public License.
+        To the extent possible, the Licensor waives any right to collect royalties from You for the exercise of the Licensed Rights, whether directly or through a collecting society under any voluntary or waivable statutory or compulsory licensing scheme. In all other cases the Licensor expressly reserves any right to collect such royalties.
+
+Section 3 – License Conditions.
+
+Your exercise of the Licensed Rights is expressly made subject to the following conditions.
+
+    Attribution.
+
+        If You Share the Licensed Material, You must:
+            retain the following if it is supplied by the Licensor with the Licensed Material:
+                identification of the creator(s) of the Licensed Material and any others designated to receive attribution, in any reasonable manner requested by the Licensor (including by pseudonym if designated);
+                a copyright notice;
+                a notice that refers to this Public License;
+                a notice that refers to the disclaimer of warranties;
+                a URI or hyperlink to the Licensed Material to the extent reasonably practicable;
+            indicate if You modified the Licensed Material and retain an indication of any previous modifications; and
+            indicate the Licensed Material is licensed under this Public License, and include the text of, or the URI or hyperlink to, this Public License.
+        For the avoidance of doubt, You do not have permission under this Public License to Share Adapted Material.
+        You may satisfy the conditions in Section 3(a)(1) in any reasonable manner based on the medium, means, and context in which You Share the Licensed Material. For example, it may be reasonable to satisfy the conditions by providing a URI or hyperlink to a resource that includes the required information.
+        If requested by the Licensor, You must remove any of the information required by Section 3(a)(1)(A) to the extent reasonably practicable.
+
+Section 4 – Sui Generis Database Rights.
+
+Where the Licensed Rights include Sui Generis Database Rights that apply to Your use of the Licensed Material:
+
+    for the avoidance of doubt, Section 2(a)(1) grants You the right to extract, reuse, reproduce, and Share all or a substantial portion of the contents of the database, provided You do not Share Adapted Material;
+    if You include all or a substantial portion of the database contents in a database in which You have Sui Generis Database Rights, then the database in which You have Sui Generis Database Rights (but not its individual contents) is Adapted Material; and
+    You must comply with the conditions in Section 3(a) if You Share all or a substantial portion of the contents of the database.
+
+For the avoidance of doubt, this Section 4 supplements and does not replace Your obligations under this Public License where the Licensed Rights include other Copyright and Similar Rights.
+
+Section 5 – Disclaimer of Warranties and Limitation of Liability.
+
+    Unless otherwise separately undertaken by the Licensor, to the extent possible, the Licensor offers the Licensed Material as-is and as-available, and makes no representations or warranties of any kind concerning the Licensed Material, whether express, implied, statutory, or other. This includes, without limitation, warranties of title, merchantability, fitness for a particular purpose, non-infringement, absence of latent or other defects, accuracy, or the presence or absence of errors, whether or not known or discoverable. Where disclaimers of warranties are not allowed in full or in part, this disclaimer may not apply to You.
+    To the extent possible, in no event will the Licensor be liable to You on any legal theory (including, without limitation, negligence) or otherwise for any direct, special, indirect, incidental, consequential, punitive, exemplary, or other losses, costs, expenses, or damages arising out of this Public License or use of the Licensed Material, even if the Licensor has been advised of the possibility of such losses, costs, expenses, or damages. Where a limitation of liability is not allowed in full or in part, this limitation may not apply to You.
+
+    The disclaimer of warranties and limitation of liability provided above shall be interpreted in a manner that, to the extent possible, most closely approximates an absolute disclaimer and waiver of all liability.
+
+Section 6 – Term and Termination.
+
+    This Public License applies for the term of the Copyright and Similar Rights licensed here. However, if You fail to comply with this Public License, then Your rights under this Public License terminate automatically.
+
+    Where Your right to use the Licensed Material has terminated under Section 6(a), it reinstates:
+        automatically as of the date the violation is cured, provided it is cured within 30 days of Your discovery of the violation; or
+        upon express reinstatement by the Licensor.
+    For the avoidance of doubt, this Section 6(b) does not affect any right the Licensor may have to seek remedies for Your violations of this Public License.
+    For the avoidance of doubt, the Licensor may also offer the Licensed Material under separate terms or conditions or stop distributing the Licensed Material at any time; however, doing so will not terminate this Public License.
+    Sections 1, 5, 6, 7, and 8 survive termination of this Public License.
+
+Section 7 – Other Terms and Conditions.
+
+    The Licensor shall not be bound by any additional or different terms or conditions communicated by You unless expressly agreed.
+    Any arrangements, understandings, or agreements regarding the Licensed Material not stated herein are separate from and independent of the terms and conditions of this Public License.
+
+Section 8 – Interpretation.
+
+    For the avoidance of doubt, this Public License does not, and shall not be interpreted to, reduce, limit, restrict, or impose conditions on any use of the Licensed Material that could lawfully be made without permission under this Public License.
+    To the extent possible, if any provision of this Public License is deemed unenforceable, it shall be automatically reformed to the minimum extent necessary to make it enforceable. If the provision cannot be reformed, it shall be severed from this Public License without affecting the enforceability of the remaining terms and conditions.
+    No term or condition of this Public License will be waived and no failure to comply consented to unless expressly agreed to by the Licensor.
+    Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority.
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 2.1

+
+
+
 Version 2.1, February 1999
+
+Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL.  It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users.
+
+This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below.
+
+When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things.
+
+To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it.
+
+For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights.
+
+We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library.
+
+To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others.
+
+Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license.
+
+Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs.
+
+When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library.
+
+We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances.
+
+For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License.
+
+In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system.
+
+Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library.
+
+The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you".
+
+A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables.
+
+The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".)
+
+"Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library.
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does.
+
+1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) The modified work must itself be a software library.
+    b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change.
+    c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License.
+    d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful.
+
+    (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices.
+
+Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy.
+
+This option is useful when you wish to copy part of the code of the Library into a program that is not a library.
+
+4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange.
+
+If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code.
+
+5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License.
+
+However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables.
+
+When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law.
+
+If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.)
+
+Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself.
+
+6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications.
+
+You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things:
+
+    a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.)
+    b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with.
+    c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution.
+    d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place.
+    e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy.
+
+For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute.
+
+7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above.
+    b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it.
+
+10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License.
+
+11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation.
+
+14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Libraries
+
+If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License).
+
+To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the library's name and an idea of what it does.
+Copyright (C) year  name of author
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2.1 of the License, or (at your option) any later version.
+
+This library is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with this library; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the library, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright interest in
+the library `Frob' (a library for tweaking knobs) written
+by James Random Hacker.
+
+signature of Ty Coon, 1 April 1990
+Ty Coon, President of Vice
+
+
+
+
+

GNU LESSER GENERAL PUBLIC LICENSE - Version 3

+
+
+
Version 3, 29 June 2007
+
+Copyright © 2007 Free Software Foundation, Inc. <https://fsf.org/>
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+
+This version of the GNU Lesser General Public License incorporates the terms and conditions of version 3 of the GNU General Public License, supplemented by the additional permissions listed below.
+0. Additional Definitions.
+
+As used herein, “this License” refers to version 3 of the GNU Lesser General Public License, and the “GNU GPL” refers to version 3 of the GNU General Public License.
+
+“The Library” refers to a covered work governed by this License, other than an Application or a Combined Work as defined below.
+
+An “Application” is any work that makes use of an interface provided by the Library, but which is not otherwise based on the Library. Defining a subclass of a class defined by the Library is deemed a mode of using an interface provided by the Library.
+
+A “Combined Work” is a work produced by combining or linking an Application with the Library. The particular version of the Library with which the Combined Work was made is also called the “Linked Version”.
+
+The “Minimal Corresponding Source” for a Combined Work means the Corresponding Source for the Combined Work, excluding any source code for portions of the Combined Work that, considered in isolation, are based on the Application, and not on the Linked Version.
+
+The “Corresponding Application Code” for a Combined Work means the object code and/or source code for the Application, including any data and utility programs needed for reproducing the Combined Work from the Application, but excluding the System Libraries of the Combined Work.
+1. Exception to Section 3 of the GNU GPL.
+
+You may convey a covered work under sections 3 and 4 of this License without being bound by section 3 of the GNU GPL.
+2. Conveying Modified Versions.
+
+If you modify a copy of the Library, and, in your modifications, a facility refers to a function or data to be supplied by an Application that uses the facility (other than as an argument passed when the facility is invoked), then you may convey a copy of the modified version:
+
+    a) under this License, provided that you make a good faith effort to ensure that, in the event an Application does not supply the function or data, the facility still operates, and performs whatever part of its purpose remains meaningful, or
+    b) under the GNU GPL, with none of the additional permissions of this License applicable to that copy.
+
+3. Object Code Incorporating Material from Library Header Files.
+
+The object code form of an Application may incorporate material from a header file that is part of the Library. You may convey such object code under terms of your choice, provided that, if the incorporated material is not limited to numerical parameters, data structure layouts and accessors, or small macros, inline functions and templates (ten or fewer lines in length), you do both of the following:
+
+    a) Give prominent notice with each copy of the object code that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the object code with a copy of the GNU GPL and this license document.
+
+4. Combined Works.
+
+You may convey a Combined Work under terms of your choice that, taken together, effectively do not restrict modification of the portions of the Library contained in the Combined Work and reverse engineering for debugging such modifications, if you also do each of the following:
+
+    a) Give prominent notice with each copy of the Combined Work that the Library is used in it and that the Library and its use are covered by this License.
+    b) Accompany the Combined Work with a copy of the GNU GPL and this license document.
+    c) For a Combined Work that displays copyright notices during execution, include the copyright notice for the Library among these notices, as well as a reference directing the user to the copies of the GNU GPL and this license document.
+    d) Do one of the following:
+        0) Convey the Minimal Corresponding Source under the terms of this License, and the Corresponding Application Code in a form suitable for, and under terms that permit, the user to recombine or relink the Application with a modified version of the Linked Version to produce a modified Combined Work, in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.
+        1) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (a) uses at run time a copy of the Library already present on the user's computer system, and (b) will operate properly with a modified version of the Library that is interface-compatible with the Linked Version.
+    e) Provide Installation Information, but only if you would otherwise be required to provide such information under section 6 of the GNU GPL, and only to the extent that such information is necessary to install and execute a modified version of the Combined Work produced by recombining or relinking the Application with a modified version of the Linked Version. (If you use option 4d0, the Installation Information must accompany the Minimal Corresponding Source and Corresponding Application Code. If you use option 4d1, you must provide the Installation Information in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.)
+
+5. Combined Libraries.
+
+You may place library facilities that are a work based on the Library side by side in a single library together with other library facilities that are not Applications and are not covered by this License, and convey such a combined library under terms of your choice, if you do both of the following:
+
+    a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities, conveyed under the terms of this License.
+    b) Give prominent notice with the combined library that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+
+6. Revised Versions of the GNU Lesser General Public License.
+
+The Free Software Foundation may publish revised and/or new versions of the GNU Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library as you received it specifies that a certain numbered version of the GNU Lesser General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that published version or of any later version published by the Free Software Foundation. If the Library as you received it does not specify a version number of the GNU Lesser General Public License, you may choose any version of the GNU Lesser General Public License ever published by the Free Software Foundation.
+
+If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library.
+
+
+
+
+

GNU GENERAL PUBLIC LICENSE - Version 2

+
+
+
 Version 2, June 1991
+
+Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA
+
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+Preamble
+
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too.
+
+When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things.
+
+To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it.
+
+For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights.
+
+We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software.
+
+Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations.
+
+Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all.
+
+The precise terms and conditions for copying, distribution and modification follow.
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does.
+
+1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change.
+    b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License.
+    c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
+    c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code.
+
+4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it.
+
+6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License.
+
+7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation.
+
+10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+END OF TERMS AND CONDITIONS
+How to Apply These Terms to Your New Programs
+
+If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms.
+
+To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
+
+one line to give the program's name and an idea of what it does.
+Copyright (C) yyyy  name of author
+
+This program is free software; you can redistribute it and/or
+modify it under the terms of the GNU General Public License
+as published by the Free Software Foundation; either version 2
+of the License, or (at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this when it starts in an interactive mode:
+
+Gnomovision version 69, Copyright (C) year name of author
+Gnomovision comes with ABSOLUTELY NO WARRANTY; for details
+type `show w'.  This is free software, and you are welcome
+to redistribute it under certain conditions; type `show c'
+for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names:
+
+Yoyodyne, Inc., hereby disclaims all copyright
+interest in the program `Gnomovision'
+(which makes passes at compilers) written
+by James Hacker.
+
+signature of Ty Coon, 1 April 1989
+Ty Coon, President of Vice
+
+
+
+
+

DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2

+
+
+
            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+                    Version 2, December 2004
+
+ Copyright (C) 2004 Sam Hocevar
+  14 rue de Plaisance, 75014 Paris, France
+ Everyone is permitted to copy and distribute verbatim or modified
+ copies of this license document, and changing it is allowed as long
+ as the name is changed.
+
+            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. You just DO WHAT THE FUCK YOU WANT TO.
+
+
+
+
+

License of Node.js

+
+
+
Node.js is licensed for use as follows:
+
+"""
+Copyright Node.js contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+This license applies to parts of Node.js originating from the
+https://github.com/joyent/node repository:
+
+"""
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+The Node.js license applies to all parts of Node.js that are not externally
+maintained libraries.
+
+The externally maintained libraries used by Node.js are:
+
+- Acorn, located at deps/acorn, is licensed as follows:
+  """
+    Copyright (C) 2012-2018 by various contributors (see AUTHORS)
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- Acorn plugins, located at deps/acorn-plugins, is licensed as follows:
+  """
+    Copyright (C) 2017-2018 by Adrian Heine
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- c-ares, located at deps/cares, is licensed as follows:
+  """
+    Copyright (c) 2007 - 2018, Daniel Stenberg with many contributors, see AUTHORS
+    file.
+
+    Copyright 1998 by the Massachusetts Institute of Technology.
+
+    Permission to use, copy, modify, and distribute this software and its
+    documentation for any purpose and without fee is hereby granted, provided that
+    the above copyright notice appear in all copies and that both that copyright
+    notice and this permission notice appear in supporting documentation, and that
+    the name of M.I.T. not be used in advertising or publicity pertaining to
+    distribution of the software without specific, written prior permission.
+    M.I.T. makes no representations about the suitability of this software for any
+    purpose.  It is provided "as is" without express or implied warranty.
+  """
+
+- ICU, located at deps/icu-small, is licensed as follows:
+  """
+    COPYRIGHT AND PERMISSION NOTICE (ICU 58 and later)
+
+    Copyright © 1991-2019 Unicode, Inc. All rights reserved.
+    Distributed under the Terms of Use in https://www.unicode.org/copyright.html.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of the Unicode data files and any associated documentation
+    (the "Data Files") or Unicode software and any associated documentation
+    (the "Software") to deal in the Data Files or Software
+    without restriction, including without limitation the rights to use,
+    copy, modify, merge, publish, distribute, and/or sell copies of
+    the Data Files or Software, and to permit persons to whom the Data Files
+    or Software are furnished to do so, provided that either
+    (a) this copyright and permission notice appear with all copies
+    of the Data Files or Software, or
+    (b) this copyright and permission notice appear in associated
+    Documentation.
+
+    THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
+    ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT OF THIRD PARTY RIGHTS.
+    IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
+    NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
+    DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+    DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+    TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+    PERFORMANCE OF THE DATA FILES OR SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale,
+    use or other dealings in these Data Files or Software without prior
+    written authorization of the copyright holder.
+
+    ---------------------
+
+    Third-Party Software Licenses
+
+    This section contains third-party software notices and/or additional
+    terms for licensed third-party software components included within ICU
+    libraries.
+
+    1. ICU License - ICU 1.8.1 to ICU 57.1
+
+    COPYRIGHT AND PERMISSION NOTICE
+
+    Copyright (c) 1995-2016 International Business Machines Corporation and others
+    All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, and/or sell copies of the Software, and to permit persons
+    to whom the Software is furnished to do so, provided that the above
+    copyright notice(s) and this permission notice appear in all copies of
+    the Software and that both the above copyright notice(s) and this
+    permission notice appear in supporting documentation.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
+    OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
+    HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY
+    SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER
+    RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
+    CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+    CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder
+    shall not be used in advertising or otherwise to promote the sale, use
+    or other dealings in this Software without prior written authorization
+    of the copyright holder.
+
+    All trademarks and registered trademarks mentioned herein are the
+    property of their respective owners.
+
+    2. Chinese/Japanese Word Break Dictionary Data (cjdict.txt)
+
+     #     The Google Chrome software developed by Google is licensed under
+     # the BSD license. Other software included in this distribution is
+     # provided under other licenses, as set forth below.
+     #
+     #  The BSD License
+     #  http://opensource.org/licenses/bsd-license.php
+     #  Copyright (C) 2006-2008, Google Inc.
+     #
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     # modification, are permitted provided that the following conditions are met:
+     #
+     #  Redistributions of source code must retain the above copyright notice,
+     # this list of conditions and the following disclaimer.
+     #  Redistributions in binary form must reproduce the above
+     # copyright notice, this list of conditions and the following
+     # disclaimer in the documentation and/or other materials provided with
+     # the distribution.
+     #  Neither the name of  Google Inc. nor the names of its
+     # contributors may be used to endorse or promote products derived from
+     # this software without specific prior written permission.
+     #
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+     # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+     # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+     # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+     # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+     # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+     #
+     #
+     #  The word list in cjdict.txt are generated by combining three word lists
+     # listed below with further processing for compound word breaking. The
+     # frequency is generated with an iterative training against Google web
+     # corpora.
+     #
+     #  * Libtabe (Chinese)
+     #    - https://sourceforge.net/project/?group_id=1519
+     #    - Its license terms and conditions are shown below.
+     #
+     #  * IPADIC (Japanese)
+     #    - http://chasen.aist-nara.ac.jp/chasen/distribution.html
+     #    - Its license terms and conditions are shown below.
+     #
+     #  ---------COPYING.libtabe ---- BEGIN--------------------
+     #
+     #  /*
+     #   * Copyright (c) 1999 TaBE Project.
+     #   * Copyright (c) 1999 Pai-Hsiang Hsiao.
+     #   * All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the TaBE Project nor the names of its
+     #   *   contributors may be used to endorse or promote products derived
+     #   *   from this software without specific prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  /*
+     #   * Copyright (c) 1999 Computer Systems and Communication Lab,
+     #   *                    Institute of Information Science, Academia
+     #   *                    Sinica. All rights reserved.
+     #   *
+     #   * Redistribution and use in source and binary forms, with or without
+     #   * modification, are permitted provided that the following conditions
+     #   * are met:
+     #   *
+     #   * . Redistributions of source code must retain the above copyright
+     #   *   notice, this list of conditions and the following disclaimer.
+     #   * . Redistributions in binary form must reproduce the above copyright
+     #   *   notice, this list of conditions and the following disclaimer in
+     #   *   the documentation and/or other materials provided with the
+     #   *   distribution.
+     #   * . Neither the name of the Computer Systems and Communication Lab
+     #   *   nor the names of its contributors may be used to endorse or
+     #   *   promote products derived from this software without specific
+     #   *   prior written permission.
+     #   *
+     #   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     #   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     #   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     #   * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     #   * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+     #   * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     #   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     #   * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     #   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     #   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     #   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     #   * OF THE POSSIBILITY OF SUCH DAMAGE.
+     #   */
+     #
+     #  Copyright 1996 Chih-Hao Tsai @ Beckman Institute,
+     #      University of Illinois
+     #  c-tsai4@uiuc.edu  http://casper.beckman.uiuc.edu/~c-tsai4
+     #
+     #  ---------------COPYING.libtabe-----END--------------------------------
+     #
+     #
+     #  ---------------COPYING.ipadic-----BEGIN-------------------------------
+     #
+     #  Copyright 2000, 2001, 2002, 2003 Nara Institute of Science
+     #  and Technology.  All Rights Reserved.
+     #
+     #  Use, reproduction, and distribution of this software is permitted.
+     #  Any copy of this software, whether in its original form or modified,
+     #  must include both the above copyright notice and the following
+     #  paragraphs.
+     #
+     #  Nara Institute of Science and Technology (NAIST),
+     #  the copyright holders, disclaims all warranties with regard to this
+     #  software, including all implied warranties of merchantability and
+     #  fitness, in no event shall NAIST be liable for
+     #  any special, indirect or consequential damages or any damages
+     #  whatsoever resulting from loss of use, data or profits, whether in an
+     #  action of contract, negligence or other tortuous action, arising out
+     #  of or in connection with the use or performance of this software.
+     #
+     #  A large portion of the dictionary entries
+     #  originate from ICOT Free Software.  The following conditions for ICOT
+     #  Free Software applies to the current dictionary as well.
+     #
+     #  Each User may also freely distribute the Program, whether in its
+     #  original form or modified, to any third party or parties, PROVIDED
+     #  that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear
+     #  on, or be attached to, the Program, which is distributed substantially
+     #  in the same form as set out herein and that such intended
+     #  distribution, if actually made, will neither violate or otherwise
+     #  contravene any of the laws and regulations of the countries having
+     #  jurisdiction over the User or the intended distribution itself.
+     #
+     #  NO WARRANTY
+     #
+     #  The program was produced on an experimental basis in the course of the
+     #  research and development conducted during the project and is provided
+     #  to users as so produced on an experimental basis.  Accordingly, the
+     #  program is provided without any warranty whatsoever, whether express,
+     #  implied, statutory or otherwise.  The term "warranty" used herein
+     #  includes, but is not limited to, any warranty of the quality,
+     #  performance, merchantability and fitness for a particular purpose of
+     #  the program and the nonexistence of any infringement or violation of
+     #  any right of any third party.
+     #
+     #  Each user of the program will agree and understand, and be deemed to
+     #  have agreed and understood, that there is no warranty whatsoever for
+     #  the program and, accordingly, the entire risk arising from or
+     #  otherwise connected with the program is assumed by the user.
+     #
+     #  Therefore, neither ICOT, the copyright holder, or any other
+     #  organization that participated in or was otherwise related to the
+     #  development of the program and their respective officials, directors,
+     #  officers and other employees shall be held liable for any and all
+     #  damages, including, without limitation, general, special, incidental
+     #  and consequential damages, arising out of or otherwise in connection
+     #  with the use or inability to use the program or any product, material
+     #  or result produced or otherwise obtained by using the program,
+     #  regardless of whether they have been advised of, or otherwise had
+     #  knowledge of, the possibility of such damages at any time during the
+     #  project or thereafter.  Each user will be deemed to have agreed to the
+     #  foregoing by his or her commencement of use of the program.  The term
+     #  "use" as used herein includes, but is not limited to, the use,
+     #  modification, copying and distribution of the program and the
+     #  production of secondary products from the program.
+     #
+     #  In the case where the program, whether in its original form or
+     #  modified, was distributed or delivered to or received by a user from
+     #  any person, organization or entity other than ICOT, unless it makes or
+     #  grants independently of ICOT any specific warranty to the user in
+     #  writing, such person, organization or entity, will also be exempted
+     #  from and not be held liable to the user for any such damages as noted
+     #  above as far as the program is concerned.
+     #
+     #  ---------------COPYING.ipadic-----END----------------------------------
+
+    3. Lao Word Break Dictionary Data (laodict.txt)
+
+     #  Copyright (c) 2013 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     # Project: http://code.google.com/p/lao-dictionary/
+     # Dictionary: http://lao-dictionary.googlecode.com/git/Lao-Dictionary.txt
+     # License: http://lao-dictionary.googlecode.com/git/Lao-Dictionary-LICENSE.txt
+     #              (copied below)
+     #
+     #  This file is derived from the above dictionary, with slight
+     #  modifications.
+     #  ----------------------------------------------------------------------
+     #  Copyright (C) 2013 Brian Eugene Wilson, Robert Martin Campbell.
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification,
+     #  are permitted provided that the following conditions are met:
+     #
+     #
+     # Redistributions of source code must retain the above copyright notice, this
+     #  list of conditions and the following disclaimer. Redistributions in
+     #  binary form must reproduce the above copyright notice, this list of
+     #  conditions and the following disclaimer in the documentation and/or
+     #  other materials provided with the distribution.
+     #
+     #
+     # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+     # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+     # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+     # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+     # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
+     # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+     # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+     # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+     # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+     # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+     # OF THE POSSIBILITY OF SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    4. Burmese Word Break Dictionary Data (burmesedict.txt)
+
+     #  Copyright (c) 2014 International Business Machines Corporation
+     #  and others. All Rights Reserved.
+     #
+     #  This list is part of a project hosted at:
+     #    github.com/kanyawtech/myanmar-karen-word-lists
+     #
+     #  --------------------------------------------------------------------------
+     #  Copyright (c) 2013, LeRoy Benjamin Sharon
+     #  All rights reserved.
+     #
+     #  Redistribution and use in source and binary forms, with or without
+     #  modification, are permitted provided that the following conditions
+     #  are met: Redistributions of source code must retain the above
+     #  copyright notice, this list of conditions and the following
+     #  disclaimer.  Redistributions in binary form must reproduce the
+     #  above copyright notice, this list of conditions and the following
+     #  disclaimer in the documentation and/or other materials provided
+     #  with the distribution.
+     #
+     #    Neither the name Myanmar Karen Word Lists, nor the names of its
+     #    contributors may be used to endorse or promote products derived
+     #    from this software without specific prior written permission.
+     #
+     #  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+     #  CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+     #  INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     #  MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+     #  DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
+     #  BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+     #  EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+     #  TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+     #  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+     #  ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
+     #  TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
+     #  THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+     #  SUCH DAMAGE.
+     #  --------------------------------------------------------------------------
+
+    5. Time Zone Database
+
+      ICU uses the public domain data and code derived from Time Zone
+    Database for its time zone support. The ownership of the TZ database
+    is explained in BCP 175: Procedure for Maintaining the Time Zone
+    Database section 7.
+
+     # 7.  Database Ownership
+     #
+     #    The TZ database itself is not an IETF Contribution or an IETF
+     #    document.  Rather it is a pre-existing and regularly updated work
+     #    that is in the public domain, and is intended to remain in the
+     #    public domain.  Therefore, BCPs 78 [RFC5378] and 79 [RFC3979] do
+     #    not apply to the TZ Database or contributions that individuals make
+     #    to it.  Should any claims be made and substantiated against the TZ
+     #    Database, the organization that is providing the IANA
+     #    Considerations defined in this RFC, under the memorandum of
+     #    understanding with the IETF, currently ICANN, may act in accordance
+     #    with all competent court orders.  No ownership claims will be made
+     #    by ICANN or the IETF Trust on the database or the code.  Any person
+     #    making a contribution to the database or code waives all rights to
+     #    future claims in that contribution or in the TZ Database.
+
+    6. Google double-conversion
+
+    Copyright 2006-2011, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- libuv, located at deps/uv, is licensed as follows:
+  """
+    libuv is licensed for use as follows:
+
+    ====
+    Copyright (c) 2015-present libuv project contributors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+    ====
+
+    This license applies to parts of libuv originating from the
+    https://github.com/joyent/libuv repository:
+
+    ====
+
+    Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+
+    ====
+
+    This license applies to all parts of libuv that are not externally
+    maintained libraries.
+
+    The externally maintained libraries used by libuv are:
+
+      - tree.h (from FreeBSD), copyright Niels Provos. Two clause BSD license.
+
+      - inet_pton and inet_ntop implementations, contained in src/inet.c, are
+        copyright the Internet Systems Consortium, Inc., and licensed under the ISC
+        license.
+
+      - stdint-msvc2008.h (from msinttypes), copyright Alexander Chemeris. Three
+        clause BSD license.
+
+      - pthread-fixes.c, copyright Google Inc. and Sony Mobile Communications AB.
+        Three clause BSD license.
+
+      - android-ifaddrs.h, android-ifaddrs.c, copyright Berkeley Software Design
+        Inc, Kenneth MacKay and Emergya (Cloud4all, FP7/2007-2013, grant agreement
+        n° 289016). Three clause BSD license.
+  """
+
+- llhttp, located at deps/llhttp, is licensed as follows:
+  """
+    This software is licensed under the MIT License.
+
+    Copyright Fedor Indutny, 2018.
+
+    Permission is hereby granted, free of charge, to any person obtaining a
+    copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to permit
+    persons to whom the Software is furnished to do so, subject to the
+    following conditions:
+
+    The above copyright notice and this permission notice shall be included
+    in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+    OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+    NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+    DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+    OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+    USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- OpenSSL, located at deps/openssl, is licensed as follows:
+  """
+    Copyright (c) 1998-2019 The OpenSSL Project.  All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    1. Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+    2. Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+
+    3. All advertising materials mentioning features or use of this
+    software must display the following acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
+
+    4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
+    endorse or promote products derived from this software without
+    prior written permission. For written permission, please contact
+    openssl-core@openssl.org.
+
+    5. Products derived from this software may not be called "OpenSSL"
+    nor may "OpenSSL" appear in their names without prior written
+    permission of the OpenSSL Project.
+
+    6. Redistributions of any form whatsoever must retain the following
+    acknowledgment:
+    "This product includes software developed by the OpenSSL Project
+    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
+
+    THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
+    EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+    PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
+    ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+    NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+    LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+    HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+    STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+    ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+    OF THE POSSIBILITY OF SUCH DAMAGE.
+    ====================================================================
+
+    This product includes cryptographic software written by Eric Young
+    (eay@cryptsoft.com).  This product includes software written by Tim
+    Hudson (tjh@cryptsoft.com).
+  """
+
+- Punycode.js, located at lib/punycode.js, is licensed as follows:
+  """
+    Copyright Mathias Bynens <https://mathiasbynens.be/>
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- V8, located at deps/v8, is licensed as follows:
+  """
+    This license applies to all parts of V8 that are not externally
+    maintained libraries.  The externally maintained libraries used by V8
+    are:
+
+      - PCRE test suite, located in
+        test/mjsunit/third_party/regexp-pcre/regexp-pcre.js.  This is based on the
+        test suite from PCRE-7.3, which is copyrighted by the University
+        of Cambridge and Google, Inc.  The copyright notice and license
+        are embedded in regexp-pcre.js.
+
+      - Layout tests, located in test/mjsunit/third_party/object-keys.  These are
+        based on layout tests from webkit.org which are copyrighted by
+        Apple Computer, Inc. and released under a 3-clause BSD license.
+
+      - Strongtalk assembler, the basis of the files assembler-arm-inl.h,
+        assembler-arm.cc, assembler-arm.h, assembler-ia32-inl.h,
+        assembler-ia32.cc, assembler-ia32.h, assembler-x64-inl.h,
+        assembler-x64.cc, assembler-x64.h, assembler-mips-inl.h,
+        assembler-mips.cc, assembler-mips.h, assembler.cc and assembler.h.
+        This code is copyrighted by Sun Microsystems Inc. and released
+        under a 3-clause BSD license.
+
+      - Valgrind client API header, located at src/third_party/valgrind/valgrind.h
+        This is released under the BSD license.
+
+      - The Wasm C/C++ API headers, located at third_party/wasm-api/wasm.{h,hh}
+        This is released under the Apache license. The API's upstream prototype
+        implementation also formed the basis of V8's implementation in
+        src/wasm/c-api.cc.
+
+    These libraries have their own licenses; we recommend you read them,
+    as their terms may differ from the terms below.
+
+    Further license information can be found in LICENSE files located in
+    sub-directories.
+
+    Copyright 2014, the V8 project authors. All rights reserved.
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+        * Neither the name of Google Inc. nor the names of its
+          contributors may be used to endorse or promote products derived
+          from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- SipHash, located at deps/v8/src/third_party/siphash, is licensed as follows:
+  """
+    SipHash reference C implementation
+
+    Copyright (c) 2016 Jean-Philippe Aumasson <jeanphilippe.aumasson@gmail.com>
+
+    To the extent possible under law, the author(s) have dedicated all
+    copyright and related and neighboring rights to this software to the public
+    domain worldwide. This software is distributed without any warranty.
+  """
+
+- zlib, located at deps/zlib, is licensed as follows:
+  """
+    zlib.h -- interface of the 'zlib' general purpose compression library
+    version 1.2.11, January 15th, 2017
+
+    Copyright (C) 1995-2017 Jean-loup Gailly and Mark Adler
+
+    This software is provided 'as-is', without any express or implied
+    warranty.  In no event will the authors be held liable for any damages
+    arising from the use of this software.
+
+    Permission is granted to anyone to use this software for any purpose,
+    including commercial applications, and to alter it and redistribute it
+    freely, subject to the following restrictions:
+
+    1. The origin of this software must not be misrepresented; you must not
+    claim that you wrote the original software. If you use this software
+    in a product, an acknowledgment in the product documentation would be
+    appreciated but is not required.
+    2. Altered source versions must be plainly marked as such, and must not be
+    misrepresented as being the original software.
+    3. This notice may not be removed or altered from any source distribution.
+
+    Jean-loup Gailly        Mark Adler
+    jloup@gzip.org          madler@alumni.caltech.edu
+  """
+
+- npm, located at deps/npm, is licensed as follows:
+  """
+    The npm application
+    Copyright (c) npm, Inc. and Contributors
+    Licensed on the terms of The Artistic License 2.0
+
+    Node package dependencies of the npm application
+    Copyright (c) their respective copyright owners
+    Licensed on their respective license terms
+
+    The npm public registry at https://registry.npmjs.org
+    and the npm website at https://www.npmjs.com
+    Operated by npm, Inc.
+    Use governed by terms published on https://www.npmjs.com
+
+    "Node.js"
+    Trademark Joyent, Inc., https://joyent.com
+    Neither npm nor npm, Inc. are affiliated with Joyent, Inc.
+
+    The Node.js application
+    Project of Node Foundation, https://nodejs.org
+
+    The npm Logo
+    Copyright (c) Mathias Pettersson and Brian Hammond
+
+    "Gubblebum Blocky" typeface
+    Copyright (c) Tjarda Koster, https://jelloween.deviantart.com
+    Used with permission
+
+    --------
+
+    The Artistic License 2.0
+
+    Copyright (c) 2000-2006, The Perl Foundation.
+
+    Everyone is permitted to copy and distribute verbatim copies
+    of this license document, but changing it is not allowed.
+
+    Preamble
+
+    This license establishes the terms under which a given free software
+    Package may be copied, modified, distributed, and/or redistributed.
+    The intent is that the Copyright Holder maintains some artistic
+    control over the development of that Package while still keeping the
+    Package available as open source and free software.
+
+    You are always permitted to make arrangements wholly outside of this
+    license directly with the Copyright Holder of a given Package.  If the
+    terms of this license do not permit the full use that you propose to
+    make of the Package, you should contact the Copyright Holder and seek
+    a different licensing arrangement.
+
+    Definitions
+
+        "Copyright Holder" means the individual(s) or organization(s)
+        named in the copyright notice for the entire Package.
+
+        "Contributor" means any party that has contributed code or other
+        material to the Package, in accordance with the Copyright Holder's
+        procedures.
+
+        "You" and "your" means any person who would like to copy,
+        distribute, or modify the Package.
+
+        "Package" means the collection of files distributed by the
+        Copyright Holder, and derivatives of that collection and/or of
+        those files. A given Package may consist of either the Standard
+        Version, or a Modified Version.
+
+        "Distribute" means providing a copy of the Package or making it
+        accessible to anyone else, or in the case of a company or
+        organization, to others outside of your company or organization.
+
+        "Distributor Fee" means any fee that you charge for Distributing
+        this Package or providing support for this Package to another
+        party.  It does not mean licensing fees.
+
+        "Standard Version" refers to the Package if it has not been
+        modified, or has been modified only in ways explicitly requested
+        by the Copyright Holder.
+
+        "Modified Version" means the Package, if it has been changed, and
+        such changes were not explicitly requested by the Copyright
+        Holder.
+
+        "Original License" means this Artistic License as Distributed with
+        the Standard Version of the Package, in its current version or as
+        it may be modified by The Perl Foundation in the future.
+
+        "Source" form means the source code, documentation source, and
+        configuration files for the Package.
+
+        "Compiled" form means the compiled bytecode, object code, binary,
+        or any other form resulting from mechanical transformation or
+        translation of the Source form.
+
+    Permission for Use and Modification Without Distribution
+
+    (1)  You are permitted to use the Standard Version and create and use
+    Modified Versions for any purpose without restriction, provided that
+    you do not Distribute the Modified Version.
+
+    Permissions for Redistribution of the Standard Version
+
+    (2)  You may Distribute verbatim copies of the Source form of the
+    Standard Version of this Package in any medium without restriction,
+    either gratis or for a Distributor Fee, provided that you duplicate
+    all of the original copyright notices and associated disclaimers.  At
+    your discretion, such verbatim copies may or may not include a
+    Compiled form of the Package.
+
+    (3)  You may apply any bug fixes, portability changes, and other
+    modifications made available from the Copyright Holder.  The resulting
+    Package will still be considered the Standard Version, and as such
+    will be subject to the Original License.
+
+    Distribution of Modified Versions of the Package as Source
+
+    (4)  You may Distribute your Modified Version as Source (either gratis
+    or for a Distributor Fee, and with or without a Compiled form of the
+    Modified Version) provided that you clearly document how it differs
+    from the Standard Version, including, but not limited to, documenting
+    any non-standard features, executables, or modules, and provided that
+    you do at least ONE of the following:
+
+        (a)  make the Modified Version available to the Copyright Holder
+        of the Standard Version, under the Original License, so that the
+        Copyright Holder may include your modifications in the Standard
+        Version.
+
+        (b)  ensure that installation of your Modified Version does not
+        prevent the user installing or running the Standard Version. In
+        addition, the Modified Version must bear a name that is different
+        from the name of the Standard Version.
+
+        (c)  allow anyone who receives a copy of the Modified Version to
+        make the Source form of the Modified Version available to others
+        under
+
+            (i)  the Original License or
+
+            (ii)  a license that permits the licensee to freely copy,
+            modify and redistribute the Modified Version using the same
+            licensing terms that apply to the copy that the licensee
+            received, and requires that the Source form of the Modified
+            Version, and of any works derived from it, be made freely
+            available in that license fees are prohibited but Distributor
+            Fees are allowed.
+
+    Distribution of Compiled Forms of the Standard Version
+    or Modified Versions without the Source
+
+    (5)  You may Distribute Compiled forms of the Standard Version without
+    the Source, provided that you include complete instructions on how to
+    get the Source of the Standard Version.  Such instructions must be
+    valid at the time of your distribution.  If these instructions, at any
+    time while you are carrying out such distribution, become invalid, you
+    must provide new instructions on demand or cease further distribution.
+    If you provide valid instructions or cease distribution within thirty
+    days after you become aware that the instructions are invalid, then
+    you do not forfeit any of your rights under this license.
+
+    (6)  You may Distribute a Modified Version in Compiled form without
+    the Source, provided that you comply with Section 4 with respect to
+    the Source of the Modified Version.
+
+    Aggregating or Linking the Package
+
+    (7)  You may aggregate the Package (either the Standard Version or
+    Modified Version) with other packages and Distribute the resulting
+    aggregation provided that you do not charge a licensing fee for the
+    Package.  Distributor Fees are permitted, and licensing fees for other
+    components in the aggregation are permitted. The terms of this license
+    apply to the use and Distribution of the Standard or Modified Versions
+    as included in the aggregation.
+
+    (8) You are permitted to link Modified and Standard Versions with
+    other works, to embed the Package in a larger work of your own, or to
+    build stand-alone binary or bytecode versions of applications that
+    include the Package, and Distribute the result without restriction,
+    provided the result does not expose a direct interface to the Package.
+
+    Items That are Not Considered Part of a Modified Version
+
+    (9) Works (including, but not limited to, modules and scripts) that
+    merely extend or make use of the Package, do not, by themselves, cause
+    the Package to be a Modified Version.  In addition, such works are not
+    considered parts of the Package itself, and are not subject to the
+    terms of this license.
+
+    General Provisions
+
+    (10)  Any use, modification, and distribution of the Standard or
+    Modified Versions is governed by this Artistic License. By using,
+    modifying or distributing the Package, you accept this license. Do not
+    use, modify, or distribute the Package, if you do not accept this
+    license.
+
+    (11)  If your Modified Version has been derived from a Modified
+    Version made by someone other than you, you are nevertheless required
+    to ensure that your Modified Version complies with the requirements of
+    this license.
+
+    (12)  This license does not grant you the right to use any trademark,
+    service mark, tradename, or logo of the Copyright Holder.
+
+    (13)  This license includes the non-exclusive, worldwide,
+    free-of-charge patent license to make, have made, use, offer to sell,
+    sell, import and otherwise transfer the Package with respect to any
+    patent claims licensable by the Copyright Holder that are necessarily
+    infringed by the Package. If you institute patent litigation
+    (including a cross-claim or counterclaim) against any party alleging
+    that the Package constitutes direct or contributory patent
+    infringement, then this Artistic License to you shall terminate on the
+    date that such litigation is filed.
+
+    (14)  Disclaimer of Warranty:
+    THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS
+    IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED
+    WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR
+    NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL
+    LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL
+    BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+    DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF
+    ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+    --------
+  """
+
+- GYP, located at tools/gyp, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- inspector_protocol, located at tools/inspector_protocol, is licensed as follows:
+  """
+    // Copyright 2016 The Chromium Authors. All rights reserved.
+    //
+    // Redistribution and use in source and binary forms, with or without
+    // modification, are permitted provided that the following conditions are
+    // met:
+    //
+    //    * Redistributions of source code must retain the above copyright
+    // notice, this list of conditions and the following disclaimer.
+    //    * Redistributions in binary form must reproduce the above
+    // copyright notice, this list of conditions and the following disclaimer
+    // in the documentation and/or other materials provided with the
+    // distribution.
+    //    * Neither the name of Google Inc. nor the names of its
+    // contributors may be used to endorse or promote products derived from
+    // this software without specific prior written permission.
+    //
+    // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- jinja2, located at tools/inspector_protocol/jinja2, is licensed as follows:
+  """
+    Copyright (c) 2009 by the Jinja Team, see AUTHORS for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+          notice, this list of conditions and the following disclaimer.
+
+        * Redistributions in binary form must reproduce the above
+          copyright notice, this list of conditions and the following
+          disclaimer in the documentation and/or other materials provided
+          with the distribution.
+
+        * The names of the contributors may not be used to endorse or
+          promote products derived from this software without specific
+          prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- markupsafe, located at tools/inspector_protocol/markupsafe, is licensed as follows:
+  """
+    Copyright (c) 2010 by Armin Ronacher and contributors.  See AUTHORS
+    for more details.
+
+    Some rights reserved.
+
+    Redistribution and use in source and binary forms of the software as well
+    as documentation, with or without modification, are permitted provided
+    that the following conditions are met:
+
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+
+    * The names of the contributors may not be used to endorse or
+      promote products derived from this software without specific
+      prior written permission.
+
+    THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+    CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
+    NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+    OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+    EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+    PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+    PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+    NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+    SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+    DAMAGE.
+  """
+
+- cpplint.py, located at tools/cpplint.py, is licensed as follows:
+  """
+    Copyright (c) 2009 Google Inc. All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+       * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+       * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- ESLint, located at tools/node_modules/eslint, is licensed as follows:
+  """
+    Copyright JS Foundation and other contributors, https://js.foundation
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- babel-eslint, located at tools/node_modules/babel-eslint, is licensed as follows:
+  """
+    Copyright (c) 2014-2016 Sebastian McKenzie <sebmck@gmail.com>
+
+    MIT License
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- gtest, located at test/cctest/gtest, is licensed as follows:
+  """
+    Copyright 2008, Google Inc.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions are
+    met:
+
+        * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+        * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following disclaimer
+    in the documentation and/or other materials provided with the
+    distribution.
+        * Neither the name of Google Inc. nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- nghttp2, located at deps/nghttp2, is licensed as follows:
+  """
+    The MIT License
+
+    Copyright (c) 2012, 2014, 2015, 2016 Tatsuhiro Tsujikawa
+    Copyright (c) 2012, 2014, 2015, 2016 nghttp2 contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, sublicense, and/or sell copies of the Software, and to
+    permit persons to whom the Software is furnished to do so, subject to
+    the following conditions:
+
+    The above copyright notice and this permission notice shall be
+    included in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- node-inspect, located at deps/node-inspect, is licensed as follows:
+  """
+    Copyright Node.js contributors. All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to
+    deal in the Software without restriction, including without limitation the
+    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+    IN THE SOFTWARE.
+  """
+
+- large_pages, located at src/large_pages, is licensed as follows:
+  """
+     Copyright (C) 2018 Intel Corporation
+
+     Permission is hereby granted, free of charge, to any person obtaining a copy
+     of this software and associated documentation files (the "Software"),
+     to deal in the Software without restriction, including without limitation
+     the rights to use, copy, modify, merge, publish, distribute, sublicense,
+     and/or sell copies of the Software, and to permit persons to whom
+     the Software is furnished to do so, subject to the following conditions:
+
+     The above copyright notice and this permission notice shall be included
+     in all copies or substantial portions of the Software.
+
+     THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+     OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+     FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
+     THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES
+     OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+     ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
+     OR OTHER DEALINGS IN THE SOFTWARE.
+  """
+
+- caja, located at lib/internal/freeze_intrinsics.js, is licensed as follows:
+  """
+     Adapted from SES/Caja - Copyright (C) 2011 Google Inc.
+     Copyright (C) 2018 Agoric
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+  """
+
+- brotli, located at deps/brotli, is licensed as follows:
+  """
+    Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+  """
+
+- HdrHistogram, located at deps/histogram, is licensed as follows:
+  """
+    The code in this repository code was Written by Gil Tene, Michael Barker,
+    and Matt Warren, and released to the public domain, as explained at
+    http://creativecommons.org/publicdomain/zero/1.0/
+
+    For users of this code who wish to consume it under the "BSD" license
+    rather than under the public domain or CC0 contribution text mentioned
+    above, the code found under this directory is *also* provided under the
+    following license (commonly referred to as the BSD 2-Clause License). This
+    license does not detract from the above stated release of the code into
+    the public domain, and simply represents an additional license granted by
+    the Author.
+
+    -----------------------------------------------------------------------------
+    ** Beginning of "BSD 2-Clause License" text. **
+
+     Copyright (c) 2012, 2013, 2014 Gil Tene
+     Copyright (c) 2014 Michael Barker
+     Copyright (c) 2014 Matt Warren
+     All rights reserved.
+
+     Redistribution and use in source and binary forms, with or without
+     modification, are permitted provided that the following conditions are met:
+
+     1. Redistributions of source code must retain the above copyright notice,
+        this list of conditions and the following disclaimer.
+
+     2. Redistributions in binary form must reproduce the above copyright notice,
+        this list of conditions and the following disclaimer in the documentation
+        and/or other materials provided with the distribution.
+
+     THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+     AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+     IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+     ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+     LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+     CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+     SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+     INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+     CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+     ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+     THE POSSIBILITY OF SUCH DAMAGE.
+  """
+
+- node-heapdump, located at src/heap_utils.cc, is licensed as follows:
+  """
+    ISC License
+
+    Copyright (c) 2012, Ben Noordhuis <info@bnoordhuis.nl>
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+    === src/compat.h src/compat-inl.h ===
+
+    ISC License
+
+    Copyright (c) 2014, StrongLoop Inc.
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- rimraf, located at lib/internal/fs/rimraf.js, is licensed as follows:
+  """
+    The ISC License
+
+    Copyright (c) Isaac Z. Schlueter and Contributors
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+    IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+  """
+
+- uvwasi, located at deps/uvwasi, is licensed as follows:
+  """
+    MIT License
+
+    Copyright (c) 2019 Colin Ihrig and Contributors
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in all
+    copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+    SOFTWARE.
+  """
+
+
+
+
+

MICROSOFT SOFTWARE LICENSE TERMS

+
+
+
MICROSOFT VISUAL STUDIO CODE
+
+These license terms are an agreement between you and Microsoft Corporation (or based on where you live, one of its affiliates). They apply to the software named above. The terms also apply to any Microsoft services or updates for the software, except to the extent those have different terms.
+
+IF YOU COMPLY WITH THESE LICENSE TERMS, YOU HAVE THE RIGHTS BELOW.
+
+    1. INSTALLATION AND USE RIGHTS.
+        a. General. You may use any number of copies of the software to develop and test your applications, including deployment within your internal corporate network.
+        b. Demo use. The uses permitted above include use of the software in demonstrating your applications.
+        c. Third Party Components. The software may include third party components with separate legal notices or governed by other agreements, as may be described in the ThirdPartyNotices file accompanying the software.
+        d. Extensions. The software gives you the option to download other Microsoft and third party software packages from our extension marketplace or package managers. Those packages are under their own licenses, and not this agreement. Microsoft does not distribute, license or provide any warranties for any of the third party packages. By accessing or using our extension marketplace, you agree to the extension marketplace terms located at https://aka.ms/vsmarketplace-ToU.
+    2. DATA.
+        a. Data Collection. The software may collect information about you and your use of the software, and send that to Microsoft. Microsoft may use this information to provide services and improve our products and services. You may opt-out of many of these scenarios, but not all, as described in the product documentation located at https://code.visualstudio.com/docs/supporting/faq#_how-to-disable-telemetry-reporting. There may also be some features in the software that may enable you and Microsoft to collect data from users of your applications. If you use these features, you must comply with applicable law, including providing appropriate notices to users of your applications together with Microsoft’s privacy statement. Our privacy statement is located at https://go.microsoft.com/fwlink/?LinkID=824704. You can learn more about data collection and use in the help documentation and our privacy statement. Your use of the software operates as your consent to these practices.
+        c. Processing of Personal Data. To the extent Microsoft is a processor or subprocessor of personal data in connection with the software, Microsoft makes the commitments in the European Union General Data Protection Regulation Terms of the Online Services Terms to all customers effective May 25, 2018, at https://go.microsoft.com/?linkid=9840733.
+    3. UPDATES. The software may periodically check for updates and download and install them for you. You may obtain updates only from Microsoft or authorized sources. Microsoft may need to update your system to provide you with updates. You agree to receive these automatic updates without any additional notice. Updates may not include or support all existing software features, services, or peripheral devices. If you do not want automatic updates, you may turn them off by following the instructions in the documentation at https://go.microsoft.com/fwlink/?LinkID=616397.
+    4. FEEDBACK. If you give feedback about the software to Microsoft, you give to Microsoft, without charge, the right to use, share and commercialize your feedback in any way and for any purpose. You will not give feedback that is subject to a license that requires Microsoft to license its software or documentation to third parties because we include your feedback in them. These rights survive this agreement.
+    5. SCOPE OF LICENSE. This license applies to the Visual Studio Code product. Source code for Visual Studio Code is available at https://github.com/Microsoft/vscode under the MIT license agreement. The software is licensed, not sold. This agreement only gives you some rights to use the software. Microsoft reserves all other rights. Unless applicable law gives you more rights despite this limitation, you may use the software only as expressly permitted in this agreement. In doing so, you must comply with any technical limitations in the software that only allow you to use it in certain ways. You may not
+        reverse engineer, decompile or disassemble the software, or otherwise attempt to derive the source code for the software except and solely to the extent required by third party licensing terms governing use of certain open source components that may be included in the software;
+        remove, minimize, block or modify any notices of Microsoft or its suppliers in the software;
+        use the software in any way that is against the law;
+        share, publish, rent or lease the software, or provide the software as a stand-alone offering for others to use.
+    6. SUPPORT SERVICES. Because this software is “as is,” we may not provide support services for it.
+    7. ENTIRE AGREEMENT. This agreement, and the terms for supplements, updates, Internet-based services and support services that you use, are the entire agreement for the software and support services.
+    8. EXPORT RESTRICTIONS. You must comply with all domestic and international export laws and regulations that apply to the software, which include restrictions on destinations, end-users, and end use. For further information on export restrictions, see https://www.microsoft.com/exporting.
+    9. APPLICABLE LAW. If you acquired the software in the United States, Washington law applies to interpretation of and claims for breach of this agreement, and the laws of the state where you live apply to all other claims. If you acquired the software in any other country, its laws apply.
+    10. CONSUMER RIGHTS; REGIONAL VARIATIONS. This agreement describes certain legal rights. You may have other rights, including consumer rights, under the laws of your state or country. Separate and apart from your relationship with Microsoft, you may also have rights with respect to the party from which you acquired the software. This agreement does not change those other rights if the laws of your state or country do not permit it to do so. For example, if you acquired the software in one of the below regions, or mandatory country law applies, then the following provisions apply to you:
+        a. Australia. You have statutory guarantees under the Australian Consumer Law and nothing in this agreement is intended to affect those rights.
+        b. Canada. If you acquired this software in Canada, you may stop receiving updates by turning off the automatic update feature, disconnecting your device from the Internet (if and when you re-connect to the Internet, however, the software will resume checking for and installing updates), or uninstalling the software. The product documentation, if any, may also specify how to turn off updates for your specific device or software.
+        c. Germany and Austria.
+            Warranty. The properly licensed software will perform substantially as described in any Microsoft materials that accompany the software. However, Microsoft gives no contractual guarantee in relation to the licensed software.
+            Limitation of Liability. In case of intentional conduct, gross negligence, claims based on the Product Liability Act, as well as, in case of death or personal or physical injury, Microsoft is liable according to the statutory law.
+
+        Subject to the foregoing clause (ii), Microsoft will only be liable for slight negligence if Microsoft is in breach of such material contractual obligations, the fulfillment of which facilitate the due performance of this agreement, the breach of which would endanger the purpose of this agreement and the compliance with which a party may constantly trust in (so-called "cardinal obligations"). In other cases of slight negligence, Microsoft will not be liable for slight negligence.
+    11. DISCLAIMER OF WARRANTY. The software is licensed “as-is.” You bear the risk of using it. Microsoft gives no express warranties, guarantees or conditions. To the extent permitted under your local laws, Microsoft excludes the implied warranties of merchantability, fitness for a particular purpose and non-infringement.
+
+    12. LIMITATION ON AND EXCLUSION OF DAMAGES. You can recover from Microsoft and its suppliers only direct damages up to U.S. $5.00. You cannot recover any other damages, including consequential, lost profits, special, indirect or incidental damages.
+
+    This limitation applies to (a) anything related to the software, services, content (including code) on third party Internet sites, or third party applications; and (b) claims for breach of contract, breach of warranty, guarantee or condition, strict liability, negligence, or other tort to the extent permitted by applicable law.
+
+    It also applies even if Microsoft knew or should have known about the possibility of the damages. The above limitation or exclusion may not apply to you because your state or country may not allow the exclusion or limitation of incidental, consequential or other damages.
+
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/migration-from-devonfw-3.0.0-or-lower.html b/docs/ide/1.0/migration-from-devonfw-3.0.0-or-lower.html new file mode 100644 index 00000000..4e7a4976 --- /dev/null +++ b/docs/ide/1.0/migration-from-devonfw-3.0.0-or-lower.html @@ -0,0 +1,405 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

Migration from oasp4j-ide

+
+
+

The devonfw-ide is a completely new and innovative solution for managing the local development environment that has been created from scratch. +Releases of OASP as well as releases of devonfw until version 3.1.x are based on the old oasp4j-ide that is now considered deprecated. As devonfw-ide is a complete redesign this will have some impact for the users. This section should help you with the transition so that you do not get lost.

+
+
+
+
+

Get familiar with devonfw-ide

+
+
+

First of all you should roughly get familiar with the new devonfw-ide. The key features and changes are:

+
+
+
    +
  • +

    platform-agnostic (supports Windows, Mac, and Linux in a single distribution)

    +
  • +
  • +

    small core (reduced the download package from ~2 gigabyte to ~2 megabyte)

    +
  • +
  • +

    fast and easy updates (built in update support)

    +
  • +
  • +

    minimum number of scripts (removed tons of end-user scripts making things much simpler)

    +
  • +
  • +

    fully automated setup (run setup script and you are ready - even for advanced features that had to be configured manually before)

    +
  • +
  • +

    single command for everything (entire CLI available via new devon command)

    +
  • +
+
+
+

For all the details you should study the documentation starting from the beginning.

+
+
+
+
+

Migration of existing oasp4j-ide installation

+
+
+
    +
  • +

    extract new devonfw-ide-scripts on top of your existing installation

    +
  • +
  • +

    run setup

    +
  • +
  • +

    done

    +
  • +
+
+
+

If you get errors:

+
+
+
    +
  • +

    ask your technical lead to fix the settings git repo for devonfw-ide or offer to do it for them.

    +
  • +
  • +

    you need to merge the devon folder into your settings

    +
  • +
  • +

    you need to merge the devon.properties into your settings

    +
  • +
  • +

    you should check your variables[-customized][.bat] and merge required customizations into the proper configuration

    +
  • +
+
+
+
+
+

Hints for users after migration

+
+
+

Getting used to all the new commands might be tedious when starting after a migration.

+
+
+
Comparison of commands
+

|== == == == == == == == == == == = +|oasp4j-ide command|devonfw-ide command|Comment +|create-or-update-workspace|devon eclipse ws-update +.4+|actually not needed anymore as workspace is updated automatically when IDE is launched. To launch your IDE simply run devon eclipse, devon intellij, or devon vscode. If you like to get launch scripts for your IDE e.g. Eclipse just call devon eclipse --all create-script. +|create-or-update-workspace «workspace»|cd «workspace» && devon eclipse ws-update +|update-all-workspaces|devon eclipse --all ws-update +|create-or-update-workspace-vs|devon vscode ws-update

+
+
+

|devcon workspace create «workspace»|Simply create the «workspace» directory (e.g. cd workspaces && mkdir examples)|

+
+
+

|scripts/update-eclipse-workspace-settings|devon eclipse ws-reverse|To add new properties (old option --new) use devon eclipse ws-reverse-add

+
+
+

|devcon project build
+devcon devon4j build
+devcon devon4ng build +|devon build|

+
+
+

|devcon devon4j create|devon java create|

+
+
+

|devcon devon4ng create|devon ng create|

+
+
+

|devcon system *
+devcon dist * +|setup or devon ide setup|

+
+
+

|console.bat|-|Simply open terminal in selected folder. On Windows right-click folder in windows-explorer and select open devonfw CMD here.

+
+
+

|devcon help|devon help|

+
+
+

|devcon doc|Read the documentation from devonfw.com| +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/mvn.html b/docs/ide/1.0/mvn.html new file mode 100644 index 00000000..28cd1704 --- /dev/null +++ b/docs/ide/1.0/mvn.html @@ -0,0 +1,297 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

mvn

+
+
+

The mvn commandlet allows you to install, configure, and launch maven. It is similar to maven-wrapper and mdub. So calling devon mvn «args» is more or less the same as calling mvn «args» but with the benefit that the version of maven preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon mvn «args») are explained by the following table:

+
+
+
Usage of devon mvn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via MVN_BUILD_OPTS +|setup |setup Maven (install and verify), configurable via MAVEN_VERSION +|get-version |Print the version of your current project. Will consolidate the version for multi-module projects ignoring dev[-SNAPSHOT] versions and fail on mixed versions. +|set-version «nv» [«cv»] |Set the version of your current project to «nv» (assuming your current version is «cv»). +|check-no-snapshots |Check if no «version»-SNAPSHOT dependencies are used. +|check-top-level-project |Check if you are running on a top-level project or fail if in a module or no maven project at all. +|release |Start a clean deploy release build, configurable via MVN_RELEASE_OPTS +|«args» |run maven with the given arguments («args») +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/ng.html b/docs/ide/1.0/ng.html new file mode 100644 index 00000000..406bb0a8 --- /dev/null +++ b/docs/ide/1.0/ng.html @@ -0,0 +1,293 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

ng

+
+
+

The ng commandlet allows you to install, configure, and launch ng (angular-cli). Calling devon ng «args» is more or less the same as calling ng «args» but with some advanced features and ensuring that ng is properly set up for your project.

+
+
+

The arguments (devon ng «args») are explained by the following table:

+
+
+
Usage of devon ng
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup yarn (install and verify), configurable via NG_VERSION +|create |Create a new devon4ng project. +|cicd «args» |generate cicd files for the current devon4ng project +|«args» |run ng with the given arguments («args») +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/node.html b/docs/ide/1.0/node.html new file mode 100644 index 00000000..01db9571 --- /dev/null +++ b/docs/ide/1.0/node.html @@ -0,0 +1,293 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

node

+
+
+

The node commandlet allows you to install and set up node.js. +The arguments (devon node «args») are explained by the following table:

+
+
+
Usage of devon node
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |setup node.js (install and verify), configurable via NODE_VERSION +|create «name» [«args»] | create a new devon4node application (same as devon4node new) +|generate «s» [«args»] | generate devon4node components using the schematic «s» (same as devon4node generate) +|db «c» [«args»] | execute a TypeORM command «c» (same as devon4node db) +|cicd «args» |generate cicd files for the current devon4node project +|«args» | call NodeJS with the specified arguments +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/npm.html b/docs/ide/1.0/npm.html new file mode 100644 index 00000000..702d09b3 --- /dev/null +++ b/docs/ide/1.0/npm.html @@ -0,0 +1,296 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

npm

+
+
+

The npm commandlet allows you to install, configure, and launch npm. Calling devon npm «args» is more or less the same as calling npm «args» but with the benefit that the version of npm preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon npm «args») are explained by the following table:

+
+
+
Usage of devon npm
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via NPM_BUILD_OPTS +|setup |setup NPM (install and verify), configurable via NPM_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |Start a clean deploy release build, configurable via NPM_RELEASE_OPTS +|«args» |run NPM with the given arguments («args») +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/project.html b/docs/ide/1.0/project.html new file mode 100644 index 00000000..63ee1a99 --- /dev/null +++ b/docs/ide/1.0/project.html @@ -0,0 +1,318 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

project

+
+
+

The project commandlet manages projects of your devonfw-ide. +You need to supply additional arguments as devon project «args». These are explained by the following table:

+
+
+
Usage of devon project
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup [«project»] |setup or update all or the specified project(s) +|== == == == == == == == == == == =

+
+
+
+
+

setup

+
+
+

Run devon project setup «project» to setup the pre-configured project «project». +During the initial setup this will happen for all active projects. +Call this command explicitly to setup a project that is not active by default. +Further, if the project has already been setup it will be updated (git pull). +In case you omit the project name all (active) projects will be setup/updated. +Use force option (-f) to setup all projects even if not active.

+
+
+

The setup of a project will include:

+
+
+
    +
  • +

    clone or pull the project from git into the configured workspace and location

    +
  • +
  • +

    trigger a build on the project (optional as configured)

    +
  • +
  • +

    import the project into eclipse (optional as configured)

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/projects.html b/docs/ide/1.0/projects.html new file mode 100644 index 00000000..8be95b5f --- /dev/null +++ b/docs/ide/1.0/projects.html @@ -0,0 +1,317 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Project import

+
+
+

The devonfw-ide supports to automatically check out and import required projects into your IDE during setup. To configure this you put a .properties file for each desired project into the projects sub-folder in your settings. Each .properties file describes one "project" which you would like to check out and (potentially) import:

+
+
+
+
path=myproject
+workingsets=Set1,Set2
+workspace=example
+git.url=http://github.com/someorg/someproject
+git.branch=develop
+build.path=.
+build.cmd=mvn -DskipTests=true -Darchetype.test.skip=true clean install
+eclipse=import
+active=true
+
+
+
+
+
.Variables of project import
+
+
+
+

|== = +|Variable|Value|Meaning +|path|e.g. myproject, will clone into ${WORKSPACE_PATH}/myproject|(required) Path into which the projects is cloned. This path is relative to the workspace. +|working sets|e.g. ws1,ws2|(optional) This will create working sets (in eclipse). Each module (eclipse project) of this project will be part of all these working sets. Working sets will be automatically created if necessary. +|workspace|main|Workspace to use for checkout and import. Default is main. +|git.url|e.g. http://github.com/someorg/someproject|(required) Git URL to use for cloning the project. +|git.branch|e.g. develop|(optional) Git branch to checkout. Git default branch is default. +|build.path|e.g. . (default)|(optional) The directory inside path where to trigger an initial build after clone or pull (if build.cmd is set). For a regular project use . to build top-level project. +|build.cmd +|e.g. mvn -D skip Tests=true -Darchetype.test.skip=true clean install +|(optional) The devonfw command to invoke to build the project after clone or pull. If omitted no build is triggered. +|eclipse|e.g. import|(optional) Desired action for eclipse IDE. If you put import here all modules (eclipse projects) in the current project will be imported into eclipse. If you leave this out or put any other value for this parameter, no change in eclipse is done. +|active|true|(optional) If set to false the project is skipped during the setup. +|== =

+
+
+

Please note that the .properties file is parsed via shell and not via java. So be careful with "advanced" features .properties files normally support.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/release.html b/docs/ide/1.0/release.html new file mode 100644 index 00000000..91f33047 --- /dev/null +++ b/docs/ide/1.0/release.html @@ -0,0 +1,349 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

release

+
+
+

Create a release in a standardized way including the following steps:

+
+
+
    +
  • +

    verify the current project (no local changes, etc.)

    +
  • +
  • +

    warn if «version»-SNAPSHOT dependencies are used

    +
  • +
  • +

    determine «version» (if currently «version»-SNAPSHOT) and print out release information.

    +
  • +
  • +

    ask user for confirmation

    +
  • +
  • +

    bump release to «version» in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    create annotated tag for your release as release/«version»

    +
  • +
  • +

    invoke deployment on build-system

    +
  • +
  • +

    set next version as («version»+1)-SNAPSHOT in build configuration (e.g. pom.xml files)

    +
  • +
  • +

    commit the change

    +
  • +
  • +

    push your changes

    +
  • +
+
+
+
Usage of devon java
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|…​ |any optional argument will directly be passed to the actual command to build the deployment +|== == == == == == == == == == == =

+
+
+
+
+

Build-Tools

+
+
+

This release commandlet utilizes the build commandlet to support multiple build-tools such as maven, gradle, or npm. Each of those commandlets should respect the variable «TOOL»_RELEASE_OPTS to customize the parameters for the release build.

+
+
+

So e.g. if a pom.xml is detected, maven will be used. In this example the variable MVN_RELEASE_OPTS is used that defaults to clean deploy -Dchangelist= -Pdeploy. +If you provide a specific argument this will be passed additionally. +So if you invoke the command devon release -P myProfile, the above step invoke deployment on build-system would technically call this:

+
+
+
+
mvn clean deploy -Dchangelist= -Pdeploy -P myProfile
+
+
+
+

Please also note that it is very tricky to determine and modify the version of a project in a fully generic way. +Even though we try our best to support different scenarios, we can not ensure this is working for edge-cases. +Therefore, we strongly encourage to follow best practices such as ci-friendly maven. +Further, sticking to the defaults and follow the devonfw standard to name the profile for custom goals in deployment simply deploy is recommended.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/scripts.html b/docs/ide/1.0/scripts.html new file mode 100644 index 00000000..bc372465 --- /dev/null +++ b/docs/ide/1.0/scripts.html @@ -0,0 +1,324 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

scripts

+
+
+

This directory is the heart of the devonfw-ide and contains the required scripts.

+
+
+
File structure of the conf folder
+
+
/scripts
+├──/ command
+│  ├── build
+│  ├── docker
+│  ├── eclipse
+│  ├── gradle
+│  ├── help
+│  ├── ide
+│  ├── intellij
+│  ├── ionic
+│  ├── jasypt
+│  ├── java
+│  ├── jenkins
+│  ├── kubectl
+│  ├── mvn
+│  ├── ng
+│  ├── node
+│  ├── npm
+│  ├── project
+│  ├── release
+│  ├── sonar
+│  ├── vscode
+│  └── yarn
+├── devon
+├── devon.bat
+├── environment-project
+├── environment-project.bat
+├── functions
+└── devon.properties
+
+
+
+

The command folder contains the commandlets. +The devon script is the key command line interface for devonfw-ide. +There is also devon.bat that can be used in cmd or PowerShell. +As the devon CLI can be used as a global command on your computer from any directory and gets installed centrally, it aims to be stable, minimal, and lightweight. +The key logic to set up the environment variables is therefore in a separate script environment-project and its Windows variant environment-project.bat inside this scripts folder. +The file functions contains a collection of reusable bash functions. +These are sourced and used by the commandlets. +Finally the devon.properties file contains defaults for the general configuration of devonfw-ide.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/settings.html b/docs/ide/1.0/settings.html new file mode 100644 index 00000000..e6c3fb18 --- /dev/null +++ b/docs/ide/1.0/settings.html @@ -0,0 +1,370 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

settings

+
+
+

The devonfw-ide requires settings with configuration templates for the arbitrary tools.

+
+
+

To get an initial set of these settings we provide the default ide-settings as an initial package. These are also released so you can download the latest stable or any history version at maven central.

+
+
+

To test devonfw-ide or for very small projects you can also use the latest default settings (just hit return when setup is asking for the Settings URL). +However, for collaborative projects we strongly encourage you to distribute and maintain the settings via a dedicated and project specific git repository. +This gives you the freedom to control and manage the tools with their versions and configurations during the project lifecycle. +Therefore simply follow the admin usage guide.

+
+
+
+
+

Structure

+
+
+

The settings folder (see SETTINGS_PATH) has to follow this file structure:

+
+
+
File structure of settings
+
+
/settings
+├──/ devon
+│  ├──/ conf
+│  │  ├──/ .m2
+│  │  │  └── settings.xml
+│  │  ├──/ npm
+│  │  │  └── .npmrc
+│  │  └── devon.properties
+├──/ eclipse
+│  ├──/ workspace
+│  │  ├──/ setup
+│  │  └──/ update
+│  ├── lifecycle-mapping-metadata.xml
+│  └── project.dictionary
+├──/ ...
+├──/ sonarqube
+│  └──/ profiles
+│     ├── Devon-C#.xml
+│     ├── ...
+│     └── Devon-XML.xml
+├──/ vscode
+│  └──/ workspace
+│     ├──/ setup
+│     └──/ update
+└── devon.properties
+
+
+
+

As you can see, the settings folder contains sub-folders for tools of the IDE. +So the devon folder contains devon.properties files for the configuration of your environment. +Further, for the IDEs such as eclipse or vscode, the according folders contain the templates to manage the workspace via our configurator.

+
+
+
+
+

Configuration Philosophy

+
+
+

Different tools and configuration files require a different handling:

+
+
+
    +
  • +

    Where suitable, we directly use these configurations from your settings (e.g. for eclipse/lifecycle-mapping-metadata.xml, or eclipse/project.dictionary).

    +
  • +
  • +

    The devon folder in settings contains templates for configuration files. These are copied to the devonfw-ide installation during setup (if no such file already exists). In this way the settings repository can provide reasonable defaults but allows the user to take over control and customize to his personal needs (e.g. .m2/settings.xml).

    +
  • +
  • +

    Other configurations need to be imported manually. To avoid manual steps and simplify use we try to automate as much as possible. This currently applies to sonarqube profiles but will be automated with sonar-devon4j-plugin in the future.

    +
  • +
  • +

    For tools with complex configuration structures like eclipse, intellij, or vscode we provide a smart mechanism via our configurator.

    +
  • +
+
+
+
+
+

Customize Settings

+
+
+

You can easily customize these settings for the requirements of your project. We suggest that one team member is responsible to ensure that everything stays consistent and works.

+
+
+

You may also create new sub-folders in settings and put individual items according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth to share in your team. However, to share and maintain knowledge we recommend to use a wiki.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/setup.html b/docs/ide/1.0/setup.html new file mode 100644 index 00000000..9e4d6ff7 --- /dev/null +++ b/docs/ide/1.0/setup.html @@ -0,0 +1,385 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

Setup

+
+ +
+
+
+

Prerequisites

+
+
+

We try to make it as simple as possible for you. However, there are some minimal prerequisites:

+
+
+
    +
  • +

    You need to have a tool to extract *.tar.gz files (tar and gzip). On Windows before Version 10 (1803) use 7-zip. On all other platforms this comes out of the box.

    +
  • +
  • +

    You need to have git and curl installed.

    +
    +
      +
    • +

      On Windows you only need to download and install git for windows. This also ships with bash and curl.

      +
    • +
    • +

      On Linux you might need to install the above tools in case they are not present (e.g. sudo apt-get install git curl or sudo yum install git-core curl)

      +
    • +
    • +

      On MacOS you only need to download and install git for mac.

      +
    • +
    +
    +
  • +
+
+
+
+
+

Download

+
+
+

The latest release of devonfw-ide can be downloaded from here (You can find all releases in maven central).

+
+
+
+
+

Install

+
+
+

Create a central folder like C:\projects or /projects. Inside this folder, create a sub-folder for your new project such as my-project and extract the contents of the downloaded archive (devonfw-ide-scripts-*.tar.gz) to this new folder. Run the command setup in this folder (on windows double clicking on setup.bat). +That’s all. To get started read the usage.

+
+
+
+
+

Uninstall

+
+
+

To "uninstall" your devonfw-ide you only need to call the following command:

+
+
+
+
devon ide uninstall
+
+
+
+

Then you can delete the devonfw-ide top-level folder(s) (${DEVON_IDE_HOME}).

+
+
+

The devonfw-ide is designed to be non-invasive to your operating system and computer. Therefore it is not "installed" on your system in a classical way. Instead you just create a folder and extract the downloaded archive to it. You only have to install regularly in advance some specific prerequisites like git. All the other software remains locally in your devonfw-ide folder. However, there are the following exceptions (which are reverted by devon ide uninstall):

+
+
+
    +
  • +

    The devon command is copied to your home directory (~/.devon/devon)

    +
  • +
  • +

    The devon alias is added to your shell config (~/.bashrc and ~/.zshrc, search for alias devon="source ~/.devon/devon").

    +
  • +
  • +

    On Windows the devon.bat command is copied to your home directory (%USERPROFILE%\scripts\devon.bat)

    +
  • +
  • +

    On Windows this %USERPROFILE%\scripts directory is added to the PATH of your user.

    +
  • +
  • +

    The devonfw-ide will download a third party software to your ~/Downloads/devonfw-ide folder to reduce redundant storage. You have to delete this folder manually as we do not want to be responsible for data-loss in case users manually put files here.

    +
  • +
+
+
+
+
+

Testing SNAPSHOT releases

+
+
+

Whenever a story in devonfw-ide is completed by merging a PR, +our github actions will build a new SNAPSHOT release and on success deploy it to nexus on OSSRH. +You can therefore find the latest devonfw SNAPSHOT releases here. +Simply choose the latest SNAPSHOT version folder and then inside the *.tar.gz file for the latest version. +Once downloaded, you can proceed as with official releases (see install).

+
+
+

If you test the latest SNAPSHOT please also give feedback to bug or feature tickets to let us know if things are working or not. +Thanks for your testing, support and help to make devonfw better!

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/software-package.html b/docs/ide/1.0/software-package.html new file mode 100644 index 00000000..390054cb --- /dev/null +++ b/docs/ide/1.0/software-package.html @@ -0,0 +1,475 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

Software Package

+
+
+

The devon-ide requires a software package. This section explains how to build one yourself.

+
+
+
+
+

Layout

+
+
+

The software package is a simple ZIP file with the following structure:

+
+
+
    +
  • +

    software

    +
    +
      +
    • +

      eclipse

      +
    • +
    • +

      java

      +
    • +
    • +

      maven

      +
    • +
    • +

      tomcat

      +
    • +
    • +

      …​

      +
    • +
    +
    +
  • +
+
+
+

So for each tool you have a simple folder inside the software folder that is named after the tool by convention in lowercase. If a tool folder contains a bin folder the devon-ide will automatically add it to the beginning of your PATH variable (in the shell and not in your operating system). The entire concept implies that the tools are portable and do not require a real installation into the operating system (e.g. via Windows Registry settings). This way you can have multiple instances of the devon-ide "installed" on the same machine for different projects with different versions of the same tools that do not interfere with each other.

+
+
+

By intention the tool folders do not contain the version of the tool as configurations may refer to tools via their path and the idea is that the software package can be updated easy and smooth.

+
+
+
+
+

Customization of environment

+
+
+

If a tool needs extra initialization, you can create a batch file named ide-config.bat inside the tool directory. This file will be called during environment initialization (for example when a user opens console.bat), and allows you to do additional init work, without changing the main devon-ide script files.

+
+
+
+
+

Tools

+
+
+

For most of the tools you just download the official release and add the content to the folder in the layout above (resulting in e.g. software/maven/bin/mvn and NOT software/maven/apache-maven-3.2.0/bin/mvn). +However, there are some specials to care about.

+
+
+
+
+

Eclipse

+
+
+

Eclipse is typically the most complicated animal out of the tools. Here is a suggestion how to build your eclipse distribution:

+
+
+
    +
  1. +

    Download the latest stable version of Eclipse IDE for JEE developers from https://www.eclipse.org/downloads/

    +
  2. +
  3. +

    Unzip it to your software folder and launch it

    +
  4. +
  5. +

    Install CobiGen (incremental code-generator)

    +
  6. +
  7. +

    Install any edit tools (for easy compare with clipboard, etc.)

    +
  8. +
  9. +

    Install eclipse-cs (for checkstyle support)

    +
  10. +
  11. +

    Install spotbugs (successor of findbugs)

    +
  12. +
  13. +

    Install EclEmma (for JUnit code coverage)

    +
  14. +
  15. +

    Install subclipse (in case SVN support is needed. Exclude Subclipse Integration for Mylyn, Subversion Revision Graph and JNA Library)

    +
  16. +
  17. +

    Install STS (in case Spring Tools are needed. Install Spring IDE [AOP | Batch | Security] Extension, Spring IDE Maven Support, Eclipse Quicksearch and Spring IDE Core)

    +
  18. +
  19. +

    Install StartExplorer (for support to open current item in file manager of your OS)

    +
  20. +
  21. +

    Install TM Terminal (open terminal/shell inside Eclipse as view)

    +
  22. +
  23. +

    Install Enhanced Class Decompiler (to debug in classes where no sources are available)

    +
  24. +
  25. +

    Install github mylyn integration (for devonfw projects that want to access github issues in Eclipse)

    +
  26. +
  27. +

    Install Data Tools Platform (for DB viewer/access inside Eclipse)

    +
  28. +
  29. +

    Install SoapUI (for service testing)

    +
  30. +
  31. +

    Install regex util (to test regular expressions)

    +
  32. +
  33. +

    Install TemplateVariables (for advanced JDT templates)

    +
  34. +
  35. +

    If not already available install m2e (for maven support)

    +
  36. +
  37. +

    Install m2e-wtp (for maven integration into WTP, only required for non-spring-boot legacy JEE projects)

    +
  38. +
  39. +

    Ensure you have eclipse-m2e checked out and import this into your eclipse as existing Maven project. This will trigger the download and installation of some m2e extensions.

    +
  40. +
  41. +

    If not already available install egit (for git support)

    +
  42. +
  43. +

    In mylyn (Task List view) click Add Repository and then Install More Connectors…​

    +
    +
      +
    1. +

      Install hudson/jenkins connector

      +
    2. +
    +
    +
  44. +
  45. +

    Download lombok.jar into the eclipse folder (for implicit get/setter, equals and hashCode support)

    +
    +
      +
    1. +

      Start the lombok.jar to launch the installer UI.

      +
    2. +
    3. +

      Click Specify location…​ and choose your eclipse folder.

      +
    4. +
    5. +

      Click Install/Update.

      +
    6. +
    7. +

      Quit the installer

      +
    8. +
    +
    +
  46. +
  47. +

    Install sonar-ide (for SonarQube support - temporary omitted due to issue #13)

    +
  48. +
  49. +

    Optionally install DevStyle for a eye-friendly dark mode that really works (Eclipse default dark theme is completely broken and unusable)

    +
  50. +
  51. +

    Install additional plugins as needed

    +
  52. +
  53. +

    Remove any internal (non-public) update sites after installation

    +
  54. +
  55. +

    Test your Eclipse distribution.

    +
  56. +
+
+
+

Please read the license agreements of each plugin and only install what is suitable for you.

+
+
+
+
+

Creating the Software Package

+
+
+

Finally you create a ZIP from your software folder. Open it and ensure that it has the specified layout (it contains the software folder as root with the tool folders inside).

+
+
+

You may want to use our ant script for building the ZIP.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/software.html b/docs/ide/1.0/software.html new file mode 100644 index 00000000..0be608f6 --- /dev/null +++ b/docs/ide/1.0/software.html @@ -0,0 +1,408 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

software

+
+
+

The software folder contains the third party tools for your IDE such as maven, npm, java, etc. +With respect to the licensing terms you may create a custom archive containing a devonfw-ide together with the required software. +However, to be platform independent and allow lightweight updates, the devonfw-ide is capable to download and install the software automatically for you.

+
+
+
+
+

Repository

+
+
+

By default, software is downloaded via the internet from public download URLs of the according tools. However, some projects may need specific tools or tool versions that are not publicly available. +In such case, they can create their own software repository (e.g. in a VPN) and configure the base URL of it via DEVON_SOFTWARE_REPOSITORY variable. +Then, devonfw-ide will download all software from this repository only instead of the default public download URLs. +This repository (URL) should be accessible within your network via HTTPS (or HTTP) and without any authentication. +The repository needs to have the following structure:

+
+
+
+
${DEVON_SOFTWARE_REPOSITORY}/«tool»/«version»/«tool»-«version»[-«os»].tgz
+
+
+
+

So for every tool «tool» (java, maven, vscode, eclipse, etc.) you need to provide a folder in your repository. +Within this folder for every supported version «version» you need a subfolder. +This subfolder needs to contain the tool in that version for every operating system «os» (windows, linux, or mac - omitted if platform independent, e.g. for maven).

+
+
+
+
+

Shared

+
+
+

By default, each installation of devonfw-ide has its own physical installations of the required tools in the desired versions stored in its local software folder. +While this is great for isolation of devonfw-ide installations and to prevent side-effects, it can cause a huge waste of disc resources in case you are having many installations of devonfw-ide. +If you are a power-user of devonfw-ide with more then ten or even up to hundreds of installations on your machine, you might love to share installations of a software tool in a particular version between multiple devonfw-ide installations.

+
+
+ + + + + +
+ + +If you use this power-feature you are taking responsibility for side-effects and should not expect support. Also if you are using Windows please read Symlinks in Windows and make your mind if you really want to do so. You might also use this hint and maintain it manually without enabling the following feature. +
+
+
+

In order to do so, you only need to configure the variable DEVON_SOFTWARE_PATH in your ~/devon.properties pointing to an existing directory on your disc (e.g. /projects/software or C:\projects\software). +Then devonfw-ide will install required software into ${DEVON_SOFTWARE_PATH}/${software_name}/${software_version} as needed and create a symbolic link to it in ${DEVON_IDE_HOME}/software/${software_name}.

+
+
+

As a benefit, another devonfw-ide installation using the same software with the same version can re-use the existing installation and only needs to create the symbolic link. No more waste of having many identical JDK installations on your disc.

+
+
+

As a drawback, you need to be aware that specific tools may be "manipulated" after installation. +The most common case is that a tool allows to install plugins or extensions such as all IDEs do. Such "manipulations" will cause side-effects between the different devonfw-ide installations sharing the same version of that tool. +While this can also be a benefit it may also cause trouble. +If you have a sensitive project that should not be affected by such side-effects, you may again override the DEVON_SOFTWARE_PATH variable to the empty value in your ${DEVON_IDE_HOME}/conf/devon.properties of that sensitive installation:

+
+
+
+
DEVON_SOFTWARE_PATH=
+
+
+
+

This will disable this feature particularly for that specific sensitive devonfw-ide installation but let you use it for all other ones.

+
+
+
+
+

Custom

+
+
+

In some cases, a project might need a (proprietary) tool(s) that (are) not supported by devonfw-ide. A very simple solution is to get a release of devonfw-ide and add the tool(s) to the software folder and then distribute this modified release to your team. However, this has several drawbacks as you then have a fork of devonfw-ide and will lose your tool(s) when updating to a new release.

+
+
+

As a solution for this need, devonfw-ide lets you configure custom tools via the DEVON_IDE_CUSTOM_TOOLS variable. It can be defined in devon.properties of your settings git repository as an array of the custom tools you need to add. +Each entry applies:

+
+
+
    +
  • +

    It needs to have the form «tool»:«version»[:all][:«repository-url»]

    +
  • +
  • +

    The first entry must have the «repository-url» included which is used as default

    +
  • +
  • +

    Further entries will inherit this default if omitted

    +
  • +
  • +

    This URL is used in the same way as described above for a software repository.

    +
  • +
  • +

    The DEVON_SOFTWARE_REPOSITORY variable is ignored by this feature.

    +
  • +
  • +

    The optional infix :all is used to indicate that the tool is platform independent. Otherwise, an OS specific infix is appended to the URL file to download for your platform (windows, linux, or mac).

    +
  • +
+
+
+

As an example, we define it in ${DEVON_IDE_HOME}/settings/devon.properties:

+
+
+
+
DEVON_IDE_CUSTOM_TOOLS=(jboss-eap:7.1.4.GA:all:https://host.tld/projects/my-project firefox:70.0.1)
+
+
+
+

This will download and extract the following content to your software folder:

+
+ +
+

Please note that if you are not using windows, the -windows suffix will be -mac or -linux.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/sonar.html b/docs/ide/1.0/sonar.html new file mode 100644 index 00000000..e23ea84f --- /dev/null +++ b/docs/ide/1.0/sonar.html @@ -0,0 +1,290 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

sonar

+
+
+

The sonar commandlet allows to install, configure, and launch SonarQube.

+
+
+
Usage of devon sonar
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|setup |Setup SonarQube (install and verify) +|start |Start your local SonarQube server +|stop |Stop your local SonarQube server +|analyze |Analyze current project with SonarQube +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/structure.html b/docs/ide/1.0/structure.html new file mode 100644 index 00000000..94916de8 --- /dev/null +++ b/docs/ide/1.0/structure.html @@ -0,0 +1,301 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Structure

+
+
+

The directory layout of your devonfw-ide will look like this:

+
+
+
File structure of your devonfw-ide
+
+
/ projects (or C:\Projects, etc.)
+└──/ my-project ($DEVON_IDE_HOME)
+    ├──/ conf
+    ├──/ log
+    ├──/ scripts
+    ├──/ settings
+    ├──/ software
+    ├──/ system
+    ├──/ updates
+    ├──/ workspaces
+    ├── setup
+    ├── setup.bat
+    └── devon-ide-doc.pdf
+
+
+
+

The elements of the above structure are described in the individual sections. As they are hyperlinks you can simply click on them to get more details.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/system.html b/docs/ide/1.0/system.html new file mode 100644 index 00000000..7e6fbd4d --- /dev/null +++ b/docs/ide/1.0/system.html @@ -0,0 +1,280 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

system

+
+
+

The system folder contains documentation and solutions for operation system specific integration. Please have a look to get the maximum out of devonfw-ide and become a very efficient power user.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/updates.html b/docs/ide/1.0/updates.html new file mode 100644 index 00000000..bace9743 --- /dev/null +++ b/docs/ide/1.0/updates.html @@ -0,0 +1,293 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

updates

+
+
+

The updates folder is used for temporary data. This includes:

+
+
+
    +
  • +

    extracted archives for installation and updates

    +
  • +
  • +

    backups of old content on updates to prevent data loss

    +
  • +
+
+
+

If all works fine you may clean this folder to save some kilo- or mega-bytes. Otherwise, you can ignore it unless you are looking for a backup after a failed or unplanned upgrade.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/usage.html b/docs/ide/1.0/usage.html new file mode 100644 index 00000000..52d41edb --- /dev/null +++ b/docs/ide/1.0/usage.html @@ -0,0 +1,459 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

This section explains the usage of devonfw-ide according to your role:

+
+
+
    +
  • +

    Everybody should read and follow the usage for a developer.

    +
  • +
  • +

    In case you want to administrate devonfw-ide settings for your project, you should also read the usage for the ide-admin.

    +
  • +
+
+
+

Developer

+
+
+

As a developer you are supported to setup your IDE automated and fast while you can have a nice cup of coffee (after you provided settings-URL and accepted the license). +You only need the settings URL from your ide-admin. +Experienced developers can directly call setup «settings-URL». +Otherwise if you just call setup (e.g. by double-clicking it), you can enter it when you are prompted for Settings URL (using copy&paste to avoid typos).

+
+
+

Note: devonfw-ide supports autocompletion (since 2021.04.001). Currently this only works in bash (on windows use git bash). Simply type devon and hit [Tab] to get completion.

+
+
+
+
+

Update

+
+
+

To update your IDE (if instructed by your ide-admin), you only need to run the following command:

+
+
+
+
devon ide update
+
+
+
+

Please note that windows is using file-locking what can have ugly side-effects. +To be safe, you should have your IDE tools shut down before invoking the above update command. +E.g. if a tool needs to be updated, the old installation folder will be moved to a backup and the new version is installed on top. +If there are windows file locks in place this can fail and mess up things. +You can still delete the according installation from your software folder and rerun devon ide update if you ran into this error.

+
+
+
+
+

Working with multiple workspaces

+
+
+

If you are working on different branches in parallel you typically want to use multiple workspaces.

+
+
+
    +
  1. +

    Go to the workspaces folder in your ${DEVON_IDE_HOME} and create a new folder with the name of your choice (e.g. release2.1).

    +
  2. +
  3. +

    Check out (git clone …​) the according projects and branch into that workspace folder.

    +
  4. +
  5. +

    Open a shell in that new workspace folder (cd to it) and according to your IDE run e.g. eclipse, vscode, or intellij to create your workspace and launch the IDE. You can also add the parameter create-script to the IDE commandlet in order to create a launch-script for your IDE.

    +
  6. +
+
+
+

You can have multiple instances of eclipse running for each workspace in parallel. To distinguish these instances you will find the workspace name in the title of eclipse.

+
+
+
+
+

Admin

+
+
+

You can easily customize and configure devonfw-ide for the requirements of your project. +In order to do so, you need to create your own project-specific settings git repository and provide the URL to all developers for the setup. +With tools such as gitlab, bitbucket or github every developer can easily propose changes and improvements. +However, we suggest that one team member is responsible to ensure that everything stays consistent and works. +We will call this person the ide-admin of your project.

+
+
+

The following are the suggested step-by-step instructions how an ide-admin should prepare devonfw-ide for his new project:

+
+
+
    +
  1. +

    Fork ide-settings to a git repository specific for your project (e.g. a new project in the gitlab of your production-line instance). In case you are using github, all you need to do is use the Fork button. In other cases simply create a new and empty git repository and clone this to your machine. Then add the default ide-settings as origin, fetch and pull from it:

    +
    +
    +
    git remote add upstream https://github.com/devonfw/ide-settings.git
    +git fetch upstream
    +git pull upstream master
    +git push
    +
    +
    +
    +

    Now you should have a full fork as a copy of the settings git repo with all its history that is ready for upstream merges.

    +
    +
  2. +
  3. +

    Study the structure of this git repository to understand where to find which configuration.

    +
  4. +
  5. +

    Study the configuration and understand that general settings can be tweaked in the toplevel devon.properties file of your settings git repository.

    +
  6. +
  7. +

    Configure the tools and their versions for your project. Here is an example:

    +
    +
    +
    DEVON_IDE_TOOLS=(java mvn eclipse)
    +ECLIPSE_VERSION=2020-06
    +##use e.g. 8u242b08 for Java 8
    +#JAVA_VERSION=8u242b08
    +JAVA_VERSION=11.0.5_10
    +MAVEN_VERSION=3.6.2
    +
    +
    +
    +

    This way you will take over control of the tools and their versions for every developer in your project team and ensure that things get reproducible.

    +
    +
  8. +
  9. +

    In case you need a proprietary or unsupported tool, you can study how to include custom tools.

    +
  10. +
  11. +

    In case you have very restrictive policies about downloading tools from the internet, you can create and configure a software repository for your project or company.

    +
  12. +
  13. +

    Some of the tools (especially the actual IDEs) allow extensions via plugins. You can customize them to your needs for eclipse, VS code, or intelliJ.

    +
  14. +
  15. +

    In your settings git repository you will find a projects folder. Here you will find configurations files for every git project relevant for your actual project. Feel free to create new projects for your needs and delete the devonfw specific default projects. The projects documentation will explain you how to do this.

    +
  16. +
  17. +

    For every IDE you will also find an according folder in your settings git repository. Here are the individual configuration settings for that IDE. You can change them by directly editing the according configuration files directly with a text-editor in your settings git repository. However, this is a really complex way and will take you a lot of time to find the right file and property to tweak for your actual need. Instead we suggest to study +how to customize IDE specific settings.

    +
  18. +
  19. +

    You may also create new sub-folders in your settings git repository and put individual things according to your needs. E.g. you could add scripts for greasemonkey or tampermonkey, as well as scripts for your database or whatever may be useful and worth to share in your team. However, to share and maintain knowledge we recommend to use a wiki instead.

    +
  20. +
  21. +

    You may want to customize the Eclipse spellchecker dictionary for your project and your language.

    +
  22. +
+
+
+

All described in the above steps (except the first one) can be used to manage and update the configuration during the project lifecycle. +However, when you have made changes, especially in a larger project, please consider the following best practices to avoid a large team getting blocked by a non-functional IDE:

+
+
+
    +
  • +

    Commit your changes to a feature-branch.

    +
  • +
  • +

    First test the changes yourself.

    +
  • +
  • +

    If all works as expected, pick a pilot user of the team to test the changes from the feature branch (go to settings folder, git fetch, git checkout -t origin/feature/«name», devon ide update).

    +
  • +
  • +

    Only after that works well for a couple of days, inform the entire team to update.

    +
  • +
+
+
+
+
+

Announce changes to your team

+
+
+

In order to roll out the perfectly configured devonfw-ide to your project initially or when new members join, you only have to provide the Settings URL to the developers of your team. +You can also provide a specific branch with Settings URL#branch to use variations of common settings or to test new settings before making them public to the team.

+
+
+

After you have changed and tested your settings git repository (main branch), you only need to announce this to your developers (e.g. via email or some communication tool) so that they can run devon ide update and automatically get up-to-date with the latest changes (see update).

+
+
+

In case you want to go to a new version of devonfw-ide itself, developers have to call devon ide update scripts.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/variables.html b/docs/ide/1.0/variables.html new file mode 100644 index 00000000..a06bf4b1 --- /dev/null +++ b/docs/ide/1.0/variables.html @@ -0,0 +1,312 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Variables

+
+
+

The devonfw-ide defines a set of standard variables to your environment for configuration via variables[.bat] files. +These environment variables are described by the following table. +Those variables printed bold are also exported in your shell (except for windows CMD that does not have such concept). Variables with the value - are not set by default but may be set via configuration to override defaults. +Please note that we are trying to minimize any potential side-effect from devonfw-ide to the outside world by reducing the number of variables and only exporting those that are required.

+
+
+
Variables of devonfw-ide
+

|== == == == == == == == == == == = +|Variable|Value|Meaning +|DEVON_IDE_HOME|e.g. /projects/my-project|The top level directory of your devonfw-ide structure. +|PATH|$PATH:$DEVON_IDE_HOME/software/java:…​|You system path is adjusted by devon command. +|DEVON_HOME_DIR|~|The platform independent home directory of the current user. In some edge-cases (e.g. in cygwin) this differs from ~ to ensure a central home directory for the user on a single machine in any context or environment. +|DEVON_IDE_TOOLS|(java mvn node npm)|List of tools that should be installed and upgraded by default for your current IDE. +|DEVON_IDE_CUSTOM_TOOLS|-|List of custom tools that should be installed additionally. See software for further details. +|DEVON_CREATE_START_SCRIPTS|(eclipse vscode)|List of IDEs that shall be used by developers in the project and therefore start-scripts are created on setup. +|DEVON_OLD_PATH|…​|A "backup" of PATH before it was extended by devon to allow recovering it. Internal variable that should never be set or tweaked. +|WORKSPACE|main|The workspace you are currently in. Defaults to main if you are not inside a workspace. Never touch this variable in any variables file. +|WORKSPACE_PATH|$DEVON_IDE_HOME/workspaces/$WORKSPACE|Absolute path to current workspace. Never touch this variable in any variables file. +|JAVA_HOME|$DEVON_IDE_HOME/software/java|Path to JDK +|SETTINGS_PATH|$DEVON_IDE_HOME/settings|Path to your settings. To keep oasp4j-ide legacy behaviour set this to $DEVON_IDE_HOME/workspaces/main/development/settings. +|M2_REPO|$DEVON_IDE_HOME/conf/.m2/repository|Path to your local maven repository. For projects without high security demands, you may change this to the maven default ~/.m2/repository and share your repository among multiple projects. +|MAVEN_HOME|$DEVON_IDE_HOME/software/maven|Path to Maven +|MAVEN_OPTS|-Xmx512m -Duser.home=$DEVON_IDE_HOME/conf|Maven options +|DEVON_SOFTWARE_REPOSITORY|-|Project specific or custom software-repository. 
+|DEVON_SOFTWARE_PATH|-|Globally shared user-specific local software installation location. +|ECLIPSE_VMARGS|-Xms128M -Xmx768M -XX:MaxPermSize=256M|JVM options for Eclipse +|deprecated: ECLIPSE_PLUGINS|-|Array with "feature groups" and "update site URLs" to customize required eclipse plugins. Deprecated - see Eclipse plugins. +|«TOOL»_VERSION|-|The version of the tool «TOOL» to install and use (e.g. ECLIPSE_VERSION or MAVEN_VERSION). +|EXTRA_JAVA_VERSION|-|An additional (newer) version of java that will be used to run java-based IDEs (e.g. eclipse or intellij). +|«TOOL»_BUILD_OPTS|e.g.clean install|The arguments provided to the build-tool «TOOL» in order to run a build. +|«TOOL»_RELEASE_OPTS|e.g.clean deploy -Dchangelist= -Pdeploy|The arguments provided to the build-tool «TOOL» in order to perform a release build. +|DEVON_IDE_TRACE||If value is not an empty string, the devonfw-ide scripts will trace each script line executed. For bash two lines output: before and again after expansion. ATTENTION: This is not a regular variable working via devon.properties. Instead manually do export DEVON_IDE_TRACE=true in bash or DEVON_IDE_TRACE=true in windows CMD before running a devon command to get a trace log that you can provide to experts in order to trace down a bug and see what went wrong. +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/vscode.html b/docs/ide/1.0/vscode.html new file mode 100644 index 00000000..d4509d45 --- /dev/null +++ b/docs/ide/1.0/vscode.html @@ -0,0 +1,353 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

vscode

+
+
+

The vscode commandlet allows to install, configure, and launch Visual Studio Code. +To launch VSCode for your current workspace and devonfw-ide installation, simply run: +devon vscode

+
+
+

You may also supply additional arguments as devon vscode «args». These are explained by the following table:

+
+
+
Usage of devon vscode
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +|--all |if provided as first arg then to command will be invoked for each workspace +|setup |setup VSCode (install or update) +|add-plugin «id»|install an additional plugin (extension) +|run |launch VSCode (default if no argument is given) +|start |same as run +|ws-up[date] |update workspace +|ws-re[verse] |reverse merge changes from workspace into settings +|ws-reverse-add|reverse merge adding new properties +|create-script |create launch script for this IDE, your current workspace and your OS +|== == == == == == == == == == == =

+
+
+
+
+

plugins

+
+
+

To be productive with VS Code you need plugins (called extensions in VS Code). Of course devonfw-ide can automate this for you: +In your settings git repository create a folder vscode/plugins (click this link to see more examples and see which plugins come by default). +Here you can create a properties file for each plugin. This is an example devonfw-extension-pack.properties:

+
+
+
+
plugin_id=devonfw.devonfw-extension-pack
+plugin_active=true
+
+
+
+

The variables are defined as following:

+
+
+
    +
  • +

    plugin_id defines the unique ID of the plugin to install. If you want to customize devonfw-ide with new plugins click on Extensions at the bottom of the left navigation icon bar in VS code. Then use the search to find the plugin of your choice. If you click on it the plugin ID is displayed in grey beside the official title at the top of the plugin details page. Copy & paste the ID from here to make up your own custom config.

    +
  • +
  • +

    plugin_active is an optional parameter. If it is true (default) the plugin will be installed automatically during the project setup for all developers in your team. Otherwise developers can still install the plugin manually via devon vscode add-plugin «plugin-name» from the config file settings/vscode/plugins/«plugin-name».properties. See the settings/vscode/plugins folder for possible values of «plugin-name».

    +
  • +
+
+
+

In general you should try to stick with the configuration pre-defined by your project. But some plugins may be considered as personal flavor and are typically not predefined by the project config. Such plugins should be shipped with your settings as described above with plugin_active=false allowing you to easily install it manually. Surely, you can easily add plugins via the UI of VS code. However, be aware that some plugins may collect sensitive data or could introduce other vulnerabilities. So consider the governance of your project and talk to your technical lead before installing additional plugins that are not pre-defined in your settings.

+
+
+

As maintainer of the settings for your project you should avoid to ship too many plugins that may waste resources but are not used by every developer. By configuring additional plugins with plugin_active=false you can give your developers the freedom to install some additional plugins easily.

+
+
+
+
+

cleaning plugins on update

+
+
+

If you want to strictly manage the plugins for VS code in your project, you can create or edit the file settings/vscode/plugins in your settings and add this variable:

+
+
+
+
clean_plugins_on_update=true
+
+
+
+

This will wipe all plugins when an update of VS code is performed (e.g. via devon ide update) and reinstall all configured plugins. While this gives you more control over the governance of the plugins and allows to remove a plugin later during project lifecycle. However, this will delete all manually installed plugins automatically without asking.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/workspaces.html b/docs/ide/1.0/workspaces.html new file mode 100644 index 00000000..3fc7508b --- /dev/null +++ b/docs/ide/1.0/workspaces.html @@ -0,0 +1,302 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

workspaces

+
+
+

The workspaces folder contains folders for your active work. There is a workspace folder main dedicated for your primary work. You may do all your work inside the main workspace. Also, you are free to create any number of additional workspace folders named as you like (e.g. test, release, testing, my-sub-project, etc.). Using multiple workspaces is especially relevant for Eclipse as each workspace has its own Eclipse runtime instance and configuration.

+
+
+

Within the workspace folder (e.g. workspaces/main) you are again free to create sub-folders for (sub-)projects according to your needs. We assume that in most cases you clone git repositories here. The following structure shows an example layout for devonfw:

+
+
+
File structure of workspaces
+
+
/ workspaces
+├──/ main
+│  ├──/ .metadata
+│  ├──/ ide
+│  ├──/ devon4j
+│  └──/ my-thai-star
+└──/ stable
+   ├──/ .metadata
+   ├──/ ide
+   └──/ devon4j
+
+
+
+

In the main workspace you may find the cloned forks for regular work (in the example e.g. devon4j) as a base to create pull-requests while in the stable workspace there is a clone of devon4j from the official devon4j. +However, this is just an example. Some people like to create separate workspaces for development and maintenance branches with git. Other people just switch between those via git checkout.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/ide/1.0/yarn.html b/docs/ide/1.0/yarn.html new file mode 100644 index 00000000..260d9876 --- /dev/null +++ b/docs/ide/1.0/yarn.html @@ -0,0 +1,296 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

yarn

+
+
+

The yarn commandlet allows to install, configure, and launch yarn. Calling devon yarn «args» is more or less the same as calling yarn «args» but with the benefit that the version of yarn preferred by your project is used (and will be installed if not yet available).

+
+
+

The arguments (devon yarn «args») are explained by the following table:

+
+
+
Usage of devon yarn
+

|== == == == == == == == == == == = +|Argument(s) |Meaning +| |run default build, configurable via YARN_BUILD_OPTS +|setup |setup yarn (install and verify), configurable via YARN_VERSION +|get-version |print the version of your current project +|set-version «nv» [«cv»] |set the version of your current project to «nv» (assuming your current version is «cv») +|check-top-level-project |check if you are running on a top-level project or fail if in a module or no NPM project at all +|release |start a clean deploy release build, configurable via YARN_RELEASE_OPTS +|«args» |run yarn with the given arguments («args») +|== == == == == == == == == == == =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/index.html b/docs/index.html new file mode 100644 index 00000000..bc6e5ea4 --- /dev/null +++ b/docs/index.html @@ -0,0 +1,8 @@ + + + + + +Redirect Notice +

Redirect Notice

+

The page you requested has been relocated to devonfw.github.io/1.0/index.html.

diff --git a/docs/jump-the-queue/1.0/BuildOASP4FnApplication.html b/docs/jump-the-queue/1.0/BuildOASP4FnApplication.html new file mode 100644 index 00000000..cd76256f --- /dev/null +++ b/docs/jump-the-queue/1.0/BuildOASP4FnApplication.html @@ -0,0 +1,767 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Build your OASP4Fn Application

+
+
+

In this chapter we are going to build a serverless back-end with OASP4Fn. The main objective of this tutorial is to take an initial contact with OASP4Fn and the necessary tools we are going to use in the development, so at the end of it, the user will be enough confident to start developing a new project with OASP4Fn without problems.

+
+
+
+
+

Install Tools

+
+
+

In this section we’re going to introduce all the necessary tools we’re going to need to start programming, and an initial configuration where necessary.

+
+
+
+
+

Visual Studio Code

+
+
+

Download the installer from the official page and install it. +Once installed, the first thing you should do is install the extensions that will help you during the development; to do that, follow these steps:

+
+
+
    +
  1. +

    Install Settings Sync extension.

    +
  2. +
  3. +

    Open the command palette (CTRL+Shift+P) and introduce the command: Sync: Download Settings.

    +
  4. +
  5. +

    Provide GIST ID: 3b1d9d60e842f499fc39334a1dd28564.

    +
  6. +
+
+
+

In the case that you are unable to set up the extensions using the method mentioned, you can also use the scripts provided in this repository.

+
+
+
+
+

NodeJS

+
+
+

Go to the node.js official page and download the version you like the most, the LTS or the Current as you wish.

+
+
+
+
+

Typescript

+
+
+

Let’s install what is going to be the main language during development: TypeScript. This is an ES6 superset that will help us get clean and distributable final JavaScript code. It is installed globally with npm, the package manager used to install and create JavaScript modules in NodeJS, which is installed along with Node, so to install TypeScript you don’t have to install npm explicitly; just run this command:

+
+
+

npm install -g typescript

+
+
+
+
+

Yarn

+
+
+

Like npm, Yarn is a package manager; the difference is that Yarn is considerably faster and more usable, so we decided to use it to manage the dependencies of OASP4Fn projects.

+
+
+

To install it you only have to go to the official installation page and follow the instructions.

+
+
+

Even though, if you feel more comfortable with npm, you can remain using npm, there is no problem regarding this point.

+
+
+
+
+

Serverless

+
+
+

Lastly, we are going to install the serverless framework, which is going to help us deploy our handlers to the provider we have chosen.

+
+
+

npm install -g serverless

+
+
+
+
+

Postman

+
+
+

Postman is an app that helps you build HTTP requests and send them to a server through any of the HTTP methods. This tool will be useful at the end of the tutorial when we are going to run our handlers locally and send POST HTTP requests to them.

+
+
+
+
+

Starting our Project through a Template

+
+
+

To start with the tutorial we are going to use the oasp4fn application template, so use the following command to clone it in your local machine:

+
+ +
+

Before continuing, remember to replace the remote repository with one that you own:

+
+
+
+
cd jumpTheQueue\
+git remote remove origin
+git remote add origin <your-git-repository>
+
+
+
+

This template comes with the structure that an OASP4Fn application has to have and the skeleton of some handlers. These handlers are stored in event folders, which we can add or remove according to our needs; since we are only going to use HTTP events, we are going to go into the cloned folder and remove the S3 folder inside the handlers and test folders:

+
+
+
+
rm handlers\S3\ -r
+rm test\S3\ -r
+
+
+
+

It only remains to install the base dependencies of our code using yarn, so we just have to run:

+
+
+

yarn

+
+
+
+
+

Local Database Setup

+
+
+

The database we are going to use during this tutorial is dynamodb, the NoSQL database provided by AWS, which is supported by OASP4Fn. +First you have to download and start it following Amazon Official Documentation, once you have downloaded DynamoDB on your computer, open the corresponding shell using the local endpoint:

+
+
+
+
http://localhost:8000/shell/
+
+
+
+

And an interactive shell will be opened in your default browser like this:

+
+
+
+Dynamo DB Shell +
+
+
+

Now we are going to create a table called Queue with the opened shell. To do that, write createTable in the text pane situated at the left side of the screen and press CTRL + Space; this will generate a template object specifying the properties that have to be passed to the create function, so we have to modify that object, ending up with something like this:

+
+
+
+
var params = {
+    TableName: 'Queue',
+    KeySchema: [ // The type of schema.  Must start with a HASH type, with an optional second RANGE.
+        { // Required HASH type attribute
+            AttributeName: 'code',
+            KeyType: 'HASH',
+        }
+    ],
+    AttributeDefinitions: [ // The names and types of all primary and index key attributes only
+        {
+            AttributeName: 'code',
+            AttributeType: 'S', // (S | N | B) for string, number, binary
+        },
+        // ... more attributes ...
+    ],
+    ProvisionedThroughput: { // required provisioned throughput for the table
+        ReadCapacityUnits: 1,
+        WriteCapacityUnits: 1,
+    }
+};
+dynamodb.createTable(params, function(err, data) {
+    if (err) ppJson(err); // an error occurred
+    else ppJson(data); // successful response
+
+});
+
+
+
+

Finally press CTRL + Enter, and if we have specified the properties properly an output with the table description will be displayed in the console at the left side:

+
+
+
+Table Description +
+
+
+
+
+

AWS Credentials

+
+
+

Although we are going to use a local instance, aws-sdk is going to look for the credentials required for the configuration and an error will be raised if the credentials are missing, so for that reason we are going to add a credentials file in an .aws folder in our home directory. That said, first of all create the folder with the following commands:

+
+
+
+
cd %HOME% #or only 'cd' if you are in a Unix based OS
+mkdir .aws
+
+
+
+

Once you have created the folder, add a file inside called credentials and write the following:

+
+
+
+
[default]
+aws_access_key_id = your_key_id
+aws_secret_access_key = your_secret_key
+
+
+
+

There is no need to put real credentials in the file as we are going to work locally in this tutorial; you can leave it as above, without replacing your_key_id or your_secret_key, so the sdk will inject the credentials and won’t throw any error. But if you already have credentials, feel free to put them there, so you have them well located for future development.

+
+
+

Finally, it’s worth saying that there are more ways to pass the credentials to the sdk, but this is the best in our case; for more information about credentials take a look at the +official documentation.

+
+
+
+
+

Adding Types

+
+
+

The template we have cloned comes with a types declaration file at the root of the handlers folder with types for the AWS lambda service and events, but we must add more types for the data we are going to manage, so we are going to export an interface Visitor and an interface Code in our declaration file, which will look like this:

+
+
+
+
export interface Visitor {
+    name: string;
+    email: string;
+    phone: string;
+}
+
+export interface Code {
+    code: string;
+    dateAndTime: number;
+}
+
+
+
+
+
+

Start the Development

+
+
+

Now that we have already finished the setup of our project, we are going to add our handlers based on our design:

+
+
+
    +
  • +

    One that will add the visitor to the queue

    +
  • +
  • +

    And other to get your position in the queue

    +
  • +
+
+
+

Both of the handlers will be triggered by HTTP events with a post method, so we should delete the rest of the methods that we are not going to use, both in the handlers and test folders. Once we have done that, we are going to modify our initial handler in the template following the next steps:

+
+
+
    +
  1. +

    Rename the template handler to register-handler.ts

    +
  2. +
  3. +

    Install the Lodash package through yarn add <package_name> and import it.

    +
  4. +
  5. +

    Import the fn-dynamo adapter.

    +
  6. +
  7. +

    Add our Visitor interface that we added to the types.d.ts file.

    +
  8. +
  9. +

    Set the dynamo adapter to oasp4fn as the database adapter.

    +
  10. +
  11. +

    Specify the configuration to this concrete handler, in this case only the path property is necessary.

    +
  12. +
  13. +

    Rename the handler.

    +
  14. +
  15. +

    Write the logic of our function with the imported adapter.

    +
  16. +
+
+
+

But before writing the logic of our handler, we are going to add some utility functions to the utils.ts file at the root of our handlers folder, and export them, so those functions can be imported in our handler:

+
+
+
+
import * as _ from 'lodash';
+import { Visitor } from './types';
+
+const ALPHABET = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz';
+
+export let getRandomCode = (len: number) => {
+    if (!Number.isFinite(len) || len < 1) {
+	throw new TypeError('Invalid code lenght');
+    }
+
+    let str = '';
+    while(len > 0) {
+        str += ALPHABET[_.random(Number.MAX_SAFE_INTEGER) % ALPHABET.length];
+        --len;
+    }
+
+	return str;
+};
+
+export let validateVisitor = (visitor: Visitor) => {
+    let ok = true;
+
+    _.some(visitor, (value, key) => {
+        switch (key) {
+            case 'phone':
+                ok = /^(\d+\s?)+\d+$/.test(value);
+                break;
+            case 'email':
+                ok = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/.test(value);
+                break;
+        }
+        return !ok;
+    })
+
+    return ok;
+};
+
+export let isVisitor = (object: any): object is Visitor => {
+    return 'name' in object && 'phone' in object && 'email' in object;
+}
+
+
+
+

So the handler that registers the visitor in the queue will take the visitor information, generate a unique code with the utility function above, insert it into our database, and return as the result of the handler the generated code and the time of the visit, so the resulting handler will look like this:

+
+
+
+
import oasp4fn from '@oasp/oasp4fn';
+import dynamo from '@oasp/oasp4fn/dist/adapters/fn-dynamo';
+import { HttpEvent, Context, Visitor } from '../../types';
+import * as _ from 'lodash';
+import { getRandomCode, validateVisitor, isVisitor } from '../../utils';
+
+oasp4fn.setDB(dynamo);
+
+oasp4fn.config({path: 'register'});
+export async function register (event: HttpEvent, context: Context, callback: Function) {
+    try {
+        let visitor = event.body;
+
+        if(!isVisitor(visitor) || !validateVisitor(visitor))
+            throw new Error();
+
+        let date = new Date();
+        date.setDate(date.getDate() + 1);
+
+        let code: string | undefined;
+        while(!code) {
+            let aux = getRandomCode(3);
+            let res = await oasp4fn.table('Queue', aux).promise();
+            if(!res)
+                code = aux;
+        }
+
+        let result = { code: code, dateAndTime: Date.parse(date.toDateString())};
+        await oasp4fn.insert('Queue', _.assign(visitor, result)).promise();
+        callback(null, result);
+    }
+    catch(err){
+        callback(new Error('[500] Cannot register the visitor to the queue'));
+    }
+}
+
+
+
+

The second and last handler for the application will be the one that returns the full queue or part of it, by passing full or partial information of a visitor or, in the case of the full queue, an empty object. To achieve that we will have to create a new file in the same directory as the last one and name it search-handler.ts; next we are going to repeat steps 3 to 8, so we will have the following handler:

+
+
+
+
import oasp4fn from '@oasp/oasp4fn';
+import dynamo from '@oasp/oasp4fn/dist/adapters/fn-dynamo';
+import { HttpEvent, Context } from '../../types';
+
+oasp4fn.setDB(dynamo);
+
+oasp4fn.config({path: 'search'});
+export async function search (event: HttpEvent, context: Context, callback: Function) {
+    try {
+        let visitor = event.body;
+        let res = await oasp4fn.table('Queue')
+                        .filter(visitor)
+                        .promise();
+        callback(null, res);
+    }
+    catch(err){
+        callback(new Error('[500] Cannot get the queue'));
+    }
+}
+
+
+
+
+
+

Generating the Configuration Files

+
+
+

In this part we are going to learn how to generate the configuration files that we are going to use to build and deploy our handlers. The first step is to add the configuration in the oasp4fn.config.js file, but since no configuration beyond the default one is necessary in this tutorial, we are going to remove that file:

+
+
+

rm oasp4fn.config.js

+
+
+

Finally we can execute the command:

+
+
+

yarn fun

+
+
+

And if all goes well, two files, serverless.yml and webpack.config.json, will be generated and we will see this command line output:

+
+
+
+FUN Output +
+
+
+
+
+

Build and Run your Handlers Locally

+
+
+

To execute our handlers locally we will make use of the serverless-offline plugin, which emulates a local API-gateway that lets you build your handlers through webpack and send HTTP requests to them, so run:

+
+
+

yarn offline

+
+
+
+
+

==

+
+
+

To run this command you must have the serverless.yml file generated, and the serverless-offline plugin specified in the plugin section (that is automatically added by the default configuration of OASP4Fn). To search for more information about the serverless plugins, you can dive into the serverless documentation. +== ==

+
+
+

and you will see the following output:

+
+
+
+Offline +
+
+
+

And when the webpack rebuild line appears you can start to send requests to the specified endpoints, so open Postman and create a visitor by sending a POST request to the register endpoint:

+
+
+
+Postman Register +
+
+
+

After this, test your other handler by sending an empty object with the POST HTTP request, and see how our handler returns the inserted visitor:

+
+
+
+Postman Search +
+
+
+
+

Next Chapter: Test your OASP4Fn App

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/OASP4FnDeployment.html b/docs/jump-the-queue/1.0/OASP4FnDeployment.html new file mode 100644 index 00000000..5e411d95 --- /dev/null +++ b/docs/jump-the-queue/1.0/OASP4FnDeployment.html @@ -0,0 +1,318 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Deployment of OASP4Fn Applications

+
+
+

The deployment is performed by the serverless framework and it’s quite simple, you only have to run the following command to deploy the full service:

+
+
+

sls deploy

+
+
+
+
+

==

+
+
+

This command will fail if the AWS credentials are missing. For more information see serverless credentials +== ==

+
+
+

When the deployment finishes with no errors, you will have a command line output with the endpoints and the functions deployed.

+
+
+
+AWS Deploy +
+
+
+

Note that until now we have been working locally, so if we are going to deploy our handlers and make them work, we should change the endpoint of our services:

+
+
+
+
oasp4fn.setDB(dynamo, {endpoint: 'https://dynamodb.us-west-2.amazonaws.com'});
+
+
+
+
+
+

==

+
+
+

You can get more information about the deployment in the serverless documentation, but be aware that OASP4Fn doesn’t support the deployment of a single function. +== ==

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/OASP4FnIntroduction.html b/docs/jump-the-queue/1.0/OASP4FnIntroduction.html new file mode 100644 index 00000000..336f96f1 --- /dev/null +++ b/docs/jump-the-queue/1.0/OASP4FnIntroduction.html @@ -0,0 +1,506 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

OASP4Fn Introduction

+
+
+

Serverless is a framework that allows developers to build auto-scalable applications, pay-per-execution, event-driven apps on AWS Lambda, Microsoft Azure, IBM OpenWhisk and Google Cloud Platform.

+
+
+

OASP4Fn is an npm package full of functionality independent of the goal of the developments made over this framework. It provides many different features following this approach in order to allow developers to use the ones that fit their needs to build, test and deploy applications in an easy, fast and clean way using the Serverless framework.

+
+
+
+
+

Serverless Computing

+
+
+

Serverless computing consists of the following concepts and benefits:

+
+
+
    +
  • +

    Functions as a Service (FaaS).

    +
  • +
  • +

    Cloud provider automatically manages starting, stopping and scaling instances for functions.

    +
  • +
  • +

    More cost-efficient.

    +
  • +
  • +

    The business or person that owns the system does not have to purchase, rent or provision servers or virtual machines for the back-end code to run on.

    +
  • +
  • +

    It can be used in conjunction with code written in traditional server style, such as microservices.

    +
  • +
  • +

    Functions in FaaS are triggered by event types defined by the provider. For example:

    +
    +
      +
    • +

      HTTP requests.

      +
    • +
    • +

      AWS S3 updates.

      +
    • +
    • +

      Messages added to a message bus.

      +
    • +
    +
    +
  • +
+
+
+

Besides the automatic horizontal scaling, the biggest benefit is that you only pay for the compute that you need. Depending on your traffic scale and shape this may be a huge economic win in many projects.

+
+
+

It solves common and inefficient situations like occasional requests, where an application with a few small requests per minute leaves the CPU idle most of the time, or inconsistent traffic, where the traffic profile of an application is very irregular. For example:

+
+
+
+Inconsistent Traffic Pattern +
+
+
+

In a traditional environment you may need to increase your total hardware capability by a factor of 10 to handle the spikes, even though they only account for less than 4% of total machine up-time.

+
+
+
+
+

Installation

+
+
+

OASP4Fn is distributed as a npm package that you can install in your NodeJS application running in a terminal the following command:

+
+
+
+
$ npm install @oasp/oasp4fn
+##or if you have yarn installed
+$ yarn add @oasp/oasp4fn
+
+
+
+
+
+

What does OASP4Fn provide on Serverless?

+
+
+

The following picture shows what OASP4Fn offers in order to facilitate Serverless development.

+
+
+
+OASP4Fn Adds +
+
+
+

Currently, the OASP4Fn architecture defines a series of adapters to help developers to build applications and make use of different cloud providers.

+
+
+
+OASP4Fn Available +
+
+
+
+
+

TypeScript

+
+
+

The Serverless framework is not prepared by default to use TypeScript. OASP4Fn allows the developer to use TypeScript as the programming language in any development, as it provides the types definitions and a pre-configured webpack building system that automates the transpilation of the different handlers.

+
+
+
+
+

Infrastructure as Code

+
+
+

The service made with OASP4Fn must follow a specified structure, which, along with a configuration file, will allow the user to avoid having to configure the service manually.

+
+
+
+
/handlers
+	/Http
+		/get
+			handler1.ts
+			handler2.ts
+			…
+			handlerN.ts
+		/post
+			handler1.ts
+			handler2.ts
+		/put
+			…
+	/S3
+	...
+	/{EventName}
+		/{TriggerMethod}
+			{HandlerName}.ts
+
+
+
+

The logic of our application must be stored in a folder called handlers; inside it we will have a folder for each event used to trigger the handlers and, inside that, a folder with the name of the method that triggers the handler.

+
+
+

In addition to what was specified before, the file oasp4fn.config.js specifies the configuration of the events, the deployment information and the runtime environment in which the handlers will run.

+
+
+
+
+

Annotations

+
+
+

In addition to the configuration file, we can specify to each handler a concrete configuration, related to the event that triggers the handler, using a dummy function that adds or modifies the configuration specified in OASP4Fn configuration file.

+
+
+
+
// ...
+oasp4fn.config({path: 'attachments/{id}'});
+export async function getAttachment (event: HttpEvent, context: Context, callback: Function) {
+    // ...
+}
+
+
+
+

These annotations will be only interpreted by the framework, so they do not inject or add any kind of functionality to the actual handler.

+
+
+
+
+

Cloud Adapters

+
+
+

OASP4Fn also comes with a simple interface that allows the user to have access to different services of cloud providers using adapters.

+
+
+

That interface makes use of these adapters to retrieve data for the user through Promises, and lets the user query the retrieved data.

+
+
+

Currently available adapters:

+
+
+
    +
  • +

    AWS

    +
    +
      +
    • +

      AWS DynamoDB

      +
    • +
    • +

      AWS S3

      +
    • +
    • +

      AWS Cognito

      +
    • +
    +
    +
  • +
+
+
+
+
+

Command Line Interface

+
+
+

OASP4Fn provides a simple command line interface, that using the resources and the information provided by Infrastructure as Code, will help the user generate the proper files to build, deploy and test our application.

+
+
+
+
Usage: oasp4fn [provider] [options]
+   or: fun [provider] [options]
+
+Supported Providers: aws (by default aws)
+
+Options:
+  -o, --opts file       file with the options for the yml generation
+  -p, --path directory  directory where the handlers are stored
+  -e, --express         generates an express app.ts file
+  -h, --help            display the help
+
+
+
+
+

Next Chapter: Build your OASP4Fn App

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/OASP4FnTesting.html b/docs/jump-the-queue/1.0/OASP4FnTesting.html new file mode 100644 index 00000000..66a3816c --- /dev/null +++ b/docs/jump-the-queue/1.0/OASP4FnTesting.html @@ -0,0 +1,543 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Testing of OASP4Fn Applications

+
+
+

In this chapter we are going to learn how to test applications in OASP4Fn using the mocha framework and the chai assertion library.

+
+
+
+
+

Install Global Dependencies

+
+
+

As the title says, in this section we’re going to specify the global dependencies that we need to run our tests, which are only the test framework and the TypeScript interpreter:

+
+
+
+
yarn global add mocha
+yarn global add ts-node
+
+
+
+

No more dependencies are needed, because we started our project with the oasp4fn application template and the local part of the test dependencies is specified as dev-dependencies in the package.json.

+
+
+
+
+

Writing the Tests

+
+
+

Once we have all the dependencies installed, it’s time to put hands on the tests.

+
+
+
+
+

Setting up File

+
+
+

First of all we are going to import all the handlers which we are going to test in our file/s (in our case only in test/Http/Post.ts), our typings and the ever-useful Lodash package, so the head of our test file will look like this:

+
+
+
+
import { HttpEvent, Context, Code } from '../../handlers/types';
+import { register } from '../../handlers/Http/POST/register-handler';
+import { search } from '../../handlers/Http/POST/search-handler';
+import { expect } from 'chai';
+import * as _ from 'lodash';
+
+
+
+

Next we are going to define the tests for the register handler, which specify the behavior of our handler; since it isn’t really complex, we are only going to specify that it should return an object, so we will have something like this:

+
+
+
+
import { HttpEvent, Context, Code } from '../../handlers/types';
+import { register } from '../../handlers/Http/POST/register-handler';
+import { search } from '../../handlers/Http/POST/search-handler';
+import { expect } from 'chai';
+import * as _ from 'lodash';
+
+describe('register', function () {
+    it('The register should return an object, with the code and dateAndTime properties');
+});
+
+
+
+

If we execute this test with yarn test we should see that we only have one test and that it is in a pending state:

+
+
+
+Yarn Test +
+
+
+
+
+

Add Tests of the Handlers

+
+
+

Now it is time to call the function and check its behavior. First of all, execute yarn test:auto to watch for changes and execute the tests automatically, then write your test:

+
+
+
+
+

==

+
+
+

Keep in mind that if you don’t have the instance of the database running, as is specified in this section, the tests won’t pass. +== ==

+
+
+
+
import { HttpEvent, Context, Code } from '../../handlers/types';
+import { register } from '../../handlers/Http/POST/register-handler';
+import { search } from '../../handlers/Http/POST/search-handler';
+import { expect } from 'chai';
+import * as _ from 'lodash';
+
+const EVENT = {
+    method: 'POST',
+    path: {},
+    body: {},
+    query: {},
+    headers: {}
+}
+
+let context: Context;
+
+describe('register', function () {
+    this.timeout(6000);
+    it('The register should return an object, with the code and dateAndTime properties', (done: Function) => {
+        let event =  <HttpEvent>_.assign({ body: { "name": "David", "email": "somenthing@something.com", "phone": "658974145"}}, EVENT);
+        register(event, context, (err: Error, res: Code) => {
+            try {
+                expect(err).to.be.null;
+                expect(res).to.be.an('object').that.contains.all.keys('code', 'dateAndTime');
+                done();
+            }
+            catch(error){
+                done(error);
+            }
+        })
+    });
+});
+
+
+
+

Note that we declare and instantiate an HttpEvent but only declare the Context; that’s because we don’t use the context variable inside our handlers, whereas we do use the event variable.

+
+
+

Also, note that we have modified the mocha default timeout (which is 2000ms) to 6000ms, changing it to match the default AWS lambda timeout. Be careful here, and notice that the function passed to describe is not an arrow function; that’s because the scope of an arrow function is bound when it’s created, not when it’s called, so if that function were an arrow function we couldn’t have access to the mocha methods and properties.

+
+
+

Once we have tested the register handler, we have to add tests for the search handler, so you have to repeat the last process until you have something like this:

+
+
+
+
describe('search', function () {
+    this.timeout(6000);
+    it('The search should return an array with the items of the table Queue', (done: Function) => {
+        search(EVENT, context, (err: Error, res: object[]) => {
+            try {
+                expect(err).to.be.null;
+                expect(res).to.be.an('Array');
+                res.forEach(obj => {
+                    expect(obj).to.be.an('object');
+                    expect(obj).to.contain.all.keys(
+                        ['name', 'email', 'phone', 'code', 'dateAndTime']
+                    );
+                })
+                done();
+            }
+            catch(error){
+                done(error);
+            }
+        })
+    });
+});
+
+
+
+
+
+

Add an after Hook

+
+
+

Finally, it’s worth pointing out that we are inserting test data into our database, so we should erase it if necessary after executing the tests. To do this, we are going to add a hook that will execute at the end of our tests and will erase a visitor if we have inserted any, using OASP4Fn; for that we are going to import OASP4Fn in our file, store the code of the inserted visitor and delete it if it was inserted successfully, so our hook will look like this:

+
+
+
+
after(async () => {
+    if(code)
+        await oasp4fn.delete('Queue', code).promise();
+});
+
+
+
+

Keep in mind that the variable code is the code property located in the object returned by the register handler, which you should store when its callback returns it.

+
+
+
+
+

Final Result

+
+
+

So, at the end of the road we have this test file:

+
+
+
+
import { HttpEvent, Context, Code } from '../../handlers/types';
import { register } from '../../handlers/Http/POST/register-handler';
import { search } from '../../handlers/Http/POST/search-handler';
import { expect } from 'chai';
import * as _ from 'lodash';
import oasp4fn from '@oasp/oasp4fn';

const EVENT = {
    method: 'POST',
    path: {},
    body: {},
    query: {},
    headers: {}
}

let context: Context;

let code: string;

describe('register', function () {
    this.timeout(6000);
    it('The register should return an object, with the code and dateAndTime properties', (done: Function) => {
        let event =  <HttpEvent>_.assign({}, EVENT, { body: { "name": "David", "email": "somenthing@something.com", "phone": "658974145"}});
        register(event, context, (err: Error, res: Code) => {
            try {
                expect(err).to.be.null;
                expect(res).to.be.an('object').that.contains.all.keys('code', 'dateAndTime');
                code = res.code;
                done();
            }
            catch(error){
                done(error);
            }
        })
    });
});

describe('search', function () {
    this.timeout(6000);
    it('The search should return an array with the items of the table Queue', (done: Function) => {
        search(EVENT, context, (err: Error, res: object[]) => {
            try {
                expect(err).to.be.null;
                expect(res).to.be.an('Array');
                res.forEach(obj => {
                    expect(obj).to.be.an('object');
                    expect(obj).to.contain.all.keys(
                        ['name', 'email', 'phone', 'code', 'dateAndTime']
                    );
                })
                done();
            }
            catch(error){
                done(error);
            }
        })
    });
});

after(async () => {
    if(code)
        await oasp4fn.delete('Queue', code).promise();
});
+
+
+
+

And the output console look like this:

+
+
+
Yarn Test
+
+
+
+

Next Chapter: Deploy your OASP4Fn App

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/_images/images/devon/cobigen.png b/docs/jump-the-queue/1.0/_images/images/devon/cobigen.png new file mode 100644 index 00000000..89faed6a Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon/cobigen.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon/devcon.png b/docs/jump-the-queue/1.0/_images/images/devon/devcon.png new file mode 100644 index 00000000..b8036301 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon/devcon.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon/devon_dist.png b/docs/jump-the-queue/1.0/_images/images/devon/devon_dist.png new file mode 100644 index 00000000..63c657c7 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon/devon_dist.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon/devonfw.png b/docs/jump-the-queue/1.0/_images/images/devon/devonfw.png new file mode 100644 index 00000000..ff674964 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon/devonfw.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon/newapp1.png b/docs/jump-the-queue/1.0/_images/images/devon/newapp1.png new file mode 100644 index 00000000..dec147ff Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon/newapp1.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/1.Overview/devon4j_architecture.png b/docs/jump-the-queue/1.0/_images/images/devon4j/1.Overview/devon4j_architecture.png new file mode 100644 index 00000000..4457e0af Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/1.Overview/devon4j_architecture.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/10.IdeSetup/adoptopenjdk-install.jpg b/docs/jump-the-queue/1.0/_images/images/devon4j/10.IdeSetup/adoptopenjdk-install.jpg new file mode 100644 index 00000000..7ff17cc6 Binary files /dev/null and 
b/docs/jump-the-queue/1.0/_images/images/devon4j/10.IdeSetup/adoptopenjdk-install.jpg differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/10.IdeSetup/devonfw-ide-setup-run.png b/docs/jump-the-queue/1.0/_images/images/devon4j/10.IdeSetup/devonfw-ide-setup-run.png new file mode 100644 index 00000000..514ef41d Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/10.IdeSetup/devonfw-ide-setup-run.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/10.IdeSetup/devonfw-ide-setup-step02.png b/docs/jump-the-queue/1.0/_images/images/devon4j/10.IdeSetup/devonfw-ide-setup-step02.png new file mode 100644 index 00000000..3d8d3111 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/10.IdeSetup/devonfw-ide-setup-step02.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/10.IdeSetup/gitforwindows-install.jpg b/docs/jump-the-queue/1.0/_images/images/devon4j/10.IdeSetup/gitforwindows-install.jpg new file mode 100644 index 00000000..47ac166b Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/10.IdeSetup/gitforwindows-install.jpg differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/2.Example_app/component_layers.png b/docs/jump-the-queue/1.0/_images/images/devon4j/2.Example_app/component_layers.png new file mode 100644 index 00000000..766ea448 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/2.Example_app/component_layers.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/2.Example_app/get_request.png b/docs/jump-the-queue/1.0/_images/images/devon4j/2.Example_app/get_request.png new file mode 100644 index 00000000..2435b93d Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/2.Example_app/get_request.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/2.Example_app/mts.png b/docs/jump-the-queue/1.0/_images/images/devon4j/2.Example_app/mts.png new file mode 
100644 index 00000000..17528e52 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/2.Example_app/mts.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/2.Example_app/project_components.png b/docs/jump-the-queue/1.0/_images/images/devon4j/2.Example_app/project_components.png new file mode 100644 index 00000000..f1af0d39 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/2.Example_app/project_components.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/2.Example_app/project_modules.png b/docs/jump-the-queue/1.0/_images/images/devon4j/2.Example_app/project_modules.png new file mode 100644 index 00000000..37091a42 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/2.Example_app/project_modules.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/2.Example_app/run.png b/docs/jump-the-queue/1.0/_images/images/devon4j/2.Example_app/run.png new file mode 100644 index 00000000..1a59f90a Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/2.Example_app/run.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/after_setup.png b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/after_setup.png new file mode 100644 index 00000000..63c657c7 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/after_setup.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/build_successful.png b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/build_successful.png new file mode 100644 index 00000000..a890ef20 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/build_successful.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/devcon_devon4j_flyway.png b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/devcon_devon4j_flyway.png 
new file mode 100644 index 00000000..d884b776 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/devcon_devon4j_flyway.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/devcon_devon4j_project_exp.png b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/devcon_devon4j_project_exp.png new file mode 100644 index 00000000..ddeb296d Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/devcon_devon4j_project_exp.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/eclipse_import_1.png b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/eclipse_import_1.png new file mode 100644 index 00000000..997f6cc2 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/eclipse_import_1.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/eclipse_import_2.png b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/eclipse_import_2.png new file mode 100644 index 00000000..5cb26f88 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/eclipse_import_2.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/eclipse_import_3.png b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/eclipse_import_3.png new file mode 100644 index 00000000..058bc2dd Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/eclipse_import_3.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/eclipse_import_4.png b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/eclipse_import_4.png new file mode 100644 index 00000000..6d9e11ca Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/eclipse_import_4.png differ diff --git 
a/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/hierarchical_view.png b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/hierarchical_view.png new file mode 100644 index 00000000..43ce8fab Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/hierarchical_view.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/rest_services.png b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/rest_services.png new file mode 100644 index 00000000..f47b3926 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/rest_services.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/run_java_app.png b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/run_java_app.png new file mode 100644 index 00000000..3c005701 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/run_java_app.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/small_dropdown.png b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/small_dropdown.png new file mode 100644 index 00000000..84472eeb Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/3.BuildYourOwn/small_dropdown.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/accesscode_entity.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/accesscode_entity.png new file mode 100644 index 00000000..f41105a2 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/accesscode_entity.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/adapt-templates.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/adapt-templates.png new file mode 100644 index 00000000..60e88895 Binary files /dev/null and 
b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/adapt-templates.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/architecture.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/architecture.png new file mode 100644 index 00000000..4457e0af Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/architecture.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen-accesscode-new.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen-accesscode-new.png new file mode 100644 index 00000000..0aefd488 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen-accesscode-new.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen-folder.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen-folder.png new file mode 100644 index 00000000..b7a91e4b Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen-folder.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen-visitor-new.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen-visitor-new.png new file mode 100644 index 00000000..c93572c7 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen-visitor-new.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen1.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen1.png new file mode 100644 index 00000000..09f90a99 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen1.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_crud_rest.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_crud_rest.png new file mode 100644 index 
00000000..56f111fe Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_crud_rest.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_crud_rest_services.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_crud_rest_services.png new file mode 100644 index 00000000..748523dd Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_crud_rest_services.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_crud_rest_services_cto.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_crud_rest_services_cto.png new file mode 100644 index 00000000..80752349 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_crud_rest_services_cto.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_crud_springdata_repository.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_crud_springdata_repository.png new file mode 100644 index 00000000..7b39bd67 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_crud_springdata_repository.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_crud_uc_logic.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_crud_uc_logic.png new file mode 100644 index 00000000..37759294 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_crud_uc_logic.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_crud_uc_logic_cto.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_crud_uc_logic_cto.png new file mode 100644 index 00000000..88a2564a Binary files /dev/null and 
b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_crud_uc_logic_cto.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_cto.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_cto.png new file mode 100644 index 00000000..44eab161 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_cto.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_entity_infrastructure.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_entity_infrastructure.png new file mode 100644 index 00000000..179cff89 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_entity_infrastructure.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_to.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_to.png new file mode 100644 index 00000000..bd50ee1f Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen2_to.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen3_allpackages.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen3_allpackages.png new file mode 100644 index 00000000..b9fec7f1 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen3_allpackages.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen3_allpackages_cto.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen3_allpackages_cto.png new file mode 100644 index 00000000..2315950e Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen3_allpackages_cto.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen4_review_imports.png 
b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen4_review_imports.png new file mode 100644 index 00000000..06705062 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen4_review_imports.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen5_manual_import.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen5_manual_import.png new file mode 100644 index 00000000..805d0acb Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen5_manual_import.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen6_expected_errors.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen6_expected_errors.png new file mode 100644 index 00000000..e89151d7 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen6_expected_errors.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen_health_1.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen_health_1.png new file mode 100644 index 00000000..6ddd5fc7 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen_health_1.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen_health_2.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen_health_2.png new file mode 100644 index 00000000..8325d273 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen_health_2.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen_health_3.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen_health_3.png new file mode 100644 index 00000000..df72e70e Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen_health_3.png 
differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen_plugin_check.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen_plugin_check.png new file mode 100644 index 00000000..190df335 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen_plugin_check.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen_templates.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen_templates.png new file mode 100644 index 00000000..5491218c Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/cobigen_templates.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/example_entity.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/example_entity.png new file mode 100644 index 00000000..76520e8d Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/example_entity.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/getter_setter.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/getter_setter.png new file mode 100644 index 00000000..71e1da9e Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/getter_setter.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/jtq_entities.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/jtq_entities.png new file mode 100644 index 00000000..3567b182 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/jtq_entities.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/layers.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/layers.png new file mode 100644 index 00000000..34d55c27 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/layers.png 
differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/new_class.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/new_class.png new file mode 100644 index 00000000..885e32e1 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/new_class.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/new_package_1.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/new_package_1.png new file mode 100644 index 00000000..07715ac8 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/new_package_1.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/new_package_2.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/new_package_2.png new file mode 100644 index 00000000..e8ae1405 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/new_package_2.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/queue_entity.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/queue_entity.png new file mode 100644 index 00000000..8d4a9186 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/queue_entity.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/templates_not_found.png b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/templates_not_found.png new file mode 100644 index 00000000..3339ceb0 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/4.Components/templates_not_found.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/dependency_injection.png b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/dependency_injection.png new file mode 100644 index 00000000..fca7dead Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/dependency_injection.png 
differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_paginated1.png b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_paginated1.png new file mode 100644 index 00000000..8d31b3e5 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_paginated1.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_paginated2.png b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_paginated2.png new file mode 100644 index 00000000..f39067b2 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_paginated2.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_saveVisitor.png b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_saveVisitor.png new file mode 100644 index 00000000..660bffe1 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_saveVisitor.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_simpeGet1.png b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_simpeGet1.png new file mode 100644 index 00000000..67e9d5f4 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_simpeGet1.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_simpeGet2.png b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_simpeGet2.png new file mode 100644 index 00000000..e644ff9e Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_simpeGet2.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_structure.png b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_structure.png new file mode 100644 index 00000000..6a3d4be8 Binary files /dev/null 
and b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_structure.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_structure_api.png b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_structure_api.png new file mode 100644 index 00000000..cb522c33 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_structure_api.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_structure_api_cto.png b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_structure_api_cto.png new file mode 100644 index 00000000..b5a2e89f Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_structure_api_cto.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_structure_core.png b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_structure_core.png new file mode 100644 index 00000000..93b66614 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_structure_core.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_structure_core_cto.png b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_structure_core_cto.png new file mode 100644 index 00000000..d263b90d Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/jumpthequeue_structure_core_cto.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/layer_api_impl.png b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/layer_api_impl.png new file mode 100644 index 00000000..dfb9bc09 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/layer_api_impl.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/layers_impl.png 
b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/layers_impl.png new file mode 100644 index 00000000..1e2612f1 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/5.Layers/layers_impl.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/6.Customizations/jumpthequeue_accesscode.png b/docs/jump-the-queue/1.0/_images/images/devon4j/6.Customizations/jumpthequeue_accesscode.png new file mode 100644 index 00000000..30c77c22 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/6.Customizations/jumpthequeue_accesscode.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/6.Customizations/jumpthequeue_listwithcode.png b/docs/jump-the-queue/1.0/_images/images/devon4j/6.Customizations/jumpthequeue_listwithcode.png new file mode 100644 index 00000000..f039ddc1 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/6.Customizations/jumpthequeue_listwithcode.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_name.png b/docs/jump-the-queue/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_name.png new file mode 100644 index 00000000..956c7b9b Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_name.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_nullemail.png b/docs/jump-the-queue/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_nullemail.png new file mode 100644 index 00000000..a8574595 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_nullemail.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_wrongemail.png b/docs/jump-the-queue/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_wrongemail.png new file mode 100644 index 
00000000..59dc54b1 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_wrongemail.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_wrongphone.png b/docs/jump-the-queue/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_wrongphone.png new file mode 100644 index 00000000..93460f26 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/7.Validations/jumpthequeue_validation_wrongphone.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_junit5_error.png b/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_junit5_error.png new file mode 100644 index 00000000..45323358 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_junit5_error.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_maven.png b/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_maven.png new file mode 100644 index 00000000..66b4ed23 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_maven.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_result.png b/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_result.png new file mode 100644 index 00000000..4ac1fbf8 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_result.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_result2.png b/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_result2.png new file mode 100644 index 00000000..48027b19 Binary files /dev/null and 
b/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_result2.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_result3.png b/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_result3.png new file mode 100644 index 00000000..cd10e77c Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_result3.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_runtest.png b/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_runtest.png new file mode 100644 index 00000000..38a7abdb Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_runtest.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_structure.png b/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_structure.png new file mode 100644 index 00000000..491f4eec Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/jumpthequeue_testing_structure.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/mythaistar_testing_structure.png b/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/mythaistar_testing_structure.png new file mode 100644 index 00000000..3e909244 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/8.Testing/mythaistar_testing_structure.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4j/9.Deployment/jumpthequeue_server_structure.png b/docs/jump-the-queue/1.0/_images/images/devon4j/9.Deployment/jumpthequeue_server_structure.png new file mode 100644 index 00000000..a9e11315 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/9.Deployment/jumpthequeue_server_structure.png differ diff --git 
a/docs/jump-the-queue/1.0/_images/images/devon4j/9.Deployment/jumpthequeue_simpleget1.png b/docs/jump-the-queue/1.0/_images/images/devon4j/9.Deployment/jumpthequeue_simpleget1.png new file mode 100644 index 00000000..03303480 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4j/9.Deployment/jumpthequeue_simpleget1.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/1.Intro/architecture_overview.png b/docs/jump-the-queue/1.0/_images/images/devon4ng/1.Intro/architecture_overview.png new file mode 100644 index 00000000..b6d6d09c Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/1.Intro/architecture_overview.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/1.Intro/flex_box.jpeg b/docs/jump-the-queue/1.0/_images/images/devon4ng/1.Intro/flex_box.jpeg new file mode 100644 index 00000000..77a18166 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/1.Intro/flex_box.jpeg differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/1.Intro/theming.png b/docs/jump-the-queue/1.0/_images/images/devon4ng/1.Intro/theming.png new file mode 100644 index 00000000..d5651ef5 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/1.Intro/theming.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/app_structure.jpg b/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/app_structure.jpg new file mode 100644 index 00000000..9ad3caf9 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/app_structure.jpg differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/authentication.jpg b/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/authentication.jpg new file mode 100644 index 00000000..fa5c49b5 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/authentication.jpg differ diff --git 
a/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/authorization_header.jpg b/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/authorization_header.jpg new file mode 100644 index 00000000..373af982 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/authorization_header.jpg differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/book_table.jpg b/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/book_table.jpg new file mode 100644 index 00000000..8108dd98 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/book_table.jpg differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/menu_cards.jpg b/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/menu_cards.jpg new file mode 100644 index 00000000..e2d5a170 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/menu_cards.jpg differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/ng_compiled.png b/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/ng_compiled.png new file mode 100644 index 00000000..08433fb4 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/ng_compiled.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/price_calculator.jpg b/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/price_calculator.jpg new file mode 100644 index 00000000..2deeb0e8 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/price_calculator.jpg differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/project_main_files.jpg b/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/project_main_files.jpg new file mode 100644 index 00000000..d920a317 Binary files /dev/null and 
b/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/project_main_files.jpg differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/waiter_cockpit.jpg b/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/waiter_cockpit.jpg new file mode 100644 index 00000000..e8740759 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/2.Example_app/waiter_cockpit.jpg differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/angularcli.png b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/angularcli.png new file mode 100644 index 00000000..441eec8f Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/angularcli.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/appnew.png b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/appnew.png new file mode 100644 index 00000000..c0cfe644 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/appnew.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/filesnew.png b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/filesnew.png new file mode 100644 index 00000000..455f365a Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/filesnew.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/formlogin.png b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/formlogin.png new file mode 100644 index 00000000..94888ccb Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/formlogin.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/jumptheq.png b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/jumptheq.png new file mode 100644 index 00000000..1c642dfa Binary files /dev/null and 
b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/jumptheq.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/login.png b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/login.png new file mode 100644 index 00000000..793ee157 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/login.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/mockups.png b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/mockups.png new file mode 100644 index 00000000..3c342e2b Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/mockups.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/ngnew.png b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/ngnew.png new file mode 100644 index 00000000..36f2b8a4 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/ngnew.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/ngnewoptions.png b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/ngnewoptions.png new file mode 100644 index 00000000..60f62d26 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/ngnewoptions.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/register.png b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/register.png new file mode 100644 index 00000000..03b1cc5f Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/register.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/root_header.jpg b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/root_header.jpg new file mode 100644 index 00000000..7e6667f1 Binary files /dev/null and 
b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/root_header.jpg differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/root_router.jpg b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/root_router.jpg new file mode 100644 index 00000000..7dbcc6b2 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/root_router.jpg differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/withCodeAccess.png b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/withCodeAccess.png new file mode 100644 index 00000000..d7b6ca11 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/withCodeAccess.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/withoutCodeAccess.png b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/withoutCodeAccess.png new file mode 100644 index 00000000..78d65633 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/3.BuildYourOwn/withoutCodeAccess.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/5.Angular_Services/injector.png b/docs/jump-the-queue/1.0/_images/images/devon4ng/5.Angular_Services/injector.png new file mode 100644 index 00000000..0fb6f6a5 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/5.Angular_Services/injector.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/6.Deployment/dist_folder.jpg b/docs/jump-the-queue/1.0/_images/images/devon4ng/6.Deployment/dist_folder.jpg new file mode 100644 index 00000000..31e59b9e Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/6.Deployment/dist_folder.jpg differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/7.IdeSetup/adoptopenjdk-install.jpg b/docs/jump-the-queue/1.0/_images/images/devon4ng/7.IdeSetup/adoptopenjdk-install.jpg new file mode 100644 index 
00000000..7ff17cc6 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/7.IdeSetup/adoptopenjdk-install.jpg differ diff --git a/docs/jump-the-queue/1.0/_images/images/devon4ng/7.IdeSetup/gitforwindows-install.jpg b/docs/jump-the-queue/1.0/_images/images/devon4ng/7.IdeSetup/gitforwindows-install.jpg new file mode 100644 index 00000000..47ac166b Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/devon4ng/7.IdeSetup/gitforwindows-install.jpg differ diff --git a/docs/jump-the-queue/1.0/_images/images/jumpthequeue/event.png b/docs/jump-the-queue/1.0/_images/images/jumpthequeue/event.png new file mode 100644 index 00000000..9b2ffc5e Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/jumpthequeue/event.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/jumpthequeue/flow.png b/docs/jump-the-queue/1.0/_images/images/jumpthequeue/flow.png new file mode 100644 index 00000000..afabaad6 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/jumpthequeue/flow.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/jumpthequeue/mockups.png b/docs/jump-the-queue/1.0/_images/images/jumpthequeue/mockups.png new file mode 100644 index 00000000..20fa254f Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/jumpthequeue/mockups.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/jumpthequeue/model.png b/docs/jump-the-queue/1.0/_images/images/jumpthequeue/model.png new file mode 100644 index 00000000..5c7afdba Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/jumpthequeue/model.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/jumpthequeue/scene.png b/docs/jump-the-queue/1.0/_images/images/jumpthequeue/scene.png new file mode 100644 index 00000000..14346709 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/jumpthequeue/scene.png differ diff --git 
a/docs/jump-the-queue/1.0/_images/images/oasp4fn/1.Introduction/inconsistent-traffic-pattern.png b/docs/jump-the-queue/1.0/_images/images/oasp4fn/1.Introduction/inconsistent-traffic-pattern.png new file mode 100644 index 00000000..c3c0d640 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/oasp4fn/1.Introduction/inconsistent-traffic-pattern.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/oasp4fn/1.Introduction/oasp4fn_adds.png b/docs/jump-the-queue/1.0/_images/images/oasp4fn/1.Introduction/oasp4fn_adds.png new file mode 100644 index 00000000..ed90074d Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/oasp4fn/1.Introduction/oasp4fn_adds.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/oasp4fn/1.Introduction/oasp4fn_available.png b/docs/jump-the-queue/1.0/_images/images/oasp4fn/1.Introduction/oasp4fn_available.png new file mode 100644 index 00000000..a0fb86e4 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/oasp4fn/1.Introduction/oasp4fn_available.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/dynamo_db_shell.png b/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/dynamo_db_shell.png new file mode 100644 index 00000000..1fed4580 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/dynamo_db_shell.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/fun_output.png b/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/fun_output.png new file mode 100644 index 00000000..d55e21b4 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/fun_output.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/offline.png b/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/offline.png new file mode 100644 index 00000000..6f432dcc Binary files /dev/null and 
b/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/offline.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/postman_register.png b/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/postman_register.png new file mode 100644 index 00000000..c638a684 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/postman_register.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/postman_search.png b/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/postman_search.png new file mode 100644 index 00000000..e66683b9 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/postman_search.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/sls_deploy.png b/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/sls_deploy.png new file mode 100644 index 00000000..6f1b2924 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/sls_deploy.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/table_description.png b/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/table_description.png new file mode 100644 index 00000000..37402f55 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/table_description.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/yarn_test1.png b/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/yarn_test1.png new file mode 100644 index 00000000..d3f1ac36 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/yarn_test1.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/yarn_test2.png b/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/yarn_test2.png new file mode 100644 index 00000000..fe8a9e6d Binary 
files /dev/null and b/docs/jump-the-queue/1.0/_images/images/oasp4fn/3.BuildYourOwn/yarn_test2.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/tutorialsources/devcon-create-backend.png b/docs/jump-the-queue/1.0/_images/images/tutorialsources/devcon-create-backend.png new file mode 100644 index 00000000..e6d21c27 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/tutorialsources/devcon-create-backend.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/tutorialsources/devcon-gui-project.png b/docs/jump-the-queue/1.0/_images/images/tutorialsources/devcon-gui-project.png new file mode 100644 index 00000000..c749109a Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/tutorialsources/devcon-gui-project.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/tutorialsources/devcon-gui-workspace.png b/docs/jump-the-queue/1.0/_images/images/tutorialsources/devcon-gui-workspace.png new file mode 100644 index 00000000..f8a5a7d3 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/tutorialsources/devcon-gui-workspace.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/tutorialsources/devcon-gui-ws02.png b/docs/jump-the-queue/1.0/_images/images/tutorialsources/devcon-gui-ws02.png new file mode 100644 index 00000000..3527cb82 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/tutorialsources/devcon-gui-ws02.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/tutorialsources/devonfw-ide-setup-run.png b/docs/jump-the-queue/1.0/_images/images/tutorialsources/devonfw-ide-setup-run.png new file mode 100644 index 00000000..514ef41d Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/tutorialsources/devonfw-ide-setup-run.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/tutorialsources/devonfw-ide-setup-step01.png b/docs/jump-the-queue/1.0/_images/images/tutorialsources/devonfw-ide-setup-step01.png new file mode 100644 index 
00000000..33651505 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/tutorialsources/devonfw-ide-setup-step01.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/tutorialsources/devonfw-ide-setup-step02.png b/docs/jump-the-queue/1.0/_images/images/tutorialsources/devonfw-ide-setup-step02.png new file mode 100644 index 00000000..dbf4cc41 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/tutorialsources/devonfw-ide-setup-step02.png differ diff --git a/docs/jump-the-queue/1.0/_images/images/tutorialsources/devonfw-ide-setup-step03.png b/docs/jump-the-queue/1.0/_images/images/tutorialsources/devonfw-ide-setup-step03.png new file mode 100644 index 00000000..4c428820 Binary files /dev/null and b/docs/jump-the-queue/1.0/_images/images/tutorialsources/devonfw-ide-setup-step03.png differ diff --git a/docs/jump-the-queue/1.0/an-devon4j-application.html b/docs/jump-the-queue/1.0/an-devon4j-application.html new file mode 100644 index 00000000..778cac1c --- /dev/null +++ b/docs/jump-the-queue/1.0/an-devon4j-application.html @@ -0,0 +1,446 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4j Application

+
+ +
+
+
+

MyThaiStar Basics

+
+
+

As an example of how a devon4ng application is built, we are going to show the MyThaiStar application front-end project, that can be found in GitHub.

+
+
+

The MyThaiStar application is a solution for managing the online booking and orders of a restaurant. It is presented as a showcase app but designed with real requirements in mind. Moreover, it attempts to demonstrate common features provided by many modern web apps (routing with and without guards, use of flex-box, theme, re-usable components, mock back end, …​).

+
+
+
+MyThaiStar Home Page +
+
+
+

The main features of the app are:

+
+
+
    +
  • +

    Anonymous users can:

    +
    +
      +
    • +

      Book a table.

      +
    • +
    • +

      Create an event and invite some friends via email.

      +
    • +
    • +

      See the menu of dishes and make their own orders.

      +
    • +
    +
    +
  • +
  • +

    Logged-in users in the role Waiter can also:

    +
    +
      +
    • +

      Access a restricted area to see and filter the list and details of all reservations and orders made by users.

      +
    • +
    +
    +
  • +
+
+
+
+
+

MyThaiStar devon4j Back-end Overview

+
+
+

In this section we are going to focus on the implementation of the server project. We will show how MyThaiStar is created and how you can kickstart your own devon4j server project with the devonfw framework.

+
+
+

The MyThaiStar project is hosted on GitHub and includes different technologies such as Java, .NET and Node for back-end solutions Angular and Xamarin as default clients.

+
+
+
+
+

The devon4j Project

+
+
+

Using the devon4j approach for the Java back-end project we will have a structure of a main Maven project formed by four sub-projects:

+
+
+
+Maven Project Structure +
+
+
+

In the core project we will store all the logic and functionality of the application.

+
+
+

In the api project contains the definitions of all interfaces in the application.

+
+
+

The batch project contains components related to the Spring Batch framework which handles batch processing.

+
+
+

The server project configures the packaging of the application.

+
+
+
+
+

The Components

+
+
+

In early chapters we have mentioned that the devon4j applications should be divided in different components that will provide the functionality for the different features of the application. Following the naming convention [Target]management being the Target the main entity that we want to manage.

+
+
+

The components, as part of the logic of the app, are located in the core project of the app. In the case of MyThaiStar we need to show the different available dishes, we need to manage the booking and the orders and we need to create new users. So the application will be divided in the following components:

+
+
+
+Application Components +
+
+
+
+
+

The Component Structure (Layers)

+
+
+

Each component of the app is internally divided following the three-layer architecture (service, logic and dataaccess) that Devon4j proposes. So we will have three different packages to order our component’s elements:

+
+
+
+Component Layers +
+
+
+
+
+

Running MyThaiStar Back-end

+
+
+

Using Spring Boot features, we can easily run our Java back-end applications using the Run as > Java application over the SpringBootApp.java main class.

+
+
+
+MyThaiStar Back-end Launch +
+
+
+

Once we see a console messages like:

+
+
+
+
Tomcat started on port(s): 8081 (http)
+Started SpringBootApp in XX.XXX seconds (JVM running for XX.XXX)
+
+
+
+

we can start accessing our Java back-end.

+
+
+

To show the back-end services results we are going to use Postman app for desktop, although you can use any other similar application.

+
+
+

Now, with Postman, we can do a simple GET request to obtain the info of a dish with id=1 (http://localhost:8081/mythaistar/services/rest/dishmanagement/v1/dish/1). And we obtain a result like this:

+
+
+
+Postman Get Request Result +
+
+
+
+
+

Creating your own devon4j Back-end App

+
+
+

Once we have seen what we can achieve using devon4j as our back-end solution, in next sections we are going to see how to create our own devon4j project step by step, starting from how to create a new devon4j project and explaining how to generate each element of the application.

+
+
+
+

Next Chapter: Create your own devon4j App

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/an-devon4ng-application.html b/docs/jump-the-queue/1.0/an-devon4ng-application.html new file mode 100644 index 00000000..4761b968 --- /dev/null +++ b/docs/jump-the-queue/1.0/an-devon4ng-application.html @@ -0,0 +1,624 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4ng Application

+
+ +
+
+
+

MyThaiStar Basics

+
+
+

As an example of how a devon4ng application is built, we are going to show the MyThaiStar application front-end project, that can be found in GitHub.

+
+
+

The MyThaiStar application is a solution for managing the online booking and orders of a restaurant. It is presented as a showcase app but designed with real requirements in mind. Moreover, it attempts to demonstrate common features provided by many modern web apps (routing with and without guards, use of flex-box, theming, re-usable components, mock back end, …​).

+
+
+
+MyThaiStar Home Page +
+
+
+

The main features of the app are:

+
+
+
    +
  • +

    Anonymous users can:

    +
    +
      +
    • +

      Book a table.

      +
    • +
    • +

      Create an event and invite some friends via email.

      +
    • +
    • +

      See the menu of dishes and make their own orders.

      +
    • +
    +
    +
  • +
  • +

    Logged-in users in the role Waiter can also:

    +
    +
      +
    • +

      Access a restricted area to see and filter the list and details of all reservations and orders made by users.

      +
    • +
    +
    +
  • +
+
+
+
+
+

MyThaiStar devon4ng Front-end Overview

+
+
+

In the previous section we have shown the aspect of the MyThaiStar server project and the services it provides.

+
+
+

In this section we are going to focus on the implementation of the front-end components, services and directives. We will show how MyThaiStar is created and how you can kickstart your own devon4ng client project with the devonfw framework.

+
+
+

The MyThaiStar project is hosted on GitHub and includes different technologies such as Java, .NET and Node for back-end solutions Angular and Xamarin as default clients.

+
+
+
+
+

The devon4ng Project

+
+
+

Using the devon4ng approach for the client project we will have the structure of a main Angular project as follows:

+
+
+
+Angular Project Structure +
+
+
+

In the e2e folder will be all end-to-end tests.

+
+
+

In the node modules folder, all installed dependencies will be stored.

+
+
+

The src folder contains all the application code.

+
+
+

Finally, the rest of the files are configuration files for different technologies involved in the project.

+
+
+
+
+

Angular Folder Structure

+
+
+

Following the Angular style guide rules, the application has been structured this way:

+
+
+
    +
  • +

    app

    +
    +
      +
    • +

      components

      +
      +
        +
      • +

        sub-components

        +
      • +
      • +

        shared

        +
        +
          +
        • +

          services

          +
        • +
        +
        +
      • +
      • +

        component files

        +
      • +
      +
      +
    • +
    • +

      main app component

      +
    • +
    +
    +
  • +
  • +

    assets folder

    +
  • +
  • +

    environments folder

    +
  • +
  • +

    rest of angular files

    +
  • +
+
+
+

As can be seen in this image:

+
+
+
+Angular Folder Structure +
+
+
+
+
+

Components

+
+
+

As we already saw in the previous chapter, the Angular architecture is based on four types of elements: components, services, modules and directives.

+
+
+

In this section we are going to focus on the components. We can distinguish them, because they all are named with the extension .component.ts.

+
+
+

Components represent a single element of the application, but can — at the same time — contain multiple components themselves. This is the case for components that are main views:

+
+
+
    +
  • +

    app (the main component)

    +
  • +
  • +

    home

    +
  • +
  • +

    menu

    +
  • +
  • +

    book-table

    +
  • +
  • +

    cockpit-area

    +
  • +
  • +

    some components for dialogs

    +
  • +
+
+
+

These views have their own Teradata Covalent layouts to manage their contents, other components or tags that are displayed.

+
+
+
+
import {...} from '...'
+
+@Component({
+  selector: 'public-menu',
+  templateUrl: './menu.component.html',
+  styleUrls: ['./menu.component.scss'],
+})
+export class MenuComponent implements OnInit {
+    methods implementation...
+}
+
+
+
+

There are also components, which are part of a template and can be reused multiple times and/or in multiple places. This is the case for components like:

+
+
+
    +
  • +

    sidenav

    +
  • +
  • +

    header

    +
  • +
  • +

    menu-card

    +
  • +
+
+
+

menu-card is an element that accepts menu information as input data and displays this information as a card. This component will be reused for every single dish on the menu, so the best way to handle it is to isolate its logic and template in a component, so the menu view just has to know about the existence of the component and the data it needs to work.

+
+
+
+
<public-menu-card *ngFor="let menu of menus" [menu]="menu"></public-menu-card>
+
+
+
+
+MyThaiStar Menu Cards View +
+
+
+

To interact and navigate between the main views, Angular provides a Router that provides the functionality to move between URLs in the same app. Additionally it provides an HTML tag <router-outlet></router-outlet> that shows, which component has been navigated to. This router tag is placed in the main app component, at the same level as the sidenav and the header. This means, that these two components are on top of whatever the router shows. That is why we can always see the header, no matter what component we are displaying via the router.

+
+
+

Angular Material also provides a Tab component, which changes its content depending on which tab has been clicked on. An example for the usage of this component can be seen in the book-table view:

+
+
+
+MyThaiStar Book Table View +
+
+
+

This component view shows a card, that contains a form for reservation or the creation of an event.

+
+
+
+
+

Services

+
+
+

Ideally, all logic should be taken out of a component. Only calls to services and minimal script interactions should be contained in a component. The services should then contain all the logic, for example, code that calls the server and so on.

+
+
+

MyThaiStar components consume those services, for example a price-calculator service, which is called when a customer makes an order:

+
+
+
+MyThaiStar Price Calculator +
+
+
+

There are two special services in MyThaiStar, which serve a different purpose, than just being consumed by a component. They are:

+
+
+
    +
  • +

    Authentication

    +
  • +
  • +

    AuthGuard

    +
  • +
  • +

    HttpClient

    +
  • +
+
+
+

To secure the access to the waiter cockpit — which is a forbidden area for anyone who is not a waiter — MyThaiStar employs a service of authentication as well as a Router Guard.

+
+
+
+MyThaiStar Authentication & Router Guard Service +
+
+
+

Guards are services, that implement the CanActivate function, which returns a Boolean, indicating if a navigation attempt is valid or forbidden. If it is forbidden, the router stops navigation — if it is valid, the router navigates to the desired location. The authentication service serves as storage and validator for certain kinds of data, including usernames, roles, permissions and JWT tokens.

+
+
+

HttpClient — among other things — implements the management of HTTP headers. The workflow is exactly the same as with standard HTTP requests/responses, but here a token is added to a header, when specific, secured services are called. HttpClient has also been extended to handle errors, in case a token has expired or is corrupted.

+
+
+
+MyThaiStar Security Token +
+
+
+

With all of this correctly set up, we can log in to the waiter cockpit by entering the correct credentials. This way the logged-in state is set to true. The server will return a header with the correct token. As a result, the application will navigate to the waiter cockpit correctly.

+
+
+
+MyThaiStar Waiter Cockpit View +
+
+
+
+
+

Modules

+
+
+

Through modules, you can encapsulate whole functionalities or parts of the application. All Angular apps have at least one module: app.module. Angular encourages the use of more modules to organize all components and services. In MyThaiStar, every component and service is inside a module, so the app.module is only composed of other, smaller modules.

+
+
+
+
+

Running MyThaiStar Client

+
+
+

To run MyThaiStar, you have to have Node installed globally and Angular CLI. Once you have installed these dependencies, you can go to the project folder and run: yarn install. Once finished, you are ready to run the client via: ng serve.

+
+
+

If everything goes well, the console will output something like this:

+
+
+
+Angular CLI Output +
+
+
+

Navigate to http://localhost:4200 to see the MyThaiStar app client running.

+
+
+

Now that we know what can be done with devon4ng, we are going to show you step-by-step how you can make your own app from scratch. We will explain how to create components and services, how to set up routing and how every other element of the application works.

+
+
+
+

Next Chapter: devon4ng Components

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/build-devon4j-application.html b/docs/jump-the-queue/1.0/build-devon4j-application.html new file mode 100644 index 00000000..fb7dc4e4 --- /dev/null +++ b/docs/jump-the-queue/1.0/build-devon4j-application.html @@ -0,0 +1,572 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Build your own devon4j Application

+
+
+

In this step we are going to create our work environment and establish the first part of our back-end. Like most devonfw projects, our back-end will be built in Java and thus utilize the tools, libraries and frameworks provided by the devon4j stack.

+
+
+
+
+

Setting up a devonfw Work Environment

+
+
+

Follow the setup instructions listed here.
+After you have done so, your working directory should look like this:

+
+
+
+After Setup +
+
+
+

Press "Shift + Right Click" and select "Open Devon CMD shell here".
+Your shell should now show the message:

+
+
+
+
+

devon IDE environment variables have been set for C:\... in workspace main

+
+
+
+
+

Navigate into workspace "main" by entering:

+
+
+
+
C:\...> cd workspaces\main
+
+
+
+
+
+

Creating the Project

+
+
+

Now we are going to create the folder structure for our back-end by running the following commands:

+
+
+
+
C:\...\workspaces\main>                 mkdir jumpthequeue
+C:\...\workspaces\main>                 cd jumpthequeue
+C:\...\workspaces\main\jumpthequeue>    mkdir java
+C:\...\workspaces\main\jumpthequeue>    cd java
+C:\...\workspaces\main\jumpthequeue\java>
+
+
+
+

Here we are going to generate our devon4j application template as a starting point for our back-end:

+
+
+
+
C:\...\workspaces\main\jumpthequeue\java>
+    devon java create com.devonfw.application.jtqj com.devonfw.java.jtqj -Dversion=0.0.1
+
+
+
+
+
+

==

+
+
+

We are using the java commandlet of the devon IDE. After the command create we are passing the arguments package, groupId and version in this case. +== ==

+
+
+
+
+

==

+
+
+

Be sure that you are using devonfw CMD or bash, as the command is not working properly with devonfw PowerShell. +== ==

+
+
+
+Build Success +
+
+
+

Once you see the BUILD SUCCESS message, the new app template has been created and can now be expanded.

+
+
+
+
+

Importing the Application in Eclipse

+
+
+

First we have to launch our project-specific Eclipse instance by executing the eclipse-main.bat script in the root folder. Now we can import our application into Eclipse:

+
+
+
+Eclipse Maven Import 1 +
+
+
+

Select Import projects…​ from the Package Explorer (or optionally Import…​ via the File menu).

+
+
+
+Eclipse Maven Import 2 +
+
+
+

Select Existing Maven Projects and click Next >.
+Now click Browse and select the java folder inside our jumpthequeue project.

+
+
+

Eclipse Maven Import 3 +Eclipse Maven Import 4

+
+
+

Eclipse will detect the pre-generated Maven project structure inside our project by looking for pom.xml files.
+In our case it should find the api, core and server packages.
+Finally click Finish and wait for the import to complete.

+
+
+
+
+

Preparing the Back-end for an Initial Launch

+
+
+

Before test-launching our back-end we have to change the server context path of our application. Via the Eclipse Package Explorer navigate to:

+
+
+
+
/jtqj-core/src/main/resources/config/
+
+
+
+

Now open application.properties and update the following line of code:

+
+
+
+
server.servlet.context-path=/jumpthequeue
+
+
+
+
+
+

==

+
+
+

For a more manageable view of the package structure inside the Eclipse Package Explorer select Package Presentation > Hierarchical from the small Dropdown Icon dropdown-menu.

+
+
+
+Hierarchical View +
+
+
+
+
+

==

+
+ +
+
+
+

Launching the Back-end

+
+
+

Finally, using Spring Boot (which provides us with an embedded Tomcat server), we can run the back-end of our app in one easy step. Navigate to:

+
+
+
+
/jtqj-core/src/main/java/com.devonfw.application.jtqj/
+
+
+
+

Now right click SpringBootApp.java select Run As > Java Application.

+
+
+
+Run as Java Application +
+
+
+

If you did everything right up until now the console should show a message like this:

+
+
+
+
  .   ____          _            __ _ _
+ /\\ / ___'_ __ _ _(_)_ __  __ _ \ \ \ \
+( ( )\___ | '_ | '_| | '_ \/ _` | \ \ \ \
+ \\/  ___)| |_)| | | | | || (_| |  ) ) ) )
+  '  |____| .__|_| |_|_| |_\__, | / / / /
+ == == == == =|_|== == == == == == == |___/=/_/_/_/
+ :: Spring Boot ::        (vX.X.X.RELEASE)
+
+.
+.
+.
+... c.d.application.jtqj.SpringBootApp       : Starting SpringBootApp on XXXXXXX with PID XXXX
+... o.s.b.w.embedded.tomcat.TomcatWebServer  : Tomcat started on port(s): 8081 (http) with context path '/jumpthequeue'
+... c.d.application.jtqj.SpringBootApp       : Started SpringBootApp in XX.XXX seconds (JVM running for XX.XXX)
+
+
+
+

The back-end is now accessible via localhost:8081/jumpthequeue.
+You can view the exposed services by logging in with these credentials:

+
+
+
+
Username:   admin
+Password:   admin
+
+
+
+
+RESTful Services +
+
+
+
+
+

==

+
+
+

You are redirected to a login screen because — by default — a new devon4j application provides a basic security setup. +== ==

+
+
+
+
+

Structure of the App Template

+
+
+

When creating devon4j based apps this way, we get the following features out-of-the-box:

+
+
+
    +
  • +

    Maven project with api project, core project and server project:

    +
    +
      +
    • +

      api project for the common API

      +
    • +
    • +

      core project for the app implementation

      +
    • +
    • +

      server project ready to package the app for deployment

      +
    • +
    +
    +
  • +
  • +

    Data base ready environment with an h2 instance, including:

    +
    +
      +
    • +

      Data model schema

      +
    • +
    • +

      Mock data schema

      +
    • +
    +
    +
  • +
  • +

    Database version control with Flyway

    +
  • +
  • +

    Bean Mapper ready

    +
  • +
  • +

    Basic security enabled (based on Spring Security)

    +
  • +
  • +

    Unit test support and model

    +
  • +
+
+
+
+

Next Chapter: Add a Component to your App

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/build-devon4ng-application.html b/docs/jump-the-queue/1.0/build-devon4ng-application.html new file mode 100644 index 00000000..b877b947 --- /dev/null +++ b/docs/jump-the-queue/1.0/build-devon4ng-application.html @@ -0,0 +1,684 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Build your own devon4ng Application

+
+
+

In this chapter we are going to see how to build a new devon4ng application from scratch. The goal of this tutorial is to end up with enough knowledge of Angular and of the remaining technologies regarding devon4ng to know how to start developing. If you need more advanced and specific functionalities, you can find them in the cookbook.

+
+
+
+
+

Goal of JumpTheQueue

+
+
+

These mock-ups show what you can expect to see after you finished this tutorial. It will be an App to manage codes assigned to people attending a queue in order to ease the management of the queue. With a code, you can jump between positions in the queue and you will be able to keep track of your position.

+
+
+
+JumpTheQueue Mock-Ups +
+
+
+

So, hands on it: Let’s configure the environment and build this app!

+
+
+
+
+ +
+
+

We will be utilizing NodeJS, npm and Yarn to create our Angular front end. Using NodeJS inside the devonfw environment avoids a manual download and installation, and keeps the tool isolated from other projects, so we don’t run into version conflicts down the road.

+
+
+

Node should already be 'installed' inside your devonfw environment (in the /software/node directory). However, to make sure we have its most current version and the right package manager (Yarn) available, execute the following commands anywhere inside the devonfw environment:

+
+
+
+
devon node setup
+
+devon yarn setup
+
+
+
+
+
+

Installing Global Tools (optional)

+
+ +
+
+
+

==

+
+
+

If you are using a devonfw work environment — as instructed in the devon4j part of this tutorial — you DON’T have to install these tools globally, since they can be accessed from the devon CLI.
+A global installation is only useful if you want to create Angular projects outside of a devonfw environment or without the devon CLI. +== ==

+
+
+
+
+

Visual Studio Code

+
+
+

To install the editor, download the installer from the official page and install it.

+
+
+

Once installed, the first thing you should do is install the extensions that will help you during the development. To do that, follow these steps:

+
+
+
    +
  1. +

    Go to the Extensions panel in VS Code and search for "devonfw" on the market place.

    +
  2. +
  3. +

    Install the devonfw Platform Extension Pack (this might take a while).

    +
  4. +
+
+
+
+
+

NodeJS

+
+
+

Go to nodejs.org and download the version you like the most — LTS or Current — as you wish.

+
+
+

The recommendation is to install the latest version, but keep in mind that to use Angular CLI, your version must be at least 8.x and to use npm at least 5.x. If you have NodeJS already installed on your computer, this is a good moment to check your version and upgrade if necessary.

+
+
+
+
+

TypeScript

+
+
+

Let’s install what is going to be the main language during development: TypeScript. This ES6 super set is tightly coupled to the Angular framework and will help us to get a final clean and distributable JavaScript code. This is installed globally with npm, the package manager used to install and create JavaScript modules in NodeJS, that is installed along with Node. To install TypeScript, you don’t have to install npm explicitly, just run this command:

+
+
+
+
npm install -g typescript
+
+
+
+
+
+

Yarn

+
+
+

Just like npm, Yarn is a package manager for JavaScript/Node modules. Yarn is quite a bit faster and more usable in our opinion, so we decided to use it to manage the dependencies of devon4ng projects.

+
+
+

To install Yarn, you only have to go to the official installation page and follow the instructions.

+
+
+

However, if you feel more comfortable with npm, you can keep using it.

+
+
+
+
+

Angular/CLI

+
+
+

This CLI is specifically built to make Angular projects easier to develop, maintain and deploy, so we are going to make use of it.

+
+
+

To install the Angular/CLI, you have to run this command in your console prompt:

+
+
+
+
npm install -g @angular/cli
+
+
+
+

Now you should be able to run ng version and this will appear in the console:

+
+
+
+Angular CLI Version +
+
+
+

In addition, you can set Yarn as the default package manager to use with Angular/CLI by running this command:

+
+
+
+
ng config -g cli.packageManager yarn
+
+
+
+

Finally, once all these tools have been installed successfully, you are ready to create a new project.

+
+
+
+
+

Creating a New Project with the Angular/CLI

+
+
+

One of the main reasons to use Angular/CLI is the feature to create whole new projects from scratch by simply running one command. We are going to create an Angular 7 (legacy) project, to keep this tutorial working even if a new Angular version is released. Inside the C:...\workspaces\main\jumpthequeue directory run:

+
+
+
+
npx -p @angular/cli@7 ng new angular
+
+
+
+
+
+

==

+
+
+

If you want to create a 'real' project on your own later on, you should do so using the latest Angular version by running:

+
+
+
+
ng new <project name>
+
+
+
+

Where <project name> is the name of the Angular project you want to create.

+
+
+

In the case shown above we called our project angular, since we want to distribute its code as part of our complete jumpthequeue project. This is analogous to the java directory used for our devon4j back-end. +== ==

+
+
+

After executing the command, Angular/CLI will ask, if we want to use Angular routing (Yes) and what style sheet format we want to use (SCSS):

+
+
+
+Angular Options +
+
+
+

This command will establish a project directory structure, initialize default files, and store references to basic dependencies in the package.json file:

+
+
+
+Angular Project Creation +
+
+
+

After project creation, navigate into the new /angular folder and execute the following command, to set Yarn as your default package manager for this project:

+
+
+
+
ng config cli.packageManager yarn
+
+
+
+

Now install the required dependencies using Yarn by executing:

+
+
+
+
yarn install
+
+
+
+

Now run vscode-main.bat to start the VS Code instance for the main workspace and expand the /jumpthequeue/angular directory, i.e. the project we have just created. It should look like this:

+
+
+
+Angular New Project Files +
+
+
+

Finally, it’s time to check if the created project works properly. To do this, simply run:

+
+
+
+
ng serve -o
+
+
+
+

If everything was compiled correctly, you’ll see the default Angular 7 landing page:

+
+
+
+Angular Default Page +
+
+
+

The ng serve command starts the development mode of the Angular/CLI. This means, that every time you make a change in the code and save it, the project will automatically recompile and run. The -o option causes the project to open in your default browser once compiled.

+
+
+

For the next steps we’ll have to stop the development mode by pressing Ctrl + C and terminating the batch job (Y).

+
+
+
+
+

Adding Google Material and Covalent Teradata

+
+ +
+
+
+

==

+
+
+

We will be using very specific module versions in this tutorial, to ensure that all dependencies are compatible with the legacy Angular 7 project. For this reason we will be appending @<version> behind each dependency.

+
+
+

If you create a new project with the latest Angular version on your own later down the line, you can omit this tag. This way, the latest compatible version of a dependency will be downloaded and linked.

+
+
+

For Angular Material this would, for example, be done via npm install @angular/material or yarn add @angular/material. +== ==

+
+
+

Go to the C:/…​/workspaces/main/jumpthequeue/angular directory and run the following command to add Google Material to the project dependencies:

+
+
+
+
yarn add @angular/material@7.1.0
+
+
+
+

Now we are going to add the Angular CDK (Component Dev Kit):

+
+
+
+
yarn add @angular/cdk@7.1.0
+
+
+
+

Then we are going to add Animations:

+
+
+
+
yarn add @angular/animations@7.1.0
+
+
+
+

The Angular animations library implements a domain-specific language (DSL) for defining web animation sequences for HTML elements as multiple transformations over time. Finally, some material components need gestures support, so we need to add this dependency:

+
+
+
+
yarn add hammerjs@^2.0.8
+
+
+
+

That is all regarding Angular/Material. We are now going to install Covalent Teradata dependency:

+
+
+
+
yarn add @covalent/core@2.0.0-beta.4
+
+
+
+

Now that we have downloaded and linked all dependencies, we can check the project’s package.json file and see if everything has been correctly added (Some of the minor dependencies may have different versions for you, which is fine. Our main concern is the versions of the modules manually installed in the previous steps.):

+
+
+
+
  "dependencies": {
+    "@angular/animations": "7.1.0",
+    "@angular/cdk": "7.1.0",
+    "@angular/common": "~7.1.0",
+    "@angular/compiler": "~7.1.0",
+    "@angular/core": "~7.1.0",
+    "@angular/forms": "~7.1.0",
+    "@angular/material": "7.1.0",
+    "@angular/platform-browser": "~7.1.0",
+    "@angular/platform-browser-dynamic": "~7.1.0",
+    "@angular/router": "~7.1.0",
+    "@covalent/core": "2.0.0-beta.4",
+    "core-js": "^2.5.4",
+    "hammerjs": "^2.0.8",
+    "rxjs": "~6.3.3",
+    "tslib": "^1.9.0",
+    "zone.js": "~0.8.26"
+  }
+
+
+
+

Angular Material and Covalent need the following modules to work: CdkTableModule, BrowserAnimationsModule and every Covalent and Material Module used in the application. These modules come from @angular/material, @angular/cdk/table, @angular/platform-browser/animations and @covalent/core. In future steps a CoreModule will be created. This module will contain the imports of these libraries which will avoid code repetition.

+
+
+

Now let’s continue to make some config modifications to have all the styles imported to use Material and Teradata:

+
+
+

1.- Inside angular/src we will create a theme.scss file to configure the themes of our app. We will use one primary color, one secondary — called accent — and another one for warnings. Teradata also accepts a foreground and background color. Paste the following content into the file:

+
+
+
+
@import '~@angular/material/theming';
+@import '~@covalent/core/theming/all-theme';
+
+@include mat-core();
+
+$primary: mat-palette($mat-blue, 700);
+$accent:  mat-palette($mat-orange, 800);
+
+$warn:    mat-palette($mat-red, 600);
+
+$theme: mat-light-theme($primary, $accent, $warn);
+
+$foreground: map-get($theme, foreground);
+$background: map-get($theme, background);
+
+@include angular-material-theme($theme);
+@include covalent-theme($theme);
+
+
+
+

2.- Now we have to add these styles to our Angular/CLI config. Go to angular.json in the angular root folder, then search both of the "styles" arrays (inside build and test) and add theme.scss and also the platform.css from Covalent library to make it look like this:

+
+
+
+
...
+
+  "styles": [
+    "src/styles.css",
+    "src/theme.scss",
+    "node_modules/@covalent/core/common/platform.css"
+  ],
+
+...
+
+
+
+

3.- In the same file, the minimized hammer.min.js library/script will be added. To do so, paste the following code inside both "scripts" arrays (build and test):

+
+
+
+
...
+
+  "scripts": [
+    "node_modules/hammerjs/hammer.min.js"
+  ]
+
+...
+
+
+
+

Now we have successfully set up a blank Angular project with Google Material and Covalent Teradata modules. We can continue by adding custom functionality and components to the app.

+
+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/devon4j-adding-custom-functionality.html b/docs/jump-the-queue/1.0/devon4j-adding-custom-functionality.html new file mode 100644 index 00000000..5e14bf7c --- /dev/null +++ b/docs/jump-the-queue/1.0/devon4j-adding-custom-functionality.html @@ -0,0 +1,924 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4j adding Custom Functionality

+
+
+

In the previous chapter we have seen that, using CobiGen, we can generate all the structure and functionality of a devon4j component in a few clicks.

+
+
+

In this chapter we are going to show how to add custom functionalities to our projects, that are out of the scope of code, that CobiGen is able to generate.

+
+
+
+
+

Return the Access Code

+
+
+

The JumpTheQueue design defines a User Story in which a visitor can register into an event and obtain an access code to avoid a queue.

+
+
+

In our standard implementation of the JumpTheQueue app we have used CobiGen to generate the components, so we have a default implementation of the services. Since the AccessCode component is more complex and requires the use of CTOs, we need to create our own usecasemanage and the methods save and delete.

+
+
+

We also have to add some methods to the Queue component, since when saving/deleting an AccessCode, the amount of customers in the Queue needs to increase/decrease.

+
+
+
+
+

Adding Methods to the Queue Component

+
+
+

In our case, two new methods are going to be needed; decreaseQueueCustomer and increaseQueueCustomer. In order to add those methods to the queuemanagement, we need to follow these three steps:

+
+
+
    +
  1. +

    Modify the corresponding usecase interface, adding the methods.

    +
  2. +
  3. +

    Implement the methods in the usecaseimpl.

    +
  4. +
  5. +

    Modify the management implementation managementimpl.

    +
  6. +
+
+
+
+
+

== 1. Modifying UcManageQueue

+
+
+

Inside jtqj-api/queuemanagement/logic/api/usecase/UcManageQueue the declarations of the two methods are going to be added:

+
+
+
+
...
+
+public interface UcManageQueue {
+
+	...
+
+	/**
+	 * Decrease number of customers of the queue and update the queue.
+	 *
+	 * @param queueId id of the queue to decrease customer.
+	 */
+	void decreaseQueueCustomer(long queueId);
+
+	/**
+	 * Increase number of customers of the queue and update the queue.
+	 *
+	 * @param queueId id of the queue to increase customer.
+	 */
+	void increaseQueueCustomer(long queueId);
+
+}
+
+
+
+
+
+

== 2. Implementing the Methods in UcManageQueueImpl

+
+
+

In jtqj-core/src/main/java/queuemanagement/logic/impl/usecase/UcManageQueueImpl the implementation of the methods, that were just added in the interface, are going to be added:

+
+
+
+
...
+
+public class UcManageQueueImpl extends AbstractQueueUc implements UcManageQueue {
+
+  ...
+
+  @Override
+  public void decreaseQueueCustomer(long queueId) {
+
+    // the queue is found by using the repository find method and queueId parameter
+    QueueEntity queueEntity = getQueueRepository().find(queueId);
+
+    // the customers gets reduced by one
+    queueEntity.setCustomers(queueEntity.getCustomers() - 1);
+
+    // Based on Hibernate, the command save(Entity) is not strictly required, but it improves readability.
+    // the queueEntity gets saved
+    getQueueRepository().save(queueEntity);
+  }
+
+  @Override
+  public void increaseQueueCustomer(long queueId) {
+
+    // the queue is found by using the repository find method and queueId paremeter
+    QueueEntity queueEntity = getQueueRepository().find(queueId);
+
+    // the customers gets increased by one
+    queueEntity.setCustomers(queueEntity.getCustomers() + 1);
+
+    // Based on Hibernate, the command save(Entity) is not strictly required, but it improves readability.
+    // the queueEntity gets saved
+    getQueueRepository().save(queueEntity);
+  }
+
+}
+
+
+
+
+
+

== 3. Modify the Management Implementation QueuemanagementImpl

+
+
+

Since the Queuemanagement extends the usecase UcManageQueue the methods from the previous step need to be added in the QueuemanagementImpl.

+
+
+
+
...
+
+public class QueuemanagementImpl extends AbstractComponentFacade implements Queuemanagement {
+
+  ...
+
+  @Override
+  public void decreaseQueueCustomer(long queueId) {
+
+    this.ucManageQueue.decreaseQueueCustomer(queueId);
+  }
+
+  @Override
+  public void increaseQueueCustomer(long queueId) {
+
+    this.ucManageQueue.increaseQueueCustomer(queueId);
+  }
+
+}
+
+
+
+

These methods are simply going to call the ucManageQueue methods that were just added.

+
+
+
+
+

Creating the usecasemanage for the Access Code

+
+ +
+
+
+

== Adding method to the Access Code usecasefind

+
+
+

Before creating the usecasemanage, a method needs to be added to the usecasefind, that will recover our AccessCodeEto. In jtqj-api, inside the package accesscodemanagement/logic/api/usecase/, the file UcFindAccessCode is going to be modified, adding the new method to the interface:

+
+
+
+
...
+
+import com.devonfw.application.jtqj.accesscodemanagement.logic.api.to.AccessCodeEto;
+
+...
+
+public interface UcFindAccessCode {
+
+  ...
+
+  /**
+   * Returns a paginated list of AccessCodeEto matching the search criteria.
+   *
+   * @param criteria the {@link AccessCodeSearchCriteriaTo}.
+   * @return the {@link List} of matching {@link AccessCodeEto}s.
+   */
+  Page<AccessCodeEto> findAccessCodeEtos(AccessCodeSearchCriteriaTo criteria);
+
+}
+
+
+
+

Once that is finished, we will see that an error is going to appear in UcFindAccessCodeImpl and AccesscodemanagementImpl. The second error will be solved in later steps. To solve the first error, in jtqj-core the accesscodemanagement/logic/impl/usecase/UcFindAccessCodeImpl class needs to implement another method:

+
+
+
+
...
+
+public class UcFindAccessCodeImpl extends AbstractAccessCodeUc implements UcFindAccessCode {
+
+  ...
+
+  @Override
+  public Page<AccessCodeEto> findAccessCodeEtos(AccessCodeSearchCriteriaTo criteria) {
+
+    Page<AccessCodeEntity> accessCodes = getAccessCodeRepository().findByCriteria(criteria);
+
+    return mapPaginatedEntityList(accessCodes, AccessCodeEto.class);
+  }
+
+}
+
+
+
+

This method uses an AccessCodeSearchCriteriaTo to find a page of entities — AccessCodeEntity — using the repository. After that, it changes the mapping of the list from AccessCodeEntity to AccessCodeEto.

+
+
+
+
+

== Creating the usecasemanage

+
+
+

In jtqj-api, inside the package accesscodemanagement/logic/api/usecase/, we are going to create a new interface called UcManageAccessCode, where we will define the save and delete methods:

+
+
+
+
...
+
+import com.devonfw.application.jtqj.accesscodemanagement.logic.api.to.AccessCodeEto;
+
+public interface UcManageAccessCode {
+
+  /**
+   * Deletes an accessCode from the database by its ID 'accessCodeId'. Decreases the count of customers of the queue
+   * assigned to the access code by one.
+   *
+   * @param queueId Id of the queue to delete
+   */
+  void deleteAccessCode(long accessCodeId);
+
+  /**
+   * Saves a queue and stores it in the database. Increases the count of customers of the queue assigned to the access
+   * code by one.
+   *
+   * @param queue the {@link AccessCodeEto} to create.
+   * @return the new {@link AccessCodeEto} that has been saved with ID and version.
+   */
+  AccessCodeEto saveAccessCode(AccessCodeEto accessCodeEto);
+
+}
+
+
+
+

Then in jtqj-core, inside the package accesscodemanagement/logic/impl/usecase, we are going to create a class called UcManageAccessCodeImpl, implementing the definition we just made and extending AbstractAccessCodeUc. This will allow us to have access to the repository.

+
+
+

Also, here is the part where we will use the methods that we created in the Queue component:

+
+
+
+
...
+
+import java.sql.Timestamp;
+import java.time.Instant;
+import java.util.List;
+import java.util.Objects;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.data.domain.PageRequest;
+import org.springframework.data.domain.Pageable;
+import org.springframework.transaction.annotation.Transactional;
+import org.springframework.validation.annotation.Validated;
+
+import com.devonfw.application.jtqj.accesscodemanagement.dataaccess.api.AccessCodeEntity;
+import com.devonfw.application.jtqj.accesscodemanagement.logic.api.to.AccessCodeCto;
+import com.devonfw.application.jtqj.accesscodemanagement.logic.api.to.AccessCodeEto;
+import com.devonfw.application.jtqj.accesscodemanagement.logic.api.to.AccessCodeSearchCriteriaTo;
+import com.devonfw.application.jtqj.accesscodemanagement.logic.api.usecase.UcFindAccessCode;
+import com.devonfw.application.jtqj.accesscodemanagement.logic.api.usecase.UcManageAccessCode;
+import com.devonfw.application.jtqj.accesscodemanagement.logic.base.usecase.AbstractAccessCodeUc;
+import com.devonfw.application.jtqj.queuemanagement.dataaccess.api.QueueEntity;
+import com.devonfw.application.jtqj.queuemanagement.logic.api.Queuemanagement;
+import com.devonfw.application.jtqj.queuemanagement.logic.api.to.QueueEto;
+import com.devonfw.application.jtqj.queuemanagement.logic.impl.usecase.UcManageQueueImpl;
+
+@Named
+@Validated
+@Transactional
+public class UcManageAccessCodeImpl extends AbstractAccessCodeUc implements UcManageAccessCode {
+
+  @Inject
+  private Queuemanagement queuemanagement;
+
+  @Inject
+  private Accesscodemanagement accesscodemanagement;
+
+  /** Logger instance. */
+  private static final Logger LOG = LoggerFactory.getLogger(UcManageQueueImpl.class);
+
+  @Override
+  public void deleteAccessCode(long accessCodeId) {
+
+    // we get the queueId using the AccessCodeRepository
+    long queueId = getAccessCodeRepository().find(accessCodeId).getQueueId();
+
+    /**
+     * Using the method getQueuemanagement() gives access to the methods that were created earlier in the usecasemanage
+     * (inside the queue component). This is done so each component takes care of its own modifications.
+     */
+    this.queuemanagement.decreaseQueueCustomer(queueId);
+
+    LOG.debug("The queue with id '{}' has decreased its customers.", queueId);
+
+    // then we delete the accesscode
+    getAccessCodeRepository().deleteById(accessCodeId);
+    LOG.debug("The accesscode with id '{}' has been deleted.", accessCodeId);
+
+  }
+
+  @Override
+  public AccessCodeEto saveAccessCode(AccessCodeEto accessCodeEto) {
+
+    // make sure the object is not null
+    Objects.requireNonNull(accessCodeEto, "UcManageAccessImpl accessCode null");
+
+    AccessCodeEntity accessCodeEntity = getBeanMapper().map(accessCodeEto, AccessCodeEntity.class);
+
+    long queueEntityId = accessCodeEntity.getQueueId();
+
+    AccessCodeSearchCriteriaTo accessCodeSearchCriteriaTo = new AccessCodeSearchCriteriaTo();
+    accessCodeSearchCriteriaTo.setQueueId(queueEntityId);
+    Pageable pageable = PageRequest.of(0, 1000);
+    accessCodeSearchCriteriaTo.setPageable(pageable);
+
+    /**
+     * Calling the parent with the method getAccesscodemanagement() we use the method findAccessCodeEtos() that will
+     * call the implementation of the method inside (UcFindAccessCodeImpl) through the interface. This allows us to use
+     * the {@link UcFindAccessCodeImpl}.
+     */
+    List<AccessCodeEto> accessCodeEtosInQueue = getAccesscodemanagement().findAccessCodeEtos(accessCodeSearchCriteriaTo)
+        .getContent();
+
+    // if there are no ETOs, we set the ticket to the first code
+    // else we get the digit of the last ticket in the list and generate a new code for the ticket
+    if (accessCodeEtosInQueue.isEmpty()) {
+      accessCodeEntity.setTicketNumber("Q000");
+    } else {
+      AccessCodeEto lastAccessCode = accessCodeEtosInQueue.get(accessCodeEtosInQueue.size() - 1);
+      int lastTicketDigit = Integer.parseInt(lastAccessCode.getTicketNumber().substring(1));
+      accessCodeEntity.setTicketNumber(generateTicketCode(lastTicketDigit));
+    }
+
+    // set the creation time, startTime and endTime
+    accessCodeEntity.setCreationTime(Timestamp.from(Instant.now()));
+    accessCodeEntity.setStartTime(null);
+    accessCodeEntity.setEndTime(null);
+
+    // save the AccessCode
+    AccessCodeEntity accessCodeEntitySaved = getAccessCodeRepository().save(accessCodeEntity);
+    LOG.debug("The accesscode with id '{}' has been saved.", accessCodeEntitySaved.getId());
+
+    /**
+     * Using the method getQueuemanagement() gives access to the methods that were created earlier in the usecasemanage
+     * (inside the queue component). This is done so each component takes care of its own modifications.
+     */
+    getQueuemanagement().increaseQueueCustomer(accessCodeEntitySaved.getQueueId());
+
+    LOG.debug("The queue with id '{}' has increased its customers.", accessCodeEntitySaved.getQueueId());
+
+    return getBeanMapper().map(accessCodeEntitySaved, AccessCodeEto.class);
+  }
+
+  /**
+   * Generates a new ticket code using the ticket digit of the last access code created.
+   *
+   * @param lastTicketDigit the ticket digit of the last access code created.
+   * @return the String with the new ticket code (example: 'Q005').
+   */
+  public String generateTicketCode(int lastTicketDigit) {
+
+    int newTicketDigit = lastTicketDigit + 1;
+    String newTicketCode = "";
+    if (newTicketDigit ==  1000) {
+      newTicketCode = "Q000";
+    } else {
+      StringBuilder stringBuilder = new StringBuilder();
+      stringBuilder.append(newTicketDigit);
+      while (stringBuilder.length() < 3) {
+        stringBuilder.insert(0, "0");
+      }
+      stringBuilder.insert(0, "Q");
+      newTicketCode = stringBuilder.toString();
+    }
+    return newTicketCode;
+  }
+
+  public Queuemanagement getQueuemanagement() {
+
+    return this.queuemanagement;
+  }
+
+  public Accesscodemanagement getAccesscodemanagement() {
+
+    return this.accesscodemanagement;
+  }
+
+}
+
+
+
+

Taking a closer look at the code, we can see that, in order to use the methods from the UcFindAccessCodeImpl, we need to use the parent (Accesscodemanagement) instead of the class directly. Also, following the devon4j structure, each component needs to take care of its own. In this case, by using the method getQueuemanagement(), we get access to the Queuemanagement injection that will allow the use of the methods we created earlier in the use cases in the queue component.

+
+
+
+
+

Adding to the Logic

+
+
+

Inside jtqj-api, in the class accesscodemanagement/logic/api/Accesscodemanagement we are going to extend the UcManageAccessCode that we just defined:

+
+
+
+
...
+
+import com.devonfw.application.jtqj.accesscodemanagement.logic.api.usecase.UcFindAccessCode;
+import com.devonfw.application.jtqj.accesscodemanagement.logic.api.usecase.UcManageAccessCode;
+
+/**
+ * Interface for Accesscodemanagement component.
+ */
+public interface Accesscodemanagement extends UcFindAccessCode, UcManageAccessCode {
+
+}
+
+
+
+

After that, in jtqj-core, in the class accesscodemanagement/logic/impl/AccesscodemanagementImpl, we will see that an error has appeared, because the methods from the extended interfaces are missing. We add the unimplemented methods and inject the usecasemanage, solving the error:

+
+
+
+
...
+
+import javax.inject.Inject;
+import javax.inject.Named;
+
+import org.springframework.data.domain.Page;
+
+import com.devonfw.application.jtqj.accesscodemanagement.logic.api.Accesscodemanagement;
+import com.devonfw.application.jtqj.accesscodemanagement.logic.api.to.AccessCodeCto;
+import com.devonfw.application.jtqj.accesscodemanagement.logic.api.to.AccessCodeEto;
+import com.devonfw.application.jtqj.accesscodemanagement.logic.api.to.AccessCodeSearchCriteriaTo;
+import com.devonfw.application.jtqj.accesscodemanagement.logic.api.usecase.UcFindAccessCode;
+import com.devonfw.application.jtqj.accesscodemanagement.logic.api.usecase.UcManageAccessCode;
+import com.devonfw.application.jtqj.general.logic.base.AbstractComponentFacade;
+
+/**
+ * Implementation of component interface of Accesscodemanagement.
+ */
+@Named
+public class AccesscodemanagementImpl extends AbstractComponentFacade implements Accesscodemanagement {
+
+  @Inject
+  private UcFindAccessCode ucFindAccessCode;
+
+  @Inject
+  private UcManageAccessCode ucManageAccessCode;
+
+  @Override
+  public AccessCodeCto findAccessCodeCto(long id) {
+
+    return this.ucFindAccessCode.findAccessCodeCto(id);
+  }
+
+  @Override
+  public Page<AccessCodeCto> findAccessCodeCtos(AccessCodeSearchCriteriaTo criteria) {
+
+    return this.ucFindAccessCode.findAccessCodeCtos(criteria);
+  }
+
+  @Override
+  public void deleteAccessCode(long accessCodeId) {
+
+    this.ucManageAccessCode.deleteAccessCode(accessCodeId);
+  }
+
+  @Override
+  public AccessCodeEto saveAccessCode(AccessCodeEto accessCodeEto) {
+
+    return this.ucManageAccessCode.saveAccessCode(accessCodeEto);
+  }
+
+  @Override
+  public Page<AccessCodeEto> findAccessCodeEtos(AccessCodeSearchCriteriaTo criteria) {
+
+    return this.ucFindAccessCode.findAccessCodeEtos(criteria);
+  }
+
+}
+
+
+
+
+
+

Adding to the Service

+
+
+

To add the new service, we need to add its definition to the jtqj-api in accesscodemanagement/service/api/rest/AccesscodemanagementRestService.java. We are going to create a new /accessCode REST resource bound to three methods, one called findAccessCodeEtos, another one called saveAccessCode and yet another one called deleteAccessCode.

+
+
+
+
...
+
+public interface AccesscodemanagementRestService {
+
+  ...
+
+  /**
+   * Delegates to {@link Accesscodemanagement#findAccessCodeEtos}.
+   *
+   * @param searchCriteriaTo the pagination and search criteria to be used for finding accesscodes.
+   * @return the {@link Page list} of matching {@link AccessCodeEto}s.
+   */
+  @POST
+  @Path("/accesscode/search")
+  public Page<AccessCodeEto> findAccessCodeEtos(AccessCodeSearchCriteriaTo searchCriteriaTo);
+
+  /**
+   * Delegates to {@link Accesscodemanagement#saveAccessCode}.
+   *
+   * @param accessCodeEto the {@link AccessCodeEto} to be saved.
+   * @return the recently created {@link AccessCodeEto}.
+   */
+  @POST
+  @Path("/accesscode/")
+  public AccessCodeEto saveAccessCode(AccessCodeEto accessCodeEto);
+
+  /**
+   * Delegates to {@link Accesscodemanagement#deleteAccessCode}.
+   *
+   * @param id of the {@link AccessCodeEto} to be deleted.
+   */
+  @DELETE
+  @Path("/accesscode/{id}/")
+  public void deleteAccessCode(@PathParam("id") long id);
+
+}
+
+
+
+

Then we need to implement the new methods in the accesscodemanagement/service/impl/rest/AccesscodemanagementRestServiceImpl.java class:

+
+
+
+
...
+
+public class AccesscodemanagementRestServiceImpl implements AccesscodemanagementRestService {
+
+  ...
+
+  @Override
+  public AccessCodeEto saveAccessCode(AccessCodeEto accessCodeEto) {
+
+    return this.accesscodemanagement.saveAccessCode(accessCodeEto);
+  }
+
+  @Override
+  public void deleteAccessCode(long id) {
+
+    this.accesscodemanagement.deleteAccessCode(id);
+  }
+
+  @Override
+  public Page<AccessCodeEto> findAccessCodeEtos(AccessCodeSearchCriteriaTo searchCriteriaTo) {
+
+    return this.accesscodemanagement.findAccessCodeEtos(searchCriteriaTo);
+  }
+
+}
+
+
+
+
+
+

Testing the Changes

+
+
+

Now run the app again via Eclipse and use Postman to call the new save service via POST:
+http://localhost:8081/jumpthequeue/services/rest/accesscodemanagement/v1/accesscode/

+
+
+

In the body, provide an AccessCode object with the following required parameters:

+
+
+
+
{
+  "queueId" : "1",
+  "visitorId" : "1000000"
+}
+
+
+
+

The result should be something similar to this:

+
+
+
+JumpTheQueue `AccessCode` +
+
+
+

In order to know if the new access code has been successfully created, we can search all the CTOs, like we did in the previous steps. The new access code should be at the bottom:

+
+
+
+JumpTheQueue List with Code +
+
+
+

To test the delete, you can send a DELETE to this URL: +http://localhost:8081/jumpthequeue/services/rest/accesscodemanagement/v1/accesscode/{id} +Pass the AccessCode ID of the new entry, which can be found in the returned result of the save or the search operation.

+
+
+

In this chapter we learned how easy it is to extend a devon4j application. With only a few steps you can add new services to your back-end, to fit the functional requirements of your project, or edit services to adapt the default implementation to your needs.

+
+
+

In the next chapter we will show how to add validations for the data that we receive from the client.

+
+
+
+

Next Chapter: Validations in devon4j

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/devon4j-components.html b/docs/jump-the-queue/1.0/devon4j-components.html new file mode 100644 index 00000000..3caddea0 --- /dev/null +++ b/docs/jump-the-queue/1.0/devon4j-components.html @@ -0,0 +1,1655 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Components

+
+
+
+Architecture +
+
+
+

When working with devon4j the recommended approach for designing an application is Component Oriented Design. Each component will represent a significant part (or feature) of our application related to CRUD operations. Internally, the components will be divided into three layers (service, logic, and dataaccess) and will communicate in two directions: service with database or — in the logic layer — a component with another component.

+
+
+
+
+

Principles

+
+
+

The benefits of dividing our application into components are:

+
+
+ +
+
+
+
+

Component Structure

+
+
+

A component consists of three packages, which correspond to the three layers defined by the devon4j architecture: service, logic and dataaccess.

+
+
+
+Component Layers +
+
+
+
    +
  • +

    Service layer: exposes the REST API to exchange information with client applications

    +
  • +
  • +

    Logic layer: in charge of hosting the business logic of the application

    +
  • +
  • +

    Data Access layer: communicates with the database

    +
  • +
+
+
+

Apart from that, most components will have a fourth package — common — to store shared elements, which will be used by all layers of the component. It will contain common interfaces, constants, exceptions or enumerations.

+
+
+
+
+

Component Core

+
+
+

As we mentioned earlier, each component will be related to a functionality. This functionality will be represented in code by an Entity that defines all the properties needed to wrap the logic of that feature.

+
+
+

This Entity represents the "core" of the component and will be located in the dataaccess.api package.

+
+
+

The naming convention for these entities in devon4j is:

+
+
+
+
[Target]Entity
+
+
+
+

"Target" should match the name of the related table in the database — although this is not mandatory.

+
+
+

Basically, each Entity is a POJO (plain old Java object) that will be mapped to a table in the database and represent each column via a suitable property.

+
+
+
+Example Entity +
+
Example: An entity and its corresponding table in the DB (taken from another application).
+
+
+
+
+

Create your Components

+
+
+

We are now going to create our first app components. Our example application needs to provide two basic functionalities:

+
+
+
    +
  • +

    register a user (returning an access code)

    +
  • +
  • +

    show registered queue members

    +
  • +
+
+
+

To accomplish this we are going to work with three entities; Visitor, Queue and AccessCode:

+
+
+
+JumpTheQueue Entities +
+
+
+

The components will be defined as follows:

+
+
+

|== = +|Visitor | Access Code | Daily Queue

+
+
+

|username +|ticketNumber +|name

+
+
+

|name +|creationTime +|logo

+
+
+

|password +|startTime +|password

+
+
+

|phoneNumber +|endTime +|currentNumber

+
+
+

|acceptedCommercial +|-- +|attentionTime

+
+
+

|acceptedTerms +|-- +|minAttentionTime

+
+
+

|userType +|-- +|active

+
+
+

|-- +|-- +|customers +|== =

+
+
+

In addition, we will have to represent two relationships:

+
+
+
    +
  1. +

    The one to one relation between Visitor and Access Code.

    +
  2. +
  3. +

    The one to many relation between Daily Queue and Access Code.

    +
  4. +
+
+
+

Now is the moment to decide about the components of our app. The low complexity of the functionality would allow us to create only one component for managing all entities. In order to clarify the example, however, we are going to create three managing components; one for Visitor, one for Access Code and one for Daily Queue.

+
+
+
+
+

==

+
+
+

If you feel more comfortable managing all the entities in a single component, you could also do it this way. The result will be the same, the only difference will be the structure of some elements and the distribution of code inside the packages. +== ==

+
+
+
+
+

The Database

+
+
+

Projects created with the devon4j archetype already contain a pre-defined database schema, which we can use as a basis to create our own. We are going to utilize the H2 Database Engine, because our generated devon4j application uses it by default.

+
+
+

There are four pre-defined database schemas:

+
+
+
+
jtqj-core/src/main/resources/db/type/h2/V0001__Create_Sequence.sql
+jtqj-core/src/main/resources/db/type/h2/V0002__Create_RevInfo.sql
+jtqj-core/src/main/resources/db/type/h2/V0003__Create_BinaryObject.sql
+jtqj-core/src/main/resources/db/migration/1.0/V0004__Add_blob_data.sql
+
+
+
+
+
+

==

+
+
+

Maybe you need to install an SQL editor from the Eclipse Marketplace, or use an external one. +== ==

+
+
+
+
+

== Visitor Table

+
+
+

We are going to create our own table for Visitor(s) by right-clicking the folder /jtqj-core/src/main/resources/db/migration/1.0 and selecting New > File. Following the naming scheme we are going to call it:

+
+
+
+
V0005__Create_Visitor.sql
+
+
+
+

A visitor will provide: username, name, password, phoneNumber, acceptedCommercial and acceptedTerms in order to obtain an Access Code. We need to represent this data in our table:

+
+
+
+
create table Visitor(
+  id BIGINT NOT NULL AUTO_INCREMENT,
+  modificationCounter INTEGER NOT NULL,
+  username VARCHAR(255),
+  name VARCHAR(255),
+  password VARCHAR(255),
+  phoneNumber VARCHAR(255),
+  acceptedCommercial BOOL DEFAULT '0',
+  acceptedTerms BOOL NOT NULL DEFAULT '0',
+  userType BOOL DEFAULT '0',
+  CONSTRAINT PK_Visitor PRIMARY KEY(id)
+);
+
+
+
+
    +
  • +

    id: The ID of each visitor.

    +
  • +
  • +

    modificationCounter: Used internally by JPA to take care of optimistic locking for us.

    +
  • +
  • +

    username: The visitor's email address.

    +
  • +
  • +

    name: The visitors name.

    +
  • +
  • +

    password: The visitors password.

    +
  • +
  • +

    phoneNumber: The visitors phone number.

    +
  • +
  • +

    acceptedCommercial: A boolean to denote if the visitor has accepted the commercial agreements.

    +
  • +
  • +

    acceptedTerms: A boolean to denote if the visitor has accepted the terms & conditions.

    +
  • +
  • +

    userType: Denotes the type of user.

    +
  • +
+
+
+
+
+

== Daily Queue Table

+
+
+

In a second table we will represent the Daily Queue, which will contain name, logo, currentNumber, attentionTime, minAttentionTime, active and customers. This table will be created in /jtqj-core/src/main/resources/db/type/h2, and is called:

+
+
+
+
V0006__Create_Queue.sql
+
+
+
+

It will contain the following declarations:

+
+
+
+
create table DailyQueue(
+  id BIGINT NOT NULL AUTO_INCREMENT,
+  modificationCounter INTEGER NOT NULL,
+  name VARCHAR(255),
+  logo VARCHAR(255),
+  currentNumber VARCHAR(255),
+  attentionTime TIMESTAMP,
+  minAttentionTime TIMESTAMP NOT NULL DEFAULT '60000',
+  active BOOL NOT NULL DEFAULT '1',
+  customers INTEGER NOT NULL DEFAULT '0',
+  CONSTRAINT PK_DailyQueue PRIMARY KEY(id)
+);
+
+
+
+
    +
  • +

    id: The ID of each queue.

    +
  • +
  • +

    modificationCounter: Used internally by JPA to take care of optimistic locking for us.

    +
  • +
  • +

    name: The queues name.

    +
  • +
  • +

    logo: The queues logo.

    +
  • +
  • +

    currentNumber: the queue’s number being attended.

    +
  • +
  • +

    attentionTime: Average time required to attend a visitor.

    +
  • +
  • +

    minAttentionTime: Minimum time required to attend a visitor, set by default.

    +
  • +
  • +

    active: A boolean to denote if the queue is active.

    +
  • +
  • +

    customers: The queue's total number of customers.

    +
  • +
+
+
+
+
+

== Access Code Table

+
+
+

The third table will represent the Access Code and contain the ticketNumber, creationTime, startTime and endTime. This table will be created in /jtqj-core/src/main/resources/db/type/h2, and is called:

+
+
+
+
V0007__Create_Access_Code.sql
+
+
+
+

It will contain the following declarations:

+
+
+
+
CREATE TABLE AccessCode(
+  id BIGINT NOT NULL AUTO_INCREMENT,
+  modificationCounter INTEGER NOT NULL,
+  ticketNumber VARCHAR(5),
+  creationTime TIMESTAMP,
+  startTime TIMESTAMP,
+  endTime TIMESTAMP,
+  idVisitor BIGINT NOT NULL,
+  idQueue BIGINT NOT NULL,
+  CONSTRAINT PK_AccessCode PRIMARY KEY(id),
+  CONSTRAINT FK_AccessCode_idVisitor FOREIGN KEY(idVisitor) REFERENCES Visitor(id),
+  CONSTRAINT FK_AccessCode_idQueue FOREIGN KEY(idQueue) REFERENCES DailyQueue(id)
+);
+
+
+
+
    +
  • +

    id: The ID of each code.

    +
  • +
  • +

    modificationCounter: Used internally by JPA to take care of optimistic locking for us.

    +
  • +
  • +

    ticketNumber: The number of the ticket for a queue.

    +
  • +
  • +

    creationTime: The date and time of creation.

    +
  • +
  • +

    startTime: The date and time, from which the code is valid.

    +
  • +
  • +

    endTime: The date and time, when the code expires.

    +
  • +
  • +

    idVisitor: The relation with the Visitor table.

    +
  • +
  • +

    idQueue: The relation with the DailyQueue table.

    +
  • +
+
+
+
+
+

== Mock Data

+
+
+

Finally we are going to provide a certain amount of mock data, which will be available right from the start in our application. Create a new SQL script in /jtqj-core/src/main/resources/db/migration/1.0/, called:

+
+
+
+
V0008__Master_data.sql
+
+
+
+

Copy and paste the following data into it:

+
+
+
+
INSERT INTO Visitor (id, modificationCounter, username, name, password, phoneNumber, acceptedCommercial, acceptedTerms, userType) VALUES (0, 1, 'mike@mail.com', 'test', '1', '123456789', '0', '1', '1');
+INSERT INTO Visitor (id, modificationCounter, username, name, password, phoneNumber, acceptedCommercial, acceptedTerms, userType) VALUES (1, 1, 'peter@mail.com', 'test', '1', '123456789', '1', '1', '0');
+INSERT INTO Visitor (id, modificationCounter, username, name, password, phoneNumber, acceptedCommercial, acceptedTerms, userType) VALUES (2, 1, 'pablo@mail.com', 'test', '1', '123456789', '0', '1', '0');
+INSERT INTO Visitor (id, modificationCounter, username, name, password, phoneNumber, acceptedCommercial, acceptedTerms, userType) VALUES (3, 1, 'test1@mail.com', 'test', '1', '123456789', '0', '1', '0');
+INSERT INTO Visitor (id, modificationCounter, username, name, password, phoneNumber, acceptedCommercial, acceptedTerms, userType) VALUES (4, 1, 'test2@mail.com', 'test', '1', '123456789', '1', '1', '0');
+INSERT INTO Visitor (id, modificationCounter, username, name, password, phoneNumber, acceptedCommercial, acceptedTerms, userType) VALUES (5, 1, 'test3@mail.com', 'test', '1', '123456789', '0', '1', '0');
+INSERT INTO Visitor (id, modificationCounter, username, name, password, phoneNumber, acceptedCommercial, acceptedTerms, userType) VALUES (6, 1, 'test4@mail.com', 'test', '1', '123456789', '0', '1', '0');
+INSERT INTO Visitor (id, modificationCounter, username, name, password, phoneNumber, acceptedCommercial, acceptedTerms, userType) VALUES (7, 1, 'test5@mail.com', 'test', '1', '123456789', '1', '1', '0');
+INSERT INTO Visitor (id, modificationCounter, username, name, password, phoneNumber, acceptedCommercial, acceptedTerms, userType) VALUES (8, 1, 'test6@mail.com', 'test', '1', '123456789', '0', '1', '0');
+INSERT INTO Visitor (id, modificationCounter, username, name, password, phoneNumber, acceptedCommercial, acceptedTerms, userType) VALUES (9, 1, 'test7@mail.com', 'test', '1', '123456789', '0', '1', '0');
+
+INSERT INTO DailyQueue (id, modificationCounter, name, logo, currentNumber, attentionTime, minAttentionTime, active, customers) VALUES (1, 1, 'Day2', 'C:/logos/Day1Logo.png', 'Q001', NULL, '1970-01-01 00:01:00', TRUE, 9);
+
+INSERT INTO AccessCode (id, modificationCounter, ticketNumber, creationTime, startTime, endTime, idVisitor, idQueue) VALUES (1, 1, 'Q001', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, NULL, 1, 1);
+INSERT INTO AccessCode (id, modificationCounter, ticketNumber, creationTime, startTime, endTime, idVisitor, idQueue) VALUES (2, 1, 'Q002', CURRENT_TIMESTAMP, '2008-01-01 00:00:01', NULL, 2, 1);
+INSERT INTO AccessCode (id, modificationCounter, ticketNumber, creationTime, startTime, endTime, idVisitor, idQueue) VALUES (3, 1, 'Q003', CURRENT_TIMESTAMP, '2008-01-01 00:00:01', NULL, 3, 1);
+INSERT INTO AccessCode (id, modificationCounter, ticketNumber, creationTime, startTime, endTime, idVisitor, idQueue) VALUES (4, 1, 'Q004', CURRENT_TIMESTAMP, '2008-01-01 00:00:01', NULL, 4, 1);
+INSERT INTO AccessCode (id, modificationCounter, ticketNumber, creationTime, startTime, endTime, idVisitor, idQueue) VALUES (5, 1, 'Q005', CURRENT_TIMESTAMP, '2008-01-01 00:00:01', NULL, 5, 1);
+INSERT INTO AccessCode (id, modificationCounter, ticketNumber, creationTime, startTime, endTime, idVisitor, idQueue) VALUES (6, 1, 'Q006', CURRENT_TIMESTAMP, '2008-01-01 00:00:01', NULL, 6, 1);
+INSERT INTO AccessCode (id, modificationCounter, ticketNumber, creationTime, startTime, endTime, idVisitor, idQueue) VALUES (7, 1, 'Q007', CURRENT_TIMESTAMP, '2008-01-01 00:00:01', NULL, 7, 1);
+INSERT INTO AccessCode (id, modificationCounter, ticketNumber, creationTime, startTime, endTime, idVisitor, idQueue) VALUES (8, 1, 'Q008', CURRENT_TIMESTAMP, '2008-01-01 00:00:01', NULL, 8, 1);
+INSERT INTO AccessCode (id, modificationCounter, ticketNumber, creationTime, startTime, endTime, idVisitor, idQueue) VALUES (9, 1, 'Q009', CURRENT_TIMESTAMP, '2008-01-01 00:00:01', NULL, 9, 1);
+
+
+
+
+
+

The Core of the Components

+
+
+

Now that we have defined the database for our entities, we should start creating the code of the related components.

+
+
+

We are going to use CobiGen to generate the component structure. That means that — as already commented — we can generate all the structure and layers starting from a core element: a simple Plain Old Java Object that represents our Entity. So, in order to use CobiGen, we have to create our entities in the expected locations (as you will see in the following section): <entitymanagement>.dataaccess.api.

+
+
+
+
+

== Visitor Component

+
+
+

To implement the component we will need to define a VisitorEntity to connect and manage the data of the Visitor table in the database. The name of this component will be visitormanagement, the entity will be called VisitorEntity.

+
+
+

Right-click on the root folder of the project /jtqj-core/src/main/java, select New > Package and create the following package:

+
+
+
+
com.devonfw.application.jtqj.visitormanagement.dataaccess.api
+
+
+
+
+New Package Creation Step 1 +
+
+
+
+New Package Creation Step 2 +
+
+
+

Now create a new Java class in this package and call it VisitorEntity:

+
+
+
+New Class Creation +
+
+
+

We are going to need fields, which represent the data model, so our entity should contain the following code:

+
+
+
+
  ...
+
+  private String username;
+
+  private String name;
+
+  private String phoneNumber;
+
+  private String password;
+
+  private Boolean acceptedCommercial;
+
+  private Boolean acceptedTerms;
+
+  private Boolean userType;
+
+  ...
+
+
+
+
+
+

==

+
+
+

We are not adding id or modificationCounter, because CobiGen will generate these fields for us. +== ==

+
+
+

Now we need to declare our entity as a JPA entity with the @Entity annotation (javax.persistence.Entity) at class level. To map the entity to the database table, we will use the @Table annotation (javax.persistence.Table) defining the name of our already created Visitor table (also at class level):

+
+
+
+
...
+
+@Entity
+@Table(name = "Visitor")
+public class VisitorEntity {
+
+  ...
+
+
+
+

Now we have to declare the getter and setter methods for the fields of our entity. We can do this manually or automatically generate them using Eclipse:

+
+
+
+Generating Getter and Setter Methods with Eclipse +
+
+
+

The resulting implementation of our VisitorEntity class should now look like this:

+
+
+
+
package com.devonfw.application.jtqj.visitormanagement.dataaccess.api;
+
+import javax.persistence.Entity;
+import javax.persistence.Table;
+
+@Entity
+@Table(name = "Visitor")
+public class VisitorEntity {
+
+  private String username;
+
+  private String name;
+
+  private String phoneNumber;
+
+  private String password;
+
+  private Boolean acceptedCommercial;
+
+  private Boolean acceptedTerms;
+
+  private Boolean userType;
+
+  /**
+   * @return the username
+   */
+  public String getUsername() {
+    return username;
+  }
+
+  /**
+   * @param username the username to set
+   */
+  public void setUsername(String username) {
+    this.username = username;
+  }
+
+  /**
+   * @return the name
+   */
+  public String getName() {
+    return name;
+  }
+
+  /**
+   * @param name the name to set
+   */
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  /**
+   * @return the phoneNumber
+   */
+  public String getPhoneNumber() {
+    return phoneNumber;
+  }
+
+  /**
+   * @param phoneNumber the phoneNumber to set
+   */
+  public void setPhoneNumber(String phoneNumber) {
+    this.phoneNumber = phoneNumber;
+  }
+
+  /**
+   * @return the password
+   */
+  public String getPassword() {
+    return password;
+  }
+
+  /**
+   * @param password the password to set
+   */
+  public void setPassword(String password) {
+    this.password = password;
+  }
+
+  /**
+   * @return the acceptedCommercial
+   */
+  public Boolean getAcceptedCommercial() {
+    return acceptedCommercial;
+  }
+
+  /**
+   * @param acceptedCommercial the acceptedCommercial to set
+   */
+  public void setAcceptedCommercial(Boolean acceptedCommercial) {
+    this.acceptedCommercial = acceptedCommercial;
+  }
+
+  /**
+   * @return the acceptedTerms
+   */
+  public Boolean getAcceptedTerms() {
+    return acceptedTerms;
+  }
+
+  /**
+   * @param acceptedTerms the acceptedTerms to set
+   */
+  public void setAcceptedTerms(Boolean acceptedTerms) {
+    this.acceptedTerms = acceptedTerms;
+  }
+
+  /**
+   * @return the userType
+   */
+  public Boolean getUserType() {
+    return userType;
+  }
+
+  /**
+   * @param userType the userType to set
+   */
+  public void setUserType(Boolean userType) {
+    this.userType = userType;
+  }
+
+}
+
+
+
+
+
+

== AccessCode component

+
+
+

We are going to repeat the same process for the AccessCode component. Create these packages in /jtqj-core/src/main/java:

+
+
+
+
com.devonfw.application.jtqj.accesscodemanagement.dataaccess.api
+
+
+
+

... and create a class called AccessCodeEntity inside of them.
+We will end up with the following structure:

+
+
+
+`AccessCode` Entity +
+
+
+

The contents of AccessCodeEntity before using CobiGen will be:

+
+
+
+
package com.devonfw.application.jtqj.accesscodemanagement.dataaccess.api;
+
+import java.sql.Timestamp;
+
+import javax.persistence.CascadeType;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+import javax.persistence.Temporal;
+import javax.persistence.TemporalType;
+import javax.validation.constraints.Size;
+
+import com.devonfw.application.jtqj.visitormanagement.dataaccess.api.VisitorEntity;
+
+@Entity
+@Table(name = "AccessCode")
+public class AccessCodeEntity {
+
+  @Size(min = 2, max = 5)
+  private String ticketNumber;
+
+  @Temporal(TemporalType.TIMESTAMP)
+  private Timestamp creationTime;
+
+  @Temporal(TemporalType.TIMESTAMP)
+  private Timestamp startTime;
+
+  @Temporal(TemporalType.TIMESTAMP)
+  private Timestamp endTime;
+
+  private VisitorEntity visitor;
+
+  private QueueEntity queue;
+
+  /**
+   * @return the ticketNumber
+   */
+  public String getTicketNumber() {
+    return ticketNumber;
+  }
+
+  /**
+   * @param ticketNumber the ticketNumber to set
+   */
+  public void setTicketNumber(String ticketNumber) {
+    this.ticketNumber = ticketNumber;
+  }
+
+  /**
+   * @return the creationTime
+   */
+  public Timestamp getCreationTime() {
+    return creationTime;
+  }
+
+  /**
+   * @param creationTime the creationTime to set
+   */
+  public void setCreationTime(Timestamp creationTime) {
+    this.creationTime = creationTime;
+  }
+
+  /**
+   * @return the startTime
+   */
+  public Timestamp getStartTime() {
+    return startTime;
+  }
+
+  /**
+   * @param startTime the startTime to set
+   */
+  public void setStartTime(Timestamp startTime) {
+    this.startTime = startTime;
+  }
+
+  /**
+   * @return the endTime
+   */
+  public Timestamp getEndTime() {
+    return endTime;
+  }
+
+  /**
+   * @param endTime the endTime to set
+   */
+  public void setEndTime(Timestamp endTime) {
+    this.endTime = endTime;
+  }
+
+  /**
+   * @return the visitor
+   */
+  @OneToOne(cascade = CascadeType.DETACH, fetch = FetchType.EAGER)
+  @JoinColumn(name = "idVisitor")
+  public VisitorEntity getVisitor() {
+    return visitor;
+  }
+
+  /**
+   * @param visitor the visitor to set
+   */
+  public void setVisitor(VisitorEntity visitor) {
+    this.visitor = visitor;
+  }
+
+  /**
+   * @return the queue
+   */
+  @ManyToOne(cascade = CascadeType.DETACH, fetch = FetchType.EAGER)
+  @JoinColumn(name = "idQueue")
+  public QueueEntity getQueue() {
+    return queue;
+  }
+
+  /**
+   * @param queue the queue to set
+   */
+  public void setQueue(QueueEntity queue) {
+    this.queue = queue;
+  }
+
+}
+
+
+
+
+
+

==

+
+
+

Eclipse will report some errors related to QueueEntity.
+These will be resolved when we create the corresponding class in the next step. +== ==

+
+
+
+
+

== Queue Component

+
+
+

Finally, we are going to repeat the same process for our last entity QueueEntity component. Create these packages in /jtqj-core/src/main/java:

+
+
+
+
com.devonfw.application.jtqj.queuemanagement.dataaccess.api
+
+
+
+

... and create a class called QueueEntity inside of them.
+We will end up with the following structure:

+
+
+
+Queue Entity +
+
+
+

The contents of QueueEntity before using CobiGen will be:

+
+
+
+
package com.devonfw.application.jtqj.queuemanagement.dataaccess.api;
+
+import java.sql.Timestamp;
+
+import javax.persistence.Entity;
+import javax.persistence.Table;
+import javax.persistence.Temporal;
+import javax.persistence.TemporalType;
+
+@Entity
+@Table(name = "DailyQueue")
+public class QueueEntity {
+
+  private String name;
+
+  private String logo;
+
+  private String currentNumber;
+
+  @Temporal(TemporalType.TIMESTAMP)
+  private Timestamp attentionTime;
+
+  @Temporal(TemporalType.TIMESTAMP)
+  private Timestamp minAttentionTime;
+
+  private Boolean active;
+
+  private int customers;
+
+  /**
+   * @return the name
+   */
+  public String getName() {
+    return name;
+  }
+
+  /**
+   * @param name the name to set
+   */
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  /**
+   * @return the logo
+   */
+  public String getLogo() {
+    return logo;
+  }
+
+  /**
+   * @param logo the logo to set
+   */
+  public void setLogo(String logo) {
+    this.logo = logo;
+  }
+
+  /**
+   * @return the currentNumber
+   */
+  public String getCurrentNumber() {
+    return currentNumber;
+  }
+
+  /**
+   * @param currentNumber the currentNumber to set
+   */
+  public void setCurrentNumber(String currentNumber) {
+    this.currentNumber = currentNumber;
+  }
+
+  /**
+   * @return the attentionTime
+   */
+  public Timestamp getAttentionTime() {
+    return attentionTime;
+  }
+
+  /**
+   * @param attentionTime the attentionTime to set
+   */
+  public void setAttentionTime(Timestamp attentionTime) {
+    this.attentionTime = attentionTime;
+  }
+
+  /**
+   * @return the minAttentionTime
+   */
+  public Timestamp getMinAttentionTime() {
+    return minAttentionTime;
+  }
+
+  /**
+   * @param minAttentionTime the minAttentionTime to set
+   */
+  public void setMinAttentionTime(Timestamp minAttentionTime) {
+    this.minAttentionTime = minAttentionTime;
+  }
+
+  /**
+   * @return the active
+   */
+  public Boolean getActive() {
+    return active;
+  }
+
+  /**
+   * @param active the active to set
+   */
+  public void setActive(Boolean active) {
+    this.active = active;
+  }
+
+  /**
+   * @return the customers
+   */
+  public int getCustomers() {
+    return customers;
+  }
+
+  /**
+   * @param customers the customers to set
+   */
+  public void setCustomers(int customers) {
+    this.customers = customers;
+  }
+
+}
+
+
+
+

Now we have finished preparing the core of our components and can start using CobiGen to generate the remaining structure (services, layers, DAOs, …​).

+
+
+
+
+

==

+
+
+

Now we can resolve the compilation errors related to QueueEntity in the AccessCodeEntity.java by applying the suggestions of the IDE. To do this, open the offending file, click the first red light bulb on the left border of the editor and select Import 'QueueEntity' (com.devonfw. …​).

+
+
+

Or just manually add this line to your import statements:

+
+
+
+
import com.devonfw.application.jtqj.queuemanagement.dataaccess.api.QueueEntity;
+
+
+
+
+
+

==

+
+ +
+
+
+

The Component Structure (using CobiGen)

+
+
+

Once we are finished creating the core of our components we could continue to create the structure and all elements manually, but we are going to use CobiGen for these tasks, since we can save a significant amount of time and effort this way.

+
+
+

First however, we need to make sure that the CobiGen plugin is installed in our Eclipse instance:

+
+
+
+CobiGen Plugin Check +
+
+
+

If you don’t see this option in the dropdown menu, close Eclipse (remember to save all your progress) and in the jump-the-queue folder right-click and select Open Devon CMD shell here.

+
+
+

Now enter and execute:

+
+
+
+
devon eclipse add-plugin cobigen
+
+
+
+

... and re-open Eclipse via the eclipse-main.bat script.

+
+
+
+
+

== CobiGen Health Check

+
+
+

When using CobiGen for the first time it’s recommended to check the health of the tool.
+To do so, right-click one of our entities and select CobiGen > Health Check…​.

+
+
+
+CobiGen Health Check 1 +
+
+
+

The next dialogs will show us if there are outdated templates. In that case just click the "Update" button. You can also run an Advanced Health Check to see exactly which CobiGen templates are available for this project.

+
+
+
+CobiGen Health Check 2 +
+
+
+
+CobiGen Health Check 3 +
+
+
+

In case you receive an error like this: +image::images/devon4j/4.Components/templates_not_found.png[CobiGen Health Check 3, 400]

+
+
+

You need to force download of templates as in the following image: +image::images/devon4j/4.Components/adapt-templates.png[CobiGen Health Check 3, 550]

+
+
+

Now the templates should be downloaded, and you will see a new folder in the workspace: +image::images/devon4j/4.Components/cobigen-folder.png[CobiGen Health Check 3, 400]

+
+
+
+
+

== Queue Component Structure (Entity without Relations)

+
+
+

In order to create the whole structure of a component with CobiGen we only need to right-click our component core entity (QueueEntity) and select CobiGen > Generate.

+
+
+
+CobiGen Generate +
+
+
+

Now we’ll get to choose which packages we want to generate with the tool.

+
+
+

To get the needed functionalities for our component we are going to select all of the following packages at the same time:

+
+
+
+CobiGen Package Selection +
+
+
+

By default, all files will be selected for generation (which is what we want in this case), but you could also change which files will be generated by clicking Customize.

+
+
+

For now just click Finish and let CobiGen do its work.

+
+
+
+
+

==

+
+
+

In detail the selected options do the following:

+
+
+
    +
  • +

    CRUD SpringData Repository: Generates the entity repository (that contains the CRUD operations) in the data access layer.

    +
  • +
+
+
+
+CobiGen CRUD Spring Data Repository +
+
+
+
    +
  • +

    CRUD REST Services: Generates a complete service layer with CRUD operations for our entity exposed as a REST service.

    +
  • +
+
+
+
+CobiGen CRUD REST Services +
+
+
+
    +
  • +

    CRUD UC Logic: Generates the logic layer dividing the implementation in different use cases.

    +
  • +
+
+
+
+CobiGen CRUD UC Logic +
+
+
+
    +
  • +

    Entity Infrastructure: Creates the entity main interface and edits (by a merge) the current entity to extend the devon classes.

    +
  • +
+
+
+
+CobiGen Entity Infrastructure +
+
+
+
    +
  • +

    TO’s: Generates the related Transfer Objects, that we will explain in next chapters of this tutorial.

    +
  • +
+
+
+
+CobiGen Transfer Objects +
+
+
+
+
+

==

+
+
+

During the process CobiGen will show a message asking us to review some ambiguous references, which we will get to right away. For now just click Continue.

+
+
+
+CobiGen Import Review +
+
+
+

Once CobiGen has finished generating the new classes, we will check for and fix those ambiguous references if we need to introduce manual adjustments.

+
+
+

First, we need to adjust manually some imports related to Timestamp in:

+
+
+

jtqj-core:

+
+
+
    +
  • +

    queuemanagement.dataaccess.api.repo.QueueRepository

    +
  • +
+
+
+

jtqj-api:

+
+
+
    +
  • +

    queuemanagement.common.api.Queue

    +
  • +
  • +

    queuemanagement.logic.api.to.QueueEto

    +
  • +
  • +

    queuemanagement.logic.api.to.QueueSearchCriteriaTo

    +
  • +
+
+
+

We can fix these errors manually by adding import java.sql.Timestamp to the affected Java files:

+
+
+
+CobiGen Manual Import +
+
+
+
+
+

== AccessCode Component Structure (Entity with Relations)

+
+
+

We repeat this process on our AccessCodeEntity, but in this case — since it’s an entity with relations — we are going to have to select different CobiGen options:

+
+
+
+CobiGen New `AccessCode` +
+
+
+

After CobiGen has finished generating, fix the issues regarding import java.sql.Timestamp (as you did in the last step) in the following files:

+
+
+

jtqj-core:

+
+
+
    +
  • +

    accesscodemanagement.dataaccess.api.repo.AccessCodeRepository

    +
  • +
+
+
+

jtqj-api:

+
+
+
    +
  • +

    accesscodemanagement.common.api.AccessCode

    +
  • +
  • +

    accesscodemanagement.logic.api.to.AccessCodeEto

    +
  • +
  • +

    accesscodemanagement.logic.api.to.AccessCodeSearchCriteriaTo

    +
  • +
+
+
+

There will be some compilation errors left. This is because we have some dependencies on Queue and Visitor component elements, that are not created yet. These compilation errors will be fixed in the next steps.

+
+
+
+CobiGen Expected Errors +
+
+
+
+
+

== Visitor Component Structure (Entity without Relations)

+
+
+

Finally we are going to generate the same classes that we generated for the QueueEntity component for our VisitorEntity component:

+
+
+
+CobiGen New Visitor +
+
+
+

Once CobiGen has finished we can fix the rest of the compilation errors related to VisitorEto by manually importing the class into:

+
+
+

jtqj-core:

+
+
+
    +
  • +

    accesscodemanagement.logic.impl.usecase.UcFindAccessCodeImpl

    +
  • +
+
+
+

jtqj-api:

+
+
+
    +
  • +

    accesscodemanagement.logic.api.to.AccessCodeCto

    +
  • +
+
+
+
+
+

== Run the App

+
+
+

If all compilation errors are solved run the app (right-click SpringBootApp.java > Run As > Java Application). The back-end should launch without errors.

+
+
+

Congratulations!
+You have created your first devon4j components. You should be able to access the login screen via localhost:8081/jumpthequeue. You can login with the username and password "waiter". In the next chapter we will show and explain each of the created elements in detail.

+
+
+
+

Next Chapter: devon4j Structure

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/devon4j-creating-a-project.html b/docs/jump-the-queue/1.0/devon4j-creating-a-project.html new file mode 100644 index 00000000..def400be --- /dev/null +++ b/docs/jump-the-queue/1.0/devon4j-creating-a-project.html @@ -0,0 +1,484 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Introduction

+
+
+

In this chapter you will learn to create a workspace and create a project using the CLI or the GUI. When using devon, each project has its own workspace and can be edited by different instances of the IDE, e.g. eclipse.

+
+
+
+
+

Command Line Interface

+
+
+
    +
  • +

    Go to your Devon-dist. folder. Open console.bat

    +
  • +
  • +

    Execute: cd workspaces

    +
  • +
  • +

    Execute: devon workspace create -workspace MyWS

    +
  • +
  • +

    If the job is finished, use CTRL+C to stop the process and answer y to the question

    +
  • +
  • +

    Execute: cd MyWS

    +
  • +
  • +

    Execute: devon devon4j create -p and insert the following entries.

    +
    +
      +
    • +

      serverpath: hit Enter. (So it will be the current directory).

      +
    • +
    • +

      servername: myServer (or any name you like).

      +
    • +
    • +

      packagename: com.capgemini.myServer

      +
    • +
    • +

      groupid: com.capgemini

      +
    • +
    • +

      version: 1.0

      +
    • +
    • +

      dbtype: h2

      +
    • +
    +
    +
  • +
+
+
+
+Devcon Back-end Creation +
+
+
+
    +
  • +

    Execute: update-all-workspaces.bat from your devon-dist. folder.

    +
  • +
  • +

    Execute: cd MyServer

    +
  • +
  • +

    Execute: mvn install (installs the Maven project).

    +
  • +
  • +

    Execute: devon devon4j run -port 8081

    +
  • +
+
+
+
+
+

==

+
+
+

There are two commands devon and devcon. Any of these two commands can be used. With the -h option you will always get help, e.g. devon -h or devon workspaces -h. +== ==

+
+
+

We will see later how to access this server. For now just use CTRL+C to stop it from running.

+
+
+
+
+

Graphical User Interface

+
+
+
    +
  • +

    Open: console.bat

    +
  • +
  • +

    Go to your workspaces directory.

    +
  • +
  • +

    Execute: devcon -g

    +
  • +
  • +

    The GUI should open. Open workspaces select "create".

    +
  • +
+
+
+
+Devcon GUI Workspace 1 +
+
+
+
    +
  • +

    Insert: MyProject. Click "Start".

    +
  • +
+
+
+
+Devcon GUI Workspace 2 +
+
+
+
    +
  • +

    Click "back".

    +
  • +
  • +

    Open "devon4j". Select "create".

    +
    +
      +
    • +

      serverpath: ../MyProject/Servers

      +
    • +
    • +

      servername: mp

      +
    • +
    • +

      packagename: com.devonfw.application.mp

      +
    • +
    • +

      groupid: com.devonfw.mp

      +
    • +
    • +

      version: v4

      +
    • +
    • +

      dbtype: h2

      +
    • +
    +
    +
  • +
+
+
+
+Devcon GUI Project +
+
+
+
    +
  • +

    Press: "Start"

    +
  • +
  • +

    From your devon-dist. folder execute: update-all-workspaces.bat

    +
  • +
  • +

    From your devon-dist. folder execute: eclipse-jumpthequeue.bat

    +
  • +
  • +

    Eclipse instance should open.

    +
  • +
+
+
+

Now you know how to create a project with devon/devcon.

+
+
+
+
+

==

+
+
+

You can also create new projects:

+
+
+ +
+
+
+
+

==

+
+
+
+

Next Chapter: JumpTheQueue Design

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/devon4j-deployment.html b/docs/jump-the-queue/1.0/devon4j-deployment.html new file mode 100644 index 00000000..5ea67e78 --- /dev/null +++ b/docs/jump-the-queue/1.0/devon4j-deployment.html @@ -0,0 +1,651 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Deployment of devon4j Applications

+
+
+

As mentioned already in the devon4j project section, apart from the core and api project, apps created with devon4j also provide a server project that configures the packaging of the app.

+
+
+

In our JumpTheQueue app we can verify that this server project is available:

+
+
+
+JumpTheQueue Server Structure +
+
+
+

So — using Maven — we are going to be able to easily package our app in a .war file to be deployed in an application server like Tomcat (the default server provided in devonfw).

+
+
+
+
+

The server Project

+
+
+

The server project provided in devon4j applications is an almost empty Maven project. It only has a pom.xml file that is used to configure the packaging of the core project. Taking a closer look at this pom.xml file, we realize that it only contains a single dependency to the core project:

+
+
+
+
...
+
+  <dependencies>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>jtqj-core</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+  </dependencies>
+
+...
+
+
+
+

It also includes the Spring Boot Maven Plugin, that allows us to package the project in .jar or .war archives and run the application "in-place":

+
+
+
+
...
+
+    <plugins>
+      <plugin>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-maven-plugin</artifactId>
+        ...
+
+      </plugin>
+    </plugins>
+
+...
+
+
+
+
+
+

Disabling Security Tests

+
+
+

Since this is a basic tutorial and there is no security or permission handling, we have to modify the files SecurityRestServiceImplTest and PermissionCheckTest in jtqj-core, to disable the tests for these features. This is done by adding the @Disabled annotation to the affected test methods.

+
+
+

Your Eclipse should automatically add the required dependencies for the annotation once you save the files, so they should contain the same import statements as shown below. More specifically org.junit.jupiter will be replacing the older org.junit test framework, which was used before.

+
+
+

First, in src/test/java/…​/general/service/impl/rest/SecurityRestServiceImplTest.java disable the testLogin() method and make sure that the class is annotated with @ExtendWith(SpringExtension.class), instead of the older @RunWith(SpringRunner.class) annotation:

+
+
+
+
+

==

+
+
+

If this class doesn’t exist, go to the next one +== ==

+
+
+
+
package com.devonfw.application.jtqj.general.service.impl.rest;
+
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+import org.springframework.security.web.csrf.CsrfToken;
+import org.springframework.test.context.junit.jupiter.SpringExtension;
+import org.springframework.web.client.RestTemplate;
+
+import com.devonfw.application.jtqj.general.common.api.to.UserProfileTo;
+import com.devonfw.application.jtqj.general.service.api.rest.SecurityRestService;
+import com.devonfw.application.jtqj.general.service.base.test.RestServiceTest;
+import com.devonfw.module.service.common.api.client.config.ServiceClientConfigBuilder;
+
+/**
+ * This class tests the login functionality of {@link SecurityRestServiceImpl}.
+ */
+@ExtendWith(SpringExtension.class)
+public class SecurityRestServiceImplTest extends RestServiceTest {
+
+  /** Logger instance. */
+  private static final Logger LOG = LoggerFactory.getLogger(SecurityRestServiceImplTest.class);
+
+  /**
+   * Test the login functionality as it will be used from a JavaScript client.
+   */
+  @Test
+  @Disabled // Security via Login is currently not implemented, so ignore this test
+  public void testLogin() {
+
+    String login = "waiter";
+    String password = "waiter";
+
+    ResponseEntity<String> postResponse = login(login, password);
+    LOG.debug("Body: " + postResponse.getBody());
+    assertThat(postResponse.getStatusCode()).isEqualTo(HttpStatus.OK);
+    assertThat(postResponse.getHeaders().containsKey(HttpHeaders.SET_COOKIE)).isTrue();
+  }
+
+  /**
+   * Test of {@code SecurityRestService.getCsrfToken()}.
+   */
+  @Test
+  public void testGetCsrfToken() {
+
+    String login = "waiter";
+    String password = "waiter";
+    SecurityRestService securityService = getServiceClientFactory().create(SecurityRestService.class,
+        new ServiceClientConfigBuilder().host("localhost").authBasic().userLogin(login).userPassword(password)
+            .buildMap());
+    CsrfToken csrfToken = securityService.getCsrfToken(null, null);
+    assertThat(csrfToken.getHeaderName()).isEqualTo("X-CSRF-TOKEN");
+    assertThat(csrfToken.getParameterName()).isEqualTo("_csrf");
+    assertThat(csrfToken.getToken()).isNotNull();
+    LOG.debug("Csrf Token: {}", csrfToken.getToken());
+  }
+
+  /**
+   * Test of {@link SecurityRestService#getCurrentUser()}.
+   */
+  @Test
+  public void testGetCurrentUser() {
+
+    String login = "waiter";
+    String password = "waiter";
+    SecurityRestService securityService = getServiceClientFactory().create(SecurityRestService.class,
+        new ServiceClientConfigBuilder().host("localhost").authBasic().userLogin(login).userPassword(password)
+            .buildMap());
+    UserProfileTo userProfile = securityService.getCurrentUser();
+    assertThat(userProfile.getLogin()).isEqualTo(login);
+  }
+
+  /**
+   * Performs the login as required by a JavaScript client.
+   *
+   * @param userName the username of the user
+   * @param tmpPassword the password of the user
+   * @return {@link ResponseEntity} containing a cookie in its header.
+   */
+  private ResponseEntity<String> login(String userName, String tmpPassword) {
+
+    String tmpUrl = "http://localhost:" + String.valueOf(this.port) + "/services/rest/login";
+
+    HttpEntity<String> postRequest = new HttpEntity<>(
+        "{\"j_username\": \"" + userName + "\", \"j_password\": \"" + tmpPassword + "\"}", new HttpHeaders());
+
+    ResponseEntity<String> postResponse = new RestTemplate().exchange(tmpUrl, HttpMethod.POST, postRequest,
+        String.class);
+    return postResponse;
+  }
+}
+
+
+
+

And in src/test/java/…​/general/common/base/PermissionCheckTest.java just disable the permissionCheckAnnotationPresent() method:

+
+
+
+
package com.devonfw.application.jtqj.general.common.base;
+
+import java.lang.reflect.Method;
+import java.util.Set;
+
+import javax.annotation.security.DenyAll;
+import javax.annotation.security.PermitAll;
+import javax.annotation.security.RolesAllowed;
+
+import net.sf.mmm.util.filter.api.Filter;
+import net.sf.mmm.util.reflect.api.ReflectionUtil;
+import net.sf.mmm.util.reflect.base.ReflectionUtilImpl;
+
+import org.assertj.core.api.SoftAssertions;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+
+import com.devonfw.module.test.common.base.ModuleTest;
+
+/**
+ * Tests the permission check in logic layer.
+ */
+public class PermissionCheckTest extends ModuleTest {
+
+  /**
+   * Check if all relevant methods in use case implementations have permission checks i.e. {@link RolesAllowed},
+   * {@link DenyAll} or {@link PermitAll} annotation is applied. This is only checked for methods that are declared in
+   * the corresponding interface and thus have the {@link Override} annotations applied.
+   */
+  @Test
+  @Disabled // Permission Checks are currently not implemented, so ignore this test
+  public void permissionCheckAnnotationPresent() {
+
+    String packageName = "com.devonfw.application.jtqj";
+    Filter<String> filter = new Filter<String>() {
+
+      @Override
+      public boolean accept(String value) {
+
+        return value.contains(".logic.impl.usecase.Uc") && value.endsWith("Impl");
+      }
+
+    };
+    ReflectionUtil ru = ReflectionUtilImpl.getInstance();
+    Set<String> classNames = ru.findClassNames(packageName, true, filter);
+    Set<Class<?>> classes = ru.loadClasses(classNames);
+    SoftAssertions assertions = new SoftAssertions();
+    for (Class<?> clazz : classes) {
+      Method[] methods = clazz.getDeclaredMethods();
+      for (Method method : methods) {
+        Method parentMethod = ru.getParentMethod(method);
+        if (parentMethod != null) {
+          Class<?> declaringClass = parentMethod.getDeclaringClass();
+          if (declaringClass.isInterface() && declaringClass.getSimpleName().startsWith("Uc")) {
+            boolean hasAnnotation = false;
+            if (method.getAnnotation(RolesAllowed.class) != null || method.getAnnotation(DenyAll.class) != null
+                || method.getAnnotation(PermitAll.class) != null) {
+              hasAnnotation = true;
+            }
+            assertions.assertThat(hasAnnotation)
+                .as("Method " + method.getName() + " in Class " + clazz.getSimpleName() + " is missing access control")
+                .isTrue();
+          }
+        }
+      }
+    }
+    assertions.assertAll();
+  }
+}
+
+
+
+

This is going to allow our application to pass the tests and be built.

+
+
+
+
+

Running the App with Maven

+
+
+

Thanks to Spring Boot and the Spring Boot Maven Plugin, we can run our app using Maven. To do so, just open a command prompt with access to Maven (in our devonfw project folder we can simply do so by right clicking and selecting Open Devon CMD shell here).

+
+
+

Now we need to follow these steps:

+
+
+

1.- As is explained in the devon4j configuration guide, the default application.properties file used for packaging is located in src/main/resources/ (don’t use the one located in src/main/resources/config/). We need to modify some settings in this file in order to gain access to the app:

+
+
+
+
server.port=8081
+
+spring.application.name=jtqj
+server.servlet.context-path=/jumpthequeue
+
+
+
+

2.- Install the jtqj project in our local Maven repository:

+
+
+
+
C:\...\workspaces\main\jumpthequeue\java\jtqj> mvn install
+
+
+
+

3.- Go to the jtqj/server project and boot the application:

+
+
+
+
C:\...\workspaces\main\jumpthequeue\java\jtqj\server> mvn spring-boot:run
+
+
+
+

The app should be launched in the Spring Boot embedded Tomcat server. Wait a few seconds until you see a console message like this:

+
+
+
+
{"timestamp":"20XX-XX-XXTXX:XX:XX.XXX+00:00","message":"Tomcat started on port(s): 8081 (http) with context path '/jumpthequeue'","logger_name":"org.springframework.boot.web.embedded.tomcat.TomcatWebServer","thread_name":"main","level":"INFO","appname":"jtqj"}
+{"timestamp":"20XX-XX-XXTXX:XX:XX.XXX+00:00","message":"Started SpringBootApp in XX.XXX seconds (JVM running for XX.XXX)","logger_name":"com.devonfw.application.jtqj.SpringBootApp","thread_name":"main","level":"INFO","appname":"jtqj"}
+
+
+
+

Now we can try to access the app resource.

+
+ +
+
+JumpTheQueue Simple GET Request +
+
+
+

If you get a response similar to the one in the image, you have verified that the app is running fine.

+
+
+
+
+

Packaging the App with Maven

+
+
+

In the same way, using Maven we can package our project in a .war file. As in the previous section, open a command prompt with access to Maven (in our devonfw project folder we can simply do so by right clicking and selecting Open Devon CMD shell here). Now execute the following command in the projects root directory:

+
+
+
+
C:\...\workspaces\main\jumpthequeue\java\jtqj> mvn clean package
+
+
+
+

The packaging process (which includes compilation, tests and generation of the .war file) will be launched. Once the process is finished you should see a result like this:

+
+
+
+
[INFO] Packaging webapp
+[INFO] Assembling webapp [jtqj-server] in [C:\...\workspaces\main\jump-the-queue\jump-the-queue\java\jtqj\server\target\jtqj-server-v4]
+[INFO] Processing war project
+[INFO] Webapp assembled in [XXXX msecs]
+[INFO] Building war: C:\...\workspaces\main\jump-the-queue\jump-the-queue\java\jtqj\server\target\jtqj-server-v4.war
+[INFO]
+[INFO] --- spring-boot-maven-plugin:2.1.6.RELEASE:repackage (default) @ jtqj-server ---
+[INFO] Attaching repackaged archive C:\...\workspaces\main\jump-the-queue\jump-the-queue\java\jtqj\server\target\jtqj-server-bootified.war with classifier bootified
+[INFO] ------------------------------------------------------------------------
+[INFO] Reactor Summary for jtqj v4:
+[INFO]
+[INFO] jtqj ............................................... SUCCESS [  X.XXX s]
+[INFO] jtqj-api ........................................... SUCCESS [ XX.XXX s]
+[INFO] jtqj-core .......................................... SUCCESS [XX:XX min]
+[INFO] jtqj-server ........................................ SUCCESS [ XX.XXX s]
+[INFO] ------------------------------------------------------------------------
+[INFO] BUILD SUCCESS
+[INFO] ------------------------------------------------------------------------
+[INFO] Total time:  XX:XX min
+[INFO] Finished at: 20XX-XX-XXTXX:XX:XX+0X:00
+[INFO] ------------------------------------------------------------------------
+
+
+
+

The packaging process creates two .war files, that are stored in the \java\jtqj\server\target directory. They contain the web application and can be deployed on any Servlet/JSP container.

+
+
+
+

Next Chapter: devon4ng Introduction

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/devon4j-ide-setup.html b/docs/jump-the-queue/1.0/devon4j-ide-setup.html new file mode 100644 index 00000000..26705238 --- /dev/null +++ b/docs/jump-the-queue/1.0/devon4j-ide-setup.html @@ -0,0 +1,376 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

IDE Setup

+
+
+

This Tutorial explains how to setup the development environment to work on and contribute to devonfw4j with your Windows computer.

+
+
+

We are using a pre-configured devon-ide for development. To get started follow these steps:

+
+
+
    +
  1. +

    Get a Git client. For Windows use:

    +
    + +
    +
  2. +
  3. +

    Download the distribution

    +
    +
      +
    • +

      If you are a member of Capgemini: download the current devonfw distribution (for devonfw please find the setup guide within the devon-dist).

      +
      +
      +devonfw IDE Download +
      +
      +
    • +
    +
    +
  4. +
  5. +

    Choose a project location for your project (e.g. C:\projects\devonfw, referred to with $projectLoc in this setup guides following steps). Avoid long paths and white spaces to prevent trouble. Extract the downloaded ZIP files via Extract Here (e.g. using 7-Zip). Do not use the Windows native ZIP tool to extract as this is not working properly on long paths and filenames.

    +
  6. +
  7. +

    Run the script update-all-workspaces.bat in $projectLoc.

    +
    +

    Hint: You can use update-all-workspaces.bat whenever you created a new folder in workspaces to separate different workspaces. This update will create new Eclipse start batches allowing to run a number of Eclipse instances using different workspaces in parallel.

    +
    +
  8. +
  9. +

    Open console.bat and check out the git repositories you need to work on into workspaces\main. with the following commands:

    +
    +
    +
    cd workspaces/main
    +git clone --recursive https://github.com/devonfw/my-thai-star.git
    +
    +
    +
    +

    Do another check whether there are files in folder workspaces\main\my-thai-star\!

    +
    +
  10. +
  11. +

    Run the script eclipse-main.bat to start the Eclipse IDE.

    +
  12. +
  13. +

    In Eclipse select File > Import > Maven > Existing Maven Projects and then choose the cloned projects from your workspace by clicking the Browse button and select the folder structure (workspaces\main\my-thai-star\java\MTSJ).

    +
  14. +
  15. +

    Execute the application by starting the SpringBootApp. Select the class and click the right mouse button. In the context menu select the entry Run as ⇒ Java Application (or Debug as …​). The application starts up and creates log entries in the Eclipse Console Tab.

    +
    +
    +devonfw Running an Application +
    +
    +
  16. +
  17. +

    Open console.bat.

    +
  18. +
  19. +

    Go to the folder workspaces\main\my-thai-star\angular in the console.bat terminal.

    +
  20. +
  21. +

    Execute the command: yarn install. Wait till everything is finished.

    +
  22. +
  23. +

    Execute the command: yarn start.

    +
  24. +
  25. +

    Once started, the sample application runs on http://localhost:4200/restaurant, login with waiter/waiter and have a look at the services list provided.

    +
  26. +
+
+
+
+

Next Chapter: Creating a devon4j Project

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/devon4j-layers.html b/docs/jump-the-queue/1.0/devon4j-layers.html new file mode 100644 index 00000000..f30152c5 --- /dev/null +++ b/docs/jump-the-queue/1.0/devon4j-layers.html @@ -0,0 +1,1109 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4j Component Layers

+
+
+

As we already mentioned in the introduction to devon4j the components of our Java back-end apps will be divided into three layers: service, logic and dataaccess.

+
+
+
    +
  • +

    service: The layer that contains the REST services to exchange information with the client applications.

    +
  • +
  • +

    logic: The layer hosting the logic of the application (validations, authorization control, business logic, etc.).

    +
  • +
  • +

    dataaccess: The layer that communicates with the database.

    +
  • +
+
+
+
+
+

Layers Implementation

+
+
+
+Dependency Injection +
+
+
+
+
+

==

+
+
+

If you haven’t learned about Dependency Injection yet, please visit the devon4j guide to Dependency Injection. Dependency Injection is an important principle in enterprise software development, that enables separation of concerns, decouples interfaces from their implementation and allows us to potentially reuse code. +== ==

+
+
+

Following the devon4j recommendations for Dependency Injection in MyThaiStar’s layers we will find:

+
+
+
    +
  • +

    Separation of API and implementation: Inside each layer we will separate the elements in different packages: api and impl. The api will store the interface with the methods definition and inside the impl we will store the class that implements the interface:

    +
  • +
+
+
+
+Layer API Implementation +
+
+
+
    +
  • +

    Usage of JSR330: The Java standard set of annotations for Dependency Injection (@Named, @Inject, @PostConstruct, @PreDestroy, etc.) provides us with all the needed annotations to define our beans and inject them:

    +
  • +
+
+
+
+
@Named
+public class MyBeanImpl implements MyBean {
+
+  @Inject
+  private MyOtherBean myOtherBean;
+
+  @PostConstruct
+  public void init() {
+    // initialization if required (otherwise omit this method)
+  }
+
+  @PreDestroy
+  public void dispose() {
+    // shutdown bean, free resources if required (otherwise omit this method)
+  }
+}
+
+
+
+
+
+

Communication between Layers

+
+
+

The communication between layers is solved using the described Dependency Injection pattern, based on Spring and the Java standards: java.inject (JSR330) combined with JSR250.

+
+
+
+Layer Implementation +
+
+
+
+
+

== Service Layer - Logic Layer

+
+
+
+
import javax.inject.Inject;
+import javax.inject.Named;
+
+import com.devonfw.application.mtsj.bookingmanagement.logic.api.Bookingmanagement;
+
+@Named("BookingmanagementRestService")
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+
+  @Inject
+  private Bookingmanagement bookingmanagement;
+
+  @Override
+  public BookingCto getBooking(long id) {
+    return this.bookingmanagement.findBooking(id);
+  }
+
+  ...
+
+}
+
+
+
+
+
+

== Logic Layer - Data Access Layer

+
+
+
+
import javax.inject.Inject;
+import javax.inject.Named;
+import javax.transaction.Transactional;
+
+import com.devonfw.application.mtsj.bookingmanagement.dataaccess.api.repo.BookingRepository;
+
+@Named
+@Transactional
+public class BookingmanagementImpl extends AbstractComponentFacade implements Bookingmanagement {
+
+  @Inject
+  private BookingRepository bookingDao;
+
+  @Override
+  public boolean deleteBooking(Long bookingId) {
+
+    BookingEntity booking = this.bookingDao.find(bookingId);
+    this.bookingDao.delete(booking);
+    return true;
+  }
+
+  ...
+
+}
+
+
+
+
+
+

Service Layer

+
+
+

As we mentioned at the beginning, the service layer is where the services of our application (REST or SOAP) will be located.

+
+
+

In devon4j applications the default implementation for web services is based on Apache CXF, a services framework for Java apps that supports web service standards like SOAP (implementing JAX-WS) and REST services (implementing JAX-RS).

+
+
+

In this tutorial we are going to focus only on the REST implementation of services.

+
+
+
+
+

== Service Definition

+
+
+

The services definition is done by the service interface located in the service.api.rest package. In the booking component of MyThaiStar application we can see a service definition statement like the following:

+
+
+
+
@Path("/bookingmanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public interface BookingmanagementRestService {
+
+  @GET
+  @Path("/booking/{id}/")
+  public BookingCto getBooking(@PathParam("id") long id);
+
+  ...
+}
+
+
+
+

JAX-RS annotations:

+
+
+
    +
  • +

    @Path: Defines the common path for all the resources of the service.

    +
  • +
  • +

    @Consumes and @Produces: Declares the type of data that the service expects to receive from the client and the type of data that will return to the client as a response.

    +
  • +
  • +

    @GET: Annotation for the HTTP GET method.

    +
  • +
  • +

    @Path: The path definition for the getBooking resource.

    +
  • +
  • +

    @PathParam: Annotation to configure the id received in the url as a parameter.

    +
  • +
+
+
+
+
+

== Service Implementation

+
+
+

The service implementation is a class located in the service.impl.rest package that implements the previous defined interface.

+
+
+
+
@Named("BookingmanagementRestService")
+public class BookingmanagementRestServiceImpl implements BookingmanagementRestService {
+
+  @Inject
+  private Bookingmanagement bookingmanagement;
+
+  @Override
+  public BookingCto getBooking(long id) {
+
+    return this.bookingmanagement.findBooking(id);
+  }
+
+  ...
+}
+
+
+
+

As you can see, this layer simply delegates method-calls to the logic layer, in order to resolve business logic requirements of the app.

+
+
+
+
+

Logic Layer

+
+
+

In this layer we store all the custom implementations we will need to fulfill the requirements of our application. Including:

+
+
+
    +
  • +

    business logic

    +
  • +
  • +

    delegation of the transaction management to Spring framework

    +
  • +
  • +

    object mappings

    +
  • +
  • +

    validations

    +
  • +
  • +

    authorizations

    +
  • +
+
+
+

Within the logic layer we must avoid including code related to services or data access, we must delegate those tasks to the suitable layers.

+
+
+
+
+

== Logic Layer Definition

+
+
+

As in the service layer, the logic implementation will be defined by an interface located in a logic.api package.

+
+
+
+
public interface Bookingmanagement {
+
+  BookingCto findBooking(Long id);
+  ...
+}
+
+
+
+
+
+

== Logic Layer Implementation

+
+
+

In a logic.impl package an "…​Impl" class will implement the interface defined in the previous section.

+
+
+
+
@Named
+@Transactional
+public class BookingmanagementImpl extends AbstractComponentFacade implements Bookingmanagement {
+
+  // @see #getBookingDao()
+  @Inject
+  private BookingDao bookingDao;
+
+  // The constructor.
+  public BookingmanagementImpl() {
+
+    super();
+  }
+
+  @Override
+  public BookingCto findBooking(Long id) {
+
+    BookingEntity entity = getBookingDao().findOne(id);
+    BookingCto cto = new BookingCto();
+    cto.setBooking(getBeanMapper().map(entity, BookingEto.class));
+    cto.setOrder(getBeanMapper().map(entity.getOrder(), OrderEto.class));
+    cto.setInvitedGuests(getBeanMapper().mapList(entity.getInvitedGuests(), InvitedGuestEto.class));
+    cto.setOrders(getBeanMapper().mapList(entity.getOrders(), OrderEto.class));
+    return cto;
+  }
+
+  public BookingDao getBookingDao() {
+    return this.bookingDao;
+  }
+
+  ...
+}
+
+
+
+

In the above MyThaiStar logic layer example we can see:

+
+
+
    +
  • +

    business logic and object mappings

    +
  • +
  • +

    delegation of the transaction management through Spring’s @Transactional annotation

    +
  • +
+
+
+
+
+

Transfer Objects

+
+
+

In the code examples of the logic layer section you may have seen a BookingCto object. This is one of the transfer objects defined in devon4j. It is used to transfer data between the layers.

+
+
+

The main benefits of using Transfer Objects are:

+
+
+
    +
  • +

    Avoid inconsistent data (when entities are sent across the app, changes tend to occur in multiple places).

    +
  • +
  • +

    Clearly define how much data to transfer (direct relations often lead to the transfer of too much data).

    +
  • +
  • +

    Hide internal details.

    +
  • +
+
+
+

In devon4j we can find two different Transfer Objects (TOs):

+
+
+
+
+

== Entity Transfer Objects (ETOs)

+
+
+
    +
  • +

    Have the same data-properties as their underlying entity.

    +
  • +
  • +

    Hold no relations to other entities.

    +
  • +
  • +

    Provide simple and solid mapping.

    +
  • +
+
+
+
+
+

== Composite Transfer Objects (CTOs)

+
+
+
    +
  • +

    Have no data-properties at all.

    +
  • +
  • +

    Only hold relations to other TOs.

    +
  • +
  • +

    Either a 1:1 reference or a Collection (List) of TOs.

    +
  • +
  • +

    Are easy to map manually by reusing ETOs or other CTOs.

    +
  • +
+
+
+
+
+

Data Access Layer

+
+
+

The third — and last — layer of the devon4j architecture is the one responsible for storing all the code related to the connection and access of the database.

+
+
+

For mapping Java objects to the database, devon4j uses the Java Persistence API (JPA). Explicitly, the JPA implementation devon4j uses is Hibernate.

+
+
+

Apart from the Entities of components, we are going to find the same elements, that we saw in other layers, in the dataaccess layer: a definition (i.e. an interface) and an implementation (a class that implements that interface).

+
+
+

However, in this layer the implementation is slightly different: The [Target]Repository extends com.devonfw.module.jpa.dataaccess.api.data.DefaultRepository, which provides us with the basic dataaccess methods: save(Entity), findOne(id), findAll(ids), delete(id), etc.

+
+
+

Because of that, in the [Target]Repository implementation of the layer, we only need to add the custom methods that are not implemented yet. Following the MyThaiStar component example (bookingmanagement), we will only find the paginated findBookings implementation here:

+
+
+
+
import org.springframework.data.jpa.repository.Query;
+import com.querydsl.jpa.impl.JPAQuery;
+
+...
+
+public interface BookingRepository extends DefaultRepository<BookingEntity> {
+
+  @Query("SELECT booking FROM BookingEntity booking WHERE booking.bookingToken = :token")
+  BookingEntity findBookingByToken(@Param("token") String token);
+
+  default Page<BookingEntity> findBookings(BookingSearchCriteriaTo criteria) {
+
+    BookingEntity alias = newDslAlias();
+    JPAQuery<BookingEntity> query = newDslQuery(alias);
+
+    ...
+  }
+}
+
+
+
+

The implementation of findBookings uses Spring Data and QueryDSL to manage dynamic queries.

+
+
+
+
+

Layers of the JumpTheQueue Application

+
+
+

The sections above describe the main elements of layers of the devon4j components. If you have completed the exercise in the previous chapter, you may have noticed that all those components are already created for us by CobiGen.

+
+
+

Take a look to our application structure:

+
+
+

Visitor Component Core (without Relations)

+
+
+
+JumpTheQueue Core Structure +
+
+
+

You’ll see the following components:

+
+
+
    +
  1. +

    Definition for dataaccess layer repository.

    +
  2. +
  3. +

    The entity that we created to be used by CobiGen to generate the component structure.

    +
  4. +
  5. +

    Definition of abstract usecase in the logic layer.

    +
  6. +
  7. +

    Implementation of the usecasefind layer in the logic layer.

    +
  8. +
  9. +

    Implementation of the usecasemanage layer in the logic layer.

    +
  10. +
  11. +

    Implementation of the logic layer.

    +
  12. +
  13. +

    Implementation of the rest service.

    +
  14. +
+
+
+

Visitor Component API (without Relations)

+
+
+
+JumpTheQueue API Structure +
+
+
+
    +
  1. +

    definition for entity in the api layer.

    +
  2. +
  3. +

    Entity Transfer Object located in the api layer.

    +
  4. +
  5. +

    Search Criteria Transfer Object located in the api layer.

    +
  6. +
  7. +

    Definition of usecasefind in the logic layer.

    +
  8. +
  9. +

    Definition of usecasemanage in the logic layer.

    +
  10. +
  11. +

    Definition of the logic layer.

    +
  12. +
  13. +

    Definition of the rest service of the component.

    +
  14. +
+
+
+

The queue component will have a similar structure. The access code component will also have a similar structure — with minor differences — since it has some relations with visitor and queue.

+
+
+

Access Code Component Core (with Relations)

+
+
+
+JumpTheQueue Core Structure CTO +
+
+
+

There is only a single difference in the core. If you look closely, you’ll see that CobiGen didn’t generate the usecasemanage implementation. This is due to the complexity of entities with relations. In this case CobiGen will leave us to create the save and delete methods, so we can properly address them.

+
+
+

Access Code Component API (with Relations)

+
+
+
+JumpTheQueue API Structure CTO +
+
+
+

There are two differences here:

+
+
+
    +
  1. +

    As you can see, CobiGen generated a CTO for our entity with relations.

    +
  2. +
  3. +

    As explained in the core, the usecasemanage definition is missing.

    +
  4. +
+
+
+

So, as you can see, our components have all the layers defined and implemented following the devon4j principles.

+
+
+

By using CobiGen we have created a complete and functional devon4j application without the necessity of any manual implementation (except for more complex entities which will be explained in the next chapter).

+
+
+

Now we’ll run the application and try to use the REST service to save a new visitor:

+
+
+
+
+

JumpTheQueue Running

+
+
+

As already mentioned, in this tutorial we will be using Postman to test our API.

+
+
+

First, open your JumpTheQueue project in Eclipse and run the app (right-click SpringBootApp.java > Run as > Java Application).

+
+
+
+
+

== Simple Call

+
+
+

If you remember, we added some mock data previously, to have some visitor info available. Let’s try to retrieve a visitor’s information by using our visitormanagement service.

+
+ +
+

We hope to obtain the data of the visitor with id 1.

+
+
+
+JumpTheQueue Simple GET Request 1 +
+
+
+

Instead we get a response containing the login form. This is because devon4j applications, by default, implement Spring Security. So we would need to login to access this service.

+
+
+

To make testing easier, we are going to "open" the application, to avoid the security filter, and we are going to enable the CORS filter to allow requests from (Angular-) clients.

+
+
+

In the file general/service/impl/config/BaseWebSecurityConfig.java edit the configure() method and remove the HTTP request filter. This will authorize every request and allow us access to the app:

+
+
+
+
@Override
+public void configure(HttpSecurity http) throws Exception {
+
+  http.authorizeRequests().anyRequest().permitAll().and().csrf().disable();
+
+  if (this.corsEnabled) {
+    http.addFilterBefore(getCorsFilter(), CsrfFilter.class);
+  }
+}
+
+
+
+

Finally in the file /jtqj-core/src/main/resources/application.properties set security.cors.enabled to true:

+
+
+
+
security.cors.enabled=true
+
+
+
+

Now we run the app again and send the same GET request. We should now be able to obtain the data of our visitor:

+
+
+
+JumpTheQueue Simple GET Request 2 +
+
+
+
+
+

== Paginated Response

+
+
+

CobiGen has created a complete set of services for us, so we can access a paginated list of visitors without any extra implementation.

+
+
+

We are going to use the following service defined in visitormanagement/service/api/rest/VisitormanagementRestService.java:

+
+
+
+
	@Path("/visitor/search")
+	@POST
+	public Page<VisitorEto> findVisitors(VisitorSearchCriteriaTo searchCriteriaTo);
+
+
+
+

The service definition states, that we will need to provide a Search Criteria Transfer Object. This object will work as a filter for the search, as you can see in visitormanagement/dataaccess/api/repo/VisitorRepository.java in the findByCriteria method.

+
+
+

If the Search Criteria Transfer Object is empty, we will retrieve all visitors. However, if we pass data with the object, the result will be filtered.

+
+ +
+

In the 'Body' tab, below the address bar, we’ll have to define a SearchCriteria object, which will have a pageable defined (make sure, the 'raw' option is selected):

+
+
+
+
{
+	"pageable" : {
+		"pageNumber" : "0",
+		"pageSize": "10",
+		"sort": []
+	}
+}
+
+
+
+

In the 'Headers' tab we’ll have to ensure that Content-Type application/json is set, indicating to the server, that it’ll have to interpret the body as JSON format (otherwise, you may face a 415 unsupported type error).

+
+
+
+
+

==

+
+
+

You can see the definition of the VisitorSearchCriteriaTo in: +visitormanagement/logic/api/to/VisitorSearchCriteriaTo.java +== ==

+
+
+

The result will appear in the 'Headers' tab and look something like this:

+
+
+
+JumpTheQueue Paginated Response 1 +
+
+
+

If we want to filter the results, we can define a criteria object in the body. Instead of the previously empty criteria, we now provide an object like this:

+
+
+
+
{
+	"username": "test1@mail.com",
+	"pageable" : {
+		"pageNumber" : "0",
+		"pageSize": "10",
+		"sort": []
+	}
+}
+
+
+
+

This will filter the results to find only visitors with username test1@mail.com. If we repeat the request now, the result will be this:

+
+
+
+JumpTheQueue Paginated Response 2 +
+
+
+

We could customize this filter by editing the visitormanagement/logic/impl/usecase/UcFindVisitorImpl.java class.

+
+
+
+
+

== Saving a Visitor

+
+
+

To meet the requirements of the User Story: Register, we need to register a visitor and return an access code.

+
+
+

By default CobiGen has generated the Read operation in the UcFindEntityImpl for us, as well as the rest of the CRUD operations in UcManageEntityImpl. So we are already able to create, read, update and delete visitors in our database, without any extra implementation required.

+
+
+

To delegate Spring to manage transactions, we only have to add the @Transactional annotation to our usecase implementations. Since devonfw 2.2.0 CobiGen adds this annotation automatically, so we don’t have to do it manually. Check your logic implementation classes and add the annotation in case it’s not present:

+
+
+
+
@Named
+@Validated
+@Transactional
+public class UcManageVisitorImpl extends AbstractVisitorUc implements UcManageVisitor {
+  ...
+}
+
+
+
+

To save a visitor we only need to use the REST resource /services/rest/visitormanagement/v1/visitor and provide the visitor definition for VisitorEto in the body.

+
+
+
+
+

==

+
+
+

You can see the definition for VisitorEto in: +visitormanagement/logic/api/to/VisitorEto.java +== ==

+
+ +
+

Provide a Visitor object in the body, such as this:

+
+
+
+
{
+	"username": "mary@mail.com",
+	"name": "Mary",
+	"phoneNumber": "1234567",
+	"password": "12345",
+	"acceptedCommercial": "true",
+	"acceptedTerms": "true",
+	"userType": "false"
+}
+
+
+
+

We will get the following result:

+
+
+
+JumpTheQueue Save Visitor +
+
+
+

In the body of the response we can see the default content for a successful service response; the data of the new visitor. This is the default implementation when saving a new entity with devon4j applications. However, the JumpTheQueue design defines, that the response must provide the access code created for the user as well, so we will need to change the logic of our application to fit this requirement.

+
+
+

In the next chapter we will learn how to customize the code generated by CobiGen to adapt it to our needs.

+
+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/devon4j-overview.html b/docs/jump-the-queue/1.0/devon4j-overview.html new file mode 100644 index 00000000..db8bf191 --- /dev/null +++ b/docs/jump-the-queue/1.0/devon4j-overview.html @@ -0,0 +1,438 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4j

+
+
+

For Java based back-end solutions, devonfw includes devon4j that provides a standardized architecture blueprint, an open best-of-breed technology stack as well as industry proven best practices and code conventions for a cloud ready Spring based server.

+
+
+

Included in devonfw framework as default server solution, devon4j is the result of applying devonfw principles in a Java based technology stack. With devon4j developers are able to create web application back-ends in a fast and reliable way, generating web services (REST, SOAP) that web clients can consume.

+
+
+
+
+

devon4j Technology Stack

+
+
+

As mentioned before, devon4j is not only a framework but a set of tools and conventions. devon4j provides a Java back-end solution based on the following technologies:

+
+
+
    +
  • +

    Spring framework as the main development framework.

    +
  • +
  • +

    Spring Boot as project accelerator.

    +
  • +
  • +

    Maven as project and dependencies management tool. The Maven projects use the POM file to store all the necessary information for building the project (project configuration, dependencies, plugins, etc.). You can get more details about POM files here.

    +
  • +
+
+
+

Some of the main features of Spring Boot are:

+
+
+
    +
  • +

    Creation of stand-alone Spring applications in an easy way.

    +
  • +
  • +

    Embedded Tomcat directly (no need to deploy WAR files).

    +
  • +
  • +

    Provide 'starter' POMs to simplify your Maven configuration.

    +
  • +
  • +

    Automatically configure Spring (whenever possible).

    +
  • +
  • +

    Provide production-ready features such as metrics, health checks and externalized configuration.

    +
  • +
  • +

    No requirement for XML configuration.

    +
  • +
+
+
+

For persistence and data access devon4j implements:

+
+
+
    +
  • +

    JPA and Hibernate

    +
  • +
  • +

    QueryDsl as query manager

    +
  • +
  • +

    H2 instance embedded as out-of-the-box database that is launched each time the application is started so the developers are able to start working with a real data access from scratch.

    +
  • +
  • +

    Flyway as a tool for version control of the database.

    +
  • +
+
+
+

As service framework:

+
+
+ +
+
+
+
+

devon4j Tools

+
+
+

The following tools are included in the devonfw IDE, which — among many other things — can create, run and deploy devon4j applications, avoiding lots of manual work.

+
+
+
+
+

Custom Eclipse Instance

+
+
+

As part of the devonfw framework, devon4j projects are integrated into a customized Eclipse instance, which provides pre-configurations and pre-installed plugins, focusing on code quality and productivity boosting.

+
+
+
+
+

CobiGen

+
+
+

A generic incremental generator for end to end code generation that allows us to automate the generation of major components of our apps. Based on entities, CobiGen can generate all CRUD functionality for us, starting with services and ending with the persistence data layer.

+
+
+
+
+

devon4j Architecture Overview

+
+
+

devon4j provides a solution for industrialized web apps based on components and a three-layers architecture.

+
+
+
+devon4j Architecture +
+
+
+

A component is a package that contains the services and logic related to one feature of the app.

+
+
+

Each component will be divided in three layers: service, logic and dataaccess.

+
+
+
    +
  • +

    Service Layer: will expose the REST API to exchange information with the client applications.

    +
  • +
  • +

    Logic Layer: the layer in charge of hosting the business logic of the application.

    +
  • +
  • +

    Data Access Layer: the layer to communicate with the database.

    +
  • +
+
+
+

Finally the devon4j applications provide a general package to locate the cross-cutting functionalities such as security, logging or exception handling.

+
+
+

In the next chapters you can find all the details about the implementation of each layer and how to develop all the relevant parts of a web app based on devonfw framework and devon4j.

+
+
+
+

Next Chapter: A devon4j Application

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/devon4j-testing.html b/docs/jump-the-queue/1.0/devon4j-testing.html new file mode 100644 index 00000000..7c648836 --- /dev/null +++ b/docs/jump-the-queue/1.0/devon4j-testing.html @@ -0,0 +1,711 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Testing with devon4j

+
+
+

Testing our applications is one of the most important parts of the development. The devon4j documentation provides detailed information about the testing principles. In addition to that, you can also find information about the topic in the devonfw guide.

+
+
+

In this chapter we are going to focus on showing some test examples, and explain briefly how to start testing our devon4j apps.

+
+
+
+
+

MyThaiStar Testing Example

+
+
+

In all the devon4j projects (based on Maven and Spring) we are going to find a dedicated package for testing.

+
+
+

In addition to this, the testing part of the project also has its own resources package, so we are going to be able to configure the application properties or other resources to create specific test scenarios:

+
+
+
+MyThaiStar Testing Structure +
+
+
+

We should incorporate unit tests as one of our main efforts during development, even considering approaches like test-driven development (TDD).

+
+
+

The tests in our applications should cover a significant amount of functionality. However, in this part of the tutorial, we are going to focus on the test of our devon4j components.

+
+
+

As you have seen in the previous image, each component of our application should have a dedicated package for testing in the test package. Inside each testing package, we will create the related test classes. They should follow this naming convention:

+
+
+
+
[Component]Test.java
+
+
+
+

This is because we are going to use Maven to launch the tests of our application and Maven will look for test classes that end with the Test keyword.

+
+
+

Testing under devon4j means, that we already have Spring Test and the devon4j test module available. This also means, that we will find a significant amount of annotations and implementations, which are going to provide us with all the necessary libraries and tools to create our tests in a really simple way.

+
+
+

Focusing on the component tests means, that we are going to test the implementation of the logic layer of our application. Because of this, you can see in our test structure, that our test classes are inside the [component].logic.impl package:

+
+
+
+MyThaiStar Testing Structure +
+
+
+

If we open one of the test classes we will find something like this:

+
+
+
+
...
+
+@SpringBootTest(classes = SpringBootApp.class)
+public class DishmanagementTest extends ComponentTest {
+
+  @Inject
+  private Dishmanagement dishmanagement;
+
+  @Test
+  public void findAllDishes() {
+
+    DishSearchCriteriaTo criteria = new DishSearchCriteriaTo();
+    List<CategoryEto> categories = new ArrayList<>();
+    criteria.setCategories(categories);
+    PageRequest pageable = PageRequest.of(0, 100, new Sort(Direction.DESC, "price"));
+    criteria.setPageable(pageable);
+    Page<DishCto> result = this.dishmanagement.findDishCtos(criteria);
+    assertThat(result).isNotNull();
+  }
+
+  ...
+
+}
+
+
+
+
    +
  • +

    @SpringBootTest is the Spring Test annotation to load the context of our application. So we will have the application running like in a real situation.

    +
  • +
  • +

    Extending the devon4j test class ComponentTest will inject functionalities like assertions into our test class.

    +
  • +
  • +

    Spring Test gives us the option for dependency injection, so we are going to be able to @Inject our components to test them.

    +
  • +
  • +

    Finally with the @Test annotation we can declare a test to be executed during the testing process.

    +
  • +
+
+
+
+
+

Testing our Application

+
+
+

Now that we have brief overview, we are going to add some tests to our JumpTheQueue application.

+
+
+

We have a main component for managing visitors, so we are going to create a dedicated package for testing that component within the com.devonfw.application.jtqj package, called visitormanagement.logic.impl. Inside this new package we are going to add a new test class named VisitormanagementTest.java:

+
+
+
+JumpTheQueue Testing Structure +
+
+
+
+
+

==

+
+
+

You can see that we already have some test packages in the src/test/java/com.devonfw.application.jtqj.general package. Those tests are from the devon4j archetype and we can use them as a model for further tests in our apps. +== ==

+
+
+

In the VisitormanagementTest class we are going to add annotations to run our app in the correct context when executing tests, extend the ComponentTest class to obtain assertions, and inject our visitormanagement component:

+
+
+
+
...
+
+import javax.inject.Inject;
+
+import org.junit.Test;
+import org.springframework.boot.test.context.SpringBootTest;
+
+import com.devonfw.application.jtqj.SpringBootApp;
+import com.devonfw.application.jtqj.visitormanagement.logic.api.Visitormanagement;
+import com.devonfw.application.jtqj.visitormanagement.logic.api.to.VisitorEto;
+import com.devonfw.application.jtqj.visitormanagement.logic.api.to.VisitorSearchCriteriaTo;
+import com.devonfw.module.test.common.base.ComponentTest;
+
+@SpringBootTest(classes = SpringBootApp.class)
+public class VisitormanagementTest extends ComponentTest {
+
+  @Inject
+  private Visitormanagement visitormanagement;
+}
+
+
+
+
+
+

==

+
+
+

Please note that the class VisitormanagementTest in the code snippet above extends the class ComponentTest. Make sure that this is the case in your code too. +== ==

+
+
+

Now we can start adding our first test. In JumpTheQueue we have two main functionalities:

+
+
+
    +
  1. +

    Register a visitor, returning an access code.

    +
  2. +
  3. +

    List the current visitors.

    +
  4. +
+
+
+

Let’s add tests to check these functionalities:

+
+
+
+
+

1. Registration

+
+
+

We are going to create a method with a descriptive name; saveVisitorTest, and we are going to add the @Test annotation to it.

+
+
+

Inside this test, we are going to verify the registration process of our app. To do so, we only need to call the saveVisitor method of the component and provide a VisitorEto object. After the method is called, we are going to check the response of the method, to verify that the expected business logic has been executed correctly:

+
+
+
+
...
+
+@SpringBootTest(classes = SpringBootApp.class)
+public class VisitormanagementTest extends ComponentTest {
+
+  ...
+
+  @Test
+  public void saveVisitorTest() {
+
+    VisitorEto visitorEto = new VisitorEto();
+    visitorEto.setName("Mary");
+    visitorEto.setUsername("mary@mary.com");
+    visitorEto.setPhoneNumber("123456789");
+    visitorEto.setPassword("test");
+    visitorEto.setUserType(false);
+    visitorEto.setAcceptedTerms(true);
+    visitorEto.setAcceptedCommercial(true);
+    VisitorEto visitorEtoResult = this.visitormanagement.saveVisitor(visitorEto);
+
+    assertThat(visitorEtoResult.getId()).isNotNull();
+
+    this.visitormanagement.deleteVisitor(visitorEtoResult.getId());
+  }
+
+}
+
+
+
+
+
+

==

+
+
+

In the saveVisitorTest method, which we give as an example, we can see that there is a deleteVisitor call at the end. This would normally only be done if the tests were run against a production database. If you use a separate database for testing, the last delete is not needed. +== ==

+
+
+
+
+

==

+
+
+

Have you noticed, that the mock data of this test is the same data, that we used in previous chapters for the manual verification of our services? Exactly! From now on, this test will allow us to automate the manual verification process. +== ==

+
+
+

Now is the time for running the test:

+
+
+

We can do this in several ways, but to simplify the example, just right-click the test method and select Run As > JUnit Test:

+
+
+
+JumpTheQueue Running Tests +
+
+
+
+
+

==

+
+
+

We can also debug our tests using the Debug As > JUnit Test option. +== ==

+
+
+
+
+

==

+
+
+

If you get an error-message saying: No tests found with test runner 'JUnit 5', right-click the test-class, Run As > Run Configurations…​ and then select JUnit 4 as your test runner:

+
+
+
+JumpTheQueue Running Tests +
+
+
+
+
+

==

+
+
+

The result of the test will be shown in the JUnit tab of Eclipse:

+
+
+
+JumpTheQueue Test Results 1 +
+
+
+

It seems that everything went okay: our registration process passes the test. Let’s complete the test by checking if the newly created user is "Mary".

+
+
+

We can do this by simply adding more asserts to check the result object:

+
+
+
+
assertThat(visitorEtoResult.getName()).isEqualTo("Mary");
+
+
+
+

Now, running the test again, we should obtain the expected result:

+
+
+
+JumpTheQueue Test Results 2 +
+
+
+
+
+

2. Finding Visitors

+
+
+

For the second functionality (finding visitors) we can add a new test with a very similar approach. The only difference is that in this case we are going to need to declare a Search Criteria object, that will contain a pageable to recover the first page and the first 100 values.

+
+
+
+
  @Test
+  public void findVisitorsTest() {
+
+    VisitorSearchCriteriaTo criteria = new VisitorSearchCriteriaTo();
+    Pageable pageable = PageRequest.of(0, 100);
+    criteria.setPageable(pageable);
+    Page<VisitorEto> result = this.visitormanagement.findVisitors(criteria);
+
+    assertThat(result).isNotNull();
+  }
+
+
+
+

Use imports from the org.springframework.data.domain package to solve the errors. +To run both tests (i.e. all the tests included in the class) we only need to right-click anywhere in the class and select Run As > JUnit Test. All methods annotated with @Test will be checked.

+
+
+
+JumpTheQueue Test Results 3 +
+
+
+
+
+

Extra Functionalities

+
+
+

The devon4j test module provides us with some extra functionalities that we can use to create tests more easily.

+
+
+

Extending ComponentTest class we also have available the doSetUp() and doTearDown() methods, that we can use to initialize and release resources in our test classes.

+
+
+

In our JumpTheQueue test class we could declare the visitor object in the doSetUp method, so we can use this resource in several test methods instead of declaring it again and again.

+
+
+

Doing this, our test class would look as follows:

+
+
+
+
@SpringBootTest(classes = SpringBootApp.class)
+public class VisitormanagementTest extends ComponentTest{
+
+	private VisitorEto visitorEto = new VisitorEto();
+
+	@Inject
+	private Visitormanagement visitormanagement;
+
+
+	@Override
+	protected void doSetUp() {
+		visitorEto.setName("Mary");
+		visitorEto.setUsername("mary@mary.com");
+		visitorEto.setPhoneNumber("123456789");
+		visitorEto.setPassword("test");
+		visitorEto.setUserType(false);
+		visitorEto.setAcceptedTerms(true);
+		visitorEto.setAcceptedCommercial(true);
+	}
+
+
+  @Test
+  public void saveVisitorTest() {
+
+    VisitorEto visitorEtoResult = this.visitormanagement.saveVisitor(visitorEto);
+
+    assertThat(visitorEtoResult.getId()).isNotNull();
+    assertThat(visitorEtoResult.getName()).isEqualTo("Mary");
+
+    this.visitormanagement.deleteVisitor(visitorEtoResult.getId());
+  }
+
+  @Test
+  public void findVisitorsTest() {
+
+    VisitorSearchCriteriaTo criteria = new VisitorSearchCriteriaTo();
+    Pageable pageable = PageRequest.of(0, 100);
+    criteria.setPageable(pageable);
+    Page<VisitorEto> result = this.visitormanagement.findVisitors(criteria);
+
+    assertThat(result).isNotNull();
+  }
+}
+
+
+
+
+
+

Running the Tests with Maven

+
+
+

We can use Maven to automate the testing of our project. To do so, just open a command prompt with access to Maven (in our devonfw project folder we can simply right click and select Open Devon CMD shell here). Now enter and run:

+
+
+
+
C:\...\workspaces\main\jump-the-queue\java\jtqj> mvn clean test
+
+
+
+

Maven will now scan for classes containing the word "Test" and execute all methods annotated with @Test in those classes. The result will look similar to this:

+
+
+
+JumpTheQueue Maven Testing +
+
+
+

Even though the test we made finished correctly, there are more tests — that devon4j generated automatically — which are going to contain one error related to role assignment. In this tutorial we are only going to implement the visitor side, so we won’t implement role restrictions. We encourage you to add the code for this functionality yourself, after finishing this tutorial.

+
+
+

After we have demonstrated how to create tests in devonfw, we are going to show you how to package and deploy your project in the next chapter.

+
+
+
+

Next Chapter: Deployment with devonfw

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/devon4j-validations.html b/docs/jump-the-queue/1.0/devon4j-validations.html new file mode 100644 index 00000000..d30d6f97 --- /dev/null +++ b/docs/jump-the-queue/1.0/devon4j-validations.html @@ -0,0 +1,588 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4j Validations

+
+
+

For validations, devon4j includes the Hibernate Validator as one of the available libraries in the pom.xml file.

+
+
+
+
<dependency>
+    <groupId>org.hibernate</groupId>
+    <artifactId>hibernate-validator</artifactId>
+</dependency>
+
+
+
+

Hibernate Validator allows us to check received values by adding annotations to our Java classes.

+
+
+
+
+

MyThaiStar Validations

+
+
+

In the MyThaiStar app, we can find validations for some fields that we receive from the client.

+
+
+

The main part of client inputs is related to the booking process. The client needs to provide: name, comment, bookingDate, email and assistants.

+
+
+
+
@NotNull
+private String name;
+
+...
+
+private String comment;
+
+@NotNull
+@Future
+private Timestamp bookingDate;
+
+...
+
+@NotNull
+@EmailExtended
+private String email;
+
+...
+
+@Min(value = 1, message = "Assistants must be greater than 0")
+@Digits(integer = 2, fraction = 0)
+private Integer assistants;
+
+
+
+
    +
  • +

    @NotNull: Checks that the field is not null before saving in the database.

    +
  • +
  • +

    @Future: Checks that a provided date is not in the past.

    +
  • +
  • +

    @Min: Declares a minimum value for an integer.

    +
  • +
  • +

    @Digits: Checks the format of an integer.

    +
  • +
  • +

    @Email: Is the standard validator for email addresses. In this case the standard validator is not checking the domain of the email, so for MyThaiStar we added a custom validator called @EmailExtended that is defined in a new general/common/api/validation/EmailExtended.java class. We will see it in more detail in the next section.

    +
  • +
+
+
+
+
+

Add your own Validations

+
+
+

In the JumpTheQueue app, we receive some inputs from the client. So let’s add some validations for that data, to avoid errors and ensure the consistency of the information, before saving it to the database.

+
+
+

When registering a visitor, the client provides the following information:

+
+
+
    +
  • +

    username: Must be not null and must match the format <name>@<domain.toplevel>.

    +
  • +
  • +

    name: Must be not null.

    +
  • +
  • +

    phoneNumber: Must be not null and must match a sequence of numbers and spaces.

    +
  • +
  • +

    password: Must be not null.

    +
  • +
  • +

    acceptedCommercial: Must be not null.

    +
  • +
  • +

    acceptedTerms: Must be not null.

    +
  • +
  • +

    userType: Must be not null.

    +
  • +
+
+
+
+
+

Name Validation

+
+
+

As we have just mentioned, the name of the visitor must be not null. To do so, Hibernate Validator provides us with the already mentioned @NotNull annotation (javax.validation.constraints.NotNull).

+
+
+

We are going to add the annotation in the jtqj-core to visitormanagement/dataaccess/api/VisitorEntity.java, just before the field name:

+
+
+
+
...
+
+  @NotNull
+  private String name;
+
+...
+
+
+
+

Run the app with Eclipse and — using Postman — call the register resource via POST:
+http://localhost:8081/jumpthequeue/services/rest/visitormanagement/v1/visitor

+
+
+

In the body, provide a visitor object without a name, like so:

+
+
+
+
{
+	"username" : "mary@mail.com",
+	"phoneNumber" : "1234567",
+	"password" : "12345",
+	"acceptedCommercial" : "true",
+	"acceptedTerms" : "true",
+	"userType" : "false"
+}
+
+
+
+

You will get a ValidationError message regarding the name field:

+
+
+
+JumpTheQueue Name Validation +
+
+
+
+
+

Email Validation

+
+
+

In the case of the email — as already explained in the MyThaiStar section — using the @Email annotation for validation, will allow users to enter emails such as something@something. This does not fit our app requirements, so we need to add a custom email validator.

+
+
+

In jtqj-core, add a class for our annotation, called EmailExtended.java, in a new general.common.api.validation package:

+
+
+
EmailExtended.java
+
+
...
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+import javax.validation.Constraint;
+import javax.validation.Payload;
+import javax.validation.constraints.Email;
+import javax.validation.constraints.Pattern;
+
+@Email
+@Pattern(regexp = ".+@.+\\..+", message = "Email must specify a domain")
+@Target({ ElementType.METHOD, ElementType.FIELD, ElementType.ANNOTATION_TYPE })
+@Retention(RetentionPolicy.RUNTIME)
+@Constraint(validatedBy = {})
+@Documented
+public @interface EmailExtended {
+  String message() default "Please provide a valid email address";
+
+  Class<?>[] groups() default {};
+
+  Class<? extends Payload>[] payload() default {};
+}
+
+
+
+

This validator extends the @Email validation with an extra @Pattern, that defines a regular expression, which the fields annotated with @EmailExtended must match.

+
+
+

Now we can annotate the username field in our VisitorEntity.java with @NotNull and @EmailExtended, to fit the app requirements.

+
+
+
+
...
+
+  @NotNull
+  @EmailExtended
+  private String username;
+
+...
+
+
+
+

Then, if we restart the app and try to register a user without an email, we get a ValidationError with the message: "{username=[must not be null]}":

+
+
+
+JumpTheQueue Null E-Mail Validation +
+
+
+

And if we provide an email, that does not match the expected format, we get the related ValidationError with the message: "{username=[Email must specify a domain, …​]}":

+
+
+
+JumpTheQueue Wrong E-Mail Validation +
+
+
+

Finally, if we provide a valid email, the registration process ends successfully.

+
+
+
+
+

Phone Validation

+
+
+

For validating the phone, apart from the @NotNull annotation, we need to use another custom validation, based on the @Pattern annotation and a regular expression.

+
+
+

We are going to follow the same approach used for the EmailExtended validation.

+
+
+

In jtqj-core, add a class for our annotation, called Phone.java to the general.common.api.validation package. With the @Pattern annotation we can define a regular expression to filter for phone numbers ("consists of sequence of numbers or spaces"):

+
+
+
Phone.java
+
+
...
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+import javax.validation.Constraint;
+import javax.validation.Payload;
+import javax.validation.constraints.Pattern;
+
+@Pattern(regexp = "[ 0-9]{0,14}$", message = "Phone must be valid")
+@Target({ ElementType.METHOD, ElementType.FIELD, ElementType.ANNOTATION_TYPE })
+@Retention(RetentionPolicy.RUNTIME)
+@Constraint(validatedBy = {})
+@Documented
+public @interface Phone {
+  String message() default "Phone must be well formed";
+
+  Class<?>[] groups() default {};
+
+  Class<? extends Payload>[] payload() default {};
+}
+
+
+
+

Then we only need to apply the new validation to our phone field in visitormanagement/dataaccess/api/VisitorEntity.java:

+
+
+
+
...
+
+  @NotNull
+  @Phone
+  private String phoneNumber;
+
+...
+
+
+
+

As a last step, we can now test our new validation. Call the previous service again, defining a wrong phone number. The response should be a ValidationError with the message: "{phoneNumber=[Phone must be valid]}":

+
+
+
+JumpTheQueue Wrong Phone Number Validation +
+
+
+

If we provide a valid phone number, the process should complete successfully.

+
+
+

In this chapter, we have seen how easy it is to add validations to the server side of our devon4j applications. In the next chapter, we will show you how to test our components using Spring Test and devon4j's test module.

+
+
+
+

Next Chapter: Testing in devon4j

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/devon4ng-adding-custom-functionality.html b/docs/jump-the-queue/1.0/devon4ng-adding-custom-functionality.html new file mode 100644 index 00000000..defca589 --- /dev/null +++ b/docs/jump-the-queue/1.0/devon4ng-adding-custom-functionality.html @@ -0,0 +1,2075 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4ng adding Custom Functionality

+
+
+

Now we have a fully functional blank project. All we have to do now is to create the components and services which will compose our application.

+
+
+

First, we are going to develop the views of the app through its components, then we will create the services with the logic, security and back-end connection.

+
+
+
+
+

Creating Components

+
+ +
+
+
+

==

+
+
+

You have already learned about creating Components in devon4ng here.
+You can go back and read that section again to refresh your memory. +== ==

+
+
+

Our app is going to consist of 3 main views:

+
+
+
    +
  • +

    Login

    +
  • +
  • +

    Register

    +
  • +
  • +

    ViewQueue

    +
  • +
+
+
+

To navigate between these views/components, we are going to implement routes using the Angular Router.

+
+
+

To see our progress, move to the root folder of the angular project and run ng serve -o again. This will recompile and publish our client app to http://localhost:4200. Angular will keep watching for changes, so whenever we modify the code, the app will automatically reload.

+
+
+
+
+

Root Component

+
+
+

app.component.ts inside angular/src/app will be our root component, so we don’t have to create a new file yet. We are going to add elements to the root component that will be common no matter what view will be displayed.

+
+
+
+
+

==

+
+
+

You have already learned about the Root Component in devon4ng here.
+You can go back and read that section again to refresh your memory. +== ==

+
+
+

This applies to the header element which will be on top of the window and on top of all other components. If you want, you can read more about Covalent layouts, which we are going to use a lot from now on, for every view component.

+
+
+
+
+

==

+
+
+

You have already learned about Covalent Layouts in devon4ng here.
+You can go back and read that section again to refresh your memory. +== ==

+
+
+

We don’t really need anything more than a header, so we are going to use the simplest layout for this purpose: the nav view.

+
+
+

In order to be able to use Covalent and Angular Material we are going to create a core module, which we will import into every other module where we want to use Covalent and Angular Material. First, we create a folder called shared in the angular/src/app directory. Inside there we are going to create a file called core.module.ts and will fill it with the following content:

+
+
+
+
import { NgModule } from '@angular/core';
+import { RouterModule } from '@angular/router';
+import { CommonModule } from '@angular/common';
+import { HttpClientModule, HTTP_INTERCEPTORS } from '@angular/common/http';
+import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
+import {
+  MatAutocompleteModule,
+  MatButtonModule,
+  MatButtonToggleModule,
+  MatCardModule,
+  MatCheckboxModule,
+  MatChipsModule,
+  MatDatepickerModule,
+  MatDialogModule,
+  MatExpansionModule,
+  MatGridListModule,
+  MatIconModule,
+  MatInputModule,
+  MatListModule,
+  MatMenuModule,
+  MatNativeDateModule,
+  MatPaginatorModule,
+  MatProgressBarModule,
+  MatProgressSpinnerModule,
+  MatRadioModule,
+  MatRippleModule,
+  MatSelectModule,
+  MatSidenavModule,
+  MatSliderModule,
+  MatSlideToggleModule,
+  MatSnackBarModule,
+  MatSortModule,
+  MatTableModule,
+  MatTabsModule,
+  MatToolbarModule,
+  MatTooltipModule,
+} from '@angular/material';
+import { CdkTableModule } from '@angular/cdk/table';
+import {
+  CovalentChipsModule,
+  CovalentLayoutModule,
+  CovalentExpansionPanelModule,
+  CovalentDataTableModule,
+  CovalentPagingModule,
+  CovalentDialogsModule,
+  CovalentLoadingModule,
+  CovalentMediaModule,
+  CovalentNotificationsModule,
+  CovalentCommonModule,
+} from '@covalent/core';
+
+@NgModule({
+  imports: [
+    RouterModule,
+    BrowserAnimationsModule,
+    MatCardModule,
+    MatButtonModule,
+    MatIconModule,
+    CovalentMediaModule,
+    CovalentLayoutModule,
+    CdkTableModule,
+  ],
+  exports: [
+    CommonModule,
+    CovalentChipsModule,
+    CovalentLayoutModule,
+    CovalentExpansionPanelModule,
+    CovalentDataTableModule,
+    CovalentPagingModule,
+    CovalentDialogsModule,
+    CovalentLoadingModule,
+    CovalentMediaModule,
+    CovalentNotificationsModule,
+    CovalentCommonModule,
+    CdkTableModule,
+    MatAutocompleteModule,
+    MatButtonModule,
+    MatButtonToggleModule,
+    MatCardModule,
+    MatCheckboxModule,
+    MatChipsModule,
+    MatDatepickerModule,
+    MatDialogModule,
+    MatExpansionModule,
+    MatGridListModule,
+    MatIconModule,
+    MatInputModule,
+    MatListModule,
+    MatMenuModule,
+    MatNativeDateModule,
+    MatPaginatorModule,
+    MatProgressBarModule,
+    MatProgressSpinnerModule,
+    MatRadioModule,
+    MatRippleModule,
+    MatSelectModule,
+    MatSidenavModule,
+    MatSliderModule,
+    MatSlideToggleModule,
+    MatSnackBarModule,
+    MatSortModule,
+    MatTableModule,
+    MatTabsModule,
+    MatToolbarModule,
+    MatTooltipModule,
+    HttpClientModule,
+  ],
+  declarations: [],
+  providers: [
+    HttpClientModule
+  ],
+})
+export class CoreModule {}
+
+
+
+
+
+

==

+
+
+

This CoreModule has almost every module of the different components for Angular Material and Covalent Teradata. If you decide to use a component that is not included yet, you need to add the corresponding module here. +== ==

+
+
+

Remember that we need to import this CoreModule module into the AppModule and inside every module of the different components that use Angular Material and Covalent Teradata. If a component does not have a module, it will be imported in the AppModule and hence automatically have the CoreModule. Our app.module.ts should have the following content:

+
+
+
+
import { BrowserModule } from '@angular/platform-browser';
+import { NgModule, CUSTOM_ELEMENTS_SCHEMA } from '@angular/core';
+
+// Application components and services
+import { AppRoutingModule } from './app-routing.module';
+import { AppComponent } from './app.component';
+import { CoreModule } from './shared/core.module';
+
+@NgModule({
+  declarations: [
+    AppComponent
+  ],
+  imports: [
+    BrowserModule,
+    AppRoutingModule,
+    CoreModule,
+  ],
+  providers: [
+  ],
+  bootstrap: [AppComponent],
+  schemas: [ CUSTOM_ELEMENTS_SCHEMA ]
+})
+export class AppModule { }
+
+
+
+
+
+

==

+
+
+

Even if we set up the module correctly, the HTML file can give us this red flag: "If td-layout is a Web Component then add 'CUSTOM_ELEMENTS_SCHEMA' to the @NgModule.schemas of this component to suppress this message." +To solve this, we add "schemas: [ CUSTOM_ELEMENTS_SCHEMA ]" inside the @NgModule of all the affected modules. +== ==

+
+
+
+
+

==

+
+
+

Remember this step because you will have to repeat it for every other component from Teradata you use in your app. +== ==

+
+
+

Now we can use this layout, so let’s implement it in app.component.html. Use the following code:

+
+
+
+
<td-layout-nav>             <!-- Layout tag-->
+  <div td-toolbar-content>
+    Jump The Queue          <!-- Header container-->
+  </div>
+  <h1>
+    app works!              <!-- Main content-->
+  </h1>
+</td-layout-nav>
+
+
+
+
+
+

==

+
+
+

You have already learned about Toolbars in devon4ng here.
+You can go back and read that section again to refresh your memory. +== ==

+
+
+
+
+

==

+
+
+

You have already learned about Toolbars in devon4ng here.
+You can go back and read that section again to refresh your memory. +== ==

+
+
+

Once this is done, our app should have a header and "app works!" should appear in the body of the page:

+
+
+
+Root Header +
+
+
+

To go a step further, we have to modify the body of the root component because it should be the output of the router. Now it’s time to prepare the routing system.

+
+
+

First, we need to create a component to show as default which will be our access view. We will modify it later. Stop ng serve and run:

+
+
+
+
ng generate component form-login
+
+
+
+

It will add a folder to our project with all the files needed for a component. Now we can move on to the router task again. Run ng serve again to continue the development.

+
+
+

Let’s create a module that navigates between components when the Router checks for routes. The file app-routing.module.ts was created automatically when we chose to include Angular Routing during project creation and we only need to modify it now:

+
+
+
+
import { NgModule } from '@angular/core';
+import { RouterModule, Routes } from '@angular/router';
+import { FormLoginComponent } from './form-login/form-login.component';
+
+const appRoutes: Routes = [
+  { path: 'FormLogin', component: FormLoginComponent},        // Redirect if url path is /FormLogin.
+  { path: '**', redirectTo: '/FormLogin', pathMatch: 'full' } // Redirect if url path do not match any other route.
+];
+
+@NgModule({
+  imports: [
+    RouterModule.forRoot(
+      appRoutes,
+      { enableTracing: true }, // <-- debugging purposes only
+    ),
+  ],
+  exports: [RouterModule],
+})
+export class AppRoutingModule {}
+
+
+
+
+
+

==

+
+
+

You have already learned about Routing in devon4ng here.
+You can go back and read that section again to refresh your memory. +== ==

+
+
+

Finally, we remove the <h1>app works!</h1> from app.component.html and replace it with a <router-outlet></router-outlet> tag. The final result of our root component will look like this:

+
+
+
+Root Router +
+
+
+

As you can see, now the body content is the HTML of FormLoginComponent. This is because we told the Router to redirect to the login form when the path is /FormLogin, and also to redirect to it by default if none of the other routes match the given path.

+
+
+

For now we are going to leave the header like this. In the future we will separate it into another component inside a layout folder.

+
+
+
+
+

LoginForm Component

+
+
+

As we have already created this component from the section before, let’s move on to building the template of the login view.

+
+
+

First, we need to add the Covalent Layout and the card to the file form-login.component.html:

+
+
+
+
<td-layout>
+  <mat-card>
+    <mat-card-title>Login</mat-card-title>
+  </mat-card>
+</td-layout>
+
+
+
+

This will add a gray background to the view and a card on top of it with the title "Login". Now we have the basic structure of the view.

+
+
+

Now we are going to add this image:

+
+
+
+JumpTheQueue Logo Image +
+
+
+

In order to have it available, save it in the following path of the project: angular/src/assets/images/ and name it jumptheq.png.

+
+
+

The final code with the form added will look like this:

+
+
+
+
<td-layout>
+  <mat-card>
+    <img mat-card-image src="assets/images/jumptheq.png">
+  </mat-card>
+</td-layout>
+
+
+
+

This code will give us as a result similar to this:

+
+
+
+Form Login +
+
+
+

This is going to be the container for the login.
+Now we will continue with the second component: Login.

+
+
+
+
+

Login Component

+
+
+

Our first step will be to create the component in the exact same way we created the FormLogin component but this time we are going to generate it in a new folder called components inside formlogin. Putting every child component inside that folder will allow us to keep a good and clear structure. In order to do this, we use the command:

+
+
+
+
ng generate component form-login/components/login
+
+
+
+

After Angular/CLI has finished generating the component, we have to create two modules, one for the form-login and one for the login:

+
+
+

1.- We create a new file called login-module.ts in the login root:

+
+
+
+
import { NgModule, CUSTOM_ELEMENTS_SCHEMA } from '@angular/core';
+import { CommonModule } from '@angular/common';
+import { CoreModule } from 'src/app/shared/core.module';
+import { LoginComponent } from './login.component';
+
+@NgModule({
+  imports: [CommonModule, CoreModule],
+  providers: [],
+  declarations: [LoginComponent],
+  exports: [LoginComponent],
+  schemas: [ CUSTOM_ELEMENTS_SCHEMA ]
+})
+export class LoginModule {}
+
+
+
+

2.- We create a new file called form-login-module.ts in the form-login root:

+
+
+
+
import { NgModule, CUSTOM_ELEMENTS_SCHEMA } from '@angular/core';
+import { CommonModule } from '@angular/common';
+import { FormLoginComponent } from './form-login.component';
+import { CoreModule } from '../shared/core.module';
+import { LoginModule } from './components/login/login-module';
+
+@NgModule({
+  imports: [CommonModule, CoreModule, LoginModule],
+  providers: [],
+  declarations: [FormLoginComponent],
+  exports: [FormLoginComponent],
+  schemas: [ CUSTOM_ELEMENTS_SCHEMA ]
+})
+export class FormLoginModule {}
+
+
+
+

As you can see, the LoginModule is already added to the FormLoginModule. Once this is done, we need to remove the FormLoginComponent and the LoginComponent from the declarations since they are already declared in their own modules. Then add the FormLoginModule. This will be done inside AppModule:

+
+
+
+
...
+import { FormLoginModule } from './form-login/form-login-module';
+...
+  declarations: [
+    AppComponent,
+  ]
+
+  imports: [
+    BrowserModule,
+    FormLoginModule,
+    CoreModule,
+    AppRoutingModule
+  ]
+...
+
+
+
+
+
+

==

+
+
+

This is done so the form-login (container/wrapper) and the login stay separated allowing us to reuse the login without having the card around in other views. +== ==

+
+
+

After this, we modify the login.component.html and add the form:

+
+
+
+
<form #loginForm="ngForm" layout-padding>
+    <div layout="row" flex>
+        <mat-form-field flex>
+                <input matInput placeholder="Email" ngModel email name="username" required>
+        </mat-form-field>
+    </div>
+    <div layout="row" flex>
+        <mat-form-field flex>
+            <input matInput placeholder="Password" ngModel name="password" type="password" required>
+        </mat-form-field>
+    </div>
+    <div layout="row" flex>
+    </div>
+    <div layout="row" flex layout-margin>
+        <div layout="column" flex>
+            <button mat-raised-button [disabled]="!loginForm.form.valid">Login</button>
+        </div>
+        <div layout="column" flex>
+            <button mat-raised-button color="primary">Register</button>
+        </div>
+    </div>
+</form>
+
+
+
+
+
+

==

+
+
+

You have already learned about Forms in devon4ng here.
+You can go back and read that section again to refresh your memory. +== ==

+
+
+

This form contains two input containers from Material. The containers enclose the input with the properties listed above.

+
+
+

We also need to add a button to send the information and redirect to the QueueViewer or show an error if something went wrong in the process. But for the moment, as we neither have another component nor the auth service yet, we will implement the button visually, as well as the validator to disable it if the form is not correct. We will tackle the on-click-event later.

+
+
+

As a last step we will add this component to the form-login-component.html:

+
+
+
+
<td-layout>
+    <mat-card>
+        <img mat-card-image src="assets/images/jumptheq.png">
+        <app-login></app-login>
+    </mat-card>
+</td-layout>
+
+
+
+

Now you should see something like this:

+
+
+
+JumpTheQueue Login Screen +
+
+
+

With two components already created, we need to use the router to navigate between them. Following the application flow of events, we are going to add a navigate function to the register button. When we press it, we will be redirected to our future register component.

+
+
+
+
+

Register Component

+
+
+

First, we are going to generate the register component via:

+
+
+
+
ng generate component register
+
+
+
+

This will create our component so we can start working on it. Turning back to login.component.html we have to modify these lines of code:

+
+
+
+
<form (ngSubmit)="submitLogin()" #loginForm="ngForm" layout-padding>
+...
+<button mat-raised-button type="submit" [disabled]="!loginForm.form.valid">Login</button>
+...
+<button mat-raised-button (click)="onRegisterClick()" color="primary">Register</button>
+
+
+
+

Two events were added. First, when we submit the form, the method submitLogin() is going to be called. Second, when the user clicks the button (click) will send an event to the function onRegisterClick(). This function should be inside login.component.ts which is going to be created now:

+
+
+
+
  ...
+  import { Router } from '@angular/router';
+  ...
+  constructor(private router: Router) { }
+  ...
+  onRegisterClick(): void {
+    this.router.navigate(['Register']);
+  }
+
+  submitLogin(): void {
+  }
+
+
+
+

We need to inject an instance of the Router object and declare it with the name router in order to use it in the code, as we did with onRegisterClick(). Doing this will use the navigate function and redirect to the next view. In our case, it will redirect using the route we are going to define in app-routing.module.ts:

+
+
+
+
...
+import { RegisterComponent } from './register/register.component';
+...
+const appRoutes: Routes = [
+  { path: 'FormLogin', component: FormLoginComponent},          // Redirect if url path is /FormLogin.
+  { path: 'Register', component: RegisterComponent},            // Redirect if url path is /Register.
+  { path: '**', redirectTo: '/FormLogin', pathMatch: 'full' }   // Redirect if url path does not match any other route.
+];
+...
+
+
+
+
+
+

==

+
+
+

You have already learned about Dependency Injection in devon4ng here.
+You can go back and read that section again to refresh your memory. +== ==

+
+
+

Now we are going to imitate the login to shape our register.component.html:

+
+
+
+
<form layout-padding (ngSubmit)="submitRegister()" #registerForm="ngForm">
+  <div layout="row" flex>
+      <mat-form-field flex>
+        <input matInput placeholder="Email" ngModel email name="username" required>
+      </mat-form-field>
+  </div>
+  <div layout="row" flex>
+      <mat-form-field flex>
+        <input matInput placeholder="Password" ngModel name="password" type="password" required>
+      </mat-form-field>
+  </div>
+  <div layout="row" flex>
+      <mat-form-field flex>
+        <input matInput placeholder="Name" ngModel name="name" required>
+      </mat-form-field>
+  </div>
+  <div layout="row" flex>
+      <mat-form-field flex>
+        <input matInput placeholder="Phone Number" ngModel name="phoneNumber" required>
+      </mat-form-field>
+  </div>
+  <div layout-xs="row" flex>
+      <div layout="column" flex>
+        <mat-checkbox name="acceptedTerms" ngModel required>Accept Terms And conditions</mat-checkbox>
+      </div>
+  </div>
+  <div layout-xs="row" flex>
+      <div layout="column" flex>
+        <mat-checkbox name="acceptedCommercial" ngModel required>I want to receive notifications</mat-checkbox>
+      </div>
+  </div>
+  <div layout="row" flex>
+  </div>
+  <div layout="row" flex>
+      <div layout="column" flex="10">
+        </div>
+      <div layout="column" flex>
+          <button mat-raised-button type="submit" [disabled]="!registerForm.form.valid">Register</button>
+      </div>
+      <div layout="column" flex="10">
+      </div>
+  </div>
+</form>
+
+
+
+

Now that we have a minimum of navigation flow inside our application, we are going to generate our first service using the command:

+
+
+
+
ng generate service register/services/register
+
+
+
+

This will create a folder "services" inside "register" and create the service itself. Services are where we keep the logic that connects to our database and fetches data which is going to be used by our component.ts.

+
+
+

In order to use the service, we are going to create some interface models. Let’s create a folder called backendModels inside "shared" and inside this folder a file called interfaces.ts in which we are going to add the model interfaces that will match our back-end:

+
+
+
+
export class Visitor {
+    id?: number;
+    username: string;
+    name: string;
+    password: string;
+    phoneNumber: string;
+    acceptedCommercial: boolean;
+    acceptedTerms: boolean;
+    userType: boolean;
+}
+export class VisitorArray {
+    content: Visitor[];
+}
+
+
+
+
+
+

==

+
+
+

You have already learned about creating new services in devon4ng here.
+You can go back and read that section again to refresh your memory. +== ==

+
+
+

If we take a closer look, we can see that id has a ? behind it. This indicates that the id is optional.

+
+
+
+
+

==

+
+
+

At this point we are going to assume that you have finished the devon4j part of this tutorial, or have at least downloaded the project and have the back end running locally on http://localhost:8081. +== ==

+
+
+

After doing this, we are going to add an environment variable with our base-URL for the REST services. This way we won’t have to change every URL when we switch to production. Inside environments/environment.ts we add:

+
+
+
+
export const environment: {production: boolean, baseUrlRestServices: string} = {
+  production: false,
+  baseUrlRestServices: 'http://localhost:8081/jumpthequeue/services/rest'
+};
+
+
+
+

Now in the service, we are going to add a registerVisitor method.

+
+
+

To call the server in this method we are going to inject the Angular HttpClient class from @angular/common/http. This class is the standard used by Angular to perform HTTP calls. The register call demands a Visitor model which we created in the interfaces file. We are going to build a POST call and send the information to the proper URL of the server service. The call will return an observable:

+
+
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Visitor } from 'src/app/shared/backendModels/interfaces';
+import { Observable } from 'rxjs';
+import { environment } from 'src/environments/environment';
+
+@Injectable({
+  providedIn: 'root'
+})
+export class RegisterService {
+
+  private baseUrl = environment.baseUrlRestServices;
+
+  constructor(private http: HttpClient) { }
+
+  registerVisitor(visitor: Visitor): Observable<Visitor> {
+    return this.http.post<Visitor>(`${this.baseUrl}` + '/visitormanagement/v1/visitor', visitor);
+  }
+}
+
+
+
+

This method will send our model to the back-end and return an Observable that we will use on the component.ts.

+
+
+
+
+

==

+
+
+

You have already learned about Observables and RxJs in devon4ng here.
+You can go back and read that section again to refresh your memory. +== ==

+
+
+

Now we are going to modify register.component.ts to call this service:

+
+
+
+
import { Component, OnInit } from '@angular/core';
+import { RegisterService } from './services/register.service';
+import { Visitor } from '../shared/backendModels/interfaces';
+import { Router } from '@angular/router';
+import { MatSnackBar } from '@angular/material/snack-bar';
+
+@Component({
+  selector: 'app-register',
+  templateUrl: './register.component.html',
+  styleUrls: ['./register.component.scss']
+})
+export class RegisterComponent implements OnInit {
+
+  constructor(private registerService: RegisterService, private router: Router, public snackBar: MatSnackBar) { }
+
+  submitRegister(formValue): void {
+    const visitor: Visitor = new Visitor();
+    visitor.username = formValue.username;
+    visitor.name = formValue.name;
+    visitor.phoneNumber = formValue.phoneNumber;
+    visitor.password = formValue.password;
+    visitor.acceptedCommercial = formValue.acceptedCommercial;
+    visitor.acceptedTerms = formValue.acceptedTerms;
+    visitor.userType = false;
+
+    this.registerService.registerVisitor(visitor).subscribe(
+      (visitorResult: Visitor) => console.log(JSON.stringify(visitorResult)), // When call is received
+      (err) =>  this.snackBar.open(err.error.message, 'OK', {
+        duration: 5000,
+      }), // When there's an error
+    );
+  }
+
+  ngOnInit() {
+  }
+}
+
+
+
+

In this file we injected RegisterService and Router to use them. Then, inside the method submitRegister, we created a visitor that we are going to pass to the service. We called the service method registerVisitor, we passed the visitor and we subscribed to the Observable<Visitor>, which we returned from the service. This subscription allows us to control three things:

+
+
+
    +
  1. +

    What to do when the data is received.

    +
  2. +
  3. +

    What to do when there’s an error.

    +
  4. +
  5. +

    What to do when the call is complete.

    +
  6. +
+
+
+

Finally, we modify the register.component.html to send the form values to the method:

+
+
+
+
...
+<form layout-padding (ngSubmit)="submitRegister(registerForm.form.value)" #registerForm="ngForm">
+...
+
+
+
+
+Register Page +
+
+
+

Using the method and taking a look at the browser console, we should see the visitor model being returned.

+
+
+
+
+

Creating Services

+
+
+

Now that we registered a Visitor, it’s time to create 3 important services:

+
+
+
    +
  • +

    AuthService

    +
  • +
  • +

    AuthGuardService

    +
  • +
  • +

    LoginService

    +
  • +
+
+
+

The AuthService will be the one that contains the login info, the AuthGuardService will check if a user is authorized to use a component (via the canActivate method), and the LoginService will be used to fill the AuthService.

+
+
+
+
+

==

+
+
+

To keep this tutorial simple, we are going to perform the password check client side. THIS IS NOT CORRECT! Usually, you would send the username and password to the back-end, check that the values are correct, and create a corresponding token which you would pass in the header and use it inside the AuthService — checking with some interceptors that the token is both in the AuthService and in the request. +== ==

+
+
+
+
+

Login, Auth and AuthGuard Services

+
+
+

We are going to create the 3 services via ng generate service <path>:

+
+
+
    +
  1. +

    LoginService via:
    +ng generate service form-login/components/login/services/login

    +
  2. +
  3. +

    Auth service via:
    +ng generate service core/authentication/auth

    +
  4. +
  5. +

    AuthGuard service via:
    +ng generate service core/authentication/auth-guard

    +
  6. +
+
+
+

After generating the services, we are going to start modifying the interfaces. Inside angular/src/app/shared/backendModels/interfaces we are going to add Role, FilterVisitor, Pageable and a Sort interface:

+
+
+
+
...
+export class FilterVisitor {
+    pageable: Pageable;
+    username?: string;
+    password?: string;
+}
+
+export class Pageable {
+    pageSize: number;
+    pageNumber: number;
+    sort: Sort[];
+}
+
+export class Sort {
+    property: string;
+    direction: string;
+}
+
+export class Role {
+    name: string;
+    permission: number;
+}
+
+
+
+
+
+

==

+
+
+

As you can see, we added a Pageable, since a lot of the search methods in the back-end are using SearchCriterias. These need pageables which specify a pageSize and pageNumber. Also, we can see that in this case FilterVisitor uses a pageable and adds parameters as a filter (username and password), which are optional. +== ==

+
+
+

Then we are going to create a config.ts file inside the root (angular/app). We are going to use that file to set up default config variables, for example: role names with their permission number, default pagination settings etc. For now we are just adding the roles:

+
+
+
+
export const config: any = {
+    roles: [
+        { name: 'VISITOR', permission: 0 },
+        { name: 'BOSS', permission: 1 },
+    ],
+};
+
+
+
+

After that, we are going to modify the auth.service.ts:

+
+
+
+
import { Injectable } from '@angular/core';
+import { find } from 'lodash';
+import { Role } from 'src/app/shared/backendModels/interfaces';
+import { config } from 'src/app/config';
+
+@Injectable({
+  providedIn: 'root'
+})
+export class AuthService {
+  private logged = false;
+  private user = '';
+  private userId = 0;
+  private currentRole = 'NONE';
+  private token: string;
+
+  public isLogged(): boolean {
+    return this.logged;
+  }
+
+  public setLogged(login: boolean): void {
+    this.logged = login;
+  }
+
+  public getUser(): string {
+    return this.user;
+  }
+
+  public setUser(username: string): void {
+    this.user = username;
+  }
+
+  public getUserId(): number {
+    return this.userId;
+  }
+
+  public setUserId(userId: number): void {
+    this.userId = userId;
+  }
+
+  public getToken(): string {
+    return this.token;
+  }
+
+  public setToken(token: string): void {
+    this.token = token;
+  }
+
+  public setRole(role: string): void {
+    this.currentRole = role;
+  }
+
+  public getPermission(roleName: string): number {
+    const role: Role = <Role>find(config.roles, { name: roleName });
+    return role.permission;
+  }
+
+  public isPermited(userRole: string): boolean {
+    return (
+      this.getPermission(this.currentRole) == this.getPermission(userRole)
+    );
+  }
+}
+
+
+
+

We will use this service to fill it with information from the logged-in user once the user logs in. This will allow us to check the information of the logged-in user in any way necessary.

+
+
+
+
+

==

+
+
+

You have already learned about Authentication in devon4ng here.
+You can go back and read that section again to refresh your memory. +== ==

+
+
+

Now we are going to use this class to fill the auth-guard.service.ts:

+
+
+
+
import { Injectable } from '@angular/core';
+import {
+  CanActivate,
+  Router,
+  ActivatedRouteSnapshot,
+  RouterStateSnapshot,
+} from '@angular/router';
+import { AuthService } from './auth.service';
+
+@Injectable({
+  providedIn: 'root'
+})
+export class AuthGuardService implements CanActivate {
+  constructor(
+    private authService: AuthService,
+    private router: Router,
+  ) {}
+
+  canActivate(
+    route: ActivatedRouteSnapshot,
+    state: RouterStateSnapshot,
+  ): boolean {
+    if (this.authService.isLogged() && this.authService.isPermited('VISITOR')) { // If it's logged in and its role is visitor
+      return true;
+    }
+
+    if (!this.authService.isLogged()) { // if it's not logged in
+      console.log('Error login');
+    }
+
+    if (this.router.url == '/') {  // if the router is the app route
+      this.router.navigate(['/login']);
+    }
+    return false;
+  }
+}
+
+
+
+

This service will be slightly different because we have to implement an interface called CanActivate. It has a method called canActivate() returning a boolean. This method will be called when navigating to a specified route, and — depending on the return value of this implemented method — the navigation will proceed or be rejected.

+
+
+
+
+

==

+
+
+

You have already learned about Guards in devon4ng here.
+You can go back and read that section again to refresh your memory. +== ==

+
+
+

Once this is done, the last step is to fill the login.service.ts. In this case, there’s going to be three methods:

+
+
+
    +
  1. +

    getVisitorByUsername(username: string):
    +A method that recovers a single user corresponding to the email.

    +
  2. +
  3. +

    login(username: string, password: string):
    +A method, which is going to use the previous method, to check that the username and password match the form input and then fill the AuthService.

    +
  4. +
  5. +

    logout():
    +This is going to be used to reset the AuthService and log out the user.

    +
  6. +
+
+
+

Also, we see the first use of pipe and map:
+pipe allows us to execute a chain of functions, then map allows us to return the single visitor instead of all the parameters that the server will send us.

+
+
+
+
import { map, tap } from 'rxjs/operators';
+import { Injectable } from '@angular/core';
+import { Observable } from 'rxjs';
+import { Visitor, FilterVisitor, Pageable, VisitorArray } from 'src/app/shared/backendModels/interfaces';
+import { HttpClient } from '@angular/common/http';
+import { environment } from 'src/environments/environment';
+import { AuthService } from 'src/app/core/authentication/auth.service';
+import { Router } from '@angular/router';
+import { MatSnackBar } from '@angular/material/snack-bar';
+
+@Injectable({
+  providedIn: 'root'
+})
+export class LoginService {
+
+    private baseUrl = environment.baseUrlRestServices;
+    constructor(private router: Router, private http: HttpClient, private authService: AuthService, public snackBar: MatSnackBar) { }
+
+    getVisitorByUsername(username: string): Observable<Visitor> {
+        const filters: FilterVisitor = new FilterVisitor();
+        const pageable: Pageable = new Pageable();
+
+        pageable.pageNumber = 0;
+        pageable.pageSize = 1;
+        pageable.sort = [];
+        filters.username = username;
+        filters.pageable = pageable;
+        return this.http.post<VisitorArray>(`${this.baseUrl}` + '/visitormanagement/v1/visitor/search', filters)
+       .pipe(
+            map(visitors => visitors.content[0]),
+        );
+    }
+
+    login(username: string, password: string): void {
+      // Checks if given username and password are the ones saved in the database
+      this.getVisitorByUsername(username).subscribe(
+          (visitorFound) => {
+              if (visitorFound.username == username && visitorFound.password == password) {
+                  this.authService.setUserId(visitorFound.id);
+                  this.authService.setLogged(true);
+                  this.authService.setUser(visitorFound.username);
+                  if (visitorFound.userType == false) {
+                      this.authService.setRole('VISITOR');
+                      this.router.navigate(['ViewQueue']);
+                  } else {
+                      this.authService.setLogged(false);
+                      this.snackBar.open('access error', 'OK', {
+                          duration: 2000,
+                        });
+                  }
+              } else {
+                  this.snackBar.open('access error', 'OK', {
+                      duration: 2000,
+                    });
+              }
+          },
+          (err: any) => {
+            this.snackBar.open('access error', 'OK', {
+              duration: 2000,
+            });
+          },
+      );
+    }
+
+    logout(): void {
+        this.authService.setLogged(false);
+        this.authService.setUser('');
+        this.authService.setUserId(0);
+        this.router.navigate(['FormLogin']);
+    }
+}
+
+
+
+

If you remember the devon4j tutorial, we used Criteria in order to filter and to search the DB. The Criteria require a pageable and you can add extra parameters to get specific results. In getVisitorByUsername() you can see the creation of a FilterVisitor corresponding to the Criteria in the back-end. This FilterVisitor gets a Pageable and a username and will return a single result as soon as the POST call is performed. That’s why we return the first page and only a single result.

+
+
+
+
+

==

+
+
+

For the tutorial we are only considering the visitor side of the application. That’s why we setLogged(false) if it’s userType == true (BOSS side). +== ==

+
+
+

Then we add to the login-module.ts and LoginService:

+
+
+
+
...
+import { LoginService } from './services/login.service';
+
+@NgModule({
+  ...
+  providers: [LoginService],
+  ...
+})
+...
+
+
+
+

After that, we are going to add the AuthGuard and the Auth into the shared/core-module.ts. This will allow us to employ these two services when importing the core module avoiding having to provide these services in every component:

+
+
+
+
...
+  providers: [
+    HttpClientModule,
+    AuthService,
+    AuthGuardService,
+  ],
+...
+
+
+
+

You need to import these modules as well, as shown earlier.

+
+
+

Finally, we modify the login.component.html to send the form values to the login.component.ts like we did with the register form. Afterwards, we are going to modify the register.component.ts: When the visitor registers, we can log him in automatically to avoid any nuisances. Let’s start with the login.component.html:

+
+
+
+
...
+<form (ngSubmit)="submitLogin(loginForm.form.value)" #loginForm="ngForm" layout-padding>
+...
+
+
+
+

As you can see, in the form we just added, the values to the ngSubmit allow us to call the method submitLogin() within the logic, sending the loginForm.form.values which are the form’s input values. In the next step we are going to modify the login.component.ts, adding the submitLogin() method. This method calls the LoginService, providing the service with the necessary values received from the form (i.e. the loginFormValues).

+
+
+
+
...
+import { LoginService } from './services/login.service';
+...
+export class LoginComponent implements OnInit {
+  ...
+  constructor(private router: Router, private loginService: LoginService) {
+  }
+  ...
+  submitLogin(loginFormValues): void {
+    this.loginService.login(loginFormValues.username, loginFormValues.password);
+  }
+}
+
+
+
+

Finally, in the register.component.ts we are going to inject the LoginService and use it to log in the visitor after registering him. This will also send the user to the ViewQueue, which we will create and secure later in the tutorial.

+
+
+
+
import { LoginService } from '../form-login/components/login/services/login.service';
+...
+constructor(private registerService: RegisterService, private router: Router, public snackBar: MatSnackBar,
+    private loginService: LoginService) { }
+...
+  submitRegister(formValue): void {
+    ...
+    this.registerService.registerVisitor(visitor).subscribe(
+      (visitorResult: Visitor) => {
+        this.loginService.login(visitorResult.username, visitorResult.password);
+      },
+      ...
+    );
+  }
+...
+
+
+
+
+
+

Finishing Touches

+
+
+

Now we only need to generate two more components (header and view-queue) and services (AccessCodeService and QueueService) in order to finish the implementation of our JumpTheQueue app.

+
+
+
+
+

Separating Header from Layout

+
+
+

By separating the header on top of the page from the layout, we enable the reuse of this component and reach a better separation of concerns across our application. To do this, we are going to generate a new component inside angular/src/app/layout/header via:

+
+
+
+
ng generate component layout/header
+
+
+
+

Now we are going to add it to the main view app.component.html:

+
+
+
+
...
+  <div td-toolbar-content flex>
+    <app-header layout-align="center center" layout="row" flex></app-header>
+  </div> <!-- Header container-->
+...
+
+
+
+

After adding the component to the header view (app-header), we are going to modify the HTML of the component (header.component.html) and the logic of the component (header.component.ts). As a first step, we are going to modify the HTML, adding an icon as a button, which checks whether or not the user is logged in via *ngIf by calling the auth service’s isLogged() method. This will make the icon appear only if the user is logged in:

+
+
+
+
Jump The Queue
+<span flex></span>
+<button mat-icon-button mdTooltip="Log out" (click)=onClickLogout() *ngIf="authService.isLogged()">
+  <mat-icon>exit_to_app</mat-icon>
+</button>
+
+
+
+

In the header logic (header.component.ts) we are simply going to inject the AuthService and LoginService, then we are going call logout() from LoginService in the OnClickLogout(). Finally, the AuthService is needed because it’s being used by the HTML template to control if the user is logged in with isLogged():

+
+
+
+
...
+  constructor(private authService: AuthService, private loginService: LoginService) { }
+...
+  onClickLogout(): void {
+    this.loginService.logout();
+  }
+...
+
+
+
+

Separating components will allow us to keep the code clean and easy to work with.

+
+
+
+
+

ViewQueue Component

+
+
+

For the last view, we are going to learn how to use our Observables on the HTML template directly without having to subscribe() to them.

+
+
+

First, we are going to generate the component via:

+
+
+
+
ng generate component view-queue
+
+
+
+

After that, we are going to include the component in the app-routing.module.ts, also adding the guard, to only allow users that are VISITOR to see the component. It is important to insert the following code before { path: '**', redirectTo: '/FormLogin', pathMatch: 'full' }:

+
+
+
+
...
+const appRoutes: Routes = [
+  ...
+  { path: 'ViewQueue',
+    component: ViewQueueComponent,
+    canActivate: [AuthGuardService]}, // Redirect if url path is /ViewQueue, check if canActivate() with the AuthGuardService.
+  ...
+];
+...
+
+
+
+

Now in order to make this view work, we are going to do these things:

+
+
+
    +
  1. +

    Add the Queue and AccessCode interface in our angular/src/app/shared/backendModels/interfaces and their corresponding filters.

    +
  2. +
  3. +

    Generate the QueueService and AccessCodeService and add the necessary methods.

    +
  4. +
  5. +

    Modify the view-queue.component.html.

    +
  6. +
  7. +

    Modify the logic of the component view-queue.component.ts.

    +
  8. +
+
+
+

First, we are going to add the necessary interfaces. We modify angular/src/app/shared/backendModels/interfaces.ts and add the FilterQueue, Queue, FilterAccessCode, AccessCode, QueueArray and AccessCodeArray. These are going to be necessary in order to communicate with the back-end.

+
+
+
+
...
+export class FilterAccessCode {
+    pageable: Pageable;
+    visitorId?: Number;
+    endTime?: string;
+}
+
+export class FilterQueue {
+    pageable: Pageable;
+    active: boolean;
+}
+
+export class AccessCode {
+    id?: number;
+    ticketNumber: string;
+    creationTime: string;
+    startTime?: string;
+    endTime?: string;
+    visitorId: number;
+    queueId: number;
+    content: any;
+}
+
+export class Queue {
+    id?: number;
+    name: string;
+    logo: string;
+    currentNumber: string;
+    attentionTime: string;
+    minAttentionTime: string;
+    active: boolean;
+    customers: number;
+    content: any;
+}
+
+export class QueueArray {
+    content: Queue[];
+}
+
+export class AccessCodeArray {
+    content: [{
+        accessCode: AccessCode
+    }];
+}
+...
+
+
+
+
+
+

AccessCode and Queue Services

+
+
+

After this is done, we are going to generate the AccessCodeService and the QueueService:

+
+
+
+
ng generate service view-queue/services/Queue
+
+ng generate service view-queue/services/AccessCode
+
+
+
+

Once this is done, we are going to modify them and add the necessary methods:

+
+
+
    +
  • +

    For the AccessCodeService we are going to need a full CRUD:

    +
  • +
+
+
+
+
import { Injectable } from '@angular/core';
+import { AuthService } from 'src/app/core/authentication/auth.service';
+import { Router } from '@angular/router';
+import { HttpClient } from '@angular/common/http';
+import { AccessCode, Pageable, FilterAccessCode, AccessCodeArray } from 'src/app/shared/backendModels/interfaces';
+import { Observable } from 'rxjs';
+import { environment } from 'src/environments/environment';
+import { map } from 'rxjs/operators';
+
+@Injectable({
+  providedIn: 'root'
+})
+export class AccessCodeService {
+
+  private baseUrl = environment.baseUrlRestServices;
+
+  constructor(private router: Router, private http: HttpClient, private authService: AuthService) { }
+
+  getCurrentlyAttendedAccessCode(): Observable<AccessCode> {
+    const filters: FilterAccessCode = new FilterAccessCode();
+    const pageable: Pageable = new Pageable();
+
+    filters.endTime = null;
+    pageable.pageNumber = 0;
+    pageable.pageSize = 1;
+    filters.pageable = pageable;
+    return this.http.post<AccessCodeArray>(`${this.baseUrl}` + '/accesscodemanagement/v1/accesscode/cto/search', filters)
+    .pipe(
+        map(accesscodes => {
+          if (!accesscodes.content[0]) {  // if there's no response it means there's no one in the queue
+            return null;
+          } else {
+            if (accesscodes.content[0].accessCode.startTime != null) {
+              // if start time is not null it means that he's being attended
+              return accesscodes.content[0].accessCode;
+            } else {
+              // no one is being attended
+              return null;
+            }
+          }
+        }),
+     );
+  }
+
+  getVisitorAccessCode(visitorId: number): Observable<AccessCode> {
+    const filters: FilterAccessCode = new FilterAccessCode();
+    const pageable: Pageable = new Pageable();
+
+    pageable.pageNumber = 0;
+    pageable.pageSize = 1;
+    filters.visitorId = visitorId;
+    filters.pageable = pageable;
+    return this.http.post<AccessCodeArray>(`${this.baseUrl}` + '/accesscodemanagement/v1/accesscode/cto/search', filters)
+    .pipe(
+      map(accesscodes => {
+        if (accesscodes.content[0]) {
+          return accesscodes.content[0].accessCode;
+        } else {
+          return null;
+        }
+      }),
+    );
+  }
+
+  deleteAccessCode(codeAccessId: number) {
+    this.http.delete<AccessCode>(`${this.baseUrl}` + '/accesscodemanagement/v1/accesscode/' + codeAccessId + '/').subscribe();
+  }
+
+  saveAccessCode(visitorId: number, queueId: number) {
+    const accessCode: AccessCode = new AccessCode();
+    accessCode.visitorId = visitorId;
+    accessCode.queueId = queueId;
+    return this.http.post<AccessCode>(`${this.baseUrl}` + '/accesscodemanagement/v1/accesscode/', accessCode);
+  }
+}
+
+
+
+

In the methods getCurrentlyAttendedAccessCode and getVisitorAccessCode we can see the use of Pageable and FilterAccessCode to match the Criteria in the back-end like we explained in previous steps. In this case, the getVisitorAccessCode method will be used to see if the visitor has an AccessCode and the getCurrentlyAttendedAccessCode is going to recover the first AccessCode of the queue.

+
+
+
    +
  • +

    For the QueueService we are only going to need to find the active queue:

    +
  • +
+
+
+
+
import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Router } from '@angular/router';
+import { Observable } from 'rxjs';
+import { Queue, FilterQueue, Pageable, QueueArray } from 'src/app/shared/backendModels/interfaces';
+import { environment } from 'src/environments/environment';
+import { map } from 'rxjs/operators';
+
+@Injectable({
+  providedIn: 'root'
+})
+export class QueueService {
+
+  private baseUrl = environment.baseUrlRestServices;
+
+  constructor(private router: Router, private http: HttpClient) { }
+
+  getActiveQueue(): Observable<Queue> {
+    const filters: FilterQueue = new FilterQueue();
+    filters.active = true;
+    const pageable: Pageable = new Pageable();
+    pageable.pageNumber = 0;
+    pageable.pageSize = 1;
+    pageable.sort = [];
+    filters.pageable = pageable;
+    return this.http.post<QueueArray>(`${this.baseUrl}` + '/queuemanagement/v1/queue/search', filters)
+    .pipe(
+         map(queues => queues.content[0]),
+     );
+  }
+}
+
+
+
+

Now we are going to create the template view-queue.component.html (which will use this data) and we will also introduce a new concept: async pipes in templates.

+
+
+
+
<td-layout *ngIf="{
+  accessCodeAttended: accessCodeAttended$ | async,
+  accessCodeVisitor: accessCodeVisitor$  | async,
+  queue: queue$ | async
+} as data;">
+  <div *ngIf="data.queue">
+    <mat-card>
+    <img mat-card-image src="assets/images/jumptheq.png">
+
+      <div *ngIf="data.accessCodeVisitor">
+        <div class="text-center row">
+          <h1 style="margin-bottom:10px;" class="text-left text-xl push-md">Your Number:</h1>
+        </div>
+        <div class="text-center row">
+          <h1 style="font-size: 75px; margin:0px;" class="text-center text-xxl push-left-md">{{data.accessCodeVisitor.ticketNumber}}</h1>
+        </div>
+        <div style="border-bottom: 2px solid black;" class="row">
+          <p class="push-left-md">Currently estimate time: 10:00:00</p>
+        </div>
+      </div>
+      <div class="text-center">
+        <div class="text-center row">
+          <h1 style="margin-bottom:10px;" class="text-left text-xl push-md">Currently Being Attended:</h1>
+        </div>
+        <div class="row">
+          <h1 style="font-size: 100px" class="text-center text-xxl push-lg">{{data.accessCodeAttended?.ticketNumber}}</h1>
+        </div>
+      </div>
+      <div style="border-top: 2px solid black;" class="pad-bottom-lg pad-top-lg text-center row" *ngIf="data.accessCodeVisitor == null">
+        <button mat-raised-button (click)="onJoinQueue(data.queue.id)" color="primary" class="text-upper">Join the queue</button>
+      </div>
+    </mat-card>
+    <div *ngIf="data.accessCodeVisitor" style="margin: 8px;" class="row text-right">
+        <button mat-raised-button (click)="onLeaveQueue(data.accessCodeVisitor.id)" color="primary" class="text-upper">Leave the queue</button>
+    </div>
+  </div>
+  <div *ngIf="data.queue == null || (data.queue !==  null && data.queue.active == false)" class="row">
+    <h1 style="font-size: 50px" class="text-center text-xxl push-lg">The queue is not active try again later</h1>
+  </div>
+</td-layout>
+
+
+
+

If you watch closely, the starting td-layout has an *ngIf inside it. This *ngIf allows us to asynchronously pipe the observables that we will assign in the next steps. This solution avoids having to use subscribe() (as it subscribes automatically) and — as a result — we don’t have to worry about where to unsubscribe() from the observables.

+
+
+

In this HTML, we give *ngIf another use: We use it to hide certain panels. Using accessCodeVisitor, we show the ticket number panel and the "leave the queue"-button whenever the visitor holds an access code, and hide the "join the queue"-button. On the contrary, when the visitor has no access code, we hide the ticket number and the "leave the queue"-button and only show the "join the queue"-button.

+
+
+
+
+

==

+
+
+

In this case, since we are using HTTP and the calls are finite, there wouldn’t be any problems if you don’t unsubscribe() from their corresponding observables. However, if — for example — we use an observable to keep track of an input and subscribe() to it without ever calling the unsubscribe() method, the app could end up containing a memory leak. This is because — every time we visit the component with the input — it is going to create another subscription without unsubscribing from the last one. +== ==

+
+
+

Finally, to adapt the async pipe, the ngOnInit() method inside view-queue.component.ts now does not subscribe to the observable. In its place, we equal the queue variable directly to the observable, so we can load it using *ngIf.

+
+
+
+
import { Component, OnInit } from '@angular/core';
+import { AccessCode, Queue } from '../shared/backendModels/interfaces';
+import { Observable, timer } from 'rxjs';
+import { AccessCodeService } from './services/access-code.service';
+import { switchMap } from 'rxjs/operators';
+import { AuthService } from '../core/authentication/auth.service';
+import { QueueService } from './services/queue.service';
+
+@Component({
+  selector: 'app-view-queue',
+  templateUrl: './view-queue.component.html',
+  styleUrls: ['./view-queue.component.scss']
+})
+export class ViewQueueComponent implements OnInit {
+
+  accessCodeAttended$: Observable<AccessCode>;
+  accessCodeVisitor$: Observable<AccessCode>;
+  queue$: Observable<Queue>;
+
+  constructor(private accessCodeService: AccessCodeService, private queueService: QueueService, private authService: AuthService) { }
+
+  ngOnInit() {
+     // Every minute we are going to update accessCodeAttended$ starting instantly
+    this.accessCodeAttended$ = timer(0, 60000).pipe(
+      // we switchMap and give it the necessary value from the accessCodeService
+      switchMap(() => {
+        return this.accessCodeService.getCurrentlyAttendedAccessCode();
+      })
+    );
+    this.accessCodeVisitor$ = this.accessCodeService.getVisitorAccessCode(this.authService.getUserId());
+    this.queue$ = this.queueService.getActiveQueue();
+  }
+
+  onJoinQueue(queueId: number): void {
+    this.accessCodeVisitor$ = this.accessCodeService.saveAccessCode(this.authService.getUserId(), queueId);
+  }
+
+  onLeaveQueue(accessCodeId: number): void {
+    this.accessCodeService.deleteAccessCode(accessCodeId);
+    this.accessCodeVisitor$ = null;
+  }
+}
+
+
+
+

In this last component we assign the Observables when the component is initiated. After that, when clicking the "join the queue"-button, we assign a new Observable called AccessCode to the accessCodeVisitor$. Finally, when we leave the queue, we delete the AccessCode and set the accessCodeVisitor to null. Since we are using an async pipe, every time we modify the status of the Observables, they are going to update the template.

+
+
+
+Queue Page with Access Code +
+
+
+
+Queue Page without Access Code +
+
+
+

This is all on how to build your own devon4ng application. Now it’s up to you to add features, change styles and do everything you can imagine doing with this app.

+
+
+

As a final step to complete the tutorial, however, we are going to run the app outside of our local machine by deploying it.

+
+
+
+

Next Chapter: Deploy your devon4ng App

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/devon4ng-components.html b/docs/jump-the-queue/1.0/devon4ng-components.html new file mode 100644 index 00000000..6788a939 --- /dev/null +++ b/docs/jump-the-queue/1.0/devon4ng-components.html @@ -0,0 +1,456 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Components

+
+
+

In this chapter we are going to take a closer look at Angular components and learn how to work with them.

+
+
+
+
+

What are Angular Components?

+
+ +
+
+
+

A component controls a patch of screen called a view.

+
+
+

You define a component’s application logic — what it does to support the view — inside a class. The class interacts with the view through an API of properties and methods.

+
+
+

Components are the most basic building block of a UI in an Angular application. An Angular application is a tree of Angular components. They are internally composed of an HTML template and a class with all the methods needed to handle that template.

+
+
+
+
+

HTML is the language of the Angular template. Almost all HTML syntax is valid template syntax. The <script> element is a notable exception; it is forbidden, eliminating the risk of script injection attacks. In practice, <script> is ignored and a warning appears in the browser console. See the Security page for details.

+
+
+

Some legal HTML doesn’t make much sense in a template. The <html>, <body>, and <base> elements have no useful role. Pretty much everything else is fair game. Moreover, Angular has some extended HTML functionalities, involving data binding, structural directives like loops or if’s and property bindings.

+
+
+
+
+

A component must belong to a NgModule in order for it to be usable by another component or application. To specify that a component is a member of a NgModule, you should list it in the declarations field of that NgModule.

+
+
+
+
+

Every Angular application is composed of components depending on the root component: app.component.ts. You can route from one component to another, use their selector to instantiate a component inside of another component’s template, input data into a child component, in order to use it inside the child component, or send event-outputs to the parent component, to execute some actions when the event is triggered.

+
+
+

Basically, without components, there is no Angular application.

+
+
+
+
+

Create a new Component

+
+
+

Creating a component can be as simple as creating a file with a name like this: <component_name>.component.ts. But a good component should include more files, which complete the environment needed for a component: A local style file to apply to the component template, the template in an HTML file separated from the component, and at least one spec file to test the component.

+
+
+

All of this files can easily be generated along with the component itself by using ng generate. Angular CLI offers the functionality to create a component, generate other, related files and add the new component to the app.module.ts automatically. The structure of the command is:

+
+
+
+
ng generate component <component_name>
+
+
+
+
+
+

Toolbars

+
+
+

Angular Material provides components that are specifically designed to be used with a certain page layout. This is also the case for Toolbars.

+
+
+

They are designed to be used as a container for page headers, titles, or actions. Toolbars apply the theme color and standard style to their child components. They can contain multiple rows and accept icon buttons.

+
+
+

Since they make the development of page layouts much easier, they are widely used in component libraries like Teradata Covalent, which have them integrated into their respective Layout Options.

+
+
+
+
+

Root Component

+
+
+

app.component.ts — as it is usually named, when a project is created via the Angular CLI — is the "root component" of an Angular app. It is called like this, because Angular apps follow a tree structure, where components are dependent on one another.

+
+
+

This root component should contain everything that is common across the whole application: The General layout of the app, headers, footers, sidenavs, etc. Even if we use a Router to navigate between components, these elements will remain the same and should not be created multiple times.

+
+
+

Using the root component to preserve some elements is useful, because we do not have to replicate the same HTML code for view components in every component. This gives us the opportunity to keep data from one view to another. For example, this is used in MyThaiStar to always have the order data available in the sidenav, no matter where the user navigates.

+
+
+
+
+

Routing

+
+
+

Angular has the functionality to navigate from component to component, in order to keep the architecture of the application easy to maintain. This functionality is provided by the Router.

+
+
+

Routing works by establishing routes to components via a special file. This special file exports a RouterModule, which has to be imported into app.module.ts. When the URL of the app ends with one of the defined routes, the <router-outlet> tag will display the component related to that route.

+
+
+

You can also configure routes to redirect to a certain component, when a passed URL is unknown, or configure a default page, when the app starts.

+
+
+

There are some cases when a component also has its own navigation contained inside of it. To make this sub-navigation possible, Routers can use child-routes. These are special properties of a route, which you can declare inside of a child-array. With this child-array correctly set up, you can navigate to a component, and have it sub-navigate to other components inside of it.

+
+
+

One last remark: Routes can be secured using a special service called Guards, which forbids or permits the navigation to a component, depending on the return of a Boolean value. This will be shown in the chapter devon4ng Services.

+
+
+
+
+

Forms

+
+
+

Angular provides a large amount of functionality for user input forms. The complete information can be found in the Angular User Input section.

+
+
+

Basically, forms can be built as always, using the <form> tag and adding some inputs and selectors to it. In the case of Angular however, forms have been extended to provide more utility:

+
+
+
    +
  • +

    By declaring the ngForm as a property of the form tag via #formName="ngForm", you get access to Angular’s form-functionalities.

    +
  • +
  • +

    By Adding ngModel, you can use Angular’s data binding to insert user input from the form directly into your code.

    +
  • +
  • +

    By adding a name property, you can pass the form via the submit action and make use of all the functionalities Angular forms provide, like accessing the form values by name, or resetting the form.

    +
  • +
  • +

    You can check for invalid input in the fields of a form, and use this information to disable the submit button for example.

    +
  • +
+
+
+

Angular forms have a lot more functionality to them, so once again, we recommended you to visit and read the Angular User Input section.

+
+
+
+
+

Teradata Covalent Components

+
+
+

Along with style utilities, Teradata Covalent comes with a library of components built using Angular Material. They extend the basic usage of Angular Material components and can be used in more complex situations. This is the case for data tables, layouts, steppers, etc. You can find them all in the Teradata Covalent Components Documentation.

+
+
+
+
+

Teradata Covalent Layouts

+
+
+

Material apps tend to have a similar structure. It is up to you, to customize your app and distinguish it from others. To make this task easier, Teradata Covalent delivers some custom Layouts, which might integrate better with the structure of your component view.

+
+
+

If you are going to use a layout for one page, it is recommended to use a layout for every page. Otherwise, you may encounter problems with the size of the page or with blank spaces. To avoid this, if you use a layout for your root component, add at least a <td-layout> tag to your other components as well, in order to achieve size coherence. The issue of incoherent sizing will only affect you, if you use layouts for some components, but not for others.

+
+
+
+
+

Teradata Covalent Data Table

+
+
+

Almost every application has to show data to the user at some point, so we’ll need an implementation of a table sooner or later. You can make use of the HTML <table> tag, but this means you will have to implement all interactions by hand. Teradata Covalent provides their own Data Table Component with advanced functionalities, which you can use. This avoids the implementation of a working data table from scratch.

+
+
+

The Covalent data table works with input- and output-events. It requires at least a reference to the data to be shown, and an array of named columns. The column names have to correspond to the names of the objects that should be displayed inside of them. You must also define a label for the component. Now you have a functioning data table, to which you can add events like sorting, paging, searching, and so on. For more details, please refer to Teradata’s documentation of the Data Table.

+
+
+
+

Next Chapter: devon4ng Services

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/devon4ng-deployment.html b/docs/jump-the-queue/1.0/devon4ng-deployment.html new file mode 100644 index 00000000..9c63deea --- /dev/null +++ b/docs/jump-the-queue/1.0/devon4ng-deployment.html @@ -0,0 +1,297 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Deploy a devon4ng Application

+
+
+

If you have a finished version of your app and want to deploy it somewhere, you only need to make sure that all tests are passed and that your app compiles correctly (this is tested continuously during ng serve). After this, run the following command inside your angular root folder:

+
+
+

devon ng build

+
+
+

This will compile the project and generate a folder called dist, which will contain all your TypeScript code transpiled into pure JavaScript:

+
+
+
+Compiled Dist Folder +
+
+
+

You can also use the devon ng deploy command to automatically deploy your project to a cloud platform of your choice. For more information on this, please read the guide on automatic deployment with the Angular/CLI.

+
+
+
+

Congratulations!!!! You have successfully completed the JumpTheQueue tutorial.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/devon4ng-ide-setup.html b/docs/jump-the-queue/1.0/devon4ng-ide-setup.html new file mode 100644 index 00000000..8e61a621 --- /dev/null +++ b/docs/jump-the-queue/1.0/devon4ng-ide-setup.html @@ -0,0 +1,428 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

IDE Setup

+
+
+

This Tutorial explains how to setup Visual Studio Code to work on and contribute to devon4ng projects with your Windows computer.

+
+
+

To get started follow these steps:

+
+
+
    +
  1. +

    Download and install Git for Windows:

    +
    + + + + + +
    + + +Install with the option Use Git from the Windows Command Prompt but without Windows Explorer integration! +
    +
    +
    +
    +Git for Windows Setup +
    +
    +
    +
      +
    • +

      For Windows Explorer integration install Tortoise Git (optional)

      +
    • +
    +
    +
  2. +
  3. +

    Download and install the recommended .NET Framework Runtime & Developer Pack

    +
  4. +
  5. +

    Download and install the latest AdoptOpen JDK (with the HotSpot JVM):

    +
    +
      +
    • +

      During installation activate the option for Set JAVA_HOME variable

      +
      +
      +AdoptOpenJDK Setup +
      +
      +
      + + + + + +
      + + +Reboot your PC to put the newly set JAVA_HOME variable into effect! +
      +
      +
    • +
    +
    +
  6. +
  7. +

    Get Visual Studio Code:

    +
    +
      +
    • +

      If you are NOT a member of Capgemini, download VS Code, install and run it.

      +
    • +
    • +

      If you ARE a member of Capgemini, download the current devonfw distribution and unzip it to a directory of your choice. Navigate to ~\software\vscode and run Code.exe.

      +
    • +
    +
    +
  8. +
  9. +

    Install the devonfw Platform Extension Pack for VS Code:

    +
    +
      +
    • +

      In VS Code, select the 'Extensions' icon in the Activity Bar to the left, then search and install the devonfw Platform Extension Pack …​

      +
    • +
    • +

      or follow this link, click 'Install', confirm the dialogue and open the URL with VS Code.

      +
    • +
    +
    +
  10. +
  11. +

    Download and install the current version of Node.js

    +
  12. +
  13. +

    Install TypeScript support and the Angular CLI:

    +
    +
      +
    • +

      In VS Code, open a command prompt via Terminal > New Terminal

      +
    • +
    • +

      now run the command npm install –g typescript

      +
    • +
    • +

      also run the command npm install –g @angular/cli

      +
      + + + + + +
      + + +You can check, what modules you have installed by running npm list -g. You can also check, what version of the Angular CLI you are using by running ng version. +
      +
      +
    • +
    +
    +
  14. +
  15. +

    (OPTIONAL) Configure Yarn to be your package manager:

    +
    +
      +
    • +

      Download and install the stable version of Yarn

      +
    • +
    • +

      Now run the command ng config -g cli.packageManager yarn

      +
      + + + + + +
      + + +We feel like Yarn is quite a bit faster and generally more usable than npm. However, if you are already familiar with npm, you can continue using it. +
      +
      +
    • +
    +
    +
  16. +
+
+
+

Finally, if you are using the devonfw distribution, we recommend storing all your devon4ng projects in the folder workspaces_vs. This allows you to execute the script update-all-workspaces.bat, in order to generate a launch script for each individual project called vscode-<YourProjectName>.bat.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/devon4ng-introduction.html b/docs/jump-the-queue/1.0/devon4ng-introduction.html new file mode 100644 index 00000000..a0067ea6 --- /dev/null +++ b/docs/jump-the-queue/1.0/devon4ng-introduction.html @@ -0,0 +1,415 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4ng

+
+
+

Created to support the development of Angular applications, devon4ng includes Google Material Design as its main visual language. It takes maximum advantage of Angular’s possibilities and Material components, making it possible to build modular, well-designed, responsive front-end applications.

+
+
+
+
+

devon4ng Technology Stack

+
+
+

devon4ng works on top of Angular but also provides several tools, libraries and code conventions to make your Angular apps easier to develop. It’s based on the following technologies:

+
+
+
    +
  • +

    Angular Framework as the main development framework

    +
  • +
  • +

    Angular Material as visual language and components for Angular.

    +
  • +
  • +

    Teradata Covalent as component and utilities library compatible with Angular Material.

    +
  • +
  • +

    Yarn as the main project dependency management tool.

    +
  • +
+
+
+

The main advantages of these technologies are listed here:

+
+
+
    +
  • +

    Teradata provides:

    +
    +
      +
    • +

      4 available layouts that fit latest design necessities.

      +
    • +
    • +

      Several tools and utilities regarding style conventions, such as text size, padding and margins, …​

      +
    • +
    • +

      Complex components such as: data tables, chips with auto-complete, pagination, …​

      +
    • +
    +
    +
  • +
  • +

    Angular Material is composed of a number of fancy components like tabs, cards, buttons, …​

    +
  • +
  • +

    Yarn is faster than NPM and provides more advanced functionalities to manage dependencies.

    +
  • +
+
+
+
+
+

devon4ng Tools

+
+ +
+
+
+

== IDE

+
+
+

There is no integrated IDE that comes with the framework. This means you are free to work with whatever IDE suits you best. However, we highly recommend the use of Visual Studio Code, for which we provide an extension pack that contains many useful plugins to make development with Typescript and Angular even easier.

+
+
+
+
+

== Angular CLI

+
+
+

The Angular CLI lets developers automate common processes. It comes with Webpack as a bundler. It is widely used in the Angular community thanks to the boost in productivity it provides when creating new projects from scratch, serving and testing projects, creating new components, services, directives and much more.

+
+
+
+
+

== Testing

+
+
+

Testing ensures that the TypeScript code used in our services and components works properly, but we can also check HTML tags and properties. There are many options to test Angular apps, however Karma and Jasmine are the most widespread test frameworks.

+
+
+
+
+

devon4ng Architecture Overview

+
+
+

Since Angular is used as the foundational framework for devon4ng, the application architecture is going to be the same as the Angular architecture:

+
+
+
+Angular Architecture Overview +
+
+
+

This architecture will be enhanced with some functionalities from Teradata Covalent and Angular Material:

+
+
+
    +
  • +

    Theming: A functionality that Angular Material includes in its library and Teradata Covalent extends. It declares one primary color, one secondary color and one color for warnings and alerts to be used in all views of the application. Teradata Covalent also expects a color for the background and another for the foreground. Different color compositions can be stored in a number of themes, which can be changed at run-time by the user.

    +
  • +
+
+
+
+Angular Theming +
+
+
+
    +
  • +

    Flex-Box: Along with other CSS Utility Styles & Classes, Teradata Covalent comes with flex-box, useful for styling and organizing components inside a view, which is extended by Teradata Covalent to achieve responsiveness. You can declare styles that change, hide or transform the component, depending on the screen resolution of the device.

    +
  • +
+
+
+
+Flex Box +
+
+
+

In the following chapters we are going to see an example of a finished devon4ng application, explain how to build your own devon4ng app from scratch, and teach you about each part of the architecture so you can start building your own applications.

+
+
+
+

Next Chapter: A devon4ng Application

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/devon4ng-services.html b/docs/jump-the-queue/1.0/devon4ng-services.html new file mode 100644 index 00000000..1fd84759 --- /dev/null +++ b/docs/jump-the-queue/1.0/devon4ng-services.html @@ -0,0 +1,432 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular Services

+
+
+

In this chapter we are going to take a closer look at Angular services and learn how to utilize them.

+
+
+
+
+

What are Angular Services?

+
+ +
+
+
+

Service is a broad category encompassing any value, function, or feature that your application needs.

+
+
+

Almost anything can be a service. A service is typically a class with a narrow, well-defined purpose. It should do something specific and do it well.

+
+
+

There is nothing specifically Angular about services. Angular has no definition of a service. There is no service base class, and no place to register a service.

+
+
+

Yet services are fundamental to any Angular application. Components are big consumers of services.

+
+
+
+
+

Services are often created in a shared folder, with the purpose of containing all the logic of a component or a complex type of operation. For example, in the case of MyThaiStar, all components which call the server, or have methods with more complex logic, have their own service that implements them. There are also services, which — instead of being used by just one component — are used several times throughout the code, in many components, like the price calculator for example.

+
+
+

Just like components, services have to be declared as a NgModule. In this case via the providers array.

+
+
+
+
+

Dependency Injection

+
+
+

The Angular Architecture Documentation says the following about DI:

+
+
+
+
+

Dependency injection is a way to supply a new instance of a class with the fully-formed dependencies it requires. Most dependencies are services. Angular uses dependency injection to provide new components with the services they need.

+
+
+

When Angular creates a component, it first asks an injector for the services that the component requires.

+
+
+
+Angular Injector +
+
+
+

An injector maintains a container of service instances that it has previously created. If a requested service instance is not in the container, the injector makes one and adds it to the container before returning the service to Angular. When all requested services have been resolved and returned, Angular can call the component’s constructor with those services as arguments. This is dependency injection.

+
+
+
+
+

For more information, please visit the Angular Dependency Injection guide.

+
+
+
+
+

Create a new Service

+
+
+

Security or other global services will be stored in a shared folder, at the same level as the rest of the components. Services that are specific to a certain component, will be stored in a folder for the respective component. We can specify the path to a service when creating it with the Angular CLI:

+
+
+
+
ng generate service <path>/<service_name>
+
+
+
+
+
+

Authentication

+
+
+

Authentication is a special service created to maintain user sessions in the application. In the case of our tutorial application it serves as an indicator of the log-in state and stores the name of the user, but it can be extended to store tokens, validate permissions of roles, and so on.

+
+
+

Basically, the objective of this service is to manage what the user can or cannot see, depending on their actions in the application.

+
+
+
+
+

Guards

+
+
+

Guards are services that implement an interface called CanActivate. This interface forces developers to implement the canActivate method, which returns a simple boolean. It is up to you to decide what conditions you are going to implement to forbid or allow the navigation to a certain component.

+
+
+

Guards are strongly related to the Router, because routes can carry the property canActivate:[GuardServiceName]. You can create as many Guards as you want, to secure every single component of your app, just as you require.

+
+
+

If your application will have a login process, or special areas not accessible to everyone, we encourage you to use Guards, because even if you hide a navigation-button, the user can still modify the URL in the browser to access a component. With Guards in place, this type of navigation will be forbidden, and your app will not be compromised easily.

+
+
+
+
+

Server Communication

+
+
+

Angular uses HTTP to communicate with the application server, but the return call is implemented via an object from the RxJS library. RxJS is a third party library, endorsed by Angular, and used to manage asynchronous calls based on the Observer pattern.

+
+
+

You don’t need to install the RxJS package manually, it comes preinstalled when you create a project via the Angular CLI. You just have to import the correct operators and modules to use it correctly.

+
+
+

Observables work as follows:

+
+
+
    +
  1. +

    First you make a HTTP call to your server URL, calling for a service. The server will return an Observable that you can work with, using methods that you can find in the API. The most common method is .map, which is used to convert the server response object to JSON. This way you have easy access to any data the server sends to you.

    +
  2. +
  3. +

    When implemented, all operators/methods of the Observable should return the whole Observable, so the component that consumes the service can subscribe to this method and obtain the data.

    +
  4. +
  5. +

    A component calls the subscribe service function, to obtain an Observable object. The subscribe function accepts up to three parameters:

    +
    +
      +
    1. +

      A function, that executes while everything is going right. It sends a value such as a number, a string, an object, etc. It can be called multiple times, while the Observable is active.

      +
    2. +
    3. +

      A function, that executes only once if something fails. It sends a JavaScript error or exception.

      +
    4. +
    5. +

      An event, that triggers only once, when the Observable becomes inactive.

      +
    6. +
    +
    +
  6. +
+
+
+

This is the common workflow with Observables. It takes into account, that if you put some code after the subscribe, it will probably be executed before the subscription ends. If you need something to be executed after the subscribe function, you should put it inside of the subscription.

+
+
+

You can find out more about RxJS and Observables on Angular’s Observables page.

+
+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/devonfw-ide-setup.html b/docs/jump-the-queue/1.0/devonfw-ide-setup.html new file mode 100644 index 00000000..9a616d2b --- /dev/null +++ b/docs/jump-the-queue/1.0/devonfw-ide-setup.html @@ -0,0 +1,376 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

IDE Setup

+
+
+

This Tutorial explains how to setup the development environment to work on and contribute to devonfw4j with your Windows computer.

+
+
+

We are using a pre-configured devon-ide for development. To get started follow these steps:

+
+
+
    +
  1. +

    Get a Git client. For Windows use:

    +
    + +
    +
  2. +
  3. +

    Download the distribution

    +
    +
      +
    • +

      If you are a member of Capgemini: download the current devonfw distribution (for devonfw please find the setup guide within the devon-dist).

      +
      +
      +devonfw Distribution Download +
      +
      +
    • +
    +
    +
  4. +
  5. +

    Choose a project location for your project (e.g. C:\projects\devonfw, referred to with $projectLoc in this setup guides following steps). Avoid long paths and white spaces to prevent trouble. Extract the downloaded ZIP files via Extract Here (e.g. using 7-Zip). Do not use the Windows native ZIP tool to extract as this is not working properly on long paths and filenames.

    +
  6. +
  7. +

    Run the script update-all-workspaces.bat in $projectLoc.

    +
    +

    Hint: You can use update-all-workspaces.bat whenever you create a new folder in workspaces to separate different work-spaces. This update will create new Eclipse start batches allowing you to run a number of Eclipse instances using different work-spaces in parallel.

    +
    +
  8. +
  9. +

    Open console.bat and check out the git repositories you need to work on into workspaces\main. with the following commands:

    +
    +
    +
    cd workspaces/main
    +git clone --recursive https://github.com/devonfw/my-thai-star.git
    +
    +
    +
    +

    Do another check whether there are files in folder workspaces\main\my-thai-star\!

    +
    +
  10. +
  11. +

    Run the script eclipse-main.bat to start the Eclipse IDE.

    +
  12. +
  13. +

    In Eclipse select File > Import > Maven > Existing Maven Projects and then choose the cloned projects from your workspace by clicking the Browse button and select the folder structure (workspaces\main\my-thai-star\java\MTSJ).

    +
  14. +
  15. +

    Execute the application by starting the SpringBootApp. Select the class and click the right mouse button. In the context menu select the entry Run as ⇒ Java Application (or Debug as …​). The application starts up and creates log entries in the Eclipse Console Tab.

    +
    +
    +Running an Application +
    +
    +
  16. +
  17. +

    Open console.bat.

    +
  18. +
  19. +

    Go to the folder workspaces\main\my-thai-star\angular in the console.bat terminal.

    +
  20. +
  21. +

    Execute the command: yarn install. Wait till everything is finished.

    +
  22. +
  23. +

    Execute the command: yarn start.

    +
  24. +
  25. +

    Once started, the sample application runs on http://localhost:4200/restaurant, login with waiter/waiter and have a look at the services list provided.

    +
  26. +
+
+
+
+

Next Chapter: Creating a devon4j Project

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/devonfw-intro.html b/docs/jump-the-queue/1.0/devonfw-intro.html new file mode 100644 index 00000000..b3cf432f --- /dev/null +++ b/docs/jump-the-queue/1.0/devonfw-intro.html @@ -0,0 +1,449 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devonfw Introduction

+
+
+
+devonfw Logo +
+
+
+

devonfw is a development platform aiming for standardization of processes and productivity boost, that provides an architecture blueprint for Java/JavaScript applications, alongside a set of tools to provide a fully functional out-of-the-box development environment.

+
+
+

You don’t need to click any links here. You can come back later if you like, but everything will be explained step by step later on.

+
+
+
+
+

Everything in a Zip Archive

+
+
+

The devonfw distributions can be obtained from the releases site and are packaged in a zip file that includes all the needed tools, software and configurations.

+
+
+
+
+

No Need of Installation or Configuration

+
+
+

Having all the dependencies self-contained in the distribution’s zip file, users don’t need to install or configure anything. Just extracting the zip content is enough to have a fully functional devonfw.

+
+
+
+
+

Pre-packaged Software

+
+
+

The list of software pre-installed with devonfw distributions is:

+
+
+
    +
  • +

    Eclipse: pre-configured and fully functional IDE to develop Java based apps.

    +
  • +
  • +

    Java: all the Java environment configured and ready to be used within the distribution.

    +
  • +
  • +

    Maven: to manage project dependencies.

    +
  • +
  • +

    Node: a NodeJS environment configured and ready to be used within the distribution.

    +
  • +
  • +

    Sencha: devonfw also includes an installation of the Sencha CMD tool.

    +
  • +
  • +

    SonarQube: a code quality tool.

    +
  • +
  • +

    Tomcat: a web server ready to test the deploy of our artifacts.

    +
  • +
+
+
+
+
+

Server Solutions

+
+
+

For back-end solutions devonfw provides the devon4j platform as main implementation. With devon4j, users will find a standardized architecture blueprint for a Java applications based on Spring, and a set of best-of-breed technologies alongside a compendium of best practices and code conventions to build solid and reliable web applications.

+
+
+

With devonfw/devon4j developers have at their disposal an archetype to generate new server projects with all devon4j modules and pre-configurations ready to start to develop applications in a fully functional environment.

+
+
+
+
+

Client Solutions

+
+
+

For client applications devonfw includes two possible solutions, both based on JavaScript:

+
+
+ +
+
+
+
+

Custom Tools

+
+ +
+
+
+

Devcon

+
+
+

For project management and other life-cycle related tasks, devonfw provides also Devcon, a command line and graphic user interface cross platform tool.

+
+
+

With Devcon users can automate the creation of new projects (both server and client), build and run those and even, for server projects, deploy locally on Tomcat.

+
+
+
+Devcon +
+
+
+

All those tasks can be done manually using Maven, Tomcat, Sencha CMD, Bower, Gulp, etc. but with Devcon users have the possibility of managing the projects without the necessity of dealing with all those different tools.

+
+
+
+
+

CobiGen

+
+
+

CobiGen is a code generator included in the context of devonfw that allows users to generate all the structure and code of the components, helping to save a lot of time consumed in repetitive tasks.

+
+
+
+CobiGen +
+
+
+
+
+

ip modules

+
+
+

As part of the goal of productivity boosting, devonfw provides also to developers a set of modules, created from real projects requirements, that can be connected to projects saving all the work of a new implementation.

+
+
+

The current available modules are:

+
+
+
    +
  • +

    async: module to manage asynchronous web calls in a Spring based server app.

    +
  • +
  • +

    i18n: module for internationalization.

    +
  • +
  • +

    integration: implementation of Spring integration.

    +
  • +
  • +

    microservices: a set of archetypes to create a complete microservices infrastructure based on Spring Cloud Netflix.

    +
  • +
  • +

    reporting: a module to create reports based on Jasper reports library.

    +
  • +
  • +

    winauth active directory: a module to authenticate users against an Active Directory.

    +
  • +
  • +

    winauth single sign on: module that allows applications to authenticate the users by the Windows credentials.

    +
  • +
+
+
+
+

Next Chapter: devonfw IDE Setup

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/index.html b/docs/jump-the-queue/1.0/index.html new file mode 100644 index 00000000..5c9bc7b1 --- /dev/null +++ b/docs/jump-the-queue/1.0/index.html @@ -0,0 +1,415 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Home

+
+ +
+
+
+

General Concept

+
+
+

This documentation serves as an introductory course to application development under devonfw. It is structured in an alternating fashion: First a general explanation of different components and features is provided — using the complex MyThaiStar application as an example, then the actual implementation of these aspects is demonstrated via code-examples — which are part of the smaller JumpTheQueue application.

+
+
+

Here’s some general advice up front:

+
+
+
    +
  • +

    Carefully read each chapter and use the provided instructions in combination with the screenshots to complete your application code.

    +
  • +
  • +

    Work through the chapters in their correct order by following the "Next chapter:" link at the bottom of each page.

    +
  • +
  • +

    Don’t get lost in all the optional links provided within the text. They exist for the sake of completeness and you don’t have to follow them unless explicitly stated.

    +
  • +
  • +

    In Eclipse select the "Package Explorer" tab, press CTRL+F10 and under "Package Presentation" enable the option "Hierarchical" to gain a better overview over the project.

    +
  • +
  • +

    If Eclipse shows any warnings, ignore them for the time being.

    +
  • +
  • +

    If Eclipse detects errors as a result of its static code analysis, read the displayed tooltips and consider applying the suggested solutions. If this doesn’t resolve the issue or if you encounter runtime errors, do the following:

    +
    +
      +
    • +

      Backtrack your steps and make sure you haven’t made any mistakes previously. Be especially careful when using CobiGen, since generating wrong/unnecessary classes will most likely break the application.

      +
    • +
    • +

      Compare your own code with the finished application code in this repository to rule out any other potential mistakes. As a last resort you might copy/paste the provided files into your own project.

      +
    • +
    • +

      If all of the above fails, ask your supervisor for help and provide them with a link to the respective section in this guide. This way we can correct possible errors and improve this introductory course for future trainees.

      +
    • +
    +
    +
  • +
+
+
+ + + + + +
+ + +If you haven’t already, please read the devonfw getting started guide for a basic overview over the framework and the available resources. +
+
+
+
+
+

Document Overview

+
+ +
+
+ + +
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/jump-the-queue/1.0/jump-the-queue-design.html b/docs/jump-the-queue/1.0/jump-the-queue-design.html new file mode 100644 index 00000000..38a9ff9f --- /dev/null +++ b/docs/jump-the-queue/1.0/jump-the-queue-design.html @@ -0,0 +1,682 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Design

+
+
+
+JumpTheQueue Scene +
+
+
+
+
+

Introduction

+
+
+

When visiting public (free) or private (paid) events there are often large queues creating significant waiting times. Ideally the organizer of the event would like to streamline the entry of people into the venue. If people were to arrive right on time they could get into line more efficiently. A website or application could support this process by assigning visitors a queue number. This document describes the design of such a website/application, appropriately named JumpTheQueue.

+
+
+
+
+

==

+
+
+

This document is intended to reflect a real world use case. The design takes this into account by trying to be complete. The implementation however is simplified in order to be more comprehensible. The :information_source: symbol is denoted on this page, where implementation diverges from design, followed by a short comment about the nature of the difference. +== ==

+
+
+
+
+

User Stories (by Visitors of an Event)

+
+
+
+
As a < type of user >, I want < some goal > so that < some reason >.
+
+
+
+
+
+

Epic: Register Event

+
+
+

As a visitor of an event, I want to use a website or an app, which — after registration — provides me with a number (and optional date/time), so I can get convenient access to the event.

+
+
+
+
+

User Story: Register

+
+
+

As a user of JumpTheQueue, I want to register with my name, username, password and phone number, comply with the requirements and obtain my queue number.

+
+
+
+
+

== Acceptance Criteria

+
+
+

A full name (first name / last name) is mandatory. This name as well as the username, password and telephone number must be valid.

+
+
+
+
+

User Story: Terms and Conditions

+
+
+

As a user of JumpTheQueue, I accept that the organizer of the event can store my personal data and send me commercial notices (aka “spam”).

+
+
+
+
+

== Acceptance Criteria

+
+
+

The terms and conditions must be accepted by ticking the corresponding check boxes.

+
+
+
+
+

User Story: Join a Queue

+
+
+

As a user of JumpTheQueue, I want to join the queue at the event.

+
+
+
+
+

User Story: List Queued Visitors

+
+
+

As a user of JumpTheQueue, I want to see which queue number is currently being processed and what my own queue number is. Optionally, I want to know the estimated remaining time until it’s my turn.

+
+
+
+
+

==

+
+
+

The "remaining time" feature is not further developed/implemented. +== ==

+
+
+
+
+

User Story: Leave the Queue

+
+
+

As a user of JumpTheQueue, I want to be able to leave a queue I previously joined.

+
+
+
+
+

User Story: Verify contact data

+
+
+

As a user of JumpTheQueue, I have to confirm either my E-Mail address or my phone number by entering a code which was sent to me, so my contact-data can be verified.

+
+
+
+
+

==

+
+
+

This feature is not further developed/implemented. +== ==

+
+
+
+
+

User Interface

+
+ +
+
+
+

Flow

+
+
+
+JumpTheQueue App Flow +
+
Figure 1. The basic flow of the application.
+
+
+
    +
  • +

    for new users: fill in a form with your private data (first name, last name, phone number), choose a username and password, tick the box(es) to accept the terms and conditions and finally press a button to “register”

    +
  • +
  • +

    for returning users: enter username and password and press a button to "login"

    +
  • +
  • +

    in case of validation errors, a suitable error message will be shown

    +
  • +
  • +

    if there are no errors an access code will be generated, which will be shown on the following page (this code can optionally be appended with the access date/time)

    +
  • +
  • +

    this page could also show a visualization of the queue, listing all currently queued visitors

    +
  • +
+
+
+
+
+

Mock-Ups

+
+
+
+JumpTheQueue Mock-Up +
+
Figure 2. The pages/views of the application.
+
+
+
+
+

Model

+
+
+
+JumpTheQueue Model +
+
Figure 3. The logical components of the application and their interactions.
+
+
+
+JumpTheQueue Event +
+
Figure 4. Each event has multiple queues, each queue holds multiple visitors with access codes.
+
+
+
+
+

==

+
+
+

The "Event" item is not further developed/implemented. +== ==

+
+
+
+
+

Predicates

+
+ +
+
+
+

==

+
+
+
+
Definition
+
+
+
+
+
< function name > = < parameters > => < *pure* function >
+
+
+
+
+
or
+
+
+
+
+
< function name > = trivial : < trivial description >
+
+
+
+
+
+

==

+
+ +
+
+
+

==

+
+
+
+
isnull = (v) => v == null
+notnull = (v) => !isnull(v)
+
+isempty = (s: string) => s.length == 0
+notempty = (s: string) => !notempty(s)
+
+isEmailAddress = trivial: notnull + notempty + consists of <name>@<domain.toplevel>
+
+isTelephoneNumber = trivial: notnull + notempty + consists of sequence of numbers or spaces (i.e. “4 84 28 81”)
+
+
+
+
+
+

==

+
+ +
+
+
+

Types

+
+ +
+
+
+

==

+
+
+
+
Definition
+
+
+
+
+
type < alias > :: < type defs > with predicated: < list of predicates >
+
+
+
+
+
or
+
+
+
+
+
type < alias > :: trivial: < trivial description >
+
+
+
+
+
+

==

+
+ +
+
+
+

==

+
+
+
+
type ID :: trivial: Unique Atomic Identifier
+
+type NamedItem :: string
+with predicates: notnull, notempty
+
+type EmailAddress :: string
+with predicates: isEmailAddress
+
+type TelephoneNumber :: string
+with predicates: isTelephoneNumber
+
+type Option<T> :: None | T
+
+type Result<T> :: Error | T
+
+type Error :: trivial: Error information with code & error description
+
+
+
+
+
+

==

+
+ +
+
+
+

Entities & Value Objects

+
+
+

|== == == == == == == == == == == == == = +2+|Visitor (Entity) +s|Field s|Type +|Id |ID +|Username |NamedItem +|Name |NamedItem +|Password |NamedItem +|PhoneNumber |Option< TelephoneNumber > +|AcceptedComercial |boolean +|AcceptedTerms |boolean +|UserType |boolean +|== == == == == == == == == == == == == =

+
+
+

|== == == == == == == == == == == == == = +2+|AccessCode (Entity) +s|Field s|Type +|Id |ID +|Ticketnumber |NamedItem +|StartTime |Option< DateTime > +|EndTime |Option< DateTime > +|StartTime |Option< DateTime > +|Visitor |NamedItem +|Queue |NamedItem +|== == == == == == == == == == == == == =

+
+
+

|== == == == == == == == == == == == == = +2+|DailyQueue (Entity) +s|Field s|Type +|Id |ID +|Name |NamedItem +|Logo |NamedItem +|AttentionTime |Option< DateTime > +|MinAttentionTime |Option< DateTime > +|Active |boolean +|Customers |NamedItem +|== == == == == == == == == == == == == =

+
+
+

There must be a 1 - 1 relationship between a Visitor and a VisitorTicker.

+
+
+
+

Next Chapter: devon4j Overview

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/modules/ROOT/pages/index.adoc b/docs/modules/ROOT/pages/index.adoc deleted file mode 100644 index 93ae5122..00000000 --- a/docs/modules/ROOT/pages/index.adoc +++ /dev/null @@ -1,3 +0,0 @@ -= Antora-docs-evaluation - -This is the documentation for Antora-docs-evaluation diff --git a/docs/mrchecker/1.0/FAQ/Common-problems/I-cannot-find.html b/docs/mrchecker/1.0/FAQ/Common-problems/I-cannot-find.html new file mode 100644 index 00000000..f68a80fe --- /dev/null +++ b/docs/mrchecker/1.0/FAQ/Common-problems/I-cannot-find.html @@ -0,0 +1,279 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

I can’t find the boilerplate module. Has it been removed?

+
+
+

The boilerplate module has been removed from the GitHub project on purpose.

+
+
+

There were problems with naming and communication, not everybody was aware of the meaning of the word boilerplate.

+
+
+

The name of the folder has been changed to template. It can be found in the GitHub project.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/FAQ/Common-problems/It-is-possible.html b/docs/mrchecker/1.0/FAQ/Common-problems/It-is-possible.html new file mode 100644 index 00000000..9f4252a2 --- /dev/null +++ b/docs/mrchecker/1.0/FAQ/Common-problems/It-is-possible.html @@ -0,0 +1,279 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ + +
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/FAQ/Common-problems/Tests-are-not-stable.html b/docs/mrchecker/1.0/FAQ/Common-problems/Tests-are-not-stable.html new file mode 100644 index 00000000..d4b11fdc --- /dev/null +++ b/docs/mrchecker/1.0/FAQ/Common-problems/Tests-are-not-stable.html @@ -0,0 +1,304 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Tests are not stable

+
+
+

Selenium tests perform actions much faster than a normal user would. Because pages can contain dynamically changing content, some web elements can still not be loaded when Selenium driver tries to access them.

+
+
+

getDriver().waitForPageLoaded() method checks ready state in the browser, that’s why stability problems may happen in advanced frontend projects.

+
+
+

To improve test stability you can:

+
+
+
    +
  • +

    add waiting methods before dynamically loading elements e.g. getDriver().waitForElement(By selector)

    +
  • +
  • +

    add timeout parameter in method getDriver().findElementDynamic(By selector, int timeOut)

    +
  • +
  • +

    change global waiting timeout value using method getDriver().manage().timeouts().implicitlyWait(long time, TimeUnit unit)

    +
  • +
+
+
+

Furthermore, if the page displays visible loading bars or spinners, create FluentWait method to wait until they disappear.

+
+
+

Notice that by increasing timeouts you may improve stability but too long waiting time makes tests run slower.

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/FAQ/How-to/Change-timeouts.html b/docs/mrchecker/1.0/FAQ/How-to/Change-timeouts.html new file mode 100644 index 00000000..095558f9 --- /dev/null +++ b/docs/mrchecker/1.0/FAQ/How-to/Change-timeouts.html @@ -0,0 +1,325 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

How to: Change timeouts?

+
+
+

If you would like to change timeouts - you don’t have to change them globally. +It is possible to add waiting time parameter to searching methods, such as:

+
+
+

getDriver().findElementDynamic(By selector, int timeOut)
+timeout - in seconds

+
+
+

It is recommended to use methods that significantly level up the repetitiveness of the code:

+
+
+
+
getDriver().waitForElement(By selector);
+
+getDriver().waitForElementVisible(By selector);
+
+getDriver().waitForPageLoaded();
+
+getDriver().waitUntilElementIsClickable(By selector);
+
+
+
+

Or Fluent Wait methods with changed timeout and interval:

+
+
+
+
FluentWait<WebDriver> wait = new FluentWait<WebDriver>(getDriver())
+        .withTimeout(long duration, TimeUnit unit)
+        .pollingEvery(long duration, TimeUnit unit);
+wait.until((WebDriver wd) -> expectedCondition.isTrue());
+getWebDriverWait().withTimeout(millis, TimeUnit.MILLISECONDS)
+        .withTimeout(long duration, TimeUnit unit)
+        .pollingEvery(long duration, TimeUnit unit)
+        .until((WebDriver wd) -> expectedCondition.isTrue());
+
+
+
+

These methods allow You to change WebDriver timeouts values such as:

+
+
+

getDriver().manage().timeouts().pageLoadTimeout(long time, TimeUnit unit)
+the amount of time to wait for a page to load before throwing an exception. This is the default timeout for method getDriver().waitForPageLoaded()

+
+
+

getDriver().manage().timeouts().setScriptTimeout(long time, TimeUnit unit)
+the amount of time to wait for execution of script to finish before throwing an exception

+
+
+

getDriver().manage().timeouts().implicitlyWait(long time, TimeUnit unit) +the amount of time the driver should wait when searching for an element if it is not immediately present. After that time, it throws an exception. This the default timeout for methods such as getDriver().findElementDynamic(By selector) or getDriver().waitForElement(By selector)

+
+
+

Changing timeouts can improve test stability but can also make test run time longer.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/FAQ/How-to/Start-a-browser.html b/docs/mrchecker/1.0/FAQ/How-to/Start-a-browser.html new file mode 100644 index 00000000..459858f1 --- /dev/null +++ b/docs/mrchecker/1.0/FAQ/How-to/Start-a-browser.html @@ -0,0 +1,308 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

How to: Start a browser in Incognito/Private mode?

+
+
+

In MrChecker there is a possibility of changing browser options during runtime execution.

+
+
+

To run the browser in incognito mode:

+
+
+
    +
  1. +

    In Eclipse - open Run Configurations window:

    +
    +

    ht image1

    +
    +
  2. +
  3. +

    Select a test which you want to run and switch to arguments tab:

    +
    +

    ht image2

    +
    +
  4. +
  5. +

    Add VM argument:

    +
    +
      +
    • +

      for the incognito mode in chrome:

      +
      +

      ht image3

      +
      +
    • +
    +
    +
  6. +
+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/FAQ/Installation-problems/Chromedriver-version.html b/docs/mrchecker/1.0/FAQ/Installation-problems/Chromedriver-version.html new file mode 100644 index 00000000..838b6280 --- /dev/null +++ b/docs/mrchecker/1.0/FAQ/Installation-problems/Chromedriver-version.html @@ -0,0 +1,317 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Chromedriver version is not compatible with Chrome browser

+
+
+

Problem:

+
+
+

During the tests your web browser window opens and immediately closes, all your tests are broken.

+
+
+

Following error message is visible in the test description:

+
+
+
+
session not created: This version of ChromeDriver only supports Chrome version 76
+Build info: version: '<build_version>', revision: '<build_revision>', time: '<time>'
+System info: host: '<your_computer_name>', ip: '<your_ip_address>', os.name: '<your_os_name>', os.arch: '<your_os_architecture>', os.version: '<your_os_version>', java.version: '<java_version_installed>'
+Driver info: driver.version: NewChromeDriver
+
+
+
+

Solution:

+
+
+
    +
  1. +

    Make a change in the following files:

    +
    +
      +
    • +

      MrChecker_Test_Framework\workspace\devonfw-testing\src\resources\settings.properties

      +
    • +
    • +

      For project template-app-under-test: MrChecker_Test_Framework\workspace\devonfw-testing\template\src\resources\settings.properties

      +
    • +
    • +

      For project example-app-under-test: MrChecker_Test_Framework\workspace\devonfw-testing\example\src\resources\settings.properties

      +
      +

      Change the value of the selenium.driverAutoUpdate field from true to false

      +
      +
    • +
    +
    +
  2. +
  3. +

    Replace the following file with a version compatible with your browser: +MrChecker_Test_Framework\workspace\devonfw-testing\example\lib\webdrivers\chrome\chromedriver.exe .

    +
  4. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/FAQ/Installation-problems/My-browser-opens-up.html b/docs/mrchecker/1.0/FAQ/Installation-problems/My-browser-opens-up.html new file mode 100644 index 00000000..9d200e02 --- /dev/null +++ b/docs/mrchecker/1.0/FAQ/Installation-problems/My-browser-opens-up.html @@ -0,0 +1,282 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

My browser opens up in German by default

+
+
+

Problem:

+
+
+

I would like my browser to use the English language, but the default language for the browser is German. How can I change the settings?

+
+
+

Solution:

+
+
+

There is a Properties file installed together with the MrChecker installation. It is possible to set the language in which a browser should be opened for testing purposes in Properties > Selenium configuration.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/FAQ/Installation-problems/run-Mobile-Tests-with-runtime-parameters.html b/docs/mrchecker/1.0/FAQ/Installation-problems/run-Mobile-Tests-with-runtime-parameters.html new file mode 100644 index 00000000..e0dd5b1b --- /dev/null +++ b/docs/mrchecker/1.0/FAQ/Installation-problems/run-Mobile-Tests-with-runtime-parameters.html @@ -0,0 +1,306 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Where should I run the command:

+
+
+
+
- mvn clean compile test  -Dapp="mio-file.apk -DautomationName="UiAutomator1" -Dthread.count=1
+
+
+
+
+
I tried from:
+----
+C:\MrChecker_Test_Framework\workspace\devonfw-testing
+----
+ but it doesn’t work because of a missing POM file. Then I tried from:
+ ----
+ C:\MrChecker_Test_Framework\workspace\devonfw-testing\example” and run “mvn clean compile test  -Dapp="mio-file.apk
+ DautomationName="UiAutomator1" -Dthread.count=1
+
+
+
+
+
 ----
+and I have the following errors:
+
+
+
+
+
image::images/imageerror.png[]
+
+
+
+

If I check the repository online http://repo1.maven.org/maven2 and I go in http://repo1.maven.org/maven2/com/capgemini/mrchecker/ - nothing is done about it

+
+
+
+
mrchecker-mobile-module:jar:7.2.0.1-SNAPSHOT
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Migration/Migration-guide.html b/docs/mrchecker/1.0/Migration/Migration-guide.html new file mode 100644 index 00000000..12cbb985 --- /dev/null +++ b/docs/mrchecker/1.0/Migration/Migration-guide.html @@ -0,0 +1,656 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Junit4 to Junit5 migration guide

+
+
+

mrchecker-core-module version 5.6.2.1 features the upgrade of Junit4 to Junit5. Consequently, the Junit4 features are now obsolete and current test projects require migration +in order to use the latest revision of MrChecker. This site provides guidance on the migration.

+
+ +
+
+
+

POM

+
+
+

The project pom.xml file needs to be adjusted in the first place. An exemplary POM file for download can be found here: https://github.com/devonfw/mrchecker/blob/develop/template/pom.xml

+
+
+
+
+

Test Annotations

+
+
+

Junit5 redefines annotations defining a test flow. The annotations need to be adjusted as per the following table.

+
+
+
+migration01 +
+
+
+
+
+

Rule, ClassRule, TestRule and TestMethod

+
+
+

Junit4 @Rule and @ClassRule annotations as well as TestRule and TestMethod interfaces have been replaced +with the Junit5 extension mechanism (https://junit.org/junit5/docs/current/user-guide/#extensions). +During the migration to Junit5, all the instances of the mentioned types need to be rewritten according to the Junit5 User Guide. +The extension mechanism is far more flexible than the Junit4 functionality based on rules.

+
+
+

Note: as per Junit5 API spec: ExpectedExceptionSupport, ExternalResourceSupport, VerifierSupport +provide native support for the corresponding Junit4 rules.

+
+
+

Extension registration example:

+
+
+
+migration02 +
+
+
+
+migration arrow down +
+
+
+
+migration03 +
+
+
+

TestRule (TestWatcher and ExternalResource) to Extension (TestWatcher and AfterAllCallback) example:

+
+
+
+migration04 +
+
+
+
+migration arrow down +
+
+
+
+migration05 +
+
+
+
+
+

Page, BasePageAutoRegistration and PageFactory classes

+
+
+

Page class is a new MrChecker class. It was introduced to provide common implementation for its subpages in specific MrChecker modules. +In order to receive test lifecycle notifications, particular Pages need to be registered by calling the addToTestExecutionObserver() method. +To facilitate this process, the PageFactory class was designed and its usage is the recommended way of creating Page objects for tests. +Although in MrChecker based on Junit4, the registration process was done in a specific BasePage constructor, it’s been considered error prone and reimplemented. +Furthermore, to reduce migration cost, BasePageAutoRegistration classes are available in MrChecker modules. They use the old way of registration. +Given that, three ways of migration are possible.

+
+
+

Migration with PageFactory class example (RECOMMENDED):

+
+
+
+migration06 +
+
+
+
+migration arrow down +
+
+
+
+migration07 +
+
+
+

Migration with calling addToTestExecutionObserver() method example:

+
+
+
+migration06 +
+
+
+
+migration arrow down +
+
+
+
+migration08 +
+
+
+

Migration with BasePageAutoRegistration class example:

+
+
+
+migration09 +
+
+
+
+migration arrow down +
+
+
+
+migration10 +
+
+
+
+
+

Test suites

+
+
+

Test suite migration example:

+
+
+
+migration11 +
+
+
+
+migration arrow down +
+
+
+
+migration12 +
+
+
+

Running tests from Maven:

+
+
+
+migration13 +
+
+
+
+migration arrow down +
+
+
+
+migration14 +
+
+
+
+
+

Concurrency

+
+
+

Junit5 provides native thread count and parallel execution control in contrast to Junit4 where it was controlled by the Maven Surefire plugin. +To enable concurrent test execution, the junit-platform.properties file needs to be placed in the test/resources directory of a project.

+
+
+

Exemplary file contents:

+
+
+
+migration15 +
+
+
+

A ready-to-use file can be found here.

+
+
+

MrChecker supports only concurrent test class execution. +@ResourceLock can be used to synchronize between classes if needed:

+
+
+
+migration16 +
+
+
+
+
+

Cucumber

+
+
+

If Cucumber is used in a project, it is necessary to change a hook class. +An exemplary hook source file for download can be found here.

+
+
+
+
+

Data driven tests

+
+
+

Junit5 implements a new approach to data driven tests by various data resolution mechanisms.

+
+
+

An example of method source parameters migration version one:

+
+
+
+migration17 +
+
+
+
+migration arrow down +
+
+
+
+migration18 +
+
+
+

An example of method source parameters migration version two:

+
+
+
+migration17 +
+
+
+
+migration arrow down +
+
+
+
+migration19 +
+
+
+

An example of method source in another class parameters migration:

+
+
+
+migration20 +
+
+
+
+migration arrow down +
+
+
+
+migration21 +
+
+
+

Providing parameters directly in annotations has no analogy in Junit5 and needs to be replaced with e.g. method source:

+
+
+
+migration22 +
+
+
+
+migration arrow down +
+
+
+
+migration23 +
+
+
+

An example of csv parameters source with no header line migration:

+
+
+
+migration24 +
+
+
+
+migration arrow down +
+
+
+
+migration25 +
+
+
+

An example of csv parameters source with the header line migration:

+
+
+
+migration26 +
+
+
+
+migration arrow down +
+
+
+
+migration27 +
+
+
+

An example of csv parameters source with object mapping migration step1:

+
+
+
+migration28 +
+
+
+
+migration arrow down +
+
+
+
+migration29 +
+
+
+

An example of csv parameters source with object mapping migration step 2:

+
+
+
+migration30 +
+
+
+
+migration arrow down +
+
+
+
+migration31 +
+
+
+
+
+

setUp() and tearDown()

+
+
+

BaseTest.setUp() and BaseTest.tearDown() methods are now not abstract and need no implementation in subclasses. @Override when a custom implementation is needed.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/MrChecker-download/Mac.html b/docs/mrchecker/1.0/MrChecker-download/Mac.html new file mode 100644 index 00000000..270a88d1 --- /dev/null +++ b/docs/mrchecker/1.0/MrChecker-download/Mac.html @@ -0,0 +1,390 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

MrChecker macOS installation

+
+
+

On this page, you can find all the details regarding MrChecker installation on your Mac.

+
+
+
+
+

Java installation

+
+
+

There is one important pre-requisite for Mr Checker installation - Java has to be installed on the computer and an environmental variable has to be set in order to obtain optimal functioning of the framework.

+
+
+
    +
  1. +

    Install Java 1.8 JDK 64bit

    +
    +

    Download and install Java download link

    +
    +
    +

    (To download JDK 8 from Oracle you have to have an account. It is recommended to get a JDK build based on OpenJDK from AdoptOpenJDK)

    +
    +
  2. +
  3. +

    Next, verify it in the command line:

    +
    +
    +
    > java --version
    +
    +
    +
  4. +
+
+
+
+
+

Other components installation

+
+
+

Install each component separately, or update the existing ones on your Mac.

+
+
+
    +
  1. +

    Maven 3.5

    +
    +
      +
    • +

      Download Maven

      +
    • +
    • +

      Unzip Maven in the following location /maven

      +
    • +
    • +

      Add Maven to PATH

      +
      +
      +
      > $ export PATH=$PATH:/maven/apache-maven-3.5.0/bin/
      +
      +
      +
    • +
    • +

      Verify in terminal:

      +
      +
      +
      > $ mvn -version
      +
      +
      +
    • +
    +
    +
  2. +
  3. +

    Eclipse IDE

    +
    +
      +
    • +

      Download and unzip Eclipse

      +
    • +
    • +

      Download MrCheckerTestFramework source code

      +
    • +
    • +

      Import:

      +
      +
      +image0009 +
      +
      +
    • +
    • +

      Select Projects from folders:

      +
      +

      image00010

      +
      +
    • +
    • +

      Open already created projects:

      +
      +

      image00011

      +
      +
    • +
    • +

      Update project structure - ALT + F5

      +
      +

      image00012

      +
      +
    • +
    +
    +
  4. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/MrChecker-download/MyThaiStar.html b/docs/mrchecker/1.0/MrChecker-download/MyThaiStar.html new file mode 100644 index 00000000..d23e6595 --- /dev/null +++ b/docs/mrchecker/1.0/MrChecker-download/MyThaiStar.html @@ -0,0 +1,318 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

My Thai Star application setup

+
+
+

My Thai Star is a reference application for DevonFW so it is used extensively in the majority of our examples. To make them run properly you should definitely set it up somewhere and configure environment.csv accordingly. +You can get the app from its official repository here https://github.com/devonfw/my-thai-star.

+
+
+
+
+

Setting up the My Thai Star app

+
+
+

Most of the important information is covered in https://github.com/devonfw/my-thai-star#deployment.

+
+
+
The quick summary would be:
+
    +
  1. +

    Get the machine with docker and docker-compose

    +
  2. +
  3. +

    Download the repository

    +
  4. +
  5. +

    Run docker-compose up

    +
  6. +
  7. +

    Go to your project to set up environment.csv

    +
  8. +
  9. +

    The variables we are interested in are MY_THAI_STAR_URL and MY_THAI_STAR_API_URL

    +
  10. +
  11. +

    If you set up the My Thai Star application on a different host, adjust the values accordingly

    +
  12. +
  13. +

    The web application should be available using localhost:8081/restaurant

    +
  14. +
  15. +

    The web API should be available using localhost:8081/api

    +
  16. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/MrChecker-download/Windows/Advanced-installation.html b/docs/mrchecker/1.0/MrChecker-download/Windows/Advanced-installation.html new file mode 100644 index 00000000..34145045 --- /dev/null +++ b/docs/mrchecker/1.0/MrChecker-download/Windows/Advanced-installation.html @@ -0,0 +1,425 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Advanced installation

+
+ +
+
+
+

Java installation

+
+
+

There is one important pre-requisite for Mr Checker installation - Java has to be installed on the computer and an environmental variable has to be set in order to obtain optimal functioning of the framework.

+
+
+
    +
  1. +

    Install Java 1.8 JDK 64bit

    +
    +

    Download and install Java download link

    +
    +
    +

    (To download JDK 8 from Oracle you have to have an account. It is recommended to get a JDK build based on OpenJDK from AdoptOpenJDK)

    +
    +
  2. +
  3. +

    Windows Local Environment - How to set:

    +
    +
      +
    • +

      Variable name: JAVA_HOME | Variable value: C:\Where_You’ve_Installed_Java

      +
    • +
    • +

      Variable name: PATH | Variable value: %JAVA_HOME%\bin;%JAVA_HOME%\lib

      +
      +
      +install win03 +
      +
      +
    • +
    +
    +
  4. +
  5. +

    Next, verify it in the command line:

    +
    +
    +
    > java --version
    +
    +
    +
  6. +
+
+
+
+
+

Other components installation

+
+
+

Install each component separately, or update the existing ones on your PC.

+
+
+
    +
  1. +

    Maven 3.5

    +
    +
      +
    • +

      Download Maven

      +
    • +
    • +

      Unzip Maven in the following location C:\maven

      +
    • +
    • +

      Set Windows Local Environment

      +
      +
        +
      • +

        Variable name: M2_HOME | Variable value: C:\maven\apache-maven-3.5.0

        +
      • +
      • +

        Variable name: PATH | Variable value: %M2_HOME%\bin

        +
        +
        +install win04 +
        +
        +
      • +
      +
      +
    • +
    • +

      Verify it in the command line:

      +
      +
      +
      > mvn --version
      +
      +
      +
    • +
    +
    +
  2. +
  3. +

    IDE

    +
    +
      +
    • +

      Download a most recent Eclipse

      +
    • +
    • +

      Download a MrChecker Project https://downgit.github.io//home?url=https://github.com/devonfw/mrchecker/tree/develop/template[Template] to start a new project or Mrchecker Project https://downgit.github.io//home?url=https://github.com/devonfw/mrchecker/tree/develop/example[Example] to get better understanding what we are capable of.

      +
    • +
    • +

      You should consider installing some useful plugins such as: csvedit, cucumber editor.

      +
    • +
    • +

      Import:

      +
      +
      +install win05 +
      +
      +
    • +
    • +

      Projects from folders

      +
      +
      +install win06 +
      +
      +
    • +
    • +

      Open already created projects:

      +
      +
      +install win07 +
      +
      +
    • +
    • +

      Update project structure - ALT + F5

      +
      +
      +install win08 +
      +
      +
    • +
    +
    +
  4. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/MrChecker-download/Windows/Easy-out-of-the-box.html b/docs/mrchecker/1.0/MrChecker-download/Windows/Easy-out-of-the-box.html new file mode 100644 index 00000000..2a2cbb69 --- /dev/null +++ b/docs/mrchecker/1.0/MrChecker-download/Windows/Easy-out-of-the-box.html @@ -0,0 +1,311 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Easy out of the Box

+
+
+
    +
  1. +

    Click on the link Ready to use MrChecker_Test_Environment for Junit4 or Ready to use MrChecker_Test_Environment for Junit5 and download the package

    +
  2. +
  3. +

    Unzip the downloaded MrChecker Test Framework into the folder C:\ on your PC - recommended tool: 7z All the necessary components, such as Eclipse, Java and Maven will be pre-installed for you. There is no need for any additional installations.

    +
    +

    Note: Please double check the place into which you have unzipped MrChecker_Test_Framework

    +
    +
  4. +
  5. +

    Go to folder C:\MrChecker_Test_Framework\ , into which Mr.Checker has been unzipped

    +
    +
    +install win01 +
    +
    +
  6. +
  7. +

    In order to run the program, double click on start-eclipse-with-java.bat

    +
    +

    (note that start-eclipse.bat won’t detect Java)

    +
    +
  8. +
  9. +

    Update project structure (ALT + F5)

    +
    +
    +install win02 +
    +
    +
  10. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/MrChecker-download/Windows/Out-of-the-box-installation.html b/docs/mrchecker/1.0/MrChecker-download/Windows/Out-of-the-box-installation.html new file mode 100644 index 00000000..deb729b0 --- /dev/null +++ b/docs/mrchecker/1.0/MrChecker-download/Windows/Out-of-the-box-installation.html @@ -0,0 +1,301 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Out of the box installation

+
+
+
    +
  1. +

    Start from Easy out of the box installation

    +
  2. +
  3. +

    Open Eclipse

    +
  4. +
  5. +

    Manually Delete folders that appear in Eclipse

    +
  6. +
  7. +

    Click inside Eclipse with a right mouse button and open Import

    +
  8. +
  9. +

    Select Maven → existing Maven project

    +
  10. +
  11. +

    Select Mr Checker → workspace → devonfw-testing and click OK

    +
  12. +
+
+
+

All test folders should be imported into Eclipse and ready to use.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-BDD-Gherkin-Cucumber-approach.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-BDD-Gherkin-Cucumber-approach.html new file mode 100644 index 00000000..630c0e3f --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-BDD-Gherkin-Cucumber-approach.html @@ -0,0 +1,587 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Overview

+
+
+

Cucumber / Selenium

+
+
+

Business and IT don’t always understand each other. Very often misunderstandings between business and IT result in the costly failure of IT projects. With this in mind, Cucumber was developed as a tool to support human collaboration between business and IT.

+
+
+

Cucumber uses executable specifications to encourage a close collaboration. This helps teams to keep the business goal in mind at all times. With Cucumber you can merge specification and test documentation into one cohesive whole, allowing your team to maintain one single source of truth. Because these executable specifications are automatically tested by Cucumber, your single source of truth is always up-to-date.

+
+
+
+image40 +
+
+
+

Cucumber supports testers when designing test cases. To automate these test cases, several languages can be used. Cucumber also works well with Browser Automation tools such as Selenium Webdriver.

+
+
+
+
+

== Selenium

+
+
+

Selenium automates browsers and is used for automating web applications for testing purposes. Selenium offers testers and developers full access to the properties of objects and the underlying tests, via a scripting environment and integrated debugging options.

+
+
+

Selenium consists of many parts. If you want to create robust, browser-based regression automation suites and tests, Selenium Webdriver is most appropriate. With Selenium Webdriver you can also scale and distribute scripts across many environments.

+
+
+
+
+

Strengths

+
+ +
+
+
+

== Supports BDD

+
+
+

Those familiar with Behavior Driven Development (BDD) recognize Cucumber as an excellent open source tool that supports this practice.

+
+
+
+
+

== All in one place

+
+
+

With Cucumber / Selenium you can automate at the UI level. Automation at the unit or API level can also be implemented using Cucumber. This means all tests, regardless of the level at which they are implemented, can be implemented in one tool.

+
+
+
+
+

== Maintainable test scripts

+
+
+

Many teams seem to prefer UI level automation, despite huge cost of maintaining UI level tests compared to the cost of maintaining API or unit tests. To lessen the maintenance of UI testing, when designing UI level functional tests, you can try describing the test and the automation at three levels: business rule, UI workflow, technical implementation.

+
+
+

When using Cucumber combined with Selenium, you can implement these three levels for better maintenance.

+
+
+
+
+

== Early start

+
+
+

Executable specifications can and should be written before the functionality is implemented. By starting early, teams get most return on investment from their test automation.

+
+
+
+
+

== Supported by a large community

+
+
+

Cucumber and Selenium are both open source tools with a large community, online resources and mailing lists.

+
+
+
+
+

How to run cucumber tests in Mr.Checker

+
+ +
+
+
+

Command line / Jenkins

+
+
+
    +
  • +

    Run cucumber tests and generate Allure report. Please use this for Jenkins execution. Report is saved under ./target/site.

    +
    +
    +
    mvn clean -P cucumber test site
    +
    +
    +
  • +
  • +

    Run and generate report

    +
    +
    +
    mvn clean -P cucumber test site allure:report
    +
    +
    +
  • +
  • +

    Run cucumber tests, generate Allure report and start standalone report server

    +
    +
    +
    mvn clean -P cucumber test site allure:serve
    +
    +
    +
  • +
+
+
+
+
+

Eclipse IDE

+
+
+
+image41 +
+
+
+
+
+

Tooling

+
+ +
+
+
+

== Cucumber

+
+
+

Cucumber supports over a dozen different software platforms. Every Cucumber implementation provides the same overall functionality, but they also have their own installation procedure and platform-specific functionality. See https://cucumber.io/docs for all Cucumber implementations and framework implementations.

+
+
+

Also, IDEs such as Intellij offer several plugins for Cucumber support.

+
+
+
+
+

== Selenium

+
+
+

Selenium has the support of some of the largest browser vendors who have taken (or are taking) steps to make Selenium a native part of their browser. It is also the core technology in countless other browser automation tools, APIs and frameworks.

+
+
+
+
+

Automation process

+
+ +
+
+
+

== Write a feature file

+
+
+

Test automation in Cucumber starts with writing a feature file. A feature normally consists of several (test)scenarios and each scenario consists of several steps.

+
+
+

Feature: Refund item

+
+
+

Scenario: Jeff returns a faulty microwave

+
+
+

Given Jeff has bought a microwave for $100

+
+
+

And he has a receipt

+
+
+

When he returns the microwave

+
+
+

Then Jeff should be refunded $100

+
+
+

Above example shows a feature “Refund item” with one scenario “Jeff returns a faulty microwave”. The scenario consists of four steps each starting with a key word (Given, And, When, Then).

+
+
+
+
+

== Implementing the steps

+
+
+

Next the steps are implemented. Assuming we use Java to implement the steps, the Java code will look something like this.

+
+
+
+
public class MyStepdefs \{
+
+	@Given("Jeff has bought a microwave for $(\d+)")
+
+	public void Jeff_has_bought_a_microwave_for(int amount) \{
+
+		// implementation can be plain java
+
+		// or selenium
+
+		driver.findElement(By.name("test")).sendKeys("This is an example\n");
+
+		driver.findElement(By.name("button")).click();// etc
+	}
+}
+
+
+
+

Cucumber uses an annotation (highlighted) to match the step from the feature file with the function implementing the step in the Java class. The name of the class and the function can be as the developer sees fit. Selenium code can be used within the function to automate interaction with the browser.

+
+
+
+
+

== Running scenarios

+
+
+

There are several ways to run scenarios with Cucumber, for example the JUnit runner, a command line runner and several third party runners.

+
+
+
+
+

== Reporting test results

+
+
+

Cucumber can report results in several different formats, using formatter plugins

+
+
+
+
+

Features

+
+ +
+
+
+

== Feature files using Gherkin

+
+
+

Cucumber executes your feature files. As shown in the example below, feature files in Gherkin are easy to read so they can be shared between IT and business. Data tables can be used to execute a scenario with different inputs.

+
+
+
+image42 +
+
+
+
+
+

== Organizing tests

+
+
+

Feature files are placed in a directory structure and together form a feature tree.

+
+
+

Tags can be used to group features based on all kinds of categories. Cucumber can include or exclude tests with certain tags when running the tests.

+
+
+
+
+

Reporting test results

+
+
+

Cucumber can report results in several formats, using formatter plugins. +Not supported option by Shared Services: The output from Cucumber can be used to present test results in Jenkins or Hudson depending of the preference of the project.

+
+
+
+image43 +
+
+
+
+
+

HOW IS Cucumber / Selenium USED AT Capgemini?

+
+ +
+
+
+

Tool deployment

+
+
+

Cucumber and Selenium are chosen as one of Capgemini’s test automation industrial tools. We support the Java implementation of Cucumber and Selenium Webdriver. We can help with creating Cucumber, Selenium projects in Eclipse and IntelliJ.

+
+
+
+
+

Application in ATaaS (Automated Testing as a Service)

+
+
+

In the context of industrialisation, Capgemini has developed a range of services to assist and support the projects in process and tools implementation.

+
+
+

In this context a team of experts assists projects using test automation.

+
+
+

The main services provided by the center of expertise are:

+
+
+
    +
  • +

    Advise on the feasibility of automation.

    +
  • +
  • +

    Support with installation.

    +
  • +
  • +

    Coaching teams in the use of BDD.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Data-driven-approach.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Data-driven-approach.html new file mode 100644 index 00000000..3f13922a --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Data-driven-approach.html @@ -0,0 +1,550 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Data driven approach

+
+
+

Data driven approach - External data driven

+
+
+

External data driven - Data as external file injected in test case

+
+
+

Test case - Categorize functionality and severity

+
+
+

You can find more information about data driven here and here

+
+
+

There are a few ways to define parameters for tests.

+
+
+
+
+

Internal Data driven approach

+
+
+

Data as part of test case

+
+
+

The different means to pass in parameters are shown below.

+
+
+

Since mrchecker-core-module version 5.6.2.1

+
+
+

Static methods are used to provide the parameters.

+
+
+
+
+

A method in the test class:

+
+
+
+
@ParameterizedTest
+@MethodSource("argumentsStream")
+
+
+
+

OR

+
+
+
+
@ParameterizedTest
+@MethodSource("arrayStream")
+
+
+
+

In the first case the arguments are directly mapped to the test method parameters. In the second case the array is passed as the argument.

+
+
+
+image30 new +
+
+
+
+
+

A method in a different class:

+
+
+
+
@ParameterizedTest
+@MethodSource("com.capgemini.mrchecker.core.datadriven.MyContainsTestProvider#provideContainsTrueParameters")
+
+
+
+
+image32 new +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1

+
+
+

Parameters that are passed into tests using the @Parameters annotation must be _Object[]_s

+
+
+
+
+

In the annotation:

+
+
+
+
@Parameters({"1, 2, 3", "3, 4, 7", "5, 6, 11", "7, 8, 15"})
+
+
+
+
+image30 +
+
+
+

The parameters must be primitive objects such as integers, strings, or booleans. Each set of parameters is contained within a single string and will be parsed to their correct values as defined by the test method’s signature.

+
+
+
+
+

In a method named in the annotation:

+
+
+
+
@Parameters(method = "addParameters")
+
+
+
+
+image31 +
+
+
+

A separate method can be defined and referred to for parameters. This method must return an Object[] and can contain normal objects.

+
+
+
+
+

In a class:

+
+
+
+
@Parameters(source = MyContainsTestProvider.class)
+
+
+
+
+image32 +
+
+
+

A separate class can be used to define parameters for the test. This class must contain at least one static method that returns an Object[], and its name must be prefixed with provide. The class could also contain multiple methods that provide parameters to the test, as long as they also meet the required criteria.

+
+
+
+
+

External Data Driven

+
+
+

Data as external file injected in test case

+
+
+

Since mrchecker-core-module version 5.6.2.1

+
+
+

Tests use the annotation @CsvFileSource to inject CSVs file.

+
+
+
+
@CsvFileSource(resources = "/datadriven/test.csv", numLinesToSkip = 1)
+
+
+
+

A CSV can also be used to contain the parameters for the tests. It is pretty simple to set up, as it’s just a comma-separated list.

+
+
+
+
+

Classic CSV

+
+
+
+image33 new +
+
+
+

and CSV file structure

+
+
+
+image34 +
+
+
+
+
+

CSV with headers

+
+
+
+image35 new +
+
+
+

and CSV file structure

+
+
+
+image36 +
+
+
+
+
+

CSV with specific column mapper

+
+
+
+image37 new +
+
+
+

and Mapper implementation

+
+
+
+image38 new +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1

+
+
+

Tests use the annotation @FileParameters to inject CSVs file.

+
+
+
+
@FileParameters("src/test/resources/datadriven/test.csv")
+
+
+
+

A CSV can also be used to contain the parameters for the tests. It is pretty simple to set up, as it’s just a comma-separated list.

+
+
+
+
+

Classic CSV

+
+
+
+image33 +
+
+
+

and CSV file structure

+
+
+
+image34 +
+
+
+
+
+

CSV with headers

+
+
+
+image35 +
+
+
+

and CSV file structure

+
+
+
+image36 +
+
+
+
+
+

CSV with specific column mapper

+
+
+
+image37 +
+
+
+

and Mapper implementation

+
+
+
+image38 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Different-Environments.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Different-Environments.html new file mode 100644 index 00000000..9ffd767d --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Different-Environments.html @@ -0,0 +1,513 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

System under test environments

+
+
+
+image080 +
+
+
+
    +
  • +

    Quality assurance or QA is a way of preventing mistakes or defects in the manufactured products and avoiding problems when delivering solutions or services to customers which ISO 9000 defines as "part of quality management focused on providing confidence that quality requirements will be fulfilled".

    +
  • +
  • +

    System integration testing or SIT is a high-level software testing process in which testers verify that all related systems maintain data integrity and can operate in coordination with other systems in the same environment. The testing process ensures that all sub-components are integrated successfully to provide expected results.

    +
  • +
  • +

    Development or Dev testing is performed by the software developer or engineer during the construction phase of the software development life-cycle. Rather than replace traditional QA focuses, it augments it. Development testing aims to eliminate construction errors before code is promoted to QA; this strategy is intended to increase the quality of the resulting software as well as the efficiency of the overall development and QA process.

    +
  • +
  • +

    Prod If the customer accepts the product, it is deployed to a production environment, making it available to all users of the system.

    +
  • +
+
+
+
+image051 +
+
+
+
+
+

How to use system environment

+
+
+

In Page classes, when you load / start a web page, it is uncommon to save a fixed main url.

+
+
+

Value flexibility is a must, when your web application under test has a different main url, depending on the environment (DEV, QA, SIT, …​, PROD)

+
+
+

Instead of hard coded main url variable, you build your Page classes with dynamic variable.

+
+
+

An example of dynamic variable GetEnvironmentParam.WWW_FONT_URL

+
+
+
+image081 +
+
+
+
+
+

How to create / update system environment

+
+ +
+
+
+

External file with variable values

+
+
+

Dynamic variable values are stored under mrchecker-app-under-test\src\resources\enviroments\environments.csv.

+
+
+

NOTE: As environments.csv is a comma-separated file, please be careful while editing and then save it under Excel.

+
+
+
+image082 +
+
+
+
+
+

Encrypting sensitive data

+
+
+

Some types of data you might want to store as environment settings are sensitive in nature (e.g. passwords). You might not want to store them (at least not in their plaintext form) in your repository. To be able to encrypt sensitive data you need to do following:

+
+
+
    +
  1. +

    Create a secret (long, random chain of characters) and store it under mrchecker-app-under-test\src\resources\secretData.txt. Example: LhwbTm9V3FUbBO5Tt5PiTUEQrXGgWrDLCMthnzLKNy1zA5FVTFiTdHRQAyPRIGXmsAjPUPlJSoSLeSBM

    +
  2. +
  3. +

    Exclude the file from being checked into the git repository by adding it to git.ignore. You will need to pass the file over a different channel among your teammates.

    +
  4. +
  5. +

    Encrypt the values before putting them into the environments.csv file by creating following script (put the script where your jasypt library resides, e.g. C:\MrChecker_Test_Framework\m2\repository\org\jasypt\jasypt\1.9.2):

    +
  6. +
+
+
+
+
@ECHO OFF
+
+set SCRIPT_NAME=encrypt.bat
+set EXECUTABLE_CLASS=org.jasypt.intf.cli.JasyptPBEStringEncryptionCLI
+set EXEC_CLASSPATH=jasypt-1.9.2.jar
+if "%JASYPT_CLASSPATH%" ==  "" goto computeclasspath
+set EXEC_CLASSPATH=%EXEC_CLASSPATH%;%JASYPT_CLASSPATH%
+
+:computeclasspath
+IF "%OS%" ==  "Windows_NT" setlocal ENABLEDELAYEDEXPANSION
+FOR %%c in (%~dp0..\lib\*.jar) DO set EXEC_CLASSPATH=!EXEC_CLASSPATH!;%%c
+IF "%OS%" ==  "Windows_NT" setlocal DISABLEDELAYEDEXPANSION
+
+set JAVA_EXECUTABLE=java
+if "%JAVA_HOME%" ==  "" goto execute
+set JAVA_EXECUTABLE="%JAVA_HOME%\bin\java"
+
+:execute
+%JAVA_EXECUTABLE% -classpath %EXEC_CLASSPATH% %EXECUTABLE_CLASS% %SCRIPT_NAME% %*
+
+
+
+
    +
  1. +

    Encrypt the values by calling

    +
  2. +
+
+
+
+
.\encrypt.bat input=someinput password=secret
+
+----ENVIRONMENT-----------------
+
+Runtime: Oracle Corporation Java HotSpot(TM) 64-Bit Server VM 25.111-b14
+
+
+
+----ARGUMENTS-------------------
+
+input: someinput
+password: secret
+
+
+
+----OUTPUT----------------------
+
+JN3nOFol2GMZoUxR5z2wI2qdipcNH1UD
+
+
+
+
    +
  1. +

    Mark the value as encrypted by adding a prefix 'ENC(' and suffix ')' like: ENC(JN3nOFol2GMZoUxR5z2wI2qdipcNH1UD)

    +
  2. +
+
+
+
+image083 +
+
+
+
+
+

Bridge between external file and Page class

+
+
+

To map values from external file with Page class you ought to use class GetEnvironmentParam

+
+
+

Therefore when you add new variable (row) in environments.csv you might need to add this variable to GetEnvironmentParam.

+
+
+
+image084 +
+
+
+
+
+

Run test case with system environment

+
+
+

To run test case with system environment, please use: +* -Denv=\<NameOfEnvironment\> +* \<NameOfEnvironment\> is taken as column name from file mrchecker-app-under-test\src\test\resources\enviroments\environments.csv

+
+
+

Since mrchecker-core-module version 5.6.2.1 +== Command Line

+
+
+
+
mvn test site -Dgroups=RegistryPageTestTag -Denv=DEV
+
+
+
+
+
+

Eclipse

+
+
+
+image085 +
+
+
+
+image086 new +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1 +== Command Line

+
+
+
+
mvn test site -Dtest=RegistryPageTest -Denv=DEV
+
+
+
+
+
+

Eclipse

+
+
+
+image085 +
+
+
+
+image086 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Externalize-test-environment-DEV-QA-SIT-PROD.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Externalize-test-environment-DEV-QA-SIT-PROD.html new file mode 100644 index 00000000..abbbe76f --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Externalize-test-environment-DEV-QA-SIT-PROD.html @@ -0,0 +1,476 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

System under test environments

+
+
+
+image44 +
+
+
+
    +
  • +

    Quality assurance or QA is a way of preventing mistakes or defects in manufactured products and avoiding problems when delivering solutions or services to customers; which ISO 9000 defines as "part of quality management focused on providing confidence that quality requirements will be fulfilled".

    +
  • +
  • +

    System integration testing or SIT is a high-level software testing process in which testers verify that all related systems maintain data integrity and can operate in coordination with other systems in the same environment. The testing process ensures that all sub-components are integrated successfully to provide expected results.

    +
  • +
  • +

    Development or Dev testing is performed by the software developer or engineer during the construction phase of the software development life-cycle. Rather than replace traditional QA focuses, it augments it. Development testing aims to eliminate construction errors before code is promoted to QA; this strategy is intended to increase the quality of the resulting software as well as the efficiency of the overall development and QA process.

    +
  • +
  • +

    Prod If the customer accepts the product, it is deployed to a production environment, making it available to all users of the system.

    +
  • +
+
+
+
+image45 +
+
+
+
+
+

How to use system environment

+
+
+

In Page classes, when you load / start a web page, it is uncommon to save a fixed main url.

+
+
+

Value flexibility is a must, when your web application under test has a different main url, depending on the environment (DEV, QA, SIT, …​, PROD)

+
+
+

Instead of a hard-coded main url variable, you build your Page classes with a dynamic variable.

+
+
+

Example of dynamic variable GetEnvironmentParam.WWW_FONT_URL

+
+
+
+image46 +
+
+
+
+
+

How to create / update system environment

+
+ +
+
+
+

External file with variable values

+
+
+

Dynamic variable values are stored under path mrchecker-app-under-test\src\resources\enviroments\environments.csv.

+
+
+

NOTE: As environments.csv is a comma-separated file, please be careful while editing and then save it under Excel.

+
+
+
+image47 +
+
+
+
+
+

Encrypting sensitive data

+
+
+

Some types of data you might want to store as environment settings are sensitive in nature (e.g. passwords). You might not want to store them (at least not in their plaintext form) in your repository. To be able to encrypt sensitive data you need to do following:

+
+
+
    +
  1. +

    Create a secret (long, random chain of characters) and store it under mrchecker-app-under-test\src\resources\secretData.txt. Example: LhwbTm9V3FUbBO5Tt5PiTUEQrXGgWrDLCMthnzLKNy1zA5FVTFiTdHRQAyPRIGXmsAjPUPlJSoSLeSBM

    +
  2. +
  3. +

    Exclude the file from being checked into the git repository by adding it to git.ignore. You will need to pass the file over a different channel among your teammates.

    +
  4. +
  5. +

    Encrypt the values before putting them into the environments.csv file by creating following script (put the script where your jasypt library resides, e.g. C:\MrChecker_Test_Framework\m2\repository\org\jasypt\jasypt\1.9.2):

    +
    +
    +
    @ECHO OFF
    +
    +set SCRIPT_NAME=encrypt.bat
    +set EXECUTABLE_CLASS=org.jasypt.intf.cli.JasyptPBEStringEncryptionCLI
    +set EXEC_CLASSPATH=jasypt-1.9.2.jar
    +if "%JASYPT_CLASSPATH%" ==  "" goto computeclasspath
    +set EXEC_CLASSPATH=%EXEC_CLASSPATH%;%JASYPT_CLASSPATH%
    +
    +:computeclasspath
    +IF "%OS%" ==  "Windows_NT" setlocal ENABLEDELAYEDEXPANSION
    +FOR %%c in (%~dp0..\lib\*.jar) DO set EXEC_CLASSPATH=!EXEC_CLASSPATH!;%%c
    +IF "%OS%" ==  "Windows_NT" setlocal DISABLEDELAYEDEXPANSION
    +
    +set JAVA_EXECUTABLE=java
    +if "%JAVA_HOME%" ==  "" goto execute
    +set JAVA_EXECUTABLE="%JAVA_HOME%\bin\java"
    +
    +:execute
    +%JAVA_EXECUTABLE% -classpath %EXEC_CLASSPATH% %EXECUTABLE_CLASS% %SCRIPT_NAME% %*
    +
    +
    +
  6. +
  7. +

    Encrypt the values by calling

    +
    +
    +
    .\encrypt.bat input=someinput password=secret
    +
    +----ENVIRONMENT-----------------
    +
    +Runtime: Oracle Corporation Java HotSpot(TM) 64-Bit Server VM 25.111-b14
    +
    +
    +
    +----ARGUMENTS-------------------
    +
    +input: someinput
    +password: secret
    +
    +
    +
    +----OUTPUT----------------------
    +
    +JN3nOFol2GMZoUxR5z2wI2qdipcNH1UD
    +
    +
    +
  8. +
  9. +

    Mark the value as encrypted by adding a prefix 'ENC(' and suffix ')' like: ENC(JN3nOFol2GMZoUxR5z2wI2qdipcNH1UD)

    +
    +
    +image48 +
    +
    +
  10. +
+
+
+
+
+

Bridge between external file and Page class

+
+
+

To map values from external file with Page class you ought to use class GetEnvironmentParam.

+
+
+

Therefore when you add new variable (row) in environments.csv you might need to add this variable to GetEnvironmentParam.

+
+
+
+image49 +
+
+
+
+
+

Run test case with system environment

+
+
+

To run test case with system environment, please use:

+
+
+
    +
  • +

    -Denv=<NameOfEnvironment>

    +
  • +
  • +

    <NameOfEnvironment> is taken as column name from file mrchecker-app-under-test\src\test\resources\enviroments\environments.csv

    +
  • +
+
+
+
+
+

Command Line

+
+
+
+
mvn test site -Dtest=RegistryPageTest -Denv=DEV
+
+
+
+
+
+

Eclipse

+
+
+
+image50 +
+
+
+
+image51 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Framework-Test-Class.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Framework-Test-Class.html new file mode 100644 index 00000000..9dfb8a2e --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Framework-Test-Class.html @@ -0,0 +1,416 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Test Class

+
+ +
+
+
+

Overview

+
+
+

The following image gives a general overview of a test class "lifecycle".

+
+
+
+image52 +
+
+
+

More information on the methods and annotations used in this image can be found in the following chapter.

+
+
+
+
+

Methods and annotations

+
+
+

The actual tests that will be executed are located in the so-called Test Classes. Starting a new project, a new package should be created.

+
+
+

Source folder: mrchecker-app-under-test/src/test/java

+
+
+

Name: com.example.selenium.tests.tests.YOUR_PROJECT

+
+
+

Test classes have to extend the BaseTest class.

+
+
+
+
public class DemoTest extends BaseTest {
+
+	@Override
+	public void setUp() {
+
+	}
+
+	@Override
+	public void tearDown() {
+
+	}
+}
+
+
+
+
+
+

BasePage method: setUp

+
+
+

This method will be executed before the test. It allows objects to be instantiated, e.g. Page objects.

+
+
+
+
@Override
+public void setUp() {
+	someTestPage = new SomeTestPage();
+}
+
+
+
+
+
+

BasePage method: tearDown

+
+
+

The tearDown method executes after the test. It allows the clean-up of the testing environment.

+
+
+
+
+

Annotations

+
+
+

The @Test annotation indicates that the following method is a test method.

+
+
+

Additionally, there are two annotations that can help preparing and disassembling the test class: @BeforeClass and @AfterClass.

+
+
+

@BeforeClass will execute the following method once at the beginning, before running any test method. Compared to the setUp() method provided by the BaseTest class, this annotation will only run once, instead of before every single test method. The advantage here: things like login can be set up in @BeforeClass, as they can often be very time consuming. Logging in on a web application once and afterwards running all the test methods is more efficient than logging in before every test method, especially if they are being executed on the same page.

+
+
+

@AfterClass will execute after the last test method. Just like @BeforeClass this method will only run once, in contrary to the tearDown() method.

+
+
+

Initialize a new test method by using the @Test annotation.

+
+
+
+
@Test
+public void willResultBeShown() {
+
+}
+
+
+
+

This method will interact with a page object in order to test it.

+
+
+
+
+

Sample Setup

+
+
+
+
@BeforeClass
+public static void setUpBeforeClass() throws Exception {
+	BFLogger.logInfo("[Step1] Login as Account Administrator");
+}
+
+@AfterClass
+public static void tearDownAfterClass() throws Exception {
+	BFLogger.logInfo("[Step4] Logout");
+}
+
+@Override
+public void setUp() {
+	BFLogger.logInfo("Open home page before each test");
+}
+
+@Override
+public void tearDown() {
+	BFLogger.logInfo("Clean all data updated while executing each test");
+}
+
+@Test
+public void test1() {
+	BFLogger.logInfo("[Step2] Filter by \"Creation Date\" - Descending");
+	BFLogger.logInfo("[Step3] Set $1 for first 10 Users in column \"Invoice to pay\"");
+
+}
+
+@Test
+public void test2() {
+	BFLogger.logInfo("[Step2] Filter by \"Invoice to pay\" - Ascending");
+	BFLogger.logInfo("[Step3] Set $100 for first 10 Users in column \"Invoice to pay\"");
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Run-on-independent-Operating-Systems.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Run-on-independent-Operating-Systems.html new file mode 100644 index 00000000..711cf745 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Run-on-independent-Operating-Systems.html @@ -0,0 +1,324 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Run on independent Operating Systems

+
+
+

As the E2E Allure test framework is built on top of:

+
+
+
    +
  • +

    Java 1.8

    +
  • +
  • +

    Maven 3.3

    +
  • +
+
+
+

This guarantees portability to all operating systems.

+
+
+

E2E Allure test framework can run on OS:

+
+
+
    +
  • +

    Windows,

    +
  • +
  • +

    Linux and

    +
  • +
  • +

    Mac.

    +
  • +
+
+
+

Test creation and maintenance in E2E Allure test framework can be done with any type of IDE:

+
+
+
    +
  • +

    Eclipse,

    +
  • +
  • +

    IntelliJ,

    +
  • +
  • +

    WebStorm,

    +
  • +
  • +

    Visual Studio Code,

    +
  • +
  • +

    many more that support Java + Maven.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Test-case-parallel-execution.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Test-case-parallel-execution.html new file mode 100644 index 00000000..8e3b1da0 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Test-case-parallel-execution.html @@ -0,0 +1,318 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

What is "Parallel test execution" ?

+
+
+

Parallel test execution means many "Test Classes" can run simultaneously.

+
+
+

"Test Class", as this is a Junit Test class, it can have one or more test cases - "Test case methods"

+
+
+
+image39 +
+
+
+
+
+

How many parallel test classes can run simultaneously?

+
+
+

Since mrchecker-core-module version 5.6.2.1

+
+
+

JUnit5 supports parallelism natively. The feature is configured using a property file located at src\test\resources\junit-platform.properties. +As per default configuration, concurrent test execution is set to run test classes in parallel using the thread count equal to a number of your CPUs.

+
+
+
+image39a +
+
+
+

Visit JUnit5 site to learn more about parallel test execution.

+
+
+

Prior to mrchecker-core-module version 5.6.2.1

+
+
+

By default, number of parallel test classes is set to 8.

+
+
+

It can be updated as you please, on demand, by command line:

+
+
+
+
mvn test site -Dtest=TS_Tag1 -Dthread.count=16
+
+
+
+

-Dthread.count=16 - increase number of parallel Test Class execution to 16.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Test-reports-with-logs-and-or-screenshots.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Test-reports-with-logs-and-or-screenshots.html new file mode 100644 index 00000000..a248a59a --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-Test-reports-with-logs-and-or-screenshots.html @@ -0,0 +1,518 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Allure Logger → BFLogger

+
+
+

In Allure E2E Test Framework you have ability to use and log any additional information crucial for:

+
+
+
    +
  • +

    test steps

    +
  • +
  • +

    test execution

    +
  • +
  • +

    page object actions, and many more.

    +
  • +
+
+
+
+
+

Where to find saved logs

+
+
+

Each piece of logged information is saved in a separate test file, as a result of parallel test execution.

+
+
+

The places they are saved:

+
+
+
    +
  1. +

    In test folder C:\Allure_Test_Framework\allure-app-under-test\logs

    +
  2. +
  3. +

    In every Allure Test report, logs are always embedded as an attachment, according to test run.

    +
  4. +
+
+
+
+
+

How to use logger:

+
+
+
    +
  • +

    Start typing

    +
    +

    BFLogger

    +
    +
  • +
  • +

    Then type . (dot)

    +
  • +
+
+
+
+
+

Type of logger:

+
+
+
    +
  • +

    BFLogger.logInfo("Your text") - used for test steps

    +
  • +
  • +

    BFLogger.logDebug("Your text") - used for non-official information, either during the test build process or in Page Object files

    +
  • +
  • +

    BFLogger.logError("Your text") - used to emphasize critical information

    +
  • +
+
+
+
+image13 +
+
+
+

Console output:

+
+
+
+image14 +
+
+
+
+
+

Allure Reports

+
+
+
+image15 +
+
+
+

Allure is a tool designed for test reports.

+
+
+
+
+

Generate report - command line

+
+
+

You can generate a report using one of the following commands:

+
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+
+
mvn test allure:serve -Dgroups=TestsTag1
+
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+
mvn test allure:serve -Dtest=TS_Tag1
+
+
+
+

A report will be generated into temp folder. Web server with results will start. You can additionally configure the server timeout. The default value is "3600" (one hour).

+
+
+

System property allure.serve.timeout.

+
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+
+
mvn test allure:report -Dgroups=TestsTag1
+
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+
mvn test allure:report -Dtest=TS_Tag1
+
+
+
+

A report will be generated to the directory: target/site/allure-maven/index.html

+
+
+

NOTE: Please open index.html file under Firefox. Chrome has some limitations to presenting dynamic content. If you want to open a report with a Chromium based Web Browser, you need to launch it first with --allow-file-access-from-files argument.

+
+
+
+
+

Generate report - Eclipse

+
+
+

A report is created here allure-app-under-test\target\site\allure-report\index.html

+
+
+

NOTE: Please open index.html file under Firefox. Chrome has some limitations to presenting dynamic content. If you want to open a report with a Chromium based Web Browser, you need to launch it first with --allow-file-access-from-files argument.

+
+
+
+image17 +
+
+
+
+image18 +
+
+
+
+
+

Generate report - Jenkins

+
+
+

In our case, we’ll use the Allure Jenkins plugin. When integrating Allure in a Jenkins job configuration, we’ll have direct access to the build’s test report.

+
+
+
+image19 +
+
+
+

There are several ways to access the Allure Test Reports:

+
+
+
    +
  • +

    Using the "Allure Report" button on the left navigation bar or center of the general job overview

    +
  • +
  • +

    Using the "Allure Report" button on the left navigation bar or center of a specific build overview

    +
  • +
+
+
+

Afterwards you’ll be greeted with either the general Allure Dashboard (showing the newest build) or the Allure Dashboard for a specific (older) build.

+
+
+
+
+

Allure dashboard

+
+
+
+image20 +
+
+
+

The Dashboard provides a graphical overview on how many test cases were successful, failed or broken.

+
+
+
    +
  • +

    Passed means, that the test case was executed successfully.

    +
  • +
  • +

    Broken means, that there were mistakes, usually inside of the test method or test class. As tests are being treated as code, broken code has to be expected, resulting in occasionally broken test results.

    +
  • +
  • +

    Failed means that an assertion failed.

    +
  • +
+
+
+
+
+

Defects

+
+
+

The defects tab lists out all the defects that occurred, and also descriptions thereof. Clicking on a list item displays the test case which resulted in an error. Clicking on a test case allows the user to have a look at the test case steps, as well as Log files or Screenshots of the failure.

+
+
+
+
+

Graph

+
+
+

The graph page includes a pie chart of all tests, showing their result status (failed, passed, etc.). Another graph allows insight into the time elapsed during the tests. This is a very useful information to find and eliminate possible bottlenecks in test implementations.

+
+
+
+image21 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-test-groups-tags.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-test-groups-tags.html new file mode 100644 index 00000000..cbe7bbd0 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module-test-groups-tags.html @@ -0,0 +1,602 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Why join Test Cases in groups - Test Suites

+
+
+
+image22 +
+
+
+
+
+

Regression Suite:

+
+
+

Regression testing is a type of software testing which verifies that software which was previously developed and tested still performs the same way after it was changed or interfaced with another software.

+
+
+
    +
  • +

    Smoke

    +
  • +
  • +

    Business vital functionalities

    +
  • +
  • +

    Full scope of test cases

    +
  • +
+
+
+
+
+

Functional Suite:

+
+
+
    +
  • +

    Smoke

    +
  • +
  • +

    Business function A

    +
  • +
  • +

    Business function B

    +
  • +
+
+
+
+
+

Single Responsibility Unit:

+
+
+
    +
  • +

    Single page

    +
  • +
  • +

    Specific test case

    +
  • +
+
+
+
+
+

How to build a Test Suite based on tags

+
+ +
+
+
+

Structure of the Test Suite

+
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+
+image23 new +
+
+
+

Where:

+
+
+
    +
  • +

    @RunWith(JUnitPlatform.class) - use Junit5 runner

    +
  • +
  • +

    @IncludeTags({"TestsTag1"}) - search all test files with the tag "TestsTag1"

    +
  • +
  • +

    @ExcludeTags({"TagToExclude"}) - exclude test files with the tag "TagToExclude"

    +
  • +
  • +

    @SelectPackages("com.capgemini.mrchecker.core.groupTestCases.testCases") - search only test files in "com.capgemini.mrchecker.core.groupTestCases.testCases" package

    +
  • +
  • +

    public class TS_Tag1 - the name of the Test Suite is "TS_Tag1"

    +
  • +
+
+
+

Most commonly used filters to build a Test Suite are ones using:

+
+
+
    +
  • +

    @IncludeTags({ })

    +
  • +
  • +

    @ExcludeTags({ })

    +
  • +
+
+
+

Example:

+
+
+
    +
  1. +

    @IncludeTags({ "TestsTag1" }) , @ExcludeTags({ }) → will execute all test cases with the tag TestsTag1

    +
  2. +
  3. +

    @IncludeTags({ "TestsTag1" }) , @ExcludeTags({ "SlowTest" }) → will execute all test cases with tag "TestsTag1" although it will exclude from this list the test cases with the tag "SlowTest"

    +
  4. +
  5. +

    @IncludeTags({ }) , @ExcludeTags({ "SlowTest" }) → It will exclude test cases with the tag "SlowTest"

    +
  6. +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+image23 +
+
+
+

Where:

+
+
+
    +
  • +

    @RunWith(WildcardPatternSuiteBF.class) - search for test files under /src/test/java

    +
  • +
  • +

    @IncludeCategories({ TestsTag1.class }) - search for all test files with the tag "TestsTag1.class"

    +
  • +
  • +

    @ExcludeCategories({ }) - exclude test files. In this example, there is no exclusion

    +
  • +
  • +

    @SuiteClasses({ "**/*Test.class" }) - search only test files, where the file name ends with "<anyChar/s>Test.class"

    +
  • +
  • +

    public class TS_Tag1 - the name of the Test Suite is "TS_Tag1"

    +
  • +
+
+
+

Most commonly used filters to build Test Suite are ones using:

+
+
+
    +
  • +

    @IncludeCategories({ })

    +
  • +
  • +

    @ExcludeCategories({ })

    +
  • +
+
+
+

Example:

+
+
+
    +
  1. +

    @IncludeCategories({ TestsTag1.class }) , @ExcludeCategories({ }) → will execute all test cases with the tag TestsTag1.class

    +
  2. +
  3. +

    @IncludeCategories({ TestsTag1.class }) , @ExcludeCategories({ SlowTest.class }) → will execute all test cases with the tag "TestsTag1.class" although it will exclude from this list the test cases with the tag "SlowTest.class"

    +
  4. +
  5. +

    @IncludeCategories({ }) , @ExcludeCategories({ SlowTest.class }) → will execute all test cases from /src/test/java, although it will exclude from this list the test cases with the tag "SlowTest.class"

    +
  6. +
+
+
+
+
+

Structure of Test Case

+
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+
+image24 new +
+
+
+

Where:

+
+
+
    +
  • +

    @TestsTag1, @TestsSmoke, @TestsSelenium - list of tags assigned to this test case - "TestsTag1, TestsSmoke, TestSelenium" annotations

    +
  • +
  • +

    public class FristTest_tag1_Test - the name of the test case is "FristTest_tag1_Test"

    +
  • +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+image24 +
+
+
+

Where:

+
+
+
    +
  • +

    @Category({ TestsTag1.class, TestsSmoke.class, TestSelenium.class }) - list of tags / categories assigned to this test case - "TestsTag1.class, TestsSmoke.class, TestSelenium.class"

    +
  • +
  • +

    public class FristTest_tag1_Test - the name of the test case is "FristTest_tag1_Test"

    +
  • +
+
+
+
+
+

Structure of Tags / Categories

+
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+

Tag name: TestsTag1 annotation

+
+
+
+image25 new +
+
+
+

Tag name: TestsSmoke annotation

+
+
+
+image26 new +
+
+
+

Tag name: TestSelenium annotation

+
+
+
+image27 new +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+

Tag name: TestsTag1.class

+
+
+
+image25 +
+
+
+

Tag name: TestsSmoke.class

+
+
+
+image26 +
+
+
+

Tag name: TestSelenium.class

+
+
+
+image27 +
+
+
+
+
+

How to run Test Suite

+
+
+

To run a Test Suite you perform the same steps as you do to run a test case

+
+
+

Command line

+
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+

JUnit5 disallows running suite classes from maven. Use -Dgroups=Tag1,Tag2 and -DexcludeGroups=Tag4,Tag5 to create test suites in maven.

+
+
+
+
mvn test site -Dgroups=TestsTag1
+
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+
mvn test site -Dtest=TS_Tag1
+
+
+
+

Eclipse

+
+
+
+image28 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module.html new file mode 100644 index 00000000..1d91965d --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Core-Test-Module.html @@ -0,0 +1,331 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Core Test Module

+
+ +
+
+
+

What is Core Test Module

+
+
+
+image1 new +
+
+
+
+ +
+

How to start?

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Database-Test-Module.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Database-Test-Module.html new file mode 100644 index 00000000..95afb442 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Database-Test-Module.html @@ -0,0 +1,311 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Database Test Module

+
+ +
+
+
+

What is MrChecker Database Test Module

+
+
+

Database module is based on Object-Relational Mapping programming technique. All functionalities are built using Java Persistence API but examples use Hibernate as a main provider.

+
+
+
+
+

JPA structure schema

+
+
+

This module was written to allow the use of any JPA provider. The structure is represented in the schema below.

+
+
+
+image3 +
+
+
+
+
+

ORM representation applied in Framework

+
+
+
+image4 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Continuous-Delivery-CD.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Continuous-Delivery-CD.html new file mode 100644 index 00000000..db06cef2 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Continuous-Delivery-CD.html @@ -0,0 +1,323 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Continuous Delivery

+
+
+

Include quality with Continuous Delivery during product release.

+
+
+
+image87 +
+
+
+
+
+

Overview

+
+
+

CD, from Jenkins' point of view, does not differ much from the Continuous Integration setup.

+
+
+
+
+

Jenkins Overview

+
+
+

Use the same Jenkins settings for Jenkins CD setup as for CI, please. link. The only difference is:

+
+
+
    +
  • +

    What type of test you will execute. Before, we have been choosing test case(s), now we will choose test suite(s)

    +
  • +
  • +

    Who will trigger the given Smoke/Integration/Performance job

    +
  • +
  • +

    What is the name of the official branch. This branch ought always to be used in every CD execution. It will be either master or develop.

    +
  • +
+
+
+
+
+

Jenkins for Smoke Tests

+
+
+

In the $TESTNAME variable, where we input the test name( link ), please input the name of a test suite assembled together of tests tagged as smoke tests -( link ) thus running all the smoke tests.

+
+
+
+
+

Jenkins for Performance Tests

+
+
+

Under construction - added when WebAPI module is included.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Continuous-Integration-CI.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Continuous-Integration-CI.html new file mode 100644 index 00000000..c333b484 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Continuous-Integration-CI.html @@ -0,0 +1,414 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Continuous Integration

+
+
+

Embrace quality with Continuous Integration while you produce test case(s).

+
+
+
+
+

Overview

+
+
+

There are two ways to set up your Continuous Integration environment:

+
+
+
    +
  1. +

    Create a Jenkins instance from scratch (e.g. by using the Jenkins Docker image)

    +
    +

    Using a clean Jenkins instance requires the installation of additional plugins. The plugins required and their versions can be found on this page.

    +
    +
  2. +
  3. +

    Use the pre-configured custom Docker image provided by us

    +
    +

    No more additional configuration is required (but optional) using this custom Docker image. Additionally, this Jenkins setup allows dynamical scaling across multiple machines and even cloud (AWS, Azure, Google Cloud etc.).

    +
    +
  4. +
+
+
+
+
+

Jenkins Overview

+
+
+

Jenkins is an Open Source Continuous Integration Tool. It allows the user to create automated build jobs which will run remotely on so called Jenkins Slaves. A build job can be triggered by several events, for example on new pull request on specified repositories or timed (e.g. at midnight).

+
+
+
+
+

Jenkins Configuration

+
+
+

Tests created by using the testing framework can easily be implemented on a Jenkins instance. The following chapter will describe such a job configuration. If you’re running your own Jenkins instance, you may have to install additional plugins listed on the page Jenkins Plugins for a trouble-free integration of your tests.

+
+
+
+
+

== Initial Configuration

+
+
+

The test job is configured as a so-called parameterized job. This means, after starting the job, parameters can be specified, which will then be used in the build process. In this case, branch and testname will be expected when starting the job. These parameters specify which branch in the code repository should be checked out (possibly feature branch) and the name of the test that should be executed.

+
+
+
+image79 +
+
+
+
+
+

== Build Process Configuration

+
+
+
    +
  • +

    The first step inside the build process configuration is to get the author of the commit that was made. The mail will be extracted and gets stored in a file called build.properties. This way, the author can be notified if the build fails.

    +
    +
    +image80 +
    +
    +
  • +
  • +

    Next up, Maven will be used to check if the code can be compiled, without running any tests.

    +
    +
    +image81 +
    +
    +
    +

    After making sure that the code can be compiled, the actual tests will be executed.

    +
    +
    +
    +image82 +
    +
    +
  • +
  • +

    Finally, reports will be generated.

    +
    +
    +image83 +
    +
    +
  • +
+
+
+
+
+

== Post Build Configuration

+
+
+
    +
  • +

    At first, the results will be imported to the Allure System

    +
    +
    +image84 +
    +
    +
  • +
  • +

    JUnit test results will be reported as well. Using this step, the test result trend graph will be displayed on the Jenkins job overview.

    +
    +
    +image85 +
    +
    +
  • +
  • +

    Finally, an E-Mail will be sent to the previously extracted author of the commit.

    +
    +
    +image86 +
    +
    +
  • +
+
+
+
+
+

Using the Pre-Configured Custom Docker Image

+
+
+

If you are starting a new Jenkins instance for your tests, we’d suggest using the pre-configured Docker image. This image already contains all the configurations and additional features.

+
+
+

The configurations are e.g. Plugins and Pre-Installed job setup samples. This way, you don’t have to set up the entire CI-Environment from the ground up.

+
+
+

Additional features from this docker image allow dynamic creation and deletion of Jenkins slaves, by creating Docker containers. Also, Cloud Solutions can be implemented to allow wide-spread load balancing.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Docker-commands.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Docker-commands.html new file mode 100644 index 00000000..6b8ad1e0 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Docker-commands.html @@ -0,0 +1,546 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

What is Docker

+
+
+

Docker is an open source software platform to create, deploy and manage virtualized application containers on a common operating system (OS), with an ecosystem of allied tools.

+
+
+
+
+

Where do we use Docker

+
+
+

DevOps module consists of Docker images

+
+
+
    +
  1. +

    Jenkins image

    +
  2. +
  3. +

    Jenkins job image

    +
  4. +
  5. +

    Jenkins management image

    +
  6. +
  7. +

    Security image

    +
  8. +
+
+
+

In addition, each new node is also based on Docker.

+
+
+
+
+

Exploring basic Docker options

+
+
+

Let’s show some of the most important commands that are needed when working with our DevOps module based on the Docker platform. Each command given below should be preceded by a sudo call by default. If you don’t want to use sudo command create a Unix group called docker and add a user to it.

+
+
+
+
$ sudo groupadd docker
+$ sudo usermod -aG docker $USER
+
+
+
+
+
+

Build an image from a Dockerfile

+
+
+
+
##docker build [OPTIONS] PATH | URL | -
+##
+##Options:
+## --tag , -t : Name and optionally a tag in the ‘name:tag’ format
+
+$ docker build -t vc_jenkins_jobs .
+
+
+
+
+
+

Container start

+
+
+
+
##docker run [OPTIONS] IMAGE[:TAG|@DIGEST] [COMMAND] [ARG...]
+#
+##Options:
+##-d : To start a container in detached mode (background)
+##-it : interactive terminal
+##--name : assign a container name
+##--rm : clean up
+##--volumes-from="": Mount all volumes from the given container(s)
+##-p : explicitly map a single port or range of ports
+##--volume : storage associated with the image
+
+$ docker run -d --name vc_jenkins_jobs vc_jenkins_jobs
+
+
+
+
+
+

Remove one or more containers

+
+
+
+
##docker rm [OPTIONS] CONTAINER
+#
+##Options:
+##--force , -f : Force the removal of a running container
+
+$ docker rm -f jenkins
+
+
+
+
+
+

List containers

+
+
+
+
##docker ps [OPTIONS]
+##--all, -a : Show all containers (default shows just running)
+
+$ docker ps
+
+
+
+
+
+

Pull an image or a repository from a registry

+
+
+
+
##docker pull [OPTIONS] NAME[:TAG|@DIGEST]
+
+$ docker pull jenkins/jenkins:2.73.1
+
+
+
+
+
+

Push the image or a repository to a registry

+
+
+

Pushing new image takes place in two steps. First save the image by adding container ID to the commit command and next use push:

+
+
+
+
##docker push [OPTIONS] NAME[:TAG]
+
+$ docker ps
+  # copy container ID from the result
+$ docker commit b46778v943fh vc_jenkins_mng:project_x
+$ docker push vc_jenkins_mng:project_x
+
+
+
+
+
+

Return information on Docker object

+
+
+
+
##docker inspect [OPTIONS] NAME|ID [NAME|ID...]
+#
+##Options:
+##--format , -f : output format
+
+$ docker inspect -f '{{ .Mounts }}' vc_jenkins_mng
+
+
+
+
+
+

List images

+
+
+
+
##docker images [OPTIONS] [REPOSITORY[:TAG]]
+#
+##Options:
+--all , -a : show all images with intermediate images
+
+$ docker images
+$ docker images jenkins
+
+
+
+
+
+

Remove one or more images

+
+
+
+
##docker rmi [OPTIONS] IMAGE [IMAGE...]
+#
+##Options:
+##  --force , -f : Force removal of the image
+
+$ docker rmi jenkins/jenkins:latest
+
+
+
+
+
+

Run a command in a running container

+
+
+
+
##docker exec [OPTIONS] CONTAINER COMMAND [ARG...]
+##-d : run command in the background
+##-it : interactive terminal
+##-w : working directory inside the container
+##-e : Set environment variables
+
+$ docker exec vc_jenkins_jobs sh -c "chmod 755 config.xml"
+
+
+
+
+
+

Advanced commands

+
+ +
+
+
+

Remove dangling images

+
+
+
+
$ docker rmi $(docker images -f dangling=true -q)
+
+
+
+
+
+

Remove all images

+
+
+
+
$ docker rmi $(docker images -a -q)
+
+
+
+
+
+

Removing images according to a pattern

+
+
+
+
$ docker images | grep "pattern" | awk '{print $3}' | xargs docker rmi
+
+
+
+
+
+

Remove all exited containers

+
+
+
+
$ docker rm $(docker ps -a -f status=exited -q)
+
+
+
+
+
+

Remove all stopped containers

+
+
+
+
$ docker rm $(docker ps --no-trunc -aq)
+
+
+
+
+
+

Remove containers according to a pattern

+
+
+
+
$ docker ps -a | grep "pattern" | awk '{print $1}' | xargs docker rm
+
+
+
+
+
+

Remove dangling volumes

+
+
+
+
$ docker volume rm $(docker volume ls -f dangling=true -q)
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Jenkins-Plugins.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Jenkins-Plugins.html new file mode 100644 index 00000000..f6436b82 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Jenkins-Plugins.html @@ -0,0 +1,712 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

List of Jenkins Plugins

+
+
+

|===

+
+
+

|Plugin Name +|Version

+
+
+

|blueocean-github-pipeline +|1.1.4

+
+
+

|blueocean-display-url +|2.0

+
+
+

|blueocean +|1.1.4

+
+
+

|workflow-support +|2.14

+
+
+

|workflow-api +|2.18

+
+
+

|plain-credentials +|1.4

+
+
+

|pipeline-stage-tags-metadata +|1.1.8

+
+
+

|credentials-binding +|1.12

+
+
+

|git +|3.5.1

+
+
+

|maven-plugin +|2.17

+
+
+

|workflow-durable-task-step +|2.12

+
+
+

|job-dsl +|1.64

+
+
+

|git-server +|1.7

+
+
+

|windows-slaves +|1.3.1

+
+
+

|github +|1.27.0

+
+
+

|blueocean-personalization +|1.1.4

+
+
+

|jackson2-api +|2.7.3

+
+
+

|momentjs +|1.1.1

+
+
+

|workflow-basic-steps +|2.6

+
+
+

|workflow-aggregator +|2.5

+
+
+

|blueocean-rest +|1.1.4

+
+
+

|gradle +|1.27.1

+
+
+

|pipeline-maven +|3.0.0

+
+
+

|blueocean-pipeline-editor +|0.2.0

+
+
+

|durable-task +|1.14

+
+
+

|scm-api +|2.2.2

+
+
+

|pipeline-model-api +|1.1.8

+
+
+

|config-file-provider +|2.16.3

+
+
+

|github-api +|1.85.1

+
+
+

|pam-auth +|1.3

+
+
+

|workflow-cps-global-lib +|2.8

+
+
+

|github-organization-folder +|1.6

+
+
+

|workflow-job +|2.12.1

+
+
+

|variant +|1.1

+
+
+

|git-client +|2.5.0

+
+
+

|sse-gateway +|1.15

+
+
+

|script-security +|1.29.1

+
+
+

|token-macro +|2.1

+
+
+

|jquery-detached +|1.2.1

+
+
+

|blueocean-web +|1.1.4

+
+
+

|timestamper +|1.8.8

+
+
+

|greenballs +|1.15

+
+
+

|handlebars +|1.1.1

+
+
+

|blueocean-jwt +|1.1.4

+
+
+

|pipeline-stage-view +|2.8

+
+
+

|blueocean-i18n +|1.1.4

+
+
+

|blueocean-git-pipeline +|1.1.4

+
+
+

|ace-editor +|1.1

+
+
+

|pipeline-stage-step +|2.2

+
+
+

|email-ext +|2.58

+
+
+

|envinject-api +|1.2

+
+
+

|role-strategy +|2.5.1

+
+
+

|structs +|1.9

+
+
+

|locale +|1.2

+
+
+

|docker-workflow +|1.13

+
+
+

|ssh-credentials +|1.13

+
+
+

|blueocean-pipeline-scm-api +|1.1.4

+
+
+

|metrics +|3.1.2.10

+
+
+

|external-monitor-job +|1.7

+
+
+

|junit +|1.21

+
+
+

|github-branch-source +|2.0.6

+
+
+

|blueocean-config +|1.1.4

+
+
+

|cucumber-reports +|3.8.0

+
+
+

|pipeline-model-declarative-agent +|1.1.1

+
+
+

|blueocean-dashboard +|1.1.4

+
+
+

|subversion +|2.9

+
+
+

|blueocean-autofavorite +|1.0.0

+
+
+

|pipeline-rest-api +|2.8

+
+
+

|pipeline-input-step +|2.7

+
+
+

|matrix-project +|1.11

+
+
+

|pipeline-github-lib +|1.0

+
+
+

|workflow-multibranch +|2.16

+
+
+

|docker-plugin +|0.16.2

+
+
+

|resource-disposer +|0.6

+
+
+

|icon-shim +|2.0.3

+
+
+

|workflow-step-api +|2.12

+
+
+

|blueocean-events +|1.1.4

+
+
+

|workflow-scm-step +|2.6

+
+
+

|display-url-api +|2.0

+
+
+

|favorite +|2.3.0

+
+
+

|build-timeout +|1.18

+
+
+

|mapdb-api +|1.0.9.0

+
+
+

|pipeline-build-step +|2.5.1

+
+
+

|antisamy-markup-formatter +|1.5

+
+
+

|javadoc +|1.4

+
+
+

|blueocean-commons +|1.1.4

+
+
+

|cloudbees-folder +|6.1.2

+
+
+

|ssh-slaves +|1.20

+
+
+

|pubsub-light +|1.10

+
+
+

|pipeline-graph-analysis +|1.4

+
+
+

|allure-jenkins-plugin +|2.23

+
+
+

|mailer +|1.20

+
+
+

|ws-cleanup +|0.33

+
+
+

|authentication-tokens +|1.3

+
+
+

|blueocean-pipeline-api-impl +|1.1.4

+
+
+

|ldap +|1.16

+
+
+

|docker-commons +|1.8

+
+
+

|branch-api +|2.0.10

+
+
+

|workflow-cps +|2.36.1

+
+
+

|pipeline-model-definition +|1.1.8

+
+
+

|blueocean-rest-impl +|1.1.4

+
+
+

|ant +|1.7

+
+
+

|credentials +|2.1.14

+
+
+

|matrix-auth +|1.7

+
+
+

|pipeline-model-extensions +|1.1.8

+
+
+

|pipeline-milestone-step +|1.3.1

+
+
+

|jclouds-jenkins +|2.14

+
+
+

|bouncycastle-api +|2.16.1

+
+
+

|===

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Pipeline-structure.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Pipeline-structure.html new file mode 100644 index 00000000..f72e1ad9 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Pipeline-structure.html @@ -0,0 +1,423 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Pipeline structure

+
+ +
+
+
+

Pipeline configuration:

+
+
+

The default interaction with Jenkins required manual jobs. This keeps configuration of a job in Jenkins separate from source code. With Pipeline plugin users can implement a pipeline procedure in Jenkinsfile and store it in repository with other code. This approach is used in Mr Checker framework. More info: https://jenkins.io/solutions/pipeline/

+
+
+

Our CI & CD processes are divided into a few separate files: Jenkins_node.groovy is the file to manage all processes. It defines all operations executed on a Jenkins node, so all code in this file is closed in node closure. Workflow in Jenkinsfile:

+
+
+
    +
  • +

    Read all parameters from a Jenkins job

    +
  • +
  • +

    Execute stage to prepare the environment

    +
  • +
  • +

    Execute git pull command

    +
  • +
  • +

    Set Jenkins job description

    +
  • +
  • +

    Execute compilation of the project in a special prepared docker container

    +
  • +
  • +

    Execute unit tests

    +
  • +
  • +

    Execute integration tests

    +
  • +
  • +

    Deploy artifacts to a local repository

    +
  • +
  • +

    Deploy artifacts to an external repository (nexus/arifactory)

    +
  • +
+
+
+

Not all the steps must be present in the Jenkins files. This should be configured for particular job requirements.

+
+
+
+
+

Description of stages:

+
+ +
+
+
+

Stage “Prepare environment”

+
+
+

First thing to do in this stage is overwriting properties loaded from Jenkins job. It is defined in “overrideProperties” function. The next function, “setJenkinsJobVariables” defines environment variables such as :

+
+
+
    +
  • +

    JOB_NAME_UPSTREAM

    +
  • +
  • +

    BUILD_DISPLAY_NAME_UPSTREAM

    +
  • +
  • +

    BUILD_URL_UPSTREAM

    +
  • +
  • +

    GIT_CREDENTIALS

    +
  • +
  • +

    JENKINS_CREDENTIALS

    +
  • +
+
+
+

The last function in the stage – “setWorkspace” – creates an environment variable with the path to the local workspace. This is required because, when using the pipeline plugin, Jenkins does not create the WORKSPACE env variable.

+
+
+
+
+

Stage "Git pull"

+
+
+

It pulls sources from the repository and loads “git pull” file which contains additional methods:

+
+
+
    +
  • +

    setGitAuthor – setting properties about git author to the file “build.properties” and loading created file

    +
  • +
  • +

    tryMergeWithBranch – checking if actual branch can be merged with default main branch

    +
  • +
+
+
+
+
+

Stage “Build compile”

+
+
+

Verify with maven that code builds without errors

+
+
+
+
+

Stage “Unit test”

+
+
+

Execute unit tests with mvn surefire test and publish reports in junit and allure format

+
+
+
+
+

Stage “Integration test”

+
+
+

Execute integration tests with mvn surefire test and publish reports in junit and allure format

+
+
+
+
+

Stage “Deploy – local repo”

+
+
+

Archive artifacts as a jar file in the local repository

+
+
+
+
+

Stage ”Deploy – nexus repo”

+
+
+

Deploy to the external repository with maven release deploy command with credentials stored in Jenkins machine. Additional files:

+
+
+
    +
  • +

    mailSender.groovy – contains methods for sending mail with generated content

    +
  • +
  • +

    stashNotification.groovy – send job status for bitbucket by a curl command

    +
  • +
  • +

    utils.groovy - contains additional functions to load properties, files and generate additional data

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Selenium-Grid.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Selenium-Grid.html new file mode 100644 index 00000000..b04bed8b --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module-Selenium-Grid.html @@ -0,0 +1,395 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Selenium Grid

+
+ +
+
+
+

What is Selenium Grid

+
+
+

Selenium Grid allows running web/mobile browsers test cases to fulfill basic factors, such as:

+
+
+
    +
  • +

    Independent infrastructure, similar to end-users'

    +
  • +
  • +

    Scalable infrastructure (~50 simultaneous sessions at once)

    +
  • +
  • +

    Huge variety of web browsers (from mobile to desktop)

    +
  • +
  • +

    Continuous Integration and Continuous Delivery process

    +
  • +
  • +

    Supporting multi-type programming languages (java, javascript, python, …​).

    +
  • +
+
+
+
+image88 +
+
+
+

On a daily basis, a test automation engineer uses their local environments for test case execution/development. However, a created browser test case has to be able to run on any infrastructure. Selenium Grid enables this portability.

+
+
+
+
+

Selenium Grid Structure

+
+
+
+image89 +
+
+
+

Full documentation of Selenium Grid can be found here and here.

+
+
+

'Vanilla flavour' Selenium Grid is based on two, not very complicated ingredients:

+
+
+
    +
  1. +

    Selenium Hub - as one machine, accepting connections to grid from test cases executors. It also plays a managerial role in connection to/from Selenium Nodes

    +
  2. +
  3. +

    Selenium Node - from one to many machines, where on each machine a browser used during test case execution is installed.

    +
  4. +
+
+
+
+
+

How to setup

+
+
+

There are two options of Selenium Grid setup:

+
+
+
    +
  • +

    Classic, static solution - link

    +
  • +
  • +

    Cloud, scalable solution - link

    +
  • +
+
+
+

Advantages and disadvantages of both solutions:

+
+
+
+image90 +
+
+
+
+
+

How to use Selenium Grid with E2E Mr Checker Test Frameworks

+
+
+

Run the following command either in Eclipse or in Jenkins:

+
+
+
+
> mvn test -Dtest=com.capgemini.ntc.selenium.tests.samples.resolutions.ResolutionTest -DseleniumGrid="http://10.40.232.61:4444/wd/hub" -Dos=LINUX -Dbrowser=chrome
+
+
+
+

As a result of this command:

+
+
+
    +
  • +

    -Dtest=com.capgemini.ntc.selenium.features.samples.resolutions.ResolutionTest - name of test case to execute

    +
  • +
  • +

    -DseleniumGrid="http://10.40.232.61:4444/wd/hub" - IP address of Selenium Hub

    +
  • +
  • +

    -Dos=LINUX - what operating system must be assumed during test case execution

    +
  • +
  • +

    -Dbrowser=chrome - what type of browser will be used during test case execution

    +
  • +
+
+
+
+image91 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module.html new file mode 100644 index 00000000..be6d6b09 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/DevOPS-Test-Module.html @@ -0,0 +1,413 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

DevOPS Test Module

+
+ +
+
+
+

What does DevOps mean for us?

+
+
+

DevOps consists of a mixture of three key components in a technical project:

+
+
+
    +
  • +

    People’s skills and mindset

    +
  • +
  • +

    Processes

    +
  • +
  • +

    Tools

    +
  • +
+
+
+

Using E2E MrChecker Test Framework it is possible to cover the majority of these areas.

+
+
+
+
+

QA Team Goal

+
+
+

For QA engineers, it is essential to take care of the product code quality.

+
+
+

Therefore, we have to understand, that a test case is also code which has to be validated against quality gates. As a result, we must test our developed test case like it is done during standard Software Delivery Life Cycle.

+
+
+
+
+

Well rounded test case production process

+
+
+
    +
  • +

    How do we define top-notch test cases development process in E2E MrChecker Test Framework

    +
  • +
+
+
+
+image5 +
+
+
+
+
+

Continuous Integration (CI) and Continuous Delivery (CD)

+
+
+ +
+
+
+image6 +
+
+
+
+
+

What should you receive from this DevOps module

+
+
+
+image7 +
+
+
+
+
+

What will you gain with our DevOps module

+
+
+

The CI procedure has been divided into transparent modules. This solution makes configuration and maintenance very easy because everyone is able to manage versions and customize the configuration independently for each module. A separate security module ensures the protection of your credentials and assigned access roles regardless of changes in other modules.

+
+
+
+image8 +
+
+
+

Your CI process will be matched to the current project. You can easily go back to the previous configuration, test a new one or move a selected one to other projects.

+
+
+
+image9 +
+
+
+

DevOps module supports a delivery model in which executors are made available to the user as needed. It has such advantages as:

+
+
+
    +
  • +

    Saving computing resources

    +
  • +
  • +

    Eliminating guessing on your infrastructure capacity needs

    +
  • +
  • +

    Not spending time on running and maintaining additional executors +== How to build this DevOps module

    +
  • +
+
+
+

Once you have implemented the module, you can learn more about it here:

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Mobile-Test-Module-How-to-use.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Mobile-Test-Module-How-to-use.html new file mode 100644 index 00000000..e99d5467 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Mobile-Test-Module-How-to-use.html @@ -0,0 +1,359 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

How to use mobile test Module

+
+
+
    +
  1. +

    Install IDE with MrChecker

    +
  2. +
  3. +

    Switch branch to 'feature/Create-mobile-module-#213' - by default it is 'develop'

    +
  4. +
+
+
+
+
git checkout feature/Create-mobile-module-#213
+
+
+
+
    +
  1. +

    Install and setup git checkout feature/Create-mobile-module-#213[Appium Server]

    +
  2. +
  3. +

    Connect to local Device by Appium Server

    +
    +
    +
     1.
    +Install Android SDK    https://developer.android.com/studio/index.html#command-tools    ->
    +	2.
    +Download Platform and Build-Tools  (Android versions - >    https://en.wikipedia.org/wiki/Android_version_history   )
    +* sdkmanager "platform-tools" "platforms;android-19"
    +* sdkmanager "build-tools;19.0.0"
    +* copy from /build-tools  file "aapt.exe"  to /platform-tools
    +	3.
    +Set Environment:
    +ANDROID_SDK_ROOT = D:\sdk-tools-windows-4333796
    +PATH =  %PATH%; %ANDROID_SDK_ROOT%
    +	4.
    +Start Appium Server
    +	5.
    +Start Session in Appium Server, capabilities
    +{
    +  "platformName": "Android",
    +            "deviceName": "Android Emulator",
    +            "app": "D:\\Repo\\mrchecker-source\\mrchecker-framework-modules\\mrchecker-mobile-module\\src\\test\\resources\\Simple App_v2.0.1_apkpure.com.apk",
    +            "automationName": "UiAutomator1"
    +            }
    +
    +
    +
  4. +
  5. +

    Run Mobile tests with runtime parameters. +List of supported parameters could be found here

    +
    +
      +
    • +

      From command line (as in Jenkins):

      +
    • +
    +
    +
  6. +
+
+
+
+
mvn clean compile test  -Dapp=".\\Simple_App_v2.0.1_apkpure.com.apk" -DautomationName="UiAutomator1" -Dthread.count=1
+
+
+
+
    +
  • +

    from IDE:

    +
  • +
+
+
+
+image00100 +
+
+
+
+image00101 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Mobile-Test-Module.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Mobile-Test-Module.html new file mode 100644 index 00000000..3fff629b --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Mobile-Test-Module.html @@ -0,0 +1,410 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Mobile Test Module

+
+ +
+
+
+

What is MrChecker E2E Mobile Test Module

+
+
+

MrChecker E2E Mobile test Module is a suitable solution for testing Remote Web Design, Mobile Browsers and application. +A user can write tests suitable for all mobile browsers with a full range of resolution. The way of working is similar to Selenium and uses the same rules and patterns as the Web Driver. For more information please look in the Selenium test module.

+
+
+
+
+

What is Page Object Architecture

+
+
+

Creating Selenium test cases can result in an unmaintainable project. One of the reasons is that too many duplicated code is used. Duplicated code could be caused by the duplicated functionality and this will result in duplicated usage of locators. The disadvantage of duplicated code is that the project is less maintainable. If some locator will change, you have to walk through the whole test code to adjust locators where necessary. By using the page object model we can make non-brittle test code and reduce or eliminate duplicate test code. Beside of that it improves the readability and allows us to create interactive documentation. Last but not least, we can create tests with less keystroke. An implementation of the page object model can be achieved by separating the abstraction of the test object and the test scripts.

+
+
+
+
+

Page Object Pattern

+
+
+
+Pom +
+
+
+
+
+

Mobile Structure

+
+
+

It is build on the top of the Appium library. +Appium is an open-source tool for automating native, mobile web, and hybrid applications on iOS mobile, Android mobile, and Windows desktop platforms. Native apps are those written using iOS, Android, or Windows SDKs. Mobile web apps are web apps accessed using a mobile browser (Appium supports Safari on iOS and Chrome or the built-in 'Browser' app on Android). Hybrid apps have a wrapper around a "webview" - a native control that enables interaction with web content.

+
+
+
+
+

Run on different mobile devices

+
+
+

To execute each test with chosen connected mobile devices, it is required to use specific arguments in Run configuration.

+
+
+
+image001 +
+
+
+
+image002 +
+
+
+

Default supported arguments in MrChecker:

+
+
+
    +
  • +

    deviceUrl - http url to Appium Server, default value "http://127.0.0.1:4723"

    +
  • +
  • +

    automationName - which automation engine to use , default value "Appium"

    +
  • +
  • +

    platformName - which mobile OS platform to use , default value "Appium"

    +
  • +
  • +

    platformVersion - mobile OS version , default value ""

    +
  • +
  • +

    deviceName - the kind of mobile device or emulator to use , default value "Android Emulator"

    +
  • +
  • +

    app - the absolute local path or remote http URL to a .ipa file (IOS), .app folder (IOS Simulator), .apk file (Android) or .apks file (Android App Bundle), or a .zip file, default value "."

    +
  • +
  • +

    browserName - name of mobile web browser to automate. Should be an empty string if automating an app instead, default value ""

    +
  • +
  • +

    newCommandTimeout - how long (in seconds) Appium will wait for a new command from the client before assuming the client quit and ending the session, default value "4000"

    +
  • +
  • +

    deviceOptions - any other capabilities not covered in essential ones, default value none

    +
  • +
+
+
+

Example usage:

+
+
+
+
mvn clean test -Dtest=MyTest -DdeviceUrl="http://192.168.0.1:1234" -DplatformName="iOS" -DdeviceName="iPhone Simulator" -Dapp=".\\Simple_App.ipa"
+
+
+
+
+
mvn clean test -Dtest=MyTest -Dapp=".\\Simple_App.apk" -DdeviceOptions="orientation=LANDSCAPE;appActivity=MainActivity;chromeOptions=['--disable-popup-blocking']"
+
+
+
+

Check also:

+
+ + + +
+

+ +Full list of Generic Capabilities

+
+
+

+ +List of additional capabilities for Android

+
+
+

+ +List of additional capabilities for iOS

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Security-Test-Module.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Security-Test-Module.html new file mode 100644 index 00000000..afc85945 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Security-Test-Module.html @@ -0,0 +1,320 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Security Test Module

+
+ +
+
+
+

What is Security

+
+
+

Application Security is concerned with Integrity, Availability and Confidentiality of data processed, stored and transferred by the application.

+
+
+

Application Security is a cross-cutting concern which touches every aspect of the Software Development Lifecycle. You can introduce some SQL injection flaws in your application and make it exploitable, but you can also expose your secrets (which will have nothing to do with code itself) due to poor secret management process, and fail as well.

+
+
+

Because of this and many other reasons, not every aspect of security can be automatically verified. Manual tests and audits will still be needed. Nevertheless, every security requirement which is automatically verified will prevent code degeneration and misconfiguration in a continuous manner.

+
+
+
+
+

How to test Security

+
+
+

Security tests can be performed in many different ways, such as:

+
+
+
    +
  • +

    Static Code Analysis - improves the security by (usually) automated code review. A good way to search for vulnerabilities, which are 'obvious' on the code level ( e.g. SQL injection). The downside of this approach is that professional tools to perform such scans are very expensive and still produce many false positives.

    +
  • +
  • +

    Dynamic Code Analysis - tests are run against a working environment. A good way to search for vulnerabilities, which require all client- and server-side components to be present and running (like e.g. Cross-Site Scripting). Tests are performed in a semi-automated manner and require a proxy tool (like e.g. OWASP ZAP)

    +
  • +
  • +

    Unit tests - self-written and self-maintained tests. They usually work on the HTTP/REST level (this defines the trust boundary between the client and the server) and run against a working environment. Unit tests are best suited for verifying requirements which involve business knowledge of the system or which assure secure configuration on the HTTP level.

    +
  • +
+
+
+

In the current release of the Security Module, the main focus will be Unit Tests.

+
+
+

Although the most common choice of environment for running security tests on will be integration (the environment offers the right stability and should mirror the production closely), it is not uncommon for some security tests to run on production as well. This is done for e.g. TLS configuration testing to ensure proper configuration of the most relevant environment in a continuous manner.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Building-basic-Selenium-Test.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Building-basic-Selenium-Test.html new file mode 100644 index 00000000..f023f054 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Building-basic-Selenium-Test.html @@ -0,0 +1,616 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Sample Walkthrough

+
+
+

This page will walk you through the process of creating a test case. We’ll create a very simple test for the Google search engine.

+
+
+
+
+

Test Procedure

+
+
+

We would like to open the Google search engine, enter some search query and afterwards submit the form. We hope to see some results being listed, otherwise the test will fail. Summarized, the testing process would look like this.

+
+
+
    +
  1. +

    Open google.com

    +
  2. +
  3. +

    Enter the string "Test" into the searchbox

    +
  4. +
  5. +

    Submit the form

    +
  6. +
  7. +

    Get the results and check if the result list is empty

    +
  8. +
+
+
+
+
+

Creating new packages

+
+
+

We will need two new packages, one for the new page classes, the other for our test classes.

+
+
+
+
+

Creating package for test classes

+
+
+

Open Eclipse, use the "Project Explorer" on the left to navigate to

+
+
+

mrchecker-app-under-test → src/test/java → com.example → selenium.tests → tests

+
+
+

Right click on "tests", click on "New" → New Package. We’ll name the new package "com.example.selenium.tests.googleSearch".

+
+
+
+image65 +
+
+
+
+
+

Creating package for page classes

+
+
+

Navigate to

+
+
+

mrchecker-app-under-test → src/main/java → com.example → selenium → pages

+
+
+

Right click on "pages", click on "New" → New Package. The new package will be called "com.example.selenium.pages.googleSearch".

+
+
+
+image66 +
+
+
+
+
+

Creating the test class

+
+
+

The test class will contain the entire testing-routine. At first, we’ll create a new class inside our newly created "googleSearch" package (under src/test/java) and call it "GoogleSearchTest".

+
+
+
+image67 +
+
+
+

As "GoogleSearchTest" is a test class, it has to extend the BaseTest class. You may have to import some required packages and afterwards include a few required methods.

+
+
+
+
public class GoogleSearchTest extends BaseTest {
+
+	@Override
+	public void setUp() {
+
+	}
+
+	@Override
+	public void tearDown() {
+
+	}
+}
+
+
+
+

Now, we’ll need a new Page object, which will represent the Google Search page. The page class will be named "GoogleSearchPage".

+
+
+
+
private GoogleSearchPage googleSearchPage;
+
+@Override
+public void setUp() {
+	googleSearchPage = new GoogleSearchPage();
+}
+
+
+
+
+
+

Creating the GoogleSearchPage class

+
+
+

We have created a new field for the GoogleSearchPage class and instantiated an object in the setUp() method. As this class doesn’t exist yet, we’ll have to create it inside the googleSearch page class package.

+
+
+
+image68 +
+
+
+

We extend the BasePage class with GoogleSearchPage, import all necessary packages and include all the required methods.

+
+
+
+
public class GoogleSearchPage extends BasePage {
+
+	@Override
+	public boolean isLoaded() {
+		return false;
+	}
+
+	@Override
+	public void load() {
+
+	}
+
+	@Override
+	public String pageTitle() {
+		return "";
+	}
+}
+
+
+
+

As this page class represents the Google homepage, we have to set up selectors for web elements required in our test case. In our example we have to create a selector for the search bar which we’ll interact with. The selector will be implemented as a field.

+
+
+
+
private static final By selectorGoogleSearchInput = By.cssSelector("#lst-ib");
+
+
+
+

The input field’s id #lst-ib was found by using the developer console in Google Chrome.

+
+
+

This selector can be used to create a WebElement object of said search bar. Therefore, we’ll create a new method and call it "enterGoogleSearchInput".

+
+
+
+
public GoogleResultPage enterGoogleSearchInput(String searchText) {
+	WebElement googleSearchInput = getDriver().findDynamicElement(selectorGoogleSearchInput);
+	googleSearchInput.sendKeys(searchText);
+	googleSearchInput.submit();
+
+	return new GoogleResultPage();
+}
+
+
+
+

As you can see, we return another page object that wasn’t yet created. This step is required, as the results that we would like to check are on another Google Page. This means we’ll have to create another page class, which will be shown later.

+
+
+

Finally, the empty methods inherited from the BasePage class have to be filled:

+
+
+
+
@Override
+public boolean isLoaded() {
+	if(getDriver().getTitle().equals(pageTitle())) {
+		return true;
+	}
+	return false;
+}
+
+@Override
+public void load() {
+	getDriver().get("http://google.com");
+}
+
+@Override
+public String pageTitle() {
+	return "Google";
+}
+
+
+
+

The method isLoaded() checks if the page was loaded by comparing the actual title with the expected title provided by the method pageTitle(). The load() method simply loads a given URL, in this case http://google.com.

+
+
+

The completion of these methods finalizes our GoogleSearchPage class. We still have to create the GoogleResultPage class mentioned before. This page will deal with the elements on the Google search result page.

+
+
+
+
+

Creating the GoogleResultPage class

+
+
+

By right-clicking on the "pages" package, we’ll navigate to "new" → "Class" to create a new class.

+
+
+
+image69 +
+
+
+

The GoogleResultPage class also has to extend BasePage and include all required methods. Next, a new selector for the result list will be created. By using the result list, we can finally check if the result count is bigger than zero and thus, if the search request was successful.

+
+
+
+
private static final By selectorResultList = By.cssSelector("#res");
+
+
+
+

We’ll use this selector inside a new getter-method, which will return all ListElements.

+
+
+
+
public ListElements getResultList() {
+	return getDriver().elementList(selectorResultList);
+}
+
+
+
+

This method will allow the testcase to simply get the result list and afterwards check if the list is empty or not.

+
+
+

Finally, we have to complete all inherited methods.

+
+
+
+
@Override
+public boolean isLoaded() {
+	getDriver().waitForPageLoaded();
+	if(getDriver().getCurrentUrl().contains("search")) {
+		return true;
+	}
+	return false;
+}
+
+@Override
+public void load() {
+	BFLogger.logError("Google result page was not loaded.");
+}
+
+@Override
+public String pageTitle() {
+	return "";
+}
+
+
+
+

The method isLoaded() differs from the same method in GoogleSearchPage, because this site is being loaded as a result from a previous action. That’s why we’ll have to use the method getDriver().waitForPageLoaded() to be certain that the page was loaded completely. Afterwards we check if the current URL contains the term "search", as it only occurs on the result page. This way we can check if we’re on the right page.

+
+
+

Another result of this page being loaded by another object is that we don’t have to load any specific URL. We just add a BFLogger instance to print an error message if the page was not successfully loaded.

+
+
+

As we don’t use the pageTitle() method we simply return an empty String.

+
+
+

Finally, all required page classes are complete and we can finalize the test class.

+
+
+
+
+

Finalizing the test class

+
+
+

At this point, our GoogleSearchTest class looks like this:

+
+
+
+
public class GoogleSearchTest extends BaseTest {
+
+	private GoogleSearchPage googleSearchPage;
+
+
+	@Override
+	public void setUp() {
+		googleSearchPage = new GoogleSearchPage();
+	}
+
+	@Override
+	public void tearDown() {
+
+	}
+}
+
+
+
+

Next, we’ll create the test method, let’s call it shouldResultReturn().

+
+
+
+
@Test
+public void shouldResultReturn() {
+	GoogleResultPage googleResultPage = googleSearchPage.enterGoogleSearchInput("Test");
+	ListElements results = googleResultPage.getResultList();
+	assertTrue("Number of results equals 0", results.getSize() > 0);
+}
+
+
+
+

Code explanation: At first, we will run the enterGoogleSearchInput() method on the GoogleSearchPage with the parameter "Test" to search for this exact string on Google. As this method returns a GoogleResultPage object, we will store this in the local variable googleResultPage. Afterwards, we get the result list by utilizing the getter method that we created before. Finally, we create an assertion: We expect the list size to be bigger than zero, meaning that the google search query was successful as we received results. If this assertion is wrong, a message will be printed out, stating that the number of results equals zero.

+
+
+

We can run the test by right clicking on the test method → Run as → JUnit test.

+
+
+
+image70 +
+
+
+

After starting the test, you’ll notice a browser window opening, resizing to given dimensions, opening Google, entering the query "Test" and submitting the form. After completing the test, you’ll see the test results on the right side of Eclipse. Green color indicator means that the test was successful, red means the test failed.

+
+
+
+image71 +
+
+
+

This walkthrough should’ve provided you with basic understanding on how the framework can be used to create test cases.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-CSS-selectors.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-CSS-selectors.html new file mode 100644 index 00000000..a47668e0 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-CSS-selectors.html @@ -0,0 +1,343 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

A css selector is used to select elements from an HTML page.

+
+
+

Selection by element tag, class or id are the most common selectors.

+
+
+
+
<p class='myText' id='123'>
+
+
+
+

This text element (p) can be found by using any one of the following selectors:

+
+
+
+
The HTML element: "p". Note: in practical use this will be too generic, if a preceding text section is added, the selected element will change.
+The class attribute preceded by ".": ".myText"
+The id attribute preceded by "#": "#123"
+
+
+
+

Using other attributes

+
+
+

When a class or an id attribute is not sufficient to identify an element, other attributes can be used as well, by using "[attribute=value]": For example:

+
+
+
+
<a href='https://ns.nl/example.html'>
+
+
+
+

This can be selected by using the entire value: "a[href='https://ns.nl/example.html']". For selecting links starting with, containing, ending with see the list below.

+
+
+
+
+

Using sub-elements

+
+
+

The css selectors can be stacked, by appending them:

+
+
+
+
<div id='1'><a href='ns.nl'></div>
+<div id='2'><a href='nsinternational.nl'></div>
+
+
+
+

In the example above, the link element to nsinternational can be obtained with: "#2 a".

+
+
+
+
+

When possible avoid

+
+
+
    +
  • +

    Using paths of commonly used HTML elements within the containers (HTML: div). This will cause failures when a container is added, a common occurrence during development, e.g. "div div p". Use class or id instead, if those are not available, request them to be added in the production code.

    +
  • +
  • +

    Magic order numbers. It is possible to get the second text element in its parent container by using the selector "p:nth-child(2)". If the items are representing different items, ask the developer to add specific attributes. It is also possible to request all items, with a selector similar to ".myList li", and iterate through them later.

    +
  • +
+
+
+
+
+

List

+
+
+

A good list with CSS Selectors can be found at W3Schools:
+https://www.w3schools.com/cssref/css_selectors.asp

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Construction-of-Framework-Page-Class.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Construction-of-Framework-Page-Class.html new file mode 100644 index 00000000..834b4ada --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Construction-of-Framework-Page-Class.html @@ -0,0 +1,586 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Page Class

+
+
+

Page Object Models allow for the representation of a webpage as a Java Class. The class contains all required web elements like buttons, textfields, labels, etc. When initializing a new project, create a new package to store the Page Object Models in.

+
+
+
+
+

Initialization

+
+
+

Source folder: allure-app-under-test/src/main/java

+
+
+

Name: com.example.selenium.pages.YOUR_PROJECT

+
+
+

Classes being created inside of this new package have to extend the BasePage class. As a result, a few abstract methods from BasePage have to be implemented.

+
+
+
+
public class DemoPage extends BasePage {
+
+	@Override
+	public boolean isLoaded() {
+
+	}
+
+	@Override
+	public void load() {
+
+	}
+
+	@Override
+	public String pageTitle() {
+
+	}
+}
+
+
+
+

The example above demonstrates a minimum valid Page Object class with all required methods included.

+
+
+
+
+

BasePage method: isLoaded

+
+
+

The inherited method isLoaded() can be used to check if the current Page Object Model has been loaded correctly. There are multiple ways to verify a correctly loaded page. One example would be to compare the actual page title with the expected page title.

+
+
+
+
public boolean isLoaded() {
+	if(getDriver().getTitle().equals("EXPECTED_TITLE")) {
+		return true;
+	}
+	return false;
+}
+
+
+
+
+
+

BasePage method: load

+
+
+

The method load() can be used to tell the webdriver to load a specific page.

+
+
+
+
public void load() {
+	getDriver().get("http://SOME_PAGE");
+}
+
+
+
+
+
+

BasePage method: pageTitle

+
+
+

The pageTitle() method returns a String containing the page title.

+
+
+
+
+

Creating a selector variable

+
+
+

To initialize web elements, a large variety of selectors can be used.

+
+
+

We recommend creating a private and constant field for every web element you’d like to represent in Java. Use the guide above to find the preferred selector and place it in the code below at "WEB_ELEMENT_SELECTOR".

+
+
+
+
private static final By someWebElementSelector = By.CSS("WEB_ELEMENT_SELECTOR");
+
+
+
+

As soon as you create the selector above, you can make use of it to initialize a WebElement object.

+
+
+
+
WebElement someWebElement = getDriver().findDynamicElement(someWebElementSelector);
+
+
+
+

Note: The examples displayed in the cssSelector.docx file use the Selenium method driver.findElement() to find elements. However, using this framework we recommend findDynamicElement() or findQuietlyElement(). findDynamicElement() allows waiting for dynamic elements, for example buttons that pop up.

+
+
+
+
+

Creating a page method

+
+
+

To interact with the page object, we recommend creating methods for each action.

+
+
+
+
public void enterGoogleSearchInput(String query) {
+	...
+}
+
+
+
+

Creating a method like the one above allows the test case to run something like googleSearchPage.enterGoogleSearchInput("Hello") to interact with the page object.

+
+
+
+
+

Naming Conventions

+
+
+

For code uniformity and readability, we provide a few method naming conventions.

+
+
+

|== =

+
+
+

|Element +|Action +|Name (example)

+
+
+

|Form: Input text +|enter +|enterUsernameInput()

+
+
+

| +|is (label) +|isUsernameInputPresent()

+
+
+

| +|is (value) +|isUsernameEmpty()

+
+
+

| +|get +|getUsernameValue()

+
+
+

|Form: Label +|get +|getCashValue()

+
+
+

| +|is (value) +|isCashValueEmpty()

+
+
+

| +|is (label) +|isCashLabelPresent()

+
+
+

|Form: Submit Button +|submit +|submitLoginForm()

+
+
+

| +|is +|isLoginFormPresent()

+
+
+

|Page: Button +|click +|clickInfoButton()

+
+
+

| +|is +|isInfoButtonPresent()

+
+
+

|Checkbox +|set +|setRememberMeCheckbox()

+
+
+

| +|unset +|unsetRememberMeCheckbox()

+
+
+

| +|is (present) +|isRememberMeCheckboxPresent()

+
+
+

| +|is (value) +|isRememberMeCheckboxSet()

+
+
+

|Radio +|set +|setMaleRadioValue("Woman")

+
+
+

| +|is (present) +|isMaleRadioPresent()

+
+
+

| +|is (visible) +|isMaleRadioVisible()

+
+
+

| +|get +|getSelectedMaleValue()

+
+
+

|Elements (Tabs, Cards, Account, etc.) +|click +|clickPositionTab() / clickMyBilanceCard()

+
+
+

| +|is +|isMyBilanceCardPresent()

+
+
+

|Dropdown List +|select +|selectAccountTypeValue(typeName)

+
+
+

| +|unselect +|unselectAccountTypeValue(typeName)

+
+
+

| +|multiple select +|selectAccountTypesValues(List typeNames)

+
+
+

| +|is (list) +|isAccountTypeDropdownListPresent()

+
+
+

| +|is (element present) +|isAccountTypeElementPresent(typeName)

+
+
+

| +|is (element selected) +|isAccountTypeSelected(typeName)

+
+
+

|Link +|click +|clickMoreLink()

+
+
+

| +|is +|isMoreLinkPresent()

+
+
+

|Combobox +|select +|selectSortCombobox()

+
+
+

| +|is (present) +|isSortComboboxPresent(name)

+
+
+

| +|is (contain) +|selectSortComboboxContain(name)

+
+
+

|Element Attribute +|get +|getPositionTabCss()

+
+
+

| +|get +|getMoreLinkHref() / getRememberMeCheckboxName()

+
+
+

|== =

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-List-of-web-elements.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-List-of-web-elements.html new file mode 100644 index 00000000..3712e2c2 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-List-of-web-elements.html @@ -0,0 +1,369 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Basic Web elements

+
+
+

This page will provide an overview of basic web elements.

+
+
+
+image57 +
+
+
+
+image58 +
+
+
+

|== = +|Name +|Method to use element

+
+
+

|Form: Input Text +|elementInputText()

+
+
+

|Form: Label +|elementLabel()

+
+
+

|Form: Submit Button +|elementButton()

+
+
+

|Page: Button +|elementButton()

+
+
+

|Checkbox +|elementCheckbox()

+
+
+

|Radio +|elementRadioButton()

+
+
+

|Elements (Tabs, Cards, Account, etc.) +|elementTab()

+
+
+

|Dropdown List +|elementDropdownList()

+
+
+

|Link +|-

+
+
+

|Combobox +|elementList() +|== =

+
+
+

Comparison of how picking a value from a checkbox can be done:

+
+
+
    +
  • +

    by classic Selenium atomic actions

    +
  • +
  • +

    by our enhanced Selenium wrapper

    +
  • +
+
+
+

Classic Selenium atomic actions

+
+
+
+
List<WebElement> checkboxesList = getDriver()
+                .findElements(selectorHobby);
+WebElement currentElement;
+for (int i = 0; i < checkboxesList.size(); i++) {
+    currentElement = checkboxesList.get(i);
+    if (currentElement.getAttribute("value")
+                    .equals(hobby.toString()) && currentElement.isSelected() != true)
+                        {
+        currentElement.click();
+            }
+}
+
+
+
+

Enhanced Selenium in E2E test framework

+
+
+
+
getDriver().elementCheckbox(selectorHobby)
+				.setCheckBoxByValue(hobby.toString());
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Method-action-naming-convention.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Method-action-naming-convention.html new file mode 100644 index 00000000..ff53a4bc --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Method-action-naming-convention.html @@ -0,0 +1,520 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Description

+
+
+

The main idea is to write a specific function for a given element. Create names using template: actionElementNameElementType(), and for a verifying function: isElementNameElementTypePresent() or isElementNameElementTypeAction() To get a value of another element attribute try using this schema: getElementNameElementTypeAttributeName().

+
+
+

We have a few basic web elements. There are items for a form input, submit button, label. On a page you can find a button, checkbox, radio, dropdown list, tabs etc. On these elements you can perform an action: click, enter, submit, type, select, set, get, unset, expand, mouse over. You may also check the presence of an element on a page.

+
+
+

Important: Visible and Present are not the same. The first one means that a user sees this element. The Present action means that the element is in the HTML code but is not visible for a user (a css display attribute is set to none).

+
+
+
+
+

Naming convention to create functions in page package

+
+
+

Using this keyword you may create compilation of names. This method should be public. Every other method created onside this method should be private.

+
+
+

|== =

+
+
+

|Element +|Action +|Name (example)

+
+
+

|Form: Input text +|enter +|enterUsernameInput()

+
+
+

| +|is (label) +|isUsernameInputPresent()

+
+
+

| +|is (value) +|isUsernameEmpty()

+
+
+

| +|get +|getUsernameValue()

+
+
+

|Form: Label +|get +|getCashValue()

+
+
+

| +|is (value) +|isCashValueEmpty()

+
+
+

| +|is (label) +|isCashLabelPresent()

+
+
+

|Form: Submit Button +|submit +|submitLoginForm()

+
+
+

| +|is +|isLoginFormPresent()

+
+
+

|Page: Button +|click +|clickInfoButton()

+
+
+

| +|is +|isInfoButtonPresent()

+
+
+

|Checkbox +|set +|setRememberMeCheckbox()

+
+
+

| +|unset +|unsetRememberMeCheckbox()

+
+
+

| +|is (present) +|isRememberMeCheckboxPresent()

+
+
+

| +|is (value) +|isRememberMeCheckboxSet()

+
+
+

|Radio +|set +|setMaleRadioValue("Woman")

+
+
+

| +|is (present) +|isMaleRadioPresent()

+
+
+

| +|is (visible) +|isMaleRadioVisible()

+
+
+

| +|get +|getSelectedMaleValue()

+
+
+

|Elements (Tabs, Cards, Account, etc.) +|click +|clickPositionTab() / clickMyBilanceCard()

+
+
+

| +|is +|isMyBilanceCardPresent()

+
+
+

|Dropdown List +|select +|selectAccountTypeValue(typeName)

+
+
+

| +|unselect +|unselectAccountTypeValue(typeName)

+
+
+

| +|multiple select +|selectAccountTypesValues(List typeNames)

+
+
+

| +|is (list) +|isAccountTypeDropdownListPresent()

+
+
+

| +|is (element present) +|isAccountTypeElementPresent(typeName)

+
+
+

| +|is (element selected) +|isAccountTypeSelected(typeName)

+
+
+

|Link +|click +|clickMoreLink()

+
+
+

| +|is +|isMoreLinkPresent()

+
+
+

|Combobox +|select +|selectSortCombobox()

+
+
+

| +|is (present) +|isSortComboboxPresent(name)

+
+
+

| +|is (contain) +|selectSortComboboxContain(name)

+
+
+

|Element Attribute +|get +|getPositionTabCss()

+
+
+

| +|get +|getMoreLinkHref() / getRememberMeCheckboxName()

+
+
+

|== =

+
+
+
+
+

Naming convention used in the code

+
+
+

Identifiers which are used in the code should be named as follows:

+
+
+
    +
  • +

    packages: com.<clientname>.<projectname>tests

    +
  • +
  • +

    classes: UpperCamelCase

    +
  • +
  • +

    interfaces: UpperCamelCase

    +
  • +
  • +

    variables: lowerCamelCase

    +
  • +
  • +

    constants and final variables: UPPERCASE_SEPARATED_BY_UNDERSCORES

    +
    +
    +
    Initialisms/Abbreviations of three or more letters are CamelCase instead of UPPERCASE.
    +Example:
    +*parseDbmXmlFromIPAddress* instead of *parseDBMXMLFromIPAddress*
    +
    +
    +
  • +
+
+
+
+
+

Name convention to create selector naming in page package

+
+
+
    +
  • +

    selector variable must be By type

    +
  • +
  • +

    selector variable must be private final static

    +
  • +
  • +

    selector naming selector<UserFieldDescription>

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Run-on-different-browsers.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Run-on-different-browsers.html new file mode 100644 index 00000000..22d6a75b --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Run-on-different-browsers.html @@ -0,0 +1,304 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Run on different browsers

+
+
+
+image59 +
+
+
+

To execute each test with a chosen installed browser, specific arguments are required in Run configuration.

+
+
+
+image60 +
+
+
+
+image61 +
+
+
+

It is necessary to enter -Dbrowser= with browser parameter name as an argument (in 'Arguments' tab):

+
+
+

firefox +ie +phantomjs +chrome +chromeheadless +For example: -Dbrowser=ie

+
+
+
+
_-ea_ should be entered as an argument to restore default settings.
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Run-with-different-browser-options.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Run-with-different-browser-options.html new file mode 100644 index 00000000..9b8c2810 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Run-with-different-browser-options.html @@ -0,0 +1,311 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Browser options

+
+
+

To run a browser with specific options during runtime, please use

+
+
+

-DbrowserOptions="< options >"

+
+
+
+
> mvn test -DbrowserOptions="param1"
+> mvn test -DbrowserOptions="param1=value1"
+
+
+
+

examples:

+
+
+
    +
  • +

    One parameter -DbrowserOptions="headless"

    +
  • +
  • +

    One parameter -DbrowserOptions="--incognito"

    +
  • +
  • +

    Many parameters -DbrowserOptions="headless;param1=value1;testEquals=FirstEquals=SecondEquals;--testMe"

    +
  • +
+
+
+

List of options/capabilities supported by:

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Run-with-full-range-of-resolution.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Run-with-full-range-of-resolution.html new file mode 100644 index 00000000..8ec9fc3f --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Run-with-full-range-of-resolution.html @@ -0,0 +1,297 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Run with full range of resolution

+
+
+
+image62 +
+
+
+

In order to execute tests in different browser resolutions, it is required to provide these resolutions as a test parameter.

+
+
+

Test example with resolutions included may be found in ResolutionTest test class

+
+
+
+image63 +
+
+
+

Example of resolution notation is available in ResolutionEnum class

+
+
+
+image64 +
+
+
+

Test with given resolution parameters will be launched as many times as the number of resolutions provided.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Selenium-Best-Practices.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Selenium-Best-Practices.html new file mode 100644 index 00000000..ae24fbef --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Selenium-Best-Practices.html @@ -0,0 +1,311 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Selenium Best Practices

+
+
+

The following table displays a few best practices that should be taken into consideration when developing Selenium test cases.

+
+
+

|===

+
+
+

|Best Practices +|Description

+
+
+

|"Keep it Simple" +|Do not force use every Selenium feature available - Plan before creating the actual test cases

+
+
+

|Using Cucumber +|Cucumber can be used to create initial testcases for further decision making

+
+
+

|Supporting multiple browsers +|Test on multiple browsers (in parallel, if applicable) if the application is expected to support multiple environments

+
+
+

|Test reporting +|Make use of test reporting modules like Junit which is included in the framework

+
+
+

|Maintainability +|Always be aware of the maintainability of tests - You should always be able to adapt to changes

+
+
+

|Testing types +|Which tests should be created? Rule of thumb: 70% Unit test cases, 20% Integration test cases and 10% UI Test cases

+
+
+

|Test data +|Consider before actually developing tests and choosing tools: Where to get test data from, how to reset test data

+
+
+

|===

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Selenium-UFT-Comparison.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Selenium-UFT-Comparison.html new file mode 100644 index 00000000..89192458 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-Selenium-UFT-Comparison.html @@ -0,0 +1,424 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Selenium UFT Comparison

+
+
+

|===

+
+
+

|Subject +|HP UFT +|HP LeanFT +|Selenium +|Selenium IDE

+
+
+

|Language +|VBScript +|Same as Selenium +|Supports several languages. +Java +|Javascript

+
+
+

|Learning curve +|Based on VBScript which is relatively easy to learn +|Less intuitive, more coding knowledge necessary +|Less intuitive, more coding skills necessary +|Record/playback possible. Generated code difficult to maintain

+
+
+

|Project type +|Traditional +|Agile +|Agile +|Agile

+
+
+

|User oriented +|More Tester +|More Developer +|More Developer +|More Tester

+
+
+

|Object recognition +|Test object identification and storage in object repository +|Same as UFT +|With Firebug +|Same as SE

+
+
+

|Customizations +|Only the available standard. No customization +|Same as UFT +|Lots of customizations possible +|Fewer than SE

+
+
+

|Framework +|Needed. +Exists in ATaaS +| +|Needed. +Integration with Fitnesse, Cucumber, Gauge +|No Framework. Limited capabilities of the tool.

+
+
+

|Operating System support +|Runs on Windows +|Runs on Windows +|Multiple OS support. With Grid: testing on multiple devices at same time +|Plugin for Firefox

+
+
+

|Application coverage +|Many +|Many +|Web only +|Web only

+
+
+

|Multiple browsers +|In UFT 12.5 available +|In 12.5 available +|Multiple tests in multiple browser windows at once and faster support for new browser versions +|Multiple tests in multiple browser windows at once and faster support for new browser versions

+
+
+

|System Load +|High system load (RAM & CPU usage) +|Lower load than HP UFT? +|Lower load than HP UFT +|Lower load than HP UFT

+
+
+

|ALM integration +|With HP ALM – full integration +| +|Jira, Jenkins +Not with ALM tool +|Same as SE

+
+
+

|Integration with other tools +|A lot can be built, but many are already covered. +|More than UFT. +|Freeware and can be integrated with different open source tools +|Freeware and can be integrated with different open source tools

+
+
+

|Addins +|Add-ins necessary to access all capabilities of the tool – license related +|Same as UFT +|See integration with other tools +|See integration with other tools

+
+
+

|Reporting +|Complete, link to ALM +|Same as UFT +|No native mechanism for generating reports, but multiple plugins available for reporting +|No native mechanism for generating reports, but multiple plugins available for reporting

+
+
+

|Support +|HP full support +|Same as UFT +|Limited support as it is open source +|Limited support as it is open source

+
+
+

|License costs +|About 17K – Capgemini price 5K. +Included in the S2 service charge +|Same price as HP UFT +|Free +|Free +limited functionality (no iterations / conditional statements)

+
+
+

|iVAL Service +|ATaaS +|Not in a S2 service +|Not in a S2 service +|Not in a S2 service

+
+
+

|===

+
+
+

Bold for key differentiators.

+
+
+

Projects also choose an available resource and the knowledge of that resource.

+
+
+

Both: Framework determines the quality of automation. Needs to be set up by someone with experience with the tool

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-What-is-Page-Object-Model-Pattern.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-What-is-Page-Object-Model-Pattern.html new file mode 100644 index 00000000..0b2b57cd --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-What-is-Page-Object-Model-Pattern.html @@ -0,0 +1,283 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

What is Page Object Model?

+
+
+
+image55 +
+
+
+

Creating Selenium test cases can result in an unmaintainable project. One of the reasons is that too much duplicated code is used. Duplicated code could result from duplicated functionality leading to duplicated usage of locators. The main disadvantage of duplicated code is that the project is less maintainable. If a locator changes, you have to walk through the whole test code to adjust locators where necessary. By using the page object model we can make non-brittle test code and reduce or eliminate duplicate test code. In addition, it improves the readability and allows us to create interactive documentation. Last but not least, we can create tests with less keystroke. An implementation of the page object model can be achieved by separating the abstraction of the test object and the test scripts.

+
+
+
+image56 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-What-is-Selenium.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-What-is-Selenium.html new file mode 100644 index 00000000..6b3e7ba3 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-What-is-Selenium.html @@ -0,0 +1,346 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

What is Selenium

+
+
+

Selenium is a framework for testing browser applications. The test automation supports:

+
+
+
    +
  • +

    Frequent regression testing

    +
  • +
  • +

    Repeating test case executions

    +
  • +
  • +

    Documentation of test cases

    +
  • +
  • +

    Finding defects

    +
  • +
  • +

    Multiple Browsers

    +
  • +
+
+
+

The Selenium testing framework consists of multiple tools:

+
+
+
    +
  • +

    Selenium IDE

    +
    +

    The Selenium Integrated Development Environment is a prototyping tool for building test scripts. It is a Firefox Plugin and provides an easy-to-use interface for developing test cases. Additionally, Selenium IDE contains a recording feature, that allows the user to record user inputs that can be automatically re-executed in future.

    +
    +
  • +
  • +

    Selenium 1

    +
    +

    Selenium 1, also known as Selenium RC, commands a Selenium Server to launch and kill browsers, interpreting the Selenese commands passed from the test program. The Server acts as an HTTP proxy. This tool is deprecated.

    +
    +
  • +
  • +

    Selenium 2

    +
    +

    Selenium 2, also known as Selenium WebDriver, is designed to supply a well-designed, object-oriented API that provides improved support for modern advanced web-app testing problems.

    +
    +
  • +
  • +

    Selenium 3.0

    +
    +

    The major change in Selenium 3.0 is removing the original Selenium Core implementation and replacing it with one backed by WebDriver. There is now a W3C specification for browser automation, based on the Open Source WebDriver.

    +
    +
  • +
  • +

    Selenium Grid

    +
    +

    Selenium Grid allows the scaling of Selenium RC test cases, that must be run in multiple and potentially variable environments. The tests can be run in parallel on different remote machines.

    +
    +
  • +
+
+
+
+
+

Selenium on the Production Line

+
+
+

More information on Selenium on the Production Line can be found here.

+
+
+

tl;dr

+
+
+

The Production Line has containers running Chrome and Firefox Selenium Nodes. The communication with these nodes is accomplished using Selenium Grid.

+
+
+

Having issues using Selenium on the Production Line? Check the Production Line issue list, maybe it’s a known issue that can be worked around.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-What-is-WebDriver.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-What-is-WebDriver.html new file mode 100644 index 00000000..d8158805 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module-What-is-WebDriver.html @@ -0,0 +1,297 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

What is WebDriver

+
+
+

On the one hand, it is a very convenient API for a programmer that allows for interaction with the browser, on the other hand it is a driver concept that enables this direct communication.

+
+
+
+image53 +
+
+
+
+
+

How does it work?

+
+
+
+image54 +
+
+
+

A tester, through their test script, can command WebDriver to perform certain actions on the WAUT on a certain browser. The way the user can command WebDriver to perform something is by using the client libraries or language bindings provided by WebDriver.

+
+
+

By using the language-binding client libraries, a tester can invoke browser-specific implementations of WebDriver, such as Firefox Driver, IE Driver, Opera Driver, and so on, to interact with the WAUT of the respective browser. These browser-specific implementations of WebDriver will work with the browser natively and execute commands from outside the browser to simulate exactly what the application user does.

+
+
+

After execution, WebDriver will send the test result back to the test script for developer’s analysis.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module.html new file mode 100644 index 00000000..a5dd31f9 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Selenium-Test-Module.html @@ -0,0 +1,392 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Selenium Test Module

+
+ +
+
+
+

What is MrChecker E2E Selenium Test Module

+
+
+
+image2 +
+
+
+
+
+

Selenium Structure

+ +
+
+

Framework Features

+
+
+ +
+
+
+
+

How to start?

+ +
+
+

Selenium Best Practices

+
+ +
+
+
+

Selenium UFT Comparison

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Standalone-Test-Module.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Standalone-Test-Module.html new file mode 100644 index 00000000..68595117 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Standalone-Test-Module.html @@ -0,0 +1,279 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Standalone Test Module

+
+
+

The inspiring content will be here soon.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module-How-to-make-virtual-asset.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module-How-to-make-virtual-asset.html new file mode 100644 index 00000000..cd6c7e5a --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module-How-to-make-virtual-asset.html @@ -0,0 +1,558 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

How to make a virtual asset

+
+
+

This can be done in four ways:

+
+
+
    +
  • +

    Record all traffic (Mappings and Responses) that comes through proxy - by UI

    +
  • +
  • +

    Record all traffic (Mappings and Responses) that comes through proxy - by Code

    +
  • +
  • +

    Create Mappings and Responses manually by text files

    +
  • +
  • +

    Create Mappings and Responses manually by code

    +
  • +
+
+
+
+
+

Record all traffic (Mappings and Responses) that comes through proxy - UI

+
+
+

Full article here Wiremock record-playback.

+
+
+

First, start an instance of WireMock running standalone. Once that’s running, visit the recorder UI page at http://localhost:8080/__admin/recorder (assuming you started WireMock on the default port of 8080).

+
+
+
+image77 +
+
+
+

Enter the URL you wish to record from in the target URL field and click the Record button. You can use http://example.mocklab.io to try it out.

+
+
+

Now you need to make a request through WireMock to the target API so that it can be recorded. If you’re using the example URL, you can generate a request using curl:

+
+
+
+
$ curl http://localhost:8080/recordables/123
+
+
+
+

Now click stop. You should see a message indicating that one stub was captured.

+
+
+

You should also see that a file has been created called something like recordables_123-40a93c4a-d378-4e07-8321-6158d5dbcb29.json under the mappings directory created when WireMock started up, and that a new mapping has appeared at http://localhost:8080/__admin/mappings.

+
+
+

Requesting the same URL again (possibly disabling your wifi first if you want a firm proof) will now serve the recorded result:

+
+
+
+
$ curl http://localhost:8080/recordables/123
+
+{
+"message": "Congratulations on your first recording!"
+}
+
+
+
+
+
+

Record all traffic (Mappings and Responses) that comes through proxy - by Code

+
+
+

An example of how such a record can be achieved

+
+
+
+
@Test
+public void startRecording() {
+
+    SnapshotRecordResult recordedMappings;
+
+    DriverManager.getDriverVirtualService()
+            .start();
+    DriverManager.getDriverVirtualService()
+            .startRecording("http://example.mocklab.io");
+    recordedMappings = DriverManager.getDriverVirtualService()
+            .stopRecording();
+
+    BFLogger.logDebug("Recorded messages: " + recordedMappings.toString());
+
+}
+
+
+
+
+
+

Create Mappings and Responses manually by text files

+
+
+

EMPTY

+
+
+
+
+

Create Mappings and Responses manually by code

+
+
+

Link to full file structure: REST_FarenheitToCelsiusMethod_Test.java

+
+
+
+
+

Start up Virtual Server

+
+
+
+
public void startVirtualServer() {
+
+    // Start Virtual Server
+    WireMockServer driverVirtualService = DriverManager.getDriverVirtualService();
+
+    // Get Virtual Server running http and https ports
+    int httpPort = driverVirtualService.port();
+    int httpsPort = driverVirtualService.httpsPort();
+
+    // Print is Virtual server running
+    BFLogger.logDebug("Is Virtual server running: " + driverVirtualService.isRunning());
+
+    String baseURI = "http://localhost";
+    endpointBaseUri = baseURI + ":" + httpPort;
+}
+
+
+
+
+
+

Plug in a virtual asset

+
+
+

REST_FarenheitToCelsiusMethod_Test.java

+
+
+
+
public void activateVirtualAsset() {
+    /*
+    * ----------
+    * Mock response. Map request with virtual asset from file
+    * -----------
+    */
+    BFLogger.logInfo("#1 Create Stub content message");
+    BFLogger.logInfo("#2 Add resource to virtual server");
+    String restResourceUrl = "/some/thing";
+    String restResponseBody = "{ \"FahrenheitToCelsiusResponse\":{\"FahrenheitToCelsiusResult\":37.7777777777778}}";
+
+    new StubREST_Builder //For active virtual server ...
+            .StubBuilder(restResourceUrl) //Activate mapping, for this Url AND
+            .setResponse(restResponseBody) //Send this response  AND
+            .setStatusCode(200) // With status code 200 FINALLY
+            .build(); //Set and save mapping.
+
+}
+
+
+
+

Link to full file structure: StubREST_Builder.java

+
+
+

Source link to How to create Stub.

+
+
+

StubREST_Builder.java

+
+
+
+
public class StubREST_Builder {
+
+    // required parameters
+    private String endpointURI;
+
+    // optional parameters
+    private int statusCode;
+
+    public String getEndpointURI() {
+        return endpointURI;
+    }
+
+    public int getStatusCode() {
+        return statusCode;
+    }
+
+    private StubREST_Builder(StubBuilder builder) {
+        this.endpointURI = builder.endpointURI;
+        this.statusCode = builder.statusCode;
+    }
+
+    // Builder Class
+    public static class StubBuilder {
+
+        // required parameters
+        private String endpointURI;
+
+        // optional parameters
+        private int     statusCode  = 200;
+        private String  response    = "{ \"message\": \"Hello\" }";
+
+        public StubBuilder(String endpointURI) {
+            this.endpointURI = endpointURI;
+        }
+
+        public StubBuilder setStatusCode(int statusCode) {
+            this.statusCode = statusCode;
+            return this;
+        }
+
+        public StubBuilder setResponse(String response) {
+            this.response = response;
+            return this;
+        }
+
+        public StubREST_Builder build() {
+
+            // GET
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            // Given that request with ...
+                            get(urlMatching(this.endpointURI))
+                                    .withHeader("Content-Type", equalTo(ContentType.JSON.toString()))
+                                    // Return given response ...
+                                    .willReturn(aResponse()
+                                            .withStatus(this.statusCode)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody(this.response)
+                                            .withTransformers("body-transformer")));
+
+            // POST
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            // Given that request with ...
+                            post(urlMatching(this.endpointURI))
+                                    .withHeader("Content-Type", equalTo(ContentType.JSON.toString()))
+                                    // Return given response ...
+                                    .willReturn(aResponse()
+                                            .withStatus(this.statusCode)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody(this.response)
+                                            .withTransformers("body-transformer")));
+
+            // PUT
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            // Given that request with ...
+                            put(urlMatching(this.endpointURI))
+                                    .withHeader("Content-Type", equalTo(ContentType.JSON.toString()))
+                                    // Return given response ...
+                                    .willReturn(aResponse()
+                                            .withStatus(this.statusCode)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody(this.response)
+                                            .withTransformers("body-transformer")));
+
+            // DELETE
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            // Given that request with ...
+                            delete(urlMatching(this.endpointURI))
+                                    .withHeader("Content-Type", equalTo(ContentType.JSON.toString()))
+                                    // Return given response ...
+                                    .willReturn(aResponse()
+                                            .withStatus(this.statusCode)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody(this.response)
+                                            .withTransformers("body-transformer")));
+
+            // CATCH any other requests
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            any(anyUrl())
+                                    .atPriority(10)
+                                    .willReturn(aResponse()
+                                            .withStatus(404)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody("{\"status\":\"Error\",\"message\":\"Endpoint not found\"}")
+                                            .withTransformers("body-transformer")));
+
+            return new StubREST_Builder(this);
+        }
+    }
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module-Smoke-Tests-virtualization.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module-Smoke-Tests-virtualization.html new file mode 100644 index 00000000..f4444ce3 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module-Smoke-Tests-virtualization.html @@ -0,0 +1,1017 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Start a virtual server

+
+
+

The following picture presents the process of executing Smoke Tests in a virtualized environment:

+
+
+
+image78 +
+
+
+
+
+

Install docker service

+
+
+

If docker is not already installed on machine (this should be checked during C2C creation), install docker, docker-compose, apache2-utils, openssl (You can use script to install docker & docker-compose OR refer to this post and add Alias for this machine <C2C_Alias_Name>):

+
+
+
    +
  • +

    run the script

    +
  • +
  • +

    sudo apt-get install -y apache2-utils

    +
  • +
+
+
+
+
+

Build a docker image

+
+
+

Dockerfile:

+
+
+
+
FROM docker.xxx.com/ubuntu:16.04
+MAINTAINER Maintainer Name "maintainer@email.address"
+LABEL name=ubuntu_java \
+           version=v1-8.0 \
+           base="ubuntu:16.04" \
+           build_date="03-22-2018" \
+           java="1.8.0_162" \
+           wiremock="2.14.0" \
+           description="Docker to use with Ubuntu, JAVA and WIREMOCK "
+
+##Update and install the applications needed
+COPY 80proxy /etc/apt/apt.conf.d/80proxy
+RUN apt-get update
+RUN apt-get install -y \
+            wget \
+            libfontconfig \
+            unzip \
+            zip \
+            ksh \
+            curl \
+            git
+
+COPY wgetrc /etc/wgetrc
+
+#Env parameters
+
+### JAVA PART ###
+#TO UPDATE:please verify url link to JDK http://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html
+##Download and install JAVA JDK8
+RUN mkdir /opt/jdk
+RUN wget -qq --header "Cookie: oraclelicense=accept-securebackup-cookie" http://download.oracle.com/otn-pub/java/jdk/8u162-b12/0da788060d494f509bf8624735fa2f1/jdk-8u162-linux-x64.tar.gz && tar -zxf jdk-8u162-linux-x64.tar.gz -C /opt/jdk && rm jdk-8u162-linux-x64.tar.gz && update-alternatives --install /usr/bin/javac javac /opt/jdk/jdk1.8.0_162/bin/javac 100 && java -version && chmod 755 -R /opt/jdk/jdk1.8.0_162/
+RUN java -version
+
+##Add user
+RUN useradd -u 29001 -g 100 srvpwiredev
+
+##Add app
+RUN mkdir -p -m 777 /app
+COPY wiremock-standalone-2.14.0.jar /app/wiremock-standalone-2.14.0.jar
+
+##Expose port
+EXPOSE 8080
+
+##Set workdir
+WORKDIR /app
+
+##Run app
+CMD java -jar /app/wiremock-standalone-2.14.0.jar
+
+
+
+

Execute the following steps with a specified version to build a docker image and push it to the repository :

+
+
+
+
## Build image
+sudo docker build -t docker.xxx.com/app/build/wiremock:v2.14.0 .
+
+## Push image
+sudo docker login docker.xxx.com
+sudo docker push docker.xxx.com/app/build/wiremock:v2.14.0
+
+
+
+
+
+

Run docker image

+
+
+

To run a docker image, execute the following command:

+
+
+
+
sudo docker run -td -p 8080:8080 -v /home/wiremock/repo/app/docker/QA/mappings:/app/mappings -v /home/wiremock/repo/app/docker/QA/__files:/app/__files --restart always docker.xxx.com/app/build/wiremock:v2.14.0
+
+
+
+

Where:

+
+
+

-p - publish a container’s port to the host

+
+
+

-v - bind mount a volume. WireMock server creates two directories under the current one: mappings and __files. It is necessary to mount directories with already created mappings and responses to make it work.

+
+
+

--restart always - restart policy to apply when a container exits

+
+
+

All of the parameters are described in: official docker documentation

+
+
+
+
+

Map requests with virtual assets

+
+
+

What is WireMock?

+
+
+

WireMock is an HTTP mock server. At its core it is a web server that can be primed to serve canned responses to particular requests (stubbing) and that captures incoming requests so that they can be checked later (verification). It also has an assortment of other useful features including record/playback of interactions with other APIs, injection of faults and delays, simulation of stateful behaviour.

+
+
+

Full documentation can be found under the following link: WireMock

+
+
+
+
+

Record / create virtual assets mappings

+
+
+

Record

+
+
+

WireMock can create stub mappings from requests it has received. Combined with its proxying feature, this allows you to "record" stub mappings from interaction with existing APIs.

+
+
+

Record and playback (Legacy): documentation

+
+
+
+
java -jar wiremock-standalone-2.16.0.jar --proxy-all="http://search.twitter.com" --record-mappings --verbose
+
+
+
+

Once it’s started and request is sent to it, it will be redirected to "http://search.twitter.com" and traffic (response) is saved to files in mappings and __files directories for further use.

+
+
+

Record and playback (New): documentation

+
+
+
+
+

Enable mappings in a virtual server

+
+
+

When the WireMock server starts, it creates two directories under the current one: mappings and __files. To create a stub, it is necessary to drop a file with a .json extension under mappings.

+
+
+

Run docker with mounted volumes

+
+
+

Mappings are in a repository. It is necessary to mount directories with already created mappings and responses to make it work:

+
+
+
+
sudo docker run -td -p 8080:8080 -v /home/wiremock/repo/app/docker/QA/mappings:/app/mappings -v /home/wiremock/repo/app/docker/QA/__files:/app/__files --restart always docker.xxx.com/app/build/wiremock:v2.14.0
+
+
+
+

The description of how to build and run docker is available under: Docker run command description

+
+
+

Recorded mappings

+
+
+

Recorded mappings are kept in the project repository.

+
+
+
+
+

Create a user and map them to docker user

+
+
+

To enable the connection from Jenkins to Virtual Server (C2C), it is necessary to create a user and map them to docker group user. It can be done using the following command:

+
+
+
+
adduser -G docker -m wiremock
+
+
+
+

To set the password for a wiremock user:

+
+
+
+
passwd wiremock
+
+
+
+
+
+

Create SSH private and public keys for a wiremock user

+
+
+

SSH keys serve as a means of identifying yourself to an SSH server using public-key cryptography and challenge-response authentication. One immediate advantage this method has over traditional password is that you can be authenticated by the server without ever having to send your password over the network.

+
+
+

To create an SSH key, log in as wiremock (previously created user).

+
+
+
+
su wiremock
+
+
+
+

The .ssh directory is not by default created below user home directory. Therefore, it is necessary to create it:

+
+
+
+
mkdir ~/.ssh
+
+
+
+

Now we can proceed with creating an RSA key using ssh-keygen (a tool for creating new authentication key pairs for SSH):

+
+
+
+
ssh-keygen -t rsa
+
+
+
+

A key should be created under ~/.ssh/id_rsa +Appending the public keys to authorized_keys:

+
+
+
+
wiremock@vc2crptXXXXXXXn:~/ssh$ cat id_rsa.pub >> authorized_keys
+
+
+
+
+
+

Install an SSH key in Jenkins

+
+
+

To add an SSH key to Jenkins, go to credentials in your job location. Choose the folder within credentials, then 'global credentials', 'Add credentials'. Fill in the fields. Finally, the entry should be created.

+
+
+
+
+

Build a Jenkins Groovy script

+
+
+

The description of how to use SSH Agent plugin in Jenkins pipeline can be found under: https://www.karthikeyan.tech/2017/09/ssh-agent-blue-ocean-via-jenkins.html

+
+
+

Example of use:

+
+
+
+
sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+     sh """
+         ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "docker container restart ${env.WIREMOCK_CONTAINER_NAME}"
+     """
+}
+
+
+
+

Where: env.WIREMOCK_CREDENTIALS is a credential id of previously created wiremock credentials. Now that it is present, we can execute commands on a remote machine, where in ssh command: +env.WIREMOCK_USERNAME - user name of user connected with configured private key +env.WIREMOCK_IP_ADDRESS - ip address of the machine where this user with this private key exists

+
+
+
+
+

Pull repository with virtual assets

+
+
+

To pull the repository on a remote machine, it is necessary to use the previously described SSH Agent plugin. An example of use:

+
+
+
+
sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+withCredentials([usernamePassword(credentialsId: env.STASH_CREDENTIALS, passwordVariable: 'PASS', usernameVariable: 'USER')]) {
+     sh """
+         ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "cd ~/${env.APPLICATION_DIRECTORY_WIREMOCK}/${env.PROJECT_HOME}; git fetch https://$USER:$PASS@${env.GIT_WITHOUT_HTTPS} ${env.GIT_BRANCH}; git reset --hard FETCH_HEAD; git clean -df"
+      """
+    }
+}
+
+
+
+

Where:

+
+
+

withCredentials allows various kinds of credentials (secrets) to be used in idiosyncratic ways. Each binding will define an environment variable active within the scope of the step. Then the necessary commands are executed:

+
+
+

cd …​ - command will change from current directory to the specified directory with git repository

+
+
+

git fetch …​ ;git reset …​ ;git clean …​ - pull from GIT branch. Git pull or checkout are not used here to prevent the situation with wrong coding between Mac OSX/Linux etc.

+
+
+

PLEASE remember that when using this script for the first time, the code from previous block should be changed to:

+
+
+
+
stage("ssh-agent"){
+        sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+            withCredentials([usernamePassword(credentialsId: env.STASH_CREDENTIALS, passwordVariable: 'PASS', usernameVariable: 'USER')]) {
+                sh """
+                        ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "cd ~/${env.APPLICATION_DIRECTORY_WIREMOCK} ;git clone --depth=1 --branch=develop https://$USER:$PASS@${env.GIT_WITHOUT_HTTPS}"
+                """
+    }
+}
+
+
+
+
+
+

Install an application with Smoke environment

+
+ +
+
+
+

Update properties settings file

+
+
+

New settings file is pushed to the repository. Example configuration:

+
+
+
+
...
+   <key>autocomplete</key>
+   <string>http://server:port</string>
+   <key>benefitsummary</key>
+   <string>http://server:port</string>
+   <key>checkscan</key>
+   <string>http://server:port</string>
+   <key>dpesb</key>
+   <string>http://server:port</string>
+...
+
+
+
+

Address of service (backend) should be changed to wiremock address as it is shown on listing to change the default route.

+
+
+
+
+

Build an application with updated properties file

+
+
+

New versions of application are prepared by Jenkins job.

+
+
+
+
+

Install an application on target properties file

+
+
+

Installation of an application is actually executed in a non-automated way using SeeTest environment.

+
+
+
+
+

UI tests

+
+ +
+
+
+

Run Jenkins job

+
+
+

Jenkinsfile:

+
+
+
+
// Jenkins parameters are overriding the properties below
+def properties = [
+
+          JENKINS_LABELS                                 : 'PWI_LINUX_DEV',
+          APPLICATION_FOLDER                             : 'app_dir',
+          PROJECT_HOME                                   : 'app_home_folder',
+
+          //WIREMOCK
+          WIREMOCK_CREDENTIALS                           : 'vc2crptXXXXXXn',
+          WIREMOCK_USERNAME                              : 'wiremock',
+          WIREMOCK_ADDRESS                               : 'http://vc2crptXXXXXXn.xxx.com:8080',
+          WIREMOCK_IP_ADDRESS                            : '10.196.67.XXX',
+          WIREMOCK_CONTAINER_NAME                        : 'wiremock',
+          APPLICATION_DIRECTORY_WIREMOCK                 : 'repo',
+
+          //GIT
+          GIT_CREDENTIALS                                : 'e47742cc-bb66-4321-2341-a2342er24f2',
+          GIT_BRANCH                                     : 'develop',
+          GIT_SSH                                        : 'ssh://git@stash.xxx.com/app/app.git'
+          GIT_HTTPS                                      : 'HTTPS://git@stash.xxx.com/app/app.git',
+
+          STASH_CREDENTIALS                              : 'e47742cc-bb66-4321-2341-a2342er24f2',
+
+
+          //DOCKER
+          ARTIFACTORY_USER_CREDENTIALS                   : 'e47742cc-bb66-4321-2341-a2342er24f2',
+          SEETEST_DOCKER_IMAGE                           : 'docker.xxx.com/project/images/app:v1-8.3',
+
+          //SEETEST_DOCKER_IMAGE
+          SEETEST_APPLICATION_FOLDER                     : 'seetest_dir',
+          SEETEST_PROJECT_HOME                           : 'Automated Scripts',
+          SEETEST_GIT_SSH                                : 'ssh://git@stash.xxx.com/pr/seetest_automation_cucumber.git'
+          SEETEST_GIT_BRANCH                             : 'develop',
+          SEETEST_GRID_USER_CREDENTIALS                  : 'e47742cc-bb66-4321-2341-a2342er24f2',
+          SEETEST_CUCUMBER_TAG                           : '@Virtualization',
+          SEETEST_CLOUD_NAME                             : 'Core Group',
+          SEETEST_IOS_VERSION                            : '11',
+          SEETEST_IOS_APP_URL                            : '',
+          SEETEST_INSTALL_APP                            : 'No',
+          SEETEST_APP_ENVIRONMENT                        : 'SmokeTests',
+          SEETEST_DEVICE_QUERY                           : '',
+]
+
+node(properties.JENKINS_LABELS) {
+    try {
+        prepareEnv(properties)
+        gitCheckout()
+        stageStartVirtualServer()
+        stageMapApiRequests()
+        stageInstallApplication()
+        stageUITests()
+     } catch(Exception ex) {
+        currentBuild.result = 'FAILURE'
+        error = 'Error' + ex
+     }
+}
+
+//== == == == == == == == == == == == == == == == == == END OF PIPELINE== == == == == == == == == == == == == == == == == == == == ==
+
+private void prepareEnv(properties) {
+    cleanWorkspace()
+    overrideProperties(properties)
+    setWorkspace()
+}
+
+private void gitCheckout() {
+    dir(env.APPLICATION_FOLDER) {
+        checkout([$class: 'GitSCM', branches: [[name: env.GIT_BRANCH]], doGenerateSubmoduleConfiguration: false, extensions: [[$class: 'CloneOption', depth: 0, noTags: false, reference: '', shallow: false, timeout: 50]], gitTool: 'Default', submoduleCfg: [], userRemoteConfigs: [[credentialsId: env.GIT_CREDENTIALS, url: env.GIT_SSH]])
+     }
+}
+
+private void stageStartVirtualServer() {
+    def module = load "${env.SUBMODULES_DIR}/stageStartVirtualServer.groovy"
+    module()
+}
+
+private void stageMapApiRequests() {
+    def module = load "${env.SUBMODULES_DIR}/stageMapApiRequests.groovy"
+    module()
+}
+
+private void stageInstallApplication() {
+    def module = load "${env.SUBMODULES_DIR}/stageInstallApplication.groovy"
+    module()
+}
+
+private void stageUITests() {
+    def module = load "${env.SUBMODULES_DIR}/stageUITests.groovy"
+    module()
+}
+
+private void setWorkspace() {
+    String workspace = pwd()
+    env.APPLICATION_DIRECTORY = "/${env.APPLICATION_DIRECTORY}"
+    env.WORKSPACE_LOCAL - workspace + env.APPLICATION_DIRECTORY
+    env.SEETEST_PROJECT_HOME_ABSOLute_PATH = "${workspace}/${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}"
+    env.SUBMODULES_DIR = env.WORKSPACE_LOCAL + "/pipelines/SmokeTests.submodules"
+    env.COMMONS_DIR    = env.WORKSPACE_LOCAL + "/pipelines/commons"
+}
+
+/*
+    function ovverrides env vales based on provided properties
+*/
+private void overrideProperties(properties) {
+    for (param in properties) {
+        if (env.(param.key) ==  null) {
+           echo "Adding parameter '${param.key}' with default value: '$param.value}'"
+           env.(param.key) = param.value
+        } else {
+           echo "Parameter '${param.key}' has overriden value: '${env.(param.key)}'"
+        }
+     }
+
+     echo sh(script: "env | sort", returnStdout: true)
+}
+
+private void cleanWorkspace() {
+   sh 'rm-rf *'
+}
+
+
+
+

stageStartVirtualServer.groovy:

+
+
+
+
def call () {
+    stage("Check virtual server") {
+        def statusCode
+
+        try {
+            def response = httpRequest "${env.WIREMOCK_ADDRESS}/__admin/"
+            statusCode = response.status
+        } catch(Exception ex) {
+            currentBuild.result = 'FAILURE'
+            error 'WireMock server os unreachable.'
+        }
+
+        if(statusCode !=200) {
+            currentBuild.result = 'FAILURE'
+            error 'WireMock server is unreachable. Return code: ${statusCode}'
+        }
+    }
+}
+
+
+
+

stageMapApiRequests.groovy:

+
+
+
+
def call() {
+    stage("Map API requests with virtual assets") {
+        checkoutRepository()
+        restartWiremock()
+        checkWiremockStatus()
+     }
+}
+
+private checkoutRepository() {
+    extractHTTPSUrl()
+    sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+        withCredentials([usernamePassword(credentialsId: env.STASH_CREDENTIALS, passwordVariable: 'PASS', usernameVariable: 'USER')]) {
+            sh """
+                ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "cd~/${env.APPLICATION_DIRECTORY_WIREMOCK}/${env.PROJECT_HOME}; git fetch https://$USER:$PASS@${env.GIT_WITHOUT_HTTPS} ${env.GIT_BRANCH}; git reset --hard FETCH_HEAD; git clean -df"
+             """
+         }
+     }
+}
+
+private restartWiremock() {
+    sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+            sh """
+                ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "docker container restart ${env.WIREMOCK_CONTAINER_NAME}"
+             """
+     }
+}
+
+private checkWiremockStatus() {
+    int wiremockStatusCheckCounter =6
+    int sleepTimeInSeconds = 10
+    def wiremockStatus
+
+    for (i = 0; i < wiremockStatusCheckCounter; i++) {
+         try {
+             wiremockStatus = getHttpRequestStatus()
+             echo "WireMock server status code: ${wiremockStatus}"
+         } catch(Exceprion ex) {
+             echo "Exception when checking connection to WireMock"
+         }
+         if(wiremockStatus ==  200) break
+         else sh "sleep $(sleepTimeInSeconds}"
+      }
+
+      if(wiremockStatus != 200) {
+          currentBuild.result = 'FAILURE'
+          error 'WireMock server is unreachable. Return code: ${wiremockStatus}'
+      }
+}
+
+private def getHttpRequestStatus() {
+    def response = httpRequest "${env.WIREMOCK_ADDRESS}/__admin"
+    return response.status
+
+private extractHTTPSUrl() {
+    env.GIT_WITHOUT_HTTPS = env.GIT_HTTPS.replace("https://", "")
+}
+
+return this
+
+
+
+

stageInstallApplication.groovy:

+
+
+
+
def call() {
+    stage('Install application with smoke tests environment') {
+        dir(env.SEETEST_APPLICATION_FOLDER) {
+            checkout([$class: 'GitSCM', branches: [[name: env.SEETEST_GIT_BRANCH]], doGenerateSubmoduleConfigurations: false, extensions: [], gitTool: 'default', submoduleCfg: [], userRemoteConfigs: [[credentialsId: env.GIT_CREDENTIALS, url: env.SEETEST_GIT_SSH]])
+        }
+     }
+}
+
+return this
+
+
+
+

stageUITests.groovy:

+
+
+
+
def call() {
+    stage('UI tests') {
+        def utils = load "${env.SUBMODULES_DIR}/utils.groovy"
+
+        try {
+            utils.generateUserIDVariable(); //Generate USER_ID and USER_GROUP
+            docker.image(env.SEETEST_DOCKER_IMAGE).inside("-u ${env.USER_ID}:${env.USER_GROUP}") {
+                withCredentials([[$class: 'UsernamePasswordMultiBinding', credentialsId: "${env.ARTIFACTORY_USER_CREDENTIALS}", passwordVariable: 'ARTIFACTORY_PASSWORD', usernameVariable: 'ARTIFACTORY_USERNAME]]) {
+                    executeTests()
+                    compressArtifacts()
+                    publishJUnitTestResultReport()
+                    archiveArtifacts()
+                    publishHTMLReports()
+                    publishCucumberReports()
+                 }
+             }
+        } catch (Exception exc) {
+            throw exc
+        }
+   }
+}
+
+private executeTests() {
+    withCredentials([usernamePassword(credentialsId: env.SEETEST_GRID_USER_CREDENTIALS, passwordVariable: 'GRID_USER_PASSWORD', usernameVariable: 'GRID_USER_NAME')]) {
+            sh """
+                cd ${env.SEETEST_PROJECT_HOME_ABSOLUTE_PATH}
+                mvn clean test -B -Ddriver="grid" -Dtags="${env.SEETEST_CUCUMBER_TAG}" -DcloudName="${env.SEETEST_CLOUD_NAME}" -DdeviceQuery="${env.SEETEST_DEVICE_QUERY} -DgridUser="${GRID_USER_NAME}" -DgridPassword="${GRID_USER_PASSWORD}" -Dinstall="${env.SEETEST_INSTALL_APP}" -DiosUrl="${env.SEETEST_IOS_APP_URL}" -DdeviceType="iPhone" -DiosVersion="$env.SEETEST_IOS_VERSION}" -DparallelMode="allonall" -Denv="${env.SEETEST_APP_ENVIRONMENT}" site
+             """
+     }
+}
+
+private compressartifacts() {
+    echo "Compressing artifacts from /target/site"
+    sh """
+        zip -r allure_report.zip **/${env.SEETEST_PROJECT_homE}/target/site
+    """
+
+private publishJUnitTestResultReport() {
+    echo "Publishing JUnit reports from ${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/surefire-reports/junitreporters/*.xml"
+
+    try {
+        junit "${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/surefire-reports/junitreporters/*.xml"
+    } catch(e) {
+        echo("No JUnit report found")
+    }
+}
+
+private archiveArtifacts() {
+    echo "Archiving artifacts"
+
+    try {
+        archiveArtifacts allowEmptyArchive: true, artifacts: "**/allure_report.zip"
+    } catch(e) {
+        echo("No artifacts found")
+    }
+}
+
+private publishHTMLReports() {
+    echo "Publishing HTML reports from ${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/site/allure-maven-plugin"
+
+    try {
+        publishHTML([allowMissing: false, alwaysLinkToLastBuild: true, keepAll: true, reportDir: "${env.SEETEST_APPLICATION_FOLDER/${env.SEETEST_PROJECT_HOME}/target/site/allure-maven-plugin", reportFiles: 'index.html', reportName: 'Allure report', reportTitles: 'Allure report'])
+    } catch(e) {
+        echo("No artifacts found")
+    }
+}
+
+private publishCucumberREPORTS() {
+    echo "Publishing Cucumber reports from ${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/cucumber-parallel/*.json"
+
+    try {
+        step([$class: 'CucumberReportPublisher', fileExcludePattern '', fileIncludePattern: "#{env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/cucumber-parallel/*.json", ignoreFailedTests: false, jenkinsBasePath: '', jsonReportDirectory: '', missingFails: false, parallelTesting: false, pendingFails: false, skippedFails: false, undefinedFails: false])
+    } catch(e) {
+        echo("No Cucumber report found")
+    }
+}
+
+return this
+
+
+
+

Configuration

+
+
+

It is possible to configure Jenkins job in two ways. First one is to edit the Jenkinsfile. All of the properties are in properties collection as below:

+
+
+
+
def properties = [
+
+          JENKINS_LABELS                                : 'PWI_LINUX_DEV'
+
+          ...
+
+          //Docker
+          ARTIFACTORY_USER_CREDENTIALS                  : 'ba2e4f46-56f1-4467-ae97-17b356d6s643',
+          SEETEST_DOCKER_IMAGE                          : 'docker.XXX.com/app/base-images/seetest:v1-8.3',
+
+          //SeeTest
+          SEETEST_APPLICATION_FOLDER                    : 'seetest_dit',
+          SEETEST_PROJECT_HOME                          : 'Automated_Scripts',
+          SEETEST_GIT_SSH                               : 'ssh://stash.xxx.com/app/seetest_automation_cucumber.git',
+          SEETEST_GIT_BRANCH                            : 'develop',
+
+          ...
+]
+
+
+
+

The second way is to add properties in 'Configure job'. All of the properties there override the properties from the Jenkinsfile (they have the highest priority). They can then be set during the 'Build with Parameters' process.

+
+
+

Reports

+
+
+

After a job execution, the 'Allure report' and 'Cucumber-JVM' reports should be visible. If any tests fail, you can check on which screen they failed and why (a screenshot of each failure is attached).

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module-What-is-service-virtualization.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module-What-is-service-virtualization.html new file mode 100644 index 00000000..a085d6c6 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module-What-is-service-virtualization.html @@ -0,0 +1,351 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Is it doable to keep pace in QA with today’s software agile approach?

+
+
+

DevOps + Microservices + Shift left + Time to Market == ? Service virtualization ?

+
+
+
+image72 +
+
+
+

Test pyramid

+
+
+
+image73 +
+
+
+
+
+

What is service virtualization

+
+
+

Service Virtualization has become recognized as one of the best ways to speed up testing and accelerate your time to market.

+
+
+

Service virtualization lets you automatically execute tests even when the application under test’s dependent system components (APIs, third-party applications, etc.) cannot be properly accessed or configured for testing. By simulating these dependencies, you can ensure that your tests will encounter the appropriate dependency behaviour and data each and every time that they execute.

+
+
+

Service virtualization is the simulation of interfaces – not the virtualization of systems.

+
+
+

According to Wikipedia’s service virtualization entry: Service virtualization emulates the behaviour of software components to remove dependency constraints on development and testing teams. Such constraints occur in complex, interdependent environments when a component connected to the application under test is:

+
+
+
    +
  • +

    Not yet completed

    +
  • +
  • +

    Still evolving

    +
  • +
  • +

    Controlled by a third-party or partner

    +
  • +
  • +

    Available for testing only in a limited capacity or at inconvenient times

    +
  • +
  • +

    Difficult to provision or configure in a test environment

    +
  • +
  • +

    Needed for simultaneous access by different teams with varied test data setup and other requirements

    +
  • +
  • +

    Restricted or costly to use for load and performance testing

    +
  • +
+
+
+

For instance, instead of virtualizing an entire database (and performing all associated test data management as well as setting up the database for every test session), you monitor how the application interacts with the database, then you emulate the related database behaviour (the SQL queries that are passed to the database, the corresponding result sets that are returned, and so forth).

+
+
+
+
+

Mocks, stubs and virtual services

+
+
+

The most commonly discussed categories of test doubles are mocks, stubs and virtual services.

+
+
+

Stub: a minimal implementation of an interface that normally returns hardcoded data that is tightly coupled to the test suite. It is most useful when the suite of tests is simple and keeping the hardcoded data in the stub is not an issue. Some stubs are handwritten; some can be generated by tools. A stub is normally written by a developer for personal use. It can be shared with testers, but wider sharing is typically limited by interoperability issues related to software platform and deployment infrastructure dependencies that were hardcoded. A common practice is when a stub works in-process directly with classes, methods, and functions for the unit, module, and acceptance testing. Some developers will say that a stub can also be primed, but you cannot verify an invocation on a stub. Stubs can also be communicating "over the wire", for example, HTTP, but some would argue that they should be called virtual services in that case.

+
+
+

Mock: a programmable interface observer, that verifies outputs against expectations defined by the test. It is frequently created using a third party library, for example in Java that is Mockito, JMock or WireMock. It is most useful when you have a large suite of tests and a stub will not be sufficient because each test needs a different data set up and maintaining them in a stub would be costly. The mock lets us keep the data set-up in the test. A mock is normally written by a developer for personal use but it can be shared with testers. However, wider sharing is typically limited by interoperability issues related to software platform and deployment infrastructure dependencies that were hardcoded. They are most often work-in-progress directly with classes, methods, and functions for a unit, module, and acceptance testing. Mock provides responses based on a given request satisfying predefined criteria (also called request or parameter matching). A mock also focuses on interactions rather than state so mocks are usually stateful. For example, you can verify how many times a given method was called or the order of calls made to a given object.

+
+
+

Virtual service: a test double often provided as a Software-as-a-Service (SaaS), is always called remotely, and is never working in-process directly with methods or functions. A virtual service is often created by recording traffic using one of the service virtualization platforms instead of building the interaction pattern from scratch based on interface or API documentation. A virtual service can be used to establish a common ground for teams to communicate and facilitate artefact sharing with other development teams as well as testing teams. A virtual service is called remotely (over HTTP, TCP, etc.) normally supports multiple protocols (e.g. HTTP, MQ, TCP, etc.), while a stub or mock normally supports only one. Sometimes virtual services will require users to authorize, especially when deployed in environments with enterprise-wide visibility. Service virtualization tools used to create virtual services will most often have user interfaces that allow less tech-savvy software testers to hit the ground running, before diving into the details of how specific protocols work. They are sometimes backed by a database. They can also simulate non-functional characteristics of systems such as response times or slow connections. You can sometimes find virtual services that provide a set of stubbed responses for given request criteria and pass every other request to a live backend system (partial stubbing). Similar to mocks, virtual services can have quite complex request matchers, that allow having one response returned for many different types of requests. Sometimes, virtual services simulate system behaviours by constructing parts of the response based on request attributes and data.

+
+
+

It is often difficult to say definitely which of the following categories a test double fits into. They should be treated as a spectrum rather than strict definitions.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module.html new file mode 100644 index 00000000..c2a8ae3b --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module.html @@ -0,0 +1,292 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ + +
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Stages.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Stages.html new file mode 100644 index 00000000..7121a6e9 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Test-Stages.html @@ -0,0 +1,313 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Test stages

+
+ +
+
+
+

Unit test

+
+
+

A module is the smallest compilable unit of source code. It is often too small to be tested by the functional tests (black-box tests). However, it is the appropriate candidate for white-box testing. White-box tests have to be performed as the first static tests (e.g. Lint and inspections), followed by dynamic tests in order to check boundaries, branches and paths. Usually, this kind of testing would require enabling stubs and special test tools.

+
+
+
+
+

Component test

+
+
+

This is the black-box test of modules or groups of modules which represent certain functionalities. There are no rules about what could be called a component. Whatever a tester defines as a component, should make sense and be a testable unit. Components can be integrated into bigger components step by step and tested as such.

+
+
+
+
+

Integration test

+
+
+

Functions are tested by feeding them input and examining the output, and internal program structure is rarely considered. The software is completed step by step and tested by tests covering a collaboration between modules or classes. The integration depends on the kind of system. For example, the steps could be as follows: run the operating system first and gradually add one component after another, then check if the black-box tests are still running (the test cases will be extended together with every added component). The integration is done in the laboratory. It may be also completed by using simulators or emulators. Additionally, the input signals could be stimulated.

+
+
+
+
+

Software / System test

+
+
+

System testing is a type of testing conducted on a complete integrated system to evaluate the system’s compliance with its specified requirements. This is a type of black-box testing of the complete software in the target system. The most important factor in successful system testing is that the environmental conditions for the software have to be as realistic as possible (complete original hardware in the destination environment).

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-01-AB-Test-Control.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-01-AB-Test-Control.html new file mode 100644 index 00000000..a00dc9f4 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-01-AB-Test-Control.html @@ -0,0 +1,609 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example1 +
+
+
+

The goal of this test is to open A/B Test subpage and redirect to another website.

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click A/B Testing link and go to A/B Test subpage

    +
  4. +
  5. +

    Click Elemental Selenium link and open it in new tab

    +
  6. +
  7. +

    Switch to Elemental Selenium page and check if it’s loaded

    +
  8. +
+
+
+
+example2 +
+
+
+

== Page Class

+
+
+

Create a Page class for AB Testing page. Override all the required methods:

+
+
+
+
 public class ABtestPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.ABTEST.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'A/B Test Control' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.ABTEST.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+ }
+
+
+
+
+
+

== How to use Enum?

+
+
+

Similarly to the environment variables case, create an enum for storing the subURL values:

+
+
+
+
 public enum PageSubURLsProjectYEnum {
+
+    BASIC_AUTH("basic_auth"),
+    NEW_WINDOW("windows/new"),
+    WINDOW("windows"),
+    CHECKBOX("checkboxes"),
+    CONTEXT_MENU("context_menu"),
+    KEY_PRESS("key_presses"),
+    DYNAMIC_CONTENT("dynamic_content"),
+    HOVERS("hovers"),
+    SORTABLE_DATA_TABLES("tables"),
+    REDIRECT("redirector"),
+    JAVASCRIPT_ALERTS("javascript_alerts"),
+    CHALLENGING_DOM("challenging_dom"),
+    STATUS_CODES("status_codes"),
+    LOGIN("login"),
+    ABTEST("abtest"),
+    BROKEN_IMAGES("broken_images"),
+    DROPDOWN("dropdown"),
+    HORIZONTAL_SLIDER("horizontal_slider"),
+    DOWNLOAD("download"),
+    FORGOT_PASSWORD("forgot_password"),
+    FORGOT_PASSWORD_EMAIL_SENT("email_sent"),
+    EXIT_INTENT("exit_intent"),
+    DYNAMIC_LOADING("dynamic_loading"),
+    DISAPPEARING_ELEMENTS("disappearing_elements"),
+    DRAG_AND_DROP("drag_and_drop"),
+    DYNAMIC_CONTROLS("dynamic_controls"),
+    UPLOAD("upload"),
+    FLOATING_MENU("floating_menu"),
+    FRAMES("frames"),
+    GEOLOCATION("geolocation"),
+    INFINITE_SCROLL("infinite_scroll"),
+    JQUERY_UI("jqueryui/menu"),
+    JAVASCRIPT_ERROR("javascript_error"),
+    LARGE_AND_DEEP_DOM("large"),
+    NESTED_FRAMES("nested_frames"),
+    NOTIFICATION_MESSAGE("notification_message"),
+    DOWNLOAD_SECURE("download_secure"),
+    SHIFTING_CONTENT("shifting_content"),
+    SLOW_RESOURCES("slow"),
+    TYPOS("typos"),
+    WYSIWYGEDITOR("tinymce");
+
+    /*
+     * Sub URLs are used as real locations in the test environment
+     */
+    private String subURL;
+
+    private PageSubURLsProjectYEnum(String subURL) {
+        this.subURL = subURL;
+    }
+
+    ;
+
+    private PageSubURLsProjectYEnum() {
+
+    }
+
+    @Override
+    public String toString() {
+        return getValue();
+    }
+
+    public String getValue() {
+        return subURL;
+    }
+
+}
+
+
+
+

Instead of mapping data from an external file, you can store and access them directly from the enum class:

+
+
+
+
PageSubURLsProjectYEnum.ABTEST.getValue()
+
+
+
+
+
+

== Selector

+
+
+

In this test case, you need a selector for only one page element:

+
+
+
+
private static final By elementalSeleniumLinkSelector = By.cssSelector("div > div > a");
+
+
+
+
+
+

== Page methods

+
+
+

You need two methods for performing page actions:

+
+
+
+
     /**
+     * Clicks 'Elemental Selenium' link at the bottom of the page.
+     *
+     * @return ElementalSeleniumPage object.
+     */
+    public ElementalSeleniumPage clickElementalSeleniumLink() {
+        getDriver().findElementDynamic(elementalSeleniumLinkSelector)
+                .click();
+        getDriver().waitForPageLoaded();
+        return new ElementalSeleniumPage();
+    }
+
+    /**
+     * Switches window to the next one - different than the current.
+     */
+    public void switchToNextTab() {
+        ArrayList<String> tabsList = new ArrayList<String>(getDriver().getWindowHandles());
+        getDriver().switchTo()
+                .window(tabsList.get(1));
+    }
+
+
+
+
+
+

== Elemental Selenium Page Class

+
+
+

To return new Elemental Selenium Page object, implement its class. You only need to write basic methods to check if the page is loaded. There is no need to interact with objects on the site:

+
+
+
+
 public class ElementalSeleniumPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(GetEnvironmentParam.ELEMENTAL_SELENIUM_PAGE.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Elemental Selenium' page.");
+        getDriver().get(GetEnvironmentParam.ELEMENTAL_SELENIUM_PAGE.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+}
+
+
+
+
+
+

== Test Class

+
+
+

Create a Test class and write a @Test method to execute the scenario:

+
+
+
+
 @Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class ABtestingTest extends TheInternetBaseTest {
+
+    private static ABtestPage abTestPage;
+
+    @Test
+    public void shouldOpenElementalSeleniumPageWhenClickElementalSeleniumLink() {
+
+        logStep("Click Elemental Selenium link");
+        ElementalSeleniumPage elementalSeleniumPage = abTestPage.clickElementalSeleniumLink();
+
+        logStep("Switch browser's tab to newly opened one");
+        abTestPage.switchToNextTab();
+
+        logStep("Verify if Elemental Selenium Page is opened");
+        assertTrue("Unable to open Elemental Selenium page", elementalSeleniumPage.isLoaded());
+    }
+
+}
+
+
+
+
+
+

== Assert

+
+
+

Assert methods are used to create test pass or fail conditions. The optional first parameter is a message which will be displayed in the test failure description.

+
+
+
    +
  • +

    assertTrue(boolean condition) - test passes if condition returns true

    +
  • +
  • +

    assertFalse(boolean condition) - test passes if condition returns false

    +
  • +
+
+
+

Also, add the @BeforeClass method to open the tested page:

+
+
+
+
 @BeforeClass
+    public static void setUpBeforeClass() {
+        abTestPage = shouldTheInternetPageBeOpened().clickABtestingLink();
+        logStep("Verify if ABTest page is opened");
+        assertTrue("Unable to open ABTest page", abTestPage.isLoaded());
+    }
+
+
+
+

The @BeforeClass method executes only once, before all other @Test cases in the class. There is also a possibility to create an @AfterClass method, which is likewise performed only once, after all @Test cases.

+
+
+

You don’t need to implement @setUp and @tearDown methods because they’re already in TheInternetBaseTest class which you extend.

+
+
+
+
+

== Categories

+
+
+

You can group tests in categories. It’s useful when running many tests at once. Use this parameter:

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+
+
+
+

Then create an interface representing each category. Example:

+
+
+
+
public interface TestsSelenium {
+    /* For test which are testing web pages considering UI (user interface) and using selenium webdriver */
+}
+
+
+
+

To run tests from a specified category, create a Test Suite class:

+
+
+
+
@RunWith(WildcardPatternSuite.class) //search for test files under /src/test/java
+@IncludeCategories({ TestsChrome.class }) // search all test files with category TestsChrome.class
+@ExcludeCategories({ TestsLocal.class, TestsNONParallel.class }) //exclude all test files with category TestsLocal.class and TestsNONParallel.class
+@SuiteClasses({ "../**/*Test.class" }) //search only test files, where file name ends with <anyChar/s>Test.class
+
+public class _TestSuiteChrome {
+
+}
+
+
+
+

You can run a Test Suite as a JUnit test.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-02-Basic-Auth-Test.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-02-Basic-Auth-Test.html new file mode 100644 index 00000000..51fcedfc --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-02-Basic-Auth-Test.html @@ -0,0 +1,516 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example3 +
+
+
+

In this test case, the goal is to pass username and password authorization and login to the next page.

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click on Basic Auth link

    +
  4. +
  5. +

    Open pop-up login window

    +
  6. +
  7. +

    Enter valid username and password

    +
  8. +
  9. +

    Open next subpage and verify if the user logged in successfully.

    +
  10. +
+
+
+

== Page Class

+
+
+

Create a page class which represents Basic Auth subpage after proper login.

+
+
+
+example4 +
+
+
+

Override all the required methods:

+
+
+
+
public class BasicAuthPage extends BasePage {
+
+    public BasicAuthPage() {
+
+    }
+
+    public BasicAuthPage(String login, String password) {
+        this.enterLoginAndPasswordByUrl(login, password);
+    }
+
+    @Override
+    public boolean isLoaded() {
+        return true;
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("load");
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+
+
+

In order to verify a login, create a selector to access the visible message.

+
+
+
+
 private static final By selectorTextMessage = By.cssSelector("#content > div > p");
+Then create a method to get message value:
+
+/**
+*       Returns message displayed by system after user's log in.
+*      @return String object representing message displayed by system after user's log in
+*/
+    public String getMessageValue() {
+                return getDriver().findElementDynamic(selectorTextMessage)
+                    .getText();
+}
+
+
+
+

Also, create a method to access the pop-up login window and enter user credentials:

+
+
+
+
    /**
+     * Authenticates user using standard simple authentication popup.
+     *
+     * @param login    User's login
+     * @param password User's password
+     * @throws AWTException
+     * @throws InterruptedException
+     */
+    public void enterLoginAndPassword(String login, String password) throws AWTException, InterruptedException {
+        Robot rb = new Robot();
+
+        Thread.sleep(2000);
+
+        StringSelection username = new StringSelection(login);
+        Toolkit.getDefaultToolkit()
+                .getSystemClipboard()
+                .setContents(username, null);
+        rb.keyPress(KeyEvent.VK_CONTROL);
+        rb.keyPress(KeyEvent.VK_V);
+        rb.keyRelease(KeyEvent.VK_V);
+        rb.keyRelease(KeyEvent.VK_CONTROL);
+
+        rb.keyPress(KeyEvent.VK_TAB);
+        rb.keyRelease(KeyEvent.VK_TAB);
+        Thread.sleep(2000);
+
+        StringSelection pwd = new StringSelection(password);
+        Toolkit.getDefaultToolkit()
+                .getSystemClipboard()
+                .setContents(pwd, null);
+        rb.keyPress(KeyEvent.VK_CONTROL);
+        rb.keyPress(KeyEvent.VK_V);
+        rb.keyRelease(KeyEvent.VK_V);
+        rb.keyRelease(KeyEvent.VK_CONTROL);
+
+        rb.keyPress(KeyEvent.VK_ENTER);
+        rb.keyRelease(KeyEvent.VK_ENTER);
+        Thread.sleep(2000);
+    }
+
+
+
+
+
+

== Robot class

+
+
+

Creating a Robot object allows performing basic system actions such as pressing keys, moving the mouse or taking screenshots. In this case, it’s used to paste login and password text from the clipboard using 'Ctrl + V' shortcut, go to the next field using 'Tab' key and submit by clicking 'Enter'.

+
+
+
+
+

Toolkit

+
+
+

Static class Toolkit can perform basic window actions such as scrolling to a specified position or moving context between components. In this case, it’s used to set clipboard content to username and password value.

+
+
+
+
Thread.sleep(long millis)
+
+
+
+

Web drivers like Selenium perform actions much faster than a normal user. This may cause unexpected consequences, e.g. some elements may not be loaded before the driver wants to access them. To avoid this problem you can use Thread.sleep(long millis) to wait a given amount of time and let the browser load the wanted component.

+
+
+

BEWARE: Using Thread.sleep(long millis) is not the recommended approach. Selenium driver gives methods to wait for a specified element to be enabled or visible with a timeout parameter. This is a more stable and effective way. Also, method waitForPageLoaded() will not solve that issue because it only waits for the ready state from the browser while some javascript actions might be performed after that.

+
+
+
+
+

== Test Class

+
+
+

Create a Test class and write a @Test method to execute the scenario. Save parameters as class fields:

+
+
+
+
@Category({ TestsLocal.class, TestsNONParallel.class })
+public class BasicAuthTest extends TheInternetBaseTest {
+
+    private static BasicAuthPage basicAuthPage;
+
+    private String login    = "admin";
+    private String password = "admin";
+    private String message  = "Congratulations! You must have the proper credentials.";
+
+    @Test
+    public void shouldUserLogInWithValidCredentials() throws InterruptedException, AWTException {
+        basicAuthPage = shouldTheInternetPageBeOpened().clickBasicAuthLink();
+
+        logStep("Enter login and password");
+        basicAuthPage.enterLoginAndPassword(login, password);
+
+        logStep("Verify if user logged in successfully");
+        assertEquals("Unable to login user with valid credentials", message,
+            basicAuthPage.getMessageValue());
+    }
+
+    @Override
+    public void tearDown() {
+        logStep("Navigate back to The-Internet page");
+        theInternetPage.load();
+    }
+}
+
+
+
+

assertEquals(Object expected, Object actual) - test passes if parameters are equal.

+
+
+
+
+

== Alternative scenario:

+
+
+

There is also a possibility to log in with credentials as a part of URL: http://login:password@the-internet.herokuapp.com/basic_auth

+
+
+

Another page class method:

+
+
+
+
/**
+     * Authenticates user passing credentials into URL.
+     *
+     * @param login    User's login
+     * @param password User's password
+     */
+    private void enterLoginAndPasswordByUrl(String login, String password) {
+        getDriver().get("http://" + login + ":" + password + "@" + "the-internet.herokuapp.com/" +
+            PageSubURLsProjectYEnum.BASIC_AUTH.getValue());
+    }
+
+
+
+

Another test class method:

+
+
+
+
@Test
+    public void shouldUserLogInWithValidCredentialsSetInURL() {
+        logStep("Enter user's credentials into URL to log in");
+        basicAuthPage = new BasicAuthPage(login, password);
+
+        logStep("Verify if user logged in successfully");
+        assertEquals("Unable to login user with valid credentials", message,
+            basicAuthPage.getMessageValue());
+    }
+
+
+
+

After running test class as a JUnit test, both test cases will be performed.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-03-Broken-Images-Test.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-03-Broken-Images-Test.html new file mode 100644 index 00000000..2d3b270c --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-03-Broken-Images-Test.html @@ -0,0 +1,396 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

The goal of this test is to check the dimensions of broken images on the subpage.

+
+
+
+example5 +
+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Broken Image link and go to Broken Image subpage

    +
  4. +
  5. +

    Get the 3 images' dimensions and compare them with expected values

    +
  6. +
+
+
+

== Page Class

+
+
+

In this case, create an array of selectors to access images by index number:

+
+
+
+
public class BrokenImagePage extends BasePage {
+
+    private static final By[] selectorsImages = { By.cssSelector("div > img:nth-child(2)"),
+            By.cssSelector("div > img:nth-child(3)"),
+            By.cssSelector("div > img:nth-child(4)") };
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.BROKEN_IMAGES.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Broken Images' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.BROKEN_IMAGES.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns an image height in pixels.
+     *
+     * @param imageIndex An index of given image.
+     * @return Height of an image in pixels.
+     */
+    public int getImageHeight(int imageIndex) {
+        return getImageDimension(imageIndex).getHeight();
+    }
+
+    /**
+     * Returns an image width in pixels.
+     *
+     * @param imageIndex An index of given image.
+     * @return Width of an image in pixels.
+     */
+    public int getImageWidth(int imageIndex) {
+        return getImageDimension(imageIndex).getWidth();
+    }
+
+    private Dimension getImageDimension(int imageIndex) {
+        return getDriver().findElementDynamic(selectorsImages[imageIndex])
+                .getSize();
+    }
+
+}
+
+
+
+
+
+

== Test Class

+
+
+

Create @Test and @BeforeClass methods. Save expected images' dimensions in class fields:

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class BrokenImagesTest extends TheInternetBaseTest {
+
+    private static BrokenImagePage brokenImagePage;
+
+    private final int expectedHeight = 90;
+    private final int expectedWidth  = 120;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        brokenImagePage = shouldTheInternetPageBeOpened().clickBrokenImageLink();
+
+        logStep("Verify if Broken Image page is opened");
+        assertTrue("Unable to open Broken Image page", brokenImagePage.isLoaded());
+    }
+
+    @Test
+    public void shouldImageSizesBeEqualToExpected() {
+        for (int i = 0; i < 3; i++) {
+            logStep("Verify size of image with index: " + i);
+            assertEquals("Height of image with index: " + i + " is incorrect", expectedHeight,
+                   brokenImagePage.getImageHeight(i));
+            assertEquals("Width of image with index: " + i + " is incorrect", expectedWidth,
+                   brokenImagePage.getImageWidth(i));
+        }
+    }
+
+}
+
+
+
+

The test will pass if every image has the correct width and height.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-04-Challenging-DOM-Test.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-04-Challenging-DOM-Test.html new file mode 100644 index 00000000..81f4e0ce --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-04-Challenging-DOM-Test.html @@ -0,0 +1,419 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

The goal of this case is to find out how to create stable selectors.

+
+
+

In the browser’s developer mode, you can see how the page is built. Notice that the buttons' IDs change after a click, and that the values in the table don’t have unique attributes which would be helpful in order to find them.

+
+
+
+example6 +
+
+
+

== DOM - Document Object Model

+
+
+

HTML DOM is a model of the page created by the browser. The page could be represented as the tree of objects. Read more.

+
+
+

To create locators you can use element attributes such as id, class name etc.

+
+
+

In this case, since there are no unique attributes, the best approach is to use the HTML document structure and identify page elements by their place in the object hierarchy.

+
+
+
+
Page Class
+public class ChallengingDomPage extends BasePage {
+
+    private final By selectorTableRows   = By.cssSelector(".large-10 > table > tbody > tr");
+    private final By selectorFirstButton = By.cssSelector(".large-2.columns > .button:nth-
+            child(1)");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.CHALLENGING_DOM.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Challenging DOM' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.CHALLENGING_DOM.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns table text content as a list of String objects.
+     *
+     * @return A list of table values.
+     */
+    public List<String> getTableValues() {
+        return JsoupHelper.findTexts(selectorTableRows);
+    }
+
+    /**
+     * Clicks top button on the page from available button set.
+     */
+    public void clickFirstButton() {
+        getDriver().elementButton(selectorFirstButton)
+                .click();
+        getDriver().waitForPageLoaded();
+    }
+
+}
+
+
+
+
+
+

== Jsoup Helper

+
+
+

Jsoup Helper is the tool which helps to parse HTML document and get searched values. This is especially useful when values are organized in a generic structure such as a table.

+
+
+

JsoupHelper.findTexts(By selector) - this method returns text content of a table as a list of Strings

+
+
+
+
+

== Test Class

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Challenging DOM link and go to Challenging DOM subpage

    +
  4. +
  5. +

    Get and save table values

    +
  6. +
  7. +

    Click the first button

    +
  8. +
  9. +

    Get table values again

    +
  10. +
  11. +

    Compare table values before and after button click

    +
  12. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class ChallengingDomTest extends TheInternetBaseTest {
+
+    private static ChallengingDomPage challengingDomPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        challengingDomPage = shouldTheInternetPageBeOpened().clickChallengingDomLink();
+
+        logStep("Verify if Challenging Dom page is opened");
+        assertTrue("Unable to open Challenging Dom page", challengingDomPage.isLoaded());
+    }
+
+    @Test
+    public void shouldValuesInTableCellsStayUnchangedAfterClick() {
+
+        logStep("Get table values (before click any button)");
+        List<String> tableValuesBeforeClick = challengingDomPage.getTableValues();
+
+        logStep("Click first button");
+        challengingDomPage.clickFirstButton();
+
+        logStep("Get table values (after click first button)");
+        List<String> tableValuesAfterClick = challengingDomPage.getTableValues();
+
+        logStep("Verify equality of table values before and after click");
+        assertEquals("Values from table cells were changed after click", tableValuesBeforeClick,
+                tableValuesAfterClick);
+    }
+
+}
+
+
+
+

Because values in the table don’t change, the test should pass if object locators are solid.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-05-Checkboxes.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-05-Checkboxes.html new file mode 100644 index 00000000..6ee0d73c --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-05-Checkboxes.html @@ -0,0 +1,445 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

In this example, you will learn how to test checkboxes on the page.

+
+
+
+example7 +
+
+
+

A checkbox is a simple web element which can be selected or unselected by clicking on it.

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Checkboxes link and go to Checkboxes page

    +
  4. +
  5. +

    Test if the first checkbox is unchecked

    +
  6. +
  7. +

    Select the first checkbox

    +
  8. +
  9. +

    Test if the first checkbox is checked

    +
  10. +
  11. +

    Test if the second checkbox is checked

    +
  12. +
  13. +

    Unselect second checkbox

    +
  14. +
  15. +

    Test if the second checkbox is unchecked

    +
  16. +
+
+
+

== Page Class

+
+
+

Because both checkboxes are in one form, it’s possible to locate them by one selector and then access each individual one by index.

+
+
+
+example8 +
+
+
+
+
public class CheckboxesPage extends BasePage {
+
+    private final static By checkboxesFormSelector = By.cssSelector("#checkboxes");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.CHECKBOX.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Checkboxes' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.CHECKBOX.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Verifies if checkbox form is visible on the page.
+     *
+     * @return true if checkboxes are present and displayed on the page
+     */
+    public boolean isElementCheckboxesFormVisible() {
+        return getDriver().elementCheckbox(checkboxesFormSelector)
+                .isDisplayed();
+    }
+
+    /**
+     * Verifies if given checkbox is selected or not.
+     *
+     * @param index The index of given checkbox
+     * @return true if given checkbox is selected
+     */
+    public boolean isCheckboxSelected(int index) {
+        return getDriver().elementCheckbox(checkboxesFormSelector)
+                .isCheckBoxSetByIndex(index);
+    }
+
+    /**
+     * Selects given checkbox. Unselects, if it is already selected.
+     *
+     * @param index The index of given checkbox
+     */
+    public void selectCheckbox(int index) {
+        CheckBox checkbox = getDriver().elementCheckbox(checkboxesFormSelector);
+        if (isCheckboxSelected(index)) {
+            checkbox.unsetCheckBoxByIndex(index);
+        } else {
+            checkbox.setCheckBoxByIndex(index);
+        }
+    }
+
+}
+
+
+
+
+
+

== CheckBox

+
+
+

CheckBox class contains a method to perform actions on checkboxes such as setting and unsetting or verifying if the specified box is checked. +Use method elementCheckbox(By selector) to create CheckBox Object.

+
+
+
+
+

== Test Class

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class CheckboxesTest extends TheInternetBaseTest {
+
+    private static CheckboxesPage checkboxesPage;
+
+    @Override
+    public void setUp() {
+        checkboxesPage = shouldTheInternetPageBeOpened().clickCheckboxesLink();
+
+        logStep("Verify if Checkboxes page is opened");
+        assertTrue("Unable to open Checkboxes page", checkboxesPage.isLoaded());
+    }
+
+    @Test
+    public void shouldCheckboxBeSelectedAfterClick() {
+
+        logStep("Verify if first checkbox is not selected");
+        assertFalse("The checkbox is selected", checkboxesPage.isCheckboxSelected(0));
+
+        logStep("Select first checkbox");
+        checkboxesPage.selectCheckbox(0);
+
+        logStep("Verify if first checkbox is selected");
+        assertTrue("The checkbox is not selected", checkboxesPage.isCheckboxSelected(0));
+    }
+
+    @Test
+    public void shouldCheckboxBeUnselectedAfterClick() {
+
+        logStep("Verify if second checkbox is selected");
+        assertTrue("The checkbox is not selected", checkboxesPage.isCheckboxSelected(1));
+
+        logStep("Select second checkbox");
+        checkboxesPage.selectCheckbox(1);
+
+        logStep("Verify if second checkbox is not selected");
+        assertFalse("The checkbox is selected", checkboxesPage.isCheckboxSelected(1));
+    }
+
+}
+
+
+
+

After running the Test Class, both @Test cases will be performed. Before each one, the overridden setUp method will be executed.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-06-Disappearing-Elements.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-06-Disappearing-Elements.html new file mode 100644 index 00000000..ce51195b --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-06-Disappearing-Elements.html @@ -0,0 +1,480 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

This case will show how to test changing website content.

+
+
+
+example9 +
+
+
+

After refreshing page (F5) a few times, a new element should appear:

+
+
+
+example10 +
+
+
+

Then, after another couple of refreshes, it should disappear.

+
+
+

You can check in developer mode that the Gallery element does not exist in the HTML document until it appears on the page. The element is created by JavaScript.

+
+
+
+example11 +
+
+
+
+example12 +
+
+
+

Steps:

+
+
+
    +
  1. +

    Load The Internet Main Page

    +
  2. +
  3. +

    Click Disappearing Elements link and go to that subpage

    +
  4. +
  5. +

    Check if Menu Buttons exist on the page

    +
  6. +
  7. +

    Refresh the page until a new element appears

    +
  8. +
  9. +

    Check if Gallery Button exists

    +
  10. +
  11. +

    Check if the number of buttons equals the expected value

    +
  12. +
  13. +

    Refresh the page until an element disappears

    +
  14. +
  15. +

    Check if Gallery Button does not exist

    +
  16. +
  17. +

    Check if the number of buttons is smaller than before

    +
  18. +
+
+
+

== Page Class

+
+
+
+
public class DisappearingElementsPage extends BasePage {
+
+    private static final By selectorGalleryMenuButton = By.cssSelector("li > a[href*=gallery]");
+    private static final By selectorMenuButtons       = By.cssSelector("li");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DISAPPEARING_ELEMENTS.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Disappearing Elements' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DISAPPEARING_ELEMENTS.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns a number of WebElements representing menu buttons.
+     *
+     * @return A number of WebElements.
+     */
+    public int getNumberOfMenuButtons() {
+        return getDriver().findElementDynamics(selectorMenuButtons)
+                .size();
+    }
+
+    /**
+     * Returns WebElement representing disappearing element of menu.
+     *
+     * @return Disappearing WebElement if visible, null otherwise.
+     */
+    public WebElement getGalleryMenuElement() {
+        return getDriver().findElementQuietly(selectorGalleryMenuButton);
+    }
+
+    /**
+     * Refreshes web page as many times as it is required to appear/disappear menu button
+     * WebElement.
+     *
+     * @param shouldAppear Determines if element should appear (true) or disappear (false).
+     */
+    public void refreshPageUntilWebElementAppears(boolean shouldAppear) {
+        int numberOfAttempts = 5;
+        int counter = 0;
+        while (!isVisibilityAsExpected(shouldAppear) || isMaxNumberOfAttemptsReached(counter++,
+                numberOfAttempts)) {
+            refreshPage();
+        }
+    }
+
+    /**
+     * Verify if visibility of Gallery button is the same as expected
+     *
+     * @param expected Determines if element should be visible (true) or not visible (false).
+     */
+    private boolean isVisibilityAsExpected(boolean expected) {
+        boolean isVisibilityDifferentThanExpected = isGalleryMenuElementVisible() ^ expected;
+        return !isVisibilityDifferentThanExpected;
+    }
+
+    private boolean isGalleryMenuElementVisible() {
+        boolean result = false;
+        WebElement gallery = getGalleryMenuElement();
+        if (gallery != null)
+            result = gallery.isDisplayed();
+        return result;
+    }
+
+    private boolean isMaxNumberOfAttemptsReached(int attemptNo, int maxNumberOfAttempts) {
+        return attemptNo ==  maxNumberOfAttempts;
+    }
+
+}
+
+
+
+

findElementQuietly(By selector) works similarly to findElementDynamics(By selector) but won’t throw an exception if an element wasn’t found. In this case, the searched WebElement will have a NULL value.

+
+
+
+
+

== Test Class

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class DisappearingElementsTest extends TheInternetBaseTest {
+
+    private static final int totalNumberOfMenuButtons = 5;
+    private static DisappearingElementsPage disappearingElementsPage;
+    private static       int numberOfMenuButtons      = 0;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        disappearingElementsPage = shouldTheInternetPageBeOpened().clickDisappearingElementsLink();
+
+        logStep("Verify if Disappearing Elements page is opened");
+        assertTrue("Unable to open Disappearing Elements page",
+                disappearingElementsPage.isLoaded());
+
+        logStep("Verify if menu button elements are visible");
+        numberOfMenuButtons = disappearingElementsPage.getNumberOfMenuButtons();
+        assertTrue("Unable to display menu", numberOfMenuButtons > 0);
+    }
+
+    @Test
+    public void shouldMenuButtonElementAppearAndDisappearAfterRefreshTest() {
+        logStep("Click refresh button until menu button appears");
+        disappearingElementsPage.refreshPageUntilWebElementAppears(true);
+
+        logStep("Verify if menu button element appeared");
+        assertNotNull("Unable to disappear menu button element",
+                disappearingElementsPage.getGalleryMenuElement());
+        assertEquals("The number of button elements after refresh is incorrect",
+                totalNumberOfMenuButtons, disappearingElementsPage.getNumberOfMenuButtons());
+
+        logStep("Click refresh button until menu button disappears");
+        disappearingElementsPage.refreshPageUntilWebElementAppears(false);
+
+        logStep("Verify if menu button element disappeared");
+        assertNull("Unable to appear menu button element",
+                disappearingElementsPage.getGalleryMenuElement());
+        assertTrue("The number of button elements after refresh is incorrect",
+                totalNumberOfMenuButtons > disappearingElementsPage.getNumberOfMenuButtons());
+    }
+
+}
+
+
+
+

assertNull(Object object) - test passes if Object is NULL +assertNotNull(Object object) - test passes if Object is not NULL

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-07-Drag-and-Drop.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-07-Drag-and-Drop.html new file mode 100644 index 00000000..52e91e81 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-07-Drag-and-Drop.html @@ -0,0 +1,588 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

This case shows how to move draggable elements on the page. +image::images/example13.png[]

+
+
+

Try to move A to B position and see what happens. Also, open browser developer mode and see how the DOM changes.

+
+
+
+example14 +
+
+
+

The page can easily be broken. You can try to do so and check how the page structure changed in browser developer mode.

+
+
+
+example15 +
+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Drag and Drop link and open subpage

    +
  4. +
  5. +

    Check if the Drag and Drop message is visible

    +
  6. +
  7. +

    Check if element A is in container A and B in container B

    +
  8. +
  9. +

    Move element A to position B

    +
  10. +
  11. +

    Check if element A is in container B and B in container A

    +
  12. +
  13. +

    Move element B to position A

    +
  14. +
  15. +

    Again check if element A is in container A and B in container B

    +
  16. +
+
+
+

== Page Class

+
+
+
+
public class DragAndDropPage extends BasePage {
+
+    private static final By selectorDragAndDropText    = By.cssSelector("div#content h3");
+    private static final By selectorAElementContainer  = By.cssSelector("div#column-a");
+    private static final By selectorBElementContainer  = By.cssSelector("div#column-b");
+    private static final By selectorDescriptionElement = By.cssSelector("header");
+
+    private static final String dndHelperPath = "src/test/resources/js/drag_and_drop_helper.js";
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DRAG_AND_DROP.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Drag and Drop' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() + PageSubURLsProjectYEnum.DRAG_AND_DROP.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns information if drag and drop message is visible or not.
+     *
+     * @return true if exit drag and drop message was found on web page.
+     */
+    public boolean isDragAndDropMessageVisible() {
+        return getDriver().findElementDynamic(selectorDragAndDropText)
+                .isDisplayed();
+    }
+
+    /**
+     * Verifies if specified element is placed in designated container.
+     *
+     * @param element WebElement to be verified.
+     * @return true if element described as A exists in container A or element B exists in container B, false otherwise.
+     */
+    public boolean isElementPlacedInCorrectContainer(String element) {
+        return getDescriptionElement(findElementByDescription(element)).getText()
+                .equals(element);
+    }
+
+    private WebElement findElementByDescription(String element) {
+        WebElement result;
+        switch (element) {
+            case "A":
+                result = getContainerElement(selectorAElementContainer);
+                break;
+            case "B":
+                result = getContainerElement(selectorBElementContainer);
+                break;
+            default:
+                result = null;
+                BFLogger.logDebug("Chosen element doesn't exist on web page");
+        }
+        return result;
+    }
+
+    private WebElement getContainerElement(By container) {
+        return getDriver().findElementDynamic(container);
+    }
+
+    private WebElement getDescriptionElement(WebElement container) {
+        return container.findElement(selectorDescriptionElement);
+    }
+
+    /**
+     * Drags element to designated container and drops it.
+     *
+     * @param element         String describing WebElement expected to be dragged.
+     * @param from            String describing WebElement representing container of element expected to be dragged.
+     * @param destinationDesc String describing WebElement representing destination container where other element will be dragged.
+     */
+    public void dragElementToPosition(String element, String from, String destinationDesc) {
+        WebElement source = findElementByDescription(from);
+        WebElement description = getDescriptionElement(source);
+        WebElement destination = findElementByDescription(destinationDesc);
+        if (description.getText()
+                .equals(element))
+            dragElement(source, destination);
+    }
+
+}
+
+
+
+

Since HTML5, normal Selenium drag-and-drop action stopped working, thus it’s necessary to execute Javascript which performs the drag-and-drop. To do so, create a JavascriptExecutor object, then read the script from a file drag_and_drop_helper.js and execute it with additional arguments using method executeScript(String script).

+
+
+

An example drag-and-drop solution:

+
+
+
+
    /**
+     * Drags and drops given WebElement to it's destination location.
+     * <p>
+     * Since HTML5 all Selenium Actions performing drag and drop operations stopped working as expected, e.g.
+     * original implementation, which was:
+     * <code>
+     * BasePage.getAction()
+     * .clickAndHold(draggable)
+     * .moveToElement(target)
+     * .release()
+     * .build()
+     * .perform();
+     * </code>
+     * finishes with no effect. For this reason, there is javaScript function used, to make sure that
+     * drag and drop operation will be successful.
+     * JavaScript function is stored under the following path: 'src/test/resources/js/drag_and_drop_helper.js'.
+     * Original source of the script:
+     * <a href="https://gist.github.com/rcorreia/2362544">drag_and_drop_helper</a>
+     * </p>
+     *
+     * @param draggable A WebElement to be dragged and dropped.
+     * @param target    A destination, where element will be dropped.
+     * @see JavascriptExecutor
+     * @see Actions
+     */
+    private void dragElement(WebElement draggable, WebElement target) {
+        JavascriptExecutor js;
+        INewWebDriver driver = getDriver();
+        List<String> fileContent;
+        String draggableId = draggable.getAttribute("id");
+        String targetId = target.getAttribute("id");
+        String script = null;
+        if (draggable.getAttribute("draggable")
+                .contains("true")) {
+            if (driver instanceof JavascriptExecutor) {
+                js = (JavascriptExecutor) driver;
+                Path path = Paths.get(dndHelperPath);
+                try {
+                    fileContent = Files.readAllLines(path);
+                    script = fileContent.stream()
+                            .collect(Collectors.joining());
+                } catch (IOException e) {
+                    BFLogger.logDebug("Unable to read file content: " + e.getMessage());
+                }
+                if (script != null && !script.isEmpty()) {
+                    String arguments = "$('#%s').simulateDragDrop({ dropTarget: '#%s'});";
+                    js.executeScript(script + String.format(arguments, draggableId, targetId));
+                }
+            }
+        }
+    }
+
+
+
+

Drag and Drop helper file:

+
+
+
+
(function( $ ) {
+        $.fn.simulateDragDrop = function(options) {
+                return this.each(function() {
+                        new $.simulateDragDrop(this, options);
+                });
+        };
+        $.simulateDragDrop = function(elem, options) {
+                this.options = options;
+                this.simulateEvent(elem, options);
+        };
+        $.extend($.simulateDragDrop.prototype, {
+                simulateEvent: function(elem, options) {
+                        /*Simulating drag start*/
+                        var type = 'dragstart';
+                        var event = this.createEvent(type);
+                        this.dispatchEvent(elem, type, event);
+
+                        /*Simulating drop*/
+                        type = 'drop';
+                        var dropEvent = this.createEvent(type, {});
+                        dropEvent.dataTransfer = event.dataTransfer;
+                        this.dispatchEvent($(options.dropTarget)[0], type, dropEvent);
+
+                        /*Simulating drag end*/
+                        type = 'dragend';
+                        var dragEndEvent = this.createEvent(type, {});
+                        dragEndEvent.dataTransfer = event.dataTransfer;
+                        this.dispatchEvent(elem, type, dragEndEvent);
+                },
+                createEvent: function(type) {
+                        var event = document.createEvent("CustomEvent");
+                        event.initCustomEvent(type, true, true, null);
+                        event.dataTransfer = {
+                                data: {
+                                },
+                                setData: function(type, val){
+                                        this.data[type] = val;
+                                },
+                                getData: function(type){
+                                        return this.data[type];
+                                }
+                        };
+                        return event;
+                },
+                dispatchEvent: function(elem, type, event) {
+                        if(elem.dispatchEvent) {
+                                elem.dispatchEvent(event);
+                        }else if( elem.fireEvent ) {
+                                elem.fireEvent("on"+type, event);
+                        }
+                }
+        });
+})(jQuery);
+
+
+
+
+
+

== Test Class

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class DragAndDropTest extends TheInternetBaseTest {
+
+    private static final String ELEMENT_A   = "A";
+    private static final String CONTAINER_A = "A";
+    private static final String ELEMENT_B   = "B";
+    private static final String CONTAINER_B = "B";
+
+    private static DragAndDropPage dragAndDropPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        dragAndDropPage = shouldTheInternetPageBeOpened().clickDragAndDropLink();
+
+        logStep("Verify if Drag And Drop page is opened");
+        assertTrue("Unable to open Drag And Drop page", dragAndDropPage.isLoaded());
+
+        logStep("Verify if Drag And Drop message is visible");
+        assertTrue("Drag And Drop message is not visible", dragAndDropPage.isDragAndDropMessageVisible());
+    }
+
+    @Test
+    public void shouldDraggableElementBeMovedAndDropped() {
+        logStep("Verify if elements are placed in proper containers");
+        assertTrue("Element A doesn't exist in container A", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_A));
+        assertTrue("Element B doesn't exist in container B", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_B));
+
+        logStep("Step 7: Drag and drop element A into container B");
+        dragAndDropPage.dragElementToPosition(ELEMENT_A, CONTAINER_A, CONTAINER_B);
+
+        logStep("Step 8: Verify if elements are placed in improper containers");
+        assertFalse("Element A doesn't exist in container B", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_A));
+        assertFalse("Element B doesn't exist in container A", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_B));
+
+        logStep("Drag and drop element A back into container A");
+        dragAndDropPage.dragElementToPosition(ELEMENT_A, CONTAINER_B, CONTAINER_A);
+
+        logStep("Verify if elements are placed in proper containers");
+        assertTrue("Element A doesn't exist in container A", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_A));
+        assertTrue("Element B doesn't exist in container B", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_B));
+    }
+
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-08-Dropdown-List.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-08-Dropdown-List.html new file mode 100644 index 00000000..6e9ac469 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-08-Dropdown-List.html @@ -0,0 +1,418 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

This example shows how to select an element from the dropdown list.

+
+
+
+example16 +
+
+
+

Check in the developer mode how a Dropdown List’s content has been organized.

+
+
+
+example17 +
+
+
+

Notice that the Dropdown Options have different attributes, such as "disabled" or "selected".

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click the Dropdown link and go to the subpage

    +
  4. +
  5. +

    Select first dropdown Option

    +
  6. +
  7. +

    Check if Option 1 is selected

    +
  8. +
  9. +

    Select second dropdown Option

    +
  10. +
  11. +

    Check if Option 2 is selected

    +
  12. +
+
+
+

== Page Class

+
+
+
+
public class DropdownPage extends BasePage {
+
+    private static final By dropdownListSelector = By.cssSelector("#dropdown");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DROPDOWN.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Dropdown List' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DROPDOWN.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Selects dropdown's value by given index.
+     *
+     * @param index Index of option to be selected
+     */
+    public void selectDropdownValueByIndex(int index) {
+        getDriver().elementDropdownList(dropdownListSelector)
+                .selectDropdownByIndex(index);
+    }
+
+    /**
+     * Returns text value of first selected dropdown's option.
+     *
+     * @return String object representing value of dropdown's option
+     */
+    public String getSelectedDropdownValue() {
+        return getDriver().elementDropdownList(dropdownListSelector)
+                .getFirstSelectedOptionText();
+    }
+}
+
+
+
+
+
+

== DropdownListElement class

+
+
+

DropdownListElement is MrChecker’s class, which contains methods for performing the dropdown list of actions:

+
+
+
+
elementDropdownList() - returns DropdownListElement Object
+
+
+
+
+
+

== Test Class

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class DropdownTest extends TheInternetBaseTest {
+
+    private static final String expectedFirstOptionValue  = "Option 1";
+    private static final String expectedSecondOptionValue = "Option 2";
+    private static DropdownPage dropdownPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        dropdownPage = shouldTheInternetPageBeOpened().clickDropdownLink();
+
+        logStep("Verify if Dropdown page is opened");
+        assertTrue("Unable to open Dropdown page", dropdownPage.isLoaded());
+    }
+
+    @Test
+    public void shouldGetExpectedDropdownTextOptionAfterSelection() {
+
+        logStep("Select first dropdown option");
+        dropdownPage.selectDropdownValueByIndex(1);
+
+        logStep("Verify if selected option text is equal to the expected one");
+        assertEquals("Selected value is different than expected", expectedFirstOptionValue,
+                dropdownPage.getSelectedDropdownValue());
+
+        logStep("Select second dropdown option");
+        dropdownPage.selectDropdownValueByIndex(2);
+
+        logStep("Verify if selected option text is equal to the expected one");
+        assertEquals("Selected value is different than expected", expectedSecondOptionValue,
+                dropdownPage.getSelectedDropdownValue());
+    }
+
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-09-Dynamic-Content.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-09-Dynamic-Content.html new file mode 100644 index 00000000..90db6e8d --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-09-Dynamic-Content.html @@ -0,0 +1,448 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

This case shows how to compare dynamic content.

+
+
+
+example18 +
+
+
+

Note that after site refresh, some of the content is different. You can see in the browser’s developer mode how the text and image sources are being changed.

+
+
+
+example19 +
+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Dynamic Content link and load subpage

    +
  4. +
  5. +

    Save page images sources and descriptions before the refresh

    +
  6. +
  7. +

    Refresh page

    +
  8. +
  9. +

    Save page image sources and their descriptions after refresh

    +
  10. +
  11. +

    Compare page content before and after refresh and verify if it’s different

    +
  12. +
+
+
+

== Page Class

+
+
+
+
public class DynamicContentPage extends BasePage {
+
+    private static final By imagesLinksSelector        = By.cssSelector("div#content > div.row img");
+    private static final By imagesDescriptionsSelector = By.cssSelector("div#content > div.row div.large-10");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DYNAMIC_CONTENT.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Dynamic Content' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DYNAMIC_CONTENT.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns list of picture descriptions being present on the web page.
+     *
+     * @return List of String objects representing descriptions
+     */
+    public List<String> getDescriptions() {
+        return new ListElements(imagesDescriptionsSelector).getTextList();
+    }
+
+    /**
+     * Returns a list of image links being present on the web page.
+     *
+     * @return List of String objects representing paths to pictures
+     */
+    public List<String> getImageLinks() {
+        return new ListElements(imagesLinksSelector)
+                .getList()
+                .stream()
+                .map(element -> element.getAttribute("src"))
+                .collect(Collectors.toList());
+    }
+}
+
+
+
+
+
+

== ListElements

+
+
+

ListElements is MrChecker collection which can store WebElement Objects. Constructing ListElements with cssSelector allows you to store every element on the page which fits the selector. Example methods:

+
+
+
+
getList() -  returns WebElements list,
+getTextList() - returns list of contents of each Element,
+getSize() - returns number of stored Elements
+In getImageLinks() example it's shown how to get a list of specified Elements' attributes.
+
+
+
+
+
+

== Test Class

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class DynamicContentTest extends TheInternetBaseTest {
+
+    private static DynamicContentPage dynamicContentPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        dynamicContentPage = shouldTheInternetPageBeOpened().clickDynamicContentLink();
+
+        logStep("Verify if Dynamic Content page is opened");
+        assertTrue("Unable to open Dynamic Content page", dynamicContentPage.isLoaded());
+    }
+
+    @Test
+    public void shouldImagesAndDescriptionsDifferAfterRefresh() {
+
+        logStep("Read images and descriptions before refresh");
+        List<String> descriptionsBeforeRefresh = dynamicContentPage.getDescriptions();
+        List<String> imagesBeforeRefresh = dynamicContentPage.getImageLinks();
+
+        logStep("Refresh page");
+        dynamicContentPage.refreshPage();
+        assertTrue("The Dynamic Content page hasn't been refreshed", dynamicContentPage.isLoaded());
+
+        logStep("Read images and descriptions after refresh");
+        List<String> descriptionsAfterRefresh = dynamicContentPage.getDescriptions();
+        List<String> imagesAfterRefresh = dynamicContentPage.getImageLinks();
+
+        logStep("Verify if descriptions are different after refresh");
+        assertEquals("Different number of descriptions before and after refresh",
+                descriptionsAfterRefresh.size(), descriptionsBeforeRefresh.size());
+
+        boolean diversity = false;
+        for (int i = 0; i < descriptionsAfterRefresh.size(); i++) {
+            if (!descriptionsAfterRefresh.get(i)
+                    .equals(descriptionsBeforeRefresh.get(i))) {
+                diversity = true;
+                break;
+            }
+        }
+        assertTrue("There are no differences between descriptions before and after refresh",
+                diversity);
+
+        logStep("Verify if images are different after refresh");
+        assertEquals("Different number of descriptions before and after refresh",
+                imagesAfterRefresh.size(), imagesBeforeRefresh.size());
+
+        diversity = false;
+        for (int i = 0; i < imagesAfterRefresh.size(); i++) {
+            if (!imagesAfterRefresh.get(i)
+                    .equals(imagesBeforeRefresh.get(i))) {
+                diversity = true;
+                break;
+            }
+        }
+        assertTrue("There are no differences between images before and after refresh", diversity);
+    }
+}
+
+
+
+

In the test method, during differences verification, the goal is to compare every element from the first and second list and find first diversity.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-10-Dynamically-loaded-elements.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-10-Dynamically-loaded-elements.html new file mode 100644 index 00000000..c981011f --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-10-Dynamically-loaded-elements.html @@ -0,0 +1,683 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

This example shows how to test a page with dynamically loading content. Some elements don’t load during page loading, but during JavaScript execution.

+
+
+
+example23 +
+
+
+

Go to Example 1:

+
+
+
+example24 +
+
+
+

Click "start" and see what happens:

+
+
+
+example25 +
+
+
+

When loading ends, you should see the following message:

+
+
+
+example26 +
+
+
+

In the developer mode, you can see that the element with the "Hello World!" message exists in page DOM but it’s not displayed. However, the loading bar does not exist there - it’s created by JavaScript. The script is also visible in developer mode:

+
+
+
+example27 +
+
+
+

After clicking the "Start" button, the element "Loading" is created by the script, and the "Start" button becomes invisible. When loading ends, "Hello World" message is displayed and the loading bar is hidden. Follow the changes the in developer mode:

+
+
+
+example28 +
+
+
+

Go to example 2: +From a user perspective, there is no difference in page functioning. However, in this case the element with the "Hello World!" message does not exist on the page before clicking "Start". It’s created by the script.

+
+
+
+example29 +
+
+
+

After clicking "Start", the element with the loading bar is been created.

+
+
+
+example30 +
+
+
+

After a certain time, the loading bar becomes invisible, and then the script creates "Hello World!" element and displays it.

+
+
+
+example31 +
+
+
+

== Page Class

+
+
+
+
public class DynamicLoadingPage extends BasePage {
+
+    private static final By selectorExampleOneLink     =
+            By.cssSelector("a[href*='dynamic_loading/1']");
+    private static final By selectorExampleTwoLink     =
+            By.cssSelector("a[href*='dynamic_loading/2']");
+    private static final By selectorDynamicLoadingText = By.cssSelector("div#content h3");
+    private static final By selectorStartButton        = By.cssSelector("div#start button");
+    private static final By selectorLoadingBar         = By.cssSelector("div#loading");
+    private static final By selectorExampleText        = By.cssSelector("div#finish h4");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DYNAMIC_LOADING.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Dynamically Loaded Page Elements' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DYNAMIC_LOADING.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns information if dynamic loading message is visible or not.
+     *
+     * @return true if dynamic loading message was found on web page.
+     */
+    public boolean isDynamicLoadingMessageVisible() {
+        return getDriver().findElementDynamic(selectorDynamicLoadingText)
+                .isDisplayed();
+    }
+
+    /**
+     * Clicks Example 1 link.
+     */
+    public void clickExampleOneLink() {
+        getDriver().findElementDynamic(selectorExampleOneLink)
+                .click();
+    }
+
+    /**
+     * Clicks Example 2 link.
+     */
+    public void clickExampleTwoLink() {
+        getDriver().findElementDynamic(selectorExampleTwoLink)
+                .click();
+    }
+
+    /**
+     * Returns information if Start button is visible or not.
+     *
+     * @return true if Start button was found on web page.
+     */
+    public boolean isStartButtonVisible() {
+        return getDriver().findElementDynamic(selectorStartButton)
+                .isDisplayed();
+    }
+
+    /**
+     * Clicks Start button.
+     */
+    public void clickStartButton() {
+        getDriver().findElementDynamic(selectorStartButton)
+                .click();
+    }
+
+    /**
+     * Waits until WebElement representing waiting bar disappears and returns example text.
+     *
+     * @param waitTime The amount of time designated for waiting until waiting bar disappears.
+     * @return String representing example's text.
+     */
+    public String getExampleOneDynamicText(int waitTime) {
+        WebDriverWait wait = new WebDriverWait(getDriver(), waitTime);
+        wait.until((Function<? super WebDriver, Boolean>)
+                ExpectedConditions.invisibilityOfElementLocated(selectorLoadingBar));
+        return getDriver().findElementDynamic(selectorExampleText)
+                .getText();
+    }
+
+    /**
+     * Returns example text.
+     * <p>
+     * Waits until WebElement representing waiting bar disappear. Then waits until example text
+     * shows up.
+     * And after that returns example text.
+     * </p>
+     *
+     * @param waitTime The amount of time designated for waiting until waiting bar disappears and
+     * example text shows.
+     * @return String representing example's text.
+     */
+    public String getExampleTwoDynamicText(int waitTime) {
+        WebDriverWait wait = new WebDriverWait(getDriver(), waitTime);
+        wait.until((Function<? super WebDriver, Boolean>)
+                ExpectedConditions.invisibilityOfElementLocated(selectorLoadingBar));
+        wait.until((Function<? super WebDriver, WebElement>)
+                ExpectedConditions.visibilityOfElementLocated(selectorExampleText));
+        return getDriver().findElementDynamic(selectorExampleText)
+                .getText();
+    }
+
+}
+
+
+
+
+
+

== WebDriverWait

+
+
+

This class performs waiting for actions using Selenium Web Driver:

+
+
+
    +
  • +

    WebDriverWait(WebDriver driver, long timeOutInSeconds) - constructor, first parameter takes WebDriver, in a second you can specify a timeout in seconds. +FluentWait method:

    +
  • +
  • +

    until(Function<? super T, V> isTrue) - waits until condition function given as parameter returns expected value. If waiting time reaches timeout, it throws timeoutException.

    +
  • +
+
+
+

MrChecker implements various condition functions in the ExpectedConditions class :

+
+
+
    +
  • +

    visibilityOfElementLocated(By selector) - returns WebElement if it’s visible

    +
  • +
  • +

    invisibilityOfElementLocated(By selector) - returns true if Element under given selector is invisible

    +
  • +
+
+
+

WebDriver also has methods which wait for some conditions:

+
+
+
    +
  • +

    waitForElement(By selector)

    +
  • +
  • +

    waitForElementVisible(By selector)

    +
  • +
  • +

    waitUntilElementClickable(By selector)

    +
  • +
+
+
+

It’s possible to write your own condition function e.g.:

+
+
+
+
  public static ExpectedCondition<Boolean> invisibilityOfElementLocated(final By locator) {
+    return new ExpectedCondition<Boolean>() {
+      @Override
+      public Boolean apply(WebDriver driver) {
+        try {
+          return !(findElement(locator, driver).isDisplayed());
+        } catch (NoSuchElementException e) {
+          return true;
+        } catch (StaleElementReferenceException e) {
+          return true;
+        }
+      }
+    };
+  }
+
+
+
+

Or as a lambda expression:

+
+
+
+
        WebDriverWait wait = new WebDriverWait(getDriver(), waitTime);
+        wait.until((WebDriver driver) -> {
+            try {
+                return !(driver.findElement(selectorExampleText)
+                        .isDisplayed());
+            } catch (NoSuchElementException e) {
+                return true;
+            } catch (StaleElementReferenceException e) {
+                return true;
+            }
+        });
+
+
+
+
+
+

== Test Class

+
+
+

Case 1 steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Dynamic Loading link and go to a subpage with examples

    +
  4. +
  5. +

    Check if the page is loaded and "Dynamically Loaded Page Elements" header is visible

    +
  6. +
  7. +

    Click Example 1 link and load site

    +
  8. +
  9. +

    Verify if the "Start" button is visible

    +
  10. +
  11. +

    Click "Start"

    +
  12. +
  13. +

    Wait for the loading bar to disappear and check if the displayed message is as it should be

    +
  14. +
  15. +

    Go back to Dynamic Loading page

    +
  16. +
+
+
+

Case 2 steps:

+
+
+
    +
  1. +

    Check if the page is loaded and "Dynamically Loaded Page Elements" header is visible

    +
  2. +
  3. +

    Click Example 2 link and load site

    +
  4. +
  5. +

    Verify if the "Start" button is visible

    +
  6. +
  7. +

    Click "Start"

    +
  8. +
  9. +

    Wait for the loading bar to disappear

    +
  10. +
  11. +

    Wait for the message to appear and check if it is as it should be

    +
  12. +
  13. +

    Go back to Dynamic Loading page

    +
  14. +
+
+
+
+
+@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class DynamicLoadingTest extends TheInternetBaseTest {
+
+    // Maximum time (in seconds) to wait for the dynamically loaded text to appear.
+    private static final int    EXAMPLE_WAITING_TIME = 30;
+    // Text expected on the page once the loading bar disappears.
+    private static final String EXAMPLE_TEXT         = "Hello World!";
+
+    private static DynamicLoadingPage dynamicLoadingPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        // Open The Internet main page once and navigate to the Dynamic Loading subpage.
+        dynamicLoadingPage = shouldTheInternetPageBeOpened().clickDynamicLoadingLink();
+    }
+
+    // Runs before each test: the browser must be back on the Dynamic Loading overview.
+    @Override
+    public void setUp() {
+
+        logStep("Verify if Dynamic Loading page is opened");
+        assertTrue("Unable to open Dynamic Loading page", dynamicLoadingPage.isLoaded());
+
+        logStep("Verify if dynamic loading message is visible");
+        assertTrue("Dynamic loading message is invisible",
+                dynamicLoadingPage.isDynamicLoadingMessageVisible());
+    }
+
+    // Method name typo fixed: "Ater" -> "After". JUnit discovers @Test methods
+    // reflectively, so the rename cannot break any caller.
+    @Test
+    public void shouldExampleTextBeDisplayedAfterRunExampleOne() {
+        logStep("Click Example 1 link");
+        dynamicLoadingPage.clickExampleOneLink();
+
+        logStep("Verify if Example 1 link opened content");
+        assertTrue("Fail to load Example 1 content", dynamicLoadingPage.isStartButtonVisible());
+
+        logStep("Click Start button");
+        dynamicLoadingPage.clickStartButton();
+
+        logStep("Verify if expected text is displayed on the screen");
+        assertEquals("Fail to display example text", EXAMPLE_TEXT,
+                dynamicLoadingPage.getExampleOneDynamicText(EXAMPLE_WAITING_TIME));
+    }
+
+    // Method name typo fixed: "Ater" -> "After" (see note above the first test).
+    @Test
+    public void shouldExampleTextBeDisplayedAfterRunExampleTwo() {
+        logStep("Click Example 2 link");
+        dynamicLoadingPage.clickExampleTwoLink();
+
+        logStep("Verify if Example 2 link opened content");
+        assertTrue("Fail to load Example 2 content", dynamicLoadingPage.isStartButtonVisible());
+
+        logStep("Click Start button");
+        dynamicLoadingPage.clickStartButton();
+
+        logStep("Verify if expected text is displayed on the screen");
+        assertEquals("Fail to display example text", EXAMPLE_TEXT,
+                dynamicLoadingPage.getExampleTwoDynamicText(EXAMPLE_WAITING_TIME));
+    }
+
+    // Runs after each test: navigate back so the next test starts from the overview page.
+    @Override
+    public void tearDown() {
+        logStep("Click back to reset Dynamic Loading page");
+        BasePage.navigateBack();
+    }
+
+}
+
+
+
+

Perform both cases running Test Class as JUnit Test.

+
+
+

WARNING: In this example, there is a visible loading bar signaling that content is loading. On many websites, elements are created by scripts without any clear indication. This may cause problems with test stability. When your tests aren’t finding page elements, try to add wait functions with a short timeout.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-11-Exit-Intent.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-11-Exit-Intent.html new file mode 100644 index 00000000..8e8cf98e --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-11-Exit-Intent.html @@ -0,0 +1,558 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example32 +
+
+
+

This case shows how to perform mouse actions and test modal windows.

+
+
+

After you move the mouse cursor out of the website, you should see a new window appearing:

+
+
+
+example33 +
+
+
+

Check in the browser’s developer mode if this window exists in Page DOM

+
+
+
+example34 +
+
+
+

Before you move the mouse out, the window exists, but it’s not displayed.

+
+
+

When the mouse is moved, JavaScript changes the display attribute. It also hides the window after clicking "Close".

+
+
+
+example35 +
+
+
+

== Page Class

+
+
+
+
public class ExitIntentPage extends BasePage {
+
+    // Values of the modal's inline "style" attribute in its two states.
+    private static final String MODAL_WINDOW_HIDDEN          = "display: none;";
+    private static final String MODAL_WINDOW_DISPLAYED       = "display: block;";
+    // Constant name typo fixed: ATTRIBUTTE -> ATTRIBUTE (private, internal use only).
+    private static final String MODAL_WINDOW_STYLE_ATTRIBUTE = "style";
+
+    private static final By selectorModalWindow            = By.cssSelector("div#ouibounce-modal");
+    private static final By selectorExitIntentText         = By.cssSelector("div#content h3");
+    private static final By selectorModalWindowTitle       = By.cssSelector("h3");
+    private static final By selectorModalWindowCloseButton = By.cssSelector("div.modal-footer > p");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.EXIT_INTENT.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Exit Intent' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.EXIT_INTENT.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns information if exit intent message is visible or not.
+     *
+     * @return true if exit intent message was found on web page.
+     */
+    public boolean isIntentMessageVisible() {
+        return getDriver().findElementDynamic(selectorExitIntentText)
+                .isDisplayed();
+    }
+
+    /**
+     * Returns information if modal window is hidden.
+     *
+     * @return true if modal window's style attribute equals "display: none;".
+     */
+    public boolean isModalWindowHidden() {
+        return getDriver().findElementDynamic(selectorModalWindow)
+                .getAttribute(MODAL_WINDOW_STYLE_ATTRIBUTE)
+                .equals(MODAL_WINDOW_HIDDEN);
+    }
+
+    /**
+     * Returns information if modal window is shown on web page.
+     *
+     * @return true if modal window's style attribute equals "display: block;".
+     */
+    public boolean isModalWindowVisible() {
+        return getDriver().findElementDynamic(selectorModalWindow)
+                .getAttribute(MODAL_WINDOW_STYLE_ATTRIBUTE)
+                .equals(MODAL_WINDOW_DISPLAYED);
+    }
+
+    /**
+     * Returns information if modal window title is shown and correct.
+     *
+     * @param expectedValue String representing expected value of modal window's title.
+     * @return true if modal window's title is equal to expected value.
+     */
+    public boolean verifyModalWindowTitle(String expectedValue) {
+        return getDriver().elementLabel(new ByChained(selectorModalWindow,
+                selectorModalWindowTitle))
+                .getText()
+                .equals(expectedValue);
+    }
+
+    /**
+     * Closes modal window by pressing 'close' button.
+     */
+    public void closeModalWindow() {
+        getDriver().elementButton(new ByChained(selectorModalWindow,
+                selectorModalWindowCloseButton))
+                .click();
+    }
+
+    /**
+     * Moves mouse pointer to the top middle of screen, then to the centre of screen and
+     * again to the top.
+     * <p>
+     * This move simulates leaving the viewport and encourages the modal to show up.
+     * java.awt.Robot is used to move the mouse pointer out of the viewport.
+     * </p>
+     *
+     * @see java.awt.Robot
+     */
+    public void moveMouseOutOfViewport() {
+        Robot robot;
+        Dimension screenSize = getDriver().manage()
+                .window()
+                .getSize();
+        // Plain integer division replaces the redundant BigDecimal round-trip:
+        // Dimension.getWidth()/getHeight() already return ints, so the result is identical.
+        int halfWidth = screenSize.getWidth() / 2;
+        int halfHeight = screenSize.getHeight() / 2;
+
+        try {
+            robot = new Robot();
+            robot.mouseMove(halfWidth, 1);
+            // NOTE(review): implicitlyWait() configures the driver's element-lookup timeout;
+            // it does not pause execution. Kept as in the original to avoid a behavior
+            // change, but a short sleep may be what was intended here — confirm.
+            getDriver().manage()
+                    .timeouts()
+                    .implicitlyWait(1, TimeUnit.SECONDS);
+            robot.mouseMove(halfWidth, halfHeight);
+            getDriver().manage()
+                    .timeouts()
+                    .implicitlyWait(1, TimeUnit.SECONDS);
+            robot.mouseMove(halfWidth, 1);
+        } catch (AWTException e) {
+            BFLogger.logError("Unable to connect with remote mouse");
+            e.printStackTrace();
+        }
+    }
+}
+
+
+
+
+
+

== Attributes

+
+
+

Elements on pages have attributes like "id", "class", "name", "style" etc. In order to check them, use the method getAttribute(String name). In this case, the "style" attribute determines whether the element is displayed.

+
+
+
+
+

== Robot

+
+
+

Robot class can perform mouse movement. Method mouseMove(int x, int y) moves the remote mouse to given coordinates.

+
+
+
+
+

== Manage Timeouts

+
+
+

manage().timeouts() methods allow you to change WebDriver timeout values such as:

+
+
+
    +
  • +

    pageLoadTimeout(long time, TimeUnit unit) - the amount of time to wait for a page to load before throwing an exception

    +
  • +
  • +

    setScriptTimeout(long time, TimeUnit unit) - the amount of time to wait for a script to finish execution before throwing an exception

    +
  • +
  • +

    implicitlyWait(long time, TimeUnit unit) - the amount of time the driver should wait when searching for an element if it is not immediately present. After that time, it throws an exception.

    +
  • +
+
+
+

Changing timeouts can improve test stability but can also make them run slower.

+
+
+
+
+

== Test Class

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Exit Intent link and load subpage

    +
  4. +
  5. +

    Check if the page is loaded and "Exit Intent" message is visible

    +
  6. +
  7. +

    Verify if Modal Window is hidden

    +
  8. +
  9. +

    Move mouse out of the viewport

    +
  10. +
  11. +

    Check if Modal Window is visible

    +
  12. +
  13. +

    Verify if Modal Window title is correct

    +
  14. +
  15. +

    Click 'close' button

    +
  16. +
  17. +

    Again verify if Modal Window is hidden

    +
  18. +
+
+
+
+
+// Verifies the exit-intent modal: hidden on load, shown after the mouse leaves the
+// viewport, title correct, and hidden again after closing.
+@Category({ TestsLocal.class, TestsNONParallel.class })
+public class ExitIntentTest extends TheInternetBaseTest {
+
+    // Expected modal title; the test compares against toUpperCase(), presumably because
+    // the page renders the title upper-cased — verify against the page's CSS.
+    private static final String MODAL_WINDOW_TITLE = "This is a modal window";
+
+    private static ExitIntentPage exitIntentPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        // Navigate once to the Exit Intent subpage and confirm it is ready for the test.
+        exitIntentPage = shouldTheInternetPageBeOpened().clickExitIntentLink();
+
+        logStep("Verify if Exit Intent page is opened");
+        assertTrue("Unable to open Exit Intent page", exitIntentPage.isLoaded());
+
+        logStep("Verify if exit intent message is visible");
+        assertTrue("Exit intent message is not visible", exitIntentPage.isIntentMessageVisible());
+    }
+
+    @Test
+    public void shouldModalWindowAppearWhenMouseMovedOutOfViewportTest() {
+
+        logStep("Verify if modal window is hidden");
+        assertTrue("Fail to hide modal window", exitIntentPage.isModalWindowHidden());
+
+        logStep("Move mouse pointer out of viewport");
+        exitIntentPage.moveMouseOutOfViewport();
+
+        logStep("Verify if modal window showed up");
+        assertTrue("Fail to show up modal window", exitIntentPage.isModalWindowVisible());
+
+        logStep("Verify if modal window title displays properly");
+        assertTrue("Fail to display modal window's title",
+                exitIntentPage.verifyModalWindowTitle(MODAL_WINDOW_TITLE.toUpperCase()));
+
+        logStep("Close modal window");
+        exitIntentPage.closeModalWindow();
+
+        logStep("Verify if modal window is hidden again");
+        assertTrue("Fail to hide modal window", exitIntentPage.isModalWindowHidden());
+    }
+}
+
+
+
+

Remember not to move mouse manually during test execution.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-12-File-download-test.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-12-File-download-test.html new file mode 100644 index 00000000..3e3ce919 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-12-File-download-test.html @@ -0,0 +1,417 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example36 +
+
+
+

This example shows how to check if file downloads properly.

+
+
+

After clicking on one of these links, a specific file should be downloaded to your computer.

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click on the File Download link and open subpage

    +
  4. +
  5. +

    Click on "some-file.txt" download link and download file

    +
  6. +
  7. +

    Check if the file exists in the appropriate folder

    +
  8. +
  9. +

    Delete the file

    +
  10. +
  11. +

    Check if the file doesn’t exist in the folder

    +
  12. +
+
+
+

== Page Class

+
+
+
+
public class FileDownloadPage extends BasePage {
+
+    private static final By selectorSomeFileTxt = By.cssSelector("a[href*=some-file]");
+
+    // Directory the browser downloads into (the JVM temp dir in this setup).
+    private final String DOWNLOAD_DIR = System.getProperty("java.io.tmpdir");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DOWNLOAD.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'File Downloader' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DOWNLOAD.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Verifies if the chosen file is already downloaded and if not, downloads it.
+     * Throws RuntimeException otherwise.
+     *
+     * @return Downloaded file
+     */
+    public File downloadTextFile() {
+        String nameOfDownloadFile = getNameOfDownloadFile();
+        // Bug fix: use the (parent, child) File constructor instead of raw string
+        // concatenation — "java.io.tmpdir" is not guaranteed to end with a separator.
+        File fileToDownload = new File(DOWNLOAD_DIR, nameOfDownloadFile);
+
+        if (fileToDownload.exists()) {
+            throw new RuntimeException("The file that you want to download already exists. "
+                    + "Please remove it manually. Path to the file: " + fileToDownload.getPath());
+        }
+
+        getDriver().elementButton(selectorSomeFileTxt)
+                .click();
+
+        waitForFileDownload(2000, fileToDownload);
+        return fileToDownload;
+    }
+
+    // Polls every 200 ms until the expected file appears, up to the given total timeout.
+    private void waitForFileDownload(int totalTimeoutInMillis, File expectedFile) {
+        FluentWait<WebDriver> wait = new FluentWait<WebDriver>(getDriver())
+                .withTimeout(totalTimeoutInMillis, TimeUnit.MILLISECONDS)
+                .pollingEvery(200, TimeUnit.MILLISECONDS);
+
+        wait.until((WebDriver wd) -> expectedFile.exists());
+    }
+
+    // Derives the file name from the last path segment of the download link's href.
+    private String getNameOfDownloadFile() {
+        String urlToDownload = getDriver().findElementDynamic(selectorSomeFileTxt)
+                .getAttribute("href");
+        String[] urlHierarchy = urlToDownload.split("/"); // local-name typo fixed: urlHierachy
+        return urlHierarchy[urlHierarchy.length - 1];
+    }
+}
+
+
+
+

Use FluentWait class and create an expected condition using a lambda expression to wait until the file downloads.

+
+
+

To perform operations on files, use java File class. To get a file name, find it in download URL.

+
+
+
+
+

== Test Class

+
+
+
+
+@Category({ TestsLocal.class, TestsNONParallel.class })
+public class FileDownloadTest extends TheInternetBaseTest {
+
+    private static FileDownloadPage fileDownloadPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        fileDownloadPage = shouldTheInternetPageBeOpened().clickFileDownloadLink();
+
+        logStep("Verify if File Download page is opened");
+        assertTrue("Unable to open File Download page", fileDownloadPage.isLoaded());
+    }
+
+    // Method renamed to fix camel-case typo: shouldfileBeDownloaded -> shouldFileBeDownloaded.
+    // JUnit discovers @Test methods reflectively, so no caller breaks.
+    @Test
+    public void shouldFileBeDownloaded() {
+
+        logStep("Download the some-file.txt");
+        File downloadedFile = fileDownloadPage.downloadTextFile();
+
+        logStep("Verify if downloaded file exists");
+        assertTrue("Downloaded file does not exist", downloadedFile.exists());
+
+        logStep("Remove downloaded file");
+        // Assert the result instead of silently ignoring File.delete()'s return value.
+        assertTrue("Unable to remove downloaded file", downloadedFile.delete());
+
+        logStep("Verify if downloaded file has been removed");
+        assertFalse("Downloaded file still exists", downloadedFile.exists());
+    }
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-13-Form-Authentication-Test.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-13-Form-Authentication-Test.html new file mode 100644 index 00000000..11d87b42 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-13-Form-Authentication-Test.html @@ -0,0 +1,559 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example37 +
+
+
+

This case shows how to pass through the standard authentication page.

+
+
+

When you enter the correct credentials, you should see the next page:

+
+
+
+example38 +
+
+
+

If user data is wrong, an appropriate message appears:

+
+
+
+example39 +
+
+
+

== Page Class

+
+
+
+
public class FormAuthenticationPage extends BasePage {
+
+    // Modifier order normalized to the conventional "static final".
+    // NOTE(review): the logout selector string ends with a trailing space; CSS parsers
+    // trim it, so it is kept byte-identical here to avoid any behavior change.
+    private static final By selectorInputUsername     = By.cssSelector("#username");
+    private static final By selectorInputUserPassword = By.cssSelector("#password");
+    private static final By selectorLoginMessage      = By.cssSelector("#flash");
+    private static final By selectorLoginButton       = By.cssSelector("#login > button > i");
+    private static final By selectorLogoutButton      = By.cssSelector("#content > div > a ");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.LOGIN.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Login Page' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() + PageSubURLsProjectYEnum.LOGIN.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Sets user name to designated form's field.
+     *
+     * @param username String representing a user's name
+     * @return FormAuthenticationPage object with user name set to the given one
+     */
+    public FormAuthenticationPage setUsername(String username) {
+        InputTextElement elementInputUsername = new InputTextElement(selectorInputUsername);
+        elementInputUsername.clearInputText();
+        elementInputUsername.setInputText(username);
+        return this;
+    }
+
+    /**
+     * Sets user password to designated form's field.
+     *
+     * @param userPassword String representing a user's password
+     * @return FormAuthenticationPage object with user's password set to the given one
+     */
+    public FormAuthenticationPage setUserPassword(String userPassword) {
+        InputTextElement elementInputPassword = new InputTextElement(selectorInputUserPassword);
+        elementInputPassword.clearInputText();
+        elementInputPassword.setInputText(userPassword);
+        return this;
+    }
+
+    /**
+     * Returns login message.
+     *
+     * @return String object representing the message returned after login operation is performed
+     */
+    public String getLoginMessageText() {
+        return new LabelElement(selectorLoginMessage).getText();
+    }
+
+    /**
+     * Clicks 'Login' button.
+     */
+    public void clickLoginButton() {
+        new Button(selectorLoginButton).click();
+    }
+
+    /**
+     * Clicks 'Logout' button.
+     */
+    public void clickLogoutButton() {
+        new Button(selectorLogoutButton).click();
+    }
+}
+
+
+
+
+
+

== == InputTextElement

+
+
+

Use methods from this class to perform actions on text fields:

+
+
+
    +
  • +

    clearInputText() - remove all text from selected input field

    +
  • +
  • +

    setInputText(String text) - enter given text

    +
  • +
+
+
+
+
+

== == LabelElement

+
+
+
    +
  • +

    String getText() method returns visible text from label

    +
  • +
+
+
+
+
+

== TestClass

+
+
+

Prepare six test cases:

+
+
+
    +
  1. +

    Try to login with empty user data and check if the error message appears

    +
  2. +
  3. +

    Try to login with empty username and valid password and check if the error message appears

    +
  4. +
  5. +

    Try to login with a valid username and empty password and check if the error message appears

    +
  6. +
  7. +

    Try to login with invalid username and invalid password and check if the error message appears

    +
  8. +
  9. +

    Try to login with a valid username and valid password and check if success login message appears, then log out

    +
  10. +
  11. +

    Try to login with a valid username and valid password and check if success login message appears, then log out and check if success logout message is displayed

    +
  12. +
+
+
+

Before all tests: Open The Internet Main Page

+
+
+

Before each case: Click on the Form Authentication link and open login page

+
+
+

After each case: Go back to The Internet Main Page

+
+
+
+
@Category({ TestsLocal.class, TestsNONParallel.class })
+public class FormAuthenticationTest extends TheInternetBaseTest {
+
+    private static FormAuthenticationPage formAuthenticationPage;
+
+    private String errorUsernameMessage = "Your username is invalid!\n" + "×";
+    private String errorPasswordMessage = "Your password is invalid!\n" + "×";
+    private String loginMessage         = "You logged into a secure area!\n" + "×";
+    private String logoutMessage        = "You logged out of the secure area!\n" + "×";
+    private String emptyUsername        = "";
+    private String emptyUserPassword    = "";
+    private String validUsername        = "tomsmith";
+    private String validPassword        = "SuperSecretPassword!";
+    private String randomUsername       = UUID.randomUUID()
+            .toString();
+    private String randomUserPassword   = UUID.randomUUID()
+            .toString();
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        logStep("Open the Url http://the-internet.herokuapp.com/");
+        theInternetPage = new TheInternetPage();
+        theInternetPage.load();
+
+        logStep("Verify if Url http://the-internet.herokuapp.com/ is opened");
+        assertTrue("Unable to load The Internet Page", theInternetPage.isLoaded());
+    }
+
+    @Override
+    public void setUp() {
+        logStep("Click subpage link");
+        formAuthenticationPage = theInternetPage.clickFormAuthenticationLink();
+
+        logStep("Verify if subpage is opened");
+        assertTrue("The Internet subpage: FormAuthenticationPage was not open", formAuthenticationPage.isLoaded());
+    }
+
+    @Test
+    public void shouldErrorMessageBeDisplayedWhenUserLogsWithEmptyData() {
+        logStep("Log user with empty username and password");
+        formAuthenticationPage.setUsername(emptyUsername)
+                .setUserPassword(emptyUserPassword)
+                .clickLoginButton();
+        assertEquals("Unexpectedly user logged in with empty data", errorUsernameMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Test
+    public void shouldErrorMessageBeDisplayedWhenUserLogsWithEmptyUsernameAndValidPassword() {
+        logStep("Log user with empty username and valid password");
+        formAuthenticationPage.setUsername(emptyUsername)
+                .setUserPassword(validPassword)
+                .clickLoginButton();
+        assertEquals("Unexpectedly user logged in with empty username", errorUsernameMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Test
+    public void shouldErrorMessageBeDisplayedWhenUserLogsWithValidUsernameAndEmptyPassword() {
+        logStep("Log user with valid username and empty password");
+        formAuthenticationPage.setUsername(validUsername)
+                .setUserPassword(emptyUserPassword)
+                .clickLoginButton();
+        assertEquals("Unexpectedly user logged in with empty password", errorPasswordMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Test
+    public void shouldErrorMessageBeDisplayedWhenUserLogsWithInvalidUsernameAndInvalidPassword() {
+        logStep("Log user with invalid username and invalid password");
+        formAuthenticationPage.setUsername(randomUsername)
+                .setUserPassword(randomUserPassword)
+                .clickLoginButton();
+        assertEquals("Unexpectedly user logged in with random credentials", errorUsernameMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Test
+    public void shouldUserLogInWithValidCredentials() {
+        logStep("Log user with valid username and valid password");
+        formAuthenticationPage.setUsername(validUsername)
+                .setUserPassword(validPassword)
+                .clickLoginButton();
+        assertEquals("Unable to login user with valid credentials", loginMessage,
+                formAuthenticationPage.getLoginMessageText());
+        logStep("Log out user");
+        formAuthenticationPage.clickLogoutButton();
+    }
+
+    @Test
+    public void shouldUserLogOutAfterProperLogInAndClickLogoutButon() {
+        logStep("Log user with valid username and valid password");
+        formAuthenticationPage.setUsername(validUsername)
+                .setUserPassword(validPassword)
+                .clickLoginButton();
+        assertEquals("Unable to login user with valid credentials", loginMessage,
+                formAuthenticationPage.getLoginMessageText());
+        logStep("Log out user");
+        formAuthenticationPage.clickLogoutButton();
+        assertEquals("User cannot log out after prper log in", logoutMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Override
+    public void tearDown() {
+        logStep("Navigate back to The-Internet page");
+        theInternetPage.load();
+    }
+}
+
+
+
+

After running Test Class, cases might be performed in a different order.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-14-Hovers-Test.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-14-Hovers-Test.html new file mode 100644 index 00000000..6903259e --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-14-Hovers-Test.html @@ -0,0 +1,410 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example40 +
+
+
+

This example shows how to approach elements dynamically appearing after the user’s action.

+
+
+

Move the mouse over an image to see the additional label.

+
+
+
+example41 +
+
+
+

Labels exist in page DOM all the time but their display attributes change. In this case, there is no JavaScript. Elements' visibility is managed by CSS.

+
+
+
+example42 +
+
+
+

== Page Class

+
+
+
+
public class HoversPage extends BasePage {
+
+    private final static By selectorImages = By.cssSelector("div.figure > img");
+    private final static By selectorNames  = By.cssSelector("div.figcaption h5");
+
+    /** Checks that the browser is currently on the Hovers subpage. */
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        String currentUrl = getDriver().getCurrentUrl();
+        return currentUrl.contains(PageSubURLsProjectYEnum.HOVERS.getValue());
+    }
+
+    /** Navigates directly to the Hovers subpage and waits until it finishes loading. */
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Hovers' page.");
+        String hoversUrl = GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue()
+                + PageSubURLsProjectYEnum.HOVERS.getValue();
+        getDriver().get(hoversUrl);
+        getDriver().waitForPageLoaded();
+    }
+
+    /** Returns the current page title. */
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Moves the mouse pointer over the image with the given index.
+     *
+     * @param index index of the picture the pointer should be moved to
+     */
+    public void hoverOverAvatar(int index) {
+        WebElement avatar = getDriver().findElementDynamics(selectorImages)
+                .get(index);
+        new Actions(getDriver()).moveToElement(avatar)
+                .perform();
+    }
+
+    /**
+     * Reads the caption displayed under the picture with the given index.
+     *
+     * @param index index of the picture whose caption should be read
+     * @return the caption text
+     */
+    public String getAvatarsInformation(int index) {
+        WebElement caption = getDriver().findElementDynamics(selectorNames)
+                .get(index);
+        return caption.getText();
+    }
+}
+
+
+
+
+
+

== == Actions

+
+
+

Actions class contains methods used to execute basic user actions such as mouse moving and clicking or keys sending. Action or actions series will be performed after calling perform() method.

+
+
+
+
+

== Test Class

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Go to Hovers page

    +
  4. +
  5. +

    Move mouse over random image

    +
  6. +
  7. +

    Check if displayed text is equal to expected.

    +
  8. +
+
+
+
+
+/**
+ * Test for The Internet 'Hovers' page: hovers the mouse over a randomly chosen
+ * avatar image and verifies the name text revealed underneath it.
+ */
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class HoversTest extends TheInternetBaseTest {
+
+    private static HoversPage    hoversPage;
+    // Expected name captions, in the same order as the avatars on the page.
+    private final String        names[]    = { "name: user1", "name: user2", "name: user3" };
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        hoversPage = shouldTheInternetPageBeOpened().clickHoversLink();
+
+        logStep("Verify if Hovers page is opened");
+        assertTrue("Unable to open Hovers page", hoversPage.isLoaded());
+    }
+
+    @Test
+    public void shouldProperInformationBeDisplayedWhenMousePointerHoveredOverRandomElement() {
+        logStep("Hover mouse pointer over random element");
+        // A random avatar is picked each run, so successive runs may exercise different elements.
+        int randomIndex = new Random().nextInt(names.length);
+        hoversPage.hoverOverAvatar(randomIndex);
+        assertEquals("Picture's information is different than expected", names[randomIndex],
+                hoversPage.getAvatarsInformation(randomIndex));
+    }
+}
+
+
+
+

Because in this case the tested content is being chosen randomly, each test run could check a different element.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-15-JavaScript-Alerts.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-15-JavaScript-Alerts.html new file mode 100644 index 00000000..92a9d3b7 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-15-JavaScript-Alerts.html @@ -0,0 +1,539 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example43 +
+
+
+

This case shows how to test pop-up JS alerts.

+
+
+

After clicking one of the buttons, an adequate alert should appear.

+
+
+
+example44 +
+
+
+

Performed action will be displayed under "Result" label.

+
+
+

In developer mode, you can view JavaScript which creates alerts.

+
+
+
+example45 +
+
+
+

== Page Class

+
+
+
+
+/**
+ * Page Object for The Internet 'JavaScript Alerts' page.
+ * Wraps the three alert-triggering buttons and the 'Result' label, and exposes
+ * helpers for accepting, dismissing and typing into the native JS popups.
+ */
public class JavaScriptAlertsPage extends BasePage {
+
+    private static final By selectorAlertButton   = By.cssSelector("button[onclick*=jsAlert]");
+    private static final By selectorConfirmButton = By.cssSelector("button[onclick*=jsConfirm]");
+    private static final By selectorPromptButton  = By.cssSelector("button[onclick*=jsPrompt]");
+    private static final By resultLabelSelector   = By.cssSelector("p#result");
+
+    /**
+     * Waits for page load and checks that the current URL contains the
+     * JavaScript Alerts sub-URL.
+     *
+     * @return true if the JavaScript Alerts page is the current page
+     */
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.JAVASCRIPT_ALERTS.getValue());
+    }
+
+    /**
+     * Navigates to the JavaScript Alerts page and waits for it to load.
+     */
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'JavaScript Alerts' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.JAVASCRIPT_ALERTS.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    /**
+     * @return the actual page title read from the browser
+     */
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Clicks 'JS alert' button.
+     */
+    public void clickAlertButton() {
+        new Button(selectorAlertButton).click();
+        // Wait up to 2 seconds for the native alert to appear before returning.
+        WebDriverWait wait = new WebDriverWait(getDriver(), 2);
+        wait.until(ExpectedConditions.alertIsPresent());
+    }
+
+    /**
+     * Clicks 'JS confirm' button.
+     */
+    public void clickConfirmButton() {
+        new Button(selectorConfirmButton).click();
+        WebDriverWait wait = new WebDriverWait(getDriver(), 2);
+        wait.until(ExpectedConditions.alertIsPresent());
+    }
+
+    /**
+     * Clicks 'JS prompt' button.
+     */
+    public void clickPromptButton() {
+        new Button(selectorPromptButton).click();
+        WebDriverWait wait = new WebDriverWait(getDriver(), 2);
+        wait.until(ExpectedConditions.alertIsPresent());
+    }
+
+    /**
+     * Returns message displayed by popup.
+     *
+     * @return String object representing message displayed by popup
+     */
+    public String readResultLabel() {
+        return new LabelElement(resultLabelSelector).getText();
+    }
+
+    /**
+     * Clicks alert's 'OK' button.
+     */
+    public void clickAlertAccept() {
+        // switchTo().alert() targets the currently displayed native popup.
+        getDriver().switchTo()
+                .alert()
+                .accept();
+    }
+
+    /**
+     * Clicks alert's 'Cancel' button.
+     */
+    public void clickAlertDismiss() {
+        getDriver().switchTo()
+                .alert()
+                .dismiss();
+    }
+
+    /**
+     * Types text into alert's text field.
+     *
+     * @param text String object sent into alert's text field
+     */
+    public void writeTextInAlert(String text) {
+        getDriver().switchTo()
+                .alert()
+                .sendKeys(text);
+    }
+}
+
+
+
+
+
+

== == alert()

+
+
+

Using switchTo() method you can change processed content. switchTo().alert() allows performing actions on appearing alerts such as accepting, dismissing or entering keys.

+
+
+
+
+

== Test Class

+
+
+

Before all tests: Open The Internet Main Page and go to JavaScript Alert page

+
+
+
    +
  1. +

    Click JS Alert button, accept alert and check if Result message returns performed an action

    +
  2. +
  3. +

    Click JS Confirm button, accept alert and check if Result message returns performed action

    +
  4. +
  5. +

    Click JS Confirm button, dismiss alert and check if Result message returns performed action

    +
  6. +
  7. +

    Click JS Prompt button, write random text, accept alert and check if Result message returns performed action with written text

    +
  8. +
  9. +

    Click JS Prompt button, dismiss the alert and check if Result message returns performed action

    +
  10. +
+
+
+

After each case: Refresh Page

+
+
+

After all tests: Navigate back to The Internet Main Page

+
+
+
+
+/**
+ * Tests for The Internet 'JavaScript Alerts' page: each case triggers one of the
+ * JS popups (alert / confirm / prompt), accepts or dismisses it, and asserts the
+ * text shown under the 'Result' label. The page is refreshed after each case.
+ */
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class JavaScriptAlertsTest extends TheInternetBaseTest {
+
+    private static JavaScriptAlertsPage javaScriptAlertsPage;
+
+    // NOTE(review): the misspelling 'successfuly' presumably mirrors the text the
+    // site actually displays (the assertEquals below compares against the live
+    // Result label) — verify against the site before "fixing" it.
+    private final String jsAlertCofirmMessage    = "You successfuly clicked an alert";
+    private final String jsConfirmConfirmMessage = "You clicked: Ok";
+    private final String jsConfirmCancelMessage  = "You clicked: Cancel";
+    private final String jsPromptConfirmMessage  = "You entered: ";
+    private final String jsPromptCancelMessage   = "You entered: null";
+    private final String randomString            = "random";
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        javaScriptAlertsPage = shouldTheInternetPageBeOpened().clickJavaScriptAlertLink();
+
+        logStep("Verify if JavaScript Alerts page is opened");
+        assertTrue("Unable to open JavaScript Alerts page", javaScriptAlertsPage.isLoaded());
+    }
+
+    @AfterClass
+    public static void tearDownAfterClass() {
+        logStep("Navigate back to The-Internet page");
+        BasePage.navigateBack();
+    }
+
+    @Test
+    public void shouldJSAlertCloseWithProperMessageAfterPressOkButton() {
+        logStep("Click Alert button");
+        javaScriptAlertsPage.clickAlertButton();
+
+        logStep("Click 'OK' button on alert");
+        javaScriptAlertsPage.clickAlertAccept();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsAlertCofirmMessage, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Test
+    public void shouldJSConfirmCloseWithProperMessageAfterPressOkButton() {
+        logStep("Click Confirm button");
+        javaScriptAlertsPage.clickConfirmButton();
+
+        logStep("Click 'OK' button on alert");
+        javaScriptAlertsPage.clickAlertAccept();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsConfirmConfirmMessage, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Test
+    public void shouldJSConfirmCloseWithProperMessageAfterPressCancelButton() {
+        logStep("Click Confirm button");
+        javaScriptAlertsPage.clickConfirmButton();
+
+        logStep("Click 'Cancel' button on alert");
+        javaScriptAlertsPage.clickAlertDismiss();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsConfirmCancelMessage, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Test
+    public void shouldJSPromptCloseWithProperMessageAfterPressOKButton() {
+        logStep("Click Prompt button");
+        javaScriptAlertsPage.clickPromptButton();
+
+        logStep("Insert text to alert: " + randomString);
+        javaScriptAlertsPage.writeTextInAlert(randomString);
+
+        logStep("Click 'OK' button on alert");
+        javaScriptAlertsPage.clickAlertAccept();
+
+        logStep("Verify returned message");
+        // Result echoes the typed text, so the expected message is prefix + input.
+        assertEquals("Incorrect message returned after click",
+                jsPromptConfirmMessage + randomString, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Test
+    public void shouldJSPromptCloseWithProperMessageAfterPressCancelButton() {
+        logStep("Click Prompt button");
+        javaScriptAlertsPage.clickPromptButton();
+
+        logStep("Click 'Cancel' button on alert");
+        javaScriptAlertsPage.clickAlertDismiss();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsPromptCancelMessage, javaScriptAlertsPage.readResultLabel());
+    }
+
+    /**
+     * Runs after each test case: reloads the page so the Result label is reset.
+     */
+    @Override
+    public void tearDown() {
+        logStep("Refresh JavaScriptAlersPage");
+        javaScriptAlertsPage.refreshPage();
+    }
+
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-16-Key-Presses-test.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-16-Key-Presses-test.html new file mode 100644 index 00000000..55adc356 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-16-Key-Presses-test.html @@ -0,0 +1,388 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example46 +
+
+
+

This simple case shows how to test key presses.

+
+
+

This site uses JavaScript to read the key pressed and display its value.

+
+
+
+example47 +
+
+
+

== Page Class

+
+
+
+
+/**
+ * Page Object for The Internet 'Key Presses' page.
+ * Sends keyboard input to the page and reads back the '#result' element that
+ * the page's JavaScript fills with the last pressed key.
+ */
public class KeyPressesPage extends BasePage {
+
+    private static final By selectorResult = By.cssSelector("#result");
+
+    /**
+     * Waits for page load and checks that the current URL contains the
+     * Key Presses sub-URL.
+     *
+     * @return true if the Key Presses page is the current page
+     */
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.KEY_PRESS.getValue());
+    }
+
+    /**
+     * Navigates to the Key Presses page and waits for it to load.
+     */
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Key Presses' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.KEY_PRESS.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    /**
+     * @return the actual page title read from the browser
+     */
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Presses given keyboard key.
+     *
+     * @param keyToPress Key to be pressed on keyboard
+     */
+    public void pressKey(String keyToPress) {
+        // Sends the keystroke through the Actions API; perform() executes it.
+        getAction().sendKeys(keyToPress)
+                .perform();
+    }
+
+    /**
+     * Returns information from web page about pressed keyboard key.
+     *
+     * @return Information from web page about pressed key
+     */
+    public String getPressedKeyInformation() {
+        return getDriver().findElementDynamic(selectorResult)
+                .getText();
+    }
+}
+
+
+
+
+
+

== Test Class

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Go to Key Presses site

    +
  4. +
  5. +

    Press a key

    +
  6. +
  7. +

    Check if a displayed message contains the pressed key

    +
  8. +
+
+
+
+
+/**
+ * Test for The Internet 'Key Presses' page: presses a single key and checks the
+ * confirmation message the page displays.
+ */
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class KeyPressesTest extends TheInternetBaseTest {
+
+    private static KeyPressesPage keyPressesPage;
+
+    // Key sent to the page and the exact message the page is expected to echo back.
+    private final String keyToBePressed  = "Q";
+    private final String expectedMessage = "You entered: Q";
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        keyPressesPage = shouldTheInternetPageBeOpened().clickKeyPressesLink();
+
+        logStep("Verify if Key Presses page is opened");
+        assertTrue("Unable to open Key Presses page", keyPressesPage.isLoaded());
+    }
+
+    @Test
+    public void shouldWebsiteReturnInformationAboutPressedKey() {
+        logStep("Press a keyboard key");
+        keyPressesPage.pressKey(keyToBePressed);
+
+        logStep("Verify if website give valid information about pressed keyboard key");
+        assertEquals("Information about the pressed key is invalid", expectedMessage,
+                keyPressesPage.getPressedKeyInformation());
+    }
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-17-Multiple-Windows.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-17-Multiple-Windows.html new file mode 100644 index 00000000..3d31eb89 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-17-Multiple-Windows.html @@ -0,0 +1,406 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example48 +
+
+
+

This simple example shows how to operate on multiple browser tabs.

+
+
+

When you click the link, a new website will be opened in the second tab.

+
+
+
+example49 +
+
+
+

== Page Class

+
+
+
+
+/**
+ * Page Object for The Internet 'Opening a new window' page.
+ * Exposes the 'click here' link that opens {@link NewWindowPage}.
+ */
public class MultipleWindowsPage extends BasePage {
+
+    private final static By selectorLink = By.cssSelector("#content > div > a");
+
+    /**
+     * Waits for page load and checks that the current URL contains the
+     * window sub-URL.
+     *
+     * @return true if the Multiple Windows page is the current page
+     */
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.WINDOW.getValue());
+    }
+
+    /**
+     * Navigates to the Multiple Windows page and waits for it to load.
+     */
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Opening a new window' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.WINDOW.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    /**
+     * @return the actual page title read from the browser
+     */
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Clicks 'click here' link.
+     *
+     * @return NewWindowPage object
+     */
+    public NewWindowPage clickHereLink() {
+        // NOTE(review): the link opens the target in a new tab/window, but no
+        // switchTo().window() call appears here — presumably the driver wrapper
+        // handles window focus; verify before relying on this in other tests.
+        getDriver().findElementDynamic(selectorLink)
+                .click();
+        getDriver().waitForPageLoaded();
+        return new NewWindowPage();
+    }
+}
+
+
+
+

You also need a second page class for New Window Page. Implement only the required methods.

+
+
+
+
+/**
+ * Page Object for the page opened in the second tab by
+ * {@code MultipleWindowsPage}. Implements only the required BasePage methods.
+ */
public class NewWindowPage extends BasePage {
+
+    /**
+     * Waits for page load and checks that the current URL contains the
+     * new-window sub-URL.
+     *
+     * @return true if the New Window page is the current page
+     */
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.NEW_WINDOW.getValue());
+    }
+
+    /**
+     * Navigates directly to the New Window page and waits for it to load.
+     */
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'New window' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.NEW_WINDOW.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    /**
+     * @return the actual page title read from the browser
+     */
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+}
+
+
+
+
+
+

== Test Class

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Go to Multiple Windows Page

    +
  4. +
  5. +

    Click the link

    +
  6. +
  7. +

    Check if a new page is opened in the second tab

    +
  8. +
+
+
+
+
+/**
+ * Test for The Internet 'Multiple Windows' page: clicks the link and verifies
+ * that the new window's page is loaded.
+ */
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class MultipleWindowsTest extends TheInternetBaseTest {
+
+    private static MultipleWindowsPage    multipleWindowsPage;
+    private static NewWindowPage        newWindowPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        multipleWindowsPage = shouldTheInternetPageBeOpened().clickmultipleWindowsLink();
+
+        logStep("Verify if Multiple Windows page is opened");
+        assertTrue("Unable to open Multiple Windows page", multipleWindowsPage.isLoaded());
+    }
+
+    @Test
+    public void verifyIfNewBrowserWindowOpen() {
+        logStep("Click 'Click here' link");
+        newWindowPage = multipleWindowsPage.clickHereLink();
+
+        logStep("Verify if 'New window page' is opened");
+        assertTrue("Unable to open a new browser window", newWindowPage.isLoaded());
+    }
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-18-Redirection.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-18-Redirection.html new file mode 100644 index 00000000..2a2ff00a --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-18-Redirection.html @@ -0,0 +1,418 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example50 +
+
+
+

This simple case shows how to approach redirecting links.

+
+
+

After clicking on the link, you will be redirected to Status Codes Page.

+
+
+
+example51 +
+
+
+

== Page Class

+
+ +
+
+
+ +
+
+
+
+/**
+ * Page Object for The Internet 'Redirection' page.
+ * The 'Redirect here' link redirects the browser to the Status Codes page.
+ */
public class RedirectLinkPage extends BasePage {
+
+    private static final By selectorRedirectHere = By.cssSelector("a#redirect");
+
+    /**
+     * Waits for page load and checks that the current URL contains the
+     * redirect sub-URL.
+     *
+     * @return true if the Redirection page is the current page
+     */
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.REDIRECT.getValue());
+    }
+
+    /**
+     * Navigates to the Redirection page and waits for it to load.
+     */
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Redirection' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.REDIRECT.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    /**
+     * @return the actual page title read from the browser
+     */
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Clicks 'Redirect here' link.
+     *
+     * @return StatusCodesHomePage object
+     */
+    public StatusCodesHomePage clickRedirectHereLink() {
+        new Button(selectorRedirectHere).click();
+        return new StatusCodesHomePage();
+    }
+}
+
+
+
+
+
+

== == Status Codes Page

+
+
+
+
+/**
+ * Page Object for The Internet 'Status Codes' page, the target of the
+ * redirection test.
+ */
public class StatusCodesHomePage extends BasePage {
+
+    // Selectors for the individual status-code links. They are declared here but
+    // not referenced by any method shown in this class.
+    private static final By selectorLink200Code = By.linkText("200");
+    private static final By selectorLink301Code = By.linkText("301");
+    private static final By selectorLink404Code = By.linkText("404");
+    private static final By selectorLink500Code = By.linkText("500");
+
+    /**
+     * Waits for page load and checks that the current URL contains the
+     * status-codes sub-URL.
+     *
+     * @return true if the Status Codes page is the current page
+     */
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.STATUS_CODES.getValue());
+    }
+
+    /**
+     * Navigates to the Status Codes page and waits for it to load.
+     */
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Status Codes' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.STATUS_CODES.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    /**
+     * @return the actual page title read from the browser
+     */
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+}
+
+
+
+
+
+

== Test Class

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Go to Redirection Page

    +
  4. +
  5. +

    Click the link

    +
  6. +
  7. +

    Check if Status Codes Page is loaded

    +
  8. +
+
+
+
+
+/**
+ * Test for The Internet 'Redirection' page: clicks the redirect link and
+ * verifies the browser lands on the Status Codes page.
+ */
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class RedirectLinkTest extends TheInternetBaseTest {
+
+    private static RedirectLinkPage    redirectLinkPage;
+    private static StatusCodesHomePage statusCodesHomePage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        redirectLinkPage = shouldTheInternetPageBeOpened().clickRedirectLink();
+
+        logStep("Verify if Redirect Link page is opened");
+        assertTrue("Unable to open Redirect Link page", redirectLinkPage.isLoaded());
+    }
+
+    @Test
+    public void shouldUserBeRedirectedToStatusCodePage() {
+        logStep("Click 'Redirect here' link");
+        statusCodesHomePage = redirectLinkPage.clickRedirectHereLink();
+
+        logStep("Verify redirection to Status Code page");
+        assertTrue("User hasn't been redirected to the expected website",
+                statusCodesHomePage.isLoaded());
+    }
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-19-Slider-Test.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-19-Slider-Test.html new file mode 100644 index 00000000..bbb0f64a --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-19-Slider-Test.html @@ -0,0 +1,633 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example52 +
+
+
+

This case shows how to move horizontal slider.

+
+
+

You can move the slider by dragging it with a mouse or using arrow keys. The page uses a simple script to get slider position and display set value.

+
+
+
+example53 +
+
+
+

== Page Class

+
+
+
+
+/**
+ * Page Object for The Internet 'Horizontal Slider' page.
+ * Wraps a HorizontalSliderElement configured with range [0, 5] and step 0.5,
+ * and delegates position reads/moves to it.
+ */
public class HorizontalSliderPage extends BasePage {
+
+    private static final By selectorHorizontalSlider = By.cssSelector("div.sliderContainer");
+    private static final By sliderSelector           = By.cssSelector("input");
+    private static final By valueSelector            = By.cssSelector("#range");
+
+    private HorizontalSliderElement horizontalSlider;
+
+    public HorizontalSliderPage() {
+        // min = 0, max = 5, step = 0.5. new BigDecimal(0.5) uses the double
+        // constructor; 0.5 is exactly representable in binary so this is safe
+        // here, but BigDecimal.valueOf(0.5) is the usual idiom.
+        horizontalSlider = getDriver().elementHorizontalSlider(selectorHorizontalSlider,
+                sliderSelector, valueSelector, BigDecimal.ZERO, new BigDecimal(5),
+                new BigDecimal(0.5));
+    }
+
+    /**
+     * Waits for page load and checks that the current URL contains the
+     * horizontal-slider sub-URL.
+     *
+     * @return true if the Horizontal Slider page is the current page
+     */
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.HORIZONTAL_SLIDER.getValue());
+    }
+
+    /**
+     * Navigates to the Horizontal Slider page and waits for it to load.
+     */
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Horizontal Slider' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.HORIZONTAL_SLIDER.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    /**
+     * @return the actual page title read from the browser
+     */
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Validates if WebElement representing horizontal slider is visible on the page.
+     *
+     * @return true if horizontal slider is visible, false otherwise.
+     */
+    public boolean isElementHorizontalSliderVisible() {
+        return getDriver().elementHorizontalSlider(selectorHorizontalSlider)
+                .isDisplayed();
+    }
+
+    /**
+     * Returns the value of slider's start position.
+     *
+     * @return BigDecimal representing the lowest possible value of slider.
+     */
+    public BigDecimal getStartPosition() {
+        return horizontalSlider.getMinRange();
+    }
+
+    /**
+     * Returns the value of slider's middle position.
+     *
+     * @return BigDecimal representing the average value between start and end position.
+     */
+    public BigDecimal getMiddlePosition() {
+        // NOTE(review): this computes (max - min) / 2 without adding min back.
+        // For this page min is 0, so the result equals the true midpoint, but it
+        // would be off for a non-zero minimum — confirm before reusing elsewhere.
+        return horizontalSlider.getMaxRange()
+                .subtract(horizontalSlider.getMinRange())
+                .divide(new BigDecimal(2));
+    }
+
+    /**
+     * Returns the value of slider's end position.
+     *
+     * @return BigDecimal representing the highest possible value of slider.
+     */
+    public BigDecimal getEndPosition() {
+        return horizontalSlider.getMaxRange();
+    }
+
+    /**
+     * Returns current value of slider's position.
+     *
+     * @return BigDecimal representing current value of slider.
+     */
+    public BigDecimal getCurrentPosition() {
+        return horizontalSlider.getCurrentSliderValue();
+    }
+
+    /**
+     * Sets horizontal slider to a given position using one of the available methods: using keyboard
+     * or using mouse move.
+     *
+     * @param position target slider value
+     * @param method movement method selector (keyboard or mouse)
+     */
+    public void setSliderPositionTo(BigDecimal position, int method) {
+        horizontalSlider.setSliderPositionTo(position, method);
+    }
+
+    /**
+     * Verifies the correctness of the given position value and rounds it when necessary.
+     *
+     * @param position candidate slider value
+     * @return Correct value of horizontal slider's position.
+     */
+    public BigDecimal verifyAndCorrectPositionValue(BigDecimal position) {
+        return horizontalSlider.verifyAndCorrectPositionValue(position);
+    }
+}
+
+
+
+
+
+

== == Horizontal Slider Element

+
+
+

This class implements methods which can perform actions on the slider:

+
+
+

Create Slider Object using method:

+
+
+
    +
  • +

    getDriver().elementHorizontalSlider(By sliderContainerSelector, By sliderSelector, By valueSelector, BigDecimal minRange, BigDecimal maxRange, BigDecimal step)

    +
  • +
+
+
+

And use:

+
+
+
    +
  • +

    BigDecimal getMaxRange()

    +
  • +
  • +

    BigDecimal getMinRange()

    +
  • +
  • +

    BigDecimal getCurrentSliderValue()

    +
  • +
  • +

    setSliderPositionTo(BigDecimal position, int method) - moves the slider to a given position. If the position is not valid, it changes it to the nearest proper value. The second parameter determines the movement method: 0 - Keyboard, 1 - Mouse

    +
  • +
  • +

    BigDecimal verifyAndCorrectPositionValue(BigDecimal position) - returns nearest correct position

    +
  • +
+
+
+
+
+

== Test Class

+
+
+

Before all tests: Open The Internet Main Page

+
+
+

Before each case:

+
+
+
    +
  1. +

    Go to Horizontal Slider Page

    +
  2. +
  3. +

    Check if the slider is visible

    +
  4. +
  5. +

    Save start, middle and end position

    +
  6. +
+
+
+

Case 1 - Moving with the keyboard:

+
+
+
    +
  1. +

    Move slider to start position, and check if the current position equals the beginning value

    +
  2. +
  3. +

    Move the slider to middle position, and check if the current position equals the middle value

    +
  4. +
  5. +

    Move slider to end position, and check if the current position equals the end value

    +
  6. +
  7. +

    Try to move slider before start position, and check if the current position equals the beginning value

    +
  8. +
  9. +

    Try to move slider after end position, and check if the current position equals the end value

    +
  10. +
  11. +

    Try to move the slider to an improperly defined position between start and middle, and check if the current position equals the corrected value

    +
  12. +
  13. +

    Try to move the slider to an improperly defined random position, and check if the current position equals the corrected value

    +
  14. +
  15. +

    Move the slider back to start position, and check if the current position equals the beginning value

    +
  16. +
+
+
+

Case 2 - Moving with a mouse: Repeat each Case 1 step using a mouse instead of keyboard

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class SliderTest extends TheInternetBaseTest {
+
+    private static HorizontalSliderPage horizontalSliderPage;
+
+    BigDecimal startPosition;
+    BigDecimal middlePosition;
+    BigDecimal endPosition;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        logStep("Open the Url http://the-internet.herokuapp.com/");
+        theInternetPage = new TheInternetPage();
+        theInternetPage.load();
+
+        logStep("Verify if Url http://the-internet.herokuapp.com/ is opened");
+        assertTrue("Unable to load The Internet Page", theInternetPage.isLoaded());
+    }
+
+    @Override
+    public void setUp() {
+        logStep("Click Horizontal Slider link");
+        horizontalSliderPage = theInternetPage.clickHorizontalSliderLink();
+
+        logStep("Verify if Horizontal Slider page is opened");
+        assertTrue("Unable to load Horizontal Slider page", horizontalSliderPage.isLoaded());
+
+        logStep("Verify if horizontal slider element is visible");
+        assertTrue("Horizontal slider is not visible",
+                horizontalSliderPage.isElementHorizontalSliderVisible());
+
+        startPosition = horizontalSliderPage.getStartPosition();
+        middlePosition = horizontalSliderPage.getMiddlePosition();
+        endPosition = horizontalSliderPage.getEndPosition();
+    }
+
+    @Test
+    public void shouldHorizontalSliderMoveWhenKeyboardArrowButtonsArePressed() {
+        BigDecimal position;
+        logStep("Move slider to start position: " + startPosition);
+        horizontalSliderPage.setSliderPositionTo(startPosition, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider to middle position: " + middlePosition);
+        horizontalSliderPage.setSliderPositionTo(middlePosition, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(middlePosition),
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider to end position: " + endPosition);
+        horizontalSliderPage.setSliderPositionTo(endPosition, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", endPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = startPosition.subtract(BigDecimal.ONE);
+        logStep("Move slider to position before start position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = endPosition.add(BigDecimal.ONE);
+        logStep("Move slider to position after end position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", endPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = middlePosition.divide(new BigDecimal(2));
+        logStep("Move slider to improperly defined position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(position),
+                horizontalSliderPage.getCurrentPosition());
+
+        position = new BigDecimal(new BigInteger("233234"), 5);
+        logStep("Move slider to improperly defined random position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(position),
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider back to start position: " + startPosition);
+        horizontalSliderPage.setSliderPositionTo(startPosition, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+    }
+
+    @Test
+    public void shouldHorizontalSliderMoveWhenMouseButtonIsPressedAndMouseIsMoving() {
+        BigDecimal position;
+        logStep("Move slider to start position: " + startPosition);
+        horizontalSliderPage.setSliderPositionTo(startPosition, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider to middle position: " + middlePosition);
+        horizontalSliderPage.setSliderPositionTo(middlePosition, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(middlePosition),
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider to end position: " + endPosition);
+        horizontalSliderPage.setSliderPositionTo(endPosition, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", endPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = startPosition.subtract(BigDecimal.ONE);
+        logStep("Move slider to position before start position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = endPosition.add(BigDecimal.ONE);
+        logStep("Move slider to position after end position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", endPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = middlePosition.divide(new BigDecimal(2));
+        logStep("Move slider to improperly defined position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(position),
+                horizontalSliderPage.getCurrentPosition());
+
+        position = new BigDecimal(new BigInteger("212348"), 5);
+        logStep("Move slider to improperly defined random position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(position),
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider back to start position: " + startPosition);
+        horizontalSliderPage.setSliderPositionTo(startPosition, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+    }
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-20-Sortable-Data-Tables.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-20-Sortable-Data-Tables.html new file mode 100644 index 00000000..ea9ee73e --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-20-Sortable-Data-Tables.html @@ -0,0 +1,520 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example54 +
+
+
+

This example shows how to sort and read data from tables.

+
+
+

After clicking on a column header, the data will be sorted descending and after another click sorted ascending by selected attribute. Watch how both tables' content changes on page DOM. Sorting is performed by JavaScript functions.

+
+
+
+example55 +
+
+
+

== Page Class

+
+
+
+
public class SortableDataTablesPage extends BasePage {
+
+    private static final By selectorTable  = By.cssSelector("table.tablesorter");
+    private static final By selectorHeader = By.cssSelector("th");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.SORTABLE_DATA_TABLES.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Data Tables' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.SORTABLE_DATA_TABLES.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Sorts data in given column using ascending order.
+     *
+     * @param columnNumber The number of column where data should be sorted
+     * @param tableNumber  The number of table where data should be sorted
+     */
+    public void sortColumnAscending(int columnNumber, int tableNumber) {
+        WebElement header = this.getTableHeaders(columnNumber, tableNumber);
+        String className = header.getAttribute("class");
+        if (className.contains("headerSortUp") || !className.contains("headerSortDown")) {
+            header.click();
+        }
+    }
+
+    /**
+     * Sorts data in given column using descending order.
+     *
+     * @param columnNumber The number of the column where data should be sorted
+     * @param tableNumber  The number of the table where data should be sorted
+     */
+    public void sortColumnDescending(int columnNumber, int tableNumber) {
+        WebElement header = this.getTableHeaders(columnNumber, tableNumber);
+        String className = header.getAttribute("class");
+        if (!className.contains("headerSortUp")) {
+            header.click();
+            if (!className.contains("headerSortDown")) {
+                header.click();
+            }
+        }
+    }
+
+    /**
+     * Return given column values from chosen table.
+     *
+     * @param columnNumber The number of the column the data should be retrieved from
+     * @param tableNumber  The number of the table the data should be retrieved from
+     * @return list of values from given column
+     */
+    public List<String> getColumnValues(int columnNumber, int tableNumber) {
+        WebElement table = getTable(tableNumber);
+        return JsoupHelper.findTexts(table, By.cssSelector("tr > td:nth-child(" + (columnNumber + 1)
+                + ")"));
+    }
+
+    /**
+     * Returns column's class name.
+     *
+     * @param columnNumber The number of the column to get class number from
+     * @param tableNumber  The number of the table to get column class name from
+     * @return String object representing column's class name
+     */
+    public String readColumnClass(int columnNumber, int tableNumber) {
+        return this.getTableHeaders(columnNumber, tableNumber)
+                .getAttribute("class");
+    }
+
+    private WebElement getTable(int tableNumber) {
+        return new ListElements(selectorTable).getList()
+                .get(tableNumber);
+    }
+
+    private WebElement getTableHeaders(int columnNumber, int tableNumber) {
+        return getTable(tableNumber).findElements(selectorHeader)
+                .get(columnNumber);
+    }
+}
+
+
+
+
+
+

== == Finding values

+
+
+

Using proper selectors, save elements such as tables and their columns' headers as Web Element Lists. Afterwards, you can get the desired element by finding it by index (e.g. table or column number). To get column values, use JsoupHelper and to check if the column is sorted get its class attribute.

+
+
+
+
+

== Test Class

+
+
+

Before all tests: Open The Internet Main Page

+
+
+

Before each case: Go to Sortable Data Tables Page

+
+
+

Case 1:

+
+
+
    +
  1. +

    Choose a random table

    +
  2. +
  3. +

    Sort first column "Last Name" in ascending order

    +
  4. +
  5. +

    Check if column header class contains "headerSortDown"

    +
  6. +
  7. +

    Save column content to the List

    +
  8. +
  9. +

    Create List copy and sort it

    +
  10. +
  11. +

    Compare sorted values and values from the table

    +
  12. +
+
+
+

Case 2:

+
+
+
    +
  1. +

    Choose a random table

    +
  2. +
  3. +

    Sort second column "First Name" in descending order

    +
  4. +
  5. +

    Check if column header class contains "headerSortUp"

    +
  6. +
  7. +

    Save column content to the List

    +
  8. +
  9. +

    Create List copy and sort it then reverse it

    +
  10. +
  11. +

    Compare reversed sorted values and values from the table

    +
  12. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class SortableDataTablesTest extends TheInternetBaseTest {
+
+    private static SortableDataTablesPage sortableDataTablesPage;
+
+    private List<String> actualValues;
+    private List<String> expectedValues;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        logStep("Open the Url http://the-internet.herokuapp.com/");
+        theInternetPage = new TheInternetPage();
+        theInternetPage.load();
+
+        logStep("Verify if Url http://the-internet.herokuapp.com/ is opened");
+        assertTrue("Unable to load The Internet Page", theInternetPage.isLoaded());
+    }
+
+    @Override
+    public void setUp() {
+        logStep("Click subpage link");
+        sortableDataTablesPage = theInternetPage.clickSortableDataTablesLink();
+
+        logStep("Verify if subpage is opened");
+        assertTrue("Unable to open Sortable Data Tables page", sortableDataTablesPage.isLoaded());
+    }
+
+    @Test
+    public void shouldLastNameColumnBeOrderedAscendingAfterSort() {
+        int columnNumber = 0;
+        int tableNumber = new Random().nextInt(2);
+
+        logStep("Sort 'Last Name' column");
+        sortableDataTablesPage.sortColumnAscending(columnNumber, tableNumber);
+        assertTrue("Unable to set ascending order for 'Last Name' column",
+                sortableDataTablesPage.readColumnClass(columnNumber, tableNumber)
+                        .contains("headerSortDown"));
+
+        logStep("Verify data order for 'Last Name' column");
+        actualValues = sortableDataTablesPage.getColumnValues(columnNumber, tableNumber);
+        expectedValues = new ArrayList<String>(actualValues);
+        Collections.sort(expectedValues);
+        assertEquals("'Last Name' column is not sorted in ascending order",
+                expectedValues, actualValues);
+    }
+
+    @Test
+    public void shouldFirstNameColumnBeOrderedDescendingAfterSort() {
+        int columnNumber = 1;
+        int tableNumber = new Random().nextInt(2);
+
+        logStep("Sort 'First Name' column");
+        sortableDataTablesPage.sortColumnDescending(columnNumber, tableNumber);
+        assertTrue("Unable to set descending order for 'First Name' column",
+                sortableDataTablesPage.readColumnClass(columnNumber, tableNumber)
+                        .contains("headerSortUp"));
+
+        logStep("Verify data order for 'First Name' column");
+        actualValues = sortableDataTablesPage.getColumnValues(columnNumber, tableNumber);
+        expectedValues = new ArrayList<String>(actualValues);
+        Collections.sort(expectedValues);
+        Collections.reverse(expectedValues);
+        assertEquals("'First Name' column is not sorted in descending order",
+                expectedValues, actualValues);
+    }
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-21-Status-Codes.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-21-Status-Codes.html new file mode 100644 index 00000000..900dde1f --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/Basic-Tests/Example-21-Status-Codes.html @@ -0,0 +1,556 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
+example56 +
+
+
+

This example shows how to process HTTP status codes returned by a page

+
+
+

When you click status code link, you will be redirected to the subpage which returns the proper HTTP status code. In order to check what code was returned:

+
+
+
    +
  1. +

    Open developer tools

    +
  2. +
  3. +

    Go to Network tab

    +
  4. +
  5. +

    Click request name

    +
  6. +
  7. +

    Find a code number in Headers section

    +
  8. +
+
+
+
+example57 +
+
+
+

== Page Class

+
+
+

Add new methods to existing Status Codes Home Page Class

+
+
+
+
public class StatusCodesHomePage extends BasePage {
+
+    private static final By selectorLink200Code = By.linkText("200");
+    private static final By selectorLink301Code = By.linkText("301");
+    private static final By selectorLink404Code = By.linkText("404");
+    private static final By selectorLink500Code = By.linkText("500");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.STATUS_CODES.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Status Codes' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.STATUS_CODES.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Verifies if given link is displayed.
+     *
+     * @param selector Selector of the given link
+     * @return true if link is displayed
+     */
+    public boolean isLinkCodeDisplayed(By selector) {
+        return getDriver().findElementDynamic(selector)
+                .isDisplayed();
+
+    }
+
+    /**
+     * Clicks '200' link.
+     *
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCode200Link() {
+        return clickCodeLink(selectorLink200Code);
+    }
+
+    /**
+     * Clicks '301' link.
+     *
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCode301Link() {
+        return clickCodeLink(selectorLink301Code);
+    }
+
+    /**
+     * Clicks '404' link.
+     *
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCode404Link() {
+        return clickCodeLink(selectorLink404Code);
+    }
+
+    /**
+     * Clicks '500' link.
+     *
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCode500Link() {
+        return clickCodeLink(selectorLink500Code);
+    }
+
+    /**
+     * Clicks code link according to given code number.
+     *
+     * @param code Given code
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCodeLink(String code) {
+        return clickCodeLink(By.linkText(code));
+    }
+
+    private StatusCodesCodePage clickCodeLink(By selector) {
+        String codeNumber = getCodeNumberToCheck(selector);
+        getDriver().findElementDynamic(selector)
+                .click();
+        return new StatusCodesCodePage(codeNumber);
+    }
+
+    private String getCodeNumberToCheck(By selector) {
+        return getDriver().findElementDynamic(selector)
+                .getText();
+    }
+}
+
+
+
+

Create a page class for status codes subpages as well. In the class constructor specify which code number should be returned.

+
+
+
+
public class StatusCodesCodePage extends BasePage {
+
+    private static final By selectorDisplayedText   = By.cssSelector("#content > div > p");
+    private static final By selectorLinkToCodesPage = By.cssSelector("#content > div > p > a");
+
+    private String codeNumber;
+
+    public StatusCodesCodePage(String codeNumber) {
+        this.codeNumber = codeNumber;
+    }
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.STATUS_CODES.getValue() + '/');
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Status Codes' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.STATUS_CODES.getValue() + '/' + codeNumber);
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    public String getCodeNumber() {
+        return codeNumber;
+    }
+
+    /**
+     * Verifies if page is loaded with given code number.
+     *
+     * @param codeNumber Expected code number
+     * @return true if expected code number is loaded with web page
+     */
+    public boolean isLoadedWithStatusCode(String codeNumber) {
+        return getDriver().getCurrentUrl()
+                .equals(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.STATUS_CODES.getValue() + "/" + codeNumber);
+    }
+
+    /**
+     * Returns displayed code number.
+     * <p>
+     * Code number is retrieved from following text displayed on the page:<b>
+     * 'This page returned a *** status code.', where *** represent the code number to be
+     * returned.
+     * </p>
+     *
+     * @return String object representing the displayed code number retrieved from specific sentence.
+     */
+    public String getDisplayedCodeNumber() {
+        return getDriver().findElementDynamic(selectorDisplayedText)
+                .getText()
+                .substring(21, 24);
+    }
+
+    /**
+     * Clicks link to return to 'Code Page'.
+     *
+     * @return StatusCodesHomePage object
+     */
+    public StatusCodesHomePage clickLinkToCodePage() {
+        getDriver().findElementDynamic(selectorLinkToCodesPage)
+                .click();
+        return new StatusCodesHomePage();
+    }
+}
+
+
+
+
+
+

== Test Class

+
+
+

Before all tests: Open The Internet Main Page, go to Status Codes page

+
+
+

Steps:

+
+
+

For each status code

+
+
+
    +
  1. +

    Click code link

    +
  2. +
  3. +

    Check if the page is loaded with an expected code number

    +
  4. +
  5. +

    Check if the displayed code number equals the expected number

    +
  6. +
  7. +

    Go back to Status Codes Home Page

    +
  8. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class StatusCodeTest extends TheInternetBaseTest {
+
+    private static StatusCodesHomePage statusCodesHomePage;
+    private        StatusCodesCodePage statusCodesCodePage;
+
+    private String[] codes = { "200", "301", "404", "500" };
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        statusCodesHomePage = shouldTheInternetPageBeOpened().clickStatusCodesLink();
+
+        logStep("Verify if Status Codes Home page is opened");
+        assertTrue("Unable to open Status Codes Home page", statusCodesHomePage.isLoaded());
+    }
+
+    @Test
+    public void shouldProperCodeBeDisplayedAfterClickCodeLink() {
+
+        for (String code : codes) {
+            logStep("Click link to " + code + " code");
+            statusCodesCodePage = statusCodesHomePage.clickCodeLink(code);
+
+            logStep("Verify if proper web page corresponding to the code is opened");
+            assertTrue("Unable to open proper web page",
+                    statusCodesCodePage.isLoadedWithStatusCode(code));
+
+            logStep("Verify if the displayed code is equal to the expected one");
+            assertEquals(code, statusCodesCodePage.getDisplayedCodeNumber());
+
+            logStep("Click link to come back to 'Status Codes' page");
+            statusCodesCodePage.clickLinkToCodePage();
+        }
+    }
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/BFLogger.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/BFLogger.html new file mode 100644 index 00000000..ff9754a9 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/BFLogger.html @@ -0,0 +1,289 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

BFLogger

+
+
+

BFLogger is a default MrChecker logging tool. Use it to communicate important information from test execution. There are three basic logging methods:

+
+
+
    +
  • +

    logInfo(String message) - used for test steps

    +
  • +
  • +

    logDebug(String message) - used for non-official information, either during the test build process or in Page Object files

    +
  • +
  • +

    logError(String message) - used to emphasize critical information

    +
  • +
+
+
+

Logs will be visible in the console and in the log file under path: MrChecker_Test_Framework\workspace\project-folder\logs

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Elements-Types.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Elements-Types.html new file mode 100644 index 00000000..9648f884 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Elements-Types.html @@ -0,0 +1,296 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Elements types

+
+
+

MrChecker includes Object types for various elements existing on webpages such as Button, TextBox etc. There is also WebElement class and getDriver().findElementDynamic(By selector) method for creating webpage objects dynamically and performing basic actions:

+
+
+

Instead of using static types you can use:

+
+
+
+
    // Clicks the Typos link via a named WebElement variable and returns the
    // next page object.
    // NOTE(review): the variable 'checkboxesLink' and the selector
    // 'checkboxesLinkSelector' look copy-pasted from a Checkboxes example;
    // confirm the selector actually targets the Typos link.
    public TyposPage clickTyposLink() {
+        WebElement checkboxesLink = getDriver().findElementDynamic(checkboxesLinkSelector);
+        checkboxesLink.click();
+        return new TyposPage();
+    }
+
+
+
+

Or perform actions without creating a variable:

+
+
+
+
    // Same action as above, performed inline without an intermediate variable.
    // NOTE(review): 'checkboxesLinkSelector' appears copy-pasted from the
    // Checkboxes example — verify it targets the Typos link.
    public TyposPage clickTyposLink() {
+        getDriver().findElementDynamic(checkboxesLinkSelector).click();
+        return new TyposPage();
+    }
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Environment-variables.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Environment-variables.html new file mode 100644 index 00000000..d1373764 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Environment-variables.html @@ -0,0 +1,342 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Environment Variables

+
+
+

In Page classes, when you load/start a web page, it is not good practice to hard-code a fixed main URL.

+
+
+

Instead of hardcoded main URL variable, you build your Page class with a dynamic variable.

+
+
+
+
+

How to create / update system environment

+
+
+

Dynamic variable values are stored under path \src\resources\enviroments\environments.csv.

+
+
+
+image01 +
+
+
+

By default, the environment takes value from DEV column.

+
+
+
+
+

== Access to the external file variables

+
+
+

Create a class GetEnvironmentParam to map values from an external file with Page class:

+
+
+
+
public enum GetEnvironmentParam {
+
+    // Name if enum must be in line with cell name in /src/resources/environments/environment.csv
+    WWW_FONT_URL,
+    TOOLS_QA,
+    WEB_SERVICE,
+    THE_INTERNET_MAIN_PAGE,
+    ELEMENTAL_SELENIUM_PAGE;
+
+    public String getValue() {
+
+        if (null ==  BaseTest.getEnvironmentService()) {
+            throw new BFInputDataException("Environment Parameters class wasn't initialized properly");
+        }
+
+        return BaseTest.getEnvironmentService()
+                .getValue(this.name());
+
+    }
+
+    @Override
+    public String toString() {
+
+        return this.getValue();
+
+    }
+}
+
+
+
+

When you add a new row to environments.csv also add a new variable to GetEnvironmentParam class.

+
+
+

In Page class access environmental variable using this method:

+
+
+
+
GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue();
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Page-object.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Page-object.html new file mode 100644 index 00000000..3c15df57 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Page-object.html @@ -0,0 +1,322 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Page Object

+
+
+

Your Product Under Test will be the following website: http://the-internet.herokuapp.com/

+
+
+

At first, create an Object to represent The Internet Main Page:

+
+
+
+
public class TheInternetPage extends BasePage
+
+
+
+

Each class which extends BasePage class must override three methods:

+
+
+
    +
  • +

    public boolean isLoaded() - returns true if the page is loaded and false if not

    +
  • +
  • +

    public void load() - loads the page

    +
  • +
  • +

    public String pageTitle() - returns page title

    +
  • +
+
+
+
+
public class TheInternetPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        BFLogger.logDebug("The internet page is loaded: " + getDriver().getCurrentUrl());
+        return getDriver().getCurrentUrl()
+                .equals(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'The internet' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Selectors.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Selectors.html new file mode 100644 index 00000000..ac987afa --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/Selectors.html @@ -0,0 +1,579 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Selectors

+
+ +
+
+
+

Create selectors

+
+
+

Create a selector for every interactable element on a webpage using By type. Find elements and their attributes using the browser developer mode (F12).

+
+
+
+image02 +
+
+
+
+
private static final By abTestLinkSelector               = By.cssSelector("li >
+            a[href*='abtest']");
+    private static final By basicAuthLinkSelector            = By.cssSelector("li >
+            a[href*='basic_auth']");
+    private static final By brokenImageLinkSelector          = By.cssSelector("li >
+            a[href*='broken_images']");
+    private static final By challengingDomLinkSelector       = By.cssSelector("li >
+            a[href*='challenging_dom']");
+    private static final By checkboxesLinkSelector           = By.cssSelector("li >
+            a[href*='checkboxes']");
+    private static final By contextMenuLinkSelector          = By.cssSelector("li >
+            a[href*='context_menu']");
+    private static final By disappearingElementsLinkSelector = By.cssSelector("li >
+            a[href*='disappearing_elements']");
+    private static final By dragAndDropLinkSelector          = By.cssSelector("li >
+            a[href*='drag_and_drop']");
+    private static final By dropdownLinkSelector             = By.cssSelector("li >
+            a[href*='dropdown']");
+    private static final By dynamicContentLinkSelector       = By.cssSelector("li >
+            a[href*='dynamic_content']");
+    private static final By dynamicControlsLinkSelector      = By.cssSelector("li >
+            a[href*='dynamic_controls']");
+    private static final By dynamicLoadingLinkSelector       = By.cssSelector("li >
+            a[href*='dynamic_loading']");
+    private static final By exitIntentLinkSelector           = By.cssSelector("li >
+            a[href*='exit_intent']");
+    private static final By fileDownloadLinkSelector         = By.cssSelector("li >
+            a[href$='download']");
+    private static final By fileUploadLinkSelector           = By.cssSelector("li >
+           a[href*='upload']");
+    private static final By floatingMenuLinkSelector         = By.cssSelector("li >
+           a[href*='floating_menu']");
+    private static final By forgotPasswordLinkSelector       = By.cssSelector("li >
+           a[href*='forgot_password']");
+    private static final By formAuthenticationLinkSelector   = By.cssSelector("li >
+           a[href*='login']");
+    private static final By framesLinkSelector               = By.cssSelector("li >
+           a[href*='frames']");
+    private static final By geolocationLinkSelector          = By.cssSelector("li >
+           a[href*='geolocation']");
+    private static final By horizontalSliderLinkSelector     = By.cssSelector("li >
+           a[href*='horizontal_slider']");
+    private static final By hoversLinkSelector               = By.cssSelector("li >
+           a[href*='hovers']");
+    private static final By infiniteScrollLinkSelector       = By.cssSelector("li >
+           a[href*='infinite_scroll']");
+    private static final By javaScriptAlertLinkSelector      = By.cssSelector("li >
+           a[href*='javascript_alerts']");
+    private static final By javaScriptErrorLinkSelector      = By.cssSelector("li >
+           a[href*='javascript_error']");
+    private static final By jQueryUIMenuLinkSelector         = By.cssSelector("li >
+           a[href*='jqueryui/menu']");
+    private static final By keyPressesLinkSelector           = By.cssSelector("li >
+           a[href*='key_presses']");
+    private static final By largeAndDeepDOMLinkSelector      = By.cssSelector("li >
+           a[href*='large']");
+    private static final By multipleWindowsLinkSelector      = By.cssSelector("li >
+           a[href*='windows']");
+    private static final By nestedFramesLinkSelector         = By.cssSelector("li >
+           a[href*='nested_frames']");
+    private static final By notificationMessagesLinkSelector = By.cssSelector("li >
+           a[href*='notification_message']");
+    private static final By redirectLinkSelector             = By.cssSelector("li >
+           a[href*='redirector']");
+    private static final By secureFileDownloadLinkSelector   = By.cssSelector("li >
+           a[href*='download_secure']");
+    private static final By shiftingContentLinkSelector      = By.cssSelector("li >
+           a[href*='shifting_content']");
+    private static final By slowResourcesLinkSelector        = By.cssSelector("li >
+           a[href*='slow']");
+    private static final By sortableDataTablesLinkSelector   = By.cssSelector("li >
+           a[href*='tables']");
+    private static final By statusCodesLinkSelector          = By.cssSelector("li >
+           a[href*='status_codes']");
+    private static final By typosLinkSelector                = By.cssSelector("li >
+           a[href*='typos']");
+    private static final By wYSIWYGEditorLinkSelector        = By.cssSelector("li >
+           a[href*='tinymce']");
+
+
+
+
+
+

Implement methods

+
+
+

Then use these selectors to create Objects and perform actions on page elements:

+
+
+
+
public ABtestPage clickABtestingLink() {
+        new Button(abTestLinkSelector).click();
+        return new ABtestPage();
+    }
+
+    public BasicAuthPage clickBasicAuthLink() {
+        getDriver().waitForPageLoaded();
+        WebElement link = getDriver().findElementDynamic(basicAuthLinkSelector);
+        JavascriptExecutor executor = (JavascriptExecutor) getDriver();
+        executor.executeScript("var elem=arguments[0]; setTimeout(function() {elem.click();}, 100)",
+                link);
+        return new BasicAuthPage();
+    }
+
+    public BrokenImagePage clickBrokenImageLink() {
+        new Button(brokenImageLinkSelector).click();
+        return new BrokenImagePage();
+    }
+
+    public ChallengingDomPage clickChallengingDomLink() {
+        new Button(challengingDomLinkSelector).click();
+        return new ChallengingDomPage();
+    }
+
+    public CheckboxesPage clickCheckboxesLink() {
+        new Button(checkboxesLinkSelector).click();
+        return new CheckboxesPage();
+    }
+
+    public ContextMenuPage clickContextMenuLink() {
+        new Button(contextMenuLinkSelector).click();
+        return new ContextMenuPage();
+    }
+
+    public DisappearingElementsPage clickDisappearingElementsLink() {
+        new Button(disappearingElementsLinkSelector).click();
+        return new DisappearingElementsPage();
+    }
+
+    public DragAndDropPage clickDragAndDropLink() {
+        new Button(dragAndDropLinkSelector).click();
+        return new DragAndDropPage();
+    }
+
+    public DropdownPage clickDropdownLink() {
+        new Button(dropdownLinkSelector).click();
+        return new DropdownPage();
+    }
+
+    public DynamicContentPage clickDynamicContentLink() {
+        new Button(dynamicContentLinkSelector).click();
+        return new DynamicContentPage();
+    }
+
+    public DynamicControlsPage clickDynamicControlsLink() {
+        new Button(dynamicControlsLinkSelector).click();
+        return new DynamicControlsPage();
+    }
+
+    public DynamicLoadingPage clickDynamicLoadingLink() {
+        new Button(dynamicLoadingLinkSelector).click();
+        return new DynamicLoadingPage();
+    }
+
+    public ExitIntentPage clickExitIntentLink() {
+        new Button(exitIntentLinkSelector).click();
+        return new ExitIntentPage();
+    }
+
+    public FileDownloadPage clickFileDownloadLink() {
+        new Button(fileDownloadLinkSelector).click();
+        return new FileDownloadPage();
+    }
+
+    public FileUploadPage clickFileUploadLink() {
+        new Button(fileUploadLinkSelector).click();
+        return new FileUploadPage();
+    }
+
+    public FloatingMenuPage clickFloatingMenuLink() {
+        new Button(floatingMenuLinkSelector).click();
+        return new FloatingMenuPage();
+    }
+
+    public ForgotPasswordPage clickForgotPasswordLink() {
+        new Button(forgotPasswordLinkSelector).click();
+        return new ForgotPasswordPage();
+    }
+
+    public FormAuthenticationPage clickFormAuthenticationLink() {
+        new Button(formAuthenticationLinkSelector).click();
+        return new FormAuthenticationPage();
+    }
+
+    public FramesPage clickFramesLink() {
+        new Button(framesLinkSelector).click();
+        return new FramesPage();
+    }
+
+    public GeolocationPage clickGeolocationLink() {
+        new Button(geolocationLinkSelector).click();
+        return new GeolocationPage();
+    }
+
+    public HorizontalSliderPage clickHorizontalSliderLink() {
+        new Button(horizontalSliderLinkSelector).click();
+        return new HorizontalSliderPage();
+    }
+
+    public HoversPage clickHoversLink() {
+        new Button(hoversLinkSelector).click();
+        return new HoversPage();
+    }
+
+    public InfiniteScrollPage clickInfiniteScrollLink() {
+        new Button(infiniteScrollLinkSelector).click();
+        return new InfiniteScrollPage();
+    }
+
+    public JavaScriptAlertsPage clickJavaScriptAlertLink() {
+        new Button(javaScriptAlertLinkSelector).click();
+        return new JavaScriptAlertsPage();
+    }
+
+    public JavaScriptErrorPage clickJavaScriptErrorLink() {
+        new Button(javaScriptErrorLinkSelector).click();
+        return new JavaScriptErrorPage();
+    }
+
+    public JQueryUIMenuPage clickJQueryUIMenuLink() {
+        new Button(jQueryUIMenuLinkSelector).click();
+        return new JQueryUIMenuPage();
+    }
+
+    public KeyPressesPage clickKeyPressesLink() {
+        new Button(keyPressesLinkSelector).click();
+        return new KeyPressesPage();
+    }
+
+    public LargeAndDeepDOMPage clickLargeAndDeepDOMLink() {
+        new Button(largeAndDeepDOMLinkSelector).click();
+        return new LargeAndDeepDOMPage();
+    }
+
+    public MultipleWindowsPage clickmultipleWindowsLink() {
+        new Button(multipleWindowsLinkSelector).click();
+        return new MultipleWindowsPage();
+    }
+
+    public NestedFramesPage clickNestedFramesLink() {
+        new Button(nestedFramesLinkSelector).click();
+        return new NestedFramesPage();
+    }
+
+    public NotificationMessagesPage clickNotificationMessagesLink() {
+        new Button(notificationMessagesLinkSelector).click();
+        return new NotificationMessagesPage();
+    }
+
+    public RedirectLinkPage clickRedirectLink() {
+        new Button(redirectLinkSelector).click();
+        return new RedirectLinkPage();
+    }
+
+    public SecureFileDownloadPage clickSecureFileDownloadLink() {
+        new Button(secureFileDownloadLinkSelector).click();
+        return new SecureFileDownloadPage();
+    }
+
+    public ShiftingContentPage clickShiftingContentLink() {
+        new Button(shiftingContentLinkSelector).click();
+        return new ShiftingContentPage();
+    }
+
+    public SlowResourcesPage clickSlowResourcesLink() {
+        new Button(slowResourcesLinkSelector).click();
+        return new SlowResourcesPage();
+    }
+
+    public SortableDataTablesPage clickSortableDataTablesLink() {
+        new Button(sortableDataTablesLinkSelector).click();
+        return new SortableDataTablesPage();
+    }
+
+    public StatusCodesHomePage clickStatusCodesLink() {
+        new Button(statusCodesLinkSelector).click();
+        return new StatusCodesHomePage();
+    }
+
+    public TyposPage clickTyposLink() {
+        new Button(typosLinkSelector).click();
+        return new TyposPage();
+    }
+
+    public WYSIWYGEditorPage clickWYSIWYGEditorLink() {
+        new Button(wYSIWYGEditorLinkSelector).click();
+        return new WYSIWYGEditorPage();
+    }
+
+
+
+

These methods create a Button object for every link on The Internet Page and click it to redirect on a different subpage.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/The-Internet-Base-Test.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/The-Internet-Base-Test.html new file mode 100644 index 00000000..1c661bd9 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Basic-Tutorials/First-Steps/The-Internet-Base-Test.html @@ -0,0 +1,350 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

The Internet Base Test

+
+ +
+
+
+

Test Class

+
+
+

Create Test class and override methods:

+
+
+
    +
  • +

    public void setUp() - executes before each test

    +
  • +
  • +

    public void tearDown() - executes after each test

    +
  • +
+
+
+
+
public class TheInternetBaseTest extends BaseTest {
+    @Override
+    public void setUp() {
+
+    }
+
+    @Override
+    public void tearDown() {
+        logStep("Navigate back to The-Internet page");
+        BasePage.navigateBack();
+    }
+}
+
+
+
+

logStep(String message) method doesn’t exist yet so you should create it:

+
+
+
+
    protected static int             step = 0;
+
+     /**
+     * Logs test step including step number calculated individually for each test.
+     *
+     * @param message Text message representing step description.
+     */
+    public static void logStep(String message) {
+        BFLogger.logInfo("Step " + ++step + ": " + message);
+    }
+
+
+
+

Write a method for loading The Internet Page and checking if it is properly opened:

+
+
+
+
    protected static TheInternetPage theInternetPage;
+
+    /**
+     * Performs operations required for verifying if The Internet Page is properly opened.
+     *
+     * @return TheInternetPage
+     */
+    public static TheInternetPage shouldTheInternetPageBeOpened() {
+
+        logStep("Open the Url http://the-internet.herokuapp.com/");
+        theInternetPage = new TheInternetPage();
+        theInternetPage.load();
+
+        logStep("Verify if Url http://the-internet.herokuapp.com/ is opened");
+        assertTrue("Unable to load The Internet Page", theInternetPage.isLoaded());
+
+        return theInternetPage;
+    }
+
+
+
+

This Test class can’t be launched because it doesn’t contain any @Test methods. It’s been created only for supporting other Test classes.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/E2E-Tutorials/Tutorial1.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/E2E-Tutorials/Tutorial1.html new file mode 100644 index 00000000..1b3b5cde --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/E2E-Tutorials/Tutorial1.html @@ -0,0 +1,664 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

MrChecker E2E tutorials

+
+
+

In order to learn more about MrChecker structure, start from Project Organisation section and then check out our fantastic tutorials:

+
+
+
+
+

How to create a basic test in MrChecker

+
+ +
+
+
+

Example: Booking a table

+
+
+

As an example, we will test the MyThaiStar booking page.
+In order to book a table, do the following steps:

+
+
+
    +
  1. +

    Open MyThaiStar Book Table Page

    +
  2. +
  3. +

    Enter booking data: Date and time, Name, Email and number of Table guests

    +
  4. +
  5. +

    Click Accept terms

    +
  6. +
  7. +

    Click Book table

    +
  8. +
  9. +

    Display confirmation box and send booking

    +
  10. +
  11. +

    Check if the booking was successful.

    +
  12. +
+
+
+
+image1 +
+
+
+
+image2 +
+
+
+

You can go through these steps manually and double-check the result.

+
+
+
+
+

How to prepare a test

+
+ +
+
+
+

== 1. Create BookTablePage class

+
+
+

You will need a class which will represent the MyThaiStar booking page.
+Fill the required methods with the following code:

+
+
+
+
public class BookTablePage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded(); //waits until the page is loaded
+        return getDriver().getCurrentUrl()
+                .equals("https://mts-devonfw-core.cloud.okteto.net/bookTable"); //checks if current page address equals MyThaiStar booking page address
+    }
+
+    @Override
+    public void load() {
+        getDriver().get("https://mts-devonfw-core.cloud.okteto.net/bookTable"); //loads page under specified address
+        getDriver().waitForPageLoaded(); //waits until the page is loaded
+    }
+
+    @Override
+    public String pageTitle() {
+        return "My Thai Star"; //returns page title
+    }
+}
+
+
+
+

getDriver() method allows accessing Selenium Web Driver which performs actions on the webpage.

+
+
+

As this page class represents the MyThaiStar booking page, you have to set up selectors for web elements required in the test case. In the example you have to create selectors for elements we’ll interact with:

+
+
+
    +
  • +

    Date and time input field

    +
  • +
  • +

    Name input field

    +
  • +
  • +

    Email input field

    +
  • +
  • +

    Table guests input field

    +
  • +
  • +

    Accept terms checkbox

    +
  • +
  • +

    Book table button

    +
  • +
+
+
+

Selectors will be implemented as fields.

+
+
+

Example of the selector for Date and time input field:

+
+
+
+
/** Date field search criteria */
+private static final By dateSearch = By.cssSelector("input[formcontrolname='bookingDate']");
+
+
+
+

The input field’s name "bookingDate" was found by using the developer console in Google Chrome. How to prepare an everlasting selector?

+
+
+
+image3 +
+
+
+

This selector can be used to create a WebElement object of the said input field. Therefore, you will create a new method and call it "enterTimeAndDate".

+
+
+
+
public void enterTimeAndDate(String date) {
+    WebElement dateInput = getDriver().findElementDynamic(dateSearch); //creates a new WebElement to access Date and time input field
+    dateInput.sendKeys(date); //enters date value
+}
+
+
+
+

Now you can create other selectors and objects and methods for every element on the webpage:

+
+
+
+
/** Name input field search criteria */
+private static final By nameSearch = By.cssSelector("input[formcontrolname='name']");
+
+/** Email input field search criteria */
+private static final By emailSearch = By.cssSelector("input[formcontrolname='email']");
+
+/** Number of guests search criteria */
+private static final By guestsSearch = By.cssSelector("input[formcontrolname='assistants']");
+
+/** Check box search criteria */
+private static final By checkboxSearch = By.cssSelector("mat-checkbox[data-name='bookTableTerms']");
+
+/** Book table button search criteria */
+private static By bookTableSearch = By.name("bookTableSubmit");
+
+
+
+
+
public void enterName(String name) {
+    WebElement nameInput = getDriver().findElementDynamic(nameSearch); //creates a new WebElement to access name input field
+    nameInput.sendKeys(name); //enters name value
+}
+
+public void enterEmail(String email) {
+    WebElement emailInput = getDriver().findElementDynamic(emailSearch); //creates a new WebElement to access email input field
+    emailInput.sendKeys(email); //enters email value
+}
+
+public void enterGuests(int amountOfGuests) {
+    WebElement guestsInput = getDriver().findElementDynamic(guestsSearch); //creates a new WebElement to access amount of guests input field
+    guestsInput.sendKeys(Integer.toString(amountOfGuests)); //enters the number of guests value converted from integer to string
+}
+
+public void acceptTerms() {
+    WebElement checkbox = getDriver().findElementDynamic(checkboxSearch); //creates a new WebElement to access accept terms checkbox
+    WebElement square = checkbox.findElement(By.className("mat-checkbox-inner-container")); //creates a new WebElement to access inner square
+    JavascriptExecutor js = (JavascriptExecutor) getDriver(); //creates a Javascript executor object
+    js.executeScript("arguments[0].click()", square); //executes a script which clicks the square
+
+}
+
+public void clickBookTable() {
+    WebElement buttonbutton = getDriver().findElementDynamic(bookTableSearch); //creates a new WebElement to access book table button
+    getDriver().waitUntilElementIsClickable(bookTableSearch); //waits until a button might be clicked
+    buttonbutton.click(); //clicks the button
+}
+
+
+
+

You can use those methods in order to create a new method to go through the whole booking process:

+
+
+
+
public ConfirmBookPage enterBookingData(String date, String name, String email, int guests) {
+    enterTimeAndDate(date);
+    enterName(name);
+    enterEmail(email);
+    enterGuests(guests);
+    acceptTerms();
+
+    clickBookTable();
+
+    return new ConfirmBookPage();
+}
+
+
+
+
+
+

== 2. Create ConfirmBookPage class

+
+
+

As you can see, this method returns another page object that has not yet been created. This step is required, as the booking information that you would like to check is on another webpage. This means that you will have to create another page class and call it ConfirmBookPage:

+
+
+
+
public class ConfirmBookPage extends BasePage {
+
+    /** Confirmation dialog search criteria */
+    private static final By confirmationDialogSearch = By.className("mat-dialog-container");
+
+    /** Send confirmation button search criteria */
+    private static final By sendButtonSearch = By.name("bookTableConfirm");
+
+    /** Cancel confirmation button search criteria */
+    private static final By cancelButtonSearch = By.name("bookTableCancel");
+
+    @Override
+    public boolean isLoaded() {
+        //creates a new WebElement to access confirmation dialog box
+        WebElement confirmationDialog = getDriver().findElementDynamic(confirmationDialogSearch);
+
+        return confirmationDialog.isDisplayed(); //checks if the box is displayed
+    }
+
+    //this method won't be called because the page is loaded only after clicking book table button
+    @Override
+    public void load() {
+        BFLogger.logError("MyThaiStar booking confirmation page was not loaded."); //logs error
+    }
+
+    @Override
+    public String pageTitle() {
+        return "My Thai Star";
+    }
+
+    public void confirmBookingData() {
+        WebElement sendButton = getDriver().findElementDynamic(sendButtonSearch); //creates a new WebElement to access confirmation button
+        sendButton.click(); //clicks the send button
+    }
+
+    public void cancelBookingData() {
+        WebElement cancelButton = getDriver().findElementDynamic(cancelButtonSearch); //creates a new WebElement to access resignation button
+        cancelButton.click(); //clicks the cancel button
+    }
+}
+
+
+
+
+image4 +
+
+
+

After the click on Send button - the green confirmation dialogue appears with the message "Table successfully booked":

+
+
+
+image5 +
+
+
+

To be able to check if the booking was successful, you should go back to the BookTablePage class and add one more method in order to check if the green box was displayed:

+
+
+
+
/** Dialog search criteria */
+private static final By dialogSearch = By.className("bgc-green-600");
+
+public boolean checkConfirmationDialog() {
+    WebElement greenConfirmationDialog = getDriver().findElementDynamic(dialogSearch); //creates a new WebElement to access confirmation dialog
+
+    return greenConfirmationDialog.isDisplayed(); //checks if the dialog is displayed
+}
+
+
+
+
+
+

== 3. Create BookTableTest class

+
+
+

At this point you can start creating a test class:

+
+
+
+
import static org.junit.Assert.assertTrue;
+
+public class BookTableTest extends BaseTest {
+    private static BookTablePage bookTablePage = new BookTablePage(); //the field contains book table page object
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        bookTablePage.load(); //loads book table page
+    }
+
+    @AfterClass
+    public static void tearDownAfterClass() {
+
+    }
+
+    @Override
+    public void setUp() {
+        if (!bookTablePage.isLoaded()) {
+            bookTablePage.load(); //if the page is not loaded, loads it
+        }
+    }
+
+    @Override
+    public void tearDown() {
+
+    }
+}
+
+
+
+
+
+

== 4. Write the first test

+
+
+

You can prepare your first test method using the methods from the page classes

+
+
+
+
@Test
+public void Test_BookTableAndCheckConfirmation() {
+    String date = "07/23/2019 1:00 PM"; //replace with tomorrow's date in format "MM/dd/yyyy hh:mm a"
+    String name = "Smith"; //name field
+    String email = "smith@somemail.com"; //email field
+    int guests = 3; //number of guests
+
+    //enters booking data and returns a new confirmation page
+    ConfirmBookPage confirmBookPage = bookTablePage.enterBookingData(date, name, email, guests);
+    confirmBookPage.confirmBookingData(); //confirms booking
+
+    //checks if the green dialog box appears, if it does, test is passed, if not, the test failed and displays message given in the first argument
+    assertTrue("Test failed: Table not booked", bookTablePage.checkConfirmationDialog()); //returns true if dialog box appears and false if not
+}
+
+
+
+
+
+

== 5. Run the test

+
+
+

Run the test by right-clicking on the test method → Run as → JUnit test.

+
+
+
+image6 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Project-Organisation.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Project-Organisation.html new file mode 100644 index 00000000..e731c990 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/Project-Organisation.html @@ -0,0 +1,530 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Project organization

+
+ +
+
+
+

Importing projects

+
+
+

Every MrChecker project should be imported as a Maven Project.

+
+
+

Example from Eclipse IDE:

+
+
+
+1 +
+
+
+
+2 +
+
+
+

Enter the project path and select projects to import.

+
+
+
+3 +
+
+
+

When the import is finished, update the project structure - ALT + F5

+
+
+
+4 +
+
+
+
+
+

Exporting projects

+
+
+

In order to create a new standalone MrChecker project, you can use template-app-under-test and export it to the new folder:

+
+
+
+5 +
+
+
+
+6 +
+
+
+

Create a new folder for the project and enter its path. Select project and files to export:

+
+
+
+7 +
+
+
+

Change project name and other properties, if necessary, in pom.xml file:

+
+
+
+8 +
+
+
+

Then you can import the project to the workspace and create new packages and classes.

+
+
+
+
+

Creating new packages

+
+
+
    +
  1. +

    You will need two new packages: one for the new page classes, the other one for test classes:

    +
    +
      +
    • +

      Create a package for page classes

      +
      +
      +
      Open Eclipse
      +Use the "Project Explorer" on the left
      +Navigate to [your-project] → src/main/java → com.capgemini.mrchecker → selenium
      +Right-click on "selenium"
      +Click on "New" → New Package
      +Name the new package "com.capgemini.mrchecker.selenium.pages.[your-product-name]"
      +
      +
      +
    • +
    • +

      Create a package for test classes

      +
      +
      +
      Navigate to [your-project] → src/test/java → com.capgemini.mrchecker → selenium
      +Right click on "selenium"
      +Click on "New" → New Package
      +Name the new package "com.capgemini.mrchecker.selenium.tests.[your-product-name]"
      +
      +
      +
    • +
    +
    +
  2. +
+
+
+

Example:

+
+
+
+9 +
+
+
+
+
+

Creating new Page Classes

+
+
+
+
Navigate to: [your-project] → src/main/java → com.capgemini.mrchecker → selenium.pages.[your-product-name]
+Click on "New" → New Class
+Enter the name "YourPage"
+
+
+
+

Every Page Class should extend BasePage class. Import all necessary packages and override all required methods:

+
+
+
    +
  • +

    public boolean isLoaded() - returns true if the page is loaded and false if not

    +
  • +
  • +

    public void load() - loads the page

    +
  • +
  • +

    public String pageTitle() - returns page title

    +
  • +
+
+
+

Example:

+
+
+
+
 public class MainPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        return false;
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Main Page'");
+    }
+
+    @Override
+    public String pageTitle() {
+        return "Main Page Title";
+    }
+ }
+
+
+
+
+
+

Creating new Test Classes

+
+
+
+
Navigate to  [your-project] → src/test/java → com.capgemini.mrchecker → selenium.tests.[your-product-name]
+Click on "New" → New Class
+Enter the name "YourCaseTest"
+
+
+
+

Test classes should extend BaseTest class, import all necessary packages and override all required methods:

+
+
+
    +
  • +

    public void setUp() - executes before each test

    +
  • +
  • +

    public void tearDown() - executes after each test

    +
  • +
+
+
+

Optionally, it is also possible to implement the following methods:

+
+
+
    +
  • +

    @BeforeClass +public static void setUpBeforeClass() - runs only once before all tests

    +
  • +
  • +

    @AfterClass +public static void tearDownAfterClass() - runs only once after performing all tests

    +
  • +
+
+
+

Every test method has to be signed with "@Test" parameter.

+
+
+
+
 public class YourCaseTest extends BaseTest {
+    private static MainPage mainPage = new MainPage();
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        mainPage.load();
+    }
+
+    @AfterClass
+    public static void tearDownAfterClass() {
+
+    }
+
+    @Override
+    public void setUp() {
+        if (!mainPage.isLoaded()) {
+            mainPage.load();
+        }
+    }
+
+    @Override
+    public void tearDown() {
+
+    }
+
+    @Test
+    public void shouldTestRunWithoutReturningError() {
+
+    }
+ }
+
+
+
+
+
+

Running Tests

+
+
+

Run the test by right-clicking on the test method → Run as → JUnit test.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/tutorials.html b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/tutorials.html new file mode 100644 index 00000000..ea4d8fe7 --- /dev/null +++ b/docs/mrchecker/1.0/Who-Is-MrChecker/Tutorials/tutorials.html @@ -0,0 +1,277 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

In order to learn more about MrChecker structure, start from Project Organisation section and then check out our fantastic tutorials:

+
+
+

This tutorial will guide you through a series of tests which perform basic actions on webpages using MrChecker.

+
+
+

Make sure you already have MrChecker Test Framework installed on your PC. How to install?

+
+
+

Your Product Under Test will be the following website: http://the-internet.herokuapp.com/

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/_images/1.png b/docs/mrchecker/1.0/_images/1.png new file mode 100644 index 00000000..ba345d97 Binary files /dev/null and b/docs/mrchecker/1.0/_images/1.png differ diff --git a/docs/mrchecker/1.0/_images/2.png b/docs/mrchecker/1.0/_images/2.png new file mode 100644 index 00000000..b39a4b14 Binary files /dev/null and b/docs/mrchecker/1.0/_images/2.png differ diff --git a/docs/mrchecker/1.0/_images/3.png b/docs/mrchecker/1.0/_images/3.png new file mode 100644 index 00000000..a078a12a Binary files /dev/null and b/docs/mrchecker/1.0/_images/3.png differ diff --git a/docs/mrchecker/1.0/_images/4.png b/docs/mrchecker/1.0/_images/4.png new file mode 100644 index 00000000..7c196534 Binary files /dev/null and b/docs/mrchecker/1.0/_images/4.png differ diff --git a/docs/mrchecker/1.0/_images/6.png b/docs/mrchecker/1.0/_images/6.png new file mode 100644 index 00000000..635d6a5c Binary files /dev/null and b/docs/mrchecker/1.0/_images/6.png differ diff --git a/docs/mrchecker/1.0/_images/7.png b/docs/mrchecker/1.0/_images/7.png new file mode 100644 index 00000000..0c0ab107 Binary files /dev/null and b/docs/mrchecker/1.0/_images/7.png differ diff --git a/docs/mrchecker/1.0/_images/8.png b/docs/mrchecker/1.0/_images/8.png new file mode 100644 index 00000000..6014c7a8 Binary files /dev/null and b/docs/mrchecker/1.0/_images/8.png differ diff --git a/docs/mrchecker/1.0/_images/9.png b/docs/mrchecker/1.0/_images/9.png new file mode 100644 index 00000000..e9d6fd82 Binary files /dev/null and b/docs/mrchecker/1.0/_images/9.png differ diff --git a/docs/mrchecker/1.0/_images/Pom.png b/docs/mrchecker/1.0/_images/Pom.png new file mode 100644 index 00000000..ee6f8787 Binary files /dev/null and b/docs/mrchecker/1.0/_images/Pom.png differ diff --git a/docs/mrchecker/1.0/_images/content.png b/docs/mrchecker/1.0/_images/content.png new file mode 100644 index 00000000..57dde1a3 Binary files /dev/null and b/docs/mrchecker/1.0/_images/content.png differ diff 
--git a/docs/mrchecker/1.0/_images/example1.png b/docs/mrchecker/1.0/_images/example1.png new file mode 100644 index 00000000..c23188c3 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example1.png differ diff --git a/docs/mrchecker/1.0/_images/example10.png b/docs/mrchecker/1.0/_images/example10.png new file mode 100644 index 00000000..67ef9c20 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example10.png differ diff --git a/docs/mrchecker/1.0/_images/example11.png b/docs/mrchecker/1.0/_images/example11.png new file mode 100644 index 00000000..6d8903e9 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example11.png differ diff --git a/docs/mrchecker/1.0/_images/example12.png b/docs/mrchecker/1.0/_images/example12.png new file mode 100644 index 00000000..24c523ea Binary files /dev/null and b/docs/mrchecker/1.0/_images/example12.png differ diff --git a/docs/mrchecker/1.0/_images/example13.png b/docs/mrchecker/1.0/_images/example13.png new file mode 100644 index 00000000..3d13cd05 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example13.png differ diff --git a/docs/mrchecker/1.0/_images/example14.png b/docs/mrchecker/1.0/_images/example14.png new file mode 100644 index 00000000..e34c4c64 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example14.png differ diff --git a/docs/mrchecker/1.0/_images/example15.png b/docs/mrchecker/1.0/_images/example15.png new file mode 100644 index 00000000..2d0ccde2 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example15.png differ diff --git a/docs/mrchecker/1.0/_images/example16.png b/docs/mrchecker/1.0/_images/example16.png new file mode 100644 index 00000000..0944dfff Binary files /dev/null and b/docs/mrchecker/1.0/_images/example16.png differ diff --git a/docs/mrchecker/1.0/_images/example17.png b/docs/mrchecker/1.0/_images/example17.png new file mode 100644 index 00000000..85e16336 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example17.png differ diff --git 
a/docs/mrchecker/1.0/_images/example18.png b/docs/mrchecker/1.0/_images/example18.png new file mode 100644 index 00000000..ca2095d7 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example18.png differ diff --git a/docs/mrchecker/1.0/_images/example19.png b/docs/mrchecker/1.0/_images/example19.png new file mode 100644 index 00000000..6144f3e1 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example19.png differ diff --git a/docs/mrchecker/1.0/_images/example2.png b/docs/mrchecker/1.0/_images/example2.png new file mode 100644 index 00000000..f4fbb13e Binary files /dev/null and b/docs/mrchecker/1.0/_images/example2.png differ diff --git a/docs/mrchecker/1.0/_images/example23.png b/docs/mrchecker/1.0/_images/example23.png new file mode 100644 index 00000000..52b0c3f4 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example23.png differ diff --git a/docs/mrchecker/1.0/_images/example24.png b/docs/mrchecker/1.0/_images/example24.png new file mode 100644 index 00000000..3a4a0cba Binary files /dev/null and b/docs/mrchecker/1.0/_images/example24.png differ diff --git a/docs/mrchecker/1.0/_images/example25.png b/docs/mrchecker/1.0/_images/example25.png new file mode 100644 index 00000000..cc7b7809 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example25.png differ diff --git a/docs/mrchecker/1.0/_images/example26.png b/docs/mrchecker/1.0/_images/example26.png new file mode 100644 index 00000000..482acf88 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example26.png differ diff --git a/docs/mrchecker/1.0/_images/example27.png b/docs/mrchecker/1.0/_images/example27.png new file mode 100644 index 00000000..91bb1ca6 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example27.png differ diff --git a/docs/mrchecker/1.0/_images/example28.png b/docs/mrchecker/1.0/_images/example28.png new file mode 100644 index 00000000..4a737c5b Binary files /dev/null and b/docs/mrchecker/1.0/_images/example28.png differ diff --git 
a/docs/mrchecker/1.0/_images/example29.png b/docs/mrchecker/1.0/_images/example29.png new file mode 100644 index 00000000..756ae948 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example29.png differ diff --git a/docs/mrchecker/1.0/_images/example3.png b/docs/mrchecker/1.0/_images/example3.png new file mode 100644 index 00000000..a7aa84b4 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example3.png differ diff --git a/docs/mrchecker/1.0/_images/example30.png b/docs/mrchecker/1.0/_images/example30.png new file mode 100644 index 00000000..c124512c Binary files /dev/null and b/docs/mrchecker/1.0/_images/example30.png differ diff --git a/docs/mrchecker/1.0/_images/example31.png b/docs/mrchecker/1.0/_images/example31.png new file mode 100644 index 00000000..0b820545 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example31.png differ diff --git a/docs/mrchecker/1.0/_images/example32.png b/docs/mrchecker/1.0/_images/example32.png new file mode 100644 index 00000000..7b8f9f6c Binary files /dev/null and b/docs/mrchecker/1.0/_images/example32.png differ diff --git a/docs/mrchecker/1.0/_images/example33.png b/docs/mrchecker/1.0/_images/example33.png new file mode 100644 index 00000000..d1825de2 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example33.png differ diff --git a/docs/mrchecker/1.0/_images/example34.png b/docs/mrchecker/1.0/_images/example34.png new file mode 100644 index 00000000..1ac33fd7 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example34.png differ diff --git a/docs/mrchecker/1.0/_images/example35.png b/docs/mrchecker/1.0/_images/example35.png new file mode 100644 index 00000000..a9ad830e Binary files /dev/null and b/docs/mrchecker/1.0/_images/example35.png differ diff --git a/docs/mrchecker/1.0/_images/example36.png b/docs/mrchecker/1.0/_images/example36.png new file mode 100644 index 00000000..57bb6987 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example36.png differ diff --git 
a/docs/mrchecker/1.0/_images/example37.png b/docs/mrchecker/1.0/_images/example37.png new file mode 100644 index 00000000..86f35ad3 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example37.png differ diff --git a/docs/mrchecker/1.0/_images/example38.png b/docs/mrchecker/1.0/_images/example38.png new file mode 100644 index 00000000..bcdd4a90 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example38.png differ diff --git a/docs/mrchecker/1.0/_images/example39.png b/docs/mrchecker/1.0/_images/example39.png new file mode 100644 index 00000000..ffc07f3c Binary files /dev/null and b/docs/mrchecker/1.0/_images/example39.png differ diff --git a/docs/mrchecker/1.0/_images/example4.png b/docs/mrchecker/1.0/_images/example4.png new file mode 100644 index 00000000..6258ae7b Binary files /dev/null and b/docs/mrchecker/1.0/_images/example4.png differ diff --git a/docs/mrchecker/1.0/_images/example40.png b/docs/mrchecker/1.0/_images/example40.png new file mode 100644 index 00000000..3835bb18 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example40.png differ diff --git a/docs/mrchecker/1.0/_images/example41.png b/docs/mrchecker/1.0/_images/example41.png new file mode 100644 index 00000000..0f02aec9 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example41.png differ diff --git a/docs/mrchecker/1.0/_images/example42.png b/docs/mrchecker/1.0/_images/example42.png new file mode 100644 index 00000000..fd0594af Binary files /dev/null and b/docs/mrchecker/1.0/_images/example42.png differ diff --git a/docs/mrchecker/1.0/_images/example43.png b/docs/mrchecker/1.0/_images/example43.png new file mode 100644 index 00000000..21aad882 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example43.png differ diff --git a/docs/mrchecker/1.0/_images/example44.png b/docs/mrchecker/1.0/_images/example44.png new file mode 100644 index 00000000..538f4fa4 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example44.png differ diff --git 
a/docs/mrchecker/1.0/_images/example45.png b/docs/mrchecker/1.0/_images/example45.png new file mode 100644 index 00000000..a6fbdac9 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example45.png differ diff --git a/docs/mrchecker/1.0/_images/example46.png b/docs/mrchecker/1.0/_images/example46.png new file mode 100644 index 00000000..279a0998 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example46.png differ diff --git a/docs/mrchecker/1.0/_images/example47.png b/docs/mrchecker/1.0/_images/example47.png new file mode 100644 index 00000000..89d52d2a Binary files /dev/null and b/docs/mrchecker/1.0/_images/example47.png differ diff --git a/docs/mrchecker/1.0/_images/example48.png b/docs/mrchecker/1.0/_images/example48.png new file mode 100644 index 00000000..a394b107 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example48.png differ diff --git a/docs/mrchecker/1.0/_images/example49.png b/docs/mrchecker/1.0/_images/example49.png new file mode 100644 index 00000000..e691840a Binary files /dev/null and b/docs/mrchecker/1.0/_images/example49.png differ diff --git a/docs/mrchecker/1.0/_images/example5.png b/docs/mrchecker/1.0/_images/example5.png new file mode 100644 index 00000000..11496ffc Binary files /dev/null and b/docs/mrchecker/1.0/_images/example5.png differ diff --git a/docs/mrchecker/1.0/_images/example50.png b/docs/mrchecker/1.0/_images/example50.png new file mode 100644 index 00000000..3a75a6da Binary files /dev/null and b/docs/mrchecker/1.0/_images/example50.png differ diff --git a/docs/mrchecker/1.0/_images/example51.png b/docs/mrchecker/1.0/_images/example51.png new file mode 100644 index 00000000..fa42b7b8 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example51.png differ diff --git a/docs/mrchecker/1.0/_images/example52.png b/docs/mrchecker/1.0/_images/example52.png new file mode 100644 index 00000000..9abdc9f6 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example52.png differ diff --git 
a/docs/mrchecker/1.0/_images/example53.png b/docs/mrchecker/1.0/_images/example53.png new file mode 100644 index 00000000..fa8cd2d2 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example53.png differ diff --git a/docs/mrchecker/1.0/_images/example54.png b/docs/mrchecker/1.0/_images/example54.png new file mode 100644 index 00000000..686c9e46 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example54.png differ diff --git a/docs/mrchecker/1.0/_images/example55.png b/docs/mrchecker/1.0/_images/example55.png new file mode 100644 index 00000000..5a89533c Binary files /dev/null and b/docs/mrchecker/1.0/_images/example55.png differ diff --git a/docs/mrchecker/1.0/_images/example56.png b/docs/mrchecker/1.0/_images/example56.png new file mode 100644 index 00000000..85324ddd Binary files /dev/null and b/docs/mrchecker/1.0/_images/example56.png differ diff --git a/docs/mrchecker/1.0/_images/example57.png b/docs/mrchecker/1.0/_images/example57.png new file mode 100644 index 00000000..8d6117fd Binary files /dev/null and b/docs/mrchecker/1.0/_images/example57.png differ diff --git a/docs/mrchecker/1.0/_images/example6.png b/docs/mrchecker/1.0/_images/example6.png new file mode 100644 index 00000000..d1867da1 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example6.png differ diff --git a/docs/mrchecker/1.0/_images/example7.png b/docs/mrchecker/1.0/_images/example7.png new file mode 100644 index 00000000..efa8c5ac Binary files /dev/null and b/docs/mrchecker/1.0/_images/example7.png differ diff --git a/docs/mrchecker/1.0/_images/example8.png b/docs/mrchecker/1.0/_images/example8.png new file mode 100644 index 00000000..adc9b395 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example8.png differ diff --git a/docs/mrchecker/1.0/_images/example9.png b/docs/mrchecker/1.0/_images/example9.png new file mode 100644 index 00000000..d9e6f8f7 Binary files /dev/null and b/docs/mrchecker/1.0/_images/example9.png differ diff --git 
a/docs/mrchecker/1.0/_images/ht_image1.png b/docs/mrchecker/1.0/_images/ht_image1.png new file mode 100644 index 00000000..d2e3333c Binary files /dev/null and b/docs/mrchecker/1.0/_images/ht_image1.png differ diff --git a/docs/mrchecker/1.0/_images/ht_image2.png b/docs/mrchecker/1.0/_images/ht_image2.png new file mode 100644 index 00000000..71cbf184 Binary files /dev/null and b/docs/mrchecker/1.0/_images/ht_image2.png differ diff --git a/docs/mrchecker/1.0/_images/ht_image3.png b/docs/mrchecker/1.0/_images/ht_image3.png new file mode 100644 index 00000000..c9dd7b6e Binary files /dev/null and b/docs/mrchecker/1.0/_images/ht_image3.png differ diff --git a/docs/mrchecker/1.0/_images/image0.png b/docs/mrchecker/1.0/_images/image0.png new file mode 100644 index 00000000..120800f9 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image0.png differ diff --git a/docs/mrchecker/1.0/_images/image001.png b/docs/mrchecker/1.0/_images/image001.png new file mode 100644 index 00000000..c7089828 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image001.png differ diff --git a/docs/mrchecker/1.0/_images/image00100.jpg b/docs/mrchecker/1.0/_images/image00100.jpg new file mode 100644 index 00000000..d6e95f0f Binary files /dev/null and b/docs/mrchecker/1.0/_images/image00100.jpg differ diff --git a/docs/mrchecker/1.0/_images/image00101.jpg b/docs/mrchecker/1.0/_images/image00101.jpg new file mode 100644 index 00000000..1d9ddc71 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image00101.jpg differ diff --git a/docs/mrchecker/1.0/_images/image002.png b/docs/mrchecker/1.0/_images/image002.png new file mode 100644 index 00000000..ea4eb988 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image002.png differ diff --git a/docs/mrchecker/1.0/_images/image01.png b/docs/mrchecker/1.0/_images/image01.png new file mode 100644 index 00000000..d5b5e661 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image01.png differ diff --git 
a/docs/mrchecker/1.0/_images/image02.png b/docs/mrchecker/1.0/_images/image02.png new file mode 100644 index 00000000..f1c6989e Binary files /dev/null and b/docs/mrchecker/1.0/_images/image02.png differ diff --git a/docs/mrchecker/1.0/_images/image051.png b/docs/mrchecker/1.0/_images/image051.png new file mode 100644 index 00000000..8a5504ae Binary files /dev/null and b/docs/mrchecker/1.0/_images/image051.png differ diff --git a/docs/mrchecker/1.0/_images/image080.png b/docs/mrchecker/1.0/_images/image080.png new file mode 100644 index 00000000..fb531d8e Binary files /dev/null and b/docs/mrchecker/1.0/_images/image080.png differ diff --git a/docs/mrchecker/1.0/_images/image081.png b/docs/mrchecker/1.0/_images/image081.png new file mode 100644 index 00000000..8595ec4d Binary files /dev/null and b/docs/mrchecker/1.0/_images/image081.png differ diff --git a/docs/mrchecker/1.0/_images/image082.png b/docs/mrchecker/1.0/_images/image082.png new file mode 100644 index 00000000..3bb19951 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image082.png differ diff --git a/docs/mrchecker/1.0/_images/image083.png b/docs/mrchecker/1.0/_images/image083.png new file mode 100644 index 00000000..0211d8d7 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image083.png differ diff --git a/docs/mrchecker/1.0/_images/image084.png b/docs/mrchecker/1.0/_images/image084.png new file mode 100644 index 00000000..7ea06582 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image084.png differ diff --git a/docs/mrchecker/1.0/_images/image085.png b/docs/mrchecker/1.0/_images/image085.png new file mode 100644 index 00000000..13afb174 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image085.png differ diff --git a/docs/mrchecker/1.0/_images/image086.png b/docs/mrchecker/1.0/_images/image086.png new file mode 100644 index 00000000..ea054e42 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image086.png differ diff --git a/docs/mrchecker/1.0/_images/image086_new.png 
b/docs/mrchecker/1.0/_images/image086_new.png new file mode 100644 index 00000000..4d4c0d8b Binary files /dev/null and b/docs/mrchecker/1.0/_images/image086_new.png differ diff --git a/docs/mrchecker/1.0/_images/image1.png b/docs/mrchecker/1.0/_images/image1.png new file mode 100644 index 00000000..9111fe87 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image1.png differ diff --git a/docs/mrchecker/1.0/_images/image10.png b/docs/mrchecker/1.0/_images/image10.png new file mode 100644 index 00000000..44dcb88e Binary files /dev/null and b/docs/mrchecker/1.0/_images/image10.png differ diff --git a/docs/mrchecker/1.0/_images/image11.png b/docs/mrchecker/1.0/_images/image11.png new file mode 100644 index 00000000..5d9d9ec2 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image11.png differ diff --git a/docs/mrchecker/1.0/_images/image12.png b/docs/mrchecker/1.0/_images/image12.png new file mode 100644 index 00000000..b3c0df64 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image12.png differ diff --git a/docs/mrchecker/1.0/_images/image13.png b/docs/mrchecker/1.0/_images/image13.png new file mode 100644 index 00000000..341497d2 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image13.png differ diff --git a/docs/mrchecker/1.0/_images/image14.png b/docs/mrchecker/1.0/_images/image14.png new file mode 100644 index 00000000..2c69f519 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image14.png differ diff --git a/docs/mrchecker/1.0/_images/image15.png b/docs/mrchecker/1.0/_images/image15.png new file mode 100644 index 00000000..8e345e26 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image15.png differ diff --git a/docs/mrchecker/1.0/_images/image16.png b/docs/mrchecker/1.0/_images/image16.png new file mode 100644 index 00000000..b80fb279 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image16.png differ diff --git a/docs/mrchecker/1.0/_images/image17.png b/docs/mrchecker/1.0/_images/image17.png new file mode 100644 
index 00000000..b80fb279 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image17.png differ diff --git a/docs/mrchecker/1.0/_images/image18.png b/docs/mrchecker/1.0/_images/image18.png new file mode 100644 index 00000000..f206e104 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image18.png differ diff --git a/docs/mrchecker/1.0/_images/image19.png b/docs/mrchecker/1.0/_images/image19.png new file mode 100644 index 00000000..f7c4ec52 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image19.png differ diff --git a/docs/mrchecker/1.0/_images/image1_new.png b/docs/mrchecker/1.0/_images/image1_new.png new file mode 100644 index 00000000..f7a5a277 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image1_new.png differ diff --git a/docs/mrchecker/1.0/_images/image2.png b/docs/mrchecker/1.0/_images/image2.png new file mode 100644 index 00000000..9662ca13 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image2.png differ diff --git a/docs/mrchecker/1.0/_images/image20.png b/docs/mrchecker/1.0/_images/image20.png new file mode 100644 index 00000000..4dbda49c Binary files /dev/null and b/docs/mrchecker/1.0/_images/image20.png differ diff --git a/docs/mrchecker/1.0/_images/image21.png b/docs/mrchecker/1.0/_images/image21.png new file mode 100644 index 00000000..6d74a456 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image21.png differ diff --git a/docs/mrchecker/1.0/_images/image22.png b/docs/mrchecker/1.0/_images/image22.png new file mode 100644 index 00000000..30eb89b7 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image22.png differ diff --git a/docs/mrchecker/1.0/_images/image23.png b/docs/mrchecker/1.0/_images/image23.png new file mode 100644 index 00000000..1ad040a8 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image23.png differ diff --git a/docs/mrchecker/1.0/_images/image23_new.png b/docs/mrchecker/1.0/_images/image23_new.png new file mode 100644 index 00000000..a060ba38 Binary files /dev/null and 
b/docs/mrchecker/1.0/_images/image23_new.png differ diff --git a/docs/mrchecker/1.0/_images/image24.png b/docs/mrchecker/1.0/_images/image24.png new file mode 100644 index 00000000..15fc8b1b Binary files /dev/null and b/docs/mrchecker/1.0/_images/image24.png differ diff --git a/docs/mrchecker/1.0/_images/image24_new.png b/docs/mrchecker/1.0/_images/image24_new.png new file mode 100644 index 00000000..95a26c8e Binary files /dev/null and b/docs/mrchecker/1.0/_images/image24_new.png differ diff --git a/docs/mrchecker/1.0/_images/image25.png b/docs/mrchecker/1.0/_images/image25.png new file mode 100644 index 00000000..ed11a270 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image25.png differ diff --git a/docs/mrchecker/1.0/_images/image25_new.png b/docs/mrchecker/1.0/_images/image25_new.png new file mode 100644 index 00000000..599026e5 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image25_new.png differ diff --git a/docs/mrchecker/1.0/_images/image26.png b/docs/mrchecker/1.0/_images/image26.png new file mode 100644 index 00000000..f443d230 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image26.png differ diff --git a/docs/mrchecker/1.0/_images/image26_new.png b/docs/mrchecker/1.0/_images/image26_new.png new file mode 100644 index 00000000..ab539721 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image26_new.png differ diff --git a/docs/mrchecker/1.0/_images/image27.png b/docs/mrchecker/1.0/_images/image27.png new file mode 100644 index 00000000..a3af5f72 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image27.png differ diff --git a/docs/mrchecker/1.0/_images/image27_new.png b/docs/mrchecker/1.0/_images/image27_new.png new file mode 100644 index 00000000..ec386d01 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image27_new.png differ diff --git a/docs/mrchecker/1.0/_images/image28.png b/docs/mrchecker/1.0/_images/image28.png new file mode 100644 index 00000000..9f566a26 Binary files /dev/null and 
b/docs/mrchecker/1.0/_images/image28.png differ diff --git a/docs/mrchecker/1.0/_images/image29.png b/docs/mrchecker/1.0/_images/image29.png new file mode 100644 index 00000000..eadd217b Binary files /dev/null and b/docs/mrchecker/1.0/_images/image29.png differ diff --git a/docs/mrchecker/1.0/_images/image3.png b/docs/mrchecker/1.0/_images/image3.png new file mode 100644 index 00000000..d28d33b1 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image3.png differ diff --git a/docs/mrchecker/1.0/_images/image30.png b/docs/mrchecker/1.0/_images/image30.png new file mode 100644 index 00000000..8f1780df Binary files /dev/null and b/docs/mrchecker/1.0/_images/image30.png differ diff --git a/docs/mrchecker/1.0/_images/image30_new.png b/docs/mrchecker/1.0/_images/image30_new.png new file mode 100644 index 00000000..a958512f Binary files /dev/null and b/docs/mrchecker/1.0/_images/image30_new.png differ diff --git a/docs/mrchecker/1.0/_images/image31.png b/docs/mrchecker/1.0/_images/image31.png new file mode 100644 index 00000000..1170960f Binary files /dev/null and b/docs/mrchecker/1.0/_images/image31.png differ diff --git a/docs/mrchecker/1.0/_images/image32.png b/docs/mrchecker/1.0/_images/image32.png new file mode 100644 index 00000000..2dd1e936 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image32.png differ diff --git a/docs/mrchecker/1.0/_images/image32_new.png b/docs/mrchecker/1.0/_images/image32_new.png new file mode 100644 index 00000000..580f61f1 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image32_new.png differ diff --git a/docs/mrchecker/1.0/_images/image33.png b/docs/mrchecker/1.0/_images/image33.png new file mode 100644 index 00000000..0a1ac82f Binary files /dev/null and b/docs/mrchecker/1.0/_images/image33.png differ diff --git a/docs/mrchecker/1.0/_images/image33_new.png b/docs/mrchecker/1.0/_images/image33_new.png new file mode 100644 index 00000000..d84bd25b Binary files /dev/null and b/docs/mrchecker/1.0/_images/image33_new.png 
differ diff --git a/docs/mrchecker/1.0/_images/image34.png b/docs/mrchecker/1.0/_images/image34.png new file mode 100644 index 00000000..205dc6fa Binary files /dev/null and b/docs/mrchecker/1.0/_images/image34.png differ diff --git a/docs/mrchecker/1.0/_images/image35.png b/docs/mrchecker/1.0/_images/image35.png new file mode 100644 index 00000000..cf5b537e Binary files /dev/null and b/docs/mrchecker/1.0/_images/image35.png differ diff --git a/docs/mrchecker/1.0/_images/image35_new.png b/docs/mrchecker/1.0/_images/image35_new.png new file mode 100644 index 00000000..cf5029b0 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image35_new.png differ diff --git a/docs/mrchecker/1.0/_images/image36.png b/docs/mrchecker/1.0/_images/image36.png new file mode 100644 index 00000000..e033bc44 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image36.png differ diff --git a/docs/mrchecker/1.0/_images/image37.png b/docs/mrchecker/1.0/_images/image37.png new file mode 100644 index 00000000..3c79164b Binary files /dev/null and b/docs/mrchecker/1.0/_images/image37.png differ diff --git a/docs/mrchecker/1.0/_images/image37_new.png b/docs/mrchecker/1.0/_images/image37_new.png new file mode 100644 index 00000000..c6600d6c Binary files /dev/null and b/docs/mrchecker/1.0/_images/image37_new.png differ diff --git a/docs/mrchecker/1.0/_images/image38.png b/docs/mrchecker/1.0/_images/image38.png new file mode 100644 index 00000000..e9c2b411 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image38.png differ diff --git a/docs/mrchecker/1.0/_images/image38_new.png b/docs/mrchecker/1.0/_images/image38_new.png new file mode 100644 index 00000000..0dea0792 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image38_new.png differ diff --git a/docs/mrchecker/1.0/_images/image39.png b/docs/mrchecker/1.0/_images/image39.png new file mode 100644 index 00000000..2c8375c8 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image39.png differ diff --git 
a/docs/mrchecker/1.0/_images/image39a.png b/docs/mrchecker/1.0/_images/image39a.png new file mode 100644 index 00000000..39708505 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image39a.png differ diff --git a/docs/mrchecker/1.0/_images/image4.png b/docs/mrchecker/1.0/_images/image4.png new file mode 100644 index 00000000..835605d1 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image4.png differ diff --git a/docs/mrchecker/1.0/_images/image40.png b/docs/mrchecker/1.0/_images/image40.png new file mode 100644 index 00000000..a41952c2 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image40.png differ diff --git a/docs/mrchecker/1.0/_images/image41.png b/docs/mrchecker/1.0/_images/image41.png new file mode 100644 index 00000000..ad2a2f01 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image41.png differ diff --git a/docs/mrchecker/1.0/_images/image42.png b/docs/mrchecker/1.0/_images/image42.png new file mode 100644 index 00000000..00e52f61 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image42.png differ diff --git a/docs/mrchecker/1.0/_images/image43.png b/docs/mrchecker/1.0/_images/image43.png new file mode 100644 index 00000000..09c5ae11 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image43.png differ diff --git a/docs/mrchecker/1.0/_images/image44.png b/docs/mrchecker/1.0/_images/image44.png new file mode 100644 index 00000000..1e9f9f8e Binary files /dev/null and b/docs/mrchecker/1.0/_images/image44.png differ diff --git a/docs/mrchecker/1.0/_images/image45.png b/docs/mrchecker/1.0/_images/image45.png new file mode 100644 index 00000000..cb0deb26 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image45.png differ diff --git a/docs/mrchecker/1.0/_images/image46.png b/docs/mrchecker/1.0/_images/image46.png new file mode 100644 index 00000000..7b4931ad Binary files /dev/null and b/docs/mrchecker/1.0/_images/image46.png differ diff --git a/docs/mrchecker/1.0/_images/image47.png 
b/docs/mrchecker/1.0/_images/image47.png new file mode 100644 index 00000000..3ba3b220 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image47.png differ diff --git a/docs/mrchecker/1.0/_images/image48.png b/docs/mrchecker/1.0/_images/image48.png new file mode 100644 index 00000000..d81cf9b5 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image48.png differ diff --git a/docs/mrchecker/1.0/_images/image49.png b/docs/mrchecker/1.0/_images/image49.png new file mode 100644 index 00000000..c0a27f2a Binary files /dev/null and b/docs/mrchecker/1.0/_images/image49.png differ diff --git a/docs/mrchecker/1.0/_images/image5.png b/docs/mrchecker/1.0/_images/image5.png new file mode 100644 index 00000000..eb70eafb Binary files /dev/null and b/docs/mrchecker/1.0/_images/image5.png differ diff --git a/docs/mrchecker/1.0/_images/image50.png b/docs/mrchecker/1.0/_images/image50.png new file mode 100644 index 00000000..4ca51d44 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image50.png differ diff --git a/docs/mrchecker/1.0/_images/image51.png b/docs/mrchecker/1.0/_images/image51.png new file mode 100644 index 00000000..7d6048dd Binary files /dev/null and b/docs/mrchecker/1.0/_images/image51.png differ diff --git a/docs/mrchecker/1.0/_images/image52.png b/docs/mrchecker/1.0/_images/image52.png new file mode 100644 index 00000000..3c4e4f86 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image52.png differ diff --git a/docs/mrchecker/1.0/_images/image53.png b/docs/mrchecker/1.0/_images/image53.png new file mode 100644 index 00000000..92a8a519 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image53.png differ diff --git a/docs/mrchecker/1.0/_images/image54.png b/docs/mrchecker/1.0/_images/image54.png new file mode 100644 index 00000000..1ac37e36 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image54.png differ diff --git a/docs/mrchecker/1.0/_images/image55.png b/docs/mrchecker/1.0/_images/image55.png new file mode 100644 index 
00000000..f64a5235 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image55.png differ diff --git a/docs/mrchecker/1.0/_images/image56.png b/docs/mrchecker/1.0/_images/image56.png new file mode 100644 index 00000000..f526dc8c Binary files /dev/null and b/docs/mrchecker/1.0/_images/image56.png differ diff --git a/docs/mrchecker/1.0/_images/image57.png b/docs/mrchecker/1.0/_images/image57.png new file mode 100644 index 00000000..66620deb Binary files /dev/null and b/docs/mrchecker/1.0/_images/image57.png differ diff --git a/docs/mrchecker/1.0/_images/image58.png b/docs/mrchecker/1.0/_images/image58.png new file mode 100644 index 00000000..8c74b804 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image58.png differ diff --git a/docs/mrchecker/1.0/_images/image59.png b/docs/mrchecker/1.0/_images/image59.png new file mode 100644 index 00000000..37402f77 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image59.png differ diff --git a/docs/mrchecker/1.0/_images/image6.png b/docs/mrchecker/1.0/_images/image6.png new file mode 100644 index 00000000..23ac54b5 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image6.png differ diff --git a/docs/mrchecker/1.0/_images/image60.png b/docs/mrchecker/1.0/_images/image60.png new file mode 100644 index 00000000..d05358b7 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image60.png differ diff --git a/docs/mrchecker/1.0/_images/image61.png b/docs/mrchecker/1.0/_images/image61.png new file mode 100644 index 00000000..aabfa444 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image61.png differ diff --git a/docs/mrchecker/1.0/_images/image62.png b/docs/mrchecker/1.0/_images/image62.png new file mode 100644 index 00000000..83f53936 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image62.png differ diff --git a/docs/mrchecker/1.0/_images/image63.png b/docs/mrchecker/1.0/_images/image63.png new file mode 100644 index 00000000..8650c5e5 Binary files /dev/null and 
b/docs/mrchecker/1.0/_images/image63.png differ diff --git a/docs/mrchecker/1.0/_images/image64.png b/docs/mrchecker/1.0/_images/image64.png new file mode 100644 index 00000000..1ace42ba Binary files /dev/null and b/docs/mrchecker/1.0/_images/image64.png differ diff --git a/docs/mrchecker/1.0/_images/image65.png b/docs/mrchecker/1.0/_images/image65.png new file mode 100644 index 00000000..580445a5 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image65.png differ diff --git a/docs/mrchecker/1.0/_images/image66.png b/docs/mrchecker/1.0/_images/image66.png new file mode 100644 index 00000000..119e1085 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image66.png differ diff --git a/docs/mrchecker/1.0/_images/image67.png b/docs/mrchecker/1.0/_images/image67.png new file mode 100644 index 00000000..90b504a6 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image67.png differ diff --git a/docs/mrchecker/1.0/_images/image68.png b/docs/mrchecker/1.0/_images/image68.png new file mode 100644 index 00000000..5054824e Binary files /dev/null and b/docs/mrchecker/1.0/_images/image68.png differ diff --git a/docs/mrchecker/1.0/_images/image69.png b/docs/mrchecker/1.0/_images/image69.png new file mode 100644 index 00000000..cf991cb0 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image69.png differ diff --git a/docs/mrchecker/1.0/_images/image7.png b/docs/mrchecker/1.0/_images/image7.png new file mode 100644 index 00000000..f02a7489 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image7.png differ diff --git a/docs/mrchecker/1.0/_images/image70.png b/docs/mrchecker/1.0/_images/image70.png new file mode 100644 index 00000000..2dbbff06 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image70.png differ diff --git a/docs/mrchecker/1.0/_images/image71.png b/docs/mrchecker/1.0/_images/image71.png new file mode 100644 index 00000000..f78b7980 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image71.png differ diff --git 
a/docs/mrchecker/1.0/_images/image72.png b/docs/mrchecker/1.0/_images/image72.png new file mode 100644 index 00000000..52104845 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image72.png differ diff --git a/docs/mrchecker/1.0/_images/image73.png b/docs/mrchecker/1.0/_images/image73.png new file mode 100644 index 00000000..ddfc5780 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image73.png differ diff --git a/docs/mrchecker/1.0/_images/image74.png b/docs/mrchecker/1.0/_images/image74.png new file mode 100644 index 00000000..9a6732c6 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image74.png differ diff --git a/docs/mrchecker/1.0/_images/image75.png b/docs/mrchecker/1.0/_images/image75.png new file mode 100644 index 00000000..a67016f7 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image75.png differ diff --git a/docs/mrchecker/1.0/_images/image76.png b/docs/mrchecker/1.0/_images/image76.png new file mode 100644 index 00000000..1a57181f Binary files /dev/null and b/docs/mrchecker/1.0/_images/image76.png differ diff --git a/docs/mrchecker/1.0/_images/image77.png b/docs/mrchecker/1.0/_images/image77.png new file mode 100644 index 00000000..9f317c22 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image77.png differ diff --git a/docs/mrchecker/1.0/_images/image78.png b/docs/mrchecker/1.0/_images/image78.png new file mode 100644 index 00000000..d9da5acd Binary files /dev/null and b/docs/mrchecker/1.0/_images/image78.png differ diff --git a/docs/mrchecker/1.0/_images/image79.png b/docs/mrchecker/1.0/_images/image79.png new file mode 100644 index 00000000..efc1744c Binary files /dev/null and b/docs/mrchecker/1.0/_images/image79.png differ diff --git a/docs/mrchecker/1.0/_images/image8.png b/docs/mrchecker/1.0/_images/image8.png new file mode 100644 index 00000000..cc532bab Binary files /dev/null and b/docs/mrchecker/1.0/_images/image8.png differ diff --git a/docs/mrchecker/1.0/_images/image80.png 
b/docs/mrchecker/1.0/_images/image80.png new file mode 100644 index 00000000..a5218b33 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image80.png differ diff --git a/docs/mrchecker/1.0/_images/image81.png b/docs/mrchecker/1.0/_images/image81.png new file mode 100644 index 00000000..29f7fe94 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image81.png differ diff --git a/docs/mrchecker/1.0/_images/image82.png b/docs/mrchecker/1.0/_images/image82.png new file mode 100644 index 00000000..23b8aff9 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image82.png differ diff --git a/docs/mrchecker/1.0/_images/image83.png b/docs/mrchecker/1.0/_images/image83.png new file mode 100644 index 00000000..77fefd2c Binary files /dev/null and b/docs/mrchecker/1.0/_images/image83.png differ diff --git a/docs/mrchecker/1.0/_images/image84.png b/docs/mrchecker/1.0/_images/image84.png new file mode 100644 index 00000000..b1e6ac5e Binary files /dev/null and b/docs/mrchecker/1.0/_images/image84.png differ diff --git a/docs/mrchecker/1.0/_images/image85.png b/docs/mrchecker/1.0/_images/image85.png new file mode 100644 index 00000000..fcde60c8 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image85.png differ diff --git a/docs/mrchecker/1.0/_images/image86.png b/docs/mrchecker/1.0/_images/image86.png new file mode 100644 index 00000000..e9e7f09a Binary files /dev/null and b/docs/mrchecker/1.0/_images/image86.png differ diff --git a/docs/mrchecker/1.0/_images/image87.png b/docs/mrchecker/1.0/_images/image87.png new file mode 100644 index 00000000..57788f1c Binary files /dev/null and b/docs/mrchecker/1.0/_images/image87.png differ diff --git a/docs/mrchecker/1.0/_images/image88.png b/docs/mrchecker/1.0/_images/image88.png new file mode 100644 index 00000000..e2737d35 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image88.png differ diff --git a/docs/mrchecker/1.0/_images/image89.png b/docs/mrchecker/1.0/_images/image89.png new file mode 100644 index 
00000000..8abe43fb Binary files /dev/null and b/docs/mrchecker/1.0/_images/image89.png differ diff --git a/docs/mrchecker/1.0/_images/image9.png b/docs/mrchecker/1.0/_images/image9.png new file mode 100644 index 00000000..4ea5eeed Binary files /dev/null and b/docs/mrchecker/1.0/_images/image9.png differ diff --git a/docs/mrchecker/1.0/_images/image90.png b/docs/mrchecker/1.0/_images/image90.png new file mode 100644 index 00000000..1573c9d9 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image90.png differ diff --git a/docs/mrchecker/1.0/_images/image91.png b/docs/mrchecker/1.0/_images/image91.png new file mode 100644 index 00000000..7fc6ebd1 Binary files /dev/null and b/docs/mrchecker/1.0/_images/image91.png differ diff --git a/docs/mrchecker/1.0/_images/install_win01.png b/docs/mrchecker/1.0/_images/install_win01.png new file mode 100644 index 00000000..9b0420cd Binary files /dev/null and b/docs/mrchecker/1.0/_images/install_win01.png differ diff --git a/docs/mrchecker/1.0/_images/install_win02.png b/docs/mrchecker/1.0/_images/install_win02.png new file mode 100644 index 00000000..9c75140d Binary files /dev/null and b/docs/mrchecker/1.0/_images/install_win02.png differ diff --git a/docs/mrchecker/1.0/_images/install_win03.png b/docs/mrchecker/1.0/_images/install_win03.png new file mode 100644 index 00000000..331c135d Binary files /dev/null and b/docs/mrchecker/1.0/_images/install_win03.png differ diff --git a/docs/mrchecker/1.0/_images/install_win04.png b/docs/mrchecker/1.0/_images/install_win04.png new file mode 100644 index 00000000..1dc09128 Binary files /dev/null and b/docs/mrchecker/1.0/_images/install_win04.png differ diff --git a/docs/mrchecker/1.0/_images/install_win05.png b/docs/mrchecker/1.0/_images/install_win05.png new file mode 100644 index 00000000..4702ce5b Binary files /dev/null and b/docs/mrchecker/1.0/_images/install_win05.png differ diff --git a/docs/mrchecker/1.0/_images/install_win06.png b/docs/mrchecker/1.0/_images/install_win06.png 
new file mode 100644 index 00000000..f07d9c51 Binary files /dev/null and b/docs/mrchecker/1.0/_images/install_win06.png differ diff --git a/docs/mrchecker/1.0/_images/install_win07.png b/docs/mrchecker/1.0/_images/install_win07.png new file mode 100644 index 00000000..f01fcbc0 Binary files /dev/null and b/docs/mrchecker/1.0/_images/install_win07.png differ diff --git a/docs/mrchecker/1.0/_images/install_win08.png b/docs/mrchecker/1.0/_images/install_win08.png new file mode 100644 index 00000000..2abc8919 Binary files /dev/null and b/docs/mrchecker/1.0/_images/install_win08.png differ diff --git a/docs/mrchecker/1.0/_images/migration01.png b/docs/mrchecker/1.0/_images/migration01.png new file mode 100644 index 00000000..2eca177e Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration01.png differ diff --git a/docs/mrchecker/1.0/_images/migration02.png b/docs/mrchecker/1.0/_images/migration02.png new file mode 100644 index 00000000..712ca452 Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration02.png differ diff --git a/docs/mrchecker/1.0/_images/migration03.png b/docs/mrchecker/1.0/_images/migration03.png new file mode 100644 index 00000000..ecf920bb Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration03.png differ diff --git a/docs/mrchecker/1.0/_images/migration04.png b/docs/mrchecker/1.0/_images/migration04.png new file mode 100644 index 00000000..9b67bf7e Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration04.png differ diff --git a/docs/mrchecker/1.0/_images/migration05.png b/docs/mrchecker/1.0/_images/migration05.png new file mode 100644 index 00000000..b5ec2fc2 Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration05.png differ diff --git a/docs/mrchecker/1.0/_images/migration06.png b/docs/mrchecker/1.0/_images/migration06.png new file mode 100644 index 00000000..e6a780ed Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration06.png differ diff --git 
a/docs/mrchecker/1.0/_images/migration07.png b/docs/mrchecker/1.0/_images/migration07.png new file mode 100644 index 00000000..d2598f21 Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration07.png differ diff --git a/docs/mrchecker/1.0/_images/migration08.png b/docs/mrchecker/1.0/_images/migration08.png new file mode 100644 index 00000000..8daccc9b Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration08.png differ diff --git a/docs/mrchecker/1.0/_images/migration09.png b/docs/mrchecker/1.0/_images/migration09.png new file mode 100644 index 00000000..4d3b071c Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration09.png differ diff --git a/docs/mrchecker/1.0/_images/migration10.png b/docs/mrchecker/1.0/_images/migration10.png new file mode 100644 index 00000000..41e8a7f9 Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration10.png differ diff --git a/docs/mrchecker/1.0/_images/migration11.png b/docs/mrchecker/1.0/_images/migration11.png new file mode 100644 index 00000000..4f7ec759 Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration11.png differ diff --git a/docs/mrchecker/1.0/_images/migration12.png b/docs/mrchecker/1.0/_images/migration12.png new file mode 100644 index 00000000..a8897524 Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration12.png differ diff --git a/docs/mrchecker/1.0/_images/migration13.png b/docs/mrchecker/1.0/_images/migration13.png new file mode 100644 index 00000000..fca0a733 Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration13.png differ diff --git a/docs/mrchecker/1.0/_images/migration14.png b/docs/mrchecker/1.0/_images/migration14.png new file mode 100644 index 00000000..f5e1e07c Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration14.png differ diff --git a/docs/mrchecker/1.0/_images/migration15.png b/docs/mrchecker/1.0/_images/migration15.png new file mode 100644 index 00000000..b1f27b00 Binary files /dev/null and 
b/docs/mrchecker/1.0/_images/migration15.png differ diff --git a/docs/mrchecker/1.0/_images/migration16.png b/docs/mrchecker/1.0/_images/migration16.png new file mode 100644 index 00000000..ff742019 Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration16.png differ diff --git a/docs/mrchecker/1.0/_images/migration17.png b/docs/mrchecker/1.0/_images/migration17.png new file mode 100644 index 00000000..2f32f144 Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration17.png differ diff --git a/docs/mrchecker/1.0/_images/migration18.png b/docs/mrchecker/1.0/_images/migration18.png new file mode 100644 index 00000000..f470a17a Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration18.png differ diff --git a/docs/mrchecker/1.0/_images/migration19.png b/docs/mrchecker/1.0/_images/migration19.png new file mode 100644 index 00000000..74a14574 Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration19.png differ diff --git a/docs/mrchecker/1.0/_images/migration20.png b/docs/mrchecker/1.0/_images/migration20.png new file mode 100644 index 00000000..b8436df1 Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration20.png differ diff --git a/docs/mrchecker/1.0/_images/migration21.png b/docs/mrchecker/1.0/_images/migration21.png new file mode 100644 index 00000000..e577b277 Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration21.png differ diff --git a/docs/mrchecker/1.0/_images/migration22.png b/docs/mrchecker/1.0/_images/migration22.png new file mode 100644 index 00000000..0cf6db66 Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration22.png differ diff --git a/docs/mrchecker/1.0/_images/migration23.png b/docs/mrchecker/1.0/_images/migration23.png new file mode 100644 index 00000000..7db55a75 Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration23.png differ diff --git a/docs/mrchecker/1.0/_images/migration24.png b/docs/mrchecker/1.0/_images/migration24.png new file mode 100644 index 
00000000..5921870d Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration24.png differ diff --git a/docs/mrchecker/1.0/_images/migration25.png b/docs/mrchecker/1.0/_images/migration25.png new file mode 100644 index 00000000..1c6a1e24 Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration25.png differ diff --git a/docs/mrchecker/1.0/_images/migration26.png b/docs/mrchecker/1.0/_images/migration26.png new file mode 100644 index 00000000..af11a30e Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration26.png differ diff --git a/docs/mrchecker/1.0/_images/migration27.png b/docs/mrchecker/1.0/_images/migration27.png new file mode 100644 index 00000000..4e7cf6c2 Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration27.png differ diff --git a/docs/mrchecker/1.0/_images/migration28.png b/docs/mrchecker/1.0/_images/migration28.png new file mode 100644 index 00000000..cd911431 Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration28.png differ diff --git a/docs/mrchecker/1.0/_images/migration29.png b/docs/mrchecker/1.0/_images/migration29.png new file mode 100644 index 00000000..a6f30bb4 Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration29.png differ diff --git a/docs/mrchecker/1.0/_images/migration30.png b/docs/mrchecker/1.0/_images/migration30.png new file mode 100644 index 00000000..33b0a362 Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration30.png differ diff --git a/docs/mrchecker/1.0/_images/migration31.png b/docs/mrchecker/1.0/_images/migration31.png new file mode 100644 index 00000000..34ebc4b4 Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration31.png differ diff --git a/docs/mrchecker/1.0/_images/migration_arrow_down.png b/docs/mrchecker/1.0/_images/migration_arrow_down.png new file mode 100644 index 00000000..dc4f1fec Binary files /dev/null and b/docs/mrchecker/1.0/_images/migration_arrow_down.png differ diff --git a/docs/mrchecker/1.0/_images/piramida.png 
b/docs/mrchecker/1.0/_images/piramida.png new file mode 100644 index 00000000..efcd3c21 Binary files /dev/null and b/docs/mrchecker/1.0/_images/piramida.png differ diff --git a/docs/mrchecker/1.0/benefits.html b/docs/mrchecker/1.0/benefits.html new file mode 100644 index 00000000..0f8e7c42 --- /dev/null +++ b/docs/mrchecker/1.0/benefits.html @@ -0,0 +1,298 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Benefits

+
+
+

Every customer may benefit from using MrChecker Test Framework. The main profits for your project are:

+
+
+
    +
  • +

    Resilient and robust building and validation process

    +
  • +
  • +

    Quality gates shifted closer to the software development process

    +
  • +
  • +

    Team quality awareness increase - including Unit Tests, Static Analysis, Security Tests, Performance in the testing process

    +
  • +
  • +

    Test execution environment transparent to any infrastructure

    +
  • +
  • +

    Touch base with the Cloud solution

    +
  • +
  • +

    Faster Quality and DevOps-driven delivery

    +
  • +
  • +

    Proven frameworks, technologies and processes.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/index.html b/docs/mrchecker/1.0/index.html new file mode 100644 index 00000000..354b67ac --- /dev/null +++ b/docs/mrchecker/1.0/index.html @@ -0,0 +1,323 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Who is MrChecker?

+
+
+

MrChecker Test Framework is an end to end test automation framework which is written in Java. +It is an automated testing framework for functional testing of web applications, API web services, Service Virtualization, Security, native mobile apps and, in the near future, databases. All modules have tangible examples of how to build resilient integration test cases based on delivered functions.

+
+
+
+
+

Where does MrChecker apply?

+
+
+

The aim of MrChecker is to achieve standardize way to build BlackBox tests. It provides the possibility to have one common software standard in order to build Component, Integration and System tests.

+
+
+

A Test Engineer does not have access to the application source code in order to perform BlackBox tests, but they are able to attach their tests to any application interfaces, such as - IP address - Domain Name - communication protocol - Command Line Interface.

+
+
+
+
+

MrChecker specification:

+
+
+
    +
  • +

    Responsive Web Design application: Selenium Browser

    +
  • +
  • +

    REST/SOAP: RestAssure

    +
  • +
  • +

    Service Virtualization: Wiremock

    +
  • +
  • +

    Database: JDBC drivers for SQL

    +
  • +
  • +

    Security: RestAssure + RestAssure Security lib

    +
  • +
  • +

    Standalone Java application: SWING

    +
  • +
  • +

    Native mobile application for Android: Appium

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/mrchecker/1.0/master-mrchecker.html b/docs/mrchecker/1.0/master-mrchecker.html new file mode 100644 index 00000000..685221ed --- /dev/null +++ b/docs/mrchecker/1.0/master-mrchecker.html @@ -0,0 +1,12556 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

MrChecker - devonfw testing tool

+
+ +
+
+
+

Who Is MrChecker

+
+
+

Unresolved include directive in modules/ROOT/pages/master-mrchecker.adoc - include::home.adoc[]

+
+
+
Benefits
+
+

Every customer may benefit from using MrChecker Test Framework. The main profits for your project are:

+
+
+
    +
  • +

    Resilient and robust building and validation process

    +
  • +
  • +

    Quality gates shifted closer to the software development process

    +
  • +
  • +

    Team quality awareness increase - including Unit Tests, Static Analysis, Security Tests, Performance in the testing process

    +
  • +
  • +

    Test execution environment transparent to any infrastructure

    +
  • +
  • +

    Touch base with the Cloud solution

    +
  • +
  • +

    Faster Quality and DevOps-driven delivery

    +
  • +
  • +

    Proven frameworks, technologies and processes.

    +
  • +
+
+ +
+
+
Test stages
+ +
+
+
Unit test
+
+

A module is the smallest compilable unit of source code. It is often too small to be tested by the functional tests (black-box tests). However, it is the appropriate candidate for white-box testing. White-box tests have to be performed as the first static tests (e.g. Lint and inspections), followed by dynamic tests in order to check boundaries, branches and paths. Usually, this kind of testing would require enabling stubs and special test tools.

+
+
+
+
Component test
+
+

This is the black-box test of modules or groups of modules which represent certain functionalities. There are no rules about what could be called a component. Whatever a tester defines as a component, should make sense and be a testable unit. Components can be integrated into bigger components step by step and tested as such.

+
+
+
+
Integration test
+
+

Functions are tested by feeding them input and examining the output, and internal program structure is rarely considered. The software is completed step by step and tested by tests covering a collaboration between modules or classes. The integration depends on the kind of system. For example, the steps could be as follows: run the operating system first and gradually add one component after another, then check if the black-box tests are still running (the test cases will be extended together with every added component). The integration is done in the laboratory. It may be also completed by using simulators or emulators. Additionally, the input signals could be stimulated.

+
+
+
+
Software / System test
+
+

System testing is a type of testing conducted on a complete integrated system to evaluate the system’s compliance with its specified requirements. This is a type of black-box testing of the complete software in the target system. The most important factor in successful system testing is that the environmental conditions for the software have to be as realistic as possible (complete original hardware in the destination environment).

+
+
+
+
+
+

Test Framework Modules

+
+
+

In this section, it is possible to find all the information regarding the main modules of MrChecker:

+
+
+
+
+

Core Test Module

+
+ +
+
Core Test Module
+ +
+
+
What is Core Test Module
+
+
+image1 new +
+
+
+ +
+
How to start?
+ +
+
+
Allure Logger → BFLogger
+
+

In Allure E2E Test Framework you have ability to use and log any additional information crucial for:

+
+
+
    +
  • +

    test steps

    +
  • +
  • +

    test exection

    +
  • +
  • +

    page object actions, and many more.

    +
  • +
+
+
+
+
Where to find saved logs
+
+

Every logged information is saved in a separate test file, as a result of parallel tests execution.

+
+
+

The places they are saved:

+
+
+
    +
  1. +

    In test folder C:\Allure_Test_Framework\allure-app-under-test\logs

    +
  2. +
  3. +

    In every Allure Test report, logs are always embedded as an attachment, according to test run.

    +
  4. +
+
+
+
+
How to use logger:
+
+
    +
  • +

    Start typing

    +
    +

    BFLogger

    +
    +
  • +
  • +

    Then type . (dot)

    +
  • +
+
+
+
+
Type of logger:
+
+
    +
  • +

    BFLogger.logInfo("Your text") - used for test steps

    +
  • +
  • +

    BFLogger.logDebug("Your text") - used for non official information, either during test build process or in Page Object files

    +
  • +
  • +

    BFLogger.logError("Your text") - used to emphasize critical information

    +
  • +
+
+
+
+image13 +
+
+
+

Console output:

+
+
+
+image14 +
+
+
+
+
Allure Reports
+
+
+image15 +
+
+
+

Allure is a tool designed for test reports.

+
+
+
+
Generate report - command line
+
+

You can generate a report using one of the following commands:

+
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+
+
mvn test allure:serve -Dgroups=TestsTag1
+
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+
mvn test allure:serve -Dtest=TS_Tag1
+
+
+
+

A report will be generated into temp folder. Web server with results will start. You can additionally configure the server timeout. The default value is "3600" (one hour).

+
+
+

System property allure.serve.timeout.

+
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+
+
mvn test allure:report -Dgroups=TestsTag1
+
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+
mvn test allure:report -Dtest=TS_Tag1
+
+
+
+

A report will be generated tо directory: target/site/allure-maven/index.html

+
+
+

NOTE: Please open index.html file under Firefox. Chrome has some limitations to presenting dynamic content. If you want to open a report with a Chromium based Web Browser, you need to launch it first with --allow-file-access-from-files argument.

+
+
+
+
Generate report - Eclipse
+
+

A report is created here allure-app-under-test\target\site\allure-report\index.html

+
+
+

NOTE: Please open index.html file under Firefox. Chrome has some limitations to presenting dynamic content. If you want to open a report with a Chromium based Web Browser, you need to launch it first with --allow-file-access-from-files argument.

+
+
+
+image17 +
+
+
+
+image18 +
+
+
+
+
Generate report - Jenkins
+
+

In our case, we’ll use the Allure Jenkins plugin. When integrating Allure in a Jenkins job configuration, we’ll have direct access to the build’s test report.

+
+
+
+image19 +
+
+
+

There are several ways to access the Allure Test Reports:

+
+
+
    +
  • +

    Using the "Allure Report" button on the left navigation bar or center of the general job overview

    +
  • +
  • +

    Using the "Allure Report" button on the left navigation bar or center of a specific build overview

    +
  • +
+
+
+

Afterwards you’ll be greeted with either the general Allure Dashboard (showing the newest build) or the Allure Dashboard for a specific (older) build.

+
+
+
+
Allure dashboard
+
+
+image20 +
+
+
+

The Dashboard provides a graphical overview on how many test cases were successful, failed or broken.

+
+
+
    +
  • +

    Passed means, that the test case was executed successfully.

    +
  • +
  • +

    Broken means, that there were mistakes, usually inside of the test method or test class. As tests are being treated as code, broken code has to be expected, resulting in occasionally broken test results.

    +
  • +
  • +

    Failed means that an assertion failed.

    +
  • +
+
+
+
+
Defects
+
+

The defects tab lists out all the defects that occurred, and also descriptions thereof. Clicking on a list item displays the test case which resulted in an error. Clicking on a test case allows the user to have a look at the test case steps, as well as Log files or Screenshots of the failure.

+
+
+
+
Graph
+
+

The graph page includes a pie chart of all tests, showing their result status (failed, passed, etc.). Another graph allows insight into the time elapsed during the tests. This is a very useful information to find and eliminate possible bottlenecks in test implementations.

+
+
+
+image21 +
+
+
+
+
Why join Test Cases in groups - Test Suites
+
+
+image22 +
+
+
+
+
Regresion Suite:
+
+

Regression testing is a type of software testing which verifies that software which was previously developed and tested still performs the same way after it was changed or interfaced with another software.

+
+
+
    +
  • +

    Smoke

    +
  • +
  • +

    Business vital functionalities

    +
  • +
  • +

    Full scope of test cases

    +
  • +
+
+
+
+
Functional Suite:
+
+
    +
  • +

    Smoke

    +
  • +
  • +

    Business function A

    +
  • +
  • +

    Business function B

    +
  • +
+
+
+
+
Single Responsibility Unit:
+
+
    +
  • +

    Single page

    +
  • +
  • +

    Specific test case

    +
  • +
+
+
+
+
How to build a Test Suite based on tags
+ +
+
+
Structure of the Test Suite
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+
+image23 new +
+
+
+

Where:

+
+
+
    +
  • +

    @RunWith(JUnitPlatform.class) - use Junit5 runner

    +
  • +
  • +

    @IncludeTags({"TestsTag1"}) - search all test files with the tag "TestsTag1"

    +
  • +
  • +

    @ExcludeTags({"TagToExclude"}) - exclude test files with the tag "TagToExclude"

    +
  • +
  • +

    @SelectPackages("com.capgemini.mrchecker.core.groupTestCases.testCases") - search only test files in "com.capgemini.mrchecker.core.groupTestCases.testCases" package

    +
  • +
  • +

    public class TS_Tag1 - the name of the Test Suite is "TS_Tag1"

    +
  • +
+
+
+

Most commonly used filters to build a Test Suite are ones using:

+
+
+
    +
  • +

    @IncludeTags({ })

    +
  • +
  • +

    @ExcludeTags({ })

    +
  • +
+
+
+

Example:

+
+
+
    +
  1. +

    @IncludeTags({ "TestsTag1" }) , @ExcludeTags({ }) → will execute all test cases with the tag TestsTag1

    +
  2. +
  3. +

    @IncludeTags({ "TestsTag1" }) , @ExcludeTags({ "SlowTest" }) → will execute all test cases with tag "TestsTag1" although it will exclude from this list the test cases with the tag "SlowTest"

    +
  4. +
  5. +

    @IncludeTags({ }) , @ExcludeTags({ "SlowTest" }) → It will exclude test cases with the tag "SlowTest"

    +
  6. +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+image23 +
+
+
+

Where:

+
+
+
    +
  • +

    @RunWith(WildcardPatternSuiteBF.class) - search for test files under /src/test/java

    +
  • +
  • +

    @IncludeCategories({ TestsTag1.class }) - search for all test files with the tag "TestsTag1.class"

    +
  • +
  • +

    @ExcludeCategories({ }) - exclude test files. In this example, there is no exclusion

    +
  • +
  • +

    @SuiteClasses({ "**/*Test.class" }) - search only test files, where the file name ends with "<anyChar/s>Test.class"

    +
  • +
  • +

    public class TS_Tag1 - the name of the Test Suite is "TS_Tag1"

    +
  • +
+
+
+

Most commonly used filters to build Test Suite are ones using:

+
+
+
    +
  • +

    @IncludeCategories({ })

    +
  • +
  • +

    @ExcludeCategories({ })

    +
  • +
+
+
+

Example:

+
+
+
    +
  1. +

    @IncludeCategories({ TestsTag1.class }) , @ExcludeCategories({ }) → will execute all test cases with the tag TestsTag1.class

    +
  2. +
  3. +

    @IncludeCategories({ TestsTag1.class }) , @ExcludeCategories({ SlowTest.class }) → will execute all test cases with the tag "TestsTag1.class" although it will exclude from this list the test cases with the tag "SlowTest.class"

    +
  4. +
  5. +

    @IncludeCategories({ }) , @ExcludeCategories({ SlowTest.class }) → will execute all test cases from /src/test/java, although it will exclude from this list the test cases with the tag "SlowTest.class"

    +
  6. +
+
+
+
+
Structure of Test Case
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+
+image24 new +
+
+
+

Where:

+
+
+
    +
  • +

    @TestsTag1, @TestsSmoke, @TestsSelenium - list of tags assigned to this test case - "TestsTag1, TestsSmoke, TestSelenium" annotations

    +
  • +
  • +

    public class FristTest_tag1_Test - the name of the test case is "FristTest_tag1_Test"

    +
  • +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+image24 +
+
+
+

Where:

+
+
+
    +
  • +

    @Category({ TestsTag1.class, TestsSmoke.class, TestSelenium.class }) - list of tags / categories assigned to this test case - "TestsTag1.class, TestsSmoke.class, TestSelenium.class"

    +
  • +
  • +

    public class FristTest_tag1_Test - the name of the test case is "FristTest_tag1_Test"

    +
  • +
+
+
+
+
Structure of Tags / Categories
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+

Tag name: TestsTag1 annotation

+
+
+
+image25 new +
+
+
+

Tag name: TestsSmoke annotation

+
+
+
+image26 new +
+
+
+

Tag name: TestSelenium annotation

+
+
+
+image27 new +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+

Tag name: TestsTag1.class

+
+
+
+image25 +
+
+
+

Tag name: TestsSmoke.class

+
+
+
+image26 +
+
+
+

Tag name: TestSelenium.class

+
+
+
+image27 +
+
+
+
+
How to run Test Suite
+
+

To run a Test Suite you perform the same steps as you do to run a test case

+
+
+

Command line

+
+
+

Since mrchecker-core-module version 5.6.2.1:

+
+
+

JUnit5 disallows running suite classes from maven. Use -Dgroups=Tag1,Tag2 and -DexcludeGroups=Tag4,Tag5 to create test suites in maven.

+
+
+
+
mvn test site -Dgroups=TestsTag1
+
+
+
+

Prior to mrchecker-core-module version 5.6.2.1:

+
+
+
+
mvn test site -Dtest=TS_Tag1
+
+
+
+

Eclipse

+
+
+
+image28 +
+
+
+
+
Data driven approach
+
+

Data driven approach - External data driven

+
+
+

External data driven - Data as external file injected in test case

+
+
+

Test case - Categorize functionality and severity

+
+
+

You can find more information about data driven here and here

+
+
+

There are a few ways to define parameters for tests.

+
+
+
+
Internal Data driven approach
+
+

Data as part of test case

+
+
+

The different means to pass in parameters are shown below.

+
+
+

Since mrchecker-core-module version 5.6.2.1

+
+
+

Static methods are used to provide the parameters.

+
+
+
+
A method in the test class:
+
+
+
@ParameterizedTest
+@MethodSource("argumentsStream")
+
+
+
+

OR

+
+
+
+
@ParameterizedTest
+@MethodSource("arrayStream")
+
+
+
+

In the first case the arguments are directly mapped to the test method parameters. In the second case the array is passed as the argument.

+
+
+
+image30 new +
+
+
+
+
A method in a different class:
+
+
+
@ParameterizedTest
+@MethodSource("com.capgemini.mrchecker.core.datadriven.MyContainsTestProvider#provideContainsTrueParameters")
+
+
+
+
+image32 new +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1

+
+
+

Parameters that are passed into tests using the @Parameters annotation must be _Object[]_s

+
+
+
+
In the annotation:
+
+
+
@Parameters({"1, 2, 3", "3, 4, 7", "5, 6, 11", "7, 8, 15"})
+
+
+
+
+image30 +
+
+
+

The parameters must be primitive objects such as integers, strings, or booleans. Each set of parameters is contained within a single string and will be parsed to their correct values as defined by the test method’s signature.

+
+
+
+
In a method named in the annotation:
+
+
+
@Parameters(method = "addParameters")
+
+
+
+
+image31 +
+
+
+

A separate method can be defined and referred to for parameters. This method must return an Object[] and can contain normal objects.

+
+
+
+
In a class:
+
+
+
@Parameters(source = MyContainsTestProvider.class)
+
+
+
+
+image32 +
+
+
+

A separate class can be used to define parameters for the test. This test must contain at least one static method that returns an Object[], and its name must be prefixed with provide. The class could also contain multiple methods that provide parameters to the test, as long as they also meet the required criteria.

+
+
+
+
External Data Driven
+
+

Data as external file injected in test case

+
+
+

Since mrchecker-core-module version 5.6.2.1

+
+
+

Tests use the annotation @CsvFileSource to inject CSVs file.

+
+
+
+
@CsvFileSource(resources = "/datadriven/test.csv", numLinesToSkip = 1)
+
+
+
+

A CSV can also be used to contain the parameters for the tests. It is pretty simple to set up, as it’s just a comma-separated list.

+
+
+
+
Classic CSV
+
+
+image33 new +
+
+
+

and CSV file structure

+
+
+
+image34 +
+
+
+
+
CSV with headers
+
+
+image35 new +
+
+
+

and CSV file structure

+
+
+
+image36 +
+
+
+
+
CSV with specific column mapper
+
+
+image37 new +
+
+
+

and Mapper implementation

+
+
+
+image38 new +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1

+
+
+

Tests use the annotation @FileParameters to inject CSVs file.

+
+
+
+
@FileParameters("src/test/resources/datadriven/test.csv")
+
+
+
+

A CSV can also be used to contain the parameters for the tests. It is pretty simple to set up, as it’s just a comma-separated list.

+
+
+
+
Classic CSV
+
+
+image33 +
+
+
+

and CSV file structure

+
+
+
+image34 +
+
+
+
+
CSV with headers
+
+
+image35 +
+
+
+

and CSV file structure

+
+
+
+image36 +
+
+
+
+
CSV with specific column mapper
+
+
+image37 +
+
+
+

and Mapper implementation

+
+
+
+image38 +
+
+
+
+
What is "Parallel test execution" ?
+
+

Parallel test execution means many "Test Classes" can run simultaneously.

+
+
+

"Test Class", as this is a Junit Test class, it can have one or more test cases - "Test case methods"

+
+
+
+image39 +
+
+
+
+
How many parallel test classes can run simultaneously?
+
+

Since mrchecker-core-module version 5.6.2.1

+
+
+

JUnit5 supports parallelism natively. The feature is configured using a property file located at src\test\resources\junit-platform.properties. +As per default configuration, concurrent test execution is set to run test classes in parallel using the thread count equal to a number of your CPUs.

+
+
+
+image39a +
+
+
+

Visit JUnit5 site to learn more about parallel test execution.

+
+
+

Prior to mrchecker-core-module version 5.6.2.1

+
+
+

By default, number of parallel test classes is set to 8.

+
+
+

It can be updated as you please, on demand, by command line:

+
+
+
+
mvn test site -Dtest=TS_Tag1 -Dthread.count=16
+
+
+
+

-Dthread.count=16 - increase number of parallel Test Class execution to 16.

+
+
+
+
Overview
+
+

Cucumber / Selenium

+
+
+

Business and IT don’t always understand each other. Very often misunderstandings between business and IT result in the costly failure of IT projects. With this in mind, Cucumber was developed as a tool to support human collaboration between business and IT.

+
+
+

Cucumber uses executable specifications to encourage a close collaboration. This helps teams to keep the business goal in mind at all times. With Cucumber you can merge specification and test documentation into one cohesive whole, allowing your team to maintain one single source of truth. Because these executable specifications are automatically tested by Cucumber, your single source of truth is always up-to-date.

+
+
+
+image40 +
+
+
+

Cucumber supports testers when designing test cases. To automate these test cases, several languages can be used. Cucumber also works well with Browser Automation tools such as Selenium Webdriver.

+
+
+
+
== Selenium
+
+

Selenium automates browsers and is used for automating web applications for testing purposes. Selenium offers testers and developers full access to the properties of objects and the underlying tests, via a scripting environment and integrated debugging options.

+
+
+

Selenium consists of many parts. If you want to create robust, browser-based regression automation suites and tests, Selenium Webdriver is most appropriate. With Selenium Webdriver you can also scale and distribute scripts across many environments.

+
+
+
+
Strengths
+ +
+
+
== Supports BDD
+
+

Those familiar with Behavior Driven Development (BDD) recognize Cucumber as an excellent open source tool that supports this practice.

+
+
+
+
== All in one place
+
+

With Cucumber / Selenium you can automate at the UI level. Automation at the unit or API level can also be implemented using Cucumber. This means all tests, regardless of the level at which they are implemented, can be implemented in one tool.

+
+
+
+
== Maintainable test scripts
+
+

Many teams seem to prefer UI level automation, despite huge cost of maintaining UI level tests compared to the cost of maintaining API or unit tests. To lessen the maintenance of UI testing, when designing UI level functional tests, you can try describing the test and the automation at three levels: business rule, UI workflow, technical implementation.

+
+
+

When using Cucumber combined with Selenium, you can implement these three levels for better maintenance.

+
+
+
+
== Early start
+
+

Executable specifications can and should be written before the functionality is implemented. By starting early, teams get most return on investment from their test automation.

+
+
+
+
== Supported by a large community
+
+

Cucumber and Selenium are both open source tools with a large community, online resources and mailing lists.

+
+
+
+
How to run cucumber tests in Mr.Checker
+ +
+
+
Command line / Jenkins
+
+
    +
  • +

    Run cucumber tests and generate Allure report. Please use this for Jenkins execution. Report is saved under ./target/site.

    +
    +
    +
    mvn clean -P cucumber test site
    +
    +
    +
  • +
  • +

    Run and generate report

    +
    +
    +
    mvn clean -P cucumber test site allure:report
    +
    +
    +
  • +
  • +

    Run cucumber tests, generate Allure report and start standalone report server

    +
    +
    +
    mvn clean -P cucumber test site allure:serve
    +
    +
    +
  • +
+
+
+
+
Eclipse IDE
+
+
+image41 +
+
+
+
+
Tooling
+ +
+
+
== Cucumber
+
+

Cucumber supports over a dozen different software platforms. Every Cucumber implementation provides the same overall functionality, but they also have their own installation procedure and platform-specific functionality. See https://cucumber.io/docs for all Cucumber implementations and framework implementations.

+
+
+

Also, IDEs such as Intellij offer several plugins for Cucumber support.

+
+
+
+
== Selenium
+
+

Selenium has the support of some of the largest browser vendors who have taken (or are taking) steps to make Selenium a native part of their browser. It is also the core technology in countless other browser automation tools, APIs and frameworks.

+
+
+
+
Automation process
+ +
+
+
== Write a feature file
+
+

Test automation in Cucumber starts with writing a feature file. A feature normally consists of several (test)scenarios and each scenario consists of several steps.

+
+
+

Feature: Refund item

+
+
+

Scenario: Jeff returns a faulty microwave

+
+
+

Given Jeff has bought a microwave for $100

+
+
+

And he has a receipt

+
+
+

When he returns the microwave

+
+
+

Then Jeff should be refunded $100

+
+
+

The above example shows a feature “Refund item” with one scenario “Jeff returns a faulty microwave”. The scenario consists of four steps, each starting with a key word (Given, And, When, Then).

+
+
+
+
== Implementing the steps
+
+

Next the steps are implemented. Assuming we use Java to implement the steps, the Java code will look something like this.

+
+
+
+
public class MyStepdefs \{
+
+	@Given("Jeff has bought a microwave for $(\d+)")
+
+	public void Jeff_has_bought_a_microwave_for(int amount) \{
+
+		// implementation can be plain java
+
+		// or selenium
+
+		driver.findElement(By.name("test")).sendKeys("This is an example\n");
+
+		driver.findElement(By.name("button")).click();// etc
+	}
+}
+
+
+
+

Cucumber uses an annotation (highlighted) to match the step from the feature file with the function implementing the step in the Java class. The name of the class and the function can be as the developer sees fit. Selenium code can be used within the function to automate interaction with the browser.

+
+
+
+
== Running scenarios
+
+

There are several ways to run scenarios with Cucumber, for example the JUnit runner, a command line runner and several third party runners.

+
+
+
+
== Reporting test results
+
+

Cucumber can report results in several different formats, using formatter plugins

+
+
+
+
Features
+ +
+
+
== Feature files using Gherkin
+
+

Cucumber executes your feature files. As shown in the example below, feature files in Gherkin are easy to read so they can be shared between IT and business. Data tables can be used to execute a scenario with different inputs.

+
+
+
+image42 +
+
+
+
+
== Organizing tests
+
+

Feature files are placed in a directory structure and together form a feature tree.

+
+
+

Tags can be used to group features based on all kinds of categories. Cucumber can include or exclude tests with certain tags when running the tests.

+
+
+
+
Reporting test results
+
+

Cucumber can report results in several formats, using formatter plugins. +Not supported option by Shared Services: The output from Cucumber can be used to present test results in Jenkins or Hudson depending of the preference of the project.

+
+
+
+image43 +
+
+
+
+
HOW IS Cucumber / Selenium USED AT Capgemini?
+ +
+
+
Tool deployment
+
+

Cucumber and Selenium are chosen as one of Capgemini’s test automation industrial tools. We support the Java implementation of Cucumber and Selenium Webdriver. We can help with creating Cucumber, Selenium projects in Eclipse and IntelliJ.

+
+
+
+
Application in ATaaS (Automated Testing as a Service)
+
+

In the context of industrialisation, Capgemini has developed a range of services to assist and support the projects in process and tools implementation.

+
+
+

In this context a team of experts assists projects using test automation.

+
+
+

The main services provided by the center of expertise are:

+
+
+
    +
  • +

    Advise on the feasibility of automation.

    +
  • +
  • +

    Support with installation.

    +
  • +
  • +

    Coaching teams in the use of BDD.

    +
  • +
+
+
+
+
Run on independent Operation Systems
+
+

As E2E Allure test framework is build on top of:

+
+
+
    +
  • +

    Java 1.8

    +
  • +
  • +

    Maven 3.3

    +
  • +
+
+
+

This guarantees portability to all operating systems.

+
+
+

E2E Allure test framework can run on OS:

+
+
+
    +
  • +

    Windows,

    +
  • +
  • +

    Linux and

    +
  • +
  • +

    Mac.

    +
  • +
+
+
+

Test creation and maintenance in E2E Allure test framework can be done with any type of IDE:

+
+
+
    +
  • +

    Eclipse,

    +
  • +
  • +

    IntelliJ,

    +
  • +
  • +

    WebStorm,

    +
  • +
  • +

    Visual Studio Code,

    +
  • +
  • +

    many more that support Java + Maven.

    +
  • +
+
+
+
+
System under test environments
+
+
+image44 +
+
+
+
    +
  • +

    Quality assurance or QA is a way of preventing mistakes or defects in manufactured products and avoiding problems when delivering solutions or services to customers; which ISO 9000 defines as "part of quality management focused on providing confidence that quality requirements will be fulfilled".

    +
  • +
  • +

    System integration testing or SIT is a high-level software testing process in which testers verify that all related systems maintain data integrity and can operate in coordination with other systems in the same environment. The testing process ensures that all sub-components are integrated successfully to provide expected results.

    +
  • +
  • +

    Development or Dev testing is performed by the software developer or engineer during the construction phase of the software development life-cycle. Rather than replace traditional QA focuses, it augments it. Development testing aims to eliminate construction errors before code is promoted to QA; this strategy is intended to increase the quality of the resulting software as well as the efficiency of the overall development and QA process.

    +
  • +
  • +

    Prod If the customer accepts the product, it is deployed to a production environment, making it available to all users of the system.

    +
  • +
+
+
+
+image45 +
+
+
+
+
How to use system environment
+
+

In Page classes, when you load / start a web page, it is uncommon to save a fixed main URL.

+
+
+

Value flexibility is a must when your web application under test has a different main URL depending on the environment (DEV, QA, SIT, …​, PROD)

+
+
+

Instead of a hard-coded main URL variable, you build your Page classes with a dynamic variable.

+
+
+

Example of dynamic variable GetEnvironmentParam.WWW_FONT_URL

+
+
+
+image46 +
+
+
+
+
How to create / update system environment
+ +
+
+
External file with variable values
+
+

Dynamic variable values are stored under path mrchecker-app-under-test\src\resources\enviroments\environments.csv.

+
+
+

NOTE: As environments.csv is a comma-separated file, please be careful while editing it and then saving it under Excel.

+
+
+
+image47 +
+
+
+
+
Encrypting sensitive data
+
+

Some types of data you might want to store as environment settings are sensitive in nature (e.g. passwords). You might not want to store them (at least not in their plaintext form) in your repository. To be able to encrypt sensitive data you need to do the following:

+
+
+
    +
  1. +

    Create a secret (long, random chain of characters) and store it under mrchecker-app-under-test\src\resources\secretData.txt. Example: LhwbTm9V3FUbBO5Tt5PiTUEQrXGgWrDLCMthnzLKNy1zA5FVTFiTdHRQAyPRIGXmsAjPUPlJSoSLeSBM

    +
  2. +
  3. +

    Exclude the file from being checked into the git repository by adding it to git.ignore. You will need to pass the file over a different channel among your teammates.

    +
  4. +
  5. +

    Encrypt the values before putting them into the environments.csv file by creating following script (put the script where your jasypt library resides, e.g. C:\MrChecker_Test_Framework\m2\repository\org\jasypt\jasypt\1.9.2):

    +
    +
    +
    @ECHO OFF
    +
    +set SCRIPT_NAME=encrypt.bat
    +set EXECUTABLE_CLASS=org.jasypt.intf.cli.JasyptPBEStringEncryptionCLI
    +set EXEC_CLASSPATH=jasypt-1.9.2.jar
    +if "%JASYPT_CLASSPATH%" ==  "" goto computeclasspath
    +set EXEC_CLASSPATH=%EXEC_CLASSPATH%;%JASYPT_CLASSPATH%
    +
    +:computeclasspath
    +IF "%OS%" ==  "Windows_NT" setlocal ENABLEDELAYEDEXPANSION
    +FOR %%c in (%~dp0..\lib\*.jar) DO set EXEC_CLASSPATH=!EXEC_CLASSPATH!;%%c
    +IF "%OS%" ==  "Windows_NT" setlocal DISABLEDELAYEDEXPANSION
    +
    +set JAVA_EXECUTABLE=java
    +if "%JAVA_HOME%" ==  "" goto execute
    +set JAVA_EXECUTABLE="%JAVA_HOME%\bin\java"
    +
    +:execute
    +%JAVA_EXECUTABLE% -classpath %EXEC_CLASSPATH% %EXECUTABLE_CLASS% %SCRIPT_NAME% %*
    +
    +
    +
  6. +
  7. +

    Encrypt the values by calling

    +
    +
    +
    .\encrypt.bat input=someinput password=secret
    +
    +----ENVIRONMENT-----------------
    +
    +Runtime: Oracle Corporation Java HotSpot(TM) 64-Bit Server VM 25.111-b14
    +
    +
    +
    +----ARGUMENTS-------------------
    +
    +input: someinput
    +password: secret
    +
    +
    +
    +----OUTPUT----------------------
    +
    +JN3nOFol2GMZoUxR5z2wI2qdipcNH1UD
    +
    +
    +
  8. +
  9. +

    Mark the value as encrypted by adding a prefix 'ENC(' and suffix ')' like: ENC(JN3nOFol2GMZoUxR5z2wI2qdipcNH1UD)

    +
    +
    +image48 +
    +
    +
  10. +
+
+
+
+
Bridge between external file and Page class
+
+

To map values from external file with Page class you ought to use class GetEnvironmentParam.

+
+
+

Therefore when you add new variable (row) in environments.csv you might need to add this variable to GetEnvironmentParam.

+
+
+
+image49 +
+
+
+
+
Run test case with system environment
+
+

To run test case with system environment, please use:

+
+
+
    +
  • +

    -Denv=<NameOfEnvironment>

    +
  • +
  • +

    <NameOfEnvironment> is taken as column name from file mrchecker-app-under-test\src\test\resources\enviroments\environments.csv

    +
  • +
+
+
+
+
Command Line
+
+
+
mvn test site -Dtest=RegistryPageTest -Denv=DEV
+
+
+
+
+
Eclipse
+
+
+image50 +
+
+
+
+image51 +
+
+ +
+
+
System under test environments
+
+
+image080 +
+
+
+
    +
  • +

    Quality assurance or QA is a way of preventing mistakes or defects in the manufactured products and avoiding problems when delivering solutions or services to customers which ISO 9000 defines as "part of quality management focused on providing confidence that quality requirements will be fulfilled".

    +
  • +
  • +

    System integration testing or SIT is a high-level software testing process in which testers verify that all related systems maintain data integrity and can operate in coordination with other systems in the same environment. The testing process ensures that all sub-components are integrated successfully to provide expected results.

    +
  • +
  • +

    Development or Dev testing is performed by the software developer or engineer during the construction phase of the software development life-cycle. Rather than replace traditional QA focuses, it augments it. Development testing aims to eliminate construction errors before code is promoted to QA; this strategy is intended to increase the quality of the resulting software as well as the efficiency of the overall development and QA process.

    +
  • +
  • +

    Prod If the customer accepts the product, it is deployed to a production environment, making it available to all users of the system.

    +
  • +
+
+
+
+image051 +
+
+
+
+
How to use system environment
+
+

In Page classes, when you load / start web, it is uncommon to save fixed main url.

+
+
+

Value flexibility is a must, when your web application under test has different main url, depending on the environmnent (DEV, QA, SIT, …​, PROD)

+
+
+

Instead of hard coded main url variable, you build your Page classes with dynamic variable.

+
+
+

An example of dynamic variable GetEnvironmentParam.WWW_FONT_URL

+
+
+
+image081 +
+
+
+
+
How to create / update system environment
+ +
+
+
External file with variable values
+
+

Dynamic variable values are stored under mrchecker-app-under-test\src\resources\enviroments\environments.csv.

+
+
+

NOTE: As environments.csv is a comma-separated file, please be careful while editing and then save it under Excel.

+
+
+
+image082 +
+
+
+
+
Encrypting sensitive data
+
+

Some types of data you might want to store as environment settings are sensitive in nature (e.g. passwords). You might not want to store them (at least not in their plaintext form) in your repository. To be able to encrypt sensitive data you need to do the following:

+
+
+
    +
  1. +

    Create a secret (long, random chain of characters) and store it under mrchecker-app-under-test\src\resources\secretData.txt. Example: LhwbTm9V3FUbBO5Tt5PiTUEQrXGgWrDLCMthnzLKNy1zA5FVTFiTdHRQAyPRIGXmsAjPUPlJSoSLeSBM

    +
  2. +
  3. +

    Exclude the file from being checked into the git repository by adding it to git.ignore. You will need to pass the file over a different channel among your teammates.

    +
  4. +
  5. +

    Encrypt the values before putting them into the environments.csv file by creating following script (put the script where your jasypt library resides, e.g. C:\MrChecker_Test_Framework\m2\repository\org\jasypt\jasypt\1.9.2):

    +
  6. +
+
+
+
+
@ECHO OFF
+
+set SCRIPT_NAME=encrypt.bat
+set EXECUTABLE_CLASS=org.jasypt.intf.cli.JasyptPBEStringEncryptionCLI
+set EXEC_CLASSPATH=jasypt-1.9.2.jar
+if "%JASYPT_CLASSPATH%" ==  "" goto computeclasspath
+set EXEC_CLASSPATH=%EXEC_CLASSPATH%;%JASYPT_CLASSPATH%
+
+:computeclasspath
+IF "%OS%" ==  "Windows_NT" setlocal ENABLEDELAYEDEXPANSION
+FOR %%c in (%~dp0..\lib\*.jar) DO set EXEC_CLASSPATH=!EXEC_CLASSPATH!;%%c
+IF "%OS%" ==  "Windows_NT" setlocal DISABLEDELAYEDEXPANSION
+
+set JAVA_EXECUTABLE=java
+if "%JAVA_HOME%" ==  "" goto execute
+set JAVA_EXECUTABLE="%JAVA_HOME%\bin\java"
+
+:execute
+%JAVA_EXECUTABLE% -classpath %EXEC_CLASSPATH% %EXECUTABLE_CLASS% %SCRIPT_NAME% %*
+
+
+
+
    +
  1. +

    Encrypt the values by calling

    +
  2. +
+
+
+
+
.\encrypt.bat input=someinput password=secret
+
+----ENVIRONMENT-----------------
+
+Runtime: Oracle Corporation Java HotSpot(TM) 64-Bit Server VM 25.111-b14
+
+
+
+----ARGUMENTS-------------------
+
+input: someinput
+password: secret
+
+
+
+----OUTPUT----------------------
+
+JN3nOFol2GMZoUxR5z2wI2qdipcNH1UD
+
+
+
+
    +
  1. +

    Mark the value as encrypted by adding a prefix 'ENC(' and suffix ')' like: ENC(JN3nOFol2GMZoUxR5z2wI2qdipcNH1UD)

    +
  2. +
+
+
+
+image083 +
+
+
+
+
Bridge between external file and Page class
+
+

To map values from external file with Page class you ought to use class GetEnvironmentParam

+
+
+

Therefore when you add new variable (row) in environments.csv you might need to add this variable to GetEnvironmentParam.

+
+
+
+image084 +
+
+
+
+
Run test case with system environment
+
+

To run test case with system environment, please use: +* -Denv=\<NameOfEnvironment\> +* \<NameOfEnvironment\> is taken as column name from file mrchecker-app-under-test\src\test\resources\enviroments\environments.csv

+
+
+

Since mrchecker-core-module version 5.6.2.1 +== Command Line

+
+
+
+
mvn test site -Dgroups=RegistryPageTestTag -Denv=DEV
+
+
+
+
+
Eclipse
+
+
+image085 +
+
+
+
+image086 new +
+
+
+

Prior to mrchecker-core-module version 5.6.2.1 +== Command Line

+
+
+
+
mvn test site -Dtest=RegistryPageTest -Denv=DEV
+
+
+
+
+
Eclipse
+
+
+image085 +
+
+
+
+image086 +
+
+
+
+
+
+

Selenium Module

+
+ +
+
Selenium Test Module
+ +
+
+
What is MrChecker E2E Selenium Test Module
+
+
+image2 +
+
+
+
+
Selenium Structure
+ +
+
+
Framework Features
+
+ +
+
+
+
How to start?
+ +
+
+
Selenium Best Practices
+ +
+
+
Selenium UFT Comparison
+ +
+
+
+
+

Selenium Structure

+
+
+
What is Selenium
+
+

Selenium is a framework for testing browser applications. The test automation supports:

+
+
+
    +
  • +

    Frequent regression testing

    +
  • +
  • +

    Repeating test case executions

    +
  • +
  • +

    Documentation of test cases

    +
  • +
  • +

    Finding defects

    +
  • +
  • +

    Multiple Browsers

    +
  • +
+
+
+

The Selenium testing framework consists of multiple tools:

+
+
+
    +
  • +

    Selenium IDE

    +
    +

    The Selenium Integrated Development Environment is a prototyping tool for building test scripts. It is a Firefox Plugin and provides an easy-to-use interface for developing test cases. Additionally, Selenium IDE contains a recording feature, that allows the user to record user inputs that can be automatically re-executed in future.

    +
    +
  • +
  • +

    Selenium 1

    +
    +

    Selenium 1, also known as Selenium RC, commands a Selenium Server to launch and kill browsers, interpreting the Selenese commands passed from the test program. The Server acts as an HTTP proxy. This tool is deprecated.

    +
    +
  • +
  • +

    Selenium 2

    +
    +

    Selenium 2, also known as Selenium WebDriver, is designed to supply a well-designed, object-oriented API that provides improved support for modern advanced web-app testing problems.

    +
    +
  • +
  • +

    Selenium 3.0

    +
    +

    The major change in Selenium 3.0 is removing the original Selenium Core implementation and replacing it with one backed by WebDriver. There is now a W3C specification for browser automation, based on the Open Source WebDriver.

    +
    +
  • +
  • +

    Selenium Grid

    +
    +

    Selenium Grid allows the scaling of Selenium RC test cases, that must be run in multiple and potentially variable environments. The tests can be run in parallel on different remote machines.

    +
    +
  • +
+
+
+
+
Selenium on the Production Line
+
+

More information on Selenium on the Production Line can be found here.

+
+
+

tl;dr

+
+
+

The Production Line has containers running Chrome and Firefox Selenium Nodes. The communication with these nodes is accomplished using Selenium Grid.

+
+
+

Having issues using Selenium on the Production Line? Check the Production Line issue list, maybe it’s a known issue that can be worked around.

+
+
+
+
What is WebDriver
+
+

On the one hand, it is a very convenient API for a programmer that allows for interaction with the browser, on the other hand it is a driver concept that enables this direct communication.

+
+
+
+image53 +
+
+
+
+
== How does it work?
+
+
+image54 +
+
+
+

A tester, through their test script, can command WebDriver to perform certain actions on the WAUT on a certain browser. The way the user can command WebDriver to perform something is by using the client libraries or language bindings provided by WebDriver.

+
+
+

By using the language-binding client libraries, a tester can invoke browser-specific implementations of WebDriver, such as Firefox Driver, IE Driver, Opera Driver, and so on, to interact with the WAUT of the respective browser. These browser-specific implementations of WebDriver will work with the browser natively and execute commands from outside the browser to simulate exactly what the application user does.

+
+
+

After execution, WebDriver will send the test result back to the test script for developer’s analysis.

+
+
+
+
What is Page Object Model?
+
+
+image55 +
+
+
+

Creating Selenium test cases can result in an unmaintainable project. One of the reasons is that too much duplicated code is used. Duplicated code could result from duplicated functionality leading to duplicated usage of locators. The main disadvantage of duplicated code is that the project is less maintainable. If a locator changes, you have to walk through the whole test code to adjust locators where necessary. By using the page object model we can make non-brittle test code and reduce or eliminate duplicate test code. In addition, it improves the readability and allows us to create interactive documentation. Last but not least, we can create tests with fewer keystrokes. An implementation of the page object model can be achieved by separating the abstraction of the test object and the test scripts.

+
+
+
+image56 +
+
+
+
+
Basic Web elements
+
+

This page will provide an overview of basic web elements.

+
+
+
+image57 +
+
+
+
+image58 +
+
+
+

|== = +|Name +|Method to use element

+
+
+

|Form: Input Text +|elementInputText()

+
+
+

|Form: Label +|elementLabel()

+
+
+

|Form: Submit Button +|elementButton()

+
+
+

|Page: Button +|elementButton()

+
+
+

|Checkbox +|elementCheckbox()

+
+
+

|Radio +|elementRadioButton()

+
+
+

|Elements (Tabs, Cards, Account, etc.) +|elementTab()

+
+
+

|Dropdown List +|elementDropdownList()

+
+
+

|Link +|-

+
+
+

|Combobox +|elementList() +|== =

+
+
+

Comparison of how picking a value from a checkbox can be done:

+
+
+
    +
  • +

    by classic Selenium atomic actions

    +
  • +
  • +

    by our enhanced Selenium wrapper

    +
  • +
+
+
+

Classic Selenium atomic actions

+
+
+
+
List<WebElement> checkboxesList = getDriver()
+                .findElements(selectorHobby);
+WebElement currentElement;
+for (int i = 0; i < checkboxesList.size(); i++) {
+    currentElement = checkboxesList.get(i);
+    if (currentElement.getAttribute("value")
+                    .equals(hobby.toString()) && currentElement.isSelected() != true)
+                        {
+        currentElement.click();
+            }
+}
+
+
+
+

Enhanced Selenium in E2E test framework

+
+
+
+
getDriver().elementCheckbox(selectorHobby)
+				.setCheckBoxByValue(hobby.toString());
+
+
+
+
+
+
+

Framework Features

+
+
+
Page Class
+
+

Page Object Models allow for the representation of a webpage as a Java Class. The class contains all required web elements like buttons, textfields, labels, etc. When initializing a new project, create a new package to store the Page Object Models in.

+
+
+
+
Initialization
+
+

Source folder: allure-app-under-test/src/main/java

+
+
+

Name: com.example.selenium.pages.YOUR_PROJECT

+
+
+

Classes being created inside of this new package have to extend the BasePage class. As a result, a few abstract methods from BasePage have to be implemented.

+
+
+
+
public class DemoPage extends BasePage {
+
+	@Override
+	public boolean isLoaded() {
+
+	}
+
+	@Override
+	public void load() {
+
+	}
+
+	@Override
+	public String pageTitle() {
+
+	}
+}
+
+
+
+

The example above demonstrates a minimum valid Page Object class with all required methods included.

+
+
+
+
BasePage method: isLoaded
+
+

The inherited method isLoaded() can be used to check if the current Page Object Model has been loaded correctly. There are multiple ways to verify a correctly loaded page. One example would be to compare the actual page title with the expected page title.

+
+
+
+
public boolean isLoaded() {
+	if(getDriver().getTitle().equals("EXPECTED_TITLE")) {
+		return true;
+	}
+	return false;
+}
+
+
+
+
+
BasePage method: load
+
+

The method load() can be used to tell the webdriver to load a specific page.

+
+
+
+
public void load() {
+	getDriver().get("http://SOME_PAGE");
+}
+
+
+
+
+
BasePage method: pageTitle
+
+

The pageTitle() method returns a String containing the page title.

+
+
+
+
Creating a selector variable
+
+

To initialize web elements, a large variety of selectors can be used.

+
+
+

We recommend creating a private and constant field for every web element you’d like to represent in Java. Use the guide above to find the preferred selector and place it in the code below at "WEB_ELEMENT_SELECTOR".

+
+
+
+
private static final By someWebElementSelector = By.CSS("WEB_ELEMENT_SELECTOR");
+
+
+
+

As soon as you create the selector above, you can make use of it to initialize a WebElement object.

+
+
+
+
WebElement someWebElement = getDriver().findDynamicElement(someWebElementSelector);
+
+
+
+

Note: The examples displayed in the cssSelector.docx file use the Selenium method driver.findElement() to find elements. However, using this framework we recommend findDynamicElement() or findQuietlyElement().findDynamicElement() allows waiting for dynamic elements, for example buttons that pop up.

+
+
+
+
Creating a page method
+
+

To interact with the page object, we recommend creating methods for each action.

+
+
+
+
public void enterGoogleSearchInput(String query) {
+	...
+}
+
+
+
+

Creating a method like the one above allows the test case to run something like googleSearchPage.enterGoogleSearchInput("Hello") to interact with the page object.

+
+
+
+
Naming Conventions
+
+

For code uniformity and readability, we provide a few method naming conventions.

+
+
+

|===

+
+
+

|Element +|Action +|Name (example)

+
+
+

|Form: Input text +|enter +|enterUsernameInput()

+
+
+

| +|is (label) +|isUsernameInputPresent()

+
+
+

| +|is (value) +|isUsernameEmpty()

+
+
+

| +|get +|getUsernameValue()

+
+
+

|Form: Label +|get +|getCashValue()

+
+
+

| +|is (value) +|isCashValueEmpty()

+
+
+

| +|is (label) +|isCashLabelPresent()

+
+
+

|Form: Submit Button +|submit +|submitLoginForm()

+
+
+

| +|is +|isLoginFormPresent()

+
+
+

|Page: Button +|click +|clickInfoButton()

+
+
+

| +|is +|isInfoButtonPresent()

+
+
+

|Checkbox +|set +|setRememberMeCheckbox()

+
+
+

| +|unset +|unsetRememberMeCheckbox()

+
+
+

| +|is (present) +|isRememberMeCheckboxPresent()

+
+
+

| +|is (value) +|isRememberMeCheckboxSet()

+
+
+

|Radio +|set +|setMaleRadioValue("Woman")

+
+
+

| +|is (present) +|isMaleRadioPresent()

+
+
+

| +|is (visible) +|isMaleRadioVisible()

+
+
+

| +|get +|getSelectedMaleValue()

+
+
+

|Elements (Tabs, Cards, Account, etc.) +|click +|clickPositionTab() / clickMyBilanceCard()

+
+
+

| +|is +|isMyBilanceCardPresent()

+
+
+

|Dropdown List +|select +|selectAccountTypeValue(typeName)

+
+
+

| +|unselect +|unselectAccountTypeValue(typeName)

+
+
+

| +|multiple select +|selectAccountTypesValues(List typeNames)

+
+
+

| +|is (list) +|isAccountTypeDropdownListPresent()

+
+
+

| +|is (element present) +|isAccountTypeElementPresent(typeName)

+
+
+

| +|is (element selected) +|isAccountTypeSelected(typeName)

+
+
+

|Link +|click +|clickMoreLink()

+
+
+

| +|is +|isMoreLinkPresent()

+
+
+

|Combobox +|select +|selectSortCombobox()

+
+
+

| +|is (present) +|isSortComboboxPresent(name)

+
+
+

| +|is (contain) +|isSortComboboxContain(name)

+
+
+

|Element Attribute +|get +|getPositionTabCss()

+
+
+

| +|get +|getMoreLinkHref() / getRememberMeCheckboxName()

+
+
+

|===

+
+
+

A css selector is used to select elements from an HTML page.

+
+
+

Selection by element tag, class or id are the most common selectors.

+
+
+
+
<p class='myText' id='123'>
+
+
+
+

This text element (p) can be found by using any one of the following selectors:

+
+
+
+
The HTML element: "p". Note: in practical use this will be too generic, if a preceding text section is added, the selected element will change.
+The class attribute preceded by ".": ".myText"
+The id attribute preceded by "#": "#123"
+
+
+
+
+
Using other attributes
+
+

When a class or an id attribute is not sufficient to identify an element, other attributes can be used as well, by using "[attribute=value]": For example:

+
+
+
+
<a href='https://ns.nl/example.html'>
+
+
+
+

This can be selected by using the entire value: "a[href='https://ns.nl/example.html']". For selecting links starting with, containing, ending with see the list below.

+
+
+
+
Using sub-elements
+
+

The css selectors can be stacked, by appending them:

+
+
+
+
<div id='1'><a href='ns.nl'></div>
+<div id='2'><a href='nsinternational.nl'></div>
+
+
+
+

In the example above, the link element to nsinternational can be obtained with: "#2 a".

+
+
+
+
When possible avoid
+
+
    +
  • +

    Using paths of commonly used HTML elements within the containers (HTML: div). This will cause failures when a container is added, a common occurrence during development, e.g. "div div p". Use class or id instead, if those are not available, request them to be added in the production code.

    +
  • +
  • +

    Magic order numbers. It is possible to get the second text element in its parent container by using the selector "p:nth-child(2)". If the items are representing different items, ask the developer to add specific attributes. It is also possible to request all items, with a selector similar to ".myList li", and iterate through them later.

    +
  • +
+
+
+
+
List
+
+

A good list with CSS Selectors can be found at W3Schools:
+https://www.w3schools.com/cssref/css_selectors.asp

+
+
+
+
Selenium UFT Comparison
+
+

|===

+
+
+

|Subject +|HP UFT +|HP LeanFT +|Selenium +|Selenium IDE

+
+
+

|Language +|VBScript +|Same as Selenium +|Supports several languages. +Java +|Javascript

+
+
+

|Learning curve +|Based on VBScript which is relatively easy to learn +|Less intuitive, more coding knowledge necessary +|Less intuitive, more coding skills necessary +|Record/playback possible. Generated code difficult to maintain

+
+
+

|Project type +|Traditional +|Agile +|Agile +|Agile

+
+
+

|User oriented +|More Tester +|More Developer +|More Developer +|More Tester

+
+
+

|Object recognition +|Test object identification and storage in object repository +|Same as UFT +|With Firebug +|Same as SE

+
+
+

|Customizations +|Only the available standard. No customization +|Same as UFT +|Lots of customizations possible +|Fewer than SE

+
+
+

|Framework +|Needed. +Exists in ATaaS +| +|Needed. +Integration with Fitnesse, Cucumber, Gauge +|No Framework. Limited capabilities of the tool.

+
+
+

|Operating System support +|Runs on Windows +|Runs on Windows +|Multiple OS support. With Grid: testing on multiple devices at same time +|Plugin for Firefox

+
+
+

|Application coverage +|Many +|Many +|Web only +|Web only

+
+
+

|Multiple browsers +|In UFT 12.5 available +|In 12.5 available +|Multiple tests in multiple browser windows at once and faster support for new browser versions +|Multiple tests in multiple browser windows at once and faster support for new browser versions

+
+
+

|System Load +|High system load (RAM & CPU usage) +|Lower load than HP UFT? +|Lower load than HP UFT +|Lower load than HP UFT

+
+
+

|ALM integration +|With HP ALM – full integration +| +|Jira, Jenkins +Not with ALM tool +|Same as SE

+
+
+

|Integration with other tools +|A lot can be built, but many are already covered. +|More than UFT. +|Freeware and can be integrated with different open source tools +|Freeware and can be integrated with different open source tools

+
+
+

|Addins +|Add-ins necessary to access all capabilities of the tool – license related +|Same as UFT +|See integration with other tools +|See integration with other tools

+
+
+

|Reporting +|Complete, link to ALM +|Same as UFT +|No native mechanism for generating reports, but multiple plugins available for reporting +|No native mechanism for generating reports, but multiple plugins available for reporting

+
+
+

|Support +|HP full support +|Same as UFT +|Limited support as it is open source +|Limited support as it is open source

+
+
+

|License costs +|About 17K – Capgemini price 5K. +Included in the S2 service charge +|Same price as HP UFT +|Free +|Free +limited functionality (no iterations / conditional statements)

+
+
+

|iVAL Service +|ATaaS +|Not in a S2 service +|Not in a S2 service +|Not in a S2 service

+
+
+

|===

+
+
+

Bold for key differentiators.

+
+
+

Projects also choose an available resource and the knowledge of that resource.

+
+
+

Both: Framework determines the quality of automation. Needs to be set up by someone with experience with the tool

+
+
+
+
Run on different browsers
+
+
+image59 +
+
+
+

To execute each test with a chosen installed browser, specific arguments are required in Run configuration.

+
+
+
+image60 +
+
+
+
+image61 +
+
+
+

It is necessary to enter -Dbrowser= with browser parameter name as an argument (in 'Arguments' tab):

+
+
+

firefox +ie +phantomjs +chrome +chromeheadless +For example: -Dbrowser=ie

+
+
+
+
_-ea_ should be entered as an argument to restore default settings.
+
+
+
+
+
Browser options
+
+

To run a browser with specific options during runtime, please use

+
+
+

-DbrowserOptions="< options >"

+
+
+
+
> mvn test -DbrowserOptions="param1"
+> mvn test -DbrowserOptions="param1=value1"
+
+
+
+

examples:

+
+
+
    +
  • +

    One parameter -DbrowserOptions="headless"

    +
  • +
  • +

    One parameter -DbrowserOptions="--incognito"

    +
  • +
  • +

    Many parameters -DbrowserOptions="headless;param1=value1;testEquals=FirstEquals=SecondEquals;--testMe"

    +
  • +
+
+
+

List of options/capabilities supported by:

+
+
+ +
+
+
+
Run with full range of resolution
+
+
+image62 +
+
+
+

In order to execute tests in different browser resolutions, it is required to provide these resolutions as a test parameter.

+
+
+

Test example with resolutions included may be found in ResolutionTest test class

+
+
+
+image63 +
+
+
+

Example of resolution notation is available in ResolutionEnum class

+
+
+
+image64 +
+
+
+

Test with given resolution parameters will be launched as many times as the number of resolutions provided.

+
+
+
+
Selenium Best Practices
+
+

The following table displays a few best practices that should be taken into consideration when developing Selenium test cases.

+
+
+

|===

+
+
+

|Best Practices +|Description

+
+
+

|"Keep it Simple" +|Do not force use every Selenium feature available - Plan before creating the actual test cases

+
+
+

|Using Cucumber +|Cucumber can be used to create initial testcases for further decision making

+
+
+

|Supporting multiple browsers +|Test on multiple browsers (in parallel, if applicable) if the application is expected to support multiple environments

+
+
+

|Test reporting +|Make use of test reporting modules like Junit which is included in the framework

+
+
+

|Maintainability +|Always be aware of the maintainability of tests - You should always be able to adapt to changes

+
+
+

|Testing types +|Which tests should be created? Rule of thumb: 70% Unit test cases, 20% Integration test cases and 10% UI Test cases

+
+
+

|Test data +|Consider before actually developing tests and choosing tools: Where to get test data from, how to reset test data

+
+
+

|===

+
+
+
+
+
+

Web API Module

+
+ + +
+
Is it doable to keep pace in QA with today’s software agile approach?
+
+

DevOps + Microservices + Shift left + Time to Market == ? Service virtualization ?

+
+
+
+image72 +
+
+
+

Test pyramid

+
+
+
+image73 +
+
+
+
+
What is service virtualization
+
+

Service Virtualization has become recognized as one of the best ways to speed up testing and accelerate your time to market.

+
+
+

Service virtualization lets you automatically execute tests even when the application under test’s dependent system components (APIs, third-party applications, etc.) cannot be properly accessed or configured for testing. By simulating these dependencies, you can ensure that your tests will encounter the appropriate dependency behaviour and data each and every time that they execute.

+
+
+

Service virtualization is the simulation of interfaces – not the virtualization of systems.

+
+
+

According to Wikipedia’s service virtualization entry: Service virtualization emulates the behaviour of software components to remove dependency constraints on development and testing teams. Such constraints occur in complex, interdependent environments when a component connected to the application under test is:

+
+
+
    +
  • +

    Not yet completed

    +
  • +
  • +

    Still evolving

    +
  • +
  • +

    Controlled by a third-party or partner

    +
  • +
  • +

    Available for testing only in a limited capacity or at inconvenient times

    +
  • +
  • +

    Difficult to provision or configure in a test environment

    +
  • +
  • +

    Needed for simultaneous access by different teams with varied test data setup and other requirements

    +
  • +
  • +

    Restricted or costly to use for load and performance testing

    +
  • +
+
+
+

For instance, instead of virtualizing an entire database (and performing all associated test data management as well as setting up the database for every test session), you monitor how the application interacts with the database, then you emulate the related database behaviour (the SQL queries that are passed to the database, the corresponding result sets that are returned, and so forth).

+
+
+
+
Mocks, stubs and virtual services
+
+

The most commonly discussed categories of test doubles are mocks, stubs and virtual services.

+
+
+

Stub: a minimal implementation of an interface that normally returns hardcoded data that is tightly coupled to the test suite. It is most useful when the suite of tests is simple and keeping the hardcoded data in the stub is not an issue. Some stubs are handwritten; some can be generated by tools. A stub is normally written by a developer for personal use. It can be shared with testers, but wider sharing is typically limited by interoperability issues related to software platform and deployment infrastructure dependencies that were hardcoded. A common practice is when a stub works in-process directly with classes, methods, and functions for the unit, module, and acceptance testing. Some developers will say that a stub can also be primed, but you cannot verify an invocation on a stub. Stubs can also be communicating "over the wire", for example, HTTP, but some would argue that they should be called virtual services in that case.

+
+
+

Mock: a programmable interface observer, that verifies outputs against expectations defined by the test. It is frequently created using a third party library, for example in Java that is Mockito, JMock or WireMock. It is most useful when you have a large suite of tests and a stub will not be sufficient because each test needs a different data set up and maintaining them in a stub would be costly. The mock lets us keep the data set-up in the test. A mock is normally written by a developer for personal use but it can be shared with testers. However, wider sharing is typically limited by interoperability issues related to software platform and deployment infrastructure dependencies that were hardcoded. They are most often work-in-progress directly with classes, methods, and functions for a unit, module, and acceptance testing. Mock provides responses based on a given request satisfying predefined criteria (also called request or parameter matching). A mock also focuses on interactions rather than state so mocks are usually stateful. For example, you can verify how many times a given method was called or the order of calls made to a given object.

+
+
+

Virtual service: a test double often provided as a Software-as-a-Service (SaaS), is always called remotely, and is never working in-process directly with methods or functions. A virtual service is often created by recording traffic using one of the service virtualization platforms instead of building the interaction pattern from scratch based on interface or API documentation. A virtual service can be used to establish a common ground for teams to communicate and facilitate artefact sharing with other development teams as well as testing teams. A virtual service is called remotely (over HTTP, TCP, etc.) normally supports multiple protocols (e.g. HTTP, MQ, TCP, etc.), while a stub or mock normally supports only one. Sometimes virtual services will require users to authorize, especially when deployed in environments with enterprise-wide visibility. Service virtualization tools used to create virtual services will most often have user interfaces that allow less tech-savvy software testers to hit the ground running, before diving into the details of how specific protocols work. They are sometimes backed by a database. They can also simulate non-functional characteristics of systems such as response times or slow connections. You can sometimes find virtual services that provide a set of stubbed responses for given request criteria and pass every other request to a live backend system (partial stubbing). Similar to mocks, virtual services can have quite complex request matchers, that allow having one response returned for many different types of requests. Sometimes, virtual services simulate system behaviours by constructing parts of the response based on request attributes and data.

+
+
+

It is often difficult to say definitely which of the following categories a test double fits into. They should be treated as a spectrum rather than strict definitions.

+
+
+

Unresolved include directive in modules/ROOT/pages/master-mrchecker.adoc - include::Who-Is-MrChecker/Test-Framework-Modules/Web-API-Test-Module-How-plug-in-service-virtualization-into-Application-Under-Test.adoc[]

+
+
+
+
How to make a virtual asset
+
+

This can be done in four ways:

+
+
+
    +
  • +

    Record all traffic (Mappings and Responses) that comes through proxy - by UI

    +
  • +
  • +

    Record all traffic (Mappings and Responses) that comes through proxy - by Code

    +
  • +
  • +

    Create Mappings and Responses manually by text files

    +
  • +
  • +

    Create Mappings and Responses manually by code

    +
  • +
+
+
+
+
Record all traffic (Mappings and Responses) that comes through proxy - UI
+
+

Full article here Wiremock record-playback.

+
+
+

First, start an instance of WireMock running standalone. Once that’s running, visit the recorder UI page at http://localhost:8080/__admin/recorder (assuming you started WireMock on the default port of 8080).

+
+
+
+image77 +
+
+
+

Enter the URL you wish to record from in the target URL field and click the Record button. You can use http://example.mocklab.io to try it out.

+
+
+

Now you need to make a request through WireMock to the target API so that it can be recorded. If you’re using the example URL, you can generate a request using curl:

+
+
+
+
$ curl http://localhost:8080/recordables/123
+
+
+
+

Now click stop. You should see a message indicating that one stub was captured.

+
+
+

You should also see that a file has been created called something like recordables_123-40a93c4a-d378-4e07-8321-6158d5dbcb29.json under the mappings directory created when WireMock started up, and that a new mapping has appeared at http://localhost:8080/__admin/mappings.

+
+
+

Requesting the same URL again (possibly disabling your wifi first if you want a firm proof) will now serve the recorded result:

+
+
+
+
$ curl http://localhost:8080/recordables/123
+
+{
+"message": "Congratulations on your first recording!"
+}
+
+
+
+
+
Record all traffic (Mappings and Responses) that comes through proxy - by Code
+
+

An example of how such a record can be achieved

+
+
+
+
@Test
+public void startRecording() {
+
+    SnapshotRecordResult recordedMappings;
+
+    DriverManager.getDriverVirtualService()
+            .start();
+    DriverManager.getDriverVirtualService()
+            .startRecording("http://example.mocklab.io");
+    recordedMappings = DriverManager.getDriverVirtualService()
+            .stopRecording();
+
+    BFLogger.logDebug("Recorded messages: " + recordedMappings.toString());
+
+}
+
+
+
+
+
Create Mappings and Responses manually by text files
+
+

EMPTY

+
+
+
+
Create Mappings and Responses manually by code
+
+

Link to full file structure: REST_FarenheitToCelsiusMethod_Test.java

+
+
+
+
Start up Virtual Server
+
+
+
public void startVirtualServer() {
+
+    // Start Virtual Server
+    WireMockServer driverVirtualService = DriverManager.getDriverVirtualService();
+
+    // Get Virtual Server running http and https ports
+    int httpPort = driverVirtualService.port();
+    int httpsPort = driverVirtualService.httpsPort();
+
+    // Print is Virtual server running
+    BFLogger.logDebug("Is Virtual server running: " + driverVirtualService.isRunning());
+
+    String baseURI = "http://localhost";
+    endpointBaseUri = baseURI + ":" + httpPort;
+}
+
+
+
+
+
Plug in a virtual asset
+
+

REST_FarenheitToCelsiusMethod_Test.java

+
+
+
+
public void activateVirtualAsset() {
+    /*
+    * ----------
+    * Mock response. Map request with virtual asset from file
+    * -----------
+    */
+    BFLogger.logInfo("#1 Create Stub content message");
+    BFLogger.logInfo("#2 Add resource to virtual server");
+    String restResourceUrl = "/some/thing";
+    String restResponseBody = "{ \"FahrenheitToCelsiusResponse\":{\"FahrenheitToCelsiusResult\":37.7777777777778}}";
+
+    new StubREST_Builder //For active virtual server ...
+            .StubBuilder(restResourceUrl) //Activate mapping, for this Url AND
+            .setResponse(restResponseBody) //Send this response  AND
+            .setStatusCode(200) // With status code 200 FINALLY
+            .build(); //Set and save mapping.
+
+}
+
+
+
+

Link to full file structure: StubREST_Builder.java

+
+
+

Source link to How to create Stub.

+
+
+

StubREST_Builder.java

+
+
+
+
public class StubREST_Builder {
+
+    // required parameters
+    private String endpointURI;
+
+    // optional parameters
+    private int statusCode;
+
+    public String getEndpointURI() {
+        return endpointURI;
+    }
+
+    public int getStatusCode() {
+        return statusCode;
+    }
+
+    private StubREST_Builder(StubBuilder builder) {
+        this.endpointURI = builder.endpointURI;
+        this.statusCode = builder.statusCode;
+    }
+
+    // Builder Class
+    public static class StubBuilder {
+
+        // required parameters
+        private String endpointURI;
+
+        // optional parameters
+        private int     statusCode  = 200;
+        private String  response    = "{ \"message\": \"Hello\" }";
+
+        public StubBuilder(String endpointURI) {
+            this.endpointURI = endpointURI;
+        }
+
+        public StubBuilder setStatusCode(int statusCode) {
+            this.statusCode = statusCode;
+            return this;
+        }
+
+        public StubBuilder setResponse(String response) {
+            this.response = response;
+            return this;
+        }
+
+        public StubREST_Builder build() {
+
+            // GET
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            // Given that request with ...
+                            get(urlMatching(this.endpointURI))
+                                    .withHeader("Content-Type", equalTo(ContentType.JSON.toString()))
+                                    // Return given response ...
+                                    .willReturn(aResponse()
+                                            .withStatus(this.statusCode)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody(this.response)
+                                            .withTransformers("body-transformer")));
+
+            // POST
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            // Given that request with ...
+                            post(urlMatching(this.endpointURI))
+                                    .withHeader("Content-Type", equalTo(ContentType.JSON.toString()))
+                                    // Return given response ...
+                                    .willReturn(aResponse()
+                                            .withStatus(this.statusCode)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody(this.response)
+                                            .withTransformers("body-transformer")));
+
+            // PUT
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            // Given that request with ...
+                            put(urlMatching(this.endpointURI))
+                                    .withHeader("Content-Type", equalTo(ContentType.JSON.toString()))
+                                    // Return given response ...
+                                    .willReturn(aResponse()
+                                            .withStatus(this.statusCode)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody(this.response)
+                                            .withTransformers("body-transformer")));
+
+            // DELETE
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            // Given that request with ...
+                            delete(urlMatching(this.endpointURI))
+                                    .withHeader("Content-Type", equalTo(ContentType.JSON.toString()))
+                                    // Return given response ...
+                                    .willReturn(aResponse()
+                                            .withStatus(this.statusCode)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody(this.response)
+                                            .withTransformers("body-transformer")));
+
+            // CATCH any other requests
+            DriverManager.getDriverVirtualService()
+                    .givenThat(
+                            any(anyUrl())
+                                    .atPriority(10)
+                                    .willReturn(aResponse()
+                                            .withStatus(404)
+                                            .withHeader("Content-Type", ContentType.JSON.toString())
+                                            .withBody("{\"status\":\"Error\",\"message\":\"Endpoint not found\"}")
+                                            .withTransformers("body-transformer")));
+
+            return new StubREST_Builder(this);
+        }
+    }
+}
+
+
+
+
+
Start a virtual server
+
+

The following picture presents the process of executing Smoke Tests in a virtualized environment:

+
+
+
+image78 +
+
+
+
+
Install docker service
+
+

If docker is not already installed on machine (this should be checked during C2C creation), install docker, docker-compose, apache2-utils, openssl (You can use script to install docker & docker-compose OR refer to this post and add Alias for this machine <C2C_Alias_Name>):

+
+
+
    +
  • +

    run the script

    +
  • +
  • +

    sudo apt-get install -y apache2-utils

    +
  • +
+
+
+
+
Build a docker image
+
+

Dockerfile:

+
+
+
+
FROM docker.xxx.com/ubuntu:16.04
+MAINTAINER Maintainer Name "maintainer@email.address"
+LABEL name=ubuntu_java \
+           version=v1-8.0 \
+           base="ubuntu:16.04" \
+           build_date="03-22-2018" \
+           java="1.8.0_162" \
+           wiremock="2.14.0" \
+           description="Docker to use with Ubuntu, JAVA and WIREMOCK "
+
+##Update and install the applications needed
+COPY 80proxy /etc/apt/apt.conf.d/80proxy
+RUN apt-get update
+RUN apt-get install -y \
+            wget \
+            libfontconfig \
+            unzip \
+            zip \
+            ksh \
+            curl \
+            git
+
+COPY wgetrc /etc/wgetrc
+
+#Env parameters
+
+### JAVA PART ###
+#TO UPDATE:please verify url link to JDK http://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html
+##Download and install JAVA JDK8
+RUN mkdir /opt/jdk
+RUN wget -qq --header "Cookie: oraclelicense=accept-securebackup-cookie" http://download.oracle.com/otn-pub/java/jdk/8u162-b12/0da788060d494f509bf8624735fa2f1/jdk-8u162-linux-x64.tar.gz && tar -zxf jdk-8u162-linux-x64.tar.gz -C /opt/jdk && rm jdk-8u162-linux-x64.tar.gz && update-alternatives --install /usr/bin/javac javac /opt/jdk/jdk1.8.0_162/bin/javac 100 && java -version && chmod 755 -R /opt/jdk/jdk1.8.0_162/
+RUN java -version
+
+##Add user
+RUN useradd -u 29001 -g 100 srvpwiredev
+
+##Add app
+RUN mkdir -p -m 777 /app
+COPY wiremock-standalone-2.14.0.jar /app/wiremock-standalone-2.14.0.jar
+
+##Expose port
+EXPOSE 8080
+
+##Set workdir
+WORKDIR /App
+
+##Run app
+CMD java -jar /app/wiremock-standalone-2.14.0.jar
+
+
+
+

Execute the following steps with a specified version to build a docker image and push it to the repository :

+
+
+
+
## Build image
+sudo docker build -t docker.xxx.com/app/build/wiremock:v2.14.0 .
+
+## Push image
+sudo docker login docker.xxx.com
+sudo docker push docker.xxx.com/app/build/wiremock:v2.14.0
+
+
+
+
+
Run docker image
+
+

To run a docker image, execute the following command:

+
+
+
+
sudo docker run -td -p 8080:8080 -v /home/wiremock/repo/app/docker/QA/mappings:/app/mappings -v /home/wiremock/repo/app/docker/QA/__files:/app/__files --restart always docker.xxx.com/app/build/wiremock:v2.14.0
+
+
+
+

Where:

+
+
+

-p - publish a container’s port to the host

+
+
+

-v - bind mount a volume. WireMock server creates two directories under the current one: mappings and __files. It is necessary to mount directories with already created mappings and responses to make it work.

+
+
+

--restart always - restart policy to apply when a container exits

+
+
+

All of the parameters are described in: official docker documentation

+
+
+
+
Map requests with virtual assets
+
+

What is WireMock?

+
+
+

WireMock is an HTTP mock server. At its core it is a web server that can be primed to serve canned responses to particular requests (stubbing) and that captures incoming requests so that they can be checked later (verification). It also has an assortment of other useful features including record/playback of interactions with other APIs, injection of faults and delays, simulation of stateful behaviour.

+
+
+

Full documentation can be found under the following link: WireMock

+
+
+
+
Record / create virtual assets mappings
+
+

Record

+
+
+

WireMock can create stub mappings from requests it has received. Combined with its proxying feature, this allows you to "record" stub mappings from interaction with existing APIs.

+
+
+

Record and playback (Legacy): documentation

+
+
+
+
java -jar wiremock-standalone-2.16.0.jar --proxy-all="http://search.twitter.com" --record-mappings --verbose
+
+
+
+

Once it’s started and request is sent to it, it will be redirected to "http://search.twitter.com" and traffic (response) is saved to files in mappings and __files directories for further use.

+
+
+

Record and playback (New): documentation

+
+
+
+
Enable mappings in a virtual server
+
+

When the WireMock server starts, it creates two directories under the current one: mappings and __files. To create a stub, it is necessary to drop a file with a .json extension under mappings.

+
+
+

Run docker with mounted volumes

+
+
+

Mappings are in a repository. It is necessary to mount directories with already created mappings and responses to make it work:

+
+
+
+
sudo docker run -td -p 8080:8080 -v /home/wiremock/repo/app/docker/QA/mappings:/app/mappings -v /home/wiremock/repo/app/docker/QA/__files:/app/__files --restart always docker.xxx.com/app/build/wiremock:v2.14.0
+
+
+
+

The description of how to build and run docker is available under: Docker run command description

+
+
+

Recorded mappings

+
+
+

Recorded mappings are kept in the project repository.

+
+
+
+
Create a user and map them to docker user
+
+

To enable the connection from Jenkins to Virtual Server (C2C), it is necessary to create a user and map them to docker group user. It can be done using the following command:

+
+
+
+
adduser -G docker -m wiremock
+
+
+
+

To set the password for a wiremock user:

+
+
+
+
passwd wiremock
+
+
+
+
+
Create SSH private and public keys for a wiremock user
+
+

SSH keys serve as a means of identifying yourself to an SSH server using public-key cryptography and challenge-response authentication. One immediate advantage this method has over traditional passwords is that you can be authenticated by the server without ever having to send your password over the network.

+
+
+

To create an SSH key, log in as wiremock (previously created user).

+
+
+
+
su wiremock
+
+
+
+

The .ssh directory is not by default created below user home directory. Therefore, it is necessary to create it:

+
+
+
+
mkdir ~/.ssh
+
+
+
+

Now we can proceed with creating an RSA key using ssh-keygen (a tool for creating new authentication key pairs for SSH):

+
+
+
+
ssh-keygen -t rsa
+
+
+
+

A key should be created under /.ssh/id_rsa +Appending the public keys to authorized_keys:

+
+
+
+
wiremock@vc2crptXXXXXXXn:~/ssh$ cat id_rsa.pub >> authorized_keys
+
+
+
+
+
Install an SSH key in Jenkins
+
+

To add an SSH key to Jenkins, go to credentials in your job location. Choose the folder within credentials, then 'global credentials', 'Add credentials'. Fill in the fields. Finally, the entry should be created.

+
+
+
+
Build a Jenkins Groovy script
+
+

The description of how to use SSH Agent plugin in Jenkins pipeline can be found under: https://www.karthikeyan.tech/2017/09/ssh-agent-blue-ocean-via-jenkins.html

+
+
+

Example of use:

+
+
+
+
sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+     sh """
+         ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "docker container restart ${env.WIREMOCK_CONTAINER_NAME}"
+     """
+}
+
+
+
+

Where: env.WIREMOCK_CREDENTIALS is a credential id of previously created wiremock credentials. Now that it is present, we can execute commands on a remote machine, where in ssh command: +env.WIREMOCK_USERNAME - user name of user connected with configured private key +env.WIREMOCK_IP_ADDRESS - ip address of the machine where this user with this private key exists

+
+
+
+
Pull repository with virtual assets
+
+

To pull the repository on a remote machine, it is necessary to use the previously described SSH Agent plugin. An example of use:

+
+
+
+
sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+withCredentials([usernamePassword(credentialsId: env.STASH_CREDENTIALS, passwordVariable: 'PASS', usernameVariable: 'USER')]) {
+     sh """
+         ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "cd ~/${env.APPLICATION_DIRECTORY_WIREMOCK}/${env.PROJECT_HOME}; git fetch https://$USER:$PASS@${env.GIT_WITHOUT_HTTPS} ${env.GIT_BRANCH}; git reset --hard FETCH_HEAD; git clean -df"
+      """
+    }
+}
+
+
+
+

Where:

+
+
+

withCredentials allows various kinds of credentials (secrets) to be used in idiosyncratic ways. Each binding will define an environment variable active within the scope of the step. Then the necessary commands are executed:

+
+
+

cd …​ - command will change from current directory to the specified directory with git repository

+
+
+

git fetch …​ ;git reset …​ ;git clean …​ - pull from GIT branch. Git pull or checkout are not used here to prevent the situation with wrong coding between Mac OSX/Linux etc.

+
+
+

PLEASE remember that when using this script for the first time, the code from previous block should be changed to:

+
+
+
+
stage("ssh-agent"){
+        sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+            withCredentials([usernamePassword(credentialsId: env.STASH_CREDENTIALS, passwordVariable: 'PASS', usernameVariable: 'USER')]) {
+                sh """
+                        ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "cd ~/${env.APPLICATION_DIRECTORY_WIREMOCK} ;git clone --depth=1 --branch=develop https://$USER:$PASS@${env.GIT_WITHOUT_HTTPS}"
+                """
+            }
+        }
+}
+
+
+
+
+
Install an application with Smoke environment
+ +
+
+
Update properties settings file
+
+

New settings file is pushed to the repository. Example configuration:

+
+
+
+
...
+   <key>autocomplete</key>
+   <string>http://server:port</string>
+   <key>benefitsummary</key>
+   <string>http://server:port</string>
+   <key>checkscan</key>
+   <string>http://server:port</string>
+   <key>dpesb</key>
+   <string>http://server:port</string>
+...
+
+
+
+

Address of service (backend) should be changed to wiremock address as it is shown on listing to change the default route.

+
+
+
+
Build an application with updated properties file
+
+

New versions of application are prepared by Jenkins job.

+
+
+
+
Install an application on target properties file
+
+

Installation of an application is actually executed in a non-automated way using SeeTest environment.

+
+
+
+
UI tests
+ +
+
+
Run Jenkins job
+
+

Jenkinsfile:

+
+
+
+
// Jenkins parameters are overriding the properties below
+def properties = [
+
+          JENKINS_LABELS                                 : 'PWI_LINUX_DEV',
+          APPLICATION_FOLDER                             : 'app_dir',
+          PROJECT_HOME                                   : 'app_home_folder',
+
+          //WIREMOCK
+          WIREMOCK_CREDENTIALS                           : 'vc2crptXXXXXXn',
+          WIREMOCK_USERNAME                              : 'wiremock',
+          WIREMOCK_ADDRESS                               : 'http://vc2crptXXXXXXn.xxx.com:8080',
+          WIREMOCK_IP_ADDRESS                            : '10.196.67.XXX',
+          WIREMOCK_CONTAINER_NAME                        : 'wiremock',
+          APPLICATION_DIRECTORY_WIREMOCK                 : 'repo',
+
+          //GIT
+          GIT_CREDENTIALS                                : 'e47742cc-bb66-4321-2341-a2342er24f2',
+          GIT_BRANCH                                     : 'develop',
+          GIT_SSH                                        : 'ssh://git@stash.xxx.com/app/app.git',
+          GIT_HTTPS                                      : 'https://git@stash.xxx.com/app/app.git',
+
+          STASH_CREDENTIALS                              : 'e47742cc-bb66-4321-2341-a2342er24f2',
+
+
+          //DOCKER
+          ARTIFACTORY_USER_CREDENTIALS                   : 'e47742cc-bb66-4321-2341-a2342er24f2',
+          SEETEST_DOCKER_IMAGE                           : 'docker.xxx.com/project/images/app:v1-8.3',
+
+          //SEETEST_DOCKER_IMAGE
+          SEETEST_APPLICATION_FOLDER                     : 'seetest_dir',
+          SEETEST_PROJECT_HOME                           : 'Automated Scripts',
+          SEETEST_GIT_SSH                                : 'ssh://git@stash.xxx.com/pr/seetest_automation_cucumber.git',
+          SEETEST_GIT_BRANCH                             : 'develop',
+          SEETEST_GRID_USER_CREDENTIALS                  : 'e47742cc-bb66-4321-2341-a2342er24f2',
+          SEETEST_CUCUMBER_TAG                           : '@Virtualization',
+          SEETEST_CLOUD_NAME                             : 'Core Group',
+          SEETEST_IOS_VERSION                            : '11',
+          SEETEST_IOS_APP_URL                            : '',
+          SEETEST_INSTALL_APP                            : 'No',
+          SEETEST_APP_ENVIRONMENT                        : 'SmokeTests',
+          SEETEST_DEVICE_QUERY                           : '',
+]
+
+node(properties.JENKINS_LABELS) {
+    try {
+        prepareEnv(properties)
+        gitCheckout()
+        stageStartVirtualServer()
+        stageMapApiRequests()
+        stageInstallApplication()
+        stageUITests()
+     } catch(Exception ex) {
+        currentBuild.result = 'FAILURE'
+        error = 'Error' + ex
+     }
+}
+
+//== == == == == == == == == == == == == == == == == == END OF PIPELINE== == == == == == == == == == == == == == == == == == == == ==
+
+private void prepareEnv(properties) {
+    cleanWorkspace()
+    overrideProperties(properties)
+    setWorkspace()
+}
+
+private void gitCheckout() {
+    dir(env.APPLICATION_FOLDER) {
+        checkout([$class: 'GitSCM', branches: [[name: env.GIT_BRANCH]], doGenerateSubmoduleConfigurations: false, extensions: [[$class: 'CloneOption', depth: 0, noTags: false, reference: '', shallow: false, timeout: 50]], gitTool: 'Default', submoduleCfg: [], userRemoteConfigs: [[credentialsId: env.GIT_CREDENTIALS, url: env.GIT_SSH]]])
+     }
+}
+
+private void stageStartVirtualServer() {
+    def module = load "${env.SUBMODULES_DIR}/stageStartVirtualServer.groovy"
+    module()
+}
+
+private void stageMapApiRequests() {
+    def module = load "${env.SUBMODULES_DIR}/stageMapApiRequests.groovy"
+    module()
+}
+
+private void stageInstallApplication() {
+    def module = load "${env.SUBMODULES_DIR}/stageInstallApplication.groovy"
+    module()
+}
+
+private void stageUITests() {
+    def module = load "${env.SUBMODULES_DIR}/stageUITests.groovy"
+    module()
+}
+
+private void setWorkspace() {
+    String workspace = pwd()
+    env.APPLICATION_DIRECTORY = "/${env.APPLICATION_DIRECTORY}"
+    env.WORKSPACE_LOCAL = workspace + env.APPLICATION_DIRECTORY
+    env.SEETEST_PROJECT_HOME_ABSOLUTE_PATH = "${workspace}/${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}"
+    env.SUBMODULES_DIR = env.WORKSPACE_LOCAL + "/pipelines/SmokeTests.submodules"
+    env.COMMONS_DIR    = env.WORKSPACE_LOCAL + "/pipelines/commons"
+}
+
+/*
+    function overrides env values based on provided properties
+*/
+private void overrideProperties(properties) {
+    for (param in properties) {
+        if (env.(param.key) ==  null) {
+           echo "Adding parameter '${param.key}' with default value: '${param.value}'"
+           env.(param.key) = param.value
+        } else {
+           echo "Parameter '${param.key}' has overriden value: '${env.(param.key)}'"
+        }
+     }
+
+     echo sh(script: "env | sort", returnStdout: true)
+}
+
+private void cleanWorkspace() {
+   sh 'rm -rf *'
+}
+
+
+
+

stageStartVirtualServer.groovy:

+
+
+
+
def call () {
+    stage("Check virtual server") {
+        def statusCode
+
+        try {
+            def response = httpRequest "${env.WIREMOCK_ADDRESS}/__admin/"
+            statusCode = response.status
+        } catch(Exception ex) {
+            currentBuild.result = 'FAILURE'
+            error 'WireMock server is unreachable.'
+        }
+
+        if(statusCode !=200) {
+            currentBuild.result = 'FAILURE'
+            error "WireMock server is unreachable. Return code: ${statusCode}"
+        }
+    }
+}
+
+
+
+

stageMapApiRequests.groovy:

+
+
+
+
def call() {
+    stage("Map API requests with virtual assets") {
+        checkoutRepository()
+        restartWiremock()
+        checkWiremockStatus()
+     }
+}
+
+private checkoutRepository() {
+    extractHTTPSUrl()
+    sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+        withCredentials([usernamePassword(credentialsId: env.STASH_CREDENTIALS, passwordVariable: 'PASS', usernameVariable: 'USER')]) {
+            sh """
+                ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "cd ~/${env.APPLICATION_DIRECTORY_WIREMOCK}/${env.PROJECT_HOME}; git fetch https://$USER:$PASS@${env.GIT_WITHOUT_HTTPS} ${env.GIT_BRANCH}; git reset --hard FETCH_HEAD; git clean -df"
+             """
+         }
+     }
+}
+
+private restartWiremock() {
+    sshagent (credentials: [env.WIREMOCK_CREDENTIALS]) {
+            sh """
+                ssh -T -o StrictHostKeyChecking=no -l ${env.WIREMOCK_USERNAME} ${env.WIREMOCK_IP_ADDRESS} "docker container restart ${env.WIREMOCK_CONTAINER_NAME}"
+             """
+     }
+}
+
+private checkWiremockStatus() {
+    int wiremockStatusCheckCounter =6
+    int sleepTimeInSeconds = 10
+    def wiremockStatus
+
+    for (i = 0; i < wiremockStatusCheckCounter; i++) {
+         try {
+             wiremockStatus = getHttpRequestStatus()
+             echo "WireMock server status code: ${wiremockStatus}"
+         } catch(Exception ex) {
+             echo "Exception when checking connection to WireMock"
+         }
+         if(wiremockStatus ==  200) break
+         else sh "sleep ${sleepTimeInSeconds}"
+      }
+
+      if(wiremockStatus != 200) {
+          currentBuild.result = 'FAILURE'
+          error "WireMock server is unreachable. Return code: ${wiremockStatus}"
+      }
+}
+
+private def getHttpRequestStatus() {
+    def response = httpRequest "${env.WIREMOCK_ADDRESS}/__admin"
+    return response.status
+}
+
+private extractHTTPSUrl() {
+    env.GIT_WITHOUT_HTTPS = env.GIT_HTTPS.replace("https://", "")
+}
+
+return this
+
+
+
+

stageInstallApplication.groovy:

+
+
+
+
def call() {
+    stage('Install application with smoke tests environment') {
+        dir(env.SEETEST_APPLICATION_FOLDER) {
+            checkout([$class: 'GitSCM', branches: [[name: env.SEETEST_GIT_BRANCH]], doGenerateSubmoduleConfigurations: false, extensions: [], gitTool: 'default', submoduleCfg: [], userRemoteConfigs: [[credentialsId: env.GIT_CREDENTIALS, url: env.SEETEST_GIT_SSH]]])
+        }
+     }
+}
+
+return this
+
+
+
+

stageUITests.groovy:

+
+
+
+
def call() {
+    stage('UI tests') {
+        def utils = load "${env.SUBMODULES_DIR}/utils.groovy"
+
+        try {
+            utils.generateUserIDVariable(); //Generate USER_ID and USER_GROUP
+            docker.image(env.SEETEST_DOCKER_IMAGE).inside("-u ${env.USER_ID}:${env.USER_GROUP}") {
+                withCredentials([[$class: 'UsernamePasswordMultiBinding', credentialsId: "${env.ARTIFACTORY_USER_CREDENTIALS}", passwordVariable: 'ARTIFACTORY_PASSWORD', usernameVariable: 'ARTIFACTORY_USERNAME']]) {
+                    executeTests()
+                    compressArtifacts()
+                    publishJUnitTestResultReport()
+                    archiveArtifacts()
+                    publishHTMLReports()
+                    publishCucumberReports()
+                 }
+             }
+        } catch (Exception exc) {
+            throw exc
+        }
+   }
+}
+
+private executeTests() {
+    withCredentials([usernamePassword(credentialsId: env.SEETEST_GRID_USER_CREDENTIALS, passwordVariable: 'GRID_USER_PASSWORD', usernameVariable: 'GRID_USER_NAME')]) {
+            sh """
+                cd ${env.SEETEST_PROJECT_HOME_ABSOLUTE_PATH}
+                mvn clean test -B -Ddriver="grid" -Dtags="${env.SEETEST_CUCUMBER_TAG}" -DcloudName="${env.SEETEST_CLOUD_NAME}" -DdeviceQuery="${env.SEETEST_DEVICE_QUERY}" -DgridUser="${GRID_USER_NAME}" -DgridPassword="${GRID_USER_PASSWORD}" -Dinstall="${env.SEETEST_INSTALL_APP}" -DiosUrl="${env.SEETEST_IOS_APP_URL}" -DdeviceType="iPhone" -DiosVersion="${env.SEETEST_IOS_VERSION}" -DparallelMode="allonall" -Denv="${env.SEETEST_APP_ENVIRONMENT}" site
+             """
+     }
+}
+
+private compressArtifacts() {
+    echo "Compressing artifacts from /target/site"
+    sh """
+        zip -r allure_report.zip **/${env.SEETEST_PROJECT_HOME}/target/site
+    """
+}
+
+private publishJUnitTestResultReport() {
+    echo "Publishing JUnit reports from ${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/surefire-reports/junitreporters/*.xml"
+
+    try {
+        junit "${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/surefire-reports/junitreporters/*.xml"
+    } catch(e) {
+        echo("No JUnit report found")
+    }
+}
+
+private archiveArtifacts() {
+    echo "Archiving artifacts"
+
+    try {
+        archiveArtifacts allowEmptyArchive: true, artifacts: "**/allure_report.zip"
+    } catch(e) {
+        echo("No artifacts found")
+    }
+}
+
+private publishHTMLReports() {
+    echo "Publishing HTML reports from ${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/site/allure-maven-plugin"
+
+    try {
+        publishHTML([allowMissing: false, alwaysLinkToLastBuild: true, keepAll: true, reportDir: "${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/site/allure-maven-plugin", reportFiles: 'index.html', reportName: 'Allure report', reportTitles: 'Allure report'])
+    } catch(e) {
+        echo("No artifacts found")
+    }
+}
+
+private publishCucumberReports() {
+    echo "Publishing Cucumber reports from ${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/cucumber-parallel/*.json"
+
+    try {
+        step([$class: 'CucumberReportPublisher', fileExcludePattern: '', fileIncludePattern: "${env.SEETEST_APPLICATION_FOLDER}/${env.SEETEST_PROJECT_HOME}/target/cucumber-parallel/*.json", ignoreFailedTests: false, jenkinsBasePath: '', jsonReportDirectory: '', missingFails: false, parallelTesting: false, pendingFails: false, skippedFails: false, undefinedFails: false])
+    } catch(e) {
+        echo("No Cucumber report found")
+    }
+}
+
+return this
+
+
+
+

Configuration

+
+
+

It is possible to configure Jenkins job in two ways. First one is to edit the Jenkinsfile. All of the properties are in properties collection as below:

+
+
+
+
def properties = [
+
+          JENKINS_LABELS                                : 'PWI_LINUX_DEV'
+
+          ...
+
+          //Docker
+          ARTIFACTORY_USER_CREDENTIALS                  : 'ba2e4f46-56f1-4467-ae97-17b356d6s643',
+          SEETEST_DOCKER_IMAGE                          : 'docker.XXX.com/app/base-images/seetest:v1-8.3',
+
+          //SeeTest
+          SEETEST_APPLICATION_FOLDER                    : 'seetest_dit',
+          SEETEST_PROJECT_HOME                          : 'Automated_Scripts',
+          SEETEST_GIT_SSH                               : 'ssh://stash.xxx.com/app/seetest_automation_cucumber.git',
+          SEETEST_GIT_BRANCH                            : 'develop',
+
+          ...
+]
+
+
+
+

The second way is to add properties in 'Configure job'. All of the properties there override properties from the Jenkinsfile (they have the highest priority). They can then be set during the 'Build with Parameters' process.

+
+
+

Reports

+
+
+

After a job execution, 'Allure report' and 'Cucumber-JVM' reports should be visible. If any tests fail, you can check on which screen they failed and why (a screenshot of the failure is attached).

+
+
+
+
+
+

Security Module

+
+ +
+
Security Test Module
+ +
+
+
What is Security
+
+

Application Security is concerned with Integrity, Availability and Confidentiality of data processed, stored and transferred by the application.

+
+
+

Application Security is a cross-cutting concern which touches every aspect of the Software Development Lifecycle. You can introduce some SQL injection flaws in your application and make it exploitable, but you can also expose your secrets (which will have nothing to do with code itself) due to poor secret management process, and fail as well.

+
+
+

Because of this and many other reasons, not every aspect of security can be automatically verified. Manual tests and audits will still be needed. Nevertheless, every security requirement which is automatically verified will prevent code degeneration and misconfiguration in a continuous manner.

+
+
+
+
How to test Security
+
+

Security tests can be performed in many different ways, such as:

+
+
+
    +
  • +

    Static Code Analysis - improves the security by (usually) automated code review. A good way to search for vulnerabilities, which are 'obvious' on the code level ( e.g. SQL injection). The downside of this approach is that professional tools to perform such scans are very expensive and still produce many false positives.

    +
  • +
  • +

    Dynamic Code Analysis - tests are run against a working environment. A good way to search for vulnerabilities, which require all client- and server-side components to be present and running (like e.g. Cross-Site Scripting). Tests are performed in a semi-automated manner and require a proxy tool (like e.g. OWASP ZAP)

    +
  • +
  • +

    Unit tests - self-written and self-maintained tests. They usually work on the HTTP/REST level (this defines the trust boundary between the client and the server) and run against a working environment. Unit tests are best suited for verifying requirements which involve business knowledge of the system or which assure secure configuration on the HTTP level.

    +
  • +
+
+
+

In the current release of the Security Module, the main focus will be Unit Tests.

+
+
+

Although the most common choice of environment for running security tests on will be integration(the environment offers the right stability and should mirror the production closely), it is not uncommon for some security tests to run on production as well. This is done for e.g. TLS configuration testing to ensure proper configuration of the most relevant environment in a continuous manner.

+
+
+
+
+
+

Database Module

+
+ +
+
Database Test Module
+ +
+
+
What is MrChecker Database Test Module
+
+

Database module is based on Object-Relational Mapping programming technique. All functionalities are built using Java Persistence API but examples use Hibernate as a main provider.

+
+
+
+
JPA structure schema
+
+

This module was written to allow the use of any JPA provider. The structure is represented in the schema below.

+
+
+
+image3 +
+
+
+
+
ORM representation applied in Framework
+
+
+image4 +
+
+
+
+
+
+

Mobile Test Module

+
+ +
+
Mobile Test Module
+ +
+
+
What is MrChecker E2E Mobile Test Module
+
+

MrChecker E2E Mobile test Module is a suitable solution for testing Remote Web Design, Mobile Browsers and application. +A user can write tests suitable for all mobile browsers with a full range of resolution. The way of working is similar to Selenium and uses the same rules and patterns as the Web Driver. For more information please look in the Selenium test module.

+
+
+
+
What is Page Object Architecture
+
+

Creating Selenium test cases can result in an unmaintainable project. One of the reasons is that too much duplicated code is used. Duplicated code could be caused by duplicated functionality, and this will result in duplicated usage of locators. The disadvantage of duplicated code is that the project is less maintainable. If some locator changes, you have to walk through the whole test code to adjust locators where necessary. By using the page object model we can make non-brittle test code and reduce or eliminate duplicate test code. Besides that, it improves the readability and allows us to create interactive documentation. Last but not least, we can create tests with fewer keystrokes. An implementation of the page object model can be achieved by separating the abstraction of the test object and the test scripts.

+
+
+
+
Page Object Pattern
+
+
+Pom +
+
+
+
+
Mobile Structure
+
+

It is built on top of the Appium library. +Appium is an open-source tool for automating native, mobile web, and hybrid applications on iOS mobile, Android mobile, and Windows desktop platforms. Native apps are those written using iOS, Android, or Windows SDKs. Mobile web apps are web apps accessed using a mobile browser (Appium supports Safari on iOS and Chrome or the built-in 'Browser' app on Android). Hybrid apps have a wrapper around a "webview" - a native control that enables interaction with web content.

+
+
+
+
Run on different mobile devices
+
+

To execute each test with chosen connected mobile devices, it is required to use specific arguments in Run configuration.

+
+
+
+image001 +
+
+
+
+image002 +
+
+
+

Default supported arguments in MrChecker:

+
+
+
    +
  • +

    deviceUrl - http url to Appium Server, default value "http://127.0.0.1:4723"

    +
  • +
  • +

    automationName - which automation engine to use , default value "Appium"

    +
  • +
  • +

    platformName - which mobile OS platform to use , default value "Appium"

    +
  • +
  • +

    platformVersion - mobile OS version , default value ""

    +
  • +
  • +

    deviceName - the kind of mobile device or emulator to use , default value "Android Emulator"

    +
  • +
  • +

    app - the absolute local path or remote http URL to a .ipa file (IOS), .app folder (IOS Simulator), .apk file (Android) or .apks file (Android App Bundle), or a .zip file, default value "."

    +
  • +
  • +

    browserName - name of mobile web browser to automate. Should be an empty string if automating an app instead, default value ""

    +
  • +
  • +

    newCommandTimeout - how long (in seconds) Appium will wait for a new command from the client before assuming the client quit and ending the session, default value "4000"

    +
  • +
  • +

    deviceOptions - any other capabilities not covered in essential ones, default value none

    +
  • +
+
+
+

Example usage:

+
+
+
+
mvn clean test -Dtest=MyTest -DdeviceUrl="http://192.168.0.1:1234" -DplatformName="iOS" -DdeviceName="iPhone Simulator" -Dapp=".\\Simple_App.ipa"
+
+
+
+
+
mvn clean test -Dtest=MyTest -Dapp=".\\Simple_App.apk -DdeviceOptions="orientation=LANDSCAPE;appActivity=MainActivity;chromeOptions=['--disable-popup-blocking']"
+
+
+
+

Check also:

+
+ + + +
+

+ +Full list of Generic Capabilities

+
+
+

+ +List of additional capabilities for Android

+
+
+

+ +List of additional capabilities for iOS

+
+ +
+
+
How to use mobile test Module
+
+
    +
  1. +

    Install IDE with MrChecker

    +
  2. +
  3. +

    Switch branch to 'feature/Create-mobile-module-#213' - by default it is 'develop'

    +
  4. +
+
+
+
+
git checkout feature/Create-mobile-module-#213
+
+
+
+
    +
  1. +

    Install and setup git checkout feature/Create-mobile-module-#213[Appium Server]

    +
  2. +
  3. +

    Connect to local Device by Appium Server

    +
    +
    +
     1.
    +Install Android SDK    https://developer.android.com/studio/index.html#command-tools    ->
    +	2.
    +Download Platform and Build-Tools  (Android versions - >    https://en.wikipedia.org/wiki/Android_version_history   )
    +* sdkmanager "platform-tools" "platforms;android-19"
    +* sdkmanager "build-tools;19.0.0"
    +* copy from /build-tools  file "aapt.exe"  to /platform-tools
    +	3.
    +Set Environment:
    +ANDROID_SDK_ROOT = D:\sdk-tools-windows-4333796
    +PATH =  %PATH%; %ANDROID_SDK_ROOT%
    +	4.
    +Start Appium Server
    +	5.
    +Start Session in Appium Server, capabilities
    +{
    +  "platformName": "Android",
    +            "deviceName": "Android Emulator",
    +            "app": "D:\\Repo\\mrchecker-source\\mrchecker-framework-modules\\mrchecker-mobile-module\\src\\test\\resources\\Simple App_v2.0.1_apkpure.com.apk",
    +            "automationName": "UiAutomator1"
    +            }
    +
    +
    +
  4. +
  5. +

    Run Mobile tests with runtime parameters. +List of supported parameters could be found here

    +
    +
      +
    • +

      From command line (as in Jenkins):

      +
    • +
    +
    +
  6. +
+
+
+
+
mvn clean compile test  -Dapp=".\\Simple_App_v2.0.1_apkpure.com.apk" -DautomationName="UiAutomator1" -Dthread.count=1
+
+
+
+
    +
  • +

    from IDE:

    +
  • +
+
+
+
+image00100 +
+
+
+
+image00101 +
+
+
+
+
+
+

DevOps Test Module

+
+ +
+
DevOPS Test Module
+ +
+
+
What does DevOps mean for us?
+
+

DevOps consists of a mixture of three key components in a technical project:

+
+
+
    +
  • +

    People’s skills and mindset

    +
  • +
  • +

    Processes

    +
  • +
  • +

    Tools

    +
  • +
+
+
+

Using E2E MrChecker Test Framework it is possible to cover the majority of these areas.

+
+
+
+
QA Team Goal
+
+

For QA engineers, it is essential to take care of the product code quality.

+
+
+

Therefore, we have to understand, that a test case is also code which has to be validated against quality gates. As a result, we must test our developed test case like it is done during standard Software Delivery Life Cycle.

+
+
+
+
Well rounded test case production process
+
+
    +
  • +

    How do we define top-notch test cases development process in E2E MrChecker Test Framework

    +
  • +
+
+
+
+image5 +
+
+
+
+
Continuous Integration (CI) and Continuous Delivery (CD)
+
+ +
+
+
+image6 +
+
+
+
+
What should you receive from this DevOps module
+
+
+image7 +
+
+
+
+
What will you gain with our DevOps module
+
+

The CI procedure has been divided into transparent modules. This solution makes configuration and maintenance very easy because everyone is able to manage versions and customize the configuration independently for each module. A separate security module ensures the protection of your credentials and assigned access roles regardless of changes in other modules.

+
+
+
+image8 +
+
+
+

Your CI process will be matched to the current project. You can easily go back to the previous configuration, test a new one or move a selected one to other projects.

+
+
+
+image9 +
+
+
+

DevOps module supports a delivery model in which executors are made available to the user as needed. It has such advantages as:

+
+
+
    +
  • +

    Saving computing resources

    +
  • +
  • +

    Eliminating guessing on your infrastructure capacity needs

    +
  • +
  • +

    Not spending time on running and maintaining additional executors +== How to build this DevOps module

    +
  • +
+
+
+

Once you have implemented the module, you can learn more about it here:

+
+
+ +
+
+
+
Continuous Integration
+
+

Embrace quality with Continuous Integration while you produce test case(s).

+
+
+
+
Overview
+
+

There are two ways to set up your Continuous Integration environment:

+
+
+
    +
  1. +

    Create a Jenkins instance from scratch (e.g. by using the Jenkins Docker image)

    +
    +

    Using a clean Jenkins instance requires the installation of additional plugins. The plugins required and their versions can be found on this page.

    +
    +
  2. +
  3. +

    Use the pre-configured custom Docker image provided by us

    +
    +

    No more additional configuration is required (but optional) using this custom Docker image. Additionally, this Jenkins setup allows dynamical scaling across multiple machines and even cloud (AWS, Azure, Google Cloud etc.).

    +
    +
  4. +
+
+
+
+
Jenkins Overview
+
+

Jenkins is an Open Source Continuous Integration Tool. It allows the user to create automated build jobs which will run remotely on so called Jenkins Slaves. A build job can be triggered by several events, for example on new pull request on specified repositories or timed (e.g. at midnight).

+
+
+
+
+Jenkins Configuration
+
+

Tests created by using the testing framework can easily be implemented on a Jenkins instance. The following chapter will describe such a job configuration. If you’re running your own Jenkins instance, you may have to install additional plugins listed on the page Jenkins Plugins for a trouble-free integration of your tests.

+
+
+
+
== Initial Configuration
+
+

The test job is configured as a so-called parameterized job. This means, after starting the job, parameters can be specified, which will then be used in the build process. In this case, branch and testname will be expected when starting the job. These parameters specify which branch in the code repository should be checked out (possibly feature branch) and the name of the test that should be executed.

+
+
+
+image79 +
+
+
+
+
== Build Process Configuration
+
+
    +
  • +

    The first step inside the build process configuration is to get the author of the commit that was made. The mail will be extracted and gets stored in a file called build.properties. This way, the author can be notified if the build fails.

    +
    +
    +image80 +
    +
    +
  • +
  • +

    Next up, Maven will be used to check if the code can be compiled, without running any tests.

    +
    +
    +image81 +
    +
    +
    +

    After making sure that the code can be compiled, the actual tests will be executed.

    +
    +
    +
    +image82 +
    +
    +
  • +
  • +

    Finally, reports will be generated.

    +
    +
    +image83 +
    +
    +
  • +
+
+
+
+
== Post Build Configuration
+
+
    +
  • +

    At first, the results will be imported to the Allure System

    +
    +
    +image84 +
    +
    +
  • +
  • +

    JUnit test results will be reported as well. Using this step, the test result trend graph will be displayed on the Jenkins job overview.

    +
    +
    +image85 +
    +
    +
  • +
  • +

    Finally, an E-Mail will be sent to the previously extracted author of the commit.

    +
    +
    +image86 +
    +
    +
  • +
+
+
+
+
Using the Pre-Configured Custom Docker Image
+
+

If you are starting a new Jenkins instance for your tests, we’d suggest using the pre-configured Docker image. This image already contains all the configurations and additional features.

+
+
+

The configurations are e.g. Plugins and Pre-Installed job setup samples. This way, you don’t have to set up the entire CI-Environment from the ground up.

+
+
+

Additional features from this docker image allow dynamic creation and deletion of Jenkins slaves, by creating Docker containers. Also, Cloud Solutions can be implemented to allow wide-spread load balancing.

+
+
+
+
Continuous Delivery
+
+

Include quality with Continuous Delivery during product release.

+
+
+
+image87 +
+
+
+
+
Overview
+
+

CD, from the Jenkins point of view, does not differ much from the Continuous Integration setup.

+
+
+
+
Jenkins Overview
+
+

Please use the same Jenkins settings for the Jenkins CD setup as for CI (link). The only differences are:

+
+
+
    +
  • +

    What type of test you will execute. Before, we have been choosing test case(s), now we will choose test suite(s)

    +
  • +
  • +

    Who will trigger the given Smoke/Integration/Performance job

    +
  • +
  • +

    What is the name of the official branch. This branch should always be used in every CD execution. It will be either master or develop.

    +
  • +
+
+
+
+
Jenkins for Smoke Tests
+
+

In the $TESTNAME variable, where we input the test name (link), please input the name of a test suite assembled from tests tagged as smoke tests (link), thus running all the smoke tests.

+
+
+
+
Jenkins for Performance Tests
+
+

Under construction - added when WebAPI module is included.

+
+
+
+
Pipeline structure
+ +
+
+
Pipeline configuration:
+
+

The default interaction with Jenkins required manual jobs. This keeps configuration of a job in Jenkins separate from source code. With Pipeline plugin users can implement a pipeline procedure in Jenkinsfile and store it in repository with other code. This approach is used in Mr Checker framework. More info: https://jenkins.io/solutions/pipeline/

+
+
+

Our CI & CD processes are divided into a few separate files: Jenkins_node.groovy is the file to manage all processes. It defines all operations executed on a Jenkins node, so all code in this file is closed in node closure. Workflow in Jenkinsfile:

+
+
+
    +
  • +

    Read all parameters from a Jenkins job

    +
  • +
  • +

    Execute stage to prepare the environment

    +
  • +
  • +

    Execute git pull command

    +
  • +
  • +

    Set Jenkins job description

    +
  • +
  • +

    Execute compilation of the project in a special prepared docker container

    +
  • +
  • +

    Execute unit tests

    +
  • +
  • +

    Execute integration tests

    +
  • +
  • +

    Deploy artifacts to a local repository

    +
  • +
  • +

    Deploy artifacts to an external repository (nexus/artifactory)

    +
  • +
+
+
+

Not all the steps must be present in the Jenkins files. This should be configured for particular job requirements.

+
+
+
+
Description of stages:
+ +
+
+
Stage “Prepare environment”
+
+

First thing to do in this stage is overwriting properties loaded from Jenkins job. It is defined in “overrideProperties” function. The next function, “setJenkinsJobVariables” defines environment variables such as :

+
+
+
    +
  • +

    JOB_NAME_UPSTREAM

    +
  • +
  • +

    BUILD_DISPLAY_NAME_UPSTREAM

    +
  • +
  • +

    BUILD_URL_UPSTREAM

    +
  • +
  • +

    GIT_CREDENTIALS

    +
  • +
  • +

    JENKINS_CREDENTIALS

    +
  • +
+
+
+

The last function in the stage – “setWorkspace” – creates an environment variable with the path to the local workspace. This is required because, when using the pipeline plugin, Jenkins does not create the WORKSPACE env variable.

+
+
+
+
Stage "Git pull"
+
+

It pulls sources from the repository and loads “git pull” file which contains additional methods:

+
+
+
    +
  • +

    setGitAuthor – setting properties about git author to the file “build.properties” and loading created file

    +
  • +
  • +

    tryMergeWithBranch – checking if actual branch can be merged with default main branch

    +
  • +
+
+
+
+
Stage “Build compile”
+
+

Verify with maven that code builds without errors

+
+
+
+
Stage “Unit test”
+
+

Execute unit tests with mvn surefire test and publish reports in junit and allure format

+
+
+
+
Stage “Integration test”
+
+

Execute integration tests with mvn surefire test and publish reports in junit and allure format

+
+
+
+
Stage “Deploy – local repo”
+
+

Archive artifacts as a jar file in the local repository

+
+
+
+
Stage “Deploy – nexus repo”
+
+

Deploy to the external repository with maven release deploy command with credentials stored in Jenkins machine. Additional files:

+
+
+
    +
  • +

    mailSender.groovy – contains methods for sending mail with generated content

    +
  • +
  • +

    stashNotification.groovy – send job status for bitbucket by a curl command

    +
  • +
  • +

    utils.groovy - contains additional functions to load properties, files and generate additional data

    +
  • +
+
+
+
+
Selenium Grid
+ +
+
+
What is Selenium Grid
+
+

Selenium Grid allows running web/mobile browsers test cases to fulfill basic factors, such as:

+
+
+
    +
  • +

    Independent infrastructure, similar to end-users'

    +
  • +
  • +

    Scalable infrastructure (~50 simultaneous sessions)

    +
  • +
  • +

    Huge variety of web browsers (from mobile to desktop)

    +
  • +
  • +

    Continuous Integration and Continuous Delivery process

    +
  • +
  • +

    Supporting multi-type programming languages (java, javascript, python, …​).

    +
  • +
+
+
+
+image88 +
+
+
+

On a daily basis, a test automation engineer uses their local environments for test case execution/development. However, a created browser test case has to be able to run on any infrastructure. Selenium Grid enables this portability.

+
+
+
+
Selenium Grid Structure
+
+
+image89 +
+
+
+

Full documentation of Selenium Grid can be found here and here.

+
+
+

'Vanilla flavour' Selenium Grid is based on two, not very complicated ingredients:

+
+
+
    +
  1. +

    Selenium Hub - as one machine, accepting connections to grid from test cases executors. It also plays a managerial role in connection to/from Selenium Nodes

    +
  2. +
  3. +

    Selenium Node - from one to many machines, where on each machine a browser used during test case execution is installed.

    +
  4. +
+
+
+
+
How to setup
+
+

There are two options of Selenium Grid setup:

+
+
+
    +
  • +

    Classic, static solution - link

    +
  • +
  • +

    Cloud, scalable solution - link

    +
  • +
+
+
+

Advantages and disadvantages of both solutions:

+
+
+
+image90 +
+
+
+
+
How to use Selenium Grid with E2E Mr Checker Test Frameworks
+
+

Run the following command either in Eclipse or in Jenkins:

+
+
+
+
> mvn test -Dtest=com.capgemini.ntc.selenium.tests.samples.resolutions.ResolutionTest -DseleniumGrid="http://10.40.232.61:4444/wd/hub" -Dos=LINUX -Dbrowser=chrome
+
+
+
+

As a result of this command:

+
+
+
    +
  • +

    -Dtest=com.capgemini.ntc.selenium.features.samples.resolutions.ResolutionTest - name of test case to execute

    +
  • +
  • +

    -DseleniumGrid="http://10.40.232.61:4444/wd/hub" - IP address of Selenium Hub

    +
  • +
  • +

    -Dos=LINUX - what operating system must be assumed during test case execution

    +
  • +
  • +

    -Dbrowser=chrome - what type of browser will be used during test case execution

    +
  • +
+
+
+
+image91 +
+
+
+
+
List of Jenkins Plugins
+
+

|===

+
+
+

|Plugin Name +|Version

+
+
+

|blueocean-github-pipeline +|1.1.4

+
+
+

|blueocean-display-url +|2.0

+
+
+

|blueocean +|1.1.4

+
+
+

|workflow-support +|2.14

+
+
+

|workflow-api +|2.18

+
+
+

|plain-credentials +|1.4

+
+
+

|pipeline-stage-tags-metadata +|1.1.8

+
+
+

|credentials-binding +|1.12

+
+
+

|git +|3.5.1

+
+
+

|maven-plugin +|2.17

+
+
+

|workflow-durable-task-step +|2.12

+
+
+

|job-dsl +|1.64

+
+
+

|git-server +|1.7

+
+
+

|windows-slaves +|1.3.1

+
+
+

|github +|1.27.0

+
+
+

|blueocean-personalization +|1.1.4

+
+
+

|jackson2-api +|2.7.3

+
+
+

|momentjs +|1.1.1

+
+
+

|workflow-basic-steps +|2.6

+
+
+

|workflow-aggregator +|2.5

+
+
+

|blueocean-rest +|1.1.4

+
+
+

|gradle +|1.27.1

+
+
+

|pipeline-maven +|3.0.0

+
+
+

|blueocean-pipeline-editor +|0.2.0

+
+
+

|durable-task +|1.14

+
+
+

|scm-api +|2.2.2

+
+
+

|pipeline-model-api +|1.1.8

+
+
+

|config-file-provider +|2.16.3

+
+
+

|github-api +|1.85.1

+
+
+

|pam-auth +|1.3

+
+
+

|workflow-cps-global-lib +|2.8

+
+
+

|github-organization-folder +|1.6

+
+
+

|workflow-job +|2.12.1

+
+
+

|variant +|1.1

+
+
+

|git-client +|2.5.0

+
+
+

|sse-gateway +|1.15

+
+
+

|script-security +|1.29.1

+
+
+

|token-macro +|2.1

+
+
+

|jquery-detached +|1.2.1

+
+
+

|blueocean-web +|1.1.4

+
+
+

|timestamper +|1.8.8

+
+
+

|greenballs +|1.15

+
+
+

|handlebars +|1.1.1

+
+
+

|blueocean-jwt +|1.1.4

+
+
+

|pipeline-stage-view +|2.8

+
+
+

|blueocean-i18n +|1.1.4

+
+
+

|blueocean-git-pipeline +|1.1.4

+
+
+

|ace-editor +|1.1

+
+
+

|pipeline-stage-step +|2.2

+
+
+

|email-ext +|2.58

+
+
+

|envinject-api +|1.2

+
+
+

|role-strategy +|2.5.1

+
+
+

|structs +|1.9

+
+
+

|locale +|1.2

+
+
+

|docker-workflow +|1.13

+
+
+

|ssh-credentials +|1.13

+
+
+

|blueocean-pipeline-scm-api +|1.1.4

+
+
+

|metrics +|3.1.2.10

+
+
+

|external-monitor-job +|1.7

+
+
+

|junit +|1.21

+
+
+

|github-branch-source +|2.0.6

+
+
+

|blueocean-config +|1.1.4

+
+
+

|cucumber-reports +|3.8.0

+
+
+

|pipeline-model-declarative-agent +|1.1.1

+
+
+

|blueocean-dashboard +|1.1.4

+
+
+

|subversion +|2.9

+
+
+

|blueocean-autofavorite +|1.0.0

+
+
+

|pipeline-rest-api +|2.8

+
+
+

|pipeline-input-step +|2.7

+
+
+

|matrix-project +|1.11

+
+
+

|pipeline-github-lib +|1.0

+
+
+

|workflow-multibranch +|2.16

+
+
+

|docker-plugin +|0.16.2

+
+
+

|resource-disposer +|0.6

+
+
+

|icon-shim +|2.0.3

+
+
+

|workflow-step-api +|2.12

+
+
+

|blueocean-events +|1.1.4

+
+
+

|workflow-scm-step +|2.6

+
+
+

|display-url-api +|2.0

+
+
+

|favorite +|2.3.0

+
+
+

|build-timeout +|1.18

+
+
+

|mapdb-api +|1.0.9.0

+
+
+

|pipeline-build-step +|2.5.1

+
+
+

|antisamy-markup-formatter +|1.5

+
+
+

|javadoc +|1.4

+
+
+

|blueocean-commons +|1.1.4

+
+
+

|cloudbees-folder +|6.1.2

+
+
+

|ssh-slaves +|1.20

+
+
+

|pubsub-light +|1.10

+
+
+

|pipeline-graph-analysis +|1.4

+
+
+

|allure-jenkins-plugin +|2.23

+
+
+

|mailer +|1.20

+
+
+

|ws-cleanup +|0.33

+
+
+

|authentication-tokens +|1.3

+
+
+

|blueocean-pipeline-api-impl +|1.1.4

+
+
+

|ldap +|1.16

+
+
+

|docker-commons +|1.8

+
+
+

|branch-api +|2.0.10

+
+
+

|workflow-cps +|2.36.1

+
+
+

|pipeline-model-definition +|1.1.8

+
+
+

|blueocean-rest-impl +|1.1.4

+
+
+

|ant +|1.7

+
+
+

|credentials +|2.1.14

+
+
+

|matrix-auth +|1.7

+
+
+

|pipeline-model-extensions +|1.1.8

+
+
+

|pipeline-milestone-step +|1.3.1

+
+
+

|jclouds-jenkins +|2.14

+
+
+

|bouncycastle-api +|2.16.1

+
+
+

|===

+
+
+
+
What is Docker
+
+

Docker is an open source software platform to create, deploy and manage virtualized application containers on a common operating system (OS), with an ecosystem of allied tools.

+
+
+
+
Where do we use Docker
+
+

DevOps module consists of Docker images

+
+
+
    +
  1. +

    Jenkins image

    +
  2. +
  3. +

    Jenkins job image

    +
  4. +
  5. +

    Jenkins management image

    +
  6. +
  7. +

    Security image

    +
  8. +
+
+
+

In addition, each new node is also based on Docker.

+
+
+
+
Exploring basic Docker options
+
+

Let’s show some of the most important commands that are needed when working with our DevOps module based on the Docker platform. Each command given below should be preceded by a sudo call by default. If you don’t want to use the sudo command, create a Unix group called docker and add your user to it.

+
+
+
+
$ sudo groupadd docker
+$ sudo usermod -aG docker $USER
+
+
+
+
+
Build an image from a Dockerfile
+
+
+
##docker build [OPTIONS] PATH | URL | -
+##
+##Options:
+## --tag , -t : Name and optionally a tag in the ‘name:tag’ format
+
+$ docker build -t vc_jenkins_jobs .
+
+
+
+
+
Container start
+
+
+
##docker run [OPTIONS] IMAGE[:TAG|@DIGEST] [COMMAND] [ARG...]
+#
+##Options:
+##-d : To start a container in detached mode (background)
+##-it : interactive terminal
+##--name : assign a container name
+##--rm : clean up
+##--volumes-from="": Mount all volumes from the given container(s)
+##-p : explicitly map a single port or range of ports
+##--volume : storage associated with the image
+
+$ docker run -d --name vc_jenkins_jobs vc_jenkins_jobs
+
+
+
+
+
Remove one or more containers
+
+
+
##docker rm [OPTIONS] CONTAINER
+#
+##Options:
+##--force , -f : Force the removal of a running container
+
+$ docker rm -f jenkins
+
+
+
+
+
List containers
+
+
+
##docker ps [OPTIONS]
+##--all, -a : Show all containers (default shows just running)
+
+$ docker ps
+
+
+
+
+
Pull an image or a repository from a registry
+
+
+
##docker pull [OPTIONS] NAME[:TAG|@DIGEST]
+
+$ docker pull jenkins/jenkins:2.73.1
+
+
+
+
+
Push the image or a repository to a registry
+
+

Pushing a new image takes place in two steps. First, save the image by passing the container ID to the commit command, and then use push:

+
+
+
+
##docker push [OPTIONS] NAME[:TAG]
+
+$ docker ps
+  # copy container ID from the result
+$ docker commit b46778v943fh vc_jenkins_mng:project_x
+$ docker push vc_jenkins_mng:project_x
+
+
+
+
+
Return information on Docker object
+
+
+
##docker inspect [OPTIONS] NAME|ID [NAME|ID...]
+#
+##Options:
+##--format , -f : output format
+
+$ docker inspect -f '{{ .Mounts }}' vc_jenkins_mng
+
+
+
+
+
List images
+
+
+
##docker images [OPTIONS] [REPOSITORY[:TAG]]
+#
+##Options:
+--all , -a : show all images with intermediate images
+
+$ docker images
+$ docker images jenkins
+
+
+
+
+
Remove one or more images
+
+
+
##docker rmi [OPTIONS] IMAGE [IMAGE...]
+#
+##Options:
+##  --force , -f : Force removal of the image
+
+$ docker rmi jenkins/jenkins:latest
+
+
+
+
+
Run a command in a running container
+
+
+
##docker exec [OPTIONS] CONTAINER COMMAND [ARG...]
+##-d : run command in the background
+##-it : interactive terminal
+##-w : working directory inside the container
+##-e : Set environment variables
+
+$ docker exec vc_jenkins_jobs sh -c "chmod 755 config.xml"
+
+
+
+
+
Advanced commands
+ +
+
+
Remove dangling images
+
+
+
$ docker rmi $(docker images -f dangling=true -q)
+
+
+
+
+
Remove all images
+
+
+
$ docker rmi $(docker images -a -q)
+
+
+
+
+
Removing images according to a pattern
+
+
+
$ docker images | grep "pattern" | awk '{print $3}' | xargs docker rmi
+
+
+
+
+
Remove all exited containers
+
+
+
$ docker rm $(docker ps -a -f status=exited -q)
+
+
+
+
+
Remove all stopped containers
+
+
+
$ docker rm $(docker ps --no-trunc -aq)
+
+
+
+
+
Remove containers according to a pattern
+
+
+
$ docker ps -a | grep "pattern" | awk '{print $1}' | xargs docker rm
+
+
+
+
+
Remove dangling volumes
+
+
+
$ docker volume rm $(docker volume ls -f dangling=true -q)
+
+
+
+
+
+
+

MrChecker download

+
+ +
+
+
+

Windows

+
+ +
+
Advanced installation
+ +
+
+
Java installation
+
+

There is one important pre-requisite for Mr Checker installation - Java has to be installed on the computer and an environmental variable has to be set in order to obtain optimal functioning of the framework.

+
+
+
    +
  1. +

    Install Java 1.8 JDK 64bit

    +
    +

    Download and install Java download link

    +
    +
    +

    (To download JDK 8 from Oracle you have to have an account. It is recommended to get a JDK build based on OpenJDK from AdoptOpenJDK)

    +
    +
  2. +
  3. +

    Windows Local Environment - How to set:

    +
    +
      +
    • +

      Variable name: JAVA_HOME | Variable value: C:\Where_You’ve_Installed_Java

      +
    • +
    • +

      Variable name: PATH | Variable value: %JAVA_HOME%\bin;%JAVA_HOME%\lib

      +
      +
      +install win03 +
      +
      +
    • +
    +
    +
  4. +
  5. +

    Next, verify it in the command line:

    +
    +
    +
    > java -version
    +
    +
    +
  6. +
+
+
+
+
Other components installation
+
+

Install each component separately, or update the existing ones on your PC.

+
+
+
    +
  1. +

    Maven 3.5

    +
    +
      +
    • +

      Download Maven

      +
    • +
    • +

      Unzip Maven in following location C:\maven

      +
    • +
    • +

      Set Windows Local Environment

      +
      +
        +
      • +

        Variable name: M2_HOME | Variable value: C:\maven\apache-maven-3.5.0

        +
      • +
      • +

        Variable name: PATH | Variable value: %M2_HOME%\bin

        +
        +
        +install win04 +
        +
        +
      • +
      +
      +
    • +
    • +

      Verify it in the command line:

      +
      +
      +
      > mvn --version
      +
      +
      +
    • +
    +
    +
  2. +
  3. +

    IDE

    +
    +
      +
    • +

      Download a most recent Eclipse

      +
    • +
    • +

      Download a MrChecker Project https://downgit.github.io//home?url=https://github.com/devonfw/mrchecker/tree/develop/template[Template] to start a new project or Mrchecker Project https://downgit.github.io//home?url=https://github.com/devonfw/mrchecker/tree/develop/example[Example] to get better understanding what we are capable of.

      +
    • +
    • +

      You should consider installing some useful plugins such as: csvedit, cucumber editor.

      +
    • +
    • +

      Import:

      +
      +
      +install win05 +
      +
      +
    • +
    • +

      Projects from folders

      +
      +
      +install win06 +
      +
      +
    • +
    • +

      Open already created projects:

      +
      +
      +install win07 +
      +
      +
    • +
    • +

      Update project structure - ALT + F5

      +
      +
      +install win08 +
      +
      +
    • +
    +
    +
  4. +
+
+
+
+
+
+

Mac

+
+ +
+
MrChecker macOS installation
+
+

On this page, you can find all the details regarding MrChecker installation on your Mac.

+
+
+
+
Java installation
+
+

There is one important pre-requisite for Mr Checker installation - Java has to be installed on the computer and an environmental variable has to be set in order to obtain optimal functioning of the framework.

+
+
+
    +
  1. +

    Install Java 1.8 JDK 64bit

    +
    +

    Download and install Java download link

    +
    +
    +

    (To download JDK 8 from Oracle you have to have an account. It is recommended to get a JDK build based on OpenJDK from AdoptOpenJDK)

    +
    +
  2. +
  3. +

    Next, verify it in the command line:

    +
    +
    +
    > java -version
    +
    +
    +
  4. +
+
+
+
+
Other components installation
+
+

Install each component separately, or update the existing ones on your Mac.

+
+
+
    +
  1. +

    Maven 3.5

    +
    +
      +
    • +

      Download Maven

      +
    • +
    • +

      Unzip Maven in the following location /maven

      +
    • +
    • +

      Add Maven to PATH

      +
      +
      +
      > $ export PATH=$PATH:/maven/apache-maven-3.5.0/bin/
      +
      +
      +
    • +
    • +

      Verify in terminal:

      +
      +
      +
      > $ mvn -version
      +
      +
      +
    • +
    +
    +
  2. +
  3. +

    Eclipse IDE

    +
    +
      +
    • +

      Download and unzip Eclipse

      +
    • +
    • +

      Download MrCheckerTestFramework source code

      +
    • +
    • +

      Import:

      +
      +
      +image0009 +
      +
      +
    • +
    • +

      Select Projects from folders:

      +
      +

      image00010

      +
      +
    • +
    • +

      Open already created projects:

      +
      +

      image00011

      +
      +
    • +
    • +

      Update project structure - ALT + F5

      +
      +

      image00012

      +
      +
    • +
    +
    +
  4. +
+
+
+
+
+
+

My Thai Star

+
+ +
+
My Thai Star application setup
+
+

My Thai Star is a reference application for devonfw, so it is used extensively in the majority of our examples. To make them run properly you should set it up somewhere and configure environment.csv accordingly. +You can get the app from its official repository here: https://github.com/devonfw/my-thai-star.

+
+
+
+
Setting up My Thai Start app
+
+

Most of the important information is covered in https://github.com/devonfw/my-thai-star#deployment.

+
+
+
The quick summary would be:
+
    +
  1. +

    Get the machine with docker and docker-compose

    +
  2. +
  3. +

    Download the repository

    +
  4. +
  5. +

    Run docker-compose up

    +
  6. +
  7. +

    Go to your project to set up environment.csv

    +
  8. +
  9. +

    The variables we are interested in are MY_THAI_STAR_URL and MY_THAI_STAR_API_URL

    +
  10. +
  11. +

    If you set up My Thai Star application on different host adjust the values accordingly

    +
  12. +
  13. +

    The web application should be available using localhost:8081/restaurant

    +
  14. +
  15. +

    The web API should be available using localhost:8081/api

    +
  16. +
+
+
+
+
+
+

Tutorials

+
+
+

In order to learn more about MrChecker structure, start from Project Organisation section and then check out our fantastic tutorials:

+
+
+

This tutorial will guide you through the series of test which perform basic actions on webpages using MrChecker.

+
+
+

Make sure you already have MrChecker Test Framework installed on your PC. How to install?

+
+
+

Your Product Under Test will be the following website: http://the-internet.herokuapp.com/

+
+
+
Project organization
+ +
+
+
Importing projects
+
+

Every MrChecker project should be imported as a Maven Project.

+
+
+

Example from Eclipse IDE:

+
+
+
+1 +
+
+
+
+2 +
+
+
+

Enter the project path and select projects to import.

+
+
+
+3 +
+
+
+

When the import is finished, update the project structure - ALT + F5

+
+
+
+4 +
+
+
+
+
Exporting projects
+
+

In order to create a new standalone MrChecker project, you can use template-app-under-test and export it to the new folder:

+
+
+
+5 +
+
+
+
+6 +
+
+
+

Create a new folder for the project and enter its path. Select project and files to export:

+
+
+
+7 +
+
+
+

Change project name and other properties, if necessary, in pom.xml file:

+
+
+
+8 +
+
+
+

Then you can import the project to the workspace and create new packages and classes.

+
+
+
+
Creating new packages
+
+
    +
  1. +

    You will need two new packages: one for the new page classes, the other one for test classes:

    +
    +
      +
    • +

      Create a package for page classes

      +
      +
      +
      Open Eclipse
      +Use the "Project Explorer" on the left
      +Navigate to [your-project] → src/main/java → com.capgemini.mrchecker → selenium
      +Right-click on "selenium"
      +Click on "New" → New Package
      +Name the new package "com.capgemini.mrchecker.selenium.pages.[your-product-name]"
      +
      +
      +
    • +
    • +

      Create a package for test classes

      +
      +
      +
      Navigate to [your-project] → src/test/java → com.capgemini.mrchecker → selenium
      +Right click on "selenium"
      +Click on "New" → New Package
      +Name the new package "com.capgemini.mrchecker.selenium.tests.[your-product-name]"
      +
      +
      +
    • +
    +
    +
  2. +
+
+
+

Example:

+
+
+
+9 +
+
+
+
+
Creating new Page Classes
+
+
+
Navigate to: [your-project] → src/main/java → com.capgemini.mrchecker → selenium.pages.[your-product-name]
+Click on "New" → New Class
+Enter the name "YourPage"
+
+
+
+

Every Page Class should extend BasePage class. Import all necessary packages and override all required methods:

+
+
+
    +
  • +

    public boolean isLoaded() - returns true if the page is loaded and false if not

    +
  • +
  • +

    public void load() - loads the page

    +
  • +
  • +

    public String pageTitle() - returns page title

    +
  • +
+
+
+

Example:

+
+
+
+
 public class MainPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        return false;
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Main Page'");
+    }
+
+    @Override
+    public String pageTitle() {
+        return "Main Page Title";
+    }
+ }
+
+
+
+
+
Creating new Test Classes
+
+
+
Navigate to  [your-project] → src/test/java → com.capgemini.mrchecker → selenium.tests.[your-product-name]
+Click on "New" → New Class
+Enter the name "YourCaseTest"
+
+
+
+

Test classes should extend BaseTest class, import all necessary packages and override all required methods:

+
+
+
    +
  • +

    public void setUp() - executes before each test

    +
  • +
  • +

    public void tearDown() - executes after each test

    +
  • +
+
+
+

Optionally, it is also possible to implement the following methods:

+
+
+
    +
  • +

    @BeforeClass +public static void setUpBeforeClass() - runs only once before all tests

    +
  • +
  • +

    @AfterClass +public static void tearDownAfterClass() - runs only once after performing all tests

    +
  • +
+
+
+

Every test method has to be signed with "@Test" parameter.

+
+
+
+
 public class YourCaseTest extends BaseTest {
+    private static MainPage mainPage = new MainPage();
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        mainPage.load();
+    }
+
+    @AfterClass
+    public static void tearDownAfterClass() {
+
+    }
+
+    @Override
+    public void setUp() {
+        if (!mainPage.isLoaded()) {
+            mainPage.load();
+        }
+    }
+
+    @Override
+    public void tearDown() {
+
+    }
+
+    @Test
+    public void shouldTestRunWithoutReturningError() {
+
+    }
+ }
+
+
+
+
+
Running Tests
+
+

Run the test by right-clicking on the test method → Run as → JUnit test.

+
+
+
+
+
+

Basic Tutorials

+
+ +
+
+
+

== Basic Tests

+
+
+
+example1 +
+
+
+

The goal of this test is to open A/B Test subpage and redirect to another website.

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click A/B Testing link and go to A/B Test subpage

    +
  4. +
  5. +

    Click Elemental Selenium link and open it in new tab

    +
  6. +
  7. +

    Switch to Elemental Selenium page and check if it’s loaded

    +
  8. +
+
+
+
+example2 +
+
+
+== Page Class +
+

Create a Page class for AB Testing page. Override all the required methods:

+
+
+
+
 public class ABtestPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.ABTEST.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'A/B Test Control' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.ABTEST.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+ }
+
+
+
+
+== How to use Enum? +
+

Similarly as in environmental variables case, create an enum for storing values of subURLs:

+
+
+
+
 public enum PageSubURLsProjectYEnum {
+
+    BASIC_AUTH("basic_auth"),
+    NEW_WINDOW("windows/new"),
+    WINDOW("windows"),
+    CHECKBOX("checkboxes"),
+    CONTEXT_MENU("context_menu"),
+    KEY_PRESS("key_presses"),
+    DYNAMIC_CONTENT("dynamic_content"),
+    HOVERS("hovers"),
+    SORTABLE_DATA_TABLES("tables"),
+    REDIRECT("redirector"),
+    JAVASCRIPT_ALERTS("javascript_alerts"),
+    CHALLENGING_DOM("challenging_dom"),
+    STATUS_CODES("status_codes"),
+    LOGIN("login"),
+    ABTEST("abtest"),
+    BROKEN_IMAGES("broken_images"),
+    DROPDOWN("dropdown"),
+    HORIZONTAL_SLIDER("horizontal_slider"),
+    DOWNLOAD("download"),
+    FORGOT_PASSWORD("forgot_password"),
+    FORGOT_PASSWORD_EMAIL_SENT("email_sent"),
+    EXIT_INTENT("exit_intent"),
+    DYNAMIC_LOADING("dynamic_loading"),
+    DISAPPEARING_ELEMENTS("disappearing_elements"),
+    DRAG_AND_DROP("drag_and_drop"),
+    DYNAMIC_CONTROLS("dynamic_controls"),
+    UPLOAD("upload"),
+    FLOATING_MENU("floating_menu"),
+    FRAMES("frames"),
+    GEOLOCATION("geolocation"),
+    INFINITE_SCROLL("infinite_scroll"),
+    JQUERY_UI("jqueryui/menu"),
+    JAVASCRIPT_ERROR("javascript_error"),
+    LARGE_AND_DEEP_DOM("large"),
+    NESTED_FRAMES("nested_frames"),
+    NOTIFICATION_MESSAGE("notification_message"),
+    DOWNLOAD_SECURE("download_secure"),
+    SHIFTING_CONTENT("shifting_content"),
+    SLOW_RESOURCES("slow"),
+    TYPOS("typos"),
+    WYSIWYGEDITOR("tinymce");
+
+    /*
+     * Sub URLs are used as real locations in the test environment
+     */
+    private String subURL;
+
+    private PageSubURLsProjectYEnum(String subURL) {
+        this.subURL = subURL;
+    }
+
+    ;
+
+    private PageSubURLsProjectYEnum() {
+
+    }
+
+    @Override
+    public String toString() {
+        return getValue();
+    }
+
+    public String getValue() {
+        return subURL;
+    }
+
+}
+
+
+
+

Instead of mapping data from an external file, you can store and access them directly from the enum class:

+
+
+
+
PageSubURLsProjectYEnum.ABTEST.getValue()
+
+
+
+
+== Selector +
+

In this test case you need selector for only one page element:

+
+
+
+
private static final By elementalSeleniumLinkSelector = By.cssSelector("div > div > a");
+
+
+
+
+== Page methods +
+

You need two methods for performing page actions:

+
+
+
+
     /**
+     * Clicks 'Elemental Selenium' link at the bottom of the page.
+     *
+     * @return ElementalSeleniumPage object.
+     */
+    public ElementalSeleniumPage clickElementalSeleniumLink() {
+        getDriver().findElementDynamic(elementalSeleniumLinkSelector)
+                .click();
+        getDriver().waitForPageLoaded();
+        return new ElementalSeleniumPage();
+    }
+
+    /**
+     * Switches window to the next one - different than the current.
+     */
+    public void switchToNextTab() {
+        ArrayList<String> tabsList = new ArrayList<String>(getDriver().getWindowHandles());
+        getDriver().switchTo()
+                .window(tabsList.get(1));
+    }
+
+
+
+
+== Elemental Selenium Page Class +
+

To return new Elemental Selenium Page object, implement its class. You only need to write basic methods to check if the page is loaded. There is no need to interact with objects on the site:

+
+
+
+
 public class ElementalSeleniumPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(GetEnvironmentParam.ELEMENTAL_SELENIUM_PAGE.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Elemental Selenium' page.");
+        getDriver().get(GetEnvironmentParam.ELEMENTAL_SELENIUM_PAGE.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+}
+
+
+
+
+== Test Class +
+

Create a Test class and write a @Test method to execute the scenario:

+
+
+
+
 @Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class ABtestingTest extends TheInternetBaseTest {
+
+    private static ABtestPage abTestPage;
+
+    @Test
+    public void shouldOpenElementalSeleniumPageWhenClickElementalSeleniumLink() {
+
+        logStep("Click Elemental Selenium link");
+        ElementalSeleniumPage elementalSeleniumPage = abTestPage.clickElementalSeleniumLink();
+
+        logStep("Switch browser's tab to newly opened one");
+        abTestPage.switchToNextTab();
+
+        logStep("Verify if Elemental Selenium Page is opened");
+        assertTrue("Unable to open Elemental Selenium page", elementalSeleniumPage.isLoaded());
+    }
+
+}
+
+
+
+
+== Assert +
+

Asserts methods are used for creating test pass or fail conditions. The optional first parameter is a message which will be displayed in the test failure description.

+
+
+
    +
  • +

    assertTrue(boolean condition) - test passes if condition returns true

    +
  • +
  • +

    assertFalse(boolean condition) - test passes if condition returns false

    +
  • +
+
+
+

Also, add the @BeforeClass method to open the tested page:

+
+
+
+
 @BeforeClass
+    public static void setUpBeforeClass() {
+        abTestPage = shouldTheInternetPageBeOpened().clickABtestingLink();
+        logStep("Verify if ABTest page is opened");
+        assertTrue("Unable to open ABTest page", abTestPage.isLoaded());
+    }
+
+
+
+

@BeforeClass method executes only once before all other @Test cases in the class. There is also a possibility to create an @AfterClass method which is performed once after all @Test cases.

+
+
+

You don’t need to implement @setUp and @tearDown methods because they’re already in TheInternetBaseTest class which you extend.

+
+
+
+== Categories +
+

You can group tests in categories. It’s useful when running many tests at once. Use this parameter:

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+
+
+
+

Then create an interface representing each category. Example:

+
+
+
+
public interface TestsSelenium {
+    /* For test which are testing web pages considering UI (user interface) and using selenium webdriver */
+}
+
+
+
+

To run a test from specified category create Test Suite class:

+
+
+
+
@RunWith(WildcardPatternSuite.class) //search for test files under /src/test/java
+@IncludeCategories({ TestsChrome.class }) // search all test files with category TestsChrome.class
+@ExcludeCategories({ TestsLocal.class, TestsNONParallel.class }) //exclude all test files with category TestsLocal.class and TestsNONParallel.class
+@SuiteClasses({ "../**/*Test.class" }) //search only test files, where file name ends with <anyChar/s>Test.class
+
+public class _TestSuiteChrome {
+
+}
+
+
+
+

You can run a Test Suite as a JUnit test.

+
+
+
+example3 +
+
+
+

In this test case, the goal is to pass username and password authorization and login to the next page.

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click on Basic Auth link

    +
  4. +
  5. +

    Open pop-up login window

    +
  6. +
  7. +

    Enter valid username and password

    +
  8. +
  9. +

    Open next subpage and verify if the user logged in successfully.

    +
  10. +
+
+
+
+== Page Class +
+

Create a page class which represents Basic Auth subpage after proper login.

+
+
+
+example4 +
+
+
+

Override all the required methods:

+
+
+
+
public class BasicAuthPage extends BasePage {
+
+    public BasicAuthPage() {
+
+    }
+
+    public BasicAuthPage(String login, String password) {
+        this.enterLoginAndPasswordByUrl(login, password);
+    }
+
+    @Override
+    public boolean isLoaded() {
+        return true;
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("load");
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+
+
+

In order to verify a login, create a selector to access the visible message.

+
+
+
+
 private static final By selectorTextMessage = By.cssSelector("#content > div > p");
+Then create a method to get message value:
+
+/**
+*       Returns message displayed by system after user's log in.
+*      @return String object representing message displayed by system after user's log in
+*/
+    public String getMessageValue() {
+                return getDriver().findElementDynamic(selectorTextMessage)
+                    .getText();
+}
+
+
+
+

Also, create a method to access the pop-up login window and enter user credentials:

+
+
+
+
    /**
+     * Authenticates user using standard simple authentication popup.
+     *
+     * @param login    User's login
+     * @param password User's password
+     * @throws AWTException
+     * @throws InterruptedException
+     */
+    public void enterLoginAndPassword(String login, String password) throws AWTException, InterruptedException {
+        Robot rb = new Robot();
+
+        Thread.sleep(2000);
+
+        StringSelection username = new StringSelection(login);
+        Toolkit.getDefaultToolkit()
+                .getSystemClipboard()
+                .setContents(username, null);
+        rb.keyPress(KeyEvent.VK_CONTROL);
+        rb.keyPress(KeyEvent.VK_V);
+        rb.keyRelease(KeyEvent.VK_V);
+        rb.keyRelease(KeyEvent.VK_CONTROL);
+
+        rb.keyPress(KeyEvent.VK_TAB);
+        rb.keyRelease(KeyEvent.VK_TAB);
+        Thread.sleep(2000);
+
+        StringSelection pwd = new StringSelection(password);
+        Toolkit.getDefaultToolkit()
+                .getSystemClipboard()
+                .setContents(pwd, null);
+        rb.keyPress(KeyEvent.VK_CONTROL);
+        rb.keyPress(KeyEvent.VK_V);
+        rb.keyRelease(KeyEvent.VK_V);
+        rb.keyRelease(KeyEvent.VK_CONTROL);
+
+        rb.keyPress(KeyEvent.VK_ENTER);
+        rb.keyRelease(KeyEvent.VK_ENTER);
+        Thread.sleep(2000);
+    }
+
+
+
+
+== Robot class +
+

Creating a Robot object allows performing basic system actions such as pressing keys, moving the mouse or taking screenshots. In this case, it’s used to paste login and password text from the clipboard using 'Ctrl + V' shortcut, go to the next field using 'Tab' key and submit by clicking 'Enter'.

+
+
+
+Toolkit +
+

Static class Toolkit can perform basic window actions such as scrolling to a specified position or moving context between components. In this case, it’s used to set clipboard content to username and password value.

+
+
+
+
Thread.sleep(long millis)
+
+
+
+

Web drivers like Selenium perform actions much faster than the normal user. This may cause unexpected consequences e.g. some elements may not be loaded before the driver wants to access them. To avoid this problem you can use Thread.sleep(long millis) to wait given time and let browser load wanted component.

+
+
+

BEWARE: Using Thread.sleep(long millis) is not the recommended approach. Selenium driver gives methods to wait for a specified element to be enabled or visible with a timeout parameter. This is a more stable and effective way. Also, method waitForPageLoaded() will not solve that issue because it only waits for the ready state from the browser while some javascript actions might be performed after that.

+
+
+
+== Test Class +
+

Create a Test class and write a @Test method to execute the scenario. Save parameters as class fields:

+
+
+
+
@Category({ TestsLocal.class, TestsNONParallel.class })
+public class BasicAuthTest extends TheInternetBaseTest {
+
+    private static BasicAuthPage basicAuthPage;
+
+    private String login    = "admin";
+    private String password = "admin";
+    private String message  = "Congratulations! You must have the proper credentials.";
+
+    @Test
+    public void shouldUserLogInWithValidCredentials() throws InterruptedException, AWTException {
+        basicAuthPage = shouldTheInternetPageBeOpened().clickBasicAuthLink();
+
+        logStep("Enter login and password");
+        basicAuthPage.enterLoginAndPassword(login, password);
+
+        logStep("Verify if user logged in successfully");
+        assertEquals("Unable to login user with valid credentials", message,
+            basicAuthPage.getMessageValue());
+    }
+
+    @Override
+    public void tearDown() {
+        logStep("Navigate back to The-Internet page");
+        theInternetPage.load();
+    }
+}
+
+
+
+

assertEquals(Object expected, Object actual) - test passes if the parameters are equal.

+
+
+
+== Alternative scenario: +
+

There is also a possibility to log in with credentials as part of the URL: http://login:password@the-internet.herokuapp.com/basic_auth

+
+
+

Another page class method:

+
+
+
+
/**
+     * Authenticates user passing credentials into URL.
+     *
+     * @param login    User's login
+     * @param password User's password
+     */
+    private void enterLoginAndPasswordByUrl(String login, String password) {
+        getDriver().get("http://" + login + ":" + password + "@" + "the-internet.herokuapp.com/" +
+            PageSubURLsProjectYEnum.BASIC_AUTH.getValue());
+    }
+
+
+
+

Another test class method:

+
+
+
+
@Test
+    public void shouldUserLogInWithValidCredentialsSetInURL() {
+        logStep("Enter user's credentials into URL to log in");
+        basicAuthPage = new BasicAuthPage(login, password);
+
+        logStep("Verify if user logged in successfully");
+        assertEquals("Unable to login user with valid credentials", message,
+            basicAuthPage.getMessageValue());
+    }
+
+
+
+

After running test class as a JUnit test, both test cases will be performed.

+
+
+

This test goal is to check the dimensions of broken images on the subpage.

+
+
+
+example5 +
+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Broken Image link and go to Broken Image subpage

    +
  4. +
  5. +

    Get the 3 images' dimensions and compare them with expected values

    +
  6. +
+
+
+
+== Page Class +
+

In this case, create an array of selectors to access images by index number:

+
+
+
+
public class BrokenImagePage extends BasePage {
+
+    private static final By[] selectorsImages = { By.cssSelector("div > img:nth-child(2)"),
+            By.cssSelector("div > img:nth-child(3)"),
+            By.cssSelector("div > img:nth-child(4)") };
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.BROKEN_IMAGES.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Broken Images' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.BROKEN_IMAGES.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns an image height in pixels.
+     *
+     * @param imageIndex An index of given image.
+     * @return Height of an image in pixels.
+     */
+    public int getImageHeight(int imageIndex) {
+        return getImageDimension(imageIndex).getHeight();
+    }
+
+    /**
+     * Returns an image width in pixels.
+     *
+     * @param imageIndex An index of given image.
+     * @return Width of an image in pixels.
+     */
+    public int getImageWidth(int imageIndex) {
+        return getImageDimension(imageIndex).getWidth();
+    }
+
+    private Dimension getImageDimension(int imageIndex) {
+        return getDriver().findElementDynamic(selectorsImages[imageIndex])
+                .getSize();
+    }
+
+}
+
+
+
+
+== Test Class +
+

Create @Test and @BeforeClass methods. Save expected images' dimensions in class fields:

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class BrokenImagesTest extends TheInternetBaseTest {
+
+    private static BrokenImagePage brokenImagePage;
+
+    private final int expectedHeight = 90;
+    private final int expectedWidth  = 120;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        brokenImagePage = shouldTheInternetPageBeOpened().clickBrokenImageLink();
+
+        logStep("Verify if Broken Image page is opened");
+        assertTrue("Unable to open Broken Image page", brokenImagePage.isLoaded());
+    }
+
+    @Test
+    public void shouldImageSizesBeEqualToExpected() {
+        for (int i = 0; i < 3; i++) {
+            logStep("Verify size of image with index: " + i);
+            assertEquals("Height of image with index: " + i + " is incorrect", expectedHeight,
+                   brokenImagePage.getImageHeight(i));
+            assertEquals("Width of image with index: " + i + " is incorrect", expectedWidth,
+                   brokenImagePage.getImageWidth(i));
+        }
+    }
+
+}
+
+
+
+

The test will pass if every image has the correct width and height.

+
+
+

This case goal is to find out how to create stable selectors.

+
+
+

In the browser’s developer mode, you can see how the page is built. Notice that the buttons' IDs change after each click and that the values in the table don’t have unique attributes which could help to locate them.

+
+
+
+example6 +
+
+
+
+== DOM - Document Object Model +
+

HTML DOM is a model of the page created by the browser. The page could be represented as the tree of objects. Read more.

+
+
+

To create locators you can use element attributes such as id, class name etc.

+
+
+

In this case, since there are no unique attributes, the best approach is to use the HTML document structure and identify page elements by their place in the object hierarchy.

+
+
+
+
Page Class
+public class ChallengingDomPage extends BasePage {
+
+    private final By selectorTableRows   = By.cssSelector(".large-10 > table > tbody > tr");
+    private final By selectorFirstButton = By.cssSelector(".large-2.columns > .button:nth-
+            child(1)");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.CHALLENGING_DOM.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Challenging DOM' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.CHALLENGING_DOM.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns table text content as a list of String objects.
+     *
+     * @return A list of table values.
+     */
+    public List<String> getTableValues() {
+        return JsoupHelper.findTexts(selectorTableRows);
+    }
+
+    /**
+     * Clicks top button on the page from available button set.
+     */
+    public void clickFirstButton() {
+        getDriver().elementButton(selectorFirstButton)
+                .click();
+        getDriver().waitForPageLoaded();
+    }
+
+}
+
+
+
+
+== Jsoup Helper +
+

Jsoup Helper is the tool which helps to parse HTML document and get searched values. This is especially useful when values are organized in a generic structure such as a table.

+
+
+

JsoupHelper.findTexts(By selector) - this method returns text content of a table as a list of Strings

+
+
+
+== Test Class +
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Challenging DOM link and go to Challenging DOM subpage

    +
  4. +
  5. +

    Get and save table values

    +
  6. +
  7. +

    Click the first button

    +
  8. +
  9. +

    Get table values again

    +
  10. +
  11. +

    Compare table values before and after button click

    +
  12. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class ChallengingDomTest extends TheInternetBaseTest {
+
+    private static ChallengingDomPage challengingDomPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        challengingDomPage = shouldTheInternetPageBeOpened().clickChallengingDomLink();
+
+        logStep("Verify if Challenging Dom page is opened");
+        assertTrue("Unable to open Challenging Dom page", challengingDomPage.isLoaded());
+    }
+
+    @Test
+    public void shouldValuesInTableCellsStayUnchangedAfterClick() {
+
+        logStep("Get table values (before click any button)");
+        List<String> tableValuesBeforeClick = challengingDomPage.getTableValues();
+
+        logStep("Click first button");
+        challengingDomPage.clickFirstButton();
+
+        logStep("Get table values (after click first button)");
+        List<String> tableValuesAfterClick = challengingDomPage.getTableValues();
+
+        logStep("Verify equality of table values before and after click");
+        assertEquals("Values from table cells were changed after click", tableValuesBeforeClick,
+                tableValuesAfterClick);
+    }
+
+}
+
+
+
+

Because values in the table don’t change, the test should pass if object locators are solid.

+
+
+

In this example, you will learn how to test checkboxes on the page.

+
+
+
+example7 +
+
+
+

A checkbox is a simple web element which can be selected or unselected by clicking on it.

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Checkboxes link and go to Checkboxes page

    +
  4. +
  5. +

    Test if the first checkbox is unchecked

    +
  6. +
  7. +

    Select the first checkbox

    +
  8. +
  9. +

    Test if the first checkbox is checked

    +
  10. +
  11. +

    Test if the second checkbox is checked

    +
  12. +
  13. +

    Unselect second checkbox

    +
  14. +
  15. +

    Test if the second checkbox is unchecked

    +
  16. +
+
+
+
+== Page Class +
+

Because both checkboxes are in one form, it’s possible to locate them by one selector and then access each individual one by index.

+
+
+
+example8 +
+
+
+
+
public class CheckboxesPage extends BasePage {
+
+    private final static By checkboxesFormSelector = By.cssSelector("#checkboxes");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.CHECKBOX.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Checkboxes' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.CHECKBOX.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Verifies if checkbox form is visible on the page.
+     *
+     * @return true if checkboxes are present and displayed on the page
+     */
+    public boolean isElementCheckboxesFormVisible() {
+        return getDriver().elementCheckbox(checkboxesFormSelector)
+                .isDisplayed();
+    }
+
+    /**
+     * Verifies if given checkbox is selected or not.
+     *
+     * @param index The index of given checkbox
+     * @return true if given checkbox is selected
+     */
+    public boolean isCheckboxSelected(int index) {
+        return getDriver().elementCheckbox(checkboxesFormSelector)
+                .isCheckBoxSetByIndex(index);
+    }
+
+    /**
+     * Selects given checkbox. Unselects, if it is already selected.
+     *
+     * @param index The index of given checkbox
+     */
+    public void selectCheckbox(int index) {
+        CheckBox checkbox = getDriver().elementCheckbox(checkboxesFormSelector);
+        if (isCheckboxSelected(index)) {
+            checkbox.unsetCheckBoxByIndex(index);
+        } else {
+            checkbox.setCheckBoxByIndex(index);
+        }
+    }
+
+}
+
+
+
+
+== CheckBox +
+

CheckBox class contains methods to perform actions on checkboxes such as setting and unsetting or verifying if the specified box is checked. Use the method elementCheckbox(By selector) to create a CheckBox object.

+
+
+
+== Test Class +
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class CheckboxesTest extends TheInternetBaseTest {
+
+    private static CheckboxesPage checkboxesPage;
+
+    @Override
+    public void setUp() {
+        checkboxesPage = shouldTheInternetPageBeOpened().clickCheckboxesLink();
+
+        logStep("Verify if Checkboxes page is opened");
+        assertTrue("Unable to open Checkboxes page", checkboxesPage.isLoaded());
+    }
+
+    @Test
+    public void shouldCheckboxBeSelectedAfterClick() {
+
+        logStep("Verify if first checkbox is not selected");
+        assertFalse("The checkbox is selected", checkboxesPage.isCheckboxSelected(0));
+
+        logStep("Select first checkbox");
+        checkboxesPage.selectCheckbox(0);
+
+        logStep("Verify if first checkbox is selected");
+        assertTrue("The checkbox is not selected", checkboxesPage.isCheckboxSelected(0));
+    }
+
+    @Test
+    public void shouldCheckboxBeUnselectedAfterClick() {
+
+        logStep("Verify if second checkbox is selected");
+        assertTrue("The checkbox is not selected", checkboxesPage.isCheckboxSelected(1));
+
+        logStep("Select second checkbox");
+        checkboxesPage.selectCheckbox(1);
+
+        logStep("Verify if second checkbox is not selected");
+        assertFalse("The checkbox is selected", checkboxesPage.isCheckboxSelected(1));
+    }
+
+}
+
+
+
+

After running the Test Class, both @Test cases will be performed. Before each one, the overridden setUp method will be executed.

+
+
+

This case will show how to test changing website content.

+
+
+
+example9 +
+
+
+

After refreshing page (F5) a few times, a new element should appear:

+
+
+
+example10 +
+
+
+

Then, after another couple of refreshes, it should disappear.

+
+
+

You can check in developer mode that the Gallery element does not exist in the HTML document until it appears on the page. The element is created by JavaScript.

+
+
+
+example11 +
+
+
+
+example12 +
+
+
+

Steps:

+
+
+
    +
  1. +

    Load The Internet Main Page

    +
  2. +
  3. +

    Click Disappearing Elements link and go to that subpage

    +
  4. +
  5. +

    Check if Menu Buttons exist on the page

    +
  6. +
  7. +

    Refresh the page until a new element appears

    +
  8. +
  9. +

    Check if Gallery Button exists

    +
  10. +
  11. +

    Check if the number of buttons equals the expected value

    +
  12. +
  13. +

    Refresh the page until an element disappears

    +
  14. +
  15. +

    Check if Gallery Button does not exist

    +
  16. +
  17. +

    Check if the number of buttons is smaller than before

    +
  18. +
+
+
+
+== Page Class +
+
+
public class DisappearingElementsPage extends BasePage {
+
+    private static final By selectorGalleryMenuButton = By.cssSelector("li > a[href*=gallery]");
+    private static final By selectorMenuButtons       = By.cssSelector("li");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DISAPPEARING_ELEMENTS.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Disappearing Elements' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DISAPPEARING_ELEMENTS.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns a number of WebElements representing menu buttons.
+     *
+     * @return A number of WebElements.
+     */
+    public int getNumberOfMenuButtons() {
+        return getDriver().findElementDynamics(selectorMenuButtons)
+                .size();
+    }
+
+    /**
+     * Returns WebElement representing disappearing element of menu.
+     *
+     * @return Disappearing WebElement if visible, null otherwise.
+     */
+    public WebElement getGalleryMenuElement() {
+        return getDriver().findElementQuietly(selectorGalleryMenuButton);
+    }
+
+    /**
+     * Refreshes web page as many times as it is required to appear/disappear menu button
+     * WebElement.
+     *
+     * @param shouldAppear Determines if element should appear (true) or disappear (false).
+     */
+    public void refreshPageUntilWebElementAppears(boolean shouldAppear) {
+        int numberOfAttempts = 5;
+        int counter = 0;
+        while (!isVisibilityAsExpected(shouldAppear) || isMaxNumberOfAttemptsReached(counter++,
+                numberOfAttempts)) {
+            refreshPage();
+        }
+    }
+
+    /**
+     * Verify if visibility of Gallery button is the same as expected
+     *
+     * @param expected Determines if element should be visible (true) or not visible (false).
+     */
+    private boolean isVisibilityAsExpected(boolean expected) {
+        boolean isVisibilityDifferentThanExpected = isGalleryMenuElementVisible() ^ expected;
+        return !isVisibilityDifferentThanExpected;
+    }
+
+    private boolean isGalleryMenuElementVisible() {
+        boolean result = false;
+        WebElement gallery = getGalleryMenuElement();
+        if (gallery != null)
+            result = gallery.isDisplayed();
+        return result;
+    }
+
+    private boolean isMaxNumberOfAttemptsReached(int attemptNo, int maxNumberOfAttempts) {
+        return attemptNo ==  maxNumberOfAttempts;
+    }
+
+}
+
+
+
+

findElementQuietly(By selector) works similarly to findElementDynamic(By selector) but won’t throw an exception if an element wasn’t found. In this case, the searched WebElement will have a NULL value.

+
+
+
+== Test Class +
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class DisappearingElementsTest extends TheInternetBaseTest {
+
+    private static final int totalNumberOfMenuButtons = 5;
+    private static DisappearingElementsPage disappearingElementsPage;
+    private static       int numberOfMenuButtons      = 0;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        disappearingElementsPage = shouldTheInternetPageBeOpened().clickDisappearingElementsLink();
+
+        logStep("Verify if Disappearing Elements page is opened");
+        assertTrue("Unable to open Disappearing Elements page",
+                disappearingElementsPage.isLoaded());
+
+        logStep("Verify if menu button elements are visible");
+        numberOfMenuButtons = disappearingElementsPage.getNumberOfMenuButtons();
+        assertTrue("Unable to display menu", numberOfMenuButtons > 0);
+    }
+
+    @Test
+    public void shouldMenuButtonElementAppearAndDisappearAfterRefreshTest() {
+        logStep("Click refresh button until menu button appears");
+        disappearingElementsPage.refreshPageUntilWebElementAppears(true);
+
+        logStep("Verify if menu button element appeared");
+        assertNotNull("Unable to disappear menu button element",
+                disappearingElementsPage.getGalleryMenuElement());
+        assertEquals("The number of button elements after refresh is incorrect",
+                totalNumberOfMenuButtons, disappearingElementsPage.getNumberOfMenuButtons());
+
+        logStep("Click refresh button until menu button disappears");
+        disappearingElementsPage.refreshPageUntilWebElementAppears(false);
+
+        logStep("Verify if menu button element disappeared");
+        assertNull("Unable to appear menu button element",
+                disappearingElementsPage.getGalleryMenuElement());
+        assertTrue("The number of button elements after refresh is incorrect",
+                totalNumberOfMenuButtons > disappearingElementsPage.getNumberOfMenuButtons());
+    }
+
+}
+
+
+
+

assertNull(Object object) - test passes if the object is NULL +assertNotNull(Object object) - test passes if the object is not NULL

+
+
+

This case shows how to move draggable elements on the page.

+example13 +

+
+
+

Try to move A to B position and see what happens. Also, open browser developer mode and see how the DOM changes.

+
+
+
+example14 +
+
+
+

The page can easily be broken. You can try to do so and check how the page structure changed in browser developer mode.

+
+
+
+example15 +
+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Drag and Drop link and open subpage

    +
  4. +
  5. +

    Check if the Drag and Drop message is visible

    +
  6. +
  7. +

    Check if element A is in container A and B in container B

    +
  8. +
  9. +

    Move element A to position B

    +
  10. +
  11. +

    Check if element A is in container B and B in container A

    +
  12. +
  13. +

    Move element B to position A

    +
  14. +
  15. +

    Again check if element A is in container A and B in container B

    +
  16. +
+
+
+
+== Page Class +
+
+
public class DragAndDropPage extends BasePage {
+
+    // Locators for the page header text, the two drop containers and the container label element.
+    private static final By selectorDragAndDropText    = By.cssSelector("div#content h3");
+    private static final By selectorAElementContainer  = By.cssSelector("div#column-a");
+    private static final By selectorBElementContainer  = By.cssSelector("div#column-b");
+    private static final By selectorDescriptionElement = By.cssSelector("header");
+
+    // JavaScript helper used because native Selenium drag-and-drop fails on HTML5 pages.
+    private static final String dndHelperPath = "src/test/resources/js/drag_and_drop_helper.js";
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DRAG_AND_DROP.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Drag and Drop' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() + PageSubURLsProjectYEnum.DRAG_AND_DROP.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns information if drag and drop message is visible or not.
+     *
+     * @return true if the drag and drop message was found on the web page.
+     */
+    public boolean isDragAndDropMessageVisible() {
+        return getDriver().findElementDynamic(selectorDragAndDropText)
+                .isDisplayed();
+    }
+
+    /**
+     * Verifies if specified element is placed in designated container.
+     *
+     * @param element String description ("A" or "B") of the element to be verified.
+     * @return true if element described as A exists in container A or element B exists in container B, false otherwise.
+     */
+    public boolean isElementPlacedInCorrectContainer(String element) {
+        // The container's visible label ("A"/"B") equals the element description when placement is correct.
+        return getDescriptionElement(findElementByDescription(element)).getText()
+                .equals(element);
+    }
+
+    // Maps the textual description ("A"/"B") to the matching container WebElement; null for unknown input.
+    private WebElement findElementByDescription(String element) {
+        WebElement result;
+        switch (element) {
+            case "A":
+                result = getContainerElement(selectorAElementContainer);
+                break;
+            case "B":
+                result = getContainerElement(selectorBElementContainer);
+                break;
+            default:
+                result = null;
+                BFLogger.logDebug("Chosen element doesn't exist on web page");
+        }
+        return result;
+    }
+
+    private WebElement getContainerElement(By container) {
+        return getDriver().findElementDynamic(container);
+    }
+
+    // Returns the header element holding the container's visible label.
+    private WebElement getDescriptionElement(WebElement container) {
+        return container.findElement(selectorDescriptionElement);
+    }
+
+    /**
+     * Drags element to designated container and drops it.
+     *
+     * @param element         String describing WebElement expected to be dragged.
+     * @param from            String describing WebElement representing container of element expected to be dragged.
+     * @param destinationDesc String describing WebElement representing destination container where other element will be dragged.
+     */
+    public void dragElementToPosition(String element, String from, String destinationDesc) {
+        WebElement source = findElementByDescription(from);
+        WebElement description = getDescriptionElement(source);
+        WebElement destination = findElementByDescription(destinationDesc);
+        // Drag only when the source container currently holds the expected element.
+        if (description.getText()
+                .equals(element))
+            dragElement(source, destination);
+    }
+
+}
+
+
+
+

Since HTML5, normal Selenium drag-and-drop action stopped working, thus it’s necessary to execute Javascript which performs the drag-and-drop. To do so, create a JavascriptExecutor object, then read the script from a file drag_and_drop_helper.js and execute it with additional arguments using method executeScript(String script).

+
+
+

An example drag-and-drop solution:

+
+
+
+
    /**
+     * Drags and drops given WebElement to its destination location.
+     * <p>
+     * Since HTML5 all Selenium Actions performing drag and drop operations stopped working as expected, e.g.
+     * original implementation, which was:
+     * <code>
+     * BasePage.getAction()
+     * .clickAndHold(draggable)
+     * .moveToElement(target)
+     * .release()
+     * .build()
+     * .perform();
+     * </code>
+     * finishes with no effect. For this reason, there is javaScript function used, to make sure that
+     * drag and drop operation will be successful.
+     * JavaScript function is stored under the following path: 'src/test/resources/js/drag_and_drop_helper.js'.
+     * Original source of the script:
+     * <a href="https://gist.github.com/rcorreia/2362544">drag_and_drop_helper</a>
+     * </p>
+     *
+     * @param draggable A WebElement to be dragged and dropped.
+     * @param target    A destination, where element will be dropped.
+     * @see JavascriptExecutor
+     * @see Actions
+     */
+    private void dragElement(WebElement draggable, WebElement target) {
+        JavascriptExecutor js;
+        INewWebDriver driver = getDriver();
+        List<String> fileContent;
+        String draggableId = draggable.getAttribute("id");
+        String targetId = target.getAttribute("id");
+        String script = null;
+        // Only elements explicitly marked draggable="true" are handled; others are silently ignored.
+        if (draggable.getAttribute("draggable")
+                .contains("true")) {
+            if (driver instanceof JavascriptExecutor) {
+                js = (JavascriptExecutor) driver;
+                Path path = Paths.get(dndHelperPath);
+                try {
+                    // Read the whole helper script into a single string (lines joined without separators).
+                    fileContent = Files.readAllLines(path);
+                    script = fileContent.stream()
+                            .collect(Collectors.joining());
+                } catch (IOException e) {
+                    // Best effort: log and skip the drag rather than failing the whole test here.
+                    BFLogger.logDebug("Unable to read file content: " + e.getMessage());
+                }
+                if (script != null && !script.isEmpty()) {
+                    // Append the invocation so the helper plugin runs against the source/target ids.
+                    String arguments = "$('#%s').simulateDragDrop({ dropTarget: '#%s'});";
+                    js.executeScript(script + String.format(arguments, draggableId, targetId));
+                }
+            }
+        }
+    }
+
+
+
+

Drag and Drop helper file:

+
+
+
+
(function( $ ) {
+        // jQuery plugin: simulates an HTML5 drag-and-drop from each matched element
+        // to the element selected by options.dropTarget.
+        $.fn.simulateDragDrop = function(options) {
+                return this.each(function() {
+                        new $.simulateDragDrop(this, options);
+                });
+        };
+        $.simulateDragDrop = function(elem, options) {
+                this.options = options;
+                this.simulateEvent(elem, options);
+        };
+        $.extend($.simulateDragDrop.prototype, {
+                // Fires the dragstart/drop/dragend sequence, forwarding the shared dataTransfer object.
+                simulateEvent: function(elem, options) {
+                        /*Simulating drag start*/
+                        var type = 'dragstart';
+                        var event = this.createEvent(type);
+                        this.dispatchEvent(elem, type, event);
+
+                        /*Simulating drop*/
+                        type = 'drop';
+                        // NOTE(review): createEvent(type) declares one parameter; the second argument here is ignored.
+                        var dropEvent = this.createEvent(type, {});
+                        dropEvent.dataTransfer = event.dataTransfer;
+                        this.dispatchEvent($(options.dropTarget)[0], type, dropEvent);
+
+                        /*Simulating drag end*/
+                        type = 'dragend';
+                        var dragEndEvent = this.createEvent(type, {});
+                        dragEndEvent.dataTransfer = event.dataTransfer;
+                        this.dispatchEvent(elem, type, dragEndEvent);
+                },
+                // Builds a CustomEvent carrying a minimal dataTransfer shim (plain-object store).
+                createEvent: function(type) {
+                        var event = document.createEvent("CustomEvent");
+                        event.initCustomEvent(type, true, true, null);
+                        event.dataTransfer = {
+                                data: {
+                                },
+                                setData: function(type, val){
+                                        this.data[type] = val;
+                                },
+                                getData: function(type){
+                                        return this.data[type];
+                                }
+                        };
+                        return event;
+                },
+                // Dispatches via the standard API, falling back to legacy IE fireEvent.
+                dispatchEvent: function(elem, type, event) {
+                        if(elem.dispatchEvent) {
+                                elem.dispatchEvent(event);
+                        }else if( elem.fireEvent ) {
+                                elem.fireEvent("on"+type, event);
+                        }
+                }
+        });
+})(jQuery);
+
+
+
+
+== Test Class +
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class DragAndDropTest extends TheInternetBaseTest {
+
+    // On this page the element labels and the container labels share the same "A"/"B" descriptions.
+    private static final String ELEMENT_A   = "A";
+    private static final String CONTAINER_A = "A";
+    private static final String ELEMENT_B   = "B";
+    private static final String CONTAINER_B = "B";
+
+    private static DragAndDropPage dragAndDropPage;
+
+    /**
+     * Opens the Drag And Drop subpage once for the class and verifies it loaded with its message visible.
+     */
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        dragAndDropPage = shouldTheInternetPageBeOpened().clickDragAndDropLink();
+
+        logStep("Verify if Drag And Drop page is opened");
+        assertTrue("Unable to open Drag And Drop page", dragAndDropPage.isLoaded());
+
+        logStep("Verify if Drag And Drop message is visible");
+        assertTrue("Drag And Drop message is not visible", dragAndDropPage.isDragAndDropMessageVisible());
+    }
+
+    /**
+     * Moves element A into container B, verifies both elements are swapped, then moves
+     * element A back and verifies the original placement is restored.
+     */
+    @Test
+    public void shouldDraggableElementBeMovedAndDropped() {
+        logStep("Verify if elements are placed in proper containers");
+        assertTrue("Element A doesn't exist in container A", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_A));
+        assertTrue("Element B doesn't exist in container B", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_B));
+
+        logStep("Drag and drop element A into container B");
+        dragAndDropPage.dragElementToPosition(ELEMENT_A, CONTAINER_A, CONTAINER_B);
+
+        logStep("Verify if elements are placed in swapped containers");
+        // After the drag, A must no longer be in its original container (and likewise B).
+        assertFalse("Element A is still placed in container A", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_A));
+        assertFalse("Element B is still placed in container B", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_B));
+
+        logStep("Drag and drop element A back into container A");
+        dragAndDropPage.dragElementToPosition(ELEMENT_A, CONTAINER_B, CONTAINER_A);
+
+        logStep("Verify if elements are placed in proper containers");
+        assertTrue("Element A doesn't exist in container A", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_A));
+        assertTrue("Element B doesn't exist in container B", dragAndDropPage.isElementPlacedInCorrectContainer(ELEMENT_B));
+    }
+
+}
+
+
+
+

This example shows how to select an element from the dropdown list.

+
+
+
+example16 +
+
+
+

Check in the developer mode how a Dropdown List’s content has been organized.

+
+
+
+example17 +
+
+
+

Notice that the Dropdown Options have different attributes, such as "disabled" or "selected".

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click the Dropdown link and go to the subpage

    +
  4. +
  5. +

    Select first dropdown Option

    +
  6. +
  7. +

    Check if Option 1 is selected

    +
  8. +
  9. +

    Select second dropdown Option

    +
  10. +
  11. +

    Check if Option 2 is selected

    +
  12. +
+
+
+
+== Page Class +
+
+
public class DropdownPage extends BasePage {
+
+    // Locator of the single <select id="dropdown"> element on the page.
+    private static final By dropdownListSelector = By.cssSelector("#dropdown");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DROPDOWN.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Dropdown List' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DROPDOWN.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Selects dropdown's value by given index.
+     *
+     * @param index Index of option to be selected
+     */
+    public void selectDropdownValueByIndex(int index) {
+        getDriver().elementDropdownList(dropdownListSelector)
+                .selectDropdownByIndex(index);
+    }
+
+    /**
+     * Returns text value of first selected dropdown's option.
+     *
+     * @return String object representing value of dropdown's option
+     */
+    public String getSelectedDropdownValue() {
+        return getDriver().elementDropdownList(dropdownListSelector)
+                .getFirstSelectedOptionText();
+    }
+}
+
+
+
+
+== DropdownListElement class +
+

DropdownListElement is MrChecker’s class, which contains methods for performing the dropdown list of actions:

+
+
+
+
elementDropdownList() - returns DropdownListElement Object
+
+
+
+
+== Test Class +
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class DropdownTest extends TheInternetBaseTest {
+
+    private static final String expectedFirstOptionValue  = "Option 1";
+    private static final String expectedSecondOptionValue = "Option 2";
+    private static DropdownPage dropdownPage;
+
+    /**
+     * Opens the Dropdown subpage once for the class and verifies it is loaded.
+     */
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        dropdownPage = shouldTheInternetPageBeOpened().clickDropdownLink();
+
+        logStep("Verify if Dropdown page is opened");
+        assertTrue("Unable to open Dropdown page", dropdownPage.isLoaded());
+    }
+
+    /**
+     * Selects the first and then the second dropdown option, verifying after each
+     * selection that the selected option's text matches the expected value.
+     */
+    @Test
+    public void shouldGetExpectedDropdownTextOptionAfterSelection() {
+
+        logStep("Select first dropdown option");
+        dropdownPage.selectDropdownValueByIndex(1);
+
+        logStep("Verify if selected option text is equal to the expected one");
+        assertEquals("Selected value is different than expected", expectedFirstOptionValue,
+                dropdownPage.getSelectedDropdownValue());
+
+        logStep("Select second dropdown option");
+        dropdownPage.selectDropdownValueByIndex(2);
+
+        logStep("Verify if selected option text is equal to the expected one");
+        assertEquals("Selected value is different than expected", expectedSecondOptionValue,
+                dropdownPage.getSelectedDropdownValue());
+    }
+
+}
+
+
+
+

This case shows how to compare dynamic content.

+
+
+
+example18 +
+
+
+

Note that after site refresh, some of the content is different. You can see in the browser’s developer mode how the text and image sources are being changed.

+
+
+
+example19 +
+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Dynamic Content link and load subpage

    +
  4. +
  5. +

    Save page images sources and descriptions before the refresh

    +
  6. +
  7. +

    Refresh page

    +
  8. +
  9. +

    Save page images sources and it’s descriptions after refresh

    +
  10. +
  11. +

    Compare page content before and after refresh and verify if it’s different

    +
  12. +
+
+
+
+== Page Class +
+
+
public class DynamicContentPage extends BasePage {
+
+    // Locators for all images and all text descriptions in the dynamic-content rows.
+    private static final By imagesLinksSelector        = By.cssSelector("div#content > div.row img");
+    private static final By imagesDescriptionsSelector = By.cssSelector("div#content > div.row div.large-10");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DYNAMIC_CONTENT.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Dynamic Content' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DYNAMIC_CONTENT.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns list of picture descriptions being present on the web page.
+     *
+     * @return List of String objects representing descriptions
+     */
+    public List<String> getDescriptions() {
+        return new ListElements(imagesDescriptionsSelector).getTextList();
+    }
+
+    /**
+     * Returns a list of image links being present on the web page.
+     *
+     * @return List of String objects representing paths to pictures
+     */
+    public List<String> getImageLinks() {
+        // Collect the "src" attribute of every matched <img> element.
+        return new ListElements(imagesLinksSelector)
+                .getList()
+                .stream()
+                .map(element -> element.getAttribute("src"))
+                .collect(Collectors.toList());
+    }
+}
+
+
+
+
+== ListElements +
+

ListElements is MrChecker collection which can store WebElement Objects. Constructing ListElements with cssSelector allows you to store every element on the page which fits the selector. Example methods:

+
+
+
+
getList() -  returns WebElements list,
+getTextList() - returns list of contents of each Element,
+getSize() - returns number of stored Elements
+The getImageLinks() example shows how to get a list of specified Elements' attributes.
+
+
+
+
+== Test Class +
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class DynamicContentTest extends TheInternetBaseTest {
+
+    private static DynamicContentPage dynamicContentPage;
+
+    /**
+     * Opens the Dynamic Content subpage once for the class and verifies it is loaded.
+     */
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        dynamicContentPage = shouldTheInternetPageBeOpened().clickDynamicContentLink();
+
+        logStep("Verify if Dynamic Content page is opened");
+        assertTrue("Unable to open Dynamic Content page", dynamicContentPage.isLoaded());
+    }
+
+    /**
+     * Captures image links and descriptions, refreshes the page, and verifies that at
+     * least one description and at least one image differ from the pre-refresh state.
+     */
+    @Test
+    public void shouldImagesAndDescriptionsDifferAfterRefresh() {
+
+        logStep("Read images and descriptions before refresh");
+        List<String> descriptionsBeforeRefresh = dynamicContentPage.getDescriptions();
+        List<String> imagesBeforeRefresh = dynamicContentPage.getImageLinks();
+
+        logStep("Refresh page");
+        dynamicContentPage.refreshPage();
+        assertTrue("The Dynamic Content page hasn't been refreshed", dynamicContentPage.isLoaded());
+
+        logStep("Read images and descriptions after refresh");
+        List<String> descriptionsAfterRefresh = dynamicContentPage.getDescriptions();
+        List<String> imagesAfterRefresh = dynamicContentPage.getImageLinks();
+
+        logStep("Verify if descriptions are different after refresh");
+        // assertEquals(message, expected, actual): the pre-refresh size is the expected baseline.
+        assertEquals("Different number of descriptions before and after refresh",
+                descriptionsBeforeRefresh.size(), descriptionsAfterRefresh.size());
+        assertTrue("There are no differences between descriptions before and after refresh",
+                listsDiffer(descriptionsBeforeRefresh, descriptionsAfterRefresh));
+
+        logStep("Verify if images are different after refresh");
+        assertEquals("Different number of images before and after refresh",
+                imagesBeforeRefresh.size(), imagesAfterRefresh.size());
+        assertTrue("There are no differences between images before and after refresh",
+                listsDiffer(imagesBeforeRefresh, imagesAfterRefresh));
+    }
+
+    // Returns true when the two equally-sized lists differ at any position (first diversity found wins).
+    private static boolean listsDiffer(List<String> before, List<String> after) {
+        for (int i = 0; i < after.size(); i++) {
+            if (!after.get(i)
+                    .equals(before.get(i))) {
+                return true;
+            }
+        }
+        return false;
+    }
+}
+
+
+
+

In the test method, during differences verification, the goal is to compare every element from the first and second list and find first diversity.

+
+
+

This example shows how to test a page with dynamically loading content. Some elements don’t load during page loading, but during JavaScript execution.

+
+
+
+example23 +
+
+
+

Go to Example 1:

+
+
+
+example24 +
+
+
+

Click "start" and see what happens:

+
+
+
+example25 +
+
+
+

When loading ends, you should see the following message:

+
+
+
+example26 +
+
+
+

In the developer mode, you can see that the element with the "Hello World!" message exists in page DOM but it’s not displayed. However, the loading bar does not exist there - it’s created by JavaScript. The script is also visible in developer mode:

+
+
+
+example27 +
+
+
+

After clicking the "Start" button, the element "Loading" is created by the script, and the "Start" button becomes invisible. When loading ends, "Hello World" message is displayed and the loading bar is hidden. Follow the changes the in developer mode:

+
+
+
+example28 +
+
+
+

Go to example 2: +From a user perspective, there is no difference in page functioning. However, in this case the element with the "Hello World!" message does not exist on the page before clicking "Start". It’s created by the script.

+
+
+
+example29 +
+
+
+

After clicking "Start", the element with the loading bar is created.

+
+
+
+example30 +
+
+
+

After a certain time, the loading bar becomes invisible, and then the script creates "Hello World!" element and displays it.

+
+
+
+example31 +
+
+
+
+== Page Class +
+
+
public class DynamicLoadingPage extends BasePage {
+
+    // Links to the two examples plus the header, start button, loading bar and result text.
+    private static final By selectorExampleOneLink     =
+            By.cssSelector("a[href*='dynamic_loading/1']");
+    private static final By selectorExampleTwoLink     =
+            By.cssSelector("a[href*='dynamic_loading/2']");
+    private static final By selectorDynamicLoadingText = By.cssSelector("div#content h3");
+    private static final By selectorStartButton        = By.cssSelector("div#start button");
+    private static final By selectorLoadingBar         = By.cssSelector("div#loading");
+    private static final By selectorExampleText        = By.cssSelector("div#finish h4");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DYNAMIC_LOADING.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Dynamically Loaded Page Elements' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DYNAMIC_LOADING.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns information if dynamic loading message is visible or not.
+     *
+     * @return true if dynamic loading message was found on web page.
+     */
+    public boolean isDynamicLoadingMessageVisible() {
+        return getDriver().findElementDynamic(selectorDynamicLoadingText)
+                .isDisplayed();
+    }
+
+    /**
+     * Clicks Example 1 link.
+     */
+    public void clickExampleOneLink() {
+        getDriver().findElementDynamic(selectorExampleOneLink)
+                .click();
+    }
+
+    /**
+     * Clicks Example 2 link.
+     */
+    public void clickExampleTwoLink() {
+        getDriver().findElementDynamic(selectorExampleTwoLink)
+                .click();
+    }
+
+    /**
+     * Returns information if Start button is visible or not.
+     *
+     * @return true if Start button was found on web page.
+     */
+    public boolean isStartButtonVisible() {
+        return getDriver().findElementDynamic(selectorStartButton)
+                .isDisplayed();
+    }
+
+    /**
+     * Clicks Start button.
+     */
+    public void clickStartButton() {
+        getDriver().findElementDynamic(selectorStartButton)
+                .click();
+    }
+
+    /**
+     * Waits until WebElement representing waiting bar disappears and returns example text.
+     *
+     * @param waitTime The amount of time designated for waiting until waiting bar disappears.
+     * @return String representing example's text.
+     */
+    public String getExampleOneDynamicText(int waitTime) {
+        WebDriverWait wait = new WebDriverWait(getDriver(), waitTime);
+        // Example 1: the result element already exists in the DOM, so waiting for the
+        // loading bar to vanish is sufficient before reading the text.
+        wait.until((Function<? super WebDriver, Boolean>)
+                ExpectedConditions.invisibilityOfElementLocated(selectorLoadingBar));
+        return getDriver().findElementDynamic(selectorExampleText)
+                .getText();
+    }
+
+    /**
+     * Returns example text.
+     * <p>
+     * Waits until WebElement representing waiting bar disappear. Then waits until example text
+     * shows up.
+     * And after that returns example text.
+     * </p>
+     *
+     * @param waitTime The amount of time designated for waiting until waiting bar disappears and
+     * example text shows.
+     * @return String representing example's text.
+     */
+    public String getExampleTwoDynamicText(int waitTime) {
+        WebDriverWait wait = new WebDriverWait(getDriver(), waitTime);
+        // Example 2: the result element is created by script after loading finishes, so an
+        // additional visibility wait is required before reading the text.
+        wait.until((Function<? super WebDriver, Boolean>)
+                ExpectedConditions.invisibilityOfElementLocated(selectorLoadingBar));
+        wait.until((Function<? super WebDriver, WebElement>)
+                ExpectedConditions.visibilityOfElementLocated(selectorExampleText));
+        return getDriver().findElementDynamic(selectorExampleText)
+                .getText();
+    }
+
+}
+
+
+
+
+== WebDriverWait +
+

This class performs waiting for actions using Selenium Web Driver:

+
+
+
    +
  • +

    WebDriverWait(WebDriver driver, long timeOutInSeconds) - constructor, first parameter takes WebDriver, in a second you can specify a timeout in seconds. +FluentWait method:

    +
  • +
  • +

    until(Function<? super T, V> isTrue) - waits until condition function given as parameter returns expected value. If waiting time reaches timeout, it throws timeoutException.

    +
  • +
+
+
+

MrChecker implements various condition functions in the ExpectedConditions class :

+
+
+
    +
  • +

    visibilityOfElementLocated(By selector) - returns WebElement if it’s visible

    +
  • +
  • +

    invisibilityOfElementLocated(By selector) - returns true if Element under given selector is invisible

    +
  • +
+
+
+

WebDriver also has methods which wait for some conditions:

+
+
+
    +
  • +

    waitForElement(By selector)

    +
  • +
  • +

    waitForElementVisible(By selector)

    +
  • +
  • +

    waitUntilElementClickable(By selector)

    +
  • +
+
+
+

It’s possible to write your own condition function e.g.:

+
+
+
+
  public static ExpectedCondition<Boolean> invisibilityOfElementLocated(final By locator) {
+    // Custom wait condition: true when the element is missing, stale, or not displayed.
+    return new ExpectedCondition<Boolean>() {
+      @Override
+      public Boolean apply(WebDriver driver) {
+        try {
+          return !(findElement(locator, driver).isDisplayed());
+        } catch (NoSuchElementException e) {
+          // An element absent from the DOM counts as invisible.
+          return true;
+        } catch (StaleElementReferenceException e) {
+          // An element removed between lookup and isDisplayed() also counts as invisible.
+          return true;
+        }
+      }
+    };
+  }
+
+
+
+

Or as a lambda expression:

+
+
+
+
        WebDriverWait wait = new WebDriverWait(getDriver(), waitTime);
+        wait.until((WebDriver driver) -> {
+            try {
+                return !(driver.findElement(selectorExampleText)
+                        .isDisplayed());
+            } catch (NoSuchElementException e) {
+                return true;
+            } catch (StaleElementReferenceException e) {
+                return true;
+            }
+        });
+
+
+
+
+== Test Class +
+

Case 1 steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Dynamic Loading link and go to a subpage with examples

    +
  4. +
  5. +

    Check if the page is loaded and "Dynamically Loaded Page Elements" header is visible

    +
  6. +
  7. +

    Click Example 1 link and load site

    +
  8. +
  9. +

    Verify if the "Start" button is visible

    +
  10. +
  11. +

    Click "Start"

    +
  12. +
  13. +

    Wait for the loading bar to disappear and check if the displayed message is as it should be

    +
  14. +
  15. +

    Go back to Dynamic Loading page

    +
  16. +
+
+
+

Case 2 steps:

+
+
+
    +
  1. +

    Check if the page is loaded and "Dynamically Loaded Page Elements" header is visible

    +
  2. +
  3. +

    Click Example 2 link and load site

    +
  4. +
  5. +

    Verify if the "Start" button is visible

    +
  6. +
  7. +

    Click "Start"

    +
  8. +
  9. +

    Wait for the loading bar to disappear

    +
  10. +
  11. +

    Wait for the message to appear and check if it is as it should be

    +
  12. +
  13. +

    Go back to Dynamic Loading page

    +
  14. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class DynamicLoadingTest extends TheInternetBaseTest {
+
+    private static final int    EXAMPLE_WAITING_TIME = 30;
+    private static final String EXAMPLE_TEXT         = "Hello World!";
+
+    private static DynamicLoadingPage dynamicLoadingPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        dynamicLoadingPage = shouldTheInternetPageBeOpened().clickDynamicLoadingLink();
+    }
+
+    // Overrides the base-class hook — presumably run before each test; confirm in TheInternetBaseTest.
+    @Override
+    public void setUp() {
+
+        logStep("Verify if Dynamic Loading page is opened");
+        assertTrue("Unable to open Dynamic Loading page", dynamicLoadingPage.isLoaded());
+
+        logStep("Verify if dynamic loading message is visible");
+        assertTrue("Dynamic loading message is invisible",
+                dynamicLoadingPage.isDynamicLoadingMessageVisible());
+    }
+
+    // NOTE(review): "Ater" is a typo for "After"; left unchanged to keep the public test name stable.
+    @Test
+    public void shouldExampleTextBeDisplayedAterRunExampleOne() {
+        logStep("Click Example 1 link");
+        dynamicLoadingPage.clickExampleOneLink();
+
+        logStep("Verify if Example 1 link opened content");
+        assertTrue("Fail to load Example 1 content", dynamicLoadingPage.isStartButtonVisible());
+
+        logStep("Click Start button");
+        dynamicLoadingPage.clickStartButton();
+
+        logStep("Verify if expected text is displayed on the screen");
+        assertEquals("Fail to display example text", EXAMPLE_TEXT,
+                dynamicLoadingPage.getExampleOneDynamicText(EXAMPLE_WAITING_TIME));
+    }
+
+    // NOTE(review): "Ater" is a typo for "After"; left unchanged to keep the public test name stable.
+    @Test
+    public void shouldExampleTextBeDisplayedAterRunExampleTwo() {
+        logStep("Click Example 2 link");
+        dynamicLoadingPage.clickExampleTwoLink();
+
+        logStep("Verify if Example 2 link opened content");
+        assertTrue("Fail to load Example 2 content", dynamicLoadingPage.isStartButtonVisible());
+
+        logStep("Click Start button");
+        dynamicLoadingPage.clickStartButton();
+
+        logStep("Verify if expected text is displayed on the screen");
+        assertEquals("Fail to display example text", EXAMPLE_TEXT,
+                dynamicLoadingPage.getExampleTwoDynamicText(EXAMPLE_WAITING_TIME));
+    }
+
+    // Overrides the base-class hook — presumably run after each test; confirm in TheInternetBaseTest.
+    @Override
+    public void tearDown() {
+        logStep("Click back to reset Dynamic Loading page");
+        BasePage.navigateBack();
+    }
+
+}
+
+
+
+

Perform both cases running Test Class as JUnit Test.

+
+
+

WARNING: In this example, a visible loading bar signals that content is loading. On many websites, elements are created by scripts without any clear indication. This may cause problems with test stability. When your tests aren’t finding page elements, try adding wait functions with a short timeout.

+
+
+
+example32 +
+
+
+

This case shows how to perform mouse actions and test modal windows.

+
+
+

After you move the mouse cursor out of the website, you should see a new window appearing:

+
+
+
+example33 +
+
+
+

Check in the browser’s developer mode if this window exists in Page DOM

+
+
+
+example34 +
+
+
+

Before you move the mouse out, the window exists, but it’s not displayed.

+
+
+

When the mouse is moved, JavaScript changes display attribute. It also hides window after clicking "Close".

+
+
+
+example35 +
+
+
+
+== Page Class +
+
+
public class ExitIntentPage extends BasePage {
+
+    private static final String MODAL_WINDOW_HIDDEN           = "display: none;";
+    private static final String MODAL_WINDOW_DISPLAYED        = "display: block;";
+    private static final String MODAL_WINDOW_STYLE_ATTRIBUTTE = "style";
+
+    private static final By selectorModalWindow            = By.cssSelector("div#ouibounce-modal");
+    private static final By selectorExitIntentText         = By.cssSelector("div#content h3");
+    private static final By selectorModalWindowTitle       = By.cssSelector("h3");
+    private static final By selectorModalWindowCloseButton = By.cssSelector("div.modal-footer > p");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.EXIT_INTENT.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Exit Intent' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.EXIT_INTENT.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Returns information if exit intent message is visible or not.
+     *
+     * @return true if exit intent message was found on web page.
+     */
+    public boolean isIntentMessageVisible() {
+        return getDriver().findElementDynamic(selectorExitIntentText)
+                .isDisplayed();
+    }
+
+    /**
+     * Returns information if modal window is hidden.
+     *
+     * @return true if modal window is hidden.
+     */
+    public boolean isModalWindowHidden() {
+        return getDriver().findElementDynamic(selectorModalWindow)
+                .getAttribute(MODAL_WINDOW_STYLE_ATTRIBUTTE)
+                .equals(MODAL_WINDOW_HIDDEN);
+    }
+
+    /**
+     * Returns information if modal window is showed on web page.
+     *
+     * @return true if modal window is displayed.
+     */
+    public boolean isModalWindowVisible() {
+        return getDriver().findElementDynamic(selectorModalWindow)
+                .getAttribute(MODAL_WINDOW_STYLE_ATTRIBUTTE)
+                .equals(MODAL_WINDOW_DISPLAYED);
+    }
+
+    /**
+     * Returns information if modal window title is shown and correct.
+     *
+     * @param expectedValue String representing expected value of modal window's title.
+     * @return true if modal window's title is equal to expected value.
+     */
+    public boolean verifyModalWindowTitle(String expectedValue) {
+        return getDriver().elementLabel(new ByChained(selectorModalWindow,
+                selectorModalWindowTitle))
+                .getText()
+                .equals(expectedValue);
+    }
+
+    /**
+     * Closes modal window by pressing 'close' button.
+     */
+    public void closeModalWindow() {
+        getDriver().elementButton(new ByChained(selectorModalWindow,
+                selectorModalWindowCloseButton))
+                .click();
+    }
+
+    /**
+     * Moves mouse pointer to the top middle of screen, then to the centre of screen and
+     * again to the top.
+     * <p>
+     * This move simulates leaving the viewport and encourages the modal to show up. There is
+     * java.awt.Robot used
+     * to move mouse pointer out of the viewport. There are timeouts used to let the browser detect
+     * mouse move.
+     * </p>
+     *
+     * @see java.awt.Robot
+     */
+    public void moveMouseOutOfViewport() {
+        Robot robot;
+        Dimension screenSize = getDriver().manage()
+                .window()
+                .getSize();
+        int halfWidth = new BigDecimal(screenSize.getWidth() / 2).intValue();
+        int halfHeight = new BigDecimal(screenSize.getHeight() / 2).intValue();
+
+        try {
+            robot = new Robot();
+            robot.mouseMove(halfWidth, 1);
+            getDriver().manage()
+                    .timeouts()
+                    .implicitlyWait(1, TimeUnit.SECONDS);
+            robot.mouseMove(halfWidth, halfHeight);
+            getDriver().manage()
+                    .timeouts()
+                    .implicitlyWait(1, TimeUnit.SECONDS);
+            robot.mouseMove(halfWidth, 1);
+        } catch (AWTException e) {
+            BFLogger.logError("Unable to connect with remote mouse");
+            e.printStackTrace();
+        }
+    }
+}
+
+
+
+
+== Attributes +
+

Elements on pages have attributes like "id", "class", "name", "style" etc. In order to check them, use the method getAttribute(String name). In this case, the attribute "style" determines whether the element is displayed.

+
+
+
+== Robot +
+

Robot class can perform mouse movement. Method mouseMove(int x, int y) moves the remote mouse to given coordinates.

+
+
+
+== Manage Timeouts +
+

manage().timeouts() methods allow you to change WebDriver timeout values such as:

+
+
+
    +
  • +

    pageLoadTimeout(long time, TimeUnit unit) - the amount of time to wait for a page to load before throwing an exception

    +
  • +
  • +

    setScriptTimeout(long time, TimeUnit unit) - the amount of time to wait for finish execution of a script before throwing an exception

    +
  • +
  • +

    implicitlyWait(long time, TimeUnit unit) - the amount of time the driver should wait when searching for an element if it is not immediately present. After that time, it throws an exception.

    +
  • +
+
+
+

Changing timeouts can improve test stability but can also make them run slower.

+
+
+
+== Test Class +
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click Exit Intent link and load subpage

    +
  4. +
  5. +

    Check if the page is loaded and "Exit Intent" message is visible

    +
  6. +
  7. +

    Verify if Modal Window is hidden

    +
  8. +
  9. +

    Move mouse out of the viewport

    +
  10. +
  11. +

    Check if Modal Window is visible

    +
  12. +
  13. +

    Verify if Modal Window title is correct

    +
  14. +
  15. +

    Click 'close' button

    +
  16. +
  17. +

    Again verify if Modal Window is hidden

    +
  18. +
+
+
+
+
@Category({ TestsLocal.class, TestsNONParallel.class })
+public class ExitIntentTest extends TheInternetBaseTest {
+
+    private static final String MODAL_WINDOW_TITLE = "This is a modal window";
+
+    private static ExitIntentPage exitIntentPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        exitIntentPage = shouldTheInternetPageBeOpened().clickExitIntentLink();
+
+        logStep("Verify if Exit Intent page is opened");
+        assertTrue("Unable to open Exit Intent page", exitIntentPage.isLoaded());
+
+        logStep("Verify if exit intent message is visible");
+        assertTrue("Exit intent message is not visible", exitIntentPage.isIntentMessageVisible());
+    }
+
+    @Test
+    public void shouldModalWindowAppearWhenMouseMovedOutOfViewportTest() {
+
+        logStep("Verify if modal window is hidden");
+        assertTrue("Fail to hide modal window", exitIntentPage.isModalWindowHidden());
+
+        logStep("Move mouse pointer out of viewport");
+        exitIntentPage.moveMouseOutOfViewport();
+
+        logStep("Verify if modal window showed up");
+        assertTrue("Fail to show up modal window", exitIntentPage.isModalWindowVisible());
+
+        logStep("Verify if modal window title displays properly");
+        assertTrue("Fail to display modal window's title",
+                exitIntentPage.verifyModalWindowTitle(MODAL_WINDOW_TITLE.toUpperCase()));
+
+        logStep("Close modal window");
+        exitIntentPage.closeModalWindow();
+
+        logStep("Verify if modal window is hidden again");
+        assertTrue("Fail to hide modal window", exitIntentPage.isModalWindowHidden());
+    }
+}
+
+
+
+

Remember not to move mouse manually during test execution.

+
+
+
+example36 +
+
+
+

This example shows how to check if file downloads properly.

+
+
+

After clicking on one of these links, a specific file should be downloaded to your computer.

+
+
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Click on the File Download link and open subpage

    +
  4. +
  5. +

    Click on "some-file.txt" download link and download file

    +
  6. +
  7. +

    Check if the file exists in the appropriate folder

    +
  8. +
  9. +

    Delete the file

    +
  10. +
  11. +

    Check if the file doesn’t exist in the folder

    +
  12. +
+
+
+
+== Page Class +
+
+
public class FileDownloadPage extends BasePage {
+
+    private static final By selectorSomeFileTxt = By.cssSelector("a[href*=some-file]");
+
+    private final String DOWNLOAD_DIR = System.getProperty("java.io.tmpdir");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.DOWNLOAD.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'File Downloader' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.DOWNLOAD.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Verifies if the chosen file is already downloaded and if not, downloads it .
+     * Throws RuntimeException otherwise.
+     *
+     * @return Downloaded file
+     */
+    public File downloadTextFile() {
+        String nameOfDownloadFile = getNameOfDownloadFile();
+        File fileToDownload = new File(DOWNLOAD_DIR + nameOfDownloadFile);
+
+        if (fileToDownload.exists()) {
+            throw new RuntimeException("The file that you want to download already exists. "
+                    + "Please remove it manually. Path to the file: " + fileToDownload.getPath());
+        }
+
+        getDriver().elementButton(selectorSomeFileTxt)
+                .click();
+
+        waitForFileDownload(2000, fileToDownload);
+        return fileToDownload;
+    }
+
+    private void waitForFileDownload(int totalTimeoutInMillis, File expectedFile) {
+        FluentWait<WebDriver> wait = new FluentWait<WebDriver>(getDriver())
+                .withTimeout(totalTimeoutInMillis, TimeUnit.MILLISECONDS)
+                .pollingEvery(200, TimeUnit.MILLISECONDS);
+
+        wait.until((WebDriver wd) -> expectedFile.exists());
+    }
+
+    private String getNameOfDownloadFile() {
+        String urlToDownload = getDriver().findElementDynamic(selectorSomeFileTxt)
+                .getAttribute("href");
+        String[] urlHierachy = urlToDownload.split("/");
+        return urlHierachy[urlHierachy.length - 1];
+    }
+}
+
+
+
+

Use FluentWait class and create an expected condition using a lambda expression to wait until the file downloads.

+
+
+

To perform operations on files, use the Java File class. To get the file name, extract it from the download URL.

+
+
+
+== Test Class +
+
+
@Category({ TestsLocal.class, TestsNONParallel.class })
+public class FileDownloadTest extends TheInternetBaseTest {
+
+    private static FileDownloadPage fileDownloadPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        fileDownloadPage = shouldTheInternetPageBeOpened().clickFileDownloadLink();
+
+        logStep("Verify if File Download page is opened");
+        assertTrue("Unable to open File Download page", fileDownloadPage.isLoaded());
+    }
+
+    @Test
+    public void shouldfileBeDownloaded() {
+
+        logStep("Download the some-file.txt");
+        File downloadedFile = fileDownloadPage.downloadTextFile();
+
+        logStep("Verify if downloaded file exists");
+        assertTrue("Downloaded file does not exist", downloadedFile.exists());
+
+        logStep("Remove downloaded file");
+        downloadedFile.delete();
+
+        logStep("Verify if downloaded file has been removed");
+        assertFalse("Downloaded file still exists", downloadedFile.exists());
+    }
+}
+
+
+
+
+example37 +
+
+
+

This case shows how to pass through the standard authentication page.

+
+
+

When you enter the correct credentials, you should see the next page:

+
+
+
+example38 +
+
+
+

If user data is wrong, an appropriate message appears:

+
+
+
+example39 +
+
+
+
+== Page Class +
+
+
public class FormAuthenticationPage extends BasePage {
+
+    private final static By selectorInputUsername     = By.cssSelector("#username");
+    private final static By selectorInputUserPassword = By.cssSelector("#password");
+    private final static By selectorLoginMessage      = By.cssSelector("#flash");
+    private final static By selectorLoginButton       = By.cssSelector("#login > button > i");
+    private final static By selectorLogoutButton      = By.cssSelector("#content > div > a ");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.LOGIN.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Login Page' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() + PageSubURLsProjectYEnum.LOGIN.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Sets user name to designated form's field.
+     *
+     * @param username String representing a user's name
+     * @return FormAuthenticationPage object with user name set to the given one
+     */
+    public FormAuthenticationPage setUsername(String username) {
+        InputTextElement elementInputUsername = new InputTextElement(selectorInputUsername);
+        elementInputUsername.clearInputText();
+        elementInputUsername.setInputText(username);
+        return this;
+    }
+
+    /**
+     * Sets user password to designated form's field.
+     *
+     * @param userPassword String representing a user's password
+     * @return FormAuthenticationPage object with user's password set to the given one
+     */
+    public FormAuthenticationPage setUserPassword(String userPassword) {
+        InputTextElement elementInputPassword = new InputTextElement(selectorInputUserPassword);
+        elementInputPassword.clearInputText();
+        elementInputPassword.setInputText(userPassword);
+        return this;
+    }
+
+    /**
+     * Returns login message.
+     *
+     * @return String object representing the message returned after login operation is performed
+     */
+    public String getLoginMessageText() {
+        return new LabelElement(selectorLoginMessage).getText();
+    }
+
+    /**
+     * Clicks 'Login' button.
+     */
+    public void clickLoginButton() {
+        new Button(selectorLoginButton).click();
+    }
+
+    /**
+     * Clicks 'Logout' button.
+     */
+    public void clickLogoutButton() {
+        new Button(selectorLogoutButton).click();
+    }
+}
+
+
+
+
+== == InputTextElement +
+

Use methods from this class to perform actions on text fields:

+
+
+
    +
  • +

    clearInputText() - remove all text from selected input field

    +
  • +
  • +

    setInputText(String text) - enter given text

    +
  • +
+
+
+
+== == LabelElement +
+
    +
  • +

    String getText() method returns visible text from label

    +
  • +
+
+
+
+== TestClass +
+

Prepare six test cases:

+
+
+
    +
  1. +

    Try to login with empty user data and check if the error message appears

    +
  2. +
  3. +

    Try to login with empty username and valid password and check if the error message appears

    +
  4. +
  5. +

    Try to login with a valid username and empty password and check if the error message appears

    +
  6. +
  7. +

    Try to login with invalid username and invalid password and check if the error message appears

    +
  8. +
  9. +

    Try to login with a valid username and valid password and check if success login message appears, then log out

    +
  10. +
  11. +

    Try to login with a valid username and valid password and check if success login message appears, then log out and check if success logout message is displayed

    +
  12. +
+
+
+

Before all tests: Open The Internet Main Page

+
+
+

Before each case: Click on the Form Authentication link and open login page

+
+
+

After each case: Go back to The Internet Main Page

+
+
+
+
@Category({ TestsLocal.class, TestsNONParallel.class })
+public class FormAuthenticationTest extends TheInternetBaseTest {
+
+    private static FormAuthenticationPage formAuthenticationPage;
+
+    private String errorUsernameMessage = "Your username is invalid!\n" + "×";
+    private String errorPasswordMessage = "Your password is invalid!\n" + "×";
+    private String loginMessage         = "You logged into a secure area!\n" + "×";
+    private String logoutMessage        = "You logged out of the secure area!\n" + "×";
+    private String emptyUsername        = "";
+    private String emptyUserPassword    = "";
+    private String validUsername        = "tomsmith";
+    private String validPassword        = "SuperSecretPassword!";
+    private String randomUsername       = UUID.randomUUID()
+            .toString();
+    private String randomUserPassword   = UUID.randomUUID()
+            .toString();
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        logStep("Open the Url http://the-internet.herokuapp.com/");
+        theInternetPage = new TheInternetPage();
+        theInternetPage.load();
+
+        logStep("Verify if Url http://the-internet.herokuapp.com/ is opened");
+        assertTrue("Unable to load The Internet Page", theInternetPage.isLoaded());
+    }
+
+    @Override
+    public void setUp() {
+        logStep("Click subpage link");
+        formAuthenticationPage = theInternetPage.clickFormAuthenticationLink();
+
+        logStep("Verify if subpage is opened");
+        assertTrue("The Internet subpage: FormAuthenticationPage was not open", formAuthenticationPage.isLoaded());
+    }
+
+    @Test
+    public void shouldErrorMessageBeDisplayedWhenUserLogsWithEmptyData() {
+        logStep("Log user with empty username and password");
+        formAuthenticationPage.setUsername(emptyUsername)
+                .setUserPassword(emptyUserPassword)
+                .clickLoginButton();
+        assertEquals("Unexpectedly user logged in with empty data", errorUsernameMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Test
+    public void shouldErrorMessageBeDisplayedWhenUserLogsWithEmptyUsernameAndValidPassword() {
+        logStep("Log user with empty username and valid password");
+        formAuthenticationPage.setUsername(emptyUsername)
+                .setUserPassword(validPassword)
+                .clickLoginButton();
+        assertEquals("Unexpectedly user logged in with empty username", errorUsernameMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Test
+    public void shouldErrorMessageBeDisplayedWhenUserLogsWithValidUsernameAndEmptyPassword() {
+        logStep("Log user with valid username and empty password");
+        formAuthenticationPage.setUsername(validUsername)
+                .setUserPassword(emptyUserPassword)
+                .clickLoginButton();
+        assertEquals("Unexpectedly user logged in with empty password", errorPasswordMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Test
+    public void shouldErrorMessageBeDisplayedWhenUserLogsWithInvalidUsernameAndInvalidPassword() {
+        logStep("Log user with invalid username and invalid password");
+        formAuthenticationPage.setUsername(randomUsername)
+                .setUserPassword(randomUserPassword)
+                .clickLoginButton();
+        assertEquals("Unexpectedly user logged in with random credentials", errorUsernameMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Test
+    public void shouldUserLogInWithValidCredentials() {
+        logStep("Log user with valid username and valid password");
+        formAuthenticationPage.setUsername(validUsername)
+                .setUserPassword(validPassword)
+                .clickLoginButton();
+        assertEquals("Unable to login user with valid credentials", loginMessage,
+                formAuthenticationPage.getLoginMessageText());
+        logStep("Log out user");
+        formAuthenticationPage.clickLogoutButton();
+    }
+
+    @Test
+    public void shouldUserLogOutAfterProperLogInAndClickLogoutButon() {
+        logStep("Log user with valid username and valid password");
+        formAuthenticationPage.setUsername(validUsername)
+                .setUserPassword(validPassword)
+                .clickLoginButton();
+        assertEquals("Unable to login user with valid credentials", loginMessage,
+                formAuthenticationPage.getLoginMessageText());
+        logStep("Log out user");
+        formAuthenticationPage.clickLogoutButton();
+        assertEquals("User cannot log out after prper log in", logoutMessage,
+                formAuthenticationPage.getLoginMessageText());
+    }
+
+    @Override
+    public void tearDown() {
+        logStep("Navigate back to The-Internet page");
+        theInternetPage.load();
+    }
+}
+
+
+
+

After running Test Class, cases might be performed in a different order.

+
+
+
+example40 +
+
+
+

This example shows how to approach elements dynamically appearing after the user’s action.

+
+
+

Move the mouse over an image to see the additional label.

+
+
+
+example41 +
+
+
+

Labels exist in the page DOM all the time, but their display attributes change. In this case, there is no JavaScript involved — the elements' visibility is managed by CSS.

+
+
+
+example42 +
+
+
+
+== Page Class +
+
+
public class HoversPage extends BasePage {
+
+    private final static By selectorImages = By.cssSelector("div.figure > img");
+    private final static By selectorNames  = By.cssSelector("div.figcaption h5");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.HOVERS.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Hovers' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.HOVERS.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Moves mouse pointer over an image with given index.
+     *
+     * @param index An index of the picture, where mouse pointer should be moved
+     */
+    public void hoverOverAvatar(int index) {
+        Actions action = new Actions(getDriver());
+        WebElement avatarImage = getDriver().findElementDynamics(selectorImages)
+                .get(index);
+        action.moveToElement(avatarImage)
+                .perform();
+    }
+
+    /**
+     * Returns the information displayed under a picture with given index.
+     *
+     * @param index An index of the picture, where the information should be read
+     * @return String object representing picture's information
+     */
+    public String getAvatarsInformation(int index) {
+        return getDriver().findElementDynamics(selectorNames)
+                .get(index)
+                .getText();
+    }
+}
+
+
+
+
+== == Actions +
+

Actions class contains methods used to execute basic user actions such as mouse moving and clicking or keys sending. Action or actions series will be performed after calling perform() method.

+
+
+
+== Test Class +
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Go to Hovers page

    +
  4. +
  5. +

    Move mouse over random image

    +
  6. +
  7. +

    Check if displayed text is equal to expected.

    +
  8. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class HoversTest extends TheInternetBaseTest {
+
+    private static HoversPage    hoversPage;
+    private final String        names[]    = { "name: user1", "name: user2", "name: user3" };
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        hoversPage = shouldTheInternetPageBeOpened().clickHoversLink();
+
+        logStep("Verify if Hovers page is opened");
+        assertTrue("Unable to open Hovers page", hoversPage.isLoaded());
+    }
+
+    @Test
+    public void shouldProperInformationBeDisplayedWhenMousePointerHoveredOverRandomElement() {
+        logStep("Hover mouse pointer over random element");
+        int randomIndex = new Random().nextInt(names.length);
+        hoversPage.hoverOverAvatar(randomIndex);
+        assertEquals("Picture's information is different than expected", names[randomIndex],
+                hoversPage.getAvatarsInformation(randomIndex));
+    }
+}
+
+
+
+

Because in this case the tested content is being chosen randomly, each test run could check a different element.

+
+
+
+example43 +
+
+
+

This case shows how to test pop-up JS alerts.

+
+
+

After clicking one of the buttons, an adequate alert should appear.

+
+
+
+example44 +
+
+
+

Performed action will be displayed under "Result" label.

+
+
+

In developer mode, you can view JavaScript which creates alerts.

+
+
+
+example45 +
+
+
+
+== Page Class +
+
+
public class JavaScriptAlertsPage extends BasePage {
+
+    private static final By selectorAlertButton   = By.cssSelector("button[onclick*=jsAlert]");
+    private static final By selectorConfirmButton = By.cssSelector("button[onclick*=jsConfirm]");
+    private static final By selectorPromptButton  = By.cssSelector("button[onclick*=jsPrompt]");
+    private static final By resultLabelSelector   = By.cssSelector("p#result");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.JAVASCRIPT_ALERTS.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'JavaScript Alerts' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.JAVASCRIPT_ALERTS.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Clicks 'JS alert' button.
+     */
+    public void clickAlertButton() {
+        new Button(selectorAlertButton).click();
+        WebDriverWait wait = new WebDriverWait(getDriver(), 2);
+        wait.until(ExpectedConditions.alertIsPresent());
+    }
+
+    /**
+     * Clicks 'JS confirm' button.
+     */
+    public void clickConfirmButton() {
+        new Button(selectorConfirmButton).click();
+        WebDriverWait wait = new WebDriverWait(getDriver(), 2);
+        wait.until(ExpectedConditions.alertIsPresent());
+    }
+
+    /**
+     * Clicks 'JS prompt' button.
+     */
+    public void clickPromptButton() {
+        new Button(selectorPromptButton).click();
+        WebDriverWait wait = new WebDriverWait(getDriver(), 2);
+        wait.until(ExpectedConditions.alertIsPresent());
+    }
+
+    /**
+     * Returns message displayed by popup.
+     *
+     * @return String object representing message displayed by popup
+     */
+    public String readResultLabel() {
+        return new LabelElement(resultLabelSelector).getText();
+    }
+
+    /**
+     * Clicks alert's 'OK' button.
+     */
+    public void clickAlertAccept() {
+        getDriver().switchTo()
+                .alert()
+                .accept();
+    }
+
+    /**
+     * Clicks alert's 'Cancel' button.
+     */
+    public void clickAlertDismiss() {
+        getDriver().switchTo()
+                .alert()
+                .dismiss();
+    }
+
+    /**
+     * Types text into alert's text field.
+     *
+     * @param text String object sent into alert's text field
+     */
+    public void writeTextInAlert(String text) {
+        getDriver().switchTo()
+                .alert()
+                .sendKeys(text);
+    }
+}
+
+
+
+
+== == alert() +
+

Using switchTo() method you can change processed content. switchTo().alert() allows performing actions on appearing alerts such as accepting, dismissing or entering keys.

+
+
+
+== Test Class +
+

Before all tests: Open The Internet Main Page and go to JavaScript Alert page

+
+
+
    +
  1. +

    Click JS Alert button, accept alert and check if Result message returns performed an action

    +
  2. +
  3. +

    Click JS Confirm button, accept alert and check if Result message returns performed action

    +
  4. +
  5. +

    Click JS Confirm button, dismiss alert and check if Result message returns performed action

    +
  6. +
  7. +

    Click JS Prompt button, write random text, accept alert and check if Result message returns performed action with written text

    +
  8. +
  9. +

    Click JS Prompt button, dismiss the alert and check if Result message returns performed action

    +
  10. +
+
+
+

After each case: Refresh Page

+
+
+

After all tests: Navigate back to The Internet Main Page

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class JavaScriptAlertsTest extends TheInternetBaseTest {
+
+    private static JavaScriptAlertsPage javaScriptAlertsPage;
+
+    private final String jsAlertCofirmMessage    = "You successfuly clicked an alert";
+    private final String jsConfirmConfirmMessage = "You clicked: Ok";
+    private final String jsConfirmCancelMessage  = "You clicked: Cancel";
+    private final String jsPromptConfirmMessage  = "You entered: ";
+    private final String jsPromptCancelMessage   = "You entered: null";
+    private final String randomString            = "random";
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        javaScriptAlertsPage = shouldTheInternetPageBeOpened().clickJavaScriptAlertLink();
+
+        logStep("Verify if JavaScript Alerts page is opened");
+        assertTrue("Unable to open JavaScript Alerts page", javaScriptAlertsPage.isLoaded());
+    }
+
+    @AfterClass
+    public static void tearDownAfterClass() {
+        logStep("Navigate back to The-Internet page");
+        BasePage.navigateBack();
+    }
+
+    @Test
+    public void shouldJSAlertCloseWithProperMessageAfterPressOkButton() {
+        logStep("Click Alert button");
+        javaScriptAlertsPage.clickAlertButton();
+
+        logStep("Click 'OK' button on alert");
+        javaScriptAlertsPage.clickAlertAccept();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsAlertCofirmMessage, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Test
+    public void shouldJSConfirmCloseWithProperMessageAfterPressOkButton() {
+        logStep("Click Confirm button");
+        javaScriptAlertsPage.clickConfirmButton();
+
+        logStep("Click 'OK' button on alert");
+        javaScriptAlertsPage.clickAlertAccept();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsConfirmConfirmMessage, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Test
+    public void shouldJSConfirmCloseWithProperMessageAfterPressCancelButton() {
+        logStep("Click Confirm button");
+        javaScriptAlertsPage.clickConfirmButton();
+
+        logStep("Click 'Cancel' button on alert");
+        javaScriptAlertsPage.clickAlertDismiss();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsConfirmCancelMessage, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Test
+    public void shouldJSPromptCloseWithProperMessageAfterPressOKButton() {
+        logStep("Click Prompt button");
+        javaScriptAlertsPage.clickPromptButton();
+
+        logStep("Insert text to alert: " + randomString);
+        javaScriptAlertsPage.writeTextInAlert(randomString);
+
+        logStep("Click 'OK' button on alert");
+        javaScriptAlertsPage.clickAlertAccept();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsPromptConfirmMessage + randomString, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Test
+    public void shouldJSPromptCloseWithProperMessageAfterPressCancelButton() {
+        logStep("Click Prompt button");
+        javaScriptAlertsPage.clickPromptButton();
+
+        logStep("Click 'Cancel' button on alert");
+        javaScriptAlertsPage.clickAlertDismiss();
+
+        logStep("Verify returned message");
+        assertEquals("Incorrect message returned after click",
+                jsPromptCancelMessage, javaScriptAlertsPage.readResultLabel());
+    }
+
+    @Override
+    public void tearDown() {
+        logStep("Refresh JavaScriptAlersPage");
+        javaScriptAlertsPage.refreshPage();
+    }
+
+}
+
+
+
+
+example46 +
+
+
+

This simple case shows how to test key pressing

+
+
+

This site uses JavaScript to read the key pressed and display its value.

+
+
+
+example47 +
+
+
+
+== Page Class +
+
+
public class KeyPressesPage extends BasePage {
+
+    private static final By selectorResult = By.cssSelector("#result");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.KEY_PRESS.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Key Presses' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.KEY_PRESS.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Presses given keyboard key.
+     *
+     * @param keyToPress Key to be pressed on keyboard
+     */
+    public void pressKey(String keyToPress) {
+        getAction().sendKeys(keyToPress)
+                .perform();
+    }
+
+    /**
+     * Returns information from web page about pressed keyboard key.
+     *
+     * @return Information from web page about pressed key
+     */
+    public String getPressedKeyInformation() {
+        return getDriver().findElementDynamic(selectorResult)
+                .getText();
+    }
+}
+
+
+
+
+== Test Class +
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Go to Key Presses site

    +
  4. +
  5. +

    Press a key

    +
  6. +
  7. +

    Check if a displayed message contains the pressed key

    +
  8. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class KeyPressesTest extends TheInternetBaseTest {
+
+    private static KeyPressesPage keyPressesPage;
+
+    private final String keyToBePressed  = "Q";
+    private final String expectedMessage = "You entered: Q";
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        keyPressesPage = shouldTheInternetPageBeOpened().clickKeyPressesLink();
+
+        logStep("Verify if Key Presses page is opened");
+        assertTrue("Unable to open Key Presses page", keyPressesPage.isLoaded());
+    }
+
+    @Test
+    public void shouldWebsiteReturnInformationAboutPressedKey() {
+        logStep("Press a keyboard key");
+        keyPressesPage.pressKey(keyToBePressed);
+
+        logStep("Verify if website give valid information about pressed keyboard key");
+        assertEquals("Information about the pressed key is invalid", expectedMessage,
+                keyPressesPage.getPressedKeyInformation());
+    }
+}
+
+
+
+
+example48 +
+
+
+

This simple example shows how to operate on many browser tabs

+
+
+

When you click the link, a new website will be opened in the second tab.

+
+
+
+example49 +
+
+
+
+== Page Class +
+
+
public class MultipleWindowsPage extends BasePage {
+
+    private final static By selectorLink = By.cssSelector("#content > div > a");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.WINDOW.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Opening a new window' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.WINDOW.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Clicks 'click here' link.
+     *
+     * @return NewWindowPage object
+     */
+    public NewWindowPage clickHereLink() {
+        getDriver().findElementDynamic(selectorLink)
+                .click();
+        getDriver().waitForPageLoaded();
+        return new NewWindowPage();
+    }
+}
+
+
+
+

You also need a second page class for New Window Page. Implement only the required methods.

+
+
+
+
public class NewWindowPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.NEW_WINDOW.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'New window' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.NEW_WINDOW.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+}
+
+
+
+
+== Test Class +
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Go to Multiple Windows Page

    +
  4. +
  5. +

    Click the link

    +
  6. +
  7. +

    Check if a new page is opened in the second tab

    +
  8. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class MultipleWindowsTest extends TheInternetBaseTest {
+
+    private static MultipleWindowsPage    multipleWindowsPage;
+    private static NewWindowPage        newWindowPage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        multipleWindowsPage = shouldTheInternetPageBeOpened().clickmultipleWindowsLink();
+
+        logStep("Verify if Multiple Windows page is opened");
+        assertTrue("Unable to open Multiple Windows page", multipleWindowsPage.isLoaded());
+    }
+
+    @Test
+    public void verifyIfNewBrowserWindowOpen() {
+        logStep("Click 'Click here' link");
+        newWindowPage = multipleWindowsPage.clickHereLink();
+
+        logStep("Verify if 'New window page' is opened");
+        assertTrue("Unable to open a new browser window", newWindowPage.isLoaded());
+    }
+}
+
+
+
+
+example50 +
+
+
+

This simple case shows how to approach redirecting links.

+
+
+

After clicking on the link, you will be redirected to Status Codes Page.

+
+
+
+example51 +
+
+
+
+== Page Class + +
+
+== == Redirect Link Page +
+
+
public class RedirectLinkPage extends BasePage {
+
+    private static final By selectorRedirectHere = By.cssSelector("a#redirect");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.REDIRECT.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Redirection' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.REDIRECT.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Clicks 'Redirect here' link.
+     *
+     * @return StatusCodesHomePage object
+     */
+    public StatusCodesHomePage clickRedirectHereLink() {
+        new Button(selectorRedirectHere).click();
+        return new StatusCodesHomePage();
+    }
+}
+
+
+
+
+== == Status Codes Page +
+
+
public class StatusCodesHomePage extends BasePage {
+
+    private static final By selectorLink200Code = By.linkText("200");
+    private static final By selectorLink301Code = By.linkText("301");
+    private static final By selectorLink404Code = By.linkText("404");
+    private static final By selectorLink500Code = By.linkText("500");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.STATUS_CODES.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Status Codes' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.STATUS_CODES.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+}
+
+
+
+
+== Test Class +
+

Steps:

+
+
+
    +
  1. +

    Open The Internet Main Page

    +
  2. +
  3. +

    Go to Redirection Page

    +
  4. +
  5. +

    Click the link

    +
  6. +
  7. +

    Check if Status Codes Page is loaded

    +
  8. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class RedirectLinkTest extends TheInternetBaseTest {
+
+    private static RedirectLinkPage    redirectLinkPage;
+    private static StatusCodesHomePage statusCodesHomePage;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        redirectLinkPage = shouldTheInternetPageBeOpened().clickRedirectLink();
+
+        logStep("Verify if Redirect Link page is opened");
+        assertTrue("Unable to open Redirect Link page", redirectLinkPage.isLoaded());
+    }
+
+    @Test
+    public void shouldUserBeRedirectedToStatusCodePage() {
+        logStep("Click 'Redirect here' link");
+        statusCodesHomePage = redirectLinkPage.clickRedirectHereLink();
+
+        logStep("Verify redirection to Status Code page");
+        assertTrue("User hasn't been redirected to the expected website",
+                statusCodesHomePage.isLoaded());
+    }
+}
+
+
+
+
+example52 +
+
+
+

This case shows how to move horizontal slider.

+
+
+

You can move the slider by dragging it with a mouse or using arrow keys. The page uses a simple script to get slider position and display set value.

+
+
+
+example53 +
+
+
+
+== Page Class +
+
+
public class HorizontalSliderPage extends BasePage {
+
+    private static final By selectorHorizontalSlider = By.cssSelector("div.sliderContainer");
+    private static final By sliderSelector           = By.cssSelector("input");
+    private static final By valueSelector            = By.cssSelector("#range");
+
+    private HorizontalSliderElement horizontalSlider;
+
+    public HorizontalSliderPage() {
+        horizontalSlider = getDriver().elementHorizontalSlider(selectorHorizontalSlider,
+                sliderSelector, valueSelector, BigDecimal.ZERO, new BigDecimal(5),
+                new BigDecimal(0.5));
+    }
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.HORIZONTAL_SLIDER.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Horizontal Slider' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.HORIZONTAL_SLIDER.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Validates if WebElement representing horizontal slider is visible on the page.
+     *
+     * @return true if horizontal slider is visible, false otherwise.
+     */
+    public boolean isElementHorizontalSliderVisible() {
+        return getDriver().elementHorizontalSlider(selectorHorizontalSlider)
+                .isDisplayed();
+    }
+
+    /**
+     * Returns the value of slider's start position.
+     *
+     * @return BigDecimal representing the lowest possible value of slider.
+     */
+    public BigDecimal getStartPosition() {
+        return horizontalSlider.getMinRange();
+    }
+
+    /**
+     * Returns the value of slider's middle position.
+     *
+     * @return BigDecimal representing the average value between start and end position.
+     */
+    public BigDecimal getMiddlePosition() {
+        return horizontalSlider.getMaxRange()
+                .subtract(horizontalSlider.getMinRange())
+                .divide(new BigDecimal(2));
+    }
+
+    /**
+     * Returns the value of slider's end position.
+     *
+     * @return BigDecimal representing the highest possible value of slider.
+     */
+    public BigDecimal getEndPosition() {
+        return horizontalSlider.getMaxRange();
+    }
+
+    /**
+     * Returns current value of slider's position.
+     *
+     * @return BigDecimal representing current value of slider.
+     */
+    public BigDecimal getCurrentPosition() {
+        return horizontalSlider.getCurrentSliderValue();
+    }
+
+    /**
+     * Sets horizontal slider to a given position using one of the available methods: using keyboard
+     * or using mouse move.
+     *
+     * @param position
+     * @param method
+     */
+    public void setSliderPositionTo(BigDecimal position, int method) {
+        horizontalSlider.setSliderPositionTo(position, method);
+    }
+
+    /**
+     * Verifies the correctness of the given position value and rounds it when necessary.
+     *
+     * @param position
+     * @return Correct value of horizontal slider's position.
+     */
+    public BigDecimal verifyAndCorrectPositionValue(BigDecimal position) {
+        return horizontalSlider.verifyAndCorrectPositionValue(position);
+    }
+}
+
+
+
+
+== == Horizontal Slider Element +
+

This class implements methods which can perform actions on slider:

+
+
+

Create Slider Object using method:

+
+
+
    +
  • +

    getDriver().elementHorizontalSlider(By sliderContainerSelector, By sliderSelector, By valueSelector, BigDecimal minRange, BigDecimal maxRange, BigDecimal step)

    +
  • +
+
+
+

And use:

+
+
+
    +
  • +

    BigDecimal getMaxRange()

    +
  • +
  • +

    BigDecimal getMinRange()

    +
  • +
  • +

    BigDecimal getCurrentSliderValue()

    +
  • +
  • +

    setSliderPositionTo(BigDecimal position, int method) - moves slider to a given position. If the position is not valid, it changes it to the nearest proper value. Second parameter determinates movement method: 0 - Keyboard, 1 - Mouse

    +
  • +
  • +

    BigDecimal verifyAndCorrectPositionValue(BigDecimal position) - returns nearest correct position

    +
  • +
+
+
+
+== Test Class +
+

Before all tests: Open The Internet Main Page

+
+
+

Before each case:

+
+
+
    +
  1. +

    Go to Horizontal Slider Page

    +
  2. +
  3. +

    Check if the slider is visible

    +
  4. +
  5. +

    Save start, middle and end position

    +
  6. +
+
+
+

Case 1 - Moving with the keyboard:

+
+
+
    +
  1. +

    Move slider to start position, and check if the current position equals the beginning value

    +
  2. +
  3. +

    Move the slider to middle position, and check if the current position equals the middle value

    +
  4. +
  5. +

    Move slider to end position, and check if the current position equals the end value

    +
  6. +
  7. +

    Try to move slider before start position, and check if the current position equals the beginning value

    +
  8. +
  9. +

    Try to move slider after end position, and check if the current position equals the end value

    +
  10. +
  11. +

    Try to move the slider to an improperly defined position between start and middle, and check if the current position equals the corrected value

    +
  12. +
  13. +

    Try to move the slider to an improperly defined random position, and check if the current position equals the corrected value

    +
  14. +
  15. +

    Move the slider back to start position, and check if the current position equals the beginning value

    +
  16. +
+
+
+

Case 2 - Moving with a mouse: Repeat each Case 1 step using a mouse instead of keyboard

+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class SliderTest extends TheInternetBaseTest {
+
+    private static HorizontalSliderPage horizontalSliderPage;
+
+    BigDecimal startPosition;
+    BigDecimal middlePosition;
+    BigDecimal endPosition;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        logStep("Open the Url http://the-internet.herokuapp.com/");
+        theInternetPage = new TheInternetPage();
+        theInternetPage.load();
+
+        logStep("Verify if Url http://the-internet.herokuapp.com/ is opened");
+        assertTrue("Unable to load The Internet Page", theInternetPage.isLoaded());
+    }
+
+    @Override
+    public void setUp() {
+        logStep("Click Horizontal Slider link");
+        horizontalSliderPage = theInternetPage.clickHorizontalSliderLink();
+
+        logStep("Verify if Horizontal Slider page is opened");
+        assertTrue("Unable to load Horizontal Slider page", horizontalSliderPage.isLoaded());
+
+        logStep("Verify if horizontal slider element is visible");
+        assertTrue("Horizontal slider is not visible",
+                horizontalSliderPage.isElementHorizontalSliderVisible());
+
+        startPosition = horizontalSliderPage.getStartPosition();
+        middlePosition = horizontalSliderPage.getMiddlePosition();
+        endPosition = horizontalSliderPage.getEndPosition();
+    }
+
+    @Test
+    public void shouldHorizontalSliderMoveWhenKeyboardArrowButtonsArePressed() {
+        BigDecimal position;
+        logStep("Move slider to start position: " + startPosition);
+        horizontalSliderPage.setSliderPositionTo(startPosition, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider to middle position: " + middlePosition);
+        horizontalSliderPage.setSliderPositionTo(middlePosition, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(middlePosition),
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider to end position: " + endPosition);
+        horizontalSliderPage.setSliderPositionTo(endPosition, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", endPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = startPosition.subtract(BigDecimal.ONE);
+        logStep("Move slider to position before start position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = endPosition.add(BigDecimal.ONE);
+        logStep("Move slider to position after end position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", endPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = middlePosition.divide(new BigDecimal(2));
+        logStep("Move slider to improperly defined position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(position),
+                horizontalSliderPage.getCurrentPosition());
+
+        position = new BigDecimal(new BigInteger("233234"), 5);
+        logStep("Move slider to improperly defined random position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(position),
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider back to start position: " + startPosition);
+        horizontalSliderPage.setSliderPositionTo(startPosition, HorizontalSliderElement.KEYBOARD);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+    }
+
+    @Test
+    public void shouldHorizontalSliderMoveWhenMouseButtonIsPressedAndMouseIsMoving() {
+        BigDecimal position;
+        logStep("Move slider to start position: " + startPosition);
+        horizontalSliderPage.setSliderPositionTo(startPosition, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider to middle position: " + middlePosition);
+        horizontalSliderPage.setSliderPositionTo(middlePosition, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(middlePosition),
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider to end position: " + endPosition);
+        horizontalSliderPage.setSliderPositionTo(endPosition, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", endPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = startPosition.subtract(BigDecimal.ONE);
+        logStep("Move slider to position before start position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = endPosition.add(BigDecimal.ONE);
+        logStep("Move slider to position after end position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", endPosition,
+                horizontalSliderPage.getCurrentPosition());
+
+        position = middlePosition.divide(new BigDecimal(2));
+        logStep("Move slider to improperly defined position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(position),
+                horizontalSliderPage.getCurrentPosition());
+
+        position = new BigDecimal(new BigInteger("212348"), 5);
+        logStep("Move slider to improperly defined random position: " + position);
+        horizontalSliderPage.setSliderPositionTo(position, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position",
+                horizontalSliderPage.verifyAndCorrectPositionValue(position),
+                horizontalSliderPage.getCurrentPosition());
+
+        logStep("Move slider back to start position: " + startPosition);
+        horizontalSliderPage.setSliderPositionTo(startPosition, HorizontalSliderElement.MOUSE);
+        assertEquals("Fail to set horizontal sliders position", startPosition,
+                horizontalSliderPage.getCurrentPosition());
+    }
+}
+
+
+
+
+example54 +
+
+
+

This example shows how to sort and read data from tables.

+
+
+

After clicking on a column header, the data will be sorted descending and after another click sorted ascending by selected attribute. Watch how both tables' content changes on page DOM. Sorting is performed by JavaScript functions.

+
+
+
+example55 +
+
+
+
+== Page Class +
+
+
public class SortableDataTablesPage extends BasePage {
+
+    private static final By selectorTable  = By.cssSelector("table.tablesorter");
+    private static final By selectorHeader = By.cssSelector("th");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.SORTABLE_DATA_TABLES.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Data Tables' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.SORTABLE_DATA_TABLES.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Sorts data in given column using ascending order.
+     *
+     * @param columnNumber The number of column where data should be sorted
+     * @param tableNumber  The number of table where data should be sorted
+     */
+    public void sortColumnAscending(int columnNumber, int tableNumber) {
+        WebElement header = this.getTableHeaders(columnNumber, tableNumber);
+        String className = header.getAttribute("class");
+        if (className.contains("headerSortUp") || !className.contains("headerSortDown")) {
+            header.click();
+        }
+    }
+
+    /**
+     * Sorts data in given column using descending order.
+     *
+     * @param columnNumber The number of the column where data should be sorted
+     * @param tableNumber  The number of the table where data should be sorted
+     */
+    public void sortColumnDescending(int columnNumber, int tableNumber) {
+        WebElement header = this.getTableHeaders(columnNumber, tableNumber);
+        String className = header.getAttribute("class");
+        if (!className.contains("headerSortUp")) {
+            header.click();
+            if (!className.contains("headerSortDown")) {
+                header.click();
+            }
+        }
+    }
+
+    /**
+     * Return given column values from chosen table.
+     *
+     * @param columnNumber The number of the column the data should be retrieved from
+     * @param tableNumber  The number of the table the data should be retrieved from
+     * @return list of values from given column
+     */
+    public List<String> getColumnValues(int columnNumber, int tableNumber) {
+        WebElement table = getTable(tableNumber);
+        return JsoupHelper.findTexts(table, By.cssSelector("tr > td:nth-child(" + (columnNumber + 1)
+                + ")"));
+    }
+
+    /**
+     * Returns column's class name.
+     *
+     * @param columnNumber The number of the column to get class number from
+     * @param tableNumber  The number of the table to get column class name from
+     * @return String object representing column's class name
+     */
+    public String readColumnClass(int columnNumber, int tableNumber) {
+        return this.getTableHeaders(columnNumber, tableNumber)
+                .getAttribute("class");
+    }
+
+    private WebElement getTable(int tableNumber) {
+        return new ListElements(selectorTable).getList()
+                .get(tableNumber);
+    }
+
+    private WebElement getTableHeaders(int columnNumber, int tableNumber) {
+        return getTable(tableNumber).findElements(selectorHeader)
+                .get(columnNumber);
+    }
+}
+
+
+
+
+== == Finding values +
+

Using proper selectors, save elements such as tables and their columns' headers as Web Element Lists. Afterwards, you can get the desired element finding it by index (e. g. table or column number). To get column values, use JsoupHelper and to check if the column is sorted get its class attribute.

+
+
+
+== Test Class +
+

Before all tests: Open The Internet Main Page

+
+
+

Before each case: Go to Sortable Data Tables Page

+
+
+

Case 1:

+
+
+
    +
  1. +

    Choose a random table

    +
  2. +
  3. +

    Sort first column "Last Name" in ascending order

    +
  4. +
  5. +

    Check if column header class contains "headerSortDown"

    +
  6. +
  7. +

    Save column content to the List

    +
  8. +
  9. +

    Create List copy and sort it

    +
  10. +
  11. +

    Compare sorted values and values from the table

    +
  12. +
+
+
+

Case 2:

+
+
+
    +
  1. +

    Choose a random table

    +
  2. +
  3. +

    Sort second column "First Name" in descending order

    +
  4. +
  5. +

    Check if column header class contains "headerSortUp"

    +
  6. +
  7. +

    Save column content to the List

    +
  8. +
  9. +

    Create List copy and sort it then reverse it

    +
  10. +
  11. +

    Compare reversed sorted values and values from the table

    +
  12. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class SortableDataTablesTest extends TheInternetBaseTest {
+
+    private static SortableDataTablesPage sortableDataTablesPage;
+
+    private List<String> actualValues;
+    private List<String> expectedValues;
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        logStep("Open the Url http://the-internet.herokuapp.com/");
+        theInternetPage = new TheInternetPage();
+        theInternetPage.load();
+
+        logStep("Verify if Url http://the-internet.herokuapp.com/ is opened");
+        assertTrue("Unable to load The Internet Page", theInternetPage.isLoaded());
+    }
+
+    @Override
+    public void setUp() {
+        logStep("Click subpage link");
+        sortableDataTablesPage = theInternetPage.clickSortableDataTablesLink();
+
+        logStep("Verify if subpage is opened");
+        assertTrue("Unable to open Sortable Data Tables page", sortableDataTablesPage.isLoaded());
+    }
+
+    @Test
+    public void shouldLastNameColumnBeOrderedAscendingAfterSort() {
+        int columnNumber = 0;
+        int tableNumber = new Random().nextInt(2);
+
+        logStep("Sort 'Last Name' column");
+        sortableDataTablesPage.sortColumnAscending(columnNumber, tableNumber);
+        assertTrue("Unable to set ascending order for 'Last Name' column",
+                sortableDataTablesPage.readColumnClass(columnNumber, tableNumber)
+                        .contains("headerSortDown"));
+
+        logStep("Verify data order for 'Last Name' column");
+        actualValues = sortableDataTablesPage.getColumnValues(columnNumber, tableNumber);
+        expectedValues = new ArrayList<String>(actualValues);
+        Collections.sort(expectedValues);
+        assertEquals("'Last Name' column is not sorted in ascending order",
+                expectedValues, actualValues);
+    }
+
+    @Test
+    public void shouldFirstNameColumnBeOrderedDescendingAfterSort() {
+        int columnNumber = 1;
+        int tableNumber = new Random().nextInt(2);
+
+        logStep("Sort 'First Name' column");
+        sortableDataTablesPage.sortColumnDescending(columnNumber, tableNumber);
+        assertTrue("Unable to set descending order for 'First Name' column",
+                sortableDataTablesPage.readColumnClass(columnNumber, tableNumber)
+                        .contains("headerSortUp"));
+
+        logStep("Verify data order for 'First Name' column");
+        actualValues = sortableDataTablesPage.getColumnValues(columnNumber, tableNumber);
+        expectedValues = new ArrayList<String>(actualValues);
+        Collections.sort(expectedValues);
+        Collections.reverse(expectedValues);
+        assertEquals("'First Name' column is not sorted in descending order",
+                expectedValues, actualValues);
+    }
+}
+
+
+
+
+example56 +
+
+
+

This example shows how to process HTTP status codes returned by a page

+
+
+

When you click status code link, you will be redirected to the subpage which returns the proper HTTP status code. In order to check what code was returned:

+
+
+
    +
  1. +

    Open developer tools

    +
  2. +
  3. +

    Go to Network tab

    +
  4. +
  5. +

    Click request name

    +
  6. +
  7. +

    Find a code number in Headers section

    +
  8. +
+
+
+
+example57 +
+
+
+
+== Page Class +
+

Add new methods to existing Status Codes Home Page Class

+
+
+
+
public class StatusCodesHomePage extends BasePage {
+
+    private static final By selectorLink200Code = By.linkText("200");
+    private static final By selectorLink301Code = By.linkText("301");
+    private static final By selectorLink404Code = By.linkText("404");
+    private static final By selectorLink500Code = By.linkText("500");
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.STATUS_CODES.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Status Codes' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.STATUS_CODES.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    /**
+     * Verifies if given link is displayed.
+     *
+     * @param selector Selector of the given link
+     * @return true if link is displayed
+     */
+    public boolean isLinkCodeDisplayed(By selector) {
+        return getDriver().findElementDynamic(selector)
+                .isDisplayed();
+
+    }
+
+    /**
+     * Clicks '200' link.
+     *
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCode200Link() {
+        return clickCodeLink(selectorLink200Code);
+    }
+
+    /**
+     * Clicks '301' link.
+     *
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCode301Link() {
+        return clickCodeLink(selectorLink301Code);
+    }
+
+    /**
+     * Clicks '404' link.
+     *
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCode404Link() {
+        return clickCodeLink(selectorLink404Code);
+    }
+
+    /**
+     * Clicks '500' link.
+     *
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCode500Link() {
+        return clickCodeLink(selectorLink500Code);
+    }
+
+    /**
+     * Clicks code link according to given code number.
+     *
+     * @param code Given code
+     * @return StatusCodesCodePage object
+     */
+    public StatusCodesCodePage clickCodeLink(String code) {
+        return clickCodeLink(By.linkText(code));
+    }
+
+    private StatusCodesCodePage clickCodeLink(By selector) {
+        String codeNumber = getCodeNumberToCheck(selector);
+        getDriver().findElementDynamic(selector)
+                .click();
+        return new StatusCodesCodePage(codeNumber);
+    }
+
+    private String getCodeNumberToCheck(By selector) {
+        return getDriver().findElementDynamic(selector)
+                .getText();
+    }
+}
+
+
+
+

Create a page class for status codes subpages as well. In the class constructor specify which code number should be returned.

+
+
+
+
public class StatusCodesCodePage extends BasePage {
+
+    private static final By selectorDisplayedText   = By.cssSelector("#content > div > p");
+    private static final By selectorLinkToCodesPage = By.cssSelector("#content > div > p > a");
+
+    private String codeNumber;
+
+    public StatusCodesCodePage(String codeNumber) {
+        this.codeNumber = codeNumber;
+    }
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded();
+        return getDriver().getCurrentUrl()
+                .contains(PageSubURLsProjectYEnum.STATUS_CODES.getValue() + '/');
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'Status Codes' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.STATUS_CODES.getValue() + '/' + codeNumber);
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+
+    public String getCodeNumber() {
+        return codeNumber;
+    }
+
+    /**
+     * Verifies if page is loaded with given code number.
+     *
+     * @param codeNumber Expected code number
+     * @return true if expected code number is loaded with web page
+     */
+    public boolean isLoadedWithStatusCode(String codeNumber) {
+        return getDriver().getCurrentUrl()
+                .equals(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue() +
+                PageSubURLsProjectYEnum.STATUS_CODES.getValue() + "/" + codeNumber);
+    }
+
+    /**
+     * Returns displayed code number.
+     * <p>
+     * Code number is retrieved from following text displayed on the page:<b>
+     * 'This page returned a *** status code.', where *** represent the code number to be
+     * returned.
+     * </p>
+     *
+     * @return String object representing the displayed code number retrieved from specific sentence.
+     */
+    public String getDisplayedCodeNumber() {
+        return getDriver().findElementDynamic(selectorDisplayedText)
+                .getText()
+                .substring(21, 24);
+    }
+
+    /**
+     * Clicks link to return to 'Code Page'.
+     *
+     * @return StatusCodesHomePage object
+     */
+    public StatusCodesHomePage clickLinkToCodePage() {
+        getDriver().findElementDynamic(selectorLinkToCodesPage)
+                .click();
+        return new StatusCodesHomePage();
+    }
+}
+
+
+
+
+== Test Class +
+

Before all tests: Open The Internet Main Page, go to Status Codes page

+
+
+

Steps:

+
+
+

For each status code

+
+
+
    +
  1. +

    Click code link

    +
  2. +
  3. +

    Check if the page is loaded with an expected code number

    +
  4. +
  5. +

    Check if the displayed code number equals the expected number

    +
  6. +
  7. +

    Go back to Status Codes Home Page

    +
  8. +
+
+
+
+
@Category({ TestsSelenium.class, TestsChrome.class, TestsFirefox.class, TestsIE.class })
+public class StatusCodeTest extends TheInternetBaseTest {
+
+    private static StatusCodesHomePage statusCodesHomePage;
+    private        StatusCodesCodePage statusCodesCodePage;
+
+    private String[] codes = { "200", "301", "404", "500" };
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        statusCodesHomePage = shouldTheInternetPageBeOpened().clickStatusCodesLink();
+
+        logStep("Verify if Status Codes Home page is opened");
+        assertTrue("Unable to open Status Codes Home page", statusCodesHomePage.isLoaded());
+    }
+
+    @Test
+    public void shouldProperCodeBeDisplayedAfterClickCodeLink() {
+
+        for (String code : codes) {
+            logStep("Click link to " + code + " code");
+            statusCodesCodePage = statusCodesHomePage.clickCodeLink(code);
+
+            logStep("Verify if proper web page corresponding to the code is opened");
+            assertTrue("Unable to open proper web page",
+                    statusCodesCodePage.isLoadedWithStatusCode(code));
+
+            logStep("Verify if the displayed code is equal to the expected one");
+            assertEquals(code, statusCodesCodePage.getDisplayedCodeNumber());
+
+            logStep("Click link to come back to 'Status Codes' page");
+            statusCodesCodePage.clickLinkToCodePage();
+        }
+    }
+}
+
+
+
+
+
+
+

== First Steps

+
+
+Page Object +
+

Your Product Under Test will be the following website: http://the-internet.herokuapp.com/

+
+
+

At first, create an Object to represent The Internet Main Page:

+
+
+
+
public class TheInternetPage extends BasePage
+
+
+
+

Each class which extends BasePage class must override three methods:

+
+
+
    +
  • +

    public boolean isLoaded() - returns true if the page is loaded and false if not

    +
  • +
  • +

    public void load() - loads the page

    +
  • +
  • +

    public String pageTitle() - returns page title

    +
  • +
+
+
+
+
public class TheInternetPage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        BFLogger.logDebug("The internet page is loaded: " + getDriver().getCurrentUrl());
+        return getDriver().getCurrentUrl()
+                .equals(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue());
+    }
+
+    @Override
+    public void load() {
+        BFLogger.logDebug("Load 'The internet' page.");
+        getDriver().get(GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue());
+        getDriver().waitForPageLoaded();
+    }
+
+    @Override
+    public String pageTitle() {
+        return getActualPageTitle();
+    }
+}
+
+
+
+
+Environment Variables +
+

In Page classes, when you load/start a web page, it is not recommended to save a fixed main URL.

+
+
+

Instead of hardcoded main URL variable, you build your Page class with a dynamic variable.

+
+
+
+How to create / update system environment +
+

Dynamic variable values are stored under path \src\resources\enviroments\environments.csv.

+
+
+
+image01 +
+
+
+

By default, the environment takes value from DEV column.

+
+
+
+== Access to the external file variables +
+

Create a class GetEnvironmentParam to map values from an external file with Page class:

+
+
+
+
public enum GetEnvironmentParam {
+
+    // Name of enum must be in line with cell name in /src/resources/environments/environment.csv
+    WWW_FONT_URL,
+    TOOLS_QA,
+    WEB_SERVICE,
+    THE_INTERNET_MAIN_PAGE,
+    ELEMENTAL_SELENIUM_PAGE;
+
+    public String getValue() {
+
+        if (null ==  BaseTest.getEnvironmentService()) {
+            throw new BFInputDataException("Environment Parameters class wasn't initialized properly");
+        }
+
+        return BaseTest.getEnvironmentService()
+                .getValue(this.name());
+
+    }
+
+    @Override
+    public String toString() {
+
+        return this.getValue();
+
+    }
+}
+
+
+
+

When you add a new row to environments.csv also add a new variable to GetEnvironmentParam class.

+
+
+

In Page class access environmental variable using this method:

+
+
+
+
GetEnvironmentParam.THE_INTERNET_MAIN_PAGE.getValue();
+
+
+
+
+Selectors + +
+
+Create selectors +
+

Create a selector for every interactable element on a webpage using By type. Find elements and their attributes using browser developer mode (F12).

+
+
+
+image02 +
+
+
+
+
private static final By abTestLinkSelector               = By.cssSelector("li >
+            a[href*='abtest']");
+    private static final By basicAuthLinkSelector            = By.cssSelector("li >
+            a[href*='basic_auth']");
+    private static final By brokenImageLinkSelector          = By.cssSelector("li >
+            a[href*='broken_images']");
+    private static final By challengingDomLinkSelector       = By.cssSelector("li >
+            a[href*='challenging_dom']");
+    private static final By checkboxesLinkSelector           = By.cssSelector("li >
+            a[href*='checkboxes']");
+    private static final By contextMenuLinkSelector          = By.cssSelector("li >
+            a[href*='context_menu']");
+    private static final By disappearingElementsLinkSelector = By.cssSelector("li >
+            a[href*='disappearing_elements']");
+    private static final By dragAndDropLinkSelector          = By.cssSelector("li >
+            a[href*='drag_and_drop']");
+    private static final By dropdownLinkSelector             = By.cssSelector("li >
+            a[href*='dropdown']");
+    private static final By dynamicContentLinkSelector       = By.cssSelector("li >
+            a[href*='dynamic_content']");
+    private static final By dynamicControlsLinkSelector      = By.cssSelector("li >
+            a[href*='dynamic_controls']");
+    private static final By dynamicLoadingLinkSelector       = By.cssSelector("li >
+            a[href*='dynamic_loading']");
+    private static final By exitIntentLinkSelector           = By.cssSelector("li >
+            a[href*='exit_intent']");
+    private static final By fileDownloadLinkSelector         = By.cssSelector("li >
+            a[href$='download']");
+    private static final By fileUploadLinkSelector           = By.cssSelector("li >
+           a[href*='upload']");
+    private static final By floatingMenuLinkSelector         = By.cssSelector("li >
+           a[href*='floating_menu']");
+    private static final By forgotPasswordLinkSelector       = By.cssSelector("li >
+           a[href*='forgot_password']");
+    private static final By formAuthenticationLinkSelector   = By.cssSelector("li >
+           a[href*='login']");
+    private static final By framesLinkSelector               = By.cssSelector("li >
+           a[href*='frames']");
+    private static final By geolocationLinkSelector          = By.cssSelector("li >
+           a[href*='geolocation']");
+    private static final By horizontalSliderLinkSelector     = By.cssSelector("li >
+           a[href*='horizontal_slider']");
+    private static final By hoversLinkSelector               = By.cssSelector("li >
+           a[href*='hovers']");
+    private static final By infiniteScrollLinkSelector       = By.cssSelector("li >
+           a[href*='infinite_scroll']");
+    private static final By javaScriptAlertLinkSelector      = By.cssSelector("li >
+           a[href*='javascript_alerts']");
+    private static final By javaScriptErrorLinkSelector      = By.cssSelector("li >
+           a[href*='javascript_error']");
+    private static final By jQueryUIMenuLinkSelector         = By.cssSelector("li >
+           a[href*='jqueryui/menu']");
+    private static final By keyPressesLinkSelector           = By.cssSelector("li >
+           a[href*='key_presses']");
+    private static final By largeAndDeepDOMLinkSelector      = By.cssSelector("li >
+           a[href*='large']");
+    private static final By multipleWindowsLinkSelector      = By.cssSelector("li >
+           a[href*='windows']");
+    private static final By nestedFramesLinkSelector         = By.cssSelector("li >
+           a[href*='nested_frames']");
+    private static final By notificationMessagesLinkSelector = By.cssSelector("li >
+           a[href*='notification_message']");
+    private static final By redirectLinkSelector             = By.cssSelector("li >
+           a[href*='redirector']");
+    private static final By secureFileDownloadLinkSelector   = By.cssSelector("li >
+           a[href*='download_secure']");
+    private static final By shiftingContentLinkSelector      = By.cssSelector("li >
+           a[href*='shifting_content']");
+    private static final By slowResourcesLinkSelector        = By.cssSelector("li >
+           a[href*='slow']");
+    private static final By sortableDataTablesLinkSelector   = By.cssSelector("li >
+           a[href*='tables']");
+    private static final By statusCodesLinkSelector          = By.cssSelector("li >
+           a[href*='status_codes']");
+    private static final By typosLinkSelector                = By.cssSelector("li >
+           a[href*='typos']");
+    private static final By wYSIWYGEditorLinkSelector        = By.cssSelector("li >
+           a[href*='tinymce']");
+
+
+
+
+Implement methods +
+

Then use these selectors to create Objects and perform actions on page elements:

+
+
+
+
public ABtestPage clickABtestingLink() {
+        new Button(abTestLinkSelector).click();
+        return new ABtestPage();
+    }
+
+    public BasicAuthPage clickBasicAuthLink() {
+        getDriver().waitForPageLoaded();
+        WebElement link = getDriver().findElementDynamic(basicAuthLinkSelector);
+        JavascriptExecutor executor = (JavascriptExecutor) getDriver();
+        executor.executeScript("var elem=arguments[0]; setTimeout(function() {elem.click();}, 100)",
+                link);
+        return new BasicAuthPage();
+    }
+
+    public BrokenImagePage clickBrokenImageLink() {
+        new Button(brokenImageLinkSelector).click();
+        return new BrokenImagePage();
+    }
+
+    public ChallengingDomPage clickChallengingDomLink() {
+        new Button(challengingDomLinkSelector).click();
+        return new ChallengingDomPage();
+    }
+
+    public CheckboxesPage clickCheckboxesLink() {
+        new Button(checkboxesLinkSelector).click();
+        return new CheckboxesPage();
+    }
+
+    public ContextMenuPage clickContextMenuLink() {
+        new Button(contextMenuLinkSelector).click();
+        return new ContextMenuPage();
+    }
+
+    public DisappearingElementsPage clickDisappearingElementsLink() {
+        new Button(disappearingElementsLinkSelector).click();
+        return new DisappearingElementsPage();
+    }
+
+    public DragAndDropPage clickDragAndDropLink() {
+        new Button(dragAndDropLinkSelector).click();
+        return new DragAndDropPage();
+    }
+
+    public DropdownPage clickDropdownLink() {
+        new Button(dropdownLinkSelector).click();
+        return new DropdownPage();
+    }
+
+    public DynamicContentPage clickDynamicContentLink() {
+        new Button(dynamicContentLinkSelector).click();
+        return new DynamicContentPage();
+    }
+
+    public DynamicControlsPage clickDynamicControlsLink() {
+        new Button(dynamicControlsLinkSelector).click();
+        return new DynamicControlsPage();
+    }
+
+    public DynamicLoadingPage clickDynamicLoadingLink() {
+        new Button(dynamicLoadingLinkSelector).click();
+        return new DynamicLoadingPage();
+    }
+
+    public ExitIntentPage clickExitIntentLink() {
+        new Button(exitIntentLinkSelector).click();
+        return new ExitIntentPage();
+    }
+
+    public FileDownloadPage clickFileDownloadLink() {
+        new Button(fileDownloadLinkSelector).click();
+        return new FileDownloadPage();
+    }
+
+    public FileUploadPage clickFileUploadLink() {
+        new Button(fileUploadLinkSelector).click();
+        return new FileUploadPage();
+    }
+
+    public FloatingMenuPage clickFloatingMenuLink() {
+        new Button(floatingMenuLinkSelector).click();
+        return new FloatingMenuPage();
+    }
+
+    public ForgotPasswordPage clickForgotPasswordLink() {
+        new Button(forgotPasswordLinkSelector).click();
+        return new ForgotPasswordPage();
+    }
+
+    public FormAuthenticationPage clickFormAuthenticationLink() {
+        new Button(formAuthenticationLinkSelector).click();
+        return new FormAuthenticationPage();
+    }
+
+    public FramesPage clickFramesLink() {
+        new Button(framesLinkSelector).click();
+        return new FramesPage();
+    }
+
+    public GeolocationPage clickGeolocationLink() {
+        new Button(geolocationLinkSelector).click();
+        return new GeolocationPage();
+    }
+
+    public HorizontalSliderPage clickHorizontalSliderLink() {
+        new Button(horizontalSliderLinkSelector).click();
+        return new HorizontalSliderPage();
+    }
+
+    public HoversPage clickHoversLink() {
+        new Button(hoversLinkSelector).click();
+        return new HoversPage();
+    }
+
+    public InfiniteScrollPage clickInfiniteScrollLink() {
+        new Button(infiniteScrollLinkSelector).click();
+        return new InfiniteScrollPage();
+    }
+
+    public JavaScriptAlertsPage clickJavaScriptAlertLink() {
+        new Button(javaScriptAlertLinkSelector).click();
+        return new JavaScriptAlertsPage();
+    }
+
+    public JavaScriptErrorPage clickJavaScriptErrorLink() {
+        new Button(javaScriptErrorLinkSelector).click();
+        return new JavaScriptErrorPage();
+    }
+
+    public JQueryUIMenuPage clickJQueryUIMenuLink() {
+        new Button(jQueryUIMenuLinkSelector).click();
+        return new JQueryUIMenuPage();
+    }
+
+    public KeyPressesPage clickKeyPressesLink() {
+        new Button(keyPressesLinkSelector).click();
+        return new KeyPressesPage();
+    }
+
+    public LargeAndDeepDOMPage clickLargeAndDeepDOMLink() {
+        new Button(largeAndDeepDOMLinkSelector).click();
+        return new LargeAndDeepDOMPage();
+    }
+
+    public MultipleWindowsPage clickmultipleWindowsLink() {
+        new Button(multipleWindowsLinkSelector).click();
+        return new MultipleWindowsPage();
+    }
+
+    public NestedFramesPage clickNestedFramesLink() {
+        new Button(nestedFramesLinkSelector).click();
+        return new NestedFramesPage();
+    }
+
+    public NotificationMessagesPage clickNotificationMessagesLink() {
+        new Button(notificationMessagesLinkSelector).click();
+        return new NotificationMessagesPage();
+    }
+
+    public RedirectLinkPage clickRedirectLink() {
+        new Button(redirectLinkSelector).click();
+        return new RedirectLinkPage();
+    }
+
+    public SecureFileDownloadPage clickSecureFileDownloadLink() {
+        new Button(secureFileDownloadLinkSelector).click();
+        return new SecureFileDownloadPage();
+    }
+
+    public ShiftingContentPage clickShiftingContentLink() {
+        new Button(shiftingContentLinkSelector).click();
+        return new ShiftingContentPage();
+    }
+
+    public SlowResourcesPage clickSlowResourcesLink() {
+        new Button(slowResourcesLinkSelector).click();
+        return new SlowResourcesPage();
+    }
+
+    public SortableDataTablesPage clickSortableDataTablesLink() {
+        new Button(sortableDataTablesLinkSelector).click();
+        return new SortableDataTablesPage();
+    }
+
+    public StatusCodesHomePage clickStatusCodesLink() {
+        new Button(statusCodesLinkSelector).click();
+        return new StatusCodesHomePage();
+    }
+
+    public TyposPage clickTyposLink() {
+        new Button(typosLinkSelector).click();
+        return new TyposPage();
+    }
+
+    public WYSIWYGEditorPage clickWYSIWYGEditorLink() {
+        new Button(wYSIWYGEditorLinkSelector).click();
+        return new WYSIWYGEditorPage();
+    }
+
+
+
+

These methods create a Button object for every link on The Internet Page and click it to redirect on a different subpage.

+
+
+
+Elements types +
+

MrChecker includes Object types for various elements existing on webpages such as Button, TextBox etc. There are also the WebElement class and the getDriver().findElementDynamic(By selector) method for creating webpage objects dynamically and performing basic actions:

+
+
+

Instead of using static types you can use:

+
+
+
+
    public TyposPage clickTyposLink() {
+        WebElement checkboxesLink = getDriver().findElementDynamic(checkboxesLinkSelector);
+        checkboxesLink.click();
+        return new TyposPage();
+    }
+
+
+
+

Or perform actions without creating a variable:

+
+
+
+
    public TyposPage clickTyposLink() {
+        getDriver().findElementDynamic(checkboxesLinkSelector).click();
+        return new TyposPage();
+    }
+
+
+
+
+The Internet Base Test + +
+
+Test Class +
+

Create Test class and override methods:

+
+
+
    +
  • +

    public void setUp() - executes before each test

    +
  • +
  • +

    public void tearDown() - executes after each test

    +
  • +
+
+
+
+
public class TheInternetBaseTest extends BaseTest {
+    @Override
+    public void setUp() {
+
+    }
+
+    @Override
+    public void tearDown() {
+        logStep("Navigate back to The-Internet page");
+        BasePage.navigateBack();
+    }
+}
+
+
+
+

logStep(String message) method doesn’t exist yet so you should create it:

+
+
+
+
    protected static int             step = 0;
+
+     /**
+     * Logs test step including step number calculated individually for each test.
+     *
+     * @param message Text message representing step description.
+     */
+    public static void logStep(String message) {
+        BFLogger.logInfo("Step " + ++step + ": " + message);
+    }
+
+
+
+

Write a method for loading The Internet Page and checking if it is properly opened:

+
+
+
+
    protected static TheInternetPage theInternetPage;
+
+    /**
+     * Performs operations required for verifying if The Internet Page is properly opened.
+     *
+     * @return TheInternetPage
+     */
+    public static TheInternetPage shouldTheInternetPageBeOpened() {
+
+        logStep("Open the Url http://the-internet.herokuapp.com/");
+        theInternetPage = new TheInternetPage();
+        theInternetPage.load();
+
+        logStep("Verify if Url http://the-internet.herokuapp.com/ is opened");
+        assertTrue("Unable to load The Internet Page", theInternetPage.isLoaded());
+
+        return theInternetPage;
+    }
+
+
+
+

This Test class can’t be launched because it doesn’t contain any @Test methods. It’s been created only for supporting other Test classes.

+
+
+
+BFLogger +
+

BFLogger is a default MrChecker logging tool. Use it to communicate important information from test execution. There are three basic logging methods:

+
+
+
    +
  • +

    logInfo(String message) - used for test steps

    +
  • +
  • +

    logDebug(String message) - used for non-official information, either during the test build process or in Page Object files

    +
  • +
  • +

    logError(String message) - used to emphasize critical information

    +
  • +
+
+
+

Logs will be visible in the console and in the log file under path: MrChecker_Test_Framework\workspace\project-folder\logs

+
+
+
+
+
+

E2E Tutorials

+
+ +
+
MrChecker E2E tutorials
+
+

In order to learn more about MrChecker structure, start from Project Organisation section and then check out our fantastic tutorials:

+
+
+
+
How to create a basic test in MrChecker
+ +
+
+
Example: Booking a table
+
+

As an example to test we will use MyThaiStar booking page.
+In order to book a table, do the following steps:

+
+
+
    +
  1. +

    Open MyThaiStar Book Table Page

    +
  2. +
  3. +

    Enter booking data: Date and time, Name, Email and number of Table guests

    +
  4. +
  5. +

    Click Accept terms

    +
  6. +
  7. +

    Click Book table

    +
  8. +
  9. +

    Display confirmation box and send booking

    +
  10. +
  11. +

    Check if the booking was successful.

    +
  12. +
+
+
+
+image1 +
+
+
+
+image2 +
+
+
+

You can go through these steps manually and double-check the result.

+
+
+
+
How to prepare a test
+ +
+
+
== 1. Create BookTablePage class
+
+

You will need a class which will represent the MyThaiStar booking page.
+Fill the required methods with the following code:

+
+
+
+
public class BookTablePage extends BasePage {
+
+    @Override
+    public boolean isLoaded() {
+        getDriver().waitForPageLoaded(); //waits until the page is loaded
+        return getDriver().getCurrentUrl()
+                .equals("https://mts-devonfw-core.cloud.okteto.net/bookTable"); //checks if current page address equals MyThaiStar booking page address
+    }
+
+    @Override
+    public void load() {
+        getDriver().get("https://mts-devonfw-core.cloud.okteto.net/bookTable"); //loads page under the specified address
+        getDriver().waitForPageLoaded(); //waits until the page is loaded
+    }
+
+    @Override
+    public String pageTitle() {
+        return "My Thai Star"; //returns page title
+    }
+}
+
+
+
+

getDriver() method allows accessing Selenium Web Driver which performs actions on the webpage.

+
+
+

As this page class represents the MyThaiStar booking page, you have to set up selectors for web elements required in the test case. In the example you have to create selectors for elements we’ll interact with:

+
+
+
    +
  • +

    Date and time input field

    +
  • +
  • +

    Name input field

    +
  • +
  • +

    Email input field

    +
  • +
  • +

    Table guests input field

    +
  • +
  • +

    Accept terms checkbox

    +
  • +
  • +

    Book table button

    +
  • +
+
+
+

Selectors will be implemented as fields.

+
+
+

Example of the selector for Date and time input field:

+
+
+
+
/** Date field search criteria */
+private static final By dateSearch = By.cssSelector("input[formcontrolname='bookingDate']");
+
+
+
+

The input field’s name "bookingDate" was found by using the developer console in Google Chrome. How to prepare an everlasting selector?

+
+
+
+image3 +
+
+
+

This selector can be used to create a WebElement object of the said input field. Therefore, you will create a new method and call it "enterTimeAndDate".

+
+
+
+
public void enterTimeAndDate(String date) {
+    WebElement dateInput = getDriver().findElementDynamic(dateSearch); //creates a new WebElement to access Date and time input field
+    dateInput.sendKeys(date); //enters date value
+}
+
+
+
+

Now you can create other selectors and objects and methods for every element on the webpage:

+
+
+
+
/** Name input field search criteria */
+private static final By nameSearch = By.cssSelector("input[formcontrolname='name']");
+
+/** Email input field search criteria */
+private static final By emailSearch = By.cssSelector("input[formcontrolname='email']");
+
+/** Number of guests search criteria */
+private static final By guestsSearch = By.cssSelector("input[formcontrolname='assistants']");
+
+/** Check box search criteria */
+private static final By checkboxSearch = By.cssSelector("mat-checkbox[data-name='bookTableTerms']");
+
+/** Book table button search criteria */
+private static By bookTableSearch = By.name("bookTableSubmit");
+
+
+
+
+
public void enterName(String name) {
+    WebElement nameInput = getDriver().findElementDynamic(nameSearch); //creates a new WebElement to access name input field
+    nameInput.sendKeys(name); //enters name value
+}
+
+public void enterEmail(String email) {
+    WebElement emailInput = getDriver().findElementDynamic(emailSearch); //creates a new WebElement to access email input field
+    emailInput.sendKeys(email); //enters email value
+}
+
+public void enterGuests(int amountOfGuests) {
+    WebElement guestsInput = getDriver().findElementDynamic(guestsSearch); //creates a new WebElement to access amount of guests input field
+    guestsInput.sendKeys(Integer.toString(amountOfGuests)); //enters the number of guests value converted from integer to string
+}
+
+public void acceptTerms() {
+    WebElement checkbox = getDriver().findElementDynamic(checkboxSearch); //creates a new WebElement to access accept terms checkbox
+    WebElement square = checkbox.findElement(By.className("mat-checkbox-inner-container")); //creates a new WebElement to access inner square
+    JavascriptExecutor js = (JavascriptExecutor) getDriver(); //creates a Javascript executor object
+    js.executeScript("arguments[0].click()", square); //executes a script which clicks the square
+
+}
+
+public void clickBookTable() {
+    WebElement buttonbutton = getDriver().findElementDynamic(bookTableSearch); //creates a new WebElement to access book table button
+    getDriver().waitUntilElementIsClickable(bookTableSearch); //waits until a button might be clicked
+    buttonbutton.click(); //clicks the button
+}
+
+
+
+

You can use those methods in order to create a new method to go through the whole booking process:

+
+
+
+
public ConfirmBookPage enterBookingData(String date, String name, String email, int guests) {
+    enterTimeAndDate(date);
+    enterName(name);
+    enterEmail(email);
+    enterGuests(guests);
+    acceptTerms();
+
+    clickBookTable();
+
+    return new ConfirmBookPage();
+}
+
+
+
+
+
== 2. Create ConfirmBookPage class
+
+

As you can see, this method returns another page object that has not yet been created. This step is required, as the booking information that you would like to check is on another webpage. This means that you will have to create another page class and call it ConfirmBookPage:

+
+
+
+
public class ConfirmBookPage extends BasePage {
+
+    /** Confirmation dialog search criteria */
+    private static final By confirmationDialogSearch = By.className("mat-dialog-container");
+
+    /** Send confirmation button search criteria */
+    private static final By sendButtonSearch = By.name("bookTableConfirm");
+
+    /** Cancel confirmation button search criteria */
+    private static final By cancelButtonSearch = By.name("bookTableCancel");
+
+    @Override
+    public boolean isLoaded() {
+        //creates a new WebElement to access confirmation dialog box
+        WebElement confirmationDialog = getDriver().findElementDynamic(confirmationDialogSearch);
+
+        return confirmationDialog.isDisplayed(); //checks if the box is displayed
+    }
+
+    //this method won't be called because the page is loaded only after clicking book table button
+    @Override
+    public void load() {
+        BFLogger.logError("MyThaiStar booking confirmation page was not loaded."); //logs error
+    }
+
+    @Override
+    public String pageTitle() {
+        return "My Thai Star";
+    }
+
+    public void confirmBookingData() {
+        WebElement sendButton = getDriver().findElementDynamic(sendButtonSearch); //creates a new WebElement to access confirmation button
+        sendButton.click(); //clicks the send button
+    }
+
+    public void cancelBookingData() {
+        WebElement cancelButton = getDriver().findElementDynamic(cancelButtonSearch); //creates a new WebElement to access resignation button
+        cancelButton.click(); //clicks the cancel button
+    }
+}
+
+
+
+
+image4 +
+
+
+

After the click on Send button - the green confirmation dialogue appears with the message "Table successfully booked":

+
+
+
+image5 +
+
+
+

To be able to check if the booking was successful, you should go back to the BookTablePage class and add one more method in order to check if the green box was displayed:

+
+
+
+
/** Dialog search criteria */
+private static final By dialogSearch = By.className("bgc-green-600");
+
+public boolean checkConfirmationDialog() {
+    WebElement greenConfirmationDialog = getDriver().findElementDynamic(dialogSearch); //creates a new WebElement to access confirmation dialog
+
+    return greenConfirmationDialog.isDisplayed(); //checks if the dialog is displayed
+}
+
+
+
+
+
== 3. Create BookTableTest class
+
+

At this point you can start creating a test class:

+
+
+
+
import static org.junit.Assert.assertTrue;
+
+public class BookTableTest extends BaseTest {
+    private static BookTablePage bookTablePage = new BookTablePage(); //the field contains book table page object
+
+    @BeforeClass
+    public static void setUpBeforeClass() {
+        bookTablePage.load(); //loads book table page
+    }
+
+    @AfterClass
+    public static void tearDownAfterClass() {
+
+    }
+
+    @Override
+    public void setUp() {
+        if (!bookTablePage.isLoaded()) {
+            bookTablePage.load(); //if the page is not loaded, loads it
+        }
+    }
+
+    @Override
+    public void tearDown() {
+
+    }
+}
+
+
+
+
+
== 4. Write the first test
+
+

You can prepare our first test method using the methods from page classes

+
+
+
+
@Test
+public void Test_BookTableAndCheckConfirmation() {
+    String date = "07/23/2019 1:00 PM"; //replace with tomorrow's date in format "MM/dd/yyyy hh:mm a"
+    String name = "Smith"; //name field
+    String email = "smith@somemail.com"; //email field
+    int guests = 3; //number of guests
+
+    //enters booking data and returns a new confirmation page
+    ConfirmBookPage confirmBookPage = bookTablePage.enterBookingData(date, name, email, guests);
+    confirmBookPage.confirmBookingData(); //confirms booking
+
+    //checks if the green dialog box appears, if it does, test is passed, if not, the test failed and displays message given in the first argument
+    assertTrue("Test failed: Table not booked", bookTablePage.checkConfirmationDialog()); //returns true if dialog box appears and false if not
+}
+
+
+
+
+
== 5. Run the test
+
+

Run the test by right-clicking on the test method → Run as → JUnit test.

+
+
+
+image6 +
+
+
+
+
+
+

Migration from JUnit4 to JUnit5

+
+ +
+
+
+

Migration guide

+
+
+
Junit4 to Junit5 migration guide
+
+

mrchecker-core-module version 5.6.2.1 features the upgrade of Junit4 to Junit5. Consequently, the Junit4 features are now obsolete and current test projects require migration +in order to use the latest revision of MrChecker. This site provides guidance on the migration.

+
+ +
+
+
POM
+
+

The project pom.xml file needs to be adjusted in the first place. An exemplary POM file for download can be found here: https://github.com/devonfw/mrchecker/blob/develop/template/pom.xml

+
+
+
+
Test Annotations
+
+

Junit5 redefines annotations defining a test flow. The annotations need to be adjusted as per the following table.

+
+
+
+migration01 +
+
+
+
+
Rule, ClassRule, TestRule and TestMethod
+
+

Junit4 @Rule and @ClassRule annotations as well as TestRule and TestMethod interfaces have been replaced +with the Junit5 extension mechanism (https://junit.org/junit5/docs/current/user-guide/#extensions). +During the migration to Junit5, all the instances of the mentioned types need to be rewritten according to the Junit5 User Guide. +The extension mechanism is far more flexible than the Junit4 functionality based on rules.

+
+
+

Note: as per Junit5 API spec: ExpectedExceptionSupport, ExternalResourceSupport, VerifierSupport +provide native support of the corresponding Junit4 rules.

+
+
+

Extension registration example:

+
+
+
+migration02 +
+
+
+
+migration arrow down +
+
+
+
+migration03 +
+
+
+

TestRule (TestWatcher and ExternalResource) to Extension (TestWatcher and AfterAllCallback) example:

+
+
+
+migration04 +
+
+
+
+migration arrow down +
+
+
+
+migration05 +
+
+
+
+
Page, BasePageAutoRegistration and PageFactory classes
+
+

Page class is a new MrChecker class. It was introduced to provide common implementation for its subpages in specific MrChecker modules. +In order to receive test lifecycle notifications, particular Pages need to be registered by calling addToTestExecutionObserver() method. +To facilitate this process, PageFactory class was designed and its usage is the recommended way of creating Page objects for tests. +Although in MrChecker based on Junit4, the registration process was done in a specific BasePage constructor, it’s been considered error prone and reimplemented. +Furthermore, to reduce migration cost BasePageAutoRegistration classes are available in MrChecker modules. They use the old way of registration. +Given that, three ways of migration are possible.

+
+
+

Migration with PageFactory class example (RECOMMENDED):

+
+
+
+migration06 +
+
+
+
+migration arrow down +
+
+
+
+migration07 +
+
+
+

Migration with calling addToTestExecutionObserver() method example:

+
+
+
+migration06 +
+
+
+
+migration arrow down +
+
+
+
+migration08 +
+
+
+

Migration with BasePageAutoRegistration class example:

+
+
+
+migration09 +
+
+
+
+migration arrow down +
+
+
+
+migration10 +
+
+
+
+
Test suites
+
+

Test suite migration example:

+
+
+
+migration11 +
+
+
+
+migration arrow down +
+
+
+
+migration12 +
+
+
+

Running tests from Maven:

+
+
+
+migration13 +
+
+
+
+migration arrow down +
+
+
+
+migration14 +
+
+
+
+
Concurrency
+
+

Junit5 provides native thread count and parallel execution control in contrast to Junit4 where it was controlled by Maven Surefire plugin. +To enable concurrent test execution, junit-platform.properties file needs to be placed in the test/resources directory of a project.

+
+
+

Exemplary file contents:

+
+
+
+migration15 +
+
+
+

A ready-to-use file can be found here.

+
+
+

MrChecker supports only concurrent test class execution. +@ResourceLock can be used to synchronize between classes if needed:

+
+
+
+migration16 +
+
+
+
+
Cucumber
+
+

If Cucumber is used in a project, it is necessary to change a hook class. +An exemplary hook source file for download can be found here.

+
+
+
+
Data driven tests
+
+

Junit5 implements new approach to data driven tests by various data resolution mechanisms.

+
+
+

An example of method source parameters migration version one:

+
+
+
+migration17 +
+
+
+
+migration arrow down +
+
+
+
+migration18 +
+
+
+

An example of method source parameters migration version two:

+
+
+
+migration17 +
+
+
+
+migration arrow down +
+
+
+
+migration19 +
+
+
+

An example of method source in another class parameters migration:

+
+
+
+migration20 +
+
+
+
+migration arrow down +
+
+
+
+migration21 +
+
+
+

Providing parameters directly in annotations has no analogy in Junit5 and needs to be replaced with e.g. method source:

+
+
+
+migration22 +
+
+
+
+migration arrow down +
+
+
+
+migration23 +
+
+
+

An example of csv parameters source with no header line migration:

+
+
+
+migration24 +
+
+
+
+migration arrow down +
+
+
+
+migration25 +
+
+
+

An example of csv parameters source with the header line migration:

+
+
+
+migration26 +
+
+
+
+migration arrow down +
+
+
+
+migration27 +
+
+
+

An example of csv parameters source with object mapping migration step1:

+
+
+
+migration28 +
+
+
+
+migration arrow down +
+
+
+
+migration29 +
+
+
+

An example of csv parameters source with object mapping migration step 2:

+
+
+
+migration30 +
+
+
+
+migration arrow down +
+
+
+
+migration31 +
+
+
+
+
setUp() and tearDown()
+
+

BaseTest.setUp() and BaseTest.tearDown() methods are now not abstract and need no implementation in subclasses. @Override when a custom implementation is needed.

+
+
+
+
+
+

FAQ

+
+
+

Here you can find the most frequently asked questions regarding working with MrChecker and installation problems.

+
+
+
+
+

Common problems

+
+
+
I can’t find the boilerplate module. Has it been removed?
+
+

The boilerplate module has been removed from the GitHub project on purpose.

+
+
+

There were problems with naming and communication, not everybody was aware of the meaning of the word boilerplate.

+
+
+

The name of the folder has been changed to template. It can be found in the GitHub project.

+
+
+
+
Is it possible to use Docker with MrChecker?
+
+

MrChecker works seamlessly with Docker.

+
+ +
+

Note that the structure of the folders can be changed. If that happens - search in repo for /pipeline/CI/Jenkinsfile_node.groovy

+
+
+
+
Tests are not stable
+
+

Selenium tests perform actions much faster than a normal user would. Because pages can contain dynamically changing content, some web elements can still not be loaded when Selenium driver tries to access them.

+
+
+

getDriver().waitForPageLoaded() method checks ready state in the browser, that’s why stability problems may happen in advanced frontend projects.

+
+
+

To improve test stability you can:

+
+
+
    +
  • +

    add waiting methods before dynamically loading elements e.g. getDriver().waitForElement(By selector)

    +
  • +
  • +

    add timeout parameter in method getDriver().findElementDynamic(By selector, int timeOut)

    +
  • +
  • +

    change global waiting timeout value using method getDriver().manage().timeouts().implicitlyWait(long time, TimeUnit unit)

    +
  • +
+
+
+

Furthermore, if the page displays visible loading bars or spinners, create FluentWait method to wait until they disappear.

+
+
+

Notice that by increasing timeouts you may improve stability but too long waiting time makes tests run slower.

+
+ +
+
+
+
+

How to

+
+
+
How to: Change timeouts?
+
+

If you would like to change timeouts - you don’t have to change them globally. +It is possible to add waiting time parameter to searching methods, such as:

+
+
+

getDriver().findElementDynamic(By selector, int timeOut)
+timeout - in seconds

+
+
+

It is recommended to use methods that significantly level up the repetitiveness of the code:

+
+
+
+
getDriver().waitForElement(By selector);
+
+getDriver().waitForElementVisible(By selector);
+
+getDriver().waitForPageLoaded();
+
+getDriver().waitUntilElementIsClickable(By selector);
+
+
+
+

Or Fluent Wait methods with changed timeout and interval:

+
+
+
+
FluentWait<WebDriver> wait = new FluentWait<WebDriver>(getDriver())
+        .withTimeout(long duration, TimeUnit unit)
+        .pollingEvery(long duration, TimeUnit unit);
+wait.until((WebDriver wd) -> expectedCondition.isTrue());
+getWebDriverWait().withTimeout(millis, TimeUnit.MILLISECONDS)
+        .withTimeout(long duration, TimeUnit unit)
+        .pollingEvery(long duration, TimeUnit unit)
+        .until((WebDriver wd) -> expectedCondition.isTrue());
+
+
+
+

These methods allow You to change WebDriver timeouts values such as:

+
+
+

getDriver().manage().timeouts().pageLoadTimeout(long time, TimeUnit unit)
+the amount of time to wait for a page to load before throwing an exception. This is the default timeout for method getDriver().waitForPageLoaded()

+
+
+

getDriver().manage().timeouts().setScriptTimeout(long time, TimeUnit unit)
+the amount of time to wait for execution of script to finish before throwing an exception

+
+
+

getDriver().manage().timeouts().implicitlyWait(long time, TimeUnit unit) +the amount of time the driver should wait when searching for an element if it is not immediately present. After that time, it throws an exception. This is the default timeout for methods such as getDriver().findElementDynamic(By selector) or getDriver().waitForElement(By selector)

+
+
+

Changing timeouts can improve test stability but can also make test run time longer.

+
+
+
+
How to: Start a browser in Incognito/Private mode?
+
+

In MrChecker there is a possibility of changing browser options during runtime execution.

+
+
+

To run the browser in incognito mode:

+
+
+
    +
  1. +

    In Eclipse - open Run Configurations window:

    +
    +

    ht image1

    +
    +
  2. +
  3. +

    Select a test which you want to run and switch to arguments tab:

    +
    +

    ht image2

    +
    +
  4. +
  5. +

    Add VM argument:

    +
    +
      +
    • +

      for the incognito mode in chrome:

      +
      +

      ht image3

      +
      +
    • +
    +
    +
  6. +
+
+ +
+
+
+
+

Installation problems

+
+
+
Chromedriver version is not compatible with Chrome browser
+
+

Problem:

+
+
+

During the tests your web browser window opens and immediately closes, all your tests are broken.

+
+
+

Following error message is visible in the test description:

+
+
+
+
session not created: This version of ChromeDriver only supports Chrome version 76
+Build info: version: '<build_version>', revision: '<build_revision>', time: '<time>'
+System info: host: '<your_computer_name>', ip: '<your_ip_address>', os.name: '<your_os_name>', os.arch: '<your_os_architecture>', os.version: '<your_os_version>', java.version: '<java_version_installed>'
+Driver info: driver.version: NewChromeDriver
+
+
+
+

Solution:

+
+
+
    +
  1. +

    Make a change in the following files:

    +
    +
      +
    • +

      MrChecker_Test_Framework\workspace\devonfw-testing\src\resources\settings.properties

      +
    • +
    • +

      For project template-app-under-test: MrChecker_Test_Framework\workspace\devonfw-testing\template\src\resources\settings.properties

      +
    • +
    • +

      For project example-app-under-test: MrChecker_Test_Framework\workspace\devonfw-testing\example\src\resources\settings.properties

      +
      +

      Change the value of selenium.driverAutoUpdate field from true to false

      +
      +
    • +
    +
    +
  2. +
  3. +

    Replace the following file with a version compatible with your browser: +MrChecker_Test_Framework\workspace\devonfw-testing\example\lib\webdrivers\chrome\chromedriver.exe .

    +
  4. +
+
+
+
+
My browser opens up in German by default
+
+

Problem:

+
+
+

I would like my browser to use the English language, but the default language for the browser is German. How can I change the settings?

+
+
+

Solution:

+
+
+

There is a Properties file installed together with the MrChecker installation. It is possible to set the language in which a browser could be opened for testing purposes in Properties > Selenium configuration.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/My-Thai-Star-data-model.html b/docs/my-thai-star/1.0/My-Thai-Star-data-model.html new file mode 100644 index 00000000..67c3dacd --- /dev/null +++ b/docs/my-thai-star/1.0/My-Thai-Star-data-model.html @@ -0,0 +1,282 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Data Model

+
+
+
+mts datamodel +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/User-Stories.html b/docs/my-thai-star/1.0/User-Stories.html new file mode 100644 index 00000000..c3c7e4ec --- /dev/null +++ b/docs/my-thai-star/1.0/User-Stories.html @@ -0,0 +1,906 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

User Stories

+
+
+

The list of user stories, exported from JIRA, can be downloaded from here.

+
+
+
+
+

Epic: Invite friends

+
+ +
+
+
+

US: create invite for friends

+
+
+

Epic: Invite friends

+
+
+

As a guest I want to create a dinner event by entering date and time and adding potential guests by their emails so that each potential guest will receive an email in order to confirm or decline my invite.

+
+
+
+
+

== Acceptance criteria

+
+
+
    +
  1. +

    only date and time in future possible and both required

    +
  2. +
  3. +

    only valid email addresses: text@text.xx, one entered email-address is required

    +
  4. +
  5. +

    if AGB are not checked, an error message is shown

    +
  6. +
  7. +

    after the invite is done

    +
    +
      +
    1. +

      I see the confirmation screen of my invite (see wireframe)

      +
    2. +
    3. +

      I receive a confirmation email about my invite containing date, time and invited guests

      +
    4. +
    5. +

      all guests receive a mail with my invite

      +
    6. +
    +
    +
  8. +
+
+
+
+
+

US: create reservation

+
+
+

Epic: Invite friends

+
+
+

As a guest I want to create a reservation by entering date and time and number of adults and kids

+
+
+
+
+

== Acceptance criteria

+
+
+
    +
  1. +

    only date and time in future possible and both required

    +
  2. +
  3. +

    only valid email addresses: text@text.xx, one entered email-address is required

    +
  4. +
  5. +

    if AGB are not checked, an error message is shown

    +
  6. +
  7. +

    after the reservation is done

    +
    +
      +
    1. +

      I see a confirmation screen of my reservation with date-time, number of persons and kids

      +
    2. +
    3. +

      I receive a confirmation email about my reservation

      +
    4. +
    +
    +
  8. +
+
+
+
+
+

== Wireframes

+
+
+

see real time board

+
+
+
+
+

US: handle invite

+
+
+

As an invited guest I would like to receive an email - after somebody has invited me - with the option to accept or decline the invite so that the system knows about my participation

+
+
+
+
+

== AC:

+
+
+
    +
  1. +

    the mail contains the following information about the invite

    +
    +
      +
    1. +

      who has invited

      +
    2. +
    3. +

      who else is invited

      +
    4. +
    5. +

      date and time of the invite

      +
    6. +
    7. +

      button to accept or decline

      +
    8. +
    9. +

      after pressing the buttons the system will store the status (yes/no) of my invite

      +
    10. +
    +
    +
  2. +
+
+
+
+
+

US: revoke accepted invite

+
+
+

As an invited guest I would like to revoke my previous answer in order to inform the system and the inviter about my no showup

+
+
+
+
+

== AC:

+
+
+
    +
  1. +

    the inviter and I receive an email about my cancellation

    +
  2. +
  3. +

    the system sets my status of my invite to no

    +
  4. +
  5. +

    in case I have placed an order, the order is also removed from the system.

    +
  6. +
  7. +

    the cancellation is only possible 10 minutes before the event takes place. The system shows a message that cancellation is not possible anymore.

    +
  8. +
+
+
+
+
+

US: calculate best table

+
+
+

As a guest I would like the system to check (1 hour before my invite) all my invites and to reserve a table fitting the number of accepted users

+
+
+
+
+

== Details

+
+
+

Pseudo-algorithm for reservation: +Find table for given date and time where seats of guests >= Count of invited guests plus one. In case no results, decline request and show error message to user. In case of any result, make a reservation for table…​. +For each decline of a guest remove guest and search with reduced number for new table. In case table is found, reserve it and remove reservation from previous table. In case not, do not change reservations.

+
+
+
+
+

US: find table by reservation info

+
+
+

As a waiter I would like to search by reference number or email address for the reserved table in order to know the table for my visit. (when arriving at the restaurant)

+
+
+
+
+

== AC:

+
+
+
    +
  1. +

    After entering the email the systems shows the number of the table. In case no reservation found, a message is shown.

    +
  2. +
  3. +

    Entered email address could be email of inviter or any invited guest.

    +
  4. +
+
+
+
+
+

US: cancel invite

+
+
+

Epic: Invite friends

+
+
+

As a guests who has sent an invite I want to be able to cancel my previous invite in order to inform the restaurant and my invited guests that I will not show up

+
+
+
+
+

== AC:

+
+
+
    +
  1. +

    the option to cancel the invite is available in the confirmation-mail about my invite

    +
  2. +
  3. +

    after my cancellation all invited guests receives a mail about the cancellation

    +
  4. +
  5. +

    I see a confirmation that my invite was canceled successfully

    +
  6. +
  7. +

    after my cancellation my invite and reservation and all orders related to it are deleted from the system and no one can accept or decline any invite for it

    +
  8. +
  9. +

    the cancellation is only possible one hour before the invite takes place. After that I am not allowed to cancel it any more.

    +
  10. +
+
+
+
+
+

Epic: Digital Menu

+
+ +
+
+
+

US: filter menu

+
+
+

As a guest I want to filter the menu so that I only see the dishes I am interested in

+
+
+
+
+

== AC:

+
+
+
    +
  1. +

    the guest can filter by

    +
    +
      +
    1. +

      type: starter | main dish | dessert; XOR; if nothing is selected all are shown (default value)

      +
    2. +
    3. +

      veggy (yes|no|does not matter (default))

      +
    4. +
    5. +

      vegan (yes|no|does not matter (default))

      +
    6. +
    7. +

      rice (yes|no|does not matter (default))

      +
    8. +
    9. +

      curry (yes|no|does not matter (default))

      +
    10. +
    11. +

      noodle (yes|no|does not matter (default))

      +
    12. +
    13. +

      price (range)

      +
    14. +
    15. +

      ratings (range)

      +
    16. +
    17. +

      my favorite (yes|no|does not matter (default)) — free text (search in title and description)

      +
    18. +
    +
    +
  2. +
  3. +

    the guest can sort by price asc, rating asc

    +
  4. +
  5. +

    after setting the filter only dishes are shown which fulfills those criteria

    +
  6. +
  7. +

    by pressing the button reset filter all filter are reset to the initial value

    +
  8. +
  9. +

    by pressing the filter button the filter is applied [or is it triggered after each change?]

    +
  10. +
+
+
+
+
+

US: Define order

+
+
+

As a guest I want to define my order by selecting dishes from the menu

+
+
+
+
+

== AC:

+
+
+
    +
  • +

    The guest can add each dish to the order

    +
  • +
  • +

    In case the guest adds the same dish multiple times, a counter in the order for this dish is increased for this dish

    +
  • +
  • +

    The guest can remove the dish from the order

    +
  • +
  • +

    The guest can add for each main dish the type of meat (pork, chicken, tofu)

    +
  • +
  • +

    The guest can add for each dish a free-text-comment

    +
  • +
  • +

    After adding/removing any dish the price is calculated including VAT

    +
  • +
+
+
+
+
+

US: Order the order

+
+
+

As a guest I want to order my selected dishes (order)

+
+
+

AC:

+
+
+
    +
  1. +

    I receive a mail containing my order with all dishes and the final price

    +
  2. +
  3. +

    precondition for ordering:

    +
    +
      +
    1. +

      Each order must be associated with a reservation / invite. Without any reference no order could be placed. The reference could be obtained from a previous reservation/invite (created during same session) or by the previous accepted invite (link in email) or by entering the reference id when asked by the system.

      +
      +
        +
      1. +

        In case precondition is not fulfilled, the guest is asked

        +
        +
          +
        1. +

          whether he/she would like to create a reservation/invite and is forwarded to US Invite Friends. Only after finalizing the reservation the order is accepted.

          +
        2. +
        3. +

          or he/she would enter previous created reservation-id he/she knows in order to associate his/her order with this reservation

          +
        4. +
        +
        +
      2. +
      +
      +
    2. +
    +
    +
  4. +
+
+
+
+
+

US: Cancel order

+
+
+

As a guest I want to cancel my order.

+
+
+

AC:

+
+
+
    +
  1. +

    in my received confirmation mail I have the option to cancel my order

    +
  2. +
  3. +

    the cancellation is only possible one hour before my reservation takes place

    +
  4. +
  5. +

    my order is deleted from the system

    +
  6. +
+
+
+

Remark: Changing the order is not possible. For that the order must be canceled and created from scratch again

+
+
+
+
+

US: Read twitter rating for dishes

+
+
+

As a guest I want to read for all dishes the rating done by twitter because I would like to know the opinion of others

+
+
+

AC:

+
+
+
    +
  1. +

    For each dish I see the latest 3 comments done by twitter for this vote (text, username, avatar)

    +
  2. +
  3. +

    For each dish I see the number of likes done by twitter

    +
  4. +
+
+
+
+
+

Epic: User Profile

+
+ +
+
+
+

US: User Profile

+
+
+

As a guest I want to have a user profile to associate it with my twitter account to be able to like/rate dishes

+
+
+

AC:

+
+
+
    +
  1. +

    Username of my profile is my email address

    +
  2. +
  3. +

    My profile is protected by password

    +
  4. +
  5. +

    I can log in and log out to my profile

    +
  6. +
  7. +

    I can reset my password by triggering the reset by mail

    +
  8. +
  9. +

    I can associate my profile with my twitter account in order to rate dishes and store my favorites by liking posts associated to dishes

    +
  10. +
+
+
+
+
+

Epic: Rate by twitter

+
+ +
+
+
+

US: Receive mail to rate your dish

+
+
+

As a guest I want to receive a mail by the system in order to rate my dish

+
+
+
+
+

US: Rate your dish

+
+
+

As a guest I want to add a comment or a like via my twitter account for a dish

+
+
+

AC:

+
+
+
    +
  1. +

    Before I write my rate I would like to be able to read all tweets of other users for this dish

    +
  2. +
  3. +

    I would like to see the number of likes for a dish

    +
  4. +
+
+
+
+
+

Epic: Waiter Cockpit

+
+ +
+
+
+

US: See all orders/reservations

+
+
+

As a waiter I want to see all orders/reservation in order to know what is going on in my restaurant

+
+
+

AC:

+
+
+
    +
  1. +

    all orders/reservations are shown in a list view (read-only). Those list can be filtered and sorted (similar to excel-data-filters)

    +
  2. +
  3. +

    orders/reservations are shown in separate lists.

    +
  4. +
  5. +

    for each order the dish, meat, comment, item, reservation-id, reservation date-time, creation-date-time is shown

    +
  6. +
  7. +

    for each reservation the inviters email, the guests-emails, the number of accepts and declines, calculated table number, the reservation-id, reservation date-time and creation-date-time are shown

    +
  8. +
  9. +

    the default filter for all lists is the today’s date for reservation date-time. this filter can be deleted.

    +
  10. +
  11. +

    only reservations and orders with reservation date in the future shall be available in this view. All other orders and reservation shall not be deleted; for data Analytics those orders and reservation shall still exist in the system.

    +
  12. +
+
+
+

checklist:

+
+
+

talk about:

+
+
+
    +
  • +

    who?

    +
  • +
  • +

    what?

    +
  • +
  • +

    why (purpose)

    +
  • +
  • +

    why (objective)

    +
  • +
  • +

    what happens outside the software

    +
  • +
  • +

    what might go wrong

    +
  • +
  • +

    any question or assumptions (write them down) , DoR should check that those sections are empty.

    +
  • +
  • +

    is there any better solution?

    +
  • +
  • +

    how (technical perspective)

    +
  • +
  • +

    do a rough estimate

    +
  • +
  • +

    check INVEST

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/_images/images/angular/back-end.png b/docs/my-thai-star/1.0/_images/images/angular/back-end.png new file mode 100644 index 00000000..097b0a0d Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/angular/back-end.png differ diff --git a/docs/my-thai-star/1.0/_images/images/angular/folder_organization.png b/docs/my-thai-star/1.0/_images/images/angular/folder_organization.png new file mode 100644 index 00000000..2e921692 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/angular/folder_organization.png differ diff --git a/docs/my-thai-star/1.0/_images/images/angular/routes.png b/docs/my-thai-star/1.0/_images/images/angular/routes.png new file mode 100644 index 00000000..c218eb7a Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/angular/routes.png differ diff --git a/docs/my-thai-star/1.0/_images/images/angular/testing.JPG b/docs/my-thai-star/1.0/_images/images/angular/testing.JPG new file mode 100644 index 00000000..2c9153e6 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/angular/testing.JPG differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/angular/angular_directory.png b/docs/my-thai-star/1.0/_images/images/ci/angular/angular_directory.png new file mode 100644 index 00000000..af8ae5f1 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/angular/angular_directory.png differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/angular/angular_pipeline_flow.PNG b/docs/my-thai-star/1.0/_images/images/ci/angular/angular_pipeline_flow.PNG new file mode 100644 index 00000000..19240869 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/angular/angular_pipeline_flow.PNG differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/angular/chrome_installation.png b/docs/my-thai-star/1.0/_images/images/ci/angular/chrome_installation.png new file mode 100644 index 00000000..7e4c1c19 Binary files /dev/null and 
b/docs/my-thai-star/1.0/_images/images/ci/angular/chrome_installation.png differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/angular/container1.png b/docs/my-thai-star/1.0/_images/images/ci/angular/container1.png new file mode 100644 index 00000000..e2d56614 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/angular/container1.png differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/angular/container2.png b/docs/my-thai-star/1.0/_images/images/ci/angular/container2.png new file mode 100644 index 00000000..007662df Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/angular/container2.png differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/angular/container3.png b/docs/my-thai-star/1.0/_images/images/ci/angular/container3.png new file mode 100644 index 00000000..743c6753 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/angular/container3.png differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/angular/maven_tool.png b/docs/my-thai-star/1.0/_images/images/ci/angular/maven_tool.png new file mode 100644 index 00000000..0e74c771 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/angular/maven_tool.png differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/angular/nexus3_global_config.png b/docs/my-thai-star/1.0/_images/images/ci/angular/nexus3_global_config.png new file mode 100644 index 00000000..ea403bb3 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/angular/nexus3_global_config.png differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/angular/nexus3_groupid.png b/docs/my-thai-star/1.0/_images/images/ci/angular/nexus3_groupid.png new file mode 100644 index 00000000..6407d402 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/angular/nexus3_groupid.png differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/angular/nexus3_id.png b/docs/my-thai-star/1.0/_images/images/ci/angular/nexus3_id.png new file mode 100644 
index 00000000..907334c2 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/angular/nexus3_id.png differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/angular/pipeline-config.png b/docs/my-thai-star/1.0/_images/images/ci/angular/pipeline-config.png new file mode 100644 index 00000000..41c2e972 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/angular/pipeline-config.png differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/angular/sonar-env.png b/docs/my-thai-star/1.0/_images/images/ci/angular/sonar-env.png new file mode 100644 index 00000000..cbac6079 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/angular/sonar-env.png differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/angular/sonar-scanner.png b/docs/my-thai-star/1.0/_images/images/ci/angular/sonar-scanner.png new file mode 100644 index 00000000..7c44f9a2 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/angular/sonar-scanner.png differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/angular/src_directory.png b/docs/my-thai-star/1.0/_images/images/ci/angular/src_directory.png new file mode 100644 index 00000000..5a099a7d Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/angular/src_directory.png differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/clientserver/clientserver_pipeline_flow.PNG b/docs/my-thai-star/1.0/_images/images/ci/clientserver/clientserver_pipeline_flow.PNG new file mode 100644 index 00000000..b0a2bcd2 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/clientserver/clientserver_pipeline_flow.PNG differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/deployment/deployment_arch.png b/docs/my-thai-star/1.0/_images/images/ci/deployment/deployment_arch.png new file mode 100644 index 00000000..f8f050d1 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/deployment/deployment_arch.png differ diff --git 
a/docs/my-thai-star/1.0/_images/images/ci/deployment/docker-compose.PNG b/docs/my-thai-star/1.0/_images/images/ci/deployment/docker-compose.PNG new file mode 100644 index 00000000..9cdf6bc1 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/deployment/docker-compose.PNG differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/deployment/docker.png b/docs/my-thai-star/1.0/_images/images/ci/deployment/docker.png new file mode 100644 index 00000000..68bd7d47 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/deployment/docker.png differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/deployment/dockerfile-angular.PNG b/docs/my-thai-star/1.0/_images/images/ci/deployment/dockerfile-angular.PNG new file mode 100644 index 00000000..d23463d5 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/deployment/dockerfile-angular.PNG differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/deployment/dockerfile-java.PNG b/docs/my-thai-star/1.0/_images/images/ci/deployment/dockerfile-java.PNG new file mode 100644 index 00000000..13a3cc9e Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/deployment/dockerfile-java.PNG differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/future/deployment_schema.PNG b/docs/my-thai-star/1.0/_images/images/ci/future/deployment_schema.PNG new file mode 100644 index 00000000..c42d555c Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/future/deployment_schema.PNG differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/java/java_directory.png b/docs/my-thai-star/1.0/_images/images/ci/java/java_directory.png new file mode 100644 index 00000000..01a9f3c8 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/java/java_directory.png differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/java/java_pipeline_flow.PNG b/docs/my-thai-star/1.0/_images/images/ci/java/java_pipeline_flow.PNG new file mode 100644 index 00000000..63b2c861 
Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/java/java_pipeline_flow.PNG differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/mts_folder.PNG b/docs/my-thai-star/1.0/_images/images/ci/mts_folder.PNG new file mode 100644 index 00000000..c09fbf79 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/mts_folder.PNG differ diff --git a/docs/my-thai-star/1.0/_images/images/ci/pl_logo.png b/docs/my-thai-star/1.0/_images/images/ci/pl_logo.png new file mode 100644 index 00000000..923f5c24 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/ci/pl_logo.png differ diff --git a/docs/my-thai-star/1.0/_images/images/java/component_structure.png b/docs/my-thai-star/1.0/_images/images/java/component_structure.png new file mode 100644 index 00000000..04af748f Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/java/component_structure.png differ diff --git a/docs/my-thai-star/1.0/_images/images/java/dependency_injection.png b/docs/my-thai-star/1.0/_images/images/java/dependency_injection.png new file mode 100644 index 00000000..e743a8ad Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/java/dependency_injection.png differ diff --git a/docs/my-thai-star/1.0/_images/images/java/jwt_schema.png b/docs/my-thai-star/1.0/_images/images/java/jwt_schema.png new file mode 100644 index 00000000..5a8320d0 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/java/jwt_schema.png differ diff --git a/docs/my-thai-star/1.0/_images/images/java/layer_api_impl.png b/docs/my-thai-star/1.0/_images/images/java/layer_api_impl.png new file mode 100644 index 00000000..495c4749 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/java/layer_api_impl.png differ diff --git a/docs/my-thai-star/1.0/_images/images/java/layers_impl.png b/docs/my-thai-star/1.0/_images/images/java/layers_impl.png new file mode 100644 index 00000000..3d203a31 Binary files /dev/null and 
b/docs/my-thai-star/1.0/_images/images/java/layers_impl.png differ diff --git a/docs/my-thai-star/1.0/_images/images/java/logic_layer.png b/docs/my-thai-star/1.0/_images/images/java/logic_layer.png new file mode 100644 index 00000000..39f53135 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/java/logic_layer.png differ diff --git a/docs/my-thai-star/1.0/_images/images/java/mtsj_components.png b/docs/my-thai-star/1.0/_images/images/java/mtsj_components.png new file mode 100644 index 00000000..9e6d7f67 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/java/mtsj_components.png differ diff --git a/docs/my-thai-star/1.0/_images/images/java/project_modules.png b/docs/my-thai-star/1.0/_images/images/java/project_modules.png new file mode 100644 index 00000000..4f66f1f7 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/java/project_modules.png differ diff --git a/docs/my-thai-star/1.0/_images/images/java/test_results_eclipse.PNG b/docs/my-thai-star/1.0/_images/images/java/test_results_eclipse.PNG new file mode 100644 index 00000000..06685d6c Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/java/test_results_eclipse.PNG differ diff --git a/docs/my-thai-star/1.0/_images/images/jenkins/mts-pipelines-cicd.png b/docs/my-thai-star/1.0/_images/images/jenkins/mts-pipelines-cicd.png new file mode 100644 index 00000000..86f0effd Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/jenkins/mts-pipelines-cicd.png differ diff --git a/docs/my-thai-star/1.0/_images/images/jenkins/mts-pipelines.png b/docs/my-thai-star/1.0/_images/images/jenkins/mts-pipelines.png new file mode 100644 index 00000000..35003986 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/jenkins/mts-pipelines.png differ diff --git a/docs/my-thai-star/1.0/_images/images/jenkins/pipeline_output.PNG b/docs/my-thai-star/1.0/_images/images/jenkins/pipeline_output.PNG new file mode 100644 index 00000000..37dda415 Binary files 
/dev/null and b/docs/my-thai-star/1.0/_images/images/jenkins/pipeline_output.PNG differ diff --git a/docs/my-thai-star/1.0/_images/images/methodology_1.png b/docs/my-thai-star/1.0/_images/images/methodology_1.png new file mode 100644 index 00000000..5c2b889e Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/methodology_1.png differ diff --git a/docs/my-thai-star/1.0/_images/images/methodology_2.png b/docs/my-thai-star/1.0/_images/images/methodology_2.png new file mode 100644 index 00000000..dec1ece4 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/methodology_2.png differ diff --git a/docs/my-thai-star/1.0/_images/images/methodology_3.png b/docs/my-thai-star/1.0/_images/images/methodology_3.png new file mode 100644 index 00000000..fad41fee Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/methodology_3.png differ diff --git a/docs/my-thai-star/1.0/_images/images/mts_datamodel.png b/docs/my-thai-star/1.0/_images/images/mts_datamodel.png new file mode 100644 index 00000000..3ab83ec7 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/mts_datamodel.png differ diff --git a/docs/my-thai-star/1.0/_images/images/mts_styleguide.png b/docs/my-thai-star/1.0/_images/images/mts_styleguide.png new file mode 100644 index 00000000..1987880c Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/mts_styleguide.png differ diff --git a/docs/my-thai-star/1.0/_images/images/nodejs/dynamodb-data-model-1.4.1.png b/docs/my-thai-star/1.0/_images/images/nodejs/dynamodb-data-model-1.4.1.png new file mode 100644 index 00000000..36ebf82a Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/nodejs/dynamodb-data-model-1.4.1.png differ diff --git a/docs/my-thai-star/1.0/_images/images/nodejs/folder_organization.png b/docs/my-thai-star/1.0/_images/images/nodejs/folder_organization.png new file mode 100644 index 00000000..a690d8e1 Binary files /dev/null and 
b/docs/my-thai-star/1.0/_images/images/nodejs/folder_organization.png differ diff --git a/docs/my-thai-star/1.0/_images/images/saphana/Prediction_usecase.JPG b/docs/my-thai-star/1.0/_images/images/saphana/Prediction_usecase.JPG new file mode 100644 index 00000000..c755a7cf Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/saphana/Prediction_usecase.JPG differ diff --git a/docs/my-thai-star/1.0/_images/images/saphana/db_screenshot_1.JPG b/docs/my-thai-star/1.0/_images/images/saphana/db_screenshot_1.JPG new file mode 100644 index 00000000..b6c394c6 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/saphana/db_screenshot_1.JPG differ diff --git a/docs/my-thai-star/1.0/_images/images/saphana/mts_login.JPG b/docs/my-thai-star/1.0/_images/images/saphana/mts_login.JPG new file mode 100644 index 00000000..f2523036 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/saphana/mts_login.JPG differ diff --git a/docs/my-thai-star/1.0/_images/images/saphana/sap_hana_data_generator_output.JPG b/docs/my-thai-star/1.0/_images/images/saphana/sap_hana_data_generator_output.JPG new file mode 100644 index 00000000..0acd0cb6 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/saphana/sap_hana_data_generator_output.JPG differ diff --git a/docs/my-thai-star/1.0/_images/images/security/2FA_qr_code_menu.png b/docs/my-thai-star/1.0/_images/images/security/2FA_qr_code_menu.png new file mode 100644 index 00000000..565c80cc Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/security/2FA_qr_code_menu.png differ diff --git a/docs/my-thai-star/1.0/_images/images/security/2FA_secret_menu.png b/docs/my-thai-star/1.0/_images/images/security/2FA_secret_menu.png new file mode 100644 index 00000000..27509b4e Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/security/2FA_secret_menu.png differ diff --git a/docs/my-thai-star/1.0/_images/images/security/2FA_sidemenu.png 
b/docs/my-thai-star/1.0/_images/images/security/2FA_sidemenu.png new file mode 100644 index 00000000..c8303f91 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/security/2FA_sidemenu.png differ diff --git a/docs/my-thai-star/1.0/_images/images/security/filters_png.png b/docs/my-thai-star/1.0/_images/images/security/filters_png.png new file mode 100644 index 00000000..a0c7db85 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/security/filters_png.png differ diff --git a/docs/my-thai-star/1.0/_images/images/security/otp_prompt.png b/docs/my-thai-star/1.0/_images/images/security/otp_prompt.png new file mode 100644 index 00000000..96663e87 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/security/otp_prompt.png differ diff --git a/docs/my-thai-star/1.0/_images/images/security/security_cross_component.png b/docs/my-thai-star/1.0/_images/images/security/security_cross_component.png new file mode 100644 index 00000000..d14a7c18 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/security/security_cross_component.png differ diff --git a/docs/my-thai-star/1.0/_images/images/security/security_cross_component_twofactor.png b/docs/my-thai-star/1.0/_images/images/security/security_cross_component_twofactor.png new file mode 100644 index 00000000..99abcd32 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/security/security_cross_component_twofactor.png differ diff --git a/docs/my-thai-star/1.0/_images/images/serverless/folder_organization.png b/docs/my-thai-star/1.0/_images/images/serverless/folder_organization.png new file mode 100644 index 00000000..0dd63315 Binary files /dev/null and b/docs/my-thai-star/1.0/_images/images/serverless/folder_organization.png differ diff --git a/docs/my-thai-star/1.0/agile.html b/docs/my-thai-star/1.0/agile.html new file mode 100644 index 00000000..d761fa18 --- /dev/null +++ b/docs/my-thai-star/1.0/agile.html @@ -0,0 +1,414 @@ + + + + + + Untitled :: Devonfw.github.io 
Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

1. My Thai Star – Agile Framework

+
+ +
+
+
+

1.1 Team Setup

+
+
+

The team working on the development of the My Thai Star app and the documentation beside the technical development works distributed across various locations in Germany, the Netherlands, Spain and Poland. For the communication part the team uses the two channels Skype and Mail, and for the documentation part the team mainly makes use of GitHub and JIRA.

+
+
+
+
+

1.2 Scrum events

+
+ +
+
+
+

Sprint Planning

+
+
+

Within the My Thai Star project we decided on having one-hour Sprint Planning meetings for a four-week Sprint. This decision is based on the fact that this project is not the main project of the team members. As the backlog refinement is done during the Sprint Planning, we make use of the planningpoker.com tool for the estimation of the tasks.

+
+
+
+Screenshot of planningpoker.com +
+
Figure 1. Screenshot of planningpoker.com during Sprint 1 Planning
+
+
+

During the Sprint Planning meeting the team receives support from Devon colleagues outside the development. This feedback helps the team to focus on important functionalities and tasks by keeping an eye on the overall aim, which is to have a working application by the end of June 2017.

+
+
+
+
+

Sprint Review

+
+
+

The Sprint Review meetings are time boxed to one hour for the four week Sprint. Within the Sprint Review meeting the team plans to do a retrospective of the finished Sprint. As well as it is done during the Sprint Planning the team receives support from Devon colleagues.

+
+
+
+
+

Sprint Retrospective

+
+
+

For this project the team aligned on not having a specific Sprint Retrospective meeting. The team is going to have a retrospective of a finished Sprint during the Sprint Review.

+
+
+
+
+

Daily Stand-ups

+
+
+

The team aligned on having two weekly Stand-up meetings instead of a Daily Stand-up meeting. In comparison with the time boxed length of 15mins described in the CAF for this project the team extended the Stand-up meeting to 30mins. The content of the meetings remains the same.

+
+
+
+
+

Backlog refinement

+
+
+

The team decided that the backlog refinement meeting is part of the Sprint Planning meeting.

+
+
+
+
+

1.3 Establish Product Backlog

+
+
+

For the My Thai Star project the team decided on using the JIRA agile documentation, which is one of the widely used agile tools. JIRA is equipped with several useful tools regarding agile software development (e.g. the Scrum board). One of the big advantages of JIRA is the extensive configuration and personalization it allows.

+
+
+

With having a list of the Epics and User Stories for the My Thai Star development in GitHub, the team transferred the User Stories into the JIRA backlog as shown in the screenshot below. All User Stories are labeled colorfully with the related Epic, which shapes the backlog in a clear manner.

+
+
+
+Screenshot of planningpoker.com +
+
Figure 2. Screenshot of the JIRA backlog during Sprint 2
+
+
+

We decided on working with Sub-tasks, as a single user story comprises a number of single and separated tasks. Another benefit of working with sub-tasks is that every single sub-task can be assigned to a single team member, whereas a user story can only be assigned to one team member. By picking single sub-tasks, the whole process of a user story is better organized.

+
+
+
+Screenshot of Sub-tasks +
+
Figure 3. Screenshots of Sub-tasks during Sprint 2
+
+
+
+
+

2. My Thai Star – Agile Diary

+
+
+

In parallel to the Diary Ideation we use this Agile Diary to document our Scrum events. The target of this diary is to describe the differences to the Scrum methodology as well as specific characteristics of the project. We also document the process on how we approach the Scrum methodology over the length of the project.

+
+
+
+
+

24.03.2017 Sprint 1 Planning

+
+
+

Within the Sprint 1 Planning we used planning poker.com for the estimation of the user stories. The estimation process usually is part of the backlog refinement meeting. Regarding the project circumstances we decided to estimate the user stories during the Sprint Planning. Starting the estimation process we noticed that we had to align our interpretation of the estimation effort as these story points are not equivalent to a certain time interval. The story points are relative values to compare the effort of the user stories. With this in mind we proceeded with the estimation of the user stories. We decided to start Sprint 1 with the following user stories and the total amount of 37 story points: +• ICSDSHOW-2 Create invite for friends (8 Story Points) +• ICSDSHOW-4 Create reservation (3) +• ICSDSHOW-5 Handle invite (3) +• ICSDSHOW-6 Revoke accepted invite (5) +• ICSDSHOW-9 Cancel invite (3) +• ICSDSHOW-11 Filter menu (5) +• ICSDSHOW-12 Define order (5) +• ICSDSHOW-13 Order the order (5) +As the Sprint Planning is time boxed to one hour we managed to hold this meeting within this time window.

+
+
+
+
+

27.04.2017 Sprint 1 Review

+
+
+

During the Sprint 1 Review we had a discussion about the data model proposal. For the discussion we extended this particular Review meeting to 90min. As this discussion took almost 2/3 of the Review meeting we only had a short time left for our review of Sprint 1. For the following scrum events we decided to focus on the primary target of these events and have discussions needed for alignments in separate meetings. +Regarding the topic of splitting user stories we had the example of a certain user story which included a functionality of a twitter integration (ICSDSHOW-17 User Profile and Twitter integration). As the twitter functionality could not have been implemented at this early point of time we thought about cutting the user story into two user stories. We aligned on mocking the twitter functionality until the dependencies are developed in order to test the components. As this user story is estimated with 13 story points it is a good example for the question whether to cut a user story into multiple user stories or not. +Unfortunately not all user stories of Sprint 1 could have been completed. Due to this situation we discussed whether to push all unfinished user stories into the status done or move them to Sprint 2. We aligned on transferring the unfinished user stories into the next Sprint. During the Sprint 1 the team underestimated that a lot of holidays crossed the Sprint 1 goals. As taking holidays and absences of team members into consideration is part of a Sprint Planning we have a learning effect on setting a Sprint Scope.

+
+
+
+
+

03.05.2017 Sprint 2 Planning

+
+
+

As we aligned during the Sprint 1 Review on transferring unfinished user stories into Sprint 2 the focus for Sprint 2 was on finishing these transferred user stories. During our discussion on how many user stories we could work on in Sprint 2 we needed to remind ourselves that the overall target is to develop an example application for the devonfw. Considering this we aligned on a clear target for Sprint 2: To focus on finishing User Stories as we need to aim for a practicable and realizable solution. Everybody aligned on the aim of having a working application at the end of Sprint 2. +For the estimation process of user stories we again made use of planningpoker.com as the team prefers this “easy-to-use” tool. During our second estimation process we had the situation in which the estimated story points differed strongly from one team member to another. In this case the team members briefly explained how they understood and interpreted the user story. It turned out that team members misinterpreted the user stories. With having this discussion all team members got the same understanding of the specific functionality and scope of a user story. After the alignment the team members adjusted their estimations. +Beside this need for discussion the team estimated most of the user stories with very similar story points. This fact shows the improvement within the effort estimation for each team member in comparison to Sprint 1 planning. Over the short time of two Sprint Plannings the team received a better understanding and feeling for the estimation with story points.

+
+
+
+
+

01.06.2017 Sprint 2 Review

+
+
+

As our Sprint 1 Review four weeks ago was not completely structured like a Sprint Review meeting we focused on the actual intention of a Sprint Review meeting during Sprint 2 Review. This means we demonstrated the completed and implemented functionalities with screen sharing and the product owner accepted the completed tasks. +Within the User Story ICSDSHOW-22 “See all orders/reservations” the functionality “filtering the list by date” could have not been implemented during Sprint 2. The team was unsure on how to proceed with this task. One team member added that especially in regards of having a coherent release, implementing less but working functionalities is much better than implementing more but not working functionalities. For this the team reminded itself focusing on completing functionalities and not working straight to a working application.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/angular-ci.html b/docs/my-thai-star/1.0/angular-ci.html new file mode 100644 index 00000000..c0c4929f --- /dev/null +++ b/docs/my-thai-star/1.0/angular-ci.html @@ -0,0 +1,630 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular CI

+
+
+

The Angular client-side of My Thai Star is going to have some specific needs for the CI-CD Pipeline to perform mandatory operations.

+
+
+
+
+

Pipeline

+
+
+

The Pipeline for the Angular client-side is going to be called MyThaiStar_FRONT-END_BUILD. It is located in the PL instance, under the MTS folder (as previously explained). It is going to follow a process flow like this one:

+
+
+
+angular pipeline flow +
+
+
+

Each of those steps is called a stage in the Jenkins context. Let’s see what those steps mean in the context of the Angular application:

+
+
+
    +
  1. +

    Declarative: Checkout SCM

    +
    +

    Retrieves the project from the GitHub repository which it’s located. This step is not defined directly in our pipeline, but as it is loaded from the repository this step should always be done at the beginning.

    +
    +
    +
    +pipeline config +
    +
    +
  2. +
  3. +

    Declarative: Tool Install

    +
    +

    The Pipeline needs some Tools to perform some operations with the Angular project. This tool is a correct version of NodeJS (10.17.0 LTS) with Yarn installed as a global package.

    +
    +
    +
    +
    tools {
    +    nodejs "NodeJS 10.14.0"
    +}
    +
    +
    +
  4. +
  5. +

    Loading Custom Tools

    +
    +

    The Pipeline also needs a browser in order to execute the tests, so in this step the chrome-stable will be loaded. We will use it in a headless mode.

    +
    +
    +
    +
    tool chrome
    +
    +
    +
  6. +
  7. +

    Fresh Dependency Installation

    +
    +

    The script $ yarn does a package installation. As we always clean the workspace after the pipeline, all packages must be installed in every execution.

    +
    +
  8. +
  9. +

    Code Linting

    +
    +

    This script executes a linting process of TypeScript. Rules can be defined in the tslint.json file of the project. It throws an exception whenever a file contains a non-compliant piece of code.

    +
    +
  10. +
  11. +

    Execute Angular tests

    +
    +

    The CI testing of the Angular client is different than the standard local testing (adapted to CI environments, as specified in the Adaptation section of document). This script just executes the following commands:

    +
    +
    +
    +
    ng test --browsers ChromeHeadless --watch=false
    +
    +
    +
  12. +
  13. +

    Check dependencies

    +
    +

    Before continuing, we print the result of yarn audit. It shows the vulnerabilities in the dependencies. It does not process the response. The purpose is only to track the result of the command.

    +
    +
    +
    +
    yarn audit
    +
    +
    +
  14. +
  15. +

    SonarQube code analysis

    +
    +

    The script loads and executes the tool sonar-scanner. This tool is loaded here because it’s not used in any other part of the pipeline. The sonar-scanner will take all code, upload it to SonarQube and wait until SonarQube sends us a response with the quality of our code. If the code does not pass the quality gate, the pipeline will stop at this point.

    +
    +
  16. +
  17. +

    Build Application

    +
    +

    The building process of the Angular client would result in a folder called /dist in the main Angular’s directory. That folder is the one that is going to be served afterwards as an artifact. This process has also been adapted to some Deployment needs. This building script executes the following:

    +
    +
    +
    +
    ng build --configuration=docker
    +
    +
    +
  18. +
  19. +

    Deliver application into Nexus

    +
    +

    Once the scripts produce the Angular artifact (/dist folder), it’s time to package it and store it into Nexus.

    +
    +
  20. +
  21. +

    Declarative: Post Actions

    +
    +

    At the end, this step is always executed, even if a previous stage fails. We use this step to clean up the workspace for future executions.

    +
    +
    +
    +
    post {
    +    always {
    +        cleanWs()
    +    }
    +}
    +
    +
    +
  22. +
+
+
+
+
+

Adjustments

+
+
+

The Angular project Pipeline needed some "extra" features to complete all planned processes. Those features resulted in some additions to the project.

+
+
+
+
+

Pipeline Environment

+
+
+

In order to easily reuse the pipeline in other angular projects, all variables have been defined in the block environment. All variables have the default values that Production Line uses, so if you’re going to work in production line you won’t have to change anything. Example:

+
+
+
+
environment {
+    // Script for build the application. Defined at package.json
+    buildScript = 'build --configuration=docker'
+    // Script for lint the application. Defined at package.json
+    lintScript = 'lint'
+    // Script for test the application. Defined at package.json
+    testScript = 'test:ci'
+    // Angular directory
+    angularDir = 'angular'
+    // SRC folder. It will be angularDir/srcDir
+    srcDir = 'src'
+    // Name of the custom tool for chrome stable
+    chrome = 'Chrome-stable'
+
+    // SonarQube
+    // Name of the SonarQube tool
+    sonarTool = 'SonarQube'
+    // Name of the SonarQube environment
+    sonarEnv = "SonarQube"
+
+    // Nexus
+    // Artifact groupId
+    groupId = 'com.devonfw.mythaistar'
+    // Nexus repository ID
+    repositoryId= 'pl-nexus'
+    // Nexus internal URL
+    repositoryUrl = 'http://nexus3-core:8081/nexus3/repository/maven-snapshots'
+    // Maven global settings configuration ID
+    globalSettingsId = 'MavenSettings'
+    // Maven tool id
+    mavenInstallation = 'Maven3'
+}
+
+
+
+
+
+

== Description

+
+
+
    +
  • +

    build Script: script to build the application. It must be defined at package.json.

    +
    +

    Example (package.json):

    +
    +
    +
    +
    {
    +    "name": "mythaistar-restaurant",
    +    ...
    +    "scripts": {
    +        ...
    +        "build": "ng build",
    +        ...
    +    }
    +    ...
    +}
    +
    +
    +
    +

    This will be used as follows:

    +
    +
    +
    +
    sh """yarn ${buildScript}"""
    +
    +
    +
  • +
  • +

    lint Script: Script to lint the application. Defined at package.json

    +
    +

    Example (package.json):

    +
    +
    +
    +
    {
    +    "name": "mythaistar-restaurant",
    +    ...
    +    "scripts": {
    +        ...
    +        "lint": "ng lint",
    +        ...
    +    }
    +    ...
    +}
    +
    +
    +
    +

    This will be used as follows:

    +
    +
    +
    +
    sh """yarn ${lintScript}"""
    +
    +
    +
  • +
  • +

    test Script: Script to test the application. Defined at package.json

    +
    +

    Example (package.json):

    +
    +
    +
    +
    {
    +    "name": "mythaistar-restaurant",
    +    ...
    +    "scripts": {
    +        ...
    +        "test:ci": "npm run postinstall:web && ng test --browsers ChromeHeadless --watch=false",
    +        ...
    +    }
    +    ...
    +}
    +
    +
    +
    +

    This will be used as follows:

    +
    +
    +
    +
    sh """yarn ${testScript}"""
    +
    +
    +
  • +
  • +

    angular-Dir: Relative route to the angular application. In My Thai Star this is the angular folder. The current directory (.) is also allowed.

    +
    +
    +angular directory +
    +
    +
  • +
  • +

    srcDir: Directory where you store the source code. For angular applications the default value is src

    +
    +
    +src directory +
    +
    +
  • +
  • +

    chrome: Since you need a browser to run your tests, we must provide one. This variable contains the name of the custom tool for google chrome.

    +
    +
    +chrome installation +
    +
    +
  • +
  • +

    sonar-Tool: Name of the SonarQube scanner installation.

    +
    +
    +sonar scanner +
    +
    +
  • +
  • +

    sonar-Env: Name of the SonarQube environment. SonarQube is the default value for PL.

    +
    +
    +sonar env +
    +
    +
  • +
  • +

    group-Id: Group id of the application. It will be used to store the application in nexus3

    +
    +
    +nexus3 groupid +
    +
    +
  • +
  • +

    repository-Id: Id of the nexus3 repository. It must be defined at maven global config file.

    +
    +
    +nexus3 id +
    +
    +
  • +
  • +

    repository URL: The URL of the repository.

    +
  • +
  • +

    global Settings Id: The id of the global settings file.

    +
    +
    +nexus3 global config +
    +
    +
  • +
  • +

    maven Installation: The name of the maven tool.

    +
    +
    +maven tool +
    +
    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/angular-design.html b/docs/my-thai-star/1.0/angular-design.html new file mode 100644 index 00000000..d888cbb8 --- /dev/null +++ b/docs/my-thai-star/1.0/angular-design.html @@ -0,0 +1,804 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular design

+
+ +
+
+
+

Introduction

+
+
+

MyThaiStar client side has been built using latest frameworks, component libraries and designs:

+
+
+

Angular 4 as main front-end Framework. https://angular.io/

+
+
+

Angular/CLI 1.0.5 as Angular tool helper. https://github.com/angular/angular-cli

+
+
+

Covalent Teradata 1.0.0-beta4 as Angular native component library based on Material Design. https://teradata.github.io/covalent/#/

+
+
+

Angular/Material2 1.0.0-beta5 used by Covalent Teradata. https://github.com/angular/material2

+
+
+

Note: these dependencies are evolving at the moment and, when possible, we update them in the project.

+
+
+
+
+

Basic project structure

+
+
+

The project is using the basic project seed that Angular/CLI provides with “ng new <project name>”. Then the app folder has been organized as Angular recommends and goes as follows:

+
+
+
    +
  • +

    app

    +
    +
      +
    • +

      components

      +
      +
        +
      • +

        sub-components

        +
      • +
      • +

        shared

        +
      • +
      • +

        component files

        +
      • +
      +
      +
    • +
    • +

      main app component

      +
    • +
    +
    +
  • +
  • +

    assets folder

    +
  • +
  • +

    environments folder

    +
  • +
  • +

    rest of angular files

    +
  • +
+
+
+

This structure can be shown in the following example image:

+
+
+
+folder organization +
+
+
+
+
+

Main Views and components

+
+
+

List of components that serve as a main view to navigate or components developed to make atomically a group of functionalities which given their nature, can be highly reusable through the app.

+
+
+
+routes +
+
+
+

Note: no-name-route corresponds to whatever URL the user entered that does not exist; it redirects to Home-Component.

+
+
+
+
+

Public area

+
+ +
+
+
+

== App Component

+
+
+

Contains the components that are on top of all views, including:

+
+
+
+
+

== Order sidenav

+
+
+

Sidenav where selected orders are displayed with their total price and some comments.

+
+
+
+
+

== Navigation sidenav (only for mobile)

+
+
+

This sidenav proposal is to let user navigate through the app when the screen is too small to show the navigation buttons on the header.

+
+
+
+
+

== Header

+
+
+

It contains the title, and some other basic functions regarding open and close sidenavs.

+
+
+
+
+ +
+
+

At the end of the page that shows only when open on desktop.

+
+
+
+
+

== Home-Component

+
+
+

Main view that shows up when the app initializes.

+
+
+
+
+

== Menu-Component

+
+
+

View where the users can view, filter and select the dishes (with their extras) they want to order. It contains a component for each menu entry:

+
+
+
+
+

== Menu-card

+
+
+

This component composes all the data of a dish in a card. Component made to display indeterminate number of dishes easily.

+
+
+
+
+

== Book Table Component

+
+
+

View to book a table on a given date with a given number of attendees, or to create a reservation with a number of invitations via email.

+
+
+
+
+

== Book-table-dialog

+
+
+

Dialog which opens as a result of fulfilling the booking form, it displays all the data of the booking attempt, if everything is correct, the user can send the information or cancel if something is wrong.

+
+
+
+
+

== Invitation-dialog

+
+
+

Dialog which opens as a result of fulfilling the invitation form, it displays all the data of the booking with friends attempt, if everything is correct, the user can send the information or cancel if something is wrong.

+
+
+
+
+

== User Area

+
+
+

Group of dialogs with the proposal of giving some functionalities to the user, as login, register, change password or connect with Twitter.

+
+
+
+
+

== Login-dialog

+
+
+

Dialog with a tab to navigate between login and register.

+
+
+
+
+

== Password-dialog

+
+
+

Functionality reserved to already logged users, in this dialog the user can change freely their password.

+
+
+
+
+

== Twitter-dialog

+
+
+

Dialog designed specifically to connect your user account with Twitter.

+
+
+
+
+

Waiter cockpit area

+
+
+

Restricted area to workers of the restaurant, here we can see all information about booked tables with the selected orders and the reservations with all the guests and their acceptance or decline of the event.

+
+
+
+
+

== Order Cockpit Component

+
+
+

Data table with all the booked tables and a filter to search them, to show more info about that table you can click on it and open a dialog.

+
+
+
+
+

== Order-dialog

+
+
+

Complete display of data regarding the selected table and its orders.

+
+
+
+
+

== Reservation Cockpit Component

+
+
+

Data table with all the reservations and a filter to search them, to show more info about that table you can click on it and open a dialog.

+
+
+
+
+

== Reservation-dialog

+
+
+

Complete display of data regarding the selected table and its guests.

+
+
+
+
+

Email Management

+
+
+

As the application sends emails to both guests and hosts, we chose an approach based on URLs where the email contains a button with a URL to a service in the app and a token; the front-end reads that token and, depending on the URL, will redirect to one service or another. For example:

+
+
+
+
`http://localhost:4200/booking/cancel/CB_20170605_8fb5bc4c84a1c5049da1f6beb1968afc`
+
+
+
+

This URL will tell the app that it is a cancellation of a booking with the token CB_20170605_8fb5bc4c84a1c5049da1f6beb1968afc. The app will process this information, send it to the back-end with the correct headers, show the confirmation of the event and redirect to the home page.

+
+
+

The main cases at the moment are:

+
+
+
+
+

== Accept Invite

+
+
+

    A guest accepts an invitation sent by a host. They will receive another email to decline in case they change their mind later on.

+
+
+
+
+

== Reject Invite

+
+
+

    A guest declines the invitation.

+
+
+
+
+

== Cancel Reservation

+
+
+

A host cancels the reservation; everybody who has accepted or has not yet answered will receive an email notifying them that the event is canceled. Also, all the orders related to this reservation will be removed.

+
+
+
+
+

== Cancel Orders

+
+
+

When you have a reservation, you will be assigned to a token, with that token you can save your order in the restaurant. When sent, you will receive an email confirming the order and the possibility to remove it.

+
+
+
+
+

Services and directives

+
+
+

Services are where all the main logic between components of that view should be. This includes calling a remote server, composing objects, calculate prices, etc.

+
+
+

Directives are a single functionality that are related to a component.

+
+
+

As it can be seen in the basic structure, every view that has a minimum of logic or need to call a server has its own service located in the shared folder.

+
+
+

Also, services and directives can be created to compose a reusable piece of code that will be reused in some parts of the code:

+
+
+
+
+

Price-calculator-service

+
+
+

This service located in the shared folder of sidenav contains the basic logic to calculate the price of a single order (with all the possibilities) and to calculate the price of a full list of orders for a table. As this is used in the sidenav and in the waiter cockpit, it has been exported as a service to be imported where needed and easily testable.

+
+
+
+
+

Authentication

+
+
+

The authentication service serves as a validator of roles and login and, at the same time, stores the basic data regarding security and authentication.

+
+
+

Main task of this services is to provide visibility at app level of the current user information:

+
+
+
    +
  • +

    Check if the user is logged or not.

    +
  • +
  • +

    Check the permissions of the current user.

    +
  • +
  • +

    Store the username and the JWT token.

    +
  • +
+
+
+
+
+

Snack Service

+
+
+

Service created to serve as a factory of Angular Material Snackbars, which are commonly used throughout the app. This service accepts some parameters to customize the snackBar and opens it with these parameters.

+
+
+
+
+

Window Service

+
+
+

For responsiveness reasons, the dialogs have to accept a width parameter to adjust to screen width and this information is given by Window object, as it is a good practice to have it in an isolated service, which also calculates the width percentage to apply on the dialogs.

+
+
+
+
+

Equal-validator-directive

+
+
+

This directive located in the shared folder of userArea is used in 2 fields to make sure they have the same value. This directive is used in confirm password fields in register and change password.

+
+
+
+
+

Mock Back-end

+
+
+

Developing against a mock back-end while the real back-end was still being built let us make a more realistic application and made the adaptation easier once the back-end was ready to be connected and called. Its structure is as follows:

+
+
+
+back end +
+
+
+

Contains the three main groups of functionalities in the application. Every group is composed by:

+
+
+
    +
  • +

    An interface with all the methods to implement.

    +
  • +
  • +

    A service that implements that interface, the main task of this service is to choose between real back-end and mock back-end depending on an environment variable.

    +
  • +
  • +

    Mock back-end service which implements all the methods declared in the interface using mock data stored in a local file and mainly uses Lodash to operate the arrays.

    +
  • +
  • +

    Real back-end service works as Mock back-end but in this case the methods call for server rest services through Http.

    +
  • +
+
+
+
+
+

Booking

+
+
+

The booking group of functionalities manages the calls to reserve a table with a given time and assistants or with guests, get reservations filtered, accept or decline invitations or cancel the reservation.

+
+
+
+
+

Orders

+
+
+

Management of the orders, including saving, filtering and cancel an order.

+
+
+
+
+

Dishes

+
+
+

The dishes group of functionalities manages the calls to get and filter dishes.

+
+
+
+
+

Login

+
+
+

Login manages the userArea logic: login, register and change password.

+
+
+
+
+

Security

+
+
+

My Thai Star security is composed by two main security services:

+
+
+
+
+

Auth-guard

+
+
+

As a front-end security approach, this service implements an interface called CanActivate that comes from the angular/router module. The CanActivate interface forces you to implement a canActivate() function which returns a Boolean. +This service checks, against the data stored in the Auth-Service, whether the user is logged in and has enough permission to access the waiter cockpit. This prevents a forbidden user from accessing the waiter cockpit just by editing the URL in the browser.

+
+
+
+
+

JWT

+
+
+

JSON Web Token consists of a token that is generated by the server when the user logs in. Once provided, the token has to be included in an Authentication header on every Http call to the rest service, otherwise the call will be forbidden. +JWT also has an expiration date and role checking, so if a user does not have enough permissions, or stays logged in for an amount of time that exceeds this expiration date, the next time he makes a service call the server will return an error and forbid the call. You can log in again to restore the token.

+
+
+
+
+

== HttpClient

+
+
+

To implement this Authorization header management, an HttpClient service has been implemented. +This service works as a wrapper of Http, providing some more functionality, like header management and automatic handling of a server token error in case the JWT has expired, been corrupted or is not permitted.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/angular-testing.html b/docs/my-thai-star/1.0/angular-testing.html new file mode 100644 index 00000000..7e2702f8 --- /dev/null +++ b/docs/my-thai-star/1.0/angular-testing.html @@ -0,0 +1,453 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Angular testing

+
+
+
+testing +
+
+
+

MyThaiStar testing is done using Angular’s default testing environment and syntax language: Karma and Jasmine.

+
+
+

To test an element of the application, you indicate that tests are a special type of file with the extension .spec.ts; then, in the MyThaiStar angular/CLI config, you can notice that there is an array with only one entry, Karma, which at the same time has one entry for Karma.config.js.

+
+
+

In the configuration of Karma we indicate which syntax language we are going to use (currently Jasmine, as said before) among some other configurations; the last one is remarkable: browsers. By default, the only available browser is chrome; that is because Karma works by opening a chrome view to run all the tests, and in that same window Karma shows the results or errors of the test run. But we can add some other browsers to adjust to our necessities: for example, in some automatic processes that run from the console, it is not an option to open a chrome window; in that case, MyThaiStar uses PhantomJS and ChromeHeadless.

+
+
+

Taking all of this into account, to run the tests in MyThaiStar we need to move to the project root folder and run this command: ng test --browser <browser>

+
+
+
+
+

==

+
+
+

If you run just ng test, it will run the three browser options simultaneously, giving as a result three test runs and outputs; this can cause timeouts and unwanted behaviors. If you want a shortcut to run the tests with a chrome window you can just run yarn test, so we really encourage you not to use just ng test. +== ==

+
+
+

Here we are going to see how Client side testing of MyThaiStar has been done.

+
+
+
+
+

Testing Components

+
+
+

Angular components were created using angular/CLI ng create component, so they already come with a spec file to test them. The only thing left to do is to add the providers and imports needed for the component to work as the component itself; once this is done, the most basic test is to make sure that all the dependencies and the component itself can be correctly created.

+
+
+

As an example, this is the spec.ts of the menu view component:

+
+
+
+
all the imports...
+
+describe('MenuComponent', () => {
+  let component: MenuComponent;
+  let fixture: ComponentFixture<MenuComponent>;
+
+  beforeEach(async(() => {
+    TestBed.configureTestingModule({
+      declarations: [ MenuComponent, MenuCardComponent ],
+      providers: [SidenavService, MenuService, SnackBarService],
+      imports: [
+        BrowserAnimationsModule,
+        BackendModule.forRoot({environmentType: 0, restServiceRoot: 'v1'}),
+        CovalentModule,
+      ],
+    })
+    .compileComponents();
+  }));
+
+  beforeEach(() => {
+    fixture = TestBed.createComponent(MenuComponent);
+    component = fixture.componentInstance;
+    fixture.detectChanges();
+  });
+
+  it('should create', () => {
+    expect(component).toBeTruthy();
+  });
+});
+
+
+
+

First we declare the component to be tested and a Fixture object; then we configure the testingModule in the same way we could configure the MenuModule, with the difference here being that tests always have to use the mock back-end, because we do not want to really depend on a server to test our components.

+
+
+

Once the test module is configured, we have to prepare the context of the test; in this case we create the component, which is exactly what is going on in the beforeEach() function.

+
+
+

Finally, we are ready to use the component and its fixture to check if the component has been correctly created.

+
+
+

At this moment this is the case for most of the components, in the future, some work would be applied on this matter to have a full testing experience in MyThaiStar components.

+
+
+
+
+

Dialog components

+
+
+

Dialog components are in a special category because they cannot be tested normally. In the way Material implements the opening of dialogs, you have to create a component that will be loaded into a dialog; to tell the module to load these components when needed, they have to be added into a special array category: EntryComponents. So, to test them, we need to import them in the test file as well.

+
+
+

Also, the testing code to open the component is a bit different too:

+
+
+
+
...
+  beforeEach(() => {
+    dialog = TestBed.get(MdDialog);
+    component = dialog.open(CommentDialogComponent).componentInstance;
+  });
+...
+
+
+
+

That is right, the beforeEach() function is slightly different from the example above; in this case we have to force the test to know that the component is only displayed in a dialog, so we have to open a dialog with this component in order to access it.

+
+
+
+
+

Testing Services

+
+
+

Just like components, services can be tested too; actually, it is even more necessary to test them because they contain more complex logic and data management.

+
+
+

As an example of testing services, I am going to use a well-crafted service, with a specific purpose and with its logic completely tested: the price-calculator service.

+
+
+
+
...
+
+describe('PriceCalculatorService', () => {
+
+  beforeEach(() => {
+    TestBed.configureTestingModule({
+      providers: [PriceCalculatorService],
+    });
+  });
+
+  it('should be properly injected', inject([PriceCalculatorService], (service: PriceCalculatorService) => {
+    expect(service).toBeTruthy();
+  }));
+
+  describe('check getPrice method', () => {
+
+    it('should calculate price for single order without extras', inject([PriceCalculatorService], (service: PriceCalculatorService) => {
+      const order: OrderView = {
+        dish: {
+          id: 0,
+          price: 12.50,
+          name: 'Order without extras',
+        },
+        orderLine: {
+          comment: '',
+          amount: 1,
+        },
+        extras: [],
+      };
+
+      expect(service.getPrice(order)).toEqual(order.dish.price);
+    }));
+...
+
+
+
+

In service tests, we have to inject the service in order to use it; then we can define some initializing contexts to test whether the functions of the service return the expected values. In the example we can see how an imaginary order is created and the function getPrice() is expected to correctly calculate the price of that order.

+
+
+

In this same test file you can find some more tests regarding all the possibilities of use of that service: orders with and without extras, single order, multiple orders and so on.

+
+
+

Some services, as well as the components, only test that they are correctly created and their dependencies properly injected; in the future, full coverage of these services' tests will be provided.

+
+
+
+
+

Testing in a CI environment

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/clientserver-ci.html b/docs/my-thai-star/1.0/clientserver-ci.html new file mode 100644 index 00000000..4afa51e8 --- /dev/null +++ b/docs/my-thai-star/1.0/clientserver-ci.html @@ -0,0 +1,296 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

Client and Server CI - deprecated

+
+
+

The fact that there are 2 different pipelines dedicated to 2 different technologies ( my_thai_star_angular and my_thai_star_java ) does not mean that both cannot be fused into a different one. That is the case of the MTS pipeline. Basically, the greatest difference is the way of deploying at the end of it. Both single-part pipelines use the first deployment strategy (deploying independent Docker containers) but this one uses the second one: Docker Compose.

+
+
+
+
+

Pipeline

+
+
+

The flow of processes is going to be almost exactly a merge of other 2 pipelines.

+
+
+
+clientserver pipeline flow +
+
+
+

The result is going to be exactly the same at the end of MTS. It will be possible to know if any aspect of both Angular client-side and Java server-side fails and there will be a complete application deployed in [serverPath]:8091 (client) and [serverPath]:9091.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/deployment-pipelines.html b/docs/my-thai-star/1.0/deployment-pipelines.html new file mode 100644 index 00000000..751e929a --- /dev/null +++ b/docs/my-thai-star/1.0/deployment-pipelines.html @@ -0,0 +1,452 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Deployment Pipelines

+
+
+

As PL does not support deployments, we have created separate pipelines for this purpose. Those pipelines are: MyThaiStar_REVERSE-PROXY_DEPLOY, MyThaiStar_FRONT-END_DEPLOY and MyThaiStar_SERVER_DEPLOY.

+
+
+

The application will be deployed using docker on a remote machine. The architecture is as follows:

+
+
+
+deployment arch +
+
+
+

The parts to be deployed are: an NGINX reverse proxy, the java application and the angular application.

+
+
+
+
+

MyThaiStar_SERVER_DEPLOY Pipeline

+
+
+

Deploys on the server the Java part of My Thai Star.

+
+
+
+
+

Parameters

+
+
+
    +
  • +

    registryUrl: The URL to the docker registry where the image is stored.

    +
  • +
  • +

    registryCredentialsId: Credentials to publish/download images from registry.

    +
  • +
  • +

    dockerNetwork: Network of your My Thai Star application. You can deploy several versions of MTS in the same server by changing the dockerNetwork.

    +
  • +
  • +

    VERSION: The version that you want to deploy.

    +
  • +
+
+
+
+
+

Pipeline steps

+
+
+
    +
  • +

    Create docker network: Create the docker network with the name provided as parameter.

    +
  • +
  • +

    Deploy new image: Deploys a new java container. If it already exists, it first deletes the previous one.

    +
  • +
+
+
+
+
+

MyThaiStar_FRONT-END_DEPLOY

+
+
+

Deploys on the server the Angular part of My Thai Star

+
+
+
+
+

Parameters

+
+
+
    +
  • +

    registryUrl: The URL to the docker registry where the image is stored.

    +
  • +
  • +

    registryCredentialsId: Credentials to publish/download images from registry.

    +
  • +
  • +

    dockerNetwork: Network of your My Thai Star application. You can deploy several versions of MTS in the same server by changing the dockerNetwork.

    +
  • +
  • +

    VERSION: The version that you want to deploy.

    +
  • +
+
+
+
+
+

Pipeline steps

+
+
+
    +
  • +

    Create docker network: Create the docker network with the name provided as parameter.

    +
  • +
  • +

    Deploy new image: Deploys a new angular container. If it already exists, it first deletes the previous one.

    +
  • +
+
+
+
+
+

MyThaiStar_REVERSE-PROXY_DEPLOY Pipeline

+
+
+ + + + + +
+ + +As reverse proxy connects to the Java and Angular application, both must be deployed before you execute this pipeline. +
+
+
+

The MyThaiStar_REVERSE-PROXY_DEPLOY pipeline will deploy the My Thai Star reverse proxy into a remote machine using docker.

+
+
+
+
+

Parameters

+
+
+
    +
  • +

    registryUrl: The URL to the docker registry where the image is stored.

    +
  • +
  • +

    registryCredentialsId: Credentials to publish/download images from registry.

    +
  • +
  • +

    buildReverseProxy: If yes, it will build and publish a new version of reverse-proxy.

    +
  • +
  • +

    port: Port of the MTS application. You must ensure that this port is available in the deployment machine.

    +
  • +
  • +

    docker Network: Network of your My Thai Star application. You can deploy several versions of MTS in the same server by changing the port and the docker Network.

    +
  • +
  • +

    VERSION: The version that you want to deploy.

    +
  • +
+
+
+
+
+

Pipeline steps

+
+
+
    +
  • +

    Create docker network: Create the docker network with the name provided as parameter.

    +
  • +
  • +

    Create the Docker image: If build-Reverse-Proxy is enabled, this step will create a new docker image and publish it to the docker registry.

    +
  • +
  • +

    Deploy new image: Deploys a new reverse proxy container. If it already exists, it first deletes the previous one.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/deployment-strategies.html b/docs/my-thai-star/1.0/deployment-strategies.html new file mode 100644 index 00000000..bd7d9d6c --- /dev/null +++ b/docs/my-thai-star/1.0/deployment-strategies.html @@ -0,0 +1,415 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Deployment Strategies

+
+
+

In this chapter different way of deploying My Thai Star are explained. Everything will be based in Docker.

+
+
+
+
+

Independent Docker containers

+
+
+

The first way of deployment will use isolated Docker containers. That means that if the client-side container is deployed, it does not affect the server-side container’s life cycle and vice versa.

+
+
+

Let’s show how the containers will behave during their life cycle.

+
+
+
    +
  • +

    0) Copy everything you need into the Deployment Server directory

    +
  • +
  • +

    1) Remove existing container (Nginx or Tomcat)

    +
    +
    +container1 +
    +
    +
  • +
  • +

    2) Run new one from the Docker images collection of the external Deployment Server.

    +
    +
    +container2 +
    +
    +
  • +
  • +

    3) Add the artifact /dist to the "deployable" folder of the Docker container (/usr/share/nginx/html/)

    +
    +
    +container3 +
    +
    +
    +

    Now, let’s see how it’s being executed in the command line (simplified due to documentation purposes). The next block of code represents what is inside of the last stage of the Pipeline.

    +
    +
    +
    +
    sshagent (credentials: ['my_ssh_token']) {
    +    sh """
    +        // Copy artifact from workspace to deployment server
    +
    +        // Manage container:
    +        docker rm -f [mts-container]
    +        docker run -itd --name=[mts-container] [base_image]
    +        docker exec [mts-container] bash -C \\"rm [container_deployment_folder]/*\\"
    +        docker cp [artifact] [mts-container]:[container_deployment_folder]
    +    """
    +}
    +
    +
    +
    +

    For every operation performed in the external Deployment Server, it is necessary to define where those commands are going to be executed. So, for each one of previous docker commands, this should appear before:

    +
    +
    +
    +
    `ssh -o StrictHostKeyChecking=no root@10.40.235.244`
    +
    +
    +
  • +
+
+
+
+
+

Docker Compose

+
+
+

The second way of deployment will be by orchestrating both elements of the application: The Angular client-side and the Java server-side. Both elements will be running in Docker containers as well, but in this case they won’t be independent anymore. Docker Compose will be in charge of keeping both containers up, or to put them down.

+
+
+
+
+

Project adjustment

+
+
+

In order to perform this second way of deployment, some files will be created in the project. The first one is the Dockerfile for the Angular client-side. This file will pull (if necessary) an Nginx Docker image and copy the Angular artifact (/dist folder) inside of the deployment folder of the image. It will be located in the main directory of the Angular client-side project.

+
+
+
+dockerfile angular +
+
+
+

The second file is the Dockerfile for the Java server-side. Its function will be quite similar to the Angular one. It will run a tomcat Docker image and copy the Java artifact (mythaistar.war file) in its deployment folder.

+
+
+
+dockerfile java +
+
+
+

Finally, as long as the docker-compose is being used, a file containing its configuration will be necessary as well. A new folder in the main My Thai Star’s directory is created, and it’s called /docker. Inside there is just a docker-compose.yml file. It contains all the information needed to orchestrate the deployment process. For example, which port both containers are going to be published on, and so on. This way of deployment will allow the application to be published or not just with one action.

+
+
+
+
docker-compose rm -f            # down
+docker-compose up --build -d    # up fresh containers
+
+
+
+
+docker compose +
+
+
+

Let’s have a look at the file itself:

+
+
+
+
version: '3'
+services:
+  client_compose:
+    build: "angular"
+    ports:
+      - "8091:80"
+    depends_on:
+      - server_compose
+  server_compose:
+    build: "java"
+    ports:
+      - "9091:8080"
+
+
+
+

This Orchestrated Deployment will offer some interesting possibilities for the future of the application.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/deployment.html b/docs/my-thai-star/1.0/deployment.html new file mode 100644 index 00000000..4285a6dd --- /dev/null +++ b/docs/my-thai-star/1.0/deployment.html @@ -0,0 +1,377 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Deployment

+
+
+

The main deployment tool used for My Thai Star is Docker.

+
+
+
+docker +
+
+
+

It is a tool to run applications in isolated environments. Those isolated environments are what we call Docker containers. For instance, no installation of Nginx, Apache Tomcat, or anything else needed to deploy will be necessary, because there will be containers that already have those technologies inside.

+
+
+
+
+

Where will the Docker containers be running?

+
+
+

Of course, it is necessary to have an external Deployment Server. Every Docker process will run on it. It will be accessed from Production Line pipelines via SSH. Thus, the pipeline itself will manage the scenario: if every previous process, such as testing, passes as OK, it stops the current containers and creates new ones.

+
+
+

This external server will be located in https://mts-devonfw-core.cloud.okteto.net/

+
+
+
+
+

Container Schema

+
+
+

3 Docker containers are being used for the deployment of My Thai Star:

+
+
+
    +
  1. +

    Nginx for the Reverse Proxy

    +
  2. +
  3. +

    tomcat for the Java Server

    +
  4. +
  5. +

    Nginx for the Angular Client

    +
  6. +
+
+
+

The usage of the Reverse Proxy will allow the client to call via /api every single Java Server’s REST operation. Moreover, there will only be 1 port in usage in the remote Docker host, the one mapped for the Reverse Proxy: 8080. +Besides the deployment itself using Nginx and tomcat, both client and server are previously built using NodeJS and maven images. Artifacts produced by them will be pasted in servers' containers using multi-stage docker builds. It will all follow this schema:

+
+
+
+36028242 8998f41c 0d9e 11e8 93b3 6bfe50152bf8 +
+
+
+

This orchestration of all 3 containers will be done by using a docker-compose.yml file. To redirect traffic from one container to another (i.e. reverse-proxy to angular client or angular client to java server) will be done by using, as host names, the service name docker-compose defines for each of them, followed by the internally exposed port:

+
+ +
+ + + + + +
+ + +An implementation using Traefik as a reverse proxy instead of NGINX is also available. +
+
+
+
+
+

Run My Thai Star

+
+
+

The steps to run My Thai Star are:

+
+
+
    +
  1. +

    Clone the repository $ git clone https://github.com/devonfw/my-thai-star.git

    +
  2. +
  3. +

    Run the docker compose command: $ docker-compose up

    +
  4. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/future-deployment.html b/docs/my-thai-star/1.0/future-deployment.html new file mode 100644 index 00000000..f82e0af3 --- /dev/null +++ b/docs/my-thai-star/1.0/future-deployment.html @@ -0,0 +1,291 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Future Deployment

+
+
+

The My Thai Star project is going to be built in many technologies. Thus, let’s think about one deployment schema that allows the Angular client to communicate with all three back ends: Java, Node and .NET.

+
+
+

As long as Docker containers are being used, it shouldn’t be that hard to deal with this "distributed" deployment. The schema represents 6 Docker containers that will have client-side(s) and server-side(s). Each of 3 Angular client containers (those in red) are going to communicate with different back-ends. So, when the deployment is finished, it would be possible to use all three server-sides just by changing the "port" in the URL.

+
+
+

Let’s see how it would look like:

+
+
+
+deployment schema +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/graphql-design.html b/docs/my-thai-star/1.0/graphql-design.html new file mode 100644 index 00000000..8de94bdb --- /dev/null +++ b/docs/my-thai-star/1.0/graphql-design.html @@ -0,0 +1,280 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

GraphQL design

+
+
+

TODO

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/graphql-testing.html b/docs/my-thai-star/1.0/graphql-testing.html new file mode 100644 index 00000000..c8f4a261 --- /dev/null +++ b/docs/my-thai-star/1.0/graphql-testing.html @@ -0,0 +1,280 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

GraphQL testing

+
+
+

TODO

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/index.html b/docs/my-thai-star/1.0/index.html new file mode 100644 index 00000000..83ff59ec --- /dev/null +++ b/docs/my-thai-star/1.0/index.html @@ -0,0 +1,467 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

MyThaiStar Wiki

+
+ +
+
+
+

User Stories

+
+
+ +
+
+
+
+

Technical design

+
+ +
+
+
+

Data Model

+
+
+ +
+
+
+
+

Server Side

+ +
+
+

Client Side

+
+ +
+
+
+

SAP HANA Integration

+
+
+ +
+
+
+
+

Security

+ +
+
+

Testing

+
+ +
+
+
+

Server Side

+ +
+
+

Client Side

+
+ +
+
+
+

End to end

+
+
+ +
+
+
+
+

UI design

+
+
+ +
+
+
+
+

CI/CD

+ +
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/java-ci.html b/docs/my-thai-star/1.0/java-ci.html new file mode 100644 index 00000000..cb7a38d2 --- /dev/null +++ b/docs/my-thai-star/1.0/java-ci.html @@ -0,0 +1,500 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Java CI

+
+
+

The Java server-side of My Thai Star is a devon4j-based application. As long as Maven and Java 8 are going to be needed, the Pipeline should have those tools available as well.

+
+
+
+
+

Pipeline

+
+
+

This Pipeline is called MyThaiStar_SERVER_BUILD, and it is located exactly in the same PL instance’s folder than MyThaiStar_FRONTEND_BUILD. Let’s see how the Pipeline’s flow behaves.

+
+
+
+java pipeline flow +
+
+
+

Check those Pipeline stages with more detail:

+
+
+
    +
  1. +

    Declarative: Checkout SCM

    +
    +

    Gets the code from https://github.com/devonfw/my-thai-star . This step is not defined directly in our pipeline, but as it is loaded from the repository this step should always be done at the beginning.

    +
    +
  2. +
  3. +

    Declarative: Tool Install

    +
    +

    The My Thai Star application works with JDK11. In this step, if JDK11 is not installed, we install it and then put the JDK folder into PATH.

    +
    +
    +
    +
    tools {
    +  jdk 'OpenJDK11'
    +}
    +
    +
    +
  4. +
  5. +

    Loading Custom Tools

    +
    +

    In this step we load the tools that can not be loaded in the previous step. As My Thai Star is delivered as docker container, in this step we load docker as custom tool.

    +
    +
    +
    +
    tool dockerTool
    +
    +
    +
  6. +
  7. +

    Install dependencies

    +
    +

    This step will download all project dependencies.

    +
    +
    +
    +
    mvn clean install -Dmaven.test.skip=true
    +
    +
    +
  8. +
  9. +

    Unit Tests

    +
    +

    This step will execute the project unit test with maven.

    +
    +
    +
    +
    mvn clean test
    +
    +
    +
  10. +
  11. +

    Dependency Checker

    +
    +

    Execute the OWASP Dependency Checker in order to validate the project dependencies. It will generate a report that can be used in SonarQube

    +
    +
    +
    +
    dependencyCheck additionalArguments: '--project "MTSJ" --scan java/mtsj --format XML', odcInstallation: 'dependency-check'
    +dependencyCheckPublisher pattern: ''
    +
    +
    +
  12. +
  13. +

    SonarQube analysis

    +
    +

    The code is evaluated using the integrated PL instance’s SonarQube. Also, it will wait for the quality gate status. If the status is failing, the pipeline execution will be stopped.

    +
    +
    +
    +
    withSonarQubeEnv(sonarEnv) {
    +    sh "mvn sonar:sonar"
    +}
    +
    +def qg = waitForQualityGate()
    +if (qg.status != 'OK') {
    +    error "Pipeline aborted due to quality gate failure: ${qg.status}"
    +}
    +
    +
    +
  14. +
  15. +

    Deliver application into Nexus

    +
    +

    Store all artifacts into nexus.

    +
    +
    +
    +
    mvn deploy -Dmaven.test.skip=true
    +
    +
    +
  16. +
  17. +

    Create the Docker image

    +
    +

    Create the docker image and then publish the image into a docker registry.

    +
    +
  18. +
+
+
+
+
+

Adjustments

+
+ +
+
+
+

Pipeline Environment

+
+
+

In order to easily reuse the pipeline in other java projects, all variables have been defined in the block environment. All variables have the default values that Production Line uses, so if you’re going to work in production line you won’t have to change anything. Example:

+
+
+
+
environment {
+    // Directory with java project
+    javaDir = 'java/mtsj'
+
+    // SonarQube
+    // Name of the SonarQube environment
+    sonarEnv = "SonarQube"
+
+    // Nexus 3
+    // Maven global settings configuration ID
+    `globalSettingsId = 'MavenSettings'`
+    // Maven tool id
+    `mavenInstallation = 'Maven3'`
+
+    // Docker
+    dockerRegistryCredentials = 'nexus-api'
+    dockerRegistryProtocol = 'https://\'
+    dockerTool = 'docker-global
+}
+
+
+
+
+
+

== Description

+
+
+
    +
  • +

    java Dir: Relative route to java application. In My Thai Star this is the java/mtsj folder. The actual directory (.) is also allowed.

    +
    +
    +java directory +
    +
    +
  • +
  • +

    sonar Env: Name of the SonarQube environment. SonarQube is the default value for PL.

    +
  • +
  • +

    global Settings Id: The id of the global settings file. MavenSettings is the default value for PL.

    +
    +
    +nexus3 global config +
    +
    +
  • +
  • +

    maven Installation: The name of the maven tool. Maven3 is the default value for PL.

    +
    +
    +maven tool +
    +
    +
  • +
+
+
+
+
+

Distribution management

+
+
+

The only extra thing that needs to be added to the Java server-side is some information that determines where the artifact of the project is going to be stored in Nexus. This is going to be a section in the main pom.xml file called <distributionManagement>. This section will point to the PL instance’s Nexus. Let’s have a look at it. It’s already configured with the PL default values.

+
+
+
+
<distributionManagement>
+    <repository>
+      <id>pl-nexus</id>
+      <name>PL Releases</name>
+      <url>http://nexus3-core:8081/nexus/content/repositories/maven-releases/</url>
+    </repository>
+    <snapshotRepository>
+      <id>pl-nexus</id>
+      <name>PL Snapshots</name>
+      <url>http://nexus3-core:8081/nexus3/repository/maven-snapshots</url>
+    </snapshotRepository>
+</distributionManagement>
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/java-design.html b/docs/my-thai-star/1.0/java-design.html new file mode 100644 index 00000000..a5089c23 --- /dev/null +++ b/docs/my-thai-star/1.0/java-design.html @@ -0,0 +1,1004 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Java design

+
+ +
+
+
+

Introduction

+
+
+

The Java back-end for My Thai Star application is going to be based on:

+
+
+
    +
  • +

    DEVON4J as the Java framework

    +
  • +
  • +

    Devonfw as the Development environment

    +
  • +
  • +

    CobiGen as code generation tool

    +
  • +
+
+
+

To know more details about the above technologies please visit the following documentation:

+
+
+ +
+
+
+
+

Basic architecture details

+
+
+

Following the DEVON4J conventions the Java My Thai Star back-end is going to be developed dividing the application in Components and using a three layers architecture.

+
+
+
+
+

Project modules

+
+
+

Using the DEVON4J approach for the Java back-end project we will have a structure of a Maven project formed by three projects

+
+
+
+project modules +
+
+
+
    +
  • +

    api: Stores all the REST interfaces and corresponding Request/Response objects.

    +
  • +
  • +

    core: Stores all the logic and functionality of the application.

    +
  • +
  • +

    server: Configures the packaging of the application.

    +
  • +
+
+
+

We can automatically generate this project structure using the DEVON4J Maven archetype

+
+
+
+
+

Components

+
+
+

The application is going to be divided in different components to encapsulate the different domains of the application functionalities.

+
+
+
+mtsj components +
+
+
+

As main components we will find:

+
+
+
    +
  • +

    Bookingmanagement: Manages the bookings part of the application. With this component the users (anonymous/logged in) can create new bookings or cancel an existing booking. The users with waiter role can see all scheduled bookings.

    +
  • +
  • +

    Ordermanagement: This component handles the process to order dishes (related to bookings). A user (as a host or as a guest) can create orders (that contain dishes) or cancel an existing one. The users with waiter role can see all ordered orders.

    +
  • +
  • +

    Dishmanagement: This component groups the logic related to the menu (dishes) view. Its main feature is to provide the client with the data of the available dishes but also can be used by other components (Ordermanagement) as a data provider in some processes.

    +
  • +
  • +

    Usermanagement: Takes care of the User Profile management, allowing to create and update the data profiles.

    +
  • +
+
+
+

As common components (that don’t exactly represent an application’s area but provide functionalities that can be used by the main components):

+
+
+
    +
  • +

    Imagemanagement: Manages the images of the application. In a first approach the` Dishmanagement` component and the Usermanagement component will have an image as part of its data. The Imagemanagement component will expose the functionality to store and retrieve this kind of data.

    +
  • +
  • +

    Mailservice: with this service we will provide the functionality for sending email notifications. This is a shared service between different app components such as bookingmanagement or ordercomponent.

    +
  • +
+
+
+

Other components:

+
+
+
    +
  • +

    Security (will manage the access to the private part of the application using a jwt implementation).

    +
  • +
  • +

    Twitter integration: planned as a Microservice will provide the twitter integration needed for some specific functionalities of the application.

    +
  • +
+
+
+
+
+

Layers

+
+
+
    +
  • +

    Service Layer: this layer will expose the REST API to exchange information with the client applications.

    +
  • +
  • +

    Logic Layer: the layer in charge of hosting the business logic of the application.

    +
  • +
  • +

    Data Access Layer: the layer to communicate with the data base.

    +
  • +
+
+
+

This architecture is going to be reflected dividing each component of the application in different packages to match those three layers.

+
+
+
+
+

Component structure

+
+
+

Each one of the components defined previously are going to be structured using the three-layers architecture. In each case we will have a service package, a logic package and a dataaccess package to fit the layers definition.

+
+
+
+component structure +
+
+
+
+
+

Dependency injection

+
+
+

As it is explained in the devonfw documentation we are going to implement the dependency injection pattern basing our solution on Spring and the Java standards: java.inject (JSR330) combined with JSR250.

+
+
+
+dependency injection +
+
+
+
    +
  • +

    Separation of API and implementation: Inside each layer we will separate the elements in different packages: api and impl. The api will store the interface with the methods definition and inside the impl we will store the class that implements the interface.

    +
  • +
+
+
+
+layer api impl +
+
+
+
    +
  • +

    Usage of JSR330: The Java standard set of annotations for dependency injection (@Named, @Inject, @PostConstruct, @PreDestroy, etc.) provides us with all the needed annotations to define our beans and inject them.

    +
  • +
+
+
+
+
@Named
+public class MyBeanImpl implements MyBean {
+  @Inject
+  private MyOtherBean myOtherBean;
+
+  @PostConstruct
+  public void init() {
+    // initialization if required (otherwise omit this method)
+  }
+
+  @PreDestroy
+  public void dispose() {
+    // shutdown bean, free resources if required (otherwise omit this method)
+  }
+}
+
+
+
+
+
+

Layers communication

+
+
+

The connection between layers, to access to the functionalities of each one, will be solved using the dependency injection and the JSR330 annotations.

+
+
+
+layers impl +
+
+
+

Connection Service - Logic

+
+
+
+
@Named("DishmanagementRestService")
+public class DishmanagementRestServiceImpl implements DishmanagementRestService {
+
+  @Inject
+  private Dishmanagement dishmanagement;
+
+  // use the 'this.dishmanagement' object to access to the functionalities of the logic layer of the component
+
+  ...
+
+}
+
+
+
+

Connection Logic - Data Access

+
+
+
+
@Named
+public class DishmanagementImpl extends AbstractComponentFacade implements Dishmanagement {
+
+  @Inject
+  private DishDao dishDao;
+
+  // use the 'this.dishDao' to access to the functionalities of the data access layer of the component
+  ...
+
+}
+
+
+
+
+
+

Service layer

+
+
+

The services layer will be solved using REST services with the JAX-RS implementation.

+
+
+

To give service to the defined User Stories we will need to implement the following services:

+
+
+
    +
  • +

    provide all available dishes.

    +
  • +
  • +

    save a booking.

    +
  • +
  • +

    save an order.

    +
  • +
  • +

    provide a list of bookings (only for waiters) and allow filtering.

    +
  • +
  • +

    provide a list of orders (only for waiters) and allow filtering.

    +
  • +
  • +

    login service (see the Security section).

    +
  • +
  • +

    provide the current user data (see the Security section)

    +
  • +
+
+
+

Following the naming conventions proposed for Devon4j applications we will define the following end points for the listed services.

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that do not belong to the Order entity).

    +
  • +
  • +

    (POST) /mythaistar/login.

    +
  • +
  • +

    (GET) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

You can find all the details for the services implementation in the Swagger definition included in the My Thai Star project on Github.

+
+
+
+
+

Service API

+
+
+

The api.rest package in the service layer of a component will store the definition of the service by a Java interface. In this definition of the service we will set-up the endpoints of the service, the type of data expected and returned, the HTTP method for each endpoint of the service and other configurations if needed.

+
+
+
+
@Path("/dishmanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public interface DishmanagementRestService {
+
+  @GET
+  @Path("/dish/{id}/")
+  public DishCto getDish(@PathParam("id") long id);
+
+  ...
+
+}
+
+
+
+
+
+

Service impl

+
+
+

Once the service api is defined we need to implement it using the Java interface as reference. We will add the service implementation class to the impl.rest package and implement the RestService interface.

+
+
+
+
@Named("DishmanagementRestService")
+public class DishmanagementRestServiceImpl implements DishmanagementRestService {
+
+  @Inject
+  private Dishmanagement dishmanagement;
+
+  @Override
+  public DishCto getDish(long id) {
+    return this.dishmanagement.findDish(id);
+  }
+
+  ...
+
+}
+
+
+
+
+
+

==

+
+
+

You can see the Devon4j conventions for REST services here. And the My Thai Star services definition here as part of the My Thai Star project. +== ==

+
+
+
+
+

Logic layer

+
+
+

In the logic layer we will locate all the business logic of the application. We will keep the same schema as we have done for the service layer, having an api package with the definition of the methods and a impl package for the implementation.

+
+
+

Also, inside the api package, a to package will be the place to store the transfer objects needed to pass data through the layers of the component.

+
+
+
+logic layer +
+
+
+

The logic api definition:

+
+
+
+
public interface Dishmanagement {
+
+  DishCto findDish(Long id);
+
+  ...
+}
+
+
+
+

The logic impl class:

+
+
+
+
@Named
+public class DishmanagementImpl extends AbstractComponentFacade implements Dishmanagement {
+
+  @Inject
+  private DishDao dishDao;
+
+
+  @Override
+  public DishCto findDish(Long id) {
+
+    return getBeanMapper().map(this.dishDao.findOne(id), DishCto.class);
+  }
+
+  ...
+
+}
+
+
+
+

The BeanMapper will provide the needed transformations between entity and transfer objects.

+
+
+

Also, the logic layer is the place to add validation for Authorization based on roles as we will see later.

+
+
+
+
+

Data Access layer

+
+
+

The data-access layer is responsible for managing the connections to access and process data. The mapping between java objects to a relational database is done in Devon4j with the spring-data-jpa.

+
+
+

As in the previous layers, the data-access layer will have both api and impl packages. However, in this case, the implementation will be slightly different. The api package will store the component main entities and, inside the _api package, another api.repo package will store the Repositories. The repository interface will extend DefaultRepository interface (located in com.devonfw.module.jpa.dataaccess.api.data package of devon4j-starter-spring-data-jpa ).

+
+
+

For queries we will differentiate between static queries (that will be located in a mapped file) and dynamic queries (implemented with QueryDsl). You can find all the details about how to manage queries with Devon4j here.

+
+
+

The default data base included in the project will be the H2 instance included with the Devon4j projects.

+
+
+

To get more details about pagination, data base security, _concurrency control, inheritance or how to solve the different relationships between entities visit the official devon4j dataaccess documentation.

+
+
+
+
+

Security with Json Web Token

+
+
+

For the Authentication and Authorization the app will implement the json web token protocol.

+
+
+
+
+

JWT basics

+
+
+
    +
  • +

    A user will provide a username / password combination to our Auth server.

    +
  • +
  • +

    The Auth server will try to identify the user and, if the credentials match, will issue a token.

    +
  • +
  • +

    The user will send the token as the Authorization header to access resources on server protected by JWT Authentication.

    +
  • +
+
+
+
+jwt schema +
+
+
+
+
+

JWT implementation details

+
+
+

The Json Web Token pattern will be implemented based on the Spring Security framework that is provided by default in the Devon4j projects.

+
+
+
+
+

== Authentication

+
+
+

Based on the Spring Security approach, we will implement a class extending WebSecurityConfigurerAdapter (Devon4j already provides the` BaseWebSecurityConfig` class) to define the security entry point and filters. Also, as My Thai Star is a mainly public application, we will define here the resources that won’t be secured.

+
+
+

List of unsecured resources:

+
+
+
    +
  • +

    /services/rest/dishmanagement/**: to allow anonymous users to see the dishes info in the menu section.

    +
  • +
  • +

    /services/rest/ordermanagement/v1/order: to allow anonymous users to save an order. They will need a booking token but they won’t be authenticated to do this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking: to allow anonymous users to create a booking. Only a booking token is necessary to accomplish this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking/cancel/**: to allow canceling a booking from an email. Only the booking token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/accept/**: to allow guests to accept an invite. Only a guest token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/decline/**: to allow guests to reject an invite. Only a guest token is needed.

    +
  • +
+
+
+

To configure the login we will set up the HttpSecurity object in the configure method of the class. We will define a JWTLoginFilter class that will handle the requests to the /login endpoint.

+
+
+
+
http.[...].antMatchers(HttpMethod.POST, "/login").permitAll().[...].addFilterBefore(new JWTLoginFilter("/login", authenticationManager()), UsernamePasswordAuthenticationFilter.class);
+
+
+
+

In the same HttpSecurity object we will set up the filter for the rest of the requests, to check the presence of the JWT token in the header. First we will need to create a JWTAuthenticationFilter class extending the GenericFilterBean class. Then we can add the filter to the HttpSecurity object

+
+
+
+
http.[...].addFilterBefore(new `JWTAuthenticationFilter()`, UsernamePasswordAuthenticationFilter.class);
+
+
+
+

Finally, as default users to start using the My Thai Star app we are going to define two profiles using the inMemoryAuthentication of the Spring Security framework. In the configure(AuthenticationManagerBuilder Auth) method we will create:

+
+
+
    +
  • +

    user: waiter

    +
  • +
  • +

    password: waiter

    +
  • +
  • +

    role: Waiter

    +
  • +
  • +

    user: user0

    +
  • +
  • +

    password: password

    +
  • +
  • +

    role: Customer

    +
  • +
+
+
+
+
auth.inMemoryAuthentication().withUser("waiter").password("waiter").roles("Waiter").and().withUser("user0").password("password").roles("Customer");
+
+
+
+
+
+

== Token set up

+
+
+

Following the official documentation the implementation details for the MyThaiStar’s JWT will be:

+
+
+
    +
  • +

    Secret: Used as part of the signature of the token, acting as a private key. For the showcase purposes we will use simply "ThisIsASecret".

    +
  • +
  • +

    Token Prefix schema: Bearer. The token will look like Bearer <token>

    +
  • +
  • +

    Header: Authorization. The response header where the token will be included. Also, in the requests, when checking the token it will be expected to be in the same header.

    +
  • +
  • +

    The Authorization header should be part of the Access-Control-Expose-Headers header to allow clients access to the Authorization header content (the token);

    +
  • +
  • +

    The claims are the content of the payload of the token. The claims are statements about the user, so we will include the user info in this section.

    +
    +
      +
    • +

      subject: "sub". The username.

      +
    • +
    • +

      issuer: "iss". Who creates the token. We could use the url of our service but, as this is a showcase app, we simply will use "MyThaiStarApp"

      +
    • +
    • +

      expiration date: "exp". Defines when the token expires.

      +
    • +
    • +

      creation date: "iat". Defines when the token has been created.

      +
    • +
    • +

      scope: "scope". Array of strings to store the user roles.

      +
    • +
    +
    +
  • +
  • +

    Signature Algorithm: To encrypt the token we will use the default algorithm HS512.

    +
  • +
+
+
+

An example of a token claims before encryption would be:

+
+
+

{sub=waiter, scope=[ROLE_Waiter], iss=MyThaiStarApp, exp=1496920280, iat=1496916680}

+
+
+
+
+

== Current User request

+
+
+

To provide the client with the current user data, our application should expose a service to return the user details. In Devon4j applications the /general/service/impl/rest/SecurityRestServiceImpl.java class is ready to do that.

+
+
+
+
@Path("/security/v1")
+@Named("SecurityRestService")
+public class SecurityRestServiceImpl {
+
+  @Produces(MediaType.APPLICATION_JSON)
+  @GET
+  @Path("/currentuser/")
+  public UserDetailsClientTo getCurrentUserDetails(@Context HttpServletRequest request) {
+
+  }
+}
+
+
+
+

We will only need to implement the getCurrentUserDetails method.

+
+
+
+
+

== Authorization

+
+
+

We need to secure three services, that only should be accessible for users with role Waiter:

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter.

    +
  • +
+
+
+

As part of the token we are providing the user Role. So, when validating the token, we can obtain that same information and build a UsernamePasswordAuthenticationToken with username and the roles as collection of Granted Authorities.

+
+
+

Doing so, afterwards, in the implementation class of the logic layer we can set up the related methods with the java security '@RolesAllowed' annotation to block the access to the resource to users that do not match the expected roles.

+
+
+
+
`@RolesAllowed(Roles.WAITER)`
+public PaginatedListTo<BookingEto> findBookings(BookingSearchCriteriaTo criteria) {
+  return findBookings(criteria);
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/java-testing.html b/docs/my-thai-star/1.0/java-testing.html new file mode 100644 index 00000000..feb49042 --- /dev/null +++ b/docs/my-thai-star/1.0/java-testing.html @@ -0,0 +1,396 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Java testing

+
+ +
+
+
+

Component testing

+
+
+

We are going to test our components as a unit using Spring Test and Devon4j-test modules.

+
+
+

In order to test a basic component of the app first we will create a test class in the src/test/java folder and inside the main package of the test module. We will name the class following the convention.

+
+
+
+
[Component]Test
+
+
+
+

Then, in the declaration of the test class, we will use the @SpringBootTest annotation to run the application context. In addition, we will extend the ComponentTest from Devon4j-test module to have access to the main functionalities of the module, see more details here.

+
+
+

Spring Test allows us to use Dependency Injection so we can inject our component directly using the @Inject annotation.

+
+
+

Each test will be represented by a method annotated with @Test. Inside the method we will test one functionality, evaluating the result thanks to the asserts provided by the ComponentTest class that we are extending.

+
+
+

A simple test example

+
+
+
+
@SpringBootTest(classes = SpringBootApp.class)
+public class DishmanagementTest extends `ComponentTest` {
+
+  @Inject
+  private Dishmanagement dishmanagement;
+
+  @Test
+  public void findAllDishes() {
+
+    PaginatedListTo<DishCto> result = this.dishmanagement.findDishes();
+    assertThat(result).isNotNull();
+  }
+
+  ...
+}
+
+
+
+
+
+

Running the tests

+
+ +
+
+
+

From Eclipse

+
+
+

We can run the test from within Eclipse with the contextual menu Run As > JUnit Test. This functionality can be launched from method level, class level or even package level. The results will be shown in the JUnit tab.

+
+
+
+test results eclipse +
+
+
+
+
+

From command line using Maven

+
+
+

We can also run tests using Maven and the command line, using the command mvn test (or mvn clean test).

+
+
+
+
`C:\MyThaiStar>mvn clean test`
+
+
+
+

Doing this we will run all the tests of the project (recognized by the Test word at the end of the classes) and the results will be shown by sub-project.

+
+
+
+
...
+
+[D: 2017-07-17 09:30:08,457] [P: INFO ] [C: ] [T: Thread-5] [L: org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean] - [M: Closing JPA EntityManagerFactory for persistence unit 'default']
+
+Results :
+
+Tests run: 11, Failures: 0, Errors: 0, Skipped: 1
+
+...
+
+[INFO]
+[INFO] --- maven-compiler-plugin:3.1:testCompile (default-testCompile) @ mtsj-server ---
+[INFO] No sources to compile
+[INFO]
+[INFO] --- maven-surefire-plugin:2.12.4:test (default-test) @ mtsj-server ---
+[INFO] No tests to run.
+[INFO] ------------------------------------------------------------------------
+[INFO] Reactor Summary:
+[INFO]
+[INFO] mtsj ............................................... SUCCESS [  0.902 s]
+[INFO] mtsj-core .......................................... SUCCESS [02:30 min]
+[INFO] mtsj-server ........................................ SUCCESS [  1.123 s]
+[INFO] ------------------------------------------------------------------------
+[INFO] BUILD SUCCESS
+[INFO] ------------------------------------------------------------------------
+[INFO] Total time: 02:35 min
+[INFO] Finished at: 20XX-07-17T09:30:13+02:00
+[INFO] Final Memory: 39M/193M
+[INFO] ------------------------------------------------------------------------
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/master-my-thai-star.html b/docs/my-thai-star/1.0/master-my-thai-star.html new file mode 100644 index 00000000..35937a35 --- /dev/null +++ b/docs/my-thai-star/1.0/master-my-thai-star.html @@ -0,0 +1,11384 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+
+

MyThaiStar

+
+
+

1. My Thai Star – Agile Framework

+ +
+
+

1.1 Team Setup

+
+

The team working on the development of the My Thai Star app and the documentation beside the technical development works distributed in various locations across Germany, the Netherlands, Spain and Poland. For the communication part the team uses the two channels Skype and Mail and for the documentation part the team makes usage mainly of GitHub and JIRA.

+
+
+
+

1.2 Scrum events

+ +
+
+

Sprint Planning

+
+

Within the My Thai Star project we decided on having one-hour Sprint Planning meetings for four-week Sprints. This decision is based on the fact that this project is not the main project of the team members. As the backlog refinement is done during the Sprint Planning we make usage of the planningpoker.com tool for the estimation of the tasks.

+
+
+
+Screenshot of planningpoker.com +
+
Figure 1. Screenshot of planningpoker.com during Sprint 1 Planning
+
+
+

During the Sprint Planning meeting the team receives support from Devon colleagues outside the development. This feedback helps the team to focus on important functionalities and task by keeping the eyes on the overall aim which is to have a working application by the end of June 2017.

+
+
+
+

Sprint Review

+
+

The Sprint Review meetings are time boxed to one hour for the four week Sprint. Within the Sprint Review meeting the team plans to do a retrospective of the finished Sprint. As well as it is done during the Sprint Planning the team receives support from Devon colleagues.

+
+
+
+

Sprint Retrospective

+
+

For this project the team aligned on not having a specific Sprint Retrospective meeting. The team is going to have a retrospective of a finished Sprint during the Sprint Review.

+
+
+
+

Daily Stand-ups

+
+

The team aligned on having two weekly Stand-up meetings instead of a Daily Stand-up meeting. In comparison with the time boxed length of 15mins described in the CAF for this project the team extended the Stand-up meeting to 30mins. The content of the meetings remains the same.

+
+
+
+

Backlog refinement

+
+

The team decided that the backlog refinement meeting is part of the Sprint Planning meeting.

+
+
+
+

1.3 Establish Product Backlog

+
+

For the My Thai Star project the team decided on using the JIRA agile documentation which is one of the widely used agile tools. JIRA is equipped with several useful tools regarding the agile software development (e.g. Scrum-Board). One of the big advantages of JIRA is the extensive configuration and possibilities to personalize.

+
+
+

With having a list of the Epics and User Stories for the My Thai Star development in GitHub, the team transferred the User Stories into the JIRA backlog as it is shown in the screenshot below. All User Stories are labeled colorfully with the related Epic which shapes the backlog in a clear manner.

+
+
+
+Screenshot of planningpoker.com +
+
Figure 2. Screenshot of the JIRA backlog during Sprint 2
+
+
+

We decided on working with Sub-tasks as a single user story comprised a number of single and separated tasks. Another benefit of working with sub-tasks is that every single sub-task can be assigned to a single team member whereas a user story can only be assigned to one team member. By picking single sub-tasks the whole process of a user story is better organized.

+
+
+
+Screenshot of Sub-tasks +
+
Figure 3. Screenshots of Sub-tasks during Sprint 2
+
+
+
+

2. My Thai Star – Agile Diary

+
+

In parallel to the Diary Ideation we use this Agile Diary to document our Scrum events. The target of this diary is to describe the differences to the Scrum methodology as well as specific characteristics of the project. We also document the process on how we approach the Scrum methodology over the length of the project.

+
+
+
+

24.03.2017 Sprint 1 Planning

+
+

Within the Sprint 1 Planning we used planning poker.com for the estimation of the user stories. The estimation process usually is part of the backlog refinement meeting. Regarding the project circumstances we decided to estimate the user stories during the Sprint Planning. Starting the estimation process we noticed that we had to align our interpretation of the estimation effort as these story points are not equivalent to a certain time interval. The story points are relative values to compare the effort of the user stories. With this in mind we proceeded with the estimation of the user stories. We decided to start Sprint 1 with the following user stories and the total amount of 37 story points: +• ICSDSHOW-2 Create invite for friends (8 Story Points) +• ICSDSHOW-4 Create reservation (3) +• ICSDSHOW-5 Handle invite (3) +• ICSDSHOW-6 Revoke accepted invite (5) +• ICSDSHOW-9 Cancel invite (3) +• ICSDSHOW-11 Filter menu (5) +• ICSDSHOW-12 Define order (5) +• ICSDSHOW-13 Order the order (5) +As the Sprint Planning is time boxed to one hour we managed to hold this meeting within this time window.

+
+
+
+

27.04.2017 Sprint 1 Review

+
+

During the Sprint 1 Review we had a discussion about the data model proposal. For the discussion we extended this particular Review meeting to 90min. As this discussion took almost 2/3 of the Review meeting we only had a short time left for our review of Sprint 1. For the following scrum events we decided to focus on the primary target of these events and have discussions needed for alignments in separate meetings. +Regarding the topic of splitting user stories we had the example of a certain user story which included a functionality of a twitter integration (ICSDSHOW-17 User Profile and Twitter integration). As the twitter functionality could not have been implemented at this early point of time we thought about cutting the user story into two user stories. We aligned on mocking the twitter functionality until the dependencies are developed in order to test the components. As this user story is estimated with 13 story points it is a good example for the question whether to cut a user story into multiple user stories or not. +Unfortunately not all user stories of Sprint 1 could have been completed. Due to this situation we discussed on whether pushing all unfinished user stories into the status done or moving them to Sprint 2. We aligned on transferring the unfinished user stories into the next Sprint. During the Sprint 1 the team underestimated that a lot of holidays crossed the Sprint 1 goals. As taking holidays and absences of team members into consideration is part of a Sprint Planning we have a learning effect on setting a Sprint Scope.

+
+
+
+

03.05.2017 Sprint 2 Planning

+
+

As we aligned during the Sprint 1 Review on transferring unfinished user stories into Sprint 2 the focus for Sprint 2 was on finishing these transferred user stories. During our discussion on how many user stories we could work on in Sprint 2 we needed to remind ourselves that the overall target is to develop an example application for the devonfw. Considering this we aligned on a clear target for Sprint 2: To focus on finishing User Stories as we need to aim for a practicable and realizable solution. Everybody aligned on the aim of having a working application at the end of Sprint 2. +For the estimation process of user stories we make again usage of planningpoker.com as the team prefers this “easy-to-use” tool. During our second estimation process we had the situation in which the estimated story points differed strongly from one team member to another. In this case each team member briefly explained how they understood and interpreted the user story. It turned out that team members misinterpreted the user stories. With having this discussion all team members got the same understanding of the specific functionality and scope of a user story. After the alignment the team members adjusted their estimations. +Beside this need for discussion the team estimated most of the user stories with very similar story points. This fact shows the increase within the effort estimation for each team member in comparison to Sprint 1 planning. Over the short time of two Sprint planning the team received a better understanding and feeling for the estimation with story points.

+
+
+
+

01.06.2017 Sprint 2 Review

+
+

As our Sprint 1 Review four weeks ago was not completely structured like a Sprint Review meeting we focused on the actual intention of a Sprint Review meeting during Sprint 2 Review. This means we demonstrated the completed and implemented functionalities with screen sharing and the product owner accepted the completed tasks. +Within the User Story ICSDSHOW-22 “See all orders/reservations” the functionality “filtering the list by date” could have not been implemented during Sprint 2. The team was unsure on how to proceed with this task. One team member added that especially in regards of having a coherent release, implementing less but working functionalities is much better than implementing more but not working functionalities. For this the team reminded itself focusing on completing functionalities and not working straight to a working application.

+
+
+
Table of Contents
+ +
+
+
+

User Stories

+
+

The list of user stories, exported from JIRA, can be downloaded from here.

+
+
+
+

Epic: Invite friends

+ +
+
+

US: create invite for friends

+
+

Epic: Invite friends

+
+
+

As a guest I want to create a dinner event by entering date and time and adding potential guests by their emails so that each potential guest will receive an email in order to confirm or decline my invite.

+
+
+
+

== Acceptance criteria

+
+
    +
  1. +

    only date and time in future possible and both required

    +
  2. +
  3. +

    only valid email addresses: text@text.xx, one entered email-address is required

    +
  4. +
  5. +

    if AGB are not checked, an error message is shown

    +
  6. +
  7. +

    after the invite is done

    +
    +
      +
    1. +

      I see the confirmation screen of my invite (see wireframe)

      +
    2. +
    3. +

      I receive a confirmation email about my invite containing date, time and invited guests

      +
    4. +
    5. +

      all guests receive a mail with my invite

      +
    6. +
    +
    +
  8. +
+
+
+
+

US: create reservation

+
+

Epic: Invite friends

+
+
+

As a guest I want to create a reservation by entering date and time and number of adults and kids

+
+
+
+

== Acceptance criteria

+
+
    +
  1. +

    only date and time in future possible and both required

    +
  2. +
  3. +

    only valid email addresses: text@text.xx, one entered email-address is required

    +
  4. +
  5. +

    if AGB are not checked, an error message is shown

    +
  6. +
  7. +

    after the reservation is done

    +
    +
      +
    1. +

      I see a confirmation screen of my reservation with date-time, number of persons and kids

      +
    2. +
    3. +

      I receive a confirmation email about my reservation

      +
    4. +
    +
    +
  8. +
+
+
+
+

== Wireframes

+
+

see real time board

+
+
+
+

US: handle invite

+
+

As an invited guest I would like to receive an email - after somebody has invited me - with the option to accept or decline the invite so that the system knows about my participation

+
+
+
+

== AC:

+
+
    +
  1. +

    the mail contains the following information about the invite

    +
    +
      +
    1. +

      who has invited

      +
    2. +
    3. +

      who else is invited

      +
    4. +
    5. +

      date and time of the invite

      +
    6. +
    7. +

      button to accept or decline

      +
    8. +
    9. +

      after pressing the buttons the system will store the status (yes/no) of my invite

      +
    10. +
    +
    +
  2. +
+
+
+
+

US: revoke accepted invite

+
+

As an invited guest I would like to revoke my previous answer in order to inform the system and the inviter about my no showup

+
+
+
+

== AC:

+
+
    +
  1. +

    the inviter and myself receives an email about my cancellation

    +
  2. +
  3. +

    the system sets my status of my invite to no

    +
  4. +
  5. +

    in case I have placed an order, the order is also removed from the system.

    +
  6. +
  7. +

    the cancellation is only possible 10 minutes before the event takes place. The system shows a message that cancellation is not possible anymore.

    +
  8. +
+
+
+
+

US: calculate best table

+
+

As a guest I would like the system to check (1 hour before my invite) all my invites and to reserve a table fitting the number of accepted users

+
+
+
+

== Details

+
+

Pseudo-algorithm for reservation: +Find table for given date and time where seats of guests >= Count of invited guests plus one. In case no results, decline request and show error message to user. In case of any result, make a reservation for table…​. +For each decline of a guest remove guest and search with reduced number for new table. In case table is found, reserve it and remove reservation from previous table. In case not, do not change reservations.

+
+
+
+

US: find table by reservation info

+
+

As a waiter I would like to search by reference number or email address for the reserved table in order to know the table for my visit. (when arriving at the restaurant)

+
+
+
+

== AC:

+
+
    +
  1. +

    After entering the email the systems shows the number of the table. In case no reservation found, a message is shown.

    +
  2. +
  3. +

    Entered email address could be email of inviter or any invited guest.

    +
  4. +
+
+
+
+

US: cancel invite

+
+

Epic: Invite friends

+
+
+

As a guests who has sent an invite I want to be able to cancel my previous invite in order to inform the restaurant and my invited guests that I will not show up

+
+
+
+

== AC:

+
+
    +
  1. +

    the option to cancel the invite is available in the confirmation-mail about my invite

    +
  2. +
  3. +

    after my cancellation all invited guests receives a mail about the cancellation

    +
  4. +
  5. +

    I see a confirmation that my invite was canceled successfully

    +
  6. +
  7. +

    after my cancellation my invite and reservation and all orders related to it are deleted from the system and no one can accept or decline any invite for it

    +
  8. +
  9. +

    the cancellation is only possible one hour before the invite takes place. After that I am not allowed to cancel it any more.

    +
  10. +
+
+
+
+

Epic: Digital Menu

+ +
+
+

US: filter menu

+
+

As a guest I want to filter the menu so that I only see the dishes I am interested in

+
+
+
+

== AC:

+
+
    +
  1. +

    the guest can filter by

    +
    +
      +
    1. +

      type: starter | main dish | dessert; XOR; if nothing is selected all are shown (default value)

      +
    2. +
    3. +

      veggy (yes|no|does not matter (default))

      +
    4. +
    5. +

      vegan (yes|no|does not matter (default))

      +
    6. +
    7. +

      rice (yes|no|does not matter (default))

      +
    8. +
    9. +

      curry (yes|no|does not matter (default))

      +
    10. +
    11. +

      noodle (yes|no|does not matter (default))

      +
    12. +
    13. +

      price (range)

      +
    14. +
    15. +

      ratings (range)

      +
    16. +
    17. +

      my favorite (yes|no|does not matter (default)) — free text (search in title and description)

      +
    18. +
    +
    +
  2. +
  3. +

    the guest can sort by price asc, rating asc

    +
  4. +
  5. +

    after setting the filter only dishes are shown which fulfills those criteria

    +
  6. +
  7. +

    by pressing the button reset filter all filter are reset to the initial value

    +
  8. +
  9. +

    by pressing the filter button the filter is applied [or is it triggered after each change?]

    +
  10. +
+
+
+
+

US: Define order

+
+

As a guest I want to define my order by selecting dishes from the menu

+
+
+
+

== AC:

+
+
    +
  • +

    The guest can add each dish to the order

    +
  • +
  • +

    In case the guest adds the same dish multiple times, a counter in the order for this dish is increased for this dish

    +
  • +
  • +

    The guest can remove the dish from the order

    +
  • +
  • +

    The guest can add for each main dish the type of meat (pork, chicken, tofu)

    +
  • +
  • +

    The guest can add for each dish a free-text-comment

    +
  • +
  • +

    After adding/removing any dish the price is calculated including VAT

    +
  • +
+
+
+
+

US: Order the order

+
+

As a guest I want to order my selected dishes (order)

+
+
+

AC:

+
+
+
    +
  1. +

    I receive a mail containing my order with all dishes and the final price

    +
  2. +
  3. +

    precondition for ordering:

    +
    +
      +
    1. +

      Each order must be associated with a reservation / invite. Without any reference no order could be placed. The reference could be obtained from a previous reservation/invite (created during same session) or by the previous accepted invite (link in email) or by entering the reference id when asked by the system.

      +
      +
        +
      1. +

        In case precondition is not fulfilled, the guest is asked

        +
        +
          +
        1. +

          whether he/she would like to create a reservation/invite and is forwarded to US Invite Friends. Only after finalizing the reservation the order is accepted.

          +
        2. +
        3. +

          or he/she would enter previous created reservation-id he/she knows in order to associate his/her order with this reservation

          +
        4. +
        +
        +
      2. +
      +
      +
    2. +
    +
    +
  4. +
+
+
+
+

US: Cancel order

+
+

As a guest I want to cancel my order.

+
+
+

AC:

+
+
+
    +
  1. +

    in my received confirmation mail I have the option to cancel my order

    +
  2. +
  3. +

    the cancellation is only possible one hour before my reservation takes place

    +
  4. +
  5. +

    my order is deleted from the system

    +
  6. +
+
+
+

Remark: Changing the order is not possible. For that the order must be canceled and created from scratch again

+
+
+
+

US: Read twitter rating for dishes

+
+

As a guest I want to read for all dishes the rating done by twitter because I would like to know the opinion of others

+
+
+

AC:

+
+
+
    +
  1. +

    For each dish I see the latest 3 comments done by twitter for this vote (text, username, avatar)

    +
  2. +
  3. +

    For each dish I see the number of likes done by twitter

    +
  4. +
+
+
+
+

Epic: User Profile

+ +
+
+

US: User Profile

+
+

As a guest I want to have a user profile to associate it with my twitter account to be able to like/rate dishes

+
+
+

AC:

+
+
+
    +
  1. +

    Username of my profile is my email address

    +
  2. +
  3. +

    My profile is protected by password

    +
  4. +
  5. +

    I can log in and log out to my profile

    +
  6. +
  7. +

    I can reset my password by triggering the reset by mail

    +
  8. +
  9. +

    I can associate my profile with my twitter account in order to rate dishes and store my favorites by liking posts associated to dishes

    +
  10. +
+
+
+
+

Epic: Rate by twitter

+ +
+
+

US: Receive mail to rate your dish

+
+

As a guest I want to receive a mail by the system in order to rate my dish

+
+
+
+

US: Rate your dish

+
+

As a guest I want to add a comment or a like via my twitter account for a dish

+
+
+

AC:

+
+
+
    +
  1. +

    Before I write my rate I would like to be able to read all tweets of other users for this dish

    +
  2. +
  3. +

    I would like to see the number of likes for a dish

    +
  4. +
+
+
+
+

Epic: Waiter Cockpit

+ +
+
+

US: See all orders/reservations

+
+

As a waiter I want to see all orders/reservation in order to know what is going on in my restaurant

+
+
+

AC:

+
+
+
    +
  1. +

    all orders/reservations are shown in a list view (read-only). Those list can be filtered and sorted (similar to excel-data-filters)

    +
  2. +
  3. +

    orders/reservations are shown in separate lists.

    +
  4. +
  5. +

    for each order the dish, meat, comment, item, reservation-id, reservation date-time, creation-date-time is shown

    +
  6. +
  7. +

    for each reservation the inviters email, the guests-emails, the number of accepts and declines, calculated table number, the reservation-id, reservation date-time and creation-date-time are shown

    +
  8. +
  9. +

    the default filter for all lists is the today’s date for reservation date-time. this filter can be deleted.

    +
  10. +
  11. +

    only reservations and orders with reservation date in the future shall be available in this view. All other orders and reservation shall not be deleted; for data Analytics those orders and reservation shall still exist in the system.

    +
  12. +
+
+
+

checklist:

+
+
+

talk about:

+
+
+
    +
  • +

    who?

    +
  • +
  • +

    what?

    +
  • +
  • +

    why (purpose)

    +
  • +
  • +

    why (objective)

    +
  • +
  • +

    what happens outside the software

    +
  • +
  • +

    what might go wrong

    +
  • +
  • +

    any question or assumptions (write them down) , DoR should check that those sections are empty.

    +
  • +
  • +

    is there any better solution?

    +
  • +
  • +

    how (technical perspective)

    +
  • +
  • +

    do a rough estimate

    +
  • +
  • +

    check INVEST

    +
  • +
+
+
+
+
+
+

Technical design

+
+ +
+
+
+

Data Model

+
+
+
Table of Contents
+ +
+
+
Data Model
+
+
+mts datamodel +
+
+
+
Table of Contents
+ +
+
+
+
NoSQL Data Model
+
+
+dynamodb data model 1.4.1 +
+
+
+
+
+
+

Server Side

+
+
+
Table of Contents
+ +
+
+
Java design
+ +
+
+
Introduction
+
+

The Java back-end for My Thai Star application is going to be based on:

+
+
+
    +
  • +

    DEVON4J as the Java framework

    +
  • +
  • +

    Devonfw as the Development environment

    +
  • +
  • +

    CobiGen as code generation tool

    +
  • +
+
+
+

To know more details about the above technologies please visit the following documentation:

+
+
+ +
+
+
+
Basic architecture details
+
+

Following the DEVON4J conventions the Java My Thai Star back-end is going to be developed dividing the application in Components and using a three layers architecture.

+
+
+
+
Project modules
+
+

Using the DEVON4J approach for the Java back-end project we will have a structure of a Maven project formed by three projects

+
+
+
+project modules +
+
+
+
    +
  • +

    api: Stores all the REST interfaces and corresponding Request/Response objects.

    +
  • +
  • +

    core: Stores all the logic and functionality of the application.

    +
  • +
  • +

    server: Configures the packaging of the application.

    +
  • +
+
+
+

We can automatically generate this project structure using the DEVON4J Maven archetype

+
+
+
+
Components
+
+

The application is going to be divided in different components to encapsulate the different domains of the application functionalities.

+
+
+
+mtsj components +
+
+
+

As main components we will find:

+
+
+
    +
  • +

    Bookingmanagement: Manages the bookings part of the application. With this component the users (anonymous/logged in) can create new bookings or cancel an existing booking. The users with waiter role can see all scheduled bookings.

    +
  • +
  • +

    Ordermanagement: This component handles the process to order dishes (related to bookings). A user (as a host or as a guest) can create orders (that contain dishes) or cancel an existing one. The users with waiter role can see all ordered orders.

    +
  • +
  • +

    Dishmanagement: This component groups the logic related to the menu (dishes) view. Its main feature is to provide the client with the data of the available dishes but also can be used by other components (Ordermanagement) as a data provider in some processes.

    +
  • +
  • +

    Usermanagement: Takes care of the User Profile management, allowing to create and update the data profiles.

    +
  • +
+
+
+

As common components (that don’t exactly represent an application’s area but provide functionalities that can be used by the main components):

+
+
+
    +
  • +

    Imagemanagement: Manages the images of the application. In a first approach the Dishmanagement component and the Usermanagement component will have an image as part of its data. The Imagemanagement component will expose the functionality to store and retrieve this kind of data.

    +
  • +
  • +

    Mailservice: with this service we will provide the functionality for sending email notifications. This is a shared service between different app components such as bookingmanagement or ordercomponent.

    +
  • +
+
+
+

Other components:

+
+
+
    +
  • +

    Security (will manage the access to the private part of the application using a jwt implementation).

    +
  • +
  • +

    Twitter integration: planned as a Microservice will provide the twitter integration needed for some specific functionalities of the application.

    +
  • +
+
+
+
+
Layers
+
+
    +
  • +

    Service Layer: this layer will expose the REST API to exchange information with the client applications.

    +
  • +
  • +

    Logic Layer: the layer in charge of hosting the business logic of the application.

    +
  • +
  • +

    Data Access Layer: the layer to communicate with the data base.

    +
  • +
+
+
+

This architecture is going to be reflected dividing each component of the application in different packages to match those three layers.

+
+
+
+
Component structure
+
+

Each one of the components defined previously are going to be structured using the three-layers architecture. In each case we will have a service package, a logic package and a dataaccess package to fit the layers definition.

+
+
+
+component structure +
+
+
+
+
Dependency injection
+
+

As it is explained in the devonfw documentation we are going to implement the dependency injection pattern basing our solution on Spring and the Java standards: javax.inject (JSR330) combined with JSR250.

+
+
+
+dependency injection +
+
+
+
    +
  • +

    Separation of API and implementation: Inside each layer we will separate the elements in different packages: api and impl. The api will store the interface with the methods definition and inside the impl we will store the class that implements the interface.

    +
  • +
+
+
+
+layer api impl +
+
+
+
    +
  • +

    Usage of JSR330: The Java standard set of annotations for dependency injection (@Named, @Inject, @PostConstruct, @PreDestroy, etc.) provides us with all the needed annotations to define our beans and inject them.

    +
  • +
+
+
+
+
@Named
+public class MyBeanImpl implements MyBean {
+  @Inject
+  private MyOtherBean myOtherBean;
+
+  @PostConstruct
+  public void init() {
+    // initialization if required (otherwise omit this method)
+  }
+
+  @PreDestroy
+  public void dispose() {
+    // shutdown bean, free resources if required (otherwise omit this method)
+  }
+}
+
+
+
+
+
Layers communication
+
+

The connection between layers, to access to the functionalities of each one, will be solved using the dependency injection and the JSR330 annotations.

+
+
+
+layers impl +
+
+
+

Connection Service - Logic

+
+
+
+
@Named("DishmanagementRestService")
+public class DishmanagementRestServiceImpl implements DishmanagementRestService {
+
+  @Inject
+  private Dishmanagement dishmanagement;
+
+  // use the 'this.dishmanagement' object to access to the functionalities of the logic layer of the component
+
+  ...
+
+}
+
+
+
+

Connection Logic - Data Access

+
+
+
+
@Named
+public class DishmanagementImpl extends AbstractComponentFacade implements Dishmanagement {
+
+  @Inject
+  private DishDao dishDao;
+
+  // use the 'this.dishDao' to access to the functionalities of the data access layer of the component
+  ...
+
+}
+
+
+
+
+
Service layer
+
+

The services layer will be solved using REST services with the JAX-RS implementation.

+
+
+

To give service to the defined User Stories we will need to implement the following services:

+
+
+
    +
  • +

    provide all available dishes.

    +
  • +
  • +

    save a booking.

    +
  • +
  • +

    save an order.

    +
  • +
  • +

    provide a list of bookings (only for waiters) and allow filtering.

    +
  • +
  • +

    provide a list of orders (only for waiters) and allow filtering.

    +
  • +
  • +

    login service (see the Security section).

    +
  • +
  • +

    provide the current user data (see the Security section)

    +
  • +
+
+
+

Following the naming conventions proposed for Devon4j applications we will define the following end points for the listed services.

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that do not belong to the Order entity).

    +
  • +
  • +

    (POST) /mythaistar/login.

    +
  • +
  • +

    (GET) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

You can find all the details for the services implementation in the Swagger definition included in the My Thai Star project on Github.

+
+
+
+
Service API
+
+

The api.rest package in the service layer of a component will store the definition of the service by a Java interface. In this definition of the service we will set-up the endpoints of the service, the type of data expected and returned, the HTTP method for each endpoint of the service and other configurations if needed.

+
+
+
+
@Path("/dishmanagement/v1")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public interface DishmanagementRestService {
+
+  @GET
+  @Path("/dish/{id}/")
+  public DishCto getDish(@PathParam("id") long id);
+
+  ...
+
+}
+
+
+
+
+
Service impl
+
+

Once the service api is defined we need to implement it using the Java interface as reference. We will add the service implementation class to the impl.rest package and implement the RestService interface.

+
+
+
+
@Named("DishmanagementRestService")
+public class DishmanagementRestServiceImpl implements DishmanagementRestService {
+
+  @Inject
+  private Dishmanagement dishmanagement;
+
+  @Override
+  public DishCto getDish(long id) {
+    return this.dishmanagement.findDish(id);
+  }
+
+  ...
+
+}
+
+
+
+
+
==
+
+

You can see the Devon4j conventions for REST services here. And the My Thai Star services definition here as part of the My Thai Star project.

+
+
+
+
Logic layer
+
+

In the logic layer we will locate all the business logic of the application. We will keep the same schema as we have done for the service layer, having an api package with the definition of the methods and a impl package for the implementation.

+
+
+

Also, inside the api package, a to package will be the place to store the transfer objects needed to pass data through the layers of the component.

+
+
+
+logic layer +
+
+
+

The logic api definition:

+
+
+
+
public interface Dishmanagement {
+
+  DishCto findDish(Long id);
+
+  ...
+}
+
+
+
+

The logic impl class:

+
+
+
+
@Named
+public class DishmanagementImpl extends AbstractComponentFacade implements Dishmanagement {
+
+  @Inject
+  private DishDao dishDao;
+
+
+  @Override
+  public DishCto findDish(Long id) {
+
+    return getBeanMapper().map(this.dishDao.findOne(id), DishCto.class);
+  }
+
+  ...
+
+}
+
+
+
+

The BeanMapper will provide the needed transformations between entity and transfer objects.

+
+
+

Also, the logic layer is the place to add validation for Authorization based on roles as we will see later.

+
+
+
+
Data Access layer
+
+

The data-access layer is responsible for managing the connections to access and process data. The mapping between java objects to a relational database is done in Devon4j with the spring-data-jpa.

+
+
+

As in the previous layers, the data-access layer will have both api and impl packages. However, in this case, the implementation will be slightly different. The api package will store the component main entities and, inside the api package, another api.repo package will store the Repositories. The repository interface will extend DefaultRepository interface (located in com.devonfw.module.jpa.dataaccess.api.data package of devon4j-starter-spring-data-jpa).

+
+
+

For queries we will differentiate between static queries (that will be located in a mapped file) and dynamic queries (implemented with QueryDsl). You can find all the details about how to manage queries with Devon4j here.

+
+
+

The default data base included in the project will be the H2 instance included with the Devon4j projects.

+
+
+

To get more details about pagination, data base security, concurrency control, inheritance or how to solve the different relationships between entities visit the official devon4j dataaccess documentation.

+
+
+
+
Security with Json Web Token
+
+

For the Authentication and Authorization the app will implement the json web token protocol.

+
+
+
+
JWT basics
+
+
    +
  • +

    A user will provide a username / password combination to our Auth server.

    +
  • +
  • +

    The Auth server will try to identify the user and, if the credentials match, will issue a token.

    +
  • +
  • +

    The user will send the token as the Authorization header to access resources on server protected by JWT Authentication.

    +
  • +
+
+
+
+jwt schema +
+
+
+
+
JWT implementation details
+
+

The Json Web Token pattern will be implemented based on the Spring Security framework that is provided by default in the Devon4j projects.

+
+
+
+
== Authentication
+
+

Based on the Spring Security approach, we will implement a class extending WebSecurityConfigurerAdapter (Devon4j already provides the BaseWebSecurityConfig class) to define the security entry point and filters. Also, as My Thai Star is a mainly public application, we will define here the resources that won’t be secured.

+
+
+

List of unsecured resources:

+
+
+
    +
  • +

    /services/rest/dishmanagement/**: to allow anonymous users to see the dishes info in the menu section.

    +
  • +
  • +

    /services/rest/ordermanagement/v1/order: to allow anonymous users to save an order. They will need a booking token but they won’t be authenticated to do this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking: to allow anonymous users to create a booking. Only a booking token is necessary to accomplish this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking/cancel/**: to allow canceling a booking from an email. Only the booking token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/accept/**: to allow guests to accept an invite. Only a guest token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/decline/**: to allow guests to reject an invite. Only a guest token is needed.

    +
  • +
+
+
+

To configure the login we will set up the HttpSecurity object in the configure method of the class. We will define a JWTLoginFilter class that will handle the requests to the /login endpoint.

+
+
+
+
http.[...].antMatchers(HttpMethod.POST, "/login").permitAll().[...].addFilterBefore(new JWTLoginFilter("/login", authenticationManager()), UsernamePasswordAuthenticationFilter.class);
+
+
+
+

In the same HttpSecurity object we will set up the filter for the rest of the requests, to check the presence of the JWT token in the header. First we will need to create a JWTAuthenticationFilter class extending the GenericFilterBean class. Then we can add the filter to the HttpSecurity object

+
+
+
+
http.[...].addFilterBefore(new JWTAuthenticationFilter(), UsernamePasswordAuthenticationFilter.class);
+
+
+
+

Finally, as default users to start using the My Thai Star app we are going to define two profiles using the inMemoryAuthentication of the Spring Security framework. In the configure(AuthenticationManagerBuilder Auth) method we will create:

+
+
+
    +
  • +

    user: waiter

    +
  • +
  • +

    password: waiter

    +
  • +
  • +

    role: Waiter

    +
  • +
  • +

    user: user0

    +
  • +
  • +

    password: password

    +
  • +
  • +

    role: Customer

    +
  • +
+
+
+
+
auth.inMemoryAuthentication().withUser("waiter").password("waiter").roles("Waiter").and().withUser("user0").password("password").roles("Customer");
+
+
+
+
+
== Token set up
+
+

Following the official documentation the implementation details for the MyThaiStar’s JWT will be:

+
+
+
    +
  • +

    Secret: Used as part of the signature of the token, acting as a private key. For the showcase purposes we will use simply "ThisIsASecret".

    +
  • +
  • +

    Token Prefix schema: Bearer. The token will look like Bearer <token>

    +
  • +
  • +

    Header: Authorization. The response header where the token will be included. Also, in the requests, when checking the token it will be expected to be in the same header.

    +
  • +
  • +

    The Authorization header should be part of the Access-Control-Expose-Headers header to allow clients access to the Authorization header content (the token);

    +
  • +
  • +

    The claims are the content of the payload of the token. The claims are statements about the user, so we will include the user info in this section.

    +
    +
      +
    • +

      subject: "sub". The username.

      +
    • +
    • +

      issuer: "iss". Who creates the token. We could use the url of our service but, as this is a showcase app, we simply will use "MyThaiStarApp"

      +
    • +
    • +

      expiration date: "exp". Defines when the token expires.

      +
    • +
    • +

      creation date: "iat". Defines when the token has been created.

      +
    • +
    • +

      scope: "scope". Array of strings to store the user roles.

      +
    • +
    +
    +
  • +
  • +

    Signature Algorithm: To encrypt the token we will use the default algorithm HS512.

    +
  • +
+
+
+

An example of a token claims before encryption would be:

+
+
+

{sub=waiter, scope=[ROLE_Waiter], iss=MyThaiStarApp, exp=1496920280, iat=1496916680}

+
+
+
+
== Current User request
+
+

To provide the client with the current user data, our application should expose a service to return the user details. In Devon4j applications the /general/service/impl/rest/SecurityRestServiceImpl.java class is ready to do that.

+
+
+
+
@Path("/security/v1")
+@Named("SecurityRestService")
+public class SecurityRestServiceImpl {
+
+  @Produces(MediaType.APPLICATION_JSON)
+  @GET
+  @Path("/currentuser/")
+  public UserDetailsClientTo getCurrentUserDetails(@Context HttpServletRequest request) {
+
+  }
+}
+
+
+
+

We will only need to implement the getCurrentUserDetails method.

+
+
+
+
== Authorization
+
+

We need to secure three services, that only should be accessible for users with role Waiter:

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter.

    +
  • +
+
+
+

As part of the token we are providing the user Role. So, when validating the token, we can obtain that same information and build a UsernamePasswordAuthenticationToken with username and the roles as collection of Granted Authorities.

+
+
+

Doing so, afterwards, in the implementation class of the logic layer we can set up the related methods with the java security '@RolesAllowed' annotation to block access to the resource for users that do not match the expected roles.

+
+
+
+
@RolesAllowed(Roles.WAITER)
+public PaginatedListTo<BookingEto> findBookings(BookingSearchCriteriaTo criteria) {
+  return findBookings(criteria);
+}
+
+
+
+
Table of Contents
+ +
+
+
+
.NET design
+
+

TODO

+
+
+
Table of Contents
+ +
+
+
+
NodeJS design (deprecated)
+ +
+
+
Introduction
+
+

The NodeJS back-end for My Thai Star application is going to be based on:

+
+
+
    +
  • +

    ExpressJS as the web application framework

    +
  • +
  • +

    devon4node as data access layer framework

    +
  • +
  • +

    DynamoDB as NoSQL Database

    +
  • +
+
+
+

To know more details about the above technologies please visit the following documentation:

+
+
+ +
+
+
+
Basic architecture details
+
+

This structure can be shown in the following example image:

+
+
+
+folder organization +
+
+
+
    +
  • +

    public - All files which be exposed on the server directly

    +
  • +
  • +

    src

    +
    +
      +
    • +

      database folder - Folder with scripts to create/delete/seed the database

      +
    • +
    • +

      model - Folder with all data model

      +
    • +
    • +

      routes - Folder with all ExpressJS routers

      +
    • +
    • +

      utils - Folder with all utils like classes and functions

      +
    • +
    • +

      app.ts - File with ExpressJS declaration

      +
    • +
    • +

      config.ts - File with server configs

      +
    • +
    • +

      logic.ts - File with the business logic

      +
    • +
    +
    +
  • +
  • +

    test - Folder with all tests

    +
  • +
+
+
+
+
Layers
+
+
    +
  • +

    Service Layer: this layer will expose the REST API to exchange information with the client applications.

    +
  • +
  • +

    Logic Layer: the layer in charge of hosting the business logic of the application.

    +
  • +
  • +

    Data Access Layer: the layer to communicate with the data base.

    +
  • +
+
+
+
+
Service layer
+
+

The services layer will be solved using REST services with ExpressJS.

+
+
+

To give service to the defined User Stories we will need to implement the following services:

+
+
+
    +
  • +

    provide all available dishes.

    +
  • +
  • +

    save a booking.

    +
  • +
  • +

    save an order.

    +
  • +
  • +

    provide a list of bookings (only for waiters) and allow filtering.

    +
  • +
  • +

    provide a list of orders (only for waiters) and allow filtering.

    +
  • +
  • +

    login service (see the Security section).

    +
  • +
  • +

    provide the current user data (see the Security section)

    +
  • +
+
+
+

In order to be compatible with the other back-end implementations, we must follow the naming conventions proposed for Devon4j applications. We will define the following end points for the listed services.

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that do not belong to the Order entity).

    +
  • +
  • +

    (POST) /mythaistar/login.

    +
  • +
  • +

    (GET) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

You can find all the details for the services implementation in the Swagger definition included in the My Thai Star project on Github.

+
+
+

To treat these services separately, the following routers were created:

+
+
+
    +
  • +

    bookingmanagement: will answer all requests with the prefix /mythaistar/services/rest/bookingmanagement/v1

    +
  • +
  • +

    dishmanagement: will answer all requests with the prefix /mythaistar/services/rest/dishmanagement/v1

    +
  • +
  • +

    ordermanagement: will answer all requests with the prefix /mythaistar/services/rest/ordermanagement/v1

    +
  • +
+
+
+

These routers will define the behavior for each service and use the logical layer.

+
+
+

An example of service definition:

+
+
+
+
router.post('/booking/search', (req: types.CustomRequest, res: Response) => {
+    try {
+        // body content must be SearchCriteria
+        if (!types.isSearchCriteria(req.body)) {
+            throw {code: 400, message: 'No booking token given' };
+        }
+
+        // use the searchBooking method defined at business logic
+        business.searchBooking(req.body, (err: types.Error | null, bookingEntity: types.PaginatedList) => {
+            if (err) {
+                res.status(err.code || 500).json(err.message);
+            } else {
+                res.json(bookingEntity);
+            }
+        });
+    } catch (err) {
+        res.status(err.code || 500).json({ message: err.message });
+    }
+});
+
+
+
+
+
Logic layer and Data access layer
+
+

In the logic layer we will locate all the business logic of the application. It will be located in the file logic.ts. If in this layer we need to get access to the data, we make use of data access layer directly, in this case using devon4node with the DynamoDB adapter.

+
+
+

Example:

+
+
+
+
export async function cancelOrder(orderId: string, callback: (err: types.Error | null) => void) {
+    let order: dbtypes.Order;
+
+    try {
+        // Data access
+        order = await oasp4fn.table('Order', orderId).promise() as dbtypes.Order;
+
+        [...]
+    }
+}
+
+
+
+

We could define the data access layer separately, but devon4node allows us to do this in a simple and clear way. So, we decided not to separate the data access layer from the business logic.

+
+
+
+
Security with Json Web Token
+
+

For the Authentication and Authorization the app will implement the json web token protocol.

+
+
+
+
JWT basics
+
+

Refer to JWT basics for more information.

+
+
+
+
JWT implementation details
+
+

The Json Web Token pattern will be implemented based on the JSON web token library available on npm.

+
+
+
+
== Authentication
+
+

Based on the JSON web token approach, we will implement a class Authentication to define the security entry point and filters. Also, as My Thai Star is a mainly public application, we will define here the resources that won’t be secured.

+
+
+

List of unsecured resources:

+
+
+
    +
  • +

    /services/rest/dishmanagement/**: to allow anonymous users to see the dishes info in the menu section.

    +
  • +
  • +

    /services/rest/ordermanagement/v1/order: to allow anonymous users to save an order. They will need a booking token but they won’t be authenticated to do this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking: to allow anonymous users to create a booking. Only a booking token is necessary to accomplish this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking/cancel/**: to allow canceling a booking from an email. Only the booking token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/accept/**: to allow guests to accept an invite. Only a guest token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/decline/**: to allow guests to reject an invite. Only a guest token is needed.

    +
  • +
+
+
+

To configure the login we will create an instance of Authentication in the app file and then we will use the auth method to handle the requests to the /login endpoint.

+
+
+
+
app.post('/mythaistar/login', auth.auth);
+
+
+
+

To verify the presence of the Authorization token in the headers, we will register in the express the Authentication.registerAuthentication middleware. This middleware will check if the token is correct, if so, it will place the user in the request and continue to process it. If the token is not correct it will continue processing the request normally.

+
+
+
+
app.use(auth.registerAuthentication);
+
+
+
+

Finally, we have two default users created in the database:

+
+
+
    +
  • +

    user: waiter

    +
  • +
  • +

    password: waiter

    +
  • +
  • +

    role: WAITER

    +
  • +
  • +

    user: user0

    +
  • +
  • +

    password: password

    +
  • +
  • +

    role: CUSTOMER

    +
  • +
+
+
+
+
== Token set up
+
+

Following the official documentation the implementation details for the MyThaiStar’s JWT will be:

+
+
+
    +
  • +

    Secret: Used as part of the signature of the token, acting as a private key. It can be modified at config.ts file.

    +
  • +
  • +

    Token Prefix schema: Bearer. The token will look like Bearer <token>

    +
  • +
  • +

    Header: Authorization. The response header where the token will be included. Also, in the requests, when checking the token it will be expected to be in the same header.

    +
  • +
  • +

    The Authorization header should be part of the Access-Control-Expose-Headers header to allow clients access to the Authorization header content (the token);

    +
  • +
  • +

    Signature Algorithm: To encrypt the token we will use the default algorithm HS512.

    +
  • +
+
+
+
+
== Current User request
+
+

To provide the client with the current user data, our application should expose a service to return the user details. In this case the Authentication class has a method called getCurrentUser which will return the user data. We only need to register it in Express.

+
+
+
+
app.get('/mythaistar/services/rest/security/v1/currentuser', auth.getCurrentUser);
+
+
+
+
+
== Authorization
+
+

We need to secure three services, that only should be accessible for users with role Waiter:

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter.

    +
  • +
+
+
+

To ensure this, the Authorization class has the securizedEndpoint method that guarantees access based on the role. This method can be used as middleware in secure services. As the role is included in the token, once validated we will have this information in the request and the middleware can guarantee access or return a 403 error.

+
+
+
+
app.use('/mythaistar/services/rest/ordermanagement/v1/order/filter', auth.securizedEndpoint('WAITER'));
+app.use('/mythaistar/services/rest/ordermanagement/v1/order/search', auth.securizedEndpoint('WAITER'));
+app.use('/mythaistar/services/rest/bookingmanagement/v1/booking/search', auth.securizedEndpoint('WAITER'));
+
+
+
+
Table of Contents
+ +
+
+
+
Serverless design (deprecated)
+ +
+
+
Introduction
+
+

The NodeJS back-end for My Thai Star application is going to be based on:

+
+
+
    +
  • +

    Serverless as serverless framework

    +
  • +
  • +

    devon4node as data access layer framework

    +
  • +
  • +

    DynamoDB as NoSQL Database

    +
  • +
+
+
+

To know more details about the above technologies please visit the following documentation:

+
+
+ +
+
+
+
Basic architecture details
+
+

This structure can be shown in the following example image:

+
+
+
+folder organization +
+
+
+
    +
  • +

    handlers - All function handlers following devon4node structure

    +
  • +
  • +

    src

    +
    +
      +
    • +

      model - Folder with all data model

      +
    • +
    • +

      utils - Folder with all utils like classes and functions

      +
    • +
    • +

      config.ts - File with server configs

      +
    • +
    • +

      logic.ts - File with the business logic

      +
    • +
    +
    +
  • +
  • +

    test - Folder with all tests

    +
  • +
+
+
+
+
Layers
+
+
    +
  • +

    Service Layer: this layer will expose the REST API to exchange information with the client applications.

    +
  • +
  • +

    Logic Layer: the layer in charge of hosting the business logic of the application.

    +
  • +
  • +

    Data Access Layer: the layer to communicate with the data base.

    +
  • +
+
+
+
+
Service layer
+
+

The services layer will be solved using REST services with Serverless.

+
+
+

To give service to the defined User Stories we will need to implement the following services:

+
+
+
    +
  • +

    provide all available dishes.

    +
  • +
  • +

    save a booking.

    +
  • +
  • +

    save an order.

    +
  • +
  • +

    provide a list of bookings (only for waiters) and allow filtering.

    +
  • +
  • +

    provide a list of orders (only for waiters) and allow filtering.

    +
  • +
  • +

    login service (see the Security section).

    +
  • +
  • +

    provide the current user data (see the Security section)

    +
  • +
+
+
+

In order to be compatible with the other back-end implementations, we must follow the naming conventions proposed for Devon4j applications. We will define the following end points for the listed services.

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that do not belong to the Order entity).

    +
  • +
  • +

    (POST) /mythaistar/login.

    +
  • +
  • +

    (GET) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

You can find all the details for the services implementation in the Swagger definition included in the My Thai Star project on Github.

+
+
+

To treat these Http services, we must define the handlers following the devon4node convention:

+
+
+
    +
  • +

    (handlers/Http/POST/dish-search-handler) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (handlers/Http/POST/booking-handler) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (handlers/Http/POST/order-handler) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (handlers/Http/POST/booking-search-handler) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (handlers/Http/POST/order-search-handler) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (handlers/Http/POST/order-filter-handler) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that does not belong to the Order entity).

    +
  • +
  • +

    (handlers/Http/POST/login-handler) /mythaistar/login.

    +
  • +
  • +

    (handlers/Http/GET/current-user-handler) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

These handlers will define the behavior for each service and use the logical layer.

+
+
+

An example of handler definition:

+
+
+
+
oasp4fn.config({ path: '/mythaistar/services/rest/bookingmanagement/v1/booking/search' });
+export async function bookingSearch(event: HttpEvent, context: Context, callback: Function) {
+    try {
+        const search = <types.SearchCriteria>event.body;
+        const authToken = event.headers.Authorization;
+        // TODO: token verification is still pending here
+
+        auth.decode(authToken, (err, decoded) => {
+            if (err || decoded.role !==  'WAITER') {
+                throw { code: 403, message: 'Forbidden'};
+            }
+
+            // body content must be SearchCriteria
+            if (!types.isSearchCriteria(search)) {
+                throw { code: 400, message: 'No booking token given' };
+            }
+
+            business.searchBooking(search, (err: types.Error | null, bookingEntity: types.PaginatedList) => {
+                if (err) {
+                    callback(new Error(`[${err.code || 500}] ${err.message}`));
+                } else {
+                    callback(null, bookingEntity);
+                }
+            });
+        });
+    } catch (err) {
+        callback(new Error(`[${err.code || 500}] ${err.message}`));
+    }
+}
+
+
+
+

The default integration for a handler is lambda. See devon4node documentation for more information about default values and how to change it.

+
+
+
+
==
+
+

If you change the integration to lambda-proxy, you must take care that in this case the data will not be parsed. You must do JSON.parse explicitly.

+
+
+

After defining all the handlers, we must execute the fun command, which will generate the files serverless.yml and webpack.config.js.

+
+
+
+
Logic layer and Data access layer
+ +
+
+
Security with Json Web Token
+
+

For the Authentication and Authorization the app will implement the json web token protocol.

+
+
+
+
JWT basics
+
+

Refer to JWT basics for more information.

+
+
+
+
JWT implementation details
+
+

The Json Web Token pattern will be implemented based on the JSON web token library available on npm.

+
+
+
+
== Authentication
+
+

Based on the JSON web token approach, we will implement two methods: one to verify the user and generate the token, and one to decode the token and return the user data. Also, as My Thai Star is a mainly public application, we will define here the resources that won’t be secured.

+
+
+

List of unsecured resources:

+
+
+
    +
  • +

    /services/rest/dishmanagement/**: to allow anonymous users to see the dishes info in the menu section.

    +
  • +
  • +

    /services/rest/ordermanagement/v1/order: to allow anonymous users to save an order. They will need a booking token but they won’t be authenticated to do this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking: to allow anonymous users to create a booking. Only a booking token is necessary to accomplish this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking/cancel/**: to allow canceling a booking from an email. Only the booking token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/accept/**: to allow guests to accept an invite. Only a guest token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/decline/**: to allow guests to reject an invite. Only a guest token is needed.

    +
  • +
+
+
+

To configure the login we will create a handler called login and then we will use the method code to verify the user and generate the token.

+
+
+
+
oasp4fn.config({ integration: 'lambda-proxy', path: '/mythaistar/login' });
+export async function login(event: HttpEvent, context: Context, callback: Function) {
+.
+.
+.
+.
+}
+
+
+
+

We have two default users created in the database:

+
+
+
    +
  • +

    user: waiter

    +
  • +
  • +

    password: waiter

    +
  • +
  • +

    role: WAITER

    +
  • +
  • +

    user: user0

    +
  • +
  • +

    password: password

    +
  • +
  • +

    role: CUSTOMER

    +
  • +
+
+
+
+
== Token set up
+ +
+
+
== Current User request
+
+

To provide the client with the current user data our application should expose a service to return the user details. In order to do this, we must define a handler called current-user-handler. This handler must decode the Authorization token and return the user data.

+
+
+
+
oasp4fn.config({
+    path: '/mythaistar/services/rest/security/v1/currentuser',
+});
+export async function currentUser(event: HttpEvent, context: Context, callback: Function) {
+    let authToken = event.headers.Authorization;
+    try {
+        auth.decode(authToken, (err: any, decoded?: any) => {
+            if (err) {
+                callback(new Error(`[403] Forbidden`));
+            } else {
+                callback(null, decoded);
+            }
+        });
+    } catch (err) {
+        callback(new Error(`[${err.code || 500}] ${err.message}`));
+    }
+}
+
+
+
+
+
== Authorization
+
+

We need to secure three services, that only should be accessible for users with role Waiter:

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter.

    +
  • +
+
+
+

To ensure this, we must decode the Authorization token and check the result. As the role is included in the token, once validated we will have this information and can guarantee access or return a 403 error.

+
+
+
+
oasp4fn.config({ path: '/mythaistar/services/rest/bookingmanagement/v1/booking/search' });
+export async function bookingSearch(event: HttpEvent, context: Context, callback: Function) {
+    const authToken = event.headers.Authorization;
+    auth.decode(authToken, (err, decoded) => {
+        try {
+            if (err || decoded.role !==  'WAITER') {
+                throw { code: 403, message: 'Forbidden' };
+            }
+
+            [...]
+
+        } catch (err) {
+            callback(new Error(`[${err.code || 500}] ${err.message}`));
+        }
+    });
+}
+
+
+
+
Table of Contents
+ +
+
+
+
GraphQL design
+
+

TODO

+
+
+
+
+
+

Client Side

+
+
+
Table of Contents
+ +
+
+
Angular design
+ +
+
+
Introduction
+
+

MyThaiStar client side has been built using latest frameworks, component libraries and designs:

+
+
+

Angular 4 as main front-end Framework. https://angular.io/

+
+
+

Angular/CLI 1.0.5 as Angular tool helper. https://github.com/angular/angular-cli

+
+
+

Covalent Teradata 1.0.0-beta4 as Angular native component library based on Material Design. https://teradata.github.io/covalent/#/

+
+
+

Angular/Material2 1.0.0-beta5 used by Covalent Teradata. https://github.com/angular/material2

+
+
+

Note: these dependencies are evolving at this moment and, if possible, we are updating them on the project.

+
+
+
+
Basic project structure
+
+

The project is using the basic project seed that Angular/CLI provides with “ng new <project name>”. Then the app folder has been organized as Angular recommends and goes as follows:

+
+
+
    +
  • +

    app

    +
    +
      +
    • +

      components

      +
      +
        +
      • +

        sub-components

        +
      • +
      • +

        shared

        +
      • +
      • +

        component files

        +
      • +
      +
      +
    • +
    • +

      main app component

      +
    • +
    +
    +
  • +
  • +

    assets folder

    +
  • +
  • +

    environments folder

    +
  • +
  • +

    rest of angular files

    +
  • +
+
+
+

This structure can be shown in the following example image:

+
+
+
+folder organization +
+
+
+
+
Main Views and components
+
+

List of components that serve as a main view to navigate, or components developed to atomically group a set of functionalities which, given their nature, can be highly reusable through the app.

+
+
+
+routes +
+
+
+

Note: no-name-route corresponds to whatever URL the user introduced and does not exist, it redirects to Home-Component.

+
+
+
+
Public area
+ +
+
+
== App Component
+
+

Contains the components that are on top of all views, including:

+
+
+
+
== Order sidenav
+
+

Sidenav where selected orders are displayed with their total price and some comments.

+
+
+
+
== Navigation sidenav (only for mobile)
+
+

The purpose of this sidenav is to let the user navigate through the app when the screen is too small to show the navigation buttons on the header.

+
+
+
+
== Header
+
+

It contains the title, and some other basic functions regarding open and close sidenavs.

+
+
+
+ +
+

At the end of the page that shows only when open on desktop.

+
+
+
+
== Home-Component
+
+

Main view that shows up when the app initializes.

+
+
+
+
== Menu-Component
+
+

View where the users can view, filter and select the dishes (with their extras) they want to order. It contains a component for each menu entry:

+
+
+
+
== Menu-card
+
+

This component composes all the data of a dish in a card. Component made to display indeterminate number of dishes easily.

+
+
+
+
== Book Table Component
+
+

View to book a table on a given date with a given number of assistants, or create a reservation with a number of invitations via email.

+
+
+
+
== Book-table-dialog
+
+

Dialog which opens as a result of fulfilling the booking form, it displays all the data of the booking attempt, if everything is correct, the user can send the information or cancel if something is wrong.

+
+
+
+
== Invitation-dialog
+
+

Dialog which opens as a result of fulfilling the invitation form, it displays all the data of the booking with friends attempt, if everything is correct, the user can send the information or cancel if something is wrong.

+
+
+
+
== User Area
+
+

Group of dialogs with the proposal of giving some functionalities to the user, as login, register, change password or connect with Twitter.

+
+
+
+
== Login-dialog
+
+

Dialog with a tab to navigate between login and register.

+
+
+
+
== Password-dialog
+
+

Functionality reserved to already logged users, in this dialog the user can change freely their password.

+
+
+
+
== Twitter-dialog
+
+

Dialog designed specifically to connect your user account with Twitter.

+
+
+
+
Waiter cockpit area
+
+

Restricted area to workers of the restaurant, here we can see all information about booked tables with the selected orders and the reservations with all the guests and their acceptance or decline of the event.

+
+
+
+
== Order Cockpit Component
+
+

Data table with all the booked tables and a filter to search them, to show more info about that table you can click on it and open a dialog.

+
+
+
+
== Order-dialog
+
+

Complete display of data regarding the selected table and its orders.

+
+
+
+
== Reservation Cockpit Component
+
+

Data table with all the reservations and a filter to search them, to show more info about that table you can click on it and open a dialog.

+
+
+
+
== Reservation-dialog
+
+

Complete display of data regarding the selected table and its guests.

+
+
+
+
Email Management
+
+

As the application sends emails to both guests and hosts, we chose an approach based on URLs, where the email contains a button with a URL to a service in the app and a token; the front-end reads that token and, depending on the URL, will redirect to one service or another. For example:

+
+
+
+
`http://localhost:4200/booking/cancel/CB_20170605_8fb5bc4c84a1c5049da1f6beb1968afc`
+
+
+
+

This URL will tell the app that is a cancellation of a booking with the token CB_20170605_8fb5bc4c84a1c5049da1f6beb1968afc. The app will process this information, send it to back-end with the correct headers, show the confirmation of the event and redirect to home page.

+
+
+

The main cases at the moment are:

+
+
+
+
== Accept Invite
+
+

A guest accepts an invitation sent by a host. They will receive another email to decline if they change their mind later on.

+
+
+
+
== Reject Invite
+
+

A guest declines the invitation.

+
+
+
+
== Cancel Reservation
+
+

A host cancels the reservation; everybody that has accepted or has not yet answered will receive an email notifying them that this event is canceled. Also, all the orders related to this reservation will be removed.

+
+
+
+
== Cancel Orders
+
+

When you have a reservation, you will be assigned to a token, with that token you can save your order in the restaurant. When sent, you will receive an email confirming the order and the possibility to remove it.

+
+
+
+
Services and directives
+
+

Services are where all the main logic between components of that view should be. This includes calling a remote server, composing objects, calculate prices, etc.

+
+
+

Directives are a single functionality that are related to a component.

+
+
+

As it can be seen in the basic structure, every view that has a minimum of logic or need to call a server has its own service located in the shared folder.

+
+
+

Also, services and directives can be created to compose a reusable piece of code that will be reused in some parts of the code:

+
+
+
+
Price-calculator-service
+
+

This service located in the shared folder of sidenav contains the basic logic to calculate the price of a single order (with all the possibilities) and to calculate the price of a full list of orders for a table. As this is used in the sidenav and in the waiter cockpit, it has been exported as a service to be imported where needed and easily testable.

+
+
+
+
Authentication
+
+

Authentication services serves as a validator of roles and login and, at the same time, stores the basic data regarding security and authentication.

+
+
+

Main task of this services is to provide visibility at app level of the current user information:

+
+
+
    +
  • +

    Check if the user is logged or not.

    +
  • +
  • +

    Check the permissions of the current user.

    +
  • +
  • +

    Store the username and the JWT token.

    +
  • +
+
+
+
+
Snack Service
+
+

Service created to serve as a factory of Angular Material Snackbars, which are used commonly through the app. This service accepts some parameters to customize the snackBar and opens it with these parameters.

+
+
+
+
Window Service
+
+

For responsiveness reasons, the dialogs have to accept a width parameter to adjust to screen width and this information is given by Window object, as it is a good practice to have it in an isolated service, which also calculates the width percentage to apply on the dialogs.

+
+
+
+
Equal-validator-directive
+
+

This directive located in the shared folder of userArea is used in 2 fields to make sure they have the same value. This directive is used in confirm password fields in register and change password.

+
+
+
+
Mock Back-end
+
+

Developing against a mock while the real back-end is still being built lets us make a more realistic application and eases the adaptation once the real back-end is ready to be connected and called. Its structure is as follows:

+
+
+
+back end +
+
+
+

Contains the three main groups of functionalities in the application. Every group is composed by:

+
+
+
    +
  • +

    An interface with all the methods to implement.

    +
  • +
  • +

    A service that implements that interface, the main task of this service is to choose between real back-end and mock back-end depending on an environment variable.

    +
  • +
  • +

    Mock back-end service which implements all the methods declared in the interface using mock data stored in a local file and mainly uses Lodash to operate the arrays.

    +
  • +
  • +

    Real back-end service works as Mock back-end but in this case the methods call for server rest services through Http.

    +
  • +
+
+
+
+
Booking
+
+

The booking group of functionalities manages the calls to reserve a table with a given time and assistants or with guests, get reservations filtered, accept or decline invitations or cancel the reservation.

+
+
+
+
Orders
+
+

Management of the orders, including saving, filtering and cancel an order.

+
+
+
+
Dishes
+
+

The dishes group of functionalities manages the calls to get and filter dishes.

+
+
+
+
Login
+
+

Login manages the userArea logic: login, register and change password.

+
+
+
+
Security
+
+

My Thai Star security is composed by two main security services:

+
+
+
+
Auth-guard
+
+

Front-end security approach, this service implements an interface called CanActivate that comes from angular/router module. CanActivate interface forces you to implement a canActivate() function which returns a Boolean. +This service checks with the Auth-Service stored data if the user is logged and if he has enough permission to access the waiter cockpit. This prevents a forbidden user from accessing the waiter cockpit just by editing the URL in the browser.

+
+
+
+
JWT
+
+

JSON Web Token consists of a token that is generated by the server when the user logs in. Once provided, the token has to be included in an Authentication header on every Http call to the rest service, otherwise the call will be forbidden. +JWT also has an expiration date and a role checking, so if a user has not enough permissions or keeps logged for a long certain amount of time that exceeds this expiration date, the next time he calls for a service call, the server will return an error and forbid the call. You can log again to restore the token.

+
+
+
+
== HttpClient
+
+

To implement this Authorization header management, an HttpClient service has been implemented. +This services works as an envelope of Http, providing some more functionalities, likes a header management and an automatically management of a server token error in case the JWT has expired, corrupted or not permitted.

+
+
+
Table of Contents
+ +
+
+
+
Xamarin design
+
+

TODO

+
+
+
+
+
+

Security

+
+
+
Table of Contents
+ +
+
+

Two-Factor Authentication

+
+

Two-factor Authentication (2FA) provides an additional level of security to your account. Once enabled, in addition to supplying your username and password to login, you’ll be prompted for a code generated by your Google Authenticator. For example, a password manager on one of your devices.

+
+
+

By enabling 2FA, to log into your account an additional one-time password is required, which requires access to your paired device. This massively increases the barrier for an attacker to break into your account.

+
+
+
+

Back-end mechanism

+
+

In the back-end, we utilize Spring Security for any authentication.

+
+
+

Following the arrows, one can see all processes regarding authentication. The main idea is to check all credentials depending on their 2FA status and then either grant access to the specific user or deny access. This picture illustrates a normal authentication with username and password.

+
+
+
+security cross component +
+
+
+

When dealing with 2FA, another provider and filter is handling the request from /verify

+
+
+
+security cross component twofactor +
+
+
+

Here you can observe which filter will be used. +JWT-Authentication-Filter does intercept any request, which enforces being authenticated via JWT

+
+
+
+filters png +
+
+
+ + + + + +
+ + +Whenever the secret or qr code gets transferred between two parties, one must enforce SSL/TLS or IPsec to comply with RFC 6238. +
+
+
+
+

Activating Two-Factor Authentication

+
+

In the current state, TOTP +will be used for OTP generation. For this purpose we recommend the Google Authenticator or any TOTP generator out there.

+
+
+
    +
  • +

    Login with your account

    +
  • +
  • +

    Open the 2FA settings

    +
  • +
  • +

    Activate the 2FA Status

    +
  • +
  • +

    Initialize your device with either a QR-Code or a secret

    +
  • +
+
+
+
+

Frontend

+
+

These are the two main options, which you can obtain by toggling between QR-Code and secret.

+
+
+
+2FA qr code menu +
+
+
+
+2FA secret menu +
+
+
+

After an activation and logout, this prompt will ask you to enter the OTP given by your device.

+
+
+
+otp prompt +
+
+
+
+
+
+

Testing

+
+ +
+
+
+

Server Side

+
+
+
Table of Contents
+ +
+
+
Java testing
+ +
+
+
Component testing
+
+

We are going to test our components as a unit using Spring Test and Devon4j-test modules.

+
+
+

In order to test a basic component of the app first we will create a test class in the src/test/java folder and inside the main package of the test module. We will name the class following the convention.

+
+
+
+
[Component]Test
+
+
+
+

Then, in the declaration of the test class, we will use the @SpringBootTest annotation to run the application context. In addition, we will extend the ComponentTest from Devon4j-test module to have access to the main functionalities of the module, see more details here.

+
+
+

Spring Test allows us to use Dependency Injection so we can inject our component directly using the @Inject annotation.

+
+
+

Each test will be represented by a method annotated with @Test. Inside the method we will test one functionality, evaluating the result thanks to the asserts provided by the ComponentTest class that we are extending.

+
+
+

A simple test example

+
+
+
+
@SpringBootTest(classes = SpringBootApp.class)
+public class DishmanagementTest extends `ComponentTest` {
+
+  @Inject
+  private Dishmanagement dishmanagement;
+
+  @Test
+  public void findAllDishes() {
+
+    PaginatedListTo<DishCto> result = this.dishmanagement.findDishes();
+    assertThat(result).isNotNull();
+  }
+
+  ...
+}
+
+
+
+
+
Running the tests
+ +
+
+
From Eclipse
+
+

We can run the test from within Eclipse with the contextual menu Run As > JUnit Test. This functionality can be launched from method level, class level or even package level. The results will be shown in the JUnit tab.

+
+
+
+test results eclipse +
+
+
+
+
From command line using Maven
+
+

We can also run tests using Maven and the command line, using the command mvn test (or mvn clean test).

+
+
+
+
`C:\MyThaiStar>mvn clean test`
+
+
+
+

Doing this we will run all the tests of the project (recognized by the Test word at the end of the classes) and the results will be shown by sub-project.

+
+
+
+
...
+
+[D: 2017-07-17 09:30:08,457] [P: INFO ] [C: ] [T: Thread-5] [L: org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean] - [M: Closing JPA EntityManagerFactory for persistence unit 'default']
+
+Results :
+
+Tests run: 11, Failures: 0, Errors: 0, Skipped: 1
+
+...
+
+[INFO]
+[INFO] --- maven-compiler-plugin:3.1:testCompile (default-testCompile) @ mtsj-server ---
+[INFO] No sources to compile
+[INFO]
+[INFO] --- maven-surefire-plugin:2.12.4:test (default-test) @ mtsj-server ---
+[INFO] No tests to run.
+[INFO] ------------------------------------------------------------------------
+[INFO] Reactor Summary:
+[INFO]
+[INFO] mtsj ............................................... SUCCESS [  0.902 s]
+[INFO] mtsj-core .......................................... SUCCESS [02:30 min]
+[INFO] mtsj-server ........................................ SUCCESS [  1.123 s]
+[INFO] ------------------------------------------------------------------------
+[INFO] BUILD SUCCESS
+[INFO] ------------------------------------------------------------------------
+[INFO] Total time: 02:35 min
+[INFO] Finished at: 20XX-07-17T09:30:13+02:00
+[INFO] Final Memory: 39M/193M
+[INFO] ------------------------------------------------------------------------
+
+
+
+
Table of Contents
+ +
+
+
+
.NET testing
+
+

TODO

+
+
+
Table of Contents
+ +
+
+
+
NodeJS testing
+
+

TODO

+
+
+
Table of Contents
+ +
+
+
+
GraphQL testing
+
+

TODO

+
+
+
+
+
+

Client Side

+
+
+
Table of Contents
+ +
+
+
Angular testing
+
+
+testing +
+
+
+

MyThaiStar testing is made using Angular default testing environment and syntax language: Karma and Jasmine

+
+
+

To test an element of the application, you indicate that tests are a special type of file with the extension .spec.ts; then, in MyThaiStar angular/CLI config you can notice that there is an array with only one entry, Karma, which at the same time has one entry pointing to Karma.config.js.

+
+
+

In the configuration of Karma we indicate which syntax language we are going to use (currently Jasmine as said before) between some other configurations, it is remarkable the last one: browsers. By default, the only available browser is chrome, that is because Karma works opening a chrome view to run all the tests, in that same window, Karma shows the result or errors of the test run. But we can add some other browser to adjust to our necessities, for example, in some automatic processes that run from console, it is not an option to open a chrome window, in that case, MyThaiStar used PhantomJS and ChromeHeadless.

+
+
+

Taking all of this into account, to run the test in MyThaiStar we need to move to project root folder and run this command : ng test --browser <browser>

+
+
+
+
==
+
+

If you run just ng test it will run the three browser options simultaneously, giving as a result three test runs and outputs, it can cause timeouts and unwanted behaviors, if you want a shortcut to run the test with chrome window you can just run yarn test so we really encourage to not use just ng test. +== ==

+
+
+

Here we are going to see how Client side testing of MyThaiStar has been done.

+
+
+
+
Testing Components
+
+

Angular components were created using angular/CLI ng create component so they already come with an spec file to test them. The only thing left to do is to add the providers and imports needed in the component to work as the component itself, once this is done, the most basic test is to be sure that all the dependencies and the component itself can be correctly created.

+
+
+

As an example, this is the spec.ts of the menu view component:

+
+
+
+
all the imports...
+
+describe('MenuComponent', () => {
+  let component: MenuComponent;
+  let fixture: ComponentFixture<MenuComponent>;
+
+  beforeEach(async(() => {
+    TestBed.configureTestingModule({
+      declarations: [ MenuComponent, MenuCardComponent ],
+      providers: [SidenavService, MenuService, SnackBarService],
+      imports: [
+        BrowserAnimationsModule,
+        BackendModule.forRoot({environmentType: 0, restServiceRoot: 'v1'}),
+        CovalentModule,
+      ],
+    })
+    .compileComponents();
+  }));
+
+  beforeEach(() => {
+    fixture = TestBed.createComponent(MenuComponent);
+    component = fixture.componentInstance;
+    fixture.detectChanges();
+  });
+
+  it('should create', () => {
+    expect(component).toBeTruthy();
+  });
+});
+
+
+
+

First we declare the component to be tested and a Fixture object, then, we configure the testingModule right in the same way we could configure the MenuModule with the difference here that tests always have to use the mock back-end because we do not want to really depend on a server to test our components.

+
+
+

Once configured the test module, we have to prepare the context of the test, in this case we create the component, that is exactly what is going on in the beforeEach() function.

+
+
+

Finally, we are ready to use the component and its fixture to check if the component has been correctly created.

+
+
+

At this moment this is the case for most of the components, in the future, some work would be applied on this matter to have a full testing experience in MyThaiStar components.

+
+
+
+
Dialog components
+
+

Dialog components are in a special category because they cannot be tested normally. In the way Material implements the opening of dialogs, you have to create a component that will load into a dialog; to tell the module to load these components when needed, they have to be added into a special array category: EntryComponents. So, to test them, we need to import them in the test file as well.

+
+
+

Also, the testing code to open the component is a bit different too:

+
+
+
+
...
+  beforeEach(() => {
+    dialog = TestBed.get(MdDialog);
+    component = dialog.open(CommentDialogComponent).componentInstance;
+  });
+...
+
+
+
+

That is right, the beforeEach() function is slightly different from the example above; in this case we have to force the test to know that the component is only displayed in a dialog, so we have to open a dialog with this component in order to access it.

+
+
+
+
Testing Services
+
+

As well as components, services can be tested too, actually, they are even more necessary to be tested because they have inside more complex logic and data management.

+
+
+

As an example of testing services, I am going to use a well-done service, with a specific purpose and with its logic completely tested: the price-calculator service.

+
+
+
+
...
+
+describe('PriceCalculatorService', () => {
+
+  beforeEach(() => {
+    TestBed.configureTestingModule({
+      providers: [PriceCalculatorService],
+    });
+  });
+
+  it('should be properly injected', inject([PriceCalculatorService], (service: PriceCalculatorService) => {
+    expect(service).toBeTruthy();
+  }));
+
+  describe('check getPrice method', () => {
+
+    it('should calculate price for single order without extras', inject([PriceCalculatorService], (service: PriceCalculatorService) => {
+      const order: OrderView = {
+        dish: {
+          id: 0,
+          price: 12.50,
+          name: 'Order without extras',
+        },
+        orderLine: {
+          comment: '',
+          amount: 1,
+        },
+        extras: [],
+      };
+
+      expect(service.getPrice(order)).toEqual(order.dish.price);
+    }));
+...
+
+
+
+

In services tests, we have to inject the service in order to use it; then we can define some initializing contexts to test if the functions of the service return the expected values. In the example we can see how an imaginary order is created and the function getPrice() is expected to correctly calculate the price of that order.

+
+
+

In this same test file you can find some more tests regarding all the possibilities of use of that service: orders with and without extras, single order, multiple orders and so on.

+
+
+

Some services, as well as the components, only test that they are correctly created and their dependencies properly injected; in the future, there will be full coverage regarding these services.

+
+
+
+
Testing in a CI environment
+
+
Table of Contents
+ +
+
+
+
Xamarin testing
+
+

TODO

+
+
+
+
+
+

End to end

+
+
+
Table of Contents
+ +
+
+
MrChecker E2E Testing
+ +
+
+
Introduction
+
+

MrChecker is a testing framework included in devonfw with several useful modules, from which we will focus on the Selenium Module, a module designed to make end-to-end testing easier to implement.

+
+
+
+
How to use it
+
+

First of all download the repository.

+
+
+

You must run My Thai Star front-end and back-end application and modify your URL to the front in mrchecker/endtoend-test/src/resources/settings.properties

+
+
+

Now you can run end to end test to check if the application works properly.

+
+
+

To run the e2e test you have two options:

+
+
+

The first option is using the command line in devonfw distribution

+
+
+
+
cd mrchecker/endtoend-test/
+mvn test -Dtest=MyThaiStarTest -Dbrowser=Chrome
+
+
+
+

optionally you can use it with a headless version or using another navigator:

+
+
+
+
// chrome headless (without visual component)
+mvn test -Dtest=MyThaiStarTest -Dbrowser=ChromeHeadless
+// use firefox navigator
+mvn test -Dtest=MyThaiStarTest -Dbrowser=FireFox
+
+
+
+

The second is importing the project in devonfw Eclipse and running MyThaiStarTest.java as JUnit (right click, run as JUnit)

+
+
+

They can be executed one by one or all in one go, comment or uncomment @Test before those tests to enable or disable them.

+
+
+

For more information about how to use MrChecker and build your own end to end test read: + * MrChecker documentation + * MrChecker tutorial for My Thai Star

+
+
+
+
End to end tests in My Thai Star
+
+

We have included a test suite with four tests to run in My Thai Star to verify everything works properly.

+
+
+

The included tests do the following:

+
+
+
    +
  • +

    Test_loginAndLogOut: Log in and log out.

    +
  • +
  • +

    Test_loginFake: Attempt to log in with a fake user.

    +
  • +
  • +

    Test_bookTable: Log in and book a table, then login with a waiter and check if the table was successfully booked.

    +
  • +
+
+
+

* `Test_orderMenu`: Log in and order food for a certain booked table.

+
+
+

These four tests can be found inside MyThaiStarTest.java located here.

+
+
+
+
+
+

UI design

+
+
+
Table of Contents
+ +
+
+

Style guide

+
+
+mts styleguide +
+
+
+ +
+
+
+

CI/CD

+
+
+
Table of Contents
+ +
+
+

My Thai Star in Production Line

+ +
+
+

What is PL?

+
+

The Production Line Project is a set of server-side collaboration tools for Capgemini engagements. It has been developed for supporting project engagements with individual tools like issue tracking, continuous integration, continuous deployment, documentation, binary storage and much more!

+
+
+
+pl logo +
+
+
+
+

Introduction

+
+

Although the PL Project is a wide set of tools, only 3 are going to be mainly used for My Thai Star projects to build a Continuous Integration and Continuous Delivery environment. All three are available in the PL instance used for this project.

+
+
+
    +
  1. +

    Jenkins

    +
    +

    This is going to be the "main tool". Jenkins helps to automate the non-human part of the development with Continuous Integration and is going to host all Pipelines (and, obviously, execute them).

    +
    +
  2. +
  3. +

    Nexus

    +
    +

    Nexus manages software "artifacts" required for development. It is possible to both download dependencies from Nexus and publish artifacts as well. It allows to share resources within an organization.

    +
    +
  4. +
  5. +

    SonarQube

    +
    +

    It is a platform for continuous inspection of the code. It is going to be used for the Java back-end.

    +
    +
  6. +
+
+
+
+

Where can I find all My Thai Star Pipelines?

+
+

They are located under the MTS folder of the PL instance:

+
+
+
+mts pipelines +
+
+
+

Those Jenkins Pipelines will not have any code to execute. They’re just pointing to all Jenkinsfiles under the /jenkins folder of the repository. They can be found here.

+
+
+
+

CI in My Thai Star stack

+
+ +
+
+
+

How to configure everything out of the box

+
+

Production Line currently has a template to integrate My Thai Star. All information can be found at devonfw production line repository

+
+
+
Table of Contents
+ +
+
+
Angular CI
+
+

The Angular client-side of My Thai Star is going to have some specific needs for the CI-CD Pipeline to perform mandatory operations.

+
+
+
+
Pipeline
+
+

The Pipeline for the Angular client-side is going to be called MyThaiStar_FRONT-END_BUILD. It is located in the PL instance, under the MTS folder (as previously explained). It is going to follow a process flow like this one:

+
+
+
+angular pipeline flow +
+
+
+

Each of those steps is called a stage in the Jenkins context. Let’s see what those steps mean in the context of the Angular application:

+
+
+
    +
  1. +

    Declarative: Checkout SCM

    +
    +

    Retrieves the project from the GitHub repository in which it’s located. This step is not defined directly in our pipeline, but as it is loaded from the repository this step should always be done at the beginning.

    +
    +
    +
    +pipeline config +
    +
    +
  2. +
  3. +

    Declarative: Tool Install

    +
    +

    The Pipeline needs some Tools to perform some operations with the Angular project. These tools are a correct version of NodeJS (10.17.0 LTS) with Yarn installed as a global package.

    +
    +
    +
    +
    tools {
    +    nodejs "NodeJS 10.14.0"
    +}
    +
    +
    +
  4. +
  5. +

    Loading Custom Tools

    +
    +

    The Pipeline also needs a browser in order to execute the tests, so in this step the chrome-stable will be loaded. We will use it in a headless mode.

    +
    +
    +
    +
    tool chrome
    +
    +
    +
  6. +
  7. +

    Fresh Dependency Installation

    +
    +

    The script $ yarn does a package installation. As we always clean the workspace after the pipeline, all packages must be installed in every execution.

    +
    +
  8. +
  9. +

    Code Linting

    +
    +

    This script executes a linting process of TypeScript. Rules can be defined in the tslint.json file of the project. It throws an exception whenever a file contains a non-compliant piece of code.

    +
    +
  10. +
  11. +

    Execute Angular tests

    +
    +

    The CI testing of the Angular client is different than the standard local testing (adapted to CI environments, as specified in the Adaptation section of document). This script just executes the following commands:

    +
    +
    +
    +
    ng test --browsers ChromeHeadless --watch=false
    +
    +
    +
  12. +
  13. +

    Check dependencies

    +
    +

    Before continuing, we print the result of yarn audit. It shows the vulnerabilities in the dependencies. It does not process the response. The purpose is only to track the result of the command.

    +
    +
    +
    +
    yarn audit
    +
    +
    +
  14. +
  15. +

    SonarQube code analysis

    +
    +

    The script loads and executes the tool sonar-scanner. This tool is loaded here because it’s not used in any other part of the pipeline. The sonar-scanner will take all code, upload it to SonarQube and wait until SonarQube sends us a response with the quality of our code. If the code does not pass the quality gate, the pipeline will stop at this point.

    +
    +
  16. +
  17. +

    Build Application

    +
    +

    The building process of the Angular client would result in a folder called /dist in the main Angular’s directory. That folder is the one that is going to be served afterwards as an artifact. This process has also been adapted to some Deployment needs. This building script executes the following:

    +
    +
    +
    +
    ng build --configuration=docker
    +
    +
    +
  18. +
  19. +

    Deliver application into Nexus

    +
    +

    Once the scripts produce the Angular artifact (/dist folder), it’s time to package it and store it into Nexus.

    +
    +
  20. +
  21. +

    Declarative: Post Actions

    +
    +

    At the end, this step is always executed, even if a previous stage fails. We use this step to clean up the workspace for future executions.

    +
    +
    +
    +
    post {
    +    always {
    +        cleanWs()
    +    }
    +}
    +
    +
    +
  22. +
+
+
+
+
Adjustments
+
+

The Angular project Pipeline needed some "extra" features to complete all planned processes. Those features resulted in some additions to the project.

+
+
+
+
Pipeline Environment
+
+

In order to easily reuse the pipeline in other angular projects, all variables have been defined in the block environment. All variables have the default values that Production Line uses, so if you’re going to work in production line you won’t have to change anything. Example:

+
+
+
+
environment {
+    // Script for build the application. Defined at package.json
+    buildScript = 'build --configuration=docker'
+    // Script for lint the application. Defined at package.json
+    lintScript = 'lint'
+    // Script for test the application. Defined at package.json
+    testScript = 'test:ci'
+    // Angular directory
+    angularDir = 'angular'
+    // SRC folder. It will be angularDir/srcDir
+    srcDir = 'src'
+    // Name of the custom tool for chrome stable
+    chrome = 'Chrome-stable'
+
+    // SonarQube
+    // Name of the SonarQube tool
+    sonarTool = 'SonarQube'
+    // Name of the SonarQube environment
+    sonarEnv = "SonarQube"
+
+    // Nexus
+    // Artifact groupId
+    groupId = 'com.devonfw.mythaistar'
+    // Nexus repository ID
+    repositoryId= 'pl-nexus'
+    // Nexus internal URL
+    repositoryUrl = 'http://nexus3-core:8081/nexus3/repository/maven-snapshots'
+    // Maven global settings configuration ID
+    globalSettingsId = 'MavenSettings'
+    // Maven tool id
+    mavenInstallation = 'Maven3'
+}
+
+
+
+
+
== Description
+
+
    +
  • +

    build Script: script for build the application. It must be defined at package.json.

    +
    +

    Example (package.json):

    +
    +
    +
    +
    {
    +    "name": "mythaistar-restaurant",
    +    ...
    +    "scripts": {
    +        ...
    +        "build": "ng build",
    +        ...
    +    }
    +    ...
    +}
    +
    +
    +
    +

    This will be used as follows:

    +
    +
    +
    +
    sh """yarn ${buildScript}"""
    +
    +
    +
  • +
  • +

    lint Script: Script for lint the application. Defined at package.json

    +
    +

    Example (package.json):

    +
    +
    +
    +
    {
    +    "name": "mythaistar-restaurant",
    +    ...
    +    "scripts": {
    +        ...
    +        "lint": "ng lint",
    +        ...
    +    }
    +    ...
    +}
    +
    +
    +
    +

    This will be used as follows:

    +
    +
    +
    +
    sh """yarn ${lintScript}"""
    +
    +
    +
  • +
  • +

    test Script: Script for test the application. Defined at package.json

    +
    +

    Example (package.json):

    +
    +
    +
    +
    {
    +    "name": "mythaistar-restaurant",
    +    ...
    +    "scripts": {
    +        ...
    +        "test:ci": "npm run postinstall:web && ng test --browsers ChromeHeadless --watch=false",
    +        ...
    +    }
    +    ...
    +}
    +
    +
    +
    +

    This will be used as follows:

    +
    +
    +
    +
    sh """yarn ${testScript}"""
    +
    +
    +
  • +
  • +

    angular-Dir: Relative route to angular application. In My Thai Star this is the angular folder. The actual directory (.) is also allowed.

    +
    +
    +angular directory +
    +
    +
  • +
  • +

    srcDir: Directory where you store the source code. For angular applications the default value is src

    +
    +
    +src directory +
    +
    +
  • +
  • +

    chrome: Since you need a browser to run your tests, we must provide one. This variable contains the name of the custom tool for google chrome.

    +
    +
    +chrome installation +
    +
    +
  • +
  • +

    sonar-Tool: Name of the SonarQube scanner installation.

    +
    +
    +sonar scanner +
    +
    +
  • +
  • +

    sonar-Env: Name of the SonarQube environment. SonarQube is the default value for PL.

    +
    +
    +sonar env +
    +
    +
  • +
  • +

    group-Id: Group id of the application. It will be used to store the application in nexus3

    +
    +
    +nexus3 groupid +
    +
    +
  • +
  • +

    repository-Id: Id of the nexus3 repository. It must be defined at maven global config file.

    +
    +
    +nexus3 id +
    +
    +
  • +
  • +

    repository URL: The URL of the repository.

    +
  • +
  • +

    global Settings Id: The id of the global settings file.

    +
    +
    +nexus3 global config +
    +
    +
  • +
  • +

    maven Installation: The name of the maven tool.

    +
    +
    +maven tool +
    +
    +
  • +
+
+
+
Table of Contents
+ +
+
+
+
Java CI
+
+

The Java server-side of My Thai Star is a devon4j-based application. As long as Maven and Java 8 are going to be needed, the Pipeline should have those tools available as well.

+
+
+
+
Pipeline
+
+

This Pipeline is called MyThaiStar_SERVER_BUILD, and it is located exactly in the same PL instance’s folder as MyThaiStar_FRONT-END_BUILD. Let’s see how the Pipeline’s flow behaves.

+
+
+
+java pipeline flow +
+
+
+

Check those Pipeline stages with more detail:

+
+
+
    +
  1. +

    Declarative: Checkout SCM

    +
    +

    Gets the code from https://github.com/devonfw/my-thai-star . This step is not defined directly in our pipeline, but as it is loaded from the repository this step should always be done at the beginning.

    +
    +
  2. +
  3. +

    Declarative: Tool Install

    +
    +

    The My Thai Star application works with JDK11. In this step, if JDK11 is not installed, we install it and then put the JDK folder into PATH.

    +
    +
    +
    +
    tools {
    +  jdk 'OpenJDK11'
    +}
    +
    +
    +
  4. +
  5. +

    Loading Custom Tools

    +
    +

    In this step we load the tools that can not be loaded in the previous step. As My Thai Star is delivered as docker container, in this step we load docker as custom tool.

    +
    +
    +
    +
    tool dockerTool
    +
    +
    +
  6. +
  7. +

    Install dependencies

    +
    +

    This step will download all project dependencies.

    +
    +
    +
    +
    mvn clean install -Dmaven.test.skip=true
    +
    +
    +
  8. +
  9. +

    Unit Tests

    +
    +

    This step will execute the project unit test with maven.

    +
    +
    +
    +
    mvn clean test
    +
    +
    +
  10. +
  11. +

    Dependency Checker

    +
    +

    Execute the OWASP Dependency Checker in order to validate the project dependencies. It will generate a report that can be used in SonarQube

    +
    +
    +
    +
    dependencyCheck additionalArguments: '--project "MTSJ" --scan java/mtsj --format XML', odcInstallation: 'dependency-check'
    +dependencyCheckPublisher pattern: ''
    +
    +
    +
  12. +
  13. +

    SonarQube analysis

    +
    +

    The code is evaluated using the integrated PL instance’s SonarQube. Also, it will wait for the quality gate status. If the status is failing, the pipeline execution will be stopped.

    +
    +
    +
    +
    withSonarQubeEnv(sonarEnv) {
    +    sh "mvn sonar:sonar"
    +}
    +
    +def qg = waitForQualityGate()
    +if (qg.status != 'OK') {
    +    error "Pipeline aborted due to quality gate failure: ${qg.status}"
    +}
    +
    +
    +
  14. +
  15. +

    Deliver application into Nexus

    +
    +

    Store all artifacts into nexus.

    +
    +
    +
    +
    mvn deploy -Dmaven.test.skip=true
    +
    +
    +
  16. +
  17. +

    Create the Docker image

    +
    +

    Create the docker image and then publish the image into a docker registry.

    +
    +
  18. +
+
+
+
+
Adjustments
+ +
+
+
Pipeline Environment
+
+

In order to easily reuse the pipeline in other java projects, all variables have been defined in the block environment. All variables have the default values that Production Line uses, so if you’re going to work in production line you won’t have to change anything. Example:

+
+
+
+
environment {
+    // Directory with java project
+    javaDir = 'java/mtsj'
+
+    // SonarQube
+    // Name of the SonarQube environment
+    sonarEnv = "SonarQube"
+
+    // Nexus 3
+    // Maven global settings configuration ID
+    globalSettingsId = 'MavenSettings'
+    // Maven tool id
+    mavenInstallation = 'Maven3'
+
+    // Docker
+    dockerRegistryCredentials = 'nexus-api'
+    dockerRegistryProtocol = 'https://'
+    dockerTool = 'docker-global'
+}
+
+
+
+
+
== Description
+
+
    +
  • +

    java Dir: Relative route to java application. In My Thai Star this is the java/mtsj folder. The actual directory (.) is also allowed.

    +
    +
    +java directory +
    +
    +
  • +
  • +

    sonar Env: Name of the SonarQube environment. SonarQube is the default value for PL.

    +
  • +
  • +

    global Settings Id: The id of the global settings file. MavenSettings is the default value for PL.

    +
    +
    +nexus3 global config +
    +
    +
  • +
  • +

    maven Installation: The name of the maven tool. Maven3 is the default value for PL.

    +
    +
    +maven tool +
    +
    +
  • +
+
+
+
+
Distribution management
+
+

The only extra thing that needs to be added to the Java server-side is some information that determines where the artifact of the project is going to be stored in Nexus. This is going to be a section in the main pom.xml file called <distributionManagement>. This section will point to the PL instance’s Nexus. Let’s have a look at it. It’s already configured with the PL default values.

+
+
+
+
<distributionManagement>
+    <repository>
+      <id>pl-nexus</id>
+      <name>PL Releases</name>
+      <url>http://nexus3-core:8081/nexus/content/repositories/maven-releases/</url>
+    </repository>
+    <snapshotRepository>
+      <id>pl-nexus</id>
+      <name>PL Snapshots</name>
+      <url>http://nexus3-core:8081/nexus3/repository/maven-snapshots</url>
+    </snapshotRepository>
+</distributionManagement>
+
+
+
+
Table of Contents
+ +
+
+
+
+

Deployment

+
+

The main deployment tool used for My Thai Star is Docker.

+
+
+
+docker +
+
+
+

It is a tool to run applications in isolated environments. Those isolated environments will be what we call Docker containers. For instance, no installation of Nginx or Apache Tomcat (or anything else needed to deploy) will be necessary, because there will be some containers that actually have those technologies inside.

+
+
+
+

Where will Docker containers be running?

+
+

Of course, it is necessary to have an external Deployment Server. Every Docker process will run in it. It will be accessed from Production Line pipelines via SSH. Thus, the pipeline itself will manage the scenario: if every previous process (such as testing) passes as OK, it stops the current containers and creates new ones.

+
+
+

This external server will be located in https://mts-devonfw-core.cloud.okteto.net/

+
+
+
+

Container Schema

+
+

3 Docker containers are being used for the deployment of My Thai Star:

+
+
+
    +
  1. +

    Nginx for the Reverse Proxy

    +
  2. +
  3. +

    tomcat for the Java Server

    +
  4. +
  5. +

    Nginx for the Angular Client

    +
  6. +
+
+
+

The usage of the Reverse Proxy will allow the client to call via /api every single Java Server’s REST operation. Moreover, there will only be 1 port in usage in the remote Docker host, the one mapped for the Reverse Proxy: 8080. +Besides the deployment itself using Nginx and tomcat, both client and server are previously built using NodeJS and maven images. Artifacts produced by them will be pasted in servers' containers using multi-stage docker builds. It will all follow this schema:

+
+
+
+36028242 8998f41c 0d9e 11e8 93b3 6bfe50152bf8 +
+
+
+

This orchestration of all 3 containers will be done by using a docker-compose.yml file. To redirect traffic from one container to another (i.e. reverse-proxy to angular client or angular client to java server) will be done by using, as host names, the service name docker-compose defines for each of them, followed by the internally exposed port:

+
+ +
+ + + + + +
+ + +An implementation using Traefik as a reverse proxy instead of NGINX is also available. +
+
+
+
+

Run My Thai Star

+
+

The steps to run My Thai Star are:

+
+
+
    +
  1. +

    Clone the repository $ git clone https://github.com/devonfw/my-thai-star.git

    +
  2. +
  3. +

    Run the docker compose command: $ docker-compose up

    +
  4. +
+
+
+
Table of Contents
+ +
+
+
Deployment Pipelines
+
+

As PL does not support deployments, we have created separate pipelines for this purpose. Those pipelines are: MyThaiStar_REVERSE-PROXY_DEPLOY, MyThaiStar_FRONT-END_DEPLOY and MyThaiStar_SERVER_DEPLOY.

+
+
+

The application will be deployed using docker on a remote machine. The architecture is as follows:

+
+
+
+deployment arch +
+
+
+

The parts to be deployed are: an NGINX reverse proxy, the java application and the angular application.

+
+
+
+
MyThaiStar_SERVER_DEPLOY Pipeline
+
+

Deploys on the server the Java part of My Thai Star.

+
+
+
+
Parameters
+
+
    +
  • +

    registryUrl: The URL to the docker registry where the image is stored.

    +
  • +
  • +

    registryCredentialsId: Credentials to publish/download images from registry.

    +
  • +
  • +

    dockerNetwork: Network of your My Thai Star application. You can deploy several versions of MTS in the same server by changing the dockerNetwork.

    +
  • +
  • +

    VERSION: The version that you want to deploy.

    +
  • +
+
+
+
+
Pipeline steps
+
+
    +
  • +

    Create docker network: Create the docker network with the name provided as parameter.

    +
  • +
  • +

    Deploy new image: Deploy a new java container. If it already exists, it first deletes the previous one.

    +
  • +
+
+
+
+
MyThaiStar_FRONT-END_DEPLOY
+
+

Deploys on the server the Angular part of My Thai Star

+
+
+
+
Parameters
+
+
    +
  • +

    registryUrl: The URL to the docker registry where the image is stored.

    +
  • +
  • +

    registryCredentialsId: Credentials to publish/download images from registry.

    +
  • +
  • +

    dockerNetwork: Network of your My Thai Star application. You can deploy several versions of MTS in the same server by changing the dockerNetwork.

    +
  • +
  • +

    VERSION: The version that you want to deploy.

    +
  • +
+
+
+
+
Pipeline steps
+
+
    +
  • +

    Create docker network: Create the docker network with the name provided as parameter.

    +
  • +
  • +

    Deploy new image: Deploy a new angular container. If it already exists, it first deletes the previous one.

    +
  • +
+
+
+
+
MyThaiStar_REVERSE-PROXY_DEPLOY Pipeline
+
+ + + + + +
+ + +As reverse proxy connects to the Java and Angular application, both must be deployed before you execute this pipeline. +
+
+
+

The MyThaiStar_REVERSE-PROXY_DEPLOY pipeline will deploy the My Thai Star reverse proxy into a remote machine using docker.

+
+
+
+
Parameters
+
+
    +
  • +

    registryUrl: The URL to the docker registry where the image is stored.

    +
  • +
  • +

    registryCredentialsId: Credentials to publish/download images from registry.

    +
  • +
  • +

    buildReverseProxy: If yes, it will build and publish a new version of reverse-proxy.

    +
  • +
  • +

    port: Port of the MTS application. You must ensure that this port is available in the deployment machine.

    +
  • +
  • +

    docker Network: Network of your My Thai Star application. You can deploy several versions of MTS in the same server by changing the port and the docker Network.

    +
  • +
  • +

    VERSION: The version that you want to deploy.

    +
  • +
+
+
+
+
Pipeline steps
+
+
    +
  • +

    Create docker network: Create the docker network with the name provided as parameter.

    +
  • +
  • +

    Create the Docker image: If build-Reverse-Proxy is enabled, this step will create a new docker image and publish it to the docker registry.

    +
  • +
  • +

    Deploy new image: Deploy a new reverse proxy container. If it already exists, it first deletes the previous one.

    +
  • +
+
+
+
Table of Contents
+ +
+
+
+
Deployment Strategies
+
+

In this chapter different ways of deploying My Thai Star are explained. Everything will be based on Docker.

+
+
+
+
Independent Docker containers
+
+

The first way of deployment will use isolated Docker containers. That means that if the client-side container is deployed, it does not affect the server-side container’s life cycle and vice versa.

+
+
+

Let’s show how the containers will behave during their life cycle.

+
+
+
    +
  • +

    0) Copy everything you need into the Deployment Server directory

    +
  • +
  • +

    1) Remove existing container (Nginx or Tomcat)

    +
    +
    +container1 +
    +
    +
  • +
  • +

    2) Run new one from the Docker images collection of the external Deployment Server.

    +
    +
    +container2 +
    +
    +
  • +
  • +

    3) Add the artifact /dist to the "deployable" folder of the Docker container (/usr/share/nginx/html/)

    +
    +
    +container3 +
    +
    +
    +

    Now, let’s see how it’s being executed in the command line (simplified due to documentation purposes). The next block of code represents what is inside of the last stage of the Pipeline.

    +
    +
    +
    +
    sshagent (credentials: ['my_ssh_token']) {
    +    sh """
    +        // Copy artifact from workspace to deployment server
    +
    +        // Manage container:
    +        docker rm -f [mts-container]
    +        docker run -itd --name=[mts-container] [base_image]
    +        docker exec [mts-container] bash -C \\"rm [container_deployment_folder]/*\\"
    +        docker cp [artifact] [mts-container]:[container_deployment_folder]
    +    """
    +}
    +
    +
    +
    +

    For every operation performed in the external Deployment Server, it is necessary to define where those commands are going to be executed. So, for each one of previous docker commands, this should appear before:

    +
    +
    +
    +
    `ssh -o StrictHostKeyChecking=no root@10.40.235.244`
    +
    +
    +
  • +
+
+
+
+
Docker Compose
+
+

The second way of deployment will be by orchestrating both elements of the application: The Angular client-side and the Java server-side. Both elements will be running in Docker containers as well, but in this case they won’t be independent anymore. Docker Compose will be in charge of keeping both containers up, or to put them down.

+
+
+
+
Project adjustment
+
+

In order to perform this second way of deployment, some files will be created in the project. The first one is the Dockerfile for the Angular client-side. This file will pull (if necessary) an Nginx Docker image and copy the Angular artifact (/dist folder) inside of the deployment folder of the image. It will be located in the main directory of the Angular client-side project.

+
+
+
+dockerfile angular +
+
+
+

The second file is the Dockerfile for the Java server-side. Its function will be quite similar to the Angular one. It will run a tomcat Docker image and copy the Java artifact (mythaistar.war file) in its deployment folder.

+
+
+
+dockerfile java +
+
+
+

Finally, as long as the docker-compose is being used, a file containing its configuration will be necessary as well. A new folder in the main My Thai Star directory is created, and it’s called /docker. Inside there is just a docker-compose.yml file. It contains all the information needed to orchestrate the deployment process. For example, which port both containers are going to be published on, and so on. This way of deployment will allow the application to be published or not just with one action.

+
+
+
+
docker-compose rm -f            # down
+docker-compose up --build -d    # up fresh containers
+
+
+
+
+docker compose +
+
+
+

Let’s have a look at the file itself:

+
+
+
+
version: '3'
+services:
+  client_compose:
+    build: "angular"
+    ports:
+      - "8091:80"
+    depends_on:
+      - server_compose
+  server_compose:
+    build: "java"
+    ports:
+      - "9091:8080"
+
+
+
+

This Orchestrated Deployment will offer some interesting possibilities for the future of the application.

+
+
+
Table of Contents
+ +
+
+
+
Future Deployment
+
+

The My Thai Star project is going to be built in many technologies. Thus, let’s think about one deployment schema that allows the Angular client to communicate to all three back ends: Java, Node and .NET.

+
+
+

As long as Docker containers are being used, it shouldn’t be that hard to deal with this "distributed" deployment. The schema represents 6 Docker containers that will have client-side(s) and server-side(s). Each of 3 Angular client containers (those in red) are going to communicate with different back-ends. So, when the deployment is finished, it would be possible to use all three server-sides just by changing the "port" in the URL.

+
+
+

Let’s see how it would look like:

+
+
+
+deployment schema +
+
+
+
Table of Contents
+ +
+
+
+
Reverse proxy strategy using Traefik
+
+

This implementation is the same as described at My Thai Star deployment wiki page. The only thing that changes is that Traefik is used instead of NGINX.

+
+
+

Using Traefik as reverse proxy, we can define the routes using labels in the docker containers instead of using a nginx.conf file. With this, it is not necessary to modify the reverse proxy container for each application. In addition, as Traefik is listening to the docker daemon, it can detect new containers and create routes for them without rebooting.

+
+
+

Example of labels:

+
+
+
+
labels:
+    - "traefik.http.routers.angular.rule=PathPrefix(`/`)"
+    - "traefik.http.services.angular.loadBalancer.healthcheck.path=/health"
+    - "traefik.http.services.angular.loadBalancer.healthcheck.interval=10s"
+    - "traefik.http.services.angular.loadBalancer.healthcheck.scheme=http"
+
+
+
+
+
How to use it
+
+

If you want to build the images from code, change to My Thai Star root folder and execute:

+
+
+
+
$ docker-compose -f docker-compose.traefik.yml up -d --build
+
+
+
+

If you want to build the images from artifacts, change to Traefik folder (reverse-proxy/traefik) and execute:

+
+
+
+
$ docker-compose up -d --build
+
+
+
+

After a few seconds, when the healthcheck detects that containers are running, your application will be available at http://localhost:8090. Also, the Traefik dashboard is available at http://localhost:8080.

+
+
+

If you want to check the behavior of the application when you scale up the back-end, you can execute:

+
+
+
+
$ docker-compose scale java=5
+
+
+
+

With this, the access to the java back-end will be using the load balancing method: Weighted Round Robin.

+
+
+
Table of Contents
+ +
+
+
+
+

MyThaiStar on Native Kubernetes as a Service (nKaaS)

+
+

The MyThaiStar sample application can be deployed on a nKaaS environment. The required Kubernetes configuration files can be found in the MyThaiStar repository. There are no additional changes required in order to deploy the application.

+
+
+
+

Setting up the environment

+ +
+
+

Following the nKaaS guide

+
+

After requesting access to the nKaaS platform you’ll be greeted with a welcome mail which contains your personal credentials. Make sure to change the given password to a personal one within the 24 hour time period, otherwise the credentials will expire.

+
+
+

After successfully following the guide mentioned in the welcome mail you should be able to establish a connection to the nKaaS VPN and have access to all their services (Jenkins, BitBucket, etc.). You should also be able to communicate with Kubernetes using kubectl.

+
+
+

Known issues: The nKaaS guide provides a download link for OpenVPN Connect. However, some users experienced connection issues with this client. If you’re having issues connecting to the VPN with OpenVPN Connect, you may try out the client by OVPN.

+
+
+
+

Requesting a namespace

+
+

Initially, you won’t be able to edit anything on Kubernetes, as you don’t have any privileges on any namespace. To request your own namespace you should raise a ticket at the Customer Support Portal containing your desired name for the namespace.

+
+
+

As soon as the namespace was created you can change your kubectl context:

+
+
+
+
kubectl config set-context --current --namespace=YOUR-NAMESPACE
+
+
+
+

On your own namespace you should have permissions to create/delete deployments/services etc. and perform other actions.

+
+
+
+

Setting up Harbor

+
+

Jenkins will build the MyThaiStar Docker images and push them to the nKaaS Harbor registry. The Jenkinsfile defaults to a Harbor project called "my-thai-star". If there’s no such project on Harbor, simply create a new one.

+
+
+
+

Setting up Jenkins

+
+

As MyThaiStar includes all required Jenkinsfiles for nKaaS, almost no configurations have to be performed by the user. +Create a new Pipeline on Jenkins and configure its definition to be a "Pipeline script from SCM". The SCM used is "Git" and the repository URL is the MyThaiStar repository https://github.com/devonfw/my-thai-star.git or your fork of it.

+
+
+

The Branch Specifier should point to */develop, the Script Path is jenkins/nKaaS/Jenkinsfile as that’s where the Jenkinsfile is located at the MyThaiStar repository. +Checking the "Lightweight checkout" could speed up the Pipeline.

+
+
+

Note: If you’re using the nKaaS Bitbucket as repository for your MyThaiStar clone you have to perform some additional configurations. First you’ll have to create a new SSH keypair, for example with ssh-keygen. Add the public key to the Bitbucket authentication methods and the private key in Jenkins to a new pair of credentials. This step is required for Jenkins to be able to authenticate against Bitbucket. +Afterwards, instead of the official MyThaiStar repository, specify your Bitbucket repository:

+
+
+
+
ssh://git@bitbucket.demo.xpaas.io:7999/YOUR-PROJECT/YOUR-MTS-REPO.git
+
+
+
+

Under "Credentials" choose the credentials that contain your Bitbucket private key you’ve created earlier.

+
+
+
+

Deploying MTS

+
+

After setting up the Jenkins Pipeline, you can simply run it by clicking on the "Build" button. This will trigger the pipeline, Jenkins will:

+
+
+
    +
  1. +

    Check out the MTS project

    +
  2. +
  3. +

    Build the docker images

    +
  4. +
  5. +

    Push the docker images to the Harbor registry

    +
  6. +
  7. +

    Deploy the MTS application onto Kubernetes

    +
  8. +
+
+
+

Finally, the applications should be available at http://my-thai-star.demo.xpaas.io.

+
+
+

The first part, my-thai-star, is specified in the MTS ingress configuration at host. The second part, demo.xpaas.io, is the host of the nKaaS you’re working on.

+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/mrchecker.html b/docs/my-thai-star/1.0/mrchecker.html new file mode 100644 index 00000000..ed2f4025 --- /dev/null +++ b/docs/my-thai-star/1.0/mrchecker.html @@ -0,0 +1,365 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

MrChecker E2E Testing

+
+ +
+
+
+

Introduction

+
+
+

MrChecker is a testing framework included in devonfw with several useful modules, from which we will focus on the Selenium Module, a module designed to make end-to-end testing easier to implement.

+
+
+
+
+

How to use it

+
+
+

First of all download the repository.

+
+
+

You must run the My Thai Star front-end and back-end applications and modify the URL to the front-end in mrchecker/endtoend-test/src/resources/settings.properties

+
+
+

Now you can run end to end test to check if the application works properly.

+
+
+

To run the e2e test you have two options:

+
+
+

The first option is using the command line in devonfw distribution

+
+
+
+
cd mrchecker/endtoend-test/
+mvn test -Dtest=MyThaiStarTest -Dbrowser=Chrome
+
+
+
+

optionally you can use it with a headless version or using another navigator:

+
+
+
+
// chrome headless (without visual component)
+mvn test -Dtest=MyThaiStarTest -Dbrowser=ChromeHeadless// use firefox navigator
+mvn test -Dtest=MyThaiStarTest -Dbrowser=FireFox
+
+
+
+

The second is importing the project in devonfw Eclipse and running MyThaiStarTest.java as JUnit (right click, run as JUnit)

+
+
+

They can be executed one by one or all in one go; comment or uncomment @Test before those tests to enable or disable them.

+
+
+

For more information about how to use MrChecker and build your own end to end test read: + * MrChecker documentation + * MrChecker tutorial for My Thai Star

+
+
+
+
+

End to end tests in My Thai Star

+
+
+

We have included a test suite with four tests to run in My Thai Star to verify everything works properly.

+
+
+

The included tests do the following:

+
+
+
    +
  • +

    Test_loginAndLogOut: Log in and log out.

    +
  • +
  • +

    Test_loginFake: Attempt to log in with a fake user.

    +
  • +
  • +

    Test_bookTable: Log in and book a table, then login with a waiter and check if the table was successfully booked.

    +
  • +
+
+
+

Test_orderMenu: Log in and order food for a certain booked table.

+
+
+

These four tests can be found inside MyThaiStarTest.java located here.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/my-thai-star-nosql-data-model.html b/docs/my-thai-star/1.0/my-thai-star-nosql-data-model.html new file mode 100644 index 00000000..324cef96 --- /dev/null +++ b/docs/my-thai-star/1.0/my-thai-star-nosql-data-model.html @@ -0,0 +1,282 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

NoSQL Data Model

+
+
+
+dynamodb data model 1.4.1 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/my-thai-star-publish.html b/docs/my-thai-star/1.0/my-thai-star-publish.html new file mode 100644 index 00000000..55eae9b1 --- /dev/null +++ b/docs/my-thai-star/1.0/my-thai-star-publish.html @@ -0,0 +1,536 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Publishing the MyThaiStar Application

+
+
+

This page will explain how to build and deploy the application.

+
+
+
+
+

Production Line Instance

+
+
+

The Production Line instance being used can be found here. After logging in you’ll see a list of existing jobs and pipelines. +However, only a folder is relevant to this topic: MTS

+
+
+

Note: A user account is required for authentication. Contact a devon team member to request a new account.

+
+
+
+
+

Pipeline Script

+
+
+

We’ll have a closer look at the pipeline configuration script and its stages:

+
+
+

Note: Have a look at this wiki over here to get a basic idea on how to write pipeline scripts.

+
+
+
    +
  1. +

    Checking out MyThaiStar from GitHub

    +
    +

    This stage will check out the source code directly from GitHub:

    +
    +
    +
    +
    git credentialsId:'github-devonfw-ci', url:'https://github.com/devonfw/my-thai-star/'
    +
    +
    +
    +

    Credentials are required for authentication as we’re checking out a private repository. 'github-devonfw-ci' is a pair of credentials that was created for this sole purpose.

    +
    +
  2. +
  3. +

    Loading custom tools

    +
    +

    To build the application, two tools are required: Node 6 and Angular CLI.

    +
    +
    +

    They just have to be referenced, as the Custom Tool Plugin will handle the installation process:

    +
    +
    +
    +
    tool 'Node 6'
    +tool 'Angular CLI'
    +
    +
    +
  4. +
  5. +

    Fresh Dependency installation +To ensure that we get fresh dependencies, we’ll have to make sure that our dependencies folder node_modules is removed and the installation process is run again.

    +
    +
    +
    find . -name "node_modules" -exec rm -rf '{}' +
    +npm i
    +
    +
    +
  6. +
  7. +

    Code Linting

    +
    +

    By "linting" our Angular code we check the quality of the code. TypeScript provides a useful tool for that. It is called TSLint. This process is triggered via Angular CLI.

    +
    +
    +
    +
    ng lint --format checkstyle
    +
    +
    +
  8. +
  9. +

    Execute Unit Tests

    +
    +

    By default, Angular tests are executed using the Chrome browser. That can be a problem when they need to be executed in a CI environment, such as Jenkins (which is the case) or Travis. Angular projects can be prepared to deal with it, using the PhantomJS browser instead of chrome.

    +
    +
    +

    We can prepare a script for that in our package.json file, or we can directly write it in the command line. Also, it is necessary to make sure that those tests will just be executed once, because by default it will be watching for changes.

    +
    +
    +
    +
    ng test --browsers PhantomJS --single-run
    +
    +
    +
    +

    or

    +
    +
    +
    +
    yarn test:ci
    +
    +
    +
  10. +
  11. +

    Build application

    +
    +

    The building process needs to be sufficiently flexible to be adapted for different deployments. As long as the My Thai Star Angular client needs (or will need) to point to different servers (devon4j, Node and .NET), it is mandatory to have the chance to separately "prepare" the artifact for all of them.

    +
    +
    +

    What does that mean? There are some files dedicated to those situations. They’re called environment. They’ll define some data to be used under different circumstances.

    +
    +
    +
    +
    ng build --aot --environment=prod
    +
    +
    +
    +

    or

    +
    +
    +
    +
    yarn build:prod
    +
    +
    +
    +

    The ng build command creates a dist folder which contains the application.

    +
    +
  12. +
  13. +

    Deployment

    +
    +

    The deployment step has to be approved by a human. Otherwise it won’t proceed.

    +
    +
    +

    The user can decide on whether to proceed and deploy the application or to abort and just keep the generated files inside the dist directory.

    +
    +
    +

    After clicking on proceed, the following lines will be executed:

    +
    +
    +
    +
    ##Change to Angular directory
    +cd angular
    +
    +##Copy "dist" folder from workspace to deployment server
    +scp -o StrictHostKeyChecking=no -r dist root@10.40.235.244:/root/mythaistar/
    +
    +##Launch application in Docker container
    +ssh -o StrictHostKeyChecking=no root@10.40.235.244 docker rm -f mythaistar
    +ssh -o StrictHostKeyChecking=no root@10.40.235.244 docker run -itd --name=mythaistar -p 8090:80 nginx
    +ssh -o StrictHostKeyChecking=no root@10.40.235.244 docker exec mythaistar bash -c \\"rm /usr/share/nginx/html/*\\"
    +ssh -o StrictHostKeyChecking=no root@10.40.235.244 docker cp mythaistar/dist/. mythaistar:/usr/share/nginx/html/
    +
    +
    +
    +

    After deploying the application will be available at http://de-mucdevondepl01:8090

    +
    +
  14. +
+
+
+
+
+

Complete Pipeline Script:

+
+
+

The complete Groovy script:

+
+
+
+
node {
+    stage 'Checking out my-thai-star from GitHub'
+        node {
+            git branch: 'develop', credentialsId: 'github-devonfw-ci', url: 'https://github.com/devonfw/my-thai-star/'
+        }
+
+    stage 'Loading Custom Tools'
+        node {
+            tool 'Node 6'
+            tool 'Angular CLI'
+        }
+
+    stage 'Fresh Dependency Installation'
+        node {
+            sh """
+                cd angular
+                find . -name "node_modules" -exec rm -rf '{}' +
+                npm i
+            """
+        }
+
+    stage 'Code Linting'
+        node {
+            sh """
+                cd angular
+                ng lint --format checkstyle
+            """
+        }
+
+    stage 'Execute Angular tests'
+        node {
+            sh """
+                cd angular
+                ng test --browsers PhantomJS --single-run
+            """
+        }
+
+    stage 'Build Application'
+        node {
+            sh """
+                cd angular
+                ng build --aot --prod
+            """
+        }
+
+    stage 'Deployment'
+        input 'Should this build be deployed?'
+            node {
+                sshagent (credentials: ['3d0fa2a4-5cf0-4cf5-a3fd-23655eb33c11']) {
+                    sh """
+                        cd angular
+                        # Copy resulting "dist" folder from workspace to deployment server
+                        scp -o StrictHostKeyChecking=no -r dist root@10.40.235.244:/root/mythaistar/
+
+                        # Launch application in Docker container
+                        ssh -o StrictHostKeyChecking=no root@10.40.235.244 docker rm -f mythaistar
+                        ssh -o StrictHostKeyChecking=no root@10.40.235.244 docker run -itd --name=mythaistar -p 8090:80 nginx
+                        ssh -o StrictHostKeyChecking=no root@10.40.235.244 docker exec mythaistar bash -c \\"rm /usr/share/nginx/html/*\\"
+                        ssh -o StrictHostKeyChecking=no root@10.40.235.244 docker cp mythaistar/dist/. mythaistar:/usr/share/nginx/html/
+
+                    """
+                }
+                sh 'echo \\"Application available at http://de-mucdevondepl01:8090\\"'
+            }
+}
+
+
+
+
+
+

Accessing MyThaiStar

+
+
+

Finally, the application will be available at this URL: http://de-mucdevondepl01:8090.

+
+
+
+
+

Notes

+
+
+

Make sure not to launch multiple instances of this pipeline in parallel. While a pipeline is waiting for approval it’ll still be blocking a build executor. +This PL instance is set up to have two build executors.

+
+
+

This means: When launching this pipeline two times in parallel without approving the build, other jobs/pipeline won’t be able +to run properly.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/net-design.html b/docs/my-thai-star/1.0/net-design.html new file mode 100644 index 00000000..923aa3e5 --- /dev/null +++ b/docs/my-thai-star/1.0/net-design.html @@ -0,0 +1,280 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

.NET design

+
+
+

TODO

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/net-testing.html b/docs/my-thai-star/1.0/net-testing.html new file mode 100644 index 00000000..8eed45e5 --- /dev/null +++ b/docs/my-thai-star/1.0/net-testing.html @@ -0,0 +1,280 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

.NET testing

+
+
+

TODO

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/nkaas.html b/docs/my-thai-star/1.0/nkaas.html new file mode 100644 index 00000000..b6133dc8 --- /dev/null +++ b/docs/my-thai-star/1.0/nkaas.html @@ -0,0 +1,387 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

MyThaiStar on Native Kubernetes as a Service (nKaaS)

+
+
+

The MyThaiStar sample application can be deployed on a nKaaS environment. The required Kubernetes configuration files can be found in the MyThaiStar repository. There are no additional changes required in order to deploy the application.

+
+
+
+
+

Setting up the environment

+
+ +
+
+
+

Following the nKaaS guide

+
+
+

After requesting access to the nKaaS platform you’ll be greeted with a welcome mail which contains your personal credentials. Make sure to change the given password to a personal one within the 24 hour time period, otherwise the credentials will expire.

+
+
+

After successfully following the guide mentioned in the welcome mail you should be able to establish a connection to the nKaaS VPN and have access to all their services (Jenkins, BitBucket, etc.). You should also be able to communicate with Kubernetes using kubectl.

+
+
+

Known issues: The nKaaS guide provides a download link for OpenVPN Connect. However, some users experienced connection issues with this client. If you’re having issues connecting to the VPN with OpenVPN Connect, you may try out the client by OVPN.

+
+
+
+
+

Requesting a namespace

+
+
+

Initially, you won’t be able to edit anything on Kubernetes, as you don’t have any privileges on any namespace. To request your own namespace you should raise a ticket at the Customer Support Portal containing your desired name for the namespace.

+
+
+

As soon as the namespace was created you can change your kubectl context:

+
+
+
+
kubectl config set-context --current -namespace=YOUR-NAMESPACE
+
+
+
+

On your own namespace you should have permissions to create/delete deployments/services etc. and perform other actions.

+
+
+
+
+

Setting up Harbor

+
+
+

Jenkins will build the MyThaiStar Docker images and push them to the nKaaS Harbor registry. The Jenkinsfile defaults to a Harbor project called "my-thai-star". If there’s no such project on Harbor, simply create a new one.

+
+
+
+
+

Setting up Jenkins

+
+
+

As MyThaiStar includes all required Jenkinsfiles for nKaaS, almost no configurations have to be performed by the user. +Create a new Pipeline on Jenkins and configure its definition to be a "Pipeline script from SCM". The SCM used is "Git" and the repository URL is the MyThaiStar repository https://github.com/devonfw/my-thai-star.git or your fork of it.

+
+
+

The Branch Specifier should point to */develop, the Script Path is jenkins/nKaaS/Jenkinsfile as that’s where the Jenkinsfile is located at the MyThaiStar repository. +Checking the "Lightweight checkout" could speed up the Pipeline.

+
+
+

Note: If you’re using the nKaaS Bitbucket as repository for your MyThaiStar clone you have to perform some additional configurations. First you’ll have to create a new SSH keypair, for example with ssh-keygen. Add the public key to the Bitbucket authentication methods and the private key in Jenkins to a new pair of credentials. This step is required for Jenkins to be able to authenticate against Bitbucket. +Afterwards, instead of the official MyThaiStar repository, specify your Bitbucket repository:

+
+
+
+
ssh://git@bitbucket.demo.xpaas.io:7999/YOUR-PROJECT/YOUR-MTS-REPO.git
+
+
+
+

Under "Credentials" choose the credentials that contain your Bitbucket private key you’ve created earlier.

+
+
+
+
+

Deploying MTS

+
+
+

After setting up the Jenkins Pipeline, you can simply run it by clicking on the "Build" button. This will trigger the pipeline, Jenkins will:

+
+
+
    +
  1. +

    Check out the MTS project

    +
  2. +
  3. +

    Build the docker images

    +
  4. +
  5. +

    Push the docker images to the Harbor registry

    +
  6. +
  7. +

    Deploy the MTS application onto Kubernetes

    +
  8. +
+
+
+

Finally, the applications should be available at http://my-thai-star.demo.xpaas.io.

+
+
+

The first part, my-thai-star, is specified in the MTS ingress configuration at host. The second part, demo.xpaas.io, is the host of the nKaaS you’re working on.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/nodejs-design.html b/docs/my-thai-star/1.0/nodejs-design.html new file mode 100644 index 00000000..a72e3122 --- /dev/null +++ b/docs/my-thai-star/1.0/nodejs-design.html @@ -0,0 +1,707 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

NodeJS design (deprecated)

+
+ +
+
+
+

Introduction

+
+
+

The NodeJS back-end for My Thai Star application is going to be based on:

+
+
+
    +
  • +

    ExpressJS as the web application framework

    +
  • +
  • +

    devon4node as data access layer framework

    +
  • +
  • +

    DynamoDB as NoSQL Database

    +
  • +
+
+
+

To know more details about the above technologies please visit the following documentation:

+
+
+ +
+
+
+
+

Basic architecture details

+
+
+

This structure can be shown in the following example image:

+
+
+
+folder organization +
+
+
+
    +
  • +

    public - All files which be exposed on the server directly

    +
  • +
  • +

    src

    +
    +
      +
    • +

      database folder - Folder with scripts to create/delete/seed the database

      +
    • +
    • +

      model - Folder with all data model

      +
    • +
    • +

      routes - Folder with all ExpressJS routers

      +
    • +
    • +

      utils - Folder with all utils like classes and functions

      +
    • +
    • +

      app.ts - File with ExpressJS declaration

      +
    • +
    • +

      config.ts - File with server configs

      +
    • +
    • +

      logic.ts - File with the business logic

      +
    • +
    +
    +
  • +
  • +

    test - Folder with all tests

    +
  • +
+
+
+
+
+

Layers

+
+
+
    +
  • +

    Service Layer: this layer will expose the REST API to exchange information with the client applications.

    +
  • +
  • +

    Logic Layer: the layer in charge of hosting the business logic of the application.

    +
  • +
  • +

    Data Access Layer: the layer to communicate with the data base.

    +
  • +
+
+
+
+
+

Service layer

+
+
+

The services layer will be solved using REST services with ExpressJS

+
+
+

To give service to the defined User Stories we will need to implement the following services:

+
+
+
    +
  • +

    provide all available dishes.

    +
  • +
  • +

    save a booking.

    +
  • +
  • +

    save an order.

    +
  • +
  • +

    provide a list of bookings (only for waiters) and allow filtering.

    +
  • +
  • +

    provide a list of orders (only for waiters) and allow filtering.

    +
  • +
  • +

    login service (see the Security section).

    +
  • +
  • +

    provide the current user data (see the Security section)

    +
  • +
+
+
+

In order to be compatible with the other back-end implementations, we must follow the naming conventions proposed for Devon4j applications. We will define the following end points for the listed services.

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that does not belong to the Order entity).

    +
  • +
  • +

    (POST) /mythaistar/login.

    +
  • +
  • +

    (GET) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

You can find all the details for the services implementation in the Swagger definition included in the My Thai Star project on Github.

+
+
+

To treat these services separately, the following routers were created:

+
+
+
    +
  • +

    bookingmanagement: will answer all requests with the prefix /mythaistar/services/rest/bookingmanagement/v1

    +
  • +
  • +

    dishmanagement: will answer all requests with the prefix /mythaistar/services/rest/dishmanagement/v1

    +
  • +
  • +

    ordermanagement: will answer all requests with the prefix /mythaistar/services/rest/ordermanagement/v1

    +
  • +
+
+
+

These routers will define the behavior for each service and use the logical layer.

+
+
+

An example of service definition:

+
+
+
+
router.post('/booking/search', (req: types.CustomRequest, res: Response) => {
+    try {
+        // body content must be SearchCriteria
+        if (!types.isSearchCriteria(req.body)) {
+            throw {code: 400, message: 'No booking token given' };
+        }
+
+        // use the searchBooking method defined at business logic
+        business.searchBooking(req.body, (err: types.Error | null, bookingEntity: types.PaginatedList) => {
+            if (err) {
+                res.status(err.code || 500).json(err.message);
+            } else {
+                res.json(bookingEntity);
+            }
+        });
+    } catch (err) {
+        res.status(err.code || 500).json({ message: err.message });
+    }
+});
+
+
+
+
+
+

Logic layer and Data access layer

+
+
+

In the logic layer we will locate all the business logic of the application. It will be located in the file logic.ts. If in this layer we need to get access to the data, we make use of data access layer directly, in this case using devon4node with the DynamoDB adapter.

+
+
+

Example:

+
+
+
+
export async function cancelOrder(orderId: string, callback: (err: types.Error | null) => void) {
+    let order: dbtypes.Order;
+
+    try {
+        // Data access
+        order = await oasp4fn.table('Order', orderId).promise() as dbtypes.Order;
+
+        [...]
+    }
+}
+
+
+
+

We could define the data access layer separately, but devon4node allows us to do this in a simple and clear way. So, we decided not to separate the data access layer from the business logic.

+
+
+
+
+

Security with Json Web Token

+
+
+

For the Authentication and Authorization the app will implement the json web token protocol.

+
+
+
+
+

JWT basics

+
+
+

Refer to JWT basics for more information.

+
+
+
+
+

JWT implementation details

+
+
+

The Json Web Token pattern will be implemented based on the JSON web token library available on npm.

+
+
+
+
+

== Authentication

+
+
+

Based on the JSON web token approach, we will implement a class Authentication to define the security entry point and filters. Also, as My Thai Star is a mainly public application, we will define here the resources that won’t be secured.

+
+
+

List of unsecured resources:

+
+
+
    +
  • +

    /services/rest/dishmanagement/\**: to allow anonymous users to see the dishes info in the menu section.

    +
  • +
  • +

    /services/rest/ordermanagement/v1/order: to allow anonymous users to save an order. They will need a booking token but they won’t be authenticated to do this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking: to allow anonymous users to create a booking. Only a booking token is necessary to accomplish this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking/cancel/\**: to allow canceling a booking from an email. Only the booking token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/accept/\**: to allow guests to accept an invite. Only a guest token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/decline/\**: to allow guests to reject an invite. Only a guest token is needed.

    +
  • +
+
+
+

To configure the login we will create an instance of Authentication in the app file and then we will use the method auth to handle the requests to the /login endpoint.

+
+
+
+
app.post('/mythaistar/login', auth.auth);
+
+
+
+

To verify the presence of the Authorization token in the headers, we will register in the express the Authentication.registerAuthentication middleware. This middleware will check if the token is correct, if so, it will place the user in the request and continue to process it. If the token is not correct it will continue processing the request normally.

+
+
+
+
app.use(auth.registerAuthentication);
+
+
+
+

Finally, we have two default users created in the database:

+
+
+
    +
  • +

    user: waiter

    +
  • +
  • +

    password: waiter

    +
  • +
  • +

    role: WAITER

    +
  • +
  • +

    user: user0

    +
  • +
  • +

    password: password

    +
  • +
  • +

    role: CUSTOMER

    +
  • +
+
+
+
+
+

== Token set up

+
+
+

Following the official documentation the implementation details for the MyThaiStar’s JWT will be:

+
+
+
    +
  • +

    Secret: Used as part of the signature of the token, acting as a private key. It can be modified at config.ts file.

    +
  • +
  • +

    Token Prefix schema: Bearer. The token will look like Bearer <token>

    +
  • +
  • +

    Header: Authorization. The response header where the token will be included. Also, in the requests, when checking the token it will be expected to be in the same header.

    +
  • +
  • +

    The Authorization header should be part of the Access-Control-Expose-Headers header to allow clients access to the Authorization header content (the token);

    +
  • +
  • +

    Signature Algorithm: To encrypt the token we will use the default algorithm HS512.

    +
  • +
+
+
+
+
+

== Current User request

+
+
+

To provide the client with the current user data our application should expose a service to return the user details. In this case the Authentication has a method called getCurrentUser which will return the user data. We only need to register it with Express.

+
+
+
+
app.get('/mythaistar/services/rest/security/v1/currentuser', auth.getCurrentUser);
+
+
+
+
+
+

== Authorization

+
+
+

We need to secure three services, that only should be accessible for users with role Waiter:

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter.

    +
  • +
+
+
+

To ensure this, the Authorization class has the securizedEndpoint method that guarantees access based on the role. This method can be used as middleware in secure services. As the role is included in the token, once validated we will have this information in the request and the middleware can guarantee access or return a 403 error.

+
+
+
+
app.use('/mythaistar/services/rest/ordermanagement/v1/order/filter', auth.securizedEndpoint('WAITER'));
+app.use('/mythaistar/services/rest/ordermanagement/v1/order/search', auth.securizedEndpoint('WAITER'));
+app.use('/mythaistar/services/rest/bookingmanagement/v1/booking/search', auth.securizedEndpoint('WAITER'));
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/nodejs-testing.html b/docs/my-thai-star/1.0/nodejs-testing.html new file mode 100644 index 00000000..4388c2fb --- /dev/null +++ b/docs/my-thai-star/1.0/nodejs-testing.html @@ -0,0 +1,280 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

NodeJS testing

+
+
+

TODO

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/production-line-ci.html b/docs/my-thai-star/1.0/production-line-ci.html new file mode 100644 index 00000000..bc1811a5 --- /dev/null +++ b/docs/my-thai-star/1.0/production-line-ci.html @@ -0,0 +1,364 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

My Thai Star in Production Line

+
+ +
+
+
+

What is PL?

+
+
+

The Production Line Project is a set of server-side collaboration tools for Capgemini engagements. It has been developed for supporting project engagements with individual tools like issue tracking, continuous integration, continuous deployment, documentation, binary storage and much more!

+
+
+
+pl logo +
+
+
+
+
+

Introduction

+
+
+

Although the PL Project is a wide set of tools, only 3 are going to be mainly used for My Thai Star projects to build a Continuous Integration and Continuous Delivery environment. All three are available in the PL instance used for this project.

+
+
+
    +
  1. +

    Jenkins

    +
    +

    This is going to be the "main tool". Jenkins helps to automate the non-human part of the development with Continuous Integration and is going to host all Pipelines (and, obviously, execute them).

    +
    +
  2. +
  3. +

    Nexus

    +
    +

    Nexus manages software "artifacts" required for development. It is possible to both download dependencies from Nexus and publish artifacts as well. It allows to share resources within an organization.

    +
    +
  4. +
  5. +

    SonarQube

    +
    +

    It is a platform for continuous inspection of the code. It is going to be used for the Java back-end.

    +
    +
  6. +
+
+
+
+
+

Where can I find all My Thai Star Pipelines?

+
+
+

They are located under the MTS folder of the PL instance:

+
+
+
+mts pipelines +
+
+
+

Those Jenkins Pipelines will not have any code to execute. They’re just pointing to all Jenkinsfiles under the /jenkins folder of the repository. They can be found here.

+
+
+
+
+

CI in My Thai Star stack

+
+
+ +
+
+
+
+

How to configure everything out of the box

+
+
+

Production Line currently has a template to integrate My Thai Star. All information can be found at devonfw production line repository

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/sap-hana-guide.html b/docs/my-thai-star/1.0/sap-hana-guide.html new file mode 100644 index 00000000..16a6d42e --- /dev/null +++ b/docs/my-thai-star/1.0/sap-hana-guide.html @@ -0,0 +1,538 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

SAP HANA

+
+ +
+
+
+

Download/Install VMware/SAP HANA

+
+
+ +
+
+
+
+

Run SAP HANA Database Server

+
+
+
    +
  • +

    Once the .ova file has been opened inside VMware Workstation, click on the image and go to Edit Virtual Machine Settings. Set the memory allocation to 5GB. And Network Connection to NAT . NAT shows the IP for the virtual machine which will be used to establish JDBC connection

    +
  • +
  • +

    Click Play Virtual Machine. When the virtual machine runs for the first time it displays the following. Copy the IP address, which will be used for the JDBC connection

    +
  • +
  • +

    Type hxeadm, which is the username and hit Enter. Next it will ask for password which is HXEHana1. Once successfully logged in it will ask to set a new password. Choose a new password and remember.

    +
  • +
  • +

    You need to set Master password for SAP HANA database. Set it as you like and remember.

    +
  • +
  • +

    For “proceed with configuration” type y and hit Enter. HANA database has started in the background.

    +
  • +
  • +

    Try connecting with following command, replace the password with the master password

    +
  • +
+
+
+
+
hxehost:hxeadm>hdbsql
+   \c -d SYSTEMDB -n localhost:39013 -u SYSTEM -p <>
+
+
+
+
+
+

Setting up Database for MTSJ

+
+
+

Once you have installed SAP HANA with VMware, you need to set up the DB.

+
+
+
+
+

Connect to DB

+
+
+
    +
  • +

    After you start VMware, login with hxeadm as login and the password. +At the prompt - hxehost:hxeadm>hdbsql +Please note the IP address, that need to be put in MTSJ java back-end

    +
  • +
  • +

    On prompt hdbsql> type below to connect to the DB

    +
  • +
+
+
+
+
\c -d SYSTEMDB -n localhost:39013 -u SYSTEM -p <password>
+
+
+
+
    +
  • +

    Type below query to see, if you have access to tenant database i.e. HXE

    +
  • +
+
+
+
+
SELECT DATABASE_NAME,  ACTIVE_STATUS FROM SYS.M_DATABASES ORDER BY 1;
+
+
+
+
+
+

Enabling the script server

+
+
+

Run the below for enabling the script server

+
+
+
+
ALTER DATABASE HXE ADD 'scriptserver'
+
+
+
+

To check if the script server is enabled, execute the statement below

+
+
+
+
SELECT SERVICE_NAME, PORT, ACTIVE_STATUS FROM SYS.M_SERVICES ORDER BY 1;
+
+
+
+

You should see the scriptserver in it.

+
+
+
+
+

Creating a User on HXE

+
+
+
    +
  • +

    Connect using the below

    +
  • +
+
+
+
+
\c -d hxe -n localhost:39013 -u system -p <password>
+
+
+
+
    +
  • +

    To create a user

    +
  • +
+
+
+
+
Create user hanauser1 password <password> no force_first_password_change
+
+
+
+
    +
  • +

    Grant below permission to the user

    +
  • +
+
+
+
+
GRANT AFLPM_CREATOR_ERASER_EXECUTE TO hanauser1
+GRANT AFL__SYS_AFL_AFLPAL_EXECUTE TO hanauser1 – here we have 2 underscores
+grant AFL__SYS_AFL_AFLPAL_EXECUTE_WITH_GRANT_OPTION to hanauser1
+grant AFLPM_CREATOR_ERASER_EXECUTE to hanauser1
+GRANT DATA ADMIN TO hanauser1
+GRANT IMPORT TO hanauser1
+
+GRANT EXECUTE on _SYS_REPO.GRANT_ACTIVATED_ROLE TO hanauser1
+GRANT EXECUTE ON system.afl_wrapper_generator to hanauser1
+
+GRANT EXECUTE ON system.afl_wrapper_eraser to hanauser1
+GRANT MODELING TO hanauser1
+
+
+
+
    +
  • +

    Now connect to HXE tenant using below

    +
  • +
+
+
+
+
\c -d hxe -n localhost:39013 -u hanauser1 -p <password>
+
+
+
+
+
+

Setting up MTSJ Java back-end

+
+
+
    +
  • +

    Update application.properties file

    +
  • +
+
+
+
+
##update the below
+`spring.flyway.locations=classpath:db/migration,classpath:db/specific/hana`
+##Add the below
+spring.jpa.database=default
+spring.jpa.database-platform=org.hibernate.dialect.HANAColumnStoreDialect
+spring.datasource.driver-class-name=com.sap.db.jdbc.Driver
+spring.jpa.properties.hibernate.jdbc.lob.non_contextual_creation=true
+
+#Comment the below
+#spring.profiles.active=h2mem
+
+spring.profiles.active=hana
+
+
+
+
    +
  • +

    Update config/application.properties file

    +
  • +
+
+
+
+
##update the below
+spring.flyway.locations=classpath:db/migration,classpath:db/specific/hana
+spring.datasource.url=jdbc:sap://ip:port/?databaseName=hxe
+spring.datasource.username=username
+spring.datasource.password=password
+
+
+
+
+
+

Enabling prediction UseCase in MTSJ

+
+ +
+
+
+

Setting up MTSJ angular

+
+
+

update the following property in the config file in my-thai-star\angular\src\app\core\config

+
+
+
+
enablePrediction: true,
+
+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/serverless-design.html b/docs/my-thai-star/1.0/serverless-design.html new file mode 100644 index 00000000..b0714e89 --- /dev/null +++ b/docs/my-thai-star/1.0/serverless-design.html @@ -0,0 +1,727 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Serverless design (deprecated)

+
+ +
+
+
+

Introduction

+
+
+

The NodeJS back-end for My Thai Star application is going to be based on:

+
+
+
    +
  • +

    Serverless as serverless framework

    +
  • +
  • +

    devon4node as data access layer framework

    +
  • +
  • +

    DynamoDB as NoSQL Database

    +
  • +
+
+
+

To know more details about the above technologies please visit the following documentation:

+
+
+ +
+
+
+
+

Basic architecture details

+
+
+

This structure can be shown in the following example image:

+
+
+
+folder organization +
+
+
+
    +
  • +

    handlers - All function handlers following devon4node structure

    +
  • +
  • +

    src

    +
    +
      +
    • +

      model - Folder with all data model

      +
    • +
    • +

      utils - Folder with all utils like classes and functions

      +
    • +
    • +

      config.ts - File with server configs

      +
    • +
    • +

      logic.ts - File with the business logic

      +
    • +
    +
    +
  • +
  • +

    test - Folder with all tests

    +
  • +
+
+
+
+
+

Layers

+
+
+
    +
  • +

    Service Layer: this layer will expose the REST API to exchange information with the client applications.

    +
  • +
  • +

    Logic Layer: the layer in charge of hosting the business logic of the application.

    +
  • +
  • +

    Data Access Layer: the layer to communicate with the data base.

    +
  • +
+
+
+
+
+

Service layer

+
+
+

The services layer will be solved using REST services with Serverless

+
+
+

To give service to the defined User Stories we will need to implement the following services:

+
+
+
    +
  • +

    provide all available dishes.

    +
  • +
  • +

    save a booking.

    +
  • +
  • +

    save an order.

    +
  • +
  • +

    provide a list of bookings (only for waiters) and allow filtering.

    +
  • +
  • +

    provide a list of orders (only for waiters) and allow filtering.

    +
  • +
  • +

    login service (see the Security section).

    +
  • +
  • +

    provide the current user data (see the Security section)

    +
  • +
+
+
+

In order to be compatible with the other back-end implementations, we must follow the naming conventions proposed for Devon4j applications. We will define the following end points for the listed services.

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that do not belong to the Order entity).

    +
  • +
  • +

    (POST) /mythaistar/login.

    +
  • +
  • +

    (GET) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

You can find all the details for the services implementation in the Swagger definition included in the My Thai Star project on Github.

+
+
+

To handle these HTTP services, we must define the handlers following the devon4node convention:

+
+
+
    +
  • +

    (handlers/Http/POST/dish-search-handler) /mythaistar/services/rest/dishmanagement/v1/dish/search.

    +
  • +
  • +

    (handlers/Http/POST/booking-handler) /mythaistar/services/rest/bookingmanagement/v1/booking.

    +
  • +
  • +

    (handlers/Http/POST/order-handler) /mythaistar/services/rest/ordermanagement/v1/order.

    +
  • +
  • +

    (handlers/Http/POST/booking-search-handler) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (handlers/Http/POST/order-search-handler) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (handlers/Http/POST/order-filter-handler) /mythaistar/services/rest/ordermanagement/v1/order/filter (to filter with fields that do not belong to the Order entity).

    +
  • +
  • +

    (handlers/Http/POST/login-handler) /mythaistar/login.

    +
  • +
  • +

    (handlers/Http/GET/current-user-handler) /mythaistar/services/rest/security/v1/currentuser/.

    +
  • +
+
+
+

These handlers will define the behavior for each service and use the logical layer.

+
+
+

An example of handler definition:

+
+
+
+
oasp4fn.config({ path: '/mythaistar/services/rest/bookingmanagement/v1/booking/search' });
+export async function bookingSearch(event: HttpEvent, context: Context, callback: Function) {
+    try {
+        const search = <types.SearchCriteria>event.body;
+        const authToken = event.headers.Authorization;
+        // falta lo que viene siendo comprobar el token y eso
+
+        auth.decode(authToken, (err, decoded) => {
+            if (err || decoded.role !==  'WAITER') {
+                throw { code: 403, message: 'Forbidden'};
+            }
+
+            // body content must be SearchCriteria
+            if (!types.isSearchCriteria(search)) {
+                throw { code: 400, message: 'No booking token given' };
+            }
+
+            business.searchBooking(search, (err: types.Error | null, bookingEntity: types.PaginatedList) => {
+                if (err) {
+                    callback(new Error(`[${err.code || 500}] ${err.message}`));
+                } else {
+                    callback(null, bookingEntity);
+                }
+            });
+        });
+    } catch (err) {
+        callback(new Error(`[${err.code || 500}] ${err.message}`));
+    }
+}
+
+
+
+

The default integration for a handler is lambda. See devon4node documentation for more information about default values and how to change it.

+
+
+
+
+

==

+
+
+

If you change the integration to lambda-proxy, you must take care that in this case the data will not be parsed. You must do JSON.parse explicitly +== ==

+
+
+

After defining all the handlers, we must execute the fun command, which will generate the files serverless.yml and webpack.config.js.

+
+
+
+
+

Logic layer and Data access layer

+ +
+
+

Security with Json Web Token

+
+
+

For the Authentication and Authorization the app will implement the json web token protocol.

+
+
+
+
+

JWT basics

+
+
+

Refer to JWT basics for more information.

+
+
+
+
+

JWT implementation details

+
+
+

The Json Web Token pattern will be implemented based on the JSON web token library available on npm.

+
+
+
+
+

== Authentication

+
+
+

Based on the JSON web token approach, we will implement two methods: one to verify a user and generate the token, and one to decode the token and return the user data. Also, as My Thai Star is a mainly public application, we will define here the resources that won’t be secured.

+
+
+

List of unsecured resources:

+
+
+
    +
  • +

    /services/rest/dishmanagement/**: to allow anonymous users to see the dishes info in the menu section.

    +
  • +
  • +

    /services/rest/ordermanagement/v1/order: to allow anonymous users to save an order. They will need a booking token but they won’t be authenticated to do this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking: to allow anonymous users to create a booking. Only a booking token is necessary to accomplish this task.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/booking/cancel/**: to allow canceling a booking from an email. Only the booking token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/accept/**: to allow guests to accept an invite. Only a guest token is needed.

    +
  • +
  • +

    /services/rest/bookingmanagement/v1/invitedguest/decline/**: to allow guests to reject an invite. Only a guest token is needed.

    +
  • +
+
+
+

To configure the login we will create a handler called login and then we will use the method code to verify the user and generate the token.

+
+
+
+
app.post(oasp4fn.config({ integration: 'lambda-proxy', path: '/mythaistar/login' });
+export async function login(event: HttpEvent, context: Context, callback: Function) {
+.
+.
+.
+.
+}
+
+
+
+

We have two default users created in the database:

+
+
+
    +
  • +

    user: waiter

    +
  • +
  • +

    password: waiter

    +
  • +
  • +

    role: WAITER

    +
  • +
  • +

    user: user0

    +
  • +
  • +

    password: password

    +
  • +
  • +

    role: CUSTOMER

    +
  • +
+
+
+
+
+

== Token set up

+ +
+
+

== Current User request

+
+
+

To provide the client with the current user data our application should expose a service to return the user details. In order to do this, we must define a handler called current-user-handler. This handler must decode the Authorization token and return the user data.

+
+
+
+
oasp4fn.config({
+    path: '/mythaistar/services/rest/security/v1/currentuser',
+});
+export async function currentUser(event: HttpEvent, context: Context, callback: Function) {
+    let authToken = event.headers.Authorization;
+    try {
+        auth.decode(authToken, (err: any, decoded?: any) => {
+            if (err) {
+                callback(new Error(`[403] Forbidden`));
+            } else {
+                callback(null, decoded);
+            }
+        });
+    } catch (err) {
+        callback(new Error(`[${err.code || 500}] ${err.message}`));
+    }
+}
+
+
+
+
+
+

== Authorization

+
+
+

We need to secure three services, that only should be accessible for users with role Waiter:

+
+
+
    +
  • +

    (POST) /mythaistar/services/rest/bookingmanagement/v1/booking/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/search.

    +
  • +
  • +

    (POST) /mythaistar/services/rest/ordermanagement/v1/order/filter.

    +
  • +
+
+
+

To ensure this, we must decode the Authorization token and check the result. As the role is included in the token, once validated we will have this information and can guarantee access or return a 403 error.

+
+
+
+
oasp4fn.config({ path: '/mythaistar/services/rest/bookingmanagement/v1/booking/search' });
+export async function bookingSearch(event: HttpEvent, context: Context, callback: Function) {
+    const authToken = event.headers.Authorization;
+    auth.decode(authToken, (err, decoded) => {
+        try {
+            if (err || decoded.role !==  'WAITER') {
+                throw { code: 403, message: 'Forbidden' };
+            }
+
+            [...]
+
+        } catch (err) {
+            callback(new Error(`[${err.code || 500}] ${err.message}`));
+        }
+    });
+}
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/style-guide.html b/docs/my-thai-star/1.0/style-guide.html new file mode 100644 index 00000000..ccbdc81e --- /dev/null +++ b/docs/my-thai-star/1.0/style-guide.html @@ -0,0 +1,313 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ + +
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/traefik-reverse-proxy.html b/docs/my-thai-star/1.0/traefik-reverse-proxy.html new file mode 100644 index 00000000..f714014a --- /dev/null +++ b/docs/my-thai-star/1.0/traefik-reverse-proxy.html @@ -0,0 +1,330 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Reverse proxy strategy using Traefik

+
+
+

This implementation is the same as described at My Thai Star deployment wiki page. The only thing that changes is that Traefik is used instead of NGINX.

+
+
+

Using Traefik as reverse proxy, we can define the routes using labels in the docker containers instead of using a nginx.conf file. With this, it is not necessary to modify the reverse proxy container for each application. In addition, as Traefik is listening to the docker daemon, it can detect new containers and create routes for them without rebooting.

+
+
+

Example of labels:

+
+
+
+
labels:
+    - "traefik.http.routers.angular.rule=PathPrefix(`/`)"
+    - "traefik.http.services.angular.loadBalancer.healthcheck.path=/health"
+    - "traefik.http.services.angular.loadBalancer.healthcheck.interval=10s"
+    - "traefik.http.services.angular.loadBalancer.healthcheck.scheme=http"
+
+
+
+
+
+

How to use it

+
+
+

If you want to build the images from code, change to My Thai Star root folder and execute:

+
+
+
+
$ docker-compose -f docker-compose.traefik.yml up -d --build
+
+
+
+

If you want to build the images from artifacts, change to Traefik folder (reverse-proxy/traefik) and execute:

+
+
+
+
$ docker-compose up -d --build
+
+
+
+

After a few seconds, when the healthcheck detects that the containers are running, your application will be available at http://localhost:8090. Also, the Traefik dashboard is available at http://localhost:8080.

+
+
+

If you want to check the behavior of the application when you scale up the back-end, you can execute:

+
+
+
+
$ docker-compose scale java=5
+
+
+
+

With this, the access to the java back-end will be using the load balancing method: Weighted Round Robin.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/twofactor.html b/docs/my-thai-star/1.0/twofactor.html new file mode 100644 index 00000000..6e4721e8 --- /dev/null +++ b/docs/my-thai-star/1.0/twofactor.html @@ -0,0 +1,381 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Two-Factor Authentication

+
+
+

Two-factor Authentication (2FA) provides an additional level of security to your account. Once enabled, in addition to supplying your username and password to log in, you’ll be prompted for a code generated by an authenticator app, for example Google Authenticator on one of your devices.

+
+
+

By enabling 2FA, to log into your account an additional one-time password is required, which requires access to your paired device. This massively increases the barrier for an attacker to break into your account.

+
+
+
+
+

Back-end mechanism

+
+
+

In the back-end, we utilize Spring Security for any authentication.

+
+
+

Following the arrows, one can see all processes regarding authentication. The main idea is to check all credentials depending on their 2FA status and then either grant access to the specific user or deny access. This picture illustrates a normal authentication with username and password.

+
+
+
+security cross component +
+
+
+

When dealing with 2FA, another provider and filter is handling the request from /verify

+
+
+
+security cross component twofactor +
+
+
+

Here you can observe which filter will be used. +The JWT-Authentication-Filter intercepts any request, enforcing authentication via JWT

+
+
+
+filters png +
+
+
+ + + + + +
+ + +Whenever the secret or QR code gets transferred between two parties, one must enforce SSL/TLS or IPsec to comply with RFC 6238. +
+
+
+
+
+

Activating Two-Factor Authentication

+
+
+

In the current state, TOTP +will be used for OTP generation. For this purpose we recommend the Google Authenticator or any TOTP generator out there.

+
+
+
    +
  • +

    Login with your account

    +
  • +
  • +

    Open the 2FA settings

    +
  • +
  • +

    Activate the 2FA Status

    +
  • +
  • +

    Initialize your device with either a QR-Code or a secret

    +
  • +
+
+
+
+
+

Frontend

+
+
+

These are the two main options, which you can obtain by toggling between QR-Code and secret.

+
+
+
+2FA qr code menu +
+
+
+
+2FA secret menu +
+
+
+

After activation and logout, this prompt will ask you to enter the OTP from your device.

+
+
+
+otp prompt +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/xamarin-design.html b/docs/my-thai-star/1.0/xamarin-design.html new file mode 100644 index 00000000..e85f1455 --- /dev/null +++ b/docs/my-thai-star/1.0/xamarin-design.html @@ -0,0 +1,280 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Xamarin design

+
+
+

TODO

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/my-thai-star/1.0/xamarin-testing.html b/docs/my-thai-star/1.0/xamarin-testing.html new file mode 100644 index 00000000..eef9155f --- /dev/null +++ b/docs/my-thai-star/1.0/xamarin-testing.html @@ -0,0 +1,280 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Xamarin testing

+
+
+

TODO

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/production-line/1.0/_images/images/devon4j-mts/JenkinsDeployParameters.png b/docs/production-line/1.0/_images/images/devon4j-mts/JenkinsDeployParameters.png new file mode 100644 index 00000000..2662d3a9 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-mts/JenkinsDeployParameters.png differ diff --git a/docs/production-line/1.0/_images/images/devon4j-mts/SonarqubeWebhook.png b/docs/production-line/1.0/_images/images/devon4j-mts/SonarqubeWebhook.png new file mode 100644 index 00000000..b479c328 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-mts/SonarqubeWebhook.png differ diff --git a/docs/production-line/1.0/_images/images/devon4j-mts/allure.JPG b/docs/production-line/1.0/_images/images/devon4j-mts/allure.JPG new file mode 100644 index 00000000..97d4f53f Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-mts/allure.JPG differ diff --git a/docs/production-line/1.0/_images/images/devon4j-mts/created_token.JPG b/docs/production-line/1.0/_images/images/devon4j-mts/created_token.JPG new file mode 100644 index 00000000..9bd39497 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-mts/created_token.JPG differ diff --git a/docs/production-line/1.0/_images/images/devon4j-mts/credential.png b/docs/production-line/1.0/_images/images/devon4j-mts/credential.png new file mode 100644 index 00000000..865731f8 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-mts/credential.png differ diff --git a/docs/production-line/1.0/_images/images/devon4j-mts/devonfw.png b/docs/production-line/1.0/_images/images/devon4j-mts/devonfw.png new file mode 100644 index 00000000..a8c61b09 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-mts/devonfw.png differ diff --git a/docs/production-line/1.0/_images/images/devon4j-mts/jenkinsonarscanner.PNG 
b/docs/production-line/1.0/_images/images/devon4j-mts/jenkinsonarscanner.PNG new file mode 100644 index 00000000..63c9e859 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-mts/jenkinsonarscanner.PNG differ diff --git a/docs/production-line/1.0/_images/images/devon4j-mts/job.PNG b/docs/production-line/1.0/_images/images/devon4j-mts/job.PNG new file mode 100644 index 00000000..ac2f1046 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-mts/job.PNG differ diff --git a/docs/production-line/1.0/_images/images/devon4j-mts/mavensettings.PNG b/docs/production-line/1.0/_images/images/devon4j-mts/mavensettings.PNG new file mode 100644 index 00000000..ba365448 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-mts/mavensettings.PNG differ diff --git a/docs/production-line/1.0/_images/images/devon4j-mts/mavensettings2.PNG b/docs/production-line/1.0/_images/images/devon4j-mts/mavensettings2.PNG new file mode 100644 index 00000000..67de91c4 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-mts/mavensettings2.PNG differ diff --git a/docs/production-line/1.0/_images/images/devon4j-mts/nexusadmin.png b/docs/production-line/1.0/_images/images/devon4j-mts/nexusadmin.png new file mode 100644 index 00000000..9536b077 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-mts/nexusadmin.png differ diff --git a/docs/production-line/1.0/_images/images/devon4j-mts/nexususer.png b/docs/production-line/1.0/_images/images/devon4j-mts/nexususer.png new file mode 100644 index 00000000..03d4b1ed Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-mts/nexususer.png differ diff --git a/docs/production-line/1.0/_images/images/devon4j-mts/pen.png b/docs/production-line/1.0/_images/images/devon4j-mts/pen.png new file mode 100644 index 00000000..a5993c09 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-mts/pen.png differ 
diff --git a/docs/production-line/1.0/_images/images/devon4j-mts/profile.png b/docs/production-line/1.0/_images/images/devon4j-mts/profile.png new file mode 100644 index 00000000..8209c816 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-mts/profile.png differ diff --git a/docs/production-line/1.0/_images/images/devon4j-mts/sonaraccount.PNG b/docs/production-line/1.0/_images/images/devon4j-mts/sonaraccount.PNG new file mode 100644 index 00000000..63018fe9 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-mts/sonaraccount.PNG differ diff --git a/docs/production-line/1.0/_images/images/devon4j-mts/sonarjenkins.PNG b/docs/production-line/1.0/_images/images/devon4j-mts/sonarjenkins.PNG new file mode 100644 index 00000000..947237a7 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-mts/sonarjenkins.PNG differ diff --git a/docs/production-line/1.0/_images/images/devon4j-mts/token.JPG b/docs/production-line/1.0/_images/images/devon4j-mts/token.JPG new file mode 100644 index 00000000..ff44568d Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-mts/token.JPG differ diff --git a/docs/production-line/1.0/_images/images/devon4j-pl/allure.JPG b/docs/production-line/1.0/_images/images/devon4j-pl/allure.JPG new file mode 100644 index 00000000..97d4f53f Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-pl/allure.JPG differ diff --git a/docs/production-line/1.0/_images/images/devon4j-pl/created_token.JPG b/docs/production-line/1.0/_images/images/devon4j-pl/created_token.JPG new file mode 100644 index 00000000..9bd39497 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-pl/created_token.JPG differ diff --git a/docs/production-line/1.0/_images/images/devon4j-pl/devon4j.JPG b/docs/production-line/1.0/_images/images/devon4j-pl/devon4j.JPG new file mode 100644 index 00000000..0907afa4 Binary files /dev/null and 
b/docs/production-line/1.0/_images/images/devon4j-pl/devon4j.JPG differ diff --git a/docs/production-line/1.0/_images/images/devon4j-pl/devonfw.png b/docs/production-line/1.0/_images/images/devon4j-pl/devonfw.png new file mode 100644 index 00000000..a8c61b09 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-pl/devonfw.png differ diff --git a/docs/production-line/1.0/_images/images/devon4j-pl/jenkinsfile.JPG b/docs/production-line/1.0/_images/images/devon4j-pl/jenkinsfile.JPG new file mode 100644 index 00000000..e0467cc6 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-pl/jenkinsfile.JPG differ diff --git a/docs/production-line/1.0/_images/images/devon4j-pl/maven.JPG b/docs/production-line/1.0/_images/images/devon4j-pl/maven.JPG new file mode 100644 index 00000000..14e801ea Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-pl/maven.JPG differ diff --git a/docs/production-line/1.0/_images/images/devon4j-pl/pen.png b/docs/production-line/1.0/_images/images/devon4j-pl/pen.png new file mode 100644 index 00000000..a5993c09 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-pl/pen.png differ diff --git a/docs/production-line/1.0/_images/images/devon4j-pl/pl.png b/docs/production-line/1.0/_images/images/devon4j-pl/pl.png new file mode 100644 index 00000000..f788df66 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-pl/pl.png differ diff --git a/docs/production-line/1.0/_images/images/devon4j-pl/profile.png b/docs/production-line/1.0/_images/images/devon4j-pl/profile.png new file mode 100644 index 00000000..8209c816 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4j-pl/profile.png differ diff --git a/docs/production-line/1.0/_images/images/devon4j-pl/token.JPG b/docs/production-line/1.0/_images/images/devon4j-pl/token.JPG new file mode 100644 index 00000000..ff44568d Binary files /dev/null and 
b/docs/production-line/1.0/_images/images/devon4j-pl/token.JPG differ diff --git a/docs/production-line/1.0/_images/images/devon4ng-pl/allure.JPG b/docs/production-line/1.0/_images/images/devon4ng-pl/allure.JPG new file mode 100644 index 00000000..97d4f53f Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4ng-pl/allure.JPG differ diff --git a/docs/production-line/1.0/_images/images/devon4ng-pl/created_token.JPG b/docs/production-line/1.0/_images/images/devon4ng-pl/created_token.JPG new file mode 100644 index 00000000..9bd39497 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4ng-pl/created_token.JPG differ diff --git a/docs/production-line/1.0/_images/images/devon4ng-pl/devon4ng.JPG b/docs/production-line/1.0/_images/images/devon4ng-pl/devon4ng.JPG new file mode 100644 index 00000000..cafaf809 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4ng-pl/devon4ng.JPG differ diff --git a/docs/production-line/1.0/_images/images/devon4ng-pl/devonfw.png b/docs/production-line/1.0/_images/images/devon4ng-pl/devonfw.png new file mode 100644 index 00000000..a8c61b09 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4ng-pl/devonfw.png differ diff --git a/docs/production-line/1.0/_images/images/devon4ng-pl/jenkinsfile.JPG b/docs/production-line/1.0/_images/images/devon4ng-pl/jenkinsfile.JPG new file mode 100644 index 00000000..e0467cc6 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4ng-pl/jenkinsfile.JPG differ diff --git a/docs/production-line/1.0/_images/images/devon4ng-pl/maven.JPG b/docs/production-line/1.0/_images/images/devon4ng-pl/maven.JPG new file mode 100644 index 00000000..14e801ea Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4ng-pl/maven.JPG differ diff --git a/docs/production-line/1.0/_images/images/devon4ng-pl/pen.png b/docs/production-line/1.0/_images/images/devon4ng-pl/pen.png new file mode 100644 index 
00000000..a5993c09 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4ng-pl/pen.png differ diff --git a/docs/production-line/1.0/_images/images/devon4ng-pl/pl.png b/docs/production-line/1.0/_images/images/devon4ng-pl/pl.png new file mode 100644 index 00000000..f788df66 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4ng-pl/pl.png differ diff --git a/docs/production-line/1.0/_images/images/devon4ng-pl/profile.png b/docs/production-line/1.0/_images/images/devon4ng-pl/profile.png new file mode 100644 index 00000000..8209c816 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4ng-pl/profile.png differ diff --git a/docs/production-line/1.0/_images/images/devon4ng-pl/token.JPG b/docs/production-line/1.0/_images/images/devon4ng-pl/token.JPG new file mode 100644 index 00000000..ff44568d Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4ng-pl/token.JPG differ diff --git a/docs/production-line/1.0/_images/images/devon4node-pl/allure.JPG b/docs/production-line/1.0/_images/images/devon4node-pl/allure.JPG new file mode 100644 index 00000000..97d4f53f Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4node-pl/allure.JPG differ diff --git a/docs/production-line/1.0/_images/images/devon4node-pl/created_token.JPG b/docs/production-line/1.0/_images/images/devon4node-pl/created_token.JPG new file mode 100644 index 00000000..9bd39497 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4node-pl/created_token.JPG differ diff --git a/docs/production-line/1.0/_images/images/devon4node-pl/devon4node.JPG b/docs/production-line/1.0/_images/images/devon4node-pl/devon4node.JPG new file mode 100644 index 00000000..31955c18 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4node-pl/devon4node.JPG differ diff --git a/docs/production-line/1.0/_images/images/devon4node-pl/devonfw.png 
b/docs/production-line/1.0/_images/images/devon4node-pl/devonfw.png new file mode 100644 index 00000000..a8c61b09 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4node-pl/devonfw.png differ diff --git a/docs/production-line/1.0/_images/images/devon4node-pl/jenkinsfile.JPG b/docs/production-line/1.0/_images/images/devon4node-pl/jenkinsfile.JPG new file mode 100644 index 00000000..e0467cc6 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4node-pl/jenkinsfile.JPG differ diff --git a/docs/production-line/1.0/_images/images/devon4node-pl/maven.JPG b/docs/production-line/1.0/_images/images/devon4node-pl/maven.JPG new file mode 100644 index 00000000..14e801ea Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4node-pl/maven.JPG differ diff --git a/docs/production-line/1.0/_images/images/devon4node-pl/pen.png b/docs/production-line/1.0/_images/images/devon4node-pl/pen.png new file mode 100644 index 00000000..a5993c09 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4node-pl/pen.png differ diff --git a/docs/production-line/1.0/_images/images/devon4node-pl/pl.png b/docs/production-line/1.0/_images/images/devon4node-pl/pl.png new file mode 100644 index 00000000..f788df66 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4node-pl/pl.png differ diff --git a/docs/production-line/1.0/_images/images/devon4node-pl/profile.png b/docs/production-line/1.0/_images/images/devon4node-pl/profile.png new file mode 100644 index 00000000..8209c816 Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4node-pl/profile.png differ diff --git a/docs/production-line/1.0/_images/images/devon4node-pl/token.JPG b/docs/production-line/1.0/_images/images/devon4node-pl/token.JPG new file mode 100644 index 00000000..ff44568d Binary files /dev/null and b/docs/production-line/1.0/_images/images/devon4node-pl/token.JPG differ diff --git 
a/docs/production-line/1.0/_images/images/docker-configuration/docker-configuration.png b/docs/production-line/1.0/_images/images/docker-configuration/docker-configuration.png new file mode 100644 index 00000000..7f5ca594 Binary files /dev/null and b/docs/production-line/1.0/_images/images/docker-configuration/docker-configuration.png differ diff --git a/docs/production-line/1.0/_images/images/docker-configuration/docker-configuration2.png b/docs/production-line/1.0/_images/images/docker-configuration/docker-configuration2.png new file mode 100644 index 00000000..fff9e6e8 Binary files /dev/null and b/docs/production-line/1.0/_images/images/docker-configuration/docker-configuration2.png differ diff --git a/docs/production-line/1.0/_images/images/docker-configuration/docker-custom-tool.png b/docs/production-line/1.0/_images/images/docker-configuration/docker-custom-tool.png new file mode 100644 index 00000000..f4a51e95 Binary files /dev/null and b/docs/production-line/1.0/_images/images/docker-configuration/docker-custom-tool.png differ diff --git a/docs/production-line/1.0/_images/images/docker-configuration/docker-env-var.png b/docs/production-line/1.0/_images/images/docker-configuration/docker-env-var.png new file mode 100644 index 00000000..86a753d7 Binary files /dev/null and b/docs/production-line/1.0/_images/images/docker-configuration/docker-env-var.png differ diff --git a/docs/production-line/1.0/_images/images/initialize-instance/create-account.png b/docs/production-line/1.0/_images/images/initialize-instance/create-account.png new file mode 100644 index 00000000..f0807f48 Binary files /dev/null and b/docs/production-line/1.0/_images/images/initialize-instance/create-account.png differ diff --git a/docs/production-line/1.0/_images/images/initialize-instance/create-account2.png b/docs/production-line/1.0/_images/images/initialize-instance/create-account2.png new file mode 100644 index 00000000..3c75a3ac Binary files /dev/null and 
b/docs/production-line/1.0/_images/images/initialize-instance/create-account2.png differ diff --git a/docs/production-line/1.0/_images/images/initialize-instance/create-account3.png b/docs/production-line/1.0/_images/images/initialize-instance/create-account3.png new file mode 100644 index 00000000..60f6f1c7 Binary files /dev/null and b/docs/production-line/1.0/_images/images/initialize-instance/create-account3.png differ diff --git a/docs/production-line/1.0/_images/images/initialize-instance/create-account4.png b/docs/production-line/1.0/_images/images/initialize-instance/create-account4.png new file mode 100644 index 00000000..2849e99f Binary files /dev/null and b/docs/production-line/1.0/_images/images/initialize-instance/create-account4.png differ diff --git a/docs/production-line/1.0/_images/images/initialize-instance/create-account5.png b/docs/production-line/1.0/_images/images/initialize-instance/create-account5.png new file mode 100644 index 00000000..5f253399 Binary files /dev/null and b/docs/production-line/1.0/_images/images/initialize-instance/create-account5.png differ diff --git a/docs/production-line/1.0/_images/images/initialize-instance/initialize-instance.png b/docs/production-line/1.0/_images/images/initialize-instance/initialize-instance.png new file mode 100644 index 00000000..48d85ba8 Binary files /dev/null and b/docs/production-line/1.0/_images/images/initialize-instance/initialize-instance.png differ diff --git a/docs/production-line/1.0/_images/images/initialize-instance/initialize-instance2.png b/docs/production-line/1.0/_images/images/initialize-instance/initialize-instance2.png new file mode 100644 index 00000000..afd44043 Binary files /dev/null and b/docs/production-line/1.0/_images/images/initialize-instance/initialize-instance2.png differ diff --git a/docs/production-line/1.0/_images/images/initialize-instance/maven-config.png b/docs/production-line/1.0/_images/images/initialize-instance/maven-config.png new file mode 100644 index 
00000000..73107aad Binary files /dev/null and b/docs/production-line/1.0/_images/images/initialize-instance/maven-config.png differ diff --git a/docs/production-line/1.0/_images/images/install-sonar-plugin/build-install-sonar-plugin.png b/docs/production-line/1.0/_images/images/install-sonar-plugin/build-install-sonar-plugin.png new file mode 100644 index 00000000..927317ab Binary files /dev/null and b/docs/production-line/1.0/_images/images/install-sonar-plugin/build-install-sonar-plugin.png differ diff --git a/docs/production-line/1.0/_images/images/install-sonar-plugin/build-install-sonar-plugin2.png b/docs/production-line/1.0/_images/images/install-sonar-plugin/build-install-sonar-plugin2.png new file mode 100644 index 00000000..5744ab7b Binary files /dev/null and b/docs/production-line/1.0/_images/images/install-sonar-plugin/build-install-sonar-plugin2.png differ diff --git a/docs/production-line/1.0/_images/images/install-sonar-plugin/sonar-plugin.png b/docs/production-line/1.0/_images/images/install-sonar-plugin/sonar-plugin.png new file mode 100644 index 00000000..cc096944 Binary files /dev/null and b/docs/production-line/1.0/_images/images/install-sonar-plugin/sonar-plugin.png differ diff --git a/docs/production-line/1.0/_images/images/mrchecker/allure.JPG b/docs/production-line/1.0/_images/images/mrchecker/allure.JPG new file mode 100644 index 00000000..97d4f53f Binary files /dev/null and b/docs/production-line/1.0/_images/images/mrchecker/allure.JPG differ diff --git a/docs/production-line/1.0/_images/images/mrchecker/allure_report.JPG b/docs/production-line/1.0/_images/images/mrchecker/allure_report.JPG new file mode 100644 index 00000000..eeffc793 Binary files /dev/null and b/docs/production-line/1.0/_images/images/mrchecker/allure_report.JPG differ diff --git a/docs/production-line/1.0/_images/images/mrchecker/created_token.JPG b/docs/production-line/1.0/_images/images/mrchecker/created_token.JPG new file mode 100644 index 00000000..9bd39497 Binary 
files /dev/null and b/docs/production-line/1.0/_images/images/mrchecker/created_token.JPG differ diff --git a/docs/production-line/1.0/_images/images/mrchecker/maven.JPG b/docs/production-line/1.0/_images/images/mrchecker/maven.JPG new file mode 100644 index 00000000..14e801ea Binary files /dev/null and b/docs/production-line/1.0/_images/images/mrchecker/maven.JPG differ diff --git a/docs/production-line/1.0/_images/images/mrchecker/mrchecker&pl.JPG b/docs/production-line/1.0/_images/images/mrchecker/mrchecker&pl.JPG new file mode 100644 index 00000000..97a787d8 Binary files /dev/null and b/docs/production-line/1.0/_images/images/mrchecker/mrchecker&pl.JPG differ diff --git a/docs/production-line/1.0/_images/images/mrchecker/mrchecker.png b/docs/production-line/1.0/_images/images/mrchecker/mrchecker.png new file mode 100644 index 00000000..e3709bfe Binary files /dev/null and b/docs/production-line/1.0/_images/images/mrchecker/mrchecker.png differ diff --git a/docs/production-line/1.0/_images/images/mrchecker/pen.png b/docs/production-line/1.0/_images/images/mrchecker/pen.png new file mode 100644 index 00000000..a5993c09 Binary files /dev/null and b/docs/production-line/1.0/_images/images/mrchecker/pen.png differ diff --git a/docs/production-line/1.0/_images/images/mrchecker/pipeline_script.JPG b/docs/production-line/1.0/_images/images/mrchecker/pipeline_script.JPG new file mode 100644 index 00000000..08aa6ca7 Binary files /dev/null and b/docs/production-line/1.0/_images/images/mrchecker/pipeline_script.JPG differ diff --git a/docs/production-line/1.0/_images/images/mrchecker/pl.png b/docs/production-line/1.0/_images/images/mrchecker/pl.png new file mode 100644 index 00000000..f788df66 Binary files /dev/null and b/docs/production-line/1.0/_images/images/mrchecker/pl.png differ diff --git a/docs/production-line/1.0/_images/images/mrchecker/pljob.JPG b/docs/production-line/1.0/_images/images/mrchecker/pljob.JPG new file mode 100644 index 00000000..cb505ebd Binary 
files /dev/null and b/docs/production-line/1.0/_images/images/mrchecker/pljob.JPG differ diff --git a/docs/production-line/1.0/_images/images/mrchecker/profile.png b/docs/production-line/1.0/_images/images/mrchecker/profile.png new file mode 100644 index 00000000..8209c816 Binary files /dev/null and b/docs/production-line/1.0/_images/images/mrchecker/profile.png differ diff --git a/docs/production-line/1.0/_images/images/mrchecker/token.JPG b/docs/production-line/1.0/_images/images/mrchecker/token.JPG new file mode 100644 index 00000000..ff44568d Binary files /dev/null and b/docs/production-line/1.0/_images/images/mrchecker/token.JPG differ diff --git a/docs/production-line/1.0/_images/images/newjenkinsjob.PNG b/docs/production-line/1.0/_images/images/newjenkinsjob.PNG new file mode 100644 index 00000000..9ee3c1d9 Binary files /dev/null and b/docs/production-line/1.0/_images/images/newjenkinsjob.PNG differ diff --git a/docs/production-line/1.0/_images/images/openshift-configuration/openshift-clusters.png b/docs/production-line/1.0/_images/images/openshift-configuration/openshift-clusters.png new file mode 100644 index 00000000..e8e90a8c Binary files /dev/null and b/docs/production-line/1.0/_images/images/openshift-configuration/openshift-clusters.png differ diff --git a/docs/production-line/1.0/_images/images/openshift-configuration/openshift-configuration.png b/docs/production-line/1.0/_images/images/openshift-configuration/openshift-configuration.png new file mode 100644 index 00000000..73dc0d8f Binary files /dev/null and b/docs/production-line/1.0/_images/images/openshift-configuration/openshift-configuration.png differ diff --git a/docs/production-line/1.0/_images/images/openshift-configuration/openshift-configuration2.png b/docs/production-line/1.0/_images/images/openshift-configuration/openshift-configuration2.png new file mode 100644 index 00000000..e1ad83b5 Binary files /dev/null and 
b/docs/production-line/1.0/_images/images/openshift-configuration/openshift-configuration2.png differ diff --git a/docs/production-line/1.0/_images/images/openshift-configuration/openshift-configuration3.png b/docs/production-line/1.0/_images/images/openshift-configuration/openshift-configuration3.png new file mode 100644 index 00000000..3260daae Binary files /dev/null and b/docs/production-line/1.0/_images/images/openshift-configuration/openshift-configuration3.png differ diff --git a/docs/production-line/1.0/_images/images/pipelinesettings.PNG b/docs/production-line/1.0/_images/images/pipelinesettings.PNG new file mode 100644 index 00000000..9cc5edc7 Binary files /dev/null and b/docs/production-line/1.0/_images/images/pipelinesettings.PNG differ diff --git a/docs/production-line/1.0/devon4j-mts.html b/docs/production-line/1.0/devon4j-mts.html new file mode 100644 index 00000000..81e3e73a --- /dev/null +++ b/docs/production-line/1.0/devon4j-mts.html @@ -0,0 +1,711 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

devon4j My-Thai-Star Sample Application Template for Production Line

+
+ +
+
+
+

Introduction

+
+
+

Please read all of the following sections carefully.

+
+
+
+
+

Overview

+
+
+

This template will configure your PL instance to have a 'ready to use' My-Thai-Star devonfw application. It is only an example. In order to start a new project, please use the other templates. This includes:

+
+
+
    +
  • +

    Cloning the official My-Thai-Star (https://github.com/devonfw/my-thai-star) repository into your GitLab, which allows you to do customizations on your own.

    +
  • +
  • +

    Adding a build job for the Angular front-end, including a SonarQube analysis and a delivery to Nexus as zip and docker image.

    +
  • +
  • +

    Adding a build job for the Java back-end, including a SonarQube analysis and a deployment to Nexus as zip and docker image.

    +
  • +
  • +

    Adding a deployment job for the Angular front-end

    +
  • +
  • +

    Adding a deployment job for the Java back-end

    +
  • +
  • +

    Adding a deployment job for the reverse proxy. Please see My Thai Star deployment documentation

    +
  • +
+
+
+

Especially the build and deployment jobs require several additional Jenkins plugins, which are not part of the PL by default. The Template will also take care of those installations.

+
+
+

All build and deployment jobs are taken from the official My-Thai-Star (https://github.com/devonfw/my-thai-star) repository. The created build and deployment jobs inside Jenkins will use the Jenkinsfiles from the cloned repo in Gitlab. These are currently the following Jenkinsfiles:

+
+
+
+
+

Jenkins Jobs

+
+
+
Jenkins Jobs
+

|== == == == == == == == == == +| Jenkins job name | Path to Jenkinsfile in repo | Description +| MyThaiStar_FRONTEND_BUILD | jenkins/angular/cicd/Jenkinsfile | Builds and tests the Angular frontend. Pushes artifacts to Nexus. +| MyThaiStar_SERVER_BUILD | jenkins/java/cicd/Jenkinsfile | Builds and tests the Java backend. Pushes artifacts to Nexus. +| MyThaiStar_FRONTEND_DEPLOY | jenkins/angular/deployment/Jenkinsfile | Frontend deployment job. Downloads the docker images from Nexus3 and starts a new container using that image. +| MyThaiStar_SERVER_DEPLOY | jenkins/java/deployment/Jenkinsfile | Backend deployment job. Downloads the docker images from Nexus3 and starts a new container using that image. +| MyThaiStar_REVERSE-PROXY_DEPLOY | jenkins/deployment/Jenkinsfile | Reverse proxy deployment job. Downloads the docker images from Nexus3 and starts a new container using that image. With this job you can also build the reverse proxy image. +|== == == == == == == == == ==

+
+
+
+
+

How to report Issues

+
+
+

This template is independent from PL and devonfw releases and is also not really connected to one of the projects. Therefore issues that occur during the template setup or execution should be tracked in the issue section of this GitHub project.

+
+
+
+
+

How to contribute

+
+
+

In case you see improvements we would love to see a Pull Request.

+
+
+
+
+

Prerequisities before running the template

+
+ +
+
+
+

Production Line Components

+
+
+

To use the template you need to make sure that your PL has the following components installed:

+
+
+
    +
  • +

    Jenkins (required to run the template and to execute the build/deployment Jobs)

    +
  • +
  • +

    SonarQube (required for a static code analysis)

    +
  • +
  • +

    GitLab (required as a repository)

    +
  • +
  • +

    Nexus3 (required to store the build artifacts)

    +
  • +
+
+
+
+
+

==

+
+
+

Additional components can be ordered from the ProductionLine service team. +== ==

+
+
+
+
+

Technical User Setup

+
+
+

In order to configure the services, we need technical users for the following components:

+
+
+
    +
  • +

    Gitlab

    +
  • +
  • +

    Nexus3

    +
  • +
  • +

    SonarQube

    +
  • +
+
+
+

The following sections describe how to configure the components to enable technical users and tokens.

+
+
+
+
+

== Manual configuration

+
+
+

In order to configure the Production Line components manually you can follow this guide

+
+
+
+
+

== Automatic configuration

+
+
+

In order to configure the Production Line components automatically you can follow this guide

+
+
+

There is one thing that initialize-template can not do automatically: the gitlab token creation.

+
+
+

The creation of the GitLab Group and Project will require a private GitLab token which has to be created manually. The token can be obtained like this:

+
+
+
    +
  1. +

    Go to your Profile in Gitlab

    +
  2. +
+
+
+
+500 +
+
+
+
    +
  1. +

    Next click on the pen icon

    +
  2. +
+
+
+
+500 +
+
+
+
    +
  1. +

    On the left menu choose Access Tokens and put token name and check fields like below

    +
  2. +
+
+
+
+600 +
+
+
+
    +
  1. +

    Click "Create personal access token", you should receive notification about created token and token string. Copy the token string.

    +
  2. +
+
+
+
+600 +
+
+
+
+
+

==

+
+
+

The GitLab API user needs to have API access and the rights to create a new group. To set this permission follow the next steps: +== ==

+
+
+
    +
  1. +

    Enter the Admin control panel

    +
  2. +
  3. +

    Select 'Users'

    +
  4. +
  5. +

    Select the user(s) in question and click 'Edit'

    +
  6. +
  7. +

    Scroll down to 'Access' and un-tick 'Can Create Group'

    +
  8. +
+
+
+
+
+

Build/Deployment Requirements

+
+
+

The My Thai Star CICD pipelines will create a docker image and then the deployment pipelines will use it in order to deploy the application. As Production Line do not include a docker daemon, you need an additional server to do it. Those server needs:

+
+
+ +
+
+
+
+

How to run it

+
+ +
+
+
+

==

+
+
+

If Jenkins needs to install plugins, a restart will be performed. +So please make sure, that nothing important is running. +== ==

+
+
+
+
+

==

+
+
+

We have job-parameters inside the template Jenkinsfile that will only be active if Jenkins has run the job at least once! +== ==

+
+
+
+
+

Setup template job in Jenkins

+
+
+

The guide on how to add a template to your Jenkins can be found in the root directory of the template repository: https://github.com/devonfw/production-line.git

+
+
+
+
+

Execute the Jenkins job in your Jenkins

+
+
+
    +
  • +

    Go to the Jenkins job.

    +
  • +
  • +

    Execute job.

    +
  • +
  • +

    It will try to configure and setup the PL components such as Jenkins/Gitlab and Nexus.

    +
  • +
+
+
+
+
+

==

+
+
+

If a restart was needed, you need to trigger the job again! +== ==

+
+
+
    +
  • +

    The job should now show the required parameters, you only need to change the GITLAB PRIVATE TOKEN that you should have generated in the prerequisite section

    +
  • +
+
+
+
+600 +
+
+
+

When everything is "green" the template is done and you can have a look in the created "MTS" folder in Jenkins.

+
+
+
+
+

==

+
+
+

It will take a few minutes to clone the official MTS repository to the internal Gitlab. So you need to wait before executing the build jobs at the first time. +== ==

+
+
+
+
+

== Build Jobs

+
+
+

You can now execute the build for the frontend and also the backend. They do not require any parameters to run. The expected result is, that both jobs can run without any errors. They will build, test and deploy the artifacts to Nexus3.

+
+
+
+
+

== Deployment Jobs

+
+
+

All deployment jobs have several parameters configured in their Jenkinsfile. Unfortunately, Jenkins does not pick them up immediately, so you need to execute the job once, by pressing the "Build now" button. +The run should fail quite fast and once you refresh the page, the "Build now" button should have changed to "Build with Parameters". If you now click on the button you should see the parameters below:

+
+
+
+Jenkins Deployment Parameters +
+
+
+

You need to set the following parameters in order to get it running:

+
+
+
Required Parameters
+

|== == == == == == == == == == +| Parameter | Description +| registryUrl | The docker registry URL where image is stored. +| registryCredentialsId | The nexus credentials to access to the docker registry. +| VERSION | The version of the image that was built in the build jobs. For example "1.12.3-SNAPSHOT". +| dockerNetwork | The docker network where the container will be deployed. +|== == == == == == == == == ==

+
+
+

Also, the reverse proxy deployment has two more parameters:

+
+
+
Reverse Proxy extra parameters
+

|== == == == == == == == == == +| Parameter | Description +| buildReverseProxy | If true, it will build a new reverse proxy docker image and then deploy that image. +| port | The port where the application will be listening. It’s a host port, not a container port. +|== == == == == == == == == ==

+
+
+
+
+

==

+
+
+

You can deploy multiple versions of My Thai Star in the same machine by changing the docker network in all deployments and the port in the reverse proxy deployment. +== ==

+
+
+
+
+

==

+
+
+

You must choose the same docker network for all deployments +== ==

+
+
+
+
+

==

+
+
+

You need to deploy the angular and java applications before the reverse proxy. Also, the first you need to check the buildReverseProxy parameter in order to create the reverse proxy image and then deploy the container. +== ==

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/production-line/1.0/devon4j-pl.html b/docs/production-line/1.0/devon4j-pl.html new file mode 100644 index 00000000..fa95c4fb --- /dev/null +++ b/docs/production-line/1.0/devon4j-pl.html @@ -0,0 +1,513 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+
+400 +
+
+
+
+400 +
+
+
+

devon4j Template for Production Line

+
+ +
+
+
+

Overview

+
+
+

This template will configure your PL instance to have a 'ready to use' devon4j template. It can be used as a starting point for your Java projects.
+This includes CICD files for a devonfw technology stack with configuration for:

+
+
+
    +
  • +

    docker or openshift deployment

    +
  • +
  • +

    pushing artifacts to nexus3

    +
  • +
+
+
+
+
+

Prerequisites

+
+
+

To be able to run Jenkins devon4j job under ProductionLine you need to configure below settings in Jenkins and Gitlab

+
+
+ +
+
+
+500 +
+
+
+

Next click on the pen icon

+
+
+
+500 +
+
+
+

On the left menu choose Access Tokens and put token name and check fields like below

+
+
+
+600 +
+
+
+

Click "Create personal access token", you should receive notification about created token and token string. Copy the token string.

+
+
+
+800 +
+
+
+

The GitLab API user needs to have API access and the rights to create a new group. To set this permission follow the next steps:

+
+
+
    +
  • +

    Enter the Admin control panel

    +
  • +
  • +

    Select 'Users'

    +
  • +
  • +

    Select the user(s) in question and click 'Edit'

    +
  • +
  • +

    Scroll down to 'Access' and enable 'Can Create Group'

    +
  • +
+
+
+
+
+

How to insert the Template

+
+
+

In order to add the template, you can follow the guide.

+
+
+
+
+

How to run the Template

+
+
+
    +
  • +

    Build the job with parameters:

    +
    +
      +
    • +

      PROJECT_NAME: The project name.

      +
    • +
    • +

      PROJECT_SUFFIX: The project name suffix. As your project can have multiple assets (backend, frontend, middleware…​), you can define a suffix in order to identify each one with a different name

      +
    • +
    • +

      DB_TYPE: The type of the database. Possible values: h2|postgresql|mysql|mariadb|oracle|hana|db2

      +
    • +
    • +

      GROUP_ID: The group id of the project.

      +
    • +
    • +

      GITLAB_USER_PRIVATE_TOKEN: Private Token of a Production Line Gitlab User that can be used to create repositories. Created as prerequisite, you only need to add it as credential with GitLab API token Kind.

      +
    • +
    • +

      GITLAB_CREATE_GROUP_NAME: Name of the GitLab group. The repository will be create inside this group.

      +
    • +
    • +

      GITLAB_CREATE_PROJECT_DESCRIPTION: Description of the repository.

      +
    • +
    • +

      DEPLOY: Choose the environment where you want to deploy. The deployment could be none, docker or openshift. If docker or openshift were selected, extra parameters will be required in their dedicated steps:

      +
      +
        +
      • +

        Configuring DOCKER:

        +
        +
          +
        • +

          DOCKER_URL: The remote docker daemon URL

          +
        • +
        • +

          DOCKER_CERT: Credentials to access docker daemon. If the daemon is not secure, you can leave this empty.

          +
        • +
        +
        +
      • +
      • +

        Configuring Openshift:

        +
        +
          +
        • +

          OC_NAME: Openshift cluster name. It was defined in the Openshift Configuration template

          +
        • +
        • +

          DOCKER_REGISTRY_CREDENTIALS: Nexus docker registry user credentials. It was created in the initialize instance pipeline. The default username is nexus-api, the default password is the same as your service account.

          +
        • +
        +
        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+

After executing this template, you will have:

+
+
+
    +
  • +

    A new GitLab repository.

    +
    +
      +
    • +

      The repository group is the value passed in the GITLAB_CREATE_GROUP_NAME parameter.

      +
    • +
    • +

      The repository name is PROJECT_NAME-PROJECT_SUFFIX

      +
    • +
    • +

      The repository contains a clean devon4j project.

      +
    • +
    • +

      The repository contains a Jenkinsfile.

      +
    • +
    • +

      The repository has already setted the jenkins webhook.

      +
    • +
    • +

      The repository protects the branches master and release/* to only maintainers to push. Develop is the default branch.

      +
    • +
    +
    +
  • +
  • +

    A new multibranch pipeline in jenkins inside the folder PROJECT_NAME with the name PROJECT_NAME-PROJECT_SUFFIX. As the webhook is already configured, it should be executed on every push to GitLab repository.

    +
  • +
  • +

    If you choose docker for deployment, your Jenkinsfile should contain two extra stages in order to build and deploy the docker image. Also, the repository should contain the Dockerfiles to create the docker images.

    +
  • +
  • +

    If you choose OpenShift for deployment, three new applications should be created in your OpenShift. Those applications represent three environments of your application: develop, uat and stage. Also, your Jenkinsfile should contain three extra stages in order to build and deploy the docker image and check that the pod is running without errors. Also, the repository should contain the Dockerfiles to create the docker images.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/production-line/1.0/devon4net.html b/docs/production-line/1.0/devon4net.html new file mode 100644 index 00000000..eddce84f --- /dev/null +++ b/docs/production-line/1.0/devon4net.html @@ -0,0 +1,513 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+
+400 +
+
+
+
+400 +
+
+
+

devon4net Template for Production Line

+
+ +
+
+
+

Overview

+
+
+

This template will configure your PL instance to have a 'ready to use' devon4net template. It can be used as a starting point for your .NET projects.
+This includes CICD files for a devonfw technology stack with configuration for:

+
+
+
    +
  • +

    ProductionLine instance

    +
  • +
  • +

    docker or openshift deployment

    +
  • +
  • +

    pushing artifacts to nexus3

    +
  • +
+
+
+
+
+

Prerequisites

+
+
+

To be able to run Jenkins Node job under ProductionLine you need to configure below settings in Jenkins and Gitlab

+
+
+ +
+
+
+500 +
+
+
+

Next click on the pen icon

+
+
+
+500 +
+
+
+

On the left menu choose Access Tokens and put token name and check fields like below

+
+
+
+600 +
+
+
+

Click "Create personal access token", you should receive notification about created token and token string. Copy the token string.

+
+
+
+600 +
+
+
+

The GitLab API user needs to have API access and the rights to create a new group. To set this permission follow the next steps:

+
+
+
    +
  • +

    Enter the Admin control panel

    +
  • +
  • +

    Select 'Users'

    +
  • +
  • +

    Select the user(s) in question and click 'Edit'

    +
  • +
  • +

    Scroll down to 'Access' and un-tick 'Can Create Group'

    +
  • +
+
+
+
+
+

How to insert the Template

+
+
+

In order to add the template, you can follow the guide.

+
+
+
+
+

How to run the Template

+
+
+
    +
  • +

    Build the job with parameters:

    +
    +
      +
    • +

      PROJECT_NAME: The project name.

      +
    • +
    • +

      PROJECT_SUFFIX: The project name suffix. As your project can have multiple assets (backend, frontend, middleware…​), you can define a suffix in order to identify each one with a different name

      +
    • +
    • +

      GROUP_ID: The group id of the project.

      +
    • +
    • +

      GITLAB_USER_PRIVATE_TOKEN: Private Token of a Production Line Gitlab User that can be used to create repositories. Created as prerequisite, you only need to add it as credential with GitLab API token Kind.

      +
    • +
    • +

      GITLAB_CREATE_GROUP_NAME: Name of the GitLab group. The repository will be create inside this group.

      +
    • +
    • +

      GITLAB_CREATE_PROJECT_DESCRIPTION: Description of the repository.

      +
    • +
    • +

      DEPLOY: Choose the environment where you want to deploy. The deployment could be none, docker or openshift. If docker or openshift were selected, extra parameters will be required in their dedicated steps:

      +
      +
        +
      • +

        Configuring DOCKER:

        +
        +
          +
        • +

          DOCKER_URL: The remote docker daemon URL

          +
        • +
        • +

          DOCKER_CERT: Credentials to access docker daemon. If the daemon is not secure, you can leave this empty.

          +
        • +
        +
        +
      • +
      • +

        Configuring Openshift:

        +
        +
          +
        • +

          OC_NAME: Openshift cluster name. It was defined in the Openshift Configuration template

          +
        • +
        • +

          DOCKER_REGISTRY_CREDENTIALS: Nexus docker registry user credentials. It was created in the initialize instance pipeline. The default username is nexus-api, the default password is the same as your service account.

          +
        • +
        +
        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+

After executing this template, you will have:

+
+
+
    +
  • +

    A new GitLab repository.

    +
    +
      +
    • +

      The repository group is the value passed in the GITLAB_CREATE_GROUP_NAME parameter.

      +
    • +
    • +

      The repository name is PROJECT_NAME-PROJECT_SUFFIX

      +
    • +
    • +

      The repository contains a clean devon4net project.

      +
    • +
    • +

      The repository contains a Jenkinsfile.

      +
    • +
    • +

      The repository has already configured the jenkins webhook.

      +
    • +
    • +

      The repository protects the branches master and release/* to only maintainers to push. Develop is the default branch.

      +
    • +
    +
    +
  • +
  • +

    A new multibranch pipeline in jenkins inside the folder PROJECT_NAME with the name PROJECT_NAME-PROJECT_SUFFIX. As the webhook is already configured, it should be executed on every push to GitLab repository.

    +
  • +
  • +

    If you choose docker for deployment, your Jenkinsfile should contain two extra stages in order to build and deploy the docker image. Also, the repository should contain the Dockerfiles to create the docker images.

    +
  • +
  • +

    If you choose OpenShift for deployment, three new applications should be created in your OpenShift. Those applications represent three environments of your application: develop, uat and stage. Also, your Jenkinsfile should contain three extra stages in order to build and deploy the docker image and check that the pod is running without errors. Also, the repository should contain the Dockerfiles to create the docker images.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/production-line/1.0/devon4ng-pl.html b/docs/production-line/1.0/devon4ng-pl.html new file mode 100644 index 00000000..0d2c7a00 --- /dev/null +++ b/docs/production-line/1.0/devon4ng-pl.html @@ -0,0 +1,513 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+
+400 +
+
+
+
+400 +
+
+
+

devon4ng Template for Production Line

+
+ +
+
+
+

Overview

+
+
+

This template will configure your PL instance to have a 'ready to use' devon4ng template. It can be used as a starting point for your Angular projects.
+This includes CICD files for a devonfw technology stack with configuration for:

+
+
+
    +
  • +

    ProductionLine instance

    +
  • +
  • +

    docker or openshift deployment

    +
  • +
  • +

    pushing artifacts to nexus3

    +
  • +
+
+
+
+
+

Prerequisites

+
+
+

To be able to run the Jenkins Angular job under ProductionLine you need to configure the settings below in Jenkins and Gitlab

+
+
+ +
+
+
+500 +
+
+
+

Next click on the pen icon

+
+
+
+500 +
+
+
+

On the left menu choose Access Tokens and put token name and check fields like below

+
+
+
+600 +
+
+
+

Click "Create personal access token", you should receive notification about created token and token string. Copy the token string.

+
+
+
+600 +
+
+
+

The GitLab API user needs to have API access and the rights to create a new group. To set this permission follow the next steps:

+
+
+
    +
  • +

    Enter the Admin control panel

    +
  • +
  • +

    Select 'Users'

    +
  • +
  • +

    Select the user(s) in question and click 'Edit'

    +
  • +
  • +

    Scroll down to 'Access' and un-tick 'Can Create Group'

    +
  • +
+
+
+
+
+

How to insert the Template

+
+
+

In order to add the template, you can follow the guide.

+
+
+
+
+

How to run the Template

+
+
+
    +
  • +

    Build the job with parameters:

    +
    +
      +
    • +

      PROJECT_NAME: The project name.

      +
    • +
    • +

      PROJECT_SUFFIX: The project name suffix. As your project can have multiple assets (backend, frontend, middleware…​), you can define a suffix in order to identify each one with a different name

      +
    • +
    • +

      GROUP_ID: The group id of the project.

      +
    • +
    • +

      GITLAB_USER_PRIVATE_TOKEN: Private Token of a Production Line Gitlab User that can be used to create repositories. Created as prerequisite, you only need to add it as credential with GitLab API token Kind.

      +
    • +
    • +

      GITLAB_CREATE_GROUP_NAME: Name of the GitLab group. The repository will be created inside this group.

      +
    • +
    • +

      GITLAB_CREATE_PROJECT_DESCRIPTION: Description of the repository.

      +
    • +
    • +

      DEPLOY: Choose the environment where you want to deploy. The deployment could be none, docker or openshift. If docker or openshift were selected, extra parameters will be required in their dedicated steps:

      +
      +
        +
      • +

        Configuring DOCKER:

        +
        +
          +
        • +

          DOCKER_URL: The remote docker daemon URL

          +
        • +
        • +

          DOCKER_CERT: Credentials to access docker daemon. If the daemon is not secure, you can leave this empty.

          +
        • +
        +
        +
      • +
      • +

        Configuring Openshift:

        +
        +
          +
        • +

          OC_NAME: Openshift cluster name. It was defined in the Openshift Configuration template

          +
        • +
        • +

          DOCKER_REGISTRY_CREDENTIALS: Nexus docker registry user credentials. It was created in the initialize instance pipeline. The default username is nexus-api, the default password is the same as your service account.

          +
        • +
        +
        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+

After executing this template, you will have:

+
+
+
    +
  • +

    A new GitLab repository.

    +
    +
      +
    • +

      The repository group is the value passed in the GITLAB_CREATE_GROUP_NAME parameter.

      +
    • +
    • +

      The repository name is PROJECT_NAME-PROJECT_SUFFIX

      +
    • +
    • +

      The repository contains a clean devon4ng project.

      +
    • +
    • +

      The repository contains a Jenkinsfile.

      +
    • +
    • +

      The repository has already configured the jenkins webhook.

      +
    • +
    • +

      The repository protects the branches master and release/* so that only maintainers can push. Develop is the default branch.

      +
    • +
    +
    +
  • +
  • +

    A new multibranch pipeline in jenkins inside the folder PROJECT_NAME with the name PROJECT_NAME-PROJECT_SUFFIX. As the webhook is already configured, it should be executed on every push to GitLab repository.

    +
  • +
  • +

    If you choose docker for deployment, your Jenkinsfile should contain two extra stages in order to build and deploy the docker image. Also, the repository should contain the Dockerfiles to create the docker images.

    +
  • +
  • +

    If you choose OpenShift for deployment, three new applications should be created in your OpenShift. Those applications represent three environments of your application: develop, uat and stage. Also, your Jenkinsfile should contain three extra stages in order to build and deploy the docker image and check that the pod is running without errors. Also, the repository should contain the Dockerfiles to create the docker images.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/production-line/1.0/devon4node-pl.html b/docs/production-line/1.0/devon4node-pl.html new file mode 100644 index 00000000..d5415249 --- /dev/null +++ b/docs/production-line/1.0/devon4node-pl.html @@ -0,0 +1,513 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+
+400 +
+
+
+
+400 +
+
+
+

devon4node Template for Production Line

+
+ +
+
+
+

Overview

+
+
+

This template will configure your PL instance to have a 'ready to use' devon4node template. It can be used as a starting point for your Node projects.
+This includes CICD files for a devonfw technology stack with configuration for:

+
+
+
    +
  • +

    ProductionLine instance

    +
  • +
  • +

    docker or openshift deployment

    +
  • +
  • +

    pushing artifacts to nexus3

    +
  • +
+
+
+
+
+

Prerequisites

+
+
+

To be able to run the Jenkins Node job under ProductionLine you need to configure the settings below in Jenkins and Gitlab

+
+
+ +
+
+
+500 +
+
+
+

Next click on the pen icon

+
+
+
+500 +
+
+
+

On the left menu choose Access Tokens and put token name and check fields like below

+
+
+
+600 +
+
+
+

Click "Create personal access token", you should receive notification about created token and token string. Copy the token string.

+
+
+
+600 +
+
+
+

The GitLab API user needs to have API access and the rights to create a new group. To set this permission follow the next steps:

+
+
+
    +
  • +

    Enter the Admin control panel

    +
  • +
  • +

    Select 'Users'

    +
  • +
  • +

    Select the user(s) in question and click 'Edit'

    +
  • +
  • +

    Scroll down to 'Access' and un-tick 'Can Create Group'

    +
  • +
+
+
+
+
+

How to insert the Template

+
+
+

In order to add the template, you can follow the guide.

+
+
+
+
+

How to run the Template

+
+
+
    +
  • +

    Build the job with parameters:

    +
    +
      +
    • +

      PROJECT_NAME: The project name.

      +
    • +
    • +

      PROJECT_SUFFIX: The project name suffix. As your project can have multiple assets (backend, frontend, middleware…​), you can define a suffix in order to identify each one with a different name

      +
    • +
    • +

      GROUP_ID: The group id of the project.

      +
    • +
    • +

      GITLAB_USER_PRIVATE_TOKEN: Private Token of a Production Line Gitlab User that can be used to create repositories. Created as prerequisite, you only need to add it as credential with GitLab API token Kind.

      +
    • +
    • +

      GITLAB_CREATE_GROUP_NAME: Name of the GitLab group. The repository will be created inside this group.

      +
    • +
    • +

      GITLAB_CREATE_PROJECT_DESCRIPTION: Description of the repository.

      +
    • +
    • +

      DEPLOY: Choose the environment where you want to deploy. The deployment could be none, docker or openshift. If docker or openshift were selected, extra parameters will be required in their dedicated steps:

      +
      +
        +
      • +

        Configuring DOCKER:

        +
        +
          +
        • +

          DOCKER_URL: The remote docker daemon URL

          +
        • +
        • +

          DOCKER_CERT: Credentials to access docker daemon. If the daemon is not secure, you can leave this empty.

          +
        • +
        +
        +
      • +
      • +

        Configuring Openshift:

        +
        +
          +
        • +

          OC_NAME: Openshift cluster name. It was defined in the Openshift Configuration template

          +
        • +
        • +

          DOCKER_REGISTRY_CREDENTIALS: Nexus docker registry user credentials. It was created in the initialize instance pipeline. The default username is nexus-api, the default password is the same as your service account.

          +
        • +
        +
        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+

After executing this template, you will have:

+
+
+
    +
  • +

    A new GitLab repository.

    +
    +
      +
    • +

      The repository group is the value passed in the GITLAB_CREATE_GROUP_NAME parameter.

      +
    • +
    • +

      The repository name is PROJECT_NAME-PROJECT_SUFFIX

      +
    • +
    • +

      The repository contains a clean devon4node project.

      +
    • +
    • +

      The repository contains a Jenkinsfile.

      +
    • +
    • +

      The repository has already configured the jenkins webhook.

      +
    • +
    • +

      The repository protects the branches master and release/* so that only maintainers can push. Develop is the default branch.

      +
    • +
    +
    +
  • +
  • +

    A new multibranch pipeline in jenkins inside the folder PROJECT_NAME with the name PROJECT_NAME-PROJECT_SUFFIX. As the webhook is already configured, it should be executed on every push to GitLab repository.

    +
  • +
  • +

    If you choose docker for deployment, your Jenkinsfile should contain two extra stages in order to build and deploy the docker image. Also, the repository should contain the Dockerfiles to create the docker images.

    +
  • +
  • +

    If you choose OpenShift for deployment, three new applications should be created in your OpenShift. Those applications represent three environments of your application: develop, uat and stage. Also, your Jenkinsfile should contain three extra stages in order to build and deploy the docker image and check that the pod is running without errors. Also, the repository should contain the Dockerfiles to create the docker images.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/production-line/1.0/docker-configuration.html b/docs/production-line/1.0/docker-configuration.html new file mode 100644 index 00000000..8882cbc8 --- /dev/null +++ b/docs/production-line/1.0/docker-configuration.html @@ -0,0 +1,453 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Docker Configuration

+
+ +
+
+
+

Introduction

+
+
+

Docker is the most popular container technology. It allows you to build your application in an image and then deploy it into a container.

+
+
+
+
+

Overview

+
+
+

This template allows you to configure Jenkins in order to work with docker.

+
+
+

It will:

+
+
+
    +
  • +

    Add docker client as custom tool.

    +
  • +
  • +

    Configure docker to work with an external docker daemon.

    +
  • +
+
+
+
+
+

Prerequisites

+
+
+

In order to execute this template, you need the following plugins installed in your Jenkins:

+
+ +
+ + + + + +
+ + +The initialize instance template will install all plugins if you select 'Docker' or 'Docker+Openshift' in the installDeploymentPlugins parameter +
+
+
+
+
+

Template

+
+
+

This template will be automatically created in your jenkins after executing the Initialize_Instance template inside the UTILS folder with the name Docker_Configuration.

+
+
+

For manual creation see: How to add a Template

+
+
+ + + + + +
+ + +This template needs the devonfw Production Line Shared Lib +
+
+
+
+
+

Parameters

+
+
+

The only parameter required is remote docker daemon URL. Example: tcp://127.0.0.1:2367

+
+
+ + + + + +
+ + +You need to expose the docker daemon manually in your machine. Here you can find how to do it +
+
+
+ + + + + +
+ + +This configuration requires that the docker daemon has no security. It’s prepared for development environments; for production environments, please add security to your docker daemon. +
+
+
+
+
+

Execution

+
+
+
    +
  1. +

    Press the Build with Parameters button

    +
  2. +
  3. +

    Insert remote docker daemon URL.

    +
  4. +
  5. +

    Press the Build button.

    +
  6. +
  7. +

    Wait until the pipeline ends.

    +
  8. +
+
+
+
+docker configuration +
+
+
+
+docker configuration2 +
+
+
+

Then, you can see that the docker is configured and the remote docker daemon environment variable is set:

+
+
+
+docker env var +
+
+
+
+docker custom tool +
+
+
+

The environment variable is configured globally, if you want to use another remote docker daemon for a specific build, you can override the DOCKER_HOST environment variable in your job.

+
+
+

If the DOCKER_HOST is already configured globally, when you execute again this template the value will not be changed. You need to change the value manually at: Jenkins → Manage Jenkins → Configure System → Global properties

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/production-line/1.0/from-existing-devonfw.html b/docs/production-line/1.0/from-existing-devonfw.html new file mode 100644 index 00000000..b51f9b55 --- /dev/null +++ b/docs/production-line/1.0/from-existing-devonfw.html @@ -0,0 +1,484 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+
+400 +
+
+
+
+400 +
+
+
+

From existing devonfw Template for Production Line

+
+ +
+
+
+

Overview

+
+
+

From existing devonfw template is very similar to the devon4j, devon4ng, devon4net and devon4node templates. The main difference is that the from existing devonfw template will not create a new devonfw project; it takes an existing project from GitLab and then adds/creates everything needed in order to apply a CICD strategy to your project.

+
+
+
+
+

Prerequisites

+
+
+

To be able to run the Jenkins Node job under ProductionLine you need to configure the settings below in Jenkins and Gitlab

+
+
+
    +
  • +

    Jenkins

    +
    + +
    +
  • +
  • +

    Gitlab

    +
    +
      +
    • +

      Create a project and upload your current code. In order to start a new project in your local machine, you can use the devonfw-ide. The project must be a devon4j, devon4ng, devon4net or devon4node project.

      +
    • +
    • +

      Generate User Private Token
      +Go to your Profile in Gitlab

      +
    • +
    +
    +
  • +
+
+
+
+500 +
+
+
+

Next click on the pen icon

+
+
+
+500 +
+
+
+

On the left menu choose Access Tokens and put token name and check fields like below

+
+
+
+600 +
+
+
+

Click "Create personal access token", you should receive notification about created token and token string. Copy the token string.

+
+
+
+600 +
+
+
+

The GitLab API user needs to have API access and the rights to create a new group. To set this permission follow the next steps:

+
+
+
    +
  • +

    Enter the Admin control panel

    +
  • +
  • +

    Select 'Users'

    +
  • +
  • +

    Select the user(s) in question and click 'Edit'

    +
  • +
  • +

    Scroll down to 'Access' and un-tick 'Can Create Group'

    +
  • +
+
+
+
+
+

How to insert the Template

+
+
+

In order to add the template, you can follow the guide.

+
+
+
+
+

How to run the Template

+
+
+
    +
  • +

    Build the job with parameters:

    +
    +
      +
    • +

      REPOSITORY_URL: The internal repository URL. Without protocol. Example: gitlab-core:80/gitlab/mygroup/myproject-frontend.

      +
    • +
    • +

      GIT_BRANCH: The branch where you want to apply the CICD changes.

      +
    • +
    • +

      MERGE_STRATEGY: Choose the merge strategy for cicdgen. For more information see the CICDGEN merge documentation page

      +
    • +
    • +

      GITLAB_USER_PRIVATE_TOKEN: Private Token of a Production Line Gitlab User that can be used to create/update repositories. The token proprietary user must have admin rights in the repository. Created as prerequisite, you only need to add it as credential with GitLab API token Kind.

      +
    • +
    • +

      DEPLOY: Choose the environment where you want to deploy. The deployment could be none, docker or openshift. If docker or openshift were selected, extra parameters will be required in their dedicated steps:

      +
      +
        +
      • +

        Configuring DOCKER:

        +
        +
          +
        • +

          DOCKER_URL: The remote docker daemon URL

          +
        • +
        • +

          DOCKER_CERT: Credentials to access docker daemon. If the daemon is not secure, you can leave this empty.

          +
        • +
        +
        +
      • +
      • +

        Configuring Openshift:

        +
        +
          +
        • +

          OC_NAME: Openshift cluster name. It was defined in the Openshift Configuration template

          +
        • +
        • +

          DOCKER_REGISTRY_CREDENTIALS: Nexus docker registry user credentials. It was created in the initialize instance pipeline. The default username is nexus-api, the default password is the same as your service account.

          +
        • +
        +
        +
      • +
      +
      +
    • +
    +
    +
  • +
+
+
+

After executing this template, you will have:

+
+
+
    +
  • +

    Your GitLab project updated.

    +
    +
      +
    • +

      Added a Jenkinsfile with all CICD stages.

      +
    • +
    • +

      The repository is updated in order to have the jenkins webhook.

      +
    • +
    +
    +
  • +
  • +

    A new multibranch pipeline in jenkins inside the folder PROJECT_NAME with the name PROJECT_NAME-PROJECT_SUFFIX. As the webhook is already configured, it should be executed on every push to GitLab repository.

    +
  • +
  • +

    If you choose docker for deployment, your Jenkinsfile should contain two extra stages in order to build and deploy the docker image. Also, the repository should contain the Dockerfiles to create the docker images.

    +
  • +
  • +

    If you choose OpenShift for deployment, three new applications should be created in your OpenShift. Those applications represent three environments of your application: develop, uat and stage. Also, your Jenkinsfile should contain three extra stages in order to build and deploy the docker image and check that the pod is running without errors. Also, the repository should contain the Dockerfiles to create the docker images.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/production-line/1.0/how-to-add-a-template.html b/docs/production-line/1.0/how-to-add-a-template.html new file mode 100644 index 00000000..0b9a9258 --- /dev/null +++ b/docs/production-line/1.0/how-to-add-a-template.html @@ -0,0 +1,325 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

How to add a Template to your PL instance

+
+
+
    +
  • +

    Go to Jenkins.

    +
  • +
  • +

    On the upper left click on "New Element" to create a new Jenkins job.

    +
  • +
  • +

    Choose a name for the job such as "MTS-template-seed-job". The job type has to be "Pipeline". Click on OK.

    +
  • +
+
+
+
+newjenkinsjob +
+
+
+
    +
  • +

    Scroll down to the bottom of the job creation page where you will find the "Pipeline" section.

    +
    +
      +
    • +

      Switch to "Pipeline script from SCM".

      +
    • +
    • +

      Set "SCM" to "Git".

      +
    • +
    • +

      Set "Repository URL" to: https://github.com/devonfw/production-line.git

      +
    • +
    • +

      Credentials can be left empty, because the repository is public.

      +
    • +
    • +

      Set "Script Path" to the template that you want to use e.g. "devon4j-mts/Jenkinsfile".

      +
    • +
    +
    +
  • +
+
+
+
+pipelinesettings +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/production-line/1.0/index.html b/docs/production-line/1.0/index.html new file mode 100644 index 00000000..0d61e52b --- /dev/null +++ b/docs/production-line/1.0/index.html @@ -0,0 +1,368 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Production Line Templates

+
+
+

This repository contains a collection of templates that can be used inside a Production Line Jenkins to set up/configure and execute certain tasks.

+
+
+
+ + +
+

MrChecker

+
+
+ +
+
+
+
+

Samples

+
+ +
+
+
+

Troubleshooting

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/production-line/1.0/initialize-instance-manually.html b/docs/production-line/1.0/initialize-instance-manually.html new file mode 100644 index 00000000..5a4adb57 --- /dev/null +++ b/docs/production-line/1.0/initialize-instance-manually.html @@ -0,0 +1,606 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Initialize Instance Template for Production Line

+
+ +
+
+
+

Technical User Setup

+
+
+

In order to configure the services, we need technical users for the following components:

+
+
+
    +
  • +

    Gitlab

    +
  • +
  • +

    Nexus3

    +
  • +
  • +

    SonarQube

    +
  • +
+
+
+

The following sections describe how to configure the components to enable technical users and tokens.

+
+
+
+
+

Technical Gitlab User and settings

+
+
+

The creation of the GitLab Group and Project will require a private GitLab token which has to be created manually. The token can be obtained like this:

+
+
+
    +
  1. +

    Go to your Profile in Gitlab

    +
  2. +
+
+
+
+500 +
+
+
+
    +
  1. +

    Next click on the pen icon

    +
  2. +
+
+
+
+500 +
+
+
+
    +
  1. +

    On the left menu choose Access Tokens and put token name and check fields like below

    +
  2. +
+
+
+
+600 +
+
+
+
    +
  1. +

    Click "Create personal access token", you should receive notification about created token and token string. Copy the token string.

    +
  2. +
+
+
+
+600 +
+
+
+
+
+

==

+
+
+

The GitLab API user needs to have API access and the rights to create a new group. To set this permission follow the next steps:

+
+
+
    +
  1. +

    Enter the Admin control panel

    +
  2. +
  3. +

    Select 'Users'

    +
  4. +
  5. +

    Select the user(s) in question and click 'Edit'

    +
  6. +
  7. +

    Scroll down to 'Access' and un-tick 'Can Create Group'

    +
  8. +
+
+
+
+
+

Technical Nexus3 user and settings

+
+
+

Nexus3 is used to store build artifacts such as the frontend and the backend. In the future it might also be used to store docker images of MTS.

+
+
+
+
+

== Create the technical Nexus User

+
+
+
    +
  1. +

    The nexus3-api user should be created in section Administration

    +
  2. +
+
+
+
+600 +
+
+
+
    +
  1. +

    New user should have added roles: Admins, nx-admins

    +
  2. +
+
+
+
+600 +
+
+
+
+
+

== Add it as credential in Jenkins

+
+
+

Credentials 'nexus-api' user should be added to Jenkins +Jenkins → Credentials → System → Global credentials (unrestricted) → Add Credentials

+
+
+
+800 +
+
+
+
+
+

== Add the user to maven global settings in Jenkins:

+
+
+
    +
  1. +

    Jenkins → Settings → Managed Files → Edit Global Maven Settings XML

    +
  2. +
+
+
+
+600 +
+
+
+
    +
  1. +

    Add the credential to the settings xml, use the ID "pl-nexus"

    +
  2. +
+
+
+
+600 +
+
+
+
+
+

Jenkins Preparation

+
+
+
    +
  • +

    Install required plugins:
    +HTTP Request Plugin
    +Allure Jenkins Plugin

    +
  • +
  • +

    In Jenkins Global Tool Configuration configure Allure Commandline and Maven like

    +
  • +
+
+
+
+500 +
+
+
+
+
+

== Sonarqube Server configuration in Jenkins

+
+
+

SonarQube must be configured in Jenkins, so that we can easily use the SonarQube server in our builds.

+
+
+

Go to Jenkins → Settings → Configuration → SonarQube Servers

+
+
+

Add the following data

+
+
+
+600 +
+
+
+
+
+

Technical SonarQube user and settings

+
+ +
+
+
+

== User Token for SonarQube

+
+
+
    +
  • +

    Go to SonarQube.

    +
  • +
  • +

    Go to your account.

    +
  • +
+
+
+
+600 +
+
+
+
    +
  • +

    Go to Security tab.

    +
  • +
  • +

    Generate the token.

    +
  • +
+
+
+
+
+

== Install SonarQube plugins from Marketplace

+
+
+

In order to analyze devonfw projects in SonarQube properly, you need to install manually some plugins. To do that you only need to open your SonarQube website and go to Administration → Marketplace. Then, you need to install the following plugins:

+
+
+
    +
  • +

    Checkstyle

    +
  • +
  • +

    Cobertura

    +
  • +
+
+
+
+
+

== SonarQube Webhook to inform Jenkins

+
+
+

A part of the Build Job will ask SonarQube if the quality gate has been passed. For this step a so called "webhook" has to be configured in SonarQube. To do so,

+
+
+
    +
  1. +

    Go to SonarQube

    +
  2. +
  3. +

    Select 'Administration'

    +
  4. +
  5. +

    Select 'Configuration', 'General Settings' and select 'Webhooks' in the left menu

    +
  6. +
  7. +

    Add the following webhook

    +
  8. +
+
+
+
+SonarqubeWebhook +
+
+
+
    +
  1. +

    Press 'Save'

    +
  2. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/production-line/1.0/initialize-instance.html b/docs/production-line/1.0/initialize-instance.html new file mode 100644 index 00000000..d303d860 --- /dev/null +++ b/docs/production-line/1.0/initialize-instance.html @@ -0,0 +1,708 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Initialize Instance Template for Production Line

+
+ +
+
+
+

Introduction

+
+
+

Production Line Templates allow you to create/configure certain tasks. In order to work properly, Production Line Templates need some previous configuration. You can do it manually or by executing the Initialize Instance Template.

+
+
+
+
+

Prerequisites

+
+
+

In order to be able to start this template, you need:

+
+
+ +
+
+

Production Line provides by default the Shared Lib and the plugins, so no actions are required. The only thing that you need to do manually is the creation of the service account.

+
+
+

In order to create the service account you need:

+
+
+
    +
  1. +

    Open the LAM

    +
  2. +
  3. +

    Press the New User button

    +
    +
    +create account +
    +
    +
  4. +
  5. +

    Enter the required parameters

    +
    +
    +create account2 +
    +
    +
  6. +
  7. +

    Change to Unix tab and enter the required parameters

    +
    +
    +create account3 +
    +
    +
    +

    The user name will be used later in order to login. As this user will do some configuration changes, its primary group must be admins.

    +
    +
  8. +
  9. +

    Set a password for the user.

    +
    +
    +create account4 +
    +
    +
  10. +
  11. +

    Press the Save button

    +
    +
    +create account5 +
    +
    +
  12. +
+
+
+
+
+

Template

+
+
+

In order to execute this template, you need to add it into Jenkins manually. In order to do that, you can follow this guide

+
+
+
+
+

Parameters

+
+
+

The required parameters are:

+
+
+
    +
  • +

    svcaccount: The service account created as prerequisite. It must be added as a Jenkins credential.

    +
  • +
  • +

    installDeploymentPlugins: With this parameter you can install extra plugins into Jenkins. Also, you can add extra template utils.

    +
  • +
+
+
+
+
+

Execution

+
+
+
    +
  1. +

    Press the Build with Parameters button

    +
  2. +
  3. +

    Insert the parameters.

    +
  4. +
  5. +

    If the service account is not added as credential, please add a new entry.

    +
  6. +
  7. +

    Press the Build button.

    +
  8. +
  9. +

    Wait until the pipeline ends.

    +
  10. +
+
+
+ + + + + +
+ + +if any plugin is installed, Jenkins will be restarted and the pipeline will fail. You need to execute it again with the same parameters. +
+
+
+
+initialize instance +
+
+
+
+initialize instance2 +
+
+
+
+
+

The result

+
+
+
    +
  • +

    Install plugins stage

    +
    +

    In this stage the following plugins will be installed:

    +
    +
    + +
    +
  • +
  • +

    Configure SonarQube stage

    +
    +

    This stage is the responsible of configure the Jenkins-SonarQube integration. It will:

    +
    +
    +
      +
    • +

      Generate a SonarQube API token for the user Admin

      +
    • +
    • +

      Register the token in Jenkins as credential with the id sonar-token

      +
    • +
    • +

      Add the SonarQube server in Jenkins → Manage Jenkins → Configure System → SonarQube servers. The values used are:

      +
      +
        +
      • +

        Name: SonarQube

        +
      • +
      • +

        Server URL: http://sonarqube-core:9000/sonarqube (default Production Line SonarQube URL)

        +
      • +
      • +

        Server authentication token: sonar-token (generated in the previous step)

        +
      • +
      +
      +
    • +
    • +

      Add a webhook in SonarQube:

      +
      + +
      +
    • +
    • +

      Install the following SonarQube plugins:

      +
      +
        +
      • +

        java

        +
      • +
      • +

        javascript

        +
      • +
      • +

        typescript

        +
      • +
      • +

        csharp

        +
      • +
      • +

        web

        +
      • +
      • +

        cssfamily

        +
      • +
      • +

        jacoco

        +
      • +
      • +

        checkstyle

        +
      • +
      • +

        cobertura

        +
      • +
      • +

        smells

        +
      • +
      • +

        findbugs

        +
      • +
      • +

        scmgit

        +
      • +
      • +

        ansible

        +
      • +
      • +

        sonar-dependency-check-plugin

        +
      • +
      +
      +
    • +
    • +

      Restart the SonarQube server in order to enable the plugins installed.

      +
    • +
    +
    +
  • +
  • +

    Create UTIL templates stage

    +
    +

    Some templates need certain plugins to be installed in Jenkins. If the plugins are not installed, the template will fail. In order to prevent this behaviour, we use initialize-instance to install all plugins required by other templates. Then, we create other templates that will use the plugins installed by initialize-instance. In this stage we create some template utils to configure Jenkins after all required plugins are installed. Those templates are:

    +
    +
    + +
    +
  • +
  • +

    Configure Nexus 3 stage

    +
    +

    This stage will configure the Production Line Nexus3

    +
    +
    +
      +
    • +

      Enable anonymous access

      +
    • +
    • +

      Add an internal user to download/upload docker images

      +
      +
        +
      • +

        username: nexus-api

        +
      • +
      • +

        password: The same as the service account created in LAM

        +
      • +
      +
      +
    • +
    • +

      Create the maven repositories: maven-central, maven-snapshots, maven-release, maven-plugin

      +
    • +
    • +

      Create the docker repository

      +
    • +
    • +

      Create the npmjs repositories: npmjs, npm-registry, npm

      +
    • +
    • +

      Create in Jenkins a new credential with the id nexus-api with the username and password created in nexus3

      +
    • +
    +
    +
  • +
  • +

    Configure Maven File stage

    +
    +

    This stage adds the nexus3 credentials created in the previous stage to the maven global configuration file with the id pl-nexus

    +
    +
    +
    +maven config +
    +
    +
  • +
+
+
+

Now, you are able to execute other templates adding them manually or using the Production Line Market Place.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/production-line/1.0/install-sonar-plugin.html b/docs/production-line/1.0/install-sonar-plugin.html new file mode 100644 index 00000000..4eef35d5 --- /dev/null +++ b/docs/production-line/1.0/install-sonar-plugin.html @@ -0,0 +1,396 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Install SonarQube Plugin

+
+ +
+
+
+

Introduction

+
+
+

SonarQube can extend its behaviour by adding plugins. Some of them can be installed by using the SonarQube Marketplace, others can be installed by copying the .jar into the SonarQube plugins folder.

+
+
+
+
+

Overview

+
+
+

This template will help you to install SonarQube plugins by copying the .jar into the SonarQube plugins folder. As you do not have access to the Production Line volumes, it will help you when you want to install a plugin that is not installed in the SonarQube Marketplace.

+
+
+

It will:

+
+
+
    +
  • +

    Download the .jar file from a provided URL.

    +
  • +
  • +

    Copy the .jar file to the plugins folder.

    +
  • +
  • +

    Restart the SonarQube server in order to enable the plugin.

    +
  • +
+
+
+ + + + + +
+ + +this template only works in a Production Line instance. +
+
+
+
+
+

Template

+
+
+

This template will be automatically created in your Jenkins after executing the Initialize_Instance template inside the UTILS folder with the name Install_SonarQube_Plugin.

+
+
+

For manual creation see: How to add a Template

+
+
+ + + + + +
+ + +This template needs the devonfw Production Line Shared Lib +
+
+
+
+
+

Parameters

+
+
+

The only parameter required is the plugin download URL.

+
+
+
+
+

Execution

+
+
+
    +
  1. +

    Press the Build with Parameters button

    +
  2. +
  3. +

    Insert the plugin download URL. Example: https://github.com/dependency-check/dependency-check-sonar-plugin/releases/download/1.2.6/sonar-dependency-check-plugin-1.2.6.jar

    +
  4. +
  5. +

    Press the Build button.

    +
  6. +
  7. +

    Wait until the pipeline ends.

    +
  8. +
+
+
+
+build install sonar plugin +
+
+
+
+build install sonar plugin2 +
+
+
+

After the execution, when the SonarQube is restarted, you can check that your plugin is installed visiting the Marketplace.

+
+
+
+sonar plugin +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/production-line/1.0/master-production-line.html b/docs/production-line/1.0/master-production-line.html new file mode 100644 index 00000000..6234d60e --- /dev/null +++ b/docs/production-line/1.0/master-production-line.html @@ -0,0 +1,273 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Production Line Templates

+
+
+

production-line

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/production-line/1.0/mrchecker.html b/docs/production-line/1.0/mrchecker.html new file mode 100644 index 00000000..46c4eb76 --- /dev/null +++ b/docs/production-line/1.0/mrchecker.html @@ -0,0 +1,521 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

MrChecker under ProductionLine

+
+
+
+500 +
+
+
+
+
+

Introduction

+
+
+

MrChecker is an end-to-end automation test framework written in Java. It has been released +by devonfw but it is not supported by the devonfw core team.

+
+
+

This framework consists of eight test modules:

+
+
+
    +
  • +

    Core test module

    +
  • +
  • +

    Selenium test module

    +
  • +
  • +

    WebAPI test module

    +
  • +
  • +

    Security test module

    +
  • +
  • +

    DataBase test module

    +
  • +
  • +

    Standalone test module

    +
  • +
  • +

    DevOps module

    +
  • +
+
+
+
+
+

Prerequisites

+
+
+

To be able to run a Jenkins MrChecker job under ProductionLine you need to configure the settings below in Jenkins and Gitlab

+
+
+
    +
  • +

    Jenkins

    +
    +
      +
    • +

      Add Jenkins Shared Library using documentation https://github.com/devonfw/production-line-shared-lib

      +
    • +
    • +

      Install required plugins:
      +HTTP Request Plugin
      +Allure Jenkins Plugin

      +
    • +
    • +

      In Jenkins Global Tool Configuration configure Allure Commandline and Maven like

      +
    • +
    +
    +
  • +
+
+
+
+500 +
+
+
+
+500 +
+
+
+
    +
  • +

    Gitlab

    +
    +
      +
    • +

      Generate User Private Token
      +Go to your Profile in Gitlab

      +
    • +
    +
    +
  • +
+
+
+
+500 +
+
+
+

Next click on the pen icon

+
+
+
+500 +
+
+
+

On the left menu choose Access Tokens and put token name and check fields like below

+
+
+
+600 +
+
+
+

Click "Create personal access token", you should receive notification about created token and token string. Copy the token string.

+
+
+
+600 +
+
+
+

The GitLab API user needs to have API access and the rights to create a new group. To set this permission follow the next steps:

+
+
+
    +
  • +

    Enter the Admin control panel

    +
  • +
  • +

    Select 'Users'

    +
  • +
  • +

    Select the user(s) in question and click 'Edit'

    +
  • +
  • +

    Scroll down to 'Access' and un-tick 'Can Create Group'

    +
  • +
+
+
+
+
+

How to insert the Template

+
+
+
    +
  • +

    Create new Jenkins Pipeline Job

    +
  • +
  • +

    In the job configuration check "This project is parametrized", choose "String parameter" and provide
    +Name: GITLAB_USER_PRIVATE_TOKEN
    +Default Value: <GITLAB_TOKEN_STRING_YOU_JUST_CREATED>

    +
  • +
  • +

    Add the template
    +The guide on how to add a template to your Jenkins can be found in the root directory of the template repository: https://github.com/devonfw/production-line.git

    +
  • +
  • +

    Save job configuration

    +
  • +
+
+
+
+
+

How to run the Template

+
+
+
    +
  • +

    Build the job

    +
  • +
  • +

    After job ends with success wait few seconds for repository import to Gitlab

    +
  • +
  • +

    As output of the build a new Jenkins Pipeline job is created with the name "MrChecker_Example_Tests"; also a new repository "Mrchecker" will be created in Gitlab

    +
  • +
  • +

    Build "MrChecker_Example_Tests" job

    +
  • +
+
+
+
+500 +
+
+
+
+
+

Expected Result

+
+
+
    +
  • +

    As output of this job Allure Report will be generated

    +
  • +
+
+
+
+500 +
+
+
+
+
+

Summary

+
+
+

Using this documentation you should be able to run the MrChecker test framework on ProductionLine.
+MrChecker offers two projects at your disposal:

+
+
+
    +
  • +

    First project "mrchecker-app-under-test/pipelines/CI/Jenkinsfile_ProductionLine.groovy" has all tests included in the project and is the default project used in "MrChecker_Example_Tests" job.

    +
  • +
  • +

    Second project "mrchecker-app-under-testboilerplate/pipelines/CI/Jenkinsfile_ProductionLine.groovy" here tests are not included, therefore if you choose to run "MrChecker_Example_Tests" job Allure report will be not generated.

    +
  • +
+
+
+

To change the project change script path at the bottom of the "MrChecker_Example_Tests" job.

+
+
+
+500 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/production-line/1.0/openshift-configuration.html b/docs/production-line/1.0/openshift-configuration.html new file mode 100644 index 00000000..0f739f9d --- /dev/null +++ b/docs/production-line/1.0/openshift-configuration.html @@ -0,0 +1,445 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Docker Configuration

+
+ +
+
+
+

Introduction

+
+
+

OpenShift is a docker container orchestrator built on top Kubernetes.

+
+
+
+
+

Overview

+
+
+

This template allows you to configure Jenkins in order to work with OpenShift.

+
+
+

It will:

+
+
+
    +
  • +

    Add OpenShift client as custom tool.

    +
  • +
  • +

    Configure an OpenShift cluster to work with.

    +
  • +
+
+
+
+
+

Prerequisites

+
+
+

In order to execute this template, you need the following plugins installed in your Jenkins:

+
+ +
+ + + + + +
+ + +The initialize instance template will install all plugins if you select Openshift or Docker+Openshift in the installDeploymentPlugins parameter +
+
+
+
+
+

Template

+
+
+

This template will be automatically created in your Jenkins after executing the Initialize_Instance template inside the UTILS folder with the name Openshift_Configuration.

+
+
+

For manual creation see: How to add a Template

+
+
+ + + + + +
+ + +This template needs the devonfw Production Line Shared Lib +
+
+
+
+
+

Parameters

+
+
+

The required parameters are:

+
+
+
    +
  • +

    ocName: The name of the OpenShift connection. You can define multiple OpenShift connections by changing the name.

    +
  • +
  • +

    ocUrl: The OpenShift URL.

    +
  • +
  • +

    ocProject: The OpenShift Project.

    +
  • +
  • +

    ocToken: The OpenShift token. In order to have a long-term token, this token should be a service account token.

    +
  • +
+
+
+
+
+

Execution

+
+
+
    +
  1. +

    Press the Build with Parameters button

    +
  2. +
  3. +

    Insert the parameters.

    +
  4. +
  5. +

    If the OpenShift token is not added as credential, please add a new entry.

    +
  6. +
  7. +

    Press the Build button.

    +
  8. +
  9. +

    Wait until the pipeline ends.

    +
  10. +
+
+
+ + + + + +
+ + +If a cluster already exists with the provided name, it will not modify anything. +
+
+
+
+openshift configuration +
+
+
+
+openshift configuration2 +
+
+
+
+openshift configuration3 +
+
+
+

You can add more clusters by executing the template again or in Jenkins → Manage Jenkins → Configure System

+
+
+
+openshift clusters +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/production-line/1.0/troubleshoot.html b/docs/production-line/1.0/troubleshoot.html new file mode 100644 index 00000000..4dd951bd --- /dev/null +++ b/docs/production-line/1.0/troubleshoot.html @@ -0,0 +1,335 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Troubleshooting

+
+ +
+
+
+

Introduction

+
+
+

In this section you can find the solution of the most common errors using the templates.

+
+
+
+
+

Template startup failed

+
+
+

Sometimes, when you execute any template you will see an error like:

+
+
+
+
+

==

+
+
+

org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed: +/home/pl/jobs/devon4j-mts_PL_Template/builds/8/libs/ProductionLineTemplateLib/src/com/capgemini/productionline/configuration/JenkinsConfiguration.groovy: 38: unable to resolve class ru.yandex.qatools.allure.jenkins.tools.AllureCommandlineInstaller + @ line 38, column 1. + import ru.yandex.qatools.allure.jenkins.tools.AllureCommandlineInstaller +== ==

+
+
+

In most of our templates we use the Production Line Shared Lib. In order to work, the Shared Lib needs some plugins installed in your Jenkins, so to solve this error you need to install those plugins manually using the Manage Plugins.

+
+
+

In this specific case the problem is that the Allure plugin is not installed. Just install it, restart Jenkins and execute the template again.

+
+
+
+
+

Build Now instead of Build with Parameters

+
+
+

Sometimes, when you go to execute a template, mostly the first time, the Build Now button is available instead of the Build with Parameters button. The root cause of this problem is that the parameters are defined in the Jenkinsfile and, as you never executed it before, Jenkins does not have that Jenkinsfile yet. For this reason it does not know the parameters required.

+
+
+

To solve this problem, you only need to press the Build Now button. Then, the execution will start and fail. It’s not a problem, as you did not enter any parameters. Now you only need to reload the page and the Build with Parameters button will be available.

+
+
+
+
+

Error at Install plugins stage

+
+
+

In some templates you can see the Install plugins stage. In this stage some plugins required for the template will be installed. In order to properly load the plugins, Jenkins needs to be restarted; for that reason the pipeline fails on that stage. It is not a bug or problem, so do not worry about that. You only need to wait until Jenkins is restarted and execute the template again.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/TODO-dsf-provisioning-dsf4openshift.html b/docs/shop-floor/1.0/TODO-dsf-provisioning-dsf4openshift.html new file mode 100644 index 00000000..e2130cac --- /dev/null +++ b/docs/shop-floor/1.0/TODO-dsf-provisioning-dsf4openshift.html @@ -0,0 +1,278 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

TODO

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/_images/images/configuration/gitlab-new-prject-form.jpg b/docs/shop-floor/1.0/_images/images/configuration/gitlab-new-prject-form.jpg new file mode 100644 index 00000000..427a1e08 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/gitlab-new-prject-form.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/gitlab-new-prject.jpg b/docs/shop-floor/1.0/_images/images/configuration/gitlab-new-prject.jpg new file mode 100644 index 00000000..ff235b79 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/gitlab-new-prject.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/jenkins-build-monitor-view-add.jpg b/docs/shop-floor/1.0/_images/images/configuration/jenkins-build-monitor-view-add.jpg new file mode 100644 index 00000000..5a4836cb Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/jenkins-build-monitor-view-add.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/jenkins-build-monitor-view-configuration.jpg b/docs/shop-floor/1.0/_images/images/configuration/jenkins-build-monitor-view-configuration.jpg new file mode 100644 index 00000000..a40c30ff Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/jenkins-build-monitor-view-configuration.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/jenkins-build-monitor-view-output-config.jpg b/docs/shop-floor/1.0/_images/images/configuration/jenkins-build-monitor-view-output-config.jpg new file mode 100644 index 00000000..dbe0b50e Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/jenkins-build-monitor-view-output-config.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/jenkins-build-monitor-view-output.jpg b/docs/shop-floor/1.0/_images/images/configuration/jenkins-build-monitor-view-output.jpg new file mode 100644 index 00000000..9ce1112b Binary files 
/dev/null and b/docs/shop-floor/1.0/_images/images/configuration/jenkins-build-monitor-view-output.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/jenkins-config-file-management.jpg b/docs/shop-floor/1.0/_images/images/configuration/jenkins-config-file-management.jpg new file mode 100644 index 00000000..b881b616 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/jenkins-config-file-management.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/jenkins-config-fp.png b/docs/shop-floor/1.0/_images/images/configuration/jenkins-config-fp.png new file mode 100644 index 00000000..183527a6 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/jenkins-config-fp.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/jenkins-edit-configuration-file.jpg b/docs/shop-floor/1.0/_images/images/configuration/jenkins-edit-configuration-file.jpg new file mode 100644 index 00000000..54bce934 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/jenkins-edit-configuration-file.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/jenkins-first-admin-user.png b/docs/shop-floor/1.0/_images/images/configuration/jenkins-first-admin-user.png new file mode 100644 index 00000000..c53a6c4e Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/jenkins-first-admin-user.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/jenkins-global-maven.png b/docs/shop-floor/1.0/_images/images/configuration/jenkins-global-maven.png new file mode 100644 index 00000000..ba365448 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/jenkins-global-maven.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/jenkins-mave-tool-name.jpg b/docs/shop-floor/1.0/_images/images/configuration/jenkins-mave-tool-name.jpg new file mode 100644 index 00000000..5ab16f30 Binary files 
/dev/null and b/docs/shop-floor/1.0/_images/images/configuration/jenkins-mave-tool-name.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/jenkins-maven-settings.png b/docs/shop-floor/1.0/_images/images/configuration/jenkins-maven-settings.png new file mode 100644 index 00000000..fc38a7c5 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/jenkins-maven-settings.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/jenkins-new-view.jpg b/docs/shop-floor/1.0/_images/images/configuration/jenkins-new-view.jpg new file mode 100644 index 00000000..a8193a4c Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/jenkins-new-view.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/jenkins-node-tool-name.jpg b/docs/shop-floor/1.0/_images/images/configuration/jenkins-node-tool-name.jpg new file mode 100644 index 00000000..02c5f21f Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/jenkins-node-tool-name.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/jenkins-url.png b/docs/shop-floor/1.0/_images/images/configuration/jenkins-url.png new file mode 100644 index 00000000..d4dee095 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/jenkins-url.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/jenkins-yarn-tool-name.jpg b/docs/shop-floor/1.0/_images/images/configuration/jenkins-yarn-tool-name.jpg new file mode 100644 index 00000000..bcdc71f4 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/jenkins-yarn-tool-name.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/jenkinsfile-cicd-activity-diagram.jpg b/docs/shop-floor/1.0/_images/images/configuration/jenkinsfile-cicd-activity-diagram.jpg new file mode 100644 index 00000000..10315158 Binary files /dev/null and 
b/docs/shop-floor/1.0/_images/images/configuration/jenkinsfile-cicd-activity-diagram.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/jenkinsfile-stages.jpg b/docs/shop-floor/1.0/_images/images/configuration/jenkinsfile-stages.jpg new file mode 100644 index 00000000..c1b51e30 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/jenkinsfile-stages.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/nexus-create-repository-form.png b/docs/shop-floor/1.0/_images/images/configuration/nexus-create-repository-form.png new file mode 100644 index 00000000..a3403eca Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/nexus-create-repository-form.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/nexus-create-repository.png b/docs/shop-floor/1.0/_images/images/configuration/nexus-create-repository.png new file mode 100644 index 00000000..7fafd300 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/nexus-create-repository.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/nexus-create-user-form.png b/docs/shop-floor/1.0/_images/images/configuration/nexus-create-user-form.png new file mode 100644 index 00000000..03d4b1ed Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/nexus-create-user-form.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/nexus-create-user.png b/docs/shop-floor/1.0/_images/images/configuration/nexus-create-user.png new file mode 100644 index 00000000..0c5750cb Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/nexus-create-user.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/nexus-jenkins-credentials-form.png b/docs/shop-floor/1.0/_images/images/configuration/nexus-jenkins-credentials-form.png new file mode 100644 index 00000000..865731f8 Binary files /dev/null and 
b/docs/shop-floor/1.0/_images/images/configuration/nexus-jenkins-credentials-form.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/nexus-jenkins-credentials.png b/docs/shop-floor/1.0/_images/images/configuration/nexus-jenkins-credentials.png new file mode 100644 index 00000000..2dc26974 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/nexus-jenkins-credentials.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/nexus-jenkins-global-maven-form.png b/docs/shop-floor/1.0/_images/images/configuration/nexus-jenkins-global-maven-form.png new file mode 100644 index 00000000..3b50093f Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/nexus-jenkins-global-maven-form.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/nexus-stored-artifacts.png b/docs/shop-floor/1.0/_images/images/configuration/nexus-stored-artifacts.png new file mode 100644 index 00000000..0d7bd156 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/nexus-stored-artifacts.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/openshift-deployments-actions.png b/docs/shop-floor/1.0/_images/images/configuration/openshift-deployments-actions.png new file mode 100644 index 00000000..0a6b9ac2 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/openshift-deployments-actions.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/openshift-deployments-menu.png b/docs/shop-floor/1.0/_images/images/configuration/openshift-deployments-menu.png new file mode 100644 index 00000000..585a4cde Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/openshift-deployments-menu.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/openshift-deployments-resource-limits.png b/docs/shop-floor/1.0/_images/images/configuration/openshift-deployments-resource-limits.png new file mode 
100644 index 00000000..39709b6b Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/openshift-deployments-resource-limits.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/openshift-deployments-yaml-resources.png b/docs/shop-floor/1.0/_images/images/configuration/openshift-deployments-yaml-resources.png new file mode 100644 index 00000000..89123d38 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/openshift-deployments-yaml-resources.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/openshift-jenkins-configure-environments-repo.jpg b/docs/shop-floor/1.0/_images/images/configuration/openshift-jenkins-configure-environments-repo.jpg new file mode 100644 index 00000000..82a87080 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/openshift-jenkins-configure-environments-repo.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/openshift-jenkins-plugin-name.jpg b/docs/shop-floor/1.0/_images/images/configuration/openshift-jenkins-plugin-name.jpg new file mode 100644 index 00000000..6206d30f Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/openshift-jenkins-plugin-name.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/openshift-jenkins-plugin.png b/docs/shop-floor/1.0/_images/images/configuration/openshift-jenkins-plugin.png new file mode 100644 index 00000000..3c973820 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/openshift-jenkins-plugin.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/openshift-namespace-name.jpg b/docs/shop-floor/1.0/_images/images/configuration/openshift-namespace-name.jpg new file mode 100644 index 00000000..3e2925d1 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/openshift-namespace-name.jpg differ diff --git 
a/docs/shop-floor/1.0/_images/images/configuration/openshift-secrets-menu.jpg b/docs/shop-floor/1.0/_images/images/configuration/openshift-secrets-menu.jpg new file mode 100644 index 00000000..d73675c2 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/openshift-secrets-menu.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/plugins-jenkins.png b/docs/shop-floor/1.0/_images/images/configuration/plugins-jenkins.png new file mode 100644 index 00000000..fa61a478 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/plugins-jenkins.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/sa-secret.png b/docs/shop-floor/1.0/_images/images/configuration/sa-secret.png new file mode 100644 index 00000000..8a4d915b Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/sa-secret.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/sa-secret2.png b/docs/shop-floor/1.0/_images/images/configuration/sa-secret2.png new file mode 100644 index 00000000..e8aeccef Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/sa-secret2.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/sonarqube-administration.png b/docs/shop-floor/1.0/_images/images/configuration/sonarqube-administration.png new file mode 100644 index 00000000..9f17519b Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/sonarqube-administration.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/sonarqube-jenkins-scanner.png b/docs/shop-floor/1.0/_images/images/configuration/sonarqube-jenkins-scanner.png new file mode 100644 index 00000000..510608fc Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/sonarqube-jenkins-scanner.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/sonarqube-jenkins-server.png 
b/docs/shop-floor/1.0/_images/images/configuration/sonarqube-jenkins-server.png new file mode 100644 index 00000000..ea657b26 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/sonarqube-jenkins-server.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/sonarqube-token.png b/docs/shop-floor/1.0/_images/images/configuration/sonarqube-token.png new file mode 100644 index 00000000..745dfde1 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/sonarqube-token.png differ diff --git a/docs/shop-floor/1.0/_images/images/configuration/sonarqube-webhook.png b/docs/shop-floor/1.0/_images/images/configuration/sonarqube-webhook.png new file mode 100644 index 00000000..6a895f77 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/configuration/sonarqube-webhook.png differ diff --git a/docs/shop-floor/1.0/_images/images/devonfw-shop-floor.jpg b/docs/shop-floor/1.0/_images/images/devonfw-shop-floor.jpg new file mode 100644 index 00000000..32c6fa52 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/devonfw-shop-floor.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/dsf4docker/docker.png b/docs/shop-floor/1.0/_images/images/dsf4docker/docker.png new file mode 100644 index 00000000..47f7523b Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/dsf4docker/docker.png differ diff --git a/docs/shop-floor/1.0/_images/images/dsf4docker/dsf-docker-arch.png b/docs/shop-floor/1.0/_images/images/dsf4docker/dsf-docker-arch.png new file mode 100644 index 00000000..111bf45a Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/dsf4docker/dsf-docker-arch.png differ diff --git a/docs/shop-floor/1.0/_images/images/dsf4openshift/primed.jpg b/docs/shop-floor/1.0/_images/images/dsf4openshift/primed.jpg new file mode 100644 index 00000000..6bd07bb3 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/dsf4openshift/primed.jpg differ diff --git 
a/docs/shop-floor/1.0/_images/images/dsf4pl/pl.png b/docs/shop-floor/1.0/_images/images/dsf4pl/pl.png new file mode 100644 index 00000000..dce37c09 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/dsf4pl/pl.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/azure-connection-strings/appservice.png b/docs/shop-floor/1.0/_images/images/others/azure-connection-strings/appservice.png new file mode 100644 index 00000000..b1af7779 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/azure-connection-strings/appservice.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/azure-connection-strings/connection-string.png b/docs/shop-floor/1.0/_images/images/others/azure-connection-strings/connection-string.png new file mode 100644 index 00000000..4b83c4d2 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/azure-connection-strings/connection-string.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/azure-connection-strings/create.png b/docs/shop-floor/1.0/_images/images/others/azure-connection-strings/create.png new file mode 100644 index 00000000..d6a9243d Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/azure-connection-strings/create.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/azure-connection-strings/db.png b/docs/shop-floor/1.0/_images/images/others/azure-connection-strings/db.png new file mode 100644 index 00000000..9f042dd7 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/azure-connection-strings/db.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/azure-connection-strings/resource-group.png b/docs/shop-floor/1.0/_images/images/others/azure-connection-strings/resource-group.png new file mode 100644 index 00000000..8baded97 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/azure-connection-strings/resource-group.png differ diff --git 
a/docs/shop-floor/1.0/_images/images/others/azure-pipelines/configuration.png b/docs/shop-floor/1.0/_images/images/others/azure-pipelines/configuration.png new file mode 100644 index 00000000..d5a9ff10 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/azure-pipelines/configuration.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/azure-pipelines/pipeline-settings.png b/docs/shop-floor/1.0/_images/images/others/azure-pipelines/pipeline-settings.png new file mode 100644 index 00000000..21d62273 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/azure-pipelines/pipeline-settings.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/ServiceConnection.png b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/ServiceConnection.png new file mode 100644 index 00000000..c9c37409 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/ServiceConnection.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/custom-plugin/extension.png b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/custom-plugin/extension.png new file mode 100644 index 00000000..45b47515 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/custom-plugin/extension.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/custom-plugin/install.png b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/custom-plugin/install.png new file mode 100644 index 00000000..531b02e4 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/custom-plugin/install.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/custom-plugin/marketplace.png b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/custom-plugin/marketplace.png new file mode 100644 index 00000000..fa819c76 Binary files /dev/null and 
b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/custom-plugin/marketplace.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/custom-plugin/share-unshare.png b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/custom-plugin/share-unshare.png new file mode 100644 index 00000000..016e736c Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/custom-plugin/share-unshare.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/custom-plugin/wizard.png b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/custom-plugin/wizard.png new file mode 100644 index 00000000..66e852ba Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/custom-plugin/wizard.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/install-sonar.png b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/install-sonar.png new file mode 100644 index 00000000..a1d13c22 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/install-sonar.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/publish.png b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/publish.png new file mode 100644 index 00000000..da9105b4 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/publish.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/runAnalysis.png b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/runAnalysis.png new file mode 100644 index 00000000..91de46b3 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/runAnalysis.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/sonarprepare.png b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/sonarprepare.png new file mode 100644 index 00000000..35f3f836 Binary files /dev/null and 
b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/sonarprepare.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/vm-connection.png b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/vm-connection.png new file mode 100644 index 00000000..c1187206 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/azure-sonarqube/vm-connection.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/bitbucket/done.png b/docs/shop-floor/1.0/_images/images/others/bitbucket/done.png new file mode 100644 index 00000000..a351e314 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/bitbucket/done.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/bitbucket/step0.png b/docs/shop-floor/1.0/_images/images/others/bitbucket/step0.png new file mode 100644 index 00000000..b8bf5e57 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/bitbucket/step0.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/bitbucket/step1.png b/docs/shop-floor/1.0/_images/images/others/bitbucket/step1.png new file mode 100644 index 00000000..a8688ca9 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/bitbucket/step1.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/bitbucket/step2.1.png b/docs/shop-floor/1.0/_images/images/others/bitbucket/step2.1.png new file mode 100644 index 00000000..ce4ff1ef Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/bitbucket/step2.1.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/bitbucket/step2.2.png b/docs/shop-floor/1.0/_images/images/others/bitbucket/step2.2.png new file mode 100644 index 00000000..312f1f26 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/bitbucket/step2.2.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/bitbucket/step2.3.png b/docs/shop-floor/1.0/_images/images/others/bitbucket/step2.3.png new file mode 100644 
index 00000000..bdfb006c Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/bitbucket/step2.3.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/bitbucket/step2.4.png b/docs/shop-floor/1.0/_images/images/others/bitbucket/step2.4.png new file mode 100644 index 00000000..fd0a81ff Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/bitbucket/step2.4.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/bitbucket/step2.5.png b/docs/shop-floor/1.0/_images/images/others/bitbucket/step2.5.png new file mode 100644 index 00000000..9fd4409f Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/bitbucket/step2.5.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/bitbucket/step3a.png b/docs/shop-floor/1.0/_images/images/others/bitbucket/step3a.png new file mode 100644 index 00000000..2f3da533 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/bitbucket/step3a.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/bitbucket/step3b.png b/docs/shop-floor/1.0/_images/images/others/bitbucket/step3b.png new file mode 100644 index 00000000..3bf16c81 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/bitbucket/step3b.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/bitbucket/step4.2.png b/docs/shop-floor/1.0/_images/images/others/bitbucket/step4.2.png new file mode 100644 index 00000000..8489f86e Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/bitbucket/step4.2.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/bitbucket/step4.3.png b/docs/shop-floor/1.0/_images/images/others/bitbucket/step4.3.png new file mode 100644 index 00000000..fa97577f Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/bitbucket/step4.3.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/bitbucket/step4.4.png b/docs/shop-floor/1.0/_images/images/others/bitbucket/step4.4.png new 
file mode 100644 index 00000000..ce1aaf5b Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/bitbucket/step4.4.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/bitbucket/step4.6.png b/docs/shop-floor/1.0/_images/images/others/bitbucket/step4.6.png new file mode 100644 index 00000000..d540cc37 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/bitbucket/step4.6.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/bitbucket/step4.7.png b/docs/shop-floor/1.0/_images/images/others/bitbucket/step4.7.png new file mode 100644 index 00000000..91711612 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/bitbucket/step4.7.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/bitbucket/xtraconfig/done.png b/docs/shop-floor/1.0/_images/images/others/bitbucket/xtraconfig/done.png new file mode 100644 index 00000000..71251d2d Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/bitbucket/xtraconfig/done.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/bitbucket/xtraconfig/pvc0.png b/docs/shop-floor/1.0/_images/images/others/bitbucket/xtraconfig/pvc0.png new file mode 100644 index 00000000..d3390499 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/bitbucket/xtraconfig/pvc0.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/bitbucket/xtraconfig/pvc1.png b/docs/shop-floor/1.0/_images/images/others/bitbucket/xtraconfig/pvc1.png new file mode 100644 index 00000000..2aaf268c Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/bitbucket/xtraconfig/pvc1.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/bitbucket/xtraconfig/pvc2.png b/docs/shop-floor/1.0/_images/images/others/bitbucket/xtraconfig/pvc2.png new file mode 100644 index 00000000..a3fb991a Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/bitbucket/xtraconfig/pvc2.png differ diff --git 
a/docs/shop-floor/1.0/_images/images/others/bitbucket/xtraconfig/pvc3.png b/docs/shop-floor/1.0/_images/images/others/bitbucket/xtraconfig/pvc3.png new file mode 100644 index 00000000..f8ec6ef1 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/bitbucket/xtraconfig/pvc3.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/istio/media/image1.png b/docs/shop-floor/1.0/_images/images/others/istio/media/image1.png new file mode 100644 index 00000000..78a6931c Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/istio/media/image1.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/istio/media/image10.png b/docs/shop-floor/1.0/_images/images/others/istio/media/image10.png new file mode 100644 index 00000000..b7b107b3 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/istio/media/image10.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/istio/media/image11.png b/docs/shop-floor/1.0/_images/images/others/istio/media/image11.png new file mode 100644 index 00000000..7259b5b5 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/istio/media/image11.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/istio/media/image12.png b/docs/shop-floor/1.0/_images/images/others/istio/media/image12.png new file mode 100644 index 00000000..bb713cb6 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/istio/media/image12.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/istio/media/image13.png b/docs/shop-floor/1.0/_images/images/others/istio/media/image13.png new file mode 100644 index 00000000..72ec8d1a Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/istio/media/image13.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/istio/media/image14.png b/docs/shop-floor/1.0/_images/images/others/istio/media/image14.png new file mode 100644 index 00000000..d5f7dc5f Binary files /dev/null and 
b/docs/shop-floor/1.0/_images/images/others/istio/media/image14.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/istio/media/image2.png b/docs/shop-floor/1.0/_images/images/others/istio/media/image2.png new file mode 100644 index 00000000..d2f459d6 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/istio/media/image2.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/istio/media/image3.png b/docs/shop-floor/1.0/_images/images/others/istio/media/image3.png new file mode 100644 index 00000000..4ef5e0af Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/istio/media/image3.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/istio/media/image4.png b/docs/shop-floor/1.0/_images/images/others/istio/media/image4.png new file mode 100644 index 00000000..affef4e2 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/istio/media/image4.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/istio/media/image5.png b/docs/shop-floor/1.0/_images/images/others/istio/media/image5.png new file mode 100644 index 00000000..831e2dd6 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/istio/media/image5.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/istio/media/image6.png b/docs/shop-floor/1.0/_images/images/others/istio/media/image6.png new file mode 100644 index 00000000..bd9501ce Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/istio/media/image6.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/istio/media/image7.png b/docs/shop-floor/1.0/_images/images/others/istio/media/image7.png new file mode 100644 index 00000000..de9eccf6 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/istio/media/image7.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/istio/media/image8.png b/docs/shop-floor/1.0/_images/images/others/istio/media/image8.png new file mode 100644 index 
00000000..fe736e77 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/istio/media/image8.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/istio/media/image9.png b/docs/shop-floor/1.0/_images/images/others/istio/media/image9.png new file mode 100644 index 00000000..bbd20e1a Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/istio/media/image9.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/gitlab-access-token-generated.png b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/gitlab-access-token-generated.png new file mode 100644 index 00000000..62e77c6a Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/gitlab-access-token-generated.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/gitlab-access-token-generation.png b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/gitlab-access-token-generation.png new file mode 100644 index 00000000..b62e7972 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/gitlab-access-token-generation.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/gitlab-access-token.png b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/gitlab-access-token.png new file mode 100644 index 00000000..7a913948 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/gitlab-access-token.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/gitlab-webhook.png b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/gitlab-webhook.png new file mode 100644 index 00000000..2a062ea4 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/gitlab-webhook.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-api-token-credentials-complete.png 
b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-api-token-credentials-complete.png new file mode 100644 index 00000000..ff84d342 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-api-token-credentials-complete.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-api-token-credentials-kind.png b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-api-token-credentials-kind.png new file mode 100644 index 00000000..8592d1cc Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-api-token-credentials-kind.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-api-token-credentials.png b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-api-token-credentials.png new file mode 100644 index 00000000..01d864c2 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-api-token-credentials.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-connection.png b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-connection.png new file mode 100644 index 00000000..b5ed0122 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-connection.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-plugins.png b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-plugins.png new file mode 100644 index 00000000..fdc97cc1 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-gitlab-plugins.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-pipeline-build-trigger.png 
b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-pipeline-build-trigger.png new file mode 100644 index 00000000..c9639fdd Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-pipeline-build-trigger.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-pipeline-gitlab-connection.png b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-pipeline-gitlab-connection.png new file mode 100644 index 00000000..73493ef5 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-pipeline-gitlab-connection.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-pipeline-result.png b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-pipeline-result.png new file mode 100644 index 00000000..60ef3034 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-gitlab/jenkins-pipeline-result.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-nexus/jenkins-new-maven-settings-servers-credentials.png b/docs/shop-floor/1.0/_images/images/others/jenkins-nexus/jenkins-new-maven-settings-servers-credentials.png new file mode 100644 index 00000000..97b62441 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-nexus/jenkins-new-maven-settings-servers-credentials.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-nexus/jenkins-new-maven-settings-servers.png b/docs/shop-floor/1.0/_images/images/others/jenkins-nexus/jenkins-new-maven-settings-servers.png new file mode 100644 index 00000000..a70f4507 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-nexus/jenkins-new-maven-settings-servers.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-nexus/jenkins-new-maven-settings.png b/docs/shop-floor/1.0/_images/images/others/jenkins-nexus/jenkins-new-maven-settings.png new 
file mode 100644 index 00000000..ef14002a Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-nexus/jenkins-new-maven-settings.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-nexus/jenkins-new-nexus-credentials-completed.png b/docs/shop-floor/1.0/_images/images/others/jenkins-nexus/jenkins-new-nexus-credentials-completed.png new file mode 100644 index 00000000..916dd413 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-nexus/jenkins-new-nexus-credentials-completed.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-nexus/jenkins-new-nexus-credentials-filled.png b/docs/shop-floor/1.0/_images/images/others/jenkins-nexus/jenkins-new-nexus-credentials-filled.png new file mode 100644 index 00000000..cf898fef Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-nexus/jenkins-new-nexus-credentials-filled.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-nexus/jenkins-new-nexus-credentials.png b/docs/shop-floor/1.0/_images/images/others/jenkins-nexus/jenkins-new-nexus-credentials.png new file mode 100644 index 00000000..fa518dcb Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-nexus/jenkins-new-nexus-credentials.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-slave/1.jpg b/docs/shop-floor/1.0/_images/images/others/jenkins-slave/1.jpg new file mode 100644 index 00000000..72602804 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-slave/1.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-slave/2.jpg b/docs/shop-floor/1.0/_images/images/others/jenkins-slave/2.jpg new file mode 100644 index 00000000..498b915d Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-slave/2.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-slave/3.jpg 
b/docs/shop-floor/1.0/_images/images/others/jenkins-slave/3.jpg new file mode 100644 index 00000000..d4c54a0c Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-slave/3.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-slave/4.jpg b/docs/shop-floor/1.0/_images/images/others/jenkins-slave/4.jpg new file mode 100644 index 00000000..6c1b79b3 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-slave/4.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-slave/7.jpg b/docs/shop-floor/1.0/_images/images/others/jenkins-slave/7.jpg new file mode 100644 index 00000000..d68e936d Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-slave/7.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-slave/root_directory.jpg b/docs/shop-floor/1.0/_images/images/others/jenkins-slave/root_directory.jpg new file mode 100644 index 00000000..12a1b455 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-slave/root_directory.jpg differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-feedback.png b/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-feedback.png new file mode 100644 index 00000000..ad55e886 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-feedback.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-plugin.png b/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-plugin.png new file mode 100644 index 00000000..e49da617 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-plugin.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-scanner.png 
b/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-scanner.png new file mode 100644 index 00000000..6796d3be Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-scanner.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-server-setup.png b/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-server-setup.png new file mode 100644 index 00000000..025e1bc8 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/jenkins-sonarqube-server-setup.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/sonarqube-login.png b/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/sonarqube-login.png new file mode 100644 index 00000000..d218d386 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/sonarqube-login.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/sonarqube-project-result.png b/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/sonarqube-project-result.png new file mode 100644 index 00000000..17c7fbe1 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/sonarqube-project-result.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/sonarqube-token-done.png b/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/sonarqube-token-done.png new file mode 100644 index 00000000..567b9214 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/sonarqube-token-done.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/sonarqube-token-generation.png b/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/sonarqube-token-generation.png new file mode 100644 index 00000000..c8acbdba Binary files /dev/null and 
b/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/sonarqube-token-generation.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/sonarqube-token-name.png b/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/sonarqube-token-name.png new file mode 100644 index 00000000..072a257d Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/jenkins-sonarqube/sonarqube-token-name.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/selenium/hub/done1.png b/docs/shop-floor/1.0/_images/images/others/selenium/hub/done1.png new file mode 100644 index 00000000..2d58140d Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/selenium/hub/done1.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/selenium/hub/done2.png b/docs/shop-floor/1.0/_images/images/others/selenium/hub/done2.png new file mode 100644 index 00000000..c01f4420 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/selenium/hub/done2.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/selenium/hub/step1.png b/docs/shop-floor/1.0/_images/images/others/selenium/hub/step1.png new file mode 100644 index 00000000..2c16ec97 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/selenium/hub/step1.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/selenium/hub/step2.png b/docs/shop-floor/1.0/_images/images/others/selenium/hub/step2.png new file mode 100644 index 00000000..d0cd2a19 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/selenium/hub/step2.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/selenium/hub/step3a.png b/docs/shop-floor/1.0/_images/images/others/selenium/hub/step3a.png new file mode 100644 index 00000000..240da9a8 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/selenium/hub/step3a.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/selenium/hub/step3b.png 
b/docs/shop-floor/1.0/_images/images/others/selenium/hub/step3b.png new file mode 100644 index 00000000..e839aa41 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/selenium/hub/step3b.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/selenium/hub/step4.png b/docs/shop-floor/1.0/_images/images/others/selenium/hub/step4.png new file mode 100644 index 00000000..10165a0b Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/selenium/hub/step4.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/selenium/hub/step5.png b/docs/shop-floor/1.0/_images/images/others/selenium/hub/step5.png new file mode 100644 index 00000000..b6fea9be Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/selenium/hub/step5.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/selenium/node/done1.png b/docs/shop-floor/1.0/_images/images/others/selenium/node/done1.png new file mode 100644 index 00000000..c6c90866 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/selenium/node/done1.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/selenium/node/done2.png b/docs/shop-floor/1.0/_images/images/others/selenium/node/done2.png new file mode 100644 index 00000000..1ec7f6ab Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/selenium/node/done2.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/selenium/node/done3.png b/docs/shop-floor/1.0/_images/images/others/selenium/node/done3.png new file mode 100644 index 00000000..99705780 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/selenium/node/done3.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/selenium/node/step1.png b/docs/shop-floor/1.0/_images/images/others/selenium/node/step1.png new file mode 100644 index 00000000..2c16ec97 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/selenium/node/step1.png differ diff --git 
a/docs/shop-floor/1.0/_images/images/others/selenium/node/step2.png b/docs/shop-floor/1.0/_images/images/others/selenium/node/step2.png new file mode 100644 index 00000000..9cd019f7 Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/selenium/node/step2.png differ diff --git a/docs/shop-floor/1.0/_images/images/others/selenium/node/step3.png b/docs/shop-floor/1.0/_images/images/others/selenium/node/step3.png new file mode 100644 index 00000000..9481e1ac Binary files /dev/null and b/docs/shop-floor/1.0/_images/images/others/selenium/node/step3.png differ diff --git a/docs/shop-floor/1.0/devonfw-shop-floor-doc.html b/docs/shop-floor/1.0/devonfw-shop-floor-doc.html new file mode 100644 index 00000000..af380459 --- /dev/null +++ b/docs/shop-floor/1.0/devonfw-shop-floor-doc.html @@ -0,0 +1,3284 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw shop floor ${project.version}

+
+
+

The devonfw community +${project.version}, ${buildtime}: Subtitle {doctitle} +:description: comprehensive documentation for the devonfw shop floor. +:sectnums: +:toc: +:toc-title: Table of Contents +:toclevels: 3 +:imagesdir: ./ +:footnote: test footnote +:productname: test productname

+
+
+

Unresolved include directive in modules/ROOT/pages/devonfw-shop-floor-doc.adoc - include::Home.adoc[]

+
+
+

How to use it

+
+

This is the documentation about shop floor and its different tools. Here you are going to learn how to create new projects, so that they can include continuous integration and continuous delivery processes, and be deployed automatically in different environments.

+
+
+
+

Prerequisites - Provisioning environment

+
+

To start working you need to have some services running in your provisioning environment, such as Jenkins (automation server), GitLab (git repository), SonarQube (program analysis), Nexus (software repository) or similar.

+
+
+

To host those services we recommend to have a Production Line instance but you can use other platforms. Here is the list for the different options:

+
+
+ +
+
+
+

Step 1 - Configuration and services integration

+
+

The first step is configuring your services and integrate them with jenkins. Here you have an example about how to manually configure the next services:

+
+
+ +
+
+
+

Step 2 - Create the project

+ +
+
+

Create and integrate git repository

+
+

The second step is to create our git repository and integrate it with Jenkins.

+
+
+

Here you can find a manual guide about how to do it:

+
+
+ +
+
+
+

Start new devonfw project

+
+

It is time to create your devonfw project:

+
+
+

You can find all that you need about how to create a new devonfw project

+
+
+
+

cicd configuration

+
+

Now you need to add cicd files in your project.

+
+
+
+

== Manual configuration

+ +
+
+

== Jenkinsfile

+
+

Here you can find all that you need to know to do your Jenkinsfile.

+
+
+
+

== Dockerfile

+
+

Here you can find all that you need to know to do your Dockerfile.

+
+
+
+

== Automatic configuration

+ +
+
+

== cicdgen

+
+

If you are using production line for provisioning you could use cicdgen to configure automatically almost everything explained in the manual configuration. To do it see the cicdgen documentation.

+
+
+
+

Step 3 - Deployment

+
+

The third step is to configure our deployment environment. Here is the list of the different options:

+
+
+ +
+
+
+

Step 4 - Monitoring

+
+

Here you can find information about tools for monitoring:

+
+
+
    +
  • +

    build monitor view for Jenkins. With this tool you will be able to see in real time what is the state of your Jenkins pipelines.

    +
  • +
+
+
+
+
+
+

Provisioning environments

+
+ +
+

Production Line provisioning environment

+
+

pl

+
+
+

The Production Line Project is a set of server-side collaboration tools for Capgemini engagements. It has been developed for supporting project engagements with individual tools like issue tracking, continuous integration, continuous deployment, documentation, binary storage and much more!

+
+
+

For additional information use the official documentation.

+
+
+
+

How to obtain your Production Line

+
+

You can order your Production Line environment instance following the official guide. Remember that you need to order at least the next tools: + * Jenkins + * GitLab + * SonarQube + * Nexus

+
+
+
+

Back.

+
+
+
+

dsf4docker provisioning environment

+
+
+docker +
+
+
+
+

Architecture overview

+
+

dsf docker arch

+
+
+
+

Prerequisite

+
+

To use dsf4docker provisioning environment you need a remote server and you must clone or download devonfw shop floor.

+
+
+
+

How to use it

+
+

Navigate to ./devonfw-shop-floor/dsf4docker/environment and here you can find one script to install it, and another one to uninstall it.

+
+
+
+

Install devonfw shop floor 4 Docker

+
+

There is an installation script to do so, so the complete installation should be completed by running it. Make sure this script has execution permissions in the Docker Host:

+
+
+
+
 chmod +x dsf4docker-install.sh
+ sudo ./dsf4docker-install.sh
+
+
+
+

This script, besides the container "installation" itself, will also adapt the docker-compose.yml file to your host (using sed to replace the IP_ADDRESS word of the file for your real Docker Host’s IP address).

+
+
+
+

Uninstall devonfw shop floor 4 Docker

+
+

As well as for the installation, if we want to remove everything concerning devonfw shop floor 4 Docker from our Docker Host, we’ll run this script:

+
+
+
+
 chmod +x dsf4docker-uninstall.sh
+ sudo ./dsf4docker-uninstall.sh
+
+
+
+
+

Troubleshooting

+
+

When trying to execute the install or uninstall .sh there may be some problems related to the windows/linux format file, so if you see this error log while executing the script:

+
+
+
+
./dsf4docker-install.sh: line 16: $'\r': command not found
+
+
+
+

You need to do a file conversion with this command:

+
+
+
+
dos2unix dsf4docker-install.sh
+
+
+
+

or

+
+
+
+
dos2unix dsf4docker-uninstall.sh
+
+
+
+
+

A little history

+
+

The Docker part of the shop floor is created based on the experience of the environment setup of the project Mirabaud Advisory, and intended to be updated to latest versions. Mirabaud Advisory is a web service developed with devonfw (Java) that, alongside its own implementation, it needed an environment both for the team to follow CICD rules through their 1-week-long sprints and for the client (Mirabaud) to check the already done work.

+
+
+

There is a practical experience about the Mirabaud Case.

+
+
+
+

Back.

+
+
+
+
+
+

Configuration and services integration

+
+
+

Nexus Configuration

+
+

In this document you will see how you can configure Nexus repository and how to integrate it with jenkins.

+
+
+
+

Login in Nexus

+
+

The first time you enter in Nexus you need to log in with the user 'admin' and the password that is inside the path: /volumes/nexus/nexus-data +Then you can change that password and create a new one.

+
+
+
+

Prerequisites

+ +
+
+

Repositories

+
+

You need to have one repository for snapshots, another for releases and another one for release-candidates. Normally you use maven2 (hosted) repositories and if you are going to use a docker registry, you need docker (hosted) too.

+
+
+

To create a repository in Nexus go to the administration clicking on the gear icon at top menu bar. Then on the left menu click on Repositories and press the Create repository button.

+
+
+
+nexus create repository +
+
+
+

Now you must choose the type of the repository and configure it. This is an example for Snapshot:

+
+
+
+nexus create repository form +
+
+
+
+

Create user to upload/download content

+
+

Once you have the repositories, you need a user to upload/download content. To do it go to the administration clicking on the gear icon at top menu bar. Then on the left menu click on Users and press the Create local user button.

+
+
+
+nexus create user +
+
+
+

Now you need to fill a form like this:

+
+
+
+nexus create user form +
+
+
+
+

Jenkins integration

+
+

To use Nexus in our pipelines you need to configure Jenkins.

+
+
+
+

Customize jenkins

+
+

The first time you enter jenkins, you are asked for the plugins to be installed. +We select install suggested plugins and later we can add the plugins that we need depending on the project necessities.

+
+
+
+plugins jenkins +
+
+
+

Then we need to create our first admin user, we can do it like this:

+
+
+
+jenkins first admin user +
+
+
+

The next step is the jenkins URL:

+
+
+
+jenkins url +
+
+
+

Your jenkins setup is ready!

+
+
+
+

Add nexus user credentials

+
+

First of all you need to add the user created in the step before to Jenkins. To do it (on the left menu) click on Credentials, then on System. Now you could access to Global credentials (unrestricted).

+
+
+
+nexus jenkins credentials +
+
+
+

Enter on it and you could see a button on the left to Add credentials. Click on it and fill a form like this:

+
+
+
+nexus jenkins credentials form +
+
+
+
+

Add the nexus user to maven global settings

+
+

In order to do this, you will need the Config File Provider plugin so we need to download it. Go to Jenkins→Manage jenkins→Manage plugins and "available" tab and search for it:

+
+
+
+jenkins config fp +
+
+
+

Click on "Download now and install after restart".

+
+
+

Now you need to go to Manage Jenkins clicking on left menu and enter in Managed files.

+
+
+

Click on Add a new config/Global Maven settings.xml, change the id for a new one more readable:

+
+
+
+jenkins maven settings +
+
+
+

Then click on "Submit"

+
+
+
+jenkins global maven +
+
+
+

Edit the Global Maven settings.xml to add your nexus repositories credentials(the ones you created before) as you could see in the next image:

+
+
+
+nexus jenkins global maven form +
+
+
+

And you are done.

+
+
+
+

SonarQube Configuration

+
+

To use SonarQube you need to use a token to connect, and to know the results of the analysis you need a webhook. Also, you need to install and configure SonarQube in Jenkins.

+
+
+
+

Generate user token

+
+

To generate the user token, go to your account clicking in the left icon on the top menu bar.

+
+
+ + + + + +
+ + +If you don’t have any account, you can use the admin/admin user/pass +
+
+
+
+sonarqube administration +
+
+
+

Go to security tab and generate the token.

+
+
+
+sonarqube token +
+
+
+
+

Webhook

+
+

When you execute our SonarQube scanner in our pipeline job, you need to ask SonarQube if the quality gate has been passed. To do it you need to create a webhook.

+
+
+

Go to administration clicking the option on the top bar menu and select the tab for Configuration.

+
+
+

Then search in the left menu to go to webhook section and create your webhook.

+
+
+

An example for Production Line:

+
+
+
+sonarqube webhook +
+
+
+
+

Jenkins integration

+
+

To use SonarQube in our pipelines you need to configure Jenkins to integrate SonarQube.

+
+
+
+

SonarQube Scanner

+
+

First, you need to configure the scanner. Go to Manage Jenkins clicking on left menu and enter in Global Tool Configuration.

+
+
+

Go to SonarQube Scanner section and add a new SonarQube scanner like this.

+
+
+
+sonarqube jenkins scanner +
+
+
+
+

SonarQube Server

+
+

Now you need to configure where our SonarQube server is, using the user token that you created before. Go to Manage Jenkins clicking on left menu and enter in Configure System.

+
+
+

For example, in Production Line the server is the next:

+
+
+
+sonarqube jenkins server +
+
+
+ + + + + +
+ + +Remember, the token was created at the beginning of this SonarQube configuration. +
+
+
+
+

SonarQube configuration

+
+

Now is time to configure your sonar in order to measure the quality of your code. To do it, please follow the official documentation about our plugins and Quality Gates and Profiles here.

+
+
+
+

How to ignore files

+
+

Usually the developers need to ignore some files from Sonar analysis. To do that, they must add the next line as a parameter of the sonar execution to their Jenkinsfile in the SonarQube code analysis step.

+
+
+
+
-Dsonar.exclusions='**/*.spec.ts, **/*.model.ts, **/*mock.ts'
+
+
+
+
+
+
+

Create project

+
+ +
+
+
+

Create and integrate git repository

+
+
+

include::dsf-configure-gitlab.adoc[leveloffset=2].

+
+
+
+
+

start new devonfw project

+
+
+

It is time to create your devonfw project:

+
+
+ +
+
+
+
+

cicd configuration

+
+ +
+
+
+

== Manual configuration

+
+ +
+
+
+

== Jenkinsfile

+
+
+

include::dsf-configure-jenkins.adoc[leveloffset=2].

+
+
+
+
+

Deployment

+
+
+

include::dsf-deployment-dsf4openshift.adoc[leveloffset=2].

+
+
+
+
+

Annexes

+
+ +
+
+
+

Custom Services

+
+ +
+
+
+

== BitBucket

+
+
+

[Under construction]

+
+
+
+

The purpose of the present document is to provide the basic steps carried out to setup a BitBucket server in OpenShift.

+
+
+
Introduction
+
+

BitBucket is the Atlassian tool that extends the Git functionality, by adding integration with JIRA, Confluence, or Trello, as well as incorporates extra features for security or management of user accounts (See BitBucket).

+
+
+

BitBucket server is the Atlassian tool that runs the BitBucket services (See BitBucket server).

+
+
+

The followed approach has been not using command line, but OpenShift Web Console, by deploying the Docker image atlassian/bitbucket-server (available in Docker Hub) in the existing project Deployment.

+
+
+

The procedure below exposed consists basically in three main steps:

+
+
+
    +
  1. +

    Deploy the BitBucket server image (from OpenShift web console)

    +
  2. +
  3. +

    Add a route for the external traffic (from OpenShift web console)

    +
  4. +
  5. +

    Configure the BitBucket server (from BitBucket server web console)

    +
  6. +
+
+
+
+
Prerequisites
+
+
    +
  • +

    OpenShift up & running

    +
  • +
  • +

    Atlassian account (with personal account key). Not required for OpenShift, but for the initial BitBucket server configuration.

    +
  • +
+
+
+
+
Procedure
+ +
+
+
Step 0: Log into our OpenShift Web console
+
+
+step0 +
+
+
+
+
Step 1: Get into Development project
+
+
+] +
+
+
+
+
Step 2.1: Deploy a new image to the project
+
+
+step2.1 +
+
+
+
+ +
+

Image name: atlassian/bitbucket-server

+
+
+
+step2.2 +
+
+
+
+
Step 2.3: Leave by the moment the default config. since it is enough for the basic setup. Press Create
+
+
+step2.3 +
+
+
+
+
Step 2.4: Copy the oc commands in case it is required to work via command line, and Go to overview
+
+
+step2.4 +
+
+
+
+
Step 2.5: Wait until OpenShift deploys and starts up the image. All the info will be available.
+
+

Please notice that there are no pre-configured routes, hence the application is not accessible from outside the cluster.

+
+
+
+step2.5 +
+
+
+
+
Step 3: Create a route in order for the application to be accessible from outside the cluster (external traffic). Press Create
+
+

Please notice that there are different fields that can be specified (hostname, port). If required, the value of those fields can be modified later.

+
+
+
+step3a +
+
+
+

Leave by the moment the default config. as it is enough for the basic setup.

+
+
+

The route for external traffic is now available.

+
+
+
+step3b +
+
+
+
+

Now the BitBucket server container is up & running in our cluster.

+
+
+

The below steps correspond to the basic configuration of our BitBucket server.

+
+
+
+
+ + +
+
+
Step 4.2: Leave by the moment the Internal database since it is enough for the basic setup (and it can be modified later), and click Next
+
+
+step4.2 +
+
+
+
+
Step 4.3: Select the evaluation license, and click I have an account
+
+
+step4.3 +
+
+
+
+
Step 4.4: Select the option Bitbucket (Server)
+
+
+step4.4 +
+
+
+
+
Step 4.5: Introduce your organization (Capgemini), and click Generate License
+ +
+
+
Step 4.6: Confirm that you want to install the license on the BitBucket server
+
+
+step4.6 +
+
+
+

The license key will be automatically generated. Click Next

+
+
+
+
Step 4.7: Introduce the details of the Administration account.
+
+

Since our BitBucket server is not going to be integrated with JIRA, click on Go to Bitbucket. The integration with JIRA can be configured later.

+
+
+
+step4.7 +
+
+
+
+
Step 4.8: Log in with the admin account that has been just created
+ +
+
+
DONE !!
+
+
+done +
+
+
+

[Under construction]

+
+
+
+

The purpose of the present document is to provide the basic steps carried out to improve the configuration of BitBucket server in OpenShift.

+
+
+

The improved configuration consists on:

+
+
+
    +
  • +

    Persistent Volume Claims

    +
  • +
  • +

    Health Checks (pending to be completed)

    +
  • +
+
+
+
+
Persistent Volume Claims.
+
+

Please notice that the BitBucket server container does not use persistent volume claims by default, which means that the data (e.g.: BitBucket server config.) will be lost from one deployment to another.

+
+
+
+pvc0 +
+
+
+

It is very important to create a persistent volume claim in order to prevent the mentioned loss of data.

+
+
+
+
Step 1: Add storage
+
+
+pvc1 +
+
+
+
+
Step 2: Select the appropriate storage, or create it from scratch if necessary
+
+
+pvc2 +
+
+
+
+
Step 3: Introduce the required information
+
+
    +
  • +

    Path as it is specified in the BitBucket server Docker image (/var/atlassian/application-data/bitbucket)

    +
  • +
  • +

    Volume name with a unique name to clearly identify the volume

    +
  • +
+
+
+
+pvc3 +
+
+
+
+
The change will be immediately applied
+
+
+done +
+
+
+
+

Mirabaud CICD Environment Setup

+
+

Initial requirements:

+
+
+
    +
  • +

    OS: RHEL 6.5

    +
  • +
+
+
+

Remote setup in CI machine (located in the Netherlands)

+
+
+
+
    - Jenkins
+    - Nexus
+    - GitLab
+    - Mattermost
+    - Atlassian Crucible
+    - SonarQube
+
+
+
+
+

1. Install Docker and Docker Compose in RHEL 6.5

+ +
+
+

Docker

+
+

Due to that OS version, the only way to have Docker running in the CI machine is by installing it from the EPEL repository (Extra Packages for Enterprise Linux).

+
+
+
    +
  1. +

    Add EPEL

    +
  2. +
+
+
+
+
##rpm -iUvh http://dl.fedoraproject.org/pub/epel/6/x86_64/epel-release-6-8.noarch.rpm
+
+
+
+
    +
  1. +

    Install docker.io from that repository

    +
  2. +
+
+
+
+
##yum -y install docker-io
+
+
+
+
    +
  1. +

    Start Docker daemon

    +
  2. +
+
+
+
+
##service docker start
+
+
+
+
    +
  1. +

    Check the installation

    +
  2. +
+
+
+
+
##docker -v
+Docker version 1.7.1, build 786b29d/1.7.1
+
+
+
+
+

Docker Compose

+
+

Download and install it via curl. It will use this site.

+
+
+
+
##curl -L https://github.com/docker/compose/releases/download/1.5.0/docker-compose-`uname -s`-`uname -m` > /usr/local/bin/docker-compose
+
+##chmod +x /usr/local/bin/docker-compose
+
+
+
+

Add it to your sudo path:

+
+
+
    +
  1. +

    Find out where it is:

    +
  2. +
+
+
+
+
##echo $PATH
+
+
+
+
    +
  1. +

    Copy the docker-compose file from /usr/local/bin/ to your sudo PATH.

    +
  2. +
+
+
+
+
##docker-compose -v
+docker-compose version 1.5.2, build 7240ff3
+
+
+
+
+

2. Directories structure

+
+

Several directories had been added to organize some files related to docker (like docker-compose.yml) and docker volumes for each service. Here’s how it looks:

+
+
+
+
/home
+    /[username]
+        /jenkins
+            /volumes
+                /jenkins_home
+        /sonarqube
+            /volumes
+                /conf
+                /data
+                /extensions
+                /lib
+                    /bundled-plugins
+        /nexus
+            /volumes
+                /nexus-data
+        /crucible
+            /volumes
+                /
+        /gitlab
+            docker-compose.yml
+            /volumes
+                /etc
+                    /gitlab
+                /var
+                    /log
+                    /opt
+        /mattermost
+            docker-compose.yml
+            /volumes
+                /db
+                    /var
+                        /lib
+                            /postgresql
+                                /data
+                /app
+                    /mattermost
+                        /config
+                        /data
+                        /logs
+                /web
+                    /cert
+
+
+
+
+

3. CICD Services with Docker

+
+

Some naming conventions have been followed, such as naming containers as mirabaud_[service].

+
+
+

Several folders have been created to store each service’s volumes, docker-compose.yml(s), extra configuration settings and so on:

+
+
+
+

Jenkins

+ +
+
+

== Command

+
+
+
##docker run -d -p 8080:8080 -p 50000:50000 --name=mirabaud_jenkins \
+    -v /home/[username]/jenkins/volumes/jenkins_home:/var/jenkins_home \
+    jenkins
+
+
+
+
+

== Generate keystore

+
+
+
keytool -importkeystore -srckeystore server.p12 -srcstoretype pkcs12 -srcalias 1 -destkeystore newserver.jks -deststoretype jks -destalias server
+
+
+
+
+

== Start jenkins with SSL (TODO: make a docker-compose.yml for this):

+
+
+
sudo docker run -d --name mirabaud_jenkins -v /jenkins:/var/jenkins_home -p 8080:8443 jenkins --httpPort=-1 --httpsPort=8443 --httpsKeyStore=/var/jenkins_home/certs/keystore.jks --httpsKeyStorePassword=Mirabaud2017
+
+
+
+
+

== Volumes

+
+
+
volumes/jenkins_home:/var/jenkins_home
+
+
+
+
+

SonarQube

+ +
+
+

== Command

+
+
+
##docker run -d -p 9000:9000 -p 9092:9092 --name=mirabaud_sonarqube \
+    -v /home/[username]/sonarqube/volumes/conf:/opt/sonarqube/conf \
+    -v /home/[username]/sonarqube/volumes/data:/opt/sonarqube/data \
+    -v /home/[username]/sonarqube/volumes/extensions:/opt/sonarqube/extensions \
+    -v /home/[username]/sonarqube/volumes/lib/bundled-plugins:/opt/sonarqube//lib/bundled-plugins \
+    sonarqube
+
+
+
+
+

== Volumes

+
+
+
volumes/conf:/opt/sonarqube/conf
+volumes/data:/opt/sonarqube/data
+volumes/extensions:/opt/sonarqube/extensions
+volumes/lib/bundled-plugins:/opt/sonarqube/lib/bundled-plugins
+
+
+
+
+

Nexus

+ +
+
+

== Command

+
+
+
##docker run -d -p 8081:8081 --name=mirabaud_nexus\
+    -v /home/[username]/nexus/nexus-data:/sonatype-work
+    sonatype/nexus
+
+
+
+
+

== Volumes

+
+
+
volumes/nexus-data/:/sonatype-work
+
+
+
+
+

Atlassian Crucible

+ +
+
+

== Command

+
+
+
##docker run -d -p 8084:8080 --name=mirabaud_crucible \
+    -v /home/[username]/crucible/volumes/data:/atlassian/data/crucible
+    mswinarski/atlassian-crucible:latest
+
+
+
+
+

== Volumes

+
+
+
volumes/data:/atlassian/data/crucible
+
+
+
+
+

4. CICD Services with Docker Compose

+
+

Both services had been deployed by using the # docker-compose up -d command from their root directories (/gitlab and /mattermost). The syntax of the two docker-compose.yml files is the one corresponding with the 1st version (due to the docker-compose v1.5).

+
+
+
+

GitLab

+ +
+
+

== docker-compose.yml

+
+
+
mirabaud:
+    image: 'gitlab/gitlab-ce:latest'
+    restart: always
+    ports:
+            - '8888:80'
+    volumes:
+            - '/home/[username]/gitlab/volumes/etc/gilab:/etc/gitlab'
+            - '/home/[username]/gitlab/volumes/var/log:/var/log/gitlab'
+            - '/home/[username]/gitlab/volumes/var/opt:/var/opt/gitlab'
+
+
+
+
+

== Command (docker)

+
+
+
docker run -d -p 8888:80 --name=mirabaud_gitlab \
+    -v /home/[username]/gitlab/volumes/etc/gitlab/:/etc/gitlab \
+    -v /home/[username]/gitlab/volumes/var/log:/var/log/gitlab \
+    -v /home/[username]/gitlab/volumes/var/opt:/var/opt/gitlab \
+    gitlab/gitlab-ce
+
+
+
+
+

== Volumes

+
+
+
volumes/etc/gitlab:/etc/gitlab
+volumes/var/opt:/var/log/gitlab
+volumes/var/log:/var/log/gitlab
+
+
+
+
+

Mattermost

+ +
+
+

== docker-compose.yml:

+
+
+
db:
+  image: mattermost/mattermost-prod-db
+  restart: unless-stopped
+  volumes:
+    - ./volumes/db/var/lib/postgresql/data:/var/lib/postgresql/data
+    - /etc/localtime:/etc/localtime:ro
+  environment:
+    - POSTGRES_USER=mmuser
+    - POSTGRES_PASSWORD=mmuser_password
+    - POSTGRES_DB=mattermost
+
+app:
+  image: mattermost/mattermost-prod-app
+  links:
+    - db:db
+  restart: unless-stopped
+  volumes:
+    - ./volumes/app/mattermost/config:/mattermost/config:rw
+    - ./volumes/app/mattermost/data:/mattermost/data:rw
+    - ./volumes/app/mattermost/logs:/mattermost/logs:rw
+    - /etc/localtime:/etc/localtime:ro
+  environment:
+    - MM_USERNAME=mmuser
+    - MM_PASSWORD=mmuser_password
+    - MM_DBNAME=mattermost
+
+web:
+  image: mattermost/mattermost-prod-web
+  ports:
+    - "8088:80"
+    - "8089:443"
+  links:
+    - app:app
+  restart: unless-stopped
+  volumes:
+    - ./volumes/web/cert:/cert:ro
+    - /etc/localtime:/etc/localtime:ro
+
+
+
+
+

== SSL Certificate

+
+

How to generate the certificates:

+
+
+

Get the crt and key from CA or generate a new one self-signed. Then:

+
+
+
+
// 1. create the p12 keystore
+##openssl pkcs12 -export -in cert.crt -inkey mycert.key -out certkeystore.p12
+
+// 2. export the pem certificate with password
+##openssl pkcs12 -in certkeystore.p12 -out cert.pem
+
+// 3. export the pem certificate without password
+##openssl rsa -in cert.pem -out key-no-password.pem
+
+
+
+

SSL:

+
+
+

Copy the cert and the key without password at:

+
+
+

./volumes/web/cert/cert.pem

+
+
+

and

+
+
+

./volumes/web/cert/key-no-password.pem

+
+
+

Restart the server and the SSL should be enabled at port 8089 using HTTPS.

+
+
+
+

== Volumes

+
+
+
-- db --
+volumes/db/var/lib/postgresql/data:/var/lib/postgresql/data
+/etc/localtime:/etc/localtime:ro                                # absolute path
+
+-- app --
+volumes/app/mattermost/config:/mattermost/config:rw
+volumes/app/mattermost/data:/mattermost/data:rw
+volumes/app/mattermost/logs:/mattermost/logs:rw
+/etc/localtime:/etc/localtime:ro                                # absolute path
+
+-- web --
+volumes/web/cert:/cert:ro
+/etc/localtime:/etc/localtime:ro                                # absolute path
+
+
+
+
+

5. Service Integration

+
+

All integrations had been done following CICD Services Integration guides:

+
+ +
+ + + + + +
+ + +These guides may be obsolete. You can find here the official configuration guides. +
+
+
+
Jenkins - GitLab integration
+
+

The first step to have a Continuous Integration system for your development is to make sure that all your changes to your team’s remote repository are evaluated by the time they are pushed. That usually implies the usage of so-called webhooks. You’ll find a fancy explanation about what Webhooks are in here.

+
+
+

To resume what we’re doing here, we are going to prepare our Jenkins and our GitLab so when a developer pushes some changes to the GitLab repository, a pipeline in Jenkins gets triggered. Just like that, in an automatic way.

+
+
+
+
1. Jenkins GitLab plugin
+
+

As it usually happens, some Jenkins plug-in(s) must be installed. In this case, let’s install those related with GitLab:

+
+
+
+jenkins gitlab plugins +
+
+
+
+
2. GitLab API Token
+
+

To communicate with GitLab from Jenkins, we will need to create an authentication token from your GitLab user settings. A good practice for this would be to create it from a machine user. Something like (i.e.) devonfw-ci/******.

+
+
+
+gitlab access token +
+
+
+

Simply adding a name to it and a date for it to expire is enough:

+
+
+
+gitlab access token generation +
+
+
+
+gitlab access token generated +
+
+
+

As GitLab said, you should make sure you don’t lose your token. Otherwise you would need to create a new one.

+
+
+

This will allow Jenkins to connect with right permissions to our GitLab server.

+
+
+
+
3. Create "GitLab API" Token credentials
+
+

Those credentials will use that token already generated in GitLab to connect once we declare the GitLab server in the Global Jenkins configuration. Obviously, those credentials must be GitLab API token-like.

+
+
+
+jenkins gitlab api token credentials kind +
+
+
+

Then, we add the generated token in the API token field:

+
+
+
+jenkins gitlab api token credentials complete +
+
+
+

Look in your Global credentials if they had been correctly created:

+
+
+
+jenkins gitlab api token credentials +
+
+
+
+
4. Create GitLab connection in Jenkins
+
+

Specify a GitLab connection in your Jenkins’s Manage Jenkins > Configure System configuration. This will tell Jenkins where is our GitLab server, a user to access it from and so on.

+
+
+

You’ll need to give it a name, for example, related with what this GitLab is dedicated for (specific clients, internal projects…​). Then, the Gitlab host URL is just where your GitLab server is. If you have it locally, that field should look similar to:

+
+
+
    +
  • +

    Connection name: my-local-gitlab

    +
  • +
  • +

    Gitlab host URL: http://localhost:${PORT_NUMBER}

    +
  • +
+
+
+

Finally, we select our recently GitLab API token as credentials.

+
+
+
+jenkins gitlab connection +
+
+
+
+
5. Jenkins Pipeline changes
+ +
+
+
5.1 Choose GitLab connection in Pipeline’s General configuration
+
+

First, our pipeline should allow us to add a GitLab connection to connect to (the already created one).

+
+
+
+jenkins pipeline gitlab connection +
+
+
+

In the case of the local example, could be like this:

+
+
+
    +
  • +

    GitLab connection: my-local-gitlab

    +
  • +
  • +

    GitLab Repository Name: myusername/webhook-test (for example)

    +
  • +
+
+
+
+
5.2 Create a Build Trigger
+
+
    +
  1. +

    You should already see your GitLab project’s URL (as you stated in the General settings of the Pipeline).

    +
  2. +
  3. +

    Write .*build.* in the comment for triggering a build

    +
  4. +
  5. +

    Specify or filter the branch of your repo you want use as target. That means, whenever a git action is done to that branch (for example, master), this Pipeline is going to be built.

    +
  6. +
  7. +

    Generate a Secret token (to be added in the yet-to-be-created GitLab webhook).

    +
  8. +
+
+
+
+jenkins pipeline build trigger +
+
+
+
+
6. GitLab Webhook
+
+
    +
  1. +

    Go to you GitLab project’s Settings > Integration section.

    +
  2. +
  3. +

    Add the path to your Jenkins Pipeline. Make sure you add project instead of job in the path.

    +
  4. +
  5. +

    Paste the generated Secret token of your Jenkins pipeline

    +
  6. +
  7. +

    Select your git action that will trigger the build.

    +
  8. +
+
+
+
+gitlab webhook +
+
+
+
+
7. Results
+
+

After all those steps you should have a result similar to this in your Pipeline:

+
+
+
+jenkins pipeline result +
+
+
+

Enjoy the Continuous Integration! :)

+
+
+
+
Jenkins - Nexus integration
+
+

Nexus is used to both host dependencies for devonfw projects to download (common Maven ones, custom ones such as ojdb and even devonfw so-far-IP modules). Moreover, it will host our projects' build artifacts (.jar, .war, …​) and expose them for us to download, wget and so on. A team should have a bidirectional relation with its Nexus repository.

+
+
+
+
1. Jenkins credentials to access Nexus
+
+

By default, when Nexus is installed, it contains 3 user credentials for different purposes. The admin ones look like this: admin/admin123. There are also other 2: deployment/deployment123 and TODO.

+
+
+
+
// ADD USER TABLE IMAGE FROM NEXUS
+
+
+
+

In this case, let’s use the ones with the greater permissions: admin/admin123.

+
+
+

Go to Credentials > System (left sidebar of Jenkins) then to Global credentials (unrestricted) on the page table and on the left sidebar again click on Add Credentials.

+
+
+

This should be shown in your Jenkins:

+
+
+
+jenkins new nexus credentials +
+
+
+

Fill the form like this:

+
+
+
+jenkins new nexus credentials filled +
+
+
+

And click in OK to create them. Check if the whole thing went as expected:

+
+
+
+jenkins new nexus credentials completed +
+
+
+
+
2. Jenkins Maven Settings
+
+

Those settings are also configured (or maybe not-yet-configured) in our devonfw distributions in:

+
+
+
+
/${devonfw-dist-path}
+    /software
+        /maven
+            /conf
+                settings.xml
+
+
+
+

Go to Manage Jenkins > Managed files and select Add a new Config in the left sidebar.

+
+
+
+jenkins new maven settings +
+
+
+

The ID field will get automatically filled with a unique value if you don’t set it up. No problems about that. Click on Submit and let’s create some Servers Credentials:

+
+
+
+jenkins new maven settings servers +
+
+
+

Those Server Credentials will allow Jenkins to access the different repositories/servers that are going to be declared afterwards.

+
+
+

Let’s create 4 server credentials.

+
+
+
    +
  • +

    my.nexus: Will serve as general profile for Maven.

    +
  • +
  • +

    mynexus.releases: When a mvn deploy process is executed, this will tell Maven where to push releases to.

    +
  • +
  • +

    mynexus.snapshots: The same as before, but with snapshots instead.

    +
  • +
  • +

    mynexus.central: Just in case we want to install a specific dependency that is not by default in the Maven Central repository (such as ojdbc), Maven will point to it instead.

    +
  • +
+
+
+
+jenkins new maven settings servers credentials +
+
+
+

A more or less complete Jenkins Maven settings would look like this:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
+          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+          xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">
+
+    <mirrors>
+        <mirror>
+            <id>mynexus.central</id>
+            <mirrorOf>central</mirrorOf>
+            <name>central</name>
+            <url>http://${URL-TO-YOUR-NEXUS-REPOS}/central</url>
+        </mirror>
+    </mirrors>
+
+    <profiles>
+        <profile>
+            <id>my.nexus</id>
+            <!-- 3 REPOS ARE DECLARED -->
+            <repositories>
+                <repository>
+                    <id>mynexus.releases</id>
+                    <name>mynexus Releases</name>
+                    <url>http://${URL-TO-YOUR-NEXUS-REPOS}/releases</url>
+                    <releases>
+                        <enabled>true</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </releases>
+                    <snapshots>
+                        <enabled>false</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </snapshots>
+                </repository>
+                <repository>
+                    <id>mynexus.snapshots</id>
+                    <name>mynexus Snapshots</name>
+                    <url>http://${URL-TO-YOUR-NEXUS-REPOS}/snapshots</url>
+                    <releases>
+                        <enabled>false</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </releases>
+                    <snapshots>
+                        <enabled>true</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </snapshots>
+                </repository>
+            </repositories>
+            <pluginRepositories>
+                <pluginRepository>
+                    <id>public</id>
+                    <name>Public Repositories</name>
+                    <url>http://${URL-TO-YOUR-NEXUS}/nexus/content/groups/public/</url>
+                    <releases>
+                        <enabled>true</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </releases>
+                    <snapshots>
+                        <enabled>true</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </snapshots>
+                </pluginRepository>
+            </pluginRepositories>
+        </profile>
+    </profiles>
+    <!-- HERE IS WHERE WE TELL MAVEN TO CHOOSE THE my.nexus PROFILE -->
+    <activeProfiles>
+        <activeProfile>my.nexus</activeProfile>
+    </activeProfiles>
+</settings>
+
+
+
+
+
3. Use it in Jenkins Pipelines
+ +
+
+
Jenkins - SonarQube integration
+
+

First thing is installing both tools by, for example, Docker or Docker Compose. Then, we have to think about how they should collaborate to create a more efficient Continuous Integration process.

+
+
+

Once our project’s pipeline is triggered (it could also be triggered in a fancy way, such as when a merge to the develop branch is done).

+
+
+
+
1. Jenkins SonarQube plugin
+
+

Typically in those integration cases, Jenkins plug-in installations become a must. Let’s look for some available SonarQube plug-in(s) for Jenkins:

+
+
+
+jenkins sonarqube plugin +
+
+
+
+
2. SonarQube token
+
+

Once installed let’s create a token in SonarQube so that Jenkins can communicate with it to trigger their Jobs. Once we install SonarQube in our CI/CD machine (ideally a remote machine) let’s login with admin/admin credentials:

+
+
+
+sonarqube login +
+
+
+

Afterwards, SonarQube itself asks you to create this token we talked about (the name is up to you):

+
+
+
+sonarqube token name +
+
+
+

Then a token is generated:

+
+
+
+sonarqube token generation +
+
+
+

You click in "continue" and the token’s generation is completed:

+
+
+
+sonarqube token done +
+
+
+
+
3. Jenkins SonarQube Server setup
+
+

Now we need to tell Jenkins where SonarQube is and how to communicate with it. In Manage Jenkins > Configure Settings, we add a name for the server (up to you), where it is located (URL), version and the Server authentication token created in point 2.

+
+
+
+jenkins sonarqube server setup +
+
+
+
+
4. Jenkins SonarQube Scanner
+
+

Install a SonarQube Scanner as a Global tool in Jenkins to be used in the project’s pipeline.

+
+
+
+jenkins sonarqube scanner +
+
+
+
+
5. Pipeline code
+
+

Last step is to add the SonarQube process in our project’s Jenkins pipeline. The following code will trigger a SonarQube process that will evaluate our code’s quality looking for bugs, duplications, and so on.

+
+
+
+
    stage 'SonarQube Analysis'
+        def scannerHome = tool 'SonarQube scanner';
+        sh "${scannerHome}/bin/sonar-scanner \
+             -Dsonar.host.url=http://url-to-your-sq-server:9000/ \
+             -Dsonar.login=[SONAR_USER] -Dsonar.password=[SONAR_PASS] \
+             -Dsonar.projectKey=[PROJECT_KEY] \
+             -Dsonar.projectName=[PROJECT_NAME] -Dsonar.projectVersion=[PROJECT_VERSION] \
+             -Dsonar.sources=. -Dsonar.java.binaries=. \
+             -Dsonar.java.source=1.8 -Dsonar.language=java"
+
+
+
+
+
6. Results
+
+

After all this, you should end up having something like this in Jenkins:

+
+
+
+jenkins sonarqube feedback +
+
+
+

And in SonarQube:

+
+
+
+sonarqube project result +
+
+
+
+
7. Changes in a devonfw project to execute SonarQube tests with Coverage
+
+

The plugin used to have Coverage reports in the SonarQube for devonfw projects is Jacoco. There are some changes in the project’s parent pom.xml that are mandatory to use it.

+
+
+

Inside of the <properties> tag:

+
+
+
+
<properties>
+
+    (...)
+
+    <sonar.jacoco.version>3.8</sonar.jacoco.version>
+    <sonar.java.coveragePlugin>jacoco</sonar.java.coveragePlugin>
+    <sonar.core.codeCoveragePlugin>jacoco</sonar.core.codeCoveragePlugin>
+    <sonar.dynamicAnalysis>reuseReports</sonar.dynamicAnalysis>
+    <sonar.language>java</sonar.language>
+    <sonar.java.source>1.7</sonar.java.source>
+    <sonar.junit.reportPaths>target/surefire-reports</sonar.junit.reportPaths>
+    <sonar.jacoco.reportPaths>target/jacoco.exec</sonar.jacoco.reportPaths>
+    <sonar.sourceEncoding>UTF-8</sonar.sourceEncoding>
+    <sonar.exclusions>
+        **/generated-sources/**/*,
+        **io/oasp/mirabaud/general/**/*,
+        **/*Dao.java,
+        **/*Entity.java,
+        **/*Cto.java,
+        **/*Eto.java,
+        **/*SearchCriteriaTo.java,
+        **/*management.java,
+        **/*SpringBootApp.java,
+        **/*SpringBootBatchApp.java,
+        **/*.xml,
+        **/*.jsp
+    </sonar.exclusions>
+    <sonar.coverage.exclusions>
+        **io/oasp/mirabaud/general/**/*,
+        **/*Dao.java,
+        **/*Entity.java,
+        **/*Cto.java,
+        **/*Eto.java,
+        **/*SearchCriteriaTo.java,
+        **/*management.java,
+        **/*SpringBootApp.java,
+        **/*SpringBootBatchApp.java,
+        **/*.xml,
+        **/*.jsp
+    </sonar.coverage.exclusions>
+    <sonar.host.url>http://${YOUR_SONAR_SERVER_URL}/</sonar.host.url>
+    <jacoco.version>0.7.9</jacoco.version>
+
+    <war.plugin.version>3.2.0</war.plugin.version>
+    <assembly.plugin.version>3.1.0</assembly.plugin.version>
+</properties>
+
+
+
+

Of course, those sonar and sonar.coverage values can/must be changed to fit other projects.

+
+
+

Now add the Jacoco Listener as a dependency:

+
+
+
+
<dependencies>
+    <dependency>
+        <groupId>org.sonarsource.java</groupId>
+        <artifactId>sonar-jacoco-listeners</artifactId>
+        <scope>test</scope>
+    </dependency>
+</dependencies>
+
+
+
+

Plugin Management declarations:

+
+
+
+
<pluginManagement>
+    <plugins>
+        <plugin>
+            <groupId>org.sonarsource.scanner.maven</groupId>
+            <artifactId>sonar-maven-plugin</artifactId>
+            <version>3.2</version>
+        </plugin>
+        <plugin>
+            <groupId>org.jacoco</groupId>
+            <artifactId>jacoco-maven-plugin</artifactId>
+            <version>${jacoco.version}</version>
+        </plugin>
+    </plugins>
+</pluginManagement>
+
+
+
+

Plugins:

+
+
+
+
<plugins>
+
+    (...)
+
+    <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <version>2.20.1</version>
+        <configuration>
+            <argLine>-XX:-UseSplitVerifier -Xmx2048m ${surefireArgLine}</argLine>
+            <testFailureIgnore>false</testFailureIgnore>
+            <useFile>false</useFile>
+            <reportsDirectory>${project.basedir}/${sonar.junit.reportPaths}</reportsDirectory>
+            <argLine>${jacoco.agent.argLine}</argLine>
+            <excludedGroups>${oasp.test.excluded.groups}</excludedGroups>
+            <alwaysGenerateSurefireReport>true</alwaysGenerateSurefireReport>
+            <aggregate>true</aggregate>
+            <properties>
+                <property>
+                    <name>listener</name>
+                    <value>org.sonar.java.jacoco.JUnitListener</value>
+                </property>
+            </properties>
+        </configuration>
+    </plugin>
+    <plugin>
+        <groupId>org.jacoco</groupId>
+        <artifactId>jacoco-maven-plugin</artifactId>
+        <configuration>
+            <argLine>-Xmx128m</argLine>
+            <append>true</append>
+            <propertyName>jacoco.agent.argLine</propertyName>
+            <destFile>${sonar.jacoco.reportPath}</destFile>
+            <excludes>
+                <exclude>**/generated-sources/**/*</exclude>
+                <exclude>**io/oasp/${PROJECT_NAME}/general/**/*</exclude>
+                <exclude>**/*Dao.java</exclude>
+                <exclude>**/*Entity.java</exclude>
+                <exclude>**/*Cto.java</exclude>
+                <exclude>**/*Eto.java</exclude>
+                <exclude>**/*SearchCriteriaTo.java</exclude>
+                <exclude>**/*management.java</exclude>
+                <exclude>**/*SpringBootApp.java</exclude>
+                <exclude>**/*SpringBootBatchApp.java</exclude>
+                <exclude>**/*.class</exclude>
+            </excludes>
+        </configuration>
+        <executions>
+            <execution>
+                <id>prepare-agent</id>
+                <phase>initialize</phase>
+                <goals>
+                    <goal>prepare-agent</goal>
+                </goals>
+                <configuration>
+                    <destFile>${sonar.jacoco.reportPath}</destFile>
+                    <append>true</append>
+                </configuration>
+            </execution>
+            <execution>
+                <id>report-aggregate</id>
+                <phase>verify</phase>
+                <goals>
+                    <goal>report-aggregate</goal>
+                </goals>
+            </execution>
+            <execution>
+                <id>jacoco-site</id>
+                <phase>verify</phase>
+                <goals>
+                    <goal>report</goal>
+                </goals>
+            </execution>
+        </executions>
+    </plugin>
+</plugins>
+
+
+
+
+
Jenkins SonarQube execution
+
+

If the previous configuration is already set up, once Jenkins executes the sonar maven plugin, it will automatically execute coverage as well.

+
+
+

This is an example of a block of code from a devonfw project’s Jenkinsfile:

+
+
+
+
    withMaven(globalMavenSettingsConfig: 'YOUR_GLOBAL_MAVEN_SETTINGS', jdk: 'OpenJDK 1.8', maven: 'Maven_3.3.9') {
+        sh "mvn sonar:sonar -Dsonar.login=[USERNAME] -Dsonar.password=[PASSWORD]"
+    }
+
+
+
+
+
+

OKD (OpenShift Origin)

+ +
+
+

What is OKD

+
+

OKD is a distribution of Kubernetes optimized for continuous application development and multi-tenant deployment. OKD is the upstream Kubernetes distribution embedded in Red Hat OpenShift.

+
+
+

OKD embeds Kubernetes and extends it with security and other integrated concepts. OKD is also referred to as Origin in github and in the documentation.

+
+
+

OKD provides a complete open source container application platform. If you are looking for enterprise-level support, or information on partner certification, Red Hat also offers Red Hat OpenShift Container Platform.

+
+
+

Continue reading…​

+ +
+
Install OKD (Openshift Origin)
+ +
+
+
Pre-requisites
+ +
+
+
Install docker
+ +
+
+
$ sudo groupadd docker
+$ sudo usermod -aG docker $USER
+
+
+
+
+
Download Openshift Origin Client
+
+

Download Openshift Origin Client from here

+
+
+

When the download is complete, just extract it into the directory that you want, for example /home/administrador/oc

+
+
+
+
Add oc to path
+
+
+
$ export PATH=$PATH:/home/administrador/oc
+
+
+
+
+
Install Openshift Cluster
+ +
+
+
Add the insecure registry
+
+

Create file /etc/docker/daemon.json with the next content:

+
+
+
+
{
+    "insecure-registries" : [ "172.30.0.0/16" ]
+}
+
+
+
+
+
Download docker images for openshift
+
+
+
$ oc cluster up
+
+
+
+
+
Install Oc Cluster Wrapper
+
+

To manage cluster persistence more easily, we are going to use the oc cluster wrapper.

+
+
+
+
cd /home/administrador/oc
+wget https://raw.githubusercontent.com/openshift-evangelists/oc-cluster-wrapper/master/oc-cluster
+
+
+
+

oc-cluster up devonfw-shop-floor --public-hostname X.X.X.X

+
+
+
+
Configure iptables
+
+

We must create iptables rules to allow traffic from other machines.

+
+
+
+
- The next commands allow all traffic; don't do this on a real server.
+
+- $ iptables -F
+- $ iptables -X
+- $ iptables -t nat -F
+- $ iptables -t nat -X
+- $ iptables -t mangle -F
+- $ iptables -t mangle -X
+- $ iptables -P INPUT ACCEPT
+- $ iptables -P OUTPUT ACCEPT
+- $ iptables -P FORWARD ACCEPT
+
+
+
+
+
How to use Oc Cluster Wrapper
+
+

With the oc cluster wrapper we can have different clusters with different contexts.

+
+
+
+
Cluster up
+
+
+
$ oc-cluster up devonfw-shop-floor --public-hostname X.X.X.X
+
+
+
+
+
Cluster down
+
+
+
$ oc-cluster down
+
+
+
+
+
Use non-persistent cluster
+
+
+
oc cluster up --image openshift/origin --public-hostname X.X.X.X --routing-suffix apps.X.X.X.X.nip.io
+
+
+
+
+
devonfw Openshift Origin Initial Setup
+
+

These are scripts to customize an Openshift cluster to be a devonfw Openshift.

+
+
+
+
How to use
+ +
+
+
Prerequisite: Customize Openshift
+
+

devonfw Openshift Origin uses custom icons, and we need to add them to Openshift. More information:

+
+ +
+
+
Script initial-setup
+
+

Download this script and execute it.

+
+
+

More information about what this script does here.

+
+
+
+
Known issues
+ +
+
+
Failed to push image
+
+

If you receive an error like this:

+
+
+
+
error: build error: Failed to push image: After retrying 6 times, Push image still failed due to error: Get http://172.30.1.1:5000/v2/:  dial tcp 172.30.1.1:5000: getsockopt: connection refused
+
+
+
+

It’s because the registry isn’t working. Go to the openshift console and enter the default project https://x.x.x.x:8443/console/project/default/overview and you should see two resources, docker-registry and router; they must be running. If they don’t work, try to deploy them and look at the logs to see what is happening.

+
+
+
+
s2i devonfw
+
+

These are the s2i sources and templates to build s2i images. They provide OpenShift builder images for components of the devonfw (at this moment only for angular and java).

+
+
+

This work is totally based on the implementation of Michael Kuehl from RedHat for Oasp s2i.

+
+
+

All this information is used as a part of the initial setup for openshift.

+
+
+
+
Previous setup
+
+

In order to build all of this, it will be necessary, first, to have a running OpenShift cluster. How to install it here.

+
+
+
+
Usage
+
+

Before using the builder images, add them to the OpenShift cluster.

+
+
+
+
Deploy the Source-2-Image builder images
+
+

First, create a dedicated devonfw project as admin.

+
+
+
+
$ oc new-project devonfw --display-name='devonfw' --description='devonfw Application Standard Platform'
+
+
+
+

Now add the builder image configuration and start their build.

+
+
+
+
oc create -f https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-devonfw-deployment/s2i/java/s2i-devonfw-java-imagestream.json --namespace=devonfw
+oc create -f https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-devonfw-deployment/s2i/angular/s2i-devonfw-angular-imagestream.json --namespace=devonfw
+oc start-build s2i-devonfw-java --namespace=devonfw
+oc start-build s2i-devonfw-angular --namespace=devonfw
+
+
+
+

Make sure other projects can access the builder images:

+
+
+
+
oc policy add-role-to-group system:image-puller system:authenticated --namespace=devonfw
+
+
+
+

That’s all!

+
+
+
+
Deploy devonfw templates
+
+

Now, it’s time to create devonfw templates to use this s2i and add it to the browse catalog. More information here.

+
+
+
+
Build All
+
+

Use this script to automatically install and build all image streams. The script also creates templates devonfw-angular and devonfw-java inside the project 'openshift' to be used by everyone.

+
+
+
    +
  1. +

    Open a bash shell as Administrator

    +
  2. +
  3. +

    Execute shell file:

    +
  4. +
+
+
+
+
$ /PATH/TO/BUILD/FILE/initial-setup.sh
+
+
+
+

More information about what this script does here.

+
+
+
+ +
+

This is a list of useful articles, etc, that I found while creating the templates.

+
+ +
+
+
devonfw templates
+
+

These are the devonfw templates to build devonfw apps for Openshift using the s2i images. They are based on the work of Mickuehl in Oasp templates/mythaistar to deploy My Thai Star.

+
+
+
    +
  • +

    Inside the example-mythaistar we have an example to deploy My Thai Star application using devonfw templates.

    +
  • +
+
+
+

All this information is used as a part of the initial setup for openshift.

+
+
+
+
How to use
+ +
+
+
Previous requirements
+ +
+
+
== Deploy the Source-2-Image builder images
+
+

Remember that these templates need a build image from s2i-devonfw-angular and s2i-devonfw-java. More information:

+
+ +
+
+
== Customize Openshift
+
+

Remember that these templates also have custom icons, and to use them, we must modify the master-config.yml inside openshift. More information:

+
+
+ +
+
+
+
Deploy devonfw templates
+
+

Now, it’s time to create devonfw templates to use this s2i and add it to the browse catalog.

+
+
+

To let all users use these templates in all openshift projects, we should create them in an openshift namespace. To do that, we must log in as an admin.

+
+
+
+
oc create -f https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-devonfw-deployment/templates/devonfw-java-template.json --namespace=openshift
+oc create -f https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-devonfw-deployment/templates/devonfw-angular-template.json --namespace=openshift
+
+
+
+

When it finishes, remember to logout as an admin and enter with our normal user.

+
+
+
+
$ oc login
+
+
+
+
+
How to use devonfw templates in openshift
+
+

To use these templates with openshift, we can override any parameter values defined in the file by adding the --param-file=paramfile option.

+
+
+

This file must be a list of <name>=<value> pairs. A parameter reference may appear in any text field inside the template items.

+
+
+

The parameters that we must override are the following

+
+
+
+
$ cat paramfile
+  APPLICATION_NAME=app-Name
+  APPLICATION_GROUP_NAME=group-Name
+  GIT_URI=Git uri
+  GIT_REF=master
+  CONTEXT_DIR=/context
+
+
+
+

The following parameters are optional

+
+
+
+
$ cat paramfile
+  APPLICATION_HOSTNAME=Custom hostname for service routes. Leave blank for default hostname, e.g.: <application-name>.<project>.<default-domain-suffix>,
+  # Only for angular
+  REST_ENDPOINT_URL=The URL of the backend's REST API endpoint. This can be declared after,
+  REST_ENDPOINT_PATTERN=The pattern URL of the backend's REST API endpoint that must be modified by the REST_ENDPOINT_URL variable,
+
+
+
+

For example, to deploy My Thai Star Java

+
+
+
+
$ cat paramfile
+  APPLICATION_NAME="mythaistar-java"
+  APPLICATION_GROUP_NAME="My-Thai-Star"
+  GIT_URI="https://github.com/devonfw/my-thai-star.git"
+  GIT_REF="develop"
+  CONTEXT_DIR="/java/mtsj"
+
+$ oc new-app --template=devonfw-java --namespace=mythaistar --param-file=paramfile
+
+
+
+
+
Customize Openshift Origin for devonfw
+
+

This is a guide to customize Openshift cluster.

+
+
+
+
Images Styles
+
+

The icons for templates must measure the same as below or the images don’t show right:

+
+
+
    +
  • +

    Openshift logo: 230px x 40px.

    +
  • +
  • +

    Template logo: 50px x 50px.

    +
  • +
  • +

    Category logo: 110px x 36px.

    +
  • +
+
+
+
+
How to use
+
+

To use it, we need to enter in openshift as an admin and use the next command:

+
+
+
+
$ oc login
+
+$ oc edit configmap/webconsole-config -n openshift-web-console
+
+
+
+

After this, we can see in our shell the webconsole-config.yaml; we only need to navigate to the extensions section and add the url for our own css in the stylesheetURLs and javascript in the scriptURLs section.

+
+
+

IMPORTANT: Scripts and stylesheets must be served with the correct content type or they will not be run by the browser. Scripts must be served with Content-Type: application/javascript and stylesheets with Content-Type: text/css.

+
+
+

In git repositories, the content type of raw is text/plain. You can use rawgit to convert a raw from a git repository to the correct content type.

+
+
+

Example:

+
+
+
+
webconsole-config.yaml: |
+  [...]
+    extensions:
+      scriptURLs:
+        - https://cdn.rawgit.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/scripts/catalog-categories.js
+      stylesheetURLs:
+        - https://cdn.rawgit.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/stylesheet/icons.css
+  [...]
+
+
+
+
+
More information
+
+ +
+
+
+
Old versions
+
+ +
+
+How to add Custom Icons inside openshift +
+

This is a guide to add custom icons into an Openshift cluster.

+
+
+

Here we can find an icons.css example to use the devonfw icons.

+
+
+
+Images Styles +
+

The icons for templates must measure the same as below or the images don’t show right:

+
+
+
    +
  • +

    Openshift logo: 230px x 40px.

    +
  • +
  • +

    Template logo: 50px x 50px.

    +
  • +
  • +

    Category logo: 110px x 36px.

    +
  • +
+
+
+
+Create a css + +
+
+Custom logo for openshift cluster +
+

For this example, we are going to call the css icons.css but you can name it as you wish. +The Openshift cluster draws its icon using the id header-logo, so we only need to add to our icons.css the next Style Attribute ID

+
+
+
+
#header-logo {
+  background-image: url("https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/images/devonfw-openshift.png");
+  width: 230px;
+  height: 40px;
+}
+
+
+
+
+Custom icons for templates +
+

To assign a custom icon to a template, Openshift uses a class name. Then, we need to insert inside our icons.css the next Style Class

+
+
+
+
.devonfw-logo {
+  background-image: url("https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/images/devonfw.png");
+  width: 50px;
+  height: 50px;
+}
+
+
+
+

To show that custom icon on a template, we only need to write the name of our class in the tag "iconClass" of our template.

+
+
+
+
{
+    ...
+    "items": [
+        {
+            ...
+            "metadata": {
+                ...
+                "annotations": {
+                    ...
+                    "iconClass": "devonfw-logo",
+                    ...
+                }
+            },
+            ...
+        }
+    ]
+}
+
+
+
+
+Use our own css inside openshift +
+

To do that, we need to enter in openshift as an admin and use the next command:

+
+
+
+
$ oc login
+$ oc edit configmap/webconsole-config -n openshift-web-console
+
+
+
+

After this, we can see in our shell the webconsole-config.yaml, we only need to navigate until extensions and add the url for our own css in the stylesheetURLs section.

+
+
+

IMPORTANT: Scripts and stylesheets must be served with the correct content type or they will not be run by the browser. stylesheets must be served with Content-Type: text/css.

+
+
+

In git repositories, the content type of raw is text/plain. You can use rawgit to convert a raw from a git repository to the correct content type.

+
+
+

Example:

+
+
+
+
webconsole-config.yaml: |
+	[...]
+    extensions:
+      stylesheetURLs:
+		- https://cdn.rawgit.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/stylesheet/icons.css
+    [...]
+
+
+
+
+How to add custom catalog categories inside openshift +
+

This is a guide to add custom Catalog Categories into an Openshift cluster.

+
+
+

Here we can find a catalog-categories.js example to use the devonfw catalog categories.

+
+
+
+Create a script to add custom languages and custom catalog categories + +
+
+Custom language +
+

For this example, we are going to add a new language into the languages category. To do that we must create a script, which we name catalog-categories.js

+
+
+
+
// Find the Languages category.
+var category = _.find(window.OPENSHIFT_CONSTANTS.SERVICE_CATALOG_CATEGORIES,
+                      { id: 'languages' });
+// Add Go as a new subcategory under Languages.
+category.subCategories.splice(2,0,{ // Insert at the third spot.
+  // Required. Must be unique.
+  id: "devonfw-languages",
+  // Required.
+  label: "devonfw",
+  // Optional. If specified, defines a unique icon for this item.
+  icon: "devonfw-logo-language",
+  // Required. Items matching any tag will appear in this subcategory.
+  tags: [
+    "devonfw",
+    "devonfw-angular",
+    "devonfw-java"
+  ]
+});
+
+
+
+
+Custom category +
+

For this example, we are going to add a new category into the category tab. To do that we must create a script, which we name catalog-categories.js

+
+
+
+
// Add a Featured category as the first category tab.
+window.OPENSHIFT_CONSTANTS.SERVICE_CATALOG_CATEGORIES.unshift({
+  // Required. Must be unique.
+  id: "devonfw-featured",
+  // Required
+  label: "devonfw",
+  subCategories: [
+    {
+      // Required. Must be unique.
+      id: "devonfw-languages",
+      // Required.
+      label: "devonfw",
+      // Optional. If specified, defines a unique icon for this item.
+      icon: "devonfw-logo-language",
+      // Required. Items matching any tag will appear in this subcategory.
+      tags: [
+        "devonfw",
+        "devonfw-angular",
+        "devonfw-java"
+      ]
+    }
+  ]
+});
+
+
+
+
+Use our own javascript inside openshift +
+

To do that, we need to enter in openshift as an admin and use the next command:

+
+
+
+
$ oc login
+$ oc edit configmap/webconsole-config -n openshift-web-console
+
+
+
+

After this, we can see in our shell the webconsole-config.yaml, we only need to navigate until extensions and add the url for our own javascript in the scriptURLs section.

+
+
+

IMPORTANT: Scripts and stylesheets must be served with the correct content type or they will not be run by the browser. Scripts must be served with Content-Type: application/javascript.

+
+
+

In git repositories, the content type of raw is text/plain. You can use rawgit to convert a raw from a git repository to the correct content type.

+
+
+

Example:

+
+
+
+
webconsole-config.yaml: |
+  [...]
+    extensions:
+      scriptURLs:
+        - https://cdn.rawgit.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/scripts/catalog-categories.js
+  [...]
+
+
+
+
+Customize Openshift Origin v3.7 for devonfw +
+

This is a guide to customize Openshift cluster. For more information read the next:

+
+
+ +
+
+
+Images Styles +
+

The icons for templates must measure the same as below or the images don’t show right:

+
+
+
    +
  • +

    Openshift logo: 230px x 40px.

    +
  • +
  • +

    Template logo: 50px x 50px.

    +
  • +
  • +

    Category logo: 110px x 36px.

    +
  • +
+
+
+
+Quick Use +
+

This is a quick example to add custom icons and categories inside openshift.

+
+
+

To modify the icons inside openshift, we must modify the master-config.yaml of our openshift cluster. This file is inside the openshift container, and to obtain a copy of it we must know our openshift container name.

+
+
+
+Obtain the master-config.yaml of our openshift cluster + +
+
+== Obtain the name of our openshift container +
+

To obtain it, we can know it executing the next:

+
+
+
+
$ docker container ls
+CONTAINER ID        IMAGE                                           COMMAND                  CREATED             STATUS              PORTS                                     NAMES
+83a4e3acda5b        openshift/origin:v3.7.0                         "/usr/bin/openshift …"   6 days ago          Up 6 days                                                     origin
+
+
+
+

Here we can see that the name of the container is origin. Normally the container is called origin.

+
+
+
+== Copy the master-config.yaml of our openshift container to our directory +
+

This file is inside the openshift container in the next directory: /var/lib/origin/openshift.local.config/master/master-config.yaml and we can copy it with the next command:

+
+
+
+
$ docker cp origin:/var/lib/origin/openshift.local.config/master/master-config.yaml ./
+
+
+
+

Now we have a file with the configuration of our openshift cluster.

+
+
+
+Copy all customize files inside the openshift container +
+

To use our customization of devonfw Openshift, we need to copy our files inside the openshift container.

+
+
+

To do this we need to copy the images, scripts and stylesheets from here inside openshift +container, for example, we could put it all inside a folder called openshift.local.devonfw. On step one we obtained the name of this container; for this example we assume that it’s called origin. Then our images are located inside the openshift container and we can see and access them in /var/lib/origin/openshift.local.devonfw/images.

+
+
+
+
$ docker cp ./openshift.local.devonfw origin:/var/lib/origin/
+
+
+
+
+Edit and copy the master-config.yaml to use our customize files +
+

The master-config.yaml has sections to load our custom files. All these sections are inside the assetConfig and their names are the following:

+
+
+
    +
  • +

    The custom stylesheets are in extensionStylesheets.

    +
  • +
  • +

    The custom scripts are in extensionScripts.

    +
  • +
  • +

    The custom images are in extensions.

    +
  • +
+
+
+

To use all our custom elements only need to add the directory routes of each element in their appropriate section of the master-config.yaml

+
+
+
+
...
+assetConfig:
+  ...
+  extensionScripts:
+  - /var/lib/origin/openshift.local.devonfw/scripts/catalog-categories.js
+  extensionStylesheets:
+  - /var/lib/origin/openshift.local.devonfw/stylesheet/icons.css
+  extensions:
+  - name: images
+    sourceDirectory: /var/lib/origin/openshift.local.devonfw/images
+  ...
+...
+
+
+
+

Now we only need to copy that master-config.yaml inside openshift, and restart it to load the new configuration. To do that execute the next:

+
+
+
+
$ docker cp ./master-config.yaml origin:/var/lib/origin/openshift.local.config/master/master-config.yaml
+
+
+
+

To re-start openshift do oc cluster down and start again your persistent openshift cluster.

+
+
+
+More information +
+ +
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-azure-connection-strings.html b/docs/shop-floor/1.0/dsf-azure-connection-strings.html new file mode 100644 index 00000000..a200a670 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-azure-connection-strings.html @@ -0,0 +1,340 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

CONNECTION STRINGS

+
+
+

Once your database is created, you will need to connect that DB to your backend application, this can be made using connection strings.

+
+
+
+
+

== CREATE THE CONNECTION STRING

+
+
+

Go to the Azure portal and select the App Service that you want to connect with the DB. To be able to establish this connection, both your DB and your App Service must be under the same resource group.

+
+
+

E.g.

+
+
+
+resource group +
+
+
+

As we can see here, both the app service and the DB exist under the same resource group 'BW-dev'

+
+
+

Select your app service and go to 'settings > Configuration', scroll down looking for 'Connection strings' and click on "New connection string"

+
+
+
+appservice +
+
+
+

Put the name you want (we’ve put the name 'Context'; this name will be used later in your appSettings.json) and select the DB type. To fill the value box, go to 'Home>SQL databases', click on the target DB and click on 'Show database connection strings', copy the value that appears there and paste it in the value box.

+
+
+
+db +
+
+
+
+connection string +
+
+
+

Paste the connection string in the 'value' box and click OK

+
+
+

Your connection string has been created.

+
+
+
+
+

== USE THE CONNECTION STRING

+
+
+

Go to your project, open the file AppSettings.json and add the connection string

+
+
+
+
  "ConnectionStrings": {
+    "Context": "Source=(localdb)\\MSSQLLocalDB;Initial Catalog=my-db;Integrated Security=True;"
+  }
+
+
+
+

Context is the name that we choose for the connection string that we’ve created before and that value is only for local purposes.

+
+
+

When the application is deployed, the value for Context will be replaced with the value of the connection string that we’ve created in the earlier steps. Using this we avoid putting the user and the password into the code, and we use them as secrets that will be replaced in the deployment.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-azure-install-sonar-with-docker-in-a-virtual-machine.html b/docs/shop-floor/1.0/dsf-azure-install-sonar-with-docker-in-a-virtual-machine.html new file mode 100644 index 00000000..44743932 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-azure-install-sonar-with-docker-in-a-virtual-machine.html @@ -0,0 +1,447 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Connect to a Virtual Machine(VM) in Azure

+
+ +
+
+
+

== Pre-requisites

+
+
+

Have a VM created and a private key in order to connect to it

+
+
+
+
+

== Establish a connection

+
+
+

1- Open the client of your choice(putty,cmder,bash)

+
+
+

2- Ensure you have read-only access to the private key.

+
+
+
+
chmod 400 azureuser.pem
+
+
+
+

3- Run this command to connect to your VM

+
+
+
+
ssh -i <private key path> azureuser@51.103.78.61
+
+
+
+

note: To get the IP go to your azure portal, click on your VM, click on Networking and you will find the IP needed to establish the connection

+
+
+

You are connected:

+
+
+
+vm connection +
+
+
+
+
+

Install Sonar using Docker and Docker-compose

+
+
+

As an example we will use the practical case of Bad Weather, a project where we were asked to install Sonar inside a VM in Azure portal

+
+
+
+install sonar +
+
+
+

We had 2 possible scenarios, we went for the case A since no other service will be installed in this VM

+
+
+
+
+

== Steps

+
+
+

1- Install docker and docker compose in the VM

+
+
+
+
sudo dnf config-manager --add-repo=https://download.docker.com/linux/centos/docker-ce.repo
+sudo dnf list docker-ce
+sudo dnf install docker-ce --nobest -y
+sudo systemctl start docker
+sudo systemctl enable docker
+docker --version
+sudo dnf install curl -y
+sudo curl -L "https://github.com/docker/compose/releases/download/1.25.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
+sudo chmod +x /usr/local/bin/docker-compose
+sudo ln -s /usr/local/bin/docker-compose /usr/bin/docker-compose
+docker-compose --version
+
+
+
+

2- Deploy SonarQube and Postgress

+
+
+

2.1- Set necessary parameters for sonarqube

+
+
+
+
sudo sysctl -w vm.max_map_count=262144
+sudo sysctl -w fs.file-max=65536
+sudo ulimit -n 65536
+sudo ulimit -u 4096
+
+
+
+

2.2- Use docker-compose with the next definition to deploy it:

+
+
+

vim /home/sonar/docker-compose.yaml

+
+
+
+
version: "3"
+
+services:
+  sonarqube:
+    image: "sonarqube:7.9-community"
+    networks:
+      - sonar
+    environment:
+      - sonar.jdbc.username=user
+      - sonar.jdbc.password=pass
+      - sonar.jdbc.url=jdbc:postgresql://sonarqube-db:5432/sonar
+    ports:
+      - "80:9000"
+    depends_on:
+      - "sonarqube-db"
+    volumes:
+      - "$PWD/volumes/sonarqube/conf:/opt/sonarqube/conf"
+      - "$PWD/volumes/sonarqube/data:/opt/sonarqube/data"
+      - "$PWD/volumes/sonarqube/extensions:/opt/sonarqube/extensions"
+      - "$PWD/volumes/sonarqube/logs:/opt/sonarqube/logs"
+    ulimits:
+      nofile:
+        soft: 65536
+        hard: 65536
+  sonarqube-db:
+    image: "postgres:12-alpine"
+    networks:
+      - sonar
+    volumes:
+      - "$PWD/volumes/sonarqube-db/data:/var/lib/postgresql/data"
+    environment:
+      - POSTGRES_USER=youruser
+      - POSTGRES_PASSWORD=yourpass
+      - POSTGRES_DB=sonar
+      - PGDATA=/var/lib/postgresql/data
+
+networks:
+  sonar:
+    driver: bridge
+
+
+
+

3- Update the start configuration to set automatically the correct values and run the docker-compose

+
+
+
+
vim /usr/local/sbin/start.sh
+
+sysctl -w vm.max_map_count=262144
+sysctl -w fs.file-max=65536
+ulimit -n 65536
+ulimit -u 4096
+
+cd /home/sonar && docker-compose up -d
+
+
+
+

4- Add this to execute the docker-compose file every time the machine turns on

+
+
+
+
crontab -e
+@reboot /usr/local/sbin/start.sh
+
+vim /etc/sysctl.conf
+vm.max_map_count=262144
+fs.file-max=65536
+
+
+
+

Your Sonar is Up and running in your VM

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-azure-pipelines.html b/docs/shop-floor/1.0/dsf-azure-pipelines.html new file mode 100644 index 00000000..d7321221 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-azure-pipelines.html @@ -0,0 +1,514 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Create an Azure pipeline from scratch

+
+
+

The following steps will allow you to create a basic pipeline in Azure Devops from scratch

+
+
+

In order to deploy in Azure, we’ve created an automatic pipeline in Azure Devops that will be executed automatically when developers make a push to the Azure repositories. The pipeline will compile the code, build the application and ensure with automatic tests that the build is not going to break the application; to ensure good quality, the code will be analyzed by Sonar, as well as your code coverage, and last but not least, your application will be deployed using Azure App Services.

+
+
+
+
+

Steps

+
+
+

1- Sign in to your Azure DevOps organization and navigate to your project.

+
+
+

2- Go to Pipelines, and then select New Pipeline.

+
+
+

3- Choose the location of your source code(Github, Bitbucket,Azure repos..etc), in this case we have our code in Azure Repos Git.

+
+
+

A list of your repositories will be shown here:

+
+
+

4- When the list of repositories appears, select your repository.

+
+
+

Depending on your project type(Java, .NET, Python or JavaScript) the following configuration will change, in this case our project is a .NET, for more type of projects please follow the official documentation.

+
+
+

5- When the Configure tab appears, select ASP.NET Core(or the one according to your project)

+
+
+
+configuration +
+
+
+

6- A .yaml file in your ./ location will be generated with all the required steps to run your pipeline. +The name of this .yaml file is 'azure-pipelines.yaml', which is the default name that will be used in your pipeline settings.

+
+
+

Note: If you change the name or the location, you will need to specify in the pipeline settings the new name or location:

+
+
+
+pipeline settings +
+
+
+

The pipeline is created with the minimum required steps to run it which are the following:

+
+
+
+
+

TRIGGERS

+
+
+

Triggers that will activate the pipeline execution

+
+
+
+
trigger:
+- master
+- develop
+
+
+
+
+
+

VARIABLES

+
+
+

Variables that will be used in the next steps

+
+
+
+
variables:
+  solution: '**/*.sln'
+  buildPlatform: 'Any CPU'
+  buildConfiguration: 'Release'
+
+
+
+
+
+

TOOLS AND LIBRARIES

+
+
+

For .NET:

+
+
+

-NuGet Tool Installer task:

+
+
+
+
- task: NuGetToolInstaller@1
+
+
+
+

Use this task to find, download, and cache a specified version of NuGet and add it to the PATH.

+
+
+

-The NuGet command to run:

+
+
+
+
- task: NuGetCommand@2
+  inputs:
+    restoreSolution: '$(solution)'
+
+
+
+

The NuGet command to run.

+
+
+

For more info use the official documentation.

+
+
+
+
+

BUILD

+
+
+

-Visual Studio Build task:

+
+
+
+
- task: VSBuild@1
+  inputs:
+    solution: '$(solution)'
+    msbuildArgs: '/p:DeployOnBuild=true /p:WebPublishMethod=Package /p:PackageAsSingleFile=true /p:SkipInvalidConfigurations=true /p:DesktopBuildPackageLocation="$(build.artifactStagingDirectory)\WebApp.zip" /p:DeployIisAppPath="Default Web Site"'
+    platform: '$(buildPlatform)'
+    configuration: '$(buildConfiguration)'
+
+
+
+

Use this task to build with MSBuild and set the Visual Studio version property.

+
+
+

For more info use the official documentation

+
+
+
+
+

TEST

+
+
+

-Visual Studio Test task:

+
+
+
+
- task: DotNetCoreCLI@2
+  inputs:
+    command: 'test'
+    arguments: '/p:CollectCoverage=true /p:CoverletOutputFormat=opencover /p:CoverletOutput=$(Agent.TempDirectory)/'
+    projects: '$(solution)'
+    publishTestResults: true
+  continueOnError: false
+  displayName: 'Dot Net Core CLI Test'
+
+
+
+

Use this task to run unit and functional tests (Selenium, Appium, Coded UI test, and more) using the Visual Studio Test Runner.

+
+
+

For more info use the official documentation

+
+
+

These steps are the ones generated when your pipeline is created; we can create the ones we need using the Azure Devops wizard in an easy way.

+
+
+

In our case, apart from build and test, we also need to deploy

+
+
+
+
+

DEPLOY

+
+ +
+
+
+

App Services

+
+
+

While deploying with App Services, 2 steps are required:

+
+
+
+
+

== Step 1: Publish

+
+
+

Use this task in a pipeline to publish artifacts for the Azure Pipeline

+
+
+
+
- task: PublishPipelineArtifact@0
+  inputs:
+    artifactName: 'Bad_Weather_Backend'
+    targetPath: '$(Build.ArtifactStagingDirectory)'
+
+
+
+

To know more about the use of predefined variables in azure take a look at the documentation

+
+
+
+
+

== Step 2: Deployment

+
+
+

Use this task to deploy to a range of App Services on Azure

+
+
+
+
- task: AzureRmWebAppDeployment@4
+  inputs:
+    ConnectionType: 'AzureRM'
+    azureSubscription: 'bad-weather-poc-rs-bw-dev'
+    appType: 'webApp'
+    WebAppName: 'bwbackendbe'
+    packageForLinux: '$(build.artifactStagingDirectory)\WebApp.zip'
+
+
+
+

This task has 2 prerequisites:

+
+
+

1-App Service instance:

+
+
+

The task is used to deploy a Web App project or Azure Function project to an existing Azure App Service instance, which must exist before the task runs.

+
+
+

2-Azure Subscription:

+
+
+

In order to deploy to Azure, an Azure subscription must be linked to the pipeline.

+
+
+

To know more about the input arguments for this task, make use of the official documentation

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-azure-sonarqube-integration.html b/docs/shop-floor/1.0/dsf-azure-sonarqube-integration.html new file mode 100644 index 00000000..b4ff32f2 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-azure-sonarqube-integration.html @@ -0,0 +1,380 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Integrate the SonarQube plugin in an Azure DevOps pipeline

+
+
+

The purpose of this readme is that you can configure your Azure Devops pipeline in order to be able to run a code analysis, analyse the code coverage and publish the results through the Sonar plugin.

+
+
+
+
+

How to do it

+
+ +
+
+
+

== Step 1: Create a service connection

+
+
+

The first thing to do is to declare your SonarQube server as a service endpoint in your Azure DevOps project settings.

+
+
+

Go to project settings → pipelines → service connections and create and choose 'SonarQube'.

+
+
+

Create service connection

+
+
+

Specify the server url and the connection name of your SonarQube server and the token Auth +Go to your SonarQube server and log in as admin, +once inside, go to administration → Security → Users → Administrator → Tokens→ And generate the token. +Copy the generated token(once created it will never appear again so don’t lose it) and paste it and click on save .

+
+
+
+ServiceConnection +
+
+
+

The service connection has been created. +Once this step is done your service creation will appear now in the service connections side bar.

+
+
+

For more info regarding the Authentication part please read the official documentation

+
+
+
+
+

== Step 2: Add the required tasks in the azure pipeline

+
+
+

In order to integrate SonarQube in the pipeline, 3 steps or tasks are required (depending on the different solutions like .NET, Java, C, etc. some of these tasks can be optional); these tasks are:

+
+
+

Prepare Analysis configuration +Run Code Analysis +Publish Quality Gate result

+
+
+

We can use the wizard to create this in an easy way, search "SonarQube" and let’s configure the tasks one by one.

+
+
+

Prepare Analysis configuration:

+
+
+

Fill the required fields and click on add

+
+
+

The prepare task will be now shown in the pipeline code:

+
+
+
+sonarprepare +
+
+
+

Follow the official documentation if you have doubts while filling the fields:

+
+
+

Once the prepare is done, continue with the code analysis.

+
+
+

Run Code Analysis

+
+
+

Select this from the task assistant and just like happened with the first task, the code will appear in your pipeline.

+
+
+
+runAnalysis +
+
+
+

Now, let’s publish the result of the analysis.

+
+
+

Publish quality gate result

+
+
+

Same as we did before, select in the display the publish extension and add it

+
+
+
+publish +
+
+
+
+
+

== Step 3: Run the pipeline

+
+
+

With this, all the required steps to integrate SonarQube in your Azure DevOps pipeline are done, the last thing you need to do is run your pipeline and your code will be analyzed and the results published.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-configure-dockerfile.html b/docs/shop-floor/1.0/dsf-configure-dockerfile.html new file mode 100644 index 00000000..88ebe2ab --- /dev/null +++ b/docs/shop-floor/1.0/dsf-configure-dockerfile.html @@ -0,0 +1,308 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

Dockerfile

+
+
+

You have examples of dockerfiles in cicdgen repository.

+
+
+

Inside these folders you could find all the files that you need to use those dockerfiles. Two dockerfiles are provided, Dockerfile and Dockerfile.ci: the first one is to compile the code and create the docker image, used normally in local, and Dockerfile.ci is to use in Jenkins or similar, after building the application.

+
+
+ +
+
+ + + + + +
+ + +Dockerfile.ci should be copied to the artifacts and renamed as Dockerfile to work. In the case of devon4ng and devon4node this is the dist folder; in the case of devon4j it is the server/target folder. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-configure-gitlab.html b/docs/shop-floor/1.0/dsf-configure-gitlab.html new file mode 100644 index 00000000..e39ebe55 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-configure-gitlab.html @@ -0,0 +1,312 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

GitLab Configuration

+
+ +
+
+
+

Create new repository

+
+
+

To create a new project in GitLab, go to your dashboard and click the green New project button or use the plus icon in the navigation bar.

+
+
+
+gitlab new prject +
+
+
+

This opens the New project page. Choose your group and fill the name of your project, the description and the visibility level in the next form:

+
+
+
+gitlab new prject form +
+
+
+ + + + + +
+ + +more information about how to create projects in GitLab in the official documentation +
+
+
+
+
+

Service integration

+
+
+

To learn how to configure the integration between GitLab and Jenkins see the next example

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-configure-jenkins-build-monitor-view.html b/docs/shop-floor/1.0/dsf-configure-jenkins-build-monitor-view.html new file mode 100644 index 00000000..1bf9996d --- /dev/null +++ b/docs/shop-floor/1.0/dsf-configure-jenkins-build-monitor-view.html @@ -0,0 +1,341 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Build monitor view

+
+
+

With this tool you will be able to see in real time the state of your Jenkins pipelines.

+
+
+
+
+

Prerequisites

+
+ +
+
+
+

Add build monitor view plugin

+
+
+

To integrate it, you need to have installed the build monitor view. To install it go to Manage Jenkins clicking on left menu and enter in Manage Plugins. Go to Available tab and search it using the filter textbox in the top right corner and install it.

+
+
+
+
+

How to use it

+
+
+

When you have build monitor view installed, you could add a new view clicking on the + tab in the top bar.

+
+
+
+jenkins new view +
+
+
+

Now you need to fill in the name that you are going to give to your view and select the Build Monitor View option.

+
+
+
+jenkins build monitor view add +
+
+
+

Then you can see the configuration.

+
+
+
+jenkins build monitor view configuration +
+
+
+

In the Job Filters section you can specify which resources are going to be shown and whether subfolders should be included in the search.

+
+
+

In Build Monitor - View Settings you could specify which is the name at the top of the view and what is the ordering criterion.

+
+
+

In Build Monitor - Widget Settings you could specify if you want to show the committers and which is the field to display if it fails.

+
+
+

And this is the output:

+
+
+
+jenkins build monitor view output +
+
+
+

You could limit the columns and the text scale clicking on the gear button at the right top corner.

+
+
+
+jenkins build monitor view output config +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-configure-jenkins.html b/docs/shop-floor/1.0/dsf-configure-jenkins.html new file mode 100644 index 00000000..dcd59481 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-configure-jenkins.html @@ -0,0 +1,904 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Jenkinsfile

+
+ +
+
+
+

Introduction

+
+
+
+jenkinsfile cicd activity diagram +
+
+
+

Here you are going to learn how you should configure the jenkinsfile of your project to apply CI/CD operations and enables automated application deployment.

+
+
+

Here you can find examples of the Jenkinsfile generated by cicdgen:

+
+
+ +
+
+

Next you could find an explanation about what is done in these Jenkinsfiles.

+
+
+
+
+

Environment values

+
+
+

At the top of the pipeline you should add the environment variables. In this tutorial you need the following variables:

+
+
+
+
    // sonarQube
+    // Name of the sonarQube tool
+    sonarTool = 'SonarQube'
+    // Name of the sonarQube environment
+    sonarEnv = "SonarQube"
+
+    // Nexus
+    // Artifact groupId
+    groupId = '<%= groupid %>'
+    // Nexus repository ID
+    repositoryId = 'pl-nexus'
+    // Nexus internal URL
+    repositoryUrl = 'http://nexus3-core:8081/nexus3/repository/'
+    // Maven global settings configuration ID
+    globalSettingsId = 'MavenSettings'
+    // Maven tool id
+    mavenInstallation = 'Maven3'
+
+    // Docker registry
+    dockerRegistry = 'docker-registry-<%= plurl %>'
+    dockerRegistryCredentials = 'nexus-docker'
+    dockerTool = 'docker-global'
+
+    // OpenShift
+    openshiftUrl = '<%= ocurl %>'
+    openShiftCredentials = 'openshift'
+    openShiftNamespace = '<%= ocn %>'
+
+
+
+
+
+

Stages

+
+
+

The pipeline consists of stages, and at the beginning of each stage it is declared for which branches the step will be executed.

+
+
+
+jenkinsfile stages +
+
+
+

Now it is time to create the stages.

+
+
+
+
+

Setup Jenkins tools

+
+
+

The first stage is one of the most dangerous, because in it on one hand the tools are added to the pipeline and to the path and on other hand the values are tagged depending on the branch that is being executed. If you are going to create a ci/cd for a new branch or you are going to modify something, be very careful with everything that this first step declares.

+
+
+

This is an example of this stage:

+
+
+
+
script {
+    tool yarn
+    tool Chrome-stable
+    tool dockerTool
+
+    if (env.BRANCH_NAME.startsWith('release')) {
+        dockerTag = "release"
+        repositoryName = 'maven-releases'
+        dockerEnvironment = "_uat"
+        openShiftNamespace += "-uat"
+        sonarProjectKey = '-release'
+    }
+
+    if (env.BRANCH_NAME ==  'develop') {
+        dockerTag = "latest"
+        repositoryName = 'maven-snapshots'
+        dockerEnvironment = "_dev"
+        openShiftNamespace += "-dev"
+        sonarProjectKey = '-develop'
+    }
+
+    if (env.BRANCH_NAME ==  'master') {
+        dockerTag = "production"
+        repositoryName = 'maven-releases'
+        dockerEnvironment = '_prod'
+        openShiftNamespace += "-prod"
+        sonarProjectKey = ''
+    }
+
+    sh "yarn"
+}
+
+
+
+
+
+

Code lint analysis

+
+
+

The next stage is to analyze the code making a lint analysis. To do it your project should have a tslint file with the configuration (tslint.json).

+
+
+

Analyzing the code in your pipeline is as simple as executing the following command:

+
+
+
+
sh """yarn lint"""
+
+
+
+ + + + + +
+ + +Your project need to have an script with tslint configuration (tslint.json). +
+
+
+
+
+

Execute tests

+
+
+

To test you application first of all your application should have created the tests and you should use one of the next two options:

+
+
+

Execute test with maven (It should be used by devon4j).

+
+
+
+
withMaven(globalMavenSettingsConfig: globalSettingsId, maven: mavenInstallation) {
+    sh "mvn clean test"
+}
+
+
+
+

Execute test with yarn (It should be used by devon4ng or devon4node).

+
+
+
+
sh """yarn test:ci"""
+
+
+
+ + + + + +
+ + +Remember that your project should have the tests created and, in case you do it with yarn or npm, your package.json should have the script declared. This is an example "test:ci": "ng test --browsers ChromeHeadless --watch=false". +
+
+
+
+
+

SonarQube Analisys

+
+
+

It is time to see if your application complies with the requirements of the sonar analysis.

+
+
+

To do it you could use one of the next two options:

+
+
+

Execute Sonar with sonarTool (It should be used by devon4ng or devon4node).

+
+
+
+
script {
+    def scannerHome = tool sonarTool
+    def props = readJSON file: 'package.json'
+    withSonarQubeEnv(sonarEnv) {
+        sh """
+            ${scannerHome}/bin/sonar-scanner \
+                -Dsonar.projectKey=${props.name}${sonarProjectKey} \
+                -Dsonar.projectName=${props.name}${sonarProjectKey} \
+                -Dsonar.projectVersion=${props.version} \
+                -Dsonar.sources=${srcDir} \
+                -Dsonar.typescript.lcov.reportPaths=coverage/lcov.info
+        """
+    }
+    timeout(time: 1, unit: 'HOURS') {
+        def qg = waitForQualityGate()
+        if (qg.status != 'OK') {
+            error "Pipeline aborted due to quality gate failure: ${qg.status}"
+        }
+    }
+}
+
+
+
+

Execute Sonar with maven (It should be used by devon4j).

+
+
+
+
script {
+    withMaven(globalMavenSettingsConfig: globalSettingsId, maven: mavenInstallation) {
+        withSonarQubeEnv(sonarEnv) {
+            // Change the project name (in order to simulate branches with the free version)
+            sh "cp pom.xml pom.xml.bak"
+            sh "cp api/pom.xml api/pom.xml.bak"
+            sh "cp core/pom.xml core/pom.xml.bak"
+            sh "cp server/pom.xml server/pom.xml.bak"
+
+            def pom = readMavenPom file: './pom.xml';
+            pom.artifactId = "${pom.artifactId}${sonarProjectKey}"
+            writeMavenPom model: pom, file: 'pom.xml'
+
+            def apiPom = readMavenPom file: 'api/pom.xml'
+            apiPom.parent.artifactId = pom.artifactId
+            apiPom.artifactId = "${pom.artifactId}-api"
+            writeMavenPom model: apiPom, file: 'api/pom.xml'
+
+            def corePom = readMavenPom file: 'core/pom.xml'
+            corePom.parent.artifactId = pom.artifactId
+            corePom.artifactId = "${pom.artifactId}-core"
+            writeMavenPom model: corePom, file: 'core/pom.xml'
+
+            def serverPom = readMavenPom file: 'server/pom.xml'
+            serverPom.parent.artifactId = pom.artifactId
+            serverPom.artifactId = "${pom.artifactId}-server"
+            writeMavenPom model: serverPom, file: 'server/pom.xml'
+
+            sh "mvn sonar:sonar"
+
+            sh "mv pom.xml.bak pom.xml"
+            sh "mv api/pom.xml.bak api/pom.xml"
+            sh "mv core/pom.xml.bak core/pom.xml"
+            sh "mv server/pom.xml.bak server/pom.xml"
+        }
+    }
+    timeout(time: 1, unit: 'HOURS') {
+        def qg = waitForQualityGate()
+        if (qg.status != 'OK') {
+            error "Pipeline aborted due to quality gate failure: ${qg.status}"
+        }
+    }
+}
+
+
+
+
+
+

Build

+
+
+

If SonarQube is passed, you could build your application. To do it, if you are using devon4ng or devon4node you only need to add the next command:

+
+
+

sh """yarn build"""

+
+
+ + + + + +
+ + +If you are using devon4j this and the next step Store in Nexus are making together using mvn deploy. +
+
+
+
+
+

Store in Nexus

+
+
+

Once the application has been built, you could find the artifacts stored in the dist folder. You should push these artifacts to store them in Nexus.

+
+
+

You can do it following one of the next options:

+
+
+

Use maven deploy config of your project (It should be used by devon4j).

+
+
+
+
withMaven(globalMavenSettingsConfig: globalSettingsId, maven: mavenInstallation) {
+    sh "mvn deploy -Dmaven.test.skip=true"
+}
+
+
+
+

Configure maven deploy in your pipeline (It should be used by devon4ng and devon4node).

+
+
+
+
script {
+    def props = readJSON file: 'package.json'
+    zip dir: 'dist/', zipFile: """${props.name}.zip"""
+    version = props.version
+    if (!version.endsWith("-SNAPSHOT") && env.BRANCH_NAME ==  'develop') {
+        version = "${version}-SNAPSHOT"
+        version = version.replace("-RC", "")
+    }
+
+    if (!version.endsWith("-RC") && env.BRANCH_NAME.startsWith('release')) {
+        version = "${version}-RC"
+        version = version.replace("-SNAPSHOT", "")
+    }
+
+    if (env.BRANCH_NAME ==  'master' && (version.endsWith("-RC") || version.endsWith("-SNAPSHOT"))){
+        version = version.replace("-RC", "")
+        version = version.replace("-SNAPSHOT", "")
+    }
+
+    withMaven(globalMavenSettingsConfig: globalSettingsId, maven: mavenInstallation) {
+        sh """
+            mvn deploy:deploy-file \
+                -DgroupId=${groupId} \
+                -DartifactId=${props.name} \
+                -Dversion=${version} \
+                -Dpackaging=zip \
+                -Dfile=${props.name}.zip \
+                -DrepositoryId=${repositoryId} \
+                -Durl=${repositoryUrl}${repositoryName}
+        """
+    }
+}
+
+
+
+
+
+

Create docker image

+
+
+

Now we need to use these artifacts to create a Docker image. To create the docker image you need an external server to do it. You could do it using one of the next:

+
+
+

Create docker image using OpenShift cluster

+
+
+

To create the docker image with this option you need to configure your OpenShift. You could read how to configure it here.

+
+
+
+
props = readJSON file: 'package.json'
+withCredentials([usernamePassword(credentialsId: "${openShiftCredentials}", passwordVariable: 'pass', usernameVariable: 'user')]) {
+    sh "oc login -u ${user} -p ${pass} ${openshiftUrl} --insecure-skip-tls-verify"
+    try {
+        sh "oc start-build ${props.name} --namespace=${openShiftNamespace} --from-dir=dist --wait"
+        sh "oc import-image ${props.name} --namespace=${openShiftNamespace} --from=${dockerRegistry}/${props.name}:${dockerTag} --confirm"
+    } catch (e) {
+        sh """
+            oc logs \$(oc get builds -l build=${props.name} --namespace=${openShiftNamespace} --sort-by=.metadata.creationTimestamp -o name | tail -n 1) --namespace=${namespace}
+            throw e
+        """
+    }
+}
+
+
+
+ + + + + +
+ + +if your project is a maven project you should read the pom.xml file instead of the package.json, you could do it with the next command def pom = readMavenPom file: 'pom.xml'. Due to the fact that there are different variable names between those two files, remember to modify ${props.name} for ${pom.artifactId} in the code. +
+
+
+

Create docker image using docker server

+
+
+

To create the docker image with this option you need to install docker and configure where is the docker host in your jenkins.

+
+
+
+
docker.withRegistry("""${dockerRegistryProtocol}${dockerRegistry}""", dockerRegistryCredentials) {
+    def props = readJSON file: 'package.json'
+    def customImage = docker.build("${props.name}:${props.version}", "-f ${dockerFileName} .")
+    customImage.push()
+    customImage.push(dockerTag);
+}
+
+
+
+

here

+
+
+ + + + + +
+ + +if your project is a maven project you should read the pom.xml file instead of the package.json, you could do it with the next command def pom = readMavenPom file: 'pom.xml'. Due to the fact that there are different variable names between those two files, remember to modify ${props.name} for ${pom.artifactId} and ${props.version} for ${pom.version} in the code. +
+
+
+
+
+

Deploy docker image

+
+
+

Once you have the docker image in the registry we only need to import it into your deployment environment. We can do it executing one of the next commands:

+
+
+

Deploy docker image in OpenShift cluster

+
+
+

To deploy the docker image with this option you need to configure your OpenShift. You could read how to configure it here.

+
+
+
+
script {
+    props = readJSON file: 'package.json'
+    withCredentials([usernamePassword(credentialsId: "${openShiftCredentials}", passwordVariable: 'pass', usernameVariable: 'user')]) {
+        sh "oc login -u ${user} -p ${pass} ${openshiftUrl} --insecure-skip-tls-verify"
+        try {
+            sh "oc import-image ${props.name} --namespace=${openShiftNamespace} --from=${dockerRegistry}/${props.name}:${dockerTag} --confirm"
+        } catch (e) {
+            sh """
+                oc logs \$(oc get builds -l build=${props.name} --namespace=${openShiftNamespace} --sort-by=.metadata.creationTimestamp -o name | tail -n 1) --namespace=${openShiftNamespace}
+                throw e
+            """
+        }
+    }
+}
+
+
+
+ + + + + +
+ + +if your project is a maven project you should read the pom.xml file instead of the package.json, you could do it with the next command def pom = readMavenPom file: 'pom.xml'. Due to the fact that there are different variable names between those two files, remember to modify ${props.name} for ${pom.artifactId} in the code. +
+
+
+

Deploy docker image using docker server

+
+
+

To deploy the docker image with this option you need to install docker and configure your docker server and also integrate it with Jenkins.

+
+
+
+
script {
+    docker.withRegistry("""${dockerRegistryProtocol}${dockerRegistry}""", dockerRegistryCredentials) {
+        def props = readJSON file: 'package.json'
+        docker.image("${props.name}:${props.version}").pull()
+
+        def containerId = sh returnStdout: true, script: """docker ps -aqf "name=${containerName}${dockerEnvironment}" """
+        if (containerId?.trim()) {
+            sh "docker rm -f ${containerId.trim()}"
+        }
+
+        println """docker run -d --name ${containerName}${dockerEnvironment} --network=${networkName} ${dockerRegistry}/${props.name}:${props.version}"""
+        sh """docker run -d --name ${containerName}${dockerEnvironment} --network=${networkName} ${dockerRegistry}/${props.name}:${props.version}"""
+    }
+}
+
+
+
+ + + + + +
+ + +if your project is a maven project you should read the pom.xml file instead of the package.json, you could do it with the next command def pom = readMavenPom file: 'pom.xml'. Due to the fact that there are different variable names between those two files, remember to modify ${props.name} for ${pom.artifactId} and ${props.version} for ${pom.version} in the code. +
+
+
+
+
+

Check status

+
+
+

Now it is time to check if your pods are running ok.

+
+
+

To check if your pods are ok in OpenShift you should add the next code to your pipeline:

+
+
+
+
script {
+    props = readJSON file: 'package.json'
+    sleep 30
+    withCredentials([usernamePassword(credentialsId: "${openShiftCredentials}", passwordVariable: 'pass', usernameVariable: 'user')]) {
+        sh "oc login -u ${user} -p ${pass} ${openshiftUrl} --insecure-skip-tls-verify"
+        sh "oc project ${openShiftNamespace}"
+
+        def oldRetry = -1;
+        def oldState = "";
+
+        sh "oc get pods -l app=${props.name} > out"
+        def status = sh (
+            script: "sed 's/[\t ][\t ]*/ /g' < out | sed '2q;d' | cut -d' ' -f3",
+            returnStdout: true
+        ).trim()
+
+        def retry = sh (
+            script: "sed 's/[\t ][\t ]*/ /g' < out | sed '2q;d' | cut -d' ' -f4",
+            returnStdout: true
+        ).trim().toInteger();
+
+        while (retry < 5 && (oldRetry != retry || oldState != status)) {
+            sleep 30
+            oldRetry = retry
+            oldState = status
+
+            sh """oc get pods -l app=${props.name} > out"""
+            status = sh (
+                script: "sed 's/[\t ][\t ]*/ /g' < out | sed '2q;d' | cut -d' ' -f3",
+                returnStdout: true
+            ).trim()
+
+            retry = sh (
+                script: "sed 's/[\t ][\t ]*/ /g' < out | sed '2q;d' | cut -d' ' -f4",
+                returnStdout: true
+            ).trim().toInteger();
+        }
+
+        if(status != "Running"){
+            try {
+                sh """oc logs \$(oc get pods -l app=${props.name} --sort-by=.metadata.creationTimestamp -o name | tail -n 1)"""
+            } catch (e) {
+                sh "echo error reading logs"
+            }
+            error("The pod is not running, cause: " + status)
+        }
+    }
+}
+
+
+
+
+
+

Post operations

+
+
+

When everything is finished, remember to clean your workspace.

+
+
+

post { + cleanup { + cleanWs() + } +}

+
+
+ + + + + +
+ + +You could also delete your dir adding the next command deleteDir(). +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-configure-jenkinsfile.html b/docs/shop-floor/1.0/dsf-configure-jenkinsfile.html new file mode 100644 index 00000000..30fb5980 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-configure-jenkinsfile.html @@ -0,0 +1,904 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Jenkinsfile

+
+ +
+
+
+

Introduction

+
+
+
+jenkinsfile cicd activity diagram +
+
+
+

Here you are going to learn how you should configure the jenkinsfile of your project to apply CI/CD operations and enables automated application deployment.

+
+
+

Here you can find examples of the Jenkinsfile generated by cicdgen:

+
+
+ +
+
+

Next you could find an explanation about what is done in these Jenkinsfiles.

+
+
+
+
+

Environment values

+
+
+

At the top of the pipeline you should add the environment variables. In this tutorial you need the following variables:

+
+
+
+
    // sonarQube
+    // Name of the sonarQube tool
+    sonarTool = 'SonarQube'
+    // Name of the sonarQube environment
+    sonarEnv = "SonarQube"
+
+    // Nexus
+    // Artifact groupId
+    groupId = '<%= groupid %>'
+    // Nexus repository ID
+    repositoryId = 'pl-nexus'
+    // Nexus internal URL
+    repositoryUrl = 'http://nexus3-core:8081/nexus3/repository/'
+    // Maven global settings configuration ID
+    globalSettingsId = 'MavenSettings'
+    // Maven tool id
+    mavenInstallation = 'Maven3'
+
+    // Docker registry
+    dockerRegistry = 'docker-registry-<%= plurl %>'
+    dockerRegistryCredentials = 'nexus-docker'
+    dockerTool = 'docker-global'
+
+    // OpenShift
+    openshiftUrl = '<%= ocurl %>'
+    openShiftCredentials = 'openshift'
+    openShiftNamespace = '<%= ocn %>'
+
+
+
+
+
+

Stages

+
+
+

The pipeline consists of stages, and at the beginning of each stage it is declared for which branches the step will be executed.

+
+
+
+jenkinsfile stages +
+
+
+

Now it is time to create the stages.

+
+
+
+
+

Setup Jenkins tools

+
+
+

The first stage is one of the most dangerous, because in it on one hand the tools are added to the pipeline and to the path and on other hand the values are tagged depending on the branch that is being executed. If you are going to create a ci/cd for a new branch or you are going to modify something, be very careful with everything that this first step declares.

+
+
+

This is an example of this stage:

+
+
+
+
script {
+    tool yarn
+    tool Chrome-stable
+    tool dockerTool
+
+    if (env.BRANCH_NAME.startsWith('release')) {
+        dockerTag = "release"
+        repositoryName = 'maven-releases'
+        dockerEnvironment = "_uat"
+        openShiftNamespace += "-uat"
+        sonarProjectKey = '-release'
+    }
+
+    if (env.BRANCH_NAME ==  'develop') {
+        dockerTag = "latest"
+        repositoryName = 'maven-snapshots'
+        dockerEnvironment = "_dev"
+        openShiftNamespace += "-dev"
+        sonarProjectKey = '-develop'
+    }
+
+    if (env.BRANCH_NAME ==  'master') {
+        dockerTag = "production"
+        repositoryName = 'maven-releases'
+        dockerEnvironment = '_prod'
+        openShiftNamespace += "-prod"
+        sonarProjectKey = ''
+    }
+
+    sh "yarn"
+}
+
+
+
+
+
+

Code lint analysis

+
+
+

The next stage is to analyze the code making a lint analysis. To do it your project should have a tslint file with the configuration (tslint.json).

+
+
+

Analyzing the code in your pipeline is as simple as executing the following command:

+
+
+
+
sh """yarn lint"""
+
+
+
+ + + + + +
+ + +Your project need to have an script with tslint configuration (tslint.json). +
+
+
+
+
+

Execute tests

+
+
+

To test your application, first of all your application should have the tests created, and you should use one of the next two options:

+
+
+

Execute test with maven (It should be used by devon4j).

+
+
+
+
withMaven(globalMavenSettingsConfig: globalSettingsId, maven: mavenInstallation) {
+    sh "mvn clean test"
+}
+
+
+
+

Execute test with yarn (It should be used by devon4ng or devon4node).

+
+
+
+
sh """yarn test:ci"""
+
+
+
+ + + + + +
+ + +Remember that your project should have the tests created and, in case you do it with yarn or npm, your package.json should have the script declared. This is an example "test:ci": "ng test --browsers ChromeHeadless --watch=false". +
+
+
+
+
+

SonarQube Analisys

+
+
+

It is time to see if your application complies with the requirements of the sonar analysis.

+
+
+

To do it you could use one of the next two options:

+
+
+

Execute Sonar with sonarTool (It should be used by devon4ng or devon4node).

+
+
+
+
script {
+    def scannerHome = tool sonarTool
+    def props = readJSON file: 'package.json'
+    withSonarQubeEnv(sonarEnv) {
+        sh """
+            ${scannerHome}/bin/sonar-scanner \
+                -Dsonar.projectKey=${props.name}${sonarProjectKey} \
+                -Dsonar.projectName=${props.name}${sonarProjectKey} \
+                -Dsonar.projectVersion=${props.version} \
+                -Dsonar.sources=${srcDir} \
+                -Dsonar.typescript.lcov.reportPaths=coverage/lcov.info
+        """
+    }
+    timeout(time: 1, unit: 'HOURS') {
+        def qg = waitForQualityGate()
+        if (qg.status != 'OK') {
+            error "Pipeline aborted due to quality gate failure: ${qg.status}"
+        }
+    }
+}
+
+
+
+

Execute Sonar with maven (It should be used by devon4j).

+
+
+
+
script {
+    withMaven(globalMavenSettingsConfig: globalSettingsId, maven: mavenInstallation) {
+        withSonarQubeEnv(sonarEnv) {
+            // Change the project name (in order to simulate branches with the free version)
+            sh "cp pom.xml pom.xml.bak"
+            sh "cp api/pom.xml api/pom.xml.bak"
+            sh "cp core/pom.xml core/pom.xml.bak"
+            sh "cp server/pom.xml server/pom.xml.bak"
+
+            def pom = readMavenPom file: './pom.xml';
+            pom.artifactId = "${pom.artifactId}${sonarProjectKey}"
+            writeMavenPom model: pom, file: 'pom.xml'
+
+            def apiPom = readMavenPom file: 'api/pom.xml'
+            apiPom.parent.artifactId = pom.artifactId
+            apiPom.artifactId = "${pom.artifactId}-api"
+            writeMavenPom model: apiPom, file: 'api/pom.xml'
+
+            def corePom = readMavenPom file: 'core/pom.xml'
+            corePom.parent.artifactId = pom.artifactId
+            corePom.artifactId = "${pom.artifactId}-core"
+            writeMavenPom model: corePom, file: 'core/pom.xml'
+
+            def serverPom = readMavenPom file: 'server/pom.xml'
+            serverPom.parent.artifactId = pom.artifactId
+            serverPom.artifactId = "${pom.artifactId}-server"
+            writeMavenPom model: serverPom, file: 'server/pom.xml'
+
+            sh "mvn sonar:sonar"
+
+            sh "mv pom.xml.bak pom.xml"
+            sh "mv api/pom.xml.bak api/pom.xml"
+            sh "mv core/pom.xml.bak core/pom.xml"
+            sh "mv server/pom.xml.bak server/pom.xml"
+        }
+    }
+    timeout(time: 1, unit: 'HOURS') {
+        def qg = waitForQualityGate()
+        if (qg.status != 'OK') {
+            error "Pipeline aborted due to quality gate failure: ${qg.status}"
+        }
+    }
+}
+
+
+
+
+
+

Build

+
+
+

If SonarQube is passed, you could build your application. To do it, if you are using devon4ng or devon4node you only need to add the next command:

+
+
+

sh """yarn build"""

+
+
+ + + + + +
+ + +If you are using devon4j this and the next step Store in Nexus are making together using mvn deploy. +
+
+
+
+
+

Store in Nexus

+
+
+

Once the application has been built, you could find the artifacts stored in the dist folder. You should push these artifacts to store them in Nexus.

+
+
+

You can do it following one of the next options:

+
+
+

Use maven deploy config of your project (It should be used by devon4j).

+
+
+
+
withMaven(globalMavenSettingsConfig: globalSettingsId, maven: mavenInstallation) {
+    sh "mvn deploy -Dmaven.test.skip=true"
+}
+
+
+
+

Configure maven deploy in your pipeline (It should be used by devon4ng and devon4node).

+
+
+
+
script {
+    def props = readJSON file: 'package.json'
+    zip dir: 'dist/', zipFile: """${props.name}.zip"""
+    version = props.version
+    if (!version.endsWith("-SNAPSHOT") && env.BRANCH_NAME ==  'develop') {
+        version = "${version}-SNAPSHOT"
+        version = version.replace("-RC", "")
+    }
+
+    if (!version.endsWith("-RC") && env.BRANCH_NAME.startsWith('release')) {
+        version = "${version}-RC"
+        version = version.replace("-SNAPSHOT", "")
+    }
+
+    if (env.BRANCH_NAME ==  'master' && (version.endsWith("-RC") || version.endsWith("-SNAPSHOT"))){
+        version = version.replace("-RC", "")
+        version = version.replace("-SNAPSHOT", "")
+    }
+
+    withMaven(globalMavenSettingsConfig: globalSettingsId, maven: mavenInstallation) {
+        sh """
+            mvn deploy:deploy-file \
+                -DgroupId=${groupId} \
+                -DartifactId=${props.name} \
+                -Dversion=${version} \
+                -Dpackaging=zip \
+                -Dfile=${props.name}.zip \
+                -DrepositoryId=${repositoryId} \
+                -Durl=${repositoryUrl}${repositoryName}
+        """
+    }
+}
+
+
+
+
+
+

Create docker image

+
+
+

Now we need to use these artifacts to create a Docker image. To create the docker image you need an external server to do it. You could do it using one of the next:

+
+
+

Create docker image using OpenShift cluster

+
+
+

To create the docker image with this option you need to configure your OpenShift. You could read how to configure it here.

+
+
+
+
props = readJSON file: 'package.json'
+withCredentials([usernamePassword(credentialsId: "${openShiftCredentials}", passwordVariable: 'pass', usernameVariable: 'user')]) {
+    sh "oc login -u ${user} -p ${pass} ${openshiftUrl} --insecure-skip-tls-verify"
+    try {
+        sh "oc start-build ${props.name} --namespace=${openShiftNamespace} --from-dir=dist --wait"
+        sh "oc import-image ${props.name} --namespace=${openShiftNamespace} --from=${dockerRegistry}/${props.name}:${dockerTag} --confirm"
+    } catch (e) {
+        sh """
+            oc logs \$(oc get builds -l build=${props.name} --namespace=${openShiftNamespace} --sort-by=.metadata.creationTimestamp -o name | tail -n 1) --namespace=${namespace}
+            throw e
+        """
+    }
+}
+
+
+
+ + + + + +
+ + +if your project is a maven project you should read the pom.xml file instead of the package.json, you could do it with the next command def pom = readMavenPom file: 'pom.xml'. Due to the fact that there are different variable names between those two files, remember to modify ${props.name} for ${pom.artifactId} in the code. +
+
+
+

Create docker image using docker server

+
+
+

To create the docker image with this option you need to install docker and configure where is the docker host in your jenkins.

+
+
+
+
docker.withRegistry("""${dockerRegistryProtocol}${dockerRegistry}""", dockerRegistryCredentials) {
+    def props = readJSON file: 'package.json'
+    def customImage = docker.build("${props.name}:${props.version}", "-f ${dockerFileName} .")
+    customImage.push()
+    customImage.push(dockerTag);
+}
+
+
+
+

here

+
+
+ + + + + +
+ + +if your project is a maven project you should read the pom.xml file instead of the package.json, you could do it with the next command def pom = readMavenPom file: 'pom.xml'. Due to the fact that there are different variable names between those two files, remember to modify ${props.name} for ${pom.artifactId} and ${props.version} for ${pom.version} in the code. +
+
+
+
+
+

Deploy docker image

+
+
+

Once you have the docker image in the registry we only need to import it into your deployment environment. We can do it executing one of the next commands:

+
+
+

Deploy docker image in OpenShift cluster

+
+
+

To deploy the docker image with this option you need to configure your OpenShift. You could read how to configure it here.

+
+
+
+
script {
+    props = readJSON file: 'package.json'
+    withCredentials([usernamePassword(credentialsId: "${openShiftCredentials}", passwordVariable: 'pass', usernameVariable: 'user')]) {
+        sh "oc login -u ${user} -p ${pass} ${openshiftUrl} --insecure-skip-tls-verify"
+        try {
+            sh "oc import-image ${props.name} --namespace=${openShiftNamespace} --from=${dockerRegistry}/${props.name}:${dockerTag} --confirm"
+        } catch (e) {
+            sh """
+                oc logs \$(oc get builds -l build=${props.name} --namespace=${openShiftNamespace} --sort-by=.metadata.creationTimestamp -o name | tail -n 1) --namespace=${openShiftNamespace}
+                throw e
+            """
+        }
+    }
+}
+
+
+
+ + + + + +
+ + +if your project is a maven project you should read the pom.xml file instead of the package.json, you could do it with the next command def pom = readMavenPom file: 'pom.xml'. Due to the fact that there are different variable names between those two files, remember to modify ${props.name} for ${pom.artifactId} in the code. +
+
+
+

Deploy docker image using docker server

+
+
+

To deploy the docker image with this option you need to install docker and configure your docker server and also integrate it with Jenkins.

+
+
+
+
script {
+    docker.withRegistry("""${dockerRegistryProtocol}${dockerRegistry}""", dockerRegistryCredentials) {
+        def props = readJSON file: 'package.json'
+        docker.image("${props.name}:${props.version}").pull()
+
+        def containerId = sh returnStdout: true, script: """docker ps -aqf "name=${containerName}${dockerEnvironment}" """
+        if (containerId?.trim()) {
+            sh "docker rm -f ${containerId.trim()}"
+        }
+
+        println """docker run -d --name ${containerName}${dockerEnvironment} --network=${networkName} ${dockerRegistry}/${props.name}:${props.version}"""
+        sh """docker run -d --name ${containerName}${dockerEnvironment} --network=${networkName} ${dockerRegistry}/${props.name}:${props.version}"""
+    }
+}
+
+
+
+ + + + + +
+ + +if your project is a maven project you should read the pom.xml file instead of the package.json, you could do it with the next command def pom = readMavenPom file: 'pom.xml'. Due to the fact that there are different variable names between those two files, remember to modify ${props.name} for ${pom.artifactId} and ${props.version} for ${pom.version} in the code. +
+
+
+
+
+

Check status

+
+
+

Now it is time to check if your pods are running ok.

+
+
+

To check if your pods are ok in OpenShift you should add the next code to your pipeline:

+
+
+
+
script {
+    props = readJSON file: 'package.json'
+    sleep 30
+    withCredentials([usernamePassword(credentialsId: "${openShiftCredentials}", passwordVariable: 'pass', usernameVariable: 'user')]) {
+        sh "oc login -u ${user} -p ${pass} ${openshiftUrl} --insecure-skip-tls-verify"
+        sh "oc project ${openShiftNamespace}"
+
+        def oldRetry = -1;
+        def oldState = "";
+
+        sh "oc get pods -l app=${props.name} > out"
+        def status = sh (
+            script: "sed 's/[\t ][\t ]*/ /g' < out | sed '2q;d' | cut -d' ' -f3",
+            returnStdout: true
+        ).trim()
+
+        def retry = sh (
+            script: "sed 's/[\t ][\t ]*/ /g' < out | sed '2q;d' | cut -d' ' -f4",
+            returnStdout: true
+        ).trim().toInteger();
+
+        while (retry < 5 && (oldRetry != retry || oldState != status)) {
+            sleep 30
+            oldRetry = retry
+            oldState = status
+
+            sh """oc get pods -l app=${props.name} > out"""
+            status = sh (
+                script: "sed 's/[\t ][\t ]*/ /g' < out | sed '2q;d' | cut -d' ' -f3",
+                returnStdout: true
+            ).trim()
+
+            retry = sh (
+                script: "sed 's/[\t ][\t ]*/ /g' < out | sed '2q;d' | cut -d' ' -f4",
+                returnStdout: true
+            ).trim().toInteger();
+        }
+
+        if(status != "Running"){
+            try {
+                sh """oc logs \$(oc get pods -l app=${props.name} --sort-by=.metadata.creationTimestamp -o name | tail -n 1)"""
+            } catch (e) {
+                sh "echo error reading logs"
+            }
+            error("The pod is not running, cause: " + status)
+        }
+    }
+}
+
+
+
+
+
+

Post operations

+
+
+

When everything is finished, remember to clean your workspace.

+
+
+

post { + cleanup { + cleanWs() + } +}

+
+
+ + + + + +
+ + +You could also delete your dir adding the next command deleteDir(). +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-configure-nexus.html b/docs/shop-floor/1.0/dsf-configure-nexus.html new file mode 100644 index 00000000..5831147f --- /dev/null +++ b/docs/shop-floor/1.0/dsf-configure-nexus.html @@ -0,0 +1,441 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Nexus Configuration

+
+
+

In this document you will see how you can configure Nexus repository and how to integrate it with jenkins.

+
+
+
+
+

Login in Nexus

+
+
+

The first time you enter Nexus you need to log in with the user 'admin' and the password that is inside the path: /volumes/nexus/nexus-data +Then you can change that password and create a new one.

+
+
+
+
+

Prerequisites

+
+ +
+
+
+

Repositories

+
+
+

You need to have one repository for snapshots, another for releases and another one for release-candidates. Normally you use maven2 (hosted) repositories and if you are going to use a docker registry, you need docker (hosted) too.

+
+
+

To create a repository in Nexus go to the administration clicking on the gear icon at top menu bar. Then on the left menu click on Repositories and press the Create repository button.

+
+
+
+nexus create repository +
+
+
+

Now you must choose the type of the repository and configure it. This is an example for Snapshot:

+
+
+
+nexus create repository form +
+
+
+
+
+

Create user to upload/download content

+
+
+

Once you have the repositories, you need a user to upload/download content. To do it go to the administration clicking on the gear icon at top menu bar. Then on the left menu click on Users and press the Create local user button.

+
+
+
+nexus create user +
+
+
+

Now you need to fill a form like this:

+
+
+
+nexus create user form +
+
+
+
+
+

Jenkins integration

+
+
+

To use Nexus in our pipelines you need to configure Jenkins.

+
+
+
+
+

Customize jenkins

+
+
+

The first time you enter Jenkins, you are asked for the plugins to be installed. +We select install suggested plugins and later we can add the plugins that we need depending on the project necessities.

+
+
+
+plugins jenkins +
+
+
+

Then we need to create our first admin user, we can do it like this:

+
+
+
+jenkins first admin user +
+
+
+

The next step is the jenkins URL:

+
+
+
+jenkins url +
+
+
+

Your jenkins setup is ready!

+
+
+
+
+

Add nexus user credentials

+
+
+

First of all you need to add the user created in the step before to Jenkins. To do it (on the left menu) click on Credentials, then on System. Now you could access to Global credentials (unrestricted).

+
+
+
+nexus jenkins credentials +
+
+
+

Enter on it and you could see a button on the left to Add credentials. Click on it and fill a form like this:

+
+
+
+nexus jenkins credentials form +
+
+
+
+
+

Add the nexus user to maven global settings

+
+
+

In order to do this, you will need the Config File Provider plugin so we need to download it. Go to Jenkins→Manage Jenkins→Manage plugins, open the "available" tab and search for it:

+
+
+
+jenkins config fp +
+
+
+

Click on "Download now and install after restart".

+
+
+

Now you need to go to Manage Jenkins clicking on left menu and enter in Managed files.

+
+
+

Click on Add a new config/Global Maven settings.xml, change the id for a new one more readable:

+
+
+
+jenkins maven settings +
+
+
+

Then click on "Submit"

+
+
+
+jenkins global maven +
+
+
+

Edit the Global Maven settings.xml to add your nexus repositories credentials(the ones you created before) as you could see in the next image:

+
+
+
+nexus jenkins global maven form +
+
+
+

And you are done.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-configure-sonarqube.html b/docs/shop-floor/1.0/dsf-configure-sonarqube.html new file mode 100644 index 00000000..4b276337 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-configure-sonarqube.html @@ -0,0 +1,401 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

SonarQube Configuration

+
+
+

To use SonarQube you need to use a token to connect, and to know the results of the analysis you need a webhook. Also, you need to install and configure SonarQube in Jenkins.

+
+
+
+
+

Generate user token

+
+
+

To generate the user token, go to your account clicking in the left icon on the top menu bar.

+
+
+ + + + + +
+ + +If you don’t have any account, you can use the admin/admin user/pass +
+
+
+
+sonarqube administration +
+
+
+

Go to security tab and generate the token.

+
+
+
+sonarqube token +
+
+
+
+
+

Webhook

+
+
+

When you execute our SonarQube scanner in our pipeline job, you need to ask SonarQube if the quality gate has been passed. To do it you need to create a webhook.

+
+
+

Go to administration clicking the option on the top bar menu and select the tab for Configuration.

+
+
+

Then search in the left menu to go to webhook section and create your webhook.

+
+
+

An example for Production Line:

+
+
+
+sonarqube webhook +
+
+
+
+
+

Jenkins integration

+
+
+

To use SonarQube in our pipelines you need to configure Jenkins to integrate SonarQube.

+
+
+
+
+

SonarQube Scanner

+
+
+

First, you need to configure the scanner. Go to Manage Jenkins clicking on left menu and enter in Global Tool Configuration.

+
+
+

Go to SonarQube Scanner section and add a new SonarQube scanner like this.

+
+
+
+sonarqube jenkins scanner +
+
+
+
+
+

SonarQube Server

+
+
+

Now you need to configure where your SonarQube server is, using the user token that you created before. Go to Manage Jenkins clicking on left menu and enter in Configure System.

+
+
+

For example, in Production Line the server is the next:

+
+
+
+sonarqube jenkins server +
+
+
+ + + + + +
+ + +Remember, the token was created at the beginning of this SonarQube configuration. +
+
+
+
+
+

SonarQube configuration

+
+
+

Now it is time to configure your sonar in order to measure the quality of your code. To do it, please follow the official documentation about our plugins and Quality Gates and Profiles here.

+
+
+
+
+

How to ignore files

+
+
+

Usually the developers need to ignore some files from Sonar analysis. To do that, they must add the next line as a parameter of the sonar execution to their Jenkinsfile in the SonarQube code analysis step.

+
+
+
+
-Dsonar.exclusions='**/*.spec.ts, **/*.model.ts, **/*mock.ts'
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-create-new-devonfw-project.html b/docs/shop-floor/1.0/dsf-create-new-devonfw-project.html new file mode 100644 index 00000000..a5bbe851 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-create-new-devonfw-project.html @@ -0,0 +1,290 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

How to create new devonfw project

+
+
+

Here you can find the official guides to start new devonfw projects:

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-custom-plugin-for-sonar-AzureDevops.html b/docs/shop-floor/1.0/dsf-custom-plugin-for-sonar-AzureDevops.html new file mode 100644 index 00000000..6f4abf9f --- /dev/null +++ b/docs/shop-floor/1.0/dsf-custom-plugin-for-sonar-AzureDevops.html @@ -0,0 +1,335 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Install and use custom sonar plugin in Azure Devops

+
+
+

By default, the sonar plugin cannot be used in every branch you want; to do this you need to purchase a license or customize the current plugin in order to satisfy your needs.

+
+
+

How to customize the plugin is not the purpose of this documentation; this documentation covers the installation and use of it.

+
+
+

If you want to install a custom plugin, sign into your Azure Devops organization and once you are in, click on the marketplace icon:

+
+
+
+marketplace +
+
+
+

Select Browse marketplace > Publish extension

+
+
+

Choose the extension you want to install and click on the options

+
+
+
+extension +
+
+
+

Important:

+
+
+

You need to choose the organization for which you are going to use the extension and share it, if not, you won’t be able to install it.

+
+
+
+share unshare +
+
+
+

Once you’ve done this click on View extension and 'Get it free', the extension will be downloaded and you will be able to use it in the next screen

+
+
+
+install +
+
+
+

If there are no organizations you can see the possible causes here.

+
+
+

Another cause might be that you forgot to share the extension.

+
+
+

Note: If the install button does not appear, it’s possible that you don’t have permissions to install it, so you will need to talk with the owner of the org. +Another possibility is that you can request an installation.

+
+
+

Once installed, in the pipeline wizard it will appear and you will be able to select it.

+
+
+
+wizard +
+
+
+

We can see in the image the default plugin and the customized one.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-deployment-dsf4openshift-automatic-configuration.html b/docs/shop-floor/1.0/dsf-deployment-dsf4openshift-automatic-configuration.html new file mode 100644 index 00000000..2d48ee47 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-deployment-dsf4openshift-automatic-configuration.html @@ -0,0 +1,556 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

OpenShift deployment environment automatic configuration

+
+
+

In this section you will see how you can create a new environment instance in your OpenShift cluster to deploy devonfw projects using docker images.

+
+
+
+
+

Prerequisites

+
+ +
+
+
+

Add OpenShift Client to Jenkins

+
+
+

To integrate it, you need to have installed the plugin OpenShift Client. To install it go to Manage Jenkins clicking on left menu and enter in Manage Plugins. Go to Available tab and search it using the filter textbox in the top right corner and install it.

+
+
+
+
+

Configuration OpenShift Client in Jenkins

+
+
+

Second, you need to configure the OC Client. Go to Manage Jenkins clicking on left menu and enter in Global Tool Configuration.

+
+
+

Go to OpenShift Client Tools section and add a new one like this.

+
+
+
+openshift jenkins plugin +
+
+
+
+
+

devonfw project

+
+
+

You need to have a devonfw project in a git repository or a docker image uploaded to a docker registry.

+
+
+
+
+

Communication between components

+
+
+

Jenkins must have access to git, docker registry and OpenShift.

+
+
+

Openshift must have access to docker registry.

+
+
+
+
+

Jenkinsfiles to Configure OpenShift

+
+
+

You can find one Jenkinsfile per devonfw technology in devonfw shop floor repository to configure automatically your OpenShift cluster.

+
+
+
+
+

How to use it

+
+
+

To use it you need to follow the next steps

+
+
+
+
+

Create a new pipeline

+
+
+

You need to create a new pipeline in your repository and point it to Jenkinsfile in devonfw shop floor repository.

+
+
+
+openshift jenkins configure environments repo +
+
+
+

Note: In the script path section you should use the Jenkinsfile of the technology that you need.

+
+
+
+
+

Build with parameters

+
+
+

The first time that you execute the pipeline it is going to fail because Jenkins does not know that this pipeline needs parameters to execute. The best thing you can do is stop it manually when Declarative: Checkout SCM is over.

+
+
+

Then you could see a button to Build with Parameters, click on it and fill the next form, these are the parameters:

+
+
+

Docker registry credentials for OpenShift

+
+
+

CREATE_SECRET: This option allows you to add the credentials of your docker registry in your OpenShift and store them as a secret called docker-registry + registry_secret_name_suffix value.

+
+
+

Remember that you only need one secret to connect with your registry per namespace, if you are going to add more than one application in the same namespace that use the same registry, use the same name suffix and please do not create more than one secret in the same namespace. The namespace is the OpenShift project when you are going to deploy your application.

+
+
+

You can see your secrets stored in OpenShift going to OpenShift and click on the left menu:

+
+
+
+openshift secrets menu +
+
+
+ + + + + +
+ + +If the secret exists, you should uncheck the checkbox and fill the name suffix to use it. +
+
+
+

REGISTRY_SECRET_NAME_SUFFIX: This is the suffix of the name for your docker registry credentials stored in OpenShift as a secret. The name is going to be docker-registry + this suffix, if you use more than one docker-registry in the same namespace you need to add a suffix. For example you could add the name of your project, then to have the name as docker-registry-myprojectname you should use -myprojectname value.

+
+
+

Build your docker image using OpenShift and store it in your docker registry

+
+
+

CREATE_DOCKER_BUILDER: This option allows you to create a build configuration in your OpenShift to create the docker images of your project and store them in your docker registry. If you are going to create the builder, your application is needed, you need to specify where is your git repository and which is the branch and credentials to use it.

+
+
+

The following parameters of this section are only necessary if a builder is to be created.

+
+
+

GIT_REPOSITORY: This is the url of your git repository.

+
+
+ + + + + +
+ + +If you are using production line, remember to use the internal route of your repository; to use it you must change the base url of your production line for the internal route http://gitlab-core:80/gitlab. For example, if your production line repository is https://shared-services.pl.s2-eu.capgemini.com/gitlab/boat/boat-frontend.git, use http://gitlab-core:80/gitlab/boat/boat-frontend.git +
+
+
+

GIT_BRANCH: This is the branch that we are going to use for creating the first docker image. The next time that you are going to use the builder you could use other branches.

+
+
+

GIT_CREDENTIALS: This is the credentials id stored in your jenkins to download the code from your git repository.

+
+
+

BUILD_SCRIPT: In case of using devon4ng or devon4node, you can specify which build script is used to build and create the first docker image with this builder.

+
+
+

JAVA_VERSION: In case of using devon4j, this is the Java version used for your docker image.

+
+
+

Docker registry information

+
+
+

DOCKER_REGISTRY: This is the url of your docker registry.

+
+
+ + + + + +
+ + +If you are using production line, the url of your registry is docker-registry- + your production line url. For example, if your production line is shared-services.pl.s2-eu.capgemini.com your docker registry is docker-registry-shared-services.pl.s2-eu.capgemini.com. +
+
+
+

If you cannot access to your docker registry, please open an incident in i4u.

+
+
+

DOCKER_REGISTRY_CREDENTIALS: This is the credentials id stored in your jenkins to download or upload docker images in your docker registry.

+
+
+

DOCKER_TAG: This is the tag that is going to be used for the builder to push the docker image and for the deployment config to pull and deploy it.

+
+
+

OpenShift cluster information

+
+
+

OPENSHIFT_URL: This is the url of your OpenShift cluster.

+
+
+

OPENSHIFT_CREDENTIALS: This is the credentials id stored in your jenkins to use OpenShift.

+
+
+

OPENSHIFT_NAMESPACE: This is the name of the project in your OpenShift where you are going to use. The name of the project in OpenShift is called namespace.

+
+
+

Take care because although you see at the top of your OpenShift interface the name of the project that you are using, this name is the display-name and not the value that you need. To obtain the correct value you must check your OpenShift url like you see in the next image:

+
+
+
+openshift namespace name +
+
+
+

APP_NAME_SUFFIX: The name of all things created in your OpenShift project are going to be called as the configuration of your application says. Normally, our projects use a suffix that depends on the environment. You can see the values in the next list:

+
+
+
    +
  • +

    For develop branch we use -dev

    +
  • +
  • +

    For release branch we use -uat

    +
  • +
  • +

    For master branch we use -prod

    +
  • +
+
+
+

HOSTNAME: If you do not specify anything, OpenShift is going to autogenerate a valid url for your application. You can modify the default value, but be sure that you configure everything to serve your application in the route that you specify.

+
+
+

SECURED_PROTOCOL: If true, the protocol for the route will be https otherwise will be http.

+
+
+

Jenkins tools

+
+
+

All those parameters are the name of the tools in your Jenkinsfile.

+
+
+

To obtain it you need enter in your Jenkins and go to Manage Jenkins clicking on left menu and enter in Global Tool Configuration or in Managed files.

+
+
+

OPENSHIFT_TOOL: Is located in Global tool configuration.

+
+
+
+openshift jenkins plugin name +
+
+
+

NODEJS_TOOL: Is located in Global tool configuration.

+
+
+
+jenkins openshift tool +
+
+
+

YARN_TOOL: Is located in Global tool configuration, inside the custom tools.

+
+
+
+jenkins yarn tool name +
+
+
+

GLOBAL_SETTINGS_ID Is located in Managed files. You need to click on edit button and take the id.

+
+
+
+jenkins config file management +
+
+
+
+jenkins edit configuration file +
+
+
+

MAVEN_INSTALLATION Is located in Global tool configuration.

+
+
+
+jenkins mave tool name +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-deployment-dsf4openshift-manual-configuration.html b/docs/shop-floor/1.0/dsf-deployment-dsf4openshift-manual-configuration.html new file mode 100644 index 00000000..77b44323 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-deployment-dsf4openshift-manual-configuration.html @@ -0,0 +1,773 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

OpenShift deployment environment manual configuration

+
+
+

In this section you will see how you can create a new environment instance in your OpenShift cluster to deploy devonfw projects using docker images.

+
+
+
+
+

Prerequisites

+
+ +
+
+
+

devonfw project

+
+
+

You need to have a devonfw project in a git repository or a docker image uploaded to a docker registry.

+
+
+
+
+

Communication between components

+
+
+

Openshift must have access to docker registry.

+
+
+
+
+

Download OpenShift Client Tools

+
+
+

First of all you need to download the OpenShift client, you can find it here.

+
+
+

Remember that what you need to download is the oc Client Tools and not the OKD Server.

+
+
+ + + + + +
+ + +This tutorial has been made with the version 3.10.0 of the client, it is recommended to use the most current client, but if it does not work, it is possible that the instructions have become obsolete or that the OpenShift used needs another older/newer version of the client. To download a specific version of the client you can find here the older versions and the version 3.10.0. +
+
+
+
+
+

Add oc client to path

+
+
+

Once you have downloaded the client you have to add it to the PATH environment variable.

+
+
+
+
+

Log into OpenShift with admin account

+
+
+

You can log using a terminal and executing the next instructions:

+
+
+
+
oc login $OpenShiftUrl
+
+
+
+ + + + + +
+ + +You need a valid user to log in. +
+
+
+
+
+

Select the project where you are going to create the environment

+
+
+
+
oc project $projectName
+
+
+
+
+
+

Add all the secrets that you need

+
+
+

For example, to create a secret for a nexus repository you should execute the next commands:

+
+
+
+
oc create secret docker-registry $nameForSecret --docker-server=${dockerRegistry} --docker-username=${user} --docker-password=${pass} --docker-email=no-reply@email.com
+
+
+
+
+
+

Configure OpenShift

+
+ +
+
+
+

Configure builds to create docker image using OpenShift

+
+
+

If you need to create docker images of your projects you could use OpenShift to do it (Of course, only if you have enough rights).

+
+
+

To do it, follow the next steps.

+
+
+
+
+

== Create new builds configs

+
+
+

The first thing you need to do to create a new environment is prepare the buildconfigs for the front and for the middleware and raise the default memory limits for the middleware. You can do it using a terminal and executing the next instructions:

+
+
+

These are a summary about the parameters used in our commands:

+
+
+
    +
  • +

    ${dockerRegistry}: The url of the docker repository.

    +
  • +
  • +

    ${props.name}: The name of the project (for example could be find on package.json)

    +
  • +
  • +

    ${dockerTag}: The tag of the image

    +
  • +
+
+
+ + + + + +
+ + +From now on you will refer to the name that you are going to give to the environment as $environment. Remember to modify it for the correct value in all instructions. +
+
+
+
+
+

== devon4ng build config

+
+
+

You need to create nginx build config with docker.

+
+
+
+
oc new-build --strategy docker --binary --docker-image nginx:alpine-perl --name=${props.name}-$environment --to=${dockerRegistry}/${props.name}:${dockerTag} --to-docker=true
+
+
+
+ + + + + +
+ + +You need nginx:alpine-perl to read the environment config file in openshift, if you are not going to use it, you could use nginx:latest instead. +
+
+
+
+
+

== devon4node build config

+
+
+
+
oc new-build --strategy docker --binary --docker-image node:lts --name=${props.name}-$environment --to=${dockerRegistry}/${props.name}:${dockerTag} --to-docker=true
+
+
+
+
+
+

== devon4j build config

+
+
+
+
oc new-build --strategy docker --binary --docker-image openjdk:<version> --name=${props.name}-$environment --to=${dockerRegistry}/${props.name}:${dockerTag} --to-docker=true
+
+
+
+ + + + + +
+ + +You need to specify the <version> of java used for your project. Also you can use the -alpine image. This image is based on the popular Alpine Linux project. Alpine Linux is much smaller than most distribution base images (~5MB), and thus leads to much slimmer images in general. More information on docker hub. +
+
+
+
+
+

== How to use the build

+
+
+

In this step is where you will build a docker image from a compiled application.

+
+
+
+
+

== == Prerequisite

+
+
+

To build the source in OpenShift, first of all you need to compile your source and obtain the artifacts "dist folder" or download it from a repository. Normally the artifacts have been built on Jenkins and have been stored in Nexus.

+
+
+

To download it, you can access to your registry, select the last version and download the ".tar". The next image shows an example of where is the link to download it, marked in yellow:

+
+
+
+nexus stored artifacts +
+
+
+
+
+

== == Build in OpenShift

+
+
+

When you have the artifacts, you can send them to your openshift and build them using your buildconfig that you created on the previous step. This is going to create a new docker image and push it to your registry.

+
+
+

If your docker registry need credentials you should use a secret. You could add it to your buildconfig using the next command:

+
+
+
+
oc set build-secret --push bc/${props.name}-$environment ${nameForSecret}
+
+
+
+

Now you can use your build config and push the docker image to your registry. To do it you need to use a terminal and execute the following:

+
+
+
+
oc start-build ${props.name}-$environment --from-dir=${artifactsPath} --follow
+
+
+
+ + + + + +
+ + +${artifactsPath} is the path where you have the artifacts of the prerequisite (On jenkins is the dist folder generated by the build). +
+
+
+ + + + + +
+ + +Maybe you need to raise your memory or CPU limits. +
+
+
+
+
+

Configure new environment

+
+
+

Now it is time to configure the environment.

+
+
+
+
+

== Prerequisite

+
+
+

You need a docker image of your application. You could create it using OpenShift as you see in the last step.

+
+
+
+
+

== Create new app on OpenShift

+
+
+

To create new app you need to use the next command.

+
+
+
+
oc new-app --docker-image=${artifactsPath} --name=${props.name}-$environment --source-secret=${nameForSecret}
+
+
+
+ + + + + +
+ + +You could add environment variables using -e $name=$value +
+
+
+ + + + + +
+ + +If you do not need to use a secret remove the end part of the command --source-secret=${nameForSecret} +
+
+
+
+
+

== Create routes

+
+
+

Finally, you need to add a route to access the service.

+
+
+

Add http route

+
+
+

If you want to create an http route execute the following command in a terminal:

+
+
+
+
oc expose svc/${props.name}-$environment
+
+
+
+

Add https route

+
+
+

If you want to create an https route you can do it executing the following command:

+
+
+
+
oc create route edge --service=${props.name}-$environment
+
+
+
+

If you want to change the default route path you can use the command --hostname=$url. For example:

+
+
+
+
oc expose svc/${props.name}-$environment --hostname=$url
+
+oc create route edge --service=${props.name}-$environment --hostname=$url
+
+
+
+
+
+

Import new images from registry

+
+
+

When you have new images in the registry you must import them to OpenShift. You could do it executing the next commands:

+
+
+
+
oc import-image ${props.name}-$environment --from=${dockerRegistry}/${props.name}:${dockerTag} --confirm
+
+
+
+ + + + + +
+ + +Maybe you need to raise your memory or CPU limits. It is explained below. +
+
+
+
+
+

Raise/decrease memory or CPU limits

+
+
+

If you need to raise (or decrease) the memory or CPU limits that you need you could do it for your deployments and builders configurations following the next steps.

+
+
+
+
+

== For deployments

+
+
+

You could do it in OpenShift using the user interface. To do it you should enter in OpenShift and go to deployments.

+
+
+
+openshift deployments menu +
+
+
+

At the right top, you could see a drop down actions, click on it and you could edit the resource limits of the container.

+
+
+
+openshift deployments actions +
+
+
+
+openshift deployments resource limits +
+
+
+

Maybe you should modify the resource limits of the pod too. To do it you should click on drop down actions and go to edit YAML. Then you could see something like the next image.

+
+
+
+openshift deployments yaml resources +
+
+
+

In the image, you can see that resources appears two times: one at the bottom of the image — these are the container resources that you modified in the previous paragraph — and another one at the top of the image. The resources at the top are for the pod; you should give it at least the sum of the resources of all containers that the pod uses.

+
+
+

Also you could do it using command line interface and executing the next command:

+
+
+

To modify pod limits

+
+
+
+
oc patch dc/boat-frontend-test --patch '{"spec":{"strategy":{"resources":{"limits":{"cpu": "100m", "memory": "100Mi"}, "requests":{"cpu": "100m", "memory": "100Mi"}}}}}'
+
+
+
+

To modify container limits

+
+
+

When this guide was written, OpenShift had a bug and you could not do it from the command line interface.

+
+
+ + + + + +
+ + +If that command did not work and you received an error like this error: unable to parse "'{spec:…​": yaml: found unexpected end of stream, try to use the patch using "" instead of ''. It looks like this: --patch "{\"spec\":…​\"}}}}" +
+
+
+
+
+

== For builders

+
+
+

You could do it using command line interface and executing the next command:

+
+
+
+
oc patch bc/${props.name}${APP_NAME_SUFFIX} --patch '{"spec":{"resources":{"limits":{"cpu": "125m", "memory": "400Mi"},"requests":{"cpu": "125m", "memory": "400Mi"}}}}'
+
+
+
+ + + + + +
+ + +If that command did not work and you received an error like this error: unable to parse "'{spec:…​": yaml: found unexpected end of stream, try to use the patch using "" instead of ''. It looks like this: --patch "{\"spec\":…​\"}}}}" +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-deployment-dsf4openshift.html b/docs/shop-floor/1.0/dsf-deployment-dsf4openshift.html new file mode 100644 index 00000000..e7e8b64a --- /dev/null +++ b/docs/shop-floor/1.0/dsf-deployment-dsf4openshift.html @@ -0,0 +1,371 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

dsf4openshift deployment environment

+
+
+

In this section you will see how you can create a new environment instance in OpenShift and the things that you must add to the Jenkinsfiles of your repository to deploy a branch in this new environment. To conclude you are going to see how to add config files for environment in the source code of the applications.

+
+
+
+
+

Configure your OpenShift to deploy your devonfw projects

+
+ +
+
+
+

Prerequisites

+
+ +
+
+
+

== OpenShift Cluster

+
+
+

To have your deployment environment with OpenShift you need to have an OpenShift Cluster.

+
+
+
+
+

Manual configuration

+
+
+

Here you can find all that you need to know to configure OpenShift manually.

+
+
+
+
+

Automatic configuration

+
+
+

Here you can find all that you need to know to configure OpenShift automatically.

+
+
+
+
+

Service integration with jenkins

+
+ +
+
+
+

Prerequisites

+
+
+

To integrate it, you need to have installed the plugin OpenShift Client. To install it go to Manage Jenkins clicking on left menu and enter in Manage Plugins. Go to Available tab and search it using the filter textbox in the top right corner and install it.

+
+
+
+
+

Configuration

+
+
+

Second, you need to configure the OC Client. Go to Manage Jenkins clicking on left menu and enter in Global Tool Configuration.

+
+
+

Go to OpenShift Client Tools section and add a new one like this.

+
+
+
+openshift jenkins plugin +
+
+
+
+
+

Upgrade your Jenkinsfile

+
+
+

Now it is time to add/upgrade the next stages in to your Jenkinsfile:

+
+
+

Add create docker image stage.

+
+
+

Add deploy docker image stage.

+
+
+

Add check status stage.

+
+
+

Upgrade Setup Jenkins tools stage.

+
+
+ + + + + +
+ + +Remember to upgrade your parameters to difference which environment is used per branch. +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-how-to-use.html b/docs/shop-floor/1.0/dsf-how-to-use.html new file mode 100644 index 00000000..57e8a2e7 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-how-to-use.html @@ -0,0 +1,421 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

How to use it

+
+
+

This is the documentation about shop floor and its different tools. Here you are going to learn how to create new projects, so that they can include continuous integration and continuous delivery processes, and be deployed automatically in different environments.

+
+
+
+
+

Prerequisites - Provisioning environment

+
+
+

To start working you need to have some services running in your provisioning environment, such as Jenkins (automation server), GitLab (git repository), SonarQube (program analysis), Nexus (software repository) or similar.

+
+
+

To host those services we recommend to have a Production Line instance but you can use other platforms. Here is the list for the different options:

+
+
+ +
+
+
+
+

Step 1 - Configuration and services integration

+
+
+

The first step is configuring your services and integrate them with jenkins. Here you have an example about how to manually configure the next services:

+
+
+ +
+
+
+
+

Step 2 - Create the project

+
+ +
+
+
+

Create and integrate git repository

+
+
+

The second step is to create your git repository and integrate it with Jenkins.

+
+
+

Here you can find a manual guide about how to do it:

+
+
+ +
+
+
+
+

Start new devonfw project

+
+
+

It is time to create your devonfw project:

+
+
+

You can find all that you need about how to create a new devonfw project

+
+
+
+
+

cicd configuration

+
+
+

Now you need to add cicd files in your project.

+
+
+
+
+

== Manual configuration

+
+ +
+
+
+

== Jenkinsfile

+
+
+

Here you can find all that you need to know to do your Jenkinsfile.

+
+
+
+
+

== Dockerfile

+
+
+

Here you can find all that you need to know to do your Dockerfile.

+
+
+
+
+

== Automatic configuration

+
+ +
+
+
+

== cicdgen

+
+
+

If you are using production line for provisioning you could use cicdgen to configure automatically almost everything explained in the manual configuration. To do it see the cicdgen documentation.

+
+
+
+
+

Step 3 - Deployment

+
+
+

The third step is to configure your deployment environment. Here is the list for the different options:

+
+
+ +
+
+
+
+

Step 4 - Monitoring

+
+
+

Here you can find information about tools for monitoring:

+
+
+
    +
  • +

    build monitor view for Jenkins. With this tool you will be able to see in real time what is the state of your Jenkins pipelines.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-istio-guide.html b/docs/shop-floor/1.0/dsf-istio-guide.html new file mode 100644 index 00000000..5add9263 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-istio-guide.html @@ -0,0 +1,855 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

ISTIO Service Mesh Implementation Guide

+
+ +
+
+
+

Introduction

+
+
+

A service mesh separates applications from network functions like resilience, fault tolerance, etc.

+
+
+

A service mesh addresses the below functions without changing the application code.

+
+
+
    +
  • +

    Test the new versions of services without impacting the users.

    +
  • +
  • +

    Scale the services.

    +
  • +
  • +

    Find the services with the help of service registry.

    +
  • +
  • +

    Test against failures.

    +
  • +
  • +

    Secure service-to-service communication.

    +
  • +
  • +

    Route traffic to a specific way.

    +
  • +
  • +

    Circuit breaking and fault injection.

    +
  • +
  • +

    Monitor the services and collect matrices.

    +
  • +
  • +

    Tracing.

    +
  • +
+
+
+

ISTIO service mesh is an open environment for Connecting, Securing, Monitoring services across the environments.

+
+
+
+
+

ISTIO Architecture

+
+
+

image

+
+
+

ISTIO is split into data plane and control plane. Refer ISTIO Architecture

+
+
+
+
+

Data Plane

+
+
+

The data plane is a set of intelligent proxies (Envoy) deployed as sidecars that mediate and control all network communication among microservices.

+
+
+

image

+
+
+
+
+

Control Plane

+
+
+

The control plane is managing and configuring proxies to route traffic and enforcing policies.

+
+
+
    +
  • +

    Pilot manages all the proxies and responsible for routing

    +
  • +
  • +

    Mixer collects telemetry and policy check

    +
  • +
  • +

    Citadel does Certificate management (TLS certs to Envoys)

    +
  • +
+
+
+
+
+

ISTIO installation

+
+
+

Download ISTIO from releases

+
+
+

istioctl install --set profile=demo

+
+
+

Here used demo profile, there are other profiles for production.

+
+
+

Verify installation:

+
+
+

kubectl get all -n istio-system

+
+
+

Inject sidecar container automatically by issuing the below command.

+
+
+

kubectl label namespace default istio-injection=enabled

+
+
+

Verify:

+
+
+

kubectl get namespace -L istio-injection

+
+
+

For more installation guides, refer ISTIO Installation

+
+
+
+
+

Traffic Management

+
+
+

ISTIO’s traffic management model relies on the Envoy proxies which are deployed as sidecars to services.

+
+
+

Below are the traffic management API resources

+
+
+
    +
  • +

    Virtual Services

    +
  • +
  • +

    Destination Rules

    +
  • +
  • +

    Gateways

    +
  • +
  • +

    Service Entries

    +
  • +
  • +

    Sidecars

    +
  • +
+
+
+

A virtual service, higher level abstraction of Kubernetes Service, lets you configure how requests are routed to a service within an Istio service mesh. Your mesh may have multiple virtual services or none. Virtual service consists of routing rules that are evaluated in order.

+
+
+
+
+

Dark Launch

+
+
+

The following virtual service routes requests to different versions of a service depending on whether the request comes from a testuser. If the testuser calls, then version v1 will be used; for all other users, version v2.

+
+
+

image

+
+
+
+
+

Blue/Green deployment

+
+
+

In a blue/green deployment, two versions of the application are running. Both versions are live on different domain names; in this example they are mtsj.com and test.mtsj.com.

+
+
+
    +
  1. +

    Define 2 virtual services for mtsj v1 and v2 versions.

    +
  2. +
  3. +

    Define DestinationRule and configure the subsets for v1 and v2.

    +
  4. +
+
+
+

image

+
+
+

When end user browses mtsj.com, the gateway call goes to subset v1 of the virtual service and redirects to destination version v1, and for test.mtsj.com to version v2.

+
+
+
+
+

Canary Deployment (Traffic Splitting)

+
+
+

In a canary deployment, both the old and new versions of the application are live. ISTIO can be configured to control how much of the traffic goes to each version.

+
+
+

image

+
+
+

Here, the traffic is divided 75% to version V1 and 25% to version V2. As we gain confidence, the percentage routed to the latest version can be increased, and the traffic to the old version can be gradually reduced and eventually removed.

+
+
+

You may refer ISTIO Traffic Management for more details.

+
+
+
+
+

== MyThaiStar Implementation

+
+
+

In this example dish will have two versions and the traffic will be routed alternately using the ISTIO configuration.

+
+
+

Find all configuration files in istio/trafficmanagement/canary directory under mythaistarmicroservices example.

+
+
+
    +
  1. +

    MyThaiStar defines below

    +
    +
      +
    1. +

      Service

      +
    2. +
    3. +

      Service Account

      +
    4. +
    5. +

      Deployment

      +
    6. +
    +
    +
  2. +
+
+
+

The above configurations are defined in a single yaml file for all the different services like angular, dish, image etc.

+
+
+
    +
  1. +

    dish-v2: Dish Version 2 can be kept separately in different yaml file.

    +
  2. +
  3. +

    mts-gateway defines the ingress gateway which routes the outbound request to each service.

    +
  4. +
  5. +

    destination-rule-all defines the subsets here for later traffic routing

    +
  6. +
  7. +

    dish-50-50: traffic routing for different versions of dishmanagement.

    +
  8. +
+
+
+
+
+

Network Resilience

+
+ +
+
+
+

== Timeout

+
+
+

Istio lets you adjust the timeouts using virtual services. The default timeout is 15 seconds.

+
+
+

image

+
+
+
+
+

== Retry

+
+
+

A retry setting specifies the maximum number of times an Envoy proxy attempts to connect to a service if the initial call fails.

+
+
+

image

+
+
+

Retries can also be configured on Gateway Error, Connection failure, Connection Refused or any 5xx error from the application.

+
+
+

retryOn: gateway-error,connect-failure,refused-stream,5xx

+
+
+
+
+

== Circuit Breakers

+
+
+

By defining a destination rule, you can set limits for calls to individual hosts within a service, such as the number of concurrent connections or how many times calls to a host may fail; once the limit is reached, further calls are stopped.

+
+
+
    +
  • +

    Outlier Detection is an ISTIO Resiliency strategy to detect unusual host behaviour and evict the unhealthy hosts from the set of load balanced healthy hosts inside a cluster.

    +
  • +
  • +

    If a request is sent to a service instance and it fails (returns a 50X error code), then ISTIO ejects the instance from the load balanced pool for a specified duration.

    +
  • +
+
+
+

image

+
+
+
+
+

== Fault Injection

+
+
+

Two types of faults can be generated using ISTIO. This is useful for testing.

+
+
+

Delays: timing failures.

+
+
+

Aborts: crash failures.

+
+
+

Below example is a crash failure Virtual Service. The below example configured to receive http status 500 error for the testuser. The application works fine for all other users.

+
+
+

image

+
+
+

The below virtual service configured to wait 10s for all requests.

+
+
+

image

+
+
+
+
+

Security

+
+
+

ISTIO provides a security solution with the below functions.

+
+
+
    +
  • +

    Traffic encryption

    +
  • +
  • +

    Mutual TLS and fine-grained access policies.

    +
  • +
  • +

    Auditing tools

    +
  • +
+
+
+
+
+

Authentication

+
+
+

ISTIO provides two types of authentication.

+
+
+
    +
  • +

    Peer authentication, secures service to service authentication

    +
  • +
  • +

    Request authentication is end user authentication to verify credential attached to the request.

    +
  • +
+
+
+
+
+

Mutual TLS Authentication

+
+
+

By default, the TLS protocol only proves the identity of the server to the client. Mutual TLS authentication ensures that traffic is secure and trusted in both directions between the client and server.

+
+
+

All traffic between services with proxies uses mutual TLS by default.

+
+
+
+
+

Peer Authentication

+
+
+

Peer authentication has Permissive, Strict and Disabled modes. With permissive mode, a service accepts both plain text and mutual TLS traffic. Permissive mode is good at the time of onboarding and should be switched to Strict later.

+
+
+

The authentication policy can be applied to mesh-wide, namespace wide or workload specific using the selector field.

+
+
+

image

+
+
+

Here the policy is applied to the workload bookings.

+
+
+

Check the default mesh policy:

+
+
+

kubectl describe meshpolicy default

+
+
+
+
+

Request authentication

+
+
+

Request authentication policies specify the values needed to validate JWT tokens.

+
+
+

|=== +|Authentication |Applies to |Uses |Identity +|Peer authentication |Service to service |mTLS |source.principal +|Request authentication |End User authentication |JWT |request.auth.principal +|===

+
+
+
+
+

Authorization

+
+
+

Apply an authorization policy to the workload/namespace/mesh to enforce the access control. Supports ALLOW and DENY actions.

+
+
+
+
+

== Deny All

+
+
+

Below example authorization policy without any rules denies access to all workloads in admin namespace.

+
+
+

image

+
+
+

The example below allows the GET methods from the order service.

+
+
+

image

+
+
+

Example below denies the request to the /registered path for requests without request principals.

+
+
+

image

+
+
+

You may refer ISTIO Security for more details.

+
+
+
+
+

Observability

+
+
+

ISTIO generates

+
+
+
    +
  • +

    Metrics - for monitor latency, traffic, errors and saturation.

    +
  • +
  • +

    Distributed Traces to identify call flows and service dependencies

    +
  • +
  • +

    Access Logs enable auditing service behaviour down to the individual service level.

    +
  • +
+
+
+
+
+

Grafana dashboard

+
+
+

Grafana and Prometheus are preconfigured addons on ISTIO. To enable, choose the configuration profile which has Prometheus and Grafana enabled. Eg: Demo profile

+
+
+

Verify Prometheus and Grafana running in the cluster.

+
+
+

kubectl get pods -n istio-system

+
+
+
+
+

Kiali dashboard

+
+
+

The Kiali dashboard helps you understand the structure of your service mesh by displaying the topology. The demo profile enables Kiali dashboard also.

+
+
+

Access the Kiali dashboard. The default user name is admin and default password is admin.

+
+
+

istioctl dashboard kiali

+
+
+

You may refer ISTIO Observability

+
+
+
+
+

Minikube Troubleshooting Tips

+
+
+

This documentation provides the troubleshooting tips while working with minikube in a local machine.

+
+
+
    +
  1. +

    Always start minikube with a minimum of 4GB of memory or more if available. Using command minikube start --memory=4096

    +
  2. +
  3. +

    If minikube is not starting or throwing any error even after multiple attempts. Try the below tips:

    +
    +
      +
    1. +

      Delete the minikube in your local machine using minikube delete and do a fresh minikube start.

      +
    2. +
    3. +

      In any case, if minikube is not starting even after the above step, go to .minikube folder under the users directory and delete it manually. Now try starting minikube.

      +
    4. +
    +
    +
  4. +
  5. +

    Set docker environment in minikube using minikube docker-env. Now all the docker commands that are run will be on the docker inside minikube. So building your application after executing the above command will have the application docker images available to minikube.

    +
    +
      +
    1. +

      To exit minikube docker environment use minikube docker-env -u

      +
    2. +
    +
    +
  6. +
  7. +

    In any case, if you face any error related to docker image such as Failed to pull image, or image not found errors we will have to manually push the application docker image to minikube docker cache using the below commands.

    +
  8. +
  9. +

    For better results - stop minikube using minikube stop command.

    +
  10. +
  11. +

    Execute the command minikube cache add imageName/tagName.

    +
  12. +
  13. +

    Now start the minikube. To verify if the docker image has been added to minikube docker execute minikube ssh docker images.

    +
  14. +
  15. +

    To remove any docker image from minikube docker stop any containers running that docker image and then execute minikube cache delete imageName/tagName.

    +
  16. +
  17. +

    To reload any docker image to minikube docker environment, execute minikube cache reload.

    +
  18. +
  19. +

    In any case, if the docker images are not getting removed from minikube docker environment then navigate to .minikube/cache/images and then delete the particular image.

    +
  20. +
+
+
+

Execute the below command to make the Grafana available.

+
+
+

kubectl -n istio-system port-forward $(kubectl -n istio-system get pod -l app=grafana -o jsonpath='\{.items[0].metadata.name}') 3000:3000 &

+
+
+

Use the below URLs to view the dashboard in local machine.

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-mirabaud-cicd-environment-setup.html b/docs/shop-floor/1.0/dsf-mirabaud-cicd-environment-setup.html new file mode 100644 index 00000000..c483759c --- /dev/null +++ b/docs/shop-floor/1.0/dsf-mirabaud-cicd-environment-setup.html @@ -0,0 +1,807 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Mirabaud CICD Environment Setup

+
+
+

Initial requirements:

+
+
+
    +
  • +

    OS: RHEL 6.5

    +
  • +
+
+
+

Remote setup in CI machine (located in the Netherlands)

+
+
+
+
    - Jenkins
+    - Nexus
+    - GitLab
+    - Mattermost
+    - Atlassian Crucible
+    - SonarQube
+
+
+
+
+
+

1. Install Docker and Docker Compose in RHEL 6.5

+
+ +
+
+
+

Docker

+
+
+

Due to that OS version, the only way to have Docker running in the CI machine is by installing it from the EPEL repository (Extra Packages for Enterprise Linux).

+
+
+
    +
  1. +

    Add EPEL

    +
  2. +
+
+
+
+
##rpm -iUvh http://dl.fedoraproject.org/pub/epel/6/x86_64/epel-release-6-8.noarch.rpm
+
+
+
+
    +
  1. +

    Install docker.io from that repository

    +
  2. +
+
+
+
+
##yum -y install docker-io
+
+
+
+
    +
  1. +

    Start Docker daemon

    +
  2. +
+
+
+
+
##service docker start
+
+
+
+
    +
  1. +

    Check the installation

    +
  2. +
+
+
+
+
##docker -v
+Docker version 1.7.1, build 786b29d/1.7.1
+
+
+
+
+
+

Docker Compose

+
+
+

Download and install it via curl. It will use this site.

+
+
+
+
##curl -L https://github.com/docker/compose/releases/download/1.5.0/docker-compose-`uname -s`-`uname -m` > /usr/local/bin/docker-compose
+
+##chmod +x /usr/local/bin/docker-compose
+
+
+
+

Add it to your sudo path:

+
+
+
    +
  1. +

    Find out where it is:

    +
  2. +
+
+
+
+
##echo $PATH
+
+
+
+
    +
  1. +

    Copy the docker-compose file from /usr/local/bin/ to your sudo PATH.

    +
  2. +
+
+
+
+
##docker-compose -v
+docker-compose version 1.5.2, build 7240ff3
+
+
+
+
+
+

2. Directories structure

+
+
+

Several directories had been added to organize some files related to docker (like docker-compose.yml) and docker volumes for each service. Here’s how it looks:

+
+
+
+
/home
+    /[username]
+        /jenkins
+            /volumes
+                /jenkins_home
+        /sonarqube
+            /volumes
+                /conf
+                /data
+                /extensions
+                /lib
+                    /bundled-plugins
+        /nexus
+            /volumes
+                /nexus-data
+        /crucible
+            /volumes
+                /
+        /gitlab
+            docker-compose.yml
+            /volumes
+                /etc
+                    /gitlab
+                /var
+                    /log
+                    /opt
+        /mattermost
+            docker-compose.yml
+            /volumes
+                /db
+                    /var
+                        /lib
+                            /postgresql
+                                /data
+                /app
+                    /mattermost
+                        /config
+                        /data
+                        /logs
+                /web
+                    /cert
+
+
+
+
+
+

3. CICD Services with Docker

+
+
+

Some naming conventions have been followed, such as naming containers as mirabaud_[service].

+
+
+

Several folders have been created to store each service’s volumes, docker-compose.yml(s), extra configuration settings and so on:

+
+
+
+
+

Jenkins

+
+ +
+
+
+

== Command

+
+
+
+
##docker run -d -p 8080:8080 -p 50000:50000 --name=mirabaud_jenkins \
+    -v /home/[username]/jenkins/volumes/jenkins_home:/var/jenkins_home \
+    jenkins
+
+
+
+
+
+

== Generate keystore

+
+
+
+
keytool -importkeystore -srckeystore server.p12 -srcstoretype pkcs12 -srcalias 1 -destkeystore newserver.jks -deststoretype jks -destalias server
+
+
+
+
+
+

== Start Jenkins with SSL (TODO: make a docker-compose.yml for this):

+
+
+
+
sudo docker run -d --name mirabaud_jenkins -v /jenkins:/var/jenkins_home -p 8080:8443 jenkins --httpPort=-1 --httpsPort=8443 --httpsKeyStore=/var/jenkins_home/certs/keystore.jks --httpsKeyStorePassword=Mirabaud2017
+
+
+
+
+
+

== Volumes

+
+
+
+
volumes/jenkins_home:/var/jenkins_home
+
+
+
+
+
+

SonarQube

+
+ +
+
+
+

== Command

+
+
+
+
##docker run -d -p 9000:9000 -p 9092:9092 --name=mirabaud_sonarqube \
+    -v /home/[username]/sonarqube/volumes/conf:/opt/sonarqube/conf \
+    -v /home/[username]/sonarqube/volumes/data:/opt/sonarqube/data \
+    -v /home/[username]/sonarqube/volumes/extensions:/opt/sonarqube/extensions \
+    -v /home/[username]/sonarqube/volumes/lib/bundled-plugins:/opt/sonarqube/lib/bundled-plugins \
+    sonarqube
+
+
+
+
+
+

== Volumes

+
+
+
+
volumes/conf:/opt/sonarqube/conf
+volumes/data:/opt/sonarqube/data
+volumes/extensions:/opt/sonarqube/extensions
+volumes/lib/bundled-plugins:/opt/sonarqube/lib/bundled-plugins
+
+
+
+
+
+

Nexus

+
+ +
+
+
+

== Command

+
+
+
+
##docker run -d -p 8081:8081 --name=mirabaud_nexus\
+    -v /home/[username]/nexus/nexus-data:/sonatype-work
+    sonatype/nexus
+
+
+
+
+
+

== Volumes

+
+
+
+
volumes/nexus-data/:/sonatype-work
+
+
+
+
+
+

Atlassian Crucible

+
+ +
+
+
+

== Command

+
+
+
+
##docker run -d -p 8084:8080 --name=mirabaud_crucible \
+    -v /home/[username]/crucible/volumes/data:/atlassian/data/crucible
+    mswinarski/atlassian-crucible:latest
+
+
+
+
+
+

== Volumes

+
+
+
+
volumes/data:/atlassian/data/crucible
+
+
+
+
+
+

4. CICD Services with Docker Compose

+
+
+

Both services had been deployed by using the # docker-compose up -d command from their root directories (/gitlab and /mattermost). The syntax of the two docker-compose.yml files is the one corresponding to the 1st version (due to the docker-compose v1.5).

+
+
+
+
+

GitLab

+
+ +
+
+
+

== docker-compose.yml

+
+
+
+
mirabaud:
+    image: 'gitlab/gitlab-ce:latest'
+    restart: always
+    ports:
+            - '8888:80'
+    volumes:
+            - '/home/[username]/gitlab/volumes/etc/gitlab:/etc/gitlab'
+            - '/home/[username]/gitlab/volumes/var/log:/var/log/gitlab'
+            - '/home/[username]/gitlab/volumes/var/opt:/var/opt/gitlab'
+
+
+
+
+
+

== Command (docker)

+
+
+
+
docker run -d -p 8888:80 --name=mirabaud_gitlab \
+    -v /home/[username]/gitlab/volumes/etc/gitlab/:/etc/gitlab \
+    -v /home/[username]/gitlab/volumes/var/log:/var/log/gitlab \
+    -v /home/[username]/gitlab/volumes/var/opt:/var/opt/gitlab \
+    gitlab/gitlab-ce
+
+
+
+
+
+

== Volumes

+
+
+
+
volumes/etc/gitlab:/etc/gitlab
+volumes/var/opt:/var/opt/gitlab
+volumes/var/log:/var/log/gitlab
+
+
+
+
+
+

Mattermost

+
+ +
+
+
+

== docker-compose.yml:

+
+
+
+
db:
+  image: mattermost/mattermost-prod-db
+  restart: unless-stopped
+  volumes:
+    - ./volumes/db/var/lib/postgresql/data:/var/lib/postgresql/data
+    - /etc/localtime:/etc/localtime:ro
+  environment:
+    - POSTGRES_USER=mmuser
+    - POSTGRES_PASSWORD=mmuser_password
+    - POSTGRES_DB=mattermost
+
+app:
+  image: mattermost/mattermost-prod-app
+  links:
+    - db:db
+  restart: unless-stopped
+  volumes:
+    - ./volumes/app/mattermost/config:/mattermost/config:rw
+    - ./volumes/app/mattermost/data:/mattermost/data:rw
+    - ./volumes/app/mattermost/logs:/mattermost/logs:rw
+    - /etc/localtime:/etc/localtime:ro
+  environment:
+    - MM_USERNAME=mmuser
+    - MM_PASSWORD=mmuser_password
+    - MM_DBNAME=mattermost
+
+web:
+  image: mattermost/mattermost-prod-web
+  ports:
+    - "8088:80"
+    - "8089:443"
+  links:
+    - app:app
+  restart: unless-stopped
+  volumes:
+    - ./volumes/web/cert:/cert:ro
+    - /etc/localtime:/etc/localtime:ro
+
+
+
+
+
+

== SSL Certificate

+
+
+

How to generate the certificates:

+
+
+

Get the crt and key from CA or generate a new one self-signed. Then:

+
+
+
+
// 1. create the p12 keystore
+##openssl pkcs12 -export -in cert.crt -inkey mycert.key -out certkeystore.p12
+
+// 2. export the pem certificate with password
+##openssl pkcs12 -in certkeystore.p12 -out cert.pem
+
+// 3. export the pem certificate without password
+##openssl rsa -in cert.pem -out key-no-password.pem
+
+
+
+

SSL:

+
+
+

Copy the cert and the key without password at:

+
+
+

./volumes/web/cert/cert.pem

+
+
+

and

+
+
+

./volumes/web/cert/key-no-password.pem

+
+
+

Restart the server and the SSL should be enabled at port 8089 using HTTPS.

+
+
+
+
+

== Volumes

+
+
+
+
-- db --
+volumes/db/var/lib/postgresql/data:/var/lib/postgresql/data
+/etc/localtime:/etc/localtime:ro                                # absolute path
+
+-- app --
+volumes/app/mattermost/config:/mattermost/config:rw
+volumes/app/mattermost/data:/mattermost/data:rw
+volumes/app/mattermost/logs:/mattermost/logs:rw
+/etc/localtime:/etc/localtime:ro                                # absolute path
+
+-- web --
+volumes/web/cert:/cert:ro
+/etc/localtime:/etc/localtime:ro                                # absolute path
+
+
+
+
+
+

5. Service Integration

+
+
+

All integrations had been done following CICD Services Integration guides:

+
+ +
+ + + + + +
+ + +These guides may be obsolete. You can find here the official configuration guides, +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-mirabaud-jenkins-gitLab-integration.html b/docs/shop-floor/1.0/dsf-mirabaud-jenkins-gitLab-integration.html new file mode 100644 index 00000000..e9caf74b --- /dev/null +++ b/docs/shop-floor/1.0/dsf-mirabaud-jenkins-gitLab-integration.html @@ -0,0 +1,479 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Jenkins - GitLab integration

+
+
+

The first step to have a Continuous Integration system for your development is to make sure that all your changes to your team’s remote repository are evaluated by the time they are pushed. That usually implies the usage of so-called webhooks. You’ll find a fancy explanation about what Webhooks are in here.

+
+
+

To resume what we’re doing here, we are going to prepare our Jenkins and our GitLab so when a developer pushes some changes to the GitLab repository, a pipeline in Jenkins gets triggered. Just like that, in an automatic way.

+
+
+
+
+

1. Jenkins GitLab plugin

+
+
+

As it usually happens, some Jenkins plug-in(s) must be installed. In this case, let’s install those related with GitLab:

+
+
+
+jenkins gitlab plugins +
+
+
+
+
+

2. GitLab API Token

+
+
+

To communicate with GitLab from Jenkins, we will need to create an authentication token from your GitLab user settings. A good practice for this would be to create it from a machine user. Something like (i.e.) devonfw-ci/******.

+
+
+
+gitlab access token +
+
+
+

Simply adding a name and an expiration date for it is enough:

+
+
+
+gitlab access token generation +
+
+
+
+gitlab access token generated +
+
+
+

As GitLab said, you should make sure you don’t lose your token. Otherwise you would need to create a new one.

+
+
+

This will allow Jenkins to connect with right permissions to our GitLab server.

+
+
+
+
+

3. Create "GitLab API" Token credentials

+
+
+

Those credentials will use that token already generated in GitLab to connect once we declare the GitLab server in the Global Jenkins configuration. Obviously, those credentials must be GitLab API token-like.

+
+
+
+jenkins gitlab api token credentials kind +
+
+
+

Then, we add the generated token in the API token field:

+
+
+
+jenkins gitlab api token credentials complete +
+
+
+

Look in your Global credentials if they had been correctly created:

+
+
+
+jenkins gitlab api token credentials +
+
+
+
+
+

4. Create GitLab connection in Jenkins

+
+
+

Specify a GitLab connection in your Jenkins’s Manage Jenkins > Configure System configuration. This will tell Jenkins where is our GitLab server, a user to access it from and so on.

+
+
+

You’ll need to give it a name, for example, related to what this GitLab is dedicated to (specific clients, internal projects…​). Then, the Gitlab host URL is just where your GitLab server is. If you have it locally, that field should look similar to:

+
+
+
    +
  • +

    Connection name: my-local-gitlab

    +
  • +
  • +

    Gitlab host URL: http://localhost:${PORT_NUMBER}

    +
  • +
+
+
+

Finally, we select our recently GitLab API token as credentials.

+
+
+
+jenkins gitlab connection +
+
+
+
+
+

5. Jenkins Pipeline changes

+
+ +
+
+
+

5.1 Choose GitLab connection in Pipeline’s General configuration

+
+
+

First, our pipeline should allow us to add a GitLab connection to connect to (the already created one).

+
+
+
+jenkins pipeline gitlab connection +
+
+
+

In the case of the local example, could be like this:

+
+
+
    +
  • +

    GitLab connection: my-local-gitlab

    +
  • +
  • +

    GitLab Repository Name: myusername/webhook-test (for example)

    +
  • +
+
+
+
+
+

5.2 Create a Build Trigger

+
+
+
    +
  1. +

    You should already see your GitLab project’s URL (as you stated in the General settings of the Pipeline).

    +
  2. +
  3. +

    Write .*build.* in the comment for triggering a build

    +
  4. +
  5. +

    Specify or filter the branch of your repo you want use as target. That means, whenever a git action is done to that branch (for example, master), this Pipeline is going to be built.

    +
  6. +
  7. +

    Generate a Secret token (to be added in the yet-to-be-created GitLab webhook).

    +
  8. +
+
+
+
+jenkins pipeline build trigger +
+
+
+
+
+

6. GitLab Webhook

+
+
+
    +
  1. +

    Go to you GitLab project’s Settings > Integration section.

    +
  2. +
  3. +

    Add the path to your Jenkins Pipeline. Make sure you add project instead of job in the path.

    +
  4. +
  5. +

    Paste the generated Secret token of your Jenkins pipeline

    +
  6. +
  7. +

    Select your git action that will trigger the build.

    +
  8. +
+
+
+
+gitlab webhook +
+
+
+
+
+

7. Results

+
+
+

After all those steps you should have a result similar to this in your Pipeline:

+
+
+
+jenkins pipeline result +
+
+
+

Enjoy the Continuous Integration! :)

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-mirabaud-jenkins-nexus-integration.html b/docs/shop-floor/1.0/dsf-mirabaud-jenkins-nexus-integration.html new file mode 100644 index 00000000..7e1fa247 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-mirabaud-jenkins-nexus-integration.html @@ -0,0 +1,457 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Jenkins - Nexus integration

+
+
+

Nexus is used to both host dependencies for devonfw projects to download (common Maven ones, custom ones such as ojdb and even devonfw so-far-IP modules). Moreover, it will host our projects' build artifacts (.jar, .war, …​) and expose them for us to download, wget and so on. A team should have a bidirectional relation with its Nexus repository.

+
+
+
+
+

1. Jenkins credentials to access Nexus

+
+
+

By default, when Nexus is installed, it contains 3 user credentials for different purposes. The admin ones look like this: admin/admin123. There are also 2 others: deployment/deployment123 and TODO.

+
+
+
+
// ADD USER TABLE IMAGE FROM NEXUS
+
+
+
+

In this case, let’s use the ones with the greater permissions: admin/admin123.

+
+
+

Go to Credentials > System (left sidebar of Jenkins) then to Global credentials (unrestricted) on the page table and on the left sidebar again click on Add Credentials.

+
+
+

This should be shown in your Jenkins:

+
+
+
+jenkins new nexus credentials +
+
+
+

Fill the form like this:

+
+
+
+jenkins new nexus credentials filled +
+
+
+

And click in OK to create them. Check if the whole thing went as expected:

+
+
+
+jenkins new nexus credentials completed +
+
+
+
+
+

2. Jenkins Maven Settings

+
+
+

Those settings are also configured (or maybe not-yet-configured) in our devonfw distributions in:

+
+
+
+
/${devonfw-dist-path}
+    /software
+        /maven
+            /conf
+                settings.xml
+
+
+
+

Go to Manage Jenkins > Managed files and select Add a new Config in the left sidebar.

+
+
+
+jenkins new maven settings +
+
+
+

The ID field will get automatically filled with a unique value if you don’t set it up. No problems about that. Click on Submit and let’s create some Servers Credentials:

+
+
+
+jenkins new maven settings servers +
+
+
+

Those Server Credentials will allow Jenkins to access to the different repositories/servers that are going to be declared afterwards.

+
+
+

Let’s create 4 server credentials.

+
+
+
    +
  • +

    my.nexus: Will serve as general profile for Maven.

    +
  • +
  • +

    mynexus.releases: When a mvn deploy process is executed, this will tell Maven where to push releases to.

    +
  • +
  • +

    mynexus.snapshots: The same as before, but with snapshots instead.

    +
  • +
  • +

    mynexus.central: Just in case we want to install an specific dependency that is not by default in the Maven Central repository (such as ojdbc), Maven will point to it instead.

    +
  • +
+
+
+
+jenkins new maven settings servers credentials +
+
+
+

A more or less complete Jenkins Maven settings file would look like this:

+
+
+
+
<?xml version="1.0" encoding="UTF-8"?>
+<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
+          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+          xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">
+
+    <mirrors>
+        <mirror>
+            <id>mynexus.central</id>
+            <mirrorOf>central</mirrorOf>
+            <name>central</name>
+            <url>http://${URL-TO-YOUR-NEXUS-REPOS}/central</url>
+        </mirror>
+    </mirrors>
+
+    <profiles>
+        <profile>
+            <id>my.nexus</id>
+            <!-- 3 REPOS ARE DECLARED -->
+            <repositories>
+                <repository>
+                    <id>mynexus.releases</id>
+                    <name>mynexus Releases</name>
+                    <url>http://${URL-TO-YOUR-NEXUS-REPOS}/releases</url>
+                    <releases>
+                        <enabled>true</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </releases>
+                    <snapshots>
+                        <enabled>false</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </snapshots>
+                </repository>
+                <repository>
+                    <id>mynexus.snapshots</id>
+                    <name>mynexus Snapshots</name>
+                    <url>http://${URL-TO-YOUR-NEXUS-REPOS}/snapshots</url>
+                    <releases>
+                        <enabled>false</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </releases>
+                    <snapshots>
+                        <enabled>true</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </snapshots>
+                </repository>
+            </repositories>
+            <pluginRepositories>
+                <pluginRepository>
+                    <id>public</id>
+                    <name>Public Repositories</name>
+                    <url>http://${URL-TO-YOUR-NEXUS}/nexus/content/groups/public/</url>
+                    <releases>
+                        <enabled>true</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </releases>
+                    <snapshots>
+                        <enabled>true</enabled>
+                        <updatePolicy>always</updatePolicy>
+                    </snapshots>
+                </pluginRepository>
+            </pluginRepositories>
+        </profile>
+    </profiles>
+    <!-- HERE IS WHERE WE TELL MAVEN TO CHOOSE THE my.nexus PROFILE -->
+    <activeProfiles>
+        <activeProfile>my.nexus</activeProfile>
+    </activeProfiles>
+</settings>
+
+
+
+
+
+

3. Use it in Jenkins Pipelines

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-mirabaud-jenkins-sonarqube-integration.html b/docs/shop-floor/1.0/dsf-mirabaud-jenkins-sonarqube-integration.html new file mode 100644 index 00000000..9d14f6bc --- /dev/null +++ b/docs/shop-floor/1.0/dsf-mirabaud-jenkins-sonarqube-integration.html @@ -0,0 +1,594 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Jenkins - SonarQube integration

+
+
+

The first thing is installing both tools using, for example, Docker or Docker Compose. Then, we have to think about how they should collaborate to create a more efficient Continuous Integration process.

+
+
+

Once our project’s pipeline is triggered (it could also be triggered in a fancy way, such as when a merge to the develop branch is done).

+
+
+
+
+

1. Jenkins SonarQube plugin

+
+
+

Typically in those integration cases, Jenkins plug-in installations become a must. Let’s look for some available SonarQube plug-in(s) for Jenkins:

+
+
+
+jenkins sonarqube plugin +
+
+
+
+
+

2. SonarQube token

+
+
+

Once installed let’s create a token in SonarQube so that Jenkins can communicate with it to trigger their Jobs. Once we install SonarQube in our CI/CD machine (ideally a remote machine) let’s login with admin/admin credentials:

+
+
+
+sonarqube login +
+
+
+

Afterwards, SonarQube itself asks you to create this token we talked about (the name is up to you):

+
+
+
+sonarqube token name +
+
+
+

Then a token is generated:

+
+
+
+sonarqube token generation +
+
+
+

You click in "continue" and the token’s generation is completed:

+
+
+
+sonarqube token done +
+
+
+
+
+

3. Jenkins SonarQube Server setup

+
+
+

Now we need to tell Jenkins where SonarQube is and how to communicate with it. In Manage Jenkins > Configure System, we add a name for the server (up to you), where it is located (URL), the version, and the Server authentication token created in point 2.

+
+
+
+jenkins sonarqube server setup +
+
+
+
+
+

4. Jenkins SonarQube Scanner

+
+
+

Install a SonarQube Scanner as a Global tool in Jenkins to be used in the project’s pipeline.

+
+
+
+jenkins sonarqube scanner +
+
+
+
+
+

5. Pipeline code

+
+
+

Last step is to add the SonarQube process in our project’s Jenkins pipeline. The following code will trigger a SonarQube process that will evaluate our code’s quality looking for bugs, duplications, and so on.

+
+
+
+
    stage 'SonarQube Analysis'
+        def scannerHome = tool 'SonarQube scanner';
+        sh "${scannerHome}/bin/sonar-scanner \
+             -Dsonar.host.url=http://url-to-your-sq-server:9000/ \
+             -Dsonar.login=[SONAR_USER] -Dsonar.password=[SONAR_PASS] \
+             -Dsonar.projectKey=[PROJECT_KEY] \
+             -Dsonar.projectName=[PROJECT_NAME] -Dsonar.projectVersion=[PROJECT_VERSION] \
+             -Dsonar.sources=. -Dsonar.java.binaries=. \
+             -Dsonar.java.source=1.8 -Dsonar.language=java"
+
+
+
+
+
+

6. Results

+
+
+

After all this, you should end up having something like this in Jenkins:

+
+
+
+jenkins sonarqube feedback +
+
+
+

And in SonarQube:

+
+
+
+sonarqube project result +
+
+
+
+
+

7. Changes in a devonfw project to execute SonarQube tests with Coverage

+
+
+

The plugin used to have Coverage reports in the SonarQube for devonfw projects is Jacoco. There are some changes in the project’s parent pom.xml that are mandatory to use it.

+
+
+

Inside of the <properties> tag:

+
+
+
+
<properties>
+
+    (...)
+
+    <sonar.jacoco.version>3.8</sonar.jacoco.version>
+    <sonar.java.coveragePlugin>jacoco</sonar.java.coveragePlugin>
+    <sonar.core.codeCoveragePlugin>jacoco</sonar.core.codeCoveragePlugin>
+    <sonar.dynamicAnalysis>reuseReports</sonar.dynamicAnalysis>
+    <sonar.language>java</sonar.language>
+    <sonar.java.source>1.7</sonar.java.source>
+    <sonar.junit.reportPaths>target/surefire-reports</sonar.junit.reportPaths>
+    <sonar.jacoco.reportPaths>target/jacoco.exec</sonar.jacoco.reportPaths>
+    <sonar.sourceEncoding>UTF-8</sonar.sourceEncoding>
+    <sonar.exclusions>
+        **/generated-sources/**/*,
+        **io/oasp/mirabaud/general/**/*,
+        **/*Dao.java,
+        **/*Entity.java,
+        **/*Cto.java,
+        **/*Eto.java,
+        **/*SearchCriteriaTo.java,
+        **/*management.java,
+        **/*SpringBootApp.java,
+        **/*SpringBootBatchApp.java,
+        **/*.xml,
+        **/*.jsp
+    </sonar.exclusions>
+    <sonar.coverage.exclusions>
+        **io/oasp/mirabaud/general/**/*,
+        **/*Dao.java,
+        **/*Entity.java,
+        **/*Cto.java,
+        **/*Eto.java,
+        **/*SearchCriteriaTo.java,
+        **/*management.java,
+        **/*SpringBootApp.java,
+        **/*SpringBootBatchApp.java,
+        **/*.xml,
+        **/*.jsp
+    </sonar.coverage.exclusions>
+    <sonar.host.url>http://${YOUR_SONAR_SERVER_URL}/</sonar.host.url>
+    <jacoco.version>0.7.9</jacoco.version>
+
+    <war.plugin.version>3.2.0</war.plugin.version>
+    <assembly.plugin.version>3.1.0</assembly.plugin.version>
+</properties>
+
+
+
+

Of course, those sonar and sonar.coverage properties can/must be changed to fit other projects.

+
+
+

Now add the Jacoco Listener as a dependency:

+
+
+
+
<dependencies>
+    <dependency>
+        <groupId>org.sonarsource.java</groupId>
+        <artifactId>sonar-jacoco-listeners</artifactId>
+        <scope>test</scope>
+    </dependency>
+</dependencies>
+
+
+
+

Plugin Management declarations:

+
+
+
+
<pluginManagement>
+    <plugins>
+        <plugin>
+            <groupId>org.sonarsource.scanner.maven</groupId>
+            <artifactId>sonar-maven-plugin</artifactId>
+            <version>3.2</version>
+        </plugin>
+        <plugin>
+            <groupId>org.jacoco</groupId>
+            <artifactId>jacoco-maven-plugin</artifactId>
+            <version>${jacoco.version}</version>
+        </plugin>
+    </plugins>
+</pluginManagement>
+
+
+
+

Plugins:

+
+
+
+
<plugins>
+
+    (...)
+
+    <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <version>2.20.1</version>
+        <configuration>
+            <!-- Maven keeps only the last <argLine> in a configuration, so both values are merged into a single element. -->
+            <argLine>-XX:-UseSplitVerifier -Xmx2048m ${surefireArgLine} ${jacoco.agent.argLine}</argLine>
+            <testFailureIgnore>false</testFailureIgnore>
+            <useFile>false</useFile>
+            <reportsDirectory>${project.basedir}/${sonar.junit.reportPaths}</reportsDirectory>
+            <excludedGroups>${oasp.test.excluded.groups}</excludedGroups>
+            <alwaysGenerateSurefireReport>true</alwaysGenerateSurefireReport>
+            <aggregate>true</aggregate>
+            <properties>
+                <property>
+                    <name>listener</name>
+                    <value>org.sonar.java.jacoco.JUnitListener</value>
+                </property>
+            </properties>
+        </configuration>
+    </plugin>
+    <plugin>
+        <groupId>org.jacoco</groupId>
+        <artifactId>jacoco-maven-plugin</artifactId>
+        <configuration>
+            <argLine>-Xmx128m</argLine>
+            <append>true</append>
+            <propertyName>jacoco.agent.argLine</propertyName>
+            <destFile>${sonar.jacoco.reportPaths}</destFile>
+            <excludes>
+                <exclude>**/generated-sources/**/*,</exclude>
+                <exclude>**io/oasp/${PROJECT_NAME}/general/**/*</exclude>
+                <exclude>**/*Dao.java</exclude>
+                <exclude>**/*Entity.java</exclude>
+                <exclude>**/*Cto.java</exclude>
+                <exclude>**/*Eto.java</exclude>
+                <exclude>**/*SearchCriteriaTo.java</exclude>
+                <exclude>**/*management.java</exclude>
+                <exclude>**/*SpringBootApp.java</exclude>
+                <exclude>**/*SpringBootBatchApp.java</exclude>
+                <exclude>**/*.class</exclude>
+            </excludes>
+        </configuration>
+        <executions>
+            <execution>
+                <id>prepare-agent</id>
+                <phase>initialize</phase>
+                <goals>
+                    <goal>prepare-agent</goal>
+                </goals>
+                <configuration>
+                    <destFile>${sonar.jacoco.reportPaths}</destFile>
+                    <append>true</append>
+                </configuration>
+            </execution>
+            <execution>
+                <id>report-aggregate</id>
+                <phase>verify</phase>
+                <goals>
+                    <goal>report-aggregate</goal>
+                </goals>
+            </execution>
+            <execution>
+                <id>jacoco-site</id>
+                <phase>verify</phase>
+                <goals>
+                    <goal>report</goal>
+                </goals>
+            </execution>
+        </executions>
+    </plugin>
+</plugins>
+
+
+
+
+
+

Jenkins SonarQube execution

+
+
+

If the previous configuration is already set up, once Jenkins executes the sonar maven plugin, it will automatically execute coverage as well.

+
+
+

This is an example of a block of code from a devonfw project’s Jenkinsfile:

+
+
+
+
    withMaven(globalMavenSettingsConfig: 'YOUR_GLOBAL_MAVEN_SETTINGS', jdk: 'OpenJDK 1.8', maven: 'Maven_3.3.9') {
+        sh "mvn sonar:sonar -Dsonar.login=[USERNAME] -Dsonar.password=[PASSWORD]"
+    }
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-okd-customize-catalog.html b/docs/shop-floor/1.0/dsf-okd-customize-catalog.html new file mode 100644 index 00000000..e7935686 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-okd-customize-catalog.html @@ -0,0 +1,383 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

How to add custom catalog categories inside openshift

+
+
+

This is a guide to add custom Catalog Categories into an Openshift cluster.

+
+
+

Here we can find a catalog-categories.js example to use the devonfw catalog categories.

+
+
+
+
+

Create a script to add custom languages and custom catalog categories

+
+ +
+
+
+

Custom language

+
+
+

For this example, we are going to add a new language to the languages category. To do that we must create a script, which we name catalog-categories.js

+
+
+
+
// Find the Languages category.
+var category = _.find(window.OPENSHIFT_CONSTANTS.SERVICE_CATALOG_CATEGORIES,
+                      { id: 'languages' });
+// Add Go as a new subcategory under Languages.
+category.subCategories.splice(2,0,{ // Insert at the third spot.
+  // Required. Must be unique.
+  id: "devonfw-languages",
+  // Required.
+  label: "devonfw",
+  // Optional. If specified, defines a unique icon for this item.
+  icon: "devonfw-logo-language",
+  // Required. Items matching any tag will appear in this subcategory.
+  tags: [
+    "devonfw",
+    "devonfw-angular",
+    "devonfw-java"
+  ]
+});
+
+
+
+
+
+

Custom category

+
+
+

For this example, we are going to add a new category to the category tab. To do that we must create a script, which we name catalog-categories.js

+
+
+
+
// Add a Featured category as the first category tab.
+window.OPENSHIFT_CONSTANTS.SERVICE_CATALOG_CATEGORIES.unshift({
+  // Required. Must be unique.
+  id: "devonfw-featured",
+  // Required
+  label: "devonfw",
+  subCategories: [
+    {
+      // Required. Must be unique.
+      id: "devonfw-languages",
+      // Required.
+      label: "devonfw",
+      // Optional. If specified, defines a unique icon for this item.
+      icon: "devonfw-logo-language",
+      // Required. Items matching any tag will appear in this subcategory.
+      tags: [
+        "devonfw",
+        "devonfw-angular",
+        "devonfw-java"
+      ]
+    }
+  ]
+});
+
+
+
+
+
+

Use our own javascript inside openshift

+
+
+

To do that, we need to enter in openshift as an admin and use the next command:

+
+
+
+
$ oc login
+$ oc edit configmap/webconsole-config -n openshift-web-console
+
+
+
+

After this, we can see in our shell the webconsole-config.yaml, we only need to navigate until extensions and add the url for our own javascript in the scriptURLs section.

+
+
+

IMPORTANT: Scripts and stylesheets must be served with the correct content type or they will not be run by the browser. Scripts must be served with Content-Type: application/javascript.

+
+
+

In git repositories, the content type of raw is text/plain. You can use rawgit to convert a raw from a git repository to the correct content type.

+
+
+

Example:

+
+
+
+
webconsole-config.yaml: |
+  [...]
+    extensions:
+      scriptURLs:
+        - https://cdn.rawgit.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/scripts/catalog-categories.js
+  [...]
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-okd-customize-icons.html b/docs/shop-floor/1.0/dsf-okd-customize-icons.html new file mode 100644 index 00000000..f32808c1 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-okd-customize-icons.html @@ -0,0 +1,398 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

How to add Custom Icons inside openshift

+
+
+

This is a guide to add custom icons into an Openshift cluster.

+
+
+

Here we can find an icons.css example to use the devonfw icons.

+
+
+
+
+

Images Styles

+
+
+

The icons for templates must have the dimensions listed below, or the images will not display correctly:

+
+
+
    +
  • +

    Openshift logo: 230px x 40px.

    +
  • +
  • +

    Template logo: 50px x 50px.

    +
  • +
  • +

    Category logo: 110px x 36px.

    +
  • +
+
+
+
+
+

Create a css

+
+ +
+
+
+

Custom logo for openshift cluster

+
+
+

For this example, we are going to call the css icons.css, but you can name it as you wish. +The Openshift cluster draws its icon using the id header-logo, so we only need to add the following Style Attribute ID to our icons.css

+
+
+
+
#header-logo {
+  background-image: url("https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/images/devonfw-openshift.png);
+  width: 230px;
+  height: 40px;
+}
+
+
+
+
+
+

Custom icons for templates

+
+
+

To use a custom icon to a template openshift use a class name. Then, we need to insert inside our icons.css the next Style Class

+
+
+
+
.devonfw-logo {
+  background-image: url("https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/images/devonfw.png");
+  width: 50px;
+  height: 50px;
+}
+
+
+
+

To show that custom icon on a template, we only need to write the name of our class in the tag "iconClass" of our template.

+
+
+
+
{
+    ...
+    "items": [
+        {
+            ...
+            "metadata": {
+                ...
+                "annotations": {
+                    ...
+                    "iconClass": "devonfw-logo",
+                    ...
+                }
+            },
+            ...
+        }
+    ]
+}
+
+
+
+
+
+

Use our own css inside openshift

+
+
+

To do that, we need to enter in openshift as an admin and use the next command:

+
+
+
+
$ oc login
+$ oc edit configmap/webconsole-config -n openshift-web-console
+
+
+
+

After this, we can see in our shell the webconsole-config.yaml, we only need to navigate until extensions and add the url for our own css in the stylesheetURLs section.

+
+
+

IMPORTANT: Scripts and stylesheets must be served with the correct content type or they will not be run by the browser. stylesheets must be served with Content-Type: text/css.

+
+
+

In git repositories, the content type of raw is text/plain. You can use rawgit to convert a raw from a git repository to the correct content type.

+
+
+

Example:

+
+
+
+
webconsole-config.yaml: |
+	[...]
+    extensions:
+      stylesheetURLs:
+		- https://cdn.rawgit.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/stylesheet/icons.css
+    [...]
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-okd-customize-v3-7.html b/docs/shop-floor/1.0/dsf-okd-customize-v3-7.html new file mode 100644 index 00000000..52bdd89d --- /dev/null +++ b/docs/shop-floor/1.0/dsf-okd-customize-v3-7.html @@ -0,0 +1,438 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Customize Openshift Origin v3.7 for devonfw

+
+
+

This is a guide to customize Openshift cluster. For more information read the next:

+
+
+ +
+
+
+
+

Images Styles

+
+
+

The icons for templates must have the dimensions listed below, or the images will not display correctly:

+
+
+
    +
  • +

    Openshift logo: 230px x 40px.

    +
  • +
  • +

    Template logo: 50px x 50px.

    +
  • +
  • +

    Category logo: 110px x 36px.

    +
  • +
+
+
+
+
+

Quick Use

+
+
+

This is a quick example to add custom icons and categories inside openshift.

+
+
+

To modify the icons inside openshift, we must modify the master-config.yaml of our openshift cluster. This file is inside the openshift container, and to obtain a copy of it we must know the name of our openshift container.

+
+
+
+
+

Obtain the master-config.yaml of our openshift cluster

+
+ +
+
+
+

Obtain the name of our openshift container

+
+
+

To obtain it, we can execute the following:

+
+
+
+
$ docker container ls
+CONTAINER ID        IMAGE                                           COMMAND                  CREATED             STATUS              PORTS                                     NAMES
+83a4e3acda5b        openshift/origin:v3.7.0                         "/usr/bin/openshift …"   6 days ago          Up 6 days                                                     origin
+
+
+
+

Here we can see that the name of the container is origin. Normally, the container is called origin.

+
+
+
+
+

Copy the master-config.yaml of our openshift container to our directory

+
+
+

This file is inside the openshift container in the next directory: /var/lib/origin/openshift.local.config/master/master-config.yaml and we can copy it with the next command:

+
+
+
+
$ docker cp origin:/var/lib/origin/openshift.local.config/master/master-config.yaml ./
+
+
+
+

Now we have a file with the configuration of our openshift cluster.

+
+
+
+
+

Copy all customize files inside the openshift container

+
+
+

To use our customization of devonfw Openshift, we need to copy our files inside the openshift container.

+
+
+

To do this we need to copy the images, scripts and stylesheets from here into the openshift +container; for example, we could put it all inside a folder called openshift.local.devonfw. In step one we obtained the name of this container; for this example we assume that it is called origin. Then our images are located inside the openshift container, and we can see and access them in /var/lib/origin/openshift.local.devonfw/images.

+
+
+
+
$ docker cp ./openshift.local.devonfw origin:/var/lib/origin/
+
+
+
+
+
+

Edit and copy the master-config.yaml to use our customize files

+
+
+

The master-config.yaml has sections to load our custom files. All these sections are inside the assetConfig and their names are the following:

+
+
+
    +
  • +

    The custom stylessheets are into extensionStylesheets.

    +
  • +
  • +

    The custom scripts are into extensionScripts.

    +
  • +
  • +

    The custom images are into extensions.

    +
  • +
+
+
+

To use all our custom elements, we only need to add the directory routes of each element in the appropriate section of the master-config.yaml

+
+
+
+
...
+assetConfig:
+  ...
+  extensionScripts:
+  - /var/lib/origin/openshift.local.devonfw/scripts/catalog-categories.js
+  extensionStylesheets:
+  - /var/lib/origin/openshift.local.devonfw/stylesheet/icons.css
+  extensions:
+  - name: images
+    sourceDirectory: /var/lib/origin/openshift.local.devonfw/images
+  ...
+...
+
+
+
+

Now we only need to copy that master-config.yaml inside openshift, and restart it to load the new configuration. To do that execute the next:

+
+
+
+
$ docker cp ./master-config.yaml origin:/var/lib/origin/openshift.local.config/master/master-config.yaml
+
+
+
+

To restart openshift, run oc cluster down and then start your persistent openshift cluster again.

+
+
+
+
+

More information

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-okd-customize.html b/docs/shop-floor/1.0/dsf-okd-customize.html new file mode 100644 index 00000000..5a7f5fca --- /dev/null +++ b/docs/shop-floor/1.0/dsf-okd-customize.html @@ -0,0 +1,363 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Customize Openshift Origin for devonfw

+
+
+

This is a guide to customize Openshift cluster.

+
+
+
+
+

Images Styles

+
+
+

The icons for templates must have the dimensions listed below, or the images will not display correctly:

+
+
+
    +
  • +

    Openshift logo: 230px x 40px.

    +
  • +
  • +

    Template logo: 50px x 50px.

    +
  • +
  • +

    Category logo: 110px x 36px.

    +
  • +
+
+
+
+
+

How to use

+
+
+

To use it, we need to enter in openshift as an admin and use the next command:

+
+
+
+
$ oc login
+
+$ oc edit configmap/webconsole-config -n openshift-web-console
+
+
+
+

After this, we can see in our shell the webconsole-config.yaml, we only need to navigate until extensions and add the url for our own css in the stylesheetURLs and javascript in the scriptURLs section.

+
+
+

IMPORTANT: Scripts and stylesheets must be served with the correct content type or they will not be run by the browser. Scripts must be served with Content-Type: application/javascript and stylesheets with Content-Type: text/css.

+
+
+

In git repositories, the content type of raw is text/plain. You can use rawgit to convert a raw from a git repository to the correct content type.

+
+
+

Example:

+
+
+
+
webconsole-config.yaml: |
+  [...]
+    extensions:
+      scriptURLs:
+        - https://cdn.rawgit.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/scripts/catalog-categories.js
+      stylesheetURLs:
+        - https://cdn.rawgit.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-cluster-setup/initial-setup/customizeOpenshift/stylesheet/icons.css
+  [...]
+
+
+
+
+
+

More information

+
+
+ +
+
+
+
+

Old versions

+
+
+ +
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-okd-how-to-install.html b/docs/shop-floor/1.0/dsf-okd-how-to-install.html new file mode 100644 index 00000000..ea3c3da0 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-okd-how-to-install.html @@ -0,0 +1,421 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Install OKD (Openshift Origin)

+
+ +
+
+
+

Pre-requisites

+
+ +
+
+
+

Install docker

+
+ +
+
+
$ sudo groupadd docker
+$ sudo usermod -aG docker $USER
+
+
+
+
+
+

Download Openshift Origin Client

+
+
+

Download Openshift Origin Client from here

+
+
+

When the download is complete, simply extract it into the directory that you want, for example /home/administrador/oc

+
+
+
+
+

Add oc to path

+
+
+
+
$ export PATH=$PATH:/home/administrador/oc
+
+
+
+
+
+

Install Openshift Cluster

+
+ +
+
+
+

Add the insecure registry

+
+
+

Create file /etc/docker/daemon.json with the next content:

+
+
+
+
{
+    "insecure-registries" : [ "172.30.0.0/16" ]
+}
+
+
+
+
+
+

Download docker images for openshift

+
+
+
+
$ oc cluster up
+
+
+
+
+
+

Install Oc Cluster Wrapper

+
+
+

To manage the persistent cluster more easily, we are going to use oc cluster wrapper.

+
+
+
+
cd /home/administrador/oc
+wget https://raw.githubusercontent.com/openshift-evangelists/oc-cluster-wrapper/master/oc-cluster
+
+
+
+

oc-cluster up devonfw-shop-floor --public-hostname X.X.X.X

+
+
+
+
+

Configure iptables

+
+
+

We must create iptables rules to allow traffic from other machines.

+
+
+
+
- The following commands allow all traffic; do not do this on a real server.
+
+- $ iptables -F
+- $ iptables -X
+- $ iptables -t nat -F
+- $ iptables -t nat -X
+- $ iptables -t mangle -F
+- $ iptables -t mangle -X
+- $ iptables -P INPUT ACCEPT
+- $ iptables -P OUTPUT ACCEPT
+- $ iptables -P FORWARD ACCEPT
+
+
+
+
+
+

How to use Oc Cluster Wrapper

+
+
+

With oc cluster wrapper we could have different clusters with different context.

+
+
+
+
+

Cluster up

+
+
+
+
$ oc-cluster up devonfw-shop-floor --public-hostname X.X.X.X
+
+
+
+
+
+

Cluster down

+
+
+
+
$ oc-cluster down
+
+
+
+
+
+

Use non-persistent cluster

+
+
+
+
oc cluster up --image openshift/origin --public-hostname X.X.X.X --routing-suffix apps.X.X.X.X.nip.io
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-okd-initial-setup.html b/docs/shop-floor/1.0/dsf-okd-initial-setup.html new file mode 100644 index 00000000..0f590a3e --- /dev/null +++ b/docs/shop-floor/1.0/dsf-okd-initial-setup.html @@ -0,0 +1,327 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw Openshift Origin Initial Setup

+
+
+

These are scripts to customize an Openshift cluster to be a devonfw Openshift.

+
+
+
+
+

How to use

+
+ +
+
+
+

Prerequisite: Customize Openshift

+
+
+

devonfw Openshift Origin use custom icons, and we need to add it to openshift. More information:

+
+ +
+
+
+

Script initial-setup

+
+
+

Download this script and execute it.

+
+
+

More information about what this script does here.

+
+
+
+
+

Known issues

+
+ +
+
+
+

Failed to push image

+
+
+

If you receive an error like this:

+
+
+
+
error: build error: Failed to push image: After retrying 6 times, Push image still failed due to error: Get http://172.30.1.1:5000/v2/:  dial tcp 172.30.1.1:5000: getsockopt: connection refused
+
+
+
+

It’s because the registry isn’t working. Go to the openshift console and enter the default project https://x.x.x.x:8443/console/project/default/overview; you should see two resources, docker-registry and router, and both must be running. If they don’t work, try to deploy them and check the logs to see what is happening.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-okd-s2i.html b/docs/shop-floor/1.0/dsf-okd-s2i.html new file mode 100644 index 00000000..e467f1ca --- /dev/null +++ b/docs/shop-floor/1.0/dsf-okd-s2i.html @@ -0,0 +1,385 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

s2i devonfw

+
+
+

These are the s2i sources and templates to build s2i images. They provide OpenShift builder images for components of devonfw (at this moment only for angular and java).

+
+
+

This work is totally based on the implementation of Michael Kuehl from RedHat for Oasp s2i.

+
+
+

All this information is used as a part of the initial setup for openshift.

+
+
+
+
+

Previous setup

+
+
+

In order to build all of this, it will be necessary, first, to have a running OpenShift cluster. How to install it here.

+
+
+
+
+

Usage

+
+
+

Before using the builder images, add them to the OpenShift cluster.

+
+
+
+
+

Deploy the Source-2-Image builder images

+
+
+

First, create a dedicated devonfw project as admin.

+
+
+
+
$ oc new-project devonfw --display-name='devonfw' --description='devonfw Application Standard Platform'
+
+
+
+

Now add the builder image configuration and start their build.

+
+
+
+
oc create -f https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-devonfw-deployment/s2i/java/s2i-devonfw-java-imagestream.json --namespace=devonfw
+oc create -f https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-devonfw-deployment/s2i/angular/s2i-devonfw-angular-imagestream.json --namespace=devonfw
+oc start-build s2i-devonfw-java --namespace=devonfw
+oc start-build s2i-devonfw-angular --namespace=devonfw
+
+
+
+

Make sure other projects can access the builder images:

+
+
+
+
oc policy add-role-to-group system:image-puller system:authenticated --namespace=devonfw
+
+
+
+

That’s all!

+
+
+
+
+

Deploy devonfw templates

+
+
+

Now, it’s time to create devonfw templates to use this s2i and add it to the browse catalog. More information here.

+
+
+
+
+

Build All

+
+
+

Use this script to automatically install and build all image streams. The script also creates templates devonfw-angular and devonfw-java inside the project 'openshift' to be used by everyone.

+
+
+
    +
  1. +

    Open a bash shell as Administrator

    +
  2. +
  3. +

    Execute shell file:

    +
  4. +
+
+
+
+
$ /PATH/TO/BUILD/FILE/initial-setup.sh
+
+
+
+

More information about what this script does here.

+
+
+
+
+ +
+
+

This is a list of useful articles, etc, that I found while creating the templates.

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-okd-templates.html b/docs/shop-floor/1.0/dsf-okd-templates.html new file mode 100644 index 00000000..029088e4 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-okd-templates.html @@ -0,0 +1,401 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw templates

+
+
+

These are the devonfw templates to build devonfw apps for Openshift using the s2i images. They are based on the work of Mickuehl in Oasp templates/mythaistar to deploy My Thai Star.

+
+
+
    +
  • +

    Inside the example-mythaistar we have an example to deploy My Thai Star application using devonfw templates.

    +
  • +
+
+
+

All this information is used as a part of the initial setup for openshift.

+
+
+
+
+

How to use

+
+ +
+
+
+

Previous requirements

+
+ +
+
+
+

Deploy the Source-2-Image builder images

+
+
+

Remember that this templates need a build image from s2i-devonfw-angular and s2i-devonfw-java. More information:

+
+ +
+
+
+

Customize Openshift

+
+
+

Remember that this templates also have custom icons, and to use it, we must modify the master-config.yml inside openshift. More information:

+
+
+ +
+
+
+
+

Deploy devonfw templates

+
+
+

Now, it’s time to create devonfw templates to use this s2i and add it to the browse catalog.

+
+
+

To let all users use these templates in all openshift projects, we should create them in the openshift namespace. To do that, we must log in as an admin.

+
+
+
+
oc create -f https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-devonfw-deployment/templates/devonfw-java-template.json --namespace=openshift
+oc create -f https://raw.githubusercontent.com/devonfw/devonfw-shop-floor/master/dsf4openshift/openshift-devonfw-deployment/templates/devonfw-angular-template.json --namespace=openshift
+
+
+
+

When it finishes, remember to logout as an admin and enter with our normal user.

+
+
+
+
$ oc login
+
+
+
+
+
+

How to use devonfw templates in openshift

+
+
+

To use these templates with openshift, we can override any parameter values defined in the file by adding the --param-file=paramfile option.

+
+
+

This file must be a list of <name>=<value> pairs. A parameter reference may appear in any text field inside the template items.

+
+
+

The parameters that we must override are the following

+
+
+
+
$ cat paramfile
+  APPLICATION_NAME=app-Name
+  APPLICATION_GROUP_NAME=group-Name
+  GIT_URI=Git uri
+  GIT_REF=master
+  CONTEXT_DIR=/context
+
+
+
+

The following parameters are optional

+
+
+
+
$ cat paramfile
+  APPLICATION_HOSTNAME=Custom hostname for service routes. Leave blank for default hostname, e.g.: <application-name>.<project>.<default-domain-suffix>,
+  # Only for angular
+  REST_ENDPOINT_URL=The URL of the backend's REST API endpoint. This can be declared after,
+  REST_ENDPOINT_PATTERN=The pattern URL of the backend's REST API endpoint that must be modify by the REST_ENDPOINT_URL variable,
+
+
+
+

For example, to deploy My Thai Star Java

+
+
+
+
$ cat paramfile
+  APPLICATION_NAME="mythaistar-java"
+  APPLICATION_GROUP_NAME="My-Thai-Star"
+  GIT_URI="https://github.com/devonfw/my-thai-star.git"
+  GIT_REF="develop"
+  CONTEXT_DIR="/java/mtsj"
+
+$ oc new-app --template=devonfw-java --namespace=mythaistar --param-file=paramfile
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-okd.html b/docs/shop-floor/1.0/dsf-okd.html new file mode 100644 index 00000000..a4a6402b --- /dev/null +++ b/docs/shop-floor/1.0/dsf-okd.html @@ -0,0 +1,320 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

OKD (OpenShift Origin)

+
+ +
+
+
+

What is OKD

+
+
+

OKD is a distribution of Kubernetes optimized for continuous application development and multi-tenant deployment. OKD is the upstream Kubernetes distribution embedded in Red Hat OpenShift.

+
+
+

OKD embeds Kubernetes and extends it with security and other integrated concepts. OKD is also referred to as Origin in github and in the documentation.

+
+
+

OKD provides a complete open source container application platform. If you are looking for enterprise-level support, or information on partner certification, Red Hat also offers Red Hat OpenShift Container Platform.

+
+
+

Continue reading…​

+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-openshift-service-account.html b/docs/shop-floor/1.0/dsf-openshift-service-account.html new file mode 100644 index 00000000..05437b10 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-openshift-service-account.html @@ -0,0 +1,345 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

OpenShift Service Accounts

+
+
+

The service accounts are special system users associated with projects. Like regular users, service accounts have a token to connect with the OpenShift API. The main difference is that service account tokens are long-lived. By using the service account tokens you can access the OpenShift API without sharing/exposing your user password/token.

+
+
+
+
+

How to create a Service Account

+
+
+

The process to create a service account is very simple, you only need to execute the following command:

+
+
+
+
oc create sa <service account name>
+
+
+
+

This command will create a service account in your current project. You can create a service account in another project by using the -n parameter.

+
+
+
+
+

How to give rights to a Service Account

+
+
+

By default the service account has no rights. In order to give rights to edit the project, you need to execute the command:

+
+
+
+
oc policy add-role-to-user edit -z <service account name>
+
+
+
+
+
+

Get the service account role

+
+
+

In order to get the service account token, you only need to:

+
+
+
+
oc describe sa <service account name>
+
+
+
+

Get the secret name for the token:

+
+
+
+sa secret +
+
+
+

Then:

+
+
+
+
oc describe secret <previous step token secret name>
+
+
+
+
+sa secret2 +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-openshift-services-bitbucket-basic-server-setup.html b/docs/shop-floor/1.0/dsf-openshift-services-bitbucket-basic-server-setup.html new file mode 100644 index 00000000..5ef0857c --- /dev/null +++ b/docs/shop-floor/1.0/dsf-openshift-services-bitbucket-basic-server-setup.html @@ -0,0 +1,515 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

[Under construction]

+
+
+
+

The purpose of the present document is to provide the basic steps carried out to setup a BitBucket server in OpenShift.

+
+
+

Introduction

+
+
+

BitBucket is the Atlassian tool that extends the Git functionality, by adding integration with JIRA, Confluence, or Trello, as well as incorporates extra features for security or management of user accounts (See BitBucket).

+
+
+

BitBucket server is the Atlassian tool that runs the BitBucket services (See BitBucket server).

+
+
+

The approach followed here does not use the command line but the OpenShift Web Console, deploying the Docker image atlassian/bitbucket-server (available in Docker Hub) in the existing project Deployment.

+
+
+

The procedure exposed below basically consists of three main steps:

+
+
+
    +
  1. +

    Deploy the BitBucket server image (from OpenShift web console)

    +
  2. +
  3. +

    Add a route for the external traffic (from OpenShift web console)

    +
  4. +
  5. +

    Configure the BitBucket server (from BitBucket server web console)

    +
  6. +
+
+
+
+
+

Prerequisites

+
+
+
    +
  • +

    OpenShift up & running

    +
  • +
  • +

    Atlassian account (with personal account key). Not required for OpenShift, but for the initial BitBucket server configuration.

    +
  • +
+
+
+
+
+

Procedure

+
+ +
+
+
+

Step 0: Log into our OpenShift Web console

+
+
+
+step0 +
+
+
+
+
+

Step 1: Get into Development project

+
+
+
+] +
+
+
+
+
+

Step 2.1: Deploy a new image to the project

+
+
+
+step2.1 +
+
+
+
+
+ +
+
+

Image name: atlassian/bitbucket-server

+
+
+
+step2.2 +
+
+
+
+
+

Step 2.3: Leave by the moment the default config. since it is enough for the basic setup. Press Create

+
+
+
+step2.3 +
+
+
+
+
+

Step 2.4: Copy the oc commands in case it is required to work via command line, and Go to overview

+
+
+
+step2.4 +
+
+
+
+
+

Step 2.5: Wait until OpenShift deploys and starts up the image. All the info will be available.

+
+
+

Please notice that there are no pre-configured routes, hence the application is not accessible from outside the cluster.

+
+
+
+step2.5 +
+
+
+
+
+

Step 3: Create a route in order for the application to be accessible from outside the cluster (external traffic). Press Create

+
+
+

Please notice that there are different fields that can be specified (hostname, port). If required, the value of those fields can be modified later.

+
+
+
+step3a +
+
+
+

Leave by the moment the default config. as it is enough for the basic setup.

+
+
+

The route for external traffic is now available.

+
+
+
+step3b +
+
+
+
+

Now the BitBucket server container is up & running in our cluster.

+
+
+

The below steps correspond to the basic configuration of our BitBucket server.

+
+
+
+
+
+ +
+ +
+
+
+

Step 4.2: Leave by the moment the Internal database since it is enough for the basic setup (and it can be modified later), and click Next

+
+
+
+step4.2 +
+
+
+
+
+

Step 4.3: Select the evaluation license, and click I have an account

+
+
+
+step4.3 +
+
+
+
+
+

Step 4.4: Select the option Bitbucket (Server)

+
+
+
+step4.4 +
+
+
+
+
+

Step 4.5: Introduce your organization (Capgemini), and click Generate License

+
+ +
+
+
+

Step 4.6: Confirm that you want to install the license on the BitBucket server

+
+
+
+step4.6 +
+
+
+

The license key will be automatically generated. Click Next

+
+
+
+
+

Step 4.7: Introduce the details of the Administration account.

+
+
+

Since our BitBucket server is not going to be integrated with JIRA, click on Go to Bitbucket. The integration with JIRA can be configured later.

+
+
+
+step4.7 +
+
+
+
+
+

Step 4.8: Log in with the admin account that has been just created

+
+ +
+
+
+

DONE !!

+
+
+
+done +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-openshift-services-bitbucket-extra-server-configuration.html b/docs/shop-floor/1.0/dsf-openshift-services-bitbucket-extra-server-configuration.html new file mode 100644 index 00000000..3f2e1a47 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-openshift-services-bitbucket-extra-server-configuration.html @@ -0,0 +1,351 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

[Under construction]

+
+
+
+

The purpose of the present document is to provide the basic steps carried out to improve the configuration of BitBucket server in OpenShift.

+
+
+

The improved configuration consists of:

+
+
+
    +
  • +

    Persistent Volume Claims

    +
  • +
  • +

    Health Checks (pending to be completed)

    +
  • +
+
+
+

Persistent Volume Claims.

+
+
+

Please notice that the BitBucket server container does not use persistent volume claims by default, which means that the data (e.g.: BitBucket server config.) will be lost from one deployment to another.

+
+
+
+pvc0 +
+
+
+

It is very important to create a persistent volume claim in order to prevent the mentioned loss of data.

+
+
+
+
+

Step 1: Add storage

+
+
+
+pvc1 +
+
+
+
+
+

Step 2: Select the appropriate storage, or create it from scratch if necessary

+
+
+
+pvc2 +
+
+
+
+
+

Step 3: Introduce the required information

+
+
+
    +
  • +

    Path as it is specified in the BitBucket server Docker image (/var/atlassian/application-data/bitbucket)

    +
  • +
  • +

    Volume name with a unique name to clearly identify the volume

    +
  • +
+
+
+
+pvc3 +
+
+
+
+
+

The change will be immediately applied

+
+
+
+done +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-openshift-services-selenium-basic-grid.html b/docs/shop-floor/1.0/dsf-openshift-services-selenium-basic-grid.html new file mode 100644 index 00000000..751396c2 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-openshift-services-selenium-basic-grid.html @@ -0,0 +1,563 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Basic Selenium Grid setup in OpenShift

+
+
+

[Under construction]

+
+
+
+

The purpose of the present document is to provide the basic steps carried out to setup a Selenium Grid (Hub + Nodes) in OpenShift.

+
+
+
+
+

Introduction

+
+
+

Selenium is a tool to automate web browsers across many platforms. It allows the automation of testing in many different browsers, operating systems, programming languages, or testing frameworks. (For further information please see Selenium)

+
+
+

Selenium Grid is the platform provided by Selenium in order to perform the execution of tests in parallel and in a distributed way.

+
+
+

It basically consists of a Selenium Server (also known as hub or simply server) which redirects the requests it receives to the appropriate node (Firefox node, Chrome node, …​) depending on how the Selenium WebDriver is configured or implemented (See Selenium Doc.)

+
+
+
+ +
+

Prerequisites

+
+
+
    +
  • +

    OpenShift up & running

    +
  • +
+
+
+
+
+

Procedure

+
+
+

The present procedure is divided into three main parts: +* First part: Selenium Hub (server) installation +* Second part: Selenium node installation (Firefox & Chrome) +* Third part: Create persistent volumes for the hub and the node(s)

+
+
+
+
+

Selenium Hub installation

+
+
+

The approach followed consists of deploying a new image from the OpenShift Web Console.

+
+
+

The image as well as its documentation and details can be found at Selenium Hub Docker Image

+
+
+
+
+

== Step 1: Deploy Image

+
+
+
+step1 +
+
+
+
+
+

== Step 2: Image Name

+
+
+

As it is specified in the documentation (selenium/hub)

+
+
+

(Please notice that, as it is described in the additional documentation of the above links, the server will run by default on 4444 port)

+
+
+
+step2 +
+
+
+
+
+

== Step 3: Introduce the appropriate resource name

+
+
+

(selenium-hub in this case)

+
+
+

(No additional config. is required by the moment)

+
+
+
+step3a +
+
+
+

Once the image is deployed, you will be able to check & review the config. of the container. Please notice that, by default, no route is created for external traffic, hence the application (the selenium server or hub) is not reachable from outside the cluster

+
+
+
+step3b +
+
+
+
+
+

== Step 4: Create a route for external traffic

+
+
+
+step4 +
+
+
+
+
+

== Step 5: Change the default config. if necessary

+
+
+
+step5 +
+
+
+
+
+

== DONE !!

+
+
+

The Selenium Server is now accessible from outside the cluster. Click on the link of the route and you will be able to see the server home page.

+
+
+
+done1 +
+
+
+
+
+

== console/view config to see the default server config.

+
+
+

Please notice that the server is not detecting any node up & running, since we have not yet installed any of them.

+
+
+
+done2 +
+
+
+
+
+
+

Selenium Node Firefox installation

+
+
+

(Same steps apply for Selenium Node Chrome with the selenium/node-chrome Docker image)

+
+
+

The key point of the nodes installation is to specify the host name and port of the hub. If this step is not correctly done, the container will be setup but the application will not run.

+
+
+

The approach followed consists of deploying a new image from the OpenShift Web Console.

+
+
+

The image as well as its documentation and details can be found at Selenium Hub Docker Image (firefox node in this case)

+
+
+
+
+

== Step 1: Deploy Image

+
+
+

Introduce the appropriate Docker Image name as it is specified in the documentation (selenium/node-firefox)

+
+
+
+step1 +
+
+
+
+
+

== Step 2: Introduce the appropriate resource name

+
+
+

(selenium-node-firefox in this case)

+
+
+
+step2 +
+
+
+
+
+

== Step 3: Introduce, as environment variables, the host name and port of the selenium hub previously created

+
+
+

Env. var. for selenium hub host name

+
+
+
    +
  • +

    Name: HUB_PORT_4444_TCP_ADDR

    +
  • +
  • +

    Value: The Selenium hub host name. It’s recommended to use the service name of the internal OpenShift service.

    +
  • +
+
+
+

Env. var. for host selenium hub host port

+
+
+
    +
  • +

    Name: HUB_PORT_4444_TCP_PORT

    +
  • +
  • +

    Value: 4444 (by default), or the appropriate one if it was changed during the installation.

    +
  • +
+
+
+
+step3 +
+
+
+
+
+

== DONE !!

+
+
+

If the creation of the container was correct, we will be able to see our new selenium-node-firefox application up & running, and we will also be able to see that the Firefox node has correctly detected the Selenium hub (in the log of the POD)

+
+
+
+done1 +
+
+
+
+done2 +
+
+
+

If we go back to the configuration of the Selenium Hub through the WebConsole, we will also be able to see our new Firefox node

+
+
+
+done3 +
+
+
+
+
+
+

Persistent Volumes

+
+
+

Last part of the installation of the Selenium Grid consists on creating persistent volumes for both, the hub container and the node container.

+
+
+

Persistent Volumes can be easily created following the BitBucket Extra server configuration

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-provisioning-dsf4docker.html b/docs/shop-floor/1.0/dsf-provisioning-dsf4docker.html new file mode 100644 index 00000000..ce3300d6 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-provisioning-dsf4docker.html @@ -0,0 +1,374 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

dsf4docker provisioning environment

+
+
+
+docker +
+
+
+
+
+

Architecture overview

+
+
+

dsf docker arch

+
+
+
+
+

Prerequisite

+
+
+

To use dsf4docker provisioning environment you need a remote server and you must clone or download devonfw shop floor.

+
+
+
+
+

How to use it

+
+
+

Navigate to ./devonfw-shop-floor/dsf4docker/environment and here you can find one script to install it, and another one to uninstall it.

+
+
+
+
+

Install devonfw shop floor 4 Docker

+
+
+

There is an installation script to do so, so the complete installation should be completed by running it. Make sure this script has execution permissions in the Docker Host:

+
+
+
+
 chmod +x dsf4docker-install.sh
+ sudo ./dsf4docker-install.sh
+
+
+
+

This script, besides the container "installation" itself, will also adapt the docker-compose.yml file to your host (using sed to replace the IP_ADDRESS word of the file for your real Docker Host’s IP address).

+
+
+
+
+

Uninstall devonfw shop floor 4 Docker

+
+
+

As well as for the installation, if we want to remove everything concerning devonfw shop floor 4 Docker from our Docker Host, we’ll run this script:

+
+
+
+
 chmod +x dsf4docker-uninstall.sh
+ sudo ./dsf4docker-uninstall.sh
+
+
+
+
+
+

Troubleshooting

+
+
+

When trying to execute the install or uninstall .sh there may be some problems related to the windows/linux format file, so if you see this error log while executing the script:

+
+
+
+
./dsf4docker-install.sh: line 16: $'\r': command not found
+
+
+
+

You need to do a file conversion with this command:

+
+
+
+
dos2unix dsf4docker-install.sh
+
+
+
+

or

+
+
+
+
dos2unix dsf4docker-uninstall.sh
+
+
+
+
+
+

A little history

+
+
+

The Docker part of the shop floor is created based on the experience of the environment setup of the project Mirabaud Advisory, and intended to be updated to latest versions. Mirabaud Advisory is a web service developed with devonfw (Java) that, alongside its own implementation, it needed an environment both for the team to follow CICD rules through their 1-week-long sprints and for the client (Mirabaud) to check the already done work.

+
+
+

There is a practical experience about the Mirabaud Case.

+
+
+
+

Back.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/dsf-provisioning-production-line.html b/docs/shop-floor/1.0/dsf-provisioning-production-line.html new file mode 100644 index 00000000..1bf2fba1 --- /dev/null +++ b/docs/shop-floor/1.0/dsf-provisioning-production-line.html @@ -0,0 +1,303 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ + +
+

Production Line provisioning environment

+
+
+

pl

+
+
+

The Production Line Project is a set of server-side collaboration tools for Capgemini engagements. It has been developed for supporting project engagements with individual tools like issue tracking, continuous integration, continuous deployment, documentation, binary storage and much more!

+
+
+

For additional information use the official documentation.

+
+
+
+
+

How to obtain your Production Line

+
+
+

You can order your Production Line environment instance following the official guide. Remember that you need to order at least the next tools: + * Jenkins + * GitLab + * SonarQube + * Nexus

+
+
+
+

Back.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/index.html b/docs/shop-floor/1.0/index.html new file mode 100644 index 00000000..16f9920d --- /dev/null +++ b/docs/shop-floor/1.0/index.html @@ -0,0 +1,302 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

What is devonfw shop floor?

+
+
+
+devonfw shop floor +
+
+
+

devonfw shop floor is a platform to industrialize continuous delivery and continuous integration processes.

+
+
+

devonfw shop floor is a set of documentation, tools and methodologies used to configure the provisioning, development and uat environments used in your projects. devonfw shop floor allows the administrators of those environments to apply CI/CD operations and enables automated application deployment.

+
+
+

devonfw shop floor is mainly oriented to configure the provisioning environment provided by Production Line and deploy applications on an OpenShift cluster. In the cases where Production Line or OpenShift cluster are not available, there will be alternatives to achieve similar goals.

+
+
+

The devonfw shop floor 4 OpenShift is a solution based on the experience of priming devonfw for OpenShift by RedHat.

+
+
+
+primed +
+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/jenkins-slave.html b/docs/shop-floor/1.0/jenkins-slave.html new file mode 100644 index 00000000..fad0d137 --- /dev/null +++ b/docs/shop-floor/1.0/jenkins-slave.html @@ -0,0 +1,341 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Slaves creation for jenkins

+
+
+

Click on "Manage Jenkins" → "Manage Nodes" → "New Node": +We define a name and select "Permanent Agent"

+
+
+
+1 +
+
+
+

As we can see in the following image, we need to fill in the fields:

+
+
+

-Name: The slave name

+
+
+

-# of executors: The maximum number of concurrent builds that Jenkins may perform on this node. +(can be modified later)

+
+
+

-Remote root directory :An agent needs to have a directory dedicated to Jenkins. Specify the path to this directory on the agent.

+
+
+

We have this path in our docker-compose.yml

+
+
+
+root directory +
+
+
+

-Usage:In this mode, Jenkins uses this node freely. Whenever there is a build that can be done by using this node, Jenkins will use it.

+
+
+

Launch method: Controls how Jenkins starts this agent.

+
+
+
+2 +
+
+
+

Once is filled, we click on 'save' button and we can see our slave created:

+
+
+
+3 +
+
+
+

If we click in our slave we can see the 'secret' that we need to add to our docker-compose.yml:

+
+
+
+4 +
+
+
+

We add it to the docker-compose.yml file:

+
+
+
+7 +
+
+
+

Once it’s done, we go to the command line and write:

+
+
+

$docker-compose up -d

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/shop-floor/1.0/master-devonfw-shop-floor.html b/docs/shop-floor/1.0/master-devonfw-shop-floor.html new file mode 100644 index 00000000..c4ddde1d --- /dev/null +++ b/docs/shop-floor/1.0/master-devonfw-shop-floor.html @@ -0,0 +1,273 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw shop floor

+
+
+

devonfw-shop-floor

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/solicitor/1.0/_images/images/datamodel.drawio b/docs/solicitor/1.0/_images/images/datamodel.drawio new file mode 100644 index 00000000..d3e28c4e --- /dev/null +++ b/docs/solicitor/1.0/_images/images/datamodel.drawio @@ -0,0 +1 @@ +7VpLc9sgEP41PqZjPWwrx9h59JB0mrgzbU4dIrDEFIGKcGzn1xcs0APZlqO4bWaiTA5iWXZh+b6FlTzwZsn6hoM0vmMQkYE7hOuBdzlwXcd33YH6H8JNLpmMhrkg4hhqpVIwxy9IC43aEkOU1RQFY0TgtC4MGaUoFDUZ4Jyt6moLRupeUxChhmAeAtKUfsdQxLk0cCel/DPCUWw8O+PzvCcBRlmvJIsBZKuKyLsaeDPOmMifkvUMERU8E5d83PWe3mJiHFFxzIDPZ8ibO9D7ubjzw/nyPvBv47NRbuUZkKVe8BWN5JoSZTSfttiYWGQrnBBAZWu6YFTMdc9QtsMYE3gLNmyp5pIJEP4yrWnMOH6R+oDILkcKZDcXeqvdsbKGCZkxwrgUULZ1UA6aK2PaDUeZHPbVrNmxRHdgXVO8BZkwE2SEgDTDT9spq4EJ4BGmUyYES7RSM6Q6ys+IC7SuiHSIbxBLkOAbqWJ6x3q7N4YBur0q0VPoxBXkGBnQgI0K04W3BwlwQCO5gsKdN7TcOU7T3XCHOxn3mjtABOIUCDRlSwqzKpLkQ2WlpWiLr1dgbdzA2gNY3eIQ0Qz1WOuANb++977v91DLoRY0oHaRpgSHQGBGZyxJ5cb3Ce4kCc7zd8DgY6LuvIG6L4wngMiNgn2e6w45N+gT3R7IOX4Dc7SBMUThhboKV5AgEgMc2alh47gGR0YbYpAwCr/FmFoQk3614BqrGW+3GkF5ddZuGRcxixgF5KqUTiUXImnqkqCFQrGCgEzK5EKLnzRw5KRkWH4os59GpvlovKjG5brW2pjWGovKMNl6NMuUz+Ug1SjGUFhZw164ZmzJQ9R+lZYxiZA4tGW5ngrWUfm2hsUzUxdxRORh9lyvUg7g/CvD6sArKFWHuDu0mJKvVg+qFhWWHSdoMZSH44Aho8gWiwzVdE7GkUOXgT4hn+AO4PZFTgG2ZpXTJ+T/k5DN66K2hBy8i4Q8sjAejLplZNuQ61uG3kNGnvQkqZKkBviSMu7xnOnOkuBIlozfA0sczyo+x5aJrizxRpahPSyRGAObilqqFLJD9yyrdLFf0toLdA/qy4d8Bv+ess1XKj1lm5SdvCfKnnei7FsPMsdCsNORojbX/clfoqj9dqGFot74/1P0xlmmk8kQ/J5O7omIA/gFpzvqnO3nuAf1hemDVjmNkmYH4Y6ucj5kkbMTac2vhq88Clrzd7e6oVsu35klWzPzIQq2ZubRbuC9MfV6gQ2hjm91Rn6LoT2p91T4apYHTgNg27QAnoq0VYGXOfx5TpL9pz8BT4hMZU6KuOKKyVsD11ts/0x21FeREiz61wba/aB4ndaSb/aT6WSHcm0ryrjLZvlLg3ybyt9reFd/AA== \ No newline at end of file diff --git a/docs/solicitor/1.0/_images/images/datamodel.png b/docs/solicitor/1.0/_images/images/datamodel.png new file mode 100644 index 00000000..33060517 Binary files /dev/null and b/docs/solicitor/1.0/_images/images/datamodel.png differ diff --git a/docs/solicitor/1.0/_images/images/domain_model.png 
b/docs/solicitor/1.0/_images/images/domain_model.png new file mode 100644 index 00000000..ae30fc87 Binary files /dev/null and b/docs/solicitor/1.0/_images/images/domain_model.png differ diff --git a/docs/solicitor/1.0/_images/images/solution.png b/docs/solicitor/1.0/_images/images/solution.png new file mode 100644 index 00000000..f0d1d5d0 Binary files /dev/null and b/docs/solicitor/1.0/_images/images/solution.png differ diff --git a/docs/solicitor/1.0/index.html b/docs/solicitor/1.0/index.html new file mode 100644 index 00000000..008d2443 --- /dev/null +++ b/docs/solicitor/1.0/index.html @@ -0,0 +1,2581 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Solicitor User Guide

+
+
+

SPDX-License-Identifier: Apache-2.0

+
+
+
+
+

1. Introduction

+
+
+

Todays software projects often make use of large amounts of Open Source software. Being +compliant with the license obligations of the used software components is a prerequisite for every such project. This results in different requirements that the project might need to fulfill. Those requirements can be grouped into two main categories:

+
+
+
    +
  • +

    Things that need to be done to actually fulfill license obligations

    +
  • +
  • +

    Things that need to be done to monitor / report fulfillment of license obligations

    +
  • +
+
+
+

Most of the above activities share common points:

+
+
+
    +
  • +

    The need to have an inventory of used (open source) components and their licenses

    +
  • +
  • +

    Some rule based evaluation and reporting based on this inventory

    +
  • +
+
+
+

In practice these easy looking tasks might get complex due to various aspects:

+
+
+
    +
  • +

    The number of open source components might be quite large (>> 100 for a typical webapplication based on state of the art programming frameworks)

    +
  • +
  • +

    Agile development and rapid changes of used components result in frequent changes of the inventory

    +
  • +
  • +

    Open Source usage scenarios and license obligations might be OK in one context (e.g. in the relation between a software developer and his client) but might be completely inacceptable in another context (e.g. when the client distributes the same software to end customers)

    +
  • +
  • +

    Legal interpretation of license conditions often differ from organisation to organisation and result in different compliance rules to be respected.

    +
  • +
  • +

    License information for components is often not available in a standardized form which would allow automatic processing

    +
  • +
  • +

    Tools for supporting the license management processes are often specific to a technology or build tool and do not support all aspects of OSS license management.

    +
  • +
+
+
+

Of course there are specific commercial tool suites which address the IP rights and license domain. But due to high complexity and license costs those tools are out of reach for most projects - at least for permanent use.

+
+
+

Solicitor tries to address some of the issues highlighted above. In its initial version it is a tool for programmatically executing a process which was originally defined as an Excel-supported manual process.

+
+
+

When running Solicitor three subsequent processing steps are executed:

+
+
+
    +
  • +

    Creating an initial component and license inventory based on technology specific input files

    +
  • +
  • +

    Rule based normalization and evaluation of licenses

    +
  • +
  • +

    Generation of output documents

    +
  • +
+
+
+ + + + + +
+ + +Solicitor comes with a set of sample rules for the normalization and evaluation of licenses. +Even though these included rules are not "intentionally wrong" they are only samples and you should never rely on these builtin rules without checking and possibly modifying their content and consulting your lawyer. Solicitor is a tool +for technically supporting the management of OSS licenses within your project. Solicitor neither gives legal advice nor is a replacement for a lawyer. +
+
+
+
+
+

2. Licensing of Solicitor

+
+
+

The Solicitor code and accompanying resources (including this userguide) as stored in the GIT Repository https://github.com/devonfw/solicitor are licensed as Open Source under Apache 2 license (https://www.apache.org/licenses/LICENSE-2.0).

+
+
+ + + + + +
+ + +Specifically observe the "Disclaimer of Warranty" and "Limitation of Liability" which are part of the license. +
+
+
+ + + + + +
+ + +The executable JAR file which is created by the Maven based build process includes numerous other Open Source components which are subject to different Open Source licenses. Any distribution of the Solicitor executable JAR file needs to comply with the license conditions of all those components. +If you are running Solicitor from the executable JAR you might use the -eug option to store detailed license information as file solicitor_licenseinfo.html in your current working directory (together with a copy of this user guide). +
+
+
+
+
+

3. Architecture

+
+
+

The following picture show a business oriented view of Solicitor.

+
+
+

domain model

+
+
+

Raw data about the components and attached licenses within an application is gathered by scanning with technology and build chain specific tools. This happens outside Solicitor.

+
+
+

The import step reads this data and transforms it into a common technology independent internal format.

+
+
+

In the normalization step the license information is completed and unified. Information not contained in the raw data is added. Where possible the applicable licenses are expressed by SPDX-IDs.

+
+
+

Many open source compontents are available via multi licensing models. Within qualification the finally applicable licenses are selected.

+
+
+

In the legal assessment the compliance of applicable licenses will be checked based on generic rules defined in company-wide policies and possibly project-specific extensions. Defining those rules is considered as "legal advice" and possibly needs to be done by lawyers who are authorized to do so. For this step Solicitor only provides a framework / tool to support the process here but does not deliver any predefined rules.

+
+
+

The final export step produces documents based on the internal data model. This might be the list of licenses to be forwarded to the customer or a license compliance report. Data might also be fed into other systems.

+
+
+

A more technical oriented view of Solicitor is given below.

+
+
+

solution

+
+
+

There are three major technical components: The reader and writer components are performing import and export of data. The business logic - doing normalization, qualification and legal assessment is done by a rule engine. Rules are mainly defined via decision tables. Solicitor comes with a starting set of rules for normalization and qualification but these rulesets need to be extended within the projects. Rules for legal evaluation need to be completely defined by the user.

+
+
+

Solicitor is working without additional persisted data: When being executed it generates the output direcly from the read input data after processing the business rules.

+
+
+
+
+

4. Data Model

+
+
+

datamodel

+
+
+

The internal business data model consists of 6 entities:

+
+
+
    +
  • +

    ModelRoot: root object of the business data model which holds metadata about the data processing

    +
  • +
  • +

    Engagement: the masterdata of the overall project

    +
  • +
  • +

    Application: a deliverable within the Engagement

    +
  • +
  • +

    ApplicationComponent: component within an Application

    +
  • +
  • +

    RawLicense: License info attached to an ApplicationComponent as it is read from the input data

    +
  • +
  • +

    NormalizedLicense: License info attached to an ApplicationComponent processed by the business rules

    +
  • +
+
+
+
+
+

5. == ModelRoot

+
+
+

|== = +| Property | Type | Description +| modelVersion | int | version number of the data model +| executionTime | String | timestamp when the data was processed +| solicitorVersion | String | Solicitor version which processed the model +| solicitorGitHash | String | buildnumber / GitHash of the Solicitor build +| solicitorBuilddate | String | build date of the Solicitor build +| extensionArtifactId | String | artifactId of the active Solicitor Extension ("NONE" if no extension) +| extensionVersion | String | Version of the active Extension (or "NONE") +| extensionGitHash | String | Buildnumber / GitHash of the Extension (or "NONE") +| extensionBuilddate | String | build date of the Extension (or "NONE") +|== =

+
+
+
+
+

6. == Engagement

+
+
+

|== = +| Property | Type | Description +| engagementName | String | the engagement name +| engagementType | EngagementType | the engagement type; possible values: INTERN, EXTERN +| clientName | String | name of the client +| goToMarketModel | GoToMarketModel | the go-to-market-model; possible values: LICENSE +| contractAllowsOss | boolean | does the contract explicitly allow OSS? +| ossPolicyFollowed | boolean | is the company's OSS policy followed? +| customerProvidesOss | boolean | does the customer provide the OSS? +|== =

+
+
+
+
+

7. == Application

+
+
+

|== = +| Property | Type | Description +| applicationName | String | the name of the application / deliverable +| releaseId | String | version identifier of the application +| releaseDate | String | release date of the application +| sourceRepo | String | URL of the source repo of the application (should be an URL) +| programmingEcosystem | String | programming ecosystem (e.g. Java8; Android/Java, iOS / Objective C) +|== =

+
+
+
+
+

8. == ApplicationComponent

+
+
+

|== = +| Property | Type | Description +| usagePattern | UsagePattern | possible values: DYNAMIC_LINKING, STATIC_LINKING, STANDALONE_PRODUCT +| ossModified | boolean | is the OSS modified? +| ossHomepage | String | URL of the OSS homepage +| groupId | String | component identifier: maven group +| artifactId | String | component identifier: maven artifactId +| version | String | component identifier: Version +| repoType | String | component identifier: RepoType +|== =

+
+
+
+
+

9. == RawLicense

+
+
+

|== = +| Property | Type | Description +| declaredLicense | String | name of the declared license +| licenseUrl | String | URL of the declared license +| trace | String | detail info of history of this data record +| specialHandling | boolean | (for controlling rule processing) +|== =

+
+
+
+
+

10. == NormalizedLicense

+
+
+

|== = +| Property | Type | Description +| declaredLicense | String | name of the declared license (copied from RawLicense) +| licenseUrl | String | URL of the declared license (copied from RawLicense) +| declaredLicenseContent | String | resolved content of licenseUrl +| normalizedLicenseType | String | type of the license, see [License types] +| normalizedLicense | String | name of the license in normalized form (SPDX-Id) or special "pseudo license id", see [Pseudo License Ids] +| normalizedLicenseUrl | String | URL pointing to a normalized form of the license +| normalizedLicenseType | String | type of the license, see [License types] +| effectiveNormalizedLicenseType | String | type of the effective license, see [License types] +| effectiveNormalizedLicense | String | effective normalized license (SPDX-Id) or "pseudo license id"; this is the information after selecting the right license in case of multi licensing or any license override due to a component being redistributed under a different license +| effectiveNormalizedLicenseUrl | String | URL pointing to the effective normalized license +| effectiveNormalizedLicenseContent | String | resolved content of effectiveNormalizedLicenseUrl +| legalPreApproved | String | indicates whether the license is pre approved based on company standard policy +| copyLeft | String | indicates the type of copyleft of the license +| licenseCompliance | String | indicates if the license is compliant according to the default company policy +| licenseRefUrl | String | URL to the reference license information (TBD) +| licenseRefContent | String | resolved content of licenseRefUrl +| includeLicense | String | does the license require to include the license text ? +| includeSource | String | does the license require to deliver source code of OSS component ? +| reviewedForRelease | String | for which release was the legal evaluation done? 
+| comments | String | comments on the component/license (mainly as input to legal) +| legalApproved | String | indicates whether this usage is legally approved +| legalComments | String | comments from legal, possibly indicating additional conditions to be fulfilled +| trace | String | detail info of history of this data record (rule executions) +|== =

+
+
+

For the mechanism how Solicitor resolves the content of URLs and how the result +might be influenced see Resolving of License URLs

+
+
+
+
+

11. == License types

+
+
+

Defines the type of license

+
+
+
    +
  • +

    OSS-SPDX - An OSS license which has a corresponding SPDX-Id

    +
  • +
  • +

    OSS-OTHER - An OSS license which has no SPDX-Id

    +
  • +
  • +

    COMMERCIAL - Commercial (non OSS) license; this might also include code which is owned by the project

    +
  • +
  • +

    UNKNOWN - License is unknown

    +
  • +
  • +

    IGNORED - license will be ignored (non selected license in multi licensing case; only to be used as "Effective Normalized License Type")

    +
  • +
+
+
+
+
+

12. == Pseudo License Ids

+
+
+

A "normalized" license id might be either a SPDX-Id or a "pseudo license id" which is used to indicate a specific situation. The following pseudo license ids are used:

+
+
+
    +
  • +

    OSS specific - a nonstandard OSS license which could not be mapped to a SPDX-Id

    +
  • +
  • +

    PublicDomain - any form of public domain which is not represented by an explicit SPDX-Id

    +
  • +
  • +

    Ignored - license will be ignored (non selected license in multi licensing case; only to be used as "Effective Normalized License")

    +
  • +
  • +

    NonOSS - commercial license, not OSS

    +
  • +
+
+
+
+
+

13. Usage

+
+ +
+
+
+

14. Executing Solicitor

+
+
+

Solicitor is a standalone Java (Spring Boot) application. Prerequisite for running it is an existing Java 8 or 11 runtime environment. If you do not yet have the Solicitor executable JAR (solicitor.jar) you need to build it as given on the project GitHub homepage https://github.com/devonfw/solicitor .

+
+
+

Solicitor is executed with the following command:

+
+
+
+
java -jar solicitor.jar -c <configfile>
+
+
+
+

where <configfile> is to be replaced by the location of the [Configuration File].

+
+
+

To get a first idea on what Solicitor does you might call

+
+
+
+
java -jar solicitor.jar -c classpath:samples/solicitor_sample.cfg
+
+
+
+

This executes Solicitor with default configuration on its own list of internal components and produces sample output.

+
+
+

To get an overview of the available command line options use

+
+
+
+
java -jar solicitor.jar -h
+
+
+
+
+
Addressing of resources
+
+

For unique addressing of resources to be read (configuration files, input data, rule templates and decision tables) Solicitor makes use of the Spring ResourceLoader functionality, see https://docs.spring.io/spring-framework/docs/current/spring-framework-reference/core.html#resources-resourceloader . This allows to load from the classpath, the filesystem or even via http get.

+
+
+

If you want to reference a file in the filesystem you need to write it as follows: file:path/to/file.txt

+
+
+

Note that this only applies to resources being read. Output files are addressed without that prefix.

+
+
+
+
+
+
+

15. Project Configuration File

+
+
+

The project configuration of Solicitor is done via a configuration file in +JSON format. This configuration file defines the engagements and applications master data, configures the readers for importing component and license information, references the business rules to be applied and defines the exports to be done.

+
+
+

The config file has the following skeleton:

+
+
+
+
{
+  "version" : 1,
+  "comment" : "Sample Solicitor configuration file",
+  "engagementName" : "DevonFW", (1)
+  .
+  .
+  .
+  "applications" : [ ... ], (2)
+  "rules" : [ ... ],  (3)
+  "writers" : [ ... ] (4)
+}
+
+
+
+ + + + + + + + + + + + + + + + + +
1The leading data defines the engagement master data, see [Header and Engagement Master Data]
2applications defines the applications within the engagement and configures the readers to import the component/license information, see [Readers / Applications]
3rules references the rules to apply to the imported data, see [Business Rules]
4writers configures how the processed data should be exported, see [Writers / Reporting]
+
+
+ + + + + +
+ + +The following section describes all sections of the Solicitor configuration file format. Often the configuration of writers and especially rules will be identical for projects. To facilitate the project specific configuration setup Solicitor internally provides a base configuration which contains reasonable defaults for the rules and writers section. If the project specific configuration file omits the rules and/or writers sections then the corresponding settings from the base configuration will be taken. For details see Default Base Configuration. +
+
+
+ + + + + +
+ + +If locations of files are specified within the configuration files as relative +pathnames then this is always evaluated relative to the current working directory (which +might differ from the location of the configuration file). If some file location +should be given relative to the location of the configuration file this might be done +using the special placeholder ${cfgdir} as described in the following. +
+
+
+
+
+

16. == Placeholders within the configuration file

+
+
+

Within certain parts of the configuration file (path and filenames) special placeholders might be used to parameterize the configuration. These areas are explicitly marked in the following +description.

+
+
+

These placeholders are available:

+
+
+
    +
  • +

    ${project} - A simplified project name (taking the engagement name, +removing all non-word characters and converting to lowercase).

    +
  • +
  • +

    ${cfgdir} - If the config file was loaded from the filesystem this denotes the directory where the config file resides; otherwise it is . (the current directory). This can be used to reference locations relative to the location of the config file.

    +
  • +
+
+
+
+
+

17. == Header and Engagement Master Data

+
+
+

The leading section of the config file defines some metadata and the engagement master data.

+
+
+
+
  "version" : 1, (1)
+  "comment" : "Sample Solicitor configuration file", (2)
+  "engagementName" : "DevonFW", (3)
+  "engagementType" : "INTERN", (4)
+  "clientName" : "none", (5)
+  "goToMarketModel" : "LICENSE", (6)
+  "contractAllowsOss" : true, (7)
+  "ossPolicyFollowed" : true, (8)
+  "customerProvidesOss" : false, (9)
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
1version of the config file format (currently needs to be 1)
2is a free text comment (no further function at the moment)
3the engagement name (any string)
4the engagement type; possible values: INTERN, EXTERN
5name of the client (any string)
6the go-to-market-model; possible values: LICENSE
7does the contract explicitly allow OSS? (boolean)
8is the company's OSS policy followed? (boolean)
9does the customer provide the OSS? (boolean)
+
+
+
+
+

18. == Applications

+
+
+

Within this section the different applications (=deliverables) of the engagement are defined. Furthermore for each application at least one reader needs to be defined which imports the component and license information.

+
+
+
+
 "applications" : [ {
+    "name" : "Devon4J", (1)
+    "releaseId" : "3.1.0-SNAPSHOT", (2)
+    "sourceRepo" : "https://github.com/devonfw/devon4j.git", (3)
+    "programmingEcosystem" : "Java8", (4)
+    "readers" : [ { (5)
+      "type" : "maven", (6)
+      "source" : "classpath:samples/licenses_devon4j.xml", (7) (10)
+      "usagePattern" : "DYNAMIC_LINKING", (8)
+      "repoType" : "maven" (9)
+    } ]
+  } ],
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
1The name of the application / deliverable (any string)
2Version identifier of the application (any string)
3URL of the source repo of the application (string; should be an URL)
4programming ecosystem (any string; e.g. Java8; Android/Java, iOS / Objective C)
5multiple readers might be defined per application
6the type of reader; for possible values see Reading License Information with Readers
7location of the source file to read (ResourceLoader-URL)
8usage pattern; possible values: DYNAMIC_LINKING, STATIC_LINKING, STANDALONE_PRODUCT
9repoType: Repository to download the sources from: currently possible values: maven, npm; if omitted then "maven" will be taken as default
10placeholder patterns might be used here
+
+
+

The different readers are described in chapter Reading License Information with Readers

+
+
+
+
+

19. == Business Rules

+
+
+

Business rules are executed within a Drools rule engine. They are defined as a sequence of rule templates and corresponding XLS files which together represent decision tables.

+
+
+
+
  "rules" : [ {
+    "type" : "dt", (1)
+    "optional" : false, (2)
+    "ruleSource" : "classpath:samples/LicenseAssignmentSample.xls", (3) (7)
+    "templateSource" : "classpath:com/.../rules/rule_templates/LicenseAssignment.drt", (4) (7)
+    "ruleGroup" : "LicenseAssignment", (5)
+    "description" : "setting license in case that no one was detected" (6)
+  },
+  .
+  .
+  .
+,{
+    "type" : "dt",
+    "optional" : false,
+    "ruleSource" : "classpath:samples/LegalEvaluationSample.xls",
+    "templateSource" : "classpath:com/.../rules/rule_templates/LegalEvaluation.drt",
+    "ruleGroup" : "LegalEvaluation",
+    "description" : "final legal evaluation based on the rules defined by legal"
+  } ],
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
1type of the rule; only possible value: dt which stands for "decision table"
2if set to true the processing of this group of rules will be skipped if the XLS with table +data (given by ruleSource) does not exist; if set to false a missing XLS table will result +in program termination
3location of the tabular decision table data
4location of the drools rule template to be used to define the rules together with the decision table data
5id of the group of rules; used to reference it e.g. when doing logging
6some textual description of the rule group
7placeholder patterns might be used here
+
+
+

When running, Solicitor will execute the rules of each rule group separately and in the order +given by the configuration. Only if there are no more rules to fire in a group Solicitor will +move to the next rule group and start firing those rules.

+
+
+

Normally a project will only customize (part of) the data of the decision tables and thus will only change the ruleSource and the data in the XLS. All other configuration (the different templates and processing order) is part of the Solicitor application itself and should not be changed by end users.

+
+
+

See Working with Decision Tables and Standard Business Rules for further information on the business rules.

+
+
+
+
+

20. == Writers / Reporting

+
+
+

The writer configuration defines how the processed data will be exported and/or reported.

+
+
+
+
  "writers" : [ {
+    "type" : "xls", (1)
+    "templateSource" : "classpath:samples/Solicitor_Output_Template_Sample.xlsx", (2) (6)
+    "target" : "OSS-Inventory-DevonFW.xlsx", (3) (6)
+    "description" : "The XLS OSS-Inventory document", (4)
+    "dataTables" : { (5)
+      "ENGAGEMENT"  : "classpath:com/devonfw/tools/solicitor/sql/allden_engagements.sql",
+      "LICENSE" : "classpath:com/devonfw/tools/solicitor/sql/allden_normalizedlicenses.sql"
+    }
+  } ]
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + +
1type of writer to be selected; possible values: xls, velo
2path to the template to be used
3location of the output file
4some textual description
5reference to SQL statements used to transform the internal data model to data tables used for reporting
6placeholder patterns might be used here
+
+
+

For details on the writer configuration see Reporting / Creating output documents.

+
+
+
+
+

21. Starting a new project

+
+
+

To simplify setting up a new project Solicitor provides an option to create a project starter configuration in a given directory.

+
+
+
+
java -jar solicitor.jar -wiz some/directory/path
+
+
+
+

Besides the necessary configuration file this includes also empty XLS files for defining project +specific rules which amend the builtin rules. Furthermore a sample license.xml file is provided to +directly enable execution of solicitor and check functionality.

+
+
+

This configuration then serves as starting point for project specific configuration.

+
+
+
+
+

22. Exporting the Builtin Configuration

+
+
+

When working with Solicitor it might be necessary to get access to the builtin base configuration, e.g. for reviewing the builtin sample rules or using builtin reporting templates as starting point for the creation of own templates.

+
+
+

The command

+
+
+
+
java -jar solicitor.jar -ec some/directory/path
+
+
+
+

will export all internal configuration to the given directory. This includes:

+
+
+ +
+
+
+
+

23. Configuration of Technical Properties

+
+
+

Besides the project configuration done via the above described file there are a set of technical settings in Solicitor which are done via properties. Solicitor is implemented as a Spring Boot Application and makes use +of the standard configuration mechanism provided by the Spring Boot Platform which provides several ways to define/override properties.

+
+
+

The default property values are given in Built in Default Properties.

+
+
+

In case that a property shall be overridden when executing Solicitor this can most easily be done via the command line when executing +Solicitor:

+
+
+
+
java -Dsome.property.name1=value -Dsome.property.name2=another_value -jar solicitor.jar <any other arguments>
+
+
+
+
+
+

24. Reading License Information with Readers

+
+
+

Different Readers are available to import raw component / license information for different +technologies. This chapter describes how to setup the different build / dependency management systems to create the required input and how to configure the corresponding reader.

+
+
+
+
+

25. Maven

+
+
+

For the export of the licenses from a maven based project the license-maven-plugin is used, which can directly be called without the need to change anything in the pom.xml.

+
+
+

To generate the input file required for Solicitor the License Plugin needs to be executed with the following command:

+
+
+
+
mvn org.codehaus.mojo:license-maven-plugin:1.14:aggregate-download-licenses -Dlicense.excludedScopes=test,provided
+
+
+
+

The generated output file named licenses.xml (in the directory specified in the +plugin config) should look like the following:

+
+
+
+
Unresolved include directive in modules/ROOT/pages/index.adoc - include::licenses.xml[]
+
+
+
+

In Solicitor the data is read with the following reader config:

+
+
+
+
"readers" : [ {
+  "type" : "maven",
+  "source" : "file:target/generated-resources/licenses.xml",
+  "usagePattern" : "DYNAMIC_LINKING"
+} ]
+
+
+
+

(the above assumes that Solicitor is executed in the maven projects main directory)

+
+
+
+
+

26. CSV

+
+
+

The CSV input is normally manually generated and should look like this (The csv File is ";" separated):

+
+
+
+
Unresolved include directive in modules/ROOT/pages/index.adoc - include::csvlicenses.csv[]
+
+
+
+

In Solicitor the data is read with the following part of the config

+
+
+
+
"readers" : [ {
+  "type" : "csv",
+  "source" : "file:path/to/the/file.csv",
+  "usagePattern" : "DYNAMIC_LINKING"
+} ]
+
+
+
+

The following 5 columns need to be contained:

+
+
+
    +
  • +

    groupId

    +
  • +
  • +

    artifactId

    +
  • +
  • +

    version

    +
  • +
  • +

    license name

    +
  • +
  • +

    license URL

    +
  • +
+
+
+

In case that a component has multiple licenses attached, there needs to be a separate +line in the file for each license.

+
+
+
+
+

27. NPM

+
+
+

For NPM based projects either the NPM License Crawler (https://www.npmjs.com/package/npm-license-crawler) or the NPM License Checker (https://www.npmjs.com/package/license-checker) might be used. The NPM License Crawler can process several node packages in one run.

+
+
+
+
+

28. == NPM License Crawler

+
+
+

To install the NPM License Crawler the following command needs to be executed.

+
+
+
+
npm i npm-license-crawler -g
+
+
+
+

To get the licenses, the crawler needs to be executed like the following example

+
+
+
+
npm-license-crawler --dependencies --csv licenses.csv
+
+
+
+

The export should look like the following (The csv file is "," separated)

+
+
+
+
Unresolved include directive in modules/ROOT/pages/index.adoc - include::licenses.csv[]
+
+
+ +
+

In Solicitor the data is read with the following part of the config

+
+
+
+
"readers" : [ {
+  "type" : "npm-license-crawler-csv",
+  "source" : "file:path/to/licenses.csv",
+  "usagePattern" : "DYNAMIC_LINKING",
+  "repoType" : "npm"
+} ]
+
+
+
+
+
+

29. == NPM License Checker

+
+
+

To install the NPM License Checker the following command needs to be executed.

+
+
+
+
npm i license-checker -g
+
+
+
+

To get the licenses, the checker needs to be executed like the following example (we require JSON output here)

+
+
+
+
license-checker --json > /path/to/licenses.json
+
+
+
+

The export should look like the following

+
+
+
+
Unresolved include directive in modules/ROOT/pages/index.adoc - include::licensesNpmLicenseChecker.json[]
+
+
+ +
+

In Solicitor the data is read with the following part of the config

+
+
+
+
"readers" : [ {
+  "type" : "npm-license-checker",
+  "source" : "file:path/to/licenses.json",
+  "usagePattern" : "DYNAMIC_LINKING",
+  "repoType" : "npm"
+} ]
+
+
+
+
+
+

30. Gradle (Windows)

+
+
+

For the export of the licenses from a Gradle based project the Gradle License Plugin is used.

+
+
+

To install the plugin some changes need to be done in build.gradle, like the following example

+
+
+
+
buildscript {
+  repositories {
+    maven { url 'https://oss.jfrog.org/artifactory/oss-snapshot-local/' }
+  }
+
+  dependencies {
+    classpath 'com.jaredsburrows:gradle-license-plugin:0.8.5-SNAPSHOT'
+  }
+}
+
+apply plugin: 'java-library'
+apply plugin: 'com.jaredsburrows.license'
+
+
+
+

Afterwards execute the following command in the console:

+
+
+

For Windows (Java Application)

+
+
+
+
gradlew licenseReport
+
+
+
+

The Export should look like this:

+
+
+
+
Unresolved include directive in modules/ROOT/pages/index.adoc - include::licenses.json[]
+
+
+ +
+

In Solicitor the data is read with the following part of the config

+
+
+
+
"readers" : [ {
+  "type" : "gradle2",
+  "source" : "file:path/to/licenses.json",
+  "usagePattern" : "DYNAMIC_LINKING"
+} ]
+
+
+
+ + + + + +
+ + +The former reader of type gradle is deprecated and should no longer be used. See List of Deprecated Features. +
+
+
+
+
+

31. Gradle (Android)

+
+
+

For the export of the licenses from a Gradle based Android project the Gradle License Plugin is used.

+
+
+

To install the plugin some changes need to be done in the build.gradle of the project, like the following example

+
+
+
+
buildscript {
+  repositories {
+    jcenter()
+  }
+
+  dependencies {
+    classpath 'com.jaredsburrows:gradle-license-plugin:0.8.5'
+  }
+}
+
+
+
+

Also there is a change in the build.gradle of the app: add the following line as the second line

+
+
+
+
apply plugin: 'com.android.application'
+
+
+
+

Afterwards execute the following command in the Terminal of Android Studio: +For Windows (Android Application)

+
+
+
+
gradlew licenseDebugReport
+
+
+
+

The Export is in the following folder

+
+
+
+
$Projectfolder\app\build\reports\licenses
+
+
+
+

It should look like this:

+
+
+
+
Unresolved include directive in modules/ROOT/pages/index.adoc - include::licenseDebugReport.json[]
+
+
+ +
+

In Solicitor the Data is read with the following part of the config

+
+
+
+
"readers" : [ {
+      "type" : "gradle2",
+      "source" : "file:$/input/licenses.json",
+      "usagePattern" : "DYNAMIC_LINKING"
+   	} ]
+
+
+
+ + + + + +
+ + +The former reader of type gradle is deprecated and should no longer be used. See List of Deprecated Features. +
+
+
+
+
+

32. Working with Decision Tables

+
+
+

Solicitor uses the Drools rule engine to execute business rules. Business rules are +defined as "extended" decision tables. Each such decision table consists of two artifacts:

+
+
+
    +
  • +

    A rule template file in specific drools template format

    +
  • +
  • +

    An Excel (XLSX) table which defines the decision table data

    +
  • +
+
+
+

When processing, Solicitor will internally use the rule template to create one or multiple rules for every record found in the Excel sheet. The following points are important here:

+
+
+
    +
  • +

    Rule templates:

    +
    +
      +
    • +

      Rule templates should be regarded as part of the Solicitor implementation and should not be changed on an engagement level.

      +
    • +
    +
    +
  • +
  • +

    Excel decision table data

    +
    +
      +
    • +

      The Excel tables might be extended or changed on a per project level.

      +
    • +
    • +

      The rules defined by the tabular data will have decreasing "salience" (priority) from top to bottom

      +
    • +
    • +

      In general multiple rules defined within a table might fire for the same data to be processed; the definition of the rules within the rule template will normally ensure that once a rule from the decision table was processed no other rule from that table will be processed for the same data

      +
    • +
    • +

      The excel tables contain header information in the first row which is only there for documentation purposes; the first row is completely ignored when creating rules from the xls

      +
    • +
    • +

      The rows starting from the second row contain decision table data

      +
    • +
    • +

      The first "empty" row (which does not contain data in any of the defined columns) ends the decision table

      +
    • +
    • +

      Decision tables might use multiple condition columns which define the data that a rule matches. Often such conditions are optional: If left free in the Excel table the condition will be omitted from the rule conditions. This allows to define very specific rules (which only fire on exact data patterns) or quite general rules which get activated on large groups of data. Defining general rules further down in the table (with lower salience/priority) ensures that more specific rules get fired earlier. This even allows to define a default rule at the end of the table which gets fired if no other rule could be applied.

      +
    • +
    +
    +
  • +
  • +

    rule groups: Business rules are executed within groups. All rules resulting from a single decision table are assigned to the same rule group. The order of execution of the rule groups +is defined by the sequence of declaration in the config file. Processing of the current group will +be finished when there are no more rules to fire in that group. Processing of the next group will then start. Rule groups which have been finished processing will not be resumed even if rules within that group might have been activated again due to changes of the facts.

    +
  • +
+
+
+
+
+

33. Extended comparison syntax

+
+
+

By default any conditions given in the fields of decision tables are simple textual comparisons: The condition +is true if the property of the model is identical to the given value in the XLS sheet.

+
+
+

Depending on the configuration of the rule templates for some fields, an extended syntax might be available. For those fields the following syntax applies:

+
+
+
    +
  • +

    If the given value of the XLS field starts with the prefix NOT: then the outcome of the remaining condition is logically negated, i.e. this field condition is true if the rest of the condition is NOT fulfilled.

    +
  • +
  • +

    A prefix of REGEX: indicates that the remainder of the field defines a Java Regular Expression. For the condition to become true the whole property needs to match the given regular expression.

    +
  • +
  • +

    The prefix RANGE: indicates that the remainder of the field defines +a Maven Version Range. Using this makes only sense on the artifact version property.

    +
  • +
  • +

    If no such prefix is detected, then the behavior is identical to the normal (verbatim) comparison logic

    +
  • +
+
+
+

Fields which are subject to this extended syntax are marked explicitly in the following section.

+
+
+
+
+

34. Standard Business Rules

+
+
+

The processing of business rules is organized in different phases. Each phase might consist of multiple decision tables to be processed in order.

+
+
+
+
+

35. Phase 1: Determining assigned Licenses

+
+
+

In this phase the license data imported via the readers is cleaned and normalized. At the end of this phase the internal data model should clearly represent all components and their assigned licenses in normalized form.

+
+
+

The phase itself consists of two decision tables / rule groups:

+
+
+
+
+

36. == Decision Table: Explicitly setting Licenses

+
+
+

With this decision table it is possible to explicitly assign NormalizedLicenses to components. This will be used if the imported RawLicense data is either incomplete or incorrect. Items which have been processed by rules of this group will not be reprocessed by the next rule group.

+
+
+
    +
  • +

    LHS conditions:

    +
    +
      +
    • +

      Engagement.clientName

      +
    • +
    • +

      Engagement.engagementName

      +
    • +
    • +

      Application.applicationName

      +
    • +
    • +

      ApplicationComponent.groupId

      +
    • +
    • +

      ApplicationComponent.artifactId

      +
    • +
    • +

      ApplicationComponent.version

      +
    • +
    • +

      RawLicense.declaredLicense

      +
    • +
    • +

      RawLicense.url

      +
    • +
    +
    +
  • +
  • +

    RHS result:

    +
    +
      +
    • +

      NormalizedLicense.normalizedLicenseType

      +
    • +
    • +

      NormalizedLicense.normalizedLicense

      +
    • +
    • +

      NormalizedLicense.normalizedLicenseUrl

      +
    • +
    • +

      NormalizedLicense.comment

      +
    • +
    +
    +
  • +
+
+
+

: On these fields the Extended comparison syntax might be used

+
+
+

All RawLicenses which are in scope of fired rules will be marked so that they do not get reprocessed by the following decision table.

+
+
+
+
+

37. == Decision Table: Detecting Licenses from Imported Data

+
+
+

With this decision table the license info from the RawLicense is mapped to the NormalizedLicense. This is based on the name and/or URL of the license as imported via the readers.

+
+
+
    +
  • +

    LHS conditions:

    +
    +
      +
    • +

      RawLicense.declaredLicense

      +
    • +
    • +

      RawLicense.url

      +
    • +
    +
    +
  • +
  • +

    RHS result:

    +
    +
      +
    • +

      NormalizedLicense.normalizedLicenseType

      +
    • +
    • +

      NormalizedLicense.normalizedLicense

      +
    • +
    +
    +
  • +
+
+
+

: On these fields the Extended comparison syntax might be used

+
+
+
+
+

38. Phase 2: Selecting applicable Licenses

+
+
+

Within this phase the actually applicable licenses will be selected for each component.

+
+
+

This phase consists of two decision tables.

+
+
+
+
+

39. == Choosing specific License in case of Multi-Licensing

+
+
+

This group of rules has the speciality that it might match to a group of NormalizedLicenses associated to an ApplicationComponent. In case that multiple licenses are associated to an ApplicationComponent one of them might be selected as "effective" license and the others might be marked as Ignored.

+
+
+
    +
  • +

    LHS conditions:

    +
    +
      +
    • +

      ApplicationComponent.groupId

      +
    • +
    • +

      ApplicationComponent.artifactId

      +
    • +
    • +

      ApplicationComponent.version

      +
    • +
    • +

      NormalizedLicense.normalizedLicense (licenseToTake; mandatory)

      +
    • +
    • +

      NormalizedLicense.normalizedLicense (licenseToIgnore1; mandatory)

      +
    • +
    • +

      NormalizedLicense.normalizedLicense (licenseToIgnore2; optional)

      +
    • +
    • +

      NormalizedLicense.normalizedLicense (licenseToIgnore3; optional)

      +
    • +
    +
    +
  • +
  • +

    RHS result

    +
    +
      +
    • +

      license matching "licenseToTake" will get this value assigned to effectiveNormalizedLicense

      +
    • +
    • +

      licenses matching "licenseToIgnoreN" will get IGNORED assigned to effectiveNormalizedLicenseType and Ignored assigned to effectiveNormalizedLicense

      +
    • +
    +
    +
  • +
+
+
+

: On these fields the Extended comparison syntax might be used

+
+
+

It is important to note that the rules only match, if all licenses given in the conditions actually exist and are assigned to the same ApplicationComponent.

+
+
+
+
+

40. == Selecting / Overriding applicable License

+
+
+

The second decision table in this group is used to define the effectiveNormalizedLicense (if not already handled by the decision table before).

+
+
+
    +
  • +

    LHS conditions:

    +
    +
      +
    • +

      ApplicationComponent.groupId

      +
    • +
    • +

      ApplicationComponent.artifactId

      +
    • +
    • +

      ApplicationComponent.version

      +
    • +
    • +

      NormalizedLicense.normalizedLicenseType

      +
    • +
    • +

      NormalizedLicense.normalizedLicense

      +
    • +
    +
    +
  • +
  • +

    RHS result:

    +
    +
      +
    • +

      NormalizedLicense.effectiveNormalizedLicenseType (if empty in the decision table then the value of normalizedLicenseType will be taken)

      +
    • +
    • +

      NormalizedLicense.effectiveNormalizedLicense (if empty in the decision table then the value of normalizedLicense will be taken)

      +
    • +
    • +

      NormalizedLicense.effectiveNormalizedLicenseUrl (if empty in the decision table then the value of normalizedLicenseUrl will be taken)

      +
    • +
    +
    +
  • +
+
+
+

: On these fields the Extended comparison syntax might be used

+
+
+
+
+ +
+
+

The third phase is the legal evaluation of the licenses and the check whether OSS usage is according to defined legal policies. Again this phase comprises two decision tables.

+
+
+
+
+

42. == Pre-Evaluation based on common rules

+
+
+

Within the pre evaluation the license info is checked against standard OSS usage policies. This roughly qualifies the usage and might already determine licenses which are OK in any case or which need to be further evaluated. Furthermore they qualify whether the license text or source code needs to be included in the distribution. The rules in this decision table are only based on the effectiveNormalizedLicense and do not consider any project, application or component information.

+
+
+
    +
  • +

    LHS condition:

    +
    +
      +
    • +

      NormalizedLicense.effectiveNormalizedLicenseType

      +
    • +
    • +

      NormalizedLicense.effectiveNormalizedLicense

      +
    • +
    +
    +
  • +
  • +

    RHS result:

    +
    +
      +
    • +

      NormalizedLicense.legalPreApproved

      +
    • +
    • +

      NormalizedLicense.copyLeft

      +
    • +
    • +

      NormalizedLicense.licenseCompliance

      +
    • +
    • +

      NormalizedLicense.licenseRefUrl

      +
    • +
    • +

      NormalizedLicense.includeLicense

      +
    • +
    • +

      NormalizedLicense.includeSource

      +
    • +
    +
    +
  • +
+
+
+
+
+

43. == Final evaluation

+
+
+

The decision table for final legal evaluation defines all rules which are needed +to create the result of the legal evaluation. Rules here might be general for all projects or even very specific to a project if the rule can not be applied to other projects.

+
+
+
    +
  • +

    LHS condition:

    +
    +
      +
    • +

      Engagement.clientName

      +
    • +
    • +

      Engagement.engagementName

      +
    • +
    • +

      Engagement.customerProvidesOss

      +
    • +
    • +

      Application.applicationName

      +
    • +
    • +

      ApplicationComponent.groupId

      +
    • +
    • +

      ApplicationComponent.artifactId

      +
    • +
    • +

      ApplicationComponent.version

      +
    • +
    • +

      ApplicationComponent.usagePattern

      +
    • +
    • +

      ApplicationComponent.ossModified

      +
    • +
    • +

      NormalizedLicense.effectiveNormalizedLicenseType

      +
    • +
    • +

      NormalizedLicense.effectiveNormalizedLicense

      +
    • +
    +
    +
  • +
  • +

    RHS result:

    +
    +
      +
    • +

      NormalizedLicense.legalApproved

      +
    • +
    • +

      NormalizedLicense.legalComments

      +
    • +
    +
    +
  • +
+
+
+

: On these fields the Extended comparison syntax might be used

+
+
+
+
+

44. Amending the builtin decision tables with own rules

+
+
+

The standard process as described before consists of 6 decision tables / rule +groups to be processed in sequence. When using the builtin base configuration all those decision tables use the internal sample data / rules as contained in Solicitor.

+
+
+

To use your own rule data there are three approaches:

+
+
+
    +
  • +

    Include your own rules section in the project configuration file (so not inheriting from +the builtin base configuration file) and reference your own decision tables there.

    +
  • +
  • +

    Create your own "Solicitor Extension" which might completely redefine/replace the builtin Solicitor setup including all decision tables and the base configuration file. See Extending Solicitor for details.

    +
  • +
  • +

    Make use of the optional project specific decision tables which are defined in the default base configuration: For every builtin decision table there is an optional external decision table (expected in the filesystem) which will be checked for existence. If such external decision table exists it will be processed first - before processing the builtin decision table. Thus it is possible to amend / override the builtin rules by project specific rules. When you create the starter configuration of your project as described in Starting a new project, those project specific decision tables are automatically created.

    +
  • +
+
+
+
+
+

45. Reporting / Creating output documents

+
+
+

After applying the business rules the resulting data can be used to create reports and other output documents.

+
+
+

Creating such reports consists of three steps:

+
+
+
    +
  • +

    transform and filter the model data by using an embedded SQL database

    +
  • +
  • +

    determining difference to previously stored model (optional)

    +
  • +
  • +

    Template based reporting via

    +
    +
      +
    • +

      Velocity templates (for textual output like e.g. HTML)

      +
    • +
    • +

      Excel templates

      +
    • +
    +
    +
  • +
+
+
+
+
+

46. SQL transformation and filtering

+
+ +
+
+
+

47. == Database structure

+
+
+

After the business rules have been processed (or a Solicitor data model has been loaded via +command line option -l) the model data is stored in a dynamically created internal SQL database.

+
+
+
    +
  • +

    For each type of model object a separate table is created. The tablename is the name of model object type written in uppercase characters. (E.g. type NormalizedLicense stored in table NORMALIZEDLICENSE)

    +
  • +
  • +

    All properties of the model objects are stored as strings in fields named like the properties within the database table. Field names are case sensitive (see note below for handling this in SQL statements).

    +
  • +
  • +

    An additional primary key is defined for each table, named ID_<TABLENAME>.

    +
  • +
  • +

    For all model elements that belong to some parent in the object hierarchy (i.e. all objects except ModelRoot) a foreign key field is added named PARENT_<TABLENAME> which contains the unique key of the corresponding parent

    +
  • +
+
+
+
+
+

48. == SQL queries for filtering and transformation

+
+
+

Each Writer configuration (see [Writers / Reporting]) includes a section which references SQL select statements that are applied on the database data. The result of the SQL select statements is made accessible for the subsequent processing of the Writer via the dataTable name given in the configuration.

+
+
+
+
+

49. == Postprocessing of data selected from the database tables

+
+
+

Before the result of the SQL select statement is handed over to the Writer the following postprocessing +is done:

+
+
+
    +
  • +

    a rowCount column is added to the result which gives the position of the entry in the result set (starting with 1).

    +
  • +
  • +

    Columns named ID_<TABLENAME> are replaced with columns named OBJ_<TABLENAME>. The fields of those columns are filled with the corresponding original model objects (java objects).

    +
  • +
+
+
+ + + + + +
+ + +The result table column OBJ_<TABLENAME> gives access to the native Solicitor data model (java objects), e.g. in the Velocity writer. As this breaks the decoupling done via the SQL database using this feature is explicitely discouraged. It should only be used with high caution and in exceptional situations. The feature might be discontinued in future versions without prior notice. +
+
+
+
+
+

50. Determining difference to previously stored model

+
+
+

When using the command line option -d Solicitor can determine difference information between two different data models (e.g. the difference between the licenses of the current release and a former release.) The difference is calculated on the result of the above described SQL statements:

+
+
+
    +
  • +

    First the internal reporting database is created for the current data model and all defined SQL statements are executed

    +
  • +
  • +

    Then the internal database is recreated for the "old" data model and all defined SQL statements are executed again

    +
  • +
  • +

    Finally for each defined result table the difference between the current result and the "old" result +is calculated

    +
  • +
+
+
+

To correctly correlate corresponding rows of the two different versions of table data it is necessary to define explicit correlation keys for each table in the SQL select statement. +It is possible to define up to 10 correlation keys named CORR_KEY_X with X in the range from 0 to 9. CORR_KEY_0 has highest priority, CORR_KEY_9 has lowest priority.

+
+
+

The correlation algorithm will first try to match rows using CORR_KEY_0. It will then attempt to correlate unmatched rows using CORR_KEY_1, etc. Correlation will stop, when

+
+
+
    +
  • +

    all correlations keys CORR_KEY_0 to CORR_KEY_9 have been processed OR

    +
  • +
  • +

    the required correlation key column does not exist in the SQL select result OR

    +
  • +
  • +

    there are no unmatched "new" rows OR

    +
  • +
  • +

    there are no unmatched "old" rows

    +
  • +
+
+
+

The result of the correlation / difference calculation is stored in the reporting table data structure. For each row the status is accessible whether

+
+
+
    +
  • +

    The row is "new" (did not exist in the old data)

    +
  • +
  • +

    The row is unchanged (no changes in the field values representing the properties of the Solicitor data model)

    +
  • +
  • +

    The row is changed (at least one field corresponding to the Solicitor data model changed)

    +
  • +
+
+
+

For each field of "changed" or "unchanged" rows the following status is available:

+
+
+
    +
  • +

    Field is "changed"

    +
  • +
  • +

    Field is "unchanged"

    +
  • +
+
+
+

For each field of such rows it is furtheron possible to access the new and the old field value.

+
+
+
+
+

51. Sample SQL statement

+
+
+

The following shows a sample SQL statement showing some join over multiple tables and the use of +correlations keys.

+
+
+
+
Unresolved include directive in modules/ROOT/pages/index.adoc - include::../resources/com/devonfw/tools/solicitor/sql/allden_normalizedlicenses.sql[]
+
+
+
+ + + + + +
+ + +Above example also shows how the case sensitive column names have to be handled within the SQL +
+
+
+
+
+

52. Writers

+
+
+

The above described SQL processing is identical for all Writers. Writers only differ in the way how the output document is created based on a template and the reporting table data obtained by the SQL transformation.

+
+
+
+
+

53. == Velocity Writer

+
+
+

The Velocity Writer uses the Apache Velocity Templating Engine to create text based reports. The reporting data tables created by the SQL transformation are directly put into the Velocity Context.

+
+
+

For further information see the

+
+
+
    +
  • +

    Velocity Documentation

    +
  • +
  • +

    The Solicitor JavaDoc (which also includes details on how to access the diff information for rows and fields of reporting data tables)

    +
  • +
  • +

    The samples included in Solicitor

    +
  • +
+
+
+
+
+

54. == Excel Writer

+
+ +
+
+
+

55. == Using Placeholders in Excel Spreadsheets

+
+
+

Within Excel spreadsheet templates there are two kinds of placeholders / markers possible, which control the processing:

+
+
+
+
+

56. == Iterator Control

+
+
+

The templating logic searches within the XLSX workbook for fields containing the names of the +reporting data tables as defined in the Writer configuration like e.g.:

+
+
+
    +
  • +

    #ENGAGEMENT#

    +
  • +
  • +

    #LICENSE#

    +
  • +
+
+
+

Whenever such a string is found in a cell this indicates that this row is a template row. For each entry in the respective reporting data table a copy of this row is created and the attribute replacement will be done with the data from that reporting table. (The pattern #…​# will be removed when copying.)

+
+
+
+
+

57. == Attribute replacement

+
+
+

Within each row which was copied in the previous step the templating logic searches for the string pattern $someAttributeName$ where someAttributeName corresponds to the column names of the reporting table. Any such occurrence is replaced with the corresponding data value.

+
+
+
+
+

58. == Representation of Diff Information

+
+
+

In case that a difference processing (new vs. old model data) was done this will be represented +as follows when using the XLS templating:

+
+
+
    +
  • +

    For rows that are "new" (so no corresponding old row available) an Excel note indicating that this row is new will be attached to the field that contained the #…​# placeholder.

    +
  • +
  • +

    Fields in non-new rows that have changed their value will be marked with an Excel note indicating the old value.

    +
  • +
+
+
+
+
+

59. Resolving of License URLs

+
+
+

Resolving of the content of license texts which are referenced by the URLs given in NormalizedLicense is done in the following way:

+
+
+
    +
  • +

    If the content is found as a resource in the classpath under licenses this will be taken. (The Solicitor application might include a set of often used license texts and thus it is not necessary to fetch those via the net.) If the classpath does not contain the content of the URL the next step is taken.

    +
  • +
  • +

    If the content is found as a file in subdirectory licenses of the current working directory this is taken. If no such file exists the content is fetched via the net. The result will be written to the file directory, so any content will only be fetched once. (The user might alter the files in that directory to change/correct its content.) A file of length zero indicates that no content could be fetched.

    +
  • +
+
+
+
+
+

60. Encoding of URLs

+
+
+

When creating the resource or filename for given URLs in the above steps the following encoding scheme will be applied to ensure that always a valid name can be created:

+
+
+

All "non-word" characters (i.e. characters outside the set [a-zA-Z_0-9]) are replaced by underscores (“_”).

+
+
+
+
+

61. Feature Deprecation

+
+
+

Within the lifecycle of the Solicitor development features might be discontinued due +to various reasons. In case that such discontinuation is expected to break existing projects +a two stage deprecation mechanism is used:

+
+
+
    +
  • +

    Stage 1: Usage of a deprecated feature will produce a warning only giving details on +what needs to be changed.

    +
  • +
  • +

    Stage 2: When a deprecated feature is used Solicitor by default will terminate with an error +message giving information about the deprecation.

    +
  • +
+
+
+

By setting the property solicitor.deprecated-features-allowed to true +(e.g. via the command line, see Configuration of Technical Properties), even in second stage +the feature will still be available and only a warning will be logged. The project setup should in any +case ASAP be changed to no longer use the feature as it might soon be removed without further +notice.

+
+
+ + + + + +
+ + +Enabling the use of deprecated feature via the above property should only be +a temporary workaround and not a standard setting. +
+
+
+ + + + + +
+ + +If usage of a feature should be discontinued immediately (e.g. because it might lead to +wrong/misleading output) the first stage of deprecation will be skipped. +
+
+
+
+
+

62. List of Deprecated Features

+
+
+

The following features are deprecated via the above mechanism:

+
+
+ +
+
+
+
+

Appendix A: Default Base Configuration

+
+
+

The builtin default base configuration contains settings for the rules and writers section +of the Solicitor configuration file which will be used if the project specific config file omits those sections.

+
+
+
Default Configuration
+
+
Unresolved include directive in modules/ROOT/pages/index.adoc - include::../resources/com/devonfw/tools/solicitor/config/solicitor_base.cfg[]
+
+
+
+
+
+

Appendix B: Built in Default Properties

+
+
+

The following lists the default settings of technical properties as given by the built in application.properties file.

+
+
+

If required these values might be overridden on the command line when starting Solicitor:

+
+
+
+
java -Dpropertyname1=value1 -Dpropertyname2=value2 -jar solicitor.jar <any other arguments>
+
+
+
+
application.properties
+
+
Unresolved include directive in modules/ROOT/pages/index.adoc - include::../resources/application.properties[]
+
+
+
+
+
+

Appendix C: Extending Solicitor

+
+
+

Solicitor comes with a sample rule data set and sample reporting templates. In general it will be required to correct, supplement and extend this data sets and templates. This can be done straightforward by creating copies of the appropriate resources (rule data XLS and template files), adopting them and then referencing those copies instead of the original resources from the project configuration file.

+
+
+

Even though this approach is possible it will result in hard to maintain configurations, +especially in the case of multiple projects using Solicitor in parallel.

+
+
+

To support such scenarios Solicitor provides an easy extension mechanism which allows +to package all those customized configurations into a single archive and reference it from the +command line when starting Solicitor.

+
+
+

This facilitates configuration management, distribution and deployment of such extensions.

+
+
+
+
+

63. Format of the extension file

+
+
+

The extensions might be provided as JAR file or even as a simple ZIP file. There is only +one mandatory file which contains (at least metadata) about the extension and which needs +to be included in this archive in the root folder.

+
+
+
application-extension.properties
+
+
Unresolved include directive in modules/ROOT/pages/index.adoc - include::../resources/samples/application-extension.properties[]
+
+
+
+

This file is included via the standard Spring Boot profile mechanism. Besides containing +naming and version info on the extension this file might override any +property values defined within Solicitor.

+
+
+

Any other resources (like rule data or templates) which need to be part of the Extension +can be included in the archive as well - either in the root directory or any subdirectories. +If the extension is active those resources will be available on the classpath like any +resources included in the Solicitor jar.

+
+
+

Overriding / redefining the default base configuration within the Extension enables to +update all rule data and templates without the need to touch the projects configuration +file.

+
+
+
+
+

64. Activating the Extension

+
+
+

The Extension will be activated by referencing it as follows when starting Solicitor:

+
+
+
+
java -Dloader.path=path/to/the/extension.zip -jar solicitor.jar <any other arguments>
+
+
+
+
+
+

Appendix D: Release Notes

+
+
+
+
Changes in 1.1.1
+
+
+
    +
  • +

    Corrected order of license name mapping which prevented Unlicense, The W3C License, WTFPL, Zlib and +Zope Public License 2.1 to be mapped.

    +
  • +
+
+
+
Changes in 1.1.0
+
+
+
    +
  • +

    https://github.com/devonfw/solicitor/issues/67: Inclusion of detailed license information for the +dependencies included in the executable JAR. Use the '-eug' command line option to store this file +(together with a copy of the user guide) in the current work directory.

    +
  • +
  • +

    Additional rules for license name mappings in decision table LicenseNameMappingSample.xls.

    +
  • +
  • +

    https://github.com/devonfw/solicitor/pull/61: Solicitor can now run with Java 8 or Java 11.

    +
  • +
+
+
+
Changes in 1.0.8
+
+
+
    +
  • +

    https://github.com/devonfw/solicitor/issues/62: New Reader of type npm-license-checker +for reading component/license data collected by NPM License Checker (https://www.npmjs.com/package/license-checker). +The type of the existing Reader for reading CSV data from the NPM License Crawler has been changed from npm +to npm-license-crawler-csv. (npm is still available but deprecated.) Projects should adopt their Reader +configuration and replace type npm by npm-license-crawler-csv.

    +
  • +
+
+
+
Changes in 1.0.7
+
+
+
    +
  • +

    https://github.com/devonfw/solicitor/issues/56: Enable continuing analysis in multiapplication projects even if some license files are unavailable.

    +
  • +
  • +

    Described simplified usage of license-maven-plugin without need to change pom.xml. (Documentation only)

    +
  • +
  • +

    Ensure consistent sorting even in case that multiple "Ignored" licenses exist for a component

    +
  • +
+
+
+
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/sonar-devon4j-plugin/1.0/_images/images/DevonArchitectureRules.drawio b/docs/sonar-devon4j-plugin/1.0/_images/images/DevonArchitectureRules.drawio new file mode 100644 index 00000000..8b133810 --- /dev/null +++ b/docs/sonar-devon4j-plugin/1.0/_images/images/DevonArchitectureRules.drawio @@ -0,0 +1 @@ +7V1dc5u4Gv41mTnnIh4k9AGXiZO22/G23Xa72/aO2MSmIcYHkybprz+SjTBIGAsHsEzszjS2wEKWHr163g+9OrOH909vY28x+zOa+OEZtCZPZ/bVGYQAWIT94SXPaYll2euSaRxM0rJNwZfgty9uTEsfgom/LNyYRFGYBIti4Tiaz/1xUijz4jh6LN52G4XFpy68qa8UfBl7oVr6bzBJZuKHEXdz4Z0fTGfpox1I1xfuPXFz+kuWM28SPeaK7OszexhHUbJ+d/809EPee6Jf1t97s+Vq1rDYnyc6X3j7wX4YX8zG0cPPvybf4e9/rmfhOaBwXc8vL3xIf/IZJCGr8fKGNzp5TnuC/O+Bt/TyNpon57fefRCyIb044w8l3v2CXTmDto3YXy8OvFAp5Xeu/l9VsFwNNP86IIunTe3s3ZT/vfSS8Uw0hP2mG3Eh7cusWZB164K/HT/csD+Xj7Mg8b8svDEve2SgZGWz5D5knwD/TdHDfOJPRjdZgTe+m8a89ONDEgZzPy2fePHdR1ZNkHDkWgMLFwvhqpTfuUzi6M4fRmEUr1pkW6tXdkXABqSd9ybtu6t3fvjLT4Kxl15IwQ9WfRgG0zn/VWxw/ZjXlV7k9d4GYZh73hvI/7HyX37Mqwsv0i8n0SLrsTxOUujw2/2nXFGKm7d+dO8n8TO7Jb1KYYr2dBZD6gzwuuRxMynYL1+XzXLzAVMgbvXSqTjN6t+glb1JAVsHvAQbCd5hGPC+PqH3wOjNYZM6JdiEwGkNm2aK1S9+zPrthMxDI7NSjhK3MTn6denHH29+cmIErdC78cMiFnn3bIfjLjTlEL2BVzD2z0fe8zaUZWXrR+fBx1Byt27eLEk4vbvgPwu+mQbJ7OFmMI7u2YeJ/yua3z6Kd+gnrz6MbvgP95Z8TNmVaPxwz0bFS4JozivgHPJ8mTYu5I0beMtxELAb2TgXLqybUjm17dyYpqIgN2/i9UzgMN49bbrBeDl+JbRfW1f02mqGLwBkFfiC7QAF5RgBFeXAcmtjnH3MwfwQsB9F02BsJujDVdMUyOeKNQCPegn4C3pF37QEeIwtBfAOKBHrQBS2C/hDco4rL/HOL8Zjf7msmiMdTIYJa4m3bogyI/i18/xFjXmBezkvrqwL9q+deUFht/NCj6tThfz+Wk6e/rhi75ENpR6aR5zLXk5jb8K1PKm4OI5p4WpCXmaEWLpaxEkOE2WQaWBMCEBFWaUOiU3UEUGoJU3Jqep9u17vpzoLkwLzcRD+Z/7jcnQVXUyvrj9/sn7bX/54/uk/Wl++fp5+Yc+5HD24d3/+iL6NZo8f3I+sIHm4uwjjD4v3U/Dl99U79Ne/HFOfpx++T3/8fffzAn29e3z/dfr223zK7iZX38Dv0e199Hw/+RYB+jTDv+8/vyd/IXj57/B2DKP3t5MLJkguv/9i/118coc39ON7+x2rFL5/C959ePubvXXQXfj37NM/wzFr+X9ryALYNrSqJos+3hwFXqKoE3i5VfBCJ3gdLbyet8KrZD1xWkKXWKbK4YVP8Dp2eB0YX8JsWWJHfJkSy7j0PafGFarpvlz2BcY4BSEKN+X/tjNZ/oC01bwlsygOfrMyL0xb3gB9gkVGi1T2hMrwYdXns7uUuuXCmzdoxVh7LbTMGOsnd6i5jddtU7S2fLm
GwgYA6YfKtuTG8vl05N8m5RNlyKaJ25ISBywV86RMKLr7uFf21uIAKvO5SIKMLzQL/T7J4gq8G1GDVdlXtkWL4sHVU65ac5Eie3entCvdwxVId8j2dMnXXMU3Q62vhQws23Fs5GKAMIF2ybJOBy5hEgJbyMHIJg7qdJzQKx+ndMKQgU1LQgvgwAIuAhRRTKEDcMnYOAMC2xueskiDVzg80B3AfYdnj9HpmAKtoo7MZEA3q6YpBChXrMN/SE/4j8J2Lq3hRSkvaoD/ULGIV3gzoVVmxG7Gm6kbyVe20q8BuyidI/dePA3m52vBwmeJlTl20kvx+reUXlsNxOoKn1rFazdRkjC8K5dZhyfnAYPYXKl1EsTsB3PEry6ESbx9vmprvij37Bc7u8ZrvK2/KsBe1fz0UugnDMrnKTTVTuHBMecCNqtm8wfg7HokomVW17b3Sl4kDaP7BVsKVt30Y6vY2qL7Z8WLrfaA8lkpLVU5Qxg4GcK6MoSVqIYFq0gD8pAUHUg2KItiAqQsigm2EcXUtHdbiedkX15RA3+53Xe9Y+Gl3S+8Yuz3oZlyKNzqpWBJ3zzRCOycomptqbo1wCWgox26kgEVFuYKRcGfM3kSr/YCjENvuQzGxQH2J1NfdHIUJ7NoGs298HpTKg00KgfFcLgChf8UJN9Y2bk1sCwnLfh+tgq+hOnHq6cUeqsPz7kPn/w4YL3Fudi6bM56jlcHBhZ2REG+Ov55U9/q03P+k1zjVmgso4d47Fd1ttgQwliHX6XoCCs679hKqOXj08rEV1oW+yFj57/8QnPL4JU+4VMUrJZigXNXMqipccjr355+bwNTtSqryE0FCRT1rLtGqWcF9+x3v0AvFlT44HC/clMZmAGUw51IAMVgF0LTyWINXIqLc2X93fqTpTm8CxjvxLuYGKbg3SpKbkUv0kU7BDsq2gJ3Bj/vOXfbgt+wbGdCqEbokRjf3JQIw2Cx9Hev9d5ysd7Vdhs8cX6gKsgNrKxukc8RXELn7JJoXbsCCS8ztzka1tCOxUomGCxIioLB2lMwpNXZtCikXBd3u4wiTaliGyVUMouLCGMDZE+hIlWEHD2h0pzIwAq2+xx/6FDJnGar7sROAxCBQ6oG4BSCaFoQTzZjjiYIETiVMcanMMRjhpgZgYhOZRz1KRKxBwhrDWLbDKw/2Ed2begt/bW21LaX82Hpj9nD8h7OtOhMw8jq5mNzTkbWfciZbGTF1GUKyealIvDwJldXwwbVdtyXI4JWRL85u/qt2zAwVzVLKH1UL36lArvNxLXoylG3vgZieCyY23jM3jEOltkBYW7j8XpHPEYdR4XpDREpGaBTOMyrD4f5dgqHeU3aWPvhMFi2H6vmzNJYmH22BGkuTiohzs+AixcuVG1GLmtvd2l4EIFj7xxE2tAgbtO2mWSyrpjMjaZM2R5mwU2xEDsiu2Ymk4VkTj/Ob5aLnMTafDEb/FSJrwbPmsictOgXiYTMhZqFeJQ5VQ+vO0MNVaNt3ZmovQVLXdCdasywvwQ/G/TeaMyw8e05xzhYZmvMsEwhe61jZMg+qot9KYf1UpZhHyCLYs9YBpDitkzlGLbGSto2x5Bj3ExgGHZ/F61syHvDMOz+rl41BstshmFr7L14NWNkEMNI00J3E0IQ+8skHz/w03s6j/UMHydG0jAjMXOLlgEmD5mOsAl5cDrSX3tH78wdPSaOPaEiPWaLR0xEjieWEZ7ISNNkBFNngMwOZTTBGwNdOZJxV7d1SlP665bpnVemv06Zvvhk+uuSOVqPTFc5bfiXr+dJkAScDpXElEjenc/+IloGSRQHVUlwWqZUPxdenk75vP3PGmzq5GxqnE1R5JrOpkz0OzEpYtK+kP66oHrngeqvA6ov/qf+up+O1vvEQ2qPw/uEThTlpRQFW0XLhZHeJ5087+0H3CIp4NYA/1PzGdZNkZ6ob1QE9ZaK1Bgqo6kI6i0VqT9CBlGR4/E/4RMdaYCOSKvsEXigsBH
0hMgdZ5YPCveWquC+URXcW6pSY6iMpiq4t1Sl/ggZQlW68kFxSvRqfFDrQzNOjKpRRnUEXiiiQRa632FtmB+K9HaZzoa/L4yK9Ha9rjFURjMqkcLxNEKGZiYDrjpCI1cZI7MOiciOQM3y7UN1zejykIgsb785h0Rs7WZDzkxA0kEskO598JBSlZwIqfZRLJrPwaLJ2i2Tv8HerFuxwV4DxzisktHJk7oEkEZNatstpr2C9sEnNTRtUufOarEJPCseKLVa3mqf1bI5TAbb4vAYcfgLPat7mIzxUgfhgUOhlb1oAXNsNRxAC+Hs8r4SCSn2dgwGYmlvWiwJH6h4VHqIyfbGVd7flkhS95OMbMNFEhLHwZgjkgw+jOpVyA8HdCI/xNFtwkQCW2I00nNsukN0VN/fluhQzVYjZLroIEUha4DoaOA0WFUwiOlfnPq4euJnLKbIYHadNacIoWtniPFV1YDtPrc17YSdJ84hs46cQ6CIL0RQmRG1tuCRYItAZbUtH0cHLQ3z0QmzW2/EZmHWls4aRthuArMYKCGk1RW3jlrXXJJmQVRAPacojZG0ncjFRuERYJeR1hxOirKPAjIAVu7yfvBkTylUS4gel2sMjkC1TY5ACSKN4k9ABFeKXhPM82D8qSSTvSmzuoN1bP85b+suVmbJBmVRIXZp4F9tcUDkiuWIQqolHuqqegBIi69dbbqW76e0C8O1kEt5WaUeJmmWqJLs1lh8Ppyk0kjdYoKkovuJKtierMKassosXp3FFYmpza3PGxOVcGPUJy6SBHAka1ZDkskpPsZJLcXbmiXdLlxsLcsl1e46EgLSWMEk6fIEqnsQOxZMGqFVnQgmyxoOJcVoY61OFSMMaytGmcQ654qVK/nnMNwht0r9c/sLM0FYd0ozYpQ0Q9Qd2EBRuzLpMICOq1ytK9tswkdo8wK0zlNa19gaMNW25HgGA4vRiyKwdy7IOxzPrPuLUy+zUXRhkyCas0RMJ1OmCSyug7JVQdviS+2BCBfJGGzBeibSxuy97Dc/P1QD29B0gwZS9K8DL8ZQtWcMgeGdCDFUtPJDd6OqsRrfjZggwzpR1ViN70QHOhIW7cPP6QbCQwzzl2VUff8VXnTC0fnLUNGyRIA9ECpUbSeZkmelrLa2ia1OGtZDKYA4h25Qie39kZgeNabDNqFRUASSkdOGEkfcD5W27Q4solJNvae0jlaDzRWdoFVXbkKzwApJ0WaAqKTB6MITOrbsqxHzoDMMmmIK2GBwE4OOC2v7LiNAp9jVXvNts9Z8GbtE3h+sjV3iDIqbTygq3WfbGZIbiOV6jUjGupRBKHWGIBlTtrbTbXZcgnAjsV+VD6GIHBTxpsSBvRjx+6MXaYd/mCWHsSOFFJLS7A618SpVi6lVVW3bCBWWCIMQ2jHDNYu5YsdiOnnOLVUAC5At8fqok2zPwOoYZ6ZsWzoUzkSmit16v4iRNgSRDoJFgSVDRxeDSkW4awyaZ3sygX/uVu6RcYgcoKJgJNU8rwZE5Zop6npF1rA5tZ2iyZHtJ8A1KUET7O/hBpvh70uKpqxBr3uwhKAyMkkTtDUU1VczRq8s8eWPs1eU+DLlMsW5ccp8WWvBoUQ2Kxuf9xIiDTtH+6xK6jez0l5CoZ/1cBFAtUFuOqtCGlt8+j9YZrMqneOVXs0YGZn8Euoc9FQ/Xm57zoecxcd1QMHmk98w0sj+EEPMNa4tGROcPWOFXNth66Wyq02sppBv4SnW3LatpuTsqZHYJWBs2KorYkSFHdYiKvHoNGgVtZLkZZ89X5sMZnvs+epsQ712QIBpvgQipdNjvKoJsy1TQIrVAqllrcuBBvz8hkVdN5ClSDvU1bC4FRsXQ12pw1D68jQwGKA61baNWdFZLQtdJQNAF2jeH7OiE44Ns1ROVCo7V7VlqVyRZirt5nBZsivN+DSxliUnIIMl1pxOSRVWrTlD43NmKv2IRb7bw/Vjyc40bHw/AnmvDz14P6rGkCE5un6
kxC07m7rbnmxgV8p2trlXLjFQb31uJSemdoSVYYGmWTrAbF84GVDJ4KUdwAKViW+RAURbVazWV3M1UmNIDZ/1RAoixyKd5+FmfIdZcGFLObla0TA1ZzwgZmW4Uac8W1WcXDaaPcN8HdEUzXpbn/2qXWRk+ppP3GIf2i48+IpPGtDV20oHVBQDdGcmoH0SAb6EFehm9zGLFFCEi+mpivOa8FMmcq896QKxeFay7U/RM0l1mPeHqMaBkel0gkpHhCBEDi9QVOOAKQKFZ/8qyJRGfaJHRxWoZIwGcnZgbcOelMwTI72N7HWTeW5rcLvpOYlqpRmZbqWhtnQ2YHsnxLCPcRQl+U5nv3n2ZzTh0abX/wc= \ No newline at end of file diff --git a/docs/sonar-devon4j-plugin/1.0/_images/images/DevonArchitectureRules.png b/docs/sonar-devon4j-plugin/1.0/_images/images/DevonArchitectureRules.png new file mode 100644 index 00000000..c7a0b01b Binary files /dev/null and b/docs/sonar-devon4j-plugin/1.0/_images/images/DevonArchitectureRules.png differ diff --git a/docs/sonar-devon4j-plugin/1.0/_images/images/DevonScopeRules.drawio b/docs/sonar-devon4j-plugin/1.0/_images/images/DevonScopeRules.drawio new file mode 100644 index 00000000..309a5d5a --- /dev/null +++ b/docs/sonar-devon4j-plugin/1.0/_images/images/DevonScopeRules.drawio @@ -0,0 +1 @@ 
+7Z1bk6I4FIB/TVfNPoyVK8JjN32ZmdqtnSpra6cfaY3KLoqD9MX99RuEKCQIaTtcdHQehoQQwvE75yQnB/sKu4u3h8hbzf8IJyy4QmDydoVvrxCCGED+X1KzyWqgDdKaWeRPsrp9xcj/j2WVotmzP2HrQsM4DIPYXxUrx+FyycZxoc6LovC12GwaBsW7rrwZUypGYy9Qa//2J/FcPIbl7E98Yf5snt3aRsP0xMITjbMnWc+9Sfiaq8J3V9iNwjBOjxZvLgsS6Qm5pNfdHzi7G1jElrHOBZtveOywyebBHU0fpt9er4cvPz9baS8vXvCcPXA22HgjJMCWk+tEkLw0Drz12h9f4Zt5vAh4BeSHbMIFljUPo3gezsKlF9zta2/WcRT+uxMfSa558+Mf4np+/MiPwYBmpdsEHyAKG1FYxtFmd1FSyF2VFPeXbUviuvTubhiE0faBMACuC5Iz6aMm4z8o0qxqHT5HY1Yhx4yG2ItmLK5oN9x98VxlWLhgfKT8utc9WjTDZZ6DStRFLPBi/6U4XC8jfLbrbneH76HPHwQBoY4k6ydTRoIpl16hk/RBs+vyMNV0NRxKHaWSUDriJHmbXLNV0mB9eMg2KN4HE1A5Lrk9BIX2/CAdgSjlvoV91VZxtsW/1iz68+mfxLAgEHhPLBBKYgX8u7l54gezeItSWjENt6PZq4/18zkUJz6vt+btmjeAZPW2Pyl6uf7+9fNoHK7Yrv9InPrkrfzfRDUfZXqj4s15dW5AqE738yxmtiKn9lH4vJywSaZEr3M/ZqOVt1WCV27rizZA0TEbPWHLUnQfZpK49xZ+kHxFX1jwwmJ/7GUnMgcAEyPhBf5syQsBmyZP9MKipGFwnVXHYTKINR+Tv5z9vm1zi5Ju/CDIjWRCmT0hO4kkvbC3q0p9V/VzR1cBLlHMaa9ll6iv0JUyTc2RmQePF3PsdcHijbdmB2F84icN02g3SKM1ttnTtBc0esyejhuhEYG+4aiJ3hYZiT4zDH9drAK24OLkXjNcfs6D/IGxZUoBPvm8e8Na4DSoBU82JRT0QQum9piNm9ECjHqlBVwUSy3YrDJ+FdPuhotVuGRbnUhGicDv3oYlOP8oB25Xlw5Em0MITIP4IdzG/JH5Yx4N3D3/uK5duhy4ub25dYZmYETydFWFEaISGPHwvGh8NEwjOjMaXff+vpzGO3BD7oAZGgkAYn33S/A4Y0sWecGOxnG4WIRLwyjis0LRBum/MhSd7ccMila/DePEfynlUMcIJjGxxAHDGtC299DnjJwVZ/fbjyEnC6VlOFVZAmUstYdSHSvILCv0woomKwiQwenhgs3iYl1w0cQFo5OD5dGwHzIeJT4bWAiUp9en54keDXsi41HcM6YFAXp6vujRsC8yHvA8Y2BO0Bs9uGbdETq/uKQpXqyTXxY9uGa9UUFgF1iqYDnJddGDa9YZoXMLMzfHS8uuSC+ZDeHsSVvOZlMip7cOAEAksP242iezvSvPbVfQy3NLH9VINluWwlibzeaUM9NONpsjZXpBiTTdVDapH0ylfgxlskkJc1JiWl1zKe/tvXlsevoDoaI/I4FMToOCwF+tWb0J9NarNCN36r8lZlM1bAYMEyIafgzDEsNUAdwBw6RphUg3VmhnawbAtgv2BsBhpcU5nCGbNPvOIp9LJvnCTNsZ3axZuxyBduwMGjoFxIjtHGdo5I6obQ+Is//YjdgdOVNX2LuDmb3YqWrfjOURs4nOPfee/J0XHgAH5T0xHABQ54u3JVlxcgqa9rhXUAwrFfQjatiV2nCOBpTm8JaDX3AgLOW7k8+ru4YIDGDurIM/qFgFd2CCdgNvXVTZf5qDqwasPeU0zzio4VtNW7FdSm/NegdINN1DYf+zddAxLU5BCD0SawppdUcHwDWG5fCCpQ6WVBfL/D5rB9MWacGByCBvFI+fxUjzCUjkl4ia5tS+cKrDqe4qHnY7vZYSuCEucnospvLbEah1TJ0LpjqY2rq
YdhptUsJNxAymUrcIt42pCNBeMK3EVCSn1mLa7aoLy++/UiOYyt0i0jqmamzygukh+nQwRZ1iKm0nQcsMpvKuJm0dU9QEplphqgKMsAbGcvBBJfgmnb72GqrjH0xwBtQC+49VxAvCgXhN4gPU1twF4qPCWcaINrCp+isQrTuNLWQwdR4VKFm962JLhnJ+Wl2IoWlSDWy8HUdqYRpQu1F/zI/fmCRVO6zaqe3VWMBrk2orpFYHGZoG1cBu168Aqm4Aq2OTWr+E1wbVUUCtDjM0DWojG1XnB6q27+9VbKBkEa+9VaW8WV4TaGga1Ea3rt6DVc+2oDoN7Z/NzihuccepesVy7OqoHSyt08DSkrC0jsUS0eqOmsaykR0mjWzmFqKlnVjLTneY5OAlxk5xNX0cpGQIJVdNIajquGFoRTriBVpDq/Re2VIL46qc1ndYVvnVakCOSJY1Bm0ju0/nB632ir3ThVBTqVFqRJQvi1regCLoQqrRJTvqlNRmkqPUgChCrYPazr6SAupRPznf3at4+muqc0yPUgOiSP3N+qZBbWdb6eRB1X2Zq1uL2lCClBoQRaR1UBvZVjIfcep6p14b1F5NUrEh16/OUdsHVd1WGpWg2rP3jIsL3d1rlV29ZkzULY8RPjEhIqtrIaqB/ZHVdyE6qChEe9ixENUw9IiemhBFhKcrIVI1LDoiJyZEKCYcnQmx5Acs+u5YCJEzPjq2iVSNIPXeschCRNTuWIhqdKP36kxEUL036qyuvHvvWGQhdu9Y1FVh76c4xBkWhWhbHQuxZMXSd5to9W2yTUtWLH33zlZ73pkX938MNl1q7/+mLr77Hw== \ No newline at end of file diff --git a/docs/sonar-devon4j-plugin/1.0/_images/images/DevonScopeRules.png b/docs/sonar-devon4j-plugin/1.0/_images/images/DevonScopeRules.png new file mode 100644 index 00000000..1b35fc23 Binary files /dev/null and b/docs/sonar-devon4j-plugin/1.0/_images/images/DevonScopeRules.png differ diff --git a/docs/sonar-devon4j-plugin/1.0/configuration.html b/docs/sonar-devon4j-plugin/1.0/configuration.html new file mode 100644 index 00000000..7b2fbcad --- /dev/null +++ b/docs/sonar-devon4j-plugin/1.0/configuration.html @@ -0,0 +1,330 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Configuration

+
+
+

Add a file called architecture.json to the toplevel directory of your project and configure your components and their dependencies. You should commit this file to your version control system (e.g. git). The following example shows an architecture definition for the my-thai-star sample application:

+
+
+
+
{
+  "architecture": {
+    "components": [
+        {"name":"bookingmanagement","dependencies":["ordermanagement","usermanagement","mailservice"]},
+        {"name":"dishmanagement","dependencies":["imagemanagement"]},
+        {"name":"imagemanagement","dependencies":[]},
+        {"name":"ordermanagement","dependencies":["dishmanagement"]},
+        {"name":"usermanagement","dependencies":[]},
+        {"name":"mailservice","dependencies":[]}
+      ]
+  }
+}
+
+
+
+

As you can see all you need to do is declare the components of your application with their allowed dependencies. In case you need dependencies to other devonfw apps (microservices) you can also add them as dependencies with qualified packages (e.g. com.devonfw.sampleapp.samplecomponent). As the technical architecture is standardized by a blueprint in devonfw, you do not need any further configuration and everything can already be validated out of the box.

+
+
+
+
+

Custom packages

+
+
+

If for some reason you can not or do not want to follow the devon4j packaging conventions and architecture mapping but still map your architecture to the code and want to use this plugin, you can do so. +All you need to do is to configure a custom mapping by adding a packages configuration to your architecture.json:

+
+
+
+
{
+  "architecture": {
+    "components": [ ... ],
+    "packages": {
+        "pattern": "([a-zA-Z0-9_]+\\.)+(persistence|core|service|gui)\\.([a-zA-Z0-9_]+)\\.(api|base|impl)(\\.[a-zA-Z0-9_]+)*",
+        "groups": [ "root", "layer", "component", "scope", "detail" ],
+        "mappings": {  "persistence": "dataaccess", "core": "logic", "gui": "client"}
+    }
+  }
+}
+
+
+
+

The packages object has the following properties:

+
+
+
    +
  • +

    The pattern property defines a regular expression your packages will match.

    +
  • +
  • +

    The groups property maps the groups of the regular expression from pattern in the order of occurrence to the corresponding architecture segments. In case your regular expression needs some extra brackets and therefore "artificial" groups, you can assign none to such groups.

    +
  • +
  • +

    The mappings maps segments from your custom architecture definition to the devon4j architecture terms.

    +
  • +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/sonar-devon4j-plugin/1.0/devonfw-sonar-devon4j-plugin-doc.html b/docs/sonar-devon4j-plugin/1.0/devonfw-sonar-devon4j-plugin-doc.html new file mode 100644 index 00000000..649398af --- /dev/null +++ b/docs/sonar-devon4j-plugin/1.0/devonfw-sonar-devon4j-plugin-doc.html @@ -0,0 +1,595 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

sonar-devon4j-plugin

+
+
+

Plugin for SonarQube to validate devon4j architecture. +${project.version}, ${buildtime}

+
+
+

.1. Motivation

+
+

With devon4j you can build business applications very efficiently following elaborated guidelines and best practices. +This includes a profound architecture blueprint that is mapped to the code via clear packaging conventions.

+
+
+

This sonar-devon4j-plugin provides a plugin extending SonarQube with the ability to validate your Java code according to the devon4j architecture.

+
+
+
+
+

.2. Installation

+
+

If you have SonarQube installed, you only need to go to its marketplace and install the according plugins. +Please note that the following other plugins are required as pre-requisite:

+
+
+ +
+
+

If the above plugins are installed, you can additionally install the latest version of this sonar-devon4j-plugin from the marketplace as well.

+
+
+
+
+

.3. Configuration

+
+

Add a file called architecture.json to the toplevel directory of your project and configure your components and their dependencies. You should commit this file to your version control system (e.g. git). The following example shows an architecture definition for the my-thai-star sample application:

+
+
+
+
{
+  "architecture": {
+    "components": [
+        {"name":"bookingmanagement","dependencies":["ordermanagement","usermanagement","mailservice"]},
+        {"name":"dishmanagement","dependencies":["imagemanagement"]},
+        {"name":"imagemanagement","dependencies":[]},
+        {"name":"ordermanagement","dependencies":["dishmanagement"]},
+        {"name":"usermanagement","dependencies":[]},
+        {"name":"mailservice","dependencies":[]}
+      ]
+  }
+}
+
+
+
+

As you can see all you need to do is declare the components of your application with their allowed dependencies. In case you need dependencies to other devonfw apps (microservices) you can also add them as dependencies with qualified packages (e.g. com.devonfw.sampleapp.samplecomponent). As the technical architecture is standardized by a blueprint in devonfw, you do not need any further configuration and everything can already be validated out of the box.

+
+
+
+

.4. Custom packages

+
+

If for some reason you can not or do not want to follow the devon4j packaging conventions and architecture mapping but still map your architecture to the code and want to use this plugin, you can do so. +All you need to do is to configure a custom mapping by adding a packages configuration to your architecture.json:

+
+
+
+
{
+  "architecture": {
+    "components": [ ... ],
+    "packages": {
+        "pattern": "([a-zA-Z0-9_]+\\.)+(persistence|core|service|gui)\\.([a-zA-Z0-9_]+)\\.(api|base|impl)(\\.[a-zA-Z0-9_]+)*",
+        "groups": [ "root", "layer", "component", "scope", "detail" ],
+        "mappings": {  "persistence": "dataaccess", "core": "logic", "gui": "client"}
+    }
+  }
+}
+
+
+
+

The packages object has the following properties:

+
+
+
    +
  • +

    The pattern property defines a regular expression your packages will match.

    +
  • +
  • +

    The groups property maps the groups of the regular expression from pattern in the order of occurrence to the corresponding architecture segments. In case your regular expression needs some extra brackets and therefore "artificial" groups, you can assign none to such groups.

    +
  • +
  • +

    The mappings maps segments from your custom architecture definition to the devon4j architecture terms.

    +
  • +
+
+
+ +
+
+

.5. Rules

+
+

This section describes all SonarQube rules provided by this plugin.

+
+
+
+

.6. Architectural Dependency Rules

+
+

The following image illustrates the devonfw architecture rules. The arrows show the allowed dependencies in green, discouraged dependencies in orange and forbidden dependencies in red. +Devon Architecture Rules

+
+
+

Within the same component you are always allowed to call code from the same layer and scope as well as the API from the next layer below. Discouraged and forbidden dependencies are each implemented by their own SonarQube rule. This gives you absolute flexibility to customize the severity of such architecture violations. These dependencies have a white circle with a unique label. For each label we link the corresponding rule to give you transparency and help you customize.

+
+
+ + + +
+

.10. Package Rules

+
+

Additionally, there is a generic rule that checks the devonfw packaging conventions:

+
+ +
+
+

.11. 3rd Party Rules

+
+

Further, there are rules checking the proper usage of third-party library code:

+
+ +
+ +
+

.13. Security Rules

+
+

As of version 3.2.1, we have started adding security-related rules to our plugin:

+
+ +
+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/sonar-devon4j-plugin/1.0/guide-sonar-qube-setup.html b/docs/sonar-devon4j-plugin/1.0/guide-sonar-qube-setup.html new file mode 100644 index 00000000..c1bf947b --- /dev/null +++ b/docs/sonar-devon4j-plugin/1.0/guide-sonar-qube-setup.html @@ -0,0 +1,370 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+
Table of Contents
+ +
+ +
+

SonarQube is the recommended tool for continuous quality analysis. Ideally you will get it out-of-the-box and pre-configured with the ProductionLine; otherwise you can set it up yourself, as described here.

+
+
+

Prerequisites

+
+
+

You need to have the following components installed on your server:

+
+
+
    +
  • +

    SonarQube version 6.7.1 (LTS) (or above)

    +
  • +
  • +

    Java/JDK version 1.8

    +
  • +
  • +

    NodeJS (latest LTS)

    +
  • +
  • +

    NPM (simply do npm install -g npm@latest)

    +
  • +
  • +

    TSLint (simply do npm install -g tslint)

    +
  • +
+
+
+

Inside SonarQube you need to install the following plugins (most of them via Marketplace as admin):

+
+
+ +
+
+
+
+

Quality Profiles

+
+
+

Import all quality profiles from here.

+
+
+

Each profile should be set as default. If you hit issues in your projects (missing findings, false-positives, etc.) feel free to adjust the config to your needs. However, be aware that we put a lot of energy into this config and the rules are there for a reason. Only drop rules or lower the severity of rules if this really makes sense for your project (and not just for being lazy and getting rid of warnings - instead consider the findings as valuable input).

+
+
+

Further the following rules are not activated in our config because they only make sense in specific situations and need to be configured according to your project:

+
+
+
    +
  • +

    squid:S1451 (Track lack of copyright and license headers)

    +
  • +
+
+
+
+
+

Quality Gates

+
+
+

Quality gates unfortunately can not be imported and exported.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/sonar-devon4j-plugin/1.0/guide-sonar-qube.html b/docs/sonar-devon4j-plugin/1.0/guide-sonar-qube.html new file mode 100644 index 00000000..8234243f --- /dev/null +++ b/docs/sonar-devon4j-plugin/1.0/guide-sonar-qube.html @@ -0,0 +1,316 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ + +
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/sonar-devon4j-plugin/1.0/index.html b/docs/sonar-devon4j-plugin/1.0/index.html new file mode 100644 index 00000000..20b99643 --- /dev/null +++ b/docs/sonar-devon4j-plugin/1.0/index.html @@ -0,0 +1,268 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Welcome to the sonar-devon4j-plugin wiki!

+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/sonar-devon4j-plugin/1.0/installation.html b/docs/sonar-devon4j-plugin/1.0/installation.html new file mode 100644 index 00000000..bf6ef06a --- /dev/null +++ b/docs/sonar-devon4j-plugin/1.0/installation.html @@ -0,0 +1,296 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

Installation

+
+
+

If you have SonarQube installed, you only need to go to its marketplace and install the according plugins. +Please note that the following other plugins are required as pre-requisite:

+
+
+ +
+
+

If the above plugins are installed, you can additionally install the latest version of this sonar-devon4j-plugin from the marketplace as well.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/sonar-devon4j-plugin/1.0/motivation.html b/docs/sonar-devon4j-plugin/1.0/motivation.html new file mode 100644 index 00000000..7363cf70 --- /dev/null +++ b/docs/sonar-devon4j-plugin/1.0/motivation.html @@ -0,0 +1,277 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ + +
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/sonar-devon4j-plugin/1.0/qualityprofile.html b/docs/sonar-devon4j-plugin/1.0/qualityprofile.html new file mode 100644 index 00000000..654469d2 --- /dev/null +++ b/docs/sonar-devon4j-plugin/1.0/qualityprofile.html @@ -0,0 +1,281 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+
+

devonfw Java Quality Profile

+
+
+

This plugin comes with the quality profile devonfw Java that is automatically added and maintained with every update. +The devonfw Java quality profile contains not only the previously described rules, but also Java based rules from other plugins that are vital to ensure optimal code quality (see prerequisites in installation). +For devon4j projects we recommend using this quality profile for your projects and ideally make it the default. +However, if you want to use this quality profile but need to make (small) changes to it, you should create a copy of it as SonarQube admin, make the desired changes and then make it default and/or assign your projects to it. +Please be aware that new versions of this plugin may automatically update and improve the devonfw Java quality profile. +So, if you copied it, you have to maintain your own "fork" manually.

+
+
+

For further information on quality profiles and their setup, please follow the SonarQube documentation on Quality Profiles.

+
+
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/docs/sonar-devon4j-plugin/1.0/rules.html b/docs/sonar-devon4j-plugin/1.0/rules.html new file mode 100644 index 00000000..2bf859d5 --- /dev/null +++ b/docs/sonar-devon4j-plugin/1.0/rules.html @@ -0,0 +1,513 @@ + + + + + + Untitled :: Devonfw.github.io Docs + + + + + +
+ +
+
+ +
+ +
+ +
+ +
+

Rules

+
+
+

This section describes all SonarQube rules provided by this plugin.

+
+
+
+
+

Architectural Dependency Rules

+
+
+

The following image illustrates the devonfw architecture rules. The arrows show the allowed dependencies in green, discouraged dependencies in orange and forbidden dependencies in red. +Devon Architecture Rules

+
+
+

Within the same component you are always allowed to call code from the same layer and scope as well as the API from the next layer below. Discouraged and forbidden dependencies are each implemented by their own SonarQube rule. This gives you absolute flexibility to customize the severity of such architecture violations. These dependencies have a white circle with a unique label. For each label we link the corresponding rule to give you transparency and help you customize.

+
+
+
+ + + +
+

Package Rules

+
+
+

Additionally, there is a generic rule that checks the devonfw packaging conventions:

+
+ +
+
+
+

3rd Party Rules

+ +
+ +
+

Security Rules

+
+
+

As of version 3.2.1, we have started adding security-related rules to our plugin:

+
+ +
+
+
+
+
+
+
+

This page was built using the Antora default UI.

+

The source code for this UI is licensed under the terms of the MPL-2.0 license.

+
+ + + + diff --git a/index.html b/index.html index c90832fd..ba36b8fd 100644 --- a/index.html +++ b/index.html @@ -1,8 +1,8 @@ - - + + Redirect Notice

Redirect Notice

-

The page you requested has been relocated to component-b/2.0/index.html.

+

The page you requested has been relocated to docs/1.0/index.html.